path: root/third_party/python
author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-28 14:29:10 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-28 14:29:10 +0000
commit     2aa4a82499d4becd2284cdb482213d541b8804dd (patch)
tree       b80bf8bf13c3766139fbacc530efd0dd9d54394c /third_party/python
parent     Initial commit. (diff)
download   firefox-2aa4a82499d4becd2284cdb482213d541b8804dd.tar.xz
           firefox-2aa4a82499d4becd2284cdb482213d541b8804dd.zip
Adding upstream version 86.0.1. (refs: upstream/86.0.1, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/python'); each entry lists the file mode, the file path, and the number of lines added (byte size for binary files)
-rw-r--r--third_party/python/Click/CHANGES.rst635
-rw-r--r--third_party/python/Click/CONTRIBUTING.rst61
-rw-r--r--third_party/python/Click/LICENSE.rst39
-rw-r--r--third_party/python/Click/MANIFEST.in11
-rw-r--r--third_party/python/Click/PKG-INFO119
-rw-r--r--third_party/python/Click/README.rst91
-rw-r--r--third_party/python/Click/artwork/logo.svg75
-rw-r--r--third_party/python/Click/click/__init__.py97
-rw-r--r--third_party/python/Click/click/_bashcomplete.py293
-rw-r--r--third_party/python/Click/click/_compat.py703
-rw-r--r--third_party/python/Click/click/_termui_impl.py621
-rw-r--r--third_party/python/Click/click/_textwrap.py38
-rw-r--r--third_party/python/Click/click/_unicodefun.py125
-rw-r--r--third_party/python/Click/click/_winconsole.py307
-rw-r--r--third_party/python/Click/click/core.py1856
-rw-r--r--third_party/python/Click/click/decorators.py311
-rw-r--r--third_party/python/Click/click/exceptions.py235
-rw-r--r--third_party/python/Click/click/formatting.py256
-rw-r--r--third_party/python/Click/click/globals.py48
-rw-r--r--third_party/python/Click/click/parser.py427
-rw-r--r--third_party/python/Click/click/termui.py606
-rw-r--r--third_party/python/Click/click/testing.py374
-rw-r--r--third_party/python/Click/click/types.py668
-rw-r--r--third_party/python/Click/click/utils.py440
-rw-r--r--third_party/python/Click/examples/README12
-rw-r--r--third_party/python/Click/examples/aliases/README17
-rw-r--r--third_party/python/Click/examples/aliases/aliases.ini2
-rw-r--r--third_party/python/Click/examples/aliases/aliases.py111
-rw-r--r--third_party/python/Click/examples/aliases/setup.py15
-rw-r--r--third_party/python/Click/examples/bashcompletion/README12
-rw-r--r--third_party/python/Click/examples/bashcompletion/bashcompletion.py45
-rw-r--r--third_party/python/Click/examples/bashcompletion/setup.py15
-rw-r--r--third_party/python/Click/examples/colors/README11
-rw-r--r--third_party/python/Click/examples/colors/colors.py28
-rw-r--r--third_party/python/Click/examples/colors/setup.py17
-rw-r--r--third_party/python/Click/examples/complex/README16
-rw-r--r--third_party/python/Click/examples/complex/complex/__init__.py0
-rw-r--r--third_party/python/Click/examples/complex/complex/cli.py65
-rw-r--r--third_party/python/Click/examples/complex/complex/commands/__init__.py0
-rw-r--r--third_party/python/Click/examples/complex/complex/commands/cmd_init.py13
-rw-r--r--third_party/python/Click/examples/complex/complex/commands/cmd_status.py10
-rw-r--r--third_party/python/Click/examples/complex/setup.py15
-rw-r--r--third_party/python/Click/examples/imagepipe/.gitignore1
-rw-r--r--third_party/python/Click/examples/imagepipe/README13
-rw-r--r--third_party/python/Click/examples/imagepipe/example01.jpgbin0 -> 51677 bytes
-rw-r--r--third_party/python/Click/examples/imagepipe/example02.jpgbin0 -> 39106 bytes
-rw-r--r--third_party/python/Click/examples/imagepipe/imagepipe.py266
-rw-r--r--third_party/python/Click/examples/imagepipe/setup.py16
-rw-r--r--third_party/python/Click/examples/inout/README10
-rw-r--r--third_party/python/Click/examples/inout/inout.py30
-rw-r--r--third_party/python/Click/examples/inout/setup.py15
-rw-r--r--third_party/python/Click/examples/naval/README14
-rw-r--r--third_party/python/Click/examples/naval/naval.py70
-rw-r--r--third_party/python/Click/examples/naval/setup.py15
-rw-r--r--third_party/python/Click/examples/repo/README9
-rw-r--r--third_party/python/Click/examples/repo/repo.py151
-rw-r--r--third_party/python/Click/examples/repo/setup.py15
-rw-r--r--third_party/python/Click/examples/termui/README9
-rw-r--r--third_party/python/Click/examples/termui/setup.py17
-rw-r--r--third_party/python/Click/examples/termui/termui.py156
-rw-r--r--third_party/python/Click/examples/validation/README12
-rw-r--r--third_party/python/Click/examples/validation/setup.py15
-rw-r--r--third_party/python/Click/examples/validation/validation.py44
-rw-r--r--third_party/python/Click/setup.cfg25
-rw-r--r--third_party/python/Click/setup.py44
-rw-r--r--third_party/python/Click/tox.ini39
-rw-r--r--third_party/python/Jinja2/CHANGES.rst774
-rw-r--r--third_party/python/Jinja2/LICENSE.rst28
-rw-r--r--third_party/python/Jinja2/MANIFEST.in9
-rw-r--r--third_party/python/Jinja2/PKG-INFO102
-rw-r--r--third_party/python/Jinja2/README.rst66
-rw-r--r--third_party/python/Jinja2/artwork/jinjalogo.svg132
-rw-r--r--third_party/python/Jinja2/examples/basic/cycle.py18
-rw-r--r--third_party/python/Jinja2/examples/basic/debugger.py8
-rw-r--r--third_party/python/Jinja2/examples/basic/inheritance.py15
-rw-r--r--third_party/python/Jinja2/examples/basic/templates/broken.html6
-rw-r--r--third_party/python/Jinja2/examples/basic/templates/subbroken.html3
-rw-r--r--third_party/python/Jinja2/examples/basic/test.py31
-rw-r--r--third_party/python/Jinja2/examples/basic/test_filter_and_linestatements.py29
-rw-r--r--third_party/python/Jinja2/examples/basic/test_loop_filter.py15
-rw-r--r--third_party/python/Jinja2/examples/basic/translate.py20
-rw-r--r--third_party/python/Jinja2/ext/Vim/jinja.vim138
-rw-r--r--third_party/python/Jinja2/setup.cfg39
-rw-r--r--third_party/python/Jinja2/setup.py56
-rw-r--r--third_party/python/Jinja2/src/jinja2/__init__.py44
-rw-r--r--third_party/python/Jinja2/src/jinja2/_compat.py132
-rw-r--r--third_party/python/Jinja2/src/jinja2/_identifier.py6
-rw-r--r--third_party/python/Jinja2/src/jinja2/asyncfilters.py158
-rw-r--r--third_party/python/Jinja2/src/jinja2/asyncsupport.py264
-rw-r--r--third_party/python/Jinja2/src/jinja2/bccache.py350
-rw-r--r--third_party/python/Jinja2/src/jinja2/compiler.py1843
-rw-r--r--third_party/python/Jinja2/src/jinja2/constants.py21
-rw-r--r--third_party/python/Jinja2/src/jinja2/debug.py268
-rw-r--r--third_party/python/Jinja2/src/jinja2/defaults.py44
-rw-r--r--third_party/python/Jinja2/src/jinja2/environment.py1362
-rw-r--r--third_party/python/Jinja2/src/jinja2/exceptions.py177
-rw-r--r--third_party/python/Jinja2/src/jinja2/ext.py704
-rw-r--r--third_party/python/Jinja2/src/jinja2/filters.py1382
-rw-r--r--third_party/python/Jinja2/src/jinja2/idtracking.py290
-rw-r--r--third_party/python/Jinja2/src/jinja2/lexer.py848
-rw-r--r--third_party/python/Jinja2/src/jinja2/loaders.py504
-rw-r--r--third_party/python/Jinja2/src/jinja2/meta.py101
-rw-r--r--third_party/python/Jinja2/src/jinja2/nativetypes.py94
-rw-r--r--third_party/python/Jinja2/src/jinja2/nodes.py1088
-rw-r--r--third_party/python/Jinja2/src/jinja2/optimizer.py41
-rw-r--r--third_party/python/Jinja2/src/jinja2/parser.py939
-rw-r--r--third_party/python/Jinja2/src/jinja2/runtime.py1011
-rw-r--r--third_party/python/Jinja2/src/jinja2/sandbox.py510
-rw-r--r--third_party/python/Jinja2/src/jinja2/tests.py215
-rw-r--r--third_party/python/Jinja2/src/jinja2/utils.py732
-rw-r--r--third_party/python/Jinja2/src/jinja2/visitor.py81
-rw-r--r--third_party/python/Jinja2/tox.ini20
-rw-r--r--third_party/python/MarkupSafe/CHANGES.rst97
-rw-r--r--third_party/python/MarkupSafe/LICENSE.rst28
-rw-r--r--third_party/python/MarkupSafe/MANIFEST.in8
-rw-r--r--third_party/python/MarkupSafe/PKG-INFO101
-rw-r--r--third_party/python/MarkupSafe/README.rst69
-rw-r--r--third_party/python/MarkupSafe/setup.cfg27
-rw-r--r--third_party/python/MarkupSafe/setup.py125
-rw-r--r--third_party/python/MarkupSafe/src/markupsafe/__init__.py327
-rw-r--r--third_party/python/MarkupSafe/src/markupsafe/_compat.py33
-rw-r--r--third_party/python/MarkupSafe/src/markupsafe/_constants.py264
-rw-r--r--third_party/python/MarkupSafe/src/markupsafe/_native.py69
-rw-r--r--third_party/python/MarkupSafe/src/markupsafe/_speedups.c423
-rw-r--r--third_party/python/MarkupSafe/tox.ini44
-rw-r--r--third_party/python/PyYAML/CHANGES242
-rw-r--r--third_party/python/PyYAML/LICENSE20
-rw-r--r--third_party/python/PyYAML/PKG-INFO38
-rw-r--r--third_party/python/PyYAML/README43
-rw-r--r--third_party/python/PyYAML/examples/pygments-lexer/example.yaml302
-rw-r--r--third_party/python/PyYAML/examples/pygments-lexer/yaml.py431
-rw-r--r--third_party/python/PyYAML/examples/yaml-highlight/yaml_hl.cfg115
-rwxr-xr-xthird_party/python/PyYAML/examples/yaml-highlight/yaml_hl.py114
-rw-r--r--third_party/python/PyYAML/ext/_yaml.c28743
-rw-r--r--third_party/python/PyYAML/ext/_yaml.h23
-rw-r--r--third_party/python/PyYAML/ext/_yaml.pxd251
-rw-r--r--third_party/python/PyYAML/ext/_yaml.pyx1527
-rw-r--r--third_party/python/PyYAML/lib/yaml/__init__.py431
-rw-r--r--third_party/python/PyYAML/lib/yaml/composer.py139
-rw-r--r--third_party/python/PyYAML/lib/yaml/constructor.py760
-rw-r--r--third_party/python/PyYAML/lib/yaml/cyaml.py101
-rw-r--r--third_party/python/PyYAML/lib/yaml/dumper.py62
-rw-r--r--third_party/python/PyYAML/lib/yaml/emitter.py1144
-rw-r--r--third_party/python/PyYAML/lib/yaml/error.py75
-rw-r--r--third_party/python/PyYAML/lib/yaml/events.py86
-rw-r--r--third_party/python/PyYAML/lib/yaml/loader.py63
-rw-r--r--third_party/python/PyYAML/lib/yaml/nodes.py49
-rw-r--r--third_party/python/PyYAML/lib/yaml/parser.py589
-rw-r--r--third_party/python/PyYAML/lib/yaml/reader.py188
-rw-r--r--third_party/python/PyYAML/lib/yaml/representer.py489
-rw-r--r--third_party/python/PyYAML/lib/yaml/resolver.py227
-rw-r--r--third_party/python/PyYAML/lib/yaml/scanner.py1444
-rw-r--r--third_party/python/PyYAML/lib/yaml/serializer.py111
-rw-r--r--third_party/python/PyYAML/lib/yaml/tokens.py104
-rw-r--r--third_party/python/PyYAML/lib3/yaml/__init__.py427
-rw-r--r--third_party/python/PyYAML/lib3/yaml/composer.py139
-rw-r--r--third_party/python/PyYAML/lib3/yaml/constructor.py748
-rw-r--r--third_party/python/PyYAML/lib3/yaml/cyaml.py101
-rw-r--r--third_party/python/PyYAML/lib3/yaml/dumper.py62
-rw-r--r--third_party/python/PyYAML/lib3/yaml/emitter.py1137
-rw-r--r--third_party/python/PyYAML/lib3/yaml/error.py75
-rw-r--r--third_party/python/PyYAML/lib3/yaml/events.py86
-rw-r--r--third_party/python/PyYAML/lib3/yaml/loader.py63
-rw-r--r--third_party/python/PyYAML/lib3/yaml/nodes.py49
-rw-r--r--third_party/python/PyYAML/lib3/yaml/parser.py589
-rw-r--r--third_party/python/PyYAML/lib3/yaml/reader.py185
-rw-r--r--third_party/python/PyYAML/lib3/yaml/representer.py389
-rw-r--r--third_party/python/PyYAML/lib3/yaml/resolver.py227
-rw-r--r--third_party/python/PyYAML/lib3/yaml/scanner.py1435
-rw-r--r--third_party/python/PyYAML/lib3/yaml/serializer.py111
-rw-r--r--third_party/python/PyYAML/lib3/yaml/tokens.py104
-rw-r--r--third_party/python/PyYAML/setup.cfg25
-rw-r--r--third_party/python/PyYAML/setup.py315
-rw-r--r--third_party/python/appdirs/.gitignore8
-rw-r--r--third_party/python/appdirs/.travis.yml10
-rw-r--r--third_party/python/appdirs/CHANGES.rst93
-rw-r--r--third_party/python/appdirs/Dockerfile13
-rw-r--r--third_party/python/appdirs/HACKING.md16
-rw-r--r--third_party/python/appdirs/LICENSE.txt23
-rw-r--r--third_party/python/appdirs/MANIFEST.in5
-rw-r--r--third_party/python/appdirs/PKG-INFO262
-rw-r--r--third_party/python/appdirs/README.rst138
-rw-r--r--third_party/python/appdirs/TODO.md1
-rw-r--r--third_party/python/appdirs/appdirs.py608
-rw-r--r--third_party/python/appdirs/setup.cfg7
-rw-r--r--third_party/python/appdirs/setup.py64
-rw-r--r--third_party/python/appdirs/tox.ini5
-rw-r--r--third_party/python/atomicwrites/LICENSE19
-rw-r--r--third_party/python/atomicwrites/MANIFEST.in6
-rw-r--r--third_party/python/atomicwrites/PKG-INFO112
-rw-r--r--third_party/python/atomicwrites/README.rst102
-rw-r--r--third_party/python/atomicwrites/atomicwrites/__init__.py201
-rw-r--r--third_party/python/atomicwrites/setup.cfg8
-rw-r--r--third_party/python/atomicwrites/setup.py27
-rw-r--r--third_party/python/attrs/.coveragerc13
-rw-r--r--third_party/python/attrs/.github/CODE_OF_CONDUCT.rst55
-rw-r--r--third_party/python/attrs/.github/CONTRIBUTING.rst250
-rw-r--r--third_party/python/attrs/.github/PULL_REQUEST_TEMPLATE.md18
-rw-r--r--third_party/python/attrs/.pre-commit-config.yaml33
-rw-r--r--third_party/python/attrs/.readthedocs.yml10
-rw-r--r--third_party/python/attrs/.travis.yml77
-rw-r--r--third_party/python/attrs/AUTHORS.rst11
-rw-r--r--third_party/python/attrs/CHANGELOG.rst559
-rw-r--r--third_party/python/attrs/LICENSE21
-rw-r--r--third_party/python/attrs/MANIFEST.in23
-rw-r--r--third_party/python/attrs/PKG-INFO231
-rw-r--r--third_party/python/attrs/README.rst138
-rw-r--r--third_party/python/attrs/changelog.d/towncrier_template.rst35
-rw-r--r--third_party/python/attrs/codecov.yml10
-rw-r--r--third_party/python/attrs/conftest.py41
-rw-r--r--third_party/python/attrs/pyproject.toml36
-rw-r--r--third_party/python/attrs/setup.cfg31
-rw-r--r--third_party/python/attrs/setup.py122
-rw-r--r--third_party/python/attrs/src/attr/__init__.py65
-rw-r--r--third_party/python/attrs/src/attr/__init__.pyi255
-rw-r--r--third_party/python/attrs/src/attr/_compat.py159
-rw-r--r--third_party/python/attrs/src/attr/_config.py23
-rw-r--r--third_party/python/attrs/src/attr/_funcs.py290
-rw-r--r--third_party/python/attrs/src/attr/_make.py2086
-rw-r--r--third_party/python/attrs/src/attr/converters.py78
-rw-r--r--third_party/python/attrs/src/attr/converters.pyi12
-rw-r--r--third_party/python/attrs/src/attr/exceptions.py57
-rw-r--r--third_party/python/attrs/src/attr/exceptions.pyi7
-rw-r--r--third_party/python/attrs/src/attr/filters.py52
-rw-r--r--third_party/python/attrs/src/attr/filters.pyi5
-rw-r--r--third_party/python/attrs/src/attr/py.typed0
-rw-r--r--third_party/python/attrs/src/attr/validators.py282
-rw-r--r--third_party/python/attrs/src/attr/validators.pyi24
-rw-r--r--third_party/python/attrs/tox.ini85
-rw-r--r--third_party/python/backports/shutil_which/__init__.py78
-rw-r--r--third_party/python/blessings/LICENSE19
-rw-r--r--third_party/python/blessings/MANIFEST.in3
-rw-r--r--third_party/python/blessings/PKG-INFO560
-rw-r--r--third_party/python/blessings/README.rst531
-rw-r--r--third_party/python/blessings/blessings/__init__.py556
-rw-r--r--third_party/python/blessings/blessings/tests.py269
-rw-r--r--third_party/python/blessings/setup.cfg5
-rw-r--r--third_party/python/blessings/setup.py49
-rw-r--r--third_party/python/blessings/tox.ini8
-rw-r--r--third_party/python/cbor2/.gitignore13
-rw-r--r--third_party/python/cbor2/.travis.yml28
-rw-r--r--third_party/python/cbor2/LICENSE.txt19
-rw-r--r--third_party/python/cbor2/PKG-INFO45
-rw-r--r--third_party/python/cbor2/README.rst24
-rw-r--r--third_party/python/cbor2/cbor2/__init__.py3
-rw-r--r--third_party/python/cbor2/cbor2/compat.py49
-rw-r--r--third_party/python/cbor2/cbor2/decoder.py411
-rw-r--r--third_party/python/cbor2/cbor2/encoder.py362
-rw-r--r--third_party/python/cbor2/cbor2/types.py55
-rw-r--r--third_party/python/cbor2/docs/conf.py33
-rw-r--r--third_party/python/cbor2/docs/customizing.rst132
-rw-r--r--third_party/python/cbor2/docs/index.rst15
-rw-r--r--third_party/python/cbor2/docs/modules/decoder.rst5
-rw-r--r--third_party/python/cbor2/docs/modules/encoder.rst5
-rw-r--r--third_party/python/cbor2/docs/modules/types.rst5
-rw-r--r--third_party/python/cbor2/docs/usage.rst80
-rw-r--r--third_party/python/cbor2/docs/versionhistory.rst73
-rw-r--r--third_party/python/cbor2/setup.cfg21
-rw-r--r--third_party/python/cbor2/setup.py43
-rw-r--r--third_party/python/cbor2/tests/test_decoder.py327
-rw-r--r--third_party/python/cbor2/tests/test_encoder.py260
-rw-r--r--third_party/python/cbor2/tests/test_types.py36
-rw-r--r--third_party/python/cbor2/tox.ini20
-rw-r--r--third_party/python/certifi/LICENSE21
-rw-r--r--third_party/python/certifi/MANIFEST.in1
-rw-r--r--third_party/python/certifi/PKG-INFO69
-rw-r--r--third_party/python/certifi/README.rst46
-rw-r--r--third_party/python/certifi/certifi/__init__.py3
-rw-r--r--third_party/python/certifi/certifi/__main__.py2
-rw-r--r--third_party/python/certifi/certifi/cacert.pem4400
-rw-r--r--third_party/python/certifi/certifi/core.py37
-rw-r--r--third_party/python/certifi/setup.cfg11
-rwxr-xr-xthird_party/python/certifi/setup.py67
-rw-r--r--third_party/python/compare-locales/PKG-INFO82
-rw-r--r--third_party/python/compare-locales/README.md56
-rw-r--r--third_party/python/compare-locales/compare_locales/__init__.py1
-rw-r--r--third_party/python/compare-locales/compare_locales/checks/__init__.py30
-rw-r--r--third_party/python/compare-locales/compare_locales/checks/android.py253
-rw-r--r--third_party/python/compare-locales/compare_locales/checks/base.py127
-rw-r--r--third_party/python/compare-locales/compare_locales/checks/dtd.py246
-rw-r--r--third_party/python/compare-locales/compare_locales/checks/fluent.py356
-rw-r--r--third_party/python/compare-locales/compare_locales/checks/properties.py173
-rw-r--r--third_party/python/compare-locales/compare_locales/commands.py205
-rw-r--r--third_party/python/compare-locales/compare_locales/compare/__init__.py91
-rw-r--r--third_party/python/compare-locales/compare_locales/compare/content.py307
-rw-r--r--third_party/python/compare-locales/compare_locales/compare/observer.py218
-rw-r--r--third_party/python/compare-locales/compare_locales/compare/utils.py140
-rw-r--r--third_party/python/compare-locales/compare_locales/integration_tests/__init__.py5
-rw-r--r--third_party/python/compare-locales/compare_locales/integration_tests/test_plurals.py52
-rw-r--r--third_party/python/compare-locales/compare_locales/keyedtuple.py58
-rw-r--r--third_party/python/compare-locales/compare_locales/lint/__init__.py0
-rw-r--r--third_party/python/compare-locales/compare_locales/lint/cli.py95
-rw-r--r--third_party/python/compare-locales/compare_locales/lint/linter.py123
-rw-r--r--third_party/python/compare-locales/compare_locales/lint/util.py40
-rw-r--r--third_party/python/compare-locales/compare_locales/merge.py143
-rw-r--r--third_party/python/compare-locales/compare_locales/mozpath.py155
-rw-r--r--third_party/python/compare-locales/compare_locales/parser/__init__.py83
-rw-r--r--third_party/python/compare-locales/compare_locales/parser/android.py305
-rw-r--r--third_party/python/compare-locales/compare_locales/parser/base.py451
-rw-r--r--third_party/python/compare-locales/compare_locales/parser/defines.py106
-rw-r--r--third_party/python/compare-locales/compare_locales/parser/dtd.py118
-rw-r--r--third_party/python/compare-locales/compare_locales/parser/fluent.py220
-rw-r--r--third_party/python/compare-locales/compare_locales/parser/ini.py58
-rw-r--r--third_party/python/compare-locales/compare_locales/parser/po.py127
-rw-r--r--third_party/python/compare-locales/compare_locales/parser/properties.py116
-rw-r--r--third_party/python/compare-locales/compare_locales/paths/__init__.py54
-rw-r--r--third_party/python/compare-locales/compare_locales/paths/configparser.py140
-rw-r--r--third_party/python/compare-locales/compare_locales/paths/files.py223
-rw-r--r--third_party/python/compare-locales/compare_locales/paths/ini.py227
-rw-r--r--third_party/python/compare-locales/compare_locales/paths/matcher.py472
-rw-r--r--third_party/python/compare-locales/compare_locales/paths/project.py265
-rw-r--r--third_party/python/compare-locales/compare_locales/plurals.py218
-rw-r--r--third_party/python/compare-locales/compare_locales/serializer.py137
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/__init__.py82
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/android/__init__.py0
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/android/test_checks.py344
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/android/test_merge.py82
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/android/test_parser.py128
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/data/bug121341.properties68
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/data/test.properties14
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/data/triple-license.dtd38
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/dtd/__init__.py0
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/dtd/test_checks.py335
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/dtd/test_merge.py133
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/dtd/test_parser.py271
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/fluent/__init__.py0
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/fluent/test_checks.py581
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/fluent/test_merge.py283
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/fluent/test_parser.py310
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/lint/__init__.py0
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/lint/test_linter.py97
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/lint/test_util.py91
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/merge/__init__.py0
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/merge/test_comments.py188
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/merge/test_messages.py93
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/merge/test_unknown.py22
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/merge/test_whitespace.py76
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/paths/__init__.py132
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/paths/test_configparser.py126
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/paths/test_files.py572
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/paths/test_ini.py90
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/paths/test_matcher.py500
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/paths/test_paths.py28
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/paths/test_project.py229
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/po/__init__.py0
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/po/test_parser.py139
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/properties/__init__.py0
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/properties/test_checks.py109
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/properties/test_merge.py68
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/properties/test_parser.py243
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/serializer/__init__.py34
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/serializer/test_android.py218
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/serializer/test_fluent.py79
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/serializer/test_properties.py106
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/test_apps.py168
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/test_checks.py89
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/test_compare.py229
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/test_defines.py251
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/test_ini.py223
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/test_keyedtuple.py54
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/test_merge.py1408
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/test_mozpath.py139
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/test_parser.py118
-rw-r--r--third_party/python/compare-locales/compare_locales/tests/test_util.py30
-rw-r--r--third_party/python/compare-locales/compare_locales/util.py11
-rw-r--r--third_party/python/compare-locales/setup.cfg7
-rwxr-xr-xthird_party/python/compare-locales/setup.py62
-rw-r--r--third_party/python/cookies/PKG-INFO109
-rw-r--r--third_party/python/cookies/README88
-rw-r--r--third_party/python/cookies/cookies.py1169
-rw-r--r--third_party/python/cookies/setup.cfg8
-rw-r--r--third_party/python/cookies/setup.py45
-rw-r--r--third_party/python/cookies/test_cookies.py2447
-rw-r--r--third_party/python/coverage/.editorconfig44
-rw-r--r--third_party/python/coverage/.readthedocs.yml22
-rw-r--r--third_party/python/coverage/.travis.yml52
-rw-r--r--third_party/python/coverage/CHANGES.rst2743
-rw-r--r--third_party/python/coverage/CONTRIBUTORS.txt136
-rw-r--r--third_party/python/coverage/LICENSE.txt177
-rw-r--r--third_party/python/coverage/MANIFEST.in49
-rw-r--r--third_party/python/coverage/Makefile162
-rw-r--r--third_party/python/coverage/NOTICE.txt14
-rw-r--r--third_party/python/coverage/PKG-INFO187
-rw-r--r--third_party/python/coverage/README.rst152
-rw-r--r--third_party/python/coverage/__main__.py12
-rw-r--r--third_party/python/coverage/appveyor.yml169
-rw-r--r--third_party/python/coverage/ci/README.txt1
-rw-r--r--third_party/python/coverage/ci/download_appveyor.py95
-rw-r--r--third_party/python/coverage/ci/install.ps1203
-rwxr-xr-xthird_party/python/coverage/ci/manylinux.sh60
-rw-r--r--third_party/python/coverage/ci/run_with_env.cmd91
-rw-r--r--third_party/python/coverage/ci/upload_relnotes.py122
-rw-r--r--third_party/python/coverage/coverage/__init__.py36
-rw-r--r--third_party/python/coverage/coverage/__main__.py8
-rw-r--r--third_party/python/coverage/coverage/annotate.py108
-rw-r--r--third_party/python/coverage/coverage/backunittest.py33
-rw-r--r--third_party/python/coverage/coverage/backward.py253
-rw-r--r--third_party/python/coverage/coverage/bytecode.py19
-rw-r--r--third_party/python/coverage/coverage/cmdline.py866
-rw-r--r--third_party/python/coverage/coverage/collector.py429
-rw-r--r--third_party/python/coverage/coverage/config.py555
-rw-r--r--third_party/python/coverage/coverage/context.py91
-rw-r--r--third_party/python/coverage/coverage/control.py1110
-rw-r--r--third_party/python/coverage/coverage/ctracer/datastack.c50
-rw-r--r--third_party/python/coverage/coverage/ctracer/datastack.h45
-rw-r--r--third_party/python/coverage/coverage/ctracer/filedisp.c85
-rw-r--r--third_party/python/coverage/coverage/ctracer/filedisp.h26
-rw-r--r--third_party/python/coverage/coverage/ctracer/module.c108
-rw-r--r--third_party/python/coverage/coverage/ctracer/stats.h31
-rw-r--r--third_party/python/coverage/coverage/ctracer/tracer.c1186
-rw-r--r--third_party/python/coverage/coverage/ctracer/tracer.h74
-rw-r--r--third_party/python/coverage/coverage/ctracer/util.h67
-rw-r--r--third_party/python/coverage/coverage/data.py124
-rw-r--r--third_party/python/coverage/coverage/debug.py406
-rw-r--r--third_party/python/coverage/coverage/disposition.py37
-rw-r--r--third_party/python/coverage/coverage/env.py99
-rw-r--r--third_party/python/coverage/coverage/execfile.py362
-rw-r--r--third_party/python/coverage/coverage/files.py432
-rw-r--r--third_party/python/coverage/coverage/fullcoverage/encodings.py60
-rw-r--r--third_party/python/coverage/coverage/html.py511
-rw-r--r--third_party/python/coverage/coverage/htmlfiles/coverage_html.js589
-rw-r--r--third_party/python/coverage/coverage/htmlfiles/index.html118
-rw-r--r--third_party/python/coverage/coverage/htmlfiles/jquery.ba-throttle-debounce.min.js9
-rw-r--r--third_party/python/coverage/coverage/htmlfiles/jquery.hotkeys.js99
-rw-r--r--third_party/python/coverage/coverage/htmlfiles/jquery.isonscreen.js53
-rw-r--r--third_party/python/coverage/coverage/htmlfiles/jquery.min.js4
-rw-r--r--third_party/python/coverage/coverage/htmlfiles/jquery.tablesorter.min.js2
-rw-r--r--third_party/python/coverage/coverage/htmlfiles/keybd_closed.pngbin0 -> 112 bytes
-rw-r--r--third_party/python/coverage/coverage/htmlfiles/keybd_open.pngbin0 -> 112 bytes
-rw-r--r--third_party/python/coverage/coverage/htmlfiles/pyfile.html112
-rw-r--r--third_party/python/coverage/coverage/htmlfiles/style.css124
-rw-r--r--third_party/python/coverage/coverage/htmlfiles/style.scss537
-rw-r--r--third_party/python/coverage/coverage/inorout.py469
-rw-r--r--third_party/python/coverage/coverage/jsonreport.py103
-rw-r--r--third_party/python/coverage/coverage/misc.py361
-rw-r--r--third_party/python/coverage/coverage/multiproc.py111
-rw-r--r--third_party/python/coverage/coverage/numbits.py163
-rw-r--r--third_party/python/coverage/coverage/optional.py68
-rw-r--r--third_party/python/coverage/coverage/parser.py1251
-rw-r--r--third_party/python/coverage/coverage/phystokens.py297
-rw-r--r--third_party/python/coverage/coverage/plugin.py533
-rw-r--r--third_party/python/coverage/coverage/plugin_support.py281
-rw-r--r--third_party/python/coverage/coverage/python.py249
-rw-r--r--third_party/python/coverage/coverage/pytracer.py245
-rw-r--r--third_party/python/coverage/coverage/report.py86
-rw-r--r--third_party/python/coverage/coverage/results.py346
-rw-r--r--third_party/python/coverage/coverage/sqldata.py1106
-rw-r--r--third_party/python/coverage/coverage/summary.py155
-rw-r--r--third_party/python/coverage/coverage/templite.py302
-rw-r--r--third_party/python/coverage/coverage/tomlconfig.py164
-rw-r--r--third_party/python/coverage/coverage/version.py33
-rw-r--r--third_party/python/coverage/coverage/xmlreport.py230
-rw-r--r--third_party/python/coverage/howto.txt122
-rw-r--r--third_party/python/coverage/igor.py395
-rw-r--r--third_party/python/coverage/metacov.ini88
-rw-r--r--third_party/python/coverage/pylintrc335
-rw-r--r--third_party/python/coverage/requirements/ci.pip8
-rw-r--r--third_party/python/coverage/requirements/dev.pip24
-rw-r--r--third_party/python/coverage/requirements/pytest.pip21
-rw-r--r--third_party/python/coverage/requirements/tox.pip7
-rw-r--r--third_party/python/coverage/requirements/wheel.pip7
-rw-r--r--third_party/python/coverage/setup.cfg19
-rw-r--r--third_party/python/coverage/setup.py217
-rw-r--r--third_party/python/coverage/tox.ini95
-rw-r--r--third_party/python/coverage/tox_wheels.ini21
-rw-r--r--third_party/python/cram/cram/__init__.py6
-rw-r--r--third_party/python/cram/cram/__main__.py10
-rw-r--r--third_party/python/cram/cram/_cli.py134
-rw-r--r--third_party/python/cram/cram/_diff.py158
-rw-r--r--third_party/python/cram/cram/_encoding.py106
-rw-r--r--third_party/python/cram/cram/_main.py211
-rw-r--r--third_party/python/cram/cram/_process.py54
-rw-r--r--third_party/python/cram/cram/_run.py77
-rw-r--r--third_party/python/cram/cram/_test.py230
-rw-r--r--third_party/python/cram/cram/_xunit.py173
-rw-r--r--third_party/python/diskcache/LICENSE12
-rw-r--r--third_party/python/diskcache/MANIFEST.in1
-rw-r--r--third_party/python/diskcache/PKG-INFO428
-rw-r--r--third_party/python/diskcache/README.rst404
-rw-r--r--third_party/python/diskcache/diskcache/__init__.py51
-rw-r--r--third_party/python/diskcache/diskcache/cli.py1
-rw-r--r--third_party/python/diskcache/diskcache/core.py2481
-rw-r--r--third_party/python/diskcache/diskcache/djangocache.py433
-rw-r--r--third_party/python/diskcache/diskcache/fanout.py677
-rw-r--r--third_party/python/diskcache/diskcache/persistent.py1403
-rw-r--r--third_party/python/diskcache/diskcache/recipes.py437
-rw-r--r--third_party/python/diskcache/setup.cfg4
-rw-r--r--third_party/python/diskcache/setup.py51
-rw-r--r--third_party/python/distro/CHANGELOG.md147
-rw-r--r--third_party/python/distro/CONTRIBUTING.md54
-rw-r--r--third_party/python/distro/CONTRIBUTORS.md13
-rw-r--r--third_party/python/distro/LICENSE202
-rw-r--r--third_party/python/distro/MANIFEST.in12
-rw-r--r--third_party/python/distro/Makefile145
-rw-r--r--third_party/python/distro/PKG-INFO168
-rw-r--r--third_party/python/distro/README.md140
-rw-r--r--third_party/python/distro/dev-requirements.txt3
-rwxr-xr-xthird_party/python/distro/distro.py1216
-rwxr-xr-xthird_party/python/distro/query_local_distro.py45
-rw-r--r--third_party/python/distro/setup.cfg10
-rw-r--r--third_party/python/distro/setup.py67
-rw-r--r--third_party/python/dlmanager/README.rst59
-rwxr-xr-xthird_party/python/dlmanager/check.py67
-rw-r--r--third_party/python/dlmanager/dlmanager/__init__.py18
-rw-r--r--third_party/python/dlmanager/dlmanager/fs.py116
-rw-r--r--third_party/python/dlmanager/dlmanager/manager.py323
-rw-r--r--third_party/python/dlmanager/dlmanager/persist_limit.py65
-rw-r--r--third_party/python/dlmanager/doc/Makefile216
-rw-r--r--third_party/python/dlmanager/doc/api.rst25
-rw-r--r--third_party/python/dlmanager/doc/conf.py289
-rw-r--r--third_party/python/dlmanager/doc/index.rst26
-rw-r--r--third_party/python/dlmanager/doc/make.bat263
-rw-r--r--third_party/python/dlmanager/examples/dl_progressbar.py41
-rw-r--r--third_party/python/dlmanager/examples/dl_tqdm.py45
-rw-r--r--third_party/python/dlmanager/requirements.txt2
-rw-r--r--third_party/python/dlmanager/setup.cfg2
-rw-r--r--third_party/python/dlmanager/setup.py60
-rw-r--r--third_party/python/dlmanager/test-requirements.txt7
-rw-r--r--third_party/python/dlmanager/tests/__init__.py0
-rw-r--r--third_party/python/dlmanager/tests/test_manager.py251
-rw-r--r--third_party/python/dlmanager/tests/test_persist_limit.py56
-rw-r--r--third_party/python/ecdsa/LICENSE24
-rw-r--r--third_party/python/ecdsa/MANIFEST.in3
-rw-r--r--third_party/python/ecdsa/NEWS213
-rw-r--r--third_party/python/ecdsa/PKG-INFO620
-rw-r--r--third_party/python/ecdsa/README.md595
-rw-r--r--third_party/python/ecdsa/setup.cfg15
-rwxr-xr-xthird_party/python/ecdsa/setup.py48
-rw-r--r--third_party/python/ecdsa/src/ecdsa/__init__.py25
-rw-r--r--third_party/python/ecdsa/src/ecdsa/_compat.py39
-rw-r--r--third_party/python/ecdsa/src/ecdsa/_rwlock.py85
-rw-r--r--third_party/python/ecdsa/src/ecdsa/_version.py21
-rw-r--r--third_party/python/ecdsa/src/ecdsa/curves.py128
-rw-r--r--third_party/python/ecdsa/src/ecdsa/der.py384
-rw-r--r--third_party/python/ecdsa/src/ecdsa/ecdh.py306
-rw-r--r--third_party/python/ecdsa/src/ecdsa/ecdsa.py446
-rw-r--r--third_party/python/ecdsa/src/ecdsa/ellipticcurve.py780
-rw-r--r--third_party/python/ecdsa/src/ecdsa/keys.py1219
-rw-r--r--third_party/python/ecdsa/src/ecdsa/numbertheory.py600
-rw-r--r--third_party/python/ecdsa/src/ecdsa/rfc6979.py107
-rw-r--r--third_party/python/ecdsa/src/ecdsa/test_der.py384
-rw-r--r--third_party/python/ecdsa/src/ecdsa/test_ecdh.py350
-rw-r--r--third_party/python/ecdsa/src/ecdsa/test_ecdsa.py448
-rw-r--r--third_party/python/ecdsa/src/ecdsa/test_ellipticcurve.py188
-rw-r--r--third_party/python/ecdsa/src/ecdsa/test_jacobi.py365
-rw-r--r--third_party/python/ecdsa/src/ecdsa/test_keys.py373
-rw-r--r--third_party/python/ecdsa/src/ecdsa/test_malformed_sigs.py306
-rw-r--r--third_party/python/ecdsa/src/ecdsa/test_numbertheory.py275
-rw-r--r--third_party/python/ecdsa/src/ecdsa/test_pyecdsa.py1445
-rw-r--r--third_party/python/ecdsa/src/ecdsa/test_rw_lock.py175
-rw-r--r--third_party/python/ecdsa/src/ecdsa/util.py401
-rw-r--r--third_party/python/ecdsa/versioneer.py1817
-rw-r--r--third_party/python/enum34/MANIFEST.in9
-rw-r--r--third_party/python/enum34/PKG-INFO62
-rw-r--r--third_party/python/enum34/README3
-rw-r--r--third_party/python/enum34/enum/LICENSE32
-rw-r--r--third_party/python/enum34/enum/README3
-rw-r--r--third_party/python/enum34/enum/__init__.py837
-rw-r--r--third_party/python/enum34/enum/doc/enum.pdf2237
-rw-r--r--third_party/python/enum34/enum/doc/enum.rst735
-rw-r--r--third_party/python/enum34/enum/test.py1820
-rw-r--r--third_party/python/enum34/setup.cfg5
-rw-r--r--third_party/python/enum34/setup.py99
-rw-r--r--third_party/python/esprima/PKG-INFO143
-rw-r--r--third_party/python/esprima/README117
-rw-r--r--third_party/python/esprima/esprima/__init__.py29
-rw-r--r--third_party/python/esprima/esprima/__main__.py105
-rw-r--r--third_party/python/esprima/esprima/character.py125
-rw-r--r--third_party/python/esprima/esprima/comment_handler.py176
-rw-r--r--third_party/python/esprima/esprima/compat.py72
-rw-r--r--third_party/python/esprima/esprima/error_handler.py74
-rw-r--r--third_party/python/esprima/esprima/esprima.py125
-rw-r--r--third_party/python/esprima/esprima/jsx_nodes.py100
-rw-r--r--third_party/python/esprima/esprima/jsx_parser.py584
-rw-r--r--third_party/python/esprima/esprima/jsx_syntax.py38
-rw-r--r--third_party/python/esprima/esprima/messages.py90
-rw-r--r--third_party/python/esprima/esprima/nodes.py620
-rw-r--r--third_party/python/esprima/esprima/objects.py46
-rw-r--r--third_party/python/esprima/esprima/parser.py3104
-rw-r--r--third_party/python/esprima/esprima/scanner.py1189
-rw-r--r--third_party/python/esprima/esprima/syntax.py100
-rw-r--r--third_party/python/esprima/esprima/token.py50
-rw-r--r--third_party/python/esprima/esprima/tokenizer.py193
-rw-r--r--third_party/python/esprima/esprima/utils.py40
-rw-r--r--third_party/python/esprima/esprima/visitor.py288
-rw-r--r--third_party/python/esprima/esprima/xhtml_entities.py281
-rw-r--r--third_party/python/esprima/setup.cfg4
-rw-r--r--third_party/python/esprima/setup.py55
-rw-r--r--third_party/python/fluent.migrate/PKG-INFO62
-rw-r--r--third_party/python/fluent.migrate/README.md44
-rw-r--r--third_party/python/fluent.migrate/fluent/__init__.py1
-rw-r--r--third_party/python/fluent.migrate/fluent/migrate/__init__.py5
-rw-r--r--third_party/python/fluent.migrate/fluent/migrate/_context.py333
-rw-r--r--third_party/python/fluent.migrate/fluent/migrate/blame.py84
-rw-r--r--third_party/python/fluent.migrate/fluent/migrate/changesets.py59
-rw-r--r--third_party/python/fluent.migrate/fluent/migrate/context.py152
-rw-r--r--third_party/python/fluent.migrate/fluent/migrate/errors.py22
-rw-r--r--third_party/python/fluent.migrate/fluent/migrate/evaluator.py28
-rw-r--r--third_party/python/fluent.migrate/fluent/migrate/helpers.py150
-rw-r--r--third_party/python/fluent.migrate/fluent/migrate/merge.py59
-rwxr-xr-xthird_party/python/fluent.migrate/fluent/migrate/tool.py184
-rw-r--r--third_party/python/fluent.migrate/fluent/migrate/transforms.py572
-rw-r--r--third_party/python/fluent.migrate/fluent/migrate/util.py114
-rw-r--r--third_party/python/fluent.migrate/fluent/migrate/validator.py339
-rw-r--r--third_party/python/fluent.migrate/setup.cfg16
-rw-r--r--third_party/python/fluent.migrate/setup.py34
-rw-r--r--third_party/python/fluent.syntax/PKG-INFO39
-rw-r--r--third_party/python/fluent.syntax/README.rst22
-rw-r--r--third_party/python/fluent.syntax/fluent/__init__.py1
-rw-r--r--third_party/python/fluent.syntax/fluent/syntax/__init__.py16
-rw-r--r--third_party/python/fluent.syntax/fluent/syntax/ast.py349
-rw-r--r--third_party/python/fluent.syntax/fluent/syntax/errors.py70
-rw-r--r--third_party/python/fluent.syntax/fluent/syntax/parser.py683
-rw-r--r--third_party/python/fluent.syntax/fluent/syntax/serializer.py233
-rw-r--r--third_party/python/fluent.syntax/fluent/syntax/stream.py282
-rw-r--r--third_party/python/fluent.syntax/fluent/syntax/visitor.py65
-rw-r--r--third_party/python/fluent.syntax/setup.cfg19
-rwxr-xr-xthird_party/python/fluent.syntax/setup.py29
-rw-r--r--third_party/python/funcsigs/CHANGELOG24
-rw-r--r--third_party/python/funcsigs/LICENSE13
-rw-r--r--third_party/python/funcsigs/MANIFEST.in7
-rw-r--r--third_party/python/funcsigs/PKG-INFO378
-rw-r--r--third_party/python/funcsigs/README.rst353
-rw-r--r--third_party/python/funcsigs/docs/Makefile153
-rw-r--r--third_party/python/funcsigs/docs/_templates/page.html9
-rw-r--r--third_party/python/funcsigs/docs/conf.py251
-rw-r--r--third_party/python/funcsigs/docs/index.rst353
-rw-r--r--third_party/python/funcsigs/funcsigs/__init__.py829
-rw-r--r--third_party/python/funcsigs/funcsigs/version.py1
-rw-r--r--third_party/python/funcsigs/setup.cfg8
-rw-r--r--third_party/python/funcsigs/setup.py52
-rw-r--r--third_party/python/funcsigs/tests/__init__.py0
-rw-r--r--third_party/python/funcsigs/tests/test_formatannotation.py17
-rw-r--r--third_party/python/funcsigs/tests/test_funcsigs.py91
-rw-r--r--third_party/python/funcsigs/tests/test_inspect.py1002
-rw-r--r--third_party/python/futures/CHANGES107
-rw-r--r--third_party/python/futures/LICENSE21
-rw-r--r--third_party/python/futures/MANIFEST.in5
-rw-r--r--third_party/python/futures/PKG-INFO16
-rw-r--r--third_party/python/futures/concurrent/__init__.py3
-rw-r--r--third_party/python/futures/concurrent/futures/__init__.py23
-rw-r--r--third_party/python/futures/concurrent/futures/_base.py607
-rw-r--r--third_party/python/futures/concurrent/futures/process.py359
-rw-r--r--third_party/python/futures/concurrent/futures/thread.py134
-rw-r--r--third_party/python/futures/crawl.py74
-rw-r--r--third_party/python/futures/docs/Makefile88
-rw-r--r--third_party/python/futures/docs/conf.py194
-rw-r--r--third_party/python/futures/docs/index.rst347
-rw-r--r--third_party/python/futures/docs/make.bat112
-rw-r--r--third_party/python/futures/futures.egg-info/PKG-INFO16
-rw-r--r--third_party/python/futures/futures.egg-info/SOURCES.txt24
-rw-r--r--third_party/python/futures/futures.egg-info/dependency_links.txt1
-rw-r--r--third_party/python/futures/futures.egg-info/not-zip-safe1
-rw-r--r--third_party/python/futures/futures.egg-info/pbr.json1
-rw-r--r--third_party/python/futures/futures.egg-info/top_level.txt1
-rw-r--r--third_party/python/futures/primes.py50
-rw-r--r--third_party/python/futures/setup.cfg12
-rwxr-xr-xthird_party/python/futures/setup.py34
-rw-r--r--third_party/python/futures/test_futures.py727
-rw-r--r--third_party/python/futures/tox.ini8
-rw-r--r--third_party/python/glean_parser/.circleci/config.yml201
-rw-r--r--third_party/python/glean_parser/.editorconfig21
-rw-r--r--third_party/python/glean_parser/.flake82
-rw-r--r--third_party/python/glean_parser/.github/ISSUE_TEMPLATE.md15
-rw-r--r--third_party/python/glean_parser/.github/dependabot.yml6
-rw-r--r--third_party/python/glean_parser/.github/pull_request_template.md8
-rw-r--r--third_party/python/glean_parser/.gitignore110
-rw-r--r--third_party/python/glean_parser/.swiftlint.yml6
-rw-r--r--third_party/python/glean_parser/AUTHORS.rst23
-rw-r--r--third_party/python/glean_parser/CODE_OF_CONDUCT.md15
-rw-r--r--third_party/python/glean_parser/CONTRIBUTING.rst160
-rw-r--r--third_party/python/glean_parser/HISTORY.rst415
-rw-r--r--third_party/python/glean_parser/LICENSE373
-rw-r--r--third_party/python/glean_parser/MANIFEST.in14
-rw-r--r--third_party/python/glean_parser/Makefile75
-rw-r--r--third_party/python/glean_parser/PKG-INFO490
-rw-r--r--third_party/python/glean_parser/README.rst54
-rw-r--r--third_party/python/glean_parser/glean_parser/__init__.py18
-rw-r--r--third_party/python/glean_parser/glean_parser/__main__.py169
-rw-r--r--third_party/python/glean_parser/glean_parser/csharp.py153
-rw-r--r--third_party/python/glean_parser/glean_parser/kotlin.py261
-rw-r--r--third_party/python/glean_parser/glean_parser/lint.py442
-rw-r--r--third_party/python/glean_parser/glean_parser/markdown.py244
-rw-r--r--third_party/python/glean_parser/glean_parser/metrics.py356
-rw-r--r--third_party/python/glean_parser/glean_parser/parser.py373
-rw-r--r--third_party/python/glean_parser/glean_parser/pings.py81
-rw-r--r--third_party/python/glean_parser/glean_parser/schemas/metrics.1-0-0.schema.yaml600
-rw-r--r--third_party/python/glean_parser/glean_parser/schemas/pings.1-0-0.schema.yaml141
-rw-r--r--third_party/python/glean_parser/glean_parser/swift.py176
-rw-r--r--third_party/python/glean_parser/glean_parser/templates/csharp.jinja299
-rw-r--r--third_party/python/glean_parser/glean_parser/templates/kotlin.geckoview.jinja2124
-rw-r--r--third_party/python/glean_parser/glean_parser/templates/kotlin.jinja281
-rw-r--r--third_party/python/glean_parser/glean_parser/templates/markdown.jinja293
-rw-r--r--third_party/python/glean_parser/glean_parser/templates/swift.jinja2108
-rw-r--r--third_party/python/glean_parser/glean_parser/translate.py176
-rw-r--r--third_party/python/glean_parser/glean_parser/util.py438
-rw-r--r--third_party/python/glean_parser/glean_parser/validate_ping.py74
-rw-r--r--third_party/python/glean_parser/requirements_dev.txt14
-rw-r--r--third_party/python/glean_parser/setup.cfg16
-rwxr-xr-xthird_party/python/glean_parser/setup.py74
-rwxr-xr-xthird_party/python/glean_parser/tools/extract_data_categories.py176
-rw-r--r--third_party/python/gyp/.gitignore1
-rw-r--r--third_party/python/gyp/.travis.yml23
-rw-r--r--third_party/python/gyp/AUTHORS17
-rw-r--r--third_party/python/gyp/DEPS23
-rw-r--r--third_party/python/gyp/LICENSE27
-rw-r--r--third_party/python/gyp/OWNERS1
-rw-r--r--third_party/python/gyp/PRESUBMIT.py125
-rw-r--r--third_party/python/gyp/README.md5
-rwxr-xr-xthird_party/python/gyp/buildbot/buildbot_run.py138
-rw-r--r--third_party/python/gyp/buildbot/commit_queue/OWNERS6
-rw-r--r--third_party/python/gyp/buildbot/commit_queue/README3
-rw-r--r--third_party/python/gyp/buildbot/commit_queue/cq_config.json15
-rwxr-xr-xthird_party/python/gyp/buildbot/travis-checkout.sh27
-rwxr-xr-xthird_party/python/gyp/buildbot/travis-test.sh12
-rw-r--r--third_party/python/gyp/codereview.settings6
-rw-r--r--third_party/python/gyp/data/win/large-pdb-shim.cc12
-rwxr-xr-xthird_party/python/gyp/gyp8
-rwxr-xr-xthird_party/python/gyp/gyp.bat5
-rwxr-xr-xthird_party/python/gyp/gyp_main.py16
-rwxr-xr-xthird_party/python/gyp/gyptest.py243
-rw-r--r--third_party/python/gyp/pylib/gyp/MSVSNew.py353
-rw-r--r--third_party/python/gyp/pylib/gyp/MSVSProject.py208
-rw-r--r--third_party/python/gyp/pylib/gyp/MSVSSettings.py1106
-rwxr-xr-xthird_party/python/gyp/pylib/gyp/MSVSSettings_test.py1486
-rw-r--r--third_party/python/gyp/pylib/gyp/MSVSToolFile.py58
-rw-r--r--third_party/python/gyp/pylib/gyp/MSVSUserFile.py147
-rw-r--r--third_party/python/gyp/pylib/gyp/MSVSUtil.py271
-rw-r--r--third_party/python/gyp/pylib/gyp/MSVSVersion.py504
-rwxr-xr-xthird_party/python/gyp/pylib/gyp/__init__.py555
-rw-r--r--third_party/python/gyp/pylib/gyp/common.py619
-rwxr-xr-xthird_party/python/gyp/pylib/gyp/common_test.py73
-rw-r--r--third_party/python/gyp/pylib/gyp/easy_xml.py170
-rwxr-xr-xthird_party/python/gyp/pylib/gyp/easy_xml_test.py106
-rwxr-xr-xthird_party/python/gyp/pylib/gyp/flock_tool.py54
-rw-r--r--third_party/python/gyp/pylib/gyp/generator/__init__.py0
-rw-r--r--third_party/python/gyp/pylib/gyp/generator/analyzer.py744
-rw-r--r--third_party/python/gyp/pylib/gyp/generator/cmake.py1256
-rw-r--r--third_party/python/gyp/pylib/gyp/generator/dump_dependency_json.py101
-rw-r--r--third_party/python/gyp/pylib/gyp/generator/eclipse.py425
-rw-r--r--third_party/python/gyp/pylib/gyp/generator/gypd.py94
-rw-r--r--third_party/python/gyp/pylib/gyp/generator/gypsh.py56
-rw-r--r--third_party/python/gyp/pylib/gyp/generator/make.py2260
-rw-r--r--third_party/python/gyp/pylib/gyp/generator/msvs.py3537
-rwxr-xr-xthird_party/python/gyp/pylib/gyp/generator/msvs_test.py40
-rw-r--r--third_party/python/gyp/pylib/gyp/generator/ninja.py2500
-rw-r--r--third_party/python/gyp/pylib/gyp/generator/ninja_test.py46
-rw-r--r--third_party/python/gyp/pylib/gyp/generator/xcode.py1302
-rw-r--r--third_party/python/gyp/pylib/gyp/generator/xcode_test.py23
-rw-r--r--third_party/python/gyp/pylib/gyp/input.py2908
-rwxr-xr-xthird_party/python/gyp/pylib/gyp/input_test.py90
-rwxr-xr-xthird_party/python/gyp/pylib/gyp/mac_tool.py721
-rw-r--r--third_party/python/gyp/pylib/gyp/msvs_emulation.py1112
-rw-r--r--third_party/python/gyp/pylib/gyp/ninja_syntax.py168
-rw-r--r--third_party/python/gyp/pylib/gyp/simple_copy.py57
-rwxr-xr-xthird_party/python/gyp/pylib/gyp/win_tool.py326
-rw-r--r--third_party/python/gyp/pylib/gyp/xcode_emulation.py1798
-rw-r--r--third_party/python/gyp/pylib/gyp/xcode_ninja.py289
-rw-r--r--third_party/python/gyp/pylib/gyp/xcodeproj_file.py2995
-rw-r--r--third_party/python/gyp/pylib/gyp/xml_fix.py68
-rwxr-xr-xthird_party/python/gyp/samples/samples83
-rw-r--r--third_party/python/gyp/samples/samples.bat5
-rwxr-xr-xthird_party/python/gyp/setup.py19
-rwxr-xr-xthird_party/python/gyp/test/actions-bare/gyptest-bare.py24
-rw-r--r--third_party/python/gyp/test/actions-bare/src/bare.gyp25
-rwxr-xr-xthird_party/python/gyp/test/actions-bare/src/bare.py11
-rw-r--r--third_party/python/gyp/test/actions-depfile/depfile.gyp42
-rw-r--r--third_party/python/gyp/test/actions-depfile/gyptest-all.py30
-rw-r--r--third_party/python/gyp/test/actions-depfile/input.txt1
-rw-r--r--third_party/python/gyp/test/actions-depfile/touch.py18
-rwxr-xr-xthird_party/python/gyp/test/actions-multiple-outputs-with-dependencies/gyptest-action.py45
-rw-r--r--third_party/python/gyp/test/actions-multiple-outputs-with-dependencies/src/action.gyp28
-rw-r--r--third_party/python/gyp/test/actions-multiple-outputs-with-dependencies/src/rcopy.py20
-rwxr-xr-xthird_party/python/gyp/test/actions-multiple-outputs/gyptest-multiple-outputs.py45
-rw-r--r--third_party/python/gyp/test/actions-multiple-outputs/src/multiple-outputs.gyp23
-rw-r--r--third_party/python/gyp/test/actions-multiple-outputs/src/touch.py16
-rwxr-xr-xthird_party/python/gyp/test/actions-multiple/gyptest-all.py72
-rw-r--r--third_party/python/gyp/test/actions-multiple/src/actions.gyp226
-rwxr-xr-xthird_party/python/gyp/test/actions-multiple/src/copyfile.py9
-rwxr-xr-xthird_party/python/gyp/test/actions-multiple/src/filter.py12
-rw-r--r--third_party/python/gyp/test/actions-multiple/src/foo.c11
-rw-r--r--third_party/python/gyp/test/actions-multiple/src/input.txt1
-rw-r--r--third_party/python/gyp/test/actions-multiple/src/main.c22
-rwxr-xr-xthird_party/python/gyp/test/actions-none/gyptest-none.py24
-rw-r--r--third_party/python/gyp/test/actions-none/src/fake_cross.py12
-rw-r--r--third_party/python/gyp/test/actions-none/src/foo.cc1
-rw-r--r--third_party/python/gyp/test/actions-none/src/none_with_source_files.gyp35
-rwxr-xr-xthird_party/python/gyp/test/actions-subdir/gyptest-action.py26
-rwxr-xr-xthird_party/python/gyp/test/actions-subdir/src/make-file.py11
-rw-r--r--third_party/python/gyp/test/actions-subdir/src/none.gyp31
-rwxr-xr-xthird_party/python/gyp/test/actions-subdir/src/subdir/make-subdir-file.py11
-rw-r--r--third_party/python/gyp/test/actions-subdir/src/subdir/subdir.gyp28
-rw-r--r--third_party/python/gyp/test/actions/generated-header/action.py11
-rw-r--r--third_party/python/gyp/test/actions/generated-header/main.cc7
-rw-r--r--third_party/python/gyp/test/actions/generated-header/test.gyp34
-rwxr-xr-xthird_party/python/gyp/test/actions/gyptest-all.py101
-rwxr-xr-xthird_party/python/gyp/test/actions/gyptest-default.py68
-rwxr-xr-xthird_party/python/gyp/test/actions/gyptest-errors.py24
-rw-r--r--third_party/python/gyp/test/actions/gyptest-generated-header.py38
-rw-r--r--third_party/python/gyp/test/actions/src/action_missing_name.gyp24
-rw-r--r--third_party/python/gyp/test/actions/src/actions.gyp114
-rwxr-xr-xthird_party/python/gyp/test/actions/src/confirm-dep-files.py21
-rwxr-xr-xthird_party/python/gyp/test/actions/src/subdir1/counter.py44
-rw-r--r--third_party/python/gyp/test/actions/src/subdir1/executable.gyp74
-rwxr-xr-xthird_party/python/gyp/test/actions/src/subdir1/make-prog1.py20
-rwxr-xr-xthird_party/python/gyp/test/actions/src/subdir1/make-prog2.py20
-rw-r--r--third_party/python/gyp/test/actions/src/subdir1/program.c12
-rwxr-xr-xthird_party/python/gyp/test/actions/src/subdir2/make-file.py11
-rw-r--r--third_party/python/gyp/test/actions/src/subdir2/none.gyp33
-rwxr-xr-xthird_party/python/gyp/test/actions/src/subdir3/generate_main.py21
-rw-r--r--third_party/python/gyp/test/actions/src/subdir3/null_input.gyp29
-rwxr-xr-xthird_party/python/gyp/test/additional-targets/gyptest-additional.py63
-rw-r--r--third_party/python/gyp/test/additional-targets/src/all.gyp13
-rw-r--r--third_party/python/gyp/test/additional-targets/src/dir1/actions.gyp56
-rwxr-xr-xthird_party/python/gyp/test/additional-targets/src/dir1/emit.py11
-rw-r--r--third_party/python/gyp/test/additional-targets/src/dir1/lib1.c6
-rw-r--r--third_party/python/gyp/test/analyzer/common.gypi6
-rw-r--r--third_party/python/gyp/test/analyzer/gyptest-analyzer.py427
-rw-r--r--third_party/python/gyp/test/analyzer/static_library_test.gyp34
-rw-r--r--third_party/python/gyp/test/analyzer/subdir/subdir.gyp36
-rw-r--r--third_party/python/gyp/test/analyzer/subdir/subdir2/subdir2.gyp15
-rw-r--r--third_party/python/gyp/test/analyzer/subdir2/subdir.gyp18
-rw-r--r--third_party/python/gyp/test/analyzer/subdir2/subdir.includes.gypi9
-rw-r--r--third_party/python/gyp/test/analyzer/test.gyp114
-rw-r--r--third_party/python/gyp/test/analyzer/test2.gyp25
-rw-r--r--third_party/python/gyp/test/analyzer/test2.includes.gypi13
-rw-r--r--third_party/python/gyp/test/analyzer/test2.includes.includes.gypi9
-rw-r--r--third_party/python/gyp/test/analyzer/test2.toplevel_includes.gypi15
-rw-r--r--third_party/python/gyp/test/analyzer/test3.gyp77
-rw-r--r--third_party/python/gyp/test/analyzer/test4.gyp80
-rw-r--r--third_party/python/gyp/test/analyzer/test5.gyp25
-rw-r--r--third_party/python/gyp/test/arflags/gyptest-arflags.py26
-rw-r--r--third_party/python/gyp/test/arflags/lib.cc0
-rw-r--r--third_party/python/gyp/test/arflags/test.gyp10
-rwxr-xr-xthird_party/python/gyp/test/assembly/gyptest-assembly.py31
-rw-r--r--third_party/python/gyp/test/assembly/gyptest-override.py24
-rw-r--r--third_party/python/gyp/test/assembly/src/as.bat4
-rw-r--r--third_party/python/gyp/test/assembly/src/assembly.gyp62
-rw-r--r--third_party/python/gyp/test/assembly/src/lib1.S15
-rw-r--r--third_party/python/gyp/test/assembly/src/lib1.c3
-rw-r--r--third_party/python/gyp/test/assembly/src/override.gyp34
-rw-r--r--third_party/python/gyp/test/assembly/src/override_asm.asm8
-rw-r--r--third_party/python/gyp/test/assembly/src/program.c12
-rwxr-xr-xthird_party/python/gyp/test/build-option/gyptest-build.py27
-rw-r--r--third_party/python/gyp/test/build-option/hello.c13
-rw-r--r--third_party/python/gyp/test/build-option/hello.gyp15
-rwxr-xr-xthird_party/python/gyp/test/builddir/gyptest-all.py85
-rwxr-xr-xthird_party/python/gyp/test/builddir/gyptest-default.py85
-rw-r--r--third_party/python/gyp/test/builddir/src/builddir.gypi18
-rw-r--r--third_party/python/gyp/test/builddir/src/func1.c6
-rw-r--r--third_party/python/gyp/test/builddir/src/func2.c6
-rw-r--r--third_party/python/gyp/test/builddir/src/func3.c6
-rw-r--r--third_party/python/gyp/test/builddir/src/func4.c6
-rw-r--r--third_party/python/gyp/test/builddir/src/func5.c6
-rw-r--r--third_party/python/gyp/test/builddir/src/prog1.c10
-rw-r--r--third_party/python/gyp/test/builddir/src/prog1.gyp30
-rw-r--r--third_party/python/gyp/test/builddir/src/subdir2/prog2.c10
-rw-r--r--third_party/python/gyp/test/builddir/src/subdir2/prog2.gyp19
-rw-r--r--third_party/python/gyp/test/builddir/src/subdir2/subdir3/prog3.c10
-rw-r--r--third_party/python/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp19
-rw-r--r--third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c10
-rw-r--r--third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp19
-rw-r--r--third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c10
-rw-r--r--third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp19
-rw-r--r--third_party/python/gyp/test/cflags/cflags.c15
-rw-r--r--third_party/python/gyp/test/cflags/cflags.gyp23
-rwxr-xr-xthird_party/python/gyp/test/cflags/gyptest-cflags.py75
-rwxr-xr-xthird_party/python/gyp/test/compilable/gyptest-headers.py29
-rw-r--r--third_party/python/gyp/test/compilable/src/headers.gyp26
-rw-r--r--third_party/python/gyp/test/compilable/src/lib1.cpp7
-rw-r--r--third_party/python/gyp/test/compilable/src/lib1.hpp6
-rw-r--r--third_party/python/gyp/test/compilable/src/program.cpp9
-rw-r--r--third_party/python/gyp/test/compiler-override/compiler-exe.gyp16
-rw-r--r--third_party/python/gyp/test/compiler-override/compiler-global-settings.gyp.in34
-rw-r--r--third_party/python/gyp/test/compiler-override/compiler-host.gyp17
-rw-r--r--third_party/python/gyp/test/compiler-override/compiler-shared-lib.gyp16
-rw-r--r--third_party/python/gyp/test/compiler-override/cxxtest.cc7
-rw-r--r--third_party/python/gyp/test/compiler-override/gyptest-compiler-env-toolchain.py78
-rwxr-xr-xthird_party/python/gyp/test/compiler-override/gyptest-compiler-env.py110
-rwxr-xr-xthird_party/python/gyp/test/compiler-override/gyptest-compiler-global-settings.py82
-rwxr-xr-xthird_party/python/gyp/test/compiler-override/my_cc.py7
-rwxr-xr-xthird_party/python/gyp/test/compiler-override/my_cxx.py7
-rwxr-xr-xthird_party/python/gyp/test/compiler-override/my_ld.py7
-rwxr-xr-xthird_party/python/gyp/test/compiler-override/my_nm.py9
-rwxr-xr-xthird_party/python/gyp/test/compiler-override/my_readelf.py9
-rw-r--r--third_party/python/gyp/test/compiler-override/test.c7
-rw-r--r--third_party/python/gyp/test/conditions/elseif/elseif.gyp43
-rw-r--r--third_party/python/gyp/test/conditions/elseif/elseif_bad1.gyp20
-rw-r--r--third_party/python/gyp/test/conditions/elseif/elseif_bad2.gyp22
-rw-r--r--third_party/python/gyp/test/conditions/elseif/elseif_bad3.gyp23
-rw-r--r--third_party/python/gyp/test/conditions/elseif/elseif_conditions.gypi15
-rw-r--r--third_party/python/gyp/test/conditions/elseif/gyptest_elseif.py33
-rw-r--r--third_party/python/gyp/test/conditions/elseif/program.cc10
-rw-r--r--third_party/python/gyp/test/configurations/basics/configurations.c15
-rw-r--r--third_party/python/gyp/test/configurations/basics/configurations.gyp32
-rwxr-xr-xthird_party/python/gyp/test/configurations/basics/gyptest-configurations.py29
-rw-r--r--third_party/python/gyp/test/configurations/inheritance/configurations.c21
-rw-r--r--third_party/python/gyp/test/configurations/inheritance/configurations.gyp40
-rw-r--r--third_party/python/gyp/test/configurations/inheritance/duplicates.gyp27
-rw-r--r--third_party/python/gyp/test/configurations/inheritance/duplicates.gypd.golden12
-rwxr-xr-xthird_party/python/gyp/test/configurations/inheritance/gyptest-duplicates.py36
-rwxr-xr-xthird_party/python/gyp/test/configurations/inheritance/gyptest-inheritance.py33
-rw-r--r--third_party/python/gyp/test/configurations/invalid/actions.gyp18
-rw-r--r--third_party/python/gyp/test/configurations/invalid/all_dependent_settings.gyp18
-rw-r--r--third_party/python/gyp/test/configurations/invalid/configurations.gyp18
-rw-r--r--third_party/python/gyp/test/configurations/invalid/dependencies.gyp18
-rw-r--r--third_party/python/gyp/test/configurations/invalid/direct_dependent_settings.gyp18
-rwxr-xr-xthird_party/python/gyp/test/configurations/invalid/gyptest-configurations.py36
-rw-r--r--third_party/python/gyp/test/configurations/invalid/libraries.gyp18
-rw-r--r--third_party/python/gyp/test/configurations/invalid/link_settings.gyp18
-rw-r--r--third_party/python/gyp/test/configurations/invalid/sources.gyp18
-rw-r--r--third_party/python/gyp/test/configurations/invalid/standalone_static_library.gyp17
-rw-r--r--third_party/python/gyp/test/configurations/invalid/target_name.gyp18
-rw-r--r--third_party/python/gyp/test/configurations/invalid/type.gyp18
-rw-r--r--third_party/python/gyp/test/configurations/target_platform/configurations.gyp58
-rw-r--r--third_party/python/gyp/test/configurations/target_platform/front.c8
-rwxr-xr-xthird_party/python/gyp/test/configurations/target_platform/gyptest-target_platform.py40
-rw-r--r--third_party/python/gyp/test/configurations/target_platform/left.c3
-rw-r--r--third_party/python/gyp/test/configurations/target_platform/right.c3
-rw-r--r--third_party/python/gyp/test/configurations/x64/configurations.c12
-rw-r--r--third_party/python/gyp/test/configurations/x64/configurations.gyp38
-rwxr-xr-xthird_party/python/gyp/test/configurations/x64/gyptest-x86.py31
-rwxr-xr-xthird_party/python/gyp/test/copies/gyptest-all.py42
-rw-r--r--third_party/python/gyp/test/copies/gyptest-attribs.py41
-rwxr-xr-xthird_party/python/gyp/test/copies/gyptest-default.py42
-rwxr-xr-xthird_party/python/gyp/test/copies/gyptest-samedir.py28
-rwxr-xr-xthird_party/python/gyp/test/copies/gyptest-slash.py39
-rw-r--r--third_party/python/gyp/test/copies/gyptest-sourceless-shared-lib.py20
-rwxr-xr-xthird_party/python/gyp/test/copies/gyptest-updir.py32
-rw-r--r--third_party/python/gyp/test/copies/src/copies-attribs.gyp20
-rw-r--r--third_party/python/gyp/test/copies/src/copies-samedir.gyp37
-rw-r--r--third_party/python/gyp/test/copies/src/copies-slash.gyp36
-rw-r--r--third_party/python/gyp/test/copies/src/copies-sourceless-shared-lib.gyp27
-rw-r--r--third_party/python/gyp/test/copies/src/copies-updir.gyp21
-rw-r--r--third_party/python/gyp/test/copies/src/copies.gyp70
-rw-r--r--third_party/python/gyp/test/copies/src/directory/file31
-rw-r--r--third_party/python/gyp/test/copies/src/directory/file41
-rw-r--r--third_party/python/gyp/test/copies/src/directory/subdir/file51
-rwxr-xr-xthird_party/python/gyp/test/copies/src/executable-file.sh3
-rw-r--r--third_party/python/gyp/test/copies/src/file11
-rw-r--r--third_party/python/gyp/test/copies/src/file21
-rw-r--r--third_party/python/gyp/test/copies/src/foo.c13
-rw-r--r--third_party/python/gyp/test/copies/src/parentdir/subdir/file61
-rwxr-xr-xthird_party/python/gyp/test/custom-generator/gyptest-custom-generator.py18
-rw-r--r--third_party/python/gyp/test/custom-generator/mygenerator.py14
-rw-r--r--third_party/python/gyp/test/custom-generator/test.gyp15
-rw-r--r--third_party/python/gyp/test/cxxflags/cxxflags.cc15
-rw-r--r--third_party/python/gyp/test/cxxflags/cxxflags.gyp15
-rwxr-xr-xthird_party/python/gyp/test/cxxflags/gyptest-cxxflags.py45
-rw-r--r--third_party/python/gyp/test/defines-escaping/defines-escaping.c11
-rw-r--r--third_party/python/gyp/test/defines-escaping/defines-escaping.gyp19
-rwxr-xr-xthird_party/python/gyp/test/defines-escaping/gyptest-defines-escaping.py184
-rw-r--r--third_party/python/gyp/test/defines/defines-env.gyp22
-rw-r--r--third_party/python/gyp/test/defines/defines.c23
-rw-r--r--third_party/python/gyp/test/defines/defines.gyp38
-rwxr-xr-xthird_party/python/gyp/test/defines/gyptest-define-override.py43
-rwxr-xr-xthird_party/python/gyp/test/defines/gyptest-defines-env-regyp.py51
-rwxr-xr-xthird_party/python/gyp/test/defines/gyptest-defines-env.py85
-rwxr-xr-xthird_party/python/gyp/test/defines/gyptest-defines.py39
-rwxr-xr-xthird_party/python/gyp/test/dependencies/a.c9
-rw-r--r--third_party/python/gyp/test/dependencies/adso/all_dependent_settings_order.gyp45
-rwxr-xr-xthird_party/python/gyp/test/dependencies/adso/write_args.py11
-rwxr-xr-xthird_party/python/gyp/test/dependencies/b/b.c3
-rwxr-xr-xthird_party/python/gyp/test/dependencies/b/b.gyp22
-rwxr-xr-xthird_party/python/gyp/test/dependencies/b/b3.c9
-rw-r--r--third_party/python/gyp/test/dependencies/c/c.c4
-rw-r--r--third_party/python/gyp/test/dependencies/c/c.gyp22
-rw-r--r--third_party/python/gyp/test/dependencies/c/d.c3
-rw-r--r--third_party/python/gyp/test/dependencies/double_dependency.gyp23
-rw-r--r--third_party/python/gyp/test/dependencies/double_dependent.gyp12
-rw-r--r--third_party/python/gyp/test/dependencies/extra_targets.gyp18
-rw-r--r--third_party/python/gyp/test/dependencies/gyptest-all-dependent-settings-order.py19
-rw-r--r--third_party/python/gyp/test/dependencies/gyptest-double-dependency.py19
-rwxr-xr-xthird_party/python/gyp/test/dependencies/gyptest-extra-targets.py22
-rw-r--r--third_party/python/gyp/test/dependencies/gyptest-indirect-module-dependency.py22
-rwxr-xr-xthird_party/python/gyp/test/dependencies/gyptest-lib-only.py39
-rwxr-xr-xthird_party/python/gyp/test/dependencies/gyptest-none-traversal.py25
-rw-r--r--third_party/python/gyp/test/dependencies/gyptest-sharedlib-linksettings.py21
-rwxr-xr-xthird_party/python/gyp/test/dependencies/lib_only.gyp16
-rw-r--r--third_party/python/gyp/test/dependencies/main.c14
-rw-r--r--third_party/python/gyp/test/dependencies/module-dep/a.cc7
-rw-r--r--third_party/python/gyp/test/dependencies/module-dep/dll.cc9
-rw-r--r--third_party/python/gyp/test/dependencies/module-dep/exe.cc7
-rw-r--r--third_party/python/gyp/test/dependencies/module-dep/indirect-module-dependency.gyp37
-rwxr-xr-xthird_party/python/gyp/test/dependencies/none_traversal.gyp46
-rw-r--r--third_party/python/gyp/test/dependencies/sharedlib-linksettings/program.c25
-rw-r--r--third_party/python/gyp/test/dependencies/sharedlib-linksettings/sharedlib.c16
-rw-r--r--third_party/python/gyp/test/dependencies/sharedlib-linksettings/staticlib.c24
-rw-r--r--third_party/python/gyp/test/dependencies/sharedlib-linksettings/test.gyp37
-rwxr-xr-xthird_party/python/gyp/test/dependency-copy/gyptest-copy.py26
-rw-r--r--third_party/python/gyp/test/dependency-copy/src/copies.gyp25
-rw-r--r--third_party/python/gyp/test/dependency-copy/src/file1.c7
-rw-r--r--third_party/python/gyp/test/dependency-copy/src/file2.c7
-rw-r--r--third_party/python/gyp/test/dependent-settings/nested-dependent-settings/all-dependent-settings.gyp19
-rw-r--r--third_party/python/gyp/test/dependent-settings/nested-dependent-settings/direct-dependent-settings.gyp19
-rw-r--r--third_party/python/gyp/test/dependent-settings/nested-dependent-settings/gyptest-nested-dependent-settings.py18
-rw-r--r--third_party/python/gyp/test/determinism/determinism.gyp59
-rw-r--r--third_party/python/gyp/test/determinism/empty-targets.gyp32
-rw-r--r--third_party/python/gyp/test/determinism/gyptest-determinism.py30
-rw-r--r--third_party/python/gyp/test/determinism/gyptest-empty-target-names.py30
-rw-r--r--third_party/python/gyp/test/determinism/gyptest-needed-variables.py30
-rw-r--r--third_party/python/gyp/test/determinism/gyptest-solibs.py37
-rw-r--r--third_party/python/gyp/test/determinism/main.cc5
-rw-r--r--third_party/python/gyp/test/determinism/needed-variables.gyp33
-rw-r--r--third_party/python/gyp/test/determinism/rule.py8
-rw-r--r--third_party/python/gyp/test/determinism/solib.cc8
-rw-r--r--third_party/python/gyp/test/determinism/solibs.gyp32
-rw-r--r--third_party/python/gyp/test/empty-target/empty-target.gyp12
-rw-r--r--third_party/python/gyp/test/empty-target/gyptest-empty-target.py18
-rw-r--r--third_party/python/gyp/test/errors/dependency_cycle.gyp23
-rw-r--r--third_party/python/gyp/test/errors/duplicate_basenames.gyp13
-rw-r--r--third_party/python/gyp/test/errors/duplicate_node.gyp12
-rw-r--r--third_party/python/gyp/test/errors/duplicate_rule.gyp22
-rw-r--r--third_party/python/gyp/test/errors/duplicate_targets.gyp14
-rw-r--r--third_party/python/gyp/test/errors/error_command.gyp12
-rw-r--r--third_party/python/gyp/test/errors/file_cycle0.gyp17
-rw-r--r--third_party/python/gyp/test/errors/file_cycle1.gyp13
-rwxr-xr-xthird_party/python/gyp/test/errors/gyptest-errors.py80
-rw-r--r--third_party/python/gyp/test/errors/missing_command.gyp12
-rw-r--r--third_party/python/gyp/test/errors/missing_dep.gyp15
-rw-r--r--third_party/python/gyp/test/errors/missing_targets.gyp8
-rw-r--r--third_party/python/gyp/test/escaping/colon/test.gyp21
-rw-r--r--third_party/python/gyp/test/escaping/gyptest-colon.py51
-rw-r--r--third_party/python/gyp/test/exclusion/exclusion.gyp23
-rwxr-xr-xthird_party/python/gyp/test/exclusion/gyptest-exclusion.py22
-rw-r--r--third_party/python/gyp/test/exclusion/hello.c15
-rwxr-xr-xthird_party/python/gyp/test/external-cross-compile/gyptest-cross.py31
-rw-r--r--third_party/python/gyp/test/external-cross-compile/src/bogus1.cc1
-rw-r--r--third_party/python/gyp/test/external-cross-compile/src/bogus2.c1
-rw-r--r--third_party/python/gyp/test/external-cross-compile/src/cross.gyp83
-rw-r--r--third_party/python/gyp/test/external-cross-compile/src/cross_compile.gypi23
-rw-r--r--third_party/python/gyp/test/external-cross-compile/src/fake_cross.py18
-rw-r--r--third_party/python/gyp/test/external-cross-compile/src/program.cc16
-rw-r--r--third_party/python/gyp/test/external-cross-compile/src/test1.cc1
-rw-r--r--third_party/python/gyp/test/external-cross-compile/src/test2.c1
-rw-r--r--third_party/python/gyp/test/external-cross-compile/src/test3.cc1
-rw-r--r--third_party/python/gyp/test/external-cross-compile/src/test4.c1
-rw-r--r--third_party/python/gyp/test/external-cross-compile/src/tochar.py13
-rw-r--r--third_party/python/gyp/test/generator-output/actions/actions.gyp16
-rw-r--r--third_party/python/gyp/test/generator-output/actions/build/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/actions/subdir1/actions-out/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/actions/subdir1/build/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/actions/subdir1/executable.gyp44
-rwxr-xr-xthird_party/python/gyp/test/generator-output/actions/subdir1/make-prog1.py20
-rwxr-xr-xthird_party/python/gyp/test/generator-output/actions/subdir1/make-prog2.py20
-rw-r--r--third_party/python/gyp/test/generator-output/actions/subdir1/program.c12
-rw-r--r--third_party/python/gyp/test/generator-output/actions/subdir2/actions-out/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/actions/subdir2/build/README.txt4
-rwxr-xr-xthird_party/python/gyp/test/generator-output/actions/subdir2/make-file.py11
-rw-r--r--third_party/python/gyp/test/generator-output/actions/subdir2/none.gyp31
-rw-r--r--third_party/python/gyp/test/generator-output/copies/build/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/copies/copies-out/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/copies/copies.gyp50
-rw-r--r--third_party/python/gyp/test/generator-output/copies/file11
-rw-r--r--third_party/python/gyp/test/generator-output/copies/file21
-rw-r--r--third_party/python/gyp/test/generator-output/copies/subdir/build/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/copies/subdir/copies-out/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/copies/subdir/file31
-rw-r--r--third_party/python/gyp/test/generator-output/copies/subdir/file41
-rw-r--r--third_party/python/gyp/test/generator-output/copies/subdir/subdir.gyp32
-rwxr-xr-xthird_party/python/gyp/test/generator-output/gyptest-actions.py57
-rwxr-xr-xthird_party/python/gyp/test/generator-output/gyptest-copies.py59
-rwxr-xr-xthird_party/python/gyp/test/generator-output/gyptest-depth.py58
-rw-r--r--third_party/python/gyp/test/generator-output/gyptest-mac-bundle.py33
-rwxr-xr-xthird_party/python/gyp/test/generator-output/gyptest-relocate.py59
-rwxr-xr-xthird_party/python/gyp/test/generator-output/gyptest-rules.py58
-rwxr-xr-xthird_party/python/gyp/test/generator-output/gyptest-subdir2-deep.py36
-rwxr-xr-xthird_party/python/gyp/test/generator-output/gyptest-symlink.py44
-rwxr-xr-xthird_party/python/gyp/test/generator-output/gyptest-top-all.py53
-rw-r--r--third_party/python/gyp/test/generator-output/mac-bundle/Info.plist32
-rw-r--r--third_party/python/gyp/test/generator-output/mac-bundle/app.order1
-rw-r--r--third_party/python/gyp/test/generator-output/mac-bundle/header.h1
-rw-r--r--third_party/python/gyp/test/generator-output/mac-bundle/main.c1
-rw-r--r--third_party/python/gyp/test/generator-output/mac-bundle/resource.sb1
-rw-r--r--third_party/python/gyp/test/generator-output/mac-bundle/test.gyp25
-rw-r--r--third_party/python/gyp/test/generator-output/rules/build/README.txt4
-rwxr-xr-xthird_party/python/gyp/test/generator-output/rules/copy-file.py12
-rw-r--r--third_party/python/gyp/test/generator-output/rules/rules.gyp16
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir1/build/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir1/define3.in01
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir1/define4.in01
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir1/executable.gyp59
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir1/function1.in16
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir1/function2.in16
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir1/program.c18
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir2/build/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir2/file1.in01
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir2/file2.in01
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir2/file3.in11
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir2/file4.in11
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir2/none.gyp49
-rw-r--r--third_party/python/gyp/test/generator-output/rules/subdir2/rules-out/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/src/build/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/src/inc.h1
-rw-r--r--third_party/python/gyp/test/generator-output/src/inc1/include1.h1
-rw-r--r--third_party/python/gyp/test/generator-output/src/prog1.c18
-rw-r--r--third_party/python/gyp/test/generator-output/src/prog1.gyp28
-rw-r--r--third_party/python/gyp/test/generator-output/src/subdir2/build/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/src/subdir2/deeper/build/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/src/subdir2/deeper/deeper.c7
-rw-r--r--third_party/python/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp18
-rw-r--r--third_party/python/gyp/test/generator-output/src/subdir2/deeper/deeper.h1
-rw-r--r--third_party/python/gyp/test/generator-output/src/subdir2/inc2/include2.h1
-rw-r--r--third_party/python/gyp/test/generator-output/src/subdir2/prog2.c18
-rw-r--r--third_party/python/gyp/test/generator-output/src/subdir2/prog2.gyp28
-rw-r--r--third_party/python/gyp/test/generator-output/src/subdir3/build/README.txt4
-rw-r--r--third_party/python/gyp/test/generator-output/src/subdir3/inc3/include3.h1
-rw-r--r--third_party/python/gyp/test/generator-output/src/subdir3/prog3.c18
-rw-r--r--third_party/python/gyp/test/generator-output/src/subdir3/prog3.gyp25
-rw-r--r--third_party/python/gyp/test/generator-output/src/symroot.gypi16
-rw-r--r--third_party/python/gyp/test/gyp-defines/defines.gyp26
-rw-r--r--third_party/python/gyp/test/gyp-defines/echo.py11
-rw-r--r--third_party/python/gyp/test/gyp-defines/gyptest-multiple-values.py36
-rw-r--r--third_party/python/gyp/test/gyp-defines/gyptest-regyp.py40
-rwxr-xr-xthird_party/python/gyp/test/hard_dependency/gyptest-exported-hard-dependency.py37
-rwxr-xr-xthird_party/python/gyp/test/hard_dependency/gyptest-no-exported-hard-dependency.py36
-rw-r--r--third_party/python/gyp/test/hard_dependency/src/a.c9
-rw-r--r--third_party/python/gyp/test/hard_dependency/src/a.h12
-rw-r--r--third_party/python/gyp/test/hard_dependency/src/b.c9
-rw-r--r--third_party/python/gyp/test/hard_dependency/src/b.h12
-rw-r--r--third_party/python/gyp/test/hard_dependency/src/c.c10
-rw-r--r--third_party/python/gyp/test/hard_dependency/src/c.h10
-rw-r--r--third_party/python/gyp/test/hard_dependency/src/d.c9
-rwxr-xr-xthird_party/python/gyp/test/hard_dependency/src/emit.py11
-rw-r--r--third_party/python/gyp/test/hard_dependency/src/hard_dependency.gyp78
-rwxr-xr-xthird_party/python/gyp/test/hello/gyptest-all.py24
-rwxr-xr-xthird_party/python/gyp/test/hello/gyptest-default.py24
-rwxr-xr-xthird_party/python/gyp/test/hello/gyptest-disable-regyp.py32
-rw-r--r--third_party/python/gyp/test/hello/gyptest-regyp-output.py36
-rwxr-xr-xthird_party/python/gyp/test/hello/gyptest-regyp.py32
-rwxr-xr-xthird_party/python/gyp/test/hello/gyptest-target.py24
-rw-r--r--third_party/python/gyp/test/hello/hello.c11
-rw-r--r--third_party/python/gyp/test/hello/hello.gyp15
-rw-r--r--third_party/python/gyp/test/hello/hello2.c11
-rw-r--r--third_party/python/gyp/test/hello/hello2.gyp15
-rwxr-xr-xthird_party/python/gyp/test/home_dot_gyp/gyptest-home-includes-config-arg.py31
-rwxr-xr-xthird_party/python/gyp/test/home_dot_gyp/gyptest-home-includes-config-env.py33
-rwxr-xr-xthird_party/python/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py44
-rwxr-xr-xthird_party/python/gyp/test/home_dot_gyp/gyptest-home-includes.py30
-rw-r--r--third_party/python/gyp/test/home_dot_gyp/home/.gyp/include.gypi5
-rw-r--r--third_party/python/gyp/test/home_dot_gyp/home2/.gyp/include.gypi5
-rw-r--r--third_party/python/gyp/test/home_dot_gyp/home2/.gyp_new/include.gypi5
-rw-r--r--third_party/python/gyp/test/home_dot_gyp/src/all.gyp22
-rw-r--r--third_party/python/gyp/test/home_dot_gyp/src/printfoo.c7
-rwxr-xr-xthird_party/python/gyp/test/include_dirs/gyptest-all.py43
-rwxr-xr-xthird_party/python/gyp/test/include_dirs/gyptest-default.py43
-rw-r--r--third_party/python/gyp/test/include_dirs/src/inc.h1
-rw-r--r--third_party/python/gyp/test/include_dirs/src/inc1/include1.h1
-rw-r--r--third_party/python/gyp/test/include_dirs/src/includes.c19
-rw-r--r--third_party/python/gyp/test/include_dirs/src/includes.gyp27
-rw-r--r--third_party/python/gyp/test/include_dirs/src/shadow1/shadow.h1
-rw-r--r--third_party/python/gyp/test/include_dirs/src/shadow2/shadow.h1
-rw-r--r--third_party/python/gyp/test/include_dirs/src/subdir/inc.h1
-rw-r--r--third_party/python/gyp/test/include_dirs/src/subdir/inc2/include2.h1
-rw-r--r--third_party/python/gyp/test/include_dirs/src/subdir/subdir_includes.c14
-rw-r--r--third_party/python/gyp/test/include_dirs/src/subdir/subdir_includes.gyp20
-rwxr-xr-xthird_party/python/gyp/test/intermediate_dir/gyptest-intermediate-dir.py44
-rwxr-xr-xthird_party/python/gyp/test/intermediate_dir/src/script.py22
-rw-r--r--third_party/python/gyp/test/intermediate_dir/src/shared_infile.txt1
-rw-r--r--third_party/python/gyp/test/intermediate_dir/src/test.gyp42
-rw-r--r--third_party/python/gyp/test/intermediate_dir/src/test2.gyp42
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist-error.strings3
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist.strings3
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/LanguageMap.plist8
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/MainMenu.xib17
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/Main_iPhone.storyboard27
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json58
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json23
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.pngbin0 -> 3263 bytes
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.pngbin0 -> 3847 bytes
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.pngbin0 -> 4394 bytes
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/TestApp-Info.plist28
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/check_no_signature.py13
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/main.m13
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/only-compile-in-32-bits.m7
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/TestApp/only-compile-in-64-bits.m7
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/test-archs.gyp109
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/test-assets-catalog.gyp45
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/test-crosscompile.gyp47
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/test-device.gyp109
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/test.gyp75
-rw-r--r--third_party/python/gyp/test/ios/app-bundle/tool_main.cc7
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/Info.plist24
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/copies-with-xcode-envvars.gyp97
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/empty.c1
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/file01
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/file11
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/file101
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/file111
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/file21
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/file31
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/file41
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/file51
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/file61
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/file71
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/file81
-rw-r--r--third_party/python/gyp/test/ios/copies-with-xcode-envvars/file91
-rw-r--r--third_party/python/gyp/test/ios/deployment-target/check-version-min.c33
-rw-r--r--third_party/python/gyp/test/ios/deployment-target/deployment-target.gyp34
-rw-r--r--third_party/python/gyp/test/ios/extension/ActionExtension/ActionViewController.h9
-rw-r--r--third_party/python/gyp/test/ios/extension/ActionExtension/ActionViewController.m31
-rw-r--r--third_party/python/gyp/test/ios/extension/ActionExtension/Info.plist42
-rw-r--r--third_party/python/gyp/test/ios/extension/ActionExtension/MainInterface.storyboard63
-rw-r--r--third_party/python/gyp/test/ios/extension/ExtensionContainer/AppDelegate.h12
-rw-r--r--third_party/python/gyp/test/ios/extension/ExtensionContainer/AppDelegate.m19
-rw-r--r--third_party/python/gyp/test/ios/extension/ExtensionContainer/Base.lproj/Main.storyboard25
-rw-r--r--third_party/python/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/AppIcon.appiconset/Contents.json53
-rw-r--r--third_party/python/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/LaunchImage.launchimage/Contents.json51
-rw-r--r--third_party/python/gyp/test/ios/extension/ExtensionContainer/Info.plist32
-rw-r--r--third_party/python/gyp/test/ios/extension/ExtensionContainer/ViewController.h11
-rw-r--r--third_party/python/gyp/test/ios/extension/ExtensionContainer/ViewController.m24
-rw-r--r--third_party/python/gyp/test/ios/extension/ExtensionContainer/main.m13
-rw-r--r--third_party/python/gyp/test/ios/extension/extension.gyp91
-rw-r--r--third_party/python/gyp/test/ios/framework/framework.gyp43
-rw-r--r--third_party/python/gyp/test/ios/framework/iOSFramework/Info.plist26
-rw-r--r--third_party/python/gyp/test/ios/framework/iOSFramework/Thing.h10
-rw-r--r--third_party/python/gyp/test/ios/framework/iOSFramework/Thing.m22
-rw-r--r--third_party/python/gyp/test/ios/framework/iOSFramework/iOSFramework.h9
-rwxr-xr-xthird_party/python/gyp/test/ios/gyptest-app-ios-assets-catalog.py57
-rwxr-xr-xthird_party/python/gyp/test/ios/gyptest-app-ios.py76
-rw-r--r--third_party/python/gyp/test/ios/gyptest-archs.py62
-rw-r--r--third_party/python/gyp/test/ios/gyptest-copies-with-xcode-envvars.py65
-rw-r--r--third_party/python/gyp/test/ios/gyptest-crosscompile.py34
-rw-r--r--third_party/python/gyp/test/ios/gyptest-deployment-target.py23
-rwxr-xr-xthird_party/python/gyp/test/ios/gyptest-extension.py51
-rwxr-xr-xthird_party/python/gyp/test/ios/gyptest-framework.py37
-rw-r--r--third_party/python/gyp/test/ios/gyptest-per-config-settings.py190
-rwxr-xr-xthird_party/python/gyp/test/ios/gyptest-watch.py44
-rw-r--r--third_party/python/gyp/test/ios/gyptest-xcode-ninja.py25
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchApp/Images.xcassets/AppIcon.appiconset/Contents.json62
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchApp/Images.xcassets/LaunchImage.launchimage/Contents.json24
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchApp/Info.plist35
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchApp/Interface.storyboard15
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchContainer/AppDelegate.h12
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchContainer/AppDelegate.m19
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchContainer/Base.lproj/Main.storyboard25
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchContainer/Images.xcassets/AppIcon.appiconset/Contents.json53
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchContainer/Images.xcassets/LaunchImage.launchimage/Contents.json51
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchContainer/Info.plist32
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchContainer/ViewController.h11
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchContainer/ViewController.m24
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchContainer/main.m13
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchKitExtension/Images.xcassets/MyImage.imageset/Contents.json20
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchKitExtension/Info.plist38
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchKitExtension/InterfaceController.h10
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchKitExtension/InterfaceController.m25
-rw-r--r--third_party/python/gyp/test/ios/watch/WatchKitExtension/MainInterface.storyboard63
-rw-r--r--third_party/python/gyp/test/ios/watch/watch.gyp105
-rw-r--r--third_party/python/gyp/test/ios/xctests/App/AppDelegate.h11
-rw-r--r--third_party/python/gyp/test/ios/xctests/App/AppDelegate.m18
-rw-r--r--third_party/python/gyp/test/ios/xctests/App/Base.lproj/LaunchScreen.xib41
-rw-r--r--third_party/python/gyp/test/ios/xctests/App/Base.lproj/Main.storyboard25
-rw-r--r--third_party/python/gyp/test/ios/xctests/App/Images.xcassets/AppIcon.appiconset/Contents.json68
-rw-r--r--third_party/python/gyp/test/ios/xctests/App/Info.plist47
-rw-r--r--third_party/python/gyp/test/ios/xctests/App/ViewController.h9
-rw-r--r--third_party/python/gyp/test/ios/xctests/App/ViewController.m21
-rw-r--r--third_party/python/gyp/test/ios/xctests/App/main.m13
-rw-r--r--third_party/python/gyp/test/ios/xctests/AppTests/AppTests.m31
-rw-r--r--third_party/python/gyp/test/ios/xctests/AppTests/Info.plist24
-rw-r--r--third_party/python/gyp/test/ios/xctests/gyptest-xctests.py49
-rw-r--r--third_party/python/gyp/test/ios/xctests/xctests.gyp74
-rw-r--r--third_party/python/gyp/test/lib/README.txt17
-rw-r--r--third_party/python/gyp/test/lib/TestCmd.py1597
-rw-r--r--third_party/python/gyp/test/lib/TestCommon.py591
-rw-r--r--third_party/python/gyp/test/lib/TestGyp.py1259
-rw-r--r--third_party/python/gyp/test/lib/TestMac.py76
-rw-r--r--third_party/python/gyp/test/lib/TestWin.py101
-rwxr-xr-xthird_party/python/gyp/test/library/gyptest-shared-obj-install-path.py39
-rwxr-xr-xthird_party/python/gyp/test/library/gyptest-shared.py84
-rwxr-xr-xthird_party/python/gyp/test/library/gyptest-static.py84
-rw-r--r--third_party/python/gyp/test/library/src/lib1.c10
-rw-r--r--third_party/python/gyp/test/library/src/lib1_moveable.c10
-rw-r--r--third_party/python/gyp/test/library/src/lib2.c10
-rw-r--r--third_party/python/gyp/test/library/src/lib2_moveable.c10
-rw-r--r--third_party/python/gyp/test/library/src/library.gyp58
-rw-r--r--third_party/python/gyp/test/library/src/program.c15
-rw-r--r--third_party/python/gyp/test/library/src/shared_dependency.gyp33
-rw-r--r--third_party/python/gyp/test/library_dirs/gyptest-library-dirs.py50
-rw-r--r--third_party/python/gyp/test/library_dirs/subdir/README.txt1
-rw-r--r--third_party/python/gyp/test/library_dirs/subdir/hello.cc11
-rw-r--r--third_party/python/gyp/test/library_dirs/subdir/mylib.cc9
-rw-r--r--third_party/python/gyp/test/library_dirs/subdir/mylib.h12
-rw-r--r--third_party/python/gyp/test/library_dirs/subdir/test-win.gyp60
-rw-r--r--third_party/python/gyp/test/library_dirs/subdir/test.gyp68
-rwxr-xr-xthird_party/python/gyp/test/link-dependency/gyptest-link-dependency.py23
-rw-r--r--third_party/python/gyp/test/link-dependency/main.c7
-rw-r--r--third_party/python/gyp/test/link-dependency/mymalloc.c12
-rw-r--r--third_party/python/gyp/test/link-dependency/test.gyp37
-rw-r--r--third_party/python/gyp/test/link-objects/base.c6
-rw-r--r--third_party/python/gyp/test/link-objects/extra.c5
-rwxr-xr-xthird_party/python/gyp/test/link-objects/gyptest-all.py28
-rw-r--r--third_party/python/gyp/test/link-objects/link-objects.gyp24
-rw-r--r--third_party/python/gyp/test/linux/gyptest-implicit-rpath.py48
-rw-r--r--third_party/python/gyp/test/linux/gyptest-ldflags-duplicates.py22
-rw-r--r--third_party/python/gyp/test/linux/gyptest-ldflags-from-environment.py45
-rw-r--r--third_party/python/gyp/test/linux/gyptest-target-rpath.py43
-rw-r--r--third_party/python/gyp/test/linux/implicit-rpath/file.c1
-rw-r--r--third_party/python/gyp/test/linux/implicit-rpath/main.c1
-rw-r--r--third_party/python/gyp/test/linux/implicit-rpath/test.gyp47
-rwxr-xr-xthird_party/python/gyp/test/linux/ldflags-duplicates/check-ldflags.py28
-rw-r--r--third_party/python/gyp/test/linux/ldflags-duplicates/lib1.c6
-rw-r--r--third_party/python/gyp/test/linux/ldflags-duplicates/lib2.c6
-rw-r--r--third_party/python/gyp/test/linux/ldflags-duplicates/main.c7
-rw-r--r--third_party/python/gyp/test/linux/ldflags-duplicates/test.gyp45
-rw-r--r--third_party/python/gyp/test/linux/ldflags-from-environment/main.c7
-rw-r--r--third_party/python/gyp/test/linux/ldflags-from-environment/test.gyp23
-rw-r--r--third_party/python/gyp/test/linux/target-rpath/file.c1
-rw-r--r--third_party/python/gyp/test/linux/target-rpath/main.c1
-rw-r--r--third_party/python/gyp/test/linux/target-rpath/test.gyp47
-rw-r--r--third_party/python/gyp/test/mac/action-envvars/action/action.gyp34
-rwxr-xr-xthird_party/python/gyp/test/mac/action-envvars/action/action.sh8
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist-error.strings3
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist.strings3
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/MainMenu.xib4119
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16be.stringsbin0 -> 208 bytes
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16le.stringsbin0 -> 208 bytes
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json58
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json23
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.pngbin0 -> 3263 bytes
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.pngbin0 -> 3847 bytes
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.pngbin0 -> 4394 bytes
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/TestApp-Info.plist34
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.h13
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.m15
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/TestApp/main.m10
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/empty.c0
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/test-assets-catalog.gyp43
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/test-error.gyp31
-rw-r--r--third_party/python/gyp/test/mac/app-bundle/test.gyp41
-rw-r--r--third_party/python/gyp/test/mac/archs/empty_main.cc1
-rw-r--r--third_party/python/gyp/test/mac/archs/file.mm1
-rw-r--r--third_party/python/gyp/test/mac/archs/file_a.cc8
-rw-r--r--third_party/python/gyp/test/mac/archs/file_a.h10
-rw-r--r--third_party/python/gyp/test/mac/archs/file_b.cc8
-rw-r--r--third_party/python/gyp/test/mac/archs/file_b.h10
-rw-r--r--third_party/python/gyp/test/mac/archs/file_c.cc11
-rw-r--r--third_party/python/gyp/test/mac/archs/file_d.cc11
-rw-r--r--third_party/python/gyp/test/mac/archs/header.h1
-rw-r--r--third_party/python/gyp/test/mac/archs/my_file.cc4
-rw-r--r--third_party/python/gyp/test/mac/archs/my_main_file.cc9
-rw-r--r--third_party/python/gyp/test/mac/archs/test-archs-multiarch.gyp92
-rw-r--r--third_party/python/gyp/test/mac/archs/test-archs-x86_64.gyp27
-rw-r--r--third_party/python/gyp/test/mac/archs/test-dependencies.gyp92
-rw-r--r--third_party/python/gyp/test/mac/archs/test-no-archs.gyp21
-rw-r--r--third_party/python/gyp/test/mac/archs/test-valid-archs.gyp28
-rwxr-xr-xthird_party/python/gyp/test/mac/bundle-resources/change.sh3
-rwxr-xr-xthird_party/python/gyp/test/mac/bundle-resources/executable-file.sh3
-rw-r--r--third_party/python/gyp/test/mac/bundle-resources/secret.txt1
-rw-r--r--third_party/python/gyp/test/mac/bundle-resources/test.gyp59
-rw-r--r--third_party/python/gyp/test/mac/cflags/ccfile.cc7
-rw-r--r--third_party/python/gyp/test/mac/cflags/ccfile_withcflags.cc7
-rw-r--r--third_party/python/gyp/test/mac/cflags/cfile.c7
-rw-r--r--third_party/python/gyp/test/mac/cflags/cppfile.cpp7
-rw-r--r--third_party/python/gyp/test/mac/cflags/cppfile_withcflags.cpp7
-rw-r--r--third_party/python/gyp/test/mac/cflags/cxxfile.cxx7
-rw-r--r--third_party/python/gyp/test/mac/cflags/cxxfile_withcflags.cxx7
-rw-r--r--third_party/python/gyp/test/mac/cflags/mfile.m7
-rw-r--r--third_party/python/gyp/test/mac/cflags/mmfile.mm7
-rw-r--r--third_party/python/gyp/test/mac/cflags/mmfile_withcflags.mm7
-rw-r--r--third_party/python/gyp/test/mac/cflags/test.gyp132
-rw-r--r--third_party/python/gyp/test/mac/clang-cxx-language-standard/c++11.cc8
-rw-r--r--third_party/python/gyp/test/mac/clang-cxx-language-standard/c++98.cc24
-rw-r--r--third_party/python/gyp/test/mac/clang-cxx-language-standard/clang-cxx-language-standard.gyp30
-rw-r--r--third_party/python/gyp/test/mac/clang-cxx-library/clang-cxx-library.gyp32
-rw-r--r--third_party/python/gyp/test/mac/clang-cxx-library/libc++.cc11
-rw-r--r--third_party/python/gyp/test/mac/clang-cxx-library/libstdc++.cc11
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/copies-with-xcode-envvars.gyp87
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/empty.c1
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/file01
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/file11
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/file101
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/file111
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/file21
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/file31
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/file41
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/file51
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/file61
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/file71
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/file81
-rw-r--r--third_party/python/gyp/test/mac/copies-with-xcode-envvars/file91
-rw-r--r--third_party/python/gyp/test/mac/copy-dylib/empty.c1
-rw-r--r--third_party/python/gyp/test/mac/copy-dylib/test.gyp31
-rw-r--r--third_party/python/gyp/test/mac/debuginfo/file.c6
-rw-r--r--third_party/python/gyp/test/mac/debuginfo/test.gyp82
-rw-r--r--third_party/python/gyp/test/mac/depend-on-bundle/English.lproj/InfoPlist.strings1
-rw-r--r--third_party/python/gyp/test/mac/depend-on-bundle/Info.plist28
-rw-r--r--third_party/python/gyp/test/mac/depend-on-bundle/bundle.c1
-rw-r--r--third_party/python/gyp/test/mac/depend-on-bundle/executable.c4
-rw-r--r--third_party/python/gyp/test/mac/depend-on-bundle/test.gyp28
-rw-r--r--third_party/python/gyp/test/mac/deployment-target/check-version-min.c33
-rw-r--r--third_party/python/gyp/test/mac/deployment-target/deployment-target.gyp28
-rw-r--r--third_party/python/gyp/test/mac/framework-dirs/calculate.c15
-rw-r--r--third_party/python/gyp/test/mac/framework-dirs/framework-dirs.gyp21
-rw-r--r--third_party/python/gyp/test/mac/framework-headers/myframework.h8
-rw-r--r--third_party/python/gyp/test/mac/framework-headers/myframework.m8
-rw-r--r--third_party/python/gyp/test/mac/framework-headers/test.gyp44
-rw-r--r--third_party/python/gyp/test/mac/framework/TestFramework/English.lproj/InfoPlist.strings2
-rw-r--r--third_party/python/gyp/test/mac/framework/TestFramework/Info.plist28
-rw-r--r--third_party/python/gyp/test/mac/framework/TestFramework/ObjCVector.h28
-rw-r--r--third_party/python/gyp/test/mac/framework/TestFramework/ObjCVector.mm63
-rw-r--r--third_party/python/gyp/test/mac/framework/TestFramework/ObjCVectorInternal.h9
-rw-r--r--third_party/python/gyp/test/mac/framework/TestFramework/TestFramework_Prefix.pch7
-rw-r--r--third_party/python/gyp/test/mac/framework/empty.c0
-rw-r--r--third_party/python/gyp/test/mac/framework/framework.gyp108
-rw-r--r--third_party/python/gyp/test/mac/global-settings/src/dir1/dir1.gyp11
-rw-r--r--third_party/python/gyp/test/mac/global-settings/src/dir2/dir2.gyp22
-rw-r--r--third_party/python/gyp/test/mac/global-settings/src/dir2/file.txt1
-rw-r--r--third_party/python/gyp/test/mac/gyptest-action-envvars.py36
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-app-assets-catalog.py125
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-app-error.py49
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-app.py122
-rw-r--r--third_party/python/gyp/test/mac/gyptest-archs.py96
-rw-r--r--third_party/python/gyp/test/mac/gyptest-bundle-resources.py64
-rw-r--r--third_party/python/gyp/test/mac/gyptest-cflags.py21
-rw-r--r--third_party/python/gyp/test/mac/gyptest-clang-cxx-language-standard.py25
-rw-r--r--third_party/python/gyp/test/mac/gyptest-clang-cxx-library.py32
-rw-r--r--third_party/python/gyp/test/mac/gyptest-copies-with-xcode-envvars.py65
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-copies.py62
-rw-r--r--third_party/python/gyp/test/mac/gyptest-copy-dylib.py25
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-debuginfo.py36
-rw-r--r--third_party/python/gyp/test/mac/gyptest-depend-on-bundle.py45
-rw-r--r--third_party/python/gyp/test/mac/gyptest-deployment-target.py27
-rw-r--r--third_party/python/gyp/test/mac/gyptest-framework-dirs.py23
-rw-r--r--third_party/python/gyp/test/mac/gyptest-framework-headers.py38
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-framework.py80
-rw-r--r--third_party/python/gyp/test/mac/gyptest-global-settings.py33
-rw-r--r--third_party/python/gyp/test/mac/gyptest-identical-name.py45
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-infoplist-process.py56
-rw-r--r--third_party/python/gyp/test/mac/gyptest-installname.py85
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-kext.py27
-rw-r--r--third_party/python/gyp/test/mac/gyptest-ldflags-passed-to-libtool.py37
-rw-r--r--third_party/python/gyp/test/mac/gyptest-ldflags.py74
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-libraries.py30
-rw-r--r--third_party/python/gyp/test/mac/gyptest-libtool-zero.py26
-rw-r--r--third_party/python/gyp/test/mac/gyptest-loadable-module-bundle-product-extension.py31
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-loadable-module.py54
-rw-r--r--third_party/python/gyp/test/mac/gyptest-lto.py66
-rw-r--r--third_party/python/gyp/test/mac/gyptest-missing-cfbundlesignature.py34
-rw-r--r--third_party/python/gyp/test/mac/gyptest-non-strs-flattened-to-env.py38
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-objc-arc.py26
-rw-r--r--third_party/python/gyp/test/mac/gyptest-objc-gc.py51
-rw-r--r--third_party/python/gyp/test/mac/gyptest-postbuild-copy-bundle.py75
-rw-r--r--third_party/python/gyp/test/mac/gyptest-postbuild-defaults.py34
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-postbuild-fail.py71
-rw-r--r--third_party/python/gyp/test/mac/gyptest-postbuild-multiple-configurations.py26
-rw-r--r--third_party/python/gyp/test/mac/gyptest-postbuild-static-library.py28
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-postbuild.py53
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-prefixheader.py20
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-rebuild.py46
-rw-r--r--third_party/python/gyp/test/mac/gyptest-rpath.py50
-rw-r--r--third_party/python/gyp/test/mac/gyptest-sdkroot.py56
-rw-r--r--third_party/python/gyp/test/mac/gyptest-sourceless-module.py77
-rw-r--r--third_party/python/gyp/test/mac/gyptest-strip-default.py97
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-strip.py66
-rw-r--r--third_party/python/gyp/test/mac/gyptest-swift-library.py67
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-type-envvars.py26
-rw-r--r--third_party/python/gyp/test/mac/gyptest-unicode-settings.py20
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-xcode-env-order.py95
-rw-r--r--third_party/python/gyp/test/mac/gyptest-xcode-gcc-clang.py40
-rw-r--r--third_party/python/gyp/test/mac/gyptest-xcode-gcc.py60
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-xcode-support-actions.py25
-rw-r--r--third_party/python/gyp/test/mac/gyptest-xctest.py41
-rwxr-xr-xthird_party/python/gyp/test/mac/gyptest-xcuitest.py39
-rw-r--r--third_party/python/gyp/test/mac/identical-name/proxy/proxy.cc2
-rw-r--r--third_party/python/gyp/test/mac/identical-name/proxy/proxy.gyp9
-rw-r--r--third_party/python/gyp/test/mac/identical-name/proxy/testlib/testlib.cc2
-rw-r--r--third_party/python/gyp/test/mac/identical-name/proxy/testlib/testlib.gyp8
-rw-r--r--third_party/python/gyp/test/mac/identical-name/test-should-fail.gyp10
-rw-r--r--third_party/python/gyp/test/mac/identical-name/test.gyp11
-rw-r--r--third_party/python/gyp/test/mac/identical-name/test.gypi7
-rw-r--r--third_party/python/gyp/test/mac/identical-name/testlib/main.cc3
-rw-r--r--third_party/python/gyp/test/mac/identical-name/testlib/testlib.gyp14
-rw-r--r--third_party/python/gyp/test/mac/identical-name/testlib/void.cc2
-rw-r--r--third_party/python/gyp/test/mac/infoplist-process/Info.plist36
-rw-r--r--third_party/python/gyp/test/mac/infoplist-process/main.c7
-rw-r--r--third_party/python/gyp/test/mac/infoplist-process/test1.gyp25
-rw-r--r--third_party/python/gyp/test/mac/infoplist-process/test2.gyp25
-rw-r--r--third_party/python/gyp/test/mac/infoplist-process/test3.gyp25
-rw-r--r--third_party/python/gyp/test/mac/installname/Info.plist28
-rw-r--r--third_party/python/gyp/test/mac/installname/file.c1
-rw-r--r--third_party/python/gyp/test/mac/installname/main.c1
-rw-r--r--third_party/python/gyp/test/mac/installname/test.gyp93
-rw-r--r--third_party/python/gyp/test/mac/kext/GypKext/GypKext-Info.plist35
-rw-r--r--third_party/python/gyp/test/mac/kext/GypKext/GypKext.c16
-rw-r--r--third_party/python/gyp/test/mac/kext/kext.gyp18
-rw-r--r--third_party/python/gyp/test/mac/ldflags-libtool/file.c1
-rw-r--r--third_party/python/gyp/test/mac/ldflags-libtool/test.gyp17
-rw-r--r--third_party/python/gyp/test/mac/ldflags/subdirectory/Info.plist8
-rw-r--r--third_party/python/gyp/test/mac/ldflags/subdirectory/file.c2
-rw-r--r--third_party/python/gyp/test/mac/ldflags/subdirectory/symbol_list.def1
-rw-r--r--third_party/python/gyp/test/mac/ldflags/subdirectory/test.gyp66
-rw-r--r--third_party/python/gyp/test/mac/libraries/subdir/README.txt1
-rw-r--r--third_party/python/gyp/test/mac/libraries/subdir/hello.cc10
-rw-r--r--third_party/python/gyp/test/mac/libraries/subdir/mylib.c7
-rw-r--r--third_party/python/gyp/test/mac/libraries/subdir/test.gyp65
-rw-r--r--third_party/python/gyp/test/mac/libtool-zero/mylib.c7
-rw-r--r--third_party/python/gyp/test/mac/libtool-zero/test.gyp18
-rw-r--r--third_party/python/gyp/test/mac/loadable-module-bundle-product-extension/src.cc7
-rw-r--r--third_party/python/gyp/test/mac/loadable-module-bundle-product-extension/test.gyp24
-rw-r--r--third_party/python/gyp/test/mac/loadable-module/Info.plist26
-rw-r--r--third_party/python/gyp/test/mac/loadable-module/module.c11
-rw-r--r--third_party/python/gyp/test/mac/loadable-module/test.gyp18
-rw-r--r--third_party/python/gyp/test/mac/lto/asmfile.S2
-rw-r--r--third_party/python/gyp/test/mac/lto/ccfile.cc1
-rw-r--r--third_party/python/gyp/test/mac/lto/cfile.c1
-rw-r--r--third_party/python/gyp/test/mac/lto/mfile.m1
-rw-r--r--third_party/python/gyp/test/mac/lto/mmfile.mm1
-rw-r--r--third_party/python/gyp/test/mac/lto/test.gyp35
-rw-r--r--third_party/python/gyp/test/mac/missing-cfbundlesignature/Info.plist10
-rw-r--r--third_party/python/gyp/test/mac/missing-cfbundlesignature/Other-Info.plist12
-rw-r--r--third_party/python/gyp/test/mac/missing-cfbundlesignature/Third-Info.plist12
-rw-r--r--third_party/python/gyp/test/mac/missing-cfbundlesignature/file.c1
-rw-r--r--third_party/python/gyp/test/mac/missing-cfbundlesignature/test.gyp34
-rw-r--r--third_party/python/gyp/test/mac/non-strs-flattened-to-env/Info.plist15
-rw-r--r--third_party/python/gyp/test/mac/non-strs-flattened-to-env/main.c7
-rw-r--r--third_party/python/gyp/test/mac/non-strs-flattened-to-env/test.gyp27
-rw-r--r--third_party/python/gyp/test/mac/objc-arc/c-file.c6
-rw-r--r--third_party/python/gyp/test/mac/objc-arc/cc-file.cc5
-rw-r--r--third_party/python/gyp/test/mac/objc-arc/m-file-no-arc.m5
-rw-r--r--third_party/python/gyp/test/mac/objc-arc/m-file.m5
-rw-r--r--third_party/python/gyp/test/mac/objc-arc/mm-file-no-arc.mm5
-rw-r--r--third_party/python/gyp/test/mac/objc-arc/mm-file.mm5
-rw-r--r--third_party/python/gyp/test/mac/objc-arc/test.gyp45
-rw-r--r--third_party/python/gyp/test/mac/objc-gc/c-file.c1
-rw-r--r--third_party/python/gyp/test/mac/objc-gc/cc-file.cc1
-rw-r--r--third_party/python/gyp/test/mac/objc-gc/main.m6
-rw-r--r--third_party/python/gyp/test/mac/objc-gc/needs-gc-mm.mm1
-rw-r--r--third_party/python/gyp/test/mac/objc-gc/needs-gc.m1
-rw-r--r--third_party/python/gyp/test/mac/objc-gc/test.gyp102
-rw-r--r--third_party/python/gyp/test/mac/postbuild-copy-bundle/Framework-Info.plist30
-rw-r--r--third_party/python/gyp/test/mac/postbuild-copy-bundle/TestApp-Info.plist32
-rw-r--r--third_party/python/gyp/test/mac/postbuild-copy-bundle/copied.txt1
-rw-r--r--third_party/python/gyp/test/mac/postbuild-copy-bundle/empty.c0
-rw-r--r--third_party/python/gyp/test/mac/postbuild-copy-bundle/main.c4
-rwxr-xr-xthird_party/python/gyp/test/mac/postbuild-copy-bundle/postbuild-copy-framework.sh9
-rw-r--r--third_party/python/gyp/test/mac/postbuild-copy-bundle/resource_file.sb1
-rw-r--r--third_party/python/gyp/test/mac/postbuild-copy-bundle/test.gyp49
-rw-r--r--third_party/python/gyp/test/mac/postbuild-defaults/Info.plist13
-rw-r--r--third_party/python/gyp/test/mac/postbuild-defaults/main.c7
-rwxr-xr-xthird_party/python/gyp/test/mac/postbuild-defaults/postbuild-defaults.sh15
-rw-r--r--third_party/python/gyp/test/mac/postbuild-defaults/test.gyp26
-rw-r--r--third_party/python/gyp/test/mac/postbuild-fail/file.c6
-rwxr-xr-xthird_party/python/gyp/test/mac/postbuild-fail/postbuild-fail.sh6
-rw-r--r--third_party/python/gyp/test/mac/postbuild-fail/test.gyp38
-rwxr-xr-xthird_party/python/gyp/test/mac/postbuild-fail/touch-dynamic.sh7
-rwxr-xr-xthird_party/python/gyp/test/mac/postbuild-fail/touch-static.sh7
-rw-r--r--third_party/python/gyp/test/mac/postbuild-multiple-configurations/main.c4
-rwxr-xr-xthird_party/python/gyp/test/mac/postbuild-multiple-configurations/postbuild-touch-file.sh7
-rw-r--r--third_party/python/gyp/test/mac/postbuild-multiple-configurations/test.gyp26
-rw-r--r--third_party/python/gyp/test/mac/postbuild-static-library/empty.c4
-rwxr-xr-xthird_party/python/gyp/test/mac/postbuild-static-library/postbuild-touch-file.sh7
-rw-r--r--third_party/python/gyp/test/mac/postbuild-static-library/test.gyp34
-rwxr-xr-xthird_party/python/gyp/test/mac/postbuilds/copy.sh3
-rw-r--r--third_party/python/gyp/test/mac/postbuilds/file.c4
-rw-r--r--third_party/python/gyp/test/mac/postbuilds/file_g.c4
-rw-r--r--third_party/python/gyp/test/mac/postbuilds/file_h.c4
-rwxr-xr-xthird_party/python/gyp/test/mac/postbuilds/script/shared_library_postbuild.sh23
-rwxr-xr-xthird_party/python/gyp/test/mac/postbuilds/script/static_library_postbuild.sh23
-rw-r--r--third_party/python/gyp/test/mac/postbuilds/subdirectory/copied_file.txt1
-rw-r--r--third_party/python/gyp/test/mac/postbuilds/subdirectory/nested_target.gyp53
-rw-r--r--third_party/python/gyp/test/mac/postbuilds/test.gyp93
-rw-r--r--third_party/python/gyp/test/mac/prefixheader/file.c1
-rw-r--r--third_party/python/gyp/test/mac/prefixheader/file.cc1
-rw-r--r--third_party/python/gyp/test/mac/prefixheader/file.m1
-rw-r--r--third_party/python/gyp/test/mac/prefixheader/file.mm1
-rw-r--r--third_party/python/gyp/test/mac/prefixheader/header.h1
-rw-r--r--third_party/python/gyp/test/mac/prefixheader/test.gyp82
-rw-r--r--third_party/python/gyp/test/mac/rebuild/TestApp-Info.plist32
-rwxr-xr-xthird_party/python/gyp/test/mac/rebuild/delay-touch.sh6
-rw-r--r--third_party/python/gyp/test/mac/rebuild/empty.c0
-rw-r--r--third_party/python/gyp/test/mac/rebuild/main.c1
-rw-r--r--third_party/python/gyp/test/mac/rebuild/test.gyp56
-rw-r--r--third_party/python/gyp/test/mac/rpath/file.c1
-rw-r--r--third_party/python/gyp/test/mac/rpath/main.c1
-rw-r--r--third_party/python/gyp/test/mac/rpath/test.gyp48
-rw-r--r--third_party/python/gyp/test/mac/sdkroot/file.cc5
-rw-r--r--third_party/python/gyp/test/mac/sdkroot/test.gyp35
-rwxr-xr-xthird_party/python/gyp/test/mac/sdkroot/test_shorthand.sh20
-rw-r--r--third_party/python/gyp/test/mac/sourceless-module/empty.c1
-rw-r--r--third_party/python/gyp/test/mac/sourceless-module/empty.txt2
-rw-r--r--third_party/python/gyp/test/mac/sourceless-module/fun.c1
-rw-r--r--third_party/python/gyp/test/mac/sourceless-module/test.gyp96
-rw-r--r--third_party/python/gyp/test/mac/strip/file.c22
-rw-r--r--third_party/python/gyp/test/mac/strip/main.c25
-rw-r--r--third_party/python/gyp/test/mac/strip/strip.saves5
-rw-r--r--third_party/python/gyp/test/mac/strip/subdirectory/nested_file.c1
-rw-r--r--third_party/python/gyp/test/mac/strip/subdirectory/nested_strip.saves5
-rw-r--r--third_party/python/gyp/test/mac/strip/subdirectory/subdirectory.gyp38
-rwxr-xr-xthird_party/python/gyp/test/mac/strip/subdirectory/test_reading_save_file_from_postbuild.sh5
-rw-r--r--third_party/python/gyp/test/mac/strip/test-defaults.gyp51
-rw-r--r--third_party/python/gyp/test/mac/strip/test.gyp119
-rw-r--r--third_party/python/gyp/test/mac/swift-library/Info.plist28
-rw-r--r--third_party/python/gyp/test/mac/swift-library/file.swift9
-rw-r--r--third_party/python/gyp/test/mac/swift-library/test.gyp21
-rw-r--r--third_party/python/gyp/test/mac/type_envvars/file.c6
-rw-r--r--third_party/python/gyp/test/mac/type_envvars/test.gyp100
-rwxr-xr-xthird_party/python/gyp/test/mac/type_envvars/test_bundle_executable.sh30
-rwxr-xr-xthird_party/python/gyp/test/mac/type_envvars/test_bundle_loadable_module.sh35
-rwxr-xr-xthird_party/python/gyp/test/mac/type_envvars/test_bundle_shared_library.sh38
-rwxr-xr-xthird_party/python/gyp/test/mac/type_envvars/test_check_sdkroot.sh47
-rwxr-xr-xthird_party/python/gyp/test/mac/type_envvars/test_nonbundle_executable.sh33
-rwxr-xr-xthird_party/python/gyp/test/mac/type_envvars/test_nonbundle_loadable_module.sh31
-rwxr-xr-xthird_party/python/gyp/test/mac/type_envvars/test_nonbundle_none.sh32
-rwxr-xr-xthird_party/python/gyp/test/mac/type_envvars/test_nonbundle_shared_library.sh31
-rwxr-xr-xthird_party/python/gyp/test/mac/type_envvars/test_nonbundle_static_library.sh31
-rw-r--r--third_party/python/gyp/test/mac/unicode-settings/file.cc2
-rw-r--r--third_party/python/gyp/test/mac/unicode-settings/test.gyp23
-rwxr-xr-xthird_party/python/gyp/test/mac/unicode-settings/test_bundle_display_name.sh7
-rw-r--r--third_party/python/gyp/test/mac/xcode-env-order/Info.plist56
-rw-r--r--third_party/python/gyp/test/mac/xcode-env-order/file.ext10
-rw-r--r--third_party/python/gyp/test/mac/xcode-env-order/file.ext20
-rw-r--r--third_party/python/gyp/test/mac/xcode-env-order/file.ext30
-rw-r--r--third_party/python/gyp/test/mac/xcode-env-order/main.c7
-rw-r--r--third_party/python/gyp/test/mac/xcode-env-order/test.gyp121
-rw-r--r--third_party/python/gyp/test/mac/xcode-gcc/aliasing.cc13
-rw-r--r--third_party/python/gyp/test/mac/xcode-gcc/test-clang.gyp42
-rw-r--r--third_party/python/gyp/test/mac/xcode-gcc/test.gyp60
-rw-r--r--third_party/python/gyp/test/mac/xcode-gcc/valid_c.c8
-rw-r--r--third_party/python/gyp/test/mac/xcode-gcc/valid_cc.cc8
-rw-r--r--third_party/python/gyp/test/mac/xcode-gcc/valid_m.m8
-rw-r--r--third_party/python/gyp/test/mac/xcode-gcc/valid_mm.mm8
-rw-r--r--third_party/python/gyp/test/mac/xcode-gcc/warn_about_invalid_offsetof_macro.cc15
-rw-r--r--third_party/python/gyp/test/mac/xcode-gcc/warn_about_missing_newline.c8
-rw-r--r--third_party/python/gyp/test/mac/xcode-support-actions/source.c0
-rw-r--r--third_party/python/gyp/test/mac/xcode-support-actions/test.gyp26
-rw-r--r--third_party/python/gyp/test/mac/xctest/MyClass.h8
-rw-r--r--third_party/python/gyp/test/mac/xctest/MyClass.m8
-rw-r--r--third_party/python/gyp/test/mac/xctest/TestCase.m16
-rw-r--r--third_party/python/gyp/test/mac/xctest/resource.txt1
-rw-r--r--third_party/python/gyp/test/mac/xctest/test.gyp47
-rw-r--r--third_party/python/gyp/test/mac/xctest/test.xcodeproj/xcshareddata/xcschemes/classes.xcscheme69
-rw-r--r--third_party/python/gyp/test/mac/xcuitest/Info.plist28
-rw-r--r--third_party/python/gyp/test/mac/xcuitest/MyAppDelegate.h8
-rw-r--r--third_party/python/gyp/test/mac/xcuitest/MyAppDelegate.m19
-rw-r--r--third_party/python/gyp/test/mac/xcuitest/TestCase.m15
-rw-r--r--third_party/python/gyp/test/mac/xcuitest/main.m15
-rw-r--r--third_party/python/gyp/test/mac/xcuitest/resource.txt1
-rw-r--r--third_party/python/gyp/test/mac/xcuitest/test.gyp69
-rw-r--r--third_party/python/gyp/test/make/dependencies.gyp15
-rwxr-xr-xthird_party/python/gyp/test/make/gyptest-dependencies.py26
-rwxr-xr-xthird_party/python/gyp/test/make/gyptest-noload.py57
-rw-r--r--third_party/python/gyp/test/make/main.cc12
-rw-r--r--third_party/python/gyp/test/make/main.h0
-rw-r--r--third_party/python/gyp/test/make/noload/all.gyp18
-rw-r--r--third_party/python/gyp/test/make/noload/lib/shared.c3
-rw-r--r--third_party/python/gyp/test/make/noload/lib/shared.gyp16
-rw-r--r--third_party/python/gyp/test/make/noload/lib/shared.h1
-rw-r--r--third_party/python/gyp/test/make/noload/main.c9
-rw-r--r--third_party/python/gyp/test/make_global_settings/ar/gyptest-make_global_settings_ar.py126
-rw-r--r--third_party/python/gyp/test/make_global_settings/ar/make_global_settings_ar.gyp29
-rw-r--r--third_party/python/gyp/test/make_global_settings/basics/gyptest-make_global_settings.py51
-rw-r--r--third_party/python/gyp/test/make_global_settings/basics/make_global_settings.gyp17
-rw-r--r--third_party/python/gyp/test/make_global_settings/env-wrapper/gyptest-wrapper.py51
-rw-r--r--third_party/python/gyp/test/make_global_settings/env-wrapper/wrapper.gyp17
-rw-r--r--third_party/python/gyp/test/make_global_settings/full-toolchain/bar.cc1
-rw-r--r--third_party/python/gyp/test/make_global_settings/full-toolchain/foo.c1
-rw-r--r--third_party/python/gyp/test/make_global_settings/full-toolchain/gyptest-make_global_settings.py53
-rw-r--r--third_party/python/gyp/test/make_global_settings/full-toolchain/make_global_settings.gyp22
-rwxr-xr-xthird_party/python/gyp/test/make_global_settings/full-toolchain/my_nm.py9
-rwxr-xr-xthird_party/python/gyp/test/make_global_settings/full-toolchain/my_readelf.py9
-rw-r--r--third_party/python/gyp/test/make_global_settings/ld/gyptest-make_global_settings_ld.py130
-rw-r--r--third_party/python/gyp/test/make_global_settings/ld/make_global_settings_ld.gyp29
-rw-r--r--third_party/python/gyp/test/make_global_settings/wrapper/gyptest-wrapper.py52
-rw-r--r--third_party/python/gyp/test/make_global_settings/wrapper/wrapper.gyp21
-rw-r--r--third_party/python/gyp/test/many-actions/file00
-rw-r--r--third_party/python/gyp/test/many-actions/file10
-rw-r--r--third_party/python/gyp/test/many-actions/file20
-rw-r--r--third_party/python/gyp/test/many-actions/file30
-rw-r--r--third_party/python/gyp/test/many-actions/file40
-rw-r--r--third_party/python/gyp/test/many-actions/gyptest-many-actions-unsorted.py43
-rw-r--r--third_party/python/gyp/test/many-actions/gyptest-many-actions.py29
-rw-r--r--third_party/python/gyp/test/many-actions/many-actions-unsorted.gyp154
-rw-r--r--third_party/python/gyp/test/many-actions/many-actions.gyp1817
-rwxr-xr-xthird_party/python/gyp/test/module/gyptest-default.py28
-rw-r--r--third_party/python/gyp/test/module/src/lib1.c10
-rw-r--r--third_party/python/gyp/test/module/src/lib2.c10
-rw-r--r--third_party/python/gyp/test/module/src/module.gyp53
-rw-r--r--third_party/python/gyp/test/module/src/program.c111
-rw-r--r--third_party/python/gyp/test/msvs/buildevents/buildevents.gyp14
-rwxr-xr-xthird_party/python/gyp/test/msvs/buildevents/gyptest-msbuild-supports-prepostbuild.py24
-rwxr-xr-xthird_party/python/gyp/test/msvs/buildevents/gyptest-ninja-warnings.py29
-rw-r--r--third_party/python/gyp/test/msvs/buildevents/main.cc5
-rw-r--r--third_party/python/gyp/test/msvs/config_attrs/gyptest-config_attrs.py41
-rw-r--r--third_party/python/gyp/test/msvs/config_attrs/hello.c11
-rw-r--r--third_party/python/gyp/test/msvs/config_attrs/hello.gyp21
-rw-r--r--third_party/python/gyp/test/msvs/express/base/base.gyp22
-rw-r--r--third_party/python/gyp/test/msvs/express/express.gyp19
-rwxr-xr-xthird_party/python/gyp/test/msvs/express/gyptest-express.py29
-rw-r--r--third_party/python/gyp/test/msvs/external_builder/external.gyp68
-rw-r--r--third_party/python/gyp/test/msvs/external_builder/external_builder.py9
-rw-r--r--third_party/python/gyp/test/msvs/external_builder/gyptest-all.py59
-rw-r--r--third_party/python/gyp/test/msvs/external_builder/hello.cpp10
-rw-r--r--third_party/python/gyp/test/msvs/external_builder/hello.z6
-rw-r--r--third_party/python/gyp/test/msvs/external_builder/msbuild_action.py9
-rw-r--r--third_party/python/gyp/test/msvs/external_builder/msbuild_rule.py11
-rw-r--r--third_party/python/gyp/test/msvs/filters/filters.gyp47
-rw-r--r--third_party/python/gyp/test/msvs/filters/gyptest-filters-2008.py68
-rw-r--r--third_party/python/gyp/test/msvs/filters/gyptest-filters-2010.py57
-rw-r--r--third_party/python/gyp/test/msvs/list_excluded/gyptest-all.py51
-rw-r--r--third_party/python/gyp/test/msvs/list_excluded/hello.cpp10
-rw-r--r--third_party/python/gyp/test/msvs/list_excluded/hello_exclude.gyp19
-rw-r--r--third_party/python/gyp/test/msvs/list_excluded/hello_mac.cpp10
-rw-r--r--third_party/python/gyp/test/msvs/missing_sources/gyptest-missing.py43
-rw-r--r--third_party/python/gyp/test/msvs/missing_sources/hello_missing.gyp15
-rw-r--r--third_party/python/gyp/test/msvs/multiple_actions_error_handling/action_fail.py7
-rw-r--r--third_party/python/gyp/test/msvs/multiple_actions_error_handling/action_succeed.py7
-rw-r--r--third_party/python/gyp/test/msvs/multiple_actions_error_handling/actions.gyp40
-rw-r--r--third_party/python/gyp/test/msvs/multiple_actions_error_handling/gyptest.py26
-rw-r--r--third_party/python/gyp/test/msvs/props/AppName.props14
-rw-r--r--third_party/python/gyp/test/msvs/props/AppName.vsprops11
-rw-r--r--third_party/python/gyp/test/msvs/props/gyptest-props.py22
-rw-r--r--third_party/python/gyp/test/msvs/props/hello.c11
-rw-r--r--third_party/python/gyp/test/msvs/props/hello.gyp22
-rw-r--r--third_party/python/gyp/test/msvs/rules_stdout_stderr/dummy.bar5
-rw-r--r--third_party/python/gyp/test/msvs/rules_stdout_stderr/dummy.foo5
-rw-r--r--third_party/python/gyp/test/msvs/rules_stdout_stderr/gyptest-rules-stdout-stderr.py29
-rw-r--r--third_party/python/gyp/test/msvs/rules_stdout_stderr/rule_stderr.py8
-rw-r--r--third_party/python/gyp/test/msvs/rules_stdout_stderr/rule_stdout.py7
-rw-r--r--third_party/python/gyp/test/msvs/rules_stdout_stderr/rules-stdout-stderr.gyp52
-rw-r--r--third_party/python/gyp/test/msvs/shared_output/common.gypi17
-rw-r--r--third_party/python/gyp/test/msvs/shared_output/gyptest-shared_output.py41
-rw-r--r--third_party/python/gyp/test/msvs/shared_output/hello.c12
-rw-r--r--third_party/python/gyp/test/msvs/shared_output/hello.gyp21
-rw-r--r--third_party/python/gyp/test/msvs/shared_output/there/there.c12
-rw-r--r--third_party/python/gyp/test/msvs/shared_output/there/there.gyp16
-rw-r--r--third_party/python/gyp/test/msvs/uldi2010/gyptest-all.py20
-rw-r--r--third_party/python/gyp/test/msvs/uldi2010/hello.c13
-rw-r--r--third_party/python/gyp/test/msvs/uldi2010/hello.gyp26
-rw-r--r--third_party/python/gyp/test/msvs/uldi2010/hello2.c10
-rwxr-xr-xthird_party/python/gyp/test/multiple-targets/gyptest-all.py30
-rwxr-xr-xthird_party/python/gyp/test/multiple-targets/gyptest-default.py30
-rw-r--r--third_party/python/gyp/test/multiple-targets/src/common.c7
-rw-r--r--third_party/python/gyp/test/multiple-targets/src/multiple.gyp24
-rw-r--r--third_party/python/gyp/test/multiple-targets/src/prog1.c10
-rw-r--r--third_party/python/gyp/test/multiple-targets/src/prog2.c10
-rw-r--r--third_party/python/gyp/test/ninja/action-rule-hash/gyptest-action-rule-hash.py32
-rw-r--r--third_party/python/gyp/test/ninja/action-rule-hash/subdir/action-rule-hash.gyp29
-rw-r--r--third_party/python/gyp/test/ninja/action-rule-hash/subdir/emit.py13
-rwxr-xr-xthird_party/python/gyp/test/ninja/action_dependencies/gyptest-action-dependencies.py64
-rw-r--r--third_party/python/gyp/test/ninja/action_dependencies/src/a.c10
-rw-r--r--third_party/python/gyp/test/ninja/action_dependencies/src/a.h13
-rw-r--r--third_party/python/gyp/test/ninja/action_dependencies/src/action_dependencies.gyp88
-rw-r--r--third_party/python/gyp/test/ninja/action_dependencies/src/b.c18
-rw-r--r--third_party/python/gyp/test/ninja/action_dependencies/src/b.h13
-rw-r--r--third_party/python/gyp/test/ninja/action_dependencies/src/c.c10
-rw-r--r--third_party/python/gyp/test/ninja/action_dependencies/src/c.h13
-rwxr-xr-xthird_party/python/gyp/test/ninja/action_dependencies/src/emit.py11
-rw-r--r--third_party/python/gyp/test/ninja/chained-dependency/chained-dependency.gyp53
-rw-r--r--third_party/python/gyp/test/ninja/chained-dependency/chained.c5
-rwxr-xr-xthird_party/python/gyp/test/ninja/chained-dependency/gyptest-chained-dependency.py30
-rw-r--r--third_party/python/gyp/test/ninja/empty-and-non-empty-duplicate-name/gyptest-empty-and-non-empty-duplicate-name.py23
-rw-r--r--third_party/python/gyp/test/ninja/empty-and-non-empty-duplicate-name/subdir/included.gyp19
-rw-r--r--third_party/python/gyp/test/ninja/empty-and-non-empty-duplicate-name/test.gyp19
-rw-r--r--third_party/python/gyp/test/ninja/normalize-paths-win/gyptest-normalize-paths.py46
-rw-r--r--third_party/python/gyp/test/ninja/normalize-paths-win/hello.cc7
-rw-r--r--third_party/python/gyp/test/ninja/normalize-paths-win/normalize-paths.gyp68
-rw-r--r--third_party/python/gyp/test/ninja/s-needs-no-depfiles/empty.s1
-rwxr-xr-xthird_party/python/gyp/test/ninja/s-needs-no-depfiles/gyptest-s-needs-no-depfiles.py42
-rw-r--r--third_party/python/gyp/test/ninja/s-needs-no-depfiles/s-needs-no-depfiles.gyp13
-rwxr-xr-xthird_party/python/gyp/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py48
-rw-r--r--third_party/python/gyp/test/ninja/solibs_avoid_relinking/main.cc5
-rw-r--r--third_party/python/gyp/test/ninja/solibs_avoid_relinking/solib.cc8
-rw-r--r--third_party/python/gyp/test/ninja/solibs_avoid_relinking/solibs_avoid_relinking.gyp38
-rw-r--r--third_party/python/gyp/test/ninja/use-console/foo.bar5
-rw-r--r--third_party/python/gyp/test/ninja/use-console/gyptest-use-console.py29
-rw-r--r--third_party/python/gyp/test/ninja/use-console/use-console.gyp60
-rw-r--r--third_party/python/gyp/test/ninja/use-custom-environment-files/gyptest-use-custom-environment-files.py28
-rw-r--r--third_party/python/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.cc7
-rw-r--r--third_party/python/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.gyp15
-rw-r--r--third_party/python/gyp/test/no-cpp/gyptest-no-cpp.py53
-rw-r--r--third_party/python/gyp/test/no-cpp/src/call-f-main.c2
-rw-r--r--third_party/python/gyp/test/no-cpp/src/empty-main.c1
-rw-r--r--third_party/python/gyp/test/no-cpp/src/f.cc3
-rw-r--r--third_party/python/gyp/test/no-cpp/src/test.gyp25
-rwxr-xr-xthird_party/python/gyp/test/no-output/gyptest-no-output.py21
-rw-r--r--third_party/python/gyp/test/no-output/src/nooutput.gyp17
-rwxr-xr-xthird_party/python/gyp/test/product/gyptest-product.py43
-rw-r--r--third_party/python/gyp/test/product/hello.c15
-rw-r--r--third_party/python/gyp/test/product/product.gyp128
-rw-r--r--third_party/python/gyp/test/prune_targets/gyptest-prune-targets.py66
-rw-r--r--third_party/python/gyp/test/prune_targets/lib1.cc6
-rw-r--r--third_party/python/gyp/test/prune_targets/lib2.cc6
-rw-r--r--third_party/python/gyp/test/prune_targets/lib3.cc6
-rw-r--r--third_party/python/gyp/test/prune_targets/lib_indirect.cc6
-rw-r--r--third_party/python/gyp/test/prune_targets/program.cc7
-rw-r--r--third_party/python/gyp/test/prune_targets/test1.gyp26
-rw-r--r--third_party/python/gyp/test/prune_targets/test2.gyp30
-rw-r--r--third_party/python/gyp/test/relative/foo/a/a.cc9
-rw-r--r--third_party/python/gyp/test/relative/foo/a/a.gyp13
-rw-r--r--third_party/python/gyp/test/relative/foo/a/c/c.cc9
-rw-r--r--third_party/python/gyp/test/relative/foo/a/c/c.gyp12
-rw-r--r--third_party/python/gyp/test/relative/foo/b/b.cc9
-rw-r--r--third_party/python/gyp/test/relative/foo/b/b.gyp9
-rwxr-xr-xthird_party/python/gyp/test/relative/gyptest-default.py25
-rw-r--r--third_party/python/gyp/test/rename/filecase/file.c1
-rw-r--r--third_party/python/gyp/test/rename/filecase/test-casesensitive.gyp15
-rw-r--r--third_party/python/gyp/test/rename/filecase/test.gyp14
-rw-r--r--third_party/python/gyp/test/rename/gyptest-filecase.py35
-rw-r--r--third_party/python/gyp/test/restat/gyptest-restat.py31
-rw-r--r--third_party/python/gyp/test/restat/src/create_intermediate.py17
-rw-r--r--third_party/python/gyp/test/restat/src/restat.gyp50
-rw-r--r--third_party/python/gyp/test/restat/src/touch.py16
-rwxr-xr-xthird_party/python/gyp/test/rules-dirname/gyptest-dirname.py57
-rw-r--r--third_party/python/gyp/test/rules-dirname/src/actions.gyp15
-rwxr-xr-xthird_party/python/gyp/test/rules-dirname/src/copy-file.py11
-rw-r--r--third_party/python/gyp/test/rules-dirname/src/subdir/a/b/c.gencc8
-rw-r--r--third_party/python/gyp/test/rules-dirname/src/subdir/a/b/c.printvars1
-rw-r--r--third_party/python/gyp/test/rules-dirname/src/subdir/foo/bar/baz.gencc8
-rw-r--r--third_party/python/gyp/test/rules-dirname/src/subdir/foo/bar/baz.printvars1
-rw-r--r--third_party/python/gyp/test/rules-dirname/src/subdir/input-rule-dirname.gyp140
-rw-r--r--third_party/python/gyp/test/rules-dirname/src/subdir/main.cc14
-rw-r--r--third_party/python/gyp/test/rules-dirname/src/subdir/nodir.gencc8
-rwxr-xr-xthird_party/python/gyp/test/rules-dirname/src/subdir/printvars.py14
-rwxr-xr-xthird_party/python/gyp/test/rules-rebuild/gyptest-all.py70
-rwxr-xr-xthird_party/python/gyp/test/rules-rebuild/gyptest-default.py91
-rw-r--r--third_party/python/gyp/test/rules-rebuild/src/main.c12
-rwxr-xr-xthird_party/python/gyp/test/rules-rebuild/src/make-sources.py19
-rw-r--r--third_party/python/gyp/test/rules-rebuild/src/prog1.in7
-rw-r--r--third_party/python/gyp/test/rules-rebuild/src/prog2.in7
-rw-r--r--third_party/python/gyp/test/rules-rebuild/src/same_target.gyp31
-rwxr-xr-xthird_party/python/gyp/test/rules-use-built-dependencies/gyptest-use-built-dependencies.py23
-rw-r--r--third_party/python/gyp/test/rules-use-built-dependencies/src/main.cc17
-rw-r--r--third_party/python/gyp/test/rules-use-built-dependencies/src/use-built-dependencies-rule.gyp42
-rwxr-xr-xthird_party/python/gyp/test/rules-variables/gyptest-rules-variables.py35
-rw-r--r--third_party/python/gyp/test/rules-variables/src/input_ext.c9
-rw-r--r--third_party/python/gyp/test/rules-variables/src/input_name/test.c9
-rw-r--r--third_party/python/gyp/test/rules-variables/src/input_path/subdir/test.c9
-rw-r--r--third_party/python/gyp/test/rules-variables/src/subdir/input_dirname.c9
-rw-r--r--third_party/python/gyp/test/rules-variables/src/subdir/test.c18
-rw-r--r--third_party/python/gyp/test/rules-variables/src/test.input_root.c9
-rw-r--r--third_party/python/gyp/test/rules-variables/src/variables.gyp40
-rwxr-xr-xthird_party/python/gyp/test/rules/gyptest-all.py84
-rwxr-xr-xthird_party/python/gyp/test/rules/gyptest-default.py70
-rwxr-xr-xthird_party/python/gyp/test/rules/gyptest-input-root.py26
-rw-r--r--third_party/python/gyp/test/rules/gyptest-special-variables.py18
-rw-r--r--third_party/python/gyp/test/rules/src/actions.gyp23
-rw-r--r--third_party/python/gyp/test/rules/src/an_asm.S6
-rw-r--r--third_party/python/gyp/test/rules/src/as.bat7
-rwxr-xr-xthird_party/python/gyp/test/rules/src/copy-file.py11
-rw-r--r--third_party/python/gyp/test/rules/src/external/external.gyp66
-rw-r--r--third_party/python/gyp/test/rules/src/external/file1.in1
-rw-r--r--third_party/python/gyp/test/rules/src/external/file2.in1
-rw-r--r--third_party/python/gyp/test/rules/src/input-root.gyp24
-rw-r--r--third_party/python/gyp/test/rules/src/noaction/file1.in1
-rw-r--r--third_party/python/gyp/test/rules/src/noaction/no_action_with_rules_fails.gyp37
-rwxr-xr-xthird_party/python/gyp/test/rules/src/rule.py17
-rw-r--r--third_party/python/gyp/test/rules/src/somefile.ext0
-rw-r--r--third_party/python/gyp/test/rules/src/special-variables.gyp34
-rw-r--r--third_party/python/gyp/test/rules/src/subdir1/executable.gyp37
-rw-r--r--third_party/python/gyp/test/rules/src/subdir1/function1.in6
-rw-r--r--third_party/python/gyp/test/rules/src/subdir1/function2.in6
-rw-r--r--third_party/python/gyp/test/rules/src/subdir1/program.c12
-rw-r--r--third_party/python/gyp/test/rules/src/subdir2/both_rule_and_action_input.gyp50
-rw-r--r--third_party/python/gyp/test/rules/src/subdir2/file1.in1
-rw-r--r--third_party/python/gyp/test/rules/src/subdir2/file2.in1
-rw-r--r--third_party/python/gyp/test/rules/src/subdir2/never_used.gyp31
-rw-r--r--third_party/python/gyp/test/rules/src/subdir2/no_action.gyp38
-rw-r--r--third_party/python/gyp/test/rules/src/subdir2/no_inputs.gyp32
-rw-r--r--third_party/python/gyp/test/rules/src/subdir2/none.gyp33
-rw-r--r--third_party/python/gyp/test/rules/src/subdir2/program.c12
-rw-r--r--third_party/python/gyp/test/rules/src/subdir3/executable2.gyp37
-rw-r--r--third_party/python/gyp/test/rules/src/subdir3/function3.in6
-rw-r--r--third_party/python/gyp/test/rules/src/subdir3/program.c10
-rw-r--r--third_party/python/gyp/test/rules/src/subdir4/asm-function.assem10
-rw-r--r--third_party/python/gyp/test/rules/src/subdir4/build-asm.gyp49
-rw-r--r--third_party/python/gyp/test/rules/src/subdir4/program.c19
-rwxr-xr-xthird_party/python/gyp/test/same-gyp-name/gyptest-all.py38
-rwxr-xr-xthird_party/python/gyp/test/same-gyp-name/gyptest-default.py38
-rw-r--r--third_party/python/gyp/test/same-gyp-name/gyptest-library.py20
-rw-r--r--third_party/python/gyp/test/same-gyp-name/library/one/sub.gyp11
-rw-r--r--third_party/python/gyp/test/same-gyp-name/library/test.gyp15
-rw-r--r--third_party/python/gyp/test/same-gyp-name/library/two/sub.gyp11
-rw-r--r--third_party/python/gyp/test/same-gyp-name/src/all.gyp16
-rw-r--r--third_party/python/gyp/test/same-gyp-name/src/subdir1/executable.gyp15
-rw-r--r--third_party/python/gyp/test/same-gyp-name/src/subdir1/main1.cc6
-rw-r--r--third_party/python/gyp/test/same-gyp-name/src/subdir2/executable.gyp15
-rw-r--r--third_party/python/gyp/test/same-gyp-name/src/subdir2/main2.cc6
-rw-r--r--third_party/python/gyp/test/same-rule-output-file-name/gyptest-all.py23
-rw-r--r--third_party/python/gyp/test/same-rule-output-file-name/src/subdir1/subdir1.gyp30
-rw-r--r--third_party/python/gyp/test/same-rule-output-file-name/src/subdir2/subdir2.gyp30
-rw-r--r--third_party/python/gyp/test/same-rule-output-file-name/src/subdirs.gyp16
-rw-r--r--third_party/python/gyp/test/same-rule-output-file-name/src/touch.py11
-rwxr-xr-xthird_party/python/gyp/test/same-source-file-name/gyptest-all.py34
-rwxr-xr-xthird_party/python/gyp/test/same-source-file-name/gyptest-default.py34
-rwxr-xr-xthird_party/python/gyp/test/same-source-file-name/gyptest-pass-executable.py33
-rwxr-xr-xthird_party/python/gyp/test/same-source-file-name/gyptest-pass-shared.py18
-rwxr-xr-xthird_party/python/gyp/test/same-source-file-name/gyptest-static.py34
-rw-r--r--third_party/python/gyp/test/same-source-file-name/src/all.gyp30
-rw-r--r--third_party/python/gyp/test/same-source-file-name/src/double-executable.gyp21
-rw-r--r--third_party/python/gyp/test/same-source-file-name/src/double-shared.gyp27
-rw-r--r--third_party/python/gyp/test/same-source-file-name/src/double-static.gyp22
-rw-r--r--third_party/python/gyp/test/same-source-file-name/src/func.c6
-rw-r--r--third_party/python/gyp/test/same-source-file-name/src/prog1.c16
-rw-r--r--third_party/python/gyp/test/same-source-file-name/src/prog2.c16
-rw-r--r--third_party/python/gyp/test/same-source-file-name/src/prog3.c18
-rw-r--r--third_party/python/gyp/test/same-source-file-name/src/subdir1/func.c6
-rw-r--r--third_party/python/gyp/test/same-source-file-name/src/subdir2/func.c6
-rw-r--r--third_party/python/gyp/test/same-target-name-different-directory/gyptest-all.py41
-rw-r--r--third_party/python/gyp/test/same-target-name-different-directory/src/subdir1/subdir1.gyp66
-rw-r--r--third_party/python/gyp/test/same-target-name-different-directory/src/subdir2/subdir2.gyp66
-rw-r--r--third_party/python/gyp/test/same-target-name-different-directory/src/subdirs.gyp16
-rw-r--r--third_party/python/gyp/test/same-target-name-different-directory/src/touch.py11
-rwxr-xr-xthird_party/python/gyp/test/same-target-name/gyptest-same-target-name.py18
-rw-r--r--third_party/python/gyp/test/same-target-name/src/all.gyp16
-rw-r--r--third_party/python/gyp/test/same-target-name/src/executable1.gyp15
-rw-r--r--third_party/python/gyp/test/same-target-name/src/executable2.gyp15
-rw-r--r--third_party/python/gyp/test/sanitize-rule-names/blah.S0
-rw-r--r--third_party/python/gyp/test/sanitize-rule-names/gyptest-sanitize-rule-names.py17
-rw-r--r--third_party/python/gyp/test/sanitize-rule-names/hello.cc7
-rw-r--r--third_party/python/gyp/test/sanitize-rule-names/sanitize-rule-names.gyp27
-rw-r--r--third_party/python/gyp/test/sanitize-rule-names/script.py10
-rw-r--r--third_party/python/gyp/test/self-dependency/common.gypi13
-rw-r--r--third_party/python/gyp/test/self-dependency/dep.gyp23
-rwxr-xr-xthird_party/python/gyp/test/self-dependency/gyptest-self-dependency.py19
-rw-r--r--third_party/python/gyp/test/self-dependency/self_dependency.gyp15
-rwxr-xr-xthird_party/python/gyp/test/sibling/gyptest-all.py42
-rwxr-xr-xthird_party/python/gyp/test/sibling/gyptest-relocate.py44
-rw-r--r--third_party/python/gyp/test/sibling/src/build/all.gyp16
-rw-r--r--third_party/python/gyp/test/sibling/src/prog1/prog1.c7
-rw-r--r--third_party/python/gyp/test/sibling/src/prog1/prog1.gyp15
-rw-r--r--third_party/python/gyp/test/sibling/src/prog2/prog2.c7
-rw-r--r--third_party/python/gyp/test/sibling/src/prog2/prog2.gyp15
-rwxr-xr-xthird_party/python/gyp/test/small/gyptest-small.py56
-rw-r--r--third_party/python/gyp/test/standalone-static-library/gyptest-standalone-static-library.py50
-rw-r--r--third_party/python/gyp/test/standalone-static-library/invalid.gyp16
-rw-r--r--third_party/python/gyp/test/standalone-static-library/mylib.c7
-rw-r--r--third_party/python/gyp/test/standalone-static-library/mylib.gyp26
-rw-r--r--third_party/python/gyp/test/standalone-static-library/prog.c7
-rw-r--r--third_party/python/gyp/test/standalone/gyptest-standalone.py35
-rw-r--r--third_party/python/gyp/test/standalone/standalone.gyp12
-rwxr-xr-xthird_party/python/gyp/test/subdirectory/gyptest-SYMROOT-all.py36
-rwxr-xr-xthird_party/python/gyp/test/subdirectory/gyptest-SYMROOT-default.py37
-rwxr-xr-xthird_party/python/gyp/test/subdirectory/gyptest-subdir-all.py34
-rwxr-xr-xthird_party/python/gyp/test/subdirectory/gyptest-subdir-default.py34
-rwxr-xr-xthird_party/python/gyp/test/subdirectory/gyptest-subdir2-deep.py25
-rwxr-xr-xthird_party/python/gyp/test/subdirectory/gyptest-top-all.py43
-rwxr-xr-xthird_party/python/gyp/test/subdirectory/gyptest-top-default.py43
-rw-r--r--third_party/python/gyp/test/subdirectory/src/prog1.c7
-rw-r--r--third_party/python/gyp/test/subdirectory/src/prog1.gyp21
-rw-r--r--third_party/python/gyp/test/subdirectory/src/subdir/prog2.c7
-rw-r--r--third_party/python/gyp/test/subdirectory/src/subdir/prog2.gyp18
-rw-r--r--third_party/python/gyp/test/subdirectory/src/subdir/subdir2/prog3.c7
-rw-r--r--third_party/python/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp18
-rw-r--r--third_party/python/gyp/test/subdirectory/src/symroot.gypi16
-rwxr-xr-xthird_party/python/gyp/test/symlinks/gyptest-symlinks.py66
-rw-r--r--third_party/python/gyp/test/symlinks/hello.c12
-rw-r--r--third_party/python/gyp/test/symlinks/hello.gyp15
-rw-r--r--third_party/python/gyp/test/target/gyptest-target.py37
-rw-r--r--third_party/python/gyp/test/target/hello.c7
-rw-r--r--third_party/python/gyp/test/target/target.gyp24
-rwxr-xr-xthird_party/python/gyp/test/toolsets/gyptest-toolsets.py31
-rw-r--r--third_party/python/gyp/test/toolsets/main.cc13
-rw-r--r--third_party/python/gyp/test/toolsets/toolsets.cc11
-rw-r--r--third_party/python/gyp/test/toolsets/toolsets.gyp62
-rw-r--r--third_party/python/gyp/test/toolsets/toolsets_shared.cc11
-rwxr-xr-xthird_party/python/gyp/test/toplevel-dir/gyptest-toplevel-dir.py31
-rw-r--r--third_party/python/gyp/test/toplevel-dir/src/sub1/main.gyp18
-rw-r--r--third_party/python/gyp/test/toplevel-dir/src/sub1/prog1.c7
-rw-r--r--third_party/python/gyp/test/toplevel-dir/src/sub2/prog2.c7
-rw-r--r--third_party/python/gyp/test/toplevel-dir/src/sub2/prog2.gyp15
-rw-r--r--third_party/python/gyp/test/variables/commands/commands-repeated.gyp128
-rw-r--r--third_party/python/gyp/test/variables/commands/commands-repeated.gyp.stdout136
-rw-r--r--third_party/python/gyp/test/variables/commands/commands-repeated.gypd.golden77
-rw-r--r--third_party/python/gyp/test/variables/commands/commands.gyp91
-rw-r--r--third_party/python/gyp/test/variables/commands/commands.gyp.ignore-env.stdout96
-rw-r--r--third_party/python/gyp/test/variables/commands/commands.gyp.stdout96
-rw-r--r--third_party/python/gyp/test/variables/commands/commands.gypd.golden66
-rw-r--r--third_party/python/gyp/test/variables/commands/commands.gypi23
-rwxr-xr-xthird_party/python/gyp/test/variables/commands/gyptest-commands-ignore-env.py47
-rwxr-xr-xthird_party/python/gyp/test/variables/commands/gyptest-commands-repeated-multidir.py23
-rwxr-xr-xthird_party/python/gyp/test/variables/commands/gyptest-commands-repeated.py40
-rwxr-xr-xthird_party/python/gyp/test/variables/commands/gyptest-commands.py40
-rw-r--r--third_party/python/gyp/test/variables/commands/repeated_multidir/dir_1/test_1.gyp13
-rw-r--r--third_party/python/gyp/test/variables/commands/repeated_multidir/dir_2/test_2.gyp13
-rw-r--r--third_party/python/gyp/test/variables/commands/repeated_multidir/main.gyp16
-rwxr-xr-xthird_party/python/gyp/test/variables/commands/repeated_multidir/print_cwd_basename.py11
-rw-r--r--third_party/python/gyp/test/variables/commands/repeated_multidir/repeated_command_common.gypi25
-rw-r--r--third_party/python/gyp/test/variables/commands/test.py7
-rwxr-xr-xthird_party/python/gyp/test/variables/commands/update_golden11
-rw-r--r--third_party/python/gyp/test/variables/empty/empty.gyp13
-rw-r--r--third_party/python/gyp/test/variables/empty/empty.gypi9
-rwxr-xr-xthird_party/python/gyp/test/variables/empty/gyptest-empty.py19
-rw-r--r--third_party/python/gyp/test/variables/filelist/filelist.gyp.stdout26
-rw-r--r--third_party/python/gyp/test/variables/filelist/filelist.gypd.golden43
-rw-r--r--third_party/python/gyp/test/variables/filelist/gyptest-filelist-golden.py53
-rwxr-xr-xthird_party/python/gyp/test/variables/filelist/gyptest-filelist.py29
-rw-r--r--third_party/python/gyp/test/variables/filelist/src/dummy.py5
-rw-r--r--third_party/python/gyp/test/variables/filelist/src/filelist.gyp93
-rw-r--r--third_party/python/gyp/test/variables/filelist/src/filelist2.gyp40
-rwxr-xr-xthird_party/python/gyp/test/variables/filelist/update_golden8
-rwxr-xr-xthird_party/python/gyp/test/variables/latelate/gyptest-latelate.py25
-rw-r--r--third_party/python/gyp/test/variables/latelate/src/latelate.gyp34
-rw-r--r--third_party/python/gyp/test/variables/latelate/src/program.cc13
-rw-r--r--third_party/python/gyp/test/variables/variable-in-path/C1/hello.cc7
-rw-r--r--third_party/python/gyp/test/variables/variable-in-path/gyptest-variable-in-path.py23
-rw-r--r--third_party/python/gyp/test/variables/variable-in-path/variable-in-path.gyp31
-rw-r--r--third_party/python/gyp/test/win/asm-files/asm-files.gyp17
-rw-r--r--third_party/python/gyp/test/win/asm-files/b.s0
-rw-r--r--third_party/python/gyp/test/win/asm-files/c.S0
-rw-r--r--third_party/python/gyp/test/win/asm-files/hello.cc7
-rw-r--r--third_party/python/gyp/test/win/batch-file-action/batch-file-action.gyp21
-rw-r--r--third_party/python/gyp/test/win/batch-file-action/infile1
-rw-r--r--third_party/python/gyp/test/win/batch-file-action/somecmd.bat5
-rw-r--r--third_party/python/gyp/test/win/command-quote/a.S0
-rw-r--r--third_party/python/gyp/test/win/command-quote/bat with spaces.bat7
-rw-r--r--third_party/python/gyp/test/win/command-quote/command-quote.gyp79
-rw-r--r--third_party/python/gyp/test/win/command-quote/go.bat7
-rw-r--r--third_party/python/gyp/test/win/command-quote/subdir/and/another/in-subdir.gyp27
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/additional-include-dirs.cc10
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/additional-include-dirs.gyp20
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/additional-options.cc10
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/additional-options.gyp31
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/analysis.gyp40
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/buffer-security-check.gyp51
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/buffer-security.cc12
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/calling-convention-cdecl.def6
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/calling-convention-fastcall.def6
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/calling-convention-stdcall.def6
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/calling-convention-vectorcall.def6
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/calling-convention.cc6
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/calling-convention.gyp66
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/character-set-mbcs.cc11
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/character-set-unicode.cc15
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/character-set.gyp35
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/compile-as-managed.cc9
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/compile-as-managed.gyp29
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/compile-as-winrt.cc12
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/compile-as-winrt.gyp20
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/debug-format.gyp48
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/default-char-is-unsigned.cc15
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/default-char-is-unsigned.gyp20
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/disable-specific-warnings.cc9
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/disable-specific-warnings.gyp29
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.cc28
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.gyp68
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/exception-handling-on.cc24
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/exception-handling.gyp46
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/floating-point-model-fast.cc19
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/floating-point-model-precise.cc19
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/floating-point-model-strict.cc19
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/floating-point-model.gyp43
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/force-include-files-with-precompiled.cc10
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/force-include-files.cc8
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/force-include-files.gyp36
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/function-level-linking.cc11
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/function-level-linking.gyp28
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/hello.cc7
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/optimizations.gyp207
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/pdbname-override.gyp26
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/pdbname.cc7
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/pdbname.gyp24
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/precomp.cc6
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/rtti-on.cc11
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/rtti.gyp37
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/runtime-checks.cc11
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/runtime-checks.gyp29
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/runtime-library-md.cc19
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/runtime-library-mdd.cc19
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/runtime-library-mt.cc19
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/runtime-library-mtd.cc19
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/runtime-library.gyp48
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/subdir/header.h0
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type.gyp33
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type1.cc11
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type2.cc11
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/uninit.cc13
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/warning-as-error.cc9
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/warning-as-error.gyp37
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/warning-level.gyp115
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/warning-level1.cc8
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/warning-level2.cc14
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/warning-level3.cc11
-rw-r--r--third_party/python/gyp/test/win/compiler-flags/warning-level4.cc10
-rw-r--r--third_party/python/gyp/test/win/enable-winrt/dllmain.cc30
-rw-r--r--third_party/python/gyp/test/win/enable-winrt/enable-winrt.gyp39
-rw-r--r--third_party/python/gyp/test/win/generator-output-different-drive/gyptest-generator-output-different-drive.py44
-rw-r--r--third_party/python/gyp/test/win/generator-output-different-drive/prog.c10
-rw-r--r--third_party/python/gyp/test/win/generator-output-different-drive/prog.gyp15
-rw-r--r--third_party/python/gyp/test/win/gyptest-asm-files.py26
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-additional-include-dirs.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-additional-options.py28
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-analysis.py30
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-buffer-security-check.py53
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-calling-convention.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-character-set.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-compile-as-managed.py24
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-compile-as-winrt.py20
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-debug-format.py43
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-default-char-is-unsigned.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-disable-specific-warnings.py32
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-enable-enhanced-instruction-set.py49
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-exception-handling.py33
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-floating-point-model.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-force-include-files.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-function-level-linking.py54
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-optimizations.py105
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-pdbname-override.py27
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-pdbname.py30
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-rtti.py30
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-runtime-checks.py30
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-runtime-library.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-treat-wchar-t-as-built-in-type.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-warning-as-error.py30
-rw-r--r--third_party/python/gyp/test/win/gyptest-cl-warning-level.py41
-rw-r--r--third_party/python/gyp/test/win/gyptest-command-quote.py42
-rw-r--r--third_party/python/gyp/test/win/gyptest-crosscompile-ar.py29
-rw-r--r--third_party/python/gyp/test/win/gyptest-lib-ltcg.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-additional-deps.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-additional-options.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-aslr.py35
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-base-address.py62
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-debug-info.py26
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-default-libs.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-deffile.py43
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-defrelink.py56
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-delay-load-dlls.py35
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-embed-manifest.py100
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-enable-uac.py104
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-enable-winrt-app-revision.py43
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-enable-winrt-target-platform-version.py47
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-enable-winrt.py37
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-entrypointsymbol.py24
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-fixed-base.py40
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-force-symbol-reference.py26
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-generate-manifest.py127
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-incremental.py37
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-large-address-aware.py35
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-large-pdb.py76
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-library-adjust.py21
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-library-directories.py35
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-ltcg.py44
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-mapfile.py44
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-nodefaultlib.py24
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-noimportlib.py30
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-nxcompat.py37
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-opt-icf.py41
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-opt-ref.py40
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-ordering.py103
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-outputfile.py28
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-pdb-no-output.py25
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-pdb-output.py33
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-pdb.py35
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-pgo.py75
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-profile.py37
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-restat-importlib.py47
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-safeseh.py46
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-shard.py30
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-stacksize.py62
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-subsystem.py38
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-target-machine.py28
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-tsaware.py33
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-uldi-depending-on-module.py24
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-uldi.py28
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-unsupported-manifest.py27
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-update-manifest.py104
-rw-r--r--third_party/python/gyp/test/win/gyptest-link-warnings-as-errors.py24
-rw-r--r--third_party/python/gyp/test/win/gyptest-long-command-line.py23
-rw-r--r--third_party/python/gyp/test/win/gyptest-macro-projectname.py24
-rw-r--r--third_party/python/gyp/test/win/gyptest-macro-targetext.py26
-rw-r--r--third_party/python/gyp/test/win/gyptest-macro-targetfilename.py37
-rw-r--r--third_party/python/gyp/test/win/gyptest-macro-targetname.py29
-rw-r--r--third_party/python/gyp/test/win/gyptest-macro-targetpath.py30
-rw-r--r--third_party/python/gyp/test/win/gyptest-macro-vcinstalldir.py24
-rw-r--r--third_party/python/gyp/test/win/gyptest-macros-containing-gyp.py21
-rw-r--r--third_party/python/gyp/test/win/gyptest-macros-in-inputs-and-outputs.py27
-rw-r--r--third_party/python/gyp/test/win/gyptest-midl-excluded.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-midl-includedirs.py21
-rw-r--r--third_party/python/gyp/test/win/gyptest-midl-rules.py28
-rw-r--r--third_party/python/gyp/test/win/gyptest-ml-safeseh.py22
-rw-r--r--third_party/python/gyp/test/win/gyptest-quoting-commands.py25
-rw-r--r--third_party/python/gyp/test/win/gyptest-rc-build.py29
-rw-r--r--third_party/python/gyp/test/win/gyptest-sys.py27
-rw-r--r--third_party/python/gyp/test/win/gyptest-system-include.py21
-rw-r--r--third_party/python/gyp/test/win/idl-excluded/bad.idl6
-rw-r--r--third_party/python/gyp/test/win/idl-excluded/copy-file.py11
-rw-r--r--third_party/python/gyp/test/win/idl-excluded/idl-excluded.gyp58
-rw-r--r--third_party/python/gyp/test/win/idl-excluded/program.cc7
-rw-r--r--third_party/python/gyp/test/win/idl-includedirs/hello.cc7
-rw-r--r--third_party/python/gyp/test/win/idl-includedirs/idl-includedirs.gyp26
-rw-r--r--third_party/python/gyp/test/win/idl-includedirs/subdir/bar.idl13
-rw-r--r--third_party/python/gyp/test/win/idl-includedirs/subdir/foo.idl14
-rw-r--r--third_party/python/gyp/test/win/idl-rules/Window.idl9
-rw-r--r--third_party/python/gyp/test/win/idl-rules/basic-idl.gyp67
-rw-r--r--third_party/python/gyp/test/win/idl-rules/history_indexer.idl17
-rw-r--r--third_party/python/gyp/test/win/idl-rules/history_indexer_user.cc15
-rw-r--r--third_party/python/gyp/test/win/idl-rules/idl_compiler.py17
-rw-r--r--third_party/python/gyp/test/win/importlib/dll_no_exports.cc9
-rw-r--r--third_party/python/gyp/test/win/importlib/has-exports.cc10
-rw-r--r--third_party/python/gyp/test/win/importlib/hello.cc9
-rw-r--r--third_party/python/gyp/test/win/importlib/importlib.gyp30
-rw-r--r--third_party/python/gyp/test/win/importlib/noimplib.gyp16
-rw-r--r--third_party/python/gyp/test/win/large-pdb/dllmain.cc9
-rw-r--r--third_party/python/gyp/test/win/large-pdb/large-pdb.gyp98
-rw-r--r--third_party/python/gyp/test/win/large-pdb/main.cc7
-rw-r--r--third_party/python/gyp/test/win/lib-crosscompile/answer.cc9
-rw-r--r--third_party/python/gyp/test/win/lib-crosscompile/answer.h5
-rw-r--r--third_party/python/gyp/test/win/lib-crosscompile/use_host_ar.gyp17
-rw-r--r--third_party/python/gyp/test/win/lib-flags/answer.cc9
-rw-r--r--third_party/python/gyp/test/win/lib-flags/answer.h5
-rw-r--r--third_party/python/gyp/test/win/lib-flags/ltcg.gyp21
-rw-r--r--third_party/python/gyp/test/win/linker-flags/a/x.cc7
-rw-r--r--third_party/python/gyp/test/win/linker-flags/a/z.cc7
-rw-r--r--third_party/python/gyp/test/win/linker-flags/additional-deps.cc10
-rw-r--r--third_party/python/gyp/test/win/linker-flags/additional-deps.gyp30
-rw-r--r--third_party/python/gyp/test/win/linker-flags/additional-options.gyp29
-rw-r--r--third_party/python/gyp/test/win/linker-flags/aslr.gyp35
-rw-r--r--third_party/python/gyp/test/win/linker-flags/b/y.cc7
-rw-r--r--third_party/python/gyp/test/win/linker-flags/base-address.gyp38
-rw-r--r--third_party/python/gyp/test/win/linker-flags/debug-info.gyp28
-rw-r--r--third_party/python/gyp/test/win/linker-flags/deffile-multiple.gyp17
-rw-r--r--third_party/python/gyp/test/win/linker-flags/deffile.cc13
-rw-r--r--third_party/python/gyp/test/win/linker-flags/deffile.def8
-rw-r--r--third_party/python/gyp/test/win/linker-flags/deffile.gyp38
-rw-r--r--third_party/python/gyp/test/win/linker-flags/delay-load-dlls.gyp35
-rw-r--r--third_party/python/gyp/test/win/linker-flags/delay-load.cc10
-rw-r--r--third_party/python/gyp/test/win/linker-flags/embed-manifest.gyp109
-rw-r--r--third_party/python/gyp/test/win/linker-flags/enable-uac.gyp45
-rw-r--r--third_party/python/gyp/test/win/linker-flags/entrypointsymbol.cc13
-rw-r--r--third_party/python/gyp/test/win/linker-flags/entrypointsymbol.gyp28
-rw-r--r--third_party/python/gyp/test/win/linker-flags/extra.manifest11
-rw-r--r--third_party/python/gyp/test/win/linker-flags/extra2.manifest11
-rw-r--r--third_party/python/gyp/test/win/linker-flags/fixed-base.gyp52
-rw-r--r--third_party/python/gyp/test/win/linker-flags/force-symbol-reference.gyp39
-rw-r--r--third_party/python/gyp/test/win/linker-flags/generate-manifest.gyp166
-rw-r--r--third_party/python/gyp/test/win/linker-flags/hello.cc7
-rw-r--r--third_party/python/gyp/test/win/linker-flags/incremental.gyp65
-rw-r--r--third_party/python/gyp/test/win/linker-flags/inline_test.cc12
-rw-r--r--third_party/python/gyp/test/win/linker-flags/inline_test.h5
-rw-r--r--third_party/python/gyp/test/win/linker-flags/inline_test_main.cc15
-rw-r--r--third_party/python/gyp/test/win/linker-flags/large-address-aware.gyp28
-rw-r--r--third_party/python/gyp/test/win/linker-flags/library-adjust.cc10
-rw-r--r--third_party/python/gyp/test/win/linker-flags/library-adjust.gyp16
-rw-r--r--third_party/python/gyp/test/win/linker-flags/library-directories-define.cc7
-rw-r--r--third_party/python/gyp/test/win/linker-flags/library-directories-reference.cc10
-rw-r--r--third_party/python/gyp/test/win/linker-flags/library-directories.gyp42
-rw-r--r--third_party/python/gyp/test/win/linker-flags/link-ordering.gyp95
-rw-r--r--third_party/python/gyp/test/win/linker-flags/link-warning.cc10
-rw-r--r--third_party/python/gyp/test/win/linker-flags/ltcg.gyp42
-rw-r--r--third_party/python/gyp/test/win/linker-flags/main-crt.c8
-rw-r--r--third_party/python/gyp/test/win/linker-flags/manifest-in-comment.cc13
-rw-r--r--third_party/python/gyp/test/win/linker-flags/mapfile.cc12
-rw-r--r--third_party/python/gyp/test/win/linker-flags/mapfile.gyp45
-rw-r--r--third_party/python/gyp/test/win/linker-flags/no-default-libs.cc18
-rw-r--r--third_party/python/gyp/test/win/linker-flags/no-default-libs.gyp13
-rw-r--r--third_party/python/gyp/test/win/linker-flags/nodefaultlib.cc13
-rw-r--r--third_party/python/gyp/test/win/linker-flags/nodefaultlib.gyp30
-rw-r--r--third_party/python/gyp/test/win/linker-flags/nxcompat.gyp35
-rw-r--r--third_party/python/gyp/test/win/linker-flags/opt-icf.cc29
-rw-r--r--third_party/python/gyp/test/win/linker-flags/opt-icf.gyp63
-rw-r--r--third_party/python/gyp/test/win/linker-flags/opt-ref.cc11
-rw-r--r--third_party/python/gyp/test/win/linker-flags/opt-ref.gyp56
-rw-r--r--third_party/python/gyp/test/win/linker-flags/outputfile.gyp58
-rw-r--r--third_party/python/gyp/test/win/linker-flags/pdb-output.gyp49
-rw-r--r--third_party/python/gyp/test/win/linker-flags/pgo.gyp143
-rw-r--r--third_party/python/gyp/test/win/linker-flags/profile.gyp50
-rw-r--r--third_party/python/gyp/test/win/linker-flags/program-database.gyp40
-rw-r--r--third_party/python/gyp/test/win/linker-flags/safeseh.gyp79
-rw-r--r--third_party/python/gyp/test/win/linker-flags/safeseh_hello.cc11
-rw-r--r--third_party/python/gyp/test/win/linker-flags/safeseh_zero.asm10
-rw-r--r--third_party/python/gyp/test/win/linker-flags/safeseh_zero64.asm9
-rw-r--r--third_party/python/gyp/test/win/linker-flags/stacksize.gyp44
-rw-r--r--third_party/python/gyp/test/win/linker-flags/subdir/library.gyp13
-rw-r--r--third_party/python/gyp/test/win/linker-flags/subsystem-windows.cc9
-rw-r--r--third_party/python/gyp/test/win/linker-flags/subsystem.gyp70
-rw-r--r--third_party/python/gyp/test/win/linker-flags/target-machine.gyp48
-rw-r--r--third_party/python/gyp/test/win/linker-flags/tsaware.gyp28
-rw-r--r--third_party/python/gyp/test/win/linker-flags/unsupported-manifest.gyp13
-rw-r--r--third_party/python/gyp/test/win/linker-flags/update_pgd.py35
-rw-r--r--third_party/python/gyp/test/win/linker-flags/warn-as-error.gyp33
-rw-r--r--third_party/python/gyp/test/win/linker-flags/x.cc7
-rw-r--r--third_party/python/gyp/test/win/linker-flags/y.cc7
-rw-r--r--third_party/python/gyp/test/win/linker-flags/z.cc7
-rw-r--r--third_party/python/gyp/test/win/long-command-line/function.cc7
-rw-r--r--third_party/python/gyp/test/win/long-command-line/hello.cc7
-rw-r--r--third_party/python/gyp/test/win/long-command-line/long-command-line.gyp54
-rw-r--r--third_party/python/gyp/test/win/ml-safeseh/a.asm10
-rw-r--r--third_party/python/gyp/test/win/ml-safeseh/hello.cc11
-rw-r--r--third_party/python/gyp/test/win/ml-safeseh/ml-safeseh.gyp24
-rw-r--r--third_party/python/gyp/test/win/precompiled/gyptest-all.py21
-rw-r--r--third_party/python/gyp/test/win/precompiled/hello.c14
-rw-r--r--third_party/python/gyp/test/win/precompiled/hello.gyp28
-rw-r--r--third_party/python/gyp/test/win/precompiled/hello2.c13
-rw-r--r--third_party/python/gyp/test/win/precompiled/precomp.c8
-rw-r--r--third_party/python/gyp/test/win/rc-build/Resource.h26
-rw-r--r--third_party/python/gyp/test/win/rc-build/hello.cpp30
-rw-r--r--third_party/python/gyp/test/win/rc-build/hello.gyp92
-rw-r--r--third_party/python/gyp/test/win/rc-build/hello.h3
-rw-r--r--third_party/python/gyp/test/win/rc-build/hello.icobin0 -> 23558 bytes
-rw-r--r--third_party/python/gyp/test/win/rc-build/hello.rc86
-rw-r--r--third_party/python/gyp/test/win/rc-build/hello3.rc87
-rw-r--r--third_party/python/gyp/test/win/rc-build/small.icobin0 -> 23558 bytes
-rw-r--r--third_party/python/gyp/test/win/rc-build/subdir/hello2.rc87
-rw-r--r--third_party/python/gyp/test/win/rc-build/subdir/include.h1
-rw-r--r--third_party/python/gyp/test/win/rc-build/targetver.h24
-rw-r--r--third_party/python/gyp/test/win/shard/hello.cc7
-rw-r--r--third_party/python/gyp/test/win/shard/hello1.cc7
-rw-r--r--third_party/python/gyp/test/win/shard/hello2.cc7
-rw-r--r--third_party/python/gyp/test/win/shard/hello3.cc7
-rw-r--r--third_party/python/gyp/test/win/shard/hello4.cc7
-rw-r--r--third_party/python/gyp/test/win/shard/shard.gyp31
-rw-r--r--third_party/python/gyp/test/win/shard/shard_ref.gyp41
-rw-r--r--third_party/python/gyp/test/win/system-include/bar/header.h0
-rw-r--r--third_party/python/gyp/test/win/system-include/common/commonheader.h0
-rw-r--r--third_party/python/gyp/test/win/system-include/foo/header.h0
-rw-r--r--third_party/python/gyp/test/win/system-include/main.cc4
-rw-r--r--third_party/python/gyp/test/win/system-include/test.gyp26
-rw-r--r--third_party/python/gyp/test/win/uldi/a.cc7
-rw-r--r--third_party/python/gyp/test/win/uldi/b.cc7
-rw-r--r--third_party/python/gyp/test/win/uldi/dll.cc6
-rw-r--r--third_party/python/gyp/test/win/uldi/exe.cc7
-rw-r--r--third_party/python/gyp/test/win/uldi/main.cc10
-rw-r--r--third_party/python/gyp/test/win/uldi/uldi-depending-on-module.gyp42
-rw-r--r--third_party/python/gyp/test/win/uldi/uldi.gyp45
-rw-r--r--third_party/python/gyp/test/win/vs-macros/as.py20
-rw-r--r--third_party/python/gyp/test/win/vs-macros/containing-gyp.gyp39
-rw-r--r--third_party/python/gyp/test/win/vs-macros/do_stuff.py8
-rw-r--r--third_party/python/gyp/test/win/vs-macros/hello.cc7
-rw-r--r--third_party/python/gyp/test/win/vs-macros/input-output-macros.gyp32
-rw-r--r--third_party/python/gyp/test/win/vs-macros/input.S0
-rw-r--r--third_party/python/gyp/test/win/vs-macros/projectname.gyp29
-rw-r--r--third_party/python/gyp/test/win/vs-macros/stuff.blah1
-rw-r--r--third_party/python/gyp/test/win/vs-macros/targetext.gyp59
-rw-r--r--third_party/python/gyp/test/win/vs-macros/targetfilename.gyp59
-rw-r--r--third_party/python/gyp/test/win/vs-macros/targetname.gyp52
-rw-r--r--third_party/python/gyp/test/win/vs-macros/targetpath.gyp59
-rw-r--r--third_party/python/gyp/test/win/vs-macros/test_exists.py10
-rw-r--r--third_party/python/gyp/test/win/vs-macros/vcinstalldir.gyp41
-rw-r--r--third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.c10
-rw-r--r--third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.gyp32
-rw-r--r--third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.h13
-rw-r--r--third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.rc14
-rw-r--r--third_party/python/gyp/test/win/win-tool/copies_readonly_files.gyp29
-rw-r--r--third_party/python/gyp/test/win/win-tool/gyptest-win-tool-handles-readonly-files.py55
-rw-r--r--third_party/python/gyp/test/win/winrt-app-type-revision/dllmain.cc30
-rw-r--r--third_party/python/gyp/test/win/winrt-app-type-revision/winrt-app-type-revison.gyp43
-rw-r--r--third_party/python/gyp/test/win/winrt-target-platform-version/dllmain.cc30
-rw-r--r--third_party/python/gyp/test/win/winrt-target-platform-version/winrt-target-platform-version.gyp49
-rw-r--r--third_party/python/gyp/test/xcode-ninja/list_excluded/gyptest-all.py49
-rw-r--r--third_party/python/gyp/test/xcode-ninja/list_excluded/hello.cpp7
-rw-r--r--third_party/python/gyp/test/xcode-ninja/list_excluded/hello_exclude.gyp19
-rw-r--r--third_party/python/gyp/test/xcode-ninja/list_excluded/hello_excluded.cpp7
-rw-r--r--third_party/python/gyp/tools/README15
-rw-r--r--third_party/python/gyp/tools/Xcode/README5
-rw-r--r--third_party/python/gyp/tools/Xcode/Specifications/gyp.pbfilespec27
-rw-r--r--third_party/python/gyp/tools/Xcode/Specifications/gyp.xclangspec226
-rw-r--r--third_party/python/gyp/tools/emacs/README12
-rw-r--r--third_party/python/gyp/tools/emacs/gyp-tests.el63
-rw-r--r--third_party/python/gyp/tools/emacs/gyp.el275
-rwxr-xr-xthird_party/python/gyp/tools/emacs/run-unit-tests.sh7
-rw-r--r--third_party/python/gyp/tools/emacs/testdata/media.gyp1105
-rw-r--r--third_party/python/gyp/tools/emacs/testdata/media.gyp.fontified1107
-rwxr-xr-xthird_party/python/gyp/tools/graphviz.py102
-rwxr-xr-xthird_party/python/gyp/tools/pretty_gyp.py156
-rwxr-xr-xthird_party/python/gyp/tools/pretty_sln.py171
-rwxr-xr-xthird_party/python/gyp/tools/pretty_vcproj.py337
-rw-r--r--third_party/python/importlib_metadata/.gitlab-ci.yml50
-rw-r--r--third_party/python/importlib_metadata/.readthedocs.yml5
-rw-r--r--third_party/python/importlib_metadata/LICENSE13
-rw-r--r--third_party/python/importlib_metadata/MANIFEST.in5
-rw-r--r--third_party/python/importlib_metadata/PKG-INFO55
-rw-r--r--third_party/python/importlib_metadata/README.rst36
-rw-r--r--third_party/python/importlib_metadata/codecov.yml2
-rw-r--r--third_party/python/importlib_metadata/coverage.ini24
-rw-r--r--third_party/python/importlib_metadata/coverplug.py21
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata.egg-info/PKG-INFO55
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata.egg-info/SOURCES.txt35
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata.egg-info/dependency_links.txt1
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata.egg-info/requires.txt17
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata.egg-info/top_level.txt1
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/__init__.py623
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/_compat.py152
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/docs/__init__.py0
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/docs/changelog.rst297
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/docs/conf.py185
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/docs/index.rst50
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/docs/using.rst260
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/tests/__init__.py0
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/tests/data/__init__.py0
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3-none-any.whlbin0 -> 1455 bytes
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3.6.eggbin0 -> 1497 bytes
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/tests/fixtures.py232
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/tests/test_api.py176
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/tests/test_integration.py54
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/tests/test_main.py285
-rw-r--r--third_party/python/importlib_metadata/importlib_metadata/tests/test_zip.py80
-rw-r--r--third_party/python/importlib_metadata/prepare/example/example/__init__.py2
-rw-r--r--third_party/python/importlib_metadata/prepare/example/setup.py10
-rw-r--r--third_party/python/importlib_metadata/pyproject.toml2
-rw-r--r--third_party/python/importlib_metadata/setup.cfg61
-rw-r--r--third_party/python/importlib_metadata/setup.py3
-rw-r--r--third_party/python/importlib_metadata/tox.ini97
-rw-r--r--third_party/python/iso8601/LICENSE20
-rw-r--r--third_party/python/iso8601/MANIFEST.in2
-rw-r--r--third_party/python/iso8601/PKG-INFO203
-rw-r--r--third_party/python/iso8601/README.rst190
-rw-r--r--third_party/python/iso8601/dev-requirements.txt6
-rw-r--r--third_party/python/iso8601/iso8601/__init__.py1
-rw-r--r--third_party/python/iso8601/iso8601/iso8601.py214
-rw-r--r--third_party/python/iso8601/iso8601/test_iso8601.py102
-rw-r--r--third_party/python/iso8601/setup.cfg4
-rw-r--r--third_party/python/iso8601/setup.py25
-rw-r--r--third_party/python/iso8601/tox.ini8
-rw-r--r--third_party/python/jsmin/CHANGELOG.txt11
-rw-r--r--third_party/python/jsmin/LICENSE.txt23
-rw-r--r--third_party/python/jsmin/MANIFEST.in1
-rw-r--r--third_party/python/jsmin/PKG-INFO117
-rw-r--r--third_party/python/jsmin/README.rst80
-rw-r--r--third_party/python/jsmin/jsmin/__init__.py238
-rw-r--r--third_party/python/jsmin/jsmin/__main__.py10
-rw-r--r--third_party/python/jsmin/jsmin/test.py394
-rw-r--r--third_party/python/jsmin/setup.cfg5
-rw-r--r--third_party/python/jsmin/setup.py47
-rw-r--r--third_party/python/json-e/MANIFEST.in3
-rw-r--r--third_party/python/json-e/PKG-INFO11
-rw-r--r--third_party/python/json-e/README.md730
-rw-r--r--third_party/python/json-e/jsone/__init__.py21
-rw-r--r--third_party/python/json-e/jsone/builtins.py121
-rw-r--r--third_party/python/json-e/jsone/interpreter.py289
-rw-r--r--third_party/python/json-e/jsone/prattparser.py191
-rw-r--r--third_party/python/json-e/jsone/render.py354
-rw-r--r--third_party/python/json-e/jsone/shared.py131
-rw-r--r--third_party/python/json-e/jsone/six.py23
-rw-r--r--third_party/python/json-e/package.json35
-rw-r--r--third_party/python/json-e/setup.cfg8
-rw-r--r--third_party/python/json-e/setup.py31
-rw-r--r--third_party/python/jsonschema/.appveyor.yml33
-rw-r--r--third_party/python/jsonschema/.coveragerc5
-rw-r--r--third_party/python/jsonschema/.github/FUNDING.yml5
-rw-r--r--third_party/python/jsonschema/.github/SECURITY.md21
-rw-r--r--third_party/python/jsonschema/.gitignore5
-rw-r--r--third_party/python/jsonschema/.travis.yml30
-rw-r--r--third_party/python/jsonschema/CHANGELOG.rst196
-rw-r--r--third_party/python/jsonschema/COPYING19
-rw-r--r--third_party/python/jsonschema/DEMO.ipynb167
-rw-r--r--third_party/python/jsonschema/MANIFEST.in4
-rw-r--r--third_party/python/jsonschema/PKG-INFO206
-rw-r--r--third_party/python/jsonschema/README.rst179
-rw-r--r--third_party/python/jsonschema/codecov.yml11
-rw-r--r--third_party/python/jsonschema/demo.yml2
-rw-r--r--third_party/python/jsonschema/json/.gitignore1
-rw-r--r--third_party/python/jsonschema/json/.travis.yml8
-rw-r--r--third_party/python/jsonschema/json/LICENSE19
-rw-r--r--third_party/python/jsonschema/json/README.md181
-rwxr-xr-xthird_party/python/jsonschema/json/bin/jsonschema_suite298
-rw-r--r--third_party/python/jsonschema/json/index.js45
-rw-r--r--third_party/python/jsonschema/json/package.json28
-rw-r--r--third_party/python/jsonschema/json/remotes/folder/folderInteger.json3
-rw-r--r--third_party/python/jsonschema/json/remotes/integer.json3
-rw-r--r--third_party/python/jsonschema/json/remotes/name-defs.json15
-rw-r--r--third_party/python/jsonschema/json/remotes/name.json15
-rw-r--r--third_party/python/jsonschema/json/remotes/subSchemas.json8
-rw-r--r--third_party/python/jsonschema/json/test-schema.json59
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/additionalItems.json87
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/additionalProperties.json133
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/allOf.json218
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/anchor.json87
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/anyOf.json189
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/boolean_schema.json104
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/const.json170
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/contains.json95
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/default.json49
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/defs.json24
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/dependencies.json268
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/enum.json179
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/exclusiveMaximum.json30
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/exclusiveMinimum.json30
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/format.json614
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/if-then-else.json188
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/items.json250
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/maxItems.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/maxLength.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/maxProperties.json38
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/maximum.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/minItems.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/minLength.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/minProperties.json38
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/minimum.json59
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/multipleOf.json60
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/not.json117
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/oneOf.json206
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/bignum.json105
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/content.json77
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/ecmascript-regex.json213
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/date-time.json53
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/date.json23
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/email.json18
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/hostname.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/idn-email.json18
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/idn-hostname.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/ipv4.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/ipv6.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/iri-reference.json43
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/iri.json53
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/json-pointer.json168
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/regex.json18
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/relative-json-pointer.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/time.json23
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/uri-reference.json43
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/uri-template.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/format/uri.json103
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/optional/zeroTerminatedFloats.json15
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/pattern.json34
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/patternProperties.json151
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/properties.json167
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/propertyNames.json78
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/ref.json359
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/refRemote.json167
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/required.json105
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/type.json464
-rw-r--r--third_party/python/jsonschema/json/tests/draft2019-09/uniqueItems.json173
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/additionalItems.json82
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/additionalProperties.json133
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/default.json49
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/dependencies.json118
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/disallow.json80
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/divisibleBy.json60
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/enum.json71
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/extends.json94
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/format.json362
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/items.json46
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/maxItems.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/maxLength.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/maximum.json42
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/minItems.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/minLength.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/minimum.json73
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/optional/bignum.json107
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/optional/ecmascript-regex.json18
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/optional/format.json227
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/optional/zeroTerminatedFloats.json15
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/pattern.json34
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/patternProperties.json115
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/properties.json97
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/ref.json192
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/refRemote.json74
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/required.json53
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/type.json489
-rw-r--r--third_party/python/jsonschema/json/tests/draft3/uniqueItems.json163
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/additionalItems.json87
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/additionalProperties.json133
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/allOf.json185
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/anyOf.json156
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/default.json49
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/definitions.json32
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/dependencies.json194
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/enum.json179
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/format.json218
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/items.json195
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/maxItems.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/maxLength.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/maxProperties.json38
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/maximum.json73
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/minItems.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/minLength.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/minProperties.json38
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/minimum.json104
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/multipleOf.json60
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/not.json96
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/oneOf.json162
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/optional/bignum.json107
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/optional/ecmascript-regex.json213
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/optional/format.json253
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/optional/zeroTerminatedFloats.json15
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/pattern.json34
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/patternProperties.json120
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/properties.json136
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/ref.json411
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/refRemote.json171
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/required.json89
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/type.json464
-rw-r--r--third_party/python/jsonschema/json/tests/draft4/uniqueItems.json173
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/additionalItems.json87
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/additionalProperties.json133
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/allOf.json218
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/anyOf.json189
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/boolean_schema.json104
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/const.json170
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/contains.json100
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/default.json49
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/definitions.json32
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/dependencies.json268
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/enum.json179
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/exclusiveMaximum.json30
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/exclusiveMinimum.json30
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/format.json326
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/items.json250
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/maxItems.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/maxLength.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/maxProperties.json38
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/maximum.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/minItems.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/minLength.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/minProperties.json38
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/minimum.json59
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/multipleOf.json60
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/not.json117
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/oneOf.json206
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/optional/bignum.json105
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/optional/ecmascript-regex.json213
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/optional/format.json491
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/optional/zeroTerminatedFloats.json15
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/pattern.json34
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/patternProperties.json151
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/properties.json167
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/propertyNames.json78
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/ref.json443
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/refRemote.json171
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/required.json105
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/type.json464
-rw-r--r--third_party/python/jsonschema/json/tests/draft6/uniqueItems.json173
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/additionalItems.json87
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/additionalProperties.json133
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/allOf.json218
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/anyOf.json189
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/boolean_schema.json104
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/const.json170
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/contains.json100
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/default.json49
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/definitions.json32
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/dependencies.json268
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/enum.json179
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/exclusiveMaximum.json30
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/exclusiveMinimum.json30
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/format.json614
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/if-then-else.json188
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/items.json250
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/maxItems.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/maxLength.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/maxProperties.json38
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/maximum.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/minItems.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/minLength.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/minProperties.json38
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/minimum.json59
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/multipleOf.json60
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/not.json117
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/oneOf.json206
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/bignum.json105
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/content.json77
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/ecmascript-regex.json213
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/date-time.json53
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/date.json23
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/email.json18
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/hostname.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/idn-email.json18
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/idn-hostname.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/ipv4.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/ipv6.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/iri-reference.json43
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/iri.json53
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/json-pointer.json168
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/regex.json18
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/relative-json-pointer.json33
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/time.json23
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/uri-reference.json43
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/uri-template.json28
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/format/uri.json103
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/optional/zeroTerminatedFloats.json15
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/pattern.json34
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/patternProperties.json151
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/properties.json167
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/propertyNames.json78
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/ref.json443
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/refRemote.json171
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/required.json105
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/type.json464
-rw-r--r--third_party/python/jsonschema/json/tests/draft7/uniqueItems.json173
-rw-r--r--third_party/python/jsonschema/json/tests/latest/additionalItems.json87
-rw-r--r--third_party/python/jsonschema/json/tests/latest/additionalProperties.json133
-rw-r--r--third_party/python/jsonschema/json/tests/latest/allOf.json218
-rw-r--r--third_party/python/jsonschema/json/tests/latest/anchor.json87
-rw-r--r--third_party/python/jsonschema/json/tests/latest/anyOf.json189
-rw-r--r--third_party/python/jsonschema/json/tests/latest/boolean_schema.json104
-rw-r--r--third_party/python/jsonschema/json/tests/latest/const.json170
-rw-r--r--third_party/python/jsonschema/json/tests/latest/contains.json95
-rw-r--r--third_party/python/jsonschema/json/tests/latest/default.json49
-rw-r--r--third_party/python/jsonschema/json/tests/latest/defs.json24
-rw-r--r--third_party/python/jsonschema/json/tests/latest/dependencies.json268
-rw-r--r--third_party/python/jsonschema/json/tests/latest/enum.json179
-rw-r--r--third_party/python/jsonschema/json/tests/latest/exclusiveMaximum.json30
-rw-r--r--third_party/python/jsonschema/json/tests/latest/exclusiveMinimum.json30
-rw-r--r--third_party/python/jsonschema/json/tests/latest/format.json614
-rw-r--r--third_party/python/jsonschema/json/tests/latest/if-then-else.json188
-rw-r--r--third_party/python/jsonschema/json/tests/latest/items.json250
-rw-r--r--third_party/python/jsonschema/json/tests/latest/maxItems.json28
-rw-r--r--third_party/python/jsonschema/json/tests/latest/maxLength.json33
-rw-r--r--third_party/python/jsonschema/json/tests/latest/maxProperties.json38
-rw-r--r--third_party/python/jsonschema/json/tests/latest/maximum.json28
-rw-r--r--third_party/python/jsonschema/json/tests/latest/minItems.json28
-rw-r--r--third_party/python/jsonschema/json/tests/latest/minLength.json33
-rw-r--r--third_party/python/jsonschema/json/tests/latest/minProperties.json38
-rw-r--r--third_party/python/jsonschema/json/tests/latest/minimum.json59
-rw-r--r--third_party/python/jsonschema/json/tests/latest/multipleOf.json60
-rw-r--r--third_party/python/jsonschema/json/tests/latest/not.json117
-rw-r--r--third_party/python/jsonschema/json/tests/latest/oneOf.json206
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/bignum.json105
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/content.json77
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/ecmascript-regex.json213
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/date-time.json53
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/date.json23
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/email.json18
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/hostname.json33
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/idn-email.json18
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/idn-hostname.json28
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/ipv4.json33
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/ipv6.json28
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/iri-reference.json43
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/iri.json53
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/json-pointer.json168
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/regex.json18
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/relative-json-pointer.json33
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/time.json23
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/uri-reference.json43
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/uri-template.json28
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/format/uri.json103
-rw-r--r--third_party/python/jsonschema/json/tests/latest/optional/zeroTerminatedFloats.json15
-rw-r--r--third_party/python/jsonschema/json/tests/latest/pattern.json34
-rw-r--r--third_party/python/jsonschema/json/tests/latest/patternProperties.json151
-rw-r--r--third_party/python/jsonschema/json/tests/latest/properties.json167
-rw-r--r--third_party/python/jsonschema/json/tests/latest/propertyNames.json78
-rw-r--r--third_party/python/jsonschema/json/tests/latest/ref.json359
-rw-r--r--third_party/python/jsonschema/json/tests/latest/refRemote.json167
-rw-r--r--third_party/python/jsonschema/json/tests/latest/required.json105
-rw-r--r--third_party/python/jsonschema/json/tests/latest/type.json464
-rw-r--r--third_party/python/jsonschema/json/tests/latest/uniqueItems.json173
-rw-r--r--third_party/python/jsonschema/json/tox.ini9
-rw-r--r--third_party/python/jsonschema/jsonschema.egg-info/PKG-INFO206
-rw-r--r--third_party/python/jsonschema/jsonschema.egg-info/SOURCES.txt354
-rw-r--r--third_party/python/jsonschema/jsonschema.egg-info/dependency_links.txt1
-rw-r--r--third_party/python/jsonschema/jsonschema.egg-info/entry_points.txt3
-rw-r--r--third_party/python/jsonschema/jsonschema.egg-info/requires.txt24
-rw-r--r--third_party/python/jsonschema/jsonschema.egg-info/top_level.txt1
-rw-r--r--third_party/python/jsonschema/jsonschema/__init__.py34
-rw-r--r--third_party/python/jsonschema/jsonschema/__main__.py2
-rw-r--r--third_party/python/jsonschema/jsonschema/_format.py425
-rw-r--r--third_party/python/jsonschema/jsonschema/_legacy_validators.py141
-rw-r--r--third_party/python/jsonschema/jsonschema/_reflect.py155
-rw-r--r--third_party/python/jsonschema/jsonschema/_types.py188
-rw-r--r--third_party/python/jsonschema/jsonschema/_utils.py212
-rw-r--r--third_party/python/jsonschema/jsonschema/_validators.py373
-rw-r--r--third_party/python/jsonschema/jsonschema/benchmarks/__init__.py5
-rw-r--r--third_party/python/jsonschema/jsonschema/benchmarks/issue232.py26
-rw-r--r--third_party/python/jsonschema/jsonschema/benchmarks/issue232/issue.json2653
-rw-r--r--third_party/python/jsonschema/jsonschema/benchmarks/json_schema_test_suite.py14
-rw-r--r--third_party/python/jsonschema/jsonschema/cli.py90
-rw-r--r--third_party/python/jsonschema/jsonschema/compat.py55
-rw-r--r--third_party/python/jsonschema/jsonschema/exceptions.py374
-rw-r--r--third_party/python/jsonschema/jsonschema/schemas/draft3.json199
-rw-r--r--third_party/python/jsonschema/jsonschema/schemas/draft4.json222
-rw-r--r--third_party/python/jsonschema/jsonschema/schemas/draft6.json153
-rw-r--r--third_party/python/jsonschema/jsonschema/schemas/draft7.json166
-rw-r--r--third_party/python/jsonschema/jsonschema/tests/__init__.py0
-rw-r--r--third_party/python/jsonschema/jsonschema/tests/_helpers.py5
-rw-r--r--third_party/python/jsonschema/jsonschema/tests/_suite.py239
-rw-r--r--third_party/python/jsonschema/jsonschema/tests/test_cli.py151
-rw-r--r--third_party/python/jsonschema/jsonschema/tests/test_exceptions.py462
-rw-r--r--third_party/python/jsonschema/jsonschema/tests/test_format.py89
-rw-r--r--third_party/python/jsonschema/jsonschema/tests/test_jsonschema_test_suite.py277
-rw-r--r--third_party/python/jsonschema/jsonschema/tests/test_types.py190
-rw-r--r--third_party/python/jsonschema/jsonschema/tests/test_validators.py1762
-rw-r--r--third_party/python/jsonschema/jsonschema/validators.py970
-rw-r--r--third_party/python/jsonschema/pyproject.toml8
-rw-r--r--third_party/python/jsonschema/setup.cfg81
-rw-r--r--third_party/python/jsonschema/setup.py2
-rw-r--r--third_party/python/jsonschema/test-requirements.txt1
-rw-r--r--third_party/python/jsonschema/tox.ini153
-rw-r--r--third_party/python/mock-1.0.0/LICENSE.txt26
-rw-r--r--third_party/python/mock-1.0.0/MANIFEST.in2
-rw-r--r--third_party/python/mock-1.0.0/PKG-INFO208
-rw-r--r--third_party/python/mock-1.0.0/README.txt177
-rw-r--r--third_party/python/mock-1.0.0/docs/changelog.txt725
-rw-r--r--third_party/python/mock-1.0.0/docs/compare.txt628
-rw-r--r--third_party/python/mock-1.0.0/docs/conf.py209
-rw-r--r--third_party/python/mock-1.0.0/docs/examples.txt1063
-rw-r--r--third_party/python/mock-1.0.0/docs/getting-started.txt479
-rw-r--r--third_party/python/mock-1.0.0/docs/helpers.txt583
-rw-r--r--third_party/python/mock-1.0.0/docs/index.txt411
-rw-r--r--third_party/python/mock-1.0.0/docs/magicmock.txt258
-rw-r--r--third_party/python/mock-1.0.0/docs/mock.txt842
-rw-r--r--third_party/python/mock-1.0.0/docs/patch.txt636
-rw-r--r--third_party/python/mock-1.0.0/docs/sentinel.txt58
-rw-r--r--third_party/python/mock-1.0.0/html/.doctrees/changelog.doctreebin0 -> 282659 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/.doctrees/compare.doctreebin0 -> 56915 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/.doctrees/examples.doctreebin0 -> 167478 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/.doctrees/getting-started.doctreebin0 -> 70942 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/.doctrees/index.doctreebin0 -> 98784 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/.doctrees/magicmock.doctreebin0 -> 75713 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/.doctrees/mock.doctreebin0 -> 152111 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/.doctrees/mocksignature.doctreebin0 -> 42324 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/.doctrees/patch.doctreebin0 -> 123511 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/.doctrees/sentinel.doctreebin0 -> 10632 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_sources/changelog.txt725
-rw-r--r--third_party/python/mock-1.0.0/html/_sources/compare.txt628
-rw-r--r--third_party/python/mock-1.0.0/html/_sources/examples.txt1063
-rw-r--r--third_party/python/mock-1.0.0/html/_sources/getting-started.txt479
-rw-r--r--third_party/python/mock-1.0.0/html/_sources/index.txt411
-rw-r--r--third_party/python/mock-1.0.0/html/_sources/magicmock.txt258
-rw-r--r--third_party/python/mock-1.0.0/html/_sources/mock.txt842
-rw-r--r--third_party/python/mock-1.0.0/html/_sources/mocksignature.txt262
-rw-r--r--third_party/python/mock-1.0.0/html/_sources/patch.txt636
-rw-r--r--third_party/python/mock-1.0.0/html/_sources/sentinel.txt58
-rw-r--r--third_party/python/mock-1.0.0/html/_static/adctheme.css757
-rw-r--r--third_party/python/mock-1.0.0/html/_static/basic.css540
-rw-r--r--third_party/python/mock-1.0.0/html/_static/breadcrumb_background.pngbin0 -> 136 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/default.css256
-rw-r--r--third_party/python/mock-1.0.0/html/_static/doctools.js247
-rw-r--r--third_party/python/mock-1.0.0/html/_static/documentation.pngbin0 -> 412 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/file.pngbin0 -> 392 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/header_sm_mid.pngbin0 -> 159 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/jquery.js154
-rw-r--r--third_party/python/mock-1.0.0/html/_static/minus.pngbin0 -> 199 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/mobile.css17
-rw-r--r--third_party/python/mock-1.0.0/html/_static/plus.pngbin0 -> 199 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/pygments.css62
-rw-r--r--third_party/python/mock-1.0.0/html/_static/scrn1.pngbin0 -> 108046 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/scrn2.pngbin0 -> 121395 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/searchfield_leftcap.pngbin0 -> 855 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/searchfield_repeat.pngbin0 -> 158 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/searchfield_rightcap.pngbin0 -> 530 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/searchtools.js560
-rw-r--r--third_party/python/mock-1.0.0/html/_static/sidebar.js148
-rw-r--r--third_party/python/mock-1.0.0/html/_static/title_background.pngbin0 -> 132 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/toc.js20
-rw-r--r--third_party/python/mock-1.0.0/html/_static/triangle_closed.pngbin0 -> 181 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/triangle_left.pngbin0 -> 195 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/triangle_open.pngbin0 -> 191 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/_static/underscore.js23
-rw-r--r--third_party/python/mock-1.0.0/html/changelog.html839
-rw-r--r--third_party/python/mock-1.0.0/html/compare.html672
-rw-r--r--third_party/python/mock-1.0.0/html/examples.html1006
-rw-r--r--third_party/python/mock-1.0.0/html/genindex.html479
-rw-r--r--third_party/python/mock-1.0.0/html/getting-started.html510
-rw-r--r--third_party/python/mock-1.0.0/html/index.html529
-rw-r--r--third_party/python/mock-1.0.0/html/magicmock.html347
-rw-r--r--third_party/python/mock-1.0.0/html/mock.html875
-rw-r--r--third_party/python/mock-1.0.0/html/mocksignature.html352
-rw-r--r--third_party/python/mock-1.0.0/html/objects.invbin0 -> 711 bytes
-rw-r--r--third_party/python/mock-1.0.0/html/output.txt126
-rw-r--r--third_party/python/mock-1.0.0/html/patch.html648
-rw-r--r--third_party/python/mock-1.0.0/html/search.html99
-rw-r--r--third_party/python/mock-1.0.0/html/searchindex.js1
-rw-r--r--third_party/python/mock-1.0.0/html/sentinel.html156
-rw-r--r--third_party/python/mock-1.0.0/mock.egg-info/PKG-INFO208
-rw-r--r--third_party/python/mock-1.0.0/mock.egg-info/SOURCES.txt94
-rw-r--r--third_party/python/mock-1.0.0/mock.egg-info/dependency_links.txt1
-rw-r--r--third_party/python/mock-1.0.0/mock.egg-info/top_level.txt1
-rw-r--r--third_party/python/mock-1.0.0/mock.py2356
-rw-r--r--third_party/python/mock-1.0.0/setup.cfg12
-rwxr-xr-xthird_party/python/mock-1.0.0/setup.py72
-rw-r--r--third_party/python/mock-1.0.0/tests/__init__.py3
-rw-r--r--third_party/python/mock-1.0.0/tests/_testwith.py181
-rw-r--r--third_party/python/mock-1.0.0/tests/support.py41
-rw-r--r--third_party/python/mock-1.0.0/tests/support_with.py93
-rw-r--r--third_party/python/mock-1.0.0/tests/testcallable.py158
-rw-r--r--third_party/python/mock-1.0.0/tests/testhelpers.py940
-rw-r--r--third_party/python/mock-1.0.0/tests/testmagicmethods.py486
-rw-r--r--third_party/python/mock-1.0.0/tests/testmock.py1351
-rw-r--r--third_party/python/mock-1.0.0/tests/testpatch.py1790
-rw-r--r--third_party/python/mock-1.0.0/tests/testsentinel.py33
-rw-r--r--third_party/python/mock-1.0.0/tests/testwith.py16
-rw-r--r--third_party/python/mock-1.0.0/tox.ini40
-rw-r--r--third_party/python/mohawk/PKG-INFO19
-rw-r--r--third_party/python/mohawk/README.rst25
-rw-r--r--third_party/python/mohawk/mohawk/__init__.py2
-rw-r--r--third_party/python/mohawk/mohawk/base.py230
-rw-r--r--third_party/python/mohawk/mohawk/bewit.py167
-rw-r--r--third_party/python/mohawk/mohawk/exc.py98
-rw-r--r--third_party/python/mohawk/mohawk/receiver.py170
-rw-r--r--third_party/python/mohawk/mohawk/sender.py178
-rw-r--r--third_party/python/mohawk/mohawk/tests.py823
-rw-r--r--third_party/python/mohawk/mohawk/util.py267
-rw-r--r--third_party/python/mohawk/setup.cfg5
-rw-r--r--third_party/python/mohawk/setup.py25
-rw-r--r--third_party/python/more-itertools/LICENSE19
-rw-r--r--third_party/python/more-itertools/MANIFEST.in9
-rw-r--r--third_party/python/more-itertools/PKG-INFO430
-rw-r--r--third_party/python/more-itertools/README.rst154
-rw-r--r--third_party/python/more-itertools/more_itertools/__init__.py2
-rw-r--r--third_party/python/more-itertools/more_itertools/more.py2211
-rw-r--r--third_party/python/more-itertools/more_itertools/recipes.py565
-rw-r--r--third_party/python/more-itertools/more_itertools/tests/__init__.py0
-rw-r--r--third_party/python/more-itertools/more_itertools/tests/test_more.py2074
-rw-r--r--third_party/python/more-itertools/more_itertools/tests/test_recipes.py616
-rw-r--r--third_party/python/more-itertools/setup.cfg8
-rw-r--r--third_party/python/more-itertools/setup.py59
-rw-r--r--third_party/python/more-itertools/tox.ini5
-rw-r--r--third_party/python/moz.build97
-rw-r--r--third_party/python/mozilla-version/LICENSE363
-rw-r--r--third_party/python/mozilla-version/MANIFEST.in8
-rw-r--r--third_party/python/mozilla-version/PKG-INFO13
-rw-r--r--third_party/python/mozilla-version/README.md28
-rw-r--r--third_party/python/mozilla-version/mozilla_version/__init__.py1
-rw-r--r--third_party/python/mozilla-version/mozilla_version/balrog.py142
-rw-r--r--third_party/python/mozilla-version/mozilla_version/errors.py64
-rw-r--r--third_party/python/mozilla-version/mozilla_version/gecko.py435
-rw-r--r--third_party/python/mozilla-version/mozilla_version/maven.py56
-rw-r--r--third_party/python/mozilla-version/mozilla_version/parser.py48
-rw-r--r--third_party/python/mozilla-version/mozilla_version/test/__init__.py5
-rw-r--r--third_party/python/mozilla-version/mozilla_version/test/test_balrog.py172
-rw-r--r--third_party/python/mozilla-version/mozilla_version/test/test_gecko.py411
-rw-r--r--third_party/python/mozilla-version/mozilla_version/test/test_maven.py87
-rw-r--r--third_party/python/mozilla-version/mozilla_version/test/test_version.py171
-rw-r--r--third_party/python/mozilla-version/mozilla_version/version.py177
-rw-r--r--third_party/python/mozilla-version/requirements-coveralls.txt140
-rw-r--r--third_party/python/mozilla-version/requirements-docs.txt122
-rw-r--r--third_party/python/mozilla-version/requirements-test.txt140
-rw-r--r--third_party/python/mozilla-version/requirements.txt14
-rw-r--r--third_party/python/mozilla-version/requirements.txt.in3
-rw-r--r--third_party/python/mozilla-version/setup.cfg4
-rw-r--r--third_party/python/mozilla-version/setup.py38
-rw-r--r--third_party/python/mozilla-version/version.txt1
-rw-r--r--third_party/python/pathlib2/CHANGELOG.rst137
-rw-r--r--third_party/python/pathlib2/LICENSE.rst23
-rw-r--r--third_party/python/pathlib2/MANIFEST.in10
-rw-r--r--third_party/python/pathlib2/PKG-INFO72
-rw-r--r--third_party/python/pathlib2/README.rst52
-rw-r--r--third_party/python/pathlib2/VERSION1
-rw-r--r--third_party/python/pathlib2/pathlib2/__init__.py1670
-rw-r--r--third_party/python/pathlib2/requirements.txt3
-rw-r--r--third_party/python/pathlib2/setup.cfg13
-rw-r--r--third_party/python/pathlib2/setup.py49
-rw-r--r--third_party/python/pathspec/CHANGES.rst202
-rw-r--r--third_party/python/pathspec/LICENSE373
-rw-r--r--third_party/python/pathspec/MANIFEST.in2
-rw-r--r--third_party/python/pathspec/PKG-INFO380
-rw-r--r--third_party/python/pathspec/README.rst153
-rw-r--r--third_party/python/pathspec/pathspec/__init__.py68
-rw-r--r--third_party/python/pathspec/pathspec/compat.py38
-rw-r--r--third_party/python/pathspec/pathspec/pathspec.py185
-rw-r--r--third_party/python/pathspec/pathspec/pattern.py146
-rw-r--r--third_party/python/pathspec/pathspec/patterns/__init__.py8
-rw-r--r--third_party/python/pathspec/pathspec/patterns/gitwildmatch.py330
-rw-r--r--third_party/python/pathspec/pathspec/tests/__init__.py0
-rw-r--r--third_party/python/pathspec/pathspec/tests/test_gitwildmatch.py474
-rw-r--r--third_party/python/pathspec/pathspec/tests/test_pathspec.py129
-rw-r--r--third_party/python/pathspec/pathspec/tests/test_util.py380
-rw-r--r--third_party/python/pathspec/pathspec/util.py600
-rw-r--r--third_party/python/pathspec/setup.cfg7
-rw-r--r--third_party/python/pathspec/setup.py44
-rw-r--r--third_party/python/pep487/PKG-INFO87
-rw-r--r--third_party/python/pep487/README.rst68
-rw-r--r--third_party/python/pep487/lib/pep487/__init__.py81
-rw-r--r--third_party/python/pep487/lib/pep487/version.py27
-rw-r--r--third_party/python/pep487/setup.cfg12
-rw-r--r--third_party/python/pep487/setup.py121
-rw-r--r--third_party/python/pip-tools/.appveyor.yml52
-rw-r--r--third_party/python/pip-tools/.bandit2
-rw-r--r--third_party/python/pip-tools/.coveragerc8
-rw-r--r--third_party/python/pip-tools/.fussyfox.yml3
-rw-r--r--third_party/python/pip-tools/.github/ISSUE_TEMPLATE/bug-report.md28
-rw-r--r--third_party/python/pip-tools/.github/ISSUE_TEMPLATE/feature-request.md19
-rw-r--r--third_party/python/pip-tools/.github/PULL_REQUEST_TEMPLATE.md9
-rw-r--r--third_party/python/pip-tools/.github/workflows/ci.yml66
-rw-r--r--third_party/python/pip-tools/.github/workflows/cron.yml73
-rw-r--r--third_party/python/pip-tools/.github/workflows/qa.yml43
-rw-r--r--third_party/python/pip-tools/.gitignore31
-rw-r--r--third_party/python/pip-tools/.pre-commit-config.yaml24
-rw-r--r--third_party/python/pip-tools/.pre-commit-hooks.yaml7
-rw-r--r--third_party/python/pip-tools/.travis.yml72
-rw-r--r--third_party/python/pip-tools/CHANGELOG.md607
-rw-r--r--third_party/python/pip-tools/CONTRIBUTING.md49
-rw-r--r--third_party/python/pip-tools/LICENSE26
-rw-r--r--third_party/python/pip-tools/PKG-INFO495
-rw-r--r--third_party/python/pip-tools/README.rst466
-rw-r--r--third_party/python/pip-tools/examples/django.in3
-rw-r--r--third_party/python/pip-tools/examples/flask.in2
-rw-r--r--third_party/python/pip-tools/examples/hypothesis.in1
-rw-r--r--third_party/python/pip-tools/examples/protection.in3
-rw-r--r--third_party/python/pip-tools/examples/sentry.in2
-rw-r--r--third_party/python/pip-tools/img/pip-tools-overview.pngbin0 -> 23961 bytes
-rw-r--r--third_party/python/pip-tools/piptools/__init__.py11
-rw-r--r--third_party/python/pip-tools/piptools/__main__.py17
-rw-r--r--third_party/python/pip-tools/piptools/_compat/__init__.py12
-rw-r--r--third_party/python/pip-tools/piptools/_compat/contextlib.py18
-rw-r--r--third_party/python/pip-tools/piptools/_compat/pip_compat.py29
-rw-r--r--third_party/python/pip-tools/piptools/_compat/tempfile.py88
-rw-r--r--third_party/python/pip-tools/piptools/cache.py170
-rw-r--r--third_party/python/pip-tools/piptools/click.py6
-rw-r--r--third_party/python/pip-tools/piptools/exceptions.py66
-rw-r--r--third_party/python/pip-tools/piptools/locations.py25
-rw-r--r--third_party/python/pip-tools/piptools/logging.py62
-rw-r--r--third_party/python/pip-tools/piptools/repositories/__init__.py3
-rw-r--r--third_party/python/pip-tools/piptools/repositories/base.py55
-rw-r--r--third_party/python/pip-tools/piptools/repositories/local.py99
-rw-r--r--third_party/python/pip-tools/piptools/repositories/pypi.py524
-rw-r--r--third_party/python/pip-tools/piptools/resolver.py408
-rw-r--r--third_party/python/pip-tools/piptools/scripts/__init__.py0
-rwxr-xr-xthird_party/python/pip-tools/piptools/scripts/compile.py501
-rwxr-xr-xthird_party/python/pip-tools/piptools/scripts/sync.py217
-rw-r--r--third_party/python/pip-tools/piptools/sync.py223
-rw-r--r--third_party/python/pip-tools/piptools/utils.py388
-rw-r--r--third_party/python/pip-tools/piptools/writer.py239
-rw-r--r--third_party/python/pip-tools/setup.cfg85
-rw-r--r--third_party/python/pip-tools/setup.py3
-rw-r--r--third_party/python/pip-tools/tox.ini52
-rw-r--r--third_party/python/pluggy/LICENSE22
-rw-r--r--third_party/python/pluggy/MANIFEST.in7
-rw-r--r--third_party/python/pluggy/PKG-INFO112
-rw-r--r--third_party/python/pluggy/README.rst80
-rw-r--r--third_party/python/pluggy/pluggy/__init__.py684
-rw-r--r--third_party/python/pluggy/pluggy/callers.py201
-rw-r--r--third_party/python/pluggy/setup.cfg13
-rw-r--r--third_party/python/pluggy/setup.py51
-rw-r--r--third_party/python/pluggy/testing/benchmark.py59
-rw-r--r--third_party/python/pluggy/testing/conftest.py30
-rw-r--r--third_party/python/pluggy/testing/test_details.py103
-rw-r--r--third_party/python/pluggy/testing/test_helpers.py68
-rw-r--r--third_party/python/pluggy/testing/test_hookrelay.py210
-rw-r--r--third_party/python/pluggy/testing/test_method_ordering.py322
-rw-r--r--third_party/python/pluggy/testing/test_multicall.py194
-rw-r--r--third_party/python/pluggy/testing/test_pluginmanager.py374
-rw-r--r--third_party/python/pluggy/testing/test_tracer.py89
-rw-r--r--third_party/python/pluggy/tox.ini44
-rw-r--r--third_party/python/ply/ANNOUNCE40
-rw-r--r--third_party/python/ply/CHANGES1394
-rw-r--r--third_party/python/ply/MANIFEST.in8
-rw-r--r--third_party/python/ply/PKG-INFO22
-rw-r--r--third_party/python/ply/README.md273
-rw-r--r--third_party/python/ply/TODO16
-rw-r--r--third_party/python/ply/example/BASIC/README79
-rw-r--r--third_party/python/ply/example/BASIC/basic.py65
-rw-r--r--third_party/python/ply/example/BASIC/basiclex.py61
-rw-r--r--third_party/python/ply/example/BASIC/basiclog.py73
-rw-r--r--third_party/python/ply/example/BASIC/basinterp.py496
-rw-r--r--third_party/python/ply/example/BASIC/basparse.py474
-rw-r--r--third_party/python/ply/example/BASIC/dim.bas14
-rw-r--r--third_party/python/ply/example/BASIC/func.bas5
-rw-r--r--third_party/python/ply/example/BASIC/gcd.bas22
-rw-r--r--third_party/python/ply/example/BASIC/gosub.bas13
-rw-r--r--third_party/python/ply/example/BASIC/hello.bas4
-rw-r--r--third_party/python/ply/example/BASIC/linear.bas17
-rw-r--r--third_party/python/ply/example/BASIC/maxsin.bas12
-rw-r--r--third_party/python/ply/example/BASIC/powers.bas13
-rw-r--r--third_party/python/ply/example/BASIC/rand.bas4
-rw-r--r--third_party/python/ply/example/BASIC/sales.bas20
-rw-r--r--third_party/python/ply/example/BASIC/sears.bas18
-rw-r--r--third_party/python/ply/example/BASIC/sqrt1.bas5
-rw-r--r--third_party/python/ply/example/BASIC/sqrt2.bas4
-rw-r--r--third_party/python/ply/example/GardenSnake/GardenSnake.py777
-rw-r--r--third_party/python/ply/example/GardenSnake/README5
-rw-r--r--third_party/python/ply/example/README10
-rw-r--r--third_party/python/ply/example/ansic/README2
-rw-r--r--third_party/python/ply/example/ansic/clex.py168
-rw-r--r--third_party/python/ply/example/ansic/cparse.py1048
-rw-r--r--third_party/python/ply/example/calc/calc.py123
-rw-r--r--third_party/python/ply/example/calcdebug/calc.py129
-rw-r--r--third_party/python/ply/example/calceof/calc.py132
-rwxr-xr-xthird_party/python/ply/example/classcalc/calc.py165
-rwxr-xr-xthird_party/python/ply/example/cleanup.sh2
-rw-r--r--third_party/python/ply/example/closurecalc/calc.py132
-rw-r--r--third_party/python/ply/example/hedit/hedit.py48
-rwxr-xr-xthird_party/python/ply/example/newclasscalc/calc.py167
-rw-r--r--third_party/python/ply/example/optcalc/README9
-rw-r--r--third_party/python/ply/example/optcalc/calc.py134
-rw-r--r--third_party/python/ply/example/unicalc/calc.py133
-rw-r--r--third_party/python/ply/example/yply/README41
-rw-r--r--third_party/python/ply/example/yply/ylex.py119
-rw-r--r--third_party/python/ply/example/yply/yparse.py244
-rwxr-xr-xthird_party/python/ply/example/yply/yply.py51
-rw-r--r--third_party/python/ply/ply/__init__.py5
-rw-r--r--third_party/python/ply/ply/cpp.py918
-rw-r--r--third_party/python/ply/ply/ctokens.py133
-rw-r--r--third_party/python/ply/ply/lex.py1100
-rw-r--r--third_party/python/ply/ply/yacc.py3494
-rw-r--r--third_party/python/ply/ply/ygen.py74
-rw-r--r--third_party/python/ply/setup.cfg11
-rw-r--r--third_party/python/ply/setup.py31
-rw-r--r--third_party/python/py/.gitignore14
-rw-r--r--third_party/python/py/.travis.yml44
-rw-r--r--third_party/python/py/AUTHORS24
-rw-r--r--third_party/python/py/CHANGELOG1160
-rw-r--r--third_party/python/py/HOWTORELEASE.rst17
-rw-r--r--third_party/python/py/LICENSE19
-rw-r--r--third_party/python/py/MANIFEST.in10
-rw-r--r--third_party/python/py/PKG-INFO67
-rw-r--r--third_party/python/py/README.rst34
-rw-r--r--third_party/python/py/appveyor.yml26
-rw-r--r--third_party/python/py/bench/localpath.py75
-rw-r--r--third_party/python/py/conftest.py60
-rw-r--r--third_party/python/py/py/__init__.py156
-rw-r--r--third_party/python/py/py/__metainfo.py2
-rw-r--r--third_party/python/py/py/_builtin.py248
-rw-r--r--third_party/python/py/py/_code/__init__.py1
-rw-r--r--third_party/python/py/py/_code/_assertionnew.py322
-rw-r--r--third_party/python/py/py/_code/_assertionold.py556
-rw-r--r--third_party/python/py/py/_code/_py2traceback.py79
-rw-r--r--third_party/python/py/py/_code/assertion.py90
-rw-r--r--third_party/python/py/py/_code/code.py796
-rw-r--r--third_party/python/py/py/_code/source.py410
-rw-r--r--third_party/python/py/py/_error.py91
-rw-r--r--third_party/python/py/py/_io/__init__.py1
-rw-r--r--third_party/python/py/py/_io/capture.py371
-rw-r--r--third_party/python/py/py/_io/saferepr.py71
-rw-r--r--third_party/python/py/py/_io/terminalwriter.py384
-rw-r--r--third_party/python/py/py/_log/__init__.py2
-rw-r--r--third_party/python/py/py/_log/log.py206
-rw-r--r--third_party/python/py/py/_log/warning.py79
-rw-r--r--third_party/python/py/py/_path/__init__.py1
-rw-r--r--third_party/python/py/py/_path/cacheutil.py114
-rw-r--r--third_party/python/py/py/_path/common.py453
-rw-r--r--third_party/python/py/py/_path/local.py992
-rw-r--r--third_party/python/py/py/_path/svnurl.py380
-rw-r--r--third_party/python/py/py/_path/svnwc.py1240
-rw-r--r--third_party/python/py/py/_process/__init__.py1
-rw-r--r--third_party/python/py/py/_process/cmdexec.py49
-rw-r--r--third_party/python/py/py/_process/forkedfunc.py120
-rw-r--r--third_party/python/py/py/_process/killproc.py23
-rw-r--r--third_party/python/py/py/_std.py26
-rw-r--r--third_party/python/py/py/_vendored_packages/__init__.py0
-rw-r--r--third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/DESCRIPTION.rst87
-rw-r--r--third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/INSTALLER1
-rw-r--r--third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/METADATA109
-rw-r--r--third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/RECORD9
-rw-r--r--third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/WHEEL6
-rw-r--r--third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/metadata.json1
-rw-r--r--third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/top_level.txt1
-rw-r--r--third_party/python/py/py/_vendored_packages/apipkg.py205
-rw-r--r--third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/DESCRIPTION.rst53
-rw-r--r--third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/INSTALLER1
-rw-r--r--third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/METADATA78
-rw-r--r--third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/RECORD9
-rw-r--r--third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/WHEEL5
-rw-r--r--third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/metadata.json1
-rw-r--r--third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/top_level.txt1
-rw-r--r--third_party/python/py/py/_vendored_packages/iniconfig.py165
-rw-r--r--third_party/python/py/py/_version.py4
-rw-r--r--third_party/python/py/py/_xmlgen.py255
-rw-r--r--third_party/python/py/py/test.py10
-rw-r--r--third_party/python/py/setup.cfg13
-rw-r--r--third_party/python/py/setup.py41
-rw-r--r--third_party/python/py/tasks/__init__.py12
-rw-r--r--third_party/python/py/tasks/vendoring.py23
-rw-r--r--third_party/python/py/testing/code/test_assertion.py305
-rw-r--r--third_party/python/py/testing/code/test_code.py159
-rw-r--r--third_party/python/py/testing/code/test_excinfo.py956
-rw-r--r--third_party/python/py/testing/code/test_source.py648
-rw-r--r--third_party/python/py/testing/conftest.py3
-rw-r--r--third_party/python/py/testing/io_/__init__.py1
-rw-r--r--third_party/python/py/testing/io_/test_capture.py501
-rw-r--r--third_party/python/py/testing/io_/test_saferepr.py75
-rw-r--r--third_party/python/py/testing/io_/test_terminalwriter.py292
-rw-r--r--third_party/python/py/testing/log/__init__.py0
-rw-r--r--third_party/python/py/testing/log/test_log.py191
-rw-r--r--third_party/python/py/testing/log/test_warning.py86
-rw-r--r--third_party/python/py/testing/path/common.py492
-rw-r--r--third_party/python/py/testing/path/conftest.py80
-rw-r--r--third_party/python/py/testing/path/repotest.dump228
-rw-r--r--third_party/python/py/testing/path/svntestbase.py31
-rw-r--r--third_party/python/py/testing/path/test_cacheutil.py89
-rw-r--r--third_party/python/py/testing/path/test_local.py976
-rw-r--r--third_party/python/py/testing/path/test_svnauth.py460
-rw-r--r--third_party/python/py/testing/path/test_svnurl.py95
-rw-r--r--third_party/python/py/testing/path/test_svnwc.py557
-rw-r--r--third_party/python/py/testing/process/__init__.py1
-rw-r--r--third_party/python/py/testing/process/test_cmdexec.py41
-rw-r--r--third_party/python/py/testing/process/test_forkedfunc.py173
-rw-r--r--third_party/python/py/testing/process/test_killproc.py18
-rw-r--r--third_party/python/py/testing/root/__init__.py1
-rw-r--r--third_party/python/py/testing/root/test_builtin.py179
-rw-r--r--third_party/python/py/testing/root/test_error.py76
-rw-r--r--third_party/python/py/testing/root/test_py_imports.py71
-rw-r--r--third_party/python/py/testing/root/test_std.py13
-rw-r--r--third_party/python/py/testing/root/test_xmlgen.py146
-rw-r--r--third_party/python/py/tox.ini33
-rw-r--r--third_party/python/pyasn1-modules/CHANGES.txt124
-rw-r--r--third_party/python/pyasn1-modules/LICENSE.txt24
-rw-r--r--third_party/python/pyasn1-modules/MANIFEST.in4
-rw-r--r--third_party/python/pyasn1-modules/PKG-INFO35
-rw-r--r--third_party/python/pyasn1-modules/README.md30
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules.egg-info/PKG-INFO35
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules.egg-info/SOURCES.txt66
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules.egg-info/dependency_links.txt1
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules.egg-info/requires.txt1
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules.egg-info/top_level.txt1
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules.egg-info/zip-safe1
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/__init__.py2
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/pem.py65
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc1155.py93
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc1157.py122
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc1901.py20
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc1902.py126
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc1905.py130
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc2251.py559
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc2314.py48
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc2315.py272
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc2437.py66
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc2459.py1311
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc2511.py258
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc2560.py220
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc3279.py231
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc3280.py1537
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc3281.py331
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc3412.py50
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc3414.py26
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc3447.py43
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc3852.py701
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc4210.py788
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc4211.py391
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc5208.py56
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc5280.py1597
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc5652.py706
-rw-r--r--third_party/python/pyasn1-modules/pyasn1_modules/rfc6402.py561
-rw-r--r--third_party/python/pyasn1-modules/requirements.txt1
-rw-r--r--third_party/python/pyasn1-modules/setup.cfg8
-rw-r--r--third_party/python/pyasn1-modules/setup.py135
-rw-r--r--third_party/python/pyasn1-modules/tests/__init__.py1
-rw-r--r--third_party/python/pyasn1-modules/tests/__main__.py28
-rw-r--r--third_party/python/pyasn1-modules/tests/test_rfc2314.py57
-rw-r--r--third_party/python/pyasn1-modules/tests/test_rfc2315.py179
-rw-r--r--third_party/python/pyasn1-modules/tests/test_rfc2437.py47
-rw-r--r--third_party/python/pyasn1-modules/tests/test_rfc2459.py111
-rw-r--r--third_party/python/pyasn1-modules/tests/test_rfc2511.py49
-rw-r--r--third_party/python/pyasn1-modules/tests/test_rfc2560.py81
-rw-r--r--third_party/python/pyasn1-modules/tests/test_rfc4210.py129
-rw-r--r--third_party/python/pyasn1-modules/tests/test_rfc5208.py74
-rw-r--r--third_party/python/pyasn1-modules/tests/test_rfc5280.py82
-rw-r--r--third_party/python/pyasn1-modules/tests/test_rfc5652.py87
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/cmcdump.py56
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/cmpdump.py33
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/crldump.py41
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/crmfdump.py30
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/ocspclient.py165
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/ocspreqdump.py30
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/ocsprspdump.py30
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/pkcs10dump.py43
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/pkcs1dump.py50
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/pkcs7dump.py51
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/pkcs8dump.py49
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/snmpget.py44
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/x509dump-rfc5280.py46
-rwxr-xr-xthird_party/python/pyasn1-modules/tools/x509dump.py44
-rw-r--r--third_party/python/pyasn1/CHANGES.rst716
-rw-r--r--third_party/python/pyasn1/LICENSE.rst24
-rw-r--r--third_party/python/pyasn1/MANIFEST.in5
-rw-r--r--third_party/python/pyasn1/PKG-INFO35
-rw-r--r--third_party/python/pyasn1/README.md184
-rw-r--r--third_party/python/pyasn1/TODO.rst92
-rw-r--r--third_party/python/pyasn1/pyasn1.egg-info/PKG-INFO35
-rw-r--r--third_party/python/pyasn1/pyasn1.egg-info/SOURCES.txt161
-rw-r--r--third_party/python/pyasn1/pyasn1.egg-info/dependency_links.txt1
-rw-r--r--third_party/python/pyasn1/pyasn1.egg-info/top_level.txt1
-rw-r--r--third_party/python/pyasn1/pyasn1.egg-info/zip-safe1
-rw-r--r--third_party/python/pyasn1/pyasn1/__init__.py7
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/__init__.py1
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/ber/__init__.py1
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/ber/decoder.py1682
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/ber/encoder.py890
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/ber/eoo.py28
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/cer/__init__.py1
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/cer/decoder.py114
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/cer/encoder.py313
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/der/__init__.py1
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/der/decoder.py94
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/der/encoder.py107
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/native/__init__.py1
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/native/decoder.py213
-rw-r--r--third_party/python/pyasn1/pyasn1/codec/native/encoder.py256
-rw-r--r--third_party/python/pyasn1/pyasn1/compat/__init__.py1
-rw-r--r--third_party/python/pyasn1/pyasn1/compat/binary.py33
-rw-r--r--third_party/python/pyasn1/pyasn1/compat/calling.py20
-rw-r--r--third_party/python/pyasn1/pyasn1/compat/dateandtime.py22
-rw-r--r--third_party/python/pyasn1/pyasn1/compat/integer.py110
-rw-r--r--third_party/python/pyasn1/pyasn1/compat/octets.py46
-rw-r--r--third_party/python/pyasn1/pyasn1/compat/string.py26
-rw-r--r--third_party/python/pyasn1/pyasn1/debug.py157
-rw-r--r--third_party/python/pyasn1/pyasn1/error.py75
-rw-r--r--third_party/python/pyasn1/pyasn1/type/__init__.py1
-rw-r--r--third_party/python/pyasn1/pyasn1/type/base.py707
-rw-r--r--third_party/python/pyasn1/pyasn1/type/char.py335
-rw-r--r--third_party/python/pyasn1/pyasn1/type/constraint.py756
-rw-r--r--third_party/python/pyasn1/pyasn1/type/error.py11
-rw-r--r--third_party/python/pyasn1/pyasn1/type/namedtype.py561
-rw-r--r--third_party/python/pyasn1/pyasn1/type/namedval.py192
-rw-r--r--third_party/python/pyasn1/pyasn1/type/opentype.py104
-rw-r--r--third_party/python/pyasn1/pyasn1/type/tag.py335
-rw-r--r--third_party/python/pyasn1/pyasn1/type/tagmap.py96
-rw-r--r--third_party/python/pyasn1/pyasn1/type/univ.py3321
-rw-r--r--third_party/python/pyasn1/pyasn1/type/useful.py191
-rw-r--r--third_party/python/pyasn1/setup.cfg10
-rw-r--r--third_party/python/pyasn1/setup.py121
-rw-r--r--third_party/python/pyflakes/AUTHORS22
-rw-r--r--third_party/python/pyflakes/LICENSE21
-rw-r--r--third_party/python/pyflakes/MANIFEST.in3
-rw-r--r--third_party/python/pyflakes/NEWS.rst266
-rw-r--r--third_party/python/pyflakes/PKG-INFO116
-rw-r--r--third_party/python/pyflakes/README.rst89
-rwxr-xr-xthird_party/python/pyflakes/bin/pyflakes3
-rw-r--r--third_party/python/pyflakes/pyflakes/__init__.py1
-rw-r--r--third_party/python/pyflakes/pyflakes/__main__.py5
-rw-r--r--third_party/python/pyflakes/pyflakes/api.py213
-rw-r--r--third_party/python/pyflakes/pyflakes/checker.py2249
-rw-r--r--third_party/python/pyflakes/pyflakes/messages.py371
-rw-r--r--third_party/python/pyflakes/pyflakes/reporter.py82
-rw-r--r--third_party/python/pyflakes/pyflakes/scripts/__init__.py0
-rw-r--r--third_party/python/pyflakes/pyflakes/scripts/pyflakes.py8
-rw-r--r--third_party/python/pyflakes/pyflakes/test/__init__.py0
-rw-r--r--third_party/python/pyflakes/pyflakes/test/harness.py72
-rw-r--r--third_party/python/pyflakes/pyflakes/test/test_api.py835
-rw-r--r--third_party/python/pyflakes/pyflakes/test/test_builtin.py41
-rw-r--r--third_party/python/pyflakes/pyflakes/test/test_checker.py186
-rw-r--r--third_party/python/pyflakes/pyflakes/test/test_code_segment.py132
-rw-r--r--third_party/python/pyflakes/pyflakes/test/test_dict.py213
-rw-r--r--third_party/python/pyflakes/pyflakes/test/test_doctests.py465
-rw-r--r--third_party/python/pyflakes/pyflakes/test/test_imports.py1221
-rw-r--r--third_party/python/pyflakes/pyflakes/test/test_is_literal.py222
-rw-r--r--third_party/python/pyflakes/pyflakes/test/test_other.py2142
-rw-r--r--third_party/python/pyflakes/pyflakes/test/test_return_with_arguments_inside_generator.py34
-rw-r--r--third_party/python/pyflakes/pyflakes/test/test_type_annotations.py554
-rw-r--r--third_party/python/pyflakes/pyflakes/test/test_undefined_names.py854
-rw-r--r--third_party/python/pyflakes/setup.cfg10
-rwxr-xr-xthird_party/python/pyflakes/setup.py66
-rw-r--r--third_party/python/pylru/pylru.py556
-rw-r--r--third_party/python/pylru/test.py238
-rw-r--r--third_party/python/pyrsistent/CHANGES.txt333
-rw-r--r--third_party/python/pyrsistent/LICENCE.mit22
-rw-r--r--third_party/python/pyrsistent/MANIFEST.in5
-rw-r--r--third_party/python/pyrsistent/PKG-INFO742
-rw-r--r--third_party/python/pyrsistent/README725
-rw-r--r--third_party/python/pyrsistent/README.rst725
-rw-r--r--third_party/python/pyrsistent/_pyrsistent_version.py1
-rw-r--r--third_party/python/pyrsistent/pvectorcmodule.c1642
-rw-r--r--third_party/python/pyrsistent/pyrsistent/__init__.py47
-rw-r--r--third_party/python/pyrsistent/pyrsistent/__init__.pyi213
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_checked_types.py542
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_compat.py31
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_field_common.py330
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_helpers.py82
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_immutable.py105
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_pbag.py267
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_pclass.py264
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_pdeque.py376
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_plist.py313
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_pmap.py460
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_precord.py169
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_pset.py229
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_pvector.py713
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_toolz.py83
-rw-r--r--third_party/python/pyrsistent/pyrsistent/_transformations.py143
-rw-r--r--third_party/python/pyrsistent/pyrsistent/py.typed0
-rw-r--r--third_party/python/pyrsistent/pyrsistent/typing.py80
-rw-r--r--third_party/python/pyrsistent/pyrsistent/typing.pyi292
-rw-r--r--third_party/python/pyrsistent/setup.cfg7
-rw-r--r--third_party/python/pyrsistent/setup.py81
-rw-r--r--third_party/python/pystache/.gitignore17
-rw-r--r--third_party/python/pystache/.travis.yml14
-rw-r--r--third_party/python/pystache/HISTORY.md169
-rw-r--r--third_party/python/pystache/LICENSE22
-rw-r--r--third_party/python/pystache/MANIFEST.in13
-rw-r--r--third_party/python/pystache/README.md276
-rw-r--r--third_party/python/pystache/TODO.md16
-rw-r--r--third_party/python/pystache/gh/images/logo_phillips.pngbin0 -> 173595 bytes
-rw-r--r--third_party/python/pystache/pystache/__init__.py13
-rw-r--r--third_party/python/pystache/pystache/commands/__init__.py4
-rw-r--r--third_party/python/pystache/pystache/commands/render.py95
-rw-r--r--third_party/python/pystache/pystache/commands/test.py18
-rw-r--r--third_party/python/pystache/pystache/common.py71
-rw-r--r--third_party/python/pystache/pystache/context.py342
-rw-r--r--third_party/python/pystache/pystache/defaults.py65
-rw-r--r--third_party/python/pystache/pystache/init.py19
-rw-r--r--third_party/python/pystache/pystache/loader.py170
-rw-r--r--third_party/python/pystache/pystache/locator.py171
-rw-r--r--third_party/python/pystache/pystache/parsed.py50
-rw-r--r--third_party/python/pystache/pystache/parser.py378
-rw-r--r--third_party/python/pystache/pystache/renderengine.py181
-rw-r--r--third_party/python/pystache/pystache/renderer.py460
-rw-r--r--third_party/python/pystache/pystache/specloader.py90
-rw-r--r--third_party/python/pystache/pystache/template_spec.py53
-rw-r--r--third_party/python/pystache/setup.py413
-rw-r--r--third_party/python/pystache/setup_description.rst513
-rw-r--r--third_party/python/pystache/test_pystache.py30
-rw-r--r--third_party/python/pystache/tox.ini36
-rw-r--r--third_party/python/pytest/.coveragerc4
-rw-r--r--third_party/python/pytest/.github/ISSUE_TEMPLATE.md8
-rw-r--r--third_party/python/pytest/.github/PULL_REQUEST_TEMPLATE.md14
-rw-r--r--third_party/python/pytest/.gitignore40
-rw-r--r--third_party/python/pytest/.pre-commit-config.yaml36
-rw-r--r--third_party/python/pytest/.travis.yml83
-rw-r--r--third_party/python/pytest/AUTHORS213
-rw-r--r--third_party/python/pytest/CHANGELOG.rst4883
-rw-r--r--third_party/python/pytest/CONTRIBUTING.rst294
-rw-r--r--third_party/python/pytest/HOWTORELEASE.rst49
-rw-r--r--third_party/python/pytest/LICENSE21
-rw-r--r--third_party/python/pytest/PKG-INFO149
-rw-r--r--third_party/python/pytest/README.rst116
-rw-r--r--third_party/python/pytest/appveyor.yml48
-rw-r--r--third_party/python/pytest/bench/bench.py13
-rw-r--r--third_party/python/pytest/bench/bench_argcomplete.py22
-rw-r--r--third_party/python/pytest/bench/empty.py4
-rw-r--r--third_party/python/pytest/bench/manyparam.py15
-rw-r--r--third_party/python/pytest/bench/skip.py11
-rw-r--r--third_party/python/pytest/changelog/README.rst32
-rw-r--r--third_party/python/pytest/changelog/_template.rst40
-rw-r--r--third_party/python/pytest/extra/get_issues.py84
-rw-r--r--third_party/python/pytest/extra/setup-py.test/setup.py11
-rw-r--r--third_party/python/pytest/pyproject.toml43
-rw-r--r--third_party/python/pytest/scripts/call-tox.bat8
-rw-r--r--third_party/python/pytest/scripts/install-pypy.bat6
-rw-r--r--third_party/python/pytest/setup.cfg25
-rw-r--r--third_party/python/pytest/setup.py124
-rw-r--r--third_party/python/pytest/src/_pytest/__init__.py8
-rw-r--r--third_party/python/pytest/src/_pytest/_argcomplete.py107
-rw-r--r--third_party/python/pytest/src/_pytest/_code/__init__.py10
-rw-r--r--third_party/python/pytest/src/_pytest/_code/_py2traceback.py89
-rw-r--r--third_party/python/pytest/src/_pytest/_code/code.py970
-rw-r--r--third_party/python/pytest/src/_pytest/_code/source.py379
-rw-r--r--third_party/python/pytest/src/_pytest/_version.py4
-rw-r--r--third_party/python/pytest/src/_pytest/assertion/__init__.py151
-rw-r--r--third_party/python/pytest/src/_pytest/assertion/rewrite.py954
-rw-r--r--third_party/python/pytest/src/_pytest/assertion/truncate.py99
-rw-r--r--third_party/python/pytest/src/_pytest/assertion/util.py338
-rwxr-xr-xthird_party/python/pytest/src/_pytest/cacheprovider.py339
-rw-r--r--third_party/python/pytest/src/_pytest/capture.py724
-rw-r--r--third_party/python/pytest/src/_pytest/compat.py371
-rw-r--r--third_party/python/pytest/src/_pytest/config/__init__.py979
-rw-r--r--third_party/python/pytest/src/_pytest/config/argparsing.py394
-rw-r--r--third_party/python/pytest/src/_pytest/config/exceptions.py8
-rw-r--r--third_party/python/pytest/src/_pytest/config/findpaths.py140
-rw-r--r--third_party/python/pytest/src/_pytest/debugging.py162
-rw-r--r--third_party/python/pytest/src/_pytest/deprecated.py65
-rw-r--r--third_party/python/pytest/src/_pytest/doctest.py520
-rw-r--r--third_party/python/pytest/src/_pytest/fixtures.py1229
-rw-r--r--third_party/python/pytest/src/_pytest/freeze_support.py45
-rw-r--r--third_party/python/pytest/src/_pytest/helpconfig.py212
-rw-r--r--third_party/python/pytest/src/_pytest/hookspec.py563
-rw-r--r--third_party/python/pytest/src/_pytest/junitxml.py569
-rw-r--r--third_party/python/pytest/src/_pytest/logging.py575
-rw-r--r--third_party/python/pytest/src/_pytest/main.py611
-rw-r--r--third_party/python/pytest/src/_pytest/mark/__init__.py174
-rw-r--r--third_party/python/pytest/src/_pytest/mark/evaluate.py121
-rw-r--r--third_party/python/pytest/src/_pytest/mark/legacy.py97
-rw-r--r--third_party/python/pytest/src/_pytest/mark/structures.py428
-rw-r--r--third_party/python/pytest/src/_pytest/monkeypatch.py283
-rw-r--r--third_party/python/pytest/src/_pytest/nodes.py419
-rw-r--r--third_party/python/pytest/src/_pytest/nose.py72
-rw-r--r--third_party/python/pytest/src/_pytest/outcomes.py157
-rw-r--r--third_party/python/pytest/src/_pytest/pastebin.py109
-rw-r--r--third_party/python/pytest/src/_pytest/pytester.py1272
-rw-r--r--third_party/python/pytest/src/_pytest/python.py1331
-rw-r--r--third_party/python/pytest/src/_pytest/python_api.py674
-rw-r--r--third_party/python/pytest/src/_pytest/recwarn.py243
-rw-r--r--third_party/python/pytest/src/_pytest/resultlog.py120
-rw-r--r--third_party/python/pytest/src/_pytest/runner.py570
-rw-r--r--third_party/python/pytest/src/_pytest/setuponly.py84
-rw-r--r--third_party/python/pytest/src/_pytest/setupplan.py29
-rw-r--r--third_party/python/pytest/src/_pytest/skipping.py293
-rw-r--r--third_party/python/pytest/src/_pytest/terminal.py829
-rw-r--r--third_party/python/pytest/src/_pytest/tmpdir.py131
-rw-r--r--third_party/python/pytest/src/_pytest/unittest.py253
-rw-r--r--third_party/python/pytest/src/_pytest/warnings.py112
-rw-r--r--third_party/python/pytest/src/pytest.py72
-rw-r--r--third_party/python/pytest/tasks/__init__.py10
-rw-r--r--third_party/python/pytest/tasks/generate.py118
-rw-r--r--third_party/python/pytest/tasks/release.minor.rst27
-rw-r--r--third_party/python/pytest/tasks/release.patch.rst17
-rw-r--r--third_party/python/pytest/tasks/requirements.txt6
-rw-r--r--third_party/python/pytest/testing/acceptance_test.py1066
-rw-r--r--third_party/python/pytest/testing/code/test_code.py210
-rw-r--r--third_party/python/pytest/testing/code/test_excinfo.py1357
-rw-r--r--third_party/python/pytest/testing/code/test_source.py758
-rw-r--r--third_party/python/pytest/testing/code/test_source_multiline_block.py29
-rw-r--r--third_party/python/pytest/testing/deprecated_test.py265
-rw-r--r--third_party/python/pytest/testing/freeze/.gitignore3
-rw-r--r--third_party/python/pytest/testing/freeze/create_executable.py12
-rw-r--r--third_party/python/pytest/testing/freeze/runtests_script.py10
-rw-r--r--third_party/python/pytest/testing/freeze/tests/test_doctest.txt6
-rw-r--r--third_party/python/pytest/testing/freeze/tests/test_trivial.py6
-rw-r--r--third_party/python/pytest/testing/freeze/tox_run.py12
-rw-r--r--third_party/python/pytest/testing/logging/test_fixture.py119
-rw-r--r--third_party/python/pytest/testing/logging/test_formatter.py37
-rw-r--r--third_party/python/pytest/testing/logging/test_reporting.py874
-rw-r--r--third_party/python/pytest/testing/python/approx.py406
-rw-r--r--third_party/python/pytest/testing/python/collect.py1555
-rw-r--r--third_party/python/pytest/testing/python/fixture.py4011
-rw-r--r--third_party/python/pytest/testing/python/integration.py453
-rw-r--r--third_party/python/pytest/testing/python/metafunc.py1768
-rw-r--r--third_party/python/pytest/testing/python/raises.py177
-rw-r--r--third_party/python/pytest/testing/python/setup_only.py269
-rw-r--r--third_party/python/pytest/testing/python/setup_plan.py19
-rw-r--r--third_party/python/pytest/testing/python/show_fixtures_per_test.py183
-rw-r--r--third_party/python/pytest/testing/python/test_deprecations.py22
-rw-r--r--third_party/python/pytest/testing/test_argcomplete.py109
-rw-r--r--third_party/python/pytest/testing/test_assertion.py1172
-rw-r--r--third_party/python/pytest/testing/test_assertrewrite.py1144
-rw-r--r--third_party/python/pytest/testing/test_cacheprovider.py820
-rw-r--r--third_party/python/pytest/testing/test_capture.py1394
-rw-r--r--third_party/python/pytest/testing/test_collection.py944
-rw-r--r--third_party/python/pytest/testing/test_compat.py110
-rw-r--r--third_party/python/pytest/testing/test_config.py1068
-rw-r--r--third_party/python/pytest/testing/test_conftest.py543
-rw-r--r--third_party/python/pytest/testing/test_doctest.py1206
-rw-r--r--third_party/python/pytest/testing/test_entry_points.py14
-rw-r--r--third_party/python/pytest/testing/test_helpconfig.py72
-rw-r--r--third_party/python/pytest/testing/test_junitxml.py1231
-rw-r--r--third_party/python/pytest/testing/test_mark.py1134
-rw-r--r--third_party/python/pytest/testing/test_modimport.py29
-rw-r--r--third_party/python/pytest/testing/test_monkeypatch.py368
-rw-r--r--third_party/python/pytest/testing/test_nodes.py21
-rw-r--r--third_party/python/pytest/testing/test_nose.py433
-rw-r--r--third_party/python/pytest/testing/test_parseopt.py336
-rw-r--r--third_party/python/pytest/testing/test_pastebin.py132
-rw-r--r--third_party/python/pytest/testing/test_pdb.py702
-rw-r--r--third_party/python/pytest/testing/test_pluginmanager.py386
-rw-r--r--third_party/python/pytest/testing/test_pytester.py401
-rw-r--r--third_party/python/pytest/testing/test_recwarn.py347
-rw-r--r--third_party/python/pytest/testing/test_resultlog.py243
-rw-r--r--third_party/python/pytest/testing/test_runner.py951
-rw-r--r--third_party/python/pytest/testing/test_runner_xunit.py352
-rw-r--r--third_party/python/pytest/testing/test_session.py343
-rw-r--r--third_party/python/pytest/testing/test_skipping.py1194
-rw-r--r--third_party/python/pytest/testing/test_terminal.py1267
-rw-r--r--third_party/python/pytest/testing/test_tmpdir.py212
-rw-r--r--third_party/python/pytest/testing/test_unittest.py992
-rw-r--r--third_party/python/pytest/testing/test_warnings.py290
-rw-r--r--third_party/python/pytest/tox.ini212
-rw-r--r--third_party/python/python-hglib/LICENSE20
-rw-r--r--third_party/python/python-hglib/Makefile17
-rw-r--r--third_party/python/python-hglib/PKG-INFO26
-rw-r--r--third_party/python/python-hglib/README9
-rw-r--r--third_party/python/python-hglib/examples/stats.py35
-rw-r--r--third_party/python/python-hglib/hglib/__init__.py40
-rw-r--r--third_party/python/python-hglib/hglib/client.py1717
-rw-r--r--third_party/python/python-hglib/hglib/context.py238
-rw-r--r--third_party/python/python-hglib/hglib/error.py18
-rw-r--r--third_party/python/python-hglib/hglib/merge.py21
-rw-r--r--third_party/python/python-hglib/hglib/templates.py4
-rw-r--r--third_party/python/python-hglib/hglib/util.py217
-rw-r--r--third_party/python/python-hglib/setup.py54
-rw-r--r--third_party/python/python-hglib/test.py7
-rw-r--r--third_party/python/pytoml/PKG-INFO10
-rw-r--r--third_party/python/pytoml/pytoml/__init__.py3
-rw-r--r--third_party/python/pytoml/pytoml/core.py13
-rw-r--r--third_party/python/pytoml/pytoml/parser.py366
-rw-r--r--third_party/python/pytoml/pytoml/writer.py120
-rw-r--r--third_party/python/pytoml/setup.cfg5
-rw-r--r--third_party/python/pytoml/setup.py17
-rw-r--r--third_party/python/redo/PKG-INFO10
-rw-r--r--third_party/python/redo/README.md147
-rw-r--r--third_party/python/redo/redo.egg-info/PKG-INFO10
-rw-r--r--third_party/python/redo/redo.egg-info/SOURCES.txt10
-rw-r--r--third_party/python/redo/redo.egg-info/dependency_links.txt1
-rw-r--r--third_party/python/redo/redo.egg-info/entry_points.txt3
-rw-r--r--third_party/python/redo/redo.egg-info/top_level.txt1
-rw-r--r--third_party/python/redo/redo/__init__.py265
-rw-r--r--third_party/python/redo/redo/cmd.py70
-rw-r--r--third_party/python/redo/setup.cfg7
-rw-r--r--third_party/python/redo/setup.py16
-rw-r--r--third_party/python/requests-unixsocket/.travis.yml17
-rw-r--r--third_party/python/requests-unixsocket/AUTHORS6
-rw-r--r--third_party/python/requests-unixsocket/ChangeLog67
-rw-r--r--third_party/python/requests-unixsocket/LICENSE202
-rw-r--r--third_party/python/requests-unixsocket/PKG-INFO84
-rw-r--r--third_party/python/requests-unixsocket/README.rst62
-rw-r--r--third_party/python/requests-unixsocket/pytest.ini2
-rw-r--r--third_party/python/requests-unixsocket/requests_unixsocket/__init__.py77
-rw-r--r--third_party/python/requests-unixsocket/requests_unixsocket/adapters.py60
-rwxr-xr-xthird_party/python/requests-unixsocket/requests_unixsocket/tests/test_requests_unixsocket.py121
-rw-r--r--third_party/python/requests-unixsocket/requests_unixsocket/testutils.py97
-rw-r--r--third_party/python/requests-unixsocket/requirements.txt2
-rw-r--r--third_party/python/requests-unixsocket/setup.cfg33
-rwxr-xr-xthird_party/python/requests-unixsocket/setup.py8
-rw-r--r--third_party/python/requests-unixsocket/test-requirements.txt4
-rw-r--r--third_party/python/requests-unixsocket/tox.ini48
-rw-r--r--third_party/python/requests/HISTORY.rst1130
-rw-r--r--third_party/python/requests/LICENSE13
-rw-r--r--third_party/python/requests/MANIFEST.in1
-rw-r--r--third_party/python/requests/NOTICE54
-rw-r--r--third_party/python/requests/PKG-INFO1238
-rw-r--r--third_party/python/requests/README.rst86
-rw-r--r--third_party/python/requests/requests.egg-info/PKG-INFO1238
-rw-r--r--third_party/python/requests/requests.egg-info/SOURCES.txt96
-rw-r--r--third_party/python/requests/requests.egg-info/dependency_links.txt1
-rw-r--r--third_party/python/requests/requests.egg-info/not-zip-safe1
-rw-r--r--third_party/python/requests/requests.egg-info/requires.txt5
-rw-r--r--third_party/python/requests/requests.egg-info/top_level.txt1
-rw-r--r--third_party/python/requests/requests/__init__.py83
-rw-r--r--third_party/python/requests/requests/adapters.py453
-rw-r--r--third_party/python/requests/requests/api.py145
-rw-r--r--third_party/python/requests/requests/auth.py223
-rw-r--r--third_party/python/requests/requests/cacert.pem5616
-rw-r--r--third_party/python/requests/requests/certs.py25
-rw-r--r--third_party/python/requests/requests/compat.py62
-rw-r--r--third_party/python/requests/requests/cookies.py487
-rw-r--r--third_party/python/requests/requests/exceptions.py114
-rw-r--r--third_party/python/requests/requests/hooks.py34
-rw-r--r--third_party/python/requests/requests/models.py851
-rw-r--r--third_party/python/requests/requests/packages/__init__.py36
-rw-r--r--third_party/python/requests/requests/packages/chardet/__init__.py32
-rw-r--r--third_party/python/requests/requests/packages/chardet/big5freq.py925
-rw-r--r--third_party/python/requests/requests/packages/chardet/big5prober.py42
-rwxr-xr-xthird_party/python/requests/requests/packages/chardet/chardetect.py80
-rw-r--r--third_party/python/requests/requests/packages/chardet/chardistribution.py231
-rw-r--r--third_party/python/requests/requests/packages/chardet/charsetgroupprober.py106
-rw-r--r--third_party/python/requests/requests/packages/chardet/charsetprober.py62
-rw-r--r--third_party/python/requests/requests/packages/chardet/codingstatemachine.py61
-rw-r--r--third_party/python/requests/requests/packages/chardet/compat.py34
-rw-r--r--third_party/python/requests/requests/packages/chardet/constants.py39
-rw-r--r--third_party/python/requests/requests/packages/chardet/cp949prober.py44
-rw-r--r--third_party/python/requests/requests/packages/chardet/escprober.py86
-rw-r--r--third_party/python/requests/requests/packages/chardet/escsm.py242
-rw-r--r--third_party/python/requests/requests/packages/chardet/eucjpprober.py90
-rw-r--r--third_party/python/requests/requests/packages/chardet/euckrfreq.py596
-rw-r--r--third_party/python/requests/requests/packages/chardet/euckrprober.py42
-rw-r--r--third_party/python/requests/requests/packages/chardet/euctwfreq.py428
-rw-r--r--third_party/python/requests/requests/packages/chardet/euctwprober.py41
-rw-r--r--third_party/python/requests/requests/packages/chardet/gb2312freq.py472
-rw-r--r--third_party/python/requests/requests/packages/chardet/gb2312prober.py41
-rw-r--r--third_party/python/requests/requests/packages/chardet/hebrewprober.py283
-rw-r--r--third_party/python/requests/requests/packages/chardet/jisfreq.py569
-rw-r--r--third_party/python/requests/requests/packages/chardet/jpcntx.py227
-rw-r--r--third_party/python/requests/requests/packages/chardet/langbulgarianmodel.py229
-rw-r--r--third_party/python/requests/requests/packages/chardet/langcyrillicmodel.py329
-rw-r--r--third_party/python/requests/requests/packages/chardet/langgreekmodel.py225
-rw-r--r--third_party/python/requests/requests/packages/chardet/langhebrewmodel.py201
-rw-r--r--third_party/python/requests/requests/packages/chardet/langhungarianmodel.py225
-rw-r--r--third_party/python/requests/requests/packages/chardet/langthaimodel.py200
-rw-r--r--third_party/python/requests/requests/packages/chardet/latin1prober.py139
-rw-r--r--third_party/python/requests/requests/packages/chardet/mbcharsetprober.py86
-rw-r--r--third_party/python/requests/requests/packages/chardet/mbcsgroupprober.py54
-rw-r--r--third_party/python/requests/requests/packages/chardet/mbcssm.py572
-rw-r--r--third_party/python/requests/requests/packages/chardet/sbcharsetprober.py120
-rw-r--r--third_party/python/requests/requests/packages/chardet/sbcsgroupprober.py69
-rw-r--r--third_party/python/requests/requests/packages/chardet/sjisprober.py91
-rw-r--r--third_party/python/requests/requests/packages/chardet/universaldetector.py170
-rw-r--r--third_party/python/requests/requests/packages/chardet/utf8prober.py76
-rw-r--r--third_party/python/requests/requests/packages/urllib3/__init__.py93
-rw-r--r--third_party/python/requests/requests/packages/urllib3/_collections.py324
-rw-r--r--third_party/python/requests/requests/packages/urllib3/connection.py288
-rw-r--r--third_party/python/requests/requests/packages/urllib3/connectionpool.py818
-rw-r--r--third_party/python/requests/requests/packages/urllib3/contrib/__init__.py0
-rw-r--r--third_party/python/requests/requests/packages/urllib3/contrib/appengine.py223
-rw-r--r--third_party/python/requests/requests/packages/urllib3/contrib/ntlmpool.py115
-rw-r--r--third_party/python/requests/requests/packages/urllib3/contrib/pyopenssl.py310
-rw-r--r--third_party/python/requests/requests/packages/urllib3/exceptions.py201
-rw-r--r--third_party/python/requests/requests/packages/urllib3/fields.py178
-rw-r--r--third_party/python/requests/requests/packages/urllib3/filepost.py94
-rw-r--r--third_party/python/requests/requests/packages/urllib3/packages/__init__.py5
-rw-r--r--third_party/python/requests/requests/packages/urllib3/packages/ordered_dict.py259
-rw-r--r--third_party/python/requests/requests/packages/urllib3/packages/six.py385
-rw-r--r--third_party/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py13
-rw-r--r--third_party/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py105
-rw-r--r--third_party/python/requests/requests/packages/urllib3/poolmanager.py281
-rw-r--r--third_party/python/requests/requests/packages/urllib3/request.py151
-rw-r--r--third_party/python/requests/requests/packages/urllib3/response.py514
-rw-r--r--third_party/python/requests/requests/packages/urllib3/util/__init__.py44
-rw-r--r--third_party/python/requests/requests/packages/urllib3/util/connection.py101
-rw-r--r--third_party/python/requests/requests/packages/urllib3/util/request.py72
-rw-r--r--third_party/python/requests/requests/packages/urllib3/util/response.py74
-rw-r--r--third_party/python/requests/requests/packages/urllib3/util/retry.py286
-rw-r--r--third_party/python/requests/requests/packages/urllib3/util/ssl_.py317
-rw-r--r--third_party/python/requests/requests/packages/urllib3/util/timeout.py242
-rw-r--r--third_party/python/requests/requests/packages/urllib3/util/url.py217
-rw-r--r--third_party/python/requests/requests/sessions.py680
-rw-r--r--third_party/python/requests/requests/status_codes.py90
-rw-r--r--third_party/python/requests/requests/structures.py104
-rw-r--r--third_party/python/requests/requests/utils.py721
-rw-r--r--third_party/python/requests/requirements.txt6
-rw-r--r--third_party/python/requests/setup.cfg8
-rwxr-xr-xthird_party/python/requests/setup.py74
-rwxr-xr-xthird_party/python/requests/test_requests.py1746
-rw-r--r--third_party/python/requirements.in49
-rw-r--r--third_party/python/requirements.txt246
-rw-r--r--third_party/python/responses/CHANGES119
-rw-r--r--third_party/python/responses/LICENSE201
-rw-r--r--third_party/python/responses/MANIFEST.in3
-rw-r--r--third_party/python/responses/PKG-INFO443
-rw-r--r--third_party/python/responses/README.rst420
-rw-r--r--third_party/python/responses/responses.py653
-rw-r--r--third_party/python/responses/setup.cfg16
-rw-r--r--third_party/python/responses/setup.py85
-rw-r--r--third_party/python/responses/test_responses.py924
-rw-r--r--third_party/python/responses/tox.ini7
-rw-r--r--third_party/python/rsa/LICENSE13
-rw-r--r--third_party/python/rsa/MANIFEST.in5
-rw-r--r--third_party/python/rsa/PKG-INFO18
-rw-r--r--third_party/python/rsa/README.rst31
-rwxr-xr-xthird_party/python/rsa/create_timing_table.py29
-rwxr-xr-xthird_party/python/rsa/playstuff.py41
-rw-r--r--third_party/python/rsa/rsa.egg-info/PKG-INFO18
-rw-r--r--third_party/python/rsa/rsa.egg-info/SOURCES.txt46
-rw-r--r--third_party/python/rsa/rsa.egg-info/dependency_links.txt1
-rw-r--r--third_party/python/rsa/rsa.egg-info/entry_points.txt10
-rw-r--r--third_party/python/rsa/rsa.egg-info/requires.txt1
-rw-r--r--third_party/python/rsa/rsa.egg-info/top_level.txt1
-rw-r--r--third_party/python/rsa/rsa/__init__.py45
-rw-r--r--third_party/python/rsa/rsa/_compat.py160
-rw-r--r--third_party/python/rsa/rsa/_version133.py442
-rw-r--r--third_party/python/rsa/rsa/_version200.py529
-rw-r--r--third_party/python/rsa/rsa/asn1.py35
-rw-r--r--third_party/python/rsa/rsa/bigfile.py87
-rw-r--r--third_party/python/rsa/rsa/cli.py379
-rw-r--r--third_party/python/rsa/rsa/common.py185
-rw-r--r--third_party/python/rsa/rsa/core.py58
-rw-r--r--third_party/python/rsa/rsa/key.py612
-rw-r--r--third_party/python/rsa/rsa/parallel.py94
-rw-r--r--third_party/python/rsa/rsa/pem.py120
-rw-r--r--third_party/python/rsa/rsa/pkcs1.py391
-rw-r--r--third_party/python/rsa/rsa/prime.py166
-rw-r--r--third_party/python/rsa/rsa/randnum.py85
-rw-r--r--third_party/python/rsa/rsa/transform.py220
-rw-r--r--third_party/python/rsa/rsa/util.py81
-rw-r--r--third_party/python/rsa/rsa/varblock.py155
-rw-r--r--third_party/python/rsa/run_tests.py43
-rw-r--r--third_party/python/rsa/setup.cfg8
-rwxr-xr-xthird_party/python/rsa/setup.py41
-rw-r--r--third_party/python/rsa/tests/__init__.py0
-rw-r--r--third_party/python/rsa/tests/constants.py9
-rw-r--r--third_party/python/rsa/tests/py2kconstants.py3
-rw-r--r--third_party/python/rsa/tests/py3kconstants.py3
-rw-r--r--third_party/python/rsa/tests/test_bigfile.py60
-rw-r--r--third_party/python/rsa/tests/test_common.py61
-rw-r--r--third_party/python/rsa/tests/test_compat.py17
-rw-r--r--third_party/python/rsa/tests/test_integers.py36
-rw-r--r--third_party/python/rsa/tests/test_load_save_keys.py127
-rw-r--r--third_party/python/rsa/tests/test_pem.py14
-rw-r--r--third_party/python/rsa/tests/test_pkcs1.py94
-rw-r--r--third_party/python/rsa/tests/test_strings.py28
-rw-r--r--third_party/python/rsa/tests/test_transform.py67
-rw-r--r--third_party/python/rsa/tests/test_varblock.py82
-rw-r--r--third_party/python/scandir/LICENSE.txt27
-rw-r--r--third_party/python/scandir/MANIFEST.in6
-rw-r--r--third_party/python/scandir/PKG-INFO238
-rw-r--r--third_party/python/scandir/README.rst211
-rw-r--r--third_party/python/scandir/_scandir.c1833
-rw-r--r--third_party/python/scandir/benchmark.py192
-rw-r--r--third_party/python/scandir/osdefs.h48
-rw-r--r--third_party/python/scandir/scandir.py693
-rw-r--r--third_party/python/scandir/setup.cfg4
-rw-r--r--third_party/python/scandir/setup.py80
-rw-r--r--third_party/python/scandir/test/run_tests.py25
-rw-r--r--third_party/python/scandir/test/test_scandir.py320
-rw-r--r--third_party/python/scandir/test/test_walk.py213
-rw-r--r--third_party/python/scandir/winreparse.h53
-rw-r--r--third_party/python/sentry-sdk/LICENSE9
-rw-r--r--third_party/python/sentry-sdk/MANIFEST.in2
-rw-r--r--third_party/python/sentry-sdk/PKG-INFO43
-rw-r--r--third_party/python/sentry-sdk/README.md42
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk.egg-info/PKG-INFO43
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk.egg-info/SOURCES.txt64
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk.egg-info/dependency_links.txt1
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk.egg-info/not-zip-safe1
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk.egg-info/requires.txt39
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk.egg-info/top_level.txt1
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/__init__.py25
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/_compat.py92
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/_types.py37
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/api.py256
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/client.py406
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/consts.py97
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/debug.py44
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/envelope.py293
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/hub.py647
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/__init__.py183
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/_wsgi_common.py180
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/aiohttp.py211
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/argv.py33
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/asgi.py194
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/atexit.py62
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/aws_lambda.py254
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/beam.py184
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/bottle.py199
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/celery.py258
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/dedupe.py43
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/django/__init__.py484
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/django/asgi.py47
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/django/middleware.py136
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/django/templates.py121
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/django/transactions.py134
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/excepthook.py76
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/falcon.py209
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/flask.py260
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/gnu_backtrace.py107
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/logging.py237
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/modules.py56
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/pyramid.py217
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/redis.py70
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/rq.py150
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/sanic.py233
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/serverless.py87
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/spark/__init__.py4
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/spark/spark_driver.py263
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/spark/spark_worker.py120
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/sqlalchemy.py86
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/stdlib.py230
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/threading.py90
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/tornado.py203
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/trytond.py55
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/integrations/wsgi.py309
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/py.typed0
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/scope.py408
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/serializer.py336
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/sessions.py249
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/tracing.py498
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/transport.py365
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/utils.py831
-rw-r--r--third_party/python/sentry-sdk/sentry_sdk/worker.py142
-rw-r--r--third_party/python/sentry-sdk/setup.cfg7
-rw-r--r--third_party/python/sentry-sdk/setup.py58
-rw-r--r--third_party/python/six/CHANGES315
-rw-r--r--third_party/python/six/LICENSE18
-rw-r--r--third_party/python/six/MANIFEST.in6
-rw-r--r--third_party/python/six/PKG-INFO50
-rw-r--r--third_party/python/six/README.rst32
-rw-r--r--third_party/python/six/documentation/Makefile130
-rw-r--r--third_party/python/six/documentation/conf.py217
-rw-r--r--third_party/python/six/documentation/index.rst875
-rw-r--r--third_party/python/six/setup.cfg24
-rw-r--r--third_party/python/six/setup.py58
-rw-r--r--third_party/python/six/six.py963
-rw-r--r--third_party/python/six/test_six.py1060
-rw-r--r--third_party/python/slugid/PKG-INFO14
-rw-r--r--third_party/python/slugid/README.rst122
-rw-r--r--third_party/python/slugid/setup.cfg5
-rwxr-xr-xthird_party/python/slugid/setup.py39
-rw-r--r--third_party/python/slugid/slugid/__init__.py48
-rw-r--r--third_party/python/slugid/slugid/slugid.py48
-rw-r--r--third_party/python/taskcluster-urls/LICENSE373
-rw-r--r--third_party/python/taskcluster-urls/MANIFEST.in4
-rw-r--r--third_party/python/taskcluster-urls/PKG-INFO253
-rw-r--r--third_party/python/taskcluster-urls/README.md236
-rw-r--r--third_party/python/taskcluster-urls/package.json25
-rw-r--r--third_party/python/taskcluster-urls/setup.cfg7
-rw-r--r--third_party/python/taskcluster-urls/setup.py28
-rw-r--r--third_party/python/taskcluster-urls/taskcluster_urls/__init__.py66
-rw-r--r--third_party/python/taskcluster/PKG-INFO13
-rw-r--r--third_party/python/taskcluster/README.md4383
-rw-r--r--third_party/python/taskcluster/setup.cfg8
-rw-r--r--third_party/python/taskcluster/setup.py89
-rw-r--r--third_party/python/taskcluster/taskcluster/__init__.py17
-rw-r--r--third_party/python/taskcluster/taskcluster/_client_importer.py18
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/__init__.py16
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/_client_importer.py18
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/asyncclient.py400
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/asyncutils.py116
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/auth.py867
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/authevents.py180
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/awsprovisioner.py450
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/awsprovisionerevents.py142
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/ec2manager.py475
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/github.py205
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/githubevents.py194
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/hooks.py324
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/index.py278
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/login.py89
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/notify.py125
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/pulse.py135
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/purgecache.py124
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/purgecacheevents.py73
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/queue.py1134
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/queueevents.py718
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/secrets.py149
-rw-r--r--third_party/python/taskcluster/taskcluster/aio/treeherderevents.py72
-rw-r--r--third_party/python/taskcluster/taskcluster/auth.py867
-rw-r--r--third_party/python/taskcluster/taskcluster/authevents.py180
-rw-r--r--third_party/python/taskcluster/taskcluster/awsprovisioner.py450
-rw-r--r--third_party/python/taskcluster/taskcluster/awsprovisionerevents.py142
-rw-r--r--third_party/python/taskcluster/taskcluster/client.py712
-rw-r--r--third_party/python/taskcluster/taskcluster/ec2manager.py475
-rw-r--r--third_party/python/taskcluster/taskcluster/exceptions.py36
-rw-r--r--third_party/python/taskcluster/taskcluster/github.py205
-rw-r--r--third_party/python/taskcluster/taskcluster/githubevents.py194
-rw-r--r--third_party/python/taskcluster/taskcluster/hooks.py324
-rw-r--r--third_party/python/taskcluster/taskcluster/index.py278
-rw-r--r--third_party/python/taskcluster/taskcluster/login.py89
-rw-r--r--third_party/python/taskcluster/taskcluster/notify.py125
-rw-r--r--third_party/python/taskcluster/taskcluster/pulse.py135
-rw-r--r--third_party/python/taskcluster/taskcluster/purgecache.py124
-rw-r--r--third_party/python/taskcluster/taskcluster/purgecacheevents.py73
-rw-r--r--third_party/python/taskcluster/taskcluster/queue.py1134
-rw-r--r--third_party/python/taskcluster/taskcluster/queueevents.py718
-rw-r--r--third_party/python/taskcluster/taskcluster/secrets.py149
-rw-r--r--third_party/python/taskcluster/taskcluster/treeherderevents.py72
-rw-r--r--third_party/python/taskcluster/taskcluster/utils.py348
-rw-r--r--third_party/python/taskcluster/test/test_async.py63
-rw-r--r--third_party/python/taskcluster/test/test_client.py955
-rw-r--r--third_party/python/taskcluster/test/test_utils.py439
-rw-r--r--third_party/python/urllib3/CHANGES.rst1112
-rw-r--r--third_party/python/urllib3/CONTRIBUTORS.txt304
-rw-r--r--third_party/python/urllib3/LICENSE.txt21
-rw-r--r--third_party/python/urllib3/MANIFEST.in5
-rw-r--r--third_party/python/urllib3/PKG-INFO1253
-rw-r--r--third_party/python/urllib3/README.rst104
-rw-r--r--third_party/python/urllib3/dev-requirements.txt16
-rw-r--r--third_party/python/urllib3/dummyserver/__init__.py0
-rw-r--r--third_party/python/urllib3/dummyserver/certs/README.rst17
-rw-r--r--third_party/python/urllib3/dummyserver/certs/cacert.key27
-rw-r--r--third_party/python/urllib3/dummyserver/certs/cacert.pem21
-rw-r--r--third_party/python/urllib3/dummyserver/certs/server.crt21
-rw-r--r--third_party/python/urllib3/dummyserver/certs/server.key27
-rw-r--r--third_party/python/urllib3/dummyserver/handlers.py328
-rwxr-xr-xthird_party/python/urllib3/dummyserver/proxy.py139
-rwxr-xr-xthird_party/python/urllib3/dummyserver/server.py188
-rw-r--r--third_party/python/urllib3/dummyserver/testcase.py210
-rw-r--r--third_party/python/urllib3/setup.cfg31
-rwxr-xr-xthird_party/python/urllib3/setup.py83
-rw-r--r--third_party/python/urllib3/src/urllib3.egg-info/PKG-INFO1253
-rw-r--r--third_party/python/urllib3/src/urllib3.egg-info/SOURCES.txt115
-rw-r--r--third_party/python/urllib3/src/urllib3.egg-info/dependency_links.txt1
-rw-r--r--third_party/python/urllib3/src/urllib3.egg-info/requires.txt15
-rw-r--r--third_party/python/urllib3/src/urllib3.egg-info/top_level.txt1
-rw-r--r--third_party/python/urllib3/src/urllib3/__init__.py86
-rw-r--r--third_party/python/urllib3/src/urllib3/_collections.py336
-rw-r--r--third_party/python/urllib3/src/urllib3/connection.py423
-rw-r--r--third_party/python/urllib3/src/urllib3/connectionpool.py1033
-rw-r--r--third_party/python/urllib3/src/urllib3/contrib/__init__.py0
-rw-r--r--third_party/python/urllib3/src/urllib3/contrib/_appengine_environ.py36
-rw-r--r--third_party/python/urllib3/src/urllib3/contrib/_securetransport/__init__.py0
-rw-r--r--third_party/python/urllib3/src/urllib3/contrib/_securetransport/bindings.py493
-rw-r--r--third_party/python/urllib3/src/urllib3/contrib/_securetransport/low_level.py328
-rw-r--r--third_party/python/urllib3/src/urllib3/contrib/appengine.py314
-rw-r--r--third_party/python/urllib3/src/urllib3/contrib/ntlmpool.py121
-rw-r--r--third_party/python/urllib3/src/urllib3/contrib/pyopenssl.py501
-rw-r--r--third_party/python/urllib3/src/urllib3/contrib/securetransport.py864
-rw-r--r--third_party/python/urllib3/src/urllib3/contrib/socks.py210
-rw-r--r--third_party/python/urllib3/src/urllib3/exceptions.py272
-rw-r--r--third_party/python/urllib3/src/urllib3/fields.py273
-rw-r--r--third_party/python/urllib3/src/urllib3/filepost.py98
-rw-r--r--third_party/python/urllib3/src/urllib3/packages/__init__.py5
-rw-r--r--third_party/python/urllib3/src/urllib3/packages/backports/__init__.py0
-rw-r--r--third_party/python/urllib3/src/urllib3/packages/backports/makefile.py52
-rw-r--r--third_party/python/urllib3/src/urllib3/packages/six.py1021
-rw-r--r--third_party/python/urllib3/src/urllib3/packages/ssl_match_hostname/__init__.py19
-rw-r--r--third_party/python/urllib3/src/urllib3/packages/ssl_match_hostname/_implementation.py160
-rw-r--r--third_party/python/urllib3/src/urllib3/poolmanager.py492
-rw-r--r--third_party/python/urllib3/src/urllib3/request.py171
-rw-r--r--third_party/python/urllib3/src/urllib3/response.py821
-rw-r--r--third_party/python/urllib3/src/urllib3/util/__init__.py46
-rw-r--r--third_party/python/urllib3/src/urllib3/util/connection.py138
-rw-r--r--third_party/python/urllib3/src/urllib3/util/queue.py21
-rw-r--r--third_party/python/urllib3/src/urllib3/util/request.py135
-rw-r--r--third_party/python/urllib3/src/urllib3/util/response.py86
-rw-r--r--third_party/python/urllib3/src/urllib3/util/retry.py453
-rw-r--r--third_party/python/urllib3/src/urllib3/util/ssl_.py414
-rw-r--r--third_party/python/urllib3/src/urllib3/util/timeout.py261
-rw-r--r--third_party/python/urllib3/src/urllib3/util/url.py430
-rw-r--r--third_party/python/urllib3/src/urllib3/util/wait.py153
-rw-r--r--third_party/python/virtualenv/README_MOZILLA10
-rw-r--r--third_party/python/virtualenv/__main__.py168
-rw-r--r--third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/LICENSE.txt23
-rw-r--r--third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/METADATA264
-rw-r--r--third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/RECORD6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/WHEEL6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py608
-rw-r--r--third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/__init__.py6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/configparser/__init__.py1473
-rw-r--r--third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/configparser/helpers.py274
-rw-r--r--third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/LICENSE7
-rw-r--r--third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/METADATA259
-rw-r--r--third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/RECORD9
-rw-r--r--third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/WHEEL6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/top_level.txt2
-rw-r--r--third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser.py61
-rw-r--r--third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/LICENSE.txt122
-rw-r--r--third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/METADATA70
-rw-r--r--third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/RECORD6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/WHEEL6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2.py518
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/METADATA24
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/RECORD26
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/WHEEL5
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py23
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py41
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py764
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.cfg84
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py786
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py2607
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py1120
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py1339
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py516
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py1302
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py393
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py131
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py1056
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py355
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py419
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/t32.exebin0 -> 96768 bytes
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/t64.exebin0 -> 105984 bytes
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py1761
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py736
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/w32.exebin0 -> 90112 bytes
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/w64.exebin0 -> 99840 bytes
-rw-r--r--third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py1018
-rw-r--r--third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/LICENSE24
-rw-r--r--third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/METADATA156
-rw-r--r--third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/RECORD6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/WHEEL5
-rw-r--r--third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py451
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/LICENSE13
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/METADATA65
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/RECORD21
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/WHEEL6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/__init__.py541
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/_compat.py143
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/LICENSE13
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/METADATA73
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/RECORD7
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/WHEEL6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/__init__.py644
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/_compat.py152
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/LICENSE13
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/METADATA94
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/RECORD7
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/WHEEL5
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/__init__.py631
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/_compat.py75
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/LICENSE13
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/METADATA49
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/RECORD39
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/WHEEL6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/__init__.py36
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_compat.py23
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py2.py270
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py3.py312
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/abc.py58
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/version.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/LICENSE13
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/METADATA41
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/RECORD42
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/WHEEL6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/__init__.py53
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_common.py120
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_compat.py139
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_py2.py107
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_py3.py164
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/abc.py142
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/py.typed0
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/readers.py123
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/trees.py6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/LICENSE13
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/METADATA41
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/RECORD42
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/WHEEL6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/__init__.py53
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_common.py120
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_compat.py139
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py2.py107
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py3.py160
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/abc.py142
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/py.typed0
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/readers.py123
-rw-r--r--third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/trees.py6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/DESCRIPTION.rst61
-rw-r--r--third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/METADATA88
-rw-r--r--third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/RECORD7
-rw-r--r--third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/WHEEL6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/metadata.json1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2/__init__.py1809
-rw-r--r--third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/LICENSE.txt27
-rw-r--r--third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/METADATA238
-rw-r--r--third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/RECORD6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/WHEEL5
-rw-r--r--third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/top_level.txt2
-rw-r--r--third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir.py693
-rw-r--r--third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/LICENSE18
-rw-r--r--third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/METADATA49
-rw-r--r--third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/RECORD6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/WHEEL6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six.py982
-rw-r--r--third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/LICENSE254
-rw-r--r--third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/METADATA41
-rw-r--r--third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/RECORD6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/WHEEL5
-rw-r--r--third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing.py2422
-rw-r--r--third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/LICENSE254
-rw-r--r--third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/METADATA50
-rw-r--r--third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/RECORD6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/WHEEL5
-rw-r--r--third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing.py2550
-rw-r--r--third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/LICENSE7
-rw-r--r--third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/METADATA49
-rw-r--r--third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/RECORD6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/WHEEL6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp.py286
-rw-r--r--third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/LICENSE19
-rw-r--r--third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/METADATA54
-rw-r--r--third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/RECORD6
-rw-r--r--third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/WHEEL5
-rw-r--r--third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp.py314
-rw-r--r--third_party/python/virtualenv/distributions.json83
-rw-r--r--third_party/python/virtualenv/modules.json314
-rw-r--r--third_party/python/virtualenv/virtualenv-20.2.2.dist-info/LICENSE20
-rw-r--r--third_party/python/virtualenv/virtualenv-20.2.2.dist-info/METADATA92
-rw-r--r--third_party/python/virtualenv/virtualenv-20.2.2.dist-info/RECORD122
-rw-r--r--third_party/python/virtualenv/virtualenv-20.2.2.dist-info/WHEEL6
-rw-r--r--third_party/python/virtualenv/virtualenv-20.2.2.dist-info/entry_points.txt32
-rw-r--r--third_party/python/virtualenv/virtualenv-20.2.2.dist-info/top_level.txt1
-rw-r--r--third_party/python/virtualenv/virtualenv-20.2.2.dist-info/zip-safe1
-rw-r--r--third_party/python/virtualenv/virtualenv.py55
-rw-r--r--third_party/python/virtualenv/virtualenv/__init__.py10
-rw-r--r--third_party/python/virtualenv/virtualenv/__main__.py77
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/__init__.py19
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/activator.py44
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/bash/__init__.py13
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/bash/activate.sh87
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/batch/__init__.py23
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/batch/activate.bat40
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/batch/deactivate.bat19
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/batch/pydoc.bat1
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/cshell/__init__.py14
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/cshell/activate.csh55
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/fish/__init__.py10
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/fish/activate.fish100
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/powershell/__init__.py10
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/powershell/activate.ps160
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/python/__init__.py35
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/python/activate_this.py32
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/via_template.py67
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/xonsh/__init__.py14
-rw-r--r--third_party/python/virtualenv/virtualenv/activation/xonsh/activate.xsh46
-rw-r--r--third_party/python/virtualenv/virtualenv/app_data/__init__.py57
-rw-r--r--third_party/python/virtualenv/virtualenv/app_data/base.py95
-rw-r--r--third_party/python/virtualenv/virtualenv/app_data/na.py66
-rw-r--r--third_party/python/virtualenv/virtualenv/app_data/read_only.py34
-rw-r--r--third_party/python/virtualenv/virtualenv/app_data/via_disk_folder.py177
-rw-r--r--third_party/python/virtualenv/virtualenv/app_data/via_tempdir.py27
-rw-r--r--third_party/python/virtualenv/virtualenv/config/__init__.py1
-rw-r--r--third_party/python/virtualenv/virtualenv/config/cli/__init__.py1
-rw-r--r--third_party/python/virtualenv/virtualenv/config/cli/parser.py120
-rw-r--r--third_party/python/virtualenv/virtualenv/config/convert.py98
-rw-r--r--third_party/python/virtualenv/virtualenv/config/env_var.py29
-rw-r--r--third_party/python/virtualenv/virtualenv/config/ini.py83
-rw-r--r--third_party/python/virtualenv/virtualenv/create/__init__.py1
-rw-r--r--third_party/python/virtualenv/virtualenv/create/creator.py238
-rw-r--r--third_party/python/virtualenv/virtualenv/create/debug.py110
-rw-r--r--third_party/python/virtualenv/virtualenv/create/describe.py117
-rw-r--r--third_party/python/virtualenv/virtualenv/create/pyenv_cfg.py61
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/__init__.py0
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/_virtualenv.py130
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/api.py112
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/__init__.py0
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/builtin_way.py17
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/__init__.py1
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/common.py65
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython2.py102
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py84
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py298
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/__init__.py0
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/common.py53
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py121
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py63
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/__init__.py0
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/python2.py111
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/site.py164
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/ref.py172
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/via_global_self_do.py114
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/store.py26
-rw-r--r--third_party/python/virtualenv/virtualenv/create/via_global_ref/venv.py83
-rw-r--r--third_party/python/virtualenv/virtualenv/discovery/__init__.py1
-rw-r--r--third_party/python/virtualenv/virtualenv/discovery/builtin.py163
-rw-r--r--third_party/python/virtualenv/virtualenv/discovery/cached_py_info.py148
-rw-r--r--third_party/python/virtualenv/virtualenv/discovery/discover.py46
-rw-r--r--third_party/python/virtualenv/virtualenv/discovery/py_info.py490
-rw-r--r--third_party/python/virtualenv/virtualenv/discovery/py_spec.py122
-rw-r--r--third_party/python/virtualenv/virtualenv/discovery/windows/__init__.py28
-rw-r--r--third_party/python/virtualenv/virtualenv/discovery/windows/pep514.py161
-rw-r--r--third_party/python/virtualenv/virtualenv/info.py65
-rw-r--r--third_party/python/virtualenv/virtualenv/report.py57
-rw-r--r--third_party/python/virtualenv/virtualenv/run/__init__.py151
-rw-r--r--third_party/python/virtualenv/virtualenv/run/plugin/__init__.py0
-rw-r--r--third_party/python/virtualenv/virtualenv/run/plugin/activators.py53
-rw-r--r--third_party/python/virtualenv/virtualenv/run/plugin/base.py58
-rw-r--r--third_party/python/virtualenv/virtualenv/run/plugin/creators.py77
-rw-r--r--third_party/python/virtualenv/virtualenv/run/plugin/discovery.py32
-rw-r--r--third_party/python/virtualenv/virtualenv/run/plugin/seeders.py35
-rw-r--r--third_party/python/virtualenv/virtualenv/run/session.py91
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/__init__.py1
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/embed/__init__.py0
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/embed/base_embed.py118
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/embed/pip_invoke.py56
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/__init__.py0
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/__init__.py0
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/base.py158
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/copy.py35
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/symlink.py61
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/via_app_data.py139
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/seeder.py39
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/__init__.py11
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/acquire.py120
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/bundle.py49
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/embed/__init__.py62
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-19.1.1-py2.py3-none-any.whlbin0 -> 1360957 bytes
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-20.3.1-py2.py3-none-any.whlbin0 -> 1518513 bytes
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-43.0.0-py2.py3-none-any.whlbin0 -> 583228 bytes
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-44.1.1-py2.py3-none-any.whlbin0 -> 583493 bytes
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-50.3.2-py3-none-any.whlbin0 -> 785194 bytes
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-51.0.0-py3-none-any.whlbin0 -> 785164 bytes
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.33.6-py2.py3-none-any.whlbin0 -> 21556 bytes
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.36.1-py2.py3-none-any.whlbin0 -> 34788 bytes
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/periodic_update.py367
-rw-r--r--third_party/python/virtualenv/virtualenv/seed/wheels/util.py116
-rw-r--r--third_party/python/virtualenv/virtualenv/util/__init__.py11
-rw-r--r--third_party/python/virtualenv/virtualenv/util/error.py13
-rw-r--r--third_party/python/virtualenv/virtualenv/util/lock.py168
-rw-r--r--third_party/python/virtualenv/virtualenv/util/path/__init__.py16
-rw-r--r--third_party/python/virtualenv/virtualenv/util/path/_pathlib/__init__.py63
-rw-r--r--third_party/python/virtualenv/virtualenv/util/path/_pathlib/via_os_path.py148
-rw-r--r--third_party/python/virtualenv/virtualenv/util/path/_permission.py32
-rw-r--r--third_party/python/virtualenv/virtualenv/util/path/_sync.py98
-rw-r--r--third_party/python/virtualenv/virtualenv/util/six.py50
-rw-r--r--third_party/python/virtualenv/virtualenv/util/subprocess/__init__.py40
-rw-r--r--third_party/python/virtualenv/virtualenv/util/subprocess/_win_subprocess.py175
-rw-r--r--third_party/python/virtualenv/virtualenv/util/zipapp.py33
-rw-r--r--third_party/python/virtualenv/virtualenv/version.py3
-rw-r--r--third_party/python/voluptuous/CHANGELOG.md104
-rw-r--r--third_party/python/voluptuous/COPYING25
-rw-r--r--third_party/python/voluptuous/MANIFEST.in4
-rw-r--r--third_party/python/voluptuous/PKG-INFO744
-rw-r--r--third_party/python/voluptuous/README.md723
-rw-r--r--third_party/python/voluptuous/setup.cfg9
-rw-r--r--third_party/python/voluptuous/setup.py40
-rw-r--r--third_party/python/voluptuous/voluptuous/__init__.py9
-rw-r--r--third_party/python/voluptuous/voluptuous/error.py199
-rw-r--r--third_party/python/voluptuous/voluptuous/humanize.py40
-rw-r--r--third_party/python/voluptuous/voluptuous/schema_builder.py1292
-rw-r--r--third_party/python/voluptuous/voluptuous/tests/__init__.py1
-rw-r--r--third_party/python/voluptuous/voluptuous/tests/tests.md273
-rw-r--r--third_party/python/voluptuous/voluptuous/tests/tests.py1265
-rw-r--r--third_party/python/voluptuous/voluptuous/util.py154
-rw-r--r--third_party/python/voluptuous/voluptuous/validators.py1004
-rw-r--r--third_party/python/yamllint/LICENSE674
-rw-r--r--third_party/python/yamllint/MANIFEST.in4
-rw-r--r--third_party/python/yamllint/PKG-INFO30
-rw-r--r--third_party/python/yamllint/README.rst144
-rw-r--r--third_party/python/yamllint/setup.cfg17
-rw-r--r--third_party/python/yamllint/setup.py56
-rw-r--r--third_party/python/yamllint/yamllint/__init__.py31
-rw-r--r--third_party/python/yamllint/yamllint/__main__.py4
-rw-r--r--third_party/python/yamllint/yamllint/cli.py207
-rw-r--r--third_party/python/yamllint/yamllint/conf/default.yaml33
-rw-r--r--third_party/python/yamllint/yamllint/conf/relaxed.yaml29
-rw-r--r--third_party/python/yamllint/yamllint/config.py205
-rw-r--r--third_party/python/yamllint/yamllint/linter.py240
-rw-r--r--third_party/python/yamllint/yamllint/parser.py161
-rw-r--r--third_party/python/yamllint/yamllint/rules/__init__.py70
-rw-r--r--third_party/python/yamllint/yamllint/rules/braces.py143
-rw-r--r--third_party/python/yamllint/yamllint/rules/brackets.py145
-rw-r--r--third_party/python/yamllint/yamllint/rules/colons.py105
-rw-r--r--third_party/python/yamllint/yamllint/rules/commas.py131
-rw-r--r--third_party/python/yamllint/yamllint/rules/comments.py104
-rw-r--r--third_party/python/yamllint/yamllint/rules/comments_indentation.py139
-rw-r--r--third_party/python/yamllint/yamllint/rules/common.py89
-rw-r--r--third_party/python/yamllint/yamllint/rules/document_end.py107
-rw-r--r--third_party/python/yamllint/yamllint/rules/document_start.py93
-rw-r--r--third_party/python/yamllint/yamllint/rules/empty_lines.py108
-rw-r--r--third_party/python/yamllint/yamllint/rules/empty_values.py96
-rw-r--r--third_party/python/yamllint/yamllint/rules/hyphens.py88
-rw-r--r--third_party/python/yamllint/yamllint/rules/indentation.py575
-rw-r--r--third_party/python/yamllint/yamllint/rules/key_duplicates.py100
-rw-r--r--third_party/python/yamllint/yamllint/rules/key_ordering.py109
-rw-r--r--third_party/python/yamllint/yamllint/rules/line_length.py149
-rw-r--r--third_party/python/yamllint/yamllint/rules/new_line_at_end_of_file.py37
-rw-r--r--third_party/python/yamllint/yamllint/rules/new_lines.py46
-rw-r--r--third_party/python/yamllint/yamllint/rules/octal_values.py95
-rw-r--r--third_party/python/yamllint/yamllint/rules/quoted_strings.py230
-rw-r--r--third_party/python/yamllint/yamllint/rules/trailing_spaces.py62
-rw-r--r--third_party/python/yamllint/yamllint/rules/truthy.py149
-rw-r--r--third_party/python/zipp/.coveragerc5
-rw-r--r--third_party/python/zipp/.flake89
-rw-r--r--third_party/python/zipp/.pre-commit-config.yaml10
-rw-r--r--third_party/python/zipp/.readthedocs.yml5
-rw-r--r--third_party/python/zipp/.travis.yml18
-rw-r--r--third_party/python/zipp/CHANGES.rst137
-rw-r--r--third_party/python/zipp/LICENSE19
-rw-r--r--third_party/python/zipp/PKG-INFO45
-rw-r--r--third_party/python/zipp/README.rst27
-rw-r--r--third_party/python/zipp/appveyor.yml24
-rw-r--r--third_party/python/zipp/azure-pipelines.yml71
-rw-r--r--third_party/python/zipp/conftest.py0
-rw-r--r--third_party/python/zipp/pyproject.toml8
-rw-r--r--third_party/python/zipp/setup.cfg39
-rw-r--r--third_party/python/zipp/setup.py6
-rw-r--r--third_party/python/zipp/skeleton.md161
-rw-r--r--third_party/python/zipp/test_zipp.py261
-rw-r--r--third_party/python/zipp/tox.ini41
-rw-r--r--third_party/python/zipp/zipp.py285
4290 files changed, 655471 insertions, 0 deletions
diff --git a/third_party/python/Click/CHANGES.rst b/third_party/python/Click/CHANGES.rst
new file mode 100644
index 0000000000..a98fabdb0d
--- /dev/null
+++ b/third_party/python/Click/CHANGES.rst
@@ -0,0 +1,635 @@
+Click Changelog
+===============
+
+
+Version 7.0
+-----------
+
+Released 2018-09-25
+
+- Drop support for Python 2.6 and 3.3. (`#967`_, `#976`_)
+- Wrap ``click.Choice``'s missing message. (`#202`_, `#1000`_)
+- Add native ZSH autocompletion support. (`#323`_, `#865`_)
+- Document that ANSI color info isn't parsed from bytearrays in
+ Python 2. (`#334`_)
+- Document byte-stripping behavior of ``CliRunner``. (`#334`_,
+ `#1010`_)
+- Usage errors now hint at the ``--help`` option. (`#393`_, `#557`_)
+- Implement streaming pager. (`#409`_, `#889`_)
+- Extract bar formatting to its own method. (`#414`_)
+- Add ``DateTime`` type for converting input in given date time
+ formats. (`#423`_)
+- ``secho``'s first argument can now be ``None``, like in ``echo``.
+ (`#424`_)
+- Fixes a ``ZeroDivisionError`` in ``ProgressBar.make_step``, when the
+ arg passed to the first call of ``ProgressBar.update`` is 0.
+ (`#447`_, `#1012`_)
+- Show progressbar only if total execution time is visible. (`#487`_)
+- Added the ability to hide commands and options from help. (`#500`_)
+- Document that options can be ``required=True``. (`#514`_, `#1022`_)
+- Non-standalone calls to ``Context.exit`` return the exit code,
+ rather than calling ``sys.exit``. (`#533`_, `#667`_, `#1098`_)
+- ``click.getchar()`` returns Unicode in Python 3 on Windows,
+ consistent with other platforms. (`#537`_, `#821`_, `#822`_,
+ `#1088`_, `#1108`_)
+- Added ``FloatRange`` type. (`#538`_, `#553`_)
+- Added support for bash completion of ``type=click.Choice`` for
+ ``Options`` and ``Arguments``. (`#535`_, `#681`_)
+- Only allow one positional arg for ``Argument`` parameter
+ declaration. (`#568`_, `#574`_, `#1014`_)
+- Add ``case_sensitive=False`` as an option to Choice. (`#569`_)
+- ``click.getchar()`` correctly raises ``KeyboardInterrupt`` on "^C"
+ and ``EOFError`` on "^D" on Linux. (`#583`_, `#1115`_)
+- Fix encoding issue with ``click.getchar(echo=True)`` on Linux.
+ (`#1115`_)
+- ``param_hint`` in errors now derived from param itself. (`#598`_,
+ `#704`_, `#709`_)
+- Add a test that ensures that when an argument is formatted into a
+ usage error, its metavar is used, not its name. (`#612`_)
+- Allow setting ``prog_name`` as extra in ``CliRunner.invoke``.
+ (`#616`_, `#999`_)
+- Help text taken from docstrings truncates at the ``\f`` form feed
+ character, useful for hiding Sphinx-style parameter documentation.
+ (`#629`_, `#1091`_)
+- ``launch`` now works properly under Cygwin. (`#650`_)
+- Update progress after iteration. (`#651`_, `#706`_)
+- ``CliRunner.invoke`` now may receive ``args`` as a string
+ representing a Unix shell command. (`#664`_)
+- Make ``Argument.make_metavar()`` default to type metavar. (`#675`_)
+- Add documentation for ``ignore_unknown_options``. (`#684`_)
+- Add bright colors support for ``click.style`` and fix the reset
+ option for parameters ``fg`` and ``bg``. (`#703`_, `#809`_)
+- Add ``show_envvar`` for showing environment variables in help.
+ (`#710`_)
+- Avoid ``BrokenPipeError`` during interpreter shutdown when stdout or
+ stderr is a closed pipe. (`#712`_, `#1106`_)
+- Document customizing option names. (`#725`_, `#1016`_)
+- Disable ``sys._getframe()`` on Python interpreters that don't
+ support it. (`#728`_)
+- Fix bug in test runner when calling ``sys.exit`` with ``None``.
+ (`#739`_)
+- Clarify documentation on command line options. (`#741`_, `#1003`_)
+- Fix crash on Windows console. (`#744`_)
+- Fix bug that caused bash completion to give improper completions on
+ chained commands. (`#754`_, `#774`_)
+- Added support for dynamic bash completion from a user-supplied
+ callback. (`#755`_)
+- Added support for bash completions containing spaces. (`#773`_)
+- Allow autocompletion function to determine whether or not to return
+ completions that start with the incomplete argument. (`#790`_,
+ `#806`_)
+- Fix option naming routine to match documentation and be
+ deterministic. (`#793`_, `#794`_)
+- Fix path validation bug. (`#795`_, `#1020`_)
+- Add test and documentation for ``Option`` naming: functionality.
+ (`#799`_)
+- Update doc to match arg name for ``path_type``. (`#801`_)
+- Raw strings added so correct escaping occurs. (`#807`_)
+- Fix 16k character limit of ``click.echo`` on Windows. (`#816`_,
+ `#819`_)
+- Overcome 64k character limit when writing to binary stream on
+ Windows 7. (`#825`_, `#830`_)
+- Add bool conversion for "t" and "f". (`#842`_)
+- ``NoSuchOption`` errors take ``ctx`` so that ``--help`` hint gets
+ printed in error output. (`#860`_)
+- Fixed the behavior of Click error messages with regard to Unicode
+  on 2.x and 3.x. The message is now always Unicode and the str and
+  Unicode special methods work as you expect on that platform.
+ (`#862`_)
+- Progress bar now uses stderr by default. (`#863`_)
+- Add support for auto-completion documentation. (`#866`_, `#869`_)
+- Allow ``CliRunner`` to separate stdout and stderr. (`#868`_)
+- Fix variable precedence. (`#873`_, `#874`_)
+- Fix invalid escape sequences. (`#877`_)
+- Fix ``ResourceWarning`` that occurs during some tests. (`#878`_)
+- When detecting a misconfigured locale, don't fail if the ``locale``
+ command fails. (`#880`_)
+- Add ``case_sensitive=False`` as an option to ``Choice`` types.
+ (`#887`_)
+- Force stdout/stderr writable. This works around issues with badly
+ patched standard streams like those from Jupyter. (`#918`_)
+- Fix completion of subcommand options after last argument (`#919`_,
+ `#930`_)
+- ``_AtomicFile`` now uses the ``realpath`` of the original filename
+ so that changing the working directory does not affect it.
+ (`#920`_)
+- Fix incorrect completions when defaults are present (`#925`_,
+ `#930`_)
+- Add copy option attrs so that custom classes can be re-used.
+ (`#926`_, `#994`_)
+- "x" and "a" file modes now use stdout when file is ``"-"``.
+ (`#929`_)
+- Fix missing comma in ``__all__`` list. (`#935`_)
+- Clarify how parameters are named. (`#949`_, `#1009`_)
+- Stdout is now automatically set to non-blocking. (`#954`_)
+- Do not set options twice. (`#962`_)
+- Move ``fcntl`` import. (`#965`_)
+- Fix Google App Engine ``ImportError``. (`#995`_)
+- Better handling of help text for dynamic default option values.
+ (`#996`_)
+- Fix ``get_winterm_size()`` so it correctly returns ``(0, 0)``.
+ (`#997`_)
+- Add test case checking for custom param type. (`#1001`_)
+- Allow short width to address cmd formatting. (`#1002`_)
+- Add details about Python version support. (`#1004`_)
+- Added deprecation flag to commands. (`#1005`_)
+- Fixed issues where ``fd`` was undefined. (`#1007`_)
+- Fix formatting for short help. (`#1008`_)
+- Document how ``auto_envvar_prefix`` works with command groups.
+ (`#1011`_)
+- Don't add newlines by default for progress bars. (`#1013`_)
+- Use Python sorting order for ZSH completions. (`#1047`_, `#1059`_)
+- Document that parameter names are converted to lowercase by default.
+ (`#1055`_)
+- Subcommands that are named by the function now automatically have
+ the underscore replaced with a dash. If you register a function
+ named ``my_command`` it becomes ``my-command`` in the command line
+ interface.
+- Hide hidden commands and options from completion. (`#1058`_,
+ `#1061`_)
+- Fix absolute import blocking Click from being vendored into a
+ project on Windows. (`#1068`_, `#1069`_)
+- Fix issue where a lowercase ``auto_envvar_prefix`` would not be
+ converted to uppercase. (`#1105`_)
+
+.. _#202: https://github.com/pallets/click/issues/202
+.. _#323: https://github.com/pallets/click/issues/323
+.. _#334: https://github.com/pallets/click/issues/334
+.. _#393: https://github.com/pallets/click/issues/393
+.. _#409: https://github.com/pallets/click/issues/409
+.. _#414: https://github.com/pallets/click/pull/414
+.. _#423: https://github.com/pallets/click/pull/423
+.. _#424: https://github.com/pallets/click/pull/424
+.. _#447: https://github.com/pallets/click/issues/447
+.. _#487: https://github.com/pallets/click/pull/487
+.. _#500: https://github.com/pallets/click/pull/500
+.. _#514: https://github.com/pallets/click/issues/514
+.. _#533: https://github.com/pallets/click/pull/533
+.. _#535: https://github.com/pallets/click/issues/535
+.. _#537: https://github.com/pallets/click/issues/537
+.. _#538: https://github.com/pallets/click/pull/538
+.. _#553: https://github.com/pallets/click/pull/553
+.. _#557: https://github.com/pallets/click/pull/557
+.. _#568: https://github.com/pallets/click/issues/568
+.. _#569: https://github.com/pallets/click/issues/569
+.. _#574: https://github.com/pallets/click/issues/574
+.. _#583: https://github.com/pallets/click/issues/583
+.. _#598: https://github.com/pallets/click/issues/598
+.. _#612: https://github.com/pallets/click/pull/612
+.. _#616: https://github.com/pallets/click/issues/616
+.. _#629: https://github.com/pallets/click/pull/629
+.. _#650: https://github.com/pallets/click/pull/650
+.. _#651: https://github.com/pallets/click/issues/651
+.. _#664: https://github.com/pallets/click/pull/664
+.. _#667: https://github.com/pallets/click/issues/667
+.. _#675: https://github.com/pallets/click/pull/675
+.. _#681: https://github.com/pallets/click/pull/681
+.. _#684: https://github.com/pallets/click/pull/684
+.. _#703: https://github.com/pallets/click/issues/703
+.. _#704: https://github.com/pallets/click/issues/704
+.. _#706: https://github.com/pallets/click/pull/706
+.. _#709: https://github.com/pallets/click/pull/709
+.. _#710: https://github.com/pallets/click/pull/710
+.. _#712: https://github.com/pallets/click/pull/712
+.. _#719: https://github.com/pallets/click/issues/719
+.. _#725: https://github.com/pallets/click/issues/725
+.. _#728: https://github.com/pallets/click/pull/728
+.. _#739: https://github.com/pallets/click/pull/739
+.. _#741: https://github.com/pallets/click/issues/741
+.. _#744: https://github.com/pallets/click/issues/744
+.. _#754: https://github.com/pallets/click/issues/754
+.. _#755: https://github.com/pallets/click/pull/755
+.. _#773: https://github.com/pallets/click/pull/773
+.. _#774: https://github.com/pallets/click/pull/774
+.. _#790: https://github.com/pallets/click/issues/790
+.. _#793: https://github.com/pallets/click/issues/793
+.. _#794: https://github.com/pallets/click/pull/794
+.. _#795: https://github.com/pallets/click/issues/795
+.. _#799: https://github.com/pallets/click/pull/799
+.. _#801: https://github.com/pallets/click/pull/801
+.. _#806: https://github.com/pallets/click/pull/806
+.. _#807: https://github.com/pallets/click/pull/807
+.. _#809: https://github.com/pallets/click/pull/809
+.. _#816: https://github.com/pallets/click/pull/816
+.. _#819: https://github.com/pallets/click/pull/819
+.. _#821: https://github.com/pallets/click/issues/821
+.. _#822: https://github.com/pallets/click/issues/822
+.. _#825: https://github.com/pallets/click/issues/825
+.. _#830: https://github.com/pallets/click/pull/830
+.. _#842: https://github.com/pallets/click/pull/842
+.. _#860: https://github.com/pallets/click/issues/860
+.. _#862: https://github.com/pallets/click/issues/862
+.. _#863: https://github.com/pallets/click/pull/863
+.. _#865: https://github.com/pallets/click/pull/865
+.. _#866: https://github.com/pallets/click/issues/866
+.. _#868: https://github.com/pallets/click/pull/868
+.. _#869: https://github.com/pallets/click/pull/869
+.. _#873: https://github.com/pallets/click/issues/873
+.. _#874: https://github.com/pallets/click/pull/874
+.. _#877: https://github.com/pallets/click/pull/877
+.. _#878: https://github.com/pallets/click/pull/878
+.. _#880: https://github.com/pallets/click/pull/880
+.. _#883: https://github.com/pallets/click/pull/883
+.. _#887: https://github.com/pallets/click/pull/887
+.. _#889: https://github.com/pallets/click/pull/889
+.. _#918: https://github.com/pallets/click/pull/918
+.. _#919: https://github.com/pallets/click/issues/919
+.. _#920: https://github.com/pallets/click/pull/920
+.. _#925: https://github.com/pallets/click/issues/925
+.. _#926: https://github.com/pallets/click/issues/926
+.. _#929: https://github.com/pallets/click/pull/929
+.. _#930: https://github.com/pallets/click/pull/930
+.. _#935: https://github.com/pallets/click/pull/935
+.. _#949: https://github.com/pallets/click/issues/949
+.. _#954: https://github.com/pallets/click/pull/954
+.. _#962: https://github.com/pallets/click/pull/962
+.. _#965: https://github.com/pallets/click/pull/965
+.. _#967: https://github.com/pallets/click/pull/967
+.. _#976: https://github.com/pallets/click/pull/976
+.. _#990: https://github.com/pallets/click/pull/990
+.. _#991: https://github.com/pallets/click/pull/991
+.. _#993: https://github.com/pallets/click/pull/993
+.. _#994: https://github.com/pallets/click/pull/994
+.. _#995: https://github.com/pallets/click/pull/995
+.. _#996: https://github.com/pallets/click/pull/996
+.. _#997: https://github.com/pallets/click/pull/997
+.. _#999: https://github.com/pallets/click/pull/999
+.. _#1000: https://github.com/pallets/click/pull/1000
+.. _#1001: https://github.com/pallets/click/pull/1001
+.. _#1002: https://github.com/pallets/click/pull/1002
+.. _#1003: https://github.com/pallets/click/pull/1003
+.. _#1004: https://github.com/pallets/click/pull/1004
+.. _#1005: https://github.com/pallets/click/pull/1005
+.. _#1007: https://github.com/pallets/click/pull/1007
+.. _#1008: https://github.com/pallets/click/pull/1008
+.. _#1009: https://github.com/pallets/click/pull/1009
+.. _#1010: https://github.com/pallets/click/pull/1010
+.. _#1011: https://github.com/pallets/click/pull/1011
+.. _#1012: https://github.com/pallets/click/pull/1012
+.. _#1013: https://github.com/pallets/click/pull/1013
+.. _#1014: https://github.com/pallets/click/pull/1014
+.. _#1016: https://github.com/pallets/click/pull/1016
+.. _#1020: https://github.com/pallets/click/pull/1020
+.. _#1022: https://github.com/pallets/click/pull/1022
+.. _#1027: https://github.com/pallets/click/pull/1027
+.. _#1047: https://github.com/pallets/click/pull/1047
+.. _#1055: https://github.com/pallets/click/pull/1055
+.. _#1058: https://github.com/pallets/click/pull/1058
+.. _#1059: https://github.com/pallets/click/pull/1059
+.. _#1061: https://github.com/pallets/click/pull/1061
+.. _#1068: https://github.com/pallets/click/issues/1068
+.. _#1069: https://github.com/pallets/click/pull/1069
+.. _#1088: https://github.com/pallets/click/issues/1088
+.. _#1091: https://github.com/pallets/click/pull/1091
+.. _#1098: https://github.com/pallets/click/pull/1098
+.. _#1105: https://github.com/pallets/click/pull/1105
+.. _#1106: https://github.com/pallets/click/pull/1106
+.. _#1108: https://github.com/pallets/click/pull/1108
+.. _#1115: https://github.com/pallets/click/pull/1115
+
+
+Version 6.7
+-----------
+
+(bugfix release; released on January 6th 2017)
+
+- Make ``click.progressbar`` work with ``codecs.open`` files. See #637.
+- Fix bug in bash completion with nested subcommands. See #639.
+- Fix test runner not saving caller env correctly. See #644.
+- Fix handling of SIGPIPE. See #626
+- Deal with broken Windows environments such as Google App Engine's. See #711.
+
+Version 6.6
+-----------
+
+(bugfix release; released on April 4th 2016)
+
+- Fix bug in ``click.Path`` where it would crash when passed a ``-``. See #551.
+
+Version 6.4
+-----------
+
+(bugfix release; released on March 24th 2016)
+
+- Fix bug in bash completion where click would discard one or more trailing
+ arguments. See #471.
+
+Version 6.3
+-----------
+
+(bugfix release; released on February 22 2016)
+
+- Fix argument checks for interpreter invoke with ``-m`` and ``-c``
+ on Windows.
+- Fixed a bug that caused locale detection to error out on Python 3.
+
+Version 6.2
+-----------
+
+(bugfix release, released on November 27th 2015)
+
+- Correct fix for hidden progress bars.
+
+Version 6.1
+-----------
+
+(bugfix release, released on November 27th 2015)
+
+- Resolved an issue with invisible progress bars no longer rendering.
+- Disable chain commands with subcommands as they were inherently broken.
+- Fix ``MissingParameter`` not working without parameters passed.
+
+Version 6.0
+-----------
+
+(codename "pow pow", released on November 24th 2015)
+
+- Optimized the progressbar rendering to not render when it did not
+ actually change.
+- Explicitly disallow ``nargs=-1`` with a set default.
+- The context is now closed before it's popped from the stack.
+- Added support for short aliases for the false flag on toggles.
+- Click will now attempt to aid you in debugging locale errors
+  better by asking the OS which locales are available and listing
+  them.
+- Click used to return byte strings on Python 2 in some unit-testing
+ situations. This has been fixed to correctly return unicode strings
+ now.
+- For Windows users on Python 2, Click will now more correctly handle
+  Unicode coming in from the system. This also has the disappointing
+  side effect that filenames will now always be unicode by default in
+  the ``Path`` type, which can introduce small bugs for code not
+  aware of this.
+- Added a ``type`` parameter to ``Path`` to force a specific string type
+ on the value.
+- For users running Python on Windows, the ``echo`` and ``prompt`` functions
+  now work with full unicode functionality in the Python Windows console
+ by emulating an output stream. This also applies to getting the
+ virtual output and input streams via ``click.get_text_stream(...)``.
+- Unittests now always force a certain virtual terminal width.
+- Added support for allowing dashes to indicate standard streams to the
+ ``Path`` type.
+- Multi commands in chain mode no longer propagate arguments left over
+  from parsing to the callbacks. Attaching optional arguments to
+  multi commands is also now disallowed (an exception is raised) when
+  chain mode is enabled.
+- Relaxed restriction that disallowed chained commands to have other
+ chained commands as child commands.
+- Arguments with positive nargs can now have defaults. Previously
+  this configuration would often result in slightly unexpected values
+  being returned.
+
+Version 5.1
+-----------
+
+(bugfix release, released on 17th August 2015)
+
+- Fix a bug in ``pass_obj`` that would accidentally pass the context too.
+
+Version 5.0
+-----------
+
+(codename "tok tok", released on 16th August 2015)
+
+- Removed various deprecated functionality.
+- Atomic files now only accept the ``w`` mode.
+- Change the usage part of help output for very long commands to wrap
+ their arguments onto the next line, indented by 4 spaces.
+- Fix a bug where return code and error messages were incorrect when
+ using ``CliRunner``.
+- added ``get_current_context``.
+- added a ``meta`` dictionary to the context which is shared across the
+ linked list of contexts to allow click utilities to place state there.
+- introduced ``Context.scope``.
+- The ``echo`` function is now threadsafe: It calls the ``write`` method of the
+ underlying object only once.
+- ``prompt(hide_input=True)`` now prints a newline on ``^C``.
+- Click will now warn if users are using ``unicode_literals``.
+- Click will now ignore the ``PAGER`` environment variable if it is empty or
+ contains only whitespace.
+- The ``click-contrib`` GitHub organization was created.
+
+Version 4.1
+-----------
+
+(bugfix release, released on July 14th 2015)
+
+- Fix a bug where error messages would include a trailing ``None`` string.
+- Fix a bug where Click would crash on docstrings with trailing newlines.
+- Support streams with encoding set to ``None`` on Python 3 by barfing with
+ a better error.
+- Handle ^C in less-pager properly.
+- Handle return value of ``None`` from ``sys.getfilesystemencoding``.
+- Fix crash when writing to unicode files with ``click.echo``.
+- Fix type inference with multiple options.
+
+Version 4.0
+-----------
+
+(codename "zoom zoom", released on March 31st 2015)
+
+- Added ``color`` parameters to lots of interfaces that directly or indirectly
+  call into echoing. Previously color support was always auto-detected
+  (with the exception of the ``echo_via_pager`` function). Now you can
+  forcefully enable or disable it, overriding Click's auto-detection.
+- Added an ``UNPROCESSED`` type that performs no type changes, which
+  simplifies text handling on 2.x / 3.x in some special advanced use cases.
+- Added ``NoSuchOption`` and ``BadOptionUsage`` exceptions for more generic
+ handling of errors.
+- Added support for handling of unprocessed options which can be useful in
+ situations where arguments are forwarded to underlying tools.
+- Added ``max_content_width`` parameter to the context which can be used to
+ change the maximum width of help output. By default Click will not format
+ content for more than 80 characters width.
+- Added support for writing prompts to stderr.
+- Fix a bug when showing the default for multiple arguments.
+- Added support for custom subclasses to ``option`` and ``argument``.
+- Fix bug in ``clear()`` on Windows when colorama is installed.
+- Reject ``nargs=-1`` for options properly. Options cannot be variadic.
+- Fixed an issue with bash completion not working properly for commands with
+  non-ASCII characters or dashes.
+- Added a way to manually update the progressbar.
+- Changed the formatting of missing arguments. Previously the internal
+ argument name was shown in error messages, now the metavar is shown if
+ passed. In case an automated metavar is selected, it's stripped of
+ extra formatting first.
+
+Version 3.3
+-----------
+
+(bugfix release, released on September 8th 2014)
+
+- Fixed an issue with error reporting on Python 3 for invalid forwarding
+ of commands.
+
+Version 3.2
+-----------
+
+(bugfix release, released on August 22nd 2014)
+
+- Added missing ``err`` parameter forwarding to the ``secho`` function.
+- Fixed default parameters not being handled properly by the context
+ invoke method. This is a backwards incompatible change if the function
+ was used improperly. See :ref:`upgrade-to-3.2` for more information.
+- Largely removed the `invoked_subcommands` attribute. Due to how the
+  parsing works it cannot be provided in an error-free way, so this
+  API has been deprecated. See :ref:`upgrade-to-3.2` for more information.
+- Restored the functionality of `invoked_subcommand` which was broken as
+ a regression in 3.1.
+
+Version 3.1
+-----------
+
+(bugfix release, released on August 13th 2014)
+
+- Fixed a regression that caused contexts of subcommands to be
+ created before the parent command was invoked which was a
+ regression from earlier Click versions.
+
+Version 3.0
+-----------
+
+(codename "clonk clonk", released on August 12th 2014)
+
+- formatter now no longer attempts to accommodate terminals
+ smaller than 50 characters. If that happens it just assumes
+ a minimal width.
+- added a way to not swallow exceptions in the test system.
+- added better support for colors with pagers and ways to
+ override the autodetection.
+- the CLI runner's result object now has a traceback attached.
+- improved automatic short help detection to work better with
+ dots that do not terminate sentences.
+- when defining options without actual valid option strings
+  now, Click will give an error message instead of silently
+  passing. This should catch situations where users wanted to
+  create arguments instead of options.
+- Restructured Click internally to support vendoring.
+- Added support for multi command chaining.
+- Added support for defaults on options with ``multiple`` and
+ options and arguments with ``nargs != 1``.
+- label passed to ``progressbar`` is no longer rendered with
+ whitespace stripped.
+- added a way to disable the standalone mode of the ``main``
+ method on a Click command to be able to handle errors better.
+- added support for returning values from command callbacks.
+- added simplifications for printing to stderr from ``echo``.
+- added result callbacks for groups.
+- entering a context multiple times defers the cleanup until
+ the last exit occurs.
+- added ``open_file``.
+
+Version 2.6
+-----------
+
+(bugfix release, released on August 11th 2014)
+
+- Fixed an issue where the wrapped streams on Python 3 would be reporting
+ incorrect values for seekable.
+
+Version 2.5
+-----------
+
+(bugfix release, released on July 28th 2014)
+
+- Fixed a bug with text wrapping on Python 3.
+
+Version 2.4
+-----------
+
+(bugfix release, released on July 4th 2014)
+
+- Corrected a bug in the change of the help option in 2.3.
+
+Version 2.3
+-----------
+
+(bugfix release, released on July 3rd 2014)
+
+- Fixed an incorrectly formatted help record for count options.
+- Add support for ansi code stripping on Windows if colorama
+ is not available.
+- restored the Click 1.0 handling of the help parameter for certain
+ edge cases.
+
+Version 2.2
+-----------
+
+(bugfix release, released on June 26th 2014)
+
+- fixed tty detection on PyPy.
+- fixed an issue that progress bars were not rendered when the
+ context manager was entered.
+
+Version 2.1
+-----------
+
+(bugfix release, released on June 14th 2014)
+
+- fixed the :func:`launch` function on windows.
+- improved the colorama support on windows to try hard to not
+ screw up the console if the application is interrupted.
+- fixed windows terminals incorrectly being reported to be 80
+  characters wide instead of 79.
+- use colorama win32 bindings if available to get the correct
+ dimensions of a windows terminal.
+- fixed an issue with custom function types on Python 3.
+- fixed an issue with unknown options being incorrectly reported
+ in error messages.
+
+Version 2.0
+-----------
+
+(codename "tap tap tap", released on June 6th 2014)
+
+- added support for opening stdin/stdout on Windows in
+ binary mode correctly.
+- added support for atomic writes to files by going through
+ a temporary file.
+- introduced :exc:`BadParameter` which can be used to easily perform
+ custom validation with the same error messages as in the type system.
+- added :func:`progressbar`; a function to show progress bars.
+- added :func:`get_app_dir`; a function to calculate the home folder
+ for configs.
+- Added transparent handling for ANSI codes into the :func:`echo`
+ function through ``colorama``.
+- Added :func:`clear` function.
+- Breaking change: parameter callbacks now get the parameter object
+ passed as second argument. There is legacy support for old callbacks
+ which will warn but still execute the script.
+- Added :func:`style`, :func:`unstyle` and :func:`secho` for ANSI
+ styles.
+- Added an :func:`edit` function that invokes the default editor.
+- Added a :func:`launch` function that launches browsers and applications.
+- nargs of -1 for arguments can now be forced to be a single item through
+ the required flag. It defaults to not required.
+- setting a default for arguments now implicitly makes them not required.
+- changed "yN" / "Yn" to "y/N" and "Y/n" in confirmation prompts.
+- added basic support for bash completion.
+- added :func:`getchar` to fetch a single character from the terminal.
+- errors now go to stderr as intended.
+- fixed various issues with more exotic parameter formats like DOS/Windows
+ style arguments.
+- added :func:`pause` which works similarly to the Windows ``pause`` cmd
+ built-in but becomes an automatic noop if the application is not run
+ through a terminal.
+- added a bit of extra information about missing choice parameters.
+- changed how the help function is implemented to allow global overriding
+ of the help option.
+- added support for token normalization to implement case insensitive handling.
+- added support for providing defaults for context settings.
+
+Version 1.1
+-----------
+
+(bugfix release, released on May 23rd 2014)
+
+- fixed a bug that caused text files in Python 2 to not accept
+ native strings.
+
+Version 1.0
+-----------
+
+(no codename, released on May 21st 2014)
+
+- Initial release.
diff --git a/third_party/python/Click/CONTRIBUTING.rst b/third_party/python/Click/CONTRIBUTING.rst
new file mode 100644
index 0000000000..4db9b65a3a
--- /dev/null
+++ b/third_party/python/Click/CONTRIBUTING.rst
@@ -0,0 +1,61 @@
+==========================
+How to contribute to Click
+==========================
+
+Thanks for considering contributing to Click.
+
+Support questions
+=================
+
+Please, don't use the issue tracker for this. Check whether the
+``#pocoo`` IRC channel on Freenode can help with your issue. If your problem
+is not strictly Click-specific, ``#python`` on Freenode is generally more
+active. `StackOverflow <https://stackoverflow.com/>`_ is also worth
+considering.
+
+Reporting issues
+================
+
+- Under which versions of Python does this happen? This is even more important
+ if your issue is encoding related.
+
+- Under which versions of Click does this happen? Check if this issue is fixed
+ in the repository.
+
+Submitting patches
+==================
+
+- Include tests if your patch is supposed to solve a bug, and explain clearly
+ under which circumstances the bug happens. Make sure the test fails without
+ your patch.
+
+- Try to follow `PEP8 <http://legacy.python.org/dev/peps/pep-0008/>`_, but you
+ may ignore the line-length-limit if following it would make the code uglier.
+
+- For features: Consider whether your feature would be a better fit for an
+ `external package <https://click.palletsprojects.com/en/7.x/contrib/>`_
+
+- For docs and bug fixes: Submit against the latest maintenance branch instead of master!
+
+Running the testsuite
+---------------------
+
+You probably want to set up a `virtualenv
+<https://virtualenv.readthedocs.io/en/latest/index.html>`_.
+
+The minimal requirement for running the testsuite is ``py.test``. You can
+install it with::
+
+ pip install pytest
+
+Then you can run the testsuite with::
+
+ py.test
+
+For a more isolated test environment, you can also install ``tox`` instead of
+``pytest``. You can install it with::
+
+ pip install tox
+
+The ``tox`` command will then run all tests against multiple combinations of
+Python versions and dependency versions.
diff --git a/third_party/python/Click/LICENSE.rst b/third_party/python/Click/LICENSE.rst
new file mode 100644
index 0000000000..87ce152aaf
--- /dev/null
+++ b/third_party/python/Click/LICENSE.rst
@@ -0,0 +1,39 @@
+Copyright © 2014 by the Pallets team.
+
+Some rights reserved.
+
+Redistribution and use in source and binary forms of the software as
+well as documentation, with or without modification, are permitted
+provided that the following conditions are met:
+
+- Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+- Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+- Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
+BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+----
+
+Click uses parts of optparse written by Gregory P. Ward and maintained
+by the Python Software Foundation. This is limited to code in parser.py.
+
+Copyright © 2001-2006 Gregory P. Ward. All rights reserved.
+Copyright © 2002-2006 Python Software Foundation. All rights reserved.
diff --git a/third_party/python/Click/MANIFEST.in b/third_party/python/Click/MANIFEST.in
new file mode 100644
index 0000000000..33a336f26a
--- /dev/null
+++ b/third_party/python/Click/MANIFEST.in
@@ -0,0 +1,11 @@
+include CHANGES.rst
+include CONTRIBUTING.rst
+include LICENSE.rst
+include README.rst
+include tox.ini
+graft artwork
+graft docs
+prune docs/_build
+graft examples
+graft tests
+global-exclude *.py[co] .DS_Store
diff --git a/third_party/python/Click/PKG-INFO b/third_party/python/Click/PKG-INFO
new file mode 100644
index 0000000000..fa75a91f44
--- /dev/null
+++ b/third_party/python/Click/PKG-INFO
@@ -0,0 +1,119 @@
+Metadata-Version: 1.2
+Name: Click
+Version: 7.0
+Summary: Composable command line interface toolkit
+Home-page: https://palletsprojects.com/p/click/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets Team
+Maintainer-email: contact@palletsprojects.com
+License: BSD
+Project-URL: Documentation, https://click.palletsprojects.com/
+Project-URL: Code, https://github.com/pallets/click
+Project-URL: Issue tracker, https://github.com/pallets/click/issues
+Description: \$ click\_
+ ==========
+
+ Click is a Python package for creating beautiful command line interfaces
+ in a composable way with as little code as necessary. It's the "Command
+ Line Interface Creation Kit". It's highly configurable but comes with
+ sensible defaults out of the box.
+
+ It aims to make the process of writing command line tools quick and fun
+ while also preventing any frustration caused by the inability to
+ implement an intended CLI API.
+
+ Click in three points:
+
+ - Arbitrary nesting of commands
+ - Automatic help page generation
+ - Supports lazy loading of subcommands at runtime
+
+
+ Installing
+ ----------
+
+ Install and update using `pip`_:
+
+ .. code-block:: text
+
+ $ pip install click
+
+ Click supports Python 3.4 and newer, Python 2.7, and PyPy.
+
+ .. _pip: https://pip.pypa.io/en/stable/quickstart/
+
+
+ A Simple Example
+ ----------------
+
+ What does it look like? Here is an example of a simple Click program:
+
+ .. code-block:: python
+
+ import click
+
+ @click.command()
+ @click.option("--count", default=1, help="Number of greetings.")
+ @click.option("--name", prompt="Your name",
+ help="The person to greet.")
+ def hello(count, name):
+ """Simple program that greets NAME for a total of COUNT times."""
+ for _ in range(count):
+ click.echo("Hello, %s!" % name)
+
+ if __name__ == '__main__':
+ hello()
+
+ And what it looks like when run:
+
+ .. code-block:: text
+
+ $ python hello.py --count=3
+ Your name: Click
+ Hello, Click!
+ Hello, Click!
+ Hello, Click!
+
+
+ Donate
+ ------
+
+ The Pallets organization develops and supports Click and other popular
+ packages. In order to grow the community of contributors and users, and
+ allow the maintainers to devote more time to the projects, `please
+ donate today`_.
+
+ .. _please donate today: https://palletsprojects.com/donate
+
+
+ Links
+ -----
+
+ * Website: https://palletsprojects.com/p/click/
+ * Documentation: https://click.palletsprojects.com/
+ * License: `BSD <https://github.com/pallets/click/blob/master/LICENSE.rst>`_
+ * Releases: https://pypi.org/project/click/
+ * Code: https://github.com/pallets/click
+ * Issue tracker: https://github.com/pallets/click/issues
+ * Test status:
+
+ * Linux, Mac: https://travis-ci.org/pallets/click
+ * Windows: https://ci.appveyor.com/project/pallets/click
+
+ * Test coverage: https://codecov.io/gh/pallets/click
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
diff --git a/third_party/python/Click/README.rst b/third_party/python/Click/README.rst
new file mode 100644
index 0000000000..e4c047151c
--- /dev/null
+++ b/third_party/python/Click/README.rst
@@ -0,0 +1,91 @@
+\$ click\_
+==========
+
+Click is a Python package for creating beautiful command line interfaces
+in a composable way with as little code as necessary. It's the "Command
+Line Interface Creation Kit". It's highly configurable but comes with
+sensible defaults out of the box.
+
+It aims to make the process of writing command line tools quick and fun
+while also preventing any frustration caused by the inability to
+implement an intended CLI API.
+
+Click in three points:
+
+- Arbitrary nesting of commands
+- Automatic help page generation
+- Supports lazy loading of subcommands at runtime
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+ $ pip install click
+
+Click supports Python 3.4 and newer, Python 2.7, and PyPy.
+
+.. _pip: https://pip.pypa.io/en/stable/quickstart/
+
+
+A Simple Example
+----------------
+
+What does it look like? Here is an example of a simple Click program:
+
+.. code-block:: python
+
+ import click
+
+ @click.command()
+ @click.option("--count", default=1, help="Number of greetings.")
+ @click.option("--name", prompt="Your name",
+ help="The person to greet.")
+ def hello(count, name):
+ """Simple program that greets NAME for a total of COUNT times."""
+ for _ in range(count):
+ click.echo("Hello, %s!" % name)
+
+ if __name__ == '__main__':
+ hello()
+
+And what it looks like when run:
+
+.. code-block:: text
+
+ $ python hello.py --count=3
+ Your name: Click
+ Hello, Click!
+ Hello, Click!
+ Hello, Click!
+
+
+Donate
+------
+
+The Pallets organization develops and supports Click and other popular
+packages. In order to grow the community of contributors and users, and
+allow the maintainers to devote more time to the projects, `please
+donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+* Website: https://palletsprojects.com/p/click/
+* Documentation: https://click.palletsprojects.com/
+* License: `BSD <https://github.com/pallets/click/blob/master/LICENSE.rst>`_
+* Releases: https://pypi.org/project/click/
+* Code: https://github.com/pallets/click
+* Issue tracker: https://github.com/pallets/click/issues
+* Test status:
+
+ * Linux, Mac: https://travis-ci.org/pallets/click
+ * Windows: https://ci.appveyor.com/project/pallets/click
+
+* Test coverage: https://codecov.io/gh/pallets/click
diff --git a/third_party/python/Click/artwork/logo.svg b/third_party/python/Click/artwork/logo.svg
new file mode 100644
index 0000000000..e0e1b58839
--- /dev/null
+++ b/third_party/python/Click/artwork/logo.svg
@@ -0,0 +1,75 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="237.36929"
+ height="110.7928"
+ id="svg4837"
+ version="1.1"
+ inkscape:version="0.48.2 r9819"
+ sodipodi:docname="New document 8">
+ <defs
+ id="defs4839" />
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="0.98994949"
+ inkscape:cx="259.76814"
+ inkscape:cy="40.769955"
+ inkscape:document-units="px"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ fit-margin-top="10"
+ fit-margin-left="10"
+ fit-margin-right="10"
+ fit-margin-bottom="10"
+ inkscape:window-width="1676"
+ inkscape:window-height="1006"
+ inkscape:window-x="4"
+ inkscape:window-y="0"
+ inkscape:window-maximized="1" />
+ <metadata
+ id="metadata4842">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title></dc:title>
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Layer 1"
+ inkscape:groupmode="layer"
+ id="layer1"
+ transform="translate(-255.43458,-207.38101)">
+ <path
+ style="fill:#000000"
+ d="m 466.33424,306.48462 0,-1.6892 3.16724,0 3.16723,0 0,1.6892 0,1.68919 -3.16723,0 -3.16724,0 0,-1.68919 z m -3.37839,-5.06759 0,-3.37839 1.47804,0 1.47805,0 0,3.37839 0,3.37839 -1.47805,0 -1.47804,0 0,-3.37839 z m 10.13516,0 0,-3.37839 1.47804,0 1.47805,0 0,3.37839 0,3.37839 -1.47805,0 -1.47804,0 0,-3.37839 z m -30.82778,-28.92744 0,-28.92744 1.68919,0 c 1.68919,0 1.68919,0 1.68919,1.68919 0,1.6892 0,1.6892 1.6892,1.6892 1.68919,0 1.68919,0 1.68919,1.68919 0,1.6892 0,1.6892 1.6892,1.6892 1.68919,0 1.68919,0 1.68919,1.68919 0,1.6892 0,1.6892 -1.68919,1.6892 -1.6892,0 -1.6892,0 -1.6892,-1.6892 0,-1.68919 0,-1.68919 -1.68919,-1.68919 l -1.6892,0 0,23.85986 0,23.85985 1.6892,0 c 1.68919,0 1.68919,0 1.68919,1.6892 l 0,1.68919 -3.37839,0 -3.37838,0 0,-28.92744 z m 7.00029,25.22951 c -0.13394,-0.13393 -0.24352,-0.87696 -0.24352,-1.65118 0,-1.36423 0.0529,-1.40766 1.71469,-1.40766 l 1.71468,0 -0.13106,1.58362 c -0.12024,1.45279 -0.24178,1.5892 -1.47117,1.65118 -0.73706,0.0372 -1.44968,-0.042 -1.58362,-0.17596 z m 10.14204,-0.0392 c -0.13814,-0.22352 -0.20454,-1.7545 -0.14756,-3.40219 l 0.10361,-2.99581 1.60373,0 1.60374,0 -0.12334,3.27282 -0.12334,3.27281 -1.33284,0.12938 c -0.73305,0.0712 -1.44586,-0.0535 -1.584,-0.27702 z m 10.30707,-3.03247 0,-3.36553 1.47803,0 1.47805,0 0,3.30997 0,3.30996 -1.47805,0.0556 -1.47803,0.0556 0,-3.36553 z m -17.03271,-0.26872 c -0.15484,-0.15483 -0.28153,-0.91497 -0.28153,-1.68919 0,-1.36073 0.0563,-1.40766 1.68919,-1.40766 1.6892,0 1.6892,0 1.6892,1.6892 0,1.63289 -0.0469,1.68919 -1.40766,1.68919 -0.77422,0 -1.53436,-0.12669 -1.6892,-0.28154 z m 3.09686,-4.99719 c 0,-1.44286 0.0402,-1.47804 1.68919,-1.47804 1.64898,0 1.68919,0.0352 1.68919,1.47804 0,1.44285 -0.0402,1.47805 -1.68919,1.47805 -1.64898,0 -1.68919,-0.0352 -1.68919,-1.47805 z m 10.55746,-1.68919 0,-3.16724 6.75677,0 6.75678,0 0,-1.68919 c 0,-1.64898 0.0351,-1.6892 1.47804,-1.6892 l 1.47804,0 0,3.15652 0,3.15651 -6.6512,0.1163 -6.65119,0.1163 -0.13108,1.58361 c -0.12394,1.49763 -0.20994,1.58363 -1.58361,1.58363 l -1.45255,0 0,-3.16724 z m 10.13516,-8.44597 c 0,-1.40766 0.0704,-1.47804 1.47804,-1.47804 1.40766,0 1.47805,0.0704 1.47805,1.47804 0,1.40766 -0.0704,1.47804 -1.47805,1.47804 -1.40766,0 -1.47804,-0.0704 -1.47804,-1.47804 z m -3.37839,-3.37839 c 0,-1.40766 0.0704,-1.47804 1.47804,-1.47804 1.40767,0 1.47805,0.0704 1.47805,1.47804 0,1.40767 -0.0704,1.47805 -1.47805,1.47805 -1.40765,0 -1.47804,-0.0704 -1.47804,-1.47805 z m -3.37838,-3.37839 c 0,-1.40765 0.0704,-1.47804 1.47803,-1.47804 1.40767,0 1.47805,0.0704 1.47805,1.47804 0,1.40767 -0.0704,1.47805 -1.47805,1.47805 -1.40765,0 -1.47803,-0.0704 -1.47803,-1.47805 z m -3.37839,-3.48871 c 0,-1.52897 0.0553,-1.58931 1.47804,-1.61346 1.43567,-0.0244 1.47804,0.0211 1.47804,1.58838 0,1.56241 -0.0467,1.61345 -1.47804,1.61345 -1.42721,0 -1.47804,-0.0546 -1.47804,-1.58837 z m -3.37839,-3.47921 c 0,-1.64897 0.0351,-1.68919 1.47804,-1.68919 1.44285,0 1.47805,0.0402 1.47805,1.68919 0,1.64898 -0.0352,1.6892 -1.47805,1.6892 -1.44285,0 -1.47804,-0.0403 -1.47804,-1.6892 z m -3.45076,-3.37839 c 0.15328,-1.60738 0.23052,-1.68919 1.59478,-1.68919 1.3911,0 1.43368,0.0502 1.43368,1.68919 0,1.67253 -0.0157,1.6892 -1.59478,1.6892 l -1.59477,0 0.16109,-1.6892 z m -3.30601,-3.37838 c 0,-1.64898 0.0351,-1.6892 1.47804,-1.6892 1.44285,0 1.47804,0.0402 1.47804,1.6892 0,1.64897 -0.0352,1.68919 -1.47804,1.68919 -1.44286,0 -1.47804,-0.0402 -1.47804,-1.68919 z m -3.72061,-3.27282 c 0.12395,-1.49762 0.21706,-1.59073 1.71468,-1.71468 l 1.58363,-0.13107 0,1.71469 0,1.71468 
-1.71469,0 -1.71468,0 0.13106,-1.58362 z"
+ id="path4856"
+ inkscape:connector-curvature="0" />
+ <path
+ style="fill:#000000"
+ d="m 271.34527,247.46047 c 0,-1.32826 -0.94153,-2.12734 -3.05297,-2.59109 -4.04056,-0.88746 -3.19986,-3.08429 1.01754,-2.65894 9.32239,0.94022 10.81407,-3.50307 2.60795,-7.76831 -2.71012,-1.40862 -5.36388,-3.27897 -5.89726,-4.15634 -1.47604,-2.42802 -0.0246,-6.42458 2.80076,-7.7119 1.48697,-0.67751 2.52398,-1.98054 2.52398,-3.17144 0,-1.13017 0.66136,-2.02144 1.5,-2.02144 0.825,0 1.5,0.85122 1.5,1.89159 0,1.19765 0.94321,2.12832 2.57104,2.53688 3.91591,0.98283 2.39946,3.10499 -1.95093,2.73019 -6.99788,-0.60291 -8.93527,3.8291 -2.87011,6.5657 9.0905,4.10163 11.015,9.73021 4.5,13.16112 -1.2375,0.65169 -2.25,2.06905 -2.25,3.1497 0,1.08065 -0.675,1.96482 -1.5,1.96482 -0.825,0 -1.5,-0.86425 -1.5,-1.92054 z m 38.357,-3.07104 c -4.57947,-1.84804 -6.77791,-8.48491 -4.32736,-13.06381 2.11183,-3.94599 10.30093,-6.28597 13.9414,-3.98367 2.2687,1.43477 0.36183,2.48777 -3.94232,2.177 -6.96682,-0.50302 -10.61631,6.27447 -5.91184,10.97894 1.71218,1.71218 2.95483,2.02243 6.5,1.62284 3.13626,-0.35351 4.38312,-0.13272 4.38312,0.77613 0,2.4062 -6.21813,3.27822 -10.643,1.49257 z m 21.1997,-1.23093 c -1.16379,-1.66155 -1.5567,-4.81653 -1.5567,-12.5 l 0,-10.27749 -2.5,0 c -1.55556,0 -2.5,-0.56667 -2.5,-1.5 0,-1.08333 1.11111,-1.5 4,-1.5 l 4,0 0.0153,11.25 c 0.008,6.1875 0.41056,11.87411 0.89363,12.63691 0.54214,0.85607 1.88505,1.19446 3.50856,0.88411 2.94677,-0.56331 4.58181,0.98192 2.46061,2.32547 -2.5829,1.63598 -6.70984,0.98184 -8.32141,-1.319 z m 20.4433,0.22251 c -1.55556,-1.55556 -2,-3.33333 -2,-8 0,-5.73333 -0.11111,-6 -2.5,-6 -1.55556,0 -2.5,-0.56667 -2.5,-1.5 0,-1.08333 1.11111,-1.5 4,-1.5 l 4,0 0.0153,6.75 c 0.0183,8.04891 0.82623,9.70461 4.40219,9.02102 2.94677,-0.56331 4.58181,0.98192 2.46061,2.32547 -2.3358,1.47948 -5.78176,0.99986 -7.87811,-1.09649 z m 18.357,1.00842 c -4.57947,-1.84804 -6.77791,-8.48491 -4.32736,-13.06381 2.11183,-3.94599 10.30093,-6.28597 13.9414,-3.98367 2.2687,1.43477 0.36183,2.48777 -3.94232,2.177 -6.96682,-0.50302 -10.61631,6.27447 -5.91184,10.97894 1.71218,1.71218 2.95483,2.02243 6.5,1.62284 3.13626,-0.35351 4.38312,-0.13272 4.38312,0.77613 0,2.4062 -6.21813,3.27822 -10.643,1.49257 z m 15.86478,-12.67646 c 0.20032,-9.84119 0.6282,-13.78431 1.52822,-14.08333 0.9358,-0.31091 1.25628,1.80502 1.275,8.41804 l 0.025,8.83333 4.01111,-4.25 c 2.32594,-2.46446 4.92367,-4.25 6.18321,-4.25 1.8968,0 1.6121,0.57059 -2.24643,4.50214 l -4.41851,4.50213 4.92271,4.99787 c 3.84161,3.90023 4.49929,4.99786 2.99467,4.99786 -1.06044,0 -4.08102,-2.12058 -6.71241,-4.71241 l -4.78435,-4.71241 0,4.71241 c 0,3.56082 -0.37346,4.71241 -1.52822,4.71241 -1.30999,0 -1.48849,-1.95179 -1.25,-13.66804 z m -37.85759,-9.82527 c -0.34636,-0.90262 -0.15025,-2.12063 0.43581,-2.70669 1.52361,-1.52361 4.41041,-0.13109 4.01242,1.93549 -0.4433,2.30188 -3.64766,2.85743 -4.44823,0.7712 z"
+ id="path4887"
+ inkscape:connector-curvature="0" />
+ <path
+ style="fill:#000000;fill-opacity:1;stroke:none"
+ d="m 411.32164,243.39982 c 7.38153,0.15924 14.76525,0.0821 22.14736,0.0136 -3.6814,3.83663 -9.42739,1.45689 -14.06576,2.07921 -3.69684,-0.007 -7.3939,0.0151 -11.09,0.0923 1.0029,-0.72825 2.00505,-1.45754 3.0084,-2.18514 z"
+ id="path4889"
+ inkscape:connector-curvature="0" />
+ </g>
+</svg>
diff --git a/third_party/python/Click/click/__init__.py b/third_party/python/Click/click/__init__.py
new file mode 100644
index 0000000000..d3c33660a9
--- /dev/null
+++ b/third_party/python/Click/click/__init__.py
@@ -0,0 +1,97 @@
+# -*- coding: utf-8 -*-
+"""
+click
+~~~~~
+
+Click is a simple Python module inspired by the stdlib optparse to make
+writing command line scripts fun. Unlike other modules, it's based
+around a simple API that does not come with too much magic and is
+composable.
+
+:copyright: © 2014 by the Pallets team.
+:license: BSD, see LICENSE.rst for more details.
+"""
+
+# Core classes
+from .core import Context, BaseCommand, Command, MultiCommand, Group, \
+ CommandCollection, Parameter, Option, Argument
+
+# Globals
+from .globals import get_current_context
+
+# Decorators
+from .decorators import pass_context, pass_obj, make_pass_decorator, \
+ command, group, argument, option, confirmation_option, \
+ password_option, version_option, help_option
+
+# Types
+from .types import ParamType, File, Path, Choice, IntRange, Tuple, \
+ DateTime, STRING, INT, FLOAT, BOOL, UUID, UNPROCESSED, FloatRange
+
+# Utilities
+from .utils import echo, get_binary_stream, get_text_stream, open_file, \
+ format_filename, get_app_dir, get_os_args
+
+# Terminal functions
+from .termui import prompt, confirm, get_terminal_size, echo_via_pager, \
+ progressbar, clear, style, unstyle, secho, edit, launch, getchar, \
+ pause
+
+# Exceptions
+from .exceptions import ClickException, UsageError, BadParameter, \
+ FileError, Abort, NoSuchOption, BadOptionUsage, BadArgumentUsage, \
+ MissingParameter
+
+# Formatting
+from .formatting import HelpFormatter, wrap_text
+
+# Parsing
+from .parser import OptionParser
+
+
+__all__ = [
+ # Core classes
+ 'Context', 'BaseCommand', 'Command', 'MultiCommand', 'Group',
+ 'CommandCollection', 'Parameter', 'Option', 'Argument',
+
+ # Globals
+ 'get_current_context',
+
+ # Decorators
+ 'pass_context', 'pass_obj', 'make_pass_decorator', 'command', 'group',
+ 'argument', 'option', 'confirmation_option', 'password_option',
+ 'version_option', 'help_option',
+
+ # Types
+ 'ParamType', 'File', 'Path', 'Choice', 'IntRange', 'Tuple',
+ 'DateTime', 'STRING', 'INT', 'FLOAT', 'BOOL', 'UUID', 'UNPROCESSED',
+ 'FloatRange',
+
+ # Utilities
+ 'echo', 'get_binary_stream', 'get_text_stream', 'open_file',
+ 'format_filename', 'get_app_dir', 'get_os_args',
+
+ # Terminal functions
+ 'prompt', 'confirm', 'get_terminal_size', 'echo_via_pager',
+ 'progressbar', 'clear', 'style', 'unstyle', 'secho', 'edit', 'launch',
+ 'getchar', 'pause',
+
+ # Exceptions
+ 'ClickException', 'UsageError', 'BadParameter', 'FileError',
+ 'Abort', 'NoSuchOption', 'BadOptionUsage', 'BadArgumentUsage',
+ 'MissingParameter',
+
+ # Formatting
+ 'HelpFormatter', 'wrap_text',
+
+ # Parsing
+ 'OptionParser',
+]
+
+
+# Controls if click should emit the warning about the use of unicode
+# literals.
+disable_unicode_literals_warning = False
+
+
+__version__ = '7.0'
diff --git a/third_party/python/Click/click/_bashcomplete.py b/third_party/python/Click/click/_bashcomplete.py
new file mode 100644
index 0000000000..a5f1084c9a
--- /dev/null
+++ b/third_party/python/Click/click/_bashcomplete.py
@@ -0,0 +1,293 @@
+import copy
+import os
+import re
+
+from .utils import echo
+from .parser import split_arg_string
+from .core import MultiCommand, Option, Argument
+from .types import Choice
+
+try:
+ from collections import abc
+except ImportError:
+ import collections as abc
+
+WORDBREAK = '='
+
+# Note, only BASH version 4.4 and later have the nosort option.
+COMPLETION_SCRIPT_BASH = '''
+%(complete_func)s() {
+ local IFS=$'\n'
+ COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\
+ COMP_CWORD=$COMP_CWORD \\
+ %(autocomplete_var)s=complete $1 ) )
+ return 0
+}
+
+%(complete_func)setup() {
+ local COMPLETION_OPTIONS=""
+ local BASH_VERSION_ARR=(${BASH_VERSION//./ })
+ # Only BASH version 4.4 and later have the nosort option.
+ if [ ${BASH_VERSION_ARR[0]} -gt 4 ] || ([ ${BASH_VERSION_ARR[0]} -eq 4 ] && [ ${BASH_VERSION_ARR[1]} -ge 4 ]); then
+ COMPLETION_OPTIONS="-o nosort"
+ fi
+
+ complete $COMPLETION_OPTIONS -F %(complete_func)s %(script_names)s
+}
+
+%(complete_func)setup
+'''
+
+COMPLETION_SCRIPT_ZSH = '''
+%(complete_func)s() {
+ local -a completions
+ local -a completions_with_descriptions
+ local -a response
+ response=("${(@f)$( env COMP_WORDS=\"${words[*]}\" \\
+ COMP_CWORD=$((CURRENT-1)) \\
+ %(autocomplete_var)s=\"complete_zsh\" \\
+ %(script_names)s )}")
+
+ for key descr in ${(kv)response}; do
+ if [[ "$descr" == "_" ]]; then
+ completions+=("$key")
+ else
+ completions_with_descriptions+=("$key":"$descr")
+ fi
+ done
+
+ if [ -n "$completions_with_descriptions" ]; then
+ _describe -V unsorted completions_with_descriptions -U -Q
+ fi
+
+ if [ -n "$completions" ]; then
+ compadd -U -V unsorted -Q -a completions
+ fi
+ compstate[insert]="automenu"
+}
+
+compdef %(complete_func)s %(script_names)s
+'''
+
+_invalid_ident_char_re = re.compile(r'[^a-zA-Z0-9_]')
+
+
+def get_completion_script(prog_name, complete_var, shell):
+ cf_name = _invalid_ident_char_re.sub('', prog_name.replace('-', '_'))
+ script = COMPLETION_SCRIPT_ZSH if shell == 'zsh' else COMPLETION_SCRIPT_BASH
+ return (script % {
+ 'complete_func': '_%s_completion' % cf_name,
+ 'script_names': prog_name,
+ 'autocomplete_var': complete_var,
+ }).strip() + ';'
+
+
+def resolve_ctx(cli, prog_name, args):
+ """
+ Parse into a hierarchy of contexts. Contexts are connected through the parent variable.
+ :param cli: command definition
+ :param prog_name: the program that is running
+ :param args: full list of args
+ :return: the final context/command parsed
+ """
+ ctx = cli.make_context(prog_name, args, resilient_parsing=True)
+ args = ctx.protected_args + ctx.args
+ while args:
+ if isinstance(ctx.command, MultiCommand):
+ if not ctx.command.chain:
+ cmd_name, cmd, args = ctx.command.resolve_command(ctx, args)
+ if cmd is None:
+ return ctx
+ ctx = cmd.make_context(cmd_name, args, parent=ctx,
+ resilient_parsing=True)
+ args = ctx.protected_args + ctx.args
+ else:
+ # Walk chained subcommand contexts saving the last one.
+ while args:
+ cmd_name, cmd, args = ctx.command.resolve_command(ctx, args)
+ if cmd is None:
+ return ctx
+ sub_ctx = cmd.make_context(cmd_name, args, parent=ctx,
+ allow_extra_args=True,
+ allow_interspersed_args=False,
+ resilient_parsing=True)
+ args = sub_ctx.args
+ ctx = sub_ctx
+ args = sub_ctx.protected_args + sub_ctx.args
+ else:
+ break
+ return ctx
+
+
+def start_of_option(param_str):
+ """
+ :param param_str: param_str to check
+    :return: whether or not this is the start of an option declaration (i.e. starts with "-" or "--")
+ """
+ return param_str and param_str[:1] == '-'
+
+
+def is_incomplete_option(all_args, cmd_param):
+ """
+ :param all_args: the full original list of args supplied
+    :param cmd_param: the current command parameter
+    :return: whether or not the last option declaration (i.e. starts with "-" or "--") is
+        incomplete and corresponds to this cmd_param. In other words, whether this cmd_param
+        option can still accept values
+ """
+ if not isinstance(cmd_param, Option):
+ return False
+ if cmd_param.is_flag:
+ return False
+ last_option = None
+ for index, arg_str in enumerate(reversed([arg for arg in all_args if arg != WORDBREAK])):
+ if index + 1 > cmd_param.nargs:
+ break
+ if start_of_option(arg_str):
+ last_option = arg_str
+
+ return True if last_option and last_option in cmd_param.opts else False
+
+
+def is_incomplete_argument(current_params, cmd_param):
+ """
+ :param current_params: the current params and values for this argument as already entered
+ :param cmd_param: the current command parameter
+    :return: whether or not the last argument is incomplete and corresponds to this cmd_param. In
+        other words, whether or not this cmd_param argument can still accept values
+ """
+ if not isinstance(cmd_param, Argument):
+ return False
+ current_param_values = current_params[cmd_param.name]
+ if current_param_values is None:
+ return True
+ if cmd_param.nargs == -1:
+ return True
+ if isinstance(current_param_values, abc.Iterable) \
+ and cmd_param.nargs > 1 and len(current_param_values) < cmd_param.nargs:
+ return True
+ return False
+
+
+def get_user_autocompletions(ctx, args, incomplete, cmd_param):
+ """
+ :param ctx: context associated with the parsed command
+ :param args: full list of args
+ :param incomplete: the incomplete text to autocomplete
+ :param cmd_param: command definition
+ :return: all the possible user-specified completions for the param
+ """
+ results = []
+ if isinstance(cmd_param.type, Choice):
+ # Choices don't support descriptions.
+ results = [(c, None)
+ for c in cmd_param.type.choices if str(c).startswith(incomplete)]
+ elif cmd_param.autocompletion is not None:
+ dynamic_completions = cmd_param.autocompletion(ctx=ctx,
+ args=args,
+ incomplete=incomplete)
+ results = [c if isinstance(c, tuple) else (c, None)
+ for c in dynamic_completions]
+ return results
+
+
+def get_visible_commands_starting_with(ctx, starts_with):
+ """
+ :param ctx: context associated with the parsed command
+    :param starts_with: string that visible commands must start with.
+ :return: all visible (not hidden) commands that start with starts_with.
+ """
+ for c in ctx.command.list_commands(ctx):
+ if c.startswith(starts_with):
+ command = ctx.command.get_command(ctx, c)
+ if not command.hidden:
+ yield command
+
+
+def add_subcommand_completions(ctx, incomplete, completions_out):
+ # Add subcommand completions.
+ if isinstance(ctx.command, MultiCommand):
+ completions_out.extend(
+ [(c.name, c.get_short_help_str()) for c in get_visible_commands_starting_with(ctx, incomplete)])
+
+ # Walk up the context list and add any other completion possibilities from chained commands
+ while ctx.parent is not None:
+ ctx = ctx.parent
+ if isinstance(ctx.command, MultiCommand) and ctx.command.chain:
+ remaining_commands = [c for c in get_visible_commands_starting_with(ctx, incomplete)
+ if c.name not in ctx.protected_args]
+ completions_out.extend([(c.name, c.get_short_help_str()) for c in remaining_commands])
+
+
+def get_choices(cli, prog_name, args, incomplete):
+ """
+ :param cli: command definition
+ :param prog_name: the program that is running
+ :param args: full list of args
+ :param incomplete: the incomplete text to autocomplete
+ :return: all the possible completions for the incomplete
+ """
+ all_args = copy.deepcopy(args)
+
+ ctx = resolve_ctx(cli, prog_name, args)
+ if ctx is None:
+ return []
+
+ # In newer versions of bash long opts with '='s are partitioned, but it's easier to parse
+ # without the '='
+ if start_of_option(incomplete) and WORDBREAK in incomplete:
+ partition_incomplete = incomplete.partition(WORDBREAK)
+ all_args.append(partition_incomplete[0])
+ incomplete = partition_incomplete[2]
+ elif incomplete == WORDBREAK:
+ incomplete = ''
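+    # Illustration (hypothetical option "--color"): if bash hands us the
+    # incomplete token "--color=au", the block above appends "--color" to
+    # all_args and shrinks incomplete down to "au".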
+
+ completions = []
+ if start_of_option(incomplete):
+ # completions for partial options
+ for param in ctx.command.params:
+ if isinstance(param, Option) and not param.hidden:
+ param_opts = [param_opt for param_opt in param.opts +
+ param.secondary_opts if param_opt not in all_args or param.multiple]
+ completions.extend([(o, param.help) for o in param_opts if o.startswith(incomplete)])
+ return completions
+ # completion for option values from user supplied values
+ for param in ctx.command.params:
+ if is_incomplete_option(all_args, param):
+ return get_user_autocompletions(ctx, all_args, incomplete, param)
+ # completion for argument values from user supplied values
+ for param in ctx.command.params:
+ if is_incomplete_argument(ctx.params, param):
+ return get_user_autocompletions(ctx, all_args, incomplete, param)
+
+ add_subcommand_completions(ctx, incomplete, completions)
+ # Sort before returning so that proper ordering can be enforced in custom types.
+ return sorted(completions)
+
+
+def do_complete(cli, prog_name, include_descriptions):
+ cwords = split_arg_string(os.environ['COMP_WORDS'])
+ cword = int(os.environ['COMP_CWORD'])
+ args = cwords[1:cword]
+ try:
+ incomplete = cwords[cword]
+ except IndexError:
+ incomplete = ''
+
+ for item in get_choices(cli, prog_name, args, incomplete):
+ echo(item[0])
+ if include_descriptions:
+ # ZSH has trouble dealing with empty array parameters when returned from commands, so use a well defined character '_' to indicate no description is present.
+ echo(item[1] if item[1] else '_')
+
+ return True
+
+
+def bashcomplete(cli, prog_name, complete_var, complete_instr):
+ if complete_instr.startswith('source'):
+ shell = 'zsh' if complete_instr == 'source_zsh' else 'bash'
+ echo(get_completion_script(prog_name, complete_var, shell))
+ return True
+ elif complete_instr == 'complete' or complete_instr == 'complete_zsh':
+ return do_complete(cli, prog_name, complete_instr == 'complete_zsh')
+ return False
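+
+
+# Rough activation sketch (hypothetical program name "mycli"; the actual
+# environment variable is whatever ``complete_var`` the caller passes in,
+# e.g. "_MYCLI_COMPLETE"):
+#
+#   eval "$(_MYCLI_COMPLETE=source mycli)"   # prints and installs the bash script
+#   _MYCLI_COMPLETE=complete mycli           # run by the installed shell function
+#
+# The first form goes through get_completion_script() above; the second
+# drives do_complete(), which reads COMP_WORDS and COMP_CWORD from the
+# environment.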
diff --git a/third_party/python/Click/click/_compat.py b/third_party/python/Click/click/_compat.py
new file mode 100644
index 0000000000..937e2301d4
--- /dev/null
+++ b/third_party/python/Click/click/_compat.py
@@ -0,0 +1,703 @@
+import re
+import io
+import os
+import sys
+import codecs
+from weakref import WeakKeyDictionary
+
+
+PY2 = sys.version_info[0] == 2
+CYGWIN = sys.platform.startswith('cygwin')
+# Determine local App Engine environment, per Google's own suggestion
+APP_ENGINE = ('APPENGINE_RUNTIME' in os.environ and
+ 'Development/' in os.environ['SERVER_SOFTWARE'])
+WIN = sys.platform.startswith('win') and not APP_ENGINE
+DEFAULT_COLUMNS = 80
+
+
+_ansi_re = re.compile(r'\033\[((?:\d|;)*)([a-zA-Z])')
+
+
+def get_filesystem_encoding():
+ return sys.getfilesystemencoding() or sys.getdefaultencoding()
+
+
+def _make_text_stream(stream, encoding, errors,
+ force_readable=False, force_writable=False):
+ if encoding is None:
+ encoding = get_best_encoding(stream)
+ if errors is None:
+ errors = 'replace'
+ return _NonClosingTextIOWrapper(stream, encoding, errors,
+ line_buffering=True,
+ force_readable=force_readable,
+ force_writable=force_writable)
+
+
+def is_ascii_encoding(encoding):
+ """Checks if a given encoding is ascii."""
+ try:
+ return codecs.lookup(encoding).name == 'ascii'
+ except LookupError:
+ return False
+
+
+def get_best_encoding(stream):
+ """Returns the default stream encoding if not found."""
+ rv = getattr(stream, 'encoding', None) or sys.getdefaultencoding()
+ if is_ascii_encoding(rv):
+ return 'utf-8'
+ return rv
+
+
+class _NonClosingTextIOWrapper(io.TextIOWrapper):
+
+ def __init__(self, stream, encoding, errors,
+ force_readable=False, force_writable=False, **extra):
+ self._stream = stream = _FixupStream(stream, force_readable,
+ force_writable)
+ io.TextIOWrapper.__init__(self, stream, encoding, errors, **extra)
+
+ # The io module is a place where the Python 3 text behavior
+ # was forced upon Python 2, so we need to unbreak
+ # it to look like Python 2.
+ if PY2:
+ def write(self, x):
+ if isinstance(x, str) or is_bytes(x):
+ try:
+ self.flush()
+ except Exception:
+ pass
+ return self.buffer.write(str(x))
+ return io.TextIOWrapper.write(self, x)
+
+ def writelines(self, lines):
+ for line in lines:
+ self.write(line)
+
+ def __del__(self):
+ try:
+ self.detach()
+ except Exception:
+ pass
+
+ def isatty(self):
+ # https://bitbucket.org/pypy/pypy/issue/1803
+ return self._stream.isatty()
+
+
+class _FixupStream(object):
+ """The new io interface needs more from streams than streams
+ traditionally implement. As such, this fix-up code is necessary in
+ some circumstances.
+
+    The forcing of readable and writable flags is there because some tools
+    put badly patched objects on sys (one such offender is certain versions
+    of the Jupyter notebook).
+ """
+
+ def __init__(self, stream, force_readable=False, force_writable=False):
+ self._stream = stream
+ self._force_readable = force_readable
+ self._force_writable = force_writable
+
+ def __getattr__(self, name):
+ return getattr(self._stream, name)
+
+ def read1(self, size):
+ f = getattr(self._stream, 'read1', None)
+ if f is not None:
+ return f(size)
+ # We only dispatch to readline instead of read in Python 2 as we
+        # do not want to cause problems with the different implementation
+ # of line buffering.
+ if PY2:
+ return self._stream.readline(size)
+ return self._stream.read(size)
+
+ def readable(self):
+ if self._force_readable:
+ return True
+ x = getattr(self._stream, 'readable', None)
+ if x is not None:
+ return x()
+ try:
+ self._stream.read(0)
+ except Exception:
+ return False
+ return True
+
+ def writable(self):
+ if self._force_writable:
+ return True
+ x = getattr(self._stream, 'writable', None)
+ if x is not None:
+ return x()
+ try:
+ self._stream.write('')
+ except Exception:
+ try:
+ self._stream.write(b'')
+ except Exception:
+ return False
+ return True
+
+ def seekable(self):
+ x = getattr(self._stream, 'seekable', None)
+ if x is not None:
+ return x()
+ try:
+ self._stream.seek(self._stream.tell())
+ except Exception:
+ return False
+ return True
+
+
+if PY2:
+ text_type = unicode
+ bytes = str
+ raw_input = raw_input
+ string_types = (str, unicode)
+ int_types = (int, long)
+ iteritems = lambda x: x.iteritems()
+ range_type = xrange
+
+ def is_bytes(x):
+ return isinstance(x, (buffer, bytearray))
+
+ _identifier_re = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$')
+
+    # For Windows, we need to force stdout/stdin/stderr to binary whenever a
+    # binary stream is requested. This obviously is not the most correct way to do
+ # it as it changes global state. Unfortunately, there does not seem to
+ # be a clear better way to do it as just reopening the file in binary
+ # mode does not change anything.
+ #
+ # An option would be to do what Python 3 does and to open the file as
+ # binary only, patch it back to the system, and then use a wrapper
+ # stream that converts newlines. It's not quite clear what's the
+ # correct option here.
+ #
+ # This code also lives in _winconsole for the fallback to the console
+ # emulation stream.
+ #
+ # There are also Windows environments where the `msvcrt` module is not
+ # available (which is why we use try-catch instead of the WIN variable
+ # here), such as the Google App Engine development server on Windows. In
+ # those cases there is just nothing we can do.
+ def set_binary_mode(f):
+ return f
+
+ try:
+ import msvcrt
+ except ImportError:
+ pass
+ else:
+ def set_binary_mode(f):
+ try:
+ fileno = f.fileno()
+ except Exception:
+ pass
+ else:
+ msvcrt.setmode(fileno, os.O_BINARY)
+ return f
+
+ try:
+ import fcntl
+ except ImportError:
+ pass
+ else:
+ def set_binary_mode(f):
+ try:
+ fileno = f.fileno()
+ except Exception:
+ pass
+ else:
+ flags = fcntl.fcntl(fileno, fcntl.F_GETFL)
+ fcntl.fcntl(fileno, fcntl.F_SETFL, flags & ~os.O_NONBLOCK)
+ return f
+
+ def isidentifier(x):
+ return _identifier_re.search(x) is not None
+
+ def get_binary_stdin():
+ return set_binary_mode(sys.stdin)
+
+ def get_binary_stdout():
+ _wrap_std_stream('stdout')
+ return set_binary_mode(sys.stdout)
+
+ def get_binary_stderr():
+ _wrap_std_stream('stderr')
+ return set_binary_mode(sys.stderr)
+
+ def get_text_stdin(encoding=None, errors=None):
+ rv = _get_windows_console_stream(sys.stdin, encoding, errors)
+ if rv is not None:
+ return rv
+ return _make_text_stream(sys.stdin, encoding, errors,
+ force_readable=True)
+
+ def get_text_stdout(encoding=None, errors=None):
+ _wrap_std_stream('stdout')
+ rv = _get_windows_console_stream(sys.stdout, encoding, errors)
+ if rv is not None:
+ return rv
+ return _make_text_stream(sys.stdout, encoding, errors,
+ force_writable=True)
+
+ def get_text_stderr(encoding=None, errors=None):
+ _wrap_std_stream('stderr')
+ rv = _get_windows_console_stream(sys.stderr, encoding, errors)
+ if rv is not None:
+ return rv
+ return _make_text_stream(sys.stderr, encoding, errors,
+ force_writable=True)
+
+ def filename_to_ui(value):
+ if isinstance(value, bytes):
+ value = value.decode(get_filesystem_encoding(), 'replace')
+ return value
+else:
+ import io
+ text_type = str
+ raw_input = input
+ string_types = (str,)
+ int_types = (int,)
+ range_type = range
+ isidentifier = lambda x: x.isidentifier()
+ iteritems = lambda x: iter(x.items())
+
+ def is_bytes(x):
+ return isinstance(x, (bytes, memoryview, bytearray))
+
+ def _is_binary_reader(stream, default=False):
+ try:
+ return isinstance(stream.read(0), bytes)
+ except Exception:
+ return default
+ # This happens in some cases where the stream was already
+ # closed. In this case, we assume the default.
+
+ def _is_binary_writer(stream, default=False):
+ try:
+ stream.write(b'')
+ except Exception:
+ try:
+ stream.write('')
+ return False
+ except Exception:
+ pass
+ return default
+ return True
+
+ def _find_binary_reader(stream):
+ # We need to figure out if the given stream is already binary.
+ # This can happen because the official docs recommend detaching
+ # the streams to get binary streams. Some code might do this, so
+ # we need to deal with this case explicitly.
+ if _is_binary_reader(stream, False):
+ return stream
+
+ buf = getattr(stream, 'buffer', None)
+
+ # Same situation here; this time we assume that the buffer is
+ # actually binary in case it's closed.
+ if buf is not None and _is_binary_reader(buf, True):
+ return buf
+
+ def _find_binary_writer(stream):
+ # We need to figure out if the given stream is already binary.
+        # This can happen because the official docs recommend detaching
+ # the streams to get binary streams. Some code might do this, so
+ # we need to deal with this case explicitly.
+ if _is_binary_writer(stream, False):
+ return stream
+
+ buf = getattr(stream, 'buffer', None)
+
+ # Same situation here; this time we assume that the buffer is
+ # actually binary in case it's closed.
+ if buf is not None and _is_binary_writer(buf, True):
+ return buf
+
+ def _stream_is_misconfigured(stream):
+ """A stream is misconfigured if its encoding is ASCII."""
+ # If the stream does not have an encoding set, we assume it's set
+ # to ASCII. This appears to happen in certain unittest
+ # environments. It's not quite clear what the correct behavior is
+ # but this at least will force Click to recover somehow.
+ return is_ascii_encoding(getattr(stream, 'encoding', None) or 'ascii')
+
+ def _is_compatible_text_stream(stream, encoding, errors):
+ stream_encoding = getattr(stream, 'encoding', None)
+ stream_errors = getattr(stream, 'errors', None)
+
+ # Perfect match.
+ if stream_encoding == encoding and stream_errors == errors:
+ return True
+
+ # Otherwise, it's only a compatible stream if we did not ask for
+ # an encoding.
+ if encoding is None:
+ return stream_encoding is not None
+
+ return False
+
+ def _force_correct_text_reader(text_reader, encoding, errors,
+ force_readable=False):
+ if _is_binary_reader(text_reader, False):
+ binary_reader = text_reader
+ else:
+ # If there is no target encoding set, we need to verify that the
+ # reader is not actually misconfigured.
+ if encoding is None and not _stream_is_misconfigured(text_reader):
+ return text_reader
+
+ if _is_compatible_text_stream(text_reader, encoding, errors):
+ return text_reader
+
+ # If the reader has no encoding, we try to find the underlying
+ # binary reader for it. If that fails because the environment is
+ # misconfigured, we silently go with the same reader because this
+ # is too common to happen. In that case, mojibake is better than
+ # exceptions.
+ binary_reader = _find_binary_reader(text_reader)
+ if binary_reader is None:
+ return text_reader
+
+ # At this point, we default the errors to replace instead of strict
+ # because nobody handles those errors anyways and at this point
+ # we're so fundamentally fucked that nothing can repair it.
+ if errors is None:
+ errors = 'replace'
+ return _make_text_stream(binary_reader, encoding, errors,
+ force_readable=force_readable)
+
+ def _force_correct_text_writer(text_writer, encoding, errors,
+ force_writable=False):
+ if _is_binary_writer(text_writer, False):
+ binary_writer = text_writer
+ else:
+ # If there is no target encoding set, we need to verify that the
+ # writer is not actually misconfigured.
+ if encoding is None and not _stream_is_misconfigured(text_writer):
+ return text_writer
+
+ if _is_compatible_text_stream(text_writer, encoding, errors):
+ return text_writer
+
+ # If the writer has no encoding, we try to find the underlying
+ # binary writer for it. If that fails because the environment is
+ # misconfigured, we silently go with the same writer because this
+ # is too common to happen. In that case, mojibake is better than
+ # exceptions.
+ binary_writer = _find_binary_writer(text_writer)
+ if binary_writer is None:
+ return text_writer
+
+ # At this point, we default the errors to replace instead of strict
+ # because nobody handles those errors anyways and at this point
+ # we're so fundamentally fucked that nothing can repair it.
+ if errors is None:
+ errors = 'replace'
+ return _make_text_stream(binary_writer, encoding, errors,
+ force_writable=force_writable)
+
+ def get_binary_stdin():
+ reader = _find_binary_reader(sys.stdin)
+ if reader is None:
+ raise RuntimeError('Was not able to determine binary '
+ 'stream for sys.stdin.')
+ return reader
+
+ def get_binary_stdout():
+ writer = _find_binary_writer(sys.stdout)
+ if writer is None:
+ raise RuntimeError('Was not able to determine binary '
+ 'stream for sys.stdout.')
+ return writer
+
+ def get_binary_stderr():
+ writer = _find_binary_writer(sys.stderr)
+ if writer is None:
+ raise RuntimeError('Was not able to determine binary '
+ 'stream for sys.stderr.')
+ return writer
+
+ def get_text_stdin(encoding=None, errors=None):
+ rv = _get_windows_console_stream(sys.stdin, encoding, errors)
+ if rv is not None:
+ return rv
+ return _force_correct_text_reader(sys.stdin, encoding, errors,
+ force_readable=True)
+
+ def get_text_stdout(encoding=None, errors=None):
+ rv = _get_windows_console_stream(sys.stdout, encoding, errors)
+ if rv is not None:
+ return rv
+ return _force_correct_text_writer(sys.stdout, encoding, errors,
+ force_writable=True)
+
+ def get_text_stderr(encoding=None, errors=None):
+ rv = _get_windows_console_stream(sys.stderr, encoding, errors)
+ if rv is not None:
+ return rv
+ return _force_correct_text_writer(sys.stderr, encoding, errors,
+ force_writable=True)
+
+ def filename_to_ui(value):
+ if isinstance(value, bytes):
+ value = value.decode(get_filesystem_encoding(), 'replace')
+ else:
+ value = value.encode('utf-8', 'surrogateescape') \
+ .decode('utf-8', 'replace')
+ return value
+
+
+def get_streerror(e, default=None):
+ if hasattr(e, 'strerror'):
+ msg = e.strerror
+ else:
+ if default is not None:
+ msg = default
+ else:
+ msg = str(e)
+ if isinstance(msg, bytes):
+ msg = msg.decode('utf-8', 'replace')
+ return msg
+
+
+def open_stream(filename, mode='r', encoding=None, errors='strict',
+ atomic=False):
+ # Standard streams first. These are simple because they don't need
+ # special handling for the atomic flag. It's entirely ignored.
+ if filename == '-':
+ if any(m in mode for m in ['w', 'a', 'x']):
+ if 'b' in mode:
+ return get_binary_stdout(), False
+ return get_text_stdout(encoding=encoding, errors=errors), False
+ if 'b' in mode:
+ return get_binary_stdin(), False
+ return get_text_stdin(encoding=encoding, errors=errors), False
+
+ # Non-atomic writes directly go out through the regular open functions.
+ if not atomic:
+ if encoding is None:
+ return open(filename, mode), True
+ return io.open(filename, mode, encoding=encoding, errors=errors), True
+
+ # Some usability stuff for atomic writes
+ if 'a' in mode:
+ raise ValueError(
+ 'Appending to an existing file is not supported, because that '
+ 'would involve an expensive `copy`-operation to a temporary '
+ 'file. Open the file in normal `w`-mode and copy explicitly '
+ 'if that\'s what you\'re after.'
+ )
+ if 'x' in mode:
+ raise ValueError('Use the `overwrite`-parameter instead.')
+ if 'w' not in mode:
+ raise ValueError('Atomic writes only make sense with `w`-mode.')
+
+ # Atomic writes are more complicated. They work by opening a file
+ # as a proxy in the same folder and then using the fdopen
+ # functionality to wrap it in a Python file. Then we wrap it in an
+ # atomic file that moves the file over on close.
+ import tempfile
+ fd, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename),
+ prefix='.__atomic-write')
+
+ if encoding is not None:
+ f = io.open(fd, mode, encoding=encoding, errors=errors)
+ else:
+ f = os.fdopen(fd, mode)
+
+ return _AtomicFile(f, tmp_filename, os.path.realpath(filename)), True
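+
+# A minimal usage sketch for the atomic path (hypothetical caller code, not
+# part of this module):
+#
+#   f, should_close = open_stream('out.txt', 'w', atomic=True)
+#   try:
+#       f.write('contents')
+#   finally:
+#       if should_close:
+#           f.close()  # _AtomicFile.close() moves the temp file over 'out.txt'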
+
+
+# Used in a destructor call, needs extra protection from interpreter cleanup.
+if hasattr(os, 'replace'):
+ _replace = os.replace
+ _can_replace = True
+else:
+ _replace = os.rename
+ _can_replace = not WIN
+
+
+class _AtomicFile(object):
+
+ def __init__(self, f, tmp_filename, real_filename):
+ self._f = f
+ self._tmp_filename = tmp_filename
+ self._real_filename = real_filename
+ self.closed = False
+
+ @property
+ def name(self):
+ return self._real_filename
+
+ def close(self, delete=False):
+ if self.closed:
+ return
+ self._f.close()
+ if not _can_replace:
+ try:
+ os.remove(self._real_filename)
+ except OSError:
+ pass
+ _replace(self._tmp_filename, self._real_filename)
+ self.closed = True
+
+ def __getattr__(self, name):
+ return getattr(self._f, name)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ self.close(delete=exc_type is not None)
+
+ def __repr__(self):
+ return repr(self._f)
+
+
+auto_wrap_for_ansi = None
+colorama = None
+get_winterm_size = None
+
+
+def strip_ansi(value):
+ return _ansi_re.sub('', value)
+
+
+def should_strip_ansi(stream=None, color=None):
+ if color is None:
+ if stream is None:
+ stream = sys.stdin
+ return not isatty(stream)
+ return not color
+
+
+# If we're on Windows, we provide transparent integration through
+# colorama. This will make ANSI colors through the echo function
+# work automatically.
+if WIN:
+ # Windows has a smaller terminal
+ DEFAULT_COLUMNS = 79
+
+ from ._winconsole import _get_windows_console_stream, _wrap_std_stream
+
+ def _get_argv_encoding():
+ import locale
+ return locale.getpreferredencoding()
+
+ if PY2:
+ def raw_input(prompt=''):
+ sys.stderr.flush()
+ if prompt:
+ stdout = _default_text_stdout()
+ stdout.write(prompt)
+ stdin = _default_text_stdin()
+ return stdin.readline().rstrip('\r\n')
+
+ try:
+ import colorama
+ except ImportError:
+ pass
+ else:
+ _ansi_stream_wrappers = WeakKeyDictionary()
+
+ def auto_wrap_for_ansi(stream, color=None):
+ """This function wraps a stream so that calls through colorama
+ are issued to the win32 console API to recolor on demand. It
+            also ensures that the colors are reset if a write call is
+            interrupted, so the console is not left in a broken state.
+ """
+ try:
+ cached = _ansi_stream_wrappers.get(stream)
+ except Exception:
+ cached = None
+ if cached is not None:
+ return cached
+ strip = should_strip_ansi(stream, color)
+ ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip)
+ rv = ansi_wrapper.stream
+ _write = rv.write
+
+ def _safe_write(s):
+ try:
+ return _write(s)
+ except:
+ ansi_wrapper.reset_all()
+ raise
+
+ rv.write = _safe_write
+ try:
+ _ansi_stream_wrappers[stream] = rv
+ except Exception:
+ pass
+ return rv
+
+ def get_winterm_size():
+ win = colorama.win32.GetConsoleScreenBufferInfo(
+ colorama.win32.STDOUT).srWindow
+ return win.Right - win.Left, win.Bottom - win.Top
+else:
+ def _get_argv_encoding():
+ return getattr(sys.stdin, 'encoding', None) or get_filesystem_encoding()
+
+ _get_windows_console_stream = lambda *x: None
+ _wrap_std_stream = lambda *x: None
+
+
+def term_len(x):
+ return len(strip_ansi(x))
+
+
+def isatty(stream):
+ try:
+ return stream.isatty()
+ except Exception:
+ return False
+
+
+def _make_cached_stream_func(src_func, wrapper_func):
+ cache = WeakKeyDictionary()
+ def func():
+ stream = src_func()
+ try:
+ rv = cache.get(stream)
+ except Exception:
+ rv = None
+ if rv is not None:
+ return rv
+ rv = wrapper_func()
+ try:
+ stream = src_func() # In case wrapper_func() modified the stream
+ cache[stream] = rv
+ except Exception:
+ pass
+ return rv
+ return func
+
+
+_default_text_stdin = _make_cached_stream_func(
+ lambda: sys.stdin, get_text_stdin)
+_default_text_stdout = _make_cached_stream_func(
+ lambda: sys.stdout, get_text_stdout)
+_default_text_stderr = _make_cached_stream_func(
+ lambda: sys.stderr, get_text_stderr)
+
+
+binary_streams = {
+ 'stdin': get_binary_stdin,
+ 'stdout': get_binary_stdout,
+ 'stderr': get_binary_stderr,
+}
+
+text_streams = {
+ 'stdin': get_text_stdin,
+ 'stdout': get_text_stdout,
+ 'stderr': get_text_stderr,
+}
diff --git a/third_party/python/Click/click/_termui_impl.py b/third_party/python/Click/click/_termui_impl.py
new file mode 100644
index 0000000000..00a8e5ef1c
--- /dev/null
+++ b/third_party/python/Click/click/_termui_impl.py
@@ -0,0 +1,621 @@
+# -*- coding: utf-8 -*-
+"""
+click._termui_impl
+~~~~~~~~~~~~~~~~~~
+
+This module contains implementations for the termui module. To keep the
+import time of Click down, some infrequently used functionality is
+placed in this module and only imported as needed.
+
+:copyright: © 2014 by the Pallets team.
+:license: BSD, see LICENSE.rst for more details.
+"""
+
+import os
+import sys
+import time
+import math
+import contextlib
+from ._compat import _default_text_stdout, range_type, PY2, isatty, \
+ open_stream, strip_ansi, term_len, get_best_encoding, WIN, int_types, \
+ CYGWIN
+from .utils import echo
+from .exceptions import ClickException
+
+
+if os.name == 'nt':
+ BEFORE_BAR = '\r'
+ AFTER_BAR = '\n'
+else:
+ BEFORE_BAR = '\r\033[?25l'
+ AFTER_BAR = '\033[?25h\n'
+
+
+def _length_hint(obj):
+ """Returns the length hint of an object."""
+ try:
+ return len(obj)
+ except (AttributeError, TypeError):
+ try:
+ get_hint = type(obj).__length_hint__
+ except AttributeError:
+ return None
+ try:
+ hint = get_hint(obj)
+ except TypeError:
+ return None
+ if hint is NotImplemented or \
+ not isinstance(hint, int_types) or \
+ hint < 0:
+ return None
+ return hint
+
+
+class ProgressBar(object):
+
+ def __init__(self, iterable, length=None, fill_char='#', empty_char=' ',
+ bar_template='%(bar)s', info_sep=' ', show_eta=True,
+ show_percent=None, show_pos=False, item_show_func=None,
+ label=None, file=None, color=None, width=30):
+ self.fill_char = fill_char
+ self.empty_char = empty_char
+ self.bar_template = bar_template
+ self.info_sep = info_sep
+ self.show_eta = show_eta
+ self.show_percent = show_percent
+ self.show_pos = show_pos
+ self.item_show_func = item_show_func
+ self.label = label or ''
+ if file is None:
+ file = _default_text_stdout()
+ self.file = file
+ self.color = color
+ self.width = width
+ self.autowidth = width == 0
+
+ if length is None:
+ length = _length_hint(iterable)
+ if iterable is None:
+ if length is None:
+ raise TypeError('iterable or length is required')
+ iterable = range_type(length)
+ self.iter = iter(iterable)
+ self.length = length
+ self.length_known = length is not None
+ self.pos = 0
+ self.avg = []
+ self.start = self.last_eta = time.time()
+ self.eta_known = False
+ self.finished = False
+ self.max_width = None
+ self.entered = False
+ self.current_item = None
+ self.is_hidden = not isatty(self.file)
+ self._last_line = None
+ self.short_limit = 0.5
+
+ def __enter__(self):
+ self.entered = True
+ self.render_progress()
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ self.render_finish()
+
+ def __iter__(self):
+ if not self.entered:
+ raise RuntimeError('You need to use progress bars in a with block.')
+ self.render_progress()
+ return self.generator()
+
+ def is_fast(self):
+ return time.time() - self.start <= self.short_limit
+
+ def render_finish(self):
+ if self.is_hidden or self.is_fast():
+ return
+ self.file.write(AFTER_BAR)
+ self.file.flush()
+
+ @property
+ def pct(self):
+ if self.finished:
+ return 1.0
+ return min(self.pos / (float(self.length) or 1), 1.0)
+
+ @property
+ def time_per_iteration(self):
+ if not self.avg:
+ return 0.0
+ return sum(self.avg) / float(len(self.avg))
+
+ @property
+ def eta(self):
+ if self.length_known and not self.finished:
+ return self.time_per_iteration * (self.length - self.pos)
+ return 0.0
+
+ def format_eta(self):
+ if self.eta_known:
+ t = int(self.eta)
+ seconds = t % 60
+ t //= 60
+ minutes = t % 60
+ t //= 60
+ hours = t % 24
+ t //= 24
+ if t > 0:
+ days = t
+ return '%dd %02d:%02d:%02d' % (days, hours, minutes, seconds)
+ else:
+ return '%02d:%02d:%02d' % (hours, minutes, seconds)
+ return ''
+
+ def format_pos(self):
+ pos = str(self.pos)
+ if self.length_known:
+ pos += '/%s' % self.length
+ return pos
+
+ def format_pct(self):
+ return ('% 4d%%' % int(self.pct * 100))[1:]
+
+ def format_bar(self):
+ if self.length_known:
+ bar_length = int(self.pct * self.width)
+ bar = self.fill_char * bar_length
+ bar += self.empty_char * (self.width - bar_length)
+ elif self.finished:
+ bar = self.fill_char * self.width
+ else:
+ bar = list(self.empty_char * (self.width or 1))
+ if self.time_per_iteration != 0:
+ bar[int((math.cos(self.pos * self.time_per_iteration)
+ / 2.0 + 0.5) * self.width)] = self.fill_char
+ bar = ''.join(bar)
+ return bar
+
+ def format_progress_line(self):
+ show_percent = self.show_percent
+
+ info_bits = []
+ if self.length_known and show_percent is None:
+ show_percent = not self.show_pos
+
+ if self.show_pos:
+ info_bits.append(self.format_pos())
+ if show_percent:
+ info_bits.append(self.format_pct())
+ if self.show_eta and self.eta_known and not self.finished:
+ info_bits.append(self.format_eta())
+ if self.item_show_func is not None:
+ item_info = self.item_show_func(self.current_item)
+ if item_info is not None:
+ info_bits.append(item_info)
+
+ return (self.bar_template % {
+ 'label': self.label,
+ 'bar': self.format_bar(),
+ 'info': self.info_sep.join(info_bits)
+ }).rstrip()
+
+ def render_progress(self):
+ from .termui import get_terminal_size
+
+ if self.is_hidden:
+ return
+
+ buf = []
+ # Update width in case the terminal has been resized
+ if self.autowidth:
+ old_width = self.width
+ self.width = 0
+ clutter_length = term_len(self.format_progress_line())
+ new_width = max(0, get_terminal_size()[0] - clutter_length)
+ if new_width < old_width:
+ buf.append(BEFORE_BAR)
+ buf.append(' ' * self.max_width)
+ self.max_width = new_width
+ self.width = new_width
+
+ clear_width = self.width
+ if self.max_width is not None:
+ clear_width = self.max_width
+
+ buf.append(BEFORE_BAR)
+ line = self.format_progress_line()
+ line_len = term_len(line)
+ if self.max_width is None or self.max_width < line_len:
+ self.max_width = line_len
+
+ buf.append(line)
+ buf.append(' ' * (clear_width - line_len))
+ line = ''.join(buf)
+ # Render the line only if it changed.
+
+ if line != self._last_line and not self.is_fast():
+ self._last_line = line
+ echo(line, file=self.file, color=self.color, nl=False)
+ self.file.flush()
+
+ def make_step(self, n_steps):
+ self.pos += n_steps
+ if self.length_known and self.pos >= self.length:
+ self.finished = True
+
+ if (time.time() - self.last_eta) < 1.0:
+ return
+
+ self.last_eta = time.time()
+
+        # self.avg is a rolling list of at most 7 samples, where each sample
+        # is the total time elapsed divided by the number of steps completed
+        # so far (self.pos).
+ if self.pos:
+ step = (time.time() - self.start) / self.pos
+ else:
+ step = time.time() - self.start
+
+ self.avg = self.avg[-6:] + [step]
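+        # Worked example (illustrative numbers only): 10 completed steps in
+        # 5 seconds gives step == 0.5s, so with 90 steps remaining the ETA
+        # reported via format_eta() would be roughly 45 seconds.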
+
+ self.eta_known = self.length_known
+
+ def update(self, n_steps):
+ self.make_step(n_steps)
+ self.render_progress()
+
+ def finish(self):
+ self.eta_known = 0
+ self.current_item = None
+ self.finished = True
+
+ def generator(self):
+ """
+ Returns a generator which yields the items added to the bar during
+ construction, and updates the progress bar *after* the yielded block
+ returns.
+ """
+ if not self.entered:
+ raise RuntimeError('You need to use progress bars in a with block.')
+
+ if self.is_hidden:
+ for rv in self.iter:
+ yield rv
+ else:
+ for rv in self.iter:
+ self.current_item = rv
+ yield rv
+ self.update(1)
+ self.finish()
+ self.render_progress()
+
+
+def pager(generator, color=None):
+ """Decide what method to use for paging through text."""
+ stdout = _default_text_stdout()
+ if not isatty(sys.stdin) or not isatty(stdout):
+ return _nullpager(stdout, generator, color)
+ pager_cmd = (os.environ.get('PAGER', None) or '').strip()
+ if pager_cmd:
+ if WIN:
+ return _tempfilepager(generator, pager_cmd, color)
+ return _pipepager(generator, pager_cmd, color)
+ if os.environ.get('TERM') in ('dumb', 'emacs'):
+ return _nullpager(stdout, generator, color)
+ if WIN or sys.platform.startswith('os2'):
+ return _tempfilepager(generator, 'more <', color)
+ if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
+ return _pipepager(generator, 'less', color)
+
+ import tempfile
+ fd, filename = tempfile.mkstemp()
+ os.close(fd)
+ try:
+ if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0:
+ return _pipepager(generator, 'more', color)
+ return _nullpager(stdout, generator, color)
+ finally:
+ os.unlink(filename)
+
+
+def _pipepager(generator, cmd, color):
+ """Page through text by feeding it to another program. Invoking a
+ pager through this might support colors.
+ """
+ import subprocess
+ env = dict(os.environ)
+
+    # If we're piping to less we might support colors under the
+    # condition that less is (or can be) invoked with the -r/-R flag.
+ cmd_detail = cmd.rsplit('/', 1)[-1].split()
+ if color is None and cmd_detail[0] == 'less':
+ less_flags = os.environ.get('LESS', '') + ' '.join(cmd_detail[1:])
+ if not less_flags:
+ env['LESS'] = '-R'
+ color = True
+ elif 'r' in less_flags or 'R' in less_flags:
+ color = True
+
+ c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
+ env=env)
+ encoding = get_best_encoding(c.stdin)
+ try:
+ for text in generator:
+ if not color:
+ text = strip_ansi(text)
+
+ c.stdin.write(text.encode(encoding, 'replace'))
+ except (IOError, KeyboardInterrupt):
+ pass
+ else:
+ c.stdin.close()
+
+ # Less doesn't respect ^C, but catches it for its own UI purposes (aborting
+ # search or other commands inside less).
+ #
+ # That means when the user hits ^C, the parent process (click) terminates,
+ # but less is still alive, paging the output and messing up the terminal.
+ #
+ # If the user wants to make the pager exit on ^C, they should set
+ # `LESS='-K'`. It's not our decision to make.
+ while True:
+ try:
+ c.wait()
+ except KeyboardInterrupt:
+ pass
+ else:
+ break
+
+
+def _tempfilepager(generator, cmd, color):
+ """Page through text by invoking a program on a temporary file."""
+ import tempfile
+ filename = tempfile.mktemp()
+ # TODO: This never terminates if the passed generator never terminates.
+ text = "".join(generator)
+ if not color:
+ text = strip_ansi(text)
+ encoding = get_best_encoding(sys.stdout)
+ with open_stream(filename, 'wb')[0] as f:
+ f.write(text.encode(encoding))
+ try:
+ os.system(cmd + ' "' + filename + '"')
+ finally:
+ os.unlink(filename)
+
+
+def _nullpager(stream, generator, color):
+ """Simply print unformatted text. This is the ultimate fallback."""
+ for text in generator:
+ if not color:
+ text = strip_ansi(text)
+ stream.write(text)
+
+
+class Editor(object):
+
+ def __init__(self, editor=None, env=None, require_save=True,
+ extension='.txt'):
+ self.editor = editor
+ self.env = env
+ self.require_save = require_save
+ self.extension = extension
+
+ def get_editor(self):
+ if self.editor is not None:
+ return self.editor
+ for key in 'VISUAL', 'EDITOR':
+ rv = os.environ.get(key)
+ if rv:
+ return rv
+ if WIN:
+ return 'notepad'
+ for editor in 'vim', 'nano':
+ if os.system('which %s >/dev/null 2>&1' % editor) == 0:
+ return editor
+ return 'vi'
+
+ def edit_file(self, filename):
+ import subprocess
+ editor = self.get_editor()
+ if self.env:
+ environ = os.environ.copy()
+ environ.update(self.env)
+ else:
+ environ = None
+ try:
+ c = subprocess.Popen('%s "%s"' % (editor, filename),
+ env=environ, shell=True)
+ exit_code = c.wait()
+ if exit_code != 0:
+ raise ClickException('%s: Editing failed!' % editor)
+ except OSError as e:
+ raise ClickException('%s: Editing failed: %s' % (editor, e))
+
+ def edit(self, text):
+ import tempfile
+
+ text = text or ''
+ if text and not text.endswith('\n'):
+ text += '\n'
+
+ fd, name = tempfile.mkstemp(prefix='editor-', suffix=self.extension)
+ try:
+ if WIN:
+ encoding = 'utf-8-sig'
+ text = text.replace('\n', '\r\n')
+ else:
+ encoding = 'utf-8'
+ text = text.encode(encoding)
+
+ f = os.fdopen(fd, 'wb')
+ f.write(text)
+ f.close()
+ timestamp = os.path.getmtime(name)
+
+ self.edit_file(name)
+
+ if self.require_save \
+ and os.path.getmtime(name) == timestamp:
+ return None
+
+ f = open(name, 'rb')
+ try:
+ rv = f.read()
+ finally:
+ f.close()
+ return rv.decode('utf-8-sig').replace('\r\n', '\n')
+ finally:
+ os.unlink(name)
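+
+# Usage sketch: this Editor class is what the public click.edit() helper
+# builds on; a typical call from application code (``handle_message`` is a
+# hypothetical callback) looks like:
+#
+#     import click
+#     message = click.edit('# Describe your change\n')
+#     if message is not None:  # None means the file was not saved again
+#         handle_message(message)
+#
+# VISUAL/EDITOR select the editor, falling back to notepad or vi as above.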
+
+
+def open_url(url, wait=False, locate=False):
+ import subprocess
+
+ def _unquote_file(url):
+ try:
+ from urllib.parse import unquote  # Python 3
+ except ImportError:
+ from urllib import unquote  # Python 2
+ if url.startswith('file://'):
+ url = unquote(url[7:])
+ return url
+
+ if sys.platform == 'darwin':
+ args = ['open']
+ if wait:
+ args.append('-W')
+ if locate:
+ args.append('-R')
+ args.append(_unquote_file(url))
+ null = open('/dev/null', 'w')
+ try:
+ return subprocess.Popen(args, stderr=null).wait()
+ finally:
+ null.close()
+ elif WIN:
+ if locate:
+ url = _unquote_file(url)
+ args = 'explorer /select,"%s"' % _unquote_file(
+ url.replace('"', ''))
+ else:
+ args = 'start %s "" "%s"' % (
+ wait and '/WAIT' or '', url.replace('"', ''))
+ return os.system(args)
+ elif CYGWIN:
+ if locate:
+ url = _unquote_file(url)
+ args = 'cygstart "%s"' % (os.path.dirname(url).replace('"', ''))
+ else:
+ args = 'cygstart %s "%s"' % (
+ wait and '-w' or '', url.replace('"', ''))
+ return os.system(args)
+
+ try:
+ if locate:
+ url = os.path.dirname(_unquote_file(url)) or '.'
+ else:
+ url = _unquote_file(url)
+ c = subprocess.Popen(['xdg-open', url])
+ if wait:
+ return c.wait()
+ return 0
+ except OSError:
+ if url.startswith(('http://', 'https://')) and not locate and not wait:
+ import webbrowser
+ webbrowser.open(url)
+ return 0
+ return 1
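+
+# Usage sketch: open_url() backs the public click.launch() helper, e.g.
+#
+#     import click
+#     click.launch('https://click.palletsprojects.com/')     # open in browser
+#     click.launch('/path/to/some/report.txt', locate=True)  # reveal in the
+#                                                             # file manager
+#
+# (the file path above is only an illustrative placeholder)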
+
+
+def _translate_ch_to_exc(ch):
+ if ch == u'\x03':
+ raise KeyboardInterrupt()
+ if ch == u'\x04' and not WIN: # Unix-like, Ctrl+D
+ raise EOFError()
+ if ch == u'\x1a' and WIN: # Windows, Ctrl+Z
+ raise EOFError()
+
+
+if WIN:
+ import msvcrt
+
+ @contextlib.contextmanager
+ def raw_terminal():
+ yield
+
+ def getchar(echo):
+ # The function `getch` will return a bytes object corresponding to
+ # the pressed character. Since Windows 10 build 1803, it will also
+ # return \x00 when called a second time after pressing a regular key.
+ #
+ # `getwch` does not share this probably-bugged behavior. Moreover, it
+ # returns a Unicode object by default, which is what we want.
+ #
+ # Either of these functions will return \x00 or \xe0 to indicate
+ # a special key, and you need to call the same function again to get
+ # the "rest" of the code. The fun part is that \u00e0 is
+ # "latin small letter a with grave", so if you type that on a French
+ # keyboard, you _also_ get a \xe0.
+ # E.g., consider the Up arrow. This returns \xe0 and then \x48. The
+ # resulting Unicode string reads as "a with grave" + "capital H".
+ # This is indistinguishable from when the user actually types
+ # "a with grave" and then "capital H".
+ #
+ # When \xe0 is returned, we assume it's part of a special-key sequence
+ # and call `getwch` again, but that means that when the user types
+ # the \u00e0 character, `getchar` doesn't return until a second
+ # character is typed.
+ # The alternative is returning immediately, but that would mess up
+ # cross-platform handling of arrow keys and others that start with
+ # \xe0. Another option is using `getch`, but then we can't reliably
+ # read non-ASCII characters, because return values of `getch` are
+ # limited to the current 8-bit codepage.
+ #
+ # Anyway, Click doesn't claim to do this Right(tm), and using `getwch`
+ # is doing the right thing in more situations than with `getch`.
+ if echo:
+ func = msvcrt.getwche
+ else:
+ func = msvcrt.getwch
+
+ rv = func()
+ if rv in (u'\x00', u'\xe0'):
+ # \x00 and \xe0 are control characters that indicate special key,
+ # see above.
+ rv += func()
+ _translate_ch_to_exc(rv)
+ return rv
+else:
+ import tty
+ import termios
+
+ @contextlib.contextmanager
+ def raw_terminal():
+ if not isatty(sys.stdin):
+ f = open('/dev/tty')
+ fd = f.fileno()
+ else:
+ fd = sys.stdin.fileno()
+ f = None
+ try:
+ old_settings = termios.tcgetattr(fd)
+ try:
+ tty.setraw(fd)
+ yield fd
+ finally:
+ termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+ sys.stdout.flush()
+ if f is not None:
+ f.close()
+ except termios.error:
+ pass
+
+ def getchar(echo):
+ with raw_terminal() as fd:
+ ch = os.read(fd, 32)
+ ch = ch.decode(get_best_encoding(sys.stdin), 'replace')
+ if echo and isatty(sys.stdout):
+ sys.stdout.write(ch)
+ _translate_ch_to_exc(ch)
+ return ch
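+
+# Usage sketch: the platform-specific getchar() implementations above are
+# exposed through the public click.getchar() helper, e.g.
+#
+#     import click
+#     click.echo('Continue? [y/N] ', nl=False)
+#     ch = click.getchar()
+#     click.echo(ch)
+#     if ch.lower() != 'y':
+#         raise SystemExit(1)
+#
+# Ctrl+C and Ctrl+D (Ctrl+Z on Windows) surface as KeyboardInterrupt and
+# EOFError via _translate_ch_to_exc() above.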
diff --git a/third_party/python/Click/click/_textwrap.py b/third_party/python/Click/click/_textwrap.py
new file mode 100644
index 0000000000..7e776031ea
--- /dev/null
+++ b/third_party/python/Click/click/_textwrap.py
@@ -0,0 +1,38 @@
+import textwrap
+from contextlib import contextmanager
+
+
+class TextWrapper(textwrap.TextWrapper):
+
+ def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
+ space_left = max(width - cur_len, 1)
+
+ if self.break_long_words:
+ last = reversed_chunks[-1]
+ cut = last[:space_left]
+ res = last[space_left:]
+ cur_line.append(cut)
+ reversed_chunks[-1] = res
+ elif not cur_line:
+ cur_line.append(reversed_chunks.pop())
+
+ @contextmanager
+ def extra_indent(self, indent):
+ old_initial_indent = self.initial_indent
+ old_subsequent_indent = self.subsequent_indent
+ self.initial_indent += indent
+ self.subsequent_indent += indent
+ try:
+ yield
+ finally:
+ self.initial_indent = old_initial_indent
+ self.subsequent_indent = old_subsequent_indent
+
+ def indent_only(self, text):
+ rv = []
+ for idx, line in enumerate(text.splitlines()):
+ indent = self.initial_indent
+ if idx > 0:
+ indent = self.subsequent_indent
+ rv.append(indent + line)
+ return '\n'.join(rv)
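+
+# Usage sketch for the wrapper above (a minimal example; Click's help
+# formatting code is the real consumer of these helpers):
+#
+#     wrapper = TextWrapper(width=30)
+#     with wrapper.extra_indent('  '):
+#         print(wrapper.fill('some long help text that needs to be wrapped'))
+#     print(wrapper.indent_only('already\nwrapped\nlines'))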
diff --git a/third_party/python/Click/click/_unicodefun.py b/third_party/python/Click/click/_unicodefun.py
new file mode 100644
index 0000000000..620edff37e
--- /dev/null
+++ b/third_party/python/Click/click/_unicodefun.py
@@ -0,0 +1,125 @@
+import os
+import sys
+import codecs
+
+from ._compat import PY2
+
+
+# If someone wants to vendor click, we want to ensure the
+# correct package is discovered. Ideally we could use a
+# relative import here but unfortunately Python does not
+# support that.
+click = sys.modules[__name__.rsplit('.', 1)[0]]
+
+
+def _find_unicode_literals_frame():
+ import __future__
+ if not hasattr(sys, '_getframe'): # not all Python implementations have it
+ return 0
+ frm = sys._getframe(1)
+ idx = 1
+ while frm is not None:
+ if frm.f_globals.get('__name__', '').startswith('click.'):
+ frm = frm.f_back
+ idx += 1
+ elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag:
+ return idx
+ else:
+ break
+ return 0
+
+
+def _check_for_unicode_literals():
+ if not __debug__:
+ return
+ if not PY2 or click.disable_unicode_literals_warning:
+ return
+ bad_frame = _find_unicode_literals_frame()
+ if bad_frame <= 0:
+ return
+ from warnings import warn
+ warn(Warning('Click detected the use of the unicode_literals '
+ '__future__ import. This is heavily discouraged '
+ 'because it can introduce subtle bugs in your '
+ 'code. You should instead use explicit u"" literals '
+ 'for your unicode strings. For more information see '
+ 'https://click.palletsprojects.com/python3/'),
+ stacklevel=bad_frame)
+
+
+def _verify_python3_env():
+ """Ensures that the environment is good for unicode on Python 3."""
+ if PY2:
+ return
+ try:
+ import locale
+ fs_enc = codecs.lookup(locale.getpreferredencoding()).name
+ except Exception:
+ fs_enc = 'ascii'
+ if fs_enc != 'ascii':
+ return
+
+ extra = ''
+ if os.name == 'posix':
+ import subprocess
+ try:
+ rv = subprocess.Popen(['locale', '-a'], stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE).communicate()[0]
+ except OSError:
+ rv = b''
+ good_locales = set()
+ has_c_utf8 = False
+
+ # Make sure we're operating on text here.
+ if isinstance(rv, bytes):
+ rv = rv.decode('ascii', 'replace')
+
+ for line in rv.splitlines():
+ locale = line.strip()
+ if locale.lower().endswith(('.utf-8', '.utf8')):
+ good_locales.add(locale)
+ if locale.lower() in ('c.utf8', 'c.utf-8'):
+ has_c_utf8 = True
+
+ extra += '\n\n'
+ if not good_locales:
+ extra += (
+ 'Additional information: on this system no suitable UTF-8\n'
+ 'locales were discovered. This most likely requires resolving\n'
+ 'by reconfiguring the locale system.'
+ )
+ elif has_c_utf8:
+ extra += (
+ 'This system supports the C.UTF-8 locale which is recommended.\n'
+ 'You might be able to resolve your issue by exporting the\n'
+ 'following environment variables:\n\n'
+ ' export LC_ALL=C.UTF-8\n'
+ ' export LANG=C.UTF-8'
+ )
+ else:
+ extra += (
+ 'This system lists a couple of UTF-8 supporting locales that\n'
+ 'you can pick from. The following suitable locales were\n'
+ 'discovered: %s'
+ ) % ', '.join(sorted(good_locales))
+
+ bad_locale = None
+ for locale in os.environ.get('LC_ALL'), os.environ.get('LANG'):
+ if locale and locale.lower().endswith(('.utf-8', '.utf8')):
+ bad_locale = locale
+ if locale is not None:
+ break
+ if bad_locale is not None:
+ extra += (
+ '\n\nClick discovered that you exported a UTF-8 locale\n'
+ 'but the locale system could not pick up from it because\n'
+ 'it does not exist. The exported locale is "%s" but it\n'
+ 'is not supported'
+ ) % bad_locale
+
+ raise RuntimeError(
+ 'Click will abort further execution because Python 3 was'
+ ' configured to use ASCII as encoding for the environment.'
+ ' Consult https://click.palletsprojects.com/en/7.x/python3/ for'
+ ' mitigation steps.' + extra
+ )
diff --git a/third_party/python/Click/click/_winconsole.py b/third_party/python/Click/click/_winconsole.py
new file mode 100644
index 0000000000..bbb080ddae
--- /dev/null
+++ b/third_party/python/Click/click/_winconsole.py
@@ -0,0 +1,307 @@
+# -*- coding: utf-8 -*-
+# This module is based on the excellent work by Adam Bartoš who
+# provided a lot of what went into the implementation here in
+# the discussion to issue1602 in the Python bug tracker.
+#
+# There are some general differences in how this works compared to the
+# original patches, as we do not need to patch the entire interpreter but
+# just work in our little world of echo and prompt.
+
+import io
+import os
+import sys
+import zlib
+import time
+import ctypes
+import msvcrt
+from ._compat import _NonClosingTextIOWrapper, text_type, PY2
+from ctypes import byref, POINTER, c_int, c_char, c_char_p, \
+ c_void_p, py_object, c_ssize_t, c_ulong, windll, WINFUNCTYPE
+try:
+ from ctypes import pythonapi
+ PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
+ PyBuffer_Release = pythonapi.PyBuffer_Release
+except ImportError:
+ pythonapi = None
+from ctypes.wintypes import LPWSTR, LPCWSTR
+
+
+c_ssize_p = POINTER(c_ssize_t)
+
+kernel32 = windll.kernel32
+GetStdHandle = kernel32.GetStdHandle
+ReadConsoleW = kernel32.ReadConsoleW
+WriteConsoleW = kernel32.WriteConsoleW
+GetLastError = kernel32.GetLastError
+GetCommandLineW = WINFUNCTYPE(LPWSTR)(
+ ('GetCommandLineW', windll.kernel32))
+CommandLineToArgvW = WINFUNCTYPE(
+ POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
+ ('CommandLineToArgvW', windll.shell32))
+
+
+STDIN_HANDLE = GetStdHandle(-10)
+STDOUT_HANDLE = GetStdHandle(-11)
+STDERR_HANDLE = GetStdHandle(-12)
+
+
+PyBUF_SIMPLE = 0
+PyBUF_WRITABLE = 1
+
+ERROR_SUCCESS = 0
+ERROR_NOT_ENOUGH_MEMORY = 8
+ERROR_OPERATION_ABORTED = 995
+
+STDIN_FILENO = 0
+STDOUT_FILENO = 1
+STDERR_FILENO = 2
+
+EOF = b'\x1a'
+MAX_BYTES_WRITTEN = 32767
+
+
+class Py_buffer(ctypes.Structure):
+ _fields_ = [
+ ('buf', c_void_p),
+ ('obj', py_object),
+ ('len', c_ssize_t),
+ ('itemsize', c_ssize_t),
+ ('readonly', c_int),
+ ('ndim', c_int),
+ ('format', c_char_p),
+ ('shape', c_ssize_p),
+ ('strides', c_ssize_p),
+ ('suboffsets', c_ssize_p),
+ ('internal', c_void_p)
+ ]
+
+ if PY2:
+ _fields_.insert(-1, ('smalltable', c_ssize_t * 2))
+
+
+# On PyPy we cannot get buffers, so our ability to operate here is
+# severely limited.
+if pythonapi is None:
+ get_buffer = None
+else:
+ def get_buffer(obj, writable=False):
+ buf = Py_buffer()
+ flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
+ PyObject_GetBuffer(py_object(obj), byref(buf), flags)
+ try:
+ buffer_type = c_char * buf.len
+ return buffer_type.from_address(buf.buf)
+ finally:
+ PyBuffer_Release(byref(buf))
+
+
+class _WindowsConsoleRawIOBase(io.RawIOBase):
+
+ def __init__(self, handle):
+ self.handle = handle
+
+ def isatty(self):
+ io.RawIOBase.isatty(self)
+ return True
+
+
+class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
+
+ def readable(self):
+ return True
+
+ def readinto(self, b):
+ bytes_to_be_read = len(b)
+ if not bytes_to_be_read:
+ return 0
+ elif bytes_to_be_read % 2:
+ raise ValueError('cannot read odd number of bytes from '
+ 'UTF-16-LE encoded console')
+
+ buffer = get_buffer(b, writable=True)
+ code_units_to_be_read = bytes_to_be_read // 2
+ code_units_read = c_ulong()
+
+ rv = ReadConsoleW(self.handle, buffer, code_units_to_be_read,
+ byref(code_units_read), None)
+ if GetLastError() == ERROR_OPERATION_ABORTED:
+ # wait for KeyboardInterrupt
+ time.sleep(0.1)
+ if not rv:
+ raise OSError('Windows error: %s' % GetLastError())
+
+ if buffer[0] == EOF:
+ return 0
+ return 2 * code_units_read.value
+
+
+class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
+
+ def writable(self):
+ return True
+
+ @staticmethod
+ def _get_error_message(errno):
+ if errno == ERROR_SUCCESS:
+ return 'ERROR_SUCCESS'
+ elif errno == ERROR_NOT_ENOUGH_MEMORY:
+ return 'ERROR_NOT_ENOUGH_MEMORY'
+ return 'Windows error %s' % errno
+
+ def write(self, b):
+ bytes_to_be_written = len(b)
+ buf = get_buffer(b)
+ code_units_to_be_written = min(bytes_to_be_written,
+ MAX_BYTES_WRITTEN) // 2
+ code_units_written = c_ulong()
+
+ WriteConsoleW(self.handle, buf, code_units_to_be_written,
+ byref(code_units_written), None)
+ bytes_written = 2 * code_units_written.value
+
+ if bytes_written == 0 and bytes_to_be_written > 0:
+ raise OSError(self._get_error_message(GetLastError()))
+ return bytes_written
+
+
+class ConsoleStream(object):
+
+ def __init__(self, text_stream, byte_stream):
+ self._text_stream = text_stream
+ self.buffer = byte_stream
+
+ @property
+ def name(self):
+ return self.buffer.name
+
+ def write(self, x):
+ if isinstance(x, text_type):
+ return self._text_stream.write(x)
+ try:
+ self.flush()
+ except Exception:
+ pass
+ return self.buffer.write(x)
+
+ def writelines(self, lines):
+ for line in lines:
+ self.write(line)
+
+ def __getattr__(self, name):
+ return getattr(self._text_stream, name)
+
+ def isatty(self):
+ return self.buffer.isatty()
+
+ def __repr__(self):
+ return '<ConsoleStream name=%r encoding=%r>' % (
+ self.name,
+ self.encoding,
+ )
+
+
+class WindowsChunkedWriter(object):
+ """
+ Wraps a stream (such as stdout), acting as a transparent proxy for all
+ attribute access apart from method 'write()' which we wrap to write in
+ limited chunks due to a Windows limitation on binary console streams.
+ """
+ def __init__(self, wrapped):
+ # double-underscore everything to prevent clashes with names of
+ # attributes on the wrapped stream object.
+ self.__wrapped = wrapped
+
+ def __getattr__(self, name):
+ return getattr(self.__wrapped, name)
+
+ def write(self, text):
+ total_to_write = len(text)
+ written = 0
+
+ while written < total_to_write:
+ to_write = min(total_to_write - written, MAX_BYTES_WRITTEN)
+ self.__wrapped.write(text[written:written+to_write])
+ written += to_write
+
+
+_wrapped_std_streams = set()
+
+
+def _wrap_std_stream(name):
+ # Python 2 & Windows 7 and below
+ if PY2 and sys.getwindowsversion()[:2] <= (6, 1) and name not in _wrapped_std_streams:
+ setattr(sys, name, WindowsChunkedWriter(getattr(sys, name)))
+ _wrapped_std_streams.add(name)
+
+
+def _get_text_stdin(buffer_stream):
+ text_stream = _NonClosingTextIOWrapper(
+ io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
+ 'utf-16-le', 'strict', line_buffering=True)
+ return ConsoleStream(text_stream, buffer_stream)
+
+
+def _get_text_stdout(buffer_stream):
+ text_stream = _NonClosingTextIOWrapper(
+ io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)),
+ 'utf-16-le', 'strict', line_buffering=True)
+ return ConsoleStream(text_stream, buffer_stream)
+
+
+def _get_text_stderr(buffer_stream):
+ text_stream = _NonClosingTextIOWrapper(
+ io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)),
+ 'utf-16-le', 'strict', line_buffering=True)
+ return ConsoleStream(text_stream, buffer_stream)
+
+
+if PY2:
+ def _hash_py_argv():
+ return zlib.crc32('\x00'.join(sys.argv[1:]))
+
+ _initial_argv_hash = _hash_py_argv()
+
+ def _get_windows_argv():
+ argc = c_int(0)
+ argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc))
+ argv = [argv_unicode[i] for i in range(0, argc.value)]
+
+ if not hasattr(sys, 'frozen'):
+ argv = argv[1:]
+ while len(argv) > 0:
+ arg = argv[0]
+ if not arg.startswith('-') or arg == '-':
+ break
+ argv = argv[1:]
+ if arg.startswith(('-c', '-m')):
+ break
+
+ return argv[1:]
+
+
+_stream_factories = {
+ 0: _get_text_stdin,
+ 1: _get_text_stdout,
+ 2: _get_text_stderr,
+}
+
+
+def _get_windows_console_stream(f, encoding, errors):
+ if get_buffer is not None and \
+ encoding in ('utf-16-le', None) \
+ and errors in ('strict', None) and \
+ hasattr(f, 'isatty') and f.isatty():
+ func = _stream_factories.get(f.fileno())
+ if func is not None:
+ if not PY2:
+ f = getattr(f, 'buffer', None)
+ if f is None:
+ return None
+ else:
+ # If we are on Python 2 we need to set the stream that we
+ # deal with to binary mode, as otherwise the exercise is a
+ # bit moot. The same problems apply as for
+ # get_binary_stdin and friends from _compat.
+ msvcrt.setmode(f.fileno(), os.O_BINARY)
+ return func(f)
diff --git a/third_party/python/Click/click/core.py b/third_party/python/Click/click/core.py
new file mode 100644
index 0000000000..7a1e3422be
--- /dev/null
+++ b/third_party/python/Click/click/core.py
@@ -0,0 +1,1856 @@
+import errno
+import inspect
+import os
+import sys
+from contextlib import contextmanager
+from itertools import repeat
+from functools import update_wrapper
+
+from .types import convert_type, IntRange, BOOL
+from .utils import PacifyFlushWrapper, make_str, make_default_short_help, \
+ echo, get_os_args
+from .exceptions import ClickException, UsageError, BadParameter, Abort, \
+ MissingParameter, Exit
+from .termui import prompt, confirm, style
+from .formatting import HelpFormatter, join_options
+from .parser import OptionParser, split_opt
+from .globals import push_context, pop_context
+
+from ._compat import PY2, isidentifier, iteritems, string_types
+from ._unicodefun import _check_for_unicode_literals, _verify_python3_env
+
+
+_missing = object()
+
+
+SUBCOMMAND_METAVAR = 'COMMAND [ARGS]...'
+SUBCOMMANDS_METAVAR = 'COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]...'
+
+DEPRECATED_HELP_NOTICE = ' (DEPRECATED)'
+DEPRECATED_INVOKE_NOTICE = 'DeprecationWarning: ' + \
+ 'The command %(name)s is deprecated.'
+
+
+def _maybe_show_deprecated_notice(cmd):
+ if cmd.deprecated:
+ echo(style(DEPRECATED_INVOKE_NOTICE % {'name': cmd.name}, fg='red'), err=True)
+
+
+def fast_exit(code):
+ """Exit without garbage collection, this speeds up exit by about 10ms for
+ things like bash completion.
+ """
+ sys.stdout.flush()
+ sys.stderr.flush()
+ os._exit(code)
+
+
+def _bashcomplete(cmd, prog_name, complete_var=None):
+ """Internal handler for the bash completion support."""
+ if complete_var is None:
+ complete_var = '_%s_COMPLETE' % (prog_name.replace('-', '_')).upper()
+ complete_instr = os.environ.get(complete_var)
+ if not complete_instr:
+ return
+
+ from ._bashcomplete import bashcomplete
+ if bashcomplete(cmd, prog_name, complete_var, complete_instr):
+ fast_exit(1)
+
+
+def _check_multicommand(base_command, cmd_name, cmd, register=False):
+ if not base_command.chain or not isinstance(cmd, MultiCommand):
+ return
+ if register:
+ hint = 'It is not possible to add multi commands as children to ' \
+ 'another multi command that is in chain mode'
+ else:
+ hint = 'Found a multi command as subcommand to a multi command ' \
+ 'that is in chain mode. This is not supported'
+ raise RuntimeError('%s. Command "%s" is set to chain and "%s" was '
+ 'added as subcommand but it in itself is a '
+ 'multi command. ("%s" is a %s within a chained '
+ '%s named "%s").' % (
+ hint, base_command.name, cmd_name,
+ cmd_name, cmd.__class__.__name__,
+ base_command.__class__.__name__,
+ base_command.name))
+
+
+def batch(iterable, batch_size):
+ return list(zip(*repeat(iter(iterable), batch_size)))
+
+
+def invoke_param_callback(callback, ctx, param, value):
+ code = getattr(callback, '__code__', None)
+ args = getattr(code, 'co_argcount', 3)
+
+ if args < 3:
+ # This will become a warning in Click 3.0:
+ from warnings import warn
+ warn(Warning('Invoked legacy parameter callback "%s". The new '
+ 'signature for such callbacks starting with '
+ 'click 2.0 is (ctx, param, value).'
+ % callback), stacklevel=3)
+ return callback(ctx, value)
+ return callback(ctx, param, value)
+
+
+@contextmanager
+def augment_usage_errors(ctx, param=None):
+ """Context manager that attaches extra information to exceptions that
+ fly.
+ """
+ try:
+ yield
+ except BadParameter as e:
+ if e.ctx is None:
+ e.ctx = ctx
+ if param is not None and e.param is None:
+ e.param = param
+ raise
+ except UsageError as e:
+ if e.ctx is None:
+ e.ctx = ctx
+ raise
+
+
+def iter_params_for_processing(invocation_order, declaration_order):
+ """Given a sequence of parameters in the order as should be considered
+ for processing and an iterable of parameters that exist, this returns
+ a list in the correct order as they should be processed.
+ """
+ def sort_key(item):
+ try:
+ idx = invocation_order.index(item)
+ except ValueError:
+ idx = float('inf')
+ return (not item.is_eager, idx)
+
+ return sorted(declaration_order, key=sort_key)
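+
+# Ordering sketch: with the sort key above, eager parameters are processed
+# first, and within each group parameters keep their command line position
+# (parameters that were not given at all sort last). With hypothetical
+# parameters where only ``help_param`` is eager:
+#
+#     invocation_order = [verbose_param, help_param]
+#     declaration_order = [name_param, verbose_param, help_param]
+#     iter_params_for_processing(invocation_order, declaration_order)
+#     # -> [help_param, verbose_param, name_param]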
+
+
+class Context(object):
+ """The context is a special internal object that holds state relevant
+ for the script execution at every single level. It's normally invisible
+ to commands unless they opt-in to getting access to it.
+
+ The context is useful as it can pass internal objects around and can
+ control special execution features such as reading data from
+ environment variables.
+
+ A context can be used as context manager in which case it will call
+ :meth:`close` on teardown.
+
+ .. versionadded:: 2.0
+ Added the `resilient_parsing`, `help_option_names`,
+ `token_normalize_func` parameters.
+
+ .. versionadded:: 3.0
+ Added the `allow_extra_args` and `allow_interspersed_args`
+ parameters.
+
+ .. versionadded:: 4.0
+ Added the `color`, `ignore_unknown_options`, and
+ `max_content_width` parameters.
+
+ :param command: the command class for this context.
+ :param parent: the parent context.
+ :param info_name: the info name for this invocation. Generally this
+ is the most descriptive name for the script or
+ command. For the toplevel script it is usually
+ the name of the script, for commands below it it's
+ the name of the command.
+ :param obj: an arbitrary object of user data.
+ :param auto_envvar_prefix: the prefix to use for automatic environment
+ variables. If this is `None` then reading
+ from environment variables is disabled. This
+ does not affect manually set environment
+ variables which are always read.
+ :param default_map: a dictionary (like object) with default values
+ for parameters.
+ :param terminal_width: the width of the terminal. The default is
+ inherit from parent context. If no context
+ defines the terminal width then auto
+ detection will be applied.
+ :param max_content_width: the maximum width for content rendered by
+ Click (this currently only affects help
+ pages). This defaults to 80 characters if
+ not overridden. In other words: even if the
+ terminal is larger than that, Click will not
+ format things wider than 80 characters by
+ default. In addition to that, formatters might
+ add some safety margin on the right.
+ :param resilient_parsing: if this flag is enabled then Click will
+ parse without any interactivity or callback
+ invocation. Default values will also be
+ ignored. This is useful for implementing
+ things such as completion support.
+ :param allow_extra_args: if this is set to `True` then extra arguments
+ at the end will not raise an error and will be
+ kept on the context. The default is to inherit
+ from the command.
+ :param allow_interspersed_args: if this is set to `False` then options
+ and arguments cannot be mixed. The
+ default is to inherit from the command.
+ :param ignore_unknown_options: instructs click to ignore options it does
+ not know and keep them for later
+ processing.
+ :param help_option_names: optionally a list of strings that define how
+ the default help parameter is named. The
+ default is ``['--help']``.
+ :param token_normalize_func: an optional function that is used to
+ normalize tokens (options, choices,
+ etc.). This for instance can be used to
+ implement case insensitive behavior.
+ :param color: controls if the terminal supports ANSI colors or not. The
+ default is autodetection. This is only needed if ANSI
+ codes are used in texts that Click prints which is by
+ default not the case. This for instance would affect
+ help output.
+ """
+
+ def __init__(self, command, parent=None, info_name=None, obj=None,
+ auto_envvar_prefix=None, default_map=None,
+ terminal_width=None, max_content_width=None,
+ resilient_parsing=False, allow_extra_args=None,
+ allow_interspersed_args=None,
+ ignore_unknown_options=None, help_option_names=None,
+ token_normalize_func=None, color=None):
+ #: the parent context or `None` if none exists.
+ self.parent = parent
+ #: the :class:`Command` for this context.
+ self.command = command
+ #: the descriptive information name
+ self.info_name = info_name
+ #: the parsed parameters except if the value is hidden in which
+ #: case it's not remembered.
+ self.params = {}
+ #: the leftover arguments.
+ self.args = []
+ #: protected arguments. These are arguments that are prepended
+ #: to `args` when certain parsing scenarios are encountered but
+ #: must never be propagated to other arguments. This is used
+ #: to implement nested parsing.
+ self.protected_args = []
+ if obj is None and parent is not None:
+ obj = parent.obj
+ #: the user object stored.
+ self.obj = obj
+ self._meta = getattr(parent, 'meta', {})
+
+ #: A dictionary (-like object) with defaults for parameters.
+ if default_map is None \
+ and parent is not None \
+ and parent.default_map is not None:
+ default_map = parent.default_map.get(info_name)
+ self.default_map = default_map
+
+ #: This flag indicates if a subcommand is going to be executed. A
+ #: group callback can use this information to figure out if it's
+ #: being executed directly or because the execution flow passes
+ #: onwards to a subcommand. By default it's None, but it can be
+ #: the name of the subcommand to execute.
+ #:
+ #: If chaining is enabled this will be set to ``'*'`` in case
+ #: any commands are executed. It is however not possible to
+ #: figure out which ones. If you require this knowledge you
+ #: should use a :func:`resultcallback`.
+ self.invoked_subcommand = None
+
+ if terminal_width is None and parent is not None:
+ terminal_width = parent.terminal_width
+ #: The width of the terminal (None is autodetection).
+ self.terminal_width = terminal_width
+
+ if max_content_width is None and parent is not None:
+ max_content_width = parent.max_content_width
+ #: The maximum width of formatted content (None implies a sensible
+ #: default which is 80 for most things).
+ self.max_content_width = max_content_width
+
+ if allow_extra_args is None:
+ allow_extra_args = command.allow_extra_args
+ #: Indicates if the context allows extra args or if it should
+ #: fail on parsing.
+ #:
+ #: .. versionadded:: 3.0
+ self.allow_extra_args = allow_extra_args
+
+ if allow_interspersed_args is None:
+ allow_interspersed_args = command.allow_interspersed_args
+ #: Indicates if the context allows mixing of arguments and
+ #: options or not.
+ #:
+ #: .. versionadded:: 3.0
+ self.allow_interspersed_args = allow_interspersed_args
+
+ if ignore_unknown_options is None:
+ ignore_unknown_options = command.ignore_unknown_options
+ #: Instructs click to ignore options that a command does not
+ #: understand and store them on the context for later
+ #: processing. This is primarily useful for situations where you
+ #: want to call into external programs. Generally this pattern is
+ #: strongly discouraged because it's not possible to losslessly
+ #: forward all arguments.
+ #:
+ #: .. versionadded:: 4.0
+ self.ignore_unknown_options = ignore_unknown_options
+
+ if help_option_names is None:
+ if parent is not None:
+ help_option_names = parent.help_option_names
+ else:
+ help_option_names = ['--help']
+
+ #: The names for the help options.
+ self.help_option_names = help_option_names
+
+ if token_normalize_func is None and parent is not None:
+ token_normalize_func = parent.token_normalize_func
+
+ #: An optional normalization function for tokens. This is
+ #: options, choices, commands etc.
+ self.token_normalize_func = token_normalize_func
+
+ #: Indicates if resilient parsing is enabled. In that case Click
+ #: will do its best to not cause any failures and default values
+ #: will be ignored. Useful for completion.
+ self.resilient_parsing = resilient_parsing
+
+ # If there is no envvar prefix yet, but the parent has one and
+ # the command on this level has a name, we can expand the envvar
+ # prefix automatically.
+ if auto_envvar_prefix is None:
+ if parent is not None \
+ and parent.auto_envvar_prefix is not None and \
+ self.info_name is not None:
+ auto_envvar_prefix = '%s_%s' % (parent.auto_envvar_prefix,
+ self.info_name.upper())
+ else:
+ auto_envvar_prefix = auto_envvar_prefix.upper()
+ self.auto_envvar_prefix = auto_envvar_prefix
+
+ if color is None and parent is not None:
+ color = parent.color
+
+ #: Controls if styling output is wanted or not.
+ self.color = color
+
+ self._close_callbacks = []
+ self._depth = 0
+
+ def __enter__(self):
+ self._depth += 1
+ push_context(self)
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ self._depth -= 1
+ if self._depth == 0:
+ self.close()
+ pop_context()
+
+ @contextmanager
+ def scope(self, cleanup=True):
+ """This helper method can be used with the context object to promote
+ it to the current thread local (see :func:`get_current_context`).
+ The default behavior of this is to invoke the cleanup functions which
+ can be disabled by setting `cleanup` to `False`. The cleanup
+ functions are typically used for things such as closing file handles.
+
+ If the cleanup is intended the context object can also be directly
+ used as a context manager.
+
+ Example usage::
+
+ with ctx.scope():
+ assert get_current_context() is ctx
+
+ This is equivalent::
+
+ with ctx:
+ assert get_current_context() is ctx
+
+ .. versionadded:: 5.0
+
+ :param cleanup: controls if the cleanup functions should be run or
+ not. The default is to run these functions. In
+ some situations the context only wants to be
+ temporarily pushed in which case this can be disabled.
+ Nested pushes automatically defer the cleanup.
+ """
+ if not cleanup:
+ self._depth += 1
+ try:
+ with self as rv:
+ yield rv
+ finally:
+ if not cleanup:
+ self._depth -= 1
+
+ @property
+ def meta(self):
+ """This is a dictionary which is shared with all the contexts
+ that are nested. It exists so that click utilities can store some
+ state here if they need to. It is however the responsibility of
+ that code to manage this dictionary well.
+
+ The keys are supposed to be unique dotted strings. For instance
+ module paths are a good choice for it. What is stored in there is
+ irrelevant for the operation of click. However what is important is
+ that code that places data here adheres to the general semantics of
+ the system.
+
+ Example usage::
+
+ LANG_KEY = __name__ + '.lang'
+
+ def set_language(value):
+ ctx = get_current_context()
+ ctx.meta[LANG_KEY] = value
+
+ def get_language():
+ return get_current_context().meta.get(LANG_KEY, 'en_US')
+
+ .. versionadded:: 5.0
+ """
+ return self._meta
+
+ def make_formatter(self):
+ """Creates the formatter for the help and usage output."""
+ return HelpFormatter(width=self.terminal_width,
+ max_width=self.max_content_width)
+
+ def call_on_close(self, f):
+ """This decorator remembers a function as callback that should be
+ executed when the context tears down. This is most useful to bind
+ resource handling to the script execution. For instance, file objects
+ opened by the :class:`File` type will register their close callbacks
+ here.
+
+ :param f: the function to execute on teardown.
+ """
+ self._close_callbacks.append(f)
+ return f
+
+ def close(self):
+ """Invokes all close callbacks."""
+ for cb in self._close_callbacks:
+ cb()
+ self._close_callbacks = []
+
+ @property
+ def command_path(self):
+ """The computed command path. This is used for the ``usage``
+ information on the help page. It's automatically created by
+ combining the info names of the chain of contexts to the root.
+ """
+ rv = ''
+ if self.info_name is not None:
+ rv = self.info_name
+ if self.parent is not None:
+ rv = self.parent.command_path + ' ' + rv
+ return rv.lstrip()
+
+ def find_root(self):
+ """Finds the outermost context."""
+ node = self
+ while node.parent is not None:
+ node = node.parent
+ return node
+
+ def find_object(self, object_type):
+ """Finds the closest object of a given type."""
+ node = self
+ while node is not None:
+ if isinstance(node.obj, object_type):
+ return node.obj
+ node = node.parent
+
+ def ensure_object(self, object_type):
+ """Like :meth:`find_object` but sets the innermost object to a
+ new instance of `object_type` if it does not exist.
+ """
+ rv = self.find_object(object_type)
+ if rv is None:
+ self.obj = rv = object_type()
+ return rv
+
+ def lookup_default(self, name):
+ """Looks up the default for a parameter name. This by default
+ looks into the :attr:`default_map` if available.
+ """
+ if self.default_map is not None:
+ rv = self.default_map.get(name)
+ if callable(rv):
+ rv = rv()
+ return rv
+
+ def fail(self, message):
+ """Aborts the execution of the program with a specific error
+ message.
+
+ :param message: the error message to fail with.
+ """
+ raise UsageError(message, self)
+
+ def abort(self):
+ """Aborts the script."""
+ raise Abort()
+
+ def exit(self, code=0):
+ """Exits the application with a given exit code."""
+ raise Exit(code)
+
+ def get_usage(self):
+ """Helper method to get formatted usage string for the current
+ context and command.
+ """
+ return self.command.get_usage(self)
+
+ def get_help(self):
+ """Helper method to get formatted help page for the current
+ context and command.
+ """
+ return self.command.get_help(self)
+
+ def invoke(*args, **kwargs):
+ """Invokes a command callback in exactly the way it expects. There
+ are two ways to invoke this method:
+
+ 1. the first argument can be a callback and all other arguments and
+ keyword arguments are forwarded directly to the function.
+ 2. the first argument is a click command object. In that case all
+ arguments are forwarded as well but proper click parameters
+ (options and click arguments) must be keyword arguments and Click
+ will fill in defaults.
+
+ Note that before Click 3.2 keyword arguments were not properly filled
+ in against the intention of this code and no context was created. For
+ more information about this change and why it was done in a bugfix
+ release see :ref:`upgrade-to-3.2`.
+ """
+ self, callback = args[:2]
+ ctx = self
+
+ # It's also possible to invoke another command which might or
+ # might not have a callback. In that case we also fill
+ # in defaults and make a new context for this command.
+ if isinstance(callback, Command):
+ other_cmd = callback
+ callback = other_cmd.callback
+ ctx = Context(other_cmd, info_name=other_cmd.name, parent=self)
+ if callback is None:
+ raise TypeError('The given command does not have a '
+ 'callback that can be invoked.')
+
+ for param in other_cmd.params:
+ if param.name not in kwargs and param.expose_value:
+ kwargs[param.name] = param.get_default(ctx)
+
+ args = args[2:]
+ with augment_usage_errors(self):
+ with ctx:
+ return callback(*args, **kwargs)
+
+ def forward(*args, **kwargs):
+ """Similar to :meth:`invoke` but fills in default keyword
+ arguments from the current context if the other command expects
+ it. This cannot invoke callbacks directly, only other commands.
+ """
+ self, cmd = args[:2]
+
+ # It's also possible to invoke another command which might or
+ # might not have a callback.
+ if not isinstance(cmd, Command):
+ raise TypeError('Callback is not a command.')
+
+ for param in self.params:
+ if param not in kwargs:
+ kwargs[param] = self.params[param]
+
+ return self.invoke(cmd, **kwargs)
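+
+# Usage sketch for Context.invoke() and Context.forward() from inside a
+# command callback (``cli``, ``sync`` and ``copy`` are hypothetical
+# commands):
+#
+#     import click
+#
+#     @cli.command()
+#     @click.option('--count', default=1)
+#     @click.pass_context
+#     def copy(ctx, count):
+#         ctx.forward(sync)                  # reuse matching params from ctx
+#         ctx.invoke(sync, count=count * 2)  # pass params explicitly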
+
+
+class BaseCommand(object):
+ """The base command implements the minimal API contract of commands.
+ Most code will never use this as it does not implement a lot of useful
+ functionality but it can act as the direct subclass of alternative
+ parsing methods that do not depend on the Click parser.
+
+ For instance, this can be used to bridge Click and other systems like
+ argparse or docopt.
+
+ Because base commands do not implement a lot of the API that other
+ parts of Click take for granted, they are not supported for all
+ operations. For instance, they usually cannot be used with the
+ decorators and they have no built-in callback system.
+
+ .. versionchanged:: 2.0
+ Added the `context_settings` parameter.
+
+ :param name: the name of the command to use unless a group overrides it.
+ :param context_settings: an optional dictionary with defaults that are
+ passed to the context object.
+ """
+ #: the default for the :attr:`Context.allow_extra_args` flag.
+ allow_extra_args = False
+ #: the default for the :attr:`Context.allow_interspersed_args` flag.
+ allow_interspersed_args = True
+ #: the default for the :attr:`Context.ignore_unknown_options` flag.
+ ignore_unknown_options = False
+
+ def __init__(self, name, context_settings=None):
+ #: the name the command thinks it has. Upon registering a command
+ #: on a :class:`Group` the group will default the command name
+ #: with this information. You should instead use the
+ #: :class:`Context`\'s :attr:`~Context.info_name` attribute.
+ self.name = name
+ if context_settings is None:
+ context_settings = {}
+ #: an optional dictionary with defaults passed to the context.
+ self.context_settings = context_settings
+
+ def get_usage(self, ctx):
+ raise NotImplementedError('Base commands cannot get usage')
+
+ def get_help(self, ctx):
+ raise NotImplementedError('Base commands cannot get help')
+
+ def make_context(self, info_name, args, parent=None, **extra):
+ """This function when given an info name and arguments will kick
+ off the parsing and create a new :class:`Context`. It does not
+ invoke the actual command callback though.
+
+ :param info_name: the info name for this invocation. Generally this
+ is the most descriptive name for the script or
+ command. For the toplevel script it's usually
+ the name of the script, for commands below it it's
+ the name of the command.
+ :param args: the arguments to parse as list of strings.
+ :param parent: the parent context if available.
+ :param extra: extra keyword arguments forwarded to the context
+ constructor.
+ """
+ for key, value in iteritems(self.context_settings):
+ if key not in extra:
+ extra[key] = value
+ ctx = Context(self, info_name=info_name, parent=parent, **extra)
+ with ctx.scope(cleanup=False):
+ self.parse_args(ctx, args)
+ return ctx
+
+ def parse_args(self, ctx, args):
+ """Given a context and a list of arguments this creates the parser
+ and parses the arguments, then modifies the context as necessary.
+ This is automatically invoked by :meth:`make_context`.
+ """
+ raise NotImplementedError('Base commands do not know how to parse '
+ 'arguments.')
+
+ def invoke(self, ctx):
+ """Given a context, this invokes the command. The default
+ implementation is raising a not implemented error.
+ """
+ raise NotImplementedError('Base commands are not invokable by default')
+
+ def main(self, args=None, prog_name=None, complete_var=None,
+ standalone_mode=True, **extra):
+ """This is the way to invoke a script with all the bells and
+ whistles as a command line application. This will always terminate
+ the application after a call. If this is not wanted, ``SystemExit``
+ needs to be caught.
+
+ This method is also available by directly calling the instance of
+ a :class:`Command`.
+
+ .. versionadded:: 3.0
+ Added the `standalone_mode` flag to control the standalone mode.
+
+ :param args: the arguments that should be used for parsing. If not
+ provided, ``sys.argv[1:]`` is used.
+ :param prog_name: the program name that should be used. By default
+ the program name is constructed by taking the file
+ name from ``sys.argv[0]``.
+ :param complete_var: the environment variable that controls the
+ bash completion support. The default is
+ ``"_<prog_name>_COMPLETE"`` with prog_name in
+ uppercase.
+ :param standalone_mode: the default behavior is to invoke the script
+ in standalone mode. Click will then
+ handle exceptions and convert them into
+ error messages and the function will never
+ return but shut down the interpreter. If
+ this is set to `False` they will be
+ propagated to the caller and the return
+ value of this function is the return value
+ of :meth:`invoke`.
+ :param extra: extra keyword arguments are forwarded to the context
+ constructor. See :class:`Context` for more information.
+ """
+ # If we are in Python 3, we will verify that the environment is
+ # sane at this point or reject further execution to avoid a
+ # broken script.
+ if not PY2:
+ _verify_python3_env()
+ else:
+ _check_for_unicode_literals()
+
+ if args is None:
+ args = get_os_args()
+ else:
+ args = list(args)
+
+ if prog_name is None:
+ prog_name = make_str(os.path.basename(
+ sys.argv and sys.argv[0] or __file__))
+
+ # Hook for the Bash completion. This only activates if the Bash
+ # completion is actually enabled, otherwise this is quite a fast
+ # noop.
+ _bashcomplete(self, prog_name, complete_var)
+
+ try:
+ try:
+ with self.make_context(prog_name, args, **extra) as ctx:
+ rv = self.invoke(ctx)
+ if not standalone_mode:
+ return rv
+ # it's not safe to `ctx.exit(rv)` here!
+ # note that `rv` may actually contain data like "1" which
+ # has obvious effects
+ # more subtle case: `rv=[None, None]` can come out of
+ # chained commands which all returned `None` -- so it's not
+ # even always obvious that `rv` indicates success/failure
+ # by its truthiness/falsiness
+ ctx.exit()
+ except (EOFError, KeyboardInterrupt):
+ echo(file=sys.stderr)
+ raise Abort()
+ except ClickException as e:
+ if not standalone_mode:
+ raise
+ e.show()
+ sys.exit(e.exit_code)
+ except IOError as e:
+ if e.errno == errno.EPIPE:
+ sys.stdout = PacifyFlushWrapper(sys.stdout)
+ sys.stderr = PacifyFlushWrapper(sys.stderr)
+ sys.exit(1)
+ else:
+ raise
+ except Exit as e:
+ if standalone_mode:
+ sys.exit(e.exit_code)
+ else:
+ # in non-standalone mode, return the exit code
+ # note that this is only reached if `self.invoke` above raises
+ # an Exit explicitly -- thus bypassing the check there which
+ # would return its result
+ # the results of non-standalone execution may therefore be
+ # somewhat ambiguous: if there are codepaths which lead to
+ # `ctx.exit(1)` and to `return 1`, the caller won't be able to
+ # tell the difference between the two
+ return e.exit_code
+ except Abort:
+ if not standalone_mode:
+ raise
+ echo('Aborted!', file=sys.stderr)
+ sys.exit(1)
+
+ def __call__(self, *args, **kwargs):
+ """Alias for :meth:`main`."""
+ return self.main(*args, **kwargs)
+
+
+class Command(BaseCommand):
+ """Commands are the basic building block of command line interfaces in
+ Click. A basic command handles command line parsing and might dispatch
+ more parsing to commands nested below it.
+
+ .. versionchanged:: 2.0
+ Added the `context_settings` parameter.
+
+ :param name: the name of the command to use unless a group overrides it.
+ :param context_settings: an optional dictionary with defaults that are
+ passed to the context object.
+ :param callback: the callback to invoke. This is optional.
+ :param params: the parameters to register with this command. This can
+ be either :class:`Option` or :class:`Argument` objects.
+ :param help: the help string to use for this command.
+ :param epilog: like the help string but it's printed at the end of the
+ help page after everything else.
+ :param short_help: the short help to use for this command. This is
+ shown on the command listing of the parent command.
+ :param add_help_option: by default each command registers a ``--help``
+ option. This can be disabled by this parameter.
+ :param hidden: hide this command from help outputs.
+
+ :param deprecated: issues a message indicating that
+ the command is deprecated.
+ """
+
+ def __init__(self, name, context_settings=None, callback=None,
+ params=None, help=None, epilog=None, short_help=None,
+ options_metavar='[OPTIONS]', add_help_option=True,
+ hidden=False, deprecated=False):
+ BaseCommand.__init__(self, name, context_settings)
+ #: the callback to execute when the command fires. This might be
+ #: `None` in which case nothing happens.
+ self.callback = callback
+ #: the list of parameters for this command in the order they
+ #: should show up in the help page and execute. Eager parameters
+ #: will automatically be handled before non eager ones.
+ self.params = params or []
+ # if a form feed (page break) is found in the help text, truncate help
+ # text to the content preceding the first form feed
+ if help and '\f' in help:
+ help = help.split('\f', 1)[0]
+ self.help = help
+ self.epilog = epilog
+ self.options_metavar = options_metavar
+ self.short_help = short_help
+ self.add_help_option = add_help_option
+ self.hidden = hidden
+ self.deprecated = deprecated
+
+ def get_usage(self, ctx):
+ formatter = ctx.make_formatter()
+ self.format_usage(ctx, formatter)
+ return formatter.getvalue().rstrip('\n')
+
+ def get_params(self, ctx):
+ rv = self.params
+ help_option = self.get_help_option(ctx)
+ if help_option is not None:
+ rv = rv + [help_option]
+ return rv
+
+ def format_usage(self, ctx, formatter):
+ """Writes the usage line into the formatter."""
+ pieces = self.collect_usage_pieces(ctx)
+ formatter.write_usage(ctx.command_path, ' '.join(pieces))
+
+ def collect_usage_pieces(self, ctx):
+ """Returns all the pieces that go into the usage line and returns
+ it as a list of strings.
+ """
+ rv = [self.options_metavar]
+ for param in self.get_params(ctx):
+ rv.extend(param.get_usage_pieces(ctx))
+ return rv
+
+ def get_help_option_names(self, ctx):
+ """Returns the names for the help option."""
+ all_names = set(ctx.help_option_names)
+ for param in self.params:
+ all_names.difference_update(param.opts)
+ all_names.difference_update(param.secondary_opts)
+ return all_names
+
+ def get_help_option(self, ctx):
+ """Returns the help option object."""
+ help_options = self.get_help_option_names(ctx)
+ if not help_options or not self.add_help_option:
+ return
+
+ def show_help(ctx, param, value):
+ if value and not ctx.resilient_parsing:
+ echo(ctx.get_help(), color=ctx.color)
+ ctx.exit()
+ return Option(help_options, is_flag=True,
+ is_eager=True, expose_value=False,
+ callback=show_help,
+ help='Show this message and exit.')
+
+ def make_parser(self, ctx):
+ """Creates the underlying option parser for this command."""
+ parser = OptionParser(ctx)
+ for param in self.get_params(ctx):
+ param.add_to_parser(parser, ctx)
+ return parser
+
+ def get_help(self, ctx):
+ """Formats the help into a string and returns it. This creates a
+ formatter and will call into the following formatting methods:
+ """
+ formatter = ctx.make_formatter()
+ self.format_help(ctx, formatter)
+ return formatter.getvalue().rstrip('\n')
+
+ def get_short_help_str(self, limit=45):
+ """Gets short help for the command or makes it by shortening the long help string."""
+ return self.short_help or self.help and make_default_short_help(self.help, limit) or ''
+
+ def format_help(self, ctx, formatter):
+ """Writes the help into the formatter if it exists.
+
+ This calls into the following methods:
+
+ - :meth:`format_usage`
+ - :meth:`format_help_text`
+ - :meth:`format_options`
+ - :meth:`format_epilog`
+ """
+ self.format_usage(ctx, formatter)
+ self.format_help_text(ctx, formatter)
+ self.format_options(ctx, formatter)
+ self.format_epilog(ctx, formatter)
+
+ def format_help_text(self, ctx, formatter):
+ """Writes the help text to the formatter if it exists."""
+ if self.help:
+ formatter.write_paragraph()
+ with formatter.indentation():
+ help_text = self.help
+ if self.deprecated:
+ help_text += DEPRECATED_HELP_NOTICE
+ formatter.write_text(help_text)
+ elif self.deprecated:
+ formatter.write_paragraph()
+ with formatter.indentation():
+ formatter.write_text(DEPRECATED_HELP_NOTICE)
+
+ def format_options(self, ctx, formatter):
+ """Writes all the options into the formatter if they exist."""
+ opts = []
+ for param in self.get_params(ctx):
+ rv = param.get_help_record(ctx)
+ if rv is not None:
+ opts.append(rv)
+
+ if opts:
+ with formatter.section('Options'):
+ formatter.write_dl(opts)
+
+ def format_epilog(self, ctx, formatter):
+ """Writes the epilog into the formatter if it exists."""
+ if self.epilog:
+ formatter.write_paragraph()
+ with formatter.indentation():
+ formatter.write_text(self.epilog)
+
+ def parse_args(self, ctx, args):
+ parser = self.make_parser(ctx)
+ opts, args, param_order = parser.parse_args(args=args)
+
+ for param in iter_params_for_processing(
+ param_order, self.get_params(ctx)):
+ value, args = param.handle_parse_result(ctx, opts, args)
+
+ if args and not ctx.allow_extra_args and not ctx.resilient_parsing:
+ ctx.fail('Got unexpected extra argument%s (%s)'
+ % (len(args) != 1 and 's' or '',
+ ' '.join(map(make_str, args))))
+
+ ctx.args = args
+ return args
+
+ def invoke(self, ctx):
+ """Given a context, this invokes the attached callback (if it exists)
+ in the right way.
+ """
+ _maybe_show_deprecated_notice(self)
+ if self.callback is not None:
+ return ctx.invoke(self.callback, **ctx.params)
+
+
+class MultiCommand(Command):
+ """A multi command is the basic implementation of a command that
+ dispatches to subcommands. The most common version is the
+ :class:`Group`.
+
+ :param invoke_without_command: this controls how the multi command itself
+ is invoked. By default it's only invoked
+ if a subcommand is provided.
+ :param no_args_is_help: this controls what happens if no arguments are
+ provided. This option is enabled by default when
+ `invoke_without_command` is disabled, and disabled
+ when it is enabled. If enabled, this will add
+ ``--help`` as argument if no arguments are
+ passed.
+ :param subcommand_metavar: the string that is used in the documentation
+ to indicate the subcommand place.
+ :param chain: if this is set to `True` chaining of multiple subcommands
+ is enabled. This restricts the form of commands in that
+ they cannot have optional arguments but it allows
+ multiple commands to be chained together.
+ :param result_callback: the result callback to attach to this multi
+ command.
+ """
+ allow_extra_args = True
+ allow_interspersed_args = False
+
+ def __init__(self, name=None, invoke_without_command=False,
+ no_args_is_help=None, subcommand_metavar=None,
+ chain=False, result_callback=None, **attrs):
+ Command.__init__(self, name, **attrs)
+ if no_args_is_help is None:
+ no_args_is_help = not invoke_without_command
+ self.no_args_is_help = no_args_is_help
+ self.invoke_without_command = invoke_without_command
+ if subcommand_metavar is None:
+ if chain:
+ subcommand_metavar = SUBCOMMANDS_METAVAR
+ else:
+ subcommand_metavar = SUBCOMMAND_METAVAR
+ self.subcommand_metavar = subcommand_metavar
+ self.chain = chain
+ #: The result callback that is stored. This can be set or
+ #: overridden with the :func:`resultcallback` decorator.
+ self.result_callback = result_callback
+
+ if self.chain:
+ for param in self.params:
+ if isinstance(param, Argument) and not param.required:
+ raise RuntimeError('Multi commands in chain mode cannot '
+ 'have optional arguments.')
+
+ def collect_usage_pieces(self, ctx):
+ rv = Command.collect_usage_pieces(self, ctx)
+ rv.append(self.subcommand_metavar)
+ return rv
+
+ def format_options(self, ctx, formatter):
+ Command.format_options(self, ctx, formatter)
+ self.format_commands(ctx, formatter)
+
+ def resultcallback(self, replace=False):
+ """Adds a result callback to the chain command. By default if a
+ result callback is already registered this will chain them but
+ this can be disabled with the `replace` parameter. The result
+ callback is invoked with the return value of the subcommand
+ (or the list of return values from all subcommands if chaining
+ is enabled) as well as the parameters as they would be passed
+ to the main callback.
+
+ Example::
+
+ @click.group()
+ @click.option('-i', '--input', default=23)
+ def cli(input):
+ return 42
+
+ @cli.resultcallback()
+ def process_result(result, input):
+ return result + input
+
+ .. versionadded:: 3.0
+
+ :param replace: if set to `True` an already existing result
+ callback will be removed.
+ """
+ def decorator(f):
+ old_callback = self.result_callback
+ if old_callback is None or replace:
+ self.result_callback = f
+ return f
+ def function(__value, *args, **kwargs):
+ return f(old_callback(__value, *args, **kwargs),
+ *args, **kwargs)
+ self.result_callback = rv = update_wrapper(function, f)
+ return rv
+ return decorator
+
+ def format_commands(self, ctx, formatter):
+ """Extra format methods for multi methods that adds all the commands
+ after the options.
+ """
+ commands = []
+ for subcommand in self.list_commands(ctx):
+ cmd = self.get_command(ctx, subcommand)
+ # What is this, the tool lied about a command. Ignore it
+ if cmd is None:
+ continue
+ if cmd.hidden:
+ continue
+
+ commands.append((subcommand, cmd))
+
+ # allow for 3 times the default spacing
+ if len(commands):
+ limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands)
+
+ rows = []
+ for subcommand, cmd in commands:
+ help = cmd.get_short_help_str(limit)
+ rows.append((subcommand, help))
+
+ if rows:
+ with formatter.section('Commands'):
+ formatter.write_dl(rows)
+
+ def parse_args(self, ctx, args):
+ if not args and self.no_args_is_help and not ctx.resilient_parsing:
+ echo(ctx.get_help(), color=ctx.color)
+ ctx.exit()
+
+ rest = Command.parse_args(self, ctx, args)
+ if self.chain:
+ ctx.protected_args = rest
+ ctx.args = []
+ elif rest:
+ ctx.protected_args, ctx.args = rest[:1], rest[1:]
+
+ return ctx.args
+
+ def invoke(self, ctx):
+ def _process_result(value):
+ if self.result_callback is not None:
+ value = ctx.invoke(self.result_callback, value,
+ **ctx.params)
+ return value
+
+ if not ctx.protected_args:
+ # If we are invoked without command the chain flag controls
+ # how this happens. If we are not in chain mode, the return
+ # value here is the return value of the command.
+ # If however we are in chain mode, the return value is the
+ # return value of the result processor invoked with an empty
+ # list (which means that no subcommand actually was executed).
+ if self.invoke_without_command:
+ if not self.chain:
+ return Command.invoke(self, ctx)
+ with ctx:
+ Command.invoke(self, ctx)
+ return _process_result([])
+ ctx.fail('Missing command.')
+
+ # Fetch args back out
+ args = ctx.protected_args + ctx.args
+ ctx.args = []
+ ctx.protected_args = []
+
+ # If we're not in chain mode, we only allow the invocation of a
+ # single command but we also inform the current context about the
+ # name of the command to invoke.
+ if not self.chain:
+ # Make sure the context is entered so we do not clean up
+ # resources until the result processor has worked.
+ with ctx:
+ cmd_name, cmd, args = self.resolve_command(ctx, args)
+ ctx.invoked_subcommand = cmd_name
+ Command.invoke(self, ctx)
+ sub_ctx = cmd.make_context(cmd_name, args, parent=ctx)
+ with sub_ctx:
+ return _process_result(sub_ctx.command.invoke(sub_ctx))
+
+ # In chain mode we create the contexts step by step, but after the
+ # base command has been invoked. Because at that point we do not
+ # know the subcommands yet, the invoked subcommand attribute is
+ # set to ``*`` to inform the command that subcommands are executed
+ # but nothing else.
+ with ctx:
+ ctx.invoked_subcommand = args and '*' or None
+ Command.invoke(self, ctx)
+
+ # Otherwise we make every single context and invoke them in a
+ # chain. In that case the return value to the result processor
+ # is the list of all invoked subcommand's results.
+ contexts = []
+ while args:
+ cmd_name, cmd, args = self.resolve_command(ctx, args)
+ sub_ctx = cmd.make_context(cmd_name, args, parent=ctx,
+ allow_extra_args=True,
+ allow_interspersed_args=False)
+ contexts.append(sub_ctx)
+ args, sub_ctx.args = sub_ctx.args, []
+
+ rv = []
+ for sub_ctx in contexts:
+ with sub_ctx:
+ rv.append(sub_ctx.command.invoke(sub_ctx))
+ return _process_result(rv)
+
+ def resolve_command(self, ctx, args):
+ cmd_name = make_str(args[0])
+ original_cmd_name = cmd_name
+
+ # Get the command
+ cmd = self.get_command(ctx, cmd_name)
+
+ # If we can't find the command but there is a normalization
+ # function available, we try with that one.
+ if cmd is None and ctx.token_normalize_func is not None:
+ cmd_name = ctx.token_normalize_func(cmd_name)
+ cmd = self.get_command(ctx, cmd_name)
+
+ # If we don't find the command we want to show an error message
+ # to the user that it was not provided. However, there is
+ # something else we should do: if the first argument looks like
+ # an option we want to kick off parsing again for arguments to
+ # resolve things like --help which now should go to the main
+ # place.
+ if cmd is None and not ctx.resilient_parsing:
+ if split_opt(cmd_name)[0]:
+ self.parse_args(ctx, ctx.args)
+ ctx.fail('No such command "%s".' % original_cmd_name)
+
+ return cmd_name, cmd, args[1:]
+
+ def get_command(self, ctx, cmd_name):
+ """Given a context and a command name, this returns a
+ :class:`Command` object if it exists or returns `None`.
+ """
+ raise NotImplementedError()
+
+ def list_commands(self, ctx):
+ """Returns a list of subcommand names in the order they should
+ appear.
+ """
+ return []
+
+
+class Group(MultiCommand):
+ """A group allows a command to have subcommands attached. This is the
+ most common way to implement nesting in Click.
+
+ :param commands: a dictionary of commands.
+ """
+
+ def __init__(self, name=None, commands=None, **attrs):
+ MultiCommand.__init__(self, name, **attrs)
+ #: the registered subcommands by their exported names.
+ self.commands = commands or {}
+
+ def add_command(self, cmd, name=None):
+ """Registers another :class:`Command` with this group. If the name
+ is not provided, the name of the command is used.
+ """
+ name = name or cmd.name
+ if name is None:
+ raise TypeError('Command has no name.')
+ _check_multicommand(self, name, cmd, register=True)
+ self.commands[name] = cmd
+
+ def command(self, *args, **kwargs):
+ """A shortcut decorator for declaring and attaching a command to
+ the group. This takes the same arguments as :func:`command` but
+ immediately registers the created command with this instance by
+ calling into :meth:`add_command`.
+ """
+ def decorator(f):
+ cmd = command(*args, **kwargs)(f)
+ self.add_command(cmd)
+ return cmd
+ return decorator
+
+ def group(self, *args, **kwargs):
+ """A shortcut decorator for declaring and attaching a group to
+ the group. This takes the same arguments as :func:`group` but
+ immediately registers the created command with this instance by
+ calling into :meth:`add_command`.
+ """
+ def decorator(f):
+ cmd = group(*args, **kwargs)(f)
+ self.add_command(cmd)
+ return cmd
+ return decorator
+
+ def get_command(self, ctx, cmd_name):
+ return self.commands.get(cmd_name)
+
+ def list_commands(self, ctx):
+ return sorted(self.commands)
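+
+# Illustrative usage sketch for the shortcut decorators above (hypothetical
+# names, not part of the module):
+#
+#     @click.group()
+#     def cli():
+#         pass
+#
+#     @cli.command()
+#     def sync():
+#         click.echo('Syncing')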
+
+
+class CommandCollection(MultiCommand):
+ """A command collection is a multi command that merges multiple multi
+ commands together into one. This is a straightforward implementation
+ that accepts a list of different multi commands as sources and
+ provides all the commands for each of them.
+ """
+
+ def __init__(self, name=None, sources=None, **attrs):
+ MultiCommand.__init__(self, name, **attrs)
+ #: The list of registered multi commands.
+ self.sources = sources or []
+
+ def add_source(self, multi_cmd):
+ """Adds a new multi command to the chain dispatcher."""
+ self.sources.append(multi_cmd)
+
+ def get_command(self, ctx, cmd_name):
+ for source in self.sources:
+ rv = source.get_command(ctx, cmd_name)
+ if rv is not None:
+ if self.chain:
+ _check_multicommand(self, cmd_name, rv)
+ return rv
+
+ def list_commands(self, ctx):
+ rv = set()
+ for source in self.sources:
+ rv.update(source.list_commands(ctx))
+ return sorted(rv)
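+
+# Illustrative sketch of merging two groups with a CommandCollection
+# (hypothetical names, not part of the module):
+#
+#     @click.group()
+#     def tools():
+#         pass
+#
+#     @click.group()
+#     def extras():
+#         pass
+#
+#     cli = click.CommandCollection(sources=[tools, extras])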
+
+
+class Parameter(object):
+ r"""A parameter to a command comes in two versions: they are either
+ :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently
+ not supported by design as some of the internals for parsing are
+ intentionally not finalized.
+
+ Some settings are supported by both options and arguments.
+
+ .. versionchanged:: 2.0
+ Changed signature for parameter callback to also be passed the
+ parameter. In Click 2.0, the old callback format will still work,
+ but it will raise a warning to give you a chance to migrate the
+ code more easily.
+
+ :param param_decls: the parameter declarations for this option or
+ argument. This is a list of flags or argument
+ names.
+ :param type: the type that should be used. Either a :class:`ParamType`
+ or a Python type. The latter is converted into the former
+ automatically if supported.
+ :param required: controls if this is optional or not.
+ :param default: the default value if omitted. This can also be a callable,
+ in which case it's invoked when the default is needed
+ without any arguments.
+ :param callback: a callback that should be executed after the parameter
+ was matched. This is called as ``fn(ctx, param,
+ value)`` and needs to return the value. Before Click
+ 2.0, the signature was ``(ctx, value)``.
+ :param nargs: the number of arguments to match. If not ``1`` the return
+ value is a tuple instead of single value. The default for
+ nargs is ``1`` (except if the type is a tuple, then it's
+ the arity of the tuple).
+ :param metavar: how the value is represented in the help page.
+ :param expose_value: if this is `True` then the value is passed onwards
+ to the command callback and stored on the context,
+ otherwise it's skipped.
+ :param is_eager: eager values are processed before non-eager ones. This
+ should not be set for arguments or it will invert the
+ order of processing.
+ :param envvar: a string or list of strings that are environment variables
+ that should be checked.
+ """
+ param_type_name = 'parameter'
+
+ def __init__(self, param_decls=None, type=None, required=False,
+ default=None, callback=None, nargs=None, metavar=None,
+ expose_value=True, is_eager=False, envvar=None,
+ autocompletion=None):
+ self.name, self.opts, self.secondary_opts = \
+ self._parse_decls(param_decls or (), expose_value)
+
+ self.type = convert_type(type, default)
+
+ # Default nargs to what the type tells us if we have that
+ # information available.
+ if nargs is None:
+ if self.type.is_composite:
+ nargs = self.type.arity
+ else:
+ nargs = 1
+
+ self.required = required
+ self.callback = callback
+ self.nargs = nargs
+ self.multiple = False
+ self.expose_value = expose_value
+ self.default = default
+ self.is_eager = is_eager
+ self.metavar = metavar
+ self.envvar = envvar
+ self.autocompletion = autocompletion
+
+ @property
+ def human_readable_name(self):
+ """Returns the human readable name of this parameter. This is the
+ same as the name for options, but the metavar for arguments.
+ """
+ return self.name
+
+ def make_metavar(self):
+ if self.metavar is not None:
+ return self.metavar
+ metavar = self.type.get_metavar(self)
+ if metavar is None:
+ metavar = self.type.name.upper()
+ if self.nargs != 1:
+ metavar += '...'
+ return metavar
+
+ def get_default(self, ctx):
+ """Given a context variable this calculates the default value."""
+ # Otherwise go with the regular default.
+ if callable(self.default):
+ rv = self.default()
+ else:
+ rv = self.default
+ return self.type_cast_value(ctx, rv)
+
+ def add_to_parser(self, parser, ctx):
+ pass
+
+ def consume_value(self, ctx, opts):
+ value = opts.get(self.name)
+ if value is None:
+ value = self.value_from_envvar(ctx)
+ if value is None:
+ value = ctx.lookup_default(self.name)
+ return value
+
+ def type_cast_value(self, ctx, value):
+ """Given a value this runs it properly through the type system.
+ This automatically handles things like `nargs` and `multiple` as
+ well as composite types.
+ """
+ if self.type.is_composite:
+ if self.nargs <= 1:
+ raise TypeError('Attempted to invoke composite type '
+ 'but nargs has been set to %s. This is '
+ 'not supported; nargs needs to be set to '
+ 'a fixed value > 1.' % self.nargs)
+ if self.multiple:
+ return tuple(self.type(x or (), self, ctx) for x in value or ())
+ return self.type(value or (), self, ctx)
+
+ def _convert(value, level):
+ if level == 0:
+ return self.type(value, self, ctx)
+ return tuple(_convert(x, level - 1) for x in value or ())
+ return _convert(value, (self.nargs != 1) + bool(self.multiple))
+
+ def process_value(self, ctx, value):
+ """Given a value and context this runs the logic to convert the
+ value as necessary.
+ """
+ # If the value we were given is None we do nothing. This way
+ # code that calls this can easily figure out if something was
+ # not provided. Otherwise it would be converted into an empty
+ # tuple for multiple invocations which is inconvenient.
+ if value is not None:
+ return self.type_cast_value(ctx, value)
+
+ def value_is_missing(self, value):
+ if value is None:
+ return True
+ if (self.nargs != 1 or self.multiple) and value == ():
+ return True
+ return False
+
+ def full_process_value(self, ctx, value):
+ value = self.process_value(ctx, value)
+
+ if value is None and not ctx.resilient_parsing:
+ value = self.get_default(ctx)
+
+ if self.required and self.value_is_missing(value):
+ raise MissingParameter(ctx=ctx, param=self)
+
+ return value
+
+ def resolve_envvar_value(self, ctx):
+ if self.envvar is None:
+ return
+ if isinstance(self.envvar, (tuple, list)):
+ for envvar in self.envvar:
+ rv = os.environ.get(envvar)
+ if rv is not None:
+ return rv
+ else:
+ return os.environ.get(self.envvar)
+
+ def value_from_envvar(self, ctx):
+ rv = self.resolve_envvar_value(ctx)
+ if rv is not None and self.nargs != 1:
+ rv = self.type.split_envvar_value(rv)
+ return rv
+
+ def handle_parse_result(self, ctx, opts, args):
+ with augment_usage_errors(ctx, param=self):
+ value = self.consume_value(ctx, opts)
+ try:
+ value = self.full_process_value(ctx, value)
+ except Exception:
+ if not ctx.resilient_parsing:
+ raise
+ value = None
+ if self.callback is not None:
+ try:
+ value = invoke_param_callback(
+ self.callback, ctx, self, value)
+ except Exception:
+ if not ctx.resilient_parsing:
+ raise
+
+ if self.expose_value:
+ ctx.params[self.name] = value
+ return value, args
+
+ def get_help_record(self, ctx):
+ pass
+
+ def get_usage_pieces(self, ctx):
+ return []
+
+ def get_error_hint(self, ctx):
+ """Get a stringified version of the param for use in error messages to
+ indicate which param caused the error.
+ """
+ hint_list = self.opts or [self.human_readable_name]
+ return ' / '.join('"%s"' % x for x in hint_list)
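+
+# Sketch of the ``fn(ctx, param, value)`` callback signature described in the
+# Parameter docstring above (hypothetical option and validator, not part of
+# the module):
+#
+#     def validate_count(ctx, param, value):
+#         if value < 0:
+#             raise click.BadParameter('count must be non-negative')
+#         return value
+#
+#     @click.command()
+#     @click.option('--count', type=int, default=0, callback=validate_count)
+#     def repeat(count):
+#         click.echo(count)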
+
+
+class Option(Parameter):
+ """Options are usually optional values on the command line and
+ have some extra features that arguments don't have.
+
+ All other parameters are passed onwards to the parameter constructor.
+
+ :param show_default: controls if the default value should be shown on the
+ help page. Normally, defaults are not shown. If this
+ value is a string, it shows the string instead of the
+ value. This is particularly useful for dynamic options.
+ :param show_envvar: controls if an environment variable should be shown on
+ the help page. Normally, environment variables
+ are not shown.
+ :param prompt: if set to `True` or a non-empty string then the user will be
+ prompted for input. If set to `True` the prompt will be the
+ option name capitalized.
+ :param confirmation_prompt: if set then the value will need to be confirmed
+ if it was prompted for.
+ :param hide_input: if this is `True` then the input on the prompt will be
+ hidden from the user. This is useful for password
+ input.
+ :param is_flag: forces this option to act as a flag. The default is
+ auto detection.
+ :param flag_value: which value should be used for this flag if it's
+ enabled. This is set to a boolean automatically if
+ the option string contains a slash to mark two options.
+ :param multiple: if this is set to `True` then the argument is accepted
+ multiple times and recorded. This is similar to ``nargs``
+ in how it works but supports an arbitrary number of
+ arguments.
+ :param count: this flag makes an option increment an integer.
+ :param allow_from_autoenv: if this is enabled then the value of this
+ parameter will be pulled from an environment
+ variable in case a prefix is defined on the
+ context.
+ :param help: the help string.
+ :param hidden: hide this option from help outputs.
+ """
+ param_type_name = 'option'
+
+ def __init__(self, param_decls=None, show_default=False,
+ prompt=False, confirmation_prompt=False,
+ hide_input=False, is_flag=None, flag_value=None,
+ multiple=False, count=False, allow_from_autoenv=True,
+ type=None, help=None, hidden=False, show_choices=True,
+ show_envvar=False, **attrs):
+ default_is_missing = attrs.get('default', _missing) is _missing
+ Parameter.__init__(self, param_decls, type=type, **attrs)
+
+ if prompt is True:
+ prompt_text = self.name.replace('_', ' ').capitalize()
+ elif prompt is False:
+ prompt_text = None
+ else:
+ prompt_text = prompt
+ self.prompt = prompt_text
+ self.confirmation_prompt = confirmation_prompt
+ self.hide_input = hide_input
+ self.hidden = hidden
+
+ # Flags
+ if is_flag is None:
+ if flag_value is not None:
+ is_flag = True
+ else:
+ is_flag = bool(self.secondary_opts)
+ if is_flag and default_is_missing:
+ self.default = False
+ if flag_value is None:
+ flag_value = not self.default
+ self.is_flag = is_flag
+ self.flag_value = flag_value
+ if self.is_flag and isinstance(self.flag_value, bool) \
+ and type is None:
+ self.type = BOOL
+ self.is_bool_flag = True
+ else:
+ self.is_bool_flag = False
+
+ # Counting
+ self.count = count
+ if count:
+ if type is None:
+ self.type = IntRange(min=0)
+ if default_is_missing:
+ self.default = 0
+
+ self.multiple = multiple
+ self.allow_from_autoenv = allow_from_autoenv
+ self.help = help
+ self.show_default = show_default
+ self.show_choices = show_choices
+ self.show_envvar = show_envvar
+
+ # Sanity check for stuff we don't support
+ if __debug__:
+ if self.nargs < 0:
+ raise TypeError('Options cannot have nargs < 0')
+ if self.prompt and self.is_flag and not self.is_bool_flag:
+ raise TypeError('Cannot prompt for flags that are not bools.')
+ if not self.is_bool_flag and self.secondary_opts:
+ raise TypeError('Got secondary option for non boolean flag.')
+ if self.is_bool_flag and self.hide_input \
+ and self.prompt is not None:
+ raise TypeError('Hidden input does not work with boolean '
+ 'flag prompts.')
+ if self.count:
+ if self.multiple:
+ raise TypeError('Options cannot be multiple and count '
+ 'at the same time.')
+ elif self.is_flag:
+ raise TypeError('Options cannot be count and flags at '
+ 'the same time.')
+
+ def _parse_decls(self, decls, expose_value):
+ opts = []
+ secondary_opts = []
+ name = None
+ possible_names = []
+
+ for decl in decls:
+ if isidentifier(decl):
+ if name is not None:
+ raise TypeError('Name defined twice')
+ name = decl
+ else:
+ split_char = decl[:1] == '/' and ';' or '/'
+ if split_char in decl:
+ first, second = decl.split(split_char, 1)
+ first = first.rstrip()
+ if first:
+ possible_names.append(split_opt(first))
+ opts.append(first)
+ second = second.lstrip()
+ if second:
+ secondary_opts.append(second.lstrip())
+ else:
+ possible_names.append(split_opt(decl))
+ opts.append(decl)
+
+ if name is None and possible_names:
+ possible_names.sort(key=lambda x: -len(x[0])) # group long options first
+ name = possible_names[0][1].replace('-', '_').lower()
+ if not isidentifier(name):
+ name = None
+
+ if name is None:
+ if not expose_value:
+ return None, opts, secondary_opts
+ raise TypeError('Could not determine name for option')
+
+ if not opts and not secondary_opts:
+ raise TypeError('No options defined but a name was passed (%s). '
+ 'Did you mean to declare an argument instead '
+ 'of an option?' % name)
+
+ return name, opts, secondary_opts
+
+ def add_to_parser(self, parser, ctx):
+ kwargs = {
+ 'dest': self.name,
+ 'nargs': self.nargs,
+ 'obj': self,
+ }
+
+ if self.multiple:
+ action = 'append'
+ elif self.count:
+ action = 'count'
+ else:
+ action = 'store'
+
+ if self.is_flag:
+ kwargs.pop('nargs', None)
+ if self.is_bool_flag and self.secondary_opts:
+ parser.add_option(self.opts, action=action + '_const',
+ const=True, **kwargs)
+ parser.add_option(self.secondary_opts, action=action +
+ '_const', const=False, **kwargs)
+ else:
+ parser.add_option(self.opts, action=action + '_const',
+ const=self.flag_value,
+ **kwargs)
+ else:
+ kwargs['action'] = action
+ parser.add_option(self.opts, **kwargs)
+
+ def get_help_record(self, ctx):
+ if self.hidden:
+ return
+ any_prefix_is_slash = []
+
+ def _write_opts(opts):
+ rv, any_slashes = join_options(opts)
+ if any_slashes:
+ any_prefix_is_slash[:] = [True]
+ if not self.is_flag and not self.count:
+ rv += ' ' + self.make_metavar()
+ return rv
+
+ rv = [_write_opts(self.opts)]
+ if self.secondary_opts:
+ rv.append(_write_opts(self.secondary_opts))
+
+ help = self.help or ''
+ extra = []
+ if self.show_envvar:
+ envvar = self.envvar
+ if envvar is None:
+ if self.allow_from_autoenv and \
+ ctx.auto_envvar_prefix is not None:
+ envvar = '%s_%s' % (ctx.auto_envvar_prefix, self.name.upper())
+ if envvar is not None:
+ extra.append('env var: %s' % (
+ ', '.join('%s' % d for d in envvar)
+ if isinstance(envvar, (list, tuple))
+ else envvar, ))
+ if self.default is not None and self.show_default:
+ if isinstance(self.show_default, string_types):
+ default_string = '({})'.format(self.show_default)
+ elif isinstance(self.default, (list, tuple)):
+ default_string = ', '.join('%s' % d for d in self.default)
+ elif inspect.isfunction(self.default):
+ default_string = "(dynamic)"
+ else:
+ default_string = self.default
+ extra.append('default: {}'.format(default_string))
+
+ if self.required:
+ extra.append('required')
+ if extra:
+ help = '%s[%s]' % (help and help + ' ' or '', '; '.join(extra))
+
+ return ((any_prefix_is_slash and '; ' or ' / ').join(rv), help)
+
+ def get_default(self, ctx):
+ # If we're a non-boolean flag our default is more complex because
+ # we need to look at all flags in the same group to figure out
+ # if we're the default one in which case we return the flag
+ # value as default.
+ if self.is_flag and not self.is_bool_flag:
+ for param in ctx.command.params:
+ if param.name == self.name and param.default:
+ return param.flag_value
+ return None
+ return Parameter.get_default(self, ctx)
+
+ def prompt_for_value(self, ctx):
+ """This is an alternative flow that can be activated in the full
+ value processing if a value does not exist. It will prompt the
+ user until a valid value exists and then returns the processed
+ value as result.
+ """
+ # Calculate the default before prompting anything to be stable.
+ default = self.get_default(ctx)
+
+ # If this is a prompt for a flag we need to handle this
+ # differently.
+ if self.is_bool_flag:
+ return confirm(self.prompt, default)
+
+ return prompt(self.prompt, default=default, type=self.type,
+ hide_input=self.hide_input, show_choices=self.show_choices,
+ confirmation_prompt=self.confirmation_prompt,
+ value_proc=lambda x: self.process_value(ctx, x))
+
+ def resolve_envvar_value(self, ctx):
+ rv = Parameter.resolve_envvar_value(self, ctx)
+ if rv is not None:
+ return rv
+ if self.allow_from_autoenv and \
+ ctx.auto_envvar_prefix is not None:
+ envvar = '%s_%s' % (ctx.auto_envvar_prefix, self.name.upper())
+ return os.environ.get(envvar)
+
+ def value_from_envvar(self, ctx):
+ rv = self.resolve_envvar_value(ctx)
+ if rv is None:
+ return None
+ value_depth = (self.nargs != 1) + bool(self.multiple)
+ if value_depth > 0 and rv is not None:
+ rv = self.type.split_envvar_value(rv)
+ if self.multiple and self.nargs != 1:
+ rv = batch(rv, self.nargs)
+ return rv
+
+ def full_process_value(self, ctx, value):
+ if value is None and self.prompt is not None \
+ and not ctx.resilient_parsing:
+ return self.prompt_for_value(ctx)
+ return Parameter.full_process_value(self, ctx, value)
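+
+# Illustrative sketch of the flag behaviour described in the Option
+# docstring: a slash in the declaration yields a boolean flag with an
+# off-switch (hypothetical names, not part of the module):
+#
+#     @click.command()
+#     @click.option('--shout/--no-shout', default=False)
+#     def greet(shout):
+#         click.echo('HELLO' if shout else 'hello')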
+
+
+class Argument(Parameter):
+ """Arguments are positional parameters to a command. They generally
+ provide fewer features than options but can have infinite ``nargs``
+ and are required by default.
+
+ All parameters are passed onwards to the parameter constructor.
+ """
+ param_type_name = 'argument'
+
+ def __init__(self, param_decls, required=None, **attrs):
+ if required is None:
+ if attrs.get('default') is not None:
+ required = False
+ else:
+ required = attrs.get('nargs', 1) > 0
+ Parameter.__init__(self, param_decls, required=required, **attrs)
+ if self.default is not None and self.nargs < 0:
+ raise TypeError('nargs=-1 in combination with a default value '
+ 'is not supported.')
+
+ @property
+ def human_readable_name(self):
+ if self.metavar is not None:
+ return self.metavar
+ return self.name.upper()
+
+ def make_metavar(self):
+ if self.metavar is not None:
+ return self.metavar
+ var = self.type.get_metavar(self)
+ if not var:
+ var = self.name.upper()
+ if not self.required:
+ var = '[%s]' % var
+ if self.nargs != 1:
+ var += '...'
+ return var
+
+ def _parse_decls(self, decls, expose_value):
+ if not decls:
+ if not expose_value:
+ return None, [], []
+ raise TypeError('Could not determine name for argument')
+ if len(decls) == 1:
+ name = arg = decls[0]
+ name = name.replace('-', '_').lower()
+ else:
+ raise TypeError('Arguments take exactly one '
+ 'parameter declaration, got %d' % len(decls))
+ return name, [arg], []
+
+ def get_usage_pieces(self, ctx):
+ return [self.make_metavar()]
+
+ def get_error_hint(self, ctx):
+ return '"%s"' % self.make_metavar()
+
+ def add_to_parser(self, parser, ctx):
+ parser.add_argument(dest=self.name, nargs=self.nargs,
+ obj=self)
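+
+# Illustrative sketch of the "infinite nargs" behaviour mentioned in the
+# Argument docstring (hypothetical names, not part of the module):
+#
+#     @click.command()
+#     @click.argument('src', nargs=-1)
+#     @click.argument('dst', nargs=1)
+#     def copy(src, dst):
+#         for fn in src:
+#             click.echo('copy %s to %s' % (fn, dst))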
+
+
+# Circular dependency between decorators and core
+from .decorators import command, group
diff --git a/third_party/python/Click/click/decorators.py b/third_party/python/Click/click/decorators.py
new file mode 100644
index 0000000000..c57c530861
--- /dev/null
+++ b/third_party/python/Click/click/decorators.py
@@ -0,0 +1,311 @@
+import sys
+import inspect
+
+from functools import update_wrapper
+
+from ._compat import iteritems
+from ._unicodefun import _check_for_unicode_literals
+from .utils import echo
+from .globals import get_current_context
+
+
+def pass_context(f):
+ """Marks a callback as wanting to receive the current context
+ object as first argument.
+ """
+ def new_func(*args, **kwargs):
+ return f(get_current_context(), *args, **kwargs)
+ return update_wrapper(new_func, f)
+
+
+def pass_obj(f):
+ """Similar to :func:`pass_context`, but only pass the object on the
+ context onwards (:attr:`Context.obj`). This is useful if that object
+ represents the state of a nested system.
+ """
+ def new_func(*args, **kwargs):
+ return f(get_current_context().obj, *args, **kwargs)
+ return update_wrapper(new_func, f)
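+
+# Illustrative sketch of the two decorators above (hypothetical names, not
+# part of the module):
+#
+#     @click.group()
+#     @click.pass_context
+#     def cli(ctx):
+#         ctx.obj = {'verbose': True}
+#
+#     @cli.command()
+#     @click.pass_obj
+#     def show(obj):
+#         click.echo(obj['verbose'])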
+
+
+def make_pass_decorator(object_type, ensure=False):
+ """Given an object type this creates a decorator that will work
+ similar to :func:`pass_obj` but instead of passing the object of the
+ current context, it will find the innermost context of type
+ :func:`object_type`.
+
+ This generates a decorator that works roughly like this::
+
+ from functools import update_wrapper
+
+ def decorator(f):
+ @pass_context
+ def new_func(ctx, *args, **kwargs):
+ obj = ctx.find_object(object_type)
+ return ctx.invoke(f, obj, *args, **kwargs)
+ return update_wrapper(new_func, f)
+ return decorator
+
+ :param object_type: the type of the object to pass.
+ :param ensure: if set to `True`, a new object will be created and
+ remembered on the context if it's not there yet.
+ """
+ def decorator(f):
+ def new_func(*args, **kwargs):
+ ctx = get_current_context()
+ if ensure:
+ obj = ctx.ensure_object(object_type)
+ else:
+ obj = ctx.find_object(object_type)
+ if obj is None:
+ raise RuntimeError('Managed to invoke callback without a '
+ 'context object of type %r existing'
+ % object_type.__name__)
+ return ctx.invoke(f, obj, *args, **kwargs)
+ return update_wrapper(new_func, f)
+ return decorator
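+
+# Illustrative sketch of make_pass_decorator (the ``Repo`` class is
+# hypothetical, not part of the module):
+#
+#     class Repo(object):
+#         def __init__(self):
+#             self.home = '.'
+#
+#     pass_repo = click.make_pass_decorator(Repo, ensure=True)
+#
+#     @click.command()
+#     @pass_repo
+#     def status(repo):
+#         click.echo(repo.home)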
+
+
+def _make_command(f, name, attrs, cls):
+ if isinstance(f, Command):
+ raise TypeError('Attempted to convert a callback into a '
+ 'command twice.')
+ try:
+ params = f.__click_params__
+ params.reverse()
+ del f.__click_params__
+ except AttributeError:
+ params = []
+ help = attrs.get('help')
+ if help is None:
+ help = inspect.getdoc(f)
+ if isinstance(help, bytes):
+ help = help.decode('utf-8')
+ else:
+ help = inspect.cleandoc(help)
+ attrs['help'] = help
+ _check_for_unicode_literals()
+ return cls(name=name or f.__name__.lower().replace('_', '-'),
+ callback=f, params=params, **attrs)
+
+
+def command(name=None, cls=None, **attrs):
+ r"""Creates a new :class:`Command` and uses the decorated function as
+ callback. This will also automatically attach all decorated
+ :func:`option`\s and :func:`argument`\s as parameters to the command.
+
+ The name of the command defaults to the name of the function. If you
+ want to change that, you can pass the intended name as the first
+ argument.
+
+ All keyword arguments are forwarded to the underlying command class.
+
+ Once decorated the function turns into a :class:`Command` instance
+ that can be invoked as a command line utility or be attached to a
+ command :class:`Group`.
+
+ :param name: the name of the command. This defaults to the function
+ name with underscores replaced by dashes.
+ :param cls: the command class to instantiate. This defaults to
+ :class:`Command`.
+ """
+ if cls is None:
+ cls = Command
+ def decorator(f):
+ cmd = _make_command(f, name, attrs, cls)
+ cmd.__doc__ = f.__doc__
+ return cmd
+ return decorator
+
+
+def group(name=None, **attrs):
+ """Creates a new :class:`Group` with a function as callback. This
+ works otherwise the same as :func:`command` just that the `cls`
+ parameter is set to :class:`Group`.
+ """
+ attrs.setdefault('cls', Group)
+ return command(name, **attrs)
+
+
+def _param_memo(f, param):
+ if isinstance(f, Command):
+ f.params.append(param)
+ else:
+ if not hasattr(f, '__click_params__'):
+ f.__click_params__ = []
+ f.__click_params__.append(param)
+
+
+def argument(*param_decls, **attrs):
+ """Attaches an argument to the command. All positional arguments are
+ passed as parameter declarations to :class:`Argument`; all keyword
+ arguments are forwarded unchanged (except ``cls``).
+ This is equivalent to creating an :class:`Argument` instance manually
+ and attaching it to the :attr:`Command.params` list.
+
+ :param cls: the argument class to instantiate. This defaults to
+ :class:`Argument`.
+ """
+ def decorator(f):
+ ArgumentClass = attrs.pop('cls', Argument)
+ _param_memo(f, ArgumentClass(param_decls, **attrs))
+ return f
+ return decorator
+
+
+def option(*param_decls, **attrs):
+ """Attaches an option to the command. All positional arguments are
+ passed as parameter declarations to :class:`Option`; all keyword
+ arguments are forwarded unchanged (except ``cls``).
+ This is equivalent to creating an :class:`Option` instance manually
+ and attaching it to the :attr:`Command.params` list.
+
+ :param cls: the option class to instantiate. This defaults to
+ :class:`Option`.
+ """
+ def decorator(f):
+ # Issue 926, copy attrs, so pre-defined options can re-use the same cls=
+ option_attrs = attrs.copy()
+
+ if 'help' in option_attrs:
+ option_attrs['help'] = inspect.cleandoc(option_attrs['help'])
+ OptionClass = option_attrs.pop('cls', Option)
+ _param_memo(f, OptionClass(param_decls, **option_attrs))
+ return f
+ return decorator
+
+
+def confirmation_option(*param_decls, **attrs):
+ """Shortcut for confirmation prompts that can be ignored by passing
+ ``--yes`` as parameter.
+
+ This is equivalent to decorating a function with :func:`option` with
+ the following parameters::
+
+ def callback(ctx, param, value):
+ if not value:
+ ctx.abort()
+
+ @click.command()
+ @click.option('--yes', is_flag=True, callback=callback,
+ expose_value=False, prompt='Do you want to continue?')
+ def dropdb():
+ pass
+ """
+ def decorator(f):
+ def callback(ctx, param, value):
+ if not value:
+ ctx.abort()
+ attrs.setdefault('is_flag', True)
+ attrs.setdefault('callback', callback)
+ attrs.setdefault('expose_value', False)
+ attrs.setdefault('prompt', 'Do you want to continue?')
+ attrs.setdefault('help', 'Confirm the action without prompting.')
+ return option(*(param_decls or ('--yes',)), **attrs)(f)
+ return decorator
+
+
+def password_option(*param_decls, **attrs):
+ """Shortcut for password prompts.
+
+ This is equivalent to decorating a function with :func:`option` with
+ the following parameters::
+
+ @click.command()
+ @click.option('--password', prompt=True, confirmation_prompt=True,
+ hide_input=True)
+ def changeadmin(password):
+ pass
+ """
+ def decorator(f):
+ attrs.setdefault('prompt', True)
+ attrs.setdefault('confirmation_prompt', True)
+ attrs.setdefault('hide_input', True)
+ return option(*(param_decls or ('--password',)), **attrs)(f)
+ return decorator
+
+
+def version_option(version=None, *param_decls, **attrs):
+ """Adds a ``--version`` option which immediately ends the program
+ printing out the version number. This is implemented as an eager
+ option that prints the version and exits the program in the callback.
+
+ :param version: the version number to show. If not provided Click
+ attempts an auto discovery via setuptools.
+ :param prog_name: the name of the program (defaults to autodetection)
+ :param message: custom message to show instead of the default
+ (``'%(prog)s, version %(version)s'``)
+ :param others: everything else is forwarded to :func:`option`.
+ """
+ if version is None:
+ if hasattr(sys, '_getframe'):
+ module = sys._getframe(1).f_globals.get('__name__')
+ else:
+ module = ''
+
+ def decorator(f):
+ prog_name = attrs.pop('prog_name', None)
+ message = attrs.pop('message', '%(prog)s, version %(version)s')
+
+ def callback(ctx, param, value):
+ if not value or ctx.resilient_parsing:
+ return
+ prog = prog_name
+ if prog is None:
+ prog = ctx.find_root().info_name
+ ver = version
+ if ver is None:
+ try:
+ import pkg_resources
+ except ImportError:
+ pass
+ else:
+ for dist in pkg_resources.working_set:
+ scripts = dist.get_entry_map().get('console_scripts') or {}
+ for script_name, entry_point in iteritems(scripts):
+ if entry_point.module_name == module:
+ ver = dist.version
+ break
+ if ver is None:
+ raise RuntimeError('Could not determine version')
+ echo(message % {
+ 'prog': prog,
+ 'version': ver,
+ }, color=ctx.color)
+ ctx.exit()
+
+ attrs.setdefault('is_flag', True)
+ attrs.setdefault('expose_value', False)
+ attrs.setdefault('is_eager', True)
+ attrs.setdefault('help', 'Show the version and exit.')
+ attrs['callback'] = callback
+ return option(*(param_decls or ('--version',)), **attrs)(f)
+ return decorator
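+
+# Illustrative sketch of version_option with an explicit version string
+# (hypothetical values, not part of the module):
+#
+#     @click.command()
+#     @click.version_option(version='1.0.0', prog_name='mytool')
+#     def cli():
+#         pass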
+
+
+def help_option(*param_decls, **attrs):
+ """Adds a ``--help`` option which immediately ends the program
+ printing out the help page. This is usually unnecessary to add as
+ this is added by default to all commands unless suppressed.
+
+ Like :func:`version_option`, this is implemented as an eager option that
+ prints in the callback and exits.
+
+ All arguments are forwarded to :func:`option`.
+ """
+ def decorator(f):
+ def callback(ctx, param, value):
+ if value and not ctx.resilient_parsing:
+ echo(ctx.get_help(), color=ctx.color)
+ ctx.exit()
+ attrs.setdefault('is_flag', True)
+ attrs.setdefault('expose_value', False)
+ attrs.setdefault('help', 'Show this message and exit.')
+ attrs.setdefault('is_eager', True)
+ attrs['callback'] = callback
+ return option(*(param_decls or ('--help',)), **attrs)(f)
+ return decorator
+
+
+# Circular dependencies between core and decorators
+from .core import Command, Group, Argument, Option
diff --git a/third_party/python/Click/click/exceptions.py b/third_party/python/Click/click/exceptions.py
new file mode 100644
index 0000000000..6fa17658cb
--- /dev/null
+++ b/third_party/python/Click/click/exceptions.py
@@ -0,0 +1,235 @@
+from ._compat import PY2, filename_to_ui, get_text_stderr
+from .utils import echo
+
+
+def _join_param_hints(param_hint):
+ if isinstance(param_hint, (tuple, list)):
+ return ' / '.join('"%s"' % x for x in param_hint)
+ return param_hint
+
+
+class ClickException(Exception):
+ """An exception that Click can handle and show to the user."""
+
+ #: The exit code for this exception
+ exit_code = 1
+
+ def __init__(self, message):
+ ctor_msg = message
+ if PY2:
+ if ctor_msg is not None:
+ ctor_msg = ctor_msg.encode('utf-8')
+ Exception.__init__(self, ctor_msg)
+ self.message = message
+
+ def format_message(self):
+ return self.message
+
+ def __str__(self):
+ return self.message
+
+ if PY2:
+ __unicode__ = __str__
+
+ def __str__(self):
+ return self.message.encode('utf-8')
+
+ def show(self, file=None):
+ if file is None:
+ file = get_text_stderr()
+ echo('Error: %s' % self.format_message(), file=file)
+
+
+class UsageError(ClickException):
+ """An internal exception that signals a usage error. This typically
+ aborts any further handling.
+
+ :param message: the error message to display.
+ :param ctx: optionally the context that caused this error. Click will
+ fill in the context automatically in some situations.
+ """
+ exit_code = 2
+
+ def __init__(self, message, ctx=None):
+ ClickException.__init__(self, message)
+ self.ctx = ctx
+ self.cmd = self.ctx and self.ctx.command or None
+
+ def show(self, file=None):
+ if file is None:
+ file = get_text_stderr()
+ color = None
+ hint = ''
+ if (self.cmd is not None and
+ self.cmd.get_help_option(self.ctx) is not None):
+ hint = ('Try "%s %s" for help.\n'
+ % (self.ctx.command_path, self.ctx.help_option_names[0]))
+ if self.ctx is not None:
+ color = self.ctx.color
+ echo(self.ctx.get_usage() + '\n%s' % hint, file=file, color=color)
+ echo('Error: %s' % self.format_message(), file=file, color=color)
+
+
+class BadParameter(UsageError):
+ """An exception that formats out a standardized error message for a
+ bad parameter. This is useful when thrown from a callback or type as
+ Click will attach contextual information to it (for instance, which
+ parameter it is).
+
+ .. versionadded:: 2.0
+
+ :param param: the parameter object that caused this error. This can
+ be left out, and Click will attach this info itself
+ if possible.
+ :param param_hint: a string that shows up as the parameter name. This
+ can be used as an alternative to `param` in cases
+ where custom validation should happen. If it is
+ a string it's used as such, if it's a list then
+ each item is quoted and separated.
+ """
+
+ def __init__(self, message, ctx=None, param=None,
+ param_hint=None):
+ UsageError.__init__(self, message, ctx)
+ self.param = param
+ self.param_hint = param_hint
+
+ def format_message(self):
+ if self.param_hint is not None:
+ param_hint = self.param_hint
+ elif self.param is not None:
+ param_hint = self.param.get_error_hint(self.ctx)
+ else:
+ return 'Invalid value: %s' % self.message
+ param_hint = _join_param_hints(param_hint)
+
+ return 'Invalid value for %s: %s' % (param_hint, self.message)
+
+
+class MissingParameter(BadParameter):
+ """Raised if click required an option or argument but it was not
+ provided when invoking the script.
+
+ .. versionadded:: 4.0
+
+ :param param_type: a string that indicates the type of the parameter.
+ The default is to inherit the parameter type from
+ the given `param`. Valid values are ``'parameter'``,
+ ``'option'`` or ``'argument'``.
+ """
+
+ def __init__(self, message=None, ctx=None, param=None,
+ param_hint=None, param_type=None):
+ BadParameter.__init__(self, message, ctx, param, param_hint)
+ self.param_type = param_type
+
+ def format_message(self):
+ if self.param_hint is not None:
+ param_hint = self.param_hint
+ elif self.param is not None:
+ param_hint = self.param.get_error_hint(self.ctx)
+ else:
+ param_hint = None
+ param_hint = _join_param_hints(param_hint)
+
+ param_type = self.param_type
+ if param_type is None and self.param is not None:
+ param_type = self.param.param_type_name
+
+ msg = self.message
+ if self.param is not None:
+ msg_extra = self.param.type.get_missing_message(self.param)
+ if msg_extra:
+ if msg:
+ msg += '. ' + msg_extra
+ else:
+ msg = msg_extra
+
+ return 'Missing %s%s%s%s' % (
+ param_type,
+ param_hint and ' %s' % param_hint or '',
+ msg and '. ' or '.',
+ msg or '',
+ )
+
+
+class NoSuchOption(UsageError):
+ """Raised if click attempted to handle an option that does not
+ exist.
+
+ .. versionadded:: 4.0
+ """
+
+ def __init__(self, option_name, message=None, possibilities=None,
+ ctx=None):
+ if message is None:
+ message = 'no such option: %s' % option_name
+ UsageError.__init__(self, message, ctx)
+ self.option_name = option_name
+ self.possibilities = possibilities
+
+ def format_message(self):
+ bits = [self.message]
+ if self.possibilities:
+ if len(self.possibilities) == 1:
+ bits.append('Did you mean %s?' % self.possibilities[0])
+ else:
+ possibilities = sorted(self.possibilities)
+ bits.append('(Possible options: %s)' % ', '.join(possibilities))
+ return ' '.join(bits)
+
+
+class BadOptionUsage(UsageError):
+ """Raised if an option is generally supplied but the use of the option
+ was incorrect. This is for instance raised if the number of arguments
+ for an option is not correct.
+
+ .. versionadded:: 4.0
+
+ :param option_name: the name of the option being used incorrectly.
+ """
+
+ def __init__(self, option_name, message, ctx=None):
+ UsageError.__init__(self, message, ctx)
+ self.option_name = option_name
+
+
+class BadArgumentUsage(UsageError):
+ """Raised if an argument is generally supplied but the use of the argument
+ was incorrect. This is for instance raised if the number of values
+ for an argument is not correct.
+
+ .. versionadded:: 6.0
+ """
+
+ def __init__(self, message, ctx=None):
+ UsageError.__init__(self, message, ctx)
+
+
+class FileError(ClickException):
+ """Raised if a file cannot be opened."""
+
+ def __init__(self, filename, hint=None):
+ ui_filename = filename_to_ui(filename)
+ if hint is None:
+ hint = 'unknown error'
+ ClickException.__init__(self, hint)
+ self.ui_filename = ui_filename
+ self.filename = filename
+
+ def format_message(self):
+ return 'Could not open file %s: %s' % (self.ui_filename, self.message)
+
+
+class Abort(RuntimeError):
+ """An internal signalling exception that signals Click to abort."""
+
+
+class Exit(RuntimeError):
+ """An exception that indicates that the application should exit with some
+ status code.
+
+ :param code: the status code to exit with.
+ """
+ def __init__(self, code=0):
+ self.exit_code = code
diff --git a/third_party/python/Click/click/formatting.py b/third_party/python/Click/click/formatting.py
new file mode 100644
index 0000000000..a3d6a4d389
--- /dev/null
+++ b/third_party/python/Click/click/formatting.py
@@ -0,0 +1,256 @@
+from contextlib import contextmanager
+from .termui import get_terminal_size
+from .parser import split_opt
+from ._compat import term_len
+
+
+# Can force a width. This is used by the test system
+FORCED_WIDTH = None
+
+
+def measure_table(rows):
+ widths = {}
+ for row in rows:
+ for idx, col in enumerate(row):
+ widths[idx] = max(widths.get(idx, 0), term_len(col))
+ return tuple(y for x, y in sorted(widths.items()))
+
+
+def iter_rows(rows, col_count):
+ for row in rows:
+ row = tuple(row)
+ yield row + ('',) * (col_count - len(row))
+
+
+def wrap_text(text, width=78, initial_indent='', subsequent_indent='',
+ preserve_paragraphs=False):
+ """A helper function that intelligently wraps text. By default, it
+ assumes that it operates on a single paragraph of text but if the
+ `preserve_paragraphs` parameter is provided it will intelligently
+ handle paragraphs (defined by two empty lines).
+
+ If paragraphs are handled, a paragraph can be prefixed with an empty
+ line containing the ``\\b`` character (``\\x08``) to indicate that
+ no rewrapping should happen in that block.
+
+ :param text: the text that should be rewrapped.
+ :param width: the maximum width for the text.
+ :param initial_indent: the initial indent that should be placed on the
+ first line as a string.
+ :param subsequent_indent: the indent string that should be placed on
+ each consecutive line.
+ :param preserve_paragraphs: if this flag is set then the wrapping will
+ intelligently handle paragraphs.
+ """
+ from ._textwrap import TextWrapper
+ text = text.expandtabs()
+ wrapper = TextWrapper(width, initial_indent=initial_indent,
+ subsequent_indent=subsequent_indent,
+ replace_whitespace=False)
+ if not preserve_paragraphs:
+ return wrapper.fill(text)
+
+ p = []
+ buf = []
+ indent = None
+
+ def _flush_par():
+ if not buf:
+ return
+ if buf[0].strip() == '\b':
+ p.append((indent or 0, True, '\n'.join(buf[1:])))
+ else:
+ p.append((indent or 0, False, ' '.join(buf)))
+ del buf[:]
+
+ for line in text.splitlines():
+ if not line:
+ _flush_par()
+ indent = None
+ else:
+ if indent is None:
+ orig_len = term_len(line)
+ line = line.lstrip()
+ indent = orig_len - term_len(line)
+ buf.append(line)
+ _flush_par()
+
+ rv = []
+ for indent, raw, text in p:
+ with wrapper.extra_indent(' ' * indent):
+ if raw:
+ rv.append(wrapper.indent_only(text))
+ else:
+ rv.append(wrapper.fill(text))
+
+ return '\n\n'.join(rv)
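+
+# Illustrative sketch of wrap_text with paragraph handling; the ``\b`` line
+# marks the second paragraph as not-to-be-rewrapped (hypothetical input, not
+# part of the module):
+#
+#     wrap_text('A paragraph that will be rewrapped to the given width.\n'
+#               '\n'
+#               '\b\n'
+#               'kept\n'
+#               'as-is', width=20, preserve_paragraphs=True)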
+
+
+class HelpFormatter(object):
+ """This class helps with formatting text-based help pages. It's
+ usually just needed for very special internal cases, but it's also
+ exposed so that developers can write their own fancy outputs.
+
+ At present, it always writes into memory.
+
+ :param indent_increment: the additional increment for each level.
+ :param width: the width for the text. This defaults to the terminal
+ width clamped to a maximum of 78.
+ """
+
+ def __init__(self, indent_increment=2, width=None, max_width=None):
+ self.indent_increment = indent_increment
+ if max_width is None:
+ max_width = 80
+ if width is None:
+ width = FORCED_WIDTH
+ if width is None:
+ width = max(min(get_terminal_size()[0], max_width) - 2, 50)
+ self.width = width
+ self.current_indent = 0
+ self.buffer = []
+
+ def write(self, string):
+ """Writes a unicode string into the internal buffer."""
+ self.buffer.append(string)
+
+ def indent(self):
+ """Increases the indentation."""
+ self.current_indent += self.indent_increment
+
+ def dedent(self):
+ """Decreases the indentation."""
+ self.current_indent -= self.indent_increment
+
+ def write_usage(self, prog, args='', prefix='Usage: '):
+ """Writes a usage line into the buffer.
+
+ :param prog: the program name.
+ :param args: whitespace separated list of arguments.
+ :param prefix: the prefix for the first line.
+ """
+ usage_prefix = '%*s%s ' % (self.current_indent, prefix, prog)
+ text_width = self.width - self.current_indent
+
+ if text_width >= (term_len(usage_prefix) + 20):
+ # The arguments will fit to the right of the prefix.
+ indent = ' ' * term_len(usage_prefix)
+ self.write(wrap_text(args, text_width,
+ initial_indent=usage_prefix,
+ subsequent_indent=indent))
+ else:
+ # The prefix is too long, put the arguments on the next line.
+ self.write(usage_prefix)
+ self.write('\n')
+ indent = ' ' * (max(self.current_indent, term_len(prefix)) + 4)
+ self.write(wrap_text(args, text_width,
+ initial_indent=indent,
+ subsequent_indent=indent))
+
+ self.write('\n')
+
+ def write_heading(self, heading):
+ """Writes a heading into the buffer."""
+ self.write('%*s%s:\n' % (self.current_indent, '', heading))
+
+ def write_paragraph(self):
+ """Writes a paragraph into the buffer."""
+ if self.buffer:
+ self.write('\n')
+
+ def write_text(self, text):
+ """Writes re-indented text into the buffer. This rewraps and
+ preserves paragraphs.
+ """
+ text_width = max(self.width - self.current_indent, 11)
+ indent = ' ' * self.current_indent
+ self.write(wrap_text(text, text_width,
+ initial_indent=indent,
+ subsequent_indent=indent,
+ preserve_paragraphs=True))
+ self.write('\n')
+
+ def write_dl(self, rows, col_max=30, col_spacing=2):
+ """Writes a definition list into the buffer. This is how options
+ and commands are usually formatted.
+
+ :param rows: a list of two item tuples for the terms and values.
+ :param col_max: the maximum width of the first column.
+ :param col_spacing: the number of spaces between the first and
+ second column.
+ """
+ rows = list(rows)
+ widths = measure_table(rows)
+ if len(widths) != 2:
+ raise TypeError('Expected two columns for definition list')
+
+ first_col = min(widths[0], col_max) + col_spacing
+
+ for first, second in iter_rows(rows, len(widths)):
+ self.write('%*s%s' % (self.current_indent, '', first))
+ if not second:
+ self.write('\n')
+ continue
+ if term_len(first) <= first_col - col_spacing:
+ self.write(' ' * (first_col - term_len(first)))
+ else:
+ self.write('\n')
+ self.write(' ' * (first_col + self.current_indent))
+
+ text_width = max(self.width - first_col - 2, 10)
+ lines = iter(wrap_text(second, text_width).splitlines())
+ if lines:
+ self.write(next(lines) + '\n')
+ for line in lines:
+ self.write('%*s%s\n' % (
+ first_col + self.current_indent, '', line))
+ else:
+ self.write('\n')
+
+ @contextmanager
+ def section(self, name):
+ """Helpful context manager that writes a paragraph, a heading,
+ and the indents.
+
+ :param name: the section name that is written as heading.
+ """
+ self.write_paragraph()
+ self.write_heading(name)
+ self.indent()
+ try:
+ yield
+ finally:
+ self.dedent()
+
+ @contextmanager
+ def indentation(self):
+ """A context manager that increases the indentation."""
+ self.indent()
+ try:
+ yield
+ finally:
+ self.dedent()
+
+ def getvalue(self):
+ """Returns the buffer contents."""
+ return ''.join(self.buffer)
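+
+# Illustrative sketch of building help output by hand with the formatter
+# above (hypothetical rows, not part of the module):
+#
+#     formatter = HelpFormatter(width=60)
+#     formatter.write_usage('mytool', '[OPTIONS] SRC DST')
+#     with formatter.section('Options'):
+#         formatter.write_dl([('-v, --verbose', 'Enable verbose output.'),
+#                             ('--help', 'Show this message and exit.')])
+#     print(formatter.getvalue())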
+
+
+def join_options(options):
+ """Given a list of option strings this joins them in the most appropriate
+ way and returns them in the form ``(formatted_string,
+ any_prefix_is_slash)`` where the second item in the tuple is a flag that
+ indicates if any of the option prefixes was a slash.
+ """
+ rv = []
+ any_prefix_is_slash = False
+ for opt in options:
+ prefix = split_opt(opt)[0]
+ if prefix == '/':
+ any_prefix_is_slash = True
+ rv.append((len(prefix), opt))
+
+ rv.sort(key=lambda x: x[0])
+
+ rv = ', '.join(x[1] for x in rv)
+ return rv, any_prefix_is_slash
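+
+# Worked examples for join_options (illustrative, not part of the module):
+#
+#     join_options(['-f', '--file'])   # -> ('-f, --file', False)
+#     join_options(['/debug'])         # -> ('/debug', True)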
diff --git a/third_party/python/Click/click/globals.py b/third_party/python/Click/click/globals.py
new file mode 100644
index 0000000000..843b594abe
--- /dev/null
+++ b/third_party/python/Click/click/globals.py
@@ -0,0 +1,48 @@
+from threading import local
+
+
+_local = local()
+
+
+def get_current_context(silent=False):
+ """Returns the current click context. This can be used as a way to
+ access the current context object from anywhere. This is a more implicit
+ alternative to the :func:`pass_context` decorator. This function is
+ primarily useful for helpers such as :func:`echo` which might be
+ interested in changing its behavior based on the current context.
+
+ To push the current context, :meth:`Context.scope` can be used.
+
+ .. versionadded:: 5.0
+
+ :param silent: if set to `True` the return value is `None` if no context
+ is available. The default behavior is to raise a
+ :exc:`RuntimeError`.
+ """
+ try:
+ return getattr(_local, 'stack')[-1]
+ except (AttributeError, IndexError):
+ if not silent:
+ raise RuntimeError('There is no active click context.')
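+
+# Illustrative sketch of get_current_context inside a command callback
+# (hypothetical names, not part of the module):
+#
+#     @click.command()
+#     def sync():
+#         ctx = click.get_current_context()
+#         click.echo(ctx.info_name)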
+
+
+def push_context(ctx):
+ """Pushes a new context to the current stack."""
+ _local.__dict__.setdefault('stack', []).append(ctx)
+
+
+def pop_context():
+ """Removes the top level from the stack."""
+ _local.stack.pop()
+
+
+def resolve_color_default(color=None):
+ """"Internal helper to get the default value of the color flag. If a
+ value is passed it's returned unchanged, otherwise it's looked up from
+ the current context.
+ """
+ if color is not None:
+ return color
+ ctx = get_current_context(silent=True)
+ if ctx is not None:
+ return ctx.color
diff --git a/third_party/python/Click/click/parser.py b/third_party/python/Click/click/parser.py
new file mode 100644
index 0000000000..1c3ae9c8ef
--- /dev/null
+++ b/third_party/python/Click/click/parser.py
@@ -0,0 +1,427 @@
+# -*- coding: utf-8 -*-
+"""
+click.parser
+~~~~~~~~~~~~
+
+This module started out as largely a copy paste from the stdlib's
+optparse module with the features removed that we do not need from
+optparse because we implement them in Click on a higher level (for
+instance type handling, help formatting and a lot more).
+
+The plan is to remove more and more from here over time.
+
+The reason this is a different module and not optparse from the stdlib
+is that there are differences in 2.x and 3.x about the error messages
+generated and optparse in the stdlib uses gettext for no good reason
+and might cause us issues.
+"""
+
+import re
+from collections import deque
+from .exceptions import UsageError, NoSuchOption, BadOptionUsage, \
+ BadArgumentUsage
+
+
+def _unpack_args(args, nargs_spec):
+ """Given an iterable of arguments and an iterable of nargs specifications,
+ it returns a tuple with all the unpacked arguments at the first index
+ and all remaining arguments as the second.
+
+ The nargs specification is the number of arguments that should be consumed
+ or `-1` to indicate that this position should eat up all the remainders.
+
+ Missing items are filled with `None`.
+ """
+ args = deque(args)
+ nargs_spec = deque(nargs_spec)
+ rv = []
+ spos = None
+
+ def _fetch(c):
+ try:
+ if spos is None:
+ return c.popleft()
+ else:
+ return c.pop()
+ except IndexError:
+ return None
+
+ while nargs_spec:
+ nargs = _fetch(nargs_spec)
+ if nargs == 1:
+ rv.append(_fetch(args))
+ elif nargs > 1:
+ x = [_fetch(args) for _ in range(nargs)]
+ # If we're reversed, we're pulling in the arguments in reverse,
+ # so we need to turn them around.
+ if spos is not None:
+ x.reverse()
+ rv.append(tuple(x))
+ elif nargs < 0:
+ if spos is not None:
+ raise TypeError('Cannot have two nargs < 0')
+ spos = len(rv)
+ rv.append(None)
+
+ # spos is the position of the wildcard (star). If it's not `None`,
+ # we fill it with the remainder.
+ if spos is not None:
+ rv[spos] = tuple(args)
+ args = []
+ rv[spos + 1:] = reversed(rv[spos + 1:])
+
+ return tuple(rv), list(args)
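+
+# Worked example for _unpack_args (illustrative, not part of the module):
+# with nargs_spec ``[1, -1, 1]`` the middle position eats the remainder and
+# the final value is pulled from the right:
+#
+#     _unpack_args(['a', 'b', 'c', 'd'], [1, -1, 1])
+#     # -> (('a', ('b', 'c'), 'd'), [])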
+
+
+def _error_opt_args(nargs, opt):
+ if nargs == 1:
+ raise BadOptionUsage(opt, '%s option requires an argument' % opt)
+ raise BadOptionUsage(opt, '%s option requires %d arguments' % (opt, nargs))
+
+
+def split_opt(opt):
+ first = opt[:1]
+ if first.isalnum():
+ return '', opt
+ if opt[1:2] == first:
+ return opt[:2], opt[2:]
+ return first, opt[1:]
+
+
+def normalize_opt(opt, ctx):
+ if ctx is None or ctx.token_normalize_func is None:
+ return opt
+ prefix, opt = split_opt(opt)
+ return prefix + ctx.token_normalize_func(opt)
+
+
+def split_arg_string(string):
+ """Given an argument string this attempts to split it into small parts."""
+ rv = []
+ for match in re.finditer(r"('([^'\\]*(?:\\.[^'\\]*)*)'"
+ r'|"([^"\\]*(?:\\.[^"\\]*)*)"'
+ r'|\S+)\s*', string, re.S):
+ arg = match.group().strip()
+ if arg[:1] == arg[-1:] and arg[:1] in '"\'':
+ arg = arg[1:-1].encode('ascii', 'backslashreplace') \
+ .decode('unicode-escape')
+ try:
+ arg = type(string)(arg)
+ except UnicodeError:
+ pass
+ rv.append(arg)
+ return rv
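+
+# Worked example for split_arg_string (illustrative, not part of the module):
+#
+#     split_arg_string('pip install "my package"')
+#     # -> ['pip', 'install', 'my package']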
+
+
+class Option(object):
+
+ def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None):
+ self._short_opts = []
+ self._long_opts = []
+ self.prefixes = set()
+
+ for opt in opts:
+ prefix, value = split_opt(opt)
+ if not prefix:
+ raise ValueError('Invalid start character for option (%s)'
+ % opt)
+ self.prefixes.add(prefix[0])
+ if len(prefix) == 1 and len(value) == 1:
+ self._short_opts.append(opt)
+ else:
+ self._long_opts.append(opt)
+ self.prefixes.add(prefix)
+
+ if action is None:
+ action = 'store'
+
+ self.dest = dest
+ self.action = action
+ self.nargs = nargs
+ self.const = const
+ self.obj = obj
+
+ @property
+ def takes_value(self):
+ return self.action in ('store', 'append')
+
+ def process(self, value, state):
+ if self.action == 'store':
+ state.opts[self.dest] = value
+ elif self.action == 'store_const':
+ state.opts[self.dest] = self.const
+ elif self.action == 'append':
+ state.opts.setdefault(self.dest, []).append(value)
+ elif self.action == 'append_const':
+ state.opts.setdefault(self.dest, []).append(self.const)
+ elif self.action == 'count':
+ state.opts[self.dest] = state.opts.get(self.dest, 0) + 1
+ else:
+ raise ValueError('unknown action %r' % self.action)
+ state.order.append(self.obj)
+
+
+class Argument(object):
+
+ def __init__(self, dest, nargs=1, obj=None):
+ self.dest = dest
+ self.nargs = nargs
+ self.obj = obj
+
+ def process(self, value, state):
+ if self.nargs > 1:
+ holes = sum(1 for x in value if x is None)
+ if holes == len(value):
+ value = None
+ elif holes != 0:
+ raise BadArgumentUsage('argument %s takes %d values'
+ % (self.dest, self.nargs))
+ state.opts[self.dest] = value
+ state.order.append(self.obj)
+
+
+class ParsingState(object):
+
+ def __init__(self, rargs):
+ self.opts = {}
+ self.largs = []
+ self.rargs = rargs
+ self.order = []
+
+
+class OptionParser(object):
+ """The option parser is an internal class that is ultimately used to
+ parse options and arguments. It's modelled after optparse and brings
+ a similar but vastly simplified API. It should generally not be used
+ directly as the high level Click classes wrap it for you.
+
+ It's not nearly as extensible as optparse or argparse as it does not
+ implement features that are implemented on a higher level (such as
+ types or defaults).
+
+ :param ctx: optionally the :class:`~click.Context` that this parser
+ should be associated with.
+ """
+
+ def __init__(self, ctx=None):
+ #: The :class:`~click.Context` for this parser. This might be
+ #: `None` for some advanced use cases.
+ self.ctx = ctx
+ #: This controls how the parser deals with interspersed arguments.
+ #: If this is set to `False`, the parser will stop on the first
+ #: non-option. Click uses this to implement nested subcommands
+ #: safely.
+ self.allow_interspersed_args = True
+ #: This tells the parser how to deal with unknown options. By
+ #: default it will error out (which is sensible), but there is a
+ #: second mode where it will ignore it and continue processing
+ #: after shifting all the unknown options into the resulting args.
+ self.ignore_unknown_options = False
+ if ctx is not None:
+ self.allow_interspersed_args = ctx.allow_interspersed_args
+ self.ignore_unknown_options = ctx.ignore_unknown_options
+ self._short_opt = {}
+ self._long_opt = {}
+ self._opt_prefixes = set(['-', '--'])
+ self._args = []
+
+ def add_option(self, opts, dest, action=None, nargs=1, const=None,
+ obj=None):
+ """Adds a new option named `dest` to the parser. The destination
+ is not inferred (unlike with optparse) and needs to be explicitly
+ provided. Action can be any of ``store``, ``store_const``,
+ ``append``, ``append_const`` or ``count``.
+
+ The `obj` can be used to identify the option in the order list
+ that is returned from the parser.
+ """
+ if obj is None:
+ obj = dest
+ opts = [normalize_opt(opt, self.ctx) for opt in opts]
+ option = Option(opts, dest, action=action, nargs=nargs,
+ const=const, obj=obj)
+ self._opt_prefixes.update(option.prefixes)
+ for opt in option._short_opts:
+ self._short_opt[opt] = option
+ for opt in option._long_opts:
+ self._long_opt[opt] = option
+
+ def add_argument(self, dest, nargs=1, obj=None):
+ """Adds a positional argument named `dest` to the parser.
+
+ The `obj` can be used to identify the argument in the order list
+ that is returned from the parser.
+ """
+ if obj is None:
+ obj = dest
+ self._args.append(Argument(dest=dest, nargs=nargs, obj=obj))
+
+ def parse_args(self, args):
+ """Parses positional arguments and returns ``(values, args, order)``
+ for the parsed options and arguments as well as the leftover
+ arguments if there are any. The order is a list of objects as they
+        appear on the command line. If arguments appear multiple times they
+        will be recorded multiple times as well.
+ """
+ state = ParsingState(args)
+ try:
+ self._process_args_for_options(state)
+ self._process_args_for_args(state)
+ except UsageError:
+ if self.ctx is None or not self.ctx.resilient_parsing:
+ raise
+ return state.opts, state.largs, state.order
+
+ def _process_args_for_args(self, state):
+ pargs, args = _unpack_args(state.largs + state.rargs,
+ [x.nargs for x in self._args])
+
+ for idx, arg in enumerate(self._args):
+ arg.process(pargs[idx], state)
+
+ state.largs = args
+ state.rargs = []
+
+ def _process_args_for_options(self, state):
+ while state.rargs:
+ arg = state.rargs.pop(0)
+ arglen = len(arg)
+ # Double dashes always handled explicitly regardless of what
+ # prefixes are valid.
+ if arg == '--':
+ return
+ elif arg[:1] in self._opt_prefixes and arglen > 1:
+ self._process_opts(arg, state)
+ elif self.allow_interspersed_args:
+ state.largs.append(arg)
+ else:
+ state.rargs.insert(0, arg)
+ return
+
+ # Say this is the original argument list:
+ # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
+ # ^
+ # (we are about to process arg(i)).
+ #
+ # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
+ # [arg0, ..., arg(i-1)] (any options and their arguments will have
+ # been removed from largs).
+ #
+ # The while loop will usually consume 1 or more arguments per pass.
+ # If it consumes 1 (eg. arg is an option that takes no arguments),
+ # then after _process_arg() is done the situation is:
+ #
+ # largs = subset of [arg0, ..., arg(i)]
+ # rargs = [arg(i+1), ..., arg(N-1)]
+ #
+ # If allow_interspersed_args is false, largs will always be
+ # *empty* -- still a subset of [arg0, ..., arg(i-1)], but
+ # not a very interesting subset!
+
+ def _match_long_opt(self, opt, explicit_value, state):
+ if opt not in self._long_opt:
+ possibilities = [word for word in self._long_opt
+ if word.startswith(opt)]
+ raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx)
+
+ option = self._long_opt[opt]
+ if option.takes_value:
+ # At this point it's safe to modify rargs by injecting the
+ # explicit value, because no exception is raised in this
+ # branch. This means that the inserted value will be fully
+ # consumed.
+ if explicit_value is not None:
+ state.rargs.insert(0, explicit_value)
+
+ nargs = option.nargs
+ if len(state.rargs) < nargs:
+ _error_opt_args(nargs, opt)
+ elif nargs == 1:
+ value = state.rargs.pop(0)
+ else:
+ value = tuple(state.rargs[:nargs])
+ del state.rargs[:nargs]
+
+ elif explicit_value is not None:
+ raise BadOptionUsage(opt, '%s option does not take a value' % opt)
+
+ else:
+ value = None
+
+ option.process(value, state)
+
+ def _match_short_opt(self, arg, state):
+ stop = False
+ i = 1
+ prefix = arg[0]
+ unknown_options = []
+
+ for ch in arg[1:]:
+ opt = normalize_opt(prefix + ch, self.ctx)
+ option = self._short_opt.get(opt)
+ i += 1
+
+ if not option:
+ if self.ignore_unknown_options:
+ unknown_options.append(ch)
+ continue
+ raise NoSuchOption(opt, ctx=self.ctx)
+ if option.takes_value:
+ # Any characters left in arg? Pretend they're the
+ # next arg, and stop consuming characters of arg.
+ if i < len(arg):
+ state.rargs.insert(0, arg[i:])
+ stop = True
+
+ nargs = option.nargs
+ if len(state.rargs) < nargs:
+ _error_opt_args(nargs, opt)
+ elif nargs == 1:
+ value = state.rargs.pop(0)
+ else:
+ value = tuple(state.rargs[:nargs])
+ del state.rargs[:nargs]
+
+ else:
+ value = None
+
+ option.process(value, state)
+
+ if stop:
+ break
+
+        # If we got any unknown options we recombine the string of the
+        # remaining options and re-attach the prefix, then report that
+        # to the state as a new larg. This way basic option combining
+        # still works while unknown options are ignored.
+ if self.ignore_unknown_options and unknown_options:
+ state.largs.append(prefix + ''.join(unknown_options))
+
+ def _process_opts(self, arg, state):
+ explicit_value = None
+ # Long option handling happens in two parts. The first part is
+ # supporting explicitly attached values. In any case, we will try
+ # to long match the option first.
+ if '=' in arg:
+ long_opt, explicit_value = arg.split('=', 1)
+ else:
+ long_opt = arg
+ norm_long_opt = normalize_opt(long_opt, self.ctx)
+
+ # At this point we will match the (assumed) long option through
+ # the long option matching code. Note that this allows options
+ # like "-foo" to be matched as long options.
+ try:
+ self._match_long_opt(norm_long_opt, explicit_value, state)
+ except NoSuchOption:
+            # At this point the long option matching failed, and we need
+            # to try with short options. However there is a special rule:
+            # if we have a two character option prefix (as with "--foo"
+            # for instance), we do not dispatch to the short option code
+            # and instead raise the no-such-option error.
+ if arg[:2] not in self._opt_prefixes:
+ return self._match_short_opt(arg, state)
+ if not self.ignore_unknown_options:
+ raise
+ state.largs.append(arg)
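The hunk above is normally driven by Click's higher level Command/Option machinery, but the parser can also be exercised directly. The following is a minimal sketch, not part of the vendored file, with made-up option and argument names:

    from click.parser import OptionParser

    parser = OptionParser()
    parser.add_option(['-v', '--verbose'], dest='verbose', action='count')
    parser.add_option(['-o', '--output'], dest='output')
    parser.add_argument('src', nargs=1)

    # parse_args returns (values, leftover args, processing order)
    opts, largs, order = parser.parse_args(['-vv', '-o', 'out.txt', 'input.txt'])
    # opts  -> {'verbose': 2, 'output': 'out.txt', 'src': 'input.txt'}
    # largs -> []
    # order -> ['verbose', 'verbose', 'output', 'src']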
diff --git a/third_party/python/Click/click/termui.py b/third_party/python/Click/click/termui.py
new file mode 100644
index 0000000000..bf9a3aa163
--- /dev/null
+++ b/third_party/python/Click/click/termui.py
@@ -0,0 +1,606 @@
+import os
+import sys
+import struct
+import inspect
+import itertools
+
+from ._compat import raw_input, text_type, string_types, \
+ isatty, strip_ansi, get_winterm_size, DEFAULT_COLUMNS, WIN
+from .utils import echo
+from .exceptions import Abort, UsageError
+from .types import convert_type, Choice, Path
+from .globals import resolve_color_default
+
+
+# The prompt functions to use. The doc tools currently override these
+# functions to customize how they work.
+visible_prompt_func = raw_input
+
+_ansi_colors = {
+ 'black': 30,
+ 'red': 31,
+ 'green': 32,
+ 'yellow': 33,
+ 'blue': 34,
+ 'magenta': 35,
+ 'cyan': 36,
+ 'white': 37,
+ 'reset': 39,
+ 'bright_black': 90,
+ 'bright_red': 91,
+ 'bright_green': 92,
+ 'bright_yellow': 93,
+ 'bright_blue': 94,
+ 'bright_magenta': 95,
+ 'bright_cyan': 96,
+ 'bright_white': 97,
+}
+_ansi_reset_all = '\033[0m'
+
+
+def hidden_prompt_func(prompt):
+ import getpass
+ return getpass.getpass(prompt)
+
+
+def _build_prompt(text, suffix, show_default=False, default=None, show_choices=True, type=None):
+ prompt = text
+ if type is not None and show_choices and isinstance(type, Choice):
+ prompt += ' (' + ", ".join(map(str, type.choices)) + ')'
+ if default is not None and show_default:
+ prompt = '%s [%s]' % (prompt, default)
+ return prompt + suffix
+
+
+def prompt(text, default=None, hide_input=False, confirmation_prompt=False,
+ type=None, value_proc=None, prompt_suffix=': ', show_default=True,
+ err=False, show_choices=True):
+ """Prompts a user for input. This is a convenience function that can
+ be used to prompt a user for input later.
+
+    If the user aborts the input by sending an interrupt signal, this
+    function will catch it and raise an :exc:`Abort` exception.
+
+ .. versionadded:: 7.0
+ Added the show_choices parameter.
+
+ .. versionadded:: 6.0
+ Added unicode support for cmd.exe on Windows.
+
+ .. versionadded:: 4.0
+ Added the `err` parameter.
+
+ :param text: the text to show for the prompt.
+ :param default: the default value to use if no input happens. If this
+ is not given it will prompt until it's aborted.
+ :param hide_input: if this is set to true then the input value will
+ be hidden.
+ :param confirmation_prompt: asks for confirmation for the value.
+ :param type: the type to use to check the value against.
+ :param value_proc: if this parameter is provided it's a function that
+ is invoked instead of the type conversion to
+ convert a value.
+ :param prompt_suffix: a suffix that should be added to the prompt.
+ :param show_default: shows or hides the default value in the prompt.
+ :param err: if set to true the file defaults to ``stderr`` instead of
+ ``stdout``, the same as with echo.
+ :param show_choices: Show or hide choices if the passed type is a Choice.
+ For example if type is a Choice of either day or week,
+ show_choices is true and text is "Group by" then the
+ prompt will be "Group by (day, week): ".
+ """
+ result = None
+
+ def prompt_func(text):
+ f = hide_input and hidden_prompt_func or visible_prompt_func
+ try:
+ # Write the prompt separately so that we get nice
+ # coloring through colorama on Windows
+ echo(text, nl=False, err=err)
+ return f('')
+ except (KeyboardInterrupt, EOFError):
+ # getpass doesn't print a newline if the user aborts input with ^C.
+ # Allegedly this behavior is inherited from getpass(3).
+ # A doc bug has been filed at https://bugs.python.org/issue24711
+ if hide_input:
+ echo(None, err=err)
+ raise Abort()
+
+ if value_proc is None:
+ value_proc = convert_type(type, default)
+
+ prompt = _build_prompt(text, prompt_suffix, show_default, default, show_choices, type)
+
+ while 1:
+ while 1:
+ value = prompt_func(prompt)
+ if value:
+ break
+ elif default is not None:
+ if isinstance(value_proc, Path):
+                    # validate Path default value (exists, dir_okay, etc.)
+ value = default
+ break
+ return default
+ try:
+ result = value_proc(value)
+ except UsageError as e:
+ echo('Error: %s' % e.message, err=err)
+ continue
+ if not confirmation_prompt:
+ return result
+ while 1:
+ value2 = prompt_func('Repeat for confirmation: ')
+ if value2:
+ break
+ if value == value2:
+ return result
+ echo('Error: the two entered values do not match', err=err)
+
+
+def confirm(text, default=False, abort=False, prompt_suffix=': ',
+ show_default=True, err=False):
+ """Prompts for confirmation (yes/no question).
+
+    If the user aborts the input by sending an interrupt signal this
+    function will catch it and raise an :exc:`Abort` exception.
+
+ .. versionadded:: 4.0
+ Added the `err` parameter.
+
+ :param text: the question to ask.
+ :param default: the default for the prompt.
+    :param abort: if this is set to `True` a negative answer aborts the
+                  execution by raising :exc:`Abort`.
+ :param prompt_suffix: a suffix that should be added to the prompt.
+ :param show_default: shows or hides the default value in the prompt.
+ :param err: if set to true the file defaults to ``stderr`` instead of
+ ``stdout``, the same as with echo.
+ """
+ prompt = _build_prompt(text, prompt_suffix, show_default,
+ default and 'Y/n' or 'y/N')
+ while 1:
+ try:
+ # Write the prompt separately so that we get nice
+ # coloring through colorama on Windows
+ echo(prompt, nl=False, err=err)
+ value = visible_prompt_func('').lower().strip()
+ except (KeyboardInterrupt, EOFError):
+ raise Abort()
+ if value in ('y', 'yes'):
+ rv = True
+ elif value in ('n', 'no'):
+ rv = False
+ elif value == '':
+ rv = default
+ else:
+ echo('Error: invalid input', err=err)
+ continue
+ break
+ if abort and not rv:
+ raise Abort()
+ return rv
+
+
+def get_terminal_size():
+ """Returns the current size of the terminal as tuple in the form
+ ``(width, height)`` in columns and rows.
+ """
+ # If shutil has get_terminal_size() (Python 3.3 and later) use that
+ if sys.version_info >= (3, 3):
+ import shutil
+ shutil_get_terminal_size = getattr(shutil, 'get_terminal_size', None)
+ if shutil_get_terminal_size:
+ sz = shutil_get_terminal_size()
+ return sz.columns, sz.lines
+
+ # We provide a sensible default for get_winterm_size() when being invoked
+ # inside a subprocess. Without this, it would not provide a useful input.
+ if get_winterm_size is not None:
+ size = get_winterm_size()
+ if size == (0, 0):
+ return (79, 24)
+ else:
+ return size
+
+ def ioctl_gwinsz(fd):
+ try:
+ import fcntl
+ import termios
+ cr = struct.unpack(
+ 'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
+ except Exception:
+ return
+ return cr
+
+ cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
+ if not cr:
+ try:
+ fd = os.open(os.ctermid(), os.O_RDONLY)
+ try:
+ cr = ioctl_gwinsz(fd)
+ finally:
+ os.close(fd)
+ except Exception:
+ pass
+ if not cr or not cr[0] or not cr[1]:
+ cr = (os.environ.get('LINES', 25),
+ os.environ.get('COLUMNS', DEFAULT_COLUMNS))
+ return int(cr[1]), int(cr[0])
+
+
+def echo_via_pager(text_or_generator, color=None):
+ """This function takes a text and shows it via an environment specific
+ pager on stdout.
+
+ .. versionchanged:: 3.0
+ Added the `color` flag.
+
+ :param text_or_generator: the text to page, or alternatively, a
+ generator emitting the text to page.
+ :param color: controls if the pager supports ANSI colors or not. The
+ default is autodetection.
+ """
+ color = resolve_color_default(color)
+
+ if inspect.isgeneratorfunction(text_or_generator):
+ i = text_or_generator()
+ elif isinstance(text_or_generator, string_types):
+ i = [text_or_generator]
+ else:
+ i = iter(text_or_generator)
+
+ # convert every element of i to a text type if necessary
+ text_generator = (el if isinstance(el, string_types) else text_type(el)
+ for el in i)
+
+ from ._termui_impl import pager
+ return pager(itertools.chain(text_generator, "\n"), color)
+
+
+def progressbar(iterable=None, length=None, label=None, show_eta=True,
+ show_percent=None, show_pos=False,
+ item_show_func=None, fill_char='#', empty_char='-',
+ bar_template='%(label)s [%(bar)s] %(info)s',
+ info_sep=' ', width=36, file=None, color=None):
+ """This function creates an iterable context manager that can be used
+ to iterate over something while showing a progress bar. It will
+ either iterate over the `iterable` or `length` items (that are counted
+ up). While iteration happens, this function will print a rendered
+ progress bar to the given `file` (defaults to stdout) and will attempt
+ to calculate remaining time and more. By default, this progress bar
+ will not be rendered if the file is not a terminal.
+
+ The context manager creates the progress bar. When the context
+ manager is entered the progress bar is already displayed. With every
+ iteration over the progress bar, the iterable passed to the bar is
+ advanced and the bar is updated. When the context manager exits,
+ a newline is printed and the progress bar is finalized on screen.
+
+    No other printing must happen while the progress bar is active, or
+    it will be unintentionally destroyed.
+
+ Example usage::
+
+ with progressbar(items) as bar:
+ for item in bar:
+ do_something_with(item)
+
+ Alternatively, if no iterable is specified, one can manually update the
+ progress bar through the `update()` method instead of directly
+ iterating over the progress bar. The update method accepts the number
+ of steps to increment the bar with::
+
+ with progressbar(length=chunks.total_bytes) as bar:
+ for chunk in chunks:
+ process_chunk(chunk)
+ bar.update(chunks.bytes)
+
+ .. versionadded:: 2.0
+
+ .. versionadded:: 4.0
+       Added the `color` parameter. Added an `update` method to the
+       progressbar object.
+
+ :param iterable: an iterable to iterate over. If not provided the length
+ is required.
+ :param length: the number of items to iterate over. By default the
+ progressbar will attempt to ask the iterator about its
+ length, which might or might not work. If an iterable is
+ also provided this parameter can be used to override the
+ length. If an iterable is not provided the progress bar
+ will iterate over a range of that length.
+ :param label: the label to show next to the progress bar.
+ :param show_eta: enables or disables the estimated time display. This is
+ automatically disabled if the length cannot be
+ determined.
+ :param show_percent: enables or disables the percentage display. The
+ default is `True` if the iterable has a length or
+ `False` if not.
+ :param show_pos: enables or disables the absolute position display. The
+ default is `False`.
+ :param item_show_func: a function called with the current item which
+ can return a string to show the current item
+ next to the progress bar. Note that the current
+ item can be `None`!
+ :param fill_char: the character to use to show the filled part of the
+ progress bar.
+ :param empty_char: the character to use to show the non-filled part of
+ the progress bar.
+ :param bar_template: the format string to use as template for the bar.
+ The parameters in it are ``label`` for the label,
+ ``bar`` for the progress bar and ``info`` for the
+ info section.
+ :param info_sep: the separator between multiple info items (eta etc.)
+ :param width: the width of the progress bar in characters, 0 means full
+ terminal width
+ :param file: the file to write to. If this is not a terminal then
+ only the label is printed.
+ :param color: controls if the terminal supports ANSI colors or not. The
+ default is autodetection. This is only needed if ANSI
+ codes are included anywhere in the progress bar output
+ which is not the case by default.
+ """
+ from ._termui_impl import ProgressBar
+ color = resolve_color_default(color)
+ return ProgressBar(iterable=iterable, length=length, show_eta=show_eta,
+ show_percent=show_percent, show_pos=show_pos,
+ item_show_func=item_show_func, fill_char=fill_char,
+ empty_char=empty_char, bar_template=bar_template,
+ info_sep=info_sep, file=file, label=label,
+ width=width, color=color)
+
+
+def clear():
+ """Clears the terminal screen. This will have the effect of clearing
+ the whole visible space of the terminal and moving the cursor to the
+ top left. This does not do anything if not connected to a terminal.
+
+ .. versionadded:: 2.0
+ """
+ if not isatty(sys.stdout):
+ return
+ # If we're on Windows and we don't have colorama available, then we
+ # clear the screen by shelling out. Otherwise we can use an escape
+ # sequence.
+ if WIN:
+ os.system('cls')
+ else:
+ sys.stdout.write('\033[2J\033[1;1H')
+
+
+def style(text, fg=None, bg=None, bold=None, dim=None, underline=None,
+ blink=None, reverse=None, reset=True):
+ """Styles a text with ANSI styles and returns the new string. By
+ default the styling is self contained which means that at the end
+ of the string a reset code is issued. This can be prevented by
+ passing ``reset=False``.
+
+ Examples::
+
+ click.echo(click.style('Hello World!', fg='green'))
+ click.echo(click.style('ATTENTION!', blink=True))
+ click.echo(click.style('Some things', reverse=True, fg='cyan'))
+
+ Supported color names:
+
+ * ``black`` (might be a gray)
+ * ``red``
+ * ``green``
+ * ``yellow`` (might be an orange)
+ * ``blue``
+ * ``magenta``
+ * ``cyan``
+ * ``white`` (might be light gray)
+ * ``bright_black``
+ * ``bright_red``
+ * ``bright_green``
+ * ``bright_yellow``
+ * ``bright_blue``
+ * ``bright_magenta``
+ * ``bright_cyan``
+ * ``bright_white``
+ * ``reset`` (reset the color code only)
+
+ .. versionadded:: 2.0
+
+ .. versionadded:: 7.0
+ Added support for bright colors.
+
+ :param text: the string to style with ansi codes.
+ :param fg: if provided this will become the foreground color.
+ :param bg: if provided this will become the background color.
+ :param bold: if provided this will enable or disable bold mode.
+ :param dim: if provided this will enable or disable dim mode. This is
+ badly supported.
+ :param underline: if provided this will enable or disable underline.
+ :param blink: if provided this will enable or disable blinking.
+ :param reverse: if provided this will enable or disable inverse
+ rendering (foreground becomes background and the
+ other way round).
+ :param reset: by default a reset-all code is added at the end of the
+ string which means that styles do not carry over. This
+ can be disabled to compose styles.
+ """
+ bits = []
+ if fg:
+ try:
+ bits.append('\033[%dm' % (_ansi_colors[fg]))
+ except KeyError:
+ raise TypeError('Unknown color %r' % fg)
+ if bg:
+ try:
+ bits.append('\033[%dm' % (_ansi_colors[bg] + 10))
+ except KeyError:
+ raise TypeError('Unknown color %r' % bg)
+ if bold is not None:
+ bits.append('\033[%dm' % (1 if bold else 22))
+ if dim is not None:
+ bits.append('\033[%dm' % (2 if dim else 22))
+ if underline is not None:
+ bits.append('\033[%dm' % (4 if underline else 24))
+ if blink is not None:
+ bits.append('\033[%dm' % (5 if blink else 25))
+ if reverse is not None:
+ bits.append('\033[%dm' % (7 if reverse else 27))
+ bits.append(text)
+ if reset:
+ bits.append(_ansi_reset_all)
+ return ''.join(bits)
+
+
+def unstyle(text):
+ """Removes ANSI styling information from a string. Usually it's not
+ necessary to use this function as Click's echo function will
+ automatically remove styling if necessary.
+
+ .. versionadded:: 2.0
+
+ :param text: the text to remove style information from.
+ """
+ return strip_ansi(text)
+
+
+def secho(message=None, file=None, nl=True, err=False, color=None, **styles):
+ """This function combines :func:`echo` and :func:`style` into one
+ call. As such the following two calls are the same::
+
+ click.secho('Hello World!', fg='green')
+ click.echo(click.style('Hello World!', fg='green'))
+
+ All keyword arguments are forwarded to the underlying functions
+ depending on which one they go with.
+
+ .. versionadded:: 2.0
+ """
+ if message is not None:
+ message = style(message, **styles)
+ return echo(message, file=file, nl=nl, err=err, color=color)
+
+
+def edit(text=None, editor=None, env=None, require_save=True,
+ extension='.txt', filename=None):
+ r"""Edits the given text in the defined editor. If an editor is given
+ (should be the full path to the executable but the regular operating
+ system search path is used for finding the executable) it overrides
+ the detected editor. Optionally, some environment variables can be
+ used. If the editor is closed without changes, `None` is returned. In
+ case a file is edited directly the return value is always `None` and
+ `require_save` and `extension` are ignored.
+
+ If the editor cannot be opened a :exc:`UsageError` is raised.
+
+ Note for Windows: to simplify cross-platform usage, the newlines are
+ automatically converted from POSIX to Windows and vice versa. As such,
+ the message here will have ``\n`` as newline markers.
+
+ :param text: the text to edit.
+ :param editor: optionally the editor to use. Defaults to automatic
+ detection.
+ :param env: environment variables to forward to the editor.
+ :param require_save: if this is true, then not saving in the editor
+ will make the return value become `None`.
+ :param extension: the extension to tell the editor about. This defaults
+ to `.txt` but changing this might change syntax
+ highlighting.
+ :param filename: if provided it will edit this file instead of the
+ provided text contents. It will not use a temporary
+ file as an indirection in that case.
+ """
+ from ._termui_impl import Editor
+ editor = Editor(editor=editor, env=env, require_save=require_save,
+ extension=extension)
+ if filename is None:
+ return editor.edit(text)
+ editor.edit_file(filename)
+
+
+def launch(url, wait=False, locate=False):
+ """This function launches the given URL (or filename) in the default
+ viewer application for this file type. If this is an executable, it
+ might launch the executable in a new session. The return value is
+ the exit code of the launched application. Usually, ``0`` indicates
+ success.
+
+ Examples::
+
+ click.launch('https://click.palletsprojects.com/')
+ click.launch('/my/downloaded/file', locate=True)
+
+ .. versionadded:: 2.0
+
+ :param url: URL or filename of the thing to launch.
+ :param wait: waits for the program to stop.
+ :param locate: if this is set to `True` then instead of launching the
+ application associated with the URL it will attempt to
+ launch a file manager with the file located. This
+ might have weird effects if the URL does not point to
+ the filesystem.
+ """
+ from ._termui_impl import open_url
+ return open_url(url, wait=wait, locate=locate)
+
+
+# If this is provided, getchar() calls into this instead. This is used
+# for unittesting purposes.
+_getchar = None
+
+
+def getchar(echo=False):
+ """Fetches a single character from the terminal and returns it. This
+ will always return a unicode character and under certain rare
+    circumstances this might return more than one character. The
+    situations in which more than one character is returned are when,
+    for whatever reason, multiple characters end up in the terminal
+    buffer or standard input is not actually a terminal.
+
+ Note that this will always read from the terminal, even if something
+ is piped into the standard input.
+
+ Note for Windows: in rare cases when typing non-ASCII characters, this
+ function might wait for a second character and then return both at once.
+ This is because certain Unicode characters look like special-key markers.
+
+ .. versionadded:: 2.0
+
+ :param echo: if set to `True`, the character read will also show up on
+ the terminal. The default is to not show it.
+ """
+ f = _getchar
+ if f is None:
+ from ._termui_impl import getchar as f
+ return f(echo)
+
+
+def raw_terminal():
+ from ._termui_impl import raw_terminal as f
+ return f()
+
+
+def pause(info='Press any key to continue ...', err=False):
+ """This command stops execution and waits for the user to press any
+ key to continue. This is similar to the Windows batch "pause"
+ command. If the program is not run through a terminal, this command
+ will instead do nothing.
+
+ .. versionadded:: 2.0
+
+ .. versionadded:: 4.0
+ Added the `err` parameter.
+
+ :param info: the info string to print before pausing.
+    :param err: if set to true the message goes to ``stderr`` instead of
+                ``stdout``, the same as with echo.
+ """
+ if not isatty(sys.stdin) or not isatty(sys.stdout):
+ return
+ try:
+ if info:
+ echo(info, nl=False, err=err)
+ try:
+ getchar()
+ except (KeyboardInterrupt, EOFError):
+ pass
+ finally:
+ if info:
+ echo(err=err)
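A small usage sketch, not part of the vendored file, tying a few of the helpers above together; the prompt texts and option values are made up:

    import click

    group = click.prompt('Group by', type=click.Choice(['day', 'week']),
                         default='day')
    if click.confirm('Colored output?', default=True):
        click.secho('Group by: %s' % group, fg='green')
    else:
        click.echo('Group by: %s' % group)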
diff --git a/third_party/python/Click/click/testing.py b/third_party/python/Click/click/testing.py
new file mode 100644
index 0000000000..1b2924e0b1
--- /dev/null
+++ b/third_party/python/Click/click/testing.py
@@ -0,0 +1,374 @@
+import os
+import sys
+import shutil
+import tempfile
+import contextlib
+import shlex
+
+from ._compat import iteritems, PY2, string_types
+
+
+# If someone wants to vendor click, we want to ensure the
+# correct package is discovered. Ideally we could use a
+# relative import here but unfortunately Python does not
+# support that.
+clickpkg = sys.modules[__name__.rsplit('.', 1)[0]]
+
+
+if PY2:
+ from cStringIO import StringIO
+else:
+ import io
+ from ._compat import _find_binary_reader
+
+
+class EchoingStdin(object):
+
+ def __init__(self, input, output):
+ self._input = input
+ self._output = output
+
+ def __getattr__(self, x):
+ return getattr(self._input, x)
+
+ def _echo(self, rv):
+ self._output.write(rv)
+ return rv
+
+ def read(self, n=-1):
+ return self._echo(self._input.read(n))
+
+ def readline(self, n=-1):
+ return self._echo(self._input.readline(n))
+
+ def readlines(self):
+ return [self._echo(x) for x in self._input.readlines()]
+
+ def __iter__(self):
+ return iter(self._echo(x) for x in self._input)
+
+ def __repr__(self):
+ return repr(self._input)
+
+
+def make_input_stream(input, charset):
+ # Is already an input stream.
+ if hasattr(input, 'read'):
+ if PY2:
+ return input
+ rv = _find_binary_reader(input)
+ if rv is not None:
+ return rv
+ raise TypeError('Could not find binary reader for input stream.')
+
+ if input is None:
+ input = b''
+ elif not isinstance(input, bytes):
+ input = input.encode(charset)
+ if PY2:
+ return StringIO(input)
+ return io.BytesIO(input)
+
+
+class Result(object):
+ """Holds the captured result of an invoked CLI script."""
+
+ def __init__(self, runner, stdout_bytes, stderr_bytes, exit_code,
+ exception, exc_info=None):
+ #: The runner that created the result
+ self.runner = runner
+ #: The standard output as bytes.
+ self.stdout_bytes = stdout_bytes
+        #: The standard error as bytes, or a falsy value if not available
+ self.stderr_bytes = stderr_bytes
+ #: The exit code as integer.
+ self.exit_code = exit_code
+ #: The exception that happened if one did.
+ self.exception = exception
+ #: The traceback
+ self.exc_info = exc_info
+
+ @property
+ def output(self):
+ """The (standard) output as unicode string."""
+ return self.stdout
+
+ @property
+ def stdout(self):
+ """The standard output as unicode string."""
+ return self.stdout_bytes.decode(self.runner.charset, 'replace') \
+ .replace('\r\n', '\n')
+
+ @property
+ def stderr(self):
+ """The standard error as unicode string."""
+ if not self.stderr_bytes:
+ raise ValueError("stderr not separately captured")
+ return self.stderr_bytes.decode(self.runner.charset, 'replace') \
+ .replace('\r\n', '\n')
+
+
+ def __repr__(self):
+ return '<%s %s>' % (
+ type(self).__name__,
+ self.exception and repr(self.exception) or 'okay',
+ )
+
+
+class CliRunner(object):
+    """The CLI runner provides functionality to invoke a Click command line
+    script for unittesting purposes in an isolated environment. This only
+    works in single-threaded systems without any concurrency as it changes the
+    global interpreter state.
+
+    :param charset: the character set for the input and output data. This is
+                    UTF-8 by default and should not currently be changed, as
+                    the reporting to Click only works properly in Python 2.
+ :param env: a dictionary with environment variables for overriding.
+ :param echo_stdin: if this is set to `True`, then reading from stdin writes
+ to stdout. This is useful for showing examples in
+ some circumstances. Note that regular prompts
+ will automatically echo the input.
+ :param mix_stderr: if this is set to `False`, then stdout and stderr are
+ preserved as independent streams. This is useful for
+ Unix-philosophy apps that have predictable stdout and
+ noisy stderr, such that each may be measured
+ independently
+ """
+
+ def __init__(self, charset=None, env=None, echo_stdin=False,
+ mix_stderr=True):
+ if charset is None:
+ charset = 'utf-8'
+ self.charset = charset
+ self.env = env or {}
+ self.echo_stdin = echo_stdin
+ self.mix_stderr = mix_stderr
+
+ def get_default_prog_name(self, cli):
+ """Given a command object it will return the default program name
+ for it. The default is the `name` attribute or ``"root"`` if not
+ set.
+ """
+ return cli.name or 'root'
+
+ def make_env(self, overrides=None):
+ """Returns the environment overrides for invoking a script."""
+ rv = dict(self.env)
+ if overrides:
+ rv.update(overrides)
+ return rv
+
+ @contextlib.contextmanager
+ def isolation(self, input=None, env=None, color=False):
+        """A context manager that sets up the isolation for invoking a
+        command line tool. This sets up stdin with the given input data
+ and `os.environ` with the overrides from the given dictionary.
+ This also rebinds some internals in Click to be mocked (like the
+ prompt functionality).
+
+ This is automatically done in the :meth:`invoke` method.
+
+ .. versionadded:: 4.0
+ The ``color`` parameter was added.
+
+ :param input: the input stream to put into sys.stdin.
+ :param env: the environment overrides as dictionary.
+ :param color: whether the output should contain color codes. The
+ application can still override this explicitly.
+ """
+ input = make_input_stream(input, self.charset)
+
+ old_stdin = sys.stdin
+ old_stdout = sys.stdout
+ old_stderr = sys.stderr
+ old_forced_width = clickpkg.formatting.FORCED_WIDTH
+ clickpkg.formatting.FORCED_WIDTH = 80
+
+ env = self.make_env(env)
+
+ if PY2:
+ bytes_output = StringIO()
+ if self.echo_stdin:
+ input = EchoingStdin(input, bytes_output)
+ sys.stdout = bytes_output
+ if not self.mix_stderr:
+ bytes_error = StringIO()
+ sys.stderr = bytes_error
+ else:
+ bytes_output = io.BytesIO()
+ if self.echo_stdin:
+ input = EchoingStdin(input, bytes_output)
+ input = io.TextIOWrapper(input, encoding=self.charset)
+ sys.stdout = io.TextIOWrapper(
+ bytes_output, encoding=self.charset)
+ if not self.mix_stderr:
+ bytes_error = io.BytesIO()
+ sys.stderr = io.TextIOWrapper(
+ bytes_error, encoding=self.charset)
+
+ if self.mix_stderr:
+ sys.stderr = sys.stdout
+
+ sys.stdin = input
+
+ def visible_input(prompt=None):
+ sys.stdout.write(prompt or '')
+ val = input.readline().rstrip('\r\n')
+ sys.stdout.write(val + '\n')
+ sys.stdout.flush()
+ return val
+
+ def hidden_input(prompt=None):
+ sys.stdout.write((prompt or '') + '\n')
+ sys.stdout.flush()
+ return input.readline().rstrip('\r\n')
+
+ def _getchar(echo):
+ char = sys.stdin.read(1)
+ if echo:
+ sys.stdout.write(char)
+ sys.stdout.flush()
+ return char
+
+ default_color = color
+
+ def should_strip_ansi(stream=None, color=None):
+ if color is None:
+ return not default_color
+ return not color
+
+ old_visible_prompt_func = clickpkg.termui.visible_prompt_func
+ old_hidden_prompt_func = clickpkg.termui.hidden_prompt_func
+ old__getchar_func = clickpkg.termui._getchar
+ old_should_strip_ansi = clickpkg.utils.should_strip_ansi
+ clickpkg.termui.visible_prompt_func = visible_input
+ clickpkg.termui.hidden_prompt_func = hidden_input
+ clickpkg.termui._getchar = _getchar
+ clickpkg.utils.should_strip_ansi = should_strip_ansi
+
+ old_env = {}
+ try:
+ for key, value in iteritems(env):
+ old_env[key] = os.environ.get(key)
+ if value is None:
+ try:
+ del os.environ[key]
+ except Exception:
+ pass
+ else:
+ os.environ[key] = value
+ yield (bytes_output, not self.mix_stderr and bytes_error)
+ finally:
+ for key, value in iteritems(old_env):
+ if value is None:
+ try:
+ del os.environ[key]
+ except Exception:
+ pass
+ else:
+ os.environ[key] = value
+ sys.stdout = old_stdout
+ sys.stderr = old_stderr
+ sys.stdin = old_stdin
+ clickpkg.termui.visible_prompt_func = old_visible_prompt_func
+ clickpkg.termui.hidden_prompt_func = old_hidden_prompt_func
+ clickpkg.termui._getchar = old__getchar_func
+ clickpkg.utils.should_strip_ansi = old_should_strip_ansi
+ clickpkg.formatting.FORCED_WIDTH = old_forced_width
+
+ def invoke(self, cli, args=None, input=None, env=None,
+ catch_exceptions=True, color=False, mix_stderr=False, **extra):
+ """Invokes a command in an isolated environment. The arguments are
+ forwarded directly to the command line script, the `extra` keyword
+ arguments are passed to the :meth:`~clickpkg.Command.main` function of
+ the command.
+
+ This returns a :class:`Result` object.
+
+ .. versionadded:: 3.0
+ The ``catch_exceptions`` parameter was added.
+
+ .. versionchanged:: 3.0
+ The result object now has an `exc_info` attribute with the
+ traceback if available.
+
+ .. versionadded:: 4.0
+ The ``color`` parameter was added.
+
+ :param cli: the command to invoke
+ :param args: the arguments to invoke. It may be given as an iterable
+ or a string. When given as string it will be interpreted
+ as a Unix shell command. More details at
+ :func:`shlex.split`.
+ :param input: the input data for `sys.stdin`.
+ :param env: the environment overrides.
+ :param catch_exceptions: Whether to catch any other exceptions than
+ ``SystemExit``.
+ :param extra: the keyword arguments to pass to :meth:`main`.
+ :param color: whether the output should contain color codes. The
+ application can still override this explicitly.
+ """
+ exc_info = None
+ with self.isolation(input=input, env=env, color=color) as outstreams:
+ exception = None
+ exit_code = 0
+
+ if isinstance(args, string_types):
+ args = shlex.split(args)
+
+ try:
+ prog_name = extra.pop("prog_name")
+ except KeyError:
+ prog_name = self.get_default_prog_name(cli)
+
+ try:
+ cli.main(args=args or (), prog_name=prog_name, **extra)
+ except SystemExit as e:
+ exc_info = sys.exc_info()
+ exit_code = e.code
+ if exit_code is None:
+ exit_code = 0
+
+ if exit_code != 0:
+ exception = e
+
+ if not isinstance(exit_code, int):
+ sys.stdout.write(str(exit_code))
+ sys.stdout.write('\n')
+ exit_code = 1
+
+ except Exception as e:
+ if not catch_exceptions:
+ raise
+ exception = e
+ exit_code = 1
+ exc_info = sys.exc_info()
+ finally:
+ sys.stdout.flush()
+ stdout = outstreams[0].getvalue()
+ stderr = outstreams[1] and outstreams[1].getvalue()
+
+ return Result(runner=self,
+ stdout_bytes=stdout,
+ stderr_bytes=stderr,
+ exit_code=exit_code,
+ exception=exception,
+ exc_info=exc_info)
+
+ @contextlib.contextmanager
+ def isolated_filesystem(self):
+ """A context manager that creates a temporary folder and changes
+ the current working directory to it for isolated filesystem tests.
+ """
+ cwd = os.getcwd()
+ t = tempfile.mkdtemp()
+ os.chdir(t)
+ try:
+ yield t
+ finally:
+ os.chdir(cwd)
+ try:
+ shutil.rmtree(t)
+ except (OSError, IOError):
+ pass
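A minimal sketch, not part of the vendored file, of how CliRunner is typically used in a test; the command and the expected output below are made up:

    import click
    from click.testing import CliRunner

    @click.command()
    @click.argument('name')
    def hello(name):
        click.echo('Hello %s!' % name)

    runner = CliRunner()
    with runner.isolated_filesystem():
        result = runner.invoke(hello, ['World'])
    assert result.exit_code == 0
    assert result.output == 'Hello World!\n'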
diff --git a/third_party/python/Click/click/types.py b/third_party/python/Click/click/types.py
new file mode 100644
index 0000000000..1f88032f54
--- /dev/null
+++ b/third_party/python/Click/click/types.py
@@ -0,0 +1,668 @@
+import os
+import stat
+from datetime import datetime
+
+from ._compat import open_stream, text_type, filename_to_ui, \
+ get_filesystem_encoding, get_streerror, _get_argv_encoding, PY2
+from .exceptions import BadParameter
+from .utils import safecall, LazyFile
+
+
+class ParamType(object):
+ """Helper for converting values through types. The following is
+ necessary for a valid type:
+
+ * it needs a name
+ * it needs to pass through None unchanged
+ * it needs to convert from a string
+ * it needs to convert its result type through unchanged
+      (i.e. it needs to be idempotent)
+ * it needs to be able to deal with param and context being `None`.
+ This can be the case when the object is used with prompt
+ inputs.
+ """
+ is_composite = False
+
+ #: the descriptive name of this type
+ name = None
+
+ #: if a list of this type is expected and the value is pulled from a
+ #: string environment variable, this is what splits it up. `None`
+ #: means any whitespace. For all parameters the general rule is that
+ #: whitespace splits them up. The exception are paths and files which
+ #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on
+ #: Windows).
+ envvar_list_splitter = None
+
+ def __call__(self, value, param=None, ctx=None):
+ if value is not None:
+ return self.convert(value, param, ctx)
+
+ def get_metavar(self, param):
+ """Returns the metavar default for this param if it provides one."""
+
+ def get_missing_message(self, param):
+ """Optionally might return extra information about a missing
+ parameter.
+
+ .. versionadded:: 2.0
+ """
+
+ def convert(self, value, param, ctx):
+ """Converts the value. This is not invoked for values that are
+ `None` (the missing value).
+ """
+ return value
+
+ def split_envvar_value(self, rv):
+ """Given a value from an environment variable this splits it up
+ into small chunks depending on the defined envvar list splitter.
+
+ If the splitter is set to `None`, which means that whitespace splits,
+ then leading and trailing whitespace is ignored. Otherwise, leading
+ and trailing splitters usually lead to empty items being included.
+ """
+ return (rv or '').split(self.envvar_list_splitter)
+
+ def fail(self, message, param=None, ctx=None):
+ """Helper method to fail with an invalid value message."""
+ raise BadParameter(message, ctx=ctx, param=param)
+
+
+class CompositeParamType(ParamType):
+ is_composite = True
+
+ @property
+ def arity(self):
+ raise NotImplementedError()
+
+
+class FuncParamType(ParamType):
+
+ def __init__(self, func):
+ self.name = func.__name__
+ self.func = func
+
+ def convert(self, value, param, ctx):
+ try:
+ return self.func(value)
+ except ValueError:
+ try:
+ value = text_type(value)
+ except UnicodeError:
+ value = str(value).decode('utf-8', 'replace')
+ self.fail(value, param, ctx)
+
+
+class UnprocessedParamType(ParamType):
+ name = 'text'
+
+ def convert(self, value, param, ctx):
+ return value
+
+ def __repr__(self):
+ return 'UNPROCESSED'
+
+
+class StringParamType(ParamType):
+ name = 'text'
+
+ def convert(self, value, param, ctx):
+ if isinstance(value, bytes):
+ enc = _get_argv_encoding()
+ try:
+ value = value.decode(enc)
+ except UnicodeError:
+ fs_enc = get_filesystem_encoding()
+ if fs_enc != enc:
+ try:
+ value = value.decode(fs_enc)
+ except UnicodeError:
+ value = value.decode('utf-8', 'replace')
+ return value
+ return value
+
+ def __repr__(self):
+ return 'STRING'
+
+
+class Choice(ParamType):
+ """The choice type allows a value to be checked against a fixed set
+ of supported values. All of these values have to be strings.
+
+ You should only pass a list or tuple of choices. Other iterables
+ (like generators) may lead to surprising results.
+
+ See :ref:`choice-opts` for an example.
+
+ :param case_sensitive: Set to false to make choices case
+ insensitive. Defaults to true.
+ """
+
+ name = 'choice'
+
+ def __init__(self, choices, case_sensitive=True):
+ self.choices = choices
+ self.case_sensitive = case_sensitive
+
+ def get_metavar(self, param):
+ return '[%s]' % '|'.join(self.choices)
+
+ def get_missing_message(self, param):
+ return 'Choose from:\n\t%s.' % ',\n\t'.join(self.choices)
+
+ def convert(self, value, param, ctx):
+ # Exact match
+ if value in self.choices:
+ return value
+
+ # Match through normalization and case sensitivity
+ # first do token_normalize_func, then lowercase
+ # preserve original `value` to produce an accurate message in
+ # `self.fail`
+ normed_value = value
+ normed_choices = self.choices
+
+ if ctx is not None and \
+ ctx.token_normalize_func is not None:
+ normed_value = ctx.token_normalize_func(value)
+ normed_choices = [ctx.token_normalize_func(choice) for choice in
+ self.choices]
+
+ if not self.case_sensitive:
+ normed_value = normed_value.lower()
+ normed_choices = [choice.lower() for choice in normed_choices]
+
+ if normed_value in normed_choices:
+ return normed_value
+
+ self.fail('invalid choice: %s. (choose from %s)' %
+ (value, ', '.join(self.choices)), param, ctx)
+
+ def __repr__(self):
+ return 'Choice(%r)' % list(self.choices)
+
+
+class DateTime(ParamType):
+ """The DateTime type converts date strings into `datetime` objects.
+
+ The format strings which are checked are configurable, but default to some
+ common (non-timezone aware) ISO 8601 formats.
+
+ When specifying *DateTime* formats, you should only pass a list or a tuple.
+ Other iterables, like generators, may lead to surprising results.
+
+ The format strings are processed using ``datetime.strptime``, and this
+ consequently defines the format strings which are allowed.
+
+ Parsing is tried using each format, in order, and the first format which
+ parses successfully is used.
+
+ :param formats: A list or tuple of date format strings, in the order in
+ which they should be tried. Defaults to
+ ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``,
+ ``'%Y-%m-%d %H:%M:%S'``.
+ """
+ name = 'datetime'
+
+ def __init__(self, formats=None):
+ self.formats = formats or [
+ '%Y-%m-%d',
+ '%Y-%m-%dT%H:%M:%S',
+ '%Y-%m-%d %H:%M:%S'
+ ]
+
+ def get_metavar(self, param):
+ return '[{}]'.format('|'.join(self.formats))
+
+ def _try_to_convert_date(self, value, format):
+ try:
+ return datetime.strptime(value, format)
+ except ValueError:
+ return None
+
+ def convert(self, value, param, ctx):
+ # Exact match
+ for format in self.formats:
+ dtime = self._try_to_convert_date(value, format)
+ if dtime:
+ return dtime
+
+ self.fail(
+ 'invalid datetime format: {}. (choose from {})'.format(
+ value, ', '.join(self.formats)))
+
+ def __repr__(self):
+ return 'DateTime'
+
+
+class IntParamType(ParamType):
+ name = 'integer'
+
+ def convert(self, value, param, ctx):
+ try:
+ return int(value)
+ except (ValueError, UnicodeError):
+ self.fail('%s is not a valid integer' % value, param, ctx)
+
+ def __repr__(self):
+ return 'INT'
+
+
+class IntRange(IntParamType):
+    """A parameter that works similarly to :data:`click.INT` but restricts
+ the value to fit into a range. The default behavior is to fail if the
+ value falls outside the range, but it can also be silently clamped
+ between the two edges.
+
+ See :ref:`ranges` for an example.
+ """
+ name = 'integer range'
+
+ def __init__(self, min=None, max=None, clamp=False):
+ self.min = min
+ self.max = max
+ self.clamp = clamp
+
+ def convert(self, value, param, ctx):
+ rv = IntParamType.convert(self, value, param, ctx)
+ if self.clamp:
+ if self.min is not None and rv < self.min:
+ return self.min
+ if self.max is not None and rv > self.max:
+ return self.max
+ if self.min is not None and rv < self.min or \
+ self.max is not None and rv > self.max:
+ if self.min is None:
+ self.fail('%s is bigger than the maximum valid value '
+ '%s.' % (rv, self.max), param, ctx)
+ elif self.max is None:
+ self.fail('%s is smaller than the minimum valid value '
+ '%s.' % (rv, self.min), param, ctx)
+ else:
+ self.fail('%s is not in the valid range of %s to %s.'
+ % (rv, self.min, self.max), param, ctx)
+ return rv
+
+ def __repr__(self):
+ return 'IntRange(%r, %r)' % (self.min, self.max)
+
+
+class FloatParamType(ParamType):
+ name = 'float'
+
+ def convert(self, value, param, ctx):
+ try:
+ return float(value)
+ except (UnicodeError, ValueError):
+ self.fail('%s is not a valid floating point value' %
+ value, param, ctx)
+
+ def __repr__(self):
+ return 'FLOAT'
+
+
+class FloatRange(FloatParamType):
+    """A parameter that works similarly to :data:`click.FLOAT` but restricts
+ the value to fit into a range. The default behavior is to fail if the
+ value falls outside the range, but it can also be silently clamped
+ between the two edges.
+
+ See :ref:`ranges` for an example.
+ """
+ name = 'float range'
+
+ def __init__(self, min=None, max=None, clamp=False):
+ self.min = min
+ self.max = max
+ self.clamp = clamp
+
+ def convert(self, value, param, ctx):
+ rv = FloatParamType.convert(self, value, param, ctx)
+ if self.clamp:
+ if self.min is not None and rv < self.min:
+ return self.min
+ if self.max is not None and rv > self.max:
+ return self.max
+ if self.min is not None and rv < self.min or \
+ self.max is not None and rv > self.max:
+ if self.min is None:
+ self.fail('%s is bigger than the maximum valid value '
+ '%s.' % (rv, self.max), param, ctx)
+ elif self.max is None:
+ self.fail('%s is smaller than the minimum valid value '
+ '%s.' % (rv, self.min), param, ctx)
+ else:
+ self.fail('%s is not in the valid range of %s to %s.'
+ % (rv, self.min, self.max), param, ctx)
+ return rv
+
+ def __repr__(self):
+ return 'FloatRange(%r, %r)' % (self.min, self.max)
+
+
+class BoolParamType(ParamType):
+ name = 'boolean'
+
+ def convert(self, value, param, ctx):
+ if isinstance(value, bool):
+ return bool(value)
+ value = value.lower()
+ if value in ('true', 't', '1', 'yes', 'y'):
+ return True
+ elif value in ('false', 'f', '0', 'no', 'n'):
+ return False
+ self.fail('%s is not a valid boolean' % value, param, ctx)
+
+ def __repr__(self):
+ return 'BOOL'
+
+
+class UUIDParameterType(ParamType):
+ name = 'uuid'
+
+ def convert(self, value, param, ctx):
+ import uuid
+ try:
+ if PY2 and isinstance(value, text_type):
+ value = value.encode('ascii')
+ return uuid.UUID(value)
+ except (UnicodeError, ValueError):
+ self.fail('%s is not a valid UUID value' % value, param, ctx)
+
+ def __repr__(self):
+ return 'UUID'
+
+
+class File(ParamType):
+ """Declares a parameter to be a file for reading or writing. The file
+ is automatically closed once the context tears down (after the command
+ finished working).
+
+ Files can be opened for reading or writing. The special value ``-``
+ indicates stdin or stdout depending on the mode.
+
+ By default, the file is opened for reading text data, but it can also be
+ opened in binary mode or for writing. The encoding parameter can be used
+ to force a specific encoding.
+
+    The `lazy` flag controls if the file should be opened immediately or upon
+    first IO. The default is to be non-lazy for standard input and output
+    streams as well as files opened for reading, and `lazy` otherwise. When
+    opening a file lazily for reading, it is still opened temporarily for
+    validation, but will not be held open until first IO. `lazy` is mainly
+    useful when opening for writing, to avoid creating the file until it is
+    needed.
+
+ Starting with Click 2.0, files can also be opened atomically in which
+ case all writes go into a separate file in the same folder and upon
+ completion the file will be moved over to the original location. This
+ is useful if a file regularly read by other users is modified.
+
+ See :ref:`file-args` for more information.
+ """
+ name = 'filename'
+ envvar_list_splitter = os.path.pathsep
+
+ def __init__(self, mode='r', encoding=None, errors='strict', lazy=None,
+ atomic=False):
+ self.mode = mode
+ self.encoding = encoding
+ self.errors = errors
+ self.lazy = lazy
+ self.atomic = atomic
+
+ def resolve_lazy_flag(self, value):
+ if self.lazy is not None:
+ return self.lazy
+ if value == '-':
+ return False
+ elif 'w' in self.mode:
+ return True
+ return False
+
+ def convert(self, value, param, ctx):
+ try:
+ if hasattr(value, 'read') or hasattr(value, 'write'):
+ return value
+
+ lazy = self.resolve_lazy_flag(value)
+
+ if lazy:
+ f = LazyFile(value, self.mode, self.encoding, self.errors,
+ atomic=self.atomic)
+ if ctx is not None:
+ ctx.call_on_close(f.close_intelligently)
+ return f
+
+ f, should_close = open_stream(value, self.mode,
+ self.encoding, self.errors,
+ atomic=self.atomic)
+ # If a context is provided, we automatically close the file
+ # at the end of the context execution (or flush out). If a
+ # context does not exist, it's the caller's responsibility to
+ # properly close the file. This for instance happens when the
+ # type is used with prompts.
+ if ctx is not None:
+ if should_close:
+ ctx.call_on_close(safecall(f.close))
+ else:
+ ctx.call_on_close(safecall(f.flush))
+ return f
+ except (IOError, OSError) as e:
+ self.fail('Could not open file: %s: %s' % (
+ filename_to_ui(value),
+ get_streerror(e),
+ ), param, ctx)
+
+
+class Path(ParamType):
+ """The path type is similar to the :class:`File` type but it performs
+ different checks. First of all, instead of returning an open file
+ handle it returns just the filename. Secondly, it can perform various
+ basic checks about what the file or directory should be.
+
+ .. versionchanged:: 6.0
+ `allow_dash` was added.
+
+ :param exists: if set to true, the file or directory needs to exist for
+ this value to be valid. If this is not required and a
+ file does indeed not exist, then all further checks are
+ silently skipped.
+ :param file_okay: controls if a file is a possible value.
+ :param dir_okay: controls if a directory is a possible value.
+ :param writable: if true, a writable check is performed.
+ :param readable: if true, a readable check is performed.
+ :param resolve_path: if this is true, then the path is fully resolved
+ before the value is passed onwards. This means
+ that it's absolute and symlinks are resolved. It
+ will not expand a tilde-prefix, as this is
+ supposed to be done by the shell only.
+ :param allow_dash: If this is set to `True`, a single dash to indicate
+ standard streams is permitted.
+ :param path_type: optionally a string type that should be used to
+ represent the path. The default is `None` which
+ means the return value will be either bytes or
+ unicode depending on what makes most sense given the
+ input data Click deals with.
+ """
+ envvar_list_splitter = os.path.pathsep
+
+ def __init__(self, exists=False, file_okay=True, dir_okay=True,
+ writable=False, readable=True, resolve_path=False,
+ allow_dash=False, path_type=None):
+ self.exists = exists
+ self.file_okay = file_okay
+ self.dir_okay = dir_okay
+ self.writable = writable
+ self.readable = readable
+ self.resolve_path = resolve_path
+ self.allow_dash = allow_dash
+ self.type = path_type
+
+ if self.file_okay and not self.dir_okay:
+ self.name = 'file'
+ self.path_type = 'File'
+ elif self.dir_okay and not self.file_okay:
+ self.name = 'directory'
+ self.path_type = 'Directory'
+ else:
+ self.name = 'path'
+ self.path_type = 'Path'
+
+ def coerce_path_result(self, rv):
+ if self.type is not None and not isinstance(rv, self.type):
+ if self.type is text_type:
+ rv = rv.decode(get_filesystem_encoding())
+ else:
+ rv = rv.encode(get_filesystem_encoding())
+ return rv
+
+ def convert(self, value, param, ctx):
+ rv = value
+
+ is_dash = self.file_okay and self.allow_dash and rv in (b'-', '-')
+
+ if not is_dash:
+ if self.resolve_path:
+ rv = os.path.realpath(rv)
+
+ try:
+ st = os.stat(rv)
+ except OSError:
+ if not self.exists:
+ return self.coerce_path_result(rv)
+ self.fail('%s "%s" does not exist.' % (
+ self.path_type,
+ filename_to_ui(value)
+ ), param, ctx)
+
+ if not self.file_okay and stat.S_ISREG(st.st_mode):
+ self.fail('%s "%s" is a file.' % (
+ self.path_type,
+ filename_to_ui(value)
+ ), param, ctx)
+ if not self.dir_okay and stat.S_ISDIR(st.st_mode):
+ self.fail('%s "%s" is a directory.' % (
+ self.path_type,
+ filename_to_ui(value)
+ ), param, ctx)
+ if self.writable and not os.access(value, os.W_OK):
+ self.fail('%s "%s" is not writable.' % (
+ self.path_type,
+ filename_to_ui(value)
+ ), param, ctx)
+ if self.readable and not os.access(value, os.R_OK):
+ self.fail('%s "%s" is not readable.' % (
+ self.path_type,
+ filename_to_ui(value)
+ ), param, ctx)
+
+ return self.coerce_path_result(rv)
+
+
+class Tuple(CompositeParamType):
+ """The default behavior of Click is to apply a type on a value directly.
+ This works well in most cases, except for when `nargs` is set to a fixed
+ count and different types should be used for different items. In this
+ case the :class:`Tuple` type can be used. This type can only be used
+ if `nargs` is set to a fixed number.
+
+ For more information see :ref:`tuple-type`.
+
+ This can be selected by using a Python tuple literal as a type.
+
+ :param types: a list of types that should be used for the tuple items.
+ """
+
+ def __init__(self, types):
+ self.types = [convert_type(ty) for ty in types]
+
+ @property
+ def name(self):
+ return "<" + " ".join(ty.name for ty in self.types) + ">"
+
+ @property
+ def arity(self):
+ return len(self.types)
+
+ def convert(self, value, param, ctx):
+ if len(value) != len(self.types):
+ raise TypeError('It would appear that nargs is set to conflict '
+ 'with the composite type arity.')
+ return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value))
+
+
+def convert_type(ty, default=None):
+    """Converts a callable or python type into the most appropriate param
+    type.
+ """
+ guessed_type = False
+ if ty is None and default is not None:
+ if isinstance(default, tuple):
+ ty = tuple(map(type, default))
+ else:
+ ty = type(default)
+ guessed_type = True
+
+ if isinstance(ty, tuple):
+ return Tuple(ty)
+ if isinstance(ty, ParamType):
+ return ty
+ if ty is text_type or ty is str or ty is None:
+ return STRING
+ if ty is int:
+ return INT
+ # Booleans are only okay if not guessed. This is done because for
+ # flags the default value is actually a bit of a lie in that it
+ # indicates which of the flags is the one we want. See get_default()
+ # for more information.
+ if ty is bool and not guessed_type:
+ return BOOL
+ if ty is float:
+ return FLOAT
+ if guessed_type:
+ return STRING
+
+ # Catch a common mistake
+ if __debug__:
+ try:
+ if issubclass(ty, ParamType):
+ raise AssertionError('Attempted to use an uninstantiated '
+ 'parameter type (%s).' % ty)
+ except TypeError:
+ pass
+ return FuncParamType(ty)
+
+
+#: A dummy parameter type that just does nothing. From a user's
+#: perspective this appears to just be the same as `STRING` but internally
+#: no string conversion takes place. This is necessary to achieve the
+#: same bytes/unicode behavior on Python 2/3 in situations where you want
+#: to not convert argument types. This is usually useful when working
+#: with file paths as they can appear in bytes and unicode.
+#:
+#: For path related uses the :class:`Path` type is a better choice but
+#: there are situations where an unprocessed type is useful, which is why
+#: it is provided.
+#:
+#: .. versionadded:: 4.0
+UNPROCESSED = UnprocessedParamType()
+
+#: A unicode string parameter type which is the implicit default. This
+#: can also be selected by using ``str`` as type.
+STRING = StringParamType()
+
+#: An integer parameter. This can also be selected by using ``int`` as
+#: type.
+INT = IntParamType()
+
+#: A floating point value parameter. This can also be selected by using
+#: ``float`` as type.
+FLOAT = FloatParamType()
+
+#: A boolean parameter. This is the default for boolean flags. This can
+#: also be selected by using ``bool`` as a type.
+BOOL = BoolParamType()
+
+#: A UUID parameter.
+UUID = UUIDParameterType()
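A short sketch, not part of the vendored file, showing how a few of the types above might be attached to options; the option names, range and date format are made up:

    import click

    @click.command()
    @click.option('--retries', type=click.IntRange(0, 10, clamp=True), default=3)
    @click.option('--when', type=click.DateTime(formats=['%Y-%m-%d']))
    @click.option('--log', type=click.File('w'), default='-')
    def cli(retries, when, log):
        click.echo('retries=%d when=%r' % (retries, when), file=log)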
diff --git a/third_party/python/Click/click/utils.py b/third_party/python/Click/click/utils.py
new file mode 100644
index 0000000000..fc84369fc9
--- /dev/null
+++ b/third_party/python/Click/click/utils.py
@@ -0,0 +1,440 @@
+import os
+import sys
+
+from .globals import resolve_color_default
+
+from ._compat import text_type, open_stream, get_filesystem_encoding, \
+ get_streerror, string_types, PY2, binary_streams, text_streams, \
+ filename_to_ui, auto_wrap_for_ansi, strip_ansi, should_strip_ansi, \
+ _default_text_stdout, _default_text_stderr, is_bytes, WIN
+
+if not PY2:
+ from ._compat import _find_binary_writer
+elif WIN:
+ from ._winconsole import _get_windows_argv, \
+ _hash_py_argv, _initial_argv_hash
+
+
+echo_native_types = string_types + (bytes, bytearray)
+
+
+def _posixify(name):
+ return '-'.join(name.split()).lower()
+
+
+def safecall(func):
+ """Wraps a function so that it swallows exceptions."""
+ def wrapper(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except Exception:
+ pass
+ return wrapper
+
+
+def make_str(value):
+ """Converts a value into a valid string."""
+ if isinstance(value, bytes):
+ try:
+ return value.decode(get_filesystem_encoding())
+ except UnicodeError:
+ return value.decode('utf-8', 'replace')
+ return text_type(value)
+
+
+def make_default_short_help(help, max_length=45):
+ """Return a condensed version of help string."""
+ words = help.split()
+ total_length = 0
+ result = []
+ done = False
+
+ for word in words:
+ if word[-1:] == '.':
+ done = True
+ new_length = result and 1 + len(word) or len(word)
+ if total_length + new_length > max_length:
+ result.append('...')
+ done = True
+ else:
+ if result:
+ result.append(' ')
+ result.append(word)
+ if done:
+ break
+ total_length += new_length
+
+ return ''.join(result)
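+
+# Illustrative behaviour (editor's sketch, not part of the upstream source):
+#
+#     >>> make_default_short_help('Frobs the widget. Further details follow.')
+#     'Frobs the widget.'
+#
+# The condensed help stops at the first word ending in a period, or appends
+# '...' once the character budget would be exceeded.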
+
+
+class LazyFile(object):
+ """A lazy file works like a regular file but it does not fully open
+ the file but it does perform some basic checks early to see if the
+ filename parameter does make sense. This is useful for safely opening
+ files for writing.
+ """
+
+ def __init__(self, filename, mode='r', encoding=None, errors='strict',
+ atomic=False):
+ self.name = filename
+ self.mode = mode
+ self.encoding = encoding
+ self.errors = errors
+ self.atomic = atomic
+
+ if filename == '-':
+ self._f, self.should_close = open_stream(filename, mode,
+ encoding, errors)
+ else:
+ if 'r' in mode:
+ # Open and close the file in case we're opening it for
+ # reading so that we can catch at least some errors in
+ # some cases early.
+ open(filename, mode).close()
+ self._f = None
+ self.should_close = True
+
+ def __getattr__(self, name):
+ return getattr(self.open(), name)
+
+ def __repr__(self):
+ if self._f is not None:
+ return repr(self._f)
+ return '<unopened file %r %s>' % (self.name, self.mode)
+
+ def open(self):
+ """Opens the file if it's not yet open. This call might fail with
+ a :exc:`FileError`. Not handling this error will produce an error
+ that Click shows.
+ """
+ if self._f is not None:
+ return self._f
+ try:
+ rv, self.should_close = open_stream(self.name, self.mode,
+ self.encoding,
+ self.errors,
+ atomic=self.atomic)
+ except (IOError, OSError) as e:
+ from .exceptions import FileError
+ raise FileError(self.name, hint=get_streerror(e))
+ self._f = rv
+ return rv
+
+ def close(self):
+ """Closes the underlying file, no matter what."""
+ if self._f is not None:
+ self._f.close()
+
+ def close_intelligently(self):
+ """This function only closes the file if it was opened by the lazy
+ file wrapper. For instance this will never close stdin.
+ """
+ if self.should_close:
+ self.close()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ self.close_intelligently()
+
+ def __iter__(self):
+ self.open()
+ return iter(self._f)
+
+
+class KeepOpenFile(object):
+
+ def __init__(self, file):
+ self._file = file
+
+ def __getattr__(self, name):
+ return getattr(self._file, name)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ pass
+
+ def __repr__(self):
+ return repr(self._file)
+
+ def __iter__(self):
+ return iter(self._file)
+
+
+def echo(message=None, file=None, nl=True, err=False, color=None):
+ """Prints a message plus a newline to the given file or stdout. On
+ first sight, this looks like the print function, but it has improved
+ support for handling Unicode and binary data that does not fail no
+ matter how badly configured the system is.
+
+ Primarily it means that you can print binary data as well as Unicode
+ data on both 2.x and 3.x to the given file in the most appropriate way
+ possible. This is a very carefree function in that it will try its
+ best to not fail. As of Click 6.0 this includes support for unicode
+ output on the Windows console.
+
+ In addition to that, if `colorama`_ is installed, the echo function will
+ also support clever handling of ANSI codes. Essentially it will then
+ do the following:
+
+ - add transparent handling of ANSI color codes on Windows.
+ - hide ANSI codes automatically if the destination file is not a
+ terminal.
+
+ .. _colorama: https://pypi.org/project/colorama/
+
+    .. versionchanged:: 6.0
+       As of Click 6.0 the echo function will properly support unicode
+       output on the Windows console. Note that Click does not modify
+       the interpreter in any way, which means that `sys.stdout` or the
+ print statement or function will still not provide unicode support.
+
+ .. versionchanged:: 2.0
+ Starting with version 2.0 of Click, the echo function will work
+ with colorama if it's installed.
+
+ .. versionadded:: 3.0
+ The `err` parameter was added.
+
+ .. versionchanged:: 4.0
+ Added the `color` flag.
+
+ :param message: the message to print
+ :param file: the file to write to (defaults to ``stdout``)
+ :param err: if set to true the file defaults to ``stderr`` instead of
+ ``stdout``. This is faster and easier than calling
+ :func:`get_text_stderr` yourself.
+ :param nl: if set to `True` (the default) a newline is printed afterwards.
+ :param color: controls if the terminal supports ANSI colors or not. The
+ default is autodetection.
+ """
+ if file is None:
+ if err:
+ file = _default_text_stderr()
+ else:
+ file = _default_text_stdout()
+
+ # Convert non bytes/text into the native string type.
+ if message is not None and not isinstance(message, echo_native_types):
+ message = text_type(message)
+
+ if nl:
+ message = message or u''
+ if isinstance(message, text_type):
+ message += u'\n'
+ else:
+ message += b'\n'
+
+ # If there is a message, and we're in Python 3, and the value looks
+ # like bytes, we manually need to find the binary stream and write the
+ # message in there. This is done separately so that most stream
+ # types will work as you would expect. Eg: you can write to StringIO
+ # for other cases.
+ if message and not PY2 and is_bytes(message):
+ binary_file = _find_binary_writer(file)
+ if binary_file is not None:
+ file.flush()
+ binary_file.write(message)
+ binary_file.flush()
+ return
+
+ # ANSI-style support. If there is no message or we are dealing with
+ # bytes nothing is happening. If we are connected to a file we want
+ # to strip colors. If we are on windows we either wrap the stream
+ # to strip the color or we use the colorama support to translate the
+ # ansi codes to API calls.
+ if message and not is_bytes(message):
+ color = resolve_color_default(color)
+ if should_strip_ansi(file, color):
+ message = strip_ansi(message)
+ elif WIN:
+ if auto_wrap_for_ansi is not None:
+ file = auto_wrap_for_ansi(file)
+ elif not color:
+ message = strip_ansi(message)
+
+ if message:
+ file.write(message)
+ file.flush()
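+
+# Illustrative usage (editor's sketch, not part of the upstream source):
+#
+#     echo(u'Hello World!')                    # text, newline appended
+#     echo(b'\xe2\x98\x83', nl=False)          # bytes go to the binary stream
+#     echo(u'something went wrong', err=True)  # written to stderr instead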
+
+
+def get_binary_stream(name):
+ """Returns a system stream for byte processing. This essentially
+ returns the stream from the sys module with the given name but it
+ solves some compatibility issues between different Python versions.
+ Primarily this function is necessary for getting binary streams on
+ Python 3.
+
+ :param name: the name of the stream to open. Valid names are ``'stdin'``,
+ ``'stdout'`` and ``'stderr'``
+ """
+ opener = binary_streams.get(name)
+ if opener is None:
+ raise TypeError('Unknown standard stream %r' % name)
+ return opener()
+
+
+def get_text_stream(name, encoding=None, errors='strict'):
+ """Returns a system stream for text processing. This usually returns
+ a wrapped stream around a binary stream returned from
+ :func:`get_binary_stream` but it also can take shortcuts on Python 3
+ for already correctly configured streams.
+
+ :param name: the name of the stream to open. Valid names are ``'stdin'``,
+ ``'stdout'`` and ``'stderr'``
+ :param encoding: overrides the detected default encoding.
+ :param errors: overrides the default error mode.
+ """
+ opener = text_streams.get(name)
+ if opener is None:
+ raise TypeError('Unknown standard stream %r' % name)
+ return opener(encoding, errors)
+
+
+def open_file(filename, mode='r', encoding=None, errors='strict',
+ lazy=False, atomic=False):
+ """This is similar to how the :class:`File` works but for manual
+    usage. Files are opened non-lazily by default. This can open regular
+ files as well as stdin/stdout if ``'-'`` is passed.
+
+ If stdin/stdout is returned the stream is wrapped so that the context
+ manager will not close the stream accidentally. This makes it possible
+    to always use the function like this without having to worry about
+    accidentally closing a standard stream::
+
+ with open_file(filename) as f:
+ ...
+
+ .. versionadded:: 3.0
+
+ :param filename: the name of the file to open (or ``'-'`` for stdin/stdout).
+ :param mode: the mode in which to open the file.
+ :param encoding: the encoding to use.
+ :param errors: the error handling for this file.
+ :param lazy: can be flipped to true to open the file lazily.
+ :param atomic: in atomic mode writes go into a temporary file and it's
+ moved on close.
+ """
+ if lazy:
+ return LazyFile(filename, mode, encoding, errors, atomic=atomic)
+ f, should_close = open_stream(filename, mode, encoding, errors,
+ atomic=atomic)
+ if not should_close:
+ f = KeepOpenFile(f)
+ return f
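+
+# Illustrative usage (editor's sketch): passing '-' maps to the standard
+# streams and the returned wrapper keeps them open when the ``with`` block
+# exits.
+#
+#     with open_file('-', 'w') as f:
+#         f.write('goes to stdout\n')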
+
+
+def get_os_args():
+ """This returns the argument part of sys.argv in the most appropriate
+ form for processing. What this means is that this return value is in
+ a format that works for Click to process but does not necessarily
+ correspond well to what's actually standard for the interpreter.
+
+    On most environments the return value is ``sys.argv[1:]`` unchanged.
+ However if you are on Windows and running Python 2 the return value
+ will actually be a list of unicode strings instead because the
+ default behavior on that platform otherwise will not be able to
+ carry all possible values that sys.argv can have.
+
+ .. versionadded:: 6.0
+ """
+ # We can only extract the unicode argv if sys.argv has not been
+ # changed since the startup of the application.
+ if PY2 and WIN and _initial_argv_hash == _hash_py_argv():
+ return _get_windows_argv()
+ return sys.argv[1:]
+
+
+def format_filename(filename, shorten=False):
+ """Formats a filename for user display. The main purpose of this
+ function is to ensure that the filename can be displayed at all. This
+ will decode the filename to unicode if necessary in a way that it will
+ not fail. Optionally, it can shorten the filename to not include the
+ full path to the filename.
+
+ :param filename: formats a filename for UI display. This will also convert
+ the filename into unicode without failing.
+    :param shorten: this optionally shortens the filename to strip off the
+ path that leads up to it.
+ """
+ if shorten:
+ filename = os.path.basename(filename)
+ return filename_to_ui(filename)
+
+
+def get_app_dir(app_name, roaming=True, force_posix=False):
+ r"""Returns the config folder for the application. The default behavior
+ is to return whatever is most appropriate for the operating system.
+
+ To give you an idea, for an app called ``"Foo Bar"``, something like
+ the following folders could be returned:
+
+ Mac OS X:
+ ``~/Library/Application Support/Foo Bar``
+ Mac OS X (POSIX):
+ ``~/.foo-bar``
+ Unix:
+ ``~/.config/foo-bar``
+ Unix (POSIX):
+ ``~/.foo-bar``
+ Win XP (roaming):
+ ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar``
+ Win XP (not roaming):
+ ``C:\Documents and Settings\<user>\Application Data\Foo Bar``
+ Win 7 (roaming):
+ ``C:\Users\<user>\AppData\Roaming\Foo Bar``
+ Win 7 (not roaming):
+ ``C:\Users\<user>\AppData\Local\Foo Bar``
+
+ .. versionadded:: 2.0
+
+ :param app_name: the application name. This should be properly capitalized
+ and can contain whitespace.
+    :param roaming: controls if the folder should be roaming or not on Windows.
+                    Has no effect otherwise.
+ :param force_posix: if this is set to `True` then on any POSIX system the
+ folder will be stored in the home folder with a leading
+ dot instead of the XDG config home or darwin's
+ application support folder.
+ """
+ if WIN:
+ key = roaming and 'APPDATA' or 'LOCALAPPDATA'
+ folder = os.environ.get(key)
+ if folder is None:
+ folder = os.path.expanduser('~')
+ return os.path.join(folder, app_name)
+ if force_posix:
+ return os.path.join(os.path.expanduser('~/.' + _posixify(app_name)))
+ if sys.platform == 'darwin':
+ return os.path.join(os.path.expanduser(
+ '~/Library/Application Support'), app_name)
+ return os.path.join(
+ os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config')),
+ _posixify(app_name))
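+
+# Illustrative usage (editor's sketch; the resulting path depends on the
+# platform, e.g. ``~/.config/foo-bar/config.ini`` on a default Linux setup):
+#
+#     cfg = os.path.join(get_app_dir('Foo Bar'), 'config.ini')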
+
+
+class PacifyFlushWrapper(object):
+ """This wrapper is used to catch and suppress BrokenPipeErrors resulting
+ from ``.flush()`` being called on broken pipe during the shutdown/final-GC
+ of the Python interpreter. Notably ``.flush()`` is always called on
+ ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
+ other cleanup code, and the case where the underlying file is not a broken
+ pipe, all calls and attributes are proxied.
+ """
+
+ def __init__(self, wrapped):
+ self.wrapped = wrapped
+
+ def flush(self):
+ try:
+ self.wrapped.flush()
+ except IOError as e:
+ import errno
+ if e.errno != errno.EPIPE:
+ raise
+
+ def __getattr__(self, attr):
+ return getattr(self.wrapped, attr)
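+
+
+# A minimal sketch of how this wrapper is meant to be used (editor's
+# illustration; upstream installs it from the command's exception handling
+# when a broken pipe is detected):
+#
+#     sys.stdout = PacifyFlushWrapper(sys.stdout)
+#
+# Afterwards a flush against an already broken pipe during interpreter
+# shutdown is swallowed instead of surfacing as an error.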
diff --git a/third_party/python/Click/examples/README b/third_party/python/Click/examples/README
new file mode 100644
index 0000000000..6be32961f4
--- /dev/null
+++ b/third_party/python/Click/examples/README
@@ -0,0 +1,12 @@
+Click Examples
+
+ This folder contains various Click examples. Note that
+    none of these are runnable by themselves; they should be
+    installed into a virtualenv.
+
+ This is done this way so that scripts also properly work
+ on Windows and in virtualenvs without accidentally executing
+ through the wrong interpreter.
+
+ For more information about this see the documentation:
+ https://click.palletsprojects.com/en/7.x/setuptools/
diff --git a/third_party/python/Click/examples/aliases/README b/third_party/python/Click/examples/aliases/README
new file mode 100644
index 0000000000..5a4a066566
--- /dev/null
+++ b/third_party/python/Click/examples/aliases/README
@@ -0,0 +1,17 @@
+$ aliases_
+
+ aliases is a fairly advanced example that shows how
+ to implement command aliases with Click. It uses a
+ subclass of the default group to customize how commands
+ are located.
+
+ It supports both aliases read from a config file as well
+ as automatic abbreviations.
+
+ The aliases from the config are read from the aliases.ini
+ file. Try `aliases st` and `aliases ci`!
+
+Usage:
+
+ $ pip install --editable .
+ $ aliases --help
diff --git a/third_party/python/Click/examples/aliases/aliases.ini b/third_party/python/Click/examples/aliases/aliases.ini
new file mode 100644
index 0000000000..4f1d54cd6b
--- /dev/null
+++ b/third_party/python/Click/examples/aliases/aliases.ini
@@ -0,0 +1,2 @@
+[aliases]
+ci=commit
diff --git a/third_party/python/Click/examples/aliases/aliases.py b/third_party/python/Click/examples/aliases/aliases.py
new file mode 100644
index 0000000000..38ef72c5c4
--- /dev/null
+++ b/third_party/python/Click/examples/aliases/aliases.py
@@ -0,0 +1,111 @@
+import os
+import click
+
+try:
+ import ConfigParser as configparser
+except ImportError:
+ import configparser
+
+
+class Config(object):
+ """The config in this example only holds aliases."""
+
+ def __init__(self):
+ self.path = os.getcwd()
+ self.aliases = {}
+
+ def read_config(self, filename):
+ parser = configparser.RawConfigParser()
+ parser.read([filename])
+ try:
+ self.aliases.update(parser.items('aliases'))
+ except configparser.NoSectionError:
+ pass
+
+
+pass_config = click.make_pass_decorator(Config, ensure=True)
+
+
+class AliasedGroup(click.Group):
+ """This subclass of a group supports looking up aliases in a config
+ file and with a bit of magic.
+ """
+
+ def get_command(self, ctx, cmd_name):
+        # Step one: builtin commands as normal
+ rv = click.Group.get_command(self, ctx, cmd_name)
+ if rv is not None:
+ return rv
+
+ # Step two: find the config object and ensure it's there. This
+        # will create the config object if it is missing.
+ cfg = ctx.ensure_object(Config)
+
+        # Step three: look up an explicit command alias in the config
+ if cmd_name in cfg.aliases:
+ actual_cmd = cfg.aliases[cmd_name]
+ return click.Group.get_command(self, ctx, actual_cmd)
+
+ # Alternative option: if we did not find an explicit alias we
+        # allow automatic abbreviation of the command. Typing "st" for
+        # instance will match "status". We only allow that however if
+        # there is exactly one match.
+ matches = [x for x in self.list_commands(ctx)
+ if x.lower().startswith(cmd_name.lower())]
+ if not matches:
+ return None
+ elif len(matches) == 1:
+ return click.Group.get_command(self, ctx, matches[0])
+ ctx.fail('Too many matches: %s' % ', '.join(sorted(matches)))
+
+
+def read_config(ctx, param, value):
+ """Callback that is used whenever --config is passed. We use this to
+ always load the correct config. This means that the config is loaded
+ even if the group itself never executes so our aliases stay always
+ available.
+ """
+ cfg = ctx.ensure_object(Config)
+ if value is None:
+ value = os.path.join(os.path.dirname(__file__), 'aliases.ini')
+ cfg.read_config(value)
+ return value
+
+
+@click.command(cls=AliasedGroup)
+@click.option('--config', type=click.Path(exists=True, dir_okay=False),
+ callback=read_config, expose_value=False,
+ help='The config file to use instead of the default.')
+def cli():
+ """An example application that supports aliases."""
+
+
+@cli.command()
+def push():
+ """Pushes changes."""
+ click.echo('Push')
+
+
+@cli.command()
+def pull():
+ """Pulls changes."""
+ click.echo('Pull')
+
+
+@cli.command()
+def clone():
+ """Clones a repository."""
+ click.echo('Clone')
+
+
+@cli.command()
+def commit():
+ """Commits pending changes."""
+ click.echo('Commit')
+
+
+@cli.command()
+@pass_config
+def status(config):
+ """Shows the status."""
+ click.echo('Status for %s' % config.path)
diff --git a/third_party/python/Click/examples/aliases/setup.py b/third_party/python/Click/examples/aliases/setup.py
new file mode 100644
index 0000000000..8d1d6a4068
--- /dev/null
+++ b/third_party/python/Click/examples/aliases/setup.py
@@ -0,0 +1,15 @@
+from setuptools import setup
+
+setup(
+ name='click-example-aliases',
+ version='1.0',
+ py_modules=['aliases'],
+ include_package_data=True,
+ install_requires=[
+ 'click',
+ ],
+ entry_points='''
+ [console_scripts]
+ aliases=aliases:cli
+ ''',
+)
diff --git a/third_party/python/Click/examples/bashcompletion/README b/third_party/python/Click/examples/bashcompletion/README
new file mode 100644
index 0000000000..f8a0d51ef9
--- /dev/null
+++ b/third_party/python/Click/examples/bashcompletion/README
@@ -0,0 +1,12 @@
+$ bashcompletion
+
+ bashcompletion is a simple example of an application that
+ tries to autocomplete commands, arguments and options.
+
+ This example requires Click 2.0 or higher.
+
+Usage:
+
+ $ pip install --editable .
+ $ eval "$(_BASHCOMPLETION_COMPLETE=source bashcompletion)"
+ $ bashcompletion --help
diff --git a/third_party/python/Click/examples/bashcompletion/bashcompletion.py b/third_party/python/Click/examples/bashcompletion/bashcompletion.py
new file mode 100644
index 0000000000..1072840035
--- /dev/null
+++ b/third_party/python/Click/examples/bashcompletion/bashcompletion.py
@@ -0,0 +1,45 @@
+import click
+import os
+
+
+@click.group()
+def cli():
+ pass
+
+
+def get_env_vars(ctx, args, incomplete):
+ # Completions returned as strings do not have a description displayed.
+ for key in os.environ.keys():
+ if incomplete in key:
+ yield key
+
+
+@cli.command(help='A command to print environment variables')
+@click.argument("envvar", type=click.STRING, autocompletion=get_env_vars)
+def cmd1(envvar):
+ click.echo('Environment variable: %s' % envvar)
+ click.echo('Value: %s' % os.environ[envvar])
+
+
+@click.group(help='A group that holds a subcommand')
+def group():
+ pass
+
+
+def list_users(ctx, args, incomplete):
+ # You can generate completions with descriptions by returning
+ # tuples in the form (completion, description).
+ users = [('bob', 'butcher'),
+ ('alice', 'baker'),
+ ('jerry', 'candlestick maker')]
+    # This will allow completion matches based on matches within the description string too!
+ return [user for user in users if incomplete in user[0] or incomplete in user[1]]
+
+
+@group.command(help='Choose a user')
+@click.argument("user", type=click.STRING, autocompletion=list_users)
+def subcmd(user):
+ click.echo('Chosen user is %s' % user)
+
+
+cli.add_command(group)
diff --git a/third_party/python/Click/examples/bashcompletion/setup.py b/third_party/python/Click/examples/bashcompletion/setup.py
new file mode 100644
index 0000000000..ad200818cd
--- /dev/null
+++ b/third_party/python/Click/examples/bashcompletion/setup.py
@@ -0,0 +1,15 @@
+from setuptools import setup
+
+setup(
+ name='click-example-bashcompletion',
+ version='1.0',
+ py_modules=['bashcompletion'],
+ include_package_data=True,
+ install_requires=[
+ 'click',
+ ],
+ entry_points='''
+ [console_scripts]
+ bashcompletion=bashcompletion:cli
+ ''',
+)
diff --git a/third_party/python/Click/examples/colors/README b/third_party/python/Click/examples/colors/README
new file mode 100644
index 0000000000..4b5b44f696
--- /dev/null
+++ b/third_party/python/Click/examples/colors/README
@@ -0,0 +1,11 @@
+$ colors_
+
+ colors is a simple example that shows how you can
+ colorize text.
+
+ For this to work on Windows, colorama is required.
+
+Usage:
+
+ $ pip install --editable .
+ $ colors
diff --git a/third_party/python/Click/examples/colors/colors.py b/third_party/python/Click/examples/colors/colors.py
new file mode 100644
index 0000000000..193b927121
--- /dev/null
+++ b/third_party/python/Click/examples/colors/colors.py
@@ -0,0 +1,28 @@
+import click
+
+
+all_colors = 'black', 'red', 'green', 'yellow', 'blue', 'magenta', \
+ 'cyan', 'white', 'bright_black', 'bright_red', \
+ 'bright_green', 'bright_yellow', 'bright_blue', \
+ 'bright_magenta', 'bright_cyan', 'bright_white'
+
+
+@click.command()
+def cli():
+ """This script prints some colors. If colorama is installed this will
+ also work on Windows. It will also automatically remove all ANSI
+ styles if data is piped into a file.
+
+ Give it a try!
+ """
+ for color in all_colors:
+ click.echo(click.style('I am colored %s' % color, fg=color))
+ for color in all_colors:
+ click.echo(click.style('I am colored %s and bold' % color,
+ fg=color, bold=True))
+ for color in all_colors:
+ click.echo(click.style('I am reverse colored %s' % color, fg=color,
+ reverse=True))
+
+ click.echo(click.style('I am blinking', blink=True))
+ click.echo(click.style('I am underlined', underline=True))
diff --git a/third_party/python/Click/examples/colors/setup.py b/third_party/python/Click/examples/colors/setup.py
new file mode 100644
index 0000000000..3f8e105fab
--- /dev/null
+++ b/third_party/python/Click/examples/colors/setup.py
@@ -0,0 +1,17 @@
+from setuptools import setup
+
+setup(
+ name='click-example-colors',
+ version='1.0',
+ py_modules=['colors'],
+ include_package_data=True,
+ install_requires=[
+ 'click',
+ # Colorama is only required for Windows.
+ 'colorama',
+ ],
+ entry_points='''
+ [console_scripts]
+ colors=colors:cli
+ ''',
+)
diff --git a/third_party/python/Click/examples/complex/README b/third_party/python/Click/examples/complex/README
new file mode 100644
index 0000000000..7eaac90372
--- /dev/null
+++ b/third_party/python/Click/examples/complex/README
@@ -0,0 +1,16 @@
+$ complex_
+
+ complex is an example of building very complex cli
+ applications that load subcommands dynamically from
+ a plugin folder and other things.
+
+ All the commands are implemented as plugins in the
+    `complex.commands` package. If a Python module named
+    "cmd_foo" is placed there it will show up as the "foo"
+    command and the `cli` object within it will be
+    loaded as a nested Click command.
+
+Usage:
+
+ $ pip install --editable .
+ $ complex --help
diff --git a/third_party/python/Click/examples/complex/complex/__init__.py b/third_party/python/Click/examples/complex/complex/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/Click/examples/complex/complex/__init__.py
diff --git a/third_party/python/Click/examples/complex/complex/cli.py b/third_party/python/Click/examples/complex/complex/cli.py
new file mode 100644
index 0000000000..bcfd14a132
--- /dev/null
+++ b/third_party/python/Click/examples/complex/complex/cli.py
@@ -0,0 +1,65 @@
+import os
+import sys
+import click
+
+
+CONTEXT_SETTINGS = dict(auto_envvar_prefix='COMPLEX')
+
+
+class Context(object):
+
+ def __init__(self):
+ self.verbose = False
+ self.home = os.getcwd()
+
+ def log(self, msg, *args):
+ """Logs a message to stderr."""
+ if args:
+ msg %= args
+ click.echo(msg, file=sys.stderr)
+
+ def vlog(self, msg, *args):
+ """Logs a message to stderr only if verbose is enabled."""
+ if self.verbose:
+ self.log(msg, *args)
+
+
+pass_context = click.make_pass_decorator(Context, ensure=True)
+cmd_folder = os.path.abspath(os.path.join(os.path.dirname(__file__),
+ 'commands'))
+
+
+class ComplexCLI(click.MultiCommand):
+
+ def list_commands(self, ctx):
+ rv = []
+ for filename in os.listdir(cmd_folder):
+ if filename.endswith('.py') and \
+ filename.startswith('cmd_'):
+ rv.append(filename[4:-3])
+ rv.sort()
+ return rv
+
+ def get_command(self, ctx, name):
+ try:
+ if sys.version_info[0] == 2:
+ name = name.encode('ascii', 'replace')
+ mod = __import__('complex.commands.cmd_' + name,
+ None, None, ['cli'])
+ except ImportError:
+ return
+ return mod.cli
+
+
+@click.command(cls=ComplexCLI, context_settings=CONTEXT_SETTINGS)
+@click.option('--home', type=click.Path(exists=True, file_okay=False,
+ resolve_path=True),
+ help='Changes the folder to operate on.')
+@click.option('-v', '--verbose', is_flag=True,
+ help='Enables verbose mode.')
+@pass_context
+def cli(ctx, verbose, home):
+ """A complex command line interface."""
+ ctx.verbose = verbose
+ if home is not None:
+ ctx.home = home
diff --git a/third_party/python/Click/examples/complex/complex/commands/__init__.py b/third_party/python/Click/examples/complex/complex/commands/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/Click/examples/complex/complex/commands/__init__.py
diff --git a/third_party/python/Click/examples/complex/complex/commands/cmd_init.py b/third_party/python/Click/examples/complex/complex/commands/cmd_init.py
new file mode 100644
index 0000000000..8c30186c3e
--- /dev/null
+++ b/third_party/python/Click/examples/complex/complex/commands/cmd_init.py
@@ -0,0 +1,13 @@
+import click
+from complex.cli import pass_context
+
+
+@click.command('init', short_help='Initializes a repo.')
+@click.argument('path', required=False, type=click.Path(resolve_path=True))
+@pass_context
+def cli(ctx, path):
+ """Initializes a repository."""
+ if path is None:
+ path = ctx.home
+ ctx.log('Initialized the repository in %s',
+ click.format_filename(path))
diff --git a/third_party/python/Click/examples/complex/complex/commands/cmd_status.py b/third_party/python/Click/examples/complex/complex/commands/cmd_status.py
new file mode 100644
index 0000000000..99e736eee6
--- /dev/null
+++ b/third_party/python/Click/examples/complex/complex/commands/cmd_status.py
@@ -0,0 +1,10 @@
+import click
+from complex.cli import pass_context
+
+
+@click.command('status', short_help='Shows file changes.')
+@pass_context
+def cli(ctx):
+ """Shows file changes in the current working directory."""
+ ctx.log('Changed files: none')
+ ctx.vlog('bla bla bla, debug info')
diff --git a/third_party/python/Click/examples/complex/setup.py b/third_party/python/Click/examples/complex/setup.py
new file mode 100644
index 0000000000..dee002c135
--- /dev/null
+++ b/third_party/python/Click/examples/complex/setup.py
@@ -0,0 +1,15 @@
+from setuptools import setup
+
+setup(
+ name='click-example-complex',
+ version='1.0',
+ packages=['complex', 'complex.commands'],
+ include_package_data=True,
+ install_requires=[
+ 'click',
+ ],
+ entry_points='''
+ [console_scripts]
+ complex=complex.cli:cli
+ ''',
+)
diff --git a/third_party/python/Click/examples/imagepipe/.gitignore b/third_party/python/Click/examples/imagepipe/.gitignore
new file mode 100644
index 0000000000..63280895b1
--- /dev/null
+++ b/third_party/python/Click/examples/imagepipe/.gitignore
@@ -0,0 +1 @@
+processed-*
diff --git a/third_party/python/Click/examples/imagepipe/README b/third_party/python/Click/examples/imagepipe/README
new file mode 100644
index 0000000000..91ec0cd26f
--- /dev/null
+++ b/third_party/python/Click/examples/imagepipe/README
@@ -0,0 +1,13 @@
+$ imagepipe_
+
+ imagepipe is an example application that implements some
+ multi commands that chain image processing instructions
+ together.
+
+ This requires pillow.
+
+Usage:
+
+ $ pip install --editable .
+ $ imagepipe open -i example01.jpg resize -w 128 display
+ $ imagepipe open -i example02.jpg blur save
diff --git a/third_party/python/Click/examples/imagepipe/example01.jpg b/third_party/python/Click/examples/imagepipe/example01.jpg
new file mode 100644
index 0000000000..f2d9397755
--- /dev/null
+++ b/third_party/python/Click/examples/imagepipe/example01.jpg
Binary files differ
diff --git a/third_party/python/Click/examples/imagepipe/example02.jpg b/third_party/python/Click/examples/imagepipe/example02.jpg
new file mode 100644
index 0000000000..b1f802ed85
--- /dev/null
+++ b/third_party/python/Click/examples/imagepipe/example02.jpg
Binary files differ
diff --git a/third_party/python/Click/examples/imagepipe/imagepipe.py b/third_party/python/Click/examples/imagepipe/imagepipe.py
new file mode 100644
index 0000000000..37a1521133
--- /dev/null
+++ b/third_party/python/Click/examples/imagepipe/imagepipe.py
@@ -0,0 +1,266 @@
+import click
+from functools import update_wrapper
+from PIL import Image, ImageFilter, ImageEnhance
+
+
+@click.group(chain=True)
+def cli():
+ """This script processes a bunch of images through pillow in a unix
+    pipe. One command feeds into the next.
+
+ Example:
+
+ \b
+ imagepipe open -i example01.jpg resize -w 128 display
+ imagepipe open -i example02.jpg blur save
+ """
+
+
+@cli.resultcallback()
+def process_commands(processors):
+ """This result callback is invoked with an iterable of all the chained
+    subcommands. Since each subcommand in this example returns a function,
+    we can chain them together to feed one into the other, similar to how
+    a pipe on Unix works.
+ """
+ # Start with an empty iterable.
+ stream = ()
+
+ # Pipe it through all stream processors.
+ for processor in processors:
+ stream = processor(stream)
+
+ # Evaluate the stream and throw away the items.
+ for _ in stream:
+ pass
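+
+# Editor's note (illustrative; the names below are hypothetical): for an
+# invocation such as ``imagepipe open -i a.jpg blur save`` the callback
+# above effectively runs
+#
+#     stream = open_processor(())
+#     stream = blur_processor(stream)
+#     stream = save_processor(stream)
+#     for _ in stream:
+#         pass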
+
+
+def processor(f):
+ """Helper decorator to rewrite a function so that it returns another
+ function from it.
+ """
+ def new_func(*args, **kwargs):
+ def processor(stream):
+ return f(stream, *args, **kwargs)
+ return processor
+ return update_wrapper(new_func, f)
+
+
+def generator(f):
+ """Similar to the :func:`processor` but passes through old values
+    unchanged and does not pass the old values to the wrapped function.
+ """
+ @processor
+ def new_func(stream, *args, **kwargs):
+ for item in stream:
+ yield item
+ for item in f(*args, **kwargs):
+ yield item
+ return update_wrapper(new_func, f)
+
+
+def copy_filename(new, old):
+ new.filename = old.filename
+ return new
+
+
+@cli.command('open')
+@click.option('-i', '--image', 'images', type=click.Path(),
+ multiple=True, help='The image file to open.')
+@generator
+def open_cmd(images):
+ """Loads one or multiple images for processing. The input parameter
+ can be specified multiple times to load more than one image.
+ """
+ for image in images:
+ try:
+ click.echo('Opening "%s"' % image)
+ if image == '-':
+ img = Image.open(click.get_binary_stdin())
+ img.filename = '-'
+ else:
+ img = Image.open(image)
+ yield img
+ except Exception as e:
+ click.echo('Could not open image "%s": %s' % (image, e), err=True)
+
+
+@cli.command('save')
+@click.option('--filename', default='processed-%04d.png', type=click.Path(),
+ help='The format for the filename.',
+ show_default=True)
+@processor
+def save_cmd(images, filename):
+ """Saves all processed images to a series of files."""
+ for idx, image in enumerate(images):
+ try:
+ fn = filename % (idx + 1)
+ click.echo('Saving "%s" as "%s"' % (image.filename, fn))
+ yield image.save(fn)
+ except Exception as e:
+ click.echo('Could not save image "%s": %s' %
+ (image.filename, e), err=True)
+
+
+@cli.command('display')
+@processor
+def display_cmd(images):
+ """Opens all images in an image viewer."""
+ for image in images:
+ click.echo('Displaying "%s"' % image.filename)
+ image.show()
+ yield image
+
+
+@cli.command('resize')
+@click.option('-w', '--width', type=int, help='The new width of the image.')
+@click.option('-h', '--height', type=int, help='The new height of the image.')
+@processor
+def resize_cmd(images, width, height):
+ """Resizes an image by fitting it into the box without changing
+ the aspect ratio.
+ """
+ for image in images:
+ w, h = (width or image.size[0], height or image.size[1])
+ click.echo('Resizing "%s" to %dx%d' % (image.filename, w, h))
+ image.thumbnail((w, h))
+ yield image
+
+
+@cli.command('crop')
+@click.option('-b', '--border', type=int, help='Crop the image from all '
+ 'sides by this amount.')
+@processor
+def crop_cmd(images, border):
+ """Crops an image from all edges."""
+ for image in images:
+ box = [0, 0, image.size[0], image.size[1]]
+
+ if border is not None:
+ for idx, val in enumerate(box):
+ box[idx] = max(0, val - border)
+ click.echo('Cropping "%s" by %dpx' % (image.filename, border))
+ yield copy_filename(image.crop(box), image)
+ else:
+ yield image
+
+
+def convert_rotation(ctx, param, value):
+ if value is None:
+ return
+ value = value.lower()
+ if value in ('90', 'r', 'right'):
+ return (Image.ROTATE_90, 90)
+ if value in ('180', '-180'):
+ return (Image.ROTATE_180, 180)
+ if value in ('-90', '270', 'l', 'left'):
+ return (Image.ROTATE_270, 270)
+ raise click.BadParameter('invalid rotation "%s"' % value)
+
+
+def convert_flip(ctx, param, value):
+ if value is None:
+ return
+ value = value.lower()
+ if value in ('lr', 'leftright'):
+ return (Image.FLIP_LEFT_RIGHT, 'left to right')
+ if value in ('tb', 'topbottom', 'upsidedown', 'ud'):
+        return (Image.FLIP_TOP_BOTTOM, 'top to bottom')
+ raise click.BadParameter('invalid flip "%s"' % value)
+
+
+@cli.command('transpose')
+@click.option('-r', '--rotate', callback=convert_rotation,
+ help='Rotates the image (in degrees)')
+@click.option('-f', '--flip', callback=convert_flip,
+ help='Flips the image [LR / TB]')
+@processor
+def transpose_cmd(images, rotate, flip):
+ """Transposes an image by either rotating or flipping it."""
+ for image in images:
+ if rotate is not None:
+ mode, degrees = rotate
+ click.echo('Rotate "%s" by %ddeg' % (image.filename, degrees))
+ image = copy_filename(image.transpose(mode), image)
+ if flip is not None:
+ mode, direction = flip
+ click.echo('Flip "%s" %s' % (image.filename, direction))
+ image = copy_filename(image.transpose(mode), image)
+ yield image
+
+
+@cli.command('blur')
+@click.option('-r', '--radius', default=2, show_default=True,
+ help='The blur radius.')
+@processor
+def blur_cmd(images, radius):
+ """Applies gaussian blur."""
+ blur = ImageFilter.GaussianBlur(radius)
+ for image in images:
+ click.echo('Blurring "%s" by %dpx' % (image.filename, radius))
+ yield copy_filename(image.filter(blur), image)
+
+
+@cli.command('smoothen')
+@click.option('-i', '--iterations', default=1, show_default=True,
+ help='How many iterations of the smoothen filter to run.')
+@processor
+def smoothen_cmd(images, iterations):
+ """Applies a smoothening filter."""
+ for image in images:
+ click.echo('Smoothening "%s" %d time%s' %
+ (image.filename, iterations, iterations != 1 and 's' or '',))
+        for x in range(iterations):
+ image = copy_filename(image.filter(ImageFilter.BLUR), image)
+ yield image
+
+
+@cli.command('emboss')
+@processor
+def emboss_cmd(images):
+ """Embosses an image."""
+ for image in images:
+ click.echo('Embossing "%s"' % image.filename)
+ yield copy_filename(image.filter(ImageFilter.EMBOSS), image)
+
+
+@cli.command('sharpen')
+@click.option('-f', '--factor', default=2.0,
+ help='Sharpens the image.', show_default=True)
+@processor
+def sharpen_cmd(images, factor):
+ """Sharpens an image."""
+ for image in images:
+ click.echo('Sharpen "%s" by %f' % (image.filename, factor))
+ enhancer = ImageEnhance.Sharpness(image)
+ yield copy_filename(enhancer.enhance(max(1.0, factor)), image)
+
+
+@cli.command('paste')
+@click.option('-l', '--left', default=0, help='Offset from left.')
+@click.option('-r', '--right', default=0, help='Offset from right.')
+@processor
+def paste_cmd(images, left, right):
+ """Pastes the second image on the first image and leaves the rest
+ unchanged.
+ """
+ imageiter = iter(images)
+ image = next(imageiter, None)
+ to_paste = next(imageiter, None)
+
+ if to_paste is None:
+ if image is not None:
+ yield image
+ return
+
+ click.echo('Paste "%s" on "%s"' %
+ (to_paste.filename, image.filename))
+ mask = None
+ if to_paste.mode == 'RGBA' or 'transparency' in to_paste.info:
+ mask = to_paste
+ image.paste(to_paste, (left, right), mask)
+ image.filename += '+' + to_paste.filename
+ yield image
+
+ for image in imageiter:
+ yield image
diff --git a/third_party/python/Click/examples/imagepipe/setup.py b/third_party/python/Click/examples/imagepipe/setup.py
new file mode 100644
index 0000000000..d2d8d9911a
--- /dev/null
+++ b/third_party/python/Click/examples/imagepipe/setup.py
@@ -0,0 +1,16 @@
+from setuptools import setup
+
+setup(
+ name='click-example-imagepipe',
+ version='1.0',
+ py_modules=['imagepipe'],
+ include_package_data=True,
+ install_requires=[
+ 'click',
+ 'pillow',
+ ],
+ entry_points='''
+ [console_scripts]
+ imagepipe=imagepipe:cli
+ ''',
+)
diff --git a/third_party/python/Click/examples/inout/README b/third_party/python/Click/examples/inout/README
new file mode 100644
index 0000000000..6309bc873e
--- /dev/null
+++ b/third_party/python/Click/examples/inout/README
@@ -0,0 +1,10 @@
+$ inout_
+
+ inout is a simple example of an application that
+ can read from files and write to files but also
+ accept input from stdin or write to stdout.
+
+Usage:
+
+ $ pip install --editable .
+ $ inout input_file.txt output_file.txt
diff --git a/third_party/python/Click/examples/inout/inout.py b/third_party/python/Click/examples/inout/inout.py
new file mode 100644
index 0000000000..b93f306629
--- /dev/null
+++ b/third_party/python/Click/examples/inout/inout.py
@@ -0,0 +1,30 @@
+import click
+
+
+@click.command()
+@click.argument('input', type=click.File('rb'), nargs=-1)
+@click.argument('output', type=click.File('wb'))
+def cli(input, output):
+ """This script works similar to the Unix `cat` command but it writes
+ into a specific file (which could be the standard output as denoted by
+ the ``-`` sign).
+
+ \b
+ Copy stdin to stdout:
+ inout - -
+
+ \b
+ Copy foo.txt and bar.txt to stdout:
+ inout foo.txt bar.txt -
+
+ \b
+ Write stdin into the file foo.txt
+ inout - foo.txt
+ """
+ for f in input:
+ while True:
+ chunk = f.read(1024)
+ if not chunk:
+ break
+ output.write(chunk)
+ output.flush()
diff --git a/third_party/python/Click/examples/inout/setup.py b/third_party/python/Click/examples/inout/setup.py
new file mode 100644
index 0000000000..5c613646e2
--- /dev/null
+++ b/third_party/python/Click/examples/inout/setup.py
@@ -0,0 +1,15 @@
+from setuptools import setup
+
+setup(
+ name='click-example-inout',
+ version='0.1',
+ py_modules=['inout'],
+ include_package_data=True,
+ install_requires=[
+ 'click',
+ ],
+ entry_points='''
+ [console_scripts]
+ inout=inout:cli
+ ''',
+)
diff --git a/third_party/python/Click/examples/naval/README b/third_party/python/Click/examples/naval/README
new file mode 100644
index 0000000000..aa289a28e7
--- /dev/null
+++ b/third_party/python/Click/examples/naval/README
@@ -0,0 +1,14 @@
+$ naval_
+
+ naval is a simple example of an application that
+ is ported from the docopt example of the same name.
+
+ Unlike the original this one also runs some code and
+    prints messages and its command line interface was
+ changed slightly to make more sense with established
+ POSIX semantics.
+
+Usage:
+
+ $ pip install --editable .
+ $ naval --help
diff --git a/third_party/python/Click/examples/naval/naval.py b/third_party/python/Click/examples/naval/naval.py
new file mode 100644
index 0000000000..2d173d84bd
--- /dev/null
+++ b/third_party/python/Click/examples/naval/naval.py
@@ -0,0 +1,70 @@
+import click
+
+
+@click.group()
+@click.version_option()
+def cli():
+ """Naval Fate.
+
+    This is the docopt example adapted to Click but with some actual
+ commands implemented and not just the empty parsing which really
+ is not all that interesting.
+ """
+
+
+@cli.group()
+def ship():
+ """Manages ships."""
+
+
+@ship.command('new')
+@click.argument('name')
+def ship_new(name):
+ """Creates a new ship."""
+ click.echo('Created ship %s' % name)
+
+
+@ship.command('move')
+@click.argument('ship')
+@click.argument('x', type=float)
+@click.argument('y', type=float)
+@click.option('--speed', metavar='KN', default=10,
+ help='Speed in knots.')
+def ship_move(ship, x, y, speed):
+ """Moves SHIP to the new location X,Y."""
+ click.echo('Moving ship %s to %s,%s with speed %s' % (ship, x, y, speed))
+
+
+@ship.command('shoot')
+@click.argument('ship')
+@click.argument('x', type=float)
+@click.argument('y', type=float)
+def ship_shoot(ship, x, y):
+ """Makes SHIP fire to X,Y."""
+ click.echo('Ship %s fires to %s,%s' % (ship, x, y))
+
+
+@cli.group('mine')
+def mine():
+ """Manages mines."""
+
+
+@mine.command('set')
+@click.argument('x', type=float)
+@click.argument('y', type=float)
+@click.option('ty', '--moored', flag_value='moored',
+ default=True,
+ help='Moored (anchored) mine. Default.')
+@click.option('ty', '--drifting', flag_value='drifting',
+ help='Drifting mine.')
+def mine_set(x, y, ty):
+ """Sets a mine at a specific coordinate."""
+ click.echo('Set %s mine at %s,%s' % (ty, x, y))
+
+
+@mine.command('remove')
+@click.argument('x', type=float)
+@click.argument('y', type=float)
+def mine_remove(x, y):
+ """Removes a mine at a specific coordinate."""
+ click.echo('Removed mine at %s,%s' % (x, y))
diff --git a/third_party/python/Click/examples/naval/setup.py b/third_party/python/Click/examples/naval/setup.py
new file mode 100644
index 0000000000..124addf430
--- /dev/null
+++ b/third_party/python/Click/examples/naval/setup.py
@@ -0,0 +1,15 @@
+from setuptools import setup
+
+setup(
+ name='click-example-naval',
+ version='2.0',
+ py_modules=['naval'],
+ include_package_data=True,
+ install_requires=[
+ 'click',
+ ],
+ entry_points='''
+ [console_scripts]
+ naval=naval:cli
+ ''',
+)
diff --git a/third_party/python/Click/examples/repo/README b/third_party/python/Click/examples/repo/README
new file mode 100644
index 0000000000..52d1fa7d0b
--- /dev/null
+++ b/third_party/python/Click/examples/repo/README
@@ -0,0 +1,9 @@
+$ repo_
+
+ repo is a simple example of an application that looks
+    and works similarly to hg or git.
+
+Usage:
+
+ $ pip install --editable .
+ $ repo --help
diff --git a/third_party/python/Click/examples/repo/repo.py b/third_party/python/Click/examples/repo/repo.py
new file mode 100644
index 0000000000..2b1992d3e8
--- /dev/null
+++ b/third_party/python/Click/examples/repo/repo.py
@@ -0,0 +1,151 @@
+import os
+import sys
+import posixpath
+
+import click
+
+
+class Repo(object):
+
+ def __init__(self, home):
+ self.home = home
+ self.config = {}
+ self.verbose = False
+
+ def set_config(self, key, value):
+ self.config[key] = value
+ if self.verbose:
+ click.echo(' config[%s] = %s' % (key, value), file=sys.stderr)
+
+ def __repr__(self):
+ return '<Repo %r>' % self.home
+
+
+pass_repo = click.make_pass_decorator(Repo)
+
+
+@click.group()
+@click.option('--repo-home', envvar='REPO_HOME', default='.repo',
+ metavar='PATH', help='Changes the repository folder location.')
+@click.option('--config', nargs=2, multiple=True,
+ metavar='KEY VALUE', help='Overrides a config key/value pair.')
+@click.option('--verbose', '-v', is_flag=True,
+ help='Enables verbose mode.')
+@click.version_option('1.0')
+@click.pass_context
+def cli(ctx, repo_home, config, verbose):
+ """Repo is a command line tool that showcases how to build complex
+ command line interfaces with Click.
+
+ This tool is supposed to look like a distributed version control
+ system to show how something like this can be structured.
+ """
+    # Create a repo object and remember it as the context object. From
+ # this point onwards other commands can refer to it by using the
+ # @pass_repo decorator.
+ ctx.obj = Repo(os.path.abspath(repo_home))
+ ctx.obj.verbose = verbose
+ for key, value in config:
+ ctx.obj.set_config(key, value)
+
+
+@cli.command()
+@click.argument('src')
+@click.argument('dest', required=False)
+@click.option('--shallow/--deep', default=False,
+ help='Makes a checkout shallow or deep. Deep by default.')
+@click.option('--rev', '-r', default='HEAD',
+ help='Clone a specific revision instead of HEAD.')
+@pass_repo
+def clone(repo, src, dest, shallow, rev):
+ """Clones a repository.
+
+ This will clone the repository at SRC into the folder DEST. If DEST
+ is not provided this will automatically use the last path component
+ of SRC and create that folder.
+ """
+ if dest is None:
+ dest = posixpath.split(src)[-1] or '.'
+ click.echo('Cloning repo %s to %s' % (src, os.path.abspath(dest)))
+ repo.home = dest
+ if shallow:
+ click.echo('Making shallow checkout')
+ click.echo('Checking out revision %s' % rev)
+
+
+@cli.command()
+@click.confirmation_option()
+@pass_repo
+def delete(repo):
+ """Deletes a repository.
+
+ This will throw away the current repository.
+ """
+ click.echo('Destroying repo %s' % repo.home)
+ click.echo('Deleted!')
+
+
+@cli.command()
+@click.option('--username', prompt=True,
+ help='The developer\'s shown username.')
+@click.option('--email', prompt='E-Mail',
+ help='The developer\'s email address')
+@click.password_option(help='The login password.')
+@pass_repo
+def setuser(repo, username, email, password):
+ """Sets the user credentials.
+
+ This will override the current user config.
+ """
+ repo.set_config('username', username)
+ repo.set_config('email', email)
+ repo.set_config('password', '*' * len(password))
+ click.echo('Changed credentials.')
+
+
+@cli.command()
+@click.option('--message', '-m', multiple=True,
+ help='The commit message. If provided multiple times each '
+ 'argument gets converted into a new line.')
+@click.argument('files', nargs=-1, type=click.Path())
+@pass_repo
+def commit(repo, files, message):
+ """Commits outstanding changes.
+
+ Commit changes to the given files into the repository. You will need to
+ "repo push" to push up your changes to other repositories.
+
+ If a list of files is omitted, all changes reported by "repo status"
+ will be committed.
+ """
+ if not message:
+ marker = '# Files to be committed:'
+ hint = ['', '', marker, '#']
+ for file in files:
+ hint.append('# U %s' % file)
+ message = click.edit('\n'.join(hint))
+ if message is None:
+ click.echo('Aborted!')
+ return
+ msg = message.split(marker)[0].rstrip()
+ if not msg:
+ click.echo('Aborted! Empty commit message')
+ return
+ else:
+ msg = '\n'.join(message)
+ click.echo('Files to be committed: %s' % (files,))
+ click.echo('Commit message:\n' + msg)
+
+
+@cli.command(short_help='Copies files.')
+@click.option('--force', is_flag=True,
+ help='forcibly copy over an existing managed file')
+@click.argument('src', nargs=-1, type=click.Path())
+@click.argument('dst', type=click.Path())
+@pass_repo
+def copy(repo, src, dst, force):
+ """Copies one or multiple files to a new location. This copies all
+ files from SRC to DST.
+ """
+ for fn in src:
+ click.echo('Copy from %s -> %s' % (fn, dst))
diff --git a/third_party/python/Click/examples/repo/setup.py b/third_party/python/Click/examples/repo/setup.py
new file mode 100644
index 0000000000..19aab7087a
--- /dev/null
+++ b/third_party/python/Click/examples/repo/setup.py
@@ -0,0 +1,15 @@
+from setuptools import setup
+
+setup(
+ name='click-example-repo',
+ version='0.1',
+ py_modules=['repo'],
+ include_package_data=True,
+ install_requires=[
+ 'click',
+ ],
+ entry_points='''
+ [console_scripts]
+ repo=repo:cli
+ ''',
+)
diff --git a/third_party/python/Click/examples/termui/README b/third_party/python/Click/examples/termui/README
new file mode 100644
index 0000000000..2c9d9dd045
--- /dev/null
+++ b/third_party/python/Click/examples/termui/README
@@ -0,0 +1,9 @@
+$ termui_
+
+ termui showcases the different terminal UI helpers that
+ Click provides.
+
+Usage:
+
+ $ pip install --editable .
+ $ termui --help
diff --git a/third_party/python/Click/examples/termui/setup.py b/third_party/python/Click/examples/termui/setup.py
new file mode 100644
index 0000000000..14558e85c1
--- /dev/null
+++ b/third_party/python/Click/examples/termui/setup.py
@@ -0,0 +1,17 @@
+from setuptools import setup
+
+setup(
+ name='click-example-termui',
+ version='1.0',
+ py_modules=['termui'],
+ include_package_data=True,
+ install_requires=[
+ 'click',
+ # Colorama is only required for Windows.
+ 'colorama',
+ ],
+ entry_points='''
+ [console_scripts]
+ termui=termui:cli
+ ''',
+)
diff --git a/third_party/python/Click/examples/termui/termui.py b/third_party/python/Click/examples/termui/termui.py
new file mode 100644
index 0000000000..793afa419b
--- /dev/null
+++ b/third_party/python/Click/examples/termui/termui.py
@@ -0,0 +1,156 @@
+# coding: utf-8
+import click
+import math
+import time
+import random
+
+try:
+ range_type = xrange
+except NameError:
+ range_type = range
+
+
+@click.group()
+def cli():
+ """This script showcases different terminal UI helpers in Click."""
+ pass
+
+
+@cli.command()
+def colordemo():
+ """Demonstrates ANSI color support."""
+ for color in 'red', 'green', 'blue':
+ click.echo(click.style('I am colored %s' % color, fg=color))
+ click.echo(click.style('I am background colored %s' % color, bg=color))
+
+
+@cli.command()
+def pager():
+ """Demonstrates using the pager."""
+ lines = []
+ for x in range_type(200):
+ lines.append('%s. Hello World!' % click.style(str(x), fg='green'))
+ click.echo_via_pager('\n'.join(lines))
+
+
+@cli.command()
+@click.option('--count', default=8000, type=click.IntRange(1, 100000),
+ help='The number of items to process.')
+def progress(count):
+ """Demonstrates the progress bar."""
+ items = range_type(count)
+
+ def process_slowly(item):
+ time.sleep(0.002 * random.random())
+
+ def filter(items):
+ for item in items:
+ if random.random() > 0.3:
+ yield item
+
+ with click.progressbar(items, label='Processing accounts',
+ fill_char=click.style('#', fg='green')) as bar:
+ for item in bar:
+ process_slowly(item)
+
+ def show_item(item):
+ if item is not None:
+ return 'Item #%d' % item
+
+ with click.progressbar(filter(items), label='Committing transaction',
+ fill_char=click.style('#', fg='yellow'),
+ item_show_func=show_item) as bar:
+ for item in bar:
+ process_slowly(item)
+
+ with click.progressbar(length=count, label='Counting',
+ bar_template='%(label)s %(bar)s | %(info)s',
+ fill_char=click.style(u'█', fg='cyan'),
+ empty_char=' ') as bar:
+ for item in bar:
+ process_slowly(item)
+
+ with click.progressbar(length=count, width=0, show_percent=False,
+ show_eta=False,
+ fill_char=click.style('#', fg='magenta')) as bar:
+ for item in bar:
+ process_slowly(item)
+
+    # Non-linear progress bar
+    steps = [math.exp(x * 1. / 20) - 1 for x in range(20)]
+ count = int(sum(steps))
+ with click.progressbar(length=count, show_percent=False,
+ label='Slowing progress bar',
+ fill_char=click.style(u'█', fg='green')) as bar:
+ for item in steps:
+ time.sleep(item)
+ bar.update(item)
+
+
+@cli.command()
+@click.argument('url')
+def open(url):
+ """Opens a file or URL In the default application."""
+ click.launch(url)
+
+
+@cli.command()
+@click.argument('url')
+def locate(url):
+ """Opens a file or URL In the default application."""
+ click.launch(url, locate=True)
+
+
+@cli.command()
+def edit():
+ """Opens an editor with some text in it."""
+ MARKER = '# Everything below is ignored\n'
+ message = click.edit('\n\n' + MARKER)
+ if message is not None:
+ msg = message.split(MARKER, 1)[0].rstrip('\n')
+ if not msg:
+ click.echo('Empty message!')
+ else:
+ click.echo('Message:\n' + msg)
+ else:
+ click.echo('You did not enter anything!')
+
+
+@cli.command()
+def clear():
+ """Clears the entire screen."""
+ click.clear()
+
+
+@cli.command()
+def pause():
+ """Waits for the user to press a button."""
+ click.pause()
+
+
+@cli.command()
+def menu():
+ """Shows a simple menu."""
+ menu = 'main'
+ while 1:
+ if menu == 'main':
+ click.echo('Main menu:')
+ click.echo(' d: debug menu')
+ click.echo(' q: quit')
+ char = click.getchar()
+ if char == 'd':
+ menu = 'debug'
+ elif char == 'q':
+ menu = 'quit'
+ else:
+ click.echo('Invalid input')
+ elif menu == 'debug':
+ click.echo('Debug menu')
+ click.echo(' b: back')
+ char = click.getchar()
+ if char == 'b':
+ menu = 'main'
+ else:
+ click.echo('Invalid input')
+ elif menu == 'quit':
+ return
diff --git a/third_party/python/Click/examples/validation/README b/third_party/python/Click/examples/validation/README
new file mode 100644
index 0000000000..a69e3f4276
--- /dev/null
+++ b/third_party/python/Click/examples/validation/README
@@ -0,0 +1,12 @@
+$ validation_
+
+ validation is a simple example of an application that
+ performs custom validation of parameters in different
+ ways.
+
+ This example requires Click 2.0 or higher.
+
+Usage:
+
+ $ pip install --editable .
+ $ validation --help
diff --git a/third_party/python/Click/examples/validation/setup.py b/third_party/python/Click/examples/validation/setup.py
new file mode 100644
index 0000000000..9491f709c7
--- /dev/null
+++ b/third_party/python/Click/examples/validation/setup.py
@@ -0,0 +1,15 @@
+from setuptools import setup
+
+setup(
+ name='click-example-validation',
+ version='1.0',
+ py_modules=['validation'],
+ include_package_data=True,
+ install_requires=[
+ 'click',
+ ],
+ entry_points='''
+ [console_scripts]
+ validation=validation:cli
+ ''',
+)
diff --git a/third_party/python/Click/examples/validation/validation.py b/third_party/python/Click/examples/validation/validation.py
new file mode 100644
index 0000000000..00fa0a6001
--- /dev/null
+++ b/third_party/python/Click/examples/validation/validation.py
@@ -0,0 +1,44 @@
+import click
+try:
+ from urllib import parse as urlparse
+except ImportError:
+ import urlparse
+
+
+def validate_count(ctx, param, value):
+ if value < 0 or value % 2 != 0:
+ raise click.BadParameter('Should be a positive, even integer.')
+ return value
+
+
+class URL(click.ParamType):
+ name = 'url'
+
+ def convert(self, value, param, ctx):
+ if not isinstance(value, tuple):
+ value = urlparse.urlparse(value)
+ if value.scheme not in ('http', 'https'):
+ self.fail('invalid URL scheme (%s). Only HTTP URLs are '
+ 'allowed' % value.scheme, param, ctx)
+ return value
+
+
+@click.command()
+@click.option('--count', default=2, callback=validate_count,
+ help='A positive even number.')
+@click.option('--foo', help='A mysterious parameter.')
+@click.option('--url', help='A URL', type=URL())
+@click.version_option()
+def cli(count, foo, url):
+ """Validation.
+
+ This example validates parameters in different ways. It does it
+ through callbacks, through a custom type as well as by validating
+ manually in the function.
+ """
+ if foo is not None and foo != 'wat':
+ raise click.BadParameter('If a value is provided it needs to be the '
+ 'value "wat".', param_hint=['--foo'])
+ click.echo('count: %s' % count)
+ click.echo('foo: %s' % foo)
+ click.echo('url: %s' % repr(url))
diff --git a/third_party/python/Click/setup.cfg b/third_party/python/Click/setup.cfg
new file mode 100644
index 0000000000..525a9e7447
--- /dev/null
+++ b/third_party/python/Click/setup.cfg
@@ -0,0 +1,25 @@
+[metadata]
+license_file = LICENSE.rst
+
+[bdist_wheel]
+universal = 1
+
+[tool:pytest]
+testpaths = tests
+
+[coverage:run]
+branch = True
+source =
+ click
+ tests
+
+[coverage:paths]
+source =
+ click
+ .tox/*/lib/python*/site-packages/click
+ .tox/pypy/site-packages/click
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/Click/setup.py b/third_party/python/Click/setup.py
new file mode 100644
index 0000000000..1c18dc7843
--- /dev/null
+++ b/third_party/python/Click/setup.py
@@ -0,0 +1,44 @@
+import io
+import re
+from setuptools import setup
+
+with io.open("README.rst", "rt", encoding="utf8") as f:
+ readme = f.read()
+
+with io.open("click/__init__.py", "rt", encoding="utf8") as f:
+ version = re.search(r"__version__ = \'(.*?)\'", f.read()).group(1)
+
+setup(
+ name="Click",
+ version=version,
+ url="https://palletsprojects.com/p/click/",
+ project_urls={
+ "Documentation": "https://click.palletsprojects.com/",
+ "Code": "https://github.com/pallets/click",
+ "Issue tracker": "https://github.com/pallets/click/issues",
+ },
+ license="BSD",
+ author="Armin Ronacher",
+ author_email="armin.ronacher@active-4.com",
+ maintainer="Pallets Team",
+ maintainer_email="contact@palletsprojects.com",
+ description="Composable command line interface toolkit",
+ long_description=readme,
+ packages=["click"],
+ include_package_data=True,
+ python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: BSD License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ ],
+)
diff --git a/third_party/python/Click/tox.ini b/third_party/python/Click/tox.ini
new file mode 100644
index 0000000000..a44b110702
--- /dev/null
+++ b/third_party/python/Click/tox.ini
@@ -0,0 +1,39 @@
+[tox]
+envlist =
+ py{37,36,35,34,27,py3,py}
+ docs-html
+ coverage-report
+skip_missing_interpreters = true
+
+[testenv]
+passenv = LANG
+deps =
+ pytest
+ coverage
+ colorama
+commands = coverage run -p -m pytest {posargs}
+
+[testenv:docs-html]
+deps = -r docs/requirements.txt
+commands = sphinx-build -W -b html -d {envtmpdir}/doctrees docs {envtmpdir}/html
+
+[testenv:docs-linkcheck]
+deps = -r docs/requirements.txt
+commands = sphinx-build -W -b linkcheck -d {envtmpdir}/doctrees docs {envtmpdir}/linkcheck
+
+[testenv:coverage-report]
+deps = coverage
+skip_install = true
+commands =
+ coverage combine
+ coverage report
+ coverage html
+
+[testenv:codecov]
+passenv = CI TRAVIS TRAVIS_* APPVEYOR APPVEYOR_*
+deps = codecov
+skip_install = true
+commands =
+ coverage combine
+ coverage report
+ codecov
diff --git a/third_party/python/Jinja2/CHANGES.rst b/third_party/python/Jinja2/CHANGES.rst
new file mode 100644
index 0000000000..9b8b24ee03
--- /dev/null
+++ b/third_party/python/Jinja2/CHANGES.rst
@@ -0,0 +1,774 @@
+.. currentmodule:: jinja2
+
+Version 2.11.2
+--------------
+
+Released 2020-04-13
+
+- Fix a bug that caused callable objects with ``__getattr__``, like
+ :class:`~unittest.mock.Mock` to be treated as a
+ :func:`contextfunction`. :issue:`1145`
+- Update ``wordcount`` filter to trigger :class:`Undefined` methods
+ by wrapping the input in :func:`soft_unicode`. :pr:`1160`
+- Fix a hang when displaying tracebacks on Python 32-bit.
+ :issue:`1162`
+- Showing an undefined error for an object that raises
+ ``AttributeError`` on access doesn't cause a recursion error.
+ :issue:`1177`
+- Revert changes to :class:`~loaders.PackageLoader` from 2.10 which
+ removed the dependency on setuptools and pkg_resources, and added
+ limited support for namespace packages. The changes caused issues
+ when using Pytest. Due to the difficulty in supporting Python 2 and
+ :pep:`451` simultaneously, the changes are reverted until 3.0.
+ :pr:`1182`
+- Fix line numbers in error messages when newlines are stripped.
+ :pr:`1178`
+- The special ``namespace()`` assignment object in templates works in
+ async environments. :issue:`1180`
+- Fix whitespace being removed before tags in the middle of lines when
+ ``lstrip_blocks`` is enabled. :issue:`1138`
+- :class:`~nativetypes.NativeEnvironment` doesn't evaluate
+ intermediate strings during rendering. This prevents early
+ evaluation which could change the value of an expression.
+ :issue:`1186`
+
+
+Version 2.11.1
+--------------
+
+Released 2020-01-30
+
+- Fix a bug that prevented looking up a key after an attribute
+ (``{{ data.items[1:] }}``) in an async template. :issue:`1141`
+
+
+Version 2.11.0
+--------------
+
+Released 2020-01-27
+
+- Drop support for Python 2.6, 3.3, and 3.4. This will be the last
+ version to support Python 2.7 and 3.5.
+- Added a new ``ChainableUndefined`` class to support getitem and
+  getattr on an undefined object (see the sketch after this list).
+  :issue:`977`
+- Allow ``{%+`` syntax (with NOP behavior) when ``lstrip_blocks`` is
+ disabled. :issue:`748`
+- Added a ``default`` parameter for the ``map`` filter. :issue:`557`
+- Exclude environment globals from
+ :func:`meta.find_undeclared_variables`. :issue:`931`
+- Float literals can be written with scientific notation, like
+ 2.56e-3. :issue:`912`, :pr:`922`
+- Int and float literals can be written with the '_' separator for
+ legibility, like 12_345. :pr:`923`
+- Fix a bug causing deadlocks in ``LRUCache.setdefault``. :pr:`1000`
+- The ``trim`` filter takes an optional string of characters to trim.
+ :pr:`828`
+- A new ``jinja2.ext.debug`` extension adds a ``{% debug %}`` tag to
+ quickly dump the current context and available filters and tests.
+ :issue:`174`, :pr:`798, 983`
+- Lexing templates with large amounts of whitespace is much faster.
+ :issue:`857`, :pr:`858`
+- Parentheses around comparisons are preserved, so
+ ``{{ 2 * (3 < 5) }}`` outputs "2" instead of "False".
+ :issue:`755`, :pr:`938`
+- Add new ``boolean``, ``false``, ``true``, ``integer`` and ``float``
+ tests. :pr:`824`
+- The environment's ``finalize`` function is only applied to the
+ output of expressions (constant or not), not static template data.
+ :issue:`63`
+- When providing multiple paths to ``FileSystemLoader``, a template
+ can have the same name as a directory. :issue:`821`
+- Always return :class:`Undefined` when omitting the ``else`` clause
+ in a ``{{ 'foo' if bar }}`` expression, regardless of the
+ environment's ``undefined`` class. Omitting the ``else`` clause is a
+ valid shortcut and should not raise an error when using
+ :class:`StrictUndefined`. :issue:`710`, :pr:`1079`
+- Fix behavior of ``loop`` control variables such as ``length`` and
+ ``revindex0`` when looping over a generator. :issue:`459, 751, 794`,
+ :pr:`993`
+- Async support is only loaded the first time an environment enables
+ it, in order to avoid a slow initial import. :issue:`765`
+- In async environments, the ``|map`` filter will await the filter
+ call if needed. :pr:`913`
+- In for loops that access ``loop`` attributes, the iterator is not
+ advanced ahead of the current iteration unless ``length``,
+ ``revindex``, ``nextitem``, or ``last`` are accessed. This makes it
+ less likely to break ``groupby`` results. :issue:`555`, :pr:`1101`
+- In async environments, the ``loop`` attributes ``length`` and
+ ``revindex`` work for async iterators. :pr:`1101`
+- In async environments, values from attribute/property access will
+ be awaited if needed. :pr:`1101`
+- :class:`~loaders.PackageLoader` doesn't depend on setuptools or
+ pkg_resources. :issue:`970`
+- ``PackageLoader`` has limited support for :pep:`420` namespace
+ packages. :issue:`1097`
+- Support :class:`os.PathLike` objects in
+  :class:`~loaders.FileSystemLoader` and :class:`~loaders.ModuleLoader`.
+ :issue:`870`
+- :class:`~nativetypes.NativeTemplate` correctly handles quotes
+ between expressions. ``"'{{ a }}', '{{ b }}'"`` renders as the tuple
+ ``('1', '2')`` rather than the string ``'1, 2'``. :issue:`1020`
+- Creating a :class:`~nativetypes.NativeTemplate` directly creates a
+ :class:`~nativetypes.NativeEnvironment` instead of a default
+ :class:`Environment`. :issue:`1091`
+- After calling ``LRUCache.copy()``, the copy's queue methods point to
+ the correct queue. :issue:`843`
+- Compiling templates always writes UTF-8 instead of defaulting to the
+ system encoding. :issue:`889`
+- ``|wordwrap`` filter treats existing newlines as separate paragraphs
+ to be wrapped individually, rather than creating short intermediate
+ lines. :issue:`175`
+- Add ``break_on_hyphens`` parameter to ``|wordwrap`` filter.
+ :issue:`550`
+- Cython compiled functions decorated as context functions will be
+ passed the context. :pr:`1108`
+- When chained comparisons of constants are evaluated at compile time,
+ the result follows Python's behavior of returning ``False`` if any
+ comparison returns ``False``, rather than only the last one.
+ :issue:`1102`
+- Tracebacks for exceptions in templates show the correct line numbers
+ and source for Python >= 3.7. :issue:`1104`
+- Tracebacks for template syntax errors in Python 3 no longer show
+ internal compiler frames. :issue:`763`
+- Add a ``DerivedContextReference`` node that can be used by
+ extensions to get the current context and local variables such as
+ ``loop``. :issue:`860`
+- Constant folding during compilation is applied to some node types
+ that were previously overlooked. :issue:`733`
+- ``TemplateSyntaxError.source`` is not empty when raised from an
+ included template. :issue:`457`
+- Passing an ``Undefined`` value to ``get_template`` (such as through
+ ``extends``, ``import``, or ``include``), raises an
+ ``UndefinedError`` consistently. ``select_template`` will show the
+ undefined message in the list of attempts rather than the empty
+ string. :issue:`1037`
+- ``TemplateSyntaxError`` can be pickled. :pr:`1117`
+
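+A minimal sketch of the ``ChainableUndefined`` behavior noted above;
+the ``data`` variable is assumed to be absent from the context::
+
+    from jinja2 import ChainableUndefined, Environment
+
+    env = Environment(undefined=ChainableUndefined)
+    # Chained getattr/getitem on an undefined value renders as an
+    # empty string instead of raising immediately.
+    print(env.from_string("{{ data.missing.attr }}").render())
+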
+
+Version 2.10.3
+--------------
+
+Released 2019-10-04
+
+- Fix a typo in Babel entry point in ``setup.py`` that was preventing
+ installation.
+
+
+Version 2.10.2
+--------------
+
+Released 2019-10-04
+
+- Fix Python 3.7 deprecation warnings.
+- Using ``range`` in the sandboxed environment uses ``xrange`` on
+  Python 2 to avoid building the full list in memory. :issue:`933`
+- Use Python 3.7's better traceback support to avoid a core dump when
+ using debug builds of Python 3.7. :issue:`1050`
+
+
+Version 2.10.1
+--------------
+
+Released 2019-04-06
+
+- ``SandboxedEnvironment`` securely handles ``str.format_map`` in
+ order to prevent code execution through untrusted format strings.
+ The sandbox already handled ``str.format``.
+
+
+Version 2.10
+------------
+
+Released 2017-11-08
+
+- Added a new extension node called ``OverlayScope`` which can be used
+ to create an unoptimized scope that will look up all variables from
+ a derived context.
+- Added an ``in`` test that works like the in operator. This can be
+ used in combination with ``reject`` and ``select``.
+- Added ``previtem`` and ``nextitem`` to loop contexts, providing
+ access to the previous/next item in the loop. If such an item does
+ not exist, the value is undefined.
+- Added ``changed(*values)`` to loop contexts, providing an easy way
+ of checking whether a value has changed since the last iteration (or
+ rather since the last call of the method)
+- Added a ``namespace`` function that creates a special object which
+ allows attribute assignment using the ``set`` tag. This can be used
+ to carry data across scopes, e.g. from a loop body to code that
+  comes after the loop. See the sketch after this list.
+- Added a ``trimmed`` modifier to ``{% trans %}`` to strip linebreaks
+ and surrounding whitespace. Also added a new policy to enable this
+ for all ``trans`` blocks.
+- The ``random`` filter is no longer incorrectly constant folded and
+ will produce a new random choice each time the template is rendered.
+ :pr:`478`
+- Added a ``unique`` filter. :pr:`469`
+- Added ``min`` and ``max`` filters. :pr:`475`
+- Added tests for all comparison operators: ``eq``, ``ne``, ``lt``,
+ ``le``, ``gt``, ``ge``. :pr:`665`
+- ``import`` statement cannot end with a trailing comma. :pr:`617`,
+ :pr:`618`
+- ``indent`` filter will not indent blank lines by default. :pr:`685`
+- Add ``reverse`` argument for ``dictsort`` filter. :pr:`692`
+- Add a ``NativeEnvironment`` that renders templates to native Python
+ types instead of strings. :pr:`708`
+- Added filter support to the block ``set`` tag. :pr:`489`
+- ``tojson`` filter marks output as safe to match documented behavior.
+ :pr:`718`
+- Resolved a bug where getting debug locals for tracebacks could
+ modify template context.
+- Fixed a bug where having many ``{% elif ... %}`` blocks resulted in
+ a "too many levels of indentation" error. These blocks now compile
+ to native ``elif ..:`` instead of ``else: if ..:`` :issue:`759`
+
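+A minimal sketch of the ``namespace()`` helper described above; the
+``items`` value passed to ``render()`` is assumed sample data::
+
+    from jinja2 import Environment
+
+    template = Environment().from_string(
+        "{% set ns = namespace(found=false) %}"
+        "{% for item in items %}"
+        "{% if item.check %}{% set ns.found = true %}{% endif %}"
+        "{% endfor %}"
+        "Found: {{ ns.found }}"
+    )
+    # The flag set inside the loop body survives after the loop.
+    print(template.render(items=[{"check": False}, {"check": True}]))
+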
+
+Version 2.9.6
+-------------
+
+Released 2017-04-03
+
+- Fixed custom context behavior in fast resolve mode :issue:`675`
+
+
+Version 2.9.5
+-------------
+
+Released 2017-01-28
+
+- Restored the original repr of the internal ``_GroupTuple`` because
+ this caused issues with ansible and it was an unintended change.
+ :issue:`654`
+- Added back support for custom contexts that override the old
+ ``resolve`` method since it was hard for people to spot that this
+ could cause a regression.
+- Correctly use the buffer for the else block of for loops. This
+  previously caused invalid syntax errors on 2.x and completely
+  wrong behavior on Python 3. :issue:`669`
+- Resolve an issue where the ``{% extends %}`` tag could not be used
+ with async environments. :issue:`668`
+- Reduce memory footprint slightly by reducing our unicode database
+ dump we use for identifier matching on Python 3 :issue:`666`
+- Fixed autoescaping not working for macros in async compilation mode.
+ :issue:`671`
+
+
+Version 2.9.4
+-------------
+
+Released 2017-01-10
+
+- Solved some warnings for string literals. :issue:`646`
+- Increment the bytecode cache version which was not done due to an
+ oversight before.
+- Corrected bad code generation and scoping for filtered loops.
+ :issue:`649`
+- Resolved an issue where top-level output silencing after known
+  extend blocks could generate invalid code when blocks were
+ contained in if statements. :issue:`651`
+- Made the ``truncate.leeway`` default configurable to improve
+ compatibility with older templates.
+
+
+Version 2.9.3
+-------------
+
+Released 2017-01-08
+
+- Restored the use of blocks in macros to the extent that was possible
+ before. On Python 3 it would render a generator repr instead of the
+ block contents. :issue:`645`
+- Set a consistent behavior for assignment of variables in inner scopes
+  when the variable is also read from an outer scope. This now sets
+  the intended behavior in all situations; however, it does not restore
+  the old behavior where limited assignments to outer scopes were
+  possible. For more information and a discussion see :issue:`641`
+- Resolved an issue where ``block scoped`` would not take advantage of
+ the new scoping rules. In some more exotic cases a variable
+  overridden in a local scope would not make it into a block.
+- Change the code generation of the ``with`` statement to be in line
+ with the new scoping rules. This resolves some unlikely bugs in edge
+ cases. This also introduces a new internal ``With`` node that can be
+ used by extensions.
+
+
+Version 2.9.2
+-------------
+
+Released 2017-01-08
+
+- Fixed a regression that caused for loops to not be able to use the
+ same variable for the target as well as source iterator.
+ :issue:`640`
+- Add support for a previously unknown behavior of macros. It used to
+ be possible in some circumstances to explicitly provide a caller
+ argument to macros. While badly buggy and unintended it turns out
+ that this is a common case that gets copy pasted around. To not
+ completely break backwards compatibility with the most common cases
+ it's now possible to provide an explicit keyword argument for caller
+ if it's given an explicit default. :issue:`642`
+
+
+Version 2.9.1
+-------------
+
+Released 2017-01-07
+
+- Resolved a regression with call block scoping for macros. Nested
+ caller blocks that used the same identifiers as outer macros could
+ refer to the wrong variable incorrectly.
+
+
+Version 2.9
+-----------
+
+Released 2017-01-07, codename Derivation
+
+- Change cache key definition in environment. This fixes a performance
+ regression introduced in 2.8.
+- Added support for ``generator_stop`` on supported Python versions
+ (Python 3.5 and later)
+- Corrected a long standing issue with operator precedence of math
+ operations not being what was expected.
+- Added support for Python 3.6 async iterators through a new async
+ mode.
+- Added policies for filter defaults and similar things.
+- Urlize now sets "rel noopener" by default.
+- Support attribute fallback for old-style classes in 2.x.
+- Support toplevel set statements in extend situations.
+- Restored behavior of Cycler for Python 3 users.
+- Subtraction now follows the same behavior as other operators on
+ undefined values.
+- ``map`` and friends will now give better error messages if you
+ forgot to quote the parameter.
+- Depend on MarkupSafe 0.23 or higher.
+- Improved the ``truncate`` filter to support better truncation in
+ case the string is barely truncated at all.
+- Change the logic for macro autoescaping to be based on the runtime
+ autoescaping information at call time instead of macro define time.
+- Ported a modified version of the ``tojson`` filter from Flask to
+ Jinja and hooked it up with the new policy framework.
+- Block sets are now marked ``safe`` by default.
+- On Python 2 the asciification of ASCII strings can now be disabled
+ with the ``compiler.ascii_str`` policy.
+- Tests now no longer accept an arbitrary expression as first argument
+ but a restricted one. This means that you can now properly use
+ multiple tests in one expression without extra parentheses. In
+ particular you can now write ``foo is divisibleby 2 or foo is
+ divisibleby 3`` as you would expect.
+- Greatly changed the scoping system to be more consistent with what
+ template designers and developers expect. There is now no more magic
+ difference between the different include and import constructs.
+ Context is now always propagated the same way. The only remaining
+ differences is the defaults for ``with context`` and ``without
+ context``.
+- The ``with`` and ``autoescape`` tags are now built-in.
+- Added the new ``select_autoescape`` function, which makes configuring
+  autoescaping easier.
+- Fixed a runtime error in the sandbox when attributes of async
+ generators were accessed.
+
+
+Version 2.8.1
+-------------
+
+Released 2016-12-29
+
+- Fixed the ``for_qs`` flag for ``urlencode``.
+- Fixed regression when applying ``int`` to non-string values.
+- SECURITY: if the sandbox mode is used format expressions are now
+ sandboxed with the same rules as in Jinja. This solves various
+ information leakage problems that can occur with format strings.
+
+
+Version 2.8
+-----------
+
+Released 2015-07-26, codename Replacement
+
+- Added ``target`` parameter to urlize function.
+- Added support for ``followsymlinks`` to the file system loader.
+- The truncate filter now counts the length.
+- Added equalto filter that helps with select filters.
+- Changed cache keys to use absolute file names if available instead
+ of load names.
+- Fixed loop length calculation for some iterators.
+- Changed how Jinja enforces strings to be native strings in Python 2
+ to work when people break their default encoding.
+- Added ``make_logging_undefined`` which returns an undefined
+  object that logs failures into a logger; see the sketch after this
+  list.
+- If unmarshalling of cached data fails the template will be reloaded
+ now.
+- Implemented a block ``set`` tag.
+- Default cache size was increased to 400 from a low 50.
+- Fixed ``is number`` test to accept long integers in all Python
+ versions.
+- Changed ``is number`` to accept Decimal as a number.
+- Added a check for default arguments followed by non-default
+ arguments. This change makes ``{% macro m(x, y=1, z) %}`` a syntax
+ error. The previous behavior for this code was broken anyway
+ (resulting in the default value being applied to ``y``).
+- Add ability to use custom subclasses of
+ ``jinja2.compiler.CodeGenerator`` and ``jinja2.runtime.Context`` by
+ adding two new attributes to the environment
+ (``code_generator_class`` and ``context_class``). :pr:`404`
+- Added support for context/environment/evalctx decorator functions on
+ the finalize callback of the environment.
+- Escape query strings for urlencode properly. Previously slashes were
+ not escaped in that place.
+- Add 'base' parameter to 'int' filter.
+
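+A minimal sketch of ``make_logging_undefined`` as described above; the
+logger name is only an example::
+
+    import logging
+
+    from jinja2 import Environment, Undefined, make_logging_undefined
+
+    LoggingUndefined = make_logging_undefined(
+        logger=logging.getLogger("myapp.templates"), base=Undefined
+    )
+    # Failed lookups are logged but otherwise behave like Undefined.
+    env = Environment(undefined=LoggingUndefined)
+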
+
+Version 2.7.3
+-------------
+
+Released 2014-06-06
+
+- Security issue: Corrected the security fix for the cache folder.
+ This fix was provided by RedHat.
+
+
+Version 2.7.2
+-------------
+
+Released 2014-01-10
+
+- Prefix loader was not forwarding the locals properly to inner
+ loaders. This is now fixed.
+- Security issue: Changed the default folder for the filesystem cache
+ to be user specific and read and write protected on UNIX systems.
+ See `Debian bug 734747`_ for more information.
+
+.. _Debian bug 734747: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=734747
+
+
+Version 2.7.1
+-------------
+
+Released 2013-08-07
+
+- Fixed a bug with ``call_filter`` not working properly on environment
+ and context filters.
+- Fixed lack of Python 3 support for bytecode caches.
+- Reverted support for defining blocks in included templates as this
+ broke existing templates for users.
+- Fixed some warnings with hashing of undefineds and nodes if Python
+ is run with warnings for Python 3.
+- Added support for properly hashing undefined objects.
+- Fixed a bug with the title filter not working on already uppercase
+ strings.
+
+
+Version 2.7
+-----------
+
+Released 2013-05-20, codename Translation
+
+- Choice and prefix loaders now dispatch source and template lookup
+ separately in order to work in combination with module loaders as
+ advertised.
+- Fixed filesizeformat.
+- Added a non-silent option for babel extraction.
+- Added ``urlencode`` filter that automatically quotes values for URL
+ safe usage with utf-8 as only supported encoding. If applications
+ want to change this encoding they can override the filter.
+- Added ``keep-trailing-newline`` configuration to environments and
+ templates to optionally preserve the final trailing newline.
+- Accessing ``last`` on the loop context no longer causes the iterator
+ to be consumed into a list.
+- Python requirement changed: 2.6, 2.7 or >= 3.3 are required now,
+ supported by same source code, using the "six" compatibility
+ library.
+- Allow ``contextfunction`` and other decorators to be applied to
+ ``__call__``.
+- Added support for changing from newline to different signs in the
+ ``wordwrap`` filter.
+- Added support for ignoring memcache errors silently.
+- Added support for keeping the trailing newline in templates.
+- Added finer grained support for stripping whitespace on the left
+ side of blocks.
+- Added ``map``, ``select``, ``reject``, ``selectattr`` and
+  ``rejectattr`` filters. See the sketch after this list.
+- Added support for ``loop.depth`` to figure out how deep inside a
+ recursive loop the code is.
+- Disabled py_compile for pypy and python 3.
+
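+A minimal sketch of the ``selectattr`` and ``map`` filters mentioned
+above; the ``users`` data is assumed for illustration::
+
+    from jinja2 import Environment
+
+    template = Environment().from_string(
+        '{{ users | selectattr("active") | map(attribute="name") | join(", ") }}'
+    )
+    # Prints "alice": only active users are kept, then mapped to names.
+    print(template.render(users=[
+        {"name": "alice", "active": True},
+        {"name": "bob", "active": False},
+    ]))
+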
+
+Version 2.6
+-----------
+
+Released 2011-07-24, codename Convolution
+
+- Internal attributes now raise an internal attribute error instead
+  of returning an undefined. This fixes problems when passing
+  undefined objects to APIs that expect Python semantics.
+- Traceback support now works properly for PyPy. (Tested with 1.4)
+- Implemented operator intercepting for sandboxed environments. This
+ allows application developers to disable builtin operators for
+ better security. (For instance limit the mathematical operators to
+ actual integers instead of longs)
+- Groupby filter now supports dotted notation for grouping by
+ attributes of attributes.
+- Scoped blocks now properly treat toplevel assignments and imports.
+ Previously an import suddenly "disappeared" in a scoped block.
+- Automatically detect newer Python interpreter versions before
+ loading code from bytecode caches to prevent segfaults on invalid
+ opcodes. The segfault in earlier Jinja versions here was not a
+ Jinja bug but a limitation in the underlying Python interpreter. If
+  you notice Jinja segfaulting in earlier versions after an upgrade
+  of the Python interpreter, you don't have to upgrade Jinja; it's
+  enough to flush the bytecode cache. This change makes that
+  unnecessary, as Jinja now detects these cases automatically.
+- The sum filter can now sum up values by attribute. This is a
+ backwards incompatible change. The argument to the filter previously
+ was the optional starting index which defaults to zero. This now
+ became the second argument to the function because it's rarely used.
+- Like sum, sort now also makes it possible to order items by
+ attribute.
+- Like sum and sort, join is now also able to join attributes of
+  objects as strings.
+- The internal eval context now has a reference to the environment.
+- Added a mapping test to see if an object is a dict or an object with
+ a similar interface.
+
+
+Version 2.5.5
+-------------
+
+Released 2010-10-18
+
+- Built documentation is no longer part of release.
+
+
+Version 2.5.4
+-------------
+
+Released 2010-10-17
+
+- Fixed extensions not loading properly with overlays.
+- Work around a bug in cpython for the debugger that causes segfaults
+ on 64bit big-endian architectures.
+
+
+Version 2.5.3
+-------------
+
+Released 2010-10-17
+
+- Fixed an operator precedence error introduced in 2.5.2. Statements
+ like "-foo.bar" had their implicit parentheses applied around the
+ first part of the expression ("(-foo).bar") instead of the more
+ correct "-(foo.bar)".
+
+
+Version 2.5.2
+-------------
+
+Released 2010-08-18
+
+- Improved setup.py script to better work with assumptions people
+ might still have from it (``--with-speedups``).
+- Fixed a packaging error that excluded the new debug support.
+
+
+Version 2.5.1
+-------------
+
+Released 2010-08-17
+
+- StopIteration exceptions raised by functions called from templates
+ are now intercepted and converted to undefineds. This solves a lot
+ of debugging grief. (StopIteration is used internally to abort
+ template execution)
+- Improved performance of macro calls slightly.
+- Babel extraction can now properly extract newstyle gettext calls.
+- Using the variable ``num`` in newstyle gettext for something else
+ than the pluralize count will no longer raise a :exc:`KeyError`.
+- Removed builtin markup class and switched to markupsafe. For
+ backwards compatibility the pure Python implementation still exists
+ but is pulled from markupsafe by the Jinja developers. The debug
+ support went into a separate feature called "debugsupport" and is
+ disabled by default because it is only relevant for Python 2.4
+- Fixed an issue with unary operators having the wrong precedence.
+
+
+Version 2.5
+-----------
+
+Released 2010-05-29, codename Incoherence
+
+- Improved the sort filter (should have worked like this for a long
+ time) by adding support for case insensitive searches.
+- Fixed a bug for getattribute constant folding.
+- Support for newstyle gettext translations which result in a nicer
+ in-template user interface and more consistent catalogs.
+- It's now possible to register extensions after an environment was
+ created.
+
+
+Version 2.4.1
+-------------
+
+Released 2010-04-20
+
+- Fixed an error reporting bug for undefined.
+
+
+Version 2.4
+-----------
+
+Released 2010-04-13, codename Correlation
+
+- The environment template loading functions now transparently pass
+ through a template object if it was passed to it. This makes it
+ possible to import or extend from a template object that was passed
+ to the template.
+- Added a ``ModuleLoader`` that can load templates from
+ precompiled sources. The environment now features a method to
+ compile the templates from a configured loader into a zip file or
+ folder.
+- The _speedups C extension now supports Python 3.
+- Added support for autoescaping toggling sections and support for
+ evaluation contexts.
+- Extensions have a priority now.
+
+
+Version 2.3.1
+-------------
+
+Released 2010-02-19
+
+- Fixed an error reporting bug on all python versions
+- Fixed an error reporting bug on Python 2.4
+
+
+Version 2.3
+-----------
+
+Released 2010-02-10, codename 3000 Pythons
+
+- Fixed an issue with the code generator that caused unbound variables
+  to be generated if ``set`` was used in if-blocks, along with other
+  small identifier problems.
+- Include tags are now able to select between multiple templates and
+ take the first that exists, if a list of templates is given.
+- Fixed a problem with having call blocks in outer scopes that have an
+ argument that is also used as local variable in an inner frame
+ :issue:`360`.
+- Greatly improved error message reporting :pr:`339`
+- Implicit tuple expressions can no longer be totally empty. This
+ change makes ``{% if %}`` a syntax error now. :issue:`364`
+- Added support for translator comments if extracted via babel.
+- Added with-statement extension.
+- Experimental Python 3 support.
+
+
+Version 2.2.1
+-------------
+
+Released 2009-09-14
+
+- Fixes some smaller problems for Jinja on Jython.
+
+
+Version 2.2
+-----------
+
+Released 2009-09-13, codename Kong
+
+- Include statements can now be marked with ``ignore missing`` to skip
+ non existing templates.
+- Priority of ``not`` raised. It's now possible to write ``not foo in
+ bar`` as an alias to ``foo not in bar`` like in python. Previously
+ the grammar required parentheses (``not (foo in bar)``) which was
+ odd.
+- Fixed a bug that caused syntax errors when defining macros or using
+ the ``{% call %}`` tag inside loops.
+- Fixed a bug in the parser that made ``{{ foo[1, 2] }}`` impossible.
+- Made it possible to refer to names from outer scopes in included
+  templates that were unused in the caller's frame :issue:`327`
+- Fixed a bug that caused internal errors if names were used as
+ iteration variable and regular variable *after* the loop if that
+ variable was unused *before* the loop. :pr:`331`
+- Added support for optional ``scoped`` modifier to blocks.
+- Added support for line-comments.
+- Added the ``meta`` module.
+- Renamed (undocumented) attribute "overlay" to "overlayed" on the
+ environment because it was clashing with a method of the same name.
+- Speedup extension is now disabled by default.
+
+
+Version 2.1.1
+-------------
+
+Released 2008-12-25
+
+- Fixed a translation error caused by looping over empty recursive
+ loops.
+
+
+Version 2.1
+-----------
+
+Released 2008-11-23, codename Yasuzō
+
+- Fixed a bug with nested loops and the special loop variable. Before
+ the change an inner loop overwrote the loop variable from the outer
+ one after iteration.
+- Fixed a bug with the i18n extension that caused the explicit
+ pluralization block to look up the wrong variable.
+- Fixed a limitation in the lexer that made ``{{ foo.0.0 }}``
+ impossible.
+- Index based subscribing of variables with a constant value returns
+ an undefined object now instead of raising an index error. This was
+ a bug caused by eager optimizing.
+- The i18n extension looks up ``foo.ugettext`` now followed by
+  ``foo.gettext`` if a translations object is installed. This makes
+ dealing with custom translations classes easier.
+- Fixed a confusing behavior with conditional extending. Loops were
+ partially executed under some conditions even though they were not
+ part of a visible area.
+- Added ``sort`` filter that works like ``dictsort`` but for arbitrary
+ sequences.
+- Fixed a bug with empty statements in macros.
+- Implemented a bytecode cache system.
+- The template context is now weakref-able
+- Inclusions and imports "with context" forward all variables now, not
+ only the initial context.
+- Added a cycle helper called ``cycler``.
+- Added a joining helper called ``joiner``.
+- Added a ``compile_expression`` method to the environment that allows
+ compiling of Jinja expressions into callable Python objects.
+- Fixed an escaping bug in urlize
+
+
+Version 2.0
+-----------
+
+Released 2008-07-17, codename Jinjavitus
+
+- The subscribing of objects (looking up attributes and items) changed
+  slightly. It's now possible to give attributes or items a
+ higher priority by either using dot-notation lookup or the bracket
+ syntax. This also changed the AST slightly. ``Subscript`` is gone
+ and was replaced with ``Getitem`` and ``Getattr``.
+- Added support for preprocessing and token stream filtering for
+  extensions. This allows extensions to enable simplified gettext
+  calls in template data and similar things.
+- Added ``TemplateStream.dump``.
+- Added missing support for implicit string literal concatenation.
+ ``{{ "foo" "bar" }}`` is equivalent to ``{{ "foobar" }}``
+- ``else`` is optional for conditional expressions. If not given it
+ evaluates to ``false``.
+- Improved error reporting for undefined values by providing a
+ position.
+- ``filesizeformat`` filter now uses decimal prefixes by default and
+ can be set to binary mode with the second parameter.
+- Fixed bug in finalizer
+
+
+Version 2.0rc1
+--------------
+
+Released 2008-06-09
+
+- First release of Jinja 2.
diff --git a/third_party/python/Jinja2/LICENSE.rst b/third_party/python/Jinja2/LICENSE.rst
new file mode 100644
index 0000000000..c37cae49ec
--- /dev/null
+++ b/third_party/python/Jinja2/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2007 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/Jinja2/MANIFEST.in b/third_party/python/Jinja2/MANIFEST.in
new file mode 100644
index 0000000000..909102a77a
--- /dev/null
+++ b/third_party/python/Jinja2/MANIFEST.in
@@ -0,0 +1,9 @@
+include CHANGES.rst
+include tox.ini
+graft artwork
+graft docs
+prune docs/_build
+graft examples
+graft ext
+graft tests
+global-exclude *.pyc
diff --git a/third_party/python/Jinja2/PKG-INFO b/third_party/python/Jinja2/PKG-INFO
new file mode 100644
index 0000000000..87f8151831
--- /dev/null
+++ b/third_party/python/Jinja2/PKG-INFO
@@ -0,0 +1,102 @@
+Metadata-Version: 2.1
+Name: Jinja2
+Version: 2.11.2
+Summary: A very fast and expressive template engine.
+Home-page: https://palletsprojects.com/p/jinja/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Documentation, https://jinja.palletsprojects.com/
+Project-URL: Code, https://github.com/pallets/jinja
+Project-URL: Issue tracker, https://github.com/pallets/jinja/issues
+Description: Jinja
+ =====
+
+ Jinja is a fast, expressive, extensible templating engine. Special
+ placeholders in the template allow writing code similar to Python
+ syntax. Then the template is passed data to render the final document.
+
+ It includes:
+
+ - Template inheritance and inclusion.
+ - Define and import macros within templates.
+ - HTML templates can use autoescaping to prevent XSS from untrusted
+ user input.
+ - A sandboxed environment can safely render untrusted templates.
+ - AsyncIO support for generating templates and calling async
+ functions.
+ - I18N support with Babel.
+ - Templates are compiled to optimized Python code just-in-time and
+ cached, or can be compiled ahead-of-time.
+ - Exceptions point to the correct line in templates to make debugging
+ easier.
+ - Extensible filters, tests, functions, and even syntax.
+
+ Jinja's philosophy is that while application logic belongs in Python if
+ possible, it shouldn't make the template designer's job difficult by
+ restricting functionality too much.
+
+
+ Installing
+ ----------
+
+ Install and update using `pip`_:
+
+ .. code-block:: text
+
+ $ pip install -U Jinja2
+
+ .. _pip: https://pip.pypa.io/en/stable/quickstart/
+
+
+ In A Nutshell
+ -------------
+
+ .. code-block:: jinja
+
+ {% extends "base.html" %}
+ {% block title %}Members{% endblock %}
+ {% block content %}
+ <ul>
+ {% for user in users %}
+ <li><a href="{{ user.url }}">{{ user.username }}</a></li>
+ {% endfor %}
+ </ul>
+ {% endblock %}
+
+
+ Links
+ -----
+
+ - Website: https://palletsprojects.com/p/jinja/
+ - Documentation: https://jinja.palletsprojects.com/
+ - Releases: https://pypi.org/project/Jinja2/
+ - Code: https://github.com/pallets/jinja
+ - Issue tracker: https://github.com/pallets/jinja/issues
+ - Test status: https://dev.azure.com/pallets/jinja/_build
+ - Official chat: https://discord.gg/t6rrQZH
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
+Description-Content-Type: text/x-rst
+Provides-Extra: i18n
diff --git a/third_party/python/Jinja2/README.rst b/third_party/python/Jinja2/README.rst
new file mode 100644
index 0000000000..060b19efee
--- /dev/null
+++ b/third_party/python/Jinja2/README.rst
@@ -0,0 +1,66 @@
+Jinja
+=====
+
+Jinja is a fast, expressive, extensible templating engine. Special
+placeholders in the template allow writing code similar to Python
+syntax. Then the template is passed data to render the final document.
+
+It includes:
+
+- Template inheritance and inclusion.
+- Define and import macros within templates.
+- HTML templates can use autoescaping to prevent XSS from untrusted
+ user input.
+- A sandboxed environment can safely render untrusted templates.
+- AsyncIO support for generating templates and calling async
+ functions.
+- I18N support with Babel.
+- Templates are compiled to optimized Python code just-in-time and
+ cached, or can be compiled ahead-of-time.
+- Exceptions point to the correct line in templates to make debugging
+ easier.
+- Extensible filters, tests, functions, and even syntax.
+
+Jinja's philosophy is that while application logic belongs in Python if
+possible, it shouldn't make the template designer's job difficult by
+restricting functionality too much.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+ $ pip install -U Jinja2
+
+.. _pip: https://pip.pypa.io/en/stable/quickstart/
+
+
+In A Nutshell
+-------------
+
+.. code-block:: jinja
+
+ {% extends "base.html" %}
+ {% block title %}Members{% endblock %}
+ {% block content %}
+ <ul>
+ {% for user in users %}
+ <li><a href="{{ user.url }}">{{ user.username }}</a></li>
+ {% endfor %}
+ </ul>
+ {% endblock %}
+
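+A minimal sketch of rendering a template like the one above from
+Python; the ``templates`` directory, the ``members.html`` file name
+(with a ``base.html`` it extends), and the sample ``users`` data are
+assumptions for illustration:
+
+.. code-block:: python
+
+    from jinja2 import Environment, FileSystemLoader, select_autoescape
+
+    env = Environment(
+        loader=FileSystemLoader("templates"),
+        autoescape=select_autoescape(["html", "xml"]),
+    )
+    template = env.get_template("members.html")
+    print(template.render(users=[{"url": "/u/alice", "username": "alice"}]))
+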
+
+Links
+-----
+
+- Website: https://palletsprojects.com/p/jinja/
+- Documentation: https://jinja.palletsprojects.com/
+- Releases: https://pypi.org/project/Jinja2/
+- Code: https://github.com/pallets/jinja
+- Issue tracker: https://github.com/pallets/jinja/issues
+- Test status: https://dev.azure.com/pallets/jinja/_build
+- Official chat: https://discord.gg/t6rrQZH
diff --git a/third_party/python/Jinja2/artwork/jinjalogo.svg b/third_party/python/Jinja2/artwork/jinjalogo.svg
new file mode 100644
index 0000000000..0bc9ea4e8b
--- /dev/null
+++ b/third_party/python/Jinja2/artwork/jinjalogo.svg
@@ -0,0 +1,132 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://web.resource.org/cc/"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:xlink="http://www.w3.org/1999/xlink"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="300"
+ height="120"
+ id="svg2"
+ sodipodi:version="0.32"
+ inkscape:version="0.45.1"
+ version="1.0"
+ sodipodi:docbase="/Users/mitsuhiko/Development/jinja2/artwork"
+ sodipodi:docname="jinjalogo.svg"
+ inkscape:export-filename="/Users/mitsuhiko/Development/jinja2/docs/_static/jinjabanner.png"
+ inkscape:export-xdpi="60"
+ inkscape:export-ydpi="60"
+ inkscape:output_extension="org.inkscape.output.svg.inkscape">
+ <defs
+ id="defs4">
+ <linearGradient
+ id="linearGradient6558">
+ <stop
+ style="stop-color:#575757;stop-opacity:1;"
+ offset="0"
+ id="stop6560" />
+ <stop
+ style="stop-color:#2f2f2f;stop-opacity:1;"
+ offset="1"
+ id="stop6562" />
+ </linearGradient>
+ <radialGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient6558"
+ id="radialGradient6564"
+ cx="61.297766"
+ cy="60.910986"
+ fx="61.297766"
+ fy="60.910986"
+ r="44.688254"
+ gradientTransform="matrix(1,0,0,0.945104,0,3.343747)"
+ gradientUnits="userSpaceOnUse" />
+ <radialGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient6558"
+ id="radialGradient6580"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(1,0,0,0.945104,0.355158,3.334402)"
+ cx="61.297766"
+ cy="60.910986"
+ fx="61.297766"
+ fy="60.910986"
+ r="44.688254" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient6558"
+ id="linearGradient4173"
+ x1="255.15521"
+ y1="32.347946"
+ x2="279.8912"
+ y2="32.347946"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(0.8073249,0,0,0.8073249,57.960878,7.4036303)" />
+ <linearGradient
+ inkscape:collect="always"
+ xlink:href="#linearGradient6558"
+ id="linearGradient5145"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(0.7902775,0,0,0.82474,60.019977,8.0684132)"
+ x1="255.15521"
+ y1="32.347946"
+ x2="279.8912"
+ y2="32.347946" />
+ </defs>
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ gridtolerance="10000"
+ guidetolerance="10"
+ objecttolerance="10"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="2.8"
+ inkscape:cx="137.4752"
+ inkscape:cy="57.574575"
+ inkscape:document-units="px"
+ inkscape:current-layer="layer1"
+ width="300px"
+ height="120px"
+ showguides="true"
+ inkscape:guide-bbox="true"
+ inkscape:window-width="1396"
+ inkscape:window-height="900"
+ inkscape:window-x="0"
+ inkscape:window-y="22" />
+ <metadata
+ id="metadata7">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Layer 1"
+ inkscape:groupmode="layer"
+ id="layer1">
+ <path
+ style="font-size:12px;font-style:normal;font-weight:normal;fill:#f4f4f4;fill-opacity:1;stroke:#e7e7e7;stroke-width:0.8;stroke-linecap:butt;stroke-linejoin:round;stroke-opacity:1;font-family:Bitstream Vera Sans;stroke-miterlimit:4;stroke-dasharray:none"
+ d="M 165.36463,80.874808 L 165.36463,80.874808 L 153.32556,80.874808 L 153.32556,81.8344 L 147.64994,81.8344 L 147.64994,36.035583 L 165.36463,36.035583 L 165.36463,20.333129 C 170.58154,21.031083 173.07533,22.077914 172.84609,23.473621 C 172.78871,24.055258 172.21545,24.549594 171.12624,24.956624 L 171.12624,36.035583 L 189.09895,36.035583 L 189.09895,82.532286 L 183.33733,82.532286 L 183.33733,80.874808 L 171.12624,80.874808 L 171.12624,102.94548 L 165.36463,102.94548 L 165.36463,80.874808 M 153.32556,55.489173 L 153.32556,55.489173 L 165.36463,55.489173 L 165.36463,41.793146 L 153.32556,41.793146 L 153.32556,55.489173 M 171.12624,55.489173 L 171.12624,55.489173 L 183.33733,55.489173 L 183.33733,41.793146 L 171.12624,41.793146 L 171.12624,55.489173 M 183.33733,61.333977 L 183.33733,61.333977 L 171.12624,61.333977 L 171.12624,75.030006 L 183.33733,75.030006 L 183.33733,61.333977 M 165.36463,61.333977 L 165.36463,61.333977 L 153.32556,61.333977 L 153.32556,75.030006 L 165.36463,75.030006 L 165.36463,61.333977 M 132.85897,59.414792 C 137.33069,63.136883 140.99969,67.934848 143.86618,73.808701 L 139.13654,77.385372 C 137.24467,72.965445 134.6362,69.12707 131.31114,65.87024 L 131.31114,102.94548 L 125.63554,102.94548 L 125.63554,68.57455 C 122.31042,71.947693 118.52671,74.913707 114.28436,77.47261 L 109.64069,73.372526 C 121.50782,67.091566 130.62312,55.489212 136.98668,38.565417 L 116.26221,38.565417 L 116.26221,32.720615 L 125.80754,32.720615 L 125.80754,20.333129 C 130.85245,21.031083 133.31761,22.048838 133.20299,23.386383 C 133.14561,24.026183 132.57235,24.549594 131.48307,24.956624 L 131.48307,32.720615 L 140.77043,32.720615 L 143.60824,36.733469 C 140.68444,45.51526 137.10137,53.075692 132.85897,59.414792 M 254.11016,49.469901 L 254.11016,49.469901 L 254.11016,20.333129 C 259.21243,21.031083 261.67755,22.048838 261.50562,23.386383 C 261.44823,23.909869 261.04699,24.346044 260.30172,24.694917 C 260.30164,24.694986 260.30164,24.694986 260.30172,24.694917 L 260.30172,24.694917 L 259.78578,24.956624 L 259.78578,49.469901 L 277.15652,49.469901 L 277.15652,55.227471 L 259.78578,55.227471 L 259.78578,93.785712 L 281.45616,93.785712 L 281.45616,99.63051 L 232.35378,99.63051 L 232.35378,93.785712 L 254.11016,93.785712 L 254.11016,55.227471 L 236.22346,55.227471 L 236.22346,49.469901 L 254.11016,49.469901 M 225.5603,59.327554 C 231.12111,63.107798 235.62145,67.876693 239.06127,73.634235 L 234.76157,77.647079 C 231.60845,72.180322 227.82475,67.934848 223.41044,64.910648 L 223.41044,102.94548 L 217.73484,102.94548 L 217.73484,67.44049 C 212.91919,71.627831 207.70222,75.030021 202.084,77.647079 L 197.87027,73.198053 C 212.66118,66.917101 224.01239,55.372897 231.92377,38.565417 L 205.35172,38.565417 L 205.35172,32.720615 L 217.99283,32.720615 L 217.99283,20.333129 C 223.03774,21.031083 225.50291,22.048838 225.38829,23.386383 C 225.33089,24.026183 224.75765,24.549594 223.66837,24.956624 L 223.66837,32.720615 L 236.22346,32.720615 L 238.80326,36.733469 C 235.13421,45.51526 230.71987,53.046611 225.5603,59.327554"
+ id="text4761" />
+ <path
+ style="font-size:44.09793472px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#b41717;fill-opacity:1;stroke:#7f2828;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Candara;stroke-miterlimit:4;stroke-dasharray:none"
+ d="M 149.14708,37.774469 C 148.97807,41.117899 148.84526,44.824225 148.74871,48.893456 C 148.67626,52.962754 148.3818,70.641328 148.38184,75.524422 C 148.3818,79.065795 148.05588,81.991266 147.40406,84.300835 C 146.75219,86.610422 145.72612,88.557071 144.32585,90.140779 C 142.94969,91.724494 141.17522,92.901283 139.00239,93.671139 C 136.82953,94.440996 134.22211,94.825935 131.18014,94.825935 C 128.83828,94.825935 126.73787,94.59498 124.87889,94.133049 L 125.4221,89.31593 C 127.13623,90.0418 128.92278,90.404734 130.78177,90.404733 C 132.85805,90.404734 134.66875,90.140782 136.2139,89.612876 C 137.78315,89.062981 139.02651,88.216133 139.94396,87.072335 C 140.8855,85.928548 141.54942,84.520804 141.93572,82.8491 C 142.34613,81.177412 142.55134,78.988811 142.55136,76.283285 C 142.55134,66.297119 142.62852,44.659257 142.26641,37.774469 L 149.14708,37.774469 M 166.38498,80.732697 L 159.83024,80.732697 C 160.16821,76.333498 160.33723,71.307412 160.33723,65.654424 C 160.33723,59.2976 159.91471,53.963567 159.06973,49.652319 L 166.31257,48.761483 C 166.02284,53.358679 165.87799,58.98965 165.87799,65.654424 C 165.87799,70.933479 166.04699,75.959565 166.38498,80.732697 M 167.90601,39.490159 C 167.90598,40.611994 167.5076,41.590815 166.7109,42.42662 C 165.91418,43.240515 164.79155,43.647442 163.343,43.647399 C 162.11172,43.647442 161.146,43.295504 160.44588,42.591595 C 159.76988,41.865769 159.43188,40.996927 159.43188,39.98507 C 159.43188,38.885304 159.84231,37.928485 160.66315,37.114591 C 161.48399,36.30078 162.61869,35.893853 164.06727,35.893811 C 165.25023,35.893853 166.17975,36.256783 166.85575,36.982609 C 167.55588,37.686526 167.90598,38.522373 167.90601,39.490159 M 206.72748,80.732697 L 200.13651,80.732697 C 200.66763,74.947749 200.93319,68.634899 200.9332,61.794122 C 200.93319,58.406756 200.1727,56.097177 198.65174,54.865371 C 197.15487,53.61163 195.00619,52.984747 192.20564,52.984714 C 188.77731,52.984747 185.61465,54.117535 182.71753,56.383099 C 182.71753,63.883761 182.76583,72.000287 182.86238,80.732697 L 176.27142,80.732697 C 176.68182,73.254058 176.88707,67.843042 176.88707,64.499632 C 176.88707,59.352589 176.3559,54.359493 175.29363,49.520339 L 181.66734,48.695493 L 182.35539,52.720761 L 182.64511,52.720761 C 186.21823,49.773323 190.04483,48.299592 194.12499,48.299567 C 198.13265,48.299592 201.23499,49.113454 203.43201,50.741118 C 205.62895,52.346863 206.72747,55.217334 206.72748,59.352563 C 206.72747,59.770507 206.70331,60.595362 206.65507,61.827118 C 206.60675,63.058915 206.5826,63.883761 206.58262,64.30167 C 206.5826,67.975018 206.63088,73.452022 206.72748,80.732697 M 222.69791,48.695493 C 222.28747,55.514282 222.08225,62.355041 222.08225,69.21778 C 222.08225,71.043461 222.14262,73.463019 222.26332,76.476468 C 222.40822,79.467925 222.4806,81.502559 222.48063,82.580363 C 222.4806,89.685068 219.51105,93.996287 213.57195,95.514024 L 211.76124,93.006484 C 213.90995,91.356766 215.2378,89.597085 215.74478,87.727431 C 216.49321,85.043912 216.86743,79.324953 216.86743,70.570535 C 216.86743,61.178248 216.3846,54.16153 215.41887,49.520339 L 222.69791,48.695493 M 224.2551,39.490159 C 224.2551,40.611994 223.85673,41.590815 223.06006,42.42662 C 222.26332,43.240515 221.14069,43.647442 219.69213,43.647399 C 218.46084,43.647442 217.49515,43.295504 216.795,42.591595 C 216.119,41.865769 215.781,40.996927 215.781,39.98507 C 215.781,38.885304 216.19144,37.928485 217.01231,37.114591 C 217.83316,36.30078 218.96785,35.893853 220.4164,35.893811 C 221.5994,35.893853 222.52889,36.256783 223.20492,36.982609 C 
223.90503,37.686526 224.2551,38.522373 224.2551,39.490159 M 259.60008,80.732697 L 253.91446,80.930661 C 253.62473,79.852857 253.47987,78.830045 253.4799,77.862216 L 253.11774,77.862216 C 250.14817,80.325772 246.10427,81.557546 240.98606,81.557547 C 238.20962,81.557546 235.8195,80.820682 233.81563,79.346948 C 231.81178,77.851221 230.80988,75.728607 230.80988,72.979099 C 230.80988,69.591724 232.37914,66.875216 235.51769,64.829574 C 238.65625,62.761967 244.48667,61.67316 253.00913,61.563165 C 253.08155,61.035275 253.11772,60.430386 253.11774,59.748497 C 253.11772,57.043003 252.32104,55.239336 250.72765,54.337474 C 249.15832,53.435661 246.76819,52.984747 243.55721,52.984714 C 239.76681,52.984747 236.03678,53.413668 232.3671,54.271484 L 232.9827,49.718301 C 236.60411,48.77251 240.76873,48.299592 245.47658,48.299567 C 249.77395,48.299592 253.09359,49.113454 255.43545,50.741118 C 257.77728,52.346863 258.94819,55.096363 258.94824,58.989625 C 258.94819,60.023469 258.88785,61.904117 258.76715,64.631608 C 258.67054,67.337133 258.62228,69.140806 258.6223,70.042632 C 258.62228,74.045913 258.94819,77.609265 259.60008,80.732697 M 253.19019,74.331856 C 253.06945,70.988469 253.00909,67.986016 253.00913,65.324484 C 248.47027,65.324498 245.01786,65.632443 242.65187,66.248318 C 238.69248,67.348131 236.71278,69.448748 236.71278,72.550177 C 236.71278,75.541643 239.03044,77.037371 243.66588,77.037366 C 247.64942,77.037371 250.82416,76.135534 253.19019,74.331856"
+ id="text3736"
+ sodipodi:nodetypes="ccsscssccscccsccccsccsccsscsssccccscscccsccccscsssccscscccscccsscsssccccccscsccscsccscscsccccssc" />
+ <path
+ style="fill:url(#radialGradient6564);fill-opacity:1.0;fill-rule:evenodd;stroke:#323232;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;stroke-miterlimit:4;stroke-dasharray:none"
+ d="M 105.45673,18.675923 C 105.45673,18.675923 88.211949,26.918461 74.172834,28.737898 C 60.133727,30.557333 33.360434,32.377571 28.045622,31.093256 C 22.730818,29.808941 18.915645,28.309196 18.915645,28.309196 L 20.021441,32.056583 L 16.609513,35.052471 L 17.2144,36.121726 L 18.61792,36.22764 L 22.92773,36.762252 L 23.532621,38.688909 L 25.937975,38.905784 L 27.143021,42.970927 C 27.143021,42.970927 32.254764,43.399628 33.758953,43.399628 C 35.263142,43.399628 38.271966,43.187802 38.271966,43.187802 L 38.371202,44.791657 L 39.477002,45.003495 L 39.477002,46.824227 L 37.066917,48.967759 L 37.671807,49.073671 L 37.671807,49.820127 C 37.671807,49.820127 32.255457,50.252157 30.049301,49.93109 C 27.843157,49.610006 27.440747,49.608286 27.440747,49.608286 L 27.242258,49.820127 L 27.143021,50.783455 L 27.643946,50.783455 L 27.84242,54.959544 L 38.976091,54.530844 L 38.172728,68.980747 L 38.073481,70.796442 L 28.645781,70.261816 L 28.546544,66.408513 L 30.649462,66.408513 L 30.852673,64.910557 L 32.757107,64.481857 L 33.059555,64.058192 L 25.937975,62.343374 L 20.522364,63.947229 L 21.42496,64.698732 L 22.327572,64.698732 L 22.426809,65.984848 L 24.331254,66.09076 L 24.331254,69.838147 L 22.228335,70.372777 L 22.630009,71.225146 L 23.130934,71.547931 L 23.130934,74.437917 L 24.435218,74.437917 L 24.435218,87.813529 L 22.327572,88.13632 L 22.630009,91.989617 L 23.929569,92.206492 L 23.731093,100.98236 L 29.449141,101.08826 L 28.244105,92.418334 L 36.868446,92.206492 L 36.268285,96.912181 L 35.464925,100.23086 L 44.188501,100.33677 L 44.287739,91.777793 L 50.303506,91.243181 L 50.005786,96.700351 L 49.802585,99.90807 L 54.920484,99.90807 L 54.717274,91.132217 L 55.421397,91.243181 L 55.619882,87.067076 L 54.816521,87.067076 L 54.518798,85.352258 L 54.017874,80.429702 L 54.216359,74.760706 L 55.31743,74.760706 L 55.31743,71.336105 L 53.913913,71.442015 L 54.117112,67.402096 L 55.747469,67.240708 L 55.823083,65.929374 L 56.749319,65.793192 L 57.699176,65.071956 L 51.985842,63.896802 L 46.31977,65.15265 L 46.872668,66.060507 L 47.47283,66.010066 L 48.172228,65.984848 L 48.299828,67.639144 L 49.878196,67.563497 L 49.906548,71.144447 L 43.111042,70.988097 L 43.337879,67.160002 L 43.559978,63.679927 L 43.559978,59.105378 L 43.763188,54.288748 L 57.373101,53.592733 L 73.567955,52.659674 L 73.71917,55.736265 L 73.142647,63.120082 L 72.892183,69.9945 L 66.928387,69.888585 L 66.900039,65.071956 L 69.106918,64.991267 L 69.206169,63.629486 L 70.108765,63.493308 L 70.061506,63.226006 L 70.964116,63.175568 L 71.465028,62.504773 L 64.721507,60.926122 L 58.001612,62.368592 L 58.4789,63.200785 L 59.230285,63.1453 L 59.230285,63.523577 L 60.156518,63.523577 L 60.156518,65.046738 L 62.136575,65.071956 L 62.112937,69.298485 L 60.109259,69.298485 L 60.080907,70.261816 L 60.785031,70.342507 L 60.70942,74.009202 L 62.188552,74.089909 L 62.013701,88.620507 L 60.057282,89.018952 L 60.080907,89.714967 L 60.761406,89.714967 L 60.761406,93.437137 L 61.886113,93.437137 L 61.588391,98.52109 L 61.210343,102.95945 L 68.331912,103.14605 L 68.105084,99.29275 L 67.580538,96.085028 L 67.476575,93.300955 L 73.520696,93.195041 L 73.345845,97.502272 L 73.317494,102.05159 L 76.729426,102.3189 L 81.3653,102.1323 L 82.820807,101.70358 L 82.017437,99.26753 L 81.818959,95.439438 L 81.440912,92.710853 L 87.206218,92.499027 L 86.955759,95.842931 L 86.932133,101.08826 L 89.238253,101.30009 L 91.520751,101.24965 L 92.621828,100.90165 L 91.969693,95.923633 L 91.747577,92.176239 L 92.725793,92.070324 L 92.749427,88.726422 L 93.02352,88.670945 L 
92.976244,87.949712 L 91.846823,87.949712 L 91.619996,85.488427 L 91.520751,74.811143 L 92.371377,74.785924 L 92.371377,71.280616 L 92.725793,71.336105 L 92.725793,70.640088 L 91.468773,70.529127 L 91.497126,66.463987 L 93.600043,66.277382 L 93.477182,64.910557 L 94.403419,64.829863 L 94.351424,64.562549 L 95.580099,63.947229 L 89.337489,62.69138 L 82.995657,63.977495 L 83.39733,64.723951 L 84.375543,64.643256 L 84.427528,64.966046 L 85.254515,64.966046 L 85.301775,66.569901 L 87.357445,66.544681 L 87.532293,70.478688 L 80.264217,70.423216 L 79.413593,64.512124 L 78.733106,61.380041 L 78.184923,55.761484 L 78.510996,52.473053 L 92.999878,51.373557 L 93.047136,46.476221 L 93.774891,46.289613 L 93.727651,45.543159 L 93.174743,45.220372 C 93.174629,45.220372 85.252181,46.395266 82.745197,46.66284 C 82.0389,46.738209 82.09239,46.733258 81.516524,46.79397 L 81.440912,45.886118 L 78.444837,44.317564 L 78.482644,42.491786 L 79.512842,42.461518 L 79.588444,39.949808 C 79.588444,39.949808 85.728225,39.546834 88.009582,39.0117 C 90.290937,38.476559 93.524432,37.942456 93.524432,37.942456 L 95.055545,33.79662 L 98.089437,32.913987 L 98.339888,32.217972 L 105.20628,30.316548 L 105.98602,29.676006 L 103.37744,23.976741 L 103.62792,22.690624 L 104.95584,21.994611 L 105.91041,19.079404 L 105.45673,18.675923 z M 72.466874,40.403728 L 72.429067,42.476654 L 73.983813,42.542211 L 73.884576,44.509221 L 70.836515,46.506487 L 70.647496,47.081457 L 71.876167,47.091543 L 71.866712,47.575729 L 62.552432,48.029652 L 62.613863,46.652742 L 63.039175,45.966809 L 63.067524,45.528025 L 63.07698,44.579832 L 63.341609,43.949374 L 63.440849,43.439982 L 63.440849,43.076841 L 63.842533,41.47297 L 72.466874,40.403728 z M 52.987688,42.168984 L 52.760853,43.561027 L 53.488599,44.418431 L 53.441349,45.916386 L 54.117112,46.960408 L 53.942262,48.191039 L 54.443185,48.912273 L 44.939872,49.2855 L 44.916247,48.967759 L 46.017333,48.831579 L 46.069307,48.428097 L 43.66394,47.121797 L 43.536351,45.03375 L 44.689411,44.978276 L 44.788661,42.72883 L 52.987688,42.168984 z M 67.051262,74.276518 L 72.81657,74.649742 L 72.618099,82.411833 L 73.36947,88.776857 L 67.254465,88.565018 L 67.051262,74.276518 z M 28.44258,74.599304 L 37.671807,75.078442 L 36.868446,80.429702 L 36.868446,84.928593 L 37.520583,87.440302 L 28.494569,87.869006 L 28.44258,74.599304 z M 87.508658,74.649742 L 87.508658,87.924488 L 81.644113,88.353194 L 81.440912,81.342592 L 80.788764,74.811143 L 87.508658,74.649742 z M 43.087416,74.947312 L 49.906548,74.972531 L 49.977434,87.278902 L 43.611966,87.389863 L 43.285891,83.400379 L 43.262266,79.441156 L 43.087416,74.947312 z "
+ id="path4735" />
+ </g>
+</svg>
diff --git a/third_party/python/Jinja2/examples/basic/cycle.py b/third_party/python/Jinja2/examples/basic/cycle.py
new file mode 100644
index 0000000000..25dcb0b090
--- /dev/null
+++ b/third_party/python/Jinja2/examples/basic/cycle.py
@@ -0,0 +1,18 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+
+env = Environment(
+ line_statement_prefix="#", variable_start_string="${", variable_end_string="}"
+)
+print(
+ env.from_string(
+ """\
+<ul>
+# for item in range(10)
+ <li class="${loop.cycle('odd', 'even')}">${item}</li>
+# endfor
+</ul>\
+"""
+ ).render()
+)
diff --git a/third_party/python/Jinja2/examples/basic/debugger.py b/third_party/python/Jinja2/examples/basic/debugger.py
new file mode 100644
index 0000000000..d3c1a60a7a
--- /dev/null
+++ b/third_party/python/Jinja2/examples/basic/debugger.py
@@ -0,0 +1,8 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+from jinja2.loaders import FileSystemLoader
+
+env = Environment(loader=FileSystemLoader("templates"))
+tmpl = env.get_template("broken.html")
+print(tmpl.render(seq=[3, 2, 4, 5, 3, 2, 0, 2, 1]))
diff --git a/third_party/python/Jinja2/examples/basic/inheritance.py b/third_party/python/Jinja2/examples/basic/inheritance.py
new file mode 100644
index 0000000000..4a881bf8a8
--- /dev/null
+++ b/third_party/python/Jinja2/examples/basic/inheritance.py
@@ -0,0 +1,15 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+from jinja2.loaders import DictLoader
+
+env = Environment(
+ loader=DictLoader(
+ {
+ "a": "[A[{% block body %}{% endblock %}]]",
+ "b": "{% extends 'a' %}{% block body %}[B]{% endblock %}",
+ "c": "{% extends 'b' %}{% block body %}###{{ super() }}###{% endblock %}",
+ }
+ )
+)
+print(env.get_template("c").render())
diff --git a/third_party/python/Jinja2/examples/basic/templates/broken.html b/third_party/python/Jinja2/examples/basic/templates/broken.html
new file mode 100644
index 0000000000..294d5c9989
--- /dev/null
+++ b/third_party/python/Jinja2/examples/basic/templates/broken.html
@@ -0,0 +1,6 @@
+{% from 'subbroken.html' import may_break %}
+<ul>
+{% for item in seq %}
+ <li>{{ may_break(item) }}</li>
+{% endfor %}
+</ul>
diff --git a/third_party/python/Jinja2/examples/basic/templates/subbroken.html b/third_party/python/Jinja2/examples/basic/templates/subbroken.html
new file mode 100644
index 0000000000..245eb7e6e6
--- /dev/null
+++ b/third_party/python/Jinja2/examples/basic/templates/subbroken.html
@@ -0,0 +1,3 @@
+{% macro may_break(item) -%}
+ [{{ item / 0 }}]
+{%- endmacro %}
diff --git a/third_party/python/Jinja2/examples/basic/test.py b/third_party/python/Jinja2/examples/basic/test.py
new file mode 100644
index 0000000000..80b9d1f052
--- /dev/null
+++ b/third_party/python/Jinja2/examples/basic/test.py
@@ -0,0 +1,31 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+from jinja2.loaders import DictLoader
+
+env = Environment(
+ loader=DictLoader(
+ {
+ "child.html": u"""\
+{% extends master_layout or 'master.html' %}
+{% import 'helpers.html' as helpers %}
+{% macro get_the_answer() %}42{% endmacro %}
+{% set title = 'Hello World' %}
+{% block body %}
+ {{ get_the_answer() }}
+ {{ helpers.conspirate() }}
+{% endblock %}
+""",
+ "master.html": u"""\
+<!doctype html>
+<title>{{ title }}</title>
+{% block body %}{% endblock %}
+""",
+ "helpers.html": u"""\
+{% macro conspirate() %}23{% endmacro %}
+""",
+ }
+ )
+)
+tmpl = env.get_template("child.html")
+print(tmpl.render())
diff --git a/third_party/python/Jinja2/examples/basic/test_filter_and_linestatements.py b/third_party/python/Jinja2/examples/basic/test_filter_and_linestatements.py
new file mode 100644
index 0000000000..673b67ed76
--- /dev/null
+++ b/third_party/python/Jinja2/examples/basic/test_filter_and_linestatements.py
@@ -0,0 +1,29 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+
+env = Environment(
+ line_statement_prefix="%", variable_start_string="${", variable_end_string="}"
+)
+tmpl = env.from_string(
+ """\
+% macro foo()
+ ${caller(42)}
+% endmacro
+<ul>
+% for item in seq
+ <li>${item}</li>
+% endfor
+</ul>
+% call(var) foo()
+ [${var}]
+% endcall
+% filter escape
+ <hello world>
+ % for item in [1, 2, 3]
+ - ${item}
+ % endfor
+% endfilter
+"""
+)
+print(tmpl.render(seq=range(10)))
diff --git a/third_party/python/Jinja2/examples/basic/test_loop_filter.py b/third_party/python/Jinja2/examples/basic/test_loop_filter.py
new file mode 100644
index 0000000000..39be08d61c
--- /dev/null
+++ b/third_party/python/Jinja2/examples/basic/test_loop_filter.py
@@ -0,0 +1,15 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+
+tmpl = Environment().from_string(
+ """\
+<ul>
+{%- for item in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] if item % 2 == 0 %}
+ <li>{{ loop.index }} / {{ loop.length }}: {{ item }}</li>
+{%- endfor %}
+</ul>
+if condition: {{ 1 if foo else 0 }}
+"""
+)
+print(tmpl.render(foo=True))
diff --git a/third_party/python/Jinja2/examples/basic/translate.py b/third_party/python/Jinja2/examples/basic/translate.py
new file mode 100644
index 0000000000..71547f4649
--- /dev/null
+++ b/third_party/python/Jinja2/examples/basic/translate.py
@@ -0,0 +1,20 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+
+env = Environment(extensions=["jinja2.ext.i18n"])
+env.globals["gettext"] = {"Hello %(user)s!": "Hallo %(user)s!"}.__getitem__
+env.globals["ngettext"] = lambda s, p, n: {
+ "%(count)s user": "%(count)d Benutzer",
+ "%(count)s users": "%(count)d Benutzer",
+}[n == 1 and s or p]
+print(
+ env.from_string(
+ """\
+{% trans %}Hello {{ user }}!{% endtrans %}
+{% trans count=users|count -%}
+{{ count }} user{% pluralize %}{{ count }} users
+{% endtrans %}
+"""
+ ).render(user="someone", users=[1, 2, 3])
+)
diff --git a/third_party/python/Jinja2/ext/Vim/jinja.vim b/third_party/python/Jinja2/ext/Vim/jinja.vim
new file mode 100644
index 0000000000..e2a5bbf6c9
--- /dev/null
+++ b/third_party/python/Jinja2/ext/Vim/jinja.vim
@@ -0,0 +1,138 @@
+" Vim syntax file
+" Language: Jinja template
+" Maintainer: Armin Ronacher <armin.ronacher@active-4.com>
+" Last Change: 2008 May 9
+" Version: 1.1
+"
+" Known Bugs:
+" because of odd limitations dicts and the modulo operator
+" appear wrong in the template.
+"
+" Changes:
+"
+" 2008 May 9: Added support for Jinja 2 changes (new keyword rules)
+
+" .vimrc variable to disable html highlighting
+if !exists('g:jinja_syntax_html')
+ let g:jinja_syntax_html=1
+endif
+
+" For version 5.x: Clear all syntax items
+" For version 6.x: Quit when a syntax file was already loaded
+if !exists("main_syntax")
+ if v:version < 600
+ syntax clear
+ elseif exists("b:current_syntax")
+ finish
+ endif
+ let main_syntax = 'jinja'
+endif
+
+" Pull in the HTML syntax.
+if g:jinja_syntax_html
+ if v:version < 600
+ so <sfile>:p:h/html.vim
+ else
+ runtime! syntax/html.vim
+ unlet b:current_syntax
+ endif
+endif
+
+syntax case match
+
+" Jinja template built-in tags and parameters (without filter, macro, is and raw, they
+" have special threatment)
+syn keyword jinjaStatement containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained and if else in not or recursive as import
+
+syn keyword jinjaStatement containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained is filter skipwhite nextgroup=jinjaFilter
+syn keyword jinjaStatement containedin=jinjaTagBlock contained macro skipwhite nextgroup=jinjaFunction
+syn keyword jinjaStatement containedin=jinjaTagBlock contained block skipwhite nextgroup=jinjaBlockName
+
+" Variable Names
+syn match jinjaVariable containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained /[a-zA-Z_][a-zA-Z0-9_]*/
+syn keyword jinjaSpecial containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained false true none False True None loop super caller varargs kwargs
+
+" Filters
+syn match jinjaOperator "|" containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained skipwhite nextgroup=jinjaFilter
+syn match jinjaFilter contained /[a-zA-Z_][a-zA-Z0-9_]*/
+syn match jinjaFunction contained /[a-zA-Z_][a-zA-Z0-9_]*/
+syn match jinjaBlockName contained /[a-zA-Z_][a-zA-Z0-9_]*/
+
+" Jinja template constants
+syn region jinjaString containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained start=/"/ skip=/\(\\\)\@<!\(\(\\\\\)\@>\)*\\"/ end=/"/
+syn region jinjaString containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained start=/'/ skip=/\(\\\)\@<!\(\(\\\\\)\@>\)*\\'/ end=/'/
+syn match jinjaNumber containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained /[0-9]\+\(\.[0-9]\+\)\?/
+
+" Operators
+syn match jinjaOperator containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained /[+\-*\/<>=!,:]/
+syn match jinjaPunctuation containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained /[()\[\]]/
+syn match jinjaOperator containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained /\./ nextgroup=jinjaAttribute
+syn match jinjaAttribute contained /[a-zA-Z_][a-zA-Z0-9_]*/
+
+" Jinja template tag and variable blocks
+syn region jinjaNested matchgroup=jinjaOperator start="(" end=")" transparent display containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained
+syn region jinjaNested matchgroup=jinjaOperator start="\[" end="\]" transparent display containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained
+syn region jinjaNested matchgroup=jinjaOperator start="{" end="}" transparent display containedin=jinjaVarBlock,jinjaTagBlock,jinjaNested contained
+syn region jinjaTagBlock matchgroup=jinjaTagDelim start=/{%-\?/ end=/-\?%}/ containedin=ALLBUT,jinjaTagBlock,jinjaVarBlock,jinjaRaw,jinjaString,jinjaNested,jinjaComment
+
+syn region jinjaVarBlock matchgroup=jinjaVarDelim start=/{{-\?/ end=/-\?}}/ containedin=ALLBUT,jinjaTagBlock,jinjaVarBlock,jinjaRaw,jinjaString,jinjaNested,jinjaComment
+
+" Jinja template 'raw' tag
+syn region jinjaRaw matchgroup=jinjaRawDelim start="{%\s*raw\s*%}" end="{%\s*endraw\s*%}" containedin=ALLBUT,jinjaTagBlock,jinjaVarBlock,jinjaString,jinjaComment
+
+" Jinja comments
+syn region jinjaComment matchgroup=jinjaCommentDelim start="{#" end="#}" containedin=ALLBUT,jinjaTagBlock,jinjaVarBlock,jinjaString,jinjaComment
+" help support folding for some setups
+setlocal commentstring={#%s#}
+setlocal comments=s:{#,e:#}
+
+" Block start keywords. A bit tricker. We only highlight at the start of a
+" tag block and only if the name is not followed by a comma or equals sign
+" which usually means that we have to deal with an assignment.
+syn match jinjaStatement containedin=jinjaTagBlock contained /\({%-\?\s*\)\@<=\<[a-zA-Z_][a-zA-Z0-9_]*\>\(\s*[,=]\)\@!/
+
+" and context modifiers
+syn match jinjaStatement containedin=jinjaTagBlock contained /\<with\(out\)\?\s\+context\>/
+
+
+" Define the default highlighting.
+" For version 5.7 and earlier: only when not done already
+" For version 5.8 and later: only when an item doesn't have highlighting yet
+if v:version >= 508 || !exists("did_jinja_syn_inits")
+ if v:version < 508
+ let did_jinja_syn_inits = 1
+ command -nargs=+ HiLink hi link <args>
+ else
+ command -nargs=+ HiLink hi def link <args>
+ endif
+
+ HiLink jinjaPunctuation jinjaOperator
+ HiLink jinjaAttribute jinjaVariable
+ HiLink jinjaFunction jinjaFilter
+
+ HiLink jinjaTagDelim jinjaTagBlock
+ HiLink jinjaVarDelim jinjaVarBlock
+ HiLink jinjaCommentDelim jinjaComment
+ HiLink jinjaRawDelim jinja
+
+ HiLink jinjaSpecial Special
+ HiLink jinjaOperator Normal
+ HiLink jinjaRaw Normal
+ HiLink jinjaTagBlock PreProc
+ HiLink jinjaVarBlock PreProc
+ HiLink jinjaStatement Statement
+ HiLink jinjaFilter Function
+ HiLink jinjaBlockName Function
+ HiLink jinjaVariable Identifier
+ HiLink jinjaString Constant
+ HiLink jinjaNumber Constant
+ HiLink jinjaComment Comment
+
+ delcommand HiLink
+endif
+
+let b:current_syntax = "jinja"
+
+if main_syntax ==# 'jinja'
+ unlet main_syntax
+endif
diff --git a/third_party/python/Jinja2/setup.cfg b/third_party/python/Jinja2/setup.cfg
new file mode 100644
index 0000000000..3387720ba8
--- /dev/null
+++ b/third_party/python/Jinja2/setup.cfg
@@ -0,0 +1,39 @@
+[metadata]
+license_file = LICENSE.rst
+long_description_content_type = text/x-rst
+
+[bdist_wheel]
+universal = true
+
+[tool:pytest]
+testpaths = tests
+filterwarnings =
+ error
+ ignore:the sets module:DeprecationWarning:jinja2.sandbox
+
+[coverage:run]
+branch = True
+source =
+ jinja2
+ tests
+
+[coverage:paths]
+source =
+ src
+ */site-packages
+
+[flake8]
+select = B, E, F, W, B9
+ignore =
+ E203
+ E501
+ E722
+ W503
+max-line-length = 80
+per-file-ignores =
+ src/jinja2/__init__.py: F401
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/Jinja2/setup.py b/third_party/python/Jinja2/setup.py
new file mode 100644
index 0000000000..7d94cd3ae0
--- /dev/null
+++ b/third_party/python/Jinja2/setup.py
@@ -0,0 +1,56 @@
+import io
+import re
+
+from setuptools import find_packages
+from setuptools import setup
+
+with io.open("README.rst", "rt", encoding="utf8") as f:
+ readme = f.read()
+
+with io.open("src/jinja2/__init__.py", "rt", encoding="utf8") as f:
+ version = re.search(r'__version__ = "(.*?)"', f.read(), re.M).group(1)
+
+setup(
+ name="Jinja2",
+ version=version,
+ url="https://palletsprojects.com/p/jinja/",
+ project_urls={
+ "Documentation": "https://jinja.palletsprojects.com/",
+ "Code": "https://github.com/pallets/jinja",
+ "Issue tracker": "https://github.com/pallets/jinja/issues",
+ },
+ license="BSD-3-Clause",
+ author="Armin Ronacher",
+ author_email="armin.ronacher@active-4.com",
+ maintainer="Pallets",
+ maintainer_email="contact@palletsprojects.com",
+ description="A very fast and expressive template engine.",
+ long_description=readme,
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ "Environment :: Web Environment",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: BSD License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ "Topic :: Text Processing :: Markup :: HTML",
+ ],
+ packages=find_packages("src"),
+ package_dir={"": "src"},
+ include_package_data=True,
+ python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
+ install_requires=["MarkupSafe>=0.23"],
+ extras_require={"i18n": ["Babel>=0.8"]},
+ entry_points={"babel.extractors": ["jinja2 = jinja2.ext:babel_extract[i18n]"]},
+)
diff --git a/third_party/python/Jinja2/src/jinja2/__init__.py b/third_party/python/Jinja2/src/jinja2/__init__.py
new file mode 100644
index 0000000000..1229ba4275
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/__init__.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+"""Jinja is a template engine written in pure Python. It provides a
+non-XML syntax that supports inline expressions and an optional
+sandboxed environment.
+"""
+from markupsafe import escape
+from markupsafe import Markup
+
+from .bccache import BytecodeCache
+from .bccache import FileSystemBytecodeCache
+from .bccache import MemcachedBytecodeCache
+from .environment import Environment
+from .environment import Template
+from .exceptions import TemplateAssertionError
+from .exceptions import TemplateError
+from .exceptions import TemplateNotFound
+from .exceptions import TemplateRuntimeError
+from .exceptions import TemplatesNotFound
+from .exceptions import TemplateSyntaxError
+from .exceptions import UndefinedError
+from .filters import contextfilter
+from .filters import environmentfilter
+from .filters import evalcontextfilter
+from .loaders import BaseLoader
+from .loaders import ChoiceLoader
+from .loaders import DictLoader
+from .loaders import FileSystemLoader
+from .loaders import FunctionLoader
+from .loaders import ModuleLoader
+from .loaders import PackageLoader
+from .loaders import PrefixLoader
+from .runtime import ChainableUndefined
+from .runtime import DebugUndefined
+from .runtime import make_logging_undefined
+from .runtime import StrictUndefined
+from .runtime import Undefined
+from .utils import clear_caches
+from .utils import contextfunction
+from .utils import environmentfunction
+from .utils import evalcontextfunction
+from .utils import is_undefined
+from .utils import select_autoescape
+
+__version__ = "2.11.2"
diff --git a/third_party/python/Jinja2/src/jinja2/_compat.py b/third_party/python/Jinja2/src/jinja2/_compat.py
new file mode 100644
index 0000000000..1f044954a0
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/_compat.py
@@ -0,0 +1,132 @@
+# -*- coding: utf-8 -*-
+# flake8: noqa
+import marshal
+import sys
+
+PY2 = sys.version_info[0] == 2
+PYPY = hasattr(sys, "pypy_translation_info")
+_identity = lambda x: x
+
+if not PY2:
+ unichr = chr
+ range_type = range
+ text_type = str
+ string_types = (str,)
+ integer_types = (int,)
+
+ iterkeys = lambda d: iter(d.keys())
+ itervalues = lambda d: iter(d.values())
+ iteritems = lambda d: iter(d.items())
+
+ import pickle
+ from io import BytesIO, StringIO
+
+ NativeStringIO = StringIO
+
+ def reraise(tp, value, tb=None):
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+ ifilter = filter
+ imap = map
+ izip = zip
+ intern = sys.intern
+
+ implements_iterator = _identity
+ implements_to_string = _identity
+ encode_filename = _identity
+
+ marshal_dump = marshal.dump
+ marshal_load = marshal.load
+
+else:
+ unichr = unichr
+ text_type = unicode
+ range_type = xrange
+ string_types = (str, unicode)
+ integer_types = (int, long)
+
+ iterkeys = lambda d: d.iterkeys()
+ itervalues = lambda d: d.itervalues()
+ iteritems = lambda d: d.iteritems()
+
+ import cPickle as pickle
+ from cStringIO import StringIO as BytesIO, StringIO
+
+ NativeStringIO = BytesIO
+
+ exec("def reraise(tp, value, tb=None):\n raise tp, value, tb")
+
+ from itertools import imap, izip, ifilter
+
+ intern = intern
+
+ def implements_iterator(cls):
+ cls.next = cls.__next__
+ del cls.__next__
+ return cls
+
+ def implements_to_string(cls):
+ cls.__unicode__ = cls.__str__
+ cls.__str__ = lambda x: x.__unicode__().encode("utf-8")
+ return cls
+
+ def encode_filename(filename):
+ if isinstance(filename, unicode):
+ return filename.encode("utf-8")
+ return filename
+
+ def marshal_dump(code, f):
+ if isinstance(f, file):
+ marshal.dump(code, f)
+ else:
+ f.write(marshal.dumps(code))
+
+ def marshal_load(f):
+ if isinstance(f, file):
+ return marshal.load(f)
+ return marshal.loads(f.read())
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a
+ # dummy metaclass for one level of class instantiation that replaces
+ # itself with the actual metaclass.
+ class metaclass(type):
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+
+ return type.__new__(metaclass, "temporary_class", (), {})
+
+
+try:
+ from urllib.parse import quote_from_bytes as url_quote
+except ImportError:
+ from urllib import quote as url_quote
+
+
+try:
+ from collections import abc
+except ImportError:
+ import collections as abc
+
+
+try:
+ from os import fspath
+except ImportError:
+ try:
+ from pathlib import PurePath
+ except ImportError:
+ PurePath = None
+
+ def fspath(path):
+ if hasattr(path, "__fspath__"):
+ return path.__fspath__()
+
+ # Python 3.5 doesn't have __fspath__ yet, use str.
+ if PurePath is not None and isinstance(path, PurePath):
+ return str(path)
+
+ return path
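
For illustration, a minimal sketch of using the with_metaclass() helper above to give a class a custom metaclass that behaves the same way on Python 2 and Python 3; the RegisteringMeta and Plugin names are hypothetical and not part of Jinja2.

# Sketch only; assumes this _compat module is importable as jinja2._compat.
from jinja2._compat import with_metaclass


class RegisteringMeta(type):
    """Hypothetical metaclass that records every class it creates."""

    registry = []

    def __new__(mcls, name, bases, namespace):
        cls = super(RegisteringMeta, mcls).__new__(mcls, name, bases, namespace)
        mcls.registry.append(cls)
        return cls


class Plugin(with_metaclass(RegisteringMeta, object)):
    """Built through with_metaclass(), so its real metaclass is RegisteringMeta."""


print(RegisteringMeta.registry)  # -> [<class '__main__.Plugin'>]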
diff --git a/third_party/python/Jinja2/src/jinja2/_identifier.py b/third_party/python/Jinja2/src/jinja2/_identifier.py
new file mode 100644
index 0000000000..224d5449d1
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/_identifier.py
@@ -0,0 +1,6 @@
+import re
+
+# generated by scripts/generate_identifier_pattern.py
+pattern = re.compile(
+ r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950
+)
diff --git a/third_party/python/Jinja2/src/jinja2/asyncfilters.py b/third_party/python/Jinja2/src/jinja2/asyncfilters.py
new file mode 100644
index 0000000000..3d98dbcc00
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/asyncfilters.py
@@ -0,0 +1,158 @@
+from functools import wraps
+
+from . import filters
+from .asyncsupport import auto_aiter
+from .asyncsupport import auto_await
+
+
+async def auto_to_seq(value):
+ seq = []
+ if hasattr(value, "__aiter__"):
+ async for item in value:
+ seq.append(item)
+ else:
+ for item in value:
+ seq.append(item)
+ return seq
+
+
+async def async_select_or_reject(args, kwargs, modfunc, lookup_attr):
+ seq, func = filters.prepare_select_or_reject(args, kwargs, modfunc, lookup_attr)
+ if seq:
+ async for item in auto_aiter(seq):
+ if func(item):
+ yield item
+
+
+def dualfilter(normal_filter, async_filter):
+ wrap_evalctx = False
+ if getattr(normal_filter, "environmentfilter", False) is True:
+
+ def is_async(args):
+ return args[0].is_async
+
+ wrap_evalctx = False
+ else:
+ has_evalctxfilter = getattr(normal_filter, "evalcontextfilter", False) is True
+ has_ctxfilter = getattr(normal_filter, "contextfilter", False) is True
+ wrap_evalctx = not has_evalctxfilter and not has_ctxfilter
+
+ def is_async(args):
+ return args[0].environment.is_async
+
+ @wraps(normal_filter)
+ def wrapper(*args, **kwargs):
+ b = is_async(args)
+ if wrap_evalctx:
+ args = args[1:]
+ if b:
+ return async_filter(*args, **kwargs)
+ return normal_filter(*args, **kwargs)
+
+ if wrap_evalctx:
+ wrapper.evalcontextfilter = True
+
+ wrapper.asyncfiltervariant = True
+
+ return wrapper
+
+
+def asyncfiltervariant(original):
+ def decorator(f):
+ return dualfilter(original, f)
+
+ return decorator
+
+
+@asyncfiltervariant(filters.do_first)
+async def do_first(environment, seq):
+ try:
+ return await auto_aiter(seq).__anext__()
+ except StopAsyncIteration:
+ return environment.undefined("No first item, sequence was empty.")
+
+
+@asyncfiltervariant(filters.do_groupby)
+async def do_groupby(environment, value, attribute):
+ expr = filters.make_attrgetter(environment, attribute)
+ return [
+ filters._GroupTuple(key, await auto_to_seq(values))
+ for key, values in filters.groupby(
+ sorted(await auto_to_seq(value), key=expr), expr
+ )
+ ]
+
+
+@asyncfiltervariant(filters.do_join)
+async def do_join(eval_ctx, value, d=u"", attribute=None):
+ return filters.do_join(eval_ctx, await auto_to_seq(value), d, attribute)
+
+
+@asyncfiltervariant(filters.do_list)
+async def do_list(value):
+ return await auto_to_seq(value)
+
+
+@asyncfiltervariant(filters.do_reject)
+async def do_reject(*args, **kwargs):
+ return async_select_or_reject(args, kwargs, lambda x: not x, False)
+
+
+@asyncfiltervariant(filters.do_rejectattr)
+async def do_rejectattr(*args, **kwargs):
+ return async_select_or_reject(args, kwargs, lambda x: not x, True)
+
+
+@asyncfiltervariant(filters.do_select)
+async def do_select(*args, **kwargs):
+ return async_select_or_reject(args, kwargs, lambda x: x, False)
+
+
+@asyncfiltervariant(filters.do_selectattr)
+async def do_selectattr(*args, **kwargs):
+ return async_select_or_reject(args, kwargs, lambda x: x, True)
+
+
+@asyncfiltervariant(filters.do_map)
+async def do_map(*args, **kwargs):
+ seq, func = filters.prepare_map(args, kwargs)
+ if seq:
+ async for item in auto_aiter(seq):
+ yield await auto_await(func(item))
+
+
+@asyncfiltervariant(filters.do_sum)
+async def do_sum(environment, iterable, attribute=None, start=0):
+ rv = start
+ if attribute is not None:
+ func = filters.make_attrgetter(environment, attribute)
+ else:
+
+ def func(x):
+ return x
+
+ async for item in auto_aiter(iterable):
+ rv += func(item)
+ return rv
+
+
+@asyncfiltervariant(filters.do_slice)
+async def do_slice(value, slices, fill_with=None):
+ return filters.do_slice(await auto_to_seq(value), slices, fill_with)
+
+
+ASYNC_FILTERS = {
+ "first": do_first,
+ "groupby": do_groupby,
+ "join": do_join,
+ "list": do_list,
+ # we intentionally do not support do_last because that would be
+ # ridiculous
+ "reject": do_reject,
+ "rejectattr": do_rejectattr,
+ "map": do_map,
+ "select": do_select,
+ "selectattr": do_selectattr,
+ "sum": do_sum,
+ "slice": do_slice,
+}
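
For illustration, a minimal sketch of when these async variants are selected: assuming Python 3.6+ and an Environment created with enable_async=True, built-in filters such as "list" accept async iterables and are drained via auto_aiter().

import asyncio

from jinja2 import Environment

env = Environment(enable_async=True)  # switches filters to their async variants
tmpl = env.from_string("{{ items | list }}")


async def numbers():
    # An async generator; the patched "list" filter consumes it item by item.
    for i in range(5):
        yield i


async def main():
    print(await tmpl.render_async(items=numbers()))  # -> [0, 1, 2, 3, 4]


asyncio.get_event_loop().run_until_complete(main())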
diff --git a/third_party/python/Jinja2/src/jinja2/asyncsupport.py b/third_party/python/Jinja2/src/jinja2/asyncsupport.py
new file mode 100644
index 0000000000..78ba3739d8
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/asyncsupport.py
@@ -0,0 +1,264 @@
+# -*- coding: utf-8 -*-
+"""The code for async support. Importing this patches Jinja on supported
+Python versions.
+"""
+import asyncio
+import inspect
+from functools import update_wrapper
+
+from markupsafe import Markup
+
+from .environment import TemplateModule
+from .runtime import LoopContext
+from .utils import concat
+from .utils import internalcode
+from .utils import missing
+
+
+async def concat_async(async_gen):
+ rv = []
+
+ async def collect():
+ async for event in async_gen:
+ rv.append(event)
+
+ await collect()
+ return concat(rv)
+
+
+async def generate_async(self, *args, **kwargs):
+ vars = dict(*args, **kwargs)
+ try:
+ async for event in self.root_render_func(self.new_context(vars)):
+ yield event
+ except Exception:
+ yield self.environment.handle_exception()
+
+
+def wrap_generate_func(original_generate):
+ def _convert_generator(self, loop, args, kwargs):
+ async_gen = self.generate_async(*args, **kwargs)
+ try:
+ while 1:
+ yield loop.run_until_complete(async_gen.__anext__())
+ except StopAsyncIteration:
+ pass
+
+ def generate(self, *args, **kwargs):
+ if not self.environment.is_async:
+ return original_generate(self, *args, **kwargs)
+ return _convert_generator(self, asyncio.get_event_loop(), args, kwargs)
+
+ return update_wrapper(generate, original_generate)
+
+
+async def render_async(self, *args, **kwargs):
+ if not self.environment.is_async:
+ raise RuntimeError("The environment was not created with async mode enabled.")
+
+ vars = dict(*args, **kwargs)
+ ctx = self.new_context(vars)
+
+ try:
+ return await concat_async(self.root_render_func(ctx))
+ except Exception:
+ return self.environment.handle_exception()
+
+
+def wrap_render_func(original_render):
+ def render(self, *args, **kwargs):
+ if not self.environment.is_async:
+ return original_render(self, *args, **kwargs)
+ loop = asyncio.get_event_loop()
+ return loop.run_until_complete(self.render_async(*args, **kwargs))
+
+ return update_wrapper(render, original_render)
+
+
+def wrap_block_reference_call(original_call):
+ @internalcode
+ async def async_call(self):
+ rv = await concat_async(self._stack[self._depth](self._context))
+ if self._context.eval_ctx.autoescape:
+ rv = Markup(rv)
+ return rv
+
+ @internalcode
+ def __call__(self):
+ if not self._context.environment.is_async:
+ return original_call(self)
+ return async_call(self)
+
+ return update_wrapper(__call__, original_call)
+
+
+def wrap_macro_invoke(original_invoke):
+ @internalcode
+ async def async_invoke(self, arguments, autoescape):
+ rv = await self._func(*arguments)
+ if autoescape:
+ rv = Markup(rv)
+ return rv
+
+ @internalcode
+ def _invoke(self, arguments, autoescape):
+ if not self._environment.is_async:
+ return original_invoke(self, arguments, autoescape)
+ return async_invoke(self, arguments, autoescape)
+
+ return update_wrapper(_invoke, original_invoke)
+
+
+@internalcode
+async def get_default_module_async(self):
+ if self._module is not None:
+ return self._module
+ self._module = rv = await self.make_module_async()
+ return rv
+
+
+def wrap_default_module(original_default_module):
+ @internalcode
+ def _get_default_module(self):
+ if self.environment.is_async:
+ raise RuntimeError("Template module attribute is unavailable in async mode")
+ return original_default_module(self)
+
+ return _get_default_module
+
+
+async def make_module_async(self, vars=None, shared=False, locals=None):
+ context = self.new_context(vars, shared, locals)
+ body_stream = []
+ async for item in self.root_render_func(context):
+ body_stream.append(item)
+ return TemplateModule(self, context, body_stream)
+
+
+def patch_template():
+ from . import Template
+
+ Template.generate = wrap_generate_func(Template.generate)
+ Template.generate_async = update_wrapper(generate_async, Template.generate_async)
+ Template.render_async = update_wrapper(render_async, Template.render_async)
+ Template.render = wrap_render_func(Template.render)
+ Template._get_default_module = wrap_default_module(Template._get_default_module)
+ Template._get_default_module_async = get_default_module_async
+ Template.make_module_async = update_wrapper(
+ make_module_async, Template.make_module_async
+ )
+
+
+def patch_runtime():
+ from .runtime import BlockReference, Macro
+
+ BlockReference.__call__ = wrap_block_reference_call(BlockReference.__call__)
+ Macro._invoke = wrap_macro_invoke(Macro._invoke)
+
+
+def patch_filters():
+ from .filters import FILTERS
+ from .asyncfilters import ASYNC_FILTERS
+
+ FILTERS.update(ASYNC_FILTERS)
+
+
+def patch_all():
+ patch_template()
+ patch_runtime()
+ patch_filters()
+
+
+async def auto_await(value):
+ if inspect.isawaitable(value):
+ return await value
+ return value
+
+
+async def auto_aiter(iterable):
+ if hasattr(iterable, "__aiter__"):
+ async for item in iterable:
+ yield item
+ return
+ for item in iterable:
+ yield item
+
+
+class AsyncLoopContext(LoopContext):
+ _to_iterator = staticmethod(auto_aiter)
+
+ @property
+ async def length(self):
+ if self._length is not None:
+ return self._length
+
+ try:
+ self._length = len(self._iterable)
+ except TypeError:
+ iterable = [x async for x in self._iterator]
+ self._iterator = self._to_iterator(iterable)
+ self._length = len(iterable) + self.index + (self._after is not missing)
+
+ return self._length
+
+ @property
+ async def revindex0(self):
+ return await self.length - self.index
+
+ @property
+ async def revindex(self):
+ return await self.length - self.index0
+
+ async def _peek_next(self):
+ if self._after is not missing:
+ return self._after
+
+ try:
+ self._after = await self._iterator.__anext__()
+ except StopAsyncIteration:
+ self._after = missing
+
+ return self._after
+
+ @property
+ async def last(self):
+ return await self._peek_next() is missing
+
+ @property
+ async def nextitem(self):
+ rv = await self._peek_next()
+
+ if rv is missing:
+ return self._undefined("there is no next item")
+
+ return rv
+
+ def __aiter__(self):
+ return self
+
+ async def __anext__(self):
+ if self._after is not missing:
+ rv = self._after
+ self._after = missing
+ else:
+ rv = await self._iterator.__anext__()
+
+ self.index0 += 1
+ self._before = self._current
+ self._current = rv
+ return rv, self
+
+
+async def make_async_loop_context(iterable, undefined, recurse=None, depth0=0):
+ import warnings
+
+ warnings.warn(
+ "This template must be recompiled with at least Jinja 2.11, or"
+ " it will fail in 3.0.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return AsyncLoopContext(iterable, undefined, recurse, depth0)
+
+
+patch_all()
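
A minimal sketch of the effect of the patching above, assuming Python 3.6+: with async mode enabled, the wrapped render() still works synchronously because it drives render_async() to completion on an event loop.

from jinja2 import Environment

env = Environment(enable_async=True)
tmpl = env.from_string("Hello {{ name }}!")

# The wrapped render() detects environment.is_async and runs render_async()
# on asyncio's event loop, so no explicit await is needed here.
print(tmpl.render(name="async world"))  # -> Hello async world!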
diff --git a/third_party/python/Jinja2/src/jinja2/bccache.py b/third_party/python/Jinja2/src/jinja2/bccache.py
new file mode 100644
index 0000000000..9c0661030f
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/bccache.py
@@ -0,0 +1,350 @@
+# -*- coding: utf-8 -*-
+"""The optional bytecode cache system. This is useful if you have very
+complex template situations and the compilation of all those templates
+slows down your application too much.
+
+Situations where this is useful are often forking web applications that
+are initialized on the first request.
+"""
+import errno
+import fnmatch
+import os
+import stat
+import sys
+import tempfile
+from hashlib import sha1
+from os import listdir
+from os import path
+
+from ._compat import BytesIO
+from ._compat import marshal_dump
+from ._compat import marshal_load
+from ._compat import pickle
+from ._compat import text_type
+from .utils import open_if_exists
+
+bc_version = 4
+# Magic bytes to identify Jinja bytecode cache files. Contains the
+# Python major and minor version to avoid loading incompatible bytecode
+# if a project upgrades its Python version.
+bc_magic = (
+ b"j2"
+ + pickle.dumps(bc_version, 2)
+ + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2)
+)
+
+
+class Bucket(object):
+ """Buckets are used to store the bytecode for one template. It's created
+ and initialized by the bytecode cache and passed to the loading functions.
+
+ The buckets get an internal checksum from the cache assigned and use this
+ to automatically reject outdated cache material. Individual bytecode
+ cache subclasses don't have to care about cache invalidation.
+ """
+
+ def __init__(self, environment, key, checksum):
+ self.environment = environment
+ self.key = key
+ self.checksum = checksum
+ self.reset()
+
+ def reset(self):
+ """Resets the bucket (unloads the bytecode)."""
+ self.code = None
+
+ def load_bytecode(self, f):
+ """Loads bytecode from a file or file like object."""
+ # make sure the magic header is correct
+ magic = f.read(len(bc_magic))
+ if magic != bc_magic:
+ self.reset()
+ return
+ # the source code of the file changed, we need to reload
+ checksum = pickle.load(f)
+ if self.checksum != checksum:
+ self.reset()
+ return
+ # if marshal_load fails then we need to reload
+ try:
+ self.code = marshal_load(f)
+ except (EOFError, ValueError, TypeError):
+ self.reset()
+ return
+
+ def write_bytecode(self, f):
+ """Dump the bytecode into the file or file like object passed."""
+ if self.code is None:
+ raise TypeError("can't write empty bucket")
+ f.write(bc_magic)
+ pickle.dump(self.checksum, f, 2)
+ marshal_dump(self.code, f)
+
+ def bytecode_from_string(self, string):
+ """Load bytecode from a string."""
+ self.load_bytecode(BytesIO(string))
+
+ def bytecode_to_string(self):
+ """Return the bytecode as string."""
+ out = BytesIO()
+ self.write_bytecode(out)
+ return out.getvalue()
+
+
+class BytecodeCache(object):
+ """To implement your own bytecode cache you have to subclass this class
+ and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of
+ these methods are passed a :class:`~jinja2.bccache.Bucket`.
+
+ A very basic bytecode cache that saves the bytecode on the file system::
+
+ from os import path
+
+ class MyCache(BytecodeCache):
+
+ def __init__(self, directory):
+ self.directory = directory
+
+ def load_bytecode(self, bucket):
+ filename = path.join(self.directory, bucket.key)
+ if path.exists(filename):
+ with open(filename, 'rb') as f:
+ bucket.load_bytecode(f)
+
+ def dump_bytecode(self, bucket):
+ filename = path.join(self.directory, bucket.key)
+ with open(filename, 'wb') as f:
+ bucket.write_bytecode(f)
+
+ A more advanced version of a filesystem based bytecode cache is part of
+ Jinja.
+ """
+
+ def load_bytecode(self, bucket):
+ """Subclasses have to override this method to load bytecode into a
+ bucket. If it is not able to find code in the cache for the
+ bucket, it must not do anything.
+ """
+ raise NotImplementedError()
+
+ def dump_bytecode(self, bucket):
+ """Subclasses have to override this method to write the bytecode
+ from a bucket back to the cache. If it is unable to do so, it must not
+ fail silently but raise an exception.
+ """
+ raise NotImplementedError()
+
+ def clear(self):
+ """Clears the cache. This method is not used by Jinja but should be
+ implemented to allow applications to clear the bytecode cache used
+ by a particular environment.
+ """
+
+ def get_cache_key(self, name, filename=None):
+ """Returns the unique hash key for this template name."""
+ hash = sha1(name.encode("utf-8"))
+ if filename is not None:
+ filename = "|" + filename
+ if isinstance(filename, text_type):
+ filename = filename.encode("utf-8")
+ hash.update(filename)
+ return hash.hexdigest()
+
+ def get_source_checksum(self, source):
+ """Returns a checksum for the source."""
+ return sha1(source.encode("utf-8")).hexdigest()
+
+ def get_bucket(self, environment, name, filename, source):
+ """Return a cache bucket for the given template. All arguments are
+ mandatory but filename may be `None`.
+ """
+ key = self.get_cache_key(name, filename)
+ checksum = self.get_source_checksum(source)
+ bucket = Bucket(environment, key, checksum)
+ self.load_bytecode(bucket)
+ return bucket
+
+ def set_bucket(self, bucket):
+ """Put the bucket into the cache."""
+ self.dump_bytecode(bucket)
+
+
+class FileSystemBytecodeCache(BytecodeCache):
+ """A bytecode cache that stores bytecode on the filesystem. It accepts
+ two arguments: The directory where the cache items are stored and a
+ pattern string that is used to build the filename.
+
+ If no directory is specified a default cache directory is selected. On
+ Windows the user's temp directory is used, on UNIX systems a directory
+ is created for the user in the system temp directory.
+
+ The pattern can be used to have multiple separate caches operate on the
+ same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s``
+ is replaced with the cache key.
+
+ >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')
+
+ This bytecode cache supports clearing of the cache using the clear method.
+ """
+
+ def __init__(self, directory=None, pattern="__jinja2_%s.cache"):
+ if directory is None:
+ directory = self._get_default_cache_dir()
+ self.directory = directory
+ self.pattern = pattern
+
+ def _get_default_cache_dir(self):
+ def _unsafe_dir():
+ raise RuntimeError(
+ "Cannot determine safe temp directory. You "
+ "need to explicitly provide one."
+ )
+
+ tmpdir = tempfile.gettempdir()
+
+ # On Windows the temporary directory is per-user unless explicitly
+ # forced otherwise, so we can just use it.
+ if os.name == "nt":
+ return tmpdir
+ if not hasattr(os, "getuid"):
+ _unsafe_dir()
+
+ dirname = "_jinja2-cache-%d" % os.getuid()
+ actual_dir = os.path.join(tmpdir, dirname)
+
+ try:
+ os.mkdir(actual_dir, stat.S_IRWXU)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ try:
+ os.chmod(actual_dir, stat.S_IRWXU)
+ actual_dir_stat = os.lstat(actual_dir)
+ if (
+ actual_dir_stat.st_uid != os.getuid()
+ or not stat.S_ISDIR(actual_dir_stat.st_mode)
+ or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
+ ):
+ _unsafe_dir()
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ actual_dir_stat = os.lstat(actual_dir)
+ if (
+ actual_dir_stat.st_uid != os.getuid()
+ or not stat.S_ISDIR(actual_dir_stat.st_mode)
+ or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
+ ):
+ _unsafe_dir()
+
+ return actual_dir
+
+ def _get_cache_filename(self, bucket):
+ return path.join(self.directory, self.pattern % bucket.key)
+
+ def load_bytecode(self, bucket):
+ f = open_if_exists(self._get_cache_filename(bucket), "rb")
+ if f is not None:
+ try:
+ bucket.load_bytecode(f)
+ finally:
+ f.close()
+
+ def dump_bytecode(self, bucket):
+ f = open(self._get_cache_filename(bucket), "wb")
+ try:
+ bucket.write_bytecode(f)
+ finally:
+ f.close()
+
+ def clear(self):
+ # imported lazily here because google app-engine doesn't support
+ # write access on the file system and the function does not exist
+ # normally.
+ from os import remove
+
+ files = fnmatch.filter(listdir(self.directory), self.pattern % "*")
+ for filename in files:
+ try:
+ remove(path.join(self.directory, filename))
+ except OSError:
+ pass
+
+
+class MemcachedBytecodeCache(BytecodeCache):
+ """This class implements a bytecode cache that uses a memcache cache for
+ storing the information. It does not enforce a specific memcache library
+ (tummy's memcache or cmemcache) but will accept any class that provides
+ the minimal interface required.
+
+ Libraries compatible with this class:
+
+ - `cachelib <https://github.com/pallets/cachelib>`_
+ - `python-memcached <https://pypi.org/project/python-memcached/>`_
+
+ (Unfortunately the django cache interface is not compatible because it
+ does not support storing binary data, only unicode. You can however pass
+ the underlying cache client to the bytecode cache which is available
+ as `django.core.cache.cache._client`.)
+
+ The minimal interface for the client passed to the constructor is this:
+
+ .. class:: MinimalClientInterface
+
+ .. method:: set(key, value[, timeout])
+
+ Stores the bytecode in the cache. `value` is a string and
+ `timeout` the timeout of the key. If timeout is not provided
+ a default timeout or no timeout should be assumed, if it's
+ provided it's an integer with the number of seconds the cache
+ item should exist.
+
+ .. method:: get(key)
+
+ Returns the value for the cache key. If the item does not
+ exist in the cache the return value must be `None`.
+
+ The other arguments to the constructor are the prefix for all keys that
+ is added before the actual cache key and the timeout for the bytecode in
+ the cache system. We recommend a high (or no) timeout.
+
+ This bytecode cache does not support clearing of used items in the cache.
+ The clear method is a no-operation function.
+
+ .. versionadded:: 2.7
+ Added support for ignoring memcache errors through the
+ `ignore_memcache_errors` parameter.
+ """
+
+ def __init__(
+ self,
+ client,
+ prefix="jinja2/bytecode/",
+ timeout=None,
+ ignore_memcache_errors=True,
+ ):
+ self.client = client
+ self.prefix = prefix
+ self.timeout = timeout
+ self.ignore_memcache_errors = ignore_memcache_errors
+
+ def load_bytecode(self, bucket):
+ try:
+ code = self.client.get(self.prefix + bucket.key)
+ except Exception:
+ if not self.ignore_memcache_errors:
+ raise
+ code = None
+ if code is not None:
+ bucket.bytecode_from_string(code)
+
+ def dump_bytecode(self, bucket):
+ args = (self.prefix + bucket.key, bucket.bytecode_to_string())
+ if self.timeout is not None:
+ args += (self.timeout,)
+ try:
+ self.client.set(*args)
+ except Exception:
+ if not self.ignore_memcache_errors:
+ raise
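
For illustration, a sketch of plugging the caches above into an environment; the loader directory and template name are hypothetical, and the cache directory must already exist.

from jinja2 import Environment, FileSystemBytecodeCache, FileSystemLoader

env = Environment(
    loader=FileSystemLoader("templates"),  # hypothetical template directory
    bytecode_cache=FileSystemBytecodeCache("/tmp/jinja_cache", "%s.cache"),
)

# The first use compiles "index.html" and stores its bytecode; later
# processes find the cached bytecode via get_bucket() and skip compiling.
template = env.get_template("index.html")  # hypothetical template name
print(template.render())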
diff --git a/third_party/python/Jinja2/src/jinja2/compiler.py b/third_party/python/Jinja2/src/jinja2/compiler.py
new file mode 100644
index 0000000000..63297b42c3
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/compiler.py
@@ -0,0 +1,1843 @@
+# -*- coding: utf-8 -*-
+"""Compiles nodes from the parser into Python code."""
+from collections import namedtuple
+from functools import update_wrapper
+from itertools import chain
+from keyword import iskeyword as is_python_keyword
+
+from markupsafe import escape
+from markupsafe import Markup
+
+from . import nodes
+from ._compat import imap
+from ._compat import iteritems
+from ._compat import izip
+from ._compat import NativeStringIO
+from ._compat import range_type
+from ._compat import string_types
+from ._compat import text_type
+from .exceptions import TemplateAssertionError
+from .idtracking import Symbols
+from .idtracking import VAR_LOAD_ALIAS
+from .idtracking import VAR_LOAD_PARAMETER
+from .idtracking import VAR_LOAD_RESOLVE
+from .idtracking import VAR_LOAD_UNDEFINED
+from .nodes import EvalContext
+from .optimizer import Optimizer
+from .utils import concat
+from .visitor import NodeVisitor
+
+operators = {
+ "eq": "==",
+ "ne": "!=",
+ "gt": ">",
+ "gteq": ">=",
+ "lt": "<",
+ "lteq": "<=",
+ "in": "in",
+ "notin": "not in",
+}
+
+# which method do we want to use for dict iteration in generated code?
+# on 2.x let's go with iteritems, on 3.x with items
+if hasattr(dict, "iteritems"):
+ dict_item_iter = "iteritems"
+else:
+ dict_item_iter = "items"
+
+code_features = ["division"]
+
+# does this python version support generator stops? (PEP 0479)
+try:
+ exec("from __future__ import generator_stop")
+ code_features.append("generator_stop")
+except SyntaxError:
+ pass
+
+# does this python version support yield from?
+try:
+ exec("def f(): yield from x()")
+except SyntaxError:
+ supports_yield_from = False
+else:
+ supports_yield_from = True
+
+
+def optimizeconst(f):
+ def new_func(self, node, frame, **kwargs):
+ # Only optimize if the frame is not volatile
+ if self.optimized and not frame.eval_ctx.volatile:
+ new_node = self.optimizer.visit(node, frame.eval_ctx)
+ if new_node != node:
+ return self.visit(new_node, frame)
+ return f(self, node, frame, **kwargs)
+
+ return update_wrapper(new_func, f)
+
+
+def generate(
+ node, environment, name, filename, stream=None, defer_init=False, optimized=True
+):
+ """Generate the python source for a node tree."""
+ if not isinstance(node, nodes.Template):
+ raise TypeError("Can't compile non template nodes")
+ generator = environment.code_generator_class(
+ environment, name, filename, stream, defer_init, optimized
+ )
+ generator.visit(node)
+ if stream is None:
+ return generator.stream.getvalue()
+
+
+def has_safe_repr(value):
+ """Does the node have a safe representation?"""
+ if value is None or value is NotImplemented or value is Ellipsis:
+ return True
+ if type(value) in (bool, int, float, complex, range_type, Markup) + string_types:
+ return True
+ if type(value) in (tuple, list, set, frozenset):
+ for item in value:
+ if not has_safe_repr(item):
+ return False
+ return True
+ elif type(value) is dict:
+ for key, value in iteritems(value):
+ if not has_safe_repr(key):
+ return False
+ if not has_safe_repr(value):
+ return False
+ return True
+ return False
+
+
+def find_undeclared(nodes, names):
+ """Check if the names passed are accessed undeclared. The return value
+ is a set of all the undeclared names from the sequence of names found.
+ """
+ visitor = UndeclaredNameVisitor(names)
+ try:
+ for node in nodes:
+ visitor.visit(node)
+ except VisitorExit:
+ pass
+ return visitor.undeclared
+
+
+class MacroRef(object):
+ def __init__(self, node):
+ self.node = node
+ self.accesses_caller = False
+ self.accesses_kwargs = False
+ self.accesses_varargs = False
+
+
+class Frame(object):
+ """Holds compile time information for us."""
+
+ def __init__(self, eval_ctx, parent=None, level=None):
+ self.eval_ctx = eval_ctx
+ self.symbols = Symbols(parent and parent.symbols or None, level=level)
+
+ # a toplevel frame is the root + soft frames such as if conditions.
+ self.toplevel = False
+
+ # the root frame is basically just the outermost frame, so no if
+ # conditions. This information is used to optimize inheritance
+ # situations.
+ self.rootlevel = False
+
+ # in some dynamic inheritance situations the compiler needs to add
+ # write tests around output statements.
+ self.require_output_check = parent and parent.require_output_check
+
+ # inside some tags we are using a buffer rather than yield statements.
+ # this for example affects {% filter %} or {% macro %}. If a frame
+ # is buffered this variable points to the name of the list used as
+ # buffer.
+ self.buffer = None
+
+ # the name of the block we're in, otherwise None.
+ self.block = parent and parent.block or None
+
+ # the parent of this frame
+ self.parent = parent
+
+ if parent is not None:
+ self.buffer = parent.buffer
+
+ def copy(self):
+ """Create a copy of the current one."""
+ rv = object.__new__(self.__class__)
+ rv.__dict__.update(self.__dict__)
+ rv.symbols = self.symbols.copy()
+ return rv
+
+ def inner(self, isolated=False):
+ """Return an inner frame."""
+ if isolated:
+ return Frame(self.eval_ctx, level=self.symbols.level + 1)
+ return Frame(self.eval_ctx, self)
+
+ def soft(self):
+ """Return a soft frame. A soft frame may not be modified as
+ standalone thing as it shares the resources with the frame it
+ was created of, but it's not a rootlevel frame any longer.
+
+ This is only used to implement if-statements.
+ """
+ rv = self.copy()
+ rv.rootlevel = False
+ return rv
+
+ __copy__ = copy
+
+
+class VisitorExit(RuntimeError):
+ """Exception used by the `UndeclaredNameVisitor` to signal a stop."""
+
+
+class DependencyFinderVisitor(NodeVisitor):
+ """A visitor that collects filter and test calls."""
+
+ def __init__(self):
+ self.filters = set()
+ self.tests = set()
+
+ def visit_Filter(self, node):
+ self.generic_visit(node)
+ self.filters.add(node.name)
+
+ def visit_Test(self, node):
+ self.generic_visit(node)
+ self.tests.add(node.name)
+
+ def visit_Block(self, node):
+ """Stop visiting at blocks."""
+
+
+class UndeclaredNameVisitor(NodeVisitor):
+ """A visitor that checks if a name is accessed without being
+ declared. This is different from the frame visitor as it will
+ not stop at closure frames.
+ """
+
+ def __init__(self, names):
+ self.names = set(names)
+ self.undeclared = set()
+
+ def visit_Name(self, node):
+ if node.ctx == "load" and node.name in self.names:
+ self.undeclared.add(node.name)
+ if self.undeclared == self.names:
+ raise VisitorExit()
+ else:
+ self.names.discard(node.name)
+
+ def visit_Block(self, node):
+ """Stop visiting a blocks."""
+
+
+class CompilerExit(Exception):
+ """Raised if the compiler encountered a situation where it just
+ doesn't make sense to further process the code. Any block that
+ raises such an exception is not further processed.
+ """
+
+
+class CodeGenerator(NodeVisitor):
+ def __init__(
+ self, environment, name, filename, stream=None, defer_init=False, optimized=True
+ ):
+ if stream is None:
+ stream = NativeStringIO()
+ self.environment = environment
+ self.name = name
+ self.filename = filename
+ self.stream = stream
+ self.created_block_context = False
+ self.defer_init = defer_init
+ self.optimized = optimized
+ if optimized:
+ self.optimizer = Optimizer(environment)
+
+ # aliases for imports
+ self.import_aliases = {}
+
+ # a registry for all blocks. Because blocks are moved out
+ # into the global python scope they are registered here
+ self.blocks = {}
+
+ # the number of extends statements so far
+ self.extends_so_far = 0
+
+ # some templates have a rootlevel extends. In this case we
+ # can safely assume that we're a child template and do some
+ # more optimizations.
+ self.has_known_extends = False
+
+ # the current line number
+ self.code_lineno = 1
+
+ # registry of all filters and tests (global, not block local)
+ self.tests = {}
+ self.filters = {}
+
+ # the debug information
+ self.debug_info = []
+ self._write_debug_info = None
+
+ # the number of new lines before the next write()
+ self._new_lines = 0
+
+ # the line number of the last written statement
+ self._last_line = 0
+
+ # true if nothing was written so far.
+ self._first_write = True
+
+ # used by the `temporary_identifier` method to get new
+ # unique, temporary identifier
+ self._last_identifier = 0
+
+ # the current indentation
+ self._indentation = 0
+
+ # Tracks toplevel assignments
+ self._assign_stack = []
+
+ # Tracks parameter definition blocks
+ self._param_def_block = []
+
+ # Tracks the current context.
+ self._context_reference_stack = ["context"]
+
+ # -- Various compilation helpers
+
+ def fail(self, msg, lineno):
+ """Fail with a :exc:`TemplateAssertionError`."""
+ raise TemplateAssertionError(msg, lineno, self.name, self.filename)
+
+ def temporary_identifier(self):
+ """Get a new unique identifier."""
+ self._last_identifier += 1
+ return "t_%d" % self._last_identifier
+
+ def buffer(self, frame):
+ """Enable buffering for the frame from that point onwards."""
+ frame.buffer = self.temporary_identifier()
+ self.writeline("%s = []" % frame.buffer)
+
+ def return_buffer_contents(self, frame, force_unescaped=False):
+ """Return the buffer contents of the frame."""
+ if not force_unescaped:
+ if frame.eval_ctx.volatile:
+ self.writeline("if context.eval_ctx.autoescape:")
+ self.indent()
+ self.writeline("return Markup(concat(%s))" % frame.buffer)
+ self.outdent()
+ self.writeline("else:")
+ self.indent()
+ self.writeline("return concat(%s)" % frame.buffer)
+ self.outdent()
+ return
+ elif frame.eval_ctx.autoescape:
+ self.writeline("return Markup(concat(%s))" % frame.buffer)
+ return
+ self.writeline("return concat(%s)" % frame.buffer)
+
+ def indent(self):
+ """Indent by one."""
+ self._indentation += 1
+
+ def outdent(self, step=1):
+ """Outdent by step."""
+ self._indentation -= step
+
+ def start_write(self, frame, node=None):
+ """Yield or write into the frame buffer."""
+ if frame.buffer is None:
+ self.writeline("yield ", node)
+ else:
+ self.writeline("%s.append(" % frame.buffer, node)
+
+ def end_write(self, frame):
+ """End the writing process started by `start_write`."""
+ if frame.buffer is not None:
+ self.write(")")
+
+ def simple_write(self, s, frame, node=None):
+ """Simple shortcut for start_write + write + end_write."""
+ self.start_write(frame, node)
+ self.write(s)
+ self.end_write(frame)
+
+ def blockvisit(self, nodes, frame):
+ """Visit a list of nodes as block in a frame. If the current frame
+ is no buffer a dummy ``if 0: yield None`` is written automatically.
+ """
+ try:
+ self.writeline("pass")
+ for node in nodes:
+ self.visit(node, frame)
+ except CompilerExit:
+ pass
+
+ def write(self, x):
+ """Write a string into the output stream."""
+ if self._new_lines:
+ if not self._first_write:
+ self.stream.write("\n" * self._new_lines)
+ self.code_lineno += self._new_lines
+ if self._write_debug_info is not None:
+ self.debug_info.append((self._write_debug_info, self.code_lineno))
+ self._write_debug_info = None
+ self._first_write = False
+ self.stream.write(" " * self._indentation)
+ self._new_lines = 0
+ self.stream.write(x)
+
+ def writeline(self, x, node=None, extra=0):
+ """Combination of newline and write."""
+ self.newline(node, extra)
+ self.write(x)
+
+ def newline(self, node=None, extra=0):
+ """Add one or more newlines before the next write."""
+ self._new_lines = max(self._new_lines, 1 + extra)
+ if node is not None and node.lineno != self._last_line:
+ self._write_debug_info = node.lineno
+ self._last_line = node.lineno
+
+ def signature(self, node, frame, extra_kwargs=None):
+ """Writes a function call to the stream for the current node.
+        A leading comma is added automatically. The extra keyword
+        arguments must not include Python keywords, otherwise a syntax
+        error could occur. The extra keyword arguments should be given
+        as a Python dict.
+ """
+ # if any of the given keyword arguments is a python keyword
+ # we have to make sure that no invalid call is created.
+ kwarg_workaround = False
+ for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()):
+ if is_python_keyword(kwarg):
+ kwarg_workaround = True
+ break
+
+ for arg in node.args:
+ self.write(", ")
+ self.visit(arg, frame)
+
+ if not kwarg_workaround:
+ for kwarg in node.kwargs:
+ self.write(", ")
+ self.visit(kwarg, frame)
+ if extra_kwargs is not None:
+ for key, value in iteritems(extra_kwargs):
+ self.write(", %s=%s" % (key, value))
+ if node.dyn_args:
+ self.write(", *")
+ self.visit(node.dyn_args, frame)
+
+ if kwarg_workaround:
+ if node.dyn_kwargs is not None:
+ self.write(", **dict({")
+ else:
+ self.write(", **{")
+ for kwarg in node.kwargs:
+ self.write("%r: " % kwarg.key)
+ self.visit(kwarg.value, frame)
+ self.write(", ")
+ if extra_kwargs is not None:
+ for key, value in iteritems(extra_kwargs):
+ self.write("%r: %s, " % (key, value))
+ if node.dyn_kwargs is not None:
+ self.write("}, **")
+ self.visit(node.dyn_kwargs, frame)
+ self.write(")")
+ else:
+ self.write("}")
+
+ elif node.dyn_kwargs is not None:
+ self.write(", **")
+ self.visit(node.dyn_kwargs, frame)
+
+ def pull_dependencies(self, nodes):
+ """Pull all the dependencies."""
+ visitor = DependencyFinderVisitor()
+ for node in nodes:
+ visitor.visit(node)
+ for dependency in "filters", "tests":
+ mapping = getattr(self, dependency)
+ for name in getattr(visitor, dependency):
+ if name not in mapping:
+ mapping[name] = self.temporary_identifier()
+ self.writeline(
+ "%s = environment.%s[%r]" % (mapping[name], dependency, name)
+ )
+
+ def enter_frame(self, frame):
+ undefs = []
+ for target, (action, param) in iteritems(frame.symbols.loads):
+ if action == VAR_LOAD_PARAMETER:
+ pass
+ elif action == VAR_LOAD_RESOLVE:
+ self.writeline("%s = %s(%r)" % (target, self.get_resolve_func(), param))
+ elif action == VAR_LOAD_ALIAS:
+ self.writeline("%s = %s" % (target, param))
+ elif action == VAR_LOAD_UNDEFINED:
+ undefs.append(target)
+ else:
+ raise NotImplementedError("unknown load instruction")
+ if undefs:
+ self.writeline("%s = missing" % " = ".join(undefs))
+
+ def leave_frame(self, frame, with_python_scope=False):
+ if not with_python_scope:
+ undefs = []
+ for target, _ in iteritems(frame.symbols.loads):
+ undefs.append(target)
+ if undefs:
+ self.writeline("%s = missing" % " = ".join(undefs))
+
+ def func(self, name):
+ if self.environment.is_async:
+ return "async def %s" % name
+ return "def %s" % name
+
+ def macro_body(self, node, frame):
+ """Dump the function def of a macro or call block."""
+ frame = frame.inner()
+ frame.symbols.analyze_node(node)
+ macro_ref = MacroRef(node)
+
+ explicit_caller = None
+ skip_special_params = set()
+ args = []
+ for idx, arg in enumerate(node.args):
+ if arg.name == "caller":
+ explicit_caller = idx
+ if arg.name in ("kwargs", "varargs"):
+ skip_special_params.add(arg.name)
+ args.append(frame.symbols.ref(arg.name))
+
+ undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs"))
+
+ if "caller" in undeclared:
+ # In older Jinja versions there was a bug that allowed caller
+ # to retain the special behavior even if it was mentioned in
+            # the argument list. However, this only really worked if it
+            # was the last argument. So we explicitly check for this now
+            # and error out if it appears anywhere else in the argument
+            # list.
+ if explicit_caller is not None:
+ try:
+ node.defaults[explicit_caller - len(node.args)]
+ except IndexError:
+ self.fail(
+ "When defining macros or call blocks the "
+ 'special "caller" argument must be omitted '
+ "or be given a default.",
+ node.lineno,
+ )
+ else:
+ args.append(frame.symbols.declare_parameter("caller"))
+ macro_ref.accesses_caller = True
+ if "kwargs" in undeclared and "kwargs" not in skip_special_params:
+ args.append(frame.symbols.declare_parameter("kwargs"))
+ macro_ref.accesses_kwargs = True
+ if "varargs" in undeclared and "varargs" not in skip_special_params:
+ args.append(frame.symbols.declare_parameter("varargs"))
+ macro_ref.accesses_varargs = True
+
+ # macros are delayed, they never require output checks
+ frame.require_output_check = False
+ frame.symbols.analyze_node(node)
+ self.writeline("%s(%s):" % (self.func("macro"), ", ".join(args)), node)
+ self.indent()
+
+ self.buffer(frame)
+ self.enter_frame(frame)
+
+ self.push_parameter_definitions(frame)
+ for idx, arg in enumerate(node.args):
+ ref = frame.symbols.ref(arg.name)
+ self.writeline("if %s is missing:" % ref)
+ self.indent()
+ try:
+ default = node.defaults[idx - len(node.args)]
+ except IndexError:
+ self.writeline(
+ "%s = undefined(%r, name=%r)"
+ % (ref, "parameter %r was not provided" % arg.name, arg.name)
+ )
+ else:
+ self.writeline("%s = " % ref)
+ self.visit(default, frame)
+ self.mark_parameter_stored(ref)
+ self.outdent()
+ self.pop_parameter_definitions()
+
+ self.blockvisit(node.body, frame)
+ self.return_buffer_contents(frame, force_unescaped=True)
+ self.leave_frame(frame, with_python_scope=True)
+ self.outdent()
+
+ return frame, macro_ref
+
+ def macro_def(self, macro_ref, frame):
+ """Dump the macro definition for the def created by macro_body."""
+ arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args)
+ name = getattr(macro_ref.node, "name", None)
+ if len(macro_ref.node.args) == 1:
+ arg_tuple += ","
+ self.write(
+ "Macro(environment, macro, %r, (%s), %r, %r, %r, "
+ "context.eval_ctx.autoescape)"
+ % (
+ name,
+ arg_tuple,
+ macro_ref.accesses_kwargs,
+ macro_ref.accesses_varargs,
+ macro_ref.accesses_caller,
+ )
+ )
+
+ def position(self, node):
+ """Return a human readable position for the node."""
+ rv = "line %d" % node.lineno
+ if self.name is not None:
+ rv += " in " + repr(self.name)
+ return rv
+
+ def dump_local_context(self, frame):
+ return "{%s}" % ", ".join(
+ "%r: %s" % (name, target)
+ for name, target in iteritems(frame.symbols.dump_stores())
+ )
+
+ def write_commons(self):
+ """Writes a common preamble that is used by root and block functions.
+ Primarily this sets up common local helpers and enforces a generator
+ through a dead branch.
+ """
+ self.writeline("resolve = context.resolve_or_missing")
+ self.writeline("undefined = environment.undefined")
+ # always use the standard Undefined class for the implicit else of
+ # conditional expressions
+ self.writeline("cond_expr_undefined = Undefined")
+ self.writeline("if 0: yield None")
+
+ def push_parameter_definitions(self, frame):
+ """Pushes all parameter targets from the given frame into a local
+ stack that permits tracking of yet to be assigned parameters. In
+ particular this enables the optimization from `visit_Name` to skip
+ undefined expressions for parameters in macros as macros can reference
+ otherwise unbound parameters.
+ """
+ self._param_def_block.append(frame.symbols.dump_param_targets())
+
+ def pop_parameter_definitions(self):
+ """Pops the current parameter definitions set."""
+ self._param_def_block.pop()
+
+ def mark_parameter_stored(self, target):
+ """Marks a parameter in the current parameter definitions as stored.
+ This will skip the enforced undefined checks.
+ """
+ if self._param_def_block:
+ self._param_def_block[-1].discard(target)
+
+ def push_context_reference(self, target):
+ self._context_reference_stack.append(target)
+
+ def pop_context_reference(self):
+ self._context_reference_stack.pop()
+
+ def get_context_ref(self):
+ return self._context_reference_stack[-1]
+
+ def get_resolve_func(self):
+ target = self._context_reference_stack[-1]
+ if target == "context":
+ return "resolve"
+ return "%s.resolve" % target
+
+ def derive_context(self, frame):
+ return "%s.derived(%s)" % (
+ self.get_context_ref(),
+ self.dump_local_context(frame),
+ )
+
+ def parameter_is_undeclared(self, target):
+ """Checks if a given target is an undeclared parameter."""
+ if not self._param_def_block:
+ return False
+ return target in self._param_def_block[-1]
+
+ def push_assign_tracking(self):
+ """Pushes a new layer for assignment tracking."""
+ self._assign_stack.append(set())
+
+ def pop_assign_tracking(self, frame):
+ """Pops the topmost level for assignment tracking and updates the
+ context variables if necessary.
+ """
+ vars = self._assign_stack.pop()
+ if not frame.toplevel or not vars:
+ return
+ public_names = [x for x in vars if x[:1] != "_"]
+ if len(vars) == 1:
+ name = next(iter(vars))
+ ref = frame.symbols.ref(name)
+ self.writeline("context.vars[%r] = %s" % (name, ref))
+ else:
+ self.writeline("context.vars.update({")
+ for idx, name in enumerate(vars):
+ if idx:
+ self.write(", ")
+ ref = frame.symbols.ref(name)
+ self.write("%r: %s" % (name, ref))
+ self.write("})")
+ if public_names:
+ if len(public_names) == 1:
+ self.writeline("context.exported_vars.add(%r)" % public_names[0])
+ else:
+ self.writeline(
+ "context.exported_vars.update((%s))"
+ % ", ".join(imap(repr, public_names))
+ )
+
+ # -- Statement Visitors
+
+ def visit_Template(self, node, frame=None):
+ assert frame is None, "no root frame allowed"
+ eval_ctx = EvalContext(self.environment, self.name)
+
+ from .runtime import exported
+
+ self.writeline("from __future__ import %s" % ", ".join(code_features))
+ self.writeline("from jinja2.runtime import " + ", ".join(exported))
+
+ if self.environment.is_async:
+ self.writeline(
+ "from jinja2.asyncsupport import auto_await, "
+ "auto_aiter, AsyncLoopContext"
+ )
+
+ # if we want a deferred initialization we cannot move the
+ # environment into a local name
+ envenv = not self.defer_init and ", environment=environment" or ""
+
+ # do we have an extends tag at all? If not, we can save some
+ # overhead by just not processing any inheritance code.
+ have_extends = node.find(nodes.Extends) is not None
+
+ # find all blocks
+ for block in node.find_all(nodes.Block):
+ if block.name in self.blocks:
+ self.fail("block %r defined twice" % block.name, block.lineno)
+ self.blocks[block.name] = block
+
+ # find all imports and import them
+ for import_ in node.find_all(nodes.ImportedName):
+ if import_.importname not in self.import_aliases:
+ imp = import_.importname
+ self.import_aliases[imp] = alias = self.temporary_identifier()
+ if "." in imp:
+ module, obj = imp.rsplit(".", 1)
+ self.writeline("from %s import %s as %s" % (module, obj, alias))
+ else:
+ self.writeline("import %s as %s" % (imp, alias))
+
+ # add the load name
+ self.writeline("name = %r" % self.name)
+
+ # generate the root render function.
+ self.writeline(
+ "%s(context, missing=missing%s):" % (self.func("root"), envenv), extra=1
+ )
+ self.indent()
+ self.write_commons()
+
+ # process the root
+ frame = Frame(eval_ctx)
+ if "self" in find_undeclared(node.body, ("self",)):
+ ref = frame.symbols.declare_parameter("self")
+ self.writeline("%s = TemplateReference(context)" % ref)
+ frame.symbols.analyze_node(node)
+ frame.toplevel = frame.rootlevel = True
+ frame.require_output_check = have_extends and not self.has_known_extends
+ if have_extends:
+ self.writeline("parent_template = None")
+ self.enter_frame(frame)
+ self.pull_dependencies(node.body)
+ self.blockvisit(node.body, frame)
+ self.leave_frame(frame, with_python_scope=True)
+ self.outdent()
+
+ # make sure that the parent root is called.
+ if have_extends:
+ if not self.has_known_extends:
+ self.indent()
+ self.writeline("if parent_template is not None:")
+ self.indent()
+ if supports_yield_from and not self.environment.is_async:
+ self.writeline("yield from parent_template.root_render_func(context)")
+ else:
+ self.writeline(
+ "%sfor event in parent_template."
+ "root_render_func(context):"
+ % (self.environment.is_async and "async " or "")
+ )
+ self.indent()
+ self.writeline("yield event")
+ self.outdent()
+ self.outdent(1 + (not self.has_known_extends))
+
+ # at this point we now have the blocks collected and can visit them too.
+ for name, block in iteritems(self.blocks):
+ self.writeline(
+ "%s(context, missing=missing%s):"
+ % (self.func("block_" + name), envenv),
+ block,
+ 1,
+ )
+ self.indent()
+ self.write_commons()
+ # It's important that we do not make this frame a child of the
+ # toplevel template. This would cause a variety of
+ # interesting issues with identifier tracking.
+ block_frame = Frame(eval_ctx)
+ undeclared = find_undeclared(block.body, ("self", "super"))
+ if "self" in undeclared:
+ ref = block_frame.symbols.declare_parameter("self")
+ self.writeline("%s = TemplateReference(context)" % ref)
+ if "super" in undeclared:
+ ref = block_frame.symbols.declare_parameter("super")
+ self.writeline("%s = context.super(%r, block_%s)" % (ref, name, name))
+ block_frame.symbols.analyze_node(block)
+ block_frame.block = name
+ self.enter_frame(block_frame)
+ self.pull_dependencies(block.body)
+ self.blockvisit(block.body, block_frame)
+ self.leave_frame(block_frame, with_python_scope=True)
+ self.outdent()
+
+ self.writeline(
+ "blocks = {%s}" % ", ".join("%r: block_%s" % (x, x) for x in self.blocks),
+ extra=1,
+ )
+
+ # add a function that returns the debug info
+ self.writeline(
+ "debug_info = %r" % "&".join("%s=%s" % x for x in self.debug_info)
+ )
+
+ def visit_Block(self, node, frame):
+ """Call a block and register it for the template."""
+ level = 0
+ if frame.toplevel:
+ # if we know that we are a child template, there is no need to
+ # check if we are one
+ if self.has_known_extends:
+ return
+ if self.extends_so_far > 0:
+ self.writeline("if parent_template is None:")
+ self.indent()
+ level += 1
+
+ if node.scoped:
+ context = self.derive_context(frame)
+ else:
+ context = self.get_context_ref()
+
+ if (
+ supports_yield_from
+ and not self.environment.is_async
+ and frame.buffer is None
+ ):
+ self.writeline(
+ "yield from context.blocks[%r][0](%s)" % (node.name, context), node
+ )
+ else:
+ loop = self.environment.is_async and "async for" or "for"
+ self.writeline(
+ "%s event in context.blocks[%r][0](%s):" % (loop, node.name, context),
+ node,
+ )
+ self.indent()
+ self.simple_write("event", frame)
+ self.outdent()
+
+ self.outdent(level)
+
+ def visit_Extends(self, node, frame):
+ """Calls the extender."""
+ if not frame.toplevel:
+ self.fail("cannot use extend from a non top-level scope", node.lineno)
+
+        # if no extends statement has been seen so far, we don't have
+        # to add a check for whether something already extended the
+        # template before this one.
+ if self.extends_so_far > 0:
+
+ # if we have a known extends we just add a template runtime
+ # error into the generated code. We could catch that at compile
+            # time too, but it is preferable not to confuse users by throwing
+            # the same error at different times just "because we can".
+ if not self.has_known_extends:
+ self.writeline("if parent_template is not None:")
+ self.indent()
+ self.writeline("raise TemplateRuntimeError(%r)" % "extended multiple times")
+
+ # if we have a known extends already we don't need that code here
+ # as we know that the template execution will end here.
+ if self.has_known_extends:
+ raise CompilerExit()
+ else:
+ self.outdent()
+
+ self.writeline("parent_template = environment.get_template(", node)
+ self.visit(node.template, frame)
+ self.write(", %r)" % self.name)
+ self.writeline(
+ "for name, parent_block in parent_template.blocks.%s():" % dict_item_iter
+ )
+ self.indent()
+ self.writeline("context.blocks.setdefault(name, []).append(parent_block)")
+ self.outdent()
+
+ # if this extends statement was in the root level we can take
+ # advantage of that information and simplify the generated code
+ # in the top level from this point onwards
+ if frame.rootlevel:
+ self.has_known_extends = True
+
+ # and now we have one more
+ self.extends_so_far += 1
+
+ def visit_Include(self, node, frame):
+ """Handles includes."""
+ if node.ignore_missing:
+ self.writeline("try:")
+ self.indent()
+
+ func_name = "get_or_select_template"
+ if isinstance(node.template, nodes.Const):
+ if isinstance(node.template.value, string_types):
+ func_name = "get_template"
+ elif isinstance(node.template.value, (tuple, list)):
+ func_name = "select_template"
+ elif isinstance(node.template, (nodes.Tuple, nodes.List)):
+ func_name = "select_template"
+
+ self.writeline("template = environment.%s(" % func_name, node)
+ self.visit(node.template, frame)
+ self.write(", %r)" % self.name)
+ if node.ignore_missing:
+ self.outdent()
+ self.writeline("except TemplateNotFound:")
+ self.indent()
+ self.writeline("pass")
+ self.outdent()
+ self.writeline("else:")
+ self.indent()
+
+ skip_event_yield = False
+ if node.with_context:
+ loop = self.environment.is_async and "async for" or "for"
+ self.writeline(
+ "%s event in template.root_render_func("
+ "template.new_context(context.get_all(), True, "
+ "%s)):" % (loop, self.dump_local_context(frame))
+ )
+ elif self.environment.is_async:
+ self.writeline(
+ "for event in (await "
+ "template._get_default_module_async())"
+ "._body_stream:"
+ )
+ else:
+ if supports_yield_from:
+ self.writeline("yield from template._get_default_module()._body_stream")
+ skip_event_yield = True
+ else:
+ self.writeline(
+ "for event in template._get_default_module()._body_stream:"
+ )
+
+ if not skip_event_yield:
+ self.indent()
+ self.simple_write("event", frame)
+ self.outdent()
+
+ if node.ignore_missing:
+ self.outdent()
+
+ def visit_Import(self, node, frame):
+ """Visit regular imports."""
+ self.writeline("%s = " % frame.symbols.ref(node.target), node)
+ if frame.toplevel:
+ self.write("context.vars[%r] = " % node.target)
+ if self.environment.is_async:
+ self.write("await ")
+ self.write("environment.get_template(")
+ self.visit(node.template, frame)
+ self.write(", %r)." % self.name)
+ if node.with_context:
+ self.write(
+ "make_module%s(context.get_all(), True, %s)"
+ % (
+ self.environment.is_async and "_async" or "",
+ self.dump_local_context(frame),
+ )
+ )
+ elif self.environment.is_async:
+ self.write("_get_default_module_async()")
+ else:
+ self.write("_get_default_module()")
+ if frame.toplevel and not node.target.startswith("_"):
+ self.writeline("context.exported_vars.discard(%r)" % node.target)
+
+ def visit_FromImport(self, node, frame):
+ """Visit named imports."""
+ self.newline(node)
+ self.write(
+ "included_template = %senvironment.get_template("
+ % (self.environment.is_async and "await " or "")
+ )
+ self.visit(node.template, frame)
+ self.write(", %r)." % self.name)
+ if node.with_context:
+ self.write(
+ "make_module%s(context.get_all(), True, %s)"
+ % (
+ self.environment.is_async and "_async" or "",
+ self.dump_local_context(frame),
+ )
+ )
+ elif self.environment.is_async:
+ self.write("_get_default_module_async()")
+ else:
+ self.write("_get_default_module()")
+
+ var_names = []
+ discarded_names = []
+ for name in node.names:
+ if isinstance(name, tuple):
+ name, alias = name
+ else:
+ alias = name
+ self.writeline(
+ "%s = getattr(included_template, "
+ "%r, missing)" % (frame.symbols.ref(alias), name)
+ )
+ self.writeline("if %s is missing:" % frame.symbols.ref(alias))
+ self.indent()
+ self.writeline(
+ "%s = undefined(%r %% "
+ "included_template.__name__, "
+ "name=%r)"
+ % (
+ frame.symbols.ref(alias),
+ "the template %%r (imported on %s) does "
+ "not export the requested name %s"
+ % (self.position(node), repr(name)),
+ name,
+ )
+ )
+ self.outdent()
+ if frame.toplevel:
+ var_names.append(alias)
+ if not alias.startswith("_"):
+ discarded_names.append(alias)
+
+ if var_names:
+ if len(var_names) == 1:
+ name = var_names[0]
+ self.writeline(
+ "context.vars[%r] = %s" % (name, frame.symbols.ref(name))
+ )
+ else:
+ self.writeline(
+ "context.vars.update({%s})"
+ % ", ".join(
+ "%r: %s" % (name, frame.symbols.ref(name)) for name in var_names
+ )
+ )
+ if discarded_names:
+ if len(discarded_names) == 1:
+ self.writeline("context.exported_vars.discard(%r)" % discarded_names[0])
+ else:
+ self.writeline(
+ "context.exported_vars.difference_"
+ "update((%s))" % ", ".join(imap(repr, discarded_names))
+ )
+
+ def visit_For(self, node, frame):
+ loop_frame = frame.inner()
+ test_frame = frame.inner()
+ else_frame = frame.inner()
+
+ # try to figure out if we have an extended loop. An extended loop
+        # is necessary if the loop is in recursive mode or if the special loop
+ # variable is accessed in the body.
+ extended_loop = node.recursive or "loop" in find_undeclared(
+ node.iter_child_nodes(only=("body",)), ("loop",)
+ )
+
+ loop_ref = None
+ if extended_loop:
+ loop_ref = loop_frame.symbols.declare_parameter("loop")
+
+ loop_frame.symbols.analyze_node(node, for_branch="body")
+ if node.else_:
+ else_frame.symbols.analyze_node(node, for_branch="else")
+
+ if node.test:
+ loop_filter_func = self.temporary_identifier()
+ test_frame.symbols.analyze_node(node, for_branch="test")
+ self.writeline("%s(fiter):" % self.func(loop_filter_func), node.test)
+ self.indent()
+ self.enter_frame(test_frame)
+ self.writeline(self.environment.is_async and "async for " or "for ")
+ self.visit(node.target, loop_frame)
+ self.write(" in ")
+ self.write(self.environment.is_async and "auto_aiter(fiter)" or "fiter")
+ self.write(":")
+ self.indent()
+ self.writeline("if ", node.test)
+ self.visit(node.test, test_frame)
+ self.write(":")
+ self.indent()
+ self.writeline("yield ")
+ self.visit(node.target, loop_frame)
+ self.outdent(3)
+ self.leave_frame(test_frame, with_python_scope=True)
+
+        # if we don't have a recursive loop we have to find the shadowed
+ # variables at that point. Because loops can be nested but the loop
+ # variable is a special one we have to enforce aliasing for it.
+ if node.recursive:
+ self.writeline(
+ "%s(reciter, loop_render_func, depth=0):" % self.func("loop"), node
+ )
+ self.indent()
+ self.buffer(loop_frame)
+
+ # Use the same buffer for the else frame
+ else_frame.buffer = loop_frame.buffer
+
+ # make sure the loop variable is a special one and raise a template
+ # assertion error if a loop tries to write to loop
+ if extended_loop:
+ self.writeline("%s = missing" % loop_ref)
+
+ for name in node.find_all(nodes.Name):
+ if name.ctx == "store" and name.name == "loop":
+ self.fail(
+ "Can't assign to special loop variable in for-loop target",
+ name.lineno,
+ )
+
+ if node.else_:
+ iteration_indicator = self.temporary_identifier()
+ self.writeline("%s = 1" % iteration_indicator)
+
+ self.writeline(self.environment.is_async and "async for " or "for ", node)
+ self.visit(node.target, loop_frame)
+ if extended_loop:
+ if self.environment.is_async:
+ self.write(", %s in AsyncLoopContext(" % loop_ref)
+ else:
+ self.write(", %s in LoopContext(" % loop_ref)
+ else:
+ self.write(" in ")
+
+ if node.test:
+ self.write("%s(" % loop_filter_func)
+ if node.recursive:
+ self.write("reciter")
+ else:
+ if self.environment.is_async and not extended_loop:
+ self.write("auto_aiter(")
+ self.visit(node.iter, frame)
+ if self.environment.is_async and not extended_loop:
+ self.write(")")
+ if node.test:
+ self.write(")")
+
+ if node.recursive:
+ self.write(", undefined, loop_render_func, depth):")
+ else:
+ self.write(extended_loop and ", undefined):" or ":")
+
+ self.indent()
+ self.enter_frame(loop_frame)
+
+ self.blockvisit(node.body, loop_frame)
+ if node.else_:
+ self.writeline("%s = 0" % iteration_indicator)
+ self.outdent()
+ self.leave_frame(
+ loop_frame, with_python_scope=node.recursive and not node.else_
+ )
+
+ if node.else_:
+ self.writeline("if %s:" % iteration_indicator)
+ self.indent()
+ self.enter_frame(else_frame)
+ self.blockvisit(node.else_, else_frame)
+ self.leave_frame(else_frame)
+ self.outdent()
+
+ # if the node was recursive we have to return the buffer contents
+ # and start the iteration code
+ if node.recursive:
+ self.return_buffer_contents(loop_frame)
+ self.outdent()
+ self.start_write(frame, node)
+ if self.environment.is_async:
+ self.write("await ")
+ self.write("loop(")
+ if self.environment.is_async:
+ self.write("auto_aiter(")
+ self.visit(node.iter, frame)
+ if self.environment.is_async:
+ self.write(")")
+ self.write(", loop)")
+ self.end_write(frame)
+
+ def visit_If(self, node, frame):
+ if_frame = frame.soft()
+ self.writeline("if ", node)
+ self.visit(node.test, if_frame)
+ self.write(":")
+ self.indent()
+ self.blockvisit(node.body, if_frame)
+ self.outdent()
+ for elif_ in node.elif_:
+ self.writeline("elif ", elif_)
+ self.visit(elif_.test, if_frame)
+ self.write(":")
+ self.indent()
+ self.blockvisit(elif_.body, if_frame)
+ self.outdent()
+ if node.else_:
+ self.writeline("else:")
+ self.indent()
+ self.blockvisit(node.else_, if_frame)
+ self.outdent()
+
+ def visit_Macro(self, node, frame):
+ macro_frame, macro_ref = self.macro_body(node, frame)
+ self.newline()
+ if frame.toplevel:
+ if not node.name.startswith("_"):
+ self.write("context.exported_vars.add(%r)" % node.name)
+ self.writeline("context.vars[%r] = " % node.name)
+ self.write("%s = " % frame.symbols.ref(node.name))
+ self.macro_def(macro_ref, macro_frame)
+
+ def visit_CallBlock(self, node, frame):
+ call_frame, macro_ref = self.macro_body(node, frame)
+ self.writeline("caller = ")
+ self.macro_def(macro_ref, call_frame)
+ self.start_write(frame, node)
+ self.visit_Call(node.call, frame, forward_caller=True)
+ self.end_write(frame)
+
+ def visit_FilterBlock(self, node, frame):
+ filter_frame = frame.inner()
+ filter_frame.symbols.analyze_node(node)
+ self.enter_frame(filter_frame)
+ self.buffer(filter_frame)
+ self.blockvisit(node.body, filter_frame)
+ self.start_write(frame, node)
+ self.visit_Filter(node.filter, filter_frame)
+ self.end_write(frame)
+ self.leave_frame(filter_frame)
+
+ def visit_With(self, node, frame):
+ with_frame = frame.inner()
+ with_frame.symbols.analyze_node(node)
+ self.enter_frame(with_frame)
+ for target, expr in izip(node.targets, node.values):
+ self.newline()
+ self.visit(target, with_frame)
+ self.write(" = ")
+ self.visit(expr, frame)
+ self.blockvisit(node.body, with_frame)
+ self.leave_frame(with_frame)
+
+ def visit_ExprStmt(self, node, frame):
+ self.newline(node)
+ self.visit(node.node, frame)
+
+ _FinalizeInfo = namedtuple("_FinalizeInfo", ("const", "src"))
+ #: The default finalize function if the environment isn't configured
+ #: with one. Or if the environment has one, this is called on that
+ #: function's output for constants.
+ _default_finalize = text_type
+ _finalize = None
+
+ def _make_finalize(self):
+ """Build the finalize function to be used on constants and at
+ runtime. Cached so it's only created once for all output nodes.
+
+ Returns a ``namedtuple`` with the following attributes:
+
+ ``const``
+ A function to finalize constant data at compile time.
+
+ ``src``
+ Source code to output around nodes to be evaluated at
+ runtime.
+ """
+ if self._finalize is not None:
+ return self._finalize
+
+ finalize = default = self._default_finalize
+ src = None
+
+ if self.environment.finalize:
+ src = "environment.finalize("
+ env_finalize = self.environment.finalize
+
+ def finalize(value):
+ return default(env_finalize(value))
+
+ if getattr(env_finalize, "contextfunction", False) is True:
+ src += "context, "
+ finalize = None # noqa: F811
+ elif getattr(env_finalize, "evalcontextfunction", False) is True:
+ src += "context.eval_ctx, "
+ finalize = None
+ elif getattr(env_finalize, "environmentfunction", False) is True:
+ src += "environment, "
+
+ def finalize(value):
+ return default(env_finalize(self.environment, value))
+
+ self._finalize = self._FinalizeInfo(finalize, src)
+ return self._finalize
+
+ def _output_const_repr(self, group):
+ """Given a group of constant values converted from ``Output``
+ child nodes, produce a string to write to the template module
+ source.
+ """
+ return repr(concat(group))
+
+ def _output_child_to_const(self, node, frame, finalize):
+ """Try to optimize a child of an ``Output`` node by trying to
+ convert it to constant, finalized data at compile time.
+
+ If :exc:`Impossible` is raised, the node is not constant and
+        will be evaluated at runtime. If any other exception is raised,
+        the node is also evaluated at runtime for easier debugging.
+ """
+ const = node.as_const(frame.eval_ctx)
+
+ if frame.eval_ctx.autoescape:
+ const = escape(const)
+
+ # Template data doesn't go through finalize.
+ if isinstance(node, nodes.TemplateData):
+ return text_type(const)
+
+ return finalize.const(const)
+
+ def _output_child_pre(self, node, frame, finalize):
+ """Output extra source code before visiting a child of an
+ ``Output`` node.
+ """
+ if frame.eval_ctx.volatile:
+ self.write("(escape if context.eval_ctx.autoescape else to_string)(")
+ elif frame.eval_ctx.autoescape:
+ self.write("escape(")
+ else:
+ self.write("to_string(")
+
+ if finalize.src is not None:
+ self.write(finalize.src)
+
+ def _output_child_post(self, node, frame, finalize):
+ """Output extra source code after visiting a child of an
+ ``Output`` node.
+ """
+ self.write(")")
+
+ if finalize.src is not None:
+ self.write(")")
+
+ def visit_Output(self, node, frame):
+ # If an extends is active, don't render outside a block.
+ if frame.require_output_check:
+ # A top-level extends is known to exist at compile time.
+ if self.has_known_extends:
+ return
+
+ self.writeline("if parent_template is None:")
+ self.indent()
+
+ finalize = self._make_finalize()
+ body = []
+
+ # Evaluate constants at compile time if possible. Each item in
+ # body will be either a list of static data or a node to be
+ # evaluated at runtime.
+ for child in node.nodes:
+ try:
+ if not (
+ # If the finalize function requires runtime context,
+ # constants can't be evaluated at compile time.
+ finalize.const
+ # Unless it's basic template data that won't be
+ # finalized anyway.
+ or isinstance(child, nodes.TemplateData)
+ ):
+ raise nodes.Impossible()
+
+ const = self._output_child_to_const(child, frame, finalize)
+ except (nodes.Impossible, Exception):
+ # The node was not constant and needs to be evaluated at
+ # runtime. Or another error was raised, which is easier
+ # to debug at runtime.
+ body.append(child)
+ continue
+
+ if body and isinstance(body[-1], list):
+ body[-1].append(const)
+ else:
+ body.append([const])
+
+ if frame.buffer is not None:
+ if len(body) == 1:
+ self.writeline("%s.append(" % frame.buffer)
+ else:
+ self.writeline("%s.extend((" % frame.buffer)
+
+ self.indent()
+
+ for item in body:
+ if isinstance(item, list):
+ # A group of constant data to join and output.
+ val = self._output_const_repr(item)
+
+ if frame.buffer is None:
+ self.writeline("yield " + val)
+ else:
+ self.writeline(val + ",")
+ else:
+ if frame.buffer is None:
+ self.writeline("yield ", item)
+ else:
+ self.newline(item)
+
+ # A node to be evaluated at runtime.
+ self._output_child_pre(item, frame, finalize)
+ self.visit(item, frame)
+ self._output_child_post(item, frame, finalize)
+
+ if frame.buffer is not None:
+ self.write(",")
+
+ if frame.buffer is not None:
+ self.outdent()
+ self.writeline(")" if len(body) == 1 else "))")
+
+ if frame.require_output_check:
+ self.outdent()
+
+ def visit_Assign(self, node, frame):
+ self.push_assign_tracking()
+ self.newline(node)
+ self.visit(node.target, frame)
+ self.write(" = ")
+ self.visit(node.node, frame)
+ self.pop_assign_tracking(frame)
+
+ def visit_AssignBlock(self, node, frame):
+ self.push_assign_tracking()
+ block_frame = frame.inner()
+ # This is a special case. Since a set block always captures we
+ # will disable output checks. This way one can use set blocks
+ # toplevel even in extended templates.
+ block_frame.require_output_check = False
+ block_frame.symbols.analyze_node(node)
+ self.enter_frame(block_frame)
+ self.buffer(block_frame)
+ self.blockvisit(node.body, block_frame)
+ self.newline(node)
+ self.visit(node.target, frame)
+ self.write(" = (Markup if context.eval_ctx.autoescape else identity)(")
+ if node.filter is not None:
+ self.visit_Filter(node.filter, block_frame)
+ else:
+ self.write("concat(%s)" % block_frame.buffer)
+ self.write(")")
+ self.pop_assign_tracking(frame)
+ self.leave_frame(block_frame)
+
+ # -- Expression Visitors
+
+ def visit_Name(self, node, frame):
+ if node.ctx == "store" and frame.toplevel:
+ if self._assign_stack:
+ self._assign_stack[-1].add(node.name)
+ ref = frame.symbols.ref(node.name)
+
+ # If we are looking up a variable we might have to deal with the
+ # case where it's undefined. We can skip that case if the load
+ # instruction indicates a parameter which are always defined.
+ if node.ctx == "load":
+ load = frame.symbols.find_load(ref)
+ if not (
+ load is not None
+ and load[0] == VAR_LOAD_PARAMETER
+ and not self.parameter_is_undeclared(ref)
+ ):
+ self.write(
+ "(undefined(name=%r) if %s is missing else %s)"
+ % (node.name, ref, ref)
+ )
+ return
+
+ self.write(ref)
+
+ def visit_NSRef(self, node, frame):
+ # NSRefs can only be used to store values; since they use the normal
+ # `foo.bar` notation they will be parsed as a normal attribute access
+ # when used anywhere but in a `set` context
+ ref = frame.symbols.ref(node.name)
+ self.writeline("if not isinstance(%s, Namespace):" % ref)
+ self.indent()
+ self.writeline(
+ "raise TemplateRuntimeError(%r)"
+ % "cannot assign attribute on non-namespace object"
+ )
+ self.outdent()
+ self.writeline("%s[%r]" % (ref, node.attr))
+
+ def visit_Const(self, node, frame):
+ val = node.as_const(frame.eval_ctx)
+ if isinstance(val, float):
+ self.write(str(val))
+ else:
+ self.write(repr(val))
+
+ def visit_TemplateData(self, node, frame):
+ try:
+ self.write(repr(node.as_const(frame.eval_ctx)))
+ except nodes.Impossible:
+ self.write(
+ "(Markup if context.eval_ctx.autoescape else identity)(%r)" % node.data
+ )
+
+ def visit_Tuple(self, node, frame):
+ self.write("(")
+ idx = -1
+ for idx, item in enumerate(node.items):
+ if idx:
+ self.write(", ")
+ self.visit(item, frame)
+ self.write(idx == 0 and ",)" or ")")
+
+ def visit_List(self, node, frame):
+ self.write("[")
+ for idx, item in enumerate(node.items):
+ if idx:
+ self.write(", ")
+ self.visit(item, frame)
+ self.write("]")
+
+ def visit_Dict(self, node, frame):
+ self.write("{")
+ for idx, item in enumerate(node.items):
+ if idx:
+ self.write(", ")
+ self.visit(item.key, frame)
+ self.write(": ")
+ self.visit(item.value, frame)
+ self.write("}")
+
+ def binop(operator, interceptable=True): # noqa: B902
+ @optimizeconst
+ def visitor(self, node, frame):
+ if (
+ self.environment.sandboxed
+ and operator in self.environment.intercepted_binops
+ ):
+ self.write("environment.call_binop(context, %r, " % operator)
+ self.visit(node.left, frame)
+ self.write(", ")
+ self.visit(node.right, frame)
+ else:
+ self.write("(")
+ self.visit(node.left, frame)
+ self.write(" %s " % operator)
+ self.visit(node.right, frame)
+ self.write(")")
+
+ return visitor
+
+ def uaop(operator, interceptable=True): # noqa: B902
+ @optimizeconst
+ def visitor(self, node, frame):
+ if (
+ self.environment.sandboxed
+ and operator in self.environment.intercepted_unops
+ ):
+ self.write("environment.call_unop(context, %r, " % operator)
+ self.visit(node.node, frame)
+ else:
+ self.write("(" + operator)
+ self.visit(node.node, frame)
+ self.write(")")
+
+ return visitor
+
+ visit_Add = binop("+")
+ visit_Sub = binop("-")
+ visit_Mul = binop("*")
+ visit_Div = binop("/")
+ visit_FloorDiv = binop("//")
+ visit_Pow = binop("**")
+ visit_Mod = binop("%")
+ visit_And = binop("and", interceptable=False)
+ visit_Or = binop("or", interceptable=False)
+ visit_Pos = uaop("+")
+ visit_Neg = uaop("-")
+ visit_Not = uaop("not ", interceptable=False)
+ del binop, uaop
+
+ @optimizeconst
+ def visit_Concat(self, node, frame):
+ if frame.eval_ctx.volatile:
+ func_name = "(context.eval_ctx.volatile and markup_join or unicode_join)"
+ elif frame.eval_ctx.autoescape:
+ func_name = "markup_join"
+ else:
+ func_name = "unicode_join"
+ self.write("%s((" % func_name)
+ for arg in node.nodes:
+ self.visit(arg, frame)
+ self.write(", ")
+ self.write("))")
+
+ @optimizeconst
+ def visit_Compare(self, node, frame):
+ self.write("(")
+ self.visit(node.expr, frame)
+ for op in node.ops:
+ self.visit(op, frame)
+ self.write(")")
+
+ def visit_Operand(self, node, frame):
+ self.write(" %s " % operators[node.op])
+ self.visit(node.expr, frame)
+
+ @optimizeconst
+ def visit_Getattr(self, node, frame):
+ if self.environment.is_async:
+ self.write("(await auto_await(")
+
+ self.write("environment.getattr(")
+ self.visit(node.node, frame)
+ self.write(", %r)" % node.attr)
+
+ if self.environment.is_async:
+ self.write("))")
+
+ @optimizeconst
+ def visit_Getitem(self, node, frame):
+ # slices bypass the environment getitem method.
+ if isinstance(node.arg, nodes.Slice):
+ self.visit(node.node, frame)
+ self.write("[")
+ self.visit(node.arg, frame)
+ self.write("]")
+ else:
+ if self.environment.is_async:
+ self.write("(await auto_await(")
+
+ self.write("environment.getitem(")
+ self.visit(node.node, frame)
+ self.write(", ")
+ self.visit(node.arg, frame)
+ self.write(")")
+
+ if self.environment.is_async:
+ self.write("))")
+
+ def visit_Slice(self, node, frame):
+ if node.start is not None:
+ self.visit(node.start, frame)
+ self.write(":")
+ if node.stop is not None:
+ self.visit(node.stop, frame)
+ if node.step is not None:
+ self.write(":")
+ self.visit(node.step, frame)
+
+ @optimizeconst
+ def visit_Filter(self, node, frame):
+ if self.environment.is_async:
+ self.write("await auto_await(")
+ self.write(self.filters[node.name] + "(")
+ func = self.environment.filters.get(node.name)
+ if func is None:
+ self.fail("no filter named %r" % node.name, node.lineno)
+ if getattr(func, "contextfilter", False) is True:
+ self.write("context, ")
+ elif getattr(func, "evalcontextfilter", False) is True:
+ self.write("context.eval_ctx, ")
+ elif getattr(func, "environmentfilter", False) is True:
+ self.write("environment, ")
+
+ # if the filter node is None we are inside a filter block
+ # and want to write to the current buffer
+ if node.node is not None:
+ self.visit(node.node, frame)
+ elif frame.eval_ctx.volatile:
+ self.write(
+ "(context.eval_ctx.autoescape and"
+ " Markup(concat(%s)) or concat(%s))" % (frame.buffer, frame.buffer)
+ )
+ elif frame.eval_ctx.autoescape:
+ self.write("Markup(concat(%s))" % frame.buffer)
+ else:
+ self.write("concat(%s)" % frame.buffer)
+ self.signature(node, frame)
+ self.write(")")
+ if self.environment.is_async:
+ self.write(")")
+
+ @optimizeconst
+ def visit_Test(self, node, frame):
+ self.write(self.tests[node.name] + "(")
+ if node.name not in self.environment.tests:
+ self.fail("no test named %r" % node.name, node.lineno)
+ self.visit(node.node, frame)
+ self.signature(node, frame)
+ self.write(")")
+
+ @optimizeconst
+ def visit_CondExpr(self, node, frame):
+ def write_expr2():
+ if node.expr2 is not None:
+ return self.visit(node.expr2, frame)
+ self.write(
+ "cond_expr_undefined(%r)"
+ % (
+ "the inline if-"
+ "expression on %s evaluated to false and "
+ "no else section was defined." % self.position(node)
+ )
+ )
+
+ self.write("(")
+ self.visit(node.expr1, frame)
+ self.write(" if ")
+ self.visit(node.test, frame)
+ self.write(" else ")
+ write_expr2()
+ self.write(")")
+
+ @optimizeconst
+ def visit_Call(self, node, frame, forward_caller=False):
+ if self.environment.is_async:
+ self.write("await auto_await(")
+ if self.environment.sandboxed:
+ self.write("environment.call(context, ")
+ else:
+ self.write("context.call(")
+ self.visit(node.node, frame)
+ extra_kwargs = forward_caller and {"caller": "caller"} or None
+ self.signature(node, frame, extra_kwargs)
+ self.write(")")
+ if self.environment.is_async:
+ self.write(")")
+
+ def visit_Keyword(self, node, frame):
+ self.write(node.key + "=")
+ self.visit(node.value, frame)
+
+ # -- Unused nodes for extensions
+
+ def visit_MarkSafe(self, node, frame):
+ self.write("Markup(")
+ self.visit(node.expr, frame)
+ self.write(")")
+
+ def visit_MarkSafeIfAutoescape(self, node, frame):
+ self.write("(context.eval_ctx.autoescape and Markup or identity)(")
+ self.visit(node.expr, frame)
+ self.write(")")
+
+ def visit_EnvironmentAttribute(self, node, frame):
+ self.write("environment." + node.name)
+
+ def visit_ExtensionAttribute(self, node, frame):
+ self.write("environment.extensions[%r].%s" % (node.identifier, node.name))
+
+ def visit_ImportedName(self, node, frame):
+ self.write(self.import_aliases[node.importname])
+
+ def visit_InternalName(self, node, frame):
+ self.write(node.name)
+
+ def visit_ContextReference(self, node, frame):
+ self.write("context")
+
+ def visit_DerivedContextReference(self, node, frame):
+ self.write(self.derive_context(frame))
+
+ def visit_Continue(self, node, frame):
+ self.writeline("continue", node)
+
+ def visit_Break(self, node, frame):
+ self.writeline("break", node)
+
+ def visit_Scope(self, node, frame):
+ scope_frame = frame.inner()
+ scope_frame.symbols.analyze_node(node)
+ self.enter_frame(scope_frame)
+ self.blockvisit(node.body, scope_frame)
+ self.leave_frame(scope_frame)
+
+ def visit_OverlayScope(self, node, frame):
+ ctx = self.temporary_identifier()
+ self.writeline("%s = %s" % (ctx, self.derive_context(frame)))
+ self.writeline("%s.vars = " % ctx)
+ self.visit(node.context, frame)
+ self.push_context_reference(ctx)
+
+ scope_frame = frame.inner(isolated=True)
+ scope_frame.symbols.analyze_node(node)
+ self.enter_frame(scope_frame)
+ self.blockvisit(node.body, scope_frame)
+ self.leave_frame(scope_frame)
+ self.pop_context_reference()
+
+ def visit_EvalContextModifier(self, node, frame):
+ for keyword in node.options:
+ self.writeline("context.eval_ctx.%s = " % keyword.key)
+ self.visit(keyword.value, frame)
+ try:
+ val = keyword.value.as_const(frame.eval_ctx)
+ except nodes.Impossible:
+ frame.eval_ctx.volatile = True
+ else:
+ setattr(frame.eval_ctx, keyword.key, val)
+
+ def visit_ScopedEvalContextModifier(self, node, frame):
+ old_ctx_name = self.temporary_identifier()
+ saved_ctx = frame.eval_ctx.save()
+ self.writeline("%s = context.eval_ctx.save()" % old_ctx_name)
+ self.visit_EvalContextModifier(node, frame)
+ for child in node.body:
+ self.visit(child, frame)
+ frame.eval_ctx.revert(saved_ctx)
+ self.writeline("context.eval_ctx.revert(%s)" % old_ctx_name)
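
The CodeGenerator above compiles a template's AST into an importable Python module: visit_Template emits the root() render function plus one block_*() function per block, and the expression visitors write the inline Python for each node. A minimal sketch of how that generated source can be inspected, using the public Environment.compile() API with raw=True (which returns the generated module source as a string):

from jinja2 import Environment

env = Environment()
source = "{% block body %}Hello {{ name }}!{% endblock %}"
# raw=True returns the Python source produced by the code generator,
# including the root() and block_body() functions described above.
print(env.compile(source, raw=True))
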
diff --git a/third_party/python/Jinja2/src/jinja2/constants.py b/third_party/python/Jinja2/src/jinja2/constants.py
new file mode 100644
index 0000000000..bf7f2ca721
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/constants.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+#: list of lorem ipsum words used by the lipsum() helper function
+LOREM_IPSUM_WORDS = u"""\
+a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
+auctor augue bibendum blandit class commodo condimentum congue consectetuer
+consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
+diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
+elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
+faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
+hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
+justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
+luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
+mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
+nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
+penatibus per pharetra phasellus placerat platea porta porttitor posuere
+potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
+ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
+sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
+tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
+ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
+viverra volutpat vulputate"""
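
These words back the lipsum() template global (generate_lorem_ipsum from jinja2.utils, exposed through the default namespace). A small sketch of typical usage:

from jinja2 import Environment

env = Environment()
# lipsum() draws from LOREM_IPSUM_WORDS; html=False yields plain paragraphs.
print(env.from_string("{{ lipsum(n=1, html=False) }}").render())
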
diff --git a/third_party/python/Jinja2/src/jinja2/debug.py b/third_party/python/Jinja2/src/jinja2/debug.py
new file mode 100644
index 0000000000..5d8aec31d0
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/debug.py
@@ -0,0 +1,268 @@
+import sys
+from types import CodeType
+
+from . import TemplateSyntaxError
+from ._compat import PYPY
+from .utils import internal_code
+from .utils import missing
+
+
+def rewrite_traceback_stack(source=None):
+ """Rewrite the current exception to replace any tracebacks from
+ within compiled template code with tracebacks that look like they
+ came from the template source.
+
+ This must be called within an ``except`` block.
+
+ :param source: For ``TemplateSyntaxError``, the original source if
+ known.
+ :return: A :meth:`sys.exc_info` tuple that can be re-raised.
+ """
+ exc_type, exc_value, tb = sys.exc_info()
+
+ if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated:
+ exc_value.translated = True
+ exc_value.source = source
+
+ try:
+ # Remove the old traceback on Python 3, otherwise the frames
+ # from the compiler still show up.
+ exc_value.with_traceback(None)
+ except AttributeError:
+ pass
+
+ # Outside of runtime, so the frame isn't executing template
+ # code, but it still needs to point at the template.
+ tb = fake_traceback(
+ exc_value, None, exc_value.filename or "<unknown>", exc_value.lineno
+ )
+ else:
+ # Skip the frame for the render function.
+ tb = tb.tb_next
+
+ stack = []
+
+    # Build the stack of traceback objects, replacing any in template
+ # code with the source file and line information.
+ while tb is not None:
+ # Skip frames decorated with @internalcode. These are internal
+ # calls that aren't useful in template debugging output.
+ if tb.tb_frame.f_code in internal_code:
+ tb = tb.tb_next
+ continue
+
+ template = tb.tb_frame.f_globals.get("__jinja_template__")
+
+ if template is not None:
+ lineno = template.get_corresponding_lineno(tb.tb_lineno)
+ fake_tb = fake_traceback(exc_value, tb, template.filename, lineno)
+ stack.append(fake_tb)
+ else:
+ stack.append(tb)
+
+ tb = tb.tb_next
+
+ tb_next = None
+
+ # Assign tb_next in reverse to avoid circular references.
+ for tb in reversed(stack):
+ tb_next = tb_set_next(tb, tb_next)
+
+ return exc_type, exc_value, tb_next
+
+
+def fake_traceback(exc_value, tb, filename, lineno):
+ """Produce a new traceback object that looks like it came from the
+ template source instead of the compiled code. The filename, line
+ number, and location name will point to the template, and the local
+ variables will be the current template context.
+
+ :param exc_value: The original exception to be re-raised to create
+ the new traceback.
+ :param tb: The original traceback to get the local variables and
+ code info from.
+ :param filename: The template filename.
+ :param lineno: The line number in the template source.
+ """
+ if tb is not None:
+ # Replace the real locals with the context that would be
+ # available at that point in the template.
+ locals = get_template_locals(tb.tb_frame.f_locals)
+ locals.pop("__jinja_exception__", None)
+ else:
+ locals = {}
+
+ globals = {
+ "__name__": filename,
+ "__file__": filename,
+ "__jinja_exception__": exc_value,
+ }
+ # Raise an exception at the correct line number.
+ code = compile("\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec")
+
+ # Build a new code object that points to the template file and
+ # replaces the location with a block name.
+ try:
+ location = "template"
+
+ if tb is not None:
+ function = tb.tb_frame.f_code.co_name
+
+ if function == "root":
+ location = "top-level template code"
+ elif function.startswith("block_"):
+ location = 'block "%s"' % function[6:]
+
+ # Collect arguments for the new code object. CodeType only
+ # accepts positional arguments, and arguments were inserted in
+ # new Python versions.
+ code_args = []
+
+ for attr in (
+ "argcount",
+ "posonlyargcount", # Python 3.8
+ "kwonlyargcount", # Python 3
+ "nlocals",
+ "stacksize",
+ "flags",
+ "code", # codestring
+ "consts", # constants
+ "names",
+ "varnames",
+ ("filename", filename),
+ ("name", location),
+ "firstlineno",
+ "lnotab",
+ "freevars",
+ "cellvars",
+ ):
+ if isinstance(attr, tuple):
+ # Replace with given value.
+ code_args.append(attr[1])
+ continue
+
+ try:
+ # Copy original value if it exists.
+ code_args.append(getattr(code, "co_" + attr))
+ except AttributeError:
+ # Some arguments were added later.
+ continue
+
+ code = CodeType(*code_args)
+ except Exception:
+ # Some environments such as Google App Engine don't support
+ # modifying code objects.
+ pass
+
+ # Execute the new code, which is guaranteed to raise, and return
+ # the new traceback without this frame.
+ try:
+ exec(code, globals, locals)
+ except BaseException:
+ return sys.exc_info()[2].tb_next
+
+
+def get_template_locals(real_locals):
+ """Based on the runtime locals, get the context that would be
+ available at that point in the template.
+ """
+ # Start with the current template context.
+ ctx = real_locals.get("context")
+
+ if ctx:
+ data = ctx.get_all().copy()
+ else:
+ data = {}
+
+ # Might be in a derived context that only sets local variables
+ # rather than pushing a context. Local variables follow the scheme
+ # l_depth_name. Find the highest-depth local that has a value for
+ # each name.
+ local_overrides = {}
+
+ for name, value in real_locals.items():
+ if not name.startswith("l_") or value is missing:
+ # Not a template variable, or no longer relevant.
+ continue
+
+ try:
+ _, depth, name = name.split("_", 2)
+ depth = int(depth)
+ except ValueError:
+ continue
+
+ cur_depth = local_overrides.get(name, (-1,))[0]
+
+ if cur_depth < depth:
+ local_overrides[name] = (depth, value)
+
+ # Modify the context with any derived context.
+ for name, (_, value) in local_overrides.items():
+ if value is missing:
+ data.pop(name, None)
+ else:
+ data[name] = value
+
+ return data
+
+
+if sys.version_info >= (3, 7):
+ # tb_next is directly assignable as of Python 3.7
+ def tb_set_next(tb, tb_next):
+ tb.tb_next = tb_next
+ return tb
+
+
+elif PYPY:
+ # PyPy might have special support, and won't work with ctypes.
+ try:
+ import tputil
+ except ImportError:
+ # Without tproxy support, use the original traceback.
+ def tb_set_next(tb, tb_next):
+ return tb
+
+ else:
+ # With tproxy support, create a proxy around the traceback that
+ # returns the new tb_next.
+ def tb_set_next(tb, tb_next):
+ def controller(op):
+ if op.opname == "__getattribute__" and op.args[0] == "tb_next":
+ return tb_next
+
+ return op.delegate()
+
+ return tputil.make_proxy(controller, obj=tb)
+
+
+else:
+ # Use ctypes to assign tb_next at the C level since it's read-only
+ # from Python.
+ import ctypes
+
+ class _CTraceback(ctypes.Structure):
+ _fields_ = [
+ # Extra PyObject slots when compiled with Py_TRACE_REFS.
+ ("PyObject_HEAD", ctypes.c_byte * object().__sizeof__()),
+ # Only care about tb_next as an object, not a traceback.
+ ("tb_next", ctypes.py_object),
+ ]
+
+ def tb_set_next(tb, tb_next):
+ c_tb = _CTraceback.from_address(id(tb))
+
+ # Clear out the old tb_next.
+ if tb.tb_next is not None:
+ c_tb_next = ctypes.py_object(tb.tb_next)
+ c_tb.tb_next = ctypes.py_object()
+ ctypes.pythonapi.Py_DecRef(c_tb_next)
+
+ # Assign the new tb_next.
+ if tb_next is not None:
+ c_tb_next = ctypes.py_object(tb_next)
+ ctypes.pythonapi.Py_IncRef(c_tb_next)
+ c_tb.tb_next = c_tb_next
+
+ return tb
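
The net effect of rewrite_traceback_stack() and fake_traceback() is that exceptions escaping a render point at the template file and line rather than at the compiled module. A minimal sketch of what that looks like from user code:

import traceback

from jinja2 import Environment

env = Environment()
template = env.from_string("line one\n{{ 1 // 0 }}")
try:
    template.render()
except ZeroDivisionError:
    # The rewritten traceback reports '<template>', line 2, in
    # top-level template code, not the generated root() function.
    traceback.print_exc()
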
diff --git a/third_party/python/Jinja2/src/jinja2/defaults.py b/third_party/python/Jinja2/src/jinja2/defaults.py
new file mode 100644
index 0000000000..8e0e7d7710
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/defaults.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+from ._compat import range_type
+from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401
+from .tests import TESTS as DEFAULT_TESTS # noqa: F401
+from .utils import Cycler
+from .utils import generate_lorem_ipsum
+from .utils import Joiner
+from .utils import Namespace
+
+# defaults for the parser / lexer
+BLOCK_START_STRING = "{%"
+BLOCK_END_STRING = "%}"
+VARIABLE_START_STRING = "{{"
+VARIABLE_END_STRING = "}}"
+COMMENT_START_STRING = "{#"
+COMMENT_END_STRING = "#}"
+LINE_STATEMENT_PREFIX = None
+LINE_COMMENT_PREFIX = None
+TRIM_BLOCKS = False
+LSTRIP_BLOCKS = False
+NEWLINE_SEQUENCE = "\n"
+KEEP_TRAILING_NEWLINE = False
+
+# default filters, tests and namespace
+
+DEFAULT_NAMESPACE = {
+ "range": range_type,
+ "dict": dict,
+ "lipsum": generate_lorem_ipsum,
+ "cycler": Cycler,
+ "joiner": Joiner,
+ "namespace": Namespace,
+}
+
+# default policies
+DEFAULT_POLICIES = {
+ "compiler.ascii_str": True,
+ "urlize.rel": "noopener",
+ "urlize.target": None,
+ "truncate.leeway": 5,
+ "json.dumps_function": None,
+ "json.dumps_kwargs": {"sort_keys": True},
+ "ext.i18n.trimmed": False,
+}
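
DEFAULT_NAMESPACE and DEFAULT_POLICIES are copied onto every Environment; policies are plain dict entries that filters and extensions look up at render time, so they can be overridden per environment. A small sketch (the truncate filter, defined in jinja2.filters and not shown here, reads the truncate.leeway policy):

from jinja2 import Environment

env = Environment()
# Disable the default 5-character leeway so truncate(11) actually shortens
# this string instead of returning it unchanged.
env.policies["truncate.leeway"] = 0
print(env.from_string("{{ text | truncate(11) }}").render(text="hello big world"))
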
diff --git a/third_party/python/Jinja2/src/jinja2/environment.py b/third_party/python/Jinja2/src/jinja2/environment.py
new file mode 100644
index 0000000000..8430390eea
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/environment.py
@@ -0,0 +1,1362 @@
+# -*- coding: utf-8 -*-
+"""Classes for managing templates and their runtime and compile time
+options.
+"""
+import os
+import sys
+import weakref
+from functools import partial
+from functools import reduce
+
+from markupsafe import Markup
+
+from . import nodes
+from ._compat import encode_filename
+from ._compat import implements_iterator
+from ._compat import implements_to_string
+from ._compat import iteritems
+from ._compat import PY2
+from ._compat import PYPY
+from ._compat import reraise
+from ._compat import string_types
+from ._compat import text_type
+from .compiler import CodeGenerator
+from .compiler import generate
+from .defaults import BLOCK_END_STRING
+from .defaults import BLOCK_START_STRING
+from .defaults import COMMENT_END_STRING
+from .defaults import COMMENT_START_STRING
+from .defaults import DEFAULT_FILTERS
+from .defaults import DEFAULT_NAMESPACE
+from .defaults import DEFAULT_POLICIES
+from .defaults import DEFAULT_TESTS
+from .defaults import KEEP_TRAILING_NEWLINE
+from .defaults import LINE_COMMENT_PREFIX
+from .defaults import LINE_STATEMENT_PREFIX
+from .defaults import LSTRIP_BLOCKS
+from .defaults import NEWLINE_SEQUENCE
+from .defaults import TRIM_BLOCKS
+from .defaults import VARIABLE_END_STRING
+from .defaults import VARIABLE_START_STRING
+from .exceptions import TemplateNotFound
+from .exceptions import TemplateRuntimeError
+from .exceptions import TemplatesNotFound
+from .exceptions import TemplateSyntaxError
+from .exceptions import UndefinedError
+from .lexer import get_lexer
+from .lexer import TokenStream
+from .nodes import EvalContext
+from .parser import Parser
+from .runtime import Context
+from .runtime import new_context
+from .runtime import Undefined
+from .utils import concat
+from .utils import consume
+from .utils import have_async_gen
+from .utils import import_string
+from .utils import internalcode
+from .utils import LRUCache
+from .utils import missing
+
+# for direct template usage we have up to ten living environments
+_spontaneous_environments = LRUCache(10)
+
+
+def get_spontaneous_environment(cls, *args):
+ """Return a new spontaneous environment. A spontaneous environment
+ is used for templates created directly rather than through an
+ existing environment.
+
+ :param cls: Environment class to create.
+ :param args: Positional arguments passed to environment.
+ """
+ key = (cls, args)
+
+ try:
+ return _spontaneous_environments[key]
+ except KeyError:
+ _spontaneous_environments[key] = env = cls(*args)
+ env.shared = True
+ return env
+
+
+def create_cache(size):
+    """Return the cache object for the given size."""
+ if size == 0:
+ return None
+ if size < 0:
+ return {}
+ return LRUCache(size)
+
+
+def copy_cache(cache):
+ """Create an empty copy of the given cache."""
+ if cache is None:
+ return None
+ elif type(cache) is dict:
+ return {}
+ return LRUCache(cache.capacity)
+
+
+def load_extensions(environment, extensions):
+    """Load the extensions from the list and bind them to the environment.
+    Returns a dict of instantiated extensions.
+ """
+ result = {}
+ for extension in extensions:
+ if isinstance(extension, string_types):
+ extension = import_string(extension)
+ result[extension.identifier] = extension(environment)
+ return result
+
+
+def fail_for_missing_callable(string, name):
+ msg = string % name
+ if isinstance(name, Undefined):
+ try:
+ name._fail_with_undefined_error()
+ except Exception as e:
+ msg = "%s (%s; did you forget to quote the callable name?)" % (msg, e)
+ raise TemplateRuntimeError(msg)
+
+
+def _environment_sanity_check(environment):
+ """Perform a sanity check on the environment."""
+ assert issubclass(
+ environment.undefined, Undefined
+ ), "undefined must be a subclass of undefined because filters depend on it."
+ assert (
+ environment.block_start_string
+ != environment.variable_start_string
+ != environment.comment_start_string
+ ), "block, variable and comment start strings must be different"
+ assert environment.newline_sequence in (
+ "\r",
+ "\r\n",
+ "\n",
+ ), "newline_sequence set to unknown line ending string."
+ return environment
+
+
+class Environment(object):
+ r"""The core component of Jinja is the `Environment`. It contains
+ important shared variables like configuration, filters, tests,
+ globals and others. Instances of this class may be modified if
+ they are not shared and if no template was loaded so far.
+ Modifications on environments after the first template was loaded
+ will lead to surprising effects and undefined behavior.
+
+ Here are the possible initialization parameters:
+
+ `block_start_string`
+ The string marking the beginning of a block. Defaults to ``'{%'``.
+
+ `block_end_string`
+ The string marking the end of a block. Defaults to ``'%}'``.
+
+ `variable_start_string`
+ The string marking the beginning of a print statement.
+ Defaults to ``'{{'``.
+
+ `variable_end_string`
+ The string marking the end of a print statement. Defaults to
+ ``'}}'``.
+
+ `comment_start_string`
+ The string marking the beginning of a comment. Defaults to ``'{#'``.
+
+ `comment_end_string`
+ The string marking the end of a comment. Defaults to ``'#}'``.
+
+ `line_statement_prefix`
+ If given and a string, this will be used as prefix for line based
+ statements. See also :ref:`line-statements`.
+
+ `line_comment_prefix`
+ If given and a string, this will be used as prefix for line based
+ comments. See also :ref:`line-statements`.
+
+ .. versionadded:: 2.2
+
+ `trim_blocks`
+ If this is set to ``True`` the first newline after a block is
+ removed (block, not variable tag!). Defaults to `False`.
+
+ `lstrip_blocks`
+ If this is set to ``True`` leading spaces and tabs are stripped
+ from the start of a line to a block. Defaults to `False`.
+
+ `newline_sequence`
+ The sequence that starts a newline. Must be one of ``'\r'``,
+ ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a
+ useful default for Linux and OS X systems as well as web
+ applications.
+
+ `keep_trailing_newline`
+ Preserve the trailing newline when rendering templates.
+ The default is ``False``, which causes a single newline,
+ if present, to be stripped from the end of the template.
+
+ .. versionadded:: 2.7
+
+ `extensions`
+ List of Jinja extensions to use. This can either be import paths
+ as strings or extension classes. For more information have a
+ look at :ref:`the extensions documentation <jinja-extensions>`.
+
+ `optimized`
+        Whether the optimizer should be enabled. Defaults to ``True``.
+
+ `undefined`
+ :class:`Undefined` or a subclass of it that is used to represent
+ undefined values in the template.
+
+ `finalize`
+ A callable that can be used to process the result of a variable
+ expression before it is output. For example one can convert
+ ``None`` implicitly into an empty string here.
+
+ `autoescape`
+ If set to ``True`` the XML/HTML autoescaping feature is enabled by
+ default. For more details about autoescaping see
+ :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also
+        be a callable that is passed the template name and has to
+        return ``True`` or ``False`` depending on whether autoescape should
+        be enabled by default.
+
+ .. versionchanged:: 2.4
+ `autoescape` can now be a function
+
+ `loader`
+ The template loader for this environment.
+
+ `cache_size`
+ The size of the cache. Per default this is ``400`` which means
+ that if more than 400 templates are loaded the loader will clean
+ out the least recently used template. If the cache size is set to
+ ``0`` templates are recompiled all the time, if the cache size is
+ ``-1`` the cache will not be cleaned.
+
+ .. versionchanged:: 2.8
+ The cache size was increased to 400 from a low 50.
+
+ `auto_reload`
+ Some loaders load templates from locations where the template
+        sources may change (i.e. the file system or a database). If
+        ``auto_reload`` is set to ``True`` (the default), every time a template
+        is requested the loader checks if the source changed and, if so,
+        reloads the template. For higher performance it's possible to
+        disable that.
+
+ `bytecode_cache`
+ If set to a bytecode cache object, this object will provide a
+ cache for the internal Jinja bytecode so that templates don't
+ have to be parsed if they were not changed.
+
+ See :ref:`bytecode-cache` for more information.
+
+ `enable_async`
+        If set to ``True`` this enables async template execution, which allows
+ you to take advantage of newer Python features. This requires
+ Python 3.6 or later.
+ """
+
+ #: if this environment is sandboxed. Modifying this variable won't make
+ #: the environment sandboxed though. For a real sandboxed environment
+ #: have a look at jinja2.sandbox. This flag alone controls the code
+ #: generation by the compiler.
+ sandboxed = False
+
+ #: True if the environment is just an overlay
+ overlayed = False
+
+ #: the environment this environment is linked to if it is an overlay
+ linked_to = None
+
+ #: shared environments have this set to `True`. A shared environment
+ #: must not be modified
+ shared = False
+
+ #: the class that is used for code generation. See
+ #: :class:`~jinja2.compiler.CodeGenerator` for more information.
+ code_generator_class = CodeGenerator
+
+    #: the context class that is used for templates. See
+ #: :class:`~jinja2.runtime.Context` for more information.
+ context_class = Context
+
+ def __init__(
+ self,
+ block_start_string=BLOCK_START_STRING,
+ block_end_string=BLOCK_END_STRING,
+ variable_start_string=VARIABLE_START_STRING,
+ variable_end_string=VARIABLE_END_STRING,
+ comment_start_string=COMMENT_START_STRING,
+ comment_end_string=COMMENT_END_STRING,
+ line_statement_prefix=LINE_STATEMENT_PREFIX,
+ line_comment_prefix=LINE_COMMENT_PREFIX,
+ trim_blocks=TRIM_BLOCKS,
+ lstrip_blocks=LSTRIP_BLOCKS,
+ newline_sequence=NEWLINE_SEQUENCE,
+ keep_trailing_newline=KEEP_TRAILING_NEWLINE,
+ extensions=(),
+ optimized=True,
+ undefined=Undefined,
+ finalize=None,
+ autoescape=False,
+ loader=None,
+ cache_size=400,
+ auto_reload=True,
+ bytecode_cache=None,
+ enable_async=False,
+ ):
+ # !!Important notice!!
+ # The constructor accepts quite a few arguments that should be
+ # passed by keyword rather than position. However it's important to
+ # not change the order of arguments because it's used at least
+ # internally in those cases:
+ # - spontaneous environments (i18n extension and Template)
+ # - unittests
+        # If parameter changes are required only add parameters at the end
+        # and don't change the order (or the defaults!) of the arguments
+        # that already exist.
+
+ # lexer / parser information
+ self.block_start_string = block_start_string
+ self.block_end_string = block_end_string
+ self.variable_start_string = variable_start_string
+ self.variable_end_string = variable_end_string
+ self.comment_start_string = comment_start_string
+ self.comment_end_string = comment_end_string
+ self.line_statement_prefix = line_statement_prefix
+ self.line_comment_prefix = line_comment_prefix
+ self.trim_blocks = trim_blocks
+ self.lstrip_blocks = lstrip_blocks
+ self.newline_sequence = newline_sequence
+ self.keep_trailing_newline = keep_trailing_newline
+
+ # runtime information
+ self.undefined = undefined
+ self.optimized = optimized
+ self.finalize = finalize
+ self.autoescape = autoescape
+
+ # defaults
+ self.filters = DEFAULT_FILTERS.copy()
+ self.tests = DEFAULT_TESTS.copy()
+ self.globals = DEFAULT_NAMESPACE.copy()
+
+ # set the loader provided
+ self.loader = loader
+ self.cache = create_cache(cache_size)
+ self.bytecode_cache = bytecode_cache
+ self.auto_reload = auto_reload
+
+ # configurable policies
+ self.policies = DEFAULT_POLICIES.copy()
+
+ # load extensions
+ self.extensions = load_extensions(self, extensions)
+
+ self.enable_async = enable_async
+ self.is_async = self.enable_async and have_async_gen
+ if self.is_async:
+ # runs patch_all() to enable async support
+ from . import asyncsupport # noqa: F401
+
+ _environment_sanity_check(self)
+
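+    # A minimal construction sketch for the options documented above; the
+    # loader path and template name are placeholders, not part of this module:
+    #
+    #     from jinja2 import Environment, FileSystemLoader
+    #
+    #     env = Environment(
+    #         loader=FileSystemLoader("templates"),
+    #         autoescape=True,      # escape HTML/XML output by default
+    #         trim_blocks=True,     # drop the first newline after a block tag
+    #         lstrip_blocks=True,   # strip leading whitespace before block tags
+    #     )
+    #     env.get_template("hello.html").render(name="World")
+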
+ def add_extension(self, extension):
+ """Adds an extension after the environment was created.
+
+ .. versionadded:: 2.5
+ """
+ self.extensions.update(load_extensions(self, [extension]))
+
+ def extend(self, **attributes):
+ """Add the items to the instance of the environment if they do not exist
+ yet. This is used by :ref:`extensions <writing-extensions>` to register
+ callbacks and configuration values without breaking inheritance.
+ """
+ for key, value in iteritems(attributes):
+ if not hasattr(self, key):
+ setattr(self, key, value)
+
+ def overlay(
+ self,
+ block_start_string=missing,
+ block_end_string=missing,
+ variable_start_string=missing,
+ variable_end_string=missing,
+ comment_start_string=missing,
+ comment_end_string=missing,
+ line_statement_prefix=missing,
+ line_comment_prefix=missing,
+ trim_blocks=missing,
+ lstrip_blocks=missing,
+ extensions=missing,
+ optimized=missing,
+ undefined=missing,
+ finalize=missing,
+ autoescape=missing,
+ loader=missing,
+ cache_size=missing,
+ auto_reload=missing,
+ bytecode_cache=missing,
+ ):
+ """Create a new overlay environment that shares all the data with the
+ current environment except for cache and the overridden attributes.
+ Extensions cannot be removed for an overlayed environment. An overlayed
+ environment automatically gets all the extensions of the environment it
+ is linked to plus optional extra extensions.
+
+ Creating overlays should happen after the initial environment was set
+ up completely. Not all attributes are truly linked, some are just
+ copied over so modifications on the original environment may not shine
+ through.
+ """
+ args = dict(locals())
+ del args["self"], args["cache_size"], args["extensions"]
+
+ rv = object.__new__(self.__class__)
+ rv.__dict__.update(self.__dict__)
+ rv.overlayed = True
+ rv.linked_to = self
+
+ for key, value in iteritems(args):
+ if value is not missing:
+ setattr(rv, key, value)
+
+ if cache_size is not missing:
+ rv.cache = create_cache(cache_size)
+ else:
+ rv.cache = copy_cache(self.cache)
+
+ rv.extensions = {}
+ for key, value in iteritems(self.extensions):
+ rv.extensions[key] = value.bind(rv)
+ if extensions is not missing:
+ rv.extensions.update(load_extensions(rv, extensions))
+
+ return _environment_sanity_check(rv)
+
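+    # A short overlay sketch: the overlay shares the base configuration but
+    # swaps the block delimiters (the delimiter values are placeholders):
+    #
+    #     base = Environment(autoescape=True)
+    #     tex_env = base.overlay(block_start_string="\\BLOCK{", block_end_string="}")
+    #     assert tex_env.linked_to is base and tex_env.overlayed
+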
+ lexer = property(get_lexer, doc="The lexer for this environment.")
+
+ def iter_extensions(self):
+ """Iterates over the extensions by priority."""
+ return iter(sorted(self.extensions.values(), key=lambda x: x.priority))
+
+ def getitem(self, obj, argument):
+ """Get an item or attribute of an object but prefer the item."""
+ try:
+ return obj[argument]
+ except (AttributeError, TypeError, LookupError):
+ if isinstance(argument, string_types):
+ try:
+ attr = str(argument)
+ except Exception:
+ pass
+ else:
+ try:
+ return getattr(obj, attr)
+ except AttributeError:
+ pass
+ return self.undefined(obj=obj, name=argument)
+
+ def getattr(self, obj, attribute):
+ """Get an item or attribute of an object but prefer the attribute.
+ Unlike :meth:`getitem` the attribute *must* be a bytestring.
+ """
+ try:
+ return getattr(obj, attribute)
+ except AttributeError:
+ pass
+ try:
+ return obj[attribute]
+ except (TypeError, LookupError, AttributeError):
+ return self.undefined(obj=obj, name=attribute)
+
+ def call_filter(
+ self, name, value, args=None, kwargs=None, context=None, eval_ctx=None
+ ):
+ """Invokes a filter on a value the same way the compiler does it.
+
+ Note that on Python 3 this might return a coroutine in case the
+ filter is running from an environment in async mode and the filter
+ supports async execution. It's your responsibility to await this
+ if needed.
+
+ .. versionadded:: 2.7
+ """
+ func = self.filters.get(name)
+ if func is None:
+ fail_for_missing_callable("no filter named %r", name)
+ args = [value] + list(args or ())
+ if getattr(func, "contextfilter", False) is True:
+ if context is None:
+ raise TemplateRuntimeError(
+ "Attempted to invoke context filter without context"
+ )
+ args.insert(0, context)
+ elif getattr(func, "evalcontextfilter", False) is True:
+ if eval_ctx is None:
+ if context is not None:
+ eval_ctx = context.eval_ctx
+ else:
+ eval_ctx = EvalContext(self)
+ args.insert(0, eval_ctx)
+ elif getattr(func, "environmentfilter", False) is True:
+ args.insert(0, self)
+ return func(*args, **(kwargs or {}))
+
+ def call_test(self, name, value, args=None, kwargs=None):
+ """Invokes a test on a value the same way the compiler does it.
+
+ .. versionadded:: 2.7
+ """
+ func = self.tests.get(name)
+ if func is None:
+ fail_for_missing_callable("no test named %r", name)
+ return func(value, *(args or ()), **(kwargs or {}))
+
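+    # call_filter and call_test mirror what compiled templates do at runtime;
+    # a quick sketch using built-in filter and test names:
+    #
+    #     env = Environment()
+    #     env.call_filter("upper", "jinja")   # -> "JINJA"
+    #     env.call_test("even", 4)            # -> True
+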
+ @internalcode
+ def parse(self, source, name=None, filename=None):
+ """Parse the sourcecode and return the abstract syntax tree. This
+ tree of nodes is used by the compiler to convert the template into
+ executable source- or bytecode. This is useful for debugging or to
+ extract information from templates.
+
+ If you are :ref:`developing Jinja extensions <writing-extensions>`
+ this gives you a good overview of the node tree generated.
+ """
+ try:
+ return self._parse(source, name, filename)
+ except TemplateSyntaxError:
+ self.handle_exception(source=source)
+
+ def _parse(self, source, name, filename):
+ """Internal parsing function used by `parse` and `compile`."""
+ return Parser(self, source, name, encode_filename(filename)).parse()
+
+ def lex(self, source, name=None, filename=None):
+ """Lex the given sourcecode and return a generator that yields
+ tokens as tuples in the form ``(lineno, token_type, value)``.
+ This can be useful for :ref:`extension development <writing-extensions>`
+ and debugging templates.
+
+ This does not perform preprocessing. If you want the preprocessing
+ of the extensions to be applied you have to filter source through
+ the :meth:`preprocess` method.
+ """
+ source = text_type(source)
+ try:
+ return self.lexer.tokeniter(source, name, filename)
+ except TemplateSyntaxError:
+ self.handle_exception(source=source)
+
+ def preprocess(self, source, name=None, filename=None):
+ """Preprocesses the source with all extensions. This is automatically
+ called for all parsing and compiling methods but *not* for :meth:`lex`
+ because there you usually only want the actual source tokenized.
+ """
+ return reduce(
+ lambda s, e: e.preprocess(s, name, filename),
+ self.iter_extensions(),
+ text_type(source),
+ )
+
+ def _tokenize(self, source, name, filename=None, state=None):
+ """Called by the parser to do the preprocessing and filtering
+ for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
+ """
+ source = self.preprocess(source, name, filename)
+ stream = self.lexer.tokenize(source, name, filename, state)
+ for ext in self.iter_extensions():
+ stream = ext.filter_stream(stream)
+ if not isinstance(stream, TokenStream):
+ stream = TokenStream(stream, name, filename)
+ return stream
+
+ def _generate(self, source, name, filename, defer_init=False):
+ """Internal hook that can be overridden to hook a different generate
+ method in.
+
+ .. versionadded:: 2.5
+ """
+ return generate(
+ source,
+ self,
+ name,
+ filename,
+ defer_init=defer_init,
+ optimized=self.optimized,
+ )
+
+ def _compile(self, source, filename):
+ """Internal hook that can be overridden to hook a different compile
+ method in.
+
+ .. versionadded:: 2.5
+ """
+ return compile(source, filename, "exec")
+
+ @internalcode
+ def compile(self, source, name=None, filename=None, raw=False, defer_init=False):
+ """Compile a node or template source code. The `name` parameter is
+ the load name of the template after it was joined using
+ :meth:`join_path` if necessary, not the filename on the file system.
+        The `filename` parameter is the estimated filename of the template on
+ the file system. If the template came from a database or memory this
+ can be omitted.
+
+ The return value of this method is a python code object. If the `raw`
+ parameter is `True` the return value will be a string with python
+ code equivalent to the bytecode returned otherwise. This method is
+ mainly used internally.
+
+        `defer_init` is used internally to aid the module code generator. This
+        causes the generated code to be importable without the global
+        environment variable being set.
+
+ .. versionadded:: 2.4
+ `defer_init` parameter added.
+ """
+ source_hint = None
+ try:
+ if isinstance(source, string_types):
+ source_hint = source
+ source = self._parse(source, name, filename)
+ source = self._generate(source, name, filename, defer_init=defer_init)
+ if raw:
+ return source
+ if filename is None:
+ filename = "<template>"
+ else:
+ filename = encode_filename(filename)
+ return self._compile(source, filename)
+ except TemplateSyntaxError:
+ self.handle_exception(source=source_hint)
+
+ def compile_expression(self, source, undefined_to_none=True):
+ """A handy helper method that returns a callable that accepts keyword
+ arguments that appear as variables in the expression. If called it
+ returns the result of the expression.
+
+ This is useful if applications want to use the same rules as Jinja
+ in template "configuration files" or similar situations.
+
+ Example usage:
+
+ >>> env = Environment()
+ >>> expr = env.compile_expression('foo == 42')
+ >>> expr(foo=23)
+ False
+ >>> expr(foo=42)
+ True
+
+ Per default the return value is converted to `None` if the
+ expression returns an undefined value. This can be changed
+ by setting `undefined_to_none` to `False`.
+
+ >>> env.compile_expression('var')() is None
+ True
+ >>> env.compile_expression('var', undefined_to_none=False)()
+ Undefined
+
+ .. versionadded:: 2.1
+ """
+ parser = Parser(self, source, state="variable")
+ try:
+ expr = parser.parse_expression()
+ if not parser.stream.eos:
+ raise TemplateSyntaxError(
+ "chunk after expression", parser.stream.current.lineno, None, None
+ )
+ expr.set_environment(self)
+ except TemplateSyntaxError:
+ if sys.exc_info() is not None:
+ self.handle_exception(source=source)
+
+ body = [nodes.Assign(nodes.Name("result", "store"), expr, lineno=1)]
+ template = self.from_string(nodes.Template(body, lineno=1))
+ return TemplateExpression(template, undefined_to_none)
+
+ def compile_templates(
+ self,
+ target,
+ extensions=None,
+ filter_func=None,
+ zip="deflated",
+ log_function=None,
+ ignore_errors=True,
+ py_compile=False,
+ ):
+ """Finds all the templates the loader can find, compiles them
+ and stores them in `target`. If `zip` is `None`, instead of in a
+ zipfile, the templates will be stored in a directory.
+ By default a deflate zip algorithm is used. To switch to
+ the stored algorithm, `zip` can be set to ``'stored'``.
+
+ `extensions` and `filter_func` are passed to :meth:`list_templates`.
+ Each template returned will be compiled to the target folder or
+ zipfile.
+
+ By default template compilation errors are ignored. In case a
+ log function is provided, errors are logged. If you want template
+ syntax errors to abort the compilation you can set `ignore_errors`
+ to `False` and you will get an exception on syntax errors.
+
+ If `py_compile` is set to `True` .pyc files will be written to the
+        target instead of standard .py files. This flag has no effect
+        on PyPy and Python 3, where .pyc files are not picked up automatically
+        and don't give much benefit.
+
+ .. versionadded:: 2.4
+ """
+ from .loaders import ModuleLoader
+
+ if log_function is None:
+
+ def log_function(x):
+ pass
+
+ if py_compile:
+ if not PY2 or PYPY:
+ import warnings
+
+ warnings.warn(
+ "'py_compile=True' has no effect on PyPy or Python"
+ " 3 and will be removed in version 3.0",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ py_compile = False
+ else:
+ import imp
+ import marshal
+
+ py_header = imp.get_magic() + u"\xff\xff\xff\xff".encode("iso-8859-15")
+
+ # Python 3.3 added a source filesize to the header
+ if sys.version_info >= (3, 3):
+ py_header += u"\x00\x00\x00\x00".encode("iso-8859-15")
+
+ def write_file(filename, data):
+ if zip:
+ info = ZipInfo(filename)
+ info.external_attr = 0o755 << 16
+ zip_file.writestr(info, data)
+ else:
+ if isinstance(data, text_type):
+ data = data.encode("utf8")
+
+ with open(os.path.join(target, filename), "wb") as f:
+ f.write(data)
+
+ if zip is not None:
+ from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED
+
+ zip_file = ZipFile(
+ target, "w", dict(deflated=ZIP_DEFLATED, stored=ZIP_STORED)[zip]
+ )
+ log_function('Compiling into Zip archive "%s"' % target)
+ else:
+ if not os.path.isdir(target):
+ os.makedirs(target)
+ log_function('Compiling into folder "%s"' % target)
+
+ try:
+ for name in self.list_templates(extensions, filter_func):
+ source, filename, _ = self.loader.get_source(self, name)
+ try:
+ code = self.compile(source, name, filename, True, True)
+ except TemplateSyntaxError as e:
+ if not ignore_errors:
+ raise
+ log_function('Could not compile "%s": %s' % (name, e))
+ continue
+
+ filename = ModuleLoader.get_module_filename(name)
+
+ if py_compile:
+ c = self._compile(code, encode_filename(filename))
+ write_file(filename + "c", py_header + marshal.dumps(c))
+ log_function('Byte-compiled "%s" as %s' % (name, filename + "c"))
+ else:
+ write_file(filename, code)
+ log_function('Compiled "%s" as %s' % (name, filename))
+ finally:
+ if zip:
+ zip_file.close()
+
+ log_function("Finished compiling templates")
+
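+    # Sketch of ahead-of-time compilation paired with ModuleLoader; the file
+    # and directory names are placeholders:
+    #
+    #     from jinja2 import Environment, FileSystemLoader, ModuleLoader
+    #
+    #     env = Environment(loader=FileSystemLoader("templates"))
+    #     env.compile_templates("compiled.zip", zip="deflated")
+    #     fast_env = Environment(loader=ModuleLoader("compiled.zip"))
+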
+ def list_templates(self, extensions=None, filter_func=None):
+ """Returns a list of templates for this environment. This requires
+        that the loader supports the
+ :meth:`~BaseLoader.list_templates` method.
+
+ If there are other files in the template folder besides the
+ actual templates, the returned list can be filtered. There are two
+ ways: either `extensions` is set to a list of file extensions for
+ templates, or a `filter_func` can be provided which is a callable that
+ is passed a template name and should return `True` if it should end up
+ in the result list.
+
+ If the loader does not support that, a :exc:`TypeError` is raised.
+
+ .. versionadded:: 2.4
+ """
+ names = self.loader.list_templates()
+
+ if extensions is not None:
+ if filter_func is not None:
+ raise TypeError(
+ "either extensions or filter_func can be passed, but not both"
+ )
+
+ def filter_func(x):
+ return "." in x and x.rsplit(".", 1)[1] in extensions
+
+ if filter_func is not None:
+ names = [name for name in names if filter_func(name)]
+
+ return names
+
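+    # Filtering sketch: restrict by extension or pass a predicate, never both
+    # (the extension list and prefix are placeholders):
+    #
+    #     env.list_templates(extensions=["html", "txt"])
+    #     env.list_templates(filter_func=lambda name: name.startswith("emails/"))
+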
+ def handle_exception(self, source=None):
+ """Exception handling helper. This is used internally to either raise
+ rewritten exceptions or return a rendered traceback for the template.
+ """
+ from .debug import rewrite_traceback_stack
+
+ reraise(*rewrite_traceback_stack(source=source))
+
+ def join_path(self, template, parent):
+ """Join a template with the parent. By default all the lookups are
+ relative to the loader root so this method returns the `template`
+ parameter unchanged, but if the paths should be relative to the
+ parent template, this function can be used to calculate the real
+ template name.
+
+ Subclasses may override this method and implement template path
+ joining here.
+ """
+ return template
+
+ @internalcode
+ def _load_template(self, name, globals):
+ if self.loader is None:
+ raise TypeError("no loader for this environment specified")
+ cache_key = (weakref.ref(self.loader), name)
+ if self.cache is not None:
+ template = self.cache.get(cache_key)
+ if template is not None and (
+ not self.auto_reload or template.is_up_to_date
+ ):
+ return template
+ template = self.loader.load(self, name, globals)
+ if self.cache is not None:
+ self.cache[cache_key] = template
+ return template
+
+ @internalcode
+ def get_template(self, name, parent=None, globals=None):
+ """Load a template from the loader. If a loader is configured this
+ method asks the loader for the template and returns a :class:`Template`.
+ If the `parent` parameter is not `None`, :meth:`join_path` is called
+ to get the real template name before loading.
+
+ The `globals` parameter can be used to provide template wide globals.
+ These variables are available in the context at render time.
+
+ If the template does not exist a :exc:`TemplateNotFound` exception is
+ raised.
+
+ .. versionchanged:: 2.4
+ If `name` is a :class:`Template` object it is returned from the
+ function unchanged.
+ """
+ if isinstance(name, Template):
+ return name
+ if parent is not None:
+ name = self.join_path(name, parent)
+ return self._load_template(name, self.make_globals(globals))
+
+ @internalcode
+ def select_template(self, names, parent=None, globals=None):
+ """Works like :meth:`get_template` but tries a number of templates
+ before it fails. If it cannot find any of the templates, it will
+ raise a :exc:`TemplatesNotFound` exception.
+
+ .. versionchanged:: 2.11
+ If names is :class:`Undefined`, an :exc:`UndefinedError` is
+ raised instead. If no templates were found and names
+ contains :class:`Undefined`, the message is more helpful.
+
+ .. versionchanged:: 2.4
+ If `names` contains a :class:`Template` object it is returned
+ from the function unchanged.
+
+ .. versionadded:: 2.3
+ """
+ if isinstance(names, Undefined):
+ names._fail_with_undefined_error()
+
+ if not names:
+ raise TemplatesNotFound(
+ message=u"Tried to select from an empty list " u"of templates."
+ )
+ globals = self.make_globals(globals)
+ for name in names:
+ if isinstance(name, Template):
+ return name
+ if parent is not None:
+ name = self.join_path(name, parent)
+ try:
+ return self._load_template(name, globals)
+ except (TemplateNotFound, UndefinedError):
+ pass
+ raise TemplatesNotFound(names)
+
+ @internalcode
+ def get_or_select_template(self, template_name_or_list, parent=None, globals=None):
+ """Does a typecheck and dispatches to :meth:`select_template`
+ if an iterable of template names is given, otherwise to
+ :meth:`get_template`.
+
+ .. versionadded:: 2.3
+ """
+ if isinstance(template_name_or_list, (string_types, Undefined)):
+ return self.get_template(template_name_or_list, parent, globals)
+ elif isinstance(template_name_or_list, Template):
+ return template_name_or_list
+ return self.select_template(template_name_or_list, parent, globals)
+
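+    # get_template loads a single name while select_template falls back
+    # through a list; the template names are placeholders:
+    #
+    #     page = env.get_template("page.html")
+    #     page = env.select_template(["page_override.html", "page.html"])
+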
+ def from_string(self, source, globals=None, template_class=None):
+ """Load a template from a string. This parses the source given and
+ returns a :class:`Template` object.
+ """
+ globals = self.make_globals(globals)
+ cls = template_class or self.template_class
+ return cls.from_code(self, self.compile(source), globals, None)
+
+ def make_globals(self, d):
+ """Return a dict for the globals."""
+ if not d:
+ return self.globals
+ return dict(self.globals, **d)
+
+
+class Template(object):
+ """The central template object. This class represents a compiled template
+ and is used to evaluate it.
+
+ Normally the template object is generated from an :class:`Environment` but
+ it also has a constructor that makes it possible to create a template
+ instance directly using the constructor. It takes the same arguments as
+ the environment constructor but it's not possible to specify a loader.
+
+ Every template object has a few methods and members that are guaranteed
+    to exist. However, a template object should be considered immutable;
+    modifications on the object are not supported.
+
+ Template objects created from the constructor rather than an environment
+ do have an `environment` attribute that points to a temporary environment
+    that is probably shared with other templates created via the constructor
+    with compatible settings.
+
+ >>> template = Template('Hello {{ name }}!')
+ >>> template.render(name='John Doe') == u'Hello John Doe!'
+ True
+ >>> stream = template.stream(name='John Doe')
+ >>> next(stream) == u'Hello John Doe!'
+ True
+ >>> next(stream)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+ """
+
+ #: Type of environment to create when creating a template directly
+ #: rather than through an existing environment.
+ environment_class = Environment
+
+ def __new__(
+ cls,
+ source,
+ block_start_string=BLOCK_START_STRING,
+ block_end_string=BLOCK_END_STRING,
+ variable_start_string=VARIABLE_START_STRING,
+ variable_end_string=VARIABLE_END_STRING,
+ comment_start_string=COMMENT_START_STRING,
+ comment_end_string=COMMENT_END_STRING,
+ line_statement_prefix=LINE_STATEMENT_PREFIX,
+ line_comment_prefix=LINE_COMMENT_PREFIX,
+ trim_blocks=TRIM_BLOCKS,
+ lstrip_blocks=LSTRIP_BLOCKS,
+ newline_sequence=NEWLINE_SEQUENCE,
+ keep_trailing_newline=KEEP_TRAILING_NEWLINE,
+ extensions=(),
+ optimized=True,
+ undefined=Undefined,
+ finalize=None,
+ autoescape=False,
+ enable_async=False,
+ ):
+ env = get_spontaneous_environment(
+ cls.environment_class,
+ block_start_string,
+ block_end_string,
+ variable_start_string,
+ variable_end_string,
+ comment_start_string,
+ comment_end_string,
+ line_statement_prefix,
+ line_comment_prefix,
+ trim_blocks,
+ lstrip_blocks,
+ newline_sequence,
+ keep_trailing_newline,
+ frozenset(extensions),
+ optimized,
+ undefined,
+ finalize,
+ autoescape,
+ None,
+ 0,
+ False,
+ None,
+ enable_async,
+ )
+ return env.from_string(source, template_class=cls)
+
+ @classmethod
+ def from_code(cls, environment, code, globals, uptodate=None):
+ """Creates a template object from compiled code and the globals. This
+ is used by the loaders and environment to create a template object.
+ """
+ namespace = {"environment": environment, "__file__": code.co_filename}
+ exec(code, namespace)
+ rv = cls._from_namespace(environment, namespace, globals)
+ rv._uptodate = uptodate
+ return rv
+
+ @classmethod
+ def from_module_dict(cls, environment, module_dict, globals):
+ """Creates a template object from a module. This is used by the
+ module loader to create a template object.
+
+ .. versionadded:: 2.4
+ """
+ return cls._from_namespace(environment, module_dict, globals)
+
+ @classmethod
+ def _from_namespace(cls, environment, namespace, globals):
+ t = object.__new__(cls)
+ t.environment = environment
+ t.globals = globals
+ t.name = namespace["name"]
+ t.filename = namespace["__file__"]
+ t.blocks = namespace["blocks"]
+
+ # render function and module
+ t.root_render_func = namespace["root"]
+ t._module = None
+
+ # debug and loader helpers
+ t._debug_info = namespace["debug_info"]
+ t._uptodate = None
+
+ # store the reference
+ namespace["environment"] = environment
+ namespace["__jinja_template__"] = t
+
+ return t
+
+ def render(self, *args, **kwargs):
+ """This method accepts the same arguments as the `dict` constructor:
+ A dict, a dict subclass or some keyword arguments. If no arguments
+ are given the context will be empty. These two calls do the same::
+
+ template.render(knights='that say nih')
+ template.render({'knights': 'that say nih'})
+
+        This will return the rendered template as a unicode string.
+ """
+ vars = dict(*args, **kwargs)
+ try:
+ return concat(self.root_render_func(self.new_context(vars)))
+ except Exception:
+ self.environment.handle_exception()
+
+ def render_async(self, *args, **kwargs):
+ """This works similar to :meth:`render` but returns a coroutine
+ that when awaited returns the entire rendered template string. This
+ requires the async feature to be enabled.
+
+ Example usage::
+
+ await template.render_async(knights='that say nih; asynchronously')
+ """
+ # see asyncsupport for the actual implementation
+ raise NotImplementedError(
+ "This feature is not available for this version of Python"
+ )
+
+ def stream(self, *args, **kwargs):
+ """Works exactly like :meth:`generate` but returns a
+ :class:`TemplateStream`.
+ """
+ return TemplateStream(self.generate(*args, **kwargs))
+
+ def generate(self, *args, **kwargs):
+ """For very large templates it can be useful to not render the whole
+        template at once but evaluate the statements one after another and
+        yield piece by piece. This method does exactly that and returns
+ a generator that yields one item after another as unicode strings.
+
+ It accepts the same arguments as :meth:`render`.
+ """
+ vars = dict(*args, **kwargs)
+ try:
+ for event in self.root_render_func(self.new_context(vars)):
+ yield event
+ except Exception:
+ yield self.environment.handle_exception()
+
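+    # render() builds the whole result at once, generate()/stream() yield it
+    # piece by piece; a small sketch with a placeholder template:
+    #
+    #     t = Template("{% for i in items %}{{ i }} {% endfor %}")
+    #     t.render(items=range(3))                  # -> "0 1 2 "
+    #     for chunk in t.generate(items=range(3)):  # yielded incrementally
+    #         ...
+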
+ def generate_async(self, *args, **kwargs):
+ """An async version of :meth:`generate`. Works very similarly but
+ returns an async iterator instead.
+ """
+ # see asyncsupport for the actual implementation
+ raise NotImplementedError(
+ "This feature is not available for this version of Python"
+ )
+
+ def new_context(self, vars=None, shared=False, locals=None):
+ """Create a new :class:`Context` for this template. The vars
+ provided will be passed to the template. Per default the globals
+ are added to the context. If shared is set to `True` the data
+ is passed as is to the context without adding the globals.
+
+ `locals` can be a dict of local variables for internal usage.
+ """
+ return new_context(
+ self.environment, self.name, self.blocks, vars, shared, self.globals, locals
+ )
+
+ def make_module(self, vars=None, shared=False, locals=None):
+ """This method works like the :attr:`module` attribute when called
+ without arguments but it will evaluate the template on every call
+ rather than caching it. It's also possible to provide
+ a dict which is then used as context. The arguments are the same
+ as for the :meth:`new_context` method.
+ """
+ return TemplateModule(self, self.new_context(vars, shared, locals))
+
+ def make_module_async(self, vars=None, shared=False, locals=None):
+ """As template module creation can invoke template code for
+ asynchronous executions this method must be used instead of the
+ normal :meth:`make_module` one. Likewise the module attribute
+ becomes unavailable in async mode.
+ """
+ # see asyncsupport for the actual implementation
+ raise NotImplementedError(
+ "This feature is not available for this version of Python"
+ )
+
+ @internalcode
+ def _get_default_module(self):
+ if self._module is not None:
+ return self._module
+ self._module = rv = self.make_module()
+ return rv
+
+ @property
+ def module(self):
+ """The template as module. This is used for imports in the
+ template runtime but is also useful if one wants to access
+ exported template variables from the Python layer:
+
+ >>> t = Template('{% macro foo() %}42{% endmacro %}23')
+ >>> str(t.module)
+ '23'
+ >>> t.module.foo() == u'42'
+ True
+
+ This attribute is not available if async mode is enabled.
+ """
+ return self._get_default_module()
+
+ def get_corresponding_lineno(self, lineno):
+ """Return the source line number of a line number in the
+ generated bytecode as they are not in sync.
+ """
+ for template_line, code_line in reversed(self.debug_info):
+ if code_line <= lineno:
+ return template_line
+ return 1
+
+ @property
+ def is_up_to_date(self):
+ """If this variable is `False` there is a newer version available."""
+ if self._uptodate is None:
+ return True
+ return self._uptodate()
+
+ @property
+ def debug_info(self):
+ """The debug info mapping."""
+ if self._debug_info:
+ return [tuple(map(int, x.split("="))) for x in self._debug_info.split("&")]
+ return []
+
+ def __repr__(self):
+ if self.name is None:
+ name = "memory:%x" % id(self)
+ else:
+ name = repr(self.name)
+ return "<%s %s>" % (self.__class__.__name__, name)
+
+
+@implements_to_string
+class TemplateModule(object):
+ """Represents an imported template. All the exported names of the
+ template are available as attributes on this object. Additionally
+    converting it into a unicode or byte string renders the contents.
+ """
+
+ def __init__(self, template, context, body_stream=None):
+ if body_stream is None:
+ if context.environment.is_async:
+ raise RuntimeError(
+ "Async mode requires a body stream "
+ "to be passed to a template module. Use "
+ "the async methods of the API you are "
+ "using."
+ )
+ body_stream = list(template.root_render_func(context))
+ self._body_stream = body_stream
+ self.__dict__.update(context.get_exported())
+ self.__name__ = template.name
+
+ def __html__(self):
+ return Markup(concat(self._body_stream))
+
+ def __str__(self):
+ return concat(self._body_stream)
+
+ def __repr__(self):
+ if self.__name__ is None:
+ name = "memory:%x" % id(self)
+ else:
+ name = repr(self.__name__)
+ return "<%s %s>" % (self.__class__.__name__, name)
+
+
+class TemplateExpression(object):
+ """The :meth:`jinja2.Environment.compile_expression` method returns an
+    instance of this object. It provides expression-like access to the
+    template it wraps.
+ """
+
+ def __init__(self, template, undefined_to_none):
+ self._template = template
+ self._undefined_to_none = undefined_to_none
+
+ def __call__(self, *args, **kwargs):
+ context = self._template.new_context(dict(*args, **kwargs))
+ consume(self._template.root_render_func(context))
+ rv = context.vars["result"]
+ if self._undefined_to_none and isinstance(rv, Undefined):
+ rv = None
+ return rv
+
+
+@implements_iterator
+class TemplateStream(object):
+ """A template stream works pretty much like an ordinary python generator
+ but it can buffer multiple items to reduce the number of total iterations.
+ Per default the output is unbuffered which means that for every unbuffered
+ instruction in the template one unicode string is yielded.
+
+ If buffering is enabled with a buffer size of 5, five items are combined
+ into a new unicode string. This is mainly useful if you are streaming
+ big templates to a client via WSGI which flushes after each iteration.
+ """
+
+ def __init__(self, gen):
+ self._gen = gen
+ self.disable_buffering()
+
+ def dump(self, fp, encoding=None, errors="strict"):
+ """Dump the complete stream into a file or file-like object.
+        Per default unicode strings are written; if you want to encode
+ before writing specify an `encoding`.
+
+ Example usage::
+
+ Template('Hello {{ name }}!').stream(name='foo').dump('hello.html')
+ """
+ close = False
+ if isinstance(fp, string_types):
+ if encoding is None:
+ encoding = "utf-8"
+ fp = open(fp, "wb")
+ close = True
+ try:
+ if encoding is not None:
+ iterable = (x.encode(encoding, errors) for x in self)
+ else:
+ iterable = self
+ if hasattr(fp, "writelines"):
+ fp.writelines(iterable)
+ else:
+ for item in iterable:
+ fp.write(item)
+ finally:
+ if close:
+ fp.close()
+
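+    # Streaming sketch: buffer a few events per write and dump the result to
+    # a file (the template source and file name are placeholders):
+    #
+    #     stream = Template("Hello {{ name }}!").stream(name="foo")
+    #     stream.enable_buffering(size=5)
+    #     stream.dump("rendered.html", encoding="utf-8")
+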
+ def disable_buffering(self):
+ """Disable the output buffering."""
+ self._next = partial(next, self._gen)
+ self.buffered = False
+
+ def _buffered_generator(self, size):
+ buf = []
+ c_size = 0
+ push = buf.append
+
+ while 1:
+ try:
+ while c_size < size:
+ c = next(self._gen)
+ push(c)
+ if c:
+ c_size += 1
+ except StopIteration:
+ if not c_size:
+ return
+ yield concat(buf)
+ del buf[:]
+ c_size = 0
+
+ def enable_buffering(self, size=5):
+ """Enable buffering. Buffer `size` items before yielding them."""
+ if size <= 1:
+ raise ValueError("buffer size too small")
+
+ self.buffered = True
+ self._next = partial(next, self._buffered_generator(size))
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return self._next()
+
+
+# hook in default template class. if anyone reads this comment: ignore that
+# it's possible to use custom templates ;-)
+Environment.template_class = Template
diff --git a/third_party/python/Jinja2/src/jinja2/exceptions.py b/third_party/python/Jinja2/src/jinja2/exceptions.py
new file mode 100644
index 0000000000..0bf2003e30
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/exceptions.py
@@ -0,0 +1,177 @@
+# -*- coding: utf-8 -*-
+from ._compat import imap
+from ._compat import implements_to_string
+from ._compat import PY2
+from ._compat import text_type
+
+
+class TemplateError(Exception):
+ """Baseclass for all template errors."""
+
+ if PY2:
+
+ def __init__(self, message=None):
+ if message is not None:
+ message = text_type(message).encode("utf-8")
+ Exception.__init__(self, message)
+
+ @property
+ def message(self):
+ if self.args:
+ message = self.args[0]
+ if message is not None:
+ return message.decode("utf-8", "replace")
+
+ def __unicode__(self):
+ return self.message or u""
+
+ else:
+
+ def __init__(self, message=None):
+ Exception.__init__(self, message)
+
+ @property
+ def message(self):
+ if self.args:
+ message = self.args[0]
+ if message is not None:
+ return message
+
+
+@implements_to_string
+class TemplateNotFound(IOError, LookupError, TemplateError):
+ """Raised if a template does not exist.
+
+ .. versionchanged:: 2.11
+ If the given name is :class:`Undefined` and no message was
+ provided, an :exc:`UndefinedError` is raised.
+ """
+
+ # looks weird, but removes the warning descriptor that just
+ # bogusly warns us about message being deprecated
+ message = None
+
+ def __init__(self, name, message=None):
+ IOError.__init__(self, name)
+
+ if message is None:
+ from .runtime import Undefined
+
+ if isinstance(name, Undefined):
+ name._fail_with_undefined_error()
+
+ message = name
+
+ self.message = message
+ self.name = name
+ self.templates = [name]
+
+ def __str__(self):
+ return self.message
+
+
+class TemplatesNotFound(TemplateNotFound):
+ """Like :class:`TemplateNotFound` but raised if multiple templates
+ are selected. This is a subclass of :class:`TemplateNotFound`
+ exception, so just catching the base exception will catch both.
+
+ .. versionchanged:: 2.11
+ If a name in the list of names is :class:`Undefined`, a message
+ about it being undefined is shown rather than the empty string.
+
+ .. versionadded:: 2.2
+ """
+
+ def __init__(self, names=(), message=None):
+ if message is None:
+ from .runtime import Undefined
+
+ parts = []
+
+ for name in names:
+ if isinstance(name, Undefined):
+ parts.append(name._undefined_message)
+ else:
+ parts.append(name)
+
+ message = u"none of the templates given were found: " + u", ".join(
+ imap(text_type, parts)
+ )
+ TemplateNotFound.__init__(self, names and names[-1] or None, message)
+ self.templates = list(names)
+
+
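+# Since TemplatesNotFound subclasses TemplateNotFound, catching the base class
+# covers both the single-name and the multi-name case; a brief sketch with
+# placeholder template names:
+#
+#     try:
+#         env.select_template(["missing_a.html", "missing_b.html"])
+#     except TemplateNotFound as exc:
+#         print(exc.templates)   # every name that was tried
+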
+@implements_to_string
+class TemplateSyntaxError(TemplateError):
+ """Raised to tell the user that there is a problem with the template."""
+
+ def __init__(self, message, lineno, name=None, filename=None):
+ TemplateError.__init__(self, message)
+ self.lineno = lineno
+ self.name = name
+ self.filename = filename
+ self.source = None
+
+ # this is set to True if the debug.translate_syntax_error
+ # function translated the syntax error into a new traceback
+ self.translated = False
+
+ def __str__(self):
+ # for translated errors we only return the message
+ if self.translated:
+ return self.message
+
+ # otherwise attach some stuff
+ location = "line %d" % self.lineno
+ name = self.filename or self.name
+ if name:
+ location = 'File "%s", %s' % (name, location)
+ lines = [self.message, " " + location]
+
+ # if the source is set, add the line to the output
+ if self.source is not None:
+ try:
+ line = self.source.splitlines()[self.lineno - 1]
+ except IndexError:
+ line = None
+ if line:
+ lines.append(" " + line.strip())
+
+ return u"\n".join(lines)
+
+ def __reduce__(self):
+ # https://bugs.python.org/issue1692335 Exceptions that take
+ # multiple required arguments have problems with pickling.
+ # Without this, raises TypeError: __init__() missing 1 required
+ # positional argument: 'lineno'
+ return self.__class__, (self.message, self.lineno, self.name, self.filename)
+
+
+class TemplateAssertionError(TemplateSyntaxError):
+ """Like a template syntax error, but covers cases where something in the
+ template caused an error at compile time that wasn't necessarily caused
+ by a syntax error. However it's a direct subclass of
+ :exc:`TemplateSyntaxError` and has the same attributes.
+ """
+
+
+class TemplateRuntimeError(TemplateError):
+ """A generic runtime error in the template engine. Under some situations
+ Jinja may raise this exception.
+ """
+
+
+class UndefinedError(TemplateRuntimeError):
+ """Raised if a template tries to operate on :class:`Undefined`."""
+
+
+class SecurityError(TemplateRuntimeError):
+ """Raised if a template tries to do something insecure if the
+ sandbox is enabled.
+ """
+
+
+class FilterArgumentError(TemplateRuntimeError):
+ """This error is raised if a filter was called with inappropriate
+    arguments.
+ """
diff --git a/third_party/python/Jinja2/src/jinja2/ext.py b/third_party/python/Jinja2/src/jinja2/ext.py
new file mode 100644
index 0000000000..9141be4dac
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/ext.py
@@ -0,0 +1,704 @@
+# -*- coding: utf-8 -*-
+"""Extension API for adding custom tags and behavior."""
+import pprint
+import re
+from sys import version_info
+
+from markupsafe import Markup
+
+from . import nodes
+from ._compat import iteritems
+from ._compat import string_types
+from ._compat import with_metaclass
+from .defaults import BLOCK_END_STRING
+from .defaults import BLOCK_START_STRING
+from .defaults import COMMENT_END_STRING
+from .defaults import COMMENT_START_STRING
+from .defaults import KEEP_TRAILING_NEWLINE
+from .defaults import LINE_COMMENT_PREFIX
+from .defaults import LINE_STATEMENT_PREFIX
+from .defaults import LSTRIP_BLOCKS
+from .defaults import NEWLINE_SEQUENCE
+from .defaults import TRIM_BLOCKS
+from .defaults import VARIABLE_END_STRING
+from .defaults import VARIABLE_START_STRING
+from .environment import Environment
+from .exceptions import TemplateAssertionError
+from .exceptions import TemplateSyntaxError
+from .nodes import ContextReference
+from .runtime import concat
+from .utils import contextfunction
+from .utils import import_string
+
+# the only really useful gettext functions for a Jinja template. Note
+# that ugettext must be assigned to gettext as Jinja doesn't support
+# non-unicode strings.
+GETTEXT_FUNCTIONS = ("_", "gettext", "ngettext")
+
+_ws_re = re.compile(r"\s*\n\s*")
+
+
+class ExtensionRegistry(type):
+ """Gives the extension an unique identifier."""
+
+ def __new__(mcs, name, bases, d):
+ rv = type.__new__(mcs, name, bases, d)
+ rv.identifier = rv.__module__ + "." + rv.__name__
+ return rv
+
+
+class Extension(with_metaclass(ExtensionRegistry, object)):
+ """Extensions can be used to add extra functionality to the Jinja template
+ system at the parser level. Custom extensions are bound to an environment
+ but may not store environment specific data on `self`. The reason for
+ this is that an extension can be bound to another environment (for
+ overlays) by creating a copy and reassigning the `environment` attribute.
+
+ As extensions are created by the environment they cannot accept any
+ arguments for configuration. One may want to work around that by using
+ a factory function, but that is not possible as extensions are identified
+    by their import name. The correct way to configure the extension is
+    storing the configuration values on the environment. Because the
+    environment then ends up acting as central configuration storage, the
+    attribute names may clash, which is why extensions have to ensure that
+    the names they choose for configuration are not too generic. ``prefix``
+    for example is a terrible name; ``fragment_cache_prefix`` on the other
+    hand is a good name as it includes the name of the extension (fragment
+    cache).
+ """
+
+ #: if this extension parses this is the list of tags it's listening to.
+ tags = set()
+
+ #: the priority of that extension. This is especially useful for
+ #: extensions that preprocess values. A lower value means higher
+ #: priority.
+ #:
+ #: .. versionadded:: 2.4
+ priority = 100
+
+ def __init__(self, environment):
+ self.environment = environment
+
+ def bind(self, environment):
+ """Create a copy of this extension bound to another environment."""
+ rv = object.__new__(self.__class__)
+ rv.__dict__.update(self.__dict__)
+ rv.environment = environment
+ return rv
+
+ def preprocess(self, source, name, filename=None):
+ """This method is called before the actual lexing and can be used to
+ preprocess the source. The `filename` is optional. The return value
+ must be the preprocessed source.
+ """
+ return source
+
+ def filter_stream(self, stream):
+ """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
+ to filter tokens returned. This method has to return an iterable of
+ :class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a
+ :class:`~jinja2.lexer.TokenStream`.
+ """
+ return stream
+
+ def parse(self, parser):
+ """If any of the :attr:`tags` matched this method is called with the
+ parser as first argument. The token the parser stream is pointing at
+ is the name token that matched. This method has to return one or a
+ list of multiple nodes.
+ """
+ raise NotImplementedError()
+
+ def attr(self, name, lineno=None):
+ """Return an attribute node for the current extension. This is useful
+        to pass constants defined on the extension to the generated template code.
+
+ ::
+
+ self.attr('_my_attribute', lineno=lineno)
+ """
+ return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
+
+ def call_method(
+ self, name, args=None, kwargs=None, dyn_args=None, dyn_kwargs=None, lineno=None
+ ):
+ """Call a method of the extension. This is a shortcut for
+ :meth:`attr` + :class:`jinja2.nodes.Call`.
+ """
+ if args is None:
+ args = []
+ if kwargs is None:
+ kwargs = []
+ return nodes.Call(
+ self.attr(name, lineno=lineno),
+ args,
+ kwargs,
+ dyn_args,
+ dyn_kwargs,
+ lineno=lineno,
+ )
+
+
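+# A compact sketch of a custom tag built on the hooks above; the extension
+# class, tag name and rendered value are illustrative only:
+#
+#     class CurrentYearExtension(Extension):
+#         tags = {"year"}
+#
+#         def parse(self, parser):
+#             lineno = next(parser.stream).lineno
+#             call = self.call_method("_render", [], lineno=lineno)
+#             return nodes.Output([call], lineno=lineno)
+#
+#         def _render(self):
+#             import datetime
+#             return str(datetime.date.today().year)
+#
+#     env = Environment(extensions=[CurrentYearExtension])
+#     env.from_string("{% year %}").render()
+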
+@contextfunction
+def _gettext_alias(__context, *args, **kwargs):
+ return __context.call(__context.resolve("gettext"), *args, **kwargs)
+
+
+def _make_new_gettext(func):
+ @contextfunction
+ def gettext(__context, __string, **variables):
+ rv = __context.call(func, __string)
+ if __context.eval_ctx.autoescape:
+ rv = Markup(rv)
+ # Always treat as a format string, even if there are no
+ # variables. This makes translation strings more consistent
+        # and predictable. This requires escaping literal '%' characters as
+        # '%%' in the string (the trans tag does this while parsing its body).
+ return rv % variables
+
+ return gettext
+
+
+def _make_new_ngettext(func):
+ @contextfunction
+ def ngettext(__context, __singular, __plural, __num, **variables):
+ variables.setdefault("num", __num)
+ rv = __context.call(func, __singular, __plural, __num)
+ if __context.eval_ctx.autoescape:
+ rv = Markup(rv)
+ # Always treat as a format string, see gettext comment above.
+ return rv % variables
+
+ return ngettext
+
+
+class InternationalizationExtension(Extension):
+ """This extension adds gettext support to Jinja."""
+
+ tags = {"trans"}
+
+ # TODO: the i18n extension is currently reevaluating values in a few
+ # situations. Take this example:
+ # {% trans count=something() %}{{ count }} foo{% pluralize
+    # %}{{ count }} foos{% endtrans %}
+ # something is called twice here. One time for the gettext value and
+ # the other time for the n-parameter of the ngettext function.
+
+ def __init__(self, environment):
+ Extension.__init__(self, environment)
+ environment.globals["_"] = _gettext_alias
+ environment.extend(
+ install_gettext_translations=self._install,
+ install_null_translations=self._install_null,
+ install_gettext_callables=self._install_callables,
+ uninstall_gettext_translations=self._uninstall,
+ extract_translations=self._extract,
+ newstyle_gettext=False,
+ )
+
+ def _install(self, translations, newstyle=None):
+ gettext = getattr(translations, "ugettext", None)
+ if gettext is None:
+ gettext = translations.gettext
+ ngettext = getattr(translations, "ungettext", None)
+ if ngettext is None:
+ ngettext = translations.ngettext
+ self._install_callables(gettext, ngettext, newstyle)
+
+ def _install_null(self, newstyle=None):
+ self._install_callables(
+ lambda x: x, lambda s, p, n: (n != 1 and (p,) or (s,))[0], newstyle
+ )
+
+ def _install_callables(self, gettext, ngettext, newstyle=None):
+ if newstyle is not None:
+ self.environment.newstyle_gettext = newstyle
+ if self.environment.newstyle_gettext:
+ gettext = _make_new_gettext(gettext)
+ ngettext = _make_new_ngettext(ngettext)
+ self.environment.globals.update(gettext=gettext, ngettext=ngettext)
+
+ def _uninstall(self, translations):
+ for key in "gettext", "ngettext":
+ self.environment.globals.pop(key, None)
+
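+    # Enabling i18n typically looks like this; the translations object would
+    # come from e.g. gettext or Babel (the null fallback is shown here):
+    #
+    #     env = Environment(extensions=["jinja2.ext.i18n"])
+    #     env.install_null_translations(newstyle=True)
+    #     # or: env.install_gettext_translations(translations, newstyle=True)
+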
+ def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS):
+ if isinstance(source, string_types):
+ source = self.environment.parse(source)
+ return extract_from_ast(source, gettext_functions)
+
+ def parse(self, parser):
+ """Parse a translatable tag."""
+ lineno = next(parser.stream).lineno
+ num_called_num = False
+
+ # find all the variables referenced. Additionally a variable can be
+ # defined in the body of the trans block too, but this is checked at
+        # a later stage.
+ plural_expr = None
+ plural_expr_assignment = None
+ variables = {}
+ trimmed = None
+ while parser.stream.current.type != "block_end":
+ if variables:
+ parser.stream.expect("comma")
+
+ # skip colon for python compatibility
+ if parser.stream.skip_if("colon"):
+ break
+
+ name = parser.stream.expect("name")
+ if name.value in variables:
+ parser.fail(
+ "translatable variable %r defined twice." % name.value,
+ name.lineno,
+ exc=TemplateAssertionError,
+ )
+
+ # expressions
+ if parser.stream.current.type == "assign":
+ next(parser.stream)
+ variables[name.value] = var = parser.parse_expression()
+ elif trimmed is None and name.value in ("trimmed", "notrimmed"):
+ trimmed = name.value == "trimmed"
+ continue
+ else:
+ variables[name.value] = var = nodes.Name(name.value, "load")
+
+ if plural_expr is None:
+ if isinstance(var, nodes.Call):
+ plural_expr = nodes.Name("_trans", "load")
+ variables[name.value] = plural_expr
+ plural_expr_assignment = nodes.Assign(
+ nodes.Name("_trans", "store"), var
+ )
+ else:
+ plural_expr = var
+ num_called_num = name.value == "num"
+
+ parser.stream.expect("block_end")
+
+ plural = None
+ have_plural = False
+ referenced = set()
+
+ # now parse until endtrans or pluralize
+ singular_names, singular = self._parse_block(parser, True)
+ if singular_names:
+ referenced.update(singular_names)
+ if plural_expr is None:
+ plural_expr = nodes.Name(singular_names[0], "load")
+ num_called_num = singular_names[0] == "num"
+
+ # if we have a pluralize block, we parse that too
+ if parser.stream.current.test("name:pluralize"):
+ have_plural = True
+ next(parser.stream)
+ if parser.stream.current.type != "block_end":
+ name = parser.stream.expect("name")
+ if name.value not in variables:
+ parser.fail(
+ "unknown variable %r for pluralization" % name.value,
+ name.lineno,
+ exc=TemplateAssertionError,
+ )
+ plural_expr = variables[name.value]
+ num_called_num = name.value == "num"
+ parser.stream.expect("block_end")
+ plural_names, plural = self._parse_block(parser, False)
+ next(parser.stream)
+ referenced.update(plural_names)
+ else:
+ next(parser.stream)
+
+ # register free names as simple name expressions
+ for var in referenced:
+ if var not in variables:
+ variables[var] = nodes.Name(var, "load")
+
+ if not have_plural:
+ plural_expr = None
+ elif plural_expr is None:
+ parser.fail("pluralize without variables", lineno)
+
+ if trimmed is None:
+ trimmed = self.environment.policies["ext.i18n.trimmed"]
+ if trimmed:
+ singular = self._trim_whitespace(singular)
+ if plural:
+ plural = self._trim_whitespace(plural)
+
+ node = self._make_node(
+ singular,
+ plural,
+ variables,
+ plural_expr,
+ bool(referenced),
+ num_called_num and have_plural,
+ )
+ node.set_lineno(lineno)
+ if plural_expr_assignment is not None:
+ return [plural_expr_assignment, node]
+ else:
+ return node
+
+ def _trim_whitespace(self, string, _ws_re=_ws_re):
+ return _ws_re.sub(" ", string.strip())
+
+ def _parse_block(self, parser, allow_pluralize):
+ """Parse until the next block tag with a given name."""
+ referenced = []
+ buf = []
+ while 1:
+ if parser.stream.current.type == "data":
+ buf.append(parser.stream.current.value.replace("%", "%%"))
+ next(parser.stream)
+ elif parser.stream.current.type == "variable_begin":
+ next(parser.stream)
+ name = parser.stream.expect("name").value
+ referenced.append(name)
+ buf.append("%%(%s)s" % name)
+ parser.stream.expect("variable_end")
+ elif parser.stream.current.type == "block_begin":
+ next(parser.stream)
+ if parser.stream.current.test("name:endtrans"):
+ break
+ elif parser.stream.current.test("name:pluralize"):
+ if allow_pluralize:
+ break
+ parser.fail(
+ "a translatable section can have only one pluralize section"
+ )
+ parser.fail(
+ "control structures in translatable sections are not allowed"
+ )
+ elif parser.stream.eos:
+ parser.fail("unclosed translation block")
+ else:
+ raise RuntimeError("internal parser error")
+
+ return referenced, concat(buf)
+
+ def _make_node(
+ self, singular, plural, variables, plural_expr, vars_referenced, num_called_num
+ ):
+ """Generates a useful node from the data provided."""
+ # no variables referenced? then the % escaping done while parsing is
+ # only needed for old style gettext calls with variables, so undo it.
+ if not vars_referenced and not self.environment.newstyle_gettext:
+ singular = singular.replace("%%", "%")
+ if plural:
+ plural = plural.replace("%%", "%")
+
+ # singular only:
+ if plural_expr is None:
+ gettext = nodes.Name("gettext", "load")
+ node = nodes.Call(gettext, [nodes.Const(singular)], [], None, None)
+
+ # singular and plural
+ else:
+ ngettext = nodes.Name("ngettext", "load")
+ node = nodes.Call(
+ ngettext,
+ [nodes.Const(singular), nodes.Const(plural), plural_expr],
+ [],
+ None,
+ None,
+ )
+
+ # in case newstyle gettext is used, the method is powerful
+ # enough to handle the variable expansion and autoescape
+ # handling itself
+ if self.environment.newstyle_gettext:
+ for key, value in iteritems(variables):
+ # the function adds that later anyways in case num was
+ # called num, so just skip it.
+ if num_called_num and key == "num":
+ continue
+ node.kwargs.append(nodes.Keyword(key, value))
+
+ # otherwise do that here
+ else:
+ # mark the return value as safe if we are in an
+ # environment with autoescaping turned on
+ node = nodes.MarkSafeIfAutoescape(node)
+ if variables:
+ node = nodes.Mod(
+ node,
+ nodes.Dict(
+ [
+ nodes.Pair(nodes.Const(key), value)
+ for key, value in variables.items()
+ ]
+ ),
+ )
+ return nodes.Output([node])
+
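+ # Illustrative note (not upstream code): for a block such as
+ # ``{% trans name=user.name %}Hello {{ name }}!{% endtrans %}`` the node
+ # built above corresponds roughly to one of these calls, depending on
+ # whether new style gettext is enabled (names are hypothetical):
+ #
+ #   gettext("Hello %(name)s!") % {"name": name}    # old style
+ #   gettext("Hello %(name)s!", name=name)          # new style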
+
+class ExprStmtExtension(Extension):
+ """Adds a `do` tag to Jinja that works like the print statement just
+ that it doesn't print the return value.
+ """
+
+ tags = set(["do"])
+
+ def parse(self, parser):
+ node = nodes.ExprStmt(lineno=next(parser.stream).lineno)
+ node.node = parser.parse_tuple()
+ return node
+
+
+class LoopControlExtension(Extension):
+ """Adds break and continue to the template engine."""
+
+ tags = set(["break", "continue"])
+
+ def parse(self, parser):
+ token = next(parser.stream)
+ if token.value == "break":
+ return nodes.Break(lineno=token.lineno)
+ return nodes.Continue(lineno=token.lineno)
+
+
+class WithExtension(Extension):
+ pass
+
+
+class AutoEscapeExtension(Extension):
+ pass
+
+
+class DebugExtension(Extension):
+ """A ``{% debug %}`` tag that dumps the available variables,
+ filters, and tests.
+
+ .. code-block:: html+jinja
+
+ <pre>{% debug %}</pre>
+
+ .. code-block:: text
+
+ {'context': {'cycler': <class 'jinja2.utils.Cycler'>,
+ ...,
+ 'namespace': <class 'jinja2.utils.Namespace'>},
+ 'filters': ['abs', 'attr', 'batch', 'capitalize', 'center', 'count', 'd',
+ ..., 'urlencode', 'urlize', 'wordcount', 'wordwrap', 'xmlattr'],
+ 'tests': ['!=', '<', '<=', '==', '>', '>=', 'callable', 'defined',
+ ..., 'odd', 'sameas', 'sequence', 'string', 'undefined', 'upper']}
+
+ .. versionadded:: 2.11.0
+ """
+
+ tags = {"debug"}
+
+ def parse(self, parser):
+ lineno = parser.stream.expect("name:debug").lineno
+ context = ContextReference()
+ result = self.call_method("_render", [context], lineno=lineno)
+ return nodes.Output([result], lineno=lineno)
+
+ def _render(self, context):
+ result = {
+ "context": context.get_all(),
+ "filters": sorted(self.environment.filters.keys()),
+ "tests": sorted(self.environment.tests.keys()),
+ }
+
+ # Set the depth since the intent is to show the top few names.
+ if version_info[:2] >= (3, 4):
+ return pprint.pformat(result, depth=3, compact=True)
+ else:
+ return pprint.pformat(result, depth=3)
+
+
+def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS, babel_style=True):
+ """Extract localizable strings from the given template node. Per
+ default this function returns matches in babel style that means non string
+ parameters as well as keyword arguments are returned as `None`. This
+ allows Babel to figure out what you really meant if you are using
+ gettext functions that allow keyword arguments for placeholder expansion.
+ If you don't want that behavior set the `babel_style` parameter to `False`
+ which causes only strings to be returned and parameters are always stored
+ in tuples. As a consequence invalid gettext calls (calls without a single
+ string parameter or string parameters after non-string parameters) are
+ skipped.
+
+ This example explains the behavior:
+
+ >>> from jinja2 import Environment
+ >>> env = Environment()
+ >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
+ >>> list(extract_from_ast(node))
+ [(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))]
+ >>> list(extract_from_ast(node, babel_style=False))
+ [(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))]
+
+ For every string found this function yields a ``(lineno, function,
+ message)`` tuple, where:
+
+ * ``lineno`` is the number of the line on which the string was found,
+ * ``function`` is the name of the ``gettext`` function used (if the
+ string was extracted from embedded Python code), and
+ * ``message`` is the string itself (a ``unicode`` object, or a tuple
+ of ``unicode`` objects for functions with multiple string arguments).
+
+ This extraction function operates on the AST and is therefore unable
+ to extract any comments. For comment support you have to use the Babel
+ extraction interface or extract comments yourself.
+ """
+ for node in node.find_all(nodes.Call):
+ if (
+ not isinstance(node.node, nodes.Name)
+ or node.node.name not in gettext_functions
+ ):
+ continue
+
+ strings = []
+ for arg in node.args:
+ if isinstance(arg, nodes.Const) and isinstance(arg.value, string_types):
+ strings.append(arg.value)
+ else:
+ strings.append(None)
+
+ for _ in node.kwargs:
+ strings.append(None)
+ if node.dyn_args is not None:
+ strings.append(None)
+ if node.dyn_kwargs is not None:
+ strings.append(None)
+
+ if not babel_style:
+ strings = tuple(x for x in strings if x is not None)
+ if not strings:
+ continue
+ else:
+ if len(strings) == 1:
+ strings = strings[0]
+ else:
+ strings = tuple(strings)
+ yield node.lineno, node.node.name, strings
+
+
+class _CommentFinder(object):
+ """Helper class to find comments in a token stream. Can only
+ find comments for gettext calls forwards. Once the comment
+ from line 4 is found, a comment for line 1 will not return a
+ usable value.
+ """
+
+ def __init__(self, tokens, comment_tags):
+ self.tokens = tokens
+ self.comment_tags = comment_tags
+ self.offset = 0
+ self.last_lineno = 0
+
+ def find_backwards(self, offset):
+ try:
+ for _, token_type, token_value in reversed(
+ self.tokens[self.offset : offset]
+ ):
+ if token_type in ("comment", "linecomment"):
+ try:
+ prefix, comment = token_value.split(None, 1)
+ except ValueError:
+ continue
+ if prefix in self.comment_tags:
+ return [comment.rstrip()]
+ return []
+ finally:
+ self.offset = offset
+
+ def find_comments(self, lineno):
+ if not self.comment_tags or self.last_lineno > lineno:
+ return []
+ for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset :]):
+ if token_lineno > lineno:
+ return self.find_backwards(self.offset + idx)
+ return self.find_backwards(len(self.tokens))
+
+
+def babel_extract(fileobj, keywords, comment_tags, options):
+ """Babel extraction method for Jinja templates.
+
+ .. versionchanged:: 2.3
+ Basic support for translation comments was added. If `comment_tags`
+ is now set to a list of keywords for extraction, the extractor will
+ try to find the best preceding comment that begins with one of the
+ keywords. For best results, make sure to not have more than one
+ gettext call in one line of code and the matching comment in the
+ same line or the line before.
+
+ .. versionchanged:: 2.5.1
+ The `newstyle_gettext` flag can be set to `True` to enable newstyle
+ gettext calls.
+
+ .. versionchanged:: 2.7
+ A `silent` option can now be provided. If set to `False` template
+ syntax errors are propagated instead of being ignored.
+
+ :param fileobj: the file-like object the messages should be extracted from
+ :param keywords: a list of keywords (i.e. function names) that should be
+ recognized as translation functions
+ :param comment_tags: a list of translator tags to search for and include
+ in the results.
+ :param options: a dictionary of additional options (optional)
+ :return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
+ (comments will be empty currently)
+ """
+ extensions = set()
+ for extension in options.get("extensions", "").split(","):
+ extension = extension.strip()
+ if not extension:
+ continue
+ extensions.add(import_string(extension))
+ if InternationalizationExtension not in extensions:
+ extensions.add(InternationalizationExtension)
+
+ def getbool(options, key, default=False):
+ return options.get(key, str(default)).lower() in ("1", "on", "yes", "true")
+
+ silent = getbool(options, "silent", True)
+ environment = Environment(
+ options.get("block_start_string", BLOCK_START_STRING),
+ options.get("block_end_string", BLOCK_END_STRING),
+ options.get("variable_start_string", VARIABLE_START_STRING),
+ options.get("variable_end_string", VARIABLE_END_STRING),
+ options.get("comment_start_string", COMMENT_START_STRING),
+ options.get("comment_end_string", COMMENT_END_STRING),
+ options.get("line_statement_prefix") or LINE_STATEMENT_PREFIX,
+ options.get("line_comment_prefix") or LINE_COMMENT_PREFIX,
+ getbool(options, "trim_blocks", TRIM_BLOCKS),
+ getbool(options, "lstrip_blocks", LSTRIP_BLOCKS),
+ NEWLINE_SEQUENCE,
+ getbool(options, "keep_trailing_newline", KEEP_TRAILING_NEWLINE),
+ frozenset(extensions),
+ cache_size=0,
+ auto_reload=False,
+ )
+
+ if getbool(options, "trimmed"):
+ environment.policies["ext.i18n.trimmed"] = True
+ if getbool(options, "newstyle_gettext"):
+ environment.newstyle_gettext = True
+
+ source = fileobj.read().decode(options.get("encoding", "utf-8"))
+ try:
+ node = environment.parse(source)
+ tokens = list(environment.lex(environment.preprocess(source)))
+ except TemplateSyntaxError:
+ if not silent:
+ raise
+ # skip templates with syntax errors
+ return
+
+ finder = _CommentFinder(tokens, comment_tags)
+ for lineno, func, message in extract_from_ast(node, keywords):
+ yield lineno, func, message, finder.find_comments(lineno)
+
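+# Editorial sketch (not part of upstream Jinja2): ``babel_extract`` is usually
+# driven by Babel via an extraction mapping rather than called directly. A
+# typical mapping entry might look like this (the glob and option values are
+# assumptions for illustration only):
+#
+#   [jinja2: **/templates/**.html]
+#   encoding = utf-8
+#   extensions = jinja2.ext.do, jinja2.ext.loopcontrols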
+
+#: nicer import names
+i18n = InternationalizationExtension
+do = ExprStmtExtension
+loopcontrols = LoopControlExtension
+with_ = WithExtension
+autoescape = AutoEscapeExtension
+debug = DebugExtension
diff --git a/third_party/python/Jinja2/src/jinja2/filters.py b/third_party/python/Jinja2/src/jinja2/filters.py
new file mode 100644
index 0000000000..9741567351
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/filters.py
@@ -0,0 +1,1382 @@
+# -*- coding: utf-8 -*-
+"""Built-in template filters used with the ``|`` operator."""
+import math
+import random
+import re
+import warnings
+from collections import namedtuple
+from itertools import chain
+from itertools import groupby
+
+from markupsafe import escape
+from markupsafe import Markup
+from markupsafe import soft_unicode
+
+from ._compat import abc
+from ._compat import imap
+from ._compat import iteritems
+from ._compat import string_types
+from ._compat import text_type
+from .exceptions import FilterArgumentError
+from .runtime import Undefined
+from .utils import htmlsafe_json_dumps
+from .utils import pformat
+from .utils import unicode_urlencode
+from .utils import urlize
+
+_word_re = re.compile(r"\w+", re.UNICODE)
+_word_beginning_split_re = re.compile(r"([-\s\(\{\[\<]+)", re.UNICODE)
+
+
+def contextfilter(f):
+ """Decorator for marking context dependent filters. The current
+ :class:`Context` will be passed as first argument.
+ """
+ f.contextfilter = True
+ return f
+
+
+def evalcontextfilter(f):
+ """Decorator for marking eval-context dependent filters. An eval
+ context object is passed as first argument. For more information
+ about the eval context, see :ref:`eval-context`.
+
+ .. versionadded:: 2.4
+ """
+ f.evalcontextfilter = True
+ return f
+
+
+def environmentfilter(f):
+ """Decorator for marking environment dependent filters. The current
+ :class:`Environment` is passed to the filter as first argument.
+ """
+ f.environmentfilter = True
+ return f
+
+
+def ignore_case(value):
+ """For use as a postprocessor for :func:`make_attrgetter`. Converts strings
+ to lowercase and returns other types as-is."""
+ return value.lower() if isinstance(value, string_types) else value
+
+
+def make_attrgetter(environment, attribute, postprocess=None, default=None):
+ """Returns a callable that looks up the given attribute from a
+ passed object with the rules of the environment. Dots are allowed
+ to access attributes of attributes. Integer parts in paths are
+ looked up as integers.
+ """
+ attribute = _prepare_attribute_parts(attribute)
+
+ def attrgetter(item):
+ for part in attribute:
+ item = environment.getitem(item, part)
+
+ if default and isinstance(item, Undefined):
+ item = default
+
+ if postprocess is not None:
+ item = postprocess(item)
+
+ return item
+
+ return attrgetter
+
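+# Illustrative note (not upstream code): with a hypothetical path such as
+# ``make_attrgetter(env, "user.roles.0")`` the returned callable resolves an
+# item via ``env.getitem(env.getitem(env.getitem(item, "user"), "roles"), 0)``,
+# i.e. integer path segments are looked up as integers.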
+
+def make_multi_attrgetter(environment, attribute, postprocess=None):
+ """Returns a callable that looks up the given comma separated
+ attributes from a passed object with the rules of the environment.
+ Dots are allowed to access attributes of each attribute. Integer
+ parts in paths are looked up as integers.
+
+ The value returned by the returned callable is a list of extracted
+ attribute values.
+
+ Examples of attribute: "attr1,attr2", "attr1.inner1.0,attr2.inner2.0", etc.
+ """
+ attribute_parts = (
+ attribute.split(",") if isinstance(attribute, string_types) else [attribute]
+ )
+ attribute = [
+ _prepare_attribute_parts(attribute_part) for attribute_part in attribute_parts
+ ]
+
+ def attrgetter(item):
+ items = [None] * len(attribute)
+ for i, attribute_part in enumerate(attribute):
+ item_i = item
+ for part in attribute_part:
+ item_i = environment.getitem(item_i, part)
+
+ if postprocess is not None:
+ item_i = postprocess(item_i)
+
+ items[i] = item_i
+ return items
+
+ return attrgetter
+
+
+def _prepare_attribute_parts(attr):
+ if attr is None:
+ return []
+ elif isinstance(attr, string_types):
+ return [int(x) if x.isdigit() else x for x in attr.split(".")]
+ else:
+ return [attr]
+
+
+def do_forceescape(value):
+ """Enforce HTML escaping. This will probably double escape variables."""
+ if hasattr(value, "__html__"):
+ value = value.__html__()
+ return escape(text_type(value))
+
+
+def do_urlencode(value):
+ """Quote data for use in a URL path or query using UTF-8.
+
+ Basic wrapper around :func:`urllib.parse.quote` when given a
+ string, or :func:`urllib.parse.urlencode` for a dict or iterable.
+
+ :param value: Data to quote. A string will be quoted directly. A
+ dict or iterable of ``(key, value)`` pairs will be joined as a
+ query string.
+
+ When given a string, "/" is not quoted. HTTP servers treat "/" and
+ "%2F" equivalently in paths. If you need quoted slashes, use the
+ ``|replace("/", "%2F")`` filter.
+
+ .. versionadded:: 2.7
+ """
+ if isinstance(value, string_types) or not isinstance(value, abc.Iterable):
+ return unicode_urlencode(value)
+
+ if isinstance(value, dict):
+ items = iteritems(value)
+ else:
+ items = iter(value)
+
+ return u"&".join(
+ "%s=%s" % (unicode_urlencode(k, for_qs=True), unicode_urlencode(v, for_qs=True))
+ for k, v in items
+ )
+
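+# Worked example (added for clarity, values chosen for illustration): for a
+# plain string the "/" character stays unquoted while spaces are encoded,
+# e.g. ``do_urlencode(u"Hello World/foo")`` returns ``u"Hello%20World/foo"``.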
+
+@evalcontextfilter
+def do_replace(eval_ctx, s, old, new, count=None):
+ """Return a copy of the value with all occurrences of a substring
+ replaced with a new one. The first argument is the substring
+ that should be replaced, the second is the replacement string.
+ If the optional third argument ``count`` is given, only the first
+ ``count`` occurrences are replaced:
+
+ .. sourcecode:: jinja
+
+ {{ "Hello World"|replace("Hello", "Goodbye") }}
+ -> Goodbye World
+
+ {{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
+ -> d'oh, d'oh, aaargh
+ """
+ if count is None:
+ count = -1
+ if not eval_ctx.autoescape:
+ return text_type(s).replace(text_type(old), text_type(new), count)
+ if (
+ hasattr(old, "__html__")
+ or hasattr(new, "__html__")
+ and not hasattr(s, "__html__")
+ ):
+ s = escape(s)
+ else:
+ s = soft_unicode(s)
+ return s.replace(soft_unicode(old), soft_unicode(new), count)
+
+
+def do_upper(s):
+ """Convert a value to uppercase."""
+ return soft_unicode(s).upper()
+
+
+def do_lower(s):
+ """Convert a value to lowercase."""
+ return soft_unicode(s).lower()
+
+
+@evalcontextfilter
+def do_xmlattr(_eval_ctx, d, autospace=True):
+ """Create an SGML/XML attribute string based on the items in a dict.
+ All values that are neither `none` nor `undefined` are automatically
+ escaped:
+
+ .. sourcecode:: html+jinja
+
+ <ul{{ {'class': 'my_list', 'missing': none,
+ 'id': 'list-%d'|format(variable)}|xmlattr }}>
+ ...
+ </ul>
+
+ Results in something like this:
+
+ .. sourcecode:: html
+
+ <ul class="my_list" id="list-42">
+ ...
+ </ul>
+
+ As you can see it automatically prepends a space in front of the item
+ if the filter returned something, unless the second parameter is false.
+ """
+ rv = u" ".join(
+ u'%s="%s"' % (escape(key), escape(value))
+ for key, value in iteritems(d)
+ if value is not None and not isinstance(value, Undefined)
+ )
+ if autospace and rv:
+ rv = u" " + rv
+ if _eval_ctx.autoescape:
+ rv = Markup(rv)
+ return rv
+
+
+def do_capitalize(s):
+ """Capitalize a value. The first character will be uppercase, all others
+ lowercase.
+ """
+ return soft_unicode(s).capitalize()
+
+
+def do_title(s):
+ """Return a titlecased version of the value. I.e. words will start with
+ uppercase letters, all remaining characters are lowercase.
+ """
+ return "".join(
+ [
+ item[0].upper() + item[1:].lower()
+ for item in _word_beginning_split_re.split(soft_unicode(s))
+ if item
+ ]
+ )
+
+
+def do_dictsort(value, case_sensitive=False, by="key", reverse=False):
+ """Sort a dict and yield (key, value) pairs. Because python dicts are
+ unsorted you may want to use this function to order them by either
+ key or value:
+
+ .. sourcecode:: jinja
+
+ {% for item in mydict|dictsort %}
+ sort the dict by key, case insensitive
+
+ {% for item in mydict|dictsort(reverse=true) %}
+ sort the dict by key, case insensitive, reverse order
+
+ {% for item in mydict|dictsort(true) %}
+ sort the dict by key, case sensitive
+
+ {% for item in mydict|dictsort(false, 'value') %}
+ sort the dict by value, case insensitive
+ """
+ if by == "key":
+ pos = 0
+ elif by == "value":
+ pos = 1
+ else:
+ raise FilterArgumentError('You can only sort by either "key" or "value"')
+
+ def sort_func(item):
+ value = item[pos]
+
+ if not case_sensitive:
+ value = ignore_case(value)
+
+ return value
+
+ return sorted(value.items(), key=sort_func, reverse=reverse)
+
+
+@environmentfilter
+def do_sort(environment, value, reverse=False, case_sensitive=False, attribute=None):
+ """Sort an iterable using Python's :func:`sorted`.
+
+ .. sourcecode:: jinja
+
+ {% for city in cities|sort %}
+ ...
+ {% endfor %}
+
+ :param reverse: Sort descending instead of ascending.
+ :param case_sensitive: When sorting strings, sort upper and lower
+ case separately.
+ :param attribute: When sorting objects or dicts, an attribute or
+ key to sort by. Can use dot notation like ``"address.city"``.
+ Can be a list of attributes like ``"age,name"``.
+
+ The sort is stable; it does not change the relative order of
+ elements that compare equal. This makes it possible to chain
+ sorts on different attributes and orderings.
+
+ .. sourcecode:: jinja
+
+ {% for user in users|sort(attribute="name")
+ |sort(reverse=true, attribute="age") %}
+ ...
+ {% endfor %}
+
+ As a shortcut to chaining when the direction is the same for all
+ attributes, pass a comma separated list of attributes.
+
+ .. sourcecode:: jinja
+
+ {% for user in users|sort(attribute="age,name") %}
+ ...
+ {% endfor %}
+
+ .. versionchanged:: 2.11.0
+ The ``attribute`` parameter can be a comma separated list of
+ attributes, e.g. ``"age,name"``.
+
+ .. versionchanged:: 2.6
+ The ``attribute`` parameter was added.
+ """
+ key_func = make_multi_attrgetter(
+ environment, attribute, postprocess=ignore_case if not case_sensitive else None
+ )
+ return sorted(value, key=key_func, reverse=reverse)
+
+
+@environmentfilter
+def do_unique(environment, value, case_sensitive=False, attribute=None):
+ """Returns a list of unique items from the given iterable.
+
+ .. sourcecode:: jinja
+
+ {{ ['foo', 'bar', 'foobar', 'FooBar']|unique|list }}
+ -> ['foo', 'bar', 'foobar']
+
+ The unique items are yielded in the same order as their first occurrence in
+ the iterable passed to the filter.
+
+ :param case_sensitive: Treat upper and lower case strings as distinct.
+ :param attribute: Filter objects with unique values for this attribute.
+ """
+ getter = make_attrgetter(
+ environment, attribute, postprocess=ignore_case if not case_sensitive else None
+ )
+ seen = set()
+
+ for item in value:
+ key = getter(item)
+
+ if key not in seen:
+ seen.add(key)
+ yield item
+
+
+def _min_or_max(environment, value, func, case_sensitive, attribute):
+ it = iter(value)
+
+ try:
+ first = next(it)
+ except StopIteration:
+ return environment.undefined("No aggregated item, sequence was empty.")
+
+ key_func = make_attrgetter(
+ environment, attribute, postprocess=ignore_case if not case_sensitive else None
+ )
+ return func(chain([first], it), key=key_func)
+
+
+@environmentfilter
+def do_min(environment, value, case_sensitive=False, attribute=None):
+ """Return the smallest item from the sequence.
+
+ .. sourcecode:: jinja
+
+ {{ [1, 2, 3]|min }}
+ -> 1
+
+ :param case_sensitive: Treat upper and lower case strings as distinct.
+ :param attribute: Get the object with the min value of this attribute.
+ """
+ return _min_or_max(environment, value, min, case_sensitive, attribute)
+
+
+@environmentfilter
+def do_max(environment, value, case_sensitive=False, attribute=None):
+ """Return the largest item from the sequence.
+
+ .. sourcecode:: jinja
+
+ {{ [1, 2, 3]|max }}
+ -> 3
+
+ :param case_sensitive: Treat upper and lower case strings as distinct.
+ :param attribute: Get the object with the max value of this attribute.
+ """
+ return _min_or_max(environment, value, max, case_sensitive, attribute)
+
+
+def do_default(value, default_value=u"", boolean=False):
+ """If the value is undefined it will return the passed default value,
+ otherwise the value of the variable:
+
+ .. sourcecode:: jinja
+
+ {{ my_variable|default('my_variable is not defined') }}
+
+ This will output the value of ``my_variable`` if the variable was
+ defined, otherwise ``'my_variable is not defined'``. If you want
+ to use default with variables that evaluate to false you have to
+ set the second parameter to `true`:
+
+ .. sourcecode:: jinja
+
+ {{ ''|default('the string was empty', true) }}
+
+ .. versionchanged:: 2.11
+ It's now possible to configure the :class:`~jinja2.Environment` with
+ :class:`~jinja2.ChainableUndefined` to make the `default` filter work
+ on nested elements and attributes that may contain undefined values
+ in the chain without getting an :exc:`~jinja2.UndefinedError`.
+ """
+ if isinstance(value, Undefined) or (boolean and not value):
+ return default_value
+ return value
+
+
+@evalcontextfilter
+def do_join(eval_ctx, value, d=u"", attribute=None):
+ """Return a string which is the concatenation of the strings in the
+ sequence. The separator between elements is an empty string per
+ default; you can define it with the optional parameter:
+
+ .. sourcecode:: jinja
+
+ {{ [1, 2, 3]|join('|') }}
+ -> 1|2|3
+
+ {{ [1, 2, 3]|join }}
+ -> 123
+
+ It is also possible to join certain attributes of an object:
+
+ .. sourcecode:: jinja
+
+ {{ users|join(', ', attribute='username') }}
+
+ .. versionadded:: 2.6
+ The `attribute` parameter was added.
+ """
+ if attribute is not None:
+ value = imap(make_attrgetter(eval_ctx.environment, attribute), value)
+
+ # no automatic escaping? joining is a lot easier then
+ if not eval_ctx.autoescape:
+ return text_type(d).join(imap(text_type, value))
+
+ # if the delimiter doesn't have an html representation we check
+ # if any of the items has. If yes we do a coercion to Markup
+ if not hasattr(d, "__html__"):
+ value = list(value)
+ do_escape = False
+ for idx, item in enumerate(value):
+ if hasattr(item, "__html__"):
+ do_escape = True
+ else:
+ value[idx] = text_type(item)
+ if do_escape:
+ d = escape(d)
+ else:
+ d = text_type(d)
+ return d.join(value)
+
+ # no html involved, do normal joining
+ return soft_unicode(d).join(imap(soft_unicode, value))
+
+
+def do_center(value, width=80):
+ """Centers the value in a field of a given width."""
+ return text_type(value).center(width)
+
+
+@environmentfilter
+def do_first(environment, seq):
+ """Return the first item of a sequence."""
+ try:
+ return next(iter(seq))
+ except StopIteration:
+ return environment.undefined("No first item, sequence was empty.")
+
+
+@environmentfilter
+def do_last(environment, seq):
+ """
+ Return the last item of a sequence.
+
+ Note: Does not work with generators. You may want to explicitly
+ convert it to a list:
+
+ .. sourcecode:: jinja
+
+ {{ data | selectattr('name', '==', 'Jinja') | list | last }}
+ """
+ try:
+ return next(iter(reversed(seq)))
+ except StopIteration:
+ return environment.undefined("No last item, sequence was empty.")
+
+
+@contextfilter
+def do_random(context, seq):
+ """Return a random item from the sequence."""
+ try:
+ return random.choice(seq)
+ except IndexError:
+ return context.environment.undefined("No random item, sequence was empty.")
+
+
+def do_filesizeformat(value, binary=False):
+ """Format the value like a 'human-readable' file size (i.e. 13 kB,
+ 4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
+ Giga, etc.); if the second parameter is set to `True` the binary
+ prefixes are used (Mebi, Gibi).
+ """
+ bytes = float(value)
+ base = binary and 1024 or 1000
+ prefixes = [
+ (binary and "KiB" or "kB"),
+ (binary and "MiB" or "MB"),
+ (binary and "GiB" or "GB"),
+ (binary and "TiB" or "TB"),
+ (binary and "PiB" or "PB"),
+ (binary and "EiB" or "EB"),
+ (binary and "ZiB" or "ZB"),
+ (binary and "YiB" or "YB"),
+ ]
+ if bytes == 1:
+ return "1 Byte"
+ elif bytes < base:
+ return "%d Bytes" % bytes
+ else:
+ for i, prefix in enumerate(prefixes):
+ unit = base ** (i + 2)
+ if bytes < unit:
+ return "%.1f %s" % ((base * bytes / unit), prefix)
+ return "%.1f %s" % ((base * bytes / unit), prefix)
+
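+# Worked examples for the loop above (added for clarity, not upstream code):
+#
+#   do_filesizeformat(1000000)        ->  "1.0 MB"
+#   do_filesizeformat(1048576, True)  ->  "1.0 MiB"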
+
+def do_pprint(value, verbose=False):
+ """Pretty print a variable. Useful for debugging.
+
+ From Jinja 1.2 onwards you can pass it a parameter. If this parameter
+ is truthy the output will be more verbose (this requires `pretty`).
+ """
+ return pformat(value, verbose=verbose)
+
+
+@evalcontextfilter
+def do_urlize(
+ eval_ctx, value, trim_url_limit=None, nofollow=False, target=None, rel=None
+):
+ """Converts URLs in plain text into clickable links.
+
+ If you pass the filter an additional integer it will shorten the urls
+ to that number. Also a third argument exists that makes the urls
+ "nofollow":
+
+ .. sourcecode:: jinja
+
+ {{ mytext|urlize(40, true) }}
+ links are shortened to 40 chars and defined with rel="nofollow"
+
+ If *target* is specified, the ``target`` attribute will be added to the
+ ``<a>`` tag:
+
+ .. sourcecode:: jinja
+
+ {{ mytext|urlize(40, target='_blank') }}
+
+ .. versionchanged:: 2.8+
+ The *target* parameter was added.
+ """
+ policies = eval_ctx.environment.policies
+ rel = set((rel or "").split() or [])
+ if nofollow:
+ rel.add("nofollow")
+ rel.update((policies["urlize.rel"] or "").split())
+ if target is None:
+ target = policies["urlize.target"]
+ rel = " ".join(sorted(rel)) or None
+ rv = urlize(value, trim_url_limit, rel=rel, target=target)
+ if eval_ctx.autoescape:
+ rv = Markup(rv)
+ return rv
+
+
+def do_indent(s, width=4, first=False, blank=False, indentfirst=None):
+ """Return a copy of the string with each line indented by 4 spaces. The
+ first line and blank lines are not indented by default.
+
+ :param width: Number of spaces to indent by.
+ :param first: Don't skip indenting the first line.
+ :param blank: Don't skip indenting empty lines.
+
+ .. versionchanged:: 2.10
+ Blank lines are not indented by default.
+
+ Rename the ``indentfirst`` argument to ``first``.
+ """
+ if indentfirst is not None:
+ warnings.warn(
+ "The 'indentfirst' argument is renamed to 'first' and will"
+ " be removed in version 3.0.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ first = indentfirst
+
+ indention = u" " * width
+ newline = u"\n"
+
+ if isinstance(s, Markup):
+ indention = Markup(indention)
+ newline = Markup(newline)
+
+ s += newline # this quirk is necessary for splitlines method
+
+ if blank:
+ rv = (newline + indention).join(s.splitlines())
+ else:
+ lines = s.splitlines()
+ rv = lines.pop(0)
+
+ if lines:
+ rv += newline + newline.join(
+ indention + line if line else line for line in lines
+ )
+
+ if first:
+ rv = indention + rv
+
+ return rv
+
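+# Worked example of the default behaviour above (first line and blank lines
+# are left unindented); added for clarity, not upstream code:
+#
+#   do_indent("a\nb\n\nc", 2)  ->  "a\n  b\n\n  c"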
+
+@environmentfilter
+def do_truncate(env, s, length=255, killwords=False, end="...", leeway=None):
+ """Return a truncated copy of the string. The length is specified
+ with the first parameter which defaults to ``255``. If the second
+ parameter is ``true`` the filter will cut the text at length. Otherwise
+ it will discard the last word. If the text was in fact
+ truncated it will append an ellipsis sign (``"..."``). If you want a
+ different ellipsis sign than ``"..."`` you can specify it using the
+ third parameter. Strings that only exceed the length by the tolerance
+ margin given in the fourth parameter will not be truncated.
+
+ .. sourcecode:: jinja
+
+ {{ "foo bar baz qux"|truncate(9) }}
+ -> "foo..."
+ {{ "foo bar baz qux"|truncate(9, True) }}
+ -> "foo ba..."
+ {{ "foo bar baz qux"|truncate(11) }}
+ -> "foo bar baz qux"
+ {{ "foo bar baz qux"|truncate(11, False, '...', 0) }}
+ -> "foo bar..."
+
+ The default leeway on newer Jinja versions is 5 and was 0 before but
+ can be reconfigured globally.
+ """
+ if leeway is None:
+ leeway = env.policies["truncate.leeway"]
+ assert length >= len(end), "expected length >= %s, got %s" % (len(end), length)
+ assert leeway >= 0, "expected leeway >= 0, got %s" % leeway
+ if len(s) <= length + leeway:
+ return s
+ if killwords:
+ return s[: length - len(end)] + end
+ result = s[: length - len(end)].rsplit(" ", 1)[0]
+ return result + end
+
+
+@environmentfilter
+def do_wordwrap(
+ environment,
+ s,
+ width=79,
+ break_long_words=True,
+ wrapstring=None,
+ break_on_hyphens=True,
+):
+ """Wrap a string to the given width. Existing newlines are treated
+ as paragraphs to be wrapped separately.
+
+ :param s: Original text to wrap.
+ :param width: Maximum length of wrapped lines.
+ :param break_long_words: If a word is longer than ``width``, break
+ it across lines.
+ :param break_on_hyphens: If a word contains hyphens, it may be split
+ across lines.
+ :param wrapstring: String to join each wrapped line. Defaults to
+ :attr:`Environment.newline_sequence`.
+
+ .. versionchanged:: 2.11
+ Existing newlines are treated as paragraphs wrapped separately.
+
+ .. versionchanged:: 2.11
+ Added the ``break_on_hyphens`` parameter.
+
+ .. versionchanged:: 2.7
+ Added the ``wrapstring`` parameter.
+ """
+
+ import textwrap
+
+ if not wrapstring:
+ wrapstring = environment.newline_sequence
+
+ # textwrap.wrap doesn't consider existing newlines when wrapping.
+ # If the string has a newline before width, wrap will still insert
+ # a newline at width, resulting in a short line. Instead, split and
+ # wrap each paragraph individually.
+ return wrapstring.join(
+ [
+ wrapstring.join(
+ textwrap.wrap(
+ line,
+ width=width,
+ expand_tabs=False,
+ replace_whitespace=False,
+ break_long_words=break_long_words,
+ break_on_hyphens=break_on_hyphens,
+ )
+ )
+ for line in s.splitlines()
+ ]
+ )
+
+
+def do_wordcount(s):
+ """Count the words in that string."""
+ return len(_word_re.findall(soft_unicode(s)))
+
+
+def do_int(value, default=0, base=10):
+ """Convert the value into an integer. If the
+ conversion doesn't work it will return ``0``. You can
+ override this default using the first parameter. You
+ can also override the default base (10) in the second
+ parameter, which handles input with prefixes such as
+ 0b, 0o and 0x for bases 2, 8 and 16 respectively.
+ The base is ignored for decimal numbers and non-string values.
+ """
+ try:
+ if isinstance(value, string_types):
+ return int(value, base)
+ return int(value)
+ except (TypeError, ValueError):
+ # this quirk is necessary so that "42.23"|int gives 42.
+ try:
+ return int(float(value))
+ except (TypeError, ValueError):
+ return default
+
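+# Illustrative calls covering the base handling and the float fallback above
+# (added for clarity, not upstream code):
+#
+#   do_int("0x4d32", base=16)   ->  19762
+#   do_int("42.23")             ->  42
+#   do_int("not a number", 7)   ->  7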
+
+def do_float(value, default=0.0):
+ """Convert the value into a floating point number. If the
+ conversion doesn't work it will return ``0.0``. You can
+ override this default using the first parameter.
+ """
+ try:
+ return float(value)
+ except (TypeError, ValueError):
+ return default
+
+
+def do_format(value, *args, **kwargs):
+ """Apply the given values to a `printf-style`_ format string, like
+ ``string % values``.
+
+ .. sourcecode:: jinja
+
+ {{ "%s, %s!"|format(greeting, name) }}
+ Hello, World!
+
+ In most cases it should be more convenient and efficient to use the
+ ``%`` operator or :meth:`str.format`.
+
+ .. code-block:: text
+
+ {{ "%s, %s!" % (greeting, name) }}
+ {{ "{}, {}!".format(greeting, name) }}
+
+ .. _printf-style: https://docs.python.org/library/stdtypes.html
+ #printf-style-string-formatting
+ """
+ if args and kwargs:
+ raise FilterArgumentError(
+ "can't handle positional and keyword arguments at the same time"
+ )
+ return soft_unicode(value) % (kwargs or args)
+
+
+def do_trim(value, chars=None):
+ """Strip leading and trailing characters, by default whitespace."""
+ return soft_unicode(value).strip(chars)
+
+
+def do_striptags(value):
+ """Strip SGML/XML tags and replace adjacent whitespace by one space."""
+ if hasattr(value, "__html__"):
+ value = value.__html__()
+ return Markup(text_type(value)).striptags()
+
+
+def do_slice(value, slices, fill_with=None):
+ """Slice an iterator and return a list of lists containing
+ those items. Useful if you want to create a div containing
+ three ul tags that represent columns:
+
+ .. sourcecode:: html+jinja
+
+ <div class="columnwrapper">
+ {%- for column in items|slice(3) %}
+ <ul class="column-{{ loop.index }}">
+ {%- for item in column %}
+ <li>{{ item }}</li>
+ {%- endfor %}
+ </ul>
+ {%- endfor %}
+ </div>
+
+ If you pass it a second argument it's used to fill missing
+ values on the last iteration.
+ """
+ seq = list(value)
+ length = len(seq)
+ items_per_slice = length // slices
+ slices_with_extra = length % slices
+ offset = 0
+ for slice_number in range(slices):
+ start = offset + slice_number * items_per_slice
+ if slice_number < slices_with_extra:
+ offset += 1
+ end = offset + (slice_number + 1) * items_per_slice
+ tmp = seq[start:end]
+ if fill_with is not None and slice_number >= slices_with_extra:
+ tmp.append(fill_with)
+ yield tmp
+
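+# Worked example (added for clarity, not upstream code): the arithmetic above
+# spreads the remainder over the first slices, and ``fill_with`` pads the rest:
+#
+#   list(do_slice(range(7), 3))     ->  [[0, 1, 2], [3, 4], [5, 6]]
+#   list(do_slice(range(7), 3, 0))  ->  [[0, 1, 2], [3, 4, 0], [5, 6, 0]]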
+
+def do_batch(value, linecount, fill_with=None):
+ """
+ A filter that batches items. It works pretty much like `slice`
+ just the other way round. It returns a list of lists with the
+ given number of items. If you provide a second parameter this
+ is used to fill up missing items. See this example:
+
+ .. sourcecode:: html+jinja
+
+ <table>
+ {%- for row in items|batch(3, '&nbsp;') %}
+ <tr>
+ {%- for column in row %}
+ <td>{{ column }}</td>
+ {%- endfor %}
+ </tr>
+ {%- endfor %}
+ </table>
+ """
+ tmp = []
+ for item in value:
+ if len(tmp) == linecount:
+ yield tmp
+ tmp = []
+ tmp.append(item)
+ if tmp:
+ if fill_with is not None and len(tmp) < linecount:
+ tmp += [fill_with] * (linecount - len(tmp))
+ yield tmp
+
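+# Worked example (added for clarity, not upstream code): unlike ``slice``,
+# which yields a fixed number of lists, ``batch`` yields lists of fixed size:
+#
+#   list(do_batch(range(7), 3, "x"))  ->  [[0, 1, 2], [3, 4, 5], [6, "x", "x"]]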
+
+def do_round(value, precision=0, method="common"):
+ """Round the number to a given precision. The first
+ parameter specifies the precision (default is ``0``), the
+ second the rounding method:
+
+ - ``'common'`` rounds either up or down
+ - ``'ceil'`` always rounds up
+ - ``'floor'`` always rounds down
+
+ If you don't specify a method ``'common'`` is used.
+
+ .. sourcecode:: jinja
+
+ {{ 42.55|round }}
+ -> 43.0
+ {{ 42.55|round(1, 'floor') }}
+ -> 42.5
+
+ Note that even if rounded to 0 precision, a float is returned. If
+ you need a real integer, pipe it through `int`:
+
+ .. sourcecode:: jinja
+
+ {{ 42.55|round|int }}
+ -> 43
+ """
+ if method not in {"common", "ceil", "floor"}:
+ raise FilterArgumentError("method must be common, ceil or floor")
+ if method == "common":
+ return round(value, precision)
+ func = getattr(math, method)
+ return func(value * (10 ** precision)) / (10 ** precision)
+
+
+# Use a regular tuple repr here. This is what we did in the past and we
+# really want to hide this custom type as much as possible. In particular
+# we do not want to accidentally expose an auto generated repr in case
+# people start to print this out in comments or something similar for
+# debugging.
+_GroupTuple = namedtuple("_GroupTuple", ["grouper", "list"])
+_GroupTuple.__repr__ = tuple.__repr__
+_GroupTuple.__str__ = tuple.__str__
+
+
+@environmentfilter
+def do_groupby(environment, value, attribute):
+ """Group a sequence of objects by an attribute using Python's
+ :func:`itertools.groupby`. The attribute can use dot notation for
+ nested access, like ``"address.city"``. Unlike Python's ``groupby``,
+ the values are sorted first so only one group is returned for each
+ unique value.
+
+ For example, a list of ``User`` objects with a ``city`` attribute
+ can be rendered in groups. In this example, ``grouper`` refers to
+ the ``city`` value of the group.
+
+ .. sourcecode:: html+jinja
+
+ <ul>{% for city, items in users|groupby("city") %}
+ <li>{{ city }}
+ <ul>{% for user in items %}
+ <li>{{ user.name }}
+ {% endfor %}</ul>
+ </li>
+ {% endfor %}</ul>
+
+ ``groupby`` yields namedtuples of ``(grouper, list)``, which
+ can be used instead of the tuple unpacking above. ``grouper`` is the
+ value of the attribute, and ``list`` is the items with that value.
+
+ .. sourcecode:: html+jinja
+
+ <ul>{% for group in users|groupby("city") %}
+ <li>{{ group.grouper }}: {{ group.list|join(", ") }}
+ {% endfor %}</ul>
+
+ .. versionchanged:: 2.6
+ The attribute supports dot notation for nested access.
+ """
+ expr = make_attrgetter(environment, attribute)
+ return [
+ _GroupTuple(key, list(values))
+ for key, values in groupby(sorted(value, key=expr), expr)
+ ]
+
+
+@environmentfilter
+def do_sum(environment, iterable, attribute=None, start=0):
+ """Returns the sum of a sequence of numbers plus the value of parameter
+ 'start' (which defaults to 0). When the sequence is empty it returns
+ start.
+
+ It is also possible to sum up only certain attributes:
+
+ .. sourcecode:: jinja
+
+ Total: {{ items|sum(attribute='price') }}
+
+ .. versionchanged:: 2.6
+ The `attribute` parameter was added to allow summing up over
+ attributes. Also the `start` parameter was moved on to the right.
+ """
+ if attribute is not None:
+ iterable = imap(make_attrgetter(environment, attribute), iterable)
+ return sum(iterable, start)
+
+
+def do_list(value):
+ """Convert the value into a list. If it was a string the returned list
+ will be a list of characters.
+ """
+ return list(value)
+
+
+def do_mark_safe(value):
+ """Mark the value as safe which means that in an environment with automatic
+ escaping enabled this variable will not be escaped.
+ """
+ return Markup(value)
+
+
+def do_mark_unsafe(value):
+ """Mark a value as unsafe. This is the reverse operation for :func:`safe`."""
+ return text_type(value)
+
+
+def do_reverse(value):
+ """Reverse the object or return an iterator that iterates over it the other
+ way round.
+ """
+ if isinstance(value, string_types):
+ return value[::-1]
+ try:
+ return reversed(value)
+ except TypeError:
+ try:
+ rv = list(value)
+ rv.reverse()
+ return rv
+ except TypeError:
+ raise FilterArgumentError("argument must be iterable")
+
+
+@environmentfilter
+def do_attr(environment, obj, name):
+ """Get an attribute of an object. ``foo|attr("bar")`` works like
+ ``foo.bar`` just that always an attribute is returned and items are not
+ looked up.
+
+ See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
+ """
+ try:
+ name = str(name)
+ except UnicodeError:
+ pass
+ else:
+ try:
+ value = getattr(obj, name)
+ except AttributeError:
+ pass
+ else:
+ if environment.sandboxed and not environment.is_safe_attribute(
+ obj, name, value
+ ):
+ return environment.unsafe_undefined(obj, name)
+ return value
+ return environment.undefined(obj=obj, name=name)
+
+
+@contextfilter
+def do_map(*args, **kwargs):
+ """Applies a filter on a sequence of objects or looks up an attribute.
+ This is useful when dealing with lists of objects but you are really
+ only interested in a certain value of it.
+
+ The basic usage is mapping on an attribute. Imagine you have a list
+ of users but you are only interested in a list of usernames:
+
+ .. sourcecode:: jinja
+
+ Users on this page: {{ users|map(attribute='username')|join(', ') }}
+
+ You can specify a ``default`` value to use if an object in the list
+ does not have the given attribute.
+
+ .. sourcecode:: jinja
+
+ {{ users|map(attribute="username", default="Anonymous")|join(", ") }}
+
+ Alternatively you can let it invoke a filter by passing the name of the
+ filter and the arguments afterwards. A good example would be applying a
+ text conversion filter on a sequence:
+
+ .. sourcecode:: jinja
+
+ Users on this page: {{ titles|map('lower')|join(', ') }}
+
+ Similar to a generator comprehension such as:
+
+ .. code-block:: python
+
+ (u.username for u in users)
+ (u.username or "Anonymous" for u in users)
+ (do_lower(x) for x in titles)
+
+ .. versionchanged:: 2.11.0
+ Added the ``default`` parameter.
+
+ .. versionadded:: 2.7
+ """
+ seq, func = prepare_map(args, kwargs)
+ if seq:
+ for item in seq:
+ yield func(item)
+
+
+@contextfilter
+def do_select(*args, **kwargs):
+ """Filters a sequence of objects by applying a test to each object,
+ and only selecting the objects with the test succeeding.
+
+ If no test is specified, each object will be evaluated as a boolean.
+
+ Example usage:
+
+ .. sourcecode:: jinja
+
+ {{ numbers|select("odd") }}
+ {{ numbers|select("divisibleby", 3) }}
+ {{ numbers|select("lessthan", 42) }}
+ {{ strings|select("equalto", "mystring") }}
+
+ Similar to a generator comprehension such as:
+
+ .. code-block:: python
+
+ (n for n in numbers if test_odd(n))
+ (n for n in numbers if test_divisibleby(n, 3))
+
+ .. versionadded:: 2.7
+ """
+ return select_or_reject(args, kwargs, lambda x: x, False)
+
+
+@contextfilter
+def do_reject(*args, **kwargs):
+ """Filters a sequence of objects by applying a test to each object,
+ and rejecting the objects with the test succeeding.
+
+ If no test is specified, each object will be evaluated as a boolean.
+
+ Example usage:
+
+ .. sourcecode:: jinja
+
+ {{ numbers|reject("odd") }}
+
+ Similar to a generator comprehension such as:
+
+ .. code-block:: python
+
+ (n for n in numbers if not test_odd(n))
+
+ .. versionadded:: 2.7
+ """
+ return select_or_reject(args, kwargs, lambda x: not x, False)
+
+
+@contextfilter
+def do_selectattr(*args, **kwargs):
+ """Filters a sequence of objects by applying a test to the specified
+ attribute of each object, and only selecting the objects with the
+ test succeeding.
+
+ If no test is specified, the attribute's value will be evaluated as
+ a boolean.
+
+ Example usage:
+
+ .. sourcecode:: jinja
+
+ {{ users|selectattr("is_active") }}
+ {{ users|selectattr("email", "none") }}
+
+ Similar to a generator comprehension such as:
+
+ .. code-block:: python
+
+ (user for user in users if user.is_active)
+ (user for user in users if test_none(user.email))
+
+ .. versionadded:: 2.7
+ """
+ return select_or_reject(args, kwargs, lambda x: x, True)
+
+
+@contextfilter
+def do_rejectattr(*args, **kwargs):
+ """Filters a sequence of objects by applying a test to the specified
+ attribute of each object, and rejecting the objects with the test
+ succeeding.
+
+ If no test is specified, the attribute's value will be evaluated as
+ a boolean.
+
+ .. sourcecode:: jinja
+
+ {{ users|rejectattr("is_active") }}
+ {{ users|rejectattr("email", "none") }}
+
+ Similar to a generator comprehension such as:
+
+ .. code-block:: python
+
+ (user for user in users if not user.is_active)
+ (user for user in users if not test_none(user.email))
+
+ .. versionadded:: 2.7
+ """
+ return select_or_reject(args, kwargs, lambda x: not x, True)
+
+
+@evalcontextfilter
+def do_tojson(eval_ctx, value, indent=None):
+ """Dumps a structure to JSON so that it's safe to use in ``<script>``
+ tags. It accepts the same arguments and returns a JSON string. Note that
+ this is available in templates through the ``|tojson`` filter which will
+ also mark the result as safe. Due to how this function escapes certain
+ characters this is safe even if used outside of ``<script>`` tags.
+
+ The following characters are escaped in strings:
+
+ - ``<``
+ - ``>``
+ - ``&``
+ - ``'``
+
+ This makes it safe to embed such strings in any place in HTML with the
+ notable exception of double quoted attributes. In that case single
+ quote your attributes or HTML escape it in addition.
+
+ The indent parameter can be used to enable pretty printing. Set it to
+ the number of spaces that the structures should be indented with.
+
+ Note that this filter is for use in HTML contexts only.
+
+ .. versionadded:: 2.9
+ """
+ policies = eval_ctx.environment.policies
+ dumper = policies["json.dumps_function"]
+ options = policies["json.dumps_kwargs"]
+ if indent is not None:
+ options = dict(options)
+ options["indent"] = indent
+ return htmlsafe_json_dumps(value, dumper=dumper, **options)
+
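+# Typical template usage of the filter above (illustrative only; ``user`` is a
+# made-up context variable):
+#
+#   <script>const user = {{ user|tojson }};</script>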
+
+def prepare_map(args, kwargs):
+ context = args[0]
+ seq = args[1]
+ default = None
+
+ if len(args) == 2 and "attribute" in kwargs:
+ attribute = kwargs.pop("attribute")
+ default = kwargs.pop("default", None)
+ if kwargs:
+ raise FilterArgumentError(
+ "Unexpected keyword argument %r" % next(iter(kwargs))
+ )
+ func = make_attrgetter(context.environment, attribute, default=default)
+ else:
+ try:
+ name = args[2]
+ args = args[3:]
+ except LookupError:
+ raise FilterArgumentError("map requires a filter argument")
+
+ def func(item):
+ return context.environment.call_filter(
+ name, item, args, kwargs, context=context
+ )
+
+ return seq, func
+
+
+def prepare_select_or_reject(args, kwargs, modfunc, lookup_attr):
+ context = args[0]
+ seq = args[1]
+ if lookup_attr:
+ try:
+ attr = args[2]
+ except LookupError:
+ raise FilterArgumentError("Missing parameter for attribute name")
+ transfunc = make_attrgetter(context.environment, attr)
+ off = 1
+ else:
+ off = 0
+
+ def transfunc(x):
+ return x
+
+ try:
+ name = args[2 + off]
+ args = args[3 + off :]
+
+ def func(item):
+ return context.environment.call_test(name, item, args, kwargs)
+
+ except LookupError:
+ func = bool
+
+ return seq, lambda item: modfunc(func(transfunc(item)))
+
+
+def select_or_reject(args, kwargs, modfunc, lookup_attr):
+ seq, func = prepare_select_or_reject(args, kwargs, modfunc, lookup_attr)
+ if seq:
+ for item in seq:
+ if func(item):
+ yield item
+
+
+FILTERS = {
+ "abs": abs,
+ "attr": do_attr,
+ "batch": do_batch,
+ "capitalize": do_capitalize,
+ "center": do_center,
+ "count": len,
+ "d": do_default,
+ "default": do_default,
+ "dictsort": do_dictsort,
+ "e": escape,
+ "escape": escape,
+ "filesizeformat": do_filesizeformat,
+ "first": do_first,
+ "float": do_float,
+ "forceescape": do_forceescape,
+ "format": do_format,
+ "groupby": do_groupby,
+ "indent": do_indent,
+ "int": do_int,
+ "join": do_join,
+ "last": do_last,
+ "length": len,
+ "list": do_list,
+ "lower": do_lower,
+ "map": do_map,
+ "min": do_min,
+ "max": do_max,
+ "pprint": do_pprint,
+ "random": do_random,
+ "reject": do_reject,
+ "rejectattr": do_rejectattr,
+ "replace": do_replace,
+ "reverse": do_reverse,
+ "round": do_round,
+ "safe": do_mark_safe,
+ "select": do_select,
+ "selectattr": do_selectattr,
+ "slice": do_slice,
+ "sort": do_sort,
+ "string": soft_unicode,
+ "striptags": do_striptags,
+ "sum": do_sum,
+ "title": do_title,
+ "trim": do_trim,
+ "truncate": do_truncate,
+ "unique": do_unique,
+ "upper": do_upper,
+ "urlencode": do_urlencode,
+ "urlize": do_urlize,
+ "wordcount": do_wordcount,
+ "wordwrap": do_wordwrap,
+ "xmlattr": do_xmlattr,
+ "tojson": do_tojson,
+}
diff --git a/third_party/python/Jinja2/src/jinja2/idtracking.py b/third_party/python/Jinja2/src/jinja2/idtracking.py
new file mode 100644
index 0000000000..9a0d838017
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/idtracking.py
@@ -0,0 +1,290 @@
+from ._compat import iteritems
+from .visitor import NodeVisitor
+
+VAR_LOAD_PARAMETER = "param"
+VAR_LOAD_RESOLVE = "resolve"
+VAR_LOAD_ALIAS = "alias"
+VAR_LOAD_UNDEFINED = "undefined"
+
+
+def find_symbols(nodes, parent_symbols=None):
+ sym = Symbols(parent=parent_symbols)
+ visitor = FrameSymbolVisitor(sym)
+ for node in nodes:
+ visitor.visit(node)
+ return sym
+
+
+def symbols_for_node(node, parent_symbols=None):
+ sym = Symbols(parent=parent_symbols)
+ sym.analyze_node(node)
+ return sym
+
+
+class Symbols(object):
+ def __init__(self, parent=None, level=None):
+ if level is None:
+ if parent is None:
+ level = 0
+ else:
+ level = parent.level + 1
+ self.level = level
+ self.parent = parent
+ self.refs = {}
+ self.loads = {}
+ self.stores = set()
+
+ def analyze_node(self, node, **kwargs):
+ visitor = RootVisitor(self)
+ visitor.visit(node, **kwargs)
+
+ def _define_ref(self, name, load=None):
+ ident = "l_%d_%s" % (self.level, name)
+ self.refs[name] = ident
+ if load is not None:
+ self.loads[ident] = load
+ return ident
+
+ def find_load(self, target):
+ if target in self.loads:
+ return self.loads[target]
+ if self.parent is not None:
+ return self.parent.find_load(target)
+
+ def find_ref(self, name):
+ if name in self.refs:
+ return self.refs[name]
+ if self.parent is not None:
+ return self.parent.find_ref(name)
+
+ def ref(self, name):
+ rv = self.find_ref(name)
+ if rv is None:
+ raise AssertionError(
+ "Tried to resolve a name to a reference that "
+ "was unknown to the frame (%r)" % name
+ )
+ return rv
+
+ def copy(self):
+ rv = object.__new__(self.__class__)
+ rv.__dict__.update(self.__dict__)
+ rv.refs = self.refs.copy()
+ rv.loads = self.loads.copy()
+ rv.stores = self.stores.copy()
+ return rv
+
+ def store(self, name):
+ self.stores.add(name)
+
+ # If we have not seen the name referenced yet, we need to figure
+ # out what to set it to.
+ if name not in self.refs:
+ # If there is a parent scope we check if the name has a
+ # reference there. If it does it means we might have to alias
+ # to a variable there.
+ if self.parent is not None:
+ outer_ref = self.parent.find_ref(name)
+ if outer_ref is not None:
+ self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref))
+ return
+
+ # Otherwise we can just set it to undefined.
+ self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))
+
+ def declare_parameter(self, name):
+ self.stores.add(name)
+ return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None))
+
+ def load(self, name):
+ target = self.find_ref(name)
+ if target is None:
+ self._define_ref(name, load=(VAR_LOAD_RESOLVE, name))
+
+ def branch_update(self, branch_symbols):
+ stores = {}
+ for branch in branch_symbols:
+ for target in branch.stores:
+ if target in self.stores:
+ continue
+ stores[target] = stores.get(target, 0) + 1
+
+ for sym in branch_symbols:
+ self.refs.update(sym.refs)
+ self.loads.update(sym.loads)
+ self.stores.update(sym.stores)
+
+ for name, branch_count in iteritems(stores):
+ if branch_count == len(branch_symbols):
+ continue
+ target = self.find_ref(name)
+ assert target is not None, "should not happen"
+
+ if self.parent is not None:
+ outer_target = self.parent.find_ref(name)
+ if outer_target is not None:
+ self.loads[target] = (VAR_LOAD_ALIAS, outer_target)
+ continue
+ self.loads[target] = (VAR_LOAD_RESOLVE, name)
+
+ def dump_stores(self):
+ rv = {}
+ node = self
+ while node is not None:
+ for name in node.stores:
+ if name not in rv:
+ rv[name] = self.find_ref(name)
+ node = node.parent
+ return rv
+
+ def dump_param_targets(self):
+ rv = set()
+ node = self
+ while node is not None:
+ for target, (instr, _) in iteritems(self.loads):
+ if instr == VAR_LOAD_PARAMETER:
+ rv.add(target)
+ node = node.parent
+ return rv
+
+
+class RootVisitor(NodeVisitor):
+ def __init__(self, symbols):
+ self.sym_visitor = FrameSymbolVisitor(symbols)
+
+ def _simple_visit(self, node, **kwargs):
+ for child in node.iter_child_nodes():
+ self.sym_visitor.visit(child)
+
+ visit_Template = (
+ visit_Block
+ ) = (
+ visit_Macro
+ ) = (
+ visit_FilterBlock
+ ) = visit_Scope = visit_If = visit_ScopedEvalContextModifier = _simple_visit
+
+ def visit_AssignBlock(self, node, **kwargs):
+ for child in node.body:
+ self.sym_visitor.visit(child)
+
+ def visit_CallBlock(self, node, **kwargs):
+ for child in node.iter_child_nodes(exclude=("call",)):
+ self.sym_visitor.visit(child)
+
+ def visit_OverlayScope(self, node, **kwargs):
+ for child in node.body:
+ self.sym_visitor.visit(child)
+
+ def visit_For(self, node, for_branch="body", **kwargs):
+ if for_branch == "body":
+ self.sym_visitor.visit(node.target, store_as_param=True)
+ branch = node.body
+ elif for_branch == "else":
+ branch = node.else_
+ elif for_branch == "test":
+ self.sym_visitor.visit(node.target, store_as_param=True)
+ if node.test is not None:
+ self.sym_visitor.visit(node.test)
+ return
+ else:
+ raise RuntimeError("Unknown for branch")
+ for item in branch or ():
+ self.sym_visitor.visit(item)
+
+ def visit_With(self, node, **kwargs):
+ for target in node.targets:
+ self.sym_visitor.visit(target)
+ for child in node.body:
+ self.sym_visitor.visit(child)
+
+ def generic_visit(self, node, *args, **kwargs):
+ raise NotImplementedError(
+ "Cannot find symbols for %r" % node.__class__.__name__
+ )
+
+
+class FrameSymbolVisitor(NodeVisitor):
+ """A visitor for `Frame.inspect`."""
+
+ def __init__(self, symbols):
+ self.symbols = symbols
+
+ def visit_Name(self, node, store_as_param=False, **kwargs):
+ """All assignments to names go through this function."""
+ if store_as_param or node.ctx == "param":
+ self.symbols.declare_parameter(node.name)
+ elif node.ctx == "store":
+ self.symbols.store(node.name)
+ elif node.ctx == "load":
+ self.symbols.load(node.name)
+
+ def visit_NSRef(self, node, **kwargs):
+ self.symbols.load(node.name)
+
+ def visit_If(self, node, **kwargs):
+ self.visit(node.test, **kwargs)
+
+ original_symbols = self.symbols
+
+ def inner_visit(nodes):
+ self.symbols = rv = original_symbols.copy()
+ for subnode in nodes:
+ self.visit(subnode, **kwargs)
+ self.symbols = original_symbols
+ return rv
+
+ body_symbols = inner_visit(node.body)
+ elif_symbols = inner_visit(node.elif_)
+ else_symbols = inner_visit(node.else_ or ())
+
+ self.symbols.branch_update([body_symbols, elif_symbols, else_symbols])
+
+ def visit_Macro(self, node, **kwargs):
+ self.symbols.store(node.name)
+
+ def visit_Import(self, node, **kwargs):
+ self.generic_visit(node, **kwargs)
+ self.symbols.store(node.target)
+
+ def visit_FromImport(self, node, **kwargs):
+ self.generic_visit(node, **kwargs)
+ for name in node.names:
+ if isinstance(name, tuple):
+ self.symbols.store(name[1])
+ else:
+ self.symbols.store(name)
+
+ def visit_Assign(self, node, **kwargs):
+ """Visit assignments in the correct order."""
+ self.visit(node.node, **kwargs)
+ self.visit(node.target, **kwargs)
+
+ def visit_For(self, node, **kwargs):
+ """Visiting stops at for blocks. However the block sequence
+ is visited as part of the outer scope.
+ """
+ self.visit(node.iter, **kwargs)
+
+ def visit_CallBlock(self, node, **kwargs):
+ self.visit(node.call, **kwargs)
+
+ def visit_FilterBlock(self, node, **kwargs):
+ self.visit(node.filter, **kwargs)
+
+ def visit_With(self, node, **kwargs):
+ for target in node.values:
+ self.visit(target)
+
+ def visit_AssignBlock(self, node, **kwargs):
+ """Stop visiting at block assigns."""
+ self.visit(node.target, **kwargs)
+
+ def visit_Scope(self, node, **kwargs):
+ """Stop visiting at scopes."""
+
+ def visit_Block(self, node, **kwargs):
+ """Stop visiting at blocks."""
+
+ def visit_OverlayScope(self, node, **kwargs):
+ """Do not visit into overlay scopes."""
diff --git a/third_party/python/Jinja2/src/jinja2/lexer.py b/third_party/python/Jinja2/src/jinja2/lexer.py
new file mode 100644
index 0000000000..552356a12d
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/lexer.py
@@ -0,0 +1,848 @@
+# -*- coding: utf-8 -*-
+"""Implements a Jinja / Python combination lexer. The ``Lexer`` class
+is used to do some preprocessing. It filters out invalid operators like
+the bitshift operators we don't allow in templates. It separates
+template code and python code in expressions.
+"""
+import re
+from ast import literal_eval
+from collections import deque
+from operator import itemgetter
+
+from ._compat import implements_iterator
+from ._compat import intern
+from ._compat import iteritems
+from ._compat import text_type
+from .exceptions import TemplateSyntaxError
+from .utils import LRUCache
+
+# cache for the lexers. Exists in order to be able to have multiple
+# environments with the same lexer
+_lexer_cache = LRUCache(50)
+
+# static regular expressions
+whitespace_re = re.compile(r"\s+", re.U)
+newline_re = re.compile(r"(\r\n|\r|\n)")
+string_re = re.compile(
+ r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S
+)
+integer_re = re.compile(r"(\d+_)*\d+")
+float_re = re.compile(
+ r"""
+ (?<!\.) # doesn't start with a .
+ (\d+_)*\d+ # digits, possibly _ separated
+ (
+ (\.(\d+_)*\d+)? # optional fractional part
+ e[+\-]?(\d+_)*\d+ # exponent part
+ |
+ \.(\d+_)*\d+ # required fractional part
+ )
+ """,
+ re.IGNORECASE | re.VERBOSE,
+)
+
+try:
+ # check if this Python supports Unicode identifiers
+ compile("föö", "<unknown>", "eval")
+except SyntaxError:
+ # Python 2, no Unicode support, use ASCII identifiers
+ name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*")
+ check_ident = False
+else:
+ # Unicode support, import generated re pattern and set flag to use
+ # str.isidentifier to validate during lexing.
+ from ._identifier import pattern as name_re
+
+ check_ident = True
+
+# intern the tokens and keep references to them
+TOKEN_ADD = intern("add")
+TOKEN_ASSIGN = intern("assign")
+TOKEN_COLON = intern("colon")
+TOKEN_COMMA = intern("comma")
+TOKEN_DIV = intern("div")
+TOKEN_DOT = intern("dot")
+TOKEN_EQ = intern("eq")
+TOKEN_FLOORDIV = intern("floordiv")
+TOKEN_GT = intern("gt")
+TOKEN_GTEQ = intern("gteq")
+TOKEN_LBRACE = intern("lbrace")
+TOKEN_LBRACKET = intern("lbracket")
+TOKEN_LPAREN = intern("lparen")
+TOKEN_LT = intern("lt")
+TOKEN_LTEQ = intern("lteq")
+TOKEN_MOD = intern("mod")
+TOKEN_MUL = intern("mul")
+TOKEN_NE = intern("ne")
+TOKEN_PIPE = intern("pipe")
+TOKEN_POW = intern("pow")
+TOKEN_RBRACE = intern("rbrace")
+TOKEN_RBRACKET = intern("rbracket")
+TOKEN_RPAREN = intern("rparen")
+TOKEN_SEMICOLON = intern("semicolon")
+TOKEN_SUB = intern("sub")
+TOKEN_TILDE = intern("tilde")
+TOKEN_WHITESPACE = intern("whitespace")
+TOKEN_FLOAT = intern("float")
+TOKEN_INTEGER = intern("integer")
+TOKEN_NAME = intern("name")
+TOKEN_STRING = intern("string")
+TOKEN_OPERATOR = intern("operator")
+TOKEN_BLOCK_BEGIN = intern("block_begin")
+TOKEN_BLOCK_END = intern("block_end")
+TOKEN_VARIABLE_BEGIN = intern("variable_begin")
+TOKEN_VARIABLE_END = intern("variable_end")
+TOKEN_RAW_BEGIN = intern("raw_begin")
+TOKEN_RAW_END = intern("raw_end")
+TOKEN_COMMENT_BEGIN = intern("comment_begin")
+TOKEN_COMMENT_END = intern("comment_end")
+TOKEN_COMMENT = intern("comment")
+TOKEN_LINESTATEMENT_BEGIN = intern("linestatement_begin")
+TOKEN_LINESTATEMENT_END = intern("linestatement_end")
+TOKEN_LINECOMMENT_BEGIN = intern("linecomment_begin")
+TOKEN_LINECOMMENT_END = intern("linecomment_end")
+TOKEN_LINECOMMENT = intern("linecomment")
+TOKEN_DATA = intern("data")
+TOKEN_INITIAL = intern("initial")
+TOKEN_EOF = intern("eof")
+
+# bind operators to token types
+operators = {
+ "+": TOKEN_ADD,
+ "-": TOKEN_SUB,
+ "/": TOKEN_DIV,
+ "//": TOKEN_FLOORDIV,
+ "*": TOKEN_MUL,
+ "%": TOKEN_MOD,
+ "**": TOKEN_POW,
+ "~": TOKEN_TILDE,
+ "[": TOKEN_LBRACKET,
+ "]": TOKEN_RBRACKET,
+ "(": TOKEN_LPAREN,
+ ")": TOKEN_RPAREN,
+ "{": TOKEN_LBRACE,
+ "}": TOKEN_RBRACE,
+ "==": TOKEN_EQ,
+ "!=": TOKEN_NE,
+ ">": TOKEN_GT,
+ ">=": TOKEN_GTEQ,
+ "<": TOKEN_LT,
+ "<=": TOKEN_LTEQ,
+ "=": TOKEN_ASSIGN,
+ ".": TOKEN_DOT,
+ ":": TOKEN_COLON,
+ "|": TOKEN_PIPE,
+ ",": TOKEN_COMMA,
+ ";": TOKEN_SEMICOLON,
+}
+
+reverse_operators = dict([(v, k) for k, v in iteritems(operators)])
+assert len(operators) == len(reverse_operators), "operators dropped"
+operator_re = re.compile(
+ "(%s)" % "|".join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))
+)
+
+ignored_tokens = frozenset(
+ [
+ TOKEN_COMMENT_BEGIN,
+ TOKEN_COMMENT,
+ TOKEN_COMMENT_END,
+ TOKEN_WHITESPACE,
+ TOKEN_LINECOMMENT_BEGIN,
+ TOKEN_LINECOMMENT_END,
+ TOKEN_LINECOMMENT,
+ ]
+)
+ignore_if_empty = frozenset(
+ [TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT, TOKEN_LINECOMMENT]
+)
+
+
+def _describe_token_type(token_type):
+ if token_type in reverse_operators:
+ return reverse_operators[token_type]
+ return {
+ TOKEN_COMMENT_BEGIN: "begin of comment",
+ TOKEN_COMMENT_END: "end of comment",
+ TOKEN_COMMENT: "comment",
+ TOKEN_LINECOMMENT: "comment",
+ TOKEN_BLOCK_BEGIN: "begin of statement block",
+ TOKEN_BLOCK_END: "end of statement block",
+ TOKEN_VARIABLE_BEGIN: "begin of print statement",
+ TOKEN_VARIABLE_END: "end of print statement",
+ TOKEN_LINESTATEMENT_BEGIN: "begin of line statement",
+ TOKEN_LINESTATEMENT_END: "end of line statement",
+ TOKEN_DATA: "template data / text",
+ TOKEN_EOF: "end of template",
+ }.get(token_type, token_type)
+
+
+def describe_token(token):
+ """Returns a description of the token."""
+ if token.type == TOKEN_NAME:
+ return token.value
+ return _describe_token_type(token.type)
+
+
+def describe_token_expr(expr):
+ """Like `describe_token` but for token expressions."""
+ if ":" in expr:
+ type, value = expr.split(":", 1)
+ if type == TOKEN_NAME:
+ return value
+ else:
+ type = expr
+ return _describe_token_type(type)
+
+
+def count_newlines(value):
+ """Count the number of newline characters in the string. This is
+ useful for extensions that filter a stream.
+ """
+ return len(newline_re.findall(value))
+
+
+def compile_rules(environment):
+ """Compiles all the rules from the environment into a list of rules."""
+ e = re.escape
+ rules = [
+ (
+ len(environment.comment_start_string),
+ TOKEN_COMMENT_BEGIN,
+ e(environment.comment_start_string),
+ ),
+ (
+ len(environment.block_start_string),
+ TOKEN_BLOCK_BEGIN,
+ e(environment.block_start_string),
+ ),
+ (
+ len(environment.variable_start_string),
+ TOKEN_VARIABLE_BEGIN,
+ e(environment.variable_start_string),
+ ),
+ ]
+
+ if environment.line_statement_prefix is not None:
+ rules.append(
+ (
+ len(environment.line_statement_prefix),
+ TOKEN_LINESTATEMENT_BEGIN,
+ r"^[ \t\v]*" + e(environment.line_statement_prefix),
+ )
+ )
+ if environment.line_comment_prefix is not None:
+ rules.append(
+ (
+ len(environment.line_comment_prefix),
+ TOKEN_LINECOMMENT_BEGIN,
+ r"(?:^|(?<=\S))[^\S\r\n]*" + e(environment.line_comment_prefix),
+ )
+ )
+
+ return [x[1:] for x in sorted(rules, reverse=True)]
+
+
+class Failure(object):
+ """Class that raises a `TemplateSyntaxError` if called.
+ Used by the `Lexer` to specify known errors.
+ """
+
+ def __init__(self, message, cls=TemplateSyntaxError):
+ self.message = message
+ self.error_class = cls
+
+ def __call__(self, lineno, filename):
+ raise self.error_class(self.message, lineno, filename)
+
+
+class Token(tuple):
+ """Token class."""
+
+ __slots__ = ()
+ lineno, type, value = (property(itemgetter(x)) for x in range(3))
+
+ def __new__(cls, lineno, type, value):
+ return tuple.__new__(cls, (lineno, intern(str(type)), value))
+
+ def __str__(self):
+ if self.type in reverse_operators:
+ return reverse_operators[self.type]
+ elif self.type == "name":
+ return self.value
+ return self.type
+
+ def test(self, expr):
+ """Test a token against a token expression. This can either be a
+ token type or ``'token_type:token_value'``. This can only test
+ against string values and types.
+ """
+ # here we do a regular string equality check as test_any is usually
+        # passed an iterable of non-interned strings.
+ if self.type == expr:
+ return True
+ elif ":" in expr:
+ return expr.split(":", 1) == [self.type, self.value]
+ return False
+
+ def test_any(self, *iterable):
+ """Test against multiple token expressions."""
+ for expr in iterable:
+ if self.test(expr):
+ return True
+ return False
+
+ def __repr__(self):
+ return "Token(%r, %r, %r)" % (self.lineno, self.type, self.value)
+
+
+@implements_iterator
+class TokenStreamIterator(object):
+ """The iterator for tokenstreams. Iterate over the stream
+ until the eof token is reached.
+ """
+
+ def __init__(self, stream):
+ self.stream = stream
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ token = self.stream.current
+ if token.type is TOKEN_EOF:
+ self.stream.close()
+ raise StopIteration()
+ next(self.stream)
+ return token
+
+
+@implements_iterator
+class TokenStream(object):
+ """A token stream is an iterable that yields :class:`Token`\\s. The
+ parser however does not iterate over it but calls :meth:`next` to go
+ one token ahead. The current active token is stored as :attr:`current`.
+ """
+
+ def __init__(self, generator, name, filename):
+ self._iter = iter(generator)
+ self._pushed = deque()
+ self.name = name
+ self.filename = filename
+ self.closed = False
+ self.current = Token(1, TOKEN_INITIAL, "")
+ next(self)
+
+ def __iter__(self):
+ return TokenStreamIterator(self)
+
+ def __bool__(self):
+ return bool(self._pushed) or self.current.type is not TOKEN_EOF
+
+ __nonzero__ = __bool__ # py2
+
+ @property
+ def eos(self):
+ """Are we at the end of the stream?"""
+ return not self
+
+ def push(self, token):
+ """Push a token back to the stream."""
+ self._pushed.append(token)
+
+ def look(self):
+ """Look at the next token."""
+ old_token = next(self)
+ result = self.current
+ self.push(result)
+ self.current = old_token
+ return result
+
+ def skip(self, n=1):
+ """Got n tokens ahead."""
+ for _ in range(n):
+ next(self)
+
+ def next_if(self, expr):
+ """Perform the token test and return the token if it matched.
+ Otherwise the return value is `None`.
+ """
+ if self.current.test(expr):
+ return next(self)
+
+ def skip_if(self, expr):
+ """Like :meth:`next_if` but only returns `True` or `False`."""
+ return self.next_if(expr) is not None
+
+ def __next__(self):
+ """Go one token ahead and return the old one.
+
+ Use the built-in :func:`next` instead of calling this directly.
+ """
+ rv = self.current
+ if self._pushed:
+ self.current = self._pushed.popleft()
+ elif self.current.type is not TOKEN_EOF:
+ try:
+ self.current = next(self._iter)
+ except StopIteration:
+ self.close()
+ return rv
+
+ def close(self):
+ """Close the stream."""
+ self.current = Token(self.current.lineno, TOKEN_EOF, "")
+ self._iter = None
+ self.closed = True
+
+ def expect(self, expr):
+ """Expect a given token type and return it. This accepts the same
+ argument as :meth:`jinja2.lexer.Token.test`.
+ """
+ if not self.current.test(expr):
+ expr = describe_token_expr(expr)
+ if self.current.type is TOKEN_EOF:
+ raise TemplateSyntaxError(
+ "unexpected end of template, expected %r." % expr,
+ self.current.lineno,
+ self.name,
+ self.filename,
+ )
+ raise TemplateSyntaxError(
+ "expected token %r, got %r" % (expr, describe_token(self.current)),
+ self.current.lineno,
+ self.name,
+ self.filename,
+ )
+ try:
+ return self.current
+ finally:
+ next(self)
+
+
+def get_lexer(environment):
+ """Return a lexer which is probably cached."""
+ key = (
+ environment.block_start_string,
+ environment.block_end_string,
+ environment.variable_start_string,
+ environment.variable_end_string,
+ environment.comment_start_string,
+ environment.comment_end_string,
+ environment.line_statement_prefix,
+ environment.line_comment_prefix,
+ environment.trim_blocks,
+ environment.lstrip_blocks,
+ environment.newline_sequence,
+ environment.keep_trailing_newline,
+ )
+ lexer = _lexer_cache.get(key)
+ if lexer is None:
+ lexer = Lexer(environment)
+ _lexer_cache[key] = lexer
+ return lexer
+
+
+class OptionalLStrip(tuple):
+ """A special tuple for marking a point in the state that can have
+ lstrip applied.
+ """
+
+ __slots__ = ()
+
+ # Even though it looks like a no-op, creating instances fails
+ # without this.
+ def __new__(cls, *members, **kwargs):
+ return super(OptionalLStrip, cls).__new__(cls, members)
+
+
+class Lexer(object):
+ """Class that implements a lexer for a given environment. Automatically
+ created by the environment class, usually you don't have to do that.
+
+ Note that the lexer is not automatically bound to an environment.
+ Multiple environments can share the same lexer.
+ """
+
+ def __init__(self, environment):
+ # shortcuts
+ e = re.escape
+
+ def c(x):
+ return re.compile(x, re.M | re.S)
+
+ # lexing rules for tags
+ tag_rules = [
+ (whitespace_re, TOKEN_WHITESPACE, None),
+ (float_re, TOKEN_FLOAT, None),
+ (integer_re, TOKEN_INTEGER, None),
+ (name_re, TOKEN_NAME, None),
+ (string_re, TOKEN_STRING, None),
+ (operator_re, TOKEN_OPERATOR, None),
+ ]
+
+ # assemble the root lexing rule. because "|" is ungreedy
+ # we have to sort by length so that the lexer continues working
+ # as expected when we have parsing rules like <% for block and
+ # <%= for variables. (if someone wants asp like syntax)
+ # variables are just part of the rules if variable processing
+ # is required.
+ root_tag_rules = compile_rules(environment)
+
+ # block suffix if trimming is enabled
+ block_suffix_re = environment.trim_blocks and "\\n?" or ""
+
+ # If lstrip is enabled, it should not be applied if there is any
+ # non-whitespace between the newline and block.
+ self.lstrip_unless_re = c(r"[^ \t]") if environment.lstrip_blocks else None
+
+ self.newline_sequence = environment.newline_sequence
+ self.keep_trailing_newline = environment.keep_trailing_newline
+
+ # global lexing rules
+ self.rules = {
+ "root": [
+ # directives
+ (
+ c(
+ "(.*?)(?:%s)"
+ % "|".join(
+ [
+ r"(?P<raw_begin>%s(\-|\+|)\s*raw\s*(?:\-%s\s*|%s))"
+ % (
+ e(environment.block_start_string),
+ e(environment.block_end_string),
+ e(environment.block_end_string),
+ )
+ ]
+ + [
+ r"(?P<%s>%s(\-|\+|))" % (n, r)
+ for n, r in root_tag_rules
+ ]
+ )
+ ),
+ OptionalLStrip(TOKEN_DATA, "#bygroup"),
+ "#bygroup",
+ ),
+ # data
+ (c(".+"), TOKEN_DATA, None),
+ ],
+ # comments
+ TOKEN_COMMENT_BEGIN: [
+ (
+ c(
+ r"(.*?)((?:\-%s\s*|%s)%s)"
+ % (
+ e(environment.comment_end_string),
+ e(environment.comment_end_string),
+ block_suffix_re,
+ )
+ ),
+ (TOKEN_COMMENT, TOKEN_COMMENT_END),
+ "#pop",
+ ),
+ (c("(.)"), (Failure("Missing end of comment tag"),), None),
+ ],
+ # blocks
+ TOKEN_BLOCK_BEGIN: [
+ (
+ c(
+ r"(?:\-%s\s*|%s)%s"
+ % (
+ e(environment.block_end_string),
+ e(environment.block_end_string),
+ block_suffix_re,
+ )
+ ),
+ TOKEN_BLOCK_END,
+ "#pop",
+ ),
+ ]
+ + tag_rules,
+ # variables
+ TOKEN_VARIABLE_BEGIN: [
+ (
+ c(
+ r"\-%s\s*|%s"
+ % (
+ e(environment.variable_end_string),
+ e(environment.variable_end_string),
+ )
+ ),
+ TOKEN_VARIABLE_END,
+ "#pop",
+ )
+ ]
+ + tag_rules,
+ # raw block
+ TOKEN_RAW_BEGIN: [
+ (
+ c(
+ r"(.*?)((?:%s(\-|\+|))\s*endraw\s*(?:\-%s\s*|%s%s))"
+ % (
+ e(environment.block_start_string),
+ e(environment.block_end_string),
+ e(environment.block_end_string),
+ block_suffix_re,
+ )
+ ),
+ OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END),
+ "#pop",
+ ),
+ (c("(.)"), (Failure("Missing end of raw directive"),), None),
+ ],
+ # line statements
+ TOKEN_LINESTATEMENT_BEGIN: [
+ (c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop")
+ ]
+ + tag_rules,
+ # line comments
+ TOKEN_LINECOMMENT_BEGIN: [
+ (
+ c(r"(.*?)()(?=\n|$)"),
+ (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END),
+ "#pop",
+ )
+ ],
+ }
+
+ def _normalize_newlines(self, value):
+ """Called for strings and template data to normalize it to unicode."""
+ return newline_re.sub(self.newline_sequence, value)
+
+ def tokenize(self, source, name=None, filename=None, state=None):
+ """Calls tokeniter + tokenize and wraps it in a token stream."""
+ stream = self.tokeniter(source, name, filename, state)
+ return TokenStream(self.wrap(stream, name, filename), name, filename)
+
+ def wrap(self, stream, name=None, filename=None):
+ """This is called with the stream as returned by `tokenize` and wraps
+ every token in a :class:`Token` and converts the value.
+ """
+ for lineno, token, value in stream:
+ if token in ignored_tokens:
+ continue
+ elif token == TOKEN_LINESTATEMENT_BEGIN:
+ token = TOKEN_BLOCK_BEGIN
+ elif token == TOKEN_LINESTATEMENT_END:
+ token = TOKEN_BLOCK_END
+ # we are not interested in those tokens in the parser
+ elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END):
+ continue
+ elif token == TOKEN_DATA:
+ value = self._normalize_newlines(value)
+ elif token == "keyword":
+ token = value
+ elif token == TOKEN_NAME:
+ value = str(value)
+ if check_ident and not value.isidentifier():
+ raise TemplateSyntaxError(
+ "Invalid character in identifier", lineno, name, filename
+ )
+ elif token == TOKEN_STRING:
+ # try to unescape string
+ try:
+ value = (
+ self._normalize_newlines(value[1:-1])
+ .encode("ascii", "backslashreplace")
+ .decode("unicode-escape")
+ )
+ except Exception as e:
+ msg = str(e).split(":")[-1].strip()
+ raise TemplateSyntaxError(msg, lineno, name, filename)
+ elif token == TOKEN_INTEGER:
+ value = int(value.replace("_", ""))
+ elif token == TOKEN_FLOAT:
+ # remove all "_" first to support more Python versions
+ value = literal_eval(value.replace("_", ""))
+ elif token == TOKEN_OPERATOR:
+ token = operators[value]
+ yield Token(lineno, token, value)
+
+ def tokeniter(self, source, name, filename=None, state=None):
+ """This method tokenizes the text and returns the tokens in a
+ generator. Use this method if you just want to tokenize a template.
+ """
+ source = text_type(source)
+ lines = source.splitlines()
+ if self.keep_trailing_newline and source:
+ for newline in ("\r\n", "\r", "\n"):
+ if source.endswith(newline):
+ lines.append("")
+ break
+ source = "\n".join(lines)
+ pos = 0
+ lineno = 1
+ stack = ["root"]
+ if state is not None and state != "root":
+ assert state in ("variable", "block"), "invalid state"
+ stack.append(state + "_begin")
+ statetokens = self.rules[stack[-1]]
+ source_length = len(source)
+ balancing_stack = []
+ lstrip_unless_re = self.lstrip_unless_re
+ newlines_stripped = 0
+ line_starting = True
+
+ while 1:
+ # tokenizer loop
+ for regex, tokens, new_state in statetokens:
+ m = regex.match(source, pos)
+ # if no match we try again with the next rule
+ if m is None:
+ continue
+
+ # we only match blocks and variables if braces / parentheses
+ # are balanced. continue parsing with the lower rule which
+ # is the operator rule. do this only if the end tags look
+ # like operators
+ if balancing_stack and tokens in (
+ TOKEN_VARIABLE_END,
+ TOKEN_BLOCK_END,
+ TOKEN_LINESTATEMENT_END,
+ ):
+ continue
+
+ # tuples support more options
+ if isinstance(tokens, tuple):
+ groups = m.groups()
+
+ if isinstance(tokens, OptionalLStrip):
+ # Rule supports lstrip. Match will look like
+ # text, block type, whitespace control, type, control, ...
+ text = groups[0]
+
+ # Skipping the text and first type, every other group is the
+ # whitespace control for each type. One of the groups will be
+ # -, +, or empty string instead of None.
+ strip_sign = next(g for g in groups[2::2] if g is not None)
+
+ if strip_sign == "-":
+ # Strip all whitespace between the text and the tag.
+ stripped = text.rstrip()
+ newlines_stripped = text[len(stripped) :].count("\n")
+ groups = (stripped,) + groups[1:]
+ elif (
+ # Not marked for preserving whitespace.
+ strip_sign != "+"
+ # lstrip is enabled.
+ and lstrip_unless_re is not None
+ # Not a variable expression.
+ and not m.groupdict().get(TOKEN_VARIABLE_BEGIN)
+ ):
+ # The start of text between the last newline and the tag.
+ l_pos = text.rfind("\n") + 1
+ if l_pos > 0 or line_starting:
+ # If there's only whitespace between the newline and the
+ # tag, strip it.
+ if not lstrip_unless_re.search(text, l_pos):
+ groups = (text[:l_pos],) + groups[1:]
+
+ for idx, token in enumerate(tokens):
+ # failure group
+ if token.__class__ is Failure:
+ raise token(lineno, filename)
+ # bygroup is a bit more complex, in that case we
+ # yield for the current token the first named
+ # group that matched
+ elif token == "#bygroup":
+ for key, value in iteritems(m.groupdict()):
+ if value is not None:
+ yield lineno, key, value
+ lineno += value.count("\n")
+ break
+ else:
+ raise RuntimeError(
+ "%r wanted to resolve "
+ "the token dynamically"
+ " but no group matched" % regex
+ )
+ # normal group
+ else:
+ data = groups[idx]
+ if data or token not in ignore_if_empty:
+ yield lineno, token, data
+ lineno += data.count("\n") + newlines_stripped
+ newlines_stripped = 0
+
+                # strings as tokens are just yielded as-is.
+ else:
+ data = m.group()
+ # update brace/parentheses balance
+ if tokens == TOKEN_OPERATOR:
+ if data == "{":
+ balancing_stack.append("}")
+ elif data == "(":
+ balancing_stack.append(")")
+ elif data == "[":
+ balancing_stack.append("]")
+ elif data in ("}", ")", "]"):
+ if not balancing_stack:
+ raise TemplateSyntaxError(
+ "unexpected '%s'" % data, lineno, name, filename
+ )
+ expected_op = balancing_stack.pop()
+ if expected_op != data:
+ raise TemplateSyntaxError(
+ "unexpected '%s', "
+ "expected '%s'" % (data, expected_op),
+ lineno,
+ name,
+ filename,
+ )
+ # yield items
+ if data or tokens not in ignore_if_empty:
+ yield lineno, tokens, data
+ lineno += data.count("\n")
+
+ line_starting = m.group()[-1:] == "\n"
+
+ # fetch new position into new variable so that we can check
+                # if there is an internal parsing error which would result
+ # in an infinite loop
+ pos2 = m.end()
+
+ # handle state changes
+ if new_state is not None:
+ # remove the uppermost state
+ if new_state == "#pop":
+ stack.pop()
+ # resolve the new state by group checking
+ elif new_state == "#bygroup":
+ for key, value in iteritems(m.groupdict()):
+ if value is not None:
+ stack.append(key)
+ break
+ else:
+ raise RuntimeError(
+ "%r wanted to resolve the "
+ "new state dynamically but"
+ " no group matched" % regex
+ )
+ # direct state name given
+ else:
+ stack.append(new_state)
+ statetokens = self.rules[stack[-1]]
+ # we are still at the same position and no stack change.
+ # this means a loop without break condition, avoid that and
+ # raise error
+ elif pos2 == pos:
+ raise RuntimeError(
+ "%r yielded empty string without stack change" % regex
+ )
+                # publish the new position and start again
+ pos = pos2
+ break
+ # if loop terminated without break we haven't found a single match
+ # either we are at the end of the file or we have a problem
+ else:
+ # end of text
+ if pos >= source_length:
+ return
+ # something went wrong
+ raise TemplateSyntaxError(
+ "unexpected char %r at %d" % (source[pos], pos),
+ lineno,
+ name,
+ filename,
+ )
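In normal use the lexer above is driven through the environment rather than instantiated directly. A small sketch follows; the token types noted in the comment are an approximation, not a verified transcript:

from jinja2 import Environment

env = Environment()
source = "Hello {{ name }}!"

# Environment.lex() drives Lexer.tokeniter()/wrap() above and yields
# (lineno, token_type, value) tuples with ignored tokens filtered out.
for lineno, token_type, value in env.lex(source):
    print(lineno, token_type, repr(value))

# Roughly: data, variable_begin, name, variable_end, data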
diff --git a/third_party/python/Jinja2/src/jinja2/loaders.py b/third_party/python/Jinja2/src/jinja2/loaders.py
new file mode 100644
index 0000000000..457c4b59a7
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/loaders.py
@@ -0,0 +1,504 @@
+# -*- coding: utf-8 -*-
+"""API and implementations for loading templates from different data
+sources.
+"""
+import os
+import sys
+import weakref
+from hashlib import sha1
+from os import path
+from types import ModuleType
+
+from ._compat import abc
+from ._compat import fspath
+from ._compat import iteritems
+from ._compat import string_types
+from .exceptions import TemplateNotFound
+from .utils import internalcode
+from .utils import open_if_exists
+
+
+def split_template_path(template):
+ """Split a path into segments and perform a sanity check. If it detects
+ '..' in the path it will raise a `TemplateNotFound` error.
+ """
+ pieces = []
+ for piece in template.split("/"):
+ if (
+ path.sep in piece
+ or (path.altsep and path.altsep in piece)
+ or piece == path.pardir
+ ):
+ raise TemplateNotFound(template)
+ elif piece and piece != ".":
+ pieces.append(piece)
+ return pieces
+
+
+class BaseLoader(object):
+ """Baseclass for all loaders. Subclass this and override `get_source` to
+ implement a custom loading mechanism. The environment provides a
+ `get_template` method that calls the loader's `load` method to get the
+ :class:`Template` object.
+
+ A very basic example for a loader that looks up templates on the file
+ system could look like this::
+
+ from jinja2 import BaseLoader, TemplateNotFound
+ from os.path import join, exists, getmtime
+
+ class MyLoader(BaseLoader):
+
+ def __init__(self, path):
+ self.path = path
+
+ def get_source(self, environment, template):
+ path = join(self.path, template)
+ if not exists(path):
+ raise TemplateNotFound(template)
+ mtime = getmtime(path)
+ with file(path) as f:
+ source = f.read().decode('utf-8')
+ return source, path, lambda: mtime == getmtime(path)
+ """
+
+ #: if set to `False` it indicates that the loader cannot provide access
+ #: to the source of templates.
+ #:
+ #: .. versionadded:: 2.4
+ has_source_access = True
+
+ def get_source(self, environment, template):
+ """Get the template source, filename and reload helper for a template.
+ It's passed the environment and template name and has to return a
+ tuple in the form ``(source, filename, uptodate)`` or raise a
+ `TemplateNotFound` error if it can't locate the template.
+
+ The source part of the returned tuple must be the source of the
+        template as a unicode string or an ASCII bytestring. The filename should
+ be the name of the file on the filesystem if it was loaded from there,
+ otherwise `None`. The filename is used by python for the tracebacks
+ if no loader extension is used.
+
+ The last item in the tuple is the `uptodate` function. If auto
+ reloading is enabled it's always called to check if the template
+ changed. No arguments are passed so the function must store the
+ old state somewhere (for example in a closure). If it returns `False`
+ the template will be reloaded.
+ """
+ if not self.has_source_access:
+ raise RuntimeError(
+ "%s cannot provide access to the source" % self.__class__.__name__
+ )
+ raise TemplateNotFound(template)
+
+ def list_templates(self):
+ """Iterates over all templates. If the loader does not support that
+ it should raise a :exc:`TypeError` which is the default behavior.
+ """
+ raise TypeError("this loader cannot iterate over all templates")
+
+ @internalcode
+ def load(self, environment, name, globals=None):
+ """Loads a template. This method looks up the template in the cache
+ or loads one by calling :meth:`get_source`. Subclasses should not
+ override this method as loaders working on collections of other
+ loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
+ will not call this method but `get_source` directly.
+ """
+ code = None
+ if globals is None:
+ globals = {}
+
+ # first we try to get the source for this template together
+ # with the filename and the uptodate function.
+ source, filename, uptodate = self.get_source(environment, name)
+
+ # try to load the code from the bytecode cache if there is a
+ # bytecode cache configured.
+ bcc = environment.bytecode_cache
+ if bcc is not None:
+ bucket = bcc.get_bucket(environment, name, filename, source)
+ code = bucket.code
+
+ # if we don't have code so far (not cached, no longer up to
+ # date) etc. we compile the template
+ if code is None:
+ code = environment.compile(source, name, filename)
+
+ # if the bytecode cache is available and the bucket doesn't
+ # have a code so far, we give the bucket the new code and put
+ # it back to the bytecode cache.
+ if bcc is not None and bucket.code is None:
+ bucket.code = code
+ bcc.set_bucket(bucket)
+
+ return environment.template_class.from_code(
+ environment, code, globals, uptodate
+ )
+
+
+class FileSystemLoader(BaseLoader):
+ """Loads templates from the file system. This loader can find templates
+ in folders on the file system and is the preferred way to load them.
+
+ The loader takes the path to the templates as string, or if multiple
+ locations are wanted a list of them which is then looked up in the
+ given order::
+
+ >>> loader = FileSystemLoader('/path/to/templates')
+ >>> loader = FileSystemLoader(['/path/to/templates', '/other/path'])
+
+ Per default the template encoding is ``'utf-8'`` which can be changed
+ by setting the `encoding` parameter to something else.
+
+ To follow symbolic links, set the *followlinks* parameter to ``True``::
+
+ >>> loader = FileSystemLoader('/path/to/templates', followlinks=True)
+
+ .. versionchanged:: 2.8
+ The ``followlinks`` parameter was added.
+ """
+
+ def __init__(self, searchpath, encoding="utf-8", followlinks=False):
+ if not isinstance(searchpath, abc.Iterable) or isinstance(
+ searchpath, string_types
+ ):
+ searchpath = [searchpath]
+
+ # In Python 3.5, os.path.join doesn't support Path. This can be
+ # simplified to list(searchpath) when Python 3.5 is dropped.
+ self.searchpath = [fspath(p) for p in searchpath]
+
+ self.encoding = encoding
+ self.followlinks = followlinks
+
+ def get_source(self, environment, template):
+ pieces = split_template_path(template)
+ for searchpath in self.searchpath:
+ filename = path.join(searchpath, *pieces)
+ f = open_if_exists(filename)
+ if f is None:
+ continue
+ try:
+ contents = f.read().decode(self.encoding)
+ finally:
+ f.close()
+
+ mtime = path.getmtime(filename)
+
+ def uptodate():
+ try:
+ return path.getmtime(filename) == mtime
+ except OSError:
+ return False
+
+ return contents, filename, uptodate
+ raise TemplateNotFound(template)
+
+ def list_templates(self):
+ found = set()
+ for searchpath in self.searchpath:
+ walk_dir = os.walk(searchpath, followlinks=self.followlinks)
+ for dirpath, _, filenames in walk_dir:
+ for filename in filenames:
+ template = (
+ os.path.join(dirpath, filename)[len(searchpath) :]
+ .strip(os.path.sep)
+ .replace(os.path.sep, "/")
+ )
+ if template[:2] == "./":
+ template = template[2:]
+ if template not in found:
+ found.add(template)
+ return sorted(found)
+
+
+class PackageLoader(BaseLoader):
+ """Load templates from python eggs or packages. It is constructed with
+ the name of the python package and the path to the templates in that
+ package::
+
+ loader = PackageLoader('mypackage', 'views')
+
+ If the package path is not given, ``'templates'`` is assumed.
+
+ Per default the template encoding is ``'utf-8'`` which can be changed
+ by setting the `encoding` parameter to something else. Due to the nature
+ of eggs it's only possible to reload templates if the package was loaded
+ from the file system and not a zip file.
+ """
+
+ def __init__(self, package_name, package_path="templates", encoding="utf-8"):
+ from pkg_resources import DefaultProvider
+ from pkg_resources import get_provider
+ from pkg_resources import ResourceManager
+
+ provider = get_provider(package_name)
+ self.encoding = encoding
+ self.manager = ResourceManager()
+ self.filesystem_bound = isinstance(provider, DefaultProvider)
+ self.provider = provider
+ self.package_path = package_path
+
+ def get_source(self, environment, template):
+ pieces = split_template_path(template)
+ p = "/".join((self.package_path,) + tuple(pieces))
+
+ if not self.provider.has_resource(p):
+ raise TemplateNotFound(template)
+
+ filename = uptodate = None
+
+ if self.filesystem_bound:
+ filename = self.provider.get_resource_filename(self.manager, p)
+ mtime = path.getmtime(filename)
+
+ def uptodate():
+ try:
+ return path.getmtime(filename) == mtime
+ except OSError:
+ return False
+
+ source = self.provider.get_resource_string(self.manager, p)
+ return source.decode(self.encoding), filename, uptodate
+
+ def list_templates(self):
+ path = self.package_path
+
+ if path[:2] == "./":
+ path = path[2:]
+ elif path == ".":
+ path = ""
+
+ offset = len(path)
+ results = []
+
+ def _walk(path):
+ for filename in self.provider.resource_listdir(path):
+ fullname = path + "/" + filename
+
+ if self.provider.resource_isdir(fullname):
+ _walk(fullname)
+ else:
+ results.append(fullname[offset:].lstrip("/"))
+
+ _walk(path)
+ results.sort()
+ return results
+
+
+class DictLoader(BaseLoader):
+ """Loads a template from a python dict. It's passed a dict of unicode
+ strings bound to template names. This loader is useful for unittesting:
+
+ >>> loader = DictLoader({'index.html': 'source here'})
+
+ Because auto reloading is rarely useful this is disabled per default.
+ """
+
+ def __init__(self, mapping):
+ self.mapping = mapping
+
+ def get_source(self, environment, template):
+ if template in self.mapping:
+ source = self.mapping[template]
+ return source, None, lambda: source == self.mapping.get(template)
+ raise TemplateNotFound(template)
+
+ def list_templates(self):
+ return sorted(self.mapping)
+
+
+class FunctionLoader(BaseLoader):
+ """A loader that is passed a function which does the loading. The
+ function receives the name of the template and has to return either
+    a unicode string with the template source, a tuple in the form ``(source,
+ filename, uptodatefunc)`` or `None` if the template does not exist.
+
+ >>> def load_template(name):
+ ... if name == 'index.html':
+ ... return '...'
+ ...
+ >>> loader = FunctionLoader(load_template)
+
+ The `uptodatefunc` is a function that is called if autoreload is enabled
+ and has to return `True` if the template is still up to date. For more
+ details have a look at :meth:`BaseLoader.get_source` which has the same
+ return value.
+ """
+
+ def __init__(self, load_func):
+ self.load_func = load_func
+
+ def get_source(self, environment, template):
+ rv = self.load_func(template)
+ if rv is None:
+ raise TemplateNotFound(template)
+ elif isinstance(rv, string_types):
+ return rv, None, None
+ return rv
+
+
+class PrefixLoader(BaseLoader):
+ """A loader that is passed a dict of loaders where each loader is bound
+ to a prefix. The prefix is delimited from the template by a slash per
+ default, which can be changed by setting the `delimiter` argument to
+ something else::
+
+ loader = PrefixLoader({
+ 'app1': PackageLoader('mypackage.app1'),
+ 'app2': PackageLoader('mypackage.app2')
+ })
+
+ By loading ``'app1/index.html'`` the file from the app1 package is loaded,
+ by loading ``'app2/index.html'`` the file from the second.
+ """
+
+ def __init__(self, mapping, delimiter="/"):
+ self.mapping = mapping
+ self.delimiter = delimiter
+
+ def get_loader(self, template):
+ try:
+ prefix, name = template.split(self.delimiter, 1)
+ loader = self.mapping[prefix]
+ except (ValueError, KeyError):
+ raise TemplateNotFound(template)
+ return loader, name
+
+ def get_source(self, environment, template):
+ loader, name = self.get_loader(template)
+ try:
+ return loader.get_source(environment, name)
+ except TemplateNotFound:
+ # re-raise the exception with the correct filename here.
+ # (the one that includes the prefix)
+ raise TemplateNotFound(template)
+
+ @internalcode
+ def load(self, environment, name, globals=None):
+ loader, local_name = self.get_loader(name)
+ try:
+ return loader.load(environment, local_name, globals)
+ except TemplateNotFound:
+ # re-raise the exception with the correct filename here.
+ # (the one that includes the prefix)
+ raise TemplateNotFound(name)
+
+ def list_templates(self):
+ result = []
+ for prefix, loader in iteritems(self.mapping):
+ for template in loader.list_templates():
+ result.append(prefix + self.delimiter + template)
+ return result
+
+
+class ChoiceLoader(BaseLoader):
+ """This loader works like the `PrefixLoader` just that no prefix is
+ specified. If a template could not be found by one loader the next one
+ is tried.
+
+ >>> loader = ChoiceLoader([
+ ... FileSystemLoader('/path/to/user/templates'),
+ ... FileSystemLoader('/path/to/system/templates')
+ ... ])
+
+ This is useful if you want to allow users to override builtin templates
+ from a different location.
+ """
+
+ def __init__(self, loaders):
+ self.loaders = loaders
+
+ def get_source(self, environment, template):
+ for loader in self.loaders:
+ try:
+ return loader.get_source(environment, template)
+ except TemplateNotFound:
+ pass
+ raise TemplateNotFound(template)
+
+ @internalcode
+ def load(self, environment, name, globals=None):
+ for loader in self.loaders:
+ try:
+ return loader.load(environment, name, globals)
+ except TemplateNotFound:
+ pass
+ raise TemplateNotFound(name)
+
+ def list_templates(self):
+ found = set()
+ for loader in self.loaders:
+ found.update(loader.list_templates())
+ return sorted(found)
+
+
+class _TemplateModule(ModuleType):
+ """Like a normal module but with support for weak references"""
+
+
+class ModuleLoader(BaseLoader):
+ """This loader loads templates from precompiled templates.
+
+ Example usage:
+
+ >>> loader = ChoiceLoader([
+ ... ModuleLoader('/path/to/compiled/templates'),
+ ... FileSystemLoader('/path/to/templates')
+ ... ])
+
+ Templates can be precompiled with :meth:`Environment.compile_templates`.
+ """
+
+ has_source_access = False
+
+ def __init__(self, path):
+ package_name = "_jinja2_module_templates_%x" % id(self)
+
+ # create a fake module that looks for the templates in the
+ # path given.
+ mod = _TemplateModule(package_name)
+
+ if not isinstance(path, abc.Iterable) or isinstance(path, string_types):
+ path = [path]
+
+ mod.__path__ = [fspath(p) for p in path]
+
+ sys.modules[package_name] = weakref.proxy(
+ mod, lambda x: sys.modules.pop(package_name, None)
+ )
+
+ # the only strong reference, the sys.modules entry is weak
+ # so that the garbage collector can remove it once the
+ # loader that created it goes out of business.
+ self.module = mod
+ self.package_name = package_name
+
+ @staticmethod
+ def get_template_key(name):
+ return "tmpl_" + sha1(name.encode("utf-8")).hexdigest()
+
+ @staticmethod
+ def get_module_filename(name):
+ return ModuleLoader.get_template_key(name) + ".py"
+
+ @internalcode
+ def load(self, environment, name, globals=None):
+ key = self.get_template_key(name)
+ module = "%s.%s" % (self.package_name, key)
+ mod = getattr(self.module, module, None)
+ if mod is None:
+ try:
+ mod = __import__(module, None, None, ["root"])
+ except ImportError:
+ raise TemplateNotFound(name)
+
+ # remove the entry from sys.modules, we only want the attribute
+ # on the module object we have stored on the loader.
+ sys.modules.pop(module, None)
+
+ return environment.template_class.from_module_dict(
+ environment, mod.__dict__, globals
+ )
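The loaders above are designed to compose. A brief sketch of typical wiring (the filesystem paths are placeholders):

from jinja2 import ChoiceLoader, DictLoader, Environment, FileSystemLoader, PrefixLoader

# In-memory templates override the ones on disk; the prefix loader namespaces
# a second application under "app2/".
loader = PrefixLoader({
    "app1": ChoiceLoader([
        DictLoader({"index.html": "Hello {{ user }}!"}),
        FileSystemLoader("/path/to/templates"),    # placeholder path
    ]),
    "app2": FileSystemLoader("/other/templates"),  # placeholder path
})

env = Environment(loader=loader)
print(env.get_template("app1/index.html").render(user="world"))  # Hello world!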
diff --git a/third_party/python/Jinja2/src/jinja2/meta.py b/third_party/python/Jinja2/src/jinja2/meta.py
new file mode 100644
index 0000000000..3795aace59
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/meta.py
@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+"""Functions that expose information about templates that might be
+interesting for introspection.
+"""
+from . import nodes
+from ._compat import iteritems
+from ._compat import string_types
+from .compiler import CodeGenerator
+
+
+class TrackingCodeGenerator(CodeGenerator):
+ """We abuse the code generator for introspection."""
+
+ def __init__(self, environment):
+ CodeGenerator.__init__(self, environment, "<introspection>", "<introspection>")
+ self.undeclared_identifiers = set()
+
+ def write(self, x):
+ """Don't write."""
+
+ def enter_frame(self, frame):
+ """Remember all undeclared identifiers."""
+ CodeGenerator.enter_frame(self, frame)
+ for _, (action, param) in iteritems(frame.symbols.loads):
+ if action == "resolve" and param not in self.environment.globals:
+ self.undeclared_identifiers.add(param)
+
+
+def find_undeclared_variables(ast):
+ """Returns a set of all variables in the AST that will be looked up from
+ the context at runtime. Because at compile time it's not known which
+ variables will be used depending on the path the execution takes at
+ runtime, all variables are returned.
+
+ >>> from jinja2 import Environment, meta
+ >>> env = Environment()
+ >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
+ >>> meta.find_undeclared_variables(ast) == set(['bar'])
+ True
+
+ .. admonition:: Implementation
+
+ Internally the code generator is used for finding undeclared variables.
+ This is good to know because the code generator might raise a
+ :exc:`TemplateAssertionError` during compilation and as a matter of
+ fact this function can currently raise that exception as well.
+ """
+ codegen = TrackingCodeGenerator(ast.environment)
+ codegen.visit(ast)
+ return codegen.undeclared_identifiers
+
+
+def find_referenced_templates(ast):
+ """Finds all the referenced templates from the AST. This will return an
+ iterator over all the hardcoded template extensions, inclusions and
+ imports. If dynamic inheritance or inclusion is used, `None` will be
+ yielded.
+
+ >>> from jinja2 import Environment, meta
+ >>> env = Environment()
+ >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
+ >>> list(meta.find_referenced_templates(ast))
+ ['layout.html', None]
+
+ This function is useful for dependency tracking. For example if you want
+ to rebuild parts of the website after a layout template has changed.
+ """
+ for node in ast.find_all(
+ (nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include)
+ ):
+ if not isinstance(node.template, nodes.Const):
+ # a tuple with some non consts in there
+ if isinstance(node.template, (nodes.Tuple, nodes.List)):
+ for template_name in node.template.items:
+ # something const, only yield the strings and ignore
+ # non-string consts that really just make no sense
+ if isinstance(template_name, nodes.Const):
+ if isinstance(template_name.value, string_types):
+ yield template_name.value
+ # something dynamic in there
+ else:
+ yield None
+ # something dynamic we don't know about here
+ else:
+ yield None
+ continue
+ # constant is a basestring, direct template name
+ if isinstance(node.template.value, string_types):
+ yield node.template.value
+ # a tuple or list (latter *should* not happen) made of consts,
+ # yield the consts that are strings. We could warn here for
+ # non string values
+ elif isinstance(node, nodes.Include) and isinstance(
+ node.template.value, (tuple, list)
+ ):
+ for template_name in node.template.value:
+ if isinstance(template_name, string_types):
+ yield template_name
+ # something else we don't care about, we could warn here
+ else:
+ yield None
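Both helpers operate on an AST produced by ``Environment.parse``, as in this short sketch:

from jinja2 import Environment, meta

env = Environment()

ast = env.parse("{% set foo = 42 %}{{ foo + bar }}")
# 'bar' is looked up in the context at runtime, 'foo' is assigned in-template.
print(meta.find_undeclared_variables(ast))        # {'bar'}

ast = env.parse('{% extends "layout.html" %}{% include helper %}')
# Hardcoded references are yielded by name, dynamic ones as None.
print(list(meta.find_referenced_templates(ast)))  # ['layout.html', None]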
diff --git a/third_party/python/Jinja2/src/jinja2/nativetypes.py b/third_party/python/Jinja2/src/jinja2/nativetypes.py
new file mode 100644
index 0000000000..a9ead4e2bb
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/nativetypes.py
@@ -0,0 +1,94 @@
+from ast import literal_eval
+from itertools import chain
+from itertools import islice
+
+from . import nodes
+from ._compat import text_type
+from .compiler import CodeGenerator
+from .compiler import has_safe_repr
+from .environment import Environment
+from .environment import Template
+
+
+def native_concat(nodes):
+ """Return a native Python type from the list of compiled nodes. If
+ the result is a single node, its value is returned. Otherwise, the
+ nodes are concatenated as strings. If the result can be parsed with
+ :func:`ast.literal_eval`, the parsed value is returned. Otherwise,
+ the string is returned.
+
+ :param nodes: Iterable of nodes to concatenate.
+ """
+ head = list(islice(nodes, 2))
+
+ if not head:
+ return None
+
+ if len(head) == 1:
+ raw = head[0]
+ else:
+ raw = u"".join([text_type(v) for v in chain(head, nodes)])
+
+ try:
+ return literal_eval(raw)
+ except (ValueError, SyntaxError, MemoryError):
+ return raw
+
+
+class NativeCodeGenerator(CodeGenerator):
+ """A code generator which renders Python types by not adding
+ ``to_string()`` around output nodes.
+ """
+
+ @staticmethod
+ def _default_finalize(value):
+ return value
+
+ def _output_const_repr(self, group):
+ return repr(u"".join([text_type(v) for v in group]))
+
+ def _output_child_to_const(self, node, frame, finalize):
+ const = node.as_const(frame.eval_ctx)
+
+ if not has_safe_repr(const):
+ raise nodes.Impossible()
+
+ if isinstance(node, nodes.TemplateData):
+ return const
+
+ return finalize.const(const)
+
+ def _output_child_pre(self, node, frame, finalize):
+ if finalize.src is not None:
+ self.write(finalize.src)
+
+ def _output_child_post(self, node, frame, finalize):
+ if finalize.src is not None:
+ self.write(")")
+
+
+class NativeEnvironment(Environment):
+ """An environment that renders templates to native Python types."""
+
+ code_generator_class = NativeCodeGenerator
+
+
+class NativeTemplate(Template):
+ environment_class = NativeEnvironment
+
+ def render(self, *args, **kwargs):
+ """Render the template to produce a native Python type. If the
+ result is a single node, its value is returned. Otherwise, the
+ nodes are concatenated as strings. If the result can be parsed
+ with :func:`ast.literal_eval`, the parsed value is returned.
+ Otherwise, the string is returned.
+ """
+ vars = dict(*args, **kwargs)
+
+ try:
+ return native_concat(self.root_render_func(self.new_context(vars)))
+ except Exception:
+ return self.environment.handle_exception()
+
+
+NativeEnvironment.template_class = NativeTemplate
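A short usage sketch of the native environment; unlike the default ``Environment``, ``render()`` here returns Python objects rather than strings:

from jinja2.nativetypes import NativeEnvironment

env = NativeEnvironment()

# Single-expression output comes back as the underlying Python value.
result = env.from_string("{{ x + y }}").render(x=4, y=2)
print(result, type(result))   # 6 <class 'int'>

# Concatenated output that parses with ast.literal_eval is evaluated too.
result = env.from_string("[{{ a }}, {{ b }}]").render(a=1, b=2)
print(result, type(result))   # [1, 2] <class 'list'>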
diff --git a/third_party/python/Jinja2/src/jinja2/nodes.py b/third_party/python/Jinja2/src/jinja2/nodes.py
new file mode 100644
index 0000000000..95bd614a14
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/nodes.py
@@ -0,0 +1,1088 @@
+# -*- coding: utf-8 -*-
+"""AST nodes generated by the parser for the compiler. Also provides
+some node tree helper functions used by the parser and compiler in order
+to normalize nodes.
+"""
+import operator
+from collections import deque
+
+from markupsafe import Markup
+
+from ._compat import izip
+from ._compat import PY2
+from ._compat import text_type
+from ._compat import with_metaclass
+
+_binop_to_func = {
+ "*": operator.mul,
+ "/": operator.truediv,
+ "//": operator.floordiv,
+ "**": operator.pow,
+ "%": operator.mod,
+ "+": operator.add,
+ "-": operator.sub,
+}
+
+_uaop_to_func = {"not": operator.not_, "+": operator.pos, "-": operator.neg}
+
+_cmpop_to_func = {
+ "eq": operator.eq,
+ "ne": operator.ne,
+ "gt": operator.gt,
+ "gteq": operator.ge,
+ "lt": operator.lt,
+ "lteq": operator.le,
+ "in": lambda a, b: a in b,
+ "notin": lambda a, b: a not in b,
+}
+
+
+class Impossible(Exception):
+ """Raised if the node could not perform a requested action."""
+
+
+class NodeType(type):
+ """A metaclass for nodes that handles the field and attribute
+ inheritance. fields and attributes from the parent class are
+ automatically forwarded to the child."""
+
+ def __new__(mcs, name, bases, d):
+ for attr in "fields", "attributes":
+ storage = []
+ storage.extend(getattr(bases[0], attr, ()))
+ storage.extend(d.get(attr, ()))
+ assert len(bases) == 1, "multiple inheritance not allowed"
+ assert len(storage) == len(set(storage)), "layout conflict"
+ d[attr] = tuple(storage)
+ d.setdefault("abstract", False)
+ return type.__new__(mcs, name, bases, d)
+
+
+class EvalContext(object):
+ """Holds evaluation time information. Custom attributes can be attached
+ to it in extensions.
+ """
+
+ def __init__(self, environment, template_name=None):
+ self.environment = environment
+ if callable(environment.autoescape):
+ self.autoescape = environment.autoescape(template_name)
+ else:
+ self.autoescape = environment.autoescape
+ self.volatile = False
+
+ def save(self):
+ return self.__dict__.copy()
+
+ def revert(self, old):
+ self.__dict__.clear()
+ self.__dict__.update(old)
+
+
+def get_eval_context(node, ctx):
+ if ctx is None:
+ if node.environment is None:
+ raise RuntimeError(
+ "if no eval context is passed, the "
+ "node must have an attached "
+ "environment."
+ )
+ return EvalContext(node.environment)
+ return ctx
+
+
+class Node(with_metaclass(NodeType, object)):
+ """Baseclass for all Jinja nodes. There are a number of nodes available
+ of different types. There are four major types:
+
+ - :class:`Stmt`: statements
+ - :class:`Expr`: expressions
+ - :class:`Helper`: helper nodes
+ - :class:`Template`: the outermost wrapper node
+
+ All nodes have fields and attributes. Fields may be other nodes, lists,
+ or arbitrary values. Fields are passed to the constructor as regular
+ positional arguments, attributes as keyword arguments. Each node has
+ two attributes: `lineno` (the line number of the node) and `environment`.
+ The `environment` attribute is set at the end of the parsing process for
+ all nodes automatically.
+ """
+
+ fields = ()
+ attributes = ("lineno", "environment")
+ abstract = True
+
+ def __init__(self, *fields, **attributes):
+ if self.abstract:
+ raise TypeError("abstract nodes are not instantiable")
+ if fields:
+ if len(fields) != len(self.fields):
+ if not self.fields:
+ raise TypeError("%r takes 0 arguments" % self.__class__.__name__)
+ raise TypeError(
+ "%r takes 0 or %d argument%s"
+ % (
+ self.__class__.__name__,
+ len(self.fields),
+ len(self.fields) != 1 and "s" or "",
+ )
+ )
+ for name, arg in izip(self.fields, fields):
+ setattr(self, name, arg)
+ for attr in self.attributes:
+ setattr(self, attr, attributes.pop(attr, None))
+ if attributes:
+ raise TypeError("unknown attribute %r" % next(iter(attributes)))
+
+ def iter_fields(self, exclude=None, only=None):
+ """This method iterates over all fields that are defined and yields
+ ``(key, value)`` tuples. Per default all fields are returned, but
+ it's possible to limit that to some fields by providing the `only`
+ parameter or to exclude some using the `exclude` parameter. Both
+ should be sets or tuples of field names.
+ """
+ for name in self.fields:
+ if (
+ (exclude is only is None)
+ or (exclude is not None and name not in exclude)
+ or (only is not None and name in only)
+ ):
+ try:
+ yield name, getattr(self, name)
+ except AttributeError:
+ pass
+
+ def iter_child_nodes(self, exclude=None, only=None):
+ """Iterates over all direct child nodes of the node. This iterates
+        over all fields and yields the values if they are nodes. If the value
+ of a field is a list all the nodes in that list are returned.
+ """
+ for _, item in self.iter_fields(exclude, only):
+ if isinstance(item, list):
+ for n in item:
+ if isinstance(n, Node):
+ yield n
+ elif isinstance(item, Node):
+ yield item
+
+ def find(self, node_type):
+ """Find the first node of a given type. If no such node exists the
+ return value is `None`.
+ """
+ for result in self.find_all(node_type):
+ return result
+
+ def find_all(self, node_type):
+ """Find all the nodes of a given type. If the type is a tuple,
+ the check is performed for any of the tuple items.
+ """
+ for child in self.iter_child_nodes():
+ if isinstance(child, node_type):
+ yield child
+ for result in child.find_all(node_type):
+ yield result
+
+ def set_ctx(self, ctx):
+ """Reset the context of a node and all child nodes. Per default the
+        parser generates nodes that have a 'load' context, as it's the
+ most common one. This method is used in the parser to set assignment
+ targets and other nodes to a store context.
+ """
+ todo = deque([self])
+ while todo:
+ node = todo.popleft()
+ if "ctx" in node.fields:
+ node.ctx = ctx
+ todo.extend(node.iter_child_nodes())
+ return self
+
+ def set_lineno(self, lineno, override=False):
+ """Set the line numbers of the node and children."""
+ todo = deque([self])
+ while todo:
+ node = todo.popleft()
+ if "lineno" in node.attributes:
+ if node.lineno is None or override:
+ node.lineno = lineno
+ todo.extend(node.iter_child_nodes())
+ return self
+
+ def set_environment(self, environment):
+ """Set the environment for all nodes."""
+ todo = deque([self])
+ while todo:
+ node = todo.popleft()
+ node.environment = environment
+ todo.extend(node.iter_child_nodes())
+ return self
+
+ def __eq__(self, other):
+ return type(self) is type(other) and tuple(self.iter_fields()) == tuple(
+ other.iter_fields()
+ )
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ # Restore Python 2 hashing behavior on Python 3
+ __hash__ = object.__hash__
+
+ def __repr__(self):
+ return "%s(%s)" % (
+ self.__class__.__name__,
+ ", ".join("%s=%r" % (arg, getattr(self, arg, None)) for arg in self.fields),
+ )
+
+ def dump(self):
+ def _dump(node):
+ if not isinstance(node, Node):
+ buf.append(repr(node))
+ return
+
+ buf.append("nodes.%s(" % node.__class__.__name__)
+ if not node.fields:
+ buf.append(")")
+ return
+ for idx, field in enumerate(node.fields):
+ if idx:
+ buf.append(", ")
+ value = getattr(node, field)
+ if isinstance(value, list):
+ buf.append("[")
+ for idx, item in enumerate(value):
+ if idx:
+ buf.append(", ")
+ _dump(item)
+ buf.append("]")
+ else:
+ _dump(value)
+ buf.append(")")
+
+ buf = []
+ _dump(self)
+ return "".join(buf)
+
+
+class Stmt(Node):
+ """Base node for all statements."""
+
+ abstract = True
+
+
+class Helper(Node):
+ """Nodes that exist in a specific context only."""
+
+ abstract = True
+
+
+class Template(Node):
+ """Node that represents a template. This must be the outermost node that
+ is passed to the compiler.
+ """
+
+ fields = ("body",)
+
+
+class Output(Stmt):
+ """A node that holds multiple expressions which are then printed out.
+ This is used both for the `print` statement and the regular template data.
+ """
+
+ fields = ("nodes",)
+
+
+class Extends(Stmt):
+ """Represents an extends statement."""
+
+ fields = ("template",)
+
+
+class For(Stmt):
+ """The for loop. `target` is the target for the iteration (usually a
+ :class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list
+ of nodes that are used as loop-body, and `else_` a list of nodes for the
+ `else` block. If no else node exists it has to be an empty list.
+
+ For filtered nodes an expression can be stored as `test`, otherwise `None`.
+ """
+
+ fields = ("target", "iter", "body", "else_", "test", "recursive")
+
+
+class If(Stmt):
+ """If `test` is true, `body` is rendered, else `else_`."""
+
+ fields = ("test", "body", "elif_", "else_")
+
+
+class Macro(Stmt):
+ """A macro definition. `name` is the name of the macro, `args` a list of
+ arguments and `defaults` a list of defaults if there are any. `body` is
+ a list of nodes for the macro body.
+ """
+
+ fields = ("name", "args", "defaults", "body")
+
+
+class CallBlock(Stmt):
+ """Like a macro without a name but a call instead. `call` is called with
+ the unnamed macro as `caller` argument this node holds.
+ """
+
+ fields = ("call", "args", "defaults", "body")
+
+
+class FilterBlock(Stmt):
+ """Node for filter sections."""
+
+ fields = ("body", "filter")
+
+
+class With(Stmt):
+ """Specific node for with statements. In older versions of Jinja the
+ with statement was implemented on the base of the `Scope` node instead.
+
+ .. versionadded:: 2.9.3
+ """
+
+ fields = ("targets", "values", "body")
+
+
+class Block(Stmt):
+ """A node that represents a block."""
+
+ fields = ("name", "body", "scoped")
+
+
+class Include(Stmt):
+ """A node that represents the include tag."""
+
+ fields = ("template", "with_context", "ignore_missing")
+
+
+class Import(Stmt):
+ """A node that represents the import tag."""
+
+ fields = ("template", "target", "with_context")
+
+
+class FromImport(Stmt):
+ """A node that represents the from import tag. It's important to not
+ pass unsafe names to the name attribute. The compiler translates the
+ attribute lookups directly into getattr calls and does *not* use the
+ subscript callback of the interface. As exported variables may not
+ start with double underscores (which the parser asserts) this is not a
+ problem for regular Jinja code, but if this node is used in an extension
+ extra care must be taken.
+
+ The list of names may contain tuples if aliases are wanted.
+ """
+
+ fields = ("template", "names", "with_context")
+
+
+class ExprStmt(Stmt):
+ """A statement that evaluates an expression and discards the result."""
+
+ fields = ("node",)
+
+
+class Assign(Stmt):
+ """Assigns an expression to a target."""
+
+ fields = ("target", "node")
+
+
+class AssignBlock(Stmt):
+ """Assigns a block to a target."""
+
+ fields = ("target", "filter", "body")
+
+
+class Expr(Node):
+ """Baseclass for all expressions."""
+
+ abstract = True
+
+ def as_const(self, eval_ctx=None):
+ """Return the value of the expression as constant or raise
+ :exc:`Impossible` if this was not possible.
+
+ An :class:`EvalContext` can be provided, if none is given
+ a default context is created which requires the nodes to have
+ an attached environment.
+
+ .. versionchanged:: 2.4
+ the `eval_ctx` parameter was added.
+ """
+ raise Impossible()
+
+ def can_assign(self):
+ """Check if it's possible to assign something to this node."""
+ return False
+
+
+class BinExpr(Expr):
+ """Baseclass for all binary expressions."""
+
+ fields = ("left", "right")
+ operator = None
+ abstract = True
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ # intercepted operators cannot be folded at compile time
+ if (
+ self.environment.sandboxed
+ and self.operator in self.environment.intercepted_binops
+ ):
+ raise Impossible()
+ f = _binop_to_func[self.operator]
+ try:
+ return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
+ except Exception:
+ raise Impossible()
+
+
+class UnaryExpr(Expr):
+ """Baseclass for all unary expressions."""
+
+ fields = ("node",)
+ operator = None
+ abstract = True
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ # intercepted operators cannot be folded at compile time
+ if (
+ self.environment.sandboxed
+ and self.operator in self.environment.intercepted_unops
+ ):
+ raise Impossible()
+ f = _uaop_to_func[self.operator]
+ try:
+ return f(self.node.as_const(eval_ctx))
+ except Exception:
+ raise Impossible()
+
+
+class Name(Expr):
+ """Looks up a name or stores a value in a name.
+ The `ctx` of the node can be one of the following values:
+
+ - `store`: store a value in the name
+ - `load`: load that name
+ - `param`: like `store` but used when the name is defined as a function parameter.
+ """
+
+ fields = ("name", "ctx")
+
+ def can_assign(self):
+ return self.name not in ("true", "false", "none", "True", "False", "None")
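+
+ # Illustrative note (not part of upstream Jinja2): ``{{ foo }}`` parses to
+ # Name('foo', 'load'), while the target of ``{% set foo = 1 %}`` ends up as
+ # Name('foo', 'store') after the parser calls ``set_ctx('store')`` on it.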
+
+
+class NSRef(Expr):
+ """Reference to a namespace value assignment"""
+
+ fields = ("name", "attr")
+
+ def can_assign(self):
+ # We don't need any special checks here; NSRef assignments have a
+ # runtime check to ensure the target is a namespace object which will
+ # have been checked already as it is created using a normal assignment
+ # which goes through a `Name` node.
+ return True
+
+
+class Literal(Expr):
+ """Baseclass for literals."""
+
+ abstract = True
+
+
+class Const(Literal):
+ """All constant values. The parser will return this node for simple
+ constants such as ``42`` or ``"foo"`` but it can be used to store more
+ complex values such as lists too. Only constants with a safe
+ representation (objects where ``eval(repr(x)) == x`` is true) can be stored.
+ """
+
+ fields = ("value",)
+
+ def as_const(self, eval_ctx=None):
+ rv = self.value
+ if (
+ PY2
+ and type(rv) is text_type
+ and self.environment.policies["compiler.ascii_str"]
+ ):
+ try:
+ rv = rv.encode("ascii")
+ except UnicodeError:
+ pass
+ return rv
+
+ @classmethod
+ def from_untrusted(cls, value, lineno=None, environment=None):
+ """Return a const object if the value is representable as
+ constant value in the generated code, otherwise it will raise
+ an `Impossible` exception.
+ """
+ from .compiler import has_safe_repr
+
+ if not has_safe_repr(value):
+ raise Impossible()
+ return cls(value, lineno=lineno, environment=environment)
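+
+ # Illustrative sketch (not part of upstream Jinja2): ``from_untrusted``
+ # only accepts values with a round-trippable repr, e.g.
+ #
+ #   nodes.Const.from_untrusted([1, 2, 3])  # works, lists have a safe repr
+ #   nodes.Const.from_untrusted(object())   # raises Impossible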
+
+
+class TemplateData(Literal):
+ """A constant template string."""
+
+ fields = ("data",)
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ if eval_ctx.volatile:
+ raise Impossible()
+ if eval_ctx.autoescape:
+ return Markup(self.data)
+ return self.data
+
+
+class Tuple(Literal):
+ """For loop unpacking and some other things like multiple arguments
+ for subscripts. As with :class:`Name`, `ctx` specifies whether the tuple
+ is used for loading or storing the names.
+ """
+
+ fields = ("items", "ctx")
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return tuple(x.as_const(eval_ctx) for x in self.items)
+
+ def can_assign(self):
+ for item in self.items:
+ if not item.can_assign():
+ return False
+ return True
+
+
+class List(Literal):
+ """Any list literal such as ``[1, 2, 3]``"""
+
+ fields = ("items",)
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return [x.as_const(eval_ctx) for x in self.items]
+
+
+class Dict(Literal):
+ """Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
+ :class:`Pair` nodes.
+ """
+
+ fields = ("items",)
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return dict(x.as_const(eval_ctx) for x in self.items)
+
+
+class Pair(Helper):
+ """A key, value pair for dicts."""
+
+ fields = ("key", "value")
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
+
+
+class Keyword(Helper):
+ """A key, value pair for keyword arguments where key is a string."""
+
+ fields = ("key", "value")
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return self.key, self.value.as_const(eval_ctx)
+
+
+class CondExpr(Expr):
+ """A conditional expression (inline if expression). (``{{
+ foo if bar else baz }}``)
+ """
+
+ fields = ("test", "expr1", "expr2")
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ if self.test.as_const(eval_ctx):
+ return self.expr1.as_const(eval_ctx)
+
+ # if we evaluate to an undefined object, we better do that at runtime
+ if self.expr2 is None:
+ raise Impossible()
+
+ return self.expr2.as_const(eval_ctx)
+
+
+def args_as_const(node, eval_ctx):
+ args = [x.as_const(eval_ctx) for x in node.args]
+ kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs)
+
+ if node.dyn_args is not None:
+ try:
+ args.extend(node.dyn_args.as_const(eval_ctx))
+ except Exception:
+ raise Impossible()
+
+ if node.dyn_kwargs is not None:
+ try:
+ kwargs.update(node.dyn_kwargs.as_const(eval_ctx))
+ except Exception:
+ raise Impossible()
+
+ return args, kwargs
+
+
+class Filter(Expr):
+ """This node applies a filter on an expression. `name` is the name of
+ the filter, the rest of the fields are the same as for :class:`Call`.
+
+ If the `node` of a filter is `None` the contents of the last buffer are
+ filtered. Buffers are created by macros and filter blocks.
+ """
+
+ fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+
+ if eval_ctx.volatile or self.node is None:
+ raise Impossible()
+
+ # we have to be careful here because we call filter_ below.
+ # if this variable would be called filter, 2to3 would wrap the
+ # call in a list because it is assuming we are talking about the
+ # builtin filter function here which no longer returns a list in
+ # python 3. because of that, do not rename filter_ to filter!
+ filter_ = self.environment.filters.get(self.name)
+
+ if filter_ is None or getattr(filter_, "contextfilter", False) is True:
+ raise Impossible()
+
+ # We cannot constant-fold async filters, so make sure not to go
+ # down this path.
+ if eval_ctx.environment.is_async and getattr(
+ filter_, "asyncfiltervariant", False
+ ):
+ raise Impossible()
+
+ args, kwargs = args_as_const(self, eval_ctx)
+ args.insert(0, self.node.as_const(eval_ctx))
+
+ if getattr(filter_, "evalcontextfilter", False) is True:
+ args.insert(0, eval_ctx)
+ elif getattr(filter_, "environmentfilter", False) is True:
+ args.insert(0, self.environment)
+
+ try:
+ return filter_(*args, **kwargs)
+ except Exception:
+ raise Impossible()
+
+
+class Test(Expr):
+ """Applies a test on an expression. `name` is the name of the test, the
+ rest of the fields are the same as for :class:`Call`.
+ """
+
+ fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
+
+ def as_const(self, eval_ctx=None):
+ test = self.environment.tests.get(self.name)
+
+ if test is None:
+ raise Impossible()
+
+ eval_ctx = get_eval_context(self, eval_ctx)
+ args, kwargs = args_as_const(self, eval_ctx)
+ args.insert(0, self.node.as_const(eval_ctx))
+
+ try:
+ return test(*args, **kwargs)
+ except Exception:
+ raise Impossible()
+
+
+class Call(Expr):
+ """Calls an expression. `args` is a list of arguments, `kwargs` a list
+ of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
+ and `dyn_kwargs` have to be either `None` or a node that is used as
+ node for dynamic positional (``*args``) or keyword (``**kwargs``)
+ arguments.
+ """
+
+ fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs")
+
+
+class Getitem(Expr):
+ """Get an attribute or item from an expression and prefer the item."""
+
+ fields = ("node", "arg", "ctx")
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ if self.ctx != "load":
+ raise Impossible()
+ try:
+ return self.environment.getitem(
+ self.node.as_const(eval_ctx), self.arg.as_const(eval_ctx)
+ )
+ except Exception:
+ raise Impossible()
+
+ def can_assign(self):
+ return False
+
+
+class Getattr(Expr):
+ """Get an attribute or item from an expression that is a ascii-only
+ bytestring and prefer the attribute.
+ """
+
+ fields = ("node", "attr", "ctx")
+
+ def as_const(self, eval_ctx=None):
+ if self.ctx != "load":
+ raise Impossible()
+ try:
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return self.environment.getattr(self.node.as_const(eval_ctx), self.attr)
+ except Exception:
+ raise Impossible()
+
+ def can_assign(self):
+ return False
+
+
+class Slice(Expr):
+ """Represents a slice object. This must only be used as argument for
+ :class:`Subscript`.
+ """
+
+ fields = ("start", "stop", "step")
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+
+ def const(obj):
+ if obj is None:
+ return None
+ return obj.as_const(eval_ctx)
+
+ return slice(const(self.start), const(self.stop), const(self.step))
+
+
+class Concat(Expr):
+ """Concatenates the list of expressions provided after converting them to
+ unicode.
+ """
+
+ fields = ("nodes",)
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return "".join(text_type(x.as_const(eval_ctx)) for x in self.nodes)
+
+
+class Compare(Expr):
+ """Compares an expression with some other expressions. `ops` must be a
+ list of :class:`Operand`\\s.
+ """
+
+ fields = ("expr", "ops")
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ result = value = self.expr.as_const(eval_ctx)
+
+ try:
+ for op in self.ops:
+ new_value = op.expr.as_const(eval_ctx)
+ result = _cmpop_to_func[op.op](value, new_value)
+
+ if not result:
+ return False
+
+ value = new_value
+ except Exception:
+ raise Impossible()
+
+ return result
+
+
+class Operand(Helper):
+ """Holds an operator and an expression."""
+
+ fields = ("op", "expr")
+
+
+if __debug__:
+ Operand.__doc__ += "\nThe following operators are available: " + ", ".join(
+ sorted(
+ "``%s``" % x
+ for x in set(_binop_to_func) | set(_uaop_to_func) | set(_cmpop_to_func)
+ )
+ )
+
+
+class Mul(BinExpr):
+ """Multiplies the left with the right node."""
+
+ operator = "*"
+
+
+class Div(BinExpr):
+ """Divides the left by the right node."""
+
+ operator = "/"
+
+
+class FloorDiv(BinExpr):
+ """Divides the left by the right node and truncates conver the
+ result into an integer by truncating.
+ """
+
+ operator = "//"
+
+
+class Add(BinExpr):
+ """Add the left to the right node."""
+
+ operator = "+"
+
+
+class Sub(BinExpr):
+ """Subtract the right from the left node."""
+
+ operator = "-"
+
+
+class Mod(BinExpr):
+ """Left modulo right."""
+
+ operator = "%"
+
+
+class Pow(BinExpr):
+ """Left to the power of right."""
+
+ operator = "**"
+
+
+class And(BinExpr):
+ """Short circuited AND."""
+
+ operator = "and"
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
+
+
+class Or(BinExpr):
+ """Short circuited OR."""
+
+ operator = "or"
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
+
+
+class Not(UnaryExpr):
+ """Negate the expression."""
+
+ operator = "not"
+
+
+class Neg(UnaryExpr):
+ """Make the expression negative."""
+
+ operator = "-"
+
+
+class Pos(UnaryExpr):
+ """Make the expression positive (noop for most expressions)"""
+
+ operator = "+"
+
+
+# Helpers for extensions
+
+
+class EnvironmentAttribute(Expr):
+ """Loads an attribute from the environment object. This is useful for
+ extensions that want to call a callback stored on the environment.
+ """
+
+ fields = ("name",)
+
+
+class ExtensionAttribute(Expr):
+ """Returns the attribute of an extension bound to the environment.
+ The identifier is the identifier of the :class:`Extension`.
+
+ This node is usually constructed by calling the
+ :meth:`~jinja2.ext.Extension.attr` method on an extension.
+ """
+
+ fields = ("identifier", "name")
+
+
+class ImportedName(Expr):
+ """If created with an import name the import name is returned on node
+ access. For example ``ImportedName('cgi.escape')`` returns the `escape`
+ function from the cgi module on evaluation. Imports are optimized by the
+ compiler so there is no need to assign them to local variables.
+ """
+
+ fields = ("importname",)
+
+
+class InternalName(Expr):
+ """An internal name in the compiler. You cannot create these nodes
+ yourself but the parser provides a
+ :meth:`~jinja2.parser.Parser.free_identifier` method that creates
+ a new identifier for you. This identifier is not available from the
+ template and is not treated specially by the compiler.
+ """
+
+ fields = ("name",)
+
+ def __init__(self):
+ raise TypeError(
+ "Can't create internal names. Use the "
+ "`free_identifier` method on a parser."
+ )
+
+
+class MarkSafe(Expr):
+ """Mark the wrapped expression as safe (wrap it as `Markup`)."""
+
+ fields = ("expr",)
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return Markup(self.expr.as_const(eval_ctx))
+
+
+class MarkSafeIfAutoescape(Expr):
+ """Mark the wrapped expression as safe (wrap it as `Markup`) but
+ only if autoescaping is active.
+
+ .. versionadded:: 2.5
+ """
+
+ fields = ("expr",)
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ if eval_ctx.volatile:
+ raise Impossible()
+ expr = self.expr.as_const(eval_ctx)
+ if eval_ctx.autoescape:
+ return Markup(expr)
+ return expr
+
+
+class ContextReference(Expr):
+ """Returns the current template context. It can be used like a
+ :class:`Name` node, with a ``'load'`` ctx and will return the
+ current :class:`~jinja2.runtime.Context` object.
+
+ Here is an example that assigns the current template name to a
+ variable named `foo`::
+
+ Assign(Name('foo', ctx='store'),
+ Getattr(ContextReference(), 'name'))
+
+ This is basically equivalent to using the
+ :func:`~jinja2.contextfunction` decorator when using the
+ high-level API, which causes a reference to the context to be passed
+ as the first argument to a function.
+ """
+
+
+class DerivedContextReference(Expr):
+ """Return the current template context including locals. Behaves
+ exactly like :class:`ContextReference`, but includes local
+ variables, such as from a ``for`` loop.
+
+ .. versionadded:: 2.11
+ """
+
+
+class Continue(Stmt):
+ """Continue a loop."""
+
+
+class Break(Stmt):
+ """Break a loop."""
+
+
+class Scope(Stmt):
+ """An artificial scope."""
+
+ fields = ("body",)
+
+
+class OverlayScope(Stmt):
+ """An overlay scope for extensions. This is a largely unoptimized scope
+ that however can be used to introduce completely arbitrary variables into
+ a sub scope from a dictionary or dictionary like object. The `context`
+ field has to evaluate to a dictionary object.
+
+ Example usage::
+
+ OverlayScope(context=self.call_method('get_context'),
+ body=[...])
+
+ .. versionadded:: 2.10
+ """
+
+ fields = ("context", "body")
+
+
+class EvalContextModifier(Stmt):
+ """Modifies the eval context. For each option that should be modified,
+ a :class:`Keyword` has to be added to the :attr:`options` list.
+
+ Example to change the `autoescape` setting::
+
+ EvalContextModifier(options=[Keyword('autoescape', Const(True))])
+ """
+
+ fields = ("options",)
+
+
+class ScopedEvalContextModifier(EvalContextModifier):
+ """Modifies the eval context and reverts it later. Works exactly like
+ :class:`EvalContextModifier` but will only modify the
+ :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
+ """
+
+ fields = ("body",)
+
+
+# make sure nobody creates custom nodes
+def _failing_new(*args, **kwargs):
+ raise TypeError("can't create custom node types")
+
+
+NodeType.__new__ = staticmethod(_failing_new)
+del _failing_new
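+
+
+# Illustrative sketch (not part of upstream Jinja2): constant evaluation of a
+# small node tree. ``set_environment`` attaches an environment so ``as_const``
+# can build a default EvalContext; ``Impossible`` is raised when folding fails.
+#
+#   from jinja2 import Environment
+#   from jinja2 import nodes
+#
+#   env = Environment()
+#   node = nodes.Add(nodes.Const(1), nodes.Const(2))
+#   node.set_environment(env)
+#   assert node.as_const() == 3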
diff --git a/third_party/python/Jinja2/src/jinja2/optimizer.py b/third_party/python/Jinja2/src/jinja2/optimizer.py
new file mode 100644
index 0000000000..7bc78c4524
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/optimizer.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+"""The optimizer tries to constant fold expressions and modify the AST
+in place so that it should be faster to evaluate.
+
+Because the AST does not contain all the scoping information and the
+compiler has to find that out, we cannot do all the optimizations we
+want. For example, loop unrolling doesn't work because unrolled loops
+would have a different scope. The solution would be a second syntax tree
+that stored the scoping rules.
+"""
+from . import nodes
+from .visitor import NodeTransformer
+
+
+def optimize(node, environment):
+ """The context hint can be used to perform an static optimization
+ based on the context given."""
+ optimizer = Optimizer(environment)
+ return optimizer.visit(node)
+
+
+class Optimizer(NodeTransformer):
+ def __init__(self, environment):
+ self.environment = environment
+
+ def generic_visit(self, node, *args, **kwargs):
+ node = super(Optimizer, self).generic_visit(node, *args, **kwargs)
+
+ # Do constant folding. Some other nodes besides Expr have
+ # as_const, but folding them causes errors later on.
+ if isinstance(node, nodes.Expr):
+ try:
+ return nodes.Const.from_untrusted(
+ node.as_const(args[0] if args else None),
+ lineno=node.lineno,
+ environment=self.environment,
+ )
+ except nodes.Impossible:
+ pass
+
+ return node
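+
+
+# Illustrative sketch (not part of upstream Jinja2): the environment applies
+# this pass during compilation when ``optimized`` is enabled (the default),
+# but it can also be run on a parsed tree directly:
+#
+#   from jinja2 import Environment
+#   from jinja2.optimizer import optimize
+#
+#   env = Environment()
+#   tree = env.parse("{{ 1 + 2 }}")
+#   folded = optimize(tree, env)
+#   # the Output node now holds Const(3) instead of Add(Const(1), Const(2))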
diff --git a/third_party/python/Jinja2/src/jinja2/parser.py b/third_party/python/Jinja2/src/jinja2/parser.py
new file mode 100644
index 0000000000..d5881066f7
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/parser.py
@@ -0,0 +1,939 @@
+# -*- coding: utf-8 -*-
+"""Parse tokens from the lexer into nodes for the compiler."""
+from . import nodes
+from ._compat import imap
+from .exceptions import TemplateAssertionError
+from .exceptions import TemplateSyntaxError
+from .lexer import describe_token
+from .lexer import describe_token_expr
+
+_statement_keywords = frozenset(
+ [
+ "for",
+ "if",
+ "block",
+ "extends",
+ "print",
+ "macro",
+ "include",
+ "from",
+ "import",
+ "set",
+ "with",
+ "autoescape",
+ ]
+)
+_compare_operators = frozenset(["eq", "ne", "lt", "lteq", "gt", "gteq"])
+
+_math_nodes = {
+ "add": nodes.Add,
+ "sub": nodes.Sub,
+ "mul": nodes.Mul,
+ "div": nodes.Div,
+ "floordiv": nodes.FloorDiv,
+ "mod": nodes.Mod,
+}
+
+
+class Parser(object):
+ """This is the central parsing class Jinja uses. It's passed to
+ extensions and can be used to parse expressions or statements.
+ """
+
+ def __init__(self, environment, source, name=None, filename=None, state=None):
+ self.environment = environment
+ self.stream = environment._tokenize(source, name, filename, state)
+ self.name = name
+ self.filename = filename
+ self.closed = False
+ self.extensions = {}
+ for extension in environment.iter_extensions():
+ for tag in extension.tags:
+ self.extensions[tag] = extension.parse
+ self._last_identifier = 0
+ self._tag_stack = []
+ self._end_token_stack = []
+
+ def fail(self, msg, lineno=None, exc=TemplateSyntaxError):
+ """Convenience method that raises `exc` with the message, passed
+ line number or last line number as well as the current name and
+ filename.
+ """
+ if lineno is None:
+ lineno = self.stream.current.lineno
+ raise exc(msg, lineno, self.name, self.filename)
+
+ def _fail_ut_eof(self, name, end_token_stack, lineno):
+ expected = []
+ for exprs in end_token_stack:
+ expected.extend(imap(describe_token_expr, exprs))
+ if end_token_stack:
+ currently_looking = " or ".join(
+ "'%s'" % describe_token_expr(expr) for expr in end_token_stack[-1]
+ )
+ else:
+ currently_looking = None
+
+ if name is None:
+ message = ["Unexpected end of template."]
+ else:
+ message = ["Encountered unknown tag '%s'." % name]
+
+ if currently_looking:
+ if name is not None and name in expected:
+ message.append(
+ "You probably made a nesting mistake. Jinja "
+ "is expecting this tag, but currently looking "
+ "for %s." % currently_looking
+ )
+ else:
+ message.append(
+ "Jinja was looking for the following tags: "
+ "%s." % currently_looking
+ )
+
+ if self._tag_stack:
+ message.append(
+ "The innermost block that needs to be "
+ "closed is '%s'." % self._tag_stack[-1]
+ )
+
+ self.fail(" ".join(message), lineno)
+
+ def fail_unknown_tag(self, name, lineno=None):
+ """Called if the parser encounters an unknown tag. Tries to fail
+ with a human readable error message that could help to identify
+ the problem.
+ """
+ return self._fail_ut_eof(name, self._end_token_stack, lineno)
+
+ def fail_eof(self, end_tokens=None, lineno=None):
+ """Like fail_unknown_tag but for end of template situations."""
+ stack = list(self._end_token_stack)
+ if end_tokens is not None:
+ stack.append(end_tokens)
+ return self._fail_ut_eof(None, stack, lineno)
+
+ def is_tuple_end(self, extra_end_rules=None):
+ """Are we at the end of a tuple?"""
+ if self.stream.current.type in ("variable_end", "block_end", "rparen"):
+ return True
+ elif extra_end_rules is not None:
+ return self.stream.current.test_any(extra_end_rules)
+ return False
+
+ def free_identifier(self, lineno=None):
+ """Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
+ self._last_identifier += 1
+ rv = object.__new__(nodes.InternalName)
+ nodes.Node.__init__(rv, "fi%d" % self._last_identifier, lineno=lineno)
+ return rv
+
+ def parse_statement(self):
+ """Parse a single statement."""
+ token = self.stream.current
+ if token.type != "name":
+ self.fail("tag name expected", token.lineno)
+ self._tag_stack.append(token.value)
+ pop_tag = True
+ try:
+ if token.value in _statement_keywords:
+ return getattr(self, "parse_" + self.stream.current.value)()
+ if token.value == "call":
+ return self.parse_call_block()
+ if token.value == "filter":
+ return self.parse_filter_block()
+ ext = self.extensions.get(token.value)
+ if ext is not None:
+ return ext(self)
+
+ # did not work out, remove the token we pushed by accident
+ # from the stack so that the unknown tag fail function can
+ # produce a proper error message.
+ self._tag_stack.pop()
+ pop_tag = False
+ self.fail_unknown_tag(token.value, token.lineno)
+ finally:
+ if pop_tag:
+ self._tag_stack.pop()
+
+ def parse_statements(self, end_tokens, drop_needle=False):
+ """Parse multiple statements into a list until one of the end tokens
+ is reached. This is used to parse the body of statements as it also
+ parses template data if appropriate. The parser checks first if the
+ current token is a colon and skips it if there is one. Then it checks
+ for the block end and parses until one of the `end_tokens` is
+ reached. By default the active token in the stream at the end of
+ the call is the matched end token. If this is not wanted `drop_needle`
+ can be set to `True` and the end token is removed.
+ """
+ # the first token may be a colon for python compatibility
+ self.stream.skip_if("colon")
+
+ # in the future it would be possible to add whole code sections
+ # by adding some sort of end of statement token and parsing those here.
+ self.stream.expect("block_end")
+ result = self.subparse(end_tokens)
+
+ # we reached the end of the template too early, the subparser
+ # does not check for this, so we do that now
+ if self.stream.current.type == "eof":
+ self.fail_eof(end_tokens)
+
+ if drop_needle:
+ next(self.stream)
+ return result
+
+ def parse_set(self):
+ """Parse an assign statement."""
+ lineno = next(self.stream).lineno
+ target = self.parse_assign_target(with_namespace=True)
+ if self.stream.skip_if("assign"):
+ expr = self.parse_tuple()
+ return nodes.Assign(target, expr, lineno=lineno)
+ filter_node = self.parse_filter(None)
+ body = self.parse_statements(("name:endset",), drop_needle=True)
+ return nodes.AssignBlock(target, filter_node, body, lineno=lineno)
+
+ def parse_for(self):
+ """Parse a for loop."""
+ lineno = self.stream.expect("name:for").lineno
+ target = self.parse_assign_target(extra_end_rules=("name:in",))
+ self.stream.expect("name:in")
+ iter = self.parse_tuple(
+ with_condexpr=False, extra_end_rules=("name:recursive",)
+ )
+ test = None
+ if self.stream.skip_if("name:if"):
+ test = self.parse_expression()
+ recursive = self.stream.skip_if("name:recursive")
+ body = self.parse_statements(("name:endfor", "name:else"))
+ if next(self.stream).value == "endfor":
+ else_ = []
+ else:
+ else_ = self.parse_statements(("name:endfor",), drop_needle=True)
+ return nodes.For(target, iter, body, else_, test, recursive, lineno=lineno)
+
+ def parse_if(self):
+ """Parse an if construct."""
+ node = result = nodes.If(lineno=self.stream.expect("name:if").lineno)
+ while 1:
+ node.test = self.parse_tuple(with_condexpr=False)
+ node.body = self.parse_statements(("name:elif", "name:else", "name:endif"))
+ node.elif_ = []
+ node.else_ = []
+ token = next(self.stream)
+ if token.test("name:elif"):
+ node = nodes.If(lineno=self.stream.current.lineno)
+ result.elif_.append(node)
+ continue
+ elif token.test("name:else"):
+ result.else_ = self.parse_statements(("name:endif",), drop_needle=True)
+ break
+ return result
+
+ def parse_with(self):
+ node = nodes.With(lineno=next(self.stream).lineno)
+ targets = []
+ values = []
+ while self.stream.current.type != "block_end":
+ if targets:
+ self.stream.expect("comma")
+ target = self.parse_assign_target()
+ target.set_ctx("param")
+ targets.append(target)
+ self.stream.expect("assign")
+ values.append(self.parse_expression())
+ node.targets = targets
+ node.values = values
+ node.body = self.parse_statements(("name:endwith",), drop_needle=True)
+ return node
+
+ def parse_autoescape(self):
+ node = nodes.ScopedEvalContextModifier(lineno=next(self.stream).lineno)
+ node.options = [nodes.Keyword("autoescape", self.parse_expression())]
+ node.body = self.parse_statements(("name:endautoescape",), drop_needle=True)
+ return nodes.Scope([node])
+
+ def parse_block(self):
+ node = nodes.Block(lineno=next(self.stream).lineno)
+ node.name = self.stream.expect("name").value
+ node.scoped = self.stream.skip_if("name:scoped")
+
+ # common problem people encounter when switching from django
+ # to jinja. we do not support hyphens in block names, so let's
+ # raise a nicer error message in that case.
+ if self.stream.current.type == "sub":
+ self.fail(
+ "Block names in Jinja have to be valid Python "
+ "identifiers and may not contain hyphens, use an "
+ "underscore instead."
+ )
+
+ node.body = self.parse_statements(("name:endblock",), drop_needle=True)
+ self.stream.skip_if("name:" + node.name)
+ return node
+
+ def parse_extends(self):
+ node = nodes.Extends(lineno=next(self.stream).lineno)
+ node.template = self.parse_expression()
+ return node
+
+ def parse_import_context(self, node, default):
+ if self.stream.current.test_any(
+ "name:with", "name:without"
+ ) and self.stream.look().test("name:context"):
+ node.with_context = next(self.stream).value == "with"
+ self.stream.skip()
+ else:
+ node.with_context = default
+ return node
+
+ def parse_include(self):
+ node = nodes.Include(lineno=next(self.stream).lineno)
+ node.template = self.parse_expression()
+ if self.stream.current.test("name:ignore") and self.stream.look().test(
+ "name:missing"
+ ):
+ node.ignore_missing = True
+ self.stream.skip(2)
+ else:
+ node.ignore_missing = False
+ return self.parse_import_context(node, True)
+
+ def parse_import(self):
+ node = nodes.Import(lineno=next(self.stream).lineno)
+ node.template = self.parse_expression()
+ self.stream.expect("name:as")
+ node.target = self.parse_assign_target(name_only=True).name
+ return self.parse_import_context(node, False)
+
+ def parse_from(self):
+ node = nodes.FromImport(lineno=next(self.stream).lineno)
+ node.template = self.parse_expression()
+ self.stream.expect("name:import")
+ node.names = []
+
+ def parse_context():
+ if self.stream.current.value in (
+ "with",
+ "without",
+ ) and self.stream.look().test("name:context"):
+ node.with_context = next(self.stream).value == "with"
+ self.stream.skip()
+ return True
+ return False
+
+ while 1:
+ if node.names:
+ self.stream.expect("comma")
+ if self.stream.current.type == "name":
+ if parse_context():
+ break
+ target = self.parse_assign_target(name_only=True)
+ if target.name.startswith("_"):
+ self.fail(
+ "names starting with an underline can not be imported",
+ target.lineno,
+ exc=TemplateAssertionError,
+ )
+ if self.stream.skip_if("name:as"):
+ alias = self.parse_assign_target(name_only=True)
+ node.names.append((target.name, alias.name))
+ else:
+ node.names.append(target.name)
+ if parse_context() or self.stream.current.type != "comma":
+ break
+ else:
+ self.stream.expect("name")
+ if not hasattr(node, "with_context"):
+ node.with_context = False
+ return node
+
+ def parse_signature(self, node):
+ node.args = args = []
+ node.defaults = defaults = []
+ self.stream.expect("lparen")
+ while self.stream.current.type != "rparen":
+ if args:
+ self.stream.expect("comma")
+ arg = self.parse_assign_target(name_only=True)
+ arg.set_ctx("param")
+ if self.stream.skip_if("assign"):
+ defaults.append(self.parse_expression())
+ elif defaults:
+ self.fail("non-default argument follows default argument")
+ args.append(arg)
+ self.stream.expect("rparen")
+
+ def parse_call_block(self):
+ node = nodes.CallBlock(lineno=next(self.stream).lineno)
+ if self.stream.current.type == "lparen":
+ self.parse_signature(node)
+ else:
+ node.args = []
+ node.defaults = []
+
+ node.call = self.parse_expression()
+ if not isinstance(node.call, nodes.Call):
+ self.fail("expected call", node.lineno)
+ node.body = self.parse_statements(("name:endcall",), drop_needle=True)
+ return node
+
+ def parse_filter_block(self):
+ node = nodes.FilterBlock(lineno=next(self.stream).lineno)
+ node.filter = self.parse_filter(None, start_inline=True)
+ node.body = self.parse_statements(("name:endfilter",), drop_needle=True)
+ return node
+
+ def parse_macro(self):
+ node = nodes.Macro(lineno=next(self.stream).lineno)
+ node.name = self.parse_assign_target(name_only=True).name
+ self.parse_signature(node)
+ node.body = self.parse_statements(("name:endmacro",), drop_needle=True)
+ return node
+
+ def parse_print(self):
+ node = nodes.Output(lineno=next(self.stream).lineno)
+ node.nodes = []
+ while self.stream.current.type != "block_end":
+ if node.nodes:
+ self.stream.expect("comma")
+ node.nodes.append(self.parse_expression())
+ return node
+
+ def parse_assign_target(
+ self,
+ with_tuple=True,
+ name_only=False,
+ extra_end_rules=None,
+ with_namespace=False,
+ ):
+ """Parse an assignment target. As Jinja allows assignments to
+ tuples, this function can parse all allowed assignment targets. By
+ default assignments to tuples are parsed; that can be disabled by
+ setting `with_tuple` to `False`. If only assignments to names are
+ wanted `name_only` can be set to `True`. The `extra_end_rules`
+ parameter is forwarded to the tuple parsing function. If
+ `with_namespace` is enabled, a namespace assignment may be parsed.
+ """
+ if with_namespace and self.stream.look().type == "dot":
+ token = self.stream.expect("name")
+ next(self.stream) # dot
+ attr = self.stream.expect("name")
+ target = nodes.NSRef(token.value, attr.value, lineno=token.lineno)
+ elif name_only:
+ token = self.stream.expect("name")
+ target = nodes.Name(token.value, "store", lineno=token.lineno)
+ else:
+ if with_tuple:
+ target = self.parse_tuple(
+ simplified=True, extra_end_rules=extra_end_rules
+ )
+ else:
+ target = self.parse_primary()
+ target.set_ctx("store")
+ if not target.can_assign():
+ self.fail(
+ "can't assign to %r" % target.__class__.__name__.lower(), target.lineno
+ )
+ return target
+
+ def parse_expression(self, with_condexpr=True):
+ """Parse an expression. Per default all expressions are parsed, if
+ the optional `with_condexpr` parameter is set to `False` conditional
+ expressions are not parsed.
+ """
+ if with_condexpr:
+ return self.parse_condexpr()
+ return self.parse_or()
+
+ def parse_condexpr(self):
+ lineno = self.stream.current.lineno
+ expr1 = self.parse_or()
+ while self.stream.skip_if("name:if"):
+ expr2 = self.parse_or()
+ if self.stream.skip_if("name:else"):
+ expr3 = self.parse_condexpr()
+ else:
+ expr3 = None
+ expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno)
+ lineno = self.stream.current.lineno
+ return expr1
+
+ def parse_or(self):
+ lineno = self.stream.current.lineno
+ left = self.parse_and()
+ while self.stream.skip_if("name:or"):
+ right = self.parse_and()
+ left = nodes.Or(left, right, lineno=lineno)
+ lineno = self.stream.current.lineno
+ return left
+
+ def parse_and(self):
+ lineno = self.stream.current.lineno
+ left = self.parse_not()
+ while self.stream.skip_if("name:and"):
+ right = self.parse_not()
+ left = nodes.And(left, right, lineno=lineno)
+ lineno = self.stream.current.lineno
+ return left
+
+ def parse_not(self):
+ if self.stream.current.test("name:not"):
+ lineno = next(self.stream).lineno
+ return nodes.Not(self.parse_not(), lineno=lineno)
+ return self.parse_compare()
+
+ def parse_compare(self):
+ lineno = self.stream.current.lineno
+ expr = self.parse_math1()
+ ops = []
+ while 1:
+ token_type = self.stream.current.type
+ if token_type in _compare_operators:
+ next(self.stream)
+ ops.append(nodes.Operand(token_type, self.parse_math1()))
+ elif self.stream.skip_if("name:in"):
+ ops.append(nodes.Operand("in", self.parse_math1()))
+ elif self.stream.current.test("name:not") and self.stream.look().test(
+ "name:in"
+ ):
+ self.stream.skip(2)
+ ops.append(nodes.Operand("notin", self.parse_math1()))
+ else:
+ break
+ lineno = self.stream.current.lineno
+ if not ops:
+ return expr
+ return nodes.Compare(expr, ops, lineno=lineno)
+
+ def parse_math1(self):
+ lineno = self.stream.current.lineno
+ left = self.parse_concat()
+ while self.stream.current.type in ("add", "sub"):
+ cls = _math_nodes[self.stream.current.type]
+ next(self.stream)
+ right = self.parse_concat()
+ left = cls(left, right, lineno=lineno)
+ lineno = self.stream.current.lineno
+ return left
+
+ def parse_concat(self):
+ lineno = self.stream.current.lineno
+ args = [self.parse_math2()]
+ while self.stream.current.type == "tilde":
+ next(self.stream)
+ args.append(self.parse_math2())
+ if len(args) == 1:
+ return args[0]
+ return nodes.Concat(args, lineno=lineno)
+
+ def parse_math2(self):
+ lineno = self.stream.current.lineno
+ left = self.parse_pow()
+ while self.stream.current.type in ("mul", "div", "floordiv", "mod"):
+ cls = _math_nodes[self.stream.current.type]
+ next(self.stream)
+ right = self.parse_pow()
+ left = cls(left, right, lineno=lineno)
+ lineno = self.stream.current.lineno
+ return left
+
+ def parse_pow(self):
+ lineno = self.stream.current.lineno
+ left = self.parse_unary()
+ while self.stream.current.type == "pow":
+ next(self.stream)
+ right = self.parse_unary()
+ left = nodes.Pow(left, right, lineno=lineno)
+ lineno = self.stream.current.lineno
+ return left
+
+ def parse_unary(self, with_filter=True):
+ token_type = self.stream.current.type
+ lineno = self.stream.current.lineno
+ if token_type == "sub":
+ next(self.stream)
+ node = nodes.Neg(self.parse_unary(False), lineno=lineno)
+ elif token_type == "add":
+ next(self.stream)
+ node = nodes.Pos(self.parse_unary(False), lineno=lineno)
+ else:
+ node = self.parse_primary()
+ node = self.parse_postfix(node)
+ if with_filter:
+ node = self.parse_filter_expr(node)
+ return node
+
+ def parse_primary(self):
+ token = self.stream.current
+ if token.type == "name":
+ if token.value in ("true", "false", "True", "False"):
+ node = nodes.Const(token.value in ("true", "True"), lineno=token.lineno)
+ elif token.value in ("none", "None"):
+ node = nodes.Const(None, lineno=token.lineno)
+ else:
+ node = nodes.Name(token.value, "load", lineno=token.lineno)
+ next(self.stream)
+ elif token.type == "string":
+ next(self.stream)
+ buf = [token.value]
+ lineno = token.lineno
+ while self.stream.current.type == "string":
+ buf.append(self.stream.current.value)
+ next(self.stream)
+ node = nodes.Const("".join(buf), lineno=lineno)
+ elif token.type in ("integer", "float"):
+ next(self.stream)
+ node = nodes.Const(token.value, lineno=token.lineno)
+ elif token.type == "lparen":
+ next(self.stream)
+ node = self.parse_tuple(explicit_parentheses=True)
+ self.stream.expect("rparen")
+ elif token.type == "lbracket":
+ node = self.parse_list()
+ elif token.type == "lbrace":
+ node = self.parse_dict()
+ else:
+ self.fail("unexpected '%s'" % describe_token(token), token.lineno)
+ return node
+
+ def parse_tuple(
+ self,
+ simplified=False,
+ with_condexpr=True,
+ extra_end_rules=None,
+ explicit_parentheses=False,
+ ):
+ """Works like `parse_expression` but if multiple expressions are
+ delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
+ This method could also return a plain expression instead of a tuple
+ if no commas were found.
+
+ The default parsing mode is a full tuple. If `simplified` is `True`
+ only names and literals are parsed. The `with_condexpr` parameter is
+ forwarded to :meth:`parse_expression`.
+
+ Because tuples do not require delimiters and may end in a bogus comma
+ an extra hint is needed that marks the end of a tuple. For example
+ for loops support tuples between `for` and `in`. In that case the
+ `extra_end_rules` is set to ``['name:in']``.
+
+ `explicit_parentheses` is true if the parsing was triggered by an
+ expression in parentheses. This is used to figure out if an empty
+ tuple is a valid expression or not.
+ """
+ lineno = self.stream.current.lineno
+ if simplified:
+ parse = self.parse_primary
+ elif with_condexpr:
+ parse = self.parse_expression
+ else:
+
+ def parse():
+ return self.parse_expression(with_condexpr=False)
+
+ args = []
+ is_tuple = False
+ while 1:
+ if args:
+ self.stream.expect("comma")
+ if self.is_tuple_end(extra_end_rules):
+ break
+ args.append(parse())
+ if self.stream.current.type == "comma":
+ is_tuple = True
+ else:
+ break
+ lineno = self.stream.current.lineno
+
+ if not is_tuple:
+ if args:
+ return args[0]
+
+ # if we don't have explicit parentheses, an empty tuple is
+ # not a valid expression. This would mean nothing (literally
+ # nothing) in the spot of an expression would be an empty
+ # tuple.
+ if not explicit_parentheses:
+ self.fail(
+ "Expected an expression, got '%s'"
+ % describe_token(self.stream.current)
+ )
+
+ return nodes.Tuple(args, "load", lineno=lineno)
+
+ def parse_list(self):
+ token = self.stream.expect("lbracket")
+ items = []
+ while self.stream.current.type != "rbracket":
+ if items:
+ self.stream.expect("comma")
+ if self.stream.current.type == "rbracket":
+ break
+ items.append(self.parse_expression())
+ self.stream.expect("rbracket")
+ return nodes.List(items, lineno=token.lineno)
+
+ def parse_dict(self):
+ token = self.stream.expect("lbrace")
+ items = []
+ while self.stream.current.type != "rbrace":
+ if items:
+ self.stream.expect("comma")
+ if self.stream.current.type == "rbrace":
+ break
+ key = self.parse_expression()
+ self.stream.expect("colon")
+ value = self.parse_expression()
+ items.append(nodes.Pair(key, value, lineno=key.lineno))
+ self.stream.expect("rbrace")
+ return nodes.Dict(items, lineno=token.lineno)
+
+ def parse_postfix(self, node):
+ while 1:
+ token_type = self.stream.current.type
+ if token_type == "dot" or token_type == "lbracket":
+ node = self.parse_subscript(node)
+ # calls are valid both after postfix expressions (getattr
+ # and getitem) as well as filters and tests
+ elif token_type == "lparen":
+ node = self.parse_call(node)
+ else:
+ break
+ return node
+
+ def parse_filter_expr(self, node):
+ while 1:
+ token_type = self.stream.current.type
+ if token_type == "pipe":
+ node = self.parse_filter(node)
+ elif token_type == "name" and self.stream.current.value == "is":
+ node = self.parse_test(node)
+ # calls are valid both after postfix expressions (getattr
+ # and getitem) as well as filters and tests
+ elif token_type == "lparen":
+ node = self.parse_call(node)
+ else:
+ break
+ return node
+
+ def parse_subscript(self, node):
+ token = next(self.stream)
+ if token.type == "dot":
+ attr_token = self.stream.current
+ next(self.stream)
+ if attr_token.type == "name":
+ return nodes.Getattr(
+ node, attr_token.value, "load", lineno=token.lineno
+ )
+ elif attr_token.type != "integer":
+ self.fail("expected name or number", attr_token.lineno)
+ arg = nodes.Const(attr_token.value, lineno=attr_token.lineno)
+ return nodes.Getitem(node, arg, "load", lineno=token.lineno)
+ if token.type == "lbracket":
+ args = []
+ while self.stream.current.type != "rbracket":
+ if args:
+ self.stream.expect("comma")
+ args.append(self.parse_subscribed())
+ self.stream.expect("rbracket")
+ if len(args) == 1:
+ arg = args[0]
+ else:
+ arg = nodes.Tuple(args, "load", lineno=token.lineno)
+ return nodes.Getitem(node, arg, "load", lineno=token.lineno)
+ self.fail("expected subscript expression", token.lineno)
+
+ def parse_subscribed(self):
+ lineno = self.stream.current.lineno
+
+ if self.stream.current.type == "colon":
+ next(self.stream)
+ args = [None]
+ else:
+ node = self.parse_expression()
+ if self.stream.current.type != "colon":
+ return node
+ next(self.stream)
+ args = [node]
+
+ if self.stream.current.type == "colon":
+ args.append(None)
+ elif self.stream.current.type not in ("rbracket", "comma"):
+ args.append(self.parse_expression())
+ else:
+ args.append(None)
+
+ if self.stream.current.type == "colon":
+ next(self.stream)
+ if self.stream.current.type not in ("rbracket", "comma"):
+ args.append(self.parse_expression())
+ else:
+ args.append(None)
+ else:
+ args.append(None)
+
+ return nodes.Slice(lineno=lineno, *args)
+
+ def parse_call(self, node):
+ token = self.stream.expect("lparen")
+ args = []
+ kwargs = []
+ dyn_args = dyn_kwargs = None
+ require_comma = False
+
+ def ensure(expr):
+ if not expr:
+ self.fail("invalid syntax for function call expression", token.lineno)
+
+ while self.stream.current.type != "rparen":
+ if require_comma:
+ self.stream.expect("comma")
+ # support for trailing comma
+ if self.stream.current.type == "rparen":
+ break
+ if self.stream.current.type == "mul":
+ ensure(dyn_args is None and dyn_kwargs is None)
+ next(self.stream)
+ dyn_args = self.parse_expression()
+ elif self.stream.current.type == "pow":
+ ensure(dyn_kwargs is None)
+ next(self.stream)
+ dyn_kwargs = self.parse_expression()
+ else:
+ if (
+ self.stream.current.type == "name"
+ and self.stream.look().type == "assign"
+ ):
+ # Parsing a kwarg
+ ensure(dyn_kwargs is None)
+ key = self.stream.current.value
+ self.stream.skip(2)
+ value = self.parse_expression()
+ kwargs.append(nodes.Keyword(key, value, lineno=value.lineno))
+ else:
+ # Parsing an arg
+ ensure(dyn_args is None and dyn_kwargs is None and not kwargs)
+ args.append(self.parse_expression())
+
+ require_comma = True
+ self.stream.expect("rparen")
+
+ if node is None:
+ return args, kwargs, dyn_args, dyn_kwargs
+ return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno)
+
+ def parse_filter(self, node, start_inline=False):
+ while self.stream.current.type == "pipe" or start_inline:
+ if not start_inline:
+ next(self.stream)
+ token = self.stream.expect("name")
+ name = token.value
+ while self.stream.current.type == "dot":
+ next(self.stream)
+ name += "." + self.stream.expect("name").value
+ if self.stream.current.type == "lparen":
+ args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
+ else:
+ args = []
+ kwargs = []
+ dyn_args = dyn_kwargs = None
+ node = nodes.Filter(
+ node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
+ )
+ start_inline = False
+ return node
+
+ def parse_test(self, node):
+ token = next(self.stream)
+ if self.stream.current.test("name:not"):
+ next(self.stream)
+ negated = True
+ else:
+ negated = False
+ name = self.stream.expect("name").value
+ while self.stream.current.type == "dot":
+ next(self.stream)
+ name += "." + self.stream.expect("name").value
+ dyn_args = dyn_kwargs = None
+ kwargs = []
+ if self.stream.current.type == "lparen":
+ args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
+ elif self.stream.current.type in (
+ "name",
+ "string",
+ "integer",
+ "float",
+ "lparen",
+ "lbracket",
+ "lbrace",
+ ) and not self.stream.current.test_any("name:else", "name:or", "name:and"):
+ if self.stream.current.test("name:is"):
+ self.fail("You cannot chain multiple tests with is")
+ arg_node = self.parse_primary()
+ arg_node = self.parse_postfix(arg_node)
+ args = [arg_node]
+ else:
+ args = []
+ node = nodes.Test(
+ node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
+ )
+ if negated:
+ node = nodes.Not(node, lineno=token.lineno)
+ return node
+
+ def subparse(self, end_tokens=None):
+ body = []
+ data_buffer = []
+ add_data = data_buffer.append
+
+ if end_tokens is not None:
+ self._end_token_stack.append(end_tokens)
+
+ def flush_data():
+ if data_buffer:
+ lineno = data_buffer[0].lineno
+ body.append(nodes.Output(data_buffer[:], lineno=lineno))
+ del data_buffer[:]
+
+ try:
+ while self.stream:
+ token = self.stream.current
+ if token.type == "data":
+ if token.value:
+ add_data(nodes.TemplateData(token.value, lineno=token.lineno))
+ next(self.stream)
+ elif token.type == "variable_begin":
+ next(self.stream)
+ add_data(self.parse_tuple(with_condexpr=True))
+ self.stream.expect("variable_end")
+ elif token.type == "block_begin":
+ flush_data()
+ next(self.stream)
+ if end_tokens is not None and self.stream.current.test_any(
+ *end_tokens
+ ):
+ return body
+ rv = self.parse_statement()
+ if isinstance(rv, list):
+ body.extend(rv)
+ else:
+ body.append(rv)
+ self.stream.expect("block_end")
+ else:
+ raise AssertionError("internal parsing error")
+
+ flush_data()
+ finally:
+ if end_tokens is not None:
+ self._end_token_stack.pop()
+
+ return body
+
+ def parse(self):
+ """Parse the whole template into a `Template` node."""
+ result = nodes.Template(self.subparse(), lineno=1)
+ result.set_environment(self.environment)
+ return result
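+
+
+# Illustrative sketch (not part of upstream Jinja2): the parser is normally
+# driven through the environment, which tokenizes the source and returns the
+# root Template node:
+#
+#   from jinja2 import Environment
+#
+#   env = Environment()
+#   tree = env.parse("{% for x in items %}{{ x }}{% endfor %}")
+#   # tree is a nodes.Template whose body contains a single nodes.For statement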
diff --git a/third_party/python/Jinja2/src/jinja2/runtime.py b/third_party/python/Jinja2/src/jinja2/runtime.py
new file mode 100644
index 0000000000..3ad7968624
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/runtime.py
@@ -0,0 +1,1011 @@
+# -*- coding: utf-8 -*-
+"""The runtime functions and state used by compiled templates."""
+import sys
+from itertools import chain
+from types import MethodType
+
+from markupsafe import escape # noqa: F401
+from markupsafe import Markup
+from markupsafe import soft_unicode
+
+from ._compat import abc
+from ._compat import imap
+from ._compat import implements_iterator
+from ._compat import implements_to_string
+from ._compat import iteritems
+from ._compat import PY2
+from ._compat import string_types
+from ._compat import text_type
+from ._compat import with_metaclass
+from .exceptions import TemplateNotFound # noqa: F401
+from .exceptions import TemplateRuntimeError # noqa: F401
+from .exceptions import UndefinedError
+from .nodes import EvalContext
+from .utils import concat
+from .utils import evalcontextfunction
+from .utils import internalcode
+from .utils import missing
+from .utils import Namespace # noqa: F401
+from .utils import object_type_repr
+
+# these variables are exported to the template runtime
+exported = [
+ "LoopContext",
+ "TemplateReference",
+ "Macro",
+ "Markup",
+ "TemplateRuntimeError",
+ "missing",
+ "concat",
+ "escape",
+ "markup_join",
+ "unicode_join",
+ "to_string",
+ "identity",
+ "TemplateNotFound",
+ "Namespace",
+ "Undefined",
+]
+
+#: the function that is used to convert something into a string.
+#: We can just use the text type here.
+to_string = text_type
+
+
+def identity(x):
+ """Returns its argument. Useful for certain things in the
+ environment.
+ """
+ return x
+
+
+def markup_join(seq):
+ """Concatenation that escapes if necessary and converts to unicode."""
+ buf = []
+ iterator = imap(soft_unicode, seq)
+ for arg in iterator:
+ buf.append(arg)
+ if hasattr(arg, "__html__"):
+ return Markup(u"").join(chain(buf, iterator))
+ return concat(buf)
+
+
+def unicode_join(seq):
+ """Simple args to unicode conversion and concatenation."""
+ return concat(imap(text_type, seq))
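+
+
+# Illustrative sketch (not part of upstream Jinja2): ``markup_join`` switches
+# to an escaping join as soon as one item provides ``__html__``:
+#
+#   from markupsafe import Markup
+#
+#   unicode_join(["<b>", "x"])            # -> '<b>x'
+#   markup_join([Markup("<b>"), "<i>"])   # -> Markup('<b>&lt;i&gt;')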
+
+
+def new_context(
+ environment,
+ template_name,
+ blocks,
+ vars=None,
+ shared=None,
+ globals=None,
+ locals=None,
+):
+ """Internal helper for context creation."""
+ if vars is None:
+ vars = {}
+ if shared:
+ parent = vars
+ else:
+ parent = dict(globals or (), **vars)
+ if locals:
+ # if the parent is shared a copy should be created because
+ # we don't want to modify the dict passed
+ if shared:
+ parent = dict(parent)
+ for key, value in iteritems(locals):
+ if value is not missing:
+ parent[key] = value
+ return environment.context_class(environment, parent, template_name, blocks)
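+
+
+# Illustrative note (not part of upstream Jinja2): with shared=False,
+# globals={"a": 1} and vars={"a": 2, "b": 3}, the parent mapping built above
+# becomes {"a": 2, "b": 3} (template vars win over globals); entries from
+# ``locals`` then override both, except values that are ``missing``.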
+
+
+class TemplateReference(object):
+ """The `self` in templates."""
+
+ def __init__(self, context):
+ self.__context = context
+
+ def __getitem__(self, name):
+ blocks = self.__context.blocks[name]
+ return BlockReference(name, self.__context, blocks, 0)
+
+ def __repr__(self):
+ return "<%s %r>" % (self.__class__.__name__, self.__context.name)
+
+
+def _get_func(x):
+ return getattr(x, "__func__", x)
+
+
+class ContextMeta(type):
+ def __new__(mcs, name, bases, d):
+ rv = type.__new__(mcs, name, bases, d)
+ if bases == ():
+ return rv
+
+ resolve = _get_func(rv.resolve)
+ default_resolve = _get_func(Context.resolve)
+ resolve_or_missing = _get_func(rv.resolve_or_missing)
+ default_resolve_or_missing = _get_func(Context.resolve_or_missing)
+
+ # If ``resolve`` is overridden but ``resolve_or_missing`` is not,
+ # invert the call logic so the overridden ``resolve`` is still honored.
+ if (
+ resolve is not default_resolve
+ and resolve_or_missing is default_resolve_or_missing
+ ):
+ rv._legacy_resolve_mode = True
+ elif (
+ resolve is default_resolve
+ and resolve_or_missing is default_resolve_or_missing
+ ):
+ rv._fast_resolve_mode = True
+
+ return rv
+
+
+def resolve_or_missing(context, key, missing=missing):
+ if key in context.vars:
+ return context.vars[key]
+ if key in context.parent:
+ return context.parent[key]
+ return missing
+
+
+class Context(with_metaclass(ContextMeta)):
+ """The template context holds the variables of a template. It stores the
+ values passed to the template and also the names the template exports.
+ Creating instances is neither supported nor useful as it's created
+ automatically at various stages of the template evaluation and should not
+ be created by hand.
+
+ The context is immutable. Modifications on :attr:`parent` **must not**
+ happen and modifications on :attr:`vars` are allowed from generated
+ template code only. Template filters and global functions marked as
+ :func:`contextfunction`\\s get the active context passed as first argument
+ and are allowed to access the context read-only.
+
+ The template context supports read only dict operations (`get`,
+ `keys`, `values`, `items`, `iterkeys`, `itervalues`, `iteritems`,
+ `__getitem__`, `__contains__`). Additionally there is a :meth:`resolve`
+ method that doesn't fail with a `KeyError` but returns an
+ :class:`Undefined` object for missing variables.
+ """
+
+ # XXX: we want to eventually make this be a deprecation warning and
+ # remove it.
+ _legacy_resolve_mode = False
+ _fast_resolve_mode = False
+
+ def __init__(self, environment, parent, name, blocks):
+ self.parent = parent
+ self.vars = {}
+ self.environment = environment
+ self.eval_ctx = EvalContext(self.environment, name)
+ self.exported_vars = set()
+ self.name = name
+
+ # create the initial mapping of blocks. Whenever template inheritance
+ # takes place the runtime will update this mapping with the new blocks
+ # from the template.
+ self.blocks = dict((k, [v]) for k, v in iteritems(blocks))
+
+ # In case we detect the fast resolve mode we can set up an alias
+ # here that bypasses the legacy code logic.
+ if self._fast_resolve_mode:
+ self.resolve_or_missing = MethodType(resolve_or_missing, self)
+
+ def super(self, name, current):
+ """Render a parent block."""
+ try:
+ blocks = self.blocks[name]
+ index = blocks.index(current) + 1
+ blocks[index]
+ except LookupError:
+ return self.environment.undefined(
+ "there is no parent block called %r." % name, name="super"
+ )
+ return BlockReference(name, self, blocks, index)
+
+ def get(self, key, default=None):
+ """Returns an item from the template context, if it doesn't exist
+ `default` is returned.
+ """
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def resolve(self, key):
+ """Looks up a variable like `__getitem__` or `get` but returns an
+ :class:`Undefined` object with the name of the variable looked up.
+ """
+ if self._legacy_resolve_mode:
+ rv = resolve_or_missing(self, key)
+ else:
+ rv = self.resolve_or_missing(key)
+ if rv is missing:
+ return self.environment.undefined(name=key)
+ return rv
+
+ def resolve_or_missing(self, key):
+ """Resolves a variable like :meth:`resolve` but returns the
+ special `missing` value if it cannot be found.
+ """
+ if self._legacy_resolve_mode:
+ rv = self.resolve(key)
+ if isinstance(rv, Undefined):
+ rv = missing
+ return rv
+ return resolve_or_missing(self, key)
+
+ def get_exported(self):
+ """Get a new dict with the exported variables."""
+ return dict((k, self.vars[k]) for k in self.exported_vars)
+
+ def get_all(self):
+ """Return the complete context as dict including the exported
+ variables. For optimization reasons this might not return an
+ actual copy, so be careful when using it.
+ """
+ if not self.vars:
+ return self.parent
+ if not self.parent:
+ return self.vars
+ return dict(self.parent, **self.vars)
+
+ @internalcode
+ def call(__self, __obj, *args, **kwargs): # noqa: B902
+ """Call the callable with the arguments and keyword arguments
+ provided but inject the active context or environment as first
+ argument if the callable is a :func:`contextfunction` or
+ :func:`environmentfunction`.
+ """
+ if __debug__:
+ __traceback_hide__ = True # noqa
+
+ # Allow callable classes to take a context
+ if hasattr(__obj, "__call__"): # noqa: B004
+ fn = __obj.__call__
+ for fn_type in (
+ "contextfunction",
+ "evalcontextfunction",
+ "environmentfunction",
+ ):
+ if hasattr(fn, fn_type):
+ __obj = fn
+ break
+
+ if callable(__obj):
+ if getattr(__obj, "contextfunction", False) is True:
+ args = (__self,) + args
+ elif getattr(__obj, "evalcontextfunction", False) is True:
+ args = (__self.eval_ctx,) + args
+ elif getattr(__obj, "environmentfunction", False) is True:
+ args = (__self.environment,) + args
+ try:
+ return __obj(*args, **kwargs)
+ except StopIteration:
+ return __self.environment.undefined(
+ "value was undefined because "
+ "a callable raised a "
+ "StopIteration exception"
+ )
+
+ def derived(self, locals=None):
+ """Internal helper function to create a derived context. This is
+ used in situations where the system needs a new context in the same
+ template that is independent.
+ """
+ context = new_context(
+ self.environment, self.name, {}, self.get_all(), True, None, locals
+ )
+ context.eval_ctx = self.eval_ctx
+ context.blocks.update((k, list(v)) for k, v in iteritems(self.blocks))
+ return context
+
+ def _all(meth): # noqa: B902
+ def proxy(self):
+ return getattr(self.get_all(), meth)()
+
+ proxy.__doc__ = getattr(dict, meth).__doc__
+ proxy.__name__ = meth
+ return proxy
+
+ keys = _all("keys")
+ values = _all("values")
+ items = _all("items")
+
+ # not available on python 3
+ if PY2:
+ iterkeys = _all("iterkeys")
+ itervalues = _all("itervalues")
+ iteritems = _all("iteritems")
+ del _all
+
+ def __contains__(self, name):
+ return name in self.vars or name in self.parent
+
+ def __getitem__(self, key):
+ """Lookup a variable or raise `KeyError` if the variable is
+ undefined.
+ """
+ item = self.resolve_or_missing(key)
+ if item is missing:
+ raise KeyError(key)
+ return item
+
+ def __repr__(self):
+ return "<%s %s of %r>" % (
+ self.__class__.__name__,
+ repr(self.get_all()),
+ self.name,
+ )
+
+
+abc.Mapping.register(Context)
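+
+
+# Illustrative sketch (not part of upstream Jinja2): ContextMeta inspects
+# subclasses. Overriding only ``resolve`` flips the subclass into legacy mode
+# so ``resolve_or_missing`` is routed through the override; overriding neither
+# keeps the fast module-level ``resolve_or_missing`` alias.
+#
+#   class MyContext(Context):
+#       def resolve(self, key):  # hypothetical override, name is illustrative
+#           if key == "injected":
+#               return 42
+#           return super(MyContext, self).resolve(key)
+#
+#   assert MyContext._legacy_resolve_mode is True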
+
+
+class BlockReference(object):
+ """One block on a template reference."""
+
+ def __init__(self, name, context, stack, depth):
+ self.name = name
+ self._context = context
+ self._stack = stack
+ self._depth = depth
+
+ @property
+ def super(self):
+ """Super the block."""
+ if self._depth + 1 >= len(self._stack):
+ return self._context.environment.undefined(
+ "there is no parent block called %r." % self.name, name="super"
+ )
+ return BlockReference(self.name, self._context, self._stack, self._depth + 1)
+
+ @internalcode
+ def __call__(self):
+ rv = concat(self._stack[self._depth](self._context))
+ if self._context.eval_ctx.autoescape:
+ rv = Markup(rv)
+ return rv
+
+
+@implements_iterator
+class LoopContext:
+ """A wrapper iterable for dynamic ``for`` loops, with information
+ about the loop and iteration.
+ """
+
+ #: Current iteration of the loop, starting at 0.
+ index0 = -1
+
+ _length = None
+ _after = missing
+ _current = missing
+ _before = missing
+ _last_changed_value = missing
+
+ def __init__(self, iterable, undefined, recurse=None, depth0=0):
+ """
+ :param iterable: Iterable to wrap.
+ :param undefined: :class:`Undefined` class to use for next and
+ previous items.
+ :param recurse: The function to render the loop body when the
+ loop is marked recursive.
+ :param depth0: Incremented when looping recursively.
+ """
+ self._iterable = iterable
+ self._iterator = self._to_iterator(iterable)
+ self._undefined = undefined
+ self._recurse = recurse
+ #: How many levels deep a recursive loop currently is, starting at 0.
+ self.depth0 = depth0
+
+ @staticmethod
+ def _to_iterator(iterable):
+ return iter(iterable)
+
+ @property
+ def length(self):
+ """Length of the iterable.
+
+ If the iterable is a generator or otherwise does not have a
+ size, it is eagerly evaluated to get a size.
+ """
+ if self._length is not None:
+ return self._length
+
+ try:
+ self._length = len(self._iterable)
+ except TypeError:
+ iterable = list(self._iterator)
+ self._iterator = self._to_iterator(iterable)
+ self._length = len(iterable) + self.index + (self._after is not missing)
+
+ return self._length
+
+ def __len__(self):
+ return self.length
+
+ @property
+ def depth(self):
+ """How many levels deep a recursive loop currently is, starting at 1."""
+ return self.depth0 + 1
+
+ @property
+ def index(self):
+ """Current iteration of the loop, starting at 1."""
+ return self.index0 + 1
+
+ @property
+ def revindex0(self):
+ """Number of iterations from the end of the loop, ending at 0.
+
+ Requires calculating :attr:`length`.
+ """
+ return self.length - self.index
+
+ @property
+ def revindex(self):
+ """Number of iterations from the end of the loop, ending at 1.
+
+ Requires calculating :attr:`length`.
+ """
+ return self.length - self.index0
+
+ @property
+ def first(self):
+ """Whether this is the first iteration of the loop."""
+ return self.index0 == 0
+
+ def _peek_next(self):
+ """Return the next element in the iterable, or :data:`missing`
+ if the iterable is exhausted. Only peeks one item ahead, caching
+ the result in :attr:`_after` for use in subsequent checks. The
+ cache is reset when :meth:`__next__` is called.
+ """
+ if self._after is not missing:
+ return self._after
+
+ self._after = next(self._iterator, missing)
+ return self._after
+
+ @property
+ def last(self):
+ """Whether this is the last iteration of the loop.
+
+ Causes the iterable to advance early. See
+ :func:`itertools.groupby` for issues this can cause.
+ The :func:`groupby` filter avoids that issue.
+ """
+ return self._peek_next() is missing
+
+ @property
+ def previtem(self):
+ """The item in the previous iteration. Undefined during the
+ first iteration.
+ """
+ if self.first:
+ return self._undefined("there is no previous item")
+
+ return self._before
+
+ @property
+ def nextitem(self):
+ """The item in the next iteration. Undefined during the last
+ iteration.
+
+ Causes the iterable to advance early. See
+ :func:`itertools.groupby` for issues this can cause.
+ The :func:`groupby` filter avoids that issue.
+ """
+ rv = self._peek_next()
+
+ if rv is missing:
+ return self._undefined("there is no next item")
+
+ return rv
+
+ def cycle(self, *args):
+ """Return a value from the given args, cycling through based on
+ the current :attr:`index0`.
+
+ :param args: One or more values to cycle through.
+ """
+ if not args:
+ raise TypeError("no items for cycling given")
+
+ return args[self.index0 % len(args)]
+
+ def changed(self, *value):
+ """Return ``True`` if previously called with a different value
+ (including when called for the first time).
+
+ :param value: One or more values to compare to the last call.
+ """
+ if self._last_changed_value != value:
+ self._last_changed_value = value
+ return True
+
+ return False
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if self._after is not missing:
+ rv = self._after
+ self._after = missing
+ else:
+ rv = next(self._iterator)
+
+ self.index0 += 1
+ self._before = self._current
+ self._current = rv
+ return rv, self
+
+ @internalcode
+ def __call__(self, iterable):
+ """When iterating over nested data, render the body of the loop
+ recursively with the given inner iterable data.
+
+ The loop must have the ``recursive`` marker for this to work.
+ """
+ if self._recurse is None:
+ raise TypeError(
+ "The loop must have the 'recursive' marker to be called recursively."
+ )
+
+ return self._recurse(iterable, self._recurse, depth=self.depth)
+
+ def __repr__(self):
+ return "<%s %d/%d>" % (self.__class__.__name__, self.index, self.length)
+
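+# A minimal template-level sketch of the helpers this class exposes as
+# ``loop`` (illustrative; the variable names are made up):
+#
+#     {% for row in rows %}
+#         {{ loop.index }} / {{ loop.length }}
+#         <li class="{{ loop.cycle('odd', 'even') }}">{{ row }}</li>
+#         {% if loop.changed(row.group) %}new group{% endif %}
+#     {% endfor %}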
+
+class Macro(object):
+ """Wraps a macro function."""
+
+ def __init__(
+ self,
+ environment,
+ func,
+ name,
+ arguments,
+ catch_kwargs,
+ catch_varargs,
+ caller,
+ default_autoescape=None,
+ ):
+ self._environment = environment
+ self._func = func
+ self._argument_count = len(arguments)
+ self.name = name
+ self.arguments = arguments
+ self.catch_kwargs = catch_kwargs
+ self.catch_varargs = catch_varargs
+ self.caller = caller
+ self.explicit_caller = "caller" in arguments
+ if default_autoescape is None:
+ default_autoescape = environment.autoescape
+ self._default_autoescape = default_autoescape
+
+ @internalcode
+ @evalcontextfunction
+ def __call__(self, *args, **kwargs):
+ # This requires a bit of explanation. In the past we used to
+ # decide largely based on compile-time information if a macro is
+ # safe or unsafe. While there was a volatile mode it was largely
+ # unused for deciding on escaping. This turns out to be
+ # problematic for macros because whether a macro is safe depends not
+ # on the escape mode when it was defined, but rather when it was used.
+ #
+ # Because however we export macros from the module system and
+ # there are historic callers that do not pass an eval context (and
+ # will continue to not pass one), we need to perform an instance
+ # check here.
+ #
+ # This is considered safe because an eval context is not a valid
+ # argument to callables otherwise anyway. Worst case here is
+ # that if no eval context is passed we fall back to the compile
+ # time autoescape flag.
+ if args and isinstance(args[0], EvalContext):
+ autoescape = args[0].autoescape
+ args = args[1:]
+ else:
+ autoescape = self._default_autoescape
+
+ # try to consume the positional arguments
+ arguments = list(args[: self._argument_count])
+ off = len(arguments)
+
+ # For information why this is necessary refer to the handling
+ # of caller in the `macro_body` handler in the compiler.
+ found_caller = False
+
+ # if the number of arguments consumed is not the number of
+ # arguments expected we start filling in keyword arguments
+ # and defaults.
+ if off != self._argument_count:
+ for name in self.arguments[len(arguments) :]:
+ try:
+ value = kwargs.pop(name)
+ except KeyError:
+ value = missing
+ if name == "caller":
+ found_caller = True
+ arguments.append(value)
+ else:
+ found_caller = self.explicit_caller
+
+ # it's important that the order of these arguments does not change
+ # unless it is also changed in the compiler's `function_scoping` method.
+ # the order is caller, keyword arguments, positional arguments!
+ if self.caller and not found_caller:
+ caller = kwargs.pop("caller", None)
+ if caller is None:
+ caller = self._environment.undefined("No caller defined", name="caller")
+ arguments.append(caller)
+
+ if self.catch_kwargs:
+ arguments.append(kwargs)
+ elif kwargs:
+ if "caller" in kwargs:
+ raise TypeError(
+ "macro %r was invoked with two values for "
+ "the special caller argument. This is "
+ "most likely a bug." % self.name
+ )
+ raise TypeError(
+ "macro %r takes no keyword argument %r"
+ % (self.name, next(iter(kwargs)))
+ )
+ if self.catch_varargs:
+ arguments.append(args[self._argument_count :])
+ elif len(args) > self._argument_count:
+ raise TypeError(
+ "macro %r takes not more than %d argument(s)"
+ % (self.name, len(self.arguments))
+ )
+
+ return self._invoke(arguments, autoescape)
+
+ def _invoke(self, arguments, autoescape):
+ """This method is being swapped out by the async implementation."""
+ rv = self._func(*arguments)
+ if autoescape:
+ rv = Markup(rv)
+ return rv
+
+ def __repr__(self):
+ return "<%s %s>" % (
+ self.__class__.__name__,
+ self.name is None and "anonymous" or repr(self.name),
+ )
+
+
+@implements_to_string
+class Undefined(object):
+ """The default undefined type. This undefined type can be printed and
+ iterated over, but every other access will raise an :exc:`UndefinedError`:
+
+ >>> foo = Undefined(name='foo')
+ >>> str(foo)
+ ''
+ >>> not foo
+ True
+ >>> foo + 42
+ Traceback (most recent call last):
+ ...
+ jinja2.exceptions.UndefinedError: 'foo' is undefined
+ """
+
+ __slots__ = (
+ "_undefined_hint",
+ "_undefined_obj",
+ "_undefined_name",
+ "_undefined_exception",
+ )
+
+ def __init__(self, hint=None, obj=missing, name=None, exc=UndefinedError):
+ self._undefined_hint = hint
+ self._undefined_obj = obj
+ self._undefined_name = name
+ self._undefined_exception = exc
+
+ @property
+ def _undefined_message(self):
+ """Build a message about the undefined value based on how it was
+ accessed.
+ """
+ if self._undefined_hint:
+ return self._undefined_hint
+
+ if self._undefined_obj is missing:
+ return "%r is undefined" % self._undefined_name
+
+ if not isinstance(self._undefined_name, string_types):
+ return "%s has no element %r" % (
+ object_type_repr(self._undefined_obj),
+ self._undefined_name,
+ )
+
+ return "%r has no attribute %r" % (
+ object_type_repr(self._undefined_obj),
+ self._undefined_name,
+ )
+
+ @internalcode
+ def _fail_with_undefined_error(self, *args, **kwargs):
+ """Raise an :exc:`UndefinedError` when operations are performed
+ on the undefined value.
+ """
+ raise self._undefined_exception(self._undefined_message)
+
+ @internalcode
+ def __getattr__(self, name):
+ if name[:2] == "__":
+ raise AttributeError(name)
+ return self._fail_with_undefined_error()
+
+ __add__ = (
+ __radd__
+ ) = (
+ __mul__
+ ) = (
+ __rmul__
+ ) = (
+ __div__
+ ) = (
+ __rdiv__
+ ) = (
+ __truediv__
+ ) = (
+ __rtruediv__
+ ) = (
+ __floordiv__
+ ) = (
+ __rfloordiv__
+ ) = (
+ __mod__
+ ) = (
+ __rmod__
+ ) = (
+ __pos__
+ ) = (
+ __neg__
+ ) = (
+ __call__
+ ) = (
+ __getitem__
+ ) = (
+ __lt__
+ ) = (
+ __le__
+ ) = (
+ __gt__
+ ) = (
+ __ge__
+ ) = (
+ __int__
+ ) = (
+ __float__
+ ) = (
+ __complex__
+ ) = __pow__ = __rpow__ = __sub__ = __rsub__ = _fail_with_undefined_error
+
+ def __eq__(self, other):
+ return type(self) is type(other)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __hash__(self):
+ return id(type(self))
+
+ def __str__(self):
+ return u""
+
+ def __len__(self):
+ return 0
+
+ def __iter__(self):
+ if 0:
+ yield None
+
+ def __nonzero__(self):
+ return False
+
+ __bool__ = __nonzero__
+
+ def __repr__(self):
+ return "Undefined"
+
+
+def make_logging_undefined(logger=None, base=None):
+ """Given a logger object this returns a new undefined class that will
+ log certain failures. It will log iterations and printing. If no
+ logger is given a default logger is created.
+
+ Example::
+
+ logger = logging.getLogger(__name__)
+ LoggingUndefined = make_logging_undefined(
+ logger=logger,
+ base=Undefined
+ )
+
+ .. versionadded:: 2.8
+
+ :param logger: the logger to use. If not provided, a default logger
+ is created.
+ :param base: the base class to add logging functionality to. This
+ defaults to :class:`Undefined`.
+ """
+ if logger is None:
+ import logging
+
+ logger = logging.getLogger(__name__)
+ logger.addHandler(logging.StreamHandler(sys.stderr))
+ if base is None:
+ base = Undefined
+
+ def _log_message(undef):
+ if undef._undefined_hint is None:
+ if undef._undefined_obj is missing:
+ hint = "%s is undefined" % undef._undefined_name
+ elif not isinstance(undef._undefined_name, string_types):
+ hint = "%s has no element %s" % (
+ object_type_repr(undef._undefined_obj),
+ undef._undefined_name,
+ )
+ else:
+ hint = "%s has no attribute %s" % (
+ object_type_repr(undef._undefined_obj),
+ undef._undefined_name,
+ )
+ else:
+ hint = undef._undefined_hint
+ logger.warning("Template variable warning: %s", hint)
+
+ class LoggingUndefined(base):
+ def _fail_with_undefined_error(self, *args, **kwargs):
+ try:
+ return base._fail_with_undefined_error(self, *args, **kwargs)
+ except self._undefined_exception as e:
+ logger.error("Template variable error: %s", str(e))
+ raise e
+
+ def __str__(self):
+ rv = base.__str__(self)
+ _log_message(self)
+ return rv
+
+ def __iter__(self):
+ rv = base.__iter__(self)
+ _log_message(self)
+ return rv
+
+ if PY2:
+
+ def __nonzero__(self):
+ rv = base.__nonzero__(self)
+ _log_message(self)
+ return rv
+
+ def __unicode__(self):
+ rv = base.__unicode__(self)
+ _log_message(self)
+ return rv
+
+ else:
+
+ def __bool__(self):
+ rv = base.__bool__(self)
+ _log_message(self)
+ return rv
+
+ return LoggingUndefined
+
+
+# No @implements_to_string decorator here because __str__
+# is not overwritten from Undefined in this class.
+# This would cause a recursion error in Python 2.
+class ChainableUndefined(Undefined):
+ """An undefined that is chainable, where both ``__getattr__`` and
+ ``__getitem__`` return itself rather than raising an
+ :exc:`UndefinedError`.
+
+ >>> foo = ChainableUndefined(name='foo')
+ >>> str(foo.bar['baz'])
+ ''
+ >>> foo.bar['baz'] + 42
+ Traceback (most recent call last):
+ ...
+ jinja2.exceptions.UndefinedError: 'foo' is undefined
+
+ .. versionadded:: 2.11.0
+ """
+
+ __slots__ = ()
+
+ def __html__(self):
+ return self.__str__()
+
+ def __getattr__(self, _):
+ return self
+
+ __getitem__ = __getattr__
+
+
+@implements_to_string
+class DebugUndefined(Undefined):
+ """An undefined that returns the debug info when printed.
+
+ >>> foo = DebugUndefined(name='foo')
+ >>> str(foo)
+ '{{ foo }}'
+ >>> not foo
+ True
+ >>> foo + 42
+ Traceback (most recent call last):
+ ...
+ jinja2.exceptions.UndefinedError: 'foo' is undefined
+ """
+
+ __slots__ = ()
+
+ def __str__(self):
+ if self._undefined_hint is None:
+ if self._undefined_obj is missing:
+ return u"{{ %s }}" % self._undefined_name
+ return "{{ no such element: %s[%r] }}" % (
+ object_type_repr(self._undefined_obj),
+ self._undefined_name,
+ )
+ return u"{{ undefined value printed: %s }}" % self._undefined_hint
+
+
+@implements_to_string
+class StrictUndefined(Undefined):
+ """An undefined that barks on print and iteration as well as boolean
+ tests and all kinds of comparisons. In other words: you can do nothing
+ with it except checking if it's defined using the `defined` test.
+
+ >>> foo = StrictUndefined(name='foo')
+ >>> str(foo)
+ Traceback (most recent call last):
+ ...
+ jinja2.exceptions.UndefinedError: 'foo' is undefined
+ >>> not foo
+ Traceback (most recent call last):
+ ...
+ jinja2.exceptions.UndefinedError: 'foo' is undefined
+ >>> foo + 42
+ Traceback (most recent call last):
+ ...
+ jinja2.exceptions.UndefinedError: 'foo' is undefined
+ """
+
+ __slots__ = ()
+ __iter__ = (
+ __str__
+ ) = (
+ __len__
+ ) = (
+ __nonzero__
+ ) = __eq__ = __ne__ = __bool__ = __hash__ = Undefined._fail_with_undefined_error
+
+
+# remove remaining slots attributes, after the metaclass did the magic they
+# are unneeded and irritating as they contain wrong data for the subclasses.
+del (
+ Undefined.__slots__,
+ ChainableUndefined.__slots__,
+ DebugUndefined.__slots__,
+ StrictUndefined.__slots__,
+)
diff --git a/third_party/python/Jinja2/src/jinja2/sandbox.py b/third_party/python/Jinja2/src/jinja2/sandbox.py
new file mode 100644
index 0000000000..cfd7993aee
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/sandbox.py
@@ -0,0 +1,510 @@
+# -*- coding: utf-8 -*-
+"""A sandbox layer that ensures unsafe operations cannot be performed.
+Useful when the template itself comes from an untrusted source.
+"""
+import operator
+import types
+import warnings
+from collections import deque
+from string import Formatter
+
+from markupsafe import EscapeFormatter
+from markupsafe import Markup
+
+from ._compat import abc
+from ._compat import PY2
+from ._compat import range_type
+from ._compat import string_types
+from .environment import Environment
+from .exceptions import SecurityError
+
+#: maximum number of items a range may produce
+MAX_RANGE = 100000
+
+#: attributes of function objects that are considered unsafe.
+if PY2:
+ UNSAFE_FUNCTION_ATTRIBUTES = {
+ "func_closure",
+ "func_code",
+ "func_dict",
+ "func_defaults",
+ "func_globals",
+ }
+else:
+ # On Python 3 the special attributes on functions are gone,
+ # but they remain on methods and generators for whatever reason.
+ UNSAFE_FUNCTION_ATTRIBUTES = set()
+
+#: unsafe method attributes. function attributes are unsafe for methods too
+UNSAFE_METHOD_ATTRIBUTES = {"im_class", "im_func", "im_self"}
+
+#: unsafe generator attributes.
+UNSAFE_GENERATOR_ATTRIBUTES = {"gi_frame", "gi_code"}
+
+#: unsafe attributes on coroutines
+UNSAFE_COROUTINE_ATTRIBUTES = {"cr_frame", "cr_code"}
+
+#: unsafe attributes on async generators
+UNSAFE_ASYNC_GENERATOR_ATTRIBUTES = {"ag_code", "ag_frame"}
+
+# make sure we don't warn in python 2.6 about stuff we don't care about
+warnings.filterwarnings(
+ "ignore", "the sets module", DeprecationWarning, module=__name__
+)
+
+_mutable_set_types = (set,)
+_mutable_mapping_types = (dict,)
+_mutable_sequence_types = (list,)
+
+# on python 2.x we can register the user collection types
+try:
+ from UserDict import UserDict, DictMixin
+ from UserList import UserList
+
+ _mutable_mapping_types += (UserDict, DictMixin)
+ _mutable_set_types += (UserList,)
+except ImportError:
+ pass
+
+# if sets is still available, register the mutable set from there as well
+try:
+ from sets import Set
+
+ _mutable_set_types += (Set,)
+except ImportError:
+ pass
+
+#: register Python 2.6 abstract base classes
+_mutable_set_types += (abc.MutableSet,)
+_mutable_mapping_types += (abc.MutableMapping,)
+_mutable_sequence_types += (abc.MutableSequence,)
+
+_mutable_spec = (
+ (
+ _mutable_set_types,
+ frozenset(
+ [
+ "add",
+ "clear",
+ "difference_update",
+ "discard",
+ "pop",
+ "remove",
+ "symmetric_difference_update",
+ "update",
+ ]
+ ),
+ ),
+ (
+ _mutable_mapping_types,
+ frozenset(["clear", "pop", "popitem", "setdefault", "update"]),
+ ),
+ (
+ _mutable_sequence_types,
+ frozenset(["append", "reverse", "insert", "sort", "extend", "remove"]),
+ ),
+ (
+ deque,
+ frozenset(
+ [
+ "append",
+ "appendleft",
+ "clear",
+ "extend",
+ "extendleft",
+ "pop",
+ "popleft",
+ "remove",
+ "rotate",
+ ]
+ ),
+ ),
+)
+
+
+class _MagicFormatMapping(abc.Mapping):
+ """This class implements a dummy wrapper to fix a bug in the Python
+ standard library for string formatting.
+
+ See https://bugs.python.org/issue13598 for information about why
+ this is necessary.
+ """
+
+ def __init__(self, args, kwargs):
+ self._args = args
+ self._kwargs = kwargs
+ self._last_index = 0
+
+ def __getitem__(self, key):
+ if key == "":
+ idx = self._last_index
+ self._last_index += 1
+ try:
+ return self._args[idx]
+ except LookupError:
+ pass
+ key = str(idx)
+ return self._kwargs[key]
+
+ def __iter__(self):
+ return iter(self._kwargs)
+
+ def __len__(self):
+ return len(self._kwargs)
+
+
+def inspect_format_method(callable):
+ if not isinstance(
+ callable, (types.MethodType, types.BuiltinMethodType)
+ ) or callable.__name__ not in ("format", "format_map"):
+ return None
+ obj = callable.__self__
+ if isinstance(obj, string_types):
+ return obj
+
+
+def safe_range(*args):
+ """A range that can't generate ranges with a length of more than
+ MAX_RANGE items.
+ """
+ rng = range_type(*args)
+
+ if len(rng) > MAX_RANGE:
+ raise OverflowError(
+ "Range too big. The sandbox blocks ranges larger than"
+ " MAX_RANGE (%d)." % MAX_RANGE
+ )
+
+ return rng
+
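+# A small sketch of how the guard above behaves (values shown for the default
+# MAX_RANGE of 100000):
+#
+#     >>> list(safe_range(3))
+#     [0, 1, 2]
+#     >>> safe_range(MAX_RANGE + 1)
+#     Traceback (most recent call last):
+#       ...
+#     OverflowError: Range too big. ...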
+
+def unsafe(f):
+ """Marks a function or method as unsafe.
+
+ ::
+
+ @unsafe
+ def delete(self):
+ pass
+ """
+ f.unsafe_callable = True
+ return f
+
+
+def is_internal_attribute(obj, attr):
+ """Test if the attribute given is an internal python attribute. For
+ example this function returns `True` for the `func_code` attribute of
+ python objects. This is useful if the environment method
+ :meth:`~SandboxedEnvironment.is_safe_attribute` is overridden.
+
+ >>> from jinja2.sandbox import is_internal_attribute
+ >>> is_internal_attribute(str, "mro")
+ True
+ >>> is_internal_attribute(str, "upper")
+ False
+ """
+ if isinstance(obj, types.FunctionType):
+ if attr in UNSAFE_FUNCTION_ATTRIBUTES:
+ return True
+ elif isinstance(obj, types.MethodType):
+ if attr in UNSAFE_FUNCTION_ATTRIBUTES or attr in UNSAFE_METHOD_ATTRIBUTES:
+ return True
+ elif isinstance(obj, type):
+ if attr == "mro":
+ return True
+ elif isinstance(obj, (types.CodeType, types.TracebackType, types.FrameType)):
+ return True
+ elif isinstance(obj, types.GeneratorType):
+ if attr in UNSAFE_GENERATOR_ATTRIBUTES:
+ return True
+ elif hasattr(types, "CoroutineType") and isinstance(obj, types.CoroutineType):
+ if attr in UNSAFE_COROUTINE_ATTRIBUTES:
+ return True
+ elif hasattr(types, "AsyncGeneratorType") and isinstance(
+ obj, types.AsyncGeneratorType
+ ):
+ if attr in UNSAFE_ASYNC_GENERATOR_ATTRIBUTES:
+ return True
+ return attr.startswith("__")
+
+
+def modifies_known_mutable(obj, attr):
+ """This function checks if an attribute on a builtin mutable object
+ (list, dict, set or deque) would modify it if called. It also supports
+ the "user"-versions of the objects (`sets.Set`, `UserDict.*` etc.) and
+ with Python 2.6 onwards the abstract base classes `MutableSet`,
+ `MutableMapping`, and `MutableSequence`.
+
+ >>> modifies_known_mutable({}, "clear")
+ True
+ >>> modifies_known_mutable({}, "keys")
+ False
+ >>> modifies_known_mutable([], "append")
+ True
+ >>> modifies_known_mutable([], "index")
+ False
+
+ If called with an unsupported object (such as unicode) `False` is
+ returned.
+
+ >>> modifies_known_mutable("foo", "upper")
+ False
+ """
+ for typespec, unsafe in _mutable_spec:
+ if isinstance(obj, typespec):
+ return attr in unsafe
+ return False
+
+
+class SandboxedEnvironment(Environment):
+ """The sandboxed environment. It works like the regular environment but
+ tells the compiler to generate sandboxed code. Additionally subclasses of
+ this environment may override the methods that tell the runtime what
+ attributes or functions are safe to access.
+
+ If the template tries to access insecure code a :exc:`SecurityError` is
+ raised. However, other exceptions may also occur during rendering, so
+ the caller has to ensure that all exceptions are caught.
+ """
+
+ sandboxed = True
+
+ #: default callback table for the binary operators. A copy of this is
+ #: available on each instance of a sandboxed environment as
+ #: :attr:`binop_table`
+ default_binop_table = {
+ "+": operator.add,
+ "-": operator.sub,
+ "*": operator.mul,
+ "/": operator.truediv,
+ "//": operator.floordiv,
+ "**": operator.pow,
+ "%": operator.mod,
+ }
+
+ #: default callback table for the unary operators. A copy of this is
+ #: available on each instance of a sandboxed environment as
+ #: :attr:`unop_table`
+ default_unop_table = {"+": operator.pos, "-": operator.neg}
+
+ #: a set of binary operators that should be intercepted. Each operator
+ #: that is added to this set (empty by default) is delegated to the
+ #: :meth:`call_binop` method that will perform the operator. The default
+ #: operator callback is specified by :attr:`binop_table`.
+ #:
+ #: The following binary operators are interceptable:
+ #: ``//``, ``%``, ``+``, ``*``, ``-``, ``/``, and ``**``
+ #:
+ #: The default operation from the operator table corresponds to the
+ #: builtin function. Intercepted calls are always slower than the native
+ #: operator call, so make sure only to intercept the ones you are
+ #: interested in.
+ #:
+ #: .. versionadded:: 2.6
+ intercepted_binops = frozenset()
+
+ #: a set of unary operators that should be intercepted. Each operator
+ #: that is added to this set (empty by default) is delegated to the
+ #: :meth:`call_unop` method that will perform the operator. The default
+ #: operator callback is specified by :attr:`unop_table`.
+ #:
+ #: The following unary operators are interceptable: ``+``, ``-``
+ #:
+ #: The default operation from the operator table corresponds to the
+ #: builtin function. Intercepted calls are always slower than the native
+ #: operator call, so make sure only to intercept the ones you are
+ #: interested in.
+ #:
+ #: .. versionadded:: 2.6
+ intercepted_unops = frozenset()
+
+ def intercept_unop(self, operator):
+ """Called during template compilation with the name of a unary
+ operator to check if it should be intercepted at runtime. If this
+ method returns `True`, :meth:`call_unop` is executed for this unary
+ operator. The default implementation of :meth:`call_unop` will use
+ the :attr:`unop_table` dictionary to perform the operator with the
+ same logic as the builtin one.
+
+ The following unary operators are interceptable: ``+`` and ``-``
+
+ Intercepted calls are always slower than the native operator call,
+ so make sure only to intercept the ones you are interested in.
+
+ .. versionadded:: 2.6
+ """
+ return False
+
+ def __init__(self, *args, **kwargs):
+ Environment.__init__(self, *args, **kwargs)
+ self.globals["range"] = safe_range
+ self.binop_table = self.default_binop_table.copy()
+ self.unop_table = self.default_unop_table.copy()
+
+ def is_safe_attribute(self, obj, attr, value):
+ """The sandboxed environment will call this method to check if the
+ attribute of an object is safe to access. By default all attributes
+ starting with an underscore are considered private, as are the
+ special attributes of internal Python objects as reported by the
+ :func:`is_internal_attribute` function.
+ """
+ return not (attr.startswith("_") or is_internal_attribute(obj, attr))
+
+ def is_safe_callable(self, obj):
+ """Check if an object is safely callable. Per default a function is
+ considered safe unless the `unsafe_callable` attribute exists and is
+ True. Override this method to alter the behavior, but this won't
+ affect the `unsafe` decorator from this module.
+ """
+ return not (
+ getattr(obj, "unsafe_callable", False) or getattr(obj, "alters_data", False)
+ )
+
+ def call_binop(self, context, operator, left, right):
+ """For intercepted binary operator calls (:meth:`intercepted_binops`)
+ this function is executed instead of the builtin operator. This can
+ be used to fine tune the behavior of certain operators.
+
+ .. versionadded:: 2.6
+ """
+ return self.binop_table[operator](left, right)
+
+ def call_unop(self, context, operator, arg):
+ """For intercepted unary operator calls (:meth:`intercepted_unops`)
+ this function is executed instead of the builtin operator. This can
+ be used to fine tune the behavior of certain operators.
+
+ .. versionadded:: 2.6
+ """
+ return self.unop_table[operator](arg)
+
+ def getitem(self, obj, argument):
+ """Subscribe an object from sandboxed code."""
+ try:
+ return obj[argument]
+ except (TypeError, LookupError):
+ if isinstance(argument, string_types):
+ try:
+ attr = str(argument)
+ except Exception:
+ pass
+ else:
+ try:
+ value = getattr(obj, attr)
+ except AttributeError:
+ pass
+ else:
+ if self.is_safe_attribute(obj, argument, value):
+ return value
+ return self.unsafe_undefined(obj, argument)
+ return self.undefined(obj=obj, name=argument)
+
+ def getattr(self, obj, attribute):
+ """Subscribe an object from sandboxed code and prefer the
+ attribute. The attribute passed *must* be a bytestring.
+ """
+ try:
+ value = getattr(obj, attribute)
+ except AttributeError:
+ try:
+ return obj[attribute]
+ except (TypeError, LookupError):
+ pass
+ else:
+ if self.is_safe_attribute(obj, attribute, value):
+ return value
+ return self.unsafe_undefined(obj, attribute)
+ return self.undefined(obj=obj, name=attribute)
+
+ def unsafe_undefined(self, obj, attribute):
+ """Return an undefined object for unsafe attributes."""
+ return self.undefined(
+ "access to attribute %r of %r "
+ "object is unsafe." % (attribute, obj.__class__.__name__),
+ name=attribute,
+ obj=obj,
+ exc=SecurityError,
+ )
+
+ def format_string(self, s, args, kwargs, format_func=None):
+ """If a format call is detected, then this is routed through this
+ method so that our safety sandbox can be used for it.
+ """
+ if isinstance(s, Markup):
+ formatter = SandboxedEscapeFormatter(self, s.escape)
+ else:
+ formatter = SandboxedFormatter(self)
+
+ if format_func is not None and format_func.__name__ == "format_map":
+ if len(args) != 1 or kwargs:
+ raise TypeError(
+ "format_map() takes exactly one argument %d given"
+ % (len(args) + (kwargs is not None))
+ )
+
+ kwargs = args[0]
+ args = None
+
+ kwargs = _MagicFormatMapping(args, kwargs)
+ rv = formatter.vformat(s, args, kwargs)
+ return type(s)(rv)
+
+ def call(__self, __context, __obj, *args, **kwargs): # noqa: B902
+ """Call an object from sandboxed code."""
+ fmt = inspect_format_method(__obj)
+ if fmt is not None:
+ return __self.format_string(fmt, args, kwargs, __obj)
+
+ # the double prefixes are to avoid double keyword argument
+ # errors when proxying the call.
+ if not __self.is_safe_callable(__obj):
+ raise SecurityError("%r is not safely callable" % (__obj,))
+ return __context.call(__obj, *args, **kwargs)
+
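+# A small illustrative check of the default attribute policy: a sandboxed
+# environment constructed with no arguments rejects underscore and internal
+# attributes while allowing ordinary ones.
+#
+#     >>> env = SandboxedEnvironment()
+#     >>> env.is_safe_attribute(str, "upper", str.upper)
+#     True
+#     >>> env.is_safe_attribute(str, "mro", str.mro)
+#     False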
+
+class ImmutableSandboxedEnvironment(SandboxedEnvironment):
+ """Works exactly like the regular `SandboxedEnvironment` but does not
+ permit modifications on the builtin mutable objects `list`, `set`, and
+ `dict` by using the :func:`modifies_known_mutable` function.
+ """
+
+ def is_safe_attribute(self, obj, attr, value):
+ if not SandboxedEnvironment.is_safe_attribute(self, obj, attr, value):
+ return False
+ return not modifies_known_mutable(obj, attr)
+
+
+# This really is not a public API apparently.
+try:
+ from _string import formatter_field_name_split
+except ImportError:
+
+ def formatter_field_name_split(field_name):
+ return field_name._formatter_field_name_split()
+
+
+class SandboxedFormatterMixin(object):
+ def __init__(self, env):
+ self._env = env
+
+ def get_field(self, field_name, args, kwargs):
+ first, rest = formatter_field_name_split(field_name)
+ obj = self.get_value(first, args, kwargs)
+ for is_attr, i in rest:
+ if is_attr:
+ obj = self._env.getattr(obj, i)
+ else:
+ obj = self._env.getitem(obj, i)
+ return obj, first
+
+
+class SandboxedFormatter(SandboxedFormatterMixin, Formatter):
+ def __init__(self, env):
+ SandboxedFormatterMixin.__init__(self, env)
+ Formatter.__init__(self)
+
+
+class SandboxedEscapeFormatter(SandboxedFormatterMixin, EscapeFormatter):
+ def __init__(self, env, escape):
+ SandboxedFormatterMixin.__init__(self, env)
+ EscapeFormatter.__init__(self, escape)
diff --git a/third_party/python/Jinja2/src/jinja2/tests.py b/third_party/python/Jinja2/src/jinja2/tests.py
new file mode 100644
index 0000000000..fabd4ce51b
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/tests.py
@@ -0,0 +1,215 @@
+# -*- coding: utf-8 -*-
+"""Built-in template tests used with the ``is`` operator."""
+import decimal
+import operator
+import re
+
+from ._compat import abc
+from ._compat import integer_types
+from ._compat import string_types
+from ._compat import text_type
+from .runtime import Undefined
+
+number_re = re.compile(r"^-?\d+(\.\d+)?$")
+regex_type = type(number_re)
+test_callable = callable
+
+
+def test_odd(value):
+ """Return true if the variable is odd."""
+ return value % 2 == 1
+
+
+def test_even(value):
+ """Return true if the variable is even."""
+ return value % 2 == 0
+
+
+def test_divisibleby(value, num):
+ """Check if a variable is divisible by a number."""
+ return value % num == 0
+
+
+def test_defined(value):
+ """Return true if the variable is defined:
+
+ .. sourcecode:: jinja
+
+ {% if variable is defined %}
+ value of variable: {{ variable }}
+ {% else %}
+ variable is not defined
+ {% endif %}
+
+ See the :func:`default` filter for a simple way to set undefined
+ variables.
+ """
+ return not isinstance(value, Undefined)
+
+
+def test_undefined(value):
+ """Like :func:`defined` but the other way round."""
+ return isinstance(value, Undefined)
+
+
+def test_none(value):
+ """Return true if the variable is none."""
+ return value is None
+
+
+def test_boolean(value):
+ """Return true if the object is a boolean value.
+
+ .. versionadded:: 2.11
+ """
+ return value is True or value is False
+
+
+def test_false(value):
+ """Return true if the object is False.
+
+ .. versionadded:: 2.11
+ """
+ return value is False
+
+
+def test_true(value):
+ """Return true if the object is True.
+
+ .. versionadded:: 2.11
+ """
+ return value is True
+
+
+# NOTE: The existing 'number' test matches booleans and floats
+def test_integer(value):
+ """Return true if the object is an integer.
+
+ .. versionadded:: 2.11
+ """
+ return isinstance(value, integer_types) and value is not True and value is not False
+
+
+# NOTE: The existing 'number' test matches booleans and integers
+def test_float(value):
+ """Return true if the object is a float.
+
+ .. versionadded:: 2.11
+ """
+ return isinstance(value, float)
+
+
+def test_lower(value):
+ """Return true if the variable is lowercased."""
+ return text_type(value).islower()
+
+
+def test_upper(value):
+ """Return true if the variable is uppercased."""
+ return text_type(value).isupper()
+
+
+def test_string(value):
+ """Return true if the object is a string."""
+ return isinstance(value, string_types)
+
+
+def test_mapping(value):
+ """Return true if the object is a mapping (dict etc.).
+
+ .. versionadded:: 2.6
+ """
+ return isinstance(value, abc.Mapping)
+
+
+def test_number(value):
+ """Return true if the variable is a number."""
+ return isinstance(value, integer_types + (float, complex, decimal.Decimal))
+
+
+def test_sequence(value):
+ """Return true if the variable is a sequence. Sequences are variables
+ that are iterable.
+ """
+ try:
+ len(value)
+ value.__getitem__
+ except Exception:
+ return False
+ return True
+
+
+def test_sameas(value, other):
+ """Check if an object points to the same memory address than another
+ object:
+
+ .. sourcecode:: jinja
+
+ {% if foo.attribute is sameas false %}
+ the foo attribute really is the `False` singleton
+ {% endif %}
+ """
+ return value is other
+
+
+def test_iterable(value):
+ """Check if it's possible to iterate over an object."""
+ try:
+ iter(value)
+ except TypeError:
+ return False
+ return True
+
+
+def test_escaped(value):
+ """Check if the value is escaped."""
+ return hasattr(value, "__html__")
+
+
+def test_in(value, seq):
+ """Check if value is in seq.
+
+ .. versionadded:: 2.10
+ """
+ return value in seq
+
+
+TESTS = {
+ "odd": test_odd,
+ "even": test_even,
+ "divisibleby": test_divisibleby,
+ "defined": test_defined,
+ "undefined": test_undefined,
+ "none": test_none,
+ "boolean": test_boolean,
+ "false": test_false,
+ "true": test_true,
+ "integer": test_integer,
+ "float": test_float,
+ "lower": test_lower,
+ "upper": test_upper,
+ "string": test_string,
+ "mapping": test_mapping,
+ "number": test_number,
+ "sequence": test_sequence,
+ "iterable": test_iterable,
+ "callable": test_callable,
+ "sameas": test_sameas,
+ "escaped": test_escaped,
+ "in": test_in,
+ "==": operator.eq,
+ "eq": operator.eq,
+ "equalto": operator.eq,
+ "!=": operator.ne,
+ "ne": operator.ne,
+ ">": operator.gt,
+ "gt": operator.gt,
+ "greaterthan": operator.gt,
+ "ge": operator.ge,
+ ">=": operator.ge,
+ "<": operator.lt,
+ "lt": operator.lt,
+ "lessthan": operator.lt,
+ "<=": operator.le,
+ "le": operator.le,
+}
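+# These callables back the ``is`` operator in templates; a few illustrative
+# expressions (variable names are made up):
+#
+#     {{ 4 is divisibleby 2 }}      -> True
+#     {{ value is defined }}        -> False while ``value`` is Undefined
+#     {{ "HELLO" is upper }}        -> True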
diff --git a/third_party/python/Jinja2/src/jinja2/utils.py b/third_party/python/Jinja2/src/jinja2/utils.py
new file mode 100644
index 0000000000..b422ba9686
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/utils.py
@@ -0,0 +1,732 @@
+# -*- coding: utf-8 -*-
+import json
+import os
+import re
+import warnings
+from collections import deque
+from random import choice
+from random import randrange
+from threading import Lock
+
+from markupsafe import escape
+from markupsafe import Markup
+
+from ._compat import abc
+from ._compat import string_types
+from ._compat import text_type
+from ._compat import url_quote
+
+_word_split_re = re.compile(r"(\s+)")
+_punctuation_re = re.compile(
+ "^(?P<lead>(?:%s)*)(?P<middle>.*?)(?P<trail>(?:%s)*)$"
+ % (
+ "|".join(map(re.escape, ("(", "<", "&lt;"))),
+ "|".join(map(re.escape, (".", ",", ")", ">", "\n", "&gt;"))),
+ )
+)
+_simple_email_re = re.compile(r"^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$")
+_striptags_re = re.compile(r"(<!--.*?-->|<[^>]*>)")
+_entity_re = re.compile(r"&([^;]+);")
+_letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
+_digits = "0123456789"
+
+# special singleton representing missing values for the runtime
+missing = type("MissingType", (), {"__repr__": lambda x: "missing"})()
+
+# internal code
+internal_code = set()
+
+concat = u"".join
+
+_slash_escape = "\\/" not in json.dumps("/")
+
+
+def contextfunction(f):
+ """This decorator can be used to mark a function or method context callable.
+ A context callable is passed the active :class:`Context` as first argument when
+ called from the template. This is useful if a function wants to get access
+ to the context or functions provided on the context object. For example
+ a function that returns a sorted list of template variables the current
+ template exports could look like this::
+
+ @contextfunction
+ def get_exported_names(context):
+ return sorted(context.exported_vars)
+ """
+ f.contextfunction = True
+ return f
+
+
+def evalcontextfunction(f):
+ """This decorator can be used to mark a function or method as an eval
+ context callable. This is similar to the :func:`contextfunction`
+ but instead of passing the context, an evaluation context object is
+ passed. For more information about the eval context, see
+ :ref:`eval-context`.
+
+ .. versionadded:: 2.4
+ """
+ f.evalcontextfunction = True
+ return f
+
+
+def environmentfunction(f):
+ """This decorator can be used to mark a function or method as environment
+ callable. This decorator works exactly like the :func:`contextfunction`
+ decorator just that the first argument is the active :class:`Environment`
+ and not context.
+ """
+ f.environmentfunction = True
+ return f
+
+
+def internalcode(f):
+ """Marks the function as internally used"""
+ internal_code.add(f.__code__)
+ return f
+
+
+def is_undefined(obj):
+ """Check if the object passed is undefined. This does nothing more than
+ performing an instance check against :class:`Undefined` but looks nicer.
+ This can be used for custom filters or tests that want to react to
+ undefined variables. For example a custom default filter can look like
+ this::
+
+ def default(var, default=''):
+ if is_undefined(var):
+ return default
+ return var
+ """
+ from .runtime import Undefined
+
+ return isinstance(obj, Undefined)
+
+
+def consume(iterable):
+ """Consumes an iterable without doing anything with it."""
+ for _ in iterable:
+ pass
+
+
+def clear_caches():
+ """Jinja keeps internal caches for environments and lexers. These are
+ used so that Jinja doesn't have to recreate environments and lexers all
+ the time. Normally you don't have to care about that but if you are
+ measuring memory consumption you may want to clean the caches.
+ """
+ from .environment import _spontaneous_environments
+ from .lexer import _lexer_cache
+
+ _spontaneous_environments.clear()
+ _lexer_cache.clear()
+
+
+def import_string(import_name, silent=False):
+ """Imports an object based on a string. This is useful if you want to
+ use import paths as endpoints or something similar. An import path can
+ be specified either in dotted notation (``xml.sax.saxutils.escape``)
+ or with a colon as object delimiter (``xml.sax.saxutils:escape``).
+
+ If the `silent` is True the return value will be `None` if the import
+ fails.
+
+ :return: imported object
+ """
+ try:
+ if ":" in import_name:
+ module, obj = import_name.split(":", 1)
+ elif "." in import_name:
+ module, _, obj = import_name.rpartition(".")
+ else:
+ return __import__(import_name)
+ return getattr(__import__(module, None, None, [obj]), obj)
+ except (ImportError, AttributeError):
+ if not silent:
+ raise
+
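+# A short sketch of the two supported notations (the example path is the one
+# used in the docstring above; the missing module is made up):
+#
+#     >>> import_string("xml.sax.saxutils.escape") is import_string(
+#     ...     "xml.sax.saxutils:escape")
+#     True
+#     >>> import_string("no.such.module", silent=True) is None
+#     True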
+
+def open_if_exists(filename, mode="rb"):
+ """Returns a file descriptor for the filename if that file exists,
+ otherwise ``None``.
+ """
+ if not os.path.isfile(filename):
+ return None
+
+ return open(filename, mode)
+
+
+def object_type_repr(obj):
+ """Returns the name of the object's type. For some recognized
+ singletons the name of the object is returned instead. (For
+ example for `None` and `Ellipsis`).
+ """
+ if obj is None:
+ return "None"
+ elif obj is Ellipsis:
+ return "Ellipsis"
+
+ cls = type(obj)
+
+ # __builtin__ in 2.x, builtins in 3.x
+ if cls.__module__ in ("__builtin__", "builtins"):
+ name = cls.__name__
+ else:
+ name = cls.__module__ + "." + cls.__name__
+
+ return "%s object" % name
+
+
+def pformat(obj, verbose=False):
+ """Prettyprint an object. Either use the `pretty` library or the
+ builtin `pprint`.
+ """
+ try:
+ from pretty import pretty
+
+ return pretty(obj, verbose=verbose)
+ except ImportError:
+ from pprint import pformat
+
+ return pformat(obj)
+
+
+def urlize(text, trim_url_limit=None, rel=None, target=None):
+ """Converts any URLs in text into clickable links. Works on http://,
+ https:// and www. links. Links can have trailing punctuation (periods,
+ commas, close-parens) and leading punctuation (opening parens) and
+ it'll still do the right thing.
+
+ If trim_url_limit is not None, the URLs in link text will be limited
+ to trim_url_limit characters.
+
+ If rel is not None, the links will get a ``rel`` attribute with the
+ given value.
+
+ If target is not None, a target attribute will be added to the link.
+ """
+ trim_url = (
+ lambda x, limit=trim_url_limit: limit is not None
+ and (x[:limit] + (len(x) >= limit and "..." or ""))
+ or x
+ )
+ words = _word_split_re.split(text_type(escape(text)))
+ rel_attr = rel and ' rel="%s"' % text_type(escape(rel)) or ""
+ target_attr = target and ' target="%s"' % escape(target) or ""
+
+ for i, word in enumerate(words):
+ match = _punctuation_re.match(word)
+ if match:
+ lead, middle, trail = match.groups()
+ if middle.startswith("www.") or (
+ "@" not in middle
+ and not middle.startswith("http://")
+ and not middle.startswith("https://")
+ and len(middle) > 0
+ and middle[0] in _letters + _digits
+ and (
+ middle.endswith(".org")
+ or middle.endswith(".net")
+ or middle.endswith(".com")
+ )
+ ):
+ middle = '<a href="http://%s"%s%s>%s</a>' % (
+ middle,
+ rel_attr,
+ target_attr,
+ trim_url(middle),
+ )
+ if middle.startswith("http://") or middle.startswith("https://"):
+ middle = '<a href="%s"%s%s>%s</a>' % (
+ middle,
+ rel_attr,
+ target_attr,
+ trim_url(middle),
+ )
+ if (
+ "@" in middle
+ and not middle.startswith("www.")
+ and ":" not in middle
+ and _simple_email_re.match(middle)
+ ):
+ middle = '<a href="mailto:%s">%s</a>' % (middle, middle)
+ if lead + middle + trail != word:
+ words[i] = lead + middle + trail
+ return u"".join(words)
+
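+# Roughly, for a bare "www." link with the default arguments (illustrative
+# input; the exact markup also depends on ``rel`` and ``target``):
+#
+#     urlize("see www.example.com for details")
+#     # -> 'see <a href="http://www.example.com">www.example.com</a> for details'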
+
+def generate_lorem_ipsum(n=5, html=True, min=20, max=100):
+ """Generate some lorem ipsum for the template."""
+ from .constants import LOREM_IPSUM_WORDS
+
+ words = LOREM_IPSUM_WORDS.split()
+ result = []
+
+ for _ in range(n):
+ next_capitalized = True
+ last_comma = last_fullstop = 0
+ word = None
+ last = None
+ p = []
+
+ # each paragraph consists of 20 to 100 words.
+ for idx, _ in enumerate(range(randrange(min, max))):
+ while True:
+ word = choice(words)
+ if word != last:
+ last = word
+ break
+ if next_capitalized:
+ word = word.capitalize()
+ next_capitalized = False
+ # add commas
+ if idx - randrange(3, 8) > last_comma:
+ last_comma = idx
+ last_fullstop += 2
+ word += ","
+ # add end of sentences
+ if idx - randrange(10, 20) > last_fullstop:
+ last_comma = last_fullstop = idx
+ word += "."
+ next_capitalized = True
+ p.append(word)
+
+ # ensure that the paragraph ends with a dot.
+ p = u" ".join(p)
+ if p.endswith(","):
+ p = p[:-1] + "."
+ elif not p.endswith("."):
+ p += "."
+ result.append(p)
+
+ if not html:
+ return u"\n\n".join(result)
+ return Markup(u"\n".join(u"<p>%s</p>" % escape(x) for x in result))
+
+
+def unicode_urlencode(obj, charset="utf-8", for_qs=False):
+ """Quote a string for use in a URL using the given charset.
+
+ This function is misnamed; it is a wrapper around
+ :func:`urllib.parse.quote`.
+
+ :param obj: String or bytes to quote. Other types are converted to
+ string then encoded to bytes using the given charset.
+ :param charset: Encode text to bytes using this charset.
+ :param for_qs: Quote "/" and use "+" for spaces.
+ """
+ if not isinstance(obj, string_types):
+ obj = text_type(obj)
+
+ if isinstance(obj, text_type):
+ obj = obj.encode(charset)
+
+ safe = b"" if for_qs else b"/"
+ rv = url_quote(obj, safe)
+
+ if not isinstance(rv, text_type):
+ rv = rv.decode("utf-8")
+
+ if for_qs:
+ rv = rv.replace("%20", "+")
+
+ return rv
+
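+# A couple of illustrative calls:
+#
+#     >>> unicode_urlencode("hello world/x")
+#     'hello%20world/x'
+#     >>> unicode_urlencode("hello world/x", for_qs=True)
+#     'hello+world%2Fx'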
+
+class LRUCache(object):
+ """A simple LRU Cache implementation."""
+
+ # this is fast for small capacities (something below 1000) but doesn't
+ # scale. But as long as it's only used as storage for templates this
+ # won't do any harm.
+
+ def __init__(self, capacity):
+ self.capacity = capacity
+ self._mapping = {}
+ self._queue = deque()
+ self._postinit()
+
+ def _postinit(self):
+ # alias all queue methods for faster lookup
+ self._popleft = self._queue.popleft
+ self._pop = self._queue.pop
+ self._remove = self._queue.remove
+ self._wlock = Lock()
+ self._append = self._queue.append
+
+ def __getstate__(self):
+ return {
+ "capacity": self.capacity,
+ "_mapping": self._mapping,
+ "_queue": self._queue,
+ }
+
+ def __setstate__(self, d):
+ self.__dict__.update(d)
+ self._postinit()
+
+ def __getnewargs__(self):
+ return (self.capacity,)
+
+ def copy(self):
+ """Return a shallow copy of the instance."""
+ rv = self.__class__(self.capacity)
+ rv._mapping.update(self._mapping)
+ rv._queue.extend(self._queue)
+ return rv
+
+ def get(self, key, default=None):
+ """Return an item from the cache dict or `default`"""
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def setdefault(self, key, default=None):
+ """Set `default` if the key is not in the cache otherwise
+ leave unchanged. Return the value of this key.
+ """
+ try:
+ return self[key]
+ except KeyError:
+ self[key] = default
+ return default
+
+ def clear(self):
+ """Clear the cache."""
+ self._wlock.acquire()
+ try:
+ self._mapping.clear()
+ self._queue.clear()
+ finally:
+ self._wlock.release()
+
+ def __contains__(self, key):
+ """Check if a key exists in this cache."""
+ return key in self._mapping
+
+ def __len__(self):
+ """Return the current size of the cache."""
+ return len(self._mapping)
+
+ def __repr__(self):
+ return "<%s %r>" % (self.__class__.__name__, self._mapping)
+
+ def __getitem__(self, key):
+ """Get an item from the cache. Moves the item up so that it has the
+ highest priority then.
+
+ Raise a `KeyError` if it does not exist.
+ """
+ self._wlock.acquire()
+ try:
+ rv = self._mapping[key]
+ if self._queue[-1] != key:
+ try:
+ self._remove(key)
+ except ValueError:
+ # if something removed the key from the container
+ # when we read, ignore the ValueError that we would
+ # get otherwise.
+ pass
+ self._append(key)
+ return rv
+ finally:
+ self._wlock.release()
+
+ def __setitem__(self, key, value):
+ """Sets the value for an item. Moves the item up so that it
+ has the highest priority then.
+ """
+ self._wlock.acquire()
+ try:
+ if key in self._mapping:
+ self._remove(key)
+ elif len(self._mapping) == self.capacity:
+ del self._mapping[self._popleft()]
+ self._append(key)
+ self._mapping[key] = value
+ finally:
+ self._wlock.release()
+
+ def __delitem__(self, key):
+ """Remove an item from the cache dict.
+ Raise a `KeyError` if it does not exist.
+ """
+ self._wlock.acquire()
+ try:
+ del self._mapping[key]
+ try:
+ self._remove(key)
+ except ValueError:
+ pass
+ finally:
+ self._wlock.release()
+
+ def items(self):
+ """Return a list of items."""
+ result = [(key, self._mapping[key]) for key in list(self._queue)]
+ result.reverse()
+ return result
+
+ def iteritems(self):
+ """Iterate over all items."""
+ warnings.warn(
+ "'iteritems()' will be removed in version 3.0. Use"
+ " 'iter(cache.items())' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return iter(self.items())
+
+ def values(self):
+ """Return a list of all values."""
+ return [x[1] for x in self.items()]
+
+ def itervalue(self):
+ """Iterate over all values."""
+ warnings.warn(
+ "'itervalue()' will be removed in version 3.0. Use"
+ " 'iter(cache.values())' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return iter(self.values())
+
+ def itervalues(self):
+ """Iterate over all values."""
+ warnings.warn(
+ "'itervalues()' will be removed in version 3.0. Use"
+ " 'iter(cache.values())' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return iter(self.values())
+
+ def keys(self):
+ """Return a list of all keys ordered by most recent usage."""
+ return list(self)
+
+ def iterkeys(self):
+ """Iterate over all keys in the cache dict, ordered by
+ the most recent usage.
+ """
+ warnings.warn(
+ "'iterkeys()' will be removed in version 3.0. Use"
+ " 'iter(cache.keys())' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return iter(self)
+
+ def __iter__(self):
+ return reversed(tuple(self._queue))
+
+ def __reversed__(self):
+ """Iterate over the keys in the cache dict, oldest items
+ coming first.
+ """
+ return iter(tuple(self._queue))
+
+ __copy__ = copy
+
+
+abc.MutableMapping.register(LRUCache)
+
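+# A minimal sketch of the LRU behaviour (capacity of 2 chosen for
+# illustration):
+#
+#     >>> cache = LRUCache(2)
+#     >>> cache["a"] = 1
+#     >>> cache["b"] = 2
+#     >>> cache["a"]        # touching "a" makes "b" the oldest entry
+#     1
+#     >>> cache["c"] = 3    # evicts "b"
+#     >>> "b" in cache
+#     False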
+
+def select_autoescape(
+ enabled_extensions=("html", "htm", "xml"),
+ disabled_extensions=(),
+ default_for_string=True,
+ default=False,
+):
+ """Intelligently sets the initial value of autoescaping based on the
+ filename of the template. This is the recommended way to configure
+ autoescaping if you do not want to write a custom function yourself.
+
+ If you want to enable it for all templates created from strings or
+ for all templates with `.html` and `.xml` extensions::
+
+ from jinja2 import Environment, select_autoescape
+ env = Environment(autoescape=select_autoescape(
+ enabled_extensions=('html', 'xml'),
+ default_for_string=True,
+ ))
+
+ Example configuration to turn it on at all times except if the template
+ ends with `.txt`::
+
+ from jinja2 import Environment, select_autoescape
+ env = Environment(autoescape=select_autoescape(
+ disabled_extensions=('txt',),
+ default_for_string=True,
+ default=True,
+ ))
+
+ The `enabled_extensions` is an iterable of all the extensions that
+ autoescaping should be enabled for. Likewise `disabled_extensions` is
+ a list of all templates it should be disabled for. If a template is
+ loaded from a string then the default from `default_for_string` is used.
+ If nothing matches then the initial value of autoescaping is set to the
+ value of `default`.
+
+ For security reasons this function operates case-insensitively.
+
+ .. versionadded:: 2.9
+ """
+ enabled_patterns = tuple("." + x.lstrip(".").lower() for x in enabled_extensions)
+ disabled_patterns = tuple("." + x.lstrip(".").lower() for x in disabled_extensions)
+
+ def autoescape(template_name):
+ if template_name is None:
+ return default_for_string
+ template_name = template_name.lower()
+ if template_name.endswith(enabled_patterns):
+ return True
+ if template_name.endswith(disabled_patterns):
+ return False
+ return default
+
+ return autoescape
+
+
+def htmlsafe_json_dumps(obj, dumper=None, **kwargs):
+ """Works exactly like :func:`dumps` but is safe for use in ``<script>``
+ tags. It accepts the same arguments and returns a JSON string. Note that
+ this is available in templates through the ``|tojson`` filter which will
+ also mark the result as safe. Due to how this function escapes certain
+ characters this is safe even if used outside of ``<script>`` tags.
+
+ The following characters are escaped in strings:
+
+ - ``<``
+ - ``>``
+ - ``&``
+ - ``'``
+
+ This makes it safe to embed such strings in any place in HTML with the
+ notable exception of double quoted attributes. In that case single
+ quote your attributes or HTML escape it in addition.
+ """
+ if dumper is None:
+ dumper = json.dumps
+ rv = (
+ dumper(obj, **kwargs)
+ .replace(u"<", u"\\u003c")
+ .replace(u">", u"\\u003e")
+ .replace(u"&", u"\\u0026")
+ .replace(u"'", u"\\u0027")
+ )
+ return Markup(rv)
+
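+# An illustrative call showing the extra escaping on top of the plain JSON
+# dump (the input string is made up):
+#
+#     htmlsafe_json_dumps("O'Neil & <friends>")
+#     # -> Markup containing "O\u0027Neil \u0026 \u003cfriends\u003e"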
+
+class Cycler(object):
+ """Cycle through values by yield them one at a time, then restarting
+ once the end is reached. Available as ``cycler`` in templates.
+
+ Similar to ``loop.cycle``, but can be used outside loops or across
+ multiple loops. For example, render a list of folders and files in a
+ list, alternately giving them "odd" and "even" classes.
+
+ .. code-block:: html+jinja
+
+ {% set row_class = cycler("odd", "even") %}
+ <ul class="browser">
+ {% for folder in folders %}
+ <li class="folder {{ row_class.next() }}">{{ folder }}
+ {% endfor %}
+ {% for file in files %}
+ <li class="file {{ row_class.next() }}">{{ file }}
+ {% endfor %}
+ </ul>
+
+ :param items: Each positional argument will be yielded in the order
+ given for each cycle.
+
+ .. versionadded:: 2.1
+ """
+
+ def __init__(self, *items):
+ if not items:
+ raise RuntimeError("at least one item has to be provided")
+ self.items = items
+ self.pos = 0
+
+ def reset(self):
+ """Resets the current item to the first item."""
+ self.pos = 0
+
+ @property
+ def current(self):
+ """Return the current item. Equivalent to the item that will be
+ returned next time :meth:`next` is called.
+ """
+ return self.items[self.pos]
+
+ def next(self):
+ """Return the current item, then advance :attr:`current` to the
+ next item.
+ """
+ rv = self.current
+ self.pos = (self.pos + 1) % len(self.items)
+ return rv
+
+ __next__ = next
+
+
+class Joiner(object):
+ """A joining helper for templates."""
+
+ def __init__(self, sep=u", "):
+ self.sep = sep
+ self.used = False
+
+ def __call__(self):
+ if not self.used:
+ self.used = True
+ return u""
+ return self.sep
+
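+# Typical template use (illustrative): the first call emits nothing, every
+# later call emits the separator.
+#
+#     {% set comma = joiner(", ") %}
+#     {% for tag in tags %}{{ comma() }}{{ tag }}{% endfor %}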
+
+class Namespace(object):
+ """A namespace object that can hold arbitrary attributes. It may be
+ initialized from a dictionary or with keyword arguments."""
+
+ def __init__(*args, **kwargs): # noqa: B902
+ self, args = args[0], args[1:]
+ self.__attrs = dict(*args, **kwargs)
+
+ def __getattribute__(self, name):
+ # __class__ is needed for the awaitable check in async mode
+ if name in {"_Namespace__attrs", "__class__"}:
+ return object.__getattribute__(self, name)
+ try:
+ return self.__attrs[name]
+ except KeyError:
+ raise AttributeError(name)
+
+ def __setitem__(self, name, value):
+ self.__attrs[name] = value
+
+ def __repr__(self):
+ return "<Namespace %r>" % self.__attrs
+
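+# A short sketch of the template-facing use of this class (illustrative):
+# ``namespace()`` creates an object whose attributes can be rebound from
+# inner scopes such as loop bodies.
+#
+#     {% set ns = namespace(found=false) %}
+#     {% for item in items %}
+#         {% if item.ok %}{% set ns.found = true %}{% endif %}
+#     {% endfor %}
+#     {{ ns.found }}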
+
+# does this python version support async for in and async generators?
+try:
+ exec("async def _():\n async for _ in ():\n yield _")
+ have_async_gen = True
+except SyntaxError:
+ have_async_gen = False
+
+
+def soft_unicode(s):
+ from markupsafe import soft_unicode
+
+ warnings.warn(
+ "'jinja2.utils.soft_unicode' will be removed in version 3.0."
+ " Use 'markupsafe.soft_unicode' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return soft_unicode(s)
diff --git a/third_party/python/Jinja2/src/jinja2/visitor.py b/third_party/python/Jinja2/src/jinja2/visitor.py
new file mode 100644
index 0000000000..d1365bf10e
--- /dev/null
+++ b/third_party/python/Jinja2/src/jinja2/visitor.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+"""API for traversing the AST nodes. Implemented by the compiler and
+meta introspection.
+"""
+from .nodes import Node
+
+
+class NodeVisitor(object):
+ """Walks the abstract syntax tree and call visitor functions for every
+ node found. The visitor functions may return values which will be
+ forwarded by the `visit` method.
+
+ By default the visitor functions for the nodes are ``'visit_'`` +
+ class name of the node. So a `TryFinally` node visit function would
+ be `visit_TryFinally`. This behavior can be changed by overriding
+ the `get_visitor` function. If no visitor function exists for a node
+ (return value `None`) the `generic_visit` visitor is used instead.
+ """
+
+ def get_visitor(self, node):
+ """Return the visitor function for this node or `None` if no visitor
+ exists for this node. In that case the generic visit function is
+ used instead.
+ """
+ method = "visit_" + node.__class__.__name__
+ return getattr(self, method, None)
+
+ def visit(self, node, *args, **kwargs):
+ """Visit a node."""
+ f = self.get_visitor(node)
+ if f is not None:
+ return f(node, *args, **kwargs)
+ return self.generic_visit(node, *args, **kwargs)
+
+ def generic_visit(self, node, *args, **kwargs):
+ """Called if no explicit visitor function exists for a node."""
+ for node in node.iter_child_nodes():
+ self.visit(node, *args, **kwargs)
+
+
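A short sketch of a read-only visitor built on this class (the template string and the collector are invented for illustration):

```python
# Illustrative only: collect every variable name referenced by a template.
from jinja2 import Environment
from jinja2.visitor import NodeVisitor


class NameCollector(NodeVisitor):
    def __init__(self):
        self.names = []

    def visit_Name(self, node):
        # nodes.Name stores the referenced identifier in its "name" field.
        self.names.append(node.name)


env = Environment()
tree = env.parse("Hello {{ user }}, you have {{ count }} messages.")
collector = NameCollector()
collector.visit(tree)
print(collector.names)  # ['user', 'count']
```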
+class NodeTransformer(NodeVisitor):
+ """Walks the abstract syntax tree and allows modifications of nodes.
+
+    The `NodeTransformer` will walk the AST and use the return value of the
+    visitor functions to replace or remove the old node. If the return
+    value of the visitor function is `None`, the node is removed from its
+    previous location; otherwise it is replaced with the return value. The
+    return value may be the original node, in which case no replacement
+    takes place.
+ """
+
+ def generic_visit(self, node, *args, **kwargs):
+ for field, old_value in node.iter_fields():
+ if isinstance(old_value, list):
+ new_values = []
+ for value in old_value:
+ if isinstance(value, Node):
+ value = self.visit(value, *args, **kwargs)
+ if value is None:
+ continue
+ elif not isinstance(value, Node):
+ new_values.extend(value)
+ continue
+ new_values.append(value)
+ old_value[:] = new_values
+ elif isinstance(old_value, Node):
+ new_node = self.visit(old_value, *args, **kwargs)
+ if new_node is None:
+ delattr(node, field)
+ else:
+ setattr(node, field, new_node)
+ return node
+
+ def visit_list(self, node, *args, **kwargs):
+        """As transformers may return lists in some places, this method
+        can be used to enforce a list as the return value.
+ """
+ rv = self.visit(node, *args, **kwargs)
+ if not isinstance(rv, list):
+ rv = [rv]
+ return rv
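And a matching sketch of a transformer that rewrites nodes (illustrative; the upper-casing rule is arbitrary, and it relies on ``Environment.compile`` accepting an already-parsed tree, which Jinja2 supports):

```python
# Illustrative only: upper-case every string constant in a parsed template.
from jinja2 import Environment, nodes
from jinja2.visitor import NodeTransformer


class UpperConsts(NodeTransformer):
    def visit_Const(self, node):
        # Replace string constants with an upper-cased copy; leave others alone.
        if isinstance(node.value, str):
            return nodes.Const(node.value.upper(), lineno=node.lineno)
        return node


env = Environment()
tree = UpperConsts().visit(env.parse("{{ 'hello ' ~ name }}"))
print(env.from_string(tree).render(name="world"))  # HELLO world
```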
diff --git a/third_party/python/Jinja2/tox.ini b/third_party/python/Jinja2/tox.ini
new file mode 100644
index 0000000000..679ebebe91
--- /dev/null
+++ b/third_party/python/Jinja2/tox.ini
@@ -0,0 +1,20 @@
+[tox]
+envlist =
+ py{38,37,36,35,27,py3,py}
+ style
+ docs
+skip_missing_interpreters = true
+
+[testenv]
+deps =
+ pytest
+commands = pytest --tb=short --basetemp={envtmpdir} {posargs}
+
+[testenv:style]
+deps = pre-commit
+skip_install = true
+commands = pre-commit run --all-files --show-diff-on-failure
+
+[testenv:docs]
+deps = -r docs/requirements.txt
+commands = sphinx-build -W -b html -d {envtmpdir}/doctrees docs {envtmpdir}/html
diff --git a/third_party/python/MarkupSafe/CHANGES.rst b/third_party/python/MarkupSafe/CHANGES.rst
new file mode 100644
index 0000000000..63ecd6701d
--- /dev/null
+++ b/third_party/python/MarkupSafe/CHANGES.rst
@@ -0,0 +1,97 @@
+Version 1.1.1
+-------------
+
+Released 2019-02-23
+
+- Fix segfault when ``__html__`` method raises an exception when using
+ the C speedups. The exception is now propagated correctly. (`#109`_)
+
+.. _#109: https://github.com/pallets/markupsafe/pull/109
+
+
+Version 1.1.0
+-------------
+
+Released 2018-11-05
+
+- Drop support for Python 2.6 and 3.3.
+- Build wheels for Linux, Mac, and Windows, allowing systems without
+ a compiler to take advantage of the C extension speedups. (`#104`_)
+- Use newer CPython API on Python 3, resulting in a 1.5x speedup.
+ (`#64`_)
+- ``escape`` wraps ``__html__`` result in ``Markup``, consistent with
+ documented behavior. (`#69`_)
+
+.. _#64: https://github.com/pallets/markupsafe/pull/64
+.. _#69: https://github.com/pallets/markupsafe/pull/69
+.. _#104: https://github.com/pallets/markupsafe/pull/104
+
+
+Version 1.0
+-----------
+
+Released 2017-03-07
+
+- Fixed custom types not invoking ``__unicode__`` when used with
+ ``format()``.
+- Added ``__version__`` module attribute.
+- Improve unescape code to leave lone ampersands alone.
+
+
+Version 0.18
+------------
+
+Released 2013-05-22
+
+- Fixed ``__mul__`` and string splitting on Python 3.
+
+
+Version 0.17
+------------
+
+Released 2013-05-21
+
+- Fixed a bug with broken interpolation on tuples.
+
+
+Version 0.16
+------------
+
+Released 2013-05-20
+
+- Improved Python 3 Support and removed 2to3.
+- Removed support for Python 3.2 and 2.5.
+
+
+Version 0.15
+------------
+
+Released 2011-07-20
+
+- Fixed a typo that caused the library to fail to install on pypy and
+ jython.
+
+
+Version 0.14
+------------
+
+Released 2011-07-20
+
+- Release fix for 0.13.
+
+
+Version 0.13
+------------
+
+Released 2011-07-20
+
+- Do not attempt to compile extension for PyPy or Jython.
+- Work around some 64bit Windows issues.
+
+
+Version 0.12
+------------
+
+Released 2011-02-17
+
+- Improved PyPy compatibility.
diff --git a/third_party/python/MarkupSafe/LICENSE.rst b/third_party/python/MarkupSafe/LICENSE.rst
new file mode 100644
index 0000000000..9d227a0cc4
--- /dev/null
+++ b/third_party/python/MarkupSafe/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2010 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/MarkupSafe/MANIFEST.in b/third_party/python/MarkupSafe/MANIFEST.in
new file mode 100644
index 0000000000..92a16da895
--- /dev/null
+++ b/third_party/python/MarkupSafe/MANIFEST.in
@@ -0,0 +1,8 @@
+include CHANGES.rst
+include LICENSE.rst
+include README.rst
+include tox.ini
+graft docs
+prune docs/_build
+graft tests
+global-exclude *.py[co]
diff --git a/third_party/python/MarkupSafe/PKG-INFO b/third_party/python/MarkupSafe/PKG-INFO
new file mode 100644
index 0000000000..9defab7de9
--- /dev/null
+++ b/third_party/python/MarkupSafe/PKG-INFO
@@ -0,0 +1,101 @@
+Metadata-Version: 1.2
+Name: MarkupSafe
+Version: 1.1.1
+Summary: Safely add untrusted strings to HTML/XML markup.
+Home-page: https://palletsprojects.com/p/markupsafe/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: The Pallets Team
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Documentation, https://markupsafe.palletsprojects.com/
+Project-URL: Code, https://github.com/pallets/markupsafe
+Project-URL: Issue tracker, https://github.com/pallets/markupsafe/issues
+Description: MarkupSafe
+ ==========
+
+ MarkupSafe implements a text object that escapes characters so it is
+ safe to use in HTML and XML. Characters that have special meanings are
+ replaced so that they display as the actual characters. This mitigates
+ injection attacks, meaning untrusted user input can safely be displayed
+ on a page.
+
+
+ Installing
+ ----------
+
+ Install and update using `pip`_:
+
+ .. code-block:: text
+
+ pip install -U MarkupSafe
+
+ .. _pip: https://pip.pypa.io/en/stable/quickstart/
+
+
+ Examples
+ --------
+
+ .. code-block:: pycon
+
+ >>> from markupsafe import Markup, escape
+ >>> # escape replaces special characters and wraps in Markup
+ >>> escape('<script>alert(document.cookie);</script>')
+ Markup(u'&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+ >>> # wrap in Markup to mark text "safe" and prevent escaping
+ >>> Markup('<strong>Hello</strong>')
+             Markup('<strong>Hello</strong>')
+             >>> escape(Markup('<strong>Hello</strong>'))
+             Markup('<strong>Hello</strong>')
+ >>> # Markup is a text subclass (str on Python 3, unicode on Python 2)
+ >>> # methods and operators escape their arguments
+ >>> template = Markup("Hello <em>%s</em>")
+ >>> template % '"World"'
+ Markup('Hello <em>&#34;World&#34;</em>')
+
+
+ Donate
+ ------
+
+ The Pallets organization develops and supports MarkupSafe and other
+ libraries that use it. In order to grow the community of contributors
+ and users, and allow the maintainers to devote more time to the
+ projects, `please donate today`_.
+
+ .. _please donate today: https://palletsprojects.com/donate
+
+
+ Links
+ -----
+
+ * Website: https://palletsprojects.com/p/markupsafe/
+ * Documentation: https://markupsafe.palletsprojects.com/
+ * License: `BSD-3-Clause <https://github.com/pallets/markupsafe/blob/master/LICENSE.rst>`_
+ * Releases: https://pypi.org/project/MarkupSafe/
+ * Code: https://github.com/pallets/markupsafe
+ * Issue tracker: https://github.com/pallets/markupsafe/issues
+ * Test status:
+
+ * Linux, Mac: https://travis-ci.org/pallets/markupsafe
+ * Windows: https://ci.appveyor.com/project/pallets/markupsafe
+
+ * Test coverage: https://codecov.io/gh/pallets/markupsafe
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
diff --git a/third_party/python/MarkupSafe/README.rst b/third_party/python/MarkupSafe/README.rst
new file mode 100644
index 0000000000..3548b8d1f7
--- /dev/null
+++ b/third_party/python/MarkupSafe/README.rst
@@ -0,0 +1,69 @@
+MarkupSafe
+==========
+
+MarkupSafe implements a text object that escapes characters so it is
+safe to use in HTML and XML. Characters that have special meanings are
+replaced so that they display as the actual characters. This mitigates
+injection attacks, meaning untrusted user input can safely be displayed
+on a page.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+ pip install -U MarkupSafe
+
+.. _pip: https://pip.pypa.io/en/stable/quickstart/
+
+
+Examples
+--------
+
+.. code-block:: pycon
+
+ >>> from markupsafe import Markup, escape
+ >>> # escape replaces special characters and wraps in Markup
+ >>> escape('<script>alert(document.cookie);</script>')
+ Markup(u'&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+ >>> # wrap in Markup to mark text "safe" and prevent escaping
+ >>> Markup('<strong>Hello</strong>')
+    Markup('<strong>Hello</strong>')
+    >>> escape(Markup('<strong>Hello</strong>'))
+    Markup('<strong>Hello</strong>')
+ >>> # Markup is a text subclass (str on Python 3, unicode on Python 2)
+ >>> # methods and operators escape their arguments
+ >>> template = Markup("Hello <em>%s</em>")
+ >>> template % '"World"'
+ Markup('Hello <em>&#34;World&#34;</em>')
+
+
+Donate
+------
+
+The Pallets organization develops and supports MarkupSafe and other
+libraries that use it. In order to grow the community of contributors
+and users, and allow the maintainers to devote more time to the
+projects, `please donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+* Website: https://palletsprojects.com/p/markupsafe/
+* Documentation: https://markupsafe.palletsprojects.com/
+* License: `BSD-3-Clause <https://github.com/pallets/markupsafe/blob/master/LICENSE.rst>`_
+* Releases: https://pypi.org/project/MarkupSafe/
+* Code: https://github.com/pallets/markupsafe
+* Issue tracker: https://github.com/pallets/markupsafe/issues
+* Test status:
+
+ * Linux, Mac: https://travis-ci.org/pallets/markupsafe
+ * Windows: https://ci.appveyor.com/project/pallets/markupsafe
+
+* Test coverage: https://codecov.io/gh/pallets/markupsafe
diff --git a/third_party/python/MarkupSafe/setup.cfg b/third_party/python/MarkupSafe/setup.cfg
new file mode 100644
index 0000000000..cea6e06bb8
--- /dev/null
+++ b/third_party/python/MarkupSafe/setup.cfg
@@ -0,0 +1,27 @@
+[metadata]
+license_file = LICENSE.rst
+
+[tool:pytest]
+testpaths = tests
+
+[coverage:run]
+branch = True
+source =
+ markupsafe
+
+[coverage:paths]
+source =
+ src/markupsafe
+ .tox/*/lib/python*/site-packages/markupsafe
+ .tox/*/site-packages/markupsafe
+
+[flake8]
+select = B, E, F, W, B9
+ignore = E203, E501, W503
+max-line-length = 80
+exclude = src/markupsafe/_compat.py
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/MarkupSafe/setup.py b/third_party/python/MarkupSafe/setup.py
new file mode 100644
index 0000000000..5c57d1c774
--- /dev/null
+++ b/third_party/python/MarkupSafe/setup.py
@@ -0,0 +1,125 @@
+from __future__ import print_function
+
+import io
+import re
+import sys
+from distutils.errors import CCompilerError
+from distutils.errors import DistutilsExecError
+from distutils.errors import DistutilsPlatformError
+
+from setuptools import Extension
+from setuptools import find_packages
+from setuptools import setup
+from setuptools.command.build_ext import build_ext
+
+with io.open("README.rst", "rt", encoding="utf8") as f:
+ readme = f.read()
+
+with io.open("src/markupsafe/__init__.py", "rt", encoding="utf8") as f:
+ version = re.search(r'__version__ = "(.*?)"', f.read()).group(1)
+
+is_jython = "java" in sys.platform
+is_pypy = hasattr(sys, "pypy_version_info")
+
+ext_modules = [Extension("markupsafe._speedups", ["src/markupsafe/_speedups.c"])]
+
+
+class BuildFailed(Exception):
+ pass
+
+
+class ve_build_ext(build_ext):
+ """This class allows C extension building to fail."""
+
+ def run(self):
+ try:
+ build_ext.run(self)
+ except DistutilsPlatformError:
+ raise BuildFailed()
+
+ def build_extension(self, ext):
+ try:
+ build_ext.build_extension(self, ext)
+ except (CCompilerError, DistutilsExecError, DistutilsPlatformError):
+ raise BuildFailed()
+ except ValueError:
+ # this can happen on Windows 64 bit, see Python issue 7511
+ if "'path'" in str(sys.exc_info()[1]): # works with Python 2 and 3
+ raise BuildFailed()
+ raise
+
+
+def run_setup(with_binary):
+ setup(
+ name="MarkupSafe",
+ version=version,
+ url="https://palletsprojects.com/p/markupsafe/",
+ project_urls={
+ "Documentation": "https://markupsafe.palletsprojects.com/",
+ "Code": "https://github.com/pallets/markupsafe",
+ "Issue tracker": "https://github.com/pallets/markupsafe/issues",
+ },
+ license="BSD-3-Clause",
+ author="Armin Ronacher",
+ author_email="armin.ronacher@active-4.com",
+ maintainer="The Pallets Team",
+ maintainer_email="contact@palletsprojects.com",
+ description="Safely add untrusted strings to HTML/XML markup.",
+ long_description=readme,
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ "Environment :: Web Environment",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: BSD License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ "Topic :: Text Processing :: Markup :: HTML",
+ ],
+ packages=find_packages("src"),
+ package_dir={"": "src"},
+ include_package_data=True,
+ python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
+ cmdclass={"build_ext": ve_build_ext},
+ ext_modules=ext_modules if with_binary else [],
+ )
+
+
+def show_message(*lines):
+ print("=" * 74)
+ for line in lines:
+ print(line)
+ print("=" * 74)
+
+
+if not (is_pypy or is_jython):
+ try:
+ run_setup(True)
+ except BuildFailed:
+ show_message(
+ "WARNING: The C extension could not be compiled, speedups"
+ " are not enabled.",
+ "Failure information, if any, is above.",
+ "Retrying the build without the C extension now.",
+ )
+ run_setup(False)
+ show_message(
+ "WARNING: The C extension could not be compiled, speedups"
+ " are not enabled.",
+ "Plain-Python build succeeded.",
+ )
+else:
+ run_setup(False)
+ show_message(
+ "WARNING: C extensions are not supported on this Python"
+ " platform, speedups are not enabled.",
+ "Plain-Python build succeeded.",
+ )
diff --git a/third_party/python/MarkupSafe/src/markupsafe/__init__.py b/third_party/python/MarkupSafe/src/markupsafe/__init__.py
new file mode 100644
index 0000000000..da05ed328a
--- /dev/null
+++ b/third_party/python/MarkupSafe/src/markupsafe/__init__.py
@@ -0,0 +1,327 @@
+# -*- coding: utf-8 -*-
+"""
+markupsafe
+~~~~~~~~~~
+
+Implements an escape function and a Markup string to replace HTML
+special characters with safe representations.
+
+:copyright: 2010 Pallets
+:license: BSD-3-Clause
+"""
+import re
+import string
+
+from ._compat import int_types
+from ._compat import iteritems
+from ._compat import Mapping
+from ._compat import PY2
+from ._compat import string_types
+from ._compat import text_type
+from ._compat import unichr
+
+__version__ = "1.1.1"
+
+__all__ = ["Markup", "soft_unicode", "escape", "escape_silent"]
+
+_striptags_re = re.compile(r"(<!--.*?-->|<[^>]*>)")
+_entity_re = re.compile(r"&([^& ;]+);")
+
+
+class Markup(text_type):
+ """A string that is ready to be safely inserted into an HTML or XML
+ document, either because it was escaped or because it was marked
+ safe.
+
+ Passing an object to the constructor converts it to text and wraps
+ it to mark it safe without escaping. To escape the text, use the
+ :meth:`escape` class method instead.
+
+ >>> Markup('Hello, <em>World</em>!')
+ Markup('Hello, <em>World</em>!')
+ >>> Markup(42)
+ Markup('42')
+ >>> Markup.escape('Hello, <em>World</em>!')
+    Markup('Hello, &lt;em&gt;World&lt;/em&gt;!')
+
+ This implements the ``__html__()`` interface that some frameworks
+ use. Passing an object that implements ``__html__()`` will wrap the
+ output of that method, marking it safe.
+
+ >>> class Foo:
+ ... def __html__(self):
+ ... return '<a href="/foo">foo</a>'
+ ...
+ >>> Markup(Foo())
+ Markup('<a href="/foo">foo</a>')
+
+ This is a subclass of the text type (``str`` in Python 3,
+ ``unicode`` in Python 2). It has the same methods as that type, but
+ all methods escape their arguments and return a ``Markup`` instance.
+
+ >>> Markup('<em>%s</em>') % 'foo & bar'
+ Markup('<em>foo &amp; bar</em>')
+ >>> Markup('<em>Hello</em> ') + '<foo>'
+ Markup('<em>Hello</em> &lt;foo&gt;')
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, base=u"", encoding=None, errors="strict"):
+ if hasattr(base, "__html__"):
+ base = base.__html__()
+ if encoding is None:
+ return text_type.__new__(cls, base)
+ return text_type.__new__(cls, base, encoding, errors)
+
+ def __html__(self):
+ return self
+
+ def __add__(self, other):
+ if isinstance(other, string_types) or hasattr(other, "__html__"):
+ return self.__class__(super(Markup, self).__add__(self.escape(other)))
+ return NotImplemented
+
+ def __radd__(self, other):
+ if hasattr(other, "__html__") or isinstance(other, string_types):
+ return self.escape(other).__add__(self)
+ return NotImplemented
+
+ def __mul__(self, num):
+ if isinstance(num, int_types):
+ return self.__class__(text_type.__mul__(self, num))
+ return NotImplemented
+
+ __rmul__ = __mul__
+
+ def __mod__(self, arg):
+ if isinstance(arg, tuple):
+ arg = tuple(_MarkupEscapeHelper(x, self.escape) for x in arg)
+ else:
+ arg = _MarkupEscapeHelper(arg, self.escape)
+ return self.__class__(text_type.__mod__(self, arg))
+
+ def __repr__(self):
+ return "%s(%s)" % (self.__class__.__name__, text_type.__repr__(self))
+
+ def join(self, seq):
+ return self.__class__(text_type.join(self, map(self.escape, seq)))
+
+ join.__doc__ = text_type.join.__doc__
+
+ def split(self, *args, **kwargs):
+ return list(map(self.__class__, text_type.split(self, *args, **kwargs)))
+
+ split.__doc__ = text_type.split.__doc__
+
+ def rsplit(self, *args, **kwargs):
+ return list(map(self.__class__, text_type.rsplit(self, *args, **kwargs)))
+
+ rsplit.__doc__ = text_type.rsplit.__doc__
+
+ def splitlines(self, *args, **kwargs):
+ return list(map(self.__class__, text_type.splitlines(self, *args, **kwargs)))
+
+ splitlines.__doc__ = text_type.splitlines.__doc__
+
+ def unescape(self):
+ """Convert escaped markup back into a text string. This replaces
+ HTML entities with the characters they represent.
+
+ >>> Markup('Main &raquo; <em>About</em>').unescape()
+ 'Main » <em>About</em>'
+ """
+ from ._constants import HTML_ENTITIES
+
+ def handle_match(m):
+ name = m.group(1)
+ if name in HTML_ENTITIES:
+ return unichr(HTML_ENTITIES[name])
+ try:
+ if name[:2] in ("#x", "#X"):
+ return unichr(int(name[2:], 16))
+ elif name.startswith("#"):
+ return unichr(int(name[1:]))
+ except ValueError:
+ pass
+ # Don't modify unexpected input.
+ return m.group()
+
+ return _entity_re.sub(handle_match, text_type(self))
+
+ def striptags(self):
+ """:meth:`unescape` the markup, remove tags, and normalize
+ whitespace to single spaces.
+
+ >>> Markup('Main &raquo;\t<em>About</em>').striptags()
+ 'Main » About'
+ """
+ stripped = u" ".join(_striptags_re.sub("", self).split())
+ return Markup(stripped).unescape()
+
+ @classmethod
+ def escape(cls, s):
+ """Escape a string. Calls :func:`escape` and ensures that for
+ subclasses the correct type is returned.
+ """
+ rv = escape(s)
+ if rv.__class__ is not cls:
+ return cls(rv)
+ return rv
+
+ def make_simple_escaping_wrapper(name): # noqa: B902
+ orig = getattr(text_type, name)
+
+ def func(self, *args, **kwargs):
+ args = _escape_argspec(list(args), enumerate(args), self.escape)
+ _escape_argspec(kwargs, iteritems(kwargs), self.escape)
+ return self.__class__(orig(self, *args, **kwargs))
+
+ func.__name__ = orig.__name__
+ func.__doc__ = orig.__doc__
+ return func
+
+ for method in (
+ "__getitem__",
+ "capitalize",
+ "title",
+ "lower",
+ "upper",
+ "replace",
+ "ljust",
+ "rjust",
+ "lstrip",
+ "rstrip",
+ "center",
+ "strip",
+ "translate",
+ "expandtabs",
+ "swapcase",
+ "zfill",
+ ):
+ locals()[method] = make_simple_escaping_wrapper(method)
+
+ def partition(self, sep):
+ return tuple(map(self.__class__, text_type.partition(self, self.escape(sep))))
+
+ def rpartition(self, sep):
+ return tuple(map(self.__class__, text_type.rpartition(self, self.escape(sep))))
+
+ def format(self, *args, **kwargs):
+ formatter = EscapeFormatter(self.escape)
+ kwargs = _MagicFormatMapping(args, kwargs)
+ return self.__class__(formatter.vformat(self, args, kwargs))
+
+ def __html_format__(self, format_spec):
+ if format_spec:
+ raise ValueError("Unsupported format specification " "for Markup.")
+ return self
+
+ # not in python 3
+ if hasattr(text_type, "__getslice__"):
+ __getslice__ = make_simple_escaping_wrapper("__getslice__")
+
+ del method, make_simple_escaping_wrapper
+
+
+class _MagicFormatMapping(Mapping):
+ """This class implements a dummy wrapper to fix a bug in the Python
+ standard library for string formatting.
+
+ See http://bugs.python.org/issue13598 for information about why
+ this is necessary.
+ """
+
+ def __init__(self, args, kwargs):
+ self._args = args
+ self._kwargs = kwargs
+ self._last_index = 0
+
+ def __getitem__(self, key):
+ if key == "":
+ idx = self._last_index
+ self._last_index += 1
+ try:
+ return self._args[idx]
+ except LookupError:
+ pass
+ key = str(idx)
+ return self._kwargs[key]
+
+ def __iter__(self):
+ return iter(self._kwargs)
+
+ def __len__(self):
+ return len(self._kwargs)
+
+
+if hasattr(text_type, "format"):
+
+ class EscapeFormatter(string.Formatter):
+ def __init__(self, escape):
+ self.escape = escape
+
+ def format_field(self, value, format_spec):
+ if hasattr(value, "__html_format__"):
+ rv = value.__html_format__(format_spec)
+ elif hasattr(value, "__html__"):
+ if format_spec:
+ raise ValueError(
+ "Format specifier {0} given, but {1} does not"
+ " define __html_format__. A class that defines"
+ " __html__ must define __html_format__ to work"
+ " with format specifiers.".format(format_spec, type(value))
+ )
+ rv = value.__html__()
+ else:
+ # We need to make sure the format spec is unicode here as
+ # otherwise the wrong callback methods are invoked. For
+ # instance a byte string there would invoke __str__ and
+ # not __unicode__.
+ rv = string.Formatter.format_field(self, value, text_type(format_spec))
+ return text_type(self.escape(rv))
+
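A brief sketch of what the formatter above buys you in practice (the example values are invented): arguments passed to ``Markup.format`` are escaped unless they opt in via ``__html__``.

```python
# Illustrative only: format() escapes plain values but trusts __html__ output.
from markupsafe import Markup

user_input = '<script>alert(1)</script>'
link = Markup('<a href="/home">home</a>')
print(Markup("Hi {0}, go to {1}").format(user_input, link))
# Hi &lt;script&gt;alert(1)&lt;/script&gt;, go to <a href="/home">home</a>
```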
+
+def _escape_argspec(obj, iterable, escape):
+ """Helper for various string-wrapped functions."""
+ for key, value in iterable:
+ if hasattr(value, "__html__") or isinstance(value, string_types):
+ obj[key] = escape(value)
+ return obj
+
+
+class _MarkupEscapeHelper(object):
+ """Helper for Markup.__mod__"""
+
+ def __init__(self, obj, escape):
+ self.obj = obj
+ self.escape = escape
+
+ def __getitem__(self, item):
+ return _MarkupEscapeHelper(self.obj[item], self.escape)
+
+ def __str__(self):
+ return text_type(self.escape(self.obj))
+
+ __unicode__ = __str__
+
+ def __repr__(self):
+ return str(self.escape(repr(self.obj)))
+
+ def __int__(self):
+ return int(self.obj)
+
+ def __float__(self):
+ return float(self.obj)
+
+
+# We have to import these down here because the speedups and native
+# modules import the Markup type, which is defined above.
+try:
+ from ._speedups import escape, escape_silent, soft_unicode
+except ImportError:
+ from ._native import escape, escape_silent, soft_unicode
+
+if not PY2:
+ soft_str = soft_unicode
+ __all__.append("soft_str")
diff --git a/third_party/python/MarkupSafe/src/markupsafe/_compat.py b/third_party/python/MarkupSafe/src/markupsafe/_compat.py
new file mode 100644
index 0000000000..bc05090f9e
--- /dev/null
+++ b/third_party/python/MarkupSafe/src/markupsafe/_compat.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+"""
+markupsafe._compat
+~~~~~~~~~~~~~~~~~~
+
+:copyright: 2010 Pallets
+:license: BSD-3-Clause
+"""
+import sys
+
+PY2 = sys.version_info[0] == 2
+
+if not PY2:
+ text_type = str
+ string_types = (str,)
+ unichr = chr
+ int_types = (int,)
+
+ def iteritems(x):
+ return iter(x.items())
+
+ from collections.abc import Mapping
+
+else:
+ text_type = unicode
+ string_types = (str, unicode)
+ unichr = unichr
+ int_types = (int, long)
+
+ def iteritems(x):
+ return x.iteritems()
+
+ from collections import Mapping
diff --git a/third_party/python/MarkupSafe/src/markupsafe/_constants.py b/third_party/python/MarkupSafe/src/markupsafe/_constants.py
new file mode 100644
index 0000000000..7c57c2d294
--- /dev/null
+++ b/third_party/python/MarkupSafe/src/markupsafe/_constants.py
@@ -0,0 +1,264 @@
+# -*- coding: utf-8 -*-
+"""
+markupsafe._constants
+~~~~~~~~~~~~~~~~~~~~~
+
+:copyright: 2010 Pallets
+:license: BSD-3-Clause
+"""
+
+HTML_ENTITIES = {
+ "AElig": 198,
+ "Aacute": 193,
+ "Acirc": 194,
+ "Agrave": 192,
+ "Alpha": 913,
+ "Aring": 197,
+ "Atilde": 195,
+ "Auml": 196,
+ "Beta": 914,
+ "Ccedil": 199,
+ "Chi": 935,
+ "Dagger": 8225,
+ "Delta": 916,
+ "ETH": 208,
+ "Eacute": 201,
+ "Ecirc": 202,
+ "Egrave": 200,
+ "Epsilon": 917,
+ "Eta": 919,
+ "Euml": 203,
+ "Gamma": 915,
+ "Iacute": 205,
+ "Icirc": 206,
+ "Igrave": 204,
+ "Iota": 921,
+ "Iuml": 207,
+ "Kappa": 922,
+ "Lambda": 923,
+ "Mu": 924,
+ "Ntilde": 209,
+ "Nu": 925,
+ "OElig": 338,
+ "Oacute": 211,
+ "Ocirc": 212,
+ "Ograve": 210,
+ "Omega": 937,
+ "Omicron": 927,
+ "Oslash": 216,
+ "Otilde": 213,
+ "Ouml": 214,
+ "Phi": 934,
+ "Pi": 928,
+ "Prime": 8243,
+ "Psi": 936,
+ "Rho": 929,
+ "Scaron": 352,
+ "Sigma": 931,
+ "THORN": 222,
+ "Tau": 932,
+ "Theta": 920,
+ "Uacute": 218,
+ "Ucirc": 219,
+ "Ugrave": 217,
+ "Upsilon": 933,
+ "Uuml": 220,
+ "Xi": 926,
+ "Yacute": 221,
+ "Yuml": 376,
+ "Zeta": 918,
+ "aacute": 225,
+ "acirc": 226,
+ "acute": 180,
+ "aelig": 230,
+ "agrave": 224,
+ "alefsym": 8501,
+ "alpha": 945,
+ "amp": 38,
+ "and": 8743,
+ "ang": 8736,
+ "apos": 39,
+ "aring": 229,
+ "asymp": 8776,
+ "atilde": 227,
+ "auml": 228,
+ "bdquo": 8222,
+ "beta": 946,
+ "brvbar": 166,
+ "bull": 8226,
+ "cap": 8745,
+ "ccedil": 231,
+ "cedil": 184,
+ "cent": 162,
+ "chi": 967,
+ "circ": 710,
+ "clubs": 9827,
+ "cong": 8773,
+ "copy": 169,
+ "crarr": 8629,
+ "cup": 8746,
+ "curren": 164,
+ "dArr": 8659,
+ "dagger": 8224,
+ "darr": 8595,
+ "deg": 176,
+ "delta": 948,
+ "diams": 9830,
+ "divide": 247,
+ "eacute": 233,
+ "ecirc": 234,
+ "egrave": 232,
+ "empty": 8709,
+ "emsp": 8195,
+ "ensp": 8194,
+ "epsilon": 949,
+ "equiv": 8801,
+ "eta": 951,
+ "eth": 240,
+ "euml": 235,
+ "euro": 8364,
+ "exist": 8707,
+ "fnof": 402,
+ "forall": 8704,
+ "frac12": 189,
+ "frac14": 188,
+ "frac34": 190,
+ "frasl": 8260,
+ "gamma": 947,
+ "ge": 8805,
+ "gt": 62,
+ "hArr": 8660,
+ "harr": 8596,
+ "hearts": 9829,
+ "hellip": 8230,
+ "iacute": 237,
+ "icirc": 238,
+ "iexcl": 161,
+ "igrave": 236,
+ "image": 8465,
+ "infin": 8734,
+ "int": 8747,
+ "iota": 953,
+ "iquest": 191,
+ "isin": 8712,
+ "iuml": 239,
+ "kappa": 954,
+ "lArr": 8656,
+ "lambda": 955,
+ "lang": 9001,
+ "laquo": 171,
+ "larr": 8592,
+ "lceil": 8968,
+ "ldquo": 8220,
+ "le": 8804,
+ "lfloor": 8970,
+ "lowast": 8727,
+ "loz": 9674,
+ "lrm": 8206,
+ "lsaquo": 8249,
+ "lsquo": 8216,
+ "lt": 60,
+ "macr": 175,
+ "mdash": 8212,
+ "micro": 181,
+ "middot": 183,
+ "minus": 8722,
+ "mu": 956,
+ "nabla": 8711,
+ "nbsp": 160,
+ "ndash": 8211,
+ "ne": 8800,
+ "ni": 8715,
+ "not": 172,
+ "notin": 8713,
+ "nsub": 8836,
+ "ntilde": 241,
+ "nu": 957,
+ "oacute": 243,
+ "ocirc": 244,
+ "oelig": 339,
+ "ograve": 242,
+ "oline": 8254,
+ "omega": 969,
+ "omicron": 959,
+ "oplus": 8853,
+ "or": 8744,
+ "ordf": 170,
+ "ordm": 186,
+ "oslash": 248,
+ "otilde": 245,
+ "otimes": 8855,
+ "ouml": 246,
+ "para": 182,
+ "part": 8706,
+ "permil": 8240,
+ "perp": 8869,
+ "phi": 966,
+ "pi": 960,
+ "piv": 982,
+ "plusmn": 177,
+ "pound": 163,
+ "prime": 8242,
+ "prod": 8719,
+ "prop": 8733,
+ "psi": 968,
+ "quot": 34,
+ "rArr": 8658,
+ "radic": 8730,
+ "rang": 9002,
+ "raquo": 187,
+ "rarr": 8594,
+ "rceil": 8969,
+ "rdquo": 8221,
+ "real": 8476,
+ "reg": 174,
+ "rfloor": 8971,
+ "rho": 961,
+ "rlm": 8207,
+ "rsaquo": 8250,
+ "rsquo": 8217,
+ "sbquo": 8218,
+ "scaron": 353,
+ "sdot": 8901,
+ "sect": 167,
+ "shy": 173,
+ "sigma": 963,
+ "sigmaf": 962,
+ "sim": 8764,
+ "spades": 9824,
+ "sub": 8834,
+ "sube": 8838,
+ "sum": 8721,
+ "sup": 8835,
+ "sup1": 185,
+ "sup2": 178,
+ "sup3": 179,
+ "supe": 8839,
+ "szlig": 223,
+ "tau": 964,
+ "there4": 8756,
+ "theta": 952,
+ "thetasym": 977,
+ "thinsp": 8201,
+ "thorn": 254,
+ "tilde": 732,
+ "times": 215,
+ "trade": 8482,
+ "uArr": 8657,
+ "uacute": 250,
+ "uarr": 8593,
+ "ucirc": 251,
+ "ugrave": 249,
+ "uml": 168,
+ "upsih": 978,
+ "upsilon": 965,
+ "uuml": 252,
+ "weierp": 8472,
+ "xi": 958,
+ "yacute": 253,
+ "yen": 165,
+ "yuml": 255,
+ "zeta": 950,
+ "zwj": 8205,
+ "zwnj": 8204,
+}
diff --git a/third_party/python/MarkupSafe/src/markupsafe/_native.py b/third_party/python/MarkupSafe/src/markupsafe/_native.py
new file mode 100644
index 0000000000..cd08752cd8
--- /dev/null
+++ b/third_party/python/MarkupSafe/src/markupsafe/_native.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+"""
+markupsafe._native
+~~~~~~~~~~~~~~~~~~
+
+Native Python implementation used when the C module is not compiled.
+
+:copyright: 2010 Pallets
+:license: BSD-3-Clause
+"""
+from . import Markup
+from ._compat import text_type
+
+
+def escape(s):
+ """Replace the characters ``&``, ``<``, ``>``, ``'``, and ``"`` in
+ the string with HTML-safe sequences. Use this if you need to display
+ text that might contain such characters in HTML.
+
+ If the object has an ``__html__`` method, it is called and the
+ return value is assumed to already be safe for HTML.
+
+ :param s: An object to be converted to a string and escaped.
+ :return: A :class:`Markup` string with the escaped text.
+ """
+ if hasattr(s, "__html__"):
+ return Markup(s.__html__())
+ return Markup(
+ text_type(s)
+ .replace("&", "&amp;")
+ .replace(">", "&gt;")
+ .replace("<", "&lt;")
+ .replace("'", "&#39;")
+ .replace('"', "&#34;")
+ )
+
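A compact sketch of the two behaviors described in the docstring (the ``Widget`` class is invented): plain text is escaped, while objects providing ``__html__`` are trusted as-is.

```python
# Illustrative only: escape() on plain text vs. an object with __html__.
from markupsafe import escape


class Widget(object):
    def __html__(self):
        return "<button>OK</button>"


print(escape('<script>"hi"</script>'))  # &lt;script&gt;&#34;hi&#34;&lt;/script&gt;
print(escape(Widget()))                 # <button>OK</button>
```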
+
+def escape_silent(s):
+ """Like :func:`escape` but treats ``None`` as the empty string.
+ Useful with optional values, as otherwise you get the string
+ ``'None'`` when the value is ``None``.
+
+ >>> escape(None)
+ Markup('None')
+ >>> escape_silent(None)
+ Markup('')
+ """
+ if s is None:
+ return Markup()
+ return escape(s)
+
+
+def soft_unicode(s):
+ """Convert an object to a string if it isn't already. This preserves
+ a :class:`Markup` string rather than converting it back to a basic
+ string, so it will still be marked as safe and won't be escaped
+ again.
+
+ >>> value = escape('<User 1>')
+ >>> value
+ Markup('&lt;User 1&gt;')
+ >>> escape(str(value))
+ Markup('&amp;lt;User 1&amp;gt;')
+ >>> escape(soft_unicode(value))
+ Markup('&lt;User 1&gt;')
+ """
+ if not isinstance(s, text_type):
+ s = text_type(s)
+ return s
diff --git a/third_party/python/MarkupSafe/src/markupsafe/_speedups.c b/third_party/python/MarkupSafe/src/markupsafe/_speedups.c
new file mode 100644
index 0000000000..12d2c4a7d8
--- /dev/null
+++ b/third_party/python/MarkupSafe/src/markupsafe/_speedups.c
@@ -0,0 +1,423 @@
+/**
+ * markupsafe._speedups
+ * ~~~~~~~~~~~~~~~~~~~~
+ *
+ * C implementation of escaping for better performance. Used instead of
+ * the native Python implementation when compiled.
+ *
+ * :copyright: 2010 Pallets
+ * :license: BSD-3-Clause
+ */
+#include <Python.h>
+
+#if PY_MAJOR_VERSION < 3
+#define ESCAPED_CHARS_TABLE_SIZE 63
+#define UNICHR(x) (PyUnicode_AS_UNICODE((PyUnicodeObject*)PyUnicode_DecodeASCII(x, strlen(x), NULL)));
+
+static Py_ssize_t escaped_chars_delta_len[ESCAPED_CHARS_TABLE_SIZE];
+static Py_UNICODE *escaped_chars_repl[ESCAPED_CHARS_TABLE_SIZE];
+#endif
+
+static PyObject* markup;
+
+static int
+init_constants(void)
+{
+ PyObject *module;
+
+#if PY_MAJOR_VERSION < 3
+ /* mapping of characters to replace */
+ escaped_chars_repl['"'] = UNICHR("&#34;");
+ escaped_chars_repl['\''] = UNICHR("&#39;");
+ escaped_chars_repl['&'] = UNICHR("&amp;");
+ escaped_chars_repl['<'] = UNICHR("&lt;");
+ escaped_chars_repl['>'] = UNICHR("&gt;");
+
+ /* lengths of those characters when replaced - 1 */
+ memset(escaped_chars_delta_len, 0, sizeof (escaped_chars_delta_len));
+ escaped_chars_delta_len['"'] = escaped_chars_delta_len['\''] = \
+ escaped_chars_delta_len['&'] = 4;
+ escaped_chars_delta_len['<'] = escaped_chars_delta_len['>'] = 3;
+#endif
+
+ /* import markup type so that we can mark the return value */
+ module = PyImport_ImportModule("markupsafe");
+ if (!module)
+ return 0;
+ markup = PyObject_GetAttrString(module, "Markup");
+ Py_DECREF(module);
+
+ return 1;
+}
+
+#if PY_MAJOR_VERSION < 3
+static PyObject*
+escape_unicode(PyUnicodeObject *in)
+{
+ PyUnicodeObject *out;
+ Py_UNICODE *inp = PyUnicode_AS_UNICODE(in);
+ const Py_UNICODE *inp_end = PyUnicode_AS_UNICODE(in) + PyUnicode_GET_SIZE(in);
+ Py_UNICODE *next_escp;
+ Py_UNICODE *outp;
+ Py_ssize_t delta=0, erepl=0, delta_len=0;
+
+ /* First we need to figure out how long the escaped string will be */
+ while (*(inp) || inp < inp_end) {
+ if (*inp < ESCAPED_CHARS_TABLE_SIZE) {
+ delta += escaped_chars_delta_len[*inp];
+ erepl += !!escaped_chars_delta_len[*inp];
+ }
+ ++inp;
+ }
+
+ /* Do we need to escape anything at all? */
+ if (!erepl) {
+ Py_INCREF(in);
+ return (PyObject*)in;
+ }
+
+ out = (PyUnicodeObject*)PyUnicode_FromUnicode(NULL, PyUnicode_GET_SIZE(in) + delta);
+ if (!out)
+ return NULL;
+
+ outp = PyUnicode_AS_UNICODE(out);
+ inp = PyUnicode_AS_UNICODE(in);
+ while (erepl-- > 0) {
+ /* look for the next substitution */
+ next_escp = inp;
+ while (next_escp < inp_end) {
+ if (*next_escp < ESCAPED_CHARS_TABLE_SIZE &&
+ (delta_len = escaped_chars_delta_len[*next_escp])) {
+ ++delta_len;
+ break;
+ }
+ ++next_escp;
+ }
+
+ if (next_escp > inp) {
+ /* copy unescaped chars between inp and next_escp */
+ Py_UNICODE_COPY(outp, inp, next_escp-inp);
+ outp += next_escp - inp;
+ }
+
+ /* escape 'next_escp' */
+ Py_UNICODE_COPY(outp, escaped_chars_repl[*next_escp], delta_len);
+ outp += delta_len;
+
+ inp = next_escp + 1;
+ }
+ if (inp < inp_end)
+ Py_UNICODE_COPY(outp, inp, PyUnicode_GET_SIZE(in) - (inp - PyUnicode_AS_UNICODE(in)));
+
+ return (PyObject*)out;
+}
+#else /* PY_MAJOR_VERSION < 3 */
+
+#define GET_DELTA(inp, inp_end, delta) \
+ while (inp < inp_end) { \
+ switch (*inp++) { \
+ case '"': \
+ case '\'': \
+ case '&': \
+ delta += 4; \
+ break; \
+ case '<': \
+ case '>': \
+ delta += 3; \
+ break; \
+ } \
+ }
+
+#define DO_ESCAPE(inp, inp_end, outp) \
+ { \
+ Py_ssize_t ncopy = 0; \
+ while (inp < inp_end) { \
+ switch (*inp) { \
+ case '"': \
+ memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+ outp += ncopy; ncopy = 0; \
+ *outp++ = '&'; \
+ *outp++ = '#'; \
+ *outp++ = '3'; \
+ *outp++ = '4'; \
+ *outp++ = ';'; \
+ break; \
+ case '\'': \
+ memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+ outp += ncopy; ncopy = 0; \
+ *outp++ = '&'; \
+ *outp++ = '#'; \
+ *outp++ = '3'; \
+ *outp++ = '9'; \
+ *outp++ = ';'; \
+ break; \
+ case '&': \
+ memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+ outp += ncopy; ncopy = 0; \
+ *outp++ = '&'; \
+ *outp++ = 'a'; \
+ *outp++ = 'm'; \
+ *outp++ = 'p'; \
+ *outp++ = ';'; \
+ break; \
+ case '<': \
+ memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+ outp += ncopy; ncopy = 0; \
+ *outp++ = '&'; \
+ *outp++ = 'l'; \
+ *outp++ = 't'; \
+ *outp++ = ';'; \
+ break; \
+ case '>': \
+ memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+ outp += ncopy; ncopy = 0; \
+ *outp++ = '&'; \
+ *outp++ = 'g'; \
+ *outp++ = 't'; \
+ *outp++ = ';'; \
+ break; \
+ default: \
+ ncopy++; \
+ } \
+ inp++; \
+ } \
+ memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+ }
+
+static PyObject*
+escape_unicode_kind1(PyUnicodeObject *in)
+{
+ Py_UCS1 *inp = PyUnicode_1BYTE_DATA(in);
+ Py_UCS1 *inp_end = inp + PyUnicode_GET_LENGTH(in);
+ Py_UCS1 *outp;
+ PyObject *out;
+ Py_ssize_t delta = 0;
+
+ GET_DELTA(inp, inp_end, delta);
+ if (!delta) {
+ Py_INCREF(in);
+ return (PyObject*)in;
+ }
+
+ out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta,
+ PyUnicode_IS_ASCII(in) ? 127 : 255);
+ if (!out)
+ return NULL;
+
+ inp = PyUnicode_1BYTE_DATA(in);
+ outp = PyUnicode_1BYTE_DATA(out);
+ DO_ESCAPE(inp, inp_end, outp);
+ return out;
+}
+
+static PyObject*
+escape_unicode_kind2(PyUnicodeObject *in)
+{
+ Py_UCS2 *inp = PyUnicode_2BYTE_DATA(in);
+ Py_UCS2 *inp_end = inp + PyUnicode_GET_LENGTH(in);
+ Py_UCS2 *outp;
+ PyObject *out;
+ Py_ssize_t delta = 0;
+
+ GET_DELTA(inp, inp_end, delta);
+ if (!delta) {
+ Py_INCREF(in);
+ return (PyObject*)in;
+ }
+
+ out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 65535);
+ if (!out)
+ return NULL;
+
+ inp = PyUnicode_2BYTE_DATA(in);
+ outp = PyUnicode_2BYTE_DATA(out);
+ DO_ESCAPE(inp, inp_end, outp);
+ return out;
+}
+
+
+static PyObject*
+escape_unicode_kind4(PyUnicodeObject *in)
+{
+ Py_UCS4 *inp = PyUnicode_4BYTE_DATA(in);
+ Py_UCS4 *inp_end = inp + PyUnicode_GET_LENGTH(in);
+ Py_UCS4 *outp;
+ PyObject *out;
+ Py_ssize_t delta = 0;
+
+ GET_DELTA(inp, inp_end, delta);
+ if (!delta) {
+ Py_INCREF(in);
+ return (PyObject*)in;
+ }
+
+ out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 1114111);
+ if (!out)
+ return NULL;
+
+ inp = PyUnicode_4BYTE_DATA(in);
+ outp = PyUnicode_4BYTE_DATA(out);
+ DO_ESCAPE(inp, inp_end, outp);
+ return out;
+}
+
+static PyObject*
+escape_unicode(PyUnicodeObject *in)
+{
+ if (PyUnicode_READY(in))
+ return NULL;
+
+ switch (PyUnicode_KIND(in)) {
+ case PyUnicode_1BYTE_KIND:
+ return escape_unicode_kind1(in);
+ case PyUnicode_2BYTE_KIND:
+ return escape_unicode_kind2(in);
+ case PyUnicode_4BYTE_KIND:
+ return escape_unicode_kind4(in);
+ }
+ assert(0); /* shouldn't happen */
+ return NULL;
+}
+#endif /* PY_MAJOR_VERSION < 3 */
+
+static PyObject*
+escape(PyObject *self, PyObject *text)
+{
+ static PyObject *id_html;
+ PyObject *s = NULL, *rv = NULL, *html;
+
+ if (id_html == NULL) {
+#if PY_MAJOR_VERSION < 3
+ id_html = PyString_InternFromString("__html__");
+#else
+ id_html = PyUnicode_InternFromString("__html__");
+#endif
+ if (id_html == NULL) {
+ return NULL;
+ }
+ }
+
+ /* we don't have to escape integers, bools or floats */
+ if (PyLong_CheckExact(text) ||
+#if PY_MAJOR_VERSION < 3
+ PyInt_CheckExact(text) ||
+#endif
+ PyFloat_CheckExact(text) || PyBool_Check(text) ||
+ text == Py_None)
+ return PyObject_CallFunctionObjArgs(markup, text, NULL);
+
+ /* if the object has an __html__ method that performs the escaping */
+ html = PyObject_GetAttr(text ,id_html);
+ if (html) {
+ s = PyObject_CallObject(html, NULL);
+ Py_DECREF(html);
+ if (s == NULL) {
+ return NULL;
+ }
+ /* Convert to Markup object */
+ rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL);
+ Py_DECREF(s);
+ return rv;
+ }
+
+ /* otherwise make the object unicode if it isn't, then escape */
+ PyErr_Clear();
+ if (!PyUnicode_Check(text)) {
+#if PY_MAJOR_VERSION < 3
+ PyObject *unicode = PyObject_Unicode(text);
+#else
+ PyObject *unicode = PyObject_Str(text);
+#endif
+ if (!unicode)
+ return NULL;
+ s = escape_unicode((PyUnicodeObject*)unicode);
+ Py_DECREF(unicode);
+ }
+ else
+ s = escape_unicode((PyUnicodeObject*)text);
+
+ /* convert the unicode string into a markup object. */
+ rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL);
+ Py_DECREF(s);
+ return rv;
+}
+
+
+static PyObject*
+escape_silent(PyObject *self, PyObject *text)
+{
+ if (text != Py_None)
+ return escape(self, text);
+ return PyObject_CallFunctionObjArgs(markup, NULL);
+}
+
+
+static PyObject*
+soft_unicode(PyObject *self, PyObject *s)
+{
+ if (!PyUnicode_Check(s))
+#if PY_MAJOR_VERSION < 3
+ return PyObject_Unicode(s);
+#else
+ return PyObject_Str(s);
+#endif
+ Py_INCREF(s);
+ return s;
+}
+
+
+static PyMethodDef module_methods[] = {
+ {"escape", (PyCFunction)escape, METH_O,
+ "escape(s) -> markup\n\n"
+ "Convert the characters &, <, >, ', and \" in string s to HTML-safe\n"
+ "sequences. Use this if you need to display text that might contain\n"
+ "such characters in HTML. Marks return value as markup string."},
+ {"escape_silent", (PyCFunction)escape_silent, METH_O,
+ "escape_silent(s) -> markup\n\n"
+ "Like escape but converts None to an empty string."},
+ {"soft_unicode", (PyCFunction)soft_unicode, METH_O,
+ "soft_unicode(object) -> string\n\n"
+ "Make a string unicode if it isn't already. That way a markup\n"
+ "string is not converted back to unicode."},
+ {NULL, NULL, 0, NULL} /* Sentinel */
+};
+
+
+#if PY_MAJOR_VERSION < 3
+
+#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
+#define PyMODINIT_FUNC void
+#endif
+PyMODINIT_FUNC
+init_speedups(void)
+{
+ if (!init_constants())
+ return;
+
+ Py_InitModule3("markupsafe._speedups", module_methods, "");
+}
+
+#else /* Python 3.x module initialization */
+
+static struct PyModuleDef module_definition = {
+ PyModuleDef_HEAD_INIT,
+ "markupsafe._speedups",
+ NULL,
+ -1,
+ module_methods,
+ NULL,
+ NULL,
+ NULL,
+ NULL
+};
+
+PyMODINIT_FUNC
+PyInit__speedups(void)
+{
+ if (!init_constants())
+ return NULL;
+
+ return PyModule_Create(&module_definition);
+}
+
+#endif
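The ``GET_DELTA``/``DO_ESCAPE`` macros above implement a two-pass strategy: the first pass measures how much longer the escaped text will be so the output buffer can be allocated once, and the second pass copies and substitutes in a single sweep. A rough Python sketch of the same idea (not the C code itself):

```python
# Illustrative only: the shape of the two-pass escape used by _speedups.c.
_GROWTH = {'"': 4, "'": 4, "&": 4, "<": 3, ">": 3}
_REPLACEMENT = {'"': "&#34;", "'": "&#39;", "&": "&amp;", "<": "&lt;", ">": "&gt;"}


def escape_two_pass(text):
    # Pass 1: how many extra characters will the escaped string need?
    delta = sum(_GROWTH.get(ch, 0) for ch in text)
    if not delta:
        return text  # nothing to escape; reuse the input (like Py_INCREF(in))
    # Pass 2: emit the output in one sweep (the C code writes into a
    # preallocated buffer of len(text) + delta instead).
    return "".join(_REPLACEMENT.get(ch, ch) for ch in text)


print(escape_two_pass('<a href="x">'))  # &lt;a href=&#34;x&#34;&gt;
```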
diff --git a/third_party/python/MarkupSafe/tox.ini b/third_party/python/MarkupSafe/tox.ini
new file mode 100644
index 0000000000..8e07c652e3
--- /dev/null
+++ b/third_party/python/MarkupSafe/tox.ini
@@ -0,0 +1,44 @@
+[tox]
+envlist =
+    py{37,36,35,34,27,py3,py}
+ stylecheck
+ docs-html
+ coverage-report
+skip_missing_interpreters = true
+
+[testenv]
+setenv =
+ COVERAGE_FILE = .coverage.{envname}
+deps =
+ pytest-cov
+commands = pytest --tb=short --cov --cov-report= {posargs}
+
+[testenv:stylecheck]
+deps = pre-commit
+skip_install = true
+commands = pre-commit run --all-files --show-diff-on-failure
+
+[testenv:docs-html]
+deps = -r docs/requirements.txt
+commands = sphinx-build -W -b html -d {envtmpdir}/doctrees docs {envtmpdir}/html
+
+[testenv:coverage-report]
+setenv =
+ COVERAGE_FILE = .coverage
+deps = coverage
+skip_install = true
+commands =
+ coverage combine
+ coverage html
+ coverage report
+
+[testenv:codecov]
+passenv = CI TRAVIS TRAVIS_*
+setenv =
+ COVERAGE_FILE = .coverage
+deps = codecov
+skip_install = true
+commands =
+ coverage combine
+ codecov
+ coverage report
diff --git a/third_party/python/PyYAML/CHANGES b/third_party/python/PyYAML/CHANGES
new file mode 100644
index 0000000000..f3facb14cf
--- /dev/null
+++ b/third_party/python/PyYAML/CHANGES
@@ -0,0 +1,242 @@
+
+For a complete changelog, see:
+
+* https://github.com/yaml/pyyaml/commits/
+* https://bitbucket.org/xi/pyyaml/commits/
+
+5.3.1 (2020-03-18)
+
+* https://github.com/yaml/pyyaml/pull/386 -- Prevents arbitrary code execution during python/object/new constructor
+
+5.3 (2020-01-06)
+
+* https://github.com/yaml/pyyaml/pull/290 -- Use `is` instead of equality for comparing with `None`
+* https://github.com/yaml/pyyaml/pull/270 -- fix typos and stylistic nit
+* https://github.com/yaml/pyyaml/pull/309 -- Fix up small typo
+* https://github.com/yaml/pyyaml/pull/161 -- Fix handling of __slots__
+* https://github.com/yaml/pyyaml/pull/358 -- Allow calling add_multi_constructor with None
+* https://github.com/yaml/pyyaml/pull/285 -- Add use of safe_load() function in README
+* https://github.com/yaml/pyyaml/pull/351 -- Fix reader for Unicode code points over 0xFFFF
+* https://github.com/yaml/pyyaml/pull/360 -- Enable certain unicode tests when maxunicode not > 0xffff
+* https://github.com/yaml/pyyaml/pull/359 -- Use full_load in yaml-highlight example
+* https://github.com/yaml/pyyaml/pull/244 -- Document that PyYAML is implemented with Cython
+* https://github.com/yaml/pyyaml/pull/329 -- Fix for Python 3.10
+* https://github.com/yaml/pyyaml/pull/310 -- increase size of index, line, and column fields
+* https://github.com/yaml/pyyaml/pull/260 -- remove some unused imports
+* https://github.com/yaml/pyyaml/pull/163 -- Create timezone-aware datetimes when parsed as such
+* https://github.com/yaml/pyyaml/pull/363 -- Add tests for timezone
+
+5.2 (2019-12-02)
+------------------
+
+* Repair incompatibilities introduced with 5.1. The default Loader was changed,
+ but several methods like add_constructor still used the old default
+ https://github.com/yaml/pyyaml/pull/279 -- A more flexible fix for custom tag constructors
+ https://github.com/yaml/pyyaml/pull/287 -- Change default loader for yaml.add_constructor
+ https://github.com/yaml/pyyaml/pull/305 -- Change default loader for add_implicit_resolver, add_path_resolver
+* Make FullLoader safer by removing python/object/apply from the default FullLoader
+ https://github.com/yaml/pyyaml/pull/347 -- Move constructor for object/apply to UnsafeConstructor
+* Fix bug introduced in 5.1 where quoting went wrong on systems with sys.maxunicode <= 0xffff
+ https://github.com/yaml/pyyaml/pull/276 -- Fix logic for quoting special characters
+* Other PRs:
+ https://github.com/yaml/pyyaml/pull/280 -- Update CHANGES for 5.1
+
+5.1.2 (2019-07-30)
+------------------
+
+* Re-release of 5.1 with regenerated Cython sources to build properly for Python 3.8b2+
+
+5.1.1 (2019-06-05)
+------------------
+
+* Re-release of 5.1 with regenerated Cython sources to build properly for Python 3.8b1
+
+5.1 (2019-03-13)
+----------------
+
+* https://github.com/yaml/pyyaml/pull/35 -- Some modernization of the test running
+* https://github.com/yaml/pyyaml/pull/42 -- Install tox in a virtualenv
+* https://github.com/yaml/pyyaml/pull/45 -- Allow colon in a plain scalar in a flow context
+* https://github.com/yaml/pyyaml/pull/48 -- Fix typos
+* https://github.com/yaml/pyyaml/pull/55 -- Improve RepresenterError creation
+* https://github.com/yaml/pyyaml/pull/59 -- Resolves #57, update readme issues link
+* https://github.com/yaml/pyyaml/pull/60 -- Document and test Python 3.6 support
+* https://github.com/yaml/pyyaml/pull/61 -- Use Travis CI built in pip cache support
+* https://github.com/yaml/pyyaml/pull/62 -- Remove tox workaround for Travis CI
+* https://github.com/yaml/pyyaml/pull/63 -- Adding support to Unicode characters over codepoint 0xffff
+* https://github.com/yaml/pyyaml/pull/75 -- add 3.12 changelog
+* https://github.com/yaml/pyyaml/pull/76 -- Fallback to Pure Python if Compilation fails
+* https://github.com/yaml/pyyaml/pull/84 -- Drop unsupported Python 3.3
+* https://github.com/yaml/pyyaml/pull/102 -- Include license file in the generated wheel package
+* https://github.com/yaml/pyyaml/pull/105 -- Removed Python 2.6 & 3.3 support
+* https://github.com/yaml/pyyaml/pull/111 -- Remove commented out Psyco code
+* https://github.com/yaml/pyyaml/pull/129 -- Remove call to `ord` in lib3 emitter code
+* https://github.com/yaml/pyyaml/pull/149 -- Test on Python 3.7-dev
+* https://github.com/yaml/pyyaml/pull/158 -- Support escaped slash in double quotes "\/"
+* https://github.com/yaml/pyyaml/pull/175 -- Updated link to pypi in release announcement
+* https://github.com/yaml/pyyaml/pull/181 -- Import Hashable from collections.abc
+* https://github.com/yaml/pyyaml/pull/194 -- Reverting https://github.com/yaml/pyyaml/pull/74
+* https://github.com/yaml/pyyaml/pull/195 -- Build libyaml on travis
+* https://github.com/yaml/pyyaml/pull/196 -- Force cython when building sdist
+* https://github.com/yaml/pyyaml/pull/254 -- Allow to turn off sorting keys in Dumper (2)
+* https://github.com/yaml/pyyaml/pull/256 -- Make default_flow_style=False
+* https://github.com/yaml/pyyaml/pull/257 -- Deprecate yaml.load and add FullLoader and UnsafeLoader classes
+* https://github.com/yaml/pyyaml/pull/261 -- Skip certain unicode tests when maxunicode not > 0xffff
+* https://github.com/yaml/pyyaml/pull/263 -- Windows Appveyor build
+
+3.13 (2018-07-05)
+-----------------
+
+* Resolved issues around PyYAML working in Python 3.7.
+
+3.12 (2016-08-28)
+-----------------
+
+* Wheel packages for Windows binaries.
+* Adding an implicit resolver to a derived loader should not affect the base loader.
+* Uniform representation for OrderedDict across different versions of Python.
+* Fixed comparison to None warning.
+
+3.11 (2014-03-26)
+-----------------
+
+* Source and binary distributions are rebuilt against the latest
+ versions of Cython and LibYAML.
+
+3.10 (2011-05-30)
+-----------------
+
+* Do not try to build LibYAML bindings on platforms other than CPython
+  (Thanks to olt(at)bogosoft(dot)com).
+* Clear cyclic references in the parser and the emitter
+  (Thanks to kristjan(at)ccpgames(dot)com).
+* Dropped support for Python 2.3 and 2.4.
+
+3.09 (2009-08-31)
+-----------------
+
+* Fixed an obscure scanner error that was not reported when there is
+  no line break at the end of the stream (Thanks to Ingy).
+* Fixed use of uninitialized memory when emitting anchors with
+  LibYAML bindings (Thanks to cegner(at)yahoo-inc(dot)com).
+* Fixed emitting incorrect BOM characters for UTF-16 (Thanks to
+  Valentin Nechayev).
+* Fixed the emitter for folded scalars not respecting the preferred
+  line width (Thanks to Ingy).
+* Fixed a subtle ordering issue with emitting '%TAG' directives
+  (Thanks to Andrey Somov).
+* Fixed performance regression with LibYAML bindings.
+
+
+3.08 (2008-12-31)
+-----------------
+
+* Python 3 support (Thanks to Erick Tryzelaar).
+* Use Cython instead of Pyrex to build LibYAML bindings.
+* Refactored support for unicode and byte input/output streams.
+
+
+3.07 (2008-12-29)
+-----------------
+
+* The emitter learned to use an optional indentation indicator
+ for block scalar; thus scalars with leading whitespaces
+ could now be represented in a literal or folded style.
+* The test suite is now included in the source distribution.
+ To run the tests, type 'python setup.py test'.
+* Refactored the test suite: dropped unittest in favor of
+ a custom test appliance.
+* Fixed the path resolver in CDumper.
+* Forced an explicit document end indicator when there is
+ a possibility of parsing ambiguity.
+* More setup.py improvements: the package should be usable
+ when any combination of setuptools, Pyrex and LibYAML
+ is installed.
+* Windows binary packages are built against LibYAML-0.1.2.
+* Minor typos and corrections (Thanks to Ingy dot Net
+  and Andrey Somov).
+
+
+3.06 (2008-10-03)
+-----------------
+
+* setup.py checks whether LibYAML is installed and if so, builds
+ and installs LibYAML bindings. To force or disable installation
+ of LibYAML bindings, use '--with-libyaml' or '--without-libyaml'
+ respectively.
+* The source distribution includes compiled Pyrex sources so
+ building LibYAML bindings no longer requires Pyrex installed.
+* 'yaml.load()' raises an exception if the input stream contains
+ more than one YAML document.
+* Fixed exceptions produced by LibYAML bindings.
+* Fixed a dot '.' character being recognized as !!float.
+* Fixed Python 2.3 compatibility issue in constructing !!timestamp values.
+* Windows binary packages are built against the LibYAML stable branch.
+* Added attributes 'yaml.__version__' and 'yaml.__with_libyaml__'.
+
+
+3.05 (2007-05-13)
+-----------------
+
+* Windows binary packages were built with LibYAML trunk.
+* Fixed a bug that prevented processing a live stream of YAML documents in
+  a timely manner (Thanks edward(at)sweetbytes(dot)net).
+* Fixed a bug when the path in add_path_resolver contains boolean values
+ (Thanks jstroud(at)mbi(dot)ucla(dot)edu).
+* Fixed loss of microsecond precision in timestamps
+ (Thanks edemaine(at)mit(dot)edu).
+* Fixed loading an empty YAML stream.
+* Allowed immutable subclasses of YAMLObject.
+* Made the encoding of the unicode->str conversion explicit so that
+ the conversion does not depend on the default Python encoding.
+* Forced emitting float values in a YAML compatible form.
+
+
+3.04 (2006-08-20)
+-----------------
+
+* Include experimental LibYAML bindings.
+* Fully support recursive structures.
+* Sort dictionary keys. Mapping node values are now represented
+ as lists of pairs instead of dictionaries. No longer check
+ for duplicate mapping keys as it didn't work correctly anyway.
+* Fix invalid output of single-quoted scalars in cases when a single
+ quote is not escaped when preceded by whitespaces or line breaks.
+* To make porting easier, rewrite Parser not using generators.
+* Fix handling of unexpected block mapping values.
+* Fix a bug in Representer.represent_object: copy_reg.dispatch_table
+ was not correctly handled.
+* Fix a bug when a block scalar is incorrectly emitted in the simple
+ key context.
+* Hold references to the objects being represented.
+* Make Representer not try to guess !!pairs when a list is represented.
+* Fix timestamp constructing and representing.
+* Fix the 'N' plain scalar being incorrectly recognized as !!bool.
+
+
+3.03 (2006-06-19)
+-----------------
+
+* Fix Python 2.5 compatibility issues.
+* Fix numerous bugs in the float handling.
+* Fix scanning some ill-formed documents.
+* Other minor fixes.
+
+
+3.02 (2006-05-15)
+-----------------
+
+* Fix win32 installer. Apparently bdist_wininst does not work well
+ under Linux.
+* Fix a bug in add_path_resolver.
+* Add the yaml-highlight example. Try to run on a color terminal:
+ `python yaml_hl.py <any_document.yaml`.
+
+
+3.01 (2006-05-07)
+-----------------
+
+* Initial release. The version number reflects the codename
+ of the project (PyYAML 3000) and differentiates it from
+ the abandoned PyYaml module.
+
diff --git a/third_party/python/PyYAML/LICENSE b/third_party/python/PyYAML/LICENSE
new file mode 100644
index 0000000000..3d82c281ee
--- /dev/null
+++ b/third_party/python/PyYAML/LICENSE
@@ -0,0 +1,20 @@
+Copyright (c) 2017-2020 Ingy döt Net
+Copyright (c) 2006-2016 Kirill Simonov
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/python/PyYAML/PKG-INFO b/third_party/python/PyYAML/PKG-INFO
new file mode 100644
index 0000000000..bf47ef4b35
--- /dev/null
+++ b/third_party/python/PyYAML/PKG-INFO
@@ -0,0 +1,38 @@
+Metadata-Version: 1.1
+Name: PyYAML
+Version: 5.3.1
+Summary: YAML parser and emitter for Python
+Home-page: https://github.com/yaml/pyyaml
+Author: Kirill Simonov
+Author-email: xi@resolvent.net
+License: MIT
+Download-URL: https://pypi.org/project/PyYAML/
+Description: YAML is a data serialization format designed for human readability
+ and interaction with scripting languages. PyYAML is a YAML parser
+ and emitter for Python.
+
+ PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
+ support, capable extension API, and sensible error messages. PyYAML
+ supports standard YAML tags and provides Python-specific tags that
+ allow representing an arbitrary Python object.
+
+ PyYAML is applicable for a broad range of tasks from complex
+ configuration files to object serialization and persistence.
+Platform: Any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Cython
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: Markup
diff --git a/third_party/python/PyYAML/README b/third_party/python/PyYAML/README
new file mode 100644
index 0000000000..49c87e7642
--- /dev/null
+++ b/third_party/python/PyYAML/README
@@ -0,0 +1,43 @@
+PyYAML - The next generation YAML parser and emitter for Python.
+
+To install, type 'python setup.py install'.
+
+By default, the setup.py script checks whether LibYAML is installed
+and if so, builds and installs LibYAML bindings. To skip the check
+and force installation of LibYAML bindings, use the option '--with-libyaml':
+'python setup.py --with-libyaml install'. To disable the check and
+skip building and installing LibYAML bindings, use '--without-libyaml':
+'python setup.py --without-libyaml install'.
+
+When LibYAML bindings are installed, you may use the fast LibYAML-based
+parser and emitter as follows:
+
+ >>> yaml.load(stream, Loader=yaml.CLoader)
+ >>> yaml.dump(data, Dumper=yaml.CDumper)
+
+If you don't trust the input stream, you should use:
+
+ >>> yaml.safe_load(stream)
+
+PyYAML includes a comprehensive test suite. To run the tests,
+type 'python setup.py test'.
+
+For more information, check the PyYAML homepage:
+'https://github.com/yaml/pyyaml'.
+
+For PyYAML tutorial and reference, see:
+'http://pyyaml.org/wiki/PyYAMLDocumentation'.
+
+Discuss PyYAML with the maintainers in the IRC channel #pyyaml on irc.freenode.net.
+
+You may also use the YAML-Core mailing list:
+'http://lists.sourceforge.net/lists/listinfo/yaml-core'.
+
+Submit bug reports and feature requests to the PyYAML bug tracker:
+'https://github.com/yaml/pyyaml/issues'.
+
+The PyYAML module was written by Kirill Simonov <xi@resolvent.net>.
+It is currently maintained by the YAML and Python communities.
+
+PyYAML is released under the MIT license.
+See the file LICENSE for more details.
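(A short end-to-end sketch of the pattern the README describes: untrusted input
goes through safe_load(), while the LibYAML-based classes are used only when the
bindings were built. The document literal below is made up for illustration and
is not part of the vendored README.)

    import yaml

    document = """
    invoice: 34843
    date: 2001-01-23
    product:
      - {sku: BL394D, quantity: 4}
    """

    # Untrusted input: safe_load() only constructs plain Python types.
    data = yaml.safe_load(document)

    # Trusted round-tripping: prefer CDumper when the LibYAML bindings exist.
    Dumper = getattr(yaml, "CDumper", yaml.Dumper)
    print(yaml.dump(data, Dumper=Dumper))
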
diff --git a/third_party/python/PyYAML/examples/pygments-lexer/example.yaml b/third_party/python/PyYAML/examples/pygments-lexer/example.yaml
new file mode 100644
index 0000000000..9c0ed9d082
--- /dev/null
+++ b/third_party/python/PyYAML/examples/pygments-lexer/example.yaml
@@ -0,0 +1,302 @@
+
+#
+# Examples from the Preview section of the YAML specification
+# (http://yaml.org/spec/1.2/#Preview)
+#
+
+# Sequence of scalars
+---
+- Mark McGwire
+- Sammy Sosa
+- Ken Griffey
+
+# Mapping scalars to scalars
+---
+hr: 65 # Home runs
+avg: 0.278 # Batting average
+rbi: 147 # Runs Batted In
+
+# Mapping scalars to sequences
+---
+american:
+ - Boston Red Sox
+ - Detroit Tigers
+ - New York Yankees
+national:
+ - New York Mets
+ - Chicago Cubs
+ - Atlanta Braves
+
+# Sequence of mappings
+---
+-
+ name: Mark McGwire
+ hr: 65
+ avg: 0.278
+-
+ name: Sammy Sosa
+ hr: 63
+ avg: 0.288
+
+# Sequence of sequences
+---
+- [name , hr, avg ]
+- [Mark McGwire, 65, 0.278]
+- [Sammy Sosa , 63, 0.288]
+
+# Mapping of mappings
+---
+Mark McGwire: {hr: 65, avg: 0.278}
+Sammy Sosa: {
+ hr: 63,
+ avg: 0.288
+ }
+
+# Two documents in a stream
+--- # Ranking of 1998 home runs
+- Mark McGwire
+- Sammy Sosa
+- Ken Griffey
+--- # Team ranking
+- Chicago Cubs
+- St Louis Cardinals
+
+# Documents with the end indicator
+---
+time: 20:03:20
+player: Sammy Sosa
+action: strike (miss)
+...
+---
+time: 20:03:47
+player: Sammy Sosa
+action: grand slam
+...
+
+# Comments
+---
+hr: # 1998 hr ranking
+ - Mark McGwire
+ - Sammy Sosa
+rbi:
+ # 1998 rbi ranking
+ - Sammy Sosa
+ - Ken Griffey
+
+# Anchors and aliases
+---
+hr:
+ - Mark McGwire
+ # Following node labeled SS
+ - &SS Sammy Sosa
+rbi:
+ - *SS # Subsequent occurrence
+ - Ken Griffey
+
+# Mapping between sequences
+---
+? - Detroit Tigers
+ - Chicago cubs
+:
+ - 2001-07-23
+? [ New York Yankees,
+ Atlanta Braves ]
+: [ 2001-07-02, 2001-08-12,
+ 2001-08-14 ]
+
+# Inline nested mapping
+---
+# products purchased
+- item : Super Hoop
+ quantity: 1
+- item : Basketball
+ quantity: 4
+- item : Big Shoes
+ quantity: 1
+
+# Literal scalars
+--- | # ASCII art
+ \//||\/||
+ // || ||__
+
+# Folded scalars
+--- >
+ Mark McGwire's
+ year was crippled
+ by a knee injury.
+
+# Preserved indented block in a folded scalar
+---
+>
+ Sammy Sosa completed another
+ fine season with great stats.
+
+ 63 Home Runs
+ 0.288 Batting Average
+
+ What a year!
+
+# Indentation determines scope
+---
+name: Mark McGwire
+accomplishment: >
+ Mark set a major league
+ home run record in 1998.
+stats: |
+ 65 Home Runs
+ 0.278 Batting Average
+
+# Quoted scalars
+---
+unicode: "Sosa did fine.\u263A"
+control: "\b1998\t1999\t2000\n"
+hex esc: "\x0d\x0a is \r\n"
+single: '"Howdy!" he cried.'
+quoted: ' # not a ''comment''.'
+tie-fighter: '|\-*-/|'
+
+# Multi-line flow scalars
+---
+plain:
+ This unquoted scalar
+ spans many lines.
+quoted: "So does this
+ quoted scalar.\n"
+
+# Integers
+---
+canonical: 12345
+decimal: +12_345
+sexagesimal: 3:25:45
+octal: 014
+hexadecimal: 0xC
+
+# Floating point
+---
+canonical: 1.23015e+3
+exponential: 12.3015e+02
+sexagesimal: 20:30.15
+fixed: 1_230.15
+negative infinity: -.inf
+not a number: .NaN
+
+# Miscellaneous
+---
+null: ~
+true: boolean
+false: boolean
+string: '12345'
+
+# Timestamps
+---
+canonical: 2001-12-15T02:59:43.1Z
+iso8601: 2001-12-14t21:59:43.10-05:00
+spaced: 2001-12-14 21:59:43.10 -5
+date: 2002-12-14
+
+# Various explicit tags
+---
+not-date: !!str 2002-04-28
+picture: !!binary |
+ R0lGODlhDAAMAIQAAP//9/X
+ 17unp5WZmZgAAAOfn515eXv
+ Pz7Y6OjuDg4J+fn5OTk6enp
+ 56enmleECcgggoBADs=
+application specific tag: !something |
+ The semantics of the tag
+ above may be different for
+ different documents.
+
+# Global tags
+%TAG ! tag:clarkevans.com,2002:
+--- !shape
+ # Use the ! handle for presenting
+ # tag:clarkevans.com,2002:circle
+- !circle
+ center: &ORIGIN {x: 73, y: 129}
+ radius: 7
+- !line
+ start: *ORIGIN
+ finish: { x: 89, y: 102 }
+- !label
+ start: *ORIGIN
+ color: 0xFFEEBB
+ text: Pretty vector drawing.
+
+# Unordered sets
+--- !!set
+# sets are represented as a
+# mapping where each key is
+# associated with the empty string
+? Mark McGwire
+? Sammy Sosa
+? Ken Griff
+
+# Ordered mappings
+--- !!omap
+# ordered maps are represented as
+# a sequence of mappings, with
+# each mapping having one key
+- Mark McGwire: 65
+- Sammy Sosa: 63
+- Ken Griffy: 58
+
+# Full length example
+--- !<tag:clarkevans.com,2002:invoice>
+invoice: 34843
+date : 2001-01-23
+bill-to: &id001
+ given : Chris
+ family : Dumars
+ address:
+ lines: |
+ 458 Walkman Dr.
+ Suite #292
+ city : Royal Oak
+ state : MI
+ postal : 48046
+ship-to: *id001
+product:
+ - sku : BL394D
+ quantity : 4
+ description : Basketball
+ price : 450.00
+ - sku : BL4438H
+ quantity : 1
+ description : Super Hoop
+ price : 2392.00
+tax : 251.42
+total: 4443.52
+comments:
+ Late afternoon is best.
+ Backup contact is Nancy
+ Billsmer @ 338-4338.
+
+# Another full-length example
+---
+Time: 2001-11-23 15:01:42 -5
+User: ed
+Warning:
+ This is an error message
+ for the log file
+---
+Time: 2001-11-23 15:02:31 -5
+User: ed
+Warning:
+ A slightly different error
+ message.
+---
+Date: 2001-11-23 15:03:17 -5
+User: ed
+Fatal:
+ Unknown variable "bar"
+Stack:
+ - file: TopClass.py
+ line: 23
+ code: |
+ x = MoreObject("345\n")
+ - file: MoreClass.py
+ line: 58
+ code: |-
+ foo = bar
+
diff --git a/third_party/python/PyYAML/examples/pygments-lexer/yaml.py b/third_party/python/PyYAML/examples/pygments-lexer/yaml.py
new file mode 100644
index 0000000000..1a1bbdeb3a
--- /dev/null
+++ b/third_party/python/PyYAML/examples/pygments-lexer/yaml.py
@@ -0,0 +1,431 @@
+
+"""
+yaml.py
+
+Lexer for YAML, a human-friendly data serialization language
+(http://yaml.org/).
+
+Written by Kirill Simonov <xi@resolvent.net>.
+
+License: Whatever suitable for inclusion into the Pygments package.
+"""
+
+from pygments.lexer import \
+ ExtendedRegexLexer, LexerContext, include, bygroups
+from pygments.token import \
+ Text, Comment, Punctuation, Name, Literal
+
+__all__ = ['YAMLLexer']
+
+
+class YAMLLexerContext(LexerContext):
+ """Indentation context for the YAML lexer."""
+
+ def __init__(self, *args, **kwds):
+ super(YAMLLexerContext, self).__init__(*args, **kwds)
+ self.indent_stack = []
+ self.indent = -1
+ self.next_indent = 0
+ self.block_scalar_indent = None
+
+
+def something(TokenClass):
+ """Do not produce empty tokens."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if not text:
+ return
+ yield match.start(), TokenClass, text
+ context.pos = match.end()
+ return callback
+
+def reset_indent(TokenClass):
+ """Reset the indentation levels."""
+ def callback(lexer, match, context):
+ text = match.group()
+ context.indent_stack = []
+ context.indent = -1
+ context.next_indent = 0
+ context.block_scalar_indent = None
+ yield match.start(), TokenClass, text
+ context.pos = match.end()
+ return callback
+
+def save_indent(TokenClass, start=False):
+ """Save a possible indentation level."""
+ def callback(lexer, match, context):
+ text = match.group()
+ extra = ''
+ if start:
+ context.next_indent = len(text)
+ if context.next_indent < context.indent:
+ while context.next_indent < context.indent:
+ context.indent = context.indent_stack.pop()
+ if context.next_indent > context.indent:
+ extra = text[context.indent:]
+ text = text[:context.indent]
+ else:
+ context.next_indent += len(text)
+ if text:
+ yield match.start(), TokenClass, text
+ if extra:
+ yield match.start()+len(text), TokenClass.Error, extra
+ context.pos = match.end()
+ return callback
+
+def set_indent(TokenClass, implicit=False):
+ """Set the previously saved indentation level."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if context.indent < context.next_indent:
+ context.indent_stack.append(context.indent)
+ context.indent = context.next_indent
+ if not implicit:
+ context.next_indent += len(text)
+ yield match.start(), TokenClass, text
+ context.pos = match.end()
+ return callback
+
+def set_block_scalar_indent(TokenClass):
+ """Set an explicit indentation level for a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ context.block_scalar_indent = None
+ if not text:
+ return
+ increment = match.group(1)
+ if increment:
+ current_indent = max(context.indent, 0)
+ increment = int(increment)
+ context.block_scalar_indent = current_indent + increment
+ if text:
+ yield match.start(), TokenClass, text
+ context.pos = match.end()
+ return callback
+
+def parse_block_scalar_empty_line(IndentTokenClass, ContentTokenClass):
+ """Process an empty line in a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if (context.block_scalar_indent is None or
+ len(text) <= context.block_scalar_indent):
+ if text:
+ yield match.start(), IndentTokenClass, text
+ else:
+ indentation = text[:context.block_scalar_indent]
+ content = text[context.block_scalar_indent:]
+ yield match.start(), IndentTokenClass, indentation
+ yield (match.start()+context.block_scalar_indent,
+ ContentTokenClass, content)
+ context.pos = match.end()
+ return callback
+
+def parse_block_scalar_indent(TokenClass):
+ """Process indentation spaces in a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if context.block_scalar_indent is None:
+ if len(text) <= max(context.indent, 0):
+ context.stack.pop()
+ context.stack.pop()
+ return
+ context.block_scalar_indent = len(text)
+ else:
+ if len(text) < context.block_scalar_indent:
+ context.stack.pop()
+ context.stack.pop()
+ return
+ if text:
+ yield match.start(), TokenClass, text
+ context.pos = match.end()
+ return callback
+
+def parse_plain_scalar_indent(TokenClass):
+ """Process indentation spaces in a plain scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if len(text) <= context.indent:
+ context.stack.pop()
+ context.stack.pop()
+ return
+ if text:
+ yield match.start(), TokenClass, text
+ context.pos = match.end()
+ return callback
+
+
+class YAMLLexer(ExtendedRegexLexer):
+ """Lexer for the YAML language."""
+
+ name = 'YAML'
+ aliases = ['yaml']
+ filenames = ['*.yaml', '*.yml']
+ mimetypes = ['text/x-yaml']
+
+ tokens = {
+
+ # the root rules
+ 'root': [
+ # ignored whitespaces
+ (r'[ ]+(?=#|$)', Text.Blank),
+ # line breaks
+ (r'\n+', Text.Break),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # the '%YAML' directive
+ (r'^%YAML(?=[ ]|$)', reset_indent(Name.Directive),
+ 'yaml-directive'),
+ # the %TAG directive
+ (r'^%TAG(?=[ ]|$)', reset_indent(Name.Directive),
+ 'tag-directive'),
+ # document start and document end indicators
+ (r'^(?:---|\.\.\.)(?=[ ]|$)',
+ reset_indent(Punctuation.Document), 'block-line'),
+ # indentation spaces
+ (r'[ ]*(?![ \t\n\r\f\v]|$)',
+ save_indent(Text.Indent, start=True),
+ ('block-line', 'indentation')),
+ ],
+
+ # trailing whitespaces after directives or a block scalar indicator
+ 'ignored-line': [
+ # ignored whitespaces
+ (r'[ ]+(?=#|$)', Text.Blank),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # line break
+ (r'\n', Text.Break, '#pop:2'),
+ ],
+
+ # the %YAML directive
+ 'yaml-directive': [
+ # the version number
+ (r'([ ]+)([0-9]+\.[0-9]+)',
+ bygroups(Text.Blank, Literal.Version), 'ignored-line'),
+ ],
+
+ # the %TAG directive
+ 'tag-directive': [
+ # a tag handle and the corresponding prefix
+ (r'([ ]+)(!|![0-9A-Za-z_-]*!)'
+ r'([ ]+)(!|!?[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)',
+ bygroups(Text.Blank, Name.Type, Text.Blank, Name.Type),
+ 'ignored-line'),
+ ],
+
+ # block scalar indicators and indentation spaces
+ 'indentation': [
+ # trailing whitespaces are ignored
+ (r'[ ]*$', something(Text.Blank), '#pop:2'),
+ # whitespaces preceding block collection indicators
+ (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text.Indent)),
+ # block collection indicators
+ (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
+ # the beginning of a block line
+ (r'[ ]*', save_indent(Text.Indent), '#pop'),
+ ],
+
+ # an indented line in the block context
+ 'block-line': [
+ # the line end
+ (r'[ ]*(?=#|$)', something(Text.Blank), '#pop'),
+ # whitespaces separating tokens
+ (r'[ ]+', Text.Blank),
+ # tags, anchors and aliases,
+ include('descriptors'),
+ # block collections and scalars
+ include('block-nodes'),
+ # flow collections and quoted scalars
+ include('flow-nodes'),
+ # a plain scalar
+ (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`-]|[?:-][^ \t\n\r\f\v])',
+ something(Literal.Scalar.Plain),
+ 'plain-scalar-in-block-context'),
+ ],
+
+ # tags, anchors, aliases
+ 'descriptors' : [
+ # a full-form tag
+ (r'!<[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+>', Name.Type),
+ # a tag in the form '!', '!suffix' or '!handle!suffix'
+ (r'!(?:[0-9A-Za-z_-]+)?'
+ r'(?:![0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)?', Name.Type),
+ # an anchor
+ (r'&[0-9A-Za-z_-]+', Name.Anchor),
+ # an alias
+ (r'\*[0-9A-Za-z_-]+', Name.Alias),
+ ],
+
+ # block collections and scalars
+ 'block-nodes': [
+ # implicit key
+ (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
+ # literal and folded scalars
+ (r'[|>]', Punctuation.Indicator,
+ ('block-scalar-content', 'block-scalar-header')),
+ ],
+
+ # flow collections and quoted scalars
+ 'flow-nodes': [
+ # a flow sequence
+ (r'\[', Punctuation.Indicator, 'flow-sequence'),
+ # a flow mapping
+ (r'\{', Punctuation.Indicator, 'flow-mapping'),
+ # a single-quoted scalar
+ (r'\'', Literal.Scalar.Flow.Quote, 'single-quoted-scalar'),
+ # a double-quoted scalar
+ (r'\"', Literal.Scalar.Flow.Quote, 'double-quoted-scalar'),
+ ],
+
+ # the content of a flow collection
+ 'flow-collection': [
+ # whitespaces
+ (r'[ ]+', Text.Blank),
+ # line breaks
+ (r'\n+', Text.Break),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # simple indicators
+ (r'[?:,]', Punctuation.Indicator),
+ # tags, anchors and aliases
+ include('descriptors'),
+ # nested collections and quoted scalars
+ include('flow-nodes'),
+ # a plain scalar
+ (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`])',
+ something(Literal.Scalar.Plain),
+ 'plain-scalar-in-flow-context'),
+ ],
+
+ # a flow sequence indicated by '[' and ']'
+ 'flow-sequence': [
+ # include flow collection rules
+ include('flow-collection'),
+ # the closing indicator
+ (r'\]', Punctuation.Indicator, '#pop'),
+ ],
+
+ # a flow mapping indicated by '{' and '}'
+ 'flow-mapping': [
+ # include flow collection rules
+ include('flow-collection'),
+ # the closing indicator
+ (r'\}', Punctuation.Indicator, '#pop'),
+ ],
+
+ # block scalar lines
+ 'block-scalar-content': [
+ # line break
+ (r'\n', Text.Break),
+ # empty line
+ (r'^[ ]+$',
+ parse_block_scalar_empty_line(Text.Indent,
+ Literal.Scalar.Block)),
+ # indentation spaces (we may leave the state here)
+ (r'^[ ]*', parse_block_scalar_indent(Text.Indent)),
+ # line content
+ (r'[^\n\r\f\v]+', Literal.Scalar.Block),
+ ],
+
+ # the header of a literal or folded scalar
+ 'block-scalar-header': [
+ # indentation indicator followed by chomping flag
+ (r'([1-9])?[+-]?(?=[ ]|$)',
+ set_block_scalar_indent(Punctuation.Indicator),
+ 'ignored-line'),
+ # chomping flag followed by indentation indicator
+ (r'[+-]?([1-9])?(?=[ ]|$)',
+ set_block_scalar_indent(Punctuation.Indicator),
+ 'ignored-line'),
+ ],
+
+ # ignored and regular whitespaces in quoted scalars
+ 'quoted-scalar-whitespaces': [
+ # leading and trailing whitespaces are ignored
+ (r'^[ ]+|[ ]+$', Text.Blank),
+ # line breaks are ignored
+ (r'\n+', Text.Break),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Literal.Scalar.Flow),
+ ],
+
+ # single-quoted scalars
+ 'single-quoted-scalar': [
+ # include whitespace and line break rules
+ include('quoted-scalar-whitespaces'),
+ # escaping of the quote character
+ (r'\'\'', Literal.Scalar.Flow.Escape),
+ # regular non-whitespace characters
+ (r'[^ \t\n\r\f\v\']+', Literal.Scalar.Flow),
+ # the closing quote
+ (r'\'', Literal.Scalar.Flow.Quote, '#pop'),
+ ],
+
+ # double-quoted scalars
+ 'double-quoted-scalar': [
+ # include whitespace and line break rules
+ include('quoted-scalar-whitespaces'),
+ # escaping of special characters
+ (r'\\[0abt\tn\nvfre "\\N_LP]', Literal.Scalar.Flow.Escape),
+ # escape codes
+ (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
+ Literal.Scalar.Flow.Escape),
+ # regular non-whitespace characters
+ (r'[^ \t\n\r\f\v\"\\]+', Literal.Scalar.Flow),
+ # the closing quote
+ (r'"', Literal.Scalar.Flow.Quote, '#pop'),
+ ],
+
+ # the beginning of a new line while scanning a plain scalar
+ 'plain-scalar-in-block-context-new-line': [
+ # empty lines
+ (r'^[ ]+$', Text.Blank),
+ # line breaks
+ (r'\n+', Text.Break),
+ # document start and document end indicators
+ (r'^(?=---|\.\.\.)', something(Punctuation.Document), '#pop:3'),
+ # indentation spaces (we may leave the block line state here)
+ (r'^[ ]*', parse_plain_scalar_indent(Text.Indent), '#pop'),
+ ],
+
+ # a plain scalar in the block context
+ 'plain-scalar-in-block-context': [
+ # the scalar ends with the ':' indicator
+ (r'[ ]*(?=:[ ]|:$)', something(Text.Blank), '#pop'),
+ # the scalar ends with whitespaces followed by a comment
+ (r'[ ]+(?=#)', Text.Blank, '#pop'),
+ # trailing whitespaces are ignored
+ (r'[ ]+$', Text.Blank),
+ # line breaks are ignored
+ (r'\n+', Text.Break, 'plain-scalar-in-block-context-new-line'),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Literal.Scalar.Plain),
+ # regular non-whitespace characters
+ (r'(?::(?![ \t\n\r\f\v])|[^ \t\n\r\f\v:])+',
+ Literal.Scalar.Plain),
+ ],
+
+ # a plain scalar in the flow context
+ 'plain-scalar-in-flow-context': [
+ # the scalar ends with an indicator character
+ (r'[ ]*(?=[,:?\[\]{}])', something(Text.Blank), '#pop'),
+ # the scalar ends with a comment
+ (r'[ ]+(?=#)', Text.Blank, '#pop'),
+ # leading and trailing whitespaces are ignored
+ (r'^[ ]+|[ ]+$', Text.Blank),
+ # line breaks are ignored
+ (r'\n+', Text.Break),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Literal.Scalar.Plain),
+ # regular non-whitespace characters
+ (r'[^ \t\n\r\f\v,:?\[\]{}]+', Literal.Scalar.Plain),
+ ],
+
+ }
+
+ def get_tokens_unprocessed(self, text=None, context=None):
+ if context is None:
+ context = YAMLLexerContext(text, 0)
+ return super(YAMLLexer, self).get_tokens_unprocessed(text, context)
+
+
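(A hedged usage sketch, not part of the vendored example: once the lexer above is
importable, it plugs into the standard Pygments pipeline. The module name in the
import is hypothetical, since the file ships under examples/pygments-lexer/ as
yaml.py and would shadow the real yaml package if placed on sys.path unchanged.)

    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    # Hypothetical import path; assumes the example module was copied/renamed.
    from pygments_yaml_example import YAMLLexer

    source = "key: value\nitems:\n  - 1\n  - 2\n"
    # Renders the YAML source with ANSI colours on a terminal.
    print(highlight(source, YAMLLexer(), TerminalFormatter()))
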
diff --git a/third_party/python/PyYAML/examples/yaml-highlight/yaml_hl.cfg b/third_party/python/PyYAML/examples/yaml-highlight/yaml_hl.cfg
new file mode 100644
index 0000000000..69bb847764
--- /dev/null
+++ b/third_party/python/PyYAML/examples/yaml-highlight/yaml_hl.cfg
@@ -0,0 +1,115 @@
+%YAML 1.1
+---
+
+ascii:
+
+ header: "\e[0;1;30;40m"
+
+ footer: "\e[0m"
+
+ tokens:
+ stream-start:
+ stream-end:
+ directive: { start: "\e[35m", end: "\e[0;1;30;40m" }
+ document-start: { start: "\e[35m", end: "\e[0;1;30;40m" }
+ document-end: { start: "\e[35m", end: "\e[0;1;30;40m" }
+ block-sequence-start:
+ block-mapping-start:
+ block-end:
+ flow-sequence-start: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ flow-mapping-start: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ flow-sequence-end: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ flow-mapping-end: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ key: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ value: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ block-entry: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ flow-entry: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ alias: { start: "\e[32m", end: "\e[0;1;30;40m" }
+ anchor: { start: "\e[32m", end: "\e[0;1;30;40m" }
+ tag: { start: "\e[32m", end: "\e[0;1;30;40m" }
+ scalar: { start: "\e[36m", end: "\e[0;1;30;40m" }
+
+ replaces:
+ - "\r\n": "\n"
+ - "\r": "\n"
+ - "\n": "\n"
+ - "\x85": "\n"
+ - "\u2028": "\n"
+ - "\u2029": "\n"
+
+html: &html
+
+ tokens:
+ stream-start:
+ stream-end:
+ directive: { start: <code class="directive_token">, end: </code> }
+ document-start: { start: <code class="document_start_token">, end: </code> }
+ document-end: { start: <code class="document_end_token">, end: </code> }
+ block-sequence-start:
+ block-mapping-start:
+ block-end:
+ flow-sequence-start: { start: <code class="delimiter_token">, end: </code> }
+ flow-mapping-start: { start: <code class="delimiter_token">, end: </code> }
+ flow-sequence-end: { start: <code class="delimiter_token">, end: </code> }
+ flow-mapping-end: { start: <code class="delimiter_token">, end: </code> }
+ key: { start: <code class="delimiter_token">, end: </code> }
+ value: { start: <code class="delimiter_token">, end: </code> }
+ block-entry: { start: <code class="delimiter_token">, end: </code> }
+ flow-entry: { start: <code class="delimiter_token">, end: </code> }
+ alias: { start: <code class="anchor_token">, end: </code> }
+ anchor: { start: <code class="anchor_token">, end: </code> }
+ tag: { start: <code class="tag_token">, end: </code> }
+ scalar: { start: <code class="scalar_token">, end: </code> }
+
+ events:
+ stream-start: { start: <pre class="yaml_stream"> }
+ stream-end: { end: </pre> }
+ document-start: { start: <span class="document"> }
+ document-end: { end: </span> }
+ sequence-start: { start: <span class="sequence"> }
+ sequence-end: { end: </span> }
+ mapping-start: { start: <span class="mapping"> }
+ mapping-end: { end: </span> }
+ scalar: { start: <span class="scalar">, end: </span> }
+
+ replaces:
+ - "\r\n": "\n"
+ - "\r": "\n"
+ - "\n": "\n"
+ - "\x85": "\n"
+ - "\u2028": "\n"
+ - "\u2029": "\n"
+ - "&": "&amp;"
+ - "<": "&lt;"
+ - ">": "&gt;"
+
+html-page:
+
+ header: |
+ <html>
+ <head>
+ <title>A YAML stream</title>
+ <style type="text/css">
+ .document { background: #FFF }
+ .sequence { background: #EEF }
+ .mapping { background: #EFE }
+ .scalar { background: #FEE }
+ .directive_token { color: #C0C }
+ .document_start_token { color: #C0C; font-weight: bold }
+ .document_end_token { color: #C0C; font-weight: bold }
+ .delimiter_token { color: #600; font-weight: bold }
+ .anchor_token { color: #090 }
+ .tag_token { color: #090 }
+ .scalar_token { color: #000 }
+ .yaml_stream { color: #999 }
+ </style>
+ <body>
+
+ footer: |
+ </body>
+ </html>
+
+ <<: *html
+
+
+# vim: ft=yaml
diff --git a/third_party/python/PyYAML/examples/yaml-highlight/yaml_hl.py b/third_party/python/PyYAML/examples/yaml-highlight/yaml_hl.py
new file mode 100755
index 0000000000..96e0ae7b1b
--- /dev/null
+++ b/third_party/python/PyYAML/examples/yaml-highlight/yaml_hl.py
@@ -0,0 +1,114 @@
+#!/usr/bin/python
+
+import yaml, codecs, sys, os.path, optparse
+
+class Style:
+
+ def __init__(self, header=None, footer=None,
+ tokens=None, events=None, replaces=None):
+ self.header = header
+ self.footer = footer
+ self.replaces = replaces
+ self.substitutions = {}
+ for domain, Class in [(tokens, 'Token'), (events, 'Event')]:
+ if not domain:
+ continue
+ for key in domain:
+ name = ''.join([part.capitalize() for part in key.split('-')])
+ cls = getattr(yaml, '%s%s' % (name, Class))
+ value = domain[key]
+ if not value:
+ continue
+ start = value.get('start')
+ end = value.get('end')
+ if start:
+ self.substitutions[cls, -1] = start
+ if end:
+ self.substitutions[cls, +1] = end
+
+ def __setstate__(self, state):
+ self.__init__(**state)
+
+yaml.add_path_resolver(u'tag:yaml.org,2002:python/object:__main__.Style',
+ [None], dict)
+yaml.add_path_resolver(u'tag:yaml.org,2002:pairs',
+ [None, u'replaces'], list)
+
+class YAMLHighlight:
+
+ def __init__(self, options):
+ config = yaml.full_load(file(options.config, 'rb').read())
+ self.style = config[options.style]
+ if options.input:
+ self.input = file(options.input, 'rb')
+ else:
+ self.input = sys.stdin
+ if options.output:
+ self.output = file(options.output, 'wb')
+ else:
+ self.output = sys.stdout
+
+ def highlight(self):
+ input = self.input.read()
+ if input.startswith(codecs.BOM_UTF16_LE):
+ input = unicode(input, 'utf-16-le')
+ elif input.startswith(codecs.BOM_UTF16_BE):
+ input = unicode(input, 'utf-16-be')
+ else:
+ input = unicode(input, 'utf-8')
+ substitutions = self.style.substitutions
+ tokens = yaml.scan(input)
+ events = yaml.parse(input)
+ markers = []
+ number = 0
+ for token in tokens:
+ number += 1
+ if token.start_mark.index != token.end_mark.index:
+ cls = token.__class__
+ if (cls, -1) in substitutions:
+ markers.append([token.start_mark.index, +2, number, substitutions[cls, -1]])
+ if (cls, +1) in substitutions:
+ markers.append([token.end_mark.index, -2, number, substitutions[cls, +1]])
+ number = 0
+ for event in events:
+ number += 1
+ cls = event.__class__
+ if (cls, -1) in substitutions:
+ markers.append([event.start_mark.index, +1, number, substitutions[cls, -1]])
+ if (cls, +1) in substitutions:
+ markers.append([event.end_mark.index, -1, number, substitutions[cls, +1]])
+ markers.sort()
+ markers.reverse()
+ chunks = []
+ position = len(input)
+ for index, weight1, weight2, substitution in markers:
+ if index < position:
+ chunk = input[index:position]
+ for substring, replacement in self.style.replaces:
+ chunk = chunk.replace(substring, replacement)
+ chunks.append(chunk)
+ position = index
+ chunks.append(substitution)
+ chunks.reverse()
+ result = u''.join(chunks)
+ if self.style.header:
+ self.output.write(self.style.header)
+ self.output.write(result.encode('utf-8'))
+ if self.style.footer:
+ self.output.write(self.style.footer)
+
+if __name__ == '__main__':
+ parser = optparse.OptionParser()
+ parser.add_option('-s', '--style', dest='style', default='ascii',
+ help="specify the highlighting style", metavar='STYLE')
+ parser.add_option('-c', '--config', dest='config',
+ default=os.path.join(os.path.dirname(sys.argv[0]), 'yaml_hl.cfg'),
+ help="set an alternative configuration file", metavar='CONFIG')
+ parser.add_option('-i', '--input', dest='input', default=None,
+ help="set the input file (default: stdin)", metavar='FILE')
+ parser.add_option('-o', '--output', dest='output', default=None,
+ help="set the output file (default: stdout)", metavar='FILE')
+ (options, args) = parser.parse_args()
+ hl = YAMLHighlight(options)
+ hl.highlight()
+
diff --git a/third_party/python/PyYAML/ext/_yaml.c b/third_party/python/PyYAML/ext/_yaml.c
new file mode 100644
index 0000000000..6784d813f3
--- /dev/null
+++ b/third_party/python/PyYAML/ext/_yaml.c
@@ -0,0 +1,28743 @@
+/* Generated by Cython 0.29.15 */
+
+#define PY_SSIZE_T_CLEAN
+#include "Python.h"
+#ifndef Py_PYTHON_H
+ #error Python headers needed to compile C extensions, please install development version of Python.
+#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
+ #error Cython requires Python 2.6+ or Python 3.3+.
+#else
+#define CYTHON_ABI "0_29_15"
+#define CYTHON_HEX_VERSION 0x001D0FF0
+#define CYTHON_FUTURE_DIVISION 0
+#include <stddef.h>
+#ifndef offsetof
+ #define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
+#endif
+#if !defined(WIN32) && !defined(MS_WINDOWS)
+ #ifndef __stdcall
+ #define __stdcall
+ #endif
+ #ifndef __cdecl
+ #define __cdecl
+ #endif
+ #ifndef __fastcall
+ #define __fastcall
+ #endif
+#endif
+#ifndef DL_IMPORT
+ #define DL_IMPORT(t) t
+#endif
+#ifndef DL_EXPORT
+ #define DL_EXPORT(t) t
+#endif
+#define __PYX_COMMA ,
+#ifndef HAVE_LONG_LONG
+ #if PY_VERSION_HEX >= 0x02070000
+ #define HAVE_LONG_LONG
+ #endif
+#endif
+#ifndef PY_LONG_LONG
+ #define PY_LONG_LONG LONG_LONG
+#endif
+#ifndef Py_HUGE_VAL
+ #define Py_HUGE_VAL HUGE_VAL
+#endif
+#ifdef PYPY_VERSION
+ #define CYTHON_COMPILING_IN_PYPY 1
+ #define CYTHON_COMPILING_IN_PYSTON 0
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #undef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 0
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #if PY_VERSION_HEX < 0x03050000
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #undef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 0
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #undef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 1
+ #undef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 0
+ #undef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 0
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
+ #undef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS 0
+ #undef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK 0
+#elif defined(PYSTON_VERSION)
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_PYSTON 1
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
+ #undef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS 0
+ #undef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK 0
+#else
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_PYSTON 0
+ #define CYTHON_COMPILING_IN_CPYTHON 1
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #if PY_VERSION_HEX < 0x02070000
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #elif !defined(CYTHON_USE_PYTYPE_LOOKUP)
+ #define CYTHON_USE_PYTYPE_LOOKUP 1
+ #endif
+ #if PY_MAJOR_VERSION < 3
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #if PY_VERSION_HEX < 0x02070000
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #elif !defined(CYTHON_USE_PYLONG_INTERNALS)
+ #define CYTHON_USE_PYLONG_INTERNALS 1
+ #endif
+ #ifndef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 1
+ #endif
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #if PY_VERSION_HEX < 0x030300F0
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #elif !defined(CYTHON_USE_UNICODE_WRITER)
+ #define CYTHON_USE_UNICODE_WRITER 1
+ #endif
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #ifndef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 1
+ #endif
+ #ifndef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 1
+ #endif
+ #ifndef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000)
+ #endif
+ #ifndef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1)
+ #endif
+ #ifndef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1)
+ #endif
+ #ifndef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3)
+ #endif
+#endif
+#if !defined(CYTHON_FAST_PYCCALL)
+#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)
+#endif
+#if CYTHON_USE_PYLONG_INTERNALS
+ #include "longintrepr.h"
+ #undef SHIFT
+ #undef BASE
+ #undef MASK
+ #ifdef SIZEOF_VOID_P
+ enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) };
+ #endif
+#endif
+#ifndef __has_attribute
+ #define __has_attribute(x) 0
+#endif
+#ifndef __has_cpp_attribute
+ #define __has_cpp_attribute(x) 0
+#endif
+#ifndef CYTHON_RESTRICT
+ #if defined(__GNUC__)
+ #define CYTHON_RESTRICT __restrict__
+ #elif defined(_MSC_VER) && _MSC_VER >= 1400
+ #define CYTHON_RESTRICT __restrict
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_RESTRICT restrict
+ #else
+ #define CYTHON_RESTRICT
+ #endif
+#endif
+#ifndef CYTHON_UNUSED
+# if defined(__GNUC__)
+# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+#endif
+#ifndef CYTHON_MAYBE_UNUSED_VAR
+# if defined(__cplusplus)
+ template<class T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { }
+# else
+# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x)
+# endif
+#endif
+#ifndef CYTHON_NCP_UNUSED
+# if CYTHON_COMPILING_IN_CPYTHON
+# define CYTHON_NCP_UNUSED
+# else
+# define CYTHON_NCP_UNUSED CYTHON_UNUSED
+# endif
+#endif
+#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
+#ifdef _MSC_VER
+ #ifndef _MSC_STDINT_H_
+ #if _MSC_VER < 1300
+ typedef unsigned char uint8_t;
+ typedef unsigned int uint32_t;
+ #else
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int32 uint32_t;
+ #endif
+ #endif
+#else
+ #include <stdint.h>
+#endif
+#ifndef CYTHON_FALLTHROUGH
+ #if defined(__cplusplus) && __cplusplus >= 201103L
+ #if __has_cpp_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH [[fallthrough]]
+ #elif __has_cpp_attribute(clang::fallthrough)
+ #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
+ #elif __has_cpp_attribute(gnu::fallthrough)
+ #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
+ #endif
+ #endif
+ #ifndef CYTHON_FALLTHROUGH
+ #if __has_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH __attribute__((fallthrough))
+ #else
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+ #if defined(__clang__ ) && defined(__apple_build_version__)
+ #if __apple_build_version__ < 7000000
+ #undef CYTHON_FALLTHROUGH
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+#endif
+
+#ifndef CYTHON_INLINE
+ #if defined(__clang__)
+ #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
+ #elif defined(__GNUC__)
+ #define CYTHON_INLINE __inline__
+ #elif defined(_MSC_VER)
+ #define CYTHON_INLINE __inline
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_INLINE inline
+ #else
+ #define CYTHON_INLINE
+ #endif
+#endif
+
+#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)
+ #define Py_OptimizeFlag 0
+#endif
+#define __PYX_BUILD_PY_SSIZE_T "n"
+#define CYTHON_FORMAT_SSIZE_T "z"
+#if PY_MAJOR_VERSION < 3
+ #define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+ #define __Pyx_DefaultClassType PyClass_Type
+#else
+ #define __Pyx_BUILTIN_MODULE_NAME "builtins"
+#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+#else
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+#endif
+ #define __Pyx_DefaultClassType PyType_Type
+#endif
+#ifndef Py_TPFLAGS_CHECKTYPES
+ #define Py_TPFLAGS_CHECKTYPES 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_INDEX
+ #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
+ #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_FINALIZE
+ #define Py_TPFLAGS_HAVE_FINALIZE 0
+#endif
+#ifndef METH_STACKLESS
+ #define METH_STACKLESS 0
+#endif
+#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)
+ #ifndef METH_FASTCALL
+ #define METH_FASTCALL 0x80
+ #endif
+ typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);
+ typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames);
+#else
+ #define __Pyx_PyCFunctionFast _PyCFunctionFast
+ #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords
+#endif
+#if CYTHON_FAST_PYCCALL
+#define __Pyx_PyFastCFunction_Check(func)\
+ ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)))))
+#else
+#define __Pyx_PyFastCFunction_Check(func) 0
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
+ #define PyObject_Malloc(s) PyMem_Malloc(s)
+ #define PyObject_Free(p) PyMem_Free(p)
+ #define PyObject_Realloc(p) PyMem_Realloc(p)
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1
+ #define PyMem_RawMalloc(n) PyMem_Malloc(n)
+ #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n)
+ #define PyMem_RawFree(p) PyMem_Free(p)
+#endif
+#if CYTHON_COMPILING_IN_PYSTON
+ #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co)
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno)
+#else
+ #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0)
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno)
+#endif
+#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#elif PY_VERSION_HEX >= 0x03060000
+ #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet()
+#elif PY_VERSION_HEX >= 0x03000000
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#else
+ #define __Pyx_PyThreadState_Current _PyThreadState_Current
+#endif
+#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT)
+#include "pythread.h"
+#define Py_tss_NEEDS_INIT 0
+typedef int Py_tss_t;
+static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
+ *key = PyThread_create_key();
+ return 0;
+}
+static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
+ Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
+ *key = Py_tss_NEEDS_INIT;
+ return key;
+}
+static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) {
+ PyObject_Free(key);
+}
+static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) {
+ return *key != Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) {
+ PyThread_delete_key(*key);
+ *key = Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
+ return PyThread_set_key_value(*key, value);
+}
+static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
+ return PyThread_get_key_value(*key);
+}
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
+#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
+#else
+#define __Pyx_PyDict_NewPresized(n) PyDict_New()
+#endif
+#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
+#else
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS
+#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)
+#else
+#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name)
+#endif
+#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
+ #define CYTHON_PEP393_ENABLED 1
+ #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
+ 0 : _PyUnicode_Ready((PyObject *)(op)))
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
+ #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
+ #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u)
+ #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
+ #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
+ #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
+#else
+ #define CYTHON_PEP393_ENABLED 0
+ #define PyUnicode_1BYTE_KIND 1
+ #define PyUnicode_2BYTE_KIND 2
+ #define PyUnicode_4BYTE_KIND 4
+ #define __Pyx_PyUnicode_READY(op) (0)
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u)
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i]))
+ #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111)
+ #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE))
+ #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u))
+ #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i]))
+ #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch)
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u))
+#endif
+#if CYTHON_COMPILING_IN_PYPY
+ #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b)
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b)
+#else
+ #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b)
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\
+ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b))
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains)
+ #define PyUnicode_Contains(u, s) PySequence_Contains(u, s)
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check)
+ #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type)
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format)
+ #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt)
+#endif
+#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b))
+#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b))
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b)
+#else
+ #define __Pyx_PyString_Format(a, b) PyString_Format(a, b)
+#endif
+#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII)
+ #define PyObject_ASCII(o) PyObject_Repr(o)
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyBaseString_Type PyUnicode_Type
+ #define PyStringObject PyUnicodeObject
+ #define PyString_Type PyUnicode_Type
+ #define PyString_Check PyUnicode_Check
+ #define PyString_CheckExact PyUnicode_CheckExact
+ #define PyObject_Unicode PyObject_Str
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
+ #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
+#else
+ #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj))
+ #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj))
+#endif
+#ifndef PySet_CheckExact
+ #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
+#endif
+#if CYTHON_ASSUME_SAFE_MACROS
+ #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
+#else
+ #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq)
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyIntObject PyLongObject
+ #define PyInt_Type PyLong_Type
+ #define PyInt_Check(op) PyLong_Check(op)
+ #define PyInt_CheckExact(op) PyLong_CheckExact(op)
+ #define PyInt_FromString PyLong_FromString
+ #define PyInt_FromUnicode PyLong_FromUnicode
+ #define PyInt_FromLong PyLong_FromLong
+ #define PyInt_FromSize_t PyLong_FromSize_t
+ #define PyInt_FromSsize_t PyLong_FromSsize_t
+ #define PyInt_AsLong PyLong_AsLong
+ #define PyInt_AS_LONG PyLong_AS_LONG
+ #define PyInt_AsSsize_t PyLong_AsSsize_t
+ #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask
+ #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
+ #define PyNumber_Int PyNumber_Long
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyBoolObject PyLongObject
+#endif
+#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY
+ #ifndef PyUnicode_InternFromString
+ #define PyUnicode_InternFromString(s) PyUnicode_FromString(s)
+ #endif
+#endif
+#if PY_VERSION_HEX < 0x030200A4
+ typedef long Py_hash_t;
+ #define __Pyx_PyInt_FromHash_t PyInt_FromLong
+ #define __Pyx_PyInt_AsHash_t PyInt_AsLong
+#else
+ #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
+ #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func))
+#else
+ #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
+#endif
+#if CYTHON_USE_ASYNC_SLOTS
+ #if PY_VERSION_HEX >= 0x030500B1
+ #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
+ #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
+ #else
+ #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
+ #endif
+#else
+ #define __Pyx_PyType_AsAsync(obj) NULL
+#endif
+#ifndef __Pyx_PyAsyncMethodsStruct
+ typedef struct {
+ unaryfunc am_await;
+ unaryfunc am_aiter;
+ unaryfunc am_anext;
+ } __Pyx_PyAsyncMethodsStruct;
+#endif
+
+#if defined(WIN32) || defined(MS_WINDOWS)
+ #define _USE_MATH_DEFINES
+#endif
+#include <math.h>
+#ifdef NAN
+#define __PYX_NAN() ((float) NAN)
+#else
+static CYTHON_INLINE float __PYX_NAN() {
+ float value;
+ memset(&value, 0xFF, sizeof(value));
+ return value;
+}
+#endif
+#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL)
+#define __Pyx_truncl trunc
+#else
+#define __Pyx_truncl truncl
+#endif
+
+
+#define __PYX_ERR(f_index, lineno, Ln_error) \
+{ \
+ __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \
+}
+
+#ifndef __PYX_EXTERN_C
+ #ifdef __cplusplus
+ #define __PYX_EXTERN_C extern "C"
+ #else
+ #define __PYX_EXTERN_C extern
+ #endif
+#endif
+
+#define __PYX_HAVE___yaml
+#define __PYX_HAVE_API___yaml
+/* Early includes */
+#include "_yaml.h"
+#ifdef _OPENMP
+#include <omp.h>
+#endif /* _OPENMP */
+
+#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)
+#define CYTHON_WITHOUT_ASSERTIONS
+#endif
+
+typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding;
+ const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
+
+#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
+#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0
+#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8)
+#define __PYX_DEFAULT_STRING_ENCODING ""
+#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
+#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
+#define __Pyx_uchar_cast(c) ((unsigned char)c)
+#define __Pyx_long_cast(x) ((long)x)
+#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\
+ (sizeof(type) < sizeof(Py_ssize_t)) ||\
+ (sizeof(type) > sizeof(Py_ssize_t) &&\
+ likely(v < (type)PY_SSIZE_T_MAX ||\
+ v == (type)PY_SSIZE_T_MAX) &&\
+ (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\
+ v == (type)PY_SSIZE_T_MIN))) ||\
+ (sizeof(type) == sizeof(Py_ssize_t) &&\
+ (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\
+ v == (type)PY_SSIZE_T_MAX))) )
+static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) {
+ return (size_t) i < (size_t) limit;
+}
+#if defined (__cplusplus) && __cplusplus >= 201103L
+ #include <cstdlib>
+ #define __Pyx_sst_abs(value) std::abs(value)
+#elif SIZEOF_INT >= SIZEOF_SIZE_T
+ #define __Pyx_sst_abs(value) abs(value)
+#elif SIZEOF_LONG >= SIZEOF_SIZE_T
+ #define __Pyx_sst_abs(value) labs(value)
+#elif defined (_MSC_VER)
+ #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value))
+#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define __Pyx_sst_abs(value) llabs(value)
+#elif defined (__GNUC__)
+ #define __Pyx_sst_abs(value) __builtin_llabs(value)
+#else
+ #define __Pyx_sst_abs(value) ((value<0) ? -value : value)
+#endif
+static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*);
+static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length);
+#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s))
+#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l)
+#define __Pyx_PyBytes_FromString PyBytes_FromString
+#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*);
+#if PY_MAJOR_VERSION < 3
+ #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString
+ #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
+#else
+ #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString
+ #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize
+#endif
+#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s)
+#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s)
+#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s)
+#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s)
+#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s)
+static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) {
+ const Py_UNICODE *u_end = u;
+ while (*u_end++) ;
+ return (size_t)(u_end - u - 1);
+}
+#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u))
+#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode
+#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode
+#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj)
+#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None)
+static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b);
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
+static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*);
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);
+#define __Pyx_PySequence_Tuple(obj)\
+ (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
+#if CYTHON_ASSUME_SAFE_MACROS
+#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
+#else
+#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x)
+#endif
+#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x))
+#if PY_MAJOR_VERSION >= 3
+#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x))
+#else
+#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x))
+#endif
+#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x))
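+/* The numeric coercion helpers above take a fast path when the argument
+   already has the exact target type (PyFloat_AS_DOUBLE, or a new reference
+   via __Pyx_NewRef) and otherwise fall back to the generic
+   PyNumber_* / PyFloat_AsDouble conversions. */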
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+static int __Pyx_sys_getdefaultencoding_not_ascii;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ PyObject* ascii_chars_u = NULL;
+ PyObject* ascii_chars_b = NULL;
+ const char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ if (strcmp(default_encoding_c, "ascii") == 0) {
+ __Pyx_sys_getdefaultencoding_not_ascii = 0;
+ } else {
+ char ascii_chars[128];
+ int c;
+ for (c = 0; c < 128; c++) {
+ ascii_chars[c] = c;
+ }
+ __Pyx_sys_getdefaultencoding_not_ascii = 1;
+ ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL);
+ if (!ascii_chars_u) goto bad;
+ ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL);
+ if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) {
+ PyErr_Format(
+ PyExc_ValueError,
+ "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.",
+ default_encoding_c);
+ goto bad;
+ }
+ Py_DECREF(ascii_chars_u);
+ Py_DECREF(ascii_chars_b);
+ }
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ Py_XDECREF(ascii_chars_u);
+ Py_XDECREF(ascii_chars_b);
+ return -1;
+}
+#endif
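+/* Under c_string_encoding=ascii on Python 2, module init verifies that the
+   interpreter's default encoding either is "ascii" or at least round-trips
+   all 128 ASCII code points; otherwise a ValueError is raised. */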
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL)
+#else
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL)
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+static char* __PYX_DEFAULT_STRING_ENCODING;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1);
+ if (!__PYX_DEFAULT_STRING_ENCODING) goto bad;
+ strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c);
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ return -1;
+}
+#endif
+#endif
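+/* Under c_string_encoding=default on Python 2, module init instead queries
+   sys.getdefaultencoding() once and keeps a malloc'ed copy of the name in
+   __PYX_DEFAULT_STRING_ENCODING for later char* <-> unicode conversions. */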
+
+
+/* Test for GCC > 2.95 */
+#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)))
+ #define likely(x) __builtin_expect(!!(x), 1)
+ #define unlikely(x) __builtin_expect(!!(x), 0)
+#else /* !__GNUC__ or GCC < 2.95 */
+ #define likely(x) (x)
+ #define unlikely(x) (x)
+#endif /* __GNUC__ */
+static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }
+
+static PyObject *__pyx_m = NULL;
+static PyObject *__pyx_d;
+static PyObject *__pyx_b;
+static PyObject *__pyx_cython_runtime = NULL;
+static PyObject *__pyx_empty_tuple;
+static PyObject *__pyx_empty_bytes;
+static PyObject *__pyx_empty_unicode;
+static int __pyx_lineno;
+static int __pyx_clineno = 0;
+static const char * __pyx_cfilenm= __FILE__;
+static const char *__pyx_filename;
+
+
+static const char *__pyx_f[] = {
+ "ext/_yaml.pyx",
+ "stringsource",
+};
+
+/*--- Type declarations ---*/
+struct __pyx_obj_5_yaml_Mark;
+struct __pyx_obj_5_yaml_CParser;
+struct __pyx_obj_5_yaml_CEmitter;
+
+/* "_yaml.pyx":64
+ * MappingNode = yaml.nodes.MappingNode
+ *
+ * cdef class Mark: # <<<<<<<<<<<<<<
+ * cdef readonly object name
+ * cdef readonly size_t index
+ */
+struct __pyx_obj_5_yaml_Mark {
+ PyObject_HEAD
+ PyObject *name;
+ size_t index;
+ size_t line;
+ size_t column;
+ PyObject *buffer;
+ PyObject *pointer;
+};
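+/* C layout of the `Mark` extension type: each `cdef readonly` attribute in
+   the .pyx source becomes a plain struct field after PyObject_HEAD. */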
+
+
+/* "_yaml.pyx":247
+ * # self.style = style
+ *
+ * cdef class CParser: # <<<<<<<<<<<<<<
+ *
+ * cdef yaml_parser_t parser
+ */
+struct __pyx_obj_5_yaml_CParser {
+ PyObject_HEAD
+ struct __pyx_vtabstruct_5_yaml_CParser *__pyx_vtab;
+ yaml_parser_t parser;
+ yaml_event_t parsed_event;
+ PyObject *stream;
+ PyObject *stream_name;
+ PyObject *current_token;
+ PyObject *current_event;
+ PyObject *anchors;
+ PyObject *stream_cache;
+ int stream_cache_len;
+ int stream_cache_pos;
+ int unicode_source;
+};
+
+
+/* "_yaml.pyx":935
+ * return 1
+ *
+ * cdef class CEmitter: # <<<<<<<<<<<<<<
+ *
+ * cdef yaml_emitter_t emitter
+ */
+struct __pyx_obj_5_yaml_CEmitter {
+ PyObject_HEAD
+ struct __pyx_vtabstruct_5_yaml_CEmitter *__pyx_vtab;
+ yaml_emitter_t emitter;
+ PyObject *stream;
+ int document_start_implicit;
+ int document_end_implicit;
+ PyObject *use_version;
+ PyObject *use_tags;
+ PyObject *serialized_nodes;
+ PyObject *anchors;
+ int last_alias_id;
+ int closed;
+ int dump_unicode;
+ PyObject *use_encoding;
+};
+
+
+
+/* "_yaml.pyx":247
+ * # self.style = style
+ *
+ * cdef class CParser: # <<<<<<<<<<<<<<
+ *
+ * cdef yaml_parser_t parser
+ */
+
+struct __pyx_vtabstruct_5_yaml_CParser {
+ PyObject *(*_parser_error)(struct __pyx_obj_5_yaml_CParser *);
+ PyObject *(*_scan)(struct __pyx_obj_5_yaml_CParser *);
+ PyObject *(*_token_to_object)(struct __pyx_obj_5_yaml_CParser *, yaml_token_t *);
+ PyObject *(*_parse)(struct __pyx_obj_5_yaml_CParser *);
+ PyObject *(*_event_to_object)(struct __pyx_obj_5_yaml_CParser *, yaml_event_t *);
+ PyObject *(*_compose_document)(struct __pyx_obj_5_yaml_CParser *);
+ PyObject *(*_compose_node)(struct __pyx_obj_5_yaml_CParser *, PyObject *, PyObject *);
+ PyObject *(*_compose_scalar_node)(struct __pyx_obj_5_yaml_CParser *, PyObject *);
+ PyObject *(*_compose_sequence_node)(struct __pyx_obj_5_yaml_CParser *, PyObject *);
+ PyObject *(*_compose_mapping_node)(struct __pyx_obj_5_yaml_CParser *, PyObject *);
+ int (*_parse_next_event)(struct __pyx_obj_5_yaml_CParser *);
+};
+static struct __pyx_vtabstruct_5_yaml_CParser *__pyx_vtabptr_5_yaml_CParser;
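+/* The vtable holds C function pointers for CParser's `cdef` methods, so code
+   in this module can call them directly instead of going through Python
+   attribute lookup. */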
+
+
+/* "_yaml.pyx":935
+ * return 1
+ *
+ * cdef class CEmitter: # <<<<<<<<<<<<<<
+ *
+ * cdef yaml_emitter_t emitter
+ */
+
+struct __pyx_vtabstruct_5_yaml_CEmitter {
+ PyObject *(*_emitter_error)(struct __pyx_obj_5_yaml_CEmitter *);
+ int (*_object_to_event)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *, yaml_event_t *);
+ int (*_anchor_node)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *);
+ int (*_serialize_node)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *, PyObject *, PyObject *);
+};
+static struct __pyx_vtabstruct_5_yaml_CEmitter *__pyx_vtabptr_5_yaml_CEmitter;
+
+/* --- Runtime support code (head) --- */
+/* Refnanny.proto */
+#ifndef CYTHON_REFNANNY
+ #define CYTHON_REFNANNY 0
+#endif
+#if CYTHON_REFNANNY
+ typedef struct {
+ void (*INCREF)(void*, PyObject*, int);
+ void (*DECREF)(void*, PyObject*, int);
+ void (*GOTREF)(void*, PyObject*, int);
+ void (*GIVEREF)(void*, PyObject*, int);
+ void* (*SetupContext)(const char*, int, const char*);
+ void (*FinishContext)(void**);
+ } __Pyx_RefNannyAPIStruct;
+ static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
+ static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname);
+ #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
+#ifdef WITH_THREAD
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)\
+ if (acquire_gil) {\
+ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
+ PyGILState_Release(__pyx_gilstate_save);\
+ } else {\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
+ }
+#else
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
+#endif
+ #define __Pyx_RefNannyFinishContext()\
+ __Pyx_RefNanny->FinishContext(&__pyx_refnanny)
+ #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)
+ #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)
+ #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)
+ #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)
+#else
+ #define __Pyx_RefNannyDeclarations
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)
+ #define __Pyx_RefNannyFinishContext()
+ #define __Pyx_INCREF(r) Py_INCREF(r)
+ #define __Pyx_DECREF(r) Py_DECREF(r)
+ #define __Pyx_GOTREF(r)
+ #define __Pyx_GIVEREF(r)
+ #define __Pyx_XINCREF(r) Py_XINCREF(r)
+ #define __Pyx_XDECREF(r) Py_XDECREF(r)
+ #define __Pyx_XGOTREF(r)
+ #define __Pyx_XGIVEREF(r)
+#endif
+#define __Pyx_XDECREF_SET(r, v) do {\
+ PyObject *tmp = (PyObject *) r;\
+ r = v; __Pyx_XDECREF(tmp);\
+ } while (0)
+#define __Pyx_DECREF_SET(r, v) do {\
+ PyObject *tmp = (PyObject *) r;\
+ r = v; __Pyx_DECREF(tmp);\
+ } while (0)
+#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0)
+#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0)
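+/* RefNanny is Cython's optional refcount-debugging layer: when enabled, every
+   INCREF/DECREF/GOTREF/GIVEREF is routed through the refnanny extension to
+   detect reference-count errors; when disabled, the macros collapse to plain
+   Py_INCREF/Py_DECREF or no-ops. */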
+
+/* PyObjectGetAttrStr.proto */
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);
+#else
+#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
+#endif
+
+/* GetBuiltinName.proto */
+static PyObject *__Pyx_GetBuiltinName(PyObject *name);
+
+/* RaiseArgTupleInvalid.proto */
+static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,
+ Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found);
+
+/* RaiseDoubleKeywords.proto */
+static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name);
+
+/* ParseKeywords.proto */
+static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\
+ PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\
+ const char* function_name);
+
+/* PyErrExceptionMatches.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err)
+static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err);
+#else
+#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err)
+#endif
+
+/* PyThreadStateGet.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate;
+#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current;
+#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type
+#else
+#define __Pyx_PyThreadState_declare
+#define __Pyx_PyThreadState_assign
+#define __Pyx_PyErr_Occurred() PyErr_Occurred()
+#endif
+
+/* PyErrFetchRestore.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL)
+#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb)
+static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
+static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL))
+#else
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#endif
+#else
+#define __Pyx_PyErr_Clear() PyErr_Clear()
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb)
+#endif
+
+/* GetAttr.proto */
+static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *);
+
+/* GetAttr3.proto */
+static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *);
+
+/* PyDictVersioning.proto */
+#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
+#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1)
+#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
+#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
+ (version_var) = __PYX_GET_DICT_VERSION(dict);\
+ (cache_var) = (value);
+#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\
+ static PY_UINT64_T __pyx_dict_version = 0;\
+ static PyObject *__pyx_dict_cached_value = NULL;\
+ if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\
+ (VAR) = __pyx_dict_cached_value;\
+ } else {\
+ (VAR) = __pyx_dict_cached_value = (LOOKUP);\
+ __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\
+ }\
+}
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj);
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj);
+static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version);
+#else
+#define __PYX_GET_DICT_VERSION(dict) (0)
+#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)
+#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP);
+#endif
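+/* When CPython exposes ma_version_tag, dict lookups can be cached per call
+   site and reused for as long as the dict's version tag is unchanged;
+   otherwise the lookup is performed on every access. */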
+
+/* GetModuleGlobalName.proto */
+#if CYTHON_USE_DICT_VERSIONS
+#define __Pyx_GetModuleGlobalName(var, name) {\
+ static PY_UINT64_T __pyx_dict_version = 0;\
+ static PyObject *__pyx_dict_cached_value = NULL;\
+ (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\
+ (likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\
+ __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\
+}
+#define __Pyx_GetModuleGlobalNameUncached(var, name) {\
+ PY_UINT64_T __pyx_dict_version;\
+ PyObject *__pyx_dict_cached_value;\
+ (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\
+}
+static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value);
+#else
+#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name)
+#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name)
+static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name);
+#endif
+
+/* GetTopmostException.proto */
+#if CYTHON_USE_EXC_INFO_STACK
+static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate);
+#endif
+
+/* SaveResetException.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb)
+static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
+#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb)
+static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
+#else
+#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb)
+#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb)
+#endif
+
+/* GetException.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb)
+static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
+#else
+static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb);
+#endif
+
+/* PyObjectCall.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw);
+#else
+#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw)
+#endif
+
+/* RaiseException.proto */
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause);
+
+/* PyFunctionFastCall.proto */
+#if CYTHON_FAST_PYCALL
+#define __Pyx_PyFunction_FastCall(func, args, nargs)\
+ __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL)
+#if 1 || PY_VERSION_HEX < 0x030600B1
+static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs);
+#else
+#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs)
+#endif
+#define __Pyx_BUILD_ASSERT_EXPR(cond)\
+ (sizeof(char [1 - 2*!(cond)]) - 1)
+#ifndef Py_MEMBER_SIZE
+#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member)
+#endif
+ static size_t __pyx_pyframe_localsplus_offset = 0;
+ #include "frameobject.h"
+ #define __Pxy_PyFrame_Initialize_Offsets()\
+ ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\
+ (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus)))
+ #define __Pyx_PyFrame_GetLocalsplus(frame)\
+ (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset))
+#endif
+
+/* PyCFunctionFastCall.proto */
+#if CYTHON_FAST_PYCCALL
+static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs);
+#else
+#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL)
+#endif
+
+/* KeywordStringCheck.proto */
+static int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed);
+
+/* PySequenceContains.proto */
+static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) {
+ int result = PySequence_Contains(seq, item);
+ return unlikely(result < 0) ? result : (result == (eq == Py_EQ));
+}
+
+/* GetItemInt.proto */
+#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\
+ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\
+ __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\
+ (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\
+ __Pyx_GetItemInt_Generic(o, to_py_func(i))))
+#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\
+ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\
+ __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\
+ (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL))
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i,
+ int wraparound, int boundscheck);
+#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\
+ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\
+ __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\
+ (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL))
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i,
+ int wraparound, int boundscheck);
+static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j);
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i,
+ int is_list, int wraparound, int boundscheck);
+
+/* ObjectGetItem.proto */
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key);
+#else
+#define __Pyx_PyObject_GetItem(obj, key) PyObject_GetItem(obj, key)
+#endif
+
+/* PyObjectCallMethO.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg);
+#endif
+
+/* PyObjectCallNoArg.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func);
+#else
+#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL)
+#endif
+
+/* PyObjectCallOneArg.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg);
+
+/* None.proto */
+static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname);
+
+/* ListAppend.proto */
+#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS
+static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) {
+ PyListObject* L = (PyListObject*) list;
+ Py_ssize_t len = Py_SIZE(list);
+ if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) {
+ Py_INCREF(x);
+ PyList_SET_ITEM(list, len, x);
+ Py_SIZE(list) = len+1;
+ return 0;
+ }
+ return PyList_Append(list, x);
+}
+#else
+#define __Pyx_PyList_Append(L,x) PyList_Append(L,x)
+#endif
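+/* Fast-path append: when the list still has spare capacity, the item is
+   stored directly with PyList_SET_ITEM and ob_size is bumped, avoiding the
+   call overhead of PyList_Append. */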
+
+/* PyObjectSetAttrStr.proto */
+#if CYTHON_USE_TYPE_SLOTS
+#define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o, n, NULL)
+static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value);
+#else
+#define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n)
+#define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v)
+#endif
+
+/* PyObjectCall2Args.proto */
+static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2);
+
+/* HasAttr.proto */
+static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *);
+
+/* IncludeStringH.proto */
+#include <string.h>
+
+/* BytesEquals.proto */
+static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals);
+
+/* UnicodeEquals.proto */
+static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals);
+
+/* StrEquals.proto */
+#if PY_MAJOR_VERSION >= 3
+#define __Pyx_PyString_Equals __Pyx_PyUnicode_Equals
+#else
+#define __Pyx_PyString_Equals __Pyx_PyBytes_Equals
+#endif
+
+/* RaiseTooManyValuesToUnpack.proto */
+static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected);
+
+/* RaiseNeedMoreValuesToUnpack.proto */
+static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index);
+
+/* IterFinish.proto */
+static CYTHON_INLINE int __Pyx_IterFinish(void);
+
+/* UnpackItemEndCheck.proto */
+static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected);
+
+/* Import.proto */
+static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level);
+
+/* ImportFrom.proto */
+static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name);
+
+/* PyObject_GenericGetAttrNoDict.proto */
+#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name);
+#else
+#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr
+#endif
+
+/* PyObject_GenericGetAttr.proto */
+#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
+static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name);
+#else
+#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr
+#endif
+
+/* SetupReduce.proto */
+static int __Pyx_setup_reduce(PyObject* type_obj);
+
+/* SetVTable.proto */
+static int __Pyx_SetVtable(PyObject *dict, void *vtable);
+
+/* CLineInTraceback.proto */
+#ifdef CYTHON_CLINE_IN_TRACEBACK
+#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0)
+#else
+static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);
+#endif
+
+/* CodeObjectCache.proto */
+typedef struct {
+ PyCodeObject* code_object;
+ int code_line;
+} __Pyx_CodeObjectCacheEntry;
+struct __Pyx_CodeObjectCache {
+ int count;
+ int max_count;
+ __Pyx_CodeObjectCacheEntry* entries;
+};
+static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL};
+static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line);
+static PyCodeObject *__pyx_find_code_object(int code_line);
+static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
+
+/* AddTraceback.proto */
+static void __Pyx_AddTraceback(const char *funcname, int c_line,
+ int py_line, const char *filename);
+
+/* CIntToPy.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value);
+
+/* CIntToPy.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
+
+/* CIntFromPy.proto */
+static CYTHON_INLINE size_t __Pyx_PyInt_As_size_t(PyObject *);
+
+/* CIntFromPy.proto */
+static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);
+
+/* CIntFromPy.proto */
+static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);
+
+/* FastTypeChecks.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);
+#else
+#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
+#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)
+#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))
+#endif
+#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)
+
+/* CheckBinaryVersion.proto */
+static int __Pyx_check_binary_version(void);
+
+/* InitStrings.proto */
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t);
+
+static PyObject *__pyx_f_5_yaml_7CParser__parser_error(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto*/
+static PyObject *__pyx_f_5_yaml_7CParser__scan(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto*/
+static PyObject *__pyx_f_5_yaml_7CParser__token_to_object(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, yaml_token_t *__pyx_v_token); /* proto*/
+static PyObject *__pyx_f_5_yaml_7CParser__parse(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto*/
+static PyObject *__pyx_f_5_yaml_7CParser__event_to_object(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, yaml_event_t *__pyx_v_event); /* proto*/
+static PyObject *__pyx_f_5_yaml_7CParser__compose_document(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto*/
+static PyObject *__pyx_f_5_yaml_7CParser__compose_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_parent, PyObject *__pyx_v_index); /* proto*/
+static PyObject *__pyx_f_5_yaml_7CParser__compose_scalar_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_anchor); /* proto*/
+static PyObject *__pyx_f_5_yaml_7CParser__compose_sequence_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_anchor); /* proto*/
+static PyObject *__pyx_f_5_yaml_7CParser__compose_mapping_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_anchor); /* proto*/
+static int __pyx_f_5_yaml_7CParser__parse_next_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto*/
+static PyObject *__pyx_f_5_yaml_8CEmitter__emitter_error(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self); /* proto*/
+static int __pyx_f_5_yaml_8CEmitter__object_to_event(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_event_object, yaml_event_t *__pyx_v_event); /* proto*/
+static int __pyx_f_5_yaml_8CEmitter__anchor_node(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_node); /* proto*/
+static int __pyx_f_5_yaml_8CEmitter__serialize_node(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_node, PyObject *__pyx_v_parent, PyObject *__pyx_v_index); /* proto*/
+
+/* Module declarations from '_yaml' */
+static PyTypeObject *__pyx_ptype_5_yaml_Mark = 0;
+static PyTypeObject *__pyx_ptype_5_yaml_CParser = 0;
+static PyTypeObject *__pyx_ptype_5_yaml_CEmitter = 0;
+static int __pyx_f_5_yaml_input_handler(void *, char *, size_t, size_t *); /*proto*/
+static int __pyx_f_5_yaml_output_handler(void *, char *, size_t); /*proto*/
+static PyObject *__pyx_f_5_yaml___pyx_unpickle_Mark__set_state(struct __pyx_obj_5_yaml_Mark *, PyObject *); /*proto*/
+#define __Pyx_MODULE_NAME "_yaml"
+extern int __pyx_module_is_main__yaml;
+int __pyx_module_is_main__yaml = 0;
+
+/* Implementation of '_yaml' */
+static PyObject *__pyx_builtin_MemoryError;
+static PyObject *__pyx_builtin_AttributeError;
+static PyObject *__pyx_builtin_TypeError;
+static PyObject *__pyx_builtin_ValueError;
+static const char __pyx_k__3[] = "?";
+static const char __pyx_k__6[] = "";
+static const char __pyx_k__7[] = "'";
+static const char __pyx_k__8[] = "\"";
+static const char __pyx_k__9[] = "|";
+static const char __pyx_k_TAG[] = "TAG";
+static const char __pyx_k__10[] = ">";
+static const char __pyx_k__19[] = "\r";
+static const char __pyx_k__20[] = "\n";
+static const char __pyx_k__21[] = "\r\n";
+static const char __pyx_k_new[] = "__new__";
+static const char __pyx_k_tag[] = "tag";
+static const char __pyx_k_Mark[] = "Mark";
+static const char __pyx_k_YAML[] = "YAML";
+static const char __pyx_k_dict[] = "__dict__";
+static const char __pyx_k_file[] = "<file>";
+static const char __pyx_k_line[] = "line";
+static const char __pyx_k_main[] = "__main__";
+static const char __pyx_k_name[] = "name";
+static const char __pyx_k_read[] = "read";
+static const char __pyx_k_tags[] = "tags";
+static const char __pyx_k_test[] = "__test__";
+static const char __pyx_k_yaml[] = "yaml";
+static const char __pyx_k_class[] = "__class__";
+static const char __pyx_k_error[] = "error";
+static const char __pyx_k_index[] = "index";
+static const char __pyx_k_major[] = "major";
+static const char __pyx_k_minor[] = "minor";
+static const char __pyx_k_nodes[] = "nodes";
+static const char __pyx_k_patch[] = "patch";
+static const char __pyx_k_style[] = "style";
+static const char __pyx_k_utf_8[] = "utf-8";
+static const char __pyx_k_value[] = "value";
+static const char __pyx_k_width[] = "width";
+static const char __pyx_k_write[] = "write";
+static const char __pyx_k_anchor[] = "anchor";
+static const char __pyx_k_buffer[] = "buffer";
+static const char __pyx_k_column[] = "column";
+static const char __pyx_k_events[] = "events";
+static const char __pyx_k_id_03d[] = "id%03d";
+static const char __pyx_k_import[] = "__import__";
+static const char __pyx_k_indent[] = "indent";
+static const char __pyx_k_name_2[] = "__name__";
+static const char __pyx_k_parser[] = "parser";
+static const char __pyx_k_pickle[] = "pickle";
+static const char __pyx_k_reader[] = "reader";
+static const char __pyx_k_reduce[] = "__reduce__";
+static const char __pyx_k_stream[] = "stream";
+static const char __pyx_k_tokens[] = "tokens";
+static const char __pyx_k_update[] = "update";
+static const char __pyx_k_yaml_2[] = "_yaml";
+static const char __pyx_k_CParser[] = "CParser";
+static const char __pyx_k_emitter[] = "emitter";
+static const char __pyx_k_pointer[] = "pointer";
+static const char __pyx_k_resolve[] = "resolve";
+static const char __pyx_k_scanner[] = "scanner";
+static const char __pyx_k_version[] = "version";
+static const char __pyx_k_CEmitter[] = "CEmitter";
+static const char __pyx_k_KeyToken[] = "KeyToken";
+static const char __pyx_k_TagToken[] = "TagToken";
+static const char __pyx_k_composer[] = "composer";
+static const char __pyx_k_encoding[] = "encoding";
+static const char __pyx_k_end_mark[] = "end_mark";
+static const char __pyx_k_explicit[] = "explicit";
+static const char __pyx_k_getstate[] = "__getstate__";
+static const char __pyx_k_implicit[] = "implicit";
+static const char __pyx_k_pyx_type[] = "__pyx_type";
+static const char __pyx_k_setstate[] = "__setstate__";
+static const char __pyx_k_TypeError[] = "TypeError";
+static const char __pyx_k_YAMLError[] = "YAMLError";
+static const char __pyx_k_canonical[] = "canonical";
+static const char __pyx_k_pyx_state[] = "__pyx_state";
+static const char __pyx_k_reduce_ex[] = "__reduce_ex__";
+static const char __pyx_k_utf_16_be[] = "utf-16-be";
+static const char __pyx_k_utf_16_le[] = "utf-16-le";
+static const char __pyx_k_AliasEvent[] = "AliasEvent";
+static const char __pyx_k_AliasToken[] = "AliasToken";
+static const char __pyx_k_ScalarNode[] = "ScalarNode";
+static const char __pyx_k_ValueError[] = "ValueError";
+static const char __pyx_k_ValueToken[] = "ValueToken";
+static const char __pyx_k_flow_style[] = "flow_style";
+static const char __pyx_k_line_break[] = "line_break";
+static const char __pyx_k_pyx_result[] = "__pyx_result";
+static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__";
+static const char __pyx_k_serializer[] = "serializer";
+static const char __pyx_k_start_mark[] = "start_mark";
+static const char __pyx_k_AnchorToken[] = "AnchorToken";
+static const char __pyx_k_MappingNode[] = "MappingNode";
+static const char __pyx_k_MemoryError[] = "MemoryError";
+static const char __pyx_k_ParserError[] = "ParserError";
+static const char __pyx_k_PickleError[] = "PickleError";
+static const char __pyx_k_ReaderError[] = "ReaderError";
+static const char __pyx_k_ScalarEvent[] = "ScalarEvent";
+static const char __pyx_k_ScalarToken[] = "ScalarToken";
+static const char __pyx_k_byte_string[] = "<byte string>";
+static const char __pyx_k_constructor[] = "constructor";
+static const char __pyx_k_get_version[] = "get_version";
+static const char __pyx_k_representer[] = "representer";
+static const char __pyx_k_EmitterError[] = "EmitterError";
+static const char __pyx_k_ScannerError[] = "ScannerError";
+static const char __pyx_k_SequenceNode[] = "SequenceNode";
+static const char __pyx_k_explicit_end[] = "explicit_end";
+static const char __pyx_k_pyx_checksum[] = "__pyx_checksum";
+static const char __pyx_k_stringsource[] = "stringsource";
+static const char __pyx_k_BlockEndToken[] = "BlockEndToken";
+static const char __pyx_k_ComposerError[] = "ComposerError";
+static const char __pyx_k_allow_unicode[] = "allow_unicode";
+static const char __pyx_k_ext__yaml_pyx[] = "ext/_yaml.pyx";
+static const char __pyx_k_reduce_cython[] = "__reduce_cython__";
+static const char __pyx_k_too_many_tags[] = "too many tags";
+static const char __pyx_k_AttributeError[] = "AttributeError";
+static const char __pyx_k_DirectiveToken[] = "DirectiveToken";
+static const char __pyx_k_FlowEntryToken[] = "FlowEntryToken";
+static const char __pyx_k_StreamEndEvent[] = "StreamEndEvent";
+static const char __pyx_k_StreamEndToken[] = "StreamEndToken";
+static const char __pyx_k_explicit_start[] = "explicit_start";
+static const char __pyx_k_unicode_string[] = "<unicode string>";
+static const char __pyx_k_BlockEntryToken[] = "BlockEntryToken";
+static const char __pyx_k_MappingEndEvent[] = "MappingEndEvent";
+static const char __pyx_k_SerializerError[] = "SerializerError";
+static const char __pyx_k_ascend_resolver[] = "ascend_resolver";
+static const char __pyx_k_invalid_event_s[] = "invalid event %s";
+static const char __pyx_k_no_parser_error[] = "no parser error";
+static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError";
+static const char __pyx_k_setstate_cython[] = "__setstate_cython__";
+static const char __pyx_k_ConstructorError[] = "ConstructorError";
+static const char __pyx_k_DocumentEndEvent[] = "DocumentEndEvent";
+static const char __pyx_k_DocumentEndToken[] = "DocumentEndToken";
+static const char __pyx_k_RepresenterError[] = "RepresenterError";
+static const char __pyx_k_SequenceEndEvent[] = "SequenceEndEvent";
+static const char __pyx_k_StreamStartEvent[] = "StreamStartEvent";
+static const char __pyx_k_StreamStartToken[] = "StreamStartToken";
+static const char __pyx_k_descend_resolver[] = "descend_resolver";
+static const char __pyx_k_no_emitter_error[] = "no emitter error";
+static const char __pyx_k_MappingStartEvent[] = "MappingStartEvent";
+static const char __pyx_k_pyx_unpickle_Mark[] = "__pyx_unpickle_Mark";
+static const char __pyx_k_second_occurrence[] = "second occurrence";
+static const char __pyx_k_DocumentStartEvent[] = "DocumentStartEvent";
+static const char __pyx_k_DocumentStartToken[] = "DocumentStartToken";
+static const char __pyx_k_SequenceStartEvent[] = "SequenceStartEvent";
+static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback";
+static const char __pyx_k_get_version_string[] = "get_version_string";
+static const char __pyx_k_unknown_event_type[] = "unknown event type";
+static const char __pyx_k_unknown_token_type[] = "unknown token type";
+static const char __pyx_k_FlowMappingEndToken[] = "FlowMappingEndToken";
+static const char __pyx_k_FlowSequenceEndToken[] = "FlowSequenceEndToken";
+static const char __pyx_k_in_s_line_d_column_d[] = " in \"%s\", line %d, column %d";
+static const char __pyx_k_serializer_is_closed[] = "serializer is closed";
+static const char __pyx_k_tag_must_be_a_string[] = "tag must be a string";
+static const char __pyx_k_FlowMappingStartToken[] = "FlowMappingStartToken";
+static const char __pyx_k_found_undefined_alias[] = "found undefined alias";
+static const char __pyx_k_BlockMappingStartToken[] = "BlockMappingStartToken";
+static const char __pyx_k_FlowSequenceStartToken[] = "FlowSequenceStartToken";
+static const char __pyx_k_value_must_be_a_string[] = "value must be a string";
+static const char __pyx_k_BlockSequenceStartToken[] = "BlockSequenceStartToken";
+static const char __pyx_k_anchor_must_be_a_string[] = "anchor must be a string";
+static const char __pyx_k_serializer_is_not_opened[] = "serializer is not opened";
+static const char __pyx_k_a_string_value_is_expected[] = "a string value is expected";
+static const char __pyx_k_but_found_another_document[] = "but found another document";
+static const char __pyx_k_tag_handle_must_be_a_string[] = "tag handle must be a string";
+static const char __pyx_k_tag_prefix_must_be_a_string[] = "tag prefix must be a string";
+static const char __pyx_k_serializer_is_already_opened[] = "serializer is already opened";
+static const char __pyx_k_Pickling_of_struct_members_such[] = "Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)";
+static const char __pyx_k_Incompatible_checksums_s_vs_0x3f[] = "Incompatible checksums (%s vs 0x3fa45b6 = (buffer, column, index, line, name, pointer))";
+static const char __pyx_k_a_string_or_stream_input_is_requ[] = "a string or stream input is required";
+static const char __pyx_k_expected_a_single_document_in_th[] = "expected a single document in the stream";
+static const char __pyx_k_found_duplicate_anchor_first_occ[] = "found duplicate anchor; first occurrence";
+static const char __pyx_k_self_parsed_event_cannot_be_conv[] = "self.parsed_event cannot be converted to a Python object for pickling";
+static PyObject *__pyx_n_s_AliasEvent;
+static PyObject *__pyx_n_s_AliasToken;
+static PyObject *__pyx_n_s_AnchorToken;
+static PyObject *__pyx_n_s_AttributeError;
+static PyObject *__pyx_n_s_BlockEndToken;
+static PyObject *__pyx_n_s_BlockEntryToken;
+static PyObject *__pyx_n_s_BlockMappingStartToken;
+static PyObject *__pyx_n_s_BlockSequenceStartToken;
+static PyObject *__pyx_n_s_CEmitter;
+static PyObject *__pyx_n_s_CParser;
+static PyObject *__pyx_n_s_ComposerError;
+static PyObject *__pyx_n_s_ConstructorError;
+static PyObject *__pyx_n_s_DirectiveToken;
+static PyObject *__pyx_n_s_DocumentEndEvent;
+static PyObject *__pyx_n_s_DocumentEndToken;
+static PyObject *__pyx_n_s_DocumentStartEvent;
+static PyObject *__pyx_n_s_DocumentStartToken;
+static PyObject *__pyx_n_s_EmitterError;
+static PyObject *__pyx_n_s_FlowEntryToken;
+static PyObject *__pyx_n_s_FlowMappingEndToken;
+static PyObject *__pyx_n_s_FlowMappingStartToken;
+static PyObject *__pyx_n_s_FlowSequenceEndToken;
+static PyObject *__pyx_n_s_FlowSequenceStartToken;
+static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0x3f;
+static PyObject *__pyx_n_s_KeyToken;
+static PyObject *__pyx_n_s_MappingEndEvent;
+static PyObject *__pyx_n_s_MappingNode;
+static PyObject *__pyx_n_s_MappingStartEvent;
+static PyObject *__pyx_n_s_Mark;
+static PyObject *__pyx_n_s_MemoryError;
+static PyObject *__pyx_n_s_ParserError;
+static PyObject *__pyx_n_s_PickleError;
+static PyObject *__pyx_kp_s_Pickling_of_struct_members_such;
+static PyObject *__pyx_n_s_ReaderError;
+static PyObject *__pyx_n_s_RepresenterError;
+static PyObject *__pyx_n_s_ScalarEvent;
+static PyObject *__pyx_n_s_ScalarNode;
+static PyObject *__pyx_n_s_ScalarToken;
+static PyObject *__pyx_n_s_ScannerError;
+static PyObject *__pyx_n_s_SequenceEndEvent;
+static PyObject *__pyx_n_s_SequenceNode;
+static PyObject *__pyx_n_s_SequenceStartEvent;
+static PyObject *__pyx_n_s_SerializerError;
+static PyObject *__pyx_n_s_StreamEndEvent;
+static PyObject *__pyx_n_s_StreamEndToken;
+static PyObject *__pyx_n_s_StreamStartEvent;
+static PyObject *__pyx_n_s_StreamStartToken;
+static PyObject *__pyx_n_u_TAG;
+static PyObject *__pyx_n_s_TagToken;
+static PyObject *__pyx_n_s_TypeError;
+static PyObject *__pyx_n_s_ValueError;
+static PyObject *__pyx_n_s_ValueToken;
+static PyObject *__pyx_n_u_YAML;
+static PyObject *__pyx_n_s_YAMLError;
+static PyObject *__pyx_kp_s__10;
+static PyObject *__pyx_kp_u__10;
+static PyObject *__pyx_kp_s__19;
+static PyObject *__pyx_kp_s__20;
+static PyObject *__pyx_kp_s__21;
+static PyObject *__pyx_kp_s__3;
+static PyObject *__pyx_kp_u__3;
+static PyObject *__pyx_kp_u__6;
+static PyObject *__pyx_kp_s__7;
+static PyObject *__pyx_kp_u__7;
+static PyObject *__pyx_kp_s__8;
+static PyObject *__pyx_kp_u__8;
+static PyObject *__pyx_kp_s__9;
+static PyObject *__pyx_kp_u__9;
+static PyObject *__pyx_kp_s_a_string_or_stream_input_is_requ;
+static PyObject *__pyx_kp_u_a_string_or_stream_input_is_requ;
+static PyObject *__pyx_kp_s_a_string_value_is_expected;
+static PyObject *__pyx_kp_u_a_string_value_is_expected;
+static PyObject *__pyx_n_s_allow_unicode;
+static PyObject *__pyx_n_s_anchor;
+static PyObject *__pyx_kp_s_anchor_must_be_a_string;
+static PyObject *__pyx_kp_u_anchor_must_be_a_string;
+static PyObject *__pyx_n_s_ascend_resolver;
+static PyObject *__pyx_n_s_buffer;
+static PyObject *__pyx_kp_s_but_found_another_document;
+static PyObject *__pyx_kp_u_but_found_another_document;
+static PyObject *__pyx_kp_s_byte_string;
+static PyObject *__pyx_kp_u_byte_string;
+static PyObject *__pyx_n_s_canonical;
+static PyObject *__pyx_n_s_class;
+static PyObject *__pyx_n_s_cline_in_traceback;
+static PyObject *__pyx_n_s_column;
+static PyObject *__pyx_n_s_composer;
+static PyObject *__pyx_n_s_constructor;
+static PyObject *__pyx_n_s_descend_resolver;
+static PyObject *__pyx_n_s_dict;
+static PyObject *__pyx_n_s_emitter;
+static PyObject *__pyx_n_s_encoding;
+static PyObject *__pyx_n_u_encoding;
+static PyObject *__pyx_n_s_end_mark;
+static PyObject *__pyx_n_s_error;
+static PyObject *__pyx_n_s_events;
+static PyObject *__pyx_kp_s_expected_a_single_document_in_th;
+static PyObject *__pyx_kp_u_expected_a_single_document_in_th;
+static PyObject *__pyx_n_s_explicit;
+static PyObject *__pyx_n_s_explicit_end;
+static PyObject *__pyx_n_s_explicit_start;
+static PyObject *__pyx_kp_s_ext__yaml_pyx;
+static PyObject *__pyx_kp_s_file;
+static PyObject *__pyx_kp_u_file;
+static PyObject *__pyx_n_s_flow_style;
+static PyObject *__pyx_kp_s_found_duplicate_anchor_first_occ;
+static PyObject *__pyx_kp_u_found_duplicate_anchor_first_occ;
+static PyObject *__pyx_kp_s_found_undefined_alias;
+static PyObject *__pyx_kp_u_found_undefined_alias;
+static PyObject *__pyx_n_s_get_version;
+static PyObject *__pyx_n_s_get_version_string;
+static PyObject *__pyx_n_s_getstate;
+static PyObject *__pyx_kp_u_id_03d;
+static PyObject *__pyx_n_s_implicit;
+static PyObject *__pyx_n_s_import;
+static PyObject *__pyx_kp_s_in_s_line_d_column_d;
+static PyObject *__pyx_n_s_indent;
+static PyObject *__pyx_n_s_index;
+static PyObject *__pyx_kp_s_invalid_event_s;
+static PyObject *__pyx_kp_u_invalid_event_s;
+static PyObject *__pyx_n_s_line;
+static PyObject *__pyx_n_s_line_break;
+static PyObject *__pyx_n_s_main;
+static PyObject *__pyx_n_s_major;
+static PyObject *__pyx_n_s_minor;
+static PyObject *__pyx_n_s_name;
+static PyObject *__pyx_n_s_name_2;
+static PyObject *__pyx_n_s_new;
+static PyObject *__pyx_kp_s_no_emitter_error;
+static PyObject *__pyx_kp_u_no_emitter_error;
+static PyObject *__pyx_kp_s_no_parser_error;
+static PyObject *__pyx_kp_u_no_parser_error;
+static PyObject *__pyx_n_s_nodes;
+static PyObject *__pyx_n_s_parser;
+static PyObject *__pyx_n_s_patch;
+static PyObject *__pyx_n_s_pickle;
+static PyObject *__pyx_n_s_pointer;
+static PyObject *__pyx_n_s_pyx_PickleError;
+static PyObject *__pyx_n_s_pyx_checksum;
+static PyObject *__pyx_n_s_pyx_result;
+static PyObject *__pyx_n_s_pyx_state;
+static PyObject *__pyx_n_s_pyx_type;
+static PyObject *__pyx_n_s_pyx_unpickle_Mark;
+static PyObject *__pyx_n_s_pyx_vtable;
+static PyObject *__pyx_n_s_read;
+static PyObject *__pyx_n_s_reader;
+static PyObject *__pyx_n_s_reduce;
+static PyObject *__pyx_n_s_reduce_cython;
+static PyObject *__pyx_n_s_reduce_ex;
+static PyObject *__pyx_n_s_representer;
+static PyObject *__pyx_n_s_resolve;
+static PyObject *__pyx_n_s_scanner;
+static PyObject *__pyx_kp_s_second_occurrence;
+static PyObject *__pyx_kp_u_second_occurrence;
+static PyObject *__pyx_kp_s_self_parsed_event_cannot_be_conv;
+static PyObject *__pyx_n_s_serializer;
+static PyObject *__pyx_kp_s_serializer_is_already_opened;
+static PyObject *__pyx_kp_u_serializer_is_already_opened;
+static PyObject *__pyx_kp_s_serializer_is_closed;
+static PyObject *__pyx_kp_u_serializer_is_closed;
+static PyObject *__pyx_kp_s_serializer_is_not_opened;
+static PyObject *__pyx_kp_u_serializer_is_not_opened;
+static PyObject *__pyx_n_s_setstate;
+static PyObject *__pyx_n_s_setstate_cython;
+static PyObject *__pyx_n_s_start_mark;
+static PyObject *__pyx_n_s_stream;
+static PyObject *__pyx_kp_s_stringsource;
+static PyObject *__pyx_n_s_style;
+static PyObject *__pyx_n_s_tag;
+static PyObject *__pyx_kp_s_tag_handle_must_be_a_string;
+static PyObject *__pyx_kp_u_tag_handle_must_be_a_string;
+static PyObject *__pyx_kp_s_tag_must_be_a_string;
+static PyObject *__pyx_kp_u_tag_must_be_a_string;
+static PyObject *__pyx_kp_s_tag_prefix_must_be_a_string;
+static PyObject *__pyx_kp_u_tag_prefix_must_be_a_string;
+static PyObject *__pyx_n_s_tags;
+static PyObject *__pyx_n_s_test;
+static PyObject *__pyx_n_s_tokens;
+static PyObject *__pyx_kp_s_too_many_tags;
+static PyObject *__pyx_kp_u_too_many_tags;
+static PyObject *__pyx_kp_s_unicode_string;
+static PyObject *__pyx_kp_u_unicode_string;
+static PyObject *__pyx_kp_s_unknown_event_type;
+static PyObject *__pyx_kp_u_unknown_event_type;
+static PyObject *__pyx_kp_s_unknown_token_type;
+static PyObject *__pyx_kp_u_unknown_token_type;
+static PyObject *__pyx_n_s_update;
+static PyObject *__pyx_kp_s_utf_16_be;
+static PyObject *__pyx_kp_u_utf_16_be;
+static PyObject *__pyx_kp_s_utf_16_le;
+static PyObject *__pyx_kp_u_utf_16_le;
+static PyObject *__pyx_kp_u_utf_8;
+static PyObject *__pyx_n_s_value;
+static PyObject *__pyx_kp_s_value_must_be_a_string;
+static PyObject *__pyx_kp_u_value_must_be_a_string;
+static PyObject *__pyx_n_s_version;
+static PyObject *__pyx_n_s_width;
+static PyObject *__pyx_n_s_write;
+static PyObject *__pyx_n_s_yaml;
+static PyObject *__pyx_n_s_yaml_2;
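+/* These PyObject* slots receive the interned Python string objects built from
+   the __pyx_k_* literals above; __Pyx_InitStrings fills them in during module
+   initialization. */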
+static PyObject *__pyx_pf_5_yaml_get_version_string(CYTHON_UNUSED PyObject *__pyx_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_2get_version(CYTHON_UNUSED PyObject *__pyx_self); /* proto */
+static int __pyx_pf_5_yaml_4Mark___init__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self, PyObject *__pyx_v_name, size_t __pyx_v_index, size_t __pyx_v_line, size_t __pyx_v_column, PyObject *__pyx_v_buffer, PyObject *__pyx_v_pointer); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_2get_snippet(CYTHON_UNUSED struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_4__str__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_4name___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_5index___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_4line___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_6column___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_6buffer___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_7pointer___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_6__reduce_cython__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_8__setstate_cython__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */
+static int __pyx_pf_5_yaml_7CParser___init__(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_stream); /* proto */
+static void __pyx_pf_5_yaml_7CParser_2__dealloc__(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_4dispose(CYTHON_UNUSED struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_6raw_scan(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_8get_token(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_10peek_token(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_12check_token(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_choices); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_14raw_parse(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_16get_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_18peek_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_20check_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_choices); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_22check_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_24get_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_26get_single_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_28__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_30__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_5_yaml_CParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */
+static int __pyx_pf_5_yaml_8CEmitter___init__(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_stream, PyObject *__pyx_v_canonical, PyObject *__pyx_v_indent, PyObject *__pyx_v_width, PyObject *__pyx_v_allow_unicode, PyObject *__pyx_v_line_break, PyObject *__pyx_v_encoding, PyObject *__pyx_v_explicit_start, PyObject *__pyx_v_explicit_end, PyObject *__pyx_v_version, PyObject *__pyx_v_tags); /* proto */
+static void __pyx_pf_5_yaml_8CEmitter_2__dealloc__(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_8CEmitter_4dispose(CYTHON_UNUSED struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_8CEmitter_6emit(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_event_object); /* proto */
+static PyObject *__pyx_pf_5_yaml_8CEmitter_8open(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_8CEmitter_10close(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_8CEmitter_12serialize(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_node); /* proto */
+static PyObject *__pyx_pf_5_yaml_8CEmitter_14__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_8CEmitter_16__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */
+static PyObject *__pyx_pf_5_yaml_4__pyx_unpickle_Mark(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */
+static PyObject *__pyx_tp_new_5_yaml_Mark(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
+static PyObject *__pyx_tp_new_5_yaml_CParser(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
+static PyObject *__pyx_tp_new_5_yaml_CEmitter(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
+static PyObject *__pyx_int_0;
+static PyObject *__pyx_int_1;
+static PyObject *__pyx_int_66733494;
+static PyObject *__pyx_tuple_;
+static PyObject *__pyx_tuple__2;
+static PyObject *__pyx_tuple__4;
+static PyObject *__pyx_tuple__5;
+static PyObject *__pyx_tuple__11;
+static PyObject *__pyx_tuple__12;
+static PyObject *__pyx_tuple__13;
+static PyObject *__pyx_tuple__14;
+static PyObject *__pyx_tuple__15;
+static PyObject *__pyx_tuple__16;
+static PyObject *__pyx_tuple__17;
+static PyObject *__pyx_tuple__18;
+static PyObject *__pyx_tuple__22;
+static PyObject *__pyx_tuple__23;
+static PyObject *__pyx_tuple__24;
+static PyObject *__pyx_tuple__25;
+static PyObject *__pyx_tuple__26;
+static PyObject *__pyx_tuple__27;
+static PyObject *__pyx_tuple__28;
+static PyObject *__pyx_tuple__29;
+static PyObject *__pyx_tuple__30;
+static PyObject *__pyx_tuple__31;
+static PyObject *__pyx_tuple__32;
+static PyObject *__pyx_tuple__33;
+static PyObject *__pyx_tuple__34;
+static PyObject *__pyx_tuple__35;
+static PyObject *__pyx_tuple__36;
+static PyObject *__pyx_tuple__37;
+static PyObject *__pyx_tuple__38;
+static PyObject *__pyx_tuple__39;
+static PyObject *__pyx_tuple__40;
+static PyObject *__pyx_tuple__42;
+static PyObject *__pyx_tuple__44;
+static PyObject *__pyx_codeobj__41;
+static PyObject *__pyx_codeobj__43;
+static PyObject *__pyx_codeobj__45;
+/* Late includes */
+
+/* "_yaml.pyx":4
+ * import yaml
+ *
+ * def get_version_string(): # <<<<<<<<<<<<<<
+ * cdef char *value
+ * value = yaml_get_version_string()
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_1get_version_string(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyMethodDef __pyx_mdef_5_yaml_1get_version_string = {"get_version_string", (PyCFunction)__pyx_pw_5_yaml_1get_version_string, METH_NOARGS, 0};
+static PyObject *__pyx_pw_5_yaml_1get_version_string(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_version_string (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_get_version_string(__pyx_self);
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_get_version_string(CYTHON_UNUSED PyObject *__pyx_self) {
+ char *__pyx_v_value;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ __Pyx_RefNannySetupContext("get_version_string", 0);
+
+ /* "_yaml.pyx":6
+ * def get_version_string():
+ * cdef char *value
+ * value = yaml_get_version_string() # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * return value
+ */
+ __pyx_v_value = yaml_get_version_string();
+
+ /* "_yaml.pyx":7
+ * cdef char *value
+ * value = yaml_get_version_string()
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * return value
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":8
+ * value = yaml_get_version_string()
+ * if PY_MAJOR_VERSION < 3:
+ * return value # <<<<<<<<<<<<<<
+ * else:
+ * return PyUnicode_FromString(value)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = __Pyx_PyBytes_FromString(__pyx_v_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 8, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":7
+ * cdef char *value
+ * value = yaml_get_version_string()
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * return value
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":10
+ * return value
+ * else:
+ * return PyUnicode_FromString(value) # <<<<<<<<<<<<<<
+ *
+ * def get_version():
+ */
+ /*else*/ {
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+ }
+
+ /* "_yaml.pyx":4
+ * import yaml
+ *
+ * def get_version_string(): # <<<<<<<<<<<<<<
+ * cdef char *value
+ * value = yaml_get_version_string()
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_AddTraceback("_yaml.get_version_string", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":12
+ * return PyUnicode_FromString(value)
+ *
+ * def get_version(): # <<<<<<<<<<<<<<
+ * cdef int major, minor, patch
+ * yaml_get_version(&major, &minor, &patch)
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_3get_version(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyMethodDef __pyx_mdef_5_yaml_3get_version = {"get_version", (PyCFunction)__pyx_pw_5_yaml_3get_version, METH_NOARGS, 0};
+static PyObject *__pyx_pw_5_yaml_3get_version(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_version (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_2get_version(__pyx_self);
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_2get_version(CYTHON_UNUSED PyObject *__pyx_self) {
+ int __pyx_v_major;
+ int __pyx_v_minor;
+ int __pyx_v_patch;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ __Pyx_RefNannySetupContext("get_version", 0);
+
+ /* "_yaml.pyx":14
+ * def get_version():
+ * cdef int major, minor, patch
+ * yaml_get_version(&major, &minor, &patch) # <<<<<<<<<<<<<<
+ * return (major, minor, patch)
+ *
+ */
+ yaml_get_version((&__pyx_v_major), (&__pyx_v_minor), (&__pyx_v_patch));
+
+ /* "_yaml.pyx":15
+ * cdef int major, minor, patch
+ * yaml_get_version(&major, &minor, &patch)
+ * return (major, minor, patch) # <<<<<<<<<<<<<<
+ *
+ * #Mark = yaml.error.Mark
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_major); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 15, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_minor); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_patch); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 15, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_3);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":12
+ * return PyUnicode_FromString(value)
+ *
+ * def get_version(): # <<<<<<<<<<<<<<
+ * cdef int major, minor, patch
+ * yaml_get_version(&major, &minor, &patch)
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("_yaml.get_version", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":72
+ * cdef readonly pointer
+ *
+ * def __init__(self, object name, size_t index, size_t line, size_t column, # <<<<<<<<<<<<<<
+ * object buffer, object pointer):
+ * self.name = name
+ */
+
+/* Python wrapper */
+static int __pyx_pw_5_yaml_4Mark_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static int __pyx_pw_5_yaml_4Mark_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ PyObject *__pyx_v_name = 0;
+ size_t __pyx_v_index;
+ size_t __pyx_v_line;
+ size_t __pyx_v_column;
+ PyObject *__pyx_v_buffer = 0;
+ PyObject *__pyx_v_pointer = 0;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__init__ (wrapper)", 0);
+ {
+ static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_name,&__pyx_n_s_index,&__pyx_n_s_line,&__pyx_n_s_column,&__pyx_n_s_buffer,&__pyx_n_s_pointer,0};
+ PyObject* values[6] = {0,0,0,0,0,0};
+ if (unlikely(__pyx_kwds)) {
+ Py_ssize_t kw_args;
+ const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
+ switch (pos_args) {
+ case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
+ CYTHON_FALLTHROUGH;
+ case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+ CYTHON_FALLTHROUGH;
+ case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ CYTHON_FALLTHROUGH;
+ case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ CYTHON_FALLTHROUGH;
+ case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ CYTHON_FALLTHROUGH;
+ case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ CYTHON_FALLTHROUGH;
+ case 0: break;
+ default: goto __pyx_L5_argtuple_error;
+ }
+ kw_args = PyDict_Size(__pyx_kwds);
+ switch (pos_args) {
+ case 0:
+ if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_name)) != 0)) kw_args--;
+ else goto __pyx_L5_argtuple_error;
+ CYTHON_FALLTHROUGH;
+ case 1:
+ if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_index)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 1); __PYX_ERR(0, 72, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 2:
+ if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_line)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 2); __PYX_ERR(0, 72, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 3:
+ if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_column)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 3); __PYX_ERR(0, 72, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 4:
+ if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_buffer)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 4); __PYX_ERR(0, 72, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 5:
+ if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pointer)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 5); __PYX_ERR(0, 72, __pyx_L3_error)
+ }
+ }
+ if (unlikely(kw_args > 0)) {
+ if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 72, __pyx_L3_error)
+ }
+ } else if (PyTuple_GET_SIZE(__pyx_args) != 6) {
+ goto __pyx_L5_argtuple_error;
+ } else {
+ values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+ values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
+ }
+ __pyx_v_name = values[0];
+ __pyx_v_index = __Pyx_PyInt_As_size_t(values[1]); if (unlikely((__pyx_v_index == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 72, __pyx_L3_error)
+ __pyx_v_line = __Pyx_PyInt_As_size_t(values[2]); if (unlikely((__pyx_v_line == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 72, __pyx_L3_error)
+ __pyx_v_column = __Pyx_PyInt_As_size_t(values[3]); if (unlikely((__pyx_v_column == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 72, __pyx_L3_error)
+ __pyx_v_buffer = values[4];
+ __pyx_v_pointer = values[5];
+ }
+ goto __pyx_L4_argument_unpacking_done;
+ __pyx_L5_argtuple_error:;
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 72, __pyx_L3_error)
+ __pyx_L3_error:;
+ __Pyx_AddTraceback("_yaml.Mark.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __Pyx_RefNannyFinishContext();
+ return -1;
+ __pyx_L4_argument_unpacking_done:;
+ __pyx_r = __pyx_pf_5_yaml_4Mark___init__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self), __pyx_v_name, __pyx_v_index, __pyx_v_line, __pyx_v_column, __pyx_v_buffer, __pyx_v_pointer);
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static int __pyx_pf_5_yaml_4Mark___init__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self, PyObject *__pyx_v_name, size_t __pyx_v_index, size_t __pyx_v_line, size_t __pyx_v_column, PyObject *__pyx_v_buffer, PyObject *__pyx_v_pointer) {
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__init__", 0);
+
+ /* "_yaml.pyx":74
+ * def __init__(self, object name, size_t index, size_t line, size_t column,
+ * object buffer, object pointer):
+ * self.name = name # <<<<<<<<<<<<<<
+ * self.index = index
+ * self.line = line
+ */
+ __Pyx_INCREF(__pyx_v_name);
+ __Pyx_GIVEREF(__pyx_v_name);
+ __Pyx_GOTREF(__pyx_v_self->name);
+ __Pyx_DECREF(__pyx_v_self->name);
+ __pyx_v_self->name = __pyx_v_name;
+
+ /* "_yaml.pyx":75
+ * object buffer, object pointer):
+ * self.name = name
+ * self.index = index # <<<<<<<<<<<<<<
+ * self.line = line
+ * self.column = column
+ */
+ __pyx_v_self->index = __pyx_v_index;
+
+ /* "_yaml.pyx":76
+ * self.name = name
+ * self.index = index
+ * self.line = line # <<<<<<<<<<<<<<
+ * self.column = column
+ * self.buffer = buffer
+ */
+ __pyx_v_self->line = __pyx_v_line;
+
+ /* "_yaml.pyx":77
+ * self.index = index
+ * self.line = line
+ * self.column = column # <<<<<<<<<<<<<<
+ * self.buffer = buffer
+ * self.pointer = pointer
+ */
+ __pyx_v_self->column = __pyx_v_column;
+
+ /* "_yaml.pyx":78
+ * self.line = line
+ * self.column = column
+ * self.buffer = buffer # <<<<<<<<<<<<<<
+ * self.pointer = pointer
+ *
+ */
+ __Pyx_INCREF(__pyx_v_buffer);
+ __Pyx_GIVEREF(__pyx_v_buffer);
+ __Pyx_GOTREF(__pyx_v_self->buffer);
+ __Pyx_DECREF(__pyx_v_self->buffer);
+ __pyx_v_self->buffer = __pyx_v_buffer;
+
+ /* "_yaml.pyx":79
+ * self.column = column
+ * self.buffer = buffer
+ * self.pointer = pointer # <<<<<<<<<<<<<<
+ *
+ * def get_snippet(self):
+ */
+ __Pyx_INCREF(__pyx_v_pointer);
+ __Pyx_GIVEREF(__pyx_v_pointer);
+ __Pyx_GOTREF(__pyx_v_self->pointer);
+ __Pyx_DECREF(__pyx_v_self->pointer);
+ __pyx_v_self->pointer = __pyx_v_pointer;
+
+ /* "_yaml.pyx":72
+ * cdef readonly pointer
+ *
+ * def __init__(self, object name, size_t index, size_t line, size_t column, # <<<<<<<<<<<<<<
+ * object buffer, object pointer):
+ * self.name = name
+ */
+
+ /* function exit code */
+ __pyx_r = 0;
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":81
+ * self.pointer = pointer
+ *
+ * def get_snippet(self): # <<<<<<<<<<<<<<
+ * return None
+ *
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_3get_snippet(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_3get_snippet(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_snippet (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_2get_snippet(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_2get_snippet(CYTHON_UNUSED struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_snippet", 0);
+
+ /* "_yaml.pyx":82
+ *
+ * def get_snippet(self):
+ * return None # <<<<<<<<<<<<<<
+ *
+ * def __str__(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":81
+ * self.pointer = pointer
+ *
+ * def get_snippet(self): # <<<<<<<<<<<<<<
+ * return None
+ *
+ */
+
+ /* function exit code */
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":84
+ * return None
+ *
+ * def __str__(self): # <<<<<<<<<<<<<<
+ * where = " in \"%s\", line %d, column %d" \
+ * % (self.name, self.line+1, self.column+1)
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_5__str__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_5__str__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__str__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_4__str__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_4__str__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_v_where = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ __Pyx_RefNannySetupContext("__str__", 0);
+
+ /* "_yaml.pyx":86
+ * def __str__(self):
+ * where = " in \"%s\", line %d, column %d" \
+ * % (self.name, self.line+1, self.column+1) # <<<<<<<<<<<<<<
+ * return where
+ *
+ */
+ __pyx_t_1 = __Pyx_PyInt_FromSize_t((__pyx_v_self->line + 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 86, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyInt_FromSize_t((__pyx_v_self->column + 1)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 86, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = PyTuple_New(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 86, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_self->name);
+ __Pyx_GIVEREF(__pyx_v_self->name);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->name);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_2);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyString_Format(__pyx_kp_s_in_s_line_d_column_d, __pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 86, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_v_where = ((PyObject*)__pyx_t_2);
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":87
+ * where = " in \"%s\", line %d, column %d" \
+ * % (self.name, self.line+1, self.column+1)
+ * return where # <<<<<<<<<<<<<<
+ *
+ * #class YAMLError(Exception):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_where);
+ __pyx_r = __pyx_v_where;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":84
+ * return None
+ *
+ * def __str__(self): # <<<<<<<<<<<<<<
+ * where = " in \"%s\", line %d, column %d" \
+ * % (self.name, self.line+1, self.column+1)
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.Mark.__str__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_where);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":65
+ *
+ * cdef class Mark:
+ * cdef readonly object name # <<<<<<<<<<<<<<
+ * cdef readonly size_t index
+ * cdef readonly size_t line
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_4name_1__get__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_4name_1__get__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_4name___get__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_4name___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__", 0);
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_self->name);
+ __pyx_r = __pyx_v_self->name;
+ goto __pyx_L0;
+
+ /* function exit code */
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":66
+ * cdef class Mark:
+ * cdef readonly object name
+ * cdef readonly size_t index # <<<<<<<<<<<<<<
+ * cdef readonly size_t line
+ * cdef readonly size_t column
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_5index_1__get__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_5index_1__get__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_5index___get__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_5index___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannySetupContext("__get__", 0);
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_self->index); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 66, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("_yaml.Mark.index.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":67
+ * cdef readonly object name
+ * cdef readonly size_t index
+ * cdef readonly size_t line # <<<<<<<<<<<<<<
+ * cdef readonly size_t column
+ * cdef readonly buffer
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_4line_1__get__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_4line_1__get__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_4line___get__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_4line___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannySetupContext("__get__", 0);
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_self->line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 67, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("_yaml.Mark.line.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":68
+ * cdef readonly size_t index
+ * cdef readonly size_t line
+ * cdef readonly size_t column # <<<<<<<<<<<<<<
+ * cdef readonly buffer
+ * cdef readonly pointer
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_6column_1__get__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_6column_1__get__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_6column___get__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_6column___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannySetupContext("__get__", 0);
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_self->column); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 68, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("_yaml.Mark.column.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":69
+ * cdef readonly size_t line
+ * cdef readonly size_t column
+ * cdef readonly buffer # <<<<<<<<<<<<<<
+ * cdef readonly pointer
+ *
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_6buffer_1__get__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_6buffer_1__get__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_6buffer___get__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_6buffer___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__", 0);
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_self->buffer);
+ __pyx_r = __pyx_v_self->buffer;
+ goto __pyx_L0;
+
+ /* function exit code */
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":70
+ * cdef readonly size_t column
+ * cdef readonly buffer
+ * cdef readonly pointer # <<<<<<<<<<<<<<
+ *
+ * def __init__(self, object name, size_t index, size_t line, size_t column,
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_7pointer_1__get__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_7pointer_1__get__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_7pointer___get__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_7pointer___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__", 0);
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_self->pointer);
+ __pyx_r = __pyx_v_self->pointer;
+ goto __pyx_L0;
+
+ /* function exit code */
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "(tree fragment)":1
+ * def __reduce_cython__(self): # <<<<<<<<<<<<<<
+ * cdef tuple state
+ * cdef object _dict
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_6__reduce_cython__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_6__reduce_cython__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_v_state = 0;
+ PyObject *__pyx_v__dict = 0;
+ int __pyx_v_use_setstate;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ int __pyx_t_7;
+ __Pyx_RefNannySetupContext("__reduce_cython__", 0);
+
+ /* "(tree fragment)":5
+ * cdef object _dict
+ * cdef bint use_setstate
+ * state = (self.buffer, self.column, self.index, self.line, self.name, self.pointer) # <<<<<<<<<<<<<<
+ * _dict = getattr(self, '__dict__', None)
+ * if _dict is not None:
+ */
+ __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_self->column); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_self->index); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_self->line); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->buffer);
+ __Pyx_GIVEREF(__pyx_v_self->buffer);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->buffer);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3);
+ __Pyx_INCREF(__pyx_v_self->name);
+ __Pyx_GIVEREF(__pyx_v_self->name);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, __pyx_v_self->name);
+ __Pyx_INCREF(__pyx_v_self->pointer);
+ __Pyx_GIVEREF(__pyx_v_self->pointer);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, __pyx_v_self->pointer);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_v_state = ((PyObject*)__pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "(tree fragment)":6
+ * cdef bint use_setstate
+ * state = (self.buffer, self.column, self.index, self.line, self.name, self.pointer)
+ * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<<
+ * if _dict is not None:
+ * state += (_dict,)
+ */
+ __pyx_t_4 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v__dict = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "(tree fragment)":7
+ * state = (self.buffer, self.column, self.index, self.line, self.name, self.pointer)
+ * _dict = getattr(self, '__dict__', None)
+ * if _dict is not None: # <<<<<<<<<<<<<<
+ * state += (_dict,)
+ * use_setstate = True
+ */
+ __pyx_t_5 = (__pyx_v__dict != Py_None);
+ __pyx_t_6 = (__pyx_t_5 != 0);
+ if (__pyx_t_6) {
+
+ /* "(tree fragment)":8
+ * _dict = getattr(self, '__dict__', None)
+ * if _dict is not None:
+ * state += (_dict,) # <<<<<<<<<<<<<<
+ * use_setstate = True
+ * else:
+ */
+ __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 8, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v__dict);
+ __Pyx_GIVEREF(__pyx_v__dict);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v__dict);
+ __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 8, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_3));
+ __pyx_t_3 = 0;
+
+ /* "(tree fragment)":9
+ * if _dict is not None:
+ * state += (_dict,)
+ * use_setstate = True # <<<<<<<<<<<<<<
+ * else:
+ * use_setstate = self.buffer is not None or self.name is not None or self.pointer is not None
+ */
+ __pyx_v_use_setstate = 1;
+
+ /* "(tree fragment)":7
+ * state = (self.buffer, self.column, self.index, self.line, self.name, self.pointer)
+ * _dict = getattr(self, '__dict__', None)
+ * if _dict is not None: # <<<<<<<<<<<<<<
+ * state += (_dict,)
+ * use_setstate = True
+ */
+ goto __pyx_L3;
+ }
+
+ /* "(tree fragment)":11
+ * use_setstate = True
+ * else:
+ * use_setstate = self.buffer is not None or self.name is not None or self.pointer is not None # <<<<<<<<<<<<<<
+ * if use_setstate:
+ * return __pyx_unpickle_Mark, (type(self), 0x3fa45b6, None), state
+ */
+ /*else*/ {
+ __pyx_t_5 = (__pyx_v_self->buffer != Py_None);
+ __pyx_t_7 = (__pyx_t_5 != 0);
+ if (!__pyx_t_7) {
+ } else {
+ __pyx_t_6 = __pyx_t_7;
+ goto __pyx_L4_bool_binop_done;
+ }
+ __pyx_t_7 = (__pyx_v_self->name != Py_None);
+ __pyx_t_5 = (__pyx_t_7 != 0);
+ if (!__pyx_t_5) {
+ } else {
+ __pyx_t_6 = __pyx_t_5;
+ goto __pyx_L4_bool_binop_done;
+ }
+ __pyx_t_5 = (__pyx_v_self->pointer != Py_None);
+ __pyx_t_7 = (__pyx_t_5 != 0);
+ __pyx_t_6 = __pyx_t_7;
+ __pyx_L4_bool_binop_done:;
+ __pyx_v_use_setstate = __pyx_t_6;
+ }
+ __pyx_L3:;
+
+ /* "(tree fragment)":12
+ * else:
+ * use_setstate = self.buffer is not None or self.name is not None or self.pointer is not None
+ * if use_setstate: # <<<<<<<<<<<<<<
+ * return __pyx_unpickle_Mark, (type(self), 0x3fa45b6, None), state
+ * else:
+ */
+ __pyx_t_6 = (__pyx_v_use_setstate != 0);
+ if (__pyx_t_6) {
+
+ /* "(tree fragment)":13
+ * use_setstate = self.buffer is not None or self.name is not None or self.pointer is not None
+ * if use_setstate:
+ * return __pyx_unpickle_Mark, (type(self), 0x3fa45b6, None), state # <<<<<<<<<<<<<<
+ * else:
+ * return __pyx_unpickle_Mark, (type(self), 0x3fa45b6, state)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_pyx_unpickle_Mark); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 13, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))));
+ __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))));
+ PyTuple_SET_ITEM(__pyx_t_4, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))));
+ __Pyx_INCREF(__pyx_int_66733494);
+ __Pyx_GIVEREF(__pyx_int_66733494);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_int_66733494);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, Py_None);
+ __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 13, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_4);
+ __Pyx_INCREF(__pyx_v_state);
+ __Pyx_GIVEREF(__pyx_v_state);
+ PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_state);
+ __pyx_t_3 = 0;
+ __pyx_t_4 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "(tree fragment)":12
+ * else:
+ * use_setstate = self.buffer is not None or self.name is not None or self.pointer is not None
+ * if use_setstate: # <<<<<<<<<<<<<<
+ * return __pyx_unpickle_Mark, (type(self), 0x3fa45b6, None), state
+ * else:
+ */
+ }
+
+ /* "(tree fragment)":15
+ * return __pyx_unpickle_Mark, (type(self), 0x3fa45b6, None), state
+ * else:
+ * return __pyx_unpickle_Mark, (type(self), 0x3fa45b6, state) # <<<<<<<<<<<<<<
+ * def __setstate_cython__(self, __pyx_state):
+ * __pyx_unpickle_Mark__set_state(self, __pyx_state)
+ */
+ /*else*/ {
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_pyx_unpickle_Mark); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 15, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 15, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))));
+ __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))));
+ PyTuple_SET_ITEM(__pyx_t_4, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))));
+ __Pyx_INCREF(__pyx_int_66733494);
+ __Pyx_GIVEREF(__pyx_int_66733494);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_int_66733494);
+ __Pyx_INCREF(__pyx_v_state);
+ __Pyx_GIVEREF(__pyx_v_state);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_state);
+ __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 15, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_4);
+ __pyx_t_2 = 0;
+ __pyx_t_4 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ }
+
+ /* "(tree fragment)":1
+ * def __reduce_cython__(self): # <<<<<<<<<<<<<<
+ * cdef tuple state
+ * cdef object _dict
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("_yaml.Mark.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_state);
+ __Pyx_XDECREF(__pyx_v__dict);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "(tree fragment)":16
+ * else:
+ * return __pyx_unpickle_Mark, (type(self), 0x3fa45b6, state)
+ * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
+ * __pyx_unpickle_Mark__set_state(self, __pyx_state)
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_8__setstate_cython__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_8__setstate_cython__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self, PyObject *__pyx_v___pyx_state) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannySetupContext("__setstate_cython__", 0);
+
+ /* "(tree fragment)":17
+ * return __pyx_unpickle_Mark, (type(self), 0x3fa45b6, state)
+ * def __setstate_cython__(self, __pyx_state):
+ * __pyx_unpickle_Mark__set_state(self, __pyx_state) # <<<<<<<<<<<<<<
+ */
+ if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 17, __pyx_L1_error)
+ __pyx_t_1 = __pyx_f_5_yaml___pyx_unpickle_Mark__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "(tree fragment)":16
+ * else:
+ * return __pyx_unpickle_Mark, (type(self), 0x3fa45b6, state)
+ * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
+ * __pyx_unpickle_Mark__set_state(self, __pyx_state)
+ */
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("_yaml.Mark.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":262
+ * cdef int unicode_source
+ *
+ * def __init__(self, stream): # <<<<<<<<<<<<<<
+ * cdef is_readable
+ * if yaml_parser_initialize(&self.parser) == 0:
+ */
+
+/* Python wrapper */
+static int __pyx_pw_5_yaml_7CParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static int __pyx_pw_5_yaml_7CParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ PyObject *__pyx_v_stream = 0;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__init__ (wrapper)", 0);
+ {
+ static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_stream,0};
+ PyObject* values[1] = {0};
+ if (unlikely(__pyx_kwds)) {
+ Py_ssize_t kw_args;
+ const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
+ switch (pos_args) {
+ case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ CYTHON_FALLTHROUGH;
+ case 0: break;
+ default: goto __pyx_L5_argtuple_error;
+ }
+ kw_args = PyDict_Size(__pyx_kwds);
+ switch (pos_args) {
+ case 0:
+ if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_stream)) != 0)) kw_args--;
+ else goto __pyx_L5_argtuple_error;
+ }
+ if (unlikely(kw_args > 0)) {
+ if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 262, __pyx_L3_error)
+ }
+ } else if (PyTuple_GET_SIZE(__pyx_args) != 1) {
+ goto __pyx_L5_argtuple_error;
+ } else {
+ values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ }
+ __pyx_v_stream = values[0];
+ }
+ goto __pyx_L4_argument_unpacking_done;
+ __pyx_L5_argtuple_error:;
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 262, __pyx_L3_error)
+ __pyx_L3_error:;
+ __Pyx_AddTraceback("_yaml.CParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __Pyx_RefNannyFinishContext();
+ return -1;
+ __pyx_L4_argument_unpacking_done:;
+ __pyx_r = __pyx_pf_5_yaml_7CParser___init__(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self), __pyx_v_stream);
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static int __pyx_pf_5_yaml_7CParser___init__(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_stream) {
+ PyObject *__pyx_v_is_readable = 0;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ PyObject *__pyx_t_5 = NULL;
+ int __pyx_t_6;
+ PyObject *__pyx_t_7 = NULL;
+ PyObject *__pyx_t_8 = NULL;
+ __Pyx_RefNannySetupContext("__init__", 0);
+ __Pyx_INCREF(__pyx_v_stream);
+
+ /* "_yaml.pyx":264
+ * def __init__(self, stream):
+ * cdef is_readable
+ * if yaml_parser_initialize(&self.parser) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * self.parsed_event.type = YAML_NO_EVENT
+ */
+ __pyx_t_1 = ((yaml_parser_initialize((&__pyx_v_self->parser)) == 0) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":265
+ * cdef is_readable
+ * if yaml_parser_initialize(&self.parser) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * self.parsed_event.type = YAML_NO_EVENT
+ * is_readable = 1
+ */
+ PyErr_NoMemory(); __PYX_ERR(0, 265, __pyx_L1_error)
+
+ /* "_yaml.pyx":264
+ * def __init__(self, stream):
+ * cdef is_readable
+ * if yaml_parser_initialize(&self.parser) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * self.parsed_event.type = YAML_NO_EVENT
+ */
+ }
+
+ /* "_yaml.pyx":266
+ * if yaml_parser_initialize(&self.parser) == 0:
+ * raise MemoryError
+ * self.parsed_event.type = YAML_NO_EVENT # <<<<<<<<<<<<<<
+ * is_readable = 1
+ * try:
+ */
+ __pyx_v_self->parsed_event.type = YAML_NO_EVENT;
+
+ /* "_yaml.pyx":267
+ * raise MemoryError
+ * self.parsed_event.type = YAML_NO_EVENT
+ * is_readable = 1 # <<<<<<<<<<<<<<
+ * try:
+ * stream.read
+ */
+ __Pyx_INCREF(__pyx_int_1);
+ __pyx_v_is_readable = __pyx_int_1;
+
+ /* "_yaml.pyx":268
+ * self.parsed_event.type = YAML_NO_EVENT
+ * is_readable = 1
+ * try: # <<<<<<<<<<<<<<
+ * stream.read
+ * except AttributeError:
+ */
+ {
+ __Pyx_PyThreadState_declare
+ __Pyx_PyThreadState_assign
+ __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4);
+ __Pyx_XGOTREF(__pyx_t_2);
+ __Pyx_XGOTREF(__pyx_t_3);
+ __Pyx_XGOTREF(__pyx_t_4);
+ /*try:*/ {
+
+ /* "_yaml.pyx":269
+ * is_readable = 1
+ * try:
+ * stream.read # <<<<<<<<<<<<<<
+ * except AttributeError:
+ * is_readable = 0
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_stream, __pyx_n_s_read); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 269, __pyx_L4_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":268
+ * self.parsed_event.type = YAML_NO_EVENT
+ * is_readable = 1
+ * try: # <<<<<<<<<<<<<<
+ * stream.read
+ * except AttributeError:
+ */
+ }
+ __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ goto __pyx_L9_try_end;
+ __pyx_L4_error:;
+ __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":270
+ * try:
+ * stream.read
+ * except AttributeError: # <<<<<<<<<<<<<<
+ * is_readable = 0
+ * self.unicode_source = 0
+ */
+ __pyx_t_6 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_AttributeError);
+ if (__pyx_t_6) {
+ __Pyx_AddTraceback("_yaml.CParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_7, &__pyx_t_8) < 0) __PYX_ERR(0, 270, __pyx_L6_except_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_GOTREF(__pyx_t_8);
+
+ /* "_yaml.pyx":271
+ * stream.read
+ * except AttributeError:
+ * is_readable = 0 # <<<<<<<<<<<<<<
+ * self.unicode_source = 0
+ * if is_readable:
+ */
+ __Pyx_INCREF(__pyx_int_0);
+ __Pyx_DECREF_SET(__pyx_v_is_readable, __pyx_int_0);
+ __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
+ goto __pyx_L5_exception_handled;
+ }
+ goto __pyx_L6_except_error;
+ __pyx_L6_except_error:;
+
+ /* "_yaml.pyx":268
+ * self.parsed_event.type = YAML_NO_EVENT
+ * is_readable = 1
+ * try: # <<<<<<<<<<<<<<
+ * stream.read
+ * except AttributeError:
+ */
+ __Pyx_XGIVEREF(__pyx_t_2);
+ __Pyx_XGIVEREF(__pyx_t_3);
+ __Pyx_XGIVEREF(__pyx_t_4);
+ __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
+ goto __pyx_L1_error;
+ __pyx_L5_exception_handled:;
+ __Pyx_XGIVEREF(__pyx_t_2);
+ __Pyx_XGIVEREF(__pyx_t_3);
+ __Pyx_XGIVEREF(__pyx_t_4);
+ __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
+ __pyx_L9_try_end:;
+ }
+
+ /* "_yaml.pyx":272
+ * except AttributeError:
+ * is_readable = 0
+ * self.unicode_source = 0 # <<<<<<<<<<<<<<
+ * if is_readable:
+ * self.stream = stream
+ */
+ __pyx_v_self->unicode_source = 0;
+
+ /* "_yaml.pyx":273
+ * is_readable = 0
+ * self.unicode_source = 0
+ * if is_readable: # <<<<<<<<<<<<<<
+ * self.stream = stream
+ * try:
+ */
+ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_is_readable); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 273, __pyx_L1_error)
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":274
+ * self.unicode_source = 0
+ * if is_readable:
+ * self.stream = stream # <<<<<<<<<<<<<<
+ * try:
+ * self.stream_name = stream.name
+ */
+ __Pyx_INCREF(__pyx_v_stream);
+ __Pyx_GIVEREF(__pyx_v_stream);
+ __Pyx_GOTREF(__pyx_v_self->stream);
+ __Pyx_DECREF(__pyx_v_self->stream);
+ __pyx_v_self->stream = __pyx_v_stream;
+
+ /* "_yaml.pyx":275
+ * if is_readable:
+ * self.stream = stream
+ * try: # <<<<<<<<<<<<<<
+ * self.stream_name = stream.name
+ * except AttributeError:
+ */
+ {
+ __Pyx_PyThreadState_declare
+ __Pyx_PyThreadState_assign
+ __Pyx_ExceptionSave(&__pyx_t_4, &__pyx_t_3, &__pyx_t_2);
+ __Pyx_XGOTREF(__pyx_t_4);
+ __Pyx_XGOTREF(__pyx_t_3);
+ __Pyx_XGOTREF(__pyx_t_2);
+ /*try:*/ {
+
+ /* "_yaml.pyx":276
+ * self.stream = stream
+ * try:
+ * self.stream_name = stream.name # <<<<<<<<<<<<<<
+ * except AttributeError:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_stream, __pyx_n_s_name); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 276, __pyx_L13_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_GIVEREF(__pyx_t_8);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_t_8;
+ __pyx_t_8 = 0;
+
+ /* "_yaml.pyx":275
+ * if is_readable:
+ * self.stream = stream
+ * try: # <<<<<<<<<<<<<<
+ * self.stream_name = stream.name
+ * except AttributeError:
+ */
+ }
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
+ goto __pyx_L18_try_end;
+ __pyx_L13_error:;
+ __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
+
+ /* "_yaml.pyx":277
+ * try:
+ * self.stream_name = stream.name
+ * except AttributeError: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * self.stream_name = '<file>'
+ */
+ __pyx_t_6 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_AttributeError);
+ if (__pyx_t_6) {
+ __Pyx_AddTraceback("_yaml.CParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ if (__Pyx_GetException(&__pyx_t_8, &__pyx_t_7, &__pyx_t_5) < 0) __PYX_ERR(0, 277, __pyx_L15_except_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":278
+ * self.stream_name = stream.name
+ * except AttributeError:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * self.stream_name = '<file>'
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":279
+ * except AttributeError:
+ * if PY_MAJOR_VERSION < 3:
+ * self.stream_name = '<file>' # <<<<<<<<<<<<<<
+ * else:
+ * self.stream_name = u'<file>'
+ */
+ __Pyx_INCREF(__pyx_kp_s_file);
+ __Pyx_GIVEREF(__pyx_kp_s_file);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_kp_s_file;
+
+ /* "_yaml.pyx":278
+ * self.stream_name = stream.name
+ * except AttributeError:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * self.stream_name = '<file>'
+ * else:
+ */
+ goto __pyx_L21;
+ }
+
+ /* "_yaml.pyx":281
+ * self.stream_name = '<file>'
+ * else:
+ * self.stream_name = u'<file>' # <<<<<<<<<<<<<<
+ * self.stream_cache = None
+ * self.stream_cache_len = 0
+ */
+ /*else*/ {
+ __Pyx_INCREF(__pyx_kp_u_file);
+ __Pyx_GIVEREF(__pyx_kp_u_file);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_kp_u_file;
+ }
+ __pyx_L21:;
+ __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
+ goto __pyx_L14_exception_handled;
+ }
+ goto __pyx_L15_except_error;
+ __pyx_L15_except_error:;
+
+ /* "_yaml.pyx":275
+ * if is_readable:
+ * self.stream = stream
+ * try: # <<<<<<<<<<<<<<
+ * self.stream_name = stream.name
+ * except AttributeError:
+ */
+ __Pyx_XGIVEREF(__pyx_t_4);
+ __Pyx_XGIVEREF(__pyx_t_3);
+ __Pyx_XGIVEREF(__pyx_t_2);
+ __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_3, __pyx_t_2);
+ goto __pyx_L1_error;
+ __pyx_L14_exception_handled:;
+ __Pyx_XGIVEREF(__pyx_t_4);
+ __Pyx_XGIVEREF(__pyx_t_3);
+ __Pyx_XGIVEREF(__pyx_t_2);
+ __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_3, __pyx_t_2);
+ __pyx_L18_try_end:;
+ }
+
+ /* "_yaml.pyx":282
+ * else:
+ * self.stream_name = u'<file>'
+ * self.stream_cache = None # <<<<<<<<<<<<<<
+ * self.stream_cache_len = 0
+ * self.stream_cache_pos = 0
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_GOTREF(__pyx_v_self->stream_cache);
+ __Pyx_DECREF(__pyx_v_self->stream_cache);
+ __pyx_v_self->stream_cache = Py_None;
+
+ /* "_yaml.pyx":283
+ * self.stream_name = u'<file>'
+ * self.stream_cache = None
+ * self.stream_cache_len = 0 # <<<<<<<<<<<<<<
+ * self.stream_cache_pos = 0
+ * yaml_parser_set_input(&self.parser, input_handler, <void *>self)
+ */
+ __pyx_v_self->stream_cache_len = 0;
+
+ /* "_yaml.pyx":284
+ * self.stream_cache = None
+ * self.stream_cache_len = 0
+ * self.stream_cache_pos = 0 # <<<<<<<<<<<<<<
+ * yaml_parser_set_input(&self.parser, input_handler, <void *>self)
+ * else:
+ */
+ __pyx_v_self->stream_cache_pos = 0;
+
+ /* "_yaml.pyx":285
+ * self.stream_cache_len = 0
+ * self.stream_cache_pos = 0
+ * yaml_parser_set_input(&self.parser, input_handler, <void *>self) # <<<<<<<<<<<<<<
+ * else:
+ * if PyUnicode_CheckExact(stream) != 0:
+ */
+ yaml_parser_set_input((&__pyx_v_self->parser), __pyx_f_5_yaml_input_handler, ((void *)__pyx_v_self));
+
+ /* "_yaml.pyx":273
+ * is_readable = 0
+ * self.unicode_source = 0
+ * if is_readable: # <<<<<<<<<<<<<<
+ * self.stream = stream
+ * try:
+ */
+ goto __pyx_L12;
+ }
+
+ /* "_yaml.pyx":287
+ * yaml_parser_set_input(&self.parser, input_handler, <void *>self)
+ * else:
+ * if PyUnicode_CheckExact(stream) != 0: # <<<<<<<<<<<<<<
+ * stream = PyUnicode_AsUTF8String(stream)
+ * if PY_MAJOR_VERSION < 3:
+ */
+ /*else*/ {
+ __pyx_t_1 = ((PyUnicode_CheckExact(__pyx_v_stream) != 0) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":288
+ * else:
+ * if PyUnicode_CheckExact(stream) != 0:
+ * stream = PyUnicode_AsUTF8String(stream) # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * self.stream_name = '<unicode string>'
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_stream); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 288, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_stream, __pyx_t_5);
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":289
+ * if PyUnicode_CheckExact(stream) != 0:
+ * stream = PyUnicode_AsUTF8String(stream)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * self.stream_name = '<unicode string>'
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":290
+ * stream = PyUnicode_AsUTF8String(stream)
+ * if PY_MAJOR_VERSION < 3:
+ * self.stream_name = '<unicode string>' # <<<<<<<<<<<<<<
+ * else:
+ * self.stream_name = u'<unicode string>'
+ */
+ __Pyx_INCREF(__pyx_kp_s_unicode_string);
+ __Pyx_GIVEREF(__pyx_kp_s_unicode_string);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_kp_s_unicode_string;
+
+ /* "_yaml.pyx":289
+ * if PyUnicode_CheckExact(stream) != 0:
+ * stream = PyUnicode_AsUTF8String(stream)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * self.stream_name = '<unicode string>'
+ * else:
+ */
+ goto __pyx_L23;
+ }
+
+ /* "_yaml.pyx":292
+ * self.stream_name = '<unicode string>'
+ * else:
+ * self.stream_name = u'<unicode string>' # <<<<<<<<<<<<<<
+ * self.unicode_source = 1
+ * else:
+ */
+ /*else*/ {
+ __Pyx_INCREF(__pyx_kp_u_unicode_string);
+ __Pyx_GIVEREF(__pyx_kp_u_unicode_string);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_kp_u_unicode_string;
+ }
+ __pyx_L23:;
+
+ /* "_yaml.pyx":293
+ * else:
+ * self.stream_name = u'<unicode string>'
+ * self.unicode_source = 1 # <<<<<<<<<<<<<<
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_v_self->unicode_source = 1;
+
+ /* "_yaml.pyx":287
+ * yaml_parser_set_input(&self.parser, input_handler, <void *>self)
+ * else:
+ * if PyUnicode_CheckExact(stream) != 0: # <<<<<<<<<<<<<<
+ * stream = PyUnicode_AsUTF8String(stream)
+ * if PY_MAJOR_VERSION < 3:
+ */
+ goto __pyx_L22;
+ }
+
+ /* "_yaml.pyx":295
+ * self.unicode_source = 1
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * self.stream_name = '<byte string>'
+ * else:
+ */
+ /*else*/ {
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":296
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * self.stream_name = '<byte string>' # <<<<<<<<<<<<<<
+ * else:
+ * self.stream_name = u'<byte string>'
+ */
+ __Pyx_INCREF(__pyx_kp_s_byte_string);
+ __Pyx_GIVEREF(__pyx_kp_s_byte_string);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_kp_s_byte_string;
+
+ /* "_yaml.pyx":295
+ * self.unicode_source = 1
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * self.stream_name = '<byte string>'
+ * else:
+ */
+ goto __pyx_L24;
+ }
+
+ /* "_yaml.pyx":298
+ * self.stream_name = '<byte string>'
+ * else:
+ * self.stream_name = u'<byte string>' # <<<<<<<<<<<<<<
+ * if PyString_CheckExact(stream) == 0:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ /*else*/ {
+ __Pyx_INCREF(__pyx_kp_u_byte_string);
+ __Pyx_GIVEREF(__pyx_kp_u_byte_string);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_kp_u_byte_string;
+ }
+ __pyx_L24:;
+ }
+ __pyx_L22:;
+
+ /* "_yaml.pyx":299
+ * else:
+ * self.stream_name = u'<byte string>'
+ * if PyString_CheckExact(stream) == 0: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string or stream input is required")
+ */
+ __pyx_t_1 = ((PyString_CheckExact(__pyx_v_stream) == 0) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":300
+ * self.stream_name = u'<byte string>'
+ * if PyString_CheckExact(stream) == 0:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("a string or stream input is required")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":301
+ * if PyString_CheckExact(stream) == 0:
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string or stream input is required") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"a string or stream input is required")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 301, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __PYX_ERR(0, 301, __pyx_L1_error)
+
+ /* "_yaml.pyx":300
+ * self.stream_name = u'<byte string>'
+ * if PyString_CheckExact(stream) == 0:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("a string or stream input is required")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":303
+ * raise TypeError("a string or stream input is required")
+ * else:
+ * raise TypeError(u"a string or stream input is required") # <<<<<<<<<<<<<<
+ * self.stream = stream
+ * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream))
+ */
+ /*else*/ {
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 303, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __PYX_ERR(0, 303, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":299
+ * else:
+ * self.stream_name = u'<byte string>'
+ * if PyString_CheckExact(stream) == 0: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string or stream input is required")
+ */
+ }
+
+ /* "_yaml.pyx":304
+ * else:
+ * raise TypeError(u"a string or stream input is required")
+ * self.stream = stream # <<<<<<<<<<<<<<
+ * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream))
+ * self.current_token = None
+ */
+ __Pyx_INCREF(__pyx_v_stream);
+ __Pyx_GIVEREF(__pyx_v_stream);
+ __Pyx_GOTREF(__pyx_v_self->stream);
+ __Pyx_DECREF(__pyx_v_self->stream);
+ __pyx_v_self->stream = __pyx_v_stream;
+
+ /* "_yaml.pyx":305
+ * raise TypeError(u"a string or stream input is required")
+ * self.stream = stream
+ * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream)) # <<<<<<<<<<<<<<
+ * self.current_token = None
+ * self.current_event = None
+ */
+ yaml_parser_set_input_string((&__pyx_v_self->parser), PyString_AS_STRING(__pyx_v_stream), PyString_GET_SIZE(__pyx_v_stream));
+ }
+ __pyx_L12:;
+
+ /* "_yaml.pyx":306
+ * self.stream = stream
+ * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream))
+ * self.current_token = None # <<<<<<<<<<<<<<
+ * self.current_event = None
+ * self.anchors = {}
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_GOTREF(__pyx_v_self->current_token);
+ __Pyx_DECREF(__pyx_v_self->current_token);
+ __pyx_v_self->current_token = Py_None;
+
+ /* "_yaml.pyx":307
+ * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream))
+ * self.current_token = None
+ * self.current_event = None # <<<<<<<<<<<<<<
+ * self.anchors = {}
+ *
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_GOTREF(__pyx_v_self->current_event);
+ __Pyx_DECREF(__pyx_v_self->current_event);
+ __pyx_v_self->current_event = Py_None;
+
+ /* "_yaml.pyx":308
+ * self.current_token = None
+ * self.current_event = None
+ * self.anchors = {} # <<<<<<<<<<<<<<
+ *
+ * def __dealloc__(self):
+ */
+ __pyx_t_5 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 308, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_5);
+ __Pyx_GOTREF(__pyx_v_self->anchors);
+ __Pyx_DECREF(__pyx_v_self->anchors);
+ __pyx_v_self->anchors = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":262
+ * cdef int unicode_source
+ *
+ * def __init__(self, stream): # <<<<<<<<<<<<<<
+ * cdef is_readable
+ * if yaml_parser_initialize(&self.parser) == 0:
+ */
+
+ /* function exit code */
+ __pyx_r = 0;
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_XDECREF(__pyx_t_8);
+ __Pyx_AddTraceback("_yaml.CParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = -1;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_is_readable);
+ __Pyx_XDECREF(__pyx_v_stream);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":310
+ * self.anchors = {}
+ *
+ * def __dealloc__(self): # <<<<<<<<<<<<<<
+ * yaml_parser_delete(&self.parser)
+ * yaml_event_delete(&self.parsed_event)
+ */
+
+/* Python wrapper */
+static void __pyx_pw_5_yaml_7CParser_3__dealloc__(PyObject *__pyx_v_self); /*proto*/
+static void __pyx_pw_5_yaml_7CParser_3__dealloc__(PyObject *__pyx_v_self) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0);
+ __pyx_pf_5_yaml_7CParser_2__dealloc__(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+}
+
+static void __pyx_pf_5_yaml_7CParser_2__dealloc__(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__dealloc__", 0);
+
+ /* "_yaml.pyx":311
+ *
+ * def __dealloc__(self):
+ * yaml_parser_delete(&self.parser) # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ *
+ */
+ yaml_parser_delete((&__pyx_v_self->parser));
+
+ /* "_yaml.pyx":312
+ * def __dealloc__(self):
+ * yaml_parser_delete(&self.parser)
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ *
+ * def dispose(self):
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":310
+ * self.anchors = {}
+ *
+ * def __dealloc__(self): # <<<<<<<<<<<<<<
+ * yaml_parser_delete(&self.parser)
+ * yaml_event_delete(&self.parsed_event)
+ */
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+}
+
+/* "_yaml.pyx":314
+ * yaml_event_delete(&self.parsed_event)
+ *
+ * def dispose(self): # <<<<<<<<<<<<<<
+ * pass
+ *
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_5dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_5dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("dispose (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_4dispose(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_4dispose(CYTHON_UNUSED struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("dispose", 0);
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":317
+ * pass
+ *
+ * cdef object _parser_error(self): # <<<<<<<<<<<<<<
+ * if self.parser.error == YAML_MEMORY_ERROR:
+ * return MemoryError
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__parser_error(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_context_mark = NULL;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_problem_mark = NULL;
+ PyObject *__pyx_v_context = NULL;
+ PyObject *__pyx_v_problem = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ PyObject *__pyx_t_5 = NULL;
+ PyObject *__pyx_t_6 = NULL;
+ PyObject *__pyx_t_7 = NULL;
+ int __pyx_t_8;
+ PyObject *__pyx_t_9 = NULL;
+ __Pyx_RefNannySetupContext("_parser_error", 0);
+
+ /* "_yaml.pyx":318
+ *
+ * cdef object _parser_error(self):
+ * if self.parser.error == YAML_MEMORY_ERROR: # <<<<<<<<<<<<<<
+ * return MemoryError
+ * elif self.parser.error == YAML_READER_ERROR:
+ */
+ switch (__pyx_v_self->parser.error) {
+ case YAML_MEMORY_ERROR:
+
+ /* "_yaml.pyx":319
+ * cdef object _parser_error(self):
+ * if self.parser.error == YAML_MEMORY_ERROR:
+ * return MemoryError # <<<<<<<<<<<<<<
+ * elif self.parser.error == YAML_READER_ERROR:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_builtin_MemoryError);
+ __pyx_r = __pyx_builtin_MemoryError;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":318
+ *
+ * cdef object _parser_error(self):
+ * if self.parser.error == YAML_MEMORY_ERROR: # <<<<<<<<<<<<<<
+ * return MemoryError
+ * elif self.parser.error == YAML_READER_ERROR:
+ */
+ break;
+ case YAML_READER_ERROR:
+
+ /* "_yaml.pyx":321
+ * return MemoryError
+ * elif self.parser.error == YAML_READER_ERROR:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ * self.parser.problem_value, '?', self.parser.problem)
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":322
+ * elif self.parser.error == YAML_READER_ERROR:
+ * if PY_MAJOR_VERSION < 3:
+ * return ReaderError(self.stream_name, self.parser.problem_offset, # <<<<<<<<<<<<<<
+ * self.parser.problem_value, '?', self.parser.problem)
+ * else:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_ReaderError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 322, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parser.problem_offset); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 322, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":323
+ * if PY_MAJOR_VERSION < 3:
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ * self.parser.problem_value, '?', self.parser.problem) # <<<<<<<<<<<<<<
+ * else:
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ */
+ __pyx_t_5 = __Pyx_PyInt_From_int(__pyx_v_self->parser.problem_value); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 323, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = __Pyx_PyBytes_FromString(__pyx_v_self->parser.problem); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 323, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_7 = NULL;
+ __pyx_t_8 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_7)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_7);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ __pyx_t_8 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_3)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_7, __pyx_v_self->stream_name, __pyx_t_4, __pyx_t_5, __pyx_kp_s__3, __pyx_t_6};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 5+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 322, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_7, __pyx_v_self->stream_name, __pyx_t_4, __pyx_t_5, __pyx_kp_s__3, __pyx_t_6};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 5+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 322, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_9 = PyTuple_New(5+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 322, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_9);
+ if (__pyx_t_7) {
+ __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_7); __pyx_t_7 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_5);
+ PyTuple_SET_ITEM(__pyx_t_9, 2+__pyx_t_8, __pyx_t_5);
+ __Pyx_INCREF(__pyx_kp_s__3);
+ __Pyx_GIVEREF(__pyx_kp_s__3);
+ PyTuple_SET_ITEM(__pyx_t_9, 3+__pyx_t_8, __pyx_kp_s__3);
+ __Pyx_GIVEREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_9, 4+__pyx_t_8, __pyx_t_6);
+ __pyx_t_4 = 0;
+ __pyx_t_5 = 0;
+ __pyx_t_6 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_9, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 322, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":321
+ * return MemoryError
+ * elif self.parser.error == YAML_READER_ERROR:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ * self.parser.problem_value, '?', self.parser.problem)
+ */
+ }
+
+ /* "_yaml.pyx":325
+ * self.parser.problem_value, '?', self.parser.problem)
+ * else:
+ * return ReaderError(self.stream_name, self.parser.problem_offset, # <<<<<<<<<<<<<<
+ * self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem))
+ * elif self.parser.error == YAML_SCANNER_ERROR \
+ */
+ /*else*/ {
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_ReaderError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 325, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_9 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parser.problem_offset); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 325, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_9);
+
+ /* "_yaml.pyx":326
+ * else:
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ * self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem)) # <<<<<<<<<<<<<<
+ * elif self.parser.error == YAML_SCANNER_ERROR \
+ * or self.parser.error == YAML_PARSER_ERROR:
+ */
+ __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_self->parser.problem_value); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 326, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_5 = PyUnicode_FromString(__pyx_v_self->parser.problem); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 326, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_4 = NULL;
+ __pyx_t_8 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ __pyx_t_8 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_3)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_4, __pyx_v_self->stream_name, __pyx_t_9, __pyx_t_6, __pyx_kp_u__3, __pyx_t_5};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 5+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 325, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_4, __pyx_v_self->stream_name, __pyx_t_9, __pyx_t_6, __pyx_kp_u__3, __pyx_t_5};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 5+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 325, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_7 = PyTuple_New(5+__pyx_t_8); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 325, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ if (__pyx_t_4) {
+ __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_4); __pyx_t_4 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_8, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_9);
+ PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_8, __pyx_t_9);
+ __Pyx_GIVEREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_8, __pyx_t_6);
+ __Pyx_INCREF(__pyx_kp_u__3);
+ __Pyx_GIVEREF(__pyx_kp_u__3);
+ PyTuple_SET_ITEM(__pyx_t_7, 3+__pyx_t_8, __pyx_kp_u__3);
+ __Pyx_GIVEREF(__pyx_t_5);
+ PyTuple_SET_ITEM(__pyx_t_7, 4+__pyx_t_8, __pyx_t_5);
+ __pyx_t_9 = 0;
+ __pyx_t_6 = 0;
+ __pyx_t_5 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_7, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 325, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+ }
+
+ /* "_yaml.pyx":320
+ * if self.parser.error == YAML_MEMORY_ERROR:
+ * return MemoryError
+ * elif self.parser.error == YAML_READER_ERROR: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ */
+ break;
+ case YAML_SCANNER_ERROR:
+
+ /* "_yaml.pyx":327
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ * self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem))
+ * elif self.parser.error == YAML_SCANNER_ERROR \ # <<<<<<<<<<<<<<
+ * or self.parser.error == YAML_PARSER_ERROR:
+ * context_mark = None
+ */
+ case YAML_PARSER_ERROR:
+
+ /* "_yaml.pyx":329
+ * elif self.parser.error == YAML_SCANNER_ERROR \
+ * or self.parser.error == YAML_PARSER_ERROR:
+ * context_mark = None # <<<<<<<<<<<<<<
+ * problem_mark = None
+ * if self.parser.context != NULL:
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_context_mark = ((struct __pyx_obj_5_yaml_Mark *)Py_None);
+
+ /* "_yaml.pyx":330
+ * or self.parser.error == YAML_PARSER_ERROR:
+ * context_mark = None
+ * problem_mark = None # <<<<<<<<<<<<<<
+ * if self.parser.context != NULL:
+ * context_mark = Mark(self.stream_name,
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_problem_mark = ((struct __pyx_obj_5_yaml_Mark *)Py_None);
+
+ /* "_yaml.pyx":331
+ * context_mark = None
+ * problem_mark = None
+ * if self.parser.context != NULL: # <<<<<<<<<<<<<<
+ * context_mark = Mark(self.stream_name,
+ * self.parser.context_mark.index,
+ */
+ __pyx_t_1 = ((__pyx_v_self->parser.context != NULL) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":333
+ * if self.parser.context != NULL:
+ * context_mark = Mark(self.stream_name,
+ * self.parser.context_mark.index, # <<<<<<<<<<<<<<
+ * self.parser.context_mark.line,
+ * self.parser.context_mark.column, None, None)
+ */
+ __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parser.context_mark.index); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 333, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":334
+ * context_mark = Mark(self.stream_name,
+ * self.parser.context_mark.index,
+ * self.parser.context_mark.line, # <<<<<<<<<<<<<<
+ * self.parser.context_mark.column, None, None)
+ * if self.parser.problem != NULL:
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parser.context_mark.line); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 334, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":335
+ * self.parser.context_mark.index,
+ * self.parser.context_mark.line,
+ * self.parser.context_mark.column, None, None) # <<<<<<<<<<<<<<
+ * if self.parser.problem != NULL:
+ * problem_mark = Mark(self.stream_name,
+ */
+ __pyx_t_7 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parser.context_mark.column); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 335, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+
+ /* "_yaml.pyx":332
+ * problem_mark = None
+ * if self.parser.context != NULL:
+ * context_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parser.context_mark.index,
+ * self.parser.context_mark.line,
+ */
+ __pyx_t_5 = PyTuple_New(6); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 332, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_7);
+ PyTuple_SET_ITEM(__pyx_t_5, 3, __pyx_t_7);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_5, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_5, 5, Py_None);
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_7 = 0;
+ __pyx_t_7 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_5, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 332, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF_SET(__pyx_v_context_mark, ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_7));
+ __pyx_t_7 = 0;
+
+ /* "_yaml.pyx":331
+ * context_mark = None
+ * problem_mark = None
+ * if self.parser.context != NULL: # <<<<<<<<<<<<<<
+ * context_mark = Mark(self.stream_name,
+ * self.parser.context_mark.index,
+ */
+ }
+
+ /* "_yaml.pyx":336
+ * self.parser.context_mark.line,
+ * self.parser.context_mark.column, None, None)
+ * if self.parser.problem != NULL: # <<<<<<<<<<<<<<
+ * problem_mark = Mark(self.stream_name,
+ * self.parser.problem_mark.index,
+ */
+ __pyx_t_1 = ((__pyx_v_self->parser.problem != NULL) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":338
+ * if self.parser.problem != NULL:
+ * problem_mark = Mark(self.stream_name,
+ * self.parser.problem_mark.index, # <<<<<<<<<<<<<<
+ * self.parser.problem_mark.line,
+ * self.parser.problem_mark.column, None, None)
+ */
+ __pyx_t_7 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parser.problem_mark.index); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 338, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+
+ /* "_yaml.pyx":339
+ * problem_mark = Mark(self.stream_name,
+ * self.parser.problem_mark.index,
+ * self.parser.problem_mark.line, # <<<<<<<<<<<<<<
+ * self.parser.problem_mark.column, None, None)
+ * context = None
+ */
+ __pyx_t_5 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parser.problem_mark.line); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 339, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":340
+ * self.parser.problem_mark.index,
+ * self.parser.problem_mark.line,
+ * self.parser.problem_mark.column, None, None) # <<<<<<<<<<<<<<
+ * context = None
+ * if self.parser.context != NULL:
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parser.problem_mark.column); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 340, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":337
+ * self.parser.context_mark.column, None, None)
+ * if self.parser.problem != NULL:
+ * problem_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parser.problem_mark.index,
+ * self.parser.problem_mark.line,
+ */
+ __pyx_t_2 = PyTuple_New(6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 337, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_7);
+ PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_7);
+ __Pyx_GIVEREF(__pyx_t_5);
+ PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_2, 3, __pyx_t_3);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_2, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_2, 5, Py_None);
+ __pyx_t_7 = 0;
+ __pyx_t_5 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 337, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF_SET(__pyx_v_problem_mark, ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3));
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":336
+ * self.parser.context_mark.line,
+ * self.parser.context_mark.column, None, None)
+ * if self.parser.problem != NULL: # <<<<<<<<<<<<<<
+ * problem_mark = Mark(self.stream_name,
+ * self.parser.problem_mark.index,
+ */
+ }
+
+ /* "_yaml.pyx":341
+ * self.parser.problem_mark.line,
+ * self.parser.problem_mark.column, None, None)
+ * context = None # <<<<<<<<<<<<<<
+ * if self.parser.context != NULL:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_context = Py_None;
+
+ /* "_yaml.pyx":342
+ * self.parser.problem_mark.column, None, None)
+ * context = None
+ * if self.parser.context != NULL: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * context = self.parser.context
+ */
+ __pyx_t_1 = ((__pyx_v_self->parser.context != NULL) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":343
+ * context = None
+ * if self.parser.context != NULL:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * context = self.parser.context
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":344
+ * if self.parser.context != NULL:
+ * if PY_MAJOR_VERSION < 3:
+ * context = self.parser.context # <<<<<<<<<<<<<<
+ * else:
+ * context = PyUnicode_FromString(self.parser.context)
+ */
+ __pyx_t_3 = __Pyx_PyBytes_FromString(__pyx_v_self->parser.context); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 344, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF_SET(__pyx_v_context, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":343
+ * context = None
+ * if self.parser.context != NULL:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * context = self.parser.context
+ * else:
+ */
+ goto __pyx_L7;
+ }
+
+ /* "_yaml.pyx":346
+ * context = self.parser.context
+ * else:
+ * context = PyUnicode_FromString(self.parser.context) # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * problem = self.parser.problem
+ */
+ /*else*/ {
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_self->parser.context); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 346, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF_SET(__pyx_v_context, __pyx_t_3);
+ __pyx_t_3 = 0;
+ }
+ __pyx_L7:;
+
+ /* "_yaml.pyx":342
+ * self.parser.problem_mark.column, None, None)
+ * context = None
+ * if self.parser.context != NULL: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * context = self.parser.context
+ */
+ }
+
+ /* "_yaml.pyx":347
+ * else:
+ * context = PyUnicode_FromString(self.parser.context)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * problem = self.parser.problem
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":348
+ * context = PyUnicode_FromString(self.parser.context)
+ * if PY_MAJOR_VERSION < 3:
+ * problem = self.parser.problem # <<<<<<<<<<<<<<
+ * else:
+ * problem = PyUnicode_FromString(self.parser.problem)
+ */
+ __pyx_t_3 = __Pyx_PyBytes_FromString(__pyx_v_self->parser.problem); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 348, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_problem = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":347
+ * else:
+ * context = PyUnicode_FromString(self.parser.context)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * problem = self.parser.problem
+ * else:
+ */
+ goto __pyx_L8;
+ }
+
+ /* "_yaml.pyx":350
+ * problem = self.parser.problem
+ * else:
+ * problem = PyUnicode_FromString(self.parser.problem) # <<<<<<<<<<<<<<
+ * if self.parser.error == YAML_SCANNER_ERROR:
+ * return ScannerError(context, context_mark, problem, problem_mark)
+ */
+ /*else*/ {
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_self->parser.problem); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 350, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_problem = __pyx_t_3;
+ __pyx_t_3 = 0;
+ }
+ __pyx_L8:;
+
+ /* "_yaml.pyx":351
+ * else:
+ * problem = PyUnicode_FromString(self.parser.problem)
+ * if self.parser.error == YAML_SCANNER_ERROR: # <<<<<<<<<<<<<<
+ * return ScannerError(context, context_mark, problem, problem_mark)
+ * else:
+ */
+ __pyx_t_1 = ((__pyx_v_self->parser.error == YAML_SCANNER_ERROR) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":352
+ * problem = PyUnicode_FromString(self.parser.problem)
+ * if self.parser.error == YAML_SCANNER_ERROR:
+ * return ScannerError(context, context_mark, problem, problem_mark) # <<<<<<<<<<<<<<
+ * else:
+ * return ParserError(context, context_mark, problem, problem_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_ScannerError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 352, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_5 = NULL;
+ __pyx_t_8 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) {
+ __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2);
+ if (likely(__pyx_t_5)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);
+ __Pyx_INCREF(__pyx_t_5);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_2, function);
+ __pyx_t_8 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_2)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_5, __pyx_v_context, ((PyObject *)__pyx_v_context_mark), __pyx_v_problem, ((PyObject *)__pyx_v_problem_mark)};
+ __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_8, 4+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 352, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_GOTREF(__pyx_t_3);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_5, __pyx_v_context, ((PyObject *)__pyx_v_context_mark), __pyx_v_problem, ((PyObject *)__pyx_v_problem_mark)};
+ __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_8, 4+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 352, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_GOTREF(__pyx_t_3);
+ } else
+ #endif
+ {
+ __pyx_t_7 = PyTuple_New(4+__pyx_t_8); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 352, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ if (__pyx_t_5) {
+ __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5); __pyx_t_5 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_context);
+ __Pyx_GIVEREF(__pyx_v_context);
+ PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_8, __pyx_v_context);
+ __Pyx_INCREF(((PyObject *)__pyx_v_context_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_context_mark));
+ PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_8, ((PyObject *)__pyx_v_context_mark));
+ __Pyx_INCREF(__pyx_v_problem);
+ __Pyx_GIVEREF(__pyx_v_problem);
+ PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_8, __pyx_v_problem);
+ __Pyx_INCREF(((PyObject *)__pyx_v_problem_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_problem_mark));
+ PyTuple_SET_ITEM(__pyx_t_7, 3+__pyx_t_8, ((PyObject *)__pyx_v_problem_mark));
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 352, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":351
+ * else:
+ * problem = PyUnicode_FromString(self.parser.problem)
+ * if self.parser.error == YAML_SCANNER_ERROR: # <<<<<<<<<<<<<<
+ * return ScannerError(context, context_mark, problem, problem_mark)
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":354
+ * return ScannerError(context, context_mark, problem, problem_mark)
+ * else:
+ * return ParserError(context, context_mark, problem, problem_mark) # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("no parser error")
+ */
+ /*else*/ {
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_ParserError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 354, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_7 = NULL;
+ __pyx_t_8 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) {
+ __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_2);
+ if (likely(__pyx_t_7)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);
+ __Pyx_INCREF(__pyx_t_7);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_2, function);
+ __pyx_t_8 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_2)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_v_context, ((PyObject *)__pyx_v_context_mark), __pyx_v_problem, ((PyObject *)__pyx_v_problem_mark)};
+ __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_8, 4+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 354, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_3);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_v_context, ((PyObject *)__pyx_v_context_mark), __pyx_v_problem, ((PyObject *)__pyx_v_problem_mark)};
+ __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_8, 4+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 354, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_3);
+ } else
+ #endif
+ {
+ __pyx_t_5 = PyTuple_New(4+__pyx_t_8); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 354, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ if (__pyx_t_7) {
+ __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_7); __pyx_t_7 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_context);
+ __Pyx_GIVEREF(__pyx_v_context);
+ PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_8, __pyx_v_context);
+ __Pyx_INCREF(((PyObject *)__pyx_v_context_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_context_mark));
+ PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_8, ((PyObject *)__pyx_v_context_mark));
+ __Pyx_INCREF(__pyx_v_problem);
+ __Pyx_GIVEREF(__pyx_v_problem);
+ PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_8, __pyx_v_problem);
+ __Pyx_INCREF(((PyObject *)__pyx_v_problem_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_problem_mark));
+ PyTuple_SET_ITEM(__pyx_t_5, 3+__pyx_t_8, ((PyObject *)__pyx_v_problem_mark));
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_5, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 354, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ }
+
+ /* "_yaml.pyx":327
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ * self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem))
+ * elif self.parser.error == YAML_SCANNER_ERROR \ # <<<<<<<<<<<<<<
+ * or self.parser.error == YAML_PARSER_ERROR:
+ * context_mark = None
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":355
+ * else:
+ * return ParserError(context, context_mark, problem, problem_mark)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("no parser error")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":356
+ * return ParserError(context, context_mark, problem, problem_mark)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("no parser error") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"no parser error")
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 356, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __PYX_ERR(0, 356, __pyx_L1_error)
+
+ /* "_yaml.pyx":355
+ * else:
+ * return ParserError(context, context_mark, problem, problem_mark)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("no parser error")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":358
+ * raise ValueError("no parser error")
+ * else:
+ * raise ValueError(u"no parser error") # <<<<<<<<<<<<<<
+ *
+ * def raw_scan(self):
+ */
+ /*else*/ {
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 358, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __PYX_ERR(0, 358, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":317
+ * pass
+ *
+ * cdef object _parser_error(self): # <<<<<<<<<<<<<<
+ * if self.parser.error == YAML_MEMORY_ERROR:
+ * return MemoryError
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_XDECREF(__pyx_t_9);
+ __Pyx_AddTraceback("_yaml.CParser._parser_error", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_context_mark);
+ __Pyx_XDECREF((PyObject *)__pyx_v_problem_mark);
+ __Pyx_XDECREF(__pyx_v_context);
+ __Pyx_XDECREF(__pyx_v_problem);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":360
+ * raise ValueError(u"no parser error")
+ *
+ * def raw_scan(self): # <<<<<<<<<<<<<<
+ * cdef yaml_token_t token
+ * cdef int done
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_7raw_scan(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_7raw_scan(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("raw_scan (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_6raw_scan(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_6raw_scan(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ yaml_token_t __pyx_v_token;
+ int __pyx_v_done;
+ int __pyx_v_count;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ __Pyx_RefNannySetupContext("raw_scan", 0);
+
+ /* "_yaml.pyx":364
+ * cdef int done
+ * cdef int count
+ * count = 0 # <<<<<<<<<<<<<<
+ * done = 0
+ * while done == 0:
+ */
+ __pyx_v_count = 0;
+
+ /* "_yaml.pyx":365
+ * cdef int count
+ * count = 0
+ * done = 0 # <<<<<<<<<<<<<<
+ * while done == 0:
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ */
+ __pyx_v_done = 0;
+
+ /* "_yaml.pyx":366
+ * count = 0
+ * done = 0
+ * while done == 0: # <<<<<<<<<<<<<<
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ * error = self._parser_error()
+ */
+ while (1) {
+ __pyx_t_1 = ((__pyx_v_done == 0) != 0);
+ if (!__pyx_t_1) break;
+
+ /* "_yaml.pyx":367
+ * done = 0
+ * while done == 0:
+ * if yaml_parser_scan(&self.parser, &token) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ __pyx_t_2 = yaml_parser_scan((&__pyx_v_self->parser), (&__pyx_v_token)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 367, __pyx_L1_error)
+ __pyx_t_1 = ((__pyx_t_2 == 0) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":368
+ * while done == 0:
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ * error = self._parser_error() # <<<<<<<<<<<<<<
+ * raise error
+ * if token.type == YAML_NO_TOKEN:
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parser_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 368, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_error = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":369
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ * error = self._parser_error()
+ * raise error # <<<<<<<<<<<<<<
+ * if token.type == YAML_NO_TOKEN:
+ * done = 1
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 369, __pyx_L1_error)
+
+ /* "_yaml.pyx":367
+ * done = 0
+ * while done == 0:
+ * if yaml_parser_scan(&self.parser, &token) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":370
+ * error = self._parser_error()
+ * raise error
+ * if token.type == YAML_NO_TOKEN: # <<<<<<<<<<<<<<
+ * done = 1
+ * else:
+ */
+ __pyx_t_1 = ((__pyx_v_token.type == YAML_NO_TOKEN) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":371
+ * raise error
+ * if token.type == YAML_NO_TOKEN:
+ * done = 1 # <<<<<<<<<<<<<<
+ * else:
+ * count = count+1
+ */
+ __pyx_v_done = 1;
+
+ /* "_yaml.pyx":370
+ * error = self._parser_error()
+ * raise error
+ * if token.type == YAML_NO_TOKEN: # <<<<<<<<<<<<<<
+ * done = 1
+ * else:
+ */
+ goto __pyx_L6;
+ }
+
+ /* "_yaml.pyx":373
+ * done = 1
+ * else:
+ * count = count+1 # <<<<<<<<<<<<<<
+ * yaml_token_delete(&token)
+ * return count
+ */
+ /*else*/ {
+ __pyx_v_count = (__pyx_v_count + 1);
+ }
+ __pyx_L6:;
+
+ /* "_yaml.pyx":374
+ * else:
+ * count = count+1
+ * yaml_token_delete(&token) # <<<<<<<<<<<<<<
+ * return count
+ *
+ */
+ yaml_token_delete((&__pyx_v_token));
+ }
+
+ /* "_yaml.pyx":375
+ * count = count+1
+ * yaml_token_delete(&token)
+ * return count # <<<<<<<<<<<<<<
+ *
+ * cdef object _scan(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_count); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 375, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":360
+ * raise ValueError(u"no parser error")
+ *
+ * def raw_scan(self): # <<<<<<<<<<<<<<
+ * cdef yaml_token_t token
+ * cdef int done
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.raw_scan", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":377
+ * return count
+ *
+ * cdef object _scan(self): # <<<<<<<<<<<<<<
+ * cdef yaml_token_t token
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__scan(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ yaml_token_t __pyx_v_token;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_v_token_object = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ __Pyx_RefNannySetupContext("_scan", 0);
+
+ /* "_yaml.pyx":379
+ * cdef object _scan(self):
+ * cdef yaml_token_t token
+ * if yaml_parser_scan(&self.parser, &token) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ __pyx_t_1 = yaml_parser_scan((&__pyx_v_self->parser), (&__pyx_v_token)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 379, __pyx_L1_error)
+ __pyx_t_2 = ((__pyx_t_1 == 0) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":380
+ * cdef yaml_token_t token
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ * error = self._parser_error() # <<<<<<<<<<<<<<
+ * raise error
+ * token_object = self._token_to_object(&token)
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parser_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 380, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_error = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":381
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ * error = self._parser_error()
+ * raise error # <<<<<<<<<<<<<<
+ * token_object = self._token_to_object(&token)
+ * yaml_token_delete(&token)
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 381, __pyx_L1_error)
+
+ /* "_yaml.pyx":379
+ * cdef object _scan(self):
+ * cdef yaml_token_t token
+ * if yaml_parser_scan(&self.parser, &token) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":382
+ * error = self._parser_error()
+ * raise error
+ * token_object = self._token_to_object(&token) # <<<<<<<<<<<<<<
+ * yaml_token_delete(&token)
+ * return token_object
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_token_to_object(__pyx_v_self, (&__pyx_v_token)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 382, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_token_object = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":383
+ * raise error
+ * token_object = self._token_to_object(&token)
+ * yaml_token_delete(&token) # <<<<<<<<<<<<<<
+ * return token_object
+ *
+ */
+ yaml_token_delete((&__pyx_v_token));
+
+ /* "_yaml.pyx":384
+ * token_object = self._token_to_object(&token)
+ * yaml_token_delete(&token)
+ * return token_object # <<<<<<<<<<<<<<
+ *
+ * cdef object _token_to_object(self, yaml_token_t *token):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_token_object);
+ __pyx_r = __pyx_v_token_object;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":377
+ * return count
+ *
+ * cdef object _scan(self): # <<<<<<<<<<<<<<
+ * cdef yaml_token_t token
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser._scan", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XDECREF(__pyx_v_token_object);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":386
+ * return token_object
+ *
+ * cdef object _token_to_object(self, yaml_token_t *token): # <<<<<<<<<<<<<<
+ * start_mark = Mark(self.stream_name,
+ * token.start_mark.index,
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__token_to_object(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, yaml_token_t *__pyx_v_token) {
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark = NULL;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_end_mark = NULL;
+ PyObject *__pyx_v_encoding = NULL;
+ PyObject *__pyx_v_handle = NULL;
+ PyObject *__pyx_v_prefix = NULL;
+ PyObject *__pyx_v_value = NULL;
+ PyObject *__pyx_v_suffix = NULL;
+ int __pyx_v_plain;
+ PyObject *__pyx_v_style = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ PyObject *__pyx_t_7 = NULL;
+ int __pyx_t_8;
+ __Pyx_RefNannySetupContext("_token_to_object", 0);
+
+ /* "_yaml.pyx":388
+ * cdef object _token_to_object(self, yaml_token_t *token):
+ * start_mark = Mark(self.stream_name,
+ * token.start_mark.index, # <<<<<<<<<<<<<<
+ * token.start_mark.line,
+ * token.start_mark.column,
+ */
+ __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_token->start_mark.index); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 388, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":389
+ * start_mark = Mark(self.stream_name,
+ * token.start_mark.index,
+ * token.start_mark.line, # <<<<<<<<<<<<<<
+ * token.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_token->start_mark.line); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 389, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":390
+ * token.start_mark.index,
+ * token.start_mark.line,
+ * token.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * end_mark = Mark(self.stream_name,
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_token->start_mark.column); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 390, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":387
+ *
+ * cdef object _token_to_object(self, yaml_token_t *token):
+ * start_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * token.start_mark.index,
+ * token.start_mark.line,
+ */
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 387, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 387, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":393
+ * None, None)
+ * end_mark = Mark(self.stream_name,
+ * token.end_mark.index, # <<<<<<<<<<<<<<
+ * token.end_mark.line,
+ * token.end_mark.column,
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_token->end_mark.index); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 393, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":394
+ * end_mark = Mark(self.stream_name,
+ * token.end_mark.index,
+ * token.end_mark.line, # <<<<<<<<<<<<<<
+ * token.end_mark.column,
+ * None, None)
+ */
+ __pyx_t_4 = __Pyx_PyInt_FromSize_t(__pyx_v_token->end_mark.line); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 394, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":395
+ * token.end_mark.index,
+ * token.end_mark.line,
+ * token.end_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * if token.type == YAML_NO_TOKEN:
+ */
+ __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_token->end_mark.column); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 395, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":392
+ * token.start_mark.column,
+ * None, None)
+ * end_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * token.end_mark.index,
+ * token.end_mark.line,
+ */
+ __pyx_t_1 = PyTuple_New(6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 392, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_t_2);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 5, Py_None);
+ __pyx_t_3 = 0;
+ __pyx_t_4 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 392, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_end_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_2);
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":397
+ * token.end_mark.column,
+ * None, None)
+ * if token.type == YAML_NO_TOKEN: # <<<<<<<<<<<<<<
+ * return None
+ * elif token.type == YAML_STREAM_START_TOKEN:
+ */
+ switch (__pyx_v_token->type) {
+ case YAML_NO_TOKEN:
+
+ /* "_yaml.pyx":398
+ * None, None)
+ * if token.type == YAML_NO_TOKEN:
+ * return None # <<<<<<<<<<<<<<
+ * elif token.type == YAML_STREAM_START_TOKEN:
+ * encoding = None
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":397
+ * token.end_mark.column,
+ * None, None)
+ * if token.type == YAML_NO_TOKEN: # <<<<<<<<<<<<<<
+ * return None
+ * elif token.type == YAML_STREAM_START_TOKEN:
+ */
+ break;
+ case YAML_STREAM_START_TOKEN:
+
+ /* "_yaml.pyx":400
+ * return None
+ * elif token.type == YAML_STREAM_START_TOKEN:
+ * encoding = None # <<<<<<<<<<<<<<
+ * if token.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0:
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_encoding = ((PyObject*)Py_None);
+
+ /* "_yaml.pyx":401
+ * elif token.type == YAML_STREAM_START_TOKEN:
+ * encoding = None
+ * if token.data.stream_start.encoding == YAML_UTF8_ENCODING: # <<<<<<<<<<<<<<
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8"
+ */
+ switch (__pyx_v_token->data.stream_start.encoding) {
+ case YAML_UTF8_ENCODING:
+
+ /* "_yaml.pyx":402
+ * encoding = None
+ * if token.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0: # <<<<<<<<<<<<<<
+ * encoding = u"utf-8"
+ * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ */
+ __pyx_t_5 = ((__pyx_v_self->unicode_source == 0) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":403
+ * if token.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8" # <<<<<<<<<<<<<<
+ * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ * encoding = u"utf-16-le"
+ */
+ __Pyx_INCREF(__pyx_kp_u_utf_8);
+ __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_kp_u_utf_8);
+
+ /* "_yaml.pyx":402
+ * encoding = None
+ * if token.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0: # <<<<<<<<<<<<<<
+ * encoding = u"utf-8"
+ * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ */
+ }
+
+ /* "_yaml.pyx":401
+ * elif token.type == YAML_STREAM_START_TOKEN:
+ * encoding = None
+ * if token.data.stream_start.encoding == YAML_UTF8_ENCODING: # <<<<<<<<<<<<<<
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8"
+ */
+ break;
+ case YAML_UTF16LE_ENCODING:
+
+ /* "_yaml.pyx":405
+ * encoding = u"utf-8"
+ * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ * encoding = u"utf-16-le" # <<<<<<<<<<<<<<
+ * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ * encoding = u"utf-16-be"
+ */
+ __Pyx_INCREF(__pyx_kp_u_utf_16_le);
+ __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_kp_u_utf_16_le);
+
+ /* "_yaml.pyx":404
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8"
+ * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING: # <<<<<<<<<<<<<<
+ * encoding = u"utf-16-le"
+ * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ */
+ break;
+ case YAML_UTF16BE_ENCODING:
+
+ /* "_yaml.pyx":407
+ * encoding = u"utf-16-le"
+ * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ * encoding = u"utf-16-be" # <<<<<<<<<<<<<<
+ * return StreamStartToken(start_mark, end_mark, encoding)
+ * elif token.type == YAML_STREAM_END_TOKEN:
+ */
+ __Pyx_INCREF(__pyx_kp_u_utf_16_be);
+ __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_kp_u_utf_16_be);
+
+ /* "_yaml.pyx":406
+ * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ * encoding = u"utf-16-le"
+ * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING: # <<<<<<<<<<<<<<
+ * encoding = u"utf-16-be"
+ * return StreamStartToken(start_mark, end_mark, encoding)
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":408
+ * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ * encoding = u"utf-16-be"
+ * return StreamStartToken(start_mark, end_mark, encoding) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_STREAM_END_TOKEN:
+ * return StreamEndToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_StreamStartToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 408, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_encoding};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 408, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_encoding};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 408, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(3+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 408, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_4) {
+ __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_encoding);
+ __Pyx_GIVEREF(__pyx_v_encoding);
+ PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_6, __pyx_v_encoding);
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 408, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":399
+ * if token.type == YAML_NO_TOKEN:
+ * return None
+ * elif token.type == YAML_STREAM_START_TOKEN: # <<<<<<<<<<<<<<
+ * encoding = None
+ * if token.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ */
+ break;
+ case YAML_STREAM_END_TOKEN:
+
+ /* "_yaml.pyx":410
+ * return StreamStartToken(start_mark, end_mark, encoding)
+ * elif token.type == YAML_STREAM_END_TOKEN:
+ * return StreamEndToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
+ * return DirectiveToken(u"YAML",
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_StreamEndToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 410, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 410, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 410, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_4 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 410, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 410, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":409
+ * encoding = u"utf-16-be"
+ * return StreamStartToken(start_mark, end_mark, encoding)
+ * elif token.type == YAML_STREAM_END_TOKEN: # <<<<<<<<<<<<<<
+ * return StreamEndToken(start_mark, end_mark)
+ * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
+ */
+ break;
+ case YAML_VERSION_DIRECTIVE_TOKEN:
+
+ /* "_yaml.pyx":412
+ * return StreamEndToken(start_mark, end_mark)
+ * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
+ * return DirectiveToken(u"YAML", # <<<<<<<<<<<<<<
+ * (token.data.version_directive.major,
+ * token.data.version_directive.minor),
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_DirectiveToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 412, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":413
+ * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
+ * return DirectiveToken(u"YAML",
+ * (token.data.version_directive.major, # <<<<<<<<<<<<<<
+ * token.data.version_directive.minor),
+ * start_mark, end_mark)
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_token->data.version_directive.major); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 413, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":414
+ * return DirectiveToken(u"YAML",
+ * (token.data.version_directive.major,
+ * token.data.version_directive.minor), # <<<<<<<<<<<<<<
+ * start_mark, end_mark)
+ * elif token.type == YAML_TAG_DIRECTIVE_TOKEN:
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_token->data.version_directive.minor); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 414, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":413
+ * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
+ * return DirectiveToken(u"YAML",
+ * (token.data.version_directive.major, # <<<<<<<<<<<<<<
+ * token.data.version_directive.minor),
+ * start_mark, end_mark)
+ */
+ __pyx_t_7 = PyTuple_New(2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 413, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_3);
+ __pyx_t_4 = 0;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":415
+ * (token.data.version_directive.major,
+ * token.data.version_directive.minor),
+ * start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_TAG_DIRECTIVE_TOKEN:
+ * handle = PyUnicode_FromString(token.data.tag_directive.handle)
+ */
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_3, __pyx_n_u_YAML, __pyx_t_7, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 4+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 412, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_3, __pyx_n_u_YAML, __pyx_t_7, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 4+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 412, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_4 = PyTuple_New(4+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 412, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(__pyx_n_u_YAML);
+ __Pyx_GIVEREF(__pyx_n_u_YAML);
+ PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_6, __pyx_n_u_YAML);
+ __Pyx_GIVEREF(__pyx_t_7);
+ PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_6, __pyx_t_7);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 2+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 3+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_7 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 412, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":411
+ * elif token.type == YAML_STREAM_END_TOKEN:
+ * return StreamEndToken(start_mark, end_mark)
+ * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN: # <<<<<<<<<<<<<<
+ * return DirectiveToken(u"YAML",
+ * (token.data.version_directive.major,
+ */
+ break;
+ case YAML_TAG_DIRECTIVE_TOKEN:
+
+ /* "_yaml.pyx":417
+ * start_mark, end_mark)
+ * elif token.type == YAML_TAG_DIRECTIVE_TOKEN:
+ * handle = PyUnicode_FromString(token.data.tag_directive.handle) # <<<<<<<<<<<<<<
+ * prefix = PyUnicode_FromString(token.data.tag_directive.prefix)
+ * return DirectiveToken(u"TAG", (handle, prefix),
+ */
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_token->data.tag_directive.handle); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 417, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_handle = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":418
+ * elif token.type == YAML_TAG_DIRECTIVE_TOKEN:
+ * handle = PyUnicode_FromString(token.data.tag_directive.handle)
+ * prefix = PyUnicode_FromString(token.data.tag_directive.prefix) # <<<<<<<<<<<<<<
+ * return DirectiveToken(u"TAG", (handle, prefix),
+ * start_mark, end_mark)
+ */
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_token->data.tag_directive.prefix); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 418, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_prefix = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":419
+ * handle = PyUnicode_FromString(token.data.tag_directive.handle)
+ * prefix = PyUnicode_FromString(token.data.tag_directive.prefix)
+ * return DirectiveToken(u"TAG", (handle, prefix), # <<<<<<<<<<<<<<
+ * start_mark, end_mark)
+ * elif token.type == YAML_DOCUMENT_START_TOKEN:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_DirectiveToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 419, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 419, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_handle);
+ __Pyx_GIVEREF(__pyx_v_handle);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_handle);
+ __Pyx_INCREF(__pyx_v_prefix);
+ __Pyx_GIVEREF(__pyx_v_prefix);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_prefix);
+
+ /* "_yaml.pyx":420
+ * prefix = PyUnicode_FromString(token.data.tag_directive.prefix)
+ * return DirectiveToken(u"TAG", (handle, prefix),
+ * start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_DOCUMENT_START_TOKEN:
+ * return DocumentStartToken(start_mark, end_mark)
+ */
+ __pyx_t_7 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_7)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_7);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_n_u_TAG, __pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 4+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 419, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_n_u_TAG, __pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 4+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 419, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(4+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 419, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_7) {
+ __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_7); __pyx_t_7 = NULL;
+ }
+ __Pyx_INCREF(__pyx_n_u_TAG);
+ __Pyx_GIVEREF(__pyx_n_u_TAG);
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, __pyx_n_u_TAG);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, __pyx_t_4);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 3+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_4 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 419, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":416
+ * token.data.version_directive.minor),
+ * start_mark, end_mark)
+ * elif token.type == YAML_TAG_DIRECTIVE_TOKEN: # <<<<<<<<<<<<<<
+ * handle = PyUnicode_FromString(token.data.tag_directive.handle)
+ * prefix = PyUnicode_FromString(token.data.tag_directive.prefix)
+ */
+ break;
+ case YAML_DOCUMENT_START_TOKEN:
+
+ /* "_yaml.pyx":422
+ * start_mark, end_mark)
+ * elif token.type == YAML_DOCUMENT_START_TOKEN:
+ * return DocumentStartToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_DOCUMENT_END_TOKEN:
+ * return DocumentEndToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_DocumentStartToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 422, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 422, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 422, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_4 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 422, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 422, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":421
+ * return DirectiveToken(u"TAG", (handle, prefix),
+ * start_mark, end_mark)
+ * elif token.type == YAML_DOCUMENT_START_TOKEN: # <<<<<<<<<<<<<<
+ * return DocumentStartToken(start_mark, end_mark)
+ * elif token.type == YAML_DOCUMENT_END_TOKEN:
+ */
+ break;
+ case YAML_DOCUMENT_END_TOKEN:
+
+ /* "_yaml.pyx":424
+ * return DocumentStartToken(start_mark, end_mark)
+ * elif token.type == YAML_DOCUMENT_END_TOKEN:
+ * return DocumentEndToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN:
+ * return BlockSequenceStartToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_DocumentEndToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 424, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 424, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 424, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 424, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_4) {
+ __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 424, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":423
+ * elif token.type == YAML_DOCUMENT_START_TOKEN:
+ * return DocumentStartToken(start_mark, end_mark)
+ * elif token.type == YAML_DOCUMENT_END_TOKEN: # <<<<<<<<<<<<<<
+ * return DocumentEndToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN:
+ */
+ break;
+ case YAML_BLOCK_SEQUENCE_START_TOKEN:
+
+ /* "_yaml.pyx":426
+ * return DocumentEndToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN:
+ * return BlockSequenceStartToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN:
+ * return BlockMappingStartToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BlockSequenceStartToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 426, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 426, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 426, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_4 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 426, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 426, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":425
+ * elif token.type == YAML_DOCUMENT_END_TOKEN:
+ * return DocumentEndToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN: # <<<<<<<<<<<<<<
+ * return BlockSequenceStartToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN:
+ */
+ break;
+ case YAML_BLOCK_MAPPING_START_TOKEN:
+
+ /* "_yaml.pyx":428
+ * return BlockSequenceStartToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN:
+ * return BlockMappingStartToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_BLOCK_END_TOKEN:
+ * return BlockEndToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BlockMappingStartToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 428, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 428, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 428, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 428, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_4) {
+ __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 428, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":427
+ * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN:
+ * return BlockSequenceStartToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN: # <<<<<<<<<<<<<<
+ * return BlockMappingStartToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_END_TOKEN:
+ */
+ break;
+ case YAML_BLOCK_END_TOKEN:
+
+ /* "_yaml.pyx":430
+ * return BlockMappingStartToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_END_TOKEN:
+ * return BlockEndToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN:
+ * return FlowSequenceStartToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BlockEndToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 430, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 430, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 430, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_4 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 430, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 430, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":429
+ * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN:
+ * return BlockMappingStartToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_END_TOKEN: # <<<<<<<<<<<<<<
+ * return BlockEndToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN:
+ */
+ break;
+ case YAML_FLOW_SEQUENCE_START_TOKEN:
+
+ /* "_yaml.pyx":432
+ * return BlockEndToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN:
+ * return FlowSequenceStartToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN:
+ * return FlowSequenceEndToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_FlowSequenceStartToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 432, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 432, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 432, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 432, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_4) {
+ __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 432, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":431
+ * elif token.type == YAML_BLOCK_END_TOKEN:
+ * return BlockEndToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN: # <<<<<<<<<<<<<<
+ * return FlowSequenceStartToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN:
+ */
+ break;
+ case YAML_FLOW_SEQUENCE_END_TOKEN:
+
+ /* "_yaml.pyx":434
+ * return FlowSequenceStartToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN:
+ * return FlowSequenceEndToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_FLOW_MAPPING_START_TOKEN:
+ * return FlowMappingStartToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_FlowSequenceEndToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 434, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 434, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 434, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_4 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 434, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 434, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":433
+ * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN:
+ * return FlowSequenceStartToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN: # <<<<<<<<<<<<<<
+ * return FlowSequenceEndToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_MAPPING_START_TOKEN:
+ */
+ break;
+ case YAML_FLOW_MAPPING_START_TOKEN:
+
+ /* "_yaml.pyx":436
+ * return FlowSequenceEndToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_MAPPING_START_TOKEN:
+ * return FlowMappingStartToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_FLOW_MAPPING_END_TOKEN:
+ * return FlowMappingEndToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_FlowMappingStartToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 436, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 436, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 436, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 436, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_4) {
+ __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 436, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":435
+ * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN:
+ * return FlowSequenceEndToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_MAPPING_START_TOKEN: # <<<<<<<<<<<<<<
+ * return FlowMappingStartToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_MAPPING_END_TOKEN:
+ */
+ break;
+ case YAML_FLOW_MAPPING_END_TOKEN:
+
+ /* "_yaml.pyx":438
+ * return FlowMappingStartToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_MAPPING_END_TOKEN:
+ * return FlowMappingEndToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_BLOCK_ENTRY_TOKEN:
+ * return BlockEntryToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_FlowMappingEndToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 438, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 438, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 438, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_4 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 438, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 438, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":437
+ * elif token.type == YAML_FLOW_MAPPING_START_TOKEN:
+ * return FlowMappingStartToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_MAPPING_END_TOKEN: # <<<<<<<<<<<<<<
+ * return FlowMappingEndToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_ENTRY_TOKEN:
+ */
+ break;
+ case YAML_BLOCK_ENTRY_TOKEN:
+
+ /* "_yaml.pyx":440
+ * return FlowMappingEndToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_ENTRY_TOKEN:
+ * return BlockEntryToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_FLOW_ENTRY_TOKEN:
+ * return FlowEntryToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_BlockEntryToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 440, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 440, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 440, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 440, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_4) {
+ __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 440, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":439
+ * elif token.type == YAML_FLOW_MAPPING_END_TOKEN:
+ * return FlowMappingEndToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_ENTRY_TOKEN: # <<<<<<<<<<<<<<
+ * return BlockEntryToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_ENTRY_TOKEN:
+ */
+ break;
+ case YAML_FLOW_ENTRY_TOKEN:
+
+ /* "_yaml.pyx":442
+ * return BlockEntryToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_ENTRY_TOKEN:
+ * return FlowEntryToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_KEY_TOKEN:
+ * return KeyToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_FlowEntryToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 442, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 442, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 442, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_4 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 442, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 442, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":441
+ * elif token.type == YAML_BLOCK_ENTRY_TOKEN:
+ * return BlockEntryToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_ENTRY_TOKEN: # <<<<<<<<<<<<<<
+ * return FlowEntryToken(start_mark, end_mark)
+ * elif token.type == YAML_KEY_TOKEN:
+ */
+ break;
+ case YAML_KEY_TOKEN:
+
+ /* "_yaml.pyx":444
+ * return FlowEntryToken(start_mark, end_mark)
+ * elif token.type == YAML_KEY_TOKEN:
+ * return KeyToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_VALUE_TOKEN:
+ * return ValueToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_KeyToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 444, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 444, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 444, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 444, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_4) {
+ __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 444, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":443
+ * elif token.type == YAML_FLOW_ENTRY_TOKEN:
+ * return FlowEntryToken(start_mark, end_mark)
+ * elif token.type == YAML_KEY_TOKEN: # <<<<<<<<<<<<<<
+ * return KeyToken(start_mark, end_mark)
+ * elif token.type == YAML_VALUE_TOKEN:
+ */
+ break;
+ case YAML_VALUE_TOKEN:
+
+ /* "_yaml.pyx":446
+ * return KeyToken(start_mark, end_mark)
+ * elif token.type == YAML_VALUE_TOKEN:
+ * return ValueToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_ALIAS_TOKEN:
+ * value = PyUnicode_FromString(token.data.alias.value)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_ValueToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 446, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 446, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 446, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_4 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 446, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 446, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":445
+ * elif token.type == YAML_KEY_TOKEN:
+ * return KeyToken(start_mark, end_mark)
+ * elif token.type == YAML_VALUE_TOKEN: # <<<<<<<<<<<<<<
+ * return ValueToken(start_mark, end_mark)
+ * elif token.type == YAML_ALIAS_TOKEN:
+ */
+ break;
+ case YAML_ALIAS_TOKEN:
+
+ /* "_yaml.pyx":448
+ * return ValueToken(start_mark, end_mark)
+ * elif token.type == YAML_ALIAS_TOKEN:
+ * value = PyUnicode_FromString(token.data.alias.value) # <<<<<<<<<<<<<<
+ * return AliasToken(value, start_mark, end_mark)
+ * elif token.type == YAML_ANCHOR_TOKEN:
+ */
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_token->data.alias.value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 448, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_value = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":449
+ * elif token.type == YAML_ALIAS_TOKEN:
+ * value = PyUnicode_FromString(token.data.alias.value)
+ * return AliasToken(value, start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_ANCHOR_TOKEN:
+ * value = PyUnicode_FromString(token.data.anchor.value)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_AliasToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 449, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_value, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 449, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_value, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 449, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(3+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 449, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_4) {
+ __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, __pyx_v_value);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 449, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":447
+ * elif token.type == YAML_VALUE_TOKEN:
+ * return ValueToken(start_mark, end_mark)
+ * elif token.type == YAML_ALIAS_TOKEN: # <<<<<<<<<<<<<<
+ * value = PyUnicode_FromString(token.data.alias.value)
+ * return AliasToken(value, start_mark, end_mark)
+ */
+ break;
+ case YAML_ANCHOR_TOKEN:
+
+ /* "_yaml.pyx":451
+ * return AliasToken(value, start_mark, end_mark)
+ * elif token.type == YAML_ANCHOR_TOKEN:
+ * value = PyUnicode_FromString(token.data.anchor.value) # <<<<<<<<<<<<<<
+ * return AnchorToken(value, start_mark, end_mark)
+ * elif token.type == YAML_TAG_TOKEN:
+ */
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_token->data.anchor.value); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 451, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_value = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":452
+ * elif token.type == YAML_ANCHOR_TOKEN:
+ * value = PyUnicode_FromString(token.data.anchor.value)
+ * return AnchorToken(value, start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_TAG_TOKEN:
+ * handle = PyUnicode_FromString(token.data.tag.handle)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_AnchorToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 452, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_value, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 452, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_value, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 452, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_4 = PyTuple_New(3+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 452, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_6, __pyx_v_value);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 2+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 452, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":450
+ * value = PyUnicode_FromString(token.data.alias.value)
+ * return AliasToken(value, start_mark, end_mark)
+ * elif token.type == YAML_ANCHOR_TOKEN: # <<<<<<<<<<<<<<
+ * value = PyUnicode_FromString(token.data.anchor.value)
+ * return AnchorToken(value, start_mark, end_mark)
+ */
+ break;
+ case YAML_TAG_TOKEN:
+
+ /* "_yaml.pyx":454
+ * return AnchorToken(value, start_mark, end_mark)
+ * elif token.type == YAML_TAG_TOKEN:
+ * handle = PyUnicode_FromString(token.data.tag.handle) # <<<<<<<<<<<<<<
+ * suffix = PyUnicode_FromString(token.data.tag.suffix)
+ * if not handle:
+ */
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_token->data.tag.handle); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 454, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_handle = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":455
+ * elif token.type == YAML_TAG_TOKEN:
+ * handle = PyUnicode_FromString(token.data.tag.handle)
+ * suffix = PyUnicode_FromString(token.data.tag.suffix) # <<<<<<<<<<<<<<
+ * if not handle:
+ * handle = None
+ */
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_token->data.tag.suffix); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 455, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_suffix = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":456
+ * handle = PyUnicode_FromString(token.data.tag.handle)
+ * suffix = PyUnicode_FromString(token.data.tag.suffix)
+ * if not handle: # <<<<<<<<<<<<<<
+ * handle = None
+ * return TagToken((handle, suffix), start_mark, end_mark)
+ */
+ __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_v_handle); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 456, __pyx_L1_error)
+ __pyx_t_8 = ((!__pyx_t_5) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":457
+ * suffix = PyUnicode_FromString(token.data.tag.suffix)
+ * if not handle:
+ * handle = None # <<<<<<<<<<<<<<
+ * return TagToken((handle, suffix), start_mark, end_mark)
+ * elif token.type == YAML_SCALAR_TOKEN:
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_DECREF_SET(__pyx_v_handle, Py_None);
+
+ /* "_yaml.pyx":456
+ * handle = PyUnicode_FromString(token.data.tag.handle)
+ * suffix = PyUnicode_FromString(token.data.tag.suffix)
+ * if not handle: # <<<<<<<<<<<<<<
+ * handle = None
+ * return TagToken((handle, suffix), start_mark, end_mark)
+ */
+ }
+
+ /* "_yaml.pyx":458
+ * if not handle:
+ * handle = None
+ * return TagToken((handle, suffix), start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_SCALAR_TOKEN:
+ * value = PyUnicode_DecodeUTF8(token.data.scalar.value,
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_TagToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 458, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 458, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_handle);
+ __Pyx_GIVEREF(__pyx_v_handle);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_handle);
+ __Pyx_INCREF(__pyx_v_suffix);
+ __Pyx_GIVEREF(__pyx_v_suffix);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_suffix);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 458, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 458, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_7 = PyTuple_New(3+__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 458, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_6, __pyx_t_4);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_4 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_7, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 458, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":453
+ * value = PyUnicode_FromString(token.data.anchor.value)
+ * return AnchorToken(value, start_mark, end_mark)
+ * elif token.type == YAML_TAG_TOKEN: # <<<<<<<<<<<<<<
+ * handle = PyUnicode_FromString(token.data.tag.handle)
+ * suffix = PyUnicode_FromString(token.data.tag.suffix)
+ */
+ break;
+ case YAML_SCALAR_TOKEN:
+
+ /* "_yaml.pyx":460
+ * return TagToken((handle, suffix), start_mark, end_mark)
+ * elif token.type == YAML_SCALAR_TOKEN:
+ * value = PyUnicode_DecodeUTF8(token.data.scalar.value, # <<<<<<<<<<<<<<
+ * token.data.scalar.length, 'strict')
+ * plain = False
+ */
+ __pyx_t_2 = PyUnicode_DecodeUTF8(__pyx_v_token->data.scalar.value, __pyx_v_token->data.scalar.length, ((char *)"strict")); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 460, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_value = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":462
+ * value = PyUnicode_DecodeUTF8(token.data.scalar.value,
+ * token.data.scalar.length, 'strict')
+ * plain = False # <<<<<<<<<<<<<<
+ * style = None
+ * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ */
+ __pyx_v_plain = 0;
+
+ /* "_yaml.pyx":463
+ * token.data.scalar.length, 'strict')
+ * plain = False
+ * style = None # <<<<<<<<<<<<<<
+ * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * plain = True
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_style = ((PyObject*)Py_None);
+
+ /* "_yaml.pyx":464
+ * plain = False
+ * style = None
+ * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * plain = True
+ * style = u''
+ */
+ switch (__pyx_v_token->data.scalar.style) {
+ case YAML_PLAIN_SCALAR_STYLE:
+
+ /* "_yaml.pyx":465
+ * style = None
+ * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * plain = True # <<<<<<<<<<<<<<
+ * style = u''
+ * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ */
+ __pyx_v_plain = 1;
+
+ /* "_yaml.pyx":466
+ * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * plain = True
+ * style = u'' # <<<<<<<<<<<<<<
+ * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\''
+ */
+ __Pyx_INCREF(__pyx_kp_u__6);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__6);
+
+ /* "_yaml.pyx":464
+ * plain = False
+ * style = None
+ * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * plain = True
+ * style = u''
+ */
+ break;
+ case YAML_SINGLE_QUOTED_SCALAR_STYLE:
+
+ /* "_yaml.pyx":468
+ * style = u''
+ * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\'' # <<<<<<<<<<<<<<
+ * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"'
+ */
+ __Pyx_INCREF(__pyx_kp_u__7);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__7);
+
+ /* "_yaml.pyx":467
+ * plain = True
+ * style = u''
+ * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'\''
+ * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ */
+ break;
+ case YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+
+ /* "_yaml.pyx":470
+ * style = u'\''
+ * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"' # <<<<<<<<<<<<<<
+ * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|'
+ */
+ __Pyx_INCREF(__pyx_kp_u__8);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__8);
+
+ /* "_yaml.pyx":469
+ * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\''
+ * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'"'
+ * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ */
+ break;
+ case YAML_LITERAL_SCALAR_STYLE:
+
+ /* "_yaml.pyx":472
+ * style = u'"'
+ * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|' # <<<<<<<<<<<<<<
+ * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ */
+ __Pyx_INCREF(__pyx_kp_u__9);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__9);
+
+ /* "_yaml.pyx":471
+ * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"'
+ * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'|'
+ * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ */
+ break;
+ case YAML_FOLDED_SCALAR_STYLE:
+
+ /* "_yaml.pyx":474
+ * style = u'|'
+ * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>' # <<<<<<<<<<<<<<
+ * return ScalarToken(value, plain,
+ * start_mark, end_mark, style)
+ */
+ __Pyx_INCREF(__pyx_kp_u__10);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__10);
+
+ /* "_yaml.pyx":473
+ * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|'
+ * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'>'
+ * return ScalarToken(value, plain,
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":475
+ * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ * return ScalarToken(value, plain, # <<<<<<<<<<<<<<
+ * start_mark, end_mark, style)
+ * else:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_ScalarToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 475, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_7 = __Pyx_PyBool_FromLong(__pyx_v_plain); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 475, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+
+ /* "_yaml.pyx":476
+ * style = u'>'
+ * return ScalarToken(value, plain,
+ * start_mark, end_mark, style) # <<<<<<<<<<<<<<
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_4 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_4, __pyx_v_value, __pyx_t_7, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_style};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 5+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 475, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_4, __pyx_v_value, __pyx_t_7, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_style};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 5+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 475, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(5+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 475, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_4) {
+ __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, __pyx_v_value);
+ __Pyx_GIVEREF(__pyx_t_7);
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, __pyx_t_7);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 3+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_style);
+ __Pyx_GIVEREF(__pyx_v_style);
+ PyTuple_SET_ITEM(__pyx_t_3, 4+__pyx_t_6, __pyx_v_style);
+ __pyx_t_7 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 475, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":459
+ * handle = None
+ * return TagToken((handle, suffix), start_mark, end_mark)
+ * elif token.type == YAML_SCALAR_TOKEN: # <<<<<<<<<<<<<<
+ * value = PyUnicode_DecodeUTF8(token.data.scalar.value,
+ * token.data.scalar.length, 'strict')
+ */
+ break;
+ default:
+
+ /* "_yaml.pyx":478
+ * start_mark, end_mark, style)
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("unknown token type")
+ * else:
+ */
+ __pyx_t_8 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_8)) {
+
+ /* "_yaml.pyx":479
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("unknown token type") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"unknown token type")
+ */
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 479, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __PYX_ERR(0, 479, __pyx_L1_error)
+
+ /* "_yaml.pyx":478
+ * start_mark, end_mark, style)
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("unknown token type")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":481
+ * raise ValueError("unknown token type")
+ * else:
+ * raise ValueError(u"unknown token type") # <<<<<<<<<<<<<<
+ *
+ * def get_token(self):
+ */
+ /*else*/ {
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__12, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 481, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __PYX_ERR(0, 481, __pyx_L1_error)
+ }
+ break;
+ }
+
+ /* "_yaml.pyx":386
+ * return token_object
+ *
+ * cdef object _token_to_object(self, yaml_token_t *token): # <<<<<<<<<<<<<<
+ * start_mark = Mark(self.stream_name,
+ * token.start_mark.index,
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_AddTraceback("_yaml.CParser._token_to_object", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_start_mark);
+ __Pyx_XDECREF((PyObject *)__pyx_v_end_mark);
+ __Pyx_XDECREF(__pyx_v_encoding);
+ __Pyx_XDECREF(__pyx_v_handle);
+ __Pyx_XDECREF(__pyx_v_prefix);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XDECREF(__pyx_v_suffix);
+ __Pyx_XDECREF(__pyx_v_style);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":483
+ * raise ValueError(u"unknown token type")
+ *
+ * def get_token(self): # <<<<<<<<<<<<<<
+ * if self.current_token is not None:
+ * value = self.current_token
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_9get_token(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_9get_token(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_token (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_8get_token(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_8get_token(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_v_value = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ __Pyx_RefNannySetupContext("get_token", 0);
+
+ /* "_yaml.pyx":484
+ *
+ * def get_token(self):
+ * if self.current_token is not None: # <<<<<<<<<<<<<<
+ * value = self.current_token
+ * self.current_token = None
+ */
+ __pyx_t_1 = (__pyx_v_self->current_token != Py_None);
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":485
+ * def get_token(self):
+ * if self.current_token is not None:
+ * value = self.current_token # <<<<<<<<<<<<<<
+ * self.current_token = None
+ * else:
+ */
+ __pyx_t_3 = __pyx_v_self->current_token;
+ __Pyx_INCREF(__pyx_t_3);
+ __pyx_v_value = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":486
+ * if self.current_token is not None:
+ * value = self.current_token
+ * self.current_token = None # <<<<<<<<<<<<<<
+ * else:
+ * value = self._scan()
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_GOTREF(__pyx_v_self->current_token);
+ __Pyx_DECREF(__pyx_v_self->current_token);
+ __pyx_v_self->current_token = Py_None;
+
+ /* "_yaml.pyx":484
+ *
+ * def get_token(self):
+ * if self.current_token is not None: # <<<<<<<<<<<<<<
+ * value = self.current_token
+ * self.current_token = None
+ */
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":488
+ * self.current_token = None
+ * else:
+ * value = self._scan() # <<<<<<<<<<<<<<
+ * return value
+ *
+ */
+ /*else*/ {
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_scan(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 488, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_value = __pyx_t_3;
+ __pyx_t_3 = 0;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":489
+ * else:
+ * value = self._scan()
+ * return value # <<<<<<<<<<<<<<
+ *
+ * def peek_token(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_value);
+ __pyx_r = __pyx_v_value;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":483
+ * raise ValueError(u"unknown token type")
+ *
+ * def get_token(self): # <<<<<<<<<<<<<<
+ * if self.current_token is not None:
+ * value = self.current_token
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.get_token", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":491
+ * return value
+ *
+ * def peek_token(self): # <<<<<<<<<<<<<<
+ * if self.current_token is None:
+ * self.current_token = self._scan()
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_11peek_token(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_11peek_token(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("peek_token (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_10peek_token(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_10peek_token(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ __Pyx_RefNannySetupContext("peek_token", 0);
+
+ /* "_yaml.pyx":492
+ *
+ * def peek_token(self):
+ * if self.current_token is None: # <<<<<<<<<<<<<<
+ * self.current_token = self._scan()
+ * return self.current_token
+ */
+ __pyx_t_1 = (__pyx_v_self->current_token == Py_None);
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":493
+ * def peek_token(self):
+ * if self.current_token is None:
+ * self.current_token = self._scan() # <<<<<<<<<<<<<<
+ * return self.current_token
+ *
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_scan(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 493, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_GOTREF(__pyx_v_self->current_token);
+ __Pyx_DECREF(__pyx_v_self->current_token);
+ __pyx_v_self->current_token = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":492
+ *
+ * def peek_token(self):
+ * if self.current_token is None: # <<<<<<<<<<<<<<
+ * self.current_token = self._scan()
+ * return self.current_token
+ */
+ }
+
+ /* "_yaml.pyx":494
+ * if self.current_token is None:
+ * self.current_token = self._scan()
+ * return self.current_token # <<<<<<<<<<<<<<
+ *
+ * def check_token(self, *choices):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_self->current_token);
+ __pyx_r = __pyx_v_self->current_token;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":491
+ * return value
+ *
+ * def peek_token(self): # <<<<<<<<<<<<<<
+ * if self.current_token is None:
+ * self.current_token = self._scan()
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.peek_token", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":496
+ * return self.current_token
+ *
+ * def check_token(self, *choices): # <<<<<<<<<<<<<<
+ * if self.current_token is None:
+ * self.current_token = self._scan()
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_13check_token(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_13check_token(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ PyObject *__pyx_v_choices = 0;
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("check_token (wrapper)", 0);
+ if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "check_token", 0))) return NULL;
+ __Pyx_INCREF(__pyx_args);
+ __pyx_v_choices = __pyx_args;
+ __pyx_r = __pyx_pf_5_yaml_7CParser_12check_token(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self), __pyx_v_choices);
+
+ /* function exit code */
+ __Pyx_XDECREF(__pyx_v_choices);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_12check_token(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_choices) {
+ PyObject *__pyx_v_token_class = NULL;
+ PyObject *__pyx_v_choice = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ Py_ssize_t __pyx_t_4;
+ PyObject *__pyx_t_5 = NULL;
+ __Pyx_RefNannySetupContext("check_token", 0);
+
+ /* "_yaml.pyx":497
+ *
+ * def check_token(self, *choices):
+ * if self.current_token is None: # <<<<<<<<<<<<<<
+ * self.current_token = self._scan()
+ * if self.current_token is None:
+ */
+ __pyx_t_1 = (__pyx_v_self->current_token == Py_None);
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":498
+ * def check_token(self, *choices):
+ * if self.current_token is None:
+ * self.current_token = self._scan() # <<<<<<<<<<<<<<
+ * if self.current_token is None:
+ * return False
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_scan(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 498, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_GOTREF(__pyx_v_self->current_token);
+ __Pyx_DECREF(__pyx_v_self->current_token);
+ __pyx_v_self->current_token = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":497
+ *
+ * def check_token(self, *choices):
+ * if self.current_token is None: # <<<<<<<<<<<<<<
+ * self.current_token = self._scan()
+ * if self.current_token is None:
+ */
+ }
+
+ /* "_yaml.pyx":499
+ * if self.current_token is None:
+ * self.current_token = self._scan()
+ * if self.current_token is None: # <<<<<<<<<<<<<<
+ * return False
+ * if not choices:
+ */
+ __pyx_t_2 = (__pyx_v_self->current_token == Py_None);
+ __pyx_t_1 = (__pyx_t_2 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":500
+ * self.current_token = self._scan()
+ * if self.current_token is None:
+ * return False # <<<<<<<<<<<<<<
+ * if not choices:
+ * return True
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_False);
+ __pyx_r = Py_False;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":499
+ * if self.current_token is None:
+ * self.current_token = self._scan()
+ * if self.current_token is None: # <<<<<<<<<<<<<<
+ * return False
+ * if not choices:
+ */
+ }
+
+ /* "_yaml.pyx":501
+ * if self.current_token is None:
+ * return False
+ * if not choices: # <<<<<<<<<<<<<<
+ * return True
+ * token_class = self.current_token.__class__
+ */
+ __pyx_t_1 = (PyTuple_GET_SIZE(__pyx_v_choices) != 0);
+ __pyx_t_2 = ((!__pyx_t_1) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":502
+ * return False
+ * if not choices:
+ * return True # <<<<<<<<<<<<<<
+ * token_class = self.current_token.__class__
+ * for choice in choices:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_True);
+ __pyx_r = Py_True;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":501
+ * if self.current_token is None:
+ * return False
+ * if not choices: # <<<<<<<<<<<<<<
+ * return True
+ * token_class = self.current_token.__class__
+ */
+ }
+
+ /* "_yaml.pyx":503
+ * if not choices:
+ * return True
+ * token_class = self.current_token.__class__ # <<<<<<<<<<<<<<
+ * for choice in choices:
+ * if token_class is choice:
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->current_token, __pyx_n_s_class); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 503, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_token_class = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":504
+ * return True
+ * token_class = self.current_token.__class__
+ * for choice in choices: # <<<<<<<<<<<<<<
+ * if token_class is choice:
+ * return True
+ */
+ __pyx_t_3 = __pyx_v_choices; __Pyx_INCREF(__pyx_t_3); __pyx_t_4 = 0;
+ for (;;) {
+ if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_3)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_5); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 504, __pyx_L1_error)
+ #else
+ __pyx_t_5 = PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 504, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ #endif
+ __Pyx_XDECREF_SET(__pyx_v_choice, __pyx_t_5);
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":505
+ * token_class = self.current_token.__class__
+ * for choice in choices:
+ * if token_class is choice: # <<<<<<<<<<<<<<
+ * return True
+ * return False
+ */
+ __pyx_t_2 = (__pyx_v_token_class == __pyx_v_choice);
+ __pyx_t_1 = (__pyx_t_2 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":506
+ * for choice in choices:
+ * if token_class is choice:
+ * return True # <<<<<<<<<<<<<<
+ * return False
+ *
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_True);
+ __pyx_r = Py_True;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":505
+ * token_class = self.current_token.__class__
+ * for choice in choices:
+ * if token_class is choice: # <<<<<<<<<<<<<<
+ * return True
+ * return False
+ */
+ }
+
+ /* "_yaml.pyx":504
+ * return True
+ * token_class = self.current_token.__class__
+ * for choice in choices: # <<<<<<<<<<<<<<
+ * if token_class is choice:
+ * return True
+ */
+ }
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":507
+ * if token_class is choice:
+ * return True
+ * return False # <<<<<<<<<<<<<<
+ *
+ * def raw_parse(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_False);
+ __pyx_r = Py_False;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":496
+ * return self.current_token
+ *
+ * def check_token(self, *choices): # <<<<<<<<<<<<<<
+ * if self.current_token is None:
+ * self.current_token = self._scan()
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_AddTraceback("_yaml.CParser.check_token", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_token_class);
+ __Pyx_XDECREF(__pyx_v_choice);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":509
+ * return False
+ *
+ * def raw_parse(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef int done
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_15raw_parse(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_15raw_parse(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("raw_parse (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_14raw_parse(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_14raw_parse(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ yaml_event_t __pyx_v_event;
+ int __pyx_v_done;
+ int __pyx_v_count;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ __Pyx_RefNannySetupContext("raw_parse", 0);
+
+ /* "_yaml.pyx":513
+ * cdef int done
+ * cdef int count
+ * count = 0 # <<<<<<<<<<<<<<
+ * done = 0
+ * while done == 0:
+ */
+ __pyx_v_count = 0;
+
+ /* "_yaml.pyx":514
+ * cdef int count
+ * count = 0
+ * done = 0 # <<<<<<<<<<<<<<
+ * while done == 0:
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ */
+ __pyx_v_done = 0;
+
+ /* "_yaml.pyx":515
+ * count = 0
+ * done = 0
+ * while done == 0: # <<<<<<<<<<<<<<
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ * error = self._parser_error()
+ */
+ while (1) {
+ __pyx_t_1 = ((__pyx_v_done == 0) != 0);
+ if (!__pyx_t_1) break;
+
+ /* "_yaml.pyx":516
+ * done = 0
+ * while done == 0:
+ * if yaml_parser_parse(&self.parser, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ __pyx_t_2 = yaml_parser_parse((&__pyx_v_self->parser), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 516, __pyx_L1_error)
+ __pyx_t_1 = ((__pyx_t_2 == 0) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":517
+ * while done == 0:
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ * error = self._parser_error() # <<<<<<<<<<<<<<
+ * raise error
+ * if event.type == YAML_NO_EVENT:
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parser_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 517, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_error = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":518
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ * error = self._parser_error()
+ * raise error # <<<<<<<<<<<<<<
+ * if event.type == YAML_NO_EVENT:
+ * done = 1
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 518, __pyx_L1_error)
+
+ /* "_yaml.pyx":516
+ * done = 0
+ * while done == 0:
+ * if yaml_parser_parse(&self.parser, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":519
+ * error = self._parser_error()
+ * raise error
+ * if event.type == YAML_NO_EVENT: # <<<<<<<<<<<<<<
+ * done = 1
+ * else:
+ */
+ __pyx_t_1 = ((__pyx_v_event.type == YAML_NO_EVENT) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":520
+ * raise error
+ * if event.type == YAML_NO_EVENT:
+ * done = 1 # <<<<<<<<<<<<<<
+ * else:
+ * count = count+1
+ */
+ __pyx_v_done = 1;
+
+ /* "_yaml.pyx":519
+ * error = self._parser_error()
+ * raise error
+ * if event.type == YAML_NO_EVENT: # <<<<<<<<<<<<<<
+ * done = 1
+ * else:
+ */
+ goto __pyx_L6;
+ }
+
+ /* "_yaml.pyx":522
+ * done = 1
+ * else:
+ * count = count+1 # <<<<<<<<<<<<<<
+ * yaml_event_delete(&event)
+ * return count
+ */
+ /*else*/ {
+ __pyx_v_count = (__pyx_v_count + 1);
+ }
+ __pyx_L6:;
+
+ /* "_yaml.pyx":523
+ * else:
+ * count = count+1
+ * yaml_event_delete(&event) # <<<<<<<<<<<<<<
+ * return count
+ *
+ */
+ yaml_event_delete((&__pyx_v_event));
+ }
+
+ /* "_yaml.pyx":524
+ * count = count+1
+ * yaml_event_delete(&event)
+ * return count # <<<<<<<<<<<<<<
+ *
+ * cdef object _parse(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_count); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 524, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":509
+ * return False
+ *
+ * def raw_parse(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef int done
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.raw_parse", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":526
+ * return count
+ *
+ * cdef object _parse(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__parse(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ yaml_event_t __pyx_v_event;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_v_event_object = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ __Pyx_RefNannySetupContext("_parse", 0);
+
+ /* "_yaml.pyx":528
+ * cdef object _parse(self):
+ * cdef yaml_event_t event
+ * if yaml_parser_parse(&self.parser, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ __pyx_t_1 = yaml_parser_parse((&__pyx_v_self->parser), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 528, __pyx_L1_error)
+ __pyx_t_2 = ((__pyx_t_1 == 0) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":529
+ * cdef yaml_event_t event
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ * error = self._parser_error() # <<<<<<<<<<<<<<
+ * raise error
+ * event_object = self._event_to_object(&event)
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parser_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 529, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_error = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":530
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ * error = self._parser_error()
+ * raise error # <<<<<<<<<<<<<<
+ * event_object = self._event_to_object(&event)
+ * yaml_event_delete(&event)
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 530, __pyx_L1_error)
+
+ /* "_yaml.pyx":528
+ * cdef object _parse(self):
+ * cdef yaml_event_t event
+ * if yaml_parser_parse(&self.parser, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":531
+ * error = self._parser_error()
+ * raise error
+ * event_object = self._event_to_object(&event) # <<<<<<<<<<<<<<
+ * yaml_event_delete(&event)
+ * return event_object
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_event_to_object(__pyx_v_self, (&__pyx_v_event)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 531, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_event_object = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":532
+ * raise error
+ * event_object = self._event_to_object(&event)
+ * yaml_event_delete(&event) # <<<<<<<<<<<<<<
+ * return event_object
+ *
+ */
+ yaml_event_delete((&__pyx_v_event));
+
+ /* "_yaml.pyx":533
+ * event_object = self._event_to_object(&event)
+ * yaml_event_delete(&event)
+ * return event_object # <<<<<<<<<<<<<<
+ *
+ * cdef object _event_to_object(self, yaml_event_t *event):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_event_object);
+ __pyx_r = __pyx_v_event_object;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":526
+ * return count
+ *
+ * cdef object _parse(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser._parse", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XDECREF(__pyx_v_event_object);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":535
+ * return event_object
+ *
+ * cdef object _event_to_object(self, yaml_event_t *event): # <<<<<<<<<<<<<<
+ * cdef yaml_tag_directive_t *tag_directive
+ * start_mark = Mark(self.stream_name,
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__event_to_object(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, yaml_event_t *__pyx_v_event) {
+ yaml_tag_directive_t *__pyx_v_tag_directive;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark = NULL;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_end_mark = NULL;
+ PyObject *__pyx_v_encoding = NULL;
+ int __pyx_v_explicit;
+ PyObject *__pyx_v_version = NULL;
+ PyObject *__pyx_v_tags = NULL;
+ PyObject *__pyx_v_handle = NULL;
+ PyObject *__pyx_v_prefix = NULL;
+ PyObject *__pyx_v_anchor = NULL;
+ PyObject *__pyx_v_tag = NULL;
+ PyObject *__pyx_v_value = NULL;
+ int __pyx_v_plain_implicit;
+ int __pyx_v_quoted_implicit;
+ PyObject *__pyx_v_style = NULL;
+ int __pyx_v_implicit;
+ PyObject *__pyx_v_flow_style = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ yaml_tag_directive_t *__pyx_t_7;
+ PyObject *__pyx_t_8 = NULL;
+ __Pyx_RefNannySetupContext("_event_to_object", 0);
+
+ /* "_yaml.pyx":538
+ * cdef yaml_tag_directive_t *tag_directive
+ * start_mark = Mark(self.stream_name,
+ * event.start_mark.index, # <<<<<<<<<<<<<<
+ * event.start_mark.line,
+ * event.start_mark.column,
+ */
+ __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_event->start_mark.index); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 538, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":539
+ * start_mark = Mark(self.stream_name,
+ * event.start_mark.index,
+ * event.start_mark.line, # <<<<<<<<<<<<<<
+ * event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_event->start_mark.line); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 539, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":540
+ * event.start_mark.index,
+ * event.start_mark.line,
+ * event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * end_mark = Mark(self.stream_name,
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_event->start_mark.column); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 540, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":537
+ * cdef object _event_to_object(self, yaml_event_t *event):
+ * cdef yaml_tag_directive_t *tag_directive
+ * start_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * event.start_mark.index,
+ * event.start_mark.line,
+ */
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 537, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 537, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":543
+ * None, None)
+ * end_mark = Mark(self.stream_name,
+ * event.end_mark.index, # <<<<<<<<<<<<<<
+ * event.end_mark.line,
+ * event.end_mark.column,
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_event->end_mark.index); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 543, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":544
+ * end_mark = Mark(self.stream_name,
+ * event.end_mark.index,
+ * event.end_mark.line, # <<<<<<<<<<<<<<
+ * event.end_mark.column,
+ * None, None)
+ */
+ __pyx_t_4 = __Pyx_PyInt_FromSize_t(__pyx_v_event->end_mark.line); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 544, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":545
+ * event.end_mark.index,
+ * event.end_mark.line,
+ * event.end_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * if event.type == YAML_NO_EVENT:
+ */
+ __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_event->end_mark.column); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 545, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":542
+ * event.start_mark.column,
+ * None, None)
+ * end_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * event.end_mark.index,
+ * event.end_mark.line,
+ */
+ __pyx_t_1 = PyTuple_New(6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 542, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_t_2);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 5, Py_None);
+ __pyx_t_3 = 0;
+ __pyx_t_4 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 542, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_end_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_2);
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":547
+ * event.end_mark.column,
+ * None, None)
+ * if event.type == YAML_NO_EVENT: # <<<<<<<<<<<<<<
+ * return None
+ * elif event.type == YAML_STREAM_START_EVENT:
+ */
+ switch (__pyx_v_event->type) {
+ case YAML_NO_EVENT:
+
+ /* "_yaml.pyx":548
+ * None, None)
+ * if event.type == YAML_NO_EVENT:
+ * return None # <<<<<<<<<<<<<<
+ * elif event.type == YAML_STREAM_START_EVENT:
+ * encoding = None
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":547
+ * event.end_mark.column,
+ * None, None)
+ * if event.type == YAML_NO_EVENT: # <<<<<<<<<<<<<<
+ * return None
+ * elif event.type == YAML_STREAM_START_EVENT:
+ */
+ break;
+ case YAML_STREAM_START_EVENT:
+
+ /* "_yaml.pyx":550
+ * return None
+ * elif event.type == YAML_STREAM_START_EVENT:
+ * encoding = None # <<<<<<<<<<<<<<
+ * if event.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0:
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_encoding = ((PyObject*)Py_None);
+
+ /* "_yaml.pyx":551
+ * elif event.type == YAML_STREAM_START_EVENT:
+ * encoding = None
+ * if event.data.stream_start.encoding == YAML_UTF8_ENCODING: # <<<<<<<<<<<<<<
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8"
+ */
+ switch (__pyx_v_event->data.stream_start.encoding) {
+ case YAML_UTF8_ENCODING:
+
+ /* "_yaml.pyx":552
+ * encoding = None
+ * if event.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0: # <<<<<<<<<<<<<<
+ * encoding = u"utf-8"
+ * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ */
+ __pyx_t_5 = ((__pyx_v_self->unicode_source == 0) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":553
+ * if event.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8" # <<<<<<<<<<<<<<
+ * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ * encoding = u"utf-16-le"
+ */
+ __Pyx_INCREF(__pyx_kp_u_utf_8);
+ __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_kp_u_utf_8);
+
+ /* "_yaml.pyx":552
+ * encoding = None
+ * if event.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0: # <<<<<<<<<<<<<<
+ * encoding = u"utf-8"
+ * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ */
+ }
+
+ /* "_yaml.pyx":551
+ * elif event.type == YAML_STREAM_START_EVENT:
+ * encoding = None
+ * if event.data.stream_start.encoding == YAML_UTF8_ENCODING: # <<<<<<<<<<<<<<
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8"
+ */
+ break;
+ case YAML_UTF16LE_ENCODING:
+
+ /* "_yaml.pyx":555
+ * encoding = u"utf-8"
+ * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ * encoding = u"utf-16-le" # <<<<<<<<<<<<<<
+ * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ * encoding = u"utf-16-be"
+ */
+ __Pyx_INCREF(__pyx_kp_u_utf_16_le);
+ __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_kp_u_utf_16_le);
+
+ /* "_yaml.pyx":554
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8"
+ * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING: # <<<<<<<<<<<<<<
+ * encoding = u"utf-16-le"
+ * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ */
+ break;
+ case YAML_UTF16BE_ENCODING:
+
+ /* "_yaml.pyx":557
+ * encoding = u"utf-16-le"
+ * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ * encoding = u"utf-16-be" # <<<<<<<<<<<<<<
+ * return StreamStartEvent(start_mark, end_mark, encoding)
+ * elif event.type == YAML_STREAM_END_EVENT:
+ */
+ __Pyx_INCREF(__pyx_kp_u_utf_16_be);
+ __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_kp_u_utf_16_be);
+
+ /* "_yaml.pyx":556
+ * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ * encoding = u"utf-16-le"
+ * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING: # <<<<<<<<<<<<<<
+ * encoding = u"utf-16-be"
+ * return StreamStartEvent(start_mark, end_mark, encoding)
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":558
+ * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ * encoding = u"utf-16-be"
+ * return StreamStartEvent(start_mark, end_mark, encoding) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_STREAM_END_EVENT:
+ * return StreamEndEvent(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_StreamStartEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 558, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_encoding};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 558, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_4, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_encoding};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 558, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(3+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 558, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_4) {
+ __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_encoding);
+ __Pyx_GIVEREF(__pyx_v_encoding);
+ PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_6, __pyx_v_encoding);
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 558, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":549
+ * if event.type == YAML_NO_EVENT:
+ * return None
+ * elif event.type == YAML_STREAM_START_EVENT: # <<<<<<<<<<<<<<
+ * encoding = None
+ * if event.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ */
+ break;
+ case YAML_STREAM_END_EVENT:
+
+ /* "_yaml.pyx":560
+ * return StreamStartEvent(start_mark, end_mark, encoding)
+ * elif event.type == YAML_STREAM_END_EVENT:
+ * return StreamEndEvent(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_DOCUMENT_START_EVENT:
+ * explicit = False
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_StreamEndEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 560, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 560, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 560, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_4 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 560, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 560, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":559
+ * encoding = u"utf-16-be"
+ * return StreamStartEvent(start_mark, end_mark, encoding)
+ * elif event.type == YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * return StreamEndEvent(start_mark, end_mark)
+ * elif event.type == YAML_DOCUMENT_START_EVENT:
+ */
+ break;
+ case YAML_DOCUMENT_START_EVENT:
+
+ /* "_yaml.pyx":562
+ * return StreamEndEvent(start_mark, end_mark)
+ * elif event.type == YAML_DOCUMENT_START_EVENT:
+ * explicit = False # <<<<<<<<<<<<<<
+ * if event.data.document_start.implicit == 0:
+ * explicit = True
+ */
+ __pyx_v_explicit = 0;
+
+ /* "_yaml.pyx":563
+ * elif event.type == YAML_DOCUMENT_START_EVENT:
+ * explicit = False
+ * if event.data.document_start.implicit == 0: # <<<<<<<<<<<<<<
+ * explicit = True
+ * version = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.document_start.implicit == 0) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":564
+ * explicit = False
+ * if event.data.document_start.implicit == 0:
+ * explicit = True # <<<<<<<<<<<<<<
+ * version = None
+ * if event.data.document_start.version_directive != NULL:
+ */
+ __pyx_v_explicit = 1;
+
+ /* "_yaml.pyx":563
+ * elif event.type == YAML_DOCUMENT_START_EVENT:
+ * explicit = False
+ * if event.data.document_start.implicit == 0: # <<<<<<<<<<<<<<
+ * explicit = True
+ * version = None
+ */
+ }
+
+ /* "_yaml.pyx":565
+ * if event.data.document_start.implicit == 0:
+ * explicit = True
+ * version = None # <<<<<<<<<<<<<<
+ * if event.data.document_start.version_directive != NULL:
+ * version = (event.data.document_start.version_directive.major,
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_version = Py_None;
+
+ /* "_yaml.pyx":566
+ * explicit = True
+ * version = None
+ * if event.data.document_start.version_directive != NULL: # <<<<<<<<<<<<<<
+ * version = (event.data.document_start.version_directive.major,
+ * event.data.document_start.version_directive.minor)
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.document_start.version_directive != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":567
+ * version = None
+ * if event.data.document_start.version_directive != NULL:
+ * version = (event.data.document_start.version_directive.major, # <<<<<<<<<<<<<<
+ * event.data.document_start.version_directive.minor)
+ * tags = None
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_event->data.document_start.version_directive->major); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 567, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":568
+ * if event.data.document_start.version_directive != NULL:
+ * version = (event.data.document_start.version_directive.major,
+ * event.data.document_start.version_directive.minor) # <<<<<<<<<<<<<<
+ * tags = None
+ * if event.data.document_start.tag_directives.start != NULL:
+ */
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_event->data.document_start.version_directive->minor); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 568, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":567
+ * version = None
+ * if event.data.document_start.version_directive != NULL:
+ * version = (event.data.document_start.version_directive.major, # <<<<<<<<<<<<<<
+ * event.data.document_start.version_directive.minor)
+ * tags = None
+ */
+ __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 567, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __pyx_t_2 = 0;
+ __pyx_t_1 = 0;
+ __Pyx_DECREF_SET(__pyx_v_version, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":566
+ * explicit = True
+ * version = None
+ * if event.data.document_start.version_directive != NULL: # <<<<<<<<<<<<<<
+ * version = (event.data.document_start.version_directive.major,
+ * event.data.document_start.version_directive.minor)
+ */
+ }
+
+ /* "_yaml.pyx":569
+ * version = (event.data.document_start.version_directive.major,
+ * event.data.document_start.version_directive.minor)
+ * tags = None # <<<<<<<<<<<<<<
+ * if event.data.document_start.tag_directives.start != NULL:
+ * tags = {}
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_tags = ((PyObject*)Py_None);
+
+ /* "_yaml.pyx":570
+ * event.data.document_start.version_directive.minor)
+ * tags = None
+ * if event.data.document_start.tag_directives.start != NULL: # <<<<<<<<<<<<<<
+ * tags = {}
+ * tag_directive = event.data.document_start.tag_directives.start
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.document_start.tag_directives.start != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":571
+ * tags = None
+ * if event.data.document_start.tag_directives.start != NULL:
+ * tags = {} # <<<<<<<<<<<<<<
+ * tag_directive = event.data.document_start.tag_directives.start
+ * while tag_directive != event.data.document_start.tag_directives.end:
+ */
+ __pyx_t_4 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 571, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_tags, ((PyObject*)__pyx_t_4));
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":572
+ * if event.data.document_start.tag_directives.start != NULL:
+ * tags = {}
+ * tag_directive = event.data.document_start.tag_directives.start # <<<<<<<<<<<<<<
+ * while tag_directive != event.data.document_start.tag_directives.end:
+ * handle = PyUnicode_FromString(tag_directive.handle)
+ */
+ __pyx_t_7 = __pyx_v_event->data.document_start.tag_directives.start;
+ __pyx_v_tag_directive = __pyx_t_7;
+
+ /* "_yaml.pyx":573
+ * tags = {}
+ * tag_directive = event.data.document_start.tag_directives.start
+ * while tag_directive != event.data.document_start.tag_directives.end: # <<<<<<<<<<<<<<
+ * handle = PyUnicode_FromString(tag_directive.handle)
+ * prefix = PyUnicode_FromString(tag_directive.prefix)
+ */
+ while (1) {
+ __pyx_t_5 = ((__pyx_v_tag_directive != __pyx_v_event->data.document_start.tag_directives.end) != 0);
+ if (!__pyx_t_5) break;
+
+ /* "_yaml.pyx":574
+ * tag_directive = event.data.document_start.tag_directives.start
+ * while tag_directive != event.data.document_start.tag_directives.end:
+ * handle = PyUnicode_FromString(tag_directive.handle) # <<<<<<<<<<<<<<
+ * prefix = PyUnicode_FromString(tag_directive.prefix)
+ * tags[handle] = prefix
+ */
+ __pyx_t_4 = PyUnicode_FromString(__pyx_v_tag_directive->handle); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 574, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_XDECREF_SET(__pyx_v_handle, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":575
+ * while tag_directive != event.data.document_start.tag_directives.end:
+ * handle = PyUnicode_FromString(tag_directive.handle)
+ * prefix = PyUnicode_FromString(tag_directive.prefix) # <<<<<<<<<<<<<<
+ * tags[handle] = prefix
+ * tag_directive = tag_directive+1
+ */
+ __pyx_t_4 = PyUnicode_FromString(__pyx_v_tag_directive->prefix); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 575, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_XDECREF_SET(__pyx_v_prefix, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":576
+ * handle = PyUnicode_FromString(tag_directive.handle)
+ * prefix = PyUnicode_FromString(tag_directive.prefix)
+ * tags[handle] = prefix # <<<<<<<<<<<<<<
+ * tag_directive = tag_directive+1
+ * return DocumentStartEvent(start_mark, end_mark,
+ */
+ if (unlikely(PyDict_SetItem(__pyx_v_tags, __pyx_v_handle, __pyx_v_prefix) < 0)) __PYX_ERR(0, 576, __pyx_L1_error)
+
+ /* "_yaml.pyx":577
+ * prefix = PyUnicode_FromString(tag_directive.prefix)
+ * tags[handle] = prefix
+ * tag_directive = tag_directive+1 # <<<<<<<<<<<<<<
+ * return DocumentStartEvent(start_mark, end_mark,
+ * explicit, version, tags)
+ */
+ __pyx_v_tag_directive = (__pyx_v_tag_directive + 1);
+ }
+
+ /* "_yaml.pyx":570
+ * event.data.document_start.version_directive.minor)
+ * tags = None
+ * if event.data.document_start.tag_directives.start != NULL: # <<<<<<<<<<<<<<
+ * tags = {}
+ * tag_directive = event.data.document_start.tag_directives.start
+ */
+ }
+
+ /* "_yaml.pyx":578
+ * tags[handle] = prefix
+ * tag_directive = tag_directive+1
+ * return DocumentStartEvent(start_mark, end_mark, # <<<<<<<<<<<<<<
+ * explicit, version, tags)
+ * elif event.type == YAML_DOCUMENT_END_EVENT:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_DocumentStartEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 578, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":579
+ * tag_directive = tag_directive+1
+ * return DocumentStartEvent(start_mark, end_mark,
+ * explicit, version, tags) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_DOCUMENT_END_EVENT:
+ * explicit = False
+ */
+ __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_explicit); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 579, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_t_2, __pyx_v_version, __pyx_v_tags};
+ __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 5+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 578, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_t_2, __pyx_v_version, __pyx_v_tags};
+ __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 5+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 578, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_8 = PyTuple_New(5+__pyx_t_6); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 578, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_8, 2+__pyx_t_6, __pyx_t_2);
+ __Pyx_INCREF(__pyx_v_version);
+ __Pyx_GIVEREF(__pyx_v_version);
+ PyTuple_SET_ITEM(__pyx_t_8, 3+__pyx_t_6, __pyx_v_version);
+ __Pyx_INCREF(__pyx_v_tags);
+ __Pyx_GIVEREF(__pyx_v_tags);
+ PyTuple_SET_ITEM(__pyx_t_8, 4+__pyx_t_6, __pyx_v_tags);
+ __pyx_t_2 = 0;
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_8, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 578, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":561
+ * elif event.type == YAML_STREAM_END_EVENT:
+ * return StreamEndEvent(start_mark, end_mark)
+ * elif event.type == YAML_DOCUMENT_START_EVENT: # <<<<<<<<<<<<<<
+ * explicit = False
+ * if event.data.document_start.implicit == 0:
+ */
+ break;
+ case YAML_DOCUMENT_END_EVENT:
+
+ /* "_yaml.pyx":581
+ * explicit, version, tags)
+ * elif event.type == YAML_DOCUMENT_END_EVENT:
+ * explicit = False # <<<<<<<<<<<<<<
+ * if event.data.document_end.implicit == 0:
+ * explicit = True
+ */
+ __pyx_v_explicit = 0;
+
+ /* "_yaml.pyx":582
+ * elif event.type == YAML_DOCUMENT_END_EVENT:
+ * explicit = False
+ * if event.data.document_end.implicit == 0: # <<<<<<<<<<<<<<
+ * explicit = True
+ * return DocumentEndEvent(start_mark, end_mark, explicit)
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.document_end.implicit == 0) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":583
+ * explicit = False
+ * if event.data.document_end.implicit == 0:
+ * explicit = True # <<<<<<<<<<<<<<
+ * return DocumentEndEvent(start_mark, end_mark, explicit)
+ * elif event.type == YAML_ALIAS_EVENT:
+ */
+ __pyx_v_explicit = 1;
+
+ /* "_yaml.pyx":582
+ * elif event.type == YAML_DOCUMENT_END_EVENT:
+ * explicit = False
+ * if event.data.document_end.implicit == 0: # <<<<<<<<<<<<<<
+ * explicit = True
+ * return DocumentEndEvent(start_mark, end_mark, explicit)
+ */
+ }
+
+ /* "_yaml.pyx":584
+ * if event.data.document_end.implicit == 0:
+ * explicit = True
+ * return DocumentEndEvent(start_mark, end_mark, explicit) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_ALIAS_EVENT:
+ * anchor = PyUnicode_FromString(event.data.alias.anchor)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_DocumentEndEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 584, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_8 = __Pyx_PyBool_FromLong(__pyx_v_explicit); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 584, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __pyx_t_2 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_2)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_2);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_2, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_t_8};
+ __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 584, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_2, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_t_8};
+ __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 584, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(3+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 584, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_2) {
+ __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_2); __pyx_t_2 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(__pyx_t_8);
+ PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_6, __pyx_t_8);
+ __pyx_t_8 = 0;
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 584, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":580
+ * return DocumentStartEvent(start_mark, end_mark,
+ * explicit, version, tags)
+ * elif event.type == YAML_DOCUMENT_END_EVENT: # <<<<<<<<<<<<<<
+ * explicit = False
+ * if event.data.document_end.implicit == 0:
+ */
+ break;
+ case YAML_ALIAS_EVENT:
+
+ /* "_yaml.pyx":586
+ * return DocumentEndEvent(start_mark, end_mark, explicit)
+ * elif event.type == YAML_ALIAS_EVENT:
+ * anchor = PyUnicode_FromString(event.data.alias.anchor) # <<<<<<<<<<<<<<
+ * return AliasEvent(anchor, start_mark, end_mark)
+ * elif event.type == YAML_SCALAR_EVENT:
+ */
+ __pyx_t_4 = PyUnicode_FromString(__pyx_v_event->data.alias.anchor); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 586, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_anchor = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":587
+ * elif event.type == YAML_ALIAS_EVENT:
+ * anchor = PyUnicode_FromString(event.data.alias.anchor)
+ * return AliasEvent(anchor, start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_SCALAR_EVENT:
+ * anchor = None
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_AliasEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 587, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_anchor, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 587, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_anchor, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 587, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ } else
+ #endif
+ {
+ __pyx_t_8 = PyTuple_New(3+__pyx_t_6); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 587, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_anchor);
+ __Pyx_GIVEREF(__pyx_v_anchor);
+ PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_6, __pyx_v_anchor);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_8, 2+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_8, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 587, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":585
+ * explicit = True
+ * return DocumentEndEvent(start_mark, end_mark, explicit)
+ * elif event.type == YAML_ALIAS_EVENT: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(event.data.alias.anchor)
+ * return AliasEvent(anchor, start_mark, end_mark)
+ */
+ break;
+ case YAML_SCALAR_EVENT:
+
+ /* "_yaml.pyx":589
+ * return AliasEvent(anchor, start_mark, end_mark)
+ * elif event.type == YAML_SCALAR_EVENT:
+ * anchor = None # <<<<<<<<<<<<<<
+ * if event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.scalar.anchor)
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_anchor = Py_None;
+
+ /* "_yaml.pyx":590
+ * elif event.type == YAML_SCALAR_EVENT:
+ * anchor = None
+ * if event.data.scalar.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(event.data.scalar.anchor)
+ * tag = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.scalar.anchor != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":591
+ * anchor = None
+ * if event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.scalar.anchor) # <<<<<<<<<<<<<<
+ * tag = None
+ * if event.data.scalar.tag != NULL:
+ */
+ __pyx_t_4 = PyUnicode_FromString(__pyx_v_event->data.scalar.anchor); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 591, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_anchor, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":590
+ * elif event.type == YAML_SCALAR_EVENT:
+ * anchor = None
+ * if event.data.scalar.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(event.data.scalar.anchor)
+ * tag = None
+ */
+ }
+
+ /* "_yaml.pyx":592
+ * if event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.scalar.anchor)
+ * tag = None # <<<<<<<<<<<<<<
+ * if event.data.scalar.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.scalar.tag)
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_tag = Py_None;
+
+ /* "_yaml.pyx":593
+ * anchor = PyUnicode_FromString(event.data.scalar.anchor)
+ * tag = None
+ * if event.data.scalar.tag != NULL: # <<<<<<<<<<<<<<
+ * tag = PyUnicode_FromString(event.data.scalar.tag)
+ * value = PyUnicode_DecodeUTF8(event.data.scalar.value,
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.scalar.tag != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":594
+ * tag = None
+ * if event.data.scalar.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.scalar.tag) # <<<<<<<<<<<<<<
+ * value = PyUnicode_DecodeUTF8(event.data.scalar.value,
+ * event.data.scalar.length, 'strict')
+ */
+ __pyx_t_4 = PyUnicode_FromString(__pyx_v_event->data.scalar.tag); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 594, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_tag, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":593
+ * anchor = PyUnicode_FromString(event.data.scalar.anchor)
+ * tag = None
+ * if event.data.scalar.tag != NULL: # <<<<<<<<<<<<<<
+ * tag = PyUnicode_FromString(event.data.scalar.tag)
+ * value = PyUnicode_DecodeUTF8(event.data.scalar.value,
+ */
+ }
+
+ /* "_yaml.pyx":595
+ * if event.data.scalar.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.scalar.tag)
+ * value = PyUnicode_DecodeUTF8(event.data.scalar.value, # <<<<<<<<<<<<<<
+ * event.data.scalar.length, 'strict')
+ * plain_implicit = False
+ */
+ __pyx_t_4 = PyUnicode_DecodeUTF8(__pyx_v_event->data.scalar.value, __pyx_v_event->data.scalar.length, ((char *)"strict")); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 595, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_value = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":597
+ * value = PyUnicode_DecodeUTF8(event.data.scalar.value,
+ * event.data.scalar.length, 'strict')
+ * plain_implicit = False # <<<<<<<<<<<<<<
+ * if event.data.scalar.plain_implicit == 1:
+ * plain_implicit = True
+ */
+ __pyx_v_plain_implicit = 0;
+
+ /* "_yaml.pyx":598
+ * event.data.scalar.length, 'strict')
+ * plain_implicit = False
+ * if event.data.scalar.plain_implicit == 1: # <<<<<<<<<<<<<<
+ * plain_implicit = True
+ * quoted_implicit = False
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.scalar.plain_implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":599
+ * plain_implicit = False
+ * if event.data.scalar.plain_implicit == 1:
+ * plain_implicit = True # <<<<<<<<<<<<<<
+ * quoted_implicit = False
+ * if event.data.scalar.quoted_implicit == 1:
+ */
+ __pyx_v_plain_implicit = 1;
+
+ /* "_yaml.pyx":598
+ * event.data.scalar.length, 'strict')
+ * plain_implicit = False
+ * if event.data.scalar.plain_implicit == 1: # <<<<<<<<<<<<<<
+ * plain_implicit = True
+ * quoted_implicit = False
+ */
+ }
+
+ /* "_yaml.pyx":600
+ * if event.data.scalar.plain_implicit == 1:
+ * plain_implicit = True
+ * quoted_implicit = False # <<<<<<<<<<<<<<
+ * if event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True
+ */
+ __pyx_v_quoted_implicit = 0;
+
+ /* "_yaml.pyx":601
+ * plain_implicit = True
+ * quoted_implicit = False
+ * if event.data.scalar.quoted_implicit == 1: # <<<<<<<<<<<<<<
+ * quoted_implicit = True
+ * style = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.scalar.quoted_implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":602
+ * quoted_implicit = False
+ * if event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True # <<<<<<<<<<<<<<
+ * style = None
+ * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ */
+ __pyx_v_quoted_implicit = 1;
+
+ /* "_yaml.pyx":601
+ * plain_implicit = True
+ * quoted_implicit = False
+ * if event.data.scalar.quoted_implicit == 1: # <<<<<<<<<<<<<<
+ * quoted_implicit = True
+ * style = None
+ */
+ }
+
+ /* "_yaml.pyx":603
+ * if event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True
+ * style = None # <<<<<<<<<<<<<<
+ * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * style = u''
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_style = ((PyObject*)Py_None);
+
+ /* "_yaml.pyx":604
+ * quoted_implicit = True
+ * style = None
+ * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u''
+ * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ */
+ switch (__pyx_v_event->data.scalar.style) {
+ case YAML_PLAIN_SCALAR_STYLE:
+
+ /* "_yaml.pyx":605
+ * style = None
+ * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * style = u'' # <<<<<<<<<<<<<<
+ * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\''
+ */
+ __Pyx_INCREF(__pyx_kp_u__6);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__6);
+
+ /* "_yaml.pyx":604
+ * quoted_implicit = True
+ * style = None
+ * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u''
+ * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ */
+ break;
+ case YAML_SINGLE_QUOTED_SCALAR_STYLE:
+
+ /* "_yaml.pyx":607
+ * style = u''
+ * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\'' # <<<<<<<<<<<<<<
+ * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"'
+ */
+ __Pyx_INCREF(__pyx_kp_u__7);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__7);
+
+ /* "_yaml.pyx":606
+ * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * style = u''
+ * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'\''
+ * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ */
+ break;
+ case YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+
+ /* "_yaml.pyx":609
+ * style = u'\''
+ * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"' # <<<<<<<<<<<<<<
+ * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|'
+ */
+ __Pyx_INCREF(__pyx_kp_u__8);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__8);
+
+ /* "_yaml.pyx":608
+ * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\''
+ * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'"'
+ * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ */
+ break;
+ case YAML_LITERAL_SCALAR_STYLE:
+
+ /* "_yaml.pyx":611
+ * style = u'"'
+ * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|' # <<<<<<<<<<<<<<
+ * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ */
+ __Pyx_INCREF(__pyx_kp_u__9);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__9);
+
+ /* "_yaml.pyx":610
+ * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"'
+ * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'|'
+ * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ */
+ break;
+ case YAML_FOLDED_SCALAR_STYLE:
+
+ /* "_yaml.pyx":613
+ * style = u'|'
+ * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>' # <<<<<<<<<<<<<<
+ * return ScalarEvent(anchor, tag,
+ * (plain_implicit, quoted_implicit),
+ */
+ __Pyx_INCREF(__pyx_kp_u__10);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__10);
+
+ /* "_yaml.pyx":612
+ * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|'
+ * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'>'
+ * return ScalarEvent(anchor, tag,
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":614
+ * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ * return ScalarEvent(anchor, tag, # <<<<<<<<<<<<<<
+ * (plain_implicit, quoted_implicit),
+ * value, start_mark, end_mark, style)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_ScalarEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 614, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":615
+ * style = u'>'
+ * return ScalarEvent(anchor, tag,
+ * (plain_implicit, quoted_implicit), # <<<<<<<<<<<<<<
+ * value, start_mark, end_mark, style)
+ * elif event.type == YAML_SEQUENCE_START_EVENT:
+ */
+ __pyx_t_8 = __Pyx_PyBool_FromLong(__pyx_v_plain_implicit); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 615, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_quoted_implicit); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 615, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 615, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_8);
+ PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_8);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_3);
+ __pyx_t_8 = 0;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":616
+ * return ScalarEvent(anchor, tag,
+ * (plain_implicit, quoted_implicit),
+ * value, start_mark, end_mark, style) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_SEQUENCE_START_EVENT:
+ * anchor = None
+ */
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[8] = {__pyx_t_3, __pyx_v_anchor, __pyx_v_tag, __pyx_t_2, __pyx_v_value, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_style};
+ __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 7+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 614, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[8] = {__pyx_t_3, __pyx_v_anchor, __pyx_v_tag, __pyx_t_2, __pyx_v_value, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_style};
+ __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 7+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 614, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_8 = PyTuple_New(7+__pyx_t_6); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 614, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_anchor);
+ __Pyx_GIVEREF(__pyx_v_anchor);
+ PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_6, __pyx_v_anchor);
+ __Pyx_INCREF(__pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_6, __pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_8, 2+__pyx_t_6, __pyx_t_2);
+ __Pyx_INCREF(__pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_8, 3+__pyx_t_6, __pyx_v_value);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_8, 4+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_8, 5+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_style);
+ __Pyx_GIVEREF(__pyx_v_style);
+ PyTuple_SET_ITEM(__pyx_t_8, 6+__pyx_t_6, __pyx_v_style);
+ __pyx_t_2 = 0;
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_8, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 614, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":588
+ * anchor = PyUnicode_FromString(event.data.alias.anchor)
+ * return AliasEvent(anchor, start_mark, end_mark)
+ * elif event.type == YAML_SCALAR_EVENT: # <<<<<<<<<<<<<<
+ * anchor = None
+ * if event.data.scalar.anchor != NULL:
+ */
+ break;
+ case YAML_SEQUENCE_START_EVENT:
+
+ /* "_yaml.pyx":618
+ * value, start_mark, end_mark, style)
+ * elif event.type == YAML_SEQUENCE_START_EVENT:
+ * anchor = None # <<<<<<<<<<<<<<
+ * if event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.sequence_start.anchor)
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_anchor = Py_None;
+
+ /* "_yaml.pyx":619
+ * elif event.type == YAML_SEQUENCE_START_EVENT:
+ * anchor = None
+ * if event.data.sequence_start.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(event.data.sequence_start.anchor)
+ * tag = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.sequence_start.anchor != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":620
+ * anchor = None
+ * if event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.sequence_start.anchor) # <<<<<<<<<<<<<<
+ * tag = None
+ * if event.data.sequence_start.tag != NULL:
+ */
+ __pyx_t_4 = PyUnicode_FromString(__pyx_v_event->data.sequence_start.anchor); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 620, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_anchor, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":619
+ * elif event.type == YAML_SEQUENCE_START_EVENT:
+ * anchor = None
+ * if event.data.sequence_start.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(event.data.sequence_start.anchor)
+ * tag = None
+ */
+ }
+
+ /* "_yaml.pyx":621
+ * if event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.sequence_start.anchor)
+ * tag = None # <<<<<<<<<<<<<<
+ * if event.data.sequence_start.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.sequence_start.tag)
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_tag = Py_None;
+
+ /* "_yaml.pyx":622
+ * anchor = PyUnicode_FromString(event.data.sequence_start.anchor)
+ * tag = None
+ * if event.data.sequence_start.tag != NULL: # <<<<<<<<<<<<<<
+ * tag = PyUnicode_FromString(event.data.sequence_start.tag)
+ * implicit = False
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.sequence_start.tag != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":623
+ * tag = None
+ * if event.data.sequence_start.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.sequence_start.tag) # <<<<<<<<<<<<<<
+ * implicit = False
+ * if event.data.sequence_start.implicit == 1:
+ */
+ __pyx_t_4 = PyUnicode_FromString(__pyx_v_event->data.sequence_start.tag); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 623, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_tag, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":622
+ * anchor = PyUnicode_FromString(event.data.sequence_start.anchor)
+ * tag = None
+ * if event.data.sequence_start.tag != NULL: # <<<<<<<<<<<<<<
+ * tag = PyUnicode_FromString(event.data.sequence_start.tag)
+ * implicit = False
+ */
+ }
+
+ /* "_yaml.pyx":624
+ * if event.data.sequence_start.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.sequence_start.tag)
+ * implicit = False # <<<<<<<<<<<<<<
+ * if event.data.sequence_start.implicit == 1:
+ * implicit = True
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":625
+ * tag = PyUnicode_FromString(event.data.sequence_start.tag)
+ * implicit = False
+ * if event.data.sequence_start.implicit == 1: # <<<<<<<<<<<<<<
+ * implicit = True
+ * flow_style = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.sequence_start.implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":626
+ * implicit = False
+ * if event.data.sequence_start.implicit == 1:
+ * implicit = True # <<<<<<<<<<<<<<
+ * flow_style = None
+ * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ */
+ __pyx_v_implicit = 1;
+
+ /* "_yaml.pyx":625
+ * tag = PyUnicode_FromString(event.data.sequence_start.tag)
+ * implicit = False
+ * if event.data.sequence_start.implicit == 1: # <<<<<<<<<<<<<<
+ * implicit = True
+ * flow_style = None
+ */
+ }
+
+ /* "_yaml.pyx":627
+ * if event.data.sequence_start.implicit == 1:
+ * implicit = True
+ * flow_style = None # <<<<<<<<<<<<<<
+ * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ * flow_style = True
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_flow_style = Py_None;
+
+ /* "_yaml.pyx":628
+ * implicit = True
+ * flow_style = None
+ * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = True
+ * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ */
+ switch (__pyx_v_event->data.sequence_start.style) {
+ case YAML_FLOW_SEQUENCE_STYLE:
+
+ /* "_yaml.pyx":629
+ * flow_style = None
+ * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ * flow_style = True # <<<<<<<<<<<<<<
+ * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ * flow_style = False
+ */
+ __Pyx_INCREF(Py_True);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_True);
+
+ /* "_yaml.pyx":628
+ * implicit = True
+ * flow_style = None
+ * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = True
+ * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ */
+ break;
+ case YAML_BLOCK_SEQUENCE_STYLE:
+
+ /* "_yaml.pyx":631
+ * flow_style = True
+ * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ * flow_style = False # <<<<<<<<<<<<<<
+ * return SequenceStartEvent(anchor, tag, implicit,
+ * start_mark, end_mark, flow_style)
+ */
+ __Pyx_INCREF(Py_False);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_False);
+
+ /* "_yaml.pyx":630
+ * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ * flow_style = True
+ * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = False
+ * return SequenceStartEvent(anchor, tag, implicit,
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":632
+ * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ * flow_style = False
+ * return SequenceStartEvent(anchor, tag, implicit, # <<<<<<<<<<<<<<
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_MAPPING_START_EVENT:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_SequenceStartEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 632, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_8 = __Pyx_PyBool_FromLong(__pyx_v_implicit); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 632, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+
+ /* "_yaml.pyx":633
+ * flow_style = False
+ * return SequenceStartEvent(anchor, tag, implicit,
+ * start_mark, end_mark, flow_style) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_MAPPING_START_EVENT:
+ * anchor = None
+ */
+ __pyx_t_2 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_2)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_2);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[7] = {__pyx_t_2, __pyx_v_anchor, __pyx_v_tag, __pyx_t_8, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_flow_style};
+ __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 6+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 632, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[7] = {__pyx_t_2, __pyx_v_anchor, __pyx_v_tag, __pyx_t_8, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_flow_style};
+ __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 6+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 632, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(6+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 632, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_2) {
+ __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_2); __pyx_t_2 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_anchor);
+ __Pyx_GIVEREF(__pyx_v_anchor);
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, __pyx_v_anchor);
+ __Pyx_INCREF(__pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, __pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_t_8);
+ PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_6, __pyx_t_8);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 3+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 4+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_flow_style);
+ __Pyx_GIVEREF(__pyx_v_flow_style);
+ PyTuple_SET_ITEM(__pyx_t_3, 5+__pyx_t_6, __pyx_v_flow_style);
+ __pyx_t_8 = 0;
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 632, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":617
+ * (plain_implicit, quoted_implicit),
+ * value, start_mark, end_mark, style)
+ * elif event.type == YAML_SEQUENCE_START_EVENT: # <<<<<<<<<<<<<<
+ * anchor = None
+ * if event.data.sequence_start.anchor != NULL:
+ */
+ break;
+ case YAML_MAPPING_START_EVENT:
+
+ /* "_yaml.pyx":635
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_MAPPING_START_EVENT:
+ * anchor = None # <<<<<<<<<<<<<<
+ * if event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.mapping_start.anchor)
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_anchor = Py_None;
+
+ /* "_yaml.pyx":636
+ * elif event.type == YAML_MAPPING_START_EVENT:
+ * anchor = None
+ * if event.data.mapping_start.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(event.data.mapping_start.anchor)
+ * tag = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.mapping_start.anchor != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":637
+ * anchor = None
+ * if event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.mapping_start.anchor) # <<<<<<<<<<<<<<
+ * tag = None
+ * if event.data.mapping_start.tag != NULL:
+ */
+ __pyx_t_4 = PyUnicode_FromString(__pyx_v_event->data.mapping_start.anchor); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 637, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_anchor, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":636
+ * elif event.type == YAML_MAPPING_START_EVENT:
+ * anchor = None
+ * if event.data.mapping_start.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(event.data.mapping_start.anchor)
+ * tag = None
+ */
+ }
+
+ /* "_yaml.pyx":638
+ * if event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.mapping_start.anchor)
+ * tag = None # <<<<<<<<<<<<<<
+ * if event.data.mapping_start.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.mapping_start.tag)
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_tag = Py_None;
+
+ /* "_yaml.pyx":639
+ * anchor = PyUnicode_FromString(event.data.mapping_start.anchor)
+ * tag = None
+ * if event.data.mapping_start.tag != NULL: # <<<<<<<<<<<<<<
+ * tag = PyUnicode_FromString(event.data.mapping_start.tag)
+ * implicit = False
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.mapping_start.tag != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":640
+ * tag = None
+ * if event.data.mapping_start.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.mapping_start.tag) # <<<<<<<<<<<<<<
+ * implicit = False
+ * if event.data.mapping_start.implicit == 1:
+ */
+ __pyx_t_4 = PyUnicode_FromString(__pyx_v_event->data.mapping_start.tag); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 640, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_tag, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":639
+ * anchor = PyUnicode_FromString(event.data.mapping_start.anchor)
+ * tag = None
+ * if event.data.mapping_start.tag != NULL: # <<<<<<<<<<<<<<
+ * tag = PyUnicode_FromString(event.data.mapping_start.tag)
+ * implicit = False
+ */
+ }
+
+ /* "_yaml.pyx":641
+ * if event.data.mapping_start.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.mapping_start.tag)
+ * implicit = False # <<<<<<<<<<<<<<
+ * if event.data.mapping_start.implicit == 1:
+ * implicit = True
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":642
+ * tag = PyUnicode_FromString(event.data.mapping_start.tag)
+ * implicit = False
+ * if event.data.mapping_start.implicit == 1: # <<<<<<<<<<<<<<
+ * implicit = True
+ * flow_style = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.mapping_start.implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":643
+ * implicit = False
+ * if event.data.mapping_start.implicit == 1:
+ * implicit = True # <<<<<<<<<<<<<<
+ * flow_style = None
+ * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ */
+ __pyx_v_implicit = 1;
+
+ /* "_yaml.pyx":642
+ * tag = PyUnicode_FromString(event.data.mapping_start.tag)
+ * implicit = False
+ * if event.data.mapping_start.implicit == 1: # <<<<<<<<<<<<<<
+ * implicit = True
+ * flow_style = None
+ */
+ }
+
+ /* "_yaml.pyx":644
+ * if event.data.mapping_start.implicit == 1:
+ * implicit = True
+ * flow_style = None # <<<<<<<<<<<<<<
+ * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ * flow_style = True
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_flow_style = Py_None;
+
+ /* "_yaml.pyx":645
+ * implicit = True
+ * flow_style = None
+ * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = True
+ * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ */
+ switch (__pyx_v_event->data.mapping_start.style) {
+ case YAML_FLOW_MAPPING_STYLE:
+
+ /* "_yaml.pyx":646
+ * flow_style = None
+ * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ * flow_style = True # <<<<<<<<<<<<<<
+ * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ * flow_style = False
+ */
+ __Pyx_INCREF(Py_True);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_True);
+
+ /* "_yaml.pyx":645
+ * implicit = True
+ * flow_style = None
+ * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = True
+ * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ */
+ break;
+ case YAML_BLOCK_MAPPING_STYLE:
+
+ /* "_yaml.pyx":648
+ * flow_style = True
+ * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ * flow_style = False # <<<<<<<<<<<<<<
+ * return MappingStartEvent(anchor, tag, implicit,
+ * start_mark, end_mark, flow_style)
+ */
+ __Pyx_INCREF(Py_False);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_False);
+
+ /* "_yaml.pyx":647
+ * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ * flow_style = True
+ * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = False
+ * return MappingStartEvent(anchor, tag, implicit,
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":649
+ * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ * flow_style = False
+ * return MappingStartEvent(anchor, tag, implicit, # <<<<<<<<<<<<<<
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_SEQUENCE_END_EVENT:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_MappingStartEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 649, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_implicit); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 649, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":650
+ * flow_style = False
+ * return MappingStartEvent(anchor, tag, implicit,
+ * start_mark, end_mark, flow_style) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_SEQUENCE_END_EVENT:
+ * return SequenceEndEvent(start_mark, end_mark)
+ */
+ __pyx_t_8 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_8)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_8);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[7] = {__pyx_t_8, __pyx_v_anchor, __pyx_v_tag, __pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_flow_style};
+ __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 6+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 649, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[7] = {__pyx_t_8, __pyx_v_anchor, __pyx_v_tag, __pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_flow_style};
+ __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 6+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 649, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_2 = PyTuple_New(6+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 649, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ if (__pyx_t_8) {
+ __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_8); __pyx_t_8 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_anchor);
+ __Pyx_GIVEREF(__pyx_v_anchor);
+ PyTuple_SET_ITEM(__pyx_t_2, 0+__pyx_t_6, __pyx_v_anchor);
+ __Pyx_INCREF(__pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_2, 1+__pyx_t_6, __pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_2, 2+__pyx_t_6, __pyx_t_3);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 3+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 4+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_flow_style);
+ __Pyx_GIVEREF(__pyx_v_flow_style);
+ PyTuple_SET_ITEM(__pyx_t_2, 5+__pyx_t_6, __pyx_v_flow_style);
+ __pyx_t_3 = 0;
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_2, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 649, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":634
+ * return SequenceStartEvent(anchor, tag, implicit,
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_MAPPING_START_EVENT: # <<<<<<<<<<<<<<
+ * anchor = None
+ * if event.data.mapping_start.anchor != NULL:
+ */
+ break;
+ case YAML_SEQUENCE_END_EVENT:
+
+ /* "_yaml.pyx":652
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_SEQUENCE_END_EVENT:
+ * return SequenceEndEvent(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_MAPPING_END_EVENT:
+ * return MappingEndEvent(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_SequenceEndEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 652, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_2)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_2);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_2, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 652, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_2, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 652, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 652, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_2) {
+ __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_2); __pyx_t_2 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 652, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":651
+ * return MappingStartEvent(anchor, tag, implicit,
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_SEQUENCE_END_EVENT: # <<<<<<<<<<<<<<
+ * return SequenceEndEvent(start_mark, end_mark)
+ * elif event.type == YAML_MAPPING_END_EVENT:
+ */
+ break;
+ case YAML_MAPPING_END_EVENT:
+
+ /* "_yaml.pyx":654
+ * return SequenceEndEvent(start_mark, end_mark)
+ * elif event.type == YAML_MAPPING_END_EVENT:
+ * return MappingEndEvent(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_MappingEndEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 654, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = NULL;
+ __pyx_t_6 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_6 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 654, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_3, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark)};
+ __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 654, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_4);
+ } else
+ #endif
+ {
+ __pyx_t_2 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 654, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0+__pyx_t_6, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1+__pyx_t_6, ((PyObject *)__pyx_v_end_mark));
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_2, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 654, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":653
+ * elif event.type == YAML_SEQUENCE_END_EVENT:
+ * return SequenceEndEvent(start_mark, end_mark)
+ * elif event.type == YAML_MAPPING_END_EVENT: # <<<<<<<<<<<<<<
+ * return MappingEndEvent(start_mark, end_mark)
+ * else:
+ */
+ break;
+ default:
+
+ /* "_yaml.pyx":656
+ * return MappingEndEvent(start_mark, end_mark)
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("unknown event type")
+ * else:
+ */
+ __pyx_t_5 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_5)) {
+
+ /* "_yaml.pyx":657
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("unknown event type") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"unknown event type")
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__13, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 657, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 657, __pyx_L1_error)
+
+ /* "_yaml.pyx":656
+ * return MappingEndEvent(start_mark, end_mark)
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("unknown event type")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":659
+ * raise ValueError("unknown event type")
+ * else:
+ * raise ValueError(u"unknown event type") # <<<<<<<<<<<<<<
+ *
+ * def get_event(self):
+ */
+ /*else*/ {
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__14, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 659, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 659, __pyx_L1_error)
+ }
+ break;
+ }
+
+ /* "_yaml.pyx":535
+ * return event_object
+ *
+ * cdef object _event_to_object(self, yaml_event_t *event): # <<<<<<<<<<<<<<
+ * cdef yaml_tag_directive_t *tag_directive
+ * start_mark = Mark(self.stream_name,
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_8);
+ __Pyx_AddTraceback("_yaml.CParser._event_to_object", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_start_mark);
+ __Pyx_XDECREF((PyObject *)__pyx_v_end_mark);
+ __Pyx_XDECREF(__pyx_v_encoding);
+ __Pyx_XDECREF(__pyx_v_version);
+ __Pyx_XDECREF(__pyx_v_tags);
+ __Pyx_XDECREF(__pyx_v_handle);
+ __Pyx_XDECREF(__pyx_v_prefix);
+ __Pyx_XDECREF(__pyx_v_anchor);
+ __Pyx_XDECREF(__pyx_v_tag);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XDECREF(__pyx_v_style);
+ __Pyx_XDECREF(__pyx_v_flow_style);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
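The function ending above is the Cython-generated translation of CParser._event_to_object, whose _yaml.pyx source is quoted in the /* ... */ comments: each libyaml event type is unpacked into the arguments of the matching Python Event class. As a rough orientation aid only, the scalar branch (quoted _yaml.pyx lines 589-616) boils down to the pure-Python sketch below; SCALAR_STYLE_CHARS and the `scalar` argument are illustrative stand-ins for the C style enum and the yaml_event_t.data.scalar struct, not names that exist in _yaml.pyx. Sequence-start and mapping-start events follow the same shape, except that the style enum is reduced to flow_style = True / False / None rather than a style character.

    # Illustrative sketch, not part of the generated module.
    SCALAR_STYLE_CHARS = {
        'YAML_PLAIN_SCALAR_STYLE': u'',
        'YAML_SINGLE_QUOTED_SCALAR_STYLE': u"'",
        'YAML_DOUBLE_QUOTED_SCALAR_STYLE': u'"',
        'YAML_LITERAL_SCALAR_STYLE': u'|',
        'YAML_FOLDED_SCALAR_STYLE': u'>',
    }

    def scalar_event_args(scalar, start_mark, end_mark):
        # Mirrors the quoted _yaml.pyx lines 589-616.
        anchor = scalar.anchor          # None when the C anchor pointer is NULL
        tag = scalar.tag                # likewise for the tag pointer
        implicit = (scalar.plain_implicit == 1, scalar.quoted_implicit == 1)
        style = SCALAR_STYLE_CHARS.get(scalar.style)   # None for any other style
        return (anchor, tag, implicit, scalar.value, start_mark, end_mark, style)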
+/* "_yaml.pyx":661
+ * raise ValueError(u"unknown event type")
+ *
+ * def get_event(self): # <<<<<<<<<<<<<<
+ * if self.current_event is not None:
+ * value = self.current_event
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_17get_event(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_17get_event(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_event (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_16get_event(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_16get_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_v_value = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ __Pyx_RefNannySetupContext("get_event", 0);
+
+ /* "_yaml.pyx":662
+ *
+ * def get_event(self):
+ * if self.current_event is not None: # <<<<<<<<<<<<<<
+ * value = self.current_event
+ * self.current_event = None
+ */
+ __pyx_t_1 = (__pyx_v_self->current_event != Py_None);
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":663
+ * def get_event(self):
+ * if self.current_event is not None:
+ * value = self.current_event # <<<<<<<<<<<<<<
+ * self.current_event = None
+ * else:
+ */
+ __pyx_t_3 = __pyx_v_self->current_event;
+ __Pyx_INCREF(__pyx_t_3);
+ __pyx_v_value = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":664
+ * if self.current_event is not None:
+ * value = self.current_event
+ * self.current_event = None # <<<<<<<<<<<<<<
+ * else:
+ * value = self._parse()
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_GOTREF(__pyx_v_self->current_event);
+ __Pyx_DECREF(__pyx_v_self->current_event);
+ __pyx_v_self->current_event = Py_None;
+
+ /* "_yaml.pyx":662
+ *
+ * def get_event(self):
+ * if self.current_event is not None: # <<<<<<<<<<<<<<
+ * value = self.current_event
+ * self.current_event = None
+ */
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":666
+ * self.current_event = None
+ * else:
+ * value = self._parse() # <<<<<<<<<<<<<<
+ * return value
+ *
+ */
+ /*else*/ {
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 666, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_value = __pyx_t_3;
+ __pyx_t_3 = 0;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":667
+ * else:
+ * value = self._parse()
+ * return value # <<<<<<<<<<<<<<
+ *
+ * def peek_event(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_value);
+ __pyx_r = __pyx_v_value;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":661
+ * raise ValueError(u"unknown event type")
+ *
+ * def get_event(self): # <<<<<<<<<<<<<<
+ * if self.current_event is not None:
+ * value = self.current_event
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.get_event", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":669
+ * return value
+ *
+ * def peek_event(self): # <<<<<<<<<<<<<<
+ * if self.current_event is None:
+ * self.current_event = self._parse()
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_19peek_event(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_19peek_event(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("peek_event (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_18peek_event(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_18peek_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ __Pyx_RefNannySetupContext("peek_event", 0);
+
+ /* "_yaml.pyx":670
+ *
+ * def peek_event(self):
+ * if self.current_event is None: # <<<<<<<<<<<<<<
+ * self.current_event = self._parse()
+ * return self.current_event
+ */
+ __pyx_t_1 = (__pyx_v_self->current_event == Py_None);
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":671
+ * def peek_event(self):
+ * if self.current_event is None:
+ * self.current_event = self._parse() # <<<<<<<<<<<<<<
+ * return self.current_event
+ *
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 671, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_GOTREF(__pyx_v_self->current_event);
+ __Pyx_DECREF(__pyx_v_self->current_event);
+ __pyx_v_self->current_event = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":670
+ *
+ * def peek_event(self):
+ * if self.current_event is None: # <<<<<<<<<<<<<<
+ * self.current_event = self._parse()
+ * return self.current_event
+ */
+ }
+
+ /* "_yaml.pyx":672
+ * if self.current_event is None:
+ * self.current_event = self._parse()
+ * return self.current_event # <<<<<<<<<<<<<<
+ *
+ * def check_event(self, *choices):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_self->current_event);
+ __pyx_r = __pyx_v_self->current_event;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":669
+ * return value
+ *
+ * def peek_event(self): # <<<<<<<<<<<<<<
+ * if self.current_event is None:
+ * self.current_event = self._parse()
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.peek_event", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
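+
+/* CParser.peek_event() ("_yaml.pyx":669-672): parses and caches the next
+ * event in self.current_event on first use, then returns it without
+ * consuming it, so a subsequent get_event() sees the same event. */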
+
+/* "_yaml.pyx":674
+ * return self.current_event
+ *
+ * def check_event(self, *choices): # <<<<<<<<<<<<<<
+ * if self.current_event is None:
+ * self.current_event = self._parse()
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_21check_event(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_21check_event(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ PyObject *__pyx_v_choices = 0;
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("check_event (wrapper)", 0);
+ if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "check_event", 0))) return NULL;
+ __Pyx_INCREF(__pyx_args);
+ __pyx_v_choices = __pyx_args;
+ __pyx_r = __pyx_pf_5_yaml_7CParser_20check_event(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self), __pyx_v_choices);
+
+ /* function exit code */
+ __Pyx_XDECREF(__pyx_v_choices);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_20check_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_choices) {
+ PyObject *__pyx_v_event_class = NULL;
+ PyObject *__pyx_v_choice = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ Py_ssize_t __pyx_t_4;
+ PyObject *__pyx_t_5 = NULL;
+ __Pyx_RefNannySetupContext("check_event", 0);
+
+ /* "_yaml.pyx":675
+ *
+ * def check_event(self, *choices):
+ * if self.current_event is None: # <<<<<<<<<<<<<<
+ * self.current_event = self._parse()
+ * if self.current_event is None:
+ */
+ __pyx_t_1 = (__pyx_v_self->current_event == Py_None);
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":676
+ * def check_event(self, *choices):
+ * if self.current_event is None:
+ * self.current_event = self._parse() # <<<<<<<<<<<<<<
+ * if self.current_event is None:
+ * return False
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 676, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_GOTREF(__pyx_v_self->current_event);
+ __Pyx_DECREF(__pyx_v_self->current_event);
+ __pyx_v_self->current_event = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":675
+ *
+ * def check_event(self, *choices):
+ * if self.current_event is None: # <<<<<<<<<<<<<<
+ * self.current_event = self._parse()
+ * if self.current_event is None:
+ */
+ }
+
+ /* "_yaml.pyx":677
+ * if self.current_event is None:
+ * self.current_event = self._parse()
+ * if self.current_event is None: # <<<<<<<<<<<<<<
+ * return False
+ * if not choices:
+ */
+ __pyx_t_2 = (__pyx_v_self->current_event == Py_None);
+ __pyx_t_1 = (__pyx_t_2 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":678
+ * self.current_event = self._parse()
+ * if self.current_event is None:
+ * return False # <<<<<<<<<<<<<<
+ * if not choices:
+ * return True
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_False);
+ __pyx_r = Py_False;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":677
+ * if self.current_event is None:
+ * self.current_event = self._parse()
+ * if self.current_event is None: # <<<<<<<<<<<<<<
+ * return False
+ * if not choices:
+ */
+ }
+
+ /* "_yaml.pyx":679
+ * if self.current_event is None:
+ * return False
+ * if not choices: # <<<<<<<<<<<<<<
+ * return True
+ * event_class = self.current_event.__class__
+ */
+ __pyx_t_1 = (PyTuple_GET_SIZE(__pyx_v_choices) != 0);
+ __pyx_t_2 = ((!__pyx_t_1) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":680
+ * return False
+ * if not choices:
+ * return True # <<<<<<<<<<<<<<
+ * event_class = self.current_event.__class__
+ * for choice in choices:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_True);
+ __pyx_r = Py_True;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":679
+ * if self.current_event is None:
+ * return False
+ * if not choices: # <<<<<<<<<<<<<<
+ * return True
+ * event_class = self.current_event.__class__
+ */
+ }
+
+ /* "_yaml.pyx":681
+ * if not choices:
+ * return True
+ * event_class = self.current_event.__class__ # <<<<<<<<<<<<<<
+ * for choice in choices:
+ * if event_class is choice:
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->current_event, __pyx_n_s_class); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 681, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_event_class = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":682
+ * return True
+ * event_class = self.current_event.__class__
+ * for choice in choices: # <<<<<<<<<<<<<<
+ * if event_class is choice:
+ * return True
+ */
+ __pyx_t_3 = __pyx_v_choices; __Pyx_INCREF(__pyx_t_3); __pyx_t_4 = 0;
+ for (;;) {
+ if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_3)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_5); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 682, __pyx_L1_error)
+ #else
+ __pyx_t_5 = PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 682, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ #endif
+ __Pyx_XDECREF_SET(__pyx_v_choice, __pyx_t_5);
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":683
+ * event_class = self.current_event.__class__
+ * for choice in choices:
+ * if event_class is choice: # <<<<<<<<<<<<<<
+ * return True
+ * return False
+ */
+ __pyx_t_2 = (__pyx_v_event_class == __pyx_v_choice);
+ __pyx_t_1 = (__pyx_t_2 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":684
+ * for choice in choices:
+ * if event_class is choice:
+ * return True # <<<<<<<<<<<<<<
+ * return False
+ *
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_True);
+ __pyx_r = Py_True;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":683
+ * event_class = self.current_event.__class__
+ * for choice in choices:
+ * if event_class is choice: # <<<<<<<<<<<<<<
+ * return True
+ * return False
+ */
+ }
+
+ /* "_yaml.pyx":682
+ * return True
+ * event_class = self.current_event.__class__
+ * for choice in choices: # <<<<<<<<<<<<<<
+ * if event_class is choice:
+ * return True
+ */
+ }
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":685
+ * if event_class is choice:
+ * return True
+ * return False # <<<<<<<<<<<<<<
+ *
+ * def check_node(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_False);
+ __pyx_r = Py_False;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":674
+ * return self.current_event
+ *
+ * def check_event(self, *choices): # <<<<<<<<<<<<<<
+ * if self.current_event is None:
+ * self.current_event = self._parse()
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_AddTraceback("_yaml.CParser.check_event", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_event_class);
+ __Pyx_XDECREF(__pyx_v_choice);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
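+
+/* CParser.check_event(*choices) ("_yaml.pyx":674-685): makes sure an event is
+ * cached, then returns True when no choices are given or when the cached
+ * event's __class__ is identical to one of the choices; returns False
+ * otherwise, in particular when no further event is available and
+ * self.current_event stays None. */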
+
+/* "_yaml.pyx":687
+ * return False
+ *
+ * def check_node(self): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_STREAM_START_EVENT:
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_23check_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_23check_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("check_node (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_22check_node(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_22check_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ __Pyx_RefNannySetupContext("check_node", 0);
+
+ /* "_yaml.pyx":688
+ *
+ * def check_node(self):
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * if self.parsed_event.type == YAML_STREAM_START_EVENT:
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)0))) __PYX_ERR(0, 688, __pyx_L1_error)
+
+ /* "_yaml.pyx":689
+ * def check_node(self):
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_STREAM_START_EVENT: # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type == YAML_STREAM_START_EVENT) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":690
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_STREAM_START_EVENT:
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":691
+ * if self.parsed_event.type == YAML_STREAM_START_EVENT:
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * return True
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)0))) __PYX_ERR(0, 691, __pyx_L1_error)
+
+ /* "_yaml.pyx":689
+ * def check_node(self):
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_STREAM_START_EVENT: # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ */
+ }
+
+ /* "_yaml.pyx":692
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * return True
+ * return False
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type != YAML_STREAM_END_EVENT) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":693
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * return True # <<<<<<<<<<<<<<
+ * return False
+ *
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_True);
+ __pyx_r = Py_True;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":692
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * return True
+ * return False
+ */
+ }
+
+ /* "_yaml.pyx":694
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * return True
+ * return False # <<<<<<<<<<<<<<
+ *
+ * def get_node(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_False);
+ __pyx_r = Py_False;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":687
+ * return False
+ *
+ * def check_node(self): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_STREAM_START_EVENT:
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_AddTraceback("_yaml.CParser.check_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
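+
+/* CParser.check_node() ("_yaml.pyx":687-694): peeks at the next libyaml
+ * event, silently skips a STREAM-START event, and returns True while the
+ * stream has not yet reached STREAM-END, i.e. while another document node
+ * can still be composed. */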
+
+/* "_yaml.pyx":696
+ * return False
+ *
+ * def get_node(self): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_25get_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_25get_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_node (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_24get_node(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_24get_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ __Pyx_RefNannySetupContext("get_node", 0);
+
+ /* "_yaml.pyx":697
+ *
+ * def get_node(self):
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * return self._compose_document()
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)0))) __PYX_ERR(0, 697, __pyx_L1_error)
+
+ /* "_yaml.pyx":698
+ * def get_node(self):
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * return self._compose_document()
+ *
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type != YAML_STREAM_END_EVENT) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":699
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * return self._compose_document() # <<<<<<<<<<<<<<
+ *
+ * def get_single_node(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_document(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 699, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":698
+ * def get_node(self):
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * return self._compose_document()
+ *
+ */
+ }
+
+ /* "_yaml.pyx":696
+ * return False
+ *
+ * def get_node(self): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ */
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.get_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
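+
+/* CParser.get_node() ("_yaml.pyx":696-699): composes and returns the next
+ * document's root node via _compose_document(), or falls through and returns
+ * None once the STREAM-END event has been reached. */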
+
+/* "_yaml.pyx":701
+ * return self._compose_document()
+ *
+ * def get_single_node(self): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event)
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_27get_single_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_27get_single_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_single_node (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_26get_single_node(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_26get_single_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_v_document = NULL;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_mark = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ PyObject *__pyx_t_5 = NULL;
+ PyObject *__pyx_t_6 = NULL;
+ PyObject *__pyx_t_7 = NULL;
+ __Pyx_RefNannySetupContext("get_single_node", 0);
+
+ /* "_yaml.pyx":702
+ *
+ * def get_single_node(self):
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)0))) __PYX_ERR(0, 702, __pyx_L1_error)
+
+ /* "_yaml.pyx":703
+ * def get_single_node(self):
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * document = None
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":704
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * document = None
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)0))) __PYX_ERR(0, 704, __pyx_L1_error)
+
+ /* "_yaml.pyx":705
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ * document = None # <<<<<<<<<<<<<<
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * document = self._compose_document()
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_document = Py_None;
+
+ /* "_yaml.pyx":706
+ * self._parse_next_event()
+ * document = None
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * document = self._compose_document()
+ * self._parse_next_event()
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type != YAML_STREAM_END_EVENT) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":707
+ * document = None
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * document = self._compose_document() # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_document(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 707, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF_SET(__pyx_v_document, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":706
+ * self._parse_next_event()
+ * document = None
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * document = self._compose_document()
+ * self._parse_next_event()
+ */
+ }
+
+ /* "_yaml.pyx":708
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * document = self._compose_document()
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * mark = Mark(self.stream_name,
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)0))) __PYX_ERR(0, 708, __pyx_L1_error)
+
+ /* "_yaml.pyx":709
+ * document = self._compose_document()
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type != YAML_STREAM_END_EVENT) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":711
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column,
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 711, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":712
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_4 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 712, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":713
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 713, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":710
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ */
+ __pyx_t_6 = PyTuple_New(6); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 710, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_5);
+ PyTuple_SET_ITEM(__pyx_t_6, 3, __pyx_t_5);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_6, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_6, 5, Py_None);
+ __pyx_t_3 = 0;
+ __pyx_t_4 = 0;
+ __pyx_t_5 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_6, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 710, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __pyx_v_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_5);
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":715
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ComposerError("expected a single document in the stream",
+ * document.start_mark, "but found another document", mark)
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":716
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ComposerError("expected a single document in the stream", # <<<<<<<<<<<<<<
+ * document.start_mark, "but found another document", mark)
+ * else:
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_ComposerError); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 716, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+
+ /* "_yaml.pyx":717
+ * if PY_MAJOR_VERSION < 3:
+ * raise ComposerError("expected a single document in the stream",
+ * document.start_mark, "but found another document", mark) # <<<<<<<<<<<<<<
+ * else:
+ * raise ComposerError(u"expected a single document in the stream",
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_document, __pyx_n_s_start_mark); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 717, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = NULL;
+ __pyx_t_1 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_6);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_6, function);
+ __pyx_t_1 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_6)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_3, __pyx_kp_s_expected_a_single_document_in_th, __pyx_t_4, __pyx_kp_s_but_found_another_document, ((PyObject *)__pyx_v_mark)};
+ __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_1, 4+__pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 716, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_3, __pyx_kp_s_expected_a_single_document_in_th, __pyx_t_4, __pyx_kp_s_but_found_another_document, ((PyObject *)__pyx_v_mark)};
+ __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_1, 4+__pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 716, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_7 = PyTuple_New(4+__pyx_t_1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 716, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(__pyx_kp_s_expected_a_single_document_in_th);
+ __Pyx_GIVEREF(__pyx_kp_s_expected_a_single_document_in_th);
+ PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_1, __pyx_kp_s_expected_a_single_document_in_th);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_1, __pyx_t_4);
+ __Pyx_INCREF(__pyx_kp_s_but_found_another_document);
+ __Pyx_GIVEREF(__pyx_kp_s_but_found_another_document);
+ PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_1, __pyx_kp_s_but_found_another_document);
+ __Pyx_INCREF(((PyObject *)__pyx_v_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_mark));
+ PyTuple_SET_ITEM(__pyx_t_7, 3+__pyx_t_1, ((PyObject *)__pyx_v_mark));
+ __pyx_t_4 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_7, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 716, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __PYX_ERR(0, 716, __pyx_L1_error)
+
+ /* "_yaml.pyx":715
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ComposerError("expected a single document in the stream",
+ * document.start_mark, "but found another document", mark)
+ */
+ }
+
+ /* "_yaml.pyx":719
+ * document.start_mark, "but found another document", mark)
+ * else:
+ * raise ComposerError(u"expected a single document in the stream", # <<<<<<<<<<<<<<
+ * document.start_mark, u"but found another document", mark)
+ * return document
+ */
+ /*else*/ {
+ __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_ComposerError); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 719, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+
+ /* "_yaml.pyx":720
+ * else:
+ * raise ComposerError(u"expected a single document in the stream",
+ * document.start_mark, u"but found another document", mark) # <<<<<<<<<<<<<<
+ * return document
+ *
+ */
+ __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_document, __pyx_n_s_start_mark); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 720, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_4 = NULL;
+ __pyx_t_1 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_6);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_6, function);
+ __pyx_t_1 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_6)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_4, __pyx_kp_u_expected_a_single_document_in_th, __pyx_t_7, __pyx_kp_u_but_found_another_document, ((PyObject *)__pyx_v_mark)};
+ __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_1, 4+__pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 719, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_4, __pyx_kp_u_expected_a_single_document_in_th, __pyx_t_7, __pyx_kp_u_but_found_another_document, ((PyObject *)__pyx_v_mark)};
+ __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_1, 4+__pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 719, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(4+__pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 719, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_4) {
+ __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL;
+ }
+ __Pyx_INCREF(__pyx_kp_u_expected_a_single_document_in_th);
+ __Pyx_GIVEREF(__pyx_kp_u_expected_a_single_document_in_th);
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_1, __pyx_kp_u_expected_a_single_document_in_th);
+ __Pyx_GIVEREF(__pyx_t_7);
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_1, __pyx_t_7);
+ __Pyx_INCREF(__pyx_kp_u_but_found_another_document);
+ __Pyx_GIVEREF(__pyx_kp_u_but_found_another_document);
+ PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_1, __pyx_kp_u_but_found_another_document);
+ __Pyx_INCREF(((PyObject *)__pyx_v_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 3+__pyx_t_1, ((PyObject *)__pyx_v_mark));
+ __pyx_t_7 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_3, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 719, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __PYX_ERR(0, 719, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":709
+ * document = self._compose_document()
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+ }
+
+ /* "_yaml.pyx":721
+ * raise ComposerError(u"expected a single document in the stream",
+ * document.start_mark, u"but found another document", mark)
+ * return document # <<<<<<<<<<<<<<
+ *
+ * cdef object _compose_document(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_document);
+ __pyx_r = __pyx_v_document;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":701
+ * return self._compose_document()
+ *
+ * def get_single_node(self): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event)
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_AddTraceback("_yaml.CParser.get_single_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_document);
+ __Pyx_XDECREF((PyObject *)__pyx_v_mark);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
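+
+/* CParser.get_single_node() ("_yaml.pyx":701-721): consumes the leading
+ * stream event, composes at most one document, and raises ComposerError if
+ * the stream is still not at STREAM-END afterwards, reporting the offending
+ * position through a Mark built from parsed_event.start_mark. Returns the
+ * composed document, or None for an empty stream. */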
+
+/* "_yaml.pyx":723
+ * return document
+ *
+ * cdef object _compose_document(self): # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * node = self._compose_node(None, None)
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__compose_document(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_v_node = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_t_2;
+ __Pyx_RefNannySetupContext("_compose_document", 0);
+
+ /* "_yaml.pyx":724
+ *
+ * cdef object _compose_document(self):
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * node = self._compose_node(None, None)
+ * self._parse_next_event()
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":725
+ * cdef object _compose_document(self):
+ * yaml_event_delete(&self.parsed_event)
+ * node = self._compose_node(None, None) # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_node(__pyx_v_self, Py_None, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 725, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_node = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":726
+ * yaml_event_delete(&self.parsed_event)
+ * node = self._compose_node(None, None)
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * self.anchors = {}
+ */
+ __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_2 == ((int)0))) __PYX_ERR(0, 726, __pyx_L1_error)
+
+ /* "_yaml.pyx":727
+ * node = self._compose_node(None, None)
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * self.anchors = {}
+ * return node
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":728
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event)
+ * self.anchors = {} # <<<<<<<<<<<<<<
+ * return node
+ *
+ */
+ __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 728, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ __Pyx_GOTREF(__pyx_v_self->anchors);
+ __Pyx_DECREF(__pyx_v_self->anchors);
+ __pyx_v_self->anchors = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":729
+ * yaml_event_delete(&self.parsed_event)
+ * self.anchors = {}
+ * return node # <<<<<<<<<<<<<<
+ *
+ * cdef object _compose_node(self, object parent, object index):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_node);
+ __pyx_r = __pyx_v_node;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":723
+ * return document
+ *
+ * cdef object _compose_document(self): # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * node = self._compose_node(None, None)
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("_yaml.CParser._compose_document", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_node);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
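+
+/* CParser._compose_document() ("_yaml.pyx":723-729): deletes the currently
+ * parsed event (in practice DOCUMENT-START), composes the root node via
+ * _compose_node(None, None), parses and deletes the following event (in
+ * practice DOCUMENT-END), resets self.anchors to an empty dict, and returns
+ * the node. */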
+
+/* "_yaml.pyx":731
+ * return node
+ *
+ * cdef object _compose_node(self, object parent, object index): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_ALIAS_EVENT:
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__compose_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_parent, PyObject *__pyx_v_index) {
+ PyObject *__pyx_v_anchor = NULL;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_mark = NULL;
+ PyObject *__pyx_v_node = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_t_4;
+ PyObject *__pyx_t_5 = NULL;
+ PyObject *__pyx_t_6 = NULL;
+ PyObject *__pyx_t_7 = NULL;
+ PyObject *__pyx_t_8 = NULL;
+ __Pyx_RefNannySetupContext("_compose_node", 0);
+
+ /* "_yaml.pyx":732
+ *
+ * cdef object _compose_node(self, object parent, object index):
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * if self.parsed_event.type == YAML_ALIAS_EVENT:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor)
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)0))) __PYX_ERR(0, 732, __pyx_L1_error)
+
+ /* "_yaml.pyx":733
+ * cdef object _compose_node(self, object parent, object index):
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_ALIAS_EVENT: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor)
+ * if anchor not in self.anchors:
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type == YAML_ALIAS_EVENT) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":734
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_ALIAS_EVENT:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor) # <<<<<<<<<<<<<<
+ * if anchor not in self.anchors:
+ * mark = Mark(self.stream_name,
+ */
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.alias.anchor); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 734, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_anchor = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":735
+ * if self.parsed_event.type == YAML_ALIAS_EVENT:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor)
+ * if anchor not in self.anchors: # <<<<<<<<<<<<<<
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+ __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_v_anchor, __pyx_v_self->anchors, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 735, __pyx_L1_error)
+ __pyx_t_4 = (__pyx_t_2 != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":737
+ * if anchor not in self.anchors:
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column,
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 737, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":738
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_5 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 738, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":739
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_6 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 739, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+
+ /* "_yaml.pyx":736
+ * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor)
+ * if anchor not in self.anchors:
+ * mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ */
+ __pyx_t_7 = PyTuple_New(6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 736, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_5);
+ PyTuple_SET_ITEM(__pyx_t_7, 2, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_7, 3, __pyx_t_6);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_7, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_7, 5, Py_None);
+ __pyx_t_3 = 0;
+ __pyx_t_5 = 0;
+ __pyx_t_6 = 0;
+ __pyx_t_6 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_7, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 736, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __pyx_v_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_6);
+ __pyx_t_6 = 0;
+
+ /* "_yaml.pyx":741
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ComposerError(None, None, "found undefined alias", mark)
+ * else:
+ */
+ __pyx_t_4 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_4)) {
+
+ /* "_yaml.pyx":742
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ComposerError(None, None, "found undefined alias", mark) # <<<<<<<<<<<<<<
+ * else:
+ * raise ComposerError(None, None, u"found undefined alias", mark)
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_ComposerError); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 742, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_5 = NULL;
+ __pyx_t_1 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) {
+ __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_7);
+ if (likely(__pyx_t_5)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7);
+ __Pyx_INCREF(__pyx_t_5);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_7, function);
+ __pyx_t_1 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_7)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_5, Py_None, Py_None, __pyx_kp_s_found_undefined_alias, ((PyObject *)__pyx_v_mark)};
+ __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_1, 4+__pyx_t_1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 742, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_GOTREF(__pyx_t_6);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_5, Py_None, Py_None, __pyx_kp_s_found_undefined_alias, ((PyObject *)__pyx_v_mark)};
+ __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_1, 4+__pyx_t_1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 742, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_GOTREF(__pyx_t_6);
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(4+__pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 742, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_5) {
+ __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_5); __pyx_t_5 = NULL;
+ }
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_1, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_1, Py_None);
+ __Pyx_INCREF(__pyx_kp_s_found_undefined_alias);
+ __Pyx_GIVEREF(__pyx_kp_s_found_undefined_alias);
+ PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_1, __pyx_kp_s_found_undefined_alias);
+ __Pyx_INCREF(((PyObject *)__pyx_v_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 3+__pyx_t_1, ((PyObject *)__pyx_v_mark));
+ __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_3, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 742, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_Raise(__pyx_t_6, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __PYX_ERR(0, 742, __pyx_L1_error)
+
+ /* "_yaml.pyx":741
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ComposerError(None, None, "found undefined alias", mark)
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":744
+ * raise ComposerError(None, None, "found undefined alias", mark)
+ * else:
+ * raise ComposerError(None, None, u"found undefined alias", mark) # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * return self.anchors[anchor]
+ */
+ /*else*/ {
+ __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_ComposerError); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 744, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_3 = NULL;
+ __pyx_t_1 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_7);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_7, function);
+ __pyx_t_1 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_7)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_3, Py_None, Py_None, __pyx_kp_u_found_undefined_alias, ((PyObject *)__pyx_v_mark)};
+ __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_1, 4+__pyx_t_1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 744, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_6);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_3, Py_None, Py_None, __pyx_kp_u_found_undefined_alias, ((PyObject *)__pyx_v_mark)};
+ __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_1, 4+__pyx_t_1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 744, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_6);
+ } else
+ #endif
+ {
+ __pyx_t_5 = PyTuple_New(4+__pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 744, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_1, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_1, Py_None);
+ __Pyx_INCREF(__pyx_kp_u_found_undefined_alias);
+ __Pyx_GIVEREF(__pyx_kp_u_found_undefined_alias);
+ PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_1, __pyx_kp_u_found_undefined_alias);
+ __Pyx_INCREF(((PyObject *)__pyx_v_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_mark));
+ PyTuple_SET_ITEM(__pyx_t_5, 3+__pyx_t_1, ((PyObject *)__pyx_v_mark));
+ __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_5, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 744, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_Raise(__pyx_t_6, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __PYX_ERR(0, 744, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":735
+ * if self.parsed_event.type == YAML_ALIAS_EVENT:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor)
+ * if anchor not in self.anchors: # <<<<<<<<<<<<<<
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+ }
+
+ /* "_yaml.pyx":745
+ * else:
+ * raise ComposerError(None, None, u"found undefined alias", mark)
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * return self.anchors[anchor]
+ * anchor = None
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":746
+ * raise ComposerError(None, None, u"found undefined alias", mark)
+ * yaml_event_delete(&self.parsed_event)
+ * return self.anchors[anchor] # <<<<<<<<<<<<<<
+ * anchor = None
+ * if self.parsed_event.type == YAML_SCALAR_EVENT \
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_6 = __Pyx_PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_anchor); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 746, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_r = __pyx_t_6;
+ __pyx_t_6 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":733
+ * cdef object _compose_node(self, object parent, object index):
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_ALIAS_EVENT: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor)
+ * if anchor not in self.anchors:
+ */
+ }
+
+ /* "_yaml.pyx":747
+ * yaml_event_delete(&self.parsed_event)
+ * return self.anchors[anchor]
+ * anchor = None # <<<<<<<<<<<<<<
+ * if self.parsed_event.type == YAML_SCALAR_EVENT \
+ * and self.parsed_event.data.scalar.anchor != NULL:
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_anchor = Py_None;
+
+ /* "_yaml.pyx":748
+ * return self.anchors[anchor]
+ * anchor = None
+ * if self.parsed_event.type == YAML_SCALAR_EVENT \ # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type == YAML_SCALAR_EVENT) != 0);
+ if (__pyx_t_2) {
+ } else {
+ __pyx_t_4 = __pyx_t_2;
+ goto __pyx_L7_bool_binop_done;
+ }
+
+ /* "_yaml.pyx":749
+ * anchor = None
+ * if self.parsed_event.type == YAML_SCALAR_EVENT \
+ * and self.parsed_event.data.scalar.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.data.scalar.anchor != NULL) != 0);
+ __pyx_t_4 = __pyx_t_2;
+ __pyx_L7_bool_binop_done:;
+
+ /* "_yaml.pyx":748
+ * return self.anchors[anchor]
+ * anchor = None
+ * if self.parsed_event.type == YAML_SCALAR_EVENT \ # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ */
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":750
+ * if self.parsed_event.type == YAML_SCALAR_EVENT \
+ * and self.parsed_event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor) # <<<<<<<<<<<<<<
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \
+ * and self.parsed_event.data.sequence_start.anchor != NULL:
+ */
+ __pyx_t_6 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.scalar.anchor); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 750, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF_SET(__pyx_v_anchor, __pyx_t_6);
+ __pyx_t_6 = 0;
+
+ /* "_yaml.pyx":748
+ * return self.anchors[anchor]
+ * anchor = None
+ * if self.parsed_event.type == YAML_SCALAR_EVENT \ # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ */
+ goto __pyx_L6;
+ }
+
+ /* "_yaml.pyx":751
+ * and self.parsed_event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \ # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type == YAML_SEQUENCE_START_EVENT) != 0);
+ if (__pyx_t_2) {
+ } else {
+ __pyx_t_4 = __pyx_t_2;
+ goto __pyx_L9_bool_binop_done;
+ }
+
+ /* "_yaml.pyx":752
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \
+ * and self.parsed_event.data.sequence_start.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.data.sequence_start.anchor != NULL) != 0);
+ __pyx_t_4 = __pyx_t_2;
+ __pyx_L9_bool_binop_done:;
+
+ /* "_yaml.pyx":751
+ * and self.parsed_event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \ # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ */
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":753
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \
+ * and self.parsed_event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor) # <<<<<<<<<<<<<<
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \
+ * and self.parsed_event.data.mapping_start.anchor != NULL:
+ */
+ __pyx_t_6 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.sequence_start.anchor); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 753, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF_SET(__pyx_v_anchor, __pyx_t_6);
+ __pyx_t_6 = 0;
+
+ /* "_yaml.pyx":751
+ * and self.parsed_event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \ # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ */
+ goto __pyx_L6;
+ }
+
+ /* "_yaml.pyx":754
+ * and self.parsed_event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \ # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type == YAML_MAPPING_START_EVENT) != 0);
+ if (__pyx_t_2) {
+ } else {
+ __pyx_t_4 = __pyx_t_2;
+ goto __pyx_L11_bool_binop_done;
+ }
+
+ /* "_yaml.pyx":755
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \
+ * and self.parsed_event.data.mapping_start.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ * if anchor is not None:
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.data.mapping_start.anchor != NULL) != 0);
+ __pyx_t_4 = __pyx_t_2;
+ __pyx_L11_bool_binop_done:;
+
+ /* "_yaml.pyx":754
+ * and self.parsed_event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \ # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ */
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":756
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \
+ * and self.parsed_event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor) # <<<<<<<<<<<<<<
+ * if anchor is not None:
+ * if anchor in self.anchors:
+ */
+ __pyx_t_6 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.mapping_start.anchor); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 756, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF_SET(__pyx_v_anchor, __pyx_t_6);
+ __pyx_t_6 = 0;
+
+ /* "_yaml.pyx":754
+ * and self.parsed_event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \ # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ */
+ }
+ __pyx_L6:;
+
+ /* "_yaml.pyx":757
+ * and self.parsed_event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ * if anchor is not None: # <<<<<<<<<<<<<<
+ * if anchor in self.anchors:
+ * mark = Mark(self.stream_name,
+ */
+ __pyx_t_4 = (__pyx_v_anchor != Py_None);
+ __pyx_t_2 = (__pyx_t_4 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":758
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ * if anchor is not None:
+ * if anchor in self.anchors: # <<<<<<<<<<<<<<
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+ __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_v_anchor, __pyx_v_self->anchors, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 758, __pyx_L1_error)
+ __pyx_t_4 = (__pyx_t_2 != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":760
+ * if anchor in self.anchors:
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column,
+ */
+ __pyx_t_6 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 760, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+
+ /* "_yaml.pyx":761
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_7 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 761, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+
+ /* "_yaml.pyx":762
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 762, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":759
+ * if anchor is not None:
+ * if anchor in self.anchors:
+ * mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ */
+ __pyx_t_3 = PyTuple_New(6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 759, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_6);
+ __Pyx_GIVEREF(__pyx_t_7);
+ PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_7);
+ __Pyx_GIVEREF(__pyx_t_5);
+ PyTuple_SET_ITEM(__pyx_t_3, 3, __pyx_t_5);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_3, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_3, 5, Py_None);
+ __pyx_t_6 = 0;
+ __pyx_t_7 = 0;
+ __pyx_t_5 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_3, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 759, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_v_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_5);
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":764
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ComposerError("found duplicate anchor; first occurrence",
+ * self.anchors[anchor].start_mark, "second occurrence", mark)
+ */
+ __pyx_t_4 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_4)) {
+
+ /* "_yaml.pyx":765
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ComposerError("found duplicate anchor; first occurrence", # <<<<<<<<<<<<<<
+ * self.anchors[anchor].start_mark, "second occurrence", mark)
+ * else:
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_ComposerError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 765, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":766
+ * if PY_MAJOR_VERSION < 3:
+ * raise ComposerError("found duplicate anchor; first occurrence",
+ * self.anchors[anchor].start_mark, "second occurrence", mark) # <<<<<<<<<<<<<<
+ * else:
+ * raise ComposerError(u"found duplicate anchor; first occurrence",
+ */
+ __pyx_t_7 = __Pyx_PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_anchor); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 766, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_start_mark); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 766, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __pyx_t_7 = NULL;
+ __pyx_t_1 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_7)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_7);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ __pyx_t_1 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_3)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_kp_s_found_duplicate_anchor_first_occ, __pyx_t_6, __pyx_kp_s_second_occurrence, ((PyObject *)__pyx_v_mark)};
+ __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_1, 4+__pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 765, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_kp_s_found_duplicate_anchor_first_occ, __pyx_t_6, __pyx_kp_s_second_occurrence, ((PyObject *)__pyx_v_mark)};
+ __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_1, 4+__pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 765, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_8 = PyTuple_New(4+__pyx_t_1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 765, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ if (__pyx_t_7) {
+ __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_7); __pyx_t_7 = NULL;
+ }
+ __Pyx_INCREF(__pyx_kp_s_found_duplicate_anchor_first_occ);
+ __Pyx_GIVEREF(__pyx_kp_s_found_duplicate_anchor_first_occ);
+ PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_1, __pyx_kp_s_found_duplicate_anchor_first_occ);
+ __Pyx_GIVEREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_1, __pyx_t_6);
+ __Pyx_INCREF(__pyx_kp_s_second_occurrence);
+ __Pyx_GIVEREF(__pyx_kp_s_second_occurrence);
+ PyTuple_SET_ITEM(__pyx_t_8, 2+__pyx_t_1, __pyx_kp_s_second_occurrence);
+ __Pyx_INCREF(((PyObject *)__pyx_v_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_mark));
+ PyTuple_SET_ITEM(__pyx_t_8, 3+__pyx_t_1, ((PyObject *)__pyx_v_mark));
+ __pyx_t_6 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_8, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 765, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __PYX_ERR(0, 765, __pyx_L1_error)
+
+ /* "_yaml.pyx":764
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ComposerError("found duplicate anchor; first occurrence",
+ * self.anchors[anchor].start_mark, "second occurrence", mark)
+ */
+ }
+
+ /* "_yaml.pyx":768
+ * self.anchors[anchor].start_mark, "second occurrence", mark)
+ * else:
+ * raise ComposerError(u"found duplicate anchor; first occurrence", # <<<<<<<<<<<<<<
+ * self.anchors[anchor].start_mark, u"second occurrence", mark)
+ * self.descend_resolver(parent, index)
+ */
+ /*else*/ {
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_ComposerError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 768, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":769
+ * else:
+ * raise ComposerError(u"found duplicate anchor; first occurrence",
+ * self.anchors[anchor].start_mark, u"second occurrence", mark) # <<<<<<<<<<<<<<
+ * self.descend_resolver(parent, index)
+ * if self.parsed_event.type == YAML_SCALAR_EVENT:
+ */
+ __pyx_t_8 = __Pyx_PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_anchor); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 769, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_start_mark); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 769, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __pyx_t_8 = NULL;
+ __pyx_t_1 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_8)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_8);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ __pyx_t_1 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_3)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_8, __pyx_kp_u_found_duplicate_anchor_first_occ, __pyx_t_6, __pyx_kp_u_second_occurrence, ((PyObject *)__pyx_v_mark)};
+ __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_1, 4+__pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 768, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) {
+ PyObject *__pyx_temp[5] = {__pyx_t_8, __pyx_kp_u_found_duplicate_anchor_first_occ, __pyx_t_6, __pyx_kp_u_second_occurrence, ((PyObject *)__pyx_v_mark)};
+ __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_1, 4+__pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 768, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_7 = PyTuple_New(4+__pyx_t_1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 768, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ if (__pyx_t_8) {
+ __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_8); __pyx_t_8 = NULL;
+ }
+ __Pyx_INCREF(__pyx_kp_u_found_duplicate_anchor_first_occ);
+ __Pyx_GIVEREF(__pyx_kp_u_found_duplicate_anchor_first_occ);
+ PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_1, __pyx_kp_u_found_duplicate_anchor_first_occ);
+ __Pyx_GIVEREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_1, __pyx_t_6);
+ __Pyx_INCREF(__pyx_kp_u_second_occurrence);
+ __Pyx_GIVEREF(__pyx_kp_u_second_occurrence);
+ PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_1, __pyx_kp_u_second_occurrence);
+ __Pyx_INCREF(((PyObject *)__pyx_v_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_mark));
+ PyTuple_SET_ITEM(__pyx_t_7, 3+__pyx_t_1, ((PyObject *)__pyx_v_mark));
+ __pyx_t_6 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_7, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 768, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __PYX_ERR(0, 768, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":758
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ * if anchor is not None:
+ * if anchor in self.anchors: # <<<<<<<<<<<<<<
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+ }
+
+ /* "_yaml.pyx":757
+ * and self.parsed_event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ * if anchor is not None: # <<<<<<<<<<<<<<
+ * if anchor in self.anchors:
+ * mark = Mark(self.stream_name,
+ */
+ }
+
+ /* "_yaml.pyx":770
+ * raise ComposerError(u"found duplicate anchor; first occurrence",
+ * self.anchors[anchor].start_mark, u"second occurrence", mark)
+ * self.descend_resolver(parent, index) # <<<<<<<<<<<<<<
+ * if self.parsed_event.type == YAML_SCALAR_EVENT:
+ * node = self._compose_scalar_node(anchor)
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_descend_resolver); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 770, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_7 = NULL;
+ __pyx_t_1 = 0;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_7)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_7);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ __pyx_t_1 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_3)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_parent, __pyx_v_index};
+ __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_1, 2+__pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 770, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_5);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_parent, __pyx_v_index};
+ __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_1, 2+__pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 770, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_5);
+ } else
+ #endif
+ {
+ __pyx_t_6 = PyTuple_New(2+__pyx_t_1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 770, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ if (__pyx_t_7) {
+ __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_7); __pyx_t_7 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_parent);
+ __Pyx_GIVEREF(__pyx_v_parent);
+ PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_1, __pyx_v_parent);
+ __Pyx_INCREF(__pyx_v_index);
+ __Pyx_GIVEREF(__pyx_v_index);
+ PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_1, __pyx_v_index);
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_6, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 770, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":771
+ * self.anchors[anchor].start_mark, u"second occurrence", mark)
+ * self.descend_resolver(parent, index)
+ * if self.parsed_event.type == YAML_SCALAR_EVENT: # <<<<<<<<<<<<<<
+ * node = self._compose_scalar_node(anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
+ */
+ switch (__pyx_v_self->parsed_event.type) {
+ case YAML_SCALAR_EVENT:
+
+ /* "_yaml.pyx":772
+ * self.descend_resolver(parent, index)
+ * if self.parsed_event.type == YAML_SCALAR_EVENT:
+ * node = self._compose_scalar_node(anchor) # <<<<<<<<<<<<<<
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
+ * node = self._compose_sequence_node(anchor)
+ */
+ __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_scalar_node(__pyx_v_self, __pyx_v_anchor); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 772, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_node = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":771
+ * self.anchors[anchor].start_mark, u"second occurrence", mark)
+ * self.descend_resolver(parent, index)
+ * if self.parsed_event.type == YAML_SCALAR_EVENT: # <<<<<<<<<<<<<<
+ * node = self._compose_scalar_node(anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
+ */
+ break;
+ case YAML_SEQUENCE_START_EVENT:
+
+ /* "_yaml.pyx":774
+ * node = self._compose_scalar_node(anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
+ * node = self._compose_sequence_node(anchor) # <<<<<<<<<<<<<<
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT:
+ * node = self._compose_mapping_node(anchor)
+ */
+ __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_sequence_node(__pyx_v_self, __pyx_v_anchor); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 774, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_node = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":773
+ * if self.parsed_event.type == YAML_SCALAR_EVENT:
+ * node = self._compose_scalar_node(anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT: # <<<<<<<<<<<<<<
+ * node = self._compose_sequence_node(anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT:
+ */
+ break;
+ case YAML_MAPPING_START_EVENT:
+
+ /* "_yaml.pyx":776
+ * node = self._compose_sequence_node(anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT:
+ * node = self._compose_mapping_node(anchor) # <<<<<<<<<<<<<<
+ * self.ascend_resolver()
+ * return node
+ */
+ __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_mapping_node(__pyx_v_self, __pyx_v_anchor); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 776, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_node = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":775
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
+ * node = self._compose_sequence_node(anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT: # <<<<<<<<<<<<<<
+ * node = self._compose_mapping_node(anchor)
+ * self.ascend_resolver()
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":777
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT:
+ * node = self._compose_mapping_node(anchor)
+ * self.ascend_resolver() # <<<<<<<<<<<<<<
+ * return node
+ *
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_ascend_resolver); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 777, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_6 = NULL;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_6)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_6);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ }
+ }
+ __pyx_t_5 = (__pyx_t_6) ? __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_6) : __Pyx_PyObject_CallNoArg(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
+ if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 777, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":778
+ * node = self._compose_mapping_node(anchor)
+ * self.ascend_resolver()
+ * return node # <<<<<<<<<<<<<<
+ *
+ * cdef _compose_scalar_node(self, object anchor):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ if (unlikely(!__pyx_v_node)) { __Pyx_RaiseUnboundLocalError("node"); __PYX_ERR(0, 778, __pyx_L1_error) }
+ __Pyx_INCREF(__pyx_v_node);
+ __pyx_r = __pyx_v_node;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":731
+ * return node
+ *
+ * cdef object _compose_node(self, object parent, object index): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_ALIAS_EVENT:
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_XDECREF(__pyx_t_8);
+ __Pyx_AddTraceback("_yaml.CParser._compose_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_anchor);
+ __Pyx_XDECREF((PyObject *)__pyx_v_mark);
+ __Pyx_XDECREF(__pyx_v_node);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
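/* Readability note: a rough reconstruction of the _yaml.pyx source that the
 * generated `_compose_node` body above corresponds to, assembled from the
 * quoted line comments in this section. Indentation is approximate and the
 * alias handling / anchor extraction at the top of the function (earlier in
 * the file) is elided.
 *
 *     cdef object _compose_node(self, object parent, object index):
 *         self._parse_next_event()
 *         if self.parsed_event.type == YAML_ALIAS_EVENT:
 *             ...  # alias lookup, elided
 *         ...      # anchor extracted from the start event (e.g. mapping_start.anchor), elided
 *         if anchor is not None:
 *             if anchor in self.anchors:
 *                 mark = Mark(self.stream_name,
 *                         self.parsed_event.start_mark.index,
 *                         self.parsed_event.start_mark.line,
 *                         self.parsed_event.start_mark.column,
 *                         None, None)
 *                 if PY_MAJOR_VERSION < 3:
 *                     raise ComposerError("found duplicate anchor; first occurrence",
 *                             self.anchors[anchor].start_mark, "second occurrence", mark)
 *                 else:
 *                     raise ComposerError(u"found duplicate anchor; first occurrence",
 *                             self.anchors[anchor].start_mark, u"second occurrence", mark)
 *         self.descend_resolver(parent, index)
 *         if self.parsed_event.type == YAML_SCALAR_EVENT:
 *             node = self._compose_scalar_node(anchor)
 *         elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
 *             node = self._compose_sequence_node(anchor)
 *         elif self.parsed_event.type == YAML_MAPPING_START_EVENT:
 *             node = self._compose_mapping_node(anchor)
 *         self.ascend_resolver()
 *         return node
 */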
+
+/* "_yaml.pyx":780
+ * return node
+ *
+ * cdef _compose_scalar_node(self, object anchor): # <<<<<<<<<<<<<<
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__compose_scalar_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_anchor) {
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark = NULL;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_end_mark = NULL;
+ PyObject *__pyx_v_value = NULL;
+ int __pyx_v_plain_implicit;
+ int __pyx_v_quoted_implicit;
+ PyObject *__pyx_v_tag = NULL;
+ PyObject *__pyx_v_style = NULL;
+ PyObject *__pyx_v_node = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ PyObject *__pyx_t_7 = NULL;
+ PyObject *__pyx_t_8 = NULL;
+ int __pyx_t_9;
+ __Pyx_RefNannySetupContext("_compose_scalar_node", 0);
+
+ /* "_yaml.pyx":782
+ * cdef _compose_scalar_node(self, object anchor):
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column,
+ */
+ __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 782, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":783
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 783, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":784
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * end_mark = Mark(self.stream_name,
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 784, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":781
+ *
+ * cdef _compose_scalar_node(self, object anchor):
+ * start_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ */
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 781, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 781, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":787
+ * None, None)
+ * end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.line,
+ * self.parsed_event.end_mark.column,
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.end_mark.index); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 787, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":788
+ * end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.column,
+ * None, None)
+ */
+ __pyx_t_4 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.end_mark.line); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 788, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":789
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line,
+ * self.parsed_event.end_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value,
+ */
+ __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.end_mark.column); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 789, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":786
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * end_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line,
+ */
+ __pyx_t_1 = PyTuple_New(6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 786, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_t_2);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 5, Py_None);
+ __pyx_t_3 = 0;
+ __pyx_t_4 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 786, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_end_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_2);
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":791
+ * self.parsed_event.end_mark.column,
+ * None, None)
+ * value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value, # <<<<<<<<<<<<<<
+ * self.parsed_event.data.scalar.length, 'strict')
+ * plain_implicit = False
+ */
+ __pyx_t_2 = PyUnicode_DecodeUTF8(__pyx_v_self->parsed_event.data.scalar.value, __pyx_v_self->parsed_event.data.scalar.length, ((char *)"strict")); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 791, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_value = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":793
+ * value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value,
+ * self.parsed_event.data.scalar.length, 'strict')
+ * plain_implicit = False # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.scalar.plain_implicit == 1:
+ * plain_implicit = True
+ */
+ __pyx_v_plain_implicit = 0;
+
+ /* "_yaml.pyx":794
+ * self.parsed_event.data.scalar.length, 'strict')
+ * plain_implicit = False
+ * if self.parsed_event.data.scalar.plain_implicit == 1: # <<<<<<<<<<<<<<
+ * plain_implicit = True
+ * quoted_implicit = False
+ */
+ __pyx_t_5 = ((__pyx_v_self->parsed_event.data.scalar.plain_implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":795
+ * plain_implicit = False
+ * if self.parsed_event.data.scalar.plain_implicit == 1:
+ * plain_implicit = True # <<<<<<<<<<<<<<
+ * quoted_implicit = False
+ * if self.parsed_event.data.scalar.quoted_implicit == 1:
+ */
+ __pyx_v_plain_implicit = 1;
+
+ /* "_yaml.pyx":794
+ * self.parsed_event.data.scalar.length, 'strict')
+ * plain_implicit = False
+ * if self.parsed_event.data.scalar.plain_implicit == 1: # <<<<<<<<<<<<<<
+ * plain_implicit = True
+ * quoted_implicit = False
+ */
+ }
+
+ /* "_yaml.pyx":796
+ * if self.parsed_event.data.scalar.plain_implicit == 1:
+ * plain_implicit = True
+ * quoted_implicit = False # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True
+ */
+ __pyx_v_quoted_implicit = 0;
+
+ /* "_yaml.pyx":797
+ * plain_implicit = True
+ * quoted_implicit = False
+ * if self.parsed_event.data.scalar.quoted_implicit == 1: # <<<<<<<<<<<<<<
+ * quoted_implicit = True
+ * if self.parsed_event.data.scalar.tag == NULL \
+ */
+ __pyx_t_5 = ((__pyx_v_self->parsed_event.data.scalar.quoted_implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":798
+ * quoted_implicit = False
+ * if self.parsed_event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.scalar.tag == NULL \
+ * or (self.parsed_event.data.scalar.tag[0] == c'!'
+ */
+ __pyx_v_quoted_implicit = 1;
+
+ /* "_yaml.pyx":797
+ * plain_implicit = True
+ * quoted_implicit = False
+ * if self.parsed_event.data.scalar.quoted_implicit == 1: # <<<<<<<<<<<<<<
+ * quoted_implicit = True
+ * if self.parsed_event.data.scalar.tag == NULL \
+ */
+ }
+
+ /* "_yaml.pyx":799
+ * if self.parsed_event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True
+ * if self.parsed_event.data.scalar.tag == NULL \ # <<<<<<<<<<<<<<
+ * or (self.parsed_event.data.scalar.tag[0] == c'!'
+ * and self.parsed_event.data.scalar.tag[1] == c'\0'):
+ */
+ __pyx_t_6 = ((__pyx_v_self->parsed_event.data.scalar.tag == NULL) != 0);
+ if (!__pyx_t_6) {
+ } else {
+ __pyx_t_5 = __pyx_t_6;
+ goto __pyx_L6_bool_binop_done;
+ }
+
+ /* "_yaml.pyx":800
+ * quoted_implicit = True
+ * if self.parsed_event.data.scalar.tag == NULL \
+ * or (self.parsed_event.data.scalar.tag[0] == c'!' # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.scalar.tag[1] == c'\0'):
+ * tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit))
+ */
+ __pyx_t_6 = (((__pyx_v_self->parsed_event.data.scalar.tag[0]) == '!') != 0);
+ if (__pyx_t_6) {
+ } else {
+ __pyx_t_5 = __pyx_t_6;
+ goto __pyx_L6_bool_binop_done;
+ }
+
+ /* "_yaml.pyx":801
+ * if self.parsed_event.data.scalar.tag == NULL \
+ * or (self.parsed_event.data.scalar.tag[0] == c'!'
+ * and self.parsed_event.data.scalar.tag[1] == c'\0'): # <<<<<<<<<<<<<<
+ * tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit))
+ * else:
+ */
+ __pyx_t_6 = (((__pyx_v_self->parsed_event.data.scalar.tag[1]) == '\x00') != 0);
+ __pyx_t_5 = __pyx_t_6;
+ __pyx_L6_bool_binop_done:;
+
+ /* "_yaml.pyx":799
+ * if self.parsed_event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True
+ * if self.parsed_event.data.scalar.tag == NULL \ # <<<<<<<<<<<<<<
+ * or (self.parsed_event.data.scalar.tag[0] == c'!'
+ * and self.parsed_event.data.scalar.tag[1] == c'\0'):
+ */
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":802
+ * or (self.parsed_event.data.scalar.tag[0] == c'!'
+ * and self.parsed_event.data.scalar.tag[1] == c'\0'):
+ * tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit)) # <<<<<<<<<<<<<<
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag)
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 802, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_ScalarNode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 802, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_plain_implicit); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 802, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_7 = __Pyx_PyBool_FromLong(__pyx_v_quoted_implicit); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 802, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_8 = PyTuple_New(2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 802, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_7);
+ PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_t_7);
+ __pyx_t_3 = 0;
+ __pyx_t_7 = 0;
+ __pyx_t_7 = NULL;
+ __pyx_t_9 = 0;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_7)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_7);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_9 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_t_4, __pyx_v_value, __pyx_t_8};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_9, 3+__pyx_t_9); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 802, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_t_4, __pyx_v_value, __pyx_t_8};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_9, 3+__pyx_t_9); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 802, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_3 = PyTuple_New(3+__pyx_t_9); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 802, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (__pyx_t_7) {
+ __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_7); __pyx_t_7 = NULL;
+ }
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_9, __pyx_t_4);
+ __Pyx_INCREF(__pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_9, __pyx_v_value);
+ __Pyx_GIVEREF(__pyx_t_8);
+ PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_9, __pyx_t_8);
+ __pyx_t_4 = 0;
+ __pyx_t_8 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 802, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_tag = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":799
+ * if self.parsed_event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True
+ * if self.parsed_event.data.scalar.tag == NULL \ # <<<<<<<<<<<<<<
+ * or (self.parsed_event.data.scalar.tag[0] == c'!'
+ * and self.parsed_event.data.scalar.tag[1] == c'\0'):
+ */
+ goto __pyx_L5;
+ }
+
+ /* "_yaml.pyx":804
+ * tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit))
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag) # <<<<<<<<<<<<<<
+ * style = None
+ * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ */
+ /*else*/ {
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.scalar.tag); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 804, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_tag = __pyx_t_2;
+ __pyx_t_2 = 0;
+ }
+ __pyx_L5:;
+
+ /* "_yaml.pyx":805
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag)
+ * style = None # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * style = u''
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_style = ((PyObject*)Py_None);
+
+ /* "_yaml.pyx":806
+ * tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag)
+ * style = None
+ * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u''
+ * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ */
+ switch (__pyx_v_self->parsed_event.data.scalar.style) {
+ case YAML_PLAIN_SCALAR_STYLE:
+
+ /* "_yaml.pyx":807
+ * style = None
+ * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * style = u'' # <<<<<<<<<<<<<<
+ * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\''
+ */
+ __Pyx_INCREF(__pyx_kp_u__6);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__6);
+
+ /* "_yaml.pyx":806
+ * tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag)
+ * style = None
+ * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u''
+ * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ */
+ break;
+ case YAML_SINGLE_QUOTED_SCALAR_STYLE:
+
+ /* "_yaml.pyx":809
+ * style = u''
+ * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\'' # <<<<<<<<<<<<<<
+ * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"'
+ */
+ __Pyx_INCREF(__pyx_kp_u__7);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__7);
+
+ /* "_yaml.pyx":808
+ * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * style = u''
+ * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'\''
+ * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ */
+ break;
+ case YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+
+ /* "_yaml.pyx":811
+ * style = u'\''
+ * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"' # <<<<<<<<<<<<<<
+ * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|'
+ */
+ __Pyx_INCREF(__pyx_kp_u__8);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__8);
+
+ /* "_yaml.pyx":810
+ * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\''
+ * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'"'
+ * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ */
+ break;
+ case YAML_LITERAL_SCALAR_STYLE:
+
+ /* "_yaml.pyx":813
+ * style = u'"'
+ * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|' # <<<<<<<<<<<<<<
+ * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ */
+ __Pyx_INCREF(__pyx_kp_u__9);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__9);
+
+ /* "_yaml.pyx":812
+ * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"'
+ * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'|'
+ * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ */
+ break;
+ case YAML_FOLDED_SCALAR_STYLE:
+
+ /* "_yaml.pyx":815
+ * style = u'|'
+ * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>' # <<<<<<<<<<<<<<
+ * node = ScalarNode(tag, value, start_mark, end_mark, style)
+ * if anchor is not None:
+ */
+ __Pyx_INCREF(__pyx_kp_u__10);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__10);
+
+ /* "_yaml.pyx":814
+ * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|'
+ * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'>'
+ * node = ScalarNode(tag, value, start_mark, end_mark, style)
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":816
+ * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ * node = ScalarNode(tag, value, start_mark, end_mark, style) # <<<<<<<<<<<<<<
+ * if anchor is not None:
+ * self.anchors[anchor] = node
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_ScalarNode); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 816, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = NULL;
+ __pyx_t_9 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_3)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_3);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_9 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_3, __pyx_v_tag, __pyx_v_value, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_style};
+ __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_9, 5+__pyx_t_9); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 816, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_3, __pyx_v_tag, __pyx_v_value, ((PyObject *)__pyx_v_start_mark), ((PyObject *)__pyx_v_end_mark), __pyx_v_style};
+ __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_9, 5+__pyx_t_9); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 816, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_GOTREF(__pyx_t_2);
+ } else
+ #endif
+ {
+ __pyx_t_8 = PyTuple_New(5+__pyx_t_9); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 816, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ if (__pyx_t_3) {
+ __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_3); __pyx_t_3 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_9, __pyx_v_tag);
+ __Pyx_INCREF(__pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_9, __pyx_v_value);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_8, 2+__pyx_t_9, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_8, 3+__pyx_t_9, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_style);
+ __Pyx_GIVEREF(__pyx_v_style);
+ PyTuple_SET_ITEM(__pyx_t_8, 4+__pyx_t_9, __pyx_v_style);
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_8, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 816, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_node = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":817
+ * style = u'>'
+ * node = ScalarNode(tag, value, start_mark, end_mark, style)
+ * if anchor is not None: # <<<<<<<<<<<<<<
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_5 = (__pyx_v_anchor != Py_None);
+ __pyx_t_6 = (__pyx_t_5 != 0);
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":818
+ * node = ScalarNode(tag, value, start_mark, end_mark, style)
+ * if anchor is not None:
+ * self.anchors[anchor] = node # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * return node
+ */
+ if (unlikely(PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_anchor, __pyx_v_node) < 0)) __PYX_ERR(0, 818, __pyx_L1_error)
+
+ /* "_yaml.pyx":817
+ * style = u'>'
+ * node = ScalarNode(tag, value, start_mark, end_mark, style)
+ * if anchor is not None: # <<<<<<<<<<<<<<
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ */
+ }
+
+ /* "_yaml.pyx":819
+ * if anchor is not None:
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * return node
+ *
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":820
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ * return node # <<<<<<<<<<<<<<
+ *
+ * cdef _compose_sequence_node(self, object anchor):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_node);
+ __pyx_r = __pyx_v_node;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":780
+ * return node
+ *
+ * cdef _compose_scalar_node(self, object anchor): # <<<<<<<<<<<<<<
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_XDECREF(__pyx_t_8);
+ __Pyx_AddTraceback("_yaml.CParser._compose_scalar_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_start_mark);
+ __Pyx_XDECREF((PyObject *)__pyx_v_end_mark);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XDECREF(__pyx_v_tag);
+ __Pyx_XDECREF(__pyx_v_style);
+ __Pyx_XDECREF(__pyx_v_node);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
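/* Readability note: the _yaml.pyx source for `_compose_scalar_node`,
 * reconstructed from the quoted line comments above (indentation
 * approximate). It builds start/end Marks, decodes the scalar value,
 * resolves or copies the tag, maps the libyaml scalar style to the
 * pure-Python style characters, registers the anchor, and returns the node.
 *
 *     cdef _compose_scalar_node(self, object anchor):
 *         start_mark = Mark(self.stream_name,
 *                 self.parsed_event.start_mark.index,
 *                 self.parsed_event.start_mark.line,
 *                 self.parsed_event.start_mark.column,
 *                 None, None)
 *         end_mark = Mark(self.stream_name,
 *                 self.parsed_event.end_mark.index,
 *                 self.parsed_event.end_mark.line,
 *                 self.parsed_event.end_mark.column,
 *                 None, None)
 *         value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value,
 *                 self.parsed_event.data.scalar.length, 'strict')
 *         plain_implicit = False
 *         if self.parsed_event.data.scalar.plain_implicit == 1:
 *             plain_implicit = True
 *         quoted_implicit = False
 *         if self.parsed_event.data.scalar.quoted_implicit == 1:
 *             quoted_implicit = True
 *         if self.parsed_event.data.scalar.tag == NULL \
 *                 or (self.parsed_event.data.scalar.tag[0] == c'!'
 *                     and self.parsed_event.data.scalar.tag[1] == c'\0'):
 *             tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit))
 *         else:
 *             tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag)
 *         style = None
 *         if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
 *             style = u''
 *         elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
 *             style = u'\''
 *         elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
 *             style = u'"'
 *         elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
 *             style = u'|'
 *         elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
 *             style = u'>'
 *         node = ScalarNode(tag, value, start_mark, end_mark, style)
 *         if anchor is not None:
 *             self.anchors[anchor] = node
 *         yaml_event_delete(&self.parsed_event)
 *         return node
 */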
+
+/* "_yaml.pyx":822
+ * return node
+ *
+ * cdef _compose_sequence_node(self, object anchor): # <<<<<<<<<<<<<<
+ * cdef int index
+ * start_mark = Mark(self.stream_name,
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__compose_sequence_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_anchor) {
+ int __pyx_v_index;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark = NULL;
+ int __pyx_v_implicit;
+ PyObject *__pyx_v_tag = NULL;
+ PyObject *__pyx_v_flow_style = NULL;
+ PyObject *__pyx_v_value = NULL;
+ PyObject *__pyx_v_node = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ PyObject *__pyx_t_7 = NULL;
+ int __pyx_t_8;
+ PyObject *__pyx_t_9 = NULL;
+ int __pyx_t_10;
+ __Pyx_RefNannySetupContext("_compose_sequence_node", 0);
+
+ /* "_yaml.pyx":825
+ * cdef int index
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column,
+ */
+ __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 825, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":826
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 826, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":827
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * implicit = False
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 827, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":824
+ * cdef _compose_sequence_node(self, object anchor):
+ * cdef int index
+ * start_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ */
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 824, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 824, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":829
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * implicit = False # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.sequence_start.implicit == 1:
+ * implicit = True
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":830
+ * None, None)
+ * implicit = False
+ * if self.parsed_event.data.sequence_start.implicit == 1: # <<<<<<<<<<<<<<
+ * implicit = True
+ * if self.parsed_event.data.sequence_start.tag == NULL \
+ */
+ __pyx_t_5 = ((__pyx_v_self->parsed_event.data.sequence_start.implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":831
+ * implicit = False
+ * if self.parsed_event.data.sequence_start.implicit == 1:
+ * implicit = True # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.sequence_start.tag == NULL \
+ * or (self.parsed_event.data.sequence_start.tag[0] == c'!'
+ */
+ __pyx_v_implicit = 1;
+
+ /* "_yaml.pyx":830
+ * None, None)
+ * implicit = False
+ * if self.parsed_event.data.sequence_start.implicit == 1: # <<<<<<<<<<<<<<
+ * implicit = True
+ * if self.parsed_event.data.sequence_start.tag == NULL \
+ */
+ }
+
+ /* "_yaml.pyx":832
+ * if self.parsed_event.data.sequence_start.implicit == 1:
+ * implicit = True
+ * if self.parsed_event.data.sequence_start.tag == NULL \ # <<<<<<<<<<<<<<
+ * or (self.parsed_event.data.sequence_start.tag[0] == c'!'
+ * and self.parsed_event.data.sequence_start.tag[1] == c'\0'):
+ */
+ __pyx_t_6 = ((__pyx_v_self->parsed_event.data.sequence_start.tag == NULL) != 0);
+ if (!__pyx_t_6) {
+ } else {
+ __pyx_t_5 = __pyx_t_6;
+ goto __pyx_L5_bool_binop_done;
+ }
+
+ /* "_yaml.pyx":833
+ * implicit = True
+ * if self.parsed_event.data.sequence_start.tag == NULL \
+ * or (self.parsed_event.data.sequence_start.tag[0] == c'!' # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.sequence_start.tag[1] == c'\0'):
+ * tag = self.resolve(SequenceNode, None, implicit)
+ */
+ __pyx_t_6 = (((__pyx_v_self->parsed_event.data.sequence_start.tag[0]) == '!') != 0);
+ if (__pyx_t_6) {
+ } else {
+ __pyx_t_5 = __pyx_t_6;
+ goto __pyx_L5_bool_binop_done;
+ }
+
+ /* "_yaml.pyx":834
+ * if self.parsed_event.data.sequence_start.tag == NULL \
+ * or (self.parsed_event.data.sequence_start.tag[0] == c'!'
+ * and self.parsed_event.data.sequence_start.tag[1] == c'\0'): # <<<<<<<<<<<<<<
+ * tag = self.resolve(SequenceNode, None, implicit)
+ * else:
+ */
+ __pyx_t_6 = (((__pyx_v_self->parsed_event.data.sequence_start.tag[1]) == '\x00') != 0);
+ __pyx_t_5 = __pyx_t_6;
+ __pyx_L5_bool_binop_done:;
+
+ /* "_yaml.pyx":832
+ * if self.parsed_event.data.sequence_start.implicit == 1:
+ * implicit = True
+ * if self.parsed_event.data.sequence_start.tag == NULL \ # <<<<<<<<<<<<<<
+ * or (self.parsed_event.data.sequence_start.tag[0] == c'!'
+ * and self.parsed_event.data.sequence_start.tag[1] == c'\0'):
+ */
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":835
+ * or (self.parsed_event.data.sequence_start.tag[0] == c'!'
+ * and self.parsed_event.data.sequence_start.tag[1] == c'\0'):
+ * tag = self.resolve(SequenceNode, None, implicit) # <<<<<<<<<<<<<<
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag)
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 835, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_SequenceNode); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 835, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_implicit); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 835, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_7 = NULL;
+ __pyx_t_8 = 0;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) {
+ __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_4);
+ if (likely(__pyx_t_7)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);
+ __Pyx_INCREF(__pyx_t_7);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_4, function);
+ __pyx_t_8 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_4)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_t_2, Py_None, __pyx_t_1};
+ __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 3+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 835, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_t_2, Py_None, __pyx_t_1};
+ __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 3+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 835, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_9 = PyTuple_New(3+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 835, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_9);
+ if (__pyx_t_7) {
+ __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_7); __pyx_t_7 = NULL;
+ }
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_t_2);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, Py_None);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_9, 2+__pyx_t_8, __pyx_t_1);
+ __pyx_t_2 = 0;
+ __pyx_t_1 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_9, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 835, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_tag = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":832
+ * if self.parsed_event.data.sequence_start.implicit == 1:
+ * implicit = True
+ * if self.parsed_event.data.sequence_start.tag == NULL \ # <<<<<<<<<<<<<<
+ * or (self.parsed_event.data.sequence_start.tag[0] == c'!'
+ * and self.parsed_event.data.sequence_start.tag[1] == c'\0'):
+ */
+ goto __pyx_L4;
+ }
+
+ /* "_yaml.pyx":837
+ * tag = self.resolve(SequenceNode, None, implicit)
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag) # <<<<<<<<<<<<<<
+ * flow_style = None
+ * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ */
+ /*else*/ {
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.sequence_start.tag); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 837, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_tag = __pyx_t_3;
+ __pyx_t_3 = 0;
+ }
+ __pyx_L4:;
+
+ /* "_yaml.pyx":838
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag)
+ * flow_style = None # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ * flow_style = True
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_flow_style = Py_None;
+
+ /* "_yaml.pyx":839
+ * tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag)
+ * flow_style = None
+ * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = True
+ * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ */
+ switch (__pyx_v_self->parsed_event.data.sequence_start.style) {
+ case YAML_FLOW_SEQUENCE_STYLE:
+
+ /* "_yaml.pyx":840
+ * flow_style = None
+ * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ * flow_style = True # <<<<<<<<<<<<<<
+ * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ * flow_style = False
+ */
+ __Pyx_INCREF(Py_True);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_True);
+
+ /* "_yaml.pyx":839
+ * tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag)
+ * flow_style = None
+ * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = True
+ * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ */
+ break;
+ case YAML_BLOCK_SEQUENCE_STYLE:
+
+ /* "_yaml.pyx":842
+ * flow_style = True
+ * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ * flow_style = False # <<<<<<<<<<<<<<
+ * value = []
+ * node = SequenceNode(tag, value, start_mark, None, flow_style)
+ */
+ __Pyx_INCREF(Py_False);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_False);
+
+ /* "_yaml.pyx":841
+ * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ * flow_style = True
+ * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = False
+ * value = []
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":843
+ * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ * flow_style = False
+ * value = [] # <<<<<<<<<<<<<<
+ * node = SequenceNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None:
+ */
+ __pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 843, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_value = ((PyObject*)__pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":844
+ * flow_style = False
+ * value = []
+ * node = SequenceNode(tag, value, start_mark, None, flow_style) # <<<<<<<<<<<<<<
+ * if anchor is not None:
+ * self.anchors[anchor] = node
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_SequenceNode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 844, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_9 = NULL;
+ __pyx_t_8 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) {
+ __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_4);
+ if (likely(__pyx_t_9)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);
+ __Pyx_INCREF(__pyx_t_9);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_4, function);
+ __pyx_t_8 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_4)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_9, __pyx_v_tag, __pyx_v_value, ((PyObject *)__pyx_v_start_mark), Py_None, __pyx_v_flow_style};
+ __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 5+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 844, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_GOTREF(__pyx_t_3);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_9, __pyx_v_tag, __pyx_v_value, ((PyObject *)__pyx_v_start_mark), Py_None, __pyx_v_flow_style};
+ __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 5+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 844, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_GOTREF(__pyx_t_3);
+ } else
+ #endif
+ {
+ __pyx_t_1 = PyTuple_New(5+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 844, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (__pyx_t_9) {
+ __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_9); __pyx_t_9 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_8, __pyx_v_tag);
+ __Pyx_INCREF(__pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_8, __pyx_v_value);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 2+__pyx_t_8, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 3+__pyx_t_8, Py_None);
+ __Pyx_INCREF(__pyx_v_flow_style);
+ __Pyx_GIVEREF(__pyx_v_flow_style);
+ PyTuple_SET_ITEM(__pyx_t_1, 4+__pyx_t_8, __pyx_v_flow_style);
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_1, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 844, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_node = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":845
+ * value = []
+ * node = SequenceNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None: # <<<<<<<<<<<<<<
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_5 = (__pyx_v_anchor != Py_None);
+ __pyx_t_6 = (__pyx_t_5 != 0);
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":846
+ * node = SequenceNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None:
+ * self.anchors[anchor] = node # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * index = 0
+ */
+ if (unlikely(PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_anchor, __pyx_v_node) < 0)) __PYX_ERR(0, 846, __pyx_L1_error)
+
+ /* "_yaml.pyx":845
+ * value = []
+ * node = SequenceNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None: # <<<<<<<<<<<<<<
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ */
+ }
+
+ /* "_yaml.pyx":847
+ * if anchor is not None:
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * index = 0
+ * self._parse_next_event()
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":848
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ * index = 0 # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT:
+ */
+ __pyx_v_index = 0;
+
+ /* "_yaml.pyx":849
+ * yaml_event_delete(&self.parsed_event)
+ * index = 0
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT:
+ * value.append(self._compose_node(node, index))
+ */
+ __pyx_t_8 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_8 == ((int)0))) __PYX_ERR(0, 849, __pyx_L1_error)
+
+ /* "_yaml.pyx":850
+ * index = 0
+ * self._parse_next_event()
+ * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT: # <<<<<<<<<<<<<<
+ * value.append(self._compose_node(node, index))
+ * index = index+1
+ */
+ while (1) {
+ __pyx_t_6 = ((__pyx_v_self->parsed_event.type != YAML_SEQUENCE_END_EVENT) != 0);
+ if (!__pyx_t_6) break;
+
+ /* "_yaml.pyx":851
+ * self._parse_next_event()
+ * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT:
+ * value.append(self._compose_node(node, index)) # <<<<<<<<<<<<<<
+ * index = index+1
+ * self._parse_next_event()
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_index); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 851, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_node(__pyx_v_self, __pyx_v_node, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 851, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_10 = __Pyx_PyList_Append(__pyx_v_value, __pyx_t_4); if (unlikely(__pyx_t_10 == ((int)-1))) __PYX_ERR(0, 851, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":852
+ * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT:
+ * value.append(self._compose_node(node, index))
+ * index = index+1 # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * node.end_mark = Mark(self.stream_name,
+ */
+ __pyx_v_index = (__pyx_v_index + 1);
+
+ /* "_yaml.pyx":853
+ * value.append(self._compose_node(node, index))
+ * index = index+1
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * node.end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index,
+ */
+ __pyx_t_8 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_8 == ((int)0))) __PYX_ERR(0, 853, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":855
+ * self._parse_next_event()
+ * node.end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.line,
+ * self.parsed_event.end_mark.column,
+ */
+ __pyx_t_4 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.end_mark.index); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 855, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":856
+ * node.end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.column,
+ * None, None)
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.end_mark.line); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 856, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":857
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line,
+ * self.parsed_event.end_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.end_mark.column); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 857, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":854
+ * index = index+1
+ * self._parse_next_event()
+ * node.end_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line,
+ */
+ __pyx_t_9 = PyTuple_New(6); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 854, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_9);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_9, 1, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_9, 2, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_9, 3, __pyx_t_1);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_9, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_9, 5, Py_None);
+ __pyx_t_4 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_9, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 854, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ if (__Pyx_PyObject_SetAttrStr(__pyx_v_node, __pyx_n_s_end_mark, __pyx_t_1) < 0) __PYX_ERR(0, 854, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":859
+ * self.parsed_event.end_mark.column,
+ * None, None)
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * return node
+ *
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":860
+ * None, None)
+ * yaml_event_delete(&self.parsed_event)
+ * return node # <<<<<<<<<<<<<<
+ *
+ * cdef _compose_mapping_node(self, object anchor):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_node);
+ __pyx_r = __pyx_v_node;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":822
+ * return node
+ *
+ * cdef _compose_sequence_node(self, object anchor): # <<<<<<<<<<<<<<
+ * cdef int index
+ * start_mark = Mark(self.stream_name,
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_XDECREF(__pyx_t_9);
+ __Pyx_AddTraceback("_yaml.CParser._compose_sequence_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_start_mark);
+ __Pyx_XDECREF(__pyx_v_tag);
+ __Pyx_XDECREF(__pyx_v_flow_style);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XDECREF(__pyx_v_node);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":862
+ * return node
+ *
+ * cdef _compose_mapping_node(self, object anchor): # <<<<<<<<<<<<<<
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__compose_mapping_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_anchor) {
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark = NULL;
+ int __pyx_v_implicit;
+ PyObject *__pyx_v_tag = NULL;
+ PyObject *__pyx_v_flow_style = NULL;
+ PyObject *__pyx_v_value = NULL;
+ PyObject *__pyx_v_node = NULL;
+ PyObject *__pyx_v_item_key = NULL;
+ PyObject *__pyx_v_item_value = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ PyObject *__pyx_t_7 = NULL;
+ int __pyx_t_8;
+ PyObject *__pyx_t_9 = NULL;
+ int __pyx_t_10;
+ __Pyx_RefNannySetupContext("_compose_mapping_node", 0);
+
+ /* "_yaml.pyx":864
+ * cdef _compose_mapping_node(self, object anchor):
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column,
+ */
+ __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 864, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":865
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 865, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":866
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * implicit = False
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 866, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":863
+ *
+ * cdef _compose_mapping_node(self, object anchor):
+ * start_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ */
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 863, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 863, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":868
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * implicit = False # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.mapping_start.implicit == 1:
+ * implicit = True
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":869
+ * None, None)
+ * implicit = False
+ * if self.parsed_event.data.mapping_start.implicit == 1: # <<<<<<<<<<<<<<
+ * implicit = True
+ * if self.parsed_event.data.mapping_start.tag == NULL \
+ */
+ __pyx_t_5 = ((__pyx_v_self->parsed_event.data.mapping_start.implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":870
+ * implicit = False
+ * if self.parsed_event.data.mapping_start.implicit == 1:
+ * implicit = True # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.mapping_start.tag == NULL \
+ * or (self.parsed_event.data.mapping_start.tag[0] == c'!'
+ */
+ __pyx_v_implicit = 1;
+
+ /* "_yaml.pyx":869
+ * None, None)
+ * implicit = False
+ * if self.parsed_event.data.mapping_start.implicit == 1: # <<<<<<<<<<<<<<
+ * implicit = True
+ * if self.parsed_event.data.mapping_start.tag == NULL \
+ */
+ }
+
+ /* "_yaml.pyx":871
+ * if self.parsed_event.data.mapping_start.implicit == 1:
+ * implicit = True
+ * if self.parsed_event.data.mapping_start.tag == NULL \ # <<<<<<<<<<<<<<
+ * or (self.parsed_event.data.mapping_start.tag[0] == c'!'
+ * and self.parsed_event.data.mapping_start.tag[1] == c'\0'):
+ */
+ __pyx_t_6 = ((__pyx_v_self->parsed_event.data.mapping_start.tag == NULL) != 0);
+ if (!__pyx_t_6) {
+ } else {
+ __pyx_t_5 = __pyx_t_6;
+ goto __pyx_L5_bool_binop_done;
+ }
+
+ /* "_yaml.pyx":872
+ * implicit = True
+ * if self.parsed_event.data.mapping_start.tag == NULL \
+ * or (self.parsed_event.data.mapping_start.tag[0] == c'!' # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.mapping_start.tag[1] == c'\0'):
+ * tag = self.resolve(MappingNode, None, implicit)
+ */
+ __pyx_t_6 = (((__pyx_v_self->parsed_event.data.mapping_start.tag[0]) == '!') != 0);
+ if (__pyx_t_6) {
+ } else {
+ __pyx_t_5 = __pyx_t_6;
+ goto __pyx_L5_bool_binop_done;
+ }
+
+ /* "_yaml.pyx":873
+ * if self.parsed_event.data.mapping_start.tag == NULL \
+ * or (self.parsed_event.data.mapping_start.tag[0] == c'!'
+ * and self.parsed_event.data.mapping_start.tag[1] == c'\0'): # <<<<<<<<<<<<<<
+ * tag = self.resolve(MappingNode, None, implicit)
+ * else:
+ */
+ __pyx_t_6 = (((__pyx_v_self->parsed_event.data.mapping_start.tag[1]) == '\x00') != 0);
+ __pyx_t_5 = __pyx_t_6;
+ __pyx_L5_bool_binop_done:;
+
+ /* "_yaml.pyx":871
+ * if self.parsed_event.data.mapping_start.implicit == 1:
+ * implicit = True
+ * if self.parsed_event.data.mapping_start.tag == NULL \ # <<<<<<<<<<<<<<
+ * or (self.parsed_event.data.mapping_start.tag[0] == c'!'
+ * and self.parsed_event.data.mapping_start.tag[1] == c'\0'):
+ */
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":874
+ * or (self.parsed_event.data.mapping_start.tag[0] == c'!'
+ * and self.parsed_event.data.mapping_start.tag[1] == c'\0'):
+ * tag = self.resolve(MappingNode, None, implicit) # <<<<<<<<<<<<<<
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag)
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 874, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_MappingNode); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 874, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_implicit); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 874, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_7 = NULL;
+ __pyx_t_8 = 0;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) {
+ __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_4);
+ if (likely(__pyx_t_7)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);
+ __Pyx_INCREF(__pyx_t_7);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_4, function);
+ __pyx_t_8 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_4)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_t_2, Py_None, __pyx_t_1};
+ __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 3+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 874, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_t_2, Py_None, __pyx_t_1};
+ __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 3+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 874, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_9 = PyTuple_New(3+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 874, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_9);
+ if (__pyx_t_7) {
+ __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_7); __pyx_t_7 = NULL;
+ }
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_t_2);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, Py_None);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_9, 2+__pyx_t_8, __pyx_t_1);
+ __pyx_t_2 = 0;
+ __pyx_t_1 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_9, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 874, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_tag = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":871
+ * if self.parsed_event.data.mapping_start.implicit == 1:
+ * implicit = True
+ * if self.parsed_event.data.mapping_start.tag == NULL \ # <<<<<<<<<<<<<<
+ * or (self.parsed_event.data.mapping_start.tag[0] == c'!'
+ * and self.parsed_event.data.mapping_start.tag[1] == c'\0'):
+ */
+ goto __pyx_L4;
+ }
+
+ /* "_yaml.pyx":876
+ * tag = self.resolve(MappingNode, None, implicit)
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag) # <<<<<<<<<<<<<<
+ * flow_style = None
+ * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ */
+ /*else*/ {
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.mapping_start.tag); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 876, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_tag = __pyx_t_3;
+ __pyx_t_3 = 0;
+ }
+ __pyx_L4:;
+
+ /* "_yaml.pyx":877
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag)
+ * flow_style = None # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ * flow_style = True
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_flow_style = Py_None;
+
+ /* "_yaml.pyx":878
+ * tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag)
+ * flow_style = None
+ * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = True
+ * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ */
+ switch (__pyx_v_self->parsed_event.data.mapping_start.style) {
+ case YAML_FLOW_MAPPING_STYLE:
+
+ /* "_yaml.pyx":879
+ * flow_style = None
+ * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ * flow_style = True # <<<<<<<<<<<<<<
+ * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ * flow_style = False
+ */
+ __Pyx_INCREF(Py_True);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_True);
+
+ /* "_yaml.pyx":878
+ * tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag)
+ * flow_style = None
+ * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = True
+ * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ */
+ break;
+ case YAML_BLOCK_MAPPING_STYLE:
+
+ /* "_yaml.pyx":881
+ * flow_style = True
+ * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ * flow_style = False # <<<<<<<<<<<<<<
+ * value = []
+ * node = MappingNode(tag, value, start_mark, None, flow_style)
+ */
+ __Pyx_INCREF(Py_False);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_False);
+
+ /* "_yaml.pyx":880
+ * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ * flow_style = True
+ * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = False
+ * value = []
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":882
+ * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ * flow_style = False
+ * value = [] # <<<<<<<<<<<<<<
+ * node = MappingNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None:
+ */
+ __pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 882, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_value = ((PyObject*)__pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":883
+ * flow_style = False
+ * value = []
+ * node = MappingNode(tag, value, start_mark, None, flow_style) # <<<<<<<<<<<<<<
+ * if anchor is not None:
+ * self.anchors[anchor] = node
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_MappingNode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 883, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_9 = NULL;
+ __pyx_t_8 = 0;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) {
+ __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_4);
+ if (likely(__pyx_t_9)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);
+ __Pyx_INCREF(__pyx_t_9);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_4, function);
+ __pyx_t_8 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_4)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_9, __pyx_v_tag, __pyx_v_value, ((PyObject *)__pyx_v_start_mark), Py_None, __pyx_v_flow_style};
+ __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 5+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 883, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_GOTREF(__pyx_t_3);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) {
+ PyObject *__pyx_temp[6] = {__pyx_t_9, __pyx_v_tag, __pyx_v_value, ((PyObject *)__pyx_v_start_mark), Py_None, __pyx_v_flow_style};
+ __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 5+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 883, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_GOTREF(__pyx_t_3);
+ } else
+ #endif
+ {
+ __pyx_t_1 = PyTuple_New(5+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 883, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (__pyx_t_9) {
+ __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_9); __pyx_t_9 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_8, __pyx_v_tag);
+ __Pyx_INCREF(__pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_8, __pyx_v_value);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 2+__pyx_t_8, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 3+__pyx_t_8, Py_None);
+ __Pyx_INCREF(__pyx_v_flow_style);
+ __Pyx_GIVEREF(__pyx_v_flow_style);
+ PyTuple_SET_ITEM(__pyx_t_1, 4+__pyx_t_8, __pyx_v_flow_style);
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_1, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 883, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_node = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":884
+ * value = []
+ * node = MappingNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None: # <<<<<<<<<<<<<<
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_5 = (__pyx_v_anchor != Py_None);
+ __pyx_t_6 = (__pyx_t_5 != 0);
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":885
+ * node = MappingNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None:
+ * self.anchors[anchor] = node # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ */
+ if (unlikely(PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_anchor, __pyx_v_node) < 0)) __PYX_ERR(0, 885, __pyx_L1_error)
+
+ /* "_yaml.pyx":884
+ * value = []
+ * node = MappingNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None: # <<<<<<<<<<<<<<
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ */
+ }
+
+ /* "_yaml.pyx":886
+ * if anchor is not None:
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * while self.parsed_event.type != YAML_MAPPING_END_EVENT:
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":887
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * while self.parsed_event.type != YAML_MAPPING_END_EVENT:
+ * item_key = self._compose_node(node, None)
+ */
+ __pyx_t_8 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_8 == ((int)0))) __PYX_ERR(0, 887, __pyx_L1_error)
+
+ /* "_yaml.pyx":888
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ * while self.parsed_event.type != YAML_MAPPING_END_EVENT: # <<<<<<<<<<<<<<
+ * item_key = self._compose_node(node, None)
+ * item_value = self._compose_node(node, item_key)
+ */
+ while (1) {
+ __pyx_t_6 = ((__pyx_v_self->parsed_event.type != YAML_MAPPING_END_EVENT) != 0);
+ if (!__pyx_t_6) break;
+
+ /* "_yaml.pyx":889
+ * self._parse_next_event()
+ * while self.parsed_event.type != YAML_MAPPING_END_EVENT:
+ * item_key = self._compose_node(node, None) # <<<<<<<<<<<<<<
+ * item_value = self._compose_node(node, item_key)
+ * value.append((item_key, item_value))
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_node(__pyx_v_self, __pyx_v_node, Py_None); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 889, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_XDECREF_SET(__pyx_v_item_key, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":890
+ * while self.parsed_event.type != YAML_MAPPING_END_EVENT:
+ * item_key = self._compose_node(node, None)
+ * item_value = self._compose_node(node, item_key) # <<<<<<<<<<<<<<
+ * value.append((item_key, item_value))
+ * self._parse_next_event()
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_node(__pyx_v_self, __pyx_v_node, __pyx_v_item_key); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 890, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_XDECREF_SET(__pyx_v_item_value, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":891
+ * item_key = self._compose_node(node, None)
+ * item_value = self._compose_node(node, item_key)
+ * value.append((item_key, item_value)) # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * node.end_mark = Mark(self.stream_name,
+ */
+ __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 891, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_item_key);
+ __Pyx_GIVEREF(__pyx_v_item_key);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_item_key);
+ __Pyx_INCREF(__pyx_v_item_value);
+ __Pyx_GIVEREF(__pyx_v_item_value);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_item_value);
+ __pyx_t_10 = __Pyx_PyList_Append(__pyx_v_value, __pyx_t_3); if (unlikely(__pyx_t_10 == ((int)-1))) __PYX_ERR(0, 891, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":892
+ * item_value = self._compose_node(node, item_key)
+ * value.append((item_key, item_value))
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * node.end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index,
+ */
+ __pyx_t_8 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_8 == ((int)0))) __PYX_ERR(0, 892, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":894
+ * self._parse_next_event()
+ * node.end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.line,
+ * self.parsed_event.end_mark.column,
+ */
+ __pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.end_mark.index); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 894, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":895
+ * node.end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.column,
+ * None, None)
+ */
+ __pyx_t_4 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.end_mark.line); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 895, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":896
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line,
+ * self.parsed_event.end_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_self->parsed_event.end_mark.column); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 896, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":893
+ * value.append((item_key, item_value))
+ * self._parse_next_event()
+ * node.end_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line,
+ */
+ __pyx_t_9 = PyTuple_New(6); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 893, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_9);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_9, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_9, 2, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_9, 3, __pyx_t_1);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_9, 4, Py_None);
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_9, 5, Py_None);
+ __pyx_t_3 = 0;
+ __pyx_t_4 = 0;
+ __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_t_9, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 893, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ if (__Pyx_PyObject_SetAttrStr(__pyx_v_node, __pyx_n_s_end_mark, __pyx_t_1) < 0) __PYX_ERR(0, 893, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":898
+ * self.parsed_event.end_mark.column,
+ * None, None)
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * return node
+ *
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":899
+ * None, None)
+ * yaml_event_delete(&self.parsed_event)
+ * return node # <<<<<<<<<<<<<<
+ *
+ * cdef int _parse_next_event(self) except 0:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_node);
+ __pyx_r = __pyx_v_node;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":862
+ * return node
+ *
+ * cdef _compose_mapping_node(self, object anchor): # <<<<<<<<<<<<<<
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_XDECREF(__pyx_t_9);
+ __Pyx_AddTraceback("_yaml.CParser._compose_mapping_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_start_mark);
+ __Pyx_XDECREF(__pyx_v_tag);
+ __Pyx_XDECREF(__pyx_v_flow_style);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XDECREF(__pyx_v_node);
+ __Pyx_XDECREF(__pyx_v_item_key);
+ __Pyx_XDECREF(__pyx_v_item_value);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":901
+ * return node
+ *
+ * cdef int _parse_next_event(self) except 0: # <<<<<<<<<<<<<<
+ * if self.parsed_event.type == YAML_NO_EVENT:
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
+ */
+
+static int __pyx_f_5_yaml_7CParser__parse_next_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_v_error = NULL;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ __Pyx_RefNannySetupContext("_parse_next_event", 0);
+
+ /* "_yaml.pyx":902
+ *
+ * cdef int _parse_next_event(self) except 0:
+ * if self.parsed_event.type == YAML_NO_EVENT: # <<<<<<<<<<<<<<
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
+ * error = self._parser_error()
+ */
+ __pyx_t_1 = ((__pyx_v_self->parsed_event.type == YAML_NO_EVENT) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":903
+ * cdef int _parse_next_event(self) except 0:
+ * if self.parsed_event.type == YAML_NO_EVENT:
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ __pyx_t_2 = yaml_parser_parse((&__pyx_v_self->parser), (&__pyx_v_self->parsed_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 903, __pyx_L1_error)
+ __pyx_t_1 = ((__pyx_t_2 == 0) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":904
+ * if self.parsed_event.type == YAML_NO_EVENT:
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
+ * error = self._parser_error() # <<<<<<<<<<<<<<
+ * raise error
+ * return 1
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parser_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 904, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_error = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":905
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
+ * error = self._parser_error()
+ * raise error # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 905, __pyx_L1_error)
+
+ /* "_yaml.pyx":903
+ * cdef int _parse_next_event(self) except 0:
+ * if self.parsed_event.type == YAML_NO_EVENT:
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":902
+ *
+ * cdef int _parse_next_event(self) except 0:
+ * if self.parsed_event.type == YAML_NO_EVENT: # <<<<<<<<<<<<<<
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
+ * error = self._parser_error()
+ */
+ }
+
+ /* "_yaml.pyx":906
+ * error = self._parser_error()
+ * raise error
+ * return 1 # <<<<<<<<<<<<<<
+ *
+ * cdef int input_handler(void *data, char *buffer, size_t size, size_t *read) except 0:
+ */
+ __pyx_r = 1;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":901
+ * return node
+ *
+ * cdef int _parse_next_event(self) except 0: # <<<<<<<<<<<<<<
+ * if self.parsed_event.type == YAML_NO_EVENT:
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser._parse_next_event", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "(tree fragment)":1
+ * def __reduce_cython__(self): # <<<<<<<<<<<<<<
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling")
+ * def __setstate_cython__(self, __pyx_state):
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_29__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_29__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_28__reduce_cython__(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_28__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannySetupContext("__reduce_cython__", 0);
+
+ /* "(tree fragment)":2
+ * def __reduce_cython__(self):
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling") # <<<<<<<<<<<<<<
+ * def __setstate_cython__(self, __pyx_state):
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling")
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__15, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(1, 2, __pyx_L1_error)
+
+ /* "(tree fragment)":1
+ * def __reduce_cython__(self): # <<<<<<<<<<<<<<
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling")
+ * def __setstate_cython__(self, __pyx_state):
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("_yaml.CParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "(tree fragment)":3
+ * def __reduce_cython__(self):
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling")
+ * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling")
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_31__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_31__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_30__setstate_cython__(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_30__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_5_yaml_CParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannySetupContext("__setstate_cython__", 0);
+
+ /* "(tree fragment)":4
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling")
+ * def __setstate_cython__(self, __pyx_state):
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling") # <<<<<<<<<<<<<<
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__16, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(1, 4, __pyx_L1_error)
+
+ /* "(tree fragment)":3
+ * def __reduce_cython__(self):
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling")
+ * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling")
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("_yaml.CParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":908
+ * return 1
+ *
+ * cdef int input_handler(void *data, char *buffer, size_t size, size_t *read) except 0: # <<<<<<<<<<<<<<
+ * cdef CParser parser
+ * parser = <CParser>data
+ */
+
+static int __pyx_f_5_yaml_input_handler(void *__pyx_v_data, char *__pyx_v_buffer, size_t __pyx_v_size, size_t *__pyx_v_read) {
+ struct __pyx_obj_5_yaml_CParser *__pyx_v_parser = 0;
+ PyObject *__pyx_v_value = NULL;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_t_2;
+ int __pyx_t_3;
+ PyObject *__pyx_t_4 = NULL;
+ PyObject *__pyx_t_5 = NULL;
+ PyObject *__pyx_t_6 = NULL;
+ __Pyx_RefNannySetupContext("input_handler", 0);
+
+ /* "_yaml.pyx":910
+ * cdef int input_handler(void *data, char *buffer, size_t size, size_t *read) except 0:
+ * cdef CParser parser
+ * parser = <CParser>data # <<<<<<<<<<<<<<
+ * if parser.stream_cache is None:
+ * value = parser.stream.read(size)
+ */
+ __pyx_t_1 = ((PyObject *)__pyx_v_data);
+ __Pyx_INCREF(__pyx_t_1);
+ __pyx_v_parser = ((struct __pyx_obj_5_yaml_CParser *)__pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":911
+ * cdef CParser parser
+ * parser = <CParser>data
+ * if parser.stream_cache is None: # <<<<<<<<<<<<<<
+ * value = parser.stream.read(size)
+ * if PyUnicode_CheckExact(value) != 0:
+ */
+ __pyx_t_2 = (__pyx_v_parser->stream_cache == Py_None);
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":912
+ * parser = <CParser>data
+ * if parser.stream_cache is None:
+ * value = parser.stream.read(size) # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(value) != 0:
+ * value = PyUnicode_AsUTF8String(value)
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_parser->stream, __pyx_n_s_read); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 912, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_5 = __Pyx_PyInt_FromSize_t(__pyx_v_size); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 912, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = NULL;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) {
+ __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4);
+ if (likely(__pyx_t_6)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);
+ __Pyx_INCREF(__pyx_t_6);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_4, function);
+ }
+ }
+ __pyx_t_1 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 912, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_value = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":913
+ * if parser.stream_cache is None:
+ * value = parser.stream.read(size)
+ * if PyUnicode_CheckExact(value) != 0: # <<<<<<<<<<<<<<
+ * value = PyUnicode_AsUTF8String(value)
+ * parser.unicode_source = 1
+ */
+ __pyx_t_3 = ((PyUnicode_CheckExact(__pyx_v_value) != 0) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":914
+ * value = parser.stream.read(size)
+ * if PyUnicode_CheckExact(value) != 0:
+ * value = PyUnicode_AsUTF8String(value) # <<<<<<<<<<<<<<
+ * parser.unicode_source = 1
+ * if PyString_CheckExact(value) == 0:
+ */
+ __pyx_t_1 = PyUnicode_AsUTF8String(__pyx_v_value); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 914, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF_SET(__pyx_v_value, __pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":915
+ * if PyUnicode_CheckExact(value) != 0:
+ * value = PyUnicode_AsUTF8String(value)
+ * parser.unicode_source = 1 # <<<<<<<<<<<<<<
+ * if PyString_CheckExact(value) == 0:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_v_parser->unicode_source = 1;
+
+ /* "_yaml.pyx":913
+ * if parser.stream_cache is None:
+ * value = parser.stream.read(size)
+ * if PyUnicode_CheckExact(value) != 0: # <<<<<<<<<<<<<<
+ * value = PyUnicode_AsUTF8String(value)
+ * parser.unicode_source = 1
+ */
+ }
+
+ /* "_yaml.pyx":916
+ * value = PyUnicode_AsUTF8String(value)
+ * parser.unicode_source = 1
+ * if PyString_CheckExact(value) == 0: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string value is expected")
+ */
+ __pyx_t_3 = ((PyString_CheckExact(__pyx_v_value) == 0) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":917
+ * parser.unicode_source = 1
+ * if PyString_CheckExact(value) == 0:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("a string value is expected")
+ * else:
+ */
+ __pyx_t_3 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_3)) {
+
+ /* "_yaml.pyx":918
+ * if PyString_CheckExact(value) == 0:
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string value is expected") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"a string value is expected")
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__17, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 918, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(0, 918, __pyx_L1_error)
+
+ /* "_yaml.pyx":917
+ * parser.unicode_source = 1
+ * if PyString_CheckExact(value) == 0:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("a string value is expected")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":920
+ * raise TypeError("a string value is expected")
+ * else:
+ * raise TypeError(u"a string value is expected") # <<<<<<<<<<<<<<
+ * parser.stream_cache = value
+ * parser.stream_cache_pos = 0
+ */
+ /*else*/ {
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__18, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 920, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(0, 920, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":916
+ * value = PyUnicode_AsUTF8String(value)
+ * parser.unicode_source = 1
+ * if PyString_CheckExact(value) == 0: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string value is expected")
+ */
+ }
+
+ /* "_yaml.pyx":921
+ * else:
+ * raise TypeError(u"a string value is expected")
+ * parser.stream_cache = value # <<<<<<<<<<<<<<
+ * parser.stream_cache_pos = 0
+ * parser.stream_cache_len = PyString_GET_SIZE(value)
+ */
+ __Pyx_INCREF(__pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ __Pyx_GOTREF(__pyx_v_parser->stream_cache);
+ __Pyx_DECREF(__pyx_v_parser->stream_cache);
+ __pyx_v_parser->stream_cache = __pyx_v_value;
+
+ /* "_yaml.pyx":922
+ * raise TypeError(u"a string value is expected")
+ * parser.stream_cache = value
+ * parser.stream_cache_pos = 0 # <<<<<<<<<<<<<<
+ * parser.stream_cache_len = PyString_GET_SIZE(value)
+ * if (parser.stream_cache_len - parser.stream_cache_pos) < size:
+ */
+ __pyx_v_parser->stream_cache_pos = 0;
+
+ /* "_yaml.pyx":923
+ * parser.stream_cache = value
+ * parser.stream_cache_pos = 0
+ * parser.stream_cache_len = PyString_GET_SIZE(value) # <<<<<<<<<<<<<<
+ * if (parser.stream_cache_len - parser.stream_cache_pos) < size:
+ * size = parser.stream_cache_len - parser.stream_cache_pos
+ */
+ __pyx_v_parser->stream_cache_len = PyString_GET_SIZE(__pyx_v_value);
+
+ /* "_yaml.pyx":911
+ * cdef CParser parser
+ * parser = <CParser>data
+ * if parser.stream_cache is None: # <<<<<<<<<<<<<<
+ * value = parser.stream.read(size)
+ * if PyUnicode_CheckExact(value) != 0:
+ */
+ }
+
+ /* "_yaml.pyx":924
+ * parser.stream_cache_pos = 0
+ * parser.stream_cache_len = PyString_GET_SIZE(value)
+ * if (parser.stream_cache_len - parser.stream_cache_pos) < size: # <<<<<<<<<<<<<<
+ * size = parser.stream_cache_len - parser.stream_cache_pos
+ * if size > 0:
+ */
+ __pyx_t_3 = (((__pyx_v_parser->stream_cache_len - __pyx_v_parser->stream_cache_pos) < __pyx_v_size) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":925
+ * parser.stream_cache_len = PyString_GET_SIZE(value)
+ * if (parser.stream_cache_len - parser.stream_cache_pos) < size:
+ * size = parser.stream_cache_len - parser.stream_cache_pos # <<<<<<<<<<<<<<
+ * if size > 0:
+ * memcpy(buffer, PyString_AS_STRING(parser.stream_cache)
+ */
+ __pyx_v_size = (__pyx_v_parser->stream_cache_len - __pyx_v_parser->stream_cache_pos);
+
+ /* "_yaml.pyx":924
+ * parser.stream_cache_pos = 0
+ * parser.stream_cache_len = PyString_GET_SIZE(value)
+ * if (parser.stream_cache_len - parser.stream_cache_pos) < size: # <<<<<<<<<<<<<<
+ * size = parser.stream_cache_len - parser.stream_cache_pos
+ * if size > 0:
+ */
+ }
+
+ /* "_yaml.pyx":926
+ * if (parser.stream_cache_len - parser.stream_cache_pos) < size:
+ * size = parser.stream_cache_len - parser.stream_cache_pos
+ * if size > 0: # <<<<<<<<<<<<<<
+ * memcpy(buffer, PyString_AS_STRING(parser.stream_cache)
+ * + parser.stream_cache_pos, size)
+ */
+ __pyx_t_3 = ((__pyx_v_size > 0) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":927
+ * size = parser.stream_cache_len - parser.stream_cache_pos
+ * if size > 0:
+ * memcpy(buffer, PyString_AS_STRING(parser.stream_cache) # <<<<<<<<<<<<<<
+ * + parser.stream_cache_pos, size)
+ * read[0] = size
+ */
+ __pyx_t_1 = __pyx_v_parser->stream_cache;
+ __Pyx_INCREF(__pyx_t_1);
+
+ /* "_yaml.pyx":928
+ * if size > 0:
+ * memcpy(buffer, PyString_AS_STRING(parser.stream_cache)
+ * + parser.stream_cache_pos, size) # <<<<<<<<<<<<<<
+ * read[0] = size
+ * parser.stream_cache_pos += size
+ */
+ memcpy(__pyx_v_buffer, (PyString_AS_STRING(__pyx_t_1) + __pyx_v_parser->stream_cache_pos), __pyx_v_size);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":926
+ * if (parser.stream_cache_len - parser.stream_cache_pos) < size:
+ * size = parser.stream_cache_len - parser.stream_cache_pos
+ * if size > 0: # <<<<<<<<<<<<<<
+ * memcpy(buffer, PyString_AS_STRING(parser.stream_cache)
+ * + parser.stream_cache_pos, size)
+ */
+ }
+
+ /* "_yaml.pyx":929
+ * memcpy(buffer, PyString_AS_STRING(parser.stream_cache)
+ * + parser.stream_cache_pos, size)
+ * read[0] = size # <<<<<<<<<<<<<<
+ * parser.stream_cache_pos += size
+ * if parser.stream_cache_pos == parser.stream_cache_len:
+ */
+ (__pyx_v_read[0]) = __pyx_v_size;
+
+ /* "_yaml.pyx":930
+ * + parser.stream_cache_pos, size)
+ * read[0] = size
+ * parser.stream_cache_pos += size # <<<<<<<<<<<<<<
+ * if parser.stream_cache_pos == parser.stream_cache_len:
+ * parser.stream_cache = None
+ */
+ __pyx_v_parser->stream_cache_pos = (__pyx_v_parser->stream_cache_pos + __pyx_v_size);
+
+ /* "_yaml.pyx":931
+ * read[0] = size
+ * parser.stream_cache_pos += size
+ * if parser.stream_cache_pos == parser.stream_cache_len: # <<<<<<<<<<<<<<
+ * parser.stream_cache = None
+ * return 1
+ */
+ __pyx_t_3 = ((__pyx_v_parser->stream_cache_pos == __pyx_v_parser->stream_cache_len) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":932
+ * parser.stream_cache_pos += size
+ * if parser.stream_cache_pos == parser.stream_cache_len:
+ * parser.stream_cache = None # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_GOTREF(__pyx_v_parser->stream_cache);
+ __Pyx_DECREF(__pyx_v_parser->stream_cache);
+ __pyx_v_parser->stream_cache = Py_None;
+
+ /* "_yaml.pyx":931
+ * read[0] = size
+ * parser.stream_cache_pos += size
+ * if parser.stream_cache_pos == parser.stream_cache_len: # <<<<<<<<<<<<<<
+ * parser.stream_cache = None
+ * return 1
+ */
+ }
+
+ /* "_yaml.pyx":933
+ * if parser.stream_cache_pos == parser.stream_cache_len:
+ * parser.stream_cache = None
+ * return 1 # <<<<<<<<<<<<<<
+ *
+ * cdef class CEmitter:
+ */
+ __pyx_r = 1;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":908
+ * return 1
+ *
+ * cdef int input_handler(void *data, char *buffer, size_t size, size_t *read) except 0: # <<<<<<<<<<<<<<
+ * cdef CParser parser
+ * parser = <CParser>data
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6);
+ __Pyx_AddTraceback("_yaml.input_handler", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_parser);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":953
+ * cdef object use_encoding
+ *
+ * def __init__(self, stream, canonical=None, indent=None, width=None, # <<<<<<<<<<<<<<
+ * allow_unicode=None, line_break=None, encoding=None,
+ * explicit_start=None, explicit_end=None, version=None, tags=None):
+ */
+
+/* Python wrapper */
+static int __pyx_pw_5_yaml_8CEmitter_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static int __pyx_pw_5_yaml_8CEmitter_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ PyObject *__pyx_v_stream = 0;
+ PyObject *__pyx_v_canonical = 0;
+ PyObject *__pyx_v_indent = 0;
+ PyObject *__pyx_v_width = 0;
+ PyObject *__pyx_v_allow_unicode = 0;
+ PyObject *__pyx_v_line_break = 0;
+ PyObject *__pyx_v_encoding = 0;
+ PyObject *__pyx_v_explicit_start = 0;
+ PyObject *__pyx_v_explicit_end = 0;
+ PyObject *__pyx_v_version = 0;
+ PyObject *__pyx_v_tags = 0;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__init__ (wrapper)", 0);
+ {
+ static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_stream,&__pyx_n_s_canonical,&__pyx_n_s_indent,&__pyx_n_s_width,&__pyx_n_s_allow_unicode,&__pyx_n_s_line_break,&__pyx_n_s_encoding,&__pyx_n_s_explicit_start,&__pyx_n_s_explicit_end,&__pyx_n_s_version,&__pyx_n_s_tags,0};
+ PyObject* values[11] = {0,0,0,0,0,0,0,0,0,0,0};
+ values[1] = ((PyObject *)Py_None);
+ values[2] = ((PyObject *)Py_None);
+ values[3] = ((PyObject *)Py_None);
+
+ /* "_yaml.pyx":954
+ *
+ * def __init__(self, stream, canonical=None, indent=None, width=None,
+ * allow_unicode=None, line_break=None, encoding=None, # <<<<<<<<<<<<<<
+ * explicit_start=None, explicit_end=None, version=None, tags=None):
+ * if yaml_emitter_initialize(&self.emitter) == 0:
+ */
+ values[4] = ((PyObject *)Py_None);
+ values[5] = ((PyObject *)Py_None);
+ values[6] = ((PyObject *)Py_None);
+
+ /* "_yaml.pyx":955
+ * def __init__(self, stream, canonical=None, indent=None, width=None,
+ * allow_unicode=None, line_break=None, encoding=None,
+ * explicit_start=None, explicit_end=None, version=None, tags=None): # <<<<<<<<<<<<<<
+ * if yaml_emitter_initialize(&self.emitter) == 0:
+ * raise MemoryError
+ */
+ values[7] = ((PyObject *)Py_None);
+ values[8] = ((PyObject *)Py_None);
+ values[9] = ((PyObject *)Py_None);
+ values[10] = ((PyObject *)Py_None);
+ if (unlikely(__pyx_kwds)) {
+ Py_ssize_t kw_args;
+ const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
+ switch (pos_args) {
+ case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10);
+ CYTHON_FALLTHROUGH;
+ case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9);
+ CYTHON_FALLTHROUGH;
+ case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8);
+ CYTHON_FALLTHROUGH;
+ case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7);
+ CYTHON_FALLTHROUGH;
+ case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6);
+ CYTHON_FALLTHROUGH;
+ case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
+ CYTHON_FALLTHROUGH;
+ case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+ CYTHON_FALLTHROUGH;
+ case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ CYTHON_FALLTHROUGH;
+ case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ CYTHON_FALLTHROUGH;
+ case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ CYTHON_FALLTHROUGH;
+ case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ CYTHON_FALLTHROUGH;
+ case 0: break;
+ default: goto __pyx_L5_argtuple_error;
+ }
+ kw_args = PyDict_Size(__pyx_kwds);
+ switch (pos_args) {
+ case 0:
+ if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_stream)) != 0)) kw_args--;
+ else goto __pyx_L5_argtuple_error;
+ CYTHON_FALLTHROUGH;
+ case 1:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_canonical);
+ if (value) { values[1] = value; kw_args--; }
+ }
+ CYTHON_FALLTHROUGH;
+ case 2:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_indent);
+ if (value) { values[2] = value; kw_args--; }
+ }
+ CYTHON_FALLTHROUGH;
+ case 3:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_width);
+ if (value) { values[3] = value; kw_args--; }
+ }
+ CYTHON_FALLTHROUGH;
+ case 4:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_allow_unicode);
+ if (value) { values[4] = value; kw_args--; }
+ }
+ CYTHON_FALLTHROUGH;
+ case 5:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_line_break);
+ if (value) { values[5] = value; kw_args--; }
+ }
+ CYTHON_FALLTHROUGH;
+ case 6:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_encoding);
+ if (value) { values[6] = value; kw_args--; }
+ }
+ CYTHON_FALLTHROUGH;
+ case 7:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_explicit_start);
+ if (value) { values[7] = value; kw_args--; }
+ }
+ CYTHON_FALLTHROUGH;
+ case 8:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_explicit_end);
+ if (value) { values[8] = value; kw_args--; }
+ }
+ CYTHON_FALLTHROUGH;
+ case 9:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_version);
+ if (value) { values[9] = value; kw_args--; }
+ }
+ CYTHON_FALLTHROUGH;
+ case 10:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_tags);
+ if (value) { values[10] = value; kw_args--; }
+ }
+ }
+ if (unlikely(kw_args > 0)) {
+ if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 953, __pyx_L3_error)
+ }
+ } else {
+ switch (PyTuple_GET_SIZE(__pyx_args)) {
+ case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10);
+ CYTHON_FALLTHROUGH;
+ case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9);
+ CYTHON_FALLTHROUGH;
+ case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8);
+ CYTHON_FALLTHROUGH;
+ case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7);
+ CYTHON_FALLTHROUGH;
+ case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6);
+ CYTHON_FALLTHROUGH;
+ case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
+ CYTHON_FALLTHROUGH;
+ case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+ CYTHON_FALLTHROUGH;
+ case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ CYTHON_FALLTHROUGH;
+ case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ CYTHON_FALLTHROUGH;
+ case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ CYTHON_FALLTHROUGH;
+ case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ break;
+ default: goto __pyx_L5_argtuple_error;
+ }
+ }
+ __pyx_v_stream = values[0];
+ __pyx_v_canonical = values[1];
+ __pyx_v_indent = values[2];
+ __pyx_v_width = values[3];
+ __pyx_v_allow_unicode = values[4];
+ __pyx_v_line_break = values[5];
+ __pyx_v_encoding = values[6];
+ __pyx_v_explicit_start = values[7];
+ __pyx_v_explicit_end = values[8];
+ __pyx_v_version = values[9];
+ __pyx_v_tags = values[10];
+ }
+ goto __pyx_L4_argument_unpacking_done;
+ __pyx_L5_argtuple_error:;
+ __Pyx_RaiseArgtupleInvalid("__init__", 0, 1, 11, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 953, __pyx_L3_error)
+ __pyx_L3_error:;
+ __Pyx_AddTraceback("_yaml.CEmitter.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __Pyx_RefNannyFinishContext();
+ return -1;
+ __pyx_L4_argument_unpacking_done:;
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter___init__(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self), __pyx_v_stream, __pyx_v_canonical, __pyx_v_indent, __pyx_v_width, __pyx_v_allow_unicode, __pyx_v_line_break, __pyx_v_encoding, __pyx_v_explicit_start, __pyx_v_explicit_end, __pyx_v_version, __pyx_v_tags);
+
+ /* "_yaml.pyx":953
+ * cdef object use_encoding
+ *
+ * def __init__(self, stream, canonical=None, indent=None, width=None, # <<<<<<<<<<<<<<
+ * allow_unicode=None, line_break=None, encoding=None,
+ * explicit_start=None, explicit_end=None, version=None, tags=None):
+ */
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static int __pyx_pf_5_yaml_8CEmitter___init__(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_stream, PyObject *__pyx_v_canonical, PyObject *__pyx_v_indent, PyObject *__pyx_v_width, PyObject *__pyx_v_allow_unicode, PyObject *__pyx_v_line_break, PyObject *__pyx_v_encoding, PyObject *__pyx_v_explicit_start, PyObject *__pyx_v_explicit_end, PyObject *__pyx_v_version, PyObject *__pyx_v_tags) {
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ int __pyx_t_3;
+ int __pyx_t_4;
+ __Pyx_RefNannySetupContext("__init__", 0);
+
+ /* "_yaml.pyx":956
+ * allow_unicode=None, line_break=None, encoding=None,
+ * explicit_start=None, explicit_end=None, version=None, tags=None):
+ * if yaml_emitter_initialize(&self.emitter) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * self.stream = stream
+ */
+ __pyx_t_1 = ((yaml_emitter_initialize((&__pyx_v_self->emitter)) == 0) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":957
+ * explicit_start=None, explicit_end=None, version=None, tags=None):
+ * if yaml_emitter_initialize(&self.emitter) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * self.stream = stream
+ * self.dump_unicode = 0
+ */
+ PyErr_NoMemory(); __PYX_ERR(0, 957, __pyx_L1_error)
+
+ /* "_yaml.pyx":956
+ * allow_unicode=None, line_break=None, encoding=None,
+ * explicit_start=None, explicit_end=None, version=None, tags=None):
+ * if yaml_emitter_initialize(&self.emitter) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * self.stream = stream
+ */
+ }
+
+ /* "_yaml.pyx":958
+ * if yaml_emitter_initialize(&self.emitter) == 0:
+ * raise MemoryError
+ * self.stream = stream # <<<<<<<<<<<<<<
+ * self.dump_unicode = 0
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __Pyx_INCREF(__pyx_v_stream);
+ __Pyx_GIVEREF(__pyx_v_stream);
+ __Pyx_GOTREF(__pyx_v_self->stream);
+ __Pyx_DECREF(__pyx_v_self->stream);
+ __pyx_v_self->stream = __pyx_v_stream;
+
+ /* "_yaml.pyx":959
+ * raise MemoryError
+ * self.stream = stream
+ * self.dump_unicode = 0 # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * if getattr3(stream, 'encoding', None):
+ */
+ __pyx_v_self->dump_unicode = 0;
+
+ /* "_yaml.pyx":960
+ * self.stream = stream
+ * self.dump_unicode = 0
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * if getattr3(stream, 'encoding', None):
+ * self.dump_unicode = 1
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":961
+ * self.dump_unicode = 0
+ * if PY_MAJOR_VERSION < 3:
+ * if getattr3(stream, 'encoding', None): # <<<<<<<<<<<<<<
+ * self.dump_unicode = 1
+ * else:
+ */
+ __pyx_t_2 = __Pyx_GetAttr3(__pyx_v_stream, __pyx_n_s_encoding, Py_None); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 961, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 961, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":962
+ * if PY_MAJOR_VERSION < 3:
+ * if getattr3(stream, 'encoding', None):
+ * self.dump_unicode = 1 # <<<<<<<<<<<<<<
+ * else:
+ * if hasattr(stream, u'encoding'):
+ */
+ __pyx_v_self->dump_unicode = 1;
+
+ /* "_yaml.pyx":961
+ * self.dump_unicode = 0
+ * if PY_MAJOR_VERSION < 3:
+ * if getattr3(stream, 'encoding', None): # <<<<<<<<<<<<<<
+ * self.dump_unicode = 1
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":960
+ * self.stream = stream
+ * self.dump_unicode = 0
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * if getattr3(stream, 'encoding', None):
+ * self.dump_unicode = 1
+ */
+ goto __pyx_L4;
+ }
+
+ /* "_yaml.pyx":964
+ * self.dump_unicode = 1
+ * else:
+ * if hasattr(stream, u'encoding'): # <<<<<<<<<<<<<<
+ * self.dump_unicode = 1
+ * self.use_encoding = encoding
+ */
+ /*else*/ {
+ __pyx_t_1 = __Pyx_HasAttr(__pyx_v_stream, __pyx_n_u_encoding); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 964, __pyx_L1_error)
+ __pyx_t_3 = (__pyx_t_1 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":965
+ * else:
+ * if hasattr(stream, u'encoding'):
+ * self.dump_unicode = 1 # <<<<<<<<<<<<<<
+ * self.use_encoding = encoding
+ * yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
+ */
+ __pyx_v_self->dump_unicode = 1;
+
+ /* "_yaml.pyx":964
+ * self.dump_unicode = 1
+ * else:
+ * if hasattr(stream, u'encoding'): # <<<<<<<<<<<<<<
+ * self.dump_unicode = 1
+ * self.use_encoding = encoding
+ */
+ }
+ }
+ __pyx_L4:;
+
+ /* "_yaml.pyx":966
+ * if hasattr(stream, u'encoding'):
+ * self.dump_unicode = 1
+ * self.use_encoding = encoding # <<<<<<<<<<<<<<
+ * yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
+ * if canonical:
+ */
+ __Pyx_INCREF(__pyx_v_encoding);
+ __Pyx_GIVEREF(__pyx_v_encoding);
+ __Pyx_GOTREF(__pyx_v_self->use_encoding);
+ __Pyx_DECREF(__pyx_v_self->use_encoding);
+ __pyx_v_self->use_encoding = __pyx_v_encoding;
+
+ /* "_yaml.pyx":967
+ * self.dump_unicode = 1
+ * self.use_encoding = encoding
+ * yaml_emitter_set_output(&self.emitter, output_handler, <void *>self) # <<<<<<<<<<<<<<
+ * if canonical:
+ * yaml_emitter_set_canonical(&self.emitter, 1)
+ */
+ yaml_emitter_set_output((&__pyx_v_self->emitter), __pyx_f_5_yaml_output_handler, ((void *)__pyx_v_self));
+
+ /* "_yaml.pyx":968
+ * self.use_encoding = encoding
+ * yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
+ * if canonical: # <<<<<<<<<<<<<<
+ * yaml_emitter_set_canonical(&self.emitter, 1)
+ * if indent is not None:
+ */
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_canonical); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 968, __pyx_L1_error)
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":969
+ * yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
+ * if canonical:
+ * yaml_emitter_set_canonical(&self.emitter, 1) # <<<<<<<<<<<<<<
+ * if indent is not None:
+ * yaml_emitter_set_indent(&self.emitter, indent)
+ */
+ yaml_emitter_set_canonical((&__pyx_v_self->emitter), 1);
+
+ /* "_yaml.pyx":968
+ * self.use_encoding = encoding
+ * yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
+ * if canonical: # <<<<<<<<<<<<<<
+ * yaml_emitter_set_canonical(&self.emitter, 1)
+ * if indent is not None:
+ */
+ }
+
+ /* "_yaml.pyx":970
+ * if canonical:
+ * yaml_emitter_set_canonical(&self.emitter, 1)
+ * if indent is not None: # <<<<<<<<<<<<<<
+ * yaml_emitter_set_indent(&self.emitter, indent)
+ * if width is not None:
+ */
+ __pyx_t_3 = (__pyx_v_indent != Py_None);
+ __pyx_t_1 = (__pyx_t_3 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":971
+ * yaml_emitter_set_canonical(&self.emitter, 1)
+ * if indent is not None:
+ * yaml_emitter_set_indent(&self.emitter, indent) # <<<<<<<<<<<<<<
+ * if width is not None:
+ * yaml_emitter_set_width(&self.emitter, width)
+ */
+ __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v_indent); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 971, __pyx_L1_error)
+ yaml_emitter_set_indent((&__pyx_v_self->emitter), __pyx_t_4);
+
+ /* "_yaml.pyx":970
+ * if canonical:
+ * yaml_emitter_set_canonical(&self.emitter, 1)
+ * if indent is not None: # <<<<<<<<<<<<<<
+ * yaml_emitter_set_indent(&self.emitter, indent)
+ * if width is not None:
+ */
+ }
+
+ /* "_yaml.pyx":972
+ * if indent is not None:
+ * yaml_emitter_set_indent(&self.emitter, indent)
+ * if width is not None: # <<<<<<<<<<<<<<
+ * yaml_emitter_set_width(&self.emitter, width)
+ * if allow_unicode:
+ */
+ __pyx_t_1 = (__pyx_v_width != Py_None);
+ __pyx_t_3 = (__pyx_t_1 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":973
+ * yaml_emitter_set_indent(&self.emitter, indent)
+ * if width is not None:
+ * yaml_emitter_set_width(&self.emitter, width) # <<<<<<<<<<<<<<
+ * if allow_unicode:
+ * yaml_emitter_set_unicode(&self.emitter, 1)
+ */
+ __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v_width); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 973, __pyx_L1_error)
+ yaml_emitter_set_width((&__pyx_v_self->emitter), __pyx_t_4);
+
+ /* "_yaml.pyx":972
+ * if indent is not None:
+ * yaml_emitter_set_indent(&self.emitter, indent)
+ * if width is not None: # <<<<<<<<<<<<<<
+ * yaml_emitter_set_width(&self.emitter, width)
+ * if allow_unicode:
+ */
+ }
+
+ /* "_yaml.pyx":974
+ * if width is not None:
+ * yaml_emitter_set_width(&self.emitter, width)
+ * if allow_unicode: # <<<<<<<<<<<<<<
+ * yaml_emitter_set_unicode(&self.emitter, 1)
+ * if line_break is not None:
+ */
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_allow_unicode); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 974, __pyx_L1_error)
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":975
+ * yaml_emitter_set_width(&self.emitter, width)
+ * if allow_unicode:
+ * yaml_emitter_set_unicode(&self.emitter, 1) # <<<<<<<<<<<<<<
+ * if line_break is not None:
+ * if line_break == '\r':
+ */
+ yaml_emitter_set_unicode((&__pyx_v_self->emitter), 1);
+
+ /* "_yaml.pyx":974
+ * if width is not None:
+ * yaml_emitter_set_width(&self.emitter, width)
+ * if allow_unicode: # <<<<<<<<<<<<<<
+ * yaml_emitter_set_unicode(&self.emitter, 1)
+ * if line_break is not None:
+ */
+ }
+
+ /* "_yaml.pyx":976
+ * if allow_unicode:
+ * yaml_emitter_set_unicode(&self.emitter, 1)
+ * if line_break is not None: # <<<<<<<<<<<<<<
+ * if line_break == '\r':
+ * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
+ */
+ __pyx_t_3 = (__pyx_v_line_break != Py_None);
+ __pyx_t_1 = (__pyx_t_3 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":977
+ * yaml_emitter_set_unicode(&self.emitter, 1)
+ * if line_break is not None:
+ * if line_break == '\r': # <<<<<<<<<<<<<<
+ * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
+ * elif line_break == '\n':
+ */
+ __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_line_break, __pyx_kp_s__19, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 977, __pyx_L1_error)
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":978
+ * if line_break is not None:
+ * if line_break == '\r':
+ * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK) # <<<<<<<<<<<<<<
+ * elif line_break == '\n':
+ * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK)
+ */
+ yaml_emitter_set_break((&__pyx_v_self->emitter), YAML_CR_BREAK);
+
+ /* "_yaml.pyx":977
+ * yaml_emitter_set_unicode(&self.emitter, 1)
+ * if line_break is not None:
+ * if line_break == '\r': # <<<<<<<<<<<<<<
+ * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
+ * elif line_break == '\n':
+ */
+ goto __pyx_L12;
+ }
+
+ /* "_yaml.pyx":979
+ * if line_break == '\r':
+ * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
+ * elif line_break == '\n': # <<<<<<<<<<<<<<
+ * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK)
+ * elif line_break == '\r\n':
+ */
+ __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_line_break, __pyx_kp_s__20, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 979, __pyx_L1_error)
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":980
+ * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
+ * elif line_break == '\n':
+ * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK) # <<<<<<<<<<<<<<
+ * elif line_break == '\r\n':
+ * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK)
+ */
+ yaml_emitter_set_break((&__pyx_v_self->emitter), YAML_LN_BREAK);
+
+ /* "_yaml.pyx":979
+ * if line_break == '\r':
+ * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
+ * elif line_break == '\n': # <<<<<<<<<<<<<<
+ * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK)
+ * elif line_break == '\r\n':
+ */
+ goto __pyx_L12;
+ }
+
+ /* "_yaml.pyx":981
+ * elif line_break == '\n':
+ * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK)
+ * elif line_break == '\r\n': # <<<<<<<<<<<<<<
+ * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK)
+ * self.document_start_implicit = 1
+ */
+ __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_line_break, __pyx_kp_s__21, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 981, __pyx_L1_error)
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":982
+ * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK)
+ * elif line_break == '\r\n':
+ * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK) # <<<<<<<<<<<<<<
+ * self.document_start_implicit = 1
+ * if explicit_start:
+ */
+ yaml_emitter_set_break((&__pyx_v_self->emitter), YAML_CRLN_BREAK);
+
+ /* "_yaml.pyx":981
+ * elif line_break == '\n':
+ * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK)
+ * elif line_break == '\r\n': # <<<<<<<<<<<<<<
+ * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK)
+ * self.document_start_implicit = 1
+ */
+ }
+ __pyx_L12:;
+
+ /* "_yaml.pyx":976
+ * if allow_unicode:
+ * yaml_emitter_set_unicode(&self.emitter, 1)
+ * if line_break is not None: # <<<<<<<<<<<<<<
+ * if line_break == '\r':
+ * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
+ */
+ }
+
+ /* "_yaml.pyx":983
+ * elif line_break == '\r\n':
+ * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK)
+ * self.document_start_implicit = 1 # <<<<<<<<<<<<<<
+ * if explicit_start:
+ * self.document_start_implicit = 0
+ */
+ __pyx_v_self->document_start_implicit = 1;
+
+ /* "_yaml.pyx":984
+ * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK)
+ * self.document_start_implicit = 1
+ * if explicit_start: # <<<<<<<<<<<<<<
+ * self.document_start_implicit = 0
+ * self.document_end_implicit = 1
+ */
+ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_explicit_start); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 984, __pyx_L1_error)
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":985
+ * self.document_start_implicit = 1
+ * if explicit_start:
+ * self.document_start_implicit = 0 # <<<<<<<<<<<<<<
+ * self.document_end_implicit = 1
+ * if explicit_end:
+ */
+ __pyx_v_self->document_start_implicit = 0;
+
+ /* "_yaml.pyx":984
+ * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK)
+ * self.document_start_implicit = 1
+ * if explicit_start: # <<<<<<<<<<<<<<
+ * self.document_start_implicit = 0
+ * self.document_end_implicit = 1
+ */
+ }
+
+ /* "_yaml.pyx":986
+ * if explicit_start:
+ * self.document_start_implicit = 0
+ * self.document_end_implicit = 1 # <<<<<<<<<<<<<<
+ * if explicit_end:
+ * self.document_end_implicit = 0
+ */
+ __pyx_v_self->document_end_implicit = 1;
+
+ /* "_yaml.pyx":987
+ * self.document_start_implicit = 0
+ * self.document_end_implicit = 1
+ * if explicit_end: # <<<<<<<<<<<<<<
+ * self.document_end_implicit = 0
+ * self.use_version = version
+ */
+ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_explicit_end); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 987, __pyx_L1_error)
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":988
+ * self.document_end_implicit = 1
+ * if explicit_end:
+ * self.document_end_implicit = 0 # <<<<<<<<<<<<<<
+ * self.use_version = version
+ * self.use_tags = tags
+ */
+ __pyx_v_self->document_end_implicit = 0;
+
+ /* "_yaml.pyx":987
+ * self.document_start_implicit = 0
+ * self.document_end_implicit = 1
+ * if explicit_end: # <<<<<<<<<<<<<<
+ * self.document_end_implicit = 0
+ * self.use_version = version
+ */
+ }
+
+ /* "_yaml.pyx":989
+ * if explicit_end:
+ * self.document_end_implicit = 0
+ * self.use_version = version # <<<<<<<<<<<<<<
+ * self.use_tags = tags
+ * self.serialized_nodes = {}
+ */
+ __Pyx_INCREF(__pyx_v_version);
+ __Pyx_GIVEREF(__pyx_v_version);
+ __Pyx_GOTREF(__pyx_v_self->use_version);
+ __Pyx_DECREF(__pyx_v_self->use_version);
+ __pyx_v_self->use_version = __pyx_v_version;
+
+ /* "_yaml.pyx":990
+ * self.document_end_implicit = 0
+ * self.use_version = version
+ * self.use_tags = tags # <<<<<<<<<<<<<<
+ * self.serialized_nodes = {}
+ * self.anchors = {}
+ */
+ __Pyx_INCREF(__pyx_v_tags);
+ __Pyx_GIVEREF(__pyx_v_tags);
+ __Pyx_GOTREF(__pyx_v_self->use_tags);
+ __Pyx_DECREF(__pyx_v_self->use_tags);
+ __pyx_v_self->use_tags = __pyx_v_tags;
+
+ /* "_yaml.pyx":991
+ * self.use_version = version
+ * self.use_tags = tags
+ * self.serialized_nodes = {} # <<<<<<<<<<<<<<
+ * self.anchors = {}
+ * self.last_alias_id = 0
+ */
+ __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 991, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_GOTREF(__pyx_v_self->serialized_nodes);
+ __Pyx_DECREF(__pyx_v_self->serialized_nodes);
+ __pyx_v_self->serialized_nodes = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":992
+ * self.use_tags = tags
+ * self.serialized_nodes = {}
+ * self.anchors = {} # <<<<<<<<<<<<<<
+ * self.last_alias_id = 0
+ * self.closed = -1
+ */
+ __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 992, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_GOTREF(__pyx_v_self->anchors);
+ __Pyx_DECREF(__pyx_v_self->anchors);
+ __pyx_v_self->anchors = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":993
+ * self.serialized_nodes = {}
+ * self.anchors = {}
+ * self.last_alias_id = 0 # <<<<<<<<<<<<<<
+ * self.closed = -1
+ *
+ */
+ __pyx_v_self->last_alias_id = 0;
+
+ /* "_yaml.pyx":994
+ * self.anchors = {}
+ * self.last_alias_id = 0
+ * self.closed = -1 # <<<<<<<<<<<<<<
+ *
+ * def __dealloc__(self):
+ */
+ __pyx_v_self->closed = -1;
+
+ /* "_yaml.pyx":953
+ * cdef object use_encoding
+ *
+ * def __init__(self, stream, canonical=None, indent=None, width=None, # <<<<<<<<<<<<<<
+ * allow_unicode=None, line_break=None, encoding=None,
+ * explicit_start=None, explicit_end=None, version=None, tags=None):
+ */
+
+ /* function exit code */
+ __pyx_r = 0;
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_AddTraceback("_yaml.CEmitter.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = -1;
+ __pyx_L0:;
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":996
+ * self.closed = -1
+ *
+ * def __dealloc__(self): # <<<<<<<<<<<<<<
+ * yaml_emitter_delete(&self.emitter)
+ *
+ */
+
+/* Python wrapper */
+static void __pyx_pw_5_yaml_8CEmitter_3__dealloc__(PyObject *__pyx_v_self); /*proto*/
+static void __pyx_pw_5_yaml_8CEmitter_3__dealloc__(PyObject *__pyx_v_self) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0);
+ __pyx_pf_5_yaml_8CEmitter_2__dealloc__(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+}
+
+static void __pyx_pf_5_yaml_8CEmitter_2__dealloc__(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__dealloc__", 0);
+
+ /* "_yaml.pyx":997
+ *
+ * def __dealloc__(self):
+ * yaml_emitter_delete(&self.emitter) # <<<<<<<<<<<<<<
+ *
+ * def dispose(self):
+ */
+ yaml_emitter_delete((&__pyx_v_self->emitter));
+
+ /* "_yaml.pyx":996
+ * self.closed = -1
+ *
+ * def __dealloc__(self): # <<<<<<<<<<<<<<
+ * yaml_emitter_delete(&self.emitter)
+ *
+ */
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+}
+
+/* "_yaml.pyx":999
+ * yaml_emitter_delete(&self.emitter)
+ *
+ * def dispose(self): # <<<<<<<<<<<<<<
+ * pass
+ *
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_8CEmitter_5dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_8CEmitter_5dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("dispose (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter_4dispose(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_8CEmitter_4dispose(CYTHON_UNUSED struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("dispose", 0);
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
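+/*
+ * Illustrative sketch (not part of the generated module): CEmitter.__init__ and
+ * __dealloc__ above wrap the plain libyaml emitter life cycle shown below.
+ * The names example_write_handler and example_emitter_setup are made up for
+ * this example; the generated code instead routes output through
+ * __pyx_f_5_yaml_output_handler back into the Python stream object.
+ *
+ *   #include <stdio.h>
+ *   #include <yaml.h>
+ *
+ *   static int example_write_handler(void *data, unsigned char *buffer, size_t size) {
+ *       // Forward emitted bytes to stdout; return 1 on success as libyaml expects.
+ *       return fwrite(buffer, 1, size, stdout) == size;
+ *   }
+ *
+ *   static int example_emitter_setup(yaml_emitter_t *emitter) {
+ *       if (yaml_emitter_initialize(emitter) == 0)
+ *           return 0;                               // mirrors `raise MemoryError` above
+ *       yaml_emitter_set_output(emitter, example_write_handler, NULL);
+ *       yaml_emitter_set_canonical(emitter, 1);     // canonical=True
+ *       yaml_emitter_set_indent(emitter, 2);        // indent=2
+ *       yaml_emitter_set_width(emitter, 80);        // width=80
+ *       yaml_emitter_set_unicode(emitter, 1);       // allow_unicode=True
+ *       yaml_emitter_set_break(emitter, YAML_LN_BREAK); // line_break='\n'
+ *       return 1;
+ *   }
+ *   // ...use the emitter, then release it as __dealloc__ does:
+ *   //   yaml_emitter_delete(emitter);
+ */
+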
+/* "_yaml.pyx":1002
+ * pass
+ *
+ * cdef object _emitter_error(self): # <<<<<<<<<<<<<<
+ * if self.emitter.error == YAML_MEMORY_ERROR:
+ * return MemoryError
+ */
+
+static PyObject *__pyx_f_5_yaml_8CEmitter__emitter_error(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self) {
+ PyObject *__pyx_v_problem = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ __Pyx_RefNannySetupContext("_emitter_error", 0);
+
+ /* "_yaml.pyx":1003
+ *
+ * cdef object _emitter_error(self):
+ * if self.emitter.error == YAML_MEMORY_ERROR: # <<<<<<<<<<<<<<
+ * return MemoryError
+ * elif self.emitter.error == YAML_EMITTER_ERROR:
+ */
+ switch (__pyx_v_self->emitter.error) {
+ case YAML_MEMORY_ERROR:
+
+ /* "_yaml.pyx":1004
+ * cdef object _emitter_error(self):
+ * if self.emitter.error == YAML_MEMORY_ERROR:
+ * return MemoryError # <<<<<<<<<<<<<<
+ * elif self.emitter.error == YAML_EMITTER_ERROR:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_builtin_MemoryError);
+ __pyx_r = __pyx_builtin_MemoryError;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":1003
+ *
+ * cdef object _emitter_error(self):
+ * if self.emitter.error == YAML_MEMORY_ERROR: # <<<<<<<<<<<<<<
+ * return MemoryError
+ * elif self.emitter.error == YAML_EMITTER_ERROR:
+ */
+ break;
+ case YAML_EMITTER_ERROR:
+
+ /* "_yaml.pyx":1006
+ * return MemoryError
+ * elif self.emitter.error == YAML_EMITTER_ERROR:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * problem = self.emitter.problem
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1007
+ * elif self.emitter.error == YAML_EMITTER_ERROR:
+ * if PY_MAJOR_VERSION < 3:
+ * problem = self.emitter.problem # <<<<<<<<<<<<<<
+ * else:
+ * problem = PyUnicode_FromString(self.emitter.problem)
+ */
+ __pyx_t_2 = __Pyx_PyBytes_FromString(__pyx_v_self->emitter.problem); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1007, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_problem = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1006
+ * return MemoryError
+ * elif self.emitter.error == YAML_EMITTER_ERROR:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * problem = self.emitter.problem
+ * else:
+ */
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1009
+ * problem = self.emitter.problem
+ * else:
+ * problem = PyUnicode_FromString(self.emitter.problem) # <<<<<<<<<<<<<<
+ * return EmitterError(problem)
+ * if PY_MAJOR_VERSION < 3:
+ */
+ /*else*/ {
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_self->emitter.problem); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1009, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_problem = __pyx_t_2;
+ __pyx_t_2 = 0;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":1010
+ * else:
+ * problem = PyUnicode_FromString(self.emitter.problem)
+ * return EmitterError(problem) # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("no emitter error")
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_EmitterError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1010, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ }
+ }
+ __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_problem) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_problem);
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1010, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":1005
+ * if self.emitter.error == YAML_MEMORY_ERROR:
+ * return MemoryError
+ * elif self.emitter.error == YAML_EMITTER_ERROR: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * problem = self.emitter.problem
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":1011
+ * problem = PyUnicode_FromString(self.emitter.problem)
+ * return EmitterError(problem)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("no emitter error")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":1012
+ * return EmitterError(problem)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("no emitter error") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"no emitter error")
+ */
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__22, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1012, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __PYX_ERR(0, 1012, __pyx_L1_error)
+
+ /* "_yaml.pyx":1011
+ * problem = PyUnicode_FromString(self.emitter.problem)
+ * return EmitterError(problem)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("no emitter error")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1014
+ * raise ValueError("no emitter error")
+ * else:
+ * raise ValueError(u"no emitter error") # <<<<<<<<<<<<<<
+ *
+ * cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0:
+ */
+ /*else*/ {
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__23, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1014, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __PYX_ERR(0, 1014, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1002
+ * pass
+ *
+ * cdef object _emitter_error(self): # <<<<<<<<<<<<<<
+ * if self.emitter.error == YAML_MEMORY_ERROR:
+ * return MemoryError
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("_yaml.CEmitter._emitter_error", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_problem);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
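+/*
+ * Illustrative sketch (not part of the generated module): _emitter_error above
+ * translates the libyaml error state into a Python exception object
+ * (MemoryError, EmitterError(problem), or ValueError for "no emitter error").
+ * Against plain libyaml the same inspection looks roughly like the fragment
+ * below; the helper name example_check_emitter is made up for this example.
+ *
+ *   #include <yaml.h>
+ *
+ *   static const char *example_check_emitter(const yaml_emitter_t *emitter) {
+ *       switch (emitter->error) {
+ *           case YAML_MEMORY_ERROR:
+ *               return "memory error";       // surfaced as MemoryError above
+ *           case YAML_EMITTER_ERROR:
+ *               return emitter->problem;     // surfaced as EmitterError(problem) above
+ *           default:
+ *               return NULL;                 // "no emitter error" -> ValueError above
+ *       }
+ *   }
+ */
+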
+/* "_yaml.pyx":1016
+ * raise ValueError(u"no emitter error")
+ *
+ * cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0: # <<<<<<<<<<<<<<
+ * cdef yaml_encoding_t encoding
+ * cdef yaml_version_directive_t version_directive_value
+ */
+
+static int __pyx_f_5_yaml_8CEmitter__object_to_event(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_event_object, yaml_event_t *__pyx_v_event) {
+ yaml_encoding_t __pyx_v_encoding;
+ yaml_version_directive_t __pyx_v_version_directive_value;
+ yaml_version_directive_t *__pyx_v_version_directive;
+ yaml_tag_directive_t __pyx_v_tag_directives_value[0x80];
+ yaml_tag_directive_t *__pyx_v_tag_directives_start;
+ yaml_tag_directive_t *__pyx_v_tag_directives_end;
+ int __pyx_v_implicit;
+ int __pyx_v_plain_implicit;
+ int __pyx_v_quoted_implicit;
+ char *__pyx_v_anchor;
+ char *__pyx_v_tag;
+ char *__pyx_v_value;
+ int __pyx_v_length;
+ yaml_scalar_style_t __pyx_v_scalar_style;
+ yaml_sequence_style_t __pyx_v_sequence_style;
+ yaml_mapping_style_t __pyx_v_mapping_style;
+ PyObject *__pyx_v_event_class = NULL;
+ PyObject *__pyx_v_cache = NULL;
+ PyObject *__pyx_v_handle = NULL;
+ PyObject *__pyx_v_prefix = NULL;
+ PyObject *__pyx_v_anchor_object = NULL;
+ PyObject *__pyx_v_tag_object = NULL;
+ PyObject *__pyx_v_value_object = NULL;
+ PyObject *__pyx_v_style_object = NULL;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_t_2;
+ int __pyx_t_3;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ Py_ssize_t __pyx_t_6;
+ PyObject *(*__pyx_t_7)(PyObject *);
+ PyObject *__pyx_t_8 = NULL;
+ int __pyx_t_9;
+ __Pyx_RefNannySetupContext("_object_to_event", 0);
+
+ /* "_yaml.pyx":1033
+ * cdef yaml_sequence_style_t sequence_style
+ * cdef yaml_mapping_style_t mapping_style
+ * event_class = event_object.__class__ # <<<<<<<<<<<<<<
+ * if event_class is StreamStartEvent:
+ * encoding = YAML_UTF8_ENCODING
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_class); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1033, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_event_class = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1034
+ * cdef yaml_mapping_style_t mapping_style
+ * event_class = event_object.__class__
+ * if event_class is StreamStartEvent: # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF8_ENCODING
+ * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le':
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_StreamStartEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1034, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = (__pyx_v_event_class == __pyx_t_1);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1035
+ * event_class = event_object.__class__
+ * if event_class is StreamStartEvent:
+ * encoding = YAML_UTF8_ENCODING # <<<<<<<<<<<<<<
+ * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING
+ */
+ __pyx_v_encoding = YAML_UTF8_ENCODING;
+
+ /* "_yaml.pyx":1036
+ * if event_class is StreamStartEvent:
+ * encoding = YAML_UTF8_ENCODING
+ * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le': # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be':
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_encoding); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1036, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_kp_u_utf_16_le, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1036, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ if (!__pyx_t_2) {
+ } else {
+ __pyx_t_3 = __pyx_t_2;
+ goto __pyx_L5_bool_binop_done;
+ }
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_encoding); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1036, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_t_1, __pyx_kp_s_utf_16_le, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1036, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_3 = __pyx_t_2;
+ __pyx_L5_bool_binop_done:;
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1037
+ * encoding = YAML_UTF8_ENCODING
+ * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING # <<<<<<<<<<<<<<
+ * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be':
+ * encoding = YAML_UTF16BE_ENCODING
+ */
+ __pyx_v_encoding = YAML_UTF16LE_ENCODING;
+
+ /* "_yaml.pyx":1036
+ * if event_class is StreamStartEvent:
+ * encoding = YAML_UTF8_ENCODING
+ * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le': # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be':
+ */
+ goto __pyx_L4;
+ }
+
+ /* "_yaml.pyx":1038
+ * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be': # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF16BE_ENCODING
+ * if event_object.encoding is None:
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_encoding); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1038, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_kp_u_utf_16_be, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1038, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ if (!__pyx_t_2) {
+ } else {
+ __pyx_t_3 = __pyx_t_2;
+ goto __pyx_L7_bool_binop_done;
+ }
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_encoding); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1038, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_t_1, __pyx_kp_s_utf_16_be, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1038, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_3 = __pyx_t_2;
+ __pyx_L7_bool_binop_done:;
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1039
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be':
+ * encoding = YAML_UTF16BE_ENCODING # <<<<<<<<<<<<<<
+ * if event_object.encoding is None:
+ * self.dump_unicode = 1
+ */
+ __pyx_v_encoding = YAML_UTF16BE_ENCODING;
+
+ /* "_yaml.pyx":1038
+ * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be': # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF16BE_ENCODING
+ * if event_object.encoding is None:
+ */
+ }
+ __pyx_L4:;
+
+ /* "_yaml.pyx":1040
+ * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be':
+ * encoding = YAML_UTF16BE_ENCODING
+ * if event_object.encoding is None: # <<<<<<<<<<<<<<
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1:
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_encoding); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1040, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = (__pyx_t_1 == Py_None);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1041
+ * encoding = YAML_UTF16BE_ENCODING
+ * if event_object.encoding is None:
+ * self.dump_unicode = 1 # <<<<<<<<<<<<<<
+ * if self.dump_unicode == 1:
+ * encoding = YAML_UTF8_ENCODING
+ */
+ __pyx_v_self->dump_unicode = 1;
+
+ /* "_yaml.pyx":1040
+ * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be':
+ * encoding = YAML_UTF16BE_ENCODING
+ * if event_object.encoding is None: # <<<<<<<<<<<<<<
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1:
+ */
+ }
+
+ /* "_yaml.pyx":1042
+ * if event_object.encoding is None:
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1: # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(event, encoding)
+ */
+ __pyx_t_2 = ((__pyx_v_self->dump_unicode == 1) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1043
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1:
+ * encoding = YAML_UTF8_ENCODING # <<<<<<<<<<<<<<
+ * yaml_stream_start_event_initialize(event, encoding)
+ * elif event_class is StreamEndEvent:
+ */
+ __pyx_v_encoding = YAML_UTF8_ENCODING;
+
+ /* "_yaml.pyx":1042
+ * if event_object.encoding is None:
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1: # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(event, encoding)
+ */
+ }
+
+ /* "_yaml.pyx":1044
+ * if self.dump_unicode == 1:
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(event, encoding) # <<<<<<<<<<<<<<
+ * elif event_class is StreamEndEvent:
+ * yaml_stream_end_event_initialize(event)
+ */
+ (void)(yaml_stream_start_event_initialize(__pyx_v_event, __pyx_v_encoding));
+
+ /* "_yaml.pyx":1034
+ * cdef yaml_mapping_style_t mapping_style
+ * event_class = event_object.__class__
+ * if event_class is StreamStartEvent: # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF8_ENCODING
+ * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le':
+ */
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1045
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(event, encoding)
+ * elif event_class is StreamEndEvent: # <<<<<<<<<<<<<<
+ * yaml_stream_end_event_initialize(event)
+ * elif event_class is DocumentStartEvent:
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_StreamEndEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1045, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = (__pyx_v_event_class == __pyx_t_1);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1046
+ * yaml_stream_start_event_initialize(event, encoding)
+ * elif event_class is StreamEndEvent:
+ * yaml_stream_end_event_initialize(event) # <<<<<<<<<<<<<<
+ * elif event_class is DocumentStartEvent:
+ * version_directive = NULL
+ */
+ (void)(yaml_stream_end_event_initialize(__pyx_v_event));
+
+ /* "_yaml.pyx":1045
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(event, encoding)
+ * elif event_class is StreamEndEvent: # <<<<<<<<<<<<<<
+ * yaml_stream_end_event_initialize(event)
+ * elif event_class is DocumentStartEvent:
+ */
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1047
+ * elif event_class is StreamEndEvent:
+ * yaml_stream_end_event_initialize(event)
+ * elif event_class is DocumentStartEvent: # <<<<<<<<<<<<<<
+ * version_directive = NULL
+ * if event_object.version:
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_DocumentStartEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1047, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = (__pyx_v_event_class == __pyx_t_1);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1048
+ * yaml_stream_end_event_initialize(event)
+ * elif event_class is DocumentStartEvent:
+ * version_directive = NULL # <<<<<<<<<<<<<<
+ * if event_object.version:
+ * version_directive_value.major = event_object.version[0]
+ */
+ __pyx_v_version_directive = NULL;
+
+ /* "_yaml.pyx":1049
+ * elif event_class is DocumentStartEvent:
+ * version_directive = NULL
+ * if event_object.version: # <<<<<<<<<<<<<<
+ * version_directive_value.major = event_object.version[0]
+ * version_directive_value.minor = event_object.version[1]
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_version); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1049, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1049, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1050
+ * version_directive = NULL
+ * if event_object.version:
+ * version_directive_value.major = event_object.version[0] # <<<<<<<<<<<<<<
+ * version_directive_value.minor = event_object.version[1]
+ * version_directive = &version_directive_value
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_version); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1050, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = __Pyx_GetItemInt(__pyx_t_1, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1050, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_4); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 1050, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_version_directive_value.major = __pyx_t_5;
+
+ /* "_yaml.pyx":1051
+ * if event_object.version:
+ * version_directive_value.major = event_object.version[0]
+ * version_directive_value.minor = event_object.version[1] # <<<<<<<<<<<<<<
+ * version_directive = &version_directive_value
+ * tag_directives_start = NULL
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_version); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1051, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_1 = __Pyx_GetItemInt(__pyx_t_4, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1051, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 1051, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_version_directive_value.minor = __pyx_t_5;
+
+ /* "_yaml.pyx":1052
+ * version_directive_value.major = event_object.version[0]
+ * version_directive_value.minor = event_object.version[1]
+ * version_directive = &version_directive_value # <<<<<<<<<<<<<<
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL
+ */
+ __pyx_v_version_directive = (&__pyx_v_version_directive_value);
+
+ /* "_yaml.pyx":1049
+ * elif event_class is DocumentStartEvent:
+ * version_directive = NULL
+ * if event_object.version: # <<<<<<<<<<<<<<
+ * version_directive_value.major = event_object.version[0]
+ * version_directive_value.minor = event_object.version[1]
+ */
+ }
+
+ /* "_yaml.pyx":1053
+ * version_directive_value.minor = event_object.version[1]
+ * version_directive = &version_directive_value
+ * tag_directives_start = NULL # <<<<<<<<<<<<<<
+ * tag_directives_end = NULL
+ * if event_object.tags:
+ */
+ __pyx_v_tag_directives_start = NULL;
+
+ /* "_yaml.pyx":1054
+ * version_directive = &version_directive_value
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL # <<<<<<<<<<<<<<
+ * if event_object.tags:
+ * if len(event_object.tags) > 128:
+ */
+ __pyx_v_tag_directives_end = NULL;
+
+ /* "_yaml.pyx":1055
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL
+ * if event_object.tags: # <<<<<<<<<<<<<<
+ * if len(event_object.tags) > 128:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tags); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1055, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1055, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1056
+ * tag_directives_end = NULL
+ * if event_object.tags:
+ * if len(event_object.tags) > 128: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("too many tags")
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tags); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1056, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_6 = PyObject_Length(__pyx_t_1); if (unlikely(__pyx_t_6 == ((Py_ssize_t)-1))) __PYX_ERR(0, 1056, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_2 = ((__pyx_t_6 > 0x80) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1057
+ * if event_object.tags:
+ * if len(event_object.tags) > 128:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("too many tags")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1058
+ * if len(event_object.tags) > 128:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("too many tags") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"too many tags")
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__24, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1058, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(0, 1058, __pyx_L1_error)
+
+ /* "_yaml.pyx":1057
+ * if event_object.tags:
+ * if len(event_object.tags) > 128:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("too many tags")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1060
+ * raise ValueError("too many tags")
+ * else:
+ * raise ValueError(u"too many tags") # <<<<<<<<<<<<<<
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value
+ */
+ /*else*/ {
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__25, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1060, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(0, 1060, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1056
+ * tag_directives_end = NULL
+ * if event_object.tags:
+ * if len(event_object.tags) > 128: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("too many tags")
+ */
+ }
+
+ /* "_yaml.pyx":1061
+ * else:
+ * raise ValueError(u"too many tags")
+ * tag_directives_start = tag_directives_value # <<<<<<<<<<<<<<
+ * tag_directives_end = tag_directives_value
+ * cache = []
+ */
+ __pyx_v_tag_directives_start = __pyx_v_tag_directives_value;
+
+ /* "_yaml.pyx":1062
+ * raise ValueError(u"too many tags")
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value # <<<<<<<<<<<<<<
+ * cache = []
+ * for handle in event_object.tags:
+ */
+ __pyx_v_tag_directives_end = __pyx_v_tag_directives_value;
+
+ /* "_yaml.pyx":1063
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value
+ * cache = [] # <<<<<<<<<<<<<<
+ * for handle in event_object.tags:
+ * prefix = event_object.tags[handle]
+ */
+ __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1063, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_cache = ((PyObject*)__pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1064
+ * tag_directives_end = tag_directives_value
+ * cache = []
+ * for handle in event_object.tags: # <<<<<<<<<<<<<<
+ * prefix = event_object.tags[handle]
+ * if PyUnicode_CheckExact(handle):
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tags); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1064, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (likely(PyList_CheckExact(__pyx_t_1)) || PyTuple_CheckExact(__pyx_t_1)) {
+ __pyx_t_4 = __pyx_t_1; __Pyx_INCREF(__pyx_t_4); __pyx_t_6 = 0;
+ __pyx_t_7 = NULL;
+ } else {
+ __pyx_t_6 = -1; __pyx_t_4 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1064, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_7 = Py_TYPE(__pyx_t_4)->tp_iternext; if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1064, __pyx_L1_error)
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ for (;;) {
+ if (likely(!__pyx_t_7)) {
+ if (likely(PyList_CheckExact(__pyx_t_4))) {
+ if (__pyx_t_6 >= PyList_GET_SIZE(__pyx_t_4)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_1 = PyList_GET_ITEM(__pyx_t_4, __pyx_t_6); __Pyx_INCREF(__pyx_t_1); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 1064, __pyx_L1_error)
+ #else
+ __pyx_t_1 = PySequence_ITEM(__pyx_t_4, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1064, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ #endif
+ } else {
+ if (__pyx_t_6 >= PyTuple_GET_SIZE(__pyx_t_4)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_4, __pyx_t_6); __Pyx_INCREF(__pyx_t_1); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 1064, __pyx_L1_error)
+ #else
+ __pyx_t_1 = PySequence_ITEM(__pyx_t_4, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1064, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ #endif
+ }
+ } else {
+ __pyx_t_1 = __pyx_t_7(__pyx_t_4);
+ if (unlikely(!__pyx_t_1)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else __PYX_ERR(0, 1064, __pyx_L1_error)
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_1);
+ }
+ __Pyx_XDECREF_SET(__pyx_v_handle, __pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1065
+ * cache = []
+ * for handle in event_object.tags:
+ * prefix = event_object.tags[handle] # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(handle):
+ * handle = PyUnicode_AsUTF8String(handle)
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tags); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1065, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_8 = __Pyx_PyObject_GetItem(__pyx_t_1, __pyx_v_handle); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1065, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __Pyx_XDECREF_SET(__pyx_v_prefix, __pyx_t_8);
+ __pyx_t_8 = 0;
+
+ /* "_yaml.pyx":1066
+ * for handle in event_object.tags:
+ * prefix = event_object.tags[handle]
+ * if PyUnicode_CheckExact(handle): # <<<<<<<<<<<<<<
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle)
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_handle) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1067
+ * prefix = event_object.tags[handle]
+ * if PyUnicode_CheckExact(handle):
+ * handle = PyUnicode_AsUTF8String(handle) # <<<<<<<<<<<<<<
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle):
+ */
+ __pyx_t_8 = PyUnicode_AsUTF8String(__pyx_v_handle); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1067, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_DECREF_SET(__pyx_v_handle, __pyx_t_8);
+ __pyx_t_8 = 0;
+
+ /* "_yaml.pyx":1068
+ * if PyUnicode_CheckExact(handle):
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_9 = __Pyx_PyList_Append(__pyx_v_cache, __pyx_v_handle); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(0, 1068, __pyx_L1_error)
+
+ /* "_yaml.pyx":1066
+ * for handle in event_object.tags:
+ * prefix = event_object.tags[handle]
+ * if PyUnicode_CheckExact(handle): # <<<<<<<<<<<<<<
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle)
+ */
+ }
+
+ /* "_yaml.pyx":1069
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag handle must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_handle) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1070
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag handle must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1071
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag handle must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag handle must be a string")
+ */
+ __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__26, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1071, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_Raise(__pyx_t_8, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __PYX_ERR(0, 1071, __pyx_L1_error)
+
+ /* "_yaml.pyx":1070
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag handle must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1073
+ * raise TypeError("tag handle must be a string")
+ * else:
+ * raise TypeError(u"tag handle must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix):
+ */
+ /*else*/ {
+ __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__27, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1073, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_Raise(__pyx_t_8, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __PYX_ERR(0, 1073, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1069
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag handle must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1074
+ * else:
+ * raise TypeError(u"tag handle must be a string")
+ * tag_directives_end.handle = PyString_AS_STRING(handle) # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(prefix):
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ */
+ __pyx_v_tag_directives_end->handle = PyString_AS_STRING(__pyx_v_handle);
+
+ /* "_yaml.pyx":1075
+ * raise TypeError(u"tag handle must be a string")
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix): # <<<<<<<<<<<<<<
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix)
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_prefix) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1076
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix):
+ * prefix = PyUnicode_AsUTF8String(prefix) # <<<<<<<<<<<<<<
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix):
+ */
+ __pyx_t_8 = PyUnicode_AsUTF8String(__pyx_v_prefix); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1076, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_DECREF_SET(__pyx_v_prefix, __pyx_t_8);
+ __pyx_t_8 = 0;
+
+ /* "_yaml.pyx":1077
+ * if PyUnicode_CheckExact(prefix):
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_9 = __Pyx_PyList_Append(__pyx_v_cache, __pyx_v_prefix); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(0, 1077, __pyx_L1_error)
+
+ /* "_yaml.pyx":1075
+ * raise TypeError(u"tag handle must be a string")
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix): # <<<<<<<<<<<<<<
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix)
+ */
+ }
+
+ /* "_yaml.pyx":1078
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag prefix must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_prefix) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1079
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag prefix must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1080
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag prefix must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag prefix must be a string")
+ */
+ __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__28, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1080, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_Raise(__pyx_t_8, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __PYX_ERR(0, 1080, __pyx_L1_error)
+
+ /* "_yaml.pyx":1079
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag prefix must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1082
+ * raise TypeError("tag prefix must be a string")
+ * else:
+ * raise TypeError(u"tag prefix must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1
+ */
+ /*else*/ {
+ __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__29, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1082, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_Raise(__pyx_t_8, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __PYX_ERR(0, 1082, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1078
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag prefix must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1083
+ * else:
+ * raise TypeError(u"tag prefix must be a string")
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix) # <<<<<<<<<<<<<<
+ * tag_directives_end = tag_directives_end+1
+ * implicit = 1
+ */
+ __pyx_v_tag_directives_end->prefix = PyString_AS_STRING(__pyx_v_prefix);
+
+ /* "_yaml.pyx":1084
+ * raise TypeError(u"tag prefix must be a string")
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1 # <<<<<<<<<<<<<<
+ * implicit = 1
+ * if event_object.explicit:
+ */
+ __pyx_v_tag_directives_end = (__pyx_v_tag_directives_end + 1);
+
+ /* "_yaml.pyx":1064
+ * tag_directives_end = tag_directives_value
+ * cache = []
+ * for handle in event_object.tags: # <<<<<<<<<<<<<<
+ * prefix = event_object.tags[handle]
+ * if PyUnicode_CheckExact(handle):
+ */
+ }
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1055
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL
+ * if event_object.tags: # <<<<<<<<<<<<<<
+ * if len(event_object.tags) > 128:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ }
+
+ /* "_yaml.pyx":1085
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1
+ * implicit = 1 # <<<<<<<<<<<<<<
+ * if event_object.explicit:
+ * implicit = 0
+ */
+ __pyx_v_implicit = 1;
+
+ /* "_yaml.pyx":1086
+ * tag_directives_end = tag_directives_end+1
+ * implicit = 1
+ * if event_object.explicit: # <<<<<<<<<<<<<<
+ * implicit = 0
+ * if yaml_document_start_event_initialize(event, version_directive,
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_explicit); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1086, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1086, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1087
+ * implicit = 1
+ * if event_object.explicit:
+ * implicit = 0 # <<<<<<<<<<<<<<
+ * if yaml_document_start_event_initialize(event, version_directive,
+ * tag_directives_start, tag_directives_end, implicit) == 0:
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":1086
+ * tag_directives_end = tag_directives_end+1
+ * implicit = 1
+ * if event_object.explicit: # <<<<<<<<<<<<<<
+ * implicit = 0
+ * if yaml_document_start_event_initialize(event, version_directive,
+ */
+ }
+
+ /* "_yaml.pyx":1089
+ * implicit = 0
+ * if yaml_document_start_event_initialize(event, version_directive,
+ * tag_directives_start, tag_directives_end, implicit) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * elif event_class is DocumentEndEvent:
+ */
+ __pyx_t_2 = ((yaml_document_start_event_initialize(__pyx_v_event, __pyx_v_version_directive, __pyx_v_tag_directives_start, __pyx_v_tag_directives_end, __pyx_v_implicit) == 0) != 0);
+
+ /* "_yaml.pyx":1088
+ * if event_object.explicit:
+ * implicit = 0
+ * if yaml_document_start_event_initialize(event, version_directive, # <<<<<<<<<<<<<<
+ * tag_directives_start, tag_directives_end, implicit) == 0:
+ * raise MemoryError
+ */
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1090
+ * if yaml_document_start_event_initialize(event, version_directive,
+ * tag_directives_start, tag_directives_end, implicit) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * elif event_class is DocumentEndEvent:
+ * implicit = 1
+ */
+ PyErr_NoMemory(); __PYX_ERR(0, 1090, __pyx_L1_error)
+
+ /* "_yaml.pyx":1088
+ * if event_object.explicit:
+ * implicit = 0
+ * if yaml_document_start_event_initialize(event, version_directive, # <<<<<<<<<<<<<<
+ * tag_directives_start, tag_directives_end, implicit) == 0:
+ * raise MemoryError
+ */
+ }
+
+ /* "_yaml.pyx":1047
+ * elif event_class is StreamEndEvent:
+ * yaml_stream_end_event_initialize(event)
+ * elif event_class is DocumentStartEvent: # <<<<<<<<<<<<<<
+ * version_directive = NULL
+ * if event_object.version:
+ */
+ goto __pyx_L3;
+ }
+
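+  /* Each elif branch below mirrors the DocumentStartEvent handling above:
+ * the event class is compared by identity against its module-level class
+ * object, unicode attributes (anchor, tag, value) are re-encoded to UTF-8
+ * byte strings before PyString_AS_STRING() is applied, and MemoryError is
+ * raised whenever a checked *_event_initialize() call returns 0.
+ */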
+ /* "_yaml.pyx":1091
+ * tag_directives_start, tag_directives_end, implicit) == 0:
+ * raise MemoryError
+ * elif event_class is DocumentEndEvent: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * if event_object.explicit:
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_DocumentEndEvent); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1091, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = (__pyx_v_event_class == __pyx_t_4);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1092
+ * raise MemoryError
+ * elif event_class is DocumentEndEvent:
+ * implicit = 1 # <<<<<<<<<<<<<<
+ * if event_object.explicit:
+ * implicit = 0
+ */
+ __pyx_v_implicit = 1;
+
+ /* "_yaml.pyx":1093
+ * elif event_class is DocumentEndEvent:
+ * implicit = 1
+ * if event_object.explicit: # <<<<<<<<<<<<<<
+ * implicit = 0
+ * yaml_document_end_event_initialize(event, implicit)
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_explicit); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1093, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1093, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1094
+ * implicit = 1
+ * if event_object.explicit:
+ * implicit = 0 # <<<<<<<<<<<<<<
+ * yaml_document_end_event_initialize(event, implicit)
+ * elif event_class is AliasEvent:
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":1093
+ * elif event_class is DocumentEndEvent:
+ * implicit = 1
+ * if event_object.explicit: # <<<<<<<<<<<<<<
+ * implicit = 0
+ * yaml_document_end_event_initialize(event, implicit)
+ */
+ }
+
+ /* "_yaml.pyx":1095
+ * if event_object.explicit:
+ * implicit = 0
+ * yaml_document_end_event_initialize(event, implicit) # <<<<<<<<<<<<<<
+ * elif event_class is AliasEvent:
+ * anchor = NULL
+ */
+ (void)(yaml_document_end_event_initialize(__pyx_v_event, __pyx_v_implicit));
+
+ /* "_yaml.pyx":1091
+ * tag_directives_start, tag_directives_end, implicit) == 0:
+ * raise MemoryError
+ * elif event_class is DocumentEndEvent: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * if event_object.explicit:
+ */
+ goto __pyx_L3;
+ }
+
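+  /* The DocumentEndEvent branch above derives the inverted 'implicit' flag
+ * from event_object.explicit and discards the return value of
+ * yaml_document_end_event_initialize() (the (void) cast), matching the
+ * unchecked call in the .pyx source.
+ */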
+ /* "_yaml.pyx":1096
+ * implicit = 0
+ * yaml_document_end_event_initialize(event, implicit)
+ * elif event_class is AliasEvent: # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_AliasEvent); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1096, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = (__pyx_v_event_class == __pyx_t_4);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1097
+ * yaml_document_end_event_initialize(event, implicit)
+ * elif event_class is AliasEvent:
+ * anchor = NULL # <<<<<<<<<<<<<<
+ * anchor_object = event_object.anchor
+ * if PyUnicode_CheckExact(anchor_object):
+ */
+ __pyx_v_anchor = NULL;
+
+ /* "_yaml.pyx":1098
+ * elif event_class is AliasEvent:
+ * anchor = NULL
+ * anchor_object = event_object.anchor # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_anchor); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1098, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_anchor_object = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1099
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_anchor_object) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1100
+ * anchor_object = event_object.anchor
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_4 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1100, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_anchor_object, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1099
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ }
+
+ /* "_yaml.pyx":1101
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_anchor_object) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1102
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1103
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__30, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1103, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1103, __pyx_L1_error)
+
+ /* "_yaml.pyx":1102
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1105
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if yaml_alias_event_initialize(event, anchor) == 0:
+ */
+ /*else*/ {
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__31, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1105, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1105, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1101
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1106
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<<
+ * if yaml_alias_event_initialize(event, anchor) == 0:
+ * raise MemoryError
+ */
+ __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object);
+
+ /* "_yaml.pyx":1107
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if yaml_alias_event_initialize(event, anchor) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * elif event_class is ScalarEvent:
+ */
+ __pyx_t_2 = ((yaml_alias_event_initialize(__pyx_v_event, __pyx_v_anchor) == 0) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1108
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if yaml_alias_event_initialize(event, anchor) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * elif event_class is ScalarEvent:
+ * anchor = NULL
+ */
+ PyErr_NoMemory(); __PYX_ERR(0, 1108, __pyx_L1_error)
+
+ /* "_yaml.pyx":1107
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if yaml_alias_event_initialize(event, anchor) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * elif event_class is ScalarEvent:
+ */
+ }
+
+ /* "_yaml.pyx":1096
+ * implicit = 0
+ * yaml_document_end_event_initialize(event, implicit)
+ * elif event_class is AliasEvent: # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ */
+ goto __pyx_L3;
+ }
+
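+  /* AliasEvent carries only an anchor; the ScalarEvent branch below
+ * additionally handles the tag, the value, the (plain, quoted) implicit pair
+ * and the scalar style character before calling yaml_scalar_event_initialize().
+ */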
+ /* "_yaml.pyx":1109
+ * if yaml_alias_event_initialize(event, anchor) == 0:
+ * raise MemoryError
+ * elif event_class is ScalarEvent: # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_ScalarEvent); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1109, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = (__pyx_v_event_class == __pyx_t_4);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1110
+ * raise MemoryError
+ * elif event_class is ScalarEvent:
+ * anchor = NULL # <<<<<<<<<<<<<<
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ */
+ __pyx_v_anchor = NULL;
+
+ /* "_yaml.pyx":1111
+ * elif event_class is ScalarEvent:
+ * anchor = NULL
+ * anchor_object = event_object.anchor # <<<<<<<<<<<<<<
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_anchor); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1111, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_anchor_object = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1112
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ __pyx_t_3 = (__pyx_v_anchor_object != Py_None);
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1113
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_anchor_object) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1114
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_4 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1114, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_anchor_object, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1113
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ }
+
+ /* "_yaml.pyx":1115
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_anchor_object) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1116
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1117
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__30, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1117, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1117, __pyx_L1_error)
+
+ /* "_yaml.pyx":1116
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1119
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ */
+ /*else*/ {
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__31, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1119, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1119, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1115
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1120
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<<
+ * tag = NULL
+ * tag_object = event_object.tag
+ */
+ __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object);
+
+ /* "_yaml.pyx":1112
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ }
+
+ /* "_yaml.pyx":1121
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL # <<<<<<<<<<<<<<
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ */
+ __pyx_v_tag = NULL;
+
+ /* "_yaml.pyx":1122
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ * tag_object = event_object.tag # <<<<<<<<<<<<<<
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tag); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1122, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_tag_object = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1123
+ * tag = NULL
+ * tag_object = event_object.tag
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ __pyx_t_2 = (__pyx_v_tag_object != Py_None);
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1124
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ __pyx_t_3 = (PyUnicode_CheckExact(__pyx_v_tag_object) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1125
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_4 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1125, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_tag_object, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1124
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ }
+
+ /* "_yaml.pyx":1126
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ __pyx_t_3 = ((!(PyString_CheckExact(__pyx_v_tag_object) != 0)) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1127
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ __pyx_t_3 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_3)) {
+
+ /* "_yaml.pyx":1128
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__32, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1128, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1128, __pyx_L1_error)
+
+ /* "_yaml.pyx":1127
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1130
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = event_object.value
+ */
+ /*else*/ {
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__33, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1130, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1130, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1126
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1131
+ * else:
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<<
+ * value_object = event_object.value
+ * if PyUnicode_CheckExact(value_object):
+ */
+ __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object);
+
+ /* "_yaml.pyx":1123
+ * tag = NULL
+ * tag_object = event_object.tag
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ }
+
+ /* "_yaml.pyx":1132
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = event_object.value # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_value); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1132, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_value_object = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1133
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = event_object.value
+ * if PyUnicode_CheckExact(value_object): # <<<<<<<<<<<<<<
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object):
+ */
+ __pyx_t_3 = (PyUnicode_CheckExact(__pyx_v_value_object) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1134
+ * value_object = event_object.value
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_4 = PyUnicode_AsUTF8String(__pyx_v_value_object); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1134, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_value_object, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1133
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = event_object.value
+ * if PyUnicode_CheckExact(value_object): # <<<<<<<<<<<<<<
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object):
+ */
+ }
+
+ /* "_yaml.pyx":1135
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("value must be a string")
+ */
+ __pyx_t_3 = ((!(PyString_CheckExact(__pyx_v_value_object) != 0)) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1136
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("value must be a string")
+ * else:
+ */
+ __pyx_t_3 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_3)) {
+
+ /* "_yaml.pyx":1137
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("value must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"value must be a string")
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__34, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1137, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1137, __pyx_L1_error)
+
+ /* "_yaml.pyx":1136
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("value must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1139
+ * raise TypeError("value must be a string")
+ * else:
+ * raise TypeError(u"value must be a string") # <<<<<<<<<<<<<<
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object)
+ */
+ /*else*/ {
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__35, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1139, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1139, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1135
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("value must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1140
+ * else:
+ * raise TypeError(u"value must be a string")
+ * value = PyString_AS_STRING(value_object) # <<<<<<<<<<<<<<
+ * length = PyString_GET_SIZE(value_object)
+ * plain_implicit = 0
+ */
+ __pyx_v_value = PyString_AS_STRING(__pyx_v_value_object);
+
+ /* "_yaml.pyx":1141
+ * raise TypeError(u"value must be a string")
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object) # <<<<<<<<<<<<<<
+ * plain_implicit = 0
+ * quoted_implicit = 0
+ */
+ __pyx_v_length = PyString_GET_SIZE(__pyx_v_value_object);
+
+ /* "_yaml.pyx":1142
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object)
+ * plain_implicit = 0 # <<<<<<<<<<<<<<
+ * quoted_implicit = 0
+ * if event_object.implicit is not None:
+ */
+ __pyx_v_plain_implicit = 0;
+
+ /* "_yaml.pyx":1143
+ * length = PyString_GET_SIZE(value_object)
+ * plain_implicit = 0
+ * quoted_implicit = 0 # <<<<<<<<<<<<<<
+ * if event_object.implicit is not None:
+ * plain_implicit = event_object.implicit[0]
+ */
+ __pyx_v_quoted_implicit = 0;
+
+ /* "_yaml.pyx":1144
+ * plain_implicit = 0
+ * quoted_implicit = 0
+ * if event_object.implicit is not None: # <<<<<<<<<<<<<<
+ * plain_implicit = event_object.implicit[0]
+ * quoted_implicit = event_object.implicit[1]
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_implicit); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1144, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = (__pyx_t_4 != Py_None);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1145
+ * quoted_implicit = 0
+ * if event_object.implicit is not None:
+ * plain_implicit = event_object.implicit[0] # <<<<<<<<<<<<<<
+ * quoted_implicit = event_object.implicit[1]
+ * style_object = event_object.style
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_implicit); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1145, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_8 = __Pyx_GetItemInt(__pyx_t_4, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1145, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_8); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 1145, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __pyx_v_plain_implicit = __pyx_t_5;
+
+ /* "_yaml.pyx":1146
+ * if event_object.implicit is not None:
+ * plain_implicit = event_object.implicit[0]
+ * quoted_implicit = event_object.implicit[1] # <<<<<<<<<<<<<<
+ * style_object = event_object.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ */
+ __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_implicit); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1146, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __pyx_t_4 = __Pyx_GetItemInt(__pyx_t_8, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1146, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_4); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 1146, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_quoted_implicit = __pyx_t_5;
+
+ /* "_yaml.pyx":1144
+ * plain_implicit = 0
+ * quoted_implicit = 0
+ * if event_object.implicit is not None: # <<<<<<<<<<<<<<
+ * plain_implicit = event_object.implicit[0]
+ * quoted_implicit = event_object.implicit[1]
+ */
+ }
+
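+  /* event_object.implicit is expected to be a (plain, quoted) pair here:
+ * index 0 feeds plain_implicit and index 1 feeds quoted_implicit, each
+ * coerced to a C int.
+ */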
+ /* "_yaml.pyx":1147
+ * plain_implicit = event_object.implicit[0]
+ * quoted_implicit = event_object.implicit[1]
+ * style_object = event_object.style # <<<<<<<<<<<<<<
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'":
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_style); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1147, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_style_object = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1148
+ * quoted_implicit = event_object.implicit[1]
+ * style_object = event_object.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_PLAIN_SCALAR_STYLE;
+
+ /* "_yaml.pyx":1149
+ * style_object = event_object.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"":
+ */
+ __pyx_t_3 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__7, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1149, __pyx_L1_error)
+ if (!__pyx_t_3) {
+ } else {
+ __pyx_t_2 = __pyx_t_3;
+ goto __pyx_L43_bool_binop_done;
+ }
+ __pyx_t_3 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__7, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1149, __pyx_L1_error)
+ __pyx_t_2 = __pyx_t_3;
+ __pyx_L43_bool_binop_done:;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1150
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE;
+
+ /* "_yaml.pyx":1149
+ * style_object = event_object.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"":
+ */
+ goto __pyx_L42;
+ }
+
+ /* "_yaml.pyx":1151
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|":
+ */
+ __pyx_t_3 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__8, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1151, __pyx_L1_error)
+ if (!__pyx_t_3) {
+ } else {
+ __pyx_t_2 = __pyx_t_3;
+ goto __pyx_L45_bool_binop_done;
+ }
+ __pyx_t_3 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__8, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1151, __pyx_L1_error)
+ __pyx_t_2 = __pyx_t_3;
+ __pyx_L45_bool_binop_done:;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1152
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE;
+
+ /* "_yaml.pyx":1151
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|":
+ */
+ goto __pyx_L42;
+ }
+
+ /* "_yaml.pyx":1153
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">":
+ */
+ __pyx_t_3 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__9, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1153, __pyx_L1_error)
+ if (!__pyx_t_3) {
+ } else {
+ __pyx_t_2 = __pyx_t_3;
+ goto __pyx_L47_bool_binop_done;
+ }
+ __pyx_t_3 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__9, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1153, __pyx_L1_error)
+ __pyx_t_2 = __pyx_t_3;
+ __pyx_L47_bool_binop_done:;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1154
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * elif style_object == ">" or style_object == u">":
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_LITERAL_SCALAR_STYLE;
+
+ /* "_yaml.pyx":1153
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">":
+ */
+ goto __pyx_L42;
+ }
+
+ /* "_yaml.pyx":1155
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(event, anchor, tag, value, length,
+ */
+ __pyx_t_3 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__10, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1155, __pyx_L1_error)
+ if (!__pyx_t_3) {
+ } else {
+ __pyx_t_2 = __pyx_t_3;
+ goto __pyx_L49_bool_binop_done;
+ }
+ __pyx_t_3 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__10, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1155, __pyx_L1_error)
+ __pyx_t_2 = __pyx_t_3;
+ __pyx_L49_bool_binop_done:;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1156
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">":
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * if yaml_scalar_event_initialize(event, anchor, tag, value, length,
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ */
+ __pyx_v_scalar_style = YAML_FOLDED_SCALAR_STYLE;
+
+ /* "_yaml.pyx":1155
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(event, anchor, tag, value, length,
+ */
+ }
+ __pyx_L42:;
+
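+  /* Style dispatch above: "'" selects single-quoted, '"' double-quoted,
+ * '|' literal and '>' folded style; anything else keeps the default
+ * YAML_PLAIN_SCALAR_STYLE. Both str and unicode forms of each style
+ * character are accepted.
+ */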
+ /* "_yaml.pyx":1158
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(event, anchor, tag, value, length,
+ * plain_implicit, quoted_implicit, scalar_style) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * elif event_class is SequenceStartEvent:
+ */
+ __pyx_t_2 = ((yaml_scalar_event_initialize(__pyx_v_event, __pyx_v_anchor, __pyx_v_tag, __pyx_v_value, __pyx_v_length, __pyx_v_plain_implicit, __pyx_v_quoted_implicit, __pyx_v_scalar_style) == 0) != 0);
+
+ /* "_yaml.pyx":1157
+ * elif style_object == ">" or style_object == u">":
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(event, anchor, tag, value, length, # <<<<<<<<<<<<<<
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError
+ */
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1159
+ * if yaml_scalar_event_initialize(event, anchor, tag, value, length,
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * elif event_class is SequenceStartEvent:
+ * anchor = NULL
+ */
+ PyErr_NoMemory(); __PYX_ERR(0, 1159, __pyx_L1_error)
+
+ /* "_yaml.pyx":1157
+ * elif style_object == ">" or style_object == u">":
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(event, anchor, tag, value, length, # <<<<<<<<<<<<<<
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError
+ */
+ }
+
+ /* "_yaml.pyx":1109
+ * if yaml_alias_event_initialize(event, anchor) == 0:
+ * raise MemoryError
+ * elif event_class is ScalarEvent: # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ */
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1160
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError
+ * elif event_class is SequenceStartEvent: # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_SequenceStartEvent); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1160, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = (__pyx_v_event_class == __pyx_t_4);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1161
+ * raise MemoryError
+ * elif event_class is SequenceStartEvent:
+ * anchor = NULL # <<<<<<<<<<<<<<
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ */
+ __pyx_v_anchor = NULL;
+
+ /* "_yaml.pyx":1162
+ * elif event_class is SequenceStartEvent:
+ * anchor = NULL
+ * anchor_object = event_object.anchor # <<<<<<<<<<<<<<
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_anchor); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1162, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_anchor_object = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1163
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ __pyx_t_3 = (__pyx_v_anchor_object != Py_None);
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1164
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_anchor_object) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1165
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_4 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1165, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_anchor_object, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1164
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ }
+
+ /* "_yaml.pyx":1166
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_anchor_object) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1167
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1168
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__30, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1168, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1168, __pyx_L1_error)
+
+ /* "_yaml.pyx":1167
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1170
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ */
+ /*else*/ {
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__31, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1170, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1170, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1166
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1171
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<<
+ * tag = NULL
+ * tag_object = event_object.tag
+ */
+ __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object);
+
+ /* "_yaml.pyx":1163
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ }
+
+ /* "_yaml.pyx":1172
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL # <<<<<<<<<<<<<<
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ */
+ __pyx_v_tag = NULL;
+
+ /* "_yaml.pyx":1173
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ * tag_object = event_object.tag # <<<<<<<<<<<<<<
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tag); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1173, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_tag_object = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1174
+ * tag = NULL
+ * tag_object = event_object.tag
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ __pyx_t_2 = (__pyx_v_tag_object != Py_None);
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1175
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ __pyx_t_3 = (PyUnicode_CheckExact(__pyx_v_tag_object) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1176
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_4 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1176, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_tag_object, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1175
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ }
+
+ /* "_yaml.pyx":1177
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ __pyx_t_3 = ((!(PyString_CheckExact(__pyx_v_tag_object) != 0)) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1178
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ __pyx_t_3 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_3)) {
+
+ /* "_yaml.pyx":1179
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__32, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1179, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1179, __pyx_L1_error)
+
+ /* "_yaml.pyx":1178
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1181
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0
+ */
+ /*else*/ {
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__33, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1181, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1181, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1177
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1182
+ * else:
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<<
+ * implicit = 0
+ * if event_object.implicit:
+ */
+ __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object);
+
+ /* "_yaml.pyx":1174
+ * tag = NULL
+ * tag_object = event_object.tag
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ }
+
+ /* "_yaml.pyx":1183
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0 # <<<<<<<<<<<<<<
+ * if event_object.implicit:
+ * implicit = 1
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":1184
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0
+ * if event_object.implicit: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_implicit); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1184, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1184, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1185
+ * implicit = 0
+ * if event_object.implicit:
+ * implicit = 1 # <<<<<<<<<<<<<<
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if event_object.flow_style:
+ */
+ __pyx_v_implicit = 1;
+
+ /* "_yaml.pyx":1184
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0
+ * if event_object.implicit: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ */
+ }
+
+ /* "_yaml.pyx":1186
+ * if event_object.implicit:
+ * implicit = 1
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE # <<<<<<<<<<<<<<
+ * if event_object.flow_style:
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ */
+ __pyx_v_sequence_style = YAML_BLOCK_SEQUENCE_STYLE;
+
+ /* "_yaml.pyx":1187
+ * implicit = 1
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if event_object.flow_style: # <<<<<<<<<<<<<<
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(event, anchor, tag,
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_flow_style); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1187, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1187, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1188
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if event_object.flow_style:
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE # <<<<<<<<<<<<<<
+ * if yaml_sequence_start_event_initialize(event, anchor, tag,
+ * implicit, sequence_style) == 0:
+ */
+ __pyx_v_sequence_style = YAML_FLOW_SEQUENCE_STYLE;
+
+ /* "_yaml.pyx":1187
+ * implicit = 1
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if event_object.flow_style: # <<<<<<<<<<<<<<
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(event, anchor, tag,
+ */
+ }
+
+ /* "_yaml.pyx":1190
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(event, anchor, tag,
+ * implicit, sequence_style) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * elif event_class is MappingStartEvent:
+ */
+ __pyx_t_3 = ((yaml_sequence_start_event_initialize(__pyx_v_event, __pyx_v_anchor, __pyx_v_tag, __pyx_v_implicit, __pyx_v_sequence_style) == 0) != 0);
+
+ /* "_yaml.pyx":1189
+ * if event_object.flow_style:
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(event, anchor, tag, # <<<<<<<<<<<<<<
+ * implicit, sequence_style) == 0:
+ * raise MemoryError
+ */
+ if (unlikely(__pyx_t_3)) {
+
+ /* "_yaml.pyx":1191
+ * if yaml_sequence_start_event_initialize(event, anchor, tag,
+ * implicit, sequence_style) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * elif event_class is MappingStartEvent:
+ * anchor = NULL
+ */
+ PyErr_NoMemory(); __PYX_ERR(0, 1191, __pyx_L1_error)
+
+ /* "_yaml.pyx":1189
+ * if event_object.flow_style:
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(event, anchor, tag, # <<<<<<<<<<<<<<
+ * implicit, sequence_style) == 0:
+ * raise MemoryError
+ */
+ }
+
+ /* "_yaml.pyx":1160
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError
+ * elif event_class is SequenceStartEvent: # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ */
+ goto __pyx_L3;
+ }
+
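+  /* SequenceStartEvent (above) and MappingStartEvent (below) share the same
+ * anchor/tag handling and differ only in the style flag: a truthy
+ * event_object.flow_style selects YAML_FLOW_SEQUENCE_STYLE or
+ * YAML_FLOW_MAPPING_STYLE over the block default.
+ */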
+ /* "_yaml.pyx":1192
+ * implicit, sequence_style) == 0:
+ * raise MemoryError
+ * elif event_class is MappingStartEvent: # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_MappingStartEvent); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1192, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = (__pyx_v_event_class == __pyx_t_4);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1193
+ * raise MemoryError
+ * elif event_class is MappingStartEvent:
+ * anchor = NULL # <<<<<<<<<<<<<<
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ */
+ __pyx_v_anchor = NULL;
+
+ /* "_yaml.pyx":1194
+ * elif event_class is MappingStartEvent:
+ * anchor = NULL
+ * anchor_object = event_object.anchor # <<<<<<<<<<<<<<
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_anchor); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1194, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_anchor_object = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1195
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ __pyx_t_2 = (__pyx_v_anchor_object != Py_None);
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1196
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ __pyx_t_3 = (PyUnicode_CheckExact(__pyx_v_anchor_object) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1197
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_4 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1197, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_anchor_object, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1196
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ }
+
+ /* "_yaml.pyx":1198
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ __pyx_t_3 = ((!(PyString_CheckExact(__pyx_v_anchor_object) != 0)) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1199
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ __pyx_t_3 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_3)) {
+
+ /* "_yaml.pyx":1200
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__30, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1200, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1200, __pyx_L1_error)
+
+ /* "_yaml.pyx":1199
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1202
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ */
+ /*else*/ {
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__31, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1202, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1202, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1198
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1203
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<<
+ * tag = NULL
+ * tag_object = event_object.tag
+ */
+ __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object);
+
+ /* "_yaml.pyx":1195
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ }
+
+ /* "_yaml.pyx":1204
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL # <<<<<<<<<<<<<<
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ */
+ __pyx_v_tag = NULL;
+
+ /* "_yaml.pyx":1205
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ * tag_object = event_object.tag # <<<<<<<<<<<<<<
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tag); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1205, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_tag_object = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1206
+ * tag = NULL
+ * tag_object = event_object.tag
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ __pyx_t_3 = (__pyx_v_tag_object != Py_None);
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1207
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_tag_object) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1208
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_4 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1208, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_tag_object, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1207
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ }
+
+ /* "_yaml.pyx":1209
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_tag_object) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1210
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1211
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__32, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1211, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1211, __pyx_L1_error)
+
+ /* "_yaml.pyx":1210
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1213
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0
+ */
+ /*else*/ {
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__33, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1213, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1213, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1209
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1214
+ * else:
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<<
+ * implicit = 0
+ * if event_object.implicit:
+ */
+ __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object);
+
+ /* "_yaml.pyx":1206
+ * tag = NULL
+ * tag_object = event_object.tag
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ }
+
+ /* "_yaml.pyx":1215
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0 # <<<<<<<<<<<<<<
+ * if event_object.implicit:
+ * implicit = 1
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":1216
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0
+ * if event_object.implicit: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_implicit); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1216, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1216, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1217
+ * implicit = 0
+ * if event_object.implicit:
+ * implicit = 1 # <<<<<<<<<<<<<<
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if event_object.flow_style:
+ */
+ __pyx_v_implicit = 1;
+
+ /* "_yaml.pyx":1216
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0
+ * if event_object.implicit: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ */
+ }
+
+ /* "_yaml.pyx":1218
+ * if event_object.implicit:
+ * implicit = 1
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE # <<<<<<<<<<<<<<
+ * if event_object.flow_style:
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ */
+ __pyx_v_mapping_style = YAML_BLOCK_MAPPING_STYLE;
+
+ /* "_yaml.pyx":1219
+ * implicit = 1
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if event_object.flow_style: # <<<<<<<<<<<<<<
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(event, anchor, tag,
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_flow_style); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1219, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1219, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1220
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if event_object.flow_style:
+ * mapping_style = YAML_FLOW_MAPPING_STYLE # <<<<<<<<<<<<<<
+ * if yaml_mapping_start_event_initialize(event, anchor, tag,
+ * implicit, mapping_style) == 0:
+ */
+ __pyx_v_mapping_style = YAML_FLOW_MAPPING_STYLE;
+
+ /* "_yaml.pyx":1219
+ * implicit = 1
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if event_object.flow_style: # <<<<<<<<<<<<<<
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(event, anchor, tag,
+ */
+ }
+
+ /* "_yaml.pyx":1222
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(event, anchor, tag,
+ * implicit, mapping_style) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * elif event_class is SequenceEndEvent:
+ */
+ __pyx_t_2 = ((yaml_mapping_start_event_initialize(__pyx_v_event, __pyx_v_anchor, __pyx_v_tag, __pyx_v_implicit, __pyx_v_mapping_style) == 0) != 0);
+
+ /* "_yaml.pyx":1221
+ * if event_object.flow_style:
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(event, anchor, tag, # <<<<<<<<<<<<<<
+ * implicit, mapping_style) == 0:
+ * raise MemoryError
+ */
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1223
+ * if yaml_mapping_start_event_initialize(event, anchor, tag,
+ * implicit, mapping_style) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * elif event_class is SequenceEndEvent:
+ * yaml_sequence_end_event_initialize(event)
+ */
+ PyErr_NoMemory(); __PYX_ERR(0, 1223, __pyx_L1_error)
+
+ /* "_yaml.pyx":1221
+ * if event_object.flow_style:
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(event, anchor, tag, # <<<<<<<<<<<<<<
+ * implicit, mapping_style) == 0:
+ * raise MemoryError
+ */
+ }
+
+ /* "_yaml.pyx":1192
+ * implicit, sequence_style) == 0:
+ * raise MemoryError
+ * elif event_class is MappingStartEvent: # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ */
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1224
+ * implicit, mapping_style) == 0:
+ * raise MemoryError
+ * elif event_class is SequenceEndEvent: # <<<<<<<<<<<<<<
+ * yaml_sequence_end_event_initialize(event)
+ * elif event_class is MappingEndEvent:
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_SequenceEndEvent); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1224, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = (__pyx_v_event_class == __pyx_t_4);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1225
+ * raise MemoryError
+ * elif event_class is SequenceEndEvent:
+ * yaml_sequence_end_event_initialize(event) # <<<<<<<<<<<<<<
+ * elif event_class is MappingEndEvent:
+ * yaml_mapping_end_event_initialize(event)
+ */
+ (void)(yaml_sequence_end_event_initialize(__pyx_v_event));
+
+ /* "_yaml.pyx":1224
+ * implicit, mapping_style) == 0:
+ * raise MemoryError
+ * elif event_class is SequenceEndEvent: # <<<<<<<<<<<<<<
+ * yaml_sequence_end_event_initialize(event)
+ * elif event_class is MappingEndEvent:
+ */
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1226
+ * elif event_class is SequenceEndEvent:
+ * yaml_sequence_end_event_initialize(event)
+ * elif event_class is MappingEndEvent: # <<<<<<<<<<<<<<
+ * yaml_mapping_end_event_initialize(event)
+ * else:
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_MappingEndEvent); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1226, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = (__pyx_v_event_class == __pyx_t_4);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1227
+ * yaml_sequence_end_event_initialize(event)
+ * elif event_class is MappingEndEvent:
+ * yaml_mapping_end_event_initialize(event) # <<<<<<<<<<<<<<
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ (void)(yaml_mapping_end_event_initialize(__pyx_v_event));
+
+ /* "_yaml.pyx":1226
+ * elif event_class is SequenceEndEvent:
+ * yaml_sequence_end_event_initialize(event)
+ * elif event_class is MappingEndEvent: # <<<<<<<<<<<<<<
+ * yaml_mapping_end_event_initialize(event)
+ * else:
+ */
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1229
+ * yaml_mapping_end_event_initialize(event)
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("invalid event %s" % event_object)
+ * else:
+ */
+ /*else*/ {
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1230
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("invalid event %s" % event_object) # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"invalid event %s" % event_object)
+ */
+ __pyx_t_4 = __Pyx_PyString_FormatSafe(__pyx_kp_s_invalid_event_s, __pyx_v_event_object); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1230, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_8 = __Pyx_PyObject_CallOneArg(__pyx_builtin_TypeError, __pyx_t_4); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1230, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_Raise(__pyx_t_8, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __PYX_ERR(0, 1230, __pyx_L1_error)
+
+ /* "_yaml.pyx":1229
+ * yaml_mapping_end_event_initialize(event)
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("invalid event %s" % event_object)
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1232
+ * raise TypeError("invalid event %s" % event_object)
+ * else:
+ * raise TypeError(u"invalid event %s" % event_object) # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+ /*else*/ {
+ __pyx_t_8 = __Pyx_PyUnicode_FormatSafe(__pyx_kp_u_invalid_event_s, __pyx_v_event_object); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1232, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_TypeError, __pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1232, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1232, __pyx_L1_error)
+ }
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":1233
+ * else:
+ * raise TypeError(u"invalid event %s" % event_object)
+ * return 1 # <<<<<<<<<<<<<<
+ *
+ * def emit(self, event_object):
+ */
+ __pyx_r = 1;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":1016
+ * raise ValueError(u"no emitter error")
+ *
+ * cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0: # <<<<<<<<<<<<<<
+ * cdef yaml_encoding_t encoding
+ * cdef yaml_version_directive_t version_directive_value
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_8);
+ __Pyx_AddTraceback("_yaml.CEmitter._object_to_event", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_event_class);
+ __Pyx_XDECREF(__pyx_v_cache);
+ __Pyx_XDECREF(__pyx_v_handle);
+ __Pyx_XDECREF(__pyx_v_prefix);
+ __Pyx_XDECREF(__pyx_v_anchor_object);
+ __Pyx_XDECREF(__pyx_v_tag_object);
+ __Pyx_XDECREF(__pyx_v_value_object);
+ __Pyx_XDECREF(__pyx_v_style_object);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":1235
+ * return 1
+ *
+ * def emit(self, event_object): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * self._object_to_event(event_object, &event)
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_8CEmitter_7emit(PyObject *__pyx_v_self, PyObject *__pyx_v_event_object); /*proto*/
+static PyObject *__pyx_pw_5_yaml_8CEmitter_7emit(PyObject *__pyx_v_self, PyObject *__pyx_v_event_object) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("emit (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter_6emit(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self), ((PyObject *)__pyx_v_event_object));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_8CEmitter_6emit(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_event_object) {
+ yaml_event_t __pyx_v_event;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ __Pyx_RefNannySetupContext("emit", 0);
+
+ /* "_yaml.pyx":1237
+ * def emit(self, event_object):
+ * cdef yaml_event_t event
+ * self._object_to_event(event_object, &event) # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_object_to_event(__pyx_v_self, __pyx_v_event_object, (&__pyx_v_event)); if (unlikely(__pyx_t_1 == ((int)0))) __PYX_ERR(0, 1237, __pyx_L1_error)
+
+ /* "_yaml.pyx":1238
+ * cdef yaml_event_t event
+ * self._object_to_event(event_object, &event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_1 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1238, __pyx_L1_error)
+ __pyx_t_2 = ((__pyx_t_1 == 0) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1239
+ * self._object_to_event(event_object, &event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ *
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1239, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_error = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":1240
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ *
+ * def open(self):
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 1240, __pyx_L1_error)
+
+ /* "_yaml.pyx":1238
+ * cdef yaml_event_t event
+ * self._object_to_event(event_object, &event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":1235
+ * return 1
+ *
+ * def emit(self, event_object): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * self._object_to_event(event_object, &event)
+ */
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CEmitter.emit", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":1242
+ * raise error
+ *
+ * def open(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef yaml_encoding_t encoding
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_8CEmitter_9open(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_8CEmitter_9open(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("open (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter_8open(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_8CEmitter_8open(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self) {
+ yaml_event_t __pyx_v_event;
+ yaml_encoding_t __pyx_v_encoding;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ int __pyx_t_3;
+ PyObject *__pyx_t_4 = NULL;
+ PyObject *__pyx_t_5 = NULL;
+ PyObject *__pyx_t_6 = NULL;
+ __Pyx_RefNannySetupContext("open", 0);
+
+ /* "_yaml.pyx":1245
+ * cdef yaml_event_t event
+ * cdef yaml_encoding_t encoding
+ * if self.closed == -1: # <<<<<<<<<<<<<<
+ * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING
+ */
+ switch (__pyx_v_self->closed) {
+ case -1L:
+
+ /* "_yaml.pyx":1246
+ * cdef yaml_encoding_t encoding
+ * if self.closed == -1:
+ * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le': # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be':
+ */
+ __pyx_t_2 = (__Pyx_PyUnicode_Equals(__pyx_v_self->use_encoding, __pyx_kp_u_utf_16_le, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1246, __pyx_L1_error)
+ if (!__pyx_t_2) {
+ } else {
+ __pyx_t_1 = __pyx_t_2;
+ goto __pyx_L4_bool_binop_done;
+ }
+ __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_v_self->use_encoding, __pyx_kp_s_utf_16_le, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1246, __pyx_L1_error)
+ __pyx_t_1 = __pyx_t_2;
+ __pyx_L4_bool_binop_done:;
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1247
+ * if self.closed == -1:
+ * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING # <<<<<<<<<<<<<<
+ * elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be':
+ * encoding = YAML_UTF16BE_ENCODING
+ */
+ __pyx_v_encoding = YAML_UTF16LE_ENCODING;
+
+ /* "_yaml.pyx":1246
+ * cdef yaml_encoding_t encoding
+ * if self.closed == -1:
+ * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le': # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be':
+ */
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1248
+ * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be': # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF16BE_ENCODING
+ * else:
+ */
+ __pyx_t_2 = (__Pyx_PyUnicode_Equals(__pyx_v_self->use_encoding, __pyx_kp_u_utf_16_be, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1248, __pyx_L1_error)
+ if (!__pyx_t_2) {
+ } else {
+ __pyx_t_1 = __pyx_t_2;
+ goto __pyx_L6_bool_binop_done;
+ }
+ __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_v_self->use_encoding, __pyx_kp_s_utf_16_be, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1248, __pyx_L1_error)
+ __pyx_t_1 = __pyx_t_2;
+ __pyx_L6_bool_binop_done:;
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1249
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be':
+ * encoding = YAML_UTF16BE_ENCODING # <<<<<<<<<<<<<<
+ * else:
+ * encoding = YAML_UTF8_ENCODING
+ */
+ __pyx_v_encoding = YAML_UTF16BE_ENCODING;
+
+ /* "_yaml.pyx":1248
+ * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be': # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF16BE_ENCODING
+ * else:
+ */
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1251
+ * encoding = YAML_UTF16BE_ENCODING
+ * else:
+ * encoding = YAML_UTF8_ENCODING # <<<<<<<<<<<<<<
+ * if self.use_encoding is None:
+ * self.dump_unicode = 1
+ */
+ /*else*/ {
+ __pyx_v_encoding = YAML_UTF8_ENCODING;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":1252
+ * else:
+ * encoding = YAML_UTF8_ENCODING
+ * if self.use_encoding is None: # <<<<<<<<<<<<<<
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1:
+ */
+ __pyx_t_1 = (__pyx_v_self->use_encoding == Py_None);
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1253
+ * encoding = YAML_UTF8_ENCODING
+ * if self.use_encoding is None:
+ * self.dump_unicode = 1 # <<<<<<<<<<<<<<
+ * if self.dump_unicode == 1:
+ * encoding = YAML_UTF8_ENCODING
+ */
+ __pyx_v_self->dump_unicode = 1;
+
+ /* "_yaml.pyx":1252
+ * else:
+ * encoding = YAML_UTF8_ENCODING
+ * if self.use_encoding is None: # <<<<<<<<<<<<<<
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1:
+ */
+ }
+
+ /* "_yaml.pyx":1254
+ * if self.use_encoding is None:
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1: # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(&event, encoding)
+ */
+ __pyx_t_2 = ((__pyx_v_self->dump_unicode == 1) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1255
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1:
+ * encoding = YAML_UTF8_ENCODING # <<<<<<<<<<<<<<
+ * yaml_stream_start_event_initialize(&event, encoding)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_v_encoding = YAML_UTF8_ENCODING;
+
+ /* "_yaml.pyx":1254
+ * if self.use_encoding is None:
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1: # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(&event, encoding)
+ */
+ }
+
+ /* "_yaml.pyx":1256
+ * if self.dump_unicode == 1:
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(&event, encoding) # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ (void)(yaml_stream_start_event_initialize((&__pyx_v_event), __pyx_v_encoding));
+
+ /* "_yaml.pyx":1257
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(&event, encoding)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_3 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1257, __pyx_L1_error)
+ __pyx_t_2 = ((__pyx_t_3 == 0) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1258
+ * yaml_stream_start_event_initialize(&event, encoding)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * self.closed = 0
+ */
+ __pyx_t_4 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1258, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_error = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1259
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * self.closed = 0
+ * elif self.closed == 1:
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 1259, __pyx_L1_error)
+
+ /* "_yaml.pyx":1257
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(&event, encoding)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":1260
+ * error = self._emitter_error()
+ * raise error
+ * self.closed = 0 # <<<<<<<<<<<<<<
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_v_self->closed = 0;
+
+ /* "_yaml.pyx":1245
+ * cdef yaml_event_t event
+ * cdef yaml_encoding_t encoding
+ * if self.closed == -1: # <<<<<<<<<<<<<<
+ * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING
+ */
+ break;
+ case 1:
+
+ /* "_yaml.pyx":1262
+ * self.closed = 0
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is closed")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1263
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is closed") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is closed")
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_SerializerError); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1263, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) {
+ __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5);
+ if (likely(__pyx_t_6)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
+ __Pyx_INCREF(__pyx_t_6);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_5, function);
+ }
+ }
+ __pyx_t_4 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_6, __pyx_kp_s_serializer_is_closed) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_kp_s_serializer_is_closed);
+ __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
+ if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1263, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1263, __pyx_L1_error)
+
+ /* "_yaml.pyx":1262
+ * self.closed = 0
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is closed")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1265
+ * raise SerializerError("serializer is closed")
+ * else:
+ * raise SerializerError(u"serializer is closed") # <<<<<<<<<<<<<<
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ /*else*/ {
+ __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_SerializerError); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1265, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) {
+ __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5);
+ if (likely(__pyx_t_6)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
+ __Pyx_INCREF(__pyx_t_6);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_5, function);
+ }
+ }
+ __pyx_t_4 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_6, __pyx_kp_u_serializer_is_closed) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_kp_u_serializer_is_closed);
+ __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
+ if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1265, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1265, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1261
+ * raise error
+ * self.closed = 0
+ * elif self.closed == 1: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is closed")
+ */
+ break;
+ default:
+
+ /* "_yaml.pyx":1267
+ * raise SerializerError(u"serializer is closed")
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is already opened")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1268
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is already opened") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is already opened")
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_SerializerError); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1268, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) {
+ __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5);
+ if (likely(__pyx_t_6)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
+ __Pyx_INCREF(__pyx_t_6);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_5, function);
+ }
+ }
+ __pyx_t_4 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_6, __pyx_kp_s_serializer_is_already_opened) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_kp_s_serializer_is_already_opened);
+ __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
+ if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1268, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1268, __pyx_L1_error)
+
+ /* "_yaml.pyx":1267
+ * raise SerializerError(u"serializer is closed")
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is already opened")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1270
+ * raise SerializerError("serializer is already opened")
+ * else:
+ * raise SerializerError(u"serializer is already opened") # <<<<<<<<<<<<<<
+ *
+ * def close(self):
+ */
+ /*else*/ {
+ __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_SerializerError); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1270, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) {
+ __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5);
+ if (likely(__pyx_t_6)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
+ __Pyx_INCREF(__pyx_t_6);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_5, function);
+ }
+ }
+ __pyx_t_4 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_6, __pyx_kp_u_serializer_is_already_opened) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_kp_u_serializer_is_already_opened);
+ __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
+ if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1270, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __PYX_ERR(0, 1270, __pyx_L1_error)
+ }
+ break;
+ }
+
+ /* "_yaml.pyx":1242
+ * raise error
+ *
+ * def open(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef yaml_encoding_t encoding
+ */
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6);
+ __Pyx_AddTraceback("_yaml.CEmitter.open", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":1272
+ * raise SerializerError(u"serializer is already opened")
+ *
+ * def close(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * if self.closed == -1:
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_8CEmitter_11close(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_8CEmitter_11close(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("close (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter_10close(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_8CEmitter_10close(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self) {
+ yaml_event_t __pyx_v_event;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ __Pyx_RefNannySetupContext("close", 0);
+
+ /* "_yaml.pyx":1274
+ * def close(self):
+ * cdef yaml_event_t event
+ * if self.closed == -1: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is not opened")
+ */
+ switch (__pyx_v_self->closed) {
+ case -1L:
+
+ /* "_yaml.pyx":1275
+ * cdef yaml_event_t event
+ * if self.closed == -1:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is not opened")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":1276
+ * if self.closed == -1:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is not opened") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is not opened")
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_SerializerError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1276, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ }
+ }
+ __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_kp_s_serializer_is_not_opened) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_kp_s_serializer_is_not_opened);
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1276, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __PYX_ERR(0, 1276, __pyx_L1_error)
+
+ /* "_yaml.pyx":1275
+ * cdef yaml_event_t event
+ * if self.closed == -1:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is not opened")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1278
+ * raise SerializerError("serializer is not opened")
+ * else:
+ * raise SerializerError(u"serializer is not opened") # <<<<<<<<<<<<<<
+ * elif self.closed == 0:
+ * yaml_stream_end_event_initialize(&event)
+ */
+ /*else*/ {
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_SerializerError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1278, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ }
+ }
+ __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_kp_u_serializer_is_not_opened) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_kp_u_serializer_is_not_opened);
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1278, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __PYX_ERR(0, 1278, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1274
+ * def close(self):
+ * cdef yaml_event_t event
+ * if self.closed == -1: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is not opened")
+ */
+ break;
+ case 0:
+
+ /* "_yaml.pyx":1280
+ * raise SerializerError(u"serializer is not opened")
+ * elif self.closed == 0:
+ * yaml_stream_end_event_initialize(&event) # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ (void)(yaml_stream_end_event_initialize((&__pyx_v_event)));
+
+ /* "_yaml.pyx":1281
+ * elif self.closed == 0:
+ * yaml_stream_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_5 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1281, __pyx_L1_error)
+ __pyx_t_1 = ((__pyx_t_5 == 0) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":1282
+ * yaml_stream_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * self.closed = 1
+ */
+ __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1282, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_error = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1283
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * self.closed = 1
+ *
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 1283, __pyx_L1_error)
+
+ /* "_yaml.pyx":1281
+ * elif self.closed == 0:
+ * yaml_stream_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":1284
+ * error = self._emitter_error()
+ * raise error
+ * self.closed = 1 # <<<<<<<<<<<<<<
+ *
+ * def serialize(self, node):
+ */
+ __pyx_v_self->closed = 1;
+
+ /* "_yaml.pyx":1279
+ * else:
+ * raise SerializerError(u"serializer is not opened")
+ * elif self.closed == 0: # <<<<<<<<<<<<<<
+ * yaml_stream_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":1272
+ * raise SerializerError(u"serializer is already opened")
+ *
+ * def close(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * if self.closed == -1:
+ */
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("_yaml.CEmitter.close", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":1286
+ * self.closed = 1
+ *
+ * def serialize(self, node): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef yaml_version_directive_t version_directive_value
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_8CEmitter_13serialize(PyObject *__pyx_v_self, PyObject *__pyx_v_node); /*proto*/
+static PyObject *__pyx_pw_5_yaml_8CEmitter_13serialize(PyObject *__pyx_v_self, PyObject *__pyx_v_node) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("serialize (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter_12serialize(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self), ((PyObject *)__pyx_v_node));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_8CEmitter_12serialize(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_node) {
+ yaml_event_t __pyx_v_event;
+ yaml_version_directive_t __pyx_v_version_directive_value;
+ yaml_version_directive_t *__pyx_v_version_directive;
+ yaml_tag_directive_t __pyx_v_tag_directives_value[0x80];
+ yaml_tag_directive_t *__pyx_v_tag_directives_start;
+ yaml_tag_directive_t *__pyx_v_tag_directives_end;
+ PyObject *__pyx_v_cache = NULL;
+ PyObject *__pyx_v_handle = NULL;
+ PyObject *__pyx_v_prefix = NULL;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ Py_ssize_t __pyx_t_6;
+ PyObject *(*__pyx_t_7)(PyObject *);
+ int __pyx_t_8;
+ __Pyx_RefNannySetupContext("serialize", 0);
+
+ /* "_yaml.pyx":1293
+ * cdef yaml_tag_directive_t *tag_directives_start
+ * cdef yaml_tag_directive_t *tag_directives_end
+ * if self.closed == -1: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is not opened")
+ */
+ switch (__pyx_v_self->closed) {
+ case -1L:
+
+ /* "_yaml.pyx":1294
+ * cdef yaml_tag_directive_t *tag_directives_end
+ * if self.closed == -1:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is not opened")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":1295
+ * if self.closed == -1:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is not opened") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is not opened")
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_SerializerError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1295, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ }
+ }
+ __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_kp_s_serializer_is_not_opened) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_kp_s_serializer_is_not_opened);
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1295, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __PYX_ERR(0, 1295, __pyx_L1_error)
+
+ /* "_yaml.pyx":1294
+ * cdef yaml_tag_directive_t *tag_directives_end
+ * if self.closed == -1:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is not opened")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1297
+ * raise SerializerError("serializer is not opened")
+ * else:
+ * raise SerializerError(u"serializer is not opened") # <<<<<<<<<<<<<<
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ /*else*/ {
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_SerializerError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1297, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ }
+ }
+ __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_kp_u_serializer_is_not_opened) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_kp_u_serializer_is_not_opened);
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1297, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __PYX_ERR(0, 1297, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1293
+ * cdef yaml_tag_directive_t *tag_directives_start
+ * cdef yaml_tag_directive_t *tag_directives_end
+ * if self.closed == -1: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is not opened")
+ */
+ break;
+ case 1:
+
+ /* "_yaml.pyx":1299
+ * raise SerializerError(u"serializer is not opened")
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is closed")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":1300
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is closed") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is closed")
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_SerializerError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1300, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ }
+ }
+ __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_kp_s_serializer_is_closed) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_kp_s_serializer_is_closed);
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1300, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __PYX_ERR(0, 1300, __pyx_L1_error)
+
+ /* "_yaml.pyx":1299
+ * raise SerializerError(u"serializer is not opened")
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is closed")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1302
+ * raise SerializerError("serializer is closed")
+ * else:
+ * raise SerializerError(u"serializer is closed") # <<<<<<<<<<<<<<
+ * cache = []
+ * version_directive = NULL
+ */
+ /*else*/ {
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_SerializerError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1302, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ }
+ }
+ __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_kp_u_serializer_is_closed) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_kp_u_serializer_is_closed);
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1302, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __PYX_ERR(0, 1302, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1298
+ * else:
+ * raise SerializerError(u"serializer is not opened")
+ * elif self.closed == 1: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is closed")
+ */
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":1303
+ * else:
+ * raise SerializerError(u"serializer is closed")
+ * cache = [] # <<<<<<<<<<<<<<
+ * version_directive = NULL
+ * if self.use_version:
+ */
+ __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1303, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_cache = ((PyObject*)__pyx_t_2);
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1304
+ * raise SerializerError(u"serializer is closed")
+ * cache = []
+ * version_directive = NULL # <<<<<<<<<<<<<<
+ * if self.use_version:
+ * version_directive_value.major = self.use_version[0]
+ */
+ __pyx_v_version_directive = NULL;
+
+ /* "_yaml.pyx":1305
+ * cache = []
+ * version_directive = NULL
+ * if self.use_version: # <<<<<<<<<<<<<<
+ * version_directive_value.major = self.use_version[0]
+ * version_directive_value.minor = self.use_version[1]
+ */
+ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_self->use_version); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 1305, __pyx_L1_error)
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1306
+ * version_directive = NULL
+ * if self.use_version:
+ * version_directive_value.major = self.use_version[0] # <<<<<<<<<<<<<<
+ * version_directive_value.minor = self.use_version[1]
+ * version_directive = &version_directive_value
+ */
+ __pyx_t_2 = __Pyx_GetItemInt(__pyx_v_self->use_version, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1306, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 1306, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_v_version_directive_value.major = __pyx_t_5;
+
+ /* "_yaml.pyx":1307
+ * if self.use_version:
+ * version_directive_value.major = self.use_version[0]
+ * version_directive_value.minor = self.use_version[1] # <<<<<<<<<<<<<<
+ * version_directive = &version_directive_value
+ * tag_directives_start = NULL
+ */
+ __pyx_t_2 = __Pyx_GetItemInt(__pyx_v_self->use_version, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1307, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 1307, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_v_version_directive_value.minor = __pyx_t_5;
+
+ /* "_yaml.pyx":1308
+ * version_directive_value.major = self.use_version[0]
+ * version_directive_value.minor = self.use_version[1]
+ * version_directive = &version_directive_value # <<<<<<<<<<<<<<
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL
+ */
+ __pyx_v_version_directive = (&__pyx_v_version_directive_value);
+
+ /* "_yaml.pyx":1305
+ * cache = []
+ * version_directive = NULL
+ * if self.use_version: # <<<<<<<<<<<<<<
+ * version_directive_value.major = self.use_version[0]
+ * version_directive_value.minor = self.use_version[1]
+ */
+ }
+
+ /* "_yaml.pyx":1309
+ * version_directive_value.minor = self.use_version[1]
+ * version_directive = &version_directive_value
+ * tag_directives_start = NULL # <<<<<<<<<<<<<<
+ * tag_directives_end = NULL
+ * if self.use_tags:
+ */
+ __pyx_v_tag_directives_start = NULL;
+
+ /* "_yaml.pyx":1310
+ * version_directive = &version_directive_value
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL # <<<<<<<<<<<<<<
+ * if self.use_tags:
+ * if len(self.use_tags) > 128:
+ */
+ __pyx_v_tag_directives_end = NULL;
+
+ /* "_yaml.pyx":1311
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL
+ * if self.use_tags: # <<<<<<<<<<<<<<
+ * if len(self.use_tags) > 128:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_self->use_tags); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 1311, __pyx_L1_error)
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1312
+ * tag_directives_end = NULL
+ * if self.use_tags:
+ * if len(self.use_tags) > 128: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("too many tags")
+ */
+ __pyx_t_2 = __pyx_v_self->use_tags;
+ __Pyx_INCREF(__pyx_t_2);
+ __pyx_t_6 = PyObject_Length(__pyx_t_2); if (unlikely(__pyx_t_6 == ((Py_ssize_t)-1))) __PYX_ERR(0, 1312, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_t_1 = ((__pyx_t_6 > 0x80) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1313
+ * if self.use_tags:
+ * if len(self.use_tags) > 128:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("too many tags")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":1314
+ * if len(self.use_tags) > 128:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("too many tags") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"too many tags")
+ */
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__24, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1314, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __PYX_ERR(0, 1314, __pyx_L1_error)
+
+ /* "_yaml.pyx":1313
+ * if self.use_tags:
+ * if len(self.use_tags) > 128:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("too many tags")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1316
+ * raise ValueError("too many tags")
+ * else:
+ * raise ValueError(u"too many tags") # <<<<<<<<<<<<<<
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value
+ */
+ /*else*/ {
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__25, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1316, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __PYX_ERR(0, 1316, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1312
+ * tag_directives_end = NULL
+ * if self.use_tags:
+ * if len(self.use_tags) > 128: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("too many tags")
+ */
+ }
+
+ /* "_yaml.pyx":1317
+ * else:
+ * raise ValueError(u"too many tags")
+ * tag_directives_start = tag_directives_value # <<<<<<<<<<<<<<
+ * tag_directives_end = tag_directives_value
+ * for handle in self.use_tags:
+ */
+ __pyx_v_tag_directives_start = __pyx_v_tag_directives_value;
+
+ /* "_yaml.pyx":1318
+ * raise ValueError(u"too many tags")
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value # <<<<<<<<<<<<<<
+ * for handle in self.use_tags:
+ * prefix = self.use_tags[handle]
+ */
+ __pyx_v_tag_directives_end = __pyx_v_tag_directives_value;
+
+ /* "_yaml.pyx":1319
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value
+ * for handle in self.use_tags: # <<<<<<<<<<<<<<
+ * prefix = self.use_tags[handle]
+ * if PyUnicode_CheckExact(handle):
+ */
+ if (likely(PyList_CheckExact(__pyx_v_self->use_tags)) || PyTuple_CheckExact(__pyx_v_self->use_tags)) {
+ __pyx_t_2 = __pyx_v_self->use_tags; __Pyx_INCREF(__pyx_t_2); __pyx_t_6 = 0;
+ __pyx_t_7 = NULL;
+ } else {
+ __pyx_t_6 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_self->use_tags); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1319, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_7 = Py_TYPE(__pyx_t_2)->tp_iternext; if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1319, __pyx_L1_error)
+ }
+ for (;;) {
+ if (likely(!__pyx_t_7)) {
+ if (likely(PyList_CheckExact(__pyx_t_2))) {
+ if (__pyx_t_6 >= PyList_GET_SIZE(__pyx_t_2)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_6); __Pyx_INCREF(__pyx_t_3); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 1319, __pyx_L1_error)
+ #else
+ __pyx_t_3 = PySequence_ITEM(__pyx_t_2, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1319, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ #endif
+ } else {
+ if (__pyx_t_6 >= PyTuple_GET_SIZE(__pyx_t_2)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_6); __Pyx_INCREF(__pyx_t_3); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 1319, __pyx_L1_error)
+ #else
+ __pyx_t_3 = PySequence_ITEM(__pyx_t_2, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1319, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ #endif
+ }
+ } else {
+ __pyx_t_3 = __pyx_t_7(__pyx_t_2);
+ if (unlikely(!__pyx_t_3)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else __PYX_ERR(0, 1319, __pyx_L1_error)
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_3);
+ }
+ __Pyx_XDECREF_SET(__pyx_v_handle, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":1320
+ * tag_directives_end = tag_directives_value
+ * for handle in self.use_tags:
+ * prefix = self.use_tags[handle] # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(handle):
+ * handle = PyUnicode_AsUTF8String(handle)
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetItem(__pyx_v_self->use_tags, __pyx_v_handle); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1320, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_XDECREF_SET(__pyx_v_prefix, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":1321
+ * for handle in self.use_tags:
+ * prefix = self.use_tags[handle]
+ * if PyUnicode_CheckExact(handle): # <<<<<<<<<<<<<<
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle)
+ */
+ __pyx_t_1 = (PyUnicode_CheckExact(__pyx_v_handle) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1322
+ * prefix = self.use_tags[handle]
+ * if PyUnicode_CheckExact(handle):
+ * handle = PyUnicode_AsUTF8String(handle) # <<<<<<<<<<<<<<
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle):
+ */
+ __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_handle); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1322, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF_SET(__pyx_v_handle, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":1323
+ * if PyUnicode_CheckExact(handle):
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_8 = __Pyx_PyList_Append(__pyx_v_cache, __pyx_v_handle); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 1323, __pyx_L1_error)
+
+ /* "_yaml.pyx":1321
+ * for handle in self.use_tags:
+ * prefix = self.use_tags[handle]
+ * if PyUnicode_CheckExact(handle): # <<<<<<<<<<<<<<
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle)
+ */
+ }
+
+ /* "_yaml.pyx":1324
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag handle must be a string")
+ */
+ __pyx_t_1 = ((!(PyString_CheckExact(__pyx_v_handle) != 0)) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1325
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag handle must be a string")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":1326
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag handle must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag handle must be a string")
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__26, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1326, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __PYX_ERR(0, 1326, __pyx_L1_error)
+
+ /* "_yaml.pyx":1325
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag handle must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1328
+ * raise TypeError("tag handle must be a string")
+ * else:
+ * raise TypeError(u"tag handle must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix):
+ */
+ /*else*/ {
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__27, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1328, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __PYX_ERR(0, 1328, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1324
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag handle must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1329
+ * else:
+ * raise TypeError(u"tag handle must be a string")
+ * tag_directives_end.handle = PyString_AS_STRING(handle) # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(prefix):
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ */
+ __pyx_v_tag_directives_end->handle = PyString_AS_STRING(__pyx_v_handle);
+
+ /* "_yaml.pyx":1330
+ * raise TypeError(u"tag handle must be a string")
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix): # <<<<<<<<<<<<<<
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix)
+ */
+ __pyx_t_1 = (PyUnicode_CheckExact(__pyx_v_prefix) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1331
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix):
+ * prefix = PyUnicode_AsUTF8String(prefix) # <<<<<<<<<<<<<<
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix):
+ */
+ __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_prefix); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1331, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF_SET(__pyx_v_prefix, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":1332
+ * if PyUnicode_CheckExact(prefix):
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_8 = __Pyx_PyList_Append(__pyx_v_cache, __pyx_v_prefix); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 1332, __pyx_L1_error)
+
+ /* "_yaml.pyx":1330
+ * raise TypeError(u"tag handle must be a string")
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix): # <<<<<<<<<<<<<<
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix)
+ */
+ }
+
+ /* "_yaml.pyx":1333
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag prefix must be a string")
+ */
+ __pyx_t_1 = ((!(PyString_CheckExact(__pyx_v_prefix) != 0)) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1334
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag prefix must be a string")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":1335
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag prefix must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag prefix must be a string")
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__28, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1335, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __PYX_ERR(0, 1335, __pyx_L1_error)
+
+ /* "_yaml.pyx":1334
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag prefix must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1337
+ * raise TypeError("tag prefix must be a string")
+ * else:
+ * raise TypeError(u"tag prefix must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1
+ */
+ /*else*/ {
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__29, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1337, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __PYX_ERR(0, 1337, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1333
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag prefix must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1338
+ * else:
+ * raise TypeError(u"tag prefix must be a string")
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix) # <<<<<<<<<<<<<<
+ * tag_directives_end = tag_directives_end+1
+ * if yaml_document_start_event_initialize(&event, version_directive,
+ */
+ __pyx_v_tag_directives_end->prefix = PyString_AS_STRING(__pyx_v_prefix);
+
+ /* "_yaml.pyx":1339
+ * raise TypeError(u"tag prefix must be a string")
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1 # <<<<<<<<<<<<<<
+ * if yaml_document_start_event_initialize(&event, version_directive,
+ * tag_directives_start, tag_directives_end,
+ */
+ __pyx_v_tag_directives_end = (__pyx_v_tag_directives_end + 1);
+
+ /* "_yaml.pyx":1319
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value
+ * for handle in self.use_tags: # <<<<<<<<<<<<<<
+ * prefix = self.use_tags[handle]
+ * if PyUnicode_CheckExact(handle):
+ */
+ }
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1311
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL
+ * if self.use_tags: # <<<<<<<<<<<<<<
+ * if len(self.use_tags) > 128:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ }
+
+ /* "_yaml.pyx":1342
+ * if yaml_document_start_event_initialize(&event, version_directive,
+ * tag_directives_start, tag_directives_end,
+ * self.document_start_implicit) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_1 = ((yaml_document_start_event_initialize((&__pyx_v_event), __pyx_v_version_directive, __pyx_v_tag_directives_start, __pyx_v_tag_directives_end, __pyx_v_self->document_start_implicit) == 0) != 0);
+
+ /* "_yaml.pyx":1340
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1
+ * if yaml_document_start_event_initialize(&event, version_directive, # <<<<<<<<<<<<<<
+ * tag_directives_start, tag_directives_end,
+ * self.document_start_implicit) == 0:
+ */
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":1343
+ * tag_directives_start, tag_directives_end,
+ * self.document_start_implicit) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ PyErr_NoMemory(); __PYX_ERR(0, 1343, __pyx_L1_error)
+
+ /* "_yaml.pyx":1340
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1
+ * if yaml_document_start_event_initialize(&event, version_directive, # <<<<<<<<<<<<<<
+ * tag_directives_start, tag_directives_end,
+ * self.document_start_implicit) == 0:
+ */
+ }
+
+ /* "_yaml.pyx":1344
+ * self.document_start_implicit) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_5 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1344, __pyx_L1_error)
+ __pyx_t_1 = ((__pyx_t_5 == 0) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":1345
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * self._anchor_node(node)
+ */
+ __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1345, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_error = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1346
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * self._anchor_node(node)
+ * self._serialize_node(node, None, None)
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 1346, __pyx_L1_error)
+
+ /* "_yaml.pyx":1344
+ * self.document_start_implicit) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":1347
+ * error = self._emitter_error()
+ * raise error
+ * self._anchor_node(node) # <<<<<<<<<<<<<<
+ * self._serialize_node(node, None, None)
+ * yaml_document_end_event_initialize(&event, self.document_end_implicit)
+ */
+ __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_anchor_node(__pyx_v_self, __pyx_v_node); if (unlikely(__pyx_t_5 == ((int)0))) __PYX_ERR(0, 1347, __pyx_L1_error)
+
+ /* "_yaml.pyx":1348
+ * raise error
+ * self._anchor_node(node)
+ * self._serialize_node(node, None, None) # <<<<<<<<<<<<<<
+ * yaml_document_end_event_initialize(&event, self.document_end_implicit)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_serialize_node(__pyx_v_self, __pyx_v_node, Py_None, Py_None); if (unlikely(__pyx_t_5 == ((int)0))) __PYX_ERR(0, 1348, __pyx_L1_error)
+
+ /* "_yaml.pyx":1349
+ * self._anchor_node(node)
+ * self._serialize_node(node, None, None)
+ * yaml_document_end_event_initialize(&event, self.document_end_implicit) # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ (void)(yaml_document_end_event_initialize((&__pyx_v_event), __pyx_v_self->document_end_implicit));
+
+ /* "_yaml.pyx":1350
+ * self._serialize_node(node, None, None)
+ * yaml_document_end_event_initialize(&event, self.document_end_implicit)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_5 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1350, __pyx_L1_error)
+ __pyx_t_1 = ((__pyx_t_5 == 0) != 0);
+ if (unlikely(__pyx_t_1)) {
+
+ /* "_yaml.pyx":1351
+ * yaml_document_end_event_initialize(&event, self.document_end_implicit)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * self.serialized_nodes = {}
+ */
+ __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1351, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_error = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1352
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * self.serialized_nodes = {}
+ * self.anchors = {}
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 1352, __pyx_L1_error)
+
+ /* "_yaml.pyx":1350
+ * self._serialize_node(node, None, None)
+ * yaml_document_end_event_initialize(&event, self.document_end_implicit)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":1353
+ * error = self._emitter_error()
+ * raise error
+ * self.serialized_nodes = {} # <<<<<<<<<<<<<<
+ * self.anchors = {}
+ * self.last_alias_id = 0
+ */
+ __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1353, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_GOTREF(__pyx_v_self->serialized_nodes);
+ __Pyx_DECREF(__pyx_v_self->serialized_nodes);
+ __pyx_v_self->serialized_nodes = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1354
+ * raise error
+ * self.serialized_nodes = {}
+ * self.anchors = {} # <<<<<<<<<<<<<<
+ * self.last_alias_id = 0
+ *
+ */
+ __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1354, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_GOTREF(__pyx_v_self->anchors);
+ __Pyx_DECREF(__pyx_v_self->anchors);
+ __pyx_v_self->anchors = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1355
+ * self.serialized_nodes = {}
+ * self.anchors = {}
+ * self.last_alias_id = 0 # <<<<<<<<<<<<<<
+ *
+ * cdef int _anchor_node(self, object node) except 0:
+ */
+ __pyx_v_self->last_alias_id = 0;
+
+ /* "_yaml.pyx":1286
+ * self.closed = 1
+ *
+ * def serialize(self, node): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef yaml_version_directive_t version_directive_value
+ */
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("_yaml.CEmitter.serialize", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_cache);
+ __Pyx_XDECREF(__pyx_v_handle);
+ __Pyx_XDECREF(__pyx_v_prefix);
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":1357
+ * self.last_alias_id = 0
+ *
+ * cdef int _anchor_node(self, object node) except 0: # <<<<<<<<<<<<<<
+ * if node in self.anchors:
+ * if self.anchors[node] is None:
+ */
+
+static int __pyx_f_5_yaml_8CEmitter__anchor_node(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_node) {
+ PyObject *__pyx_v_node_class = NULL;
+ PyObject *__pyx_v_item = NULL;
+ PyObject *__pyx_v_key = NULL;
+ PyObject *__pyx_v_value = NULL;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ Py_ssize_t __pyx_t_5;
+ PyObject *(*__pyx_t_6)(PyObject *);
+ int __pyx_t_7;
+ PyObject *__pyx_t_8 = NULL;
+ PyObject *__pyx_t_9 = NULL;
+ PyObject *__pyx_t_10 = NULL;
+ PyObject *(*__pyx_t_11)(PyObject *);
+ __Pyx_RefNannySetupContext("_anchor_node", 0);
+
+ /* "_yaml.pyx":1358
+ *
+ * cdef int _anchor_node(self, object node) except 0:
+ * if node in self.anchors: # <<<<<<<<<<<<<<
+ * if self.anchors[node] is None:
+ * self.last_alias_id = self.last_alias_id+1
+ */
+ __pyx_t_1 = (__Pyx_PySequence_ContainsTF(__pyx_v_node, __pyx_v_self->anchors, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 1358, __pyx_L1_error)
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1359
+ * cdef int _anchor_node(self, object node) except 0:
+ * if node in self.anchors:
+ * if self.anchors[node] is None: # <<<<<<<<<<<<<<
+ * self.last_alias_id = self.last_alias_id+1
+ * self.anchors[node] = u"id%03d" % self.last_alias_id
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_node); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1359, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = (__pyx_t_3 == Py_None);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_1 = (__pyx_t_2 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1360
+ * if node in self.anchors:
+ * if self.anchors[node] is None:
+ * self.last_alias_id = self.last_alias_id+1 # <<<<<<<<<<<<<<
+ * self.anchors[node] = u"id%03d" % self.last_alias_id
+ * else:
+ */
+ __pyx_v_self->last_alias_id = (__pyx_v_self->last_alias_id + 1);
+
+ /* "_yaml.pyx":1361
+ * if self.anchors[node] is None:
+ * self.last_alias_id = self.last_alias_id+1
+ * self.anchors[node] = u"id%03d" % self.last_alias_id # <<<<<<<<<<<<<<
+ * else:
+ * self.anchors[node] = None
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->last_alias_id); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1361, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyUnicode_Format(__pyx_kp_u_id_03d, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1361, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (unlikely(PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_node, __pyx_t_4) < 0)) __PYX_ERR(0, 1361, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1359
+ * cdef int _anchor_node(self, object node) except 0:
+ * if node in self.anchors:
+ * if self.anchors[node] is None: # <<<<<<<<<<<<<<
+ * self.last_alias_id = self.last_alias_id+1
+ * self.anchors[node] = u"id%03d" % self.last_alias_id
+ */
+ }
+
+ /* "_yaml.pyx":1358
+ *
+ * cdef int _anchor_node(self, object node) except 0:
+ * if node in self.anchors: # <<<<<<<<<<<<<<
+ * if self.anchors[node] is None:
+ * self.last_alias_id = self.last_alias_id+1
+ */
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1363
+ * self.anchors[node] = u"id%03d" % self.last_alias_id
+ * else:
+ * self.anchors[node] = None # <<<<<<<<<<<<<<
+ * node_class = node.__class__
+ * if node_class is SequenceNode:
+ */
+ /*else*/ {
+ if (unlikely(PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_node, Py_None) < 0)) __PYX_ERR(0, 1363, __pyx_L1_error)
+
+ /* "_yaml.pyx":1364
+ * else:
+ * self.anchors[node] = None
+ * node_class = node.__class__ # <<<<<<<<<<<<<<
+ * if node_class is SequenceNode:
+ * for item in node.value:
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_class); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1364, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_node_class = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1365
+ * self.anchors[node] = None
+ * node_class = node.__class__
+ * if node_class is SequenceNode: # <<<<<<<<<<<<<<
+ * for item in node.value:
+ * self._anchor_node(item)
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_SequenceNode); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1365, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_1 = (__pyx_v_node_class == __pyx_t_4);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1366
+ * node_class = node.__class__
+ * if node_class is SequenceNode:
+ * for item in node.value: # <<<<<<<<<<<<<<
+ * self._anchor_node(item)
+ * elif node_class is MappingNode:
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1366, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ if (likely(PyList_CheckExact(__pyx_t_4)) || PyTuple_CheckExact(__pyx_t_4)) {
+ __pyx_t_3 = __pyx_t_4; __Pyx_INCREF(__pyx_t_3); __pyx_t_5 = 0;
+ __pyx_t_6 = NULL;
+ } else {
+ __pyx_t_5 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1366, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_6 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1366, __pyx_L1_error)
+ }
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ for (;;) {
+ if (likely(!__pyx_t_6)) {
+ if (likely(PyList_CheckExact(__pyx_t_3))) {
+ if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_3)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_4 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_5); __Pyx_INCREF(__pyx_t_4); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 1366, __pyx_L1_error)
+ #else
+ __pyx_t_4 = PySequence_ITEM(__pyx_t_3, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1366, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ #endif
+ } else {
+ if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_3)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_4 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_5); __Pyx_INCREF(__pyx_t_4); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 1366, __pyx_L1_error)
+ #else
+ __pyx_t_4 = PySequence_ITEM(__pyx_t_3, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1366, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ #endif
+ }
+ } else {
+ __pyx_t_4 = __pyx_t_6(__pyx_t_3);
+ if (unlikely(!__pyx_t_4)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else __PYX_ERR(0, 1366, __pyx_L1_error)
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_4);
+ }
+ __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1367
+ * if node_class is SequenceNode:
+ * for item in node.value:
+ * self._anchor_node(item) # <<<<<<<<<<<<<<
+ * elif node_class is MappingNode:
+ * for key, value in node.value:
+ */
+ __pyx_t_7 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_anchor_node(__pyx_v_self, __pyx_v_item); if (unlikely(__pyx_t_7 == ((int)0))) __PYX_ERR(0, 1367, __pyx_L1_error)
+
+ /* "_yaml.pyx":1366
+ * node_class = node.__class__
+ * if node_class is SequenceNode:
+ * for item in node.value: # <<<<<<<<<<<<<<
+ * self._anchor_node(item)
+ * elif node_class is MappingNode:
+ */
+ }
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":1365
+ * self.anchors[node] = None
+ * node_class = node.__class__
+ * if node_class is SequenceNode: # <<<<<<<<<<<<<<
+ * for item in node.value:
+ * self._anchor_node(item)
+ */
+ goto __pyx_L5;
+ }
+
+ /* "_yaml.pyx":1368
+ * for item in node.value:
+ * self._anchor_node(item)
+ * elif node_class is MappingNode: # <<<<<<<<<<<<<<
+ * for key, value in node.value:
+ * self._anchor_node(key)
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_MappingNode); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1368, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = (__pyx_v_node_class == __pyx_t_3);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_1 = (__pyx_t_2 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1369
+ * self._anchor_node(item)
+ * elif node_class is MappingNode:
+ * for key, value in node.value: # <<<<<<<<<<<<<<
+ * self._anchor_node(key)
+ * self._anchor_node(value)
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1369, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ if (likely(PyList_CheckExact(__pyx_t_3)) || PyTuple_CheckExact(__pyx_t_3)) {
+ __pyx_t_4 = __pyx_t_3; __Pyx_INCREF(__pyx_t_4); __pyx_t_5 = 0;
+ __pyx_t_6 = NULL;
+ } else {
+ __pyx_t_5 = -1; __pyx_t_4 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1369, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_6 = Py_TYPE(__pyx_t_4)->tp_iternext; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1369, __pyx_L1_error)
+ }
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ for (;;) {
+ if (likely(!__pyx_t_6)) {
+ if (likely(PyList_CheckExact(__pyx_t_4))) {
+ if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_4)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_3 = PyList_GET_ITEM(__pyx_t_4, __pyx_t_5); __Pyx_INCREF(__pyx_t_3); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 1369, __pyx_L1_error)
+ #else
+ __pyx_t_3 = PySequence_ITEM(__pyx_t_4, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1369, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ #endif
+ } else {
+ if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_4)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_4, __pyx_t_5); __Pyx_INCREF(__pyx_t_3); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 1369, __pyx_L1_error)
+ #else
+ __pyx_t_3 = PySequence_ITEM(__pyx_t_4, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1369, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ #endif
+ }
+ } else {
+ __pyx_t_3 = __pyx_t_6(__pyx_t_4);
+ if (unlikely(!__pyx_t_3)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else __PYX_ERR(0, 1369, __pyx_L1_error)
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_3);
+ }
+ if ((likely(PyTuple_CheckExact(__pyx_t_3))) || (PyList_CheckExact(__pyx_t_3))) {
+ PyObject* sequence = __pyx_t_3;
+ Py_ssize_t size = __Pyx_PySequence_SIZE(sequence);
+ if (unlikely(size != 2)) {
+ if (size > 2) __Pyx_RaiseTooManyValuesError(2);
+ else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);
+ __PYX_ERR(0, 1369, __pyx_L1_error)
+ }
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ if (likely(PyTuple_CheckExact(sequence))) {
+ __pyx_t_8 = PyTuple_GET_ITEM(sequence, 0);
+ __pyx_t_9 = PyTuple_GET_ITEM(sequence, 1);
+ } else {
+ __pyx_t_8 = PyList_GET_ITEM(sequence, 0);
+ __pyx_t_9 = PyList_GET_ITEM(sequence, 1);
+ }
+ __Pyx_INCREF(__pyx_t_8);
+ __Pyx_INCREF(__pyx_t_9);
+ #else
+ __pyx_t_8 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1369, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __pyx_t_9 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 1369, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_9);
+ #endif
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ } else {
+ Py_ssize_t index = -1;
+ __pyx_t_10 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 1369, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_10);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_11 = Py_TYPE(__pyx_t_10)->tp_iternext;
+ index = 0; __pyx_t_8 = __pyx_t_11(__pyx_t_10); if (unlikely(!__pyx_t_8)) goto __pyx_L10_unpacking_failed;
+ __Pyx_GOTREF(__pyx_t_8);
+ index = 1; __pyx_t_9 = __pyx_t_11(__pyx_t_10); if (unlikely(!__pyx_t_9)) goto __pyx_L10_unpacking_failed;
+ __Pyx_GOTREF(__pyx_t_9);
+ if (__Pyx_IternextUnpackEndCheck(__pyx_t_11(__pyx_t_10), 2) < 0) __PYX_ERR(0, 1369, __pyx_L1_error)
+ __pyx_t_11 = NULL;
+ __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;
+ goto __pyx_L11_unpacking_done;
+ __pyx_L10_unpacking_failed:;
+ __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;
+ __pyx_t_11 = NULL;
+ if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index);
+ __PYX_ERR(0, 1369, __pyx_L1_error)
+ __pyx_L11_unpacking_done:;
+ }
+ __Pyx_XDECREF_SET(__pyx_v_key, __pyx_t_8);
+ __pyx_t_8 = 0;
+ __Pyx_XDECREF_SET(__pyx_v_value, __pyx_t_9);
+ __pyx_t_9 = 0;
+
+ /* "_yaml.pyx":1370
+ * elif node_class is MappingNode:
+ * for key, value in node.value:
+ * self._anchor_node(key) # <<<<<<<<<<<<<<
+ * self._anchor_node(value)
+ * return 1
+ */
+ __pyx_t_7 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_anchor_node(__pyx_v_self, __pyx_v_key); if (unlikely(__pyx_t_7 == ((int)0))) __PYX_ERR(0, 1370, __pyx_L1_error)
+
+ /* "_yaml.pyx":1371
+ * for key, value in node.value:
+ * self._anchor_node(key)
+ * self._anchor_node(value) # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+ __pyx_t_7 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_anchor_node(__pyx_v_self, __pyx_v_value); if (unlikely(__pyx_t_7 == ((int)0))) __PYX_ERR(0, 1371, __pyx_L1_error)
+
+ /* "_yaml.pyx":1369
+ * self._anchor_node(item)
+ * elif node_class is MappingNode:
+ * for key, value in node.value: # <<<<<<<<<<<<<<
+ * self._anchor_node(key)
+ * self._anchor_node(value)
+ */
+ }
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1368
+ * for item in node.value:
+ * self._anchor_node(item)
+ * elif node_class is MappingNode: # <<<<<<<<<<<<<<
+ * for key, value in node.value:
+ * self._anchor_node(key)
+ */
+ }
+ __pyx_L5:;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":1372
+ * self._anchor_node(key)
+ * self._anchor_node(value)
+ * return 1 # <<<<<<<<<<<<<<
+ *
+ * cdef int _serialize_node(self, object node, object parent, object index) except 0:
+ */
+ __pyx_r = 1;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":1357
+ * self.last_alias_id = 0
+ *
+ * cdef int _anchor_node(self, object node) except 0: # <<<<<<<<<<<<<<
+ * if node in self.anchors:
+ * if self.anchors[node] is None:
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_8);
+ __Pyx_XDECREF(__pyx_t_9);
+ __Pyx_XDECREF(__pyx_t_10);
+ __Pyx_AddTraceback("_yaml.CEmitter._anchor_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_node_class);
+ __Pyx_XDECREF(__pyx_v_item);
+ __Pyx_XDECREF(__pyx_v_key);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":1374
+ * return 1
+ *
+ * cdef int _serialize_node(self, object node, object parent, object index) except 0: # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef int implicit
+ */
+
+static int __pyx_f_5_yaml_8CEmitter__serialize_node(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_node, PyObject *__pyx_v_parent, PyObject *__pyx_v_index) {
+ yaml_event_t __pyx_v_event;
+ int __pyx_v_implicit;
+ int __pyx_v_plain_implicit;
+ int __pyx_v_quoted_implicit;
+ char *__pyx_v_anchor;
+ char *__pyx_v_tag;
+ char *__pyx_v_value;
+ int __pyx_v_length;
+ int __pyx_v_item_index;
+ yaml_scalar_style_t __pyx_v_scalar_style;
+ yaml_sequence_style_t __pyx_v_sequence_style;
+ yaml_mapping_style_t __pyx_v_mapping_style;
+ PyObject *__pyx_v_anchor_object = NULL;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_v_node_class = NULL;
+ PyObject *__pyx_v_tag_object = NULL;
+ PyObject *__pyx_v_value_object = NULL;
+ PyObject *__pyx_v_style_object = NULL;
+ PyObject *__pyx_v_item = NULL;
+ PyObject *__pyx_v_item_key = NULL;
+ PyObject *__pyx_v_item_value = NULL;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_t_2;
+ int __pyx_t_3;
+ int __pyx_t_4;
+ PyObject *__pyx_t_5 = NULL;
+ PyObject *__pyx_t_6 = NULL;
+ PyObject *__pyx_t_7 = NULL;
+ PyObject *__pyx_t_8 = NULL;
+ PyObject *__pyx_t_9 = NULL;
+ Py_ssize_t __pyx_t_10;
+ PyObject *(*__pyx_t_11)(PyObject *);
+ PyObject *(*__pyx_t_12)(PyObject *);
+ __Pyx_RefNannySetupContext("_serialize_node", 0);
+
+ /* "_yaml.pyx":1387
+ * cdef yaml_sequence_style_t sequence_style
+ * cdef yaml_mapping_style_t mapping_style
+ * anchor_object = self.anchors[node] # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * if anchor_object is not None:
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_node); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1387, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_anchor_object = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1388
+ * cdef yaml_mapping_style_t mapping_style
+ * anchor_object = self.anchors[node]
+ * anchor = NULL # <<<<<<<<<<<<<<
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ */
+ __pyx_v_anchor = NULL;
+
+ /* "_yaml.pyx":1389
+ * anchor_object = self.anchors[node]
+ * anchor = NULL
+ * if anchor_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ __pyx_t_2 = (__pyx_v_anchor_object != Py_None);
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1390
+ * anchor = NULL
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ __pyx_t_3 = (PyUnicode_CheckExact(__pyx_v_anchor_object) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1391
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_1 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1391, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF_SET(__pyx_v_anchor_object, __pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1390
+ * anchor = NULL
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ }
+
+ /* "_yaml.pyx":1392
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ __pyx_t_3 = ((!(PyString_CheckExact(__pyx_v_anchor_object) != 0)) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1393
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ __pyx_t_3 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_3)) {
+
+ /* "_yaml.pyx":1394
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__30, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1394, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(0, 1394, __pyx_L1_error)
+
+ /* "_yaml.pyx":1393
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1396
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if node in self.serialized_nodes:
+ */
+ /*else*/ {
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__31, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1396, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(0, 1396, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1392
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1397
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<<
+ * if node in self.serialized_nodes:
+ * if yaml_alias_event_initialize(&event, anchor) == 0:
+ */
+ __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object);
+
+ /* "_yaml.pyx":1389
+ * anchor_object = self.anchors[node]
+ * anchor = NULL
+ * if anchor_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ }
+
+ /* "_yaml.pyx":1398
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if node in self.serialized_nodes: # <<<<<<<<<<<<<<
+ * if yaml_alias_event_initialize(&event, anchor) == 0:
+ * raise MemoryError
+ */
+ __pyx_t_3 = (__Pyx_PySequence_ContainsTF(__pyx_v_node, __pyx_v_self->serialized_nodes, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1398, __pyx_L1_error)
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1399
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if node in self.serialized_nodes:
+ * if yaml_alias_event_initialize(&event, anchor) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_2 = ((yaml_alias_event_initialize((&__pyx_v_event), __pyx_v_anchor) == 0) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1400
+ * if node in self.serialized_nodes:
+ * if yaml_alias_event_initialize(&event, anchor) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ PyErr_NoMemory(); __PYX_ERR(0, 1400, __pyx_L1_error)
+
+ /* "_yaml.pyx":1399
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if node in self.serialized_nodes:
+ * if yaml_alias_event_initialize(&event, anchor) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ }
+
+ /* "_yaml.pyx":1401
+ * if yaml_alias_event_initialize(&event, anchor) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1401, __pyx_L1_error)
+ __pyx_t_2 = ((__pyx_t_4 == 0) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1402
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * else:
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1402, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_error = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1403
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * else:
+ * node_class = node.__class__
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 1403, __pyx_L1_error)
+
+ /* "_yaml.pyx":1401
+ * if yaml_alias_event_initialize(&event, anchor) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":1398
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if node in self.serialized_nodes: # <<<<<<<<<<<<<<
+ * if yaml_alias_event_initialize(&event, anchor) == 0:
+ * raise MemoryError
+ */
+ goto __pyx_L7;
+ }
+
+ /* "_yaml.pyx":1405
+ * raise error
+ * else:
+ * node_class = node.__class__ # <<<<<<<<<<<<<<
+ * self.serialized_nodes[node] = True
+ * self.descend_resolver(parent, index)
+ */
+ /*else*/ {
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_class); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1405, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_node_class = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1406
+ * else:
+ * node_class = node.__class__
+ * self.serialized_nodes[node] = True # <<<<<<<<<<<<<<
+ * self.descend_resolver(parent, index)
+ * if node_class is ScalarNode:
+ */
+ if (unlikely(PyObject_SetItem(__pyx_v_self->serialized_nodes, __pyx_v_node, Py_True) < 0)) __PYX_ERR(0, 1406, __pyx_L1_error)
+
+ /* "_yaml.pyx":1407
+ * node_class = node.__class__
+ * self.serialized_nodes[node] = True
+ * self.descend_resolver(parent, index) # <<<<<<<<<<<<<<
+ * if node_class is ScalarNode:
+ * plain_implicit = 0
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_descend_resolver); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1407, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = NULL;
+ __pyx_t_4 = 0;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) {
+ __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5);
+ if (likely(__pyx_t_6)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
+ __Pyx_INCREF(__pyx_t_6);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_5, function);
+ __pyx_t_4 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_5)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_parent, __pyx_v_index};
+ __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_4, 2+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1407, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __Pyx_GOTREF(__pyx_t_1);
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) {
+ PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_parent, __pyx_v_index};
+ __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_4, 2+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1407, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __Pyx_GOTREF(__pyx_t_1);
+ } else
+ #endif
+ {
+ __pyx_t_7 = PyTuple_New(2+__pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1407, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ if (__pyx_t_6) {
+ __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_6); __pyx_t_6 = NULL;
+ }
+ __Pyx_INCREF(__pyx_v_parent);
+ __Pyx_GIVEREF(__pyx_v_parent);
+ PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_4, __pyx_v_parent);
+ __Pyx_INCREF(__pyx_v_index);
+ __Pyx_GIVEREF(__pyx_v_index);
+ PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_4, __pyx_v_index);
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1407, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1408
+ * self.serialized_nodes[node] = True
+ * self.descend_resolver(parent, index)
+ * if node_class is ScalarNode: # <<<<<<<<<<<<<<
+ * plain_implicit = 0
+ * quoted_implicit = 0
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_ScalarNode); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1408, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = (__pyx_v_node_class == __pyx_t_1);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1409
+ * self.descend_resolver(parent, index)
+ * if node_class is ScalarNode:
+ * plain_implicit = 0 # <<<<<<<<<<<<<<
+ * quoted_implicit = 0
+ * tag_object = node.tag
+ */
+ __pyx_v_plain_implicit = 0;
+
+ /* "_yaml.pyx":1410
+ * if node_class is ScalarNode:
+ * plain_implicit = 0
+ * quoted_implicit = 0 # <<<<<<<<<<<<<<
+ * tag_object = node.tag
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
+ */
+ __pyx_v_quoted_implicit = 0;
+
+ /* "_yaml.pyx":1411
+ * plain_implicit = 0
+ * quoted_implicit = 0
+ * tag_object = node.tag # <<<<<<<<<<<<<<
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
+ * plain_implicit = 1
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_tag); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1411, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_tag_object = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1412
+ * quoted_implicit = 0
+ * tag_object = node.tag
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object: # <<<<<<<<<<<<<<
+ * plain_implicit = 1
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1412, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_ScalarNode); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1412, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1412, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_8 = NULL;
+ __pyx_t_4 = 0;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) {
+ __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_5);
+ if (likely(__pyx_t_8)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
+ __Pyx_INCREF(__pyx_t_8);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_5, function);
+ __pyx_t_4 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_5)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_8, __pyx_t_7, __pyx_t_6, __pyx_tuple__36};
+ __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1412, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_8, __pyx_t_7, __pyx_t_6, __pyx_tuple__36};
+ __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1412, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_9 = PyTuple_New(3+__pyx_t_4); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 1412, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_9);
+ if (__pyx_t_8) {
+ __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_8); __pyx_t_8 = NULL;
+ }
+ __Pyx_GIVEREF(__pyx_t_7);
+ PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_4, __pyx_t_7);
+ __Pyx_GIVEREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_4, __pyx_t_6);
+ __Pyx_INCREF(__pyx_tuple__36);
+ __Pyx_GIVEREF(__pyx_tuple__36);
+ PyTuple_SET_ITEM(__pyx_t_9, 2+__pyx_t_4, __pyx_tuple__36);
+ __pyx_t_7 = 0;
+ __pyx_t_6 = 0;
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_9, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1412, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_5 = PyObject_RichCompare(__pyx_t_1, __pyx_v_tag_object, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1412, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1412, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1413
+ * tag_object = node.tag
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
+ * plain_implicit = 1 # <<<<<<<<<<<<<<
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
+ * quoted_implicit = 1
+ */
+ __pyx_v_plain_implicit = 1;
+
+ /* "_yaml.pyx":1412
+ * quoted_implicit = 0
+ * tag_object = node.tag
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object: # <<<<<<<<<<<<<<
+ * plain_implicit = 1
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
+ */
+ }
+
+ /* "_yaml.pyx":1414
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
+ * plain_implicit = 1
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object: # <<<<<<<<<<<<<<
+ * quoted_implicit = 1
+ * tag = NULL
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1414, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_GetModuleGlobalName(__pyx_t_9, __pyx_n_s_ScalarNode); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 1414, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_9);
+ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1414, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_7 = NULL;
+ __pyx_t_4 = 0;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) {
+ __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_1);
+ if (likely(__pyx_t_7)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
+ __Pyx_INCREF(__pyx_t_7);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_1, function);
+ __pyx_t_4 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_t_9, __pyx_t_6, __pyx_tuple__37};
+ __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1414, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_t_9, __pyx_t_6, __pyx_tuple__37};
+ __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1414, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_8 = PyTuple_New(3+__pyx_t_4); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1414, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ if (__pyx_t_7) {
+ __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_7); __pyx_t_7 = NULL;
+ }
+ __Pyx_GIVEREF(__pyx_t_9);
+ PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_4, __pyx_t_9);
+ __Pyx_GIVEREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_4, __pyx_t_6);
+ __Pyx_INCREF(__pyx_tuple__37);
+ __Pyx_GIVEREF(__pyx_tuple__37);
+ PyTuple_SET_ITEM(__pyx_t_8, 2+__pyx_t_4, __pyx_tuple__37);
+ __pyx_t_9 = 0;
+ __pyx_t_6 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_8, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1414, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = PyObject_RichCompare(__pyx_t_5, __pyx_v_tag_object, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1414, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1414, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1415
+ * plain_implicit = 1
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
+ * quoted_implicit = 1 # <<<<<<<<<<<<<<
+ * tag = NULL
+ * if tag_object is not None:
+ */
+ __pyx_v_quoted_implicit = 1;
+
+ /* "_yaml.pyx":1414
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
+ * plain_implicit = 1
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object: # <<<<<<<<<<<<<<
+ * quoted_implicit = 1
+ * tag = NULL
+ */
+ }
+
+ /* "_yaml.pyx":1416
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
+ * quoted_implicit = 1
+ * tag = NULL # <<<<<<<<<<<<<<
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ */
+ __pyx_v_tag = NULL;
+
+ /* "_yaml.pyx":1417
+ * quoted_implicit = 1
+ * tag = NULL
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ __pyx_t_3 = (__pyx_v_tag_object != Py_None);
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1418
+ * tag = NULL
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_tag_object) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1419
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_1 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1419, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF_SET(__pyx_v_tag_object, __pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1418
+ * tag = NULL
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ }
+
+ /* "_yaml.pyx":1420
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_tag_object) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1421
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1422
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__32, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1422, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(0, 1422, __pyx_L1_error)
+
+ /* "_yaml.pyx":1421
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1424
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = node.value
+ */
+ /*else*/ {
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__33, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1424, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(0, 1424, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1420
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1425
+ * else:
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<<
+ * value_object = node.value
+ * if PyUnicode_CheckExact(value_object):
+ */
+ __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object);
+
+ /* "_yaml.pyx":1417
+ * quoted_implicit = 1
+ * tag = NULL
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ }
+
+ /* "_yaml.pyx":1426
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = node.value # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1426, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_value_object = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1427
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = node.value
+ * if PyUnicode_CheckExact(value_object): # <<<<<<<<<<<<<<
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object):
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_value_object) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1428
+ * value_object = node.value
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_1 = PyUnicode_AsUTF8String(__pyx_v_value_object); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1428, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF_SET(__pyx_v_value_object, __pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1427
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = node.value
+ * if PyUnicode_CheckExact(value_object): # <<<<<<<<<<<<<<
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object):
+ */
+ }
+
+ /* "_yaml.pyx":1429
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("value must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_value_object) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1430
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("value must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1431
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("value must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"value must be a string")
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__34, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1431, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(0, 1431, __pyx_L1_error)
+
+ /* "_yaml.pyx":1430
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("value must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1433
+ * raise TypeError("value must be a string")
+ * else:
+ * raise TypeError(u"value must be a string") # <<<<<<<<<<<<<<
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object)
+ */
+ /*else*/ {
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__35, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1433, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(0, 1433, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1429
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("value must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1434
+ * else:
+ * raise TypeError(u"value must be a string")
+ * value = PyString_AS_STRING(value_object) # <<<<<<<<<<<<<<
+ * length = PyString_GET_SIZE(value_object)
+ * style_object = node.style
+ */
+ __pyx_v_value = PyString_AS_STRING(__pyx_v_value_object);
+
+ /* "_yaml.pyx":1435
+ * raise TypeError(u"value must be a string")
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object) # <<<<<<<<<<<<<<
+ * style_object = node.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ */
+ __pyx_v_length = PyString_GET_SIZE(__pyx_v_value_object);
+
+ /* "_yaml.pyx":1436
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object)
+ * style_object = node.style # <<<<<<<<<<<<<<
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'":
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_style); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1436, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_style_object = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1437
+ * length = PyString_GET_SIZE(value_object)
+ * style_object = node.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_PLAIN_SCALAR_STYLE;
+
+ /* "_yaml.pyx":1438
+ * style_object = node.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"":
+ */
+ __pyx_t_3 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__7, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1438, __pyx_L1_error)
+ if (!__pyx_t_3) {
+ } else {
+ __pyx_t_2 = __pyx_t_3;
+ goto __pyx_L21_bool_binop_done;
+ }
+ __pyx_t_3 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__7, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1438, __pyx_L1_error)
+ __pyx_t_2 = __pyx_t_3;
+ __pyx_L21_bool_binop_done:;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1439
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE;
+
+ /* "_yaml.pyx":1438
+ * style_object = node.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"":
+ */
+ goto __pyx_L20;
+ }
+
+ /* "_yaml.pyx":1440
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|":
+ */
+ __pyx_t_3 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__8, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1440, __pyx_L1_error)
+ if (!__pyx_t_3) {
+ } else {
+ __pyx_t_2 = __pyx_t_3;
+ goto __pyx_L23_bool_binop_done;
+ }
+ __pyx_t_3 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__8, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1440, __pyx_L1_error)
+ __pyx_t_2 = __pyx_t_3;
+ __pyx_L23_bool_binop_done:;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1441
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE;
+
+ /* "_yaml.pyx":1440
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|":
+ */
+ goto __pyx_L20;
+ }
+
+ /* "_yaml.pyx":1442
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">":
+ */
+ __pyx_t_3 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__9, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1442, __pyx_L1_error)
+ if (!__pyx_t_3) {
+ } else {
+ __pyx_t_2 = __pyx_t_3;
+ goto __pyx_L25_bool_binop_done;
+ }
+ __pyx_t_3 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__9, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1442, __pyx_L1_error)
+ __pyx_t_2 = __pyx_t_3;
+ __pyx_L25_bool_binop_done:;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1443
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * elif style_object == ">" or style_object == u">":
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_LITERAL_SCALAR_STYLE;
+
+ /* "_yaml.pyx":1442
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">":
+ */
+ goto __pyx_L20;
+ }
+
+ /* "_yaml.pyx":1444
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(&event, anchor, tag, value, length,
+ */
+ __pyx_t_3 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__10, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1444, __pyx_L1_error)
+ if (!__pyx_t_3) {
+ } else {
+ __pyx_t_2 = __pyx_t_3;
+ goto __pyx_L27_bool_binop_done;
+ }
+ __pyx_t_3 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__10, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1444, __pyx_L1_error)
+ __pyx_t_2 = __pyx_t_3;
+ __pyx_L27_bool_binop_done:;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1445
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">":
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * if yaml_scalar_event_initialize(&event, anchor, tag, value, length,
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ */
+ __pyx_v_scalar_style = YAML_FOLDED_SCALAR_STYLE;
+
+ /* "_yaml.pyx":1444
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(&event, anchor, tag, value, length,
+ */
+ }
+ __pyx_L20:;
+
+ /* "_yaml.pyx":1447
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(&event, anchor, tag, value, length,
+ * plain_implicit, quoted_implicit, scalar_style) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_2 = ((yaml_scalar_event_initialize((&__pyx_v_event), __pyx_v_anchor, __pyx_v_tag, __pyx_v_value, __pyx_v_length, __pyx_v_plain_implicit, __pyx_v_quoted_implicit, __pyx_v_scalar_style) == 0) != 0);
+
+ /* "_yaml.pyx":1446
+ * elif style_object == ">" or style_object == u">":
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(&event, anchor, tag, value, length, # <<<<<<<<<<<<<<
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError
+ */
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1448
+ * if yaml_scalar_event_initialize(&event, anchor, tag, value, length,
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ PyErr_NoMemory(); __PYX_ERR(0, 1448, __pyx_L1_error)
+
+ /* "_yaml.pyx":1446
+ * elif style_object == ">" or style_object == u">":
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(&event, anchor, tag, value, length, # <<<<<<<<<<<<<<
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError
+ */
+ }
+
+ /* "_yaml.pyx":1449
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1449, __pyx_L1_error)
+ __pyx_t_2 = ((__pyx_t_4 == 0) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1450
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * elif node_class is SequenceNode:
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1450, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_error = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1451
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * elif node_class is SequenceNode:
+ * implicit = 0
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 1451, __pyx_L1_error)
+
+ /* "_yaml.pyx":1449
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":1408
+ * self.serialized_nodes[node] = True
+ * self.descend_resolver(parent, index)
+ * if node_class is ScalarNode: # <<<<<<<<<<<<<<
+ * plain_implicit = 0
+ * quoted_implicit = 0
+ */
+ goto __pyx_L10;
+ }
+
+ /* "_yaml.pyx":1452
+ * error = self._emitter_error()
+ * raise error
+ * elif node_class is SequenceNode: # <<<<<<<<<<<<<<
+ * implicit = 0
+ * tag_object = node.tag
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_SequenceNode); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1452, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = (__pyx_v_node_class == __pyx_t_1);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1453
+ * raise error
+ * elif node_class is SequenceNode:
+ * implicit = 0 # <<<<<<<<<<<<<<
+ * tag_object = node.tag
+ * if self.resolve(SequenceNode, node.value, True) == tag_object:
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":1454
+ * elif node_class is SequenceNode:
+ * implicit = 0
+ * tag_object = node.tag # <<<<<<<<<<<<<<
+ * if self.resolve(SequenceNode, node.value, True) == tag_object:
+ * implicit = 1
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_tag); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1454, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_tag_object = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1455
+ * implicit = 0
+ * tag_object = node.tag
+ * if self.resolve(SequenceNode, node.value, True) == tag_object: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * tag = NULL
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1455, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_SequenceNode); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1455, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1455, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_9 = NULL;
+ __pyx_t_4 = 0;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) {
+ __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_5);
+ if (likely(__pyx_t_9)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
+ __Pyx_INCREF(__pyx_t_9);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_5, function);
+ __pyx_t_4 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_5)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_9, __pyx_t_8, __pyx_t_6, Py_True};
+ __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1455, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_9, __pyx_t_8, __pyx_t_6, Py_True};
+ __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1455, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_7 = PyTuple_New(3+__pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1455, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ if (__pyx_t_9) {
+ __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_9); __pyx_t_9 = NULL;
+ }
+ __Pyx_GIVEREF(__pyx_t_8);
+ PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_4, __pyx_t_8);
+ __Pyx_GIVEREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_4, __pyx_t_6);
+ __Pyx_INCREF(Py_True);
+ __Pyx_GIVEREF(Py_True);
+ PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_4, Py_True);
+ __pyx_t_8 = 0;
+ __pyx_t_6 = 0;
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1455, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_5 = PyObject_RichCompare(__pyx_t_1, __pyx_v_tag_object, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1455, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1455, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1456
+ * tag_object = node.tag
+ * if self.resolve(SequenceNode, node.value, True) == tag_object:
+ * implicit = 1 # <<<<<<<<<<<<<<
+ * tag = NULL
+ * if tag_object is not None:
+ */
+ __pyx_v_implicit = 1;
+
+ /* "_yaml.pyx":1455
+ * implicit = 0
+ * tag_object = node.tag
+ * if self.resolve(SequenceNode, node.value, True) == tag_object: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * tag = NULL
+ */
+ }
+
+ /* "_yaml.pyx":1457
+ * if self.resolve(SequenceNode, node.value, True) == tag_object:
+ * implicit = 1
+ * tag = NULL # <<<<<<<<<<<<<<
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ */
+ __pyx_v_tag = NULL;
+
+ /* "_yaml.pyx":1458
+ * implicit = 1
+ * tag = NULL
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ __pyx_t_3 = (__pyx_v_tag_object != Py_None);
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1459
+ * tag = NULL
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_tag_object) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1460
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1460, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_tag_object, __pyx_t_5);
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1459
+ * tag = NULL
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ }
+
+ /* "_yaml.pyx":1461
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_tag_object) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1462
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1463
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__32, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1463, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __PYX_ERR(0, 1463, __pyx_L1_error)
+
+ /* "_yaml.pyx":1462
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1465
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ */
+ /*else*/ {
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__33, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1465, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __PYX_ERR(0, 1465, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1461
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1466
+ * else:
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<<
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if node.flow_style:
+ */
+ __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object);
+
+ /* "_yaml.pyx":1458
+ * implicit = 1
+ * tag = NULL
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ }
+
+ /* "_yaml.pyx":1467
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object)
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE # <<<<<<<<<<<<<<
+ * if node.flow_style:
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ */
+ __pyx_v_sequence_style = YAML_BLOCK_SEQUENCE_STYLE;
+
+ /* "_yaml.pyx":1468
+ * tag = PyString_AS_STRING(tag_object)
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if node.flow_style: # <<<<<<<<<<<<<<
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(&event, anchor, tag,
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_flow_style); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1468, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1468, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1469
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if node.flow_style:
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE # <<<<<<<<<<<<<<
+ * if yaml_sequence_start_event_initialize(&event, anchor, tag,
+ * implicit, sequence_style) == 0:
+ */
+ __pyx_v_sequence_style = YAML_FLOW_SEQUENCE_STYLE;
+
+ /* "_yaml.pyx":1468
+ * tag = PyString_AS_STRING(tag_object)
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if node.flow_style: # <<<<<<<<<<<<<<
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(&event, anchor, tag,
+ */
+ }
+
+ /* "_yaml.pyx":1471
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(&event, anchor, tag,
+ * implicit, sequence_style) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_2 = ((yaml_sequence_start_event_initialize((&__pyx_v_event), __pyx_v_anchor, __pyx_v_tag, __pyx_v_implicit, __pyx_v_sequence_style) == 0) != 0);
+
+ /* "_yaml.pyx":1470
+ * if node.flow_style:
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(&event, anchor, tag, # <<<<<<<<<<<<<<
+ * implicit, sequence_style) == 0:
+ * raise MemoryError
+ */
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1472
+ * if yaml_sequence_start_event_initialize(&event, anchor, tag,
+ * implicit, sequence_style) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ PyErr_NoMemory(); __PYX_ERR(0, 1472, __pyx_L1_error)
+
+ /* "_yaml.pyx":1470
+ * if node.flow_style:
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(&event, anchor, tag, # <<<<<<<<<<<<<<
+ * implicit, sequence_style) == 0:
+ * raise MemoryError
+ */
+ }
+
+ /* "_yaml.pyx":1473
+ * implicit, sequence_style) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1473, __pyx_L1_error)
+ __pyx_t_2 = ((__pyx_t_4 == 0) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1474
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * item_index = 0
+ */
+ __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1474, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_error = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1475
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * item_index = 0
+ * for item in node.value:
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 1475, __pyx_L1_error)
+
+ /* "_yaml.pyx":1473
+ * implicit, sequence_style) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":1476
+ * error = self._emitter_error()
+ * raise error
+ * item_index = 0 # <<<<<<<<<<<<<<
+ * for item in node.value:
+ * self._serialize_node(item, node, item_index)
+ */
+ __pyx_v_item_index = 0;
+
+ /* "_yaml.pyx":1477
+ * raise error
+ * item_index = 0
+ * for item in node.value: # <<<<<<<<<<<<<<
+ * self._serialize_node(item, node, item_index)
+ * item_index = item_index+1
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1477, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ if (likely(PyList_CheckExact(__pyx_t_5)) || PyTuple_CheckExact(__pyx_t_5)) {
+ __pyx_t_1 = __pyx_t_5; __Pyx_INCREF(__pyx_t_1); __pyx_t_10 = 0;
+ __pyx_t_11 = NULL;
+ } else {
+ __pyx_t_10 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1477, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_11 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 1477, __pyx_L1_error)
+ }
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ for (;;) {
+ if (likely(!__pyx_t_11)) {
+ if (likely(PyList_CheckExact(__pyx_t_1))) {
+ if (__pyx_t_10 >= PyList_GET_SIZE(__pyx_t_1)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_5 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_10); __Pyx_INCREF(__pyx_t_5); __pyx_t_10++; if (unlikely(0 < 0)) __PYX_ERR(0, 1477, __pyx_L1_error)
+ #else
+ __pyx_t_5 = PySequence_ITEM(__pyx_t_1, __pyx_t_10); __pyx_t_10++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1477, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ #endif
+ } else {
+ if (__pyx_t_10 >= PyTuple_GET_SIZE(__pyx_t_1)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_10); __Pyx_INCREF(__pyx_t_5); __pyx_t_10++; if (unlikely(0 < 0)) __PYX_ERR(0, 1477, __pyx_L1_error)
+ #else
+ __pyx_t_5 = PySequence_ITEM(__pyx_t_1, __pyx_t_10); __pyx_t_10++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1477, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ #endif
+ }
+ } else {
+ __pyx_t_5 = __pyx_t_11(__pyx_t_1);
+ if (unlikely(!__pyx_t_5)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else __PYX_ERR(0, 1477, __pyx_L1_error)
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_5);
+ }
+ __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_5);
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1478
+ * item_index = 0
+ * for item in node.value:
+ * self._serialize_node(item, node, item_index) # <<<<<<<<<<<<<<
+ * item_index = item_index+1
+ * yaml_sequence_end_event_initialize(&event)
+ */
+ __pyx_t_5 = __Pyx_PyInt_From_int(__pyx_v_item_index); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1478, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_4 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_serialize_node(__pyx_v_self, __pyx_v_item, __pyx_v_node, __pyx_t_5); if (unlikely(__pyx_t_4 == ((int)0))) __PYX_ERR(0, 1478, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1479
+ * for item in node.value:
+ * self._serialize_node(item, node, item_index)
+ * item_index = item_index+1 # <<<<<<<<<<<<<<
+ * yaml_sequence_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_v_item_index = (__pyx_v_item_index + 1);
+
+ /* "_yaml.pyx":1477
+ * raise error
+ * item_index = 0
+ * for item in node.value: # <<<<<<<<<<<<<<
+ * self._serialize_node(item, node, item_index)
+ * item_index = item_index+1
+ */
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1480
+ * self._serialize_node(item, node, item_index)
+ * item_index = item_index+1
+ * yaml_sequence_end_event_initialize(&event) # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ (void)(yaml_sequence_end_event_initialize((&__pyx_v_event)));
+
+ /* "_yaml.pyx":1481
+ * item_index = item_index+1
+ * yaml_sequence_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1481, __pyx_L1_error)
+ __pyx_t_2 = ((__pyx_t_4 == 0) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1482
+ * yaml_sequence_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * elif node_class is MappingNode:
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1482, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_error = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1483
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * elif node_class is MappingNode:
+ * implicit = 0
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 1483, __pyx_L1_error)
+
+ /* "_yaml.pyx":1481
+ * item_index = item_index+1
+ * yaml_sequence_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":1452
+ * error = self._emitter_error()
+ * raise error
+ * elif node_class is SequenceNode: # <<<<<<<<<<<<<<
+ * implicit = 0
+ * tag_object = node.tag
+ */
+ goto __pyx_L10;
+ }
+
+ /* "_yaml.pyx":1484
+ * error = self._emitter_error()
+ * raise error
+ * elif node_class is MappingNode: # <<<<<<<<<<<<<<
+ * implicit = 0
+ * tag_object = node.tag
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_MappingNode); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1484, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = (__pyx_v_node_class == __pyx_t_1);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1485
+ * raise error
+ * elif node_class is MappingNode:
+ * implicit = 0 # <<<<<<<<<<<<<<
+ * tag_object = node.tag
+ * if self.resolve(MappingNode, node.value, True) == tag_object:
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":1486
+ * elif node_class is MappingNode:
+ * implicit = 0
+ * tag_object = node.tag # <<<<<<<<<<<<<<
+ * if self.resolve(MappingNode, node.value, True) == tag_object:
+ * implicit = 1
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_tag); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1486, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_tag_object = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1487
+ * implicit = 0
+ * tag_object = node.tag
+ * if self.resolve(MappingNode, node.value, True) == tag_object: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * tag = NULL
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1487, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_MappingNode); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1487, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1487, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_8 = NULL;
+ __pyx_t_4 = 0;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) {
+ __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_5);
+ if (likely(__pyx_t_8)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
+ __Pyx_INCREF(__pyx_t_8);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_5, function);
+ __pyx_t_4 = 1;
+ }
+ }
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(__pyx_t_5)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_8, __pyx_t_7, __pyx_t_6, Py_True};
+ __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1487, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) {
+ PyObject *__pyx_temp[4] = {__pyx_t_8, __pyx_t_7, __pyx_t_6, Py_True};
+ __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1487, __pyx_L1_error)
+ __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else
+ #endif
+ {
+ __pyx_t_9 = PyTuple_New(3+__pyx_t_4); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 1487, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_9);
+ if (__pyx_t_8) {
+ __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_8); __pyx_t_8 = NULL;
+ }
+ __Pyx_GIVEREF(__pyx_t_7);
+ PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_4, __pyx_t_7);
+ __Pyx_GIVEREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_4, __pyx_t_6);
+ __Pyx_INCREF(Py_True);
+ __Pyx_GIVEREF(Py_True);
+ PyTuple_SET_ITEM(__pyx_t_9, 2+__pyx_t_4, Py_True);
+ __pyx_t_7 = 0;
+ __pyx_t_6 = 0;
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_9, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1487, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ }
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_5 = PyObject_RichCompare(__pyx_t_1, __pyx_v_tag_object, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1487, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 1487, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1488
+ * tag_object = node.tag
+ * if self.resolve(MappingNode, node.value, True) == tag_object:
+ * implicit = 1 # <<<<<<<<<<<<<<
+ * tag = NULL
+ * if tag_object is not None:
+ */
+ __pyx_v_implicit = 1;
+
+ /* "_yaml.pyx":1487
+ * implicit = 0
+ * tag_object = node.tag
+ * if self.resolve(MappingNode, node.value, True) == tag_object: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * tag = NULL
+ */
+ }
+
+ /* "_yaml.pyx":1489
+ * if self.resolve(MappingNode, node.value, True) == tag_object:
+ * implicit = 1
+ * tag = NULL # <<<<<<<<<<<<<<
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ */
+ __pyx_v_tag = NULL;
+
+ /* "_yaml.pyx":1490
+ * implicit = 1
+ * tag = NULL
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ __pyx_t_3 = (__pyx_v_tag_object != Py_None);
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1491
+ * tag = NULL
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_tag_object) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1492
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1492, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_tag_object, __pyx_t_5);
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1491
+ * tag = NULL
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ }
+
+ /* "_yaml.pyx":1493
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_tag_object) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1494
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1495
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__32, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1495, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __PYX_ERR(0, 1495, __pyx_L1_error)
+
+ /* "_yaml.pyx":1494
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ }
+
+ /* "_yaml.pyx":1497
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ */
+ /*else*/ {
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__33, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1497, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __PYX_ERR(0, 1497, __pyx_L1_error)
+ }
+
+ /* "_yaml.pyx":1493
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ }
+
+ /* "_yaml.pyx":1498
+ * else:
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<<
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if node.flow_style:
+ */
+ __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object);
+
+ /* "_yaml.pyx":1490
+ * implicit = 1
+ * tag = NULL
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ }
+
+ /* "_yaml.pyx":1499
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object)
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE # <<<<<<<<<<<<<<
+ * if node.flow_style:
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ */
+ __pyx_v_mapping_style = YAML_BLOCK_MAPPING_STYLE;
+
+ /* "_yaml.pyx":1500
+ * tag = PyString_AS_STRING(tag_object)
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if node.flow_style: # <<<<<<<<<<<<<<
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(&event, anchor, tag,
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_flow_style); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1500, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1500, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1501
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if node.flow_style:
+ * mapping_style = YAML_FLOW_MAPPING_STYLE # <<<<<<<<<<<<<<
+ * if yaml_mapping_start_event_initialize(&event, anchor, tag,
+ * implicit, mapping_style) == 0:
+ */
+ __pyx_v_mapping_style = YAML_FLOW_MAPPING_STYLE;
+
+ /* "_yaml.pyx":1500
+ * tag = PyString_AS_STRING(tag_object)
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if node.flow_style: # <<<<<<<<<<<<<<
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(&event, anchor, tag,
+ */
+ }
+
+ /* "_yaml.pyx":1503
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(&event, anchor, tag,
+ * implicit, mapping_style) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_2 = ((yaml_mapping_start_event_initialize((&__pyx_v_event), __pyx_v_anchor, __pyx_v_tag, __pyx_v_implicit, __pyx_v_mapping_style) == 0) != 0);
+
+ /* "_yaml.pyx":1502
+ * if node.flow_style:
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(&event, anchor, tag, # <<<<<<<<<<<<<<
+ * implicit, mapping_style) == 0:
+ * raise MemoryError
+ */
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1504
+ * if yaml_mapping_start_event_initialize(&event, anchor, tag,
+ * implicit, mapping_style) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ PyErr_NoMemory(); __PYX_ERR(0, 1504, __pyx_L1_error)
+
+ /* "_yaml.pyx":1502
+ * if node.flow_style:
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(&event, anchor, tag, # <<<<<<<<<<<<<<
+ * implicit, mapping_style) == 0:
+ * raise MemoryError
+ */
+ }
+
+ /* "_yaml.pyx":1505
+ * implicit, mapping_style) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1505, __pyx_L1_error)
+ __pyx_t_2 = ((__pyx_t_4 == 0) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1506
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * for item_key, item_value in node.value:
+ */
+ __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1506, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_error = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1507
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * for item_key, item_value in node.value:
+ * self._serialize_node(item_key, node, None)
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 1507, __pyx_L1_error)
+
+ /* "_yaml.pyx":1505
+ * implicit, mapping_style) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":1508
+ * error = self._emitter_error()
+ * raise error
+ * for item_key, item_value in node.value: # <<<<<<<<<<<<<<
+ * self._serialize_node(item_key, node, None)
+ * self._serialize_node(item_value, node, item_key)
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1508, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ if (likely(PyList_CheckExact(__pyx_t_5)) || PyTuple_CheckExact(__pyx_t_5)) {
+ __pyx_t_1 = __pyx_t_5; __Pyx_INCREF(__pyx_t_1); __pyx_t_10 = 0;
+ __pyx_t_11 = NULL;
+ } else {
+ __pyx_t_10 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1508, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_11 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 1508, __pyx_L1_error)
+ }
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ for (;;) {
+ if (likely(!__pyx_t_11)) {
+ if (likely(PyList_CheckExact(__pyx_t_1))) {
+ if (__pyx_t_10 >= PyList_GET_SIZE(__pyx_t_1)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_5 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_10); __Pyx_INCREF(__pyx_t_5); __pyx_t_10++; if (unlikely(0 < 0)) __PYX_ERR(0, 1508, __pyx_L1_error)
+ #else
+ __pyx_t_5 = PySequence_ITEM(__pyx_t_1, __pyx_t_10); __pyx_t_10++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1508, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ #endif
+ } else {
+ if (__pyx_t_10 >= PyTuple_GET_SIZE(__pyx_t_1)) break;
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_10); __Pyx_INCREF(__pyx_t_5); __pyx_t_10++; if (unlikely(0 < 0)) __PYX_ERR(0, 1508, __pyx_L1_error)
+ #else
+ __pyx_t_5 = PySequence_ITEM(__pyx_t_1, __pyx_t_10); __pyx_t_10++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1508, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ #endif
+ }
+ } else {
+ __pyx_t_5 = __pyx_t_11(__pyx_t_1);
+ if (unlikely(!__pyx_t_5)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else __PYX_ERR(0, 1508, __pyx_L1_error)
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_5);
+ }
+ if ((likely(PyTuple_CheckExact(__pyx_t_5))) || (PyList_CheckExact(__pyx_t_5))) {
+ PyObject* sequence = __pyx_t_5;
+ Py_ssize_t size = __Pyx_PySequence_SIZE(sequence);
+ if (unlikely(size != 2)) {
+ if (size > 2) __Pyx_RaiseTooManyValuesError(2);
+ else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);
+ __PYX_ERR(0, 1508, __pyx_L1_error)
+ }
+ #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ if (likely(PyTuple_CheckExact(sequence))) {
+ __pyx_t_9 = PyTuple_GET_ITEM(sequence, 0);
+ __pyx_t_6 = PyTuple_GET_ITEM(sequence, 1);
+ } else {
+ __pyx_t_9 = PyList_GET_ITEM(sequence, 0);
+ __pyx_t_6 = PyList_GET_ITEM(sequence, 1);
+ }
+ __Pyx_INCREF(__pyx_t_9);
+ __Pyx_INCREF(__pyx_t_6);
+ #else
+ __pyx_t_9 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 1508, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_9);
+ __pyx_t_6 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1508, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_6);
+ #endif
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ } else {
+ Py_ssize_t index = -1;
+ __pyx_t_7 = PyObject_GetIter(__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1508, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_12 = Py_TYPE(__pyx_t_7)->tp_iternext;
+ index = 0; __pyx_t_9 = __pyx_t_12(__pyx_t_7); if (unlikely(!__pyx_t_9)) goto __pyx_L52_unpacking_failed;
+ __Pyx_GOTREF(__pyx_t_9);
+ index = 1; __pyx_t_6 = __pyx_t_12(__pyx_t_7); if (unlikely(!__pyx_t_6)) goto __pyx_L52_unpacking_failed;
+ __Pyx_GOTREF(__pyx_t_6);
+ if (__Pyx_IternextUnpackEndCheck(__pyx_t_12(__pyx_t_7), 2) < 0) __PYX_ERR(0, 1508, __pyx_L1_error)
+ __pyx_t_12 = NULL;
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ goto __pyx_L53_unpacking_done;
+ __pyx_L52_unpacking_failed:;
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __pyx_t_12 = NULL;
+ if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index);
+ __PYX_ERR(0, 1508, __pyx_L1_error)
+ __pyx_L53_unpacking_done:;
+ }
+ __Pyx_XDECREF_SET(__pyx_v_item_key, __pyx_t_9);
+ __pyx_t_9 = 0;
+ __Pyx_XDECREF_SET(__pyx_v_item_value, __pyx_t_6);
+ __pyx_t_6 = 0;
+
+ /* "_yaml.pyx":1509
+ * raise error
+ * for item_key, item_value in node.value:
+ * self._serialize_node(item_key, node, None) # <<<<<<<<<<<<<<
+ * self._serialize_node(item_value, node, item_key)
+ * yaml_mapping_end_event_initialize(&event)
+ */
+ __pyx_t_4 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_serialize_node(__pyx_v_self, __pyx_v_item_key, __pyx_v_node, Py_None); if (unlikely(__pyx_t_4 == ((int)0))) __PYX_ERR(0, 1509, __pyx_L1_error)
+
+ /* "_yaml.pyx":1510
+ * for item_key, item_value in node.value:
+ * self._serialize_node(item_key, node, None)
+ * self._serialize_node(item_value, node, item_key) # <<<<<<<<<<<<<<
+ * yaml_mapping_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_4 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_serialize_node(__pyx_v_self, __pyx_v_item_value, __pyx_v_node, __pyx_v_item_key); if (unlikely(__pyx_t_4 == ((int)0))) __PYX_ERR(0, 1510, __pyx_L1_error)
+
+ /* "_yaml.pyx":1508
+ * error = self._emitter_error()
+ * raise error
+ * for item_key, item_value in node.value: # <<<<<<<<<<<<<<
+ * self._serialize_node(item_key, node, None)
+ * self._serialize_node(item_value, node, item_key)
+ */
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1511
+ * self._serialize_node(item_key, node, None)
+ * self._serialize_node(item_value, node, item_key)
+ * yaml_mapping_end_event_initialize(&event) # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ (void)(yaml_mapping_end_event_initialize((&__pyx_v_event)));
+
+ /* "_yaml.pyx":1512
+ * self._serialize_node(item_value, node, item_key)
+ * yaml_mapping_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 1512, __pyx_L1_error)
+ __pyx_t_2 = ((__pyx_t_4 == 0) != 0);
+ if (unlikely(__pyx_t_2)) {
+
+ /* "_yaml.pyx":1513
+ * yaml_mapping_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * self.ascend_resolver()
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1513, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_error = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1514
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * self.ascend_resolver()
+ * return 1
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ __PYX_ERR(0, 1514, __pyx_L1_error)
+
+ /* "_yaml.pyx":1512
+ * self._serialize_node(item_value, node, item_key)
+ * yaml_mapping_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ }
+
+ /* "_yaml.pyx":1484
+ * error = self._emitter_error()
+ * raise error
+ * elif node_class is MappingNode: # <<<<<<<<<<<<<<
+ * implicit = 0
+ * tag_object = node.tag
+ */
+ }
+ __pyx_L10:;
+
+ /* "_yaml.pyx":1515
+ * error = self._emitter_error()
+ * raise error
+ * self.ascend_resolver() # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_ascend_resolver); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1515, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = NULL;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) {
+ __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5);
+ if (likely(__pyx_t_6)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
+ __Pyx_INCREF(__pyx_t_6);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_5, function);
+ }
+ }
+ __pyx_t_1 = (__pyx_t_6) ? __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_6) : __Pyx_PyObject_CallNoArg(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
+ if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1515, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ }
+ __pyx_L7:;
+
+ /* "_yaml.pyx":1516
+ * raise error
+ * self.ascend_resolver()
+ * return 1 # <<<<<<<<<<<<<<
+ *
+ * cdef int output_handler(void *data, char *buffer, size_t size) except 0:
+ */
+ __pyx_r = 1;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":1374
+ * return 1
+ *
+ * cdef int _serialize_node(self, object node, object parent, object index) except 0: # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef int implicit
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_XDECREF(__pyx_t_8);
+ __Pyx_XDECREF(__pyx_t_9);
+ __Pyx_AddTraceback("_yaml.CEmitter._serialize_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_anchor_object);
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XDECREF(__pyx_v_node_class);
+ __Pyx_XDECREF(__pyx_v_tag_object);
+ __Pyx_XDECREF(__pyx_v_value_object);
+ __Pyx_XDECREF(__pyx_v_style_object);
+ __Pyx_XDECREF(__pyx_v_item);
+ __Pyx_XDECREF(__pyx_v_item_key);
+ __Pyx_XDECREF(__pyx_v_item_value);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "(tree fragment)":1
+ * def __reduce_cython__(self): # <<<<<<<<<<<<<<
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)")
+ * def __setstate_cython__(self, __pyx_state):
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_8CEmitter_15__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_8CEmitter_15__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter_14__reduce_cython__(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_8CEmitter_14__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannySetupContext("__reduce_cython__", 0);
+
+ /* "(tree fragment)":2
+ * def __reduce_cython__(self):
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)") # <<<<<<<<<<<<<<
+ * def __setstate_cython__(self, __pyx_state):
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)")
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__38, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(1, 2, __pyx_L1_error)
+
+ /* "(tree fragment)":1
+ * def __reduce_cython__(self): # <<<<<<<<<<<<<<
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)")
+ * def __setstate_cython__(self, __pyx_state):
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("_yaml.CEmitter.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "(tree fragment)":3
+ * def __reduce_cython__(self):
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)")
+ * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)")
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_8CEmitter_17__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/
+static PyObject *__pyx_pw_5_yaml_8CEmitter_17__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter_16__setstate_cython__(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_8CEmitter_16__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannySetupContext("__setstate_cython__", 0);
+
+ /* "(tree fragment)":4
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)")
+ * def __setstate_cython__(self, __pyx_state):
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)") # <<<<<<<<<<<<<<
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__39, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __PYX_ERR(1, 4, __pyx_L1_error)
+
+ /* "(tree fragment)":3
+ * def __reduce_cython__(self):
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)")
+ * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)")
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("_yaml.CEmitter.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":1518
+ * return 1
+ *
+ * cdef int output_handler(void *data, char *buffer, size_t size) except 0: # <<<<<<<<<<<<<<
+ * cdef CEmitter emitter
+ * emitter = <CEmitter>data
+ */
+
+static int __pyx_f_5_yaml_output_handler(void *__pyx_v_data, char *__pyx_v_buffer, size_t __pyx_v_size) {
+ struct __pyx_obj_5_yaml_CEmitter *__pyx_v_emitter = 0;
+ PyObject *__pyx_v_value = NULL;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ __Pyx_RefNannySetupContext("output_handler", 0);
+
+ /* "_yaml.pyx":1520
+ * cdef int output_handler(void *data, char *buffer, size_t size) except 0:
+ * cdef CEmitter emitter
+ * emitter = <CEmitter>data # <<<<<<<<<<<<<<
+ * if emitter.dump_unicode == 0:
+ * value = PyString_FromStringAndSize(buffer, size)
+ */
+ __pyx_t_1 = ((PyObject *)__pyx_v_data);
+ __Pyx_INCREF(__pyx_t_1);
+ __pyx_v_emitter = ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1521
+ * cdef CEmitter emitter
+ * emitter = <CEmitter>data
+ * if emitter.dump_unicode == 0: # <<<<<<<<<<<<<<
+ * value = PyString_FromStringAndSize(buffer, size)
+ * else:
+ */
+ __pyx_t_2 = ((__pyx_v_emitter->dump_unicode == 0) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1522
+ * emitter = <CEmitter>data
+ * if emitter.dump_unicode == 0:
+ * value = PyString_FromStringAndSize(buffer, size) # <<<<<<<<<<<<<<
+ * else:
+ * value = PyUnicode_DecodeUTF8(buffer, size, 'strict')
+ */
+ __pyx_t_1 = PyString_FromStringAndSize(__pyx_v_buffer, __pyx_v_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1522, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_value = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1521
+ * cdef CEmitter emitter
+ * emitter = <CEmitter>data
+ * if emitter.dump_unicode == 0: # <<<<<<<<<<<<<<
+ * value = PyString_FromStringAndSize(buffer, size)
+ * else:
+ */
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1524
+ * value = PyString_FromStringAndSize(buffer, size)
+ * else:
+ * value = PyUnicode_DecodeUTF8(buffer, size, 'strict') # <<<<<<<<<<<<<<
+ * emitter.stream.write(value)
+ * return 1
+ */
+ /*else*/ {
+ __pyx_t_1 = PyUnicode_DecodeUTF8(__pyx_v_buffer, __pyx_v_size, ((char *)"strict")); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1524, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_value = __pyx_t_1;
+ __pyx_t_1 = 0;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":1525
+ * else:
+ * value = PyUnicode_DecodeUTF8(buffer, size, 'strict')
+ * emitter.stream.write(value) # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_emitter->stream, __pyx_n_s_write); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1525, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = NULL;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_3, function);
+ }
+ }
+ __pyx_t_1 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_value) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_value);
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1525, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1526
+ * value = PyUnicode_DecodeUTF8(buffer, size, 'strict')
+ * emitter.stream.write(value)
+ * return 1 # <<<<<<<<<<<<<<
+ *
+ */
+ __pyx_r = 1;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":1518
+ * return 1
+ *
+ * cdef int output_handler(void *data, char *buffer, size_t size) except 0: # <<<<<<<<<<<<<<
+ * cdef CEmitter emitter
+ * emitter = <CEmitter>data
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("_yaml.output_handler", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_emitter);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
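+
+/* Summary of the callback above: output_handler() is the write handler this
+ * module hands to libyaml's emitter.  libyaml calls it whenever a chunk of
+ * emitted YAML is ready; the opaque `data` pointer is cast back to the
+ * owning CEmitter, the byte buffer is wrapped either as a native str
+ * (dump_unicode == 0) or decoded as strict UTF-8, and the result is pushed
+ * to the Python-level stream via stream.write(value).  Returning 1 reports
+ * success, and the `except 0` declaration in _yaml.pyx makes a 0 return the
+ * signal that a Python exception is pending. */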
+
+/* "(tree fragment)":1
+ * def __pyx_unpickle_Mark(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<<
+ * cdef object __pyx_PickleError
+ * cdef object __pyx_result
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_5__pyx_unpickle_Mark(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyMethodDef __pyx_mdef_5_yaml_5__pyx_unpickle_Mark = {"__pyx_unpickle_Mark", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_5_yaml_5__pyx_unpickle_Mark, METH_VARARGS|METH_KEYWORDS, 0};
+static PyObject *__pyx_pw_5_yaml_5__pyx_unpickle_Mark(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ PyObject *__pyx_v___pyx_type = 0;
+ long __pyx_v___pyx_checksum;
+ PyObject *__pyx_v___pyx_state = 0;
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__pyx_unpickle_Mark (wrapper)", 0);
+ {
+ static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0};
+ PyObject* values[3] = {0,0,0};
+ if (unlikely(__pyx_kwds)) {
+ Py_ssize_t kw_args;
+ const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
+ switch (pos_args) {
+ case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ CYTHON_FALLTHROUGH;
+ case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ CYTHON_FALLTHROUGH;
+ case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ CYTHON_FALLTHROUGH;
+ case 0: break;
+ default: goto __pyx_L5_argtuple_error;
+ }
+ kw_args = PyDict_Size(__pyx_kwds);
+ switch (pos_args) {
+ case 0:
+ if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--;
+ else goto __pyx_L5_argtuple_error;
+ CYTHON_FALLTHROUGH;
+ case 1:
+ if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_Mark", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 2:
+ if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_Mark", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error)
+ }
+ }
+ if (unlikely(kw_args > 0)) {
+ if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_Mark") < 0)) __PYX_ERR(1, 1, __pyx_L3_error)
+ }
+ } else if (PyTuple_GET_SIZE(__pyx_args) != 3) {
+ goto __pyx_L5_argtuple_error;
+ } else {
+ values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ }
+ __pyx_v___pyx_type = values[0];
+ __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error)
+ __pyx_v___pyx_state = values[2];
+ }
+ goto __pyx_L4_argument_unpacking_done;
+ __pyx_L5_argtuple_error:;
+ __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_Mark", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error)
+ __pyx_L3_error:;
+ __Pyx_AddTraceback("_yaml.__pyx_unpickle_Mark", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __Pyx_RefNannyFinishContext();
+ return NULL;
+ __pyx_L4_argument_unpacking_done:;
+ __pyx_r = __pyx_pf_5_yaml_4__pyx_unpickle_Mark(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state);
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4__pyx_unpickle_Mark(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) {
+ PyObject *__pyx_v___pyx_PickleError = 0;
+ PyObject *__pyx_v___pyx_result = 0;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ PyObject *__pyx_t_5 = NULL;
+ int __pyx_t_6;
+ __Pyx_RefNannySetupContext("__pyx_unpickle_Mark", 0);
+
+ /* "(tree fragment)":4
+ * cdef object __pyx_PickleError
+ * cdef object __pyx_result
+ * if __pyx_checksum != 0x3fa45b6: # <<<<<<<<<<<<<<
+ * from pickle import PickleError as __pyx_PickleError
+ * raise __pyx_PickleError("Incompatible checksums (%s vs 0x3fa45b6 = (buffer, column, index, line, name, pointer))" % __pyx_checksum)
+ */
+ __pyx_t_1 = ((__pyx_v___pyx_checksum != 0x3fa45b6) != 0);
+ if (__pyx_t_1) {
+
+ /* "(tree fragment)":5
+ * cdef object __pyx_result
+ * if __pyx_checksum != 0x3fa45b6:
+ * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<<
+ * raise __pyx_PickleError("Incompatible checksums (%s vs 0x3fa45b6 = (buffer, column, index, line, name, pointer))" % __pyx_checksum)
+ * __pyx_result = Mark.__new__(__pyx_type)
+ */
+ __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(__pyx_n_s_PickleError);
+ __Pyx_GIVEREF(__pyx_n_s_PickleError);
+ PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError);
+ __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, -1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(__pyx_t_2);
+ __pyx_v___pyx_PickleError = __pyx_t_2;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "(tree fragment)":6
+ * if __pyx_checksum != 0x3fa45b6:
+ * from pickle import PickleError as __pyx_PickleError
+ * raise __pyx_PickleError("Incompatible checksums (%s vs 0x3fa45b6 = (buffer, column, index, line, name, pointer))" % __pyx_checksum) # <<<<<<<<<<<<<<
+ * __pyx_result = Mark.__new__(__pyx_type)
+ * if __pyx_state is not None:
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0x3f, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_INCREF(__pyx_v___pyx_PickleError);
+ __pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL;
+ if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) {
+ __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2);
+ if (likely(__pyx_t_5)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);
+ __Pyx_INCREF(__pyx_t_5);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_2, function);
+ }
+ }
+ __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __PYX_ERR(1, 6, __pyx_L1_error)
+
+ /* "(tree fragment)":4
+ * cdef object __pyx_PickleError
+ * cdef object __pyx_result
+ * if __pyx_checksum != 0x3fa45b6: # <<<<<<<<<<<<<<
+ * from pickle import PickleError as __pyx_PickleError
+ * raise __pyx_PickleError("Incompatible checksums (%s vs 0x3fa45b6 = (buffer, column, index, line, name, pointer))" % __pyx_checksum)
+ */
+ }
+
+ /* "(tree fragment)":7
+ * from pickle import PickleError as __pyx_PickleError
+ * raise __pyx_PickleError("Incompatible checksums (%s vs 0x3fa45b6 = (buffer, column, index, line, name, pointer))" % __pyx_checksum)
+ * __pyx_result = Mark.__new__(__pyx_type) # <<<<<<<<<<<<<<
+ * if __pyx_state is not None:
+ * __pyx_unpickle_Mark__set_state(<Mark> __pyx_result, __pyx_state)
+ */
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_5_yaml_Mark), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 7, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_4 = NULL;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) {
+ __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2);
+ if (likely(__pyx_t_4)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);
+ __Pyx_INCREF(__pyx_t_4);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_2, function);
+ }
+ }
+ __pyx_t_3 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type);
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_v___pyx_result = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "(tree fragment)":8
+ * raise __pyx_PickleError("Incompatible checksums (%s vs 0x3fa45b6 = (buffer, column, index, line, name, pointer))" % __pyx_checksum)
+ * __pyx_result = Mark.__new__(__pyx_type)
+ * if __pyx_state is not None: # <<<<<<<<<<<<<<
+ * __pyx_unpickle_Mark__set_state(<Mark> __pyx_result, __pyx_state)
+ * return __pyx_result
+ */
+ __pyx_t_1 = (__pyx_v___pyx_state != Py_None);
+ __pyx_t_6 = (__pyx_t_1 != 0);
+ if (__pyx_t_6) {
+
+ /* "(tree fragment)":9
+ * __pyx_result = Mark.__new__(__pyx_type)
+ * if __pyx_state is not None:
+ * __pyx_unpickle_Mark__set_state(<Mark> __pyx_result, __pyx_state) # <<<<<<<<<<<<<<
+ * return __pyx_result
+ * cdef __pyx_unpickle_Mark__set_state(Mark __pyx_result, tuple __pyx_state):
+ */
+ if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error)
+ __pyx_t_3 = __pyx_f_5_yaml___pyx_unpickle_Mark__set_state(((struct __pyx_obj_5_yaml_Mark *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 9, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "(tree fragment)":8
+ * raise __pyx_PickleError("Incompatible checksums (%s vs 0x3fa45b6 = (buffer, column, index, line, name, pointer))" % __pyx_checksum)
+ * __pyx_result = Mark.__new__(__pyx_type)
+ * if __pyx_state is not None: # <<<<<<<<<<<<<<
+ * __pyx_unpickle_Mark__set_state(<Mark> __pyx_result, __pyx_state)
+ * return __pyx_result
+ */
+ }
+
+ /* "(tree fragment)":10
+ * if __pyx_state is not None:
+ * __pyx_unpickle_Mark__set_state(<Mark> __pyx_result, __pyx_state)
+ * return __pyx_result # <<<<<<<<<<<<<<
+ * cdef __pyx_unpickle_Mark__set_state(Mark __pyx_result, tuple __pyx_state):
+ * __pyx_result.buffer = __pyx_state[0]; __pyx_result.column = __pyx_state[1]; __pyx_result.index = __pyx_state[2]; __pyx_result.line = __pyx_state[3]; __pyx_result.name = __pyx_state[4]; __pyx_result.pointer = __pyx_state[5]
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v___pyx_result);
+ __pyx_r = __pyx_v___pyx_result;
+ goto __pyx_L0;
+
+ /* "(tree fragment)":1
+ * def __pyx_unpickle_Mark(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<<
+ * cdef object __pyx_PickleError
+ * cdef object __pyx_result
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_AddTraceback("_yaml.__pyx_unpickle_Mark", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v___pyx_PickleError);
+ __Pyx_XDECREF(__pyx_v___pyx_result);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "(tree fragment)":11
+ * __pyx_unpickle_Mark__set_state(<Mark> __pyx_result, __pyx_state)
+ * return __pyx_result
+ * cdef __pyx_unpickle_Mark__set_state(Mark __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<<
+ * __pyx_result.buffer = __pyx_state[0]; __pyx_result.column = __pyx_state[1]; __pyx_result.index = __pyx_state[2]; __pyx_result.line = __pyx_state[3]; __pyx_result.name = __pyx_state[4]; __pyx_result.pointer = __pyx_state[5]
+ * if len(__pyx_state) > 6 and hasattr(__pyx_result, '__dict__'):
+ */
+
+static PyObject *__pyx_f_5_yaml___pyx_unpickle_Mark__set_state(struct __pyx_obj_5_yaml_Mark *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ size_t __pyx_t_2;
+ int __pyx_t_3;
+ Py_ssize_t __pyx_t_4;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ PyObject *__pyx_t_7 = NULL;
+ PyObject *__pyx_t_8 = NULL;
+ PyObject *__pyx_t_9 = NULL;
+ __Pyx_RefNannySetupContext("__pyx_unpickle_Mark__set_state", 0);
+
+ /* "(tree fragment)":12
+ * return __pyx_result
+ * cdef __pyx_unpickle_Mark__set_state(Mark __pyx_result, tuple __pyx_state):
+ * __pyx_result.buffer = __pyx_state[0]; __pyx_result.column = __pyx_state[1]; __pyx_result.index = __pyx_state[2]; __pyx_result.line = __pyx_state[3]; __pyx_result.name = __pyx_state[4]; __pyx_result.pointer = __pyx_state[5] # <<<<<<<<<<<<<<
+ * if len(__pyx_state) > 6 and hasattr(__pyx_result, '__dict__'):
+ * __pyx_result.__dict__.update(__pyx_state[6])
+ */
+ if (unlikely(__pyx_v___pyx_state == Py_None)) {
+ PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable");
+ __PYX_ERR(1, 12, __pyx_L1_error)
+ }
+ __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ __Pyx_GOTREF(__pyx_v___pyx_result->buffer);
+ __Pyx_DECREF(__pyx_v___pyx_result->buffer);
+ __pyx_v___pyx_result->buffer = __pyx_t_1;
+ __pyx_t_1 = 0;
+ if (unlikely(__pyx_v___pyx_state == Py_None)) {
+ PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable");
+ __PYX_ERR(1, 12, __pyx_L1_error)
+ }
+ __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyInt_As_size_t(__pyx_t_1); if (unlikely((__pyx_t_2 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v___pyx_result->column = __pyx_t_2;
+ if (unlikely(__pyx_v___pyx_state == Py_None)) {
+ PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable");
+ __PYX_ERR(1, 12, __pyx_L1_error)
+ }
+ __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyInt_As_size_t(__pyx_t_1); if (unlikely((__pyx_t_2 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v___pyx_result->index = __pyx_t_2;
+ if (unlikely(__pyx_v___pyx_state == Py_None)) {
+ PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable");
+ __PYX_ERR(1, 12, __pyx_L1_error)
+ }
+ __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyInt_As_size_t(__pyx_t_1); if (unlikely((__pyx_t_2 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 12, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v___pyx_result->line = __pyx_t_2;
+ if (unlikely(__pyx_v___pyx_state == Py_None)) {
+ PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable");
+ __PYX_ERR(1, 12, __pyx_L1_error)
+ }
+ __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ __Pyx_GOTREF(__pyx_v___pyx_result->name);
+ __Pyx_DECREF(__pyx_v___pyx_result->name);
+ __pyx_v___pyx_result->name = __pyx_t_1;
+ __pyx_t_1 = 0;
+ if (unlikely(__pyx_v___pyx_state == Py_None)) {
+ PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable");
+ __PYX_ERR(1, 12, __pyx_L1_error)
+ }
+ __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 5, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ __Pyx_GOTREF(__pyx_v___pyx_result->pointer);
+ __Pyx_DECREF(__pyx_v___pyx_result->pointer);
+ __pyx_v___pyx_result->pointer = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "(tree fragment)":13
+ * cdef __pyx_unpickle_Mark__set_state(Mark __pyx_result, tuple __pyx_state):
+ * __pyx_result.buffer = __pyx_state[0]; __pyx_result.column = __pyx_state[1]; __pyx_result.index = __pyx_state[2]; __pyx_result.line = __pyx_state[3]; __pyx_result.name = __pyx_state[4]; __pyx_result.pointer = __pyx_state[5]
+ * if len(__pyx_state) > 6 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<<
+ * __pyx_result.__dict__.update(__pyx_state[6])
+ */
+ if (unlikely(__pyx_v___pyx_state == Py_None)) {
+ PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()");
+ __PYX_ERR(1, 13, __pyx_L1_error)
+ }
+ __pyx_t_4 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(1, 13, __pyx_L1_error)
+ __pyx_t_5 = ((__pyx_t_4 > 6) != 0);
+ if (__pyx_t_5) {
+ } else {
+ __pyx_t_3 = __pyx_t_5;
+ goto __pyx_L4_bool_binop_done;
+ }
+ __pyx_t_5 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(1, 13, __pyx_L1_error)
+ __pyx_t_6 = (__pyx_t_5 != 0);
+ __pyx_t_3 = __pyx_t_6;
+ __pyx_L4_bool_binop_done:;
+ if (__pyx_t_3) {
+
+ /* "(tree fragment)":14
+ * __pyx_result.buffer = __pyx_state[0]; __pyx_result.column = __pyx_state[1]; __pyx_result.index = __pyx_state[2]; __pyx_result.line = __pyx_state[3]; __pyx_result.name = __pyx_state[4]; __pyx_result.pointer = __pyx_state[5]
+ * if len(__pyx_state) > 6 and hasattr(__pyx_result, '__dict__'):
+ * __pyx_result.__dict__.update(__pyx_state[6]) # <<<<<<<<<<<<<<
+ */
+ __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_update); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 14, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ if (unlikely(__pyx_v___pyx_state == Py_None)) {
+ PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable");
+ __PYX_ERR(1, 14, __pyx_L1_error)
+ }
+ __pyx_t_7 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 6, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 14, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_9 = NULL;
+ if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) {
+ __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_8);
+ if (likely(__pyx_t_9)) {
+ PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8);
+ __Pyx_INCREF(__pyx_t_9);
+ __Pyx_INCREF(function);
+ __Pyx_DECREF_SET(__pyx_t_8, function);
+ }
+ }
+ __pyx_t_1 = (__pyx_t_9) ? __Pyx_PyObject_Call2Args(__pyx_t_8, __pyx_t_9, __pyx_t_7) : __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_t_7);
+ __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 14, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "(tree fragment)":13
+ * cdef __pyx_unpickle_Mark__set_state(Mark __pyx_result, tuple __pyx_state):
+ * __pyx_result.buffer = __pyx_state[0]; __pyx_result.column = __pyx_state[1]; __pyx_result.index = __pyx_state[2]; __pyx_result.line = __pyx_state[3]; __pyx_result.name = __pyx_state[4]; __pyx_result.pointer = __pyx_state[5]
+ * if len(__pyx_state) > 6 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<<
+ * __pyx_result.__dict__.update(__pyx_state[6])
+ */
+ }
+
+ /* "(tree fragment)":11
+ * __pyx_unpickle_Mark__set_state(<Mark> __pyx_result, __pyx_state)
+ * return __pyx_result
+ * cdef __pyx_unpickle_Mark__set_state(Mark __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<<
+ * __pyx_result.buffer = __pyx_state[0]; __pyx_result.column = __pyx_state[1]; __pyx_result.index = __pyx_state[2]; __pyx_result.line = __pyx_state[3]; __pyx_result.name = __pyx_state[4]; __pyx_result.pointer = __pyx_state[5]
+ * if len(__pyx_state) > 6 and hasattr(__pyx_result, '__dict__'):
+ */
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_XDECREF(__pyx_t_8);
+ __Pyx_XDECREF(__pyx_t_9);
+ __Pyx_AddTraceback("_yaml.__pyx_unpickle_Mark__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
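+
+/* Taken together, the two helpers above implement Cython's auto-generated
+ * pickle protocol for Mark.  In the style of the quoted .pyx fragments, the
+ * behaviour is roughly:
+ *
+ *     def __pyx_unpickle_Mark(cls, checksum, state):
+ *         if checksum != 0x3fa45b6:          # fingerprint of Mark's layout
+ *             raise pickle.PickleError(...)
+ *         obj = Mark.__new__(cls)
+ *         if state is not None:
+ *             (obj.buffer, obj.column, obj.index,
+ *              obj.line, obj.name, obj.pointer) = state[:6]
+ *             if len(state) > 6 and hasattr(obj, '__dict__'):
+ *                 obj.__dict__.update(state[6])
+ *         return obj
+ *
+ * The checksum guards against unpickling state produced by a build of this
+ * module whose Mark attribute layout differs. */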
+
+static PyObject *__pyx_tp_new_5_yaml_Mark(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) {
+ struct __pyx_obj_5_yaml_Mark *p;
+ PyObject *o;
+ if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {
+ o = (*t->tp_alloc)(t, 0);
+ } else {
+ o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0);
+ }
+ if (unlikely(!o)) return 0;
+ p = ((struct __pyx_obj_5_yaml_Mark *)o);
+ p->name = Py_None; Py_INCREF(Py_None);
+ p->buffer = Py_None; Py_INCREF(Py_None);
+ p->pointer = Py_None; Py_INCREF(Py_None);
+ return o;
+}
+
+static void __pyx_tp_dealloc_5_yaml_Mark(PyObject *o) {
+ struct __pyx_obj_5_yaml_Mark *p = (struct __pyx_obj_5_yaml_Mark *)o;
+ #if CYTHON_USE_TP_FINALIZE
+ if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) {
+ if (PyObject_CallFinalizerFromDealloc(o)) return;
+ }
+ #endif
+ PyObject_GC_UnTrack(o);
+ Py_CLEAR(p->name);
+ Py_CLEAR(p->buffer);
+ Py_CLEAR(p->pointer);
+ (*Py_TYPE(o)->tp_free)(o);
+}
+
+static int __pyx_tp_traverse_5_yaml_Mark(PyObject *o, visitproc v, void *a) {
+ int e;
+ struct __pyx_obj_5_yaml_Mark *p = (struct __pyx_obj_5_yaml_Mark *)o;
+ if (p->name) {
+ e = (*v)(p->name, a); if (e) return e;
+ }
+ if (p->buffer) {
+ e = (*v)(p->buffer, a); if (e) return e;
+ }
+ if (p->pointer) {
+ e = (*v)(p->pointer, a); if (e) return e;
+ }
+ return 0;
+}
+
+static int __pyx_tp_clear_5_yaml_Mark(PyObject *o) {
+ PyObject* tmp;
+ struct __pyx_obj_5_yaml_Mark *p = (struct __pyx_obj_5_yaml_Mark *)o;
+ tmp = ((PyObject*)p->name);
+ p->name = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->buffer);
+ p->buffer = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->pointer);
+ p->pointer = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ return 0;
+}
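+
+/* The slot functions above wire Mark into CPython's cyclic garbage
+ * collector: tp_new initialises the object's PyObject* fields to None,
+ * tp_dealloc untracks the object and drops those references, tp_traverse
+ * visits each owned reference for the cycle detector, and tp_clear swaps
+ * every field back to None so that reference cycles can be broken. */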
+
+static PyObject *__pyx_getprop_5_yaml_4Mark_name(PyObject *o, CYTHON_UNUSED void *x) {
+ return __pyx_pw_5_yaml_4Mark_4name_1__get__(o);
+}
+
+static PyObject *__pyx_getprop_5_yaml_4Mark_index(PyObject *o, CYTHON_UNUSED void *x) {
+ return __pyx_pw_5_yaml_4Mark_5index_1__get__(o);
+}
+
+static PyObject *__pyx_getprop_5_yaml_4Mark_line(PyObject *o, CYTHON_UNUSED void *x) {
+ return __pyx_pw_5_yaml_4Mark_4line_1__get__(o);
+}
+
+static PyObject *__pyx_getprop_5_yaml_4Mark_column(PyObject *o, CYTHON_UNUSED void *x) {
+ return __pyx_pw_5_yaml_4Mark_6column_1__get__(o);
+}
+
+static PyObject *__pyx_getprop_5_yaml_4Mark_buffer(PyObject *o, CYTHON_UNUSED void *x) {
+ return __pyx_pw_5_yaml_4Mark_6buffer_1__get__(o);
+}
+
+static PyObject *__pyx_getprop_5_yaml_4Mark_pointer(PyObject *o, CYTHON_UNUSED void *x) {
+ return __pyx_pw_5_yaml_4Mark_7pointer_1__get__(o);
+}
+
+static PyMethodDef __pyx_methods_5_yaml_Mark[] = {
+ {"get_snippet", (PyCFunction)__pyx_pw_5_yaml_4Mark_3get_snippet, METH_NOARGS, 0},
+ {"__reduce_cython__", (PyCFunction)__pyx_pw_5_yaml_4Mark_7__reduce_cython__, METH_NOARGS, 0},
+ {"__setstate_cython__", (PyCFunction)__pyx_pw_5_yaml_4Mark_9__setstate_cython__, METH_O, 0},
+ {0, 0, 0, 0}
+};
+
+static struct PyGetSetDef __pyx_getsets_5_yaml_Mark[] = {
+ {(char *)"name", __pyx_getprop_5_yaml_4Mark_name, 0, (char *)0, 0},
+ {(char *)"index", __pyx_getprop_5_yaml_4Mark_index, 0, (char *)0, 0},
+ {(char *)"line", __pyx_getprop_5_yaml_4Mark_line, 0, (char *)0, 0},
+ {(char *)"column", __pyx_getprop_5_yaml_4Mark_column, 0, (char *)0, 0},
+ {(char *)"buffer", __pyx_getprop_5_yaml_4Mark_buffer, 0, (char *)0, 0},
+ {(char *)"pointer", __pyx_getprop_5_yaml_4Mark_pointer, 0, (char *)0, 0},
+ {0, 0, 0, 0, 0}
+};
+
+static PyTypeObject __pyx_type_5_yaml_Mark = {
+ PyVarObject_HEAD_INIT(0, 0)
+ "_yaml.Mark", /*tp_name*/
+ sizeof(struct __pyx_obj_5_yaml_Mark), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ __pyx_tp_dealloc_5_yaml_Mark, /*tp_dealloc*/
+ #if PY_VERSION_HEX < 0x030800b4
+ 0, /*tp_print*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030800b4
+ 0, /*tp_vectorcall_offset*/
+ #endif
+ 0, /*tp_getattr*/
+ 0, /*tp_setattr*/
+ #if PY_MAJOR_VERSION < 3
+ 0, /*tp_compare*/
+ #endif
+ #if PY_MAJOR_VERSION >= 3
+ 0, /*tp_as_async*/
+ #endif
+ 0, /*tp_repr*/
+ 0, /*tp_as_number*/
+ 0, /*tp_as_sequence*/
+ 0, /*tp_as_mapping*/
+ 0, /*tp_hash*/
+ 0, /*tp_call*/
+ __pyx_pw_5_yaml_4Mark_5__str__, /*tp_str*/
+ 0, /*tp_getattro*/
+ 0, /*tp_setattro*/
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/
+ 0, /*tp_doc*/
+ __pyx_tp_traverse_5_yaml_Mark, /*tp_traverse*/
+ __pyx_tp_clear_5_yaml_Mark, /*tp_clear*/
+ 0, /*tp_richcompare*/
+ 0, /*tp_weaklistoffset*/
+ 0, /*tp_iter*/
+ 0, /*tp_iternext*/
+ __pyx_methods_5_yaml_Mark, /*tp_methods*/
+ 0, /*tp_members*/
+ __pyx_getsets_5_yaml_Mark, /*tp_getset*/
+ 0, /*tp_base*/
+ 0, /*tp_dict*/
+ 0, /*tp_descr_get*/
+ 0, /*tp_descr_set*/
+ 0, /*tp_dictoffset*/
+ __pyx_pw_5_yaml_4Mark_1__init__, /*tp_init*/
+ 0, /*tp_alloc*/
+ __pyx_tp_new_5_yaml_Mark, /*tp_new*/
+ 0, /*tp_free*/
+ 0, /*tp_is_gc*/
+ 0, /*tp_bases*/
+ 0, /*tp_mro*/
+ 0, /*tp_cache*/
+ 0, /*tp_subclasses*/
+ 0, /*tp_weaklist*/
+ 0, /*tp_del*/
+ 0, /*tp_version_tag*/
+ #if PY_VERSION_HEX >= 0x030400a1
+ 0, /*tp_finalize*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030800b1
+ 0, /*tp_vectorcall*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
+ 0, /*tp_print*/
+ #endif
+};
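+
+/* The getset table above exposes name, index, line, column, buffer and
+ * pointer as read-only properties on _yaml.Mark, while tp_str and tp_init
+ * route __str__ and __init__ to the generated wrappers.  Mark is the object
+ * PyYAML attaches to parser and emitter errors to describe where in the
+ * input a problem occurred. */
+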
+static struct __pyx_vtabstruct_5_yaml_CParser __pyx_vtable_5_yaml_CParser;
+
+static PyObject *__pyx_tp_new_5_yaml_CParser(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) {
+ struct __pyx_obj_5_yaml_CParser *p;
+ PyObject *o;
+ if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {
+ o = (*t->tp_alloc)(t, 0);
+ } else {
+ o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0);
+ }
+ if (unlikely(!o)) return 0;
+ p = ((struct __pyx_obj_5_yaml_CParser *)o);
+ p->__pyx_vtab = __pyx_vtabptr_5_yaml_CParser;
+ p->stream = Py_None; Py_INCREF(Py_None);
+ p->stream_name = Py_None; Py_INCREF(Py_None);
+ p->current_token = Py_None; Py_INCREF(Py_None);
+ p->current_event = Py_None; Py_INCREF(Py_None);
+ p->anchors = Py_None; Py_INCREF(Py_None);
+ p->stream_cache = Py_None; Py_INCREF(Py_None);
+ return o;
+}
+
+static void __pyx_tp_dealloc_5_yaml_CParser(PyObject *o) {
+ struct __pyx_obj_5_yaml_CParser *p = (struct __pyx_obj_5_yaml_CParser *)o;
+ #if CYTHON_USE_TP_FINALIZE
+ if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) {
+ if (PyObject_CallFinalizerFromDealloc(o)) return;
+ }
+ #endif
+ PyObject_GC_UnTrack(o);
+ {
+ PyObject *etype, *eval, *etb;
+ PyErr_Fetch(&etype, &eval, &etb);
+ ++Py_REFCNT(o);
+ __pyx_pw_5_yaml_7CParser_3__dealloc__(o);
+ --Py_REFCNT(o);
+ PyErr_Restore(etype, eval, etb);
+ }
+ Py_CLEAR(p->stream);
+ Py_CLEAR(p->stream_name);
+ Py_CLEAR(p->current_token);
+ Py_CLEAR(p->current_event);
+ Py_CLEAR(p->anchors);
+ Py_CLEAR(p->stream_cache);
+ (*Py_TYPE(o)->tp_free)(o);
+}
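+
+/* The dealloc above uses Cython's standard pattern for classes with a
+ * user-defined __dealloc__: any in-flight exception is stashed with
+ * PyErr_Fetch/PyErr_Restore, and the refcount is temporarily bumped so the
+ * object cannot be torn down a second time while __dealloc__ (which releases
+ * the underlying libyaml parser state) runs as ordinary Python-visible code;
+ * only then are the six owned PyObject* members cleared. */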
+
+static int __pyx_tp_traverse_5_yaml_CParser(PyObject *o, visitproc v, void *a) {
+ int e;
+ struct __pyx_obj_5_yaml_CParser *p = (struct __pyx_obj_5_yaml_CParser *)o;
+ if (p->stream) {
+ e = (*v)(p->stream, a); if (e) return e;
+ }
+ if (p->stream_name) {
+ e = (*v)(p->stream_name, a); if (e) return e;
+ }
+ if (p->current_token) {
+ e = (*v)(p->current_token, a); if (e) return e;
+ }
+ if (p->current_event) {
+ e = (*v)(p->current_event, a); if (e) return e;
+ }
+ if (p->anchors) {
+ e = (*v)(p->anchors, a); if (e) return e;
+ }
+ if (p->stream_cache) {
+ e = (*v)(p->stream_cache, a); if (e) return e;
+ }
+ return 0;
+}
+
+static int __pyx_tp_clear_5_yaml_CParser(PyObject *o) {
+ PyObject* tmp;
+ struct __pyx_obj_5_yaml_CParser *p = (struct __pyx_obj_5_yaml_CParser *)o;
+ tmp = ((PyObject*)p->stream);
+ p->stream = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->stream_name);
+ p->stream_name = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->current_token);
+ p->current_token = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->current_event);
+ p->current_event = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->anchors);
+ p->anchors = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->stream_cache);
+ p->stream_cache = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ return 0;
+}
+
+static PyMethodDef __pyx_methods_5_yaml_CParser[] = {
+ {"dispose", (PyCFunction)__pyx_pw_5_yaml_7CParser_5dispose, METH_NOARGS, 0},
+ {"raw_scan", (PyCFunction)__pyx_pw_5_yaml_7CParser_7raw_scan, METH_NOARGS, 0},
+ {"get_token", (PyCFunction)__pyx_pw_5_yaml_7CParser_9get_token, METH_NOARGS, 0},
+ {"peek_token", (PyCFunction)__pyx_pw_5_yaml_7CParser_11peek_token, METH_NOARGS, 0},
+ {"check_token", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_5_yaml_7CParser_13check_token, METH_VARARGS|METH_KEYWORDS, 0},
+ {"raw_parse", (PyCFunction)__pyx_pw_5_yaml_7CParser_15raw_parse, METH_NOARGS, 0},
+ {"get_event", (PyCFunction)__pyx_pw_5_yaml_7CParser_17get_event, METH_NOARGS, 0},
+ {"peek_event", (PyCFunction)__pyx_pw_5_yaml_7CParser_19peek_event, METH_NOARGS, 0},
+ {"check_event", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_5_yaml_7CParser_21check_event, METH_VARARGS|METH_KEYWORDS, 0},
+ {"check_node", (PyCFunction)__pyx_pw_5_yaml_7CParser_23check_node, METH_NOARGS, 0},
+ {"get_node", (PyCFunction)__pyx_pw_5_yaml_7CParser_25get_node, METH_NOARGS, 0},
+ {"get_single_node", (PyCFunction)__pyx_pw_5_yaml_7CParser_27get_single_node, METH_NOARGS, 0},
+ {"__reduce_cython__", (PyCFunction)__pyx_pw_5_yaml_7CParser_29__reduce_cython__, METH_NOARGS, 0},
+ {"__setstate_cython__", (PyCFunction)__pyx_pw_5_yaml_7CParser_31__setstate_cython__, METH_O, 0},
+ {0, 0, 0, 0}
+};
+
+static PyTypeObject __pyx_type_5_yaml_CParser = {
+ PyVarObject_HEAD_INIT(0, 0)
+ "_yaml.CParser", /*tp_name*/
+ sizeof(struct __pyx_obj_5_yaml_CParser), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ __pyx_tp_dealloc_5_yaml_CParser, /*tp_dealloc*/
+ #if PY_VERSION_HEX < 0x030800b4
+ 0, /*tp_print*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030800b4
+ 0, /*tp_vectorcall_offset*/
+ #endif
+ 0, /*tp_getattr*/
+ 0, /*tp_setattr*/
+ #if PY_MAJOR_VERSION < 3
+ 0, /*tp_compare*/
+ #endif
+ #if PY_MAJOR_VERSION >= 3
+ 0, /*tp_as_async*/
+ #endif
+ 0, /*tp_repr*/
+ 0, /*tp_as_number*/
+ 0, /*tp_as_sequence*/
+ 0, /*tp_as_mapping*/
+ 0, /*tp_hash*/
+ 0, /*tp_call*/
+ 0, /*tp_str*/
+ 0, /*tp_getattro*/
+ 0, /*tp_setattro*/
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/
+ 0, /*tp_doc*/
+ __pyx_tp_traverse_5_yaml_CParser, /*tp_traverse*/
+ __pyx_tp_clear_5_yaml_CParser, /*tp_clear*/
+ 0, /*tp_richcompare*/
+ 0, /*tp_weaklistoffset*/
+ 0, /*tp_iter*/
+ 0, /*tp_iternext*/
+ __pyx_methods_5_yaml_CParser, /*tp_methods*/
+ 0, /*tp_members*/
+ 0, /*tp_getset*/
+ 0, /*tp_base*/
+ 0, /*tp_dict*/
+ 0, /*tp_descr_get*/
+ 0, /*tp_descr_set*/
+ 0, /*tp_dictoffset*/
+ __pyx_pw_5_yaml_7CParser_1__init__, /*tp_init*/
+ 0, /*tp_alloc*/
+ __pyx_tp_new_5_yaml_CParser, /*tp_new*/
+ 0, /*tp_free*/
+ 0, /*tp_is_gc*/
+ 0, /*tp_bases*/
+ 0, /*tp_mro*/
+ 0, /*tp_cache*/
+ 0, /*tp_subclasses*/
+ 0, /*tp_weaklist*/
+ 0, /*tp_del*/
+ 0, /*tp_version_tag*/
+ #if PY_VERSION_HEX >= 0x030400a1
+ 0, /*tp_finalize*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030800b1
+ 0, /*tp_vectorcall*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
+ 0, /*tp_print*/
+ #endif
+};
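+
+/* Usage sketch (assuming the standard PyYAML packaging, where yaml/cyaml.py
+ * subclasses _yaml.CParser as CLoader/CSafeLoader): the methods registered
+ * in the table above are what the pure-Python layers call to drive libyaml,
+ * e.g.
+ *
+ *     import yaml
+ *     data = yaml.load("a: 1", Loader=yaml.CLoader)   # CParser underneath
+ */
+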
+static struct __pyx_vtabstruct_5_yaml_CEmitter __pyx_vtable_5_yaml_CEmitter;
+
+static PyObject *__pyx_tp_new_5_yaml_CEmitter(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) {
+ struct __pyx_obj_5_yaml_CEmitter *p;
+ PyObject *o;
+ if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {
+ o = (*t->tp_alloc)(t, 0);
+ } else {
+ o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0);
+ }
+ if (unlikely(!o)) return 0;
+ p = ((struct __pyx_obj_5_yaml_CEmitter *)o);
+ p->__pyx_vtab = __pyx_vtabptr_5_yaml_CEmitter;
+ p->stream = Py_None; Py_INCREF(Py_None);
+ p->use_version = Py_None; Py_INCREF(Py_None);
+ p->use_tags = Py_None; Py_INCREF(Py_None);
+ p->serialized_nodes = Py_None; Py_INCREF(Py_None);
+ p->anchors = Py_None; Py_INCREF(Py_None);
+ p->use_encoding = Py_None; Py_INCREF(Py_None);
+ return o;
+}
+
+static void __pyx_tp_dealloc_5_yaml_CEmitter(PyObject *o) {
+ struct __pyx_obj_5_yaml_CEmitter *p = (struct __pyx_obj_5_yaml_CEmitter *)o;
+ #if CYTHON_USE_TP_FINALIZE
+ if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) {
+ if (PyObject_CallFinalizerFromDealloc(o)) return;
+ }
+ #endif
+ PyObject_GC_UnTrack(o);
+ {
+ PyObject *etype, *eval, *etb;
+ PyErr_Fetch(&etype, &eval, &etb);
+ ++Py_REFCNT(o);
+ __pyx_pw_5_yaml_8CEmitter_3__dealloc__(o);
+ --Py_REFCNT(o);
+ PyErr_Restore(etype, eval, etb);
+ }
+ Py_CLEAR(p->stream);
+ Py_CLEAR(p->use_version);
+ Py_CLEAR(p->use_tags);
+ Py_CLEAR(p->serialized_nodes);
+ Py_CLEAR(p->anchors);
+ Py_CLEAR(p->use_encoding);
+ (*Py_TYPE(o)->tp_free)(o);
+}
+
+static int __pyx_tp_traverse_5_yaml_CEmitter(PyObject *o, visitproc v, void *a) {
+ int e;
+ struct __pyx_obj_5_yaml_CEmitter *p = (struct __pyx_obj_5_yaml_CEmitter *)o;
+ if (p->stream) {
+ e = (*v)(p->stream, a); if (e) return e;
+ }
+ if (p->use_version) {
+ e = (*v)(p->use_version, a); if (e) return e;
+ }
+ if (p->use_tags) {
+ e = (*v)(p->use_tags, a); if (e) return e;
+ }
+ if (p->serialized_nodes) {
+ e = (*v)(p->serialized_nodes, a); if (e) return e;
+ }
+ if (p->anchors) {
+ e = (*v)(p->anchors, a); if (e) return e;
+ }
+ if (p->use_encoding) {
+ e = (*v)(p->use_encoding, a); if (e) return e;
+ }
+ return 0;
+}
+
+static int __pyx_tp_clear_5_yaml_CEmitter(PyObject *o) {
+ PyObject* tmp;
+ struct __pyx_obj_5_yaml_CEmitter *p = (struct __pyx_obj_5_yaml_CEmitter *)o;
+ tmp = ((PyObject*)p->stream);
+ p->stream = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->use_version);
+ p->use_version = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->use_tags);
+ p->use_tags = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->serialized_nodes);
+ p->serialized_nodes = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->anchors);
+ p->anchors = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->use_encoding);
+ p->use_encoding = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ return 0;
+}
+
+static PyMethodDef __pyx_methods_5_yaml_CEmitter[] = {
+ {"dispose", (PyCFunction)__pyx_pw_5_yaml_8CEmitter_5dispose, METH_NOARGS, 0},
+ {"emit", (PyCFunction)__pyx_pw_5_yaml_8CEmitter_7emit, METH_O, 0},
+ {"open", (PyCFunction)__pyx_pw_5_yaml_8CEmitter_9open, METH_NOARGS, 0},
+ {"close", (PyCFunction)__pyx_pw_5_yaml_8CEmitter_11close, METH_NOARGS, 0},
+ {"serialize", (PyCFunction)__pyx_pw_5_yaml_8CEmitter_13serialize, METH_O, 0},
+ {"__reduce_cython__", (PyCFunction)__pyx_pw_5_yaml_8CEmitter_15__reduce_cython__, METH_NOARGS, 0},
+ {"__setstate_cython__", (PyCFunction)__pyx_pw_5_yaml_8CEmitter_17__setstate_cython__, METH_O, 0},
+ {0, 0, 0, 0}
+};
+
+static PyTypeObject __pyx_type_5_yaml_CEmitter = {
+ PyVarObject_HEAD_INIT(0, 0)
+ "_yaml.CEmitter", /*tp_name*/
+ sizeof(struct __pyx_obj_5_yaml_CEmitter), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ __pyx_tp_dealloc_5_yaml_CEmitter, /*tp_dealloc*/
+ #if PY_VERSION_HEX < 0x030800b4
+ 0, /*tp_print*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030800b4
+ 0, /*tp_vectorcall_offset*/
+ #endif
+ 0, /*tp_getattr*/
+ 0, /*tp_setattr*/
+ #if PY_MAJOR_VERSION < 3
+ 0, /*tp_compare*/
+ #endif
+ #if PY_MAJOR_VERSION >= 3
+ 0, /*tp_as_async*/
+ #endif
+ 0, /*tp_repr*/
+ 0, /*tp_as_number*/
+ 0, /*tp_as_sequence*/
+ 0, /*tp_as_mapping*/
+ 0, /*tp_hash*/
+ 0, /*tp_call*/
+ 0, /*tp_str*/
+ 0, /*tp_getattro*/
+ 0, /*tp_setattro*/
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/
+ 0, /*tp_doc*/
+ __pyx_tp_traverse_5_yaml_CEmitter, /*tp_traverse*/
+ __pyx_tp_clear_5_yaml_CEmitter, /*tp_clear*/
+ 0, /*tp_richcompare*/
+ 0, /*tp_weaklistoffset*/
+ 0, /*tp_iter*/
+ 0, /*tp_iternext*/
+ __pyx_methods_5_yaml_CEmitter, /*tp_methods*/
+ 0, /*tp_members*/
+ 0, /*tp_getset*/
+ 0, /*tp_base*/
+ 0, /*tp_dict*/
+ 0, /*tp_descr_get*/
+ 0, /*tp_descr_set*/
+ 0, /*tp_dictoffset*/
+ __pyx_pw_5_yaml_8CEmitter_1__init__, /*tp_init*/
+ 0, /*tp_alloc*/
+ __pyx_tp_new_5_yaml_CEmitter, /*tp_new*/
+ 0, /*tp_free*/
+ 0, /*tp_is_gc*/
+ 0, /*tp_bases*/
+ 0, /*tp_mro*/
+ 0, /*tp_cache*/
+ 0, /*tp_subclasses*/
+ 0, /*tp_weaklist*/
+ 0, /*tp_del*/
+ 0, /*tp_version_tag*/
+ #if PY_VERSION_HEX >= 0x030400a1
+ 0, /*tp_finalize*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030800b1
+ 0, /*tp_vectorcall*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
+ 0, /*tp_print*/
+ #endif
+};
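+
+/* The emitter side mirrors the parser: yaml/cyaml.py builds CDumper on top
+ * of _yaml.CEmitter, so (as a hedged usage sketch)
+ *
+ *     yaml.dump({"a": 1}, Dumper=yaml.CDumper)
+ *
+ * routes open/serialize/close/emit calls through the method table above. */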
+
+static PyMethodDef __pyx_methods[] = {
+ {0, 0, 0, 0}
+};
+
+#if PY_MAJOR_VERSION >= 3
+#if CYTHON_PEP489_MULTI_PHASE_INIT
+static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/
+static int __pyx_pymod_exec__yaml(PyObject* module); /*proto*/
+static PyModuleDef_Slot __pyx_moduledef_slots[] = {
+ {Py_mod_create, (void*)__pyx_pymod_create},
+ {Py_mod_exec, (void*)__pyx_pymod_exec__yaml},
+ {0, NULL}
+};
+#endif
+
+static struct PyModuleDef __pyx_moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "_yaml",
+ 0, /* m_doc */
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ 0, /* m_size */
+ #else
+ -1, /* m_size */
+ #endif
+ __pyx_methods /* m_methods */,
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ __pyx_moduledef_slots, /* m_slots */
+ #else
+ NULL, /* m_reload */
+ #endif
+ NULL, /* m_traverse */
+ NULL, /* m_clear */
+ NULL /* m_free */
+};
+#endif
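+
+/* Module initialisation follows PEP 489 when CYTHON_PEP489_MULTI_PHASE_INIT
+ * is enabled: the Py_mod_create/Py_mod_exec slots above let the interpreter
+ * create the module object and run the exec step separately (m_size 0).
+ * Otherwise a classic single-phase module is built with m_size -1 and no
+ * slot table. */
+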
+#ifndef CYTHON_SMALL_CODE
+#if defined(__clang__)
+ #define CYTHON_SMALL_CODE
+#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))
+ #define CYTHON_SMALL_CODE __attribute__((cold))
+#else
+ #define CYTHON_SMALL_CODE
+#endif
+#endif
+
+static __Pyx_StringTabEntry __pyx_string_tab[] = {
+ {&__pyx_n_s_AliasEvent, __pyx_k_AliasEvent, sizeof(__pyx_k_AliasEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_AliasToken, __pyx_k_AliasToken, sizeof(__pyx_k_AliasToken), 0, 0, 1, 1},
+ {&__pyx_n_s_AnchorToken, __pyx_k_AnchorToken, sizeof(__pyx_k_AnchorToken), 0, 0, 1, 1},
+ {&__pyx_n_s_AttributeError, __pyx_k_AttributeError, sizeof(__pyx_k_AttributeError), 0, 0, 1, 1},
+ {&__pyx_n_s_BlockEndToken, __pyx_k_BlockEndToken, sizeof(__pyx_k_BlockEndToken), 0, 0, 1, 1},
+ {&__pyx_n_s_BlockEntryToken, __pyx_k_BlockEntryToken, sizeof(__pyx_k_BlockEntryToken), 0, 0, 1, 1},
+ {&__pyx_n_s_BlockMappingStartToken, __pyx_k_BlockMappingStartToken, sizeof(__pyx_k_BlockMappingStartToken), 0, 0, 1, 1},
+ {&__pyx_n_s_BlockSequenceStartToken, __pyx_k_BlockSequenceStartToken, sizeof(__pyx_k_BlockSequenceStartToken), 0, 0, 1, 1},
+ {&__pyx_n_s_CEmitter, __pyx_k_CEmitter, sizeof(__pyx_k_CEmitter), 0, 0, 1, 1},
+ {&__pyx_n_s_CParser, __pyx_k_CParser, sizeof(__pyx_k_CParser), 0, 0, 1, 1},
+ {&__pyx_n_s_ComposerError, __pyx_k_ComposerError, sizeof(__pyx_k_ComposerError), 0, 0, 1, 1},
+ {&__pyx_n_s_ConstructorError, __pyx_k_ConstructorError, sizeof(__pyx_k_ConstructorError), 0, 0, 1, 1},
+ {&__pyx_n_s_DirectiveToken, __pyx_k_DirectiveToken, sizeof(__pyx_k_DirectiveToken), 0, 0, 1, 1},
+ {&__pyx_n_s_DocumentEndEvent, __pyx_k_DocumentEndEvent, sizeof(__pyx_k_DocumentEndEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_DocumentEndToken, __pyx_k_DocumentEndToken, sizeof(__pyx_k_DocumentEndToken), 0, 0, 1, 1},
+ {&__pyx_n_s_DocumentStartEvent, __pyx_k_DocumentStartEvent, sizeof(__pyx_k_DocumentStartEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_DocumentStartToken, __pyx_k_DocumentStartToken, sizeof(__pyx_k_DocumentStartToken), 0, 0, 1, 1},
+ {&__pyx_n_s_EmitterError, __pyx_k_EmitterError, sizeof(__pyx_k_EmitterError), 0, 0, 1, 1},
+ {&__pyx_n_s_FlowEntryToken, __pyx_k_FlowEntryToken, sizeof(__pyx_k_FlowEntryToken), 0, 0, 1, 1},
+ {&__pyx_n_s_FlowMappingEndToken, __pyx_k_FlowMappingEndToken, sizeof(__pyx_k_FlowMappingEndToken), 0, 0, 1, 1},
+ {&__pyx_n_s_FlowMappingStartToken, __pyx_k_FlowMappingStartToken, sizeof(__pyx_k_FlowMappingStartToken), 0, 0, 1, 1},
+ {&__pyx_n_s_FlowSequenceEndToken, __pyx_k_FlowSequenceEndToken, sizeof(__pyx_k_FlowSequenceEndToken), 0, 0, 1, 1},
+ {&__pyx_n_s_FlowSequenceStartToken, __pyx_k_FlowSequenceStartToken, sizeof(__pyx_k_FlowSequenceStartToken), 0, 0, 1, 1},
+ {&__pyx_kp_s_Incompatible_checksums_s_vs_0x3f, __pyx_k_Incompatible_checksums_s_vs_0x3f, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x3f), 0, 0, 1, 0},
+ {&__pyx_n_s_KeyToken, __pyx_k_KeyToken, sizeof(__pyx_k_KeyToken), 0, 0, 1, 1},
+ {&__pyx_n_s_MappingEndEvent, __pyx_k_MappingEndEvent, sizeof(__pyx_k_MappingEndEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_MappingNode, __pyx_k_MappingNode, sizeof(__pyx_k_MappingNode), 0, 0, 1, 1},
+ {&__pyx_n_s_MappingStartEvent, __pyx_k_MappingStartEvent, sizeof(__pyx_k_MappingStartEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_Mark, __pyx_k_Mark, sizeof(__pyx_k_Mark), 0, 0, 1, 1},
+ {&__pyx_n_s_MemoryError, __pyx_k_MemoryError, sizeof(__pyx_k_MemoryError), 0, 0, 1, 1},
+ {&__pyx_n_s_ParserError, __pyx_k_ParserError, sizeof(__pyx_k_ParserError), 0, 0, 1, 1},
+ {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1},
+ {&__pyx_kp_s_Pickling_of_struct_members_such, __pyx_k_Pickling_of_struct_members_such, sizeof(__pyx_k_Pickling_of_struct_members_such), 0, 0, 1, 0},
+ {&__pyx_n_s_ReaderError, __pyx_k_ReaderError, sizeof(__pyx_k_ReaderError), 0, 0, 1, 1},
+ {&__pyx_n_s_RepresenterError, __pyx_k_RepresenterError, sizeof(__pyx_k_RepresenterError), 0, 0, 1, 1},
+ {&__pyx_n_s_ScalarEvent, __pyx_k_ScalarEvent, sizeof(__pyx_k_ScalarEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_ScalarNode, __pyx_k_ScalarNode, sizeof(__pyx_k_ScalarNode), 0, 0, 1, 1},
+ {&__pyx_n_s_ScalarToken, __pyx_k_ScalarToken, sizeof(__pyx_k_ScalarToken), 0, 0, 1, 1},
+ {&__pyx_n_s_ScannerError, __pyx_k_ScannerError, sizeof(__pyx_k_ScannerError), 0, 0, 1, 1},
+ {&__pyx_n_s_SequenceEndEvent, __pyx_k_SequenceEndEvent, sizeof(__pyx_k_SequenceEndEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_SequenceNode, __pyx_k_SequenceNode, sizeof(__pyx_k_SequenceNode), 0, 0, 1, 1},
+ {&__pyx_n_s_SequenceStartEvent, __pyx_k_SequenceStartEvent, sizeof(__pyx_k_SequenceStartEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_SerializerError, __pyx_k_SerializerError, sizeof(__pyx_k_SerializerError), 0, 0, 1, 1},
+ {&__pyx_n_s_StreamEndEvent, __pyx_k_StreamEndEvent, sizeof(__pyx_k_StreamEndEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_StreamEndToken, __pyx_k_StreamEndToken, sizeof(__pyx_k_StreamEndToken), 0, 0, 1, 1},
+ {&__pyx_n_s_StreamStartEvent, __pyx_k_StreamStartEvent, sizeof(__pyx_k_StreamStartEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_StreamStartToken, __pyx_k_StreamStartToken, sizeof(__pyx_k_StreamStartToken), 0, 0, 1, 1},
+ {&__pyx_n_u_TAG, __pyx_k_TAG, sizeof(__pyx_k_TAG), 0, 1, 0, 1},
+ {&__pyx_n_s_TagToken, __pyx_k_TagToken, sizeof(__pyx_k_TagToken), 0, 0, 1, 1},
+ {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1},
+ {&__pyx_n_s_ValueError, __pyx_k_ValueError, sizeof(__pyx_k_ValueError), 0, 0, 1, 1},
+ {&__pyx_n_s_ValueToken, __pyx_k_ValueToken, sizeof(__pyx_k_ValueToken), 0, 0, 1, 1},
+ {&__pyx_n_u_YAML, __pyx_k_YAML, sizeof(__pyx_k_YAML), 0, 1, 0, 1},
+ {&__pyx_n_s_YAMLError, __pyx_k_YAMLError, sizeof(__pyx_k_YAMLError), 0, 0, 1, 1},
+ {&__pyx_kp_s__10, __pyx_k__10, sizeof(__pyx_k__10), 0, 0, 1, 0},
+ {&__pyx_kp_u__10, __pyx_k__10, sizeof(__pyx_k__10), 0, 1, 0, 0},
+ {&__pyx_kp_s__19, __pyx_k__19, sizeof(__pyx_k__19), 0, 0, 1, 0},
+ {&__pyx_kp_s__20, __pyx_k__20, sizeof(__pyx_k__20), 0, 0, 1, 0},
+ {&__pyx_kp_s__21, __pyx_k__21, sizeof(__pyx_k__21), 0, 0, 1, 0},
+ {&__pyx_kp_s__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 0, 1, 0},
+ {&__pyx_kp_u__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 1, 0, 0},
+ {&__pyx_kp_u__6, __pyx_k__6, sizeof(__pyx_k__6), 0, 1, 0, 0},
+ {&__pyx_kp_s__7, __pyx_k__7, sizeof(__pyx_k__7), 0, 0, 1, 0},
+ {&__pyx_kp_u__7, __pyx_k__7, sizeof(__pyx_k__7), 0, 1, 0, 0},
+ {&__pyx_kp_s__8, __pyx_k__8, sizeof(__pyx_k__8), 0, 0, 1, 0},
+ {&__pyx_kp_u__8, __pyx_k__8, sizeof(__pyx_k__8), 0, 1, 0, 0},
+ {&__pyx_kp_s__9, __pyx_k__9, sizeof(__pyx_k__9), 0, 0, 1, 0},
+ {&__pyx_kp_u__9, __pyx_k__9, sizeof(__pyx_k__9), 0, 1, 0, 0},
+ {&__pyx_kp_s_a_string_or_stream_input_is_requ, __pyx_k_a_string_or_stream_input_is_requ, sizeof(__pyx_k_a_string_or_stream_input_is_requ), 0, 0, 1, 0},
+ {&__pyx_kp_u_a_string_or_stream_input_is_requ, __pyx_k_a_string_or_stream_input_is_requ, sizeof(__pyx_k_a_string_or_stream_input_is_requ), 0, 1, 0, 0},
+ {&__pyx_kp_s_a_string_value_is_expected, __pyx_k_a_string_value_is_expected, sizeof(__pyx_k_a_string_value_is_expected), 0, 0, 1, 0},
+ {&__pyx_kp_u_a_string_value_is_expected, __pyx_k_a_string_value_is_expected, sizeof(__pyx_k_a_string_value_is_expected), 0, 1, 0, 0},
+ {&__pyx_n_s_allow_unicode, __pyx_k_allow_unicode, sizeof(__pyx_k_allow_unicode), 0, 0, 1, 1},
+ {&__pyx_n_s_anchor, __pyx_k_anchor, sizeof(__pyx_k_anchor), 0, 0, 1, 1},
+ {&__pyx_kp_s_anchor_must_be_a_string, __pyx_k_anchor_must_be_a_string, sizeof(__pyx_k_anchor_must_be_a_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_anchor_must_be_a_string, __pyx_k_anchor_must_be_a_string, sizeof(__pyx_k_anchor_must_be_a_string), 0, 1, 0, 0},
+ {&__pyx_n_s_ascend_resolver, __pyx_k_ascend_resolver, sizeof(__pyx_k_ascend_resolver), 0, 0, 1, 1},
+ {&__pyx_n_s_buffer, __pyx_k_buffer, sizeof(__pyx_k_buffer), 0, 0, 1, 1},
+ {&__pyx_kp_s_but_found_another_document, __pyx_k_but_found_another_document, sizeof(__pyx_k_but_found_another_document), 0, 0, 1, 0},
+ {&__pyx_kp_u_but_found_another_document, __pyx_k_but_found_another_document, sizeof(__pyx_k_but_found_another_document), 0, 1, 0, 0},
+ {&__pyx_kp_s_byte_string, __pyx_k_byte_string, sizeof(__pyx_k_byte_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_byte_string, __pyx_k_byte_string, sizeof(__pyx_k_byte_string), 0, 1, 0, 0},
+ {&__pyx_n_s_canonical, __pyx_k_canonical, sizeof(__pyx_k_canonical), 0, 0, 1, 1},
+ {&__pyx_n_s_class, __pyx_k_class, sizeof(__pyx_k_class), 0, 0, 1, 1},
+ {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1},
+ {&__pyx_n_s_column, __pyx_k_column, sizeof(__pyx_k_column), 0, 0, 1, 1},
+ {&__pyx_n_s_composer, __pyx_k_composer, sizeof(__pyx_k_composer), 0, 0, 1, 1},
+ {&__pyx_n_s_constructor, __pyx_k_constructor, sizeof(__pyx_k_constructor), 0, 0, 1, 1},
+ {&__pyx_n_s_descend_resolver, __pyx_k_descend_resolver, sizeof(__pyx_k_descend_resolver), 0, 0, 1, 1},
+ {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1},
+ {&__pyx_n_s_emitter, __pyx_k_emitter, sizeof(__pyx_k_emitter), 0, 0, 1, 1},
+ {&__pyx_n_s_encoding, __pyx_k_encoding, sizeof(__pyx_k_encoding), 0, 0, 1, 1},
+ {&__pyx_n_u_encoding, __pyx_k_encoding, sizeof(__pyx_k_encoding), 0, 1, 0, 1},
+ {&__pyx_n_s_end_mark, __pyx_k_end_mark, sizeof(__pyx_k_end_mark), 0, 0, 1, 1},
+ {&__pyx_n_s_error, __pyx_k_error, sizeof(__pyx_k_error), 0, 0, 1, 1},
+ {&__pyx_n_s_events, __pyx_k_events, sizeof(__pyx_k_events), 0, 0, 1, 1},
+ {&__pyx_kp_s_expected_a_single_document_in_th, __pyx_k_expected_a_single_document_in_th, sizeof(__pyx_k_expected_a_single_document_in_th), 0, 0, 1, 0},
+ {&__pyx_kp_u_expected_a_single_document_in_th, __pyx_k_expected_a_single_document_in_th, sizeof(__pyx_k_expected_a_single_document_in_th), 0, 1, 0, 0},
+ {&__pyx_n_s_explicit, __pyx_k_explicit, sizeof(__pyx_k_explicit), 0, 0, 1, 1},
+ {&__pyx_n_s_explicit_end, __pyx_k_explicit_end, sizeof(__pyx_k_explicit_end), 0, 0, 1, 1},
+ {&__pyx_n_s_explicit_start, __pyx_k_explicit_start, sizeof(__pyx_k_explicit_start), 0, 0, 1, 1},
+ {&__pyx_kp_s_ext__yaml_pyx, __pyx_k_ext__yaml_pyx, sizeof(__pyx_k_ext__yaml_pyx), 0, 0, 1, 0},
+ {&__pyx_kp_s_file, __pyx_k_file, sizeof(__pyx_k_file), 0, 0, 1, 0},
+ {&__pyx_kp_u_file, __pyx_k_file, sizeof(__pyx_k_file), 0, 1, 0, 0},
+ {&__pyx_n_s_flow_style, __pyx_k_flow_style, sizeof(__pyx_k_flow_style), 0, 0, 1, 1},
+ {&__pyx_kp_s_found_duplicate_anchor_first_occ, __pyx_k_found_duplicate_anchor_first_occ, sizeof(__pyx_k_found_duplicate_anchor_first_occ), 0, 0, 1, 0},
+ {&__pyx_kp_u_found_duplicate_anchor_first_occ, __pyx_k_found_duplicate_anchor_first_occ, sizeof(__pyx_k_found_duplicate_anchor_first_occ), 0, 1, 0, 0},
+ {&__pyx_kp_s_found_undefined_alias, __pyx_k_found_undefined_alias, sizeof(__pyx_k_found_undefined_alias), 0, 0, 1, 0},
+ {&__pyx_kp_u_found_undefined_alias, __pyx_k_found_undefined_alias, sizeof(__pyx_k_found_undefined_alias), 0, 1, 0, 0},
+ {&__pyx_n_s_get_version, __pyx_k_get_version, sizeof(__pyx_k_get_version), 0, 0, 1, 1},
+ {&__pyx_n_s_get_version_string, __pyx_k_get_version_string, sizeof(__pyx_k_get_version_string), 0, 0, 1, 1},
+ {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1},
+ {&__pyx_kp_u_id_03d, __pyx_k_id_03d, sizeof(__pyx_k_id_03d), 0, 1, 0, 0},
+ {&__pyx_n_s_implicit, __pyx_k_implicit, sizeof(__pyx_k_implicit), 0, 0, 1, 1},
+ {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1},
+ {&__pyx_kp_s_in_s_line_d_column_d, __pyx_k_in_s_line_d_column_d, sizeof(__pyx_k_in_s_line_d_column_d), 0, 0, 1, 0},
+ {&__pyx_n_s_indent, __pyx_k_indent, sizeof(__pyx_k_indent), 0, 0, 1, 1},
+ {&__pyx_n_s_index, __pyx_k_index, sizeof(__pyx_k_index), 0, 0, 1, 1},
+ {&__pyx_kp_s_invalid_event_s, __pyx_k_invalid_event_s, sizeof(__pyx_k_invalid_event_s), 0, 0, 1, 0},
+ {&__pyx_kp_u_invalid_event_s, __pyx_k_invalid_event_s, sizeof(__pyx_k_invalid_event_s), 0, 1, 0, 0},
+ {&__pyx_n_s_line, __pyx_k_line, sizeof(__pyx_k_line), 0, 0, 1, 1},
+ {&__pyx_n_s_line_break, __pyx_k_line_break, sizeof(__pyx_k_line_break), 0, 0, 1, 1},
+ {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1},
+ {&__pyx_n_s_major, __pyx_k_major, sizeof(__pyx_k_major), 0, 0, 1, 1},
+ {&__pyx_n_s_minor, __pyx_k_minor, sizeof(__pyx_k_minor), 0, 0, 1, 1},
+ {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1},
+ {&__pyx_n_s_name_2, __pyx_k_name_2, sizeof(__pyx_k_name_2), 0, 0, 1, 1},
+ {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1},
+ {&__pyx_kp_s_no_emitter_error, __pyx_k_no_emitter_error, sizeof(__pyx_k_no_emitter_error), 0, 0, 1, 0},
+ {&__pyx_kp_u_no_emitter_error, __pyx_k_no_emitter_error, sizeof(__pyx_k_no_emitter_error), 0, 1, 0, 0},
+ {&__pyx_kp_s_no_parser_error, __pyx_k_no_parser_error, sizeof(__pyx_k_no_parser_error), 0, 0, 1, 0},
+ {&__pyx_kp_u_no_parser_error, __pyx_k_no_parser_error, sizeof(__pyx_k_no_parser_error), 0, 1, 0, 0},
+ {&__pyx_n_s_nodes, __pyx_k_nodes, sizeof(__pyx_k_nodes), 0, 0, 1, 1},
+ {&__pyx_n_s_parser, __pyx_k_parser, sizeof(__pyx_k_parser), 0, 0, 1, 1},
+ {&__pyx_n_s_patch, __pyx_k_patch, sizeof(__pyx_k_patch), 0, 0, 1, 1},
+ {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1},
+ {&__pyx_n_s_pointer, __pyx_k_pointer, sizeof(__pyx_k_pointer), 0, 0, 1, 1},
+ {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1},
+ {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1},
+ {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1},
+ {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1},
+ {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1},
+ {&__pyx_n_s_pyx_unpickle_Mark, __pyx_k_pyx_unpickle_Mark, sizeof(__pyx_k_pyx_unpickle_Mark), 0, 0, 1, 1},
+ {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1},
+ {&__pyx_n_s_read, __pyx_k_read, sizeof(__pyx_k_read), 0, 0, 1, 1},
+ {&__pyx_n_s_reader, __pyx_k_reader, sizeof(__pyx_k_reader), 0, 0, 1, 1},
+ {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1},
+ {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1},
+ {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1},
+ {&__pyx_n_s_representer, __pyx_k_representer, sizeof(__pyx_k_representer), 0, 0, 1, 1},
+ {&__pyx_n_s_resolve, __pyx_k_resolve, sizeof(__pyx_k_resolve), 0, 0, 1, 1},
+ {&__pyx_n_s_scanner, __pyx_k_scanner, sizeof(__pyx_k_scanner), 0, 0, 1, 1},
+ {&__pyx_kp_s_second_occurrence, __pyx_k_second_occurrence, sizeof(__pyx_k_second_occurrence), 0, 0, 1, 0},
+ {&__pyx_kp_u_second_occurrence, __pyx_k_second_occurrence, sizeof(__pyx_k_second_occurrence), 0, 1, 0, 0},
+ {&__pyx_kp_s_self_parsed_event_cannot_be_conv, __pyx_k_self_parsed_event_cannot_be_conv, sizeof(__pyx_k_self_parsed_event_cannot_be_conv), 0, 0, 1, 0},
+ {&__pyx_n_s_serializer, __pyx_k_serializer, sizeof(__pyx_k_serializer), 0, 0, 1, 1},
+ {&__pyx_kp_s_serializer_is_already_opened, __pyx_k_serializer_is_already_opened, sizeof(__pyx_k_serializer_is_already_opened), 0, 0, 1, 0},
+ {&__pyx_kp_u_serializer_is_already_opened, __pyx_k_serializer_is_already_opened, sizeof(__pyx_k_serializer_is_already_opened), 0, 1, 0, 0},
+ {&__pyx_kp_s_serializer_is_closed, __pyx_k_serializer_is_closed, sizeof(__pyx_k_serializer_is_closed), 0, 0, 1, 0},
+ {&__pyx_kp_u_serializer_is_closed, __pyx_k_serializer_is_closed, sizeof(__pyx_k_serializer_is_closed), 0, 1, 0, 0},
+ {&__pyx_kp_s_serializer_is_not_opened, __pyx_k_serializer_is_not_opened, sizeof(__pyx_k_serializer_is_not_opened), 0, 0, 1, 0},
+ {&__pyx_kp_u_serializer_is_not_opened, __pyx_k_serializer_is_not_opened, sizeof(__pyx_k_serializer_is_not_opened), 0, 1, 0, 0},
+ {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1},
+ {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1},
+ {&__pyx_n_s_start_mark, __pyx_k_start_mark, sizeof(__pyx_k_start_mark), 0, 0, 1, 1},
+ {&__pyx_n_s_stream, __pyx_k_stream, sizeof(__pyx_k_stream), 0, 0, 1, 1},
+ {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0},
+ {&__pyx_n_s_style, __pyx_k_style, sizeof(__pyx_k_style), 0, 0, 1, 1},
+ {&__pyx_n_s_tag, __pyx_k_tag, sizeof(__pyx_k_tag), 0, 0, 1, 1},
+ {&__pyx_kp_s_tag_handle_must_be_a_string, __pyx_k_tag_handle_must_be_a_string, sizeof(__pyx_k_tag_handle_must_be_a_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_tag_handle_must_be_a_string, __pyx_k_tag_handle_must_be_a_string, sizeof(__pyx_k_tag_handle_must_be_a_string), 0, 1, 0, 0},
+ {&__pyx_kp_s_tag_must_be_a_string, __pyx_k_tag_must_be_a_string, sizeof(__pyx_k_tag_must_be_a_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_tag_must_be_a_string, __pyx_k_tag_must_be_a_string, sizeof(__pyx_k_tag_must_be_a_string), 0, 1, 0, 0},
+ {&__pyx_kp_s_tag_prefix_must_be_a_string, __pyx_k_tag_prefix_must_be_a_string, sizeof(__pyx_k_tag_prefix_must_be_a_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_tag_prefix_must_be_a_string, __pyx_k_tag_prefix_must_be_a_string, sizeof(__pyx_k_tag_prefix_must_be_a_string), 0, 1, 0, 0},
+ {&__pyx_n_s_tags, __pyx_k_tags, sizeof(__pyx_k_tags), 0, 0, 1, 1},
+ {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1},
+ {&__pyx_n_s_tokens, __pyx_k_tokens, sizeof(__pyx_k_tokens), 0, 0, 1, 1},
+ {&__pyx_kp_s_too_many_tags, __pyx_k_too_many_tags, sizeof(__pyx_k_too_many_tags), 0, 0, 1, 0},
+ {&__pyx_kp_u_too_many_tags, __pyx_k_too_many_tags, sizeof(__pyx_k_too_many_tags), 0, 1, 0, 0},
+ {&__pyx_kp_s_unicode_string, __pyx_k_unicode_string, sizeof(__pyx_k_unicode_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_unicode_string, __pyx_k_unicode_string, sizeof(__pyx_k_unicode_string), 0, 1, 0, 0},
+ {&__pyx_kp_s_unknown_event_type, __pyx_k_unknown_event_type, sizeof(__pyx_k_unknown_event_type), 0, 0, 1, 0},
+ {&__pyx_kp_u_unknown_event_type, __pyx_k_unknown_event_type, sizeof(__pyx_k_unknown_event_type), 0, 1, 0, 0},
+ {&__pyx_kp_s_unknown_token_type, __pyx_k_unknown_token_type, sizeof(__pyx_k_unknown_token_type), 0, 0, 1, 0},
+ {&__pyx_kp_u_unknown_token_type, __pyx_k_unknown_token_type, sizeof(__pyx_k_unknown_token_type), 0, 1, 0, 0},
+ {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1},
+ {&__pyx_kp_s_utf_16_be, __pyx_k_utf_16_be, sizeof(__pyx_k_utf_16_be), 0, 0, 1, 0},
+ {&__pyx_kp_u_utf_16_be, __pyx_k_utf_16_be, sizeof(__pyx_k_utf_16_be), 0, 1, 0, 0},
+ {&__pyx_kp_s_utf_16_le, __pyx_k_utf_16_le, sizeof(__pyx_k_utf_16_le), 0, 0, 1, 0},
+ {&__pyx_kp_u_utf_16_le, __pyx_k_utf_16_le, sizeof(__pyx_k_utf_16_le), 0, 1, 0, 0},
+ {&__pyx_kp_u_utf_8, __pyx_k_utf_8, sizeof(__pyx_k_utf_8), 0, 1, 0, 0},
+ {&__pyx_n_s_value, __pyx_k_value, sizeof(__pyx_k_value), 0, 0, 1, 1},
+ {&__pyx_kp_s_value_must_be_a_string, __pyx_k_value_must_be_a_string, sizeof(__pyx_k_value_must_be_a_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_value_must_be_a_string, __pyx_k_value_must_be_a_string, sizeof(__pyx_k_value_must_be_a_string), 0, 1, 0, 0},
+ {&__pyx_n_s_version, __pyx_k_version, sizeof(__pyx_k_version), 0, 0, 1, 1},
+ {&__pyx_n_s_width, __pyx_k_width, sizeof(__pyx_k_width), 0, 0, 1, 1},
+ {&__pyx_n_s_write, __pyx_k_write, sizeof(__pyx_k_write), 0, 0, 1, 1},
+ {&__pyx_n_s_yaml, __pyx_k_yaml, sizeof(__pyx_k_yaml), 0, 0, 1, 1},
+ {&__pyx_n_s_yaml_2, __pyx_k_yaml_2, sizeof(__pyx_k_yaml_2), 0, 0, 1, 1},
+ {0, 0, 0, 0, 0, 0, 0}
+};
+static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) {
+ __pyx_builtin_MemoryError = __Pyx_GetBuiltinName(__pyx_n_s_MemoryError); if (!__pyx_builtin_MemoryError) __PYX_ERR(0, 265, __pyx_L1_error)
+ __pyx_builtin_AttributeError = __Pyx_GetBuiltinName(__pyx_n_s_AttributeError); if (!__pyx_builtin_AttributeError) __PYX_ERR(0, 270, __pyx_L1_error)
+ __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 301, __pyx_L1_error)
+ __pyx_builtin_ValueError = __Pyx_GetBuiltinName(__pyx_n_s_ValueError); if (!__pyx_builtin_ValueError) __PYX_ERR(0, 356, __pyx_L1_error)
+ return 0;
+ __pyx_L1_error:;
+ return -1;
+}
+
+static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0);
+
+ /* "_yaml.pyx":301
+ * if PyString_CheckExact(stream) == 0:
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string or stream input is required") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"a string or stream input is required")
+ */
+ __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_s_a_string_or_stream_input_is_requ); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 301, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple_);
+ __Pyx_GIVEREF(__pyx_tuple_);
+
+ /* "_yaml.pyx":303
+ * raise TypeError("a string or stream input is required")
+ * else:
+ * raise TypeError(u"a string or stream input is required") # <<<<<<<<<<<<<<
+ * self.stream = stream
+ * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream))
+ */
+ __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_u_a_string_or_stream_input_is_requ); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 303, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__2);
+ __Pyx_GIVEREF(__pyx_tuple__2);
+
+ /* "_yaml.pyx":356
+ * return ParserError(context, context_mark, problem, problem_mark)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("no parser error") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"no parser error")
+ */
+ __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_s_no_parser_error); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 356, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__4);
+ __Pyx_GIVEREF(__pyx_tuple__4);
+
+ /* "_yaml.pyx":358
+ * raise ValueError("no parser error")
+ * else:
+ * raise ValueError(u"no parser error") # <<<<<<<<<<<<<<
+ *
+ * def raw_scan(self):
+ */
+ __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_u_no_parser_error); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(0, 358, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__5);
+ __Pyx_GIVEREF(__pyx_tuple__5);
+
+ /* "_yaml.pyx":479
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("unknown token type") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"unknown token type")
+ */
+ __pyx_tuple__11 = PyTuple_Pack(1, __pyx_kp_s_unknown_token_type); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(0, 479, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__11);
+ __Pyx_GIVEREF(__pyx_tuple__11);
+
+ /* "_yaml.pyx":481
+ * raise ValueError("unknown token type")
+ * else:
+ * raise ValueError(u"unknown token type") # <<<<<<<<<<<<<<
+ *
+ * def get_token(self):
+ */
+ __pyx_tuple__12 = PyTuple_Pack(1, __pyx_kp_u_unknown_token_type); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 481, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__12);
+ __Pyx_GIVEREF(__pyx_tuple__12);
+
+ /* "_yaml.pyx":657
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("unknown event type") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"unknown event type")
+ */
+ __pyx_tuple__13 = PyTuple_Pack(1, __pyx_kp_s_unknown_event_type); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(0, 657, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__13);
+ __Pyx_GIVEREF(__pyx_tuple__13);
+
+ /* "_yaml.pyx":659
+ * raise ValueError("unknown event type")
+ * else:
+ * raise ValueError(u"unknown event type") # <<<<<<<<<<<<<<
+ *
+ * def get_event(self):
+ */
+ __pyx_tuple__14 = PyTuple_Pack(1, __pyx_kp_u_unknown_event_type); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(0, 659, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__14);
+ __Pyx_GIVEREF(__pyx_tuple__14);
+
+ /* "(tree fragment)":2
+ * def __reduce_cython__(self):
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling") # <<<<<<<<<<<<<<
+ * def __setstate_cython__(self, __pyx_state):
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling")
+ */
+ __pyx_tuple__15 = PyTuple_Pack(1, __pyx_kp_s_self_parsed_event_cannot_be_conv); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(1, 2, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__15);
+ __Pyx_GIVEREF(__pyx_tuple__15);
+
+ /* "(tree fragment)":4
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling")
+ * def __setstate_cython__(self, __pyx_state):
+ * raise TypeError("self.parsed_event cannot be converted to a Python object for pickling") # <<<<<<<<<<<<<<
+ */
+ __pyx_tuple__16 = PyTuple_Pack(1, __pyx_kp_s_self_parsed_event_cannot_be_conv); if (unlikely(!__pyx_tuple__16)) __PYX_ERR(1, 4, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__16);
+ __Pyx_GIVEREF(__pyx_tuple__16);
+
+ /* "_yaml.pyx":918
+ * if PyString_CheckExact(value) == 0:
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string value is expected") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"a string value is expected")
+ */
+ __pyx_tuple__17 = PyTuple_Pack(1, __pyx_kp_s_a_string_value_is_expected); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(0, 918, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__17);
+ __Pyx_GIVEREF(__pyx_tuple__17);
+
+ /* "_yaml.pyx":920
+ * raise TypeError("a string value is expected")
+ * else:
+ * raise TypeError(u"a string value is expected") # <<<<<<<<<<<<<<
+ * parser.stream_cache = value
+ * parser.stream_cache_pos = 0
+ */
+ __pyx_tuple__18 = PyTuple_Pack(1, __pyx_kp_u_a_string_value_is_expected); if (unlikely(!__pyx_tuple__18)) __PYX_ERR(0, 920, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__18);
+ __Pyx_GIVEREF(__pyx_tuple__18);
+
+ /* "_yaml.pyx":1012
+ * return EmitterError(problem)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("no emitter error") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"no emitter error")
+ */
+ __pyx_tuple__22 = PyTuple_Pack(1, __pyx_kp_s_no_emitter_error); if (unlikely(!__pyx_tuple__22)) __PYX_ERR(0, 1012, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__22);
+ __Pyx_GIVEREF(__pyx_tuple__22);
+
+ /* "_yaml.pyx":1014
+ * raise ValueError("no emitter error")
+ * else:
+ * raise ValueError(u"no emitter error") # <<<<<<<<<<<<<<
+ *
+ * cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0:
+ */
+ __pyx_tuple__23 = PyTuple_Pack(1, __pyx_kp_u_no_emitter_error); if (unlikely(!__pyx_tuple__23)) __PYX_ERR(0, 1014, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__23);
+ __Pyx_GIVEREF(__pyx_tuple__23);
+
+ /* "_yaml.pyx":1058
+ * if len(event_object.tags) > 128:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("too many tags") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"too many tags")
+ */
+ __pyx_tuple__24 = PyTuple_Pack(1, __pyx_kp_s_too_many_tags); if (unlikely(!__pyx_tuple__24)) __PYX_ERR(0, 1058, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__24);
+ __Pyx_GIVEREF(__pyx_tuple__24);
+
+ /* "_yaml.pyx":1060
+ * raise ValueError("too many tags")
+ * else:
+ * raise ValueError(u"too many tags") # <<<<<<<<<<<<<<
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value
+ */
+ __pyx_tuple__25 = PyTuple_Pack(1, __pyx_kp_u_too_many_tags); if (unlikely(!__pyx_tuple__25)) __PYX_ERR(0, 1060, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__25);
+ __Pyx_GIVEREF(__pyx_tuple__25);
+
+ /* "_yaml.pyx":1071
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag handle must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag handle must be a string")
+ */
+ __pyx_tuple__26 = PyTuple_Pack(1, __pyx_kp_s_tag_handle_must_be_a_string); if (unlikely(!__pyx_tuple__26)) __PYX_ERR(0, 1071, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__26);
+ __Pyx_GIVEREF(__pyx_tuple__26);
+
+ /* "_yaml.pyx":1073
+ * raise TypeError("tag handle must be a string")
+ * else:
+ * raise TypeError(u"tag handle must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix):
+ */
+ __pyx_tuple__27 = PyTuple_Pack(1, __pyx_kp_u_tag_handle_must_be_a_string); if (unlikely(!__pyx_tuple__27)) __PYX_ERR(0, 1073, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__27);
+ __Pyx_GIVEREF(__pyx_tuple__27);
+
+ /* "_yaml.pyx":1080
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag prefix must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag prefix must be a string")
+ */
+ __pyx_tuple__28 = PyTuple_Pack(1, __pyx_kp_s_tag_prefix_must_be_a_string); if (unlikely(!__pyx_tuple__28)) __PYX_ERR(0, 1080, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__28);
+ __Pyx_GIVEREF(__pyx_tuple__28);
+
+ /* "_yaml.pyx":1082
+ * raise TypeError("tag prefix must be a string")
+ * else:
+ * raise TypeError(u"tag prefix must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1
+ */
+ __pyx_tuple__29 = PyTuple_Pack(1, __pyx_kp_u_tag_prefix_must_be_a_string); if (unlikely(!__pyx_tuple__29)) __PYX_ERR(0, 1082, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__29);
+ __Pyx_GIVEREF(__pyx_tuple__29);
+
+ /* "_yaml.pyx":1103
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_tuple__30 = PyTuple_Pack(1, __pyx_kp_s_anchor_must_be_a_string); if (unlikely(!__pyx_tuple__30)) __PYX_ERR(0, 1103, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__30);
+ __Pyx_GIVEREF(__pyx_tuple__30);
+
+ /* "_yaml.pyx":1105
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if yaml_alias_event_initialize(event, anchor) == 0:
+ */
+ __pyx_tuple__31 = PyTuple_Pack(1, __pyx_kp_u_anchor_must_be_a_string); if (unlikely(!__pyx_tuple__31)) __PYX_ERR(0, 1105, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__31);
+ __Pyx_GIVEREF(__pyx_tuple__31);
+
+ /* "_yaml.pyx":1128
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_tuple__32 = PyTuple_Pack(1, __pyx_kp_s_tag_must_be_a_string); if (unlikely(!__pyx_tuple__32)) __PYX_ERR(0, 1128, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__32);
+ __Pyx_GIVEREF(__pyx_tuple__32);
+
+ /* "_yaml.pyx":1130
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = event_object.value
+ */
+ __pyx_tuple__33 = PyTuple_Pack(1, __pyx_kp_u_tag_must_be_a_string); if (unlikely(!__pyx_tuple__33)) __PYX_ERR(0, 1130, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__33);
+ __Pyx_GIVEREF(__pyx_tuple__33);
+
+ /* "_yaml.pyx":1137
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("value must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"value must be a string")
+ */
+ __pyx_tuple__34 = PyTuple_Pack(1, __pyx_kp_s_value_must_be_a_string); if (unlikely(!__pyx_tuple__34)) __PYX_ERR(0, 1137, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__34);
+ __Pyx_GIVEREF(__pyx_tuple__34);
+
+ /* "_yaml.pyx":1139
+ * raise TypeError("value must be a string")
+ * else:
+ * raise TypeError(u"value must be a string") # <<<<<<<<<<<<<<
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object)
+ */
+ __pyx_tuple__35 = PyTuple_Pack(1, __pyx_kp_u_value_must_be_a_string); if (unlikely(!__pyx_tuple__35)) __PYX_ERR(0, 1139, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__35);
+ __Pyx_GIVEREF(__pyx_tuple__35);
+
+ /* "_yaml.pyx":1412
+ * quoted_implicit = 0
+ * tag_object = node.tag
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object: # <<<<<<<<<<<<<<
+ * plain_implicit = 1
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
+ */
+ __pyx_tuple__36 = PyTuple_Pack(2, Py_True, Py_False); if (unlikely(!__pyx_tuple__36)) __PYX_ERR(0, 1412, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__36);
+ __Pyx_GIVEREF(__pyx_tuple__36);
+
+ /* "_yaml.pyx":1414
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
+ * plain_implicit = 1
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object: # <<<<<<<<<<<<<<
+ * quoted_implicit = 1
+ * tag = NULL
+ */
+ __pyx_tuple__37 = PyTuple_Pack(2, Py_False, Py_True); if (unlikely(!__pyx_tuple__37)) __PYX_ERR(0, 1414, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__37);
+ __Pyx_GIVEREF(__pyx_tuple__37);
+
+ /* "(tree fragment)":2
+ * def __reduce_cython__(self):
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)") # <<<<<<<<<<<<<<
+ * def __setstate_cython__(self, __pyx_state):
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)")
+ */
+ __pyx_tuple__38 = PyTuple_Pack(1, __pyx_kp_s_Pickling_of_struct_members_such); if (unlikely(!__pyx_tuple__38)) __PYX_ERR(1, 2, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__38);
+ __Pyx_GIVEREF(__pyx_tuple__38);
+
+ /* "(tree fragment)":4
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)")
+ * def __setstate_cython__(self, __pyx_state):
+ * raise TypeError("Pickling of struct members such as self.emitter must be explicitly requested with @auto_pickle(True)") # <<<<<<<<<<<<<<
+ */
+ __pyx_tuple__39 = PyTuple_Pack(1, __pyx_kp_s_Pickling_of_struct_members_such); if (unlikely(!__pyx_tuple__39)) __PYX_ERR(1, 4, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__39);
+ __Pyx_GIVEREF(__pyx_tuple__39);
+
+ /* "_yaml.pyx":4
+ * import yaml
+ *
+ * def get_version_string(): # <<<<<<<<<<<<<<
+ * cdef char *value
+ * value = yaml_get_version_string()
+ */
+ __pyx_tuple__40 = PyTuple_Pack(1, __pyx_n_s_value); if (unlikely(!__pyx_tuple__40)) __PYX_ERR(0, 4, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__40);
+ __Pyx_GIVEREF(__pyx_tuple__40);
+ __pyx_codeobj__41 = (PyObject*)__Pyx_PyCode_New(0, 0, 1, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__40, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_ext__yaml_pyx, __pyx_n_s_get_version_string, 4, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__41)) __PYX_ERR(0, 4, __pyx_L1_error)
+
+ /* "_yaml.pyx":12
+ * return PyUnicode_FromString(value)
+ *
+ * def get_version(): # <<<<<<<<<<<<<<
+ * cdef int major, minor, patch
+ * yaml_get_version(&major, &minor, &patch)
+ */
+ __pyx_tuple__42 = PyTuple_Pack(3, __pyx_n_s_major, __pyx_n_s_minor, __pyx_n_s_patch); if (unlikely(!__pyx_tuple__42)) __PYX_ERR(0, 12, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__42);
+ __Pyx_GIVEREF(__pyx_tuple__42);
+ __pyx_codeobj__43 = (PyObject*)__Pyx_PyCode_New(0, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__42, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_ext__yaml_pyx, __pyx_n_s_get_version, 12, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__43)) __PYX_ERR(0, 12, __pyx_L1_error)
+
+ /* "(tree fragment)":1
+ * def __pyx_unpickle_Mark(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<<
+ * cdef object __pyx_PickleError
+ * cdef object __pyx_result
+ */
+ __pyx_tuple__44 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__44)) __PYX_ERR(1, 1, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple__44);
+ __Pyx_GIVEREF(__pyx_tuple__44);
+ __pyx_codeobj__45 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__44, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_Mark, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__45)) __PYX_ERR(1, 1, __pyx_L1_error)
+ __Pyx_RefNannyFinishContext();
+ return 0;
+ __pyx_L1_error:;
+ __Pyx_RefNannyFinishContext();
+ return -1;
+}
+
+static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) {
+ if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 2, __pyx_L1_error);
+ __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) __PYX_ERR(0, 2, __pyx_L1_error)
+ __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 2, __pyx_L1_error)
+ __pyx_int_66733494 = PyInt_FromLong(66733494L); if (unlikely(!__pyx_int_66733494)) __PYX_ERR(0, 2, __pyx_L1_error)
+ return 0;
+ __pyx_L1_error:;
+ return -1;
+}
+
+static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/
+
+static int __Pyx_modinit_global_init_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0);
+ /*--- Global init code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_variable_export_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0);
+ /*--- Variable export code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_function_export_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0);
+ /*--- Function export code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_type_init_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0);
+ /*--- Type init code ---*/
+ if (PyType_Ready(&__pyx_type_5_yaml_Mark) < 0) __PYX_ERR(0, 64, __pyx_L1_error)
+ #if PY_VERSION_HEX < 0x030800B1
+ __pyx_type_5_yaml_Mark.tp_print = 0;
+ #endif
+ if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_5_yaml_Mark.tp_dictoffset && __pyx_type_5_yaml_Mark.tp_getattro == PyObject_GenericGetAttr)) {
+ __pyx_type_5_yaml_Mark.tp_getattro = __Pyx_PyObject_GenericGetAttr;
+ }
+ if (PyObject_SetAttr(__pyx_m, __pyx_n_s_Mark, (PyObject *)&__pyx_type_5_yaml_Mark) < 0) __PYX_ERR(0, 64, __pyx_L1_error)
+ if (__Pyx_setup_reduce((PyObject*)&__pyx_type_5_yaml_Mark) < 0) __PYX_ERR(0, 64, __pyx_L1_error)
+ __pyx_ptype_5_yaml_Mark = &__pyx_type_5_yaml_Mark;
+ __pyx_vtabptr_5_yaml_CParser = &__pyx_vtable_5_yaml_CParser;
+ __pyx_vtable_5_yaml_CParser._parser_error = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__parser_error;
+ __pyx_vtable_5_yaml_CParser._scan = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__scan;
+ __pyx_vtable_5_yaml_CParser._token_to_object = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, yaml_token_t *))__pyx_f_5_yaml_7CParser__token_to_object;
+ __pyx_vtable_5_yaml_CParser._parse = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__parse;
+ __pyx_vtable_5_yaml_CParser._event_to_object = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, yaml_event_t *))__pyx_f_5_yaml_7CParser__event_to_object;
+ __pyx_vtable_5_yaml_CParser._compose_document = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__compose_document;
+ __pyx_vtable_5_yaml_CParser._compose_node = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, PyObject *, PyObject *))__pyx_f_5_yaml_7CParser__compose_node;
+ __pyx_vtable_5_yaml_CParser._compose_scalar_node = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, PyObject *))__pyx_f_5_yaml_7CParser__compose_scalar_node;
+ __pyx_vtable_5_yaml_CParser._compose_sequence_node = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, PyObject *))__pyx_f_5_yaml_7CParser__compose_sequence_node;
+ __pyx_vtable_5_yaml_CParser._compose_mapping_node = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, PyObject *))__pyx_f_5_yaml_7CParser__compose_mapping_node;
+ __pyx_vtable_5_yaml_CParser._parse_next_event = (int (*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__parse_next_event;
+ if (PyType_Ready(&__pyx_type_5_yaml_CParser) < 0) __PYX_ERR(0, 247, __pyx_L1_error)
+ #if PY_VERSION_HEX < 0x030800B1
+ __pyx_type_5_yaml_CParser.tp_print = 0;
+ #endif
+ if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_5_yaml_CParser.tp_dictoffset && __pyx_type_5_yaml_CParser.tp_getattro == PyObject_GenericGetAttr)) {
+ __pyx_type_5_yaml_CParser.tp_getattro = __Pyx_PyObject_GenericGetAttr;
+ }
+ if (__Pyx_SetVtable(__pyx_type_5_yaml_CParser.tp_dict, __pyx_vtabptr_5_yaml_CParser) < 0) __PYX_ERR(0, 247, __pyx_L1_error)
+ if (PyObject_SetAttr(__pyx_m, __pyx_n_s_CParser, (PyObject *)&__pyx_type_5_yaml_CParser) < 0) __PYX_ERR(0, 247, __pyx_L1_error)
+ if (__Pyx_setup_reduce((PyObject*)&__pyx_type_5_yaml_CParser) < 0) __PYX_ERR(0, 247, __pyx_L1_error)
+ __pyx_ptype_5_yaml_CParser = &__pyx_type_5_yaml_CParser;
+ __pyx_vtabptr_5_yaml_CEmitter = &__pyx_vtable_5_yaml_CEmitter;
+ __pyx_vtable_5_yaml_CEmitter._emitter_error = (PyObject *(*)(struct __pyx_obj_5_yaml_CEmitter *))__pyx_f_5_yaml_8CEmitter__emitter_error;
+ __pyx_vtable_5_yaml_CEmitter._object_to_event = (int (*)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *, yaml_event_t *))__pyx_f_5_yaml_8CEmitter__object_to_event;
+ __pyx_vtable_5_yaml_CEmitter._anchor_node = (int (*)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *))__pyx_f_5_yaml_8CEmitter__anchor_node;
+ __pyx_vtable_5_yaml_CEmitter._serialize_node = (int (*)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *, PyObject *, PyObject *))__pyx_f_5_yaml_8CEmitter__serialize_node;
+ if (PyType_Ready(&__pyx_type_5_yaml_CEmitter) < 0) __PYX_ERR(0, 935, __pyx_L1_error)
+ #if PY_VERSION_HEX < 0x030800B1
+ __pyx_type_5_yaml_CEmitter.tp_print = 0;
+ #endif
+ if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_5_yaml_CEmitter.tp_dictoffset && __pyx_type_5_yaml_CEmitter.tp_getattro == PyObject_GenericGetAttr)) {
+ __pyx_type_5_yaml_CEmitter.tp_getattro = __Pyx_PyObject_GenericGetAttr;
+ }
+ if (__Pyx_SetVtable(__pyx_type_5_yaml_CEmitter.tp_dict, __pyx_vtabptr_5_yaml_CEmitter) < 0) __PYX_ERR(0, 935, __pyx_L1_error)
+ if (PyObject_SetAttr(__pyx_m, __pyx_n_s_CEmitter, (PyObject *)&__pyx_type_5_yaml_CEmitter) < 0) __PYX_ERR(0, 935, __pyx_L1_error)
+ if (__Pyx_setup_reduce((PyObject*)&__pyx_type_5_yaml_CEmitter) < 0) __PYX_ERR(0, 935, __pyx_L1_error)
+ __pyx_ptype_5_yaml_CEmitter = &__pyx_type_5_yaml_CEmitter;
+ __Pyx_RefNannyFinishContext();
+ return 0;
+ __pyx_L1_error:;
+ __Pyx_RefNannyFinishContext();
+ return -1;
+}
+
+static int __Pyx_modinit_type_import_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0);
+ /*--- Type import code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_variable_import_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0);
+ /*--- Variable import code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_function_import_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0);
+ /*--- Function import code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+
+#if PY_MAJOR_VERSION < 3
+#ifdef CYTHON_NO_PYINIT_EXPORT
+#define __Pyx_PyMODINIT_FUNC void
+#else
+#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
+#endif
+#else
+#ifdef CYTHON_NO_PYINIT_EXPORT
+#define __Pyx_PyMODINIT_FUNC PyObject *
+#else
+#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
+#endif
+#endif
+
+
+#if PY_MAJOR_VERSION < 3
+__Pyx_PyMODINIT_FUNC init_yaml(void) CYTHON_SMALL_CODE; /*proto*/
+__Pyx_PyMODINIT_FUNC init_yaml(void)
+#else
+__Pyx_PyMODINIT_FUNC PyInit__yaml(void) CYTHON_SMALL_CODE; /*proto*/
+__Pyx_PyMODINIT_FUNC PyInit__yaml(void)
+#if CYTHON_PEP489_MULTI_PHASE_INIT
+{
+ return PyModuleDef_Init(&__pyx_moduledef);
+}
+static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) {
+ #if PY_VERSION_HEX >= 0x030700A1
+ static PY_INT64_T main_interpreter_id = -1;
+ PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp);
+ if (main_interpreter_id == -1) {
+ main_interpreter_id = current_id;
+ return (unlikely(current_id == -1)) ? -1 : 0;
+ } else if (unlikely(main_interpreter_id != current_id))
+ #else
+ static PyInterpreterState *main_interpreter = NULL;
+ PyInterpreterState *current_interpreter = PyThreadState_Get()->interp;
+ if (!main_interpreter) {
+ main_interpreter = current_interpreter;
+ } else if (unlikely(main_interpreter != current_interpreter))
+ #endif
+ {
+ PyErr_SetString(
+ PyExc_ImportError,
+ "Interpreter change detected - this module can only be loaded into one interpreter per process.");
+ return -1;
+ }
+ return 0;
+}
+static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) {
+ PyObject *value = PyObject_GetAttrString(spec, from_name);
+ int result = 0;
+ if (likely(value)) {
+ if (allow_none || value != Py_None) {
+ result = PyDict_SetItemString(moddict, to_name, value);
+ }
+ Py_DECREF(value);
+ } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
+ PyErr_Clear();
+ } else {
+ result = -1;
+ }
+ return result;
+}
+static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) {
+ PyObject *module = NULL, *moddict, *modname;
+ if (__Pyx_check_single_interpreter())
+ return NULL;
+ if (__pyx_m)
+ return __Pyx_NewRef(__pyx_m);
+ modname = PyObject_GetAttrString(spec, "name");
+ if (unlikely(!modname)) goto bad;
+ module = PyModule_NewObject(modname);
+ Py_DECREF(modname);
+ if (unlikely(!module)) goto bad;
+ moddict = PyModule_GetDict(module);
+ if (unlikely(!moddict)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad;
+ return module;
+bad:
+ Py_XDECREF(module);
+ return NULL;
+}
+
+
+static CYTHON_SMALL_CODE int __pyx_pymod_exec__yaml(PyObject *__pyx_pyinit_module)
+#endif
+#endif
+{
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ __Pyx_RefNannyDeclarations
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ if (__pyx_m) {
+ if (__pyx_m == __pyx_pyinit_module) return 0;
+ PyErr_SetString(PyExc_RuntimeError, "Module '_yaml' has already been imported. Re-initialisation is not supported.");
+ return -1;
+ }
+ #elif PY_MAJOR_VERSION >= 3
+ if (__pyx_m) return __Pyx_NewRef(__pyx_m);
+ #endif
+ #if CYTHON_REFNANNY
+__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
+if (!__Pyx_RefNanny) {
+ PyErr_Clear();
+ __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
+ if (!__Pyx_RefNanny)
+ Py_FatalError("failed to import 'refnanny' module");
+}
+#endif
+ __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__yaml(void)", 0);
+ if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 2, __pyx_L1_error)
+ #ifdef __Pxy_PyFrame_Initialize_Offsets
+ __Pxy_PyFrame_Initialize_Offsets();
+ #endif
+ __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 2, __pyx_L1_error)
+ __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 2, __pyx_L1_error)
+ __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 2, __pyx_L1_error)
+ #ifdef __Pyx_CyFunction_USED
+ if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 2, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_FusedFunction_USED
+ if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 2, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_Coroutine_USED
+ if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 2, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_Generator_USED
+ if (__pyx_Generator_init() < 0) __PYX_ERR(0, 2, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_AsyncGen_USED
+ if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 2, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_StopAsyncIteration_USED
+ if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 2, __pyx_L1_error)
+ #endif
+ /*--- Library function declarations ---*/
+ /*--- Threads initialization code ---*/
+ #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
+ #ifdef WITH_THREAD /* Python build with threading support? */
+ PyEval_InitThreads();
+ #endif
+ #endif
+ /*--- Module creation code ---*/
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ __pyx_m = __pyx_pyinit_module;
+ Py_INCREF(__pyx_m);
+ #else
+ #if PY_MAJOR_VERSION < 3
+ __pyx_m = Py_InitModule4("_yaml", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m);
+ #else
+ __pyx_m = PyModule_Create(&__pyx_moduledef);
+ #endif
+ if (unlikely(!__pyx_m)) __PYX_ERR(0, 2, __pyx_L1_error)
+ #endif
+ __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 2, __pyx_L1_error)
+ Py_INCREF(__pyx_d);
+ __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 2, __pyx_L1_error)
+ Py_INCREF(__pyx_b);
+ __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 2, __pyx_L1_error)
+ Py_INCREF(__pyx_cython_runtime);
+ if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 2, __pyx_L1_error);
+ /*--- Initialize various global constants etc. ---*/
+ if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 2, __pyx_L1_error)
+ #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT)
+ if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 2, __pyx_L1_error)
+ #endif
+ if (__pyx_module_is_main__yaml) {
+ if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name_2, __pyx_n_s_main) < 0) __PYX_ERR(0, 2, __pyx_L1_error)
+ }
+ #if PY_MAJOR_VERSION >= 3
+ {
+ PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 2, __pyx_L1_error)
+ if (!PyDict_GetItemString(modules, "_yaml")) {
+ if (unlikely(PyDict_SetItemString(modules, "_yaml", __pyx_m) < 0)) __PYX_ERR(0, 2, __pyx_L1_error)
+ }
+ }
+ #endif
+ /*--- Builtin init code ---*/
+ if (__Pyx_InitCachedBuiltins() < 0) goto __pyx_L1_error;
+ /*--- Constants init code ---*/
+ if (__Pyx_InitCachedConstants() < 0) goto __pyx_L1_error;
+ /*--- Global type/function init code ---*/
+ (void)__Pyx_modinit_global_init_code();
+ (void)__Pyx_modinit_variable_export_code();
+ (void)__Pyx_modinit_function_export_code();
+ if (unlikely(__Pyx_modinit_type_init_code() != 0)) goto __pyx_L1_error;
+ (void)__Pyx_modinit_type_import_code();
+ (void)__Pyx_modinit_variable_import_code();
+ (void)__Pyx_modinit_function_import_code();
+ /*--- Execution code ---*/
+ #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)
+ if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 2, __pyx_L1_error)
+ #endif
+
+ /* "_yaml.pyx":2
+ *
+ * import yaml # <<<<<<<<<<<<<<
+ *
+ * def get_version_string():
+ */
+ __pyx_t_1 = __Pyx_Import(__pyx_n_s_yaml, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_yaml, __pyx_t_1) < 0) __PYX_ERR(0, 2, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":4
+ * import yaml
+ *
+ * def get_version_string(): # <<<<<<<<<<<<<<
+ * cdef char *value
+ * value = yaml_get_version_string()
+ */
+ __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_5_yaml_1get_version_string, NULL, __pyx_n_s_yaml_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_version_string, __pyx_t_1) < 0) __PYX_ERR(0, 4, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":12
+ * return PyUnicode_FromString(value)
+ *
+ * def get_version(): # <<<<<<<<<<<<<<
+ * cdef int major, minor, patch
+ * yaml_get_version(&major, &minor, &patch)
+ */
+ __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_5_yaml_3get_version, NULL, __pyx_n_s_yaml_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 12, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_version, __pyx_t_1) < 0) __PYX_ERR(0, 12, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":18
+ *
+ * #Mark = yaml.error.Mark
+ * YAMLError = yaml.error.YAMLError # <<<<<<<<<<<<<<
+ * ReaderError = yaml.reader.ReaderError
+ * ScannerError = yaml.scanner.ScannerError
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 18, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_error); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_YAMLError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 18, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_YAMLError, __pyx_t_1) < 0) __PYX_ERR(0, 18, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":19
+ * #Mark = yaml.error.Mark
+ * YAMLError = yaml.error.YAMLError
+ * ReaderError = yaml.reader.ReaderError # <<<<<<<<<<<<<<
+ * ScannerError = yaml.scanner.ScannerError
+ * ParserError = yaml.parser.ParserError
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 19, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_reader); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 19, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ReaderError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 19, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ReaderError, __pyx_t_1) < 0) __PYX_ERR(0, 19, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":20
+ * YAMLError = yaml.error.YAMLError
+ * ReaderError = yaml.reader.ReaderError
+ * ScannerError = yaml.scanner.ScannerError # <<<<<<<<<<<<<<
+ * ParserError = yaml.parser.ParserError
+ * ComposerError = yaml.composer.ComposerError
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 20, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_scanner); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 20, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ScannerError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 20, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ScannerError, __pyx_t_1) < 0) __PYX_ERR(0, 20, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":21
+ * ReaderError = yaml.reader.ReaderError
+ * ScannerError = yaml.scanner.ScannerError
+ * ParserError = yaml.parser.ParserError # <<<<<<<<<<<<<<
+ * ComposerError = yaml.composer.ComposerError
+ * ConstructorError = yaml.constructor.ConstructorError
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 21, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_parser); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ParserError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 21, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ParserError, __pyx_t_1) < 0) __PYX_ERR(0, 21, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":22
+ * ScannerError = yaml.scanner.ScannerError
+ * ParserError = yaml.parser.ParserError
+ * ComposerError = yaml.composer.ComposerError # <<<<<<<<<<<<<<
+ * ConstructorError = yaml.constructor.ConstructorError
+ * EmitterError = yaml.emitter.EmitterError
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 22, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_composer); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ComposerError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 22, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ComposerError, __pyx_t_1) < 0) __PYX_ERR(0, 22, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":23
+ * ParserError = yaml.parser.ParserError
+ * ComposerError = yaml.composer.ComposerError
+ * ConstructorError = yaml.constructor.ConstructorError # <<<<<<<<<<<<<<
+ * EmitterError = yaml.emitter.EmitterError
+ * SerializerError = yaml.serializer.SerializerError
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 23, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_constructor); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 23, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ConstructorError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 23, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ConstructorError, __pyx_t_1) < 0) __PYX_ERR(0, 23, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":24
+ * ComposerError = yaml.composer.ComposerError
+ * ConstructorError = yaml.constructor.ConstructorError
+ * EmitterError = yaml.emitter.EmitterError # <<<<<<<<<<<<<<
+ * SerializerError = yaml.serializer.SerializerError
+ * RepresenterError = yaml.representer.RepresenterError
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 24, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_emitter); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 24, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_EmitterError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 24, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_EmitterError, __pyx_t_1) < 0) __PYX_ERR(0, 24, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":25
+ * ConstructorError = yaml.constructor.ConstructorError
+ * EmitterError = yaml.emitter.EmitterError
+ * SerializerError = yaml.serializer.SerializerError # <<<<<<<<<<<<<<
+ * RepresenterError = yaml.representer.RepresenterError
+ *
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 25, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_serializer); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 25, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_SerializerError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 25, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_SerializerError, __pyx_t_1) < 0) __PYX_ERR(0, 25, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":26
+ * EmitterError = yaml.emitter.EmitterError
+ * SerializerError = yaml.serializer.SerializerError
+ * RepresenterError = yaml.representer.RepresenterError # <<<<<<<<<<<<<<
+ *
+ * StreamStartToken = yaml.tokens.StreamStartToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 26, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_representer); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 26, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_RepresenterError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 26, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_RepresenterError, __pyx_t_1) < 0) __PYX_ERR(0, 26, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":28
+ * RepresenterError = yaml.representer.RepresenterError
+ *
+ * StreamStartToken = yaml.tokens.StreamStartToken # <<<<<<<<<<<<<<
+ * StreamEndToken = yaml.tokens.StreamEndToken
+ * DirectiveToken = yaml.tokens.DirectiveToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 28, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 28, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_StreamStartToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 28, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_StreamStartToken, __pyx_t_1) < 0) __PYX_ERR(0, 28, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":29
+ *
+ * StreamStartToken = yaml.tokens.StreamStartToken
+ * StreamEndToken = yaml.tokens.StreamEndToken # <<<<<<<<<<<<<<
+ * DirectiveToken = yaml.tokens.DirectiveToken
+ * DocumentStartToken = yaml.tokens.DocumentStartToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 29, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 29, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_StreamEndToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 29, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_StreamEndToken, __pyx_t_1) < 0) __PYX_ERR(0, 29, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":30
+ * StreamStartToken = yaml.tokens.StreamStartToken
+ * StreamEndToken = yaml.tokens.StreamEndToken
+ * DirectiveToken = yaml.tokens.DirectiveToken # <<<<<<<<<<<<<<
+ * DocumentStartToken = yaml.tokens.DocumentStartToken
+ * DocumentEndToken = yaml.tokens.DocumentEndToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 30, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 30, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_DirectiveToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 30, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_DirectiveToken, __pyx_t_1) < 0) __PYX_ERR(0, 30, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":31
+ * StreamEndToken = yaml.tokens.StreamEndToken
+ * DirectiveToken = yaml.tokens.DirectiveToken
+ * DocumentStartToken = yaml.tokens.DocumentStartToken # <<<<<<<<<<<<<<
+ * DocumentEndToken = yaml.tokens.DocumentEndToken
+ * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 31, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 31, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_DocumentStartToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 31, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_DocumentStartToken, __pyx_t_1) < 0) __PYX_ERR(0, 31, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":32
+ * DirectiveToken = yaml.tokens.DirectiveToken
+ * DocumentStartToken = yaml.tokens.DocumentStartToken
+ * DocumentEndToken = yaml.tokens.DocumentEndToken # <<<<<<<<<<<<<<
+ * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken
+ * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 32, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 32, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_DocumentEndToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 32, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_DocumentEndToken, __pyx_t_1) < 0) __PYX_ERR(0, 32, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":33
+ * DocumentStartToken = yaml.tokens.DocumentStartToken
+ * DocumentEndToken = yaml.tokens.DocumentEndToken
+ * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken # <<<<<<<<<<<<<<
+ * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken
+ * BlockEndToken = yaml.tokens.BlockEndToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 33, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 33, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_BlockSequenceStartToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 33, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_BlockSequenceStartToken, __pyx_t_1) < 0) __PYX_ERR(0, 33, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":34
+ * DocumentEndToken = yaml.tokens.DocumentEndToken
+ * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken
+ * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken # <<<<<<<<<<<<<<
+ * BlockEndToken = yaml.tokens.BlockEndToken
+ * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 34, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 34, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_BlockMappingStartToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 34, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_BlockMappingStartToken, __pyx_t_1) < 0) __PYX_ERR(0, 34, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":35
+ * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken
+ * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken
+ * BlockEndToken = yaml.tokens.BlockEndToken # <<<<<<<<<<<<<<
+ * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken
+ * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 35, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 35, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_BlockEndToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 35, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_BlockEndToken, __pyx_t_1) < 0) __PYX_ERR(0, 35, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":36
+ * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken
+ * BlockEndToken = yaml.tokens.BlockEndToken
+ * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken # <<<<<<<<<<<<<<
+ * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken
+ * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 36, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 36, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_FlowSequenceStartToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 36, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_FlowSequenceStartToken, __pyx_t_1) < 0) __PYX_ERR(0, 36, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":37
+ * BlockEndToken = yaml.tokens.BlockEndToken
+ * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken
+ * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken # <<<<<<<<<<<<<<
+ * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken
+ * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 37, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 37, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_FlowMappingStartToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 37, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_FlowMappingStartToken, __pyx_t_1) < 0) __PYX_ERR(0, 37, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":38
+ * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken
+ * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken
+ * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken # <<<<<<<<<<<<<<
+ * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken
+ * KeyToken = yaml.tokens.KeyToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 38, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 38, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_FlowSequenceEndToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 38, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_FlowSequenceEndToken, __pyx_t_1) < 0) __PYX_ERR(0, 38, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":39
+ * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken
+ * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken
+ * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken # <<<<<<<<<<<<<<
+ * KeyToken = yaml.tokens.KeyToken
+ * ValueToken = yaml.tokens.ValueToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 39, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 39, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_FlowMappingEndToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 39, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_FlowMappingEndToken, __pyx_t_1) < 0) __PYX_ERR(0, 39, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":40
+ * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken
+ * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken
+ * KeyToken = yaml.tokens.KeyToken # <<<<<<<<<<<<<<
+ * ValueToken = yaml.tokens.ValueToken
+ * BlockEntryToken = yaml.tokens.BlockEntryToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 40, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 40, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_KeyToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 40, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_KeyToken, __pyx_t_1) < 0) __PYX_ERR(0, 40, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":41
+ * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken
+ * KeyToken = yaml.tokens.KeyToken
+ * ValueToken = yaml.tokens.ValueToken # <<<<<<<<<<<<<<
+ * BlockEntryToken = yaml.tokens.BlockEntryToken
+ * FlowEntryToken = yaml.tokens.FlowEntryToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 41, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 41, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ValueToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 41, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ValueToken, __pyx_t_1) < 0) __PYX_ERR(0, 41, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":42
+ * KeyToken = yaml.tokens.KeyToken
+ * ValueToken = yaml.tokens.ValueToken
+ * BlockEntryToken = yaml.tokens.BlockEntryToken # <<<<<<<<<<<<<<
+ * FlowEntryToken = yaml.tokens.FlowEntryToken
+ * AliasToken = yaml.tokens.AliasToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 42, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 42, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_BlockEntryToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 42, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_BlockEntryToken, __pyx_t_1) < 0) __PYX_ERR(0, 42, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":43
+ * ValueToken = yaml.tokens.ValueToken
+ * BlockEntryToken = yaml.tokens.BlockEntryToken
+ * FlowEntryToken = yaml.tokens.FlowEntryToken # <<<<<<<<<<<<<<
+ * AliasToken = yaml.tokens.AliasToken
+ * AnchorToken = yaml.tokens.AnchorToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 43, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 43, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_FlowEntryToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 43, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_FlowEntryToken, __pyx_t_1) < 0) __PYX_ERR(0, 43, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":44
+ * BlockEntryToken = yaml.tokens.BlockEntryToken
+ * FlowEntryToken = yaml.tokens.FlowEntryToken
+ * AliasToken = yaml.tokens.AliasToken # <<<<<<<<<<<<<<
+ * AnchorToken = yaml.tokens.AnchorToken
+ * TagToken = yaml.tokens.TagToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 44, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 44, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_AliasToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 44, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_AliasToken, __pyx_t_1) < 0) __PYX_ERR(0, 44, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":45
+ * FlowEntryToken = yaml.tokens.FlowEntryToken
+ * AliasToken = yaml.tokens.AliasToken
+ * AnchorToken = yaml.tokens.AnchorToken # <<<<<<<<<<<<<<
+ * TagToken = yaml.tokens.TagToken
+ * ScalarToken = yaml.tokens.ScalarToken
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 45, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_AnchorToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_AnchorToken, __pyx_t_1) < 0) __PYX_ERR(0, 45, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":46
+ * AliasToken = yaml.tokens.AliasToken
+ * AnchorToken = yaml.tokens.AnchorToken
+ * TagToken = yaml.tokens.TagToken # <<<<<<<<<<<<<<
+ * ScalarToken = yaml.tokens.ScalarToken
+ *
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 46, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 46, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_TagToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 46, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_TagToken, __pyx_t_1) < 0) __PYX_ERR(0, 46, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":47
+ * AnchorToken = yaml.tokens.AnchorToken
+ * TagToken = yaml.tokens.TagToken
+ * ScalarToken = yaml.tokens.ScalarToken # <<<<<<<<<<<<<<
+ *
+ * StreamStartEvent = yaml.events.StreamStartEvent
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 47, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 47, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ScalarToken); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 47, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ScalarToken, __pyx_t_1) < 0) __PYX_ERR(0, 47, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":49
+ * ScalarToken = yaml.tokens.ScalarToken
+ *
+ * StreamStartEvent = yaml.events.StreamStartEvent # <<<<<<<<<<<<<<
+ * StreamEndEvent = yaml.events.StreamEndEvent
+ * DocumentStartEvent = yaml.events.DocumentStartEvent
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 49, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 49, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_StreamStartEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 49, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_StreamStartEvent, __pyx_t_1) < 0) __PYX_ERR(0, 49, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":50
+ *
+ * StreamStartEvent = yaml.events.StreamStartEvent
+ * StreamEndEvent = yaml.events.StreamEndEvent # <<<<<<<<<<<<<<
+ * DocumentStartEvent = yaml.events.DocumentStartEvent
+ * DocumentEndEvent = yaml.events.DocumentEndEvent
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 50, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 50, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_StreamEndEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 50, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_StreamEndEvent, __pyx_t_1) < 0) __PYX_ERR(0, 50, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":51
+ * StreamStartEvent = yaml.events.StreamStartEvent
+ * StreamEndEvent = yaml.events.StreamEndEvent
+ * DocumentStartEvent = yaml.events.DocumentStartEvent # <<<<<<<<<<<<<<
+ * DocumentEndEvent = yaml.events.DocumentEndEvent
+ * AliasEvent = yaml.events.AliasEvent
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 51, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 51, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_DocumentStartEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 51, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_DocumentStartEvent, __pyx_t_1) < 0) __PYX_ERR(0, 51, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":52
+ * StreamEndEvent = yaml.events.StreamEndEvent
+ * DocumentStartEvent = yaml.events.DocumentStartEvent
+ * DocumentEndEvent = yaml.events.DocumentEndEvent # <<<<<<<<<<<<<<
+ * AliasEvent = yaml.events.AliasEvent
+ * ScalarEvent = yaml.events.ScalarEvent
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 52, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 52, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_DocumentEndEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 52, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_DocumentEndEvent, __pyx_t_1) < 0) __PYX_ERR(0, 52, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":53
+ * DocumentStartEvent = yaml.events.DocumentStartEvent
+ * DocumentEndEvent = yaml.events.DocumentEndEvent
+ * AliasEvent = yaml.events.AliasEvent # <<<<<<<<<<<<<<
+ * ScalarEvent = yaml.events.ScalarEvent
+ * SequenceStartEvent = yaml.events.SequenceStartEvent
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 53, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 53, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_AliasEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 53, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_AliasEvent, __pyx_t_1) < 0) __PYX_ERR(0, 53, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":54
+ * DocumentEndEvent = yaml.events.DocumentEndEvent
+ * AliasEvent = yaml.events.AliasEvent
+ * ScalarEvent = yaml.events.ScalarEvent # <<<<<<<<<<<<<<
+ * SequenceStartEvent = yaml.events.SequenceStartEvent
+ * SequenceEndEvent = yaml.events.SequenceEndEvent
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 54, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 54, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ScalarEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 54, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ScalarEvent, __pyx_t_1) < 0) __PYX_ERR(0, 54, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":55
+ * AliasEvent = yaml.events.AliasEvent
+ * ScalarEvent = yaml.events.ScalarEvent
+ * SequenceStartEvent = yaml.events.SequenceStartEvent # <<<<<<<<<<<<<<
+ * SequenceEndEvent = yaml.events.SequenceEndEvent
+ * MappingStartEvent = yaml.events.MappingStartEvent
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 55, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 55, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_SequenceStartEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 55, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_SequenceStartEvent, __pyx_t_1) < 0) __PYX_ERR(0, 55, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":56
+ * ScalarEvent = yaml.events.ScalarEvent
+ * SequenceStartEvent = yaml.events.SequenceStartEvent
+ * SequenceEndEvent = yaml.events.SequenceEndEvent # <<<<<<<<<<<<<<
+ * MappingStartEvent = yaml.events.MappingStartEvent
+ * MappingEndEvent = yaml.events.MappingEndEvent
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 56, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 56, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_SequenceEndEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 56, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_SequenceEndEvent, __pyx_t_1) < 0) __PYX_ERR(0, 56, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":57
+ * SequenceStartEvent = yaml.events.SequenceStartEvent
+ * SequenceEndEvent = yaml.events.SequenceEndEvent
+ * MappingStartEvent = yaml.events.MappingStartEvent # <<<<<<<<<<<<<<
+ * MappingEndEvent = yaml.events.MappingEndEvent
+ *
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 57, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 57, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_MappingStartEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 57, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_MappingStartEvent, __pyx_t_1) < 0) __PYX_ERR(0, 57, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":58
+ * SequenceEndEvent = yaml.events.SequenceEndEvent
+ * MappingStartEvent = yaml.events.MappingStartEvent
+ * MappingEndEvent = yaml.events.MappingEndEvent # <<<<<<<<<<<<<<
+ *
+ * ScalarNode = yaml.nodes.ScalarNode
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 58, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 58, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_MappingEndEvent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 58, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_MappingEndEvent, __pyx_t_1) < 0) __PYX_ERR(0, 58, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":60
+ * MappingEndEvent = yaml.events.MappingEndEvent
+ *
+ * ScalarNode = yaml.nodes.ScalarNode # <<<<<<<<<<<<<<
+ * SequenceNode = yaml.nodes.SequenceNode
+ * MappingNode = yaml.nodes.MappingNode
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 60, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_nodes); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 60, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ScalarNode); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 60, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ScalarNode, __pyx_t_1) < 0) __PYX_ERR(0, 60, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":61
+ *
+ * ScalarNode = yaml.nodes.ScalarNode
+ * SequenceNode = yaml.nodes.SequenceNode # <<<<<<<<<<<<<<
+ * MappingNode = yaml.nodes.MappingNode
+ *
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 61, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_nodes); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 61, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_SequenceNode); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 61, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_SequenceNode, __pyx_t_1) < 0) __PYX_ERR(0, 61, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":62
+ * ScalarNode = yaml.nodes.ScalarNode
+ * SequenceNode = yaml.nodes.SequenceNode
+ * MappingNode = yaml.nodes.MappingNode # <<<<<<<<<<<<<<
+ *
+ * cdef class Mark:
+ */
+ __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 62, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_nodes); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 62, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_MappingNode); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 62, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_MappingNode, __pyx_t_1) < 0) __PYX_ERR(0, 62, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "(tree fragment)":1
+ * def __pyx_unpickle_Mark(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<<
+ * cdef object __pyx_PickleError
+ * cdef object __pyx_result
+ */
+ __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_5_yaml_5__pyx_unpickle_Mark, NULL, __pyx_n_s_yaml_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 1, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_Mark, __pyx_t_1) < 0) __PYX_ERR(1, 1, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":2
+ *
+ * import yaml # <<<<<<<<<<<<<<
+ *
+ * def get_version_string():
+ */
+ __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 2, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /*--- Wrapped vars code ---*/
+
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ if (__pyx_m) {
+ if (__pyx_d) {
+ __Pyx_AddTraceback("init _yaml", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ }
+ Py_CLEAR(__pyx_m);
+ } else if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_ImportError, "init _yaml");
+ }
+ __pyx_L0:;
+ __Pyx_RefNannyFinishContext();
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ return (__pyx_m != NULL) ? 0 : -1;
+ #elif PY_MAJOR_VERSION >= 3
+ return __pyx_m;
+ #else
+ return;
+ #endif
+}
+
+/* --- Runtime support code --- */
+/* Refnanny */
+#if CYTHON_REFNANNY
+static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
+ PyObject *m = NULL, *p = NULL;
+ void *r = NULL;
+ m = PyImport_ImportModule(modname);
+ if (!m) goto end;
+ p = PyObject_GetAttrString(m, "RefNannyAPI");
+ if (!p) goto end;
+ r = PyLong_AsVoidPtr(p);
+end:
+ Py_XDECREF(p);
+ Py_XDECREF(m);
+ return (__Pyx_RefNannyAPIStruct *)r;
+}
+#endif
+
+/* PyObjectGetAttrStr */
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) {
+ PyTypeObject* tp = Py_TYPE(obj);
+ if (likely(tp->tp_getattro))
+ return tp->tp_getattro(obj, attr_name);
+#if PY_MAJOR_VERSION < 3
+ if (likely(tp->tp_getattr))
+ return tp->tp_getattr(obj, PyString_AS_STRING(attr_name));
+#endif
+ return PyObject_GetAttr(obj, attr_name);
+}
+#endif
+
+/* GetBuiltinName */
+static PyObject *__Pyx_GetBuiltinName(PyObject *name) {
+ PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name);
+ if (unlikely(!result)) {
+ PyErr_Format(PyExc_NameError,
+#if PY_MAJOR_VERSION >= 3
+ "name '%U' is not defined", name);
+#else
+ "name '%.200s' is not defined", PyString_AS_STRING(name));
+#endif
+ }
+ return result;
+}
+
+/* RaiseArgTupleInvalid */
+static void __Pyx_RaiseArgtupleInvalid(
+ const char* func_name,
+ int exact,
+ Py_ssize_t num_min,
+ Py_ssize_t num_max,
+ Py_ssize_t num_found)
+{
+ Py_ssize_t num_expected;
+ const char *more_or_less;
+ if (num_found < num_min) {
+ num_expected = num_min;
+ more_or_less = "at least";
+ } else {
+ num_expected = num_max;
+ more_or_less = "at most";
+ }
+ if (exact) {
+ more_or_less = "exactly";
+ }
+ PyErr_Format(PyExc_TypeError,
+ "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)",
+ func_name, more_or_less, num_expected,
+ (num_expected == 1) ? "" : "s", num_found);
+}
+
+/* RaiseDoubleKeywords */
+static void __Pyx_RaiseDoubleKeywordsError(
+ const char* func_name,
+ PyObject* kw_name)
+{
+ PyErr_Format(PyExc_TypeError,
+ #if PY_MAJOR_VERSION >= 3
+ "%s() got multiple values for keyword argument '%U'", func_name, kw_name);
+ #else
+ "%s() got multiple values for keyword argument '%s'", func_name,
+ PyString_AsString(kw_name));
+ #endif
+}
+
+/* ParseKeywords */
+static int __Pyx_ParseOptionalKeywords(
+ PyObject *kwds,
+ PyObject **argnames[],
+ PyObject *kwds2,
+ PyObject *values[],
+ Py_ssize_t num_pos_args,
+ const char* function_name)
+{
+ PyObject *key = 0, *value = 0;
+ Py_ssize_t pos = 0;
+ PyObject*** name;
+ PyObject*** first_kw_arg = argnames + num_pos_args;
+ while (PyDict_Next(kwds, &pos, &key, &value)) {
+ name = first_kw_arg;
+ while (*name && (**name != key)) name++;
+ if (*name) {
+ values[name-argnames] = value;
+ continue;
+ }
+ name = first_kw_arg;
+ #if PY_MAJOR_VERSION < 3
+ if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) {
+ while (*name) {
+ if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
+ && _PyString_Eq(**name, key)) {
+ values[name-argnames] = value;
+ break;
+ }
+ name++;
+ }
+ if (*name) continue;
+ else {
+ PyObject*** argname = argnames;
+ while (argname != first_kw_arg) {
+ if ((**argname == key) || (
+ (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key))
+ && _PyString_Eq(**argname, key))) {
+ goto arg_passed_twice;
+ }
+ argname++;
+ }
+ }
+ } else
+ #endif
+ if (likely(PyUnicode_Check(key))) {
+ while (*name) {
+ int cmp = (**name == key) ? 0 :
+ #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
+ (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
+ #endif
+ PyUnicode_Compare(**name, key);
+ if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
+ if (cmp == 0) {
+ values[name-argnames] = value;
+ break;
+ }
+ name++;
+ }
+ if (*name) continue;
+ else {
+ PyObject*** argname = argnames;
+ while (argname != first_kw_arg) {
+ int cmp = (**argname == key) ? 0 :
+ #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
+ (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
+ #endif
+ PyUnicode_Compare(**argname, key);
+ if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
+ if (cmp == 0) goto arg_passed_twice;
+ argname++;
+ }
+ }
+ } else
+ goto invalid_keyword_type;
+ if (kwds2) {
+ if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad;
+ } else {
+ goto invalid_keyword;
+ }
+ }
+ return 0;
+arg_passed_twice:
+ __Pyx_RaiseDoubleKeywordsError(function_name, key);
+ goto bad;
+invalid_keyword_type:
+ PyErr_Format(PyExc_TypeError,
+ "%.200s() keywords must be strings", function_name);
+ goto bad;
+invalid_keyword:
+ PyErr_Format(PyExc_TypeError,
+ #if PY_MAJOR_VERSION < 3
+ "%.200s() got an unexpected keyword argument '%.200s'",
+ function_name, PyString_AsString(key));
+ #else
+ "%s() got an unexpected keyword argument '%U'",
+ function_name, key);
+ #endif
+bad:
+ return -1;
+}
+
+/* PyErrExceptionMatches */
+#if CYTHON_FAST_THREAD_STATE
+static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
+ Py_ssize_t i, n;
+ n = PyTuple_GET_SIZE(tuple);
+#if PY_MAJOR_VERSION >= 3
+ for (i=0; i<n; i++) {
+ if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
+ }
+#endif
+ for (i=0; i<n; i++) {
+ if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1;
+ }
+ return 0;
+}
+static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) {
+ PyObject *exc_type = tstate->curexc_type;
+ if (exc_type == err) return 1;
+ if (unlikely(!exc_type)) return 0;
+ if (unlikely(PyTuple_Check(err)))
+ return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err);
+ return __Pyx_PyErr_GivenExceptionMatches(exc_type, err);
+}
+#endif
+
+/* PyErrFetchRestore */
+#if CYTHON_FAST_THREAD_STATE
+static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
+ PyObject *tmp_type, *tmp_value, *tmp_tb;
+ tmp_type = tstate->curexc_type;
+ tmp_value = tstate->curexc_value;
+ tmp_tb = tstate->curexc_traceback;
+ tstate->curexc_type = type;
+ tstate->curexc_value = value;
+ tstate->curexc_traceback = tb;
+ Py_XDECREF(tmp_type);
+ Py_XDECREF(tmp_value);
+ Py_XDECREF(tmp_tb);
+}
+static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
+ *type = tstate->curexc_type;
+ *value = tstate->curexc_value;
+ *tb = tstate->curexc_traceback;
+ tstate->curexc_type = 0;
+ tstate->curexc_value = 0;
+ tstate->curexc_traceback = 0;
+}
+#endif
+
+/* GetAttr */
+static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) {
+#if CYTHON_USE_TYPE_SLOTS
+#if PY_MAJOR_VERSION >= 3
+ if (likely(PyUnicode_Check(n)))
+#else
+ if (likely(PyString_Check(n)))
+#endif
+ return __Pyx_PyObject_GetAttrStr(o, n);
+#endif
+ return PyObject_GetAttr(o, n);
+}
+
+/* GetAttr3 */
+static PyObject *__Pyx_GetAttr3Default(PyObject *d) {
+ __Pyx_PyThreadState_declare
+ __Pyx_PyThreadState_assign
+ if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
+ return NULL;
+ __Pyx_PyErr_Clear();
+ Py_INCREF(d);
+ return d;
+}
+static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) {
+ PyObject *r = __Pyx_GetAttr(o, n);
+ return (likely(r)) ? r : __Pyx_GetAttr3Default(d);
+}
+
+/* PyDictVersioning */
+#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) {
+ PyObject *dict = Py_TYPE(obj)->tp_dict;
+ return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0;
+}
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) {
+ PyObject **dictptr = NULL;
+ Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset;
+ if (offset) {
+#if CYTHON_COMPILING_IN_CPYTHON
+ dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj);
+#else
+ dictptr = _PyObject_GetDictPtr(obj);
+#endif
+ }
+ return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0;
+}
+static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) {
+ PyObject *dict = Py_TYPE(obj)->tp_dict;
+ if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict)))
+ return 0;
+ return obj_dict_version == __Pyx_get_object_dict_version(obj);
+}
+#endif
+
+/* GetModuleGlobalName */
+#if CYTHON_USE_DICT_VERSIONS
+static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value)
+#else
+static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name)
+#endif
+{
+ PyObject *result;
+#if !CYTHON_AVOID_BORROWED_REFS
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1
+ result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash);
+ __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)
+ if (likely(result)) {
+ return __Pyx_NewRef(result);
+ } else if (unlikely(PyErr_Occurred())) {
+ return NULL;
+ }
+#else
+ result = PyDict_GetItem(__pyx_d, name);
+ __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)
+ if (likely(result)) {
+ return __Pyx_NewRef(result);
+ }
+#endif
+#else
+ result = PyObject_GetItem(__pyx_d, name);
+ __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)
+ if (likely(result)) {
+ return __Pyx_NewRef(result);
+ }
+ PyErr_Clear();
+#endif
+ return __Pyx_GetBuiltinName(name);
+}
+
+/* GetTopmostException */
+#if CYTHON_USE_EXC_INFO_STACK
+static _PyErr_StackItem *
+__Pyx_PyErr_GetTopmostException(PyThreadState *tstate)
+{
+ _PyErr_StackItem *exc_info = tstate->exc_info;
+ while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) &&
+ exc_info->previous_item != NULL)
+ {
+ exc_info = exc_info->previous_item;
+ }
+ return exc_info;
+}
+#endif
+
+/* SaveResetException */
+#if CYTHON_FAST_THREAD_STATE
+static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
+ #if CYTHON_USE_EXC_INFO_STACK
+ _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate);
+ *type = exc_info->exc_type;
+ *value = exc_info->exc_value;
+ *tb = exc_info->exc_traceback;
+ #else
+ *type = tstate->exc_type;
+ *value = tstate->exc_value;
+ *tb = tstate->exc_traceback;
+ #endif
+ Py_XINCREF(*type);
+ Py_XINCREF(*value);
+ Py_XINCREF(*tb);
+}
+static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
+ PyObject *tmp_type, *tmp_value, *tmp_tb;
+ #if CYTHON_USE_EXC_INFO_STACK
+ _PyErr_StackItem *exc_info = tstate->exc_info;
+ tmp_type = exc_info->exc_type;
+ tmp_value = exc_info->exc_value;
+ tmp_tb = exc_info->exc_traceback;
+ exc_info->exc_type = type;
+ exc_info->exc_value = value;
+ exc_info->exc_traceback = tb;
+ #else
+ tmp_type = tstate->exc_type;
+ tmp_value = tstate->exc_value;
+ tmp_tb = tstate->exc_traceback;
+ tstate->exc_type = type;
+ tstate->exc_value = value;
+ tstate->exc_traceback = tb;
+ #endif
+ Py_XDECREF(tmp_type);
+ Py_XDECREF(tmp_value);
+ Py_XDECREF(tmp_tb);
+}
+#endif
+
+/* GetException */
+#if CYTHON_FAST_THREAD_STATE
+static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb)
+#else
+static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb)
+#endif
+{
+ PyObject *local_type, *local_value, *local_tb;
+#if CYTHON_FAST_THREAD_STATE
+ PyObject *tmp_type, *tmp_value, *tmp_tb;
+ local_type = tstate->curexc_type;
+ local_value = tstate->curexc_value;
+ local_tb = tstate->curexc_traceback;
+ tstate->curexc_type = 0;
+ tstate->curexc_value = 0;
+ tstate->curexc_traceback = 0;
+#else
+ PyErr_Fetch(&local_type, &local_value, &local_tb);
+#endif
+ PyErr_NormalizeException(&local_type, &local_value, &local_tb);
+#if CYTHON_FAST_THREAD_STATE
+ if (unlikely(tstate->curexc_type))
+#else
+ if (unlikely(PyErr_Occurred()))
+#endif
+ goto bad;
+ #if PY_MAJOR_VERSION >= 3
+ if (local_tb) {
+ if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0))
+ goto bad;
+ }
+ #endif
+ Py_XINCREF(local_tb);
+ Py_XINCREF(local_type);
+ Py_XINCREF(local_value);
+ *type = local_type;
+ *value = local_value;
+ *tb = local_tb;
+#if CYTHON_FAST_THREAD_STATE
+ #if CYTHON_USE_EXC_INFO_STACK
+ {
+ _PyErr_StackItem *exc_info = tstate->exc_info;
+ tmp_type = exc_info->exc_type;
+ tmp_value = exc_info->exc_value;
+ tmp_tb = exc_info->exc_traceback;
+ exc_info->exc_type = local_type;
+ exc_info->exc_value = local_value;
+ exc_info->exc_traceback = local_tb;
+ }
+ #else
+ tmp_type = tstate->exc_type;
+ tmp_value = tstate->exc_value;
+ tmp_tb = tstate->exc_traceback;
+ tstate->exc_type = local_type;
+ tstate->exc_value = local_value;
+ tstate->exc_traceback = local_tb;
+ #endif
+ Py_XDECREF(tmp_type);
+ Py_XDECREF(tmp_value);
+ Py_XDECREF(tmp_tb);
+#else
+ PyErr_SetExcInfo(local_type, local_value, local_tb);
+#endif
+ return 0;
+bad:
+ *type = 0;
+ *value = 0;
+ *tb = 0;
+ Py_XDECREF(local_type);
+ Py_XDECREF(local_value);
+ Py_XDECREF(local_tb);
+ return -1;
+}
+
+/* PyObjectCall */
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) {
+ PyObject *result;
+ ternaryfunc call = func->ob_type->tp_call;
+ if (unlikely(!call))
+ return PyObject_Call(func, arg, kw);
+ if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
+ return NULL;
+ result = (*call)(func, arg, kw);
+ Py_LeaveRecursiveCall();
+ if (unlikely(!result) && unlikely(!PyErr_Occurred())) {
+ PyErr_SetString(
+ PyExc_SystemError,
+ "NULL result without error in PyObject_Call");
+ }
+ return result;
+}
+#endif
+
+/* RaiseException */
+#if PY_MAJOR_VERSION < 3
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb,
+ CYTHON_UNUSED PyObject *cause) {
+ __Pyx_PyThreadState_declare
+ Py_XINCREF(type);
+ if (!value || value == Py_None)
+ value = NULL;
+ else
+ Py_INCREF(value);
+ if (!tb || tb == Py_None)
+ tb = NULL;
+ else {
+ Py_INCREF(tb);
+ if (!PyTraceBack_Check(tb)) {
+ PyErr_SetString(PyExc_TypeError,
+ "raise: arg 3 must be a traceback or None");
+ goto raise_error;
+ }
+ }
+ if (PyType_Check(type)) {
+#if CYTHON_COMPILING_IN_PYPY
+ if (!value) {
+ Py_INCREF(Py_None);
+ value = Py_None;
+ }
+#endif
+ PyErr_NormalizeException(&type, &value, &tb);
+ } else {
+ if (value) {
+ PyErr_SetString(PyExc_TypeError,
+ "instance exception may not have a separate value");
+ goto raise_error;
+ }
+ value = type;
+ type = (PyObject*) Py_TYPE(type);
+ Py_INCREF(type);
+ if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) {
+ PyErr_SetString(PyExc_TypeError,
+ "raise: exception class must be a subclass of BaseException");
+ goto raise_error;
+ }
+ }
+ __Pyx_PyThreadState_assign
+ __Pyx_ErrRestore(type, value, tb);
+ return;
+raise_error:
+ Py_XDECREF(value);
+ Py_XDECREF(type);
+ Py_XDECREF(tb);
+ return;
+}
+#else
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) {
+ PyObject* owned_instance = NULL;
+ if (tb == Py_None) {
+ tb = 0;
+ } else if (tb && !PyTraceBack_Check(tb)) {
+ PyErr_SetString(PyExc_TypeError,
+ "raise: arg 3 must be a traceback or None");
+ goto bad;
+ }
+ if (value == Py_None)
+ value = 0;
+ if (PyExceptionInstance_Check(type)) {
+ if (value) {
+ PyErr_SetString(PyExc_TypeError,
+ "instance exception may not have a separate value");
+ goto bad;
+ }
+ value = type;
+ type = (PyObject*) Py_TYPE(value);
+ } else if (PyExceptionClass_Check(type)) {
+ PyObject *instance_class = NULL;
+ if (value && PyExceptionInstance_Check(value)) {
+ instance_class = (PyObject*) Py_TYPE(value);
+ if (instance_class != type) {
+ int is_subclass = PyObject_IsSubclass(instance_class, type);
+ if (!is_subclass) {
+ instance_class = NULL;
+ } else if (unlikely(is_subclass == -1)) {
+ goto bad;
+ } else {
+ type = instance_class;
+ }
+ }
+ }
+ if (!instance_class) {
+ PyObject *args;
+ if (!value)
+ args = PyTuple_New(0);
+ else if (PyTuple_Check(value)) {
+ Py_INCREF(value);
+ args = value;
+ } else
+ args = PyTuple_Pack(1, value);
+ if (!args)
+ goto bad;
+ owned_instance = PyObject_Call(type, args, NULL);
+ Py_DECREF(args);
+ if (!owned_instance)
+ goto bad;
+ value = owned_instance;
+ if (!PyExceptionInstance_Check(value)) {
+ PyErr_Format(PyExc_TypeError,
+ "calling %R should have returned an instance of "
+ "BaseException, not %R",
+ type, Py_TYPE(value));
+ goto bad;
+ }
+ }
+ } else {
+ PyErr_SetString(PyExc_TypeError,
+ "raise: exception class must be a subclass of BaseException");
+ goto bad;
+ }
+ if (cause) {
+ PyObject *fixed_cause;
+ if (cause == Py_None) {
+ fixed_cause = NULL;
+ } else if (PyExceptionClass_Check(cause)) {
+ fixed_cause = PyObject_CallObject(cause, NULL);
+ if (fixed_cause == NULL)
+ goto bad;
+ } else if (PyExceptionInstance_Check(cause)) {
+ fixed_cause = cause;
+ Py_INCREF(fixed_cause);
+ } else {
+ PyErr_SetString(PyExc_TypeError,
+ "exception causes must derive from "
+ "BaseException");
+ goto bad;
+ }
+ PyException_SetCause(value, fixed_cause);
+ }
+ PyErr_SetObject(type, value);
+ if (tb) {
+#if CYTHON_COMPILING_IN_PYPY
+ PyObject *tmp_type, *tmp_value, *tmp_tb;
+ PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb);
+ Py_INCREF(tb);
+ PyErr_Restore(tmp_type, tmp_value, tb);
+ Py_XDECREF(tmp_tb);
+#else
+ PyThreadState *tstate = __Pyx_PyThreadState_Current;
+ PyObject* tmp_tb = tstate->curexc_traceback;
+ if (tb != tmp_tb) {
+ Py_INCREF(tb);
+ tstate->curexc_traceback = tb;
+ Py_XDECREF(tmp_tb);
+ }
+#endif
+ }
+bad:
+ Py_XDECREF(owned_instance);
+ return;
+}
+#endif
+
+/* PyFunctionFastCall */
+#if CYTHON_FAST_PYCALL
+static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na,
+ PyObject *globals) {
+ PyFrameObject *f;
+ PyThreadState *tstate = __Pyx_PyThreadState_Current;
+ PyObject **fastlocals;
+ Py_ssize_t i;
+ PyObject *result;
+ assert(globals != NULL);
+ /* XXX Perhaps we should create a specialized
+ PyFrame_New() that doesn't take locals, but does
+ take builtins without sanity checking them.
+ */
+ assert(tstate != NULL);
+ f = PyFrame_New(tstate, co, globals, NULL);
+ if (f == NULL) {
+ return NULL;
+ }
+ fastlocals = __Pyx_PyFrame_GetLocalsplus(f);
+ for (i = 0; i < na; i++) {
+ Py_INCREF(*args);
+ fastlocals[i] = *args++;
+ }
+ result = PyEval_EvalFrameEx(f,0);
+ ++tstate->recursion_depth;
+ Py_DECREF(f);
+ --tstate->recursion_depth;
+ return result;
+}
+#if 1 || PY_VERSION_HEX < 0x030600B1
+static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) {
+ PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
+ PyObject *globals = PyFunction_GET_GLOBALS(func);
+ PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
+ PyObject *closure;
+#if PY_MAJOR_VERSION >= 3
+ PyObject *kwdefs;
+#endif
+ PyObject *kwtuple, **k;
+ PyObject **d;
+ Py_ssize_t nd;
+ Py_ssize_t nk;
+ PyObject *result;
+ assert(kwargs == NULL || PyDict_Check(kwargs));
+ nk = kwargs ? PyDict_Size(kwargs) : 0;
+ if (Py_EnterRecursiveCall((char*)" while calling a Python object")) {
+ return NULL;
+ }
+ if (
+#if PY_MAJOR_VERSION >= 3
+ co->co_kwonlyargcount == 0 &&
+#endif
+ likely(kwargs == NULL || nk == 0) &&
+ co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) {
+ if (argdefs == NULL && co->co_argcount == nargs) {
+ result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals);
+ goto done;
+ }
+ else if (nargs == 0 && argdefs != NULL
+ && co->co_argcount == Py_SIZE(argdefs)) {
+ /* function called with no arguments, but all parameters have
+ a default value: use default values as arguments .*/
+ args = &PyTuple_GET_ITEM(argdefs, 0);
+ result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals);
+ goto done;
+ }
+ }
+ if (kwargs != NULL) {
+ Py_ssize_t pos, i;
+ kwtuple = PyTuple_New(2 * nk);
+ if (kwtuple == NULL) {
+ result = NULL;
+ goto done;
+ }
+ k = &PyTuple_GET_ITEM(kwtuple, 0);
+ pos = i = 0;
+ while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) {
+ Py_INCREF(k[i]);
+ Py_INCREF(k[i+1]);
+ i += 2;
+ }
+ nk = i / 2;
+ }
+ else {
+ kwtuple = NULL;
+ k = NULL;
+ }
+ closure = PyFunction_GET_CLOSURE(func);
+#if PY_MAJOR_VERSION >= 3
+ kwdefs = PyFunction_GET_KW_DEFAULTS(func);
+#endif
+ if (argdefs != NULL) {
+ d = &PyTuple_GET_ITEM(argdefs, 0);
+ nd = Py_SIZE(argdefs);
+ }
+ else {
+ d = NULL;
+ nd = 0;
+ }
+#if PY_MAJOR_VERSION >= 3
+ result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL,
+ args, (int)nargs,
+ k, (int)nk,
+ d, (int)nd, kwdefs, closure);
+#else
+ result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL,
+ args, (int)nargs,
+ k, (int)nk,
+ d, (int)nd, closure);
+#endif
+ Py_XDECREF(kwtuple);
+done:
+ Py_LeaveRecursiveCall();
+ return result;
+}
+#endif
+#endif
+
+/* PyCFunctionFastCall */
+#if CYTHON_FAST_PYCCALL
+static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) {
+ PyCFunctionObject *func = (PyCFunctionObject*)func_obj;
+ PyCFunction meth = PyCFunction_GET_FUNCTION(func);
+ PyObject *self = PyCFunction_GET_SELF(func);
+ int flags = PyCFunction_GET_FLAGS(func);
+ assert(PyCFunction_Check(func));
+ assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)));
+ assert(nargs >= 0);
+ assert(nargs == 0 || args != NULL);
+ /* _PyCFunction_FastCallDict() must not be called with an exception set,
+ because it may clear it (directly or indirectly) and so the
+ caller loses its exception */
+ assert(!PyErr_Occurred());
+ if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) {
+ return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL);
+ } else {
+ return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs);
+ }
+}
+#endif
+
+/* KeywordStringCheck */
+static int __Pyx_CheckKeywordStrings(
+ PyObject *kwdict,
+ const char* function_name,
+ int kw_allowed)
+{
+ PyObject* key = 0;
+ Py_ssize_t pos = 0;
+#if CYTHON_COMPILING_IN_PYPY
+ if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0))
+ goto invalid_keyword;
+ return 1;
+#else
+ while (PyDict_Next(kwdict, &pos, &key, 0)) {
+ #if PY_MAJOR_VERSION < 3
+ if (unlikely(!PyString_Check(key)))
+ #endif
+ if (unlikely(!PyUnicode_Check(key)))
+ goto invalid_keyword_type;
+ }
+ if ((!kw_allowed) && unlikely(key))
+ goto invalid_keyword;
+ return 1;
+invalid_keyword_type:
+ PyErr_Format(PyExc_TypeError,
+ "%.200s() keywords must be strings", function_name);
+ return 0;
+#endif
+invalid_keyword:
+ PyErr_Format(PyExc_TypeError,
+ #if PY_MAJOR_VERSION < 3
+ "%.200s() got an unexpected keyword argument '%.200s'",
+ function_name, PyString_AsString(key));
+ #else
+ "%s() got an unexpected keyword argument '%U'",
+ function_name, key);
+ #endif
+ return 0;
+}
+
+/* GetItemInt */
+static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) {
+ PyObject *r;
+ if (!j) return NULL;
+ r = PyObject_GetItem(o, j);
+ Py_DECREF(j);
+ return r;
+}
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i,
+ CYTHON_NCP_UNUSED int wraparound,
+ CYTHON_NCP_UNUSED int boundscheck) {
+#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ Py_ssize_t wrapped_i = i;
+ if (wraparound & unlikely(i < 0)) {
+ wrapped_i += PyList_GET_SIZE(o);
+ }
+ if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) {
+ PyObject *r = PyList_GET_ITEM(o, wrapped_i);
+ Py_INCREF(r);
+ return r;
+ }
+ return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));
+#else
+ return PySequence_GetItem(o, i);
+#endif
+}
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i,
+ CYTHON_NCP_UNUSED int wraparound,
+ CYTHON_NCP_UNUSED int boundscheck) {
+#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
+ Py_ssize_t wrapped_i = i;
+ if (wraparound & unlikely(i < 0)) {
+ wrapped_i += PyTuple_GET_SIZE(o);
+ }
+ if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) {
+ PyObject *r = PyTuple_GET_ITEM(o, wrapped_i);
+ Py_INCREF(r);
+ return r;
+ }
+ return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));
+#else
+ return PySequence_GetItem(o, i);
+#endif
+}
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list,
+ CYTHON_NCP_UNUSED int wraparound,
+ CYTHON_NCP_UNUSED int boundscheck) {
+#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS
+ if (is_list || PyList_CheckExact(o)) {
+ Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o);
+ if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) {
+ PyObject *r = PyList_GET_ITEM(o, n);
+ Py_INCREF(r);
+ return r;
+ }
+ }
+ else if (PyTuple_CheckExact(o)) {
+ Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o);
+ if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) {
+ PyObject *r = PyTuple_GET_ITEM(o, n);
+ Py_INCREF(r);
+ return r;
+ }
+ } else {
+ PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence;
+ if (likely(m && m->sq_item)) {
+ if (wraparound && unlikely(i < 0) && likely(m->sq_length)) {
+ Py_ssize_t l = m->sq_length(o);
+ if (likely(l >= 0)) {
+ i += l;
+ } else {
+ if (!PyErr_ExceptionMatches(PyExc_OverflowError))
+ return NULL;
+ PyErr_Clear();
+ }
+ }
+ return m->sq_item(o, i);
+ }
+ }
+#else
+ if (is_list || PySequence_Check(o)) {
+ return PySequence_GetItem(o, i);
+ }
+#endif
+ return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));
+}
+
+/* ObjectGetItem */
+#if CYTHON_USE_TYPE_SLOTS
+static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject* index) {
+ PyObject *runerr;
+ Py_ssize_t key_value;
+ PySequenceMethods *m = Py_TYPE(obj)->tp_as_sequence;
+ if (unlikely(!(m && m->sq_item))) {
+ PyErr_Format(PyExc_TypeError, "'%.200s' object is not subscriptable", Py_TYPE(obj)->tp_name);
+ return NULL;
+ }
+ key_value = __Pyx_PyIndex_AsSsize_t(index);
+ if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) {
+ return __Pyx_GetItemInt_Fast(obj, key_value, 0, 1, 1);
+ }
+ if (PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) {
+ PyErr_Clear();
+ PyErr_Format(PyExc_IndexError, "cannot fit '%.200s' into an index-sized integer", Py_TYPE(index)->tp_name);
+ }
+ return NULL;
+}
+static PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key) {
+ PyMappingMethods *m = Py_TYPE(obj)->tp_as_mapping;
+ if (likely(m && m->mp_subscript)) {
+ return m->mp_subscript(obj, key);
+ }
+ return __Pyx_PyObject_GetIndex(obj, key);
+}
+#endif
+
+/* PyObjectCallMethO */
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) {
+ PyObject *self, *result;
+ PyCFunction cfunc;
+ cfunc = PyCFunction_GET_FUNCTION(func);
+ self = PyCFunction_GET_SELF(func);
+ if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
+ return NULL;
+ result = cfunc(self, arg);
+ Py_LeaveRecursiveCall();
+ if (unlikely(!result) && unlikely(!PyErr_Occurred())) {
+ PyErr_SetString(
+ PyExc_SystemError,
+ "NULL result without error in PyObject_Call");
+ }
+ return result;
+}
+#endif
+
+/* PyObjectCallNoArg */
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) {
+#if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(func)) {
+ return __Pyx_PyFunction_FastCall(func, NULL, 0);
+ }
+#endif
+#ifdef __Pyx_CyFunction_USED
+ if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func)))
+#else
+ if (likely(PyCFunction_Check(func)))
+#endif
+ {
+ if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) {
+ return __Pyx_PyObject_CallMethO(func, NULL);
+ }
+ }
+ return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL);
+}
+#endif
+
+/* PyObjectCallOneArg */
+#if CYTHON_COMPILING_IN_CPYTHON
+static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) {
+ PyObject *result;
+ PyObject *args = PyTuple_New(1);
+ if (unlikely(!args)) return NULL;
+ Py_INCREF(arg);
+ PyTuple_SET_ITEM(args, 0, arg);
+ result = __Pyx_PyObject_Call(func, args, NULL);
+ Py_DECREF(args);
+ return result;
+}
+static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) {
+#if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(func)) {
+ return __Pyx_PyFunction_FastCall(func, &arg, 1);
+ }
+#endif
+ if (likely(PyCFunction_Check(func))) {
+ if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) {
+ return __Pyx_PyObject_CallMethO(func, arg);
+#if CYTHON_FAST_PYCCALL
+ } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) {
+ return __Pyx_PyCFunction_FastCall(func, &arg, 1);
+#endif
+ }
+ }
+ return __Pyx__PyObject_CallOneArg(func, arg);
+}
+#else
+static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) {
+ PyObject *result;
+ PyObject *args = PyTuple_Pack(1, arg);
+ if (unlikely(!args)) return NULL;
+ result = __Pyx_PyObject_Call(func, args, NULL);
+ Py_DECREF(args);
+ return result;
+}
+#endif
+
+/* None */
+static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname) {
+ PyErr_Format(PyExc_UnboundLocalError, "local variable '%s' referenced before assignment", varname);
+}
+
+/* PyObjectSetAttrStr */
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value) {
+ PyTypeObject* tp = Py_TYPE(obj);
+ if (likely(tp->tp_setattro))
+ return tp->tp_setattro(obj, attr_name, value);
+#if PY_MAJOR_VERSION < 3
+ if (likely(tp->tp_setattr))
+ return tp->tp_setattr(obj, PyString_AS_STRING(attr_name), value);
+#endif
+ return PyObject_SetAttr(obj, attr_name, value);
+}
+#endif
+
+/* PyObjectCall2Args */
+static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) {
+ PyObject *args, *result = NULL;
+ #if CYTHON_FAST_PYCALL
+ if (PyFunction_Check(function)) {
+ PyObject *args[2] = {arg1, arg2};
+ return __Pyx_PyFunction_FastCall(function, args, 2);
+ }
+ #endif
+ #if CYTHON_FAST_PYCCALL
+ if (__Pyx_PyFastCFunction_Check(function)) {
+ PyObject *args[2] = {arg1, arg2};
+ return __Pyx_PyCFunction_FastCall(function, args, 2);
+ }
+ #endif
+ args = PyTuple_New(2);
+ if (unlikely(!args)) goto done;
+ Py_INCREF(arg1);
+ PyTuple_SET_ITEM(args, 0, arg1);
+ Py_INCREF(arg2);
+ PyTuple_SET_ITEM(args, 1, arg2);
+ Py_INCREF(function);
+ result = __Pyx_PyObject_Call(function, args, NULL);
+ Py_DECREF(args);
+ Py_DECREF(function);
+done:
+ return result;
+}
+
+/* HasAttr */
+static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) {
+ PyObject *r;
+ if (unlikely(!__Pyx_PyBaseString_Check(n))) {
+ PyErr_SetString(PyExc_TypeError,
+ "hasattr(): attribute name must be string");
+ return -1;
+ }
+ r = __Pyx_GetAttr(o, n);
+ if (unlikely(!r)) {
+ PyErr_Clear();
+ return 0;
+ } else {
+ Py_DECREF(r);
+ return 1;
+ }
+}
+
+/* BytesEquals */
+static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) {
+#if CYTHON_COMPILING_IN_PYPY
+ return PyObject_RichCompareBool(s1, s2, equals);
+#else
+ if (s1 == s2) {
+ return (equals == Py_EQ);
+ } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) {
+ const char *ps1, *ps2;
+ Py_ssize_t length = PyBytes_GET_SIZE(s1);
+ if (length != PyBytes_GET_SIZE(s2))
+ return (equals == Py_NE);
+ ps1 = PyBytes_AS_STRING(s1);
+ ps2 = PyBytes_AS_STRING(s2);
+ if (ps1[0] != ps2[0]) {
+ return (equals == Py_NE);
+ } else if (length == 1) {
+ return (equals == Py_EQ);
+ } else {
+ int result;
+#if CYTHON_USE_UNICODE_INTERNALS
+ Py_hash_t hash1, hash2;
+ hash1 = ((PyBytesObject*)s1)->ob_shash;
+ hash2 = ((PyBytesObject*)s2)->ob_shash;
+ if (hash1 != hash2 && hash1 != -1 && hash2 != -1) {
+ return (equals == Py_NE);
+ }
+#endif
+ result = memcmp(ps1, ps2, (size_t)length);
+ return (equals == Py_EQ) ? (result == 0) : (result != 0);
+ }
+ } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) {
+ return (equals == Py_NE);
+ } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) {
+ return (equals == Py_NE);
+ } else {
+ int result;
+ PyObject* py_result = PyObject_RichCompare(s1, s2, equals);
+ if (!py_result)
+ return -1;
+ result = __Pyx_PyObject_IsTrue(py_result);
+ Py_DECREF(py_result);
+ return result;
+ }
+#endif
+}
+
+/* UnicodeEquals */
+static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) {
+#if CYTHON_COMPILING_IN_PYPY
+ return PyObject_RichCompareBool(s1, s2, equals);
+#else
+#if PY_MAJOR_VERSION < 3
+ PyObject* owned_ref = NULL;
+#endif
+ int s1_is_unicode, s2_is_unicode;
+ if (s1 == s2) {
+ goto return_eq;
+ }
+ s1_is_unicode = PyUnicode_CheckExact(s1);
+ s2_is_unicode = PyUnicode_CheckExact(s2);
+#if PY_MAJOR_VERSION < 3
+ if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) {
+ owned_ref = PyUnicode_FromObject(s2);
+ if (unlikely(!owned_ref))
+ return -1;
+ s2 = owned_ref;
+ s2_is_unicode = 1;
+ } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) {
+ owned_ref = PyUnicode_FromObject(s1);
+ if (unlikely(!owned_ref))
+ return -1;
+ s1 = owned_ref;
+ s1_is_unicode = 1;
+ } else if (((!s2_is_unicode) & (!s1_is_unicode))) {
+ return __Pyx_PyBytes_Equals(s1, s2, equals);
+ }
+#endif
+ if (s1_is_unicode & s2_is_unicode) {
+ Py_ssize_t length;
+ int kind;
+ void *data1, *data2;
+ if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0))
+ return -1;
+ length = __Pyx_PyUnicode_GET_LENGTH(s1);
+ if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) {
+ goto return_ne;
+ }
+#if CYTHON_USE_UNICODE_INTERNALS
+ {
+ Py_hash_t hash1, hash2;
+ #if CYTHON_PEP393_ENABLED
+ hash1 = ((PyASCIIObject*)s1)->hash;
+ hash2 = ((PyASCIIObject*)s2)->hash;
+ #else
+ hash1 = ((PyUnicodeObject*)s1)->hash;
+ hash2 = ((PyUnicodeObject*)s2)->hash;
+ #endif
+ if (hash1 != hash2 && hash1 != -1 && hash2 != -1) {
+ goto return_ne;
+ }
+ }
+#endif
+ kind = __Pyx_PyUnicode_KIND(s1);
+ if (kind != __Pyx_PyUnicode_KIND(s2)) {
+ goto return_ne;
+ }
+ data1 = __Pyx_PyUnicode_DATA(s1);
+ data2 = __Pyx_PyUnicode_DATA(s2);
+ if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) {
+ goto return_ne;
+ } else if (length == 1) {
+ goto return_eq;
+ } else {
+ int result = memcmp(data1, data2, (size_t)(length * kind));
+ #if PY_MAJOR_VERSION < 3
+ Py_XDECREF(owned_ref);
+ #endif
+ return (equals == Py_EQ) ? (result == 0) : (result != 0);
+ }
+ } else if ((s1 == Py_None) & s2_is_unicode) {
+ goto return_ne;
+ } else if ((s2 == Py_None) & s1_is_unicode) {
+ goto return_ne;
+ } else {
+ int result;
+ PyObject* py_result = PyObject_RichCompare(s1, s2, equals);
+ #if PY_MAJOR_VERSION < 3
+ Py_XDECREF(owned_ref);
+ #endif
+ if (!py_result)
+ return -1;
+ result = __Pyx_PyObject_IsTrue(py_result);
+ Py_DECREF(py_result);
+ return result;
+ }
+return_eq:
+ #if PY_MAJOR_VERSION < 3
+ Py_XDECREF(owned_ref);
+ #endif
+ return (equals == Py_EQ);
+return_ne:
+ #if PY_MAJOR_VERSION < 3
+ Py_XDECREF(owned_ref);
+ #endif
+ return (equals == Py_NE);
+#endif
+}
+
+/* RaiseTooManyValuesToUnpack */
+static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) {
+ PyErr_Format(PyExc_ValueError,
+ "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected);
+}
+
+/* RaiseNeedMoreValuesToUnpack */
+static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) {
+ PyErr_Format(PyExc_ValueError,
+ "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack",
+ index, (index == 1) ? "" : "s");
+}
+
+/* IterFinish */
+static CYTHON_INLINE int __Pyx_IterFinish(void) {
+#if CYTHON_FAST_THREAD_STATE
+ PyThreadState *tstate = __Pyx_PyThreadState_Current;
+ PyObject* exc_type = tstate->curexc_type;
+ if (unlikely(exc_type)) {
+ if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) {
+ PyObject *exc_value, *exc_tb;
+ exc_value = tstate->curexc_value;
+ exc_tb = tstate->curexc_traceback;
+ tstate->curexc_type = 0;
+ tstate->curexc_value = 0;
+ tstate->curexc_traceback = 0;
+ Py_DECREF(exc_type);
+ Py_XDECREF(exc_value);
+ Py_XDECREF(exc_tb);
+ return 0;
+ } else {
+ return -1;
+ }
+ }
+ return 0;
+#else
+ if (unlikely(PyErr_Occurred())) {
+ if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) {
+ PyErr_Clear();
+ return 0;
+ } else {
+ return -1;
+ }
+ }
+ return 0;
+#endif
+}
+
+/* UnpackItemEndCheck */
+static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) {
+ if (unlikely(retval)) {
+ Py_DECREF(retval);
+ __Pyx_RaiseTooManyValuesError(expected);
+ return -1;
+ } else {
+ return __Pyx_IterFinish();
+ }
+ return 0;
+}
+
+/* Import */
+static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) {
+ PyObject *empty_list = 0;
+ PyObject *module = 0;
+ PyObject *global_dict = 0;
+ PyObject *empty_dict = 0;
+ PyObject *list;
+ #if PY_MAJOR_VERSION < 3
+ PyObject *py_import;
+ py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import);
+ if (!py_import)
+ goto bad;
+ #endif
+ if (from_list)
+ list = from_list;
+ else {
+ empty_list = PyList_New(0);
+ if (!empty_list)
+ goto bad;
+ list = empty_list;
+ }
+ global_dict = PyModule_GetDict(__pyx_m);
+ if (!global_dict)
+ goto bad;
+ empty_dict = PyDict_New();
+ if (!empty_dict)
+ goto bad;
+ {
+ #if PY_MAJOR_VERSION >= 3
+ if (level == -1) {
+ if (strchr(__Pyx_MODULE_NAME, '.')) {
+ module = PyImport_ImportModuleLevelObject(
+ name, global_dict, empty_dict, list, 1);
+ if (!module) {
+ if (!PyErr_ExceptionMatches(PyExc_ImportError))
+ goto bad;
+ PyErr_Clear();
+ }
+ }
+ level = 0;
+ }
+ #endif
+ if (!module) {
+ #if PY_MAJOR_VERSION < 3
+ PyObject *py_level = PyInt_FromLong(level);
+ if (!py_level)
+ goto bad;
+ module = PyObject_CallFunctionObjArgs(py_import,
+ name, global_dict, empty_dict, list, py_level, (PyObject *)NULL);
+ Py_DECREF(py_level);
+ #else
+ module = PyImport_ImportModuleLevelObject(
+ name, global_dict, empty_dict, list, level);
+ #endif
+ }
+ }
+bad:
+ #if PY_MAJOR_VERSION < 3
+ Py_XDECREF(py_import);
+ #endif
+ Py_XDECREF(empty_list);
+ Py_XDECREF(empty_dict);
+ return module;
+}
+
+/* ImportFrom */
+static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) {
+ PyObject* value = __Pyx_PyObject_GetAttrStr(module, name);
+ if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) {
+ PyErr_Format(PyExc_ImportError,
+ #if PY_MAJOR_VERSION < 3
+ "cannot import name %.230s", PyString_AS_STRING(name));
+ #else
+ "cannot import name %S", name);
+ #endif
+ }
+ return value;
+}
+
+/* PyObject_GenericGetAttrNoDict */
+#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
+static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) {
+ PyErr_Format(PyExc_AttributeError,
+#if PY_MAJOR_VERSION >= 3
+ "'%.50s' object has no attribute '%U'",
+ tp->tp_name, attr_name);
+#else
+ "'%.50s' object has no attribute '%.400s'",
+ tp->tp_name, PyString_AS_STRING(attr_name));
+#endif
+ return NULL;
+}
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) {
+ PyObject *descr;
+ PyTypeObject *tp = Py_TYPE(obj);
+ if (unlikely(!PyString_Check(attr_name))) {
+ return PyObject_GenericGetAttr(obj, attr_name);
+ }
+ assert(!tp->tp_dictoffset);
+ descr = _PyType_Lookup(tp, attr_name);
+ if (unlikely(!descr)) {
+ return __Pyx_RaiseGenericGetAttributeError(tp, attr_name);
+ }
+ Py_INCREF(descr);
+ #if PY_MAJOR_VERSION < 3
+ if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS)))
+ #endif
+ {
+ descrgetfunc f = Py_TYPE(descr)->tp_descr_get;
+ if (unlikely(f)) {
+ PyObject *res = f(descr, obj, (PyObject *)tp);
+ Py_DECREF(descr);
+ return res;
+ }
+ }
+ return descr;
+}
+#endif
+
+/* PyObject_GenericGetAttr */
+#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
+static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) {
+ if (unlikely(Py_TYPE(obj)->tp_dictoffset)) {
+ return PyObject_GenericGetAttr(obj, attr_name);
+ }
+ return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name);
+}
+#endif
+
+/* SetupReduce */
+static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) {
+ int ret;
+ PyObject *name_attr;
+ name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name_2);
+ if (likely(name_attr)) {
+ ret = PyObject_RichCompareBool(name_attr, name, Py_EQ);
+ } else {
+ ret = -1;
+ }
+ if (unlikely(ret < 0)) {
+ PyErr_Clear();
+ ret = 0;
+ }
+ Py_XDECREF(name_attr);
+ return ret;
+}
+static int __Pyx_setup_reduce(PyObject* type_obj) {
+ int ret = 0;
+ PyObject *object_reduce = NULL;
+ PyObject *object_reduce_ex = NULL;
+ PyObject *reduce = NULL;
+ PyObject *reduce_ex = NULL;
+ PyObject *reduce_cython = NULL;
+ PyObject *setstate = NULL;
+ PyObject *setstate_cython = NULL;
+#if CYTHON_USE_PYTYPE_LOOKUP
+ if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
+#else
+ if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
+#endif
+#if CYTHON_USE_PYTYPE_LOOKUP
+ object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
+#else
+ object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
+#endif
+ reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD;
+ if (reduce_ex == object_reduce_ex) {
+#if CYTHON_USE_PYTYPE_LOOKUP
+ object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
+#else
+ object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
+#endif
+ reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD;
+ if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) {
+ reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_cython); if (unlikely(!reduce_cython)) goto __PYX_BAD;
+ ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
+ ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
+ setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate);
+ if (!setstate) PyErr_Clear();
+ if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) {
+ setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate_cython); if (unlikely(!setstate_cython)) goto __PYX_BAD;
+ ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
+ ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
+ }
+ PyType_Modified((PyTypeObject*)type_obj);
+ }
+ }
+ goto __PYX_GOOD;
+__PYX_BAD:
+ if (!PyErr_Occurred())
+ PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name);
+ ret = -1;
+__PYX_GOOD:
+#if !CYTHON_USE_PYTYPE_LOOKUP
+ Py_XDECREF(object_reduce);
+ Py_XDECREF(object_reduce_ex);
+#endif
+ Py_XDECREF(reduce);
+ Py_XDECREF(reduce_ex);
+ Py_XDECREF(reduce_cython);
+ Py_XDECREF(setstate);
+ Py_XDECREF(setstate_cython);
+ return ret;
+}
+
+/* SetVTable */
+static int __Pyx_SetVtable(PyObject *dict, void *vtable) {
+#if PY_VERSION_HEX >= 0x02070000
+ PyObject *ob = PyCapsule_New(vtable, 0, 0);
+#else
+ PyObject *ob = PyCObject_FromVoidPtr(vtable, 0);
+#endif
+ if (!ob)
+ goto bad;
+ if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0)
+ goto bad;
+ Py_DECREF(ob);
+ return 0;
+bad:
+ Py_XDECREF(ob);
+ return -1;
+}
+
+/* CLineInTraceback */
+#ifndef CYTHON_CLINE_IN_TRACEBACK
+static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) {
+ PyObject *use_cline;
+ PyObject *ptype, *pvalue, *ptraceback;
+#if CYTHON_COMPILING_IN_CPYTHON
+ PyObject **cython_runtime_dict;
+#endif
+ if (unlikely(!__pyx_cython_runtime)) {
+ return c_line;
+ }
+ __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback);
+#if CYTHON_COMPILING_IN_CPYTHON
+ cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime);
+ if (likely(cython_runtime_dict)) {
+ __PYX_PY_DICT_LOOKUP_IF_MODIFIED(
+ use_cline, *cython_runtime_dict,
+ __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback))
+ } else
+#endif
+ {
+ PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback);
+ if (use_cline_obj) {
+ use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True;
+ Py_DECREF(use_cline_obj);
+ } else {
+ PyErr_Clear();
+ use_cline = NULL;
+ }
+ }
+ if (!use_cline) {
+ c_line = 0;
+ PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False);
+ }
+ else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) {
+ c_line = 0;
+ }
+ __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback);
+ return c_line;
+}
+#endif
+
+/* CodeObjectCache */
+static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) {
+ int start = 0, mid = 0, end = count - 1;
+ if (end >= 0 && code_line > entries[end].code_line) {
+ return count;
+ }
+ while (start < end) {
+ mid = start + (end - start) / 2;
+ if (code_line < entries[mid].code_line) {
+ end = mid;
+ } else if (code_line > entries[mid].code_line) {
+ start = mid + 1;
+ } else {
+ return mid;
+ }
+ }
+ if (code_line <= entries[mid].code_line) {
+ return mid;
+ } else {
+ return mid + 1;
+ }
+}
+static PyCodeObject *__pyx_find_code_object(int code_line) {
+ PyCodeObject* code_object;
+ int pos;
+ if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) {
+ return NULL;
+ }
+ pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
+ if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) {
+ return NULL;
+ }
+ code_object = __pyx_code_cache.entries[pos].code_object;
+ Py_INCREF(code_object);
+ return code_object;
+}
+static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
+ int pos, i;
+ __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries;
+ if (unlikely(!code_line)) {
+ return;
+ }
+ if (unlikely(!entries)) {
+ entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry));
+ if (likely(entries)) {
+ __pyx_code_cache.entries = entries;
+ __pyx_code_cache.max_count = 64;
+ __pyx_code_cache.count = 1;
+ entries[0].code_line = code_line;
+ entries[0].code_object = code_object;
+ Py_INCREF(code_object);
+ }
+ return;
+ }
+ pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
+ if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) {
+ PyCodeObject* tmp = entries[pos].code_object;
+ entries[pos].code_object = code_object;
+ Py_DECREF(tmp);
+ return;
+ }
+ if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
+ int new_max = __pyx_code_cache.max_count + 64;
+ entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
+ __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry));
+ if (unlikely(!entries)) {
+ return;
+ }
+ __pyx_code_cache.entries = entries;
+ __pyx_code_cache.max_count = new_max;
+ }
+ for (i=__pyx_code_cache.count; i>pos; i--) {
+ entries[i] = entries[i-1];
+ }
+ entries[pos].code_line = code_line;
+ entries[pos].code_object = code_object;
+ __pyx_code_cache.count++;
+ Py_INCREF(code_object);
+}
+
+/* AddTraceback */
+#include "compile.h"
+#include "frameobject.h"
+#include "traceback.h"
+static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
+ const char *funcname, int c_line,
+ int py_line, const char *filename) {
+ PyCodeObject *py_code = 0;
+ PyObject *py_srcfile = 0;
+ PyObject *py_funcname = 0;
+ #if PY_MAJOR_VERSION < 3
+ py_srcfile = PyString_FromString(filename);
+ #else
+ py_srcfile = PyUnicode_FromString(filename);
+ #endif
+ if (!py_srcfile) goto bad;
+ if (c_line) {
+ #if PY_MAJOR_VERSION < 3
+ py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
+ #else
+ py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
+ #endif
+ }
+ else {
+ #if PY_MAJOR_VERSION < 3
+ py_funcname = PyString_FromString(funcname);
+ #else
+ py_funcname = PyUnicode_FromString(funcname);
+ #endif
+ }
+ if (!py_funcname) goto bad;
+ py_code = __Pyx_PyCode_New(
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ __pyx_empty_bytes, /*PyObject *code,*/
+ __pyx_empty_tuple, /*PyObject *consts,*/
+ __pyx_empty_tuple, /*PyObject *names,*/
+ __pyx_empty_tuple, /*PyObject *varnames,*/
+ __pyx_empty_tuple, /*PyObject *freevars,*/
+ __pyx_empty_tuple, /*PyObject *cellvars,*/
+ py_srcfile, /*PyObject *filename,*/
+ py_funcname, /*PyObject *name,*/
+ py_line,
+ __pyx_empty_bytes /*PyObject *lnotab*/
+ );
+ Py_DECREF(py_srcfile);
+ Py_DECREF(py_funcname);
+ return py_code;
+bad:
+ Py_XDECREF(py_srcfile);
+ Py_XDECREF(py_funcname);
+ return NULL;
+}
+static void __Pyx_AddTraceback(const char *funcname, int c_line,
+ int py_line, const char *filename) {
+ PyCodeObject *py_code = 0;
+ PyFrameObject *py_frame = 0;
+ PyThreadState *tstate = __Pyx_PyThreadState_Current;
+ if (c_line) {
+ c_line = __Pyx_CLineForTraceback(tstate, c_line);
+ }
+ py_code = __pyx_find_code_object(c_line ? -c_line : py_line);
+ if (!py_code) {
+ py_code = __Pyx_CreateCodeObjectForTraceback(
+ funcname, c_line, py_line, filename);
+ if (!py_code) goto bad;
+ __pyx_insert_code_object(c_line ? -c_line : py_line, py_code);
+ }
+ py_frame = PyFrame_New(
+ tstate, /*PyThreadState *tstate,*/
+ py_code, /*PyCodeObject *code,*/
+ __pyx_d, /*PyObject *globals,*/
+ 0 /*PyObject *locals*/
+ );
+ if (!py_frame) goto bad;
+ __Pyx_PyFrame_SetLineNumber(py_frame, py_line);
+ PyTraceBack_Here(py_frame);
+bad:
+ Py_XDECREF(py_code);
+ Py_XDECREF(py_frame);
+}
+
+/* CIntFromPyVerify */
+#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\
+ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0)
+#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\
+ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1)
+#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\
+ {\
+ func_type value = func_value;\
+ if (sizeof(target_type) < sizeof(func_type)) {\
+ if (unlikely(value != (func_type) (target_type) value)) {\
+ func_type zero = 0;\
+ if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\
+ return (target_type) -1;\
+ if (is_unsigned && unlikely(value < zero))\
+ goto raise_neg_overflow;\
+ else\
+ goto raise_overflow;\
+ }\
+ }\
+ return (target_type) value;\
+ }
+
+/* CIntToPy */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) {
+ const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0;
+ const int is_unsigned = neg_one > const_zero;
+ if (is_unsigned) {
+ if (sizeof(int) < sizeof(long)) {
+ return PyInt_FromLong((long) value);
+ } else if (sizeof(int) <= sizeof(unsigned long)) {
+ return PyLong_FromUnsignedLong((unsigned long) value);
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) {
+ return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
+#endif
+ }
+ } else {
+ if (sizeof(int) <= sizeof(long)) {
+ return PyInt_FromLong((long) value);
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) {
+ return PyLong_FromLongLong((PY_LONG_LONG) value);
+#endif
+ }
+ }
+ {
+ int one = 1; int little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&value;
+ return _PyLong_FromByteArray(bytes, sizeof(int),
+ little, !is_unsigned);
+ }
+}
+
+/* CIntToPy */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
+ const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;
+ const int is_unsigned = neg_one > const_zero;
+ if (is_unsigned) {
+ if (sizeof(long) < sizeof(long)) {
+ return PyInt_FromLong((long) value);
+ } else if (sizeof(long) <= sizeof(unsigned long)) {
+ return PyLong_FromUnsignedLong((unsigned long) value);
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
+ return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
+#endif
+ }
+ } else {
+ if (sizeof(long) <= sizeof(long)) {
+ return PyInt_FromLong((long) value);
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
+ return PyLong_FromLongLong((PY_LONG_LONG) value);
+#endif
+ }
+ }
+ {
+ int one = 1; int little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&value;
+ return _PyLong_FromByteArray(bytes, sizeof(long),
+ little, !is_unsigned);
+ }
+}
+
+/* CIntFromPy */
+static CYTHON_INLINE size_t __Pyx_PyInt_As_size_t(PyObject *x) {
+ const size_t neg_one = (size_t) ((size_t) 0 - (size_t) 1), const_zero = (size_t) 0;
+ const int is_unsigned = neg_one > const_zero;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x))) {
+ if (sizeof(size_t) < sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT(size_t, long, PyInt_AS_LONG(x))
+ } else {
+ long val = PyInt_AS_LONG(x);
+ if (is_unsigned && unlikely(val < 0)) {
+ goto raise_neg_overflow;
+ }
+ return (size_t) val;
+ }
+ } else
+#endif
+ if (likely(PyLong_Check(x))) {
+ if (is_unsigned) {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (size_t) 0;
+ case 1: __PYX_VERIFY_RETURN_INT(size_t, digit, digits[0])
+ case 2:
+ if (8 * sizeof(size_t) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(size_t) >= 2 * PyLong_SHIFT) {
+ return (size_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(size_t) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(size_t) >= 3 * PyLong_SHIFT) {
+ return (size_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(size_t) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(size_t) >= 4 * PyLong_SHIFT) {
+ return (size_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ }
+ break;
+ }
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (unlikely(Py_SIZE(x) < 0)) {
+ goto raise_neg_overflow;
+ }
+#else
+ {
+ int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
+ if (unlikely(result < 0))
+ return (size_t) -1;
+ if (unlikely(result == 1))
+ goto raise_neg_overflow;
+ }
+#endif
+ if (sizeof(size_t) <= sizeof(unsigned long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned long, PyLong_AsUnsignedLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(size_t) <= sizeof(unsigned PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
+#endif
+ }
+ } else {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (size_t) 0;
+ case -1: __PYX_VERIFY_RETURN_INT(size_t, sdigit, (sdigit) (-(sdigit)digits[0]))
+ case 1: __PYX_VERIFY_RETURN_INT(size_t, digit, +digits[0])
+ case -2:
+ if (8 * sizeof(size_t) - 1 > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) {
+ return (size_t) (((size_t)-1)*(((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])));
+ }
+ }
+ break;
+ case 2:
+ if (8 * sizeof(size_t) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) {
+ return (size_t) ((((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])));
+ }
+ }
+ break;
+ case -3:
+ if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) {
+ return (size_t) (((size_t)-1)*(((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(size_t) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) {
+ return (size_t) ((((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])));
+ }
+ }
+ break;
+ case -4:
+ if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT) {
+ return (size_t) (((size_t)-1)*(((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(size_t) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT) {
+ return (size_t) ((((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])));
+ }
+ }
+ break;
+ }
+#endif
+ if (sizeof(size_t) <= sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(size_t, long, PyLong_AsLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(size_t) <= sizeof(PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(size_t, PY_LONG_LONG, PyLong_AsLongLong(x))
+#endif
+ }
+ }
+ {
+#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
+ PyErr_SetString(PyExc_RuntimeError,
+ "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
+#else
+ size_t val;
+ PyObject *v = __Pyx_PyNumber_IntOrLong(x);
+ #if PY_MAJOR_VERSION < 3
+ if (likely(v) && !PyLong_Check(v)) {
+ PyObject *tmp = v;
+ v = PyNumber_Long(tmp);
+ Py_DECREF(tmp);
+ }
+ #endif
+ if (likely(v)) {
+ int one = 1; int is_little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&val;
+ int ret = _PyLong_AsByteArray((PyLongObject *)v,
+ bytes, sizeof(val),
+ is_little, !is_unsigned);
+ Py_DECREF(v);
+ if (likely(!ret))
+ return val;
+ }
+#endif
+ return (size_t) -1;
+ }
+ } else {
+ size_t val;
+ PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
+ if (!tmp) return (size_t) -1;
+ val = __Pyx_PyInt_As_size_t(tmp);
+ Py_DECREF(tmp);
+ return val;
+ }
+raise_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "value too large to convert to size_t");
+ return (size_t) -1;
+raise_neg_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "can't convert negative value to size_t");
+ return (size_t) -1;
+}
+
+/* CIntFromPy */
+static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {
+ const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;
+ const int is_unsigned = neg_one > const_zero;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x))) {
+ if (sizeof(long) < sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x))
+ } else {
+ long val = PyInt_AS_LONG(x);
+ if (is_unsigned && unlikely(val < 0)) {
+ goto raise_neg_overflow;
+ }
+ return (long) val;
+ }
+ } else
+#endif
+ if (likely(PyLong_Check(x))) {
+ if (is_unsigned) {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (long) 0;
+ case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0])
+ case 2:
+ if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) {
+ return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) {
+ return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) {
+ return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
+ }
+ }
+ break;
+ }
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (unlikely(Py_SIZE(x) < 0)) {
+ goto raise_neg_overflow;
+ }
+#else
+ {
+ int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
+ if (unlikely(result < 0))
+ return (long) -1;
+ if (unlikely(result == 1))
+ goto raise_neg_overflow;
+ }
+#endif
+ if (sizeof(long) <= sizeof(unsigned long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
+#endif
+ }
+ } else {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (long) 0;
+ case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0]))
+ case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0])
+ case -2:
+ if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
+ return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case 2:
+ if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
+ return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case -3:
+ if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
+ return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
+ return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case -4:
+ if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
+ return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
+ return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ }
+#endif
+ if (sizeof(long) <= sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x))
+#endif
+ }
+ }
+ {
+#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
+ PyErr_SetString(PyExc_RuntimeError,
+ "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
+#else
+ long val;
+ PyObject *v = __Pyx_PyNumber_IntOrLong(x);
+ #if PY_MAJOR_VERSION < 3
+ if (likely(v) && !PyLong_Check(v)) {
+ PyObject *tmp = v;
+ v = PyNumber_Long(tmp);
+ Py_DECREF(tmp);
+ }
+ #endif
+ if (likely(v)) {
+ int one = 1; int is_little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&val;
+ int ret = _PyLong_AsByteArray((PyLongObject *)v,
+ bytes, sizeof(val),
+ is_little, !is_unsigned);
+ Py_DECREF(v);
+ if (likely(!ret))
+ return val;
+ }
+#endif
+ return (long) -1;
+ }
+ } else {
+ long val;
+ PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
+ if (!tmp) return (long) -1;
+ val = __Pyx_PyInt_As_long(tmp);
+ Py_DECREF(tmp);
+ return val;
+ }
+raise_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "value too large to convert to long");
+ return (long) -1;
+raise_neg_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "can't convert negative value to long");
+ return (long) -1;
+}
+
+/* CIntFromPy */
+static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {
+ const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0;
+ const int is_unsigned = neg_one > const_zero;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x))) {
+ if (sizeof(int) < sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x))
+ } else {
+ long val = PyInt_AS_LONG(x);
+ if (is_unsigned && unlikely(val < 0)) {
+ goto raise_neg_overflow;
+ }
+ return (int) val;
+ }
+ } else
+#endif
+ if (likely(PyLong_Check(x))) {
+ if (is_unsigned) {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (int) 0;
+ case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0])
+ case 2:
+ if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) {
+ return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) {
+ return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) {
+ return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
+ }
+ }
+ break;
+ }
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (unlikely(Py_SIZE(x) < 0)) {
+ goto raise_neg_overflow;
+ }
+#else
+ {
+ int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
+ if (unlikely(result < 0))
+ return (int) -1;
+ if (unlikely(result == 1))
+ goto raise_neg_overflow;
+ }
+#endif
+ if (sizeof(int) <= sizeof(unsigned long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
+#endif
+ }
+ } else {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (int) 0;
+ case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0]))
+ case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0])
+ case -2:
+ if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
+ return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case 2:
+ if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
+ return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case -3:
+ if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
+ return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
+ return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case -4:
+ if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
+ return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
+ return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ }
+#endif
+ if (sizeof(int) <= sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x))
+#endif
+ }
+ }
+ {
+#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
+ PyErr_SetString(PyExc_RuntimeError,
+ "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
+#else
+ int val;
+ PyObject *v = __Pyx_PyNumber_IntOrLong(x);
+ #if PY_MAJOR_VERSION < 3
+ if (likely(v) && !PyLong_Check(v)) {
+ PyObject *tmp = v;
+ v = PyNumber_Long(tmp);
+ Py_DECREF(tmp);
+ }
+ #endif
+ if (likely(v)) {
+ int one = 1; int is_little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&val;
+ int ret = _PyLong_AsByteArray((PyLongObject *)v,
+ bytes, sizeof(val),
+ is_little, !is_unsigned);
+ Py_DECREF(v);
+ if (likely(!ret))
+ return val;
+ }
+#endif
+ return (int) -1;
+ }
+ } else {
+ int val;
+ PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
+ if (!tmp) return (int) -1;
+ val = __Pyx_PyInt_As_int(tmp);
+ Py_DECREF(tmp);
+ return val;
+ }
+raise_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "value too large to convert to int");
+ return (int) -1;
+raise_neg_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "can't convert negative value to int");
+ return (int) -1;
+}
+
+/* FastTypeChecks */
+#if CYTHON_COMPILING_IN_CPYTHON
+static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) {
+ while (a) {
+ a = a->tp_base;
+ if (a == b)
+ return 1;
+ }
+ return b == &PyBaseObject_Type;
+}
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) {
+ PyObject *mro;
+ if (a == b) return 1;
+ mro = a->tp_mro;
+ if (likely(mro)) {
+ Py_ssize_t i, n;
+ n = PyTuple_GET_SIZE(mro);
+ for (i = 0; i < n; i++) {
+ if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b)
+ return 1;
+ }
+ return 0;
+ }
+ return __Pyx_InBases(a, b);
+}
+#if PY_MAJOR_VERSION == 2
+static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) {
+ PyObject *exception, *value, *tb;
+ int res;
+ __Pyx_PyThreadState_declare
+ __Pyx_PyThreadState_assign
+ __Pyx_ErrFetch(&exception, &value, &tb);
+ res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0;
+ if (unlikely(res == -1)) {
+ PyErr_WriteUnraisable(err);
+ res = 0;
+ }
+ if (!res) {
+ res = PyObject_IsSubclass(err, exc_type2);
+ if (unlikely(res == -1)) {
+ PyErr_WriteUnraisable(err);
+ res = 0;
+ }
+ }
+ __Pyx_ErrRestore(exception, value, tb);
+ return res;
+}
+#else
+static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) {
+ int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0;
+ if (!res) {
+ res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);
+ }
+ return res;
+}
+#endif
+static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
+ Py_ssize_t i, n;
+ assert(PyExceptionClass_Check(exc_type));
+ n = PyTuple_GET_SIZE(tuple);
+#if PY_MAJOR_VERSION >= 3
+ for (i=0; i<n; i++) {
+ if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
+ }
+#endif
+ for (i=0; i<n; i++) {
+ PyObject *t = PyTuple_GET_ITEM(tuple, i);
+ #if PY_MAJOR_VERSION < 3
+ if (likely(exc_type == t)) return 1;
+ #endif
+ if (likely(PyExceptionClass_Check(t))) {
+ if (__Pyx_inner_PyErr_GivenExceptionMatches2(exc_type, NULL, t)) return 1;
+ } else {
+ }
+ }
+ return 0;
+}
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) {
+ if (likely(err == exc_type)) return 1;
+ if (likely(PyExceptionClass_Check(err))) {
+ if (likely(PyExceptionClass_Check(exc_type))) {
+ return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type);
+ } else if (likely(PyTuple_Check(exc_type))) {
+ return __Pyx_PyErr_GivenExceptionMatchesTuple(err, exc_type);
+ } else {
+ }
+ }
+ return PyErr_GivenExceptionMatches(err, exc_type);
+}
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) {
+ assert(PyExceptionClass_Check(exc_type1));
+ assert(PyExceptionClass_Check(exc_type2));
+ if (likely(err == exc_type1 || err == exc_type2)) return 1;
+ if (likely(PyExceptionClass_Check(err))) {
+ return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2);
+ }
+ return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2));
+}
+#endif
+
+/* CheckBinaryVersion */
+static int __Pyx_check_binary_version(void) {
+ char ctversion[4], rtversion[4];
+ PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
+ PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion());
+ if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) {
+ char message[200];
+ PyOS_snprintf(message, sizeof(message),
+ "compiletime version %s of module '%.100s' "
+ "does not match runtime version %s",
+ ctversion, __Pyx_MODULE_NAME, rtversion);
+ return PyErr_WarnEx(NULL, message, 1);
+ }
+ return 0;
+}
+
+/* InitStrings */
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
+ while (t->p) {
+ #if PY_MAJOR_VERSION < 3
+ if (t->is_unicode) {
+ *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
+ } else if (t->intern) {
+ *t->p = PyString_InternFromString(t->s);
+ } else {
+ *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
+ }
+ #else
+ if (t->is_unicode | t->is_str) {
+ if (t->intern) {
+ *t->p = PyUnicode_InternFromString(t->s);
+ } else if (t->encoding) {
+ *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
+ } else {
+ *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
+ }
+ } else {
+ *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
+ }
+ #endif
+ if (!*t->p)
+ return -1;
+ if (PyObject_Hash(*t->p) == -1)
+ return -1;
+ ++t;
+ }
+ return 0;
+}
+
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) {
+ return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str));
+}
+static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) {
+ Py_ssize_t ignore;
+ return __Pyx_PyObject_AsStringAndSize(o, &ignore);
+}
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+#if !CYTHON_PEP393_ENABLED
+static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+ char* defenc_c;
+ PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL);
+ if (!defenc) return NULL;
+ defenc_c = PyBytes_AS_STRING(defenc);
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ {
+ char* end = defenc_c + PyBytes_GET_SIZE(defenc);
+ char* c;
+ for (c = defenc_c; c < end; c++) {
+ if ((unsigned char) (*c) >= 128) {
+ PyUnicode_AsASCIIString(o);
+ return NULL;
+ }
+ }
+ }
+#endif
+ *length = PyBytes_GET_SIZE(defenc);
+ return defenc_c;
+}
+#else
+static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+ if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL;
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ if (likely(PyUnicode_IS_ASCII(o))) {
+ *length = PyUnicode_GET_LENGTH(o);
+ return PyUnicode_AsUTF8(o);
+ } else {
+ PyUnicode_AsASCIIString(o);
+ return NULL;
+ }
+#else
+ return PyUnicode_AsUTF8AndSize(o, length);
+#endif
+}
+#endif
+#endif
+static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+ if (
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ __Pyx_sys_getdefaultencoding_not_ascii &&
+#endif
+ PyUnicode_Check(o)) {
+ return __Pyx_PyUnicode_AsStringAndSize(o, length);
+ } else
+#endif
+#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE))
+ if (PyByteArray_Check(o)) {
+ *length = PyByteArray_GET_SIZE(o);
+ return PyByteArray_AS_STRING(o);
+ } else
+#endif
+ {
+ char* result;
+ int r = PyBytes_AsStringAndSize(o, &result, length);
+ if (unlikely(r < 0)) {
+ return NULL;
+ } else {
+ return result;
+ }
+ }
+}
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
+ int is_true = x == Py_True;
+ if (is_true | (x == Py_False) | (x == Py_None)) return is_true;
+ else return PyObject_IsTrue(x);
+}
+static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) {
+ int retval;
+ if (unlikely(!x)) return -1;
+ retval = __Pyx_PyObject_IsTrue(x);
+ Py_DECREF(x);
+ return retval;
+}
+static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) {
+#if PY_MAJOR_VERSION >= 3
+ if (PyLong_Check(result)) {
+ if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1,
+ "__int__ returned non-int (type %.200s). "
+ "The ability to return an instance of a strict subclass of int "
+ "is deprecated, and may be removed in a future version of Python.",
+ Py_TYPE(result)->tp_name)) {
+ Py_DECREF(result);
+ return NULL;
+ }
+ return result;
+ }
+#endif
+ PyErr_Format(PyExc_TypeError,
+ "__%.4s__ returned non-%.4s (type %.200s)",
+ type_name, type_name, Py_TYPE(result)->tp_name);
+ Py_DECREF(result);
+ return NULL;
+}
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) {
+#if CYTHON_USE_TYPE_SLOTS
+ PyNumberMethods *m;
+#endif
+ const char *name = NULL;
+ PyObject *res = NULL;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x) || PyLong_Check(x)))
+#else
+ if (likely(PyLong_Check(x)))
+#endif
+ return __Pyx_NewRef(x);
+#if CYTHON_USE_TYPE_SLOTS
+ m = Py_TYPE(x)->tp_as_number;
+ #if PY_MAJOR_VERSION < 3
+ if (m && m->nb_int) {
+ name = "int";
+ res = m->nb_int(x);
+ }
+ else if (m && m->nb_long) {
+ name = "long";
+ res = m->nb_long(x);
+ }
+ #else
+ if (likely(m && m->nb_int)) {
+ name = "int";
+ res = m->nb_int(x);
+ }
+ #endif
+#else
+ if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) {
+ res = PyNumber_Int(x);
+ }
+#endif
+ if (likely(res)) {
+#if PY_MAJOR_VERSION < 3
+ if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) {
+#else
+ if (unlikely(!PyLong_CheckExact(res))) {
+#endif
+ return __Pyx_PyNumber_IntOrLongWrongResultType(res, name);
+ }
+ }
+ else if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_TypeError,
+ "an integer is required");
+ }
+ return res;
+}
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
+ Py_ssize_t ival;
+ PyObject *x;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_CheckExact(b))) {
+ if (sizeof(Py_ssize_t) >= sizeof(long))
+ return PyInt_AS_LONG(b);
+ else
+ return PyInt_AsSsize_t(b);
+ }
+#endif
+ if (likely(PyLong_CheckExact(b))) {
+ #if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)b)->ob_digit;
+ const Py_ssize_t size = Py_SIZE(b);
+ if (likely(__Pyx_sst_abs(size) <= 1)) {
+ ival = likely(size) ? digits[0] : 0;
+ if (size == -1) ival = -ival;
+ return ival;
+ } else {
+ switch (size) {
+ case 2:
+ if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
+ return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case -2:
+ if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
+ return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case 3:
+ if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
+ return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case -3:
+ if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
+ return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case 4:
+ if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
+ return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case -4:
+ if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
+ return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ }
+ }
+ #endif
+ return PyLong_AsSsize_t(b);
+ }
+ x = PyNumber_Index(b);
+ if (!x) return -1;
+ ival = PyInt_AsSsize_t(x);
+ Py_DECREF(x);
+ return ival;
+}
+static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) {
+ return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False);
+}
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {
+ return PyInt_FromSize_t(ival);
+}
+
+
+#endif /* Py_PYTHON_H */
diff --git a/third_party/python/PyYAML/ext/_yaml.h b/third_party/python/PyYAML/ext/_yaml.h
new file mode 100644
index 0000000000..21fd6a991b
--- /dev/null
+++ b/third_party/python/PyYAML/ext/_yaml.h
@@ -0,0 +1,23 @@
+
+#include <yaml.h>
+
+#if PY_MAJOR_VERSION < 3
+
+#define PyUnicode_FromString(s) PyUnicode_DecodeUTF8((s), strlen(s), "strict")
+
+#else
+
+#define PyString_CheckExact PyBytes_CheckExact
+#define PyString_AS_STRING PyBytes_AS_STRING
+#define PyString_GET_SIZE PyBytes_GET_SIZE
+#define PyString_FromStringAndSize PyBytes_FromStringAndSize
+
+#endif
+
+#ifdef _MSC_VER /* MS Visual C++ 6.0 */
+#if _MSC_VER == 1200
+
+#define PyLong_FromUnsignedLongLong(z) PyInt_FromLong(z)
+
+#endif
+#endif
diff --git a/third_party/python/PyYAML/ext/_yaml.pxd b/third_party/python/PyYAML/ext/_yaml.pxd
new file mode 100644
index 0000000000..7937c9db51
--- /dev/null
+++ b/third_party/python/PyYAML/ext/_yaml.pxd
@@ -0,0 +1,251 @@
+
+cdef extern from "_yaml.h":
+
+ void malloc(int l)
+ void memcpy(char *d, char *s, int l)
+ int strlen(char *s)
+ int PyString_CheckExact(object o)
+ int PyUnicode_CheckExact(object o)
+ char *PyString_AS_STRING(object o)
+ int PyString_GET_SIZE(object o)
+ object PyString_FromStringAndSize(char *v, int l)
+ object PyUnicode_FromString(char *u)
+ object PyUnicode_DecodeUTF8(char *u, int s, char *e)
+ object PyUnicode_AsUTF8String(object o)
+ int PY_MAJOR_VERSION
+
+ ctypedef enum:
+ SIZEOF_VOID_P
+ ctypedef enum yaml_encoding_t:
+ YAML_ANY_ENCODING
+ YAML_UTF8_ENCODING
+ YAML_UTF16LE_ENCODING
+ YAML_UTF16BE_ENCODING
+ ctypedef enum yaml_break_t:
+ YAML_ANY_BREAK
+ YAML_CR_BREAK
+ YAML_LN_BREAK
+ YAML_CRLN_BREAK
+ ctypedef enum yaml_error_type_t:
+ YAML_NO_ERROR
+ YAML_MEMORY_ERROR
+ YAML_READER_ERROR
+ YAML_SCANNER_ERROR
+ YAML_PARSER_ERROR
+ YAML_WRITER_ERROR
+ YAML_EMITTER_ERROR
+ ctypedef enum yaml_scalar_style_t:
+ YAML_ANY_SCALAR_STYLE
+ YAML_PLAIN_SCALAR_STYLE
+ YAML_SINGLE_QUOTED_SCALAR_STYLE
+ YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ YAML_LITERAL_SCALAR_STYLE
+ YAML_FOLDED_SCALAR_STYLE
+ ctypedef enum yaml_sequence_style_t:
+ YAML_ANY_SEQUENCE_STYLE
+ YAML_BLOCK_SEQUENCE_STYLE
+ YAML_FLOW_SEQUENCE_STYLE
+ ctypedef enum yaml_mapping_style_t:
+ YAML_ANY_MAPPING_STYLE
+ YAML_BLOCK_MAPPING_STYLE
+ YAML_FLOW_MAPPING_STYLE
+ ctypedef enum yaml_token_type_t:
+ YAML_NO_TOKEN
+ YAML_STREAM_START_TOKEN
+ YAML_STREAM_END_TOKEN
+ YAML_VERSION_DIRECTIVE_TOKEN
+ YAML_TAG_DIRECTIVE_TOKEN
+ YAML_DOCUMENT_START_TOKEN
+ YAML_DOCUMENT_END_TOKEN
+ YAML_BLOCK_SEQUENCE_START_TOKEN
+ YAML_BLOCK_MAPPING_START_TOKEN
+ YAML_BLOCK_END_TOKEN
+ YAML_FLOW_SEQUENCE_START_TOKEN
+ YAML_FLOW_SEQUENCE_END_TOKEN
+ YAML_FLOW_MAPPING_START_TOKEN
+ YAML_FLOW_MAPPING_END_TOKEN
+ YAML_BLOCK_ENTRY_TOKEN
+ YAML_FLOW_ENTRY_TOKEN
+ YAML_KEY_TOKEN
+ YAML_VALUE_TOKEN
+ YAML_ALIAS_TOKEN
+ YAML_ANCHOR_TOKEN
+ YAML_TAG_TOKEN
+ YAML_SCALAR_TOKEN
+ ctypedef enum yaml_event_type_t:
+ YAML_NO_EVENT
+ YAML_STREAM_START_EVENT
+ YAML_STREAM_END_EVENT
+ YAML_DOCUMENT_START_EVENT
+ YAML_DOCUMENT_END_EVENT
+ YAML_ALIAS_EVENT
+ YAML_SCALAR_EVENT
+ YAML_SEQUENCE_START_EVENT
+ YAML_SEQUENCE_END_EVENT
+ YAML_MAPPING_START_EVENT
+ YAML_MAPPING_END_EVENT
+
+ ctypedef int yaml_read_handler_t(void *data, char *buffer,
+ size_t size, size_t *size_read) except 0
+
+ ctypedef int yaml_write_handler_t(void *data, char *buffer,
+ size_t size) except 0
+
+ ctypedef struct yaml_mark_t:
+ size_t index
+ size_t line
+ size_t column
+ ctypedef struct yaml_version_directive_t:
+ int major
+ int minor
+ ctypedef struct yaml_tag_directive_t:
+ char *handle
+ char *prefix
+
+ ctypedef struct _yaml_token_stream_start_data_t:
+ yaml_encoding_t encoding
+ ctypedef struct _yaml_token_alias_data_t:
+ char *value
+ ctypedef struct _yaml_token_anchor_data_t:
+ char *value
+ ctypedef struct _yaml_token_tag_data_t:
+ char *handle
+ char *suffix
+ ctypedef struct _yaml_token_scalar_data_t:
+ char *value
+ size_t length
+ yaml_scalar_style_t style
+ ctypedef struct _yaml_token_version_directive_data_t:
+ int major
+ int minor
+ ctypedef struct _yaml_token_tag_directive_data_t:
+ char *handle
+ char *prefix
+ ctypedef union _yaml_token_data_t:
+ _yaml_token_stream_start_data_t stream_start
+ _yaml_token_alias_data_t alias
+ _yaml_token_anchor_data_t anchor
+ _yaml_token_tag_data_t tag
+ _yaml_token_scalar_data_t scalar
+ _yaml_token_version_directive_data_t version_directive
+ _yaml_token_tag_directive_data_t tag_directive
+ ctypedef struct yaml_token_t:
+ yaml_token_type_t type
+ _yaml_token_data_t data
+ yaml_mark_t start_mark
+ yaml_mark_t end_mark
+
+ ctypedef struct _yaml_event_stream_start_data_t:
+ yaml_encoding_t encoding
+ ctypedef struct _yaml_event_document_start_data_tag_directives_t:
+ yaml_tag_directive_t *start
+ yaml_tag_directive_t *end
+ ctypedef struct _yaml_event_document_start_data_t:
+ yaml_version_directive_t *version_directive
+ _yaml_event_document_start_data_tag_directives_t tag_directives
+ int implicit
+ ctypedef struct _yaml_event_document_end_data_t:
+ int implicit
+ ctypedef struct _yaml_event_alias_data_t:
+ char *anchor
+ ctypedef struct _yaml_event_scalar_data_t:
+ char *anchor
+ char *tag
+ char *value
+ size_t length
+ int plain_implicit
+ int quoted_implicit
+ yaml_scalar_style_t style
+ ctypedef struct _yaml_event_sequence_start_data_t:
+ char *anchor
+ char *tag
+ int implicit
+ yaml_sequence_style_t style
+ ctypedef struct _yaml_event_mapping_start_data_t:
+ char *anchor
+ char *tag
+ int implicit
+ yaml_mapping_style_t style
+ ctypedef union _yaml_event_data_t:
+ _yaml_event_stream_start_data_t stream_start
+ _yaml_event_document_start_data_t document_start
+ _yaml_event_document_end_data_t document_end
+ _yaml_event_alias_data_t alias
+ _yaml_event_scalar_data_t scalar
+ _yaml_event_sequence_start_data_t sequence_start
+ _yaml_event_mapping_start_data_t mapping_start
+ ctypedef struct yaml_event_t:
+ yaml_event_type_t type
+ _yaml_event_data_t data
+ yaml_mark_t start_mark
+ yaml_mark_t end_mark
+
+ ctypedef struct yaml_parser_t:
+ yaml_error_type_t error
+ char *problem
+ size_t problem_offset
+ int problem_value
+ yaml_mark_t problem_mark
+ char *context
+ yaml_mark_t context_mark
+
+ ctypedef struct yaml_emitter_t:
+ yaml_error_type_t error
+ char *problem
+
+ char *yaml_get_version_string()
+ void yaml_get_version(int *major, int *minor, int *patch)
+
+ void yaml_token_delete(yaml_token_t *token)
+
+ int yaml_stream_start_event_initialize(yaml_event_t *event,
+ yaml_encoding_t encoding)
+ int yaml_stream_end_event_initialize(yaml_event_t *event)
+ int yaml_document_start_event_initialize(yaml_event_t *event,
+ yaml_version_directive_t *version_directive,
+ yaml_tag_directive_t *tag_directives_start,
+ yaml_tag_directive_t *tag_directives_end,
+ int implicit)
+ int yaml_document_end_event_initialize(yaml_event_t *event,
+ int implicit)
+ int yaml_alias_event_initialize(yaml_event_t *event, char *anchor)
+ int yaml_scalar_event_initialize(yaml_event_t *event,
+ char *anchor, char *tag, char *value, size_t length,
+ int plain_implicit, int quoted_implicit,
+ yaml_scalar_style_t style)
+ int yaml_sequence_start_event_initialize(yaml_event_t *event,
+ char *anchor, char *tag, int implicit, yaml_sequence_style_t style)
+ int yaml_sequence_end_event_initialize(yaml_event_t *event)
+ int yaml_mapping_start_event_initialize(yaml_event_t *event,
+ char *anchor, char *tag, int implicit, yaml_mapping_style_t style)
+ int yaml_mapping_end_event_initialize(yaml_event_t *event)
+ void yaml_event_delete(yaml_event_t *event)
+
+ int yaml_parser_initialize(yaml_parser_t *parser)
+ void yaml_parser_delete(yaml_parser_t *parser)
+ void yaml_parser_set_input_string(yaml_parser_t *parser,
+ char *input, size_t size)
+ void yaml_parser_set_input(yaml_parser_t *parser,
+ yaml_read_handler_t *handler, void *data)
+ void yaml_parser_set_encoding(yaml_parser_t *parser,
+ yaml_encoding_t encoding)
+ int yaml_parser_scan(yaml_parser_t *parser, yaml_token_t *token) except *
+ int yaml_parser_parse(yaml_parser_t *parser, yaml_event_t *event) except *
+
+ int yaml_emitter_initialize(yaml_emitter_t *emitter)
+ void yaml_emitter_delete(yaml_emitter_t *emitter)
+ void yaml_emitter_set_output_string(yaml_emitter_t *emitter,
+ char *output, size_t size, size_t *size_written)
+ void yaml_emitter_set_output(yaml_emitter_t *emitter,
+ yaml_write_handler_t *handler, void *data)
+ void yaml_emitter_set_encoding(yaml_emitter_t *emitter,
+ yaml_encoding_t encoding)
+ void yaml_emitter_set_canonical(yaml_emitter_t *emitter, int canonical)
+ void yaml_emitter_set_indent(yaml_emitter_t *emitter, int indent)
+ void yaml_emitter_set_width(yaml_emitter_t *emitter, int width)
+ void yaml_emitter_set_unicode(yaml_emitter_t *emitter, int unicode)
+ void yaml_emitter_set_break(yaml_emitter_t *emitter,
+ yaml_break_t line_break)
+ int yaml_emitter_emit(yaml_emitter_t *emitter, yaml_event_t *event) except *
+ int yaml_emitter_flush(yaml_emitter_t *emitter)
+
diff --git a/third_party/python/PyYAML/ext/_yaml.pyx b/third_party/python/PyYAML/ext/_yaml.pyx
new file mode 100644
index 0000000000..ff4efe80b5
--- /dev/null
+++ b/third_party/python/PyYAML/ext/_yaml.pyx
@@ -0,0 +1,1527 @@
+
+import yaml
+
+def get_version_string():
+ cdef char *value
+ value = yaml_get_version_string()
+ if PY_MAJOR_VERSION < 3:
+ return value
+ else:
+ return PyUnicode_FromString(value)
+
+def get_version():
+ cdef int major, minor, patch
+ yaml_get_version(&major, &minor, &patch)
+ return (major, minor, patch)
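+
+# Illustrative use (hypothetical session; the version numbers depend on the
+# libyaml this extension was built against, and `_yaml` assumes the legacy
+# module name this ext/ copy builds under):
+#
+#     >>> import _yaml
+#     >>> _yaml.get_version()
+#     (0, 2, 2)
+#     >>> _yaml.get_version_string()
+#     '0.2.2'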
+
+#Mark = yaml.error.Mark
+YAMLError = yaml.error.YAMLError
+ReaderError = yaml.reader.ReaderError
+ScannerError = yaml.scanner.ScannerError
+ParserError = yaml.parser.ParserError
+ComposerError = yaml.composer.ComposerError
+ConstructorError = yaml.constructor.ConstructorError
+EmitterError = yaml.emitter.EmitterError
+SerializerError = yaml.serializer.SerializerError
+RepresenterError = yaml.representer.RepresenterError
+
+StreamStartToken = yaml.tokens.StreamStartToken
+StreamEndToken = yaml.tokens.StreamEndToken
+DirectiveToken = yaml.tokens.DirectiveToken
+DocumentStartToken = yaml.tokens.DocumentStartToken
+DocumentEndToken = yaml.tokens.DocumentEndToken
+BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken
+BlockMappingStartToken = yaml.tokens.BlockMappingStartToken
+BlockEndToken = yaml.tokens.BlockEndToken
+FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken
+FlowMappingStartToken = yaml.tokens.FlowMappingStartToken
+FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken
+FlowMappingEndToken = yaml.tokens.FlowMappingEndToken
+KeyToken = yaml.tokens.KeyToken
+ValueToken = yaml.tokens.ValueToken
+BlockEntryToken = yaml.tokens.BlockEntryToken
+FlowEntryToken = yaml.tokens.FlowEntryToken
+AliasToken = yaml.tokens.AliasToken
+AnchorToken = yaml.tokens.AnchorToken
+TagToken = yaml.tokens.TagToken
+ScalarToken = yaml.tokens.ScalarToken
+
+StreamStartEvent = yaml.events.StreamStartEvent
+StreamEndEvent = yaml.events.StreamEndEvent
+DocumentStartEvent = yaml.events.DocumentStartEvent
+DocumentEndEvent = yaml.events.DocumentEndEvent
+AliasEvent = yaml.events.AliasEvent
+ScalarEvent = yaml.events.ScalarEvent
+SequenceStartEvent = yaml.events.SequenceStartEvent
+SequenceEndEvent = yaml.events.SequenceEndEvent
+MappingStartEvent = yaml.events.MappingStartEvent
+MappingEndEvent = yaml.events.MappingEndEvent
+
+ScalarNode = yaml.nodes.ScalarNode
+SequenceNode = yaml.nodes.SequenceNode
+MappingNode = yaml.nodes.MappingNode
+
+cdef class Mark:
+ cdef readonly object name
+ cdef readonly size_t index
+ cdef readonly size_t line
+ cdef readonly size_t column
+ cdef readonly buffer
+ cdef readonly pointer
+
+ def __init__(self, object name, size_t index, size_t line, size_t column,
+ object buffer, object pointer):
+ self.name = name
+ self.index = index
+ self.line = line
+ self.column = column
+ self.buffer = buffer
+ self.pointer = pointer
+
+ def get_snippet(self):
+ return None
+
+ def __str__(self):
+ where = " in \"%s\", line %d, column %d" \
+ % (self.name, self.line+1, self.column+1)
+ return where
+
+#class YAMLError(Exception):
+# pass
+#
+#class MarkedYAMLError(YAMLError):
+#
+# def __init__(self, context=None, context_mark=None,
+# problem=None, problem_mark=None, note=None):
+# self.context = context
+# self.context_mark = context_mark
+# self.problem = problem
+# self.problem_mark = problem_mark
+# self.note = note
+#
+# def __str__(self):
+# lines = []
+# if self.context is not None:
+# lines.append(self.context)
+# if self.context_mark is not None \
+# and (self.problem is None or self.problem_mark is None
+# or self.context_mark.name != self.problem_mark.name
+# or self.context_mark.line != self.problem_mark.line
+# or self.context_mark.column != self.problem_mark.column):
+# lines.append(str(self.context_mark))
+# if self.problem is not None:
+# lines.append(self.problem)
+# if self.problem_mark is not None:
+# lines.append(str(self.problem_mark))
+# if self.note is not None:
+# lines.append(self.note)
+# return '\n'.join(lines)
+#
+#class ReaderError(YAMLError):
+#
+# def __init__(self, name, position, character, encoding, reason):
+# self.name = name
+# self.character = character
+# self.position = position
+# self.encoding = encoding
+# self.reason = reason
+#
+# def __str__(self):
+# if isinstance(self.character, str):
+# return "'%s' codec can't decode byte #x%02x: %s\n" \
+# " in \"%s\", position %d" \
+# % (self.encoding, ord(self.character), self.reason,
+# self.name, self.position)
+# else:
+# return "unacceptable character #x%04x: %s\n" \
+# " in \"%s\", position %d" \
+# % (ord(self.character), self.reason,
+# self.name, self.position)
+#
+#class ScannerError(MarkedYAMLError):
+# pass
+#
+#class ParserError(MarkedYAMLError):
+# pass
+#
+#class EmitterError(YAMLError):
+# pass
+#
+#cdef class Token:
+# cdef readonly Mark start_mark
+# cdef readonly Mark end_mark
+# def __init__(self, Mark start_mark, Mark end_mark):
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+#
+#cdef class StreamStartToken(Token):
+# cdef readonly object encoding
+# def __init__(self, Mark start_mark, Mark end_mark, encoding):
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+# self.encoding = encoding
+#
+#cdef class StreamEndToken(Token):
+# pass
+#
+#cdef class DirectiveToken(Token):
+# cdef readonly object name
+# cdef readonly object value
+# def __init__(self, name, value, Mark start_mark, Mark end_mark):
+# self.name = name
+# self.value = value
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+#
+#cdef class DocumentStartToken(Token):
+# pass
+#
+#cdef class DocumentEndToken(Token):
+# pass
+#
+#cdef class BlockSequenceStartToken(Token):
+# pass
+#
+#cdef class BlockMappingStartToken(Token):
+# pass
+#
+#cdef class BlockEndToken(Token):
+# pass
+#
+#cdef class FlowSequenceStartToken(Token):
+# pass
+#
+#cdef class FlowMappingStartToken(Token):
+# pass
+#
+#cdef class FlowSequenceEndToken(Token):
+# pass
+#
+#cdef class FlowMappingEndToken(Token):
+# pass
+#
+#cdef class KeyToken(Token):
+# pass
+#
+#cdef class ValueToken(Token):
+# pass
+#
+#cdef class BlockEntryToken(Token):
+# pass
+#
+#cdef class FlowEntryToken(Token):
+# pass
+#
+#cdef class AliasToken(Token):
+# cdef readonly object value
+# def __init__(self, value, Mark start_mark, Mark end_mark):
+# self.value = value
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+#
+#cdef class AnchorToken(Token):
+# cdef readonly object value
+# def __init__(self, value, Mark start_mark, Mark end_mark):
+# self.value = value
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+#
+#cdef class TagToken(Token):
+# cdef readonly object value
+# def __init__(self, value, Mark start_mark, Mark end_mark):
+# self.value = value
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+#
+#cdef class ScalarToken(Token):
+# cdef readonly object value
+# cdef readonly object plain
+# cdef readonly object style
+# def __init__(self, value, plain, Mark start_mark, Mark end_mark, style=None):
+# self.value = value
+# self.plain = plain
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+# self.style = style
+
+cdef class CParser:
+
+ cdef yaml_parser_t parser
+ cdef yaml_event_t parsed_event
+
+ cdef object stream
+ cdef object stream_name
+ cdef object current_token
+ cdef object current_event
+ cdef object anchors
+ cdef object stream_cache
+ cdef int stream_cache_len
+ cdef int stream_cache_pos
+ cdef int unicode_source
+
+ def __init__(self, stream):
+ cdef is_readable
+ if yaml_parser_initialize(&self.parser) == 0:
+ raise MemoryError
+ self.parsed_event.type = YAML_NO_EVENT
+ is_readable = 1
+ try:
+ stream.read
+ except AttributeError:
+ is_readable = 0
+ self.unicode_source = 0
+ if is_readable:
+ self.stream = stream
+ try:
+ self.stream_name = stream.name
+ except AttributeError:
+ if PY_MAJOR_VERSION < 3:
+ self.stream_name = '<file>'
+ else:
+ self.stream_name = u'<file>'
+ self.stream_cache = None
+ self.stream_cache_len = 0
+ self.stream_cache_pos = 0
+ yaml_parser_set_input(&self.parser, input_handler, <void *>self)
+ else:
+ if PyUnicode_CheckExact(stream) != 0:
+ stream = PyUnicode_AsUTF8String(stream)
+ if PY_MAJOR_VERSION < 3:
+ self.stream_name = '<unicode string>'
+ else:
+ self.stream_name = u'<unicode string>'
+ self.unicode_source = 1
+ else:
+ if PY_MAJOR_VERSION < 3:
+ self.stream_name = '<byte string>'
+ else:
+ self.stream_name = u'<byte string>'
+ if PyString_CheckExact(stream) == 0:
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("a string or stream input is required")
+ else:
+ raise TypeError(u"a string or stream input is required")
+ self.stream = stream
+ yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream))
+ self.current_token = None
+ self.current_event = None
+ self.anchors = {}
+
+ def __dealloc__(self):
+ yaml_parser_delete(&self.parser)
+ yaml_event_delete(&self.parsed_event)
+
+ def dispose(self):
+ pass
+
+ cdef object _parser_error(self):
+ if self.parser.error == YAML_MEMORY_ERROR:
+ return MemoryError
+ elif self.parser.error == YAML_READER_ERROR:
+ if PY_MAJOR_VERSION < 3:
+ return ReaderError(self.stream_name, self.parser.problem_offset,
+ self.parser.problem_value, '?', self.parser.problem)
+ else:
+ return ReaderError(self.stream_name, self.parser.problem_offset,
+ self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem))
+ elif self.parser.error == YAML_SCANNER_ERROR \
+ or self.parser.error == YAML_PARSER_ERROR:
+ context_mark = None
+ problem_mark = None
+ if self.parser.context != NULL:
+ context_mark = Mark(self.stream_name,
+ self.parser.context_mark.index,
+ self.parser.context_mark.line,
+ self.parser.context_mark.column, None, None)
+ if self.parser.problem != NULL:
+ problem_mark = Mark(self.stream_name,
+ self.parser.problem_mark.index,
+ self.parser.problem_mark.line,
+ self.parser.problem_mark.column, None, None)
+ context = None
+ if self.parser.context != NULL:
+ if PY_MAJOR_VERSION < 3:
+ context = self.parser.context
+ else:
+ context = PyUnicode_FromString(self.parser.context)
+ if PY_MAJOR_VERSION < 3:
+ problem = self.parser.problem
+ else:
+ problem = PyUnicode_FromString(self.parser.problem)
+ if self.parser.error == YAML_SCANNER_ERROR:
+ return ScannerError(context, context_mark, problem, problem_mark)
+ else:
+ return ParserError(context, context_mark, problem, problem_mark)
+ if PY_MAJOR_VERSION < 3:
+ raise ValueError("no parser error")
+ else:
+ raise ValueError(u"no parser error")
+
+ def raw_scan(self):
+ cdef yaml_token_t token
+ cdef int done
+ cdef int count
+ count = 0
+ done = 0
+ while done == 0:
+ if yaml_parser_scan(&self.parser, &token) == 0:
+ error = self._parser_error()
+ raise error
+ if token.type == YAML_NO_TOKEN:
+ done = 1
+ else:
+ count = count+1
+ yaml_token_delete(&token)
+ return count
+
+ cdef object _scan(self):
+ cdef yaml_token_t token
+ if yaml_parser_scan(&self.parser, &token) == 0:
+ error = self._parser_error()
+ raise error
+ token_object = self._token_to_object(&token)
+ yaml_token_delete(&token)
+ return token_object
+
+ cdef object _token_to_object(self, yaml_token_t *token):
+ start_mark = Mark(self.stream_name,
+ token.start_mark.index,
+ token.start_mark.line,
+ token.start_mark.column,
+ None, None)
+ end_mark = Mark(self.stream_name,
+ token.end_mark.index,
+ token.end_mark.line,
+ token.end_mark.column,
+ None, None)
+ if token.type == YAML_NO_TOKEN:
+ return None
+ elif token.type == YAML_STREAM_START_TOKEN:
+ encoding = None
+ if token.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ if self.unicode_source == 0:
+ encoding = u"utf-8"
+ elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ encoding = u"utf-16-le"
+ elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ encoding = u"utf-16-be"
+ return StreamStartToken(start_mark, end_mark, encoding)
+ elif token.type == YAML_STREAM_END_TOKEN:
+ return StreamEndToken(start_mark, end_mark)
+ elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
+ return DirectiveToken(u"YAML",
+ (token.data.version_directive.major,
+ token.data.version_directive.minor),
+ start_mark, end_mark)
+ elif token.type == YAML_TAG_DIRECTIVE_TOKEN:
+ handle = PyUnicode_FromString(token.data.tag_directive.handle)
+ prefix = PyUnicode_FromString(token.data.tag_directive.prefix)
+ return DirectiveToken(u"TAG", (handle, prefix),
+ start_mark, end_mark)
+ elif token.type == YAML_DOCUMENT_START_TOKEN:
+ return DocumentStartToken(start_mark, end_mark)
+ elif token.type == YAML_DOCUMENT_END_TOKEN:
+ return DocumentEndToken(start_mark, end_mark)
+ elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN:
+ return BlockSequenceStartToken(start_mark, end_mark)
+ elif token.type == YAML_BLOCK_MAPPING_START_TOKEN:
+ return BlockMappingStartToken(start_mark, end_mark)
+ elif token.type == YAML_BLOCK_END_TOKEN:
+ return BlockEndToken(start_mark, end_mark)
+ elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN:
+ return FlowSequenceStartToken(start_mark, end_mark)
+ elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN:
+ return FlowSequenceEndToken(start_mark, end_mark)
+ elif token.type == YAML_FLOW_MAPPING_START_TOKEN:
+ return FlowMappingStartToken(start_mark, end_mark)
+ elif token.type == YAML_FLOW_MAPPING_END_TOKEN:
+ return FlowMappingEndToken(start_mark, end_mark)
+ elif token.type == YAML_BLOCK_ENTRY_TOKEN:
+ return BlockEntryToken(start_mark, end_mark)
+ elif token.type == YAML_FLOW_ENTRY_TOKEN:
+ return FlowEntryToken(start_mark, end_mark)
+ elif token.type == YAML_KEY_TOKEN:
+ return KeyToken(start_mark, end_mark)
+ elif token.type == YAML_VALUE_TOKEN:
+ return ValueToken(start_mark, end_mark)
+ elif token.type == YAML_ALIAS_TOKEN:
+ value = PyUnicode_FromString(token.data.alias.value)
+ return AliasToken(value, start_mark, end_mark)
+ elif token.type == YAML_ANCHOR_TOKEN:
+ value = PyUnicode_FromString(token.data.anchor.value)
+ return AnchorToken(value, start_mark, end_mark)
+ elif token.type == YAML_TAG_TOKEN:
+ handle = PyUnicode_FromString(token.data.tag.handle)
+ suffix = PyUnicode_FromString(token.data.tag.suffix)
+ if not handle:
+ handle = None
+ return TagToken((handle, suffix), start_mark, end_mark)
+ elif token.type == YAML_SCALAR_TOKEN:
+ value = PyUnicode_DecodeUTF8(token.data.scalar.value,
+ token.data.scalar.length, 'strict')
+ plain = False
+ style = None
+ if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ plain = True
+ style = u''
+ elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ style = u'\''
+ elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ style = u'"'
+ elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ style = u'|'
+ elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ style = u'>'
+ return ScalarToken(value, plain,
+ start_mark, end_mark, style)
+ else:
+ if PY_MAJOR_VERSION < 3:
+ raise ValueError("unknown token type")
+ else:
+ raise ValueError(u"unknown token type")
+
+ def get_token(self):
+ if self.current_token is not None:
+ value = self.current_token
+ self.current_token = None
+ else:
+ value = self._scan()
+ return value
+
+ def peek_token(self):
+ if self.current_token is None:
+ self.current_token = self._scan()
+ return self.current_token
+
+ def check_token(self, *choices):
+ if self.current_token is None:
+ self.current_token = self._scan()
+ if self.current_token is None:
+ return False
+ if not choices:
+ return True
+ token_class = self.current_token.__class__
+ for choice in choices:
+ if token_class is choice:
+ return True
+ return False
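+
+ # Illustrative token-level use (hypothetical session; CParser is normally
+ # reached through yaml.CLoader rather than instantiated directly):
+ #
+ #     >>> import yaml
+ #     >>> [t.__class__.__name__ for t in yaml.scan("a: 1", Loader=yaml.CLoader)]
+ #     ['StreamStartToken', 'BlockMappingStartToken', 'KeyToken', 'ScalarToken',
+ #      'ValueToken', 'ScalarToken', 'BlockEndToken', 'StreamEndToken']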
+
+ def raw_parse(self):
+ cdef yaml_event_t event
+ cdef int done
+ cdef int count
+ count = 0
+ done = 0
+ while done == 0:
+ if yaml_parser_parse(&self.parser, &event) == 0:
+ error = self._parser_error()
+ raise error
+ if event.type == YAML_NO_EVENT:
+ done = 1
+ else:
+ count = count+1
+ yaml_event_delete(&event)
+ return count
+
+ cdef object _parse(self):
+ cdef yaml_event_t event
+ if yaml_parser_parse(&self.parser, &event) == 0:
+ error = self._parser_error()
+ raise error
+ event_object = self._event_to_object(&event)
+ yaml_event_delete(&event)
+ return event_object
+
+ cdef object _event_to_object(self, yaml_event_t *event):
+ cdef yaml_tag_directive_t *tag_directive
+ start_mark = Mark(self.stream_name,
+ event.start_mark.index,
+ event.start_mark.line,
+ event.start_mark.column,
+ None, None)
+ end_mark = Mark(self.stream_name,
+ event.end_mark.index,
+ event.end_mark.line,
+ event.end_mark.column,
+ None, None)
+ if event.type == YAML_NO_EVENT:
+ return None
+ elif event.type == YAML_STREAM_START_EVENT:
+ encoding = None
+ if event.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ if self.unicode_source == 0:
+ encoding = u"utf-8"
+ elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ encoding = u"utf-16-le"
+ elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ encoding = u"utf-16-be"
+ return StreamStartEvent(start_mark, end_mark, encoding)
+ elif event.type == YAML_STREAM_END_EVENT:
+ return StreamEndEvent(start_mark, end_mark)
+ elif event.type == YAML_DOCUMENT_START_EVENT:
+ explicit = False
+ if event.data.document_start.implicit == 0:
+ explicit = True
+ version = None
+ if event.data.document_start.version_directive != NULL:
+ version = (event.data.document_start.version_directive.major,
+ event.data.document_start.version_directive.minor)
+ tags = None
+ if event.data.document_start.tag_directives.start != NULL:
+ tags = {}
+ tag_directive = event.data.document_start.tag_directives.start
+ while tag_directive != event.data.document_start.tag_directives.end:
+ handle = PyUnicode_FromString(tag_directive.handle)
+ prefix = PyUnicode_FromString(tag_directive.prefix)
+ tags[handle] = prefix
+ tag_directive = tag_directive+1
+ return DocumentStartEvent(start_mark, end_mark,
+ explicit, version, tags)
+ elif event.type == YAML_DOCUMENT_END_EVENT:
+ explicit = False
+ if event.data.document_end.implicit == 0:
+ explicit = True
+ return DocumentEndEvent(start_mark, end_mark, explicit)
+ elif event.type == YAML_ALIAS_EVENT:
+ anchor = PyUnicode_FromString(event.data.alias.anchor)
+ return AliasEvent(anchor, start_mark, end_mark)
+ elif event.type == YAML_SCALAR_EVENT:
+ anchor = None
+ if event.data.scalar.anchor != NULL:
+ anchor = PyUnicode_FromString(event.data.scalar.anchor)
+ tag = None
+ if event.data.scalar.tag != NULL:
+ tag = PyUnicode_FromString(event.data.scalar.tag)
+ value = PyUnicode_DecodeUTF8(event.data.scalar.value,
+ event.data.scalar.length, 'strict')
+ plain_implicit = False
+ if event.data.scalar.plain_implicit == 1:
+ plain_implicit = True
+ quoted_implicit = False
+ if event.data.scalar.quoted_implicit == 1:
+ quoted_implicit = True
+ style = None
+ if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ style = u''
+ elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ style = u'\''
+ elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ style = u'"'
+ elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ style = u'|'
+ elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ style = u'>'
+ return ScalarEvent(anchor, tag,
+ (plain_implicit, quoted_implicit),
+ value, start_mark, end_mark, style)
+ elif event.type == YAML_SEQUENCE_START_EVENT:
+ anchor = None
+ if event.data.sequence_start.anchor != NULL:
+ anchor = PyUnicode_FromString(event.data.sequence_start.anchor)
+ tag = None
+ if event.data.sequence_start.tag != NULL:
+ tag = PyUnicode_FromString(event.data.sequence_start.tag)
+ implicit = False
+ if event.data.sequence_start.implicit == 1:
+ implicit = True
+ flow_style = None
+ if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ flow_style = True
+ elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ flow_style = False
+ return SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style)
+ elif event.type == YAML_MAPPING_START_EVENT:
+ anchor = None
+ if event.data.mapping_start.anchor != NULL:
+ anchor = PyUnicode_FromString(event.data.mapping_start.anchor)
+ tag = None
+ if event.data.mapping_start.tag != NULL:
+ tag = PyUnicode_FromString(event.data.mapping_start.tag)
+ implicit = False
+ if event.data.mapping_start.implicit == 1:
+ implicit = True
+ flow_style = None
+ if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ flow_style = True
+ elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ flow_style = False
+ return MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style)
+ elif event.type == YAML_SEQUENCE_END_EVENT:
+ return SequenceEndEvent(start_mark, end_mark)
+ elif event.type == YAML_MAPPING_END_EVENT:
+ return MappingEndEvent(start_mark, end_mark)
+ else:
+ if PY_MAJOR_VERSION < 3:
+ raise ValueError("unknown event type")
+ else:
+ raise ValueError(u"unknown event type")
+
+ def get_event(self):
+ if self.current_event is not None:
+ value = self.current_event
+ self.current_event = None
+ else:
+ value = self._parse()
+ return value
+
+ def peek_event(self):
+ if self.current_event is None:
+ self.current_event = self._parse()
+ return self.current_event
+
+ def check_event(self, *choices):
+ if self.current_event is None:
+ self.current_event = self._parse()
+ if self.current_event is None:
+ return False
+ if not choices:
+ return True
+ event_class = self.current_event.__class__
+ for choice in choices:
+ if event_class is choice:
+ return True
+ return False
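+
+ # Illustrative event-level use (hypothetical session), one layer above the
+ # token example shown earlier in this class:
+ #
+ #     >>> [e.__class__.__name__ for e in yaml.parse("a: 1", Loader=yaml.CLoader)]
+ #     ['StreamStartEvent', 'DocumentStartEvent', 'MappingStartEvent',
+ #      'ScalarEvent', 'ScalarEvent', 'MappingEndEvent', 'DocumentEndEvent',
+ #      'StreamEndEvent']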
+
+ def check_node(self):
+ self._parse_next_event()
+ if self.parsed_event.type == YAML_STREAM_START_EVENT:
+ yaml_event_delete(&self.parsed_event)
+ self._parse_next_event()
+ if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ return True
+ return False
+
+ def get_node(self):
+ self._parse_next_event()
+ if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ return self._compose_document()
+
+ def get_single_node(self):
+ self._parse_next_event()
+ yaml_event_delete(&self.parsed_event)
+ self._parse_next_event()
+ document = None
+ if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ document = self._compose_document()
+ self._parse_next_event()
+ if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ mark = Mark(self.stream_name,
+ self.parsed_event.start_mark.index,
+ self.parsed_event.start_mark.line,
+ self.parsed_event.start_mark.column,
+ None, None)
+ if PY_MAJOR_VERSION < 3:
+ raise ComposerError("expected a single document in the stream",
+ document.start_mark, "but found another document", mark)
+ else:
+ raise ComposerError(u"expected a single document in the stream",
+ document.start_mark, u"but found another document", mark)
+ return document
+
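+ # The _compose_* methods below mirror yaml.composer.Composer, but are driven
+ # directly by the libyaml event held in self.parsed_event.  self.anchors maps
+ # anchor names to already-composed nodes so alias events can be resolved; it
+ # is reset once each document has been composed.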
+ cdef object _compose_document(self):
+ yaml_event_delete(&self.parsed_event)
+ node = self._compose_node(None, None)
+ self._parse_next_event()
+ yaml_event_delete(&self.parsed_event)
+ self.anchors = {}
+ return node
+
+ cdef object _compose_node(self, object parent, object index):
+ self._parse_next_event()
+ if self.parsed_event.type == YAML_ALIAS_EVENT:
+ anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor)
+ if anchor not in self.anchors:
+ mark = Mark(self.stream_name,
+ self.parsed_event.start_mark.index,
+ self.parsed_event.start_mark.line,
+ self.parsed_event.start_mark.column,
+ None, None)
+ if PY_MAJOR_VERSION < 3:
+ raise ComposerError(None, None, "found undefined alias", mark)
+ else:
+ raise ComposerError(None, None, u"found undefined alias", mark)
+ yaml_event_delete(&self.parsed_event)
+ return self.anchors[anchor]
+ anchor = None
+ if self.parsed_event.type == YAML_SCALAR_EVENT \
+ and self.parsed_event.data.scalar.anchor != NULL:
+ anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \
+ and self.parsed_event.data.sequence_start.anchor != NULL:
+ anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ elif self.parsed_event.type == YAML_MAPPING_START_EVENT \
+ and self.parsed_event.data.mapping_start.anchor != NULL:
+ anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ if anchor is not None:
+ if anchor in self.anchors:
+ mark = Mark(self.stream_name,
+ self.parsed_event.start_mark.index,
+ self.parsed_event.start_mark.line,
+ self.parsed_event.start_mark.column,
+ None, None)
+ if PY_MAJOR_VERSION < 3:
+ raise ComposerError("found duplicate anchor; first occurrence",
+ self.anchors[anchor].start_mark, "second occurrence", mark)
+ else:
+ raise ComposerError(u"found duplicate anchor; first occurrence",
+ self.anchors[anchor].start_mark, u"second occurrence", mark)
+ self.descend_resolver(parent, index)
+ if self.parsed_event.type == YAML_SCALAR_EVENT:
+ node = self._compose_scalar_node(anchor)
+ elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
+ node = self._compose_sequence_node(anchor)
+ elif self.parsed_event.type == YAML_MAPPING_START_EVENT:
+ node = self._compose_mapping_node(anchor)
+ self.ascend_resolver()
+ return node
+
+ cdef _compose_scalar_node(self, object anchor):
+ start_mark = Mark(self.stream_name,
+ self.parsed_event.start_mark.index,
+ self.parsed_event.start_mark.line,
+ self.parsed_event.start_mark.column,
+ None, None)
+ end_mark = Mark(self.stream_name,
+ self.parsed_event.end_mark.index,
+ self.parsed_event.end_mark.line,
+ self.parsed_event.end_mark.column,
+ None, None)
+ value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value,
+ self.parsed_event.data.scalar.length, 'strict')
+ plain_implicit = False
+ if self.parsed_event.data.scalar.plain_implicit == 1:
+ plain_implicit = True
+ quoted_implicit = False
+ if self.parsed_event.data.scalar.quoted_implicit == 1:
+ quoted_implicit = True
+ if self.parsed_event.data.scalar.tag == NULL \
+ or (self.parsed_event.data.scalar.tag[0] == c'!'
+ and self.parsed_event.data.scalar.tag[1] == c'\0'):
+ tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit))
+ else:
+ tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag)
+ style = None
+ if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ style = u''
+ elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ style = u'\''
+ elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ style = u'"'
+ elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ style = u'|'
+ elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ style = u'>'
+ node = ScalarNode(tag, value, start_mark, end_mark, style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ yaml_event_delete(&self.parsed_event)
+ return node
+
+ cdef _compose_sequence_node(self, object anchor):
+ cdef int index
+ start_mark = Mark(self.stream_name,
+ self.parsed_event.start_mark.index,
+ self.parsed_event.start_mark.line,
+ self.parsed_event.start_mark.column,
+ None, None)
+ implicit = False
+ if self.parsed_event.data.sequence_start.implicit == 1:
+ implicit = True
+ if self.parsed_event.data.sequence_start.tag == NULL \
+ or (self.parsed_event.data.sequence_start.tag[0] == c'!'
+ and self.parsed_event.data.sequence_start.tag[1] == c'\0'):
+ tag = self.resolve(SequenceNode, None, implicit)
+ else:
+ tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag)
+ flow_style = None
+ if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ flow_style = True
+ elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ flow_style = False
+ value = []
+ node = SequenceNode(tag, value, start_mark, None, flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ yaml_event_delete(&self.parsed_event)
+ index = 0
+ self._parse_next_event()
+ while self.parsed_event.type != YAML_SEQUENCE_END_EVENT:
+ value.append(self._compose_node(node, index))
+ index = index+1
+ self._parse_next_event()
+ node.end_mark = Mark(self.stream_name,
+ self.parsed_event.end_mark.index,
+ self.parsed_event.end_mark.line,
+ self.parsed_event.end_mark.column,
+ None, None)
+ yaml_event_delete(&self.parsed_event)
+ return node
+
+ cdef _compose_mapping_node(self, object anchor):
+ start_mark = Mark(self.stream_name,
+ self.parsed_event.start_mark.index,
+ self.parsed_event.start_mark.line,
+ self.parsed_event.start_mark.column,
+ None, None)
+ implicit = False
+ if self.parsed_event.data.mapping_start.implicit == 1:
+ implicit = True
+ if self.parsed_event.data.mapping_start.tag == NULL \
+ or (self.parsed_event.data.mapping_start.tag[0] == c'!'
+ and self.parsed_event.data.mapping_start.tag[1] == c'\0'):
+ tag = self.resolve(MappingNode, None, implicit)
+ else:
+ tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag)
+ flow_style = None
+ if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ flow_style = True
+ elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ flow_style = False
+ value = []
+ node = MappingNode(tag, value, start_mark, None, flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ yaml_event_delete(&self.parsed_event)
+ self._parse_next_event()
+ while self.parsed_event.type != YAML_MAPPING_END_EVENT:
+ item_key = self._compose_node(node, None)
+ item_value = self._compose_node(node, item_key)
+ value.append((item_key, item_value))
+ self._parse_next_event()
+ node.end_mark = Mark(self.stream_name,
+ self.parsed_event.end_mark.index,
+ self.parsed_event.end_mark.line,
+ self.parsed_event.end_mark.column,
+ None, None)
+ yaml_event_delete(&self.parsed_event)
+ return node
+
+ cdef int _parse_next_event(self) except 0:
+ if self.parsed_event.type == YAML_NO_EVENT:
+ if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
+ error = self._parser_error()
+ raise error
+ return 1
+
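+# input_handler is the yaml_read_handler_t callback registered in
+# CParser.__init__ for file-like streams: it pulls up to `size` bytes from the
+# Python stream (UTF-8-encoding unicode data as needed), keeps any surplus in
+# parser.stream_cache, and copies the bytes into libyaml's buffer, recording
+# the count in read[0].  Returning 1 signals success; a raised exception
+# (return value 0) aborts the parse and propagates to the caller.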
+cdef int input_handler(void *data, char *buffer, size_t size, size_t *read) except 0:
+ cdef CParser parser
+ parser = <CParser>data
+ if parser.stream_cache is None:
+ value = parser.stream.read(size)
+ if PyUnicode_CheckExact(value) != 0:
+ value = PyUnicode_AsUTF8String(value)
+ parser.unicode_source = 1
+ if PyString_CheckExact(value) == 0:
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("a string value is expected")
+ else:
+ raise TypeError(u"a string value is expected")
+ parser.stream_cache = value
+ parser.stream_cache_pos = 0
+ parser.stream_cache_len = PyString_GET_SIZE(value)
+ if (parser.stream_cache_len - parser.stream_cache_pos) < size:
+ size = parser.stream_cache_len - parser.stream_cache_pos
+ if size > 0:
+ memcpy(buffer, PyString_AS_STRING(parser.stream_cache)
+ + parser.stream_cache_pos, size)
+ read[0] = size
+ parser.stream_cache_pos += size
+ if parser.stream_cache_pos == parser.stream_cache_len:
+ parser.stream_cache = None
+ return 1
+
+cdef class CEmitter:
+
+ cdef yaml_emitter_t emitter
+
+ cdef object stream
+
+ cdef int document_start_implicit
+ cdef int document_end_implicit
+ cdef object use_version
+ cdef object use_tags
+
+ cdef object serialized_nodes
+ cdef object anchors
+ cdef int last_alias_id
+ cdef int closed
+ cdef int dump_unicode
+ cdef object use_encoding
+
+ def __init__(self, stream, canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None, encoding=None,
+ explicit_start=None, explicit_end=None, version=None, tags=None):
+ if yaml_emitter_initialize(&self.emitter) == 0:
+ raise MemoryError
+ self.stream = stream
+ self.dump_unicode = 0
+ if PY_MAJOR_VERSION < 3:
+ if getattr3(stream, 'encoding', None):
+ self.dump_unicode = 1
+ else:
+ if hasattr(stream, u'encoding'):
+ self.dump_unicode = 1
+ self.use_encoding = encoding
+ yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
+ if canonical:
+ yaml_emitter_set_canonical(&self.emitter, 1)
+ if indent is not None:
+ yaml_emitter_set_indent(&self.emitter, indent)
+ if width is not None:
+ yaml_emitter_set_width(&self.emitter, width)
+ if allow_unicode:
+ yaml_emitter_set_unicode(&self.emitter, 1)
+ if line_break is not None:
+ if line_break == '\r':
+ yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
+ elif line_break == '\n':
+ yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK)
+ elif line_break == '\r\n':
+ yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK)
+ self.document_start_implicit = 1
+ if explicit_start:
+ self.document_start_implicit = 0
+ self.document_end_implicit = 1
+ if explicit_end:
+ self.document_end_implicit = 0
+ self.use_version = version
+ self.use_tags = tags
+ self.serialized_nodes = {}
+ self.anchors = {}
+ self.last_alias_id = 0
+ self.closed = -1
+
+ def __dealloc__(self):
+ yaml_emitter_delete(&self.emitter)
+
+ def dispose(self):
+ pass
+
+ cdef object _emitter_error(self):
+ if self.emitter.error == YAML_MEMORY_ERROR:
+ return MemoryError
+ elif self.emitter.error == YAML_EMITTER_ERROR:
+ if PY_MAJOR_VERSION < 3:
+ problem = self.emitter.problem
+ else:
+ problem = PyUnicode_FromString(self.emitter.problem)
+ return EmitterError(problem)
+ if PY_MAJOR_VERSION < 3:
+ raise ValueError("no emitter error")
+ else:
+ raise ValueError(u"no emitter error")
+
+ cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0:
+ cdef yaml_encoding_t encoding
+ cdef yaml_version_directive_t version_directive_value
+ cdef yaml_version_directive_t *version_directive
+ cdef yaml_tag_directive_t tag_directives_value[128]
+ cdef yaml_tag_directive_t *tag_directives_start
+ cdef yaml_tag_directive_t *tag_directives_end
+ cdef int implicit
+ cdef int plain_implicit
+ cdef int quoted_implicit
+ cdef char *anchor
+ cdef char *tag
+ cdef char *value
+ cdef int length
+ cdef yaml_scalar_style_t scalar_style
+ cdef yaml_sequence_style_t sequence_style
+ cdef yaml_mapping_style_t mapping_style
+ event_class = event_object.__class__
+ if event_class is StreamStartEvent:
+ encoding = YAML_UTF8_ENCODING
+ if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le':
+ encoding = YAML_UTF16LE_ENCODING
+ elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be':
+ encoding = YAML_UTF16BE_ENCODING
+ if event_object.encoding is None:
+ self.dump_unicode = 1
+ if self.dump_unicode == 1:
+ encoding = YAML_UTF8_ENCODING
+ yaml_stream_start_event_initialize(event, encoding)
+ elif event_class is StreamEndEvent:
+ yaml_stream_end_event_initialize(event)
+ elif event_class is DocumentStartEvent:
+ version_directive = NULL
+ if event_object.version:
+ version_directive_value.major = event_object.version[0]
+ version_directive_value.minor = event_object.version[1]
+ version_directive = &version_directive_value
+ tag_directives_start = NULL
+ tag_directives_end = NULL
+ if event_object.tags:
+ if len(event_object.tags) > 128:
+ if PY_MAJOR_VERSION < 3:
+ raise ValueError("too many tags")
+ else:
+ raise ValueError(u"too many tags")
+ tag_directives_start = tag_directives_value
+ tag_directives_end = tag_directives_value
+ cache = []
+ for handle in event_object.tags:
+ prefix = event_object.tags[handle]
+ if PyUnicode_CheckExact(handle):
+ handle = PyUnicode_AsUTF8String(handle)
+ cache.append(handle)
+ if not PyString_CheckExact(handle):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag handle must be a string")
+ else:
+ raise TypeError(u"tag handle must be a string")
+ tag_directives_end.handle = PyString_AS_STRING(handle)
+ if PyUnicode_CheckExact(prefix):
+ prefix = PyUnicode_AsUTF8String(prefix)
+ cache.append(prefix)
+ if not PyString_CheckExact(prefix):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag prefix must be a string")
+ else:
+ raise TypeError(u"tag prefix must be a string")
+ tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ tag_directives_end = tag_directives_end+1
+ implicit = 1
+ if event_object.explicit:
+ implicit = 0
+ if yaml_document_start_event_initialize(event, version_directive,
+ tag_directives_start, tag_directives_end, implicit) == 0:
+ raise MemoryError
+ elif event_class is DocumentEndEvent:
+ implicit = 1
+ if event_object.explicit:
+ implicit = 0
+ yaml_document_end_event_initialize(event, implicit)
+ elif event_class is AliasEvent:
+ anchor = NULL
+ anchor_object = event_object.anchor
+ if PyUnicode_CheckExact(anchor_object):
+ anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ if not PyString_CheckExact(anchor_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("anchor must be a string")
+ else:
+ raise TypeError(u"anchor must be a string")
+ anchor = PyString_AS_STRING(anchor_object)
+ if yaml_alias_event_initialize(event, anchor) == 0:
+ raise MemoryError
+ elif event_class is ScalarEvent:
+ anchor = NULL
+ anchor_object = event_object.anchor
+ if anchor_object is not None:
+ if PyUnicode_CheckExact(anchor_object):
+ anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ if not PyString_CheckExact(anchor_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("anchor must be a string")
+ else:
+ raise TypeError(u"anchor must be a string")
+ anchor = PyString_AS_STRING(anchor_object)
+ tag = NULL
+ tag_object = event_object.tag
+ if tag_object is not None:
+ if PyUnicode_CheckExact(tag_object):
+ tag_object = PyUnicode_AsUTF8String(tag_object)
+ if not PyString_CheckExact(tag_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag must be a string")
+ else:
+ raise TypeError(u"tag must be a string")
+ tag = PyString_AS_STRING(tag_object)
+ value_object = event_object.value
+ if PyUnicode_CheckExact(value_object):
+ value_object = PyUnicode_AsUTF8String(value_object)
+ if not PyString_CheckExact(value_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("value must be a string")
+ else:
+ raise TypeError(u"value must be a string")
+ value = PyString_AS_STRING(value_object)
+ length = PyString_GET_SIZE(value_object)
+ plain_implicit = 0
+ quoted_implicit = 0
+ if event_object.implicit is not None:
+ plain_implicit = event_object.implicit[0]
+ quoted_implicit = event_object.implicit[1]
+ style_object = event_object.style
+ scalar_style = YAML_PLAIN_SCALAR_STYLE
+ if style_object == "'" or style_object == u"'":
+ scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ elif style_object == "\"" or style_object == u"\"":
+ scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ elif style_object == "|" or style_object == u"|":
+ scalar_style = YAML_LITERAL_SCALAR_STYLE
+ elif style_object == ">" or style_object == u">":
+ scalar_style = YAML_FOLDED_SCALAR_STYLE
+ if yaml_scalar_event_initialize(event, anchor, tag, value, length,
+ plain_implicit, quoted_implicit, scalar_style) == 0:
+ raise MemoryError
+ elif event_class is SequenceStartEvent:
+ anchor = NULL
+ anchor_object = event_object.anchor
+ if anchor_object is not None:
+ if PyUnicode_CheckExact(anchor_object):
+ anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ if not PyString_CheckExact(anchor_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("anchor must be a string")
+ else:
+ raise TypeError(u"anchor must be a string")
+ anchor = PyString_AS_STRING(anchor_object)
+ tag = NULL
+ tag_object = event_object.tag
+ if tag_object is not None:
+ if PyUnicode_CheckExact(tag_object):
+ tag_object = PyUnicode_AsUTF8String(tag_object)
+ if not PyString_CheckExact(tag_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag must be a string")
+ else:
+ raise TypeError(u"tag must be a string")
+ tag = PyString_AS_STRING(tag_object)
+ implicit = 0
+ if event_object.implicit:
+ implicit = 1
+ sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ if event_object.flow_style:
+ sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ if yaml_sequence_start_event_initialize(event, anchor, tag,
+ implicit, sequence_style) == 0:
+ raise MemoryError
+ elif event_class is MappingStartEvent:
+ anchor = NULL
+ anchor_object = event_object.anchor
+ if anchor_object is not None:
+ if PyUnicode_CheckExact(anchor_object):
+ anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ if not PyString_CheckExact(anchor_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("anchor must be a string")
+ else:
+ raise TypeError(u"anchor must be a string")
+ anchor = PyString_AS_STRING(anchor_object)
+ tag = NULL
+ tag_object = event_object.tag
+ if tag_object is not None:
+ if PyUnicode_CheckExact(tag_object):
+ tag_object = PyUnicode_AsUTF8String(tag_object)
+ if not PyString_CheckExact(tag_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag must be a string")
+ else:
+ raise TypeError(u"tag must be a string")
+ tag = PyString_AS_STRING(tag_object)
+ implicit = 0
+ if event_object.implicit:
+ implicit = 1
+ mapping_style = YAML_BLOCK_MAPPING_STYLE
+ if event_object.flow_style:
+ mapping_style = YAML_FLOW_MAPPING_STYLE
+ if yaml_mapping_start_event_initialize(event, anchor, tag,
+ implicit, mapping_style) == 0:
+ raise MemoryError
+ elif event_class is SequenceEndEvent:
+ yaml_sequence_end_event_initialize(event)
+ elif event_class is MappingEndEvent:
+ yaml_mapping_end_event_initialize(event)
+ else:
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("invalid event %s" % event_object)
+ else:
+ raise TypeError(u"invalid event %s" % event_object)
+ return 1
+
+ def emit(self, event_object):
+ cdef yaml_event_t event
+ self._object_to_event(event_object, &event)
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+
+ def open(self):
+ cdef yaml_event_t event
+ cdef yaml_encoding_t encoding
+ if self.closed == -1:
+ if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le':
+ encoding = YAML_UTF16LE_ENCODING
+ elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be':
+ encoding = YAML_UTF16BE_ENCODING
+ else:
+ encoding = YAML_UTF8_ENCODING
+ if self.use_encoding is None:
+ self.dump_unicode = 1
+ if self.dump_unicode == 1:
+ encoding = YAML_UTF8_ENCODING
+ yaml_stream_start_event_initialize(&event, encoding)
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ self.closed = 0
+ elif self.closed == 1:
+ if PY_MAJOR_VERSION < 3:
+ raise SerializerError("serializer is closed")
+ else:
+ raise SerializerError(u"serializer is closed")
+ else:
+ if PY_MAJOR_VERSION < 3:
+ raise SerializerError("serializer is already opened")
+ else:
+ raise SerializerError(u"serializer is already opened")
+
+ def close(self):
+ cdef yaml_event_t event
+ if self.closed == -1:
+ if PY_MAJOR_VERSION < 3:
+ raise SerializerError("serializer is not opened")
+ else:
+ raise SerializerError(u"serializer is not opened")
+ elif self.closed == 0:
+ yaml_stream_end_event_initialize(&event)
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ self.closed = 1
+
+ def serialize(self, node):
+ cdef yaml_event_t event
+ cdef yaml_version_directive_t version_directive_value
+ cdef yaml_version_directive_t *version_directive
+ cdef yaml_tag_directive_t tag_directives_value[128]
+ cdef yaml_tag_directive_t *tag_directives_start
+ cdef yaml_tag_directive_t *tag_directives_end
+ if self.closed == -1:
+ if PY_MAJOR_VERSION < 3:
+ raise SerializerError("serializer is not opened")
+ else:
+ raise SerializerError(u"serializer is not opened")
+ elif self.closed == 1:
+ if PY_MAJOR_VERSION < 3:
+ raise SerializerError("serializer is closed")
+ else:
+ raise SerializerError(u"serializer is closed")
+ cache = []
+ version_directive = NULL
+ if self.use_version:
+ version_directive_value.major = self.use_version[0]
+ version_directive_value.minor = self.use_version[1]
+ version_directive = &version_directive_value
+ tag_directives_start = NULL
+ tag_directives_end = NULL
+ if self.use_tags:
+ if len(self.use_tags) > 128:
+ if PY_MAJOR_VERSION < 3:
+ raise ValueError("too many tags")
+ else:
+ raise ValueError(u"too many tags")
+ tag_directives_start = tag_directives_value
+ tag_directives_end = tag_directives_value
+ for handle in self.use_tags:
+ prefix = self.use_tags[handle]
+ if PyUnicode_CheckExact(handle):
+ handle = PyUnicode_AsUTF8String(handle)
+ cache.append(handle)
+ if not PyString_CheckExact(handle):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag handle must be a string")
+ else:
+ raise TypeError(u"tag handle must be a string")
+ tag_directives_end.handle = PyString_AS_STRING(handle)
+ if PyUnicode_CheckExact(prefix):
+ prefix = PyUnicode_AsUTF8String(prefix)
+ cache.append(prefix)
+ if not PyString_CheckExact(prefix):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag prefix must be a string")
+ else:
+ raise TypeError(u"tag prefix must be a string")
+ tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ tag_directives_end = tag_directives_end+1
+ if yaml_document_start_event_initialize(&event, version_directive,
+ tag_directives_start, tag_directives_end,
+ self.document_start_implicit) == 0:
+ raise MemoryError
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ self._anchor_node(node)
+ self._serialize_node(node, None, None)
+ yaml_document_end_event_initialize(&event, self.document_end_implicit)
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ self.serialized_nodes = {}
+ self.anchors = {}
+ self.last_alias_id = 0
+
+ cdef int _anchor_node(self, object node) except 0:
+ if node in self.anchors:
+ if self.anchors[node] is None:
+ self.last_alias_id = self.last_alias_id+1
+ self.anchors[node] = u"id%03d" % self.last_alias_id
+ else:
+ self.anchors[node] = None
+ node_class = node.__class__
+ if node_class is SequenceNode:
+ for item in node.value:
+ self._anchor_node(item)
+ elif node_class is MappingNode:
+ for key, value in node.value:
+ self._anchor_node(key)
+ self._anchor_node(value)
+ return 1
+
+ cdef int _serialize_node(self, object node, object parent, object index) except 0:
+ cdef yaml_event_t event
+ cdef int implicit
+ cdef int plain_implicit
+ cdef int quoted_implicit
+ cdef char *anchor
+ cdef char *tag
+ cdef char *value
+ cdef int length
+ cdef int item_index
+ cdef yaml_scalar_style_t scalar_style
+ cdef yaml_sequence_style_t sequence_style
+ cdef yaml_mapping_style_t mapping_style
+ anchor_object = self.anchors[node]
+ anchor = NULL
+ if anchor_object is not None:
+ if PyUnicode_CheckExact(anchor_object):
+ anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ if not PyString_CheckExact(anchor_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("anchor must be a string")
+ else:
+ raise TypeError(u"anchor must be a string")
+ anchor = PyString_AS_STRING(anchor_object)
+ if node in self.serialized_nodes:
+ if yaml_alias_event_initialize(&event, anchor) == 0:
+ raise MemoryError
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ else:
+ node_class = node.__class__
+ self.serialized_nodes[node] = True
+ self.descend_resolver(parent, index)
+ if node_class is ScalarNode:
+ plain_implicit = 0
+ quoted_implicit = 0
+ tag_object = node.tag
+ if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
+ plain_implicit = 1
+ if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
+ quoted_implicit = 1
+ tag = NULL
+ if tag_object is not None:
+ if PyUnicode_CheckExact(tag_object):
+ tag_object = PyUnicode_AsUTF8String(tag_object)
+ if not PyString_CheckExact(tag_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag must be a string")
+ else:
+ raise TypeError(u"tag must be a string")
+ tag = PyString_AS_STRING(tag_object)
+ value_object = node.value
+ if PyUnicode_CheckExact(value_object):
+ value_object = PyUnicode_AsUTF8String(value_object)
+ if not PyString_CheckExact(value_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("value must be a string")
+ else:
+ raise TypeError(u"value must be a string")
+ value = PyString_AS_STRING(value_object)
+ length = PyString_GET_SIZE(value_object)
+ style_object = node.style
+ scalar_style = YAML_PLAIN_SCALAR_STYLE
+ if style_object == "'" or style_object == u"'":
+ scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ elif style_object == "\"" or style_object == u"\"":
+ scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ elif style_object == "|" or style_object == u"|":
+ scalar_style = YAML_LITERAL_SCALAR_STYLE
+ elif style_object == ">" or style_object == u">":
+ scalar_style = YAML_FOLDED_SCALAR_STYLE
+ if yaml_scalar_event_initialize(&event, anchor, tag, value, length,
+ plain_implicit, quoted_implicit, scalar_style) == 0:
+ raise MemoryError
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ elif node_class is SequenceNode:
+ implicit = 0
+ tag_object = node.tag
+ if self.resolve(SequenceNode, node.value, True) == tag_object:
+ implicit = 1
+ tag = NULL
+ if tag_object is not None:
+ if PyUnicode_CheckExact(tag_object):
+ tag_object = PyUnicode_AsUTF8String(tag_object)
+ if not PyString_CheckExact(tag_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag must be a string")
+ else:
+ raise TypeError(u"tag must be a string")
+ tag = PyString_AS_STRING(tag_object)
+ sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ if node.flow_style:
+ sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ if yaml_sequence_start_event_initialize(&event, anchor, tag,
+ implicit, sequence_style) == 0:
+ raise MemoryError
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ item_index = 0
+ for item in node.value:
+ self._serialize_node(item, node, item_index)
+ item_index = item_index+1
+ yaml_sequence_end_event_initialize(&event)
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ elif node_class is MappingNode:
+ implicit = 0
+ tag_object = node.tag
+ if self.resolve(MappingNode, node.value, True) == tag_object:
+ implicit = 1
+ tag = NULL
+ if tag_object is not None:
+ if PyUnicode_CheckExact(tag_object):
+ tag_object = PyUnicode_AsUTF8String(tag_object)
+ if not PyString_CheckExact(tag_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag must be a string")
+ else:
+ raise TypeError(u"tag must be a string")
+ tag = PyString_AS_STRING(tag_object)
+ mapping_style = YAML_BLOCK_MAPPING_STYLE
+ if node.flow_style:
+ mapping_style = YAML_FLOW_MAPPING_STYLE
+ if yaml_mapping_start_event_initialize(&event, anchor, tag,
+ implicit, mapping_style) == 0:
+ raise MemoryError
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ for item_key, item_value in node.value:
+ self._serialize_node(item_key, node, None)
+ self._serialize_node(item_value, node, item_key)
+ yaml_mapping_end_event_initialize(&event)
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ self.ascend_resolver()
+ return 1
+
+cdef int output_handler(void *data, char *buffer, size_t size) except 0:
+ cdef CEmitter emitter
+ emitter = <CEmitter>data
+ if emitter.dump_unicode == 0:
+ value = PyString_FromStringAndSize(buffer, size)
+ else:
+ value = PyUnicode_DecodeUTF8(buffer, size, 'strict')
+ emitter.stream.write(value)
+ return 1
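+
+# CParser and CEmitter are not used directly by applications; yaml/cyaml.py
+# combines them with the pure-Python resolver, constructor and representer to
+# form CLoader and CDumper.  Illustrative (hypothetical) round trip:
+#
+#     >>> import yaml
+#     >>> data = yaml.load("a: [1, 2]", Loader=yaml.CLoader)
+#     >>> data
+#     {'a': [1, 2]}
+#     >>> yaml.dump(data, Dumper=yaml.CDumper)
+#     'a:\n- 1\n- 2\n'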
+
diff --git a/third_party/python/PyYAML/lib/yaml/__init__.py b/third_party/python/PyYAML/lib/yaml/__init__.py
new file mode 100644
index 0000000000..211fc8665e
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/__init__.py
@@ -0,0 +1,431 @@
+
+from error import *
+
+from tokens import *
+from events import *
+from nodes import *
+
+from loader import *
+from dumper import *
+
+__version__ = '5.3.1'
+
+try:
+ from cyaml import *
+ __with_libyaml__ = True
+except ImportError:
+ __with_libyaml__ = False
+
+
+#------------------------------------------------------------------------------
+# Warnings control
+#------------------------------------------------------------------------------
+
+# 'Global' warnings state:
+_warnings_enabled = {
+ 'YAMLLoadWarning': True,
+}
+
+# Get or set global warnings' state
+def warnings(settings=None):
+ if settings is None:
+ return _warnings_enabled
+
+ if type(settings) is dict:
+ for key in settings:
+ if key in _warnings_enabled:
+ _warnings_enabled[key] = settings[key]
+
+# Warn when load() is called without Loader=...
+class YAMLLoadWarning(RuntimeWarning):
+ pass
+
+def load_warning(method):
+ if _warnings_enabled['YAMLLoadWarning'] is False:
+ return
+
+ import warnings
+
+ message = (
+ "calling yaml.%s() without Loader=... is deprecated, as the "
+ "default Loader is unsafe. Please read "
+ "https://msg.pyyaml.org/load for full details."
+ ) % method
+
+ warnings.warn(message, YAMLLoadWarning, stacklevel=3)
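+
+# Illustrative: the warning can be silenced globally (hypothetical session):
+#
+#     >>> import yaml
+#     >>> yaml.warnings({'YAMLLoadWarning': False})
+#     >>> yaml.load("a: 1")    # no YAMLLoadWarning is emitted
+#     {'a': 1}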
+
+#------------------------------------------------------------------------------
+def scan(stream, Loader=Loader):
+ """
+ Scan a YAML stream and produce scanning tokens.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_token():
+ yield loader.get_token()
+ finally:
+ loader.dispose()
+
+def parse(stream, Loader=Loader):
+ """
+ Parse a YAML stream and produce parsing events.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_event():
+ yield loader.get_event()
+ finally:
+ loader.dispose()
+
+def compose(stream, Loader=Loader):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding representation tree.
+ """
+ loader = Loader(stream)
+ try:
+ return loader.get_single_node()
+ finally:
+ loader.dispose()
+
+def compose_all(stream, Loader=Loader):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding representation trees.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_node():
+ yield loader.get_node()
+ finally:
+ loader.dispose()
+
+def load(stream, Loader=None):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+ """
+ if Loader is None:
+ load_warning('load')
+ Loader = FullLoader
+
+ loader = Loader(stream)
+ try:
+ return loader.get_single_data()
+ finally:
+ loader.dispose()
+
+def load_all(stream, Loader=None):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+ """
+ if Loader is None:
+ load_warning('load_all')
+ Loader = FullLoader
+
+ loader = Loader(stream)
+ try:
+ while loader.check_data():
+ yield loader.get_data()
+ finally:
+ loader.dispose()
+
+def full_load(stream):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+
+ Resolve all tags except those known to be
+ unsafe on untrusted input.
+ """
+ return load(stream, FullLoader)
+
+def full_load_all(stream):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+
+ Resolve all tags except those known to be
+ unsafe on untrusted input.
+ """
+ return load_all(stream, FullLoader)
+
+def safe_load(stream):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+
+ Resolve only basic YAML tags. This is known
+ to be safe for untrusted input.
+ """
+ return load(stream, SafeLoader)
+
+def safe_load_all(stream):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+
+ Resolve only basic YAML tags. This is known
+ to be safe for untrusted input.
+ """
+ return load_all(stream, SafeLoader)
+
+def unsafe_load(stream):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+
+ Resolve all tags, even those known to be
+ unsafe on untrusted input.
+ """
+ return load(stream, UnsafeLoader)
+
+def unsafe_load_all(stream):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+
+ Resolve all tags, even those known to be
+ unsafe on untrusted input.
+ """
+ return load_all(stream, UnsafeLoader)
+
+def emit(events, stream=None, Dumper=Dumper,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None):
+ """
+ Emit YAML parsing events into a stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ from StringIO import StringIO
+ stream = StringIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ try:
+ for event in events:
+ dumper.emit(event)
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def serialize_all(nodes, stream=None, Dumper=Dumper,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding='utf-8', explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ """
+ Serialize a sequence of representation trees into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ if encoding is None:
+ from StringIO import StringIO
+ else:
+ from cStringIO import StringIO
+ stream = StringIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break,
+ encoding=encoding, version=version, tags=tags,
+ explicit_start=explicit_start, explicit_end=explicit_end)
+ try:
+ dumper.open()
+ for node in nodes:
+ dumper.serialize(node)
+ dumper.close()
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def serialize(node, stream=None, Dumper=Dumper, **kwds):
+ """
+ Serialize a representation tree into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ return serialize_all([node], stream, Dumper=Dumper, **kwds)
+
+def dump_all(documents, stream=None, Dumper=Dumper,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding='utf-8', explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ """
+ Serialize a sequence of Python objects into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ if encoding is None:
+ from StringIO import StringIO
+ else:
+ from cStringIO import StringIO
+ stream = StringIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, default_style=default_style,
+ default_flow_style=default_flow_style,
+ canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break,
+ encoding=encoding, version=version, tags=tags,
+ explicit_start=explicit_start, explicit_end=explicit_end, sort_keys=sort_keys)
+ try:
+ dumper.open()
+ for data in documents:
+ dumper.represent(data)
+ dumper.close()
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def dump(data, stream=None, Dumper=Dumper, **kwds):
+ """
+ Serialize a Python object into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all([data], stream, Dumper=Dumper, **kwds)
+
+def safe_dump_all(documents, stream=None, **kwds):
+ """
+ Serialize a sequence of Python objects into a YAML stream.
+ Produce only basic YAML tags.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
+
+def safe_dump(data, stream=None, **kwds):
+ """
+ Serialize a Python object into a YAML stream.
+ Produce only basic YAML tags.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all([data], stream, Dumper=SafeDumper, **kwds)
+
+def add_implicit_resolver(tag, regexp, first=None,
+ Loader=None, Dumper=Dumper):
+ """
+ Add an implicit scalar detector.
+ If an implicit scalar value matches the given regexp,
+ the corresponding tag is assigned to the scalar.
+ first is a sequence of possible initial characters or None.
+ """
+ if Loader is None:
+ loader.Loader.add_implicit_resolver(tag, regexp, first)
+ loader.FullLoader.add_implicit_resolver(tag, regexp, first)
+ loader.UnsafeLoader.add_implicit_resolver(tag, regexp, first)
+ else:
+ Loader.add_implicit_resolver(tag, regexp, first)
+ Dumper.add_implicit_resolver(tag, regexp, first)
+
+def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=Dumper):
+ """
+ Add a path based resolver for the given tag.
+ A path is a list of keys that forms a path
+ to a node in the representation tree.
+ Keys can be string values, integers, or None.
+ """
+ if Loader is None:
+ loader.Loader.add_path_resolver(tag, path, kind)
+ loader.FullLoader.add_path_resolver(tag, path, kind)
+ loader.UnsafeLoader.add_path_resolver(tag, path, kind)
+ else:
+ Loader.add_path_resolver(tag, path, kind)
+ Dumper.add_path_resolver(tag, path, kind)
+
+def add_constructor(tag, constructor, Loader=None):
+ """
+ Add a constructor for the given tag.
+ Constructor is a function that accepts a Loader instance
+ and a node object and produces the corresponding Python object.
+ """
+ if Loader is None:
+ loader.Loader.add_constructor(tag, constructor)
+ loader.FullLoader.add_constructor(tag, constructor)
+ loader.UnsafeLoader.add_constructor(tag, constructor)
+ else:
+ Loader.add_constructor(tag, constructor)
+
+def add_multi_constructor(tag_prefix, multi_constructor, Loader=None):
+ """
+ Add a multi-constructor for the given tag prefix.
+ Multi-constructor is called for a node if its tag starts with tag_prefix.
+ Multi-constructor accepts a Loader instance, a tag suffix,
+ and a node object and produces the corresponding Python object.
+ """
+ if Loader is None:
+ loader.Loader.add_multi_constructor(tag_prefix, multi_constructor)
+ loader.FullLoader.add_multi_constructor(tag_prefix, multi_constructor)
+ loader.UnsafeLoader.add_multi_constructor(tag_prefix, multi_constructor)
+ else:
+ Loader.add_multi_constructor(tag_prefix, multi_constructor)
+
+def add_representer(data_type, representer, Dumper=Dumper):
+ """
+ Add a representer for the given type.
+ Representer is a function accepting a Dumper instance
+ and an instance of the given data type
+ and producing the corresponding representation node.
+ """
+ Dumper.add_representer(data_type, representer)
+
+def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
+ """
+ Add a representer for the given type.
+ Multi-representer is a function accepting a Dumper instance
+ and an instance of the given data type or subtype
+ and producing the corresponding representation node.
+ """
+ Dumper.add_multi_representer(data_type, multi_representer)
+
+class YAMLObjectMetaclass(type):
+ """
+ The metaclass for YAMLObject.
+ """
+ def __init__(cls, name, bases, kwds):
+ super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
+ if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
+ if isinstance(cls.yaml_loader, list):
+ for loader in cls.yaml_loader:
+ loader.add_constructor(cls.yaml_tag, cls.from_yaml)
+ else:
+ cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
+
+ cls.yaml_dumper.add_representer(cls, cls.to_yaml)
+
+class YAMLObject(object):
+ """
+ An object that can dump itself to a YAML stream
+ and load itself from a YAML stream.
+ """
+
+ __metaclass__ = YAMLObjectMetaclass
+ __slots__ = () # no direct instantiation, so allow immutable subclasses
+
+ yaml_loader = [Loader, FullLoader, UnsafeLoader]
+ yaml_dumper = Dumper
+
+ yaml_tag = None
+ yaml_flow_style = None
+
+ def from_yaml(cls, loader, node):
+ """
+ Convert a representation node to a Python object.
+ """
+ return loader.construct_yaml_object(node, cls)
+ from_yaml = classmethod(from_yaml)
+
+ def to_yaml(cls, dumper, data):
+ """
+ Convert a Python object to a representation node.
+ """
+ return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
+ flow_style=cls.yaml_flow_style)
+ to_yaml = classmethod(to_yaml)
+
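The module-level API above is what most callers touch: load/load_all warn when no Loader is given, the safe_/full_/unsafe_ variants pick a loader class, and YAMLObject wires a class's tag into the loaders and dumper through its metaclass. A short usage sketch under those rules; the Point class and !Point tag are illustrative only:

    import yaml

    class Point(yaml.YAMLObject):
        yaml_tag = u'!Point'
        yaml_loader = yaml.SafeLoader   # register the constructor on SafeLoader only

        def __init__(self, x, y):
            self.x = x
            self.y = y

    text = yaml.safe_dump({'a': 1, 'b': [2, 3]})   # only basic YAML tags
    data = yaml.safe_load(text)

    doc = yaml.dump(Point(1, 2))     # representer added by YAMLObjectMetaclass
    point = yaml.safe_load(doc)      # reconstructed through Point.from_yaml
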
diff --git a/third_party/python/PyYAML/lib/yaml/composer.py b/third_party/python/PyYAML/lib/yaml/composer.py
new file mode 100644
index 0000000000..df85ef653b
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/composer.py
@@ -0,0 +1,139 @@
+
+__all__ = ['Composer', 'ComposerError']
+
+from error import MarkedYAMLError
+from events import *
+from nodes import *
+
+class ComposerError(MarkedYAMLError):
+ pass
+
+class Composer(object):
+
+ def __init__(self):
+ self.anchors = {}
+
+ def check_node(self):
+ # Drop the STREAM-START event.
+ if self.check_event(StreamStartEvent):
+ self.get_event()
+
+        # Are there more documents available?
+ return not self.check_event(StreamEndEvent)
+
+ def get_node(self):
+ # Get the root node of the next document.
+ if not self.check_event(StreamEndEvent):
+ return self.compose_document()
+
+ def get_single_node(self):
+ # Drop the STREAM-START event.
+ self.get_event()
+
+ # Compose a document if the stream is not empty.
+ document = None
+ if not self.check_event(StreamEndEvent):
+ document = self.compose_document()
+
+ # Ensure that the stream contains no more documents.
+ if not self.check_event(StreamEndEvent):
+ event = self.get_event()
+ raise ComposerError("expected a single document in the stream",
+ document.start_mark, "but found another document",
+ event.start_mark)
+
+ # Drop the STREAM-END event.
+ self.get_event()
+
+ return document
+
+ def compose_document(self):
+ # Drop the DOCUMENT-START event.
+ self.get_event()
+
+ # Compose the root node.
+ node = self.compose_node(None, None)
+
+ # Drop the DOCUMENT-END event.
+ self.get_event()
+
+ self.anchors = {}
+ return node
+
+ def compose_node(self, parent, index):
+ if self.check_event(AliasEvent):
+ event = self.get_event()
+ anchor = event.anchor
+ if anchor not in self.anchors:
+ raise ComposerError(None, None, "found undefined alias %r"
+ % anchor.encode('utf-8'), event.start_mark)
+ return self.anchors[anchor]
+ event = self.peek_event()
+ anchor = event.anchor
+ if anchor is not None:
+ if anchor in self.anchors:
+ raise ComposerError("found duplicate anchor %r; first occurrence"
+ % anchor.encode('utf-8'), self.anchors[anchor].start_mark,
+ "second occurrence", event.start_mark)
+ self.descend_resolver(parent, index)
+ if self.check_event(ScalarEvent):
+ node = self.compose_scalar_node(anchor)
+ elif self.check_event(SequenceStartEvent):
+ node = self.compose_sequence_node(anchor)
+ elif self.check_event(MappingStartEvent):
+ node = self.compose_mapping_node(anchor)
+ self.ascend_resolver()
+ return node
+
+ def compose_scalar_node(self, anchor):
+ event = self.get_event()
+ tag = event.tag
+ if tag is None or tag == u'!':
+ tag = self.resolve(ScalarNode, event.value, event.implicit)
+ node = ScalarNode(tag, event.value,
+ event.start_mark, event.end_mark, style=event.style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ return node
+
+ def compose_sequence_node(self, anchor):
+ start_event = self.get_event()
+ tag = start_event.tag
+ if tag is None or tag == u'!':
+ tag = self.resolve(SequenceNode, None, start_event.implicit)
+ node = SequenceNode(tag, [],
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ index = 0
+ while not self.check_event(SequenceEndEvent):
+ node.value.append(self.compose_node(node, index))
+ index += 1
+ end_event = self.get_event()
+ node.end_mark = end_event.end_mark
+ return node
+
+ def compose_mapping_node(self, anchor):
+ start_event = self.get_event()
+ tag = start_event.tag
+ if tag is None or tag == u'!':
+ tag = self.resolve(MappingNode, None, start_event.implicit)
+ node = MappingNode(tag, [],
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ while not self.check_event(MappingEndEvent):
+ #key_event = self.peek_event()
+ item_key = self.compose_node(node, None)
+ #if item_key in node.value:
+ # raise ComposerError("while composing a mapping", start_event.start_mark,
+ # "found duplicate key", key_event.start_mark)
+ item_value = self.compose_node(node, item_key)
+ #node.value[item_key] = item_value
+ node.value.append((item_key, item_value))
+ end_event = self.get_event()
+ node.end_mark = end_event.end_mark
+ return node
+
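The Composer turns the parser's event stream into a tree of ScalarNode/SequenceNode/MappingNode objects, resolving anchors and aliases along the way; yaml.compose exposes that tree directly. A quick inspection sketch (the document text is illustrative):

    import yaml

    node = yaml.compose("anchor: &a [1, 2]\nalias: *a\n", Loader=yaml.SafeLoader)
    print(type(node).__name__)               # MappingNode
    for key_node, value_node in node.value:
        # the alias entry points at the same SequenceNode object as the anchor
        print("%s -> %s" % (key_node.value, value_node.tag))
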
diff --git a/third_party/python/PyYAML/lib/yaml/constructor.py b/third_party/python/PyYAML/lib/yaml/constructor.py
new file mode 100644
index 0000000000..794681cb7f
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/constructor.py
@@ -0,0 +1,760 @@
+
+__all__ = [
+ 'BaseConstructor',
+ 'SafeConstructor',
+ 'FullConstructor',
+ 'UnsafeConstructor',
+ 'Constructor',
+ 'ConstructorError'
+]
+
+from error import *
+from nodes import *
+
+import datetime
+
+import binascii, re, sys, types
+
+class ConstructorError(MarkedYAMLError):
+ pass
+
+
+class timezone(datetime.tzinfo):
+ def __init__(self, offset):
+ self._offset = offset
+ seconds = abs(offset).total_seconds()
+ self._name = 'UTC%s%02d:%02d' % (
+ '-' if offset.days < 0 else '+',
+ seconds // 3600,
+ seconds % 3600 // 60
+ )
+
+ def tzname(self, dt=None):
+ return self._name
+
+ def utcoffset(self, dt=None):
+ return self._offset
+
+ def dst(self, dt=None):
+ return datetime.timedelta(0)
+
+ __repr__ = __str__ = tzname
+
+
+class BaseConstructor(object):
+
+ yaml_constructors = {}
+ yaml_multi_constructors = {}
+
+ def __init__(self):
+ self.constructed_objects = {}
+ self.recursive_objects = {}
+ self.state_generators = []
+ self.deep_construct = False
+
+ def check_data(self):
+        # Are there more documents available?
+ return self.check_node()
+
+ def check_state_key(self, key):
+ """Block special attributes/methods from being set in a newly created
+ object, to prevent user-controlled methods from being called during
+ deserialization"""
+ if self.get_state_keys_blacklist_regexp().match(key):
+ raise ConstructorError(None, None,
+ "blacklisted key '%s' in instance state found" % (key,), None)
+
+ def get_data(self):
+ # Construct and return the next document.
+ if self.check_node():
+ return self.construct_document(self.get_node())
+
+ def get_single_data(self):
+ # Ensure that the stream contains a single document and construct it.
+ node = self.get_single_node()
+ if node is not None:
+ return self.construct_document(node)
+ return None
+
+ def construct_document(self, node):
+ data = self.construct_object(node)
+ while self.state_generators:
+ state_generators = self.state_generators
+ self.state_generators = []
+ for generator in state_generators:
+ for dummy in generator:
+ pass
+ self.constructed_objects = {}
+ self.recursive_objects = {}
+ self.deep_construct = False
+ return data
+
+ def construct_object(self, node, deep=False):
+ if node in self.constructed_objects:
+ return self.constructed_objects[node]
+ if deep:
+ old_deep = self.deep_construct
+ self.deep_construct = True
+ if node in self.recursive_objects:
+ raise ConstructorError(None, None,
+ "found unconstructable recursive node", node.start_mark)
+ self.recursive_objects[node] = None
+ constructor = None
+ tag_suffix = None
+ if node.tag in self.yaml_constructors:
+ constructor = self.yaml_constructors[node.tag]
+ else:
+ for tag_prefix in self.yaml_multi_constructors:
+ if tag_prefix is not None and node.tag.startswith(tag_prefix):
+ tag_suffix = node.tag[len(tag_prefix):]
+ constructor = self.yaml_multi_constructors[tag_prefix]
+ break
+ else:
+ if None in self.yaml_multi_constructors:
+ tag_suffix = node.tag
+ constructor = self.yaml_multi_constructors[None]
+ elif None in self.yaml_constructors:
+ constructor = self.yaml_constructors[None]
+ elif isinstance(node, ScalarNode):
+ constructor = self.__class__.construct_scalar
+ elif isinstance(node, SequenceNode):
+ constructor = self.__class__.construct_sequence
+ elif isinstance(node, MappingNode):
+ constructor = self.__class__.construct_mapping
+ if tag_suffix is None:
+ data = constructor(self, node)
+ else:
+ data = constructor(self, tag_suffix, node)
+ if isinstance(data, types.GeneratorType):
+ generator = data
+ data = generator.next()
+ if self.deep_construct:
+ for dummy in generator:
+ pass
+ else:
+ self.state_generators.append(generator)
+ self.constructed_objects[node] = data
+ del self.recursive_objects[node]
+ if deep:
+ self.deep_construct = old_deep
+ return data
+
+ def construct_scalar(self, node):
+ if not isinstance(node, ScalarNode):
+ raise ConstructorError(None, None,
+ "expected a scalar node, but found %s" % node.id,
+ node.start_mark)
+ return node.value
+
+ def construct_sequence(self, node, deep=False):
+ if not isinstance(node, SequenceNode):
+ raise ConstructorError(None, None,
+ "expected a sequence node, but found %s" % node.id,
+ node.start_mark)
+ return [self.construct_object(child, deep=deep)
+ for child in node.value]
+
+ def construct_mapping(self, node, deep=False):
+ if not isinstance(node, MappingNode):
+ raise ConstructorError(None, None,
+ "expected a mapping node, but found %s" % node.id,
+ node.start_mark)
+ mapping = {}
+ for key_node, value_node in node.value:
+ key = self.construct_object(key_node, deep=deep)
+ try:
+ hash(key)
+ except TypeError, exc:
+ raise ConstructorError("while constructing a mapping", node.start_mark,
+ "found unacceptable key (%s)" % exc, key_node.start_mark)
+ value = self.construct_object(value_node, deep=deep)
+ mapping[key] = value
+ return mapping
+
+ def construct_pairs(self, node, deep=False):
+ if not isinstance(node, MappingNode):
+ raise ConstructorError(None, None,
+ "expected a mapping node, but found %s" % node.id,
+ node.start_mark)
+ pairs = []
+ for key_node, value_node in node.value:
+ key = self.construct_object(key_node, deep=deep)
+ value = self.construct_object(value_node, deep=deep)
+ pairs.append((key, value))
+ return pairs
+
+ def add_constructor(cls, tag, constructor):
+ if not 'yaml_constructors' in cls.__dict__:
+ cls.yaml_constructors = cls.yaml_constructors.copy()
+ cls.yaml_constructors[tag] = constructor
+ add_constructor = classmethod(add_constructor)
+
+ def add_multi_constructor(cls, tag_prefix, multi_constructor):
+ if not 'yaml_multi_constructors' in cls.__dict__:
+ cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
+ cls.yaml_multi_constructors[tag_prefix] = multi_constructor
+ add_multi_constructor = classmethod(add_multi_constructor)
+
+class SafeConstructor(BaseConstructor):
+
+ def construct_scalar(self, node):
+ if isinstance(node, MappingNode):
+ for key_node, value_node in node.value:
+ if key_node.tag == u'tag:yaml.org,2002:value':
+ return self.construct_scalar(value_node)
+ return BaseConstructor.construct_scalar(self, node)
+
+ def flatten_mapping(self, node):
+ merge = []
+ index = 0
+ while index < len(node.value):
+ key_node, value_node = node.value[index]
+ if key_node.tag == u'tag:yaml.org,2002:merge':
+ del node.value[index]
+ if isinstance(value_node, MappingNode):
+ self.flatten_mapping(value_node)
+ merge.extend(value_node.value)
+ elif isinstance(value_node, SequenceNode):
+ submerge = []
+ for subnode in value_node.value:
+ if not isinstance(subnode, MappingNode):
+ raise ConstructorError("while constructing a mapping",
+ node.start_mark,
+ "expected a mapping for merging, but found %s"
+ % subnode.id, subnode.start_mark)
+ self.flatten_mapping(subnode)
+ submerge.append(subnode.value)
+ submerge.reverse()
+ for value in submerge:
+ merge.extend(value)
+ else:
+ raise ConstructorError("while constructing a mapping", node.start_mark,
+ "expected a mapping or list of mappings for merging, but found %s"
+ % value_node.id, value_node.start_mark)
+ elif key_node.tag == u'tag:yaml.org,2002:value':
+ key_node.tag = u'tag:yaml.org,2002:str'
+ index += 1
+ else:
+ index += 1
+ if merge:
+ node.value = merge + node.value
+
+ def construct_mapping(self, node, deep=False):
+ if isinstance(node, MappingNode):
+ self.flatten_mapping(node)
+ return BaseConstructor.construct_mapping(self, node, deep=deep)
+
+ def construct_yaml_null(self, node):
+ self.construct_scalar(node)
+ return None
+
+ bool_values = {
+ u'yes': True,
+ u'no': False,
+ u'true': True,
+ u'false': False,
+ u'on': True,
+ u'off': False,
+ }
+
+ def construct_yaml_bool(self, node):
+ value = self.construct_scalar(node)
+ return self.bool_values[value.lower()]
+
+ def construct_yaml_int(self, node):
+ value = str(self.construct_scalar(node))
+ value = value.replace('_', '')
+ sign = +1
+ if value[0] == '-':
+ sign = -1
+ if value[0] in '+-':
+ value = value[1:]
+ if value == '0':
+ return 0
+ elif value.startswith('0b'):
+ return sign*int(value[2:], 2)
+ elif value.startswith('0x'):
+ return sign*int(value[2:], 16)
+ elif value[0] == '0':
+ return sign*int(value, 8)
+ elif ':' in value:
+ digits = [int(part) for part in value.split(':')]
+ digits.reverse()
+ base = 1
+ value = 0
+ for digit in digits:
+ value += digit*base
+ base *= 60
+ return sign*value
+ else:
+ return sign*int(value)
+
+ inf_value = 1e300
+ while inf_value != inf_value*inf_value:
+ inf_value *= inf_value
+ nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99).
+
+ def construct_yaml_float(self, node):
+ value = str(self.construct_scalar(node))
+ value = value.replace('_', '').lower()
+ sign = +1
+ if value[0] == '-':
+ sign = -1
+ if value[0] in '+-':
+ value = value[1:]
+ if value == '.inf':
+ return sign*self.inf_value
+ elif value == '.nan':
+ return self.nan_value
+ elif ':' in value:
+ digits = [float(part) for part in value.split(':')]
+ digits.reverse()
+ base = 1
+ value = 0.0
+ for digit in digits:
+ value += digit*base
+ base *= 60
+ return sign*value
+ else:
+ return sign*float(value)
+
+ def construct_yaml_binary(self, node):
+ value = self.construct_scalar(node)
+ try:
+ return str(value).decode('base64')
+ except (binascii.Error, UnicodeEncodeError), exc:
+ raise ConstructorError(None, None,
+ "failed to decode base64 data: %s" % exc, node.start_mark)
+
+ timestamp_regexp = re.compile(
+ ur'''^(?P<year>[0-9][0-9][0-9][0-9])
+ -(?P<month>[0-9][0-9]?)
+ -(?P<day>[0-9][0-9]?)
+ (?:(?:[Tt]|[ \t]+)
+ (?P<hour>[0-9][0-9]?)
+ :(?P<minute>[0-9][0-9])
+ :(?P<second>[0-9][0-9])
+ (?:\.(?P<fraction>[0-9]*))?
+ (?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
+ (?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)
+
+ def construct_yaml_timestamp(self, node):
+ value = self.construct_scalar(node)
+ match = self.timestamp_regexp.match(node.value)
+ values = match.groupdict()
+ year = int(values['year'])
+ month = int(values['month'])
+ day = int(values['day'])
+ if not values['hour']:
+ return datetime.date(year, month, day)
+ hour = int(values['hour'])
+ minute = int(values['minute'])
+ second = int(values['second'])
+ fraction = 0
+ tzinfo = None
+ if values['fraction']:
+ fraction = values['fraction'][:6]
+ while len(fraction) < 6:
+ fraction += '0'
+ fraction = int(fraction)
+ if values['tz_sign']:
+ tz_hour = int(values['tz_hour'])
+ tz_minute = int(values['tz_minute'] or 0)
+ delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
+ if values['tz_sign'] == '-':
+ delta = -delta
+ tzinfo = timezone(delta)
+ elif values['tz']:
+ tzinfo = timezone(datetime.timedelta(0))
+ return datetime.datetime(year, month, day, hour, minute, second, fraction,
+ tzinfo=tzinfo)
+
+ def construct_yaml_omap(self, node):
+ # Note: we do not check for duplicate keys, because it's too
+ # CPU-expensive.
+ omap = []
+ yield omap
+ if not isinstance(node, SequenceNode):
+ raise ConstructorError("while constructing an ordered map", node.start_mark,
+ "expected a sequence, but found %s" % node.id, node.start_mark)
+ for subnode in node.value:
+ if not isinstance(subnode, MappingNode):
+ raise ConstructorError("while constructing an ordered map", node.start_mark,
+ "expected a mapping of length 1, but found %s" % subnode.id,
+ subnode.start_mark)
+ if len(subnode.value) != 1:
+ raise ConstructorError("while constructing an ordered map", node.start_mark,
+ "expected a single mapping item, but found %d items" % len(subnode.value),
+ subnode.start_mark)
+ key_node, value_node = subnode.value[0]
+ key = self.construct_object(key_node)
+ value = self.construct_object(value_node)
+ omap.append((key, value))
+
+ def construct_yaml_pairs(self, node):
+ # Note: the same code as `construct_yaml_omap`.
+ pairs = []
+ yield pairs
+ if not isinstance(node, SequenceNode):
+ raise ConstructorError("while constructing pairs", node.start_mark,
+ "expected a sequence, but found %s" % node.id, node.start_mark)
+ for subnode in node.value:
+ if not isinstance(subnode, MappingNode):
+ raise ConstructorError("while constructing pairs", node.start_mark,
+ "expected a mapping of length 1, but found %s" % subnode.id,
+ subnode.start_mark)
+ if len(subnode.value) != 1:
+ raise ConstructorError("while constructing pairs", node.start_mark,
+ "expected a single mapping item, but found %d items" % len(subnode.value),
+ subnode.start_mark)
+ key_node, value_node = subnode.value[0]
+ key = self.construct_object(key_node)
+ value = self.construct_object(value_node)
+ pairs.append((key, value))
+
+ def construct_yaml_set(self, node):
+ data = set()
+ yield data
+ value = self.construct_mapping(node)
+ data.update(value)
+
+ def construct_yaml_str(self, node):
+ value = self.construct_scalar(node)
+ try:
+ return value.encode('ascii')
+ except UnicodeEncodeError:
+ return value
+
+ def construct_yaml_seq(self, node):
+ data = []
+ yield data
+ data.extend(self.construct_sequence(node))
+
+ def construct_yaml_map(self, node):
+ data = {}
+ yield data
+ value = self.construct_mapping(node)
+ data.update(value)
+
+ def construct_yaml_object(self, node, cls):
+ data = cls.__new__(cls)
+ yield data
+ if hasattr(data, '__setstate__'):
+ state = self.construct_mapping(node, deep=True)
+ data.__setstate__(state)
+ else:
+ state = self.construct_mapping(node)
+ data.__dict__.update(state)
+
+ def construct_undefined(self, node):
+ raise ConstructorError(None, None,
+ "could not determine a constructor for the tag %r" % node.tag.encode('utf-8'),
+ node.start_mark)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:null',
+ SafeConstructor.construct_yaml_null)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:bool',
+ SafeConstructor.construct_yaml_bool)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:int',
+ SafeConstructor.construct_yaml_int)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:float',
+ SafeConstructor.construct_yaml_float)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:binary',
+ SafeConstructor.construct_yaml_binary)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:timestamp',
+ SafeConstructor.construct_yaml_timestamp)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:omap',
+ SafeConstructor.construct_yaml_omap)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:pairs',
+ SafeConstructor.construct_yaml_pairs)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:set',
+ SafeConstructor.construct_yaml_set)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:str',
+ SafeConstructor.construct_yaml_str)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:seq',
+ SafeConstructor.construct_yaml_seq)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:map',
+ SafeConstructor.construct_yaml_map)
+
+SafeConstructor.add_constructor(None,
+ SafeConstructor.construct_undefined)
+
+class FullConstructor(SafeConstructor):
+    # 'extend' is blacklisted because it is used by
+    # construct_python_object_apply to add `listitems` to a newly generated
+    # Python instance
+ def get_state_keys_blacklist(self):
+ return ['^extend$', '^__.*__$']
+
+ def get_state_keys_blacklist_regexp(self):
+ if not hasattr(self, 'state_keys_blacklist_regexp'):
+ self.state_keys_blacklist_regexp = re.compile('(' + '|'.join(self.get_state_keys_blacklist()) + ')')
+ return self.state_keys_blacklist_regexp
+
+ def construct_python_str(self, node):
+ return self.construct_scalar(node).encode('utf-8')
+
+ def construct_python_unicode(self, node):
+ return self.construct_scalar(node)
+
+ def construct_python_long(self, node):
+ return long(self.construct_yaml_int(node))
+
+ def construct_python_complex(self, node):
+ return complex(self.construct_scalar(node))
+
+ def construct_python_tuple(self, node):
+ return tuple(self.construct_sequence(node))
+
+ def find_python_module(self, name, mark, unsafe=False):
+ if not name:
+ raise ConstructorError("while constructing a Python module", mark,
+ "expected non-empty name appended to the tag", mark)
+ if unsafe:
+ try:
+ __import__(name)
+ except ImportError, exc:
+ raise ConstructorError("while constructing a Python module", mark,
+ "cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark)
+ if name not in sys.modules:
+ raise ConstructorError("while constructing a Python module", mark,
+ "module %r is not imported" % name.encode('utf-8'), mark)
+ return sys.modules[name]
+
+ def find_python_name(self, name, mark, unsafe=False):
+ if not name:
+ raise ConstructorError("while constructing a Python object", mark,
+ "expected non-empty name appended to the tag", mark)
+ if u'.' in name:
+ module_name, object_name = name.rsplit('.', 1)
+ else:
+ module_name = '__builtin__'
+ object_name = name
+ if unsafe:
+ try:
+ __import__(module_name)
+ except ImportError, exc:
+ raise ConstructorError("while constructing a Python object", mark,
+ "cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark)
+ if module_name not in sys.modules:
+ raise ConstructorError("while constructing a Python object", mark,
+ "module %r is not imported" % module_name.encode('utf-8'), mark)
+ module = sys.modules[module_name]
+ if not hasattr(module, object_name):
+ raise ConstructorError("while constructing a Python object", mark,
+ "cannot find %r in the module %r" % (object_name.encode('utf-8'),
+ module.__name__), mark)
+ return getattr(module, object_name)
+
+ def construct_python_name(self, suffix, node):
+ value = self.construct_scalar(node)
+ if value:
+ raise ConstructorError("while constructing a Python name", node.start_mark,
+ "expected the empty value, but found %r" % value.encode('utf-8'),
+ node.start_mark)
+ return self.find_python_name(suffix, node.start_mark)
+
+ def construct_python_module(self, suffix, node):
+ value = self.construct_scalar(node)
+ if value:
+ raise ConstructorError("while constructing a Python module", node.start_mark,
+ "expected the empty value, but found %r" % value.encode('utf-8'),
+ node.start_mark)
+ return self.find_python_module(suffix, node.start_mark)
+
+ class classobj: pass
+
+ def make_python_instance(self, suffix, node,
+ args=None, kwds=None, newobj=False, unsafe=False):
+ if not args:
+ args = []
+ if not kwds:
+ kwds = {}
+ cls = self.find_python_name(suffix, node.start_mark)
+ if not (unsafe or isinstance(cls, type) or isinstance(cls, type(self.classobj))):
+ raise ConstructorError("while constructing a Python instance", node.start_mark,
+ "expected a class, but found %r" % type(cls),
+ node.start_mark)
+ if newobj and isinstance(cls, type(self.classobj)) \
+ and not args and not kwds:
+ instance = self.classobj()
+ instance.__class__ = cls
+ return instance
+ elif newobj and isinstance(cls, type):
+ return cls.__new__(cls, *args, **kwds)
+ else:
+ return cls(*args, **kwds)
+
+ def set_python_instance_state(self, instance, state, unsafe=False):
+ if hasattr(instance, '__setstate__'):
+ instance.__setstate__(state)
+ else:
+ slotstate = {}
+ if isinstance(state, tuple) and len(state) == 2:
+ state, slotstate = state
+ if hasattr(instance, '__dict__'):
+ if not unsafe and state:
+ for key in state.keys():
+ self.check_state_key(key)
+ instance.__dict__.update(state)
+ elif state:
+ slotstate.update(state)
+ for key, value in slotstate.items():
+ if not unsafe:
+ self.check_state_key(key)
+ setattr(instance, key, value)
+
+ def construct_python_object(self, suffix, node):
+ # Format:
+ # !!python/object:module.name { ... state ... }
+ instance = self.make_python_instance(suffix, node, newobj=True)
+ yield instance
+ deep = hasattr(instance, '__setstate__')
+ state = self.construct_mapping(node, deep=deep)
+ self.set_python_instance_state(instance, state)
+
+ def construct_python_object_apply(self, suffix, node, newobj=False):
+ # Format:
+ # !!python/object/apply # (or !!python/object/new)
+ # args: [ ... arguments ... ]
+ # kwds: { ... keywords ... }
+ # state: ... state ...
+ # listitems: [ ... listitems ... ]
+ # dictitems: { ... dictitems ... }
+ # or short format:
+ # !!python/object/apply [ ... arguments ... ]
+        # The difference between !!python/object/apply and !!python/object/new
+        # is how an object is created; check make_python_instance for details.
+ if isinstance(node, SequenceNode):
+ args = self.construct_sequence(node, deep=True)
+ kwds = {}
+ state = {}
+ listitems = []
+ dictitems = {}
+ else:
+ value = self.construct_mapping(node, deep=True)
+ args = value.get('args', [])
+ kwds = value.get('kwds', {})
+ state = value.get('state', {})
+ listitems = value.get('listitems', [])
+ dictitems = value.get('dictitems', {})
+ instance = self.make_python_instance(suffix, node, args, kwds, newobj)
+ if state:
+ self.set_python_instance_state(instance, state)
+ if listitems:
+ instance.extend(listitems)
+ if dictitems:
+ for key in dictitems:
+ instance[key] = dictitems[key]
+ return instance
+
+ def construct_python_object_new(self, suffix, node):
+ return self.construct_python_object_apply(suffix, node, newobj=True)
+
+FullConstructor.add_constructor(
+ u'tag:yaml.org,2002:python/none',
+ FullConstructor.construct_yaml_null)
+
+FullConstructor.add_constructor(
+ u'tag:yaml.org,2002:python/bool',
+ FullConstructor.construct_yaml_bool)
+
+FullConstructor.add_constructor(
+ u'tag:yaml.org,2002:python/str',
+ FullConstructor.construct_python_str)
+
+FullConstructor.add_constructor(
+ u'tag:yaml.org,2002:python/unicode',
+ FullConstructor.construct_python_unicode)
+
+FullConstructor.add_constructor(
+ u'tag:yaml.org,2002:python/int',
+ FullConstructor.construct_yaml_int)
+
+FullConstructor.add_constructor(
+ u'tag:yaml.org,2002:python/long',
+ FullConstructor.construct_python_long)
+
+FullConstructor.add_constructor(
+ u'tag:yaml.org,2002:python/float',
+ FullConstructor.construct_yaml_float)
+
+FullConstructor.add_constructor(
+ u'tag:yaml.org,2002:python/complex',
+ FullConstructor.construct_python_complex)
+
+FullConstructor.add_constructor(
+ u'tag:yaml.org,2002:python/list',
+ FullConstructor.construct_yaml_seq)
+
+FullConstructor.add_constructor(
+ u'tag:yaml.org,2002:python/tuple',
+ FullConstructor.construct_python_tuple)
+
+FullConstructor.add_constructor(
+ u'tag:yaml.org,2002:python/dict',
+ FullConstructor.construct_yaml_map)
+
+FullConstructor.add_multi_constructor(
+ u'tag:yaml.org,2002:python/name:',
+ FullConstructor.construct_python_name)
+
+FullConstructor.add_multi_constructor(
+ u'tag:yaml.org,2002:python/module:',
+ FullConstructor.construct_python_module)
+
+FullConstructor.add_multi_constructor(
+ u'tag:yaml.org,2002:python/object:',
+ FullConstructor.construct_python_object)
+
+FullConstructor.add_multi_constructor(
+ u'tag:yaml.org,2002:python/object/new:',
+ FullConstructor.construct_python_object_new)
+
+class UnsafeConstructor(FullConstructor):
+
+ def find_python_module(self, name, mark):
+ return super(UnsafeConstructor, self).find_python_module(name, mark, unsafe=True)
+
+ def find_python_name(self, name, mark):
+ return super(UnsafeConstructor, self).find_python_name(name, mark, unsafe=True)
+
+ def make_python_instance(self, suffix, node, args=None, kwds=None, newobj=False):
+ return super(UnsafeConstructor, self).make_python_instance(
+ suffix, node, args, kwds, newobj, unsafe=True)
+
+ def set_python_instance_state(self, instance, state):
+ return super(UnsafeConstructor, self).set_python_instance_state(
+ instance, state, unsafe=True)
+
+UnsafeConstructor.add_multi_constructor(
+ u'tag:yaml.org,2002:python/object/apply:',
+ UnsafeConstructor.construct_python_object_apply)
+
+# Constructor is the same as UnsafeConstructor. Need to leave this in place in case
+# people have extended it directly.
+class Constructor(UnsafeConstructor):
+ pass
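The constructor layer is where tags become Python objects and where the safe/full/unsafe split is enforced: SafeConstructor only knows the standard tag:yaml.org,2002 tags, while FullConstructor and UnsafeConstructor add the python/* family. A sketch of registering an application tag on SafeLoader only; the !point tag and the tuple mapping are illustrative assumptions:

    import yaml

    def construct_point(loader, node):
        # "!point [x, y]" becomes an (x, y) tuple
        return tuple(loader.construct_sequence(node))

    yaml.add_constructor(u'!point', construct_point, Loader=yaml.SafeLoader)

    print(yaml.safe_load("corner: !point [3, 4]"))   # {'corner': (3, 4)}
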
diff --git a/third_party/python/PyYAML/lib/yaml/cyaml.py b/third_party/python/PyYAML/lib/yaml/cyaml.py
new file mode 100644
index 0000000000..ebb8959352
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/cyaml.py
@@ -0,0 +1,101 @@
+
+__all__ = [
+ 'CBaseLoader', 'CSafeLoader', 'CFullLoader', 'CUnsafeLoader', 'CLoader',
+ 'CBaseDumper', 'CSafeDumper', 'CDumper'
+]
+
+from _yaml import CParser, CEmitter
+
+from constructor import *
+
+from serializer import *
+from representer import *
+
+from resolver import *
+
+class CBaseLoader(CParser, BaseConstructor, BaseResolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ BaseConstructor.__init__(self)
+ BaseResolver.__init__(self)
+
+class CSafeLoader(CParser, SafeConstructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ SafeConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class CFullLoader(CParser, FullConstructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ FullConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class CUnsafeLoader(CParser, UnsafeConstructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ UnsafeConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class CLoader(CParser, Constructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ Constructor.__init__(self)
+ Resolver.__init__(self)
+
+class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style, sort_keys=sort_keys)
+ Resolver.__init__(self)
+
+class CSafeDumper(CEmitter, SafeRepresenter, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ SafeRepresenter.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style, sort_keys=sort_keys)
+ Resolver.__init__(self)
+
+class CDumper(CEmitter, Serializer, Representer, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style, sort_keys=sort_keys)
+ Resolver.__init__(self)
+
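cyaml.py only swaps the scanning/parsing and emitting layers for libyaml; construction, representation and resolving stay in the pure-Python classes mixed in above. The usual pattern for preferring the C classes when they were built, as a sketch:

    import yaml

    try:
        from yaml import CSafeLoader as SafeLoader, CSafeDumper as SafeDumper
    except ImportError:
        from yaml import SafeLoader, SafeDumper   # pure-Python fallback

    data = yaml.load("a: [1, 2, 3]", Loader=SafeLoader)
    text = yaml.dump(data, Dumper=SafeDumper)
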
diff --git a/third_party/python/PyYAML/lib/yaml/dumper.py b/third_party/python/PyYAML/lib/yaml/dumper.py
new file mode 100644
index 0000000000..f9cd49fda5
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/dumper.py
@@ -0,0 +1,62 @@
+
+__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']
+
+from emitter import *
+from serializer import *
+from representer import *
+from resolver import *
+
+class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ Emitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ Serializer.__init__(self, encoding=encoding,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style, sort_keys=sort_keys)
+ Resolver.__init__(self)
+
+class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ Emitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ Serializer.__init__(self, encoding=encoding,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ SafeRepresenter.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style, sort_keys=sort_keys)
+ Resolver.__init__(self)
+
+class Dumper(Emitter, Serializer, Representer, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ Emitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ Serializer.__init__(self, encoding=encoding,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style, sort_keys=sort_keys)
+ Resolver.__init__(self)
+
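Each Dumper above is a mixin stack: indent, width and line_break go to the Emitter, encoding and the explicit_start/explicit_end markers to the Serializer, and default_flow_style/sort_keys to the Representer. A sketch of extending SafeDumper with a representer for an application type; Point and the !point tag are illustrative assumptions:

    import yaml

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

    def represent_point(dumper, point):
        return dumper.represent_sequence(u'!point', [point.x, point.y])

    yaml.add_representer(Point, represent_point, Dumper=yaml.SafeDumper)

    print(yaml.dump({'corner': Point(3, 4)}, Dumper=yaml.SafeDumper,
                    default_flow_style=False))
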
diff --git a/third_party/python/PyYAML/lib/yaml/emitter.py b/third_party/python/PyYAML/lib/yaml/emitter.py
new file mode 100644
index 0000000000..23c25ca80a
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/emitter.py
@@ -0,0 +1,1144 @@
+
+# Emitter expects events obeying the following grammar:
+# stream ::= STREAM-START document* STREAM-END
+# document ::= DOCUMENT-START node DOCUMENT-END
+# node ::= SCALAR | sequence | mapping
+# sequence ::= SEQUENCE-START node* SEQUENCE-END
+# mapping ::= MAPPING-START (node node)* MAPPING-END
+
+__all__ = ['Emitter', 'EmitterError']
+
+import sys
+
+from error import YAMLError
+from events import *
+
+has_ucs4 = sys.maxunicode > 0xffff
+
+class EmitterError(YAMLError):
+ pass
+
+class ScalarAnalysis(object):
+ def __init__(self, scalar, empty, multiline,
+ allow_flow_plain, allow_block_plain,
+ allow_single_quoted, allow_double_quoted,
+ allow_block):
+ self.scalar = scalar
+ self.empty = empty
+ self.multiline = multiline
+ self.allow_flow_plain = allow_flow_plain
+ self.allow_block_plain = allow_block_plain
+ self.allow_single_quoted = allow_single_quoted
+ self.allow_double_quoted = allow_double_quoted
+ self.allow_block = allow_block
+
+class Emitter(object):
+
+ DEFAULT_TAG_PREFIXES = {
+ u'!' : u'!',
+ u'tag:yaml.org,2002:' : u'!!',
+ }
+
+ def __init__(self, stream, canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None):
+
+ # The stream should have the methods `write` and possibly `flush`.
+ self.stream = stream
+
+ # Encoding can be overridden by STREAM-START.
+ self.encoding = None
+
+ # Emitter is a state machine with a stack of states to handle nested
+ # structures.
+ self.states = []
+ self.state = self.expect_stream_start
+
+ # Current event and the event queue.
+ self.events = []
+ self.event = None
+
+ # The current indentation level and the stack of previous indents.
+ self.indents = []
+ self.indent = None
+
+ # Flow level.
+ self.flow_level = 0
+
+ # Contexts.
+ self.root_context = False
+ self.sequence_context = False
+ self.mapping_context = False
+ self.simple_key_context = False
+
+ # Characteristics of the last emitted character:
+ # - current position.
+ # - is it a whitespace?
+ # - is it an indention character
+ # (indentation space, '-', '?', or ':')?
+ self.line = 0
+ self.column = 0
+ self.whitespace = True
+ self.indention = True
+
+ # Whether the document requires an explicit document indicator
+ self.open_ended = False
+
+ # Formatting details.
+ self.canonical = canonical
+ self.allow_unicode = allow_unicode
+ self.best_indent = 2
+ if indent and 1 < indent < 10:
+ self.best_indent = indent
+ self.best_width = 80
+ if width and width > self.best_indent*2:
+ self.best_width = width
+ self.best_line_break = u'\n'
+ if line_break in [u'\r', u'\n', u'\r\n']:
+ self.best_line_break = line_break
+
+ # Tag prefixes.
+ self.tag_prefixes = None
+
+ # Prepared anchor and tag.
+ self.prepared_anchor = None
+ self.prepared_tag = None
+
+ # Scalar analysis and style.
+ self.analysis = None
+ self.style = None
+
+ def dispose(self):
+ # Reset the state attributes (to clear self-references)
+ self.states = []
+ self.state = None
+
+ def emit(self, event):
+ self.events.append(event)
+ while not self.need_more_events():
+ self.event = self.events.pop(0)
+ self.state()
+ self.event = None
+
+    # In some cases, we wait for the next few events before emitting.
+
+ def need_more_events(self):
+ if not self.events:
+ return True
+ event = self.events[0]
+ if isinstance(event, DocumentStartEvent):
+ return self.need_events(1)
+ elif isinstance(event, SequenceStartEvent):
+ return self.need_events(2)
+ elif isinstance(event, MappingStartEvent):
+ return self.need_events(3)
+ else:
+ return False
+
+ def need_events(self, count):
+ level = 0
+ for event in self.events[1:]:
+ if isinstance(event, (DocumentStartEvent, CollectionStartEvent)):
+ level += 1
+ elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)):
+ level -= 1
+ elif isinstance(event, StreamEndEvent):
+ level = -1
+ if level < 0:
+ return False
+ return (len(self.events) < count+1)
+
+ def increase_indent(self, flow=False, indentless=False):
+ self.indents.append(self.indent)
+ if self.indent is None:
+ if flow:
+ self.indent = self.best_indent
+ else:
+ self.indent = 0
+ elif not indentless:
+ self.indent += self.best_indent
+
+ # States.
+
+ # Stream handlers.
+
+ def expect_stream_start(self):
+ if isinstance(self.event, StreamStartEvent):
+ if self.event.encoding and not getattr(self.stream, 'encoding', None):
+ self.encoding = self.event.encoding
+ self.write_stream_start()
+ self.state = self.expect_first_document_start
+ else:
+ raise EmitterError("expected StreamStartEvent, but got %s"
+ % self.event)
+
+ def expect_nothing(self):
+ raise EmitterError("expected nothing, but got %s" % self.event)
+
+ # Document handlers.
+
+ def expect_first_document_start(self):
+ return self.expect_document_start(first=True)
+
+ def expect_document_start(self, first=False):
+ if isinstance(self.event, DocumentStartEvent):
+ if (self.event.version or self.event.tags) and self.open_ended:
+ self.write_indicator(u'...', True)
+ self.write_indent()
+ if self.event.version:
+ version_text = self.prepare_version(self.event.version)
+ self.write_version_directive(version_text)
+ self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy()
+ if self.event.tags:
+ handles = self.event.tags.keys()
+ handles.sort()
+ for handle in handles:
+ prefix = self.event.tags[handle]
+ self.tag_prefixes[prefix] = handle
+ handle_text = self.prepare_tag_handle(handle)
+ prefix_text = self.prepare_tag_prefix(prefix)
+ self.write_tag_directive(handle_text, prefix_text)
+ implicit = (first and not self.event.explicit and not self.canonical
+ and not self.event.version and not self.event.tags
+ and not self.check_empty_document())
+ if not implicit:
+ self.write_indent()
+ self.write_indicator(u'---', True)
+ if self.canonical:
+ self.write_indent()
+ self.state = self.expect_document_root
+ elif isinstance(self.event, StreamEndEvent):
+ if self.open_ended:
+ self.write_indicator(u'...', True)
+ self.write_indent()
+ self.write_stream_end()
+ self.state = self.expect_nothing
+ else:
+ raise EmitterError("expected DocumentStartEvent, but got %s"
+ % self.event)
+
+ def expect_document_end(self):
+ if isinstance(self.event, DocumentEndEvent):
+ self.write_indent()
+ if self.event.explicit:
+ self.write_indicator(u'...', True)
+ self.write_indent()
+ self.flush_stream()
+ self.state = self.expect_document_start
+ else:
+ raise EmitterError("expected DocumentEndEvent, but got %s"
+ % self.event)
+
+ def expect_document_root(self):
+ self.states.append(self.expect_document_end)
+ self.expect_node(root=True)
+
+ # Node handlers.
+
+ def expect_node(self, root=False, sequence=False, mapping=False,
+ simple_key=False):
+ self.root_context = root
+ self.sequence_context = sequence
+ self.mapping_context = mapping
+ self.simple_key_context = simple_key
+ if isinstance(self.event, AliasEvent):
+ self.expect_alias()
+ elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)):
+ self.process_anchor(u'&')
+ self.process_tag()
+ if isinstance(self.event, ScalarEvent):
+ self.expect_scalar()
+ elif isinstance(self.event, SequenceStartEvent):
+ if self.flow_level or self.canonical or self.event.flow_style \
+ or self.check_empty_sequence():
+ self.expect_flow_sequence()
+ else:
+ self.expect_block_sequence()
+ elif isinstance(self.event, MappingStartEvent):
+ if self.flow_level or self.canonical or self.event.flow_style \
+ or self.check_empty_mapping():
+ self.expect_flow_mapping()
+ else:
+ self.expect_block_mapping()
+ else:
+ raise EmitterError("expected NodeEvent, but got %s" % self.event)
+
+ def expect_alias(self):
+ if self.event.anchor is None:
+ raise EmitterError("anchor is not specified for alias")
+ self.process_anchor(u'*')
+ self.state = self.states.pop()
+
+ def expect_scalar(self):
+ self.increase_indent(flow=True)
+ self.process_scalar()
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+
+ # Flow sequence handlers.
+
+ def expect_flow_sequence(self):
+ self.write_indicator(u'[', True, whitespace=True)
+ self.flow_level += 1
+ self.increase_indent(flow=True)
+ self.state = self.expect_first_flow_sequence_item
+
+ def expect_first_flow_sequence_item(self):
+ if isinstance(self.event, SequenceEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ self.write_indicator(u']', False)
+ self.state = self.states.pop()
+ else:
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ self.states.append(self.expect_flow_sequence_item)
+ self.expect_node(sequence=True)
+
+ def expect_flow_sequence_item(self):
+ if isinstance(self.event, SequenceEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ if self.canonical:
+ self.write_indicator(u',', False)
+ self.write_indent()
+ self.write_indicator(u']', False)
+ self.state = self.states.pop()
+ else:
+ self.write_indicator(u',', False)
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ self.states.append(self.expect_flow_sequence_item)
+ self.expect_node(sequence=True)
+
+ # Flow mapping handlers.
+
+ def expect_flow_mapping(self):
+ self.write_indicator(u'{', True, whitespace=True)
+ self.flow_level += 1
+ self.increase_indent(flow=True)
+ self.state = self.expect_first_flow_mapping_key
+
+ def expect_first_flow_mapping_key(self):
+ if isinstance(self.event, MappingEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ self.write_indicator(u'}', False)
+ self.state = self.states.pop()
+ else:
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ if not self.canonical and self.check_simple_key():
+ self.states.append(self.expect_flow_mapping_simple_value)
+ self.expect_node(mapping=True, simple_key=True)
+ else:
+ self.write_indicator(u'?', True)
+ self.states.append(self.expect_flow_mapping_value)
+ self.expect_node(mapping=True)
+
+ def expect_flow_mapping_key(self):
+ if isinstance(self.event, MappingEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ if self.canonical:
+ self.write_indicator(u',', False)
+ self.write_indent()
+ self.write_indicator(u'}', False)
+ self.state = self.states.pop()
+ else:
+ self.write_indicator(u',', False)
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ if not self.canonical and self.check_simple_key():
+ self.states.append(self.expect_flow_mapping_simple_value)
+ self.expect_node(mapping=True, simple_key=True)
+ else:
+ self.write_indicator(u'?', True)
+ self.states.append(self.expect_flow_mapping_value)
+ self.expect_node(mapping=True)
+
+ def expect_flow_mapping_simple_value(self):
+ self.write_indicator(u':', False)
+ self.states.append(self.expect_flow_mapping_key)
+ self.expect_node(mapping=True)
+
+ def expect_flow_mapping_value(self):
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ self.write_indicator(u':', True)
+ self.states.append(self.expect_flow_mapping_key)
+ self.expect_node(mapping=True)
+
+ # Block sequence handlers.
+
+ def expect_block_sequence(self):
+ indentless = (self.mapping_context and not self.indention)
+ self.increase_indent(flow=False, indentless=indentless)
+ self.state = self.expect_first_block_sequence_item
+
+ def expect_first_block_sequence_item(self):
+ return self.expect_block_sequence_item(first=True)
+
+ def expect_block_sequence_item(self, first=False):
+ if not first and isinstance(self.event, SequenceEndEvent):
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+ else:
+ self.write_indent()
+ self.write_indicator(u'-', True, indention=True)
+ self.states.append(self.expect_block_sequence_item)
+ self.expect_node(sequence=True)
+
+ # Block mapping handlers.
+
+ def expect_block_mapping(self):
+ self.increase_indent(flow=False)
+ self.state = self.expect_first_block_mapping_key
+
+ def expect_first_block_mapping_key(self):
+ return self.expect_block_mapping_key(first=True)
+
+ def expect_block_mapping_key(self, first=False):
+ if not first and isinstance(self.event, MappingEndEvent):
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+ else:
+ self.write_indent()
+ if self.check_simple_key():
+ self.states.append(self.expect_block_mapping_simple_value)
+ self.expect_node(mapping=True, simple_key=True)
+ else:
+ self.write_indicator(u'?', True, indention=True)
+ self.states.append(self.expect_block_mapping_value)
+ self.expect_node(mapping=True)
+
+ def expect_block_mapping_simple_value(self):
+ self.write_indicator(u':', False)
+ self.states.append(self.expect_block_mapping_key)
+ self.expect_node(mapping=True)
+
+ def expect_block_mapping_value(self):
+ self.write_indent()
+ self.write_indicator(u':', True, indention=True)
+ self.states.append(self.expect_block_mapping_key)
+ self.expect_node(mapping=True)
+
+ # Checkers.
+
+ def check_empty_sequence(self):
+ return (isinstance(self.event, SequenceStartEvent) and self.events
+ and isinstance(self.events[0], SequenceEndEvent))
+
+ def check_empty_mapping(self):
+ return (isinstance(self.event, MappingStartEvent) and self.events
+ and isinstance(self.events[0], MappingEndEvent))
+
+ def check_empty_document(self):
+ if not isinstance(self.event, DocumentStartEvent) or not self.events:
+ return False
+ event = self.events[0]
+ return (isinstance(event, ScalarEvent) and event.anchor is None
+ and event.tag is None and event.implicit and event.value == u'')
+
+ def check_simple_key(self):
+ length = 0
+ if isinstance(self.event, NodeEvent) and self.event.anchor is not None:
+ if self.prepared_anchor is None:
+ self.prepared_anchor = self.prepare_anchor(self.event.anchor)
+ length += len(self.prepared_anchor)
+ if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \
+ and self.event.tag is not None:
+ if self.prepared_tag is None:
+ self.prepared_tag = self.prepare_tag(self.event.tag)
+ length += len(self.prepared_tag)
+ if isinstance(self.event, ScalarEvent):
+ if self.analysis is None:
+ self.analysis = self.analyze_scalar(self.event.value)
+ length += len(self.analysis.scalar)
+ return (length < 128 and (isinstance(self.event, AliasEvent)
+ or (isinstance(self.event, ScalarEvent)
+ and not self.analysis.empty and not self.analysis.multiline)
+ or self.check_empty_sequence() or self.check_empty_mapping()))
+
+ # Anchor, Tag, and Scalar processors.
+
+ def process_anchor(self, indicator):
+ if self.event.anchor is None:
+ self.prepared_anchor = None
+ return
+ if self.prepared_anchor is None:
+ self.prepared_anchor = self.prepare_anchor(self.event.anchor)
+ if self.prepared_anchor:
+ self.write_indicator(indicator+self.prepared_anchor, True)
+ self.prepared_anchor = None
+
+ def process_tag(self):
+ tag = self.event.tag
+ if isinstance(self.event, ScalarEvent):
+ if self.style is None:
+ self.style = self.choose_scalar_style()
+ if ((not self.canonical or tag is None) and
+ ((self.style == '' and self.event.implicit[0])
+ or (self.style != '' and self.event.implicit[1]))):
+ self.prepared_tag = None
+ return
+ if self.event.implicit[0] and tag is None:
+ tag = u'!'
+ self.prepared_tag = None
+ else:
+ if (not self.canonical or tag is None) and self.event.implicit:
+ self.prepared_tag = None
+ return
+ if tag is None:
+ raise EmitterError("tag is not specified")
+ if self.prepared_tag is None:
+ self.prepared_tag = self.prepare_tag(tag)
+ if self.prepared_tag:
+ self.write_indicator(self.prepared_tag, True)
+ self.prepared_tag = None
+
+ def choose_scalar_style(self):
+ if self.analysis is None:
+ self.analysis = self.analyze_scalar(self.event.value)
+ if self.event.style == '"' or self.canonical:
+ return '"'
+ if not self.event.style and self.event.implicit[0]:
+ if (not (self.simple_key_context and
+ (self.analysis.empty or self.analysis.multiline))
+ and (self.flow_level and self.analysis.allow_flow_plain
+ or (not self.flow_level and self.analysis.allow_block_plain))):
+ return ''
+ if self.event.style and self.event.style in '|>':
+ if (not self.flow_level and not self.simple_key_context
+ and self.analysis.allow_block):
+ return self.event.style
+ if not self.event.style or self.event.style == '\'':
+ if (self.analysis.allow_single_quoted and
+ not (self.simple_key_context and self.analysis.multiline)):
+ return '\''
+ return '"'
+
+ def process_scalar(self):
+ if self.analysis is None:
+ self.analysis = self.analyze_scalar(self.event.value)
+ if self.style is None:
+ self.style = self.choose_scalar_style()
+ split = (not self.simple_key_context)
+ #if self.analysis.multiline and split \
+ # and (not self.style or self.style in '\'\"'):
+ # self.write_indent()
+ if self.style == '"':
+ self.write_double_quoted(self.analysis.scalar, split)
+ elif self.style == '\'':
+ self.write_single_quoted(self.analysis.scalar, split)
+ elif self.style == '>':
+ self.write_folded(self.analysis.scalar)
+ elif self.style == '|':
+ self.write_literal(self.analysis.scalar)
+ else:
+ self.write_plain(self.analysis.scalar, split)
+ self.analysis = None
+ self.style = None
+
+ # Analyzers.
+
+ def prepare_version(self, version):
+ major, minor = version
+ if major != 1:
+ raise EmitterError("unsupported YAML version: %d.%d" % (major, minor))
+ return u'%d.%d' % (major, minor)
+
+ def prepare_tag_handle(self, handle):
+ if not handle:
+ raise EmitterError("tag handle must not be empty")
+ if handle[0] != u'!' or handle[-1] != u'!':
+ raise EmitterError("tag handle must start and end with '!': %r"
+ % (handle.encode('utf-8')))
+ for ch in handle[1:-1]:
+ if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-_'):
+ raise EmitterError("invalid character %r in the tag handle: %r"
+ % (ch.encode('utf-8'), handle.encode('utf-8')))
+ return handle
+
+ def prepare_tag_prefix(self, prefix):
+ if not prefix:
+ raise EmitterError("tag prefix must not be empty")
+ chunks = []
+ start = end = 0
+ if prefix[0] == u'!':
+ end = 1
+ while end < len(prefix):
+ ch = prefix[end]
+ if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-;/?!:@&=+$,_.~*\'()[]':
+ end += 1
+ else:
+ if start < end:
+ chunks.append(prefix[start:end])
+ start = end = end+1
+ data = ch.encode('utf-8')
+ for ch in data:
+ chunks.append(u'%%%02X' % ord(ch))
+ if start < end:
+ chunks.append(prefix[start:end])
+ return u''.join(chunks)
+
+ def prepare_tag(self, tag):
+ if not tag:
+ raise EmitterError("tag must not be empty")
+ if tag == u'!':
+ return tag
+ handle = None
+ suffix = tag
+ prefixes = self.tag_prefixes.keys()
+ prefixes.sort()
+ for prefix in prefixes:
+ if tag.startswith(prefix) \
+ and (prefix == u'!' or len(prefix) < len(tag)):
+ handle = self.tag_prefixes[prefix]
+ suffix = tag[len(prefix):]
+ chunks = []
+ start = end = 0
+ while end < len(suffix):
+ ch = suffix[end]
+ if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-;/?:@&=+$,_.~*\'()[]' \
+ or (ch == u'!' and handle != u'!'):
+ end += 1
+ else:
+ if start < end:
+ chunks.append(suffix[start:end])
+ start = end = end+1
+ data = ch.encode('utf-8')
+ for ch in data:
+ chunks.append(u'%%%02X' % ord(ch))
+ if start < end:
+ chunks.append(suffix[start:end])
+ suffix_text = u''.join(chunks)
+ if handle:
+ return u'%s%s' % (handle, suffix_text)
+ else:
+ return u'!<%s>' % suffix_text
+
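+    # A rough illustration of what prepare_tag produces (sketch; assumes the
+    # default tag prefixes {u'!': u'!', u'tag:yaml.org,2002:': u'!!'}):
+    #   u'tag:yaml.org,2002:str'  -> u'!!str'
+    #   u'!foo'                   -> u'!foo'
+    #   u'tag:example.com,2002:x' -> u'!<tag:example.com,2002:x>'
+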
+ def prepare_anchor(self, anchor):
+ if not anchor:
+ raise EmitterError("anchor must not be empty")
+ for ch in anchor:
+ if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-_'):
+ raise EmitterError("invalid character %r in the anchor: %r"
+ % (ch.encode('utf-8'), anchor.encode('utf-8')))
+ return anchor
+
+ def analyze_scalar(self, scalar):
+
+ # Empty scalar is a special case.
+ if not scalar:
+ return ScalarAnalysis(scalar=scalar, empty=True, multiline=False,
+ allow_flow_plain=False, allow_block_plain=True,
+ allow_single_quoted=True, allow_double_quoted=True,
+ allow_block=False)
+
+ # Indicators and special characters.
+ block_indicators = False
+ flow_indicators = False
+ line_breaks = False
+ special_characters = False
+
+ # Important whitespace combinations.
+ leading_space = False
+ leading_break = False
+ trailing_space = False
+ trailing_break = False
+ break_space = False
+ space_break = False
+
+ # Check document indicators.
+ if scalar.startswith(u'---') or scalar.startswith(u'...'):
+ block_indicators = True
+ flow_indicators = True
+
+ # First character or preceded by a whitespace.
+ preceded_by_whitespace = True
+
+ # Last character or followed by a whitespace.
+ followed_by_whitespace = (len(scalar) == 1 or
+ scalar[1] in u'\0 \t\r\n\x85\u2028\u2029')
+
+ # The previous character is a space.
+ previous_space = False
+
+ # The previous character is a break.
+ previous_break = False
+
+ index = 0
+ while index < len(scalar):
+ ch = scalar[index]
+
+ # Check for indicators.
+ if index == 0:
+ # Leading indicators are special characters.
+ if ch in u'#,[]{}&*!|>\'\"%@`':
+ flow_indicators = True
+ block_indicators = True
+ if ch in u'?:':
+ flow_indicators = True
+ if followed_by_whitespace:
+ block_indicators = True
+ if ch == u'-' and followed_by_whitespace:
+ flow_indicators = True
+ block_indicators = True
+ else:
+                # Some indicators cannot appear within a scalar either.
+ if ch in u',?[]{}':
+ flow_indicators = True
+ if ch == u':':
+ flow_indicators = True
+ if followed_by_whitespace:
+ block_indicators = True
+ if ch == u'#' and preceded_by_whitespace:
+ flow_indicators = True
+ block_indicators = True
+
+ # Check for line breaks, special, and unicode characters.
+ if ch in u'\n\x85\u2028\u2029':
+ line_breaks = True
+ if not (ch == u'\n' or u'\x20' <= ch <= u'\x7E'):
+ if (ch == u'\x85' or u'\xA0' <= ch <= u'\uD7FF'
+ or u'\uE000' <= ch <= u'\uFFFD'
+ or (u'\U00010000' <= ch < u'\U0010ffff')) and ch != u'\uFEFF':
+ unicode_characters = True
+ if not self.allow_unicode:
+ special_characters = True
+ else:
+ special_characters = True
+
+ # Detect important whitespace combinations.
+ if ch == u' ':
+ if index == 0:
+ leading_space = True
+ if index == len(scalar)-1:
+ trailing_space = True
+ if previous_break:
+ break_space = True
+ previous_space = True
+ previous_break = False
+ elif ch in u'\n\x85\u2028\u2029':
+ if index == 0:
+ leading_break = True
+ if index == len(scalar)-1:
+ trailing_break = True
+ if previous_space:
+ space_break = True
+ previous_space = False
+ previous_break = True
+ else:
+ previous_space = False
+ previous_break = False
+
+ # Prepare for the next character.
+ index += 1
+ preceded_by_whitespace = (ch in u'\0 \t\r\n\x85\u2028\u2029')
+ followed_by_whitespace = (index+1 >= len(scalar) or
+ scalar[index+1] in u'\0 \t\r\n\x85\u2028\u2029')
+
+ # Let's decide what styles are allowed.
+ allow_flow_plain = True
+ allow_block_plain = True
+ allow_single_quoted = True
+ allow_double_quoted = True
+ allow_block = True
+
+ # Leading and trailing whitespaces are bad for plain scalars.
+ if (leading_space or leading_break
+ or trailing_space or trailing_break):
+ allow_flow_plain = allow_block_plain = False
+
+ # We do not permit trailing spaces for block scalars.
+ if trailing_space:
+ allow_block = False
+
+ # Spaces at the beginning of a new line are only acceptable for block
+ # scalars.
+ if break_space:
+ allow_flow_plain = allow_block_plain = allow_single_quoted = False
+
+        # Spaces followed by breaks, as well as special characters, are only
+ # allowed for double quoted scalars.
+ if space_break or special_characters:
+ allow_flow_plain = allow_block_plain = \
+ allow_single_quoted = allow_block = False
+
+ # Although the plain scalar writer supports breaks, we never emit
+ # multiline plain scalars.
+ if line_breaks:
+ allow_flow_plain = allow_block_plain = False
+
+ # Flow indicators are forbidden for flow plain scalars.
+ if flow_indicators:
+ allow_flow_plain = False
+
+ # Block indicators are forbidden for block plain scalars.
+ if block_indicators:
+ allow_block_plain = False
+
+ return ScalarAnalysis(scalar=scalar,
+ empty=False, multiline=line_breaks,
+ allow_flow_plain=allow_flow_plain,
+ allow_block_plain=allow_block_plain,
+ allow_single_quoted=allow_single_quoted,
+ allow_double_quoted=allow_double_quoted,
+ allow_block=allow_block)
+
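+    # A rough illustration of analyze_scalar on sample values (sketch, not
+    # exhaustive):
+    #   u'hello'        -> every style is allowed
+    #   u'hello\nworld' -> contains line breaks, so neither plain style is allowed
+    #   u'a: b'         -> ': ' sets the indicator flags, so neither plain style
+    #                      is allowed
+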
+ # Writers.
+
+ def flush_stream(self):
+ if hasattr(self.stream, 'flush'):
+ self.stream.flush()
+
+ def write_stream_start(self):
+ # Write BOM if needed.
+ if self.encoding and self.encoding.startswith('utf-16'):
+ self.stream.write(u'\uFEFF'.encode(self.encoding))
+
+ def write_stream_end(self):
+ self.flush_stream()
+
+ def write_indicator(self, indicator, need_whitespace,
+ whitespace=False, indention=False):
+ if self.whitespace or not need_whitespace:
+ data = indicator
+ else:
+ data = u' '+indicator
+ self.whitespace = whitespace
+ self.indention = self.indention and indention
+ self.column += len(data)
+ self.open_ended = False
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+
+ def write_indent(self):
+ indent = self.indent or 0
+ if not self.indention or self.column > indent \
+ or (self.column == indent and not self.whitespace):
+ self.write_line_break()
+ if self.column < indent:
+ self.whitespace = True
+ data = u' '*(indent-self.column)
+ self.column = indent
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+
+ def write_line_break(self, data=None):
+ if data is None:
+ data = self.best_line_break
+ self.whitespace = True
+ self.indention = True
+ self.line += 1
+ self.column = 0
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+
+ def write_version_directive(self, version_text):
+ data = u'%%YAML %s' % version_text
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_line_break()
+
+ def write_tag_directive(self, handle_text, prefix_text):
+ data = u'%%TAG %s %s' % (handle_text, prefix_text)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_line_break()
+
+ # Scalar streams.
+
+ def write_single_quoted(self, text, split=True):
+ self.write_indicator(u'\'', True)
+ spaces = False
+ breaks = False
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if spaces:
+ if ch is None or ch != u' ':
+ if start+1 == end and self.column > self.best_width and split \
+ and start != 0 and end != len(text):
+ self.write_indent()
+ else:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ elif breaks:
+ if ch is None or ch not in u'\n\x85\u2028\u2029':
+ if text[start] == u'\n':
+ self.write_line_break()
+ for br in text[start:end]:
+ if br == u'\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ self.write_indent()
+ start = end
+ else:
+ if ch is None or ch in u' \n\x85\u2028\u2029' or ch == u'\'':
+ if start < end:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ if ch == u'\'':
+ data = u'\'\''
+ self.column += 2
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end + 1
+ if ch is not None:
+ spaces = (ch == u' ')
+ breaks = (ch in u'\n\x85\u2028\u2029')
+ end += 1
+ self.write_indicator(u'\'', False)
+
+ ESCAPE_REPLACEMENTS = {
+ u'\0': u'0',
+ u'\x07': u'a',
+ u'\x08': u'b',
+ u'\x09': u't',
+ u'\x0A': u'n',
+ u'\x0B': u'v',
+ u'\x0C': u'f',
+ u'\x0D': u'r',
+ u'\x1B': u'e',
+ u'\"': u'\"',
+ u'\\': u'\\',
+ u'\x85': u'N',
+ u'\xA0': u'_',
+ u'\u2028': u'L',
+ u'\u2029': u'P',
+ }
+
+ def write_double_quoted(self, text, split=True):
+ self.write_indicator(u'"', True)
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if ch is None or ch in u'"\\\x85\u2028\u2029\uFEFF' \
+ or not (u'\x20' <= ch <= u'\x7E'
+ or (self.allow_unicode
+ and (u'\xA0' <= ch <= u'\uD7FF'
+ or u'\uE000' <= ch <= u'\uFFFD'))):
+ if start < end:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ if ch is not None:
+ if ch in self.ESCAPE_REPLACEMENTS:
+ data = u'\\'+self.ESCAPE_REPLACEMENTS[ch]
+ elif ch <= u'\xFF':
+ data = u'\\x%02X' % ord(ch)
+ elif ch <= u'\uFFFF':
+ data = u'\\u%04X' % ord(ch)
+ else:
+ data = u'\\U%08X' % ord(ch)
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end+1
+ if 0 < end < len(text)-1 and (ch == u' ' or start >= end) \
+ and self.column+(end-start) > self.best_width and split:
+ data = text[start:end]+u'\\'
+ if start < end:
+ start = end
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_indent()
+ self.whitespace = False
+ self.indention = False
+ if text[start] == u' ':
+ data = u'\\'
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ end += 1
+ self.write_indicator(u'"', False)
+
+ def determine_block_hints(self, text):
+ hints = u''
+ if text:
+ if text[0] in u' \n\x85\u2028\u2029':
+ hints += unicode(self.best_indent)
+ if text[-1] not in u'\n\x85\u2028\u2029':
+ hints += u'-'
+ elif len(text) == 1 or text[-2] in u'\n\x85\u2028\u2029':
+ hints += u'+'
+ return hints
+
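+    # Rough examples of the block scalar header this returns, assuming
+    # best_indent == 2:
+    #   u'foo\n'   -> u''   (single trailing break, default clip chomping)
+    #   u'foo'     -> u'-'  (no trailing break, strip chomping)
+    #   u'foo\n\n' -> u'+'  (extra trailing breaks, keep chomping)
+    #   u' foo\n'  -> u'2'  (leading space forces an explicit indent hint)
+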
+ def write_folded(self, text):
+ hints = self.determine_block_hints(text)
+ self.write_indicator(u'>'+hints, True)
+ if hints[-1:] == u'+':
+ self.open_ended = True
+ self.write_line_break()
+ leading_space = True
+ spaces = False
+ breaks = True
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if breaks:
+ if ch is None or ch not in u'\n\x85\u2028\u2029':
+ if not leading_space and ch is not None and ch != u' ' \
+ and text[start] == u'\n':
+ self.write_line_break()
+ leading_space = (ch == u' ')
+ for br in text[start:end]:
+ if br == u'\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ if ch is not None:
+ self.write_indent()
+ start = end
+ elif spaces:
+ if ch != u' ':
+ if start+1 == end and self.column > self.best_width:
+ self.write_indent()
+ else:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ else:
+ if ch is None or ch in u' \n\x85\u2028\u2029':
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ if ch is None:
+ self.write_line_break()
+ start = end
+ if ch is not None:
+ breaks = (ch in u'\n\x85\u2028\u2029')
+ spaces = (ch == u' ')
+ end += 1
+
+ def write_literal(self, text):
+ hints = self.determine_block_hints(text)
+ self.write_indicator(u'|'+hints, True)
+ if hints[-1:] == u'+':
+ self.open_ended = True
+ self.write_line_break()
+ breaks = True
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if breaks:
+ if ch is None or ch not in u'\n\x85\u2028\u2029':
+ for br in text[start:end]:
+ if br == u'\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ if ch is not None:
+ self.write_indent()
+ start = end
+ else:
+ if ch is None or ch in u'\n\x85\u2028\u2029':
+ data = text[start:end]
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ if ch is None:
+ self.write_line_break()
+ start = end
+ if ch is not None:
+ breaks = (ch in u'\n\x85\u2028\u2029')
+ end += 1
+
+ def write_plain(self, text, split=True):
+ if self.root_context:
+ self.open_ended = True
+ if not text:
+ return
+ if not self.whitespace:
+ data = u' '
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.whitespace = False
+ self.indention = False
+ spaces = False
+ breaks = False
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if spaces:
+ if ch != u' ':
+ if start+1 == end and self.column > self.best_width and split:
+ self.write_indent()
+ self.whitespace = False
+ self.indention = False
+ else:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ elif breaks:
+ if ch not in u'\n\x85\u2028\u2029':
+ if text[start] == u'\n':
+ self.write_line_break()
+ for br in text[start:end]:
+ if br == u'\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ self.write_indent()
+ self.whitespace = False
+ self.indention = False
+ start = end
+ else:
+ if ch is None or ch in u' \n\x85\u2028\u2029':
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ if ch is not None:
+ spaces = (ch == u' ')
+ breaks = (ch in u'\n\x85\u2028\u2029')
+ end += 1
diff --git a/third_party/python/PyYAML/lib/yaml/error.py b/third_party/python/PyYAML/lib/yaml/error.py
new file mode 100644
index 0000000000..577686db5f
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/error.py
@@ -0,0 +1,75 @@
+
+__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']
+
+class Mark(object):
+
+ def __init__(self, name, index, line, column, buffer, pointer):
+ self.name = name
+ self.index = index
+ self.line = line
+ self.column = column
+ self.buffer = buffer
+ self.pointer = pointer
+
+ def get_snippet(self, indent=4, max_length=75):
+ if self.buffer is None:
+ return None
+ head = ''
+ start = self.pointer
+ while start > 0 and self.buffer[start-1] not in u'\0\r\n\x85\u2028\u2029':
+ start -= 1
+ if self.pointer-start > max_length/2-1:
+ head = ' ... '
+ start += 5
+ break
+ tail = ''
+ end = self.pointer
+ while end < len(self.buffer) and self.buffer[end] not in u'\0\r\n\x85\u2028\u2029':
+ end += 1
+ if end-self.pointer > max_length/2-1:
+ tail = ' ... '
+ end -= 5
+ break
+ snippet = self.buffer[start:end].encode('utf-8')
+ return ' '*indent + head + snippet + tail + '\n' \
+ + ' '*(indent+self.pointer-start+len(head)) + '^'
+
+ def __str__(self):
+ snippet = self.get_snippet()
+ where = " in \"%s\", line %d, column %d" \
+ % (self.name, self.line+1, self.column+1)
+ if snippet is not None:
+ where += ":\n"+snippet
+ return where
+
+class YAMLError(Exception):
+ pass
+
+class MarkedYAMLError(YAMLError):
+
+ def __init__(self, context=None, context_mark=None,
+ problem=None, problem_mark=None, note=None):
+ self.context = context
+ self.context_mark = context_mark
+ self.problem = problem
+ self.problem_mark = problem_mark
+ self.note = note
+
+ def __str__(self):
+ lines = []
+ if self.context is not None:
+ lines.append(self.context)
+ if self.context_mark is not None \
+ and (self.problem is None or self.problem_mark is None
+ or self.context_mark.name != self.problem_mark.name
+ or self.context_mark.line != self.problem_mark.line
+ or self.context_mark.column != self.problem_mark.column):
+ lines.append(str(self.context_mark))
+ if self.problem is not None:
+ lines.append(self.problem)
+ if self.problem_mark is not None:
+ lines.append(str(self.problem_mark))
+ if self.note is not None:
+ lines.append(self.note)
+ return '\n'.join(lines)
+
diff --git a/third_party/python/PyYAML/lib/yaml/events.py b/third_party/python/PyYAML/lib/yaml/events.py
new file mode 100644
index 0000000000..f79ad389cb
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/events.py
@@ -0,0 +1,86 @@
+
+# Abstract classes.
+
+class Event(object):
+ def __init__(self, start_mark=None, end_mark=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ def __repr__(self):
+ attributes = [key for key in ['anchor', 'tag', 'implicit', 'value']
+ if hasattr(self, key)]
+ arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
+ for key in attributes])
+ return '%s(%s)' % (self.__class__.__name__, arguments)
+
+class NodeEvent(Event):
+ def __init__(self, anchor, start_mark=None, end_mark=None):
+ self.anchor = anchor
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class CollectionStartEvent(NodeEvent):
+ def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
+ flow_style=None):
+ self.anchor = anchor
+ self.tag = tag
+ self.implicit = implicit
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.flow_style = flow_style
+
+class CollectionEndEvent(Event):
+ pass
+
+# Implementations.
+
+class StreamStartEvent(Event):
+ def __init__(self, start_mark=None, end_mark=None, encoding=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.encoding = encoding
+
+class StreamEndEvent(Event):
+ pass
+
+class DocumentStartEvent(Event):
+ def __init__(self, start_mark=None, end_mark=None,
+ explicit=None, version=None, tags=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.explicit = explicit
+ self.version = version
+ self.tags = tags
+
+class DocumentEndEvent(Event):
+ def __init__(self, start_mark=None, end_mark=None,
+ explicit=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.explicit = explicit
+
+class AliasEvent(NodeEvent):
+ pass
+
+class ScalarEvent(NodeEvent):
+ def __init__(self, anchor, tag, implicit, value,
+ start_mark=None, end_mark=None, style=None):
+ self.anchor = anchor
+ self.tag = tag
+ self.implicit = implicit
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.style = style
+
+class SequenceStartEvent(CollectionStartEvent):
+ pass
+
+class SequenceEndEvent(CollectionEndEvent):
+ pass
+
+class MappingStartEvent(CollectionStartEvent):
+ pass
+
+class MappingEndEvent(CollectionEndEvent):
+ pass
+
diff --git a/third_party/python/PyYAML/lib/yaml/loader.py b/third_party/python/PyYAML/lib/yaml/loader.py
new file mode 100644
index 0000000000..4d773c3cc1
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/loader.py
@@ -0,0 +1,63 @@
+
+__all__ = ['BaseLoader', 'FullLoader', 'SafeLoader', 'Loader', 'UnsafeLoader']
+
+from reader import *
+from scanner import *
+from parser import *
+from composer import *
+from constructor import *
+from resolver import *
+
+class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ BaseConstructor.__init__(self)
+ BaseResolver.__init__(self)
+
+class FullLoader(Reader, Scanner, Parser, Composer, FullConstructor, Resolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ FullConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ SafeConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ Constructor.__init__(self)
+ Resolver.__init__(self)
+
+# UnsafeLoader is the same as Loader (which is and was always unsafe on
+# untrusted input). Use of either Loader or UnsafeLoader should be rare, since
+# FullLoader should be able to load almost all YAML safely. Loader is left intact
+# to ensure backwards compatibility.
+class UnsafeLoader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ Constructor.__init__(self)
+ Resolver.__init__(self)
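+
+# A minimal usage sketch (illustrative only; assumes these modules are packaged
+# as the top-level `yaml` package):
+#
+#   import yaml
+#   data = yaml.load(stream, Loader=yaml.SafeLoader)  # untrusted input
+#   data = yaml.load(stream, Loader=yaml.FullLoader)  # most other documents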
diff --git a/third_party/python/PyYAML/lib/yaml/nodes.py b/third_party/python/PyYAML/lib/yaml/nodes.py
new file mode 100644
index 0000000000..c4f070c41e
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/nodes.py
@@ -0,0 +1,49 @@
+
+class Node(object):
+ def __init__(self, tag, value, start_mark, end_mark):
+ self.tag = tag
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ def __repr__(self):
+ value = self.value
+ #if isinstance(value, list):
+ # if len(value) == 0:
+ # value = '<empty>'
+ # elif len(value) == 1:
+ # value = '<1 item>'
+ # else:
+ # value = '<%d items>' % len(value)
+ #else:
+ # if len(value) > 75:
+ # value = repr(value[:70]+u' ... ')
+ # else:
+ # value = repr(value)
+ value = repr(value)
+ return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value)
+
+class ScalarNode(Node):
+ id = 'scalar'
+ def __init__(self, tag, value,
+ start_mark=None, end_mark=None, style=None):
+ self.tag = tag
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.style = style
+
+class CollectionNode(Node):
+ def __init__(self, tag, value,
+ start_mark=None, end_mark=None, flow_style=None):
+ self.tag = tag
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.flow_style = flow_style
+
+class SequenceNode(CollectionNode):
+ id = 'sequence'
+
+class MappingNode(CollectionNode):
+ id = 'mapping'
+
diff --git a/third_party/python/PyYAML/lib/yaml/parser.py b/third_party/python/PyYAML/lib/yaml/parser.py
new file mode 100644
index 0000000000..f9e3057f33
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/parser.py
@@ -0,0 +1,589 @@
+
+# The following YAML grammar is LL(1) and is parsed by a recursive descent
+# parser.
+#
+# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
+# implicit_document ::= block_node DOCUMENT-END*
+# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+# block_node_or_indentless_sequence ::=
+# ALIAS
+# | properties (block_content | indentless_block_sequence)?
+# | block_content
+# | indentless_block_sequence
+# block_node ::= ALIAS
+# | properties block_content?
+# | block_content
+# flow_node ::= ALIAS
+# | properties flow_content?
+# | flow_content
+# properties ::= TAG ANCHOR? | ANCHOR TAG?
+# block_content ::= block_collection | flow_collection | SCALAR
+# flow_content ::= flow_collection | SCALAR
+# block_collection ::= block_sequence | block_mapping
+# flow_collection ::= flow_sequence | flow_mapping
+# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
+# block_mapping ::= BLOCK-MAPPING_START
+# ((KEY block_node_or_indentless_sequence?)?
+# (VALUE block_node_or_indentless_sequence?)?)*
+# BLOCK-END
+# flow_sequence ::= FLOW-SEQUENCE-START
+# (flow_sequence_entry FLOW-ENTRY)*
+# flow_sequence_entry?
+# FLOW-SEQUENCE-END
+# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+# flow_mapping ::= FLOW-MAPPING-START
+# (flow_mapping_entry FLOW-ENTRY)*
+# flow_mapping_entry?
+# FLOW-MAPPING-END
+# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+#
+# FIRST sets:
+#
+# stream: { STREAM-START }
+# explicit_document: { DIRECTIVE DOCUMENT-START }
+# implicit_document: FIRST(block_node)
+# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
+# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# block_sequence: { BLOCK-SEQUENCE-START }
+# block_mapping: { BLOCK-MAPPING-START }
+# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
+# indentless_sequence: { ENTRY }
+# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# flow_sequence: { FLOW-SEQUENCE-START }
+# flow_mapping: { FLOW-MAPPING-START }
+# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
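+#
+# As a rough illustration, parsing the two-item document "- a\n- b\n" yields
+# the event stream (start/end marks omitted):
+#   StreamStartEvent, DocumentStartEvent,
+#   SequenceStartEvent, ScalarEvent(u'a'), ScalarEvent(u'b'), SequenceEndEvent,
+#   DocumentEndEvent, StreamEndEvent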
+
+__all__ = ['Parser', 'ParserError']
+
+from error import MarkedYAMLError
+from tokens import *
+from events import *
+from scanner import *
+
+class ParserError(MarkedYAMLError):
+ pass
+
+class Parser(object):
+    # Since writing a recursive descent parser is a straightforward task, we
+ # do not give many comments here.
+
+ DEFAULT_TAGS = {
+ u'!': u'!',
+ u'!!': u'tag:yaml.org,2002:',
+ }
+
+ def __init__(self):
+ self.current_event = None
+ self.yaml_version = None
+ self.tag_handles = {}
+ self.states = []
+ self.marks = []
+ self.state = self.parse_stream_start
+
+ def dispose(self):
+ # Reset the state attributes (to clear self-references)
+ self.states = []
+ self.state = None
+
+ def check_event(self, *choices):
+ # Check the type of the next event.
+ if self.current_event is None:
+ if self.state:
+ self.current_event = self.state()
+ if self.current_event is not None:
+ if not choices:
+ return True
+ for choice in choices:
+ if isinstance(self.current_event, choice):
+ return True
+ return False
+
+ def peek_event(self):
+ # Get the next event.
+ if self.current_event is None:
+ if self.state:
+ self.current_event = self.state()
+ return self.current_event
+
+ def get_event(self):
+ # Get the next event and proceed further.
+ if self.current_event is None:
+ if self.state:
+ self.current_event = self.state()
+ value = self.current_event
+ self.current_event = None
+ return value
+
+ # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
+ # implicit_document ::= block_node DOCUMENT-END*
+ # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+
+ def parse_stream_start(self):
+
+ # Parse the stream start.
+ token = self.get_token()
+ event = StreamStartEvent(token.start_mark, token.end_mark,
+ encoding=token.encoding)
+
+ # Prepare the next state.
+ self.state = self.parse_implicit_document_start
+
+ return event
+
+ def parse_implicit_document_start(self):
+
+ # Parse an implicit document.
+ if not self.check_token(DirectiveToken, DocumentStartToken,
+ StreamEndToken):
+ self.tag_handles = self.DEFAULT_TAGS
+ token = self.peek_token()
+ start_mark = end_mark = token.start_mark
+ event = DocumentStartEvent(start_mark, end_mark,
+ explicit=False)
+
+ # Prepare the next state.
+ self.states.append(self.parse_document_end)
+ self.state = self.parse_block_node
+
+ return event
+
+ else:
+ return self.parse_document_start()
+
+ def parse_document_start(self):
+
+ # Parse any extra document end indicators.
+ while self.check_token(DocumentEndToken):
+ self.get_token()
+
+ # Parse an explicit document.
+ if not self.check_token(StreamEndToken):
+ token = self.peek_token()
+ start_mark = token.start_mark
+ version, tags = self.process_directives()
+ if not self.check_token(DocumentStartToken):
+ raise ParserError(None, None,
+ "expected '<document start>', but found %r"
+ % self.peek_token().id,
+ self.peek_token().start_mark)
+ token = self.get_token()
+ end_mark = token.end_mark
+ event = DocumentStartEvent(start_mark, end_mark,
+ explicit=True, version=version, tags=tags)
+ self.states.append(self.parse_document_end)
+ self.state = self.parse_document_content
+ else:
+ # Parse the end of the stream.
+ token = self.get_token()
+ event = StreamEndEvent(token.start_mark, token.end_mark)
+ assert not self.states
+ assert not self.marks
+ self.state = None
+ return event
+
+ def parse_document_end(self):
+
+ # Parse the document end.
+ token = self.peek_token()
+ start_mark = end_mark = token.start_mark
+ explicit = False
+ if self.check_token(DocumentEndToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ explicit = True
+ event = DocumentEndEvent(start_mark, end_mark,
+ explicit=explicit)
+
+ # Prepare the next state.
+ self.state = self.parse_document_start
+
+ return event
+
+ def parse_document_content(self):
+ if self.check_token(DirectiveToken,
+ DocumentStartToken, DocumentEndToken, StreamEndToken):
+ event = self.process_empty_scalar(self.peek_token().start_mark)
+ self.state = self.states.pop()
+ return event
+ else:
+ return self.parse_block_node()
+
+ def process_directives(self):
+ self.yaml_version = None
+ self.tag_handles = {}
+ while self.check_token(DirectiveToken):
+ token = self.get_token()
+ if token.name == u'YAML':
+ if self.yaml_version is not None:
+ raise ParserError(None, None,
+ "found duplicate YAML directive", token.start_mark)
+ major, minor = token.value
+ if major != 1:
+ raise ParserError(None, None,
+ "found incompatible YAML document (version 1.* is required)",
+ token.start_mark)
+ self.yaml_version = token.value
+ elif token.name == u'TAG':
+ handle, prefix = token.value
+ if handle in self.tag_handles:
+ raise ParserError(None, None,
+ "duplicate tag handle %r" % handle.encode('utf-8'),
+ token.start_mark)
+ self.tag_handles[handle] = prefix
+ if self.tag_handles:
+ value = self.yaml_version, self.tag_handles.copy()
+ else:
+ value = self.yaml_version, None
+ for key in self.DEFAULT_TAGS:
+ if key not in self.tag_handles:
+ self.tag_handles[key] = self.DEFAULT_TAGS[key]
+ return value
+
+ # block_node_or_indentless_sequence ::= ALIAS
+ # | properties (block_content | indentless_block_sequence)?
+ # | block_content
+ # | indentless_block_sequence
+ # block_node ::= ALIAS
+ # | properties block_content?
+ # | block_content
+ # flow_node ::= ALIAS
+ # | properties flow_content?
+ # | flow_content
+ # properties ::= TAG ANCHOR? | ANCHOR TAG?
+ # block_content ::= block_collection | flow_collection | SCALAR
+ # flow_content ::= flow_collection | SCALAR
+ # block_collection ::= block_sequence | block_mapping
+ # flow_collection ::= flow_sequence | flow_mapping
+
+ def parse_block_node(self):
+ return self.parse_node(block=True)
+
+ def parse_flow_node(self):
+ return self.parse_node()
+
+ def parse_block_node_or_indentless_sequence(self):
+ return self.parse_node(block=True, indentless_sequence=True)
+
+ def parse_node(self, block=False, indentless_sequence=False):
+ if self.check_token(AliasToken):
+ token = self.get_token()
+ event = AliasEvent(token.value, token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ else:
+ anchor = None
+ tag = None
+ start_mark = end_mark = tag_mark = None
+ if self.check_token(AnchorToken):
+ token = self.get_token()
+ start_mark = token.start_mark
+ end_mark = token.end_mark
+ anchor = token.value
+ if self.check_token(TagToken):
+ token = self.get_token()
+ tag_mark = token.start_mark
+ end_mark = token.end_mark
+ tag = token.value
+ elif self.check_token(TagToken):
+ token = self.get_token()
+ start_mark = tag_mark = token.start_mark
+ end_mark = token.end_mark
+ tag = token.value
+ if self.check_token(AnchorToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ anchor = token.value
+ if tag is not None:
+ handle, suffix = tag
+ if handle is not None:
+ if handle not in self.tag_handles:
+ raise ParserError("while parsing a node", start_mark,
+ "found undefined tag handle %r" % handle.encode('utf-8'),
+ tag_mark)
+ tag = self.tag_handles[handle]+suffix
+ else:
+ tag = suffix
+ #if tag == u'!':
+ # raise ParserError("while parsing a node", start_mark,
+ # "found non-specific tag '!'", tag_mark,
+ # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
+ if start_mark is None:
+ start_mark = end_mark = self.peek_token().start_mark
+ event = None
+ implicit = (tag is None or tag == u'!')
+ if indentless_sequence and self.check_token(BlockEntryToken):
+ end_mark = self.peek_token().end_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark)
+ self.state = self.parse_indentless_sequence_entry
+ else:
+ if self.check_token(ScalarToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ if (token.plain and tag is None) or tag == u'!':
+ implicit = (True, False)
+ elif tag is None:
+ implicit = (False, True)
+ else:
+ implicit = (False, False)
+ event = ScalarEvent(anchor, tag, implicit, token.value,
+ start_mark, end_mark, style=token.style)
+ self.state = self.states.pop()
+ elif self.check_token(FlowSequenceStartToken):
+ end_mark = self.peek_token().end_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=True)
+ self.state = self.parse_flow_sequence_first_entry
+ elif self.check_token(FlowMappingStartToken):
+ end_mark = self.peek_token().end_mark
+ event = MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=True)
+ self.state = self.parse_flow_mapping_first_key
+ elif block and self.check_token(BlockSequenceStartToken):
+ end_mark = self.peek_token().start_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=False)
+ self.state = self.parse_block_sequence_first_entry
+ elif block and self.check_token(BlockMappingStartToken):
+ end_mark = self.peek_token().start_mark
+ event = MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=False)
+ self.state = self.parse_block_mapping_first_key
+ elif anchor is not None or tag is not None:
+ # Empty scalars are allowed even if a tag or an anchor is
+ # specified.
+ event = ScalarEvent(anchor, tag, (implicit, False), u'',
+ start_mark, end_mark)
+ self.state = self.states.pop()
+ else:
+ if block:
+ node = 'block'
+ else:
+ node = 'flow'
+ token = self.peek_token()
+ raise ParserError("while parsing a %s node" % node, start_mark,
+ "expected the node content, but found %r" % token.id,
+ token.start_mark)
+ return event
+
+ # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+
+ def parse_block_sequence_first_entry(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_block_sequence_entry()
+
+ def parse_block_sequence_entry(self):
+ if self.check_token(BlockEntryToken):
+ token = self.get_token()
+ if not self.check_token(BlockEntryToken, BlockEndToken):
+ self.states.append(self.parse_block_sequence_entry)
+ return self.parse_block_node()
+ else:
+ self.state = self.parse_block_sequence_entry
+ return self.process_empty_scalar(token.end_mark)
+ if not self.check_token(BlockEndToken):
+ token = self.peek_token()
+ raise ParserError("while parsing a block collection", self.marks[-1],
+ "expected <block end>, but found %r" % token.id, token.start_mark)
+ token = self.get_token()
+ event = SequenceEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ # indentless_sequence ::= (BLOCK-ENTRY block_node?)+
+
+ def parse_indentless_sequence_entry(self):
+ if self.check_token(BlockEntryToken):
+ token = self.get_token()
+ if not self.check_token(BlockEntryToken,
+ KeyToken, ValueToken, BlockEndToken):
+ self.states.append(self.parse_indentless_sequence_entry)
+ return self.parse_block_node()
+ else:
+ self.state = self.parse_indentless_sequence_entry
+ return self.process_empty_scalar(token.end_mark)
+ token = self.peek_token()
+ event = SequenceEndEvent(token.start_mark, token.start_mark)
+ self.state = self.states.pop()
+ return event
+
+ # block_mapping ::= BLOCK-MAPPING_START
+ # ((KEY block_node_or_indentless_sequence?)?
+ # (VALUE block_node_or_indentless_sequence?)?)*
+ # BLOCK-END
+
+ def parse_block_mapping_first_key(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_block_mapping_key()
+
+ def parse_block_mapping_key(self):
+ if self.check_token(KeyToken):
+ token = self.get_token()
+ if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+ self.states.append(self.parse_block_mapping_value)
+ return self.parse_block_node_or_indentless_sequence()
+ else:
+ self.state = self.parse_block_mapping_value
+ return self.process_empty_scalar(token.end_mark)
+ if not self.check_token(BlockEndToken):
+ token = self.peek_token()
+ raise ParserError("while parsing a block mapping", self.marks[-1],
+ "expected <block end>, but found %r" % token.id, token.start_mark)
+ token = self.get_token()
+ event = MappingEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ def parse_block_mapping_value(self):
+ if self.check_token(ValueToken):
+ token = self.get_token()
+ if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+ self.states.append(self.parse_block_mapping_key)
+ return self.parse_block_node_or_indentless_sequence()
+ else:
+ self.state = self.parse_block_mapping_key
+ return self.process_empty_scalar(token.end_mark)
+ else:
+ self.state = self.parse_block_mapping_key
+ token = self.peek_token()
+ return self.process_empty_scalar(token.start_mark)
+
+ # flow_sequence ::= FLOW-SEQUENCE-START
+ # (flow_sequence_entry FLOW-ENTRY)*
+ # flow_sequence_entry?
+ # FLOW-SEQUENCE-END
+ # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+ #
+ # Note that while production rules for both flow_sequence_entry and
+ # flow_mapping_entry are equal, their interpretations are different.
+ # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
+    # generates an inline mapping (set syntax).
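+    # For example, in the flow sequence "[a, b: c]" the entry "b: c" is parsed
+    # as a single-pair mapping nested inside the sequence.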
+
+ def parse_flow_sequence_first_entry(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_flow_sequence_entry(first=True)
+
+ def parse_flow_sequence_entry(self, first=False):
+ if not self.check_token(FlowSequenceEndToken):
+ if not first:
+ if self.check_token(FlowEntryToken):
+ self.get_token()
+ else:
+ token = self.peek_token()
+ raise ParserError("while parsing a flow sequence", self.marks[-1],
+ "expected ',' or ']', but got %r" % token.id, token.start_mark)
+
+ if self.check_token(KeyToken):
+ token = self.peek_token()
+ event = MappingStartEvent(None, None, True,
+ token.start_mark, token.end_mark,
+ flow_style=True)
+ self.state = self.parse_flow_sequence_entry_mapping_key
+ return event
+ elif not self.check_token(FlowSequenceEndToken):
+ self.states.append(self.parse_flow_sequence_entry)
+ return self.parse_flow_node()
+ token = self.get_token()
+ event = SequenceEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ def parse_flow_sequence_entry_mapping_key(self):
+ token = self.get_token()
+ if not self.check_token(ValueToken,
+ FlowEntryToken, FlowSequenceEndToken):
+ self.states.append(self.parse_flow_sequence_entry_mapping_value)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_sequence_entry_mapping_value
+ return self.process_empty_scalar(token.end_mark)
+
+ def parse_flow_sequence_entry_mapping_value(self):
+ if self.check_token(ValueToken):
+ token = self.get_token()
+ if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
+ self.states.append(self.parse_flow_sequence_entry_mapping_end)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_sequence_entry_mapping_end
+ return self.process_empty_scalar(token.end_mark)
+ else:
+ self.state = self.parse_flow_sequence_entry_mapping_end
+ token = self.peek_token()
+ return self.process_empty_scalar(token.start_mark)
+
+ def parse_flow_sequence_entry_mapping_end(self):
+ self.state = self.parse_flow_sequence_entry
+ token = self.peek_token()
+ return MappingEndEvent(token.start_mark, token.start_mark)
+
+ # flow_mapping ::= FLOW-MAPPING-START
+ # (flow_mapping_entry FLOW-ENTRY)*
+ # flow_mapping_entry?
+ # FLOW-MAPPING-END
+ # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+
+ def parse_flow_mapping_first_key(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_flow_mapping_key(first=True)
+
+ def parse_flow_mapping_key(self, first=False):
+ if not self.check_token(FlowMappingEndToken):
+ if not first:
+ if self.check_token(FlowEntryToken):
+ self.get_token()
+ else:
+ token = self.peek_token()
+ raise ParserError("while parsing a flow mapping", self.marks[-1],
+ "expected ',' or '}', but got %r" % token.id, token.start_mark)
+ if self.check_token(KeyToken):
+ token = self.get_token()
+ if not self.check_token(ValueToken,
+ FlowEntryToken, FlowMappingEndToken):
+ self.states.append(self.parse_flow_mapping_value)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_mapping_value
+ return self.process_empty_scalar(token.end_mark)
+ elif not self.check_token(FlowMappingEndToken):
+ self.states.append(self.parse_flow_mapping_empty_value)
+ return self.parse_flow_node()
+ token = self.get_token()
+ event = MappingEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ def parse_flow_mapping_value(self):
+ if self.check_token(ValueToken):
+ token = self.get_token()
+ if not self.check_token(FlowEntryToken, FlowMappingEndToken):
+ self.states.append(self.parse_flow_mapping_key)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_mapping_key
+ return self.process_empty_scalar(token.end_mark)
+ else:
+ self.state = self.parse_flow_mapping_key
+ token = self.peek_token()
+ return self.process_empty_scalar(token.start_mark)
+
+ def parse_flow_mapping_empty_value(self):
+ self.state = self.parse_flow_mapping_key
+ return self.process_empty_scalar(self.peek_token().start_mark)
+
+ def process_empty_scalar(self, mark):
+ return ScalarEvent(None, None, (True, False), u'', mark, mark)
+
diff --git a/third_party/python/PyYAML/lib/yaml/reader.py b/third_party/python/PyYAML/lib/yaml/reader.py
new file mode 100644
index 0000000000..4b377d61e2
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/reader.py
@@ -0,0 +1,188 @@
+# This module contains abstractions for the input stream. You don't have to
+# look further; there is no pretty code here.
+#
+# We define two classes here.
+#
+# Mark(source, line, column)
+# It's just a record and its only use is producing nice error messages.
+# Parser does not use it for any other purposes.
+#
+# Reader(stream)
+# Reader determines the encoding of the stream and converts it to unicode.
+# Reader provides the following methods and attributes:
+# reader.peek(index=0) - return the character at offset `index` from the current position.
+# reader.prefix(length=1) - return the next `length` characters.
+# reader.forward(length=1) - move the current position by `length` characters.
+# reader.index - the number of the current character.
+# reader.line, reader.column - the line and the column of the current character.
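+#
+# A short usage sketch (illustrative; uses the Python 2 API defined below):
+#   reader = Reader(u'foo: bar\n')
+#   reader.peek()      # -> u'f'
+#   reader.prefix(3)   # -> u'foo'
+#   reader.forward(5)  # move past "foo: "
+#   reader.peek()      # -> u'b'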
+
+__all__ = ['Reader', 'ReaderError']
+
+from error import YAMLError, Mark
+
+import codecs, re, sys
+
+has_ucs4 = sys.maxunicode > 0xffff
+
+class ReaderError(YAMLError):
+
+ def __init__(self, name, position, character, encoding, reason):
+ self.name = name
+ self.character = character
+ self.position = position
+ self.encoding = encoding
+ self.reason = reason
+
+ def __str__(self):
+ if isinstance(self.character, str):
+ return "'%s' codec can't decode byte #x%02x: %s\n" \
+ " in \"%s\", position %d" \
+ % (self.encoding, ord(self.character), self.reason,
+ self.name, self.position)
+ else:
+ return "unacceptable character #x%04x: %s\n" \
+ " in \"%s\", position %d" \
+ % (self.character, self.reason,
+ self.name, self.position)
+
+class Reader(object):
+ # Reader:
+ # - determines the data encoding and converts it to unicode,
+ # - checks if characters are in allowed range,
+ # - adds '\0' to the end.
+
+ # Reader accepts
+ # - a `str` object,
+ # - a `unicode` object,
+ # - a file-like object with its `read` method returning `str`,
+ # - a file-like object with its `read` method returning `unicode`.
+
+ # Yeah, it's ugly and slow.
+
+ def __init__(self, stream):
+ self.name = None
+ self.stream = None
+ self.stream_pointer = 0
+ self.eof = True
+ self.buffer = u''
+ self.pointer = 0
+ self.raw_buffer = None
+ self.raw_decode = None
+ self.encoding = None
+ self.index = 0
+ self.line = 0
+ self.column = 0
+ if isinstance(stream, unicode):
+ self.name = "<unicode string>"
+ self.check_printable(stream)
+ self.buffer = stream+u'\0'
+ elif isinstance(stream, str):
+ self.name = "<string>"
+ self.raw_buffer = stream
+ self.determine_encoding()
+ else:
+ self.stream = stream
+ self.name = getattr(stream, 'name', "<file>")
+ self.eof = False
+ self.raw_buffer = ''
+ self.determine_encoding()
+
+ def peek(self, index=0):
+ try:
+ return self.buffer[self.pointer+index]
+ except IndexError:
+ self.update(index+1)
+ return self.buffer[self.pointer+index]
+
+ def prefix(self, length=1):
+ if self.pointer+length >= len(self.buffer):
+ self.update(length)
+ return self.buffer[self.pointer:self.pointer+length]
+
+ def forward(self, length=1):
+ if self.pointer+length+1 >= len(self.buffer):
+ self.update(length+1)
+ while length:
+ ch = self.buffer[self.pointer]
+ self.pointer += 1
+ self.index += 1
+ if ch in u'\n\x85\u2028\u2029' \
+ or (ch == u'\r' and self.buffer[self.pointer] != u'\n'):
+ self.line += 1
+ self.column = 0
+ elif ch != u'\uFEFF':
+ self.column += 1
+ length -= 1
+
+ def get_mark(self):
+ if self.stream is None:
+ return Mark(self.name, self.index, self.line, self.column,
+ self.buffer, self.pointer)
+ else:
+ return Mark(self.name, self.index, self.line, self.column,
+ None, None)
+
+ def determine_encoding(self):
+ while not self.eof and len(self.raw_buffer) < 2:
+ self.update_raw()
+ if not isinstance(self.raw_buffer, unicode):
+ if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
+ self.raw_decode = codecs.utf_16_le_decode
+ self.encoding = 'utf-16-le'
+ elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
+ self.raw_decode = codecs.utf_16_be_decode
+ self.encoding = 'utf-16-be'
+ else:
+ self.raw_decode = codecs.utf_8_decode
+ self.encoding = 'utf-8'
+ self.update(1)
+
+ if has_ucs4:
+ NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD\U00010000-\U0010ffff]')
+ else:
+ NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uFFFD]|(?:^|[^\uD800-\uDBFF])[\uDC00-\uDFFF]|[\uD800-\uDBFF](?:[^\uDC00-\uDFFF]|$)')
+ def check_printable(self, data):
+ match = self.NON_PRINTABLE.search(data)
+ if match:
+ character = match.group()
+ position = self.index+(len(self.buffer)-self.pointer)+match.start()
+ raise ReaderError(self.name, position, ord(character),
+ 'unicode', "special characters are not allowed")
+
+ def update(self, length):
+ if self.raw_buffer is None:
+ return
+ self.buffer = self.buffer[self.pointer:]
+ self.pointer = 0
+ while len(self.buffer) < length:
+ if not self.eof:
+ self.update_raw()
+ if self.raw_decode is not None:
+ try:
+ data, converted = self.raw_decode(self.raw_buffer,
+ 'strict', self.eof)
+ except UnicodeDecodeError, exc:
+ character = exc.object[exc.start]
+ if self.stream is not None:
+ position = self.stream_pointer-len(self.raw_buffer)+exc.start
+ else:
+ position = exc.start
+ raise ReaderError(self.name, position, character,
+ exc.encoding, exc.reason)
+ else:
+ data = self.raw_buffer
+ converted = len(data)
+ self.check_printable(data)
+ self.buffer += data
+ self.raw_buffer = self.raw_buffer[converted:]
+ if self.eof:
+ self.buffer += u'\0'
+ self.raw_buffer = None
+ break
+
+ def update_raw(self, size=1024):
+ data = self.stream.read(size)
+ if data:
+ self.raw_buffer += data
+ self.stream_pointer += len(data)
+ else:
+ self.eof = True
diff --git a/third_party/python/PyYAML/lib/yaml/representer.py b/third_party/python/PyYAML/lib/yaml/representer.py
new file mode 100644
index 0000000000..93e09b67b3
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/representer.py
@@ -0,0 +1,489 @@
+
+__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
+ 'RepresenterError']
+
+from error import *
+
+from nodes import *
+
+import datetime
+
+import copy_reg, types
+
+class RepresenterError(YAMLError):
+ pass
+
+class BaseRepresenter(object):
+
+ yaml_representers = {}
+ yaml_multi_representers = {}
+
+ def __init__(self, default_style=None, default_flow_style=False, sort_keys=True):
+ self.default_style = default_style
+ self.default_flow_style = default_flow_style
+ self.sort_keys = sort_keys
+ self.represented_objects = {}
+ self.object_keeper = []
+ self.alias_key = None
+
+ def represent(self, data):
+ node = self.represent_data(data)
+ self.serialize(node)
+ self.represented_objects = {}
+ self.object_keeper = []
+ self.alias_key = None
+
+ def get_classobj_bases(self, cls):
+ bases = [cls]
+ for base in cls.__bases__:
+ bases.extend(self.get_classobj_bases(base))
+ return bases
+
+ def represent_data(self, data):
+ if self.ignore_aliases(data):
+ self.alias_key = None
+ else:
+ self.alias_key = id(data)
+ if self.alias_key is not None:
+ if self.alias_key in self.represented_objects:
+ node = self.represented_objects[self.alias_key]
+ #if node is None:
+ # raise RepresenterError("recursive objects are not allowed: %r" % data)
+ return node
+ #self.represented_objects[alias_key] = None
+ self.object_keeper.append(data)
+ data_types = type(data).__mro__
+ if type(data) is types.InstanceType:
+ data_types = self.get_classobj_bases(data.__class__)+list(data_types)
+ if data_types[0] in self.yaml_representers:
+ node = self.yaml_representers[data_types[0]](self, data)
+ else:
+ for data_type in data_types:
+ if data_type in self.yaml_multi_representers:
+ node = self.yaml_multi_representers[data_type](self, data)
+ break
+ else:
+ if None in self.yaml_multi_representers:
+ node = self.yaml_multi_representers[None](self, data)
+ elif None in self.yaml_representers:
+ node = self.yaml_representers[None](self, data)
+ else:
+ node = ScalarNode(None, unicode(data))
+ #if alias_key is not None:
+ # self.represented_objects[alias_key] = node
+ return node
+
+ def add_representer(cls, data_type, representer):
+ if not 'yaml_representers' in cls.__dict__:
+ cls.yaml_representers = cls.yaml_representers.copy()
+ cls.yaml_representers[data_type] = representer
+ add_representer = classmethod(add_representer)
+
+ def add_multi_representer(cls, data_type, representer):
+ if not 'yaml_multi_representers' in cls.__dict__:
+ cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
+ cls.yaml_multi_representers[data_type] = representer
+ add_multi_representer = classmethod(add_multi_representer)
+
+ def represent_scalar(self, tag, value, style=None):
+ if style is None:
+ style = self.default_style
+ node = ScalarNode(tag, value, style=style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ return node
+
+ def represent_sequence(self, tag, sequence, flow_style=None):
+ value = []
+ node = SequenceNode(tag, value, flow_style=flow_style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ best_style = True
+ for item in sequence:
+ node_item = self.represent_data(item)
+ if not (isinstance(node_item, ScalarNode) and not node_item.style):
+ best_style = False
+ value.append(node_item)
+ if flow_style is None:
+ if self.default_flow_style is not None:
+ node.flow_style = self.default_flow_style
+ else:
+ node.flow_style = best_style
+ return node
+
+ def represent_mapping(self, tag, mapping, flow_style=None):
+ value = []
+ node = MappingNode(tag, value, flow_style=flow_style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ best_style = True
+ if hasattr(mapping, 'items'):
+ mapping = mapping.items()
+ if self.sort_keys:
+ mapping.sort()
+ for item_key, item_value in mapping:
+ node_key = self.represent_data(item_key)
+ node_value = self.represent_data(item_value)
+ if not (isinstance(node_key, ScalarNode) and not node_key.style):
+ best_style = False
+ if not (isinstance(node_value, ScalarNode) and not node_value.style):
+ best_style = False
+ value.append((node_key, node_value))
+ if flow_style is None:
+ if self.default_flow_style is not None:
+ node.flow_style = self.default_flow_style
+ else:
+ node.flow_style = best_style
+ return node
+
+ def ignore_aliases(self, data):
+ return False
+
+class SafeRepresenter(BaseRepresenter):
+
+ def ignore_aliases(self, data):
+ if data is None:
+ return True
+ if isinstance(data, tuple) and data == ():
+ return True
+ if isinstance(data, (str, unicode, bool, int, float)):
+ return True
+
+ def represent_none(self, data):
+ return self.represent_scalar(u'tag:yaml.org,2002:null',
+ u'null')
+
+ def represent_str(self, data):
+ tag = None
+ style = None
+ try:
+ data = unicode(data, 'ascii')
+ tag = u'tag:yaml.org,2002:str'
+ except UnicodeDecodeError:
+ try:
+ data = unicode(data, 'utf-8')
+ tag = u'tag:yaml.org,2002:str'
+ except UnicodeDecodeError:
+ data = data.encode('base64')
+ tag = u'tag:yaml.org,2002:binary'
+ style = '|'
+ return self.represent_scalar(tag, data, style=style)
+
+ def represent_unicode(self, data):
+ return self.represent_scalar(u'tag:yaml.org,2002:str', data)
+
+ def represent_bool(self, data):
+ if data:
+ value = u'true'
+ else:
+ value = u'false'
+ return self.represent_scalar(u'tag:yaml.org,2002:bool', value)
+
+ def represent_int(self, data):
+ return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data))
+
+ def represent_long(self, data):
+ return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data))
+
+ inf_value = 1e300
+ while repr(inf_value) != repr(inf_value*inf_value):
+ inf_value *= inf_value
+
+ def represent_float(self, data):
+ if data != data or (data == 0.0 and data == 1.0):
+ value = u'.nan'
+ elif data == self.inf_value:
+ value = u'.inf'
+ elif data == -self.inf_value:
+ value = u'-.inf'
+ else:
+ value = unicode(repr(data)).lower()
+ # Note that in some cases `repr(data)` represents a float number
+            # without the decimal part. For instance:
+ # >>> repr(1e17)
+ # '1e17'
+ # Unfortunately, this is not a valid float representation according
+ # to the definition of the `!!float` tag. We fix this by adding
+ # '.0' before the 'e' symbol.
+ if u'.' not in value and u'e' in value:
+ value = value.replace(u'e', u'.0e', 1)
+ return self.represent_scalar(u'tag:yaml.org,2002:float', value)
+
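
The '.0e' fix-up above exists because repr() of a large float may carry no decimal point, which the !!float pattern in resolver.py (further down in this patch) would not re-resolve as a float. A tiny illustration of the transformation; the values shown are typical CPython 2 output:

    value = repr(1e17).lower()                  # typically '1e+17' -- no '.'
    if u'.' not in value and u'e' in value:
        value = value.replace(u'e', u'.0e', 1)  # '1.0e+17' -- valid !!float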
+ def represent_list(self, data):
+ #pairs = (len(data) > 0 and isinstance(data, list))
+ #if pairs:
+ # for item in data:
+ # if not isinstance(item, tuple) or len(item) != 2:
+ # pairs = False
+ # break
+ #if not pairs:
+ return self.represent_sequence(u'tag:yaml.org,2002:seq', data)
+ #value = []
+ #for item_key, item_value in data:
+ # value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
+ # [(item_key, item_value)]))
+ #return SequenceNode(u'tag:yaml.org,2002:pairs', value)
+
+ def represent_dict(self, data):
+ return self.represent_mapping(u'tag:yaml.org,2002:map', data)
+
+ def represent_set(self, data):
+ value = {}
+ for key in data:
+ value[key] = None
+ return self.represent_mapping(u'tag:yaml.org,2002:set', value)
+
+ def represent_date(self, data):
+ value = unicode(data.isoformat())
+ return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)
+
+ def represent_datetime(self, data):
+ value = unicode(data.isoformat(' '))
+ return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)
+
+ def represent_yaml_object(self, tag, data, cls, flow_style=None):
+ if hasattr(data, '__getstate__'):
+ state = data.__getstate__()
+ else:
+ state = data.__dict__.copy()
+ return self.represent_mapping(tag, state, flow_style=flow_style)
+
+ def represent_undefined(self, data):
+ raise RepresenterError("cannot represent an object", data)
+
+SafeRepresenter.add_representer(type(None),
+ SafeRepresenter.represent_none)
+
+SafeRepresenter.add_representer(str,
+ SafeRepresenter.represent_str)
+
+SafeRepresenter.add_representer(unicode,
+ SafeRepresenter.represent_unicode)
+
+SafeRepresenter.add_representer(bool,
+ SafeRepresenter.represent_bool)
+
+SafeRepresenter.add_representer(int,
+ SafeRepresenter.represent_int)
+
+SafeRepresenter.add_representer(long,
+ SafeRepresenter.represent_long)
+
+SafeRepresenter.add_representer(float,
+ SafeRepresenter.represent_float)
+
+SafeRepresenter.add_representer(list,
+ SafeRepresenter.represent_list)
+
+SafeRepresenter.add_representer(tuple,
+ SafeRepresenter.represent_list)
+
+SafeRepresenter.add_representer(dict,
+ SafeRepresenter.represent_dict)
+
+SafeRepresenter.add_representer(set,
+ SafeRepresenter.represent_set)
+
+SafeRepresenter.add_representer(datetime.date,
+ SafeRepresenter.represent_date)
+
+SafeRepresenter.add_representer(datetime.datetime,
+ SafeRepresenter.represent_datetime)
+
+SafeRepresenter.add_representer(None,
+ SafeRepresenter.represent_undefined)
+
+class Representer(SafeRepresenter):
+
+ def represent_str(self, data):
+ tag = None
+ style = None
+ try:
+ data = unicode(data, 'ascii')
+ tag = u'tag:yaml.org,2002:str'
+ except UnicodeDecodeError:
+ try:
+ data = unicode(data, 'utf-8')
+ tag = u'tag:yaml.org,2002:python/str'
+ except UnicodeDecodeError:
+ data = data.encode('base64')
+ tag = u'tag:yaml.org,2002:binary'
+ style = '|'
+ return self.represent_scalar(tag, data, style=style)
+
+ def represent_unicode(self, data):
+ tag = None
+ try:
+ data.encode('ascii')
+ tag = u'tag:yaml.org,2002:python/unicode'
+ except UnicodeEncodeError:
+ tag = u'tag:yaml.org,2002:str'
+ return self.represent_scalar(tag, data)
+
+ def represent_long(self, data):
+ tag = u'tag:yaml.org,2002:int'
+ if int(data) is not data:
+ tag = u'tag:yaml.org,2002:python/long'
+ return self.represent_scalar(tag, unicode(data))
+
+ def represent_complex(self, data):
+ if data.imag == 0.0:
+ data = u'%r' % data.real
+ elif data.real == 0.0:
+ data = u'%rj' % data.imag
+ elif data.imag > 0:
+ data = u'%r+%rj' % (data.real, data.imag)
+ else:
+ data = u'%r%rj' % (data.real, data.imag)
+ return self.represent_scalar(u'tag:yaml.org,2002:python/complex', data)
+
+ def represent_tuple(self, data):
+ return self.represent_sequence(u'tag:yaml.org,2002:python/tuple', data)
+
+ def represent_name(self, data):
+ name = u'%s.%s' % (data.__module__, data.__name__)
+ return self.represent_scalar(u'tag:yaml.org,2002:python/name:'+name, u'')
+
+ def represent_module(self, data):
+ return self.represent_scalar(
+ u'tag:yaml.org,2002:python/module:'+data.__name__, u'')
+
+ def represent_instance(self, data):
+ # For instances of classic classes, we use __getinitargs__ and
+ # __getstate__ to serialize the data.
+
+ # If data.__getinitargs__ exists, the object must be reconstructed by
+        # calling cls(*args), where args is a tuple returned by
+ # __getinitargs__. Otherwise, the cls.__init__ method should never be
+ # called and the class instance is created by instantiating a trivial
+ # class and assigning to the instance's __class__ variable.
+
+ # If data.__getstate__ exists, it returns the state of the object.
+ # Otherwise, the state of the object is data.__dict__.
+
+ # We produce either a !!python/object or !!python/object/new node.
+ # If data.__getinitargs__ does not exist and state is a dictionary, we
+        # produce a !!python/object node. Otherwise we produce a
+ # !!python/object/new node.
+
+ cls = data.__class__
+ class_name = u'%s.%s' % (cls.__module__, cls.__name__)
+ args = None
+ state = None
+ if hasattr(data, '__getinitargs__'):
+ args = list(data.__getinitargs__())
+ if hasattr(data, '__getstate__'):
+ state = data.__getstate__()
+ else:
+ state = data.__dict__
+ if args is None and isinstance(state, dict):
+ return self.represent_mapping(
+ u'tag:yaml.org,2002:python/object:'+class_name, state)
+ if isinstance(state, dict) and not state:
+ return self.represent_sequence(
+ u'tag:yaml.org,2002:python/object/new:'+class_name, args)
+ value = {}
+ if args:
+ value['args'] = args
+ value['state'] = state
+ return self.represent_mapping(
+ u'tag:yaml.org,2002:python/object/new:'+class_name, value)
+
+ def represent_object(self, data):
+        # We use the __reduce__ API to save the data. data.__reduce__ returns
+ # a tuple of length 2-5:
+ # (function, args, state, listitems, dictitems)
+
+        # For reconstructing, we call function(*args), then set its state,
+ # listitems, and dictitems if they are not None.
+
+ # A special case is when function.__name__ == '__newobj__'. In this
+ # case we create the object with args[0].__new__(*args).
+
+ # Another special case is when __reduce__ returns a string - we don't
+ # support it.
+
+ # We produce a !!python/object, !!python/object/new or
+ # !!python/object/apply node.
+
+ cls = type(data)
+ if cls in copy_reg.dispatch_table:
+ reduce = copy_reg.dispatch_table[cls](data)
+ elif hasattr(data, '__reduce_ex__'):
+ reduce = data.__reduce_ex__(2)
+ elif hasattr(data, '__reduce__'):
+ reduce = data.__reduce__()
+ else:
+ raise RepresenterError("cannot represent an object", data)
+ reduce = (list(reduce)+[None]*5)[:5]
+ function, args, state, listitems, dictitems = reduce
+ args = list(args)
+ if state is None:
+ state = {}
+ if listitems is not None:
+ listitems = list(listitems)
+ if dictitems is not None:
+ dictitems = dict(dictitems)
+ if function.__name__ == '__newobj__':
+ function = args[0]
+ args = args[1:]
+ tag = u'tag:yaml.org,2002:python/object/new:'
+ newobj = True
+ else:
+ tag = u'tag:yaml.org,2002:python/object/apply:'
+ newobj = False
+ function_name = u'%s.%s' % (function.__module__, function.__name__)
+ if not args and not listitems and not dictitems \
+ and isinstance(state, dict) and newobj:
+ return self.represent_mapping(
+ u'tag:yaml.org,2002:python/object:'+function_name, state)
+ if not listitems and not dictitems \
+ and isinstance(state, dict) and not state:
+ return self.represent_sequence(tag+function_name, args)
+ value = {}
+ if args:
+ value['args'] = args
+ if state or not isinstance(state, dict):
+ value['state'] = state
+ if listitems:
+ value['listitems'] = listitems
+ if dictitems:
+ value['dictitems'] = dictitems
+ return self.represent_mapping(tag+function_name, value)
+
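
represent_object above mirrors the pickle protocol: it obtains a (function, args, state, listitems, dictitems) tuple from copy_reg, __reduce_ex__(2) or __reduce__, then picks the !!python/object, !!python/object/new or !!python/object/apply form. A hedged sketch of the inspection step only; the Point class is illustrative:

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

    reduce_result = Point(1, 2).__reduce_ex__(2)
    # Pad to five elements, exactly as represent_object does before unpacking.
    function, args, state, listitems, dictitems = (list(reduce_result) + [None] * 5)[:5]
    print(function.__name__)   # usually '__newobj__' for new-style classes
    print(state)               # the instance __dict__, e.g. {'x': 1, 'y': 2}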
+Representer.add_representer(str,
+ Representer.represent_str)
+
+Representer.add_representer(unicode,
+ Representer.represent_unicode)
+
+Representer.add_representer(long,
+ Representer.represent_long)
+
+Representer.add_representer(complex,
+ Representer.represent_complex)
+
+Representer.add_representer(tuple,
+ Representer.represent_tuple)
+
+Representer.add_representer(type,
+ Representer.represent_name)
+
+Representer.add_representer(types.ClassType,
+ Representer.represent_name)
+
+Representer.add_representer(types.FunctionType,
+ Representer.represent_name)
+
+Representer.add_representer(types.BuiltinFunctionType,
+ Representer.represent_name)
+
+Representer.add_representer(types.ModuleType,
+ Representer.represent_module)
+
+Representer.add_multi_representer(types.InstanceType,
+ Representer.represent_instance)
+
+Representer.add_multi_representer(object,
+ Representer.represent_object)
+
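
These registrations populate the per-class yaml_representers tables; the same mechanism is exposed to applications through yaml.add_representer. A hedged usage sketch (the Money class and the !money tag are illustrative, not part of PyYAML):

    import yaml

    class Money(object):
        def __init__(self, amount, currency):
            self.amount = amount
            self.currency = currency

    def money_representer(dumper, data):
        # Delegates to represent_mapping, so the aliasing and flow-style
        # logic defined above still applies.
        return dumper.represent_mapping(u'!money', {'amount': data.amount,
                                                    'currency': data.currency})

    yaml.add_representer(Money, money_representer)
    print(yaml.dump(Money(5, 'EUR')))   # emits a mapping node tagged !money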
diff --git a/third_party/python/PyYAML/lib/yaml/resolver.py b/third_party/python/PyYAML/lib/yaml/resolver.py
new file mode 100644
index 0000000000..528fbc0ead
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/resolver.py
@@ -0,0 +1,227 @@
+
+__all__ = ['BaseResolver', 'Resolver']
+
+from error import *
+from nodes import *
+
+import re
+
+class ResolverError(YAMLError):
+ pass
+
+class BaseResolver(object):
+
+ DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str'
+ DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq'
+ DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map'
+
+ yaml_implicit_resolvers = {}
+ yaml_path_resolvers = {}
+
+ def __init__(self):
+ self.resolver_exact_paths = []
+ self.resolver_prefix_paths = []
+
+ def add_implicit_resolver(cls, tag, regexp, first):
+ if not 'yaml_implicit_resolvers' in cls.__dict__:
+ implicit_resolvers = {}
+ for key in cls.yaml_implicit_resolvers:
+ implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:]
+ cls.yaml_implicit_resolvers = implicit_resolvers
+ if first is None:
+ first = [None]
+ for ch in first:
+ cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
+ add_implicit_resolver = classmethod(add_implicit_resolver)
+
+ def add_path_resolver(cls, tag, path, kind=None):
+ # Note: `add_path_resolver` is experimental. The API could be changed.
+        # `path` is a pattern that is matched against the path from the
+        # root to the node that is being considered. `path` elements are
+ # tuples `(node_check, index_check)`. `node_check` is a node class:
+ # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None`
+ # matches any kind of a node. `index_check` could be `None`, a boolean
+ # value, a string value, or a number. `None` and `False` match against
+ # any _value_ of sequence and mapping nodes. `True` matches against
+ # any _key_ of a mapping node. A string `index_check` matches against
+        # a mapping value that corresponds to a scalar key whose content is
+ # equal to the `index_check` value. An integer `index_check` matches
+ # against a sequence value with the index equal to `index_check`.
+ if not 'yaml_path_resolvers' in cls.__dict__:
+ cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
+ new_path = []
+ for element in path:
+ if isinstance(element, (list, tuple)):
+ if len(element) == 2:
+ node_check, index_check = element
+ elif len(element) == 1:
+ node_check = element[0]
+ index_check = True
+ else:
+ raise ResolverError("Invalid path element: %s" % element)
+ else:
+ node_check = None
+ index_check = element
+ if node_check is str:
+ node_check = ScalarNode
+ elif node_check is list:
+ node_check = SequenceNode
+ elif node_check is dict:
+ node_check = MappingNode
+ elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
+ and not isinstance(node_check, basestring) \
+ and node_check is not None:
+ raise ResolverError("Invalid node checker: %s" % node_check)
+ if not isinstance(index_check, (basestring, int)) \
+ and index_check is not None:
+ raise ResolverError("Invalid index checker: %s" % index_check)
+ new_path.append((node_check, index_check))
+ if kind is str:
+ kind = ScalarNode
+ elif kind is list:
+ kind = SequenceNode
+ elif kind is dict:
+ kind = MappingNode
+ elif kind not in [ScalarNode, SequenceNode, MappingNode] \
+ and kind is not None:
+ raise ResolverError("Invalid node kind: %s" % kind)
+ cls.yaml_path_resolvers[tuple(new_path), kind] = tag
+ add_path_resolver = classmethod(add_path_resolver)
+
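
The path tuples described in the comment above are easiest to read from a registration. A hedged sketch using the public yaml.add_path_resolver wrapper; the !config and !server tags and the 'servers' key are illustrative, and matching constructors would still have to be registered with yaml.add_constructor:

    import yaml

    # The root node of every document must be a mapping and is tagged !config
    # (an empty path matches the root node itself).
    yaml.add_path_resolver(u'!config', [], kind=dict)

    # Each item of the sequence stored under the top-level key 'servers' is a
    # mapping tagged !server: root mapping -> value of key 'servers' -> any item.
    yaml.add_path_resolver(u'!server', [(dict, 'servers'), (list, None)], kind=dict)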
+ def descend_resolver(self, current_node, current_index):
+ if not self.yaml_path_resolvers:
+ return
+ exact_paths = {}
+ prefix_paths = []
+ if current_node:
+ depth = len(self.resolver_prefix_paths)
+ for path, kind in self.resolver_prefix_paths[-1]:
+ if self.check_resolver_prefix(depth, path, kind,
+ current_node, current_index):
+ if len(path) > depth:
+ prefix_paths.append((path, kind))
+ else:
+ exact_paths[kind] = self.yaml_path_resolvers[path, kind]
+ else:
+ for path, kind in self.yaml_path_resolvers:
+ if not path:
+ exact_paths[kind] = self.yaml_path_resolvers[path, kind]
+ else:
+ prefix_paths.append((path, kind))
+ self.resolver_exact_paths.append(exact_paths)
+ self.resolver_prefix_paths.append(prefix_paths)
+
+ def ascend_resolver(self):
+ if not self.yaml_path_resolvers:
+ return
+ self.resolver_exact_paths.pop()
+ self.resolver_prefix_paths.pop()
+
+ def check_resolver_prefix(self, depth, path, kind,
+ current_node, current_index):
+ node_check, index_check = path[depth-1]
+ if isinstance(node_check, basestring):
+ if current_node.tag != node_check:
+ return
+ elif node_check is not None:
+ if not isinstance(current_node, node_check):
+ return
+ if index_check is True and current_index is not None:
+ return
+ if (index_check is False or index_check is None) \
+ and current_index is None:
+ return
+ if isinstance(index_check, basestring):
+ if not (isinstance(current_index, ScalarNode)
+ and index_check == current_index.value):
+ return
+ elif isinstance(index_check, int) and not isinstance(index_check, bool):
+ if index_check != current_index:
+ return
+ return True
+
+ def resolve(self, kind, value, implicit):
+ if kind is ScalarNode and implicit[0]:
+ if value == u'':
+ resolvers = self.yaml_implicit_resolvers.get(u'', [])
+ else:
+ resolvers = self.yaml_implicit_resolvers.get(value[0], [])
+ resolvers += self.yaml_implicit_resolvers.get(None, [])
+ for tag, regexp in resolvers:
+ if regexp.match(value):
+ return tag
+ implicit = implicit[1]
+ if self.yaml_path_resolvers:
+ exact_paths = self.resolver_exact_paths[-1]
+ if kind in exact_paths:
+ return exact_paths[kind]
+ if None in exact_paths:
+ return exact_paths[None]
+ if kind is ScalarNode:
+ return self.DEFAULT_SCALAR_TAG
+ elif kind is SequenceNode:
+ return self.DEFAULT_SEQUENCE_TAG
+ elif kind is MappingNode:
+ return self.DEFAULT_MAPPING_TAG
+
+class Resolver(BaseResolver):
+ pass
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:bool',
+ re.compile(ur'''^(?:yes|Yes|YES|no|No|NO
+ |true|True|TRUE|false|False|FALSE
+ |on|On|ON|off|Off|OFF)$''', re.X),
+ list(u'yYnNtTfFoO'))
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:float',
+ re.compile(ur'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)?
+ |\.[0-9_]+(?:[eE][-+][0-9]+)?
+ |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*
+ |[-+]?\.(?:inf|Inf|INF)
+ |\.(?:nan|NaN|NAN))$''', re.X),
+ list(u'-+0123456789.'))
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:int',
+ re.compile(ur'''^(?:[-+]?0b[0-1_]+
+ |[-+]?0[0-7_]+
+ |[-+]?(?:0|[1-9][0-9_]*)
+ |[-+]?0x[0-9a-fA-F_]+
+ |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
+ list(u'-+0123456789'))
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:merge',
+ re.compile(ur'^(?:<<)$'),
+ [u'<'])
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:null',
+ re.compile(ur'''^(?: ~
+ |null|Null|NULL
+ | )$''', re.X),
+ [u'~', u'n', u'N', u''])
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:timestamp',
+ re.compile(ur'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
+ |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
+ (?:[Tt]|[ \t]+)[0-9][0-9]?
+ :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)?
+ (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
+ list(u'0123456789'))
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:value',
+ re.compile(ur'^(?:=)$'),
+ [u'='])
+
+# The following resolver is only for documentation purposes. It cannot work
+# because plain scalars cannot start with '!', '&', or '*'.
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:yaml',
+ re.compile(ur'^(?:!|&|\*)$'),
+ list(u'!&*'))
+
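
Applications can extend the table of implicit resolvers built above through yaml.add_implicit_resolver, supplying the same (tag, regexp, first-characters) triple. A hedged sketch that tags ${NAME}-style scalars; the !env tag and its constructor are illustrative:

    import re
    import yaml

    env_pattern = re.compile(r'^\$\{[A-Z_][A-Z0-9_]*\}$')
    # The third argument plays the same role as the `first` lists above: only
    # plain scalars starting with '$' are tested against the pattern.
    yaml.add_implicit_resolver(u'!env', env_pattern, list(u'$'))

    def construct_env(loader, node):
        # Illustrative constructor: return just the variable name.
        return loader.construct_scalar(node)[2:-1]

    yaml.add_constructor(u'!env', construct_env)
    print(yaml.load('home: ${HOME}\n', Loader=yaml.Loader))   # roughly {'home': 'HOME'}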
diff --git a/third_party/python/PyYAML/lib/yaml/scanner.py b/third_party/python/PyYAML/lib/yaml/scanner.py
new file mode 100644
index 0000000000..098ea7be82
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/scanner.py
@@ -0,0 +1,1444 @@
+
+# Scanner produces tokens of the following types:
+# STREAM-START
+# STREAM-END
+# DIRECTIVE(name, value)
+# DOCUMENT-START
+# DOCUMENT-END
+# BLOCK-SEQUENCE-START
+# BLOCK-MAPPING-START
+# BLOCK-END
+# FLOW-SEQUENCE-START
+# FLOW-MAPPING-START
+# FLOW-SEQUENCE-END
+# FLOW-MAPPING-END
+# BLOCK-ENTRY
+# FLOW-ENTRY
+# KEY
+# VALUE
+# ALIAS(value)
+# ANCHOR(value)
+# TAG(value)
+# SCALAR(value, plain, style)
+#
+# Read comments in the Scanner code for more details.
+#
+
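
The token stream sketched in the comment above can be inspected directly through the public yaml.scan helper, which is driven by this Scanner. A quick illustration; the token names listed are what a small block document typically produces:

    import yaml

    for token in yaml.scan('- a\n- b: c\n'):
        print(token.__class__.__name__)
    # StreamStartToken, BlockSequenceStartToken, BlockEntryToken, ScalarToken,
    # BlockEntryToken, BlockMappingStartToken, KeyToken, ScalarToken,
    # ValueToken, ScalarToken, BlockEndToken, BlockEndToken, StreamEndToken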
+__all__ = ['Scanner', 'ScannerError']
+
+from error import MarkedYAMLError
+from tokens import *
+
+class ScannerError(MarkedYAMLError):
+ pass
+
+class SimpleKey(object):
+    # See the simple keys treatment below.
+
+ def __init__(self, token_number, required, index, line, column, mark):
+ self.token_number = token_number
+ self.required = required
+ self.index = index
+ self.line = line
+ self.column = column
+ self.mark = mark
+
+class Scanner(object):
+
+ def __init__(self):
+ """Initialize the scanner."""
+ # It is assumed that Scanner and Reader will have a common descendant.
+        # Reader does the dirty work of checking for BOM and converting the
+ # input data to Unicode. It also adds NUL to the end.
+ #
+ # Reader supports the following methods
+ # self.peek(i=0) # peek the next i-th character
+ # self.prefix(l=1) # peek the next l characters
+ # self.forward(l=1) # read the next l characters and move the pointer.
+
+        # Have we reached the end of the stream?
+ self.done = False
+
+ # The number of unclosed '{' and '['. `flow_level == 0` means block
+ # context.
+ self.flow_level = 0
+
+ # List of processed tokens that are not yet emitted.
+ self.tokens = []
+
+ # Add the STREAM-START token.
+ self.fetch_stream_start()
+
+ # Number of tokens that were emitted through the `get_token` method.
+ self.tokens_taken = 0
+
+ # The current indentation level.
+ self.indent = -1
+
+ # Past indentation levels.
+ self.indents = []
+
+ # Variables related to simple keys treatment.
+
+ # A simple key is a key that is not denoted by the '?' indicator.
+ # Example of simple keys:
+ # ---
+ # block simple key: value
+ # ? not a simple key:
+ # : { flow simple key: value }
+ # We emit the KEY token before all keys, so when we find a potential
+ # simple key, we try to locate the corresponding ':' indicator.
+ # Simple keys should be limited to a single line and 1024 characters.
+
+ # Can a simple key start at the current position? A simple key may
+ # start:
+ # - at the beginning of the line, not counting indentation spaces
+ # (in block context),
+ # - after '{', '[', ',' (in the flow context),
+ # - after '?', ':', '-' (in the block context).
+ # In the block context, this flag also signifies if a block collection
+ # may start at the current position.
+ self.allow_simple_key = True
+
+ # Keep track of possible simple keys. This is a dictionary. The key
+        # is `flow_level`; there can be no more than one possible simple key
+ # for each level. The value is a SimpleKey record:
+ # (token_number, required, index, line, column, mark)
+ # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow),
+ # '[', or '{' tokens.
+ self.possible_simple_keys = {}
+
+ # Public methods.
+
+ def check_token(self, *choices):
+ # Check if the next token is one of the given types.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ if not choices:
+ return True
+ for choice in choices:
+ if isinstance(self.tokens[0], choice):
+ return True
+ return False
+
+ def peek_token(self):
+        # Return the next token, but do not delete it from the queue.
+ # Return None if no more tokens.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ return self.tokens[0]
+ else:
+ return None
+
+ def get_token(self):
+ # Return the next token.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ self.tokens_taken += 1
+ return self.tokens.pop(0)
+
+ # Private methods.
+
+ def need_more_tokens(self):
+ if self.done:
+ return False
+ if not self.tokens:
+ return True
+ # The current token may be a potential simple key, so we
+ # need to look further.
+ self.stale_possible_simple_keys()
+ if self.next_possible_simple_key() == self.tokens_taken:
+ return True
+
+ def fetch_more_tokens(self):
+
+ # Eat whitespaces and comments until we reach the next token.
+ self.scan_to_next_token()
+
+ # Remove obsolete possible simple keys.
+ self.stale_possible_simple_keys()
+
+ # Compare the current indentation and column. It may add some tokens
+ # and decrease the current indentation level.
+ self.unwind_indent(self.column)
+
+ # Peek the next character.
+ ch = self.peek()
+
+ # Is it the end of stream?
+ if ch == u'\0':
+ return self.fetch_stream_end()
+
+ # Is it a directive?
+ if ch == u'%' and self.check_directive():
+ return self.fetch_directive()
+
+ # Is it the document start?
+ if ch == u'-' and self.check_document_start():
+ return self.fetch_document_start()
+
+ # Is it the document end?
+ if ch == u'.' and self.check_document_end():
+ return self.fetch_document_end()
+
+ # TODO: support for BOM within a stream.
+ #if ch == u'\uFEFF':
+ # return self.fetch_bom() <-- issue BOMToken
+
+ # Note: the order of the following checks is NOT significant.
+
+ # Is it the flow sequence start indicator?
+ if ch == u'[':
+ return self.fetch_flow_sequence_start()
+
+ # Is it the flow mapping start indicator?
+ if ch == u'{':
+ return self.fetch_flow_mapping_start()
+
+ # Is it the flow sequence end indicator?
+ if ch == u']':
+ return self.fetch_flow_sequence_end()
+
+ # Is it the flow mapping end indicator?
+ if ch == u'}':
+ return self.fetch_flow_mapping_end()
+
+ # Is it the flow entry indicator?
+ if ch == u',':
+ return self.fetch_flow_entry()
+
+ # Is it the block entry indicator?
+ if ch == u'-' and self.check_block_entry():
+ return self.fetch_block_entry()
+
+ # Is it the key indicator?
+ if ch == u'?' and self.check_key():
+ return self.fetch_key()
+
+ # Is it the value indicator?
+ if ch == u':' and self.check_value():
+ return self.fetch_value()
+
+ # Is it an alias?
+ if ch == u'*':
+ return self.fetch_alias()
+
+ # Is it an anchor?
+ if ch == u'&':
+ return self.fetch_anchor()
+
+ # Is it a tag?
+ if ch == u'!':
+ return self.fetch_tag()
+
+ # Is it a literal scalar?
+ if ch == u'|' and not self.flow_level:
+ return self.fetch_literal()
+
+ # Is it a folded scalar?
+ if ch == u'>' and not self.flow_level:
+ return self.fetch_folded()
+
+ # Is it a single quoted scalar?
+ if ch == u'\'':
+ return self.fetch_single()
+
+ # Is it a double quoted scalar?
+ if ch == u'\"':
+ return self.fetch_double()
+
+ # It must be a plain scalar then.
+ if self.check_plain():
+ return self.fetch_plain()
+
+ # No? It's an error. Let's produce a nice error message.
+ raise ScannerError("while scanning for the next token", None,
+ "found character %r that cannot start any token"
+ % ch.encode('utf-8'), self.get_mark())
+
+ # Simple keys treatment.
+
+ def next_possible_simple_key(self):
+ # Return the number of the nearest possible simple key. Actually we
+ # don't need to loop through the whole dictionary. We may replace it
+ # with the following code:
+ # if not self.possible_simple_keys:
+ # return None
+ # return self.possible_simple_keys[
+ # min(self.possible_simple_keys.keys())].token_number
+ min_token_number = None
+ for level in self.possible_simple_keys:
+ key = self.possible_simple_keys[level]
+ if min_token_number is None or key.token_number < min_token_number:
+ min_token_number = key.token_number
+ return min_token_number
+
+ def stale_possible_simple_keys(self):
+ # Remove entries that are no longer possible simple keys. According to
+ # the YAML specification, simple keys
+ # - should be limited to a single line,
+ # - should be no longer than 1024 characters.
+ # Disabling this procedure will allow simple keys of any length and
+ # height (may cause problems if indentation is broken though).
+ for level in self.possible_simple_keys.keys():
+ key = self.possible_simple_keys[level]
+ if key.line != self.line \
+ or self.index-key.index > 1024:
+ if key.required:
+ raise ScannerError("while scanning a simple key", key.mark,
+ "could not find expected ':'", self.get_mark())
+ del self.possible_simple_keys[level]
+
+ def save_possible_simple_key(self):
+ # The next token may start a simple key. We check if it's possible
+ # and save its position. This function is called for
+ # ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.
+
+ # Check if a simple key is required at the current position.
+ required = not self.flow_level and self.indent == self.column
+
+        # The next token might be a simple key. Let's save its number and
+ # position.
+ if self.allow_simple_key:
+ self.remove_possible_simple_key()
+ token_number = self.tokens_taken+len(self.tokens)
+ key = SimpleKey(token_number, required,
+ self.index, self.line, self.column, self.get_mark())
+ self.possible_simple_keys[self.flow_level] = key
+
+ def remove_possible_simple_key(self):
+ # Remove the saved possible key position at the current flow level.
+ if self.flow_level in self.possible_simple_keys:
+ key = self.possible_simple_keys[self.flow_level]
+
+ if key.required:
+ raise ScannerError("while scanning a simple key", key.mark,
+ "could not find expected ':'", self.get_mark())
+
+ del self.possible_simple_keys[self.flow_level]
+
+ # Indentation functions.
+
+ def unwind_indent(self, column):
+
+ ## In flow context, tokens should respect indentation.
+ ## Actually the condition should be `self.indent >= column` according to
+ ## the spec. But this condition will prohibit intuitively correct
+ ## constructions such as
+ ## key : {
+ ## }
+ #if self.flow_level and self.indent > column:
+ # raise ScannerError(None, None,
+ # "invalid indentation or unclosed '[' or '{'",
+ # self.get_mark())
+
+ # In the flow context, indentation is ignored. We make the scanner less
+        # restrictive than the specification requires.
+ if self.flow_level:
+ return
+
+ # In block context, we may need to issue the BLOCK-END tokens.
+ while self.indent > column:
+ mark = self.get_mark()
+ self.indent = self.indents.pop()
+ self.tokens.append(BlockEndToken(mark, mark))
+
+ def add_indent(self, column):
+ # Check if we need to increase indentation.
+ if self.indent < column:
+ self.indents.append(self.indent)
+ self.indent = column
+ return True
+ return False
+
+ # Fetchers.
+
+ def fetch_stream_start(self):
+ # We always add STREAM-START as the first token and STREAM-END as the
+ # last token.
+
+ # Read the token.
+ mark = self.get_mark()
+
+ # Add STREAM-START.
+ self.tokens.append(StreamStartToken(mark, mark,
+ encoding=self.encoding))
+
+
+ def fetch_stream_end(self):
+
+ # Set the current indentation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+ self.possible_simple_keys = {}
+
+ # Read the token.
+ mark = self.get_mark()
+
+ # Add STREAM-END.
+ self.tokens.append(StreamEndToken(mark, mark))
+
+        # The stream is finished.
+ self.done = True
+
+ def fetch_directive(self):
+
+ # Set the current indentation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+
+ # Scan and add DIRECTIVE.
+ self.tokens.append(self.scan_directive())
+
+ def fetch_document_start(self):
+ self.fetch_document_indicator(DocumentStartToken)
+
+ def fetch_document_end(self):
+ self.fetch_document_indicator(DocumentEndToken)
+
+ def fetch_document_indicator(self, TokenClass):
+
+ # Set the current indentation to -1.
+ self.unwind_indent(-1)
+
+        # Reset simple keys. Note that there cannot be a block collection
+ # after '---'.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+
+ # Add DOCUMENT-START or DOCUMENT-END.
+ start_mark = self.get_mark()
+ self.forward(3)
+ end_mark = self.get_mark()
+ self.tokens.append(TokenClass(start_mark, end_mark))
+
+ def fetch_flow_sequence_start(self):
+ self.fetch_flow_collection_start(FlowSequenceStartToken)
+
+ def fetch_flow_mapping_start(self):
+ self.fetch_flow_collection_start(FlowMappingStartToken)
+
+ def fetch_flow_collection_start(self, TokenClass):
+
+ # '[' and '{' may start a simple key.
+ self.save_possible_simple_key()
+
+ # Increase the flow level.
+ self.flow_level += 1
+
+ # Simple keys are allowed after '[' and '{'.
+ self.allow_simple_key = True
+
+ # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(TokenClass(start_mark, end_mark))
+
+ def fetch_flow_sequence_end(self):
+ self.fetch_flow_collection_end(FlowSequenceEndToken)
+
+ def fetch_flow_mapping_end(self):
+ self.fetch_flow_collection_end(FlowMappingEndToken)
+
+ def fetch_flow_collection_end(self, TokenClass):
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Decrease the flow level.
+ self.flow_level -= 1
+
+ # No simple keys after ']' or '}'.
+ self.allow_simple_key = False
+
+ # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(TokenClass(start_mark, end_mark))
+
+ def fetch_flow_entry(self):
+
+ # Simple keys are allowed after ','.
+ self.allow_simple_key = True
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add FLOW-ENTRY.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(FlowEntryToken(start_mark, end_mark))
+
+ def fetch_block_entry(self):
+
+ # Block context needs additional checks.
+ if not self.flow_level:
+
+ # Are we allowed to start a new entry?
+ if not self.allow_simple_key:
+ raise ScannerError(None, None,
+ "sequence entries are not allowed here",
+ self.get_mark())
+
+ # We may need to add BLOCK-SEQUENCE-START.
+ if self.add_indent(self.column):
+ mark = self.get_mark()
+ self.tokens.append(BlockSequenceStartToken(mark, mark))
+
+ # It's an error for the block entry to occur in the flow context,
+ # but we let the parser detect this.
+ else:
+ pass
+
+ # Simple keys are allowed after '-'.
+ self.allow_simple_key = True
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add BLOCK-ENTRY.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(BlockEntryToken(start_mark, end_mark))
+
+ def fetch_key(self):
+
+ # Block context needs additional checks.
+ if not self.flow_level:
+
+            # Are we allowed to start a key (not necessarily a simple one)?
+ if not self.allow_simple_key:
+ raise ScannerError(None, None,
+ "mapping keys are not allowed here",
+ self.get_mark())
+
+ # We may need to add BLOCK-MAPPING-START.
+ if self.add_indent(self.column):
+ mark = self.get_mark()
+ self.tokens.append(BlockMappingStartToken(mark, mark))
+
+ # Simple keys are allowed after '?' in the block context.
+ self.allow_simple_key = not self.flow_level
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add KEY.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(KeyToken(start_mark, end_mark))
+
+ def fetch_value(self):
+
+        # Does this value complete a pending simple key?
+ if self.flow_level in self.possible_simple_keys:
+
+ # Add KEY.
+ key = self.possible_simple_keys[self.flow_level]
+ del self.possible_simple_keys[self.flow_level]
+ self.tokens.insert(key.token_number-self.tokens_taken,
+ KeyToken(key.mark, key.mark))
+
+ # If this key starts a new block mapping, we need to add
+ # BLOCK-MAPPING-START.
+ if not self.flow_level:
+ if self.add_indent(key.column):
+ self.tokens.insert(key.token_number-self.tokens_taken,
+ BlockMappingStartToken(key.mark, key.mark))
+
+ # There cannot be two simple keys one after another.
+ self.allow_simple_key = False
+
+ # It must be a part of a complex key.
+ else:
+
+ # Block context needs additional checks.
+ # (Do we really need them? They will be caught by the parser
+ # anyway.)
+ if not self.flow_level:
+
+ # We are allowed to start a complex value if and only if
+ # we can start a simple key.
+ if not self.allow_simple_key:
+ raise ScannerError(None, None,
+ "mapping values are not allowed here",
+ self.get_mark())
+
+ # If this value starts a new block mapping, we need to add
+ # BLOCK-MAPPING-START. It will be detected as an error later by
+ # the parser.
+ if not self.flow_level:
+ if self.add_indent(self.column):
+ mark = self.get_mark()
+ self.tokens.append(BlockMappingStartToken(mark, mark))
+
+ # Simple keys are allowed after ':' in the block context.
+ self.allow_simple_key = not self.flow_level
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add VALUE.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(ValueToken(start_mark, end_mark))
+
+ def fetch_alias(self):
+
+ # ALIAS could be a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after ALIAS.
+ self.allow_simple_key = False
+
+ # Scan and add ALIAS.
+ self.tokens.append(self.scan_anchor(AliasToken))
+
+ def fetch_anchor(self):
+
+ # ANCHOR could start a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after ANCHOR.
+ self.allow_simple_key = False
+
+ # Scan and add ANCHOR.
+ self.tokens.append(self.scan_anchor(AnchorToken))
+
+ def fetch_tag(self):
+
+ # TAG could start a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after TAG.
+ self.allow_simple_key = False
+
+ # Scan and add TAG.
+ self.tokens.append(self.scan_tag())
+
+ def fetch_literal(self):
+ self.fetch_block_scalar(style='|')
+
+ def fetch_folded(self):
+ self.fetch_block_scalar(style='>')
+
+ def fetch_block_scalar(self, style):
+
+ # A simple key may follow a block scalar.
+ self.allow_simple_key = True
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Scan and add SCALAR.
+ self.tokens.append(self.scan_block_scalar(style))
+
+ def fetch_single(self):
+ self.fetch_flow_scalar(style='\'')
+
+ def fetch_double(self):
+ self.fetch_flow_scalar(style='"')
+
+ def fetch_flow_scalar(self, style):
+
+ # A flow scalar could be a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after flow scalars.
+ self.allow_simple_key = False
+
+ # Scan and add SCALAR.
+ self.tokens.append(self.scan_flow_scalar(style))
+
+ def fetch_plain(self):
+
+ # A plain scalar could be a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after plain scalars. But note that `scan_plain` will
+ # change this flag if the scan is finished at the beginning of the
+ # line.
+ self.allow_simple_key = False
+
+ # Scan and add SCALAR. May change `allow_simple_key`.
+ self.tokens.append(self.scan_plain())
+
+ # Checkers.
+
+ def check_directive(self):
+
+ # DIRECTIVE: ^ '%' ...
+ # The '%' indicator is already checked.
+ if self.column == 0:
+ return True
+
+ def check_document_start(self):
+
+ # DOCUMENT-START: ^ '---' (' '|'\n')
+ if self.column == 0:
+ if self.prefix(3) == u'---' \
+ and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
+ return True
+
+ def check_document_end(self):
+
+ # DOCUMENT-END: ^ '...' (' '|'\n')
+ if self.column == 0:
+ if self.prefix(3) == u'...' \
+ and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
+ return True
+
+ def check_block_entry(self):
+
+ # BLOCK-ENTRY: '-' (' '|'\n')
+ return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029'
+
+ def check_key(self):
+
+ # KEY(flow context): '?'
+ if self.flow_level:
+ return True
+
+ # KEY(block context): '?' (' '|'\n')
+ else:
+ return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029'
+
+ def check_value(self):
+
+ # VALUE(flow context): ':'
+ if self.flow_level:
+ return True
+
+ # VALUE(block context): ':' (' '|'\n')
+ else:
+ return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029'
+
+ def check_plain(self):
+
+ # A plain scalar may start with any non-space character except:
+ # '-', '?', ':', ',', '[', ']', '{', '}',
+ # '#', '&', '*', '!', '|', '>', '\'', '\"',
+ # '%', '@', '`'.
+ #
+ # It may also start with
+ # '-', '?', ':'
+ # if it is followed by a non-space character.
+ #
+ # Note that we limit the last rule to the block context (except the
+ # '-' character) because we want the flow context to be space
+ # independent.
+ ch = self.peek()
+ return ch not in u'\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \
+ or (self.peek(1) not in u'\0 \t\r\n\x85\u2028\u2029'
+ and (ch == u'-' or (not self.flow_level and ch in u'?:')))
+
+ # Scanners.
+
+ def scan_to_next_token(self):
+ # We ignore spaces, line breaks and comments.
+ # If we find a line break in the block context, we set the flag
+ # `allow_simple_key` on.
+ # The byte order mark is stripped if it's the first character in the
+ # stream. We do not yet support BOM inside the stream as the
+ # specification requires. Any such mark will be considered as a part
+ # of the document.
+ #
+ # TODO: We need to make tab handling rules more sane. A good rule is
+ # Tabs cannot precede tokens
+ # BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END,
+ # KEY(block), VALUE(block), BLOCK-ENTRY
+ # So the checking code is
+ # if <TAB>:
+ # self.allow_simple_keys = False
+ # We also need to add the check for `allow_simple_keys == True` to
+ # `unwind_indent` before issuing BLOCK-END.
+ # Scanners for block, flow, and plain scalars need to be modified.
+
+ if self.index == 0 and self.peek() == u'\uFEFF':
+ self.forward()
+ found = False
+ while not found:
+ while self.peek() == u' ':
+ self.forward()
+ if self.peek() == u'#':
+ while self.peek() not in u'\0\r\n\x85\u2028\u2029':
+ self.forward()
+ if self.scan_line_break():
+ if not self.flow_level:
+ self.allow_simple_key = True
+ else:
+ found = True
+
+ def scan_directive(self):
+ # See the specification for details.
+ start_mark = self.get_mark()
+ self.forward()
+ name = self.scan_directive_name(start_mark)
+ value = None
+ if name == u'YAML':
+ value = self.scan_yaml_directive_value(start_mark)
+ end_mark = self.get_mark()
+ elif name == u'TAG':
+ value = self.scan_tag_directive_value(start_mark)
+ end_mark = self.get_mark()
+ else:
+ end_mark = self.get_mark()
+ while self.peek() not in u'\0\r\n\x85\u2028\u2029':
+ self.forward()
+ self.scan_directive_ignored_line(start_mark)
+ return DirectiveToken(name, value, start_mark, end_mark)
+
+ def scan_directive_name(self, start_mark):
+ # See the specification for details.
+ length = 0
+ ch = self.peek(length)
+ while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-_':
+ length += 1
+ ch = self.peek(length)
+ if not length:
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ value = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch not in u'\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ return value
+
+ def scan_yaml_directive_value(self, start_mark):
+ # See the specification for details.
+ while self.peek() == u' ':
+ self.forward()
+ major = self.scan_yaml_directive_number(start_mark)
+ if self.peek() != '.':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a digit or '.', but found %r"
+ % self.peek().encode('utf-8'),
+ self.get_mark())
+ self.forward()
+ minor = self.scan_yaml_directive_number(start_mark)
+ if self.peek() not in u'\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a digit or ' ', but found %r"
+ % self.peek().encode('utf-8'),
+ self.get_mark())
+ return (major, minor)
+
+ def scan_yaml_directive_number(self, start_mark):
+ # See the specification for details.
+ ch = self.peek()
+ if not (u'0' <= ch <= u'9'):
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a digit, but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ length = 0
+ while u'0' <= self.peek(length) <= u'9':
+ length += 1
+ value = int(self.prefix(length))
+ self.forward(length)
+ return value
+
+ def scan_tag_directive_value(self, start_mark):
+ # See the specification for details.
+ while self.peek() == u' ':
+ self.forward()
+ handle = self.scan_tag_directive_handle(start_mark)
+ while self.peek() == u' ':
+ self.forward()
+ prefix = self.scan_tag_directive_prefix(start_mark)
+ return (handle, prefix)
+
+ def scan_tag_directive_handle(self, start_mark):
+ # See the specification for details.
+ value = self.scan_tag_handle('directive', start_mark)
+ ch = self.peek()
+ if ch != u' ':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected ' ', but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ return value
+
+ def scan_tag_directive_prefix(self, start_mark):
+ # See the specification for details.
+ value = self.scan_tag_uri('directive', start_mark)
+ ch = self.peek()
+ if ch not in u'\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected ' ', but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ return value
+
+ def scan_directive_ignored_line(self, start_mark):
+ # See the specification for details.
+ while self.peek() == u' ':
+ self.forward()
+ if self.peek() == u'#':
+ while self.peek() not in u'\0\r\n\x85\u2028\u2029':
+ self.forward()
+ ch = self.peek()
+ if ch not in u'\0\r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a comment or a line break, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ self.scan_line_break()
+
+ def scan_anchor(self, TokenClass):
+ # The specification does not restrict characters for anchors and
+ # aliases. This may lead to problems, for instance, the document:
+ # [ *alias, value ]
+ # can be interpreted in two ways, as
+ # [ "value" ]
+ # and
+ # [ *alias , "value" ]
+ # Therefore we restrict aliases to numbers and ASCII letters.
+ start_mark = self.get_mark()
+ indicator = self.peek()
+ if indicator == u'*':
+ name = 'alias'
+ else:
+ name = 'anchor'
+ self.forward()
+ length = 0
+ ch = self.peek(length)
+ while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-_':
+ length += 1
+ ch = self.peek(length)
+ if not length:
+ raise ScannerError("while scanning an %s" % name, start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ value = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch not in u'\0 \t\r\n\x85\u2028\u2029?:,]}%@`':
+ raise ScannerError("while scanning an %s" % name, start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ end_mark = self.get_mark()
+ return TokenClass(value, start_mark, end_mark)
+
+ def scan_tag(self):
+ # See the specification for details.
+ start_mark = self.get_mark()
+ ch = self.peek(1)
+ if ch == u'<':
+ handle = None
+ self.forward(2)
+ suffix = self.scan_tag_uri('tag', start_mark)
+ if self.peek() != u'>':
+ raise ScannerError("while parsing a tag", start_mark,
+ "expected '>', but found %r" % self.peek().encode('utf-8'),
+ self.get_mark())
+ self.forward()
+ elif ch in u'\0 \t\r\n\x85\u2028\u2029':
+ handle = None
+ suffix = u'!'
+ self.forward()
+ else:
+ length = 1
+ use_handle = False
+ while ch not in u'\0 \r\n\x85\u2028\u2029':
+ if ch == u'!':
+ use_handle = True
+ break
+ length += 1
+ ch = self.peek(length)
+ handle = u'!'
+ if use_handle:
+ handle = self.scan_tag_handle('tag', start_mark)
+ else:
+ handle = u'!'
+ self.forward()
+ suffix = self.scan_tag_uri('tag', start_mark)
+ ch = self.peek()
+ if ch not in u'\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a tag", start_mark,
+ "expected ' ', but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ value = (handle, suffix)
+ end_mark = self.get_mark()
+ return TagToken(value, start_mark, end_mark)
+
+ def scan_block_scalar(self, style):
+ # See the specification for details.
+
+ if style == '>':
+ folded = True
+ else:
+ folded = False
+
+ chunks = []
+ start_mark = self.get_mark()
+
+ # Scan the header.
+ self.forward()
+ chomping, increment = self.scan_block_scalar_indicators(start_mark)
+ self.scan_block_scalar_ignored_line(start_mark)
+
+ # Determine the indentation level and go to the first non-empty line.
+ min_indent = self.indent+1
+ if min_indent < 1:
+ min_indent = 1
+ if increment is None:
+ breaks, max_indent, end_mark = self.scan_block_scalar_indentation()
+ indent = max(min_indent, max_indent)
+ else:
+ indent = min_indent+increment-1
+ breaks, end_mark = self.scan_block_scalar_breaks(indent)
+ line_break = u''
+
+ # Scan the inner part of the block scalar.
+ while self.column == indent and self.peek() != u'\0':
+ chunks.extend(breaks)
+ leading_non_space = self.peek() not in u' \t'
+ length = 0
+ while self.peek(length) not in u'\0\r\n\x85\u2028\u2029':
+ length += 1
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ line_break = self.scan_line_break()
+ breaks, end_mark = self.scan_block_scalar_breaks(indent)
+ if self.column == indent and self.peek() != u'\0':
+
+ # Unfortunately, folding rules are ambiguous.
+ #
+ # This is the folding according to the specification:
+
+ if folded and line_break == u'\n' \
+ and leading_non_space and self.peek() not in u' \t':
+ if not breaks:
+ chunks.append(u' ')
+ else:
+ chunks.append(line_break)
+
+ # This is Clark Evans's interpretation (also in the spec
+ # examples):
+ #
+ #if folded and line_break == u'\n':
+ # if not breaks:
+ # if self.peek() not in ' \t':
+ # chunks.append(u' ')
+ # else:
+ # chunks.append(line_break)
+ #else:
+ # chunks.append(line_break)
+ else:
+ break
+
+ # Chomp the tail.
+ if chomping is not False:
+ chunks.append(line_break)
+ if chomping is True:
+ chunks.extend(breaks)
+
+ # We are done.
+ return ScalarToken(u''.join(chunks), False, start_mark, end_mark,
+ style)
+
+ def scan_block_scalar_indicators(self, start_mark):
+ # See the specification for details.
+ chomping = None
+ increment = None
+ ch = self.peek()
+ if ch in u'+-':
+ if ch == '+':
+ chomping = True
+ else:
+ chomping = False
+ self.forward()
+ ch = self.peek()
+ if ch in u'0123456789':
+ increment = int(ch)
+ if increment == 0:
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected indentation indicator in the range 1-9, but found 0",
+ self.get_mark())
+ self.forward()
+ elif ch in u'0123456789':
+ increment = int(ch)
+ if increment == 0:
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected indentation indicator in the range 1-9, but found 0",
+ self.get_mark())
+ self.forward()
+ ch = self.peek()
+ if ch in u'+-':
+ if ch == '+':
+ chomping = True
+ else:
+ chomping = False
+ self.forward()
+ ch = self.peek()
+ if ch not in u'\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected chomping or indentation indicators, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ return chomping, increment
+
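
The chomping ('-' strip, '+' keep, none clip) and explicit indentation indicators parsed above are easiest to see from loaded values. A hedged illustration through the public API:

    import yaml

    print(repr(yaml.safe_load('a: |\n  one\n  two\n')['a']))    # 'one\ntwo\n' (clip)
    print(repr(yaml.safe_load('a: |-\n  one\n  two\n')['a']))   # 'one\ntwo'   (strip)
    print(repr(yaml.safe_load('a: >-\n  one\n  two\n')['a']))   # 'one two'    (fold + strip)
    print(repr(yaml.safe_load('a: |2\n   bullet\n')['a']))      # ' bullet\n'  (indent indicator 2)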
+ def scan_block_scalar_ignored_line(self, start_mark):
+ # See the specification for details.
+ while self.peek() == u' ':
+ self.forward()
+ if self.peek() == u'#':
+ while self.peek() not in u'\0\r\n\x85\u2028\u2029':
+ self.forward()
+ ch = self.peek()
+ if ch not in u'\0\r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected a comment or a line break, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ self.scan_line_break()
+
+ def scan_block_scalar_indentation(self):
+ # See the specification for details.
+ chunks = []
+ max_indent = 0
+ end_mark = self.get_mark()
+ while self.peek() in u' \r\n\x85\u2028\u2029':
+ if self.peek() != u' ':
+ chunks.append(self.scan_line_break())
+ end_mark = self.get_mark()
+ else:
+ self.forward()
+ if self.column > max_indent:
+ max_indent = self.column
+ return chunks, max_indent, end_mark
+
+ def scan_block_scalar_breaks(self, indent):
+ # See the specification for details.
+ chunks = []
+ end_mark = self.get_mark()
+ while self.column < indent and self.peek() == u' ':
+ self.forward()
+ while self.peek() in u'\r\n\x85\u2028\u2029':
+ chunks.append(self.scan_line_break())
+ end_mark = self.get_mark()
+ while self.column < indent and self.peek() == u' ':
+ self.forward()
+ return chunks, end_mark
+
+ def scan_flow_scalar(self, style):
+ # See the specification for details.
+        # Note that we loosen indentation rules for quoted scalars. Quoted
+        # scalars don't need to adhere to indentation because " and ' clearly
+        # mark their beginning and end. Therefore we are less
+        # restrictive than the specification requires. We only need to check
+ # that document separators are not included in scalars.
+ if style == '"':
+ double = True
+ else:
+ double = False
+ chunks = []
+ start_mark = self.get_mark()
+ quote = self.peek()
+ self.forward()
+ chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
+ while self.peek() != quote:
+ chunks.extend(self.scan_flow_scalar_spaces(double, start_mark))
+ chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
+ self.forward()
+ end_mark = self.get_mark()
+ return ScalarToken(u''.join(chunks), False, start_mark, end_mark,
+ style)
+
+ ESCAPE_REPLACEMENTS = {
+ u'0': u'\0',
+ u'a': u'\x07',
+ u'b': u'\x08',
+ u't': u'\x09',
+ u'\t': u'\x09',
+ u'n': u'\x0A',
+ u'v': u'\x0B',
+ u'f': u'\x0C',
+ u'r': u'\x0D',
+ u'e': u'\x1B',
+ u' ': u'\x20',
+ u'\"': u'\"',
+ u'\\': u'\\',
+ u'/': u'/',
+ u'N': u'\x85',
+ u'_': u'\xA0',
+ u'L': u'\u2028',
+ u'P': u'\u2029',
+ }
+
+ ESCAPE_CODES = {
+ u'x': 2,
+ u'u': 4,
+ u'U': 8,
+ }
+
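
ESCAPE_REPLACEMENTS handles the single-character escapes and ESCAPE_CODES tells the scanner how many hex digits follow \x, \u and \U in double-quoted scalars. A hedged round-trip through the public API:

    import yaml

    # \t comes from ESCAPE_REPLACEMENTS; \x consumes 2 hex digits and \u consumes 4.
    print(repr(yaml.safe_load(u'"tab:\\t x:\\x41 u:\\u00e9"')))
    # -> u'tab:\t x:A u:\xe9'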
+ def scan_flow_scalar_non_spaces(self, double, start_mark):
+ # See the specification for details.
+ chunks = []
+ while True:
+ length = 0
+ while self.peek(length) not in u'\'\"\\\0 \t\r\n\x85\u2028\u2029':
+ length += 1
+ if length:
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ ch = self.peek()
+ if not double and ch == u'\'' and self.peek(1) == u'\'':
+ chunks.append(u'\'')
+ self.forward(2)
+ elif (double and ch == u'\'') or (not double and ch in u'\"\\'):
+ chunks.append(ch)
+ self.forward()
+ elif double and ch == u'\\':
+ self.forward()
+ ch = self.peek()
+ if ch in self.ESCAPE_REPLACEMENTS:
+ chunks.append(self.ESCAPE_REPLACEMENTS[ch])
+ self.forward()
+ elif ch in self.ESCAPE_CODES:
+ length = self.ESCAPE_CODES[ch]
+ self.forward()
+ for k in range(length):
+ if self.peek(k) not in u'0123456789ABCDEFabcdef':
+ raise ScannerError("while scanning a double-quoted scalar", start_mark,
+                                "expected escape sequence of %d hexadecimal numbers, but found %r" %
+ (length, self.peek(k).encode('utf-8')), self.get_mark())
+ code = int(self.prefix(length), 16)
+ chunks.append(unichr(code))
+ self.forward(length)
+ elif ch in u'\r\n\x85\u2028\u2029':
+ self.scan_line_break()
+ chunks.extend(self.scan_flow_scalar_breaks(double, start_mark))
+ else:
+ raise ScannerError("while scanning a double-quoted scalar", start_mark,
+ "found unknown escape character %r" % ch.encode('utf-8'), self.get_mark())
+ else:
+ return chunks
+
+ def scan_flow_scalar_spaces(self, double, start_mark):
+ # See the specification for details.
+ chunks = []
+ length = 0
+ while self.peek(length) in u' \t':
+ length += 1
+ whitespaces = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch == u'\0':
+ raise ScannerError("while scanning a quoted scalar", start_mark,
+ "found unexpected end of stream", self.get_mark())
+ elif ch in u'\r\n\x85\u2028\u2029':
+ line_break = self.scan_line_break()
+ breaks = self.scan_flow_scalar_breaks(double, start_mark)
+ if line_break != u'\n':
+ chunks.append(line_break)
+ elif not breaks:
+ chunks.append(u' ')
+ chunks.extend(breaks)
+ else:
+ chunks.append(whitespaces)
+ return chunks
+
+ def scan_flow_scalar_breaks(self, double, start_mark):
+ # See the specification for details.
+ chunks = []
+ while True:
+ # Instead of checking indentation, we check for document
+ # separators.
+ prefix = self.prefix(3)
+ if (prefix == u'---' or prefix == u'...') \
+ and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a quoted scalar", start_mark,
+ "found unexpected document separator", self.get_mark())
+ while self.peek() in u' \t':
+ self.forward()
+ if self.peek() in u'\r\n\x85\u2028\u2029':
+ chunks.append(self.scan_line_break())
+ else:
+ return chunks
+
+ def scan_plain(self):
+ # See the specification for details.
+ # We add an additional restriction for the flow context:
+ # plain scalars in the flow context cannot contain ',' or '?'.
+ # We also keep track of the `allow_simple_key` flag here.
+        # Indentation rules are loosened for the flow context.
+ chunks = []
+ start_mark = self.get_mark()
+ end_mark = start_mark
+ indent = self.indent+1
+ # We allow zero indentation for scalars, but then we need to check for
+ # document separators at the beginning of the line.
+ #if indent == 0:
+ # indent = 1
+ spaces = []
+ while True:
+ length = 0
+ if self.peek() == u'#':
+ break
+ while True:
+ ch = self.peek(length)
+ if ch in u'\0 \t\r\n\x85\u2028\u2029' \
+ or (ch == u':' and
+ self.peek(length+1) in u'\0 \t\r\n\x85\u2028\u2029'
+ + (u',[]{}' if self.flow_level else u''))\
+ or (self.flow_level and ch in u',?[]{}'):
+ break
+ length += 1
+ if length == 0:
+ break
+ self.allow_simple_key = False
+ chunks.extend(spaces)
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ end_mark = self.get_mark()
+ spaces = self.scan_plain_spaces(indent, start_mark)
+ if not spaces or self.peek() == u'#' \
+ or (not self.flow_level and self.column < indent):
+ break
+ return ScalarToken(u''.join(chunks), True, start_mark, end_mark)
+
+ def scan_plain_spaces(self, indent, start_mark):
+ # See the specification for details.
+ # The specification is really confusing about tabs in plain scalars.
+ # We just forbid them completely. Do not use tabs in YAML!
+ chunks = []
+ length = 0
+ while self.peek(length) in u' ':
+ length += 1
+ whitespaces = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch in u'\r\n\x85\u2028\u2029':
+ line_break = self.scan_line_break()
+ self.allow_simple_key = True
+ prefix = self.prefix(3)
+ if (prefix == u'---' or prefix == u'...') \
+ and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
+ return
+ breaks = []
+ while self.peek() in u' \r\n\x85\u2028\u2029':
+ if self.peek() == ' ':
+ self.forward()
+ else:
+ breaks.append(self.scan_line_break())
+ prefix = self.prefix(3)
+ if (prefix == u'---' or prefix == u'...') \
+ and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
+ return
+ if line_break != u'\n':
+ chunks.append(line_break)
+ elif not breaks:
+ chunks.append(u' ')
+ chunks.extend(breaks)
+ elif whitespaces:
+ chunks.append(whitespaces)
+ return chunks
+
+ def scan_tag_handle(self, name, start_mark):
+ # See the specification for details.
+        # For some strange reason, the specification does not allow '_' in
+ # tag handles. I have allowed it anyway.
+ ch = self.peek()
+ if ch != u'!':
+ raise ScannerError("while scanning a %s" % name, start_mark,
+ "expected '!', but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ length = 1
+ ch = self.peek(length)
+ if ch != u' ':
+ while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-_':
+ length += 1
+ ch = self.peek(length)
+ if ch != u'!':
+ self.forward(length)
+ raise ScannerError("while scanning a %s" % name, start_mark,
+ "expected '!', but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ length += 1
+ value = self.prefix(length)
+ self.forward(length)
+ return value
+
+ def scan_tag_uri(self, name, start_mark):
+ # See the specification for details.
+        # Note: we do not check if the URI is well-formed.
+ chunks = []
+ length = 0
+ ch = self.peek(length)
+ while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-;/?:@&=+$,_.!~*\'()[]%':
+ if ch == u'%':
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ length = 0
+ chunks.append(self.scan_uri_escapes(name, start_mark))
+ else:
+ length += 1
+ ch = self.peek(length)
+ if length:
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ length = 0
+ if not chunks:
+ raise ScannerError("while parsing a %s" % name, start_mark,
+ "expected URI, but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ return u''.join(chunks)
+
+ def scan_uri_escapes(self, name, start_mark):
+ # See the specification for details.
+ bytes = []
+ mark = self.get_mark()
+ while self.peek() == u'%':
+ self.forward()
+ for k in range(2):
+ if self.peek(k) not in u'0123456789ABCDEFabcdef':
+ raise ScannerError("while scanning a %s" % name, start_mark,
+                        "expected URI escape sequence of 2 hexadecimal numbers, but found %r" %
+ (self.peek(k).encode('utf-8')), self.get_mark())
+ bytes.append(chr(int(self.prefix(2), 16)))
+ self.forward(2)
+ try:
+ value = unicode(''.join(bytes), 'utf-8')
+ except UnicodeDecodeError, exc:
+ raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark)
+ return value
+
+ def scan_line_break(self):
+ # Transforms:
+ # '\r\n' : '\n'
+ # '\r' : '\n'
+ # '\n' : '\n'
+ # '\x85' : '\n'
+ # '\u2028' : '\u2028'
+        #   '\u2029'    :   '\u2029'
+ # default : ''
+ ch = self.peek()
+ if ch in u'\r\n\x85':
+ if self.prefix(2) == u'\r\n':
+ self.forward(2)
+ else:
+ self.forward()
+ return u'\n'
+ elif ch in u'\u2028\u2029':
+ self.forward()
+ return ch
+ return u''
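An illustrative sketch of the escape handling above, assuming PyYAML is installed and importable as yaml (the input document is invented): ESCAPE_REPLACEMENTS resolves single-character escapes such as \t, ESCAPE_CODES drives the fixed-width \xXX, \uXXXX and \UXXXXXXXX forms, and a line break inside a double-quoted scalar is folded to a single space by scan_flow_scalar_spaces.

    import yaml

    # \t comes from ESCAPE_REPLACEMENTS, \x41 is a 2-digit hex escape ('A'),
    # and the physical line break inside the quotes folds to one space.
    value = yaml.safe_load('"col1\\tcol2 \\x41\nnext"')
    print(repr(value))  # 'col1\tcol2 A next'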
diff --git a/third_party/python/PyYAML/lib/yaml/serializer.py b/third_party/python/PyYAML/lib/yaml/serializer.py
new file mode 100644
index 0000000000..0bf1e96dc1
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/serializer.py
@@ -0,0 +1,111 @@
+
+__all__ = ['Serializer', 'SerializerError']
+
+from error import YAMLError
+from events import *
+from nodes import *
+
+class SerializerError(YAMLError):
+ pass
+
+class Serializer(object):
+
+ ANCHOR_TEMPLATE = u'id%03d'
+
+ def __init__(self, encoding=None,
+ explicit_start=None, explicit_end=None, version=None, tags=None):
+ self.use_encoding = encoding
+ self.use_explicit_start = explicit_start
+ self.use_explicit_end = explicit_end
+ self.use_version = version
+ self.use_tags = tags
+ self.serialized_nodes = {}
+ self.anchors = {}
+ self.last_anchor_id = 0
+ self.closed = None
+
+ def open(self):
+ if self.closed is None:
+ self.emit(StreamStartEvent(encoding=self.use_encoding))
+ self.closed = False
+ elif self.closed:
+ raise SerializerError("serializer is closed")
+ else:
+ raise SerializerError("serializer is already opened")
+
+ def close(self):
+ if self.closed is None:
+ raise SerializerError("serializer is not opened")
+ elif not self.closed:
+ self.emit(StreamEndEvent())
+ self.closed = True
+
+ #def __del__(self):
+ # self.close()
+
+ def serialize(self, node):
+ if self.closed is None:
+ raise SerializerError("serializer is not opened")
+ elif self.closed:
+ raise SerializerError("serializer is closed")
+ self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
+ version=self.use_version, tags=self.use_tags))
+ self.anchor_node(node)
+ self.serialize_node(node, None, None)
+ self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
+ self.serialized_nodes = {}
+ self.anchors = {}
+ self.last_anchor_id = 0
+
+ def anchor_node(self, node):
+ if node in self.anchors:
+ if self.anchors[node] is None:
+ self.anchors[node] = self.generate_anchor(node)
+ else:
+ self.anchors[node] = None
+ if isinstance(node, SequenceNode):
+ for item in node.value:
+ self.anchor_node(item)
+ elif isinstance(node, MappingNode):
+ for key, value in node.value:
+ self.anchor_node(key)
+ self.anchor_node(value)
+
+ def generate_anchor(self, node):
+ self.last_anchor_id += 1
+ return self.ANCHOR_TEMPLATE % self.last_anchor_id
+
+ def serialize_node(self, node, parent, index):
+ alias = self.anchors[node]
+ if node in self.serialized_nodes:
+ self.emit(AliasEvent(alias))
+ else:
+ self.serialized_nodes[node] = True
+ self.descend_resolver(parent, index)
+ if isinstance(node, ScalarNode):
+ detected_tag = self.resolve(ScalarNode, node.value, (True, False))
+ default_tag = self.resolve(ScalarNode, node.value, (False, True))
+ implicit = (node.tag == detected_tag), (node.tag == default_tag)
+ self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
+ style=node.style))
+ elif isinstance(node, SequenceNode):
+ implicit = (node.tag
+ == self.resolve(SequenceNode, node.value, True))
+ self.emit(SequenceStartEvent(alias, node.tag, implicit,
+ flow_style=node.flow_style))
+ index = 0
+ for item in node.value:
+ self.serialize_node(item, node, index)
+ index += 1
+ self.emit(SequenceEndEvent())
+ elif isinstance(node, MappingNode):
+ implicit = (node.tag
+ == self.resolve(MappingNode, node.value, True))
+ self.emit(MappingStartEvent(alias, node.tag, implicit,
+ flow_style=node.flow_style))
+ for key, value in node.value:
+ self.serialize_node(key, node, None)
+ self.serialize_node(value, node, key)
+ self.emit(MappingEndEvent())
+ self.ascend_resolver()
+
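An illustrative sketch of the Serializer above, assuming PyYAML is installed and importable as yaml (the document is invented): anchor_node() marks nodes that are reachable more than once, generate_anchor() names them from ANCHOR_TEMPLATE, and serialize_node() emits the second reference as an alias event.

    import yaml

    node = yaml.compose("shared: &a [1, 2]\nother: *a\n")
    # The composer resolved the alias to the same node object, so the
    # serializer re-emits it with a generated anchor (id001 style) plus an alias.
    print(yaml.serialize(node))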
diff --git a/third_party/python/PyYAML/lib/yaml/tokens.py b/third_party/python/PyYAML/lib/yaml/tokens.py
new file mode 100644
index 0000000000..4d0b48a394
--- /dev/null
+++ b/third_party/python/PyYAML/lib/yaml/tokens.py
@@ -0,0 +1,104 @@
+
+class Token(object):
+ def __init__(self, start_mark, end_mark):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ def __repr__(self):
+ attributes = [key for key in self.__dict__
+ if not key.endswith('_mark')]
+ attributes.sort()
+ arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
+ for key in attributes])
+ return '%s(%s)' % (self.__class__.__name__, arguments)
+
+#class BOMToken(Token):
+# id = '<byte order mark>'
+
+class DirectiveToken(Token):
+ id = '<directive>'
+ def __init__(self, name, value, start_mark, end_mark):
+ self.name = name
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class DocumentStartToken(Token):
+ id = '<document start>'
+
+class DocumentEndToken(Token):
+ id = '<document end>'
+
+class StreamStartToken(Token):
+ id = '<stream start>'
+ def __init__(self, start_mark=None, end_mark=None,
+ encoding=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.encoding = encoding
+
+class StreamEndToken(Token):
+ id = '<stream end>'
+
+class BlockSequenceStartToken(Token):
+ id = '<block sequence start>'
+
+class BlockMappingStartToken(Token):
+ id = '<block mapping start>'
+
+class BlockEndToken(Token):
+ id = '<block end>'
+
+class FlowSequenceStartToken(Token):
+ id = '['
+
+class FlowMappingStartToken(Token):
+ id = '{'
+
+class FlowSequenceEndToken(Token):
+ id = ']'
+
+class FlowMappingEndToken(Token):
+ id = '}'
+
+class KeyToken(Token):
+ id = '?'
+
+class ValueToken(Token):
+ id = ':'
+
+class BlockEntryToken(Token):
+ id = '-'
+
+class FlowEntryToken(Token):
+ id = ','
+
+class AliasToken(Token):
+ id = '<alias>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class AnchorToken(Token):
+ id = '<anchor>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class TagToken(Token):
+ id = '<tag>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class ScalarToken(Token):
+ id = '<scalar>'
+ def __init__(self, value, plain, start_mark, end_mark, style=None):
+ self.value = value
+ self.plain = plain
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.style = style
+
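An illustrative sketch of the token classes above, assuming PyYAML is installed and importable as yaml: yaml.scan() yields them in stream order, and only a few (scalars, anchors, aliases, tags, directives) carry a value.

    import yaml

    for token in yaml.scan("- a\n- b\n"):
        print(token.__class__.__name__, getattr(token, 'value', ''))
    # StreamStartToken, BlockSequenceStartToken, BlockEntryToken, ScalarToken 'a',
    # BlockEntryToken, ScalarToken 'b', BlockEndToken, StreamEndToken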
diff --git a/third_party/python/PyYAML/lib3/yaml/__init__.py b/third_party/python/PyYAML/lib3/yaml/__init__.py
new file mode 100644
index 0000000000..13d687c501
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/__init__.py
@@ -0,0 +1,427 @@
+
+from .error import *
+
+from .tokens import *
+from .events import *
+from .nodes import *
+
+from .loader import *
+from .dumper import *
+
+__version__ = '5.3.1'
+try:
+ from .cyaml import *
+ __with_libyaml__ = True
+except ImportError:
+ __with_libyaml__ = False
+
+import io
+
+#------------------------------------------------------------------------------
+# Warnings control
+#------------------------------------------------------------------------------
+
+# 'Global' warnings state:
+_warnings_enabled = {
+ 'YAMLLoadWarning': True,
+}
+
+# Get or set global warnings' state
+def warnings(settings=None):
+ if settings is None:
+ return _warnings_enabled
+
+ if type(settings) is dict:
+ for key in settings:
+ if key in _warnings_enabled:
+ _warnings_enabled[key] = settings[key]
+
+# Warn when load() is called without Loader=...
+class YAMLLoadWarning(RuntimeWarning):
+ pass
+
+def load_warning(method):
+ if _warnings_enabled['YAMLLoadWarning'] is False:
+ return
+
+ import warnings
+
+ message = (
+ "calling yaml.%s() without Loader=... is deprecated, as the "
+ "default Loader is unsafe. Please read "
+ "https://msg.pyyaml.org/load for full details."
+ ) % method
+
+ warnings.warn(message, YAMLLoadWarning, stacklevel=3)
+
+#------------------------------------------------------------------------------
+def scan(stream, Loader=Loader):
+ """
+ Scan a YAML stream and produce scanning tokens.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_token():
+ yield loader.get_token()
+ finally:
+ loader.dispose()
+
+def parse(stream, Loader=Loader):
+ """
+ Parse a YAML stream and produce parsing events.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_event():
+ yield loader.get_event()
+ finally:
+ loader.dispose()
+
+def compose(stream, Loader=Loader):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding representation tree.
+ """
+ loader = Loader(stream)
+ try:
+ return loader.get_single_node()
+ finally:
+ loader.dispose()
+
+def compose_all(stream, Loader=Loader):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding representation trees.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_node():
+ yield loader.get_node()
+ finally:
+ loader.dispose()
+
+def load(stream, Loader=None):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+ """
+ if Loader is None:
+ load_warning('load')
+ Loader = FullLoader
+
+ loader = Loader(stream)
+ try:
+ return loader.get_single_data()
+ finally:
+ loader.dispose()
+
+def load_all(stream, Loader=None):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+ """
+ if Loader is None:
+ load_warning('load_all')
+ Loader = FullLoader
+
+ loader = Loader(stream)
+ try:
+ while loader.check_data():
+ yield loader.get_data()
+ finally:
+ loader.dispose()
+
+def full_load(stream):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+
+ Resolve all tags except those known to be
+ unsafe on untrusted input.
+ """
+ return load(stream, FullLoader)
+
+def full_load_all(stream):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+
+ Resolve all tags except those known to be
+ unsafe on untrusted input.
+ """
+ return load_all(stream, FullLoader)
+
+def safe_load(stream):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+
+ Resolve only basic YAML tags. This is known
+ to be safe for untrusted input.
+ """
+ return load(stream, SafeLoader)
+
+def safe_load_all(stream):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+
+ Resolve only basic YAML tags. This is known
+ to be safe for untrusted input.
+ """
+ return load_all(stream, SafeLoader)
+
+def unsafe_load(stream):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+
+ Resolve all tags, even those known to be
+ unsafe on untrusted input.
+ """
+ return load(stream, UnsafeLoader)
+
+def unsafe_load_all(stream):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+
+ Resolve all tags, even those known to be
+ unsafe on untrusted input.
+ """
+ return load_all(stream, UnsafeLoader)
+
+def emit(events, stream=None, Dumper=Dumper,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None):
+ """
+ Emit YAML parsing events into a stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ stream = io.StringIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ try:
+ for event in events:
+ dumper.emit(event)
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def serialize_all(nodes, stream=None, Dumper=Dumper,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ """
+ Serialize a sequence of representation trees into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ if encoding is None:
+ stream = io.StringIO()
+ else:
+ stream = io.BytesIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break,
+ encoding=encoding, version=version, tags=tags,
+ explicit_start=explicit_start, explicit_end=explicit_end)
+ try:
+ dumper.open()
+ for node in nodes:
+ dumper.serialize(node)
+ dumper.close()
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def serialize(node, stream=None, Dumper=Dumper, **kwds):
+ """
+ Serialize a representation tree into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ return serialize_all([node], stream, Dumper=Dumper, **kwds)
+
+def dump_all(documents, stream=None, Dumper=Dumper,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ """
+ Serialize a sequence of Python objects into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ if encoding is None:
+ stream = io.StringIO()
+ else:
+ stream = io.BytesIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, default_style=default_style,
+ default_flow_style=default_flow_style,
+ canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break,
+ encoding=encoding, version=version, tags=tags,
+ explicit_start=explicit_start, explicit_end=explicit_end, sort_keys=sort_keys)
+ try:
+ dumper.open()
+ for data in documents:
+ dumper.represent(data)
+ dumper.close()
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def dump(data, stream=None, Dumper=Dumper, **kwds):
+ """
+ Serialize a Python object into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all([data], stream, Dumper=Dumper, **kwds)
+
+def safe_dump_all(documents, stream=None, **kwds):
+ """
+ Serialize a sequence of Python objects into a YAML stream.
+ Produce only basic YAML tags.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
+
+def safe_dump(data, stream=None, **kwds):
+ """
+ Serialize a Python object into a YAML stream.
+ Produce only basic YAML tags.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all([data], stream, Dumper=SafeDumper, **kwds)
+
+def add_implicit_resolver(tag, regexp, first=None,
+ Loader=None, Dumper=Dumper):
+ """
+ Add an implicit scalar detector.
+ If an implicit scalar value matches the given regexp,
+ the corresponding tag is assigned to the scalar.
+ first is a sequence of possible initial characters or None.
+ """
+ if Loader is None:
+ loader.Loader.add_implicit_resolver(tag, regexp, first)
+ loader.FullLoader.add_implicit_resolver(tag, regexp, first)
+ loader.UnsafeLoader.add_implicit_resolver(tag, regexp, first)
+ else:
+ Loader.add_implicit_resolver(tag, regexp, first)
+ Dumper.add_implicit_resolver(tag, regexp, first)
+
+def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=Dumper):
+ """
+ Add a path based resolver for the given tag.
+ A path is a list of keys that forms a path
+ to a node in the representation tree.
+ Keys can be string values, integers, or None.
+ """
+ if Loader is None:
+ loader.Loader.add_path_resolver(tag, path, kind)
+ loader.FullLoader.add_path_resolver(tag, path, kind)
+ loader.UnsafeLoader.add_path_resolver(tag, path, kind)
+ else:
+ Loader.add_path_resolver(tag, path, kind)
+ Dumper.add_path_resolver(tag, path, kind)
+
+def add_constructor(tag, constructor, Loader=None):
+ """
+ Add a constructor for the given tag.
+ Constructor is a function that accepts a Loader instance
+ and a node object and produces the corresponding Python object.
+ """
+ if Loader is None:
+ loader.Loader.add_constructor(tag, constructor)
+ loader.FullLoader.add_constructor(tag, constructor)
+ loader.UnsafeLoader.add_constructor(tag, constructor)
+ else:
+ Loader.add_constructor(tag, constructor)
+
+def add_multi_constructor(tag_prefix, multi_constructor, Loader=None):
+ """
+ Add a multi-constructor for the given tag prefix.
+ Multi-constructor is called for a node if its tag starts with tag_prefix.
+ Multi-constructor accepts a Loader instance, a tag suffix,
+ and a node object and produces the corresponding Python object.
+ """
+ if Loader is None:
+ loader.Loader.add_multi_constructor(tag_prefix, multi_constructor)
+ loader.FullLoader.add_multi_constructor(tag_prefix, multi_constructor)
+ loader.UnsafeLoader.add_multi_constructor(tag_prefix, multi_constructor)
+ else:
+ Loader.add_multi_constructor(tag_prefix, multi_constructor)
+
+def add_representer(data_type, representer, Dumper=Dumper):
+ """
+ Add a representer for the given type.
+ Representer is a function accepting a Dumper instance
+ and an instance of the given data type
+ and producing the corresponding representation node.
+ """
+ Dumper.add_representer(data_type, representer)
+
+def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
+ """
+ Add a representer for the given type.
+ Multi-representer is a function accepting a Dumper instance
+ and an instance of the given data type or subtype
+ and producing the corresponding representation node.
+ """
+ Dumper.add_multi_representer(data_type, multi_representer)
+
+class YAMLObjectMetaclass(type):
+ """
+ The metaclass for YAMLObject.
+ """
+ def __init__(cls, name, bases, kwds):
+ super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
+ if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
+ if isinstance(cls.yaml_loader, list):
+ for loader in cls.yaml_loader:
+ loader.add_constructor(cls.yaml_tag, cls.from_yaml)
+ else:
+ cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
+
+ cls.yaml_dumper.add_representer(cls, cls.to_yaml)
+
+class YAMLObject(metaclass=YAMLObjectMetaclass):
+ """
+ An object that can dump itself to a YAML stream
+ and load itself from a YAML stream.
+ """
+
+ __slots__ = () # no direct instantiation, so allow immutable subclasses
+
+ yaml_loader = [Loader, FullLoader, UnsafeLoader]
+ yaml_dumper = Dumper
+
+ yaml_tag = None
+ yaml_flow_style = None
+
+ @classmethod
+ def from_yaml(cls, loader, node):
+ """
+ Convert a representation node to a Python object.
+ """
+ return loader.construct_yaml_object(node, cls)
+
+ @classmethod
+ def to_yaml(cls, dumper, data):
+ """
+ Convert a Python object to a representation node.
+ """
+ return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
+ flow_style=cls.yaml_flow_style)
+
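An illustrative sketch of the public entry points above, assuming PyYAML is installed and importable as yaml (the Monster class and the documents are invented): safe_load() and safe_dump() route through SafeLoader and SafeDumper, and a YAMLObject subclass is wired to its loader and dumper by YAMLObjectMetaclass.

    import yaml

    data = yaml.safe_load("retries: 3\nhosts: [alpha, beta]\n")
    print(data)                    # {'retries': 3, 'hosts': ['alpha', 'beta']}
    print(yaml.safe_dump(data), end='')

    class Monster(yaml.YAMLObject):
        # YAMLObjectMetaclass registers from_yaml()/to_yaml() for this tag.
        yaml_tag = '!Monster'
        yaml_loader = yaml.SafeLoader
        def __init__(self, name):
            self.name = name

    m = yaml.safe_load("!Monster {name: Cave Troll}")
    print(m.name)                  # Cave Troll

Calling yaml.load() without an explicit Loader goes through load_warning() above and emits a YAMLLoadWarning.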
diff --git a/third_party/python/PyYAML/lib3/yaml/composer.py b/third_party/python/PyYAML/lib3/yaml/composer.py
new file mode 100644
index 0000000000..6d15cb40e3
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/composer.py
@@ -0,0 +1,139 @@
+
+__all__ = ['Composer', 'ComposerError']
+
+from .error import MarkedYAMLError
+from .events import *
+from .nodes import *
+
+class ComposerError(MarkedYAMLError):
+ pass
+
+class Composer:
+
+ def __init__(self):
+ self.anchors = {}
+
+ def check_node(self):
+ # Drop the STREAM-START event.
+ if self.check_event(StreamStartEvent):
+ self.get_event()
+
+        # Are there more documents available?
+ return not self.check_event(StreamEndEvent)
+
+ def get_node(self):
+ # Get the root node of the next document.
+ if not self.check_event(StreamEndEvent):
+ return self.compose_document()
+
+ def get_single_node(self):
+ # Drop the STREAM-START event.
+ self.get_event()
+
+ # Compose a document if the stream is not empty.
+ document = None
+ if not self.check_event(StreamEndEvent):
+ document = self.compose_document()
+
+ # Ensure that the stream contains no more documents.
+ if not self.check_event(StreamEndEvent):
+ event = self.get_event()
+ raise ComposerError("expected a single document in the stream",
+ document.start_mark, "but found another document",
+ event.start_mark)
+
+ # Drop the STREAM-END event.
+ self.get_event()
+
+ return document
+
+ def compose_document(self):
+ # Drop the DOCUMENT-START event.
+ self.get_event()
+
+ # Compose the root node.
+ node = self.compose_node(None, None)
+
+ # Drop the DOCUMENT-END event.
+ self.get_event()
+
+ self.anchors = {}
+ return node
+
+ def compose_node(self, parent, index):
+ if self.check_event(AliasEvent):
+ event = self.get_event()
+ anchor = event.anchor
+ if anchor not in self.anchors:
+ raise ComposerError(None, None, "found undefined alias %r"
+ % anchor, event.start_mark)
+ return self.anchors[anchor]
+ event = self.peek_event()
+ anchor = event.anchor
+ if anchor is not None:
+ if anchor in self.anchors:
+ raise ComposerError("found duplicate anchor %r; first occurrence"
+ % anchor, self.anchors[anchor].start_mark,
+ "second occurrence", event.start_mark)
+ self.descend_resolver(parent, index)
+ if self.check_event(ScalarEvent):
+ node = self.compose_scalar_node(anchor)
+ elif self.check_event(SequenceStartEvent):
+ node = self.compose_sequence_node(anchor)
+ elif self.check_event(MappingStartEvent):
+ node = self.compose_mapping_node(anchor)
+ self.ascend_resolver()
+ return node
+
+ def compose_scalar_node(self, anchor):
+ event = self.get_event()
+ tag = event.tag
+ if tag is None or tag == '!':
+ tag = self.resolve(ScalarNode, event.value, event.implicit)
+ node = ScalarNode(tag, event.value,
+ event.start_mark, event.end_mark, style=event.style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ return node
+
+ def compose_sequence_node(self, anchor):
+ start_event = self.get_event()
+ tag = start_event.tag
+ if tag is None or tag == '!':
+ tag = self.resolve(SequenceNode, None, start_event.implicit)
+ node = SequenceNode(tag, [],
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ index = 0
+ while not self.check_event(SequenceEndEvent):
+ node.value.append(self.compose_node(node, index))
+ index += 1
+ end_event = self.get_event()
+ node.end_mark = end_event.end_mark
+ return node
+
+ def compose_mapping_node(self, anchor):
+ start_event = self.get_event()
+ tag = start_event.tag
+ if tag is None or tag == '!':
+ tag = self.resolve(MappingNode, None, start_event.implicit)
+ node = MappingNode(tag, [],
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ while not self.check_event(MappingEndEvent):
+ #key_event = self.peek_event()
+ item_key = self.compose_node(node, None)
+ #if item_key in node.value:
+ # raise ComposerError("while composing a mapping", start_event.start_mark,
+ # "found duplicate key", key_event.start_mark)
+ item_value = self.compose_node(node, item_key)
+ #node.value[item_key] = item_value
+ node.value.append((item_key, item_value))
+ end_event = self.get_event()
+ node.end_mark = end_event.end_mark
+ return node
+
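An illustrative sketch of the Composer above, assuming PyYAML is installed and importable as yaml (the document is invented): compose() stops at the node layer, and an alias resolves, via the anchors dictionary, to the very same node object that its anchor produced.

    import yaml

    root = yaml.compose("base: &b {x: 1}\ncopy: *b\n")
    print(type(root).__name__)     # MappingNode
    (k1, v1), (k2, v2) = root.value
    # *b is not a copy: both mapping values are the same node object.
    print(v1 is v2)                # True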
diff --git a/third_party/python/PyYAML/lib3/yaml/constructor.py b/third_party/python/PyYAML/lib3/yaml/constructor.py
new file mode 100644
index 0000000000..1948b125c2
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/constructor.py
@@ -0,0 +1,748 @@
+
+__all__ = [
+ 'BaseConstructor',
+ 'SafeConstructor',
+ 'FullConstructor',
+ 'UnsafeConstructor',
+ 'Constructor',
+ 'ConstructorError'
+]
+
+from .error import *
+from .nodes import *
+
+import collections.abc, datetime, base64, binascii, re, sys, types
+
+class ConstructorError(MarkedYAMLError):
+ pass
+
+class BaseConstructor:
+
+ yaml_constructors = {}
+ yaml_multi_constructors = {}
+
+ def __init__(self):
+ self.constructed_objects = {}
+ self.recursive_objects = {}
+ self.state_generators = []
+ self.deep_construct = False
+
+ def check_data(self):
+        # Are there more documents available?
+ return self.check_node()
+
+ def check_state_key(self, key):
+ """Block special attributes/methods from being set in a newly created
+ object, to prevent user-controlled methods from being called during
+ deserialization"""
+ if self.get_state_keys_blacklist_regexp().match(key):
+ raise ConstructorError(None, None,
+ "blacklisted key '%s' in instance state found" % (key,), None)
+
+ def get_data(self):
+ # Construct and return the next document.
+ if self.check_node():
+ return self.construct_document(self.get_node())
+
+ def get_single_data(self):
+ # Ensure that the stream contains a single document and construct it.
+ node = self.get_single_node()
+ if node is not None:
+ return self.construct_document(node)
+ return None
+
+ def construct_document(self, node):
+ data = self.construct_object(node)
+ while self.state_generators:
+ state_generators = self.state_generators
+ self.state_generators = []
+ for generator in state_generators:
+ for dummy in generator:
+ pass
+ self.constructed_objects = {}
+ self.recursive_objects = {}
+ self.deep_construct = False
+ return data
+
+ def construct_object(self, node, deep=False):
+ if node in self.constructed_objects:
+ return self.constructed_objects[node]
+ if deep:
+ old_deep = self.deep_construct
+ self.deep_construct = True
+ if node in self.recursive_objects:
+ raise ConstructorError(None, None,
+ "found unconstructable recursive node", node.start_mark)
+ self.recursive_objects[node] = None
+ constructor = None
+ tag_suffix = None
+ if node.tag in self.yaml_constructors:
+ constructor = self.yaml_constructors[node.tag]
+ else:
+ for tag_prefix in self.yaml_multi_constructors:
+ if tag_prefix is not None and node.tag.startswith(tag_prefix):
+ tag_suffix = node.tag[len(tag_prefix):]
+ constructor = self.yaml_multi_constructors[tag_prefix]
+ break
+ else:
+ if None in self.yaml_multi_constructors:
+ tag_suffix = node.tag
+ constructor = self.yaml_multi_constructors[None]
+ elif None in self.yaml_constructors:
+ constructor = self.yaml_constructors[None]
+ elif isinstance(node, ScalarNode):
+ constructor = self.__class__.construct_scalar
+ elif isinstance(node, SequenceNode):
+ constructor = self.__class__.construct_sequence
+ elif isinstance(node, MappingNode):
+ constructor = self.__class__.construct_mapping
+ if tag_suffix is None:
+ data = constructor(self, node)
+ else:
+ data = constructor(self, tag_suffix, node)
+ if isinstance(data, types.GeneratorType):
+ generator = data
+ data = next(generator)
+ if self.deep_construct:
+ for dummy in generator:
+ pass
+ else:
+ self.state_generators.append(generator)
+ self.constructed_objects[node] = data
+ del self.recursive_objects[node]
+ if deep:
+ self.deep_construct = old_deep
+ return data
+
+ def construct_scalar(self, node):
+ if not isinstance(node, ScalarNode):
+ raise ConstructorError(None, None,
+ "expected a scalar node, but found %s" % node.id,
+ node.start_mark)
+ return node.value
+
+ def construct_sequence(self, node, deep=False):
+ if not isinstance(node, SequenceNode):
+ raise ConstructorError(None, None,
+ "expected a sequence node, but found %s" % node.id,
+ node.start_mark)
+ return [self.construct_object(child, deep=deep)
+ for child in node.value]
+
+ def construct_mapping(self, node, deep=False):
+ if not isinstance(node, MappingNode):
+ raise ConstructorError(None, None,
+ "expected a mapping node, but found %s" % node.id,
+ node.start_mark)
+ mapping = {}
+ for key_node, value_node in node.value:
+ key = self.construct_object(key_node, deep=deep)
+ if not isinstance(key, collections.abc.Hashable):
+ raise ConstructorError("while constructing a mapping", node.start_mark,
+ "found unhashable key", key_node.start_mark)
+ value = self.construct_object(value_node, deep=deep)
+ mapping[key] = value
+ return mapping
+
+ def construct_pairs(self, node, deep=False):
+ if not isinstance(node, MappingNode):
+ raise ConstructorError(None, None,
+ "expected a mapping node, but found %s" % node.id,
+ node.start_mark)
+ pairs = []
+ for key_node, value_node in node.value:
+ key = self.construct_object(key_node, deep=deep)
+ value = self.construct_object(value_node, deep=deep)
+ pairs.append((key, value))
+ return pairs
+
+ @classmethod
+ def add_constructor(cls, tag, constructor):
+ if not 'yaml_constructors' in cls.__dict__:
+ cls.yaml_constructors = cls.yaml_constructors.copy()
+ cls.yaml_constructors[tag] = constructor
+
+ @classmethod
+ def add_multi_constructor(cls, tag_prefix, multi_constructor):
+ if not 'yaml_multi_constructors' in cls.__dict__:
+ cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
+ cls.yaml_multi_constructors[tag_prefix] = multi_constructor
+
+class SafeConstructor(BaseConstructor):
+
+ def construct_scalar(self, node):
+ if isinstance(node, MappingNode):
+ for key_node, value_node in node.value:
+ if key_node.tag == 'tag:yaml.org,2002:value':
+ return self.construct_scalar(value_node)
+ return super().construct_scalar(node)
+
+ def flatten_mapping(self, node):
+ merge = []
+ index = 0
+ while index < len(node.value):
+ key_node, value_node = node.value[index]
+ if key_node.tag == 'tag:yaml.org,2002:merge':
+ del node.value[index]
+ if isinstance(value_node, MappingNode):
+ self.flatten_mapping(value_node)
+ merge.extend(value_node.value)
+ elif isinstance(value_node, SequenceNode):
+ submerge = []
+ for subnode in value_node.value:
+ if not isinstance(subnode, MappingNode):
+ raise ConstructorError("while constructing a mapping",
+ node.start_mark,
+ "expected a mapping for merging, but found %s"
+ % subnode.id, subnode.start_mark)
+ self.flatten_mapping(subnode)
+ submerge.append(subnode.value)
+ submerge.reverse()
+ for value in submerge:
+ merge.extend(value)
+ else:
+ raise ConstructorError("while constructing a mapping", node.start_mark,
+ "expected a mapping or list of mappings for merging, but found %s"
+ % value_node.id, value_node.start_mark)
+ elif key_node.tag == 'tag:yaml.org,2002:value':
+ key_node.tag = 'tag:yaml.org,2002:str'
+ index += 1
+ else:
+ index += 1
+ if merge:
+ node.value = merge + node.value
+
+ def construct_mapping(self, node, deep=False):
+ if isinstance(node, MappingNode):
+ self.flatten_mapping(node)
+ return super().construct_mapping(node, deep=deep)
+
+ def construct_yaml_null(self, node):
+ self.construct_scalar(node)
+ return None
+
+ bool_values = {
+ 'yes': True,
+ 'no': False,
+ 'true': True,
+ 'false': False,
+ 'on': True,
+ 'off': False,
+ }
+
+ def construct_yaml_bool(self, node):
+ value = self.construct_scalar(node)
+ return self.bool_values[value.lower()]
+
+ def construct_yaml_int(self, node):
+ value = self.construct_scalar(node)
+ value = value.replace('_', '')
+ sign = +1
+ if value[0] == '-':
+ sign = -1
+ if value[0] in '+-':
+ value = value[1:]
+ if value == '0':
+ return 0
+ elif value.startswith('0b'):
+ return sign*int(value[2:], 2)
+ elif value.startswith('0x'):
+ return sign*int(value[2:], 16)
+ elif value[0] == '0':
+ return sign*int(value, 8)
+ elif ':' in value:
+ digits = [int(part) for part in value.split(':')]
+ digits.reverse()
+ base = 1
+ value = 0
+ for digit in digits:
+ value += digit*base
+ base *= 60
+ return sign*value
+ else:
+ return sign*int(value)
+
+ inf_value = 1e300
+ while inf_value != inf_value*inf_value:
+ inf_value *= inf_value
+ nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99).
+
+ def construct_yaml_float(self, node):
+ value = self.construct_scalar(node)
+ value = value.replace('_', '').lower()
+ sign = +1
+ if value[0] == '-':
+ sign = -1
+ if value[0] in '+-':
+ value = value[1:]
+ if value == '.inf':
+ return sign*self.inf_value
+ elif value == '.nan':
+ return self.nan_value
+ elif ':' in value:
+ digits = [float(part) for part in value.split(':')]
+ digits.reverse()
+ base = 1
+ value = 0.0
+ for digit in digits:
+ value += digit*base
+ base *= 60
+ return sign*value
+ else:
+ return sign*float(value)
+
+ def construct_yaml_binary(self, node):
+ try:
+ value = self.construct_scalar(node).encode('ascii')
+ except UnicodeEncodeError as exc:
+ raise ConstructorError(None, None,
+ "failed to convert base64 data into ascii: %s" % exc,
+ node.start_mark)
+ try:
+ if hasattr(base64, 'decodebytes'):
+ return base64.decodebytes(value)
+ else:
+ return base64.decodestring(value)
+ except binascii.Error as exc:
+ raise ConstructorError(None, None,
+ "failed to decode base64 data: %s" % exc, node.start_mark)
+
+ timestamp_regexp = re.compile(
+ r'''^(?P<year>[0-9][0-9][0-9][0-9])
+ -(?P<month>[0-9][0-9]?)
+ -(?P<day>[0-9][0-9]?)
+ (?:(?:[Tt]|[ \t]+)
+ (?P<hour>[0-9][0-9]?)
+ :(?P<minute>[0-9][0-9])
+ :(?P<second>[0-9][0-9])
+ (?:\.(?P<fraction>[0-9]*))?
+ (?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
+ (?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)
+
+ def construct_yaml_timestamp(self, node):
+ value = self.construct_scalar(node)
+ match = self.timestamp_regexp.match(node.value)
+ values = match.groupdict()
+ year = int(values['year'])
+ month = int(values['month'])
+ day = int(values['day'])
+ if not values['hour']:
+ return datetime.date(year, month, day)
+ hour = int(values['hour'])
+ minute = int(values['minute'])
+ second = int(values['second'])
+ fraction = 0
+ tzinfo = None
+ if values['fraction']:
+ fraction = values['fraction'][:6]
+ while len(fraction) < 6:
+ fraction += '0'
+ fraction = int(fraction)
+ if values['tz_sign']:
+ tz_hour = int(values['tz_hour'])
+ tz_minute = int(values['tz_minute'] or 0)
+ delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
+ if values['tz_sign'] == '-':
+ delta = -delta
+ tzinfo = datetime.timezone(delta)
+ elif values['tz']:
+ tzinfo = datetime.timezone.utc
+ return datetime.datetime(year, month, day, hour, minute, second, fraction,
+ tzinfo=tzinfo)
+
+ def construct_yaml_omap(self, node):
+ # Note: we do not check for duplicate keys, because it's too
+ # CPU-expensive.
+ omap = []
+ yield omap
+ if not isinstance(node, SequenceNode):
+ raise ConstructorError("while constructing an ordered map", node.start_mark,
+ "expected a sequence, but found %s" % node.id, node.start_mark)
+ for subnode in node.value:
+ if not isinstance(subnode, MappingNode):
+ raise ConstructorError("while constructing an ordered map", node.start_mark,
+ "expected a mapping of length 1, but found %s" % subnode.id,
+ subnode.start_mark)
+ if len(subnode.value) != 1:
+ raise ConstructorError("while constructing an ordered map", node.start_mark,
+ "expected a single mapping item, but found %d items" % len(subnode.value),
+ subnode.start_mark)
+ key_node, value_node = subnode.value[0]
+ key = self.construct_object(key_node)
+ value = self.construct_object(value_node)
+ omap.append((key, value))
+
+ def construct_yaml_pairs(self, node):
+ # Note: the same code as `construct_yaml_omap`.
+ pairs = []
+ yield pairs
+ if not isinstance(node, SequenceNode):
+ raise ConstructorError("while constructing pairs", node.start_mark,
+ "expected a sequence, but found %s" % node.id, node.start_mark)
+ for subnode in node.value:
+ if not isinstance(subnode, MappingNode):
+ raise ConstructorError("while constructing pairs", node.start_mark,
+ "expected a mapping of length 1, but found %s" % subnode.id,
+ subnode.start_mark)
+ if len(subnode.value) != 1:
+ raise ConstructorError("while constructing pairs", node.start_mark,
+ "expected a single mapping item, but found %d items" % len(subnode.value),
+ subnode.start_mark)
+ key_node, value_node = subnode.value[0]
+ key = self.construct_object(key_node)
+ value = self.construct_object(value_node)
+ pairs.append((key, value))
+
+ def construct_yaml_set(self, node):
+ data = set()
+ yield data
+ value = self.construct_mapping(node)
+ data.update(value)
+
+ def construct_yaml_str(self, node):
+ return self.construct_scalar(node)
+
+ def construct_yaml_seq(self, node):
+ data = []
+ yield data
+ data.extend(self.construct_sequence(node))
+
+ def construct_yaml_map(self, node):
+ data = {}
+ yield data
+ value = self.construct_mapping(node)
+ data.update(value)
+
+ def construct_yaml_object(self, node, cls):
+ data = cls.__new__(cls)
+ yield data
+ if hasattr(data, '__setstate__'):
+ state = self.construct_mapping(node, deep=True)
+ data.__setstate__(state)
+ else:
+ state = self.construct_mapping(node)
+ data.__dict__.update(state)
+
+ def construct_undefined(self, node):
+ raise ConstructorError(None, None,
+ "could not determine a constructor for the tag %r" % node.tag,
+ node.start_mark)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:null',
+ SafeConstructor.construct_yaml_null)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:bool',
+ SafeConstructor.construct_yaml_bool)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:int',
+ SafeConstructor.construct_yaml_int)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:float',
+ SafeConstructor.construct_yaml_float)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:binary',
+ SafeConstructor.construct_yaml_binary)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:timestamp',
+ SafeConstructor.construct_yaml_timestamp)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:omap',
+ SafeConstructor.construct_yaml_omap)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:pairs',
+ SafeConstructor.construct_yaml_pairs)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:set',
+ SafeConstructor.construct_yaml_set)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:str',
+ SafeConstructor.construct_yaml_str)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:seq',
+ SafeConstructor.construct_yaml_seq)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:map',
+ SafeConstructor.construct_yaml_map)
+
+SafeConstructor.add_constructor(None,
+ SafeConstructor.construct_undefined)
+
+class FullConstructor(SafeConstructor):
+ # 'extend' is blacklisted because it is used by
+    # construct_python_object_apply to add `listitems` to a newly generated
+    # Python instance.
+ def get_state_keys_blacklist(self):
+ return ['^extend$', '^__.*__$']
+
+ def get_state_keys_blacklist_regexp(self):
+ if not hasattr(self, 'state_keys_blacklist_regexp'):
+ self.state_keys_blacklist_regexp = re.compile('(' + '|'.join(self.get_state_keys_blacklist()) + ')')
+ return self.state_keys_blacklist_regexp
+
+ def construct_python_str(self, node):
+ return self.construct_scalar(node)
+
+ def construct_python_unicode(self, node):
+ return self.construct_scalar(node)
+
+ def construct_python_bytes(self, node):
+ try:
+ value = self.construct_scalar(node).encode('ascii')
+ except UnicodeEncodeError as exc:
+ raise ConstructorError(None, None,
+ "failed to convert base64 data into ascii: %s" % exc,
+ node.start_mark)
+ try:
+ if hasattr(base64, 'decodebytes'):
+ return base64.decodebytes(value)
+ else:
+ return base64.decodestring(value)
+ except binascii.Error as exc:
+ raise ConstructorError(None, None,
+ "failed to decode base64 data: %s" % exc, node.start_mark)
+
+ def construct_python_long(self, node):
+ return self.construct_yaml_int(node)
+
+ def construct_python_complex(self, node):
+ return complex(self.construct_scalar(node))
+
+ def construct_python_tuple(self, node):
+ return tuple(self.construct_sequence(node))
+
+ def find_python_module(self, name, mark, unsafe=False):
+ if not name:
+ raise ConstructorError("while constructing a Python module", mark,
+ "expected non-empty name appended to the tag", mark)
+ if unsafe:
+ try:
+ __import__(name)
+ except ImportError as exc:
+ raise ConstructorError("while constructing a Python module", mark,
+ "cannot find module %r (%s)" % (name, exc), mark)
+ if name not in sys.modules:
+ raise ConstructorError("while constructing a Python module", mark,
+ "module %r is not imported" % name, mark)
+ return sys.modules[name]
+
+ def find_python_name(self, name, mark, unsafe=False):
+ if not name:
+ raise ConstructorError("while constructing a Python object", mark,
+ "expected non-empty name appended to the tag", mark)
+ if '.' in name:
+ module_name, object_name = name.rsplit('.', 1)
+ else:
+ module_name = 'builtins'
+ object_name = name
+ if unsafe:
+ try:
+ __import__(module_name)
+ except ImportError as exc:
+ raise ConstructorError("while constructing a Python object", mark,
+ "cannot find module %r (%s)" % (module_name, exc), mark)
+ if module_name not in sys.modules:
+ raise ConstructorError("while constructing a Python object", mark,
+ "module %r is not imported" % module_name, mark)
+ module = sys.modules[module_name]
+ if not hasattr(module, object_name):
+ raise ConstructorError("while constructing a Python object", mark,
+ "cannot find %r in the module %r"
+ % (object_name, module.__name__), mark)
+ return getattr(module, object_name)
+
+ def construct_python_name(self, suffix, node):
+ value = self.construct_scalar(node)
+ if value:
+ raise ConstructorError("while constructing a Python name", node.start_mark,
+ "expected the empty value, but found %r" % value, node.start_mark)
+ return self.find_python_name(suffix, node.start_mark)
+
+ def construct_python_module(self, suffix, node):
+ value = self.construct_scalar(node)
+ if value:
+ raise ConstructorError("while constructing a Python module", node.start_mark,
+ "expected the empty value, but found %r" % value, node.start_mark)
+ return self.find_python_module(suffix, node.start_mark)
+
+ def make_python_instance(self, suffix, node,
+ args=None, kwds=None, newobj=False, unsafe=False):
+ if not args:
+ args = []
+ if not kwds:
+ kwds = {}
+ cls = self.find_python_name(suffix, node.start_mark)
+ if not (unsafe or isinstance(cls, type)):
+ raise ConstructorError("while constructing a Python instance", node.start_mark,
+ "expected a class, but found %r" % type(cls),
+ node.start_mark)
+ if newobj and isinstance(cls, type):
+ return cls.__new__(cls, *args, **kwds)
+ else:
+ return cls(*args, **kwds)
+
+ def set_python_instance_state(self, instance, state, unsafe=False):
+ if hasattr(instance, '__setstate__'):
+ instance.__setstate__(state)
+ else:
+ slotstate = {}
+ if isinstance(state, tuple) and len(state) == 2:
+ state, slotstate = state
+ if hasattr(instance, '__dict__'):
+ if not unsafe and state:
+ for key in state.keys():
+ self.check_state_key(key)
+ instance.__dict__.update(state)
+ elif state:
+ slotstate.update(state)
+ for key, value in slotstate.items():
+ if not unsafe:
+ self.check_state_key(key)
+ setattr(instance, key, value)
+
+ def construct_python_object(self, suffix, node):
+ # Format:
+ # !!python/object:module.name { ... state ... }
+ instance = self.make_python_instance(suffix, node, newobj=True)
+ yield instance
+ deep = hasattr(instance, '__setstate__')
+ state = self.construct_mapping(node, deep=deep)
+ self.set_python_instance_state(instance, state)
+
+ def construct_python_object_apply(self, suffix, node, newobj=False):
+ # Format:
+ # !!python/object/apply # (or !!python/object/new)
+ # args: [ ... arguments ... ]
+ # kwds: { ... keywords ... }
+ # state: ... state ...
+ # listitems: [ ... listitems ... ]
+ # dictitems: { ... dictitems ... }
+ # or short format:
+ # !!python/object/apply [ ... arguments ... ]
+ # The difference between !!python/object/apply and !!python/object/new
+        # is how an object is created; check make_python_instance for details.
+ if isinstance(node, SequenceNode):
+ args = self.construct_sequence(node, deep=True)
+ kwds = {}
+ state = {}
+ listitems = []
+ dictitems = {}
+ else:
+ value = self.construct_mapping(node, deep=True)
+ args = value.get('args', [])
+ kwds = value.get('kwds', {})
+ state = value.get('state', {})
+ listitems = value.get('listitems', [])
+ dictitems = value.get('dictitems', {})
+ instance = self.make_python_instance(suffix, node, args, kwds, newobj)
+ if state:
+ self.set_python_instance_state(instance, state)
+ if listitems:
+ instance.extend(listitems)
+ if dictitems:
+ for key in dictitems:
+ instance[key] = dictitems[key]
+ return instance
+
+ def construct_python_object_new(self, suffix, node):
+ return self.construct_python_object_apply(suffix, node, newobj=True)
+
+FullConstructor.add_constructor(
+ 'tag:yaml.org,2002:python/none',
+ FullConstructor.construct_yaml_null)
+
+FullConstructor.add_constructor(
+ 'tag:yaml.org,2002:python/bool',
+ FullConstructor.construct_yaml_bool)
+
+FullConstructor.add_constructor(
+ 'tag:yaml.org,2002:python/str',
+ FullConstructor.construct_python_str)
+
+FullConstructor.add_constructor(
+ 'tag:yaml.org,2002:python/unicode',
+ FullConstructor.construct_python_unicode)
+
+FullConstructor.add_constructor(
+ 'tag:yaml.org,2002:python/bytes',
+ FullConstructor.construct_python_bytes)
+
+FullConstructor.add_constructor(
+ 'tag:yaml.org,2002:python/int',
+ FullConstructor.construct_yaml_int)
+
+FullConstructor.add_constructor(
+ 'tag:yaml.org,2002:python/long',
+ FullConstructor.construct_python_long)
+
+FullConstructor.add_constructor(
+ 'tag:yaml.org,2002:python/float',
+ FullConstructor.construct_yaml_float)
+
+FullConstructor.add_constructor(
+ 'tag:yaml.org,2002:python/complex',
+ FullConstructor.construct_python_complex)
+
+FullConstructor.add_constructor(
+ 'tag:yaml.org,2002:python/list',
+ FullConstructor.construct_yaml_seq)
+
+FullConstructor.add_constructor(
+ 'tag:yaml.org,2002:python/tuple',
+ FullConstructor.construct_python_tuple)
+
+FullConstructor.add_constructor(
+ 'tag:yaml.org,2002:python/dict',
+ FullConstructor.construct_yaml_map)
+
+FullConstructor.add_multi_constructor(
+ 'tag:yaml.org,2002:python/name:',
+ FullConstructor.construct_python_name)
+
+FullConstructor.add_multi_constructor(
+ 'tag:yaml.org,2002:python/module:',
+ FullConstructor.construct_python_module)
+
+FullConstructor.add_multi_constructor(
+ 'tag:yaml.org,2002:python/object:',
+ FullConstructor.construct_python_object)
+
+FullConstructor.add_multi_constructor(
+ 'tag:yaml.org,2002:python/object/new:',
+ FullConstructor.construct_python_object_new)
+
+class UnsafeConstructor(FullConstructor):
+
+ def find_python_module(self, name, mark):
+ return super(UnsafeConstructor, self).find_python_module(name, mark, unsafe=True)
+
+ def find_python_name(self, name, mark):
+ return super(UnsafeConstructor, self).find_python_name(name, mark, unsafe=True)
+
+ def make_python_instance(self, suffix, node, args=None, kwds=None, newobj=False):
+ return super(UnsafeConstructor, self).make_python_instance(
+ suffix, node, args, kwds, newobj, unsafe=True)
+
+ def set_python_instance_state(self, instance, state):
+ return super(UnsafeConstructor, self).set_python_instance_state(
+ instance, state, unsafe=True)
+
+UnsafeConstructor.add_multi_constructor(
+ 'tag:yaml.org,2002:python/object/apply:',
+ UnsafeConstructor.construct_python_object_apply)
+
+# Constructor is the same as UnsafeConstructor. We need to leave this in place
+# in case people have extended it directly.
+class Constructor(UnsafeConstructor):
+ pass
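An illustrative sketch of extending the constructors above, assuming PyYAML is installed and importable as yaml (the !point tag, construct_point() and the document are invented): add_constructor() installs a per-tag hook on the chosen loader class, here SafeLoader, so the tag stays usable through safe_load().

    import yaml

    def construct_point(loader, node):
        # construct_sequence() builds the plain Python list for the node's items.
        x, y = loader.construct_sequence(node)
        return (x, y)

    yaml.SafeLoader.add_constructor('!point', construct_point)
    print(yaml.safe_load("corner: !point [3, 4]"))   # {'corner': (3, 4)}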
diff --git a/third_party/python/PyYAML/lib3/yaml/cyaml.py b/third_party/python/PyYAML/lib3/yaml/cyaml.py
new file mode 100644
index 0000000000..1e606c74b9
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/cyaml.py
@@ -0,0 +1,101 @@
+
+__all__ = [
+ 'CBaseLoader', 'CSafeLoader', 'CFullLoader', 'CUnsafeLoader', 'CLoader',
+ 'CBaseDumper', 'CSafeDumper', 'CDumper'
+]
+
+from _yaml import CParser, CEmitter
+
+from .constructor import *
+
+from .serializer import *
+from .representer import *
+
+from .resolver import *
+
+class CBaseLoader(CParser, BaseConstructor, BaseResolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ BaseConstructor.__init__(self)
+ BaseResolver.__init__(self)
+
+class CSafeLoader(CParser, SafeConstructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ SafeConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class CFullLoader(CParser, FullConstructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ FullConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class CUnsafeLoader(CParser, UnsafeConstructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ UnsafeConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class CLoader(CParser, Constructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ Constructor.__init__(self)
+ Resolver.__init__(self)
+
+class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style, sort_keys=sort_keys)
+ Resolver.__init__(self)
+
+class CSafeDumper(CEmitter, SafeRepresenter, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ SafeRepresenter.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style, sort_keys=sort_keys)
+ Resolver.__init__(self)
+
+class CDumper(CEmitter, Serializer, Representer, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style, sort_keys=sort_keys)
+ Resolver.__init__(self)
+
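An illustrative sketch of choosing the libyaml-backed classes above, assuming PyYAML is installed and importable as yaml (FastSafeLoader is an invented alias): the C classes only exist when the _yaml extension module was built, so a common pattern is to fall back to the pure-Python loader.

    import yaml

    try:
        from yaml import CSafeLoader as FastSafeLoader
    except ImportError:
        from yaml import SafeLoader as FastSafeLoader

    print(yaml.__with_libyaml__)                      # True only when _yaml imported
    print(yaml.load("a: 1", Loader=FastSafeLoader))   # {'a': 1}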
diff --git a/third_party/python/PyYAML/lib3/yaml/dumper.py b/third_party/python/PyYAML/lib3/yaml/dumper.py
new file mode 100644
index 0000000000..6aadba551f
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/dumper.py
@@ -0,0 +1,62 @@
+
+__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']
+
+from .emitter import *
+from .serializer import *
+from .representer import *
+from .resolver import *
+
+class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ Emitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ Serializer.__init__(self, encoding=encoding,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style, sort_keys=sort_keys)
+ Resolver.__init__(self)
+
+class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ Emitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ Serializer.__init__(self, encoding=encoding,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ SafeRepresenter.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style, sort_keys=sort_keys)
+ Resolver.__init__(self)
+
+class Dumper(Emitter, Serializer, Representer, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=False,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None, sort_keys=True):
+ Emitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ Serializer.__init__(self, encoding=encoding,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style, sort_keys=sort_keys)
+ Resolver.__init__(self)
+
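BaseDumper, SafeDumper, and Dumper from dumper.py are normally reached through the module-level helpers rather than instantiated directly. A short sketch of the equivalent calls, shown only for orientation:

    import yaml

    doc = {"name": "example", "ports": [80, 443]}

    # safe_dump() wires up SafeDumper; only plain Python types are representable.
    print(yaml.safe_dump(doc, default_flow_style=False, sort_keys=True))

    # Passing the Dumper class explicitly is equivalent.
    print(yaml.dump(doc, Dumper=yaml.SafeDumper, default_flow_style=False))
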
diff --git a/third_party/python/PyYAML/lib3/yaml/emitter.py b/third_party/python/PyYAML/lib3/yaml/emitter.py
new file mode 100644
index 0000000000..a664d01116
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/emitter.py
@@ -0,0 +1,1137 @@
+
+# Emitter expects events obeying the following grammar:
+# stream ::= STREAM-START document* STREAM-END
+# document ::= DOCUMENT-START node DOCUMENT-END
+# node ::= SCALAR | sequence | mapping
+# sequence ::= SEQUENCE-START node* SEQUENCE-END
+# mapping ::= MAPPING-START (node node)* MAPPING-END
+
+__all__ = ['Emitter', 'EmitterError']
+
+from .error import YAMLError
+from .events import *
+
+class EmitterError(YAMLError):
+ pass
+
+class ScalarAnalysis:
+ def __init__(self, scalar, empty, multiline,
+ allow_flow_plain, allow_block_plain,
+ allow_single_quoted, allow_double_quoted,
+ allow_block):
+ self.scalar = scalar
+ self.empty = empty
+ self.multiline = multiline
+ self.allow_flow_plain = allow_flow_plain
+ self.allow_block_plain = allow_block_plain
+ self.allow_single_quoted = allow_single_quoted
+ self.allow_double_quoted = allow_double_quoted
+ self.allow_block = allow_block
+
+class Emitter:
+
+ DEFAULT_TAG_PREFIXES = {
+ '!' : '!',
+ 'tag:yaml.org,2002:' : '!!',
+ }
+
+ def __init__(self, stream, canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None):
+
+ # The stream should have the methods `write` and possibly `flush`.
+ self.stream = stream
+
+ # Encoding can be overridden by STREAM-START.
+ self.encoding = None
+
+ # Emitter is a state machine with a stack of states to handle nested
+ # structures.
+ self.states = []
+ self.state = self.expect_stream_start
+
+ # Current event and the event queue.
+ self.events = []
+ self.event = None
+
+ # The current indentation level and the stack of previous indents.
+ self.indents = []
+ self.indent = None
+
+ # Flow level.
+ self.flow_level = 0
+
+ # Contexts.
+ self.root_context = False
+ self.sequence_context = False
+ self.mapping_context = False
+ self.simple_key_context = False
+
+ # Characteristics of the last emitted character:
+ # - current position.
+ # - is it a whitespace?
+ # - is it an indention character
+ # (indentation space, '-', '?', or ':')?
+ self.line = 0
+ self.column = 0
+ self.whitespace = True
+ self.indention = True
+
+ # Whether the document requires an explicit document indicator
+ self.open_ended = False
+
+ # Formatting details.
+ self.canonical = canonical
+ self.allow_unicode = allow_unicode
+ self.best_indent = 2
+ if indent and 1 < indent < 10:
+ self.best_indent = indent
+ self.best_width = 80
+ if width and width > self.best_indent*2:
+ self.best_width = width
+ self.best_line_break = '\n'
+ if line_break in ['\r', '\n', '\r\n']:
+ self.best_line_break = line_break
+
+ # Tag prefixes.
+ self.tag_prefixes = None
+
+ # Prepared anchor and tag.
+ self.prepared_anchor = None
+ self.prepared_tag = None
+
+ # Scalar analysis and style.
+ self.analysis = None
+ self.style = None
+
+ def dispose(self):
+ # Reset the state attributes (to clear self-references)
+ self.states = []
+ self.state = None
+
+ def emit(self, event):
+ self.events.append(event)
+ while not self.need_more_events():
+ self.event = self.events.pop(0)
+ self.state()
+ self.event = None
+
+ # In some cases, we wait for a few next events before emitting.
+
+ def need_more_events(self):
+ if not self.events:
+ return True
+ event = self.events[0]
+ if isinstance(event, DocumentStartEvent):
+ return self.need_events(1)
+ elif isinstance(event, SequenceStartEvent):
+ return self.need_events(2)
+ elif isinstance(event, MappingStartEvent):
+ return self.need_events(3)
+ else:
+ return False
+
+ def need_events(self, count):
+ level = 0
+ for event in self.events[1:]:
+ if isinstance(event, (DocumentStartEvent, CollectionStartEvent)):
+ level += 1
+ elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)):
+ level -= 1
+ elif isinstance(event, StreamEndEvent):
+ level = -1
+ if level < 0:
+ return False
+ return (len(self.events) < count+1)
+
+ def increase_indent(self, flow=False, indentless=False):
+ self.indents.append(self.indent)
+ if self.indent is None:
+ if flow:
+ self.indent = self.best_indent
+ else:
+ self.indent = 0
+ elif not indentless:
+ self.indent += self.best_indent
+
+ # States.
+
+ # Stream handlers.
+
+ def expect_stream_start(self):
+ if isinstance(self.event, StreamStartEvent):
+ if self.event.encoding and not hasattr(self.stream, 'encoding'):
+ self.encoding = self.event.encoding
+ self.write_stream_start()
+ self.state = self.expect_first_document_start
+ else:
+ raise EmitterError("expected StreamStartEvent, but got %s"
+ % self.event)
+
+ def expect_nothing(self):
+ raise EmitterError("expected nothing, but got %s" % self.event)
+
+ # Document handlers.
+
+ def expect_first_document_start(self):
+ return self.expect_document_start(first=True)
+
+ def expect_document_start(self, first=False):
+ if isinstance(self.event, DocumentStartEvent):
+ if (self.event.version or self.event.tags) and self.open_ended:
+ self.write_indicator('...', True)
+ self.write_indent()
+ if self.event.version:
+ version_text = self.prepare_version(self.event.version)
+ self.write_version_directive(version_text)
+ self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy()
+ if self.event.tags:
+ handles = sorted(self.event.tags.keys())
+ for handle in handles:
+ prefix = self.event.tags[handle]
+ self.tag_prefixes[prefix] = handle
+ handle_text = self.prepare_tag_handle(handle)
+ prefix_text = self.prepare_tag_prefix(prefix)
+ self.write_tag_directive(handle_text, prefix_text)
+ implicit = (first and not self.event.explicit and not self.canonical
+ and not self.event.version and not self.event.tags
+ and not self.check_empty_document())
+ if not implicit:
+ self.write_indent()
+ self.write_indicator('---', True)
+ if self.canonical:
+ self.write_indent()
+ self.state = self.expect_document_root
+ elif isinstance(self.event, StreamEndEvent):
+ if self.open_ended:
+ self.write_indicator('...', True)
+ self.write_indent()
+ self.write_stream_end()
+ self.state = self.expect_nothing
+ else:
+ raise EmitterError("expected DocumentStartEvent, but got %s"
+ % self.event)
+
+ def expect_document_end(self):
+ if isinstance(self.event, DocumentEndEvent):
+ self.write_indent()
+ if self.event.explicit:
+ self.write_indicator('...', True)
+ self.write_indent()
+ self.flush_stream()
+ self.state = self.expect_document_start
+ else:
+ raise EmitterError("expected DocumentEndEvent, but got %s"
+ % self.event)
+
+ def expect_document_root(self):
+ self.states.append(self.expect_document_end)
+ self.expect_node(root=True)
+
+ # Node handlers.
+
+ def expect_node(self, root=False, sequence=False, mapping=False,
+ simple_key=False):
+ self.root_context = root
+ self.sequence_context = sequence
+ self.mapping_context = mapping
+ self.simple_key_context = simple_key
+ if isinstance(self.event, AliasEvent):
+ self.expect_alias()
+ elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)):
+ self.process_anchor('&')
+ self.process_tag()
+ if isinstance(self.event, ScalarEvent):
+ self.expect_scalar()
+ elif isinstance(self.event, SequenceStartEvent):
+ if self.flow_level or self.canonical or self.event.flow_style \
+ or self.check_empty_sequence():
+ self.expect_flow_sequence()
+ else:
+ self.expect_block_sequence()
+ elif isinstance(self.event, MappingStartEvent):
+ if self.flow_level or self.canonical or self.event.flow_style \
+ or self.check_empty_mapping():
+ self.expect_flow_mapping()
+ else:
+ self.expect_block_mapping()
+ else:
+ raise EmitterError("expected NodeEvent, but got %s" % self.event)
+
+ def expect_alias(self):
+ if self.event.anchor is None:
+ raise EmitterError("anchor is not specified for alias")
+ self.process_anchor('*')
+ self.state = self.states.pop()
+
+ def expect_scalar(self):
+ self.increase_indent(flow=True)
+ self.process_scalar()
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+
+ # Flow sequence handlers.
+
+ def expect_flow_sequence(self):
+ self.write_indicator('[', True, whitespace=True)
+ self.flow_level += 1
+ self.increase_indent(flow=True)
+ self.state = self.expect_first_flow_sequence_item
+
+ def expect_first_flow_sequence_item(self):
+ if isinstance(self.event, SequenceEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ self.write_indicator(']', False)
+ self.state = self.states.pop()
+ else:
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ self.states.append(self.expect_flow_sequence_item)
+ self.expect_node(sequence=True)
+
+ def expect_flow_sequence_item(self):
+ if isinstance(self.event, SequenceEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ if self.canonical:
+ self.write_indicator(',', False)
+ self.write_indent()
+ self.write_indicator(']', False)
+ self.state = self.states.pop()
+ else:
+ self.write_indicator(',', False)
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ self.states.append(self.expect_flow_sequence_item)
+ self.expect_node(sequence=True)
+
+ # Flow mapping handlers.
+
+ def expect_flow_mapping(self):
+ self.write_indicator('{', True, whitespace=True)
+ self.flow_level += 1
+ self.increase_indent(flow=True)
+ self.state = self.expect_first_flow_mapping_key
+
+ def expect_first_flow_mapping_key(self):
+ if isinstance(self.event, MappingEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ self.write_indicator('}', False)
+ self.state = self.states.pop()
+ else:
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ if not self.canonical and self.check_simple_key():
+ self.states.append(self.expect_flow_mapping_simple_value)
+ self.expect_node(mapping=True, simple_key=True)
+ else:
+ self.write_indicator('?', True)
+ self.states.append(self.expect_flow_mapping_value)
+ self.expect_node(mapping=True)
+
+ def expect_flow_mapping_key(self):
+ if isinstance(self.event, MappingEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ if self.canonical:
+ self.write_indicator(',', False)
+ self.write_indent()
+ self.write_indicator('}', False)
+ self.state = self.states.pop()
+ else:
+ self.write_indicator(',', False)
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ if not self.canonical and self.check_simple_key():
+ self.states.append(self.expect_flow_mapping_simple_value)
+ self.expect_node(mapping=True, simple_key=True)
+ else:
+ self.write_indicator('?', True)
+ self.states.append(self.expect_flow_mapping_value)
+ self.expect_node(mapping=True)
+
+ def expect_flow_mapping_simple_value(self):
+ self.write_indicator(':', False)
+ self.states.append(self.expect_flow_mapping_key)
+ self.expect_node(mapping=True)
+
+ def expect_flow_mapping_value(self):
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ self.write_indicator(':', True)
+ self.states.append(self.expect_flow_mapping_key)
+ self.expect_node(mapping=True)
+
+ # Block sequence handlers.
+
+ def expect_block_sequence(self):
+ indentless = (self.mapping_context and not self.indention)
+ self.increase_indent(flow=False, indentless=indentless)
+ self.state = self.expect_first_block_sequence_item
+
+ def expect_first_block_sequence_item(self):
+ return self.expect_block_sequence_item(first=True)
+
+ def expect_block_sequence_item(self, first=False):
+ if not first and isinstance(self.event, SequenceEndEvent):
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+ else:
+ self.write_indent()
+ self.write_indicator('-', True, indention=True)
+ self.states.append(self.expect_block_sequence_item)
+ self.expect_node(sequence=True)
+
+ # Block mapping handlers.
+
+ def expect_block_mapping(self):
+ self.increase_indent(flow=False)
+ self.state = self.expect_first_block_mapping_key
+
+ def expect_first_block_mapping_key(self):
+ return self.expect_block_mapping_key(first=True)
+
+ def expect_block_mapping_key(self, first=False):
+ if not first and isinstance(self.event, MappingEndEvent):
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+ else:
+ self.write_indent()
+ if self.check_simple_key():
+ self.states.append(self.expect_block_mapping_simple_value)
+ self.expect_node(mapping=True, simple_key=True)
+ else:
+ self.write_indicator('?', True, indention=True)
+ self.states.append(self.expect_block_mapping_value)
+ self.expect_node(mapping=True)
+
+ def expect_block_mapping_simple_value(self):
+ self.write_indicator(':', False)
+ self.states.append(self.expect_block_mapping_key)
+ self.expect_node(mapping=True)
+
+ def expect_block_mapping_value(self):
+ self.write_indent()
+ self.write_indicator(':', True, indention=True)
+ self.states.append(self.expect_block_mapping_key)
+ self.expect_node(mapping=True)
+
+ # Checkers.
+
+ def check_empty_sequence(self):
+ return (isinstance(self.event, SequenceStartEvent) and self.events
+ and isinstance(self.events[0], SequenceEndEvent))
+
+ def check_empty_mapping(self):
+ return (isinstance(self.event, MappingStartEvent) and self.events
+ and isinstance(self.events[0], MappingEndEvent))
+
+ def check_empty_document(self):
+ if not isinstance(self.event, DocumentStartEvent) or not self.events:
+ return False
+ event = self.events[0]
+ return (isinstance(event, ScalarEvent) and event.anchor is None
+ and event.tag is None and event.implicit and event.value == '')
+
+ def check_simple_key(self):
+ length = 0
+ if isinstance(self.event, NodeEvent) and self.event.anchor is not None:
+ if self.prepared_anchor is None:
+ self.prepared_anchor = self.prepare_anchor(self.event.anchor)
+ length += len(self.prepared_anchor)
+ if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \
+ and self.event.tag is not None:
+ if self.prepared_tag is None:
+ self.prepared_tag = self.prepare_tag(self.event.tag)
+ length += len(self.prepared_tag)
+ if isinstance(self.event, ScalarEvent):
+ if self.analysis is None:
+ self.analysis = self.analyze_scalar(self.event.value)
+ length += len(self.analysis.scalar)
+ return (length < 128 and (isinstance(self.event, AliasEvent)
+ or (isinstance(self.event, ScalarEvent)
+ and not self.analysis.empty and not self.analysis.multiline)
+ or self.check_empty_sequence() or self.check_empty_mapping()))
+
+ # Anchor, Tag, and Scalar processors.
+
+ def process_anchor(self, indicator):
+ if self.event.anchor is None:
+ self.prepared_anchor = None
+ return
+ if self.prepared_anchor is None:
+ self.prepared_anchor = self.prepare_anchor(self.event.anchor)
+ if self.prepared_anchor:
+ self.write_indicator(indicator+self.prepared_anchor, True)
+ self.prepared_anchor = None
+
+ def process_tag(self):
+ tag = self.event.tag
+ if isinstance(self.event, ScalarEvent):
+ if self.style is None:
+ self.style = self.choose_scalar_style()
+ if ((not self.canonical or tag is None) and
+ ((self.style == '' and self.event.implicit[0])
+ or (self.style != '' and self.event.implicit[1]))):
+ self.prepared_tag = None
+ return
+ if self.event.implicit[0] and tag is None:
+ tag = '!'
+ self.prepared_tag = None
+ else:
+ if (not self.canonical or tag is None) and self.event.implicit:
+ self.prepared_tag = None
+ return
+ if tag is None:
+ raise EmitterError("tag is not specified")
+ if self.prepared_tag is None:
+ self.prepared_tag = self.prepare_tag(tag)
+ if self.prepared_tag:
+ self.write_indicator(self.prepared_tag, True)
+ self.prepared_tag = None
+
+ def choose_scalar_style(self):
+ if self.analysis is None:
+ self.analysis = self.analyze_scalar(self.event.value)
+ if self.event.style == '"' or self.canonical:
+ return '"'
+ if not self.event.style and self.event.implicit[0]:
+ if (not (self.simple_key_context and
+ (self.analysis.empty or self.analysis.multiline))
+ and (self.flow_level and self.analysis.allow_flow_plain
+ or (not self.flow_level and self.analysis.allow_block_plain))):
+ return ''
+ if self.event.style and self.event.style in '|>':
+ if (not self.flow_level and not self.simple_key_context
+ and self.analysis.allow_block):
+ return self.event.style
+ if not self.event.style or self.event.style == '\'':
+ if (self.analysis.allow_single_quoted and
+ not (self.simple_key_context and self.analysis.multiline)):
+ return '\''
+ return '"'
+
+ def process_scalar(self):
+ if self.analysis is None:
+ self.analysis = self.analyze_scalar(self.event.value)
+ if self.style is None:
+ self.style = self.choose_scalar_style()
+ split = (not self.simple_key_context)
+ #if self.analysis.multiline and split \
+ # and (not self.style or self.style in '\'\"'):
+ # self.write_indent()
+ if self.style == '"':
+ self.write_double_quoted(self.analysis.scalar, split)
+ elif self.style == '\'':
+ self.write_single_quoted(self.analysis.scalar, split)
+ elif self.style == '>':
+ self.write_folded(self.analysis.scalar)
+ elif self.style == '|':
+ self.write_literal(self.analysis.scalar)
+ else:
+ self.write_plain(self.analysis.scalar, split)
+ self.analysis = None
+ self.style = None
+
+ # Analyzers.
+
+ def prepare_version(self, version):
+ major, minor = version
+ if major != 1:
+ raise EmitterError("unsupported YAML version: %d.%d" % (major, minor))
+ return '%d.%d' % (major, minor)
+
+ def prepare_tag_handle(self, handle):
+ if not handle:
+ raise EmitterError("tag handle must not be empty")
+ if handle[0] != '!' or handle[-1] != '!':
+ raise EmitterError("tag handle must start and end with '!': %r" % handle)
+ for ch in handle[1:-1]:
+ if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-_'):
+ raise EmitterError("invalid character %r in the tag handle: %r"
+ % (ch, handle))
+ return handle
+
+ def prepare_tag_prefix(self, prefix):
+ if not prefix:
+ raise EmitterError("tag prefix must not be empty")
+ chunks = []
+ start = end = 0
+ if prefix[0] == '!':
+ end = 1
+ while end < len(prefix):
+ ch = prefix[end]
+ if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-;/?!:@&=+$,_.~*\'()[]':
+ end += 1
+ else:
+ if start < end:
+ chunks.append(prefix[start:end])
+ start = end = end+1
+ data = ch.encode('utf-8')
+ for ch in data:
+ chunks.append('%%%02X' % ord(ch))
+ if start < end:
+ chunks.append(prefix[start:end])
+ return ''.join(chunks)
+
+ def prepare_tag(self, tag):
+ if not tag:
+ raise EmitterError("tag must not be empty")
+ if tag == '!':
+ return tag
+ handle = None
+ suffix = tag
+ prefixes = sorted(self.tag_prefixes.keys())
+ for prefix in prefixes:
+ if tag.startswith(prefix) \
+ and (prefix == '!' or len(prefix) < len(tag)):
+ handle = self.tag_prefixes[prefix]
+ suffix = tag[len(prefix):]
+ chunks = []
+ start = end = 0
+ while end < len(suffix):
+ ch = suffix[end]
+ if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-;/?:@&=+$,_.~*\'()[]' \
+ or (ch == '!' and handle != '!'):
+ end += 1
+ else:
+ if start < end:
+ chunks.append(suffix[start:end])
+ start = end = end+1
+ data = ch.encode('utf-8')
+ for ch in data:
+ chunks.append('%%%02X' % ch)
+ if start < end:
+ chunks.append(suffix[start:end])
+ suffix_text = ''.join(chunks)
+ if handle:
+ return '%s%s' % (handle, suffix_text)
+ else:
+ return '!<%s>' % suffix_text
+
+ def prepare_anchor(self, anchor):
+ if not anchor:
+ raise EmitterError("anchor must not be empty")
+ for ch in anchor:
+ if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-_'):
+ raise EmitterError("invalid character %r in the anchor: %r"
+ % (ch, anchor))
+ return anchor
+
+ def analyze_scalar(self, scalar):
+
+ # Empty scalar is a special case.
+ if not scalar:
+ return ScalarAnalysis(scalar=scalar, empty=True, multiline=False,
+ allow_flow_plain=False, allow_block_plain=True,
+ allow_single_quoted=True, allow_double_quoted=True,
+ allow_block=False)
+
+ # Indicators and special characters.
+ block_indicators = False
+ flow_indicators = False
+ line_breaks = False
+ special_characters = False
+
+ # Important whitespace combinations.
+ leading_space = False
+ leading_break = False
+ trailing_space = False
+ trailing_break = False
+ break_space = False
+ space_break = False
+
+ # Check document indicators.
+ if scalar.startswith('---') or scalar.startswith('...'):
+ block_indicators = True
+ flow_indicators = True
+
+ # First character or preceded by a whitespace.
+ preceded_by_whitespace = True
+
+ # Last character or followed by a whitespace.
+ followed_by_whitespace = (len(scalar) == 1 or
+ scalar[1] in '\0 \t\r\n\x85\u2028\u2029')
+
+ # The previous character is a space.
+ previous_space = False
+
+ # The previous character is a break.
+ previous_break = False
+
+ index = 0
+ while index < len(scalar):
+ ch = scalar[index]
+
+ # Check for indicators.
+ if index == 0:
+ # Leading indicators are special characters.
+ if ch in '#,[]{}&*!|>\'\"%@`':
+ flow_indicators = True
+ block_indicators = True
+ if ch in '?:':
+ flow_indicators = True
+ if followed_by_whitespace:
+ block_indicators = True
+ if ch == '-' and followed_by_whitespace:
+ flow_indicators = True
+ block_indicators = True
+ else:
+ # Some indicators cannot appear within a scalar as well.
+ if ch in ',?[]{}':
+ flow_indicators = True
+ if ch == ':':
+ flow_indicators = True
+ if followed_by_whitespace:
+ block_indicators = True
+ if ch == '#' and preceded_by_whitespace:
+ flow_indicators = True
+ block_indicators = True
+
+ # Check for line breaks, special, and unicode characters.
+ if ch in '\n\x85\u2028\u2029':
+ line_breaks = True
+ if not (ch == '\n' or '\x20' <= ch <= '\x7E'):
+ if (ch == '\x85' or '\xA0' <= ch <= '\uD7FF'
+ or '\uE000' <= ch <= '\uFFFD'
+ or '\U00010000' <= ch < '\U0010ffff') and ch != '\uFEFF':
+ unicode_characters = True
+ if not self.allow_unicode:
+ special_characters = True
+ else:
+ special_characters = True
+
+ # Detect important whitespace combinations.
+ if ch == ' ':
+ if index == 0:
+ leading_space = True
+ if index == len(scalar)-1:
+ trailing_space = True
+ if previous_break:
+ break_space = True
+ previous_space = True
+ previous_break = False
+ elif ch in '\n\x85\u2028\u2029':
+ if index == 0:
+ leading_break = True
+ if index == len(scalar)-1:
+ trailing_break = True
+ if previous_space:
+ space_break = True
+ previous_space = False
+ previous_break = True
+ else:
+ previous_space = False
+ previous_break = False
+
+ # Prepare for the next character.
+ index += 1
+ preceded_by_whitespace = (ch in '\0 \t\r\n\x85\u2028\u2029')
+ followed_by_whitespace = (index+1 >= len(scalar) or
+ scalar[index+1] in '\0 \t\r\n\x85\u2028\u2029')
+
+ # Let's decide what styles are allowed.
+ allow_flow_plain = True
+ allow_block_plain = True
+ allow_single_quoted = True
+ allow_double_quoted = True
+ allow_block = True
+
+ # Leading and trailing whitespaces are bad for plain scalars.
+ if (leading_space or leading_break
+ or trailing_space or trailing_break):
+ allow_flow_plain = allow_block_plain = False
+
+ # We do not permit trailing spaces for block scalars.
+ if trailing_space:
+ allow_block = False
+
+ # Spaces at the beginning of a new line are only acceptable for block
+ # scalars.
+ if break_space:
+ allow_flow_plain = allow_block_plain = allow_single_quoted = False
+
+        # Spaces followed by breaks, as well as special characters, are only
+ # allowed for double quoted scalars.
+ if space_break or special_characters:
+ allow_flow_plain = allow_block_plain = \
+ allow_single_quoted = allow_block = False
+
+ # Although the plain scalar writer supports breaks, we never emit
+ # multiline plain scalars.
+ if line_breaks:
+ allow_flow_plain = allow_block_plain = False
+
+ # Flow indicators are forbidden for flow plain scalars.
+ if flow_indicators:
+ allow_flow_plain = False
+
+ # Block indicators are forbidden for block plain scalars.
+ if block_indicators:
+ allow_block_plain = False
+
+ return ScalarAnalysis(scalar=scalar,
+ empty=False, multiline=line_breaks,
+ allow_flow_plain=allow_flow_plain,
+ allow_block_plain=allow_block_plain,
+ allow_single_quoted=allow_single_quoted,
+ allow_double_quoted=allow_double_quoted,
+ allow_block=allow_block)
+
+ # Writers.
+
+ def flush_stream(self):
+ if hasattr(self.stream, 'flush'):
+ self.stream.flush()
+
+ def write_stream_start(self):
+ # Write BOM if needed.
+ if self.encoding and self.encoding.startswith('utf-16'):
+ self.stream.write('\uFEFF'.encode(self.encoding))
+
+ def write_stream_end(self):
+ self.flush_stream()
+
+ def write_indicator(self, indicator, need_whitespace,
+ whitespace=False, indention=False):
+ if self.whitespace or not need_whitespace:
+ data = indicator
+ else:
+ data = ' '+indicator
+ self.whitespace = whitespace
+ self.indention = self.indention and indention
+ self.column += len(data)
+ self.open_ended = False
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+
+ def write_indent(self):
+ indent = self.indent or 0
+ if not self.indention or self.column > indent \
+ or (self.column == indent and not self.whitespace):
+ self.write_line_break()
+ if self.column < indent:
+ self.whitespace = True
+ data = ' '*(indent-self.column)
+ self.column = indent
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+
+ def write_line_break(self, data=None):
+ if data is None:
+ data = self.best_line_break
+ self.whitespace = True
+ self.indention = True
+ self.line += 1
+ self.column = 0
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+
+ def write_version_directive(self, version_text):
+ data = '%%YAML %s' % version_text
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_line_break()
+
+ def write_tag_directive(self, handle_text, prefix_text):
+ data = '%%TAG %s %s' % (handle_text, prefix_text)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_line_break()
+
+ # Scalar streams.
+
+ def write_single_quoted(self, text, split=True):
+ self.write_indicator('\'', True)
+ spaces = False
+ breaks = False
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if spaces:
+ if ch is None or ch != ' ':
+ if start+1 == end and self.column > self.best_width and split \
+ and start != 0 and end != len(text):
+ self.write_indent()
+ else:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ elif breaks:
+ if ch is None or ch not in '\n\x85\u2028\u2029':
+ if text[start] == '\n':
+ self.write_line_break()
+ for br in text[start:end]:
+ if br == '\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ self.write_indent()
+ start = end
+ else:
+ if ch is None or ch in ' \n\x85\u2028\u2029' or ch == '\'':
+ if start < end:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ if ch == '\'':
+ data = '\'\''
+ self.column += 2
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end + 1
+ if ch is not None:
+ spaces = (ch == ' ')
+ breaks = (ch in '\n\x85\u2028\u2029')
+ end += 1
+ self.write_indicator('\'', False)
+
+ ESCAPE_REPLACEMENTS = {
+ '\0': '0',
+ '\x07': 'a',
+ '\x08': 'b',
+ '\x09': 't',
+ '\x0A': 'n',
+ '\x0B': 'v',
+ '\x0C': 'f',
+ '\x0D': 'r',
+ '\x1B': 'e',
+ '\"': '\"',
+ '\\': '\\',
+ '\x85': 'N',
+ '\xA0': '_',
+ '\u2028': 'L',
+ '\u2029': 'P',
+ }
+
+ def write_double_quoted(self, text, split=True):
+ self.write_indicator('"', True)
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if ch is None or ch in '"\\\x85\u2028\u2029\uFEFF' \
+ or not ('\x20' <= ch <= '\x7E'
+ or (self.allow_unicode
+ and ('\xA0' <= ch <= '\uD7FF'
+ or '\uE000' <= ch <= '\uFFFD'))):
+ if start < end:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ if ch is not None:
+ if ch in self.ESCAPE_REPLACEMENTS:
+ data = '\\'+self.ESCAPE_REPLACEMENTS[ch]
+ elif ch <= '\xFF':
+ data = '\\x%02X' % ord(ch)
+ elif ch <= '\uFFFF':
+ data = '\\u%04X' % ord(ch)
+ else:
+ data = '\\U%08X' % ord(ch)
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end+1
+ if 0 < end < len(text)-1 and (ch == ' ' or start >= end) \
+ and self.column+(end-start) > self.best_width and split:
+ data = text[start:end]+'\\'
+ if start < end:
+ start = end
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_indent()
+ self.whitespace = False
+ self.indention = False
+ if text[start] == ' ':
+ data = '\\'
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ end += 1
+ self.write_indicator('"', False)
+
+ def determine_block_hints(self, text):
+ hints = ''
+ if text:
+ if text[0] in ' \n\x85\u2028\u2029':
+ hints += str(self.best_indent)
+ if text[-1] not in '\n\x85\u2028\u2029':
+ hints += '-'
+ elif len(text) == 1 or text[-2] in '\n\x85\u2028\u2029':
+ hints += '+'
+ return hints
+
+ def write_folded(self, text):
+ hints = self.determine_block_hints(text)
+ self.write_indicator('>'+hints, True)
+ if hints[-1:] == '+':
+ self.open_ended = True
+ self.write_line_break()
+ leading_space = True
+ spaces = False
+ breaks = True
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if breaks:
+ if ch is None or ch not in '\n\x85\u2028\u2029':
+ if not leading_space and ch is not None and ch != ' ' \
+ and text[start] == '\n':
+ self.write_line_break()
+ leading_space = (ch == ' ')
+ for br in text[start:end]:
+ if br == '\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ if ch is not None:
+ self.write_indent()
+ start = end
+ elif spaces:
+ if ch != ' ':
+ if start+1 == end and self.column > self.best_width:
+ self.write_indent()
+ else:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ else:
+ if ch is None or ch in ' \n\x85\u2028\u2029':
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ if ch is None:
+ self.write_line_break()
+ start = end
+ if ch is not None:
+ breaks = (ch in '\n\x85\u2028\u2029')
+ spaces = (ch == ' ')
+ end += 1
+
+ def write_literal(self, text):
+ hints = self.determine_block_hints(text)
+ self.write_indicator('|'+hints, True)
+ if hints[-1:] == '+':
+ self.open_ended = True
+ self.write_line_break()
+ breaks = True
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if breaks:
+ if ch is None or ch not in '\n\x85\u2028\u2029':
+ for br in text[start:end]:
+ if br == '\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ if ch is not None:
+ self.write_indent()
+ start = end
+ else:
+ if ch is None or ch in '\n\x85\u2028\u2029':
+ data = text[start:end]
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ if ch is None:
+ self.write_line_break()
+ start = end
+ if ch is not None:
+ breaks = (ch in '\n\x85\u2028\u2029')
+ end += 1
+
+ def write_plain(self, text, split=True):
+ if self.root_context:
+ self.open_ended = True
+ if not text:
+ return
+ if not self.whitespace:
+ data = ' '
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.whitespace = False
+ self.indention = False
+ spaces = False
+ breaks = False
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if spaces:
+ if ch != ' ':
+ if start+1 == end and self.column > self.best_width and split:
+ self.write_indent()
+ self.whitespace = False
+ self.indention = False
+ else:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ elif breaks:
+ if ch not in '\n\x85\u2028\u2029':
+ if text[start] == '\n':
+ self.write_line_break()
+ for br in text[start:end]:
+ if br == '\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ self.write_indent()
+ self.whitespace = False
+ self.indention = False
+ start = end
+ else:
+ if ch is None or ch in ' \n\x85\u2028\u2029':
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ if ch is not None:
+ spaces = (ch == ' ')
+ breaks = (ch in '\n\x85\u2028\u2029')
+ end += 1
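The grammar comment at the top of emitter.py describes the event stream the Emitter consumes. The easiest way to see it in action is yaml.emit(), which feeds a list of events through a Dumper; a small sketch with hand-built events (standard API):

    import yaml
    from yaml.events import (StreamStartEvent, StreamEndEvent,
                             DocumentStartEvent, DocumentEndEvent,
                             MappingStartEvent, MappingEndEvent, ScalarEvent)

    events = [
        StreamStartEvent(),
        DocumentStartEvent(explicit=False),
        MappingStartEvent(anchor=None, tag=None, implicit=True),
        ScalarEvent(anchor=None, tag=None, implicit=(True, False), value='key'),
        ScalarEvent(anchor=None, tag=None, implicit=(True, False), value='value'),
        MappingEndEvent(),
        DocumentEndEvent(explicit=False),
        StreamEndEvent(),
    ]
    print(yaml.emit(events), end='')    # -> "key: value"
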
diff --git a/third_party/python/PyYAML/lib3/yaml/error.py b/third_party/python/PyYAML/lib3/yaml/error.py
new file mode 100644
index 0000000000..b796b4dc51
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/error.py
@@ -0,0 +1,75 @@
+
+__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']
+
+class Mark:
+
+ def __init__(self, name, index, line, column, buffer, pointer):
+ self.name = name
+ self.index = index
+ self.line = line
+ self.column = column
+ self.buffer = buffer
+ self.pointer = pointer
+
+ def get_snippet(self, indent=4, max_length=75):
+ if self.buffer is None:
+ return None
+ head = ''
+ start = self.pointer
+ while start > 0 and self.buffer[start-1] not in '\0\r\n\x85\u2028\u2029':
+ start -= 1
+ if self.pointer-start > max_length/2-1:
+ head = ' ... '
+ start += 5
+ break
+ tail = ''
+ end = self.pointer
+ while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029':
+ end += 1
+ if end-self.pointer > max_length/2-1:
+ tail = ' ... '
+ end -= 5
+ break
+ snippet = self.buffer[start:end]
+ return ' '*indent + head + snippet + tail + '\n' \
+ + ' '*(indent+self.pointer-start+len(head)) + '^'
+
+ def __str__(self):
+ snippet = self.get_snippet()
+ where = " in \"%s\", line %d, column %d" \
+ % (self.name, self.line+1, self.column+1)
+ if snippet is not None:
+ where += ":\n"+snippet
+ return where
+
+class YAMLError(Exception):
+ pass
+
+class MarkedYAMLError(YAMLError):
+
+ def __init__(self, context=None, context_mark=None,
+ problem=None, problem_mark=None, note=None):
+ self.context = context
+ self.context_mark = context_mark
+ self.problem = problem
+ self.problem_mark = problem_mark
+ self.note = note
+
+ def __str__(self):
+ lines = []
+ if self.context is not None:
+ lines.append(self.context)
+ if self.context_mark is not None \
+ and (self.problem is None or self.problem_mark is None
+ or self.context_mark.name != self.problem_mark.name
+ or self.context_mark.line != self.problem_mark.line
+ or self.context_mark.column != self.problem_mark.column):
+ lines.append(str(self.context_mark))
+ if self.problem is not None:
+ lines.append(self.problem)
+ if self.problem_mark is not None:
+ lines.append(str(self.problem_mark))
+ if self.note is not None:
+ lines.append(self.note)
+ return '\n'.join(lines)
+
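error.py is what gives PyYAML its readable diagnostics: Mark remembers a position in the input and renders a caret snippet, and MarkedYAMLError ties the context and problem marks together. A quick sketch of what that looks like from the caller's side (the exact message depends on the input):

    import yaml

    try:
        yaml.safe_load("key: [1, 2\n")      # unclosed flow sequence
    except yaml.MarkedYAMLError as exc:
        print(exc.problem)                  # e.g. "expected ',' or ']', but got ..."
        print(exc.problem_mark)             # Mark.__str__(): name, line, column + snippet
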
diff --git a/third_party/python/PyYAML/lib3/yaml/events.py b/third_party/python/PyYAML/lib3/yaml/events.py
new file mode 100644
index 0000000000..f79ad389cb
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/events.py
@@ -0,0 +1,86 @@
+
+# Abstract classes.
+
+class Event(object):
+ def __init__(self, start_mark=None, end_mark=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ def __repr__(self):
+ attributes = [key for key in ['anchor', 'tag', 'implicit', 'value']
+ if hasattr(self, key)]
+ arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
+ for key in attributes])
+ return '%s(%s)' % (self.__class__.__name__, arguments)
+
+class NodeEvent(Event):
+ def __init__(self, anchor, start_mark=None, end_mark=None):
+ self.anchor = anchor
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class CollectionStartEvent(NodeEvent):
+ def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
+ flow_style=None):
+ self.anchor = anchor
+ self.tag = tag
+ self.implicit = implicit
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.flow_style = flow_style
+
+class CollectionEndEvent(Event):
+ pass
+
+# Implementations.
+
+class StreamStartEvent(Event):
+ def __init__(self, start_mark=None, end_mark=None, encoding=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.encoding = encoding
+
+class StreamEndEvent(Event):
+ pass
+
+class DocumentStartEvent(Event):
+ def __init__(self, start_mark=None, end_mark=None,
+ explicit=None, version=None, tags=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.explicit = explicit
+ self.version = version
+ self.tags = tags
+
+class DocumentEndEvent(Event):
+ def __init__(self, start_mark=None, end_mark=None,
+ explicit=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.explicit = explicit
+
+class AliasEvent(NodeEvent):
+ pass
+
+class ScalarEvent(NodeEvent):
+ def __init__(self, anchor, tag, implicit, value,
+ start_mark=None, end_mark=None, style=None):
+ self.anchor = anchor
+ self.tag = tag
+ self.implicit = implicit
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.style = style
+
+class SequenceStartEvent(CollectionStartEvent):
+ pass
+
+class SequenceEndEvent(CollectionEndEvent):
+ pass
+
+class MappingStartEvent(CollectionStartEvent):
+ pass
+
+class MappingEndEvent(CollectionEndEvent):
+ pass
+
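The event classes above are what yaml.parse() yields and what yaml.emit() consumes, so parsing and emitting meet at this file. A small sketch of inspecting the event stream for a short document:

    import yaml

    for event in yaml.parse("- a\n- b\n", Loader=yaml.SafeLoader):
        print(type(event).__name__, getattr(event, 'value', ''))
    # StreamStartEvent, DocumentStartEvent, SequenceStartEvent,
    # ScalarEvent 'a', ScalarEvent 'b', SequenceEndEvent,
    # DocumentEndEvent, StreamEndEvent
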
diff --git a/third_party/python/PyYAML/lib3/yaml/loader.py b/third_party/python/PyYAML/lib3/yaml/loader.py
new file mode 100644
index 0000000000..e90c11224c
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/loader.py
@@ -0,0 +1,63 @@
+
+__all__ = ['BaseLoader', 'FullLoader', 'SafeLoader', 'Loader', 'UnsafeLoader']
+
+from .reader import *
+from .scanner import *
+from .parser import *
+from .composer import *
+from .constructor import *
+from .resolver import *
+
+class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ BaseConstructor.__init__(self)
+ BaseResolver.__init__(self)
+
+class FullLoader(Reader, Scanner, Parser, Composer, FullConstructor, Resolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ FullConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ SafeConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ Constructor.__init__(self)
+ Resolver.__init__(self)
+
+# UnsafeLoader is the same as Loader (which is and was always unsafe on
+# untrusted input). Use of either Loader or UnsafeLoader should be rare, since
+# FullLoader should be able to load almost all YAML safely. Loader is left intact
+# to ensure backwards compatibility.
+class UnsafeLoader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ Constructor.__init__(self)
+ Resolver.__init__(self)
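The loaders in loader.py differ only in which constructor they pair with the reader/scanner/parser/composer stack, which decides how much of the input may become live Python objects. A brief sketch of the practical difference (standard API; the !!python/tuple tag is just an illustration):

    import yaml

    # SafeLoader builds only standard YAML types (str, int, list, dict, ...).
    print(yaml.load("a: [1, 2]", Loader=yaml.SafeLoader))        # {'a': [1, 2]}

    # Loader/UnsafeLoader also run python-specific constructors, so they
    # must only ever see trusted input.
    print(yaml.load("b: !!python/tuple [1, 2]", Loader=yaml.UnsafeLoader))  # {'b': (1, 2)}
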
diff --git a/third_party/python/PyYAML/lib3/yaml/nodes.py b/third_party/python/PyYAML/lib3/yaml/nodes.py
new file mode 100644
index 0000000000..c4f070c41e
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/nodes.py
@@ -0,0 +1,49 @@
+
+class Node(object):
+ def __init__(self, tag, value, start_mark, end_mark):
+ self.tag = tag
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ def __repr__(self):
+ value = self.value
+ #if isinstance(value, list):
+ # if len(value) == 0:
+ # value = '<empty>'
+ # elif len(value) == 1:
+ # value = '<1 item>'
+ # else:
+ # value = '<%d items>' % len(value)
+ #else:
+ # if len(value) > 75:
+ # value = repr(value[:70]+u' ... ')
+ # else:
+ # value = repr(value)
+ value = repr(value)
+ return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value)
+
+class ScalarNode(Node):
+ id = 'scalar'
+ def __init__(self, tag, value,
+ start_mark=None, end_mark=None, style=None):
+ self.tag = tag
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.style = style
+
+class CollectionNode(Node):
+ def __init__(self, tag, value,
+ start_mark=None, end_mark=None, flow_style=None):
+ self.tag = tag
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.flow_style = flow_style
+
+class SequenceNode(CollectionNode):
+ id = 'sequence'
+
+class MappingNode(CollectionNode):
+ id = 'mapping'
+
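Nodes sit between events and native objects: the composer builds them from the event stream, and the constructor turns them into Python values. yaml.compose() exposes that intermediate tree directly, for example:

    import yaml

    node = yaml.compose("name: demo\nports: [80, 443]\n", Loader=yaml.SafeLoader)
    print(type(node).__name__)              # MappingNode
    for key_node, value_node in node.value:
        print(key_node.value, value_node.tag)
    # name  tag:yaml.org,2002:str
    # ports tag:yaml.org,2002:seq
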
diff --git a/third_party/python/PyYAML/lib3/yaml/parser.py b/third_party/python/PyYAML/lib3/yaml/parser.py
new file mode 100644
index 0000000000..13a5995d29
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/parser.py
@@ -0,0 +1,589 @@
+
+# The following YAML grammar is LL(1) and is parsed by a recursive descent
+# parser.
+#
+# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
+# implicit_document ::= block_node DOCUMENT-END*
+# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+# block_node_or_indentless_sequence ::=
+# ALIAS
+# | properties (block_content | indentless_block_sequence)?
+# | block_content
+# | indentless_block_sequence
+# block_node ::= ALIAS
+# | properties block_content?
+# | block_content
+# flow_node ::= ALIAS
+# | properties flow_content?
+# | flow_content
+# properties ::= TAG ANCHOR? | ANCHOR TAG?
+# block_content ::= block_collection | flow_collection | SCALAR
+# flow_content ::= flow_collection | SCALAR
+# block_collection ::= block_sequence | block_mapping
+# flow_collection ::= flow_sequence | flow_mapping
+# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
+# block_mapping ::= BLOCK-MAPPING_START
+# ((KEY block_node_or_indentless_sequence?)?
+# (VALUE block_node_or_indentless_sequence?)?)*
+# BLOCK-END
+# flow_sequence ::= FLOW-SEQUENCE-START
+# (flow_sequence_entry FLOW-ENTRY)*
+# flow_sequence_entry?
+# FLOW-SEQUENCE-END
+# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+# flow_mapping ::= FLOW-MAPPING-START
+# (flow_mapping_entry FLOW-ENTRY)*
+# flow_mapping_entry?
+# FLOW-MAPPING-END
+# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+#
+# FIRST sets:
+#
+# stream: { STREAM-START }
+# explicit_document: { DIRECTIVE DOCUMENT-START }
+# implicit_document: FIRST(block_node)
+# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
+# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# block_sequence: { BLOCK-SEQUENCE-START }
+# block_mapping: { BLOCK-MAPPING-START }
+# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
+# indentless_sequence: { ENTRY }
+# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# flow_sequence: { FLOW-SEQUENCE-START }
+# flow_mapping: { FLOW-MAPPING-START }
+# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+
+__all__ = ['Parser', 'ParserError']
+
+from .error import MarkedYAMLError
+from .tokens import *
+from .events import *
+from .scanner import *
+
+class ParserError(MarkedYAMLError):
+ pass
+
+class Parser:
+    # Since writing a recursive descent parser is a straightforward task, we
+ # do not give many comments here.
+
+ DEFAULT_TAGS = {
+ '!': '!',
+ '!!': 'tag:yaml.org,2002:',
+ }
+
+ def __init__(self):
+ self.current_event = None
+ self.yaml_version = None
+ self.tag_handles = {}
+ self.states = []
+ self.marks = []
+ self.state = self.parse_stream_start
+
+ def dispose(self):
+ # Reset the state attributes (to clear self-references)
+ self.states = []
+ self.state = None
+
+ def check_event(self, *choices):
+ # Check the type of the next event.
+ if self.current_event is None:
+ if self.state:
+ self.current_event = self.state()
+ if self.current_event is not None:
+ if not choices:
+ return True
+ for choice in choices:
+ if isinstance(self.current_event, choice):
+ return True
+ return False
+
+ def peek_event(self):
+ # Get the next event.
+ if self.current_event is None:
+ if self.state:
+ self.current_event = self.state()
+ return self.current_event
+
+ def get_event(self):
+ # Get the next event and proceed further.
+ if self.current_event is None:
+ if self.state:
+ self.current_event = self.state()
+ value = self.current_event
+ self.current_event = None
+ return value
+
+ # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
+ # implicit_document ::= block_node DOCUMENT-END*
+ # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+
+ def parse_stream_start(self):
+
+ # Parse the stream start.
+ token = self.get_token()
+ event = StreamStartEvent(token.start_mark, token.end_mark,
+ encoding=token.encoding)
+
+ # Prepare the next state.
+ self.state = self.parse_implicit_document_start
+
+ return event
+
+ def parse_implicit_document_start(self):
+
+ # Parse an implicit document.
+ if not self.check_token(DirectiveToken, DocumentStartToken,
+ StreamEndToken):
+ self.tag_handles = self.DEFAULT_TAGS
+ token = self.peek_token()
+ start_mark = end_mark = token.start_mark
+ event = DocumentStartEvent(start_mark, end_mark,
+ explicit=False)
+
+ # Prepare the next state.
+ self.states.append(self.parse_document_end)
+ self.state = self.parse_block_node
+
+ return event
+
+ else:
+ return self.parse_document_start()
+
+ def parse_document_start(self):
+
+ # Parse any extra document end indicators.
+ while self.check_token(DocumentEndToken):
+ self.get_token()
+
+ # Parse an explicit document.
+ if not self.check_token(StreamEndToken):
+ token = self.peek_token()
+ start_mark = token.start_mark
+ version, tags = self.process_directives()
+ if not self.check_token(DocumentStartToken):
+ raise ParserError(None, None,
+ "expected '<document start>', but found %r"
+ % self.peek_token().id,
+ self.peek_token().start_mark)
+ token = self.get_token()
+ end_mark = token.end_mark
+ event = DocumentStartEvent(start_mark, end_mark,
+ explicit=True, version=version, tags=tags)
+ self.states.append(self.parse_document_end)
+ self.state = self.parse_document_content
+ else:
+ # Parse the end of the stream.
+ token = self.get_token()
+ event = StreamEndEvent(token.start_mark, token.end_mark)
+ assert not self.states
+ assert not self.marks
+ self.state = None
+ return event
+
+ def parse_document_end(self):
+
+ # Parse the document end.
+ token = self.peek_token()
+ start_mark = end_mark = token.start_mark
+ explicit = False
+ if self.check_token(DocumentEndToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ explicit = True
+ event = DocumentEndEvent(start_mark, end_mark,
+ explicit=explicit)
+
+ # Prepare the next state.
+ self.state = self.parse_document_start
+
+ return event
+
+ def parse_document_content(self):
+ if self.check_token(DirectiveToken,
+ DocumentStartToken, DocumentEndToken, StreamEndToken):
+ event = self.process_empty_scalar(self.peek_token().start_mark)
+ self.state = self.states.pop()
+ return event
+ else:
+ return self.parse_block_node()
+
+ def process_directives(self):
+ self.yaml_version = None
+ self.tag_handles = {}
+ while self.check_token(DirectiveToken):
+ token = self.get_token()
+ if token.name == 'YAML':
+ if self.yaml_version is not None:
+ raise ParserError(None, None,
+ "found duplicate YAML directive", token.start_mark)
+ major, minor = token.value
+ if major != 1:
+ raise ParserError(None, None,
+ "found incompatible YAML document (version 1.* is required)",
+ token.start_mark)
+ self.yaml_version = token.value
+ elif token.name == 'TAG':
+ handle, prefix = token.value
+ if handle in self.tag_handles:
+ raise ParserError(None, None,
+ "duplicate tag handle %r" % handle,
+ token.start_mark)
+ self.tag_handles[handle] = prefix
+ if self.tag_handles:
+ value = self.yaml_version, self.tag_handles.copy()
+ else:
+ value = self.yaml_version, None
+ for key in self.DEFAULT_TAGS:
+ if key not in self.tag_handles:
+ self.tag_handles[key] = self.DEFAULT_TAGS[key]
+ return value
+
+ # block_node_or_indentless_sequence ::= ALIAS
+ # | properties (block_content | indentless_block_sequence)?
+ # | block_content
+ # | indentless_block_sequence
+ # block_node ::= ALIAS
+ # | properties block_content?
+ # | block_content
+ # flow_node ::= ALIAS
+ # | properties flow_content?
+ # | flow_content
+ # properties ::= TAG ANCHOR? | ANCHOR TAG?
+ # block_content ::= block_collection | flow_collection | SCALAR
+ # flow_content ::= flow_collection | SCALAR
+ # block_collection ::= block_sequence | block_mapping
+ # flow_collection ::= flow_sequence | flow_mapping
+
+ def parse_block_node(self):
+ return self.parse_node(block=True)
+
+ def parse_flow_node(self):
+ return self.parse_node()
+
+ def parse_block_node_or_indentless_sequence(self):
+ return self.parse_node(block=True, indentless_sequence=True)
+
+ def parse_node(self, block=False, indentless_sequence=False):
+ if self.check_token(AliasToken):
+ token = self.get_token()
+ event = AliasEvent(token.value, token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ else:
+ anchor = None
+ tag = None
+ start_mark = end_mark = tag_mark = None
+ if self.check_token(AnchorToken):
+ token = self.get_token()
+ start_mark = token.start_mark
+ end_mark = token.end_mark
+ anchor = token.value
+ if self.check_token(TagToken):
+ token = self.get_token()
+ tag_mark = token.start_mark
+ end_mark = token.end_mark
+ tag = token.value
+ elif self.check_token(TagToken):
+ token = self.get_token()
+ start_mark = tag_mark = token.start_mark
+ end_mark = token.end_mark
+ tag = token.value
+ if self.check_token(AnchorToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ anchor = token.value
+ if tag is not None:
+ handle, suffix = tag
+ if handle is not None:
+ if handle not in self.tag_handles:
+ raise ParserError("while parsing a node", start_mark,
+ "found undefined tag handle %r" % handle,
+ tag_mark)
+ tag = self.tag_handles[handle]+suffix
+ else:
+ tag = suffix
+ #if tag == '!':
+ # raise ParserError("while parsing a node", start_mark,
+ # "found non-specific tag '!'", tag_mark,
+ # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
+ if start_mark is None:
+ start_mark = end_mark = self.peek_token().start_mark
+ event = None
+ implicit = (tag is None or tag == '!')
+ if indentless_sequence and self.check_token(BlockEntryToken):
+ end_mark = self.peek_token().end_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark)
+ self.state = self.parse_indentless_sequence_entry
+ else:
+ if self.check_token(ScalarToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ if (token.plain and tag is None) or tag == '!':
+ implicit = (True, False)
+ elif tag is None:
+ implicit = (False, True)
+ else:
+ implicit = (False, False)
+ event = ScalarEvent(anchor, tag, implicit, token.value,
+ start_mark, end_mark, style=token.style)
+ self.state = self.states.pop()
+ elif self.check_token(FlowSequenceStartToken):
+ end_mark = self.peek_token().end_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=True)
+ self.state = self.parse_flow_sequence_first_entry
+ elif self.check_token(FlowMappingStartToken):
+ end_mark = self.peek_token().end_mark
+ event = MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=True)
+ self.state = self.parse_flow_mapping_first_key
+ elif block and self.check_token(BlockSequenceStartToken):
+ end_mark = self.peek_token().start_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=False)
+ self.state = self.parse_block_sequence_first_entry
+ elif block and self.check_token(BlockMappingStartToken):
+ end_mark = self.peek_token().start_mark
+ event = MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=False)
+ self.state = self.parse_block_mapping_first_key
+ elif anchor is not None or tag is not None:
+ # Empty scalars are allowed even if a tag or an anchor is
+ # specified.
+ event = ScalarEvent(anchor, tag, (implicit, False), '',
+ start_mark, end_mark)
+ self.state = self.states.pop()
+ else:
+ if block:
+ node = 'block'
+ else:
+ node = 'flow'
+ token = self.peek_token()
+ raise ParserError("while parsing a %s node" % node, start_mark,
+ "expected the node content, but found %r" % token.id,
+ token.start_mark)
+ return event
+
+ # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+
+ def parse_block_sequence_first_entry(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_block_sequence_entry()
+
+ def parse_block_sequence_entry(self):
+ if self.check_token(BlockEntryToken):
+ token = self.get_token()
+ if not self.check_token(BlockEntryToken, BlockEndToken):
+ self.states.append(self.parse_block_sequence_entry)
+ return self.parse_block_node()
+ else:
+ self.state = self.parse_block_sequence_entry
+ return self.process_empty_scalar(token.end_mark)
+ if not self.check_token(BlockEndToken):
+ token = self.peek_token()
+ raise ParserError("while parsing a block collection", self.marks[-1],
+ "expected <block end>, but found %r" % token.id, token.start_mark)
+ token = self.get_token()
+ event = SequenceEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ # indentless_sequence ::= (BLOCK-ENTRY block_node?)+
+
+ def parse_indentless_sequence_entry(self):
+ if self.check_token(BlockEntryToken):
+ token = self.get_token()
+ if not self.check_token(BlockEntryToken,
+ KeyToken, ValueToken, BlockEndToken):
+ self.states.append(self.parse_indentless_sequence_entry)
+ return self.parse_block_node()
+ else:
+ self.state = self.parse_indentless_sequence_entry
+ return self.process_empty_scalar(token.end_mark)
+ token = self.peek_token()
+ event = SequenceEndEvent(token.start_mark, token.start_mark)
+ self.state = self.states.pop()
+ return event
+
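+ # A hedged illustration (not part of the upstream module) of the
+ # indentless-sequence rule above: block entries may sit at the same
+ # indentation as their mapping key. Assuming the package is importable
+ # as `yaml`:
+ #
+ # >>> import yaml
+ # >>> yaml.safe_load("key:\n- 1\n- 2\n")
+ # {'key': [1, 2]}
+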
+ # block_mapping ::= BLOCK-MAPPING_START
+ # ((KEY block_node_or_indentless_sequence?)?
+ # (VALUE block_node_or_indentless_sequence?)?)*
+ # BLOCK-END
+
+ def parse_block_mapping_first_key(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_block_mapping_key()
+
+ def parse_block_mapping_key(self):
+ if self.check_token(KeyToken):
+ token = self.get_token()
+ if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+ self.states.append(self.parse_block_mapping_value)
+ return self.parse_block_node_or_indentless_sequence()
+ else:
+ self.state = self.parse_block_mapping_value
+ return self.process_empty_scalar(token.end_mark)
+ if not self.check_token(BlockEndToken):
+ token = self.peek_token()
+ raise ParserError("while parsing a block mapping", self.marks[-1],
+ "expected <block end>, but found %r" % token.id, token.start_mark)
+ token = self.get_token()
+ event = MappingEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ def parse_block_mapping_value(self):
+ if self.check_token(ValueToken):
+ token = self.get_token()
+ if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+ self.states.append(self.parse_block_mapping_key)
+ return self.parse_block_node_or_indentless_sequence()
+ else:
+ self.state = self.parse_block_mapping_key
+ return self.process_empty_scalar(token.end_mark)
+ else:
+ self.state = self.parse_block_mapping_key
+ token = self.peek_token()
+ return self.process_empty_scalar(token.start_mark)
+
+ # flow_sequence ::= FLOW-SEQUENCE-START
+ # (flow_sequence_entry FLOW-ENTRY)*
+ # flow_sequence_entry?
+ # FLOW-SEQUENCE-END
+ # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+ #
+ # Note that while production rules for both flow_sequence_entry and
+ # flow_mapping_entry are equal, their interpretations are different.
+ # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
+ # generates an inline mapping (set syntax).
+
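+ # A hedged illustration (not part of the upstream module) of the note
+ # above: a "KEY ... VALUE" pair inside a flow sequence becomes a
+ # single-pair mapping entry. Assuming the package is importable as `yaml`:
+ #
+ # >>> import yaml
+ # >>> yaml.safe_load("[ key: value, plain ]")
+ # [{'key': 'value'}, 'plain']
+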
+ def parse_flow_sequence_first_entry(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_flow_sequence_entry(first=True)
+
+ def parse_flow_sequence_entry(self, first=False):
+ if not self.check_token(FlowSequenceEndToken):
+ if not first:
+ if self.check_token(FlowEntryToken):
+ self.get_token()
+ else:
+ token = self.peek_token()
+ raise ParserError("while parsing a flow sequence", self.marks[-1],
+ "expected ',' or ']', but got %r" % token.id, token.start_mark)
+
+ if self.check_token(KeyToken):
+ token = self.peek_token()
+ event = MappingStartEvent(None, None, True,
+ token.start_mark, token.end_mark,
+ flow_style=True)
+ self.state = self.parse_flow_sequence_entry_mapping_key
+ return event
+ elif not self.check_token(FlowSequenceEndToken):
+ self.states.append(self.parse_flow_sequence_entry)
+ return self.parse_flow_node()
+ token = self.get_token()
+ event = SequenceEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ def parse_flow_sequence_entry_mapping_key(self):
+ token = self.get_token()
+ if not self.check_token(ValueToken,
+ FlowEntryToken, FlowSequenceEndToken):
+ self.states.append(self.parse_flow_sequence_entry_mapping_value)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_sequence_entry_mapping_value
+ return self.process_empty_scalar(token.end_mark)
+
+ def parse_flow_sequence_entry_mapping_value(self):
+ if self.check_token(ValueToken):
+ token = self.get_token()
+ if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
+ self.states.append(self.parse_flow_sequence_entry_mapping_end)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_sequence_entry_mapping_end
+ return self.process_empty_scalar(token.end_mark)
+ else:
+ self.state = self.parse_flow_sequence_entry_mapping_end
+ token = self.peek_token()
+ return self.process_empty_scalar(token.start_mark)
+
+ def parse_flow_sequence_entry_mapping_end(self):
+ self.state = self.parse_flow_sequence_entry
+ token = self.peek_token()
+ return MappingEndEvent(token.start_mark, token.start_mark)
+
+ # flow_mapping ::= FLOW-MAPPING-START
+ # (flow_mapping_entry FLOW-ENTRY)*
+ # flow_mapping_entry?
+ # FLOW-MAPPING-END
+ # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+
+ def parse_flow_mapping_first_key(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_flow_mapping_key(first=True)
+
+ def parse_flow_mapping_key(self, first=False):
+ if not self.check_token(FlowMappingEndToken):
+ if not first:
+ if self.check_token(FlowEntryToken):
+ self.get_token()
+ else:
+ token = self.peek_token()
+ raise ParserError("while parsing a flow mapping", self.marks[-1],
+ "expected ',' or '}', but got %r" % token.id, token.start_mark)
+ if self.check_token(KeyToken):
+ token = self.get_token()
+ if not self.check_token(ValueToken,
+ FlowEntryToken, FlowMappingEndToken):
+ self.states.append(self.parse_flow_mapping_value)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_mapping_value
+ return self.process_empty_scalar(token.end_mark)
+ elif not self.check_token(FlowMappingEndToken):
+ self.states.append(self.parse_flow_mapping_empty_value)
+ return self.parse_flow_node()
+ token = self.get_token()
+ event = MappingEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ def parse_flow_mapping_value(self):
+ if self.check_token(ValueToken):
+ token = self.get_token()
+ if not self.check_token(FlowEntryToken, FlowMappingEndToken):
+ self.states.append(self.parse_flow_mapping_key)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_mapping_key
+ return self.process_empty_scalar(token.end_mark)
+ else:
+ self.state = self.parse_flow_mapping_key
+ token = self.peek_token()
+ return self.process_empty_scalar(token.start_mark)
+
+ def parse_flow_mapping_empty_value(self):
+ self.state = self.parse_flow_mapping_key
+ return self.process_empty_scalar(self.peek_token().start_mark)
+
+ def process_empty_scalar(self, mark):
+ return ScalarEvent(None, None, (True, False), '', mark, mark)
+
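+# A hedged illustration (not part of the upstream module) of the event
+# stream this state machine produces, using the package-level yaml.parse
+# helper (assumed to be importable as `yaml`):
+#
+# >>> import yaml
+# >>> [e.__class__.__name__ for e in yaml.parse("a: [1, 2]")]
+# ['StreamStartEvent', 'DocumentStartEvent', 'MappingStartEvent',
+#  'ScalarEvent', 'SequenceStartEvent', 'ScalarEvent', 'ScalarEvent',
+#  'SequenceEndEvent', 'MappingEndEvent', 'DocumentEndEvent', 'StreamEndEvent']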
diff --git a/third_party/python/PyYAML/lib3/yaml/reader.py b/third_party/python/PyYAML/lib3/yaml/reader.py
new file mode 100644
index 0000000000..774b0219b5
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/reader.py
@@ -0,0 +1,185 @@
+# This module contains abstractions for the input stream. You don't have to
+# look further; there is no pretty code.
+#
+# We define two classes here.
+#
+# Mark(source, line, column)
+# It's just a record and its only use is producing nice error messages.
+# Parser does not use it for any other purposes.
+#
+# Reader(source, data)
+# Reader determines the encoding of `data` and converts it to unicode.
+# Reader provides the following methods and attributes:
+# reader.peek(index=0) - return the character `index` positions ahead of the current one.
+# reader.forward(length=1) - move the current position forward by `length` characters.
+# reader.index - the number of the current character.
+# reader.line, reader.column - the line and the column of the current character.
+
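+# A hedged usage sketch (not part of the upstream module); assumes the
+# module is importable as yaml.reader:
+#
+# >>> from yaml.reader import Reader
+# >>> r = Reader("ab")
+# >>> r.peek(), r.prefix(2)
+# ('a', 'ab')
+# >>> r.forward()
+# >>> r.index, r.line, r.column
+# (1, 0, 1)
+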
+__all__ = ['Reader', 'ReaderError']
+
+from .error import YAMLError, Mark
+
+import codecs, re
+
+class ReaderError(YAMLError):
+
+ def __init__(self, name, position, character, encoding, reason):
+ self.name = name
+ self.character = character
+ self.position = position
+ self.encoding = encoding
+ self.reason = reason
+
+ def __str__(self):
+ if isinstance(self.character, bytes):
+ return "'%s' codec can't decode byte #x%02x: %s\n" \
+ " in \"%s\", position %d" \
+ % (self.encoding, ord(self.character), self.reason,
+ self.name, self.position)
+ else:
+ return "unacceptable character #x%04x: %s\n" \
+ " in \"%s\", position %d" \
+ % (self.character, self.reason,
+ self.name, self.position)
+
+class Reader(object):
+ # Reader:
+ # - determines the data encoding and converts it to a unicode string,
+ # - checks if characters are in allowed range,
+ # - adds '\0' to the end.
+
+ # Reader accepts
+ # - a `bytes` object,
+ # - a `str` object,
+ # - a file-like object with its `read` method returning `str`,
+ # - a file-like object with its `read` method returning `bytes`.
+
+ # Yeah, it's ugly and slow.
+
+ def __init__(self, stream):
+ self.name = None
+ self.stream = None
+ self.stream_pointer = 0
+ self.eof = True
+ self.buffer = ''
+ self.pointer = 0
+ self.raw_buffer = None
+ self.raw_decode = None
+ self.encoding = None
+ self.index = 0
+ self.line = 0
+ self.column = 0
+ if isinstance(stream, str):
+ self.name = "<unicode string>"
+ self.check_printable(stream)
+ self.buffer = stream+'\0'
+ elif isinstance(stream, bytes):
+ self.name = "<byte string>"
+ self.raw_buffer = stream
+ self.determine_encoding()
+ else:
+ self.stream = stream
+ self.name = getattr(stream, 'name', "<file>")
+ self.eof = False
+ self.raw_buffer = None
+ self.determine_encoding()
+
+ def peek(self, index=0):
+ try:
+ return self.buffer[self.pointer+index]
+ except IndexError:
+ self.update(index+1)
+ return self.buffer[self.pointer+index]
+
+ def prefix(self, length=1):
+ if self.pointer+length >= len(self.buffer):
+ self.update(length)
+ return self.buffer[self.pointer:self.pointer+length]
+
+ def forward(self, length=1):
+ if self.pointer+length+1 >= len(self.buffer):
+ self.update(length+1)
+ while length:
+ ch = self.buffer[self.pointer]
+ self.pointer += 1
+ self.index += 1
+ if ch in '\n\x85\u2028\u2029' \
+ or (ch == '\r' and self.buffer[self.pointer] != '\n'):
+ self.line += 1
+ self.column = 0
+ elif ch != '\uFEFF':
+ self.column += 1
+ length -= 1
+
+ def get_mark(self):
+ if self.stream is None:
+ return Mark(self.name, self.index, self.line, self.column,
+ self.buffer, self.pointer)
+ else:
+ return Mark(self.name, self.index, self.line, self.column,
+ None, None)
+
+ def determine_encoding(self):
+ while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
+ self.update_raw()
+ if isinstance(self.raw_buffer, bytes):
+ if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
+ self.raw_decode = codecs.utf_16_le_decode
+ self.encoding = 'utf-16-le'
+ elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
+ self.raw_decode = codecs.utf_16_be_decode
+ self.encoding = 'utf-16-be'
+ else:
+ self.raw_decode = codecs.utf_8_decode
+ self.encoding = 'utf-8'
+ self.update(1)
+
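+ # A hedged sketch (not part of the upstream module) of the encoding
+ # detection above; only the BOMs handled by determine_encoding are shown:
+ #
+ # >>> from yaml.reader import Reader
+ # >>> Reader(b'\xff\xfea\x00').encoding
+ # 'utf-16-le'
+ # >>> Reader(b'abc').encoding
+ # 'utf-8'
+ # >>> Reader('abc').encoding is None   # str input needs no decoding
+ # True
+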
+ NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD\U00010000-\U0010ffff]')
+ def check_printable(self, data):
+ match = self.NON_PRINTABLE.search(data)
+ if match:
+ character = match.group()
+ position = self.index+(len(self.buffer)-self.pointer)+match.start()
+ raise ReaderError(self.name, position, ord(character),
+ 'unicode', "special characters are not allowed")
+
+ def update(self, length):
+ if self.raw_buffer is None:
+ return
+ self.buffer = self.buffer[self.pointer:]
+ self.pointer = 0
+ while len(self.buffer) < length:
+ if not self.eof:
+ self.update_raw()
+ if self.raw_decode is not None:
+ try:
+ data, converted = self.raw_decode(self.raw_buffer,
+ 'strict', self.eof)
+ except UnicodeDecodeError as exc:
+ character = self.raw_buffer[exc.start]
+ if self.stream is not None:
+ position = self.stream_pointer-len(self.raw_buffer)+exc.start
+ else:
+ position = exc.start
+ raise ReaderError(self.name, position, character,
+ exc.encoding, exc.reason)
+ else:
+ data = self.raw_buffer
+ converted = len(data)
+ self.check_printable(data)
+ self.buffer += data
+ self.raw_buffer = self.raw_buffer[converted:]
+ if self.eof:
+ self.buffer += '\0'
+ self.raw_buffer = None
+ break
+
+ def update_raw(self, size=4096):
+ data = self.stream.read(size)
+ if self.raw_buffer is None:
+ self.raw_buffer = data
+ else:
+ self.raw_buffer += data
+ self.stream_pointer += len(data)
+ if not data:
+ self.eof = True
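+
+# A hedged sketch (not part of the upstream module) of the streaming path
+# above (update/update_raw); assumes the module is importable as yaml.reader:
+#
+# >>> import io
+# >>> from yaml.reader import Reader
+# >>> r = Reader(io.StringIO("hi"))
+# >>> r.peek(0), r.peek(1), r.peek(2)
+# ('h', 'i', '\x00')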
diff --git a/third_party/python/PyYAML/lib3/yaml/representer.py b/third_party/python/PyYAML/lib3/yaml/representer.py
new file mode 100644
index 0000000000..3b0b192ef3
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/representer.py
@@ -0,0 +1,389 @@
+
+__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
+ 'RepresenterError']
+
+from .error import *
+from .nodes import *
+
+import datetime, copyreg, types, base64, collections
+
+class RepresenterError(YAMLError):
+ pass
+
+class BaseRepresenter:
+
+ yaml_representers = {}
+ yaml_multi_representers = {}
+
+ def __init__(self, default_style=None, default_flow_style=False, sort_keys=True):
+ self.default_style = default_style
+ self.sort_keys = sort_keys
+ self.default_flow_style = default_flow_style
+ self.represented_objects = {}
+ self.object_keeper = []
+ self.alias_key = None
+
+ def represent(self, data):
+ node = self.represent_data(data)
+ self.serialize(node)
+ self.represented_objects = {}
+ self.object_keeper = []
+ self.alias_key = None
+
+ def represent_data(self, data):
+ if self.ignore_aliases(data):
+ self.alias_key = None
+ else:
+ self.alias_key = id(data)
+ if self.alias_key is not None:
+ if self.alias_key in self.represented_objects:
+ node = self.represented_objects[self.alias_key]
+ #if node is None:
+ # raise RepresenterError("recursive objects are not allowed: %r" % data)
+ return node
+ #self.represented_objects[alias_key] = None
+ self.object_keeper.append(data)
+ data_types = type(data).__mro__
+ if data_types[0] in self.yaml_representers:
+ node = self.yaml_representers[data_types[0]](self, data)
+ else:
+ for data_type in data_types:
+ if data_type in self.yaml_multi_representers:
+ node = self.yaml_multi_representers[data_type](self, data)
+ break
+ else:
+ if None in self.yaml_multi_representers:
+ node = self.yaml_multi_representers[None](self, data)
+ elif None in self.yaml_representers:
+ node = self.yaml_representers[None](self, data)
+ else:
+ node = ScalarNode(None, str(data))
+ #if alias_key is not None:
+ # self.represented_objects[alias_key] = node
+ return node
+
+ @classmethod
+ def add_representer(cls, data_type, representer):
+ if not 'yaml_representers' in cls.__dict__:
+ cls.yaml_representers = cls.yaml_representers.copy()
+ cls.yaml_representers[data_type] = representer
+
+ @classmethod
+ def add_multi_representer(cls, data_type, representer):
+ if not 'yaml_multi_representers' in cls.__dict__:
+ cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
+ cls.yaml_multi_representers[data_type] = representer
+
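+ # A hedged usage sketch (not part of the upstream module) for the
+ # registration hooks above, via the package-level helpers assumed to be
+ # exposed as yaml.add_representer and yaml.dump:
+ #
+ # >>> import yaml
+ # >>> class Point:
+ # ...     def __init__(self, x, y):
+ # ...         self.x, self.y = x, y
+ # >>> def point_representer(dumper, data):
+ # ...     return dumper.represent_mapping('!point', {'x': data.x, 'y': data.y})
+ # >>> yaml.add_representer(Point, point_representer)
+ # >>> yaml.dump(Point(1, 2))
+ # '!point\nx: 1\ny: 2\n'
+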
+ def represent_scalar(self, tag, value, style=None):
+ if style is None:
+ style = self.default_style
+ node = ScalarNode(tag, value, style=style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ return node
+
+ def represent_sequence(self, tag, sequence, flow_style=None):
+ value = []
+ node = SequenceNode(tag, value, flow_style=flow_style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ best_style = True
+ for item in sequence:
+ node_item = self.represent_data(item)
+ if not (isinstance(node_item, ScalarNode) and not node_item.style):
+ best_style = False
+ value.append(node_item)
+ if flow_style is None:
+ if self.default_flow_style is not None:
+ node.flow_style = self.default_flow_style
+ else:
+ node.flow_style = best_style
+ return node
+
+ def represent_mapping(self, tag, mapping, flow_style=None):
+ value = []
+ node = MappingNode(tag, value, flow_style=flow_style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ best_style = True
+ if hasattr(mapping, 'items'):
+ mapping = list(mapping.items())
+ if self.sort_keys:
+ try:
+ mapping = sorted(mapping)
+ except TypeError:
+ pass
+ for item_key, item_value in mapping:
+ node_key = self.represent_data(item_key)
+ node_value = self.represent_data(item_value)
+ if not (isinstance(node_key, ScalarNode) and not node_key.style):
+ best_style = False
+ if not (isinstance(node_value, ScalarNode) and not node_value.style):
+ best_style = False
+ value.append((node_key, node_value))
+ if flow_style is None:
+ if self.default_flow_style is not None:
+ node.flow_style = self.default_flow_style
+ else:
+ node.flow_style = best_style
+ return node
+
+ def ignore_aliases(self, data):
+ return False
+
+class SafeRepresenter(BaseRepresenter):
+
+ def ignore_aliases(self, data):
+ if data is None:
+ return True
+ if isinstance(data, tuple) and data == ():
+ return True
+ if isinstance(data, (str, bytes, bool, int, float)):
+ return True
+
+ def represent_none(self, data):
+ return self.represent_scalar('tag:yaml.org,2002:null', 'null')
+
+ def represent_str(self, data):
+ return self.represent_scalar('tag:yaml.org,2002:str', data)
+
+ def represent_binary(self, data):
+ if hasattr(base64, 'encodebytes'):
+ data = base64.encodebytes(data).decode('ascii')
+ else:
+ data = base64.encodestring(data).decode('ascii')
+ return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|')
+
+ def represent_bool(self, data):
+ if data:
+ value = 'true'
+ else:
+ value = 'false'
+ return self.represent_scalar('tag:yaml.org,2002:bool', value)
+
+ def represent_int(self, data):
+ return self.represent_scalar('tag:yaml.org,2002:int', str(data))
+
+ inf_value = 1e300
+ while repr(inf_value) != repr(inf_value*inf_value):
+ inf_value *= inf_value
+
+ def represent_float(self, data):
+ if data != data or (data == 0.0 and data == 1.0):
+ value = '.nan'
+ elif data == self.inf_value:
+ value = '.inf'
+ elif data == -self.inf_value:
+ value = '-.inf'
+ else:
+ value = repr(data).lower()
+ # Note that in some cases `repr(data)` represents a float number
+ # without the decimal parts. For instance:
+ # >>> repr(1e17)
+ # '1e17'
+ # Unfortunately, this is not a valid float representation according
+ # to the definition of the `!!float` tag. We fix this by adding
+ # '.0' before the 'e' symbol.
+ if '.' not in value and 'e' in value:
+ value = value.replace('e', '.0e', 1)
+ return self.represent_scalar('tag:yaml.org,2002:float', value)
+
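+ # A hedged check (not part of the upstream module) of the '.0' fix noted
+ # in represent_float above; assumes the module is importable as
+ # yaml.representer:
+ #
+ # >>> from yaml.representer import SafeRepresenter
+ # >>> SafeRepresenter().represent_float(1e17).value
+ # '1.0e+17'
+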
+ def represent_list(self, data):
+ #pairs = (len(data) > 0 and isinstance(data, list))
+ #if pairs:
+ # for item in data:
+ # if not isinstance(item, tuple) or len(item) != 2:
+ # pairs = False
+ # break
+ #if not pairs:
+ return self.represent_sequence('tag:yaml.org,2002:seq', data)
+ #value = []
+ #for item_key, item_value in data:
+ # value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
+ # [(item_key, item_value)]))
+ #return SequenceNode(u'tag:yaml.org,2002:pairs', value)
+
+ def represent_dict(self, data):
+ return self.represent_mapping('tag:yaml.org,2002:map', data)
+
+ def represent_set(self, data):
+ value = {}
+ for key in data:
+ value[key] = None
+ return self.represent_mapping('tag:yaml.org,2002:set', value)
+
+ def represent_date(self, data):
+ value = data.isoformat()
+ return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
+
+ def represent_datetime(self, data):
+ value = data.isoformat(' ')
+ return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
+
+ def represent_yaml_object(self, tag, data, cls, flow_style=None):
+ if hasattr(data, '__getstate__'):
+ state = data.__getstate__()
+ else:
+ state = data.__dict__.copy()
+ return self.represent_mapping(tag, state, flow_style=flow_style)
+
+ def represent_undefined(self, data):
+ raise RepresenterError("cannot represent an object", data)
+
+SafeRepresenter.add_representer(type(None),
+ SafeRepresenter.represent_none)
+
+SafeRepresenter.add_representer(str,
+ SafeRepresenter.represent_str)
+
+SafeRepresenter.add_representer(bytes,
+ SafeRepresenter.represent_binary)
+
+SafeRepresenter.add_representer(bool,
+ SafeRepresenter.represent_bool)
+
+SafeRepresenter.add_representer(int,
+ SafeRepresenter.represent_int)
+
+SafeRepresenter.add_representer(float,
+ SafeRepresenter.represent_float)
+
+SafeRepresenter.add_representer(list,
+ SafeRepresenter.represent_list)
+
+SafeRepresenter.add_representer(tuple,
+ SafeRepresenter.represent_list)
+
+SafeRepresenter.add_representer(dict,
+ SafeRepresenter.represent_dict)
+
+SafeRepresenter.add_representer(set,
+ SafeRepresenter.represent_set)
+
+SafeRepresenter.add_representer(datetime.date,
+ SafeRepresenter.represent_date)
+
+SafeRepresenter.add_representer(datetime.datetime,
+ SafeRepresenter.represent_datetime)
+
+SafeRepresenter.add_representer(None,
+ SafeRepresenter.represent_undefined)
+
+class Representer(SafeRepresenter):
+
+ def represent_complex(self, data):
+ if data.imag == 0.0:
+ data = '%r' % data.real
+ elif data.real == 0.0:
+ data = '%rj' % data.imag
+ elif data.imag > 0:
+ data = '%r+%rj' % (data.real, data.imag)
+ else:
+ data = '%r%rj' % (data.real, data.imag)
+ return self.represent_scalar('tag:yaml.org,2002:python/complex', data)
+
+ def represent_tuple(self, data):
+ return self.represent_sequence('tag:yaml.org,2002:python/tuple', data)
+
+ def represent_name(self, data):
+ name = '%s.%s' % (data.__module__, data.__name__)
+ return self.represent_scalar('tag:yaml.org,2002:python/name:'+name, '')
+
+ def represent_module(self, data):
+ return self.represent_scalar(
+ 'tag:yaml.org,2002:python/module:'+data.__name__, '')
+
+ def represent_object(self, data):
+ # We use the __reduce__ API to save the data. data.__reduce__ returns
+ # a tuple of length 2-5:
+ # (function, args, state, listitems, dictitems)
+
+ # For reconstructing, we call function(*args), then set its state,
+ # listitems, and dictitems if they are not None.
+
+ # A special case is when function.__name__ == '__newobj__'. In this
+ # case we create the object with args[0].__new__(*args).
+
+ # Another special case is when __reduce__ returns a string - we don't
+ # support it.
+
+ # We produce a !!python/object, !!python/object/new or
+ # !!python/object/apply node.
+
+ cls = type(data)
+ if cls in copyreg.dispatch_table:
+ reduce = copyreg.dispatch_table[cls](data)
+ elif hasattr(data, '__reduce_ex__'):
+ reduce = data.__reduce_ex__(2)
+ elif hasattr(data, '__reduce__'):
+ reduce = data.__reduce__()
+ else:
+ raise RepresenterError("cannot represent an object", data)
+ reduce = (list(reduce)+[None]*5)[:5]
+ function, args, state, listitems, dictitems = reduce
+ args = list(args)
+ if state is None:
+ state = {}
+ if listitems is not None:
+ listitems = list(listitems)
+ if dictitems is not None:
+ dictitems = dict(dictitems)
+ if function.__name__ == '__newobj__':
+ function = args[0]
+ args = args[1:]
+ tag = 'tag:yaml.org,2002:python/object/new:'
+ newobj = True
+ else:
+ tag = 'tag:yaml.org,2002:python/object/apply:'
+ newobj = False
+ function_name = '%s.%s' % (function.__module__, function.__name__)
+ if not args and not listitems and not dictitems \
+ and isinstance(state, dict) and newobj:
+ return self.represent_mapping(
+ 'tag:yaml.org,2002:python/object:'+function_name, state)
+ if not listitems and not dictitems \
+ and isinstance(state, dict) and not state:
+ return self.represent_sequence(tag+function_name, args)
+ value = {}
+ if args:
+ value['args'] = args
+ if state or not isinstance(state, dict):
+ value['state'] = state
+ if listitems:
+ value['listitems'] = listitems
+ if dictitems:
+ value['dictitems'] = dictitems
+ return self.represent_mapping(tag+function_name, value)
+
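+ # A hedged illustration (not part of the upstream module) of the
+ # !!python/object/apply form produced above; assumes yaml.dump with the
+ # default (non-safe) Dumper:
+ #
+ # >>> import yaml, datetime
+ # >>> yaml.dump(datetime.timedelta(days=1))
+ # '!!python/object/apply:datetime.timedelta\n- 1\n- 0\n- 0\n'
+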
+ def represent_ordered_dict(self, data):
+ # Provide uniform representation across different Python versions.
+ data_type = type(data)
+ tag = 'tag:yaml.org,2002:python/object/apply:%s.%s' \
+ % (data_type.__module__, data_type.__name__)
+ items = [[key, value] for key, value in data.items()]
+ return self.represent_sequence(tag, [items])
+
+Representer.add_representer(complex,
+ Representer.represent_complex)
+
+Representer.add_representer(tuple,
+ Representer.represent_tuple)
+
+Representer.add_representer(type,
+ Representer.represent_name)
+
+Representer.add_representer(collections.OrderedDict,
+ Representer.represent_ordered_dict)
+
+Representer.add_representer(types.FunctionType,
+ Representer.represent_name)
+
+Representer.add_representer(types.BuiltinFunctionType,
+ Representer.represent_name)
+
+Representer.add_representer(types.ModuleType,
+ Representer.represent_module)
+
+Representer.add_multi_representer(object,
+ Representer.represent_object)
+
diff --git a/third_party/python/PyYAML/lib3/yaml/resolver.py b/third_party/python/PyYAML/lib3/yaml/resolver.py
new file mode 100644
index 0000000000..02b82e73ee
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/resolver.py
@@ -0,0 +1,227 @@
+
+__all__ = ['BaseResolver', 'Resolver']
+
+from .error import *
+from .nodes import *
+
+import re
+
+class ResolverError(YAMLError):
+ pass
+
+class BaseResolver:
+
+ DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str'
+ DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq'
+ DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map'
+
+ yaml_implicit_resolvers = {}
+ yaml_path_resolvers = {}
+
+ def __init__(self):
+ self.resolver_exact_paths = []
+ self.resolver_prefix_paths = []
+
+ @classmethod
+ def add_implicit_resolver(cls, tag, regexp, first):
+ if not 'yaml_implicit_resolvers' in cls.__dict__:
+ implicit_resolvers = {}
+ for key in cls.yaml_implicit_resolvers:
+ implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:]
+ cls.yaml_implicit_resolvers = implicit_resolvers
+ if first is None:
+ first = [None]
+ for ch in first:
+ cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
+
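+ # A hedged usage sketch (not part of the upstream module); assumes the
+ # package-level helpers yaml.add_implicit_resolver and yaml.compose:
+ #
+ # >>> import re, yaml
+ # >>> yaml.add_implicit_resolver('!env', re.compile(r'^\$\{\w+\}$'), ['$'])
+ # >>> yaml.compose('${HOME}').tag
+ # '!env'
+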
+ @classmethod
+ def add_path_resolver(cls, tag, path, kind=None):
+ # Note: `add_path_resolver` is experimental. The API could be changed.
+ # `path` is a pattern that is matched against the path from the
+ # root to the node that is being considered. `path` elements are
+ # tuples `(node_check, index_check)`. `node_check` is a node class:
+ # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None`
+ # matches any kind of a node. `index_check` could be `None`, a boolean
+ # value, a string value, or a number. `None` and `False` match against
+ # any _value_ of sequence and mapping nodes. `True` matches against
+ # any _key_ of a mapping node. A string `index_check` matches against
+ # a mapping value that corresponds to a scalar key whose content is
+ # equal to the `index_check` value. An integer `index_check` matches
+ # against a sequence value with the index equal to `index_check`.
+ if not 'yaml_path_resolvers' in cls.__dict__:
+ cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
+ new_path = []
+ for element in path:
+ if isinstance(element, (list, tuple)):
+ if len(element) == 2:
+ node_check, index_check = element
+ elif len(element) == 1:
+ node_check = element[0]
+ index_check = True
+ else:
+ raise ResolverError("Invalid path element: %s" % element)
+ else:
+ node_check = None
+ index_check = element
+ if node_check is str:
+ node_check = ScalarNode
+ elif node_check is list:
+ node_check = SequenceNode
+ elif node_check is dict:
+ node_check = MappingNode
+ elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
+ and not isinstance(node_check, str) \
+ and node_check is not None:
+ raise ResolverError("Invalid node checker: %s" % node_check)
+ if not isinstance(index_check, (str, int)) \
+ and index_check is not None:
+ raise ResolverError("Invalid index checker: %s" % index_check)
+ new_path.append((node_check, index_check))
+ if kind is str:
+ kind = ScalarNode
+ elif kind is list:
+ kind = SequenceNode
+ elif kind is dict:
+ kind = MappingNode
+ elif kind not in [ScalarNode, SequenceNode, MappingNode] \
+ and kind is not None:
+ raise ResolverError("Invalid node kind: %s" % kind)
+ cls.yaml_path_resolvers[tuple(new_path), kind] = tag
+
+ def descend_resolver(self, current_node, current_index):
+ if not self.yaml_path_resolvers:
+ return
+ exact_paths = {}
+ prefix_paths = []
+ if current_node:
+ depth = len(self.resolver_prefix_paths)
+ for path, kind in self.resolver_prefix_paths[-1]:
+ if self.check_resolver_prefix(depth, path, kind,
+ current_node, current_index):
+ if len(path) > depth:
+ prefix_paths.append((path, kind))
+ else:
+ exact_paths[kind] = self.yaml_path_resolvers[path, kind]
+ else:
+ for path, kind in self.yaml_path_resolvers:
+ if not path:
+ exact_paths[kind] = self.yaml_path_resolvers[path, kind]
+ else:
+ prefix_paths.append((path, kind))
+ self.resolver_exact_paths.append(exact_paths)
+ self.resolver_prefix_paths.append(prefix_paths)
+
+ def ascend_resolver(self):
+ if not self.yaml_path_resolvers:
+ return
+ self.resolver_exact_paths.pop()
+ self.resolver_prefix_paths.pop()
+
+ def check_resolver_prefix(self, depth, path, kind,
+ current_node, current_index):
+ node_check, index_check = path[depth-1]
+ if isinstance(node_check, str):
+ if current_node.tag != node_check:
+ return
+ elif node_check is not None:
+ if not isinstance(current_node, node_check):
+ return
+ if index_check is True and current_index is not None:
+ return
+ if (index_check is False or index_check is None) \
+ and current_index is None:
+ return
+ if isinstance(index_check, str):
+ if not (isinstance(current_index, ScalarNode)
+ and index_check == current_index.value):
+ return
+ elif isinstance(index_check, int) and not isinstance(index_check, bool):
+ if index_check != current_index:
+ return
+ return True
+
+ def resolve(self, kind, value, implicit):
+ if kind is ScalarNode and implicit[0]:
+ if value == '':
+ resolvers = self.yaml_implicit_resolvers.get('', [])
+ else:
+ resolvers = self.yaml_implicit_resolvers.get(value[0], [])
+ resolvers += self.yaml_implicit_resolvers.get(None, [])
+ for tag, regexp in resolvers:
+ if regexp.match(value):
+ return tag
+ implicit = implicit[1]
+ if self.yaml_path_resolvers:
+ exact_paths = self.resolver_exact_paths[-1]
+ if kind in exact_paths:
+ return exact_paths[kind]
+ if None in exact_paths:
+ return exact_paths[None]
+ if kind is ScalarNode:
+ return self.DEFAULT_SCALAR_TAG
+ elif kind is SequenceNode:
+ return self.DEFAULT_SEQUENCE_TAG
+ elif kind is MappingNode:
+ return self.DEFAULT_MAPPING_TAG
+
+class Resolver(BaseResolver):
+ pass
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:bool',
+ re.compile(r'''^(?:yes|Yes|YES|no|No|NO
+ |true|True|TRUE|false|False|FALSE
+ |on|On|ON|off|Off|OFF)$''', re.X),
+ list('yYnNtTfFoO'))
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:float',
+ re.compile(r'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)?
+ |\.[0-9_]+(?:[eE][-+][0-9]+)?
+ |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*
+ |[-+]?\.(?:inf|Inf|INF)
+ |\.(?:nan|NaN|NAN))$''', re.X),
+ list('-+0123456789.'))
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:int',
+ re.compile(r'''^(?:[-+]?0b[0-1_]+
+ |[-+]?0[0-7_]+
+ |[-+]?(?:0|[1-9][0-9_]*)
+ |[-+]?0x[0-9a-fA-F_]+
+ |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
+ list('-+0123456789'))
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:merge',
+ re.compile(r'^(?:<<)$'),
+ ['<'])
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:null',
+ re.compile(r'''^(?: ~
+ |null|Null|NULL
+ | )$''', re.X),
+ ['~', 'n', 'N', ''])
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:timestamp',
+ re.compile(r'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
+ |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
+ (?:[Tt]|[ \t]+)[0-9][0-9]?
+ :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)?
+ (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
+ list('0123456789'))
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:value',
+ re.compile(r'^(?:=)$'),
+ ['='])
+
+# The following resolver is only for documentation purposes. It cannot work
+# because plain scalars cannot start with '!', '&', or '*'.
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:yaml',
+ re.compile(r'^(?:!|&|\*)$'),
+ list('!&*'))
+
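+# A hedged illustration (not part of the upstream module) of the implicit
+# resolvers registered above, assuming the package is importable as `yaml`:
+#
+# >>> import yaml
+# >>> yaml.safe_load("[yes, '12', 0x1F, 1.5e+3, ~]")
+# [True, '12', 31, 1500.0, None]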
diff --git a/third_party/python/PyYAML/lib3/yaml/scanner.py b/third_party/python/PyYAML/lib3/yaml/scanner.py
new file mode 100644
index 0000000000..7437ede1c6
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/scanner.py
@@ -0,0 +1,1435 @@
+
+# Scanner produces tokens of the following types:
+# STREAM-START
+# STREAM-END
+# DIRECTIVE(name, value)
+# DOCUMENT-START
+# DOCUMENT-END
+# BLOCK-SEQUENCE-START
+# BLOCK-MAPPING-START
+# BLOCK-END
+# FLOW-SEQUENCE-START
+# FLOW-MAPPING-START
+# FLOW-SEQUENCE-END
+# FLOW-MAPPING-END
+# BLOCK-ENTRY
+# FLOW-ENTRY
+# KEY
+# VALUE
+# ALIAS(value)
+# ANCHOR(value)
+# TAG(value)
+# SCALAR(value, plain, style)
+#
+# Read comments in the Scanner code for more details.
+#
+
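+# A hedged illustration (not part of the upstream module) of the token
+# stream produced for a small document, via the package-level yaml.scan
+# helper (assumed to be importable as `yaml`):
+#
+# >>> import yaml
+# >>> [t.__class__.__name__ for t in yaml.scan("a: [1, 2]")]
+# ['StreamStartToken', 'BlockMappingStartToken', 'KeyToken', 'ScalarToken',
+#  'ValueToken', 'FlowSequenceStartToken', 'ScalarToken', 'FlowEntryToken',
+#  'ScalarToken', 'FlowSequenceEndToken', 'BlockEndToken', 'StreamEndToken']
+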
+__all__ = ['Scanner', 'ScannerError']
+
+from .error import MarkedYAMLError
+from .tokens import *
+
+class ScannerError(MarkedYAMLError):
+ pass
+
+class SimpleKey:
+ # See the simple keys treatment below.
+
+ def __init__(self, token_number, required, index, line, column, mark):
+ self.token_number = token_number
+ self.required = required
+ self.index = index
+ self.line = line
+ self.column = column
+ self.mark = mark
+
+class Scanner:
+
+ def __init__(self):
+ """Initialize the scanner."""
+ # It is assumed that Scanner and Reader will have a common descendant.
+ # Reader does the dirty work of checking for BOM and converting the
+ # input data to Unicode. It also adds NUL to the end.
+ #
+ # Reader supports the following methods
+ # self.peek(i=0) # peek the next i-th character
+ # self.prefix(l=1) # peek the next l characters
+ # self.forward(l=1) # read the next l characters and move the pointer.
+
+ # Have we reached the end of the stream?
+ self.done = False
+
+ # The number of unclosed '{' and '['. `flow_level == 0` means block
+ # context.
+ self.flow_level = 0
+
+ # List of processed tokens that are not yet emitted.
+ self.tokens = []
+
+ # Add the STREAM-START token.
+ self.fetch_stream_start()
+
+ # Number of tokens that were emitted through the `get_token` method.
+ self.tokens_taken = 0
+
+ # The current indentation level.
+ self.indent = -1
+
+ # Past indentation levels.
+ self.indents = []
+
+ # Variables related to simple keys treatment.
+
+ # A simple key is a key that is not denoted by the '?' indicator.
+ # Examples of simple keys:
+ # ---
+ # block simple key: value
+ # ? not a simple key:
+ # : { flow simple key: value }
+ # We emit the KEY token before all keys, so when we find a potential
+ # simple key, we try to locate the corresponding ':' indicator.
+ # Simple keys should be limited to a single line and 1024 characters.
+
+ # Can a simple key start at the current position? A simple key may
+ # start:
+ # - at the beginning of the line, not counting indentation spaces
+ # (in block context),
+ # - after '{', '[', ',' (in the flow context),
+ # - after '?', ':', '-' (in the block context).
+ # In the block context, this flag also signifies if a block collection
+ # may start at the current position.
+ self.allow_simple_key = True
+
+ # Keep track of possible simple keys. This is a dictionary. The key
+ # is `flow_level`; there can be no more than one possible simple key
+ # for each level. The value is a SimpleKey record:
+ # (token_number, required, index, line, column, mark)
+ # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow),
+ # '[', or '{' tokens.
+ self.possible_simple_keys = {}
+
+ # Public methods.
+
+ def check_token(self, *choices):
+ # Check if the next token is one of the given types.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ if not choices:
+ return True
+ for choice in choices:
+ if isinstance(self.tokens[0], choice):
+ return True
+ return False
+
+ def peek_token(self):
+ # Return the next token, but do not delete it from the queue.
+ # Return None if no more tokens.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ return self.tokens[0]
+ else:
+ return None
+
+ def get_token(self):
+ # Return the next token.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ self.tokens_taken += 1
+ return self.tokens.pop(0)
+
+ # Private methods.
+
+ def need_more_tokens(self):
+ if self.done:
+ return False
+ if not self.tokens:
+ return True
+ # The current token may be a potential simple key, so we
+ # need to look further.
+ self.stale_possible_simple_keys()
+ if self.next_possible_simple_key() == self.tokens_taken:
+ return True
+
+ def fetch_more_tokens(self):
+
+ # Eat whitespaces and comments until we reach the next token.
+ self.scan_to_next_token()
+
+ # Remove obsolete possible simple keys.
+ self.stale_possible_simple_keys()
+
+ # Compare the current indentation and column. It may add some tokens
+ # and decrease the current indentation level.
+ self.unwind_indent(self.column)
+
+ # Peek the next character.
+ ch = self.peek()
+
+ # Is it the end of stream?
+ if ch == '\0':
+ return self.fetch_stream_end()
+
+ # Is it a directive?
+ if ch == '%' and self.check_directive():
+ return self.fetch_directive()
+
+ # Is it the document start?
+ if ch == '-' and self.check_document_start():
+ return self.fetch_document_start()
+
+ # Is it the document end?
+ if ch == '.' and self.check_document_end():
+ return self.fetch_document_end()
+
+ # TODO: support for BOM within a stream.
+ #if ch == '\uFEFF':
+ # return self.fetch_bom() <-- issue BOMToken
+
+ # Note: the order of the following checks is NOT significant.
+
+ # Is it the flow sequence start indicator?
+ if ch == '[':
+ return self.fetch_flow_sequence_start()
+
+ # Is it the flow mapping start indicator?
+ if ch == '{':
+ return self.fetch_flow_mapping_start()
+
+ # Is it the flow sequence end indicator?
+ if ch == ']':
+ return self.fetch_flow_sequence_end()
+
+ # Is it the flow mapping end indicator?
+ if ch == '}':
+ return self.fetch_flow_mapping_end()
+
+ # Is it the flow entry indicator?
+ if ch == ',':
+ return self.fetch_flow_entry()
+
+ # Is it the block entry indicator?
+ if ch == '-' and self.check_block_entry():
+ return self.fetch_block_entry()
+
+ # Is it the key indicator?
+ if ch == '?' and self.check_key():
+ return self.fetch_key()
+
+ # Is it the value indicator?
+ if ch == ':' and self.check_value():
+ return self.fetch_value()
+
+ # Is it an alias?
+ if ch == '*':
+ return self.fetch_alias()
+
+ # Is it an anchor?
+ if ch == '&':
+ return self.fetch_anchor()
+
+ # Is it a tag?
+ if ch == '!':
+ return self.fetch_tag()
+
+ # Is it a literal scalar?
+ if ch == '|' and not self.flow_level:
+ return self.fetch_literal()
+
+ # Is it a folded scalar?
+ if ch == '>' and not self.flow_level:
+ return self.fetch_folded()
+
+ # Is it a single quoted scalar?
+ if ch == '\'':
+ return self.fetch_single()
+
+ # Is it a double quoted scalar?
+ if ch == '\"':
+ return self.fetch_double()
+
+ # It must be a plain scalar then.
+ if self.check_plain():
+ return self.fetch_plain()
+
+ # No? It's an error. Let's produce a nice error message.
+ raise ScannerError("while scanning for the next token", None,
+ "found character %r that cannot start any token" % ch,
+ self.get_mark())
+
+ # Simple keys treatment.
+
+ def next_possible_simple_key(self):
+ # Return the number of the nearest possible simple key. Actually we
+ # don't need to loop through the whole dictionary. We may replace it
+ # with the following code:
+ # if not self.possible_simple_keys:
+ # return None
+ # return self.possible_simple_keys[
+ # min(self.possible_simple_keys.keys())].token_number
+ min_token_number = None
+ for level in self.possible_simple_keys:
+ key = self.possible_simple_keys[level]
+ if min_token_number is None or key.token_number < min_token_number:
+ min_token_number = key.token_number
+ return min_token_number
+
+ def stale_possible_simple_keys(self):
+ # Remove entries that are no longer possible simple keys. According to
+ # the YAML specification, simple keys
+ # - should be limited to a single line,
+ # - should be no longer than 1024 characters.
+ # Disabling this procedure will allow simple keys of any length and
+ # height (may cause problems if indentation is broken though).
+ for level in list(self.possible_simple_keys):
+ key = self.possible_simple_keys[level]
+ if key.line != self.line \
+ or self.index-key.index > 1024:
+ if key.required:
+ raise ScannerError("while scanning a simple key", key.mark,
+ "could not find expected ':'", self.get_mark())
+ del self.possible_simple_keys[level]
+
+ def save_possible_simple_key(self):
+ # The next token may start a simple key. We check if it's possible
+ # and save its position. This function is called for
+ # ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.
+
+ # Check if a simple key is required at the current position.
+ required = not self.flow_level and self.indent == self.column
+
+ # The next token might be a simple key. Let's save its number and
+ # position.
+ if self.allow_simple_key:
+ self.remove_possible_simple_key()
+ token_number = self.tokens_taken+len(self.tokens)
+ key = SimpleKey(token_number, required,
+ self.index, self.line, self.column, self.get_mark())
+ self.possible_simple_keys[self.flow_level] = key
+
+ def remove_possible_simple_key(self):
+ # Remove the saved possible key position at the current flow level.
+ if self.flow_level in self.possible_simple_keys:
+ key = self.possible_simple_keys[self.flow_level]
+
+ if key.required:
+ raise ScannerError("while scanning a simple key", key.mark,
+ "could not find expected ':'", self.get_mark())
+
+ del self.possible_simple_keys[self.flow_level]
+
+ # Indentation functions.
+
+ def unwind_indent(self, column):
+
+ ## In flow context, tokens should respect indentation.
+ ## Actually the condition should be `self.indent >= column` according to
+ ## the spec. But this condition will prohibit intuitively correct
+ ## constructions such as
+ ## key : {
+ ## }
+ #if self.flow_level and self.indent > column:
+ # raise ScannerError(None, None,
+ # "invalid indentation or unclosed '[' or '{'",
+ # self.get_mark())
+
+ # In the flow context, indentation is ignored. We make the scanner less
+ # restrictive than the specification requires.
+ if self.flow_level:
+ return
+
+ # In block context, we may need to issue the BLOCK-END tokens.
+ while self.indent > column:
+ mark = self.get_mark()
+ self.indent = self.indents.pop()
+ self.tokens.append(BlockEndToken(mark, mark))
+
+ def add_indent(self, column):
+ # Check if we need to increase indentation.
+ if self.indent < column:
+ self.indents.append(self.indent)
+ self.indent = column
+ return True
+ return False
+
+ # Fetchers.
+
+ def fetch_stream_start(self):
+ # We always add STREAM-START as the first token and STREAM-END as the
+ # last token.
+
+ # Read the token.
+ mark = self.get_mark()
+
+ # Add STREAM-START.
+ self.tokens.append(StreamStartToken(mark, mark,
+ encoding=self.encoding))
+
+
+ def fetch_stream_end(self):
+
+ # Set the current indentation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+ self.possible_simple_keys = {}
+
+ # Read the token.
+ mark = self.get_mark()
+
+ # Add STREAM-END.
+ self.tokens.append(StreamEndToken(mark, mark))
+
+ # The stream is finished.
+ self.done = True
+
+ def fetch_directive(self):
+
+ # Set the current indentation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+
+ # Scan and add DIRECTIVE.
+ self.tokens.append(self.scan_directive())
+
+ def fetch_document_start(self):
+ self.fetch_document_indicator(DocumentStartToken)
+
+ def fetch_document_end(self):
+ self.fetch_document_indicator(DocumentEndToken)
+
+ def fetch_document_indicator(self, TokenClass):
+
+ # Set the current indentation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys. Note that there could not be a block collection
+ # after '---'.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+
+ # Add DOCUMENT-START or DOCUMENT-END.
+ start_mark = self.get_mark()
+ self.forward(3)
+ end_mark = self.get_mark()
+ self.tokens.append(TokenClass(start_mark, end_mark))
+
+ def fetch_flow_sequence_start(self):
+ self.fetch_flow_collection_start(FlowSequenceStartToken)
+
+ def fetch_flow_mapping_start(self):
+ self.fetch_flow_collection_start(FlowMappingStartToken)
+
+ def fetch_flow_collection_start(self, TokenClass):
+
+ # '[' and '{' may start a simple key.
+ self.save_possible_simple_key()
+
+ # Increase the flow level.
+ self.flow_level += 1
+
+ # Simple keys are allowed after '[' and '{'.
+ self.allow_simple_key = True
+
+ # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(TokenClass(start_mark, end_mark))
+
+ def fetch_flow_sequence_end(self):
+ self.fetch_flow_collection_end(FlowSequenceEndToken)
+
+ def fetch_flow_mapping_end(self):
+ self.fetch_flow_collection_end(FlowMappingEndToken)
+
+ def fetch_flow_collection_end(self, TokenClass):
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Decrease the flow level.
+ self.flow_level -= 1
+
+ # No simple keys after ']' or '}'.
+ self.allow_simple_key = False
+
+ # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(TokenClass(start_mark, end_mark))
+
+ def fetch_flow_entry(self):
+
+ # Simple keys are allowed after ','.
+ self.allow_simple_key = True
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add FLOW-ENTRY.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(FlowEntryToken(start_mark, end_mark))
+
+ def fetch_block_entry(self):
+
+ # Block context needs additional checks.
+ if not self.flow_level:
+
+ # Are we allowed to start a new entry?
+ if not self.allow_simple_key:
+ raise ScannerError(None, None,
+ "sequence entries are not allowed here",
+ self.get_mark())
+
+ # We may need to add BLOCK-SEQUENCE-START.
+ if self.add_indent(self.column):
+ mark = self.get_mark()
+ self.tokens.append(BlockSequenceStartToken(mark, mark))
+
+ # It's an error for the block entry to occur in the flow context,
+ # but we let the parser detect this.
+ else:
+ pass
+
+ # Simple keys are allowed after '-'.
+ self.allow_simple_key = True
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add BLOCK-ENTRY.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(BlockEntryToken(start_mark, end_mark))
+
+ def fetch_key(self):
+
+ # Block context needs additional checks.
+ if not self.flow_level:
+
+ # Are we allowed to start a key (not necessarily a simple one)?
+ if not self.allow_simple_key:
+ raise ScannerError(None, None,
+ "mapping keys are not allowed here",
+ self.get_mark())
+
+ # We may need to add BLOCK-MAPPING-START.
+ if self.add_indent(self.column):
+ mark = self.get_mark()
+ self.tokens.append(BlockMappingStartToken(mark, mark))
+
+ # Simple keys are allowed after '?' in the block context.
+ self.allow_simple_key = not self.flow_level
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add KEY.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(KeyToken(start_mark, end_mark))
+
+ def fetch_value(self):
+
+ # Do we determine a simple key?
+ if self.flow_level in self.possible_simple_keys:
+
+ # Add KEY.
+ key = self.possible_simple_keys[self.flow_level]
+ del self.possible_simple_keys[self.flow_level]
+ self.tokens.insert(key.token_number-self.tokens_taken,
+ KeyToken(key.mark, key.mark))
+
+ # If this key starts a new block mapping, we need to add
+ # BLOCK-MAPPING-START.
+ if not self.flow_level:
+ if self.add_indent(key.column):
+ self.tokens.insert(key.token_number-self.tokens_taken,
+ BlockMappingStartToken(key.mark, key.mark))
+
+ # There cannot be two simple keys one after another.
+ self.allow_simple_key = False
+
+ # It must be a part of a complex key.
+ else:
+
+ # Block context needs additional checks.
+ # (Do we really need them? They will be caught by the parser
+ # anyway.)
+ if not self.flow_level:
+
+ # We are allowed to start a complex value if and only if
+ # we can start a simple key.
+ if not self.allow_simple_key:
+ raise ScannerError(None, None,
+ "mapping values are not allowed here",
+ self.get_mark())
+
+ # If this value starts a new block mapping, we need to add
+ # BLOCK-MAPPING-START. It will be detected as an error later by
+ # the parser.
+ if not self.flow_level:
+ if self.add_indent(self.column):
+ mark = self.get_mark()
+ self.tokens.append(BlockMappingStartToken(mark, mark))
+
+ # Simple keys are allowed after ':' in the block context.
+ self.allow_simple_key = not self.flow_level
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add VALUE.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(ValueToken(start_mark, end_mark))
+
+ def fetch_alias(self):
+
+ # ALIAS could be a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after ALIAS.
+ self.allow_simple_key = False
+
+ # Scan and add ALIAS.
+ self.tokens.append(self.scan_anchor(AliasToken))
+
+ def fetch_anchor(self):
+
+ # ANCHOR could start a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after ANCHOR.
+ self.allow_simple_key = False
+
+ # Scan and add ANCHOR.
+ self.tokens.append(self.scan_anchor(AnchorToken))
+
+ def fetch_tag(self):
+
+ # TAG could start a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after TAG.
+ self.allow_simple_key = False
+
+ # Scan and add TAG.
+ self.tokens.append(self.scan_tag())
+
+ def fetch_literal(self):
+ self.fetch_block_scalar(style='|')
+
+ def fetch_folded(self):
+ self.fetch_block_scalar(style='>')
+
+ def fetch_block_scalar(self, style):
+
+ # A simple key may follow a block scalar.
+ self.allow_simple_key = True
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Scan and add SCALAR.
+ self.tokens.append(self.scan_block_scalar(style))
+
+ def fetch_single(self):
+ self.fetch_flow_scalar(style='\'')
+
+ def fetch_double(self):
+ self.fetch_flow_scalar(style='"')
+
+ def fetch_flow_scalar(self, style):
+
+ # A flow scalar could be a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after flow scalars.
+ self.allow_simple_key = False
+
+ # Scan and add SCALAR.
+ self.tokens.append(self.scan_flow_scalar(style))
+
+ def fetch_plain(self):
+
+ # A plain scalar could be a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after plain scalars. But note that `scan_plain` will
+ # change this flag if the scan is finished at the beginning of the
+ # line.
+ self.allow_simple_key = False
+
+ # Scan and add SCALAR. May change `allow_simple_key`.
+ self.tokens.append(self.scan_plain())
+
+ # Checkers.
+
+ def check_directive(self):
+
+ # DIRECTIVE: ^ '%' ...
+ # The '%' indicator is already checked.
+ if self.column == 0:
+ return True
+
+ def check_document_start(self):
+
+ # DOCUMENT-START: ^ '---' (' '|'\n')
+ if self.column == 0:
+ if self.prefix(3) == '---' \
+ and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+ return True
+
+ def check_document_end(self):
+
+ # DOCUMENT-END: ^ '...' (' '|'\n')
+ if self.column == 0:
+ if self.prefix(3) == '...' \
+ and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+ return True
+
+ def check_block_entry(self):
+
+ # BLOCK-ENTRY: '-' (' '|'\n')
+ return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+
+ def check_key(self):
+
+ # KEY(flow context): '?'
+ if self.flow_level:
+ return True
+
+ # KEY(block context): '?' (' '|'\n')
+ else:
+ return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+
+ def check_value(self):
+
+ # VALUE(flow context): ':'
+ if self.flow_level:
+ return True
+
+ # VALUE(block context): ':' (' '|'\n')
+ else:
+ return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+
+ def check_plain(self):
+
+ # A plain scalar may start with any non-space character except:
+ # '-', '?', ':', ',', '[', ']', '{', '}',
+ # '#', '&', '*', '!', '|', '>', '\'', '\"',
+ # '%', '@', '`'.
+ #
+ # It may also start with
+ # '-', '?', ':'
+ # if it is followed by a non-space character.
+ #
+ # Note that we limit the last rule to the block context (except the
+ # '-' character) because we want the flow context to be space
+ # independent.
+ ch = self.peek()
+ return ch not in '\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \
+ or (self.peek(1) not in '\0 \t\r\n\x85\u2028\u2029'
+ and (ch == '-' or (not self.flow_level and ch in '?:')))
+
+ # Scanners.
+
+ def scan_to_next_token(self):
+ # We ignore spaces, line breaks and comments.
+ # If we find a line break in the block context, we set the flag
+ # `allow_simple_key` on.
+ # The byte order mark is stripped if it's the first character in the
+ # stream. We do not yet support BOM inside the stream as the
+ # specification requires. Any such mark will be considered as a part
+ # of the document.
+ #
+ # TODO: We need to make tab handling rules more sane. A good rule is
+ # Tabs cannot precede tokens
+ # BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END,
+ # KEY(block), VALUE(block), BLOCK-ENTRY
+ # So the checking code is
+ # if <TAB>:
+ # self.allow_simple_keys = False
+ # We also need to add the check for `allow_simple_keys == True` to
+ # `unwind_indent` before issuing BLOCK-END.
+ # Scanners for block, flow, and plain scalars need to be modified.
+
+ if self.index == 0 and self.peek() == '\uFEFF':
+ self.forward()
+ found = False
+ while not found:
+ while self.peek() == ' ':
+ self.forward()
+ if self.peek() == '#':
+ while self.peek() not in '\0\r\n\x85\u2028\u2029':
+ self.forward()
+ if self.scan_line_break():
+ if not self.flow_level:
+ self.allow_simple_key = True
+ else:
+ found = True
+
+ def scan_directive(self):
+ # See the specification for details.
+ start_mark = self.get_mark()
+ self.forward()
+ name = self.scan_directive_name(start_mark)
+ value = None
+ if name == 'YAML':
+ value = self.scan_yaml_directive_value(start_mark)
+ end_mark = self.get_mark()
+ elif name == 'TAG':
+ value = self.scan_tag_directive_value(start_mark)
+ end_mark = self.get_mark()
+ else:
+ end_mark = self.get_mark()
+ while self.peek() not in '\0\r\n\x85\u2028\u2029':
+ self.forward()
+ self.scan_directive_ignored_line(start_mark)
+ return DirectiveToken(name, value, start_mark, end_mark)
+
+ def scan_directive_name(self, start_mark):
+ # See the specification for details.
+ length = 0
+ ch = self.peek(length)
+ while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-_':
+ length += 1
+ ch = self.peek(length)
+ if not length:
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch, self.get_mark())
+ value = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch not in '\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch, self.get_mark())
+ return value
+
+ def scan_yaml_directive_value(self, start_mark):
+ # See the specification for details.
+ while self.peek() == ' ':
+ self.forward()
+ major = self.scan_yaml_directive_number(start_mark)
+ if self.peek() != '.':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a digit or '.', but found %r" % self.peek(),
+ self.get_mark())
+ self.forward()
+ minor = self.scan_yaml_directive_number(start_mark)
+ if self.peek() not in '\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a digit or ' ', but found %r" % self.peek(),
+ self.get_mark())
+ return (major, minor)
+
+ def scan_yaml_directive_number(self, start_mark):
+ # See the specification for details.
+ ch = self.peek()
+ if not ('0' <= ch <= '9'):
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a digit, but found %r" % ch, self.get_mark())
+ length = 0
+ while '0' <= self.peek(length) <= '9':
+ length += 1
+ value = int(self.prefix(length))
+ self.forward(length)
+ return value
+
+ def scan_tag_directive_value(self, start_mark):
+ # See the specification for details.
+ while self.peek() == ' ':
+ self.forward()
+ handle = self.scan_tag_directive_handle(start_mark)
+ while self.peek() == ' ':
+ self.forward()
+ prefix = self.scan_tag_directive_prefix(start_mark)
+ return (handle, prefix)
+
+ def scan_tag_directive_handle(self, start_mark):
+ # See the specification for details.
+ value = self.scan_tag_handle('directive', start_mark)
+ ch = self.peek()
+ if ch != ' ':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected ' ', but found %r" % ch, self.get_mark())
+ return value
+
+ def scan_tag_directive_prefix(self, start_mark):
+ # See the specification for details.
+ value = self.scan_tag_uri('directive', start_mark)
+ ch = self.peek()
+ if ch not in '\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected ' ', but found %r" % ch, self.get_mark())
+ return value
+
+ def scan_directive_ignored_line(self, start_mark):
+ # See the specification for details.
+ while self.peek() == ' ':
+ self.forward()
+ if self.peek() == '#':
+ while self.peek() not in '\0\r\n\x85\u2028\u2029':
+ self.forward()
+ ch = self.peek()
+ if ch not in '\0\r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a comment or a line break, but found %r"
+ % ch, self.get_mark())
+ self.scan_line_break()
+
+ def scan_anchor(self, TokenClass):
+ # The specification does not restrict characters for anchors and
+ # aliases. This may lead to problems, for instance, the document:
+ # [ *alias, value ]
+ # can be interpreted in two ways, as
+ # [ "value" ]
+ # and
+ # [ *alias , "value" ]
+ # Therefore we restrict aliases to numbers and ASCII letters.
+ start_mark = self.get_mark()
+ indicator = self.peek()
+ if indicator == '*':
+ name = 'alias'
+ else:
+ name = 'anchor'
+ self.forward()
+ length = 0
+ ch = self.peek(length)
+ while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-_':
+ length += 1
+ ch = self.peek(length)
+ if not length:
+ raise ScannerError("while scanning an %s" % name, start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch, self.get_mark())
+ value = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch not in '\0 \t\r\n\x85\u2028\u2029?:,]}%@`':
+ raise ScannerError("while scanning an %s" % name, start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch, self.get_mark())
+ end_mark = self.get_mark()
+ return TokenClass(value, start_mark, end_mark)
+
+ def scan_tag(self):
+ # See the specification for details.
+ start_mark = self.get_mark()
+ ch = self.peek(1)
+ if ch == '<':
+ handle = None
+ self.forward(2)
+ suffix = self.scan_tag_uri('tag', start_mark)
+ if self.peek() != '>':
+ raise ScannerError("while parsing a tag", start_mark,
+ "expected '>', but found %r" % self.peek(),
+ self.get_mark())
+ self.forward()
+ elif ch in '\0 \t\r\n\x85\u2028\u2029':
+ handle = None
+ suffix = '!'
+ self.forward()
+ else:
+ length = 1
+ use_handle = False
+ while ch not in '\0 \r\n\x85\u2028\u2029':
+ if ch == '!':
+ use_handle = True
+ break
+ length += 1
+ ch = self.peek(length)
+ handle = '!'
+ if use_handle:
+ handle = self.scan_tag_handle('tag', start_mark)
+ else:
+ handle = '!'
+ self.forward()
+ suffix = self.scan_tag_uri('tag', start_mark)
+ ch = self.peek()
+ if ch not in '\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a tag", start_mark,
+ "expected ' ', but found %r" % ch, self.get_mark())
+ value = (handle, suffix)
+ end_mark = self.get_mark()
+ return TagToken(value, start_mark, end_mark)
+
+ def scan_block_scalar(self, style):
+ # See the specification for details.
+
+ if style == '>':
+ folded = True
+ else:
+ folded = False
+
+ chunks = []
+ start_mark = self.get_mark()
+
+ # Scan the header.
+ self.forward()
+ chomping, increment = self.scan_block_scalar_indicators(start_mark)
+ self.scan_block_scalar_ignored_line(start_mark)
+
+ # Determine the indentation level and go to the first non-empty line.
+ min_indent = self.indent+1
+ if min_indent < 1:
+ min_indent = 1
+ if increment is None:
+ breaks, max_indent, end_mark = self.scan_block_scalar_indentation()
+ indent = max(min_indent, max_indent)
+ else:
+ indent = min_indent+increment-1
+ breaks, end_mark = self.scan_block_scalar_breaks(indent)
+ line_break = ''
+
+ # Scan the inner part of the block scalar.
+ while self.column == indent and self.peek() != '\0':
+ chunks.extend(breaks)
+ leading_non_space = self.peek() not in ' \t'
+ length = 0
+ while self.peek(length) not in '\0\r\n\x85\u2028\u2029':
+ length += 1
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ line_break = self.scan_line_break()
+ breaks, end_mark = self.scan_block_scalar_breaks(indent)
+ if self.column == indent and self.peek() != '\0':
+
+ # Unfortunately, folding rules are ambiguous.
+ #
+ # This is the folding according to the specification:
+
+ if folded and line_break == '\n' \
+ and leading_non_space and self.peek() not in ' \t':
+ if not breaks:
+ chunks.append(' ')
+ else:
+ chunks.append(line_break)
+
+ # This is Clark Evans's interpretation (also in the spec
+ # examples):
+ #
+ #if folded and line_break == '\n':
+ # if not breaks:
+ # if self.peek() not in ' \t':
+ # chunks.append(' ')
+ # else:
+ # chunks.append(line_break)
+ #else:
+ # chunks.append(line_break)
+ else:
+ break
+
+ # Chomp the tail.
+ if chomping is not False:
+ chunks.append(line_break)
+ if chomping is True:
+ chunks.extend(breaks)
+
+ # We are done.
+ return ScalarToken(''.join(chunks), False, start_mark, end_mark,
+ style)
+
+ def scan_block_scalar_indicators(self, start_mark):
+ # See the specification for details.
+ chomping = None
+ increment = None
+ ch = self.peek()
+ if ch in '+-':
+ if ch == '+':
+ chomping = True
+ else:
+ chomping = False
+ self.forward()
+ ch = self.peek()
+ if ch in '0123456789':
+ increment = int(ch)
+ if increment == 0:
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected indentation indicator in the range 1-9, but found 0",
+ self.get_mark())
+ self.forward()
+ elif ch in '0123456789':
+ increment = int(ch)
+ if increment == 0:
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected indentation indicator in the range 1-9, but found 0",
+ self.get_mark())
+ self.forward()
+ ch = self.peek()
+ if ch in '+-':
+ if ch == '+':
+ chomping = True
+ else:
+ chomping = False
+ self.forward()
+ ch = self.peek()
+ if ch not in '\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected chomping or indentation indicators, but found %r"
+ % ch, self.get_mark())
+ return chomping, increment
+
+ def scan_block_scalar_ignored_line(self, start_mark):
+ # See the specification for details.
+ while self.peek() == ' ':
+ self.forward()
+ if self.peek() == '#':
+ while self.peek() not in '\0\r\n\x85\u2028\u2029':
+ self.forward()
+ ch = self.peek()
+ if ch not in '\0\r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected a comment or a line break, but found %r" % ch,
+ self.get_mark())
+ self.scan_line_break()
+
+ def scan_block_scalar_indentation(self):
+ # See the specification for details.
+ chunks = []
+ max_indent = 0
+ end_mark = self.get_mark()
+ while self.peek() in ' \r\n\x85\u2028\u2029':
+ if self.peek() != ' ':
+ chunks.append(self.scan_line_break())
+ end_mark = self.get_mark()
+ else:
+ self.forward()
+ if self.column > max_indent:
+ max_indent = self.column
+ return chunks, max_indent, end_mark
+
+ def scan_block_scalar_breaks(self, indent):
+ # See the specification for details.
+ chunks = []
+ end_mark = self.get_mark()
+ while self.column < indent and self.peek() == ' ':
+ self.forward()
+ while self.peek() in '\r\n\x85\u2028\u2029':
+ chunks.append(self.scan_line_break())
+ end_mark = self.get_mark()
+ while self.column < indent and self.peek() == ' ':
+ self.forward()
+ return chunks, end_mark
+
+ def scan_flow_scalar(self, style):
+ # See the specification for details.
+ # Note that we relax indentation rules for quoted scalars. Quoted
+ # scalars don't need to adhere to indentation because " and ' clearly
+ # mark the beginning and the end of them. Therefore we are less
+ # restrictive than the specification requires. We only need to check
+ # that document separators are not included in scalars.
+ if style == '"':
+ double = True
+ else:
+ double = False
+ chunks = []
+ start_mark = self.get_mark()
+ quote = self.peek()
+ self.forward()
+ chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
+ while self.peek() != quote:
+ chunks.extend(self.scan_flow_scalar_spaces(double, start_mark))
+ chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
+ self.forward()
+ end_mark = self.get_mark()
+ return ScalarToken(''.join(chunks), False, start_mark, end_mark,
+ style)
+
+ ESCAPE_REPLACEMENTS = {
+ '0': '\0',
+ 'a': '\x07',
+ 'b': '\x08',
+ 't': '\x09',
+ '\t': '\x09',
+ 'n': '\x0A',
+ 'v': '\x0B',
+ 'f': '\x0C',
+ 'r': '\x0D',
+ 'e': '\x1B',
+ ' ': '\x20',
+ '\"': '\"',
+ '\\': '\\',
+ '/': '/',
+ 'N': '\x85',
+ '_': '\xA0',
+ 'L': '\u2028',
+ 'P': '\u2029',
+ }
+
+ ESCAPE_CODES = {
+ 'x': 2,
+ 'u': 4,
+ 'U': 8,
+ }
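+ # scan_flow_scalar_non_spaces() below resolves double-quoted escapes using
+ # these two tables: single-character escapes (e.g. '\t', '\n', 'N') come
+ # from ESCAPE_REPLACEMENTS, while '\x', '\u' and '\U' escapes read 2, 4 or
+ # 8 hex digits as listed in ESCAPE_CODES.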
+
+ def scan_flow_scalar_non_spaces(self, double, start_mark):
+ # See the specification for details.
+ chunks = []
+ while True:
+ length = 0
+ while self.peek(length) not in '\'\"\\\0 \t\r\n\x85\u2028\u2029':
+ length += 1
+ if length:
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ ch = self.peek()
+ if not double and ch == '\'' and self.peek(1) == '\'':
+ chunks.append('\'')
+ self.forward(2)
+ elif (double and ch == '\'') or (not double and ch in '\"\\'):
+ chunks.append(ch)
+ self.forward()
+ elif double and ch == '\\':
+ self.forward()
+ ch = self.peek()
+ if ch in self.ESCAPE_REPLACEMENTS:
+ chunks.append(self.ESCAPE_REPLACEMENTS[ch])
+ self.forward()
+ elif ch in self.ESCAPE_CODES:
+ length = self.ESCAPE_CODES[ch]
+ self.forward()
+ for k in range(length):
+ if self.peek(k) not in '0123456789ABCDEFabcdef':
+ raise ScannerError("while scanning a double-quoted scalar", start_mark,
+ "expected escape sequence of %d hexadecimal numbers, but found %r" %
+ (length, self.peek(k)), self.get_mark())
+ code = int(self.prefix(length), 16)
+ chunks.append(chr(code))
+ self.forward(length)
+ elif ch in '\r\n\x85\u2028\u2029':
+ self.scan_line_break()
+ chunks.extend(self.scan_flow_scalar_breaks(double, start_mark))
+ else:
+ raise ScannerError("while scanning a double-quoted scalar", start_mark,
+ "found unknown escape character %r" % ch, self.get_mark())
+ else:
+ return chunks
+
+ def scan_flow_scalar_spaces(self, double, start_mark):
+ # See the specification for details.
+ chunks = []
+ length = 0
+ while self.peek(length) in ' \t':
+ length += 1
+ whitespaces = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch == '\0':
+ raise ScannerError("while scanning a quoted scalar", start_mark,
+ "found unexpected end of stream", self.get_mark())
+ elif ch in '\r\n\x85\u2028\u2029':
+ line_break = self.scan_line_break()
+ breaks = self.scan_flow_scalar_breaks(double, start_mark)
+ if line_break != '\n':
+ chunks.append(line_break)
+ elif not breaks:
+ chunks.append(' ')
+ chunks.extend(breaks)
+ else:
+ chunks.append(whitespaces)
+ return chunks
+
+ def scan_flow_scalar_breaks(self, double, start_mark):
+ # See the specification for details.
+ chunks = []
+ while True:
+ # Instead of checking indentation, we check for document
+ # separators.
+ prefix = self.prefix(3)
+ if (prefix == '---' or prefix == '...') \
+ and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a quoted scalar", start_mark,
+ "found unexpected document separator", self.get_mark())
+ while self.peek() in ' \t':
+ self.forward()
+ if self.peek() in '\r\n\x85\u2028\u2029':
+ chunks.append(self.scan_line_break())
+ else:
+ return chunks
+
+ def scan_plain(self):
+ # See the specification for details.
+ # We add an additional restriction for the flow context:
+ # plain scalars in the flow context cannot contain ',' or '?'.
+ # We also keep track of the `allow_simple_key` flag here.
+ # Indentation rules are loosened for the flow context.
+ chunks = []
+ start_mark = self.get_mark()
+ end_mark = start_mark
+ indent = self.indent+1
+ # We allow zero indentation for scalars, but then we need to check for
+ # document separators at the beginning of the line.
+ #if indent == 0:
+ # indent = 1
+ spaces = []
+ while True:
+ length = 0
+ if self.peek() == '#':
+ break
+ while True:
+ ch = self.peek(length)
+ if ch in '\0 \t\r\n\x85\u2028\u2029' \
+ or (ch == ':' and
+ self.peek(length+1) in '\0 \t\r\n\x85\u2028\u2029'
+ + (u',[]{}' if self.flow_level else u''))\
+ or (self.flow_level and ch in ',?[]{}'):
+ break
+ length += 1
+ if length == 0:
+ break
+ self.allow_simple_key = False
+ chunks.extend(spaces)
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ end_mark = self.get_mark()
+ spaces = self.scan_plain_spaces(indent, start_mark)
+ if not spaces or self.peek() == '#' \
+ or (not self.flow_level and self.column < indent):
+ break
+ return ScalarToken(''.join(chunks), True, start_mark, end_mark)
+
+ def scan_plain_spaces(self, indent, start_mark):
+ # See the specification for details.
+ # The specification is really confusing about tabs in plain scalars.
+ # We just forbid them completely. Do not use tabs in YAML!
+ chunks = []
+ length = 0
+ while self.peek(length) in ' ':
+ length += 1
+ whitespaces = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch in '\r\n\x85\u2028\u2029':
+ line_break = self.scan_line_break()
+ self.allow_simple_key = True
+ prefix = self.prefix(3)
+ if (prefix == '---' or prefix == '...') \
+ and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+ return
+ breaks = []
+ while self.peek() in ' \r\n\x85\u2028\u2029':
+ if self.peek() == ' ':
+ self.forward()
+ else:
+ breaks.append(self.scan_line_break())
+ prefix = self.prefix(3)
+ if (prefix == '---' or prefix == '...') \
+ and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+ return
+ if line_break != '\n':
+ chunks.append(line_break)
+ elif not breaks:
+ chunks.append(' ')
+ chunks.extend(breaks)
+ elif whitespaces:
+ chunks.append(whitespaces)
+ return chunks
+
+ def scan_tag_handle(self, name, start_mark):
+ # See the specification for details.
+ # For some strange reasons, the specification does not allow '_' in
+ # tag handles. I have allowed it anyway.
+ ch = self.peek()
+ if ch != '!':
+ raise ScannerError("while scanning a %s" % name, start_mark,
+ "expected '!', but found %r" % ch, self.get_mark())
+ length = 1
+ ch = self.peek(length)
+ if ch != ' ':
+ while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-_':
+ length += 1
+ ch = self.peek(length)
+ if ch != '!':
+ self.forward(length)
+ raise ScannerError("while scanning a %s" % name, start_mark,
+ "expected '!', but found %r" % ch, self.get_mark())
+ length += 1
+ value = self.prefix(length)
+ self.forward(length)
+ return value
+
+ def scan_tag_uri(self, name, start_mark):
+ # See the specification for details.
+ # Note: we do not check if URI is well-formed.
+ chunks = []
+ length = 0
+ ch = self.peek(length)
+ while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-;/?:@&=+$,_.!~*\'()[]%':
+ if ch == '%':
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ length = 0
+ chunks.append(self.scan_uri_escapes(name, start_mark))
+ else:
+ length += 1
+ ch = self.peek(length)
+ if length:
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ length = 0
+ if not chunks:
+ raise ScannerError("while parsing a %s" % name, start_mark,
+ "expected URI, but found %r" % ch, self.get_mark())
+ return ''.join(chunks)
+
+ def scan_uri_escapes(self, name, start_mark):
+ # See the specification for details.
+ codes = []
+ mark = self.get_mark()
+ while self.peek() == '%':
+ self.forward()
+ for k in range(2):
+ if self.peek(k) not in '0123456789ABCDEFabcdef':
+ raise ScannerError("while scanning a %s" % name, start_mark,
+ "expected URI escape sequence of 2 hexadecimal numbers, but found %r"
+ % self.peek(k), self.get_mark())
+ codes.append(int(self.prefix(2), 16))
+ self.forward(2)
+ try:
+ value = bytes(codes).decode('utf-8')
+ except UnicodeDecodeError as exc:
+ raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark)
+ return value
+
+ def scan_line_break(self):
+ # Transforms:
+ # '\r\n' : '\n'
+ # '\r' : '\n'
+ # '\n' : '\n'
+ # '\x85' : '\n'
+ # '\u2028' : '\u2028'
+ # '\u2029' : '\u2029'
+ # default : ''
+ ch = self.peek()
+ if ch in '\r\n\x85':
+ if self.prefix(2) == '\r\n':
+ self.forward(2)
+ else:
+ self.forward()
+ return '\n'
+ elif ch in '\u2028\u2029':
+ self.forward()
+ return ch
+ return ''
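
For orientation, a minimal sketch of how the Scanner above is driven in practice, assuming the standard top-level helpers from yaml/__init__.py (not shown in this hunk): yaml.scan() runs the scanner over a stream and yields the token objects defined in tokens.py below.

    import yaml

    # '%YAML' produces a DirectiveToken, '---' a DocumentStartToken, and the
    # anchor/alias pair shows up as AnchorToken and AliasToken.
    source = "%YAML 1.1\n---\nkey: &a [1, 2]\nalias: *a\n"
    for token in yaml.scan(source):
        print(token)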
diff --git a/third_party/python/PyYAML/lib3/yaml/serializer.py b/third_party/python/PyYAML/lib3/yaml/serializer.py
new file mode 100644
index 0000000000..fe911e67ae
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/serializer.py
@@ -0,0 +1,111 @@
+
+__all__ = ['Serializer', 'SerializerError']
+
+from .error import YAMLError
+from .events import *
+from .nodes import *
+
+class SerializerError(YAMLError):
+ pass
+
+class Serializer:
+
+ ANCHOR_TEMPLATE = 'id%03d'
+
+ def __init__(self, encoding=None,
+ explicit_start=None, explicit_end=None, version=None, tags=None):
+ self.use_encoding = encoding
+ self.use_explicit_start = explicit_start
+ self.use_explicit_end = explicit_end
+ self.use_version = version
+ self.use_tags = tags
+ self.serialized_nodes = {}
+ self.anchors = {}
+ self.last_anchor_id = 0
+ self.closed = None
+
+ def open(self):
+ if self.closed is None:
+ self.emit(StreamStartEvent(encoding=self.use_encoding))
+ self.closed = False
+ elif self.closed:
+ raise SerializerError("serializer is closed")
+ else:
+ raise SerializerError("serializer is already opened")
+
+ def close(self):
+ if self.closed is None:
+ raise SerializerError("serializer is not opened")
+ elif not self.closed:
+ self.emit(StreamEndEvent())
+ self.closed = True
+
+ #def __del__(self):
+ # self.close()
+
+ def serialize(self, node):
+ if self.closed is None:
+ raise SerializerError("serializer is not opened")
+ elif self.closed:
+ raise SerializerError("serializer is closed")
+ self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
+ version=self.use_version, tags=self.use_tags))
+ self.anchor_node(node)
+ self.serialize_node(node, None, None)
+ self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
+ self.serialized_nodes = {}
+ self.anchors = {}
+ self.last_anchor_id = 0
+
+ def anchor_node(self, node):
+ if node in self.anchors:
+ if self.anchors[node] is None:
+ self.anchors[node] = self.generate_anchor(node)
+ else:
+ self.anchors[node] = None
+ if isinstance(node, SequenceNode):
+ for item in node.value:
+ self.anchor_node(item)
+ elif isinstance(node, MappingNode):
+ for key, value in node.value:
+ self.anchor_node(key)
+ self.anchor_node(value)
+
+ def generate_anchor(self, node):
+ self.last_anchor_id += 1
+ return self.ANCHOR_TEMPLATE % self.last_anchor_id
+
+ def serialize_node(self, node, parent, index):
+ alias = self.anchors[node]
+ if node in self.serialized_nodes:
+ self.emit(AliasEvent(alias))
+ else:
+ self.serialized_nodes[node] = True
+ self.descend_resolver(parent, index)
+ if isinstance(node, ScalarNode):
+ detected_tag = self.resolve(ScalarNode, node.value, (True, False))
+ default_tag = self.resolve(ScalarNode, node.value, (False, True))
+ implicit = (node.tag == detected_tag), (node.tag == default_tag)
+ self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
+ style=node.style))
+ elif isinstance(node, SequenceNode):
+ implicit = (node.tag
+ == self.resolve(SequenceNode, node.value, True))
+ self.emit(SequenceStartEvent(alias, node.tag, implicit,
+ flow_style=node.flow_style))
+ index = 0
+ for item in node.value:
+ self.serialize_node(item, node, index)
+ index += 1
+ self.emit(SequenceEndEvent())
+ elif isinstance(node, MappingNode):
+ implicit = (node.tag
+ == self.resolve(MappingNode, node.value, True))
+ self.emit(MappingStartEvent(alias, node.tag, implicit,
+ flow_style=node.flow_style))
+ for key, value in node.value:
+ self.serialize_node(key, node, None)
+ self.serialize_node(value, node, key)
+ self.emit(MappingEndEvent())
+ self.ascend_resolver()
+
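A small round-trip sketch of the anchor machinery, again assuming the standard yaml.compose() and yaml.serialize() helpers: a node referenced more than once is assigned a name from ANCHOR_TEMPLATE, so aliases survive serialization.

    import yaml

    # compose() builds the node graph; the alias resolves to the same
    # SequenceNode object, so anchor_node() sees it twice and names it.
    node = yaml.compose("shared: &s [1, 2]\nagain: *s\n")
    print(yaml.serialize(node))
    # prints something along the lines of:
    #   shared: &id001 [1, 2]
    #   again: *id001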
diff --git a/third_party/python/PyYAML/lib3/yaml/tokens.py b/third_party/python/PyYAML/lib3/yaml/tokens.py
new file mode 100644
index 0000000000..4d0b48a394
--- /dev/null
+++ b/third_party/python/PyYAML/lib3/yaml/tokens.py
@@ -0,0 +1,104 @@
+
+class Token(object):
+ def __init__(self, start_mark, end_mark):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ def __repr__(self):
+ attributes = [key for key in self.__dict__
+ if not key.endswith('_mark')]
+ attributes.sort()
+ arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
+ for key in attributes])
+ return '%s(%s)' % (self.__class__.__name__, arguments)
+
+#class BOMToken(Token):
+# id = '<byte order mark>'
+
+class DirectiveToken(Token):
+ id = '<directive>'
+ def __init__(self, name, value, start_mark, end_mark):
+ self.name = name
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class DocumentStartToken(Token):
+ id = '<document start>'
+
+class DocumentEndToken(Token):
+ id = '<document end>'
+
+class StreamStartToken(Token):
+ id = '<stream start>'
+ def __init__(self, start_mark=None, end_mark=None,
+ encoding=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.encoding = encoding
+
+class StreamEndToken(Token):
+ id = '<stream end>'
+
+class BlockSequenceStartToken(Token):
+ id = '<block sequence start>'
+
+class BlockMappingStartToken(Token):
+ id = '<block mapping start>'
+
+class BlockEndToken(Token):
+ id = '<block end>'
+
+class FlowSequenceStartToken(Token):
+ id = '['
+
+class FlowMappingStartToken(Token):
+ id = '{'
+
+class FlowSequenceEndToken(Token):
+ id = ']'
+
+class FlowMappingEndToken(Token):
+ id = '}'
+
+class KeyToken(Token):
+ id = '?'
+
+class ValueToken(Token):
+ id = ':'
+
+class BlockEntryToken(Token):
+ id = '-'
+
+class FlowEntryToken(Token):
+ id = ','
+
+class AliasToken(Token):
+ id = '<alias>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class AnchorToken(Token):
+ id = '<anchor>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class TagToken(Token):
+ id = '<tag>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class ScalarToken(Token):
+ id = '<scalar>'
+ def __init__(self, value, plain, start_mark, end_mark, style=None):
+ self.value = value
+ self.plain = plain
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.style = style
+
diff --git a/third_party/python/PyYAML/setup.cfg b/third_party/python/PyYAML/setup.cfg
new file mode 100644
index 0000000000..b186858a70
--- /dev/null
+++ b/third_party/python/PyYAML/setup.cfg
@@ -0,0 +1,25 @@
+
+# The INCLUDE and LIB directories to build the '_yaml' extension.
+# You may also set them using the options '-I' and '-L'.
+[build_ext]
+
+# List of directories to search for 'yaml.h' (separated by ':').
+#include_dirs=/usr/local/include:../../include
+
+# List of directories to search for 'libyaml.a' (separated by ':').
+#library_dirs=/usr/local/lib:../../lib
+
+# An alternative compiler to build the extension.
+#compiler=mingw32
+
+# Additional preprocessor definitions might be required.
+#define=YAML_DECLARE_STATIC
+
+# The following options are used to build PyYAML Windows installer
+# for Python 2.7 on my PC:
+#include_dirs=../../../libyaml/tags/0.1.4/include
+#library_dirs=../../../libyaml/tags/0.1.4/win32/vs2008/output/release/lib
+#define=YAML_DECLARE_STATIC
+
+[metadata]
+license_file = LICENSE
\ No newline at end of file
diff --git a/third_party/python/PyYAML/setup.py b/third_party/python/PyYAML/setup.py
new file mode 100644
index 0000000000..5e34adfb59
--- /dev/null
+++ b/third_party/python/PyYAML/setup.py
@@ -0,0 +1,315 @@
+
+NAME = 'PyYAML'
+VERSION = '5.3.1'
+DESCRIPTION = "YAML parser and emitter for Python"
+LONG_DESCRIPTION = """\
+YAML is a data serialization format designed for human readability
+and interaction with scripting languages. PyYAML is a YAML parser
+and emitter for Python.
+
+PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
+support, capable extension API, and sensible error messages. PyYAML
+supports standard YAML tags and provides Python-specific tags that
+allow representing an arbitrary Python object.
+
+PyYAML is applicable for a broad range of tasks from complex
+configuration files to object serialization and persistence."""
+AUTHOR = "Kirill Simonov"
+AUTHOR_EMAIL = 'xi@resolvent.net'
+LICENSE = "MIT"
+PLATFORMS = "Any"
+URL = "https://github.com/yaml/pyyaml"
+DOWNLOAD_URL = "https://pypi.org/project/PyYAML/"
+CLASSIFIERS = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Cython",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ "Topic :: Text Processing :: Markup",
+]
+
+
+LIBYAML_CHECK = """
+#include <yaml.h>
+
+int main(void) {
+ yaml_parser_t parser;
+ yaml_emitter_t emitter;
+
+ yaml_parser_initialize(&parser);
+ yaml_parser_delete(&parser);
+
+ yaml_emitter_initialize(&emitter);
+ yaml_emitter_delete(&emitter);
+
+ return 0;
+}
+"""
+
+
+import sys, os.path, platform, warnings
+
+from distutils import log
+from distutils.core import setup, Command
+from distutils.core import Distribution as _Distribution
+from distutils.core import Extension as _Extension
+from distutils.command.build_ext import build_ext as _build_ext
+from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
+from distutils.errors import DistutilsError, CompileError, LinkError, DistutilsPlatformError
+
+if 'setuptools.extension' in sys.modules:
+ _Extension = sys.modules['setuptools.extension']._Extension
+ sys.modules['distutils.core'].Extension = _Extension
+ sys.modules['distutils.extension'].Extension = _Extension
+ sys.modules['distutils.command.build_ext'].Extension = _Extension
+
+with_cython = False
+if 'sdist' in sys.argv:
+ # we need cython here
+ with_cython = True
+try:
+ from Cython.Distutils.extension import Extension as _Extension
+ from Cython.Distutils import build_ext as _build_ext
+ with_cython = True
+except ImportError:
+ if with_cython:
+ raise
+
+try:
+ from wheel.bdist_wheel import bdist_wheel
+except ImportError:
+ bdist_wheel = None
+
+
+# on Windows, disable wheel generation warning noise
+windows_ignore_warnings = [
+"Unknown distribution option: 'python_requires'",
+"Config variable 'Py_DEBUG' is unset",
+"Config variable 'WITH_PYMALLOC' is unset",
+"Config variable 'Py_UNICODE_SIZE' is unset",
+"Cython directive 'language_level' not set"
+]
+
+if platform.system() == 'Windows':
+ for w in windows_ignore_warnings:
+ warnings.filterwarnings('ignore', w)
+
+class Distribution(_Distribution):
+
+ def __init__(self, attrs=None):
+ _Distribution.__init__(self, attrs)
+ if not self.ext_modules:
+ return
+ for idx in range(len(self.ext_modules)-1, -1, -1):
+ ext = self.ext_modules[idx]
+ if not isinstance(ext, Extension):
+ continue
+ setattr(self, ext.attr_name, None)
+ self.global_options = [
+ (ext.option_name, None,
+ "include %s (default if %s is available)"
+ % (ext.feature_description, ext.feature_name)),
+ (ext.neg_option_name, None,
+ "exclude %s" % ext.feature_description),
+ ] + self.global_options
+ self.negative_opt = self.negative_opt.copy()
+ self.negative_opt[ext.neg_option_name] = ext.option_name
+
+ def has_ext_modules(self):
+ if not self.ext_modules:
+ return False
+ for ext in self.ext_modules:
+ with_ext = self.ext_status(ext)
+ if with_ext is None or with_ext:
+ return True
+ return False
+
+ def ext_status(self, ext):
+ implementation = platform.python_implementation()
+ if implementation != 'CPython':
+ return False
+ if isinstance(ext, Extension):
+ with_ext = getattr(self, ext.attr_name)
+ return with_ext
+ else:
+ return True
+
+
+class Extension(_Extension):
+
+ def __init__(self, name, sources, feature_name, feature_description,
+ feature_check, **kwds):
+ if not with_cython:
+ for filename in sources[:]:
+ base, ext = os.path.splitext(filename)
+ if ext == '.pyx':
+ sources.remove(filename)
+ sources.append('%s.c' % base)
+ _Extension.__init__(self, name, sources, **kwds)
+ self.feature_name = feature_name
+ self.feature_description = feature_description
+ self.feature_check = feature_check
+ self.attr_name = 'with_' + feature_name.replace('-', '_')
+ self.option_name = 'with-' + feature_name
+ self.neg_option_name = 'without-' + feature_name
+
+
+class build_ext(_build_ext):
+
+ def run(self):
+ optional = True
+ disabled = True
+ for ext in self.extensions:
+ with_ext = self.distribution.ext_status(ext)
+ if with_ext is None:
+ disabled = False
+ elif with_ext:
+ optional = False
+ disabled = False
+ break
+ if disabled:
+ return
+ try:
+ _build_ext.run(self)
+ except DistutilsPlatformError:
+ exc = sys.exc_info()[1]
+ if optional:
+ log.warn(str(exc))
+ log.warn("skipping build_ext")
+ else:
+ raise
+
+ def get_source_files(self):
+ self.check_extensions_list(self.extensions)
+ filenames = []
+ for ext in self.extensions:
+ if with_cython:
+ self.cython_sources(ext.sources, ext)
+ for filename in ext.sources:
+ filenames.append(filename)
+ base = os.path.splitext(filename)[0]
+ for ext in ['c', 'h', 'pyx', 'pxd']:
+ filename = '%s.%s' % (base, ext)
+ if filename not in filenames and os.path.isfile(filename):
+ filenames.append(filename)
+ return filenames
+
+ def get_outputs(self):
+ self.check_extensions_list(self.extensions)
+ outputs = []
+ for ext in self.extensions:
+ fullname = self.get_ext_fullname(ext.name)
+ filename = os.path.join(self.build_lib,
+ self.get_ext_filename(fullname))
+ if os.path.isfile(filename):
+ outputs.append(filename)
+ return outputs
+
+ def build_extensions(self):
+ self.check_extensions_list(self.extensions)
+ for ext in self.extensions:
+ with_ext = self.distribution.ext_status(ext)
+ if with_ext is not None and not with_ext:
+ continue
+ if with_cython:
+ ext.sources = self.cython_sources(ext.sources, ext)
+ try:
+ self.build_extension(ext)
+ except (CompileError, LinkError):
+ if with_ext is not None:
+ raise
+ log.warn("Error compiling module, falling back to pure Python")
+
+
+class bdist_rpm(_bdist_rpm):
+
+ def _make_spec_file(self):
+ argv0 = sys.argv[0]
+ features = []
+ for ext in self.distribution.ext_modules:
+ if not isinstance(ext, Extension):
+ continue
+ with_ext = getattr(self.distribution, ext.attr_name)
+ if with_ext is None:
+ continue
+ if with_ext:
+ features.append('--'+ext.option_name)
+ else:
+ features.append('--'+ext.neg_option_name)
+ sys.argv[0] = ' '.join([argv0]+features)
+ spec_file = _bdist_rpm._make_spec_file(self)
+ sys.argv[0] = argv0
+ return spec_file
+
+
+class test(Command):
+
+ user_options = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ build_cmd = self.get_finalized_command('build')
+ build_cmd.run()
+ sys.path.insert(0, build_cmd.build_lib)
+ if sys.version_info[0] < 3:
+ sys.path.insert(0, 'tests/lib')
+ else:
+ sys.path.insert(0, 'tests/lib3')
+ import test_all
+ if not test_all.main([]):
+ raise DistutilsError("Tests failed")
+
+
+cmdclass = {
+ 'build_ext': build_ext,
+ 'bdist_rpm': bdist_rpm,
+ 'test': test,
+}
+if bdist_wheel:
+ cmdclass['bdist_wheel'] = bdist_wheel
+
+
+if __name__ == '__main__':
+
+ setup(
+ name=NAME,
+ version=VERSION,
+ description=DESCRIPTION,
+ long_description=LONG_DESCRIPTION,
+ author=AUTHOR,
+ author_email=AUTHOR_EMAIL,
+ license=LICENSE,
+ platforms=PLATFORMS,
+ url=URL,
+ download_url=DOWNLOAD_URL,
+ classifiers=CLASSIFIERS,
+
+ package_dir={'': {2: 'lib', 3: 'lib3'}[sys.version_info[0]]},
+ packages=['yaml'],
+ ext_modules=[
+ Extension('_yaml', ['ext/_yaml.pyx'],
+ 'libyaml', "LibYAML bindings", LIBYAML_CHECK,
+ libraries=['yaml']),
+ ],
+
+ distclass=Distribution,
+ cmdclass=cmdclass,
+ python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
+ )
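
The Distribution and Extension subclasses above expose the 'libyaml' feature as global --with-libyaml / --without-libyaml options for setup.py, and build_ext falls back to the pure-Python modules when compiling ext/_yaml.pyx fails. A quick way to see which way a given installation went, assuming a normal PyYAML install, is the flag set in yaml/__init__.py:

    import yaml

    # True only when the optional C extension was built and is importable.
    print(yaml.__with_libyaml__)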
diff --git a/third_party/python/appdirs/.gitignore b/third_party/python/appdirs/.gitignore
new file mode 100644
index 0000000000..9bef3459ae
--- /dev/null
+++ b/third_party/python/appdirs/.gitignore
@@ -0,0 +1,8 @@
+*.pyc
+*.egg-info
+tmp/
+build/
+dist/
+.tox/
+MANIFEST
+*.komodoproject
diff --git a/third_party/python/appdirs/.travis.yml b/third_party/python/appdirs/.travis.yml
new file mode 100644
index 0000000000..684833b20c
--- /dev/null
+++ b/third_party/python/appdirs/.travis.yml
@@ -0,0 +1,10 @@
+language: python
+python:
+ - "2.7"
+ - "pypy"
+ - "3.4"
+ - "3.5"
+ - "3.6"
+ - "3.7"
+ - "3.8"
+script: python setup.py test
diff --git a/third_party/python/appdirs/CHANGES.rst b/third_party/python/appdirs/CHANGES.rst
new file mode 100644
index 0000000000..e9987c7ee2
--- /dev/null
+++ b/third_party/python/appdirs/CHANGES.rst
@@ -0,0 +1,93 @@
+appdirs Changelog
+=================
+
+appdirs 1.4.4
+-------------
+- [PR #92] Don't import appdirs from setup.py
+
+Project officially classified as Stable which is important
+for inclusion in other distros such as ActivePython.
+
+First of several incremental releases to catch up on maintenance.
+
+appdirs 1.4.3
+-------------
+- [PR #76] Python 3.6 invalid escape sequence deprecation fixes
+- Fix for Python 3.6 support
+
+appdirs 1.4.2
+-------------
+- [PR #84] Allow installing without setuptools
+- [PR #86] Fix string delimiters in setup.py description
+- Add Python 3.6 support
+
+appdirs 1.4.1
+-------------
+- [issue #38] Fix _winreg import on Windows Py3
+- [issue #55] Make appname optional
+
+appdirs 1.4.0
+-------------
+- [PR #42] AppAuthor is now optional on Windows
+- [issue 41] Support Jython on Windows, Mac, and Unix-like platforms. Windows
+ support requires `JNA <https://github.com/twall/jna>`_.
+- [PR #44] Fix incorrect behaviour of the site_config_dir method
+
+appdirs 1.3.0
+-------------
+- [Unix, issue 16] Conform to XDG standard, instead of breaking it for
+ everybody
+- [Unix] Removes gratuitous case mangling of the case, since \*nix-es are
+ usually case sensitive, so mangling is not wise
+- [Unix] Fixes the utterly wrong behaviour in ``site_data_dir``, return result
+ based on XDG_DATA_DIRS and make room for respecting the standard which
+ specifies XDG_DATA_DIRS is a multiple-value variable
+- [Issue 6] Add ``*_config_dir`` which are distinct on nix-es, according to
+ XDG specs; on Windows and Mac return the corresponding ``*_data_dir``
+
+appdirs 1.2.0
+-------------
+
+- [Unix] Put ``user_log_dir`` under the *cache* dir on Unix. Seems to be more
+ typical.
+- [issue 9] Make ``unicode`` work on py3k.
+
+appdirs 1.1.0
+-------------
+
+- [issue 4] Add ``AppDirs.user_log_dir``.
+- [Unix, issue 2, issue 7] appdirs now conforms to `XDG base directory spec
+ <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_.
+- [Mac, issue 5] Fix ``site_data_dir()`` on Mac.
+- [Mac] Drop use of 'Carbon' module in favour of hardcoded paths; supports
+ Python3 now.
+- [Windows] Append "Cache" to ``user_cache_dir`` on Windows by default. Use
+ ``opinion=False`` option to disable this.
+- Add ``appdirs.AppDirs`` convenience class. Usage:
+
+ >>> dirs = AppDirs("SuperApp", "Acme", version="1.0")
+ >>> dirs.user_data_dir
+ '/Users/trentm/Library/Application Support/SuperApp/1.0'
+
+- [Windows] Cherry-pick Komodo's change to downgrade paths to the Windows short
+ paths if there are high bit chars.
+- [Linux] Change default ``user_cache_dir()`` on Linux to be singular, e.g.
+ "~/.superapp/cache".
+- [Windows] Add ``roaming`` option to ``user_data_dir()`` (for use on Windows only)
+ and change the default ``user_data_dir`` behaviour to use a *non*-roaming
+ profile dir (``CSIDL_LOCAL_APPDATA`` instead of ``CSIDL_APPDATA``). Why? Because
+ a large roaming profile can cause login speed issues. The "only syncs on
+ logout" behaviour can cause surprises in appdata info.
+
+
+appdirs 1.0.1 (never released)
+------------------------------
+
+Started this changelog 27 July 2010. Before that this module originated in the
+`Komodo <http://www.activestate.com/komodo>`_ product as ``applib.py`` and then
+as `applib/location.py
+<http://github.com/ActiveState/applib/blob/master/applib/location.py>`_ (used by
+`PyPM <http://code.activestate.com/pypm/>`_ in `ActivePython
+<http://www.activestate.com/activepython>`_). This is basically a fork of
+applib.py 1.0.1 and applib/location.py 1.0.1.
+
diff --git a/third_party/python/appdirs/Dockerfile b/third_party/python/appdirs/Dockerfile
new file mode 100644
index 0000000000..93dd737bd6
--- /dev/null
+++ b/third_party/python/appdirs/Dockerfile
@@ -0,0 +1,13 @@
+FROM activestate/activepython:2.7
+
+# For Python3 compat
+RUN apt-get -y update && apt-get -y install python3-setuptools && \
+ apt-get -y clean
+
+WORKDIR /app
+ADD . /app
+RUN python setup.py install && python setup.py test
+RUN python3 setup.py install && python3 setup.py test
+
+RUN python -m appdirs
+RUN python3 -m appdirs
diff --git a/third_party/python/appdirs/HACKING.md b/third_party/python/appdirs/HACKING.md
new file mode 100644
index 0000000000..f98b053adb
--- /dev/null
+++ b/third_party/python/appdirs/HACKING.md
@@ -0,0 +1,16 @@
+# HACKING
+
+## release
+
+ensure correct version in CHANGES.md and appdirs.py, and:
+
+```
+python setup.py register sdist bdist_wheel upload
+```
+
+## docker image
+
+```
+docker build -t appdirs .
+```
+
diff --git a/third_party/python/appdirs/LICENSE.txt b/third_party/python/appdirs/LICENSE.txt
new file mode 100644
index 0000000000..107c61405e
--- /dev/null
+++ b/third_party/python/appdirs/LICENSE.txt
@@ -0,0 +1,23 @@
+# This is the MIT license
+
+Copyright (c) 2010 ActiveState Software Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
diff --git a/third_party/python/appdirs/MANIFEST.in b/third_party/python/appdirs/MANIFEST.in
new file mode 100644
index 0000000000..6c3ac1b1db
--- /dev/null
+++ b/third_party/python/appdirs/MANIFEST.in
@@ -0,0 +1,5 @@
+include README.rst
+include CHANGES.rst
+include LICENSE.txt
+include *.py
+include test/*.py
diff --git a/third_party/python/appdirs/PKG-INFO b/third_party/python/appdirs/PKG-INFO
new file mode 100644
index 0000000000..6e98536505
--- /dev/null
+++ b/third_party/python/appdirs/PKG-INFO
@@ -0,0 +1,262 @@
+Metadata-Version: 1.2
+Name: appdirs
+Version: 1.4.4
+Summary: A small Python module for determining appropriate platform-specific dirs, e.g. a "user data dir".
+Home-page: http://github.com/ActiveState/appdirs
+Author: Trent Mick
+Author-email: trentm@gmail.com
+Maintainer: Jeff Rouse
+Maintainer-email: jr@its.to
+License: MIT
+Description:
+ .. image:: https://secure.travis-ci.org/ActiveState/appdirs.png
+ :target: http://travis-ci.org/ActiveState/appdirs
+
+ the problem
+ ===========
+
+ What directory should your app use for storing user data? If running on Mac OS X, you
+ should use::
+
+ ~/Library/Application Support/<AppName>
+
+ If on Windows (at least English Win XP) that should be::
+
+ C:\Documents and Settings\<User>\Application Data\Local Settings\<AppAuthor>\<AppName>
+
+ or possibly::
+
+ C:\Documents and Settings\<User>\Application Data\<AppAuthor>\<AppName>
+
+ for `roaming profiles <http://bit.ly/9yl3b6>`_ but that is another story.
+
+ On Linux (and other Unices) the dir, according to the `XDG
+ spec <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_, is::
+
+ ~/.local/share/<AppName>
+
+
+ ``appdirs`` to the rescue
+ =========================
+
+ This kind of thing is what the ``appdirs`` module is for. ``appdirs`` will
+ help you choose an appropriate:
+
+ - user data dir (``user_data_dir``)
+ - user config dir (``user_config_dir``)
+ - user cache dir (``user_cache_dir``)
+ - site data dir (``site_data_dir``)
+ - site config dir (``site_config_dir``)
+ - user log dir (``user_log_dir``)
+
+ and also:
+
+ - is a single module so other Python packages can include their own private copy
+ - is slightly opinionated on the directory names used. Look for "OPINION" in
+ documentation and code for when an opinion is being applied.
+
+
+ some example output
+ ===================
+
+ On Mac OS X::
+
+ >>> from appdirs import *
+ >>> appname = "SuperApp"
+ >>> appauthor = "Acme"
+ >>> user_data_dir(appname, appauthor)
+ '/Users/trentm/Library/Application Support/SuperApp'
+ >>> site_data_dir(appname, appauthor)
+ '/Library/Application Support/SuperApp'
+ >>> user_cache_dir(appname, appauthor)
+ '/Users/trentm/Library/Caches/SuperApp'
+ >>> user_log_dir(appname, appauthor)
+ '/Users/trentm/Library/Logs/SuperApp'
+
+ On Windows 7::
+
+ >>> from appdirs import *
+ >>> appname = "SuperApp"
+ >>> appauthor = "Acme"
+ >>> user_data_dir(appname, appauthor)
+ 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp'
+ >>> user_data_dir(appname, appauthor, roaming=True)
+ 'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp'
+ >>> user_cache_dir(appname, appauthor)
+ 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache'
+ >>> user_log_dir(appname, appauthor)
+ 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs'
+
+ On Linux::
+
+ >>> from appdirs import *
+ >>> appname = "SuperApp"
+ >>> appauthor = "Acme"
+ >>> user_data_dir(appname, appauthor)
+ '/home/trentm/.local/share/SuperApp'
+ >>> site_data_dir(appname, appauthor)
+ '/usr/local/share/SuperApp'
+ >>> site_data_dir(appname, appauthor, multipath=True)
+ '/usr/local/share/SuperApp:/usr/share/SuperApp'
+ >>> user_cache_dir(appname, appauthor)
+ '/home/trentm/.cache/SuperApp'
+ >>> user_log_dir(appname, appauthor)
+ '/home/trentm/.cache/SuperApp/log'
+ >>> user_config_dir(appname)
+ '/home/trentm/.config/SuperApp'
+ >>> site_config_dir(appname)
+ '/etc/xdg/SuperApp'
+ >>> os.environ['XDG_CONFIG_DIRS'] = '/etc:/usr/local/etc'
+ >>> site_config_dir(appname, multipath=True)
+ '/etc/SuperApp:/usr/local/etc/SuperApp'
+
+
+ ``AppDirs`` for convenience
+ ===========================
+
+ ::
+
+ >>> from appdirs import AppDirs
+ >>> dirs = AppDirs("SuperApp", "Acme")
+ >>> dirs.user_data_dir
+ '/Users/trentm/Library/Application Support/SuperApp'
+ >>> dirs.site_data_dir
+ '/Library/Application Support/SuperApp'
+ >>> dirs.user_cache_dir
+ '/Users/trentm/Library/Caches/SuperApp'
+ >>> dirs.user_log_dir
+ '/Users/trentm/Library/Logs/SuperApp'
+
+
+
+ Per-version isolation
+ =====================
+
+ If you have multiple versions of your app in use that you want to be
+ able to run side-by-side, then you may want version-isolation for these
+ dirs::
+
+ >>> from appdirs import AppDirs
+ >>> dirs = AppDirs("SuperApp", "Acme", version="1.0")
+ >>> dirs.user_data_dir
+ '/Users/trentm/Library/Application Support/SuperApp/1.0'
+ >>> dirs.site_data_dir
+ '/Library/Application Support/SuperApp/1.0'
+ >>> dirs.user_cache_dir
+ '/Users/trentm/Library/Caches/SuperApp/1.0'
+ >>> dirs.user_log_dir
+ '/Users/trentm/Library/Logs/SuperApp/1.0'
+
+
+
+ appdirs Changelog
+ =================
+
+ appdirs 1.4.4
+ -------------
+ - [PR #92] Don't import appdirs from setup.py
+
+ Project officially classified as Stable which is important
+ for inclusion in other distros such as ActivePython.
+
+ First of several incremental releases to catch up on maintenance.
+
+ appdirs 1.4.3
+ -------------
+ - [PR #76] Python 3.6 invalid escape sequence deprecation fixes
+ - Fix for Python 3.6 support
+
+ appdirs 1.4.2
+ -------------
+ - [PR #84] Allow installing without setuptools
+ - [PR #86] Fix string delimiters in setup.py description
+ - Add Python 3.6 support
+
+ appdirs 1.4.1
+ -------------
+ - [issue #38] Fix _winreg import on Windows Py3
+ - [issue #55] Make appname optional
+
+ appdirs 1.4.0
+ -------------
+ - [PR #42] AppAuthor is now optional on Windows
+ - [issue 41] Support Jython on Windows, Mac, and Unix-like platforms. Windows
+ support requires `JNA <https://github.com/twall/jna>`_.
+ - [PR #44] Fix incorrect behaviour of the site_config_dir method
+
+ appdirs 1.3.0
+ -------------
+ - [Unix, issue 16] Conform to XDG standard, instead of breaking it for
+ everybody
+ - [Unix] Removes gratuitous case mangling of the case, since \*nix-es are
+ usually case sensitive, so mangling is not wise
+ - [Unix] Fixes the utterly wrong behaviour in ``site_data_dir``, return result
+ based on XDG_DATA_DIRS and make room for respecting the standard which
+ specifies XDG_DATA_DIRS is a multiple-value variable
+ - [Issue 6] Add ``*_config_dir`` which are distinct on nix-es, according to
+ XDG specs; on Windows and Mac return the corresponding ``*_data_dir``
+
+ appdirs 1.2.0
+ -------------
+
+ - [Unix] Put ``user_log_dir`` under the *cache* dir on Unix. Seems to be more
+ typical.
+ - [issue 9] Make ``unicode`` work on py3k.
+
+ appdirs 1.1.0
+ -------------
+
+ - [issue 4] Add ``AppDirs.user_log_dir``.
+ - [Unix, issue 2, issue 7] appdirs now conforms to `XDG base directory spec
+ <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_.
+ - [Mac, issue 5] Fix ``site_data_dir()`` on Mac.
+ - [Mac] Drop use of 'Carbon' module in favour of hardcoded paths; supports
+ Python3 now.
+ - [Windows] Append "Cache" to ``user_cache_dir`` on Windows by default. Use
+ ``opinion=False`` option to disable this.
+ - Add ``appdirs.AppDirs`` convenience class. Usage:
+
+ >>> dirs = AppDirs("SuperApp", "Acme", version="1.0")
+ >>> dirs.user_data_dir
+ '/Users/trentm/Library/Application Support/SuperApp/1.0'
+
+ - [Windows] Cherry-pick Komodo's change to downgrade paths to the Windows short
+ paths if there are high bit chars.
+ - [Linux] Change default ``user_cache_dir()`` on Linux to be singular, e.g.
+ "~/.superapp/cache".
+ - [Windows] Add ``roaming`` option to ``user_data_dir()`` (for use on Windows only)
+ and change the default ``user_data_dir`` behaviour to use a *non*-roaming
+ profile dir (``CSIDL_LOCAL_APPDATA`` instead of ``CSIDL_APPDATA``). Why? Because
+ a large roaming profile can cause login speed issues. The "only syncs on
+ logout" behaviour can cause surprises in appdata info.
+
+
+ appdirs 1.0.1 (never released)
+ ------------------------------
+
+ Started this changelog 27 July 2010. Before that this module originated in the
+ `Komodo <http://www.activestate.com/komodo>`_ product as ``applib.py`` and then
+ as `applib/location.py
+ <http://github.com/ActiveState/applib/blob/master/applib/location.py>`_ (used by
+ `PyPM <http://code.activestate.com/pypm/>`_ in `ActivePython
+ <http://www.activestate.com/activepython>`_). This is basically a fork of
+ applib.py 1.0.1 and applib/location.py 1.0.1.
+
+
+Keywords: application directory log cache user
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff --git a/third_party/python/appdirs/README.rst b/third_party/python/appdirs/README.rst
new file mode 100644
index 0000000000..dfafbdfbe6
--- /dev/null
+++ b/third_party/python/appdirs/README.rst
@@ -0,0 +1,138 @@
+.. image:: https://secure.travis-ci.org/ActiveState/appdirs.png
+ :target: http://travis-ci.org/ActiveState/appdirs
+
+the problem
+===========
+
+What directory should your app use for storing user data? If running on Mac OS X, you
+should use::
+
+ ~/Library/Application Support/<AppName>
+
+If on Windows (at least English Win XP) that should be::
+
+ C:\Documents and Settings\<User>\Application Data\Local Settings\<AppAuthor>\<AppName>
+
+or possibly::
+
+ C:\Documents and Settings\<User>\Application Data\<AppAuthor>\<AppName>
+
+for `roaming profiles <http://bit.ly/9yl3b6>`_ but that is another story.
+
+On Linux (and other Unices) the dir, according to the `XDG
+spec <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_, is::
+
+ ~/.local/share/<AppName>
+
+
+``appdirs`` to the rescue
+=========================
+
+This kind of thing is what the ``appdirs`` module is for. ``appdirs`` will
+help you choose an appropriate:
+
+- user data dir (``user_data_dir``)
+- user config dir (``user_config_dir``)
+- user cache dir (``user_cache_dir``)
+- site data dir (``site_data_dir``)
+- site config dir (``site_config_dir``)
+- user log dir (``user_log_dir``)
+
+and also:
+
+- is a single module so other Python packages can include their own private copy
+- is slightly opinionated on the directory names used. Look for "OPINION" in
+ documentation and code for when an opinion is being applied.
+
+
+some example output
+===================
+
+On Mac OS X::
+
+ >>> from appdirs import *
+ >>> appname = "SuperApp"
+ >>> appauthor = "Acme"
+ >>> user_data_dir(appname, appauthor)
+ '/Users/trentm/Library/Application Support/SuperApp'
+ >>> site_data_dir(appname, appauthor)
+ '/Library/Application Support/SuperApp'
+ >>> user_cache_dir(appname, appauthor)
+ '/Users/trentm/Library/Caches/SuperApp'
+ >>> user_log_dir(appname, appauthor)
+ '/Users/trentm/Library/Logs/SuperApp'
+
+On Windows 7::
+
+ >>> from appdirs import *
+ >>> appname = "SuperApp"
+ >>> appauthor = "Acme"
+ >>> user_data_dir(appname, appauthor)
+ 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp'
+ >>> user_data_dir(appname, appauthor, roaming=True)
+ 'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp'
+ >>> user_cache_dir(appname, appauthor)
+ 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache'
+ >>> user_log_dir(appname, appauthor)
+ 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs'
+
+On Linux::
+
+ >>> from appdirs import *
+ >>> appname = "SuperApp"
+ >>> appauthor = "Acme"
+ >>> user_data_dir(appname, appauthor)
+ '/home/trentm/.local/share/SuperApp'
+ >>> site_data_dir(appname, appauthor)
+ '/usr/local/share/SuperApp'
+ >>> site_data_dir(appname, appauthor, multipath=True)
+ '/usr/local/share/SuperApp:/usr/share/SuperApp'
+ >>> user_cache_dir(appname, appauthor)
+ '/home/trentm/.cache/SuperApp'
+ >>> user_log_dir(appname, appauthor)
+ '/home/trentm/.cache/SuperApp/log'
+ >>> user_config_dir(appname)
+ '/home/trentm/.config/SuperApp'
+ >>> site_config_dir(appname)
+ '/etc/xdg/SuperApp'
+ >>> os.environ['XDG_CONFIG_DIRS'] = '/etc:/usr/local/etc'
+ >>> site_config_dir(appname, multipath=True)
+ '/etc/SuperApp:/usr/local/etc/SuperApp'
+
+
+``AppDirs`` for convenience
+===========================
+
+::
+
+ >>> from appdirs import AppDirs
+ >>> dirs = AppDirs("SuperApp", "Acme")
+ >>> dirs.user_data_dir
+ '/Users/trentm/Library/Application Support/SuperApp'
+ >>> dirs.site_data_dir
+ '/Library/Application Support/SuperApp'
+ >>> dirs.user_cache_dir
+ '/Users/trentm/Library/Caches/SuperApp'
+ >>> dirs.user_log_dir
+ '/Users/trentm/Library/Logs/SuperApp'
+
+
+
+Per-version isolation
+=====================
+
+If you have multiple versions of your app in use that you want to be
+able to run side-by-side, then you may want version-isolation for these
+dirs::
+
+ >>> from appdirs import AppDirs
+ >>> dirs = AppDirs("SuperApp", "Acme", version="1.0")
+ >>> dirs.user_data_dir
+ '/Users/trentm/Library/Application Support/SuperApp/1.0'
+ >>> dirs.site_data_dir
+ '/Library/Application Support/SuperApp/1.0'
+ >>> dirs.user_cache_dir
+ '/Users/trentm/Library/Caches/SuperApp/1.0'
+ >>> dirs.user_log_dir
+ '/Users/trentm/Library/Logs/SuperApp/1.0'
+
diff --git a/third_party/python/appdirs/TODO.md b/third_party/python/appdirs/TODO.md
new file mode 100644
index 0000000000..81e466f904
--- /dev/null
+++ b/third_party/python/appdirs/TODO.md
@@ -0,0 +1 @@
+- add some Windows 7 examples
diff --git a/third_party/python/appdirs/appdirs.py b/third_party/python/appdirs/appdirs.py
new file mode 100644
index 0000000000..2acd1debeb
--- /dev/null
+++ b/third_party/python/appdirs/appdirs.py
@@ -0,0 +1,608 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2005-2010 ActiveState Software Inc.
+# Copyright (c) 2013 Eddy Petrișor
+
+"""Utilities for determining application-specific dirs.
+
+See <http://github.com/ActiveState/appdirs> for details and usage.
+"""
+# Dev Notes:
+# - MSDN on where to store app data files:
+# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
+# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
+# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
+
+__version__ = "1.4.4"
+__version_info__ = tuple(int(segment) for segment in __version__.split("."))
+
+
+import sys
+import os
+
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ unicode = str
+
+if sys.platform.startswith('java'):
+ import platform
+ os_name = platform.java_ver()[3][0]
+ if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
+ system = 'win32'
+ elif os_name.startswith('Mac'): # "Mac OS X", etc.
+ system = 'darwin'
+ else: # "Linux", "SunOS", "FreeBSD", etc.
+ # Setting this to "linux2" is not ideal, but only Windows or Mac
+ # are actually checked for and the rest of the module expects
+ # *sys.platform* style strings.
+ system = 'linux2'
+else:
+ system = sys.platform
+
+
+
+def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific data dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user data directories are:
+ Mac OS X: ~/Library/Application Support/<AppName>
+ Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined
+ Win XP (not roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
+ Win XP (roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
+ Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
+ Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
+
+ For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
+ That means, by default "~/.local/share/<AppName>".
+ """
+ if system == "win32":
+ if appauthor is None:
+ appauthor = appname
+ const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
+ path = os.path.normpath(_get_win_folder(const))
+ if appname:
+ if appauthor is not False:
+ path = os.path.join(path, appauthor, appname)
+ else:
+ path = os.path.join(path, appname)
+ elif system == 'darwin':
+ path = os.path.expanduser('~/Library/Application Support/')
+ if appname:
+ path = os.path.join(path, appname)
+ else:
+ path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
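+
+# Illustrative example only (not part of the appdirs API; exact results depend
+# on the platform and environment).  On Linux with $XDG_DATA_HOME unset:
+#
+#   user_data_dir("MyApp", "MyCompany", version="1.0")
+#   -> '/home/<user>/.local/share/MyApp/1.0'
+#
+# while on Windows the <AppAuthor> component ("MyCompany") is included as well.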
+
+
+def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
+ r"""Return full path to the user-shared data dir for this application.
+
+        "appname" is the name of the application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "multipath" is an optional parameter only applicable to *nix
+ which indicates that the entire list of data dirs should be
+ returned. By default, the first item from XDG_DATA_DIRS is
+ returned, or '/usr/local/share/<AppName>',
+ if XDG_DATA_DIRS is not set
+
+ Typical site data directories are:
+ Mac OS X: /Library/Application Support/<AppName>
+ Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
+ Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
+ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
+ Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7.
+
+ For Unix, this is using the $XDG_DATA_DIRS[0] default.
+
+ WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
+ """
+ if system == "win32":
+ if appauthor is None:
+ appauthor = appname
+ path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
+ if appname:
+ if appauthor is not False:
+ path = os.path.join(path, appauthor, appname)
+ else:
+ path = os.path.join(path, appname)
+ elif system == 'darwin':
+ path = os.path.expanduser('/Library/Application Support')
+ if appname:
+ path = os.path.join(path, appname)
+ else:
+ # XDG default for $XDG_DATA_DIRS
+ # only first, if multipath is False
+ path = os.getenv('XDG_DATA_DIRS',
+ os.pathsep.join(['/usr/local/share', '/usr/share']))
+ pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
+ if appname:
+ if version:
+ appname = os.path.join(appname, version)
+ pathlist = [os.sep.join([x, appname]) for x in pathlist]
+
+ if multipath:
+ path = os.pathsep.join(pathlist)
+ else:
+ path = pathlist[0]
+ return path
+
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific config dir for this application.
+
+        "appname" is the name of the application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user config directories are:
+ Mac OS X: same as user_data_dir
+ Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
+ Win *: same as user_data_dir
+
+ For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
+ That means, by default "~/.config/<AppName>".
+ """
+ if system in ["win32", "darwin"]:
+ path = user_data_dir(appname, appauthor, None, roaming)
+ else:
+ path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
+    r"""Return full path to the user-shared config dir for this application.
+
+        "appname" is the name of the application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "multipath" is an optional parameter only applicable to *nix
+ which indicates that the entire list of config dirs should be
+ returned. By default, the first item from XDG_CONFIG_DIRS is
+ returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
+
+ Typical site config directories are:
+ Mac OS X: same as site_data_dir
+ Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
+ $XDG_CONFIG_DIRS
+ Win *: same as site_data_dir
+ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
+
+ For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False
+
+ WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
+ """
+ if system in ["win32", "darwin"]:
+ path = site_data_dir(appname, appauthor)
+ if appname and version:
+ path = os.path.join(path, version)
+ else:
+ # XDG default for $XDG_CONFIG_DIRS
+ # only first, if multipath is False
+ path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
+ pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
+ if appname:
+ if version:
+ appname = os.path.join(appname, version)
+ pathlist = [os.sep.join([x, appname]) for x in pathlist]
+
+ if multipath:
+ path = os.pathsep.join(pathlist)
+ else:
+ path = pathlist[0]
+ return path
+
+
+def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
+ r"""Return full path to the user-specific cache dir for this application.
+
+        "appname" is the name of the application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "opinion" (boolean) can be False to disable the appending of
+ "Cache" to the base app data dir for Windows. See
+ discussion below.
+
+ Typical user cache directories are:
+ Mac OS X: ~/Library/Caches/<AppName>
+ Unix: ~/.cache/<AppName> (XDG default)
+ Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
+ Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
+
+ On Windows the only suggestion in the MSDN docs is that local settings go in
+ the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
+ app data dir (the default returned by `user_data_dir` above). Apps typically
+ put cache data somewhere *under* the given dir here. Some examples:
+ ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
+ ...\Acme\SuperApp\Cache\1.0
+ OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
+ This can be disabled with the `opinion=False` option.
+ """
+ if system == "win32":
+ if appauthor is None:
+ appauthor = appname
+ path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
+ if appname:
+ if appauthor is not False:
+ path = os.path.join(path, appauthor, appname)
+ else:
+ path = os.path.join(path, appname)
+ if opinion:
+ path = os.path.join(path, "Cache")
+ elif system == 'darwin':
+ path = os.path.expanduser('~/Library/Caches')
+ if appname:
+ path = os.path.join(path, appname)
+ else:
+ path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific state dir for this application.
+
+        "appname" is the name of the application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user state directories are:
+ Mac OS X: same as user_data_dir
+ Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined
+ Win *: same as user_data_dir
+
+ For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
+ to extend the XDG spec and support $XDG_STATE_HOME.
+
+ That means, by default "~/.local/state/<AppName>".
+ """
+ if system in ["win32", "darwin"]:
+ path = user_data_dir(appname, appauthor, None, roaming)
+ else:
+ path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
+ r"""Return full path to the user-specific log dir for this application.
+
+        "appname" is the name of the application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "opinion" (boolean) can be False to disable the appending of
+ "Logs" to the base app data dir for Windows, and "log" to the
+ base cache dir for Unix. See discussion below.
+
+ Typical user log directories are:
+ Mac OS X: ~/Library/Logs/<AppName>
+ Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined
+ Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
+ Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs
+
+ On Windows the only suggestion in the MSDN docs is that local settings
+ go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
+    examples of what some Windows apps use for a logs dir.)
+
+ OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
+ value for Windows and appends "log" to the user cache dir for Unix.
+ This can be disabled with the `opinion=False` option.
+ """
+    if system == "darwin":
+        path = os.path.expanduser('~/Library/Logs')
+        if appname:
+            path = os.path.join(path, appname)
+ elif system == "win32":
+ path = user_data_dir(appname, appauthor, version)
+ version = False
+ if opinion:
+ path = os.path.join(path, "Logs")
+ else:
+ path = user_cache_dir(appname, appauthor, version)
+ version = False
+ if opinion:
+ path = os.path.join(path, "log")
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+class AppDirs(object):
+ """Convenience wrapper for getting application dirs."""
+ def __init__(self, appname=None, appauthor=None, version=None,
+ roaming=False, multipath=False):
+ self.appname = appname
+ self.appauthor = appauthor
+ self.version = version
+ self.roaming = roaming
+ self.multipath = multipath
+
+ @property
+ def user_data_dir(self):
+ return user_data_dir(self.appname, self.appauthor,
+ version=self.version, roaming=self.roaming)
+
+ @property
+ def site_data_dir(self):
+ return site_data_dir(self.appname, self.appauthor,
+ version=self.version, multipath=self.multipath)
+
+ @property
+ def user_config_dir(self):
+ return user_config_dir(self.appname, self.appauthor,
+ version=self.version, roaming=self.roaming)
+
+ @property
+ def site_config_dir(self):
+ return site_config_dir(self.appname, self.appauthor,
+ version=self.version, multipath=self.multipath)
+
+ @property
+ def user_cache_dir(self):
+ return user_cache_dir(self.appname, self.appauthor,
+ version=self.version)
+
+ @property
+ def user_state_dir(self):
+ return user_state_dir(self.appname, self.appauthor,
+ version=self.version)
+
+ @property
+ def user_log_dir(self):
+ return user_log_dir(self.appname, self.appauthor,
+ version=self.version)
+
+
+#---- internal support stuff
+
+def _get_win_folder_from_registry(csidl_name):
+ """This is a fallback technique at best. I'm not sure if using the
+ registry for this guarantees us the correct answer for all CSIDL_*
+ names.
+ """
+ if PY3:
+ import winreg as _winreg
+ else:
+ import _winreg
+
+ shell_folder_name = {
+ "CSIDL_APPDATA": "AppData",
+ "CSIDL_COMMON_APPDATA": "Common AppData",
+ "CSIDL_LOCAL_APPDATA": "Local AppData",
+ }[csidl_name]
+
+ key = _winreg.OpenKey(
+ _winreg.HKEY_CURRENT_USER,
+ r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
+ )
+ dir, type = _winreg.QueryValueEx(key, shell_folder_name)
+ return dir
+
+
+def _get_win_folder_with_pywin32(csidl_name):
+ from win32com.shell import shellcon, shell
+ dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
+ # Try to make this a unicode path because SHGetFolderPath does
+ # not return unicode strings when there is unicode data in the
+ # path.
+ try:
+ dir = unicode(dir)
+
+ # Downgrade to short path name if have highbit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in dir:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ try:
+ import win32api
+ dir = win32api.GetShortPathName(dir)
+ except ImportError:
+ pass
+ except UnicodeError:
+ pass
+ return dir
+
+
+def _get_win_folder_with_ctypes(csidl_name):
+ import ctypes
+
+ csidl_const = {
+ "CSIDL_APPDATA": 26,
+ "CSIDL_COMMON_APPDATA": 35,
+ "CSIDL_LOCAL_APPDATA": 28,
+ }[csidl_name]
+
+ buf = ctypes.create_unicode_buffer(1024)
+ ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
+
+ # Downgrade to short path name if have highbit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in buf:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ buf2 = ctypes.create_unicode_buffer(1024)
+ if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+ buf = buf2
+
+ return buf.value
+
+def _get_win_folder_with_jna(csidl_name):
+ import array
+ from com.sun import jna
+ from com.sun.jna.platform import win32
+
+ buf_size = win32.WinDef.MAX_PATH * 2
+ buf = array.zeros('c', buf_size)
+ shell = win32.Shell32.INSTANCE
+ shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
+ dir = jna.Native.toString(buf.tostring()).rstrip("\0")
+
+ # Downgrade to short path name if have highbit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in dir:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ buf = array.zeros('c', buf_size)
+ kernel = win32.Kernel32.INSTANCE
+ if kernel.GetShortPathName(dir, buf, buf_size):
+ dir = jna.Native.toString(buf.tostring()).rstrip("\0")
+
+ return dir
+
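+# Pick the best available helper for resolving CSIDL folder paths, in order of
+# preference: pywin32, ctypes, JNA (for Jython), and finally the registry.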
+if system == "win32":
+ try:
+ import win32com.shell
+ _get_win_folder = _get_win_folder_with_pywin32
+ except ImportError:
+ try:
+ from ctypes import windll
+ _get_win_folder = _get_win_folder_with_ctypes
+ except ImportError:
+ try:
+ import com.sun.jna
+ _get_win_folder = _get_win_folder_with_jna
+ except ImportError:
+ _get_win_folder = _get_win_folder_from_registry
+
+
+#---- self test code
+
+if __name__ == "__main__":
+ appname = "MyApp"
+ appauthor = "MyCompany"
+
+ props = ("user_data_dir",
+ "user_config_dir",
+ "user_cache_dir",
+ "user_state_dir",
+ "user_log_dir",
+ "site_data_dir",
+ "site_config_dir")
+
+ print("-- app dirs %s --" % __version__)
+
+ print("-- app dirs (with optional 'version')")
+ dirs = AppDirs(appname, appauthor, version="1.0")
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
+
+ print("\n-- app dirs (without optional 'version')")
+ dirs = AppDirs(appname, appauthor)
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
+
+ print("\n-- app dirs (without optional 'appauthor')")
+ dirs = AppDirs(appname)
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
+
+ print("\n-- app dirs (with disabled 'appauthor')")
+ dirs = AppDirs(appname, appauthor=False)
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
diff --git a/third_party/python/appdirs/setup.cfg b/third_party/python/appdirs/setup.cfg
new file mode 100644
index 0000000000..1e3eb367c1
--- /dev/null
+++ b/third_party/python/appdirs/setup.cfg
@@ -0,0 +1,7 @@
+[wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/appdirs/setup.py b/third_party/python/appdirs/setup.py
new file mode 100644
index 0000000000..293c1c44d4
--- /dev/null
+++ b/third_party/python/appdirs/setup.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+import sys
+import os
+import os.path
+# appdirs is a dependency of setuptools, so allow installing without it.
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+import ast
+
+tests_require = []
+if sys.version_info < (2, 7):
+ tests_require.append("unittest2")
+
+
+def read(fname):
+ inf = open(os.path.join(os.path.dirname(__file__), fname))
+ out = "\n" + inf.read().replace("\r\n", "\n")
+ inf.close()
+ return out
+
+
+# Do not import `appdirs` yet, lest we import some random version on sys.path.
+for line in read("appdirs.py").splitlines():
+ if line.startswith("__version__"):
+ version = ast.literal_eval(line.split("=", 1)[1].strip())
+ break
+
+
+setup(
+ name='appdirs',
+ version=version,
+ description='A small Python module for determining appropriate ' + \
+ 'platform-specific dirs, e.g. a "user data dir".',
+ long_description=read('README.rst') + '\n' + read('CHANGES.rst'),
+ classifiers=[c.strip() for c in """
+ Development Status :: 5 - Production/Stable
+ Intended Audience :: Developers
+ License :: OSI Approved :: MIT License
+ Operating System :: OS Independent
+ Programming Language :: Python :: 2
+ Programming Language :: Python :: 2.7
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3.4
+ Programming Language :: Python :: 3.5
+ Programming Language :: Python :: 3.6
+ Programming Language :: Python :: 3.7
+ Programming Language :: Python :: 3.8
+ Programming Language :: Python :: Implementation :: PyPy
+ Programming Language :: Python :: Implementation :: CPython
+ Topic :: Software Development :: Libraries :: Python Modules
+ """.split('\n') if c.strip()],
+ test_suite='test.test_api',
+ tests_require=tests_require,
+ keywords='application directory log cache user',
+ author='Trent Mick',
+ author_email='trentm@gmail.com',
+ maintainer='Jeff Rouse',
+ maintainer_email='jr@its.to',
+ url='http://github.com/ActiveState/appdirs',
+ license='MIT',
+ py_modules=["appdirs"],
+)
diff --git a/third_party/python/appdirs/tox.ini b/third_party/python/appdirs/tox.ini
new file mode 100644
index 0000000000..85e3dd324f
--- /dev/null
+++ b/third_party/python/appdirs/tox.ini
@@ -0,0 +1,5 @@
+[tox]
+envlist = py26, py27, py32, py33, py34, py35, py36
+
+[testenv]
+commands = python setup.py test
diff --git a/third_party/python/atomicwrites/LICENSE b/third_party/python/atomicwrites/LICENSE
new file mode 100644
index 0000000000..3bbadc3af2
--- /dev/null
+++ b/third_party/python/atomicwrites/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2015-2016 Markus Unterwaditzer
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/python/atomicwrites/MANIFEST.in b/third_party/python/atomicwrites/MANIFEST.in
new file mode 100644
index 0000000000..1b28469174
--- /dev/null
+++ b/third_party/python/atomicwrites/MANIFEST.in
@@ -0,0 +1,6 @@
+include LICENSE
+include README.rst
+
+recursive-include docs *
+recursive-include tests *
+prune docs/_build
diff --git a/third_party/python/atomicwrites/PKG-INFO b/third_party/python/atomicwrites/PKG-INFO
new file mode 100644
index 0000000000..eec6b7c305
--- /dev/null
+++ b/third_party/python/atomicwrites/PKG-INFO
@@ -0,0 +1,112 @@
+Metadata-Version: 1.0
+Name: atomicwrites
+Version: 1.1.5
+Summary: Atomic file writes.
+Home-page: https://github.com/untitaker/python-atomicwrites
+Author: Markus Unterwaditzer
+Author-email: markus@unterwaditzer.net
+License: MIT
+Description: ===================
+ python-atomicwrites
+ ===================
+
+ .. image:: https://travis-ci.org/untitaker/python-atomicwrites.svg?branch=master
+ :target: https://travis-ci.org/untitaker/python-atomicwrites
+
+ .. image:: https://ci.appveyor.com/api/projects/status/vadc4le3c27to59x/branch/master?svg=true
+ :target: https://ci.appveyor.com/project/untitaker/python-atomicwrites/branch/master
+
+ Atomic file writes.
+
+ .. code-block:: python
+
+ from atomicwrites import atomic_write
+
+ with atomic_write('foo.txt', overwrite=True) as f:
+ f.write('Hello world.')
+ # "foo.txt" doesn't exist yet.
+
+ # Now it does.
+
+
+ Features that distinguish it from other similar libraries (see `Alternatives and Credit`_):
+
+ - Race-free assertion that the target file doesn't yet exist. This can be
+ controlled with the ``overwrite`` parameter.
+
+ - Windows support, although not well-tested. The MSDN resources are not very
+ explicit about which operations are atomic.
+
+ - Simple high-level API that wraps a very flexible class-based API.
+
+ - Consistent error handling across platforms.
+
+
+ How it works
+ ============
+
+ It uses a temporary file in the same directory as the given path. This ensures
+ that the temporary file resides on the same filesystem.
+
+ The temporary file will then be atomically moved to the target location: On
+ POSIX, it will use ``rename`` if files should be overwritten, otherwise a
+ combination of ``link`` and ``unlink``. On Windows, it uses MoveFileEx_ through
+ stdlib's ``ctypes`` with the appropriate flags.
+
+        Note that with ``link`` and ``unlink``, there's a time window where the file
+ might be available under two entries in the filesystem: The name of the
+ temporary file, and the name of the target file.
+
+ Also note that the permissions of the target file may change this way. In some
+ situations a ``chmod`` can be issued without any concurrency problems, but
+ since that is not always the case, this library doesn't do it by itself.
+
+ .. _MoveFileEx: https://msdn.microsoft.com/en-us/library/windows/desktop/aa365240%28v=vs.85%29.aspx
+
+ fsync
+ -----
+
+ On POSIX, ``fsync`` is invoked on the temporary file after it is written (to
+ flush file content and metadata), and on the parent directory after the file is
+ moved (to flush filename).
+
+ ``fsync`` does not take care of disks' internal buffers, but there don't seem
+ to be any standard POSIX APIs for that. On OS X, ``fcntl`` is used with
+ ``F_FULLFSYNC`` instead of ``fsync`` for that reason.
+
+ On Windows, `_commit <https://msdn.microsoft.com/en-us/library/17618685.aspx>`_
+ is used, but there are no guarantees about disk internal buffers.
+
+ Alternatives and Credit
+ =======================
+
+ Atomicwrites is directly inspired by the following libraries (and shares a
+ minimal amount of code):
+
+ - The Trac project's `utility functions
+ <http://www.edgewall.org/docs/tags-trac-0.11.7/epydoc/trac.util-pysrc.html>`_,
+ also used in `Werkzeug <http://werkzeug.pocoo.org/>`_ and
+ `mitsuhiko/python-atomicfile
+ <https://github.com/mitsuhiko/python-atomicfile>`_. The idea to use
+ ``ctypes`` instead of ``PyWin32`` originated there.
+
+ - `abarnert/fatomic <https://github.com/abarnert/fatomic>`_. Windows support
+ (based on ``PyWin32``) was originally taken from there.
+
+ Other alternatives to atomicwrites include:
+
+ - `sashka/atomicfile <https://github.com/sashka/atomicfile>`_. Originally I
+ considered using that, but at the time it was lacking a lot of features I
+ needed (Windows support, overwrite-parameter, overriding behavior through
+ subclassing).
+
+ - The `Boltons library collection <https://github.com/mahmoud/boltons>`_
+ features a class for atomic file writes, which seems to have a very similar
+ ``overwrite`` parameter. It is lacking Windows support though.
+
+ License
+ =======
+
+ Licensed under the MIT, see ``LICENSE``.
+
+Platform: UNKNOWN
diff --git a/third_party/python/atomicwrites/README.rst b/third_party/python/atomicwrites/README.rst
new file mode 100644
index 0000000000..3a5658cbd8
--- /dev/null
+++ b/third_party/python/atomicwrites/README.rst
@@ -0,0 +1,102 @@
+===================
+python-atomicwrites
+===================
+
+.. image:: https://travis-ci.org/untitaker/python-atomicwrites.svg?branch=master
+ :target: https://travis-ci.org/untitaker/python-atomicwrites
+
+.. image:: https://ci.appveyor.com/api/projects/status/vadc4le3c27to59x/branch/master?svg=true
+ :target: https://ci.appveyor.com/project/untitaker/python-atomicwrites/branch/master
+
+Atomic file writes.
+
+.. code-block:: python
+
+ from atomicwrites import atomic_write
+
+ with atomic_write('foo.txt', overwrite=True) as f:
+ f.write('Hello world.')
+ # "foo.txt" doesn't exist yet.
+
+ # Now it does.
+
+
+Features that distinguish it from other similar libraries (see `Alternatives and Credit`_):
+
+- Race-free assertion that the target file doesn't yet exist. This can be
+ controlled with the ``overwrite`` parameter.
+
+- Windows support, although not well-tested. The MSDN resources are not very
+ explicit about which operations are atomic.
+
+- Simple high-level API that wraps a very flexible class-based API.
+
+- Consistent error handling across platforms.
+
+
+How it works
+============
+
+It uses a temporary file in the same directory as the given path. This ensures
+that the temporary file resides on the same filesystem.
+
+The temporary file will then be atomically moved to the target location: On
+POSIX, it will use ``rename`` if files should be overwritten, otherwise a
+combination of ``link`` and ``unlink``. On Windows, it uses MoveFileEx_ through
+stdlib's ``ctypes`` with the appropriate flags.
+
+Note that with ``link`` and ``unlink``, there's a time window where the file
+might be available under two entries in the filesystem: The name of the
+temporary file, and the name of the target file.
+
+Also note that the permissions of the target file may change this way. In some
+situations a ``chmod`` can be issued without any concurrency problems, but
+since that is not always the case, this library doesn't do it by itself.
+
+.. _MoveFileEx: https://msdn.microsoft.com/en-us/library/windows/desktop/aa365240%28v=vs.85%29.aspx
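+
+For illustration only, here is a minimal sketch of the same
+"write to a temporary file, then rename over the target" idea on POSIX.
+It is *not* the library's implementation: it skips the non-overwriting
+``link``/``unlink`` path, the Windows branch, directory syncing, and most
+error handling.
+
+.. code-block:: python
+
+    import os
+    import tempfile
+
+    def naive_atomic_write(path, data):
+        # Create the temporary file in the same directory as the target so
+        # the final rename stays on one filesystem and therefore is atomic.
+        dirname = os.path.dirname(os.path.abspath(path))
+        fd, tmp = tempfile.mkstemp(dir=dirname)
+        try:
+            with os.fdopen(fd, 'w') as f:
+                f.write(data)
+                f.flush()
+                os.fsync(f.fileno())  # flush file contents before the rename
+            os.rename(tmp, path)      # atomically replaces the target on POSIX
+        except BaseException:
+            os.unlink(tmp)
+            raise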
+
+fsync
+-----
+
+On POSIX, ``fsync`` is invoked on the temporary file after it is written (to
+flush file content and metadata), and on the parent directory after the file is
+moved (to flush filename).
+
+``fsync`` does not take care of disks' internal buffers, but there don't seem
+to be any standard POSIX APIs for that. On OS X, ``fcntl`` is used with
+``F_FULLFSYNC`` instead of ``fsync`` for that reason.
+
+On Windows, `_commit <https://msdn.microsoft.com/en-us/library/17618685.aspx>`_
+is used, but there are no guarantees about disk internal buffers.
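+
+For illustration, "``fsync`` on the parent directory" boils down to something
+like the following sketch (not the library's exact code): the directory itself
+is opened and synced so that the directory entry created by the rename is
+flushed to disk, independently of the file's contents.
+
+.. code-block:: python
+
+    import os
+
+    def sync_directory(path):
+        # Open the directory read-only and fsync it; this flushes the
+        # directory entry (the filename), not the file data.
+        fd = os.open(path, os.O_RDONLY)
+        try:
+            os.fsync(fd)
+        finally:
+            os.close(fd)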
+
+Alternatives and Credit
+=======================
+
+Atomicwrites is directly inspired by the following libraries (and shares a
+minimal amount of code):
+
+- The Trac project's `utility functions
+ <http://www.edgewall.org/docs/tags-trac-0.11.7/epydoc/trac.util-pysrc.html>`_,
+ also used in `Werkzeug <http://werkzeug.pocoo.org/>`_ and
+ `mitsuhiko/python-atomicfile
+ <https://github.com/mitsuhiko/python-atomicfile>`_. The idea to use
+ ``ctypes`` instead of ``PyWin32`` originated there.
+
+- `abarnert/fatomic <https://github.com/abarnert/fatomic>`_. Windows support
+ (based on ``PyWin32``) was originally taken from there.
+
+Other alternatives to atomicwrites include:
+
+- `sashka/atomicfile <https://github.com/sashka/atomicfile>`_. Originally I
+ considered using that, but at the time it was lacking a lot of features I
+ needed (Windows support, overwrite-parameter, overriding behavior through
+ subclassing).
+
+- The `Boltons library collection <https://github.com/mahmoud/boltons>`_
+ features a class for atomic file writes, which seems to have a very similar
+ ``overwrite`` parameter. It is lacking Windows support though.
+
+License
+=======
+
+Licensed under the MIT, see ``LICENSE``.
diff --git a/third_party/python/atomicwrites/atomicwrites/__init__.py b/third_party/python/atomicwrites/atomicwrites/__init__.py
new file mode 100644
index 0000000000..a182c07afd
--- /dev/null
+++ b/third_party/python/atomicwrites/atomicwrites/__init__.py
@@ -0,0 +1,201 @@
+import contextlib
+import os
+import sys
+import tempfile
+
+try:
+ import fcntl
+except ImportError:
+ fcntl = None
+
+__version__ = '1.1.5'
+
+
+PY2 = sys.version_info[0] == 2
+
+text_type = unicode if PY2 else str # noqa
+
+
+def _path_to_unicode(x):
+ if not isinstance(x, text_type):
+ return x.decode(sys.getfilesystemencoding())
+ return x
+
+
+_proper_fsync = os.fsync
+
+
+if sys.platform != 'win32':
+ if hasattr(fcntl, 'F_FULLFSYNC'):
+ def _proper_fsync(fd):
+ # https://lists.apple.com/archives/darwin-dev/2005/Feb/msg00072.html
+ # https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man2/fsync.2.html
+ # https://github.com/untitaker/python-atomicwrites/issues/6
+ fcntl.fcntl(fd, fcntl.F_FULLFSYNC)
+
+ def _sync_directory(directory):
+ # Ensure that filenames are written to disk
+ fd = os.open(directory, 0)
+ try:
+ _proper_fsync(fd)
+ finally:
+ os.close(fd)
+
+ def _replace_atomic(src, dst):
+ os.rename(src, dst)
+ _sync_directory(os.path.normpath(os.path.dirname(dst)))
+
+ def _move_atomic(src, dst):
+ os.link(src, dst)
+ os.unlink(src)
+
+ src_dir = os.path.normpath(os.path.dirname(src))
+ dst_dir = os.path.normpath(os.path.dirname(dst))
+ _sync_directory(dst_dir)
+ if src_dir != dst_dir:
+ _sync_directory(src_dir)
+else:
+ from ctypes import windll, WinError
+
+ _MOVEFILE_REPLACE_EXISTING = 0x1
+ _MOVEFILE_WRITE_THROUGH = 0x8
+ _windows_default_flags = _MOVEFILE_WRITE_THROUGH
+
+ def _handle_errors(rv):
+ if not rv:
+ raise WinError()
+
+ def _replace_atomic(src, dst):
+ _handle_errors(windll.kernel32.MoveFileExW(
+ _path_to_unicode(src), _path_to_unicode(dst),
+ _windows_default_flags | _MOVEFILE_REPLACE_EXISTING
+ ))
+
+ def _move_atomic(src, dst):
+ _handle_errors(windll.kernel32.MoveFileExW(
+ _path_to_unicode(src), _path_to_unicode(dst),
+ _windows_default_flags
+ ))
+
+
+def replace_atomic(src, dst):
+ '''
+ Move ``src`` to ``dst``. If ``dst`` exists, it will be silently
+ overwritten.
+
+ Both paths must reside on the same filesystem for the operation to be
+ atomic.
+ '''
+ return _replace_atomic(src, dst)
+
+
+def move_atomic(src, dst):
+ '''
+    Move ``src`` to ``dst``. There might be a time window where both filesystem
+ entries exist. If ``dst`` already exists, :py:exc:`FileExistsError` will be
+ raised.
+
+ Both paths must reside on the same filesystem for the operation to be
+ atomic.
+ '''
+ return _move_atomic(src, dst)
+
+
+class AtomicWriter(object):
+ '''
+ A helper class for performing atomic writes. Usage::
+
+ with AtomicWriter(path).open() as f:
+ f.write(...)
+
+ :param path: The destination filepath. May or may not exist.
+ :param mode: The filemode for the temporary file.
+ :param overwrite: If set to false, an error is raised if ``path`` exists.
+ Errors are only raised after the file has been written to. Either way,
+ the operation is atomic.
+
+ If you need further control over the exact behavior, you are encouraged to
+ subclass.
+ '''
+
+ def __init__(self, path, mode='w', overwrite=False):
+ if 'a' in mode:
+ raise ValueError(
+ 'Appending to an existing file is not supported, because that '
+ 'would involve an expensive `copy`-operation to a temporary '
+ 'file. Open the file in normal `w`-mode and copy explicitly '
+ 'if that\'s what you\'re after.'
+ )
+ if 'x' in mode:
+ raise ValueError('Use the `overwrite`-parameter instead.')
+ if 'w' not in mode:
+ raise ValueError('AtomicWriters can only be written to.')
+
+ self._path = path
+ self._mode = mode
+ self._overwrite = overwrite
+
+ def open(self):
+ '''
+ Open the temporary file.
+ '''
+ return self._open(self.get_fileobject)
+
+ @contextlib.contextmanager
+ def _open(self, get_fileobject):
+ f = None # make sure f exists even if get_fileobject() fails
+ try:
+ success = False
+ with get_fileobject() as f:
+ yield f
+ self.sync(f)
+ self.commit(f)
+ success = True
+ finally:
+ if not success:
+ try:
+ self.rollback(f)
+ except Exception:
+ pass
+
+ def get_fileobject(self, dir=None, **kwargs):
+ '''Return the temporary file to use.'''
+ if dir is None:
+ dir = os.path.normpath(os.path.dirname(self._path))
+ return tempfile.NamedTemporaryFile(mode=self._mode, dir=dir,
+ delete=False, **kwargs)
+
+ def sync(self, f):
+ '''responsible for clearing as many file caches as possible before
+ commit'''
+ f.flush()
+ _proper_fsync(f.fileno())
+
+ def commit(self, f):
+ '''Move the temporary file to the target location.'''
+ if self._overwrite:
+ replace_atomic(f.name, self._path)
+ else:
+ move_atomic(f.name, self._path)
+
+ def rollback(self, f):
+ '''Clean up all temporary resources.'''
+ os.unlink(f.name)
+
+
+def atomic_write(path, writer_cls=AtomicWriter, **cls_kwargs):
+ '''
+ Simple atomic writes. This wraps :py:class:`AtomicWriter`::
+
+ with atomic_write(path) as f:
+ f.write(...)
+
+ :param path: The target path to write to.
+ :param writer_cls: The writer class to use. This parameter is useful if you
+ subclassed :py:class:`AtomicWriter` to change some behavior and want to
+ use that new subclass.
+
+ Additional keyword arguments are passed to the writer class. See
+ :py:class:`AtomicWriter`.
+ '''
+ return writer_cls(path, **cls_kwargs).open()
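+
+
+# Illustration only -- not part of the package.  Behaviour can be customised by
+# subclassing AtomicWriter; the subclass and the chmod step below are hypothetical:
+#
+#   class WorldReadableWriter(AtomicWriter):
+#       def get_fileobject(self, **kwargs):
+#           f = super(WorldReadableWriter, self).get_fileobject(**kwargs)
+#           os.chmod(f.name, 0o644)  # relax the temporary file's default permissions
+#           return f
+#
+#   with atomic_write('foo.txt', writer_cls=WorldReadableWriter) as f:
+#       f.write('some data')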
diff --git a/third_party/python/atomicwrites/setup.cfg b/third_party/python/atomicwrites/setup.cfg
new file mode 100644
index 0000000000..3e89eac2c0
--- /dev/null
+++ b/third_party/python/atomicwrites/setup.cfg
@@ -0,0 +1,8 @@
+[wheel]
+universal = 1
+
+[egg_info]
+tag_date = 0
+tag_svn_revision = 0
+tag_build =
+
diff --git a/third_party/python/atomicwrites/setup.py b/third_party/python/atomicwrites/setup.py
new file mode 100644
index 0000000000..98488e9b98
--- /dev/null
+++ b/third_party/python/atomicwrites/setup.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+import ast
+import re
+
+from setuptools import find_packages, setup
+
+
+_version_re = re.compile(r'__version__\s+=\s+(.*)')
+
+
+with open('atomicwrites/__init__.py', 'rb') as f:
+ version = str(ast.literal_eval(_version_re.search(
+ f.read().decode('utf-8')).group(1)))
+
+setup(
+ name='atomicwrites',
+ version=version,
+ author='Markus Unterwaditzer',
+ author_email='markus@unterwaditzer.net',
+ url='https://github.com/untitaker/python-atomicwrites',
+ description='Atomic file writes.',
+ license='MIT',
+ long_description=open('README.rst').read(),
+ packages=find_packages(exclude=['tests.*', 'tests']),
+ include_package_data=True,
+)
diff --git a/third_party/python/attrs/.coveragerc b/third_party/python/attrs/.coveragerc
new file mode 100644
index 0000000000..093c119431
--- /dev/null
+++ b/third_party/python/attrs/.coveragerc
@@ -0,0 +1,13 @@
+[run]
+branch = True
+source =
+ attr
+
+[paths]
+source =
+ src/attr
+ .tox/*/lib/python*/site-packages/attr
+ .tox/pypy/site-packages/attr
+
+[report]
+show_missing = True
diff --git a/third_party/python/attrs/.github/CODE_OF_CONDUCT.rst b/third_party/python/attrs/.github/CODE_OF_CONDUCT.rst
new file mode 100644
index 0000000000..56e8914ce2
--- /dev/null
+++ b/third_party/python/attrs/.github/CODE_OF_CONDUCT.rst
@@ -0,0 +1,55 @@
+Contributor Covenant Code of Conduct
+====================================
+
+Our Pledge
+----------
+
+In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+Our Standards
+-------------
+
+Examples of behavior that contributes to creating a positive environment include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a professional setting
+
+Our Responsibilities
+--------------------
+
+Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
+
+Scope
+-----
+
+This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community.
+Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event.
+Representation of a project may be further defined and clarified by project maintainers.
+
+Enforcement
+-----------
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at hs@ox.cx.
+All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances.
+The project team is obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
+
+Attribution
+-----------
+
+This Code of Conduct is adapted from the `Contributor Covenant <https://www.contributor-covenant.org>`_, version 1.4, available at <https://www.contributor-covenant.org/version/1/4/code-of-conduct.html>.
diff --git a/third_party/python/attrs/.github/CONTRIBUTING.rst b/third_party/python/attrs/.github/CONTRIBUTING.rst
new file mode 100644
index 0000000000..c43c2f1ff6
--- /dev/null
+++ b/third_party/python/attrs/.github/CONTRIBUTING.rst
@@ -0,0 +1,250 @@
+How To Contribute
+=================
+
+First off, thank you for considering contributing to ``attrs``!
+It's people like *you* who make it such a great tool for everyone.
+
+This document intends to make contribution more accessible by codifying tribal knowledge and expectations.
+Don't be afraid to open half-finished PRs, and ask questions if something is unclear!
+
+
+Support
+-------
+
+In case you'd like to help out but don't want to deal with GitHub, there's a great opportunity:
+help your fellow developers on `StackOverflow <https://stackoverflow.com/questions/tagged/python-attrs>`_!
+
+The official tag is ``python-attrs``, and helping out in support frees us up to improve ``attrs`` instead!
+
+
+Workflow
+--------
+
+- No contribution is too small!
+ Please submit as many fixes for typos and grammar bloopers as you can!
+- Try to limit each pull request to *one* change only.
+- Since we squash on merge, it's up to you how you handle updates to the master branch.
+ Whether you prefer to rebase on master or merge master into your branch, do whatever is more comfortable for you.
+- *Always* add tests and docs for your code.
+ This is a hard rule; patches with missing tests or documentation can't be merged.
+- Make sure your changes pass our CI_.
+ You won't get any feedback until it's green unless you ask for it.
+- Once you've addressed review feedback, make sure to bump the pull request with a short note, so we know you're done.
+- Don’t break `backward compatibility`_.
+
+
+Code
+----
+
+- Obey `PEP 8`_ and `PEP 257`_.
+ We use the ``"""``\ -on-separate-lines style for docstrings:
+
+ .. code-block:: python
+
+ def func(x):
+ """
+ Do something.
+
+ :param str x: A very important parameter.
+
+ :rtype: str
+ """
+- If you add or change public APIs, tag the docstring using ``.. versionadded:: 16.0.0 WHAT`` or ``.. versionchanged:: 16.2.0 WHAT``.
+- We use isort_ to sort our imports, and we follow the Black_ code style with a line length of 79 characters.
+ As long as you run our full tox suite before committing, or install our pre-commit_ hooks (ideally you'll do both -- see below "Local Development Environment"), you won't have to spend any time on formatting your code at all.
+ If you don't, CI will catch it for you -- but that seems like a waste of your time!
+
+
+Tests
+-----
+
+- Write your asserts as ``expected == actual`` to line them up nicely:
+
+ .. code-block:: python
+
+ x = f()
+
+ assert 42 == x.some_attribute
+ assert "foo" == x._a_private_attribute
+
+- To run the test suite, all you need is a recent tox_.
+ It will ensure the test suite runs with all dependencies against all Python versions just as it will on Travis CI.
+  If you lack some Python versions, you can always limit the environments like ``tox -e py27,py35`` (in that case you may want to look into pyenv_, which makes it very easy to install many different Python versions in parallel).
+- Write `good test docstrings`_.
+- To ensure new features work well with the rest of the system, they should also be added to our `Hypothesis`_ testing strategy, which is found in ``tests/strategies.py``.
+- If you've changed or added public APIs, please update our type stubs (files ending in ``.pyi``).
+
+
+Documentation
+-------------
+
+- Use `semantic newlines`_ in reStructuredText_ files (files ending in ``.rst``):
+
+ .. code-block:: rst
+
+ This is a sentence.
+ This is another sentence.
+
+- If you start a new section, add two blank lines before and one blank line after the header, except if two headers follow immediately after each other:
+
+ .. code-block:: rst
+
+ Last line of previous section.
+
+
+ Header of New Top Section
+ -------------------------
+
+ Header of New Section
+ ^^^^^^^^^^^^^^^^^^^^^
+
+ First line of new section.
+
+- If you add a new feature, demonstrate its awesomeness on the `examples page`_!
+
+
+Changelog
+^^^^^^^^^
+
+If your change is noteworthy, there needs to be a changelog entry so our users can learn about it!
+
+To avoid merge conflicts, we use the towncrier_ package to manage our changelog.
+``towncrier`` uses independent files for each pull request -- so-called *news fragments* -- instead of one monolithic changelog file.
+On release, those news fragments are compiled into our ``CHANGELOG.rst``.
+
+You don't need to install ``towncrier`` yourself; you just have to abide by a few simple rules:
+
+- For each pull request, add a new file into ``changelog.d`` with a filename adhering to the ``pr#.(change|deprecation|breaking).rst`` schema:
+ For example, ``changelog.d/42.change.rst`` for a non-breaking change that is proposed in pull request #42.
+- As with other docs, please use `semantic newlines`_ within news fragments.
+- Wrap symbols like modules, functions, or classes into double backticks so they are rendered in a ``monospace font``.
+- Wrap arguments into asterisks like in docstrings: *these* or *attributes*.
+- If you mention functions or other callables, add parentheses at the end of their names: ``attr.func()`` or ``attr.Class.method()``.
+ This makes the changelog a lot more readable.
+- Prefer simple past tense or constructions with "now".
+ For example:
+
+ + Added ``attr.validators.func()``.
+ + ``attr.func()`` now doesn't crash the Large Hadron Collider anymore when passed the *foobar* argument.
+- If you want to reference multiple issues, copy the news fragment to another filename.
+ ``towncrier`` will merge all news fragments with identical contents into one entry with multiple links to the respective pull requests.
+
+Example entries:
+
+ .. code-block:: rst
+
+ Added ``attr.validators.func()``.
+ The feature really *is* awesome.
+
+or:
+
+ .. code-block:: rst
+
+ ``attr.func()`` now doesn't crash the Large Hadron Collider anymore when passed the *foobar* argument.
+ The bug really *was* nasty.
+
+----
+
+``tox -e changelog`` will render the current changelog to the terminal if you have any doubts.
+
+
+Local Development Environment
+-----------------------------
+
+You can (and should) run our test suite using tox_.
+However, you’ll probably want a more traditional environment as well.
+We highly recommend that you develop using the latest Python 3 release because ``attrs`` tries to take advantage of modern features whenever possible.
+
+First create a `virtual environment <https://virtualenv.pypa.io/>`_.
+It’s out of scope for this document to list all the ways to manage virtual environments in Python, but if you don’t already have a pet way, take some time to look at tools like `pew <https://github.com/berdario/pew>`_, `virtualfish <https://virtualfish.readthedocs.io/>`_, and `virtualenvwrapper <https://virtualenvwrapper.readthedocs.io/>`_.
+
+Next, get an up to date checkout of the ``attrs`` repository:
+
+.. code-block:: bash
+
+ $ git clone git@github.com:python-attrs/attrs.git
+
+or if you want to use git via ``https``:
+
+.. code-block:: bash
+
+ $ git clone https://github.com/python-attrs/attrs.git
+
+Change into the newly created directory and **after activating your virtual environment** install an editable version of ``attrs`` along with its tests and docs requirements:
+
+.. code-block:: bash
+
+ $ cd attrs
+ $ pip install -e '.[dev]'
+
+At this point,
+
+.. code-block:: bash
+
+ $ python -m pytest
+
+should work and pass, as should:
+
+.. code-block:: bash
+
+ $ cd docs
+ $ make html
+
+The built documentation can then be found in ``docs/_build/html/``.
+
+To avoid committing code that violates our style guide, we strongly advise you to install pre-commit_ [#f1]_ hooks:
+
+.. code-block:: bash
+
+ $ pre-commit install
+
+You can also run them anytime (as our tox does) using:
+
+.. code-block:: bash
+
+ $ pre-commit run --all-files
+
+
+.. [#f1] pre-commit should have been installed into your virtualenv automatically when you ran ``pip install -e '.[dev]'`` above. If pre-commit is missing, it may be that you need to re-run ``pip install -e '.[dev]'``.
+
+
+Governance
+----------
+
+``attrs`` is maintained by a `team of volunteers`_ that is always open to new members who share our vision of a fast, lean, and magic-free library that empowers programmers to write better code with less effort.
+If you'd like to join, just get a pull request merged and ask to be added in the very same pull request!
+
+**The simple rule is that everyone is welcome to review/merge pull requests of others but nobody is allowed to merge their own code.**
+
+`Hynek Schlawack`_ acts reluctantly as the BDFL_ and has the final say over design decisions.
+
+
+****
+
+Please note that this project is released with a Contributor `Code of Conduct`_.
+By participating in this project you agree to abide by its terms.
+Please report any harm to `Hynek Schlawack`_ in any way you find appropriate.
+
+Thank you for considering contributing to ``attrs``!
+
+
+.. _`Hynek Schlawack`: https://hynek.me/about/
+.. _`PEP 8`: https://www.python.org/dev/peps/pep-0008/
+.. _`PEP 257`: https://www.python.org/dev/peps/pep-0257/
+.. _`good test docstrings`: https://jml.io/pages/test-docstrings.html
+.. _`Code of Conduct`: https://github.com/python-attrs/attrs/blob/master/.github/CODE_OF_CONDUCT.rst
+.. _changelog: https://github.com/python-attrs/attrs/blob/master/CHANGELOG.rst
+.. _`backward compatibility`: https://www.attrs.org/en/latest/backward-compatibility.html
+.. _tox: https://tox.readthedocs.io/
+.. _pyenv: https://github.com/pyenv/pyenv
+.. _reStructuredText: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
+.. _semantic newlines: https://rhodesmill.org/brandon/2012/one-sentence-per-line/
+.. _examples page: https://github.com/python-attrs/attrs/blob/master/docs/examples.rst
+.. _Hypothesis: https://hypothesis.readthedocs.io/
+.. _CI: https://travis-ci.org/python-attrs/attrs/
+.. _`team of volunteers`: https://github.com/python-attrs
+.. _BDFL: https://en.wikipedia.org/wiki/Benevolent_dictator_for_life
+.. _towncrier: https://pypi.org/project/towncrier
+.. _black: https://github.com/ambv/black
+.. _pre-commit: https://pre-commit.com/
+.. _isort: https://github.com/timothycrosley/isort
diff --git a/third_party/python/attrs/.github/PULL_REQUEST_TEMPLATE.md b/third_party/python/attrs/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000..8ac4f7aea4
--- /dev/null
+++ b/third_party/python/attrs/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,18 @@
+# Pull Request Check List
+
+This is just a reminder about the most common mistakes. Please make sure that you tick all *appropriate* boxes. But please read our [contribution guide](https://www.attrs.org/en/latest/contributing.html) at least once, it will save you unnecessary review cycles!
+
+If an item doesn't apply to your pull request, **check it anyway** to make it apparent that there's nothing to do.
+
+- [ ] Added **tests** for changed code.
+- [ ] New features have been added to our [Hypothesis testing strategy](https://github.com/python-attrs/attrs/blob/master/tests/strategies.py).
+- [ ] Changes or additions to public APIs are reflected in our type stubs (files ending in ``.pyi``).
+ - [ ] ...and used in the stub test file `tests/typing_example.py`.
+- [ ] Updated **documentation** for changed code.
+ - [ ] New functions/classes have to be added to `docs/api.rst` by hand.
+ - [ ] Changes to the signature of `@attr.s()` have to be added by hand too.
+ - [ ] Changed/added classes/methods/functions have appropriate `versionadded`, `versionchanged`, or `deprecated` [directives](http://www.sphinx-doc.org/en/stable/markup/para.html#directive-versionadded).
+- [ ] Documentation in `.rst` files is written using [semantic newlines](https://rhodesmill.org/brandon/2012/one-sentence-per-line/).
+- [ ] Changes (and possible deprecations) have news fragments in [`changelog.d`](https://github.com/python-attrs/attrs/blob/master/changelog.d).
+
+If you have *any* questions to *any* of the points above, just **submit and ask**! This checklist is here to *help* you, not to deter you from contributing!
diff --git a/third_party/python/attrs/.pre-commit-config.yaml b/third_party/python/attrs/.pre-commit-config.yaml
new file mode 100644
index 0000000000..a35fff26c8
--- /dev/null
+++ b/third_party/python/attrs/.pre-commit-config.yaml
@@ -0,0 +1,33 @@
+repos:
+ - repo: https://github.com/ambv/black
+ rev: 18.9b0
+ hooks:
+ - id: black
+ language_version: python3.7
+ # override until resolved: https://github.com/ambv/black/issues/402
+ files: \.pyi?$
+ types: []
+
+ - repo: https://gitlab.com/pycqa/flake8
+ rev: 3.7.6
+ hooks:
+ - id: flake8
+ language_version: python3.7
+
+ - repo: https://github.com/asottile/seed-isort-config
+ rev: v1.6.0
+ hooks:
+ - id: seed-isort-config
+
+ - repo: https://github.com/pre-commit/mirrors-isort
+ rev: v4.3.4
+ hooks:
+ - id: isort
+ language_version: python3.7
+
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v2.1.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: debug-statements
diff --git a/third_party/python/attrs/.readthedocs.yml b/third_party/python/attrs/.readthedocs.yml
new file mode 100644
index 0000000000..e6a8043b2d
--- /dev/null
+++ b/third_party/python/attrs/.readthedocs.yml
@@ -0,0 +1,10 @@
+---
+version: 2
+python:
+ version: 3.7
+
+ install:
+ - method: pip
+ path: .
+ extra_requirements:
+ - docs
diff --git a/third_party/python/attrs/.travis.yml b/third_party/python/attrs/.travis.yml
new file mode 100644
index 0000000000..8b135d6e65
--- /dev/null
+++ b/third_party/python/attrs/.travis.yml
@@ -0,0 +1,77 @@
+dist: xenial
+group: travis_latest
+cache:
+ directories:
+ - $HOME/.cache/pip
+
+language: python
+
+
+matrix:
+ fast_finish: true
+
+ include:
+ # lint
+ - python: "3.7"
+ stage: lint
+ env: TOXENV=lint
+ - python: "3.7"
+ env: TOXENV=manifest
+ - python: "3.7"
+ env: TOXENV=typing
+
+ # test
+ - python: "2.7"
+ stage: test
+ env: TOXENV=py27
+ - python: "3.4"
+ env: TOXENV=py34
+ - python: "3.5"
+ env: TOXENV=py35
+ - python: "3.6"
+ env: TOXENV=py36
+ - python: "pypy"
+ env: TOXENV=pypy
+ dist: trusty
+ - python: "pypy3"
+ env: TOXENV=pypy3
+ dist: trusty
+ - python: "3.7"
+ env: TOXENV=py37
+
+ # Prevent breakage by new releases
+ - python: "3.7-dev"
+ env: TOXENV=py37
+
+ # Docs
+ - python: "3.7"
+ stage: docs
+ env: TOXENV=docs
+ - python: "3.7"
+ env: TOXENV=pypi-description
+ - python: "3.7"
+ env: TOXENV=changelog
+
+ allow_failures:
+ - python: "3.7-dev"
+
+
+install:
+ - pip install --upgrade tox
+
+
+script:
+ - tox
+
+
+before_install:
+ - pip install codecov
+
+
+after_success:
+ - tox -e coverage-report
+ - codecov
+
+
+notifications:
+ email: false
diff --git a/third_party/python/attrs/AUTHORS.rst b/third_party/python/attrs/AUTHORS.rst
new file mode 100644
index 0000000000..f14ef6c607
--- /dev/null
+++ b/third_party/python/attrs/AUTHORS.rst
@@ -0,0 +1,11 @@
+Credits
+=======
+
+``attrs`` is written and maintained by `Hynek Schlawack <https://hynek.me/>`_.
+
+The development is kindly supported by `Variomedia AG <https://www.variomedia.de/>`_.
+
+A full list of contributors can be found in `GitHub's overview <https://github.com/python-attrs/attrs/graphs/contributors>`_.
+
+It’s the spiritual successor of `characteristic <https://characteristic.readthedocs.io/>`_ and aspires to fix some of its clunkiness and unfortunate decisions.
+Both were inspired by Twisted’s `FancyEqMixin <https://twistedmatrix.com/documents/current/api/twisted.python.util.FancyEqMixin.html>`_ but both are implemented using class decorators because `subclassing is bad for you <https://www.youtube.com/watch?v=3MNVP9-hglc>`_, m’kay?
diff --git a/third_party/python/attrs/CHANGELOG.rst b/third_party/python/attrs/CHANGELOG.rst
new file mode 100644
index 0000000000..1bcfb431e7
--- /dev/null
+++ b/third_party/python/attrs/CHANGELOG.rst
@@ -0,0 +1,559 @@
+Changelog
+=========
+
+Versions follow `CalVer <https://calver.org>`_ with a strict backwards compatibility policy.
+The third digit is only for regressions.
+
+.. towncrier release notes start
+
+19.1.0 (2019-03-03)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Fixed a bug where deserialized objects with ``cache_hash=True`` could have incorrect hash code values.
+ This change breaks classes with ``cache_hash=True`` when a custom ``__setstate__`` is present.
+ An exception will be thrown when applying the ``attrs`` annotation to such a class.
+ This limitation is tracked in issue `#494 <https://github.com/python-attrs/attrs/issues/494>`_.
+ `#482 <https://github.com/python-attrs/attrs/issues/482>`_
+
+
+Changes
+^^^^^^^
+
+- Add ``is_callable``, ``deep_iterable``, and ``deep_mapping`` validators.
+
+ * ``is_callable``: validates that a value is callable
+ * ``deep_iterable``: Allows recursion down into an iterable,
+ applying another validator to every member in the iterable
+ as well as applying an optional validator to the iterable itself.
+ * ``deep_mapping``: Allows recursion down into the items in a mapping object,
+ applying a key validator and a value validator to the key and value in every item.
+ Also applies an optional validator to the mapping object itself.
+
+ You can find them in the ``attr.validators`` package.
+ `#425 <https://github.com/python-attrs/attrs/issues/425>`_
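+
+  A minimal illustrative sketch (the class and attribute names are made up)::
+
+      import attr
+      from attr.validators import deep_iterable, instance_of
+
+      @attr.s
+      class Batch(object):
+          ids = attr.ib(
+              validator=deep_iterable(
+                  member_validator=instance_of(int),
+                  iterable_validator=instance_of(list),
+              )
+          )
+
+      Batch([1, 2, 3])    # passes both validators
+      Batch([1, "2", 3])  # the member validator raises TypeError
+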
+- Fixed stub files to prevent errors raised by mypy's ``disallow_any_generics = True`` option.
+ `#443 <https://github.com/python-attrs/attrs/issues/443>`_
+- Attributes with ``init=False`` now can follow after ``kw_only=True`` attributes.
+ `#450 <https://github.com/python-attrs/attrs/issues/450>`_
+- ``attrs`` now has first class support for defining exception classes.
+
+ If you define a class using ``@attr.s(auto_exc=True)`` and subclass an exception, the class will behave like a well-behaved exception class including an appropriate ``__str__`` method, and all attributes additionally available in an ``args`` attribute.
+ `#500 <https://github.com/python-attrs/attrs/issues/500>`_
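+
+  A minimal illustrative sketch (the exception name and fields are made up)::
+
+      import attr
+
+      @attr.s(auto_exc=True)
+      class NotEnoughFunds(RuntimeError):
+          balance = attr.ib()
+          required = attr.ib()
+
+      err = NotEnoughFunds(balance=5, required=10)
+      err.balance  # 5
+      err.args     # the attribute values, as described above
+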
+- Clarified documentation for hashing to warn that hashable objects should be deeply immutable (in their usage, even if this is not enforced).
+ `#503 <https://github.com/python-attrs/attrs/issues/503>`_
+
+
+----
+
+
+18.2.0 (2018-09-01)
+-------------------
+
+Deprecations
+^^^^^^^^^^^^
+
+- Comparing subclasses using ``<``, ``>``, ``<=``, and ``>=`` is now deprecated.
+ The docs always claimed that instances are only compared if the types are identical, so this is a first step to conform to the docs.
+
+ Equality operators (``==`` and ``!=``) were always strict in this regard.
+ `#394 <https://github.com/python-attrs/attrs/issues/394>`_
+
+
+Changes
+^^^^^^^
+
+- ``attrs`` now ships its own `PEP 484 <https://www.python.org/dev/peps/pep-0484/>`_ type hints.
+ Together with `mypy <http://mypy-lang.org>`_'s ``attrs`` plugin, you've got all you need for writing statically typed code in both Python 2 and 3!
+
+ At that occasion, we've also added `narrative docs <https://www.attrs.org/en/stable/types.html>`_ about type annotations in ``attrs``.
+ `#238 <https://github.com/python-attrs/attrs/issues/238>`_
+- Added *kw_only* arguments to ``attr.ib`` and ``attr.s``, and a corresponding *kw_only* attribute to ``attr.Attribute``.
+ This change makes it possible to have a generated ``__init__`` with keyword-only arguments on Python 3, relaxing the required ordering of default and non-default valued attributes.
+ `#281 <https://github.com/python-attrs/attrs/issues/281>`_,
+ `#411 <https://github.com/python-attrs/attrs/issues/411>`_
+- The test suite now runs with ``hypothesis.HealthCheck.too_slow`` disabled to prevent CI breakage on slower computers.
+ `#364 <https://github.com/python-attrs/attrs/issues/364>`_,
+ `#396 <https://github.com/python-attrs/attrs/issues/396>`_
+- ``attr.validators.in_()`` now raises a ``ValueError`` with a useful message even if the options are a string and the value is not a string.
+ `#383 <https://github.com/python-attrs/attrs/issues/383>`_
+- ``attr.asdict()`` now properly handles deeply nested lists and dictionaries.
+ `#395 <https://github.com/python-attrs/attrs/issues/395>`_
+- Added ``attr.converters.default_if_none()`` that allows replacing ``None`` values in attributes.
+  For example, ``attr.ib(converter=default_if_none(""))`` replaces ``None`` with an empty string.
+ `#400 <https://github.com/python-attrs/attrs/issues/400>`_,
+ `#414 <https://github.com/python-attrs/attrs/issues/414>`_
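+
+  A minimal illustrative sketch (names are made up)::
+
+      import attr
+      from attr.converters import default_if_none
+
+      @attr.s
+      class Config(object):
+          name = attr.ib(converter=default_if_none(""), default=None)
+
+      Config().name     # "" instead of None
+      Config("x").name  # "x"
+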
+- Fixed a reference leak where the original class would remain live after being replaced when ``slots=True`` is set.
+ `#407 <https://github.com/python-attrs/attrs/issues/407>`_
+- Slotted classes can now be made weakly referenceable by passing ``@attr.s(weakref_slot=True)``.
+ `#420 <https://github.com/python-attrs/attrs/issues/420>`_
+- Added *cache_hash* option to ``@attr.s`` which causes the hash code to be computed once and stored on the object.
+ `#425 <https://github.com/python-attrs/attrs/issues/425>`_
+- Attributes can be named ``property`` and ``itemgetter`` now.
+ `#430 <https://github.com/python-attrs/attrs/issues/430>`_
+- It is now possible to override a base class' class variable using only class annotations.
+ `#431 <https://github.com/python-attrs/attrs/issues/431>`_
+
+
+----
+
+
+18.1.0 (2018-05-03)
+-------------------
+
+Changes
+^^^^^^^
+
+- ``x=X(); x.cycle = x; repr(x)`` will no longer raise a ``RecursionError``, and will instead show as ``X(x=...)``.
+
+ `#95 <https://github.com/python-attrs/attrs/issues/95>`_
+- ``attr.ib(factory=f)`` is now syntactic sugar for the common case of ``attr.ib(default=attr.Factory(f))``.
+
+ `#178 <https://github.com/python-attrs/attrs/issues/178>`_,
+ `#356 <https://github.com/python-attrs/attrs/issues/356>`_
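+
+  A minimal illustrative sketch (names are made up)::
+
+      import attr
+
+      @attr.s
+      class Container(object):
+          items = attr.ib(factory=list)
+          # equivalent to: items = attr.ib(default=attr.Factory(list))
+
+      Container()  # Container(items=[])
+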
+- Added ``attr.fields_dict()`` that returns an ordered dictionary of ``attrs`` attributes for a class, whose keys are the attribute names.
+
+ `#290 <https://github.com/python-attrs/attrs/issues/290>`_,
+ `#349 <https://github.com/python-attrs/attrs/issues/349>`_
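+
+  A minimal illustrative sketch (names are made up)::
+
+      import attr
+
+      @attr.s
+      class C(object):
+          x = attr.ib(default=42)
+
+      attr.fields_dict(C)["x"]          # the Attribute instance for x
+      attr.fields_dict(C)["x"].default  # 42
+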
+- The order of attributes that are passed into ``attr.make_class()`` or the *these* argument of ``@attr.s()`` is now retained if the dictionary is ordered (i.e. ``dict`` on Python 3.6 and later, ``collections.OrderedDict`` otherwise).
+
+  Before, the order was always determined by the order in which the attributes had been defined, which may not be desirable when creating classes programmatically.
+
+ `#300 <https://github.com/python-attrs/attrs/issues/300>`_,
+ `#339 <https://github.com/python-attrs/attrs/issues/339>`_,
+ `#343 <https://github.com/python-attrs/attrs/issues/343>`_
+- In slotted classes, ``__getstate__`` and ``__setstate__`` now ignore the ``__weakref__`` attribute.
+
+ `#311 <https://github.com/python-attrs/attrs/issues/311>`_,
+ `#326 <https://github.com/python-attrs/attrs/issues/326>`_
+- Setting the cell type is now completely best effort.
+ This fixes ``attrs`` on Jython.
+
+  We cannot make any guarantees regarding Jython though, because our test suite cannot run due to dependency incompatibilities.
+
+ `#321 <https://github.com/python-attrs/attrs/issues/321>`_,
+ `#334 <https://github.com/python-attrs/attrs/issues/334>`_
+- If ``attr.s`` is passed a *these* argument, it will no longer attempt to remove attributes with the same name from the class body.
+
+ `#322 <https://github.com/python-attrs/attrs/issues/322>`_,
+ `#323 <https://github.com/python-attrs/attrs/issues/323>`_
+- The hash of ``attr.NOTHING`` is now vegan and faster on 32bit Python builds.
+
+ `#331 <https://github.com/python-attrs/attrs/issues/331>`_,
+ `#332 <https://github.com/python-attrs/attrs/issues/332>`_
+- The overhead of instantiating frozen dict classes is virtually eliminated.
+ `#336 <https://github.com/python-attrs/attrs/issues/336>`_
+- Generated ``__init__`` methods now have an ``__annotations__`` attribute derived from the types of the fields.
+
+ `#363 <https://github.com/python-attrs/attrs/issues/363>`_
+- We have restructured the documentation a bit to account for ``attrs``' growth in scope.
+ Instead of putting everything into the `examples <https://www.attrs.org/en/stable/examples.html>`_ page, we have started to extract narrative chapters.
+
+ So far, we've added chapters on `initialization <https://www.attrs.org/en/stable/init.html>`_ and `hashing <https://www.attrs.org/en/stable/hashing.html>`_.
+
+ Expect more to come!
+
+ `#369 <https://github.com/python-attrs/attrs/issues/369>`_,
+ `#370 <https://github.com/python-attrs/attrs/issues/370>`_
+
+
+----
+
+
+17.4.0 (2017-12-30)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- The traversal of MROs when using multiple inheritance was backward:
+ If you defined a class ``C`` that subclasses ``A`` and ``B`` like ``C(A, B)``, ``attrs`` would have collected the attributes from ``B`` *before* those of ``A``.
+
+ This is now fixed and means that in classes that employ multiple inheritance, the output of ``__repr__`` and the order of positional arguments in ``__init__`` changes.
+ Because of the nature of this bug, a proper deprecation cycle was unfortunately impossible.
+
+ Generally speaking, it's advisable to prefer ``kwargs``-based initialization anyways – *especially* if you employ multiple inheritance and diamond-shaped hierarchies.
+
+ `#298 <https://github.com/python-attrs/attrs/issues/298>`_,
+ `#299 <https://github.com/python-attrs/attrs/issues/299>`_,
+ `#304 <https://github.com/python-attrs/attrs/issues/304>`_
+- The ``__repr__`` set by ``attrs`` no longer produces an ``AttributeError`` when the instance is missing some of the specified attributes (either through deleting or after using ``init=False`` on some attributes).
+
+ This can break code that relied on ``repr(attr_cls_instance)`` raising ``AttributeError`` to check if any ``attrs``-specified members were unset.
+
+ If you were using this, you can implement a custom method for checking this::
+
+      def has_unset_members(self):
+          for field in attr.fields(type(self)):
+              try:
+                  getattr(self, field.name)
+              except AttributeError:
+                  return True
+          return False
+
+ `#308 <https://github.com/python-attrs/attrs/issues/308>`_
+
+
+Deprecations
+^^^^^^^^^^^^
+
+- The ``attr.ib(convert=callable)`` option is now deprecated in favor of ``attr.ib(converter=callable)``.
+
+ This is done to achieve consistency with other noun-based arguments like *validator*.
+
+ *convert* will keep working until at least January 2019 while raising a ``DeprecationWarning``.
+
+ `#307 <https://github.com/python-attrs/attrs/issues/307>`_
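+
+  A minimal illustrative sketch (names are made up)::
+
+      import attr
+
+      @attr.s
+      class C(object):
+          x = attr.ib(converter=int)  # preferred spelling
+          # x = attr.ib(convert=int)  # keeps working for now, but raises a DeprecationWarning
+
+      C("42").x  # 42
+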
+
+
+Changes
+^^^^^^^
+
+- Generated ``__hash__`` methods now hash the class type along with the attribute values.
+ Until now the hashes of two classes with the same values were identical which was a bug.
+
+ The generated method is also *much* faster now.
+
+ `#261 <https://github.com/python-attrs/attrs/issues/261>`_,
+ `#295 <https://github.com/python-attrs/attrs/issues/295>`_,
+ `#296 <https://github.com/python-attrs/attrs/issues/296>`_
+- ``attr.ib``\ ’s *metadata* argument now defaults to a unique empty ``dict`` instance instead of sharing a common empty ``dict`` across all attributes.
+  The singleton empty ``dict`` is still enforced.
+
+ `#280 <https://github.com/python-attrs/attrs/issues/280>`_
+- ``ctypes`` is now optional; however, if it's missing, a bare ``super()`` will not work in slotted classes.
+ This should only happen in special environments like Google App Engine.
+
+ `#284 <https://github.com/python-attrs/attrs/issues/284>`_,
+ `#286 <https://github.com/python-attrs/attrs/issues/286>`_
+- The attribute redefinition feature introduced in 17.3.0 now takes into account if an attribute is redefined via multiple inheritance.
+ In that case, the definition that is closer to the base of the class hierarchy wins.
+
+ `#285 <https://github.com/python-attrs/attrs/issues/285>`_,
+ `#287 <https://github.com/python-attrs/attrs/issues/287>`_
+- Subclasses of classes defined with ``auto_attribs=True`` can be empty now.
+
+ `#291 <https://github.com/python-attrs/attrs/issues/291>`_,
+ `#292 <https://github.com/python-attrs/attrs/issues/292>`_
+- Equality tests are *much* faster now.
+
+ `#306 <https://github.com/python-attrs/attrs/issues/306>`_
+- All generated methods now have correct ``__module__``, ``__name__``, and (on Python 3) ``__qualname__`` attributes.
+
+ `#309 <https://github.com/python-attrs/attrs/issues/309>`_
+
+
+----
+
+
+17.3.0 (2017-11-08)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Attributes are no longer defined on the class body.
+
+ This means that if you define a class ``C`` with an attribute ``x``, the class will *not* have an attribute ``x`` for introspection.
+ Instead of ``C.x``, use ``attr.fields(C).x`` or look at ``C.__attrs_attrs__``.
+ The old behavior has been deprecated since version 16.1.
+ (`#253 <https://github.com/python-attrs/attrs/issues/253>`_)
+
+
+Changes
+^^^^^^^
+
+- ``super()`` and ``__class__`` now work with slotted classes on Python 3.
+ (`#102 <https://github.com/python-attrs/attrs/issues/102>`_, `#226 <https://github.com/python-attrs/attrs/issues/226>`_, `#269 <https://github.com/python-attrs/attrs/issues/269>`_, `#270 <https://github.com/python-attrs/attrs/issues/270>`_, `#272 <https://github.com/python-attrs/attrs/issues/272>`_)
+- Added *type* argument to ``attr.ib()`` and corresponding ``type`` attribute to ``attr.Attribute``.
+
+ This change paves the way for automatic type checking and serialization (though as of this release ``attrs`` does not make use of it).
+ In Python 3.6 or higher, the value of ``attr.Attribute.type`` can alternately be set using variable type annotations
+ (see `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_).
+ (`#151 <https://github.com/python-attrs/attrs/issues/151>`_, `#214 <https://github.com/python-attrs/attrs/issues/214>`_, `#215 <https://github.com/python-attrs/attrs/issues/215>`_, `#239 <https://github.com/python-attrs/attrs/issues/239>`_)
+- The combination of ``str=True`` and ``slots=True`` now works on Python 2.
+ (`#198 <https://github.com/python-attrs/attrs/issues/198>`_)
+- ``attr.Factory`` is hashable again.
+ (`#204 <https://github.com/python-attrs/attrs/issues/204>`_)
+- Subclasses now can overwrite attribute definitions of their base classes.
+
+ That means that you can -- for example -- change the default value for an attribute by redefining it.
+ (`#221 <https://github.com/python-attrs/attrs/issues/221>`_, `#229 <https://github.com/python-attrs/attrs/issues/229>`_)
+- Added new option *auto_attribs* to ``@attr.s`` that allows collecting annotated fields without assigning ``attr.ib()`` to them.
+
+  Assigning ``attr.ib()`` to a field is still possible in order to supply options like validators.
+  Assigning any other value is treated as if it were passed as ``attr.ib(default=value)`` -- passing an instance of ``attr.Factory`` also works as expected.
+ (`#262 <https://github.com/python-attrs/attrs/issues/262>`_, `#277 <https://github.com/python-attrs/attrs/issues/277>`_)
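+
+  A minimal illustrative sketch (names are made up; requires Python 3.6+ variable annotations)::
+
+      import attr
+
+      @attr.s(auto_attribs=True)
+      class Point:
+          x: int
+          y: int = 0
+          z: int = attr.ib(default=0, validator=attr.validators.instance_of(int))
+
+      Point(1)  # Point(x=1, y=0, z=0)
+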
+- Instances of classes created using ``attr.make_class()`` can now be pickled.
+ (`#282 <https://github.com/python-attrs/attrs/issues/282>`_)
+
+
+----
+
+
+17.2.0 (2017-05-24)
+-------------------
+
+
+Changes:
+^^^^^^^^
+
+- Validators are hashable again.
+ Note that validators may become frozen in the future, pending availability of no-overhead frozen classes.
+ `#192 <https://github.com/python-attrs/attrs/issues/192>`_
+
+
+----
+
+
+17.1.0 (2017-05-16)
+-------------------
+
+To encourage more participation, the project has also been moved into a `dedicated GitHub organization <https://github.com/python-attrs/>`_ and everyone is most welcome to join!
+
+``attrs`` also has a logo now!
+
+.. image:: https://www.attrs.org/en/latest/_static/attrs_logo.png
+ :alt: attrs logo
+
+
+Backward-incompatible Changes:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- ``attrs`` will set the ``__hash__()`` method to ``None`` by default now.
+ The way hashes were handled before was in conflict with `Python's specification <https://docs.python.org/3/reference/datamodel.html#object.__hash__>`_.
+ This *may* break some software although this breakage is most likely just surfacing of latent bugs.
+ You can always make ``attrs`` create the ``__hash__()`` method using ``@attr.s(hash=True)``.
+ See `#136`_ for the rationale of this change.
+
+ .. warning::
+
+ Please *do not* upgrade blindly and *do* test your software!
+ *Especially* if you use instances as dict keys or put them into sets!
+
+- Correspondingly, ``attr.ib``'s *hash* argument is ``None`` by default too and mirrors the *cmp* argument as it should.
+
+
+Deprecations:
+^^^^^^^^^^^^^
+
+- ``attr.assoc()`` is now deprecated in favor of ``attr.evolve()`` and will stop working in 2018.
+
+
+Changes:
+^^^^^^^^
+
+- Fix default hashing behavior.
+ Now *hash* mirrors the value of *cmp* and classes are unhashable by default.
+ `#136`_
+ `#142 <https://github.com/python-attrs/attrs/issues/142>`_
+- Added ``attr.evolve()`` that, given an instance of an ``attrs`` class and field changes as keyword arguments, will instantiate a copy of the given instance with the changes applied.
+ ``evolve()`` replaces ``assoc()``, which is now deprecated.
+ ``evolve()`` is significantly faster than ``assoc()``, and requires the class have an initializer that can take the field values as keyword arguments (like ``attrs`` itself can generate).
+ `#116 <https://github.com/python-attrs/attrs/issues/116>`_
+ `#124 <https://github.com/python-attrs/attrs/pull/124>`_
+ `#135 <https://github.com/python-attrs/attrs/pull/135>`_
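+
+  A minimal illustrative sketch (names are made up)::
+
+      import attr
+
+      @attr.s
+      class User(object):
+          name = attr.ib()
+          admin = attr.ib(default=False)
+
+      u = User("alice")
+      attr.evolve(u, admin=True)  # a new User(name='alice', admin=True); u itself is unchanged
+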
+- ``FrozenInstanceError`` is now raised when trying to delete an attribute from a frozen class.
+ `#118 <https://github.com/python-attrs/attrs/pull/118>`_
+- Frozen-ness of classes is now inherited.
+ `#128 <https://github.com/python-attrs/attrs/pull/128>`_
+- ``__attrs_post_init__()`` is now run if validation is disabled.
+ `#130 <https://github.com/python-attrs/attrs/pull/130>`_
+- Added ``attr.validators.in_(options)`` that, given the allowed ``options``, checks whether the attribute value is among them.
+ This can be used to check constants, enums, mappings, etc.
+ `#181 <https://github.com/python-attrs/attrs/pull/181>`_
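+
+  A minimal illustrative sketch (names are made up)::
+
+      import attr
+      from attr.validators import in_
+
+      @attr.s
+      class Order(object):
+          state = attr.ib(validator=in_(["new", "paid", "shipped"]))
+
+      Order("paid")  # passes
+      Order("lost")  # raises ValueError
+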
+- Added ``attr.validators.and_()`` that composes multiple validators into one.
+ `#161 <https://github.com/python-attrs/attrs/issues/161>`_
+- For convenience, the *validator* argument of ``attr.ib`` now can take a list of validators that are wrapped using ``and_()``.
+ `#138 <https://github.com/python-attrs/attrs/issues/138>`_
+- Accordingly, ``attr.validators.optional()`` now can take a list of validators too.
+ `#161 <https://github.com/python-attrs/attrs/issues/161>`_
+- Validators can now be defined conveniently inline by using the attribute as a decorator.
+ Check out the `validator examples <http://www.attrs.org/en/stable/init.html#decorator>`_ to see it in action!
+ `#143 <https://github.com/python-attrs/attrs/issues/143>`_
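+
+  A minimal illustrative sketch (names are made up)::
+
+      import attr
+
+      @attr.s
+      class C(object):
+          x = attr.ib()
+
+          @x.validator
+          def _check_x(self, attribute, value):
+              if value < 0:
+                  raise ValueError("x must be non-negative")
+
+      C(1)   # passes
+      C(-1)  # raises ValueError
+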
+- ``attr.Factory()`` now has a *takes_self* argument that makes the initializer pass the partially initialized instance into the factory.
+  In other words, you can define attribute defaults based on other attributes.
+ `#165`_
+ `#189 <https://github.com/python-attrs/attrs/issues/189>`_
+- Default factories can now also be defined inline using decorators.
+ They are *always* passed the partially initialized instance.
+ `#165`_
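+
+  A minimal illustrative sketch of both forms described above (names are made up)::
+
+      import attr
+
+      @attr.s
+      class Line(object):
+          start = attr.ib()
+          # takes_self=True: the factory receives the partially initialized instance
+          end = attr.ib(default=attr.Factory(lambda self: self.start, takes_self=True))
+
+      @attr.s
+      class Line2(object):
+          start = attr.ib()
+          end = attr.ib()
+
+          @end.default
+          def _end_default(self):  # decorator form; also receives the instance
+              return self.start
+
+      Line(3)   # Line(start=3, end=3)
+      Line2(3)  # Line2(start=3, end=3)
+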
+- Conversion can now be made optional using ``attr.converters.optional()``.
+ `#105 <https://github.com/python-attrs/attrs/issues/105>`_
+ `#173 <https://github.com/python-attrs/attrs/pull/173>`_
+- ``attr.make_class()`` now accepts the keyword argument ``bases`` which allows for subclassing.
+ `#152 <https://github.com/python-attrs/attrs/pull/152>`_
+- Metaclasses are now preserved with ``slots=True``.
+ `#155 <https://github.com/python-attrs/attrs/pull/155>`_
+
+.. _`#136`: https://github.com/python-attrs/attrs/issues/136
+.. _`#165`: https://github.com/python-attrs/attrs/issues/165
+
+
+----
+
+
+16.3.0 (2016-11-24)
+-------------------
+
+Changes:
+^^^^^^^^
+
+- Attributes now can have user-defined metadata which greatly improves ``attrs``'s extensibility.
+ `#96 <https://github.com/python-attrs/attrs/pull/96>`_
+- Allow for a ``__attrs_post_init__()`` method that -- if defined -- will get called at the end of the ``attrs``-generated ``__init__()`` method.
+ `#111 <https://github.com/python-attrs/attrs/pull/111>`_
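+
+  A minimal illustrative sketch (names are made up)::
+
+      import attr
+
+      @attr.s
+      class Rect(object):
+          w = attr.ib()
+          h = attr.ib()
+          area = attr.ib(init=False, default=0)
+
+          def __attrs_post_init__(self):
+              self.area = self.w * self.h
+
+      Rect(2, 3).area  # 6
+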
+- Added ``@attr.s(str=True)`` that will optionally create a ``__str__()`` method that is identical to ``__repr__()``.
+  This is mainly useful with ``Exception``\ s and other classes that rely on a useful ``__str__`` implementation but overwrite the default one with a poor one of their own.
+ Default Python class behavior is to use ``__repr__()`` as ``__str__()`` anyways.
+
+ If you tried using ``attrs`` with ``Exception``\ s and were puzzled by the tracebacks: this option is for you.
+- ``__name__`` is no longer overwritten with ``__qualname__`` for ``attr.s(slots=True)`` classes.
+ `#99 <https://github.com/python-attrs/attrs/issues/99>`_
+
+
+----
+
+
+16.2.0 (2016-09-17)
+-------------------
+
+Changes:
+^^^^^^^^
+
+- Added ``attr.astuple()`` that -- similarly to ``attr.asdict()`` -- returns the instance as a tuple.
+ `#77 <https://github.com/python-attrs/attrs/issues/77>`_
+- Converters now work with frozen classes.
+ `#76 <https://github.com/python-attrs/attrs/issues/76>`_
+- Instantiation of ``attrs`` classes with converters is now significantly faster.
+ `#80 <https://github.com/python-attrs/attrs/pull/80>`_
+- Pickling now works with slotted classes.
+ `#81 <https://github.com/python-attrs/attrs/issues/81>`_
+- ``attr.assoc()`` now works with slotted classes.
+ `#84 <https://github.com/python-attrs/attrs/issues/84>`_
+- The tuple returned by ``attr.fields()`` now also allows accessing the ``Attribute`` instances by name.
+ Yes, we've subclassed ``tuple`` so you don't have to!
+ Therefore ``attr.fields(C).x`` is equivalent to the deprecated ``C.x`` and works with slotted classes.
+ `#88 <https://github.com/python-attrs/attrs/issues/88>`_
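+
+  A minimal illustrative sketch (names are made up)::
+
+      import attr
+
+      @attr.s
+      class C(object):
+          x = attr.ib(default=1)
+
+      attr.fields(C)            # a tuple of Attribute instances
+      attr.fields(C).x          # the Attribute named "x"
+      attr.fields(C).x.default  # 1
+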
+
+
+----
+
+
+16.1.0 (2016-08-30)
+-------------------
+
+Backward-incompatible Changes:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- All instances where function arguments were called ``cl`` have been changed to the more Pythonic ``cls``.
+  Since it was always the first argument, it's doubtful anyone ever called those functions using the keyword form.
+ If so, sorry for any breakage but there's no practical deprecation path to solve this ugly wart.
+
+
+Deprecations:
+^^^^^^^^^^^^^
+
+- Accessing ``Attribute`` instances on class objects is now deprecated and will stop working in 2017.
+  If you need introspection, please use the ``__attrs_attrs__`` attribute or the ``attr.fields()`` function, which also carry them.
+  In the future, the attributes that are defined on the class body and are usually overwritten in your ``__init__`` method will simply be removed after ``@attr.s`` has been applied.
+
+ This will remove the confusing error message if you write your own ``__init__`` and forget to initialize some attribute.
+ Instead you will get a straightforward ``AttributeError``.
+ In other words: decorated classes will work more like plain Python classes which was always ``attrs``'s goal.
+- The serious business aliases ``attr.attributes`` and ``attr.attr`` have been deprecated in favor of ``attr.attrs`` and ``attr.attrib`` which are much more consistent and frankly obvious in hindsight.
+ They will be purged from documentation immediately but there are no plans to actually remove them.
+
+
+Changes:
+^^^^^^^^
+
+- ``attr.asdict()``\ 's ``dict_factory`` argument is now propagated on recursion.
+ `#45 <https://github.com/python-attrs/attrs/issues/45>`_
+- ``attr.asdict()``, ``attr.has()`` and ``attr.fields()`` are significantly faster.
+ `#48 <https://github.com/python-attrs/attrs/issues/48>`_
+ `#51 <https://github.com/python-attrs/attrs/issues/51>`_
+- Add ``attr.attrs`` and ``attr.attrib`` as more consistent aliases for ``attr.s`` and ``attr.ib``.
+- Add *frozen* option to ``attr.s`` that will make instances best-effort immutable.
+ `#60 <https://github.com/python-attrs/attrs/issues/60>`_
+- ``attr.asdict()`` now takes ``retain_collection_types`` as an argument.
+ If ``True``, it does not convert attributes of type ``tuple`` or ``set`` to ``list``.
+ `#69 <https://github.com/python-attrs/attrs/issues/69>`_
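+
+  A minimal illustrative sketch (names are made up)::
+
+      import attr
+
+      @attr.s
+      class C(object):
+          xs = attr.ib()
+
+      attr.asdict(C((1, 2)))                                # {'xs': [1, 2]}
+      attr.asdict(C((1, 2)), retain_collection_types=True)  # {'xs': (1, 2)}
+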
+
+
+----
+
+
+16.0.0 (2016-05-23)
+-------------------
+
+Backward-incompatible Changes:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Python 3.3 and 2.6 are no longer supported.
+ They may work by chance but any effort to keep them working has ceased.
+
+ The last Python 2.6 release was on October 29, 2013 and is no longer supported by the CPython core team.
+ Major Python packages like Django and Twisted dropped Python 2.6 a while ago already.
+
+ Python 3.3 never had a significant user base and wasn't part of any distribution's LTS release.
+
+Changes:
+^^^^^^^^
+
+- ``__slots__`` have arrived!
+ Classes now can automatically be `slotted <https://docs.python.org/3/reference/datamodel.html#slots>`_-style (and save your precious memory) just by passing ``slots=True``.
+ `#35 <https://github.com/python-attrs/attrs/issues/35>`_
+- Allow the case of initializing attributes that are set to ``init=False``.
+ This allows for clean initializer parameter lists while being able to initialize attributes to default values.
+ `#32 <https://github.com/python-attrs/attrs/issues/32>`_
+- ``attr.asdict()`` can now produce arbitrary mappings instead of Python ``dict``\ s when provided with a ``dict_factory`` argument.
+ `#40 <https://github.com/python-attrs/attrs/issues/40>`_
+- Multiple performance improvements.
+
+
+----
+
+
+15.2.0 (2015-12-08)
+-------------------
+
+Changes:
+^^^^^^^^
+
+- Added a ``convert`` argument to ``attr.ib``, which allows specifying a function to run on arguments.
+ This allows for simple type conversions, e.g. with ``attr.ib(convert=int)``.
+ `#26 <https://github.com/python-attrs/attrs/issues/26>`_
+- Speed up object creation when attribute validators are used.
+ `#28 <https://github.com/python-attrs/attrs/issues/28>`_
+
+
+----
+
+
+15.1.0 (2015-08-20)
+-------------------
+
+Changes:
+^^^^^^^^
+
+- Added ``attr.validators.optional()`` that wraps other validators allowing attributes to be ``None``.
+ `#16 <https://github.com/python-attrs/attrs/issues/16>`_
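+
+  A minimal illustrative sketch (names are made up)::
+
+      import attr
+      from attr.validators import instance_of, optional
+
+      @attr.s
+      class C(object):
+          x = attr.ib(validator=optional(instance_of(int)), default=None)
+
+      C()      # x stays None, accepted
+      C(42)    # accepted
+      C("42")  # raises TypeError
+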
+- Multi-level inheritance now works.
+ `#24 <https://github.com/python-attrs/attrs/issues/24>`_
+- ``__repr__()`` now works with non-redecorated subclasses.
+ `#20 <https://github.com/python-attrs/attrs/issues/20>`_
+
+
+----
+
+
+15.0.0 (2015-04-15)
+-------------------
+
+Changes:
+^^^^^^^^
+
+Initial release.
diff --git a/third_party/python/attrs/LICENSE b/third_party/python/attrs/LICENSE
new file mode 100644
index 0000000000..7ae3df9309
--- /dev/null
+++ b/third_party/python/attrs/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Hynek Schlawack
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/python/attrs/MANIFEST.in b/third_party/python/attrs/MANIFEST.in
new file mode 100644
index 0000000000..852830df30
--- /dev/null
+++ b/third_party/python/attrs/MANIFEST.in
@@ -0,0 +1,23 @@
+include LICENSE *.rst *.toml *.yml *.yaml
+graft .github
+
+# Stubs
+include src/attr/py.typed
+recursive-include src *.pyi
+
+# Tests
+include tox.ini .coveragerc conftest.py
+recursive-include tests *.py
+
+# Documentation
+include docs/Makefile docs/docutils.conf
+recursive-include docs *.png
+recursive-include docs *.svg
+recursive-include docs *.py
+recursive-include docs *.rst
+prune docs/_build
+
+# Just to keep check-manifest happy; on releases those files are gone.
+# Last rule wins!
+exclude changelog.d/*.rst
+include changelog.d/towncrier_template.rst
diff --git a/third_party/python/attrs/PKG-INFO b/third_party/python/attrs/PKG-INFO
new file mode 100644
index 0000000000..dd7a8f0b36
--- /dev/null
+++ b/third_party/python/attrs/PKG-INFO
@@ -0,0 +1,231 @@
+Metadata-Version: 2.1
+Name: attrs
+Version: 19.1.0
+Summary: Classes Without Boilerplate
+Home-page: https://www.attrs.org/
+Author: Hynek Schlawack
+Author-email: hs@ox.cx
+Maintainer: Hynek Schlawack
+Maintainer-email: hs@ox.cx
+License: MIT
+Project-URL: Documentation, https://www.attrs.org/
+Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues
+Project-URL: Source Code, https://github.com/python-attrs/attrs
+Description: .. image:: https://www.attrs.org/en/latest/_static/attrs_logo.png
+ :alt: attrs Logo
+
+ ======================================
+ ``attrs``: Classes Without Boilerplate
+ ======================================
+
+ .. image:: https://readthedocs.org/projects/attrs/badge/?version=stable
+ :target: https://www.attrs.org/en/stable/?badge=stable
+ :alt: Documentation Status
+
+ .. image:: https://travis-ci.org/python-attrs/attrs.svg?branch=master
+ :target: https://travis-ci.org/python-attrs/attrs
+ :alt: CI Status
+
+ .. image:: https://codecov.io/github/python-attrs/attrs/branch/master/graph/badge.svg
+ :target: https://codecov.io/github/python-attrs/attrs
+ :alt: Test Coverage
+
+ .. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+ :alt: Code style: black
+
+ .. teaser-begin
+
+ ``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder <https://nedbatchelder.com/blog/200605/dunder.html>`_ methods).
+
+ Its main goal is to help you to write **concise** and **correct** software without slowing down your code.
+
+ .. -spiel-end-
+
+ For that, it gives you a class decorator and a way to declaratively define the attributes on that class:
+
+ .. -code-begin-
+
+ .. code-block:: pycon
+
+ >>> import attr
+
+ >>> @attr.s
+ ... class SomeClass(object):
+ ... a_number = attr.ib(default=42)
+ ... list_of_numbers = attr.ib(factory=list)
+ ...
+ ... def hard_math(self, another_number):
+ ... return self.a_number + sum(self.list_of_numbers) * another_number
+
+
+ >>> sc = SomeClass(1, [1, 2, 3])
+ >>> sc
+ SomeClass(a_number=1, list_of_numbers=[1, 2, 3])
+
+ >>> sc.hard_math(3)
+ 19
+ >>> sc == SomeClass(1, [1, 2, 3])
+ True
+ >>> sc != SomeClass(2, [3, 2, 1])
+ True
+
+ >>> attr.asdict(sc)
+ {'a_number': 1, 'list_of_numbers': [1, 2, 3]}
+
+ >>> SomeClass()
+ SomeClass(a_number=42, list_of_numbers=[])
+
+ >>> C = attr.make_class("C", ["a", "b"])
+ >>> C("foo", "bar")
+ C(a='foo', b='bar')
+
+
+ After *declaring* your attributes ``attrs`` gives you:
+
+ - a concise and explicit overview of the class's attributes,
+ - a nice human-readable ``__repr__``,
+ - a complete set of comparison methods,
+ - an initializer,
+ - and much more,
+
+ *without* writing dull boilerplate code again and again and *without* runtime performance penalties.
+
+ On Python 3.6 and later, you can often even drop the calls to ``attr.ib()`` by using `type annotations <https://www.attrs.org/en/latest/types.html>`_.
+
+ This gives you the power to use actual classes with actual types in your code instead of confusing ``tuple``\ s or `confusingly behaving <https://www.attrs.org/en/stable/why.html#namedtuples>`_ ``namedtuple``\ s.
+ Which in turn encourages you to write *small classes* that do `one thing well <https://www.destroyallsoftware.com/talks/boundaries>`_.
+ Never again violate the `single responsibility principle <https://en.wikipedia.org/wiki/Single_responsibility_principle>`_ just because implementing ``__init__`` et al is a painful drag.
+
+
+ .. -testimonials-
+
+ Testimonials
+ ============
+
+ **Amber Hawkie Brown**, Twisted Release Manager and Computer Owl:
+
+ Writing a fully-functional class using attrs takes me less time than writing this testimonial.
+
+
+ **Glyph Lefkowitz**, creator of `Twisted <https://twistedmatrix.com/>`_, `Automat <https://pypi.org/project/Automat/>`_, and other open source software, in `The One Python Library Everyone Needs <https://glyph.twistedmatrix.com/2016/08/attrs.html>`_:
+
+ I’m looking forward to being able to program in Python-with-attrs everywhere.
+ It exerts a subtle, but positive, design influence in all the codebases I’ve seen it used in.
+
+
+ **Kenneth Reitz**, author of `Requests <http://www.python-requests.org/>`_ and Developer Advocate at DigitalOcean, (`on paper no less <https://twitter.com/hynek/status/866817877650751488>`_!):
+
+ attrs—classes for humans. I like it.
+
+
+ **Łukasz Langa**, prolific CPython core developer and Production Engineer at Facebook:
+
+ I'm increasingly digging your attr.ocity. Good job!
+
+
+ .. -end-
+
+ .. -project-information-
+
+ Getting Help
+ ============
+
+ Please use the ``python-attrs`` tag on `StackOverflow <https://stackoverflow.com/questions/tagged/python-attrs>`_ to get help.
+
+ Answering questions of your fellow developers is also a great way to help the project!
+
+
+ Project Information
+ ===================
+
+ ``attrs`` is released under the `MIT <https://choosealicense.com/licenses/mit/>`_ license,
+ its documentation lives at `Read the Docs <https://www.attrs.org/>`_,
+ the code on `GitHub <https://github.com/python-attrs/attrs>`_,
+ and the latest release on `PyPI <https://pypi.org/project/attrs/>`_.
+ It’s rigorously tested on Python 2.7, 3.4+, and PyPy.
+
+ We collect information on **third-party extensions** in our `wiki <https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs>`_.
+ Feel free to browse and add your own!
+
+ If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide <https://www.attrs.org/en/latest/contributing.html>`_ to get you started!
+
+
+ Release Information
+ ===================
+
+ 19.1.0 (2019-03-03)
+ -------------------
+
+ Backward-incompatible Changes
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+ - Fixed a bug where deserialized objects with ``cache_hash=True`` could have incorrect hash code values.
+ This change breaks classes with ``cache_hash=True`` when a custom ``__setstate__`` is present.
+ An exception will be thrown when applying the ``attrs`` annotation to such a class.
+ This limitation is tracked in issue `#494 <https://github.com/python-attrs/attrs/issues/494>`_.
+ `#482 <https://github.com/python-attrs/attrs/issues/482>`_
+
+
+ Changes
+ ^^^^^^^
+
+ - Add ``is_callable``, ``deep_iterable``, and ``deep_mapping`` validators.
+
+ * ``is_callable``: validates that a value is callable
+ * ``deep_iterable``: Allows recursion down into an iterable,
+ applying another validator to every member in the iterable
+ as well as applying an optional validator to the iterable itself.
+ * ``deep_mapping``: Allows recursion down into the items in a mapping object,
+ applying a key validator and a value validator to the key and value in every item.
+ Also applies an optional validator to the mapping object itself.
+
+ You can find them in the ``attr.validators`` package.
+ `#425 <https://github.com/python-attrs/attrs/issues/425>`_
+ - Fixed stub files to prevent errors raised by mypy's ``disallow_any_generics = True`` option.
+ `#443 <https://github.com/python-attrs/attrs/issues/443>`_
+ - Attributes with ``init=False`` now can follow after ``kw_only=True`` attributes.
+ `#450 <https://github.com/python-attrs/attrs/issues/450>`_
+ - ``attrs`` now has first class support for defining exception classes.
+
+ If you define a class using ``@attr.s(auto_exc=True)`` and subclass an exception, the class will behave like a well-behaved exception class including an appropriate ``__str__`` method, and all attributes additionally available in an ``args`` attribute.
+ `#500 <https://github.com/python-attrs/attrs/issues/500>`_
+ - Clarified documentation for hashing to warn that hashable objects should be deeply immutable (in their usage, even if this is not enforced).
+ `#503 <https://github.com/python-attrs/attrs/issues/503>`_
+
+ `Full changelog <https://www.attrs.org/en/stable/changelog.html>`_.
+
+ Credits
+ =======
+
+ ``attrs`` is written and maintained by `Hynek Schlawack <https://hynek.me/>`_.
+
+ The development is kindly supported by `Variomedia AG <https://www.variomedia.de/>`_.
+
+ A full list of contributors can be found in `GitHub's overview <https://github.com/python-attrs/attrs/graphs/contributors>`_.
+
+ It’s the spiritual successor of `characteristic <https://characteristic.readthedocs.io/>`_ and aspires to fix some of its clunkiness and unfortunate decisions.
+ Both were inspired by Twisted’s `FancyEqMixin <https://twistedmatrix.com/documents/current/api/twisted.python.util.FancyEqMixin.html>`_ but both are implemented using class decorators because `subclassing is bad for you <https://www.youtube.com/watch?v=3MNVP9-hglc>`_, m’kay?
+
+Keywords: class,attribute,boilerplate
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+Provides-Extra: tests
+Provides-Extra: dev
+Provides-Extra: docs
diff --git a/third_party/python/attrs/README.rst b/third_party/python/attrs/README.rst
new file mode 100644
index 0000000000..db287f73b9
--- /dev/null
+++ b/third_party/python/attrs/README.rst
@@ -0,0 +1,138 @@
+.. image:: https://www.attrs.org/en/latest/_static/attrs_logo.png
+ :alt: attrs Logo
+
+======================================
+``attrs``: Classes Without Boilerplate
+======================================
+
+.. image:: https://readthedocs.org/projects/attrs/badge/?version=stable
+ :target: https://www.attrs.org/en/stable/?badge=stable
+ :alt: Documentation Status
+
+.. image:: https://travis-ci.org/python-attrs/attrs.svg?branch=master
+ :target: https://travis-ci.org/python-attrs/attrs
+ :alt: CI Status
+
+.. image:: https://codecov.io/github/python-attrs/attrs/branch/master/graph/badge.svg
+ :target: https://codecov.io/github/python-attrs/attrs
+ :alt: Test Coverage
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+ :alt: Code style: black
+
+.. teaser-begin
+
+``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder <https://nedbatchelder.com/blog/200605/dunder.html>`_ methods).
+
+Its main goal is to help you to write **concise** and **correct** software without slowing down your code.
+
+.. -spiel-end-
+
+For that, it gives you a class decorator and a way to declaratively define the attributes on that class:
+
+.. -code-begin-
+
+.. code-block:: pycon
+
+ >>> import attr
+
+ >>> @attr.s
+ ... class SomeClass(object):
+ ... a_number = attr.ib(default=42)
+ ... list_of_numbers = attr.ib(factory=list)
+ ...
+ ... def hard_math(self, another_number):
+ ... return self.a_number + sum(self.list_of_numbers) * another_number
+
+
+ >>> sc = SomeClass(1, [1, 2, 3])
+ >>> sc
+ SomeClass(a_number=1, list_of_numbers=[1, 2, 3])
+
+ >>> sc.hard_math(3)
+ 19
+ >>> sc == SomeClass(1, [1, 2, 3])
+ True
+ >>> sc != SomeClass(2, [3, 2, 1])
+ True
+
+ >>> attr.asdict(sc)
+ {'a_number': 1, 'list_of_numbers': [1, 2, 3]}
+
+ >>> SomeClass()
+ SomeClass(a_number=42, list_of_numbers=[])
+
+ >>> C = attr.make_class("C", ["a", "b"])
+ >>> C("foo", "bar")
+ C(a='foo', b='bar')
+
+
+After *declaring* your attributes ``attrs`` gives you:
+
+- a concise and explicit overview of the class's attributes,
+- a nice human-readable ``__repr__``,
+- a complete set of comparison methods,
+- an initializer,
+- and much more,
+
+*without* writing dull boilerplate code again and again and *without* runtime performance penalties.
+
+On Python 3.6 and later, you can often even drop the calls to ``attr.ib()`` by using `type annotations <https://www.attrs.org/en/latest/types.html>`_.
+
+This gives you the power to use actual classes with actual types in your code instead of confusing ``tuple``\ s or `confusingly behaving <https://www.attrs.org/en/stable/why.html#namedtuples>`_ ``namedtuple``\ s.
+Which in turn encourages you to write *small classes* that do `one thing well <https://www.destroyallsoftware.com/talks/boundaries>`_.
+Never again violate the `single responsibility principle <https://en.wikipedia.org/wiki/Single_responsibility_principle>`_ just because implementing ``__init__`` et al is a painful drag.
+
+
+.. -testimonials-
+
+Testimonials
+============
+
+**Amber Hawkie Brown**, Twisted Release Manager and Computer Owl:
+
+ Writing a fully-functional class using attrs takes me less time than writing this testimonial.
+
+
+**Glyph Lefkowitz**, creator of `Twisted <https://twistedmatrix.com/>`_, `Automat <https://pypi.org/project/Automat/>`_, and other open source software, in `The One Python Library Everyone Needs <https://glyph.twistedmatrix.com/2016/08/attrs.html>`_:
+
+ I’m looking forward to being able to program in Python-with-attrs everywhere.
+ It exerts a subtle, but positive, design influence in all the codebases I’ve seen it used in.
+
+
+**Kenneth Reitz**, author of `Requests <http://www.python-requests.org/>`_ and Developer Advocate at DigitalOcean, (`on paper no less <https://twitter.com/hynek/status/866817877650751488>`_!):
+
+ attrs—classes for humans. I like it.
+
+
+**Łukasz Langa**, prolific CPython core developer and Production Engineer at Facebook:
+
+ I'm increasingly digging your attr.ocity. Good job!
+
+
+.. -end-
+
+.. -project-information-
+
+Getting Help
+============
+
+Please use the ``python-attrs`` tag on `StackOverflow <https://stackoverflow.com/questions/tagged/python-attrs>`_ to get help.
+
+Answering questions of your fellow developers is also a great way to help the project!
+
+
+Project Information
+===================
+
+``attrs`` is released under the `MIT <https://choosealicense.com/licenses/mit/>`_ license,
+its documentation lives at `Read the Docs <https://www.attrs.org/>`_,
+the code on `GitHub <https://github.com/python-attrs/attrs>`_,
+and the latest release on `PyPI <https://pypi.org/project/attrs/>`_.
+It’s rigorously tested on Python 2.7, 3.4+, and PyPy.
+
+We collect information on **third-party extensions** in our `wiki <https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs>`_.
+Feel free to browse and add your own!
+
+If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide <https://www.attrs.org/en/latest/contributing.html>`_ to get you started!
diff --git a/third_party/python/attrs/changelog.d/towncrier_template.rst b/third_party/python/attrs/changelog.d/towncrier_template.rst
new file mode 100644
index 0000000000..29ca74c4e8
--- /dev/null
+++ b/third_party/python/attrs/changelog.d/towncrier_template.rst
@@ -0,0 +1,35 @@
+{% for section, _ in sections.items() %}
+{% set underline = underlines[0] %}{% if section %}{{section}}
+{{ underline * section|length }}{% set underline = underlines[1] %}
+
+{% endif %}
+
+{% if sections[section] %}
+{% for category, val in definitions.items() if category in sections[section]%}
+{{ definitions[category]['name'] }}
+{{ underline * definitions[category]['name']|length }}
+
+{% if definitions[category]['showcontent'] %}
+{% for text, values in sections[section][category].items() %}
+- {{ text }}
+ {{ values|join(',\n ') }}
+{% endfor %}
+
+{% else %}
+- {{ sections[section][category]['']|join(', ') }}
+
+{% endif %}
+{% if sections[section][category]|length == 0 %}
+No significant changes.
+
+{% else %}
+{% endif %}
+
+{% endfor %}
+{% else %}
+No significant changes.
+
+
+{% endif %}
+{% endfor %}
+----
diff --git a/third_party/python/attrs/codecov.yml b/third_party/python/attrs/codecov.yml
new file mode 100644
index 0000000000..60a1e5c12e
--- /dev/null
+++ b/third_party/python/attrs/codecov.yml
@@ -0,0 +1,10 @@
+---
+comment: false
+coverage:
+ status:
+ patch:
+ default:
+ target: "100"
+ project:
+ default:
+ target: "100"
diff --git a/third_party/python/attrs/conftest.py b/third_party/python/attrs/conftest.py
new file mode 100644
index 0000000000..cce4950cbd
--- /dev/null
+++ b/third_party/python/attrs/conftest.py
@@ -0,0 +1,41 @@
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+import pytest
+
+from hypothesis import HealthCheck, settings
+
+from attr._compat import PYPY
+
+
+def pytest_configure(config):
+ # HealthCheck.too_slow causes more trouble than good -- especially in CIs.
+ settings.register_profile(
+ "patience", settings(suppress_health_check=[HealthCheck.too_slow])
+ )
+ settings.load_profile("patience")
+
+
+@pytest.fixture(scope="session")
+def C():
+ """
+ Return a simple but fully featured attrs class with an x and a y attribute.
+ """
+ import attr
+
+ @attr.s
+ class C(object):
+ x = attr.ib()
+ y = attr.ib()
+
+ return C
+
+
+collect_ignore = []
+if sys.version_info[:2] < (3, 6):
+ collect_ignore.extend(
+ ["tests/test_annotations.py", "tests/test_init_subclass.py"]
+ )
+elif PYPY: # FIXME: Currently our tests fail on pypy3. See #509
+ collect_ignore.extend(["tests/test_annotations.py"])
diff --git a/third_party/python/attrs/pyproject.toml b/third_party/python/attrs/pyproject.toml
new file mode 100644
index 0000000000..5657791dfd
--- /dev/null
+++ b/third_party/python/attrs/pyproject.toml
@@ -0,0 +1,36 @@
+[build-system]
+requires = ["setuptools>=40.6.0", "wheel"]
+build-backend = "setuptools.build_meta"
+
+
+[tool.black]
+line-length = 79
+
+
+[tool.towncrier]
+ package = "attr"
+ package_dir = "src"
+ filename = "CHANGELOG.rst"
+ template = "changelog.d/towncrier_template.rst"
+ issue_format = "`#{issue} <https://github.com/python-attrs/attrs/issues/{issue}>`_"
+ directory = "changelog.d"
+ title_format = "{version} ({project_date})"
+ underlines = ["-", "^"]
+
+ [[tool.towncrier.section]]
+ path = ""
+
+ [[tool.towncrier.type]]
+ directory = "breaking"
+ name = "Backward-incompatible Changes"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "deprecation"
+ name = "Deprecations"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "change"
+ name = "Changes"
+ showcontent = true
diff --git a/third_party/python/attrs/setup.cfg b/third_party/python/attrs/setup.cfg
new file mode 100644
index 0000000000..0dfa1a84cc
--- /dev/null
+++ b/third_party/python/attrs/setup.cfg
@@ -0,0 +1,31 @@
+[bdist_wheel]
+universal = 1
+
+[metadata]
+license_file = LICENSE
+
+[tool:pytest]
+minversion = 3.0
+strict = true
+addopts = -ra
+testpaths = tests
+filterwarnings =
+ once::Warning
+ ignore:::pympler[.*]
+
+[isort]
+atomic = true
+force_grid_wrap = 0
+include_trailing_comma = true
+lines_after_imports = 2
+lines_between_types = 1
+multi_line_output = 3
+not_skip = __init__.py
+use_parentheses = true
+known_first_party = attr
+known_third_party = hypothesis,pytest,setuptools,six,zope
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/attrs/setup.py b/third_party/python/attrs/setup.py
new file mode 100644
index 0000000000..a181c95ff7
--- /dev/null
+++ b/third_party/python/attrs/setup.py
@@ -0,0 +1,122 @@
+import codecs
+import os
+import re
+
+from setuptools import find_packages, setup
+
+
+###############################################################################
+
+NAME = "attrs"
+PACKAGES = find_packages(where="src")
+META_PATH = os.path.join("src", "attr", "__init__.py")
+KEYWORDS = ["class", "attribute", "boilerplate"]
+PROJECT_URLS = {
+ "Documentation": "https://www.attrs.org/",
+ "Bug Tracker": "https://github.com/python-attrs/attrs/issues",
+ "Source Code": "https://github.com/python-attrs/attrs",
+}
+CLASSIFIERS = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "Natural Language :: English",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+]
+INSTALL_REQUIRES = []
+EXTRAS_REQUIRE = {
+ "docs": ["sphinx", "zope.interface"],
+ "tests": [
+ "coverage",
+ "hypothesis",
+ "pympler",
+ "pytest",
+ "six",
+ "zope.interface",
+ ],
+}
+EXTRAS_REQUIRE["dev"] = (
+ EXTRAS_REQUIRE["tests"] + EXTRAS_REQUIRE["docs"] + ["pre-commit"]
+)
+
+###############################################################################
+
+HERE = os.path.abspath(os.path.dirname(__file__))
+
+
+def read(*parts):
+ """
+ Build an absolute path from *parts* and return the contents of the
+ resulting file. Assume UTF-8 encoding.
+ """
+ with codecs.open(os.path.join(HERE, *parts), "rb", "utf-8") as f:
+ return f.read()
+
+
+META_FILE = read(META_PATH)
+
+
+def find_meta(meta):
+ """
+ Extract __*meta*__ from META_FILE.
+ """
+ meta_match = re.search(
+ r"^__{meta}__ = ['\"]([^'\"]*)['\"]".format(meta=meta), META_FILE, re.M
+ )
+ if meta_match:
+ return meta_match.group(1)
+ raise RuntimeError("Unable to find __{meta}__ string.".format(meta=meta))
+
+
+VERSION = find_meta("version")
+URL = find_meta("url")
+LONG = (
+ read("README.rst")
+ + "\n\n"
+ + "Release Information\n"
+ + "===================\n\n"
+ + re.search(
+ r"(\d+.\d.\d \(.*?\)\n.*?)\n\n\n----\n\n\n",
+ read("CHANGELOG.rst"),
+ re.S,
+ ).group(1)
+ + "\n\n`Full changelog "
+ + "<{url}en/stable/changelog.html>`_.\n\n".format(url=URL)
+ + read("AUTHORS.rst")
+)
+
+
+if __name__ == "__main__":
+ setup(
+ name=NAME,
+ description=find_meta("description"),
+ license=find_meta("license"),
+ url=URL,
+ project_urls=PROJECT_URLS,
+ version=VERSION,
+ author=find_meta("author"),
+ author_email=find_meta("email"),
+ maintainer=find_meta("author"),
+ maintainer_email=find_meta("email"),
+ keywords=KEYWORDS,
+ long_description=LONG,
+ packages=PACKAGES,
+ package_dir={"": "src"},
+ python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
+ zip_safe=False,
+ classifiers=CLASSIFIERS,
+ install_requires=INSTALL_REQUIRES,
+ extras_require=EXTRAS_REQUIRE,
+ include_package_data=True,
+ )
diff --git a/third_party/python/attrs/src/attr/__init__.py b/third_party/python/attrs/src/attr/__init__.py
new file mode 100644
index 0000000000..0ebe5197a0
--- /dev/null
+++ b/third_party/python/attrs/src/attr/__init__.py
@@ -0,0 +1,65 @@
+from __future__ import absolute_import, division, print_function
+
+from functools import partial
+
+from . import converters, exceptions, filters, validators
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, evolve, has
+from ._make import (
+ NOTHING,
+ Attribute,
+ Factory,
+ attrib,
+ attrs,
+ fields,
+ fields_dict,
+ make_class,
+ validate,
+)
+
+
+__version__ = "19.1.0"
+
+__title__ = "attrs"
+__description__ = "Classes Without Boilerplate"
+__url__ = "https://www.attrs.org/"
+__uri__ = __url__
+__doc__ = __description__ + " <" + __uri__ + ">"
+
+__author__ = "Hynek Schlawack"
+__email__ = "hs@ox.cx"
+
+__license__ = "MIT"
+__copyright__ = "Copyright (c) 2015 Hynek Schlawack"
+
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
+
+__all__ = [
+ "Attribute",
+ "Factory",
+ "NOTHING",
+ "asdict",
+ "assoc",
+ "astuple",
+ "attr",
+ "attrib",
+ "attributes",
+ "attrs",
+ "converters",
+ "evolve",
+ "exceptions",
+ "fields",
+ "fields_dict",
+ "filters",
+ "get_run_validators",
+ "has",
+ "ib",
+ "make_class",
+ "s",
+ "set_run_validators",
+ "validate",
+ "validators",
+]
diff --git a/third_party/python/attrs/src/attr/__init__.pyi b/third_party/python/attrs/src/attr/__init__.pyi
new file mode 100644
index 0000000000..fcb93b18e3
--- /dev/null
+++ b/third_party/python/attrs/src/attr/__init__.pyi
@@ -0,0 +1,255 @@
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Generic,
+ List,
+ Optional,
+ Sequence,
+ Mapping,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+# `import X as X` is required to make these public
+from . import exceptions as exceptions
+from . import filters as filters
+from . import converters as converters
+from . import validators as validators
+
+_T = TypeVar("_T")
+_C = TypeVar("_C", bound=type)
+
+_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
+_ConverterType = Callable[[Any], _T]
+_FilterType = Callable[[Attribute[_T], _T], bool]
+# FIXME: in reality, if multiple validators are passed they must be in a list or tuple,
+# but those are invariant and so would prevent subtypes of _ValidatorType from working
+# when passed in a list or tuple.
+_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
+
+# _make --
+
+NOTHING: object
+
+# NOTE: Factory lies about its return type to make this possible: `x: List[int] = Factory(list)`
+# Work around mypy issue #4554 in the common case by using an overload.
+@overload
+def Factory(factory: Callable[[], _T]) -> _T: ...
+@overload
+def Factory(
+ factory: Union[Callable[[Any], _T], Callable[[], _T]],
+ takes_self: bool = ...,
+) -> _T: ...
+
+class Attribute(Generic[_T]):
+ name: str
+ default: Optional[_T]
+ validator: Optional[_ValidatorType[_T]]
+ repr: bool
+ cmp: bool
+ hash: Optional[bool]
+ init: bool
+ converter: Optional[_ConverterType[_T]]
+ metadata: Dict[Any, Any]
+ type: Optional[Type[_T]]
+ kw_only: bool
+ def __lt__(self, x: Attribute[_T]) -> bool: ...
+ def __le__(self, x: Attribute[_T]) -> bool: ...
+ def __gt__(self, x: Attribute[_T]) -> bool: ...
+ def __ge__(self, x: Attribute[_T]) -> bool: ...
+
+# NOTE: We had several choices for the annotation to use for type arg:
+# 1) Type[_T]
+# - Pros: Handles simple cases correctly
+# - Cons: Might produce less informative errors in the case of conflicting TypeVars
+# e.g. `attr.ib(default='bad', type=int)`
+# 2) Callable[..., _T]
+# - Pros: Better error messages than #1 for conflicting TypeVars
+# - Cons: Terrible error messages for validator checks.
+# e.g. attr.ib(type=int, validator=validate_str)
+# -> error: Cannot infer function type argument
+# 3) type (and do all of the work in the mypy plugin)
+# - Pros: Simple here, and we could customize the plugin with our own errors.
+# - Cons: Would need to write mypy plugin code to handle all the cases.
+# We chose option #1.
+
+# `attr` lies about its return type to make the following possible:
+# attr() -> Any
+# attr(8) -> int
+# attr(validator=<some callable>) -> Whatever the callable expects.
+# This makes this type of assignment possible:
+# x: int = attr(8)
+#
+# This form catches an explicit None or no default, and with no other arguments returns Any.
+@overload
+def attrib(
+ default: None = ...,
+ validator: None = ...,
+ repr: bool = ...,
+ cmp: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ convert: None = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: None = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the other arguments.
+@overload
+def attrib(
+ default: None = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: bool = ...,
+ cmp: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ convert: Optional[_ConverterType[_T]] = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: Optional[Type[_T]] = ...,
+ converter: Optional[_ConverterType[_T]] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def attrib(
+ default: _T,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: bool = ...,
+ cmp: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ convert: Optional[_ConverterType[_T]] = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: Optional[Type[_T]] = ...,
+ converter: Optional[_ConverterType[_T]] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def attrib(
+ default: Optional[_T] = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: bool = ...,
+ cmp: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ convert: Optional[_ConverterType[_T]] = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: object = ...,
+ converter: Optional[_ConverterType[_T]] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+) -> Any: ...
+@overload
+def attrs(
+ maybe_cls: _C,
+ these: Optional[Dict[str, Any]] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+) -> _C: ...
+@overload
+def attrs(
+ maybe_cls: None = ...,
+ these: Optional[Dict[str, Any]] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+) -> Callable[[_C], _C]: ...
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+class _Fields(Tuple[Attribute[Any], ...]):
+ def __getattr__(self, name: str) -> Attribute[Any]: ...
+
+def fields(cls: type) -> _Fields: ...
+def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ...
+def validate(inst: Any) -> None: ...
+
+# TODO: add support for returning a proper attrs class from the mypy plugin
+# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', [attr.ib()])` is valid
+def make_class(
+ name: str,
+ attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
+ bases: Tuple[type, ...] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+) -> type: ...
+
+# _funcs --
+
+# TODO: add support for returning TypedDict from the mypy plugin
+# FIXME: asdict/astuple do not honor their factory args. waiting on one of these:
+# https://github.com/python/mypy/issues/4236
+# https://github.com/python/typing/issues/253
+def asdict(
+ inst: Any,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ dict_factory: Type[Mapping[Any, Any]] = ...,
+ retain_collection_types: bool = ...,
+) -> Dict[str, Any]: ...
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+def astuple(
+ inst: Any,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ tuple_factory: Type[Sequence[Any]] = ...,
+ retain_collection_types: bool = ...,
+) -> Tuple[Any, ...]: ...
+def has(cls: type) -> bool: ...
+def assoc(inst: _T, **changes: Any) -> _T: ...
+def evolve(inst: _T, **changes: Any) -> _T: ...
+
+# _config --
+
+def set_run_validators(run: bool) -> None: ...
+def get_run_validators() -> bool: ...
+
+# aliases --
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
diff --git a/third_party/python/attrs/src/attr/_compat.py b/third_party/python/attrs/src/attr/_compat.py
new file mode 100644
index 0000000000..9a99dcd96c
--- /dev/null
+++ b/third_party/python/attrs/src/attr/_compat.py
@@ -0,0 +1,159 @@
+from __future__ import absolute_import, division, print_function
+
+import platform
+import sys
+import types
+import warnings
+
+
+PY2 = sys.version_info[0] == 2
+PYPY = platform.python_implementation() == "PyPy"
+
+
+if PYPY or sys.version_info[:2] >= (3, 6):
+ ordered_dict = dict
+else:
+ from collections import OrderedDict
+
+ ordered_dict = OrderedDict
+
+
+if PY2:
+ from UserDict import IterableUserDict
+ from collections import Mapping, Sequence # noqa
+
+ # We 'bundle' isclass instead of using inspect as importing inspect is
+ # fairly expensive (order of 10-15 ms for a modern machine in 2016)
+ def isclass(klass):
+ return isinstance(klass, (type, types.ClassType))
+
+ # TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
+ TYPE = "type"
+
+ def iteritems(d):
+ return d.iteritems()
+
+ # Python 2 is bereft of a read-only dict proxy, so we make one!
+ class ReadOnlyDict(IterableUserDict):
+ """
+ Best-effort read-only dict wrapper.
+ """
+
+ def __setitem__(self, key, val):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise TypeError(
+ "'mappingproxy' object does not support item assignment"
+ )
+
+ def update(self, _):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'update'"
+ )
+
+ def __delitem__(self, _):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise TypeError(
+ "'mappingproxy' object does not support item deletion"
+ )
+
+ def clear(self):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'clear'"
+ )
+
+ def pop(self, key, default=None):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'pop'"
+ )
+
+ def popitem(self):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'popitem'"
+ )
+
+ def setdefault(self, key, default=None):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'setdefault'"
+ )
+
+ def __repr__(self):
+ # Override to be identical to the Python 3 version.
+ return "mappingproxy(" + repr(self.data) + ")"
+
+ def metadata_proxy(d):
+ res = ReadOnlyDict()
+ res.data.update(d) # We blocked update, so we have to do it like this.
+ return res
+
+ def just_warn(*args, **kw): # pragma: nocover
+ """
+ We only warn on Python 3 because we are not aware of any concrete
+ consequences of not setting the cell on Python 2.
+ """
+
+
+else: # Python 3 and later.
+ from collections.abc import Mapping, Sequence # noqa
+
+ def just_warn(*args, **kw):
+ """
+ We only warn on Python 3 because we are not aware of any concrete
+ consequences of not setting the cell on Python 2.
+ """
+ warnings.warn(
+ "Missing ctypes. Some features like bare super() or accessing "
+ "__class__ will not work with slotted classes.",
+ RuntimeWarning,
+ stacklevel=2,
+ )
+
+ def isclass(klass):
+ return isinstance(klass, type)
+
+ TYPE = "class"
+
+ def iteritems(d):
+ return d.items()
+
+ def metadata_proxy(d):
+ return types.MappingProxyType(dict(d))
+
+
+def import_ctypes():
+ """
+ Moved into a function for testability.
+ """
+ import ctypes
+
+ return ctypes
+
+
+def make_set_closure_cell():
+ """
+ Moved into a function for testability.
+ """
+ if PYPY: # pragma: no cover
+
+ def set_closure_cell(cell, value):
+ cell.__setstate__((value,))
+
+ else:
+ try:
+ ctypes = import_ctypes()
+
+ set_closure_cell = ctypes.pythonapi.PyCell_Set
+ set_closure_cell.argtypes = (ctypes.py_object, ctypes.py_object)
+ set_closure_cell.restype = ctypes.c_int
+ except Exception:
+ # We try best effort to set the cell, but sometimes it's not
+ # possible. For example on Jython or on GAE.
+ set_closure_cell = just_warn
+ return set_closure_cell
+
+
+set_closure_cell = make_set_closure_cell()
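+
+# A minimal usage sketch (illustrative values): ``metadata_proxy`` returns a
+# read-only mapping on both Python 2 and 3, so mutation fails the same way a
+# ``mappingproxy`` would.
+#
+#     >>> proxy = metadata_proxy({"key": "value"})
+#     >>> proxy["key"]
+#     'value'
+#     >>> proxy["key"] = "other"
+#     Traceback (most recent call last):
+#         ...
+#     TypeError: 'mappingproxy' object does not support item assignment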
diff --git a/third_party/python/attrs/src/attr/_config.py b/third_party/python/attrs/src/attr/_config.py
new file mode 100644
index 0000000000..8ec920962d
--- /dev/null
+++ b/third_party/python/attrs/src/attr/_config.py
@@ -0,0 +1,23 @@
+from __future__ import absolute_import, division, print_function
+
+
+__all__ = ["set_run_validators", "get_run_validators"]
+
+_run_validators = True
+
+
+def set_run_validators(run):
+ """
+ Set whether or not validators are run. By default, they are run.
+ """
+ if not isinstance(run, bool):
+ raise TypeError("'run' must be bool.")
+ global _run_validators
+ _run_validators = run
+
+
+def get_run_validators():
+ """
+ Return whether or not validators are run.
+ """
+ return _run_validators
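+
+# A minimal usage sketch: validators can be switched off globally (e.g. when
+# bulk-loading data that is already known to be valid) and switched back on.
+#
+#     >>> import attr
+#     >>> attr.get_run_validators()
+#     True
+#     >>> attr.set_run_validators(False)  # attrs-generated __init__s now skip validators
+#     >>> attr.set_run_validators(True)   # restore the default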
diff --git a/third_party/python/attrs/src/attr/_funcs.py b/third_party/python/attrs/src/attr/_funcs.py
new file mode 100644
index 0000000000..b61d239412
--- /dev/null
+++ b/third_party/python/attrs/src/attr/_funcs.py
@@ -0,0 +1,290 @@
+from __future__ import absolute_import, division, print_function
+
+import copy
+
+from ._compat import iteritems
+from ._make import NOTHING, _obj_setattr, fields
+from .exceptions import AttrsAttributeNotFoundError
+
+
+def asdict(
+ inst,
+ recurse=True,
+ filter=None,
+ dict_factory=dict,
+ retain_collection_types=False,
+):
+ """
+ Return the ``attrs`` attribute values of *inst* as a dict.
+
+ Optionally recurse into other ``attrs``-decorated classes.
+
+ :param inst: Instance of an ``attrs``-decorated class.
+ :param bool recurse: Recurse into classes that are also
+ ``attrs``-decorated.
+    :param callable filter: A callable whose return value determines whether
+        an attribute or element is included (``True``) or dropped (``False``).
+        It is called with the :class:`attr.Attribute` as the first argument
+        and the value as the second argument.
+ :param callable dict_factory: A callable to produce dictionaries from. For
+ example, to produce ordered dictionaries instead of normal Python
+ dictionaries, pass in ``collections.OrderedDict``.
+ :param bool retain_collection_types: Do not convert to ``list`` when
+ encountering an attribute whose type is ``tuple`` or ``set``. Only
+ meaningful if ``recurse`` is ``True``.
+
+ :rtype: return type of *dict_factory*
+
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 16.0.0 *dict_factory*
+ .. versionadded:: 16.1.0 *retain_collection_types*
+ """
+ attrs = fields(inst.__class__)
+ rv = dict_factory()
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+ if recurse is True:
+ if has(v.__class__):
+ rv[a.name] = asdict(
+ v, True, filter, dict_factory, retain_collection_types
+ )
+ elif isinstance(v, (tuple, list, set)):
+ cf = v.__class__ if retain_collection_types is True else list
+ rv[a.name] = cf(
+ [
+ _asdict_anything(
+ i, filter, dict_factory, retain_collection_types
+ )
+ for i in v
+ ]
+ )
+ elif isinstance(v, dict):
+ df = dict_factory
+ rv[a.name] = df(
+ (
+ _asdict_anything(
+ kk, filter, df, retain_collection_types
+ ),
+ _asdict_anything(
+ vv, filter, df, retain_collection_types
+ ),
+ )
+ for kk, vv in iteritems(v)
+ )
+ else:
+ rv[a.name] = v
+ else:
+ rv[a.name] = v
+ return rv
+
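+# A minimal usage sketch (``Point`` is an illustrative class):
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class Point(object):
+#     ...     x = attr.ib()
+#     ...     y = attr.ib()
+#     >>> attr.asdict(Point(x=1, y=2))
+#     {'x': 1, 'y': 2}
+#     >>> attr.asdict(Point(1, 2), filter=lambda a, v: a.name != "y")
+#     {'x': 1}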
+
+def _asdict_anything(val, filter, dict_factory, retain_collection_types):
+ """
+    ``asdict`` only works on attrs instances; this works on anything.
+ """
+ if getattr(val.__class__, "__attrs_attrs__", None) is not None:
+ # Attrs class.
+ rv = asdict(val, True, filter, dict_factory, retain_collection_types)
+ elif isinstance(val, (tuple, list, set)):
+ cf = val.__class__ if retain_collection_types is True else list
+ rv = cf(
+ [
+ _asdict_anything(
+ i, filter, dict_factory, retain_collection_types
+ )
+ for i in val
+ ]
+ )
+ elif isinstance(val, dict):
+ df = dict_factory
+ rv = df(
+ (
+ _asdict_anything(kk, filter, df, retain_collection_types),
+ _asdict_anything(vv, filter, df, retain_collection_types),
+ )
+ for kk, vv in iteritems(val)
+ )
+ else:
+ rv = val
+ return rv
+
+
+def astuple(
+ inst,
+ recurse=True,
+ filter=None,
+ tuple_factory=tuple,
+ retain_collection_types=False,
+):
+ """
+ Return the ``attrs`` attribute values of *inst* as a tuple.
+
+ Optionally recurse into other ``attrs``-decorated classes.
+
+ :param inst: Instance of an ``attrs``-decorated class.
+ :param bool recurse: Recurse into classes that are also
+ ``attrs``-decorated.
+    :param callable filter: A callable whose return value determines whether
+        an attribute or element is included (``True``) or dropped (``False``).
+        It is called with the :class:`attr.Attribute` as the first argument
+        and the value as the second argument.
+ :param callable tuple_factory: A callable to produce tuples from. For
+ example, to produce lists instead of tuples.
+ :param bool retain_collection_types: Do not convert to ``list``
+        or ``dict`` when encountering an attribute whose type is
+ ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
+ ``True``.
+
+ :rtype: return type of *tuple_factory*
+
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 16.2.0
+ """
+ attrs = fields(inst.__class__)
+ rv = []
+ retain = retain_collection_types # Very long. :/
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+ if recurse is True:
+ if has(v.__class__):
+ rv.append(
+ astuple(
+ v,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ )
+ elif isinstance(v, (tuple, list, set)):
+ cf = v.__class__ if retain is True else list
+ rv.append(
+ cf(
+ [
+ astuple(
+ j,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(j.__class__)
+ else j
+ for j in v
+ ]
+ )
+ )
+ elif isinstance(v, dict):
+ df = v.__class__ if retain is True else dict
+ rv.append(
+ df(
+ (
+ astuple(
+ kk,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(kk.__class__)
+ else kk,
+ astuple(
+ vv,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(vv.__class__)
+ else vv,
+ )
+ for kk, vv in iteritems(v)
+ )
+ )
+ else:
+ rv.append(v)
+ else:
+ rv.append(v)
+ return rv if tuple_factory is list else tuple_factory(rv)
+
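+# A minimal usage sketch (``Point`` is an illustrative class):
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class Point(object):
+#     ...     x = attr.ib()
+#     ...     y = attr.ib()
+#     >>> attr.astuple(Point(1, 2))
+#     (1, 2)
+#     >>> attr.astuple(Point(1, 2), tuple_factory=list)
+#     [1, 2]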
+
+def has(cls):
+ """
+ Check whether *cls* is a class with ``attrs`` attributes.
+
+ :param type cls: Class to introspect.
+ :raise TypeError: If *cls* is not a class.
+
+ :rtype: :class:`bool`
+ """
+ return getattr(cls, "__attrs_attrs__", None) is not None
+
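+# A minimal usage sketch (``C`` is an illustrative class):
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class C(object):
+#     ...     pass
+#     >>> attr.has(C)
+#     True
+#     >>> attr.has(dict)
+#     False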
+
+def assoc(inst, **changes):
+ """
+ Copy *inst* and apply *changes*.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
+ :param changes: Keyword changes in the new copy.
+
+ :return: A copy of inst with *changes* incorporated.
+
+    :raise attr.exceptions.AttrsAttributeNotFoundError: If a key in *changes*
+        is not an ``attrs`` attribute on the class of *inst*.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. deprecated:: 17.1.0
+ Use :func:`evolve` instead.
+ """
+ import warnings
+
+ warnings.warn(
+ "assoc is deprecated and will be removed after 2018/01.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ new = copy.copy(inst)
+ attrs = fields(inst.__class__)
+ for k, v in iteritems(changes):
+ a = getattr(attrs, k, NOTHING)
+ if a is NOTHING:
+ raise AttrsAttributeNotFoundError(
+ "{k} is not an attrs attribute on {cl}.".format(
+ k=k, cl=new.__class__
+ )
+ )
+ _obj_setattr(new, k, v)
+ return new
+
+
+def evolve(inst, **changes):
+ """
+ Create a new instance, based on *inst* with *changes* applied.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
+ :param changes: Keyword changes in the new copy.
+
+ :return: A copy of inst with *changes* incorporated.
+
+    :raise TypeError: If a key in *changes* does not correspond to a parameter
+        of the class ``__init__``.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 17.1.0
+ """
+ cls = inst.__class__
+ attrs = fields(cls)
+ for a in attrs:
+ if not a.init:
+ continue
+ attr_name = a.name # To deal with private attributes.
+ init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
+ if init_name not in changes:
+ changes[init_name] = getattr(inst, attr_name)
+ return cls(**changes)
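+
+# A minimal usage sketch (``Point`` is an illustrative class): ``evolve``
+# builds the copy through ``__init__``, so converters and validators run on
+# the changed values.
+#
+#     >>> import attr
+#     >>> @attr.s(frozen=True)
+#     ... class Point(object):
+#     ...     x = attr.ib()
+#     ...     y = attr.ib()
+#     >>> p = Point(1, 2)
+#     >>> attr.evolve(p, y=3)
+#     Point(x=1, y=3)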
diff --git a/third_party/python/attrs/src/attr/_make.py b/third_party/python/attrs/src/attr/_make.py
new file mode 100644
index 0000000000..827175a460
--- /dev/null
+++ b/third_party/python/attrs/src/attr/_make.py
@@ -0,0 +1,2086 @@
+from __future__ import absolute_import, division, print_function
+
+import copy
+import hashlib
+import linecache
+import sys
+import threading
+import warnings
+
+from operator import itemgetter
+
+from . import _config
+from ._compat import (
+ PY2,
+ isclass,
+ iteritems,
+ metadata_proxy,
+ ordered_dict,
+ set_closure_cell,
+)
+from .exceptions import (
+ DefaultAlreadySetError,
+ FrozenInstanceError,
+ NotAnAttrsClassError,
+ PythonTooOldError,
+ UnannotatedAttributeError,
+)
+
+
+# This is used at least twice, so cache it here.
+_obj_setattr = object.__setattr__
+_init_converter_pat = "__attr_converter_{}"
+_init_factory_pat = "__attr_factory_{}"
+_tuple_property_pat = (
+ " {attr_name} = _attrs_property(_attrs_itemgetter({index}))"
+)
+_classvar_prefixes = ("typing.ClassVar", "t.ClassVar", "ClassVar")
+# we don't use a double-underscore prefix because that triggers
+# name mangling when trying to create a slot for the field
+# (when slots=True)
+_hash_cache_field = "_attrs_cached_hash"
+
+_empty_metadata_singleton = metadata_proxy({})
+
+
+class _Nothing(object):
+ """
+ Sentinel class to indicate the lack of a value when ``None`` is ambiguous.
+
+ ``_Nothing`` is a singleton. There is only ever one of it.
+ """
+
+ _singleton = None
+
+ def __new__(cls):
+ if _Nothing._singleton is None:
+ _Nothing._singleton = super(_Nothing, cls).__new__(cls)
+ return _Nothing._singleton
+
+ def __repr__(self):
+ return "NOTHING"
+
+
+NOTHING = _Nothing()
+"""
+Sentinel to indicate the lack of a value when ``None`` is ambiguous.
+"""
+
+
+def attrib(
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=True,
+ hash=None,
+ init=True,
+ convert=None,
+ metadata=None,
+ type=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
+):
+ """
+ Create a new attribute on a class.
+
+ .. warning::
+
+ Does *not* do anything unless the class is also decorated with
+ :func:`attr.s`!
+
+ :param default: A value that is used if an ``attrs``-generated ``__init__``
+ is used and no value is passed while instantiating or the attribute is
+ excluded using ``init=False``.
+
+ If the value is an instance of :class:`Factory`, its callable will be
+ used to construct a new value (useful for mutable data types like lists
+ or dicts).
+
+ If a default is not set (or set manually to ``attr.NOTHING``), a value
+ *must* be supplied when instantiating; otherwise a :exc:`TypeError`
+ will be raised.
+
+ The default can also be set using decorator notation as shown below.
+
+ :type default: Any value.
+
+ :param callable factory: Syntactic sugar for
+ ``default=attr.Factory(callable)``.
+
+ :param validator: :func:`callable` that is called by ``attrs``-generated
+ ``__init__`` methods after the instance has been initialized. They
+ receive the initialized instance, the :class:`Attribute`, and the
+ passed value.
+
+ The return value is *not* inspected so the validator has to throw an
+ exception itself.
+
+ If a ``list`` is passed, its items are treated as validators and must
+ all pass.
+
+ Validators can be globally disabled and re-enabled using
+ :func:`get_run_validators`.
+
+ The validator can also be set using decorator notation as shown below.
+
+ :type validator: ``callable`` or a ``list`` of ``callable``\\ s.
+
+ :param bool repr: Include this attribute in the generated ``__repr__``
+ method.
+ :param bool cmp: Include this attribute in the generated comparison methods
+ (``__eq__`` et al).
+ :param hash: Include this attribute in the generated ``__hash__``
+ method. If ``None`` (default), mirror *cmp*'s value. This is the
+        correct behavior according to the Python spec. Setting this value to
+        anything other than ``None`` is *discouraged*.
+ :type hash: ``bool`` or ``None``
+ :param bool init: Include this attribute in the generated ``__init__``
+ method. It is possible to set this to ``False`` and set a default
+        value. In that case this attribute is unconditionally initialized
+ with the specified default value or factory.
+ :param callable converter: :func:`callable` that is called by
+        ``attrs``-generated ``__init__`` methods to convert the attribute's value
+ to the desired format. It is given the passed-in value, and the
+ returned value will be used as the new value of the attribute. The
+ value is converted before being passed to the validator, if any.
+ :param metadata: An arbitrary mapping, to be used by third-party
+ components. See :ref:`extending_metadata`.
+ :param type: The type of the attribute. In Python 3.6 or greater, the
+ preferred method to specify the type is using a variable annotation
+ (see `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_).
+ This argument is provided for backward compatibility.
+ Regardless of the approach used, the type will be stored on
+ ``Attribute.type``.
+
+ Please note that ``attrs`` doesn't do anything with this metadata by
+ itself. You can use it as part of your own code or for
+ :doc:`static type checking <types>`.
+ :param kw_only: Make this attribute keyword-only (Python 3+)
+ in the generated ``__init__`` (if ``init`` is ``False``, this
+ parameter is ignored).
+
+ .. versionadded:: 15.2.0 *convert*
+ .. versionadded:: 16.3.0 *metadata*
+ .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
+ .. versionchanged:: 17.1.0
+ *hash* is ``None`` and therefore mirrors *cmp* by default.
+ .. versionadded:: 17.3.0 *type*
+ .. deprecated:: 17.4.0 *convert*
+ .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated
+ *convert* to achieve consistency with other noun-based arguments.
+ .. versionadded:: 18.1.0
+ ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
+ .. versionadded:: 18.2.0 *kw_only*
+ """
+ if hash is not None and hash is not True and hash is not False:
+ raise TypeError(
+ "Invalid value for hash. Must be True, False, or None."
+ )
+
+ if convert is not None:
+ if converter is not None:
+ raise RuntimeError(
+ "Can't pass both `convert` and `converter`. "
+ "Please use `converter` only."
+ )
+ warnings.warn(
+ "The `convert` argument is deprecated in favor of `converter`. "
+ "It will be removed after 2019/01.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ converter = convert
+
+ if factory is not None:
+ if default is not NOTHING:
+ raise ValueError(
+ "The `default` and `factory` arguments are mutually "
+ "exclusive."
+ )
+ if not callable(factory):
+ raise ValueError("The `factory` argument must be a callable.")
+ default = Factory(factory)
+
+ if metadata is None:
+ metadata = {}
+
+ return _CountingAttr(
+ default=default,
+ validator=validator,
+ repr=repr,
+ cmp=cmp,
+ hash=hash,
+ init=init,
+ converter=converter,
+ metadata=metadata,
+ type=type,
+ kw_only=kw_only,
+ )
+
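+# A minimal usage sketch (``Connection`` is an illustrative class) combining a
+# default, a converter, and a validator on one attribute:
+#
+#     >>> import attr
+#     >>> from attr.validators import instance_of
+#     >>> @attr.s
+#     ... class Connection(object):
+#     ...     port = attr.ib(default=80, converter=int, validator=instance_of(int))
+#     >>> Connection(port="8080")
+#     Connection(port=8080)
+#     >>> Connection()
+#     Connection(port=80)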
+
+def _make_attr_tuple_class(cls_name, attr_names):
+ """
+ Create a tuple subclass to hold `Attribute`s for an `attrs` class.
+
+ The subclass is a bare tuple with properties for names.
+
+ class MyClassAttributes(tuple):
+ __slots__ = ()
+ x = property(itemgetter(0))
+ """
+ attr_class_name = "{}Attributes".format(cls_name)
+ attr_class_template = [
+ "class {}(tuple):".format(attr_class_name),
+ " __slots__ = ()",
+ ]
+ if attr_names:
+ for i, attr_name in enumerate(attr_names):
+ attr_class_template.append(
+ _tuple_property_pat.format(index=i, attr_name=attr_name)
+ )
+ else:
+ attr_class_template.append(" pass")
+ globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
+ eval(compile("\n".join(attr_class_template), "", "exec"), globs)
+
+ return globs[attr_class_name]
+
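+# An illustration of the generated class (``PointAttributes`` is a made-up
+# example): it is a plain tuple whose items are also reachable as attributes,
+# which is what makes ``fields(C).x`` work.
+#
+#     >>> PointAttributes = _make_attr_tuple_class("Point", ["x", "y"])
+#     >>> pa = PointAttributes(("x-attr", "y-attr"))
+#     >>> pa[0], pa.x, pa.y
+#     ('x-attr', 'x-attr', 'y-attr')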
+
+# Tuple class for extracted attributes from a class definition.
+# `base_attrs` is a subset of `attrs`.
+_Attributes = _make_attr_tuple_class(
+ "_Attributes",
+ [
+ # all attributes to build dunder methods for
+ "attrs",
+ # attributes that have been inherited
+ "base_attrs",
+ # map inherited attributes to their originating classes
+ "base_attrs_map",
+ ],
+)
+
+
+def _is_class_var(annot):
+ """
+ Check whether *annot* is a typing.ClassVar.
+
+ The string comparison hack is used to avoid evaluating all string
+ annotations which would put attrs-based classes at a performance
+ disadvantage compared to plain old classes.
+ """
+ return str(annot).startswith(_classvar_prefixes)
+
+
+def _get_annotations(cls):
+ """
+ Get annotations for *cls*.
+ """
+ anns = getattr(cls, "__annotations__", None)
+ if anns is None:
+ return {}
+
+ # Verify that the annotations aren't merely inherited.
+ for base_cls in cls.__mro__[1:]:
+ if anns is getattr(base_cls, "__annotations__", None):
+ return {}
+
+ return anns
+
+
+def _counter_getter(e):
+ """
+ Key function for sorting to avoid re-creating a lambda for every class.
+ """
+ return e[1].counter
+
+
+def _transform_attrs(cls, these, auto_attribs, kw_only):
+ """
+ Transform all `_CountingAttr`s on a class into `Attribute`s.
+
+ If *these* is passed, use that and don't look for them on the class.
+
+ Return an `_Attributes`.
+ """
+ cd = cls.__dict__
+ anns = _get_annotations(cls)
+
+ if these is not None:
+ ca_list = [(name, ca) for name, ca in iteritems(these)]
+
+ if not isinstance(these, ordered_dict):
+ ca_list.sort(key=_counter_getter)
+ elif auto_attribs is True:
+ ca_names = {
+ name
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ }
+ ca_list = []
+ annot_names = set()
+ for attr_name, type in anns.items():
+ if _is_class_var(type):
+ continue
+ annot_names.add(attr_name)
+ a = cd.get(attr_name, NOTHING)
+ if not isinstance(a, _CountingAttr):
+ if a is NOTHING:
+ a = attrib()
+ else:
+ a = attrib(default=a)
+ ca_list.append((attr_name, a))
+
+ unannotated = ca_names - annot_names
+ if len(unannotated) > 0:
+ raise UnannotatedAttributeError(
+ "The following `attr.ib`s lack a type annotation: "
+ + ", ".join(
+ sorted(unannotated, key=lambda n: cd.get(n).counter)
+ )
+ + "."
+ )
+ else:
+ ca_list = sorted(
+ (
+ (name, attr)
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ ),
+ key=lambda e: e[1].counter,
+ )
+
+ own_attrs = [
+ Attribute.from_counting_attr(
+ name=attr_name, ca=ca, type=anns.get(attr_name)
+ )
+ for attr_name, ca in ca_list
+ ]
+
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+ taken_attr_names = {a.name: a for a in own_attrs}
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in cls.__mro__[1:-1]:
+ sub_attrs = getattr(base_cls, "__attrs_attrs__", None)
+ if sub_attrs is not None:
+ for a in sub_attrs:
+ prev_a = taken_attr_names.get(a.name)
+ # Only add an attribute if it hasn't been defined before. This
+ # allows for overwriting attribute definitions by subclassing.
+ if prev_a is None:
+ base_attrs.append(a)
+ taken_attr_names[a.name] = a
+ base_attr_map[a.name] = base_cls
+
+ attr_names = [a.name for a in base_attrs + own_attrs]
+
+ AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
+
+ if kw_only:
+ own_attrs = [a._assoc(kw_only=True) for a in own_attrs]
+ base_attrs = [a._assoc(kw_only=True) for a in base_attrs]
+
+ attrs = AttrsClass(base_attrs + own_attrs)
+
+ had_default = False
+ was_kw_only = False
+ for a in attrs:
+ if (
+ was_kw_only is False
+ and had_default is True
+ and a.default is NOTHING
+ and a.init is True
+ and a.kw_only is False
+ ):
+ raise ValueError(
+ "No mandatory attributes allowed after an attribute with a "
+ "default value or factory. Attribute in question: %r" % (a,)
+ )
+ elif (
+ had_default is False
+ and a.default is not NOTHING
+ and a.init is not False
+ and
+ # Keyword-only attributes without defaults can be specified
+ # after keyword-only attributes with defaults.
+ a.kw_only is False
+ ):
+ had_default = True
+ if was_kw_only is True and a.kw_only is False and a.init is True:
+ raise ValueError(
+ "Non keyword-only attributes are not allowed after a "
+ "keyword-only attribute (unless they are init=False). "
+ "Attribute in question: {a!r}".format(a=a)
+ )
+ if was_kw_only is False and a.init is True and a.kw_only is True:
+ was_kw_only = True
+
+ return _Attributes((attrs, base_attrs, base_attr_map))
+
+
+def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ raise FrozenInstanceError()
+
+
+def _frozen_delattrs(self, name):
+ """
+ Attached to frozen classes as __delattr__.
+ """
+ raise FrozenInstanceError()
+
+
+class _ClassBuilder(object):
+ """
+ Iteratively build *one* class.
+ """
+
+ __slots__ = (
+ "_cls",
+ "_cls_dict",
+ "_attrs",
+ "_base_names",
+ "_attr_names",
+ "_slots",
+ "_frozen",
+ "_weakref_slot",
+ "_cache_hash",
+ "_has_post_init",
+ "_delete_attribs",
+ "_base_attr_map",
+ "_is_exc",
+ )
+
+ def __init__(
+ self,
+ cls,
+ these,
+ slots,
+ frozen,
+ weakref_slot,
+ auto_attribs,
+ kw_only,
+ cache_hash,
+ is_exc,
+ ):
+ attrs, base_attrs, base_map = _transform_attrs(
+ cls, these, auto_attribs, kw_only
+ )
+
+ self._cls = cls
+ self._cls_dict = dict(cls.__dict__) if slots else {}
+ self._attrs = attrs
+ self._base_names = set(a.name for a in base_attrs)
+ self._base_attr_map = base_map
+ self._attr_names = tuple(a.name for a in attrs)
+ self._slots = slots
+ self._frozen = frozen or _has_frozen_base_class(cls)
+ self._weakref_slot = weakref_slot
+ self._cache_hash = cache_hash
+ self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
+ self._delete_attribs = not bool(these)
+ self._is_exc = is_exc
+
+ self._cls_dict["__attrs_attrs__"] = self._attrs
+
+ if frozen:
+ self._cls_dict["__setattr__"] = _frozen_setattrs
+ self._cls_dict["__delattr__"] = _frozen_delattrs
+
+ def __repr__(self):
+ return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__)
+
+ def build_class(self):
+ """
+ Finalize class based on the accumulated configuration.
+
+ Builder cannot be used after calling this method.
+ """
+ if self._slots is True:
+ return self._create_slots_class()
+ else:
+ return self._patch_original_class()
+
+ def _patch_original_class(self):
+ """
+ Apply accumulated methods and return the class.
+ """
+ cls = self._cls
+ base_names = self._base_names
+
+ # Clean class of attribute definitions (`attr.ib()`s).
+ if self._delete_attribs:
+ for name in self._attr_names:
+ if (
+ name not in base_names
+ and getattr(cls, name, None) is not None
+ ):
+ try:
+ delattr(cls, name)
+ except AttributeError:
+ # This can happen if a base class defines a class
+ # variable and we want to set an attribute with the
+ # same name by using only a type annotation.
+ pass
+
+ # Attach our dunder methods.
+ for name, value in self._cls_dict.items():
+ setattr(cls, name, value)
+
+ # Attach __setstate__. This is necessary to clear the hash code
+ # cache on deserialization. See issue
+ # https://github.com/python-attrs/attrs/issues/482 .
+ # Note that this code only handles setstate for dict classes.
+ # For slotted classes, see similar code in _create_slots_class .
+ if self._cache_hash:
+ existing_set_state_method = getattr(cls, "__setstate__", None)
+ if existing_set_state_method:
+ raise NotImplementedError(
+ "Currently you cannot use hash caching if "
+                    "you specify your own __setstate__ method. "
+ "See https://github.com/python-attrs/attrs/issues/494 ."
+ )
+
+ def cache_hash_set_state(chss_self, _):
+ # clear hash code cache
+ setattr(chss_self, _hash_cache_field, None)
+
+ setattr(cls, "__setstate__", cache_hash_set_state)
+
+ return cls
+
+ def _create_slots_class(self):
+ """
+ Build and return a new class with a `__slots__` attribute.
+ """
+ base_names = self._base_names
+ cd = {
+ k: v
+ for k, v in iteritems(self._cls_dict)
+ if k not in tuple(self._attr_names) + ("__dict__", "__weakref__")
+ }
+
+ weakref_inherited = False
+
+ # Traverse the MRO to check for an existing __weakref__.
+ for base_cls in self._cls.__mro__[1:-1]:
+ if "__weakref__" in getattr(base_cls, "__dict__", ()):
+ weakref_inherited = True
+ break
+
+ names = self._attr_names
+ if (
+ self._weakref_slot
+ and "__weakref__" not in getattr(self._cls, "__slots__", ())
+ and "__weakref__" not in names
+ and not weakref_inherited
+ ):
+ names += ("__weakref__",)
+
+ # We only add the names of attributes that aren't inherited.
+        # Setting __slots__ to inherited attributes wastes memory.
+ slot_names = [name for name in names if name not in base_names]
+ if self._cache_hash:
+ slot_names.append(_hash_cache_field)
+ cd["__slots__"] = tuple(slot_names)
+
+ qualname = getattr(self._cls, "__qualname__", None)
+ if qualname is not None:
+ cd["__qualname__"] = qualname
+
+ # __weakref__ is not writable.
+ state_attr_names = tuple(
+ an for an in self._attr_names if an != "__weakref__"
+ )
+
+ def slots_getstate(self):
+ """
+ Automatically created by attrs.
+ """
+ return tuple(getattr(self, name) for name in state_attr_names)
+
+ hash_caching_enabled = self._cache_hash
+
+ def slots_setstate(self, state):
+ """
+ Automatically created by attrs.
+ """
+ __bound_setattr = _obj_setattr.__get__(self, Attribute)
+ for name, value in zip(state_attr_names, state):
+ __bound_setattr(name, value)
+ # Clearing the hash code cache on deserialization is needed
+ # because hash codes can change from run to run. See issue
+ # https://github.com/python-attrs/attrs/issues/482 .
+ # Note that this code only handles setstate for slotted classes.
+ # For dict classes, see similar code in _patch_original_class .
+ if hash_caching_enabled:
+ __bound_setattr(_hash_cache_field, None)
+
+ # slots and frozen require __getstate__/__setstate__ to work
+ cd["__getstate__"] = slots_getstate
+ cd["__setstate__"] = slots_setstate
+
+ # Create new class based on old class and our methods.
+ cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
+
+ # The following is a fix for
+ # https://github.com/python-attrs/attrs/issues/102. On Python 3,
+ # if a method mentions `__class__` or uses the no-arg super(), the
+ # compiler will bake a reference to the class in the method itself
+ # as `method.__closure__`. Since we replace the class with a
+ # clone, we rewrite these references so it keeps working.
+ for item in cls.__dict__.values():
+ if isinstance(item, (classmethod, staticmethod)):
+ # Class- and staticmethods hide their functions inside.
+ # These might need to be rewritten as well.
+ closure_cells = getattr(item.__func__, "__closure__", None)
+ else:
+ closure_cells = getattr(item, "__closure__", None)
+
+ if not closure_cells: # Catch None or the empty list.
+ continue
+ for cell in closure_cells:
+ if cell.cell_contents is self._cls:
+ set_closure_cell(cell, cls)
+
+ return cls
+
+ def add_repr(self, ns):
+ self._cls_dict["__repr__"] = self._add_method_dunders(
+ _make_repr(self._attrs, ns=ns)
+ )
+ return self
+
+ def add_str(self):
+ repr = self._cls_dict.get("__repr__")
+ if repr is None:
+ raise ValueError(
+ "__str__ can only be generated if a __repr__ exists."
+ )
+
+ def __str__(self):
+ return self.__repr__()
+
+ self._cls_dict["__str__"] = self._add_method_dunders(__str__)
+ return self
+
+ def make_unhashable(self):
+ self._cls_dict["__hash__"] = None
+ return self
+
+ def add_hash(self):
+ self._cls_dict["__hash__"] = self._add_method_dunders(
+ _make_hash(
+ self._attrs, frozen=self._frozen, cache_hash=self._cache_hash
+ )
+ )
+
+ return self
+
+ def add_init(self):
+ self._cls_dict["__init__"] = self._add_method_dunders(
+ _make_init(
+ self._attrs,
+ self._has_post_init,
+ self._frozen,
+ self._slots,
+ self._cache_hash,
+ self._base_attr_map,
+ self._is_exc,
+ )
+ )
+
+ return self
+
+ def add_cmp(self):
+ cd = self._cls_dict
+
+ cd["__eq__"], cd["__ne__"], cd["__lt__"], cd["__le__"], cd[
+ "__gt__"
+ ], cd["__ge__"] = (
+ self._add_method_dunders(meth) for meth in _make_cmp(self._attrs)
+ )
+
+ return self
+
+ def _add_method_dunders(self, method):
+ """
+ Add __module__ and __qualname__ to a *method* if possible.
+ """
+ try:
+ method.__module__ = self._cls.__module__
+ except AttributeError:
+ pass
+
+ try:
+ method.__qualname__ = ".".join(
+ (self._cls.__qualname__, method.__name__)
+ )
+ except AttributeError:
+ pass
+
+ return method
+
+
+def attrs(
+ maybe_cls=None,
+ these=None,
+ repr_ns=None,
+ repr=True,
+ cmp=True,
+ hash=None,
+ init=True,
+ slots=False,
+ frozen=False,
+ weakref_slot=True,
+ str=False,
+ auto_attribs=False,
+ kw_only=False,
+ cache_hash=False,
+ auto_exc=False,
+):
+ r"""
+ A class decorator that adds `dunder
+ <https://wiki.python.org/moin/DunderAlias>`_\ -methods according to the
+ specified attributes using :func:`attr.ib` or the *these* argument.
+
+ :param these: A dictionary of name to :func:`attr.ib` mappings. This is
+ useful to avoid the definition of your attributes within the class body
+ because you can't (e.g. if you want to add ``__repr__`` methods to
+ Django models) or don't want to.
+
+ If *these* is not ``None``, ``attrs`` will *not* search the class body
+ for attributes and will *not* remove any attributes from it.
+
+ If *these* is an ordered dict (:class:`dict` on Python 3.6+,
+ :class:`collections.OrderedDict` otherwise), the order is deduced from
+ the order of the attributes inside *these*. Otherwise the order
+ of the definition of the attributes is used.
+
+ :type these: :class:`dict` of :class:`str` to :func:`attr.ib`
+
+ :param str repr_ns: When using nested classes, there's no way in Python 2
+ to automatically detect that. Therefore it's possible to set the
+ namespace explicitly for a more meaningful ``repr`` output.
+    :param bool repr: Create a ``__repr__`` method with a human-readable
+        representation of ``attrs`` attributes.
+ :param bool str: Create a ``__str__`` method that is identical to
+ ``__repr__``. This is usually not necessary except for
+ :class:`Exception`\ s.
+ :param bool cmp: Create ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
+ ``__gt__``, and ``__ge__`` methods that compare the class as if it were
+ a tuple of its ``attrs`` attributes. But the attributes are *only*
+        compared if the types of both classes are *identical*!
+ :param hash: If ``None`` (default), the ``__hash__`` method is generated
+ according how *cmp* and *frozen* are set.
+
+ 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
+ 2. If *cmp* is True and *frozen* is False, ``__hash__`` will be set to
+ None, marking it unhashable (which it is).
+ 3. If *cmp* is False, ``__hash__`` will be left untouched meaning the
+ ``__hash__`` method of the base class will be used (if base class is
+ ``object``, this means it will fall back to id-based hashing.).
+
+ Although not recommended, you can decide for yourself and force
+ ``attrs`` to create one (e.g. if the class is immutable even though you
+        didn't freeze it programmatically) by passing ``True``, or prevent one
+        from being created by passing ``False``. Both of these cases are
+        rather special and should be used carefully.
+
+ See the `Python documentation \
+ <https://docs.python.org/3/reference/datamodel.html#object.__hash__>`_
+ and the `GitHub issue that led to the default behavior \
+ <https://github.com/python-attrs/attrs/issues/136>`_ for more details.
+ :type hash: ``bool`` or ``None``
+ :param bool init: Create a ``__init__`` method that initializes the
+ ``attrs`` attributes. Leading underscores are stripped for the
+ argument name. If a ``__attrs_post_init__`` method exists on the
+ class, it will be called after the class is fully initialized.
+ :param bool slots: Create a slots_-style class that's more
+ memory-efficient. See :ref:`slots` for further ramifications.
+ :param bool frozen: Make instances immutable after initialization. If
+ someone attempts to modify a frozen instance,
+ :exc:`attr.exceptions.FrozenInstanceError` is raised.
+
+ Please note:
+
+ 1. This is achieved by installing a custom ``__setattr__`` method
+               on your class, so you can't implement your own.
+
+ 2. True immutability is impossible in Python.
+
+            3. This *does* have a minor runtime performance :ref:`impact
+ <how-frozen>` when initializing new instances. In other words:
+ ``__init__`` is slightly slower with ``frozen=True``.
+
+ 4. If a class is frozen, you cannot modify ``self`` in
+ ``__attrs_post_init__`` or a self-written ``__init__``. You can
+ circumvent that limitation by using
+ ``object.__setattr__(self, "attribute_name", value)``.
+
+ .. _slots: https://docs.python.org/3/reference/datamodel.html#slots
+ :param bool weakref_slot: Make instances weak-referenceable. This has no
+ effect unless ``slots`` is also enabled.
+ :param bool auto_attribs: If True, collect `PEP 526`_-annotated attributes
+ (Python 3.6 and later only) from the class body.
+
+ In this case, you **must** annotate every field. If ``attrs``
+ encounters a field that is set to an :func:`attr.ib` but lacks a type
+ annotation, an :exc:`attr.exceptions.UnannotatedAttributeError` is
+ raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't
+ want to set a type.
+
+ If you assign a value to those attributes (e.g. ``x: int = 42``), that
+ value becomes the default value like if it were passed using
+ ``attr.ib(default=42)``. Passing an instance of :class:`Factory` also
+ works as expected.
+
+ Attributes annotated as :data:`typing.ClassVar` are **ignored**.
+
+ .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/
+ :param bool kw_only: Make all attributes keyword-only (Python 3+)
+ in the generated ``__init__`` (if ``init`` is ``False``, this
+ parameter is ignored).
+ :param bool cache_hash: Ensure that the object's hash code is computed
+ only once and stored on the object. If this is set to ``True``,
+ hashing must be either explicitly or implicitly enabled for this
+ class. If the hash code is cached, avoid any reassignments of
+ fields involved in hash code computation or mutations of the objects
+ those fields point to after object creation. If such changes occur,
+ the behavior of the object's hash code is undefined.
+ :param bool auto_exc: If the class subclasses :class:`BaseException`
+ (which implicitly includes any subclass of any exception), the
+        following happens so that it behaves like a well-behaved Python
+        exception class:
+
+ - the values for *cmp* and *hash* are ignored and the instances compare
+ and hash by the instance's ids (N.B. ``attrs`` will *not* remove
+ existing implementations of ``__hash__`` or the equality methods. It
+          just won't add its own.),
+ - all attributes that are either passed into ``__init__`` or have a
+ default value are additionally available as a tuple in the ``args``
+ attribute,
+ - the value of *str* is ignored leaving ``__str__`` to base classes.
+
+ .. versionadded:: 16.0.0 *slots*
+ .. versionadded:: 16.1.0 *frozen*
+ .. versionadded:: 16.3.0 *str*
+ .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
+ .. versionchanged:: 17.1.0
+ *hash* supports ``None`` as value which is also the default now.
+ .. versionadded:: 17.3.0 *auto_attribs*
+ .. versionchanged:: 18.1.0
+ If *these* is passed, no attributes are deleted from the class body.
+ .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
+ .. versionadded:: 18.2.0 *weakref_slot*
+ .. deprecated:: 18.2.0
+ ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
+ :class:`DeprecationWarning` if the classes compared are subclasses of
+        each other. ``__eq__`` and ``__ne__`` never tried to compare subclasses
+ to each other.
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionadded:: 18.2.0 *cache_hash*
+ .. versionadded:: 19.1.0 *auto_exc*
+ """
+
+ def wrap(cls):
+
+ if getattr(cls, "__class__", None) is None:
+ raise TypeError("attrs only works with new-style classes.")
+
+ is_exc = auto_exc is True and issubclass(cls, BaseException)
+
+ builder = _ClassBuilder(
+ cls,
+ these,
+ slots,
+ frozen,
+ weakref_slot,
+ auto_attribs,
+ kw_only,
+ cache_hash,
+ is_exc,
+ )
+
+ if repr is True:
+ builder.add_repr(repr_ns)
+ if str is True:
+ builder.add_str()
+ if cmp is True and not is_exc:
+ builder.add_cmp()
+
+ if hash is not True and hash is not False and hash is not None:
+ # Can't use `hash in` because 1 == True for example.
+ raise TypeError(
+ "Invalid value for hash. Must be True, False, or None."
+ )
+ elif hash is False or (hash is None and cmp is False):
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled."
+ )
+ elif (
+ hash is True
+ or (hash is None and cmp is True and frozen is True)
+ and is_exc is False
+ ):
+ builder.add_hash()
+ else:
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled."
+ )
+ builder.make_unhashable()
+
+ if init is True:
+ builder.add_init()
+ else:
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " init must be True."
+ )
+
+ return builder.build_class()
+
+ # maybe_cls's type depends on the usage of the decorator. It's a class
+ # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
+ if maybe_cls is None:
+ return wrap
+ else:
+ return wrap(maybe_cls)
+
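+
+# A minimal usage sketch (``Coordinates`` is an illustrative class): a frozen,
+# slotted class gets __init__, __repr__, the comparison methods, and a
+# __hash__, and it rejects mutation after creation.
+#
+#     >>> import attr
+#     >>> @attr.s(frozen=True, slots=True)
+#     ... class Coordinates(object):
+#     ...     x = attr.ib()
+#     ...     y = attr.ib()
+#     >>> c = Coordinates(1, 2)
+#     >>> c
+#     Coordinates(x=1, y=2)
+#     >>> c == Coordinates(1, 2)
+#     True
+#     >>> c.x = 3
+#     Traceback (most recent call last):
+#         ...
+#     attr.exceptions.FrozenInstanceError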
+
+_attrs = attrs
+"""
+Internal alias so we can use it in functions that take an argument called
+*attrs*.
+"""
+
+
+if PY2:
+
+ def _has_frozen_base_class(cls):
+ """
+ Check whether *cls* has a frozen ancestor by looking at its
+ __setattr__.
+ """
+ return (
+ getattr(cls.__setattr__, "__module__", None)
+ == _frozen_setattrs.__module__
+ and cls.__setattr__.__name__ == _frozen_setattrs.__name__
+ )
+
+
+else:
+
+ def _has_frozen_base_class(cls):
+ """
+ Check whether *cls* has a frozen ancestor by looking at its
+ __setattr__.
+ """
+ return cls.__setattr__ == _frozen_setattrs
+
+
+def _attrs_to_tuple(obj, attrs):
+ """
+ Create a tuple of all values of *obj*'s *attrs*.
+ """
+ return tuple(getattr(obj, a.name) for a in attrs)
+
+
+def _make_hash(attrs, frozen, cache_hash):
+ attrs = tuple(
+ a
+ for a in attrs
+ if a.hash is True or (a.hash is None and a.cmp is True)
+ )
+
+ tab = " "
+
+ # We cache the generated hash methods for the same kinds of attributes.
+ sha1 = hashlib.sha1()
+ sha1.update(repr(attrs).encode("utf-8"))
+ unique_filename = "<attrs generated hash %s>" % (sha1.hexdigest(),)
+ type_hash = hash(unique_filename)
+
+ method_lines = ["def __hash__(self):"]
+
+ def append_hash_computation_lines(prefix, indent):
+ """
+ Generate the code for actually computing the hash code.
+ Below this will either be returned directly or used to compute
+ a value which is then cached, depending on the value of cache_hash
+ """
+ method_lines.extend(
+ [indent + prefix + "hash((", indent + " %d," % (type_hash,)]
+ )
+
+ for a in attrs:
+ method_lines.append(indent + " self.%s," % a.name)
+
+ method_lines.append(indent + " ))")
+
+ if cache_hash:
+ method_lines.append(tab + "if self.%s is None:" % _hash_cache_field)
+ if frozen:
+ append_hash_computation_lines(
+ "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2
+ )
+ method_lines.append(tab * 2 + ")") # close __setattr__
+ else:
+ append_hash_computation_lines(
+ "self.%s = " % _hash_cache_field, tab * 2
+ )
+ method_lines.append(tab + "return self.%s" % _hash_cache_field)
+ else:
+ append_hash_computation_lines("return ", tab)
+
+ script = "\n".join(method_lines)
+ globs = {}
+ locs = {}
+ bytecode = compile(script, unique_filename, "exec")
+ eval(bytecode, globs, locs)
+
+    # In order for debuggers like PDB to be able to step through the code,
+ # we add a fake linecache entry.
+ linecache.cache[unique_filename] = (
+ len(script),
+ None,
+ script.splitlines(True),
+ unique_filename,
+ )
+
+ return locs["__hash__"]
+
+
+def _add_hash(cls, attrs):
+ """
+ Add a hash method to *cls*.
+ """
+ cls.__hash__ = _make_hash(attrs, frozen=False, cache_hash=False)
+ return cls
+
+
+def __ne__(self, other):
+ """
+ Check equality and either forward a NotImplemented or return the result
+ negated.
+ """
+ result = self.__eq__(other)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return not result
+
+
+WARNING_CMP_ISINSTANCE = (
+    "Comparison of subclasses using __%s__ is deprecated and will be removed "
+ "in 2019."
+)
+
+
+def _make_cmp(attrs):
+ attrs = [a for a in attrs if a.cmp]
+
+ # We cache the generated eq methods for the same kinds of attributes.
+ sha1 = hashlib.sha1()
+ sha1.update(repr(attrs).encode("utf-8"))
+ unique_filename = "<attrs generated eq %s>" % (sha1.hexdigest(),)
+ lines = [
+ "def __eq__(self, other):",
+ " if other.__class__ is not self.__class__:",
+ " return NotImplemented",
+ ]
+ # We can't just do a big self.x = other.x and... clause due to
+ # irregularities like nan == nan is false but (nan,) == (nan,) is true.
+ if attrs:
+ lines.append(" return (")
+ others = [" ) == ("]
+ for a in attrs:
+ lines.append(" self.%s," % (a.name,))
+ others.append(" other.%s," % (a.name,))
+
+ lines += others + [" )"]
+ else:
+ lines.append(" return True")
+
+ script = "\n".join(lines)
+ globs = {}
+ locs = {}
+ bytecode = compile(script, unique_filename, "exec")
+ eval(bytecode, globs, locs)
+
+    # In order for debuggers like PDB to be able to step through the code,
+ # we add a fake linecache entry.
+ linecache.cache[unique_filename] = (
+ len(script),
+ None,
+ script.splitlines(True),
+ unique_filename,
+ )
+ eq = locs["__eq__"]
+ ne = __ne__
+
+ def attrs_to_tuple(obj):
+ """
+ Save us some typing.
+ """
+ return _attrs_to_tuple(obj, attrs)
+
+ def __lt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if isinstance(other, self.__class__):
+ if other.__class__ is not self.__class__:
+ warnings.warn(
+ WARNING_CMP_ISINSTANCE % ("lt",), DeprecationWarning
+ )
+ return attrs_to_tuple(self) < attrs_to_tuple(other)
+ else:
+ return NotImplemented
+
+ def __le__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if isinstance(other, self.__class__):
+ if other.__class__ is not self.__class__:
+ warnings.warn(
+ WARNING_CMP_ISINSTANCE % ("le",), DeprecationWarning
+ )
+ return attrs_to_tuple(self) <= attrs_to_tuple(other)
+ else:
+ return NotImplemented
+
+ def __gt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if isinstance(other, self.__class__):
+ if other.__class__ is not self.__class__:
+ warnings.warn(
+ WARNING_CMP_ISINSTANCE % ("gt",), DeprecationWarning
+ )
+ return attrs_to_tuple(self) > attrs_to_tuple(other)
+ else:
+ return NotImplemented
+
+ def __ge__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if isinstance(other, self.__class__):
+ if other.__class__ is not self.__class__:
+ warnings.warn(
+ WARNING_CMP_ISINSTANCE % ("ge",), DeprecationWarning
+ )
+ return attrs_to_tuple(self) >= attrs_to_tuple(other)
+ else:
+ return NotImplemented
+
+ return eq, ne, __lt__, __le__, __gt__, __ge__
+
+
+def _add_cmp(cls, attrs=None):
+ """
+ Add comparison methods to *cls*.
+ """
+ if attrs is None:
+ attrs = cls.__attrs_attrs__
+
+ cls.__eq__, cls.__ne__, cls.__lt__, cls.__le__, cls.__gt__, cls.__ge__ = _make_cmp( # noqa
+ attrs
+ )
+
+ return cls
+
+
+_already_repring = threading.local()
+
+
+def _make_repr(attrs, ns):
+ """
+    Make a repr method that includes relevant *attrs*, adding *ns* to the full name.
+ """
+ attr_names = tuple(a.name for a in attrs if a.repr)
+
+ def __repr__(self):
+ """
+ Automatically created by attrs.
+ """
+ try:
+ working_set = _already_repring.working_set
+ except AttributeError:
+ working_set = set()
+ _already_repring.working_set = working_set
+
+ if id(self) in working_set:
+ return "..."
+ real_cls = self.__class__
+ if ns is None:
+ qualname = getattr(real_cls, "__qualname__", None)
+ if qualname is not None:
+ class_name = qualname.rsplit(">.", 1)[-1]
+ else:
+ class_name = real_cls.__name__
+ else:
+ class_name = ns + "." + real_cls.__name__
+
+ # Since 'self' remains on the stack (i.e.: strongly referenced) for the
+ # duration of this call, it's safe to depend on id(...) stability, and
+        # we do not need to track the instance or worry about properties such
+        # as weakref-ability or hash-ability.
+ working_set.add(id(self))
+ try:
+ result = [class_name, "("]
+ first = True
+ for name in attr_names:
+ if first:
+ first = False
+ else:
+ result.append(", ")
+ result.extend((name, "=", repr(getattr(self, name, NOTHING))))
+ return "".join(result) + ")"
+ finally:
+ working_set.remove(id(self))
+
+ return __repr__
+
+
+def _add_repr(cls, ns=None, attrs=None):
+ """
+ Add a repr method to *cls*.
+ """
+ if attrs is None:
+ attrs = cls.__attrs_attrs__
+
+ cls.__repr__ = _make_repr(attrs, ns)
+ return cls
+
+
+def _make_init(
+ attrs, post_init, frozen, slots, cache_hash, base_attr_map, is_exc
+):
+ attrs = [a for a in attrs if a.init or a.default is not NOTHING]
+
+ # We cache the generated init methods for the same kinds of attributes.
+ sha1 = hashlib.sha1()
+ sha1.update(repr(attrs).encode("utf-8"))
+ unique_filename = "<attrs generated init {0}>".format(sha1.hexdigest())
+
+ script, globs, annotations = _attrs_to_init_script(
+ attrs, frozen, slots, post_init, cache_hash, base_attr_map, is_exc
+ )
+ locs = {}
+ bytecode = compile(script, unique_filename, "exec")
+ attr_dict = dict((a.name, a) for a in attrs)
+ globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})
+
+ if frozen is True:
+ # Save the lookup overhead in __init__ if we need to circumvent
+ # immutability.
+ globs["_cached_setattr"] = _obj_setattr
+
+ eval(bytecode, globs, locs)
+
+    # In order for debuggers like PDB to be able to step through the code,
+ # we add a fake linecache entry.
+ linecache.cache[unique_filename] = (
+ len(script),
+ None,
+ script.splitlines(True),
+ unique_filename,
+ )
+
+ __init__ = locs["__init__"]
+ __init__.__annotations__ = annotations
+
+ return __init__
+
+
+def fields(cls):
+ """
+ Return the tuple of ``attrs`` attributes for a class.
+
+ The tuple also allows accessing the fields by their names (see below for
+ examples).
+
+ :param type cls: Class to introspect.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ :rtype: tuple (with name accessors) of :class:`attr.Attribute`
+
+ .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
+ by name.
+ """
+ if not isclass(cls):
+ raise TypeError("Passed object must be a class.")
+ attrs = getattr(cls, "__attrs_attrs__", None)
+ if attrs is None:
+ raise NotAnAttrsClassError(
+ "{cls!r} is not an attrs-decorated class.".format(cls=cls)
+ )
+ return attrs
+
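+# A minimal usage sketch (``Point`` is an illustrative class):
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class Point(object):
+#     ...     x = attr.ib()
+#     ...     y = attr.ib()
+#     >>> [a.name for a in attr.fields(Point)]
+#     ['x', 'y']
+#     >>> attr.fields(Point).x is attr.fields(Point)[0]
+#     True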
+
+def fields_dict(cls):
+ """
+ Return an ordered dictionary of ``attrs`` attributes for a class, whose
+ keys are the attribute names.
+
+ :param type cls: Class to introspect.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ :rtype: an ordered dict where keys are attribute names and values are
+ :class:`attr.Attribute`\\ s. This will be a :class:`dict` if it's
+ naturally ordered like on Python 3.6+ or an
+ :class:`~collections.OrderedDict` otherwise.
+
+ .. versionadded:: 18.1.0
+ """
+ if not isclass(cls):
+ raise TypeError("Passed object must be a class.")
+ attrs = getattr(cls, "__attrs_attrs__", None)
+ if attrs is None:
+ raise NotAnAttrsClassError(
+ "{cls!r} is not an attrs-decorated class.".format(cls=cls)
+ )
+ return ordered_dict(((a.name, a) for a in attrs))
+
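+# A minimal usage sketch (``Point`` is an illustrative class): the values are
+# the same :class:`Attribute` instances that :func:`fields` returns.
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class Point(object):
+#     ...     x = attr.ib()
+#     ...     y = attr.ib()
+#     >>> list(attr.fields_dict(Point))
+#     ['x', 'y']
+#     >>> attr.fields_dict(Point)["x"] is attr.fields(Point).x
+#     True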
+
+def validate(inst):
+ """
+ Validate all attributes on *inst* that have a validator.
+
+ Lets all exceptions through.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
+ """
+ if _config._run_validators is False:
+ return
+
+ for a in fields(inst.__class__):
+ v = a.validator
+ if v is not None:
+ v(inst, a, getattr(inst, a.name))
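+
+# A minimal usage sketch, assuming the public ``attr`` package:
+#
+#     import attr
+#     from attr.validators import instance_of
+#
+#     @attr.s
+#     class C(object):
+#         x = attr.ib(validator=instance_of(int))
+#
+#     c = C(1)
+#     c.x = "1"          # plain assignment is not validated ...
+#     attr.validate(c)   # ... but this call raises TypeError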
+
+
+def _is_slot_cls(cls):
+ return "__slots__" in cls.__dict__
+
+
+def _is_slot_attr(a_name, base_attr_map):
+ """
+ Check if the attribute name comes from a slot class.
+ """
+ return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name])
+
+
+def _attrs_to_init_script(
+ attrs, frozen, slots, post_init, cache_hash, base_attr_map, is_exc
+):
+ """
+ Return a script of an initializer for *attrs*, a dict of globals, and the
+ type annotations for the generated ``__init__``.
+
+ The globals are expected by the generated script.
+
+ If *frozen* is True, we cannot set the attributes directly so we use
+ a cached ``object.__setattr__``.
+ """
+ lines = []
+ any_slot_ancestors = any(
+ _is_slot_attr(a.name, base_attr_map) for a in attrs
+ )
+ if frozen is True:
+ if slots is True:
+ lines.append(
+ # Circumvent the __setattr__ descriptor to save one lookup per
+ # assignment.
+ # Note _setattr will be used again below if cache_hash is True
+ "_setattr = _cached_setattr.__get__(self, self.__class__)"
+ )
+
+ def fmt_setter(attr_name, value_var):
+ return "_setattr('%(attr_name)s', %(value_var)s)" % {
+ "attr_name": attr_name,
+ "value_var": value_var,
+ }
+
+ def fmt_setter_with_converter(attr_name, value_var):
+ conv_name = _init_converter_pat.format(attr_name)
+ return "_setattr('%(attr_name)s', %(conv)s(%(value_var)s))" % {
+ "attr_name": attr_name,
+ "value_var": value_var,
+ "conv": conv_name,
+ }
+
+ else:
+ # Dict frozen classes assign directly to __dict__.
+ # But only if the attribute doesn't come from an ancestor slot
+ # class.
+ # Note _inst_dict will be used again below if cache_hash is True
+ lines.append("_inst_dict = self.__dict__")
+ if any_slot_ancestors:
+ lines.append(
+ # Circumvent the __setattr__ descriptor to save one lookup
+ # per assignment.
+ "_setattr = _cached_setattr.__get__(self, self.__class__)"
+ )
+
+ def fmt_setter(attr_name, value_var):
+ if _is_slot_attr(attr_name, base_attr_map):
+ res = "_setattr('%(attr_name)s', %(value_var)s)" % {
+ "attr_name": attr_name,
+ "value_var": value_var,
+ }
+ else:
+ res = "_inst_dict['%(attr_name)s'] = %(value_var)s" % {
+ "attr_name": attr_name,
+ "value_var": value_var,
+ }
+ return res
+
+ def fmt_setter_with_converter(attr_name, value_var):
+ conv_name = _init_converter_pat.format(attr_name)
+ if _is_slot_attr(attr_name, base_attr_map):
+ tmpl = "_setattr('%(attr_name)s', %(c)s(%(value_var)s))"
+ else:
+ tmpl = "_inst_dict['%(attr_name)s'] = %(c)s(%(value_var)s)"
+ return tmpl % {
+ "attr_name": attr_name,
+ "value_var": value_var,
+ "c": conv_name,
+ }
+
+ else:
+ # Not frozen.
+ def fmt_setter(attr_name, value):
+ return "self.%(attr_name)s = %(value)s" % {
+ "attr_name": attr_name,
+ "value": value,
+ }
+
+ def fmt_setter_with_converter(attr_name, value_var):
+ conv_name = _init_converter_pat.format(attr_name)
+ return "self.%(attr_name)s = %(conv)s(%(value_var)s)" % {
+ "attr_name": attr_name,
+ "value_var": value_var,
+ "conv": conv_name,
+ }
+
+ args = []
+ kw_only_args = []
+ attrs_to_validate = []
+
+ # This is a dictionary of names to validator and converter callables.
+ # Injecting this into __init__ globals lets us avoid lookups.
+ names_for_globals = {}
+ annotations = {"return": None}
+
+ for a in attrs:
+ if a.validator:
+ attrs_to_validate.append(a)
+ attr_name = a.name
+ arg_name = a.name.lstrip("_")
+ has_factory = isinstance(a.default, Factory)
+ if has_factory and a.default.takes_self:
+ maybe_self = "self"
+ else:
+ maybe_self = ""
+ if a.init is False:
+ if has_factory:
+ init_factory_name = _init_factory_pat.format(a.name)
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + "({0})".format(maybe_self),
+ )
+ )
+ conv_name = _init_converter_pat.format(a.name)
+ names_for_globals[conv_name] = a.converter
+ else:
+ lines.append(
+ fmt_setter(
+ attr_name,
+ init_factory_name + "({0})".format(maybe_self),
+ )
+ )
+ names_for_globals[init_factory_name] = a.default.factory
+ else:
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ "attr_dict['{attr_name}'].default".format(
+ attr_name=attr_name
+ ),
+ )
+ )
+ conv_name = _init_converter_pat.format(a.name)
+ names_for_globals[conv_name] = a.converter
+ else:
+ lines.append(
+ fmt_setter(
+ attr_name,
+ "attr_dict['{attr_name}'].default".format(
+ attr_name=attr_name
+ ),
+ )
+ )
+ elif a.default is not NOTHING and not has_factory:
+ arg = "{arg_name}=attr_dict['{attr_name}'].default".format(
+ arg_name=arg_name, attr_name=attr_name
+ )
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+ if a.converter is not None:
+ lines.append(fmt_setter_with_converter(attr_name, arg_name))
+ names_for_globals[
+ _init_converter_pat.format(a.name)
+ ] = a.converter
+ else:
+ lines.append(fmt_setter(attr_name, arg_name))
+ elif has_factory:
+ arg = "{arg_name}=NOTHING".format(arg_name=arg_name)
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+ lines.append(
+ "if {arg_name} is not NOTHING:".format(arg_name=arg_name)
+ )
+ init_factory_name = _init_factory_pat.format(a.name)
+ if a.converter is not None:
+ lines.append(
+ " " + fmt_setter_with_converter(attr_name, arg_name)
+ )
+ lines.append("else:")
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + "({0})".format(maybe_self),
+ )
+ )
+ names_for_globals[
+ _init_converter_pat.format(a.name)
+ ] = a.converter
+ else:
+ lines.append(" " + fmt_setter(attr_name, arg_name))
+ lines.append("else:")
+ lines.append(
+ " "
+ + fmt_setter(
+ attr_name,
+ init_factory_name + "({0})".format(maybe_self),
+ )
+ )
+ names_for_globals[init_factory_name] = a.default.factory
+ else:
+ if a.kw_only:
+ kw_only_args.append(arg_name)
+ else:
+ args.append(arg_name)
+ if a.converter is not None:
+ lines.append(fmt_setter_with_converter(attr_name, arg_name))
+ names_for_globals[
+ _init_converter_pat.format(a.name)
+ ] = a.converter
+ else:
+ lines.append(fmt_setter(attr_name, arg_name))
+
+ if a.init is True and a.converter is None and a.type is not None:
+ annotations[arg_name] = a.type
+
+ if attrs_to_validate: # we can skip this if there are no validators.
+ names_for_globals["_config"] = _config
+ lines.append("if _config._run_validators is True:")
+ for a in attrs_to_validate:
+ val_name = "__attr_validator_{}".format(a.name)
+ attr_name = "__attr_{}".format(a.name)
+ lines.append(
+ " {}(self, {}, self.{})".format(val_name, attr_name, a.name)
+ )
+ names_for_globals[val_name] = a.validator
+ names_for_globals[attr_name] = a
+ if post_init:
+ lines.append("self.__attrs_post_init__()")
+
+ # Because the hash code cache field is set only after __attrs_post_init__
+ # is called, a crash will result if post-init tries to access the hash
+ # code. This seemed preferable to setting it beforehand, in which case
+ # alteration to field values during post-init combined with post-init
+ # accessing the hash code would result in silent bugs.
+ if cache_hash:
+ if frozen:
+ if slots:
+ # if frozen and slots, then _setattr defined above
+ init_hash_cache = "_setattr('%s', %s)"
+ else:
+ # if frozen and not slots, then _inst_dict defined above
+ init_hash_cache = "_inst_dict['%s'] = %s"
+ else:
+ init_hash_cache = "self.%s = %s"
+ lines.append(init_hash_cache % (_hash_cache_field, "None"))
+
+ # For exceptions we rely on BaseException.__init__ for proper
+ # initialization.
+ if is_exc:
+ vals = ",".join("self." + a.name for a in attrs if a.init)
+
+ lines.append("BaseException.__init__(self, %s)" % (vals,))
+
+ args = ", ".join(args)
+ if kw_only_args:
+ if PY2:
+ raise PythonTooOldError(
+ "Keyword-only arguments only work on Python 3 and later."
+ )
+
+ args += "{leading_comma}*, {kw_only_args}".format(
+ leading_comma=", " if args else "",
+ kw_only_args=", ".join(kw_only_args),
+ )
+ return (
+ """\
+def __init__(self, {args}):
+ {lines}
+""".format(
+ args=args, lines="\n ".join(lines) if lines else "pass"
+ ),
+ names_for_globals,
+ annotations,
+ )
+
+
+class Attribute(object):
+ """
+ *Read-only* representation of an attribute.
+
+ :attribute name: The name of the attribute.
+
+ Plus *all* arguments of :func:`attr.ib`.
+
+ For the version history of the fields, see :func:`attr.ib`.
+ """
+
+ __slots__ = (
+ "name",
+ "default",
+ "validator",
+ "repr",
+ "cmp",
+ "hash",
+ "init",
+ "metadata",
+ "type",
+ "converter",
+ "kw_only",
+ )
+
+ def __init__(
+ self,
+ name,
+ default,
+ validator,
+ repr,
+ cmp,
+ hash,
+ init,
+ convert=None,
+ metadata=None,
+ type=None,
+ converter=None,
+ kw_only=False,
+ ):
+ # Cache this descriptor here to speed things up later.
+ bound_setattr = _obj_setattr.__get__(self, Attribute)
+
+ # Despite the big red warning, people *do* instantiate `Attribute`
+ # themselves.
+ if convert is not None:
+ if converter is not None:
+ raise RuntimeError(
+ "Can't pass both `convert` and `converter`. "
+ "Please use `converter` only."
+ )
+ warnings.warn(
+ "The `convert` argument is deprecated in favor of `converter`."
+ " It will be removed after 2019/01.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ converter = convert
+
+ bound_setattr("name", name)
+ bound_setattr("default", default)
+ bound_setattr("validator", validator)
+ bound_setattr("repr", repr)
+ bound_setattr("cmp", cmp)
+ bound_setattr("hash", hash)
+ bound_setattr("init", init)
+ bound_setattr("converter", converter)
+ bound_setattr(
+ "metadata",
+ (
+ metadata_proxy(metadata)
+ if metadata
+ else _empty_metadata_singleton
+ ),
+ )
+ bound_setattr("type", type)
+ bound_setattr("kw_only", kw_only)
+
+ def __setattr__(self, name, value):
+ raise FrozenInstanceError()
+
+ @property
+ def convert(self):
+ warnings.warn(
+ "The `convert` attribute is deprecated in favor of `converter`. "
+ "It will be removed after 2019/01.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.converter
+
+ @classmethod
+ def from_counting_attr(cls, name, ca, type=None):
+ # type holds the annotated value. deal with conflicts:
+ if type is None:
+ type = ca.type
+ elif ca.type is not None:
+ raise ValueError(
+ "Type annotation and type argument cannot both be present"
+ )
+ inst_dict = {
+ k: getattr(ca, k)
+ for k in Attribute.__slots__
+ if k
+ not in (
+ "name",
+ "validator",
+ "default",
+ "type",
+ "convert",
+ ) # exclude methods and deprecated alias
+ }
+ return cls(
+ name=name,
+ validator=ca._validator,
+ default=ca._default,
+ type=type,
+ **inst_dict
+ )
+
+ # Don't use attr.assoc since fields(Attribute) doesn't work
+ def _assoc(self, **changes):
+ """
+ Copy *self* and apply *changes*.
+ """
+ new = copy.copy(self)
+
+ new._setattrs(changes.items())
+
+ return new
+
+ # Don't use _add_pickle since fields(Attribute) doesn't work
+ def __getstate__(self):
+ """
+ Play nice with pickle.
+ """
+ return tuple(
+ getattr(self, name) if name != "metadata" else dict(self.metadata)
+ for name in self.__slots__
+ )
+
+ def __setstate__(self, state):
+ """
+ Play nice with pickle.
+ """
+ self._setattrs(zip(self.__slots__, state))
+
+ def _setattrs(self, name_values_pairs):
+ bound_setattr = _obj_setattr.__get__(self, Attribute)
+ for name, value in name_values_pairs:
+ if name != "metadata":
+ bound_setattr(name, value)
+ else:
+ bound_setattr(
+ name,
+ metadata_proxy(value)
+ if value
+ else _empty_metadata_singleton,
+ )
+
+
+_a = [
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=True,
+ hash=(name != "metadata"),
+ init=True,
+ )
+ for name in Attribute.__slots__
+ if name != "convert" # XXX: remove once `convert` is gone
+]
+
+Attribute = _add_hash(
+ _add_cmp(_add_repr(Attribute, attrs=_a), attrs=_a),
+ attrs=[a for a in _a if a.hash],
+)
+
+
+class _CountingAttr(object):
+ """
+ Intermediate representation of attributes that uses a counter to preserve
+ the order in which the attributes have been defined.
+
+ *Internal* data structure of the attrs library. Running into it is most
+ likely the result of a bug like a forgotten `@attr.s` decorator.
+ """
+
+ __slots__ = (
+ "counter",
+ "_default",
+ "repr",
+ "cmp",
+ "hash",
+ "init",
+ "metadata",
+ "_validator",
+ "converter",
+ "type",
+ "kw_only",
+ )
+ __attrs_attrs__ = tuple(
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=True,
+ hash=True,
+ init=True,
+ kw_only=False,
+ )
+ for name in ("counter", "_default", "repr", "cmp", "hash", "init")
+ ) + (
+ Attribute(
+ name="metadata",
+ default=None,
+ validator=None,
+ repr=True,
+ cmp=True,
+ hash=False,
+ init=True,
+ kw_only=False,
+ ),
+ )
+ cls_counter = 0
+
+ def __init__(
+ self,
+ default,
+ validator,
+ repr,
+ cmp,
+ hash,
+ init,
+ converter,
+ metadata,
+ type,
+ kw_only,
+ ):
+ _CountingAttr.cls_counter += 1
+ self.counter = _CountingAttr.cls_counter
+ self._default = default
+ # If validator is a list/tuple, wrap it using helper validator.
+ if validator and isinstance(validator, (list, tuple)):
+ self._validator = and_(*validator)
+ else:
+ self._validator = validator
+ self.repr = repr
+ self.cmp = cmp
+ self.hash = hash
+ self.init = init
+ self.converter = converter
+ self.metadata = metadata
+ self.type = type
+ self.kw_only = kw_only
+
+ def validator(self, meth):
+ """
+ Decorator that adds *meth* to the list of validators.
+
+ Returns *meth* unchanged.
+
+ .. versionadded:: 17.1.0
+ """
+ if self._validator is None:
+ self._validator = meth
+ else:
+ self._validator = and_(self._validator, meth)
+ return meth
+
+ def default(self, meth):
+ """
+ Decorator that allows setting the default for an attribute.
+
+ Returns *meth* unchanged.
+
+ :raises DefaultAlreadySetError: If default has been set before.
+
+ .. versionadded:: 17.1.0
+ """
+ if self._default is not NOTHING:
+ raise DefaultAlreadySetError()
+
+ self._default = Factory(meth, takes_self=True)
+
+ return meth
+
+
+_CountingAttr = _add_cmp(_add_repr(_CountingAttr))
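+
+# A minimal sketch of the ``validator``/``default`` decorators defined above,
+# assuming the public ``attr`` package:
+#
+#     import attr
+#
+#     @attr.s
+#     class C(object):
+#         x = attr.ib()
+#
+#         @x.validator
+#         def _check_x(self, attribute, value):
+#             if value < 0:
+#                 raise ValueError("x must be non-negative")
+#
+#         @x.default
+#         def _default_x(self):
+#             return 0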
+
+
+@attrs(slots=True, init=False, hash=True)
+class Factory(object):
+ """
+ Stores a factory callable.
+
+ If passed as the default value to :func:`attr.ib`, the factory is used to
+ generate a new value.
+
+ :param callable factory: A callable that takes either no arguments or
+ exactly one mandatory positional argument, depending on *takes_self*.
+ :param bool takes_self: Pass the partially initialized instance that is
+ being initialized as a positional argument.
+
+ .. versionadded:: 17.1.0 *takes_self*
+ """
+
+ factory = attrib()
+ takes_self = attrib()
+
+ def __init__(self, factory, takes_self=False):
+ """
+ `Factory` is part of the default machinery so if we want a default
+ value here, we have to implement it ourselves.
+ """
+ self.factory = factory
+ self.takes_self = takes_self
+
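+# A minimal usage sketch, assuming the public ``attr`` package:
+#
+#     import attr
+#
+#     @attr.s
+#     class C(object):
+#         items = attr.ib(default=attr.Factory(list))
+#         doubled = attr.ib(
+#             default=attr.Factory(lambda self: self.items * 2, takes_self=True)
+#         )
+#
+#     C()              # C(items=[], doubled=[])
+#     C(items=[1])     # C(items=[1], doubled=[1, 1])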
+
+def make_class(name, attrs, bases=(object,), **attributes_arguments):
+ """
+ A quick way to create a new class called *name* with *attrs*.
+
+ :param name: The name for the new class.
+ :type name: str
+
+ :param attrs: A list of names or a dictionary of mappings of names to
+ attributes.
+
+ If *attrs* is a list or an ordered dict (:class:`dict` on Python 3.6+,
+ :class:`collections.OrderedDict` otherwise), the order is deduced from
+ the order of the names or attributes inside *attrs*. Otherwise the
+ order of the definition of the attributes is used.
+ :type attrs: :class:`list` or :class:`dict`
+
+ :param tuple bases: Classes that the new class will subclass.
+
+ :param attributes_arguments: Passed unmodified to :func:`attr.s`.
+
+ :return: A new class with *attrs*.
+ :rtype: type
+
+ .. versionadded:: 17.1.0 *bases*
+ .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
+ """
+ if isinstance(attrs, dict):
+ cls_dict = attrs
+ elif isinstance(attrs, (list, tuple)):
+ cls_dict = dict((a, attrib()) for a in attrs)
+ else:
+ raise TypeError("attrs argument must be a dict or a list.")
+
+ post_init = cls_dict.pop("__attrs_post_init__", None)
+ type_ = type(
+ name,
+ bases,
+ {} if post_init is None else {"__attrs_post_init__": post_init},
+ )
+ # For pickling to work, the __module__ variable needs to be set to the
+ # frame where the class is created. Bypass this step in environments where
+ # sys._getframe is not defined (Jython for example) or sys._getframe is not
+ # defined for arguments greater than 0 (IronPython).
+ try:
+ type_.__module__ = sys._getframe(1).f_globals.get(
+ "__name__", "__main__"
+ )
+ except (AttributeError, ValueError):
+ pass
+
+ return _attrs(these=cls_dict, **attributes_arguments)(type_)
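+
+# A minimal usage sketch, assuming the public ``attr`` package:
+#
+#     import attr
+#
+#     Point = attr.make_class("Point", ["x", "y"])
+#     Point(1, 2)                                   # Point(x=1, y=2)
+#
+#     # Attribute definitions may also be passed explicitly:
+#     C = attr.make_class("C", {"x": attr.ib(default=42)}, frozen=True)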
+
+
+# These are required within this module, so we define them here and merely
+# import them into .validators.
+
+
+@attrs(slots=True, hash=True)
+class _AndValidator(object):
+ """
+ Compose many validators to a single one.
+ """
+
+ _validators = attrib()
+
+ def __call__(self, inst, attr, value):
+ for v in self._validators:
+ v(inst, attr, value)
+
+
+def and_(*validators):
+ """
+ A validator that composes multiple validators into one.
+
+ When called on a value, it runs all wrapped validators.
+
+ :param validators: Arbitrary number of validators.
+ :type validators: callables
+
+ .. versionadded:: 17.1.0
+ """
+ vals = []
+ for validator in validators:
+ vals.extend(
+ validator._validators
+ if isinstance(validator, _AndValidator)
+ else [validator]
+ )
+
+ return _AndValidator(tuple(vals))
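+
+# A minimal usage sketch, assuming the public ``attr`` package:
+#
+#     import attr
+#     from attr.validators import and_, instance_of
+#
+#     def _positive(instance, attribute, value):
+#         if value <= 0:
+#             raise ValueError("%s must be positive" % attribute.name)
+#
+#     @attr.s
+#     class C(object):
+#         x = attr.ib(validator=and_(instance_of(int), _positive))
+#
+#     C(1)     # fine
+#     C(-1)    # raises ValueError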
diff --git a/third_party/python/attrs/src/attr/converters.py b/third_party/python/attrs/src/attr/converters.py
new file mode 100644
index 0000000000..37c4a07a06
--- /dev/null
+++ b/third_party/python/attrs/src/attr/converters.py
@@ -0,0 +1,78 @@
+"""
+Commonly useful converters.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from ._make import NOTHING, Factory
+
+
+def optional(converter):
+ """
+ A converter that allows an attribute to be optional. An optional attribute
+ is one which can be set to ``None``.
+
+ :param callable converter: the converter that is used for non-``None``
+ values.
+
+ .. versionadded:: 17.1.0
+ """
+
+ def optional_converter(val):
+ if val is None:
+ return None
+ return converter(val)
+
+ return optional_converter
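+
+# A minimal usage sketch, assuming the public ``attr`` package:
+#
+#     import attr
+#     from attr.converters import optional
+#
+#     @attr.s
+#     class C(object):
+#         x = attr.ib(converter=optional(int))
+#
+#     C("1")    # C(x=1)
+#     C(None)   # C(x=None)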
+
+
+def default_if_none(default=NOTHING, factory=None):
+ """
+ A converter that allows replacing ``None`` values with *default* or the
+ result of *factory*.
+
+ :param default: Value to be used if ``None`` is passed. Passing an instance
+ of :class:`attr.Factory` is supported; however, the ``takes_self`` option
+ is *not*.
+ :param callable factory: A callable that takes no parameters and whose
+ result is used if ``None`` is passed.
+
+ :raises TypeError: If **neither** *default* **nor** *factory* is passed.
+ :raises TypeError: If **both** *default* and *factory* are passed.
+ :raises ValueError: If an instance of :class:`attr.Factory` is passed with
+ ``takes_self=True``.
+
+ .. versionadded:: 18.2.0
+ """
+ if default is NOTHING and factory is None:
+ raise TypeError("Must pass either `default` or `factory`.")
+
+ if default is not NOTHING and factory is not None:
+ raise TypeError(
+ "Must pass either `default` or `factory` but not both."
+ )
+
+ if factory is not None:
+ default = Factory(factory)
+
+ if isinstance(default, Factory):
+ if default.takes_self:
+ raise ValueError(
+ "`takes_self` is not supported by default_if_none."
+ )
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default.factory()
+
+ else:
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default
+
+ return default_if_none_converter
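+
+# A minimal usage sketch, assuming the public ``attr`` package:
+#
+#     import attr
+#     from attr.converters import default_if_none
+#
+#     @attr.s
+#     class C(object):
+#         x = attr.ib(converter=default_if_none(""), default=None)
+#         y = attr.ib(converter=default_if_none(factory=list), default=None)
+#
+#     C()          # C(x='', y=[])
+#     C(x=None)    # also becomes C(x='', y=[])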
diff --git a/third_party/python/attrs/src/attr/converters.pyi b/third_party/python/attrs/src/attr/converters.pyi
new file mode 100644
index 0000000000..63b2a3866e
--- /dev/null
+++ b/third_party/python/attrs/src/attr/converters.pyi
@@ -0,0 +1,12 @@
+from typing import TypeVar, Optional, Callable, overload
+from . import _ConverterType
+
+_T = TypeVar("_T")
+
+def optional(
+ converter: _ConverterType[_T]
+) -> _ConverterType[Optional[_T]]: ...
+@overload
+def default_if_none(default: _T) -> _ConverterType[_T]: ...
+@overload
+def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType[_T]: ...
diff --git a/third_party/python/attrs/src/attr/exceptions.py b/third_party/python/attrs/src/attr/exceptions.py
new file mode 100644
index 0000000000..b12e41e97a
--- /dev/null
+++ b/third_party/python/attrs/src/attr/exceptions.py
@@ -0,0 +1,57 @@
+from __future__ import absolute_import, division, print_function
+
+
+class FrozenInstanceError(AttributeError):
+ """
+ An attempt has been made to modify a frozen/immutable instance.
+
+ It mirrors the behavior of ``namedtuples`` by using the same error message
+ and subclassing :exc:`AttributeError`.
+
+ .. versionadded:: 16.1.0
+ """
+
+ msg = "can't set attribute"
+ args = [msg]
+
+
+class AttrsAttributeNotFoundError(ValueError):
+ """
+ An ``attrs`` function couldn't find an attribute that the user asked for.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class NotAnAttrsClassError(ValueError):
+ """
+ A non-``attrs`` class has been passed into an ``attrs`` function.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class DefaultAlreadySetError(RuntimeError):
+ """
+ A default has been set using ``attr.ib()`` and an attempt has been made to
+ reset it using the decorator.
+
+ .. versionadded:: 17.1.0
+ """
+
+
+class UnannotatedAttributeError(RuntimeError):
+ """
+ A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
+ annotation.
+
+ .. versionadded:: 17.3.0
+ """
+
+
+class PythonTooOldError(RuntimeError):
+ """
+ An ``attrs`` feature requiring a more recent Python version has been used.
+
+ .. versionadded:: 18.2.0
+ """
diff --git a/third_party/python/attrs/src/attr/exceptions.pyi b/third_party/python/attrs/src/attr/exceptions.pyi
new file mode 100644
index 0000000000..48fffcc1e2
--- /dev/null
+++ b/third_party/python/attrs/src/attr/exceptions.pyi
@@ -0,0 +1,7 @@
+class FrozenInstanceError(AttributeError):
+ msg: str = ...
+
+class AttrsAttributeNotFoundError(ValueError): ...
+class NotAnAttrsClassError(ValueError): ...
+class DefaultAlreadySetError(RuntimeError): ...
+class UnannotatedAttributeError(RuntimeError): ...
diff --git a/third_party/python/attrs/src/attr/filters.py b/third_party/python/attrs/src/attr/filters.py
new file mode 100644
index 0000000000..f1c69b8bac
--- /dev/null
+++ b/third_party/python/attrs/src/attr/filters.py
@@ -0,0 +1,52 @@
+"""
+Commonly useful filters for :func:`attr.asdict`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from ._compat import isclass
+from ._make import Attribute
+
+
+def _split_what(what):
+ """
+ Returns a tuple of `frozenset`s of classes and attributes.
+ """
+ return (
+ frozenset(cls for cls in what if isclass(cls)),
+ frozenset(cls for cls in what if isinstance(cls, Attribute)),
+ )
+
+
+def include(*what):
+ """
+ Whitelist *what*.
+
+ :param what: What to whitelist.
+ :type what: :class:`list` of :class:`type` or :class:`attr.Attribute`\\ s
+
+ :rtype: :class:`callable`
+ """
+ cls, attrs = _split_what(what)
+
+ def include_(attribute, value):
+ return value.__class__ in cls or attribute in attrs
+
+ return include_
+
+
+def exclude(*what):
+ """
+ Blacklist *what*.
+
+ :param what: What to blacklist.
+ :type what: :class:`list` of classes or :class:`attr.Attribute`\\ s.
+
+ :rtype: :class:`callable`
+ """
+ cls, attrs = _split_what(what)
+
+ def exclude_(attribute, value):
+ return value.__class__ not in cls and attribute not in attrs
+
+ return exclude_
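+
+# A minimal usage sketch, assuming the public ``attr`` package:
+#
+#     import attr
+#     from attr.filters import exclude, include
+#
+#     @attr.s
+#     class C(object):
+#         x = attr.ib()
+#         secret = attr.ib()
+#
+#     c = C(1, "hunter2")
+#     attr.asdict(c, filter=exclude(attr.fields(C).secret))   # {'x': 1}
+#     attr.asdict(c, filter=include(int))                     # {'x': 1}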
diff --git a/third_party/python/attrs/src/attr/filters.pyi b/third_party/python/attrs/src/attr/filters.pyi
new file mode 100644
index 0000000000..68368fe2b9
--- /dev/null
+++ b/third_party/python/attrs/src/attr/filters.pyi
@@ -0,0 +1,5 @@
+from typing import Union, Any
+from . import Attribute, _FilterType
+
+def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
+def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
diff --git a/third_party/python/attrs/src/attr/py.typed b/third_party/python/attrs/src/attr/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/attrs/src/attr/py.typed
diff --git a/third_party/python/attrs/src/attr/validators.py b/third_party/python/attrs/src/attr/validators.py
new file mode 100644
index 0000000000..7fc4446be4
--- /dev/null
+++ b/third_party/python/attrs/src/attr/validators.py
@@ -0,0 +1,282 @@
+"""
+Commonly useful validators.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from ._make import _AndValidator, and_, attrib, attrs
+
+
+__all__ = ["and_", "in_", "instance_of", "optional", "provides"]
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InstanceOfValidator(object):
+ type = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not isinstance(value, self.type):
+ raise TypeError(
+ "'{name}' must be {type!r} (got {value!r} that is a "
+ "{actual!r}).".format(
+ name=attr.name,
+ type=self.type,
+ actual=value.__class__,
+ value=value,
+ ),
+ attr,
+ self.type,
+ value,
+ )
+
+ def __repr__(self):
+ return "<instance_of validator for type {type!r}>".format(
+ type=self.type
+ )
+
+
+def instance_of(type):
+ """
+ A validator that raises a :exc:`TypeError` if the initializer is called
+ with a wrong type for this particular attribute (checks are performed using
+ :func:`isinstance`, so it is also valid to pass a tuple of types).
+
+ :param type: The type to check for.
+ :type type: type or tuple of types
+
+ :raises TypeError: With a human readable error message, the attribute
+ (of type :class:`attr.Attribute`), the expected type, and the value it
+ got.
+ """
+ return _InstanceOfValidator(type)
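+
+# A minimal usage sketch, assuming the public ``attr`` package:
+#
+#     import attr
+#     from attr.validators import instance_of
+#
+#     @attr.s
+#     class C(object):
+#         x = attr.ib(validator=instance_of(int))
+#
+#     C(42)      # fine
+#     C("42")    # raises TypeError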
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _ProvidesValidator(object):
+ interface = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.interface.providedBy(value):
+ raise TypeError(
+ "'{name}' must provide {interface!r} which {value!r} "
+ "doesn't.".format(
+ name=attr.name, interface=self.interface, value=value
+ ),
+ attr,
+ self.interface,
+ value,
+ )
+
+ def __repr__(self):
+ return "<provides validator for interface {interface!r}>".format(
+ interface=self.interface
+ )
+
+
+def provides(interface):
+ """
+ A validator that raises a :exc:`TypeError` if the initializer is called
+ with an object that does not provide the requested *interface* (checks are
+ performed using ``interface.providedBy(value)``; see `zope.interface
+ <https://zopeinterface.readthedocs.io/en/latest/>`_).
+
+ :param zope.interface.Interface interface: The interface to check for.
+
+ :raises TypeError: With a human readable error message, the attribute
+ (of type :class:`attr.Attribute`), the expected interface, and the
+ value it got.
+ """
+ return _ProvidesValidator(interface)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _OptionalValidator(object):
+ validator = attrib()
+
+ def __call__(self, inst, attr, value):
+ if value is None:
+ return
+
+ self.validator(inst, attr, value)
+
+ def __repr__(self):
+ return "<optional validator for {what} or None>".format(
+ what=repr(self.validator)
+ )
+
+
+def optional(validator):
+ """
+ A validator that makes an attribute optional. An optional attribute is one
+ which can be set to ``None`` in addition to satisfying the requirements of
+ the sub-validator.
+
+ :param validator: A validator (or a list of validators) that is used for
+ non-``None`` values.
+ :type validator: callable or :class:`list` of callables.
+
+ .. versionadded:: 15.1.0
+ .. versionchanged:: 17.1.0 *validator* can be a list of validators.
+ """
+ if isinstance(validator, list):
+ return _OptionalValidator(_AndValidator(validator))
+ return _OptionalValidator(validator)
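+
+# A minimal sketch, reusing ``instance_of`` from the example above:
+#
+#     x = attr.ib(validator=optional(instance_of(int)))   # accepts ints or None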
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InValidator(object):
+ options = attrib()
+
+ def __call__(self, inst, attr, value):
+ try:
+ in_options = value in self.options
+ except TypeError: # e.g. `1 in "abc"`
+ in_options = False
+
+ if not in_options:
+ raise ValueError(
+ "'{name}' must be in {options!r} (got {value!r})".format(
+ name=attr.name, options=self.options, value=value
+ )
+ )
+
+ def __repr__(self):
+ return "<in_ validator with options {options!r}>".format(
+ options=self.options
+ )
+
+
+def in_(options):
+ """
+ A validator that raises a :exc:`ValueError` if the initializer is called
+ with a value that does not belong in the options provided. The check is
+ performed using ``value in options``.
+
+ :param options: Allowed options.
+ :type options: list, tuple, :class:`enum.Enum`, ...
+
+ :raises ValueError: With a human readable error message, the attribute (of
+ type :class:`attr.Attribute`), the expected options, and the value it
+ got.
+
+ .. versionadded:: 17.1.0
+ """
+ return _InValidator(options)
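+
+# A minimal usage sketch, assuming the public ``attr`` package:
+#
+#     import attr
+#     from attr.validators import in_
+#
+#     @attr.s
+#     class C(object):
+#         state = attr.ib(validator=in_(["open", "closed"]))
+#
+#     C("open")      # fine
+#     C("broken")    # raises ValueError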
+
+
+@attrs(repr=False, slots=False, hash=True)
+class _IsCallableValidator(object):
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not callable(value):
+ raise TypeError("'{name}' must be callable".format(name=attr.name))
+
+ def __repr__(self):
+ return "<is_callable validator>"
+
+
+def is_callable():
+ """
+ A validator that raises a :class:`TypeError` if the initializer is called
+ with a value for this particular attribute that is not callable.
+
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: With a human readable error message containing the
+ attribute (of type :class:`attr.Attribute`) name.
+ """
+ return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepIterable(object):
+ member_validator = attrib(validator=is_callable())
+ iterable_validator = attrib(
+ default=None, validator=optional(is_callable())
+ )
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.iterable_validator is not None:
+ self.iterable_validator(inst, attr, value)
+
+ for member in value:
+ self.member_validator(inst, attr, member)
+
+ def __repr__(self):
+ iterable_identifier = (
+ ""
+ if self.iterable_validator is None
+ else " {iterable!r}".format(iterable=self.iterable_validator)
+ )
+ return (
+ "<deep_iterable validator for{iterable_identifier}"
+ " iterables of {member!r}>"
+ ).format(
+ iterable_identifier=iterable_identifier,
+ member=self.member_validator,
+ )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+ """
+ A validator that performs deep validation of an iterable.
+
+ :param member_validator: Validator to apply to iterable members
+ :param iterable_validator: Validator to apply to iterable itself
+ (optional)
+
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ return _DeepIterable(member_validator, iterable_validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepMapping(object):
+ key_validator = attrib(validator=is_callable())
+ value_validator = attrib(validator=is_callable())
+ mapping_validator = attrib(default=None, validator=optional(is_callable()))
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.mapping_validator is not None:
+ self.mapping_validator(inst, attr, value)
+
+ for key in value:
+ self.key_validator(inst, attr, key)
+ self.value_validator(inst, attr, value[key])
+
+ def __repr__(self):
+ return (
+ "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
+ ).format(key=self.key_validator, value=self.value_validator)
+
+
+def deep_mapping(key_validator, value_validator, mapping_validator=None):
+ """
+ A validator that performs deep validation of a dictionary.
+
+ :param key_validator: Validator to apply to dictionary keys
+ :param value_validator: Validator to apply to dictionary values
+ :param mapping_validator: Validator to apply to top-level mapping
+ attribute (optional)
+
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ return _DeepMapping(key_validator, value_validator, mapping_validator)
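+
+# A minimal usage sketch for the two deep validators above, assuming the
+# public ``attr`` package:
+#
+#     import attr
+#     from attr.validators import deep_iterable, deep_mapping, instance_of
+#
+#     @attr.s
+#     class C(object):
+#         names = attr.ib(
+#             validator=deep_iterable(
+#                 member_validator=instance_of(str),
+#                 iterable_validator=instance_of(list),
+#             )
+#         )
+#         scores = attr.ib(
+#             validator=deep_mapping(
+#                 key_validator=instance_of(str),
+#                 value_validator=instance_of(int),
+#             )
+#         )
+#
+#     C(["a", "b"], {"a": 1})    # fine
+#     C(["a", 1], {"a": 1})      # raises TypeError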
diff --git a/third_party/python/attrs/src/attr/validators.pyi b/third_party/python/attrs/src/attr/validators.pyi
new file mode 100644
index 0000000000..01af06845e
--- /dev/null
+++ b/third_party/python/attrs/src/attr/validators.pyi
@@ -0,0 +1,24 @@
+from typing import Container, List, Union, TypeVar, Type, Any, Optional, Tuple
+from . import _ValidatorType
+
+_T = TypeVar("_T")
+
+def instance_of(
+ type: Union[Tuple[Type[_T], ...], Type[_T]]
+) -> _ValidatorType[_T]: ...
+def provides(interface: Any) -> _ValidatorType[Any]: ...
+def optional(
+ validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
+) -> _ValidatorType[Optional[_T]]: ...
+def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
+def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
+def deep_iterable(
+ member_validator: _ValidatorType[_T],
+ iterable_validator: Optional[_ValidatorType[_T]],
+) -> _ValidatorType[_T]: ...
+def deep_mapping(
+ key_validator: _ValidatorType[_T],
+ value_validator: _ValidatorType[_T],
+ mapping_validator: Optional[_ValidatorType[_T]],
+) -> _ValidatorType[_T]: ...
+def is_callable() -> _ValidatorType[_T]: ...
diff --git a/third_party/python/attrs/tox.ini b/third_party/python/attrs/tox.ini
new file mode 100644
index 0000000000..0d2f3c32c5
--- /dev/null
+++ b/third_party/python/attrs/tox.ini
@@ -0,0 +1,85 @@
+[tox]
+envlist = typing,lint,py27,py34,py35,py36,py37,pypy,pypy3,manifest,docs,pypi-description,changelog,coverage-report
+isolated_build = True
+
+
+[testenv]
+# Prevent random setuptools/pip breakages like
+# https://github.com/pypa/setuptools/issues/1042 from breaking our builds.
+setenv =
+ VIRTUALENV_NO_DOWNLOAD=1
+extras = tests
+commands = python -m pytest {posargs}
+
+
+[testenv:py27]
+extras = tests
+commands = coverage run --parallel -m pytest {posargs}
+
+
+[testenv:py37]
+# Python 3.6+ has a number of compile-time warnings on invalid string escapes.
+# PYTHONWARNINGS=d and --no-compile below make them visible during the Tox run.
+install_command = pip install --no-compile {opts} {packages}
+setenv =
+ PYTHONWARNINGS=d
+extras = tests
+commands = coverage run --parallel -m pytest {posargs}
+
+
+[testenv:coverage-report]
+basepython = python3.7
+skip_install = true
+deps = coverage
+commands =
+ coverage combine
+ coverage report
+
+
+[testenv:lint]
+basepython = python3.7
+skip_install = true
+deps = pre-commit
+passenv = HOMEPATH # needed on Windows
+commands = pre-commit run --all-files
+
+
+[testenv:docs]
+# RTD only allows for 3.7
+basepython = python3.7
+extras = docs
+commands =
+ sphinx-build -W -b html -d {envtmpdir}/doctrees docs docs/_build/html
+ sphinx-build -W -b doctest -d {envtmpdir}/doctrees docs docs/_build/html
+ python -m doctest README.rst
+
+
+[testenv:manifest]
+basepython = python3.7
+deps = check-manifest
+skip_install = true
+commands = check-manifest
+
+
+[testenv:pypi-description]
+basepython = python3.7
+skip_install = true
+deps =
+ twine
+ pip >= 18.0.0
+commands =
+ pip wheel -w {envtmpdir}/build --no-deps .
+ twine check {envtmpdir}/build/*
+
+
+[testenv:changelog]
+basepython = python3.7
+deps = towncrier
+skip_install = true
+commands = towncrier --draft
+
+
+[testenv:typing]
+basepython = python3.7
+deps = mypy
+commands = mypy tests/typing_example.py
diff --git a/third_party/python/backports/shutil_which/__init__.py b/third_party/python/backports/shutil_which/__init__.py
new file mode 100644
index 0000000000..1182bc91b7
--- /dev/null
+++ b/third_party/python/backports/shutil_which/__init__.py
@@ -0,0 +1,78 @@
+"""Backport of shutil.which from Python 3.5
+
+The function is included unmodified from Python stdlib 3.5.1,
+and is (C) Python
+"""
+from __future__ import absolute_import # Import system's os, not backports.os.
+
+import os
+import sys
+
+__version__ = '3.5.1'
+
+def backport_which(cmd, mode=os.F_OK | os.X_OK, path=None):
+ """Given a command, mode, and a PATH string, return the path which
+ conforms to the given mode on the PATH, or None if there is no such
+ file.
+
+ `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
+ of os.environ.get("PATH"), or can be overridden with a custom search
+ path.
+
+ """
+ # Check that a given file can be accessed with the correct mode.
+ # Additionally check that `file` is not a directory, as on Windows
+ # directories pass the os.access check.
+ def _access_check(fn, mode):
+ return (os.path.exists(fn) and os.access(fn, mode)
+ and not os.path.isdir(fn))
+
+ # If we're given a path with a directory part, look it up directly rather
+ # than referring to PATH directories. This includes checking relative to the
+ # current directory, e.g. ./script
+ if os.path.dirname(cmd):
+ if _access_check(cmd, mode):
+ return cmd
+ return None
+
+ if path is None:
+ path = os.environ.get("PATH", os.defpath)
+ if not path:
+ return None
+ path = path.split(os.pathsep)
+
+ if sys.platform == "win32":
+ # The current directory takes precedence on Windows.
+ if os.curdir not in path:
+ path.insert(0, os.curdir)
+
+ # PATHEXT is necessary to check on Windows.
+ pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
+ # See if the given file matches any of the expected path extensions.
+ # This will allow us to short circuit when given "python.exe".
+ # If it does match, only test that one, otherwise we have to try
+ # others.
+ if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
+ files = [cmd]
+ else:
+ files = [cmd + ext for ext in pathext]
+ else:
+ # On other platforms you don't have things like PATHEXT to tell you
+ # what file suffixes are executable, so just pass on cmd as-is.
+ files = [cmd]
+
+ seen = set()
+ for dir in path:
+ normdir = os.path.normcase(dir)
+ if normdir not in seen:
+ seen.add(normdir)
+ for thefile in files:
+ name = os.path.join(dir, thefile)
+ if _access_check(name, mode):
+ return name
+ return None
+
+try:
+ from shutil import which
+except ImportError:
+ which = backport_which
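+
+# A minimal usage sketch, assuming this package is importable as
+# ``backports.shutil_which`` (per its path in the tree):
+#
+#     from backports.shutil_which import which
+#
+#     which("python")                    # e.g. '/usr/bin/python', or None
+#     which("python", path="/usr/bin")   # restrict the search to one directory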
diff --git a/third_party/python/blessings/LICENSE b/third_party/python/blessings/LICENSE
new file mode 100644
index 0000000000..3d3a44e65b
--- /dev/null
+++ b/third_party/python/blessings/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2011 Erik Rose
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/python/blessings/MANIFEST.in b/third_party/python/blessings/MANIFEST.in
new file mode 100644
index 0000000000..3f4fbd7084
--- /dev/null
+++ b/third_party/python/blessings/MANIFEST.in
@@ -0,0 +1,3 @@
+include README.rst
+include LICENSE
+include tox.ini
diff --git a/third_party/python/blessings/PKG-INFO b/third_party/python/blessings/PKG-INFO
new file mode 100644
index 0000000000..34dd25e99a
--- /dev/null
+++ b/third_party/python/blessings/PKG-INFO
@@ -0,0 +1,560 @@
+Metadata-Version: 1.1
+Name: blessings
+Version: 1.7
+Summary: A thin, practical wrapper around terminal coloring, styling, and positioning
+Home-page: https://github.com/erikrose/blessings
+Author: Erik Rose
+Author-email: erikrose@grinchcentral.com
+License: MIT
+Description: =========
+ Blessings
+ =========
+
+ Coding with Blessings looks like this... ::
+
+ from blessings import Terminal
+
+ t = Terminal()
+
+ print t.bold('Hi there!')
+ print t.bold_red_on_bright_green('It hurts my eyes!')
+
+ with t.location(0, t.height - 1):
+ print 'This is at the bottom.'
+
+ Or, for byte-level control, you can drop down and play with raw terminal
+ capabilities::
+
+ print '{t.bold}All your {t.red}bold and red base{t.normal}'.format(t=t)
+ print t.wingo(2)
+
+ `Full API Reference <https://blessings.readthedocs.io/>`_
+
+ The Pitch
+ =========
+
+ Blessings lifts several of curses_' limiting assumptions, and it makes your
+ code pretty, too:
+
+ * Use styles, color, and maybe a little positioning without necessarily
+ clearing the whole screen first.
+ * Leave more than one screenful of scrollback in the buffer after your program
+ exits, like a well-behaved command-line app should.
+ * Get rid of all those noisy, C-like calls to ``tigetstr`` and ``tparm``, so
+ your code doesn't get crowded out by terminal bookkeeping.
+ * Act intelligently when somebody redirects your output to a file, omitting the
+ terminal control codes the user doesn't want to see (optional).
+
+ .. _curses: http://docs.python.org/library/curses.html
+
+ Before And After
+ ----------------
+
+ Without Blessings, this is how you'd print some underlined text at the bottom
+ of the screen::
+
+ from curses import tigetstr, setupterm, tparm
+ from fcntl import ioctl
+ from os import isatty
+ import struct
+ import sys
+ from termios import TIOCGWINSZ
+
+ # If we want to tolerate having our output piped to other commands or
+ # files without crashing, we need to do all this branching:
+ if hasattr(sys.stdout, 'fileno') and isatty(sys.stdout.fileno()):
+ setupterm()
+ sc = tigetstr('sc')
+ cup = tigetstr('cup')
+ rc = tigetstr('rc')
+ underline = tigetstr('smul')
+ normal = tigetstr('sgr0')
+ else:
+ sc = cup = rc = underline = normal = ''
+ print sc # Save cursor position.
+ if cup:
+ # tigetnum('lines') doesn't always update promptly, hence this:
+ height = struct.unpack('hhhh', ioctl(0, TIOCGWINSZ, '\000' * 8))[0]
+ print tparm(cup, height - 1, 0) # Move cursor to bottom.
+ print 'This is {under}underlined{normal}!'.format(under=underline,
+ normal=normal)
+ print rc # Restore cursor position.
+
+ That was long and full of incomprehensible trash! Let's try it again, this time
+ with Blessings::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ with term.location(0, term.height - 1):
+ print 'This is', term.underline('pretty!')
+
+ Much better.
+
+ What It Provides
+ ================
+
+ Blessings provides just one top-level object: ``Terminal``. Instantiating a
+ ``Terminal`` figures out whether you're on a terminal at all and, if so, does
+ any necessary terminal setup. After that, you can proceed to ask it all sorts
+ of things about the terminal. Terminal terminal terminal.
+
+ Simple Formatting
+ -----------------
+
+ Lots of handy formatting codes ("capabilities" in low-level parlance) are
+ available as attributes on a ``Terminal``. For example::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print 'I am ' + term.bold + 'bold' + term.normal + '!'
+
+ Though they are strings at heart, you can also use them as callable wrappers so
+ you don't have to say ``normal`` afterward::
+
+ print 'I am', term.bold('bold') + '!'
+
+ Or, if you want fine-grained control while maintaining some semblance of
+ brevity, you can combine it with Python's string formatting, which makes
+ attributes easy to access::
+
+ print 'All your {t.red}base {t.underline}are belong to us{t.normal}'.format(t=term)
+
+ Simple capabilities of interest include...
+
+ * ``bold``
+ * ``reverse``
+ * ``underline``
+ * ``no_underline`` (which turns off underlining)
+ * ``blink``
+ * ``normal`` (which turns off everything, even colors)
+
+ Here are a few more which are less likely to work on all terminals:
+
+ * ``dim``
+ * ``italic`` and ``no_italic``
+ * ``shadow`` and ``no_shadow``
+ * ``standout`` and ``no_standout``
+ * ``subscript`` and ``no_subscript``
+ * ``superscript`` and ``no_superscript``
+ * ``flash`` (which flashes the screen once)
+
+ Note that, while the inverse of ``underline`` is ``no_underline``, the only way
+ to turn off ``bold`` or ``reverse`` is ``normal``, which also cancels any
+ custom colors. This is because there's no portable way to tell the terminal to
+ undo certain pieces of formatting, even at the lowest level.
+
+ You might also notice that the above aren't the typical incomprehensible
+ terminfo capability names; we alias a few of the harder-to-remember ones for
+ readability. However, you aren't limited to these: you can reference any
+ string-returning capability listed on the `terminfo man page`_ by the name
+ under the "Cap-name" column: for example, ``term.rum``.
+
+ .. _`terminfo man page`: http://www.manpagez.com/man/5/terminfo/
+
+ Color
+ -----
+
+ 16 colors, both foreground and background, are available as easy-to-remember
+ attributes::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print term.red + term.on_green + 'Red on green? Ick!' + term.normal
+ print term.bright_red + term.on_bright_blue + 'This is even worse!' + term.normal
+
+ You can also call them as wrappers, which sets everything back to normal at the
+ end::
+
+ print term.red_on_green('Red on green? Ick!')
+ print term.yellow('I can barely see it.')
+
+ The available colors are...
+
+ * ``black``
+ * ``red``
+ * ``green``
+ * ``yellow``
+ * ``blue``
+ * ``magenta``
+ * ``cyan``
+ * ``white``
+
+ You can set the background color instead of the foreground by prepending
+ ``on_``, as in ``on_blue``. There is also a ``bright`` version of each color:
+ for example, ``on_bright_blue``.
+
+ There is also a numerical interface to colors, which takes an integer from
+ 0-15::
+
+ term.color(5) + 'Hello' + term.normal
+ term.on_color(3) + 'Hello' + term.normal
+
+ term.color(5)('Hello')
+ term.on_color(3)('Hello')
+
+ If some color is unsupported (for instance, if only the normal colors are
+ available, not the bright ones), trying to use it will, on most terminals, have
+ no effect: the foreground and background colors will stay as they were. You can
+ get fancy and do different things depending on the supported colors by checking
+ `number_of_colors`_.
+
+ .. _`number_of_colors`: http://packages.python.org/blessings/#blessings.Terminal.number_of_colors
+
+ Compound Formatting
+ -------------------
+
+ If you want to do lots of crazy formatting all at once, you can just mash it
+ all together::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print term.bold_underline_green_on_yellow + 'Woo' + term.normal
+
+ Or you can use your newly coined attribute as a wrapper, which implicitly sets
+ everything back to normal afterward::
+
+ print term.bold_underline_green_on_yellow('Woo')
+
+ This compound notation comes in handy if you want to allow users to customize
+ the formatting of your app: just have them pass in a format specifier like
+ "bold_green" on the command line, and do a quick ``getattr(term,
+ that_option)('Your text')`` when you do your formatting.
+
+ I'd be remiss if I didn't credit couleur_, where I probably got the idea for
+ all this mashing.
+
+ .. _couleur: http://pypi.python.org/pypi/couleur
+
+ Moving The Cursor
+ -----------------
+
+ When you want to move the cursor to output text at a specific spot, you have
+ a few choices.
+
+ Moving Temporarily
+ ~~~~~~~~~~~~~~~~~~
+
+ Most often, you'll need to flit to a certain location, print something, and
+ then return: for example, when updating a progress bar at the bottom of the
+ screen. ``Terminal`` provides a context manager for doing this concisely::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ with term.location(0, term.height - 1):
+ print 'Here is the bottom.'
+ print 'This is back where I came from.'
+
+ Parameters to ``location()`` are ``x`` and then ``y``, but you can also pass
+ just one of them, leaving the other alone. For example... ::
+
+ with term.location(y=10):
+ print 'We changed just the row.'
+
+ If you're doing a series of ``move`` calls (see below) and want to return the
+ cursor to its original position afterward, call ``location()`` with no
+ arguments, and it will do only the position restoring::
+
+ with term.location():
+ print term.move(1, 1) + 'Hi'
+ print term.move(9, 9) + 'Mom'
+
+ Note that, since ``location()`` uses the terminal's built-in
+ position-remembering machinery, you can't usefully nest multiple calls. Use
+ ``location()`` at the outermost spot, and use simpler things like ``move``
+ inside.
+
+ Moving Permanently
+ ~~~~~~~~~~~~~~~~~~
+
+ If you just want to move and aren't worried about returning, do something like
+ this::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print term.move(10, 1) + 'Hi, mom!'
+
+ ``move``
+ Position the cursor elsewhere. Parameters are y coordinate, then x
+ coordinate.
+ ``move_x``
+ Move the cursor to the given column.
+ ``move_y``
+ Move the cursor to the given row.
+
+ How does all this work? These are simply more terminal capabilities, wrapped to
+ give them nicer names. The added wrinkle--that they take parameters--is also
+ given a pleasant treatment: rather than making you dig up ``tparm()`` all the
+ time, we simply make these capabilities into callable strings. You'd get the
+ raw capability strings if you were to just print them, but they're fully
+ parametrized if you pass params to them as if they were functions.
+
+ Consequently, you can also reference any other string-returning capability
+ listed on the `terminfo man page`_ by its name under the "Cap-name" column.
+
+ .. _`terminfo man page`: http://www.manpagez.com/man/5/terminfo/
+
+ One-Notch Movement
+ ~~~~~~~~~~~~~~~~~~
+
+ Finally, there are some parameterless movement capabilities that move the
+ cursor one character in various directions:
+
+ * ``move_left``
+ * ``move_right``
+ * ``move_up``
+ * ``move_down``
+
+ For example... ::
+
+ print term.move_up + 'Howdy!'
+
+ Height And Width
+ ----------------
+
+ It's simple to get the height and width of the terminal, in characters::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ height = term.height
+ width = term.width
+
+ These are newly updated each time you ask for them, so they're safe to use from
+ SIGWINCH handlers.
+
+ Clearing The Screen
+ -------------------
+
+ Blessings provides syntactic sugar over some screen-clearing capabilities:
+
+ ``clear``
+ Clear the whole screen.
+ ``clear_eol``
+ Clear to the end of the line.
+ ``clear_bol``
+ Clear backward to the beginning of the line.
+ ``clear_eos``
+ Clear to the end of screen.
+
+ Full-Screen Mode
+ ----------------
+
+ Perhaps you have seen a full-screen program, such as an editor, restore the
+ exact previous state of the terminal upon exiting, including, for example, the
+ command-line prompt from which it was launched. Curses pretty much forces you
+ into this behavior, but Blessings makes it optional. If you want to do the
+ state-restoration thing, use these capabilities:
+
+ ``enter_fullscreen``
+ Switch to the terminal mode where full-screen output is sanctioned. Print
+ this before you do any output.
+ ``exit_fullscreen``
+ Switch back to normal mode, restoring the exact state from before
+ ``enter_fullscreen`` was used.
+
+ Using ``exit_fullscreen`` will wipe away any trace of your program's output, so
+ reserve it for when you don't want to leave anything behind in the scrollback.
+
+ There's also a context manager you can use as a shortcut::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ with term.fullscreen():
+ # Print some stuff.
+
+ Besides brevity, another advantage is that it switches back to normal mode even
+ if an exception is raised in the ``with`` block.
+
+ Pipe Savvy
+ ----------
+
+ If your program isn't attached to a terminal, like if it's being piped to
+ another command or redirected to a file, all the capability attributes on
+ ``Terminal`` will return empty strings. You'll get a nice-looking file without
+ any formatting codes gumming up the works.
+
+ If you want to override this--like if you anticipate your program being piped
+ through ``less -r``, which handles terminal escapes just fine--pass
+ ``force_styling=True`` to the ``Terminal`` constructor.
+
+ In any case, there is a ``does_styling`` attribute on ``Terminal`` that lets
+ you see whether your capabilities will return actual, working formatting codes.
+ If it's false, you should refrain from drawing progress bars and other frippery
+ and just stick to content, since you're apparently headed into a pipe::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ if term.does_styling:
+ with term.location(0, term.height - 1):
+ print 'Progress: [=======> ]'
+ print term.bold('Important stuff')
+
+ Shopping List
+ =============
+
+ There are decades of legacy tied up in terminal interaction, so attention to
+ detail and behavior in edge cases make a difference. Here are some ways
+ Blessings has your back:
+
+ * Uses the terminfo database so it works with any terminal type
+ * Provides up-to-the-moment terminal height and width, so you can respond to
+ terminal size changes (SIGWINCH signals). (Most other libraries query the
+ ``COLUMNS`` and ``LINES`` environment variables or the ``cols`` or ``lines``
+ terminal capabilities, which don't update promptly, if at all.)
+ * Avoids making a mess if the output gets piped to a non-terminal
+ * Works great with standard Python string templating
+ * Provides convenient access to all terminal capabilities, not just a sugared
+ few
+ * Outputs to any file-like object, not just stdout
+ * Keeps a minimum of internal state, so you can feel free to mix and match with
+ calls to curses or whatever other terminal libraries you like
+
+ Blessings does not provide...
+
+ * Native color support on the Windows command prompt. However, it should work
+ when used in concert with colorama_.
+
+ .. _colorama: http://pypi.python.org/pypi/colorama/0.2.4
+
+ Bugs
+ ====
+
+ Bugs or suggestions? Visit the `issue tracker`_.
+
+ .. _`issue tracker`: https://github.com/erikrose/blessings/issues/
+
+ Blessings tests are run automatically by `Travis CI`_.
+
+ .. _`Travis CI`: https://travis-ci.org/erikrose/blessings/
+
+ .. image:: https://travis-ci.org/erikrose/blessings.svg?branch=master
+ :target: https://travis-ci.org/erikrose/blessings
+
+
+ License
+ =======
+
+ Blessings is under the MIT License. See the LICENSE file.
+
+ Version History
+ ===============
+
+ 1.7
+ * Drop support for Python 2.6 and 3.3, which are end-of-lifed.
+ * Switch from 2to3 to the ``six`` library.
+
+ 1.6.1
+ * Don't crash if ``number_of_colors()`` is called when run in a non-terminal
+ or when ``does_styling`` is otherwise false.
+
+ 1.6
+ * Add ``does_styling`` property. This takes ``force_styling`` into account
+ and should replace most uses of ``is_a_tty``.
+ * Make ``is_a_tty`` a read-only property, like ``does_styling``. Writing to
+ it never would have done anything constructive.
+ * Add ``fullscreen()`` and ``hidden_cursor()`` to the auto-generated docs.
+ * Fall back to ``LINES`` and ``COLUMNS`` environment vars to find height and
+ width. (jquast)
+ * Support terminal types, such as kermit and avatar, that use bytes 127-255
+ in their escape sequences. (jquast)
+
+ 1.5.1
+ * Clean up fabfile, removing the redundant ``test`` command.
+ * Add Travis support.
+ * Make ``python setup.py test`` work without spurious errors on 2.6.
+ * Work around a tox parsing bug in its config file.
+ * Make context managers clean up after themselves even if there's an
+ exception. (Vitja Makarov)
+ * Parametrizing a capability no longer crashes when there is no tty. (Vitja
+ Makarov)
+
+ 1.5
+ * Add syntactic sugar and documentation for ``enter_fullscreen`` and
+ ``exit_fullscreen``.
+ * Add context managers ``fullscreen()`` and ``hidden_cursor()``.
+ * Now you can force a ``Terminal`` never to emit styles by passing
+ ``force_styling=None``.
+
+ 1.4
+ * Add syntactic sugar for cursor visibility control and single-space-movement
+ capabilities.
+ * Endorse the ``location()`` idiom for restoring cursor position after a
+ series of manual movements.
+ * Fix a bug in which ``location()`` wouldn't do anything when passed zeroes.
+ * Allow tests to be run with ``python setup.py test``.
+
+ 1.3
+ * Added ``number_of_colors``, which tells you how many colors the terminal
+ supports.
+ * Made ``color(n)`` and ``on_color(n)`` callable to wrap a string, like the
+ named colors can. Also, make them both fall back to the ``setf`` and
+ ``setb`` capabilities (like the named colors do) if the ANSI ``setaf`` and
+ ``setab`` aren't available.
+ * Allowed ``color`` attr to act as an unparametrized string, not just a
+ callable.
+ * Made ``height`` and ``width`` examine any passed-in stream before falling
+ back to stdout. (This rarely if ever affects actual behavior; it's mostly
+ philosophical.)
+ * Made caching simpler and slightly more efficient.
+ * Got rid of a reference cycle between Terminals and FormattingStrings.
+ * Updated docs to reflect that terminal addressing (as in ``location()``) is
+ 0-based.
+
+ 1.2
+ * Added support for Python 3! We need 3.2.3 or greater, because the curses
+ library couldn't decide whether to accept strs or bytes before that
+ (http://bugs.python.org/issue10570).
+ * Everything that comes out of the library is now unicode. This lets us
+ support Python 3 without making a mess of the code, and Python 2 should
+ continue to work unless you were testing types (and badly). Please file a
+ bug if this causes trouble for you.
+ * Changed to the MIT License for better world domination.
+ * Added Sphinx docs.
+
+ 1.1
+ * Added nicely named attributes for colors.
+ * Introduced compound formatting.
+ * Added wrapper behavior for styling and colors.
+ * Let you force capabilities to be non-empty, even if the output stream is
+ not a terminal.
+ * Added the ``is_a_tty`` attribute for telling whether the output stream is a
+ terminal.
+ * Sugared the remaining interesting string capabilities.
+ * Let ``location()`` operate on just an x *or* y coordinate.
+
+ 1.0
+ * Extracted Blessings from nose-progressive, my `progress-bar-having,
+ traceback-shortcutting, rootin', tootin' testrunner`_. It provided the
+ tootin' functionality.
+
+ .. _`progress-bar-having, traceback-shortcutting, rootin', tootin' testrunner`: http://pypi.python.org/pypi/nose-progressive/
+
+Keywords: terminal,tty,curses,ncurses,formatting,style,color,console
+Platform: UNKNOWN
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Environment :: Console :: Curses
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development :: User Interfaces
+Classifier: Topic :: Terminals
diff --git a/third_party/python/blessings/README.rst b/third_party/python/blessings/README.rst
new file mode 100644
index 0000000000..77da433c19
--- /dev/null
+++ b/third_party/python/blessings/README.rst
@@ -0,0 +1,531 @@
+=========
+Blessings
+=========
+
+Coding with Blessings looks like this... ::
+
+ from blessings import Terminal
+
+ t = Terminal()
+
+ print t.bold('Hi there!')
+ print t.bold_red_on_bright_green('It hurts my eyes!')
+
+ with t.location(0, t.height - 1):
+ print 'This is at the bottom.'
+
+Or, for byte-level control, you can drop down and play with raw terminal
+capabilities::
+
+ print '{t.bold}All your {t.red}bold and red base{t.normal}'.format(t=t)
+ print t.wingo(2)
+
+`Full API Reference <https://blessings.readthedocs.io/>`_
+
+The Pitch
+=========
+
+Blessings lifts several of curses_' limiting assumptions, and it makes your
+code pretty, too:
+
+* Use styles, color, and maybe a little positioning without necessarily
+  clearing the whole screen first.
+* Leave more than one screenful of scrollback in the buffer after your program
+ exits, like a well-behaved command-line app should.
+* Get rid of all those noisy, C-like calls to ``tigetstr`` and ``tparm``, so
+ your code doesn't get crowded out by terminal bookkeeping.
+* Act intelligently when somebody redirects your output to a file, omitting the
+ terminal control codes the user doesn't want to see (optional).
+
+.. _curses: http://docs.python.org/library/curses.html
+
+Before And After
+----------------
+
+Without Blessings, this is how you'd print some underlined text at the bottom
+of the screen::
+
+ from curses import tigetstr, setupterm, tparm
+ from fcntl import ioctl
+ from os import isatty
+ import struct
+ import sys
+ from termios import TIOCGWINSZ
+
+ # If we want to tolerate having our output piped to other commands or
+ # files without crashing, we need to do all this branching:
+ if hasattr(sys.stdout, 'fileno') and isatty(sys.stdout.fileno()):
+ setupterm()
+ sc = tigetstr('sc')
+ cup = tigetstr('cup')
+ rc = tigetstr('rc')
+ underline = tigetstr('smul')
+ normal = tigetstr('sgr0')
+ else:
+ sc = cup = rc = underline = normal = ''
+ print sc # Save cursor position.
+ if cup:
+ # tigetnum('lines') doesn't always update promptly, hence this:
+ height = struct.unpack('hhhh', ioctl(0, TIOCGWINSZ, '\000' * 8))[0]
+ print tparm(cup, height - 1, 0) # Move cursor to bottom.
+ print 'This is {under}underlined{normal}!'.format(under=underline,
+ normal=normal)
+ print rc # Restore cursor position.
+
+That was long and full of incomprehensible trash! Let's try it again, this time
+with Blessings::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ with term.location(0, term.height - 1):
+ print 'This is', term.underline('pretty!')
+
+Much better.
+
+What It Provides
+================
+
+Blessings provides just one top-level object: ``Terminal``. Instantiating a
+``Terminal`` figures out whether you're on a terminal at all and, if so, does
+any necessary terminal setup. After that, you can proceed to ask it all sorts
+of things about the terminal. Terminal terminal terminal.
+
+Simple Formatting
+-----------------
+
+Lots of handy formatting codes ("capabilities" in low-level parlance) are
+available as attributes on a ``Terminal``. For example::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print 'I am ' + term.bold + 'bold' + term.normal + '!'
+
+Though they are strings at heart, you can also use them as callable wrappers so
+you don't have to say ``normal`` afterward::
+
+ print 'I am', term.bold('bold') + '!'
+
+Or, if you want fine-grained control while maintaining some semblance of
+brevity, you can combine it with Python's string formatting, which makes
+attributes easy to access::
+
+ print 'All your {t.red}base {t.underline}are belong to us{t.normal}'.format(t=term)
+
+Simple capabilities of interest include...
+
+* ``bold``
+* ``reverse``
+* ``underline``
+* ``no_underline`` (which turns off underlining)
+* ``blink``
+* ``normal`` (which turns off everything, even colors)
+
+Here are a few more which are less likely to work on all terminals:
+
+* ``dim``
+* ``italic`` and ``no_italic``
+* ``shadow`` and ``no_shadow``
+* ``standout`` and ``no_standout``
+* ``subscript`` and ``no_subscript``
+* ``superscript`` and ``no_superscript``
+* ``flash`` (which flashes the screen once)
+
+Note that, while the inverse of ``underline`` is ``no_underline``, the only way
+to turn off ``bold`` or ``reverse`` is ``normal``, which also cancels any
+custom colors. This is because there's no portable way to tell the terminal to
+undo certain pieces of formatting, even at the lowest level.
+
+You might also notice that the above aren't the typical incomprehensible
+terminfo capability names; we alias a few of the harder-to-remember ones for
+readability. However, you aren't limited to these: you can reference any
+string-returning capability listed on the `terminfo man page`_ by the name
+under the "Cap-name" column: for example, ``term.rum``.
+
+.. _`terminfo man page`: http://www.manpagez.com/man/5/terminfo/
+
+Color
+-----
+
+16 colors, both foreground and background, are available as easy-to-remember
+attributes::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print term.red + term.on_green + 'Red on green? Ick!' + term.normal
+ print term.bright_red + term.on_bright_blue + 'This is even worse!' + term.normal
+
+You can also call them as wrappers, which sets everything back to normal at the
+end::
+
+ print term.red_on_green('Red on green? Ick!')
+ print term.yellow('I can barely see it.')
+
+The available colors are...
+
+* ``black``
+* ``red``
+* ``green``
+* ``yellow``
+* ``blue``
+* ``magenta``
+* ``cyan``
+* ``white``
+
+You can set the background color instead of the foreground by prepending
+``on_``, as in ``on_blue``. There is also a ``bright`` version of each color:
+for example, ``on_bright_blue``.
+
+There is also a numerical interface to colors, which takes an integer from
+0-15::
+
+ term.color(5) + 'Hello' + term.normal
+ term.on_color(3) + 'Hello' + term.normal
+
+ term.color(5)('Hello')
+ term.on_color(3)('Hello')
+
+If some color is unsupported (for instance, if only the normal colors are
+available, not the bright ones), trying to use it will, on most terminals, have
+no effect: the foreground and background colors will stay as they were. You can
+get fancy and do different things depending on the supported colors by checking
+`number_of_colors`_.
+
+.. _`number_of_colors`: http://packages.python.org/blessings/#blessings.Terminal.number_of_colors
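+
+For instance, you might branch on the color count like this (just a sketch;
+the thresholds and fallbacks are arbitrary)::
+
+    from blessings import Terminal
+
+    term = Terminal()
+    if term.number_of_colors >= 16:
+        print(term.bright_red('Lots of colors here.'))
+    elif term.number_of_colors:
+        print(term.red('At least the basics.'))
+    else:
+        print('No color at all.')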
+
+Compound Formatting
+-------------------
+
+If you want to do lots of crazy formatting all at once, you can just mash it
+all together::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print term.bold_underline_green_on_yellow + 'Woo' + term.normal
+
+Or you can use your newly coined attribute as a wrapper, which implicitly sets
+everything back to normal afterward::
+
+ print term.bold_underline_green_on_yellow('Woo')
+
+This compound notation comes in handy if you want to allow users to customize
+the formatting of your app: just have them pass in a format specifier like
+"bold_green" on the command line, and do a quick ``getattr(term,
+that_option)('Your text')`` when you do your formatting.
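+
+For instance (a quick sketch; where the option value comes from is up to you)::
+
+    from blessings import Terminal
+
+    term = Terminal()
+    user_style = 'bold_green'  # Imagine this arrived via a command-line option.
+    print(getattr(term, user_style)('Your text'))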
+
+I'd be remiss if I didn't credit couleur_, where I probably got the idea for
+all this mashing.
+
+.. _couleur: http://pypi.python.org/pypi/couleur
+
+Moving The Cursor
+-----------------
+
+When you want to move the cursor to output text at a specific spot, you have
+a few choices.
+
+Moving Temporarily
+~~~~~~~~~~~~~~~~~~
+
+Most often, you'll need to flit to a certain location, print something, and
+then return: for example, when updating a progress bar at the bottom of the
+screen. ``Terminal`` provides a context manager for doing this concisely::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ with term.location(0, term.height - 1):
+ print 'Here is the bottom.'
+ print 'This is back where I came from.'
+
+Parameters to ``location()`` are ``x`` and then ``y``, but you can also pass
+just one of them, leaving the other alone. For example... ::
+
+ with term.location(y=10):
+ print 'We changed just the row.'
+
+If you're doing a series of ``move`` calls (see below) and want to return the
+cursor to its original position afterward, call ``location()`` with no
+arguments, and it will do only the position restoring::
+
+ with term.location():
+ print term.move(1, 1) + 'Hi'
+ print term.move(9, 9) + 'Mom'
+
+Note that, since ``location()`` uses the terminal's built-in
+position-remembering machinery, you can't usefully nest multiple calls. Use
+``location()`` at the outermost spot, and use simpler things like ``move``
+inside.
+
+Moving Permanently
+~~~~~~~~~~~~~~~~~~
+
+If you just want to move and aren't worried about returning, do something like
+this::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print term.move(10, 1) + 'Hi, mom!'
+
+``move``
+ Position the cursor elsewhere. Parameters are y coordinate, then x
+ coordinate.
+``move_x``
+ Move the cursor to the given column.
+``move_y``
+ Move the cursor to the given row.
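+
+For instance (a tiny sketch; the coordinates are arbitrary)::
+
+    print(term.move(3, 0) + 'Row 3, column 0. (y comes first.)')
+    print(term.move_x(20) + 'Column 20 of whatever row we are on.')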
+
+How does all this work? These are simply more terminal capabilities, wrapped to
+give them nicer names. The added wrinkle--that they take parameters--is also
+given a pleasant treatment: rather than making you dig up ``tparm()`` all the
+time, we simply make these capabilities into callable strings. You'd get the
+raw capability strings if you were to just print them, but they're fully
+parametrized if you pass params to them as if they were functions.
+
+Consequently, you can also reference any other string-returning capability
+listed on the `terminfo man page`_ by its name under the "Cap-name" column.
+
+.. _`terminfo man page`: http://www.manpagez.com/man/5/terminfo/
+
+One-Notch Movement
+~~~~~~~~~~~~~~~~~~
+
+Finally, there are some parameterless movement capabilities that move the
+cursor one character in various directions:
+
+* ``move_left``
+* ``move_right``
+* ``move_up``
+* ``move_down``
+
+For example... ::
+
+ print term.move_up + 'Howdy!'
+
+Height And Width
+----------------
+
+It's simple to get the height and width of the terminal, in characters::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ height = term.height
+ width = term.width
+
+These are newly updated each time you ask for them, so they're safe to use from
+SIGWINCH handlers.
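+
+For instance, here's a sketch of reacting to resizes (the handler body is just
+illustrative)::
+
+    import signal
+
+    from blessings import Terminal
+
+    term = Terminal()
+
+    def on_resize(signum, frame):
+        # height and width are re-read on every access, so they're current.
+        print('New size: {0}x{1}'.format(term.width, term.height))
+
+    signal.signal(signal.SIGWINCH, on_resize)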
+
+Clearing The Screen
+-------------------
+
+Blessings provides syntactic sugar over some screen-clearing capabilities:
+
+``clear``
+ Clear the whole screen.
+``clear_eol``
+ Clear to the end of the line.
+``clear_bol``
+ Clear backward to the beginning of the line.
+``clear_eos``
+ Clear to the end of screen.
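+
+For example (a quick sketch; the text is arbitrary)::
+
+    print(term.clear)  # Start from a blank screen.
+    print('Status: OK' + term.clear_eol)  # Erase leftovers from a longer, earlier line.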
+
+Full-Screen Mode
+----------------
+
+Perhaps you have seen a full-screen program, such as an editor, restore the
+exact previous state of the terminal upon exiting, including, for example, the
+command-line prompt from which it was launched. Curses pretty much forces you
+into this behavior, but Blessings makes it optional. If you want to do the
+state-restoration thing, use these capabilities:
+
+``enter_fullscreen``
+ Switch to the terminal mode where full-screen output is sanctioned. Print
+ this before you do any output.
+``exit_fullscreen``
+ Switch back to normal mode, restoring the exact state from before
+ ``enter_fullscreen`` was used.
+
+Using ``exit_fullscreen`` will wipe away any trace of your program's output, so
+reserve it for when you don't want to leave anything behind in the scrollback.
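+
+Used directly, the pair looks something like this (a sketch)::
+
+    print(term.enter_fullscreen)
+    print('Now on the alternate screen.')
+    print(term.exit_fullscreen)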
+
+There's also a context manager you can use as a shortcut::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ with term.fullscreen():
+        print('Some stuff')  # Visible only until the block exits.
+
+Besides brevity, another advantage is that it switches back to normal mode even
+if an exception is raised in the ``with`` block.
+
+Pipe Savvy
+----------
+
+If your program isn't attached to a terminal, like if it's being piped to
+another command or redirected to a file, all the capability attributes on
+``Terminal`` will return empty strings. You'll get a nice-looking file without
+any formatting codes gumming up the works.
+
+If you want to override this--like if you anticipate your program being piped
+through ``less -r``, which handles terminal escapes just fine--pass
+``force_styling=True`` to the ``Terminal`` constructor.
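+
+A common pattern is to hang that off a command-line flag (a sketch; the
+``--color`` option name is made up)::
+
+    import argparse
+
+    from blessings import Terminal
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--color', action='store_true',
+                        help='Emit styling even when piped.')
+    args = parser.parse_args()
+    term = Terminal(force_styling=args.color)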
+
+In any case, there is a ``does_styling`` attribute on ``Terminal`` that lets
+you see whether your capabilities will return actual, working formatting codes.
+If it's false, you should refrain from drawing progress bars and other frippery
+and just stick to content, since you're apparently headed into a pipe::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ if term.does_styling:
+ with term.location(0, term.height - 1):
+ print 'Progress: [=======> ]'
+ print term.bold('Important stuff')
+
+Shopping List
+=============
+
+There are decades of legacy tied up in terminal interaction, so attention to
+detail and behavior in edge cases make a difference. Here are some ways
+Blessings has your back:
+
+* Uses the terminfo database so it works with any terminal type
+* Provides up-to-the-moment terminal height and width, so you can respond to
+ terminal size changes (SIGWINCH signals). (Most other libraries query the
+ ``COLUMNS`` and ``LINES`` environment variables or the ``cols`` or ``lines``
+ terminal capabilities, which don't update promptly, if at all.)
+* Avoids making a mess if the output gets piped to a non-terminal
+* Works great with standard Python string templating
+* Provides convenient access to all terminal capabilities, not just a sugared
+ few
+* Outputs to any file-like object, not just stdout
+* Keeps a minimum of internal state, so you can feel free to mix and match with
+ calls to curses or whatever other terminal libraries you like
+
+Blessings does not provide...
+
+* Native color support on the Windows command prompt. However, it should work
+ when used in concert with colorama_.
+
+.. _colorama: http://pypi.python.org/pypi/colorama/0.2.4
+
+Bugs
+====
+
+Bugs or suggestions? Visit the `issue tracker`_.
+
+.. _`issue tracker`: https://github.com/erikrose/blessings/issues/
+
+Blessings tests are run automatically by `Travis CI`_.
+
+.. _`Travis CI`: https://travis-ci.org/erikrose/blessings/
+
+.. image:: https://travis-ci.org/erikrose/blessings.svg?branch=master
+ :target: https://travis-ci.org/erikrose/blessings
+
+
+License
+=======
+
+Blessings is under the MIT License. See the LICENSE file.
+
+Version History
+===============
+
+1.7
+ * Drop support for Python 2.6 and 3.3, which are end-of-lifed.
+ * Switch from 2to3 to the ``six`` library.
+
+1.6.1
+ * Don't crash if ``number_of_colors()`` is called when run in a non-terminal
+ or when ``does_styling`` is otherwise false.
+
+1.6
+ * Add ``does_styling`` property. This takes ``force_styling`` into account
+ and should replace most uses of ``is_a_tty``.
+ * Make ``is_a_tty`` a read-only property, like ``does_styling``. Writing to
+ it never would have done anything constructive.
+ * Add ``fullscreen()`` and ``hidden_cursor()`` to the auto-generated docs.
+ * Fall back to ``LINES`` and ``COLUMNS`` environment vars to find height and
+ width. (jquast)
+ * Support terminal types, such as kermit and avatar, that use bytes 127-255
+ in their escape sequences. (jquast)
+
+1.5.1
+ * Clean up fabfile, removing the redundant ``test`` command.
+ * Add Travis support.
+ * Make ``python setup.py test`` work without spurious errors on 2.6.
+ * Work around a tox parsing bug in its config file.
+ * Make context managers clean up after themselves even if there's an
+ exception. (Vitja Makarov)
+ * Parametrizing a capability no longer crashes when there is no tty. (Vitja
+ Makarov)
+
+1.5
+ * Add syntactic sugar and documentation for ``enter_fullscreen`` and
+ ``exit_fullscreen``.
+ * Add context managers ``fullscreen()`` and ``hidden_cursor()``.
+ * Now you can force a ``Terminal`` never to emit styles by passing
+ ``force_styling=None``.
+
+1.4
+ * Add syntactic sugar for cursor visibility control and single-space-movement
+ capabilities.
+ * Endorse the ``location()`` idiom for restoring cursor position after a
+ series of manual movements.
+ * Fix a bug in which ``location()`` wouldn't do anything when passed zeroes.
+ * Allow tests to be run with ``python setup.py test``.
+
+1.3
+ * Added ``number_of_colors``, which tells you how many colors the terminal
+ supports.
+ * Made ``color(n)`` and ``on_color(n)`` callable to wrap a string, like the
+ named colors can. Also, make them both fall back to the ``setf`` and
+ ``setb`` capabilities (like the named colors do) if the ANSI ``setaf`` and
+ ``setab`` aren't available.
+ * Allowed ``color`` attr to act as an unparametrized string, not just a
+ callable.
+ * Made ``height`` and ``width`` examine any passed-in stream before falling
+ back to stdout. (This rarely if ever affects actual behavior; it's mostly
+ philosophical.)
+ * Made caching simpler and slightly more efficient.
+ * Got rid of a reference cycle between Terminals and FormattingStrings.
+ * Updated docs to reflect that terminal addressing (as in ``location()``) is
+ 0-based.
+
+1.2
+ * Added support for Python 3! We need 3.2.3 or greater, because the curses
+ library couldn't decide whether to accept strs or bytes before that
+ (http://bugs.python.org/issue10570).
+ * Everything that comes out of the library is now unicode. This lets us
+ support Python 3 without making a mess of the code, and Python 2 should
+ continue to work unless you were testing types (and badly). Please file a
+ bug if this causes trouble for you.
+ * Changed to the MIT License for better world domination.
+ * Added Sphinx docs.
+
+1.1
+ * Added nicely named attributes for colors.
+ * Introduced compound formatting.
+ * Added wrapper behavior for styling and colors.
+ * Let you force capabilities to be non-empty, even if the output stream is
+ not a terminal.
+ * Added the ``is_a_tty`` attribute for telling whether the output stream is a
+ terminal.
+ * Sugared the remaining interesting string capabilities.
+ * Let ``location()`` operate on just an x *or* y coordinate.
+
+1.0
+ * Extracted Blessings from nose-progressive, my `progress-bar-having,
+ traceback-shortcutting, rootin', tootin' testrunner`_. It provided the
+ tootin' functionality.
+
+.. _`progress-bar-having, traceback-shortcutting, rootin', tootin' testrunner`: http://pypi.python.org/pypi/nose-progressive/
diff --git a/third_party/python/blessings/blessings/__init__.py b/third_party/python/blessings/blessings/__init__.py
new file mode 100644
index 0000000000..388cece0b8
--- /dev/null
+++ b/third_party/python/blessings/blessings/__init__.py
@@ -0,0 +1,556 @@
+"""A thin, practical wrapper around terminal coloring, styling, and
+positioning"""
+
+from contextlib import contextmanager
+import curses
+from curses import setupterm, tigetnum, tigetstr, tparm
+from fcntl import ioctl
+from six import text_type, string_types
+
+try:
+ from io import UnsupportedOperation as IOUnsupportedOperation
+except ImportError:
+ class IOUnsupportedOperation(Exception):
+ """A dummy exception to take the place of Python 3's
+ ``io.UnsupportedOperation`` in Python 2"""
+
+from os import isatty, environ
+import struct
+import sys
+from termios import TIOCGWINSZ
+
+
+__all__ = ['Terminal']
+
+
+class Terminal(object):
+ """An abstraction around terminal capabilities
+
+ Unlike curses, this doesn't require clearing the screen before doing
+ anything, and it's friendlier to use. It keeps the endless calls to
+ ``tigetstr()`` and ``tparm()`` out of your code, and it acts intelligently
+ when somebody pipes your output to a non-terminal.
+
+ Instance attributes:
+
+ ``stream``
+ The stream the terminal outputs to. It's convenient to pass the stream
+ around with the terminal; it's almost always needed when the terminal
+ is and saves sticking lots of extra args on client functions in
+ practice.
+
+ """
+ def __init__(self, kind=None, stream=None, force_styling=False):
+ """Initialize the terminal.
+
+ If ``stream`` is not a tty, I will default to returning an empty
+ Unicode string for all capability values, so things like piping your
+ output to a file won't strew escape sequences all over the place. The
+ ``ls`` command sets a precedent for this: it defaults to columnar
+ output when being sent to a tty and one-item-per-line when not.
+
+ :arg kind: A terminal string as taken by ``setupterm()``. Defaults to
+ the value of the ``TERM`` environment variable.
+ :arg stream: A file-like object representing the terminal. Defaults to
+ the original value of stdout, like ``curses.initscr()`` does.
+ :arg force_styling: Whether to force the emission of capabilities, even
+ if we don't seem to be in a terminal. This comes in handy if users
+ are trying to pipe your output through something like ``less -r``,
+ which supports terminal codes just fine but doesn't appear itself
+ to be a terminal. Just expose a command-line option, and set
+ ``force_styling`` based on it. Terminal initialization sequences
+ will be sent to ``stream`` if it has a file descriptor and to
+ ``sys.__stdout__`` otherwise. (``setupterm()`` demands to send them
+ somewhere, and stdout is probably where the output is ultimately
+ headed. If not, stderr is probably bound to the same terminal.)
+
+ If you want to force styling to not happen, pass
+ ``force_styling=None``.
+
+ """
+ if stream is None:
+ stream = sys.__stdout__
+ try:
+ stream_descriptor = (stream.fileno() if hasattr(stream, 'fileno')
+ and callable(stream.fileno)
+ else None)
+ except IOUnsupportedOperation:
+ stream_descriptor = None
+
+ self._is_a_tty = (stream_descriptor is not None and
+ isatty(stream_descriptor))
+ self._does_styling = ((self.is_a_tty or force_styling) and
+ force_styling is not None)
+
+ # The descriptor to direct terminal initialization sequences to.
+ # sys.__stdout__ seems to always have a descriptor of 1, even if output
+ # is redirected.
+ self._init_descriptor = (sys.__stdout__.fileno()
+ if stream_descriptor is None
+ else stream_descriptor)
+ if self.does_styling:
+ # Make things like tigetstr() work. Explicit args make setupterm()
+ # work even when -s is passed to nosetests. Lean toward sending
+ # init sequences to the stream if it has a file descriptor, and
+ # send them to stdout as a fallback, since they have to go
+ # somewhere.
+ setupterm(kind or environ.get('TERM', 'unknown'),
+ self._init_descriptor)
+
+ self.stream = stream
+
+ # Sugary names for commonly-used capabilities, intended to help avoid trips
+ # to the terminfo man page and comments in your code:
+ _sugar = dict(
+ # Don't use "on" or "bright" as an underscore-separated chunk in any of
+ # these (e.g. on_cology or rock_on) so we don't interfere with
+ # __getattr__.
+ save='sc',
+ restore='rc',
+
+ clear_eol='el',
+ clear_bol='el1',
+ clear_eos='ed',
+ # 'clear' clears the whole screen.
+ position='cup', # deprecated
+ enter_fullscreen='smcup',
+ exit_fullscreen='rmcup',
+ move='cup',
+ move_x='hpa',
+ move_y='vpa',
+ move_left='cub1',
+ move_right='cuf1',
+ move_up='cuu1',
+ move_down='cud1',
+
+ hide_cursor='civis',
+ normal_cursor='cnorm',
+
+ reset_colors='op', # oc doesn't work on my OS X terminal.
+
+ normal='sgr0',
+ reverse='rev',
+ # 'bold' is just 'bold'. Similarly...
+ # blink
+ # dim
+ # flash
+ italic='sitm',
+ no_italic='ritm',
+ shadow='sshm',
+ no_shadow='rshm',
+ standout='smso',
+ no_standout='rmso',
+ subscript='ssubm',
+ no_subscript='rsubm',
+ superscript='ssupm',
+ no_superscript='rsupm',
+ underline='smul',
+ no_underline='rmul')
+
+ def __getattr__(self, attr):
+ """Return a terminal capability, like bold.
+
+ For example, you can say ``term.bold`` to get the string that turns on
+ bold formatting and ``term.normal`` to get the string that turns it off
+ again. Or you can take a shortcut: ``term.bold('hi')`` bolds its
+ argument and sets everything to normal afterward. You can even combine
+ things: ``term.bold_underline_red_on_bright_green('yowzers!')``.
+
+ For a parametrized capability like ``cup``, pass the parameters too:
+ ``some_term.cup(line, column)``.
+
+ ``man terminfo`` for a complete list of capabilities.
+
+ Return values are always Unicode.
+
+ """
+ resolution = (self._resolve_formatter(attr) if self.does_styling
+ else NullCallableString())
+ setattr(self, attr, resolution) # Cache capability codes.
+ return resolution
+
+ @property
+ def does_styling(self):
+        """Whether we'll attempt to emit capabilities
+
+ This is influenced by the ``is_a_tty`` property and by the
+ ``force_styling`` argument to the constructor. You can examine
+ this value to decide whether to draw progress bars or other frippery.
+
+ """
+ return self._does_styling
+
+ @property
+ def is_a_tty(self):
+ """Whether my ``stream`` appears to be associated with a terminal"""
+ return self._is_a_tty
+
+ @property
+ def height(self):
+ """The height of the terminal in characters
+
+ If no stream or a stream not representing a terminal was passed in at
+ construction, return the dimension of the controlling terminal so
+ piping to things that eventually display on the terminal (like ``less
+ -R``) work. If a stream representing a terminal was passed in, return
+ the dimensions of that terminal. If there somehow is no controlling
+ terminal, return ``None``. (Thus, you should check that the property
+ ``is_a_tty`` is true before doing any math on the result.)
+
+ """
+ return self._height_and_width()[0]
+
+ @property
+ def width(self):
+ """The width of the terminal in characters
+
+ See ``height()`` for some corner cases.
+
+ """
+ return self._height_and_width()[1]
+
+ def _height_and_width(self):
+ """Return a tuple of (terminal height, terminal width).
+
+ Start by trying TIOCGWINSZ (Terminal I/O-Control: Get Window Size),
+ falling back to environment variables (LINES, COLUMNS), and returning
+ (None, None) if those are unavailable or invalid.
+
+ """
+ # tigetnum('lines') and tigetnum('cols') update only if we call
+ # setupterm() again.
+ for descriptor in self._init_descriptor, sys.__stdout__:
+ try:
+ return struct.unpack(
+ 'hhhh', ioctl(descriptor, TIOCGWINSZ, '\000' * 8))[0:2]
+ except IOError:
+                # When the output stream or init descriptor is not a tty, such
+                # as when stdout is piped to another program, e.g. tee(1),
+                # these ioctls will raise IOError.
+ pass
+ try:
+ return int(environ.get('LINES')), int(environ.get('COLUMNS'))
+ except TypeError:
+ return None, None
+
+ @contextmanager
+ def location(self, x=None, y=None):
+ """Return a context manager for temporarily moving the cursor.
+
+ Move the cursor to a certain position on entry, let you print stuff
+ there, then return the cursor to its original position::
+
+ term = Terminal()
+ with term.location(2, 5):
+ print 'Hello, world!'
+ for x in xrange(10):
+ print 'I can do it %i times!' % x
+
+ Specify ``x`` to move to a certain column, ``y`` to move to a certain
+ row, both, or neither. If you specify neither, only the saving and
+ restoration of cursor position will happen. This can be useful if you
+ simply want to restore your place after doing some manual cursor
+ movement.
+
+ """
+ # Save position and move to the requested column, row, or both:
+ self.stream.write(self.save)
+ if x is not None and y is not None:
+ self.stream.write(self.move(y, x))
+ elif x is not None:
+ self.stream.write(self.move_x(x))
+ elif y is not None:
+ self.stream.write(self.move_y(y))
+ try:
+ yield
+ finally:
+ # Restore original cursor position:
+ self.stream.write(self.restore)
+
+ @contextmanager
+ def fullscreen(self):
+ """Return a context manager that enters fullscreen mode while inside it
+ and restores normal mode on leaving."""
+ self.stream.write(self.enter_fullscreen)
+ try:
+ yield
+ finally:
+ self.stream.write(self.exit_fullscreen)
+
+ @contextmanager
+ def hidden_cursor(self):
+ """Return a context manager that hides the cursor while inside it and
+ makes it visible on leaving."""
+ self.stream.write(self.hide_cursor)
+ try:
+ yield
+ finally:
+ self.stream.write(self.normal_cursor)
+
+ @property
+ def color(self):
+ """Return a capability that sets the foreground color.
+
+ The capability is unparametrized until called and passed a number
+ (0-15), at which point it returns another string which represents a
+ specific color change. This second string can further be called to
+ color a piece of text and set everything back to normal afterward.
+
+ :arg num: The number, 0-15, of the color
+
+ """
+ return ParametrizingString(self._foreground_color, self.normal)
+
+ @property
+ def on_color(self):
+ """Return a capability that sets the background color.
+
+ See ``color()``.
+
+ """
+ return ParametrizingString(self._background_color, self.normal)
+
+ @property
+ def number_of_colors(self):
+ """Return the number of colors the terminal supports.
+
+ Common values are 0, 8, 16, 88, and 256.
+
+ Though the underlying capability returns -1 when there is no color
+ support, we return 0. This lets you test more Pythonically::
+
+ if term.number_of_colors:
+ ...
+
+ We also return 0 if the terminal won't tell us how many colors it
+ supports, which I think is rare.
+
+ """
+ # This is actually the only remotely useful numeric capability. We
+ # don't name it after the underlying capability, because we deviate
+ # slightly from its behavior, and we might someday wish to give direct
+ # access to it.
+ if not self._does_styling:
+ return 0
+
+ colors = tigetnum('colors') # Returns -1 if no color support, -2 if no
+ # such cap.
+ # self.__dict__['colors'] = ret # Cache it. It's not changing.
+ # (Doesn't work.)
+ return colors if colors >= 0 else 0
+
+ def _resolve_formatter(self, attr):
+ """Resolve a sugary or plain capability name, color, or compound
+ formatting function name into a callable capability.
+
+ Return a ``ParametrizingString`` or a ``FormattingString``.
+
+ """
+ if attr in COLORS:
+ return self._resolve_color(attr)
+ elif attr in COMPOUNDABLES:
+ # Bold, underline, or something that takes no parameters
+ return self._formatting_string(self._resolve_capability(attr))
+ else:
+ formatters = split_into_formatters(attr)
+ if all(f in COMPOUNDABLES for f in formatters):
+ # It's a compound formatter, like "bold_green_on_red". Future
+ # optimization: combine all formatting into a single escape
+ # sequence.
+ return self._formatting_string(
+ u''.join(self._resolve_formatter(s) for s in formatters))
+ else:
+ return ParametrizingString(self._resolve_capability(attr))
+
+ def _resolve_capability(self, atom):
+ """Return a terminal code for a capname or a sugary name, or an empty
+ Unicode.
+
+ The return value is always Unicode, because otherwise it is clumsy
+ (especially in Python 3) to concatenate with real (Unicode) strings.
+
+ """
+ code = tigetstr(self._sugar.get(atom, atom))
+ if code:
+ # See the comment in ParametrizingString for why this is latin1.
+ return code.decode('latin1')
+ return u''
+
+ def _resolve_color(self, color):
+ """Resolve a color like red or on_bright_green into a callable
+ capability."""
+ # TODO: Does curses automatically exchange red and blue and cyan and
+ # yellow when a terminal supports setf/setb rather than setaf/setab?
+ # I'll be blasted if I can find any documentation. The following
+ # assumes it does.
+ color_cap = (self._background_color if 'on_' in color else
+ self._foreground_color)
+ # curses constants go up to only 7, so add an offset to get at the
+ # bright colors at 8-15:
+ offset = 8 if 'bright_' in color else 0
+ base_color = color.rsplit('_', 1)[-1]
+ return self._formatting_string(
+ color_cap(getattr(curses, 'COLOR_' + base_color.upper()) + offset))
+
+ @property
+ def _foreground_color(self):
+ return self.setaf or self.setf
+
+ @property
+ def _background_color(self):
+ return self.setab or self.setb
+
+ def _formatting_string(self, formatting):
+ """Return a new ``FormattingString`` which implicitly receives my
+ notion of "normal"."""
+ return FormattingString(formatting, self.normal)
+
+
+def derivative_colors(colors):
+ """Return the names of valid color variants, given the base colors."""
+ return set([('on_' + c) for c in colors] +
+ [('bright_' + c) for c in colors] +
+ [('on_bright_' + c) for c in colors])
+
+
+COLORS = set(['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan',
+ 'white'])
+COLORS.update(derivative_colors(COLORS))
+COMPOUNDABLES = (COLORS |
+ set(['bold', 'underline', 'reverse', 'blink', 'dim', 'italic',
+ 'shadow', 'standout', 'subscript', 'superscript']))
+
+
+class ParametrizingString(text_type):
+ """A Unicode string which can be called to parametrize it as a terminal
+ capability"""
+
+ def __new__(cls, formatting, normal=None):
+ """Instantiate.
+
+ :arg normal: If non-None, indicates that, once parametrized, this can
+ be used as a ``FormattingString``. The value is used as the
+ "normal" capability.
+
+ """
+ new = text_type.__new__(cls, formatting)
+ new._normal = normal
+ return new
+
+ def __call__(self, *args):
+ try:
+ # Re-encode the cap, because tparm() takes a bytestring in Python
+            # 3. However, the result should still look like a plain Unicode
+            # string otherwise, so concatenation works.
+ #
+ # We use *latin1* encoding so that bytes emitted by tparm are
+ # encoded to their native value: some terminal kinds, such as
+ # 'avatar' or 'kermit', emit 8-bit bytes in range 0x7f to 0xff.
+ # latin1 leaves these values unmodified in their conversion to
+ # unicode byte values. The terminal emulator will "catch" and
+ # handle these values, even if emitting utf8-encoded text, where
+ # these bytes would otherwise be illegal utf8 start bytes.
+ parametrized = tparm(self.encode('latin1'), *args).decode('latin1')
+ return (parametrized if self._normal is None else
+ FormattingString(parametrized, self._normal))
+ except curses.error:
+ # Catch "must call (at least) setupterm() first" errors, as when
+ # running simply `nosetests` (without progressive) on nose-
+ # progressive. Perhaps the terminal has gone away between calling
+ # tigetstr and calling tparm.
+ return u''
+ except TypeError:
+ # If the first non-int (i.e. incorrect) arg was a string, suggest
+ # something intelligent:
+ if len(args) == 1 and isinstance(args[0], string_types):
+ raise TypeError(
+ 'A native or nonexistent capability template received '
+ '%r when it was expecting ints. You probably misspelled a '
+ 'formatting call like bright_red_on_white(...).' % args)
+ else:
+ # Somebody passed a non-string; I don't feel confident
+ # guessing what they were trying to do.
+ raise
+
+
+class FormattingString(text_type):
+ """A Unicode string which can be called upon a piece of text to wrap it in
+ formatting"""
+
+ def __new__(cls, formatting, normal):
+ new = text_type.__new__(cls, formatting)
+ new._normal = normal
+ return new
+
+ def __call__(self, text):
+ """Return a new string that is ``text`` formatted with my contents.
+
+ At the beginning of the string, I prepend the formatting that is my
+ contents. At the end, I append the "normal" sequence to set everything
+ back to defaults. The return value is always a Unicode.
+
+ """
+ return self + text + self._normal
+
+
+class NullCallableString(text_type):
+ """A dummy callable Unicode to stand in for ``FormattingString`` and
+ ``ParametrizingString``
+
+ We use this when there is no tty and thus all capabilities should be blank.
+
+ """
+ def __new__(cls):
+ new = text_type.__new__(cls, u'')
+ return new
+
+ def __call__(self, *args):
+ """Return a Unicode or whatever you passed in as the first arg
+ (hopefully a string of some kind).
+
+ When called with an int as the first arg, return an empty Unicode. An
+ int is a good hint that I am a ``ParametrizingString``, as there are
+ only about half a dozen string-returning capabilities on OS X's
+ terminfo man page which take any param that's not an int, and those are
+ seldom if ever used on modern terminal emulators. (Most have to do with
+ programming function keys. Blessings' story for supporting
+ non-string-returning caps is undeveloped.) And any parametrized
+ capability in a situation where all capabilities themselves are taken
+ to be blank are, of course, themselves blank.
+
+        When called with a single non-int arg, return that arg unchanged. I am
+        acting as a ``FormattingString``.
+
+ """
+ if len(args) != 1 or isinstance(args[0], int):
+ # I am acting as a ParametrizingString.
+
+ # tparm can take not only ints but also (at least) strings as its
+ # second...nth args. But we don't support callably parametrizing
+ # caps that take non-ints yet, so we can cheap out here. TODO: Go
+ # through enough of the motions in the capability resolvers to
+ # determine which of 2 special-purpose classes,
+ # NullParametrizableString or NullFormattingString, to return, and
+ # retire this one.
+ return u''
+ return args[0] # Should we force even strs in Python 2.x to be
+ # unicodes? No. How would I know what encoding to use
+ # to convert it?
+
+
+def split_into_formatters(compound):
+ """Split a possibly compound format string into segments.
+
+ >>> split_into_formatters('bold_underline_bright_blue_on_red')
+ ['bold', 'underline', 'bright_blue', 'on_red']
+
+ """
+ merged_segs = []
+ # These occur only as prefixes, so they can always be merged:
+ mergeable_prefixes = ['on', 'bright', 'on_bright']
+ for s in compound.split('_'):
+ if merged_segs and merged_segs[-1] in mergeable_prefixes:
+ merged_segs[-1] += '_' + s
+ else:
+ merged_segs.append(s)
+ return merged_segs
diff --git a/third_party/python/blessings/blessings/tests.py b/third_party/python/blessings/blessings/tests.py
new file mode 100644
index 0000000000..a03eb8dcbe
--- /dev/null
+++ b/third_party/python/blessings/blessings/tests.py
@@ -0,0 +1,269 @@
+# -*- coding: utf-8 -*-
+"""Automated tests (as opposed to human-verified test patterns)
+
+It was tempting to mock out curses to get predictable output from ``tigetstr``,
+but there are concrete integration-testing benefits in not doing so. For
+instance, ``tigetstr`` changed its return type in Python 3.2.3. So instead, we
+simply create all our test ``Terminal`` instances with a known terminal type.
+All we require from the host machine is that a standard terminfo definition of
+xterm-256color exists.
+
+"""
+from curses import tigetstr, tparm
+from functools import partial
+import sys
+
+from nose import SkipTest
+from nose.tools import eq_
+from six import StringIO
+
+# This tests that __all__ is correct, since below we use everything that
+# should be imported:
+from blessings import *
+
+
+TestTerminal = partial(Terminal, kind='xterm-256color')
+
+
+def unicode_cap(cap):
+ """Return the result of ``tigetstr`` except as Unicode."""
+ return tigetstr(cap).decode('latin1')
+
+
+def unicode_parm(cap, *parms):
+ """Return the result of ``tparm(tigetstr())`` except as Unicode."""
+ return tparm(tigetstr(cap), *parms).decode('latin1')
+
+
+def test_capability():
+ """Check that a capability lookup works.
+
+ Also test that Terminal grabs a reasonable default stream. This test
+ assumes it will be run from a tty.
+
+ """
+ t = TestTerminal()
+ sc = unicode_cap('sc')
+ eq_(t.save, sc)
+ eq_(t.save, sc) # Make sure caching doesn't screw it up.
+
+
+def test_capability_without_tty():
+ """Assert capability templates are '' when stream is not a tty."""
+ t = TestTerminal(stream=StringIO())
+ eq_(t.save, u'')
+ eq_(t.red, u'')
+
+
+def test_capability_with_forced_tty():
+ """If we force styling, capabilities had better not (generally) be
+ empty."""
+ t = TestTerminal(stream=StringIO(), force_styling=True)
+ eq_(t.save, unicode_cap('sc'))
+
+
+def test_parametrization():
+ """Test parametrizing a capability."""
+ eq_(TestTerminal().cup(3, 4), unicode_parm('cup', 3, 4))
+
+
+def test_height_and_width():
+ """Assert that ``height_and_width()`` returns ints."""
+ t = TestTerminal() # kind shouldn't matter.
+ assert isinstance(t.height, int)
+ assert isinstance(t.width, int)
+
+
+def test_stream_attr():
+ """Make sure Terminal exposes a ``stream`` attribute that defaults to
+ something sane."""
+ eq_(Terminal().stream, sys.__stdout__)
+
+
+def test_location():
+ """Make sure ``location()`` does what it claims."""
+ t = TestTerminal(stream=StringIO(), force_styling=True)
+
+ with t.location(3, 4):
+ t.stream.write(u'hi')
+
+ eq_(t.stream.getvalue(), unicode_cap('sc') +
+ unicode_parm('cup', 4, 3) +
+ u'hi' +
+ unicode_cap('rc'))
+
+
+def test_horizontal_location():
+ """Make sure we can move the cursor horizontally without changing rows."""
+ t = TestTerminal(stream=StringIO(), force_styling=True)
+ with t.location(x=5):
+ pass
+ eq_(t.stream.getvalue(), unicode_cap('sc') +
+ unicode_parm('hpa', 5) +
+ unicode_cap('rc'))
+
+
+def test_null_location():
+ """Make sure ``location()`` with no args just does position restoration."""
+ t = TestTerminal(stream=StringIO(), force_styling=True)
+ with t.location():
+ pass
+ eq_(t.stream.getvalue(), unicode_cap('sc') +
+ unicode_cap('rc'))
+
+
+def test_zero_location():
+ """Make sure ``location()`` pays attention to 0-valued args."""
+ t = TestTerminal(stream=StringIO(), force_styling=True)
+ with t.location(0, 0):
+ pass
+ eq_(t.stream.getvalue(), unicode_cap('sc') +
+ unicode_parm('cup', 0, 0) +
+ unicode_cap('rc'))
+
+
+def test_null_fileno():
+ """Make sure ``Terminal`` works when ``fileno`` is ``None``.
+
+ This simulates piping output to another program.
+
+ """
+ out = StringIO()
+ out.fileno = None
+ t = TestTerminal(stream=out)
+ eq_(t.save, u'')
+
+
+def test_mnemonic_colors():
+ """Make sure color shortcuts work."""
+ def color(num):
+ return unicode_parm('setaf', num)
+
+ def on_color(num):
+ return unicode_parm('setab', num)
+
+ # Avoid testing red, blue, yellow, and cyan, since they might someday
+ # change depending on terminal type.
+ t = TestTerminal()
+ eq_(t.white, color(7))
+ eq_(t.green, color(2)) # Make sure it's different than white.
+ eq_(t.on_black, on_color(0))
+ eq_(t.on_green, on_color(2))
+ eq_(t.bright_black, color(8))
+ eq_(t.bright_green, color(10))
+ eq_(t.on_bright_black, on_color(8))
+ eq_(t.on_bright_green, on_color(10))
+
+
+def test_callable_numeric_colors():
+ """``color(n)`` should return a formatting wrapper."""
+ t = TestTerminal()
+ eq_(t.color(5)('smoo'), t.magenta + 'smoo' + t.normal)
+ eq_(t.color(5)('smoo'), t.color(5) + 'smoo' + t.normal)
+ eq_(t.on_color(2)('smoo'), t.on_green + 'smoo' + t.normal)
+ eq_(t.on_color(2)('smoo'), t.on_color(2) + 'smoo' + t.normal)
+
+
+def test_null_callable_numeric_colors():
+ """``color(n)`` should be a no-op on null terminals."""
+ t = TestTerminal(stream=StringIO())
+ eq_(t.color(5)('smoo'), 'smoo')
+ eq_(t.on_color(6)('smoo'), 'smoo')
+
+
+def test_naked_color_cap():
+ """``term.color`` should return a stringlike capability."""
+ t = TestTerminal()
+ eq_(t.color + '', t.setaf + '')
+
+
+def test_number_of_colors_without_tty():
+ """``number_of_colors`` should return 0 when there's no tty."""
+ # Hypothesis: once setupterm() has run and decided the tty supports 256
+ # colors, it never changes its mind.
+ raise SkipTest
+
+ t = TestTerminal(stream=StringIO())
+ eq_(t.number_of_colors, 0)
+ t = TestTerminal(stream=StringIO(), force_styling=True)
+ eq_(t.number_of_colors, 0)
+
+
+def test_number_of_colors_with_tty():
+ """``number_of_colors`` should work."""
+ t = TestTerminal()
+ eq_(t.number_of_colors, 256)
+
+
+def test_formatting_functions():
+ """Test crazy-ass formatting wrappers, both simple and compound."""
+ t = TestTerminal()
+ # By now, it should be safe to use sugared attributes. Other tests test
+ # those.
+ eq_(t.bold(u'hi'), t.bold + u'hi' + t.normal)
+ eq_(t.green('hi'), t.green + u'hi' + t.normal) # Plain strs for Python 2.x
+ # Test some non-ASCII chars, probably not necessary:
+ eq_(t.bold_green(u'boö'), t.bold + t.green + u'boö' + t.normal)
+ eq_(t.bold_underline_green_on_red('boo'),
+ t.bold + t.underline + t.green + t.on_red + u'boo' + t.normal)
+ # Don't spell things like this:
+ eq_(t.on_bright_red_bold_bright_green_underline('meh'),
+ t.on_bright_red + t.bold + t.bright_green + t.underline + u'meh' +
+ t.normal)
+
+
+def test_formatting_functions_without_tty():
+ """Test crazy-ass formatting wrappers when there's no tty."""
+ t = TestTerminal(stream=StringIO())
+ eq_(t.bold(u'hi'), u'hi')
+ eq_(t.green('hi'), u'hi')
+ # Test non-ASCII chars, no longer really necessary:
+ eq_(t.bold_green(u'boö'), u'boö')
+ eq_(t.bold_underline_green_on_red('loo'), u'loo')
+ eq_(t.on_bright_red_bold_bright_green_underline('meh'), u'meh')
+
+
+def test_nice_formatting_errors():
+ """Make sure you get nice hints if you misspell a formatting wrapper."""
+ t = TestTerminal()
+ try:
+ t.bold_misspelled('hey')
+ except TypeError as e:
+ assert 'probably misspelled' in e.args[0]
+
+ try:
+ t.bold_misspelled(u'hey') # unicode
+ except TypeError as e:
+ assert 'probably misspelled' in e.args[0]
+
+ try:
+ t.bold_misspelled(None) # an arbitrary non-string
+ except TypeError as e:
+ assert 'probably misspelled' not in e.args[0]
+
+ try:
+ t.bold_misspelled('a', 'b') # >1 string arg
+ except TypeError as e:
+ assert 'probably misspelled' not in e.args[0]
+
+
+def test_init_descriptor_always_initted():
+ """We should be able to get a height and width even on no-tty Terminals."""
+ t = Terminal(stream=StringIO())
+ eq_(type(t.height), int)
+
+
+def test_force_styling_none():
+ """If ``force_styling=None`` is passed to the constructor, don't ever do
+ styling."""
+ t = TestTerminal(force_styling=None)
+ eq_(t.save, '')
+
+
+def test_null_callable_string():
+ """Make sure NullCallableString tolerates all numbers and kinds of args it
+ might receive."""
+ t = TestTerminal(stream=StringIO())
+ eq_(t.clear, '')
+ eq_(t.move(1, 2), '')
+ eq_(t.move_x(1), '')
diff --git a/third_party/python/blessings/setup.cfg b/third_party/python/blessings/setup.cfg
new file mode 100644
index 0000000000..861a9f5542
--- /dev/null
+++ b/third_party/python/blessings/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/third_party/python/blessings/setup.py b/third_party/python/blessings/setup.py
new file mode 100644
index 0000000000..01488fcdf4
--- /dev/null
+++ b/third_party/python/blessings/setup.py
@@ -0,0 +1,49 @@
+import sys
+
+# Prevent spurious errors during `python setup.py test`, a la
+# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html:
+try:
+ import multiprocessing
+except ImportError:
+ pass
+
+from setuptools import setup, find_packages
+
+
+setup(
+ name='blessings',
+ version='1.7',
+ description='A thin, practical wrapper around terminal coloring, styling, and positioning',
+ long_description=open('README.rst').read(),
+ author='Erik Rose',
+ author_email='erikrose@grinchcentral.com',
+ license='MIT',
+ packages=find_packages(exclude=['ez_setup']),
+ install_requires=['six'],
+ tests_require=['nose'],
+ test_suite='nose.collector',
+ url='https://github.com/erikrose/blessings',
+ include_package_data=True,
+ python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
+ classifiers=[
+ 'Intended Audience :: Developers',
+ 'Natural Language :: English',
+ 'Development Status :: 5 - Production/Stable',
+ 'Environment :: Console',
+ 'Environment :: Console :: Curses',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: POSIX',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: Software Development :: User Interfaces',
+ 'Topic :: Terminals'
+ ],
+ keywords=['terminal', 'tty', 'curses', 'ncurses', 'formatting', 'style', 'color', 'console'],
+)
diff --git a/third_party/python/blessings/tox.ini b/third_party/python/blessings/tox.ini
new file mode 100644
index 0000000000..558601ec2b
--- /dev/null
+++ b/third_party/python/blessings/tox.ini
@@ -0,0 +1,8 @@
+[tox]
+envlist = py{27,34,35,36,py}
+
+[testenv]
+commands = nosetests blessings
+deps =
+ nose
+ six
diff --git a/third_party/python/cbor2/.gitignore b/third_party/python/cbor2/.gitignore
new file mode 100644
index 0000000000..067698fa62
--- /dev/null
+++ b/third_party/python/cbor2/.gitignore
@@ -0,0 +1,13 @@
+.project
+.pydevproject
+.idea/
+.coverage
+.cache/
+.tox/
+.eggs/
+*.egg-info/
+*.pyc
+dist/
+docs/_build/
+build/
+virtualenv/
diff --git a/third_party/python/cbor2/.travis.yml b/third_party/python/cbor2/.travis.yml
new file mode 100644
index 0000000000..82cce5a9f0
--- /dev/null
+++ b/third_party/python/cbor2/.travis.yml
@@ -0,0 +1,28 @@
+sudo: false
+
+language: python
+
+python:
+ - "2.7"
+ - "3.3"
+ - "3.4"
+ - "3.5"
+ - "3.6"
+ - "pypy"
+
+install: pip install tox-travis coveralls
+
+script: tox
+
+after_success: coveralls
+
+deploy:
+ provider: pypi
+ user: agronholm
+ password:
+ secure: QZ5qoxsrzns/b27adWNzh/OAJp86yRuxTyAFhvas/pbkiALdlT/+PGyhJBnpe+7WBTWnkIXl+YU//voJ0btf6DJcWwgRavMsy22LJJPkvvK+2DHiZ//DbpLbqKWc74y4moce29BCajFTm9JkVwcL2dgN9WuZt+Tay0efcP4sESLxo5lIGdlaQbu+9zVs61Z4Ov+yyEMO/j3LeKshNmUq+84CveQWMiXndXBfJX5TWwjahmUNDp5fMctJxr4fqgL4HCTVQhU79dPc00yDEGS45QkpP8JDrF1DQvU5Ht4COz/Lvzt11pwsAvws2ddclqBUCQsGaWvEWH5rxZTYx/MaMVdTctaUVNoT0wnFUsXXZkomQV0x8vb5RtRLDrKwXosXlSEqnRyiKhdgHGoswHvB7XF5BtQ5RmydRX77pwEGmFd3lqRif2bos0MEeOJA8Xds0TGOKO4PyokBnj/a0tjT2LEVxObmTT6grz5QPXi386AWgxbNl0Lp7cnkSpCqC1hEHVqrDlbtu7uvfGwwe/sYlEcQ07PNCvFoR2GXJawbeHmJRfz+KXjffrt2yCzc671FL1goUysHKdBCppvUInI8FCMQpVWEh5MmQJKB4IpDrhqfo0VS+NNZgZ8lFStq27Pmwqf1HUTGlaDi9VQ0Vo7tW5j4JbD/JvOQSb3j9DjUFps=
+ distributions: sdist bdist_wheel
+ on:
+ tags: true
+ python: "3.5"
+ repo: agronholm/cbor2
diff --git a/third_party/python/cbor2/LICENSE.txt b/third_party/python/cbor2/LICENSE.txt
new file mode 100644
index 0000000000..07806f8af9
--- /dev/null
+++ b/third_party/python/cbor2/LICENSE.txt
@@ -0,0 +1,19 @@
+This is the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+Copyright (c) Alex Grönholm
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this
+software and associated documentation files (the "Software"), to deal in the Software
+without restriction, including without limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
+to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or
+substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/third_party/python/cbor2/PKG-INFO b/third_party/python/cbor2/PKG-INFO
new file mode 100644
index 0000000000..cc419299d0
--- /dev/null
+++ b/third_party/python/cbor2/PKG-INFO
@@ -0,0 +1,45 @@
+Metadata-Version: 1.1
+Name: cbor2
+Version: 4.0.1
+Summary: Pure Python CBOR (de)serializer with extensive tag support
+Home-page: https://github.com/agronholm/cbor2
+Author: Alex Grönholm
+Author-email: alex.gronholm@nextday.fi
+License: MIT
+Description: .. image:: https://travis-ci.org/agronholm/cbor2.svg?branch=master
+ :target: https://travis-ci.org/agronholm/cbor2
+ :alt: Build Status
+ .. image:: https://coveralls.io/repos/github/agronholm/cbor2/badge.svg?branch=master
+ :target: https://coveralls.io/github/agronholm/cbor2?branch=master
+ :alt: Code Coverage
+
+ This library provides encoding and decoding for the Concise Binary Object Representation (CBOR)
+ (`RFC 7049`_) serialization format.
+
+ There exists another Python CBOR implementation (cbor) which is faster on CPython due to its C
+ extensions. On PyPy, cbor2 and cbor are almost identical in performance. The other implementation
+ also lacks documentation and a comprehensive test suite, does not support most standard extension
+ tags and is known to crash (segfault) when passed a cyclic structure (say, a list containing
+ itself).
+
+ .. _RFC 7049: https://tools.ietf.org/html/rfc7049
+
+ Project links
+ -------------
+
+ * `Documentation <http://cbor2.readthedocs.org/>`_
+ * `Source code <https://github.com/agronholm/cbor2>`_
+ * `Issue tracker <https://github.com/agronholm/cbor2/issues>`_
+
+Keywords: serialization cbor
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
diff --git a/third_party/python/cbor2/README.rst b/third_party/python/cbor2/README.rst
new file mode 100644
index 0000000000..c1131fd63c
--- /dev/null
+++ b/third_party/python/cbor2/README.rst
@@ -0,0 +1,24 @@
+.. image:: https://travis-ci.org/agronholm/cbor2.svg?branch=master
+ :target: https://travis-ci.org/agronholm/cbor2
+ :alt: Build Status
+.. image:: https://coveralls.io/repos/github/agronholm/cbor2/badge.svg?branch=master
+ :target: https://coveralls.io/github/agronholm/cbor2?branch=master
+ :alt: Code Coverage
+
+This library provides encoding and decoding for the Concise Binary Object Representation (CBOR)
+(`RFC 7049`_) serialization format.
+
+There exists another Python CBOR implementation (cbor) which is faster on CPython due to its C
+extensions. On PyPy, cbor2 and cbor are almost identical in performance. The other implementation
+also lacks documentation and a comprehensive test suite, does not support most standard extension
+tags and is known to crash (segfault) when passed a cyclic structure (say, a list containing
+itself).
+
+.. _RFC 7049: https://tools.ietf.org/html/rfc7049
+
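+A quick usage example (a minimal sketch; see the documentation for details)::
+
+    from cbor2 import dumps, loads
+
+    payload = dumps({'greeting': u'hello', 'answer': 42})
+    assert loads(payload) == {'greeting': u'hello', 'answer': 42}
+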
+Project links
+-------------
+
+* `Documentation <http://cbor2.readthedocs.org/>`_
+* `Source code <https://github.com/agronholm/cbor2>`_
+* `Issue tracker <https://github.com/agronholm/cbor2/issues>`_
diff --git a/third_party/python/cbor2/cbor2/__init__.py b/third_party/python/cbor2/cbor2/__init__.py
new file mode 100644
index 0000000000..474841ace4
--- /dev/null
+++ b/third_party/python/cbor2/cbor2/__init__.py
@@ -0,0 +1,3 @@
+from cbor2.decoder import load, loads, CBORDecoder, CBORDecodeError # noqa
+from cbor2.encoder import dump, dumps, CBOREncoder, CBOREncodeError, shareable_encoder # noqa
+from cbor2.types import CBORTag, CBORSimpleValue, undefined # noqa
diff --git a/third_party/python/cbor2/cbor2/compat.py b/third_party/python/cbor2/cbor2/compat.py
new file mode 100644
index 0000000000..983efda59b
--- /dev/null
+++ b/third_party/python/cbor2/cbor2/compat.py
@@ -0,0 +1,49 @@
+import sys
+
+
+if sys.version_info.major < 3:
+ from datetime import tzinfo, timedelta
+
+ class timezone(tzinfo):
+ def __init__(self, offset):
+ self.offset = offset
+
+ def utcoffset(self, dt):
+ return self.offset
+
+ def dst(self, dt):
+ return timedelta(0)
+
+ def tzname(self, dt):
+ return 'UTC+00:00'
+
+ def as_unicode(string):
+ return string.decode('utf-8')
+
+ def iteritems(self):
+ return self.iteritems()
+
+ def bytes_from_list(values):
+ return bytes(bytearray(values))
+
+ byte_as_integer = ord
+ timezone.utc = timezone(timedelta(0))
+ xrange = xrange # noqa
+ long = long # noqa
+ unicode = unicode # noqa
+else:
+ from datetime import timezone
+
+ def byte_as_integer(bytestr):
+ return bytestr[0]
+
+ def as_unicode(string):
+ return string
+
+ def iteritems(self):
+ return self.items()
+
+ xrange = range # noqa
+ long = int # noqa
+ unicode = str # noqa
+ bytes_from_list = bytes
diff --git a/third_party/python/cbor2/cbor2/decoder.py b/third_party/python/cbor2/cbor2/decoder.py
new file mode 100644
index 0000000000..5833d9e9f4
--- /dev/null
+++ b/third_party/python/cbor2/cbor2/decoder.py
@@ -0,0 +1,411 @@
+import re
+import struct
+from datetime import datetime, timedelta
+from io import BytesIO
+
+from cbor2.compat import timezone, xrange, byte_as_integer
+from cbor2.types import CBORTag, undefined, break_marker, CBORSimpleValue
+
+timestamp_re = re.compile(r'^(\d{4})-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)'
+ r'(?:\.(\d+))?(?:Z|([+-]\d\d):(\d\d))$')
+
+
+class CBORDecodeError(Exception):
+ """Raised when an error occurs deserializing a CBOR datastream."""
+
+
+def decode_uint(decoder, subtype, shareable_index=None, allow_infinite=False):
+ # Major tag 0
+ if subtype < 24:
+ return subtype
+ elif subtype == 24:
+ return struct.unpack('>B', decoder.read(1))[0]
+ elif subtype == 25:
+ return struct.unpack('>H', decoder.read(2))[0]
+ elif subtype == 26:
+ return struct.unpack('>L', decoder.read(4))[0]
+ elif subtype == 27:
+ return struct.unpack('>Q', decoder.read(8))[0]
+ elif subtype == 31 and allow_infinite:
+ return None
+ else:
+ raise CBORDecodeError('unknown unsigned integer subtype 0x%x' % subtype)
+
+
+def decode_negint(decoder, subtype, shareable_index=None):
+ # Major tag 1
+ uint = decode_uint(decoder, subtype)
+ return -uint - 1
+
+
+def decode_bytestring(decoder, subtype, shareable_index=None):
+ # Major tag 2
+ length = decode_uint(decoder, subtype, allow_infinite=True)
+ if length is None:
+ # Indefinite length
+ buf = bytearray()
+ while True:
+ initial_byte = byte_as_integer(decoder.read(1))
+ if initial_byte == 255:
+ return buf
+ else:
+ length = decode_uint(decoder, initial_byte & 31)
+ value = decoder.read(length)
+ buf.extend(value)
+ else:
+ return decoder.read(length)
+
+
+def decode_string(decoder, subtype, shareable_index=None):
+ # Major tag 3
+ return decode_bytestring(decoder, subtype).decode('utf-8')
+
+
+def decode_array(decoder, subtype, shareable_index=None):
+ # Major tag 4
+ items = []
+ decoder.set_shareable(shareable_index, items)
+ length = decode_uint(decoder, subtype, allow_infinite=True)
+ if length is None:
+ # Indefinite length
+ while True:
+ value = decoder.decode()
+ if value is break_marker:
+ break
+ else:
+ items.append(value)
+ else:
+ for _ in xrange(length):
+ item = decoder.decode()
+ items.append(item)
+
+ return items
+
+
+def decode_map(decoder, subtype, shareable_index=None):
+ # Major tag 5
+ dictionary = {}
+ decoder.set_shareable(shareable_index, dictionary)
+ length = decode_uint(decoder, subtype, allow_infinite=True)
+ if length is None:
+ # Indefinite length
+ while True:
+ key = decoder.decode()
+ if key is break_marker:
+ break
+ else:
+ value = decoder.decode()
+ dictionary[key] = value
+ else:
+ for _ in xrange(length):
+ key = decoder.decode()
+ value = decoder.decode()
+ dictionary[key] = value
+
+ if decoder.object_hook:
+ return decoder.object_hook(decoder, dictionary)
+ else:
+ return dictionary
+
+
+def decode_semantic(decoder, subtype, shareable_index=None):
+ # Major tag 6
+ tagnum = decode_uint(decoder, subtype)
+
+ # Special handling for the "shareable" tag
+ if tagnum == 28:
+ shareable_index = decoder._allocate_shareable()
+ return decoder.decode(shareable_index)
+
+ value = decoder.decode()
+ semantic_decoder = semantic_decoders.get(tagnum)
+ if semantic_decoder:
+ return semantic_decoder(decoder, value, shareable_index)
+
+ tag = CBORTag(tagnum, value)
+ if decoder.tag_hook:
+ return decoder.tag_hook(decoder, tag, shareable_index)
+ else:
+ return tag
+
+
+def decode_special(decoder, subtype, shareable_index=None):
+ # Simple value
+ if subtype < 20:
+ return CBORSimpleValue(subtype)
+
+ # Major tag 7
+ return special_decoders[subtype](decoder)
+
+
+#
+# Semantic decoders (major tag 6)
+#
+
+def decode_datetime_string(decoder, value, shareable_index=None):
+ # Semantic tag 0
+ match = timestamp_re.match(value)
+ if match:
+ year, month, day, hour, minute, second, micro, offset_h, offset_m = match.groups()
+ if offset_h:
+ tz = timezone(timedelta(hours=int(offset_h), minutes=int(offset_m)))
+ else:
+ tz = timezone.utc
+
+ return datetime(int(year), int(month), int(day), int(hour), int(minute), int(second),
+ int(micro or 0), tz)
+ else:
+ raise CBORDecodeError('invalid datetime string: {}'.format(value))
+
+
+def decode_epoch_datetime(decoder, value, shareable_index=None):
+ # Semantic tag 1
+ return datetime.fromtimestamp(value, timezone.utc)
+
+
+def decode_positive_bignum(decoder, value, shareable_index=None):
+ # Semantic tag 2
+ from binascii import hexlify
+ return int(hexlify(value), 16)
+
+
+def decode_negative_bignum(decoder, value, shareable_index=None):
+ # Semantic tag 3
+ return -decode_positive_bignum(decoder, value) - 1
+
+
+def decode_fraction(decoder, value, shareable_index=None):
+ # Semantic tag 4
+ from decimal import Decimal
+ exp = Decimal(value[0])
+ mantissa = Decimal(value[1])
+ return mantissa * (10 ** exp)
+
+
+def decode_bigfloat(decoder, value, shareable_index=None):
+ # Semantic tag 5
+ from decimal import Decimal
+ exp = Decimal(value[0])
+ mantissa = Decimal(value[1])
+ return mantissa * (2 ** exp)
+
+
+def decode_sharedref(decoder, value, shareable_index=None):
+ # Semantic tag 29
+ try:
+ shared = decoder._shareables[value]
+ except IndexError:
+ raise CBORDecodeError('shared reference %d not found' % value)
+
+ if shared is None:
+ raise CBORDecodeError('shared value %d has not been initialized' % value)
+ else:
+ return shared
+
+
+def decode_rational(decoder, value, shareable_index=None):
+ # Semantic tag 30
+ from fractions import Fraction
+ return Fraction(*value)
+
+
+def decode_regexp(decoder, value, shareable_index=None):
+ # Semantic tag 35
+ return re.compile(value)
+
+
+def decode_mime(decoder, value, shareable_index=None):
+ # Semantic tag 36
+ from email.parser import Parser
+ return Parser().parsestr(value)
+
+
+def decode_uuid(decoder, value, shareable_index=None):
+ # Semantic tag 37
+ from uuid import UUID
+ return UUID(bytes=value)
+
+
+#
+# Special decoders (major tag 7)
+#
+
+def decode_simple_value(decoder, shareable_index=None):
+ return CBORSimpleValue(struct.unpack('>B', decoder.read(1))[0])
+
+
+def decode_float16(decoder, shareable_index=None):
+ # Code adapted from RFC 7049, appendix D
+ from math import ldexp
+
+ def decode_single(single):
+ return struct.unpack("!f", struct.pack("!I", single))[0]
+
+ payload = struct.unpack('>H', decoder.read(2))[0]
+ value = (payload & 0x7fff) << 13 | (payload & 0x8000) << 16
+ if payload & 0x7c00 != 0x7c00:
+ return ldexp(decode_single(value), 112)
+
+ return decode_single(value | 0x7f800000)
+
+
+def decode_float32(decoder, shareable_index=None):
+ return struct.unpack('>f', decoder.read(4))[0]
+
+
+def decode_float64(decoder, shareable_index=None):
+ return struct.unpack('>d', decoder.read(8))[0]
+
+
+major_decoders = {
+ 0: decode_uint,
+ 1: decode_negint,
+ 2: decode_bytestring,
+ 3: decode_string,
+ 4: decode_array,
+ 5: decode_map,
+ 6: decode_semantic,
+ 7: decode_special
+}
+
+special_decoders = {
+ 20: lambda self: False,
+ 21: lambda self: True,
+ 22: lambda self: None,
+ 23: lambda self: undefined,
+ 24: decode_simple_value,
+ 25: decode_float16,
+ 26: decode_float32,
+ 27: decode_float64,
+ 31: lambda self: break_marker
+}
+
+semantic_decoders = {
+ 0: decode_datetime_string,
+ 1: decode_epoch_datetime,
+ 2: decode_positive_bignum,
+ 3: decode_negative_bignum,
+ 4: decode_fraction,
+ 5: decode_bigfloat,
+ 29: decode_sharedref,
+ 30: decode_rational,
+ 35: decode_regexp,
+ 36: decode_mime,
+ 37: decode_uuid
+}
+
+
+class CBORDecoder(object):
+ """
+ Deserializes a CBOR encoded byte stream.
+
+ :param tag_hook: Callable that takes 3 arguments: the decoder instance, the
+ :class:`~cbor2.types.CBORTag` and the shareable index for the resulting object, if any.
+ This callback is called for any tags for which there is no built-in decoder.
+ The return value is substituted for the CBORTag object in the deserialized output.
+ :param object_hook: Callable that takes 2 arguments: the decoder instance and the dictionary.
+ This callback is called for each deserialized :class:`dict` object.
+ The return value is substituted for the dict in the deserialized output.
+ """
+
+ __slots__ = ('fp', 'tag_hook', 'object_hook', '_shareables')
+
+ def __init__(self, fp, tag_hook=None, object_hook=None):
+ self.fp = fp
+ self.tag_hook = tag_hook
+ self.object_hook = object_hook
+ self._shareables = []
+
+ def _allocate_shareable(self):
+ self._shareables.append(None)
+ return len(self._shareables) - 1
+
+ def set_shareable(self, index, value):
+ """
+ Set the shareable value for the last encountered shared value marker, if any.
+
+ If the given index is ``None``, nothing is done.
+
+ :param index: the value of the ``shared_index`` argument to the decoder
+ :param value: the shared value
+
+ """
+ if index is not None:
+ self._shareables[index] = value
+
+ def read(self, amount):
+ """
+ Read bytes from the data stream.
+
+ :param int amount: the number of bytes to read
+
+ """
+ data = self.fp.read(amount)
+ if len(data) < amount:
+ raise CBORDecodeError('premature end of stream (expected to read {} bytes, got {} '
+ 'instead)'.format(amount, len(data)))
+
+ return data
+
+ def decode(self, shareable_index=None):
+ """
+ Decode the next value from the stream.
+
+ :raises CBORDecodeError: if there is any problem decoding the stream
+
+ """
+ try:
+ initial_byte = byte_as_integer(self.fp.read(1))
+ major_type = initial_byte >> 5
+ subtype = initial_byte & 31
+ except Exception as e:
+ raise CBORDecodeError('error reading major type at index {}: {}'
+ .format(self.fp.tell(), e))
+
+ decoder = major_decoders[major_type]
+ try:
+ return decoder(self, subtype, shareable_index)
+ except CBORDecodeError:
+ raise
+ except Exception as e:
+ raise CBORDecodeError('error decoding value at index {}: {}'.format(self.fp.tell(), e))
+
+ def decode_from_bytes(self, buf):
+ """
+ Wrap the given bytestring as a file and call :meth:`decode` with it as the argument.
+
+        This method is intended to be used from the ``tag_hook`` hook when an object needs to be
+ decoded separately from the rest but while still taking advantage of the shared value
+ registry.
+
+ """
+ old_fp = self.fp
+ self.fp = BytesIO(buf)
+ retval = self.decode()
+ self.fp = old_fp
+ return retval
+
+
+def loads(payload, **kwargs):
+ """
+ Deserialize an object from a bytestring.
+
+    :param bytes payload: the bytestring to deserialize
+ :param kwargs: keyword arguments passed to :class:`~.CBORDecoder`
+ :return: the deserialized object
+
+ """
+ fp = BytesIO(payload)
+ return CBORDecoder(fp, **kwargs).decode()
+
+
+def load(fp, **kwargs):
+ """
+ Deserialize an object from an open file.
+
+ :param fp: the input file (any file-like object)
+ :param kwargs: keyword arguments passed to :class:`~.CBORDecoder`
+ :return: the deserialized object
+
+ """
+ return CBORDecoder(fp, **kwargs).decode()
diff --git a/third_party/python/cbor2/cbor2/encoder.py b/third_party/python/cbor2/cbor2/encoder.py
new file mode 100644
index 0000000000..adcb2722e5
--- /dev/null
+++ b/third_party/python/cbor2/cbor2/encoder.py
@@ -0,0 +1,362 @@
+import re
+import struct
+from collections import OrderedDict, defaultdict
+from contextlib import contextmanager
+from functools import wraps
+from datetime import datetime, date, time
+from io import BytesIO
+
+from cbor2.compat import iteritems, timezone, long, unicode, as_unicode, bytes_from_list
+from cbor2.types import CBORTag, undefined, CBORSimpleValue
+
+
+class CBOREncodeError(Exception):
+ """Raised when an error occurs while serializing an object into a CBOR datastream."""
+
+
+def shareable_encoder(func):
+ """
+ Wrap the given encoder function to gracefully handle cyclic data structures.
+
+ If value sharing is enabled, this marks the given value shared in the datastream on the
+ first call. If the value has already been passed to this method, a reference marker is
+ instead written to the data stream and the wrapped function is not called.
+
+ If value sharing is disabled, only infinite recursion protection is done.
+
+ """
+ @wraps(func)
+ def wrapper(encoder, value, *args, **kwargs):
+ value_id = id(value)
+ container, container_index = encoder._shared_containers.get(value_id, (None, None))
+ if encoder.value_sharing:
+ if container is value:
+ # Generate a reference to the previous index instead of encoding this again
+ encoder.write(encode_length(0xd8, 0x1d))
+ encode_int(encoder, container_index)
+ else:
+ # Mark the container as shareable
+ encoder._shared_containers[value_id] = (value, len(encoder._shared_containers))
+ encoder.write(encode_length(0xd8, 0x1c))
+ func(encoder, value, *args, **kwargs)
+ else:
+ if container is value:
+ raise CBOREncodeError('cyclic data structure detected but value sharing is '
+ 'disabled')
+ else:
+ encoder._shared_containers[value_id] = (value, None)
+ func(encoder, value, *args, **kwargs)
+ del encoder._shared_containers[value_id]
+
+ return wrapper
+
+
+def encode_length(major_tag, length):
+ if length < 24:
+ return struct.pack('>B', major_tag | length)
+ elif length < 256:
+ return struct.pack('>BB', major_tag | 24, length)
+ elif length < 65536:
+ return struct.pack('>BH', major_tag | 25, length)
+ elif length < 4294967296:
+ return struct.pack('>BL', major_tag | 26, length)
+ else:
+ return struct.pack('>BQ', major_tag | 27, length)
+
+
+def encode_int(encoder, value):
+ # Big integers (2 ** 64 and over)
+ if value >= 18446744073709551616 or value < -18446744073709551616:
+ if value >= 0:
+ major_type = 0x02
+ else:
+ major_type = 0x03
+ value = -value - 1
+
+ values = []
+ while value > 0:
+ value, remainder = divmod(value, 256)
+ values.insert(0, remainder)
+
+ payload = bytes_from_list(values)
+ encode_semantic(encoder, CBORTag(major_type, payload))
+ elif value >= 0:
+ encoder.write(encode_length(0, value))
+ else:
+ encoder.write(encode_length(0x20, abs(value) - 1))
+
+
+def encode_bytestring(encoder, value):
+ encoder.write(encode_length(0x40, len(value)) + value)
+
+
+def encode_bytearray(encoder, value):
+ encode_bytestring(encoder, bytes(value))
+
+
+def encode_string(encoder, value):
+ encoded = value.encode('utf-8')
+ encoder.write(encode_length(0x60, len(encoded)) + encoded)
+
+
+@shareable_encoder
+def encode_array(encoder, value):
+ encoder.write(encode_length(0x80, len(value)))
+ for item in value:
+ encoder.encode(item)
+
+
+@shareable_encoder
+def encode_map(encoder, value):
+ encoder.write(encode_length(0xa0, len(value)))
+ for key, val in iteritems(value):
+ encoder.encode(key)
+ encoder.encode(val)
+
+
+def encode_semantic(encoder, value):
+ encoder.write(encode_length(0xc0, value.tag))
+ encoder.encode(value.value)
+
+
+#
+# Semantic encoders (major tag 6)
+#
+
+def encode_datetime(encoder, value):
+ # Semantic tag 0
+ if not value.tzinfo:
+ if encoder.timezone:
+ value = value.replace(tzinfo=encoder.timezone)
+ else:
+ raise CBOREncodeError(
+ 'naive datetime encountered and no default timezone has been set')
+
+ if encoder.datetime_as_timestamp:
+ from calendar import timegm
+ timestamp = timegm(value.utctimetuple()) + value.microsecond // 1000000
+ encode_semantic(encoder, CBORTag(1, timestamp))
+ else:
+ datestring = as_unicode(value.isoformat().replace('+00:00', 'Z'))
+ encode_semantic(encoder, CBORTag(0, datestring))
+
+
+def encode_date(encoder, value):
+ value = datetime.combine(value, time()).replace(tzinfo=timezone.utc)
+ encode_datetime(encoder, value)
+
+
+def encode_decimal(encoder, value):
+ # Semantic tag 4
+ if value.is_nan():
+ encoder.write(b'\xf9\x7e\x00')
+ elif value.is_infinite():
+ encoder.write(b'\xf9\x7c\x00' if value > 0 else b'\xf9\xfc\x00')
+ else:
+ dt = value.as_tuple()
+ mantissa = sum(d * 10 ** i for i, d in enumerate(reversed(dt.digits)))
+ with encoder.disable_value_sharing():
+ encode_semantic(encoder, CBORTag(4, [dt.exponent, mantissa]))
+
+
+def encode_rational(encoder, value):
+ # Semantic tag 30
+ with encoder.disable_value_sharing():
+ encode_semantic(encoder, CBORTag(30, [value.numerator, value.denominator]))
+
+
+def encode_regexp(encoder, value):
+ # Semantic tag 35
+ encode_semantic(encoder, CBORTag(35, as_unicode(value.pattern)))
+
+
+def encode_mime(encoder, value):
+ # Semantic tag 36
+ encode_semantic(encoder, CBORTag(36, as_unicode(value.as_string())))
+
+
+def encode_uuid(encoder, value):
+ # Semantic tag 37
+ encode_semantic(encoder, CBORTag(37, value.bytes))
+
+
+#
+# Special encoders (major tag 7)
+#
+
+def encode_simple_value(encoder, value):
+ if value.value < 20:
+ encoder.write(struct.pack('>B', 0xe0 | value.value))
+ else:
+ encoder.write(struct.pack('>BB', 0xf8, value.value))
+
+
+def encode_float(encoder, value):
+ # Handle special values efficiently
+ import math
+ if math.isnan(value):
+ encoder.write(b'\xf9\x7e\x00')
+ elif math.isinf(value):
+ encoder.write(b'\xf9\x7c\x00' if value > 0 else b'\xf9\xfc\x00')
+ else:
+ encoder.write(struct.pack('>Bd', 0xfb, value))
+
+
+def encode_boolean(encoder, value):
+ encoder.write(b'\xf5' if value else b'\xf4')
+
+
+def encode_none(encoder, value):
+ encoder.write(b'\xf6')
+
+
+def encode_undefined(encoder, value):
+ encoder.write(b'\xf7')
+
+
+default_encoders = OrderedDict([
+ (bytes, encode_bytestring),
+ (bytearray, encode_bytearray),
+ (unicode, encode_string),
+ (int, encode_int),
+ (long, encode_int),
+ (float, encode_float),
+ (('decimal', 'Decimal'), encode_decimal),
+ (bool, encode_boolean),
+ (type(None), encode_none),
+ (tuple, encode_array),
+ (list, encode_array),
+ (dict, encode_map),
+ (defaultdict, encode_map),
+ (OrderedDict, encode_map),
+ (type(undefined), encode_undefined),
+ (datetime, encode_datetime),
+ (date, encode_date),
+ (type(re.compile('')), encode_regexp),
+ (('fractions', 'Fraction'), encode_rational),
+ (('email.message', 'Message'), encode_mime),
+ (('uuid', 'UUID'), encode_uuid),
+ (CBORSimpleValue, encode_simple_value),
+ (CBORTag, encode_semantic)
+])
+
+
+class CBOREncoder(object):
+ """
+ Serializes objects to a byte stream using Concise Binary Object Representation.
+
+ :param datetime_as_timestamp: set to ``True`` to serialize datetimes as UNIX timestamps
+ (this makes datetimes more concise on the wire but loses the time zone information)
+ :param datetime.tzinfo timezone: the default timezone to use for serializing naive datetimes
+    :param value_sharing: if ``True``, allows more efficient serializing of repeated values and,
+        more importantly, cyclic data structures, at the cost of extra overhead on the wire
+    :param default: a callable that is called by the encoder with two arguments (the encoder
+        instance and the unencodable value) when no suitable encoder has been found, and should
+        use the methods on the encoder to encode any objects it wants to add to the data stream
+ """
+
+ __slots__ = ('fp', 'datetime_as_timestamp', 'timezone', 'default', 'value_sharing',
+ 'json_compatible', '_shared_containers', '_encoders')
+
+ def __init__(self, fp, datetime_as_timestamp=False, timezone=None, value_sharing=False,
+ default=None):
+ self.fp = fp
+ self.datetime_as_timestamp = datetime_as_timestamp
+ self.timezone = timezone
+ self.value_sharing = value_sharing
+ self.default = default
+ self._shared_containers = {} # indexes used for value sharing
+ self._encoders = default_encoders.copy()
+
+ def _find_encoder(self, obj_type):
+ from sys import modules
+
+ for type_, enc in list(iteritems(self._encoders)):
+ if type(type_) is tuple:
+ modname, typename = type_
+ imported_type = getattr(modules.get(modname), typename, None)
+ if imported_type is not None:
+ del self._encoders[type_]
+ self._encoders[imported_type] = enc
+ type_ = imported_type
+ else: # pragma: nocover
+ continue
+
+ if issubclass(obj_type, type_):
+ self._encoders[obj_type] = enc
+ return enc
+
+ return None
+
+ @contextmanager
+ def disable_value_sharing(self):
+ """Disable value sharing in the encoder for the duration of the context block."""
+ old_value_sharing = self.value_sharing
+ self.value_sharing = False
+ yield
+ self.value_sharing = old_value_sharing
+
+ def write(self, data):
+ """
+ Write bytes to the data stream.
+
+ :param data: the bytes to write
+
+ """
+ self.fp.write(data)
+
+ def encode(self, obj):
+ """
+ Encode the given object using CBOR.
+
+ :param obj: the object to encode
+
+ """
+ obj_type = obj.__class__
+ encoder = self._encoders.get(obj_type) or self._find_encoder(obj_type) or self.default
+ if not encoder:
+ raise CBOREncodeError('cannot serialize type %s' % obj_type.__name__)
+
+ encoder(self, obj)
+
+ def encode_to_bytes(self, obj):
+ """
+ Encode the given object to a byte buffer and return its value as bytes.
+
+        This method is intended to be used from the ``default`` hook when an object needs to be
+ encoded separately from the rest but while still taking advantage of the shared value
+ registry.
+
+ """
+ old_fp = self.fp
+ self.fp = fp = BytesIO()
+ self.encode(obj)
+ self.fp = old_fp
+ return fp.getvalue()
+
+
+def dumps(obj, **kwargs):
+ """
+ Serialize an object to a bytestring.
+
+ :param obj: the object to serialize
+ :param kwargs: keyword arguments passed to :class:`~.CBOREncoder`
+ :return: the serialized output
+ :rtype: bytes
+
+ """
+ fp = BytesIO()
+ dump(obj, fp, **kwargs)
+ return fp.getvalue()
+
+
+def dump(obj, fp, **kwargs):
+ """
+ Serialize an object to a file.
+
+ :param obj: the object to serialize
+ :param fp: a file-like object
+ :param kwargs: keyword arguments passed to :class:`~.CBOREncoder`
+
+ """
+ CBOREncoder(fp, **kwargs).encode(obj)
diff --git a/third_party/python/cbor2/cbor2/types.py b/third_party/python/cbor2/cbor2/types.py
new file mode 100644
index 0000000000..1d3afb0601
--- /dev/null
+++ b/third_party/python/cbor2/cbor2/types.py
@@ -0,0 +1,55 @@
+class CBORTag(object):
+ """
+ Represents a CBOR semantic tag.
+
+ :param int tag: tag number
+ :param value: encapsulated value (any object)
+ """
+
+ __slots__ = 'tag', 'value'
+
+ def __init__(self, tag, value):
+ self.tag = tag
+ self.value = value
+
+ def __eq__(self, other):
+ if isinstance(other, CBORTag):
+ return self.tag == other.tag and self.value == other.value
+ return NotImplemented
+
+ def __repr__(self):
+ return 'CBORTag({self.tag}, {self.value!r})'.format(self=self)
+
+
+class CBORSimpleValue(object):
+ """
+ Represents a CBOR "simple value".
+
+ :param int value: the value (0-255)
+ """
+
+ __slots__ = 'value'
+
+ def __init__(self, value):
+ if value < 0 or value > 255:
+ raise TypeError('simple value too big')
+ self.value = value
+
+ def __eq__(self, other):
+ if isinstance(other, CBORSimpleValue):
+ return self.value == other.value
+ elif isinstance(other, int):
+ return self.value == other
+ return NotImplemented
+
+ def __repr__(self):
+ return 'CBORSimpleValue({self.value})'.format(self=self)
+
+
+class UndefinedType(object):
+ __slots__ = ()
+
+
+#: Represents the "undefined" value.
+undefined = UndefinedType()
+break_marker = object()
diff --git a/third_party/python/cbor2/docs/conf.py b/third_party/python/cbor2/docs/conf.py
new file mode 100644
index 0000000000..0ddeb0719e
--- /dev/null
+++ b/third_party/python/cbor2/docs/conf.py
@@ -0,0 +1,33 @@
+# coding: utf-8
+#!/usr/bin/env python
+import pkg_resources
+
+
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.intersphinx'
+]
+
+templates_path = ['_templates']
+source_suffix = '.rst'
+master_doc = 'index'
+project = 'cbor2'
+author = u'Alex Grönholm'
+copyright = u'2016, ' + author
+
+v = pkg_resources.get_distribution(project).parsed_version
+version = v.base_version
+release = v.public
+
+language = None
+
+exclude_patterns = ['_build']
+pygments_style = 'sphinx'
+highlight_language = 'python'
+todo_include_todos = False
+
+html_theme = 'sphinx_rtd_theme'
+html_static_path = ['_static']
+htmlhelp_basename = project.replace('-', '') + 'doc'
+
+intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
diff --git a/third_party/python/cbor2/docs/customizing.rst b/third_party/python/cbor2/docs/customizing.rst
new file mode 100644
index 0000000000..bf9b1b4540
--- /dev/null
+++ b/third_party/python/cbor2/docs/customizing.rst
@@ -0,0 +1,132 @@
+Customizing encoding and decoding
+=================================
+
+Both the encoder and decoder can be customized to support a wider range of types.
+
+On the encoder side, this is accomplished by passing a callback as the ``default`` constructor
+argument. This callback will receive an object that the encoder could not serialize on its own.
+The callback should then use the encoder's methods (typically
+:meth:`~cbor2.encoder.CBOREncoder.encode`) to serialize the object; its return value is ignored.
+The values it encodes may themselves contain objects that require the callback, as long as this
+does not lead to an infinite loop.
+
+On the decoder side, you have two options: ``tag_hook`` and ``object_hook``. The former is called
+by the decoder to process any semantic tags that have no predefined decoders. The latter is called
+for any newly decoded ``dict`` objects, and is mostly useful for implementing a JSON compatible
+custom type serialization scheme. Unless your requirements restrict you to JSON compatible types
+only, it is recommended to use ``tag_hook`` for this purpose.
+
+JSON compatibility
+------------------
+
+In certain applications, it may be desirable to limit the supported types to the same ones
+serializable as JSON: (unicode) string, integer, float, boolean, null, array and object (dict).
+This can be done by passing the ``json_compatible`` option to the encoder. When incompatible types
+are encountered, a :class:`~cbor2.encoder.CBOREncodeError` is then raised.
+
+For the decoder, there is no support for detecting incoming incompatible types yet.
+
+Using the CBOR tags for custom types
+------------------------------------
+
+The most common way to use ``default`` is to call :meth:`~cbor2.encoder.CBOREncoder.encode`
+to add a custom tag in the data stream, with the payload as the value::
+
+ class Point(object):
+ def __init__(self, x, y):
+ self.x = x
+ self.y = y
+
+ def default_encoder(encoder, value):
+ # Tag number 4000 was chosen arbitrarily
+ encoder.encode(CBORTag(4000, [value.x, value.y]))
+
+The corresponding ``tag_hook`` would be::
+
+ def tag_hook(decoder, tag, shareable_index=None):
+ if tag.tag != 4000:
+ return tag
+
+ # tag.value is now the [x, y] list we serialized before
+ return Point(*tag.value)
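+
+To wire the two together (a brief usage sketch, not part of the original example)::
+
+    from cbor2 import dumps, loads
+
+    payload = dumps(Point(1, 2), default=default_encoder)
+    point = loads(payload, tag_hook=tag_hook)
+    assert (point.x, point.y) == (1, 2)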
+
+Using dicts to carry custom types
+---------------------------------
+
+The same could be done with ``object_hook``, except less efficiently::
+
+ def default_encoder(encoder, value):
+ encoder.encode(dict(typename='Point', x=value.x, y=value.y))
+
+ def object_hook(decoder, value):
+ if value.get('typename') != 'Point':
+ return value
+
+ return Point(value['x'], value['y'])
+
+You should make sure that whatever scheme you use to tell your "specially marked" dicts apart
+from arbitrary data dicts cannot mistake one for the other.
+
+Value sharing with custom types
+-------------------------------
+
+In order to properly encode and decode cyclic references with custom types, some special care has
+to be taken. Suppose you have a custom type as below, where every child object contains a reference
+to its parent and the parent contains a list of children::
+
+ from cbor2 import dumps, loads, shareable_encoder, CBORTag
+
+
+ class MyType(object):
+ def __init__(self, parent=None):
+ self.parent = parent
+ self.children = []
+ if parent:
+ self.parent.children.append(self)
+
+This would not normally be serializable, as it would lead to an endless loop (in the worst case)
+and raise some exception (in the best case). Now, enter CBOR's extension tags 28 and 29. These tags
+make it possible to add special markers into the data stream which can be later referenced and
+substituted with the object marked earlier.
+
+To do this, you will need to apply the :meth:`~cbor2.encoder.shareable_encoder` decorator to your
+``default`` hook function. It will automatically add the object to the shared values registry on
+the encoder and prevent it from being serialized twice (instead, a reference is written to the
+data stream)::
+
+ @shareable_encoder
+ def default_encoder(encoder, value):
+ # The state has to be serialized separately so that the decoder would have a chance to
+ # create an empty instance before the shared value references are decoded
+ serialized_state = encoder.encode_to_bytes(value.__dict__)
+ encoder.encode(CBORTag(3000, serialized_state))
+
+On the decoder side, you will need to initialize an empty instance for shared value lookup before
+the object's state (which may contain references to it) is decoded.
+This is done with the :meth:`~cbor2.decoder.CBORDecoder.set_shareable` method::
+
+ def tag_hook(decoder, tag, shareable_index=None):
+ # Return all other tags as-is
+ if tag.tag != 3000:
+ return tag
+
+ # Create a raw instance before initializing its state to make it possible for cyclic
+ # references to work
+ instance = MyType.__new__(MyType)
+ decoder.set_shareable(shareable_index, instance)
+
+ # Separately decode the state of the new object and then apply it
+ state = decoder.decode_from_bytes(tag.value)
+ instance.__dict__.update(state)
+ return instance
+
+You could then verify that the cyclic references have been restored after deserialization::
+
+ parent = MyType()
+ child1 = MyType(parent)
+ child2 = MyType(parent)
+ serialized = dumps(parent, default=default_encoder, value_sharing=True)
+
+ new_parent = loads(serialized, tag_hook=tag_hook)
+ assert new_parent.children[0].parent is new_parent
+ assert new_parent.children[1].parent is new_parent
+
diff --git a/third_party/python/cbor2/docs/index.rst b/third_party/python/cbor2/docs/index.rst
new file mode 100644
index 0000000000..443c245d86
--- /dev/null
+++ b/third_party/python/cbor2/docs/index.rst
@@ -0,0 +1,15 @@
+.. include:: ../README.rst
+ :start-line: 7
+ :end-before: Project links
+
+Table of contents
+-----------------
+
+.. toctree::
+ :maxdepth: 2
+
+ usage
+ customizing
+ versionhistory
+
+* :ref:`API reference <modindex>`
diff --git a/third_party/python/cbor2/docs/modules/decoder.rst b/third_party/python/cbor2/docs/modules/decoder.rst
new file mode 100644
index 0000000000..c2c58fe9db
--- /dev/null
+++ b/third_party/python/cbor2/docs/modules/decoder.rst
@@ -0,0 +1,5 @@
+:mod:`cbor2.decoder`
+====================
+
+.. automodule:: cbor2.decoder
+ :members:
diff --git a/third_party/python/cbor2/docs/modules/encoder.rst b/third_party/python/cbor2/docs/modules/encoder.rst
new file mode 100644
index 0000000000..c4240eeaad
--- /dev/null
+++ b/third_party/python/cbor2/docs/modules/encoder.rst
@@ -0,0 +1,5 @@
+:mod:`cbor2.encoder`
+====================
+
+.. automodule:: cbor2.encoder
+ :members:
diff --git a/third_party/python/cbor2/docs/modules/types.rst b/third_party/python/cbor2/docs/modules/types.rst
new file mode 100644
index 0000000000..a6dedaa3e4
--- /dev/null
+++ b/third_party/python/cbor2/docs/modules/types.rst
@@ -0,0 +1,5 @@
+:mod:`cbor2.types`
+==================
+
+.. automodule:: cbor2.types
+ :members:
diff --git a/third_party/python/cbor2/docs/usage.rst b/third_party/python/cbor2/docs/usage.rst
new file mode 100644
index 0000000000..54b028ee53
--- /dev/null
+++ b/third_party/python/cbor2/docs/usage.rst
@@ -0,0 +1,80 @@
+Basic usage
+===========
+
+Serializing and deserializing with cbor2 is pretty straightforward::
+
+    from cbor2 import dump, dumps, load, loads
+
+ # Serialize an object as a bytestring
+ data = dumps(['hello', 'world'])
+
+ # Deserialize a bytestring
+ obj = loads(data)
+
+ # Efficiently deserialize from a file
+ with open('input.cbor', 'rb') as fp:
+ obj = load(fp)
+
+ # Efficiently serialize an object to a file
+ with open('output.cbor', 'wb') as fp:
+ dump(obj, fp)
+
+Some data types, however, require extra considerations, as detailed below.
+
+String/bytes handling on Python 2
+---------------------------------
+
+The ``str`` type is encoded as binary on Python 2. If you want to encode strings as text on
+Python 2, use unicode strings instead.
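+
+For example (illustrative)::
+
+    from cbor2 import dumps
+
+    dumps(b'foo')   # encoded as a CBOR byte string (major type 2)
+    dumps(u'foo')   # encoded as a CBOR text string (major type 3)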
+
+Date/time handling
+------------------
+
+The CBOR specification does not support naïve datetimes (that is, datetimes where ``tzinfo`` is
+missing). When the encoder encounters such a datetime, it needs to know which timezone it belongs
+to. To this end, you can specify a default timezone by passing a :class:`~datetime.tzinfo` instance
+to :func:`~cbor2.encoder.dump`/:func:`~cbor2.encoder.dumps` call as the ``timezone`` argument.
+Decoded datetimes are always timezone aware.
+
+By default, datetimes are serialized in a manner that retains their timezone offsets. You can
+optimize the data stream size by passing ``datetime_as_timestamp=True`` to
+:func:`~cbor2.encoder.dump`/:func:`~cbor2.encoder.dumps`, but this causes the timezone offset
+information to be lost.
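+
+A short sketch (mirroring how the test suite imports ``timezone`` from ``cbor2.compat``)::
+
+    from datetime import datetime
+    from cbor2 import dumps
+    from cbor2.compat import timezone
+
+    naive = datetime(2013, 3, 21, 20, 4, 0)
+    dumps(naive, timezone=timezone.utc)                              # date/time string (tag 0)
+    dumps(naive, timezone=timezone.utc, datetime_as_timestamp=True)  # epoch timestamp (tag 1)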
+
+Cyclic (recursive) data structures
+----------------------------------
+
+If the encoder encounters a shareable object (i.e. a list or dict) that it has seen before, it will
+by default raise :exc:`~cbor2.encoder.CBOREncodeError` indicating that a cyclic reference has been
+detected and value sharing was not enabled. CBOR has, however, an extension specification that
+allows the encoder to reference a previously encoded value without processing it again. This makes
+it possible to serialize such cyclic references, but value sharing has to be enabled by passing
+``value_sharing=True`` to :func:`~cbor2.encoder.dump`/:func:`~cbor2.encoder.dumps`.
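+
+A minimal sketch of both behaviors::
+
+    from cbor2 import dumps, CBOREncodeError
+
+    a = []
+    a.append(a)  # the list now contains itself
+
+    try:
+        dumps(a)  # value sharing is off by default, so this raises CBOREncodeError
+    except CBOREncodeError:
+        pass
+
+    payload = dumps(a, value_sharing=True)  # succeeds, using the shared value tags (28 and 29)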
+
+.. warning:: Support for value sharing is rare in other CBOR implementations, so think carefully
+   whether you want to enable it. It also adds some overhead on the wire, as all potentially
+   shareable values must be tagged as such.
+
+Tag support
+-----------
+
+In addition to all standard CBOR tags, this library supports many extended tags:
+
+=== ======================================== ====================================================
+Tag Semantics Python type(s)
+=== ======================================== ====================================================
+0 Standard date/time string datetime.date / datetime.datetime
+1 Epoch-based date/time datetime.date / datetime.datetime
+2 Positive bignum int / long
+3 Negative bignum int / long
+4 Decimal fraction decimal.Decimal
+5 Bigfloat decimal.Decimal
+28 Mark shared value N/A
+29 Reference shared value N/A
+30 Rational number fractions.Fraction
+35 Regular expression ``_sre.SRE_Pattern`` (result of ``re.compile(...)``)
+36 MIME message email.message.Message
+37 Binary UUID uuid.UUID
+=== ======================================== ====================================================
+
+Arbitrary tags can be represented with the :class:`~cbor2.types.CBORTag` class.
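+
+For example (a brief sketch)::
+
+    from cbor2 import dumps, loads, CBORTag
+
+    payload = dumps(CBORTag(6000, u'Hello'))           # encode an application-specific tag
+    assert loads(payload) == CBORTag(6000, u'Hello')   # unknown tags round-trip as CBORTag objects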
diff --git a/third_party/python/cbor2/docs/versionhistory.rst b/third_party/python/cbor2/docs/versionhistory.rst
new file mode 100644
index 0000000000..246e43bb83
--- /dev/null
+++ b/third_party/python/cbor2/docs/versionhistory.rst
@@ -0,0 +1,73 @@
+Version history
+===============
+
+This library adheres to `Semantic Versioning <http://semver.org/>`_.
+
+**4.0.1** (2017-08-21)
+
+- Fixed silent truncation of decoded data if there are not enough bytes in the stream for an exact
+ read (``CBORDecodeError`` is now raised instead)
+
+**4.0.0** (2017-04-24)
+
+- **BACKWARD INCOMPATIBLE** Value sharing has been disabled by default, for better compatibility
+ with other implementations and better performance (since it is rarely needed)
+- **BACKWARD INCOMPATIBLE** Replaced the ``semantic_decoders`` decoder option with the ``tag_hook``
+ option
+- **BACKWARD INCOMPATIBLE** Replaced the ``encoders`` encoder option with the ``default`` option
+- **BACKWARD INCOMPATIBLE** Factored out the file object argument (``fp``) from all callbacks
+- **BACKWARD INCOMPATIBLE** The encoder no longer supports every imaginable type implementing the
+ ``Sequence`` or ``Map`` interface, as they turned out to be too broad
+- Added the ``object_hook`` option for decoding dicts into complex objects
+ (intended for situations where JSON compatibility is required and semantic tags cannot be used)
+- Added encoding and decoding of simple values (``CBORSimpleValue``)
+ (contributed by Jerry Lundström)
+- Replaced the decoder for bignums with a simpler and faster version (contributed by orent)
+- Made all relevant classes and functions available directly in the ``cbor2`` namespace
+- Added proper documentation
+
+**3.0.4** (2016-09-24)
+
+- Fixed TypeError when trying to encode extension types (regression introduced in 3.0.3)
+
+**3.0.3** (2016-09-23)
+
+- No changes, just re-releasing due to git tagging screw-up
+
+**3.0.2** (2016-09-23)
+
+- Fixed decoding failure for datetimes with microseconds (tag 0)
+
+**3.0.1** (2016-08-08)
+
+- Fixed error in the cyclic structure detection code that could mistake one container for
+ another, sometimes causing a bogus error about cyclic data structures where there was none
+
+**3.0.0** (2016-07-03)
+
+- **BACKWARD INCOMPATIBLE** Encoder callbacks now receive three arguments: the encoder instance,
+  the value to encode and a file-like object. The callback must now either write directly to
+ the file-like object or call another encoder callback instead of returning an iterable.
+- **BACKWARD INCOMPATIBLE** Semantic decoder callbacks now receive four arguments: the decoder
+ instance, the primitive value, a file-like object and the shareable index for the decoded value.
+ Decoders that support value sharing must now set the raw value at the given index in
+ ``decoder.shareables``.
+- **BACKWARD INCOMPATIBLE** Removed support for iterative encoding (``CBOREncoder.encode()`` is no
+ longer a generator function and always returns ``None``)
+- Significantly improved performance (encoder ~30 % faster, decoder ~60 % faster)
+- Fixed serialization round-trip for ``undefined`` (simple type #23)
+- Added proper support for value sharing in callbacks
+
+**2.0.0** (2016-06-11)
+
+- **BACKWARD INCOMPATIBLE** Deserialize unknown tags as ``CBORTag`` objects so as not to lose
+ information
+- Fixed error messages coming from nested structures
+
+**1.1.0** (2016-06-10)
+
+- Fixed deserialization of cyclic structures
+
+**1.0.0** (2016-06-08)
+
+- Initial release
diff --git a/third_party/python/cbor2/setup.cfg b/third_party/python/cbor2/setup.cfg
new file mode 100644
index 0000000000..fd6c5cb2cf
--- /dev/null
+++ b/third_party/python/cbor2/setup.cfg
@@ -0,0 +1,21 @@
+[tool:pytest]
+addopts = -rsx --cov --tb=short
+testpaths = tests
+
+[coverage:run]
+source = cbor2
+
+[coverage:report]
+show_missing = true
+
+[flake8]
+max-line-length = 99
+exclude = .tox,build,docs
+
+[bdist_wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/cbor2/setup.py b/third_party/python/cbor2/setup.py
new file mode 100644
index 0000000000..530fd8e1d5
--- /dev/null
+++ b/third_party/python/cbor2/setup.py
@@ -0,0 +1,43 @@
+# coding: utf-8
+import os.path
+
+from setuptools import setup, find_packages
+
+
+here = os.path.dirname(__file__)
+readme_path = os.path.join(here, 'README.rst')
+readme = open(readme_path).read()
+
+setup(
+ name='cbor2',
+ use_scm_version={
+ 'version_scheme': 'post-release',
+ 'local_scheme': 'dirty-tag'
+ },
+ description='Pure Python CBOR (de)serializer with extensive tag support',
+ long_description=readme,
+ author=u'Alex Grönholm',
+ author_email='alex.gronholm@nextday.fi',
+ url='https://github.com/agronholm/cbor2',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6'
+ ],
+ keywords='serialization cbor',
+ license='MIT',
+ packages=find_packages(exclude=['tests']),
+ setup_requires=[
+ 'setuptools_scm'
+ ],
+ extras_require={
+ 'testing': ['pytest', 'pytest-cov']
+ }
+)
diff --git a/third_party/python/cbor2/tests/test_decoder.py b/third_party/python/cbor2/tests/test_decoder.py
new file mode 100644
index 0000000000..4d281ad3ab
--- /dev/null
+++ b/third_party/python/cbor2/tests/test_decoder.py
@@ -0,0 +1,327 @@
+from __future__ import division
+
+import math
+import re
+from binascii import unhexlify
+from datetime import datetime, timedelta
+from decimal import Decimal
+from email.message import Message
+from fractions import Fraction
+from io import BytesIO
+from uuid import UUID
+
+import pytest
+
+from cbor2.compat import timezone
+from cbor2.decoder import loads, CBORDecodeError, load, CBORDecoder
+from cbor2.types import CBORTag, undefined, CBORSimpleValue
+
+
+@pytest.mark.parametrize('payload, expected', [
+ ('00', 0),
+ ('01', 1),
+ ('0a', 10),
+ ('17', 23),
+ ('1818', 24),
+ ('1819', 25),
+ ('1864', 100),
+ ('1903e8', 1000),
+ ('1a000f4240', 1000000),
+ ('1b000000e8d4a51000', 1000000000000),
+ ('1bffffffffffffffff', 18446744073709551615),
+ ('c249010000000000000000', 18446744073709551616),
+ ('3bffffffffffffffff', -18446744073709551616),
+ ('c349010000000000000000', -18446744073709551617),
+ ('20', -1),
+ ('29', -10),
+ ('3863', -100),
+ ('3903e7', -1000)
+])
+def test_integer(payload, expected):
+ decoded = loads(unhexlify(payload))
+ assert decoded == expected
+
+
+def test_invalid_integer_subtype():
+ exc = pytest.raises(CBORDecodeError, loads, b'\x1c')
+ assert str(exc.value).endswith('unknown unsigned integer subtype 0x1c')
+
+
+@pytest.mark.parametrize('payload, expected', [
+ ('f90000', 0.0),
+ ('f98000', -0.0),
+ ('f93c00', 1.0),
+ ('fb3ff199999999999a', 1.1),
+ ('f93e00', 1.5),
+ ('f97bff', 65504.0),
+ ('fa47c35000', 100000.0),
+ ('fa7f7fffff', 3.4028234663852886e+38),
+ ('fb7e37e43c8800759c', 1.0e+300),
+ ('f90001', 5.960464477539063e-8),
+ ('f90400', 0.00006103515625),
+ ('f9c400', -4.0),
+ ('fbc010666666666666', -4.1),
+ ('f97c00', float('inf')),
+ ('f9fc00', float('-inf')),
+ ('fa7f800000', float('inf')),
+ ('faff800000', float('-inf')),
+ ('fb7ff0000000000000', float('inf')),
+ ('fbfff0000000000000', float('-inf'))
+])
+def test_float(payload, expected):
+ decoded = loads(unhexlify(payload))
+ assert decoded == expected
+
+
+@pytest.mark.parametrize('payload', ['f97e00', 'fa7fc00000', 'fb7ff8000000000000'])
+def test_float_nan(payload):
+ decoded = loads(unhexlify(payload))
+ assert math.isnan(decoded)
+
+
+@pytest.mark.parametrize('payload, expected', [
+ ('f4', False),
+ ('f5', True),
+ ('f6', None),
+ ('f7', undefined)
+])
+def test_special(payload, expected):
+ decoded = loads(unhexlify(payload))
+ assert decoded is expected
+
+
+@pytest.mark.parametrize('payload, expected', [
+ ('40', b''),
+ ('4401020304', b'\x01\x02\x03\x04'),
+])
+def test_binary(payload, expected):
+ decoded = loads(unhexlify(payload))
+ assert decoded == expected
+
+
+@pytest.mark.parametrize('payload, expected', [
+ ('60', u''),
+ ('6161', u'a'),
+ ('6449455446', u'IETF'),
+ ('62225c', u'\"\\'),
+ ('62c3bc', u'\u00fc'),
+ ('63e6b0b4', u'\u6c34')
+])
+def test_string(payload, expected):
+ decoded = loads(unhexlify(payload))
+ assert decoded == expected
+
+
+@pytest.mark.parametrize('payload, expected', [
+ ('80', []),
+ ('83010203', [1, 2, 3]),
+ ('8301820203820405', [1, [2, 3], [4, 5]]),
+ ('98190102030405060708090a0b0c0d0e0f101112131415161718181819', list(range(1, 26)))
+])
+def test_array(payload, expected):
+ decoded = loads(unhexlify(payload))
+ assert decoded == expected
+
+
+@pytest.mark.parametrize('payload, expected', [
+ ('a0', {}),
+ ('a201020304', {1: 2, 3: 4})
+])
+def test_map(payload, expected):
+ decoded = loads(unhexlify(payload))
+ assert decoded == expected
+
+
+@pytest.mark.parametrize('payload, expected', [
+ ('a26161016162820203', {'a': 1, 'b': [2, 3]}),
+ ('826161a161626163', ['a', {'b': 'c'}]),
+ ('a56161614161626142616361436164614461656145',
+ {'a': 'A', 'b': 'B', 'c': 'C', 'd': 'D', 'e': 'E'})
+])
+def test_mixed_array_map(payload, expected):
+ decoded = loads(unhexlify(payload))
+ assert decoded == expected
+
+
+@pytest.mark.parametrize('payload, expected', [
+ ('5f42010243030405ff', b'\x01\x02\x03\x04\x05'),
+ ('7f657374726561646d696e67ff', 'streaming'),
+ ('9fff', []),
+ ('9f018202039f0405ffff', [1, [2, 3], [4, 5]]),
+ ('9f01820203820405ff', [1, [2, 3], [4, 5]]),
+ ('83018202039f0405ff', [1, [2, 3], [4, 5]]),
+ ('83019f0203ff820405', [1, [2, 3], [4, 5]]),
+ ('9f0102030405060708090a0b0c0d0e0f101112131415161718181819ff', list(range(1, 26))),
+ ('bf61610161629f0203ffff', {'a': 1, 'b': [2, 3]}),
+ ('826161bf61626163ff', ['a', {'b': 'c'}]),
+ ('bf6346756ef563416d7421ff', {'Fun': True, 'Amt': -2}),
+])
+def test_streaming(payload, expected):
+ decoded = loads(unhexlify(payload))
+ assert decoded == expected
+
+
+@pytest.mark.parametrize('payload, expected', [
+ ('e0', 0),
+ ('e2', 2),
+ ('f3', 19),
+ ('f820', 32),
+ ('e0', CBORSimpleValue(0)),
+ ('e2', CBORSimpleValue(2)),
+ ('f3', CBORSimpleValue(19)),
+ ('f820', CBORSimpleValue(32))
+])
+def test_simple_value(payload, expected):
+ decoded = loads(unhexlify(payload))
+ assert decoded == expected
+
+
+#
+# Tests for extension tags
+#
+
+
+@pytest.mark.parametrize('payload, expected', [
+ ('c074323031332d30332d32315432303a30343a30305a',
+ datetime(2013, 3, 21, 20, 4, 0, tzinfo=timezone.utc)),
+ ('c0781b323031332d30332d32315432303a30343a30302e3338303834315a',
+ datetime(2013, 3, 21, 20, 4, 0, 380841, tzinfo=timezone.utc)),
+ ('c07819323031332d30332d32315432323a30343a30302b30323a3030',
+ datetime(2013, 3, 21, 22, 4, 0, tzinfo=timezone(timedelta(hours=2)))),
+ ('c11a514b67b0', datetime(2013, 3, 21, 20, 4, 0, tzinfo=timezone.utc)),
+ ('c11a514b67b0', datetime(2013, 3, 21, 22, 4, 0, tzinfo=timezone(timedelta(hours=2))))
+], ids=['datetime/utc', 'datetime+micro/utc', 'datetime/eet', 'timestamp/utc', 'timestamp/eet'])
+def test_datetime(payload, expected):
+ decoded = loads(unhexlify(payload))
+ assert decoded == expected
+
+
+def test_bad_datetime():
+ exc = pytest.raises(CBORDecodeError, loads, unhexlify('c06b303030302d3132332d3031'))
+ assert str(exc.value).endswith('invalid datetime string: 0000-123-01')
+
+
+def test_fraction():
+ decoded = loads(unhexlify('c48221196ab3'))
+ assert decoded == Decimal('273.15')
+
+
+def test_bigfloat():
+ decoded = loads(unhexlify('c5822003'))
+ assert decoded == Decimal('1.5')
+
+
+def test_rational():
+ decoded = loads(unhexlify('d81e820205'))
+ assert decoded == Fraction(2, 5)
+
+
+def test_regex():
+ decoded = loads(unhexlify('d8236d68656c6c6f2028776f726c6429'))
+ expr = re.compile(u'hello (world)')
+ assert decoded == expr
+
+
+def test_mime():
+ decoded = loads(unhexlify(
+ 'd824787b436f6e74656e742d547970653a20746578742f706c61696e3b20636861727365743d2269736f2d38'
+ '3835392d3135220a4d494d452d56657273696f6e3a20312e300a436f6e74656e742d5472616e736665722d45'
+ '6e636f64696e673a2071756f7465642d7072696e7461626c650a0a48656c6c6f203d413475726f'))
+ assert isinstance(decoded, Message)
+ assert decoded.get_payload() == 'Hello =A4uro'
+
+
+def test_uuid():
+ decoded = loads(unhexlify('d825505eaffac8b51e480581277fdcc7842faf'))
+ assert decoded == UUID(hex='5eaffac8b51e480581277fdcc7842faf')
+
+
+def test_bad_shared_reference():
+ exc = pytest.raises(CBORDecodeError, loads, unhexlify('d81d05'))
+ assert str(exc.value).endswith('shared reference 5 not found')
+
+
+def test_uninitialized_shared_reference():
+ fp = BytesIO(unhexlify('d81d00'))
+ decoder = CBORDecoder(fp)
+ decoder._shareables.append(None)
+ exc = pytest.raises(CBORDecodeError, decoder.decode)
+ assert str(exc.value).endswith('shared value 0 has not been initialized')
+
+
+def test_cyclic_array():
+ decoded = loads(unhexlify('d81c81d81d00'))
+ assert decoded == [decoded]
+
+
+def test_cyclic_map():
+ decoded = loads(unhexlify('d81ca100d81d00'))
+ assert decoded == {0: decoded}
+
+
+def test_unhandled_tag():
+ """
+ Test that a tag is simply ignored and its associated value returned if there is no special
+ handling available for it.
+
+ """
+ decoded = loads(unhexlify('d917706548656c6c6f'))
+ assert decoded == CBORTag(6000, u'Hello')
+
+
+def test_premature_end_of_stream():
+ """
+ Test that the decoder detects a situation where read() returned fewer than expected bytes.
+
+ """
+ exc = pytest.raises(CBORDecodeError, loads, unhexlify('437879'))
+ exc.match('premature end of stream \(expected to read 3 bytes, got 2 instead\)')
+
+
+def test_tag_hook():
+ def reverse(decoder, tag, fp, shareable_index=None):
+ return tag.value[::-1]
+
+ decoded = loads(unhexlify('d917706548656c6c6f'), tag_hook=reverse)
+ assert decoded == u'olleH'
+
+
+def test_tag_hook_cyclic():
+ class DummyType(object):
+ def __init__(self, value):
+ self.value = value
+
+ def unmarshal_dummy(decoder, tag, shareable_index=None):
+ instance = DummyType.__new__(DummyType)
+ decoder.set_shareable(shareable_index, instance)
+ instance.value = decoder.decode_from_bytes(tag.value)
+ return instance
+
+ decoded = loads(unhexlify('D81CD90BB849D81CD90BB843D81D00'), tag_hook=unmarshal_dummy)
+ assert isinstance(decoded, DummyType)
+ assert decoded.value.value is decoded
+
+
+def test_object_hook():
+ class DummyType(object):
+ def __init__(self, state):
+ self.state = state
+
+ payload = unhexlify('A2616103616205')
+ decoded = loads(payload, object_hook=lambda decoder, value: DummyType(value))
+ assert isinstance(decoded, DummyType)
+ assert decoded.state == {'a': 3, 'b': 5}
+
+
+def test_error_major_type():
+ exc = pytest.raises(CBORDecodeError, loads, b'')
+ assert str(exc.value).startswith('error reading major type at index 0: ')
+
+
+def test_load_from_file(tmpdir):
+ path = tmpdir.join('testdata.cbor')
+ path.write_binary(b'\x82\x01\x0a')
+ with path.open('rb') as fp:
+ obj = load(fp)
+
+ assert obj == [1, 10]
diff --git a/third_party/python/cbor2/tests/test_encoder.py b/third_party/python/cbor2/tests/test_encoder.py
new file mode 100644
index 0000000000..b2537519ba
--- /dev/null
+++ b/third_party/python/cbor2/tests/test_encoder.py
@@ -0,0 +1,260 @@
+import re
+from binascii import unhexlify
+from datetime import datetime, timedelta, date
+from decimal import Decimal
+from email.mime.text import MIMEText
+from fractions import Fraction
+from uuid import UUID
+
+import pytest
+
+from cbor2.compat import timezone
+from cbor2.encoder import dumps, CBOREncodeError, dump, shareable_encoder
+from cbor2.types import CBORTag, undefined, CBORSimpleValue
+
+
+@pytest.mark.parametrize('value, expected', [
+ (0, '00'),
+ (1, '01'),
+ (10, '0a'),
+ (23, '17'),
+ (24, '1818'),
+ (100, '1864'),
+ (1000, '1903e8'),
+ (1000000, '1a000f4240'),
+ (1000000000000, '1b000000e8d4a51000'),
+ (18446744073709551615, '1bffffffffffffffff'),
+ (18446744073709551616, 'c249010000000000000000'),
+ (-18446744073709551616, '3bffffffffffffffff'),
+ (-18446744073709551617, 'c349010000000000000000'),
+ (-1, '20'),
+ (-10, '29'),
+ (-100, '3863'),
+ (-1000, '3903e7')
+])
+def test_integer(value, expected):
+ expected = unhexlify(expected)
+ assert dumps(value) == expected
+
+
+@pytest.mark.parametrize('value, expected', [
+ (1.1, 'fb3ff199999999999a'),
+ (1.0e+300, 'fb7e37e43c8800759c'),
+ (-4.1, 'fbc010666666666666'),
+ (float('inf'), 'f97c00'),
+ (float('nan'), 'f97e00'),
+ (float('-inf'), 'f9fc00')
+])
+def test_float(value, expected):
+ expected = unhexlify(expected)
+ assert dumps(value) == expected
+
+
+@pytest.mark.parametrize('value, expected', [
+ (b'', '40'),
+ (b'\x01\x02\x03\x04', '4401020304'),
+])
+def test_bytestring(value, expected):
+ expected = unhexlify(expected)
+ assert dumps(value) == expected
+
+
+def test_bytearray():
+ expected = unhexlify('4401020304')
+ assert dumps(bytearray(b'\x01\x02\x03\x04')) == expected
+
+
+@pytest.mark.parametrize('value, expected', [
+ (u'', '60'),
+ (u'a', '6161'),
+ (u'IETF', '6449455446'),
+ (u'"\\', '62225c'),
+ (u'\u00fc', '62c3bc'),
+ (u'\u6c34', '63e6b0b4')
+])
+def test_string(value, expected):
+ expected = unhexlify(expected)
+ assert dumps(value) == expected
+
+
+@pytest.mark.parametrize('value, expected', [
+ (False, 'f4'),
+ (True, 'f5'),
+ (None, 'f6'),
+ (undefined, 'f7')
+], ids=['false', 'true', 'null', 'undefined'])
+def test_special(value, expected):
+ expected = unhexlify(expected)
+ assert dumps(value) == expected
+
+
+@pytest.mark.parametrize('value, expected', [
+ (CBORSimpleValue(0), 'e0'),
+ (CBORSimpleValue(2), 'e2'),
+ (CBORSimpleValue(19), 'f3'),
+ (CBORSimpleValue(32), 'f820')
+])
+def test_simple_value(value, expected):
+ expected = unhexlify(expected)
+ assert dumps(value) == expected
+
+
+#
+# Tests for extension tags
+#
+
+@pytest.mark.parametrize('value, as_timestamp, expected', [
+ (datetime(2013, 3, 21, 20, 4, 0, tzinfo=timezone.utc), False,
+ 'c074323031332d30332d32315432303a30343a30305a'),
+ (datetime(2013, 3, 21, 20, 4, 0, 380841, tzinfo=timezone.utc), False,
+ 'c0781b323031332d30332d32315432303a30343a30302e3338303834315a'),
+ (datetime(2013, 3, 21, 22, 4, 0, tzinfo=timezone(timedelta(hours=2))), False,
+ 'c07819323031332d30332d32315432323a30343a30302b30323a3030'),
+ (datetime(2013, 3, 21, 20, 4, 0), False, 'c074323031332d30332d32315432303a30343a30305a'),
+ (datetime(2013, 3, 21, 20, 4, 0, tzinfo=timezone.utc), True, 'c11a514b67b0'),
+ (datetime(2013, 3, 21, 22, 4, 0, tzinfo=timezone(timedelta(hours=2))), True, 'c11a514b67b0')
+], ids=['datetime/utc', 'datetime+micro/utc', 'datetime/eet', 'naive', 'timestamp/utc',
+ 'timestamp/eet'])
+def test_datetime(value, as_timestamp, expected):
+ expected = unhexlify(expected)
+ assert dumps(value, datetime_as_timestamp=as_timestamp, timezone=timezone.utc) == expected
+
+
+def test_date():
+ expected = unhexlify('c074323031332d30332d32315430303a30303a30305a')
+ assert dumps(date(2013, 3, 21), timezone=timezone.utc) == expected
+
+
+def test_naive_datetime():
+ """Test that naive datetimes are gracefully rejected when no timezone has been set."""
+ exc = pytest.raises(CBOREncodeError, dumps, datetime(2013, 3, 21))
+ exc.match('naive datetime encountered and no default timezone has been set')
+
+
+@pytest.mark.parametrize('value, expected', [
+ (Decimal('14.123'), 'c4822219372b'),
+ (Decimal('NaN'), 'f97e00'),
+ (Decimal('Infinity'), 'f97c00'),
+ (Decimal('-Infinity'), 'f9fc00')
+], ids=['normal', 'nan', 'inf', 'neginf'])
+def test_decimal(value, expected):
+ expected = unhexlify(expected)
+ assert dumps(value) == expected
+
+
+def test_rational():
+ expected = unhexlify('d81e820205')
+ assert dumps(Fraction(2, 5)) == expected
+
+
+def test_regex():
+ expected = unhexlify('d8236d68656c6c6f2028776f726c6429')
+ assert dumps(re.compile(u'hello (world)')) == expected
+
+
+def test_mime():
+ expected = unhexlify(
+ 'd824787b436f6e74656e742d547970653a20746578742f706c61696e3b20636861727365743d2269736f2d38'
+ '3835392d3135220a4d494d452d56657273696f6e3a20312e300a436f6e74656e742d5472616e736665722d456'
+ 'e636f64696e673a2071756f7465642d7072696e7461626c650a0a48656c6c6f203d413475726f')
+ message = MIMEText(u'Hello \u20acuro', 'plain', 'iso-8859-15')
+ assert dumps(message) == expected
+
+
+def test_uuid():
+ expected = unhexlify('d825505eaffac8b51e480581277fdcc7842faf')
+ assert dumps(UUID(hex='5eaffac8b51e480581277fdcc7842faf')) == expected
+
+
+def test_custom_tag():
+ expected = unhexlify('d917706548656c6c6f')
+ assert dumps(CBORTag(6000, u'Hello')) == expected
+
+
+def test_cyclic_array():
+ """Test that an array that contains itself can be serialized with value sharing enabled."""
+ expected = unhexlify('d81c81d81c81d81d00')
+ a = [[]]
+ a[0].append(a)
+ assert dumps(a, value_sharing=True) == expected
+
+
+def test_cyclic_array_nosharing():
+ """Test that serializing a cyclic structure w/o value sharing will blow up gracefully."""
+ a = []
+ a.append(a)
+ exc = pytest.raises(CBOREncodeError, dumps, a)
+ exc.match('cyclic data structure detected but value sharing is disabled')
+
+
+def test_cyclic_map():
+ """Test that a dict that contains itself can be serialized with value sharing enabled."""
+ expected = unhexlify('d81ca100d81d00')
+ a = {}
+ a[0] = a
+ assert dumps(a, value_sharing=True) == expected
+
+
+def test_cyclic_map_nosharing():
+ """Test that serializing a cyclic structure w/o value sharing will fail gracefully."""
+ a = {}
+ a[0] = a
+ exc = pytest.raises(CBOREncodeError, dumps, a)
+ exc.match('cyclic data structure detected but value sharing is disabled')
+
+
+@pytest.mark.parametrize('value_sharing, expected', [
+ (False, '828080'),
+ (True, 'd81c82d81c80d81d01')
+], ids=['nosharing', 'sharing'])
+def test_not_cyclic_same_object(value_sharing, expected):
+ """Test that the same shareable object can be included twice if not in a cyclic structure."""
+ expected = unhexlify(expected)
+ a = []
+ b = [a, a]
+ assert dumps(b, value_sharing=value_sharing) == expected
+
+
+def test_unsupported_type():
+ exc = pytest.raises(CBOREncodeError, dumps, lambda: None)
+ exc.match('cannot serialize type function')
+
+
+def test_default():
+ class DummyType(object):
+ def __init__(self, state):
+ self.state = state
+
+ def default_encoder(encoder, value):
+ encoder.encode(value.state)
+
+ expected = unhexlify('820305')
+ obj = DummyType([3, 5])
+ serialized = dumps(obj, default=default_encoder)
+ assert serialized == expected
+
+
+def test_default_cyclic():
+ class DummyType(object):
+ def __init__(self, value=None):
+ self.value = value
+
+ @shareable_encoder
+ def default_encoder(encoder, value):
+ state = encoder.encode_to_bytes(value.value)
+ encoder.encode(CBORTag(3000, state))
+
+ expected = unhexlify('D81CD90BB849D81CD90BB843D81D00')
+ obj = DummyType()
+ obj2 = DummyType(obj)
+ obj.value = obj2
+ serialized = dumps(obj, value_sharing=True, default=default_encoder)
+ assert serialized == expected
+
+
+def test_dump_to_file(tmpdir):
+ path = tmpdir.join('testdata.cbor')
+ with path.open('wb') as fp:
+ dump([1, 10], fp)
+
+ assert path.read_binary() == b'\x82\x01\x0a'
diff --git a/third_party/python/cbor2/tests/test_types.py b/third_party/python/cbor2/tests/test_types.py
new file mode 100644
index 0000000000..e5eea5fbdc
--- /dev/null
+++ b/third_party/python/cbor2/tests/test_types.py
@@ -0,0 +1,36 @@
+import pytest
+
+from cbor2.types import CBORTag, CBORSimpleValue
+
+
+def test_tag_repr():
+ assert repr(CBORTag(600, 'blah')) == "CBORTag(600, 'blah')"
+
+
+def test_tag_equals():
+ tag1 = CBORTag(500, ['foo'])
+ tag2 = CBORTag(500, ['foo'])
+ tag3 = CBORTag(500, ['bar'])
+ assert tag1 == tag2
+ assert not tag1 == tag3
+ assert not tag1 == 500
+
+
+def test_simple_value_repr():
+ assert repr(CBORSimpleValue(1)) == "CBORSimpleValue(1)"
+
+
+def test_simple_value_equals():
+ tag1 = CBORSimpleValue(1)
+ tag2 = CBORSimpleValue(1)
+ tag3 = CBORSimpleValue(21)
+ assert tag1 == tag2
+ assert tag1 == 1
+ assert not tag1 == tag3
+ assert not tag1 == 21
+ assert not tag2 == "21"
+
+
+def test_simple_value_too_big():
+ exc = pytest.raises(TypeError, CBORSimpleValue, 256)
+ assert str(exc.value) == 'simple value too big'
diff --git a/third_party/python/cbor2/tox.ini b/third_party/python/cbor2/tox.ini
new file mode 100644
index 0000000000..8f272bf698
--- /dev/null
+++ b/third_party/python/cbor2/tox.ini
@@ -0,0 +1,20 @@
+[tox]
+envlist = py27, py33, py34, py35, py36, pypy, flake8
+skip_missing_interpreters = true
+
+[tox:travis]
+2.7 = py27
+3.3 = py33
+3.4 = py34
+3.5 = py35
+3.6 = py36, flake8
+pypy = pypy
+
+[testenv]
+commands = python -m pytest {posargs}
+extras = testing
+
+[testenv:flake8]
+deps = flake8
+commands = flake8 cbor2 tests
+skip_install = true
diff --git a/third_party/python/certifi/LICENSE b/third_party/python/certifi/LICENSE
new file mode 100644
index 0000000000..802b53ff11
--- /dev/null
+++ b/third_party/python/certifi/LICENSE
@@ -0,0 +1,21 @@
+This package contains a modified version of ca-bundle.crt:
+
+ca-bundle.crt -- Bundle of CA Root Certificates
+
+Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011
+This is a bundle of X.509 certificates of public Certificate Authorities
+(CA). These were automatically extracted from Mozilla's root certificates
+file (certdata.txt). This file can be found in the mozilla source tree:
+http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1
+It contains the certificates in PEM format and therefore
+can be directly used with curl / libcurl / php_curl, or with
+an Apache+mod_ssl webserver for SSL client authentication.
+Just configure this file as the SSLCACertificateFile.
+
+***** BEGIN LICENSE BLOCK *****
+This Source Code Form is subject to the terms of the Mozilla Public License,
+v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
+one at http://mozilla.org/MPL/2.0/.
+
+***** END LICENSE BLOCK *****
+@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
diff --git a/third_party/python/certifi/MANIFEST.in b/third_party/python/certifi/MANIFEST.in
new file mode 100644
index 0000000000..6077b5ff84
--- /dev/null
+++ b/third_party/python/certifi/MANIFEST.in
@@ -0,0 +1 @@
+include MANIFEST.in README.rst LICENSE certifi/cacert.pem
diff --git a/third_party/python/certifi/PKG-INFO b/third_party/python/certifi/PKG-INFO
new file mode 100644
index 0000000000..73f3643804
--- /dev/null
+++ b/third_party/python/certifi/PKG-INFO
@@ -0,0 +1,69 @@
+Metadata-Version: 1.1
+Name: certifi
+Version: 2018.4.16
+Summary: Python package for providing Mozilla's CA Bundle.
+Home-page: http://certifi.io/
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.com
+License: MPL-2.0
+Description: Certifi: Python SSL Certificates
+ ================================
+
+ `Certifi`_ is a carefully curated collection of Root Certificates for
+ validating the trustworthiness of SSL certificates while verifying the identity
+ of TLS hosts. It has been extracted from the `Requests`_ project.
+
+ Installation
+ ------------
+
+ ``certifi`` is available on PyPI. Simply install it with ``pip``::
+
+ $ pip install certifi
+
+ Usage
+ -----
+
+ To reference the installed certificate authority (CA) bundle, you can use the
+ built-in function::
+
+ >>> import certifi
+
+ >>> certifi.where()
+ '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem'
+
+ Enjoy!
+
+ 1024-bit Root Certificates
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Browsers and certificate authorities have concluded that 1024-bit keys are
+ unacceptably weak for certificates, particularly root certificates. For this
+ reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its
+ bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key)
+ certificate from the same CA. Because Mozilla removed these certificates from
+ its bundle, ``certifi`` removed them as well.
+
+ In previous versions, ``certifi`` provided the ``certifi.old_where()`` function
+ to intentionally re-add the 1024-bit roots back into your bundle. This was not
+ recommended in production and therefore was removed. To assist in migrating old
+ code, the function ``certifi.old_where()`` continues to exist as an alias of
+ ``certifi.where()``. Please update your code to use ``certifi.where()``
+ instead. ``certifi.old_where()`` will be removed in 2018.
+
+ .. _`Certifi`: http://certifi.io/en/latest/
+ .. _`Requests`: http://docs.python-requests.org/en/latest/
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
diff --git a/third_party/python/certifi/README.rst b/third_party/python/certifi/README.rst
new file mode 100644
index 0000000000..64b3e38e10
--- /dev/null
+++ b/third_party/python/certifi/README.rst
@@ -0,0 +1,46 @@
+Certifi: Python SSL Certificates
+================================
+
+`Certifi`_ is a carefully curated collection of Root Certificates for
+validating the trustworthiness of SSL certificates while verifying the identity
+of TLS hosts. It has been extracted from the `Requests`_ project.
+
+Installation
+------------
+
+``certifi`` is available on PyPI. Simply install it with ``pip``::
+
+ $ pip install certifi
+
+Usage
+-----
+
+To reference the installed certificate authority (CA) bundle, you can use the
+built-in function::
+
+ >>> import certifi
+
+ >>> certifi.where()
+ '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem'
+
+Enjoy!
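+
+As an illustrative aside (this snippet is not part of the upstream certifi
+documentation), the path returned by ``certifi.where()`` is commonly handed to
+the standard library's ``ssl`` module when building a TLS context::
+
+    >>> import ssl
+    >>> import certifi
+
+    >>> # Use the bundled CA file for server certificate verification
+    >>> context = ssl.create_default_context(cafile=certifi.where())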
+
+1024-bit Root Certificates
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Browsers and certificate authorities have concluded that 1024-bit keys are
+unacceptably weak for certificates, particularly root certificates. For this
+reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its
+bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key)
+certificate from the same CA. Because Mozilla removed these certificates from
+its bundle, ``certifi`` removed them as well.
+
+In previous versions, ``certifi`` provided the ``certifi.old_where()`` function
+to intentionally re-add the 1024-bit roots back into your bundle. This was not
+recommended in production and therefore was removed. To assist in migrating old
+code, the function ``certifi.old_where()`` continues to exist as an alias of
+``certifi.where()``. Please update your code to use ``certifi.where()``
+instead. ``certifi.old_where()`` will be removed in 2018.
+
+.. _`Certifi`: http://certifi.io/en/latest/
+.. _`Requests`: http://docs.python-requests.org/en/latest/
diff --git a/third_party/python/certifi/certifi/__init__.py b/third_party/python/certifi/certifi/__init__.py
new file mode 100644
index 0000000000..0c4963ef60
--- /dev/null
+++ b/third_party/python/certifi/certifi/__init__.py
@@ -0,0 +1,3 @@
+from .core import where, old_where
+
+__version__ = "2018.04.16"
diff --git a/third_party/python/certifi/certifi/__main__.py b/third_party/python/certifi/certifi/__main__.py
new file mode 100644
index 0000000000..5f1da0dd0c
--- /dev/null
+++ b/third_party/python/certifi/certifi/__main__.py
@@ -0,0 +1,2 @@
+from certifi import where
+print(where())
diff --git a/third_party/python/certifi/certifi/cacert.pem b/third_party/python/certifi/certifi/cacert.pem
new file mode 100644
index 0000000000..2713f541c4
--- /dev/null
+++ b/third_party/python/certifi/certifi/cacert.pem
@@ -0,0 +1,4400 @@
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Label: "GlobalSign Root CA - R2"
+# Serial: 4835703278459682885658125
+# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
+# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
+# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
+# Serial: 206684696279472310254277870180966723415
+# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
+# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
+# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
+N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
+KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
+kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
+CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
+Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
+imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
+2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
+DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
+/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946069240
+# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
+# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
+# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Label: "AddTrust External Root"
+# Serial: 1
+# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
+# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
+# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
+-----BEGIN CERTIFICATE-----
+MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
+IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
+MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
+FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
+bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
+dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
+H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
+uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
+mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
+a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
+E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
+WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
+VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
+Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
+cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
+IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
+AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
+YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
+6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
+Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
+c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
+mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Label: "GeoTrust Global CA"
+# Serial: 144470
+# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
+# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
+# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
+-----BEGIN CERTIFICATE-----
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA"
+# Serial: 1
+# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
+# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
+# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
+-----BEGIN CERTIFICATE-----
+MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
+BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
+IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
+VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
+cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
+QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
+F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
+c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
+mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
+VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
+teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
+f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
+Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
+MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
+aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
+IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
+ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
+uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
+QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
+ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
+DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
+bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA 2"
+# Serial: 1
+# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
+# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
+# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
+VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
+c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
+WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
+FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
+XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
+se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
+KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
+IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
+y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
+hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
+QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
+Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
+HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
+KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
+L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
+Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
+ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
+T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
+GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
+1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
+OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
+6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
+QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
+-----END CERTIFICATE-----
+
+# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
+# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
+# Label: "Visa eCommerce Root"
+# Serial: 25952180776285836048024890241505565794
+# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02
+# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62
+# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22
+-----BEGIN CERTIFICATE-----
+MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr
+MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl
+cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv
+bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw
+CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h
+dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l
+cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h
+2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E
+lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV
+ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq
+299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t
+vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL
+dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF
+AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR
+zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3
+LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd
+7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw
+++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt
+398znM/jra6O1I7mT1GvFpLgXPYHDw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Label: "QuoVadis Root CA"
+# Serial: 985026699
+# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24
+# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9
+# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73
+-----BEGIN CERTIFICATE-----
+MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz
+MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw
+IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR
+dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp
+li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D
+rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ
+WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug
+F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU
+xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC
+Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv
+dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw
+ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl
+IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh
+c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy
+ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh
+Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI
+KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T
+KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq
+y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p
+dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD
+VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL
+MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk
+fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8
+7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R
+cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y
+mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW
+xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK
+SnQ2+Q==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
+# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
+# Label: "Security Communication Root CA"
+# Serial: 0
+# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
+# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
+# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
+-----BEGIN CERTIFICATE-----
+MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
+MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
+WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
+VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
+9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
+DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
+Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
+QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
+xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
+A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
+Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
+JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
+RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sonera Class2 CA O=Sonera
+# Subject: CN=Sonera Class2 CA O=Sonera
+# Label: "Sonera Class 2 Root CA"
+# Serial: 29
+# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb
+# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27
+# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
+MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx
+MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV
+BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o
+Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt
+5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s
+3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej
+vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu
+8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw
+DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG
+MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil
+zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/
+3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD
+FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6
+Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
+ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: O=Government Root Certification Authority
+# Subject: O=Government Root Certification Authority
+# Label: "Taiwan GRCA"
+# Serial: 42023070807708724159991140556527066870
+# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
+# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
+# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
+MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
+PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
+IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
+gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
+yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
+F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
+jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
+ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
+VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
+YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
+EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
+Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
+DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
+MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
+TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
+qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
+ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
+JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
+hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
+EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
+nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
+udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
+ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
+LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
+pYYsfPQS
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=Class 2 Primary CA O=Certplus
+# Subject: CN=Class 2 Primary CA O=Certplus
+# Label: "Certplus Class 2 Primary CA"
+# Serial: 177770208045934040241468760488327595043
+# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b
+# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb
+# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb
+-----BEGIN CERTIFICATE-----
+MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw
+PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz
+cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9
+MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz
+IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ
+ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR
+VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL
+kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd
+EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas
+H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0
+HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud
+DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4
+QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu
+Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/
+AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8
+yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR
+FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA
+ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB
+kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7
+l7+ijrRU
+-----END CERTIFICATE-----
+
+# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Label: "DST Root CA X3"
+# Serial: 91299735575339953335919266965803778155
+# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5
+# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13
+# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
+PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
+Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
+rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
+OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
+xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
+7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
+aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
+SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
+ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
+AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
+R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
+JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
+Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Label: "SwissSign Silver CA - G2"
+# Serial: 5700383053117599563
+# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
+# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
+# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Label: "GeoTrust Primary Certification Authority"
+# Serial: 32798226551256963324313806436981982369
+# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
+# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
+# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA"
+# Serial: 69529181992039203566298953787712940909
+# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
+# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
+# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
+# Serial: 33037644167568058970164719475676101450
+# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
+# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
+# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GA CA"
+# Serial: 86718877871133159090080555911823548314
+# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93
+# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9
+# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5
+-----BEGIN CERTIFICATE-----
+MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
+ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
+aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
+ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
+NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
+A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
+VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
+SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
+VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
+w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
+mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
+4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
+4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
+EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
+SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
+ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
+vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
+hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
+Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
+/L7fCg0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Label: "Deutsche Telekom Root CA 2"
+# Serial: 38
+# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08
+# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf
+# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3
+-----BEGIN CERTIFICATE-----
+MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc
+MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj
+IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB
+IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE
+RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl
+U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290
+IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU
+ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC
+QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr
+rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S
+NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc
+QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH
+txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP
+BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
+AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp
+tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa
+IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl
+6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+
+xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
+Cm26OWMohpLzGITY+9HPBVZkVw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Label: "Cybertrust Global Root"
+# Serial: 4835703278459682877484360
+# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
+# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
+# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
+-----BEGIN CERTIFICATE-----
+MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
+A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
+bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
+b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
+7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
+J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
+HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
+t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
+FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
+XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
+hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
+MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
+A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
+Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
+omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
+A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
+WL1WMRJOEcgh4LMRkWXbtKaIOM5V
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G3"
+# Serial: 28809105769928564313984085209975885599
+# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
+# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
+# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G2"
+# Serial: 71758320672825410020661621085256472406
+# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
+# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
+# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
+-----BEGIN CERTIFICATE-----
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G3"
+# Serial: 127614157056681299805556476275995414779
+# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
+# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
+# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G2"
+# Serial: 80682863203381065782177908751794619243
+# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
+# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
+# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Universal Root Certification Authority"
+# Serial: 85209574734084581917763752644031726877
+# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
+# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
+# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
+-----BEGIN CERTIFICATE-----
+MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
+vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
+ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
+IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
+IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
+bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
+9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
+H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
+LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
+/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
+rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
+WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
+exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
+DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
+sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
+lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
+7M2CYfE45k+XmCpajQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
+# Serial: 63143484348153506665311985501458640051
+# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
+# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
+# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
+-----BEGIN CERTIFICATE-----
+MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
+U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
+SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
+biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
+GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
+fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
+aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
+kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
+4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
+FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G2"
+# Serial: 10000012
+# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a
+# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16
+# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f
+-----BEGIN CERTIFICATE-----
+MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
+DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
+qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
+uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
+Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
+pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
+5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
+UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
+GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
+5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
+6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
+eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
+B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov
+L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG
+SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS
+CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen
+5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897
+IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK
+gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL
++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL
+vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm
+bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk
+N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC
+Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
+ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Label: "Hongkong Post Root CA 1"
+# Serial: 1000
+# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
+# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
+# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
+-----BEGIN CERTIFICATE-----
+MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
+FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
+Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
+A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
+b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
+jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
+PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
+ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
+nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
+q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
+MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
+mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
+7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
+oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
+EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
+fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
+AmvZWg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Label: "SecureSign RootCA11"
+# Serial: 1
+# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
+# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
+# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 6047274297262753887
+# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
+# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
+# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
+MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
+VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
+ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
+AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
+661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
+am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
+ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
+PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
+3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
+SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
+ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
+StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
+Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
+jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Label: "Chambers of Commerce Root - 2008"
+# Serial: 11806822484801597146
+# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7
+# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c
+# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0
+-----BEGIN CERTIFICATE-----
+MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz
+IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz
+MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj
+dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw
+EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp
+MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9
+28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq
+VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q
+DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR
+5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL
+ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a
+Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl
+UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s
++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5
+Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj
+ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx
+hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV
+HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1
++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN
+YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t
+L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy
+ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt
+IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV
+HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w
+DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW
+PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF
+5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
+glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH
+FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2
+pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD
+xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG
+tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq
+jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De
+fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg
+OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ
+d0jQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Label: "Global Chambersign Root - 2008"
+# Serial: 14541511773111788494
+# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3
+# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c
+# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca
+-----BEGIN CERTIFICATE-----
+MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD
+aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx
+MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy
+cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG
+A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl
+BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI
+hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed
+KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7
+G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2
+zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4
+ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG
+HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2
+Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V
+yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e
+beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r
+6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh
+wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog
+zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW
+BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr
+ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp
+ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk
+cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt
+YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC
+CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow
+KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI
+hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ
+UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz
+X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
+fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz
+a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd
+Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd
+SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O
+AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso
+M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge
+v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z
+09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2011"
+# Serial: 0
+# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9
+# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d
+# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71
+-----BEGIN CERTIFICATE-----
+MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix
+RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p
+YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw
+NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK
+EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl
+cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz
+dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ
+fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns
+bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD
+75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP
+FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV
+HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp
+5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu
+b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA
+A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p
+6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8
+TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7
+dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys
+Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI
+l7WdmplNsDz4SgCbZN2fOUvRJ9e4
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Trustis Limited OU=Trustis FPS Root CA
+# Subject: O=Trustis Limited OU=Trustis FPS Root CA
+# Label: "Trustis FPS Root CA"
+# Serial: 36053640375399034304724988975563710553
+# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d
+# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04
+# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d
+-----BEGIN CERTIFICATE-----
+MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL
+ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx
+MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc
+MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+
+AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH
+iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj
+vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA
+0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB
+OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/
+BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E
+FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01
+GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW
+zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4
+1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE
+f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F
+jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
+ZetX2fNXlrtIzYE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Label: "EE Certification Centre Root CA"
+# Serial: 112324828676200291871926431888494945866
+# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f
+# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7
+# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76
+-----BEGIN CERTIFICATE-----
+MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
+MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
+czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
+CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
+MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
+ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
+b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
+AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
+euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
+bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
+MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
+1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
+zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
+BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
+BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
+v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
+E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
+uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
+iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
+GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
+# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
+# Label: "E-Tugra Certification Authority"
+# Serial: 7667447206703254355
+# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
+# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
+# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
+BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
+aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
+BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
+Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
+MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
+em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
+ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
+B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
+D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
+Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
+q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
+k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
+fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
+dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
+ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
+zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
+rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
+U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
+Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
+XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
+Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
+HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
+GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
+77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
+vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
+FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
+yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
+AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
+y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
+NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 14367148294922964480859022125800977897474
+# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e
+# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb
+# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c
+-----BEGIN CERTIFICATE-----
+MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ
+FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F
+uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs
+ewv4n4Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G3"
+# Serial: 10003001
+# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37
+# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc
+# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX
+DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP
+cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW
+IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX
+xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy
+KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR
+9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az
+5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8
+6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7
+Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP
+bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt
+BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt
+XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd
+INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD
+U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp
+LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8
+Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp
+gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh
+/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A
+fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq
+4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR
+1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/
+QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM
+94B7IWcnMFk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Label: "Staat der Nederlanden EV Root CA"
+# Serial: 10000013
+# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
+# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
+# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
+-----BEGIN CERTIFICATE-----
+MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
+MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
+TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
+b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
+M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
+UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
+Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
+rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
+pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
+j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
+KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
+/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
+cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
+1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
+px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
+MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
+eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
+2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
+v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
+wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
+CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
+vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
+Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
+Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
+eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
+FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
+7uzXLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
+# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
+# Label: "Certinomis - Root CA"
+# Serial: 1
+# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f
+# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8
+# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58
+-----BEGIN CERTIFICATE-----
+MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET
+MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb
+BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz
+MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx
+FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g
+Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2
+fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl
+LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV
+WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF
+TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb
+5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc
+CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri
+wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ
+wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG
+m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4
+F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng
+WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0
+2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF
+AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/
+0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw
+F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS
+g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj
+qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN
+h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/
+ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V
+btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj
+Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ
+8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW
+gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GB CA"
+# Serial: 157768595616588414422159278966750757568
+# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
+# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
+# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
+-----BEGIN CERTIFICATE-----
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
+MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
+Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
+YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
+CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
+b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
+HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
+WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
+1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
+u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
+99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
+M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
+BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
+cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
+ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
+aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Label: "SZAFIR ROOT CA2"
+# Serial: 357043034767186914217277344587386743377558296292
+# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
+# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
+# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
+ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
+NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
+cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
+Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
+QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
+3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
+3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
+3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
+BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
+XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
+AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
+8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
+nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
+oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
+d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
+LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA 2"
+# Serial: 44979900017204383099463764357512596969
+# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
+# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
+# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
+-----BEGIN CERTIFICATE-----
+MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
+gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
+QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
+A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
+OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
+VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
+b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
+DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
+0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
+OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
+fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
+Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
+o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
+sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
+OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
+Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
+adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
+3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
+AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
+F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
+CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
+djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
+WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
+AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
+P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
+b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
+XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
+5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
+DrW5viSP
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
+# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
+# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
+-----BEGIN CERTIFICATE-----
+MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
+DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
+IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
+N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
+dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
+A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
+ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
+QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
+4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
+4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
+ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
+9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
+gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
+Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
+LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
+Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
+ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
+XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
+M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
+9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
+Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
+j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
+X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
+l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
+bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
+pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
+e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
+vm9qp/UsQu0yrbYhnr68
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
+# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
+# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
+-----BEGIN CERTIFICATE-----
+MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
+BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
+bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
+b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
+BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
+YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
+MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
+dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
+QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
+jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
+MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
+C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
+lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
+TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certplus Root CA G1 O=Certplus
+# Subject: CN=Certplus Root CA G1 O=Certplus
+# Label: "Certplus Root CA G1"
+# Serial: 1491911565779898356709731176965615564637713
+# MD5 Fingerprint: 7f:09:9c:f7:d9:b9:5c:69:69:56:d5:37:3e:14:0d:42
+# SHA1 Fingerprint: 22:fd:d0:b7:fd:a2:4e:0d:ac:49:2c:a0:ac:a6:7b:6a:1f:e3:f7:66
+# SHA256 Fingerprint: 15:2a:40:2b:fc:df:2c:d5:48:05:4d:22:75:b3:9c:7f:ca:3e:c0:97:80:78:b0:f0:ea:76:e5:61:a6:c7:43:3e
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgISESBVg+QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUA
+MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy
+dHBsdXMgUm9vdCBDQSBHMTAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBa
+MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy
+dHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+ANpQh7bauKk+nWT6VjOaVj0W5QOVsjQcmm1iBdTYj+eJZJ+622SLZOZ5KmHNr49a
+iZFluVj8tANfkT8tEBXgfs+8/H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt
+6kuJPKNxQv4c/dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1/oA/caP
+0FG7Yn2ksYyy/yARujVjBYZHYEMzkPZHogNPlk2dT8Hq6pyi/jQu3rfKG3akt62f
+6ajUeD94/vI4CTYd0hYCyOwqaK/1jpTvLRN6HkJKHRUxrgwEV/xhc/MxVoYxgKDE
+EW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2z4QTd28n6v+WZxcIbekN
+1iNQMLAVdBM+5S//Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc4nBvCGrc
+h2c0798wct3zyT8j/zXhviEpIDCB5BmlIOklynMxdCm+4kLV87ImZsdo/Rmz5yCT
+mehd4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z/fyL8inqh3SV
+4EJQeIQEQWGw9CEjjy3LKCHyamz0GqbFFLQ3ZU+V/YDI+HLlJWvEYLF7bY5KinPO
+WftwenMGE9nTdDckQQoRb5fc5+R+ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1Ud
+DwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSowcCbkahDFXxd
+Bie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHYlwuBsTANBgkq
+hkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh/k4DgYzDLDKTudQSk0YcbX8ACh
+66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT+Hern+X+2B50ioUHj3/MeXrKls3N/U/7
+/SMNkPX0XtPGYX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE+2J2winq14J2by5BS
+S7CTKtQ+FjPlnsZlFT5kOwQ/2wyPX1wdaR+v8+khjPPvl/aatxm2hHSco1S1cE5j
+2FddUyGbQJJD+tZ3VTNPZNX70Cxqjm0lpu+F6ALEUz65noe8zDUa3qHpimOHZR4R
+Kttjd5cUvpoUmRGywO6wT/gUITJDT5+rosuoD6o7BlXGEilXCNQ314cnrUlZp5Gr
+RHpejXDbl85IULFzk/bwg2D5zfHhMf1bfHEhYxQUqq/F3pN+aLHsIqKqkHWetUNy
+6mSjhEv9DKgma3GX7lZjZuhCVPnHHd/Qj1vfyDBviP4NxDMcU6ij/UgQ8uQKTuEV
+V/xuZDDCVRHc6qnNSlSsKWNEz0pAoNZoWRsz+e86i9sgktxChL8Bq4fA1SCC28a5
+g4VCXA9DO2pJNdWY9BW/+mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl
+++O/QmueD6i9a5jc2NvLi6Td11n0bt3+qsOR0C5CB8AMTVPNJLFMWx5R9N/pkvo=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certplus Root CA G2 O=Certplus
+# Subject: CN=Certplus Root CA G2 O=Certplus
+# Label: "Certplus Root CA G2"
+# Serial: 1492087096131536844209563509228951875861589
+# MD5 Fingerprint: a7:ee:c4:78:2d:1b:ee:2d:b9:29:ce:d6:a7:96:32:31
+# SHA1 Fingerprint: 4f:65:8e:1f:e9:06:d8:28:02:e9:54:47:41:c9:54:25:5d:69:cc:1a
+# SHA256 Fingerprint: 6c:c0:50:41:e6:44:5e:74:69:6c:4c:fb:c9:f8:0f:54:3b:7e:ab:bb:44:b4:ce:6f:78:7c:6a:99:71:c4:2f:17
+-----BEGIN CERTIFICATE-----
+MIICHDCCAaKgAwIBAgISESDZkc6uo+jF5//pAq/Pc7xVMAoGCCqGSM49BAMDMD4x
+CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs
+dXMgUm9vdCBDQSBHMjAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4x
+CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs
+dXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABM0PW1aC3/BFGtat
+93nwHcmsltaeTpwftEIRyoa/bfuFo8XlGVzX7qY/aWfYeOKmycTbLXku54uNAm8x
+Ik0G42ByRZ0OQneezs/lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0P
+AQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwj
+FNiPwyCrKGBZMB8GA1UdIwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqG
+SM49BAMDA2gAMGUCMHD+sAvZ94OX7PNVHdTcswYO/jOYnYs5kGuUIe22113WTNch
+p+e/IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjlvPl5adytRSv3tjFzzAal
+U5ORGpOucGpnutee5WEaXw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=OpenTrust Root CA G1 O=OpenTrust
+# Subject: CN=OpenTrust Root CA G1 O=OpenTrust
+# Label: "OpenTrust Root CA G1"
+# Serial: 1492036577811947013770400127034825178844775
+# MD5 Fingerprint: 76:00:cc:81:29:cd:55:5e:88:6a:7a:2e:f7:4d:39:da
+# SHA1 Fingerprint: 79:91:e8:34:f7:e2:ee:dd:08:95:01:52:e9:55:2d:14:e9:58:d5:7e
+# SHA256 Fingerprint: 56:c7:71:28:d9:8c:18:d9:1b:4c:fd:ff:bc:25:ee:91:03:d4:75:8e:a2:ab:ad:82:6a:90:f3:45:7d:46:0e:b4
+-----BEGIN CERTIFICATE-----
+MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUA
+MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w
+ZW5UcnVzdCBSb290IENBIEcxMB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAw
+MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU
+T3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7faYp6b
+wiTXj/HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX
+/uMftk87ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij/swjm4eTrwSSTilZHcYTSSjFR0
+77F9jAHiOH3BX2pfJLKOYheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGP
+uY4zbGneWK2gDqdkVBFpRGZPTBKnjix9xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLx
+p2NX5Ntqp66/K7nJ5rInieV+mhxNaMbBGN4zK1FGSxyO9z0M+Yo0FMT7MzUj8czx
+Kselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ/GbI4Kq3ywgsNw2
+TgOzfALU5nsaqocTvz6hdLubDuHAk5/XpGbKuxs74zD0M1mKB3IDVedzagMxbm+W
+G+Oin6+Sx+31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPw
+vFEVVJSmdz7QdFG9URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYY
+EQRVzXR7z2FwefR7LFxckvzluFqrTJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUl0YhVyE1
+2jZVx/PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx/PxN3DlCPaTKbYw
+DQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E
+PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr/jyTwyqkxf3kf
+gLMtMrpkZ2CvuVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN+VxbS
+FXJfLkur1J1juONI5f6ELlgKn0Md/rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv/TW0
+V8/bhUiZucJvbI/NeJWsZCj9VrDDb8O+WVLhX4SPgPL0DTatdrOjteFkdjpY3H1P
+XlZs5VVZV6Xf8YpmMIzUUmI4d7S+KNfKNsSbBfD4Fdvb8e80nR14SohWZ25g/4/I
+i+GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcmGS3t
+TAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L91
+09S5zvE/bw4cHjdx5RiHdRk/ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS/Ky
+Pu1svf0OnWZzsD2097+o4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJ
+AwSQiumPv+i2tCqjI40cHLI5kqiPAlxAOXXUc0ECd97N4EOH1uS6SsNsEn/+KuYj
+1oxx
+-----END CERTIFICATE-----
+
+# Issuer: CN=OpenTrust Root CA G2 O=OpenTrust
+# Subject: CN=OpenTrust Root CA G2 O=OpenTrust
+# Label: "OpenTrust Root CA G2"
+# Serial: 1492012448042702096986875987676935573415441
+# MD5 Fingerprint: 57:24:b6:59:24:6b:ae:c8:fe:1c:0c:20:f2:c0:4e:eb
+# SHA1 Fingerprint: 79:5f:88:60:c5:ab:7c:3d:92:e6:cb:f4:8d:e1:45:cd:11:ef:60:0b
+# SHA256 Fingerprint: 27:99:58:29:fe:6a:75:15:c1:bf:e8:48:f9:c4:76:1d:b1:6c:22:59:29:25:7b:f4:0d:08:94:f2:9e:a8:ba:f2
+-----BEGIN CERTIFICATE-----
+MIIFbzCCA1egAwIBAgISESChaRu/vbm9UpaPI+hIvyYRMA0GCSqGSIb3DQEBDQUA
+MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w
+ZW5UcnVzdCBSb290IENBIEcyMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAw
+MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU
+T3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQDMtlelM5QQgTJT32F+D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG+Ntmh
+/LzubKh8NBpxGuga2F8ORAbtp+Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78e
+CbY2albz4e6WiWYkBuTNQjpK3eCasMSCRbP+yatcfD7J6xcvDH1urqWPyKwlCm/6
+1UWY0jUJ9gNDlP7ZvyCVeYCYitmJNbtRG6Q3ffyZO6v/v6wNj0OxmXsWEH4db0fE
+FY8ElggGQgT4hNYdvJGmQr5J1WqIP7wtUdGejeBSzFfdNTVY27SPJIjki9/ca1TS
+gSuyzpJLHB9G+h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz3GIZ38i1MH/1PCZ1Eb3X
+G7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW+vj3CzMpSZy
+YhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L+epJUzpM5ChaH
+vGOz9bGTXOBut9Dq+WIyiET7vycotjCVXRIouZW+j1MY5aIYFuJWpLIsEPUdN6b4
+t/bQWVyJ98LVtZR00dX+G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA+aC/
+gh7PU3+06yzbXfZqfUAkBXKJOAGTy3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUajn6QiL3
+5okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59M4PLuG53hq8w
+DQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx+4BkJamz
+Gj5oXScmp7oq4fBXgwpkTx4idBvpkF/wrM//T2h6OKQQbA2xx6R3gBi2oihEdqc0
+nXGEL8pZ0keImUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO+qT
+RmTFAHneIWv2V6CG1wZy7HBGS4tz3aAhdT7cHcCP009zHIXZ/n9iyJVvttN7jLpT
+wm+bREx50B1ws9efAvSyB7DH5fitIw6mVskpEndI2S9G/Tvw/HRwkqWOOAgfZDC2
+t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ6e18CL13zSdkzJTa
+TkZQh+D5wVOAHrut+0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97krgCf2
+o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY/X0VdiLWK2gKgW0VU
+3jg9CcCoSmVGFvyqv1ROTVu+OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eA
+iN1nE28daCSLT7d0geX0YJ96Vdc+N9oWaz53rK4YcJUIeSkDiv7BO7M/Gg+kO14f
+WKGVyasvc0rQLW6aWQ9VGHgtPFGml4vmu7JwqkwR3v98KzfUetF3NI/n+UL3PIEM
+S1IK
+-----END CERTIFICATE-----
+
+# Issuer: CN=OpenTrust Root CA G3 O=OpenTrust
+# Subject: CN=OpenTrust Root CA G3 O=OpenTrust
+# Label: "OpenTrust Root CA G3"
+# Serial: 1492104908271485653071219941864171170455615
+# MD5 Fingerprint: 21:37:b4:17:16:92:7b:67:46:70:a9:96:d7:a8:13:24
+# SHA1 Fingerprint: 6e:26:64:f3:56:bf:34:55:bf:d1:93:3f:7c:01:de:d8:13:da:8a:a6
+# SHA256 Fingerprint: b7:c3:62:31:70:6e:81:07:8c:36:7c:b8:96:19:8f:1e:32:08:dd:92:69:49:dd:8f:57:09:a4:10:f7:5b:62:92
+-----BEGIN CERTIFICATE-----
+MIICITCCAaagAwIBAgISESDm+Ez8JLC+BUCs2oMbNGA/MAoGCCqGSM49BAMDMEAx
+CzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5U
+cnVzdCBSb290IENBIEczMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFow
+QDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwUT3Bl
+blRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARK7liuTcpm
+3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5Bta1d
+oYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4G
+A1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRHd8MUi2I5
+DMlv4VBN0BBY3JWIbTAfBgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAK
+BggqhkjOPQQDAwNpADBmAjEAj6jcnboMBBf6Fek9LykBl7+BFjNAk2z8+e2AcG+q
+j9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta3U1fJAuwACEl74+nBCZx
+4nxp5V2a+EEfOzmTk51V6s2N8fvB
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
+# Subject: CN=ISRG Root X1 O=Internet Security Research Group
+# Label: "ISRG Root X1"
+# Serial: 172886928669790476064670243504169061120
+# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
+# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
+# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Label: "AC RAIZ FNMT-RCM"
+# Serial: 485876308206448804701554682760554759
+# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
+# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
+# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
+WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
+BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
+Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
+yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
+BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
+WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
+tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
+374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
+IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
+mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
+wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
+MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
+ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
+UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
+YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
+LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
+nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
+RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
+LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
+77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
+JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
+fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
+1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
+9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
+RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
+uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 1 O=Amazon
+# Subject: CN=Amazon Root CA 1 O=Amazon
+# Label: "Amazon Root CA 1"
+# Serial: 143266978916655856878034712317230054538369994
+# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
+# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
+# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
+-----BEGIN CERTIFICATE-----
+MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Label: "LuxTrust Global Root 2"
+# Serial: 59914338225734147123941058376788110305822489521
+# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c
+# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f
+# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5
+-----BEGIN CERTIFICATE-----
+MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL
+BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV
+BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw
+MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B
+LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F
+ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem
+hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1
+EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn
+Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4
+zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ
+96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m
+j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g
+DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+
+8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j
+X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH
+hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB
+KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0
+Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT
++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL
+BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9
+BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO
+jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9
+loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c
+qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+
+2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/
+JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre
+zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf
+LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+
+x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6
+oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
+# Serial: 1
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
+-----BEGIN CERTIFICATE-----
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
+-----BEGIN CERTIFICATE-----
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-1"
+# Serial: 15752444095811006489
+# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45
+# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a
+# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y
+IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB
+pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h
+IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG
+A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU
+cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid
+RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V
+seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme
+9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV
+EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW
+hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/
+DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I
+/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf
+ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ
+yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts
+L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN
+zl/HHk484IkzlQsPpTLWPFp5LBk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-2"
+# Serial: 2711694510199101698
+# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64
+# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0
+# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65
+-----BEGIN CERTIFICATE-----
+MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig
+Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk
+MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg
+Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD
+VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy
+dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+
+QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq
+1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp
+2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK
+DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape
+az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF
+3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88
+oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM
+g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3
+mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh
+8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd
+BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U
+nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX
+dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+
+MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL
+/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX
+CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa
+ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW
+2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7
+N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3
+Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB
+As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp
+5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu
+1uwJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor ECA-1"
+# Serial: 9548242946988625984
+# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c
+# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd
+# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y
+IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig
+RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb
+3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA
+BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5
+3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou
+owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/
+wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF
+ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf
+BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/
+MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv
+civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2
+AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F
+hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50
+soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI
+WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi
+tJ/X5g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
+-----BEGIN CERTIFICATE-----
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
+-----BEGIN CERTIFICATE-----
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
+VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
+kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
+gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority RSA R2"
+# Serial: 6248227494352943350
+# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
+# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
+# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
+-----BEGIN CERTIFICATE-----
+MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
+CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
+MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
+A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
+DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
+OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
+4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
+HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
+aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
+b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
+Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
+PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
+pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
+UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
+MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
+HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
+9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
+s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
+cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
+79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
+/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
+ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
+Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
+QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
+w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
+S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
+mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority ECC"
+# Serial: 3182246526754555285
+# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
+# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
+# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
+-----BEGIN CERTIFICATE-----
+MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
+-----END CERTIFICATE-----
diff --git a/third_party/python/certifi/certifi/core.py b/third_party/python/certifi/certifi/core.py
new file mode 100644
index 0000000000..eab9d1d178
--- /dev/null
+++ b/third_party/python/certifi/certifi/core.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+certifi.py
+~~~~~~~~~~
+
+This module returns the installation location of cacert.pem.
+"""
+import os
+import warnings
+
+
+class DeprecatedBundleWarning(DeprecationWarning):
+ """
+ The weak security bundle is being deprecated. Please bother your service
+ provider to get them to stop using cross-signed roots.
+ """
+
+
+def where():
+ f = os.path.dirname(__file__)
+
+ return os.path.join(f, 'cacert.pem')
+
+
+def old_where():
+ warnings.warn(
+ "The weak security bundle has been removed. certifi.old_where() is now an alias "
+ "of certifi.where(). Please update your code to use certifi.where() instead. "
+ "certifi.old_where() will be removed in 2018.",
+ DeprecatedBundleWarning
+ )
+ return where()
+
+if __name__ == '__main__':
+ print(where())
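As a quick orientation for how this module gets consumed, a minimal sketch follows; the `ssl` call is plain standard-library usage and not part of this file:

```python
import ssl

import certifi

# Build an SSL context that trusts exactly the Mozilla CA bundle
# that certifi.where() points at.
context = ssl.create_default_context(cafile=certifi.where())
print(certifi.where())  # .../certifi/cacert.pem
```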
diff --git a/third_party/python/certifi/setup.cfg b/third_party/python/certifi/setup.cfg
new file mode 100644
index 0000000000..163eba3165
--- /dev/null
+++ b/third_party/python/certifi/setup.cfg
@@ -0,0 +1,11 @@
+[bdist_wheel]
+universal = 1
+
+[metadata]
+license_file = LICENSE
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/third_party/python/certifi/setup.py b/third_party/python/certifi/setup.py
new file mode 100755
index 0000000000..2c20c269f6
--- /dev/null
+++ b/third_party/python/certifi/setup.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import with_statement
+import re
+import os
+import sys
+
+# While I generally consider it an antipattern to try to support both
+# setuptools and distutils with a single setup.py, in this specific instance
+# where certifi is a dependency of setuptools, it can create a circular
+# dependency when projects attempt to unbundle stuff from setuptools and pip.
+# Though we don't really support that, it makes things easier if we do this
+# and should hopefully cause fewer issues for end users.
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+
+version_regex = r'__version__ = ["\']([^"\']*)["\']'
+with open('certifi/__init__.py', 'r') as f:
+ text = f.read()
+ match = re.search(version_regex, text)
+
+ if match:
+ VERSION = match.group(1)
+ else:
+ raise RuntimeError("No version number found!")
+
+if sys.argv[-1] == 'publish':
+ os.system('python setup.py sdist bdist_wheel upload')
+ sys.exit()
+
+required = []
+setup(
+ name='certifi',
+ version=VERSION,
+ description='Python package for providing Mozilla\'s CA Bundle.',
+ long_description=open('README.rst').read(),
+ author='Kenneth Reitz',
+ author_email='me@kennethreitz.com',
+ url='http://certifi.io/',
+ packages=[
+ 'certifi',
+ ],
+ package_dir={'certifi': 'certifi'},
+ package_data={'certifi': ['*.pem']},
+ # data_files=[('certifi', ['certifi/cacert.pem'])],
+ include_package_data=True,
+ zip_safe=False,
+ license='MPL-2.0',
+ classifiers=(
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
+ 'Natural Language :: English',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ ),
+)
diff --git a/third_party/python/compare-locales/PKG-INFO b/third_party/python/compare-locales/PKG-INFO
new file mode 100644
index 0000000000..5daa15c4a5
--- /dev/null
+++ b/third_party/python/compare-locales/PKG-INFO
@@ -0,0 +1,82 @@
+Metadata-Version: 2.1
+Name: compare-locales
+Version: 8.1.0
+Summary: Lint Mozilla localizations
+Home-page: UNKNOWN
+Author: Axel Hecht
+Author-email: axel@mozilla.com
+License: MPL 2.0
+Description: [![Build Status](https://travis-ci.org/Pike/compare-locales.svg?branch=master)](https://travis-ci.org/Pike/compare-locales)
+ # compare-locales
+ Lint Mozilla localizations
+
+ Finds
+ * missing strings
+ * obsolete strings
+ * errors on runtime errors without false positives
+ * warns on possible runtime errors
+
+ It also includes `l10n-merge` functionality, which pads localizations with
+ missing English strings, and replaces entities with errors with English.
+
+ If you want to check your original code for errors like duplicated messages,
+ use `moz-l10n-lint`, which is also part of this package. You can also use
+ this to check for conflicts between your strings and those already exposed
+ to l10n.
+
+ # Configuration
+
+ You configure `compare-locales` (and `moz-l10n-lint`) through a
+ [project configuration](https://moz-l10n-config.readthedocs.io/en/latest/fileformat.html)
+ file, `l10n.toml`.
+
+ # Examples
+
+ To check all locales in a project use
+
+ ```bash
+ compare-locales l10n.toml .
+ ```
+
+ To check Firefox against a local check-out of l10n-central, use
+
+ ```bash
+ compare-locales browser/locales/l10n.toml ../l10n-central
+ ```
+
+ If you just want to check particular locales, specify them as additional
+ commandline parameters.
+
+ To lint your local work, use
+
+ ```bash
+ moz-l10n-lint l10n.toml
+ ```
+
+ To check for conflicts against already existing strings:
+
+ ```bash
+ moz-l10n-lint --reference-project ../android-l10n/mozilla-mobile/fenix l10n.toml
+ moz-l10n-lint --l10n-reference ../gecko-strings browser/locales/l10n.toml
+ ```
+
+ to check for a monolithic project like Fenix or a gecko project like Firefox,
+ resp.
+
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Localization
+Classifier: Topic :: Software Development :: Testing
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
+Description-Content-Type: text/markdown
diff --git a/third_party/python/compare-locales/README.md b/third_party/python/compare-locales/README.md
new file mode 100644
index 0000000000..a050c7d3c8
--- /dev/null
+++ b/third_party/python/compare-locales/README.md
@@ -0,0 +1,56 @@
+[![Build Status](https://travis-ci.org/Pike/compare-locales.svg?branch=master)](https://travis-ci.org/Pike/compare-locales)
+# compare-locales
+Lint Mozilla localizations
+
+Finds
+* missing strings
+* obsolete strings
+* errors for actual runtime errors, without false positives
+* warnings for possible runtime errors
+
+It also includes `l10n-merge` functionality, which pads localizations with
+missing English strings and replaces entities that have errors with their
+English values.
+
+If you want to check your original code for errors like duplicated messages,
+use `moz-l10n-lint`, which is also part of this package. You can also use
+this to check for conflicts between your strings and those already exposed
+to l10n.
+
+# Configuration
+
+You configure `compare-locales` (and `moz-l10n-lint`) through a
+[project configuration](https://moz-l10n-config.readthedocs.io/en/latest/fileformat.html)
+file, `l10n.toml`.
+
+# Examples
+
+To check all locales in a project use
+
+```bash
+compare-locales l10n.toml .
+```
+
+To check Firefox against a local check-out of l10n-central, use
+
+```bash
+compare-locales browser/locales/l10n.toml ../l10n-central
+```
+
+If you just want to check particular locales, specify them as additional
+commandline parameters.
+
+To lint your local work, use
+
+```bash
+moz-l10n-lint l10n.toml
+```
+
+To check for conflicts against already existing strings:
+
+```bash
+moz-l10n-lint --reference-project ../android-l10n/mozilla-mobile/fenix l10n.toml
+moz-l10n-lint --l10n-reference ../gecko-strings browser/locales/l10n.toml
+```
+
+The first command checks a monolithic project like Fenix, the second a Gecko
+project like Firefox.
diff --git a/third_party/python/compare-locales/compare_locales/__init__.py b/third_party/python/compare-locales/compare_locales/__init__.py
new file mode 100644
index 0000000000..3f323bbf7a
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/__init__.py
@@ -0,0 +1 @@
+version = "8.1.0"
diff --git a/third_party/python/compare-locales/compare_locales/checks/__init__.py b/third_party/python/compare-locales/compare_locales/checks/__init__.py
new file mode 100644
index 0000000000..0c81a4b715
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/checks/__init__.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+from .base import Checker, EntityPos
+from .android import AndroidChecker
+from .dtd import DTDChecker
+from .fluent import FluentChecker
+from .properties import PropertiesChecker
+
+
+__all__ = [
+ 'Checker', 'EntityPos',
+ 'AndroidChecker', 'DTDChecker', 'FluentChecker', 'PropertiesChecker',
+]
+
+
+def getChecker(file, extra_tests=None):
+ if PropertiesChecker.use(file):
+ return PropertiesChecker(extra_tests, locale=file.locale)
+ if DTDChecker.use(file):
+ return DTDChecker(extra_tests, locale=file.locale)
+ if FluentChecker.use(file):
+ return FluentChecker(extra_tests, locale=file.locale)
+ if AndroidChecker.use(file):
+ return AndroidChecker(extra_tests, locale=file.locale)
+ return Checker(extra_tests, locale=file.locale)
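For orientation, `getChecker` dispatches purely on the file path (via each checker's `pattern`) and the locale. A minimal sketch using a stand-in object that carries just the two attributes read here; the path is invented:

```python
from types import SimpleNamespace

from compare_locales.checks import getChecker

# Stand-in for a compare_locales.paths.File: getChecker only reads
# `.file` (matched against each checker's pattern) and `.locale`.
fake_file = SimpleNamespace(file='browser/chrome/browser.properties', locale='de')
checker = getChecker(fake_file)
print(type(checker).__name__)  # PropertiesChecker
```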
diff --git a/third_party/python/compare-locales/compare_locales/checks/android.py b/third_party/python/compare-locales/compare_locales/checks/android.py
new file mode 100644
index 0000000000..9791c49a4f
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/checks/android.py
@@ -0,0 +1,253 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import re
+from xml.dom import minidom
+
+from .base import Checker
+from ..parser.android import textContent
+
+
+class AndroidChecker(Checker):
+ pattern = re.compile('(.*)?strings.*\\.xml$')
+
+ def check(self, refEnt, l10nEnt):
+ '''Given the reference and localized Entities, performs checks.
+
+ This is a generator yielding tuples of
+ - "warning" or "error", depending on what should be reported,
+ - tuple of line, column info for the error within the string
+ - description string to be shown in the report
+ '''
+ for encoding_trouble in super(
+ AndroidChecker, self
+ ).check(refEnt, l10nEnt):
+ yield encoding_trouble
+ refNode = refEnt.node
+ l10nNode = l10nEnt.node
+ # Apples and oranges, error out.
+ if refNode.nodeName != l10nNode.nodeName:
+ yield ("error", 0, "Incompatible resource types", "android")
+ return
+ # Once we start parsing more resource types, make sure to add checks
+ # for them.
+ if refNode.nodeName != "string":
+ yield ("warning", 0, "Unsupported resource type", "android")
+ return
+ for report_tuple in self.check_string([refNode], l10nEnt):
+ yield report_tuple
+
+ def check_string(self, refs, l10nEnt):
+ '''Check a single string literal against a list of references.
+
+ There should be multiple nodes given for <plurals> or <string-array>.
+ '''
+ l10n = l10nEnt.node
+ if self.not_translatable(l10n, *refs):
+ yield (
+ "error",
+ 0,
+ "strings must be translatable",
+ "android"
+ )
+ return
+ if self.no_at_string(l10n):
+ yield (
+ "error",
+ 0,
+ "strings must be translatable",
+ "android"
+ )
+ return
+ if self.no_at_string(*refs):
+ yield (
+ "warning",
+ 0,
+ "strings must be translatable",
+ "android"
+ )
+ if self.non_simple_data(l10n):
+ yield (
+ "error",
+ 0,
+ "Only plain text allowed, "
+ "or one CDATA surrounded by whitespace",
+ "android"
+ )
+ return
+ for report_tuple in check_apostrophes(l10nEnt.val):
+ yield report_tuple
+
+ params, errors = get_params(refs)
+ for error, pos in errors:
+ yield (
+ "warning",
+ pos,
+ error,
+ "android"
+ )
+ if params:
+ for report_tuple in check_params(params, l10nEnt.val):
+ yield report_tuple
+
+ def not_translatable(self, *nodes):
+ return any(
+ node.hasAttribute("translatable")
+ and node.getAttribute("translatable") == "false"
+ for node in nodes
+ )
+
+ def no_at_string(self, *ref_nodes):
+        '''Android allows referencing other strings by using
+        @string/identifier
+        instead of the actual value. Those references don't belong in
+        a localizable file; warn on that.
+ '''
+ return any(
+ textContent(node).startswith('@string/')
+ for node in ref_nodes
+ )
+
+ def non_simple_data(self, node):
+ '''Only allow single text nodes, or, a single CDATA node
+ surrounded by whitespace.
+ '''
+ cdata = [
+ child
+ for child in node.childNodes
+ if child.nodeType == minidom.Node.CDATA_SECTION_NODE
+ ]
+ if len(cdata) == 0:
+ if node.childNodes.length == 0:
+ # empty translation is OK
+ return False
+ if node.childNodes.length != 1:
+ return True
+ return node.childNodes[0].nodeType != minidom.Node.TEXT_NODE
+ if len(cdata) > 1:
+ return True
+ for child in node.childNodes:
+ if child == cdata[0]:
+ continue
+ if child.nodeType != minidom.Node.TEXT_NODE:
+ return True
+ if child.data.strip() != "":
+ return True
+ return False
+
+
+silencer = re.compile(r'\\.|""')
+
+
+def check_apostrophes(string):
+ '''Check Android logic for quotes and apostrophes.
+
+ If you have an apostrophe (') in your string, you must either escape it
+ with a backslash (\') or enclose the string in double-quotes (").
+
+ Unescaped quotes are not visually shown on Android, but they're
+    also harmless, so we're not checking for quotes. We might do so once
+    we're better at checking for inline XML, which is full of quotes.
+    Pairing quotes as in '""' is bad, though, so report errors for that,
+    mostly because it's hard to tell whether Android ultimately considers
+    a string quoted or not.
+
+ https://developer.android.com/guide/topics/resources/string-resource#escaping_quotes
+ '''
+ for m in re.finditer('""', string):
+ yield (
+ "error",
+ m.start(),
+ "Double straight quotes not allowed",
+ "android"
+ )
+ string = silencer.sub(" ", string)
+
+ is_quoted = string.startswith('"') and string.endswith('"')
+ if not is_quoted:
+ # apostrophes need to be escaped
+ for m in re.finditer("'", string):
+ yield (
+ "error",
+ m.start(),
+ "Apostrophe must be escaped",
+ "android"
+ )
+
+
+def get_params(refs):
+ '''Get printf parameters and internal errors.
+
+ Returns a sparse map of positions to formatter, and a list
+ of errors. Errors covered so far are mismatching formatters.
+ '''
+ params = {}
+ errors = []
+ next_implicit = 1
+ for ref in refs:
+ if isinstance(ref, minidom.Node):
+ ref = textContent(ref)
+ for m in re.finditer(r'%(?P<order>[1-9]\$)?(?P<format>[sSd])', ref):
+ order = m.group('order')
+ if order:
+ order = int(order[0])
+ else:
+ order = next_implicit
+ next_implicit += 1
+ fmt = m.group('format')
+ if order not in params:
+ params[order] = fmt
+ else:
+ # check for consistency errors
+ if params[order] == fmt:
+ continue
+ msg = "Conflicting formatting, %{order}${f1} vs %{order}${f2}"
+ errors.append((
+ msg.format(order=order, f1=fmt, f2=params[order]),
+ m.start()
+ ))
+ return params, errors
+
+
+def check_params(params, string):
+ '''Compare the printf parameters in the given string to the reference
+ parameters.
+
+ Also yields errors that are internal to the parameters inside string,
+ as found by `get_params`.
+ '''
+ lparams, errors = get_params([string])
+ for error, pos in errors:
+ yield (
+ "error",
+ pos,
+ error,
+ "android"
+ )
+ # Compare reference for each localized parameter.
+ # If there's no reference found, error, as an out-of-bounds
+ # parameter crashes.
+ # This assumes that all parameters are actually used in the reference,
+ # which should be OK.
+ # If there's a mismatch in the formatter, error.
+ for order in sorted(lparams):
+ if order not in params:
+ yield (
+ "error",
+ 0,
+ "Formatter %{}${} not found in reference".format(
+ order, lparams[order]
+ ),
+ "android"
+ )
+ elif params[order] != lparams[order]:
+ yield (
+ "error",
+ 0,
+ "Mismatching formatter",
+ "android"
+ )
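Both helpers above (`check_apostrophes` and `get_params`) are plain module-level functions, so their behaviour is easy to probe in isolation; a small sketch with made-up strings:

```python
from compare_locales.checks.android import check_apostrophes, get_params

# An unescaped apostrophe in an unquoted Android string yields an error tuple.
print(list(check_apostrophes("That's all")))
# [('error', 4, 'Apostrophe must be escaped', 'android')]

# Ordered printf placeholders are collected into a sparse position -> format map.
params, errors = get_params(['%1$s opened %2$d tabs'])
print(params, errors)  # {1: 's', 2: 'd'} []
```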
diff --git a/third_party/python/compare-locales/compare_locales/checks/base.py b/third_party/python/compare-locales/compare_locales/checks/base.py
new file mode 100644
index 0000000000..3b04caa7a9
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/checks/base.py
@@ -0,0 +1,127 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import re
+import six
+
+
+class EntityPos(int):
+ pass
+
+
+mochibake = re.compile('\ufffd')
+
+
+class Checker(object):
+ '''Abstract class to implement checks per file type.
+ '''
+ pattern = None
+ # if a check uses all reference entities, set this to True
+ needs_reference = False
+
+ @classmethod
+ def use(cls, file):
+ return cls.pattern.match(file.file)
+
+ def __init__(self, extra_tests, locale=None):
+ self.extra_tests = extra_tests
+ self.locale = locale
+ self.reference = None
+
+ def check(self, refEnt, l10nEnt):
+ '''Given the reference and localized Entities, performs checks.
+
+ This is a generator yielding tuples of
+ - "warning" or "error", depending on what should be reported,
+ - tuple of line, column info for the error within the string
+ - description string to be shown in the report
+
+ By default, check for possible encoding errors.
+ '''
+ for m in mochibake.finditer(l10nEnt.all):
+ yield (
+ "warning",
+ EntityPos(m.start()),
+ "\ufffd in: {}".format(l10nEnt.key),
+ "encodings"
+ )
+
+ def set_reference(self, reference):
+ '''Set the reference entities.
+ Only do this if self.needs_reference is True.
+ '''
+ self.reference = reference
+
+
+class CSSCheckMixin(object):
+ def maybe_style(self, ref_value, l10n_value):
+ ref_map, _ = self.parse_css_spec(ref_value)
+ if not ref_map:
+ return
+ l10n_map, errors = self.parse_css_spec(l10n_value)
+ for t in self.check_style(ref_map, l10n_map, errors):
+ yield t
+
+ def check_style(self, ref_map, l10n_map, errors):
+ if not l10n_map:
+ yield ('error', 0, 'reference is a CSS spec', 'css')
+ return
+ if errors:
+ yield ('error', 0, 'reference is a CSS spec', 'css')
+ return
+ msgs = []
+ for prop, unit in l10n_map.items():
+ if prop not in ref_map:
+ msgs.insert(0, '%s only in l10n' % prop)
+ continue
+ else:
+ ref_unit = ref_map.pop(prop)
+ if unit != ref_unit:
+ msgs.append("units for %s don't match "
+ "(%s != %s)" % (prop, unit, ref_unit))
+ for prop in six.iterkeys(ref_map):
+ msgs.insert(0, '%s only in reference' % prop)
+ if msgs:
+ yield ('warning', 0, ', '.join(msgs), 'css')
+
+ def parse_css_spec(self, val):
+ if not hasattr(self, '_css_spec'):
+ self._css_spec = re.compile(
+ r'(?:'
+ r'(?P<prop>(?:min\-|max\-)?(?:width|height))'
+ r'[ \t\r\n]*:[ \t\r\n]*'
+ r'(?P<length>[0-9]+|[0-9]*\.[0-9]+)'
+ r'(?P<unit>ch|em|ex|rem|px|cm|mm|in|pc|pt)'
+ r')'
+ r'|\Z'
+ )
+ self._css_sep = re.compile(r'[ \t\r\n]*(?P<semi>;)?[ \t\r\n]*$')
+ refMap = errors = None
+ end = 0
+ for m in self._css_spec.finditer(val):
+ if end == 0 and m.start() == m.end():
+ # no CSS spec found, just immediately end of string
+ return None, None
+ if m.start() > end:
+ split = self._css_sep.match(val, end, m.start())
+ if split is None:
+ errors = errors or []
+ errors.append({
+ 'pos': end,
+ 'code': 'css-bad-content',
+ })
+ elif end > 0 and split.group('semi') is None:
+ errors = errors or []
+ errors.append({
+ 'pos': end,
+ 'code': 'css-missing-semicolon',
+ })
+ if m.group('prop'):
+ refMap = refMap or {}
+ refMap[m.group('prop')] = m.group('unit')
+ end = m.end()
+ return refMap, errors
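The CSS heuristics in `CSSCheckMixin` are self-contained, so here is a quick sketch of what the spec parser returns; the style strings are invented:

```python
from compare_locales.checks.base import CSSCheckMixin

mixin = CSSCheckMixin()
# A recognised width/height length spec maps property name -> unit.
print(mixin.parse_css_spec('width: 12em'))      # ({'width': 'em'}, None)
# Anything that isn't a pure length spec is rejected up front.
print(mixin.parse_css_spec('just some prose'))  # (None, None)
```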
diff --git a/third_party/python/compare-locales/compare_locales/checks/dtd.py b/third_party/python/compare-locales/compare_locales/checks/dtd.py
new file mode 100644
index 0000000000..37d3c7846d
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/checks/dtd.py
@@ -0,0 +1,246 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import re
+from xml import sax
+import six
+
+from compare_locales.parser import DTDParser
+from .base import Checker, CSSCheckMixin
+
+
+class DTDChecker(Checker, CSSCheckMixin):
+ """Tests to run on DTD files.
+
+ Uses xml.sax for the heavy lifting of xml parsing.
+
+ The code tries to parse until it doesn't find any unresolved entities
+ anymore. If it finds one, it tries to grab the key, and adds an empty
+ <!ENTITY key ""> definition to the header.
+
+ Also checks for some CSS and number heuristics in the values.
+ """
+ pattern = re.compile(r'.*\.dtd$')
+ needs_reference = True # to cast a wider net for known entity references
+
+ eref = re.compile('&(%s);' % DTDParser.Name)
+ tmpl = b'''<!DOCTYPE elem [%s]>
+<elem>%s</elem>
+'''
+ xmllist = set(('amp', 'lt', 'gt', 'apos', 'quot'))
+
+ def __init__(self, extra_tests, locale=None):
+ super(DTDChecker, self).__init__(extra_tests, locale=locale)
+ self.processContent = False
+ if self.extra_tests is not None and 'android-dtd' in self.extra_tests:
+ self.processContent = True
+ self.__known_entities = None
+
+ def known_entities(self, refValue):
+ if self.__known_entities is None and self.reference is not None:
+ self.__known_entities = set()
+ for ent in self.reference.values():
+ self.__known_entities.update(
+ self.entities_for_value(ent.raw_val))
+ return self.__known_entities if self.__known_entities is not None \
+ else self.entities_for_value(refValue)
+
+ def entities_for_value(self, value):
+ reflist = set(m.group(1)
+ for m in self.eref.finditer(value))
+ reflist -= self.xmllist
+ return reflist
+
+ # Setup for XML parser, with default and text-only content handler
+ class TextContent(sax.handler.ContentHandler):
+ textcontent = ''
+
+ def characters(self, content):
+ self.textcontent += content
+
+ defaulthandler = sax.handler.ContentHandler()
+ texthandler = TextContent()
+
+ numPattern = r'([0-9]+|[0-9]*\.[0-9]+)'
+ num = re.compile('^%s$' % numPattern)
+ lengthPattern = '%s(em|px|ch|cm|in)' % numPattern
+ length = re.compile('^%s$' % lengthPattern)
+
+ def check(self, refEnt, l10nEnt):
+ """Try to parse the refvalue inside a dummy element, and keep
+ track of entities that we need to define to make that work.
+
+ Return a checker that offers just those entities.
+ """
+ for encoding_trouble in super(
+ DTDChecker, self
+ ).check(refEnt, l10nEnt):
+ yield encoding_trouble
+ refValue, l10nValue = refEnt.raw_val, l10nEnt.raw_val
+ # find entities the refValue references,
+ # reusing markup from DTDParser.
+ reflist = self.known_entities(refValue)
+ inContext = self.entities_for_value(refValue)
+ entities = ''.join('<!ENTITY %s "">' % s for s in sorted(reflist))
+ parser = sax.make_parser()
+ parser.setFeature(sax.handler.feature_external_ges, False)
+
+ parser.setContentHandler(self.defaulthandler)
+ try:
+ parser.parse(
+ six.BytesIO(self.tmpl %
+ (entities.encode('utf-8'),
+ refValue.encode('utf-8'))))
+ # also catch stray %
+ parser.parse(
+ six.BytesIO(self.tmpl %
+ ((refEnt.all + entities).encode('utf-8'),
+ b'&%s;' % refEnt.key.encode('utf-8'))))
+ except sax.SAXParseException as e:
+ e # noqa
+ yield ('warning',
+ (0, 0),
+ "can't parse en-US value", 'xmlparse')
+
+ # find entities the l10nValue references,
+ # reusing markup from DTDParser.
+ l10nlist = self.entities_for_value(l10nValue)
+ missing = sorted(l10nlist - reflist)
+ _entities = entities + ''.join('<!ENTITY %s "">' % s for s in missing)
+ if self.processContent:
+ self.texthandler.textcontent = ''
+ parser.setContentHandler(self.texthandler)
+ try:
+ parser.parse(six.BytesIO(self.tmpl % (_entities.encode('utf-8'),
+ l10nValue.encode('utf-8'))))
+ # also catch stray %
+            # if this fails, we need to subtract the entity definition
+ parser.setContentHandler(self.defaulthandler)
+ parser.parse(
+ six.BytesIO(self.tmpl %
+ ((l10nEnt.all + _entities).encode('utf-8'),
+ b'&%s;' % l10nEnt.key.encode('utf-8'))))
+ except sax.SAXParseException as e:
+ # xml parse error, yield error
+ # sometimes, the error is reported on our fake closing
+ # element, make that the end of the last line
+ lnr = e.getLineNumber() - 1
+ lines = l10nValue.splitlines()
+ if lnr > len(lines):
+ lnr = len(lines)
+ col = len(lines[lnr-1])
+ else:
+ col = e.getColumnNumber()
+ if lnr == 1:
+                # first line starts with <elem>, subtract
+ col -= len("<elem>")
+ elif lnr == 0:
+ col -= len("<!DOCTYPE elem [") # first line is DOCTYPE
+ yield ('error', (lnr, col), ' '.join(e.args), 'xmlparse')
+
+ warntmpl = u'Referencing unknown entity `%s`'
+ if reflist:
+ if inContext:
+ elsewhere = reflist - inContext
+ warntmpl += ' (%s used in context' % \
+ ', '.join(sorted(inContext))
+ if elsewhere:
+ warntmpl += ', %s known)' % ', '.join(sorted(elsewhere))
+ else:
+ warntmpl += ')'
+ else:
+ warntmpl += ' (%s known)' % ', '.join(sorted(reflist))
+ for key in missing:
+ yield ('warning', (0, 0), warntmpl % key,
+ 'xmlparse')
+ if inContext and l10nlist and l10nlist - inContext - set(missing):
+ mismatch = sorted(l10nlist - inContext - set(missing))
+ for key in mismatch:
+ yield ('warning', (0, 0),
+ 'Entity %s referenced, but %s used in context' % (
+ key,
+ ', '.join(sorted(inContext))
+ ), 'xmlparse')
+
+ # Number check
+ if self.num.match(refValue) and not self.num.match(l10nValue):
+ yield ('warning', 0, 'reference is a number', 'number')
+ # CSS checks
+ # just a length, width="100em"
+ if self.length.match(refValue) and not self.length.match(l10nValue):
+ yield ('error', 0, 'reference is a CSS length', 'css')
+ # Check for actual CSS style attribute values
+ for t in self.maybe_style(refValue, l10nValue):
+ yield t
+
+ if self.extra_tests is not None and 'android-dtd' in self.extra_tests:
+ for t in self.processAndroidContent(self.texthandler.textcontent):
+ yield t
+
+ quoted = re.compile("(?P<q>[\"']).*(?P=q)$")
+
+ def unicode_escape(self, str):
+ """Helper method to try to decode all unicode escapes in a string.
+
+ This code uses the standard python decode for unicode-escape, but
+ that's somewhat tricky, as its input needs to be ascii. To get to
+ ascii, the unicode string gets converted to ascii with
+ backslashreplace, i.e., all non-ascii unicode chars get unicode
+ escaped. And then we try to roll all of that back.
+ Now, when that hits an error, that's from the original string, and we
+ need to search for the actual error position in the original string,
+ as the backslashreplace code changes string positions quite badly.
+ See also the last check in TestAndroid.test_android_dtd, with a
+ lengthy chinese string.
+ """
+ val = str.encode('ascii', 'backslashreplace')
+ try:
+ val.decode('unicode-escape')
+ except UnicodeDecodeError as e:
+ args = list(e.args)
+ badstring = args[1][args[2]:args[3]]
+ i = len(args[1][:args[2]].decode('unicode-escape'))
+ args[2] = i
+ args[3] = i + len(badstring)
+ raise UnicodeDecodeError(*args)
+
+ def processAndroidContent(self, val):
+ """Check for the string values that Android puts into an XML container.
+
+ http://developer.android.com/guide/topics/resources/string-resource.html#FormattingAndStyling # noqa
+
+ Check for unicode escapes and unescaped quotes and apostrophes,
+ if string's not quoted.
+ """
+ # first, try to decode unicode escapes
+ try:
+ self.unicode_escape(val)
+ except UnicodeDecodeError as e:
+ yield ('error', e.args[2], e.args[4], 'android')
+ # check for unescaped single or double quotes.
+ # first, see if the complete string is single or double quoted,
+ # that changes the rules
+ m = self.quoted.match(val)
+ if m:
+ q = m.group('q')
+ offset = 0
+ val = val[1:-1] # strip quotes
+ else:
+ q = "[\"']"
+ offset = -1
+ stray_quot = re.compile(r"[\\\\]*(%s)" % q)
+
+ for m in stray_quot.finditer(val):
+ if len(m.group(0)) % 2:
+ # found an unescaped single or double quote, which message?
+ if m.group(1) == '"':
+ msg = "Quotes in Android DTDs need escaping with \\\" "\
+ "or \\u0022, or put string in apostrophes."
+ else:
+ msg = "Apostrophes in Android DTDs need escaping with "\
+ "\\' or \\u0027, or use \u2019, or put string in "\
+ "quotes."
+ yield ('error', m.end(0)+offset, msg, 'android')
diff --git a/third_party/python/compare-locales/compare_locales/checks/fluent.py b/third_party/python/compare-locales/compare_locales/checks/fluent.py
new file mode 100644
index 0000000000..feb7242fb7
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/checks/fluent.py
@@ -0,0 +1,356 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import re
+from collections import defaultdict
+
+from fluent.syntax import ast as ftl
+from fluent.syntax.serializer import serialize_variant_key
+from fluent.syntax.visitor import Visitor
+
+from .base import Checker, CSSCheckMixin
+from compare_locales import plurals
+
+
+MSGS = {
+ 'missing-msg-ref': 'Missing message reference: {ref}',
+ 'missing-term-ref': 'Missing term reference: {ref}',
+ 'obsolete-msg-ref': 'Obsolete message reference: {ref}',
+ 'obsolete-term-ref': 'Obsolete term reference: {ref}',
+ 'duplicate-attribute': 'Attribute "{name}" is duplicated',
+ 'missing-value': 'Missing value',
+ 'obsolete-value': 'Obsolete value',
+ 'missing-attribute': 'Missing attribute: {name}',
+ 'obsolete-attribute': 'Obsolete attribute: {name}',
+ 'duplicate-variant': 'Variant key "{name}" is duplicated',
+ 'missing-plural': 'Plural categories missing: {categories}',
+ 'plain-message': '{message}',
+}
+
+
+def pattern_variants(pattern):
+ """Get variants of plain text of a pattern.
+
+ For now, just return simple text patterns.
+ This can be improved to allow for SelectExpressions
+ of simple text patterns, or even nested expressions, and Literals.
+ Variants with Variable-, Message-, or TermReferences should be ignored.
+ """
+ elements = pattern.elements
+ if len(elements) == 1:
+ if isinstance(elements[0], ftl.TextElement):
+ return [elements[0].value]
+ return []
+
+
+class ReferenceMessageVisitor(Visitor, CSSCheckMixin):
+ def __init__(self):
+ # References to Messages, their Attributes, and Terms
+ # Store reference name and type
+ self.entry_refs = defaultdict(dict)
+ # The currently active references
+ self.refs = {}
+ # Start with the Entry value (associated with None)
+ self.entry_refs[None] = self.refs
+        # If we're a message, store whether there was a value
+ self.message_has_value = False
+ # Map attribute names to positions
+ self.attribute_positions = {}
+ # Map of CSS style attribute properties and units
+ self.css_styles = None
+ self.css_errors = None
+
+ def generic_visit(self, node):
+ if isinstance(
+ node,
+ (ftl.Span, ftl.Annotation, ftl.BaseComment)
+ ):
+ return
+ super(ReferenceMessageVisitor, self).generic_visit(node)
+
+ def visit_Message(self, node):
+ if node.value is not None:
+ self.message_has_value = True
+ super(ReferenceMessageVisitor, self).generic_visit(node)
+
+ def visit_Attribute(self, node):
+ self.attribute_positions[node.id.name] = node.span.start
+ old_refs = self.refs
+ self.refs = self.entry_refs[node.id.name]
+ super(ReferenceMessageVisitor, self).generic_visit(node)
+ self.refs = old_refs
+ if node.id.name != 'style':
+ return
+ text_values = pattern_variants(node.value)
+ if not text_values:
+ self.css_styles = 'skip'
+ return
+ # right now, there's just one possible text value
+ self.css_styles, self.css_errors = self.parse_css_spec(text_values[0])
+
+ def visit_SelectExpression(self, node):
+ # optimize select expressions to only go through the variants
+ self.visit(node.variants)
+
+ def visit_MessageReference(self, node):
+ ref = node.id.name
+ if node.attribute:
+ ref += '.' + node.attribute.name
+ self.refs[ref] = 'msg-ref'
+
+ def visit_TermReference(self, node):
+ # only collect term references, but not attributes of terms
+ if node.attribute:
+ return
+ self.refs['-' + node.id.name] = 'term-ref'
+
+
+class GenericL10nChecks(object):
+ '''Helper Mixin for checks shared between Terms and Messages.'''
+ def check_duplicate_attributes(self, node):
+ warned = set()
+ for left in range(len(node.attributes) - 1):
+ if left in warned:
+ continue
+ left_attr = node.attributes[left]
+ warned_left = False
+ for right in range(left+1, len(node.attributes)):
+ right_attr = node.attributes[right]
+ if left_attr.id.name == right_attr.id.name:
+ if not warned_left:
+ warned_left = True
+ self.messages.append(
+ (
+ 'warning', left_attr.span.start,
+ MSGS['duplicate-attribute'].format(
+ name=left_attr.id.name
+ )
+ )
+ )
+ warned.add(right)
+ self.messages.append(
+ (
+ 'warning', right_attr.span.start,
+ MSGS['duplicate-attribute'].format(
+ name=left_attr.id.name
+ )
+ )
+ )
+
+ def check_variants(self, variants):
+ # Check for duplicate variants
+ warned = set()
+ for left in range(len(variants) - 1):
+ if left in warned:
+ continue
+ left_key = variants[left].key
+ key_string = None
+ for right in range(left+1, len(variants)):
+ if left_key.equals(variants[right].key):
+ if key_string is None:
+ key_string = serialize_variant_key(left_key)
+ self.messages.append(
+ (
+ 'warning', left_key.span.start,
+ MSGS['duplicate-variant'].format(
+ name=key_string
+ )
+ )
+ )
+ warned.add(right)
+ self.messages.append(
+ (
+ 'warning', variants[right].key.span.start,
+ MSGS['duplicate-variant'].format(
+ name=key_string
+ )
+ )
+ )
+ # Check for plural categories
+ known_plurals = plurals.get_plural(self.locale)
+ if known_plurals:
+ known_plurals = set(known_plurals)
+ # Ask for known plurals, but check for plurals w/out `other`.
+ # `other` is used for all kinds of things.
+ check_plurals = known_plurals.copy()
+ check_plurals.discard('other')
+ given_plurals = set(serialize_variant_key(v.key) for v in variants)
+ if given_plurals & check_plurals:
+ missing_plurals = sorted(known_plurals - given_plurals)
+ if missing_plurals:
+ self.messages.append(
+ (
+ 'warning', variants[0].key.span.start,
+ MSGS['missing-plural'].format(
+ categories=', '.join(missing_plurals)
+ )
+ )
+ )
+
+
+class L10nMessageVisitor(GenericL10nChecks, ReferenceMessageVisitor):
+ def __init__(self, locale, reference):
+ super(L10nMessageVisitor, self).__init__()
+ self.locale = locale
+ # Overload refs to map to sets, just store what we found
+ # References to Messages, their Attributes, and Terms
+ # Store reference name and type
+ self.entry_refs = defaultdict(set)
+ # The currently active references
+ self.refs = set()
+ # Start with the Entry value (associated with None)
+ self.entry_refs[None] = self.refs
+ self.reference = reference
+ self.reference_refs = reference.entry_refs[None]
+ self.messages = []
+
+ def visit_Message(self, node):
+ self.check_duplicate_attributes(node)
+ super(L10nMessageVisitor, self).visit_Message(node)
+ if self.message_has_value and not self.reference.message_has_value:
+ self.messages.append(
+ ('error', node.value.span.start, MSGS['obsolete-value'])
+ )
+ if not self.message_has_value and self.reference.message_has_value:
+ self.messages.append(
+ ('error', 0, MSGS['missing-value'])
+ )
+ ref_attrs = set(self.reference.attribute_positions)
+ l10n_attrs = set(self.attribute_positions)
+ for missing_attr in ref_attrs - l10n_attrs:
+ self.messages.append(
+ (
+ 'error', 0,
+ MSGS['missing-attribute'].format(name=missing_attr)
+ )
+ )
+ for obs_attr in l10n_attrs - ref_attrs:
+ self.messages.append(
+ (
+ 'error', self.attribute_positions[obs_attr],
+ MSGS['obsolete-attribute'].format(name=obs_attr)
+ )
+ )
+
+ def visit_Term(self, node):
+ raise RuntimeError("Should not use L10nMessageVisitor for Terms")
+
+ def visit_Attribute(self, node):
+ old_reference_refs = self.reference_refs
+ self.reference_refs = self.reference.entry_refs[node.id.name]
+ super(L10nMessageVisitor, self).visit_Attribute(node)
+ self.reference_refs = old_reference_refs
+ if node.id.name != 'style' or self.css_styles == 'skip':
+ return
+ ref_styles = self.reference.css_styles
+ if ref_styles in ('skip', None):
+ # Reference is complex, l10n isn't.
+ # Let's still validate the css spec.
+ ref_styles = {}
+ for cat, msg, pos, _ in self.check_style(
+ ref_styles,
+ self.css_styles,
+ self.css_errors
+ ):
+ self.messages.append((cat, msg, pos))
+
+ def visit_SelectExpression(self, node):
+ super(L10nMessageVisitor, self).visit_SelectExpression(node)
+ self.check_variants(node.variants)
+
+ def visit_MessageReference(self, node):
+ ref = node.id.name
+ if node.attribute:
+ ref += '.' + node.attribute.name
+ self.refs.add(ref)
+ self.check_obsolete_ref(node, ref, 'msg-ref')
+
+ def visit_TermReference(self, node):
+ if node.attribute:
+ return
+ ref = '-' + node.id.name
+ self.refs.add(ref)
+ self.check_obsolete_ref(node, ref, 'term-ref')
+
+ def check_obsolete_ref(self, node, ref, ref_type):
+ if ref not in self.reference_refs:
+ self.messages.append(
+ (
+ 'warning', node.span.start,
+ MSGS['obsolete-' + ref_type].format(ref=ref),
+ )
+ )
+
+
+class TermVisitor(GenericL10nChecks, Visitor):
+ def __init__(self, locale):
+ super(TermVisitor, self).__init__()
+ self.locale = locale
+ self.messages = []
+
+ def generic_visit(self, node):
+ if isinstance(
+ node,
+ (ftl.Span, ftl.Annotation, ftl.BaseComment)
+ ):
+ return
+ super(TermVisitor, self).generic_visit(node)
+
+ def visit_Message(self, node):
+ raise RuntimeError("Should not use TermVisitor for Messages")
+
+ def visit_Term(self, node):
+ self.check_duplicate_attributes(node)
+ super(TermVisitor, self).generic_visit(node)
+
+ def visit_SelectExpression(self, node):
+ super(TermVisitor, self).generic_visit(node)
+ self.check_variants(node.variants)
+
+
+class FluentChecker(Checker):
+ '''Tests to run on Fluent (FTL) files.
+ '''
+ pattern = re.compile(r'.*\.ftl')
+
+ def check_message(self, ref_entry, l10n_entry):
+ '''Run checks on localized messages against reference message.'''
+ ref_data = ReferenceMessageVisitor()
+ ref_data.visit(ref_entry)
+ l10n_data = L10nMessageVisitor(self.locale, ref_data)
+ l10n_data.visit(l10n_entry)
+
+ messages = l10n_data.messages
+ for attr_or_val, refs in ref_data.entry_refs.items():
+ for ref, ref_type in refs.items():
+ if ref not in l10n_data.entry_refs[attr_or_val]:
+ msg = MSGS['missing-' + ref_type].format(ref=ref)
+ messages.append(('warning', 0, msg))
+ return messages
+
+ def check_term(self, l10n_entry):
+ '''Check localized terms.'''
+ l10n_data = TermVisitor(self.locale)
+ l10n_data.visit(l10n_entry)
+ return l10n_data.messages
+
+ def check(self, refEnt, l10nEnt):
+ for encoding_trouble in super(
+ FluentChecker, self
+ ).check(refEnt, l10nEnt):
+ yield encoding_trouble
+ l10n_entry = l10nEnt.entry
+ if isinstance(l10n_entry, ftl.Message):
+ ref_entry = refEnt.entry
+ messages = self.check_message(ref_entry, l10n_entry)
+ elif isinstance(l10n_entry, ftl.Term):
+ messages = self.check_term(l10n_entry)
+
+ messages.sort(key=lambda t: t[1])
+ for cat, pos, msg in messages:
+ if pos:
+ pos = pos - l10n_entry.span.start
+ yield (cat, pos, msg, 'fluent')
diff --git a/third_party/python/compare-locales/compare_locales/checks/properties.py b/third_party/python/compare-locales/compare_locales/checks/properties.py
new file mode 100644
index 0000000000..9ff2e4cdae
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/checks/properties.py
@@ -0,0 +1,173 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import re
+from difflib import SequenceMatcher
+from six.moves import range
+from six.moves import zip
+
+from compare_locales.parser import PropertiesEntity
+from compare_locales import plurals
+from .base import Checker
+
+
+class PrintfException(Exception):
+ def __init__(self, msg, pos):
+ self.pos = pos
+ self.msg = msg
+
+
+class PropertiesChecker(Checker):
+ '''Tests to run on .properties files.
+ '''
+ pattern = re.compile(r'.*\.properties$')
+ printf = re.compile(r'%(?P<good>%|'
+ r'(?:(?P<number>[1-9][0-9]*)\$)?'
+ r'(?P<width>\*|[0-9]+)?'
+ r'(?P<prec>\.(?:\*|[0-9]+)?)?'
+ r'(?P<spec>[duxXosScpfg]))?')
+
+ def check(self, refEnt, l10nEnt):
+ '''Test for the different variable formats.
+ '''
+ for encoding_trouble in super(
+ PropertiesChecker, self
+ ).check(refEnt, l10nEnt):
+ yield encoding_trouble
+ refValue, l10nValue = refEnt.val, l10nEnt.val
+ refSpecs = None
+ # check for PluralForm.jsm stuff, should have the docs in the
+ # comment
+ # That also includes intl.properties' pluralRule, so exclude
+ # entities with that key and values with just numbers
+ if (refEnt.pre_comment
+ and 'Localization_and_Plurals' in refEnt.pre_comment.all
+ and refEnt.key != 'pluralRule'
+ and not re.match(r'\d+$', refValue)):
+ for msg_tuple in self.check_plural(refValue, l10nValue):
+ yield msg_tuple
+ return
+ # check for lost escapes
+ raw_val = l10nEnt.raw_val
+ for m in PropertiesEntity.escape.finditer(raw_val):
+ if m.group('single') and \
+ m.group('single') not in PropertiesEntity.known_escapes:
+ yield ('warning', m.start(),
+ 'unknown escape sequence, \\' + m.group('single'),
+ 'escape')
+ try:
+ refSpecs = self.getPrintfSpecs(refValue)
+ except PrintfException:
+ refSpecs = []
+ if refSpecs:
+ for t in self.checkPrintf(refSpecs, l10nValue):
+ yield t
+ return
+
+ def check_plural(self, refValue, l10nValue):
+ '''Check for the stringbundle plurals logic.
+ The common variable pattern is #1.
+ '''
+ known_plurals = plurals.get_plural(self.locale)
+ if known_plurals:
+ expected_forms = len(known_plurals)
+ found_forms = l10nValue.count(';') + 1
+ msg = 'expecting {} plurals, found {}'.format(
+ expected_forms,
+ found_forms
+ )
+ if expected_forms > found_forms:
+ yield ('warning', 0, msg, 'plural')
+ if expected_forms < found_forms:
+ yield ('warning', 0, msg, 'plural')
+ pats = set(int(m.group(1)) for m in re.finditer('#([0-9]+)',
+ refValue))
+ if len(pats) == 0:
+ return
+ lpats = set(int(m.group(1)) for m in re.finditer('#([0-9]+)',
+ l10nValue))
+ if pats - lpats:
+ yield ('warning', 0, 'not all variables used in l10n',
+ 'plural')
+ return
+ if lpats - pats:
+ yield ('error', 0, 'unreplaced variables in l10n',
+ 'plural')
+
+ def checkPrintf(self, refSpecs, l10nValue):
+ try:
+ l10nSpecs = self.getPrintfSpecs(l10nValue)
+ except PrintfException as e:
+ yield ('error', e.pos, e.msg, 'printf')
+ return
+ if refSpecs != l10nSpecs:
+ sm = SequenceMatcher()
+ sm.set_seqs(refSpecs, l10nSpecs)
+ msgs = []
+ warn = None
+ for action, i1, i2, j1, j2 in sm.get_opcodes():
+ if action == 'equal':
+ continue
+ if action == 'delete':
+ # missing argument in l10n
+ if i2 == len(refSpecs):
+ # trailing specs missing, that's just a warning
+ warn = ', '.join('trailing argument %d `%s` missing' %
+ (i+1, refSpecs[i])
+ for i in range(i1, i2))
+ else:
+ for i in range(i1, i2):
+ msgs.append('argument %d `%s` missing' %
+ (i+1, refSpecs[i]))
+ continue
+ if action == 'insert':
+ # obsolete argument in l10n
+ for i in range(j1, j2):
+ msgs.append('argument %d `%s` obsolete' %
+ (i+1, l10nSpecs[i]))
+ continue
+ if action == 'replace':
+ for i, j in zip(range(i1, i2), range(j1, j2)):
+ msgs.append('argument %d `%s` should be `%s`' %
+ (j+1, l10nSpecs[j], refSpecs[i]))
+ if msgs:
+ yield ('error', 0, ', '.join(msgs), 'printf')
+ if warn is not None:
+ yield ('warning', 0, warn, 'printf')
+
+ def getPrintfSpecs(self, val):
+ hasNumber = False
+ specs = []
+ for m in self.printf.finditer(val):
+ if m.group("good") is None:
+ # found just a '%', signal an error
+ raise PrintfException('Found single %', m.start())
+ if m.group("good") == '%':
+ # escaped %
+ continue
+ if ((hasNumber and m.group('number') is None) or
+ (not hasNumber and specs and
+ m.group('number') is not None)):
+ # mixed style, numbered and not
+ raise PrintfException('Mixed ordered and non-ordered args',
+ m.start())
+ hasNumber = m.group('number') is not None
+ if hasNumber:
+ pos = int(m.group('number')) - 1
+ ls = len(specs)
+ if pos >= ls:
+ # pad specs
+ nones = pos - ls
+ specs[ls:pos] = nones*[None]
+ specs.append(m.group('spec'))
+ else:
+ specs[pos] = m.group('spec')
+ else:
+ specs.append(m.group('spec'))
+ # check for missing args
+ if hasNumber and not all(specs):
+ raise PrintfException('Ordered argument missing', 0)
+ return specs
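The printf handling can be probed without a full reference/l10n entity pair; a brief sketch with invented format strings:

```python
from compare_locales.checks.properties import PropertiesChecker

checker = PropertiesChecker(None)
# Ordered arguments come back as a list indexed by argument position.
print(checker.getPrintfSpecs('%1$S moved to %2$S'))  # ['S', 'S']
# Mixing ordered and unordered arguments raises PrintfException instead.
```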
diff --git a/third_party/python/compare-locales/compare_locales/commands.py b/third_party/python/compare-locales/compare_locales/commands.py
new file mode 100644
index 0000000000..c2a2f2fe01
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/commands.py
@@ -0,0 +1,205 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'Commands exposed to commandlines'
+
+from __future__ import absolute_import
+from __future__ import print_function
+import logging
+from argparse import ArgumentParser
+from json import dump as json_dump
+import os
+import sys
+
+from compare_locales import mozpath
+from compare_locales import version
+from compare_locales.paths import EnumerateApp, TOMLParser, ConfigNotFound
+from compare_locales.compare import compareProjects
+
+
+class CompareLocales(object):
+ """Check the localization status of gecko applications.
+The first arguments are paths to the l10n.toml or ini files for the
+applications, followed by the base directory of the localization repositories.
+Then you pass in the list of locale codes you want to compare. If there are
+no locales given, the list of locales will be taken from the l10n.toml file
+or the all-locales file referenced by the application\'s l10n.ini."""
+
+ def __init__(self):
+ self.parser = self.get_parser()
+
+ def get_parser(self):
+ """Get an ArgumentParser, with class docstring as description.
+ """
+ parser = ArgumentParser(description=self.__doc__)
+ parser.add_argument('--version', action='version',
+ version='%(prog)s ' + version)
+ parser.add_argument('-v', '--verbose', action='count',
+ default=0, help='Make more noise')
+ parser.add_argument('-q', '--quiet', action='count',
+ default=0, help='''Show less data.
+Specified once, don't show obsolete entities. Specified twice, also hide
+missing entities. Specify thrice to exclude warnings and four times to
+just show stats''')
+ parser.add_argument('--validate', action='store_true',
+ help='Run compare-locales against reference')
+ parser.add_argument('-m', '--merge',
+ help='''Use this directory to stage merged files,
+use {ab_CD} to specify a different directory for each locale''')
+ parser.add_argument('config_paths', metavar='l10n.toml', nargs='+',
+ help='TOML or INI file for the project')
+ parser.add_argument('l10n_base_dir', metavar='l10n-base-dir',
+ help='Parent directory of localizations')
+ parser.add_argument('locales', nargs='*', metavar='locale-code',
+ help='Locale code and top-level directory of '
+ 'each localization')
+ parser.add_argument('--json',
+ help='''Serialize to JSON. Value is the name of
+the output file, pass "-" to serialize to stdout and hide the default output.
+''')
+ parser.add_argument('-D', action='append', metavar='var=value',
+ default=[], dest='defines',
+ help='Overwrite variables in TOML files')
+ parser.add_argument('--full', action="store_true",
+ help="Compare sub-projects that are disabled")
+ parser.add_argument('--return-zero', action="store_true",
+ help="Return 0 regardless of l10n status")
+ parser.add_argument('--clobber-merge', action="store_true",
+ default=False, dest='clobber',
+ help="""WARNING: DATALOSS.
+Use this option with care. If specified, the merge directory will
+be clobbered for each module. That means the subdirectory will
+be completely removed; any files that were there are lost.
+Be careful to specify the right merge directory when using this option.""")
+ return parser
+
+ @classmethod
+ def call(cls):
+        """Entry point for setuptools.
+ The actual command handling is done in the handle() method of the
+ subclasses.
+ """
+ cmd = cls()
+ args = cmd.parser.parse_args()
+ return cmd.handle(**vars(args))
+
+ def handle(
+ self,
+ quiet=0, verbose=0,
+ validate=False,
+ merge=None,
+ config_paths=[], l10n_base_dir=None, locales=[],
+ defines=[],
+ full=False,
+ return_zero=False,
+ clobber=False,
+ json=None,
+ ):
+ """The instance part of the classmethod call.
+
+ Using keyword arguments as that is what we need for mach
+ commands in mozilla-central.
+ """
+ # log as verbose or quiet as we want, warn by default
+ logging_level = logging.WARNING - (verbose - quiet) * 10
+ logging.basicConfig()
+ logging.getLogger().setLevel(logging_level)
+
+ config_paths, l10n_base_dir, locales = self.extract_positionals(
+ validate=validate,
+ config_paths=config_paths,
+ l10n_base_dir=l10n_base_dir,
+ locales=locales,
+ )
+
+ # when we compare disabled projects, we set our locales
+ # on all subconfigs, so deep is True.
+ locales_deep = full
+ configs = []
+ config_env = {
+ 'l10n_base': l10n_base_dir
+ }
+ for define in defines:
+ var, _, value = define.partition('=')
+ config_env[var] = value
+ for config_path in config_paths:
+ if config_path.endswith('.toml'):
+ try:
+ config = TOMLParser().parse(config_path, env=config_env)
+ except ConfigNotFound as e:
+ self.parser.exit('config file %s not found' % e.filename)
+ if locales_deep:
+ if not locales:
+ # no explicit locales given, force all locales
+ config.set_locales(config.all_locales, deep=True)
+ else:
+ config.set_locales(locales, deep=True)
+ configs.append(config)
+ else:
+ app = EnumerateApp(config_path, l10n_base_dir)
+ configs.append(app.asConfig())
+ try:
+ observers = compareProjects(
+ configs,
+ locales,
+ l10n_base_dir,
+ quiet=quiet,
+ merge_stage=merge, clobber_merge=clobber)
+ except (OSError, IOError) as exc:
+ print("FAIL: " + str(exc))
+ self.parser.exit(2)
+
+ if json is None or json != '-':
+ details = observers.serializeDetails()
+ if details:
+ print(details)
+ if len(configs) > 1:
+ if details:
+ print('')
+ print("Summaries for")
+ for config_path in config_paths:
+ print(" " + config_path)
+ print(" and the union of these, counting each string once")
+ print(observers.serializeSummaries())
+ if json is not None:
+ data = [observer.toJSON() for observer in observers]
+ stdout = json == '-'
+ indent = 1 if stdout else None
+ fh = sys.stdout if stdout else open(json, 'w')
+ json_dump(data, fh, sort_keys=True, indent=indent)
+ if stdout:
+ fh.write('\n')
+ fh.close()
+ rv = 1 if not return_zero and observers.error else 0
+ return rv
+
+ def extract_positionals(
+ self,
+ validate=False,
+ config_paths=[], l10n_base_dir=None, locales=[],
+ ):
+ # using nargs multiple times in argparser totally screws things
+ # up, repair that.
+ # First files are configs, then the base dir, everything else is
+ # locales
+ all_args = config_paths + [l10n_base_dir] + locales
+ config_paths = []
+ # The first directory is our l10n base, split there.
+ while all_args and not os.path.isdir(all_args[0]):
+ config_paths.append(all_args.pop(0))
+ if not config_paths:
+ self.parser.error('no configuration file given')
+ for cf in config_paths:
+ if not os.path.isfile(cf):
+ self.parser.error('config file %s not found' % cf)
+ if not all_args:
+ self.parser.error('l10n-base-dir not found')
+ l10n_base_dir = mozpath.abspath(all_args.pop(0))
+ if validate:
+ # signal validation mode by setting locale list to [None]
+ locales = [None]
+ else:
+ locales = all_args
+
+ return config_paths, l10n_base_dir, locales
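Because `handle()` takes keyword arguments for mach's benefit, the command can also be driven programmatically. A sketch, assuming a TOML config and an l10n checkout actually exist at these invented paths:

```python
from compare_locales.commands import CompareLocales

# Roughly equivalent to: compare-locales l10n.toml ../l10n-central de fr
status = CompareLocales().handle(
    config_paths=['l10n.toml'],        # hypothetical project config
    l10n_base_dir='../l10n-central',   # hypothetical localization checkout
    locales=['de', 'fr'],
)
print('clean' if status == 0 else 'l10n issues found')
```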
diff --git a/third_party/python/compare-locales/compare_locales/compare/__init__.py b/third_party/python/compare-locales/compare_locales/compare/__init__.py
new file mode 100644
index 0000000000..434dab9553
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/compare/__init__.py
@@ -0,0 +1,91 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'Mozilla l10n compare locales tool'
+
+from __future__ import absolute_import
+from __future__ import print_function
+import os
+import shutil
+
+from compare_locales import paths, mozpath
+
+from .content import ContentComparer
+from .observer import Observer, ObserverList
+from .utils import Tree, AddRemove
+
+
+__all__ = [
+ 'ContentComparer',
+ 'Observer', 'ObserverList',
+ 'AddRemove', 'Tree',
+ 'compareProjects',
+]
+
+
+def compareProjects(
+ project_configs,
+ locales,
+ l10n_base_dir,
+ stat_observer=None,
+ merge_stage=None,
+ clobber_merge=False,
+ quiet=0,
+ ):
+ all_locales = set(locales)
+ comparer = ContentComparer(quiet)
+ observers = comparer.observers
+ for project in project_configs:
+ # disable filter if we're in validation mode
+ if None in locales:
+ filter = None
+ else:
+ filter = project.filter
+ observers.append(
+ Observer(
+ quiet=quiet,
+ filter=filter,
+ ))
+ if not locales:
+ all_locales.update(project.all_locales)
+ for locale in sorted(all_locales):
+ files = paths.ProjectFiles(locale, project_configs,
+ mergebase=merge_stage)
+ if merge_stage is not None:
+ if clobber_merge:
+ mergematchers = set(_m.get('merge') for _m in files.matchers)
+ mergematchers.discard(None)
+ for matcher in mergematchers:
+ clobberdir = matcher.prefix
+ if os.path.exists(clobberdir):
+ shutil.rmtree(clobberdir)
+ print("clobbered " + clobberdir)
+ for l10npath, refpath, mergepath, extra_tests in files:
+ # module and file path are needed for legacy filter.py support
+ module = None
+ fpath = mozpath.relpath(l10npath, l10n_base_dir)
+ for _m in files.matchers:
+ if _m['l10n'].match(l10npath):
+ if _m['module']:
+ # legacy ini support, set module, and resolve
+ # local path against the matcher prefix,
+ # which includes the module
+ module = _m['module']
+ fpath = mozpath.relpath(l10npath, _m['l10n'].prefix)
+ break
+ reffile = paths.File(refpath, fpath or refpath, module=module)
+ if locale is None:
+ # When validating the reference files, set locale
+ # to a private subtag. This only shows in the output.
+ locale = paths.REFERENCE_LOCALE
+ l10n = paths.File(l10npath, fpath or l10npath,
+ module=module, locale=locale)
+ if not os.path.exists(l10npath):
+ comparer.add(reffile, l10n, mergepath)
+ continue
+ if not os.path.exists(refpath):
+ comparer.remove(reffile, l10n, mergepath)
+ continue
+ comparer.compare(reffile, l10n, mergepath, extra_tests)
+ return observers
diff --git a/third_party/python/compare-locales/compare_locales/compare/content.py b/third_party/python/compare-locales/compare_locales/compare/content.py
new file mode 100644
index 0000000000..03ba222d8e
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/compare/content.py
@@ -0,0 +1,307 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'Mozilla l10n compare locales tool'
+
+from __future__ import absolute_import
+from __future__ import print_function
+import codecs
+import os
+import shutil
+import re
+
+from compare_locales import parser
+from compare_locales import mozpath
+from compare_locales.checks import getChecker, EntityPos
+from compare_locales.keyedtuple import KeyedTuple
+
+from .observer import ObserverList
+from .utils import AddRemove
+
+
+class ContentComparer:
+ keyRE = re.compile('[kK]ey')
+ nl = re.compile('\n', re.M)
+
+ def __init__(self, quiet=0):
+ '''Create a ContentComparer.
+ observer is usually an instance of Observer. The return values
+ of the notify method are used to control the handling of missing
+ entities.
+ '''
+ self.observers = ObserverList(quiet=quiet)
+
+ def create_merge_dir(self, merge_file):
+ outdir = mozpath.dirname(merge_file)
+ if not os.path.isdir(outdir):
+ os.makedirs(outdir)
+
+ def merge(self, ref_entities, ref_file, l10n_file, merge_file,
+ missing, skips, ctx, capabilities, encoding):
+ '''Create localized file in merge dir
+
+ `ref_entities` and `ref_map` are the parser result of the
+ reference file
+ `ref_file` and `l10n_file` are the File objects for the reference and
+ the l10n file, resp.
+ `merge_file` is the output path for the generated content. This is None
+ if we're just comparing or validating.
+ `missing` are the missing messages in l10n - potentially copied from
+ reference
+ `skips` are entries to be dropped from the localized file
+ `ctx` is the parsing context
+ `capabilities` are the capabilities for the merge algorithm
+ `encoding` is the encoding to be used when serializing, usually utf-8
+ '''
+
+ if not merge_file:
+ return
+
+ if capabilities == parser.CAN_NONE:
+ return
+
+ self.create_merge_dir(merge_file)
+
+ if capabilities & parser.CAN_COPY:
+ # copy the l10n file if it's good, or the reference file if not
+ if skips or missing:
+ src = ref_file.fullpath
+ else:
+ src = l10n_file.fullpath
+ shutil.copyfile(src, merge_file)
+ print("copied reference to " + merge_file)
+ return
+
+ if not (capabilities & parser.CAN_SKIP):
+ return
+
+ # Start with None in case the merge file doesn't need to be created.
+ f = None
+
+ if skips:
+ # skips come in ordered by key name, we need them in file order
+ skips.sort(key=lambda s: s.span[0])
+
+ # we need to skip a few erroneous blocks in the input, copy by hand
+ f = codecs.open(merge_file, 'wb', encoding)
+ offset = 0
+ for skip in skips:
+ chunk = skip.span
+ f.write(ctx.contents[offset:chunk[0]])
+ offset = chunk[1]
+ f.write(ctx.contents[offset:])
+
+ if f is None:
+ # l10n file is a good starting point
+ shutil.copyfile(l10n_file.fullpath, merge_file)
+
+ if not (capabilities & parser.CAN_MERGE):
+ if f:
+ f.close()
+ return
+
+ if skips or missing:
+ if f is None:
+ f = codecs.open(merge_file, 'ab', encoding)
+ trailing = (['\n'] +
+ [ref_entities[key].all for key in missing] +
+ [ref_entities[skip.key].all for skip in skips
+ if not isinstance(skip, parser.Junk)])
+
+ def ensureNewline(s):
+ if not s.endswith('\n'):
+ return s + '\n'
+ return s
+
+ print("adding to " + merge_file)
+ f.write(''.join(map(ensureNewline, trailing)))
+
+ if f is not None:
+ f.close()
+
+ def remove(self, ref_file, l10n, merge_file):
+ '''Obsolete l10n file.
+
+ Copy to merge stage if we can.
+ '''
+ self.observers.notify('obsoleteFile', l10n, None)
+ self.merge(
+ KeyedTuple([]), ref_file, l10n, merge_file,
+ [], [], None, parser.CAN_COPY, None
+ )
+
+ def compare(self, ref_file, l10n, merge_file, extra_tests=None):
+ try:
+ p = parser.getParser(ref_file.file)
+ except UserWarning:
+ # no comparison, XXX report?
+ # At least, merge
+ self.merge(
+ KeyedTuple([]), ref_file, l10n, merge_file, [], [], None,
+ parser.CAN_COPY, None)
+ return
+ try:
+ p.readFile(ref_file)
+ except Exception as e:
+ self.observers.notify('error', ref_file, str(e))
+ return
+ ref_entities = p.parse()
+ try:
+ p.readFile(l10n)
+ l10n_entities = p.parse()
+ l10n_ctx = p.ctx
+ except Exception as e:
+ self.observers.notify('error', l10n, str(e))
+ return
+
+ ar = AddRemove()
+ ar.set_left(ref_entities.keys())
+ ar.set_right(l10n_entities.keys())
+ report = missing = obsolete = changed = unchanged = keys = 0
+ missing_w = changed_w = unchanged_w = 0 # word stats
+ missings = []
+ skips = []
+ checker = getChecker(l10n, extra_tests=extra_tests)
+ if checker and checker.needs_reference:
+ checker.set_reference(ref_entities)
+ for msg in p.findDuplicates(ref_entities):
+ self.observers.notify('warning', l10n, msg)
+ for msg in p.findDuplicates(l10n_entities):
+ self.observers.notify('error', l10n, msg)
+ for action, entity_id in ar:
+ if action == 'delete':
+ # missing entity
+ if isinstance(ref_entities[entity_id], parser.Junk):
+ self.observers.notify(
+ 'warning', l10n, 'Parser error in en-US'
+ )
+ continue
+ _rv = self.observers.notify('missingEntity', l10n, entity_id)
+ if _rv == "ignore":
+ continue
+ if _rv == "error":
+ # only add to missing entities for l10n-merge on error,
+ # not report
+ missings.append(entity_id)
+ missing += 1
+ refent = ref_entities[entity_id]
+ missing_w += refent.count_words()
+ else:
+ # just report
+ report += 1
+ elif action == 'add':
+ # obsolete entity or junk
+ if isinstance(l10n_entities[entity_id],
+ parser.Junk):
+ junk = l10n_entities[entity_id]
+ self.observers.notify(
+ 'error', l10n,
+ junk.error_message()
+ )
+ if merge_file is not None:
+ skips.append(junk)
+ elif (
+ self.observers.notify('obsoleteEntity', l10n, entity_id)
+ != 'ignore'
+ ):
+ obsolete += 1
+ else:
+ # entity found in both ref and l10n, check for changed
+ refent = ref_entities[entity_id]
+ l10nent = l10n_entities[entity_id]
+ if self.keyRE.search(entity_id):
+ keys += 1
+ else:
+ if refent.equals(l10nent):
+ self.doUnchanged(l10nent)
+ unchanged += 1
+ unchanged_w += refent.count_words()
+ else:
+ self.doChanged(ref_file, refent, l10nent)
+ changed += 1
+ changed_w += refent.count_words()
+ # run checks:
+ if checker:
+ for tp, pos, msg, cat in checker.check(refent, l10nent):
+ if isinstance(pos, EntityPos):
+ line, col = l10nent.position(pos)
+ else:
+ line, col = l10nent.value_position(pos)
+ # skip error entities when merging
+ if tp == 'error' and merge_file is not None:
+ skips.append(l10nent)
+ self.observers.notify(
+ tp, l10n,
+ u"%s at line %d, column %d for %s" %
+ (msg, line, col, refent.key)
+ )
+ pass
+
+ if merge_file is not None:
+ self.merge(
+ ref_entities, ref_file,
+ l10n, merge_file, missings, skips, l10n_ctx,
+ p.capabilities, p.encoding)
+
+ stats = {
+ 'missing': missing,
+ 'missing_w': missing_w,
+ 'report': report,
+ 'obsolete': obsolete,
+ 'changed': changed,
+ 'changed_w': changed_w,
+ 'unchanged': unchanged,
+ 'unchanged_w': unchanged_w,
+ 'keys': keys,
+ }
+ self.observers.updateStats(l10n, stats)
+ pass
+
+ def add(self, orig, missing, merge_file):
+ ''' Add missing localized file.'''
+ f = orig
+ try:
+ p = parser.getParser(f.file)
+ except UserWarning:
+ p = None
+
+ # if we don't support this file, assume CAN_COPY to mimic
+ # l10n dir as closely as possible
+ caps = p.capabilities if p else parser.CAN_COPY
+ if (caps & (parser.CAN_COPY | parser.CAN_MERGE)):
+ # even if we can merge, pretend we can only copy
+ self.merge(
+ KeyedTuple([]), orig, missing, merge_file,
+ ['trigger copy'], [], None, parser.CAN_COPY, None
+ )
+
+ if self.observers.notify('missingFile', missing, None) == "ignore":
+ # filter said that we don't need this file, don't count it
+ return
+
+ if p is None:
+ # We don't have a parser, cannot count missing strings
+ return
+
+ try:
+ p.readFile(f)
+ entities = p.parse()
+ except Exception as ex:
+ self.observers.notify('error', f, str(ex))
+ return
+ # strip parse errors
+ entities = [e for e in entities if not isinstance(e, parser.Junk)]
+ self.observers.updateStats(missing, {'missing': len(entities)})
+ missing_w = 0
+ for e in entities:
+ missing_w += e.count_words()
+ self.observers.updateStats(missing, {'missing_w': missing_w})
+
+ def doUnchanged(self, entity):
+ # overload this if needed
+ pass
+
+ def doChanged(self, file, ref_entity, l10n_entity):
+ # overload this if needed
+ pass
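
A minimal sketch of driving ContentComparer for a single reference/localization pair; the file paths are hypothetical placeholders, and merge_file=None keeps it in compare-only mode.

    from compare_locales.compare.content import ContentComparer
    from compare_locales.paths import File

    cc = ContentComparer(quiet=0)
    ref = File('en-US/toolkit/menu.properties', 'toolkit/menu.properties')
    l10n = File('de/toolkit/menu.properties', 'toolkit/menu.properties', locale='de')
    cc.compare(ref, l10n, None)             # merge_file=None: compare, don't write merges
    print(cc.observers.serializeDetails())  # per-file findings collected by the observers
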
diff --git a/third_party/python/compare-locales/compare_locales/compare/observer.py b/third_party/python/compare-locales/compare_locales/compare/observer.py
new file mode 100644
index 0000000000..7301d9a356
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/compare/observer.py
@@ -0,0 +1,218 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'Mozilla l10n compare locales tool'
+
+from __future__ import absolute_import
+from __future__ import print_function
+from collections import defaultdict
+import six
+
+from .utils import Tree
+
+
+class Observer(object):
+
+ def __init__(self, quiet=0, filter=None):
+ '''Create Observer
+ For quiet=1, skip per-entity and per-file obsolete reports;
+ for quiet=2, also skip missing entities and files; for quiet=3,
+ also skip warnings; for quiet=4, also skip errors.
+ '''
+ self.summary = defaultdict(lambda: {
+ "errors": 0,
+ "warnings": 0,
+ "missing": 0,
+ "missing_w": 0,
+ "report": 0,
+ "obsolete": 0,
+ "changed": 0,
+ "changed_w": 0,
+ "unchanged": 0,
+ "unchanged_w": 0,
+ "keys": 0,
+ })
+ self.details = Tree(list)
+ self.quiet = quiet
+ self.filter = filter
+ self.error = False
+
+ def _dictify(self, d):
+ plaindict = {}
+ for k, v in six.iteritems(d):
+ plaindict[k] = dict(v)
+ return plaindict
+
+ def toJSON(self):
+ # Don't export file stats, even if we collected them.
+ # Those are not part of the data we use toJSON for.
+ return {
+ 'summary': self._dictify(self.summary),
+ 'details': self.details.toJSON()
+ }
+
+ def updateStats(self, file, stats):
+ # in multi-project scenarios, this file might not be ours,
+ # check that.
+ # Pass in a dummy entity key '' to avoid getting into
+ # generic file filters. If we have stats for those,
+ # we want to aggregate the counts
+ if (self.filter is not None and
+ self.filter(file, entity='') == 'ignore'):
+ return
+ for category, value in six.iteritems(stats):
+ if category == 'errors':
+ # updateStats isn't called with `errors`, but make sure
+ # we handle this if that changes
+ self.error = True
+ self.summary[file.locale][category] += value
+
+ def notify(self, category, file, data):
+ rv = 'error'
+ if category in ['missingFile', 'obsoleteFile']:
+ if self.filter is not None:
+ rv = self.filter(file)
+ if rv == "ignore" or self.quiet >= 2:
+ return rv
+ if self.quiet == 0 or category == 'missingFile':
+ self.details[file].append({category: rv})
+ return rv
+ if self.filter is not None:
+ rv = self.filter(file, data)
+ if rv == "ignore":
+ return rv
+ if category in ['missingEntity', 'obsoleteEntity']:
+ if (
+ (category == 'missingEntity' and self.quiet < 2)
+ or (category == 'obsoleteEntity' and self.quiet < 1)
+ ):
+ self.details[file].append({category: data})
+ return rv
+ if category == 'error':
+ # Set error independently of quiet
+ self.error = True
+ if category in ('error', 'warning'):
+ if (
+ (category == 'error' and self.quiet < 4)
+ or (category == 'warning' and self.quiet < 3)
+ ):
+ self.details[file].append({category: data})
+ self.summary[file.locale][category + 's'] += 1
+ return rv
+
+
+class ObserverList(Observer):
+ def __init__(self, quiet=0):
+ super(ObserverList, self).__init__(quiet=quiet)
+ self.observers = []
+
+ def __iter__(self):
+ return iter(self.observers)
+
+ def append(self, observer):
+ self.observers.append(observer)
+
+ def notify(self, category, file, data):
+ """Check observer for the found data, and if it's
+ not to ignore, notify stat_observers.
+ """
+ rvs = set(
+ observer.notify(category, file, data)
+ for observer in self.observers
+ )
+ if all(rv == 'ignore' for rv in rvs):
+ return 'ignore'
+ # our return value doesn't count
+ super(ObserverList, self).notify(category, file, data)
+ rvs.discard('ignore')
+ if 'error' in rvs:
+ return 'error'
+ assert len(rvs) == 1
+ return rvs.pop()
+
+ def updateStats(self, file, stats):
+ """Check observer for the found data, and if it's
+ not to ignore, notify stat_observers.
+ """
+ for observer in self.observers:
+ observer.updateStats(file, stats)
+ super(ObserverList, self).updateStats(file, stats)
+
+ def serializeDetails(self):
+
+ def tostr(t):
+ if t[1] == 'key':
+ return ' ' * t[0] + '/'.join(t[2])
+ o = []
+ indent = ' ' * (t[0] + 1)
+ for item in t[2]:
+ if 'error' in item:
+ o += [indent + 'ERROR: ' + item['error']]
+ elif 'warning' in item:
+ o += [indent + 'WARNING: ' + item['warning']]
+ elif 'missingEntity' in item:
+ o += [indent + '+' + item['missingEntity']]
+ elif 'obsoleteEntity' in item:
+ o += [indent + '-' + item['obsoleteEntity']]
+ elif 'missingFile' in item:
+ o.append(indent + '// add and localize this file')
+ elif 'obsoleteFile' in item:
+ o.append(indent + '// remove this file')
+ return '\n'.join(o)
+
+ return '\n'.join(tostr(c) for c in self.details.getContent())
+
+ def serializeSummaries(self):
+ summaries = {
+ loc: []
+ for loc in self.summary.keys()
+ }
+ for observer in self.observers:
+ for loc, lst in summaries.items():
+ # Not all locales are on all projects,
+ # default to empty summary
+ lst.append(observer.summary.get(loc, {}))
+ if len(self.observers) > 1:
+ # add ourselves if there's more than one project
+ for loc, lst in summaries.items():
+ lst.append(self.summary[loc])
+ keys = (
+ 'errors',
+ 'warnings',
+ 'missing', 'missing_w',
+ 'obsolete',
+ 'changed', 'changed_w',
+ 'unchanged', 'unchanged_w',
+ 'keys',
+ )
+ leads = [
+ '{:12}'.format(k) for k in keys
+ ]
+ out = []
+ for locale, summaries in sorted(six.iteritems(summaries)):
+ if locale:
+ out.append(locale + ':')
+ segment = [''] * len(keys)
+ for summary in summaries:
+ for row, key in enumerate(keys):
+ segment[row] += ' {:6}'.format(summary.get(key) or '')
+
+ out += [
+ lead + row
+ for lead, row in zip(leads, segment)
+ if row.strip()
+ ]
+
+ total = sum([summaries[-1].get(k, 0)
+ for k in ['changed', 'unchanged', 'report', 'missing']
+ ])
+ rate = 0
+ if total:
+ rate = (('changed' in summary and summary['changed'] * 100) or
+ 0) / total
+ out.append('%d%% of entries changed' % rate)
+ return '\n'.join(out)
+
+ def __str__(self):
+ return 'observer'
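
A small sketch of the Observer protocol: notify() returns the filter verdict ('error' when no filter is set) and records details, while updateStats() aggregates per-locale counters. The File path is a hypothetical placeholder.

    from compare_locales.compare.observer import Observer
    from compare_locales.paths import File

    obs = Observer(quiet=0)
    f = File('de/toolkit/menu.properties', 'toolkit/menu.properties', locale='de')
    rv = obs.notify('missingEntity', f, 'menu-quit-label')  # 'error' without a filter
    obs.updateStats(f, {'missing': 1, 'missing_w': 2})
    print(rv, obs.toJSON())
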
diff --git a/third_party/python/compare-locales/compare_locales/compare/utils.py b/third_party/python/compare-locales/compare_locales/compare/utils.py
new file mode 100644
index 0000000000..5d79b5c47d
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/compare/utils.py
@@ -0,0 +1,140 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'Mozilla l10n compare locales tool'
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import six
+from six.moves import zip
+
+from compare_locales import paths
+
+
+class Tree(object):
+ def __init__(self, valuetype):
+ self.branches = dict()
+ self.valuetype = valuetype
+ self.value = None
+
+ def __getitem__(self, leaf):
+ parts = []
+ if isinstance(leaf, paths.File):
+ parts = []
+ if leaf.module:
+ parts += [leaf.locale] + leaf.module.split('/')
+ parts += leaf.file.split('/')
+ else:
+ parts = leaf.split('/')
+ return self.__get(parts)
+
+ def __get(self, parts):
+ common = None
+ old = None
+ new = tuple(parts)
+ t = self
+ for k, v in six.iteritems(self.branches):
+ for i, part in enumerate(zip(k, parts)):
+ if part[0] != part[1]:
+ i -= 1
+ break
+ if i < 0:
+ continue
+ i += 1
+ common = tuple(k[:i])
+ old = tuple(k[i:])
+ new = tuple(parts[i:])
+ break
+ if old:
+ self.branches.pop(k)
+ t = Tree(self.valuetype)
+ t.branches[old] = v
+ self.branches[common] = t
+ elif common:
+ t = self.branches[common]
+ if new:
+ if common:
+ return t.__get(new)
+ t2 = t
+ t = Tree(self.valuetype)
+ t2.branches[new] = t
+ if t.value is None:
+ t.value = t.valuetype()
+ return t.value
+
+ indent = ' '
+
+ def getContent(self, depth=0):
+ '''
+ Returns iterator of (depth, flag, key_or_value) tuples.
+ If flag is 'value', key_or_value is a value object, otherwise
+ (flag is 'key') it's a key string.
+ '''
+ keys = sorted(self.branches.keys())
+ if self.value is not None:
+ yield (depth, 'value', self.value)
+ for key in keys:
+ yield (depth, 'key', key)
+ for child in self.branches[key].getContent(depth + 1):
+ yield child
+
+ def toJSON(self):
+ '''
+ Returns this Tree as a JSON-able tree of hashes.
+ Only the values need to take care that they're JSON-able.
+ '''
+ if self.value is not None:
+ return self.value
+ return dict(('/'.join(key), self.branches[key].toJSON())
+ for key in self.branches.keys())
+
+ def getStrRows(self):
+ def tostr(t):
+ if t[1] == 'key':
+ return self.indent * t[0] + '/'.join(t[2])
+ return self.indent * (t[0] + 1) + str(t[2])
+
+ return [tostr(c) for c in self.getContent()]
+
+ def __str__(self):
+ return '\n'.join(self.getStrRows())
+
+
+class AddRemove(object):
+ def __init__(self):
+ self.left = self.right = None
+
+ def set_left(self, left):
+ if not isinstance(left, list):
+ left = list(l for l in left)
+ self.left = left
+
+ def set_right(self, right):
+ if not isinstance(right, list):
+ right = list(l for l in right)
+ self.right = right
+
+ def __iter__(self):
+ # order_map stores index in left and then index in right
+ order_map = dict((item, (i, -1)) for i, item in enumerate(self.left))
+ left_items = set(order_map)
+ # as we go through the right side, keep track of which left
+ # item we had in right last, and for items not in left,
+ # set the sortmap to (left_offset, right_index)
+ left_offset = -1
+ right_items = set()
+ for i, item in enumerate(self.right):
+ right_items.add(item)
+ if item in order_map:
+ left_offset = order_map[item][0]
+ else:
+ order_map[item] = (left_offset, i)
+ for item in sorted(order_map, key=lambda item: order_map[item]):
+ if item in left_items and item in right_items:
+ yield ('equal', item)
+ elif item in left_items:
+ yield ('delete', item)
+ else:
+ yield ('add', item)
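
AddRemove on its own, showing the ordered three-way classification it yields; the key lists stand in for reference and localization entity keys.

    from compare_locales.compare.utils import AddRemove

    ar = AddRemove()
    ar.set_left(['a', 'b', 'c'])    # reference keys, in file order
    ar.set_right(['a', 'c', 'd'])   # localization keys, in file order
    print(list(ar))
    # [('equal', 'a'), ('delete', 'b'), ('equal', 'c'), ('add', 'd')]
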
diff --git a/third_party/python/compare-locales/compare_locales/integration_tests/__init__.py b/third_party/python/compare-locales/compare_locales/integration_tests/__init__.py
new file mode 100644
index 0000000000..ba9db8b8ec
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/integration_tests/__init__.py
@@ -0,0 +1,5 @@
+'''Tests that are not run by default.
+
+They might just take long, or depend on external services, or both.
+They might also fail for external changes.
+'''
diff --git a/third_party/python/compare-locales/compare_locales/integration_tests/test_plurals.py b/third_party/python/compare-locales/compare_locales/integration_tests/test_plurals.py
new file mode 100644
index 0000000000..b36c41222b
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/integration_tests/test_plurals.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import unittest
+from six.moves.urllib.error import URLError
+from six.moves.urllib.request import urlopen
+
+from compare_locales import plurals
+
+
+TRANSVISION_URL = (
+ 'https://transvision.mozfr.org/'
+ 'api/v1/entity/gecko_strings/'
+ '?id=toolkit/chrome/global/intl.properties:pluralRule'
+)
+
+
+class TestPlural(unittest.TestCase):
+ '''Integration test for plural forms and l10n-central.
+
+ Having more plural forms than in l10n-central is OK, missing or
+ mismatching ones isn't.
+ Depends on Transvision.
+ '''
+ maxDiff = None
+
+ def test_valid_forms(self):
+ reference_form_map = self._load_transvision()
+ # Strip matches from dicts, to make diff for test small
+ locales = list(reference_form_map)
+ cl_form_map = {}
+ for locale in locales:
+ cl_form = str(plurals.get_plural_rule(locale))
+ if cl_form == reference_form_map[locale]:
+ reference_form_map.pop(locale)
+ else:
+ cl_form_map[locale] = cl_form
+ self.assertDictEqual(reference_form_map, cl_form_map)
+
+ def _load_transvision(self):
+ '''Use the Transvision API to load all values of pluralRule
+ in intl.properties.
+ Skip test on load failure.
+ '''
+ try:
+ data = urlopen(TRANSVISION_URL).read()
+ except URLError:
+ raise unittest.SkipTest("Couldn't load Transvision API.")
+ return json.loads(data)
diff --git a/third_party/python/compare-locales/compare_locales/keyedtuple.py b/third_party/python/compare-locales/compare_locales/keyedtuple.py
new file mode 100644
index 0000000000..c232cc63c7
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/keyedtuple.py
@@ -0,0 +1,58 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''A tuple with keys.
+
+A Sequence type that allows referring to its elements by key.
+Making this immutable, 'cause keeping track of mutations is hard.
+
+compare-locales uses strings for Entity keys, and tuples in the
+case of PO. Support both.
+
+In the interfaces that check for membership, dicts check keys and
+sequences check values. Always try our dict cache `__map` first,
+and fall back to the superclass implementation.
+'''
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+
+class KeyedTuple(tuple):
+
+ def __new__(cls, iterable):
+ return super(KeyedTuple, cls).__new__(cls, iterable)
+
+ def __init__(self, iterable):
+ self.__map = {}
+ if iterable:
+ for index, item in enumerate(self):
+ self.__map[item.key] = index
+
+ def __contains__(self, key):
+ try:
+ contains = key in self.__map
+ if contains:
+ return True
+ except TypeError:
+ pass
+ return super(KeyedTuple, self).__contains__(key)
+
+ def __getitem__(self, key):
+ try:
+ key = self.__map[key]
+ except (KeyError, TypeError):
+ pass
+ return super(KeyedTuple, self).__getitem__(key)
+
+ def keys(self):
+ for value in self:
+ yield value.key
+
+ def items(self):
+ for value in self:
+ yield value.key, value
+
+ def values(self):
+ return self
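
A quick sketch of KeyedTuple semantics, using LiteralEntity from parser.base as a simple keyed item.

    from compare_locales.keyedtuple import KeyedTuple
    from compare_locales.parser.base import LiteralEntity

    entities = KeyedTuple([
        LiteralEntity('hello', 'Hello', 'hello = Hello'),
        LiteralEntity('bye', 'Bye', 'bye = Bye'),
    ])
    print('hello' in entities)      # True, membership checks keys first
    print(entities['bye'].val)      # 'Bye', lookup by key
    print(entities[0].key)          # 'hello', positional lookup still works
    print(list(entities.keys()))    # ['hello', 'bye']
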
diff --git a/third_party/python/compare-locales/compare_locales/lint/__init__.py b/third_party/python/compare-locales/compare_locales/lint/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/lint/__init__.py
diff --git a/third_party/python/compare-locales/compare_locales/lint/cli.py b/third_party/python/compare-locales/compare_locales/lint/cli.py
new file mode 100644
index 0000000000..35c026ee22
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/lint/cli.py
@@ -0,0 +1,95 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import argparse
+import os
+
+from compare_locales.lint.linter import L10nLinter
+from compare_locales.lint.util import (
+ default_reference_and_tests,
+ mirror_reference_and_tests,
+ l10n_base_reference_and_tests,
+)
+from compare_locales import mozpath
+from compare_locales import paths
+from compare_locales import parser
+from compare_locales import version
+
+
+epilog = '''\
+moz-l10n-lint checks for common mistakes in localizable files. It tests for
+duplicate entries, parsing errors, and the like. Optionally, it can compare
+the strings to an external reference with strings and warn if a string might
+need to get a new ID.
+'''
+
+
+def main():
+ p = argparse.ArgumentParser(
+ description='Validate localizable strings',
+ epilog=epilog,
+ )
+ p.add_argument('l10n_toml')
+ p.add_argument(
+ '--version', action='version', version='%(prog)s ' + version
+ )
+ p.add_argument('-W', action='store_true', help='error on warnings')
+ p.add_argument(
+ '--l10n-reference',
+ dest='l10n_reference',
+ metavar='PATH',
+ help='check for conflicts against an l10n-only reference repository '
+ 'like gecko-strings',
+ )
+ p.add_argument(
+ '--reference-project',
+ dest='ref_project',
+ metavar='PATH',
+ help='check for conflicts against a reference project like '
+ 'android-l10n',
+ )
+ args = p.parse_args()
+ if args.l10n_reference:
+ l10n_base, locale = \
+ os.path.split(os.path.abspath(args.l10n_reference))
+ if not locale or not os.path.isdir(args.l10n_reference):
+ p.error('Pass an existing l10n reference')
+ else:
+ l10n_base = '.'
+ locale = None
+ pc = paths.TOMLParser().parse(args.l10n_toml, env={'l10n_base': l10n_base})
+ if locale:
+ pc.set_locales([locale], deep=True)
+ files = paths.ProjectFiles(locale, [pc])
+ get_reference_and_tests = default_reference_and_tests
+ if args.l10n_reference:
+ get_reference_and_tests = l10n_base_reference_and_tests(files)
+ elif args.ref_project:
+ get_reference_and_tests = mirror_reference_and_tests(
+ files, args.ref_project
+ )
+ linter = L10nLinter()
+ results = linter.lint(
+ (f for f, _, _, _ in files.iter_reference() if parser.hasParser(f)),
+ get_reference_and_tests
+ )
+ rv = 0
+ if results:
+ rv = 1
+ if all(r['level'] == 'warning' for r in results) and not args.W:
+ rv = 0
+ for result in results:
+ print('{} ({}:{}): {}'.format(
+ mozpath.relpath(result['path'], '.'),
+ result.get('lineno', 0),
+ result.get('column', 0),
+ result['message']
+ ))
+ return rv
+
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/python/compare-locales/compare_locales/lint/linter.py b/third_party/python/compare-locales/compare_locales/lint/linter.py
new file mode 100644
index 0000000000..682d6e2ccf
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/lint/linter.py
@@ -0,0 +1,123 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+from collections import Counter
+import os
+
+from compare_locales import parser, checks
+from compare_locales.paths import File, REFERENCE_LOCALE
+
+
+class L10nLinter(object):
+
+ def lint(self, files, get_reference_and_tests):
+ results = []
+ for path in files:
+ if not parser.hasParser(path):
+ continue
+ ref, extra_tests = get_reference_and_tests(path)
+ results.extend(self.lint_file(path, ref, extra_tests))
+ return results
+
+ def lint_file(self, path, ref, extra_tests):
+ file_parser = parser.getParser(path)
+ if ref is not None and os.path.isfile(ref):
+ file_parser.readFile(ref)
+ reference = file_parser.parse()
+ else:
+ reference = {}
+ file_parser.readFile(path)
+ current = file_parser.parse()
+ checker = checks.getChecker(
+ File(path, path, locale=REFERENCE_LOCALE),
+ extra_tests=extra_tests
+ )
+ if checker and checker.needs_reference:
+ checker.set_reference(current)
+ linter = EntityLinter(current, checker, reference)
+ for current_entity in current:
+ for result in linter.lint_entity(current_entity):
+ result['path'] = path
+ yield result
+
+
+class EntityLinter(object):
+ '''Factored out helper to run linters on a single entity.'''
+ def __init__(self, current, checker, reference):
+ self.key_count = Counter(entity.key for entity in current)
+ self.checker = checker
+ self.reference = reference
+
+ def lint_entity(self, current_entity):
+ res = self.handle_junk(current_entity)
+ if res:
+ yield res
+ return
+ for res in self.lint_full_entity(current_entity):
+ yield res
+ for res in self.lint_value(current_entity):
+ yield res
+
+ def lint_full_entity(self, current_entity):
+ '''Checks that apply to the entity as a whole,
+ without a particular position inside the entity.
+ '''
+ lineno = col = None
+ if self.key_count[current_entity.key] > 1:
+ lineno, col = current_entity.position()
+ yield {
+ 'lineno': lineno,
+ 'column': col,
+ 'level': 'error',
+ 'message': 'Duplicate string with ID: {}'.format(
+ current_entity.key
+ )
+ }
+
+ if current_entity.key in self.reference:
+ reference_entity = self.reference[current_entity.key]
+ if not current_entity.equals(reference_entity):
+ if lineno is None:
+ lineno, col = current_entity.position()
+ msg = 'Changes to string require a new ID: {}'.format(
+ current_entity.key
+ )
+ yield {
+ 'lineno': lineno,
+ 'column': col,
+ 'level': 'warning',
+ 'message': msg,
+ }
+
+ def lint_value(self, current_entity):
+ '''Checks that report errors at particular locations in the entity value.
+ '''
+ if self.checker:
+ for tp, pos, msg, cat in self.checker.check(
+ current_entity, current_entity
+ ):
+ if isinstance(pos, checks.EntityPos):
+ lineno, col = current_entity.position(pos)
+ else:
+ lineno, col = current_entity.value_position(pos)
+ yield {
+ 'lineno': lineno,
+ 'column': col,
+ 'level': tp,
+ 'message': msg,
+ }
+
+ def handle_junk(self, current_entity):
+ if not isinstance(current_entity, parser.Junk):
+ return None
+
+ lineno, col = current_entity.position()
+ return {
+ 'lineno': lineno,
+ 'column': col,
+ 'level': 'error',
+ 'message': current_entity.error_message()
+ }
diff --git a/third_party/python/compare-locales/compare_locales/lint/util.py b/third_party/python/compare-locales/compare_locales/lint/util.py
new file mode 100644
index 0000000000..0b2557dfdd
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/lint/util.py
@@ -0,0 +1,40 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+from compare_locales import paths
+
+
+def default_reference_and_tests(path):
+ return None, None
+
+
+def mirror_reference_and_tests(files, basedir):
+ '''Get reference files to check for conflicts in android-l10n and friends.
+ '''
+ def get_reference_and_tests(path):
+ for matchers in files.matchers:
+ if 'reference' not in matchers:
+ continue
+ matcher = matchers['reference']
+ if matcher.match(path) is None:
+ continue
+ ref_matcher = paths.Matcher(matcher, root=basedir)
+ ref_path = matcher.sub(ref_matcher, path)
+ return ref_path, matchers.get('test')
+ return None, None
+ return get_reference_and_tests
+
+
+def l10n_base_reference_and_tests(files):
+ '''Get reference files to check for conflicts in gecko-strings and friends.
+ '''
+ def get_reference_and_tests(path):
+ match = files.match(path)
+ if match is None:
+ return None, None
+ ref, _, _, extra_tests = match
+ return ref, extra_tests
+ return get_reference_and_tests
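
A sketch of linting a single file with no external reference, combining L10nLinter with the default resolver above; 'app.properties' is a hypothetical localizable file that would need to exist on disk.

    from compare_locales.lint.linter import L10nLinter
    from compare_locales.lint.util import default_reference_and_tests

    results = L10nLinter().lint(['app.properties'], default_reference_and_tests)
    for r in results:
        print('{path}:{lineno}:{column} {level}: {message}'.format(**r))
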
diff --git a/third_party/python/compare-locales/compare_locales/merge.py b/third_party/python/compare-locales/compare_locales/merge.py
new file mode 100644
index 0000000000..9399e639e0
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/merge.py
@@ -0,0 +1,143 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''Merge resources across channels.
+
+Merging resources is done over a series of parsed resources, or source
+strings.
+The nomenclature is that the resources are ordered from newest to oldest.
+The generated file structure is taken from the newest file, and then the
+next-newest, etc. The values of the returned entities are taken from the
+newest to the oldest resource, too.
+
+In merge_resources, there's an option to choose the values from oldest
+to newest instead.
+'''
+
+from collections import OrderedDict, defaultdict
+from codecs import encode
+import six
+
+
+from compare_locales import parser as cl
+from compare_locales.parser.base import StickyEntry
+from compare_locales.compare.utils import AddRemove
+
+
+class MergeNotSupportedError(ValueError):
+ pass
+
+
+def merge_channels(name, resources):
+ try:
+ parser = cl.getParser(name)
+ except UserWarning:
+ raise MergeNotSupportedError(
+ 'Unsupported file format ({}).'.format(name))
+
+ entities = merge_resources(parser, resources)
+ return encode(serialize_legacy_resource(entities), parser.encoding)
+
+
+def merge_resources(parser, resources, keep_newest=True):
+ '''Merge parsed or unparsed resources, returning an iterable of Entities.
+
+ Resources are ordered from newest to oldest in the input. The structure
+ of the generated content is taken from the newest resource first, and
+ then filled by the next etc.
+ Values are also taken from the newest, unless keep_newest is False,
+ then values are taken from the oldest first.
+ '''
+
+ def parse_resource(resource):
+ # The counter dict keeps track of number of identical comments.
+ counter = defaultdict(int)
+ if isinstance(resource, bytes):
+ parser.readContents(resource)
+ resource = parser.walk()
+ pairs = [get_key_value(entity, counter) for entity in resource]
+ return OrderedDict(pairs)
+
+ def get_key_value(entity, counter):
+ if isinstance(entity, cl.Comment):
+ counter[entity.val] += 1
+ # Use the (value, index) tuple as the key. AddRemove will
+ # de-duplicate identical comments at the same index.
+ return ((entity.val, counter[entity.val]), entity)
+
+ if isinstance(entity, cl.Whitespace):
+ # Use the Whitespace instance as the key so that it's always
+ # unique. Adjacent whitespace will be folded into the longer one in
+ # prune.
+ return (entity, entity)
+
+ return (entity.key, entity)
+
+ entities = six.moves.reduce(
+ lambda x, y: merge_two(x, y, keep_newer=keep_newest),
+ map(parse_resource, resources))
+ return entities.values()
+
+
+def merge_two(newer, older, keep_newer=True):
+ '''Merge two OrderedDicts.
+
+ The order of the result dict is determined by `newer`.
+ The values in the dict are the newer ones by default, too.
+ If `keep_newer` is False, the values will be taken from the older
+ dict.
+ '''
+ diff = AddRemove()
+ diff.set_left(newer.keys())
+ diff.set_right(older.keys())
+
+ # Create a flat sequence of all entities in order reported by AddRemove.
+ get_entity = get_newer_entity if keep_newer else get_older_entity
+ contents = [(key, get_entity(newer, older, key)) for _, key in diff]
+
+ def prune(acc, cur):
+ _, entity = cur
+ if entity is None:
+ # Prune Nones which stand for duplicated comments.
+ return acc
+
+ if len(acc) and isinstance(entity, cl.Whitespace):
+ _, prev_entity = acc[-1]
+
+ if isinstance(prev_entity, cl.Whitespace):
+ # Prefer the longer whitespace.
+ if len(entity.all) > len(prev_entity.all):
+ acc[-1] = (entity, entity)
+ return acc
+
+ acc.append(cur)
+ return acc
+
+ pruned = six.moves.reduce(prune, contents, [])
+ return OrderedDict(pruned)
+
+
+def get_newer_entity(newer, older, key):
+ entity = newer.get(key, None)
+
+ # Always prefer the newer version.
+ if entity is not None:
+ return entity
+
+ return older.get(key)
+
+
+def get_older_entity(newer, older, key):
+ entity = older.get(key, None)
+
+ # If we don't have an older version, or it's a StickyEntry,
+ # get a newer version
+ if entity is None or isinstance(entity, StickyEntry):
+ return newer.get(key)
+
+ return entity
+
+
+def serialize_legacy_resource(entities):
+ return "".join((entity.all for entity in entities))
diff --git a/third_party/python/compare-locales/compare_locales/mozpath.py b/third_party/python/compare-locales/compare_locales/mozpath.py
new file mode 100644
index 0000000000..7280e25279
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/mozpath.py
@@ -0,0 +1,155 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''
+Like :py:mod:`os.path`, with a reduced set of functions, and with normalized
+path separators (always use forward slashes).
+Also contains a few additional utilities not found in :py:mod:`os.path`.
+'''
+
+from __future__ import absolute_import
+
+import posixpath
+import os
+import re
+
+
+def normsep(path):
+ '''
+ Normalize path separators, by using forward slashes instead of whatever
+ :py:const:`os.sep` is.
+ '''
+ if os.sep != '/':
+ path = path.replace(os.sep, '/')
+ if os.altsep and os.altsep != '/':
+ path = path.replace(os.altsep, '/')
+ return path
+
+
+def relpath(path, start):
+ rel = normsep(os.path.relpath(path, start))
+ return '' if rel == '.' else rel
+
+
+def realpath(path):
+ return normsep(os.path.realpath(path))
+
+
+def abspath(path):
+ return normsep(os.path.abspath(path))
+
+
+def join(*paths):
+ return normsep(os.path.join(*paths))
+
+
+def normpath(path):
+ return posixpath.normpath(normsep(path))
+
+
+def dirname(path):
+ return posixpath.dirname(normsep(path))
+
+
+def commonprefix(paths):
+ return posixpath.commonprefix([normsep(path) for path in paths])
+
+
+def basename(path):
+ return os.path.basename(path)
+
+
+def splitext(path):
+ return posixpath.splitext(normsep(path))
+
+
+def split(path):
+ '''
+ Return the normalized path as a list of its components.
+
+ ``split('foo/bar/baz')`` returns ``['foo', 'bar', 'baz']``
+ '''
+ return normsep(path).split('/')
+
+
+def basedir(path, bases):
+ '''
+ Given a list of directories (`bases`), return which one contains the given
+ path. If several matches are found, the deepest base directory is returned.
+
+ ``basedir('foo/bar/baz', ['foo', 'baz', 'foo/bar'])`` returns ``'foo/bar'``
+ (`'foo'` and `'foo/bar'` both match, but `'foo/bar'` is the deepest match)
+ '''
+ path = normsep(path)
+ bases = [normsep(b) for b in bases]
+ if path in bases:
+ return path
+ for b in sorted(bases, reverse=True):
+ if b == '' or path.startswith(b + '/'):
+ return b
+
+
+re_cache = {}
+
+
+def match(path, pattern):
+ '''
+ Return whether the given path matches the given pattern.
+ An asterisk can be used to match any string, including the null string, in
+ one part of the path:
+
+ ``foo`` matches ``*``, ``f*`` or ``fo*o``
+
+ However, an asterisk matching a subdirectory may not match the null string:
+
+ ``foo/bar`` does *not* match ``foo/*/bar``
+
+ If the pattern matches one of the ancestor directories of the path, the
+ path is considered matching:
+
+ ``foo/bar`` matches ``foo``
+
+ Two adjacent asterisks can be used to match files and zero or more
+ directories and subdirectories.
+
+ ``foo/bar`` matches ``foo/**/bar``, or ``**/bar``
+ '''
+ if not pattern:
+ return True
+ if pattern not in re_cache:
+ last_end = 0
+ p = ''
+ for m in re.finditer(r'(?:(^|/)\*\*(/|$))|(?P<star>\*)', pattern):
+ if m.start() > last_end:
+ p += re.escape(pattern[last_end:m.start()])
+ if m.group('star'):
+ p += '[^/]*'
+ elif m.group(2):
+ p += re.escape(m.group(1)) + r'(?:.+%s)?' % m.group(2)
+ else:
+ p += r'(?:%s.+)?' % re.escape(m.group(1))
+ last_end = m.end()
+ p += re.escape(pattern[last_end:]) + '(?:/.*)?$'
+ re_cache[pattern] = re.compile(p)
+ return re_cache[pattern].match(path) is not None
+
+
+def rebase(oldbase, base, relativepath):
+ '''
+ Return `relativepath` relative to `base` instead of `oldbase`.
+ '''
+ if base == oldbase:
+ return relativepath
+ if len(base) < len(oldbase):
+ assert basedir(oldbase, [base]) == base
+ relbase = relpath(oldbase, base)
+ result = join(relbase, relativepath)
+ else:
+ assert basedir(base, [oldbase]) == oldbase
+ relbase = relpath(base, oldbase)
+ result = relpath(relativepath, relbase)
+ result = normpath(result)
+ if relativepath.endswith('/') and not result.endswith('/'):
+ result += '/'
+ return result
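
The mozpath helpers exercised directly; the behaviour shown follows the docstrings above.

    from compare_locales import mozpath

    print(mozpath.match('foo/bar', 'foo/**/bar'))              # True
    print(mozpath.match('foo/bar', 'foo/*/bar'))               # False
    print(mozpath.basedir('foo/bar/baz', ['foo', 'foo/bar']))  # 'foo/bar', deepest base wins
    print(mozpath.rebase('foo', 'foo/bar', 'bar/baz'))         # 'baz'
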
diff --git a/third_party/python/compare-locales/compare_locales/parser/__init__.py b/third_party/python/compare-locales/compare_locales/parser/__init__.py
new file mode 100644
index 0000000000..8ab36cb082
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/parser/__init__.py
@@ -0,0 +1,83 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import re
+
+from .base import (
+ CAN_NONE, CAN_COPY, CAN_SKIP, CAN_MERGE,
+ Entry, Entity, Comment, OffsetComment, Junk, Whitespace,
+ BadEntity, Parser,
+)
+from .android import (
+ AndroidParser
+)
+from .defines import (
+ DefinesParser, DefinesInstruction
+)
+from .dtd import (
+ DTDEntity, DTDParser
+)
+from .fluent import (
+ FluentParser, FluentComment, FluentEntity, FluentMessage, FluentTerm,
+)
+from .ini import (
+ IniParser, IniSection,
+)
+from .po import (
+ PoParser
+)
+from .properties import (
+ PropertiesParser, PropertiesEntity
+)
+
+__all__ = [
+ "CAN_NONE", "CAN_COPY", "CAN_SKIP", "CAN_MERGE",
+ "Junk", "Entry", "Entity", "Whitespace", "Comment", "OffsetComment",
+ "BadEntity", "Parser",
+ "AndroidParser",
+ "DefinesParser", "DefinesInstruction",
+ "DTDParser", "DTDEntity",
+ "FluentParser", "FluentComment", "FluentEntity",
+ "FluentMessage", "FluentTerm",
+ "IniParser", "IniSection",
+ "PoParser",
+ "PropertiesParser", "PropertiesEntity",
+]
+
+__constructors = []
+
+
+def getParser(path):
+ for item in __constructors:
+ if re.search(item[0], path):
+ return item[1]
+ try:
+ from pkg_resources import iter_entry_points
+ for entry_point in iter_entry_points('compare_locales.parsers'):
+ p = entry_point.resolve()()
+ if p.use(path):
+ return p
+ except (ImportError, IOError):
+ pass
+ raise UserWarning("Cannot find Parser")
+
+
+def hasParser(path):
+ try:
+ return bool(getParser(path))
+ except UserWarning:
+ return False
+
+
+__constructors = [
+ ('strings.*\\.xml$', AndroidParser()),
+ ('\\.dtd$', DTDParser()),
+ ('\\.properties$', PropertiesParser()),
+ ('\\.ini$', IniParser()),
+ ('\\.inc$', DefinesParser()),
+ ('\\.ftl$', FluentParser()),
+ ('\\.pot?$', PoParser()),
+]
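
Parser dispatch by file name, as registered in __constructors above; the path and contents are made up.

    from compare_locales import parser

    p = parser.getParser('chrome/global/findbar.properties')   # a PropertiesParser
    p.readContents(b'NotFound=Phrase not found\n')
    for entity in p:                       # iterates localizable entries only
        print(entity.key, '->', entity.val)
    print(parser.hasParser('README.md'))   # False, no registered pattern matches
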
diff --git a/third_party/python/compare-locales/compare_locales/parser/android.py b/third_party/python/compare-locales/compare_locales/parser/android.py
new file mode 100644
index 0000000000..a6ad2f5bff
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/parser/android.py
@@ -0,0 +1,305 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""Android strings.xml parser
+
+Parses strings.xml files per
+https://developer.android.com/guide/topics/resources/localization.
+As we're using a built-in XML parser underneath, errors on that level
+break the full parsing, and result in a single Junk entry.
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import re
+from xml.dom import minidom
+from xml.dom.minidom import Node
+
+from .base import (
+ CAN_SKIP,
+ Entity, Comment, Junk, Whitespace,
+ StickyEntry, LiteralEntity,
+ Parser
+)
+
+
+class AndroidEntity(Entity):
+ def __init__(
+ self, ctx, pre_comment, white_space, node, all, key, raw_val, val
+ ):
+ # fill out the superclass as well as we can right now
+ # most spans can get modified at endElement
+ super(AndroidEntity, self).__init__(
+ ctx, pre_comment, white_space,
+ (None, None),
+ (None, None),
+ (None, None)
+ )
+ self.node = node
+ self._all_literal = all
+ self._key_literal = key
+ self._raw_val_literal = raw_val
+ self._val_literal = val
+
+ @property
+ def all(self):
+ chunks = []
+ if self.pre_comment is not None:
+ chunks.append(self.pre_comment.all)
+ if self.inner_white is not None:
+ chunks.append(self.inner_white.all)
+ chunks.append(self._all_literal)
+ return ''.join(chunks)
+
+ @property
+ def key(self):
+ return self._key_literal
+
+ @property
+ def raw_val(self):
+ return self._raw_val_literal
+
+ def position(self, offset=0):
+ return (0, offset)
+
+ def value_position(self, offset=0):
+ return (0, offset)
+
+ def wrap(self, raw_val):
+ clone = self.node.cloneNode(True)
+ if clone.childNodes.length == 1:
+ child = clone.childNodes[0]
+ else:
+ for child in clone.childNodes:
+ if child.nodeType == Node.CDATA_SECTION_NODE:
+ break
+ child.data = raw_val
+ all = []
+ if self.pre_comment is not None:
+ all.append(self.pre_comment.all)
+ if self.inner_white is not None:
+ all.append(self.inner_white.all)
+ all.append(clone.toxml())
+ return LiteralEntity(self.key, raw_val, ''.join(all))
+
+
+class NodeMixin(object):
+ def __init__(self, all, value):
+ self._all_literal = all
+ self._val_literal = value
+
+ @property
+ def all(self):
+ return self._all_literal
+
+ @property
+ def key(self):
+ return self._all_literal
+
+ @property
+ def raw_val(self):
+ return self._val_literal
+
+ def position(self, offset=0):
+ return (0, offset)
+
+ def value_position(self, offset=0):
+ return (0, offset)
+
+
+class XMLWhitespace(NodeMixin, Whitespace):
+ pass
+
+
+class XMLComment(NodeMixin, Comment):
+ @property
+ def val(self):
+ return self._val_literal
+
+ @property
+ def key(self):
+ return None
+
+
+# DocumentWrapper is sticky in serialization.
+# Always keep the one from the reference document.
+class DocumentWrapper(NodeMixin, StickyEntry):
+ def __init__(self, key, all):
+ self._all_literal = all
+ self._val_literal = all
+ self._key_literal = key
+
+ @property
+ def key(self):
+ return self._key_literal
+
+
+class XMLJunk(Junk):
+ def __init__(self, all):
+ super(XMLJunk, self).__init__(None, (0, 0))
+ self._all_literal = all
+
+ @property
+ def all(self):
+ return self._all_literal
+
+ def position(self, offset=0):
+ return (0, offset)
+
+ def value_position(self, offset=0):
+ return (0, offset)
+
+
+def textContent(node):
+ if node.childNodes.length == 0:
+ return ''
+ for child in node.childNodes:
+ if child.nodeType == minidom.Node.CDATA_SECTION_NODE:
+ return child.data
+ if (
+ node.childNodes.length != 1 or
+ node.childNodes[0].nodeType != minidom.Node.TEXT_NODE
+ ):
+ # Return something, we'll fail in checks on this
+ return node.toxml()
+ return node.childNodes[0].data
+
+
+NEWLINE = re.compile(r'[ \t]*\n[ \t]*')
+
+
+def normalize(val):
+ return NEWLINE.sub('\n', val.strip(' \t'))
+
+
+class AndroidParser(Parser):
+ # Android does l10n fallback at runtime, don't merge en-US strings
+ capabilities = CAN_SKIP
+
+ def __init__(self):
+ super(AndroidParser, self).__init__()
+ self.last_comment = None
+
+ def walk(self, only_localizable=False):
+ if not self.ctx:
+ # loading file failed, or we just didn't load anything
+ return
+ ctx = self.ctx
+ contents = ctx.contents
+ try:
+ doc = minidom.parseString(contents.encode('utf-8'))
+ except Exception:
+ yield XMLJunk(contents)
+ return
+ docElement = doc.documentElement
+ if docElement.nodeName != 'resources':
+ yield XMLJunk(doc.toxml())
+ return
+ root_children = docElement.childNodes
+ if not only_localizable:
+ yield DocumentWrapper(
+ '<?xml?><resources>',
+ '<?xml version="1.0" encoding="utf-8"?>\n<resources'
+ )
+ for attr_name, attr_value in docElement.attributes.items():
+ yield DocumentWrapper(
+ attr_name,
+ ' {}="{}"'.format(attr_name, attr_value)
+ )
+ yield DocumentWrapper('>', '>')
+ child_num = 0
+ while child_num < len(root_children):
+ node = root_children[child_num]
+ if node.nodeType == Node.COMMENT_NODE:
+ current_comment, child_num = self.handleComment(
+ node, root_children, child_num
+ )
+ if child_num < len(root_children):
+ node = root_children[child_num]
+ else:
+ if not only_localizable:
+ yield current_comment
+ break
+ else:
+ current_comment = None
+ if node.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
+ white_space = XMLWhitespace(node.toxml(), node.nodeValue)
+ child_num += 1
+ if current_comment is None:
+ if not only_localizable:
+ yield white_space
+ continue
+ if node.nodeValue.count('\n') > 1:
+ if not only_localizable:
+ if current_comment is not None:
+ yield current_comment
+ yield white_space
+ continue
+ if child_num < len(root_children):
+ node = root_children[child_num]
+ else:
+ if not only_localizable:
+ if current_comment is not None:
+ yield current_comment
+ yield white_space
+ break
+ else:
+ white_space = None
+ if node.nodeType == Node.ELEMENT_NODE:
+ yield self.handleElement(node, current_comment, white_space)
+ else:
+ if not only_localizable:
+ if current_comment:
+ yield current_comment
+ if white_space:
+ yield white_space
+ child_num += 1
+ if not only_localizable:
+ yield DocumentWrapper('</resources>', '</resources>\n')
+
+ def handleElement(self, element, current_comment, white_space):
+ if element.nodeName == 'string' and element.hasAttribute('name'):
+ return AndroidEntity(
+ self.ctx,
+ current_comment,
+ white_space,
+ element,
+ element.toxml(),
+ element.getAttribute('name'),
+ textContent(element),
+ ''.join(c.toxml() for c in element.childNodes)
+ )
+ else:
+ return XMLJunk(element.toxml())
+
+ def handleComment(self, node, root_children, child_num):
+ all = node.toxml()
+ val = normalize(node.nodeValue)
+ while True:
+ child_num += 1
+ if child_num >= len(root_children):
+ break
+ node = root_children[child_num]
+ if node.nodeType == Node.TEXT_NODE:
+ if node.nodeValue.count('\n') > 1:
+ break
+ white = node
+ child_num += 1
+ if child_num >= len(root_children):
+ break
+ node = root_children[child_num]
+ else:
+ white = None
+ if node.nodeType != Node.COMMENT_NODE:
+ if white is not None:
+ # do not consume this node
+ child_num -= 1
+ break
+ if white:
+ all += white.toxml()
+ val += normalize(white.nodeValue)
+ all += node.toxml()
+ val += normalize(node.nodeValue)
+ return XMLComment(all, val), child_num
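
A short sketch of AndroidParser on a minimal strings.xml resource (content made up).

    from compare_locales.parser.android import AndroidParser

    ANDROID_XML = b'''<?xml version="1.0" encoding="utf-8"?>
    <resources>
        <!-- Shown in the about dialog -->
        <string name="app_name">My App</string>
    </resources>
    '''

    p = AndroidParser()
    p.readContents(ANDROID_XML)
    for entity in p:                         # only localizable entries
        print(entity.key, '->', entity.val)  # app_name -> My App
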
diff --git a/third_party/python/compare-locales/compare_locales/parser/base.py b/third_party/python/compare-locales/compare_locales/parser/base.py
new file mode 100644
index 0000000000..efc6119222
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/parser/base.py
@@ -0,0 +1,451 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import re
+import bisect
+import codecs
+from collections import Counter
+from compare_locales.keyedtuple import KeyedTuple
+from compare_locales.paths import File
+
+import six
+
+__constructors = []
+
+
+# The allowed capabilities for the Parsers. They define the exact strategy
+# used by ContentComparer.merge.
+
+# Don't perform any merging
+CAN_NONE = 0
+# Copy the entire reference file
+CAN_COPY = 1
+# Remove broken entities from localization
+# Without CAN_MERGE, en-US is not good to use for localization.
+CAN_SKIP = 2
+# Add missing and broken entities from the reference to localization
+# This effectively means that en-US is good to use for localized files.
+CAN_MERGE = 4
+
+
+class Entry(object):
+ '''
+ Abstraction layer for a localizable entity.
+ Currently supported are grammars of the form:
+
+ 1: entity definition
+ 2: entity key (name)
+ 3: entity value
+
+ <!ENTITY key "value">
+
+ <--- definition ---->
+ '''
+ def __init__(
+ self, ctx, pre_comment, inner_white, span, key_span, val_span
+ ):
+ self.ctx = ctx
+ self.span = span
+ self.key_span = key_span
+ self.val_span = val_span
+ self.pre_comment = pre_comment
+ self.inner_white = inner_white
+
+ def position(self, offset=0):
+ """Get the 1-based line and column of the character
+ with given offset into the Entity.
+
+ If offset is negative, return the end of the Entity.
+ """
+ if offset < 0:
+ pos = self.span[1]
+ else:
+ pos = self.span[0] + offset
+ return self.ctx.linecol(pos)
+
+ def value_position(self, offset=0):
+ """Get the 1-based line and column of the character
+ with given offset into the value.
+
+ If offset is negative, return the end of the value.
+ """
+ assert self.val_span is not None
+ if offset < 0:
+ pos = self.val_span[1]
+ else:
+ pos = self.val_span[0] + offset
+ return self.ctx.linecol(pos)
+
+ def _span_start(self):
+ start = self.span[0]
+ if hasattr(self, 'pre_comment') and self.pre_comment is not None:
+ start = self.pre_comment.span[0]
+ return start
+
+ @property
+ def all(self):
+ start = self._span_start()
+ end = self.span[1]
+ return self.ctx.contents[start:end]
+
+ @property
+ def key(self):
+ return self.ctx.contents[self.key_span[0]:self.key_span[1]]
+
+ @property
+ def raw_val(self):
+ if self.val_span is None:
+ return None
+ return self.ctx.contents[self.val_span[0]:self.val_span[1]]
+
+ @property
+ def val(self):
+ return self.raw_val
+
+ def __repr__(self):
+ return self.key
+
+ re_br = re.compile('<br[ \t\r\n]*/?>', re.U)
+ re_sgml = re.compile(r'</?\w+.*?>', re.U | re.M)
+
+ def count_words(self):
+ """Count the words in an English string.
+ Replace a couple of xml markup to make that safer, too.
+ """
+ value = self.re_br.sub('\n', self.val)
+ value = self.re_sgml.sub('', value)
+ return len(value.split())
+
+ def equals(self, other):
+ return self.key == other.key and self.val == other.val
+
+
+class StickyEntry(Entry):
+ """Subclass of Entry to use in for syntax fragments
+ which should always be overwritten in the serializer.
+ """
+ pass
+
+
+class Entity(Entry):
+ @property
+ def localized(self):
+ '''Is this entity localized.
+
+ Always true for monolingual files.
+ In bilingual files, this is a dynamic property.
+ '''
+ return True
+
+ def unwrap(self):
+ """Return the literal value to be used by tools.
+ """
+ return self.raw_val
+
+ def wrap(self, raw_val):
+ """Create literal entity based on reference and raw value.
+
+ This is used by the serialization logic.
+ """
+ start = self._span_start()
+ all = (
+ self.ctx.contents[start:self.val_span[0]] +
+ raw_val +
+ self.ctx.contents[self.val_span[1]:self.span[1]]
+ )
+ return LiteralEntity(self.key, raw_val, all)
+
+
+class LiteralEntity(Entity):
+ """Subclass of Entity to represent entities without context slices.
+
+ It's storing string literals for key, raw_val and all instead of spans.
+ """
+ def __init__(self, key, val, all):
+ super(LiteralEntity, self).__init__(None, None, None, None, None, None)
+ self._key = key
+ self._raw_val = val
+ self._all = all
+
+ @property
+ def key(self):
+ return self._key
+
+ @property
+ def raw_val(self):
+ return self._raw_val
+
+ @property
+ def all(self):
+ return self._all
+
+
+class PlaceholderEntity(LiteralEntity):
+ """Subclass of Entity to be removed in merges.
+ """
+ def __init__(self, key):
+ super(PlaceholderEntity, self).__init__(key, "", "\nplaceholder\n")
+
+
+class Comment(Entry):
+ def __init__(self, ctx, span):
+ self.ctx = ctx
+ self.span = span
+ self.val_span = None
+ self._val_cache = None
+
+ @property
+ def key(self):
+ return None
+
+ @property
+ def val(self):
+ if self._val_cache is None:
+ self._val_cache = self.all
+ return self._val_cache
+
+ def __repr__(self):
+ return self.all
+
+
+class OffsetComment(Comment):
+ '''Helper for file formats that have a constant number of leading
+ chars to strip from comments.
+ Offset defaults to 1
+ '''
+ comment_offset = 1
+
+ @property
+ def val(self):
+ if self._val_cache is None:
+ self._val_cache = ''.join((
+ l[self.comment_offset:] for l in self.all.splitlines(True)
+ ))
+ return self._val_cache
+
+
+class Junk(object):
+ '''
+ An almost-Entity, representing junk data that we didn't parse.
+ This way, we can signal bad content as stuff we don't understand.
+ And then either fix that, or report real bugs in localizations.
+ '''
+ junkid = 0
+
+ def __init__(self, ctx, span):
+ self.ctx = ctx
+ self.span = span
+ self.__class__.junkid += 1
+ self.key = '_junk_%d_%d-%d' % (self.__class__.junkid, span[0], span[1])
+
+ def position(self, offset=0):
+ """Get the 1-based line and column of the character
+ with given offset into the Entity.
+
+ If offset is negative, return the end of the Entity.
+ """
+ if offset < 0:
+ pos = self.span[1]
+ else:
+ pos = self.span[0] + offset
+ return self.ctx.linecol(pos)
+
+ @property
+ def all(self):
+ return self.ctx.contents[self.span[0]:self.span[1]]
+
+ @property
+ def raw_val(self):
+ return self.all
+
+ @property
+ def val(self):
+ return self.all
+
+ def error_message(self):
+ params = (self.val,) + self.position() + self.position(-1)
+ return (
+ 'Unparsed content "%s" from line %d column %d'
+ ' to line %d column %d' % params
+ )
+
+ def __repr__(self):
+ return self.key
+
+
+class Whitespace(Entry):
+ '''Entity-like object representing a run of white-space in a file,
+ if allowed by the format.
+ '''
+ def __init__(self, ctx, span):
+ self.ctx = ctx
+ self.span = self.key_span = self.val_span = span
+
+ def __repr__(self):
+ return self.raw_val
+
+
+class BadEntity(ValueError):
+ '''Raised when the parser can't create an Entity for a found match.
+ '''
+ pass
+
+
+class Parser(object):
+ capabilities = CAN_SKIP | CAN_MERGE
+ reWhitespace = re.compile('[ \t\r\n]+', re.M)
+ Comment = Comment
+ # NotImplementedError would be great, but also tedious
+ reKey = reComment = None
+
+ class Context(object):
+ "Fixture for content and line numbers"
+ def __init__(self, contents):
+ self.contents = contents
+ # cache split lines
+ self._lines = None
+
+ def linecol(self, position):
+ "Returns 1-based line and column numbers."
+ if self._lines is None:
+ nl = re.compile('\n', re.M)
+ self._lines = [m.end()
+ for m in nl.finditer(self.contents)]
+
+ line_offset = bisect.bisect(self._lines, position)
+ line_start = self._lines[line_offset - 1] if line_offset else 0
+ col_offset = position - line_start
+
+ return line_offset + 1, col_offset + 1
+
+ def __init__(self):
+ if not hasattr(self, 'encoding'):
+ self.encoding = 'utf-8'
+ self.ctx = None
+
+ def readFile(self, file):
+ '''Read contents from disk, with universal_newlines'''
+ if isinstance(file, File):
+ file = file.fullpath
+ # python 2 has binary input with universal newlines,
+ # python 3 doesn't. Let's split code paths
+ if six.PY2:
+ with open(file, 'rbU') as f:
+ self.readContents(f.read())
+ else:
+ with open(
+ file, 'r',
+ encoding=self.encoding, errors='replace',
+ newline=None
+ ) as f:
+ self.readUnicode(f.read())
+
+ def readContents(self, contents):
+ '''Read contents and create parsing context.
+
+ contents are in native encoding, but with normalized line endings.
+ '''
+ (contents, _) = codecs.getdecoder(self.encoding)(contents, 'replace')
+ self.readUnicode(contents)
+
+ def readUnicode(self, contents):
+ self.ctx = self.Context(contents)
+
+ def parse(self):
+ return KeyedTuple(self)
+
+ def __iter__(self):
+ return self.walk(only_localizable=True)
+
+ def walk(self, only_localizable=False):
+ if not self.ctx:
+ # loading file failed, or we just didn't load anything
+ return
+ ctx = self.ctx
+ contents = ctx.contents
+
+ next_offset = 0
+ while next_offset < len(contents):
+ entity = self.getNext(ctx, next_offset)
+
+ if isinstance(entity, (Entity, Junk)):
+ yield entity
+ elif not only_localizable:
+ yield entity
+
+ next_offset = entity.span[1]
+
+ def getNext(self, ctx, offset):
+ '''Parse the next fragment.
+
+ Parse comments first, then white-space.
+ If an entity follows, create that entity with the comment as its
+ pre_comment and the inner white-space. If not, emit the comment or
+ white-space as standalone.
+ It's OK that this might parse whitespace more than once.
+ Comments are associated with entities if they're not separated by
+ blank lines. Multiple consecutive comments are joined.
+ '''
+ junk_offset = offset
+ m = self.reComment.match(ctx.contents, offset)
+ if m:
+ current_comment = self.Comment(ctx, m.span())
+ if offset < 2 and 'License' in current_comment.val:
+ # Heuristic. An early comment with "License" is probably
+ # a license header, and should be standalone.
+ # Not gluing ourselves to offset == 0 as we might have
+ # skipped a BOM.
+ return current_comment
+ offset = m.end()
+ else:
+ current_comment = None
+ m = self.reWhitespace.match(ctx.contents, offset)
+ if m:
+ white_space = Whitespace(ctx, m.span())
+ offset = m.end()
+ if (
+ current_comment is not None
+ and white_space.raw_val.count('\n') > 1
+ ):
+ # standalone comment
+ # return the comment, and reparse the whitespace next time
+ return current_comment
+ if current_comment is None:
+ return white_space
+ else:
+ white_space = None
+ m = self.reKey.match(ctx.contents, offset)
+ if m:
+ try:
+ return self.createEntity(ctx, m, current_comment, white_space)
+ except BadEntity:
+ # fall through to Junk, probably
+ pass
+ if current_comment is not None:
+ return current_comment
+ if white_space is not None:
+ return white_space
+ return self.getJunk(ctx, junk_offset, self.reKey, self.reComment)
+
+ def getJunk(self, ctx, offset, *expressions):
+ junkend = None
+ for exp in expressions:
+ m = exp.search(ctx.contents, offset)
+ if m:
+ junkend = min(junkend, m.start()) if junkend else m.start()
+ return Junk(ctx, (offset, junkend or len(ctx.contents)))
+
+ def createEntity(self, ctx, m, current_comment, white_space):
+ return Entity(
+ ctx, current_comment, white_space,
+ m.span(), m.span('key'), m.span('val')
+ )
+
+ @classmethod
+ def findDuplicates(cls, entities):
+ found = Counter(entity.key for entity in entities)
+ for entity_id, cnt in found.items():
+ if cnt > 1:
+ yield '{} occurs {} times'.format(entity_id, cnt)
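
The base Parser above is driven entirely by the reKey/reComment expressions a
subclass provides and by the walk()/getNext() loop. Below is a minimal sketch of
wiring a subclass up against that machinery; the ColonParser name, its "name: value"
format and the sample content are illustrative only, not part of the library.

    import re
    from compare_locales.parser.base import Parser

    class ColonParser(Parser):
        # hypothetical "name: value" format with "#" comments;
        # the key/val group names are what createEntity() expects
        reComment = re.compile('(?:^#[^\n]*\n)*(?:^#[^\n]*)', re.M)
        reKey = re.compile(r'(?P<key>\w+): (?P<val>[^\n]*)', re.M)

    p = ColonParser()
    p.readUnicode('# greeting\nhello: Hello\n')
    for entity in p:  # __iter__ -> walk(only_localizable=True)
        print(entity.key, entity.val)
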
diff --git a/third_party/python/compare-locales/compare_locales/parser/defines.py b/third_party/python/compare-locales/compare_locales/parser/defines.py
new file mode 100644
index 0000000000..45b5be0530
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/parser/defines.py
@@ -0,0 +1,106 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import re
+
+from .base import (
+ CAN_COPY,
+ Entry, OffsetComment, Junk, Whitespace,
+ Parser
+)
+
+
+class DefinesInstruction(Entry):
+ '''Entity-like object representing processing instructions in inc files
+ '''
+ def __init__(self, ctx, span, val_span):
+ self.ctx = ctx
+ self.span = span
+ self.key_span = self.val_span = val_span
+
+ def __repr__(self):
+ return self.raw_val
+
+
+class DefinesParser(Parser):
+ # can't merge, #unfilter needs to be the last item, which we don't support
+ capabilities = CAN_COPY
+ reWhitespace = re.compile('\n+', re.M)
+
+ EMPTY_LINES = 1 << 0
+
+ class Comment(OffsetComment):
+ comment_offset = 2
+
+ class Context(Parser.Context):
+ def __init__(self, contents):
+ super(DefinesParser.Context, self).__init__(contents)
+ self.filter_empty_lines = False
+
+ def __init__(self):
+ self.reComment = re.compile('(?:^# .*?\n)*(?:^# [^\n]*)', re.M)
+ # corresponds to
+ # https://hg.mozilla.org/mozilla-central/file/72ee4800d4156931c89b58bd807af4a3083702bb/python/mozbuild/mozbuild/preprocessor.py#l561 # noqa
+ self.reKey = re.compile(
+ r'#define[ \t]+(?P<key>\w+)(?:[ \t](?P<val>[^\n]*))?', re.M)
+ self.rePI = re.compile(r'#(?P<val>\w+[ \t]+[^\n]+)', re.M)
+ Parser.__init__(self)
+
+ def getNext(self, ctx, offset):
+ junk_offset = offset
+ contents = ctx.contents
+
+ m = self.reComment.match(ctx.contents, offset)
+ if m:
+ current_comment = self.Comment(ctx, m.span())
+ offset = m.end()
+ else:
+ current_comment = None
+
+ m = self.reWhitespace.match(contents, offset)
+ if m:
+ # blank lines outside of filter_empty_lines or
+ # leading whitespace are bad
+ if (
+ offset == 0 or
+ not (len(m.group()) == 1 or ctx.filter_empty_lines)
+ ):
+ if current_comment:
+ return current_comment
+ return Junk(ctx, m.span())
+ white_space = Whitespace(ctx, m.span())
+ offset = m.end()
+ if (
+ current_comment is not None
+ and white_space.raw_val.count('\n') > 1
+ ):
+ # standalone comment
+ # return the comment, and reparse the whitespace next time
+ return current_comment
+ if current_comment is None:
+ return white_space
+ else:
+ white_space = None
+
+ m = self.reKey.match(contents, offset)
+ if m:
+ return self.createEntity(ctx, m, current_comment, white_space)
+ # defines instructions don't have comments
+ # Any pending comment is standalone
+ if current_comment:
+ return current_comment
+ if white_space:
+ return white_space
+ m = self.rePI.match(contents, offset)
+ if m:
+ instr = DefinesInstruction(ctx, m.span(), m.span('val'))
+ if instr.val == 'filter emptyLines':
+ ctx.filter_empty_lines = True
+ if instr.val == 'unfilter emptyLines':
+ ctx.filter_empty_lines = False
+ return instr
+ return self.getJunk(
+ ctx, junk_offset, self.reComment, self.reKey, self.rePI)
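
A short usage sketch for DefinesParser, relying on readUnicode() from the base
class; the #define content is illustrative only.

    from compare_locales.parser.defines import DefinesParser

    p = DefinesParser()
    p.readUnicode('#define MOZ_LANGPACK_CREATOR mozilla.org\n')
    entity = next(iter(p))
    print(entity.key, entity.raw_val)  # MOZ_LANGPACK_CREATOR mozilla.org
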
diff --git a/third_party/python/compare-locales/compare_locales/parser/dtd.py b/third_party/python/compare-locales/compare_locales/parser/dtd.py
new file mode 100644
index 0000000000..5f0574f488
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/parser/dtd.py
@@ -0,0 +1,118 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import re
+
+try:
+ from html import unescape as html_unescape
+except ImportError:
+ from HTMLParser import HTMLParser
+ html_parser = HTMLParser()
+ html_unescape = html_parser.unescape
+
+from .base import (
+ Entity, Comment, Junk,
+ Parser
+)
+
+
+class DTDEntityMixin(object):
+ @property
+ def val(self):
+ '''Unescape HTML entities into corresponding Unicode characters.
+
+ Named (&amp;), decimal (&#38;), and hex (&#x26; and &#x0026;) formats
+ are supported. Unknown entities are left intact.
+
+ As of Python 2.7 and Python 3.6 the following 252 named entities are
+ recognized and unescaped:
+
+ https://github.com/python/cpython/blob/2.7/Lib/htmlentitydefs.py
+ https://github.com/python/cpython/blob/3.6/Lib/html/entities.py
+ '''
+ return html_unescape(self.raw_val)
+
+ def value_position(self, offset=0):
+ # DTDChecker already returns tuples of (line, col) positions
+ if isinstance(offset, tuple):
+ line_pos, col_pos = offset
+ line, col = super(DTDEntityMixin, self).value_position()
+ if line_pos == 1:
+ col = col + col_pos
+ else:
+ col = col_pos
+ line += line_pos - 1
+ return line, col
+ else:
+ return super(DTDEntityMixin, self).value_position(offset)
+
+
+class DTDEntity(DTDEntityMixin, Entity):
+ pass
+
+
+class DTDParser(Parser):
+ # http://www.w3.org/TR/2006/REC-xml11-20060816/#NT-NameStartChar
+ # ":" | [A-Z] | "_" | [a-z] |
+ # [#xC0-#xD6] | [#xD8-#xF6] | [#xF8-#x2FF] | [#x370-#x37D] | [#x37F-#x1FFF]
+ # | [#x200C-#x200D] | [#x2070-#x218F] | [#x2C00-#x2FEF] |
+ # [#x3001-#xD7FF] | [#xF900-#xFDCF] | [#xFDF0-#xFFFD] |
+ # [#x10000-#xEFFFF]
+ CharMinusDash = '\x09\x0A\x0D\u0020-\u002C\u002E-\uD7FF\uE000-\uFFFD'
+ XmlComment = '<!--(?:-?[%s])*?-->' % CharMinusDash
+ NameStartChar = ':A-Z_a-z\xC0-\xD6\xD8-\xF6\xF8-\u02FF' + \
+ '\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F' + \
+ '\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD'
+ # + \U00010000-\U000EFFFF seems to be unsupported in python
+
+ # NameChar ::= NameStartChar | "-" | "." | [0-9] | #xB7 |
+ # [#x0300-#x036F] | [#x203F-#x2040]
+ NameChar = NameStartChar + r'\-\.0-9' + '\xB7\u0300-\u036F\u203F-\u2040'
+ Name = '[' + NameStartChar + '][' + NameChar + ']*'
+ reKey = re.compile('<!ENTITY[ \t\r\n]+(?P<key>' + Name + ')[ \t\r\n]+'
+ '(?P<val>\"[^\"]*\"|\'[^\']*\'?)[ \t\r\n]*>',
+ re.DOTALL | re.M)
+ # add BOM to DTDs, details in bug 435002
+ reHeader = re.compile('^\ufeff')
+ reComment = re.compile('<!--(?P<val>-?[%s])*?-->' % CharMinusDash,
+ re.S)
+ rePE = re.compile('<!ENTITY[ \t\r\n]+%[ \t\r\n]+(?P<key>' + Name + ')'
+ '[ \t\r\n]+SYSTEM[ \t\r\n]+'
+ '(?P<val>\"[^\"]*\"|\'[^\']*\')[ \t\r\n]*>[ \t\r\n]*'
+ '%' + Name + ';'
+ '(?:[ \t]*(?:' + XmlComment + u'[ \t\r\n]*)*\n?)?')
+
+ class Comment(Comment):
+ @property
+ def val(self):
+ if self._val_cache is None:
+ # Strip "<!--" and "-->" to comment contents
+ self._val_cache = self.all[4:-3]
+ return self._val_cache
+
+ def getNext(self, ctx, offset):
+ '''
+ Override Parser.getNext to special-case parsed entities.
+ Just check for a parsed entity if that method claims junk.
+
+ <!ENTITY % foo SYSTEM "url">
+ %foo;
+ '''
+ if offset == 0 and self.reHeader.match(ctx.contents):
+ offset += 1
+ entity = Parser.getNext(self, ctx, offset)
+ if (entity and isinstance(entity, Junk)) or entity is None:
+ m = self.rePE.match(ctx.contents, offset)
+ if m:
+ entity = DTDEntity(
+ ctx, None, None, m.span(), m.span('key'), m.span('val'))
+ return entity
+
+ def createEntity(self, ctx, m, current_comment, white_space):
+ valspan = m.span('val')
+ valspan = (valspan[0]+1, valspan[1]-1)
+ return DTDEntity(ctx, current_comment, white_space,
+ m.span(), m.span('key'), valspan)
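
A sketch of parsing a DTD fragment with DTDParser; val runs the raw value through
html_unescape, so references like &amp; come back as characters. The entity name
and value are illustrative.

    from compare_locales.parser.dtd import DTDParser

    p = DTDParser()
    p.readUnicode('<!ENTITY main.title "Tom &amp; Jerry">\n')
    entity = next(iter(p))
    print(entity.key, entity.val)  # main.title Tom & Jerry
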
diff --git a/third_party/python/compare-locales/compare_locales/parser/fluent.py b/third_party/python/compare-locales/compare_locales/parser/fluent.py
new file mode 100644
index 0000000000..19d7d3c0b9
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/parser/fluent.py
@@ -0,0 +1,220 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import re
+
+from fluent.syntax import FluentParser as FTLParser
+from fluent.syntax import ast as ftl
+from fluent.syntax.serializer import serialize_comment
+from fluent.syntax.visitor import Visitor
+from .base import (
+ CAN_SKIP,
+ Entry, Entity, Comment, Junk, Whitespace,
+ LiteralEntity,
+ Parser
+)
+
+
+class WordCounter(Visitor):
+ def __init__(self):
+ self.word_count = 0
+
+ def generic_visit(self, node):
+ if isinstance(
+ node,
+ (ftl.Span, ftl.Annotation, ftl.BaseComment)
+ ):
+ return
+ super(WordCounter, self).generic_visit(node)
+
+ def visit_SelectExpression(self, node):
+ # optimize select expressions to only go through the variants
+ self.visit(node.variants)
+
+ def visit_TextElement(self, node):
+ self.word_count += len(node.value.split())
+
+
+class FluentAttribute(Entry):
+ ignored_fields = ['span']
+
+ def __init__(self, entity, attr_node):
+ self.ctx = entity.ctx
+ self.attr = attr_node
+ self.key_span = (attr_node.id.span.start, attr_node.id.span.end)
+ self.val_span = (attr_node.value.span.start, attr_node.value.span.end)
+
+ def equals(self, other):
+ if not isinstance(other, FluentAttribute):
+ return False
+ return self.attr.equals(
+ other.attr, ignored_fields=self.ignored_fields)
+
+
+class FluentEntity(Entity):
+ # Fields ignored when comparing two entities.
+ ignored_fields = ['comment', 'span']
+
+ def __init__(self, ctx, entry):
+ start = entry.span.start
+ end = entry.span.end
+
+ self.ctx = ctx
+ self.span = (start, end)
+
+ if isinstance(entry, ftl.Term):
+ # Terms don't have their '-' as part of the id, use the prior
+ # character
+ self.key_span = (entry.id.span.start - 1, entry.id.span.end)
+ else:
+ # Message
+ self.key_span = (entry.id.span.start, entry.id.span.end)
+
+ if entry.value is not None:
+ self.val_span = (entry.value.span.start, entry.value.span.end)
+ else:
+ self.val_span = None
+
+ self.entry = entry
+
+ # Entry instances are expected to have pre_comment. It's used by
+ # other formats to associate a Comment with an Entity. FluentEntities
+ # don't need it because message comments are part of the entry AST and
+ # are not separate Comment instances.
+ self.pre_comment = None
+
+ @property
+ def root_node(self):
+ '''AST node at which to start traversal for count_words.
+
+ By default we count words in the value and in all attributes.
+ '''
+ return self.entry
+
+ _word_count = None
+
+ def count_words(self):
+ if self._word_count is None:
+ counter = WordCounter()
+ counter.visit(self.root_node)
+ self._word_count = counter.word_count
+
+ return self._word_count
+
+ def equals(self, other):
+ return self.entry.equals(
+ other.entry, ignored_fields=self.ignored_fields)
+
+ # In Fluent we treat entries as a whole. FluentChecker reports errors at
+ # offsets calculated from the beginning of the entry.
+ def value_position(self, offset=None):
+ if offset is None:
+ # no offset given, use our value start or id end
+ if self.val_span:
+ offset = self.val_span[0] - self.span[0]
+ else:
+ offset = self.key_span[1] - self.span[0]
+ return self.position(offset)
+
+ @property
+ def attributes(self):
+ for attr_node in self.entry.attributes:
+ yield FluentAttribute(self, attr_node)
+
+ def unwrap(self):
+ return self.all
+
+ def wrap(self, raw_val):
+ """Create literal entity the given raw value.
+
+ For Fluent, we're exposing the message source to tools like
+ Pontoon.
+ We also copy the comment from this entity over to the created entity.
+ """
+ all = raw_val
+ if self.entry.comment is not None:
+ all = serialize_comment(self.entry.comment) + all
+ return LiteralEntity(self.key, raw_val, all)
+
+
+class FluentMessage(FluentEntity):
+ pass
+
+
+class FluentTerm(FluentEntity):
+ # Fields ignored when comparing two terms.
+ ignored_fields = ['attributes', 'comment', 'span']
+
+ @property
+ def root_node(self):
+ '''AST node at which to start traversal for count_words.
+
+ In Fluent Terms we only count words in the value. Attributes are
+ private and do not count towards the word total.
+ '''
+ return self.entry.value
+
+
+class FluentComment(Comment):
+ def __init__(self, ctx, span, entry):
+ super(FluentComment, self).__init__(ctx, span)
+ self._val_cache = entry.content
+
+
+class FluentParser(Parser):
+ capabilities = CAN_SKIP
+
+ def __init__(self):
+ super(FluentParser, self).__init__()
+ self.ftl_parser = FTLParser()
+
+ def walk(self, only_localizable=False):
+ if not self.ctx:
+ # loading file failed, or we just didn't load anything
+ return
+
+ resource = self.ftl_parser.parse(self.ctx.contents)
+
+ last_span_end = 0
+
+ for entry in resource.body:
+ if not only_localizable:
+ if entry.span.start > last_span_end:
+ yield Whitespace(
+ self.ctx, (last_span_end, entry.span.start))
+
+ if isinstance(entry, ftl.Message):
+ yield FluentMessage(self.ctx, entry)
+ elif isinstance(entry, ftl.Term):
+ yield FluentTerm(self.ctx, entry)
+ elif isinstance(entry, ftl.Junk):
+ start = entry.span.start
+ end = entry.span.end
+ # strip leading whitespace
+ start += re.match('[ \t\r\n]*', entry.content).end()
+ if not only_localizable and entry.span.start < start:
+ yield Whitespace(
+ self.ctx, (entry.span.start, start)
+ )
+ # strip trailing whitespace
+ ws, we = re.search('[ \t\r\n]*$', entry.content).span()
+ end -= we - ws
+ yield Junk(self.ctx, (start, end))
+ if not only_localizable and end < entry.span.end:
+ yield Whitespace(
+ self.ctx, (end, entry.span.end)
+ )
+ elif isinstance(entry, ftl.BaseComment) and not only_localizable:
+ span = (entry.span.start, entry.span.end)
+ yield FluentComment(self.ctx, span, entry)
+
+ last_span_end = entry.span.end
+
+ # Yield Whitespace at the EOF.
+ if not only_localizable:
+ eof_offset = len(self.ctx.contents)
+ if eof_offset > last_span_end:
+ yield Whitespace(self.ctx, (last_span_end, eof_offset))
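
A sketch of the Fluent side, assuming the fluent.syntax package imported at the top
of this file is available; count_words() walks the AST with the WordCounter visitor.
The message content is illustrative.

    from compare_locales.parser.fluent import FluentParser

    p = FluentParser()
    p.readUnicode('greeting = Hello, world!\n')
    message = next(iter(p))
    print(message.key, message.count_words())  # greeting 2
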
diff --git a/third_party/python/compare-locales/compare_locales/parser/ini.py b/third_party/python/compare-locales/compare_locales/parser/ini.py
new file mode 100644
index 0000000000..d3b31d9ae9
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/parser/ini.py
@@ -0,0 +1,58 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import re
+
+from .base import (
+ Entry, OffsetComment,
+ Parser
+)
+
+
+class IniSection(Entry):
+ '''Entity-like object representing sections in ini files
+ '''
+ def __init__(self, ctx, span, val_span):
+ self.ctx = ctx
+ self.span = span
+ self.key_span = self.val_span = val_span
+
+ def __repr__(self):
+ return self.raw_val
+
+
+class IniParser(Parser):
+ '''
+ Parse files of the form:
+ # initial comment
+ [cat]
+ whitespace*
+ #comment
+ string=value
+ ...
+ '''
+
+ Comment = OffsetComment
+
+ def __init__(self):
+ self.reComment = re.compile('(?:^[;#][^\n]*\n)*(?:^[;#][^\n]*)', re.M)
+ self.reSection = re.compile(r'\[(?P<val>.*?)\]', re.M)
+ self.reKey = re.compile('(?P<key>.+?)=(?P<val>.*)', re.M)
+ Parser.__init__(self)
+
+ def getNext(self, ctx, offset):
+ contents = ctx.contents
+ m = self.reSection.match(contents, offset)
+ if m:
+ return IniSection(ctx, m.span(), m.span('val'))
+
+ return super(IniParser, self).getNext(ctx, offset)
+
+ def getJunk(self, ctx, offset, *expressions):
+ # base.Parser.getNext calls us with self.reKey, self.reComment.
+ # Add self.reSection to the end-of-junk expressions
+ expressions = expressions + (self.reSection,)
+ return super(IniParser, self).getJunk(ctx, offset, *expressions)
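
A sketch for IniParser; plain iteration yields only the key/value entities, while
walk(only_localizable=False) would also yield the IniSection, comment and
white-space objects. The ini content is illustrative.

    from compare_locales.parser.ini import IniParser

    p = IniParser()
    p.readUnicode('; header\n[Strings]\nTitle=Hello\n')
    entity = next(iter(p))
    print(entity.key, entity.val)  # Title Hello
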
diff --git a/third_party/python/compare-locales/compare_locales/parser/po.py b/third_party/python/compare-locales/compare_locales/parser/po.py
new file mode 100644
index 0000000000..5880cf7c71
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/parser/po.py
@@ -0,0 +1,127 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""Gettext PO(T) parser
+
+Parses gettext po and pot files.
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import re
+
+from .base import (
+ CAN_SKIP,
+ Entity,
+ BadEntity,
+ Parser
+)
+
+
+class PoEntityMixin(object):
+
+ @property
+ def val(self):
+ return (
+ self.stringlist_val
+ if self.stringlist_val
+ else self.stringlist_key[0]
+ )
+
+ @property
+ def key(self):
+ return self.stringlist_key
+
+ @property
+ def localized(self):
+ # gettext denotes a non-localized string by an empty value
+ return bool(self.stringlist_val)
+
+ def __repr__(self):
+ return self.key[0]
+
+
+class PoEntity(PoEntityMixin, Entity):
+ pass
+
+
+# Unescape and concat a string list
+def eval_stringlist(lines):
+ return ''.join(
+ (
+ l
+ .replace(r'\\', '\\')
+ .replace(r'\t', '\t')
+ .replace(r'\r', '\r')
+ .replace(r'\n', '\n')
+ .replace(r'\"', '"')
+ )
+ for l in lines
+ )
+
+
+class PoParser(Parser):
+ # gettext l10n fallback at runtime, don't merge en-US strings
+ capabilities = CAN_SKIP
+
+ reKey = re.compile('msgctxt|msgid')
+ reValue = re.compile('(?P<white>[ \t\r\n]*)(?P<cmd>msgstr)')
+ reComment = re.compile(r'(?:#.*?\n)+')
+ # string list item:
+ # leading whitespace
+ # `"`
+ # escaped quotes etc, not quote, newline, backslash
+ # `"`
+ reListItem = re.compile(r'[ \t\r\n]*"((?:\\[\\trn"]|[^"\n\\])*)"')
+
+ def __init__(self):
+ super(PoParser, self).__init__()
+
+ def createEntity(self, ctx, m, current_comment, white_space):
+ start = cursor = m.start()
+ id_start = cursor
+ try:
+ msgctxt, cursor = self._parse_string_list(ctx, cursor, 'msgctxt')
+ m = self.reWhitespace.match(ctx.contents, cursor)
+ if m:
+ cursor = m.end()
+ except BadEntity:
+ # no msgctxt is OK
+ msgctxt = None
+ if id_start is None:
+ id_start = cursor
+ msgid, cursor = self._parse_string_list(ctx, cursor, 'msgid')
+ id_end = cursor
+ m = self.reWhitespace.match(ctx.contents, cursor)
+ if m:
+ cursor = m.end()
+ val_start = cursor
+ msgstr, cursor = self._parse_string_list(ctx, cursor, 'msgstr')
+ e = PoEntity(
+ ctx,
+ current_comment,
+ white_space,
+ (start, cursor),
+ (id_start, id_end),
+ (val_start, cursor)
+ )
+ e.stringlist_key = (msgid, msgctxt)
+ e.stringlist_val = msgstr
+ return e
+
+ def _parse_string_list(self, ctx, cursor, key):
+ if not ctx.contents.startswith(key, cursor):
+ raise BadEntity
+ cursor += len(key)
+ frags = []
+ while True:
+ m = self.reListItem.match(ctx.contents, cursor)
+ if not m:
+ break
+ frags.append(m.group(1))
+ cursor = m.end()
+ if not frags:
+ raise BadEntity
+ return eval_stringlist(frags), cursor
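
A sketch for PoParser; note that key is the (msgid, msgctxt) tuple and that
localized reflects whether msgstr is non-empty. The po content is illustrative.

    from compare_locales.parser.po import PoParser

    p = PoParser()
    p.readUnicode('msgid "Hello"\nmsgstr "Hallo"\n')
    entity = next(iter(p))
    print(entity.key, entity.val, entity.localized)
    # ('Hello', None) Hallo True
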
diff --git a/third_party/python/compare-locales/compare_locales/parser/properties.py b/third_party/python/compare-locales/compare_locales/parser/properties.py
new file mode 100644
index 0000000000..15b865a026
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/parser/properties.py
@@ -0,0 +1,116 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import re
+
+from .base import (
+ Entity, OffsetComment, Whitespace,
+ Parser
+)
+from six import unichr
+
+
+class PropertiesEntityMixin(object):
+ escape = re.compile(r'\\((?P<uni>u[0-9a-fA-F]{1,4})|'
+ '(?P<nl>\n[ \t]*)|(?P<single>.))', re.M)
+ known_escapes = {'n': '\n', 'r': '\r', 't': '\t', '\\': '\\'}
+
+ @property
+ def val(self):
+ def unescape(m):
+ found = m.groupdict()
+ if found['uni']:
+ return unichr(int(found['uni'][1:], 16))
+ if found['nl']:
+ return ''
+ return self.known_escapes.get(found['single'], found['single'])
+
+ return self.escape.sub(unescape, self.raw_val)
+
+
+class PropertiesEntity(PropertiesEntityMixin, Entity):
+ pass
+
+
+class PropertiesParser(Parser):
+
+ Comment = OffsetComment
+
+ def __init__(self):
+ self.reKey = re.compile(
+ '(?P<key>[^#! \t\r\n][^=:\n]*?)[ \t]*[:=][ \t]*', re.M)
+ self.reComment = re.compile('(?:[#!][^\n]*\n)*(?:[#!][^\n]*)', re.M)
+ self._escapedEnd = re.compile(r'\\+$')
+ self._trailingWS = re.compile(r'[ \t\r\n]*(?:\n|\Z)', re.M)
+ Parser.__init__(self)
+
+ def getNext(self, ctx, offset):
+ junk_offset = offset
+ # overwritten to parse values line by line
+ contents = ctx.contents
+
+ m = self.reComment.match(contents, offset)
+ if m:
+ current_comment = self.Comment(ctx, m.span())
+ if offset == 0 and 'License' in current_comment.val:
+ # Heuristic. An early comment with "License" is probably
+ # a license header, and should be standalone.
+ return current_comment
+ offset = m.end()
+ else:
+ current_comment = None
+
+ m = self.reWhitespace.match(contents, offset)
+ if m:
+ white_space = Whitespace(ctx, m.span())
+ offset = m.end()
+ if (
+ current_comment is not None
+ and white_space.raw_val.count('\n') > 1
+ ):
+ # standalone comment
+ return current_comment
+ if current_comment is None:
+ return white_space
+ else:
+ white_space = None
+
+ m = self.reKey.match(contents, offset)
+ if m:
+ startline = offset = m.end()
+ while True:
+ endval = nextline = contents.find('\n', offset)
+ if nextline == -1:
+ endval = offset = len(contents)
+ break
+ # is newline escaped?
+ _e = self._escapedEnd.search(contents, offset, nextline)
+ offset = nextline + 1
+ if _e is None:
+ break
+ # backslashes at end of line, if 2*n, not escaped
+ if len(_e.group()) % 2 == 0:
+ break
+ startline = offset
+
+ # strip trailing whitespace
+ ws = self._trailingWS.search(contents, startline)
+ if ws:
+ endval = ws.start()
+
+ entity = PropertiesEntity(
+ ctx, current_comment, white_space,
+ (m.start(), endval), # full span
+ m.span('key'),
+ (m.end(), endval)) # value span
+ return entity
+
+ if current_comment is not None:
+ return current_comment
+ if white_space is not None:
+ return white_space
+
+ return self.getJunk(ctx, junk_offset, self.reKey, self.reComment)
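
A sketch for PropertiesParser, exercising the escaped-newline continuation handled
in getNext() and the unescaping in val; the key and value are illustrative.

    from compare_locales.parser.properties import PropertiesParser

    p = PropertiesParser()
    p.readUnicode('greeting = Hello \\\n  World\n')
    entity = next(iter(p))
    print(entity.val)  # Hello World
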
diff --git a/third_party/python/compare-locales/compare_locales/paths/__init__.py b/third_party/python/compare-locales/compare_locales/paths/__init__.py
new file mode 100644
index 0000000000..a3d3cbc43b
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/paths/__init__.py
@@ -0,0 +1,54 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from compare_locales import mozpath
+from .files import ProjectFiles, REFERENCE_LOCALE
+from .ini import (
+ L10nConfigParser, SourceTreeConfigParser,
+ EnumerateApp, EnumerateSourceTreeApp,
+)
+from .matcher import Matcher
+from .project import ProjectConfig
+from .configparser import TOMLParser, ConfigNotFound
+
+
+__all__ = [
+ 'Matcher',
+ 'ProjectConfig',
+ 'L10nConfigParser', 'SourceTreeConfigParser',
+ 'EnumerateApp', 'EnumerateSourceTreeApp',
+ 'ProjectFiles', 'REFERENCE_LOCALE',
+ 'TOMLParser', 'ConfigNotFound',
+]
+
+
+class File(object):
+
+ def __init__(self, fullpath, file, module=None, locale=None):
+ self.fullpath = fullpath
+ self.file = file
+ self.module = module
+ self.locale = locale
+ pass
+
+ @property
+ def localpath(self):
+ if self.module:
+ return mozpath.join(self.locale, self.module, self.file)
+ return self.file
+
+ def __hash__(self):
+ return hash(self.localpath)
+
+ def __str__(self):
+ return self.fullpath
+
+ def __eq__(self, other):
+ if not isinstance(other, File):
+ return False
+ return vars(self) == vars(other)
+
+ def __ne__(self, other):
+ return not (self == other)
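
A small sketch of the File value object defined above; the paths are hypothetical.

    from compare_locales.paths import File

    f = File('/mozilla/browser/locales/en-US/menu.ftl', 'menu.ftl',
             module='browser', locale='en-US')
    print(f.localpath)  # en-US/browser/menu.ftl
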
diff --git a/third_party/python/compare-locales/compare_locales/paths/configparser.py b/third_party/python/compare-locales/compare_locales/paths/configparser.py
new file mode 100644
index 0000000000..ce56df10b7
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/paths/configparser.py
@@ -0,0 +1,140 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import errno
+import logging
+from compare_locales import mozpath
+from .project import ProjectConfig
+from .matcher import expand
+import pytoml as toml
+import six
+
+
+class ConfigNotFound(EnvironmentError):
+ def __init__(self, path):
+ super(ConfigNotFound, self).__init__(
+ errno.ENOENT,
+ 'Configuration file not found',
+ path)
+
+
+class ParseContext(object):
+ def __init__(self, path, env, ignore_missing_includes):
+ self.path = path
+ self.env = env
+ self.ignore_missing_includes = ignore_missing_includes
+ self.data = None
+ self.pc = ProjectConfig(path)
+
+
+class TOMLParser(object):
+ def parse(self, path, env=None, ignore_missing_includes=False):
+ ctx = self.context(
+ path, env=env, ignore_missing_includes=ignore_missing_includes
+ )
+ self.load(ctx)
+ self.processBasePath(ctx)
+ self.processEnv(ctx)
+ self.processPaths(ctx)
+ self.processFilters(ctx)
+ self.processIncludes(ctx)
+ self.processExcludes(ctx)
+ self.processLocales(ctx)
+ return self.asConfig(ctx)
+
+ def context(self, path, env=None, ignore_missing_includes=False):
+ return ParseContext(
+ path,
+ env if env is not None else {},
+ ignore_missing_includes,
+ )
+
+ def load(self, ctx):
+ try:
+ with open(ctx.path, 'rb') as fin:
+ ctx.data = toml.load(fin)
+ except (toml.TomlError, IOError):
+ raise ConfigNotFound(ctx.path)
+
+ def processBasePath(self, ctx):
+ assert ctx.data is not None
+ ctx.pc.set_root(ctx.data.get('basepath', '.'))
+
+ def processEnv(self, ctx):
+ assert ctx.data is not None
+ ctx.pc.add_environment(**ctx.data.get('env', {}))
+ # add parser environment, possibly overwriting file variables
+ ctx.pc.add_environment(**ctx.env)
+
+ def processLocales(self, ctx):
+ assert ctx.data is not None
+ if 'locales' in ctx.data:
+ ctx.pc.set_locales(ctx.data['locales'])
+
+ def processPaths(self, ctx):
+ assert ctx.data is not None
+ for data in ctx.data.get('paths', []):
+ paths = {
+ "l10n": data['l10n']
+ }
+ if 'locales' in data:
+ paths['locales'] = data['locales']
+ if 'reference' in data:
+ paths['reference'] = data['reference']
+ if 'test' in data:
+ paths['test'] = data['test']
+ ctx.pc.add_paths(paths)
+
+ def processFilters(self, ctx):
+ assert ctx.data is not None
+ for data in ctx.data.get('filters', []):
+ paths = data['path']
+ if isinstance(paths, six.string_types):
+ paths = [paths]
+ rule = {
+ "path": paths,
+ "action": data['action']
+ }
+ if 'key' in data:
+ rule['key'] = data['key']
+ ctx.pc.add_rules(rule)
+
+ def processIncludes(self, ctx):
+ for child in self._processChild(ctx, 'includes'):
+ ctx.pc.add_child(child)
+
+ def processExcludes(self, ctx):
+ for child in self._processChild(ctx, 'excludes'):
+ ctx.pc.exclude(child)
+
+ def _processChild(self, ctx, field):
+ assert ctx.data is not None
+ if field not in ctx.data:
+ return
+ for child_config in ctx.data[field]:
+ # resolve child_config['path'] against our root and env
+ p = mozpath.normpath(
+ expand(
+ ctx.pc.root,
+ child_config['path'],
+ ctx.pc.environ
+ )
+ )
+ try:
+ child = self.parse(
+ p, env=ctx.env,
+ ignore_missing_includes=ctx.ignore_missing_includes
+ )
+ except ConfigNotFound as e:
+ if not ctx.ignore_missing_includes:
+ raise
+ (logging
+ .getLogger('compare-locales.io')
+ .error('%s: %s', e.strerror, e.filename))
+ continue
+ yield child
+
+ def asConfig(self, ctx):
+ return ctx.pc
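
A sketch of the TOML layout that processBasePath/processEnv/processPaths consume,
and of invoking the parser; the file name, paths and contents are hypothetical.

    # l10n.toml (hypothetical):
    #     basepath = "."
    #     locales = ["de", "fr"]
    #     [env]
    #         l10n_base = "/l10n"
    #     [[paths]]
    #         reference = "browser/locales/en-US/**"
    #         l10n = "{l10n_base}/{locale}/browser/**"
    from compare_locales.paths import TOMLParser, ConfigNotFound

    try:
        config = TOMLParser().parse('l10n.toml', env={'l10n_base': '/l10n'})
    except ConfigNotFound as e:
        print('missing config:', e.filename)
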
diff --git a/third_party/python/compare-locales/compare_locales/paths/files.py b/third_party/python/compare-locales/compare_locales/paths/files.py
new file mode 100644
index 0000000000..b7ec21b9f5
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/paths/files.py
@@ -0,0 +1,223 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import os
+from compare_locales import mozpath
+
+
+REFERENCE_LOCALE = 'en-x-moz-reference'
+
+
+class ConfigList(list):
+ def maybe_extend(self, other):
+ '''Add configs from the other list whose paths this list doesn't have yet.
+ '''
+ for config in other:
+ if any(mine.path == config.path for mine in self):
+ continue
+ self.append(config)
+
+
+class ProjectFiles(object):
+ '''Iterable object to get all files and tests for a locale and a
+ list of ProjectConfigs.
+
+ If the given locale is None, iterate over reference files as
+ both reference and locale for a reference self-test.
+ '''
+ def __init__(self, locale, projects, mergebase=None):
+ self.locale = locale
+ self.matchers = []
+ self.exclude = None
+ self.mergebase = mergebase
+ configs = ConfigList()
+ excludes = ConfigList()
+ for project in projects:
+ # Only add this project if we're not in validation mode,
+ # and the given locale is enabled for the project.
+ if locale is not None and locale not in project.all_locales:
+ continue
+ configs.maybe_extend(project.configs)
+ excludes.maybe_extend(project.excludes)
+ # If an excluded config is explicitly included, drop it from the
+ # excludes.
+ excludes = [
+ exclude
+ for exclude in excludes
+ if not any(c.path == exclude.path for c in configs)
+ ]
+ if excludes:
+ self.exclude = ProjectFiles(locale, excludes)
+ for pc in configs:
+ if locale and pc.locales is not None and locale not in pc.locales:
+ continue
+ for paths in pc.paths:
+ if (
+ locale and
+ 'locales' in paths and
+ locale not in paths['locales']
+ ):
+ continue
+ m = {
+ 'l10n': paths['l10n'].with_env({
+ "locale": locale or REFERENCE_LOCALE
+ }),
+ 'module': paths.get('module'),
+ }
+ if 'reference' in paths:
+ m['reference'] = paths['reference']
+ if self.mergebase is not None:
+ m['merge'] = paths['l10n'].with_env({
+ "locale": locale,
+ "l10n_base": self.mergebase
+ })
+ m['test'] = set(paths.get('test', []))
+ if 'locales' in paths:
+ m['locales'] = paths['locales'][:]
+ self.matchers.append(m)
+ self.matchers.reverse() # we always iterate last first
+ # Remove duplicate patterns, comparing each matcher
+ # against all other matchers.
+ # Avoid n^2 comparisons by only scanning the upper triangle
+ # of an n x n matrix of all possible combinations.
+ # Using enumerate and keeping track of indexes, as we can't
+ # modify the list while iterating over it.
+ drops = set() # duplicate matchers to remove
+ for i, m in enumerate(self.matchers[:-1]):
+ if i in drops:
+ continue # we're dropping this anyway, don't search again
+ for i_, m_ in enumerate(self.matchers[(i+1):]):
+ if (mozpath.realpath(m['l10n'].prefix) !=
+ mozpath.realpath(m_['l10n'].prefix)):
+ # ok, not the same thing, continue
+ continue
+ # check that we're comparing the same thing
+ if 'reference' in m:
+ if (mozpath.realpath(m['reference'].prefix) !=
+ mozpath.realpath(m_.get('reference').prefix)):
+ raise RuntimeError('Mismatch in reference for ' +
+ mozpath.realpath(m['l10n'].prefix))
+ drops.add(i_ + i + 1)
+ m['test'] |= m_['test']
+ drops = sorted(drops, reverse=True)
+ for i in drops:
+ del self.matchers[i]
+
+ def __iter__(self):
+ # The iteration is pretty different when we iterate over
+ # a localization vs over the reference. We do the latter
+ # when running in validation mode.
+ inner = self.iter_locale() if self.locale else self.iter_reference()
+ for t in inner:
+ yield t
+
+ def iter_locale(self):
+ '''Iterate over locale files.'''
+ known = {}
+ for matchers in self.matchers:
+ matcher = matchers['l10n']
+ for path in self._files(matcher):
+ if path not in known:
+ known[path] = {'test': matchers.get('test')}
+ if 'reference' in matchers:
+ known[path]['reference'] = matcher.sub(
+ matchers['reference'], path)
+ if 'merge' in matchers:
+ known[path]['merge'] = matcher.sub(
+ matchers['merge'], path)
+ if 'reference' not in matchers:
+ continue
+ matcher = matchers['reference']
+ for path in self._files(matcher):
+ l10npath = matcher.sub(matchers['l10n'], path)
+ if l10npath not in known:
+ known[l10npath] = {
+ 'reference': path,
+ 'test': matchers.get('test')
+ }
+ if 'merge' in matchers:
+ known[l10npath]['merge'] = \
+ matcher.sub(matchers['merge'], path)
+ for path, d in sorted(known.items()):
+ yield (path, d.get('reference'), d.get('merge'), d['test'])
+
+ def iter_reference(self):
+ '''Iterate over reference files.'''
+ # unset self.exclude, as we don't want that for our reference files
+ exclude = self.exclude
+ self.exclude = None
+ known = {}
+ for matchers in self.matchers:
+ if 'reference' not in matchers:
+ continue
+ matcher = matchers['reference']
+ for path in self._files(matcher):
+ refpath = matcher.sub(matchers['reference'], path)
+ if refpath not in known:
+ known[refpath] = {
+ 'reference': path,
+ 'test': matchers.get('test')
+ }
+ for path, d in sorted(known.items()):
+ yield (path, d.get('reference'), None, d['test'])
+ self.exclude = exclude
+
+ def _files(self, matcher):
+ '''Base implementation of getting all files in a hierarchy
+ using the file system.
+ Subclasses might replace this method to support different IO
+ patterns.
+ '''
+ base = matcher.prefix
+ if self._isfile(base):
+ if self.exclude and self.exclude.match(base) is not None:
+ return
+ if matcher.match(base) is not None:
+ yield base
+ return
+ for d, dirs, files in self._walk(base):
+ for f in files:
+ p = mozpath.join(d, f)
+ if self.exclude and self.exclude.match(p) is not None:
+ continue
+ if matcher.match(p) is not None:
+ yield p
+
+ def _isfile(self, path):
+ return os.path.isfile(path)
+
+ def _walk(self, base):
+ for d, dirs, files in os.walk(base):
+ yield d, dirs, files
+
+ def match(self, path):
+ '''Return the tuple of l10n_path, reference, mergepath, tests
+ if the given path matches any config, otherwise None.
+
+ This routine doesn't check that the files actually exist.
+ '''
+ if (
+ self.locale is not None and
+ self.exclude and self.exclude.match(path) is not None
+ ):
+ return
+ for matchers in self.matchers:
+ matcher = matchers['l10n']
+ if self.locale is not None and matcher.match(path) is not None:
+ ref = merge = None
+ if 'reference' in matchers:
+ ref = matcher.sub(matchers['reference'], path)
+ if 'merge' in matchers:
+ merge = matcher.sub(matchers['merge'], path)
+ return path, ref, merge, matchers.get('test')
+ if 'reference' not in matchers:
+ continue
+ matcher = matchers['reference']
+ if matcher.match(path) is not None:
+ merge = None
+ l10n = matcher.sub(matchers['l10n'], path)
+ if 'merge' in matchers:
+ merge = matcher.sub(matchers['merge'], path)
+ return l10n, path, merge, matchers.get('test')
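
A sketch of iterating ProjectFiles with a ProjectConfig such as the one produced in
the TOML sketch above; each step yields the (l10n path, reference path, merge path,
tests) tuple built in iter_locale().

    from compare_locales.paths import ProjectFiles

    files = ProjectFiles('de', [config])  # config: a ProjectConfig
    for l10n_path, reference, merge, tests in files:
        print(l10n_path, reference, tests)
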
diff --git a/third_party/python/compare-locales/compare_locales/paths/ini.py b/third_party/python/compare-locales/compare_locales/paths/ini.py
new file mode 100644
index 0000000000..0e4b7d12bf
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/paths/ini.py
@@ -0,0 +1,227 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from six.moves.configparser import ConfigParser, NoSectionError, NoOptionError
+from collections import defaultdict
+from compare_locales import util, mozpath
+from .project import ProjectConfig
+
+
+class L10nConfigParser(object):
+ '''Helper class to gather application information from ini files.
+
+ This class reads files (or web data) with synchronous opens.
+ Subclass this and override loadConfigs and addChild if you need async.
+ '''
+ def __init__(self, inipath, **kwargs):
+ """Constructor for L10nConfigParsers
+
+ inipath -- l10n.ini path
+ Optional keyword arguments are forwarded to the inner ConfigParser as
+ defaults.
+ """
+ self.inipath = mozpath.normpath(inipath)
+ # l10n.ini files can import other l10n.ini files, store the
+ # corresponding L10nConfigParsers
+ self.children = []
+ # we really only care about the l10n directories described in l10n.ini
+ self.dirs = []
+ # optional defaults to be passed to the inner ConfigParser (unused?)
+ self.defaults = kwargs
+
+ def getDepth(self, cp):
+ '''Get the depth for the comparison from the parsed l10n.ini.
+ '''
+ try:
+ depth = cp.get('general', 'depth')
+ except (NoSectionError, NoOptionError):
+ depth = '.'
+ return depth
+
+ def getFilters(self):
+ '''Get the test functions from this ConfigParser and all children.
+
+ Only works with synchronous loads, used by compare-locales, which
+ is local anyway.
+ '''
+ filter_path = mozpath.join(mozpath.dirname(self.inipath), 'filter.py')
+ try:
+ local = {}
+ with open(filter_path) as f:
+ exec(compile(f.read(), filter_path, 'exec'), {}, local)
+ if 'test' in local and callable(local['test']):
+ filters = [local['test']]
+ else:
+ filters = []
+ except BaseException: # we really want to handle EVERYTHING here
+ filters = []
+
+ for c in self.children:
+ filters += c.getFilters()
+
+ return filters
+
+ def loadConfigs(self):
+ """Entry point to load the l10n.ini file this Parser refers to.
+
+ This implementation uses synchronous loads; subclasses might overload
+ this behaviour. If you do, make sure to pass a file-like object
+ to onLoadConfig.
+ """
+ cp = ConfigParser(self.defaults)
+ cp.read(self.inipath)
+ depth = self.getDepth(cp)
+ self.base = mozpath.join(mozpath.dirname(self.inipath), depth)
+ # create child loaders for any other l10n.ini files to be included
+ try:
+ for title, path in cp.items('includes'):
+ # skip default items
+ if title in self.defaults:
+ continue
+ # add child config parser
+ self.addChild(title, path, cp)
+ except NoSectionError:
+ pass
+ # try to load the "dirs" defined in the "compare" section
+ try:
+ self.dirs.extend(cp.get('compare', 'dirs').split())
+ except (NoOptionError, NoSectionError):
+ pass
+ # try to set "all_path" and "all_url"
+ try:
+ self.all_path = mozpath.join(self.base, cp.get('general', 'all'))
+ except (NoOptionError, NoSectionError):
+ self.all_path = None
+ return cp
+
+ def addChild(self, title, path, orig_cp):
+ """Create a child L10nConfigParser and load it.
+
+ title -- indicates the module's name
+ path -- indicates the path to the module's l10n.ini file
+ orig_cp -- the configuration parser of this l10n.ini
+ """
+ cp = L10nConfigParser(mozpath.join(self.base, path), **self.defaults)
+ cp.loadConfigs()
+ self.children.append(cp)
+
+ def dirsIter(self):
+ """Iterate over all dirs and our base path for this l10n.ini"""
+ for dir in self.dirs:
+ yield dir, (self.base, dir)
+
+ def directories(self):
+ """Iterate over all dirs and base paths for this l10n.ini as well
+ as the included ones.
+ """
+ for t in self.dirsIter():
+ yield t
+ for child in self.children:
+ for t in child.directories():
+ yield t
+
+ def allLocales(self):
+ """Return a list of all the locales of this project"""
+ with open(self.all_path) as f:
+ return util.parseLocales(f.read())
+
+
+class SourceTreeConfigParser(L10nConfigParser):
+ '''Subclassing L10nConfigParser to work with just the repos
+ checked out next to each other instead of intermingled like
+ we do for real builds.
+ '''
+
+ def __init__(self, inipath, base, redirects):
+ '''Add the additional arguments base and redirects.
+
+ base is used to resolve local paths via branch names.
+ redirects is used in a unified repository, mapping upstream
+ repos to local clones.
+ '''
+ L10nConfigParser.__init__(self, inipath)
+ self.base = base
+ self.redirects = redirects
+
+ def addChild(self, title, path, orig_cp):
+ # check if there's a section with details for this include
+ # we might have to check a different repo, or even VCS
+ # for example, projects like "mail" indicate in
+ # an "include_" section where to find the l10n.ini for "toolkit"
+ details = 'include_' + title
+ if orig_cp.has_section(details):
+ branch = orig_cp.get(details, 'mozilla')
+ branch = self.redirects.get(branch, branch)
+ inipath = orig_cp.get(details, 'l10n.ini')
+ path = mozpath.join(self.base, branch, inipath)
+ else:
+ path = mozpath.join(self.base, path)
+ cp = SourceTreeConfigParser(path, self.base, self.redirects,
+ **self.defaults)
+ cp.loadConfigs()
+ self.children.append(cp)
+
+
+class EnumerateApp(object):
+ reference = 'en-US'
+
+ def __init__(self, inipath, l10nbase):
+ self.setupConfigParser(inipath)
+ self.modules = defaultdict(dict)
+ self.l10nbase = mozpath.abspath(l10nbase)
+ self.filters = []
+ self.addFilters(*self.config.getFilters())
+
+ def setupConfigParser(self, inipath):
+ self.config = L10nConfigParser(inipath)
+ self.config.loadConfigs()
+
+ def addFilters(self, *args):
+ self.filters += args
+
+ def asConfig(self):
+ # We've already normalized paths in the ini parsing.
+ # Set the path and root to None to just keep our paths as is.
+ config = ProjectConfig(None)
+ config.set_root('.') # sets to None because path is None
+ config.add_environment(l10n_base=self.l10nbase)
+ self._config_for_ini(config, self.config)
+ filters = self.config.getFilters()
+ if filters:
+ config.set_filter_py(filters[0])
+ config.set_locales(self.config.allLocales(), deep=True)
+ return config
+
+ def _config_for_ini(self, projectconfig, aConfig):
+ for k, (basepath, module) in aConfig.dirsIter():
+ paths = {
+ 'module': module,
+ 'reference': mozpath.normpath('%s/%s/locales/en-US/**' %
+ (basepath, module)),
+ 'l10n': mozpath.normpath('{l10n_base}/{locale}/%s/**' %
+ module)
+ }
+ if module == 'mobile/android/base':
+ paths['test'] = ['android-dtd']
+ projectconfig.add_paths(paths)
+ for child in aConfig.children:
+ self._config_for_ini(projectconfig, child)
+
+
+class EnumerateSourceTreeApp(EnumerateApp):
+ '''Subclass EnumerateApp to work on side-by-side checked out
+ repos, and to pay no attention to how the source would actually
+ be checked out for building.
+ '''
+
+ def __init__(self, inipath, basepath, l10nbase, redirects):
+ self.basepath = basepath
+ self.redirects = redirects
+ EnumerateApp.__init__(self, inipath, l10nbase)
+
+ def setupConfigParser(self, inipath):
+ self.config = SourceTreeConfigParser(inipath, self.basepath,
+ self.redirects)
+ self.config.loadConfigs()
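
A sketch of the legacy l10n.ini entry point; the ini path and l10n base directory
are hypothetical, and asConfig() returns a ProjectConfig that can be handed to
ProjectFiles as in the sketch above.

    from compare_locales.paths import EnumerateApp

    app = EnumerateApp('mozilla/browser/locales/l10n.ini', '/path/to/l10n')
    config = app.asConfig()
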
diff --git a/third_party/python/compare-locales/compare_locales/paths/matcher.py b/third_party/python/compare-locales/compare_locales/paths/matcher.py
new file mode 100644
index 0000000000..554d167686
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/paths/matcher.py
@@ -0,0 +1,472 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import os
+import re
+import itertools
+from compare_locales import mozpath
+import six
+
+
+# Android uses non-standard locale codes, these are the mappings
+# back and forth
+ANDROID_LEGACY_MAP = {
+ 'he': 'iw',
+ 'id': 'in',
+ 'yi': 'ji'
+}
+ANDROID_STANDARD_MAP = {
+ legacy: standard
+ for standard, legacy in six.iteritems(ANDROID_LEGACY_MAP)
+}
+
+
+class Matcher(object):
+ '''Path pattern matcher
+ Supports path matching similar to mozpath.match(), but does
+ not match trailing file paths without trailing wildcards.
+ Also gets a prefix, which is the path before the first wildcard,
+ which is good for filesystem iterations, and allows substituting
+ its own matches in a path into a different Matcher. compare-locales
+ uses that to transform l10n and en-US paths back and forth.
+ '''
+
+ def __init__(self, pattern_or_other, env={}, root=None, encoding=None):
+ '''Create regular expression similar to mozpath.match().
+ '''
+ parser = PatternParser()
+ real_env = {k: parser.parse(v) for k, v in env.items()}
+ self._cached_re = None
+ if root is not None:
+ # make sure that our root is fully expanded and ends with /
+ root = mozpath.abspath(root) + '/'
+ # allow constructing Matchers from Matchers
+ if isinstance(pattern_or_other, Matcher):
+ other = pattern_or_other
+ self.pattern = Pattern(other.pattern)
+ self.env = other.env.copy()
+ self.env.update(real_env)
+ if root is not None:
+ self.pattern.root = root
+ self.encoding = other.encoding
+ return
+ self.env = real_env
+ pattern = pattern_or_other
+ self.pattern = parser.parse(pattern)
+ if root is not None:
+ self.pattern.root = root
+ self.encoding = encoding
+
+ def with_env(self, environ):
+ return Matcher(self, environ)
+
+ @property
+ def prefix(self):
+ subpattern = Pattern(self.pattern[:self.pattern.prefix_length])
+ subpattern.root = self.pattern.root
+ prefix = subpattern.expand(self.env)
+ if self.encoding is not None:
+ prefix = prefix.encode(self.encoding)
+ return prefix
+
+ def match(self, path):
+ '''Test the given path against this matcher and its environment.
+
+ Return None if there's no match, and the dictionary of matched
+ variables in this matcher if there's a match.
+ '''
+ self._cache_regex()
+ m = self._cached_re.match(path)
+ if m is None:
+ return None
+ d = m.groupdict()
+ if self.encoding is not None:
+ d = {key: value.decode(self.encoding) for key, value in d.items()}
+ if 'android_locale' in d and 'locale' not in d:
+ # map android_locale to locale code
+ locale = d['android_locale']
+ # map legacy locale codes, he <-> iw, id <-> in, yi <-> ji
+ locale = re.sub(
+ r'(iw|in|ji)(?=\Z|-)',
+ lambda legacy: ANDROID_STANDARD_MAP[legacy.group(1)],
+ locale
+ )
+ locale = re.sub(r'-r([A-Z]{2})', r'-\1', locale)
+ locale = locale.replace('b+', '').replace('+', '-')
+ d['locale'] = locale
+ return d
+
+ def _cache_regex(self):
+ if self._cached_re is not None:
+ return
+ pattern = self.pattern.regex_pattern(self.env) + '$'
+ if self.encoding is not None:
+ pattern = pattern.encode(self.encoding)
+ self._cached_re = re.compile(pattern)
+
+ def sub(self, other, path):
+ '''
+ Substitute the wildcard matches in this pattern into the
+ pattern of the other Matcher object.
+ '''
+ m = self.match(path)
+ if m is None:
+ return None
+ env = {}
+ env.update(
+ (key, Literal(value if value is not None else ''))
+ for key, value in m.items()
+ )
+ env.update(other.env)
+ path = other.pattern.expand(env)
+ if self.encoding is not None:
+ path = path.encode(self.encoding)
+ return path
+
+ def concat(self, other):
+ '''Concat two Matcher objects.
+
+ The intent is to create one Matcher with variable substitutions that
+ behaves as if you joined the resulting paths.
+ This doesn't do path separator logic, though, and it won't resolve
+ parent directories.
+ '''
+ if not isinstance(other, Matcher):
+ other_matcher = Matcher(other)
+ else:
+ other_matcher = other
+ other_pattern = other_matcher.pattern
+ if other_pattern.root is not None:
+ raise ValueError('Other matcher must not be rooted')
+ result = Matcher(self)
+ result.pattern += other_pattern
+ if self.pattern.prefix_length == len(self.pattern):
+ result.pattern.prefix_length += other_pattern.prefix_length
+ result.env.update(other_matcher.env)
+ return result
+
+ def __str__(self):
+ return self.pattern.expand(self.env)
+
+ def __repr__(self):
+ return '{}({!r}, env={!r}, root={!r})'.format(
+ type(self).__name__, self.pattern, self.env, self.pattern.root
+ )
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __eq__(self, other):
+ '''Equality for Matcher.
+
+ The equality for Matchers is defined to have the same pattern,
+ and no conflicting environment. Additional environment settings
+ in self or other are OK.
+ '''
+ if other.__class__ is not self.__class__:
+ return NotImplemented
+ if self.pattern != other.pattern:
+ return False
+ if self.env and other.env:
+ for k in self.env:
+ if k not in other.env:
+ continue
+ if self.env[k] != other.env[k]:
+ return False
+ if self.encoding != other.encoding:
+ return False
+ return True
+
+
+def expand(root, path, env):
+ '''Expand a given path relative to the given root,
+ using the given env to resolve variables.
+
+ This will break if the path contains wildcards.
+ '''
+ matcher = Matcher(path, env=env, root=root)
+ return str(matcher)
+
+
+class MissingEnvironment(Exception):
+ pass
+
+
+class Node(object):
+ '''Abstract base class for all nodes in parsed patterns.'''
+ def regex_pattern(self, env):
+ '''Create a regular expression fragment for this Node.'''
+ raise NotImplementedError
+
+ def expand(self, env):
+ '''Convert this node to a string with the given environment.'''
+ raise NotImplementedError
+
+
+class Pattern(list, Node):
+ def __init__(self, iterable=[]):
+ list.__init__(self, iterable)
+ self.root = getattr(iterable, 'root', None)
+ self.prefix_length = getattr(iterable, 'prefix_length', None)
+
+ def regex_pattern(self, env):
+ root = ''
+ if self.root is not None:
+ # make sure we're not hiding a full path
+ first_seg = self[0].expand(env)
+ if not os.path.isabs(first_seg):
+ root = re.escape(self.root)
+ return root + ''.join(
+ child.regex_pattern(env) for child in self
+ )
+
+ def expand(self, env, raise_missing=False):
+ root = ''
+ if self.root is not None:
+ # make sure we're not hiding a full path
+ first_seg = self[0].expand(env)
+ if not os.path.isabs(first_seg):
+ root = self.root
+ return root + ''.join(self._expand_children(env, raise_missing))
+
+ def _expand_children(self, env, raise_missing):
+ # Helper iterator to convert Exception to a stopped iterator
+ for child in self:
+ try:
+ yield child.expand(env, raise_missing=True)
+ except MissingEnvironment:
+ if raise_missing:
+ raise
+ return
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __eq__(self, other):
+ if not super(Pattern, self).__eq__(other):
+ return False
+ if other.__class__ == list:
+ # good for tests and debugging
+ return True
+ return (
+ self.root == other.root
+ and self.prefix_length == other.prefix_length
+ )
+
+
+class Literal(six.text_type, Node):
+ def regex_pattern(self, env):
+ return re.escape(self)
+
+ def expand(self, env, raise_missing=False):
+ return self
+
+
+class Variable(Node):
+ def __init__(self, name, repeat=False):
+ self.name = name
+ self.repeat = repeat
+
+ def regex_pattern(self, env):
+ if self.repeat:
+ return '(?P={})'.format(self.name)
+ return '(?P<{}>{})'.format(self.name, self._pattern_from_env(env))
+
+ def _pattern_from_env(self, env):
+ if self.name in env:
+ # make sure we match the value in the environment
+ return env[self.name].regex_pattern(self._no_cycle(env))
+ # match anything, including path segments
+ return '.+?'
+
+ def expand(self, env, raise_missing=False):
+ '''Create a string for this Variable.
+
+ This expansion happens recursively. We avoid recursion loops
+ by removing the current variable from the environment that's used
+ to expand child variable references.
+ '''
+ if self.name not in env:
+ raise MissingEnvironment
+ return env[self.name].expand(
+ self._no_cycle(env), raise_missing=raise_missing
+ )
+
+ def _no_cycle(self, env):
+ '''Remove our variable name from the environment.
+ That way, we can't create cyclic references.
+ '''
+ if self.name not in env:
+ return env
+ env = env.copy()
+ env.pop(self.name)
+ return env
+
+ def __repr__(self):
+ return 'Variable(name="{}")'.format(self.name)
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __eq__(self, other):
+ if other.__class__ is not self.__class__:
+ return False
+ return (
+ self.name == other.name
+ and self.repeat == other.repeat
+ )
+
+
+class AndroidLocale(Variable):
+ '''Subclass for Android locale code mangling.
+
+ Supports ab-rCD and b+ab+Script+DE.
+ Language and Language-Region tags get mapped to ab-rCD, more complex
+ Locale tags to b+.
+ '''
+ def __init__(self, repeat=False):
+ self.name = 'android_locale'
+ self.repeat = repeat
+
+ def _pattern_from_env(self, env):
+ android_locale = self._get_android_locale(env)
+ if android_locale is not None:
+ return re.escape(android_locale)
+ return '.+?'
+
+ def expand(self, env, raise_missing=False):
+ '''Create a string for this Variable.
+
+        This expansion happens recursively. We avoid recursion loops
+ by removing the current variable from the environment that's used
+ to expand child variable references.
+ '''
+ android_locale = self._get_android_locale(env)
+ if android_locale is None:
+ raise MissingEnvironment
+ return android_locale
+
+ def _get_android_locale(self, env):
+ if 'locale' not in env:
+ return None
+ android = bcp47 = env['locale'].expand(self._no_cycle(env))
+ # map legacy locale codes, he <-> iw, id <-> in, yi <-> ji
+ android = bcp47 = re.sub(
+ r'(he|id|yi)(?=\Z|-)',
+ lambda standard: ANDROID_LEGACY_MAP[standard.group(1)],
+ bcp47
+ )
+ if re.match(r'[a-z]{2,3}-[A-Z]{2}', bcp47):
+ android = '{}-r{}'.format(*bcp47.split('-'))
+ elif '-' in bcp47:
+ android = 'b+' + bcp47.replace('-', '+')
+ return android
+
+
+class Star(Node):
+ def __init__(self, number):
+ self.number = number
+
+ def regex_pattern(self, env):
+ return '(?P<s{}>[^/]*)'.format(self.number)
+
+ def expand(self, env, raise_missing=False):
+ return env['s%d' % self.number]
+
+ def __repr__(self):
+ return type(self).__name__
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __eq__(self, other):
+ if other.__class__ is not self.__class__:
+ return False
+ return self.number == other.number
+
+
+class Starstar(Star):
+ def __init__(self, number, suffix):
+ self.number = number
+ self.suffix = suffix
+
+ def regex_pattern(self, env):
+ return '(?P<s{}>.+{})?'.format(self.number, self.suffix)
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __eq__(self, other):
+ if not super(Starstar, self).__eq__(other):
+ return False
+ return self.suffix == other.suffix
+
+
+PATH_SPECIAL = re.compile(
+ r'(?P<starstar>(?<![^/}])\*\*(?P<suffix>/|$))'
+ r'|'
+ r'(?P<star>\*)'
+ r'|'
+ r'(?P<variable>{ *(?P<varname>[\w]+) *})'
+)
+
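+# Editorial note, not in the original source: PATH_SPECIAL tokenizes pattern
+# strings such as 'browser/**/{locale}/*.ftl' into their '**', '*' and
+# '{variable}' pieces, which PatternParser below turns into Starstar, Star
+# and Variable nodes.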
+
+class PatternParser(object):
+ def __init__(self):
+ # Not really initializing anything, just making room for our
+ # result and state members.
+ self.pattern = None
+ self._stargroup = self._cursor = None
+ self._known_vars = None
+
+ def parse(self, pattern):
+ if isinstance(pattern, Pattern):
+ return pattern
+ if isinstance(pattern, Matcher):
+ return pattern.pattern
+ # Initializing result and state
+ self.pattern = Pattern()
+ self._stargroup = itertools.count(1)
+ self._known_vars = set()
+ self._cursor = 0
+ for match in PATH_SPECIAL.finditer(pattern):
+ if match.start() > self._cursor:
+ self.pattern.append(
+ Literal(pattern[self._cursor:match.start()])
+ )
+ self.handle(match)
+ self.pattern.append(Literal(pattern[self._cursor:]))
+ if self.pattern.prefix_length is None:
+ self.pattern.prefix_length = len(self.pattern)
+ return self.pattern
+
+ def handle(self, match):
+ if match.group('variable'):
+ self.variable(match)
+ else:
+ self.wildcard(match)
+ self._cursor = match.end()
+
+ def variable(self, match):
+ varname = match.group('varname')
+ # Special case Android locale code matching.
+ # It's kinda sad, but true.
+ if varname == 'android_locale':
+ self.pattern.append(AndroidLocale(varname in self._known_vars))
+ else:
+ self.pattern.append(Variable(varname, varname in self._known_vars))
+ self._known_vars.add(varname)
+
+ def wildcard(self, match):
+ # wildcard found, stop prefix
+ if self.pattern.prefix_length is None:
+ self.pattern.prefix_length = len(self.pattern)
+ wildcard = next(self._stargroup)
+ if match.group('star'):
+ # *
+ self.pattern.append(Star(wildcard))
+ else:
+ # **
+ self.pattern.append(Starstar(wildcard, match.group('suffix')))
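+
+
+# An illustrative parse (editorial example, not in the original source):
+#   PatternParser().parse('foo/{locale}/*.ftl')
+# yields a Pattern roughly equivalent to
+#   [Literal('foo/'), Variable('locale'), Literal('/'), Star(1),
+#    Literal('.ftl')]
+# with prefix_length == 3, i.e. the prefix stops at the first wildcard.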
diff --git a/third_party/python/compare-locales/compare_locales/paths/project.py b/third_party/python/compare-locales/compare_locales/paths/project.py
new file mode 100644
index 0000000000..269b6fed9d
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/paths/project.py
@@ -0,0 +1,265 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import re
+from compare_locales import mozpath
+from .matcher import Matcher
+import six
+
+
+class ExcludeError(ValueError):
+ pass
+
+
+class ProjectConfig(object):
+ '''Abstraction of l10n project configuration data.
+ '''
+
+ def __init__(self, path):
+ self.filter_py = None # legacy filter code
+ # {
+ # 'l10n': pattern,
+ # 'reference': pattern, # optional
+ # 'locales': [], # optional
+ # 'test': [], # optional
+ # }
+ self.path = path
+ self.root = None
+ self.paths = []
+ self.rules = []
+ self.locales = None
+ # cache for all_locales, as that's not in `filter`
+ self._all_locales = None
+ self.environ = {}
+ self.children = []
+ self.excludes = []
+ self._cache = None
+
+ def same(self, other):
+ '''Equality test, ignoring locales.
+ '''
+ if other.__class__ is not self.__class__:
+ return False
+ if len(self.children) != len(other.children):
+ return False
+ for prop in ('path', 'root', 'paths', 'rules', 'environ'):
+ if getattr(self, prop) != getattr(other, prop):
+ return False
+ for this_child, other_child in zip(self.children, other.children):
+ if not this_child.same(other_child):
+ return False
+ return True
+
+ def set_root(self, basepath):
+ if self.path is None:
+ self.root = None
+ return
+ self.root = mozpath.abspath(
+ mozpath.join(mozpath.dirname(self.path), basepath)
+ )
+
+ def add_environment(self, **kwargs):
+ self.environ.update(kwargs)
+
+ def add_paths(self, *paths):
+ '''Add path dictionaries to this config.
+ The dictionaries must have a `l10n` key. For monolingual files,
+ `reference` is also required.
+ An optional key `test` is allowed to enable additional tests for this
+ path pattern.
+ '''
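+        # An illustrative path dictionary (hypothetical values, not from the
+        # original source):
+        #     {'l10n': '{l10n_base}/{locale}/browser/**',
+        #      'reference': 'browser/locales/en-US/**',
+        #      'test': ['android-dtd'],
+        #      'locales': ['de', 'it']}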
+ self._all_locales = None # clear cache
+ for d in paths:
+ rv = {
+ 'l10n': Matcher(d['l10n'], env=self.environ, root=self.root),
+ 'module': d.get('module')
+ }
+ if 'reference' in d:
+ rv['reference'] = Matcher(
+ d['reference'], env=self.environ, root=self.root
+ )
+ if 'test' in d:
+ rv['test'] = d['test']
+ if 'locales' in d:
+ rv['locales'] = d['locales'][:]
+ self.paths.append(rv)
+
+ def set_filter_py(self, filter_function):
+ '''Set legacy filter.py code.
+ Assert that no rules are set.
+ Also, normalize output already here.
+ '''
+ assert not self.rules
+
+ def filter_(module, path, entity=None):
+ try:
+ rv = filter_function(module, path, entity=entity)
+ except BaseException: # we really want to handle EVERYTHING here
+ return 'error'
+ rv = {
+ True: 'error',
+ False: 'ignore',
+ 'report': 'warning'
+ }.get(rv, rv)
+ assert rv in ('error', 'ignore', 'warning', None)
+ return rv
+ self.filter_py = filter_
+
+ def add_rules(self, *rules):
+ '''Add rules to filter on.
+ Assert that there's no legacy filter.py code hooked up.
+ '''
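+        # Shape of a rule, illustratively (hypothetical values, not from the
+        # original source):
+        #     {'path': '{l10n_base}/{locale}/**',
+        #      'key': 're:.*accesskey',
+        #      'action': 'ignore'}
+        # 'path' is compiled to a Matcher and the optional 'key' to a regular
+        # expression by _compile_rule below.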
+ assert self.filter_py is None
+ for rule in rules:
+ self.rules.extend(self._compile_rule(rule))
+
+ def add_child(self, child):
+ self._all_locales = None # clear cache
+ if child.excludes:
+ raise ExcludeError(
+ 'Included configs cannot declare their own excludes.'
+ )
+ self.children.append(child)
+
+ def exclude(self, child):
+ for config in child.configs:
+ if config.excludes:
+ raise ExcludeError(
+ 'Excluded configs cannot declare their own excludes.'
+ )
+ self.excludes.append(child)
+
+ def set_locales(self, locales, deep=False):
+ self._all_locales = None # clear cache
+ self.locales = locales
+ if not deep:
+ return
+ for child in self.children:
+ child.set_locales(locales, deep=deep)
+
+ @property
+ def configs(self):
+ 'Recursively get all configs in this project and its children'
+ yield self
+ for child in self.children:
+ for config in child.configs:
+ yield config
+
+ @property
+ def all_locales(self):
+ 'Recursively get all locales in this project and its paths'
+ if self._all_locales is None:
+ all_locales = set()
+ for config in self.configs:
+ if config.locales is not None:
+ all_locales.update(config.locales)
+ for paths in config.paths:
+ if 'locales' in paths:
+ all_locales.update(paths['locales'])
+ self._all_locales = sorted(all_locales)
+ return self._all_locales
+
+ def filter(self, l10n_file, entity=None):
+ '''Filter a localization file or entities within, according to
+ this configuration file.'''
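+        # Editorial note: results are 'error', 'warning' or 'ignore'; a None
+        # result from the rule-based _filter below is mapped to 'ignore'.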
+ if l10n_file.locale not in self.all_locales:
+ return 'ignore'
+ if self.filter_py is not None:
+ return self.filter_py(l10n_file.module, l10n_file.file,
+ entity=entity)
+ rv = self._filter(l10n_file, entity=entity)
+ if rv is None:
+ return 'ignore'
+ return rv
+
+ class FilterCache(object):
+ def __init__(self, locale):
+ self.locale = locale
+ self.rules = []
+ self.l10n_paths = []
+
+ def cache(self, locale):
+ if self._cache and self._cache.locale == locale:
+ return self._cache
+ self._cache = self.FilterCache(locale)
+ for paths in self.paths:
+ if 'locales' in paths and locale not in paths['locales']:
+ continue
+ self._cache.l10n_paths.append(paths['l10n'].with_env({
+ "locale": locale
+ }))
+ for rule in self.rules:
+ cached_rule = rule.copy()
+ cached_rule['path'] = rule['path'].with_env({
+ "locale": locale
+ })
+ self._cache.rules.append(cached_rule)
+ return self._cache
+
+ def _filter(self, l10n_file, entity=None):
+ if any(
+ exclude.filter(l10n_file) == 'error'
+ for exclude in self.excludes
+ ):
+ return
+ actions = set(
+ child._filter(l10n_file, entity=entity)
+ for child in self.children)
+ if 'error' in actions:
+ # return early if we know we'll error
+ return 'error'
+
+ cached = self.cache(l10n_file.locale)
+ if any(p.match(l10n_file.fullpath) for p in cached.l10n_paths):
+ action = 'error'
+ for rule in reversed(cached.rules):
+ if not rule['path'].match(l10n_file.fullpath):
+ continue
+ if ('key' in rule) ^ (entity is not None):
+ # key/file mismatch, not a matching rule
+ continue
+ if 'key' in rule and not rule['key'].match(entity):
+ continue
+ action = rule['action']
+ break
+ actions.add(action)
+ if 'error' in actions:
+ return 'error'
+ if 'warning' in actions:
+ return 'warning'
+ if 'ignore' in actions:
+ return 'ignore'
+
+ def _compile_rule(self, rule):
+ assert 'path' in rule
+ if isinstance(rule['path'], list):
+ for path in rule['path']:
+ _rule = rule.copy()
+ _rule['path'] = Matcher(path, env=self.environ, root=self.root)
+ for __rule in self._compile_rule(_rule):
+ yield __rule
+ return
+ if isinstance(rule['path'], six.string_types):
+ rule['path'] = Matcher(
+ rule['path'], env=self.environ, root=self.root
+ )
+ if 'key' not in rule:
+ yield rule
+ return
+ if not isinstance(rule['key'], six.string_types):
+ for key in rule['key']:
+ _rule = rule.copy()
+ _rule['key'] = key
+ for __rule in self._compile_rule(_rule):
+ yield __rule
+ return
+ rule = rule.copy()
+ key = rule['key']
+ if key.startswith('re:'):
+ key = key[3:]
+ else:
+ key = re.escape(key) + '$'
+ rule['key'] = re.compile(key)
+ yield rule
diff --git a/third_party/python/compare-locales/compare_locales/plurals.py b/third_party/python/compare-locales/compare_locales/plurals.py
new file mode 100644
index 0000000000..d316b6cf43
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/plurals.py
@@ -0,0 +1,218 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'Mapping of locales to CLDR plural categories as implemented by PluralForm.jsm'
+
+CATEGORIES_BY_INDEX = (
+ # 0 (Chinese)
+ ('other',),
+ # 1 (English)
+ ('one', 'other'),
+ # 2 (French)
+ ('one', 'other'),
+ # 3 (Latvian)
+ ('zero', 'one', 'other'),
+ # 4 (Scottish Gaelic)
+ ('one', 'two', 'few', 'other'),
+ # 5 (Romanian)
+ ('one', 'few', 'other'),
+ # 6 (Lithuanian)
+ # CLDR: one, few, many (fractions), other
+ ('one', 'other', 'few'),
+ # 7 (Russian)
+ # CLDR: one, few, many, other (fractions)
+ ('one', 'few', 'many'),
+ # 8 (Slovak)
+ # CLDR: one, few, many (fractions), other
+ ('one', 'few', 'other'),
+ # 9 (Polish)
+ # CLDR: one, few, many, other (fractions)
+ ('one', 'few', 'many'),
+ # 10 (Slovenian)
+ ('one', 'two', 'few', 'other'),
+ # 11 (Irish Gaelic)
+ ('one', 'two', 'few', 'many', 'other'),
+ # 12 (Arabic)
+ # CLDR: zero, one, two, few, many, other
+ ('one', 'two', 'few', 'many', 'other', 'zero'),
+ # 13 (Maltese)
+ ('one', 'few', 'many', 'other'),
+ # 14 (Unused)
+ # CLDR: one, other
+ ('one', 'two', 'other'),
+ # 15 (Icelandic, Macedonian)
+ ('one', 'other'),
+ # 16 (Breton)
+ ('one', 'two', 'few', 'many', 'other'),
+ # 17 (Shuar)
+ # CLDR: (missing)
+ ('zero', 'other'),
+ # 18 (Welsh),
+ ('zero', 'one', 'two', 'few', 'many', 'other'),
+ # 19 (Bosnian, Croatian, Serbian)
+ ('one', 'few', 'other'),
+)
+
+CATEGORIES_EXCEPTIONS = {
+}
+
+CATEGORIES_BY_LOCALE = {
+ 'ace': 0,
+ 'ach': 1,
+ 'af': 1,
+ 'ak': 2,
+ 'an': 1,
+ 'ar': 12,
+ 'arn': 1,
+ 'as': 1,
+ 'ast': 1,
+ 'az': 1,
+ 'be': 7,
+ 'bg': 1,
+ 'bn': 2,
+ 'bo': 0,
+ 'br': 16,
+ 'brx': 1,
+ 'bs': 19,
+ 'ca': 1,
+ 'cak': 1,
+ 'ckb': 1,
+ 'crh': 1,
+ 'cs': 8,
+ 'csb': 9,
+ 'cv': 1,
+ 'cy': 18,
+ 'da': 1,
+ 'de': 1,
+ 'dsb': 10,
+ 'el': 1,
+ 'en': 1,
+ 'eo': 1,
+ 'es': 1,
+ 'et': 1,
+ 'eu': 1,
+ 'fa': 2,
+ 'ff': 1,
+ 'fi': 1,
+ 'fr': 2,
+ 'frp': 2,
+ 'fur': 1,
+ 'fy': 1,
+ 'ga': 11,
+ 'gd': 4,
+ 'gl': 1,
+ 'gn': 1,
+ 'gu': 2,
+ 'he': 1,
+ 'hi': 2,
+ 'hr': 19,
+ 'hsb': 10,
+ 'hto': 1,
+ 'hu': 1,
+ 'hy': 1,
+ 'hye': 1,
+ 'ia': 1,
+ 'id': 0,
+ 'ilo': 0,
+ 'is': 15,
+ 'it': 1,
+ 'ja': 0,
+ 'jiv': 17,
+ 'ka': 1,
+ 'kab': 1,
+ 'kk': 1,
+ 'km': 0,
+ 'kn': 1,
+ 'ko': 0,
+ 'ks': 1,
+ 'ku': 1,
+ 'lb': 1,
+ 'lg': 1,
+ 'lij': 1,
+ 'lo': 0,
+ 'lt': 6,
+ 'ltg': 3,
+ 'lv': 3,
+ 'lus': 0,
+ 'mai': 1,
+ 'meh': 0,
+ 'mix': 0,
+ 'mk': 15,
+ 'ml': 1,
+ 'mn': 1,
+ 'mr': 1,
+ 'ms': 0,
+ 'my': 0,
+ 'nb': 1,
+ 'ne': 1,
+ 'nl': 1,
+ 'nn': 1,
+ 'nr': 1,
+ 'nso': 2,
+ 'ny': 1,
+ 'oc': 2,
+ 'or': 1,
+ 'pa': 2,
+ 'pai': 0,
+ 'pl': 9,
+ 'pt': 1,
+ 'quy': 1,
+ 'qvi': 1,
+ 'rm': 1,
+ 'ro': 5,
+ 'ru': 7,
+ 'rw': 1,
+ 'sah': 0,
+ 'sat': 1,
+ 'sc': 1,
+ 'scn': 1,
+ 'si': 1,
+ 'sk': 8,
+ 'sl': 10,
+ 'son': 1,
+ 'sq': 1,
+ 'sr': 19,
+ 'ss': 1,
+ 'st': 1,
+ 'sv': 1,
+ 'sw': 1,
+ 'szl': 9,
+    'ta': 1,
+ 'te': 1,
+ 'th': 0,
+ 'tl': 1,
+ 'tn': 1,
+ 'tr': 1,
+ 'trs': 1,
+ 'ts': 1,
+ 'tsz': 1,
+ 'uk': 7,
+ 'ur': 1,
+ 'uz': 1,
+ 've': 1,
+ 'vi': 0,
+ 'wo': 0,
+ 'xh': 1,
+ 'zam': 1,
+ 'zh-CN': 0,
+ 'zh-TW': 0,
+ 'zu': 2,
+}
+
+
+def get_plural(locale):
+ plural_form = get_plural_rule(locale)
+ if plural_form is None:
+ return None
+ return CATEGORIES_BY_INDEX[plural_form]
+
+
+def get_plural_rule(locale):
+ if locale is None:
+ return None
+ if locale in CATEGORIES_BY_LOCALE:
+ return CATEGORIES_BY_LOCALE[locale]
+ locale = locale.split('-', 1)[0]
+ return CATEGORIES_BY_LOCALE.get(locale)
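+
+
+# Editorial examples derived from the tables above (not in the original
+# source):
+#   get_plural_rule('de')    == 1
+#   get_plural_rule('de-AT') == 1  # falls back to the language subtag
+#   get_plural('cy')         == ('zero', 'one', 'two', 'few', 'many', 'other')
+#   get_plural('x-unknown')  is None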
diff --git a/third_party/python/compare-locales/compare_locales/serializer.py b/third_party/python/compare-locales/compare_locales/serializer.py
new file mode 100644
index 0000000000..60e5a93766
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/serializer.py
@@ -0,0 +1,137 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''Serialize string changes.
+
+The serialization logic is based on the cross-channel merge algorithm.
+It takes the file structure from the first file, and the localizable
+entries from the last.
+Input data is the parsed reference as a list of parser.walk(),
+the existing localized file, also a list of parser.walk(), and a dictionary
+of newly added keys and raw values.
+To remove a string from a localization, pass `None` as value for a key.
+
+The marshalling between raw values and entities is done via Entity.unwrap
+and Entity.wrap.
+
+To avoid adding English reference strings into the generated file, the
+actual entities in the reference are replaced with Placeholders, which
+are removed in a final pass over the result of merge_resources. After that,
+we also prune whitespace once more.
+'''
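+
+# A minimal usage sketch (editorial; file name, keys and values are
+# hypothetical and not part of the original source):
+#
+#     new_bytes = serialize(
+#         'strings.properties',
+#         list(reference_parser.walk()),   # reference entries
+#         list(l10n_parser.walk()),        # existing localization entries
+#         {'new.key': 'Neuer Wert', 'obsolete.key': None},
+#     )
+#
+# Passing None as a value removes 'obsolete.key' from the localization.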
+
+from codecs import encode
+import six
+
+from compare_locales.merge import merge_resources, serialize_legacy_resource
+from compare_locales.parser import getParser
+from compare_locales.parser.base import (
+ Entity,
+ PlaceholderEntity,
+ Junk,
+ Whitespace,
+)
+
+
+class SerializationNotSupportedError(ValueError):
+ pass
+
+
+def serialize(filename, reference, old_l10n, new_data):
+ '''Returns a byte string of the serialized content to use.
+
+ Input are a filename to create the right parser, a reference and
+ an existing localization, both as the result of parser.walk().
+ Finally, new_data is a dictionary of key to raw values to serialize.
+
+ Raises a SerializationNotSupportedError if we don't support the file
+ format.
+ '''
+ try:
+ parser = getParser(filename)
+ except UserWarning:
+ raise SerializationNotSupportedError(
+ 'Unsupported file format ({}).'.format(filename))
+ # create template, whitespace and all
+ placeholders = [
+ placeholder(entry)
+ for entry in reference
+ if not isinstance(entry, Junk)
+ ]
+ ref_mapping = {
+ entry.key: entry
+ for entry in reference
+ if isinstance(entry, Entity)
+ }
+ # strip obsolete strings
+ old_l10n = sanitize_old(ref_mapping.keys(), old_l10n, new_data)
+ # create new Entities
+ # .val can just be "", merge_channels doesn't need that
+ new_l10n = []
+ for key, new_raw_val in six.iteritems(new_data):
+ if new_raw_val is None or key not in ref_mapping:
+ continue
+ ref_ent = ref_mapping[key]
+ new_l10n.append(ref_ent.wrap(new_raw_val))
+
+ merged = merge_resources(
+ parser,
+ [placeholders, old_l10n, new_l10n],
+ keep_newest=False
+ )
+ pruned = prune_placeholders(merged)
+ return encode(serialize_legacy_resource(pruned), parser.encoding)
+
+
+def sanitize_old(known_keys, old_l10n, new_data):
+ """Strip Junk and replace obsolete messages with placeholders.
+ If new_data has `None` as a value, strip the existing translation.
+ Use placeholders generously, so that we can rely on `prune_placeholders`
+ to find their associated comments and remove them, too.
+ """
+
+ def should_placeholder(entry):
+ # If entry is an Entity, check if it's obsolete
+ # or marked to be removed.
+ if not isinstance(entry, Entity):
+ return False
+ if entry.key not in known_keys:
+ return True
+ return entry.key in new_data and new_data[entry.key] is None
+
+ return [
+ placeholder(entry)
+ if should_placeholder(entry)
+ else entry
+ for entry in old_l10n
+ if not isinstance(entry, Junk)
+ ]
+
+
+def placeholder(entry):
+ if isinstance(entry, Entity):
+ return PlaceholderEntity(entry.key)
+ return entry
+
+
+def prune_placeholders(entries):
+ pruned = [
+ entry for entry in entries
+ if not isinstance(entry, PlaceholderEntity)
+ ]
+
+ def prune_whitespace(acc, entity):
+ if len(acc) and isinstance(entity, Whitespace):
+ prev_entity = acc[-1]
+
+ if isinstance(prev_entity, Whitespace):
+ # Prefer the longer whitespace.
+ if len(entity.all) > len(prev_entity.all):
+ acc[-1] = entity
+ return acc
+
+ acc.append(entity)
+ return acc
+
+ return six.moves.reduce(prune_whitespace, pruned, [])
diff --git a/third_party/python/compare-locales/compare_locales/tests/__init__.py b/third_party/python/compare-locales/compare_locales/tests/__init__.py
new file mode 100644
index 0000000000..8e4df17961
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/__init__.py
@@ -0,0 +1,82 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''Mixins for parser tests.
+'''
+
+from __future__ import absolute_import
+
+from pkg_resources import resource_string
+import re
+import unittest
+
+from compare_locales import parser
+from compare_locales.checks import getChecker
+import six
+from six.moves import zip_longest
+
+
+class ParserTestMixin():
+ '''Utility methods used by the parser tests.
+ '''
+ filename = None
+
+ def setUp(self):
+ '''Create a parser for this test.
+ '''
+ self.parser = parser.getParser(self.filename)
+
+ def tearDown(self):
+ 'tear down this test'
+ del self.parser
+
+ def resource(self, name):
+ testcontent = resource_string(__name__, 'data/' + name)
+ # fake universal line endings
+ testcontent = re.sub(b'\r\n?', lambda m: b'\n', testcontent)
+ return testcontent
+
+ def _test(self, unicode_content, refs):
+ '''Helper to test the parser.
+ Compares the result of parsing content with the given list
+ of reference keys and values.
+ '''
+ self.parser.readUnicode(unicode_content)
+ entities = list(self.parser.walk())
+ for entity, ref in zip_longest(entities, refs):
+ self.assertTrue(entity,
+ 'excess reference entity ' + six.text_type(ref))
+ self.assertTrue(ref,
+ 'excess parsed entity ' + six.text_type(entity))
+ if isinstance(entity, parser.Entity):
+ self.assertEqual(entity.key, ref[0])
+ self.assertEqual(entity.val, ref[1])
+ if len(ref) == 3:
+ self.assertIn(ref[2], entity.pre_comment.val)
+ else:
+ self.assertIsInstance(entity, ref[0])
+ self.assertIn(ref[1], entity.all)
+
+
+class BaseHelper(unittest.TestCase):
+ file = None
+ refContent = None
+
+ def setUp(self):
+ p = parser.getParser(self.file.file)
+ p.readContents(self.refContent)
+ self.refList = p.parse()
+
+ def _test(self, content, refWarnOrErrors):
+ p = parser.getParser(self.file.file)
+ p.readContents(content)
+ l10n = [e for e in p]
+ assert len(l10n) == 1
+ l10n = l10n[0]
+ checker = getChecker(self.file)
+ if checker.needs_reference:
+ checker.set_reference(self.refList)
+ ref = self.refList[l10n.key]
+ found = tuple(checker.check(ref, l10n))
+ self.assertEqual(found, refWarnOrErrors)
diff --git a/third_party/python/compare-locales/compare_locales/tests/android/__init__.py b/third_party/python/compare-locales/compare_locales/tests/android/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/android/__init__.py
diff --git a/third_party/python/compare-locales/compare_locales/tests/android/test_checks.py b/third_party/python/compare-locales/compare_locales/tests/android/test_checks.py
new file mode 100644
index 0000000000..382a7f8bdb
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/android/test_checks.py
@@ -0,0 +1,344 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+from compare_locales.tests import BaseHelper
+from compare_locales.paths import File
+
+
+ANDROID_WRAPPER = b'''<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="foo">%s</string>
+</resources>
+'''
+
+
+class SimpleStringsTest(BaseHelper):
+ file = File('values/strings.xml', 'values/strings.xml')
+ refContent = ANDROID_WRAPPER % b'plain'
+
+ def test_simple_string(self):
+ self._test(
+ ANDROID_WRAPPER % b'foo',
+ tuple()
+ )
+
+ def test_empty_string(self):
+ self._test(
+ ANDROID_WRAPPER % b'',
+ tuple()
+ )
+
+ def test_single_cdata(self):
+ self._test(
+ ANDROID_WRAPPER % b'<![CDATA[text]]>',
+ tuple()
+ )
+ self._test(
+ ANDROID_WRAPPER % b'<![CDATA[\n text\n ]]>',
+ tuple()
+ )
+
+ def test_mix_cdata(self):
+ self._test(
+ ANDROID_WRAPPER % b'<![CDATA[text]]> with <![CDATA[cdatas]]>',
+ (
+ (
+ "error",
+ 0,
+ "Only plain text allowed, "
+ "or one CDATA surrounded by whitespace",
+ "android"
+ ),
+ )
+ )
+
+ def test_element_fails(self):
+ self._test(
+ ANDROID_WRAPPER % b'one<br/>two',
+ (
+ (
+ "error",
+ 0,
+ "Only plain text allowed, "
+ "or one CDATA surrounded by whitespace",
+ "android"
+ ),
+ )
+ )
+
+ def test_bad_encoding(self):
+ self._test(
+ ANDROID_WRAPPER % 'touché'.encode('latin-1'),
+ (
+ (
+ "warning",
+ 24,
+ "\ufffd in: foo",
+ "encodings"
+ ),
+ )
+ )
+
+
+class QuotesTest(BaseHelper):
+ file = File('values/strings.xml', 'values/strings.xml')
+ refContent = ANDROID_WRAPPER % b'plain'
+
+ def test_straightquotes(self):
+ self._test(
+ ANDROID_WRAPPER % b'""',
+ (
+ (
+ "error",
+ 0,
+ "Double straight quotes not allowed",
+ "android"
+ ),
+ )
+ )
+ self._test(
+ ANDROID_WRAPPER % b'"some"',
+ tuple()
+ )
+ self._test(
+ ANDROID_WRAPPER % b'some\\"',
+ tuple()
+ )
+ self._test(
+ ANDROID_WRAPPER % b'some"',
+ tuple()
+ )
+ self._test(
+ ANDROID_WRAPPER % b'some',
+ tuple()
+ )
+ self._test(
+ ANDROID_WRAPPER % b'some""',
+ (
+ (
+ "error",
+ 4,
+ "Double straight quotes not allowed",
+ "android"
+ ),
+ )
+ )
+
+ def test_apostrophes(self):
+ self._test(
+ ANDROID_WRAPPER % b'''"some'apos"''',
+ tuple()
+ )
+ self._test(
+ ANDROID_WRAPPER % b'''some\\'apos''',
+ tuple()
+ )
+ self._test(
+ ANDROID_WRAPPER % b'''some'apos''',
+ (
+ (
+ "error",
+ 4,
+ "Apostrophe must be escaped",
+ "android"
+ ),
+ )
+ )
+
+
+class TranslatableTest(BaseHelper):
+ file = File('values/strings.xml', 'values/strings.xml')
+ refContent = (ANDROID_WRAPPER % b'plain').replace(
+ b'name="foo"',
+ b'translatable="false" name="foo"')
+
+ def test_translatable(self):
+ self._test(
+ ANDROID_WRAPPER % b'"some"',
+ (
+ (
+ "error",
+ 0,
+ "strings must be translatable",
+ "android"
+ ),
+ )
+ )
+
+
+class AtStringTest(BaseHelper):
+ file = File('values/strings.xml', 'values/strings.xml')
+ refContent = (ANDROID_WRAPPER % b'@string/foo')
+
+ def test_translatable(self):
+ self._test(
+ ANDROID_WRAPPER % b'"some"',
+ (
+ (
+ "warning",
+ 0,
+ "strings must be translatable",
+ "android"
+ ),
+ )
+ )
+
+
+class PrintfSTest(BaseHelper):
+ file = File('values/strings.xml', 'values/strings.xml')
+ refContent = ANDROID_WRAPPER % b'%s'
+
+ def test_match(self):
+ self._test(
+ ANDROID_WRAPPER % b'"%s"',
+ tuple()
+ )
+ self._test(
+ ANDROID_WRAPPER % b'"%1$s"',
+ tuple()
+ )
+ self._test(
+ ANDROID_WRAPPER % b'"$s %1$s"',
+ tuple()
+ )
+ self._test(
+ ANDROID_WRAPPER % b'"$1$s %1$s"',
+ tuple()
+ )
+
+ def test_mismatch(self):
+ self._test(
+ ANDROID_WRAPPER % b'"%d"',
+ (
+ (
+ "error",
+ 0,
+ "Mismatching formatter",
+ "android"
+ ),
+ )
+ )
+ self._test(
+ ANDROID_WRAPPER % b'"%S"',
+ (
+ (
+ "error",
+ 0,
+ "Mismatching formatter",
+ "android"
+ ),
+ )
+ )
+
+ def test_off_position(self):
+ self._test(
+ ANDROID_WRAPPER % b'%2$s',
+ (
+ (
+ "error",
+ 0,
+ "Formatter %2$s not found in reference",
+ "android"
+ ),
+ )
+ )
+
+
+class PrintfCapSTest(BaseHelper):
+ file = File('values/strings.xml', 'values/strings.xml')
+ refContent = ANDROID_WRAPPER % b'%S'
+
+ def test_match(self):
+ self._test(
+ ANDROID_WRAPPER % b'"%S"',
+ tuple()
+ )
+
+ def test_mismatch(self):
+ self._test(
+ ANDROID_WRAPPER % b'"%s"',
+ (
+ (
+ "error",
+ 0,
+ "Mismatching formatter",
+ "android"
+ ),
+ )
+ )
+ self._test(
+ ANDROID_WRAPPER % b'"%d"',
+ (
+ (
+ "error",
+ 0,
+ "Mismatching formatter",
+ "android"
+ ),
+ )
+ )
+
+
+class PrintfDTest(BaseHelper):
+ file = File('values/strings.xml', 'values/strings.xml')
+ refContent = ANDROID_WRAPPER % b'%d'
+
+ def test_match(self):
+ self._test(
+ ANDROID_WRAPPER % b'"%d"',
+ tuple()
+ )
+ self._test(
+ ANDROID_WRAPPER % b'"%1$d"',
+ tuple()
+ )
+ self._test(
+ ANDROID_WRAPPER % b'"$d %1$d"',
+ tuple()
+ )
+ self._test(
+ ANDROID_WRAPPER % b'"$1$d %1$d"',
+ tuple()
+ )
+
+ def test_mismatch(self):
+ self._test(
+ ANDROID_WRAPPER % b'"%s"',
+ (
+ (
+ "error",
+ 0,
+ "Mismatching formatter",
+ "android"
+ ),
+ )
+ )
+ self._test(
+ ANDROID_WRAPPER % b'"%S"',
+ (
+ (
+ "error",
+ 0,
+ "Mismatching formatter",
+ "android"
+ ),
+ )
+ )
+
+ def test_off_position(self):
+ self._test(
+ ANDROID_WRAPPER % b'%2$d',
+ (
+ (
+ "error",
+ 0,
+ "Formatter %2$d not found in reference",
+ "android"
+ ),
+ )
+ )
diff --git a/third_party/python/compare-locales/compare_locales/tests/android/test_merge.py b/third_party/python/compare-locales/compare_locales/tests/android/test_merge.py
new file mode 100644
index 0000000000..32e13a7439
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/android/test_merge.py
@@ -0,0 +1,82 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from compare_locales.merge import merge_channels
+
+
+class TestMerge(unittest.TestCase):
+ name = "strings.xml"
+
+ def test_no_changes(self):
+ channels = (b'''\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <!-- bar -->
+ <string name="foo">value</string>
+</resources>
+''', b'''\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <!-- bar -->
+ <string name="foo">value</string>
+</resources>
+''')
+ self.assertEqual(
+ merge_channels(self.name, channels), b'''\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <!-- bar -->
+ <string name="foo">value</string>
+</resources>
+''')
+
+ def test_a_and_b(self):
+ channels = (b'''\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <!-- Foo -->
+ <string name="foo">value</string>
+</resources>
+''', b'''\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <!-- Bar -->
+ <string name="bar">other value</string>
+</resources>
+''')
+ self.assertEqual(
+ merge_channels(self.name, channels), b'''\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <!-- Bar -->
+ <string name="bar">other value</string>
+ <!-- Foo -->
+ <string name="foo">value</string>
+</resources>
+''')
+
+ def test_namespaces(self):
+ channels = (
+ b'''\
+<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:ns1="urn:ns1">
+ <string ns1:one="test">string</string>
+</resources>
+''',
+ b'''\
+<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:ns2="urn:ns2">
+ <string ns2:two="test">string</string>
+</resources>
+'''
+ )
+ self.assertEqual(
+ merge_channels(self.name, channels), b'''\
+<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:ns2="urn:ns2" xmlns:ns1="urn:ns1">
+ <string ns2:two="test">string</string>
+ <string ns1:one="test">string</string>
+</resources>
+''')
diff --git a/third_party/python/compare-locales/compare_locales/tests/android/test_parser.py b/third_party/python/compare-locales/compare_locales/tests/android/test_parser.py
new file mode 100644
index 0000000000..f5949a1b86
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/android/test_parser.py
@@ -0,0 +1,128 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import unittest
+
+from compare_locales.tests import ParserTestMixin
+from compare_locales.parser import (
+ Comment,
+ Junk,
+ Whitespace,
+)
+from compare_locales.parser.android import DocumentWrapper
+
+
+class TestAndroidParser(ParserTestMixin, unittest.TestCase):
+ maxDiff = None
+ filename = 'strings.xml'
+
+ def test_simple_string(self):
+ source = '''\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <!-- bar -->
+ <string name="foo">value</string>
+ <!-- bar -->
+ <!-- foo -->
+ <string name="bar">multi-line comment</string>
+
+ <!-- standalone -->
+
+ <string name="baz">so lonely</string>
+</resources>
+'''
+ self._test(
+ source,
+ (
+ (DocumentWrapper, '<?xml'),
+ (DocumentWrapper, '>'),
+ (Whitespace, '\n '),
+ ('foo', 'value', 'bar'),
+ (Whitespace, '\n'),
+ ('bar', 'multi-line comment', 'bar\nfoo'),
+ (Whitespace, '\n '),
+ (Comment, 'standalone'),
+ (Whitespace, '\n '),
+ ('baz', 'so lonely'),
+ (Whitespace, '\n'),
+ (DocumentWrapper, '</resources>')
+ )
+ )
+
+ def test_bad_doc(self):
+ source = '''\
+<?xml version="1.0" ?>
+<not-a-resource/>
+'''
+ self._test(
+ source,
+ (
+ (Junk, '<not-a-resource/>'),
+ )
+ )
+
+ def test_bad_elements(self):
+ source = '''\
+<?xml version="1.0" ?>
+<resources>
+ <string name="first">value</string>
+ <non-string name="bad">value</non-string>
+ <string name="mid">value</string>
+ <string nomine="dom">value</string>
+ <string name="last">value</string>
+</resources>
+'''
+ self._test(
+ source,
+ (
+ (DocumentWrapper, '<?xml'),
+ (DocumentWrapper, '>'),
+ (Whitespace, '\n '),
+ ('first', 'value'),
+ (Whitespace, '\n '),
+ (Junk, '<non-string name="bad">'),
+ (Whitespace, '\n '),
+ ('mid', 'value'),
+ (Whitespace, '\n '),
+ (Junk, '<string nomine="dom">'),
+ (Whitespace, '\n '),
+ ('last', 'value'),
+ (Whitespace, '\n'),
+ (DocumentWrapper, '</resources>')
+ )
+ )
+
+ def test_xml_parse_error(self):
+ source = 'no xml'
+ self._test(
+ source,
+ (
+ (Junk, 'no xml'),
+ )
+ )
+
+ def test_empty_strings(self):
+ source = '''\
+<?xml version="1.0" ?>
+<resources>
+ <string name="one"></string>
+ <string name="two"/>
+</resources>
+'''
+ self._test(
+ source,
+ (
+ (DocumentWrapper, '<?xml'),
+ (DocumentWrapper, '>'),
+ (Whitespace, '\n '),
+ ('one', ''),
+ (Whitespace, '\n '),
+ ('two', ''),
+ (Whitespace, '\n'),
+ (DocumentWrapper, '</resources>')
+ )
+ )
diff --git a/third_party/python/compare-locales/compare_locales/tests/data/bug121341.properties b/third_party/python/compare-locales/compare_locales/tests/data/bug121341.properties
new file mode 100644
index 0000000000..b45fc9698c
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/data/bug121341.properties
@@ -0,0 +1,68 @@
+# simple check
+1=abc
+# test whitespace trimming in key and value
+ 2 = xy
+# test parsing of escaped values
+3 = \u1234\t\r\n\uAB\
+\u1\n
+# test multiline properties
+4 = this is \
+multiline property
+5 = this is \
+ another multiline property
+# property with DOS EOL
+6 = test\u0036
+# test multiline property with DOS EOL
+7 = yet another multi\
+ line propery
+# trimming should not trim escaped whitespaces
+8 = \ttest5\u0020
+# another variant of #8
+9 = \ test6\t
+# test UTF-8 encoded property/value
+10aሴb = c췯d
+# next property should test unicode escaping at the boundary of parsing buffer
+# buffer size is expected to be 4096 so add comments to get to this offset
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+###############################################################################
+11 = \uABCD
diff --git a/third_party/python/compare-locales/compare_locales/tests/data/test.properties b/third_party/python/compare-locales/compare_locales/tests/data/test.properties
new file mode 100644
index 0000000000..19cae97028
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/data/test.properties
@@ -0,0 +1,14 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+1=1
+ 2=2
+3 =3
+ 4 =4
+5=5
+6= 6
+7=7
+8= 8
+# this is a comment
+9=this is the first part of a continued line \
+ and here is the 2nd part
diff --git a/third_party/python/compare-locales/compare_locales/tests/data/triple-license.dtd b/third_party/python/compare-locales/compare_locales/tests/data/triple-license.dtd
new file mode 100644
index 0000000000..4a28b17a6f
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/data/triple-license.dtd
@@ -0,0 +1,38 @@
+<!-- ***** BEGIN LICENSE BLOCK *****
+#if 0
+ - Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ -
+ - The contents of this file are subject to the Mozilla Public License Version
+ - 1.1 (the "License"); you may not use this file except in compliance with
+ - the License. You may obtain a copy of the License at
+ - http://www.mozilla.org/MPL/
+ -
+ - Software distributed under the License is distributed on an "AS IS" basis,
+ - WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ - for the specific language governing rights and limitations under the
+ - License.
+ -
+ - The Original Code is mozilla.org Code.
+ -
+ - The Initial Developer of the Original Code is dummy.
+ - Portions created by the Initial Developer are Copyright (C) 2005
+ - the Initial Developer. All Rights Reserved.
+ -
+ - Contributor(s):
+ -
+ - Alternatively, the contents of this file may be used under the terms of
+ - either the GNU General Public License Version 2 or later (the "GPL"), or
+ - the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ - in which case the provisions of the GPL or the LGPL are applicable instead
+ - of those above. If you wish to allow use of your version of this file only
+ - under the terms of either the GPL or the LGPL, and not to allow others to
+ - use your version of this file under the terms of the MPL, indicate your
+ - decision by deleting the provisions above and replace them with the notice
+ - and other provisions required by the LGPL or the GPL. If you do not delete
+ - the provisions above, a recipient may use your version of this file under
+ - the terms of any one of the MPL, the GPL or the LGPL.
+ -
+#endif
+ - ***** END LICENSE BLOCK ***** -->
+
+<!ENTITY foo "value">
diff --git a/third_party/python/compare-locales/compare_locales/tests/dtd/__init__.py b/third_party/python/compare-locales/compare_locales/tests/dtd/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/dtd/__init__.py
diff --git a/third_party/python/compare-locales/compare_locales/tests/dtd/test_checks.py b/third_party/python/compare-locales/compare_locales/tests/dtd/test_checks.py
new file mode 100644
index 0000000000..5967c016d9
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/dtd/test_checks.py
@@ -0,0 +1,335 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import unittest
+
+from compare_locales.checks import getChecker
+from compare_locales.parser import getParser, Parser, DTDEntity
+from compare_locales.paths import File
+from compare_locales.tests import BaseHelper
+import six
+from six.moves import range
+
+
+class TestDTDs(BaseHelper):
+ file = File('foo.dtd', 'foo.dtd')
+ refContent = b'''<!ENTITY foo "This is &apos;good&apos;">
+<!ENTITY width "10ch">
+<!ENTITY style "width: 20ch; height: 280px;">
+<!ENTITY minStyle "min-height: 50em;">
+<!ENTITY ftd "0">
+<!ENTITY formatPercent "This is 100&#037; correct">
+<!ENTITY some.key "K">
+'''
+
+ def testWarning(self):
+ self._test(b'''<!ENTITY foo "This is &not; good">
+''',
+ (('warning', (0, 0), 'Referencing unknown entity `not`',
+ 'xmlparse'),))
+ # make sure we only handle translated entity references
+ self._test('''<!ENTITY foo "This is &ƞǿŧ; good">
+'''.encode('utf-8'),
+ (('warning', (0, 0), 'Referencing unknown entity `ƞǿŧ`',
+ 'xmlparse'),))
+
+ def testErrorFirstLine(self):
+ self._test(b'''<!ENTITY foo "This is </bad> stuff">
+''',
+ (('error', (1, 10), 'mismatched tag', 'xmlparse'),))
+
+ def testErrorSecondLine(self):
+ self._test(b'''<!ENTITY foo "This is
+ </bad>
+stuff">
+''',
+ (('error', (2, 4), 'mismatched tag', 'xmlparse'),))
+
+ def testKeyErrorSingleAmpersand(self):
+ self._test(b'''<!ENTITY some.key "&">
+''',
+ (('error', (1, 1), 'not well-formed (invalid token)',
+ 'xmlparse'),))
+
+ def testXMLEntity(self):
+ self._test(b'''<!ENTITY foo "This is &quot;good&quot;">
+''',
+ tuple())
+
+ def testPercentEntity(self):
+ self._test(b'''<!ENTITY formatPercent "Another 100&#037;">
+''',
+ tuple())
+ self._test(b'''<!ENTITY formatPercent "Bad 100% should fail">
+''',
+ (('error', (0, 32), 'not well-formed (invalid token)',
+ 'xmlparse'),))
+
+ def testNoNumber(self):
+ self._test(b'''<!ENTITY ftd "foo">''',
+ (('warning', 0, 'reference is a number', 'number'),))
+
+ def testNoLength(self):
+ self._test(b'''<!ENTITY width "15miles">''',
+ (('error', 0, 'reference is a CSS length', 'css'),))
+
+ def testNoStyle(self):
+ self._test(b'''<!ENTITY style "15ch">''',
+ (('error', 0, 'reference is a CSS spec', 'css'),))
+ self._test(b'''<!ENTITY style "junk">''',
+ (('error', 0, 'reference is a CSS spec', 'css'),))
+
+ def testStyleWarnings(self):
+ self._test(b'''<!ENTITY style "width:15ch">''',
+ (('warning', 0, 'height only in reference', 'css'),))
+ self._test(b'''<!ENTITY style "width:15em;height:200px;">''',
+ (('warning', 0, "units for width don't match (em != ch)",
+ 'css'),))
+
+ def testNoWarning(self):
+ self._test(b'''<!ENTITY width "12em">''', tuple())
+ self._test(b'''<!ENTITY style "width:12ch;height:200px;">''', tuple())
+ self._test(b'''<!ENTITY ftd "0">''', tuple())
+
+ def test_bad_encoding(self):
+ self._test(
+ '<!ENTITY foo "touché">'.encode('latin-1'),
+ (
+ (
+ "warning",
+ 19,
+ "\ufffd in: foo",
+ "encodings"
+ ),
+ )
+ )
+
+
+class TestEntitiesInDTDs(BaseHelper):
+ file = File('foo.dtd', 'foo.dtd')
+ refContent = b'''<!ENTITY short "This is &brandShortName;">
+<!ENTITY shorter "This is &brandShorterName;">
+<!ENTITY ent.start "Using &brandShorterName; start to">
+<!ENTITY ent.end " end">
+'''
+
+ def testOK(self):
+ self._test(b'''<!ENTITY ent.start "Mit &brandShorterName;">''',
+ tuple())
+
+ def testMismatch(self):
+ self._test(b'''<!ENTITY ent.start "Mit &brandShortName;">''',
+ (('warning', (0, 0),
+ 'Entity brandShortName referenced, '
+ 'but brandShorterName used in context',
+ 'xmlparse'),))
+
+ def testAcross(self):
+ self._test(b'''<!ENTITY ent.end "Mit &brandShorterName;">''',
+ tuple())
+
+ def testAcrossWithMismatch(self):
+ '''If we could tell that ent.start and ent.end are one string,
+ we should warn. Sadly, we can't, so this goes without warning.'''
+ self._test(b'''<!ENTITY ent.end "Mit &brandShortName;">''',
+ tuple())
+
+ def testUnknownWithRef(self):
+ self._test(b'''<!ENTITY ent.start "Mit &foopy;">''',
+ (('warning',
+ (0, 0),
+ 'Referencing unknown entity `foopy` '
+ '(brandShorterName used in context, '
+ 'brandShortName known)',
+ 'xmlparse'),))
+
+ def testUnknown(self):
+ self._test(b'''<!ENTITY ent.end "Mit &foopy;">''',
+ (('warning',
+ (0, 0),
+ 'Referencing unknown entity `foopy`'
+ ' (brandShortName, brandShorterName known)',
+ 'xmlparse'),))
+
+
+class TestAndroid(unittest.TestCase):
+ """Test Android checker
+
+ Make sure we're hitting our extra rules only if
+ we're passing in a DTD file in the embedding/android module.
+ """
+ apos_msg = "Apostrophes in Android DTDs need escaping with \\' or " + \
+ "\\u0027, or use \u2019, or put string in quotes."
+ quot_msg = "Quotes in Android DTDs need escaping with \\\" or " + \
+ "\\u0022, or put string in apostrophes."
+
+ def getNext(self, v):
+ ctx = Parser.Context(v)
+ return DTDEntity(
+ ctx, None, None, (0, len(v)), (), (0, len(v)))
+
+ def getDTDEntity(self, v):
+ if isinstance(v, six.binary_type):
+ v = v.decode('utf-8')
+ v = v.replace('"', '&quot;')
+ ctx = Parser.Context('<!ENTITY foo "%s">' % v)
+ return DTDEntity(
+ ctx, None, None, (0, len(v) + 16), (9, 12), (14, len(v) + 14))
+
+ def test_android_dtd(self):
+ """Testing the actual android checks. The logic is involved,
+ so this is a lot of nitty gritty detail tests.
+ """
+ f = File("embedding/android/strings.dtd", "strings.dtd",
+ "embedding/android")
+ checker = getChecker(f, extra_tests=['android-dtd'])
+ # good string
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # dtd warning
+ l10n = self.getDTDEntity("plain localized string &ref;")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('warning', (0, 0),
+ 'Referencing unknown entity `ref`', 'xmlparse'),))
+ # no report on stray ampersand or quote, if not completely quoted
+ for i in range(3):
+ # make sure we're catching unescaped apostrophes,
+ # try 0..5 backticks
+            # try 0..5 backslashes
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 2*i, self.apos_msg, 'android'),))
+ l10n = self.getDTDEntity("\\"*(2*i + 1) + "'")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # make sure we don't report if apos string is quoted
+ l10n = self.getDTDEntity('"' + "\\"*(2*i) + "'\"")
+ tpl = tuple(checker.check(ref, l10n))
+ self.assertEqual(tpl, (),
+ "`%s` shouldn't fail but got %s"
+ % (l10n.val, str(tpl)))
+ l10n = self.getDTDEntity('"' + "\\"*(2*i+1) + "'\"")
+ tpl = tuple(checker.check(ref, l10n))
+ self.assertEqual(tpl, (),
+ "`%s` shouldn't fail but got %s"
+ % (l10n.val, str(tpl)))
+            # make sure we're catching unescaped quotes, try 0..5 backslashes
+ l10n = self.getDTDEntity("\\"*(2*i) + "\"")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 2*i, self.quot_msg, 'android'),))
+ l10n = self.getDTDEntity("\\"*(2*i + 1) + "'")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # make sure we don't report if quote string is single quoted
+ l10n = self.getDTDEntity("'" + "\\"*(2*i) + "\"'")
+ tpl = tuple(checker.check(ref, l10n))
+ self.assertEqual(tpl, (),
+ "`%s` shouldn't fail but got %s" %
+ (l10n.val, str(tpl)))
+ l10n = self.getDTDEntity('"' + "\\"*(2*i+1) + "'\"")
+ tpl = tuple(checker.check(ref, l10n))
+ self.assertEqual(tpl, (),
+ "`%s` shouldn't fail but got %s" %
+ (l10n.val, str(tpl)))
+ # check for mixed quotes and ampersands
+ l10n = self.getDTDEntity("'\"")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 0, self.apos_msg, 'android'),
+ ('error', 1, self.quot_msg, 'android')))
+ l10n = self.getDTDEntity("''\"'")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 1, self.apos_msg, 'android'),))
+ l10n = self.getDTDEntity('"\'""')
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 2, self.quot_msg, 'android'),))
+
+ # broken unicode escape
+ l10n = self.getDTDEntity(b"Some broken \u098 unicode")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 12, 'truncated \\uXXXX escape',
+ 'android'),))
+ # broken unicode escape, try to set the error off
+ l10n = self.getDTDEntity("\u9690"*14+"\\u006"+" "+"\\u0064")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 14, 'truncated \\uXXXX escape',
+ 'android'),))
+
+ def test_android_prop(self):
+ f = File("embedding/android/strings.properties", "strings.properties",
+ "embedding/android")
+ checker = getChecker(f, extra_tests=['android-dtd'])
+ # good plain string
+ ref = self.getNext("plain string")
+ l10n = self.getNext("plain localized string")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # no dtd warning
+ ref = self.getNext("plain string")
+ l10n = self.getNext("plain localized string &ref;")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # no report on stray ampersand
+ ref = self.getNext("plain string")
+ l10n = self.getNext("plain localized string with apos: '")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # report on bad printf
+ ref = self.getNext("string with %s")
+ l10n = self.getNext("string with %S")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 0, 'argument 1 `S` should be `s`',
+ 'printf'),))
+
+ def test_non_android_dtd(self):
+ f = File("browser/strings.dtd", "strings.dtd", "browser")
+ checker = getChecker(f)
+ # good string
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # dtd warning
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string &ref;")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('warning', (0, 0),
+ 'Referencing unknown entity `ref`', 'xmlparse'),))
+ # no report on stray ampersand
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string with apos: '")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+
+ def test_entities_across_dtd(self):
+ f = File("browser/strings.dtd", "strings.dtd", "browser")
+ p = getParser(f.file)
+ p.readContents(b'<!ENTITY other "some &good.ref;">')
+ ref = p.parse()
+ checker = getChecker(f)
+ checker.set_reference(ref)
+ # good string
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # dtd warning
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string &ref;")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('warning', (0, 0),
+ 'Referencing unknown entity `ref` (good.ref known)',
+ 'xmlparse'),))
+ # no report on stray ampersand
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string with &good.ref;")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/compare-locales/compare_locales/tests/dtd/test_merge.py b/third_party/python/compare-locales/compare_locales/tests/dtd/test_merge.py
new file mode 100644
index 0000000000..e1db766e94
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/dtd/test_merge.py
@@ -0,0 +1,133 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from compare_locales.merge import merge_channels
+
+
+class TestMergeDTD(unittest.TestCase):
+ name = "foo.dtd"
+ maxDiff = None
+
+ def test_no_changes(self):
+ channels = (b"""
+<!ENTITY foo "Foo 1">
+""", b"""
+<!ENTITY foo "Foo 2">
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+<!ENTITY foo "Foo 1">
+""")
+
+ def test_trailing_whitespace(self):
+ channels = (b"""
+<!ENTITY foo "Foo 1">
+""", b"""
+<!ENTITY foo "Foo 2"> \n""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+<!ENTITY foo "Foo 1"> \n""")
+
+ def test_browser_dtd(self):
+ channels = (b"""\
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+
+<!-- LOCALIZATION NOTE : FILE This file contains the browser main menu ... -->
+<!-- LOCALIZATION NOTE : FILE Do not translate commandkeys -->
+
+<!-- LOCALIZATION NOTE (mainWindow.titlemodifier) : DONT_TRANSLATE -->
+<!ENTITY mainWindow.titlemodifier "&brandFullName;">
+<!-- LOCALIZATION NOTE (mainWindow.separator): DONT_TRANSLATE -->
+<!ENTITY mainWindow.separator " - ">
+<!-- LOCALIZATION NOTE (mainWindow.privatebrowsing2): This will be appended ...
+ inside the ... -->
+<!ENTITY mainWindow.privatebrowsing2 "(Private Browsing)">
+""", b"""\
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+
+<!-- LOCALIZATION NOTE : FILE This file contains the browser main menu ... -->
+<!-- LOCALIZATION NOTE : FILE Do not translate commandkeys -->
+
+<!-- LOCALIZATION NOTE (mainWindow.title): DONT_TRANSLATE -->
+<!ENTITY mainWindow.title "&brandFullName;">
+<!-- LOCALIZATION NOTE (mainWindow.titlemodifier) : DONT_TRANSLATE -->
+<!ENTITY mainWindow.titlemodifier "&brandFullName;">
+<!-- LOCALIZATION NOTE (mainWindow.privatebrowsing): This will be appended ...
+ inside the ... -->
+<!ENTITY mainWindow.privatebrowsing "(Private Browsing)">
+""")
+
+ self.assertMultiLineEqual(
+ merge_channels(self.name, channels).decode("utf-8"), """\
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+
+<!-- LOCALIZATION NOTE : FILE This file contains the browser main menu ... -->
+<!-- LOCALIZATION NOTE : FILE Do not translate commandkeys -->
+
+<!-- LOCALIZATION NOTE (mainWindow.title): DONT_TRANSLATE -->
+<!ENTITY mainWindow.title "&brandFullName;">
+
+<!-- LOCALIZATION NOTE (mainWindow.titlemodifier) : DONT_TRANSLATE -->
+<!ENTITY mainWindow.titlemodifier "&brandFullName;">
+<!-- LOCALIZATION NOTE (mainWindow.privatebrowsing): This will be appended ...
+ inside the ... -->
+<!ENTITY mainWindow.privatebrowsing "(Private Browsing)">
+<!-- LOCALIZATION NOTE (mainWindow.separator): DONT_TRANSLATE -->
+<!ENTITY mainWindow.separator " - ">
+<!-- LOCALIZATION NOTE (mainWindow.privatebrowsing2): This will be appended ...
+ inside the ... -->
+<!ENTITY mainWindow.privatebrowsing2 "(Private Browsing)">
+""")
+
+ def test_aboutServiceWorkers_dtd(self):
+ channels = (b"""\
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+
+<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
+<!ENTITY title "About Service Workers">
+<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
+<!ENTITY maintitle "Registered Service Workers">
+<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
+<!ENTITY warning_not_enabled "Service Workers are not enabled.">
+<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
+<!ENTITY warning_no_serviceworkers "No Service Workers registered.">
+""", b"""\
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+
+<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
+<!ENTITY title "About Service Workers">
+<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
+<!ENTITY maintitle "Registered Service Workers">
+<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
+<!ENTITY warning_not_enabled "Service Workers are not enabled.">
+<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
+<!ENTITY warning_no_serviceworkers "No Service Workers registered.">
+""")
+
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""\
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+
+<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
+<!ENTITY title "About Service Workers">
+<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
+<!ENTITY maintitle "Registered Service Workers">
+<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
+<!ENTITY warning_not_enabled "Service Workers are not enabled.">
+<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
+<!ENTITY warning_no_serviceworkers "No Service Workers registered.">
+""")
diff --git a/third_party/python/compare-locales/compare_locales/tests/dtd/test_parser.py b/third_party/python/compare-locales/compare_locales/tests/dtd/test_parser.py
new file mode 100644
index 0000000000..679bd21f84
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/dtd/test_parser.py
@@ -0,0 +1,271 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''Tests for the DTD parser.
+'''
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import unittest
+import re
+
+from compare_locales import parser
+from compare_locales.parser import (
+ Comment,
+ Junk,
+ Whitespace,
+)
+from compare_locales.tests import ParserTestMixin
+
+
+class TestDTD(ParserTestMixin, unittest.TestCase):
+ '''Tests for the DTD Parser.'''
+ filename = 'foo.dtd'
+
+ def test_one_entity(self):
+ self._test('''<!ENTITY foo.label "stuff">''',
+ (('foo.label', 'stuff'),))
+ self.assertListEqual(
+ [e.localized for e in self.parser],
+ [True]
+ )
+
+ quoteContent = '''<!ENTITY good.one "one">
+<!ENTITY bad.one "bad " quote">
+<!ENTITY good.two "two">
+<!ENTITY bad.two "bad "quoted" word">
+<!ENTITY good.three "three">
+<!ENTITY good.four "good ' quote">
+<!ENTITY good.five "good 'quoted' word">
+'''
+ quoteRef = (
+ ('good.one', 'one'),
+ (Whitespace, '\n'),
+ (Junk, '<!ENTITY bad.one "bad " quote">\n'),
+ ('good.two', 'two'),
+ (Whitespace, '\n'),
+ (Junk, '<!ENTITY bad.two "bad "quoted" word">\n'),
+ ('good.three', 'three'),
+ (Whitespace, '\n'),
+ ('good.four', 'good \' quote'),
+ (Whitespace, '\n'),
+ ('good.five', 'good \'quoted\' word'),
+ (Whitespace, '\n'),)
+
+ def test_quotes(self):
+ self._test(self.quoteContent, self.quoteRef)
+
+ def test_apos(self):
+ qr = re.compile('[\'"]', re.M)
+
+ def quot2apos(s):
+            return qr.sub(lambda m: "'" if m.group(0) == '"' else '"', s)
+
+ self._test(quot2apos(self.quoteContent),
+ ((ref[0], quot2apos(ref[1])) for ref in self.quoteRef))
+
+ def test_parsed_ref(self):
+ self._test('''<!ENTITY % fooDTD SYSTEM "chrome://brand.dtd">
+ %fooDTD;
+''',
+ (('fooDTD', '"chrome://brand.dtd"'),))
+ self._test('''<!ENTITY % fooDTD SYSTEM "chrome://brand.dtd">
+ %fooDTD;
+''',
+ (('fooDTD', '"chrome://brand.dtd"'),))
+
+ def test_trailing_comment(self):
+ self._test('''<!ENTITY first "string">
+<!ENTITY second "string">
+<!--
+<!ENTITY commented "out">
+-->
+''',
+ (
+ ('first', 'string'),
+ (Whitespace, '\n'),
+ ('second', 'string'),
+ (Whitespace, '\n'),
+ (Comment, 'out'),
+ (Whitespace, '\n')))
+
+ def test_license_header(self):
+ p = parser.getParser('foo.dtd')
+ p.readContents(self.resource('triple-license.dtd'))
+ entities = list(p.walk())
+ self.assertIsInstance(entities[0], parser.Comment)
+ self.assertIn('MPL', entities[0].all)
+ e = entities[2]
+ self.assertIsInstance(e, parser.Entity)
+ self.assertEqual(e.key, 'foo')
+ self.assertEqual(e.val, 'value')
+ self.assertEqual(len(entities), 4)
+ p.readContents(b'''\
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ - You can obtain one at http://mozilla.org/MPL/2.0/. -->
+
+<!ENTITY foo "value">
+''')
+ entities = list(p.walk())
+ self.assertIsInstance(entities[0], parser.Comment)
+ self.assertIn('MPL', entities[0].all)
+ e = entities[2]
+ self.assertIsInstance(e, parser.Entity)
+ self.assertEqual(e.key, 'foo')
+ self.assertEqual(e.val, 'value')
+ self.assertEqual(len(entities), 4)
+        # Test again without an empty line after the license header, and with a BOM.
+ p.readContents(b'''\xEF\xBB\xBF\
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ - You can obtain one at http://mozilla.org/MPL/2.0/. -->
+<!ENTITY foo "value">
+''')
+ entities = list(p.walk())
+ self.assertIsInstance(entities[0], parser.Comment)
+ self.assertIn('MPL', entities[0].all)
+ e = entities[2]
+ self.assertIsInstance(e, parser.Entity)
+ self.assertEqual(e.key, 'foo')
+ self.assertEqual(e.val, 'value')
+ self.assertEqual(len(entities), 4)
+
+ def testBOM(self):
+ self._test(u'\ufeff<!ENTITY foo.label "stuff">',
+ (('foo.label', 'stuff'),))
+
+ def test_trailing_whitespace(self):
+ self._test('<!ENTITY foo.label "stuff">\n \n',
+ (('foo.label', 'stuff'), (Whitespace, '\n \n')))
+
+ def test_unicode_comment(self):
+ self._test(b'<!-- \xe5\x8f\x96 -->'.decode('utf-8'),
+ ((Comment, u'\u53d6'),))
+
+ def test_empty_file(self):
+ self._test('', tuple())
+ self._test('\n', ((Whitespace, '\n'),))
+ self._test('\n\n', ((Whitespace, '\n\n'),))
+ self._test(' \n\n', ((Whitespace, ' \n\n'),))
+
+ def test_positions(self):
+ self.parser.readContents(b'''\
+<!ENTITY one "value">
+<!ENTITY two "other
+escaped value">
+''')
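+        # position()/value_position() return 1-based (line, column) pairs; the optional
+        # offset indexes into the span, with -1 pointing at its end.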
+ one, two = list(self.parser)
+ self.assertEqual(one.position(), (1, 1))
+ self.assertEqual(one.value_position(), (1, 16))
+ self.assertEqual(one.position(-1), (1, 23))
+ self.assertEqual(two.position(), (2, 1))
+ self.assertEqual(two.value_position(), (2, 16))
+ self.assertEqual(two.value_position(-1), (3, 14))
+ self.assertEqual(two.value_position(10), (3, 5))
+
+ def test_word_count(self):
+ self.parser.readContents(b'''\
+<!ENTITY a "one">
+<!ENTITY b "one<br>two">
+<!ENTITY c "one<span>word</span>">
+<!ENTITY d "one <a href='foo'>two</a> three">
+''')
+ a, b, c, d = list(self.parser)
+ self.assertEqual(a.count_words(), 1)
+ self.assertEqual(b.count_words(), 2)
+ self.assertEqual(c.count_words(), 1)
+ self.assertEqual(d.count_words(), 3)
+
+ def test_html_entities(self):
+ self.parser.readContents(b'''\
+<!ENTITY named "&amp;">
+<!ENTITY numcode "&#38;">
+<!ENTITY shorthexcode "&#x26;">
+<!ENTITY longhexcode "&#x0026;">
+<!ENTITY unknown "&unknownEntity;">
+''')
+ entities = iter(self.parser)
+
+ entity = next(entities)
+ self.assertEqual(entity.raw_val, '&amp;')
+ self.assertEqual(entity.val, '&')
+
+ entity = next(entities)
+ self.assertEqual(entity.raw_val, '&#38;')
+ self.assertEqual(entity.val, '&')
+
+ entity = next(entities)
+ self.assertEqual(entity.raw_val, '&#x26;')
+ self.assertEqual(entity.val, '&')
+
+ entity = next(entities)
+ self.assertEqual(entity.raw_val, '&#x0026;')
+ self.assertEqual(entity.val, '&')
+
+ entity = next(entities)
+ self.assertEqual(entity.raw_val, '&unknownEntity;')
+ self.assertEqual(entity.val, '&unknownEntity;')
+
+ def test_comment_val(self):
+ self.parser.readContents(b'''\
+<!-- comment
+spanning lines --> <!--
+-->
+<!-- last line -->
+''')
+ entities = self.parser.walk()
+
+ entity = next(entities)
+ self.assertIsInstance(entity, parser.Comment)
+ self.assertEqual(entity.val, ' comment\nspanning lines ')
+ entity = next(entities)
+ self.assertIsInstance(entity, parser.Whitespace)
+
+ entity = next(entities)
+ self.assertIsInstance(entity, parser.Comment)
+ self.assertEqual(entity.val, '\n')
+ entity = next(entities)
+ self.assertIsInstance(entity, parser.Whitespace)
+
+ entity = next(entities)
+ self.assertIsInstance(entity, parser.Comment)
+ self.assertEqual(entity.val, ' last line ')
+ entity = next(entities)
+ self.assertIsInstance(entity, parser.Whitespace)
+
+ def test_pre_comment(self):
+ self.parser.readContents(b'''\
+<!-- comment -->
+<!ENTITY one "string">
+
+<!-- standalone -->
+
+<!-- glued --><!ENTITY second "string">
+''')
+ entities = self.parser.walk()
+
+ entity = next(entities)
+ self.assertIsInstance(entity.pre_comment, parser.Comment)
+ self.assertEqual(entity.pre_comment.val, ' comment ')
+ entity = next(entities)
+ self.assertIsInstance(entity, parser.Whitespace)
+
+ entity = next(entities)
+ self.assertIsInstance(entity, parser.Comment)
+ self.assertEqual(entity.val, ' standalone ')
+ entity = next(entities)
+ self.assertIsInstance(entity, parser.Whitespace)
+
+ entity = next(entities)
+ self.assertIsInstance(entity.pre_comment, parser.Comment)
+ self.assertEqual(entity.pre_comment.val, ' glued ')
+ entity = next(entities)
+ self.assertIsInstance(entity, parser.Whitespace)
+ with self.assertRaises(StopIteration):
+ next(entities)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/compare-locales/compare_locales/tests/fluent/__init__.py b/third_party/python/compare-locales/compare_locales/tests/fluent/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/fluent/__init__.py
diff --git a/third_party/python/compare-locales/compare_locales/tests/fluent/test_checks.py b/third_party/python/compare-locales/compare_locales/tests/fluent/test_checks.py
new file mode 100644
index 0000000000..5a906d2a8d
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/fluent/test_checks.py
@@ -0,0 +1,581 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import textwrap
+import unittest
+
+from compare_locales.tests import BaseHelper
+from compare_locales.paths import File
+
+
+def dedent_ftl(text):
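+    # Normalize the indentation of inline FTL snippets and encode them as UTF-8 bytes.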
+ return textwrap.dedent(text.rstrip() + "\n").encode("utf-8")
+
+
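+# Shared reference resource; the localized snippets in the tests below are checked against it.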
+REFERENCE = b'''\
+simple = value
+term_ref = some { -term }
+ .attr = is simple
+msg-attr-ref = some {button.label}
+mixed-attr = value
+ .and = attribute
+only-attr =
+ .one = exists
+-term = value need
+ .attrs = can differ
+'''
+
+
+class TestFluent(BaseHelper):
+ file = File('foo.ftl', 'foo.ftl')
+ refContent = REFERENCE
+
+ def test_simple(self):
+ self._test(b'''simple = localized''',
+ tuple())
+
+
+class TestMessage(BaseHelper):
+ file = File('foo.ftl', 'foo.ftl')
+ refContent = REFERENCE
+
+ def test_excess_attribute(self):
+ self._test(
+ dedent_ftl('''\
+ simple = value with
+ .obsolete = attribute
+ '''),
+ (
+ (
+ 'error', 24,
+ 'Obsolete attribute: obsolete', 'fluent'
+ ),
+ )
+ )
+
+ def test_duplicate_attribute(self):
+ self._test(
+ dedent_ftl('''\
+ only-attr =
+ .one = attribute
+ .one = again
+ .one = three times
+ '''),
+ (
+ (
+ 'warning', 16,
+ 'Attribute "one" is duplicated', 'fluent'
+ ),
+ (
+ 'warning', 37,
+ 'Attribute "one" is duplicated', 'fluent'
+ ),
+ (
+ 'warning', 54,
+ 'Attribute "one" is duplicated', 'fluent'
+ ),
+ )
+ )
+
+ def test_only_attributes(self):
+ self._test(
+ dedent_ftl('''\
+ only-attr = obsolete value
+ '''),
+ (
+ (
+ 'error', 0,
+ 'Missing attribute: one', 'fluent'
+ ),
+ (
+ 'error', 12,
+ 'Obsolete value', 'fluent'
+ ),
+ )
+ )
+
+ def test_missing_value(self):
+ self._test(
+ dedent_ftl('''\
+ mixed-attr =
+ .and = attribute exists
+ '''),
+ (
+ (
+ 'error', 0,
+ 'Missing value', 'fluent'
+ ),
+ )
+ )
+
+ def test_bad_encoding(self):
+ self._test(
+ 'simple = touché'.encode('latin-1'),
+ (
+ (
+ "warning",
+ 14,
+ "\ufffd in: simple",
+ "encodings"
+ ),
+ )
+ )
+
+
+class TestTerm(BaseHelper):
+ file = File('foo.ftl', 'foo.ftl')
+ refContent = REFERENCE
+
+ def test_mismatching_attribute(self):
+ self._test(
+ dedent_ftl('''\
+ -term = value with
+ .different = attribute
+ '''),
+ tuple()
+ )
+
+ def test_duplicate_attribute(self):
+ self._test(
+ dedent_ftl('''\
+ -term = need value
+ .one = attribute
+ .one = again
+ .one = three times
+ '''),
+ (
+ (
+ 'warning', 23,
+ 'Attribute "one" is duplicated', 'fluent'
+ ),
+ (
+ 'warning', 44,
+ 'Attribute "one" is duplicated', 'fluent'
+ ),
+ (
+ 'warning', 61,
+ 'Attribute "one" is duplicated', 'fluent'
+ ),
+ )
+ )
+
+
+class TestMessageReference(BaseHelper):
+ file = File('foo.ftl', 'foo.ftl')
+ refContent = REFERENCE
+
+ def test_msg_attr(self):
+ self._test(
+ b'''msg-attr-ref = Nice {button.label}''',
+ tuple()
+ )
+ self._test(
+ b'''msg-attr-ref = not at all''',
+ (
+ (
+ 'warning', 0,
+ 'Missing message reference: button.label', 'fluent'
+ ),
+ )
+ )
+ self._test(
+ b'''msg-attr-ref = {button} is not a label''',
+ (
+ (
+ 'warning', 0,
+ 'Missing message reference: button.label', 'fluent'
+ ),
+ (
+ 'warning', 16,
+ 'Obsolete message reference: button', 'fluent'
+ ),
+ )
+ )
+ self._test(
+ b'''msg-attr-ref = {button.tooltip} is not a label''',
+ (
+ (
+ 'warning', 0,
+ 'Missing message reference: button.label', 'fluent'
+ ),
+ (
+ 'warning', 16,
+ 'Obsolete message reference: button.tooltip', 'fluent'
+ ),
+ )
+ )
+
+
+class TestTermReference(BaseHelper):
+ file = File('foo.ftl', 'foo.ftl')
+ refContent = REFERENCE
+
+ def test_good_term_ref(self):
+ self._test(
+ dedent_ftl('''\
+ term_ref = localized to {-term}
+ .attr = is plain
+ '''),
+ tuple()
+ )
+
+ def test_missing_term_ref(self):
+ self._test(
+ dedent_ftl('''\
+ term_ref = localized
+ .attr = should not refer to {-term}
+ '''),
+ (
+ (
+ 'warning', 0,
+ 'Missing term reference: -term', 'fluent'
+ ),
+ (
+ 'warning', 54,
+ 'Obsolete term reference: -term', 'fluent'
+ ),
+ )
+ )
+
+ def test_l10n_only_term_ref(self):
+ self._test(
+ b'''simple = localized with { -term }''',
+ (
+ (
+ u'warning', 26,
+ u'Obsolete term reference: -term', u'fluent'
+ ),
+ )
+ )
+
+ def test_term_attr(self):
+ self._test(
+ dedent_ftl('''\
+ term_ref = Depends on { -term.prop ->
+ *[some] Term prop, doesn't reference the term value, though.
+ }
+ .attr = still simple
+ '''),
+ (
+ (
+ u'warning', 0,
+ u'Missing term reference: -term', u'fluent'
+ ),
+ )
+ )
+
+
+class SelectExpressionTest(BaseHelper):
+ file = File('foo.ftl', 'foo.ftl')
+ refContent = b'''\
+msg = { $val ->
+ *[other] Show something
+ }
+-term = Foopy
+'''
+
+ def test_no_select(self):
+ self._test(
+ b'''msg = Something''',
+ tuple()
+ )
+
+ def test_good(self):
+ self._test(
+ dedent_ftl('''\
+ msg = { $val ->
+ *[one] one
+ [other] other
+ }
+ '''),
+ tuple()
+ )
+
+ def test_duplicate_variant(self):
+ self._test(
+ dedent_ftl('''\
+ msg = { $val ->
+ *[one] one
+ [one] other
+ }
+ '''),
+ (
+ (
+ 'warning', 19,
+ 'Variant key "one" is duplicated', 'fluent'
+ ),
+ (
+ 'warning', 31,
+ 'Variant key "one" is duplicated', 'fluent'
+ ),
+ )
+ )
+
+ def test_term_value(self):
+ self._test(
+ dedent_ftl('''\
+ -term = { PLATFORM() ->
+ *[one] one
+ [two] two
+ [two] duplicate
+ }
+ '''),
+ (
+ (
+ 'warning', 39,
+ 'Variant key "two" is duplicated', 'fluent'
+ ),
+ (
+ 'warning', 51,
+ 'Variant key "two" is duplicated', 'fluent'
+ ),
+ )
+ )
+
+ def test_term_attribute(self):
+ self._test(
+ dedent_ftl('''\
+ -term = boring value
+ .attr = { PLATFORM() ->
+ *[one] one
+ [two] two
+ [two] duplicate
+ [two] three
+ }
+ '''),
+ (
+ (
+ 'warning', 66,
+ 'Variant key "two" is duplicated', 'fluent'
+ ),
+ (
+ 'warning', 80,
+ 'Variant key "two" is duplicated', 'fluent'
+ ),
+ (
+ 'warning', 100,
+ 'Variant key "two" is duplicated', 'fluent'
+ ),
+ )
+ )
+
+
+class PluralTest(BaseHelper):
+ file = File('foo.ftl', 'foo.ftl')
+ refContent = b'''\
+msg = { $val ->
+ *[other] Show something
+ }
+'''
+
+ def test_missing_plural(self):
+ self.file.locale = 'ru'
+ self._test(
+ dedent_ftl('''\
+ msg = { $val ->
+ [one] thing
+ [3] is ok
+ *[many] stuff
+ }
+ '''),
+ (
+ (
+ 'warning', 19,
+ 'Plural categories missing: few', 'fluent'
+ ),
+ )
+ )
+
+ def test_ignoring_other(self):
+ self.file.locale = 'de'
+ self._test(
+ dedent_ftl('''\
+ msg = { $val ->
+ [1] thing
+ *[other] stuff
+ }
+ '''),
+ tuple()
+ )
+
+
+class CSSStyleTest(BaseHelper):
+ file = File('foo.ftl', 'foo.ftl')
+ refContent = b'''\
+simple =
+ .style = width:1px
+select =
+ .style = {PLATFORM() ->
+ [windows] width:1px
+ *[unix] max-width:1px
+ }
+ref =
+ .style = {simple.style}
+broken =
+ .style = 28em
+'''
+
+ def test_simple(self):
+ self._test(dedent_ftl(
+ '''\
+ simple =
+ .style = width:2px
+ '''),
+ tuple())
+ self._test(dedent_ftl(
+ '''\
+ simple =
+ .style = max-width:2px
+ '''),
+ (
+ (
+ 'warning', 0,
+ 'width only in reference, max-width only in l10n', 'fluent'
+ ),
+ ))
+ self._test(dedent_ftl(
+ '''\
+ simple =
+ .style = stuff
+ '''),
+ (
+ (
+ 'error', 0,
+ 'reference is a CSS spec', 'fluent'
+ ),
+ ))
+        # Cover the current limitation that only plain string values are checked.
+ self._test(dedent_ftl(
+ '''\
+ simple =
+ .style = {"width:3px"}
+ '''),
+ tuple())
+
+ def test_select(self):
+ self._test(dedent_ftl(
+ '''\
+ select =
+ .style = width:2px
+ '''),
+ (
+ (
+ 'warning', 0,
+ 'width only in l10n', 'fluent'
+ ),
+ ))
+ self._test(dedent_ftl(
+ '''\
+ select =
+ .style = max-width:2px
+ '''),
+ (
+ (
+ 'warning', 0,
+ 'max-width only in l10n', 'fluent'
+ ),
+ ))
+ self._test(dedent_ftl(
+ '''\
+ select =
+ .style = stuff
+ '''),
+ (
+ (
+ 'error', 0,
+ 'reference is a CSS spec', 'fluent'
+ ),
+ ))
+        # Cover the current limitation that only plain string values are checked.
+ self._test(dedent_ftl(
+ '''\
+ select =
+ .style = {"width:1px"}
+ '''),
+ tuple())
+
+ def test_ref(self):
+ self._test(dedent_ftl(
+ '''\
+ ref =
+ .style = width:2px
+ '''),
+ (
+ (
+ 'warning', 0,
+ 'width only in l10n', 'fluent'
+ ),
+ (
+ 'warning', 0,
+ 'Missing message reference: simple.style', 'fluent'
+ ),
+ ))
+ self._test(dedent_ftl(
+ '''\
+ ref =
+ .style = max-width:2px
+ '''),
+ (
+ (
+ 'warning', 0,
+ 'max-width only in l10n', 'fluent'
+ ),
+ (
+ 'warning', 0,
+ 'Missing message reference: simple.style', 'fluent'
+ ),
+ ))
+ self._test(dedent_ftl(
+ '''\
+ ref =
+ .style = stuff
+ '''),
+ (
+ (
+ 'error', 0,
+ 'reference is a CSS spec', 'fluent'
+ ),
+ (
+ 'warning', 0,
+ 'Missing message reference: simple.style', 'fluent'
+ ),
+ ))
+        # Cover the current limitation that only plain string values are checked.
+ self._test(dedent_ftl(
+ '''\
+ ref =
+ .style = {"width:1px"}
+ '''),
+ (
+ (
+ 'warning', 0,
+ 'Missing message reference: simple.style', 'fluent'
+ ),
+ ))
+
+ def test_broken(self):
+ self._test(dedent_ftl(
+ '''\
+ broken =
+ .style = 27em
+ '''),
+ (('error', 0, 'reference is a CSS spec', 'fluent'),))
+ self._test(dedent_ftl(
+ '''\
+ broken =
+ .style = width: 27em
+ '''),
+ (
+ (
+ 'warning', 0,
+ 'width only in l10n', 'fluent'
+ ),
+ ))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/compare-locales/compare_locales/tests/fluent/test_merge.py b/third_party/python/compare-locales/compare_locales/tests/fluent/test_merge.py
new file mode 100644
index 0000000000..41e69eca3e
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/fluent/test_merge.py
@@ -0,0 +1,283 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from compare_locales.merge import merge_channels
+
+
+class TestMergeFluent(unittest.TestCase):
+ name = "foo.ftl"
+
+ def test_no_changes(self):
+ channels = (b"""
+foo = Foo 1
+""", b"""
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+""")
+
+ def test_attribute_in_first(self):
+ channels = (b"""
+foo = Foo 1
+ .attr = Attr 1
+""", b"""
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+ .attr = Attr 1
+""")
+
+ def test_attribute_in_last(self):
+ channels = (b"""
+foo = Foo 1
+""", b"""
+foo = Foo 2
+ .attr = Attr 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+""")
+
+ def test_junk_in_first(self):
+ channels = (b"""\
+line of junk
+""", b"""\
+one = entry
+""")
+ self.assertMultiLineEqual(
+ merge_channels(self.name, channels).decode('utf-8'),
+ """\
+one = entry
+line of junk
+"""
+ )
+
+ def test_junk_in_last(self):
+ channels = (b"""\
+one = entry
+""", b"""\
+line of junk
+""")
+ self.assertMultiLineEqual(
+ merge_channels(self.name, channels).decode('utf-8'),
+ """\
+line of junk
+one = entry
+"""
+ )
+
+ def test_attribute_changed(self):
+ channels = (b"""
+foo = Foo 1
+ .attr = Attr 1
+""", b"""
+foo = Foo 2
+ .attr = Attr 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+ .attr = Attr 1
+""")
+
+ def test_group_comment_in_first(self):
+ channels = (b"""
+## Group Comment 1
+foo = Foo 1
+""", b"""
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+## Group Comment 1
+foo = Foo 1
+""")
+
+ def test_group_comment_in_last(self):
+ channels = (b"""
+foo = Foo 1
+""", b"""
+## Group Comment 2
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+## Group Comment 2
+foo = Foo 1
+""")
+
+ def test_group_comment_changed(self):
+ channels = (b"""
+## Group Comment 1
+foo = Foo 1
+""", b"""
+## Group Comment 2
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+## Group Comment 2
+## Group Comment 1
+foo = Foo 1
+""")
+
+ def test_group_comment_and_section(self):
+ channels = (b"""
+## Group Comment
+foo = Foo 1
+""", b"""
+// Section Comment
+[[ Section ]]
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+// Section Comment
+[[ Section ]]
+## Group Comment
+foo = Foo 1
+""")
+
+ def test_message_comment_in_first(self):
+ channels = (b"""
+# Comment 1
+foo = Foo 1
+""", b"""
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+# Comment 1
+foo = Foo 1
+""")
+
+ def test_message_comment_in_last(self):
+ channels = (b"""
+foo = Foo 1
+""", b"""
+# Comment 2
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+""")
+
+ def test_message_comment_changed(self):
+ channels = (b"""
+# Comment 1
+foo = Foo 1
+""", b"""
+# Comment 2
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+# Comment 1
+foo = Foo 1
+""")
+
+ def test_standalone_comment_in_first(self):
+ channels = (b"""
+foo = Foo 1
+
+# Comment 1
+""", b"""
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+
+# Comment 1
+""")
+
+ def test_standalone_comment_in_last(self):
+ channels = (b"""
+foo = Foo 1
+""", b"""
+foo = Foo 2
+
+# Comment 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+
+# Comment 2
+""")
+
+ def test_standalone_comment_changed(self):
+ channels = (b"""
+foo = Foo 1
+
+# Comment 1
+""", b"""
+foo = Foo 2
+
+# Comment 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+
+# Comment 2
+
+# Comment 1
+""")
+
+ def test_resource_comment_in_first(self):
+ channels = (b"""
+### Resource Comment 1
+
+foo = Foo 1
+""", b"""
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+### Resource Comment 1
+
+foo = Foo 1
+""")
+
+ def test_resource_comment_in_last(self):
+ channels = (b"""
+foo = Foo 1
+""", b"""
+### Resource Comment 1
+
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+### Resource Comment 1
+
+foo = Foo 1
+""")
+
+ def test_resource_comment_changed(self):
+ channels = (b"""
+### Resource Comment 1
+
+foo = Foo 1
+""", b"""
+### Resource Comment 2
+
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+### Resource Comment 2
+
+### Resource Comment 1
+
+foo = Foo 1
+""")
diff --git a/third_party/python/compare-locales/compare_locales/tests/fluent/test_parser.py b/third_party/python/compare-locales/compare_locales/tests/fluent/test_parser.py
new file mode 100644
index 0000000000..db767fd5e2
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/fluent/test_parser.py
@@ -0,0 +1,310 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import unittest
+
+from compare_locales import parser
+from compare_locales.tests import ParserTestMixin
+
+
+class TestFluentParser(ParserTestMixin, unittest.TestCase):
+ maxDiff = None
+ filename = 'foo.ftl'
+
+ def test_equality_same(self):
+ source = b'progress = Progress: { NUMBER($num, style: "percent") }.'
+
+ self.parser.readContents(source)
+ [ent1] = list(self.parser)
+
+ self.parser.readContents(source)
+ [ent2] = list(self.parser)
+
+ self.assertTrue(ent1.equals(ent2))
+ self.assertTrue(ent1.localized)
+
+ def test_equality_different_whitespace(self):
+ source1 = b'foo = { $arg }'
+        source2 = b'foo = {$arg}'
+
+ self.parser.readContents(source1)
+ [ent1] = list(self.parser)
+
+ self.parser.readContents(source2)
+ [ent2] = list(self.parser)
+
+ self.assertTrue(ent1.equals(ent2))
+
+ def test_word_count(self):
+ self.parser.readContents(b'''\
+a = One
+b = One two three
+c = One { $arg } two
+d =
+ One { $arg ->
+ *[x] Two three
+ [y] Four
+ } five.
+e =
+ .attr = One
+f =
+ .attr1 = One
+ .attr2 = Two
+g = One two
+ .attr = Three
+h =
+ One { $arg ->
+ *[x] Two three
+ [y] Four
+ } five.
+ .attr1 =
+ Six { $arg ->
+ *[x] Seven eight
+ [y] Nine
+ } ten.
+-i = One
+ .prop = Do not count
+''')
+
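+        # Word counts come from literal text only: placeables are skipped, words in
+        # attributes and in all select variants are counted, and term attributes
+        # (-i.prop) are not counted.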
+ a, b, c, d, e, f, g, h, i = list(self.parser)
+ self.assertEqual(a.count_words(), 1)
+ self.assertEqual(b.count_words(), 3)
+ self.assertEqual(c.count_words(), 2)
+ self.assertEqual(d.count_words(), 5)
+ self.assertEqual(e.count_words(), 1)
+ self.assertEqual(f.count_words(), 2)
+ self.assertEqual(g.count_words(), 3)
+ self.assertEqual(h.count_words(), 10)
+ self.assertEqual(i.count_words(), 1)
+
+ def test_simple_message(self):
+ self.parser.readContents(b'a = A')
+
+ [a] = list(self.parser)
+ self.assertEqual(a.key, 'a')
+ self.assertEqual(a.raw_val, 'A')
+ self.assertEqual(a.all, 'a = A')
+ attributes = list(a.attributes)
+ self.assertEqual(len(attributes), 0)
+
+ def test_complex_message(self):
+ self.parser.readContents(b'abc = A { $arg } B { msg } C')
+
+ [abc] = list(self.parser)
+ self.assertEqual(abc.key, 'abc')
+ self.assertEqual(abc.raw_val, 'A { $arg } B { msg } C')
+ self.assertEqual(abc.all, 'abc = A { $arg } B { msg } C')
+
+ def test_multiline_message(self):
+ self.parser.readContents(b'''\
+abc =
+ A
+ B
+ C
+''')
+
+ [abc] = list(self.parser)
+ self.assertEqual(abc.key, 'abc')
+ self.assertEqual(abc.raw_val, ' A\n B\n C')
+ self.assertEqual(abc.all, 'abc =\n A\n B\n C')
+
+ def test_message_with_attribute(self):
+ self.parser.readContents(b'''\
+
+
+abc = ABC
+ .attr = Attr
+''')
+
+ [abc] = list(self.parser)
+ self.assertEqual(abc.key, 'abc')
+ self.assertEqual(abc.raw_val, 'ABC')
+ self.assertEqual(abc.all, 'abc = ABC\n .attr = Attr')
+ self.assertEqual(abc.position(), (3, 1))
+ self.assertEqual(abc.value_position(), (3, 7))
+ attr = list(abc.attributes)[0]
+ self.assertEqual(attr.value_position(), (4, 13))
+
+ def test_message_with_attribute_and_no_value(self):
+ self.parser.readContents(b'''\
+abc =
+ .attr = Attr
+''')
+
+ [abc] = list(self.parser)
+ self.assertEqual(abc.key, 'abc')
+ self.assertEqual(abc.raw_val, None)
+ self.assertEqual(abc.all, 'abc =\n .attr = Attr')
+ attributes = list(abc.attributes)
+ self.assertEqual(len(attributes), 1)
+ attr = attributes[0]
+ self.assertEqual(attr.key, 'attr')
+ self.assertEqual(attr.raw_val, 'Attr')
+ self.assertEqual(abc.value_position(), (1, 4))
+ self.assertEqual(attr.value_position(), (2, 13))
+
+ def test_non_localizable(self):
+ self.parser.readContents(b'''\
+### Resource Comment
+
+foo = Foo
+
+## Group Comment
+
+-bar = Bar
+
+##
+
+# Standalone Comment
+
+# Baz Comment
+baz = Baz
+''')
+ entities = self.parser.walk()
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.FluentComment))
+ self.assertEqual(entity.all, '### Resource Comment')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+ self.assertEqual(entity.all, '\n\n')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.FluentMessage))
+ self.assertEqual(entity.raw_val, 'Foo')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+ self.assertEqual(entity.all, '\n\n')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.FluentComment))
+ self.assertEqual(entity.all, '## Group Comment')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+ self.assertEqual(entity.all, '\n\n')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.FluentTerm))
+ self.assertEqual(entity.raw_val, 'Bar')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+ self.assertEqual(entity.all, '\n\n')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.FluentComment))
+ self.assertEqual(entity.all, '##')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+ self.assertEqual(entity.all, '\n\n')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.FluentComment))
+ self.assertEqual(entity.all, '# Standalone Comment')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+ self.assertEqual(entity.all, '\n\n')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.FluentMessage))
+ self.assertEqual(entity.raw_val, 'Baz')
+ self.assertEqual(entity.entry.comment.content, 'Baz Comment')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+ self.assertEqual(entity.all, '\n')
+
+ with self.assertRaises(StopIteration):
+ next(entities)
+
+ def test_comments_val(self):
+ self.parser.readContents(b'''\
+// Legacy Comment
+
+### Resource Comment
+
+## Section Comment
+
+# Standalone Comment
+''')
+ entities = self.parser.walk()
+
+ entity = next(entities)
+        # Ensure that Fluent comments are parsed as FluentComment (and Comment) instances,
+        # while legacy comments (//) are parsed as Junk.
+ self.assertTrue(isinstance(entity, parser.Junk))
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Comment))
+ self.assertEqual(entity.val, 'Resource Comment')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Comment))
+ self.assertEqual(entity.val, 'Section Comment')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Comment))
+ self.assertEqual(entity.val, 'Standalone Comment')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+ self.assertEqual(entity.all, '\n')
+
+ with self.assertRaises(StopIteration):
+ next(entities)
+
+ def test_junk(self):
+ self.parser.readUnicode('''\
+# Comment
+
+Line of junk
+
+# Comment
+msg = value
+''')
+ entities = self.parser.walk()
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.FluentComment))
+ self.assertEqual(entity.val, 'Comment')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+ self.assertEqual(entity.raw_val, '\n\n')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Junk))
+ self.assertEqual(entity.raw_val, 'Line of junk')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+ self.assertEqual(entity.raw_val, '\n\n')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.FluentEntity))
+ self.assertEqual(entity.raw_val, 'value')
+ self.assertEqual(entity.entry.comment.content, 'Comment')
+
+ entity = next(entities)
+ self.assertTrue(isinstance(entity, parser.Whitespace))
+ self.assertEqual(entity.raw_val, '\n')
+
+ with self.assertRaises(StopIteration):
+ next(entities)
diff --git a/third_party/python/compare-locales/compare_locales/tests/lint/__init__.py b/third_party/python/compare-locales/compare_locales/tests/lint/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/lint/__init__.py
diff --git a/third_party/python/compare-locales/compare_locales/tests/lint/test_linter.py b/third_party/python/compare-locales/compare_locales/tests/lint/test_linter.py
new file mode 100644
index 0000000000..9abdc57c08
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/lint/test_linter.py
@@ -0,0 +1,97 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import unittest
+
+from compare_locales.lint import linter
+from compare_locales.parser import base as parser
+
+
+class MockChecker(object):
+ def __init__(self, mocked):
+ self.results = mocked
+
+ def check(self, ent, ref):
+ for r in self.results:
+ yield r
+
+
+class EntityTest(unittest.TestCase):
+ def test_junk(self):
+ el = linter.EntityLinter([], None, {})
+ ctx = parser.Parser.Context('foo\nbar\n')
+ ent = parser.Junk(ctx, (4, 7))
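+        # Offsets (4, 7) cover "bar", i.e. line 2, column 1 of the context.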
+ res = el.handle_junk(ent)
+ self.assertIsNotNone(res)
+ self.assertEqual(res['lineno'], 2)
+ self.assertEqual(res['column'], 1)
+ ent = parser.LiteralEntity('one', 'two', 'one = two')
+ self.assertIsNone(el.handle_junk(ent))
+
+ def test_full_entity(self):
+ ctx = parser.Parser.Context('''\
+one = two
+two = three
+one = four
+''')
+ entities = [
+ parser.Entity(ctx, None, None, (0, 10), (0, 3), (6, 9)),
+ parser.Entity(ctx, None, None, (10, 22), (10, 13), (16, 21)),
+ parser.Entity(ctx, None, None, (22, 33), (22, 25), (28, 32)),
+ ]
+ self.assertEqual(
+ (entities[0].all, entities[0].key, entities[0].val),
+ ('one = two\n', 'one', 'two')
+ )
+ self.assertEqual(
+ (entities[1].all, entities[1].key, entities[1].val),
+ ('two = three\n', 'two', 'three')
+ )
+ self.assertEqual(
+ (entities[2].all, entities[2].key, entities[2].val),
+ ('one = four\n', 'one', 'four')
+ )
+ el = linter.EntityLinter(entities, None, {})
+ results = list(el.lint_full_entity(entities[1]))
+ self.assertListEqual(results, [])
+ results = list(el.lint_full_entity(entities[2]))
+ self.assertEqual(len(results), 1)
+ result = results[0]
+ self.assertEqual(result['level'], 'error')
+ self.assertEqual(result['lineno'], 3)
+ self.assertEqual(result['column'], 1)
+        # Finally, check that a conflict with the reference entity is reported as a warning.
+ el.reference = {
+ 'two': parser.LiteralEntity('two = other', 'two', 'other')
+ }
+ results = list(el.lint_full_entity(entities[1]))
+ self.assertEqual(len(results), 1)
+ result = results[0]
+ self.assertEqual(result['level'], 'warning')
+ self.assertEqual(result['lineno'], 2)
+ self.assertEqual(result['column'], 1)
+
+ def test_in_value(self):
+ ctx = parser.Parser.Context('''\
+one = two
+''')
+ entities = [
+ parser.Entity(ctx, None, None, (0, 10), (0, 3), (6, 9)),
+ ]
+ self.assertEqual(
+ (entities[0].all, entities[0].key, entities[0].val),
+ ('one = two\n', 'one', 'two')
+ )
+ checker = MockChecker([
+ ('error', 2, 'Incompatible resource types', 'android'),
+ ])
+ el = linter.EntityLinter(entities, checker, {})
+ results = list(el.lint_value(entities[0]))
+ self.assertEqual(len(results), 1)
+ result = results[0]
+ self.assertEqual(result['level'], 'error')
+ self.assertEqual(result['lineno'], 1)
+ self.assertEqual(result['column'], 9)
diff --git a/third_party/python/compare-locales/compare_locales/tests/lint/test_util.py b/third_party/python/compare-locales/compare_locales/tests/lint/test_util.py
new file mode 100644
index 0000000000..2a8d30bf2a
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/lint/test_util.py
@@ -0,0 +1,91 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import unittest
+
+from compare_locales.lint import util
+from compare_locales.paths.project import ProjectConfig
+from compare_locales.paths.files import ProjectFiles
+from compare_locales import mozpath
+
+
+class MirrorReferenceTest(unittest.TestCase):
+ def test_empty(self):
+ files = ProjectFiles(None, [])
+ get_reference_and_tests = util.mirror_reference_and_tests(files, 'tld')
+ ref, tests = get_reference_and_tests('some/path/file.ftl')
+ self.assertIsNone(ref)
+ self.assertIsNone(tests)
+
+ def test_no_tests(self):
+ pc = ProjectConfig(None)
+ pc.add_paths({
+ 'reference': 'some/path/file.ftl',
+ 'l10n': 'some/{locale}/file.ftl',
+ })
+ files = ProjectFiles(None, [pc])
+ get_reference_and_tests = util.mirror_reference_and_tests(files, 'tld')
+ ref, tests = get_reference_and_tests('some/path/file.ftl')
+ self.assertEqual(mozpath.relpath(ref, 'tld'), 'some/path/file.ftl')
+ self.assertEqual(tests, set())
+
+ def test_with_tests(self):
+ pc = ProjectConfig(None)
+ pc.add_paths({
+ 'reference': 'some/path/file.ftl',
+ 'l10n': 'some/{locale}/file.ftl',
+ 'test': ['more_stuff'],
+ })
+ files = ProjectFiles(None, [pc])
+ get_reference_and_tests = util.mirror_reference_and_tests(files, 'tld')
+ ref, tests = get_reference_and_tests('some/path/file.ftl')
+ self.assertEqual(mozpath.relpath(ref, 'tld'), 'some/path/file.ftl')
+ self.assertEqual(tests, {'more_stuff'})
+
+
+class L10nBaseReferenceTest(unittest.TestCase):
+ def test_empty(self):
+ files = ProjectFiles(None, [])
+ get_reference_and_tests = util.l10n_base_reference_and_tests(files)
+ ref, tests = get_reference_and_tests('some/path/file.ftl')
+ self.assertIsNone(ref)
+ self.assertIsNone(tests)
+
+ def test_no_tests(self):
+ pc = ProjectConfig(None)
+ pc.add_environment(l10n_base='l10n_orig')
+ pc.add_paths({
+ 'reference': 'some/path/file.ftl',
+ 'l10n': '{l10n_base}/{locale}/some/file.ftl',
+ })
+ pc.set_locales(['gecko'], deep=True)
+ files = ProjectFiles('gecko', [pc])
+ get_reference_and_tests = util.l10n_base_reference_and_tests(files)
+ ref, tests = get_reference_and_tests('some/path/file.ftl')
+ self.assertEqual(
+ mozpath.relpath(ref, 'l10n_orig/gecko'),
+ 'some/file.ftl'
+ )
+ self.assertEqual(tests, set())
+
+ def test_with_tests(self):
+ pc = ProjectConfig(None)
+ pc.add_environment(l10n_base='l10n_orig')
+ pc.add_paths({
+ 'reference': 'some/path/file.ftl',
+ 'l10n': '{l10n_base}/{locale}/some/file.ftl',
+ 'test': ['more_stuff'],
+ })
+ pc.set_locales(['gecko'], deep=True)
+ files = ProjectFiles('gecko', [pc])
+ get_reference_and_tests = util.l10n_base_reference_and_tests(files)
+ ref, tests = get_reference_and_tests('some/path/file.ftl')
+ self.assertEqual(
+ mozpath.relpath(ref, 'l10n_orig/gecko'),
+ 'some/file.ftl'
+ )
+ self.assertEqual(tests, {'more_stuff'})
diff --git a/third_party/python/compare-locales/compare_locales/tests/merge/__init__.py b/third_party/python/compare-locales/compare_locales/tests/merge/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/merge/__init__.py
diff --git a/third_party/python/compare-locales/compare_locales/tests/merge/test_comments.py b/third_party/python/compare-locales/compare_locales/tests/merge/test_comments.py
new file mode 100644
index 0000000000..71241c8768
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/merge/test_comments.py
@@ -0,0 +1,188 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from compare_locales.merge import merge_channels
+
+
+class TestMergeComments(unittest.TestCase):
+ name = "foo.properties"
+
+ def test_comment_added_in_first(self):
+ channels = (b"""
+foo = Foo 1
+# Bar Comment 1
+bar = Bar 1
+""", b"""
+foo = Foo 2
+bar = Bar 2
+""")
+ self.assertMultiLineEqual(
+ merge_channels(self.name, channels).decode("utf-8"), """
+foo = Foo 1
+# Bar Comment 1
+bar = Bar 1
+""")
+
+ def test_comment_still_in_last(self):
+ channels = (b"""
+foo = Foo 1
+bar = Bar 1
+""", b"""
+foo = Foo 2
+# Bar Comment 2
+bar = Bar 2
+""")
+ self.assertMultiLineEqual(
+ merge_channels(self.name, channels).decode("utf-8"), """
+foo = Foo 1
+bar = Bar 1
+""")
+
+ def test_comment_changed(self):
+ channels = (b"""
+foo = Foo 1
+# Bar Comment 1
+bar = Bar 1
+""", b"""
+foo = Foo 2
+# Bar Comment 2
+bar = Bar 2
+""")
+ self.assertMultiLineEqual(
+ merge_channels(self.name, channels).decode("utf-8"), """
+foo = Foo 1
+# Bar Comment 1
+bar = Bar 1
+""")
+
+
+class TestMergeStandaloneComments(unittest.TestCase):
+ name = "foo.properties"
+
+ def test_comment_added_in_first(self):
+ channels = (b"""
+# Standalone Comment 1
+
+# Foo Comment 1
+foo = Foo 1
+""", b"""
+# Foo Comment 2
+foo = Foo 2
+""")
+ self.assertMultiLineEqual(
+ merge_channels(self.name, channels).decode("utf-8"), """
+# Standalone Comment 1
+
+# Foo Comment 1
+foo = Foo 1
+""")
+
+ def test_comment_still_in_last(self):
+ channels = (b"""
+# Foo Comment 1
+foo = Foo 1
+""", b"""
+# Standalone Comment 2
+
+# Foo Comment 2
+foo = Foo 2
+""")
+ self.assertMultiLineEqual(
+ merge_channels(self.name, channels).decode("utf-8"), """
+# Standalone Comment 2
+
+# Foo Comment 1
+foo = Foo 1
+""")
+
+ def test_comments_in_both(self):
+ channels = (b"""
+# Standalone Comment 1
+
+# Foo Comment 1
+foo = Foo 1
+""", b"""
+# Standalone Comment 2
+
+# Foo Comment 2
+foo = Foo 2
+""")
+ self.assertMultiLineEqual(
+ merge_channels(self.name, channels).decode("utf-8"), """
+# Standalone Comment 2
+
+# Standalone Comment 1
+
+# Foo Comment 1
+foo = Foo 1
+""")
+
+ def test_identical_comments_in_both(self):
+ channels = (b"""
+# Standalone Comment
+
+# Foo Comment 1
+foo = Foo 1
+""", b"""
+# Standalone Comment
+
+# Foo Comment 2
+foo = Foo 2
+""")
+ self.assertMultiLineEqual(
+ merge_channels(self.name, channels).decode("utf-8"), """
+# Standalone Comment
+
+# Foo Comment 1
+foo = Foo 1
+""")
+
+ def test_standalone_which_is_attached_in_first(self):
+ channels = (b"""
+# Ambiguous Comment
+foo = Foo 1
+
+# Bar Comment 1
+bar = Bar 1
+""", b"""
+# Ambiguous Comment
+
+# Bar Comment 2
+bar = Bar 2
+""")
+ self.assertMultiLineEqual(
+ merge_channels(self.name, channels).decode("utf-8"), """
+# Ambiguous Comment
+
+# Ambiguous Comment
+foo = Foo 1
+
+# Bar Comment 1
+bar = Bar 1
+""")
+
+ def test_standalone_which_is_attached_in_second(self):
+ channels = (b"""
+# Ambiguous Comment
+
+# Bar Comment 1
+bar = Bar 1
+""", b"""
+# Ambiguous Comment
+foo = Foo 1
+
+# Bar Comment 2
+bar = Bar 2
+""")
+ self.assertMultiLineEqual(
+ merge_channels(self.name, channels).decode("utf-8"), """
+# Ambiguous Comment
+foo = Foo 1
+
+# Ambiguous Comment
+
+# Bar Comment 1
+bar = Bar 1
+""")
diff --git a/third_party/python/compare-locales/compare_locales/tests/merge/test_messages.py b/third_party/python/compare-locales/compare_locales/tests/merge/test_messages.py
new file mode 100644
index 0000000000..664bbd16c5
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/merge/test_messages.py
@@ -0,0 +1,93 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from compare_locales.merge import merge_channels
+
+
+class TestMergeTwo(unittest.TestCase):
+ name = "foo.properties"
+
+ def test_no_changes(self):
+ channels = (b"""
+foo = Foo 1
+""", b"""
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+""")
+
+ def test_message_added_in_first(self):
+ channels = (b"""
+foo = Foo 1
+bar = Bar 1
+""", b"""
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+bar = Bar 1
+""")
+
+ def test_message_still_in_last(self):
+ channels = (b"""
+foo = Foo 1
+""", b"""
+foo = Foo 2
+bar = Bar 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+bar = Bar 2
+""")
+
+ def test_message_reordered(self):
+ channels = (b"""
+foo = Foo 1
+bar = Bar 1
+""", b"""
+bar = Bar 2
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+bar = Bar 1
+""")
+
+
+class TestMergeThree(unittest.TestCase):
+ name = "foo.properties"
+
+ def test_no_changes(self):
+ channels = (b"""
+foo = Foo 1
+""", b"""
+foo = Foo 2
+""", b"""
+foo = Foo 3
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+""")
+
+ def test_message_still_in_last(self):
+ channels = (b"""
+foo = Foo 1
+""", b"""
+foo = Foo 2
+""", b"""
+foo = Foo 3
+bar = Bar 3
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+bar = Bar 3
+""")
diff --git a/third_party/python/compare-locales/compare_locales/tests/merge/test_unknown.py b/third_party/python/compare-locales/compare_locales/tests/merge/test_unknown.py
new file mode 100644
index 0000000000..ce74e1a10b
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/merge/test_unknown.py
@@ -0,0 +1,22 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import six
+
+from compare_locales.merge import merge_channels, MergeNotSupportedError
+
+
+class TestMergeUnknown(unittest.TestCase):
+ name = "foo.unknown"
+
+ def test_not_supported_error(self):
+ channels = (b"""
+foo = Foo 1
+""", b"""
+foo = Foo 2
+""")
+ pattern = r"Unsupported file format \(foo\.unknown\)\."
+ with six.assertRaisesRegex(self, MergeNotSupportedError, pattern):
+ merge_channels(self.name, channels)
diff --git a/third_party/python/compare-locales/compare_locales/tests/merge/test_whitespace.py b/third_party/python/compare-locales/compare_locales/tests/merge/test_whitespace.py
new file mode 100644
index 0000000000..adaedc70d1
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/merge/test_whitespace.py
@@ -0,0 +1,76 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from compare_locales.merge import merge_channels
+
+
+class TestMergeWhitespace(unittest.TestCase):
+ name = "foo.properties"
+
+ def test_trailing_spaces(self):
+ channels = (b"""
+foo = Foo 1
+ """, b"""
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+ """)
+
+ def test_blank_lines_between_messages(self):
+ channels = (b"""
+foo = Foo 1
+
+bar = Bar 1
+""", b"""
+foo = Foo 2
+bar = Bar 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+
+bar = Bar 1
+""")
+
+ def test_no_eol(self):
+ channels = (b"""
+foo = Foo 1""", b"""
+foo = Foo 2
+bar = Bar 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+bar = Bar 2
+""")
+
+ def test_still_in_last_with_blank(self):
+ channels = (b"""
+
+foo = Foo 1
+
+baz = Baz 1
+
+""", b"""
+
+foo = Foo 2
+
+bar = Bar 2
+
+baz = Baz 2
+
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+
+foo = Foo 1
+
+bar = Bar 2
+
+baz = Baz 1
+
+""")
diff --git a/third_party/python/compare-locales/compare_locales/tests/paths/__init__.py b/third_party/python/compare-locales/compare_locales/tests/paths/__init__.py
new file mode 100644
index 0000000000..1a99c53e2f
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/paths/__init__.py
@@ -0,0 +1,132 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+from collections import defaultdict
+import six
+import tempfile
+from compare_locales.paths import (
+ ProjectConfig, File, ProjectFiles, TOMLParser
+)
+from compare_locales import mozpath
+import pytoml as toml
+
+
+class Rooted(object):
+ def setUp(self):
+        # Use the temp dir as self.root; it is absolute on all platforms.
+ self.root = mozpath.normpath(tempfile.gettempdir())
+
+ def path(self, leaf=''):
+ return self.root + leaf
+
+ def leaf(self, path):
+ return mozpath.relpath(path, self.root)
+
+
+class SetupMixin(object):
+ def setUp(self):
+ self.cfg = ProjectConfig(None)
+ self.file = File(
+ '/tmp/somedir/de/browser/one/two/file.ftl',
+ 'file.ftl',
+ module='browser', locale='de')
+ self.other_file = File(
+ '/tmp/somedir/de/toolkit/two/one/file.ftl',
+ 'file.ftl',
+ module='toolkit', locale='de')
+ self.cfg.set_locales(['de'])
+
+
+class MockOS(object):
+ '''Mock `os.path.isfile` and `os.walk` based on a list of files.
+ '''
+ def __init__(self, root, paths):
+ self.root = root
+ self.files = []
+ self.dirs = {}
+ if not paths:
+ return
+ if isinstance(paths[0], six.string_types):
+ paths = [
+ mozpath.split(path)
+ for path in sorted(paths)
+ ]
+ child_paths = defaultdict(list)
+ for segs in paths:
+ if len(segs) == 1:
+ self.files.append(segs[0])
+ else:
+ child_paths[segs[0]].append(segs[1:])
+ for root, leafs in child_paths.items():
+ self.dirs[root] = MockOS(mozpath.join(self.root, root), leafs)
+
+ def find(self, dir_path):
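+        # Descend through the mock directory tree to the node for dir_path,
+        # returning None if the path is outside the root or does not exist.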
+ relpath = mozpath.relpath(dir_path, self.root)
+ if relpath.startswith('..'):
+ return None
+ if relpath in ('', '.'):
+ return self
+ segs = mozpath.split(relpath)
+ node = self
+ while segs:
+ seg = segs.pop(0)
+ if seg not in node.dirs:
+ return None
+ node = node.dirs[seg]
+ return node
+
+ def isfile(self, path):
+ dirname = mozpath.dirname(path)
+ if dirname:
+ node = self.find(dirname)
+ else:
+ node = self
+ return node and mozpath.basename(path) in node.files
+
+ def walk(self, path=None):
+ if path is None:
+ node = self
+ else:
+ node = self.find(path)
+ if node is None:
+ return
+ subdirs = sorted(node.dirs)
+ if node.root is not None:
+ yield node.root, subdirs, node.files
+ for subdir in subdirs:
+ child = node.dirs[subdir]
+ for tpl in child.walk():
+ yield tpl
+
+
+class MockProjectFiles(ProjectFiles):
+ def __init__(self, mocks, locale, projects, mergebase=None):
+ (super(MockProjectFiles, self)
+ .__init__(locale, projects, mergebase=mergebase))
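+        # Build the mock filesystem from the given absolute paths, rooted at
+        # their common prefix.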
+ root = mozpath.commonprefix(mocks)
+ files = [mozpath.relpath(f, root) for f in mocks]
+ self.mocks = MockOS(root, files)
+
+ def _isfile(self, path):
+ return self.mocks.isfile(path)
+
+ def _walk(self, base):
+ base = mozpath.normpath(base)
+ root = self.mocks.find(base)
+ if not root:
+ return
+ for tpl in root.walk():
+ yield tpl
+
+
+class MockTOMLParser(TOMLParser):
+ def __init__(self, mock_data):
+ self.mock_data = mock_data
+
+ def load(self, ctx):
+ p = mozpath.basename(ctx.path)
+ ctx.data = toml.loads(self.mock_data[p])
diff --git a/third_party/python/compare-locales/compare_locales/tests/paths/test_configparser.py b/third_party/python/compare-locales/compare_locales/tests/paths/test_configparser.py
new file mode 100644
index 0000000000..fe9d7dcf6e
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/paths/test_configparser.py
@@ -0,0 +1,126 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+import unittest
+import six
+
+from . import MockTOMLParser
+from compare_locales.paths.matcher import Matcher
+from compare_locales.paths.project import ProjectConfig, ExcludeError
+from compare_locales import mozpath
+
+
+class TestConfigParser(unittest.TestCase):
+ def test_includes(self):
+ parser = MockTOMLParser({
+ "root.toml": """
+basepath = "."
+[env]
+ o = "toolkit"
+[[includes]]
+ path = "{o}/other.toml"
+[[includes]]
+ path = "dom/more.toml"
+""",
+ "other.toml": """
+basepath = "."
+""",
+ "more.toml": """
+basepath = "."
+"""
+ })
+ config = parser.parse("root.toml")
+ self.assertIsInstance(config, ProjectConfig)
+ configs = list(config.configs)
+ self.assertEqual(configs[0], config)
+ self.assertListEqual(
+ [c.path for c in configs],
+ [
+ "root.toml",
+ mozpath.abspath("toolkit/other.toml"),
+ mozpath.abspath("dom/more.toml"),
+ ]
+ )
+
+ def test_excludes(self):
+ parser = MockTOMLParser({
+ "root.toml": """
+basepath = "."
+[[excludes]]
+ path = "exclude.toml"
+[[excludes]]
+ path = "other-exclude.toml"
+ """,
+ "exclude.toml": """
+basepath = "."
+""",
+ "other-exclude.toml": """
+basepath = "."
+""",
+ "grandparent.toml": """
+basepath = "."
+[[includes]]
+ path = "root.toml"
+""",
+ "wrapped.toml": """
+basepath = "."
+[[excludes]]
+ path = "root.toml"
+ """
+ })
+ config = parser.parse("root.toml")
+ self.assertIsInstance(config, ProjectConfig)
+ configs = list(config.configs)
+ self.assertListEqual(configs, [config])
+ self.assertEqual(
+ [c.path for c in config.excludes],
+ [
+ mozpath.abspath("exclude.toml"),
+ mozpath.abspath("other-exclude.toml"),
+ ]
+ )
+ with six.assertRaisesRegex(self, ExcludeError, 'Included configs'):
+ parser.parse("grandparent.toml")
+ with six.assertRaisesRegex(self, ExcludeError, 'Excluded configs'):
+ parser.parse("wrapped.toml")
+
+ def test_paths(self):
+ parser = MockTOMLParser({
+ "l10n.toml": """
+[[paths]]
+ l10n = "some/{locale}/*"
+""",
+ "ref.toml": """
+[[paths]]
+ reference = "ref/l10n/*"
+ l10n = "some/{locale}/*"
+""",
+ "tests.toml": """
+[[paths]]
+ l10n = "some/{locale}/*"
+ test = [
+ "run_this",
+ ]
+""",
+ })
+
+ paths = parser.parse("l10n.toml").paths
+ self.assertIn("l10n", paths[0])
+ self.assertIsInstance(paths[0]["l10n"], Matcher)
+ self.assertNotIn("reference", paths[0])
+ self.assertNotIn("test", paths[0])
+ paths = parser.parse("ref.toml").paths
+ self.assertIn("l10n", paths[0])
+ self.assertIsInstance(paths[0]["l10n"], Matcher)
+ self.assertIn("reference", paths[0])
+ self.assertIsInstance(paths[0]["reference"], Matcher)
+ self.assertNotIn("test", paths[0])
+ paths = parser.parse("tests.toml").paths
+ self.assertIn("l10n", paths[0])
+ self.assertIsInstance(paths[0]["l10n"], Matcher)
+ self.assertNotIn("reference", paths[0])
+ self.assertIn("test", paths[0])
+ self.assertListEqual(paths[0]["test"], ["run_this"])
diff --git a/third_party/python/compare-locales/compare_locales/tests/paths/test_files.py b/third_party/python/compare-locales/compare_locales/tests/paths/test_files.py
new file mode 100644
index 0000000000..997d7d2ffc
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/paths/test_files.py
@@ -0,0 +1,572 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import unittest
+import mock
+
+from compare_locales.paths import (
+ File,
+ ProjectConfig,
+ ProjectFiles,
+)
+from . import (
+ MockOS,
+ MockProjectFiles,
+ MockTOMLParser,
+ Rooted,
+)
+
+
+class TestMockOS(Rooted, unittest.TestCase):
+ def setUp(self):
+ self.node = MockOS('jazz', [
+ 'one/bit',
+ 'two/deep/in/directories/with/file1',
+ 'two/deep/in/directories/with/file2',
+ 'three/feet',
+ ])
+
+ def test_isfile(self):
+ self.assertTrue(self.node.isfile('jazz/one/bit'))
+ self.assertFalse(self.node.isfile('jazz/one'))
+ self.assertFalse(self.node.isfile('foo'))
+
+ def test_walk(self):
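+        # walk() mimics os.walk: (dirpath, dirnames, filenames) tuples,
+        # with directory names reported in sorted order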
+ self.assertListEqual(
+ list(self.node.walk()),
+ [
+ ('jazz', ['one', 'three', 'two'], []),
+ ('jazz/one', [], ['bit']),
+ ('jazz/three', [], ['feet']),
+ ('jazz/two', ['deep'], []),
+ ('jazz/two/deep', ['in'], []),
+ ('jazz/two/deep/in', ['directories'], []),
+ ('jazz/two/deep/in/directories', ['with'], []),
+ ('jazz/two/deep/in/directories/with', [], [
+ 'file1',
+ 'file2',
+ ]),
+ ]
+ )
+
+ def test_find(self):
+ self.assertIsNone(self.node.find('foo'))
+ self.assertIsNone(self.node.find('jazz/one/bit'))
+ self.assertIsNone(self.node.find('jazz/one/bit/too/much'))
+ self.assertIsNotNone(self.node.find('jazz/one'))
+ self.assertListEqual(list(self.node.find('jazz/one').walk()), [
+ ('jazz/one', [], ['bit']),
+ ])
+ self.assertEqual(self.node.find('jazz'), self.node)
+
+
+class TestProjectPaths(Rooted, unittest.TestCase):
+ def test_l10n_path(self):
+ cfg = ProjectConfig(None)
+ cfg.add_environment(l10n_base=self.root)
+ cfg.set_locales(['de'])
+ cfg.add_paths({
+ 'l10n': '{l10n_base}/{locale}/*'
+ })
+ mocks = [
+ self.path(leaf)
+ for leaf in (
+ '/de/good.ftl',
+ '/de/not/subdir/bad.ftl',
+ '/fr/good.ftl',
+ '/fr/not/subdir/bad.ftl',
+ )
+ ]
+ files = MockProjectFiles(mocks, 'de', [cfg])
+ self.assertListEqual(
+ list(files),
+ [
+ (self.path('/de/good.ftl'), None, None, set())
+ ]
+ )
+ self.assertTupleEqual(
+ files.match(self.path('/de/good.ftl')),
+ (self.path('/de/good.ftl'), None, None, set())
+ )
+ self.assertIsNone(files.match(self.path('/fr/something.ftl')))
+ files = MockProjectFiles(mocks, 'de', [cfg], mergebase='merging')
+ self.assertListEqual(
+ list(files),
+ [
+ (self.path('/de/good.ftl'), None, 'merging/de/good.ftl', set())
+ ]
+ )
+ self.assertTupleEqual(
+ files.match(self.path('/de/something.ftl')),
+ (self.path('/de/something.ftl'),
+ None,
+ 'merging/de/something.ftl',
+ set()))
+ # 'fr' is not in the locale list, should return no files
+ files = MockProjectFiles(mocks, 'fr', [cfg])
+ self.assertListEqual(list(files), [])
+
+ def test_single_reference_path(self):
+ cfg = ProjectConfig(None)
+ cfg.add_environment(l10n_base=self.path('/l10n'))
+ cfg.set_locales(['de'])
+ cfg.add_paths({
+ 'l10n': '{l10n_base}/{locale}/good.ftl',
+ 'reference': self.path('/reference/good.ftl')
+ })
+ mocks = [
+ self.path('/reference/good.ftl'),
+ self.path('/reference/not/subdir/bad.ftl'),
+ ]
+ files = MockProjectFiles(mocks, 'de', [cfg])
+ self.assertListEqual(
+ list(files),
+ [
+ (self.path('/l10n/de/good.ftl'),
+ self.path('/reference/good.ftl'),
+ None,
+ set()),
+ ])
+ self.assertTupleEqual(
+ files.match(self.path('/reference/good.ftl')),
+ (self.path('/l10n/de/good.ftl'),
+ self.path('/reference/good.ftl'),
+ None,
+ set()),
+ )
+ self.assertTupleEqual(
+ files.match(self.path('/l10n/de/good.ftl')),
+ (self.path('/l10n/de/good.ftl'),
+ self.path('/reference/good.ftl'),
+ None,
+ set()),
+ )
+
+ def test_reference_path(self):
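+        # Files are enumerated from the reference tree as well: a
+        # reference-only file such as ref.ftl still yields the expected
+        # l10n path for 'de'.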
+ cfg = ProjectConfig(None)
+ cfg.add_environment(l10n_base=self.path('/l10n'))
+ cfg.set_locales(['de'])
+ cfg.add_paths({
+ 'l10n': '{l10n_base}/{locale}/*',
+ 'reference': self.path('/reference/*')
+ })
+ mocks = [
+ self.path(leaf)
+ for leaf in [
+ '/l10n/de/good.ftl',
+ '/l10n/de/not/subdir/bad.ftl',
+ '/l10n/fr/good.ftl',
+ '/l10n/fr/not/subdir/bad.ftl',
+ '/reference/ref.ftl',
+ '/reference/not/subdir/bad.ftl',
+ ]
+ ]
+ files = MockProjectFiles(mocks, 'de', [cfg])
+ self.assertListEqual(
+ list(files),
+ [
+ (self.path('/l10n/de/good.ftl'),
+ self.path('/reference/good.ftl'),
+ None,
+ set()),
+ (self.path('/l10n/de/ref.ftl'),
+ self.path('/reference/ref.ftl'),
+ None,
+ set()),
+ ])
+ self.assertTupleEqual(
+ files.match(self.path('/l10n/de/good.ftl')),
+ (self.path('/l10n/de/good.ftl'),
+ self.path('/reference/good.ftl'),
+ None,
+ set()),
+ )
+ self.assertTupleEqual(
+ files.match(self.path('/reference/good.ftl')),
+ (self.path('/l10n/de/good.ftl'),
+ self.path('/reference/good.ftl'),
+ None,
+ set()),
+ )
+ self.assertIsNone(files.match(self.path('/l10n/de/subdir/bad.ftl')))
+ self.assertIsNone(files.match(self.path('/reference/subdir/bad.ftl')))
+ files = MockProjectFiles(mocks, 'de', [cfg], mergebase='merging')
+ self.assertListEqual(
+ list(files),
+ [
+ (self.path('/l10n/de/good.ftl'),
+ self.path('/reference/good.ftl'),
+ 'merging/de/good.ftl', set()),
+ (self.path('/l10n/de/ref.ftl'),
+ self.path('/reference/ref.ftl'),
+ 'merging/de/ref.ftl', set()),
+ ])
+ self.assertTupleEqual(
+ files.match(self.path('/l10n/de/good.ftl')),
+ (self.path('/l10n/de/good.ftl'),
+ self.path('/reference/good.ftl'),
+ 'merging/de/good.ftl', set()),
+ )
+ self.assertTupleEqual(
+ files.match(self.path('/reference/good.ftl')),
+ (self.path('/l10n/de/good.ftl'),
+ self.path('/reference/good.ftl'),
+ 'merging/de/good.ftl', set()),
+ )
+ # 'fr' is not in the locale list, should return no files
+ files = MockProjectFiles(mocks, 'fr', [cfg])
+ self.assertListEqual(list(files), [])
+
+ def test_partial_l10n(self):
+ cfg = ProjectConfig(None)
+ cfg.set_locales(['de', 'fr'])
+ cfg.add_paths({
+ 'l10n': self.path('/{locale}/major/*')
+ }, {
+ 'l10n': self.path('/{locale}/minor/*'),
+ 'locales': ['de']
+ })
+ mocks = [
+ self.path(leaf)
+ for leaf in [
+ '/de/major/good.ftl',
+ '/de/major/not/subdir/bad.ftl',
+ '/de/minor/good.ftl',
+ '/fr/major/good.ftl',
+ '/fr/major/not/subdir/bad.ftl',
+ '/fr/minor/good.ftl',
+ ]
+ ]
+ files = MockProjectFiles(mocks, 'de', [cfg])
+ self.assertListEqual(
+ list(files),
+ [
+ (self.path('/de/major/good.ftl'), None, None, set()),
+ (self.path('/de/minor/good.ftl'), None, None, set()),
+ ])
+ self.assertTupleEqual(
+ files.match(self.path('/de/major/some.ftl')),
+ (self.path('/de/major/some.ftl'), None, None, set()))
+ self.assertIsNone(files.match(self.path('/de/other/some.ftl')))
+ # 'fr' is not in the locale list of minor, should only return major
+ files = MockProjectFiles(mocks, 'fr', [cfg])
+ self.assertListEqual(
+ list(files),
+ [
+ (self.path('/fr/major/good.ftl'), None, None, set()),
+ ])
+ self.assertIsNone(files.match(self.path('/fr/minor/some.ftl')))
+
+ def test_validation_mode(self):
+ cfg = ProjectConfig(None)
+ cfg.add_environment(l10n_base=self.path('/l10n'))
+ cfg.set_locales(['de'])
+ cfg.add_paths({
+ 'l10n': '{l10n_base}/{locale}/*',
+ 'reference': self.path('/reference/*')
+ })
+ mocks = [
+ self.path(leaf)
+ for leaf in [
+ '/l10n/de/good.ftl',
+ '/l10n/de/not/subdir/bad.ftl',
+ '/l10n/fr/good.ftl',
+ '/l10n/fr/not/subdir/bad.ftl',
+ '/reference/ref.ftl',
+ '/reference/not/subdir/bad.ftl',
+ ]
+ ]
+ # `None` switches on validation mode
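+        # in that mode only reference files are listed, each paired with itself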
+ files = MockProjectFiles(mocks, None, [cfg])
+ self.assertListEqual(
+ list(files),
+ [
+ (self.path('/reference/ref.ftl'),
+ self.path('/reference/ref.ftl'),
+ None,
+ set()),
+ ])
+
+
+@mock.patch('os.path.isfile')
+@mock.patch('os.walk')
+class TestExcludes(Rooted, unittest.TestCase):
+ def _list(self, locale, _walk, _isfile):
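+        # pontoon.toml includes the community config (configs-pontoon.toml)
+        # and excludes anything covered by configs-vendor.toml and
+        # configs-special-templates.toml; vendor.toml includes
+        # configs-vendor.toml and excludes only configs-special-templates.toml.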
+ parser = MockTOMLParser({
+ "pontoon.toml":
+ '''\
+basepath = "."
+
+[[includes]]
+ path = "configs-pontoon.toml"
+
+[[excludes]]
+ path = "configs-vendor.toml"
+[[excludes]]
+ path = "configs-special-templates.toml"
+''',
+ "vendor.toml":
+ '''\
+basepath = "."
+
+[[includes]]
+ path = "configs-vendor.toml"
+
+[[excludes]]
+ path = "configs-special-templates.toml"
+''',
+ "configs-pontoon.toml":
+ '''\
+basepath = "."
+
+locales = [
+ "de",
+ "gd",
+ "it",
+]
+
+[[paths]]
+ reference = "en/**/*.ftl"
+ l10n = "{locale}/**/*.ftl"
+''',
+ "configs-vendor.toml":
+ '''\
+basepath = "."
+
+locales = [
+ "de",
+ "it",
+]
+
+[[paths]]
+ reference = "en/firefox/*.ftl"
+ l10n = "{locale}/firefox/*.ftl"
+''',
+ "configs-special-templates.toml":
+ '''\
+basepath = "."
+
+[[paths]]
+ reference = "en/firefox/home.ftl"
+ l10n = "{locale}/firefox/home.ftl"
+ locales = [
+ "de",
+ "fr",
+ ]
+[[paths]]
+ reference = "en/firefox/pagina.ftl"
+ l10n = "{locale}/firefox/pagina.ftl"
+ locales = [
+ "gd",
+ ]
+''',
+ })
+ pontoon = parser.parse(self.path('/pontoon.toml'))
+ vendor = parser.parse(self.path('/vendor.toml'))
+ pc = ProjectFiles(locale, [pontoon, vendor])
+ mock_files = [
+ '{}/{}/{}'.format(locale, dir, f)
+ for locale in ('de', 'en', 'gd', 'it')
+ for dir, files in (
+ ('firefox', ('home.ftl', 'feature.ftl')),
+ ('mozorg', ('mission.ftl',)),
+ )
+ for f in files
+ ]
+ os_ = MockOS(self.root, mock_files)
+ _isfile.side_effect = os_.isfile
+ _walk.side_effect = os_.walk
+ local_files = [self.leaf(p).split('/', 1)[1] for p, _, _, _ in pc]
+ return pontoon, vendor, local_files
+
+ def test_reference(self, _walk, _isfile):
+ pontoon_config, vendor_config, files = self._list(None, _walk, _isfile)
+ pontoon_files = ProjectFiles(None, [pontoon_config])
+ vendor_files = ProjectFiles(None, [vendor_config])
+ self.assertListEqual(
+ files,
+ [
+ 'firefox/feature.ftl',
+ 'firefox/home.ftl',
+ 'mozorg/mission.ftl',
+ ]
+ )
+ ref_path = self.path('/en/firefox/feature.ftl')
+ self.assertIsNotNone(pontoon_files.match(ref_path))
+ self.assertIsNotNone(vendor_files.match(ref_path))
+ ref_path = self.path('/en/firefox/home.ftl')
+ self.assertIsNotNone(pontoon_files.match(ref_path))
+ self.assertIsNotNone(vendor_files.match(ref_path))
+ ref_path = self.path('/en/mozorg/mission.ftl')
+ self.assertIsNotNone(pontoon_files.match(ref_path))
+ self.assertIsNone(vendor_files.match(ref_path))
+
+ def test_de(self, _walk, _isfile):
+ # home.ftl excluded completely by configs-special-templates.toml
+ # firefox/* only in vendor
+ pontoon_config, vendor_config, files = self._list('de', _walk, _isfile)
+ pontoon_files = ProjectFiles('de', [pontoon_config])
+ vendor_files = ProjectFiles('de', [vendor_config])
+ self.assertListEqual(
+ files,
+ [
+ 'firefox/feature.ftl',
+ # 'firefox/home.ftl',
+ 'mozorg/mission.ftl',
+ ]
+ )
+ l10n_path = self.path('/de/firefox/feature.ftl')
+ ref_path = self.path('/en/firefox/feature.ftl')
+ self.assertEqual(
+ pontoon_config.filter(
+ File(
+ l10n_path,
+ 'de/firefox/feature.ftl',
+ locale='de'
+ )
+ ),
+ 'ignore'
+ )
+ self.assertIsNone(pontoon_files.match(l10n_path))
+ self.assertIsNone(pontoon_files.match(ref_path))
+ self.assertIsNotNone(vendor_files.match(l10n_path))
+ self.assertIsNotNone(vendor_files.match(ref_path))
+ l10n_path = self.path('/de/firefox/home.ftl')
+ ref_path = self.path('/en/firefox/home.ftl')
+ self.assertEqual(
+ pontoon_config.filter(
+ File(
+ l10n_path,
+ 'de/firefox/home.ftl',
+ locale='de'
+ )
+ ),
+ 'ignore'
+ )
+ self.assertIsNone(pontoon_files.match(l10n_path))
+ self.assertIsNone(pontoon_files.match(ref_path))
+ self.assertIsNone(vendor_files.match(l10n_path))
+ self.assertIsNone(vendor_files.match(ref_path))
+ l10n_path = self.path('/de/mozorg/mission.ftl')
+ ref_path = self.path('/en/mozorg/mission.ftl')
+ self.assertEqual(
+ pontoon_config.filter(
+ File(
+ l10n_path,
+ 'de/mozorg/mission.ftl',
+ locale='de'
+ )
+ ),
+ 'error'
+ )
+ self.assertIsNotNone(pontoon_files.match(l10n_path))
+ self.assertIsNotNone(pontoon_files.match(ref_path))
+ self.assertIsNone(vendor_files.match(l10n_path))
+ self.assertIsNone(vendor_files.match(ref_path))
+
+ def test_gd(self, _walk, _isfile):
+ # only community localization
+ pontoon_config, vendor_config, files = self._list('gd', _walk, _isfile)
+ pontoon_files = ProjectFiles('gd', [pontoon_config])
+ vendor_files = ProjectFiles('gd', [vendor_config])
+ self.assertListEqual(
+ files,
+ [
+ 'firefox/feature.ftl',
+ 'firefox/home.ftl',
+ 'mozorg/mission.ftl',
+ ]
+ )
+ l10n_path = self.path('/gd/firefox/home.ftl')
+ ref_path = self.path('/en/firefox/home.ftl')
+ self.assertEqual(
+ pontoon_config.filter(
+ File(
+ l10n_path,
+ 'gd/firefox/home.ftl',
+ locale='gd'
+ )
+ ),
+ 'error'
+ )
+ self.assertIsNotNone(pontoon_files.match(l10n_path))
+ self.assertIsNotNone(pontoon_files.match(ref_path))
+ self.assertIsNone(vendor_files.match(l10n_path))
+ self.assertIsNone(vendor_files.match(ref_path))
+
+ def test_it(self, _walk, _isfile):
+ # all pages translated, but split between vendor and community
+ pontoon_config, vendor_config, files = self._list('it', _walk, _isfile)
+ pontoon_files = ProjectFiles('it', [pontoon_config])
+ vendor_files = ProjectFiles('it', [vendor_config])
+ self.assertListEqual(
+ files,
+ [
+ 'firefox/feature.ftl',
+ 'firefox/home.ftl',
+ 'mozorg/mission.ftl',
+ ]
+ )
+ l10n_path = self.path('/it/firefox/home.ftl')
+ ref_path = self.path('/en/firefox/home.ftl')
+ file = File(
+ l10n_path,
+ 'it/firefox/home.ftl',
+ locale='it'
+ )
+ self.assertEqual(pontoon_config.filter(file), 'ignore')
+ self.assertEqual(vendor_config.filter(file), 'error')
+ self.assertIsNone(pontoon_files.match(l10n_path))
+ self.assertIsNone(pontoon_files.match(ref_path))
+ self.assertIsNotNone(vendor_files.match(l10n_path))
+ self.assertIsNotNone(vendor_files.match(ref_path))
+
+
+class TestL10nMerge(Rooted, unittest.TestCase):
+ # need to go through TOMLParser, as that's handling most of the
+ # environment
+ def test_merge_paths(self):
+ parser = MockTOMLParser({
+ "base.toml":
+ '''\
+basepath = "."
+locales = [
+ "de",
+]
+[env]
+ l = "{l10n_base}/{locale}/"
+[[paths]]
+ reference = "reference/*"
+ l10n = "{l}*"
+'''})
+ cfg = parser.parse(
+ self.path('/base.toml'),
+ env={'l10n_base': self.path('/l10n')}
+ )
+ mocks = [
+ self.path(leaf)
+ for leaf in [
+ '/l10n/de/good.ftl',
+ '/l10n/de/not/subdir/bad.ftl',
+ '/l10n/fr/good.ftl',
+ '/l10n/fr/not/subdir/bad.ftl',
+ '/reference/ref.ftl',
+ '/reference/not/subdir/bad.ftl',
+ ]
+ ]
+ files = MockProjectFiles(mocks, 'de', [cfg], self.path('/mergers'))
+ self.assertListEqual(
+ list(files),
+ [
+ (self.path('/l10n/de/good.ftl'),
+ self.path('/reference/good.ftl'),
+ self.path('/mergers/de/good.ftl'),
+ set()),
+ (self.path('/l10n/de/ref.ftl'),
+ self.path('/reference/ref.ftl'),
+ self.path('/mergers/de/ref.ftl'),
+ set()),
+ ])
diff --git a/third_party/python/compare-locales/compare_locales/tests/paths/test_ini.py b/third_party/python/compare-locales/compare_locales/tests/paths/test_ini.py
new file mode 100644
index 0000000000..ddb75e2b1b
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/paths/test_ini.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import unittest
+
+from . import (
+ SetupMixin,
+)
+
+
+class TestConfigLegacy(SetupMixin, unittest.TestCase):
+
+ def test_filter_py_true(self):
+        'Test a filter.py that just returns bool(True)'
+ def filter(mod, path, entity=None):
+ return True
+ self.cfg.set_filter_py(filter)
+ with self.assertRaises(AssertionError):
+ self.cfg.add_rules({})
+ rv = self.cfg.filter(self.file)
+ self.assertEqual(rv, 'error')
+ rv = self.cfg.filter(self.file, entity='one_entity')
+ self.assertEqual(rv, 'error')
+
+ def test_filter_py_false(self):
+        'Test a filter.py that just returns bool(False)'
+ def filter(mod, path, entity=None):
+ return False
+ self.cfg.set_filter_py(filter)
+ with self.assertRaises(AssertionError):
+ self.cfg.add_rules({})
+ rv = self.cfg.filter(self.file)
+ self.assertEqual(rv, 'ignore')
+ rv = self.cfg.filter(self.file, entity='one_entity')
+ self.assertEqual(rv, 'ignore')
+
+ def test_filter_py_error(self):
+        'Test a filter.py that just returns str("error")'
+ def filter(mod, path, entity=None):
+ return 'error'
+ self.cfg.set_filter_py(filter)
+ with self.assertRaises(AssertionError):
+ self.cfg.add_rules({})
+ rv = self.cfg.filter(self.file)
+ self.assertEqual(rv, 'error')
+ rv = self.cfg.filter(self.file, entity='one_entity')
+ self.assertEqual(rv, 'error')
+
+ def test_filter_py_ignore(self):
+        'Test a filter.py that just returns str("ignore")'
+ def filter(mod, path, entity=None):
+ return 'ignore'
+ self.cfg.set_filter_py(filter)
+ with self.assertRaises(AssertionError):
+ self.cfg.add_rules({})
+ rv = self.cfg.filter(self.file)
+ self.assertEqual(rv, 'ignore')
+ rv = self.cfg.filter(self.file, entity='one_entity')
+ self.assertEqual(rv, 'ignore')
+
+ def test_filter_py_report(self):
+        'Test a filter.py that returns str("report"), which maps to "warning"'
+ def filter(mod, path, entity=None):
+ return 'report'
+ self.cfg.set_filter_py(filter)
+ with self.assertRaises(AssertionError):
+ self.cfg.add_rules({})
+ rv = self.cfg.filter(self.file)
+ self.assertEqual(rv, 'warning')
+ rv = self.cfg.filter(self.file, entity='one_entity')
+ self.assertEqual(rv, 'warning')
+
+ def test_filter_py_module(self):
+        'Test a filter.py that returns str("error") for browser and "ignore" otherwise'
+ def filter(mod, path, entity=None):
+ return 'error' if mod == 'browser' else 'ignore'
+ self.cfg.set_filter_py(filter)
+ with self.assertRaises(AssertionError):
+ self.cfg.add_rules({})
+ rv = self.cfg.filter(self.file)
+ self.assertEqual(rv, 'error')
+ rv = self.cfg.filter(self.file, entity='one_entity')
+ self.assertEqual(rv, 'error')
+ rv = self.cfg.filter(self.other_file)
+ self.assertEqual(rv, 'ignore')
+ rv = self.cfg.filter(self.other_file, entity='one_entity')
+ self.assertEqual(rv, 'ignore')
diff --git a/third_party/python/compare-locales/compare_locales/tests/paths/test_matcher.py b/third_party/python/compare-locales/compare_locales/tests/paths/test_matcher.py
new file mode 100644
index 0000000000..74a20a84ce
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/paths/test_matcher.py
@@ -0,0 +1,500 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import six
+import unittest
+
+from compare_locales.paths.matcher import Matcher, ANDROID_STANDARD_MAP
+from . import Rooted
+
+
+class TestMatcher(unittest.TestCase):
+
+ def test_matcher(self):
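+        # '*' matches a single path segment, '**' also matches across '/'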
+ one = Matcher('foo/*')
+ self.assertTrue(one.match('foo/baz'))
+ self.assertFalse(one.match('foo/baz/qux'))
+ other = Matcher('bar/*')
+ self.assertTrue(other.match('bar/baz'))
+ self.assertFalse(other.match('bar/baz/qux'))
+ self.assertEqual(one.sub(other, 'foo/baz'), 'bar/baz')
+ self.assertIsNone(one.sub(other, 'bar/baz'))
+ one = Matcher('foo/**')
+ self.assertTrue(one.match('foo/baz'))
+ self.assertTrue(one.match('foo/baz/qux'))
+ other = Matcher('bar/**')
+ self.assertTrue(other.match('bar/baz'))
+ self.assertTrue(other.match('bar/baz/qux'))
+ self.assertEqual(one.sub(other, 'foo/baz'), 'bar/baz')
+ self.assertEqual(one.sub(other, 'foo/baz/qux'), 'bar/baz/qux')
+ one = Matcher('foo/*/one/**')
+ self.assertTrue(one.match('foo/baz/one/qux'))
+ self.assertFalse(one.match('foo/baz/bez/one/qux'))
+ other = Matcher('bar/*/other/**')
+ self.assertTrue(other.match('bar/baz/other/qux'))
+ self.assertFalse(other.match('bar/baz/bez/other/qux'))
+ self.assertEqual(one.sub(other, 'foo/baz/one/qux'),
+ 'bar/baz/other/qux')
+ self.assertEqual(one.sub(other, 'foo/baz/one/qux/zzz'),
+ 'bar/baz/other/qux/zzz')
+ self.assertIsNone(one.sub(other, 'foo/baz/bez/one/qux'))
+ one = Matcher('foo/**/bar/**')
+ self.assertTrue(one.match('foo/bar/baz.qux'))
+ self.assertTrue(one.match('foo/tender/bar/baz.qux'))
+ self.assertFalse(one.match('foo/nobar/baz.qux'))
+ self.assertFalse(one.match('foo/tender/bar'))
+ other = Matcher('baz/**/qux/**')
+ self.assertEqual(one.sub(other, 'foo/bar/baz.qux'), 'baz/qux/baz.qux')
+ self.assertEqual(
+ one.sub(other, 'foo/tender/bar/baz.qux'),
+ 'baz/tender/qux/baz.qux'
+ )
+
+ def test_encoded_matcher(self):
+ one = Matcher('foo/*', encoding='utf-8')
+ self.assertTrue(one.match(b'foo/bar'))
+ other = Matcher('bar/*', encoding='utf-8')
+ self.assertEqual(one.sub(other, b'foo/baz'), b'bar/baz')
+
+ def test_prefix(self):
+ self.assertEqual(
+ Matcher('foo/bar.file').prefix, 'foo/bar.file'
+ )
+ self.assertEqual(
+ Matcher('foo/*').prefix, 'foo/'
+ )
+ self.assertEqual(
+ Matcher('foo/**').prefix, 'foo/'
+ )
+ self.assertEqual(
+ Matcher('foo/*/bar').prefix, 'foo/'
+ )
+ self.assertEqual(
+ Matcher('foo/**/bar').prefix, 'foo/'
+ )
+ self.assertEqual(
+ Matcher('foo/**/bar/*').prefix, 'foo/'
+ )
+ self.assertEqual(
+ Matcher('foo/{v}/bar').prefix,
+ 'foo/'
+ )
+ self.assertEqual(
+ Matcher('foo/{v}/bar', {'v': 'expanded'}).prefix,
+ 'foo/expanded/bar'
+ )
+ self.assertEqual(
+ Matcher('foo/{v}/*/bar').prefix,
+ 'foo/'
+ )
+ self.assertEqual(
+ Matcher('foo/{v}/*/bar', {'v': 'expanded'}).prefix,
+ 'foo/expanded/'
+ )
+ self.assertEqual(
+ Matcher('foo/{v}/*/bar', {'v': '{missing}'}).prefix,
+ 'foo/'
+ )
+
+ def test_encoded_prefix(self):
+ self.assertEqual(
+ Matcher('foo/bar.file', encoding='utf-8').prefix, b'foo/bar.file'
+ )
+ self.assertEqual(
+ Matcher('foo/*', encoding='utf-8').prefix, b'foo/'
+ )
+ self.assertEqual(
+ Matcher('foo/{v}/bar', encoding='utf-8').prefix,
+ b'foo/'
+ )
+ self.assertEqual(
+ Matcher('foo/{v}/bar', {'v': 'expanded'}, encoding='utf-8').prefix,
+ b'foo/expanded/bar'
+ )
+
+ def test_variables(self):
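+        # {name} placeholders are captured into the match dict; a repeated
+        # variable has to match the same value each time it occurs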
+ self.assertDictEqual(
+ Matcher('foo/bar.file').match('foo/bar.file'),
+ {}
+ )
+ self.assertDictEqual(
+ Matcher('{path}/bar.file').match('foo/bar.file'),
+ {
+ 'path': 'foo'
+ }
+ )
+ self.assertDictEqual(
+ Matcher('{ path }/bar.file').match('foo/bar.file'),
+ {
+ 'path': 'foo'
+ }
+ )
+ self.assertIsNone(
+ Matcher('{ var }/foopy/{ var }/bears')
+ .match('one/foopy/other/bears')
+ )
+ self.assertDictEqual(
+ Matcher('{ var }/foopy/{ var }/bears')
+ .match('same_value/foopy/same_value/bears'),
+ {
+ 'var': 'same_value'
+ }
+ )
+ self.assertIsNone(
+ Matcher('{ var }/foopy/bears', {'var': 'other'})
+ .match('one/foopy/bears')
+ )
+ self.assertDictEqual(
+ Matcher('{ var }/foopy/bears', {'var': 'one'})
+ .match('one/foopy/bears'),
+ {
+ 'var': 'one'
+ }
+ )
+ self.assertDictEqual(
+ Matcher('{one}/{two}/something', {
+ 'one': 'some/segment',
+ 'two': 'with/a/lot/of'
+ }).match('some/segment/with/a/lot/of/something'),
+ {
+ 'one': 'some/segment',
+ 'two': 'with/a/lot/of'
+ }
+ )
+ self.assertDictEqual(
+ Matcher('{l}**', {
+ 'l': 'foo/{locale}/'
+ }).match('foo/it/path'),
+ {
+ 'l': 'foo/it/',
+ 'locale': 'it',
+ 's1': 'path',
+ }
+ )
+ self.assertDictEqual(
+ Matcher('{l}*', {
+ 'l': 'foo/{locale}/'
+ }).match('foo/it/path'),
+ {
+ 'l': 'foo/it/',
+ 'locale': 'it',
+ 's1': 'path',
+ }
+ )
+
+ def test_encoded_variables(self):
+ self.assertDictEqual(
+ Matcher('foo/bar.file', encoding='utf-8').match(b'foo/bar.file'),
+ {}
+ )
+ self.assertDictEqual(
+ Matcher(
+ '{path}/bar.file', encoding='utf-8'
+ ).match(b'foo/bar.file'),
+ {
+ 'path': 'foo'
+ }
+ )
+ self.assertDictEqual(
+ Matcher('{l}*', {
+ 'l': 'foo/{locale}/'
+ }, encoding='utf-8').match(b'foo/it/path'),
+ {
+ 'l': 'foo/it/',
+ 'locale': 'it',
+ 's1': 'path',
+ }
+ )
+
+ def test_variables_sub(self):
+ one = Matcher('{base}/{loc}/*', {'base': 'ONE_BASE'})
+ other = Matcher('{base}/somewhere/*', {'base': 'OTHER_BASE'})
+ self.assertEqual(
+ one.sub(other, 'ONE_BASE/ab-CD/special'),
+ 'OTHER_BASE/somewhere/special'
+ )
+ one = Matcher('{base}/{loc}/*', {'base': 'ONE_BASE'}, encoding='utf-8')
+ other = Matcher(
+ '{base}/somewhere/*', {'base': 'OTHER_BASE'}, encoding='utf-8'
+ )
+ self.assertEqual(
+ one.sub(other, b'ONE_BASE/ab-CD/special'),
+ b'OTHER_BASE/somewhere/special'
+ )
+
+ def test_copy(self):
+ one = Matcher('{base}/{loc}/*', {
+ 'base': 'ONE_BASE',
+ 'generic': 'keep'
+ })
+ other = Matcher(one, {'base': 'OTHER_BASE'})
+ self.assertEqual(
+ one.sub(other, 'ONE_BASE/ab-CD/special'),
+ 'OTHER_BASE/ab-CD/special'
+ )
+ self.assertDictEqual(
+ one.env,
+ {
+ 'base': ['ONE_BASE'],
+ 'generic': ['keep']
+ }
+ )
+ self.assertDictEqual(
+ other.env,
+ {
+ 'base': ['OTHER_BASE'],
+ 'generic': ['keep']
+ }
+ )
+
+ def test_eq(self):
+ self.assertEqual(
+ Matcher('foo'),
+ Matcher('foo')
+ )
+ self.assertNotEqual(
+ Matcher('foo'),
+ Matcher('bar')
+ )
+ self.assertEqual(
+ Matcher('foo', root='/bar/'),
+ Matcher('foo', root='/bar/')
+ )
+ self.assertNotEqual(
+ Matcher('foo', root='/bar/'),
+ Matcher('foo', root='/baz/')
+ )
+ self.assertNotEqual(
+ Matcher('foo'),
+ Matcher('foo', root='/bar/')
+ )
+ self.assertEqual(
+ Matcher('foo', env={'one': 'two'}),
+ Matcher('foo', env={'one': 'two'})
+ )
+ self.assertEqual(
+ Matcher('foo'),
+ Matcher('foo', env={})
+ )
+ self.assertNotEqual(
+ Matcher('foo', env={'one': 'two'}),
+ Matcher('foo', env={'one': 'three'})
+ )
+ self.assertEqual(
+ Matcher('foo', env={'other': 'val'}),
+ Matcher('foo', env={'one': 'two'})
+ )
+
+
+class ConcatTest(unittest.TestCase):
+ def test_plain(self):
+ left = Matcher('some/path/')
+ right = Matcher('with/file')
+ concatenated = left.concat(right)
+ self.assertEqual(str(concatenated), 'some/path/with/file')
+ self.assertEqual(concatenated.prefix, 'some/path/with/file')
+ pattern_concatenated = left.concat('with/file')
+ self.assertEqual(concatenated, pattern_concatenated)
+
+ def test_stars(self):
+ left = Matcher('some/*/path/')
+ right = Matcher('with/file')
+ concatenated = left.concat(right)
+ self.assertEqual(concatenated.prefix, 'some/')
+ concatenated = right.concat(left)
+ self.assertEqual(concatenated.prefix, 'with/filesome/')
+
+
+class TestAndroid(unittest.TestCase):
+    '''Special-case handling for `android_locale`, covering the non-standard
+    locale codes used in Android apps
+ '''
+ def test_match(self):
+ # test matches as well as groupdict aliasing.
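+        # e.g. values-de-rDE maps to locale de-DE, values-b+sr+Latn to sr-Latn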
+ one = Matcher('values-{android_locale}/strings.xml')
+ self.assertEqual(
+ one.match('values-de/strings.xml'),
+ {
+ 'android_locale': 'de',
+ 'locale': 'de'
+ }
+ )
+ self.assertEqual(
+ one.match('values-de-rDE/strings.xml'),
+ {
+ 'android_locale': 'de-rDE',
+ 'locale': 'de-DE'
+ }
+ )
+ self.assertEqual(
+ one.match('values-b+sr+Latn/strings.xml'),
+ {
+ 'android_locale': 'b+sr+Latn',
+ 'locale': 'sr-Latn'
+ }
+ )
+ self.assertEqual(
+ one.with_env(
+ {'locale': 'de'}
+ ).match('values-de/strings.xml'),
+ {
+ 'android_locale': 'de',
+ 'locale': 'de'
+ }
+ )
+ self.assertEqual(
+ one.with_env(
+ {'locale': 'de-DE'}
+ ).match('values-de-rDE/strings.xml'),
+ {
+ 'android_locale': 'de-rDE',
+ 'locale': 'de-DE'
+ }
+ )
+ self.assertEqual(
+ one.with_env(
+ {'locale': 'sr-Latn'}
+ ).match('values-b+sr+Latn/strings.xml'),
+ {
+ 'android_locale': 'b+sr+Latn',
+ 'locale': 'sr-Latn'
+ }
+ )
+
+ def test_repeat(self):
+ self.assertEqual(
+ Matcher('{android_locale}/{android_locale}').match(
+ 'b+sr+Latn/b+sr+Latn'
+ ),
+ {
+ 'android_locale': 'b+sr+Latn',
+ 'locale': 'sr-Latn'
+ }
+ )
+ self.assertEqual(
+ Matcher(
+ '{android_locale}/{android_locale}',
+ env={'locale': 'sr-Latn'}
+ ).match(
+ 'b+sr+Latn/b+sr+Latn'
+ ),
+ {
+ 'android_locale': 'b+sr+Latn',
+ 'locale': 'sr-Latn'
+ }
+ )
+
+ def test_mismatch(self):
+ # test failed matches
+ one = Matcher('values-{android_locale}/strings.xml')
+ self.assertIsNone(
+ one.with_env({'locale': 'de'}).match(
+ 'values-fr.xml'
+ )
+ )
+ self.assertIsNone(
+ one.with_env({'locale': 'de-DE'}).match(
+ 'values-de-DE.xml'
+ )
+ )
+ self.assertIsNone(
+ one.with_env({'locale': 'sr-Latn'}).match(
+ 'values-sr-Latn.xml'
+ )
+ )
+ self.assertIsNone(
+ Matcher('{android_locale}/{android_locale}').match(
+ 'b+sr+Latn/de-rDE'
+ )
+ )
+
+ def test_prefix(self):
+ one = Matcher('values-{android_locale}/strings.xml')
+ self.assertEqual(
+ one.with_env({'locale': 'de'}).prefix,
+ 'values-de/strings.xml'
+ )
+ self.assertEqual(
+ one.with_env({'locale': 'de-DE'}).prefix,
+ 'values-de-rDE/strings.xml'
+ )
+ self.assertEqual(
+ one.with_env({'locale': 'sr-Latn'}).prefix,
+ 'values-b+sr+Latn/strings.xml'
+ )
+ self.assertEqual(
+ one.prefix,
+ 'values-'
+ )
+
+ def test_aliases(self):
+ # test legacy locale code mapping
+ # he <-> iw, id <-> in, yi <-> ji
+ one = Matcher('values-{android_locale}/strings.xml')
+ for legacy, standard in six.iteritems(ANDROID_STANDARD_MAP):
+ self.assertDictEqual(
+ one.match('values-{}/strings.xml'.format(legacy)),
+ {
+ 'android_locale': legacy,
+ 'locale': standard
+ }
+ )
+ self.assertEqual(
+ one.with_env({'locale': standard}).prefix,
+ 'values-{}/strings.xml'.format(legacy)
+ )
+
+
+class TestRootedMatcher(Rooted, unittest.TestCase):
+ def test_root_path(self):
+ one = Matcher('some/path', root=self.root)
+ self.assertIsNone(one.match('some/path'))
+ self.assertIsNotNone(one.match(self.path('/some/path')))
+
+ def test_copy(self):
+ one = Matcher('some/path', root=self.path('/one-root'))
+ other = Matcher(one, root=self.path('/different-root'))
+ self.assertIsNone(other.match('some/path'))
+ self.assertIsNone(
+ other.match(self.path('/one-root/some/path'))
+ )
+ self.assertIsNotNone(
+ other.match(self.path('/different-root/some/path'))
+ )
+
+ def test_rooted(self):
+ r1 = self.path('/one-root')
+ r2 = self.path('/other-root')
+ one = Matcher(self.path('/one-root/full/path'), root=r2)
+ self.assertIsNone(one.match(self.path('/other-root/full/path')))
+        # concatenate r2 and r1; r1 is absolute, so we have to work around that
+ concat_root = r2
+ if not r1.startswith('/'):
+ # windows absolute paths don't start with '/', add one
+ concat_root += '/'
+ concat_root += r1
+ self.assertIsNone(one.match(concat_root + '/full/path'))
+ self.assertIsNotNone(one.match(self.path('/one-root/full/path')))
+
+ def test_variable(self):
+ r1 = self.path('/one-root')
+ r2 = self.path('/other-root')
+ one = Matcher(
+ '{var}/path',
+ env={'var': 'relative-dir'},
+ root=r1
+ )
+ self.assertIsNone(one.match('relative-dir/path'))
+ self.assertIsNotNone(
+ one.match(self.path('/one-root/relative-dir/path'))
+ )
+ other = Matcher(one, env={'var': r2})
+ self.assertIsNone(
+ other.match(self.path('/one-root/relative-dir/path'))
+ )
+ self.assertIsNotNone(
+ other.match(self.path('/other-root/path'))
+ )
diff --git a/third_party/python/compare-locales/compare_locales/tests/paths/test_paths.py b/third_party/python/compare-locales/compare_locales/tests/paths/test_paths.py
new file mode 100644
index 0000000000..e72fe9a7a6
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/paths/test_paths.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import unittest
+
+from compare_locales.paths import File
+
+
+class TestFile(unittest.TestCase):
+ def test_hash_and_equality(self):
+ f1 = File('/tmp/full/path/to/file', 'path/to/file')
+ d = {}
+ d[f1] = True
+ self.assertIn(f1, d)
+ f2 = File('/tmp/full/path/to/file', 'path/to/file')
+ self.assertIn(f2, d)
+ f2 = File('/tmp/full/path/to/file', 'path/to/file', locale='en')
+ self.assertNotIn(f2, d)
+ # trigger hash collisions between File and non-File objects
+ self.assertEqual(hash(f1), hash(f1.localpath))
+ self.assertNotIn(f1.localpath, d)
+ f1 = File('/tmp/full/other/path', 'other/path')
+ d[f1.localpath] = True
+ self.assertIn(f1.localpath, d)
+ self.assertNotIn(f1, d)
diff --git a/third_party/python/compare-locales/compare_locales/tests/paths/test_project.py b/third_party/python/compare-locales/compare_locales/tests/paths/test_project.py
new file mode 100644
index 0000000000..fe12245486
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/paths/test_project.py
@@ -0,0 +1,229 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import unittest
+
+from compare_locales.paths import ProjectConfig
+from . import SetupMixin
+
+
+class TestConfigRules(SetupMixin, unittest.TestCase):
+
+ def test_filter_empty(self):
+ 'Test that an empty config works'
+ self.cfg.add_paths({
+ 'l10n': '/tmp/somedir/{locale}/browser/**'
+ })
+ rv = self.cfg.filter(self.file)
+ self.assertEqual(rv, 'error')
+ rv = self.cfg.filter(self.file, entity='one_entity')
+ self.assertEqual(rv, 'error')
+ rv = self.cfg.filter(self.other_file)
+ self.assertEqual(rv, 'ignore')
+ rv = self.cfg.filter(self.other_file, entity='one_entity')
+ self.assertEqual(rv, 'ignore')
+
+ def test_single_file_rule(self):
+ 'Test a single rule for just a single file, no key'
+ self.cfg.add_paths({
+ 'l10n': '/tmp/somedir/{locale}/browser/**'
+ })
+ self.cfg.add_rules({
+ 'path': '/tmp/somedir/{locale}/browser/one/two/file.ftl',
+ 'action': 'ignore'
+ })
+ rv = self.cfg.filter(self.file)
+ self.assertEqual(rv, 'ignore')
+ rv = self.cfg.filter(self.file, 'one_entity')
+ self.assertEqual(rv, 'error')
+ rv = self.cfg.filter(self.other_file)
+ self.assertEqual(rv, 'ignore')
+ rv = self.cfg.filter(self.other_file, 'one_entity')
+ self.assertEqual(rv, 'ignore')
+
+ def test_single_key_rule(self):
+ 'Test a single rule with file and key'
+ self.cfg.add_paths({
+ 'l10n': '/tmp/somedir/{locale}/browser/**'
+ })
+ self.cfg.add_rules({
+ 'path': '/tmp/somedir/{locale}/browser/one/two/file.ftl',
+ 'key': 'one_entity',
+ 'action': 'ignore'
+ })
+ rv = self.cfg.filter(self.file)
+ self.assertEqual(rv, 'error')
+ rv = self.cfg.filter(self.file, 'one_entity')
+ self.assertEqual(rv, 'ignore')
+ rv = self.cfg.filter(self.other_file)
+ self.assertEqual(rv, 'ignore')
+ rv = self.cfg.filter(self.other_file, 'one_entity')
+ self.assertEqual(rv, 'ignore')
+
+ def test_single_non_matching_key_rule(self):
+ 'Test a single key rule with regex special chars that should not match'
+ self.cfg.add_paths({
+ 'l10n': '/tmp/somedir/{locale}/**'
+ })
+ self.cfg.add_rules({
+ 'path': '/tmp/somedir/{locale}/browser/one/two/file.ftl',
+ 'key': '.ne_entit.',
+ 'action': 'ignore'
+ })
+ rv = self.cfg.filter(self.file, 'one_entity')
+ self.assertEqual(rv, 'error')
+
+ def test_single_matching_re_key_rule(self):
+        'Test a single key rule with a regular expression'
+ self.cfg.add_paths({
+ 'l10n': '/tmp/somedir/{locale}/**'
+ })
+ self.cfg.add_rules({
+ 'path': '/tmp/somedir/{locale}/browser/one/two/file.ftl',
+ 'key': 're:.ne_entit.$',
+ 'action': 'ignore'
+ })
+ rv = self.cfg.filter(self.file, 'one_entity')
+ self.assertEqual(rv, 'ignore')
+
+ def test_double_file_rule(self):
+ 'Test path shortcut, one for each of our files'
+ self.cfg.add_paths({
+ 'l10n': '/tmp/somedir/{locale}/**'
+ })
+ self.cfg.add_rules({
+ 'path': [
+ '/tmp/somedir/{locale}/browser/one/two/file.ftl',
+ '/tmp/somedir/{locale}/toolkit/two/one/file.ftl',
+ ],
+ 'action': 'ignore'
+ })
+ rv = self.cfg.filter(self.file)
+ self.assertEqual(rv, 'ignore')
+ rv = self.cfg.filter(self.other_file)
+ self.assertEqual(rv, 'ignore')
+
+ def test_double_file_key_rule(self):
+ 'Test path and key shortcut, one key matching, one not'
+ self.cfg.add_paths({
+ 'l10n': '/tmp/somedir/{locale}/**'
+ })
+ self.cfg.add_rules({
+ 'path': [
+ '/tmp/somedir/{locale}/browser/one/two/file.ftl',
+ '/tmp/somedir/{locale}/toolkit/two/one/file.ftl',
+ ],
+ 'key': [
+ 'one_entity',
+ 'other_entity',
+ ],
+ 'action': 'ignore'
+ })
+ rv = self.cfg.filter(self.file)
+ self.assertEqual(rv, 'error')
+ rv = self.cfg.filter(self.file, 'one_entity')
+ self.assertEqual(rv, 'ignore')
+ rv = self.cfg.filter(self.other_file)
+ self.assertEqual(rv, 'error')
+ rv = self.cfg.filter(self.other_file, 'one_entity')
+ self.assertEqual(rv, 'ignore')
+
+ def test_single_wildcard_rule(self):
+ 'Test single wildcard'
+ self.cfg.add_paths({
+ 'l10n': '/tmp/somedir/{locale}/browser/**'
+ })
+ self.cfg.add_rules({
+ 'path': [
+ '/tmp/somedir/{locale}/browser/one/*/*',
+ ],
+ 'action': 'ignore'
+ })
+ rv = self.cfg.filter(self.file)
+ self.assertEqual(rv, 'ignore')
+ rv = self.cfg.filter(self.other_file)
+ self.assertEqual(rv, 'ignore')
+
+ def test_double_wildcard_rule(self):
+ 'Test double wildcard'
+ self.cfg.add_paths({
+ 'l10n': '/tmp/somedir/{locale}/**'
+ })
+ self.cfg.add_rules({
+ 'path': [
+ '/tmp/somedir/{locale}/**',
+ ],
+ 'action': 'ignore'
+ })
+ rv = self.cfg.filter(self.file)
+ self.assertEqual(rv, 'ignore')
+ rv = self.cfg.filter(self.other_file)
+ self.assertEqual(rv, 'ignore')
+
+
+class TestProjectConfig(unittest.TestCase):
+ def test_children(self):
+ pc = ProjectConfig(None)
+ child = ProjectConfig(None)
+ pc.add_child(child)
+ self.assertListEqual([pc, child], list(pc.configs))
+
+ def test_locales_in_children(self):
+ pc = ProjectConfig(None)
+ child = ProjectConfig(None)
+ child.add_paths({
+ 'l10n': '/tmp/somedir/{locale}/toolkit/**',
+ })
+ child.set_locales([])
+ pc.add_child(child)
+ self.assertListEqual(pc.all_locales, [])
+ pc.set_locales(['de', 'fr'])
+ self.assertListEqual(child.locales, [])
+ self.assertListEqual(pc.all_locales, ['de', 'fr'])
+
+ def test_locales_in_paths(self):
+ pc = ProjectConfig(None)
+ child = ProjectConfig(None)
+ child.add_paths({
+ 'l10n': '/tmp/somedir/{locale}/toolkit/**',
+ 'locales': ['it']
+ })
+ child.set_locales([])
+ pc.add_child(child)
+ self.assertListEqual(pc.all_locales, ['it'])
+ pc.set_locales(['de', 'fr'])
+ self.assertListEqual(pc.all_locales, ['de', 'fr', 'it'])
+
+
+class TestSameConfig(unittest.TestCase):
+
+ def test_path(self):
+ one = ProjectConfig('one.toml')
+ one.set_locales(['ab'])
+ self.assertTrue(one.same(ProjectConfig('one.toml')))
+ self.assertFalse(one.same(ProjectConfig('two.toml')))
+
+ def test_paths(self):
+ one = ProjectConfig('one.toml')
+ one.set_locales(['ab'])
+ one.add_paths({
+ 'l10n': '/tmp/somedir/{locale}/**'
+ })
+ other = ProjectConfig('one.toml')
+ self.assertFalse(one.same(other))
+ other.add_paths({
+ 'l10n': '/tmp/somedir/{locale}/**'
+ })
+ self.assertTrue(one.same(other))
+
+ def test_children(self):
+ one = ProjectConfig('one.toml')
+ one.add_child(ProjectConfig('inner.toml'))
+ one.set_locales(['ab'])
+ other = ProjectConfig('one.toml')
+ self.assertFalse(one.same(other))
+ other.add_child(ProjectConfig('inner.toml'))
+ self.assertTrue(one.same(other))
diff --git a/third_party/python/compare-locales/compare_locales/tests/po/__init__.py b/third_party/python/compare-locales/compare_locales/tests/po/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/po/__init__.py
diff --git a/third_party/python/compare-locales/compare_locales/tests/po/test_parser.py b/third_party/python/compare-locales/compare_locales/tests/po/test_parser.py
new file mode 100644
index 0000000000..e02fe66283
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/po/test_parser.py
@@ -0,0 +1,139 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import unittest
+
+from compare_locales.tests import ParserTestMixin
+from compare_locales.parser import (
+ BadEntity,
+ Whitespace,
+)
+
+
+class TestPoParser(ParserTestMixin, unittest.TestCase):
+ maxDiff = None
+ filename = 'strings.po'
+
+ def test_parse_string_list(self):
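+        # adjacent quoted strings are concatenated; the parser returns the
+        # value plus the offset at which parsing stopped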
+ self.parser.readUnicode(' ')
+ ctx = self.parser.ctx
+ with self.assertRaises(BadEntity):
+ self.parser._parse_string_list(ctx, 0, 'msgctxt')
+ self.parser.readUnicode('msgctxt ')
+ ctx = self.parser.ctx
+ with self.assertRaises(BadEntity):
+ self.parser._parse_string_list(ctx, 0, 'msgctxt')
+ self.parser.readUnicode('msgctxt " "')
+ ctx = self.parser.ctx
+ self.assertTupleEqual(
+ self.parser._parse_string_list(ctx, 0, 'msgctxt'),
+ (" ", len(ctx.contents))
+ )
+ self.parser.readUnicode('msgctxt " " \t "A"\r "B"asdf')
+ ctx = self.parser.ctx
+ self.assertTupleEqual(
+ self.parser._parse_string_list(ctx, 0, 'msgctxt'),
+ (" AB", len(ctx.contents)-4)
+ )
+ self.parser.readUnicode('msgctxt "\\\\ " "A" "B"asdf"fo"')
+ ctx = self.parser.ctx
+ self.assertTupleEqual(
+ self.parser._parse_string_list(ctx, 0, 'msgctxt'),
+ ("\\ AB", len(ctx.contents)-8)
+ )
+
+ def test_simple_string(self):
+ source = '''
+msgid "untranslated string"
+msgstr "translated string"
+'''
+ self._test(
+ source,
+ (
+ (Whitespace, '\n'),
+ (('untranslated string', None), 'translated string'),
+ (Whitespace, '\n'),
+ )
+ )
+
+ def test_escapes(self):
+ source = r'''
+msgid "untranslated string"
+msgstr "\\\t\r\n\""
+'''
+ self._test(
+ source,
+ (
+ (Whitespace, '\n'),
+ (('untranslated string', None), '\\\t\r\n"'),
+ (Whitespace, '\n'),
+ )
+ )
+
+ def test_comments(self):
+ source = '''
+# translator-comments
+#. extracted-comments
+#: reference...
+#, flag...
+#| msgctxt previous-context
+#| msgid previous-untranslated-string
+msgid "untranslated string"
+msgstr "translated string"
+'''
+ self._test(
+ source,
+ (
+ (Whitespace, '\n'),
+ (
+ ('untranslated string', None),
+ 'translated string',
+ 'extracted-comments',
+ ),
+ (Whitespace, '\n'),
+ )
+ )
+
+ def test_simple_context(self):
+ source = '''
+msgctxt "context to use"
+msgid "untranslated string"
+msgstr "translated string"
+'''
+ self._test(
+ source,
+ (
+ (Whitespace, '\n'),
+ (
+ ('untranslated string', 'context to use'),
+ 'translated string'
+ ),
+ (Whitespace, '\n'),
+ )
+ )
+
+ def test_translated(self):
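+        # an empty msgstr falls back to the msgid and is reported as not
+        # localized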
+ source = '''
+msgid "reference 1"
+msgstr "translated string"
+
+msgid "reference 2"
+msgstr ""
+'''
+ self._test(
+ source,
+ (
+ (Whitespace, '\n'),
+ (('reference 1', None), 'translated string'),
+ (Whitespace, '\n'),
+ (('reference 2', None), 'reference 2'),
+ (Whitespace, '\n'),
+ )
+ )
+ entities = self.parser.parse()
+ self.assertListEqual(
+ [e.localized for e in entities],
+ [True, False]
+ )
diff --git a/third_party/python/compare-locales/compare_locales/tests/properties/__init__.py b/third_party/python/compare-locales/compare_locales/tests/properties/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/properties/__init__.py
diff --git a/third_party/python/compare-locales/compare_locales/tests/properties/test_checks.py b/third_party/python/compare-locales/compare_locales/tests/properties/test_checks.py
new file mode 100644
index 0000000000..68a8e0fd8c
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/properties/test_checks.py
@@ -0,0 +1,109 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+from compare_locales.paths import File
+from compare_locales.tests import BaseHelper
+
+
+class TestProperties(BaseHelper):
+ file = File('foo.properties', 'foo.properties')
+ refContent = b'''some = value
+'''
+
+ def testGood(self):
+ self._test(b'''some = localized''',
+ tuple())
+
+ def testMissedEscape(self):
+ self._test(br'''some = \u67ood escape, bad \escape''',
+ (('warning', 20, r'unknown escape sequence, \e',
+ 'escape'),))
+
+ def test_bad_encoding(self):
+ self._test(
+ 'some = touché"'.encode('latin-1'),
+ (
+ (
+ "warning",
+ 12,
+ "\ufffd in: some",
+ "encodings"
+ ),
+ )
+ )
+
+
+class TestPlurals(BaseHelper):
+ file = File('foo.properties', 'foo.properties')
+ refContent = b'''\
+# LOCALIZATION NOTE (downloadsTitleFiles): Semi-colon list of plural forms.
+# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
+# #1 number of files
+# example: 111 files - Downloads
+downloadsTitleFiles=#1 file - Downloads;#1 files - #2
+'''
+
+ def testGood(self):
+ self._test(b'''\
+# LOCALIZATION NOTE (downloadsTitleFiles): Semi-colon list of plural forms.
+# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
+# #1 number of files
+# example: 111 files - Downloads
+downloadsTitleFiles=#1 file - Downloads;#1 files - #2;#1 filers
+''',
+ tuple())
+
+ def testNotUsed(self):
+ self._test(b'''\
+# LOCALIZATION NOTE (downloadsTitleFiles): Semi-colon list of plural forms.
+# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
+# #1 number of files
+# example: 111 files - Downloads
+downloadsTitleFiles=#1 file - Downloads;#1 files - Downloads;#1 filers
+''',
+ (('warning', 0, 'not all variables used in l10n',
+ 'plural'),))
+
+ def testNotDefined(self):
+ self._test(b'''\
+# LOCALIZATION NOTE (downloadsTitleFiles): Semi-colon list of plural forms.
+# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
+# #1 number of files
+# example: 111 files - Downloads
+downloadsTitleFiles=#1 file - Downloads;#1 files - #2;#1 #3
+''',
+ (('error', 0, 'unreplaced variables in l10n', 'plural'),))
+
+
+class TestPluralForms(BaseHelper):
+ file = File('foo.properties', 'foo.properties', locale='en-GB')
+ refContent = b'''\
+# LOCALIZATION NOTE (downloadsTitleFiles): Semi-colon list of plural forms.
+# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
+# #1 number of files
+# example: 111 files - Downloads
+downloadsTitleFiles=#1 file;#1 files
+'''
+
+ def test_matching_forms(self):
+ self._test(b'''\
+downloadsTitleFiles=#1 fiiilee;#1 fiiilees
+''',
+ tuple())
+
+ def test_lacking_forms(self):
+ self._test(b'''\
+downloadsTitleFiles=#1 fiiilee
+''',
+ (('warning', 0, 'expecting 2 plurals, found 1', 'plural'),))
+
+ def test_excess_forms(self):
+ self._test(b'''\
+downloadsTitleFiles=#1 fiiilee;#1 fiiilees;#1 fiiilees
+''',
+ (('warning', 0, 'expecting 2 plurals, found 3', 'plural'),))
diff --git a/third_party/python/compare-locales/compare_locales/tests/properties/test_merge.py b/third_party/python/compare-locales/compare_locales/tests/properties/test_merge.py
new file mode 100644
index 0000000000..97d98f5167
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/properties/test_merge.py
@@ -0,0 +1,68 @@
+# coding=utf8
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from codecs import decode, encode
+import unittest
+
+from compare_locales.merge import merge_channels
+
+
+class TestMergeProperties(unittest.TestCase):
+ name = "foo.properties"
+
+ def test_no_changes(self):
+ channels = (b"""
+foo = Foo 1
+""", b"""
+foo = Foo 2
+""")
+ self.assertEqual(
+ merge_channels(self.name, channels), b"""
+foo = Foo 1
+""")
+
+ def test_encoding(self):
+ channels = (encode(u"""
+foo = Foo 1…
+""", "utf8"), encode(u"""
+foo = Foo 2…
+""", "utf8"))
+ output = merge_channels(self.name, channels)
+ self.assertEqual(output, encode(u"""
+foo = Foo 1…
+""", "utf8"))
+
+ u_output = decode(output, "utf8")
+ self.assertEqual(u_output, u"""
+foo = Foo 1…
+""")
+
+ def test_repetitive(self):
+ channels = (b"""\
+# comment
+one = one
+# comment
+three = three
+""", b"""\
+# comment
+one = one
+# comment
+two = two
+# comment
+three = three
+""")
+ output = merge_channels(self.name, channels)
+ self.assertMultiLineEqual(
+ decode(output, "utf-8"),
+ """\
+# comment
+one = one
+# comment
+two = two
+# comment
+three = three
+"""
+ )
diff --git a/third_party/python/compare-locales/compare_locales/tests/properties/test_parser.py b/third_party/python/compare-locales/compare_locales/tests/properties/test_parser.py
new file mode 100644
index 0000000000..7600baa753
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/properties/test_parser.py
@@ -0,0 +1,243 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import unittest
+
+from six.moves import zip
+from compare_locales.tests import ParserTestMixin
+from compare_locales.parser import (
+ Comment,
+ Junk,
+ Whitespace,
+)
+
+
+class TestPropertiesParser(ParserTestMixin, unittest.TestCase):
+
+ filename = 'foo.properties'
+
+ def testBackslashes(self):
+ self._test(r'''one_line = This is one line
+two_line = This is the first \
+of two lines
+one_line_trailing = This line ends in \\
+and has junk
+two_lines_triple = This line is one of two and ends in \\\
+and still has another line coming
+''', (
+ ('one_line', 'This is one line'),
+ (Whitespace, '\n'),
+ ('two_line', u'This is the first of two lines'),
+ (Whitespace, '\n'),
+ ('one_line_trailing', u'This line ends in \\'),
+ (Whitespace, '\n'),
+ (Junk, 'and has junk\n'),
+ ('two_lines_triple', 'This line is one of two and ends in \\'
+ 'and still has another line coming'),
+ (Whitespace, '\n')))
+
+ def testProperties(self):
+ # port of netwerk/test/PropertiesTest.cpp
+ self.parser.readContents(self.resource('test.properties'))
+ ref = ['1', '2', '3', '4', '5', '6', '7', '8',
+ 'this is the first part of a continued line '
+ 'and here is the 2nd part']
+ i = iter(self.parser)
+ for r, e in zip(ref, i):
+ self.assertTrue(e.localized)
+ self.assertEqual(e.val, r)
+
+ def test_bug121341(self):
+ # port of xpcom/tests/unit/test_bug121341.js
+ self.parser.readContents(self.resource('bug121341.properties'))
+ ref = ['abc', 'xy', u"\u1234\t\r\n\u00AB\u0001\n",
+ "this is multiline property",
+ "this is another multiline property", u"test\u0036",
+ "yet another multiline propery", u"\ttest5\u0020", " test6\t",
+ u"c\uCDEFd", u"\uABCD"]
+ i = iter(self.parser)
+ for r, e in zip(ref, i):
+ self.assertEqual(e.val, r)
+
+ def test_comment_in_multi(self):
+ self._test(r'''bar=one line with a \
+# part that looks like a comment \
+and an end''', (('bar', 'one line with a # part that looks like a comment '
+ 'and an end'),))
+
+ def test_license_header(self):
+ self._test('''\
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+foo=value
+''', (
+ (Comment, 'MPL'),
+ (Whitespace, '\n\n'),
+ ('foo', 'value'),
+ (Whitespace, '\n')))
+
+ def test_escapes(self):
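+        # \uXXXX escapes consume up to four hex digits; unknown escapes such
+        # as \a keep just the escaped character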
+ self.parser.readContents(br'''
+# unicode escapes
+zero = some \unicode
+one = \u0
+two = \u41
+three = \u042
+four = \u0043
+five = \u0044a
+six = \a
+seven = \n\r\t\\
+''')
+ ref = ['some unicode', chr(0), 'A', 'B', 'C', 'Da', 'a', '\n\r\t\\']
+ for r, e in zip(ref, self.parser):
+ self.assertEqual(e.val, r)
+
+ def test_trailing_comment(self):
+ self._test('''first = string
+second = string
+
+#
+#commented out
+''', (
+ ('first', 'string'),
+ (Whitespace, '\n'),
+ ('second', 'string'),
+ (Whitespace, '\n\n'),
+ (Comment, 'commented out'),
+ (Whitespace, '\n')))
+
+ def test_trailing_newlines(self):
+ self._test('''\
+foo = bar
+
+\x20\x20
+ ''', (('foo', 'bar'), (Whitespace, '\n\n\x20\x20\n ')))
+
+ def test_just_comments(self):
+ self._test('''\
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# LOCALIZATION NOTE These strings are used inside the Promise debugger
+# which is available as a panel in the Debugger.
+''', (
+ (Comment, 'MPL'),
+ (Whitespace, '\n\n'),
+ (Comment, 'LOCALIZATION NOTE'),
+ (Whitespace, '\n')))
+
+ def test_just_comments_without_trailing_newline(self):
+ self._test('''\
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# LOCALIZATION NOTE These strings are used inside the Promise debugger
+# which is available as a panel in the Debugger.''', (
+ (Comment, 'MPL'),
+ (Whitespace, '\n\n'),
+ (Comment, 'LOCALIZATION NOTE')))
+
+ def test_trailing_comment_and_newlines(self):
+ self._test('''\
+# LOCALIZATION NOTE These strings are used inside the Promise debugger
+# which is available as a panel in the Debugger.
+
+
+
+''', (
+ (Comment, 'LOCALIZATION NOTE'),
+ (Whitespace, '\n\n\n')))
+
+ def test_standalone_license(self):
+ self._test('''\
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+foo = value
+''', (
+ (Comment, 'MPL'),
+ (Whitespace, '\n'),
+ ('foo', 'value'),
+ (Whitespace, '\n')))
+
+ def test_empty_file(self):
+ self._test('', tuple())
+ self._test('\n', ((Whitespace, '\n'),))
+ self._test('\n\n', ((Whitespace, '\n\n'),))
+ self._test(' \n\n', ((Whitespace, '\n\n'),))
+
+ def test_positions(self):
+ self.parser.readContents(b'''\
+one = value
+two = other \\
+escaped value
+''')
+ one, two = list(self.parser)
+ self.assertEqual(one.position(), (1, 1))
+ self.assertEqual(one.value_position(), (1, 7))
+ self.assertEqual(two.position(), (2, 1))
+ self.assertEqual(two.value_position(), (2, 7))
+ self.assertEqual(two.value_position(-1), (3, 14))
+ self.assertEqual(two.value_position(10), (3, 3))
+
+ # Bug 1399059 comment 18
+ def test_z(self):
+ self.parser.readContents(b'''\
+one = XYZ ABC
+''')
+ one, = list(self.parser)
+ self.assertEqual(one.val, 'XYZ ABC')
+
+ def test_white_space_stripping(self):
+ self._test('''\
+one = one
+two = two \n\
+three = three\xa0''', (
+ ('one', 'one'),
+ (Whitespace, '\n'),
+ ('two', 'two'),
+ (Whitespace, '\n'),
+ ('three', 'three\xa0'),
+ ))
+
+ def test_white_space_keys(self):
+ self._test('''\
+o\\ e = one
+t\fo = two \n\
+t\xa0e = three\xa0''', (
+ ('o\\ e', 'one'),
+ (Whitespace, '\n'),
+ ('t\fo', 'two'),
+ (Whitespace, '\n'),
+ ('t\xa0e', 'three\xa0'),
+ ))
+
+ def test_pre_comment(self):
+ self._test('''\
+# comment
+one = string
+
+# standalone
+
+# glued
+second = string
+''', (
+ ('one', 'string', 'comment'),
+ (Whitespace, '\n\n'),
+ (Comment, 'standalone'),
+ (Whitespace, '\n\n'),
+ ('second', 'string', 'glued'),
+ (Whitespace, '\n'),
+ ))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/compare-locales/compare_locales/tests/serializer/__init__.py b/third_party/python/compare-locales/compare_locales/tests/serializer/__init__.py
new file mode 100644
index 0000000000..9b85098b23
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/serializer/__init__.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from compare_locales.parser import getParser
+from compare_locales.serializer import serialize
+
+
+class Helper(object):
+ """Mixin to test serializers.
+
+ Reads the reference_content into self.reference, and uses
+ that to serialize in _test.
+ """
+ name = None
+ reference_content = None
+
+ def setUp(self):
+ p = self.parser = getParser(self.name)
+ p.readUnicode(self.reference_content)
+ self.reference = list(p.walk())
+
+ def _test(self, old_content, new_data, expected):
+ """Test with old content, new data, and the reference data
+ against the expected unicode output.
+ """
+ self.parser.readUnicode(old_content)
+ old_l10n = list(self.parser.walk())
+ output = serialize(self.name, self.reference, old_l10n, new_data)
+ self.assertMultiLineEqual(
+ output.decode(self.parser.encoding),
+ expected
+ )
diff --git a/third_party/python/compare-locales/compare_locales/tests/serializer/test_android.py b/third_party/python/compare-locales/compare_locales/tests/serializer/test_android.py
new file mode 100644
index 0000000000..b36f605e87
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/serializer/test_android.py
@@ -0,0 +1,218 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from . import Helper
+
+
+class TestAndroidSerializer(Helper, unittest.TestCase):
+ name = 'strings.xml'
+ reference_content = """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <!-- The page html title (i.e. the <title> tag content) -->
+ <string name="title">Unable to connect</string>
+ <string name="message"><![CDATA[
+ <ul>
+ <li>The site could be temporarily unavailable or too busy.</li>
+ </ul>
+ ]]></string>
+ <string name="wrapped_message">
+ <![CDATA[
+ <ul>
+ <li>The site could be temporarily unavailable or too busy.</li>
+ </ul>
+ ]]>
+ </string>
+</resources>
+"""
+
+ def test_nothing_new_or_old(self):
+ self._test(
+ "",
+ {},
+ """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ </resources>
+"""
+ )
+
+ def test_new_string(self):
+ self._test(
+ "",
+ {
+ "title": "Cannot connect"
+ },
+ """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <!-- The page html title (i.e. the <title> tag content) -->
+ <string name="title">Cannot connect</string>
+ </resources>
+"""
+ )
+
+ def test_new_cdata(self):
+ self._test(
+ "",
+ {
+ "message": """
+<ul>
+ <li>Something else</li>
+</ul>
+"""
+ },
+ """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="message"><![CDATA[
+<ul>
+ <li>Something else</li>
+</ul>
+]]></string>
+ </resources>
+"""
+ )
+
+ def test_new_cdata_wrapped(self):
+ self._test(
+ "",
+ {
+ "wrapped_message": """
+<ul>
+ <li>Something else</li>
+</ul>
+"""
+ },
+ """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="wrapped_message">
+ <![CDATA[
+<ul>
+ <li>Something else</li>
+</ul>
+]]>
+ </string>
+</resources>
+"""
+ )
+
+ def test_remove_string(self):
+ self._test(
+ """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="first_old_title">Unable to connect</string>
+ <string name="title">Unable to connect</string>
+ <string name="last_old_title">Unable to connect</string>
+</resources>
+""",
+ {},
+ """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="title">Unable to connect</string>
+ </resources>
+"""
+ )
+
+ def test_same_string(self):
+ self._test(
+ """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="title">Unable to connect</string>
+</resources>
+""",
+ {
+ "title": "Unable to connect"
+ },
+ """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <!-- The page html title (i.e. the <title> tag content) -->
+ <string name="title">Unable to connect</string>
+ </resources>
+"""
+ )
+
+
+class TestAndroidDuplicateComment(Helper, unittest.TestCase):
+ name = 'strings.xml'
+ reference_content = """\
+<?xml version="1.0" encoding="utf-8"?>
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+<resources>
+ <!-- Label used in the contextmenu shown when long-pressing on a link -->
+ <string name="contextmenu_open_in_app">Open with app</string>
+ <!-- Label used in the contextmenu shown when long-pressing on a link -->
+ <string name="contextmenu_link_share">Share link</string>
+</resources>
+"""
+
+ def test_missing_translation(self):
+ self._test(
+ """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+
+ <!-- Label used in the contextmenu shown when long-pressing on a link -->
+ <!-- Label used in the contextmenu shown when long-pressing on a link -->
+ <string name="contextmenu_link_share"/>
+ </resources>
+""",
+ {
+ "contextmenu_link_share": "translation"
+ },
+ """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+
+ <!-- Label used in the contextmenu shown when long-pressing on a link -->
+ <string name="contextmenu_link_share">translation</string>
+ </resources>
+"""
+ )
+
+
+class TestAndroidTools(Helper, unittest.TestCase):
+ name = 'strings.xml'
+ reference_content = (
+ """\
+<resources xmlns:tools="http://schemas.android.com/tools">
+ <string name="app_tagline">Take your passwords everywhere.</string>
+ <string name="search_your_entries" tools:ignore="ExtraTranslation">"""
+ "search your entries"
+ """</string>
+</resources>
+""")
+
+ def test_namespaced_document(self):
+ self._test(
+ """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="app_tagline">Localized tag line</string>
+ </resources>
+""",
+ {
+ "search_your_entries": "Looking for Entries"
+ },
+ (
+ """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:tools="http://schemas.android.com/tools">
+ <string name="app_tagline">Localized tag line</string>
+ <string name="search_your_entries" tools:ignore="ExtraTranslation">"""
+ "Looking for Entries"
+ """</string>
+</resources>
+""")
+ )
diff --git a/third_party/python/compare-locales/compare_locales/tests/serializer/test_fluent.py b/third_party/python/compare-locales/compare_locales/tests/serializer/test_fluent.py
new file mode 100644
index 0000000000..9aa9acd4f7
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/serializer/test_fluent.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from compare_locales.serializer import serialize
+from . import Helper
+
+
+class TestFluentSerializer(Helper, unittest.TestCase):
+ name = "foo.ftl"
+ reference_content = """\
+this = is English
+
+# another one bites
+another = message
+"""
+
+ def test_nothing_new_or_old(self):
+ output = serialize(self.name, self.reference, [], {})
+ self.assertMultiLineEqual(output.decode(self.parser.encoding), '\n\n')
+
+ def test_obsolete_old_string(self):
+ self._test(
+ """\
+# we used to have this
+old = stuff with comment
+""",
+ {},
+ """\
+
+
+""")
+
+ def test_nothing_old_new_translation(self):
+ self._test(
+ "",
+ {
+ "another": "another = localized message"
+ },
+ """\
+
+
+# another one bites
+another = localized message
+"""
+ )
+
+ def test_old_message_new_other_translation(self):
+ self._test(
+ """\
+this = is localized
+""",
+ {
+ "another": "another = localized message"
+ },
+ """\
+this = is localized
+
+# another one bites
+another = localized message
+"""
+ )
+
+ def test_old_message_new_same_translation(self):
+ self._test(
+ """\
+this = is localized
+""",
+ {
+ "this": "this = has a better message"
+ },
+ """\
+this = has a better message
+
+"""
+ )
diff --git a/third_party/python/compare-locales/compare_locales/tests/serializer/test_properties.py b/third_party/python/compare-locales/compare_locales/tests/serializer/test_properties.py
new file mode 100644
index 0000000000..50929fce73
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/serializer/test_properties.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from compare_locales.serializer import serialize
+from . import Helper
+
+
+class TestPropertiesSerializer(Helper, unittest.TestCase):
+ name = 'foo.properties'
+ reference_content = """\
+this = is English
+
+# another one bites
+another = message
+"""
+
+ def test_nothing_new_or_old(self):
+ output = serialize(self.name, self.reference, [], {})
+ self.assertMultiLineEqual(output.decode(self.parser.encoding), '\n\n')
+
+ def test_obsolete_old_string(self):
+ self._test(
+ """\
+# we used to have this
+old = stuff with comment
+""",
+ {},
+ """\
+
+
+""")
+
+ def test_nothing_old_new_translation(self):
+ self._test(
+ "",
+ {
+ "another": "localized message"
+ },
+ """\
+
+
+# another one bites
+another = localized message
+"""
+ )
+
+ def test_old_message_new_other_translation(self):
+ self._test(
+ """\
+this = is localized
+""",
+ {
+ "another": "localized message"
+ },
+ """\
+this = is localized
+
+# another one bites
+another = localized message
+"""
+ )
+
+ def test_old_message_new_same_translation(self):
+ self._test(
+ """\
+this = is localized
+""",
+ {
+ "this": "has a better message"
+ },
+ """\
+this = has a better message
+
+"""
+ )
+
+
+class TestPropertiesDuplicateComment(Helper, unittest.TestCase):
+ name = 'foo.properties'
+ reference_content = """\
+# repetitive
+one = one
+# repetitive
+two = two
+"""
+
+ def test_missing_translation(self):
+ self._test(
+ """\
+# repetitive
+
+# repetitive
+two = two
+""",
+ {},
+ """\
+# repetitive
+
+# repetitive
+two = two
+"""
+ )
diff --git a/third_party/python/compare-locales/compare_locales/tests/test_apps.py b/third_party/python/compare-locales/compare_locales/tests/test_apps.py
new file mode 100644
index 0000000000..3fc5091fe5
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_apps.py
@@ -0,0 +1,168 @@
+from __future__ import absolute_import
+import unittest
+import os
+import tempfile
+import shutil
+
+from compare_locales import mozpath
+from compare_locales.paths import (
+ EnumerateApp,
+ EnumerateSourceTreeApp,
+ ProjectFiles,
+)
+
+MAIL_INI = '''\
+[general]
+depth = ../..
+all = mail/locales/all-locales
+
+[compare]
+dirs = mail
+
+[includes]
+# non-central apps might want to use %(topsrcdir)s here, or other vars
+# RFE: that needs to be supported by compare-locales, too, though
+toolkit = mozilla/toolkit/locales/l10n.ini
+
+[include_toolkit]
+type = hg
+mozilla = mozilla-central
+repo = http://hg.mozilla.org/
+l10n.ini = toolkit/locales/l10n.ini
+'''
+
+
+MAIL_ALL_LOCALES = '''af
+de
+fr
+'''
+
+MAIL_FILTER_PY = '''
+def test(mod, path, entity = None):
+ if mod == 'toolkit' and path == 'ignored_path':
+ return 'ignore'
+ return 'error'
+'''
+
+TOOLKIT_INI = '''[general]
+depth = ../..
+
+[compare]
+dirs = toolkit
+'''
+
+
+class TestApp(unittest.TestCase):
+ def setUp(self):
+ self.stage = tempfile.mkdtemp()
+ mail = mozpath.join(self.stage, 'comm', 'mail', 'locales')
+ toolkit = mozpath.join(
+ self.stage, 'comm', 'mozilla', 'toolkit', 'locales')
+ l10n = mozpath.join(self.stage, 'l10n-central', 'de', 'toolkit')
+ os.makedirs(mozpath.join(mail, 'en-US'))
+ os.makedirs(mozpath.join(toolkit, 'en-US'))
+ os.makedirs(l10n)
+ with open(mozpath.join(mail, 'l10n.ini'), 'w') as f:
+ f.write(MAIL_INI)
+ with open(mozpath.join(mail, 'all-locales'), 'w') as f:
+ f.write(MAIL_ALL_LOCALES)
+ with open(mozpath.join(mail, 'filter.py'), 'w') as f:
+ f.write(MAIL_FILTER_PY)
+ with open(mozpath.join(toolkit, 'l10n.ini'), 'w') as f:
+ f.write(TOOLKIT_INI)
+ with open(mozpath.join(mail, 'en-US', 'mail.ftl'), 'w') as f:
+ f.write('')
+ with open(mozpath.join(toolkit, 'en-US', 'platform.ftl'), 'w') as f:
+ f.write('')
+ with open(mozpath.join(l10n, 'localized.ftl'), 'w') as f:
+ f.write('')
+
+ def tearDown(self):
+ shutil.rmtree(self.stage)
+
+ def test_app(self):
+        'Test parsing an App'
+ app = EnumerateApp(
+ mozpath.join(self.stage, 'comm', 'mail', 'locales', 'l10n.ini'),
+ mozpath.join(self.stage, 'l10n-central'))
+ self.assertListEqual(app.config.allLocales(), ['af', 'de', 'fr'])
+ self.assertEqual(len(app.config.children), 1)
+ projectconfig = app.asConfig()
+ self.assertListEqual(projectconfig.locales, ['af', 'de', 'fr'])
+ files = ProjectFiles('de', [projectconfig])
+ files = list(files)
+ self.assertEqual(len(files), 3)
+
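+        # Each item from ProjectFiles unpacks into
+        # (l10n path, reference path, merge path, set of extra tests).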
+ l10nfile, reffile, mergefile, test = files[0]
+ self.assertListEqual(mozpath.split(l10nfile)[-3:],
+ ['de', 'mail', 'mail.ftl'])
+ self.assertListEqual(mozpath.split(reffile)[-4:],
+ ['mail', 'locales', 'en-US', 'mail.ftl'])
+ self.assertIsNone(mergefile)
+ self.assertSetEqual(test, set())
+
+ l10nfile, reffile, mergefile, test = files[1]
+ self.assertListEqual(mozpath.split(l10nfile)[-3:],
+ ['de', 'toolkit', 'localized.ftl'])
+ self.assertListEqual(
+ mozpath.split(reffile)[-6:],
+ ['comm', 'mozilla', 'toolkit',
+ 'locales', 'en-US', 'localized.ftl'])
+ self.assertIsNone(mergefile)
+ self.assertSetEqual(test, set())
+
+ l10nfile, reffile, mergefile, test = files[2]
+ self.assertListEqual(mozpath.split(l10nfile)[-3:],
+ ['de', 'toolkit', 'platform.ftl'])
+ self.assertListEqual(
+ mozpath.split(reffile)[-6:],
+ ['comm', 'mozilla', 'toolkit', 'locales', 'en-US', 'platform.ftl'])
+ self.assertIsNone(mergefile)
+ self.assertSetEqual(test, set())
+
+ def test_src_app(self):
+        'Test parsing an App in source setup'
+ # move toolkit to toplevel
+ shutil.move(mozpath.join(self.stage, 'comm', 'mozilla'), self.stage)
+ app = EnumerateSourceTreeApp(
+ mozpath.join(self.stage, 'comm', 'mail', 'locales', 'l10n.ini'),
+ self.stage,
+ mozpath.join(self.stage, 'l10n-central'),
+ {
+ 'mozilla-central': mozpath.join(self.stage, 'mozilla')
+ }
+ )
+ self.assertListEqual(app.config.allLocales(), ['af', 'de', 'fr'])
+ self.assertEqual(len(app.config.children), 1)
+ projectconfig = app.asConfig()
+ self.assertListEqual(projectconfig.locales, ['af', 'de', 'fr'])
+ files = ProjectFiles('de', [projectconfig])
+ files = list(files)
+ self.assertEqual(len(files), 3)
+
+ l10nfile, reffile, mergefile, test = files[0]
+ self.assertListEqual(mozpath.split(l10nfile)[-3:],
+ ['de', 'mail', 'mail.ftl'])
+ self.assertListEqual(mozpath.split(reffile)[-4:],
+ ['mail', 'locales', 'en-US', 'mail.ftl'])
+ self.assertIsNone(mergefile)
+ self.assertSetEqual(test, set())
+
+ l10nfile, reffile, mergefile, test = files[1]
+ self.assertListEqual(mozpath.split(l10nfile)[-3:],
+ ['de', 'toolkit', 'localized.ftl'])
+ self.assertListEqual(
+ mozpath.split(reffile)[-5:],
+ ['mozilla', 'toolkit',
+ 'locales', 'en-US', 'localized.ftl'])
+ self.assertIsNone(mergefile)
+ self.assertSetEqual(test, set())
+
+ l10nfile, reffile, mergefile, test = files[2]
+ self.assertListEqual(mozpath.split(l10nfile)[-3:],
+ ['de', 'toolkit', 'platform.ftl'])
+ self.assertListEqual(
+ mozpath.split(reffile)[-5:],
+ ['mozilla', 'toolkit', 'locales', 'en-US', 'platform.ftl'])
+ self.assertIsNone(mergefile)
+ self.assertSetEqual(test, set())
diff --git a/third_party/python/compare-locales/compare_locales/tests/test_checks.py b/third_party/python/compare-locales/compare_locales/tests/test_checks.py
new file mode 100644
index 0000000000..193ac60c6b
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_checks.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import unittest
+
+from compare_locales.checks.base import CSSCheckMixin
+
+
+class CSSParserTest(unittest.TestCase):
+ def setUp(self):
+ self.mixin = CSSCheckMixin()
+
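+    # parse_css_spec returns (refMap, errors): a property -> unit map plus a list of
+    # error dicts, or (None, None) when the spec isn't recognized at all.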
+ def test_other(self):
+ refMap, errors = self.mixin.parse_css_spec('foo')
+ self.assertIsNone(refMap)
+ self.assertIsNone(errors)
+
+ def test_css_specs(self):
+ for prop in (
+ 'min-width', 'width', 'max-width',
+ 'min-height', 'height', 'max-height',
+ ):
+ refMap, errors = self.mixin.parse_css_spec('{}:1px;'.format(prop))
+ self.assertDictEqual(
+ refMap, {prop: 'px'}
+ )
+ self.assertIsNone(errors)
+
+ def test_single_whitespace(self):
+ refMap, errors = self.mixin.parse_css_spec('width:15px;')
+ self.assertDictEqual(
+ refMap, {'width': 'px'}
+ )
+ self.assertIsNone(errors)
+ refMap, errors = self.mixin.parse_css_spec('width : \t 15px ; ')
+ self.assertDictEqual(
+ refMap, {'width': 'px'}
+ )
+ self.assertIsNone(errors)
+ refMap, errors = self.mixin.parse_css_spec('width: 15px')
+ self.assertDictEqual(
+ refMap, {'width': 'px'}
+ )
+ self.assertIsNone(errors)
+
+ def test_multiple(self):
+ refMap, errors = self.mixin.parse_css_spec('width:15px;height:20.2em;')
+ self.assertDictEqual(
+ refMap, {'height': 'em', 'width': 'px'}
+ )
+ self.assertIsNone(errors)
+ refMap, errors = self.mixin.parse_css_spec(
+ 'width:15px \t\t; height:20em'
+ )
+ self.assertDictEqual(
+ refMap, {'height': 'em', 'width': 'px'}
+ )
+ self.assertIsNone(errors)
+
+ def test_errors(self):
+ refMap, errors = self.mixin.parse_css_spec('width:15pxfoo')
+ self.assertDictEqual(
+ refMap, {'width': 'px'}
+ )
+ self.assertListEqual(
+ errors, [{'pos': 10, 'code': 'css-bad-content'}]
+ )
+ refMap, errors = self.mixin.parse_css_spec('width:15px height:20em')
+ self.assertDictEqual(
+ refMap, {'height': 'em', 'width': 'px'}
+ )
+ self.assertListEqual(
+ errors, [{'pos': 10, 'code': 'css-missing-semicolon'}]
+ )
+ refMap, errors = self.mixin.parse_css_spec('witdth:15px')
+ self.assertIsNone(refMap)
+ self.assertIsNone(errors)
+ refMap, errors = self.mixin.parse_css_spec('width:1,5px')
+ self.assertIsNone(refMap)
+ self.assertIsNone(errors)
+ refMap, errors = self.mixin.parse_css_spec('width:1.5.1px')
+ self.assertIsNone(refMap)
+ self.assertIsNone(errors)
+ refMap, errors = self.mixin.parse_css_spec('width:1.px')
+ self.assertIsNone(refMap)
+ self.assertIsNone(errors)
diff --git a/third_party/python/compare-locales/compare_locales/tests/test_compare.py b/third_party/python/compare-locales/compare_locales/tests/test_compare.py
new file mode 100644
index 0000000000..acc47cff68
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_compare.py
@@ -0,0 +1,229 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import unittest
+
+from compare_locales import compare, paths
+
+
+class TestTree(unittest.TestCase):
+ '''Test the Tree utility class
+
+    Tree value classes need to be editable in place.
+ '''
+
+ def test_empty_dict(self):
+ tree = compare.Tree(dict)
+ self.assertEqual(list(tree.getContent()), [])
+ self.assertDictEqual(
+ tree.toJSON(),
+ {}
+ )
+
+ def test_disjoint_dict(self):
+ tree = compare.Tree(dict)
+ tree['one/entry']['leaf'] = 1
+ tree['two/other']['leaf'] = 2
+ self.assertEqual(
+ list(tree.getContent()),
+ [
+ (0, 'key', ('one', 'entry')),
+ (1, 'value', {'leaf': 1}),
+ (0, 'key', ('two', 'other')),
+ (1, 'value', {'leaf': 2})
+ ]
+ )
+ self.assertDictEqual(
+ tree.toJSON(),
+ {
+ 'one/entry':
+ {'leaf': 1},
+ 'two/other':
+ {'leaf': 2}
+ }
+ )
+ self.assertMultiLineEqual(
+ str(tree),
+ '''\
+one/entry
+ {'leaf': 1}
+two/other
+ {'leaf': 2}\
+'''
+ )
+
+ def test_overlapping_dict(self):
+ tree = compare.Tree(dict)
+ tree['one/entry']['leaf'] = 1
+ tree['one/other']['leaf'] = 2
+ self.assertEqual(
+ list(tree.getContent()),
+ [
+ (0, 'key', ('one',)),
+ (1, 'key', ('entry',)),
+ (2, 'value', {'leaf': 1}),
+ (1, 'key', ('other',)),
+ (2, 'value', {'leaf': 2})
+ ]
+ )
+ self.assertDictEqual(
+ tree.toJSON(),
+ {
+ 'one': {
+ 'entry':
+ {'leaf': 1},
+ 'other':
+ {'leaf': 2}
+ }
+ }
+ )
+
+
+class TestObserver(unittest.TestCase):
+ def test_simple(self):
+ obs = compare.Observer()
+ f = paths.File('/some/real/sub/path', 'de/sub/path', locale='de')
+ obs.notify('missingEntity', f, 'one')
+ obs.notify('missingEntity', f, 'two')
+ obs.updateStats(f, {'missing': 15})
+ self.assertDictEqual(obs.toJSON(), {
+ 'summary': {
+ 'de': {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 15,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 0,
+ 'changed_w': 0,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }
+ },
+ 'details': {
+ 'de/sub/path':
+ [{'missingEntity': 'one'},
+ {'missingEntity': 'two'}]
+ }
+ })
+
+ def test_module(self):
+ obs = compare.Observer()
+ f = paths.File('/some/real/sub/path', 'path',
+ module='sub', locale='de')
+ obs.notify('missingEntity', f, 'one')
+ obs.notify('obsoleteEntity', f, 'bar')
+ obs.notify('missingEntity', f, 'two')
+ obs.updateStats(f, {'missing': 15})
+ self.assertDictEqual(obs.toJSON(), {
+ 'summary': {
+ 'de': {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 15,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 0,
+ 'changed_w': 0,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }
+ },
+ 'details': {
+ 'de/sub/path':
+ [
+ {'missingEntity': 'one'},
+ {'obsoleteEntity': 'bar'},
+ {'missingEntity': 'two'},
+ ]
+ }
+ })
+
+
+class TestAddRemove(unittest.TestCase):
+
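+    # AddRemove diffs two ordered key lists into ('equal' | 'add' | 'delete', key)
+    # actions, as exercised by the cases below.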
+ def _test(self, left, right, ref_actions):
+ ar = compare.AddRemove()
+ ar.set_left(left)
+ ar.set_right(right)
+ actions = list(ar)
+ self.assertListEqual(actions, ref_actions)
+
+ def test_equal(self):
+ self._test(['z', 'a', 'p'], ['z', 'a', 'p'], [
+ ('equal', 'z'),
+ ('equal', 'a'),
+ ('equal', 'p'),
+ ])
+
+ def test_add_start(self):
+ self._test(['a', 'p'], ['z', 'a', 'p'], [
+ ('add', 'z'),
+ ('equal', 'a'),
+ ('equal', 'p'),
+ ])
+
+ def test_add_middle(self):
+ self._test(['z', 'p'], ['z', 'a', 'p'], [
+ ('equal', 'z'),
+ ('add', 'a'),
+ ('equal', 'p'),
+ ])
+
+ def test_add_end(self):
+ self._test(['z', 'a'], ['z', 'a', 'p'], [
+ ('equal', 'z'),
+ ('equal', 'a'),
+ ('add', 'p'),
+ ])
+
+ def test_delete_start(self):
+ self._test(['z', 'a', 'p'], ['a', 'p'], [
+ ('delete', 'z'),
+ ('equal', 'a'),
+ ('equal', 'p'),
+ ])
+
+ def test_delete_middle(self):
+ self._test(['z', 'a', 'p'], ['z', 'p'], [
+ ('equal', 'z'),
+ ('delete', 'a'),
+ ('equal', 'p'),
+ ])
+
+ def test_delete_end(self):
+ self._test(['z', 'a', 'p'], ['z', 'a'], [
+ ('equal', 'z'),
+ ('equal', 'a'),
+ ('delete', 'p'),
+ ])
+
+ def test_replace_start(self):
+ self._test(['b', 'a', 'p'], ['z', 'a', 'p'], [
+ ('add', 'z'),
+ ('delete', 'b'),
+ ('equal', 'a'),
+ ('equal', 'p'),
+ ])
+
+ def test_replace_middle(self):
+ self._test(['z', 'b', 'p'], ['z', 'a', 'p'], [
+ ('equal', 'z'),
+ ('add', 'a'),
+ ('delete', 'b'),
+ ('equal', 'p'),
+ ])
+
+ def test_replace_end(self):
+ self._test(['z', 'a', 'b'], ['z', 'a', 'p'], [
+ ('equal', 'z'),
+ ('equal', 'a'),
+ ('add', 'p'),
+ ('delete', 'b'),
+ ])
diff --git a/third_party/python/compare-locales/compare_locales/tests/test_defines.py b/third_party/python/compare-locales/compare_locales/tests/test_defines.py
new file mode 100644
index 0000000000..6f903d82d5
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_defines.py
@@ -0,0 +1,251 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import unittest
+
+from compare_locales.tests import ParserTestMixin, BaseHelper
+from compare_locales.paths import File
+from compare_locales.parser import (
+ Comment,
+ DefinesInstruction,
+ Junk,
+ Whitespace,
+)
+
+
+mpl2 = '''\
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.'''
+
+
+class TestDefinesParser(ParserTestMixin, unittest.TestCase):
+
+ filename = 'defines.inc'
+
+ def testBrowser(self):
+ self._test(mpl2 + '''
+#filter emptyLines
+
+#define MOZ_LANGPACK_CREATOR mozilla.org
+
+# If non-English locales wish to credit multiple contributors, uncomment this
+# variable definition and use the format specified.
+# #define MOZ_LANGPACK_CONTRIBUTORS <em:contributor>Joe Solon</em:contributor>
+
+#unfilter emptyLines
+
+''', (
+ (Comment, mpl2),
+ (Whitespace, '\n'),
+ (DefinesInstruction, 'filter emptyLines'),
+ (Whitespace, '\n\n'),
+ ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+ (Whitespace, '\n\n'),
+ (Comment, '#define'),
+ (Whitespace, '\n\n'),
+ (DefinesInstruction, 'unfilter emptyLines'),
+ (Junk, '\n\n')))
+
+ def testBrowserWithContributors(self):
+ self._test(mpl2 + '''
+#filter emptyLines
+
+#define MOZ_LANGPACK_CREATOR mozilla.org
+
+# If non-English locales wish to credit multiple contributors, uncomment this
+# variable definition and use the format specified.
+#define MOZ_LANGPACK_CONTRIBUTORS <em:contributor>Joe Solon</em:contributor>
+
+#unfilter emptyLines
+
+''', (
+ (Comment, mpl2),
+ (Whitespace, '\n'),
+ (DefinesInstruction, 'filter emptyLines'),
+ (Whitespace, '\n\n'),
+ ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+ (Whitespace, '\n\n'),
+ (
+ 'MOZ_LANGPACK_CONTRIBUTORS',
+ '<em:contributor>Joe Solon</em:contributor>',
+ 'non-English',
+ ),
+ (Whitespace, '\n\n'),
+ (DefinesInstruction, 'unfilter emptyLines'),
+ (Junk, '\n\n')))
+
+ def testCommentWithNonAsciiCharacters(self):
+ self._test(mpl2 + '''
+#filter emptyLines
+
+# e.g. #define seamonkey_l10n <DT><A HREF="urn:foo">SeaMonkey v češtině</a>
+#define seamonkey_l10n_long
+
+#unfilter emptyLines
+
+''', (
+ (Comment, mpl2),
+ (Whitespace, '\n'),
+ (DefinesInstruction, 'filter emptyLines'),
+ (Whitespace, '\n\n'),
+ ('seamonkey_l10n_long', '', 'češtině'),
+ (Whitespace, '\n\n'),
+ (DefinesInstruction, 'unfilter emptyLines'),
+ (Junk, '\n\n')))
+
+ def test_no_empty_lines(self):
+ self._test('''#define MOZ_LANGPACK_CREATOR mozilla.org
+#define MOZ_LANGPACK_CREATOR mozilla.org
+''', (
+ ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+ (Whitespace, '\n'),
+ ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+ (Whitespace, '\n')))
+
+ def test_empty_line_between(self):
+ self._test('''#define MOZ_LANGPACK_CREATOR mozilla.org
+
+#define MOZ_LANGPACK_CREATOR mozilla.org
+''', (
+ ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+ (Junk, '\n'),
+ ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+ (Whitespace, '\n')))
+
+ def test_empty_line_at_the_beginning(self):
+ self._test('''
+#define MOZ_LANGPACK_CREATOR mozilla.org
+#define MOZ_LANGPACK_CREATOR mozilla.org
+''', (
+ (Junk, '\n'),
+ ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+ (Whitespace, '\n'),
+ ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+ (Whitespace, '\n')))
+
+ def test_filter_empty_lines(self):
+ self._test('''#filter emptyLines
+
+#define MOZ_LANGPACK_CREATOR mozilla.org
+#define MOZ_LANGPACK_CREATOR mozilla.org
+#unfilter emptyLines''', (
+ (DefinesInstruction, 'filter emptyLines'),
+ (Whitespace, '\n\n'),
+ ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+ (Whitespace, '\n'),
+ ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+ (Whitespace, '\n'),
+ (DefinesInstruction, 'unfilter emptyLines')))
+
+ def test_unfilter_empty_lines_with_trailing(self):
+ self._test('''#filter emptyLines
+
+#define MOZ_LANGPACK_CREATOR mozilla.org
+#define MOZ_LANGPACK_CREATOR mozilla.org
+#unfilter emptyLines
+''', (
+ (DefinesInstruction, 'filter emptyLines'),
+ (Whitespace, '\n\n'),
+ ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+ (Whitespace, '\n'),
+ ('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
+ (Whitespace, '\n'),
+ (DefinesInstruction, 'unfilter emptyLines'),
+ (Whitespace, '\n')))
+
+ def testToolkit(self):
+ self._test('''#define MOZ_LANG_TITLE English (US)
+''', (
+ ('MOZ_LANG_TITLE', 'English (US)'),
+ (Whitespace, '\n')))
+
+ def testToolkitEmpty(self):
+ self._test('', tuple())
+
+ def test_empty_file(self):
+ '''Test that empty files generate errors
+
+        defines.inc files are interesting that way, as their
+ content is added to the generated file.
+ '''
+ self._test('\n', ((Junk, '\n'),))
+ self._test('\n\n', ((Junk, '\n\n'),))
+ self._test(' \n\n', ((Junk, ' \n\n'),))
+
+ def test_whitespace_value(self):
+        '''Test that only a single whitespace character separates key and value
+ '''
+ # funny formatting of trailing whitespace to make it explicit
+ # and flake-8 happy
+ self._test('''\
+#define one \n\
+#define two \n\
+#define tre \n\
+''', (
+ ('one', ''),
+ (Whitespace, '\n'),
+ ('two', ' '),
+ (Whitespace, '\n'),
+ ('tre', ' '),
+ (Whitespace, '\n'),))
+
+ def test_standalone_comments(self):
+ self._test(
+ '''\
+#filter emptyLines
+# One comment
+
+# Second comment
+
+#define foo
+# bar comment
+#define bar
+
+#unfilter emptyLines
+''',
+ (
+ (DefinesInstruction, 'filter emptyLines'),
+ (Whitespace, '\n'),
+ (Comment, 'One comment'),
+ (Whitespace, '\n\n'),
+ (Comment, 'Second comment'),
+ (Whitespace, '\n\n'),
+ ('foo', ''),
+ (Whitespace, '\n'),
+ ('bar', '', 'bar comment'),
+ (Whitespace, '\n\n'),
+ (DefinesInstruction, 'unfilter emptyLines'),
+ (Whitespace, '\n'),
+ )
+ )
+
+
+class TestChecks(BaseHelper):
+ file = File('defines.inc', 'defines.inc')
+ refContent = b'''\
+#define foo bar
+'''
+
+ def test_ok(self):
+ self._test(
+ b'#define foo other',
+ tuple()
+ )
+
+ def test_bad_encoding(self):
+ self._test(
+ '#define foo touché'.encode('latin-1'),
+ (
+ (
+ "warning",
+ 17,
+ "\ufffd in: foo",
+ "encodings"
+ ),
+ )
+ )
diff --git a/third_party/python/compare-locales/compare_locales/tests/test_ini.py b/third_party/python/compare-locales/compare_locales/tests/test_ini.py
new file mode 100644
index 0000000000..e5d68482c1
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_ini.py
@@ -0,0 +1,223 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import unittest
+
+from compare_locales.tests import ParserTestMixin, BaseHelper
+from compare_locales.paths import File
+from compare_locales.parser import (
+ Comment,
+ IniSection,
+ Junk,
+ Whitespace,
+)
+
+
+mpl2 = '''\
+; This Source Code Form is subject to the terms of the Mozilla Public
+; License, v. 2.0. If a copy of the MPL was not distributed with this file,
+; You can obtain one at http://mozilla.org/MPL/2.0/.'''
+
+
+class TestIniParser(ParserTestMixin, unittest.TestCase):
+
+ filename = 'foo.ini'
+
+ def testSimpleHeader(self):
+ self._test('''; This file is in the UTF-8 encoding
+[Strings]
+TitleText=Some Title
+''', (
+ (Comment, 'UTF-8 encoding'),
+ (Whitespace, '\n'),
+ (IniSection, 'Strings'),
+ (Whitespace, '\n'),
+ ('TitleText', 'Some Title'),
+ (Whitespace, '\n')))
+
+ def testMPL2_Space_UTF(self):
+ self._test(mpl2 + '''
+
+; This file is in the UTF-8 encoding
+[Strings]
+TitleText=Some Title
+''', (
+ (Comment, mpl2),
+ (Whitespace, '\n\n'),
+ (Comment, 'UTF-8'),
+ (Whitespace, '\n'),
+ (IniSection, 'Strings'),
+ (Whitespace, '\n'),
+ ('TitleText', 'Some Title'),
+ (Whitespace, '\n')))
+
+ def testMPL2_Space(self):
+ self._test(mpl2 + '''
+
+[Strings]
+TitleText=Some Title
+''', (
+ (Comment, mpl2),
+ (Whitespace, '\n\n'),
+ (IniSection, 'Strings'),
+ (Whitespace, '\n'),
+ ('TitleText', 'Some Title'),
+ (Whitespace, '\n')))
+
+ def testMPL2_no_space(self):
+ self._test(mpl2 + '''
+[Strings]
+TitleText=Some Title
+''', (
+ (Comment, mpl2),
+ (Whitespace, '\n'),
+ (IniSection, 'Strings'),
+ (Whitespace, '\n'),
+ ('TitleText', 'Some Title'),
+ (Whitespace, '\n')))
+
+ def testMPL2_MultiSpace(self):
+ self._test(mpl2 + '''
+
+; more comments
+
+[Strings]
+TitleText=Some Title
+''', (
+ (Comment, mpl2),
+ (Whitespace, '\n\n'),
+ (Comment, 'more comments'),
+ (Whitespace, '\n\n'),
+ (IniSection, 'Strings'),
+ (Whitespace, '\n'),
+ ('TitleText', 'Some Title'),
+ (Whitespace, '\n')))
+
+ def testMPL2_JunkBeforeCategory(self):
+ self._test(mpl2 + '''
+Junk
+[Strings]
+TitleText=Some Title
+''', (
+ (Comment, mpl2),
+ (Whitespace, '\n'),
+ (Junk, 'Junk\n'),
+ (IniSection, 'Strings'),
+ (Whitespace, '\n'),
+ ('TitleText', 'Some Title'),
+ (Whitespace, '\n')))
+
+ def test_TrailingComment(self):
+ self._test(mpl2 + '''
+
+[Strings]
+TitleText=Some Title
+;Stray trailing comment
+''', (
+ (Comment, mpl2),
+ (Whitespace, '\n\n'),
+ (IniSection, 'Strings'),
+ (Whitespace, '\n'),
+ ('TitleText', 'Some Title'),
+ (Whitespace, '\n'),
+ (Comment, 'Stray trailing'),
+ (Whitespace, '\n')))
+
+ def test_SpacedTrailingComments(self):
+ self._test(mpl2 + '''
+
+[Strings]
+TitleText=Some Title
+
+;Stray trailing comment
+;Second stray comment
+
+''', (
+ (Comment, mpl2),
+ (Whitespace, '\n\n'),
+ (IniSection, 'Strings'),
+ (Whitespace, '\n'),
+ ('TitleText', 'Some Title'),
+ (Whitespace, '\n\n'),
+ (Comment, 'Second stray comment'),
+ (Whitespace, '\n\n')))
+
+ def test_TrailingCommentsAndJunk(self):
+ self._test(mpl2 + '''
+
+[Strings]
+TitleText=Some Title
+
+;Stray trailing comment
+Junk
+;Second stray comment
+
+''', (
+ (Comment, mpl2),
+ (Whitespace, '\n\n'),
+ (IniSection, 'Strings'),
+ (Whitespace, '\n'),
+ ('TitleText', 'Some Title'),
+ (Whitespace, '\n\n'),
+ (Comment, 'Stray trailing'),
+ (Whitespace, '\n'),
+ (Junk, 'Junk\n'),
+ (Comment, 'Second stray comment'),
+ (Whitespace, '\n\n')))
+
+ def test_JunkInbetweenEntries(self):
+ self._test(mpl2 + '''
+
+[Strings]
+TitleText=Some Title
+
+Junk
+
+Good=other string
+''', (
+ (Comment, mpl2),
+ (Whitespace, '\n\n'),
+ (IniSection, 'Strings'),
+ (Whitespace, '\n'),
+ ('TitleText', 'Some Title'),
+ (Whitespace, '\n\n'),
+ (Junk, 'Junk\n\n'),
+ ('Good', 'other string'),
+ (Whitespace, '\n')))
+
+ def test_empty_file(self):
+ self._test('', tuple())
+ self._test('\n', ((Whitespace, '\n'),))
+ self._test('\n\n', ((Whitespace, '\n\n'),))
+ self._test(' \n\n', ((Whitespace, ' \n\n'),))
+
+
+class TestChecks(BaseHelper):
+ file = File('foo.ini', 'foo.ini')
+ refContent = b'''\
+[Strings]
+foo=good
+'''
+
+ def test_ok(self):
+ self._test(
+ b'[Strings]\nfoo=other',
+ tuple()
+ )
+
+ def test_bad_encoding(self):
+ self._test(
+ 'foo=touché'.encode('latin-1'),
+ (
+ (
+ "warning",
+ 9,
+ "\ufffd in: foo",
+ "encodings"
+ ),
+ )
+ )
diff --git a/third_party/python/compare-locales/compare_locales/tests/test_keyedtuple.py b/third_party/python/compare-locales/compare_locales/tests/test_keyedtuple.py
new file mode 100644
index 0000000000..156a8e868c
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_keyedtuple.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+from collections import namedtuple
+import unittest
+
+from compare_locales.keyedtuple import KeyedTuple
+
+
+KeyedThing = namedtuple('KeyedThing', ['key', 'val'])
+
+
+class TestKeyedTuple(unittest.TestCase):
+ def test_constructor(self):
+ keyedtuple = KeyedTuple([])
+ self.assertEqual(keyedtuple, tuple())
+
+ def test_contains(self):
+ things = [KeyedThing('one', 'thing'), KeyedThing('two', 'things')]
+ keyedtuple = KeyedTuple(things)
+ self.assertNotIn(1, keyedtuple)
+ self.assertIn('one', keyedtuple)
+ self.assertIn(things[0], keyedtuple)
+ self.assertIn(things[1], keyedtuple)
+ self.assertNotIn(KeyedThing('three', 'stooges'), keyedtuple)
+
+ def test_getitem(self):
+ things = [KeyedThing('one', 'thing'), KeyedThing('two', 'things')]
+ keyedtuple = KeyedTuple(things)
+ self.assertEqual(keyedtuple[0], things[0])
+ self.assertEqual(keyedtuple[1], things[1])
+ self.assertEqual(keyedtuple['one'], things[0])
+ self.assertEqual(keyedtuple['two'], things[1])
+
+ def test_items(self):
+ things = [KeyedThing('one', 'thing'), KeyedThing('two', 'things')]
+ things.extend(things)
+ keyedtuple = KeyedTuple(things)
+ self.assertEqual(len(keyedtuple), 4)
+ items = list(keyedtuple.items())
+ self.assertEqual(len(items), 4)
+ self.assertEqual(
+ keyedtuple,
+ tuple((v for k, v in items))
+ )
+ self.assertEqual(
+ ('one', 'two', 'one', 'two',),
+ tuple((k for k, v in items))
+ )
diff --git a/third_party/python/compare-locales/compare_locales/tests/test_merge.py b/third_party/python/compare-locales/compare_locales/tests/test_merge.py
new file mode 100644
index 0000000000..a10a04ca16
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_merge.py
@@ -0,0 +1,1408 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import unittest
+import filecmp
+import os
+from tempfile import mkdtemp
+import shutil
+
+from compare_locales.parser import getParser
+from compare_locales.paths import File
+from compare_locales.compare.content import ContentComparer
+from compare_locales.compare.observer import Observer
+from compare_locales import mozpath
+
+
+class ContentMixin(object):
+    extension = None  # override in subclasses
+
+ @property
+ def ref(self):
+ return mozpath.join(self.tmp, "en-reference" + self.extension)
+
+ @property
+ def l10n(self):
+ return mozpath.join(self.tmp, "l10n" + self.extension)
+
+ def reference(self, content):
+ with open(self.ref, "w") as f:
+ f.write(content)
+
+ def localized(self, content):
+ with open(self.l10n, "w") as f:
+ f.write(content)
+
+
+class TestNonSupported(unittest.TestCase, ContentMixin):
+ extension = '.js'
+
+ def setUp(self):
+ self.maxDiff = None
+ self.tmp = mkdtemp()
+ os.mkdir(mozpath.join(self.tmp, "merge"))
+
+ def tearDown(self):
+ shutil.rmtree(self.tmp)
+ del self.tmp
+
+ def test_good(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = 'fooVal';""")
+ self.localized("""foo = 'lfoo';""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.js", ""),
+ File(self.l10n, "l10n.js", ""),
+ mozpath.join(self.tmp, "merge", "l10n.js"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary': {},
+ 'details': {}
+ }
+ )
+ self.assertTrue(filecmp.cmp(
+ self.l10n,
+ mozpath.join(self.tmp, "merge", 'l10n.js'))
+ )
+
+ def test_missing(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = 'fooVal';""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.add(File(self.ref, "en-reference.js", ""),
+ File(self.l10n, "l10n.js", ""),
+ mozpath.join(self.tmp, "merge", "l10n.js"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary': {},
+ 'details': {'l10n.js': [{'missingFile': 'error'}]}
+ }
+ )
+ self.assertTrue(filecmp.cmp(
+ self.ref,
+ mozpath.join(self.tmp, "merge", 'l10n.js'))
+ )
+
+ def test_missing_ignored(self):
+
+ def ignore(*args):
+ return 'ignore'
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = 'fooVal';""")
+ cc = ContentComparer()
+ cc.observers.append(Observer(filter=ignore))
+ cc.add(File(self.ref, "en-reference.js", ""),
+ File(self.l10n, "l10n.js", ""),
+ mozpath.join(self.tmp, "merge", "l10n.js"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary': {},
+ 'details': {}
+ }
+ )
+ self.assertTrue(filecmp.cmp(
+ self.ref,
+ mozpath.join(self.tmp, "merge", 'l10n.js'))
+ )
+
+
+class TestDefines(unittest.TestCase, ContentMixin):
+ '''Test case for parsers with just CAN_COPY'''
+ extension = '.inc'
+
+ def setUp(self):
+ self.maxDiff = None
+ self.tmp = mkdtemp()
+ os.mkdir(mozpath.join(self.tmp, "merge"))
+
+ def tearDown(self):
+ shutil.rmtree(self.tmp)
+ del self.tmp
+
+ def testGood(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""#filter emptyLines
+
+#define MOZ_LANGPACK_CREATOR mozilla.org
+
+#define MOZ_LANGPACK_CONTRIBUTORS <em:contributor>Suzy Solon</em:contributor>
+
+#unfilter emptyLines
+""")
+ self.localized("""#filter emptyLines
+
+#define MOZ_LANGPACK_CREATOR mozilla.org
+
+#define MOZ_LANGPACK_CONTRIBUTORS <em:contributor>Jane Doe</em:contributor>
+
+#unfilter emptyLines
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.inc", ""),
+ File(self.l10n, "l10n.inc", ""),
+ mozpath.join(self.tmp, "merge", "l10n.inc"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 1,
+ 'changed_w': 2,
+ 'unchanged': 1,
+ 'unchanged_w': 1,
+ 'keys': 0,
+ }},
+ 'details': {}
+ }
+ )
+ self.assertTrue(filecmp.cmp(
+ self.l10n,
+ mozpath.join(self.tmp, "merge", 'l10n.inc'))
+ )
+
+ def testMissing(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""#filter emptyLines
+
+#define MOZ_LANGPACK_CREATOR mozilla.org
+
+#define MOZ_LANGPACK_CONTRIBUTORS <em:contributor>Suzy Solon</em:contributor>
+
+#unfilter emptyLines
+""")
+ self.localized("""#filter emptyLines
+
+#define MOZ_LANGPACK_CREATOR mozilla.org
+
+#unfilter emptyLines
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.inc", ""),
+ File(self.l10n, "l10n.inc", ""),
+ mozpath.join(self.tmp, "merge", "l10n.inc"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {
+ 'summary':
+ {None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 1,
+ 'missing_w': 2,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 0,
+ 'changed_w': 0,
+ 'unchanged': 1,
+ 'unchanged_w': 1,
+ 'keys': 0,
+ }},
+ 'details':
+ {
+ 'l10n.inc': [
+ {'missingEntity': 'MOZ_LANGPACK_CONTRIBUTORS'}
+ ]
+ }
+ }
+ )
+ self.assertTrue(filecmp.cmp(
+ self.ref,
+ mozpath.join(self.tmp, "merge", 'l10n.inc'))
+ )
+
+
+class TestProperties(unittest.TestCase, ContentMixin):
+ extension = '.properties'
+
+ def setUp(self):
+ self.maxDiff = None
+ self.tmp = mkdtemp()
+ os.mkdir(mozpath.join(self.tmp, "merge"))
+
+ def tearDown(self):
+ shutil.rmtree(self.tmp)
+ del self.tmp
+
+ def testGood(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = fooVal word
+bar = barVal word
+eff = effVal""")
+ self.localized("""foo = lFoo
+bar = lBar
+eff = lEff word
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.properties", ""),
+ File(self.l10n, "l10n.properties", ""),
+ mozpath.join(self.tmp, "merge", "l10n.properties"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 3,
+ 'changed_w': 5,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+
+ }},
+ 'details': {}
+ }
+ )
+ self.assertTrue(filecmp.cmp(
+ self.l10n,
+ mozpath.join(self.tmp, "merge", 'l10n.properties'))
+ )
+
+ def testMissing(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = fooVal
+bar = barVal
+eff = effVal""")
+ self.localized("""bar = lBar
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.properties", ""),
+ File(self.l10n, "l10n.properties", ""),
+ mozpath.join(self.tmp, "merge", "l10n.properties"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 2,
+ 'missing_w': 2,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 1,
+ 'changed_w': 1,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }},
+ 'details': {
+ 'l10n.properties': [
+ {'missingEntity': u'foo'},
+ {'missingEntity': u'eff'}]
+ }
+ })
+ mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
+ self.assertTrue(os.path.isfile(mergefile))
+ p = getParser(mergefile)
+ p.readFile(mergefile)
+ entities = p.parse()
+ self.assertEqual(list(entities.keys()), ["bar", "foo", "eff"])
+
+ def test_missing_file(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = fooVal
+bar = barVal
+eff = effVal""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.add(File(self.ref, "en-reference.properties", ""),
+ File(self.l10n, "l10n.properties", ""),
+ mozpath.join(self.tmp, "merge", "l10n.properties"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 3,
+ 'missing_w': 3,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 0,
+ 'changed_w': 0,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }},
+ 'details': {
+ 'l10n.properties': [
+ {'missingFile': 'error'}]
+ }
+ })
+ mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
+ self.assertTrue(filecmp.cmp(self.ref, mergefile))
+
+ def testError(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = fooVal
+bar = %d barVal
+eff = effVal""")
+ self.localized("""\
+bar = %S lBar
+eff = leffVal
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.properties", ""),
+ File(self.l10n, "l10n.properties", ""),
+ mozpath.join(self.tmp, "merge", "l10n.properties"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 1,
+ 'warnings': 0,
+ 'missing': 1,
+ 'missing_w': 1,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 2,
+ 'changed_w': 3,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }},
+ 'details': {
+ 'l10n.properties': [
+ {'missingEntity': u'foo'},
+ {'error': u'argument 1 `S` should be `d` '
+ u'at line 1, column 7 for bar'}]
+ }
+ })
+ mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
+ self.assertTrue(os.path.isfile(mergefile))
+ p = getParser(mergefile)
+ p.readFile(mergefile)
+ entities = p.parse()
+ self.assertEqual(list(entities.keys()), ["eff", "foo", "bar"])
+ self.assertEqual(entities['bar'].val, '%d barVal')
+
+ def testObsolete(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = fooVal
+eff = effVal""")
+ self.localized("""foo = fooVal
+other = obsolete
+eff = leffVal
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.properties", ""),
+ File(self.l10n, "l10n.properties", ""),
+ mozpath.join(self.tmp, "merge", "l10n.properties"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 1,
+ 'changed': 1,
+ 'changed_w': 1,
+ 'unchanged': 1,
+ 'unchanged_w': 1,
+ 'keys': 0,
+ }},
+ 'details': {
+ 'l10n.properties': [
+ {'obsoleteEntity': u'other'}]
+ }
+ })
+ mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
+ self.assertTrue(filecmp.cmp(self.l10n, mergefile))
+
+ def test_obsolete_file(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.localized("""foo = fooVal
+eff = leffVal
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.remove(File(self.ref, "en-reference.properties", ""),
+ File(self.l10n, "l10n.properties", ""),
+ mozpath.join(self.tmp, "merge", "l10n.properties"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {},
+ 'details': {
+ 'l10n.properties': [
+ {'obsoleteFile': u'error'}]
+ }
+ })
+ mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
+ self.assertTrue(os.path.isfile(mergefile))
+
+ def test_duplicate(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = fooVal
+bar = barVal
+eff = effVal
+foo = other val for foo""")
+ self.localized("""foo = localized
+bar = lBar
+eff = localized eff
+bar = duplicated bar
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.properties", ""),
+ File(self.l10n, "l10n.properties", ""),
+ mozpath.join(self.tmp, "merge", "l10n.properties"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 1,
+ 'warnings': 1,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 3,
+ 'changed_w': 6,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }},
+ 'details': {
+ 'l10n.properties': [
+ {'warning': u'foo occurs 2 times'},
+ {'error': u'bar occurs 2 times'}]
+ }
+ })
+ mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
+ self.assertTrue(filecmp.cmp(self.l10n, mergefile))
+
+
+class TestDTD(unittest.TestCase, ContentMixin):
+ extension = '.dtd'
+
+ def setUp(self):
+ self.maxDiff = None
+ self.tmp = mkdtemp()
+ os.mkdir(mozpath.join(self.tmp, "merge"))
+
+ def tearDown(self):
+ shutil.rmtree(self.tmp)
+ del self.tmp
+
+ def testGood(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""<!ENTITY foo 'fooVal'>
+<!ENTITY bar 'barVal'>
+<!ENTITY eff 'effVal'>""")
+ self.localized("""<!ENTITY foo 'lFoo'>
+<!ENTITY bar 'lBar'>
+<!ENTITY eff 'lEff'>
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.dtd", ""),
+ File(self.l10n, "l10n.dtd", ""),
+ mozpath.join(self.tmp, "merge", "l10n.dtd"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 3,
+ 'changed_w': 3,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }},
+ 'details': {}
+ }
+ )
+ self.assertTrue(filecmp.cmp(
+ self.l10n,
+ mozpath.join(self.tmp, "merge", 'l10n.dtd'))
+ )
+
+ def testMissing(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""<!ENTITY foo 'fooVal'>
+<!ENTITY bar 'barVal'>
+<!ENTITY eff 'effVal'>""")
+ self.localized("""<!ENTITY bar 'lBar'>
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.dtd", ""),
+ File(self.l10n, "l10n.dtd", ""),
+ mozpath.join(self.tmp, "merge", "l10n.dtd"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 2,
+ 'missing_w': 2,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 1,
+ 'changed_w': 1,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }},
+ 'details': {
+ 'l10n.dtd': [
+ {'missingEntity': u'foo'},
+ {'missingEntity': u'eff'}]
+ }
+ })
+ mergefile = mozpath.join(self.tmp, "merge", "l10n.dtd")
+ self.assertTrue(os.path.isfile(mergefile))
+ p = getParser(mergefile)
+ p.readFile(mergefile)
+ entities = p.parse()
+ self.assertEqual(list(entities.keys()), ["bar", "foo", "eff"])
+
+ def testJunk(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""<!ENTITY foo 'fooVal'>
+<!ENTITY bar 'barVal'>
+<!ENTITY eff 'effVal'>""")
+ self.localized("""<!ENTITY foo 'fooVal'>
+<!ENTY bar 'gimmick'>
+<!ENTITY eff 'effVal'>
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.dtd", ""),
+ File(self.l10n, "l10n.dtd", ""),
+ mozpath.join(self.tmp, "merge", "l10n.dtd"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 1,
+ 'warnings': 0,
+ 'missing': 1,
+ 'missing_w': 1,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 0,
+ 'changed_w': 0,
+ 'unchanged': 2,
+ 'unchanged_w': 2,
+ 'keys': 0,
+ }},
+ 'details': {
+ 'l10n.dtd': [
+ {'error': u'Unparsed content "<!ENTY bar '
+ u'\'gimmick\'>\n" '
+ u'from line 2 column 1 to '
+ u'line 3 column 1'},
+ {'missingEntity': u'bar'}]
+ }
+ })
+ mergefile = mozpath.join(self.tmp, "merge", "l10n.dtd")
+ self.assertTrue(os.path.isfile(mergefile))
+ p = getParser(mergefile)
+ p.readFile(mergefile)
+ entities = p.parse()
+ self.assertEqual(list(entities.keys()), ["foo", "eff", "bar"])
+
+ def test_reference_junk(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""<!ENTITY foo 'fooVal'>
+<!ENT bar 'bad val'>
+<!ENTITY eff 'effVal'>""")
+ self.localized("""<!ENTITY foo 'fooVal'>
+<!ENTITY eff 'effVal'>
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.dtd", ""),
+ File(self.l10n, "l10n.dtd", ""),
+ mozpath.join(self.tmp, "merge", "l10n.dtd"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 0,
+ 'warnings': 1,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 0,
+ 'changed_w': 0,
+ 'unchanged': 2,
+ 'unchanged_w': 2,
+ 'keys': 0,
+ }},
+ 'details': {
+ 'l10n.dtd': [
+ {'warning': 'Parser error in en-US'}]
+ }
+ })
+ mergefile = mozpath.join(self.tmp, "merge", "l10n.dtd")
+ self.assertTrue(filecmp.cmp(self.l10n, mergefile))
+
+ def test_reference_xml_error(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""<!ENTITY foo 'fooVal'>
+<!ENTITY bar 'bad &val'>
+<!ENTITY eff 'effVal'>""")
+ self.localized("""<!ENTITY foo 'fooVal'>
+<!ENTITY bar 'good val'>
+<!ENTITY eff 'effVal'>
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.dtd", ""),
+ File(self.l10n, "l10n.dtd", ""),
+ mozpath.join(self.tmp, "merge", "l10n.dtd"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 0,
+ 'warnings': 1,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 1,
+ 'changed_w': 2,
+ 'unchanged': 2,
+ 'unchanged_w': 2,
+ 'keys': 0,
+ }},
+ 'details': {
+ 'l10n.dtd': [
+ {'warning': u"can't parse en-US value at line 1, "
+ u"column 0 for bar"}]
+ }
+ })
+ mergefile = mozpath.join(self.tmp, "merge", "l10n.dtd")
+ self.assertTrue(filecmp.cmp(self.l10n, mergefile))
+
+
+class TestFluent(unittest.TestCase):
+ maxDiff = None # we got big dictionaries to compare
+
+ def reference(self, content):
+ self.ref = os.path.join(self.tmp, "en-reference.ftl")
+ with open(self.ref, "w") as f:
+ f.write(content)
+
+ def localized(self, content):
+ self.l10n = os.path.join(self.tmp, "l10n.ftl")
+ with open(self.l10n, "w") as f:
+ f.write(content)
+
+ def setUp(self):
+ self.tmp = mkdtemp()
+ os.mkdir(os.path.join(self.tmp, "merge"))
+ self.ref = self.l10n = None
+
+ def tearDown(self):
+ shutil.rmtree(self.tmp)
+ del self.tmp
+ del self.ref
+ del self.l10n
+
+ def testGood(self):
+ self.reference("""\
+foo = fooVal
+bar = barVal
+-eff = effVal
+""")
+ self.localized("""\
+foo = lFoo
+bar = lBar
+-eff = lEff
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.ftl", ""),
+ File(self.l10n, "l10n.ftl", ""),
+ mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 3,
+ 'changed_w': 3,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }},
+ 'details': {}
+ }
+ )
+
+ # validate merge results
+ mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+ self.assertTrue(filecmp.cmp(self.l10n, mergepath))
+
+ def testMissing(self):
+ self.reference("""\
+foo = fooVal
+bar = barVal
+-baz = bazVal
+eff = effVal
+""")
+ self.localized("""\
+foo = lFoo
+eff = lEff
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.ftl", ""),
+ File(self.l10n, "l10n.ftl", ""),
+ mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {
+ 'details': {
+ 'l10n.ftl': [
+ {'missingEntity': u'bar'},
+ {'missingEntity': u'-baz'},
+ ],
+ },
+ 'summary': {
+ None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 2,
+ 'missing_w': 2,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 2,
+ 'changed_w': 2,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }
+ }
+ }
+ )
+
+ # validate merge results
+ mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+ self.assertTrue(filecmp.cmp(self.l10n, mergepath))
+
+ def testBroken(self):
+ self.reference("""\
+foo = fooVal
+bar = barVal
+eff = effVal
+""")
+ self.localized("""\
+-- Invalid Comment
+foo = lFoo
+bar lBar
+eff = lEff {
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.ftl", ""),
+ File(self.l10n, "l10n.ftl", ""),
+ mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {
+ 'details': {
+ 'l10n.ftl': [
+ {'error': u'Unparsed content "-- Invalid Comment" '
+ u'from line 1 column 1 '
+ u'to line 1 column 19'},
+ {'error': u'Unparsed content "bar lBar" '
+ u'from line 3 column 1 '
+ u'to line 3 column 9'},
+ {'error': u'Unparsed content "eff = lEff {" '
+ u'from line 4 column 1 '
+ u'to line 4 column 13'},
+ {'missingEntity': u'bar'},
+ {'missingEntity': u'eff'},
+ ],
+ },
+ 'summary': {
+ None: {
+ 'errors': 3,
+ 'warnings': 0,
+ 'missing': 2,
+ 'missing_w': 2,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 1,
+ 'changed_w': 1,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }
+ }
+ }
+ )
+
+ # validate merge results
+ mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+ self.assertTrue(os.path.exists(mergepath))
+
+ p = getParser(mergepath)
+ p.readFile(mergepath)
+ merged_entities = p.parse()
+ self.assertEqual(list(merged_entities.keys()), ["foo"])
+ merged_foo = merged_entities['foo']
+
+ # foo should be l10n
+ p.readFile(self.l10n)
+ l10n_entities = p.parse()
+ l10n_foo = l10n_entities['foo']
+ self.assertTrue(merged_foo.equals(l10n_foo))
+
+ def testMatchingReferences(self):
+ self.reference("""\
+foo = Reference { bar }
+""")
+ self.localized("""\
+foo = Localized { bar }
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.ftl", ""),
+ File(self.l10n, "l10n.ftl", ""),
+ mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {
+ 'details': {},
+ 'summary': {
+ None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 1,
+ 'changed_w': 1,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }
+ }
+ }
+ )
+
+ # validate merge results
+ mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+ self.assertTrue(filecmp.cmp(self.l10n, mergepath))
+
+ def testMismatchingReferences(self):
+ self.reference("""\
+foo = Reference { bar }
+bar = Reference { baz }
+baz = Reference
+""")
+ self.localized("""\
+foo = Localized { qux }
+bar = Localized
+baz = Localized { qux }
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.ftl", ""),
+ File(self.l10n, "l10n.ftl", ""),
+ mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {
+ 'details': {
+ 'l10n.ftl': [
+ {
+ 'warning':
+ u'Missing message reference: bar '
+ u'at line 1, column 1 for foo'
+ },
+ {
+ 'warning':
+ u'Obsolete message reference: qux '
+ u'at line 1, column 19 for foo'
+ },
+ {
+ 'warning':
+ u'Missing message reference: baz '
+ u'at line 2, column 1 for bar'
+ },
+ {
+ 'warning':
+ u'Obsolete message reference: qux '
+ u'at line 3, column 19 for baz'
+ },
+ ],
+ },
+ 'summary': {
+ None: {
+ 'errors': 0,
+ 'warnings': 4,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 3,
+ 'changed_w': 3,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }
+ }
+ }
+ )
+
+ # validate merge results
+ mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+ self.assertTrue(filecmp.cmp(self.l10n, mergepath))
+
+ def testMismatchingAttributes(self):
+ self.reference("""
+foo = Foo
+bar = Bar
+ .tender = Attribute value
+eff = Eff
+""")
+ self.localized("""\
+foo = lFoo
+ .obsolete = attr
+bar = lBar
+eff = lEff
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.ftl", ""),
+ File(self.l10n, "l10n.ftl", ""),
+ mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {
+ 'details': {
+ 'l10n.ftl': [
+ {
+ 'error':
+ u'Obsolete attribute: '
+ 'obsolete at line 2, column 3 for foo'
+ },
+ {
+ 'error':
+ u'Missing attribute: tender at line 3,'
+ ' column 1 for bar',
+ },
+ ],
+ },
+ 'summary': {
+ None: {
+ 'errors': 2,
+ 'warnings': 0,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 3,
+ 'changed_w': 5,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }
+ }
+ }
+ )
+
+ # validate merge results
+ mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+ self.assertTrue(os.path.exists(mergepath))
+
+ p = getParser(mergepath)
+ p.readFile(mergepath)
+ merged_entities = p.parse()
+ self.assertEqual(list(merged_entities.keys()), ["eff"])
+ merged_eff = merged_entities['eff']
+
+ # eff should be l10n
+ p.readFile(self.l10n)
+ l10n_entities = p.parse()
+ l10n_eff = l10n_entities['eff']
+ self.assertTrue(merged_eff.equals(l10n_eff))
+
+ def test_term_attributes(self):
+ self.reference("""
+-foo = Foo
+-bar = Bar
+-baz = Baz
+ .attr = Baz Attribute
+-qux = Qux
+ .attr = Qux Attribute
+-missing = Missing
+ .attr = An Attribute
+""")
+ self.localized("""\
+-foo = Localized Foo
+-bar = Localized Bar
+ .attr = Locale-specific Bar Attribute
+-baz = Localized Baz
+-qux = Localized Qux
+ .other = Locale-specific Qux Attribute
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.ftl", ""),
+ File(self.l10n, "l10n.ftl", ""),
+ mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {
+ 'details': {
+ 'l10n.ftl': [
+ {'missingEntity': u'-missing'},
+ ],
+ },
+ 'summary': {
+ None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 1,
+ 'missing_w': 1,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 4,
+ 'changed_w': 4,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }
+ }
+ }
+ )
+
+ # validate merge results
+ mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+ self.assertTrue(filecmp.cmp(self.l10n, mergepath))
+
+ def testMismatchingValues(self):
+ self.reference("""
+foo = Foo
+ .foottr = something
+bar =
+ .tender = Attribute value
+""")
+ self.localized("""\
+foo =
+ .foottr = attr
+bar = lBar
+ .tender = localized
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.ftl", ""),
+ File(self.l10n, "l10n.ftl", ""),
+ mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {
+ 'details': {
+ 'l10n.ftl': [
+ {
+ 'error':
+ u'Missing value at line 1, column 1 for foo'
+ },
+ {
+ 'error':
+ u'Obsolete value at line 3, column 7 for bar',
+ },
+ ]
+ },
+ 'summary': {
+ None: {
+ 'errors': 2,
+ 'warnings': 0,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 2,
+ 'changed_w': 4,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }
+ }
+ }
+ )
+
+ # validate merge results
+ mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+ self.assertTrue(os.path.exists(mergepath))
+
+ p = getParser(mergepath)
+ p.readFile(mergepath)
+ merged_entities = p.parse()
+ self.assertEqual(merged_entities, tuple())
+
+ def testMissingGroupComment(self):
+ self.reference("""\
+foo = fooVal
+
+## Group Comment
+bar = barVal
+""")
+ self.localized("""\
+foo = lFoo
+bar = lBar
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.ftl", ""),
+ File(self.l10n, "l10n.ftl", ""),
+ mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {
+ 'details': {},
+ 'summary': {
+ None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 2,
+ 'changed_w': 2,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }
+ }
+ }
+ )
+
+ # validate merge results
+ mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+ self.assertTrue(filecmp.cmp(self.l10n, mergepath))
+
+ def testMissingAttachedComment(self):
+ self.reference("""\
+foo = fooVal
+
+# Attached Comment
+bar = barVal
+""")
+ self.localized("""\
+foo = lFoo
+bar = barVal
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.ftl", ""),
+ File(self.l10n, "l10n.ftl", ""),
+ mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {
+ 'details': {},
+ 'summary': {
+ None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 1,
+ 'changed_w': 1,
+ 'unchanged': 1,
+ 'unchanged_w': 1,
+ 'keys': 0,
+ }
+ }
+ }
+ )
+
+ # validate merge results
+ mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+ self.assertTrue(filecmp.cmp(self.l10n, mergepath))
+
+ def testObsoleteStandaloneComment(self):
+ self.reference("""\
+foo = fooVal
+bar = barVal
+""")
+ self.localized("""\
+foo = lFoo
+
+# Standalone Comment
+
+bar = lBar
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.ftl", ""),
+ File(self.l10n, "l10n.ftl", ""),
+ mozpath.join(self.tmp, "merge", "l10n.ftl"))
+
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {
+ 'details': {},
+ 'summary': {
+ None: {
+ 'errors': 0,
+ 'warnings': 0,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 2,
+ 'changed_w': 2,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }
+ }
+ }
+ )
+
+ # validate merge results
+ mergepath = mozpath.join(self.tmp, "merge", "l10n.ftl")
+ self.assertTrue(filecmp.cmp(self.l10n, mergepath))
+
+ def test_duplicate(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = fooVal
+bar = barVal
+eff = effVal
+foo = other val for foo""")
+ self.localized("""foo = localized
+bar = lBar
+eff = localized eff
+bar = duplicated bar
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.ftl", ""),
+ File(self.l10n, "l10n.ftl", ""),
+ mozpath.join(self.tmp, "merge", "l10n.ftl"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 1,
+ 'warnings': 1,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 3,
+ 'changed_w': 6,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }},
+ 'details': {
+ 'l10n.ftl': [
+ {'warning': u'foo occurs 2 times'},
+ {'error': u'bar occurs 2 times'}]
+ }
+ })
+ mergefile = mozpath.join(self.tmp, "merge", "l10n.ftl")
+ self.assertTrue(filecmp.cmp(self.l10n, mergefile))
+
+ def test_duplicate_attributes(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = fooVal
+ .attr = good""")
+ self.localized("""foo = localized
+ .attr = not
+ .attr = so
+ .attr = good
+""")
+ cc = ContentComparer()
+ cc.observers.append(Observer())
+ cc.compare(File(self.ref, "en-reference.ftl", ""),
+ File(self.l10n, "l10n.ftl", ""),
+ mozpath.join(self.tmp, "merge", "l10n.ftl"))
+ self.assertDictEqual(
+ cc.observers.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 0,
+ 'warnings': 3,
+ 'missing': 0,
+ 'missing_w': 0,
+ 'report': 0,
+ 'obsolete': 0,
+ 'changed': 1,
+ 'changed_w': 2,
+ 'unchanged': 0,
+ 'unchanged_w': 0,
+ 'keys': 0,
+ }},
+ 'details': {
+ 'l10n.ftl': [
+ {'warning':
+ u'Attribute "attr" is duplicated '
+ u'at line 2, column 5 for foo'
+ },
+ {'warning':
+ u'Attribute "attr" is duplicated '
+ u'at line 3, column 5 for foo'
+ },
+ {'warning':
+ u'Attribute "attr" is duplicated '
+ u'at line 4, column 5 for foo'
+ },
+ ]
+ }
+ })
+ mergefile = mozpath.join(self.tmp, "merge", "l10n.ftl")
+ self.assertTrue(filecmp.cmp(self.l10n, mergefile))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/compare-locales/compare_locales/tests/test_mozpath.py b/third_party/python/compare-locales/compare_locales/tests/test_mozpath.py
new file mode 100644
index 0000000000..d4bf9ec4b2
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_mozpath.py
@@ -0,0 +1,139 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+from compare_locales.mozpath import (
+ relpath,
+ join,
+ normpath,
+ dirname,
+ commonprefix,
+ basename,
+ split,
+ splitext,
+ basedir,
+ match,
+ rebase,
+)
+import unittest
+import os
+
+
+class TestPath(unittest.TestCase):
+ SEP = os.sep
+
+ def test_relpath(self):
+ self.assertEqual(relpath('foo', 'foo'), '')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/bar'), '')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo'), 'bar')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar', 'baz')), 'foo'),
+ 'bar/baz')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/bar/baz'),
+ '..')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/baz'),
+ '../bar')
+ self.assertEqual(relpath('foo/', 'foo'), '')
+ self.assertEqual(relpath('foo/bar/', 'foo'), 'bar')
+
+ def test_join(self):
+ self.assertEqual(join('foo', 'bar', 'baz'), 'foo/bar/baz')
+ self.assertEqual(join('foo', '', 'bar'), 'foo/bar')
+ self.assertEqual(join('', 'foo', 'bar'), 'foo/bar')
+ self.assertEqual(join('', 'foo', '/bar'), '/bar')
+
+ def test_normpath(self):
+ self.assertEqual(normpath(self.SEP.join(('foo', 'bar', 'baz',
+ '..', 'qux'))), 'foo/bar/qux')
+
+ def test_dirname(self):
+ self.assertEqual(dirname('foo/bar/baz'), 'foo/bar')
+ self.assertEqual(dirname('foo/bar'), 'foo')
+ self.assertEqual(dirname('foo'), '')
+ self.assertEqual(dirname('foo/bar/'), 'foo/bar')
+
+ def test_commonprefix(self):
+ self.assertEqual(commonprefix([self.SEP.join(('foo', 'bar', 'baz')),
+ 'foo/qux', 'foo/baz/qux']), 'foo/')
+ self.assertEqual(commonprefix([self.SEP.join(('foo', 'bar', 'baz')),
+ 'foo/qux', 'baz/qux']), '')
+
+ def test_basename(self):
+ self.assertEqual(basename('foo/bar/baz'), 'baz')
+ self.assertEqual(basename('foo/bar'), 'bar')
+ self.assertEqual(basename('foo'), 'foo')
+ self.assertEqual(basename('foo/bar/'), '')
+
+ def test_split(self):
+ self.assertEqual(split(self.SEP.join(('foo', 'bar', 'baz'))),
+ ['foo', 'bar', 'baz'])
+
+ def test_splitext(self):
+ self.assertEqual(splitext(self.SEP.join(('foo', 'bar', 'baz.qux'))),
+ ('foo/bar/baz', '.qux'))
+
+ def test_basedir(self):
+ foobarbaz = self.SEP.join(('foo', 'bar', 'baz'))
+ self.assertEqual(basedir(foobarbaz, ['foo', 'bar', 'baz']), 'foo')
+ self.assertEqual(basedir(foobarbaz, ['foo', 'foo/bar', 'baz']),
+ 'foo/bar')
+ self.assertEqual(basedir(foobarbaz, ['foo/bar', 'foo', 'baz']),
+ 'foo/bar')
+ self.assertEqual(basedir(foobarbaz, ['foo', 'bar', '']), 'foo')
+ self.assertEqual(basedir(foobarbaz, ['bar', 'baz', '']), '')
+
+ def test_match(self):
+ self.assertTrue(match('foo', ''))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/bar'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo'))
+ self.assertTrue(match('foo', '*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/*/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '*/bar/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '*/*/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '*/*/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/*/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/*/*.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/b*/*z.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/b*r/ba*z.qux'))
+ self.assertFalse(match('foo/bar/baz.qux', 'foo/b*z/ba*r.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '**'))
+ self.assertTrue(match('foo/bar/baz.qux', '**/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '**/bar/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/*.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '**/foo/bar/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/*.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/*.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '**/*.qux'))
+ self.assertFalse(match('foo/bar/baz.qux', '**.qux'))
+ self.assertFalse(match('foo/bar', 'foo/*/bar'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/**'))
+ self.assertFalse(match('foo/nobar/baz.qux', 'foo/**/bar/**'))
+ self.assertTrue(match('foo/bar', 'foo/**/bar/**'))
+
+ def test_rebase(self):
+ self.assertEqual(rebase('foo', 'foo/bar', 'bar/baz'), 'baz')
+ self.assertEqual(rebase('foo', 'foo', 'bar/baz'), 'bar/baz')
+ self.assertEqual(rebase('foo/bar', 'foo', 'baz'), 'bar/baz')
+
+
+if os.altsep:
+ class TestAltPath(TestPath):
+ SEP = os.altsep
+
+ class TestReverseAltPath(TestPath):
+ def setUp(self):
+ sep = os.sep
+ os.sep = os.altsep
+ os.altsep = sep
+
+ def tearDown(self):
+ self.setUp()
+
+ class TestAltReverseAltPath(TestReverseAltPath):
+ SEP = os.altsep
diff --git a/third_party/python/compare-locales/compare_locales/tests/test_parser.py b/third_party/python/compare-locales/compare_locales/tests/test_parser.py
new file mode 100644
index 0000000000..38fe642ddf
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_parser.py
@@ -0,0 +1,118 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import pkg_resources
+import shutil
+import tempfile
+import textwrap
+import unittest
+
+from compare_locales import parser, mozpath
+
+
+class TestParserContext(unittest.TestCase):
+ def test_linecol(self):
+ "Should return 1-based line and column numbers."
+ ctx = parser.Parser.Context('''first line
+second line
+third line
+''')
+ self.assertEqual(
+ ctx.linecol(0),
+ (1, 1)
+ )
+ self.assertEqual(
+ ctx.linecol(1),
+ (1, 2)
+ )
+ self.assertEqual(
+ ctx.linecol(len('first line')),
+ (1, len('first line') + 1)
+ )
+ self.assertEqual(
+ ctx.linecol(len('first line') + 1),
+ (2, 1)
+ )
+ self.assertEqual(
+ ctx.linecol(len(ctx.contents)),
+ (4, 1)
+ )
+
+ def test_empty_parser(self):
+ p = parser.Parser()
+ entities = p.parse()
+ self.assertTupleEqual(
+ entities,
+ tuple()
+ )
+
+
+class TestOffsetComment(unittest.TestCase):
+ def test_offset(self):
+ ctx = parser.Parser.Context(textwrap.dedent('''\
+ #foo
+ #bar
+ # baz
+ '''
+ )) # noqa
+ offset_comment = parser.OffsetComment(ctx, (0, len(ctx.contents)))
+ self.assertEqual(
+ offset_comment.val,
+ textwrap.dedent('''\
+ foo
+ bar
+ baz
+ ''')
+ )
+
+
+class TestUniversalNewlines(unittest.TestCase):
+ def setUp(self):
+ '''Create a parser for this test.
+ '''
+ self.parser = parser.Parser()
+ self.dir = tempfile.mkdtemp()
+
+ def tearDown(self):
+ 'tear down this test'
+ del self.parser
+ shutil.rmtree(self.dir)
+
+ def test_universal_newlines(self):
+ f = mozpath.join(self.dir, 'file')
+ with open(f, 'wb') as fh:
+ fh.write(b'one\ntwo\rthree\r\n')
+ self.parser.readFile(f)
+ self.assertEqual(
+ self.parser.ctx.contents,
+ 'one\ntwo\nthree\n')
+
+
+class TestPlugins(unittest.TestCase):
+ def setUp(self):
+ self.old_working_set_state = pkg_resources.working_set.__getstate__()
+ distribution = pkg_resources.Distribution(__file__)
+ entry_point = pkg_resources.EntryPoint.parse(
+ 'test_parser = compare_locales.tests.test_parser:DummyParser',
+ dist=distribution
+ )
+ distribution._ep_map = {
+ 'compare_locales.parsers': {
+ 'test_parser': entry_point
+ }
+ }
+ pkg_resources.working_set.add(distribution)
+
+ def tearDown(self):
+ pkg_resources.working_set.__setstate__(self.old_working_set_state)
+
+ def test_dummy_parser(self):
+ p = parser.getParser('some/weird/file.ext')
+ self.assertIsInstance(p, DummyParser)
+
+
+class DummyParser(parser.Parser):
+ def use(self, path):
+ return path.endswith('weird/file.ext')
diff --git a/third_party/python/compare-locales/compare_locales/tests/test_util.py b/third_party/python/compare-locales/compare_locales/tests/test_util.py
new file mode 100644
index 0000000000..f549cd2c67
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/tests/test_util.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import unittest
+
+from compare_locales import util
+
+
+class ParseLocalesTest(unittest.TestCase):
+ def test_empty(self):
+ self.assertEqual(util.parseLocales(''), [])
+
+ def test_all(self):
+ self.assertEqual(util.parseLocales('''af
+de'''), ['af', 'de'])
+
+ def test_shipped(self):
+ self.assertEqual(util.parseLocales('''af
+ja win mac
+de'''), ['af', 'de', 'ja'])
+
+ def test_sparse(self):
+ self.assertEqual(util.parseLocales('''
+af
+
+de
+
+'''), ['af', 'de'])
diff --git a/third_party/python/compare-locales/compare_locales/util.py b/third_party/python/compare-locales/compare_locales/util.py
new file mode 100644
index 0000000000..71eadd8749
--- /dev/null
+++ b/third_party/python/compare-locales/compare_locales/util.py
@@ -0,0 +1,11 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file is shared between compare-locales and locale-inspector.
+# test_util is in compare-locales only, for the sake of easy
+# development.
+
+
+def parseLocales(content):
+ return sorted(l.split()[0] for l in content.splitlines() if l)
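
For orientation, a small sketch (not part of the patch) of what parseLocales returns for a typical all-locales list, mirroring the test_shipped case above:

from compare_locales.util import parseLocales

# First token of each non-empty line, sorted; extra tokens on a line are ignored.
assert parseLocales("af\nja win mac\nde") == ["af", "de", "ja"]
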
diff --git a/third_party/python/compare-locales/setup.cfg b/third_party/python/compare-locales/setup.cfg
new file mode 100644
index 0000000000..adf5ed72aa
--- /dev/null
+++ b/third_party/python/compare-locales/setup.cfg
@@ -0,0 +1,7 @@
+[bdist_wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/compare-locales/setup.py b/third_party/python/compare-locales/setup.py
new file mode 100755
index 0000000000..b273929dce
--- /dev/null
+++ b/third_party/python/compare-locales/setup.py
@@ -0,0 +1,62 @@
+from __future__ import absolute_import
+
+from setuptools import setup, find_packages
+
+import sys
+import os
+sys.path.insert(0, os.path.dirname(__file__))
+
+from compare_locales import version
+
+this_directory = os.path.abspath(os.path.dirname(__file__))
+with open(os.path.join(this_directory, 'README.md'), 'rb') as f:
+ long_description = f.read().decode('utf-8')
+
+CLASSIFIERS = """\
+Development Status :: 5 - Production/Stable
+Intended Audience :: Developers
+License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Operating System :: OS Independent
+Programming Language :: Python
+Programming Language :: Python :: 2
+Programming Language :: Python :: 2.7
+Programming Language :: Python :: 3
+Programming Language :: Python :: 3.5
+Programming Language :: Python :: 3.6
+Programming Language :: Python :: 3.7
+Topic :: Software Development :: Libraries :: Python Modules
+Topic :: Software Development :: Localization
+Topic :: Software Development :: Testing\
+"""
+
+setup(name="compare-locales",
+ version=version,
+ author="Axel Hecht",
+ author_email="axel@mozilla.com",
+ description='Lint Mozilla localizations',
+ long_description=long_description,
+ long_description_content_type='text/markdown',
+ license="MPL 2.0",
+ classifiers=CLASSIFIERS.split("\n"),
+ platforms=["any"],
+ python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4',
+ entry_points={
+ 'console_scripts':
+ [
+ 'compare-locales = compare_locales.commands:CompareLocales.call',
+ 'moz-l10n-lint = compare_locales.lint.cli:main',
+ ],
+ },
+ packages=find_packages(),
+ package_data={
+ 'compare_locales.tests': ['data/*.properties', 'data/*.dtd']
+ },
+ install_requires=[
+ 'fluent.syntax >=0.18.0, <0.19',
+ 'pytoml',
+ 'six',
+ ],
+ tests_require=[
+ 'mock<4.0',
+ ],
+ test_suite='compare_locales.tests')
diff --git a/third_party/python/cookies/PKG-INFO b/third_party/python/cookies/PKG-INFO
new file mode 100644
index 0000000000..dc06229f8a
--- /dev/null
+++ b/third_party/python/cookies/PKG-INFO
@@ -0,0 +1,109 @@
+Metadata-Version: 1.1
+Name: cookies
+Version: 2.2.1
+Summary: Friendlier RFC 6265-compliant cookie parser/renderer
+Home-page: https://github.com/sashahart/cookies
+Author: Sasha Hart
+Author-email: s@sashahart.net
+License: UNKNOWN
+Description: What is this and what is it for?
+ --------------------------------
+
+ cookies.py is a Python module for working with HTTP cookies: parsing and
+ rendering 'Cookie:' request headers and 'Set-Cookie:' response headers,
+ and exposing a convenient API for creating and modifying cookies. It can be
+        used as a replacement for Python's Cookie.py (aka http.cookies).
+
+ Features
+ --------
+
+ * Rendering according to the excellent new RFC 6265
+ (rather than using a unique ad hoc format inconsistently relating to
+ unrealistic, very old RFCs which everyone ignored). Uses URL encoding to
+ represent non-ASCII by default, like many other languages' libraries
+ * Liberal parsing, incorporating many complaints about Cookie.py barfing
+ on common cookie formats which can be reliably parsed (e.g. search 'cookie'
+ on the Python issue tracker)
+ * Well-documented code, with chapter and verse from RFCs
+ (rather than arbitrary, undocumented decisions and huge tables of magic
+ values, as you see in Cookie.py).
+ * Test coverage at 100%, with a much more comprehensive test suite
+ than Cookie.py
+ * Single-source compatible with the following Python versions:
+ 2.6, 2.7, 3.2, 3.3 and PyPy (2.7).
+ * Cleaner, less surprising API::
+
+ # old Cookie.py - this code is all directly from its docstring
+ >>> from Cookie import SmartCookie
+ >>> C = SmartCookie()
+ >>> # n.b. it's "smart" because it automatically pickles Python objects,
+ >>> # which is actually quite stupid for security reasons!
+ >>> C["rocky"] = "road"
+ >>> C["rocky"]["path"] = "/cookie"
+ >>> # So C["rocky"] is a string, except when it's a dict...
+ >>> # and why do I have to write [""] to access a fixed set of attrs?
+ >>> # Look at the atrocious way I render out a request header:
+ >>> C.output(attrs=[], header="Cookie:")
+ 'Cookie: rocky=road'
+
+ # new cookies.py
+ >>> from cookies import Cookies, Cookie
+ >>> cookies = Cookies(rocky='road')
+ >>> # Can also write explicitly: cookies['rocky'] = Cookie['road']
+ >>> cookies['rocky'].path = "/cookie"
+ >>> cookies.render_request()
+ 'rocky=road'
+ * Friendly to customization, extension, and reuse of its parts.
+ Unlike Cookie.py, it doesn't lock all implementation inside its own classes
+ (forcing you to write ugly wrappers as Django, Trac, Werkzeug/Flask, web.py
+ and Tornado had to do). You can suppress minor parse exceptions with
+ parameters rather than subclass wrappers. You can plug in your own parsers,
+ renderers and validators for new or existing cookie attributes. You can
+ render the data out in a dict. You can easily use the underlying imperative
+ API or even lift the parser's regexps for your own parser or project. They
+ are very well documented and relate directly to RFCs, so you know exactly
+ what you are getting and why. It's MIT-licensed so do
+ what you want (but I'd love to know what use you are getting from it!)
+ * One file, so you can just drop cookies.py into your project if you like
+ * MIT license, so you can use it in whatever you want with no strings
+
+ Things this is not meant to do
+ ------------------------------
+ While this is intended to be a good module for handling cookies, it does not
+ even try to do any of the following:
+
+ * Maintain backward compatibility with Cookie.py, which would mean
+ inheriting its confusions and bugs
+ * Implement RFCs 2109 or 2965, which have always been ignored by almost
+ everyone and are now obsolete as well
+ * Handle every conceivable output from terrible legacy apps, which is not
+ possible to do without lots of silent data loss and corruption (the
+          parser does try to be as liberal as possible otherwise, though)
+ * Provide a means to store pickled Python objects in cookie values
+ (that's a big security hole)
+
+ This doesn't compete with the cookielib (http.cookiejar) module in the Python
+ standard library, which is specifically for implementing cookie storage and
+ similar behavior in an HTTP client such as a browser. Things cookielib does
+ that this doesn't:
+
+ * Write to or read from browsers' cookie stores or other proprietary
+ formats for storing cookie data in files
+ * Handle the browser/client logic like deciding which cookies to send or
+ discard, etc.
+
+ If you are looking for a cookie library but neither this one nor cookielib
+ will help, you might also consider the implementations in WebOb or Bottle.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Other Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff --git a/third_party/python/cookies/README b/third_party/python/cookies/README
new file mode 100644
index 0000000000..2363dadb2f
--- /dev/null
+++ b/third_party/python/cookies/README
@@ -0,0 +1,88 @@
+What is this and what is it for?
+--------------------------------
+
+cookies.py is a Python module for working with HTTP cookies: parsing and
+rendering 'Cookie:' request headers and 'Set-Cookie:' response headers,
+and exposing a convenient API for creating and modifying cookies. It can be
+used as a replacement for Python's Cookie.py (aka http.cookies).
+
+Features
+--------
+
+* Rendering according to the excellent new RFC 6265
+ (rather than using a unique ad hoc format inconsistently relating to
+ unrealistic, very old RFCs which everyone ignored). Uses URL encoding to
+ represent non-ASCII by default, like many other languages' libraries
+* Liberal parsing, incorporating many complaints about Cookie.py barfing
+ on common cookie formats which can be reliably parsed (e.g. search 'cookie'
+ on the Python issue tracker)
+* Well-documented code, with chapter and verse from RFCs
+ (rather than arbitrary, undocumented decisions and huge tables of magic
+ values, as you see in Cookie.py).
+* Test coverage at 100%, with a much more comprehensive test suite
+ than Cookie.py
+* Single-source compatible with the following Python versions:
+ 2.6, 2.7, 3.2, 3.3 and PyPy (2.7).
+* Cleaner, less surprising API::
+
+ # old Cookie.py - this code is all directly from its docstring
+ >>> from Cookie import SmartCookie
+ >>> C = SmartCookie()
+ >>> # n.b. it's "smart" because it automatically pickles Python objects,
+ >>> # which is actually quite stupid for security reasons!
+ >>> C["rocky"] = "road"
+ >>> C["rocky"]["path"] = "/cookie"
+ >>> # So C["rocky"] is a string, except when it's a dict...
+ >>> # and why do I have to write [""] to access a fixed set of attrs?
+ >>> # Look at the atrocious way I render out a request header:
+ >>> C.output(attrs=[], header="Cookie:")
+ 'Cookie: rocky=road'
+
+ # new cookies.py
+ >>> from cookies import Cookies, Cookie
+ >>> cookies = Cookies(rocky='road')
+ >>> # Can also write explicitly: cookies['rocky'] = Cookie['road']
+ >>> cookies['rocky'].path = "/cookie"
+ >>> cookies.render_request()
+ 'rocky=road'
+* Friendly to customization, extension, and reuse of its parts.
+ Unlike Cookie.py, it doesn't lock all implementation inside its own classes
+ (forcing you to write ugly wrappers as Django, Trac, Werkzeug/Flask, web.py
+ and Tornado had to do). You can suppress minor parse exceptions with
+ parameters rather than subclass wrappers. You can plug in your own parsers,
+ renderers and validators for new or existing cookie attributes. You can
+ render the data out in a dict. You can easily use the underlying imperative
+ API or even lift the parser's regexps for your own parser or project. They
+ are very well documented and relate directly to RFCs, so you know exactly
+ what you are getting and why. It's MIT-licensed so do
+ what you want (but I'd love to know what use you are getting from it!)
+* One file, so you can just drop cookies.py into your project if you like
+* MIT license, so you can use it in whatever you want with no strings
+
+Things this is not meant to do
+------------------------------
+While this is intended to be a good module for handling cookies, it does not
+even try to do any of the following:
+
+* Maintain backward compatibility with Cookie.py, which would mean
+ inheriting its confusions and bugs
+* Implement RFCs 2109 or 2965, which have always been ignored by almost
+ everyone and are now obsolete as well
+* Handle every conceivable output from terrible legacy apps, which is not
+ possible to do without lots of silent data loss and corruption (the
+  parser does try to be as liberal as possible otherwise, though)
+* Provide a means to store pickled Python objects in cookie values
+ (that's a big security hole)
+
+This doesn't compete with the cookielib (http.cookiejar) module in the Python
+standard library, which is specifically for implementing cookie storage and
+similar behavior in an HTTP client such as a browser. Things cookielib does
+that this doesn't:
+
+* Write to or read from browsers' cookie stores or other proprietary
+ formats for storing cookie data in files
+* Handle the browser/client logic like deciding which cookies to send or
+ discard, etc.
+
+If you are looking for a cookie library but neither this one nor cookielib
+will help, you might also consider the implementations in WebOb or Bottle.
diff --git a/third_party/python/cookies/cookies.py b/third_party/python/cookies/cookies.py
new file mode 100644
index 0000000000..d1637d2263
--- /dev/null
+++ b/third_party/python/cookies/cookies.py
@@ -0,0 +1,1169 @@
+"""Parse, manipulate and render cookies in a convenient way.
+
+Copyright (c) 2011-2014, Sasha Hart.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+"""
+__version__ = "2.2.1"
+import re
+import datetime
+import logging
+import sys
+from unicodedata import normalize
+if sys.version_info >= (3, 0, 0): # pragma: no cover
+ from urllib.parse import (
+ quote as _default_quote, unquote as _default_unquote)
+ basestring = str
+ long = int
+else: # pragma: no cover
+ from urllib import (
+ quote as _default_quote, unquote as _default_unquote)
+
+
+def _total_seconds(td):
+ """Wrapper to work around lack of .total_seconds() method in Python 3.1.
+ """
+ if hasattr(td, "total_seconds"):
+ return td.total_seconds()
+    return td.days * 3600 * 24 + td.seconds + td.microseconds / 1000000.0
+
+# see test_encoding_assumptions for how these magical safe= parms were figured
+# out. the differences are because of what cookie-octet may contain
+# vs the more liberal spec for extension-av
+default_cookie_quote = lambda item: _default_quote(
+ item, safe='!#$%&\'()*+/:<=>?@[]^`{|}~')
+
+default_extension_quote = lambda item: _default_quote(
+ item, safe=' !"#$%&\'()*+,/:<=>?@[\\]^`{|}~')
+
+default_unquote = _default_unquote
+
+
+def _report_invalid_cookie(data):
+ "How this module logs a bad cookie when exception suppressed"
+ logging.error("invalid Cookie: %r", data)
+
+
+def _report_unknown_attribute(name):
+ "How this module logs an unknown attribute when exception suppressed"
+ logging.error("unknown Cookie attribute: %r", name)
+
+
+def _report_invalid_attribute(name, value, reason):
+ "How this module logs a bad attribute when exception suppressed"
+ logging.error("invalid Cookie attribute (%s): %r=%r", reason, name, value)
+
+
+class CookieError(Exception):
+ """Base class for this module's exceptions, so you can catch them all if
+ you want to.
+ """
+ def __init__(self):
+ Exception.__init__(self)
+
+
+class InvalidCookieError(CookieError):
+ """Raised when attempting to parse or construct a cookie which is
+ syntactically invalid (in any way that has possibly serious implications).
+ """
+ def __init__(self, data=None, message=""):
+ CookieError.__init__(self)
+ self.data = data
+ self.message = message
+
+ def __str__(self):
+ return '%r %r' % (self.message, self.data)
+
+
+class InvalidCookieAttributeError(CookieError):
+ """Raised when setting an invalid attribute on a Cookie.
+ """
+ def __init__(self, name, value, reason=None):
+ CookieError.__init__(self)
+ self.name = name
+ self.value = value
+ self.reason = reason
+
+ def __str__(self):
+ prefix = ("%s: " % self.reason) if self.reason else ""
+ if self.name is None:
+ return '%s%r' % (prefix, self.value)
+ return '%s%r = %r' % (prefix, self.name, self.value)
+
+
+class Definitions(object):
+ """Namespace to hold definitions used in cookie parsing (mostly pieces of
+ regex).
+
+ These are separated out for individual testing against examples and RFC
+ grammar, and kept here to avoid cluttering other namespaces.
+ """
+ # Most of the following are set down or cited in RFC 6265 4.1.1
+
+ # This is the grammar's 'cookie-name' defined as 'token' per RFC 2616 2.2.
+ COOKIE_NAME = r"!#$%&'*+\-.0-9A-Z^_`a-z|~"
+
+ # 'cookie-octet' - as used twice in definition of 'cookie-value'
+ COOKIE_OCTET = r"\x21\x23-\x2B\--\x3A\x3C-\x5B\]-\x7E"
+
+ # extension-av - also happens to be a superset of cookie-av and path-value
+ EXTENSION_AV = """ !"#$%&\\\\'()*+,\-./0-9:<=>?@A-Z[\\]^_`a-z{|}~"""
+
+ # This is for the first pass parse on a Set-Cookie: response header. It
+ # includes cookie-value, cookie-pair, set-cookie-string, cookie-av.
+ # extension-av is used to extract the chunk containing variable-length,
+ # unordered attributes. The second pass then uses ATTR to break out each
+ # attribute and extract it appropriately.
+    # As compared with the RFC production grammar, it is much more liberal with
+ # space characters, in order not to break on data made by barbarians.
+ SET_COOKIE_HEADER = """(?x) # Verbose mode
+ ^(?:Set-Cookie:[ ]*)?
+ (?P<name>[{name}:]+)
+ [ ]*=[ ]*
+
+ # Accept anything in quotes - this is not RFC 6265, but might ease
+ # working with older code that half-heartedly works with 2965. Accept
+ # spaces inside tokens up front, so we can deal with that error one
+ # cookie at a time, after this first pass.
+ (?P<value>(?:"{value}*")|(?:[{cookie_octet} ]*))
+ [ ]*
+
+ # Extract everything up to the end in one chunk, which will be broken
+ # down in the second pass. Don't match if there's any unexpected
+ # garbage at the end (hence the \Z; $ matches before newline).
+ (?P<attrs>(?:;[ ]*[{cookie_av}]+)*)
+ """.format(name=COOKIE_NAME, cookie_av=EXTENSION_AV + ";",
+ cookie_octet=COOKIE_OCTET, value="[^;]")
+
+ # Now we specify the individual patterns for the attribute extraction pass
+ # of Set-Cookie parsing (mapping to *-av in the RFC grammar). Things which
+ # don't match any of these but are in extension-av are simply ignored;
+ # anything else should be rejected in the first pass (SET_COOKIE_HEADER).
+
+ # Max-Age attribute. These are digits, they are expressed this way
+ # because that is how they are expressed in the RFC.
+ MAX_AGE_AV = "Max-Age=(?P<max_age>[\x30-\x39]+)"
+
+ # Domain attribute; a label is one part of the domain
+ LABEL = '{let_dig}(?:(?:{let_dig_hyp}+)?{let_dig})?'.format(
+ let_dig="[A-Za-z0-9]", let_dig_hyp="[0-9A-Za-z\-]")
+ DOMAIN = "\.?(?:{label}\.)*(?:{label})".format(label=LABEL)
+ # Parse initial period though it's wrong, as RFC 6265 4.1.2.3
+ DOMAIN_AV = "Domain=(?P<domain>{domain})".format(domain=DOMAIN)
+
+ # Path attribute. We don't take special care with quotes because
+ # they are hardly used, they don't allow invalid characters per RFC 6265,
+ # and " is a valid character to occur in a path value anyway.
+ PATH_AV = 'Path=(?P<path>[%s]+)' % EXTENSION_AV
+
+ # Expires attribute. This gets big because of date parsing, which needs to
+ # support a large range of formats, so it's broken down into pieces.
+
+ # Generate a mapping of months to use in render/parse, to avoid
+ # localizations which might be produced by strftime (e.g. %a -> Mayo)
+ month_list = ["January", "February", "March", "April", "May", "June",
+ "July", "August", "September", "October", "November",
+ "December"]
+ month_abbr_list = [item[:3] for item in month_list]
+ month_numbers = {}
+ for index, name in enumerate(month_list):
+ name = name.lower()
+ month_numbers[name[:3]] = index + 1
+ month_numbers[name] = index + 1
+ # Use the same list to create regexps for months.
+ MONTH_SHORT = "(?:" + "|".join(item[:3] for item in month_list) + ")"
+ MONTH_LONG = "(?:" + "|".join(item for item in month_list) + ")"
+
+ # Same drill with weekdays, for the same reason.
+ weekday_list = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday",
+ "Saturday", "Sunday"]
+ weekday_abbr_list = [item[:3] for item in weekday_list]
+ WEEKDAY_SHORT = "(?:" + "|".join(item[:3] for item in weekday_list) + ")"
+ WEEKDAY_LONG = "(?:" + "|".join(item for item in weekday_list) + ")"
+
+ # This regexp tries to exclude obvious nonsense in the first pass.
+ DAY_OF_MONTH = "(?:[0 ]?[1-9]|[12][0-9]|[3][01])(?!\d)"
+
+ # Here is the overall date format; ~99% of cases fold into one generalized
+ # syntax like RFC 1123, and many of the rest use asctime-like formats.
+ # (see test_date_formats for a full exegesis)
+ DATE = """(?ix) # Case-insensitive mode, verbose mode
+ (?:
+ (?P<weekday>(?:{wdy}|{weekday}),[ ])?
+ (?P<day>{day})
+ [ \-]
+ (?P<month>{mon}|{month})
+ [ \-]
+ # This does not support 3-digit years, which are rare and don't
+ # seem to have one canonical interpretation.
+ (?P<year>(?:\d{{2}}|\d{{4}}))
+ [ ]
+ # HH:MM[:SS] GMT
+ (?P<hour>(?:[ 0][0-9]|[01][0-9]|2[0-3]))
+ :(?P<minute>(?:0[0-9]|[1-5][0-9]))
+ (?::(?P<second>\d{{2}}))?
+ [ ]GMT
+ |
+ # Support asctime format, e.g. 'Sun Nov 6 08:49:37 1994'
+ (?P<weekday2>{wdy})[ ]
+ (?P<month2>{mon})[ ]
+ (?P<day2>[ ]\d|\d\d)[ ]
+ (?P<hour2>\d\d):
+ (?P<minute2>\d\d)
+ (?::(?P<second2>\d\d)?)[ ]
+ (?P<year2>\d\d\d\d)
+ (?:[ ]GMT)? # GMT (Amazon)
+ )
+ """
+ DATE = DATE.format(wdy=WEEKDAY_SHORT, weekday=WEEKDAY_LONG,
+ day=DAY_OF_MONTH, mon=MONTH_SHORT, month=MONTH_LONG)
+
+ EXPIRES_AV = "Expires=(?P<expires>%s)" % DATE
+
+ # Now we're ready to define a regexp which can match any number of attrs
+ # in the variable portion of the Set-Cookie header (like the unnamed latter
+ # part of set-cookie-string in the grammar). Each regexp of any complexity
+ # is split out for testing by itself.
+ ATTR = """(?ix) # Case-insensitive mode, verbose mode
+ # Always start with start or semicolon and any number of spaces
+ (?:^|;)[ ]*(?:
+ # Big disjunction of attribute patterns (*_AV), with named capture
+ # groups to extract everything in one pass. Anything unrecognized
+ # goes in the 'unrecognized' capture group for reporting.
+ {expires}
+ |{max_age}
+ |{domain}
+ |{path}
+ |(?P<secure>Secure=?)
+ |(?P<httponly>HttpOnly=?)
+ |Version=(?P<version>[{stuff}]+)
+ |Comment=(?P<comment>[{stuff}]+)
+ |(?P<unrecognized>[{stuff}]+)
+ )
+ # End with any number of spaces not matched by the preceding (up to the
+ # next semicolon) - but do not capture these.
+ [ ]*
+ """.format(expires=EXPIRES_AV, max_age=MAX_AGE_AV, domain=DOMAIN_AV,
+ path=PATH_AV, stuff=EXTENSION_AV)
+
+ # For request data ("Cookie: ") parsing, with finditer cf. RFC 6265 4.2.1
+ COOKIE = """(?x) # Verbose mode
+ (?: # Either something close to valid...
+
+ # Match starts at start of string, or at separator.
+ # Split on comma for the sake of legacy code (RFC 2109/2965),
+ # and since it only breaks when invalid commas are put in values.
+ # see http://bugs.python.org/issue1210326
+ (?:^Cookie:|^|;|,)
+
+ # 1 or more valid token characters making up the name (captured)
+ # with colon added to accommodate users of some old Java apps, etc.
+ [ ]*
+ (?P<name>[{name}:]+)
+ [ ]*
+ =
+ [ ]*
+
+ # While 6265 provides only for cookie-octet, this allows just about
+ # anything in quotes (like in RFC 2616); people stuck on RFC
+ # 2109/2965 will expect it to work this way. The non-quoted token
+ # allows interior spaces ('\x20'), which is not valid. In both
+ # cases, the decision of whether to allow these is downstream.
+ (?P<value>
+ ["][^\00-\31"]*["]
+ |
+ [{value}]
+ |
+ [{value}][{value} ]*[{value}]+
+ |
+ )
+
+ # ... Or something way off-spec - extract to report and move on
+ |
+ (?P<invalid>[^;]+)
+ )
+ # Trailing spaces after value
+ [ ]*
+ # Must end with ; or be at end of string (don't consume this though,
+ # so use the lookahead assertion ?=
+ (?=;|\Z)
+ """.format(name=COOKIE_NAME, value=COOKIE_OCTET)
+
+ # Precompile externally useful definitions into re objects.
+ COOKIE_NAME_RE = re.compile("^([%s:]+)\Z" % COOKIE_NAME)
+ COOKIE_RE = re.compile(COOKIE)
+ SET_COOKIE_HEADER_RE = re.compile(SET_COOKIE_HEADER)
+ ATTR_RE = re.compile(ATTR)
+ DATE_RE = re.compile(DATE)
+ DOMAIN_RE = re.compile(DOMAIN)
+ PATH_RE = re.compile('^([%s]+)\Z' % EXTENSION_AV)
+ EOL = re.compile("(?:\r\n|\n)")
+
+
+def strip_spaces_and_quotes(value):
+ """Remove invalid whitespace and/or single pair of dquotes and return None
+ for empty strings.
+
+ Used to prepare cookie values, path, and domain attributes in a way which
+ tolerates simple formatting mistakes and standards variations.
+ """
+ value = value.strip() if value else ""
+ if value and len(value) > 1 and (value[0] == value[-1] == '"'):
+ value = value[1:-1]
+ if not value:
+ value = ""
+ return value
+
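
A rough illustration (not from the library's own tests) of the tolerant cleanup described above:

from cookies import strip_spaces_and_quotes

# Surrounding whitespace and one pair of double quotes are removed.
assert strip_spaces_and_quotes('  "/cookie"  ') == '/cookie'
assert strip_spaces_and_quotes(None) == ''
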
+
+def parse_string(data, unquote=default_unquote):
+ """Decode URL-encoded strings to UTF-8 containing the escaped chars.
+ """
+ if data is None:
+ return None
+
+ # We'll soon need to unquote to recover our UTF-8 data.
+ # In Python 2, unquote crashes on chars beyond ASCII. So encode functions
+ # had better not include anything beyond ASCII in data.
+ # In Python 3, unquote crashes on bytes objects, requiring conversion to
+ # str objects (unicode) using decode().
+ # But in Python 2, the same decode causes unquote to butcher the data.
+ # So in that case, just leave the bytes.
+ if isinstance(data, bytes):
+ if sys.version_info > (3, 0, 0): # pragma: no cover
+ data = data.decode('ascii')
+ # Recover URL encoded data
+ unquoted = unquote(data)
+ # Without this step, Python 2 may have good URL decoded *bytes*,
+ # which will therefore not normalize as unicode and not compare to
+ # the original.
+ if isinstance(unquoted, bytes):
+ unquoted = unquoted.decode('utf-8')
+ return unquoted
+
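
As a sketch of the decoding this provides (illustrative values only):

from cookies import parse_string

# URL-encoded input comes back as a text (unicode) string.
assert parse_string('rocky%20road') == 'rocky road'
assert parse_string(None) is None
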
+
+def parse_date(value):
+ """Parse an RFC 1123 or asctime-like format date string to produce
+ a Python datetime object (without a timezone).
+ """
+ # Do the regex magic; also enforces 2 or 4 digit years
+ match = Definitions.DATE_RE.match(value) if value else None
+ if not match:
+ return None
+ # We're going to extract and prepare captured data in 'data'.
+ data = {}
+ captured = match.groupdict()
+ fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
+ # If we matched on the RFC 1123 family format
+ if captured['year']:
+ for field in fields:
+ data[field] = captured[field]
+ # If we matched on the asctime format, use year2 etc.
+ else:
+ for field in fields:
+ data[field] = captured[field + "2"]
+ year = data['year']
+ # Interpret lame 2-digit years - base the cutoff on UNIX epoch, in case
+ # someone sets a '70' cookie meaning 'distant past'. This won't break for
+ # 58 years and people who use 2-digit years are asking for it anyway.
+ if len(year) == 2:
+ if int(year) < 70:
+ year = "20" + year
+ else:
+ year = "19" + year
+ year = int(year)
+ # Clamp to [1900, 9999]: strftime has min 1900, datetime has max 9999
+ data['year'] = max(1900, min(year, 9999))
+ # Other things which are numbers should convert to integer
+ for field in ['day', 'hour', 'minute', 'second']:
+ if data[field] is None:
+ data[field] = 0
+ data[field] = int(data[field])
+ # Look up the number datetime needs for the named month
+ data['month'] = Definitions.month_numbers[data['month'].lower()]
+ return datetime.datetime(**data)
+
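
A sketch of the two accepted date shapes, using the well-known RFC example timestamp (not part of the module itself):

import datetime
from cookies import parse_date

# RFC 1123 style
assert parse_date('Sun, 06 Nov 1994 08:49:37 GMT') == \
    datetime.datetime(1994, 11, 6, 8, 49, 37)
# asctime style, as emitted by some legacy servers
assert parse_date('Sun Nov  6 08:49:37 1994') == \
    datetime.datetime(1994, 11, 6, 8, 49, 37)
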
+
+def parse_domain(value):
+ """Parse and validate an incoming Domain attribute value.
+ """
+ value = strip_spaces_and_quotes(value)
+ if value:
+ assert valid_domain(value)
+ return value
+
+
+def parse_path(value):
+ """Parse and validate an incoming Path attribute value.
+ """
+ value = strip_spaces_and_quotes(value)
+ assert valid_path(value)
+ return value
+
+
+def parse_value(value, allow_spaces=True, unquote=default_unquote):
+ "Process a cookie value"
+ if value is None:
+ return None
+ value = strip_spaces_and_quotes(value)
+ value = parse_string(value, unquote=unquote)
+ if not allow_spaces:
+ assert ' ' not in value
+ return value
+
+
+def valid_name(name):
+ "Validate a cookie name string"
+ if isinstance(name, bytes):
+ name = name.decode('ascii')
+ if not Definitions.COOKIE_NAME_RE.match(name):
+ return False
+ # This module doesn't support $identifiers, which are part of an obsolete
+ # and highly complex standard which is never used.
+ if name[0] == "$":
+ return False
+ return True
+
+
+def valid_value(value, quote=default_cookie_quote, unquote=default_unquote):
+ """Validate a cookie value string.
+
+ This is generic across quote/unquote functions because it directly verifies
+ the encoding round-trip using the specified quote/unquote functions.
+ So if you use different quote/unquote functions, use something like this
+ as a replacement for valid_value::
+
+ my_valid_value = lambda value: valid_value(value, quote=my_quote,
+ unquote=my_unquote)
+ """
+ if value is None:
+ return False
+
+ # Put the value through a round trip with the given quote and unquote
+ # functions, so we will know whether data will get lost or not in the event
+ # that we don't complain.
+ encoded = encode_cookie_value(value, quote=quote)
+ decoded = parse_string(encoded, unquote=unquote)
+
+ # If the original string made the round trip, this is a valid value for the
+ # given quote and unquote functions. Since the round trip can generate
+ # different unicode forms, normalize before comparing, so we can ignore
+ # trivial inequalities.
+ decoded_normalized = (normalize("NFKD", decoded)
+ if not isinstance(decoded, bytes) else decoded)
+ value_normalized = (normalize("NFKD", value)
+ if not isinstance(value, bytes) else value)
+ if decoded_normalized == value_normalized:
+ return True
+ return False
+
+
+def valid_date(date):
+ "Validate an expires datetime object"
+ # We want something that acts like a datetime. In particular,
+ # strings indicate a failure to parse down to an object and ints are
+ # nonstandard and ambiguous at best.
+ if not hasattr(date, 'tzinfo'):
+ return False
+ # Relevant RFCs define UTC as 'close enough' to GMT, and the maximum
+ # difference between UTC and GMT is often stated to be less than a second.
+ if date.tzinfo is None or _total_seconds(date.utcoffset()) < 1.1:
+ return True
+ return False
+
+
+def valid_domain(domain):
+ "Validate a cookie domain ASCII string"
+ # Using encoding on domain would confuse browsers into not sending cookies.
+ # Generate UnicodeDecodeError up front if it can't store as ASCII.
+ domain.encode('ascii')
+ # Domains starting with periods are not RFC-valid, but this is very common
+ # in existing cookies, so they should still parse with DOMAIN_AV.
+ if Definitions.DOMAIN_RE.match(domain):
+ return True
+ return False
+
+
+def valid_path(value):
+ "Validate a cookie path ASCII string"
+ # Generate UnicodeDecodeError if path can't store as ASCII.
+ value.encode("ascii")
+ # Cookies without leading slash will likely be ignored, raise ASAP.
+ if not (value and value[0] == "/"):
+ return False
+ if not Definitions.PATH_RE.match(value):
+ return False
+ return True
+
+
+def valid_max_age(number):
+ "Validate a cookie Max-Age"
+ if isinstance(number, basestring):
+ try:
+ number = long(number)
+ except (ValueError, TypeError):
+ return False
+ if number >= 0 and number % 1 == 0:
+ return True
+ return False
+
+
+def encode_cookie_value(data, quote=default_cookie_quote):
+ """URL-encode strings to make them safe for a cookie value.
+
+ By default this uses urllib quoting, as used in many other cookie
+ implementations and in other Python code, instead of an ad hoc escaping
+ mechanism which includes backslashes (these also being illegal chars in RFC
+ 6265).
+ """
+ if data is None:
+ return None
+
+ # encode() to ASCII bytes so quote won't crash on non-ASCII.
+ # but doing that to bytes objects is nonsense.
+ # On Python 2 encode crashes if s is bytes containing non-ASCII.
+ # On Python 3 encode crashes on all byte objects.
+ if not isinstance(data, bytes):
+ data = data.encode("utf-8")
+
+ # URL encode data so it is safe for cookie value
+ quoted = quote(data)
+
+ # Don't force to bytes, so that downstream can use proper string API rather
+ # than crippled bytes, and to encourage encoding to be done just once.
+ return quoted
+
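
A small sketch of the encoding, and of how it pairs with parse_string above (illustrative values):

from cookies import encode_cookie_value, parse_string

# Spaces are percent-encoded for the cookie value; decoding round-trips.
assert encode_cookie_value('rocky road') == 'rocky%20road'
assert parse_string(encode_cookie_value('rocky road')) == 'rocky road'
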
+
+def encode_extension_av(data, quote=default_extension_quote):
+ """URL-encode strings to make them safe for an extension-av
+ (extension attribute value): <any CHAR except CTLs or ";">
+ """
+ if not data:
+ return ''
+ return quote(data)
+
+
+def render_date(date):
+ """Render a date (e.g. an Expires value) per RFCs 6265/2616/1123.
+
+ Don't give this localized (timezone-aware) datetimes. If you use them,
+ convert them to GMT before passing them to this. There are too many
+ conversion corner cases to handle this universally.
+ """
+ if not date:
+ return None
+ assert valid_date(date)
+ # Avoid %a and %b, which can change with locale, breaking compliance
+ weekday = Definitions.weekday_abbr_list[date.weekday()]
+ month = Definitions.month_abbr_list[date.month - 1]
+ return date.strftime("{day}, %d {month} %Y %H:%M:%S GMT"
+ ).format(day=weekday, month=month)
+
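
Sketch of the rendered form for a naive (GMT-assumed) datetime, matching the parse_date example earlier:

import datetime
from cookies import render_date

assert render_date(datetime.datetime(1994, 11, 6, 8, 49, 37)) == \
    'Sun, 06 Nov 1994 08:49:37 GMT'
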
+
+def render_domain(domain):
+ if not domain:
+ return None
+ if domain[0] == '.':
+ return domain[1:]
+ return domain
+
+
+def _parse_request(header_data, ignore_bad_cookies=False):
+ """Turn one or more lines of 'Cookie:' header data into a dict mapping
+ cookie names to cookie values (raw strings).
+ """
+ cookies_dict = {}
+ for line in Definitions.EOL.split(header_data.strip()):
+ matches = Definitions.COOKIE_RE.finditer(line)
+ matches = [item for item in matches]
+ for match in matches:
+ invalid = match.group('invalid')
+ if invalid:
+ if not ignore_bad_cookies:
+ raise InvalidCookieError(data=invalid)
+ _report_invalid_cookie(invalid)
+ continue
+ name = match.group('name')
+ values = cookies_dict.get(name)
+ value = match.group('value').strip('"')
+ if values:
+ values.append(value)
+ else:
+ cookies_dict[name] = [value]
+ if not matches:
+ if not ignore_bad_cookies:
+ raise InvalidCookieError(data=line)
+ _report_invalid_cookie(line)
+ return cookies_dict
+
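
Roughly, the helper accumulates repeated names into lists of raw values; a sketch with made-up cookie names, not from the test suite:

from cookies import _parse_request

assert _parse_request('Cookie: a=b; a=c; x=y') == {'a': ['b', 'c'], 'x': ['y']}
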
+
+def parse_one_response(line, ignore_bad_cookies=False,
+ ignore_bad_attributes=True):
+ """Turn one 'Set-Cookie:' line into a dict mapping attribute names to
+ attribute values (raw strings).
+ """
+ cookie_dict = {}
+ # Basic validation, extract name/value/attrs-chunk
+ match = Definitions.SET_COOKIE_HEADER_RE.match(line)
+ if not match:
+ if not ignore_bad_cookies:
+ raise InvalidCookieError(data=line)
+ _report_invalid_cookie(line)
+ return None
+ cookie_dict.update({
+ 'name': match.group('name'),
+ 'value': match.group('value')})
+ # Extract individual attrs from the attrs chunk
+ for match in Definitions.ATTR_RE.finditer(match.group('attrs')):
+ captured = dict((k, v) for (k, v) in match.groupdict().items() if v)
+ unrecognized = captured.get('unrecognized', None)
+ if unrecognized:
+ if not ignore_bad_attributes:
+ raise InvalidCookieAttributeError(None, unrecognized,
+ "unrecognized")
+ _report_unknown_attribute(unrecognized)
+ continue
+ # for unary flags
+ for key in ('secure', 'httponly'):
+ if captured.get(key):
+ captured[key] = True
+ # ignore subcomponents of expires - they're still there to avoid doing
+ # two passes
+ timekeys = ('weekday', 'month', 'day', 'hour', 'minute', 'second',
+ 'year')
+ if 'year' in captured:
+ for key in timekeys:
+ del captured[key]
+ elif 'year2' in captured:
+ for key in timekeys:
+ del captured[key + "2"]
+ cookie_dict.update(captured)
+ return cookie_dict
+
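
A sketch of the flat dict produced for a simple Set-Cookie line (attribute names become lower-cased keys; unary flags become True):

from cookies import parse_one_response

assert parse_one_response('Set-Cookie: rocky=road; Path=/cookie; HttpOnly') == {
    'name': 'rocky',
    'value': 'road',
    'path': '/cookie',
    'httponly': True,
}
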
+
+def _parse_response(header_data, ignore_bad_cookies=False,
+ ignore_bad_attributes=True):
+ """Turn one or more lines of 'Set-Cookie:' header data into a list of dicts
+ mapping attribute names to attribute values (as plain strings).
+ """
+ cookie_dicts = []
+ for line in Definitions.EOL.split(header_data.strip()):
+ if not line:
+ break
+ cookie_dict = parse_one_response(
+ line, ignore_bad_cookies=ignore_bad_cookies,
+ ignore_bad_attributes=ignore_bad_attributes)
+ if not cookie_dict:
+ continue
+ cookie_dicts.append(cookie_dict)
+ if not cookie_dicts:
+ if not ignore_bad_cookies:
+ raise InvalidCookieError(data=header_data)
+ _report_invalid_cookie(header_data)
+ return cookie_dicts
+
+
+class Cookie(object):
+ """Provide a simple interface for creating, modifying, and rendering
+ individual HTTP cookies.
+
+ Cookie attributes are represented as normal Python object attributes.
+ Parsing, rendering and validation are reconfigurable per-attribute. The
+ default behavior is intended to comply with RFC 6265, URL-encoding illegal
+ characters where necessary. For example: the default behavior for the
+ Expires attribute is to parse strings as datetimes using parse_date,
+ validate that any set value is a datetime, and render the attribute per the
+ preferred date format in RFC 1123.
+ """
+ def __init__(self, name, value, **kwargs):
+ # If we don't have or can't set a name value, we don't want to return
+ # junk, so we must break control flow. And we don't want to use
+ # InvalidCookieAttributeError, because users may want to catch that to
+ # suppress all complaining about funky attributes.
+ try:
+ self.name = name
+ except InvalidCookieAttributeError:
+ raise InvalidCookieError(message="invalid name for new Cookie",
+ data=name)
+ value = value or ''
+ try:
+ self.value = value
+ except InvalidCookieAttributeError:
+ raise InvalidCookieError(message="invalid value for new Cookie",
+ data=value)
+ if kwargs:
+ self._set_attributes(kwargs, ignore_bad_attributes=False)
+
+ def _set_attributes(self, attrs, ignore_bad_attributes=False):
+ for attr_name, attr_value in attrs.items():
+ if not attr_name in self.attribute_names:
+ if not ignore_bad_attributes:
+ raise InvalidCookieAttributeError(
+ attr_name, attr_value,
+ "unknown cookie attribute '%s'" % attr_name)
+ _report_unknown_attribute(attr_name)
+
+ try:
+ setattr(self, attr_name, attr_value)
+ except InvalidCookieAttributeError as error:
+ if not ignore_bad_attributes:
+ raise
+ _report_invalid_attribute(attr_name, attr_value, error.reason)
+ continue
+
+ @classmethod
+ def from_dict(cls, cookie_dict, ignore_bad_attributes=True):
+ """Construct an instance from a dict of strings to parse.
+
+ The main difference between this and Cookie(name, value, **kwargs) is
+ that the values in the argument to this method are parsed.
+
+ If ignore_bad_attributes=True (default), values which did not parse
+ are set to '' in order to avoid passing bad data.
+ """
+ name = cookie_dict.get('name', None)
+ if not name:
+ raise InvalidCookieError("Cookie must have name")
+ raw_value = cookie_dict.get('value', '')
+        # Absence or failure of the value parser here is fatal; errors in a
+        # name or value that is present should be caught by Cookie.__init__.
+ value = cls.attribute_parsers['value'](raw_value)
+ cookie = cls(name, value)
+
+ # Parse values from serialized formats into objects
+ parsed = {}
+ for key, value in cookie_dict.items():
+ # Don't want to pass name/value to _set_attributes
+ if key in ('name', 'value'):
+ continue
+ parser = cls.attribute_parsers.get(key)
+ if not parser:
+ # Don't let totally unknown attributes pass silently
+ if not ignore_bad_attributes:
+ raise InvalidCookieAttributeError(
+ key, value, "unknown cookie attribute '%s'" % key)
+ _report_unknown_attribute(key)
+ continue
+ try:
+ parsed_value = parser(value)
+ except Exception as e:
+ reason = "did not parse with %r: %r" % (parser, e)
+ if not ignore_bad_attributes:
+ raise InvalidCookieAttributeError(
+ key, value, reason)
+ _report_invalid_attribute(key, value, reason)
+ parsed_value = ''
+ parsed[key] = parsed_value
+
+ # Set the parsed objects (does object validation automatically)
+ cookie._set_attributes(parsed, ignore_bad_attributes)
+ return cookie
+
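+    # Sketch of how from_dict() differs from __init__ (values are invented):
+    #
+    #     Cookie.from_dict({'name': 'a', 'value': 'b',
+    #                       'expires': 'Sun, 06 Nov 1994 08:49:37 GMT'})
+    #     # parses the expires string into a datetime via parse_date, whereas
+    #     # Cookie('a', 'b', expires='Sun, ...') would fail validation because
+    #     # the default Expires validator expects a datetime object.
+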
+ @classmethod
+ def from_string(cls, line, ignore_bad_cookies=False,
+ ignore_bad_attributes=True):
+ "Construct a Cookie object from a line of Set-Cookie header data."
+ cookie_dict = parse_one_response(
+ line, ignore_bad_cookies=ignore_bad_cookies,
+ ignore_bad_attributes=ignore_bad_attributes)
+ if not cookie_dict:
+ return None
+ return cls.from_dict(
+ cookie_dict, ignore_bad_attributes=ignore_bad_attributes)
+
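+    # Sketch for from_string() above (the header line is invented):
+    #
+    #     Cookie.from_string("Set-Cookie: sid=abc; Path=/; HttpOnly")
+    #     # -> equivalent to Cookie('sid', 'abc', path='/', httponly=True)
+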
+ def to_dict(self):
+ this_dict = {'name': self.name, 'value': self.value}
+ this_dict.update(self.attributes())
+ return this_dict
+
+ def validate(self, name, value):
+ """Validate a cookie attribute with an appropriate validator.
+
+ The value comes in already parsed (for example, an expires value
+ should be a datetime). Called automatically when an attribute
+ value is set.
+ """
+ validator = self.attribute_validators.get(name, None)
+ if validator:
+ return True if validator(value) else False
+ return True
+
+ def __setattr__(self, name, value):
+ """Attributes mentioned in attribute_names get validated using
+ functions in attribute_validators, raising an exception on failure.
+ Others get left alone.
+ """
+ if name in self.attribute_names or name in ("name", "value"):
+ if name == 'name' and not value:
+ raise InvalidCookieError(message="Cookies must have names")
+            # Ignore None values, which indicate an unset attribute. Other
+            # invalid values should raise an error so that users of
+            # __setattr__ find out about them.
+ if value is not None:
+ if not self.validate(name, value):
+ raise InvalidCookieAttributeError(
+ name, value, "did not validate with " +
+ repr(self.attribute_validators.get(name)))
+ object.__setattr__(self, name, value)
+
+ def __getattr__(self, name):
+ """Provide for acting like everything in attribute_names is
+ automatically set to None, rather than having to do so explicitly and
+ only at import time.
+ """
+ if name in self.attribute_names:
+ return None
+ raise AttributeError(name)
+
+ def attributes(self):
+ """Export this cookie's attributes as a dict of encoded values.
+
+        This is used by the rendering code, e.g. render_response().
+ """
+ dictionary = {}
+ # Only look for attributes registered in attribute_names.
+ for python_attr_name, cookie_attr_name in self.attribute_names.items():
+ value = getattr(self, python_attr_name)
+ renderer = self.attribute_renderers.get(python_attr_name, None)
+ if renderer:
+ value = renderer(value)
+            # If the renderer returns None, or the value was already None,
+            # the attribute is suppressed entirely and does not appear in
+            # any rendering.
+ if not value:
+ continue
+ dictionary[cookie_attr_name] = value
+ return dictionary
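+    # Sketch of what attributes() produces (values are invented; 'path' has
+    # no renderer registered, so it passes through unchanged, while the
+    # 'httponly' renderer reduces the flag to a bool):
+    #
+    #     Cookie('a', 'b', path='/', httponly=True).attributes()
+    #     # -> {'Path': '/', 'HttpOnly': True}
+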
+
+ def render_request(self):
+ """Render as a string formatted for HTTP request headers
+ (simple 'Cookie: ' style).
+ """
+ # Use whatever renderers are defined for name and value.
+ name, value = self.name, self.value
+ renderer = self.attribute_renderers.get('name', None)
+ if renderer:
+ name = renderer(name)
+ renderer = self.attribute_renderers.get('value', None)
+ if renderer:
+ value = renderer(value)
+ return ''.join((name, "=", value))
+
+ def render_response(self):
+ """Render as a string formatted for HTTP response headers
+ (detailed 'Set-Cookie: ' style).
+ """
+ # Use whatever renderers are defined for name and value.
+ # (.attributes() is responsible for all other rendering.)
+ name, value = self.name, self.value
+ renderer = self.attribute_renderers.get('name', None)
+ if renderer:
+ name = renderer(name)
+ renderer = self.attribute_renderers.get('value', None)
+ if renderer:
+ value = renderer(value)
+ return '; '.join(
+ ['{0}={1}'.format(name, value)] +
+ [key if isinstance(val, bool) else '='.join((key, val))
+ for key, val in self.attributes().items()]
+ )
+
+ def __eq__(self, other):
+ attrs = ['name', 'value'] + list(self.attribute_names.keys())
+ for attr in attrs:
+ mine = getattr(self, attr, None)
+ his = getattr(other, attr, None)
+ if isinstance(mine, bytes):
+ mine = mine.decode('utf-8')
+ if isinstance(his, bytes):
+ his = his.decode('utf-8')
+ if attr == 'domain':
+ if mine and mine[0] == '.':
+ mine = mine[1:]
+ if his and his[0] == '.':
+ his = his[1:]
+ if mine != his:
+ return False
+ return True
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ # Add a name and its proper rendering to this dict to register an attribute
+ # as exportable. The key is the name of the Cookie object attribute in
+ # Python, and it is mapped to the name you want in the output.
+ # 'name' and 'value' should not be here.
+ attribute_names = {
+ 'expires': 'Expires',
+ 'max_age': 'Max-Age',
+ 'domain': 'Domain',
+ 'path': 'Path',
+ 'comment': 'Comment',
+ 'version': 'Version',
+ 'secure': 'Secure',
+ 'httponly': 'HttpOnly',
+ }
+
+ # Register single-parameter functions in this dictionary to have them
+ # used for encoding outgoing values (e.g. as RFC compliant strings,
+ # as base64, encrypted stuff, etc.)
+ # These are called by the property generated by cookie_attribute().
+ # Usually it would be wise not to define a renderer for name, but it is
+ # supported in case there is ever a real need.
+ attribute_renderers = {
+ 'value': encode_cookie_value,
+ 'domain': render_domain,
+ 'expires': render_date,
+ 'max_age': lambda item: str(item) if item is not None else None,
+ 'secure': lambda item: True if item else False,
+ 'httponly': lambda item: True if item else False,
+ 'comment': encode_extension_av,
+ 'version': lambda item: (str(item) if isinstance(item, int)
+ else encode_extension_av(item)),
+ }
+
+ # Register single-parameter functions in this dictionary to have them used
+ # for decoding incoming values for use in the Python API (e.g. into nice
+ # objects, numbers, unicode strings, etc.)
+ # These are called by the property generated by cookie_attribute().
+ attribute_parsers = {
+ 'value': parse_value,
+ 'expires': parse_date,
+ 'domain': parse_domain,
+ 'path': parse_path,
+ 'max_age': lambda item: long(strip_spaces_and_quotes(item)),
+ 'comment': parse_string,
+ 'version': lambda item: int(strip_spaces_and_quotes(item)),
+ 'secure': lambda item: True if item else False,
+ 'httponly': lambda item: True if item else False,
+ }
+
+    # Register single-parameter functions which return a true value for
+    # acceptable values, and a false value for unacceptable ones. An
+    # attribute's validator is run after it is parsed or when it is directly
+    # set, and InvalidCookieAttributeError is raised if validation fails
+    # (unless the validator raises a different exception first).
+ attribute_validators = {
+ 'name': valid_name,
+ 'value': valid_value,
+ 'expires': valid_date,
+ 'domain': valid_domain,
+ 'path': valid_path,
+ 'max_age': valid_max_age,
+ 'comment': valid_value,
+        'version': lambda number: re.match(r"^\d+\Z", str(number)),
+ 'secure': lambda item: item is True or item is False,
+ 'httponly': lambda item: item is True or item is False,
+ }
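+
+    # The four registries above are the extension point: a subclass can add
+    # or override attributes by supplying its own entries. A small sketch
+    # (the 'Priority' attribute is hypothetical, not defined by any RFC this
+    # module implements):
+    #
+    #     class PriorityCookie(Cookie):
+    #         attribute_names = dict(Cookie.attribute_names,
+    #                                priority='Priority')
+    #         attribute_parsers = dict(Cookie.attribute_parsers,
+    #                                  priority=parse_string)
+    #         attribute_renderers = dict(Cookie.attribute_renderers,
+    #                                    priority=encode_extension_av)
+    #         attribute_validators = dict(Cookie.attribute_validators,
+    #                                     priority=valid_value)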
+
+
+class Cookies(dict):
+ """Represent a set of cookies indexed by name.
+
+ This class bundles together a set of Cookie objects and provides
+    a convenient interface to them for parsing and producing cookie headers.
+ In basic operation it acts just like a dict of Cookie objects, but it adds
+ additional convenience methods for the usual cookie tasks: add cookie
+ objects by their names, create new cookie objects under specified names,
+ parse HTTP request or response data into new cookie objects automatically
+ stored in the dict, and render the set in formats suitable for HTTP request
+ or response headers.
+ """
+ DEFAULT_COOKIE_CLASS = Cookie
+
+ def __init__(self, *args, **kwargs):
+ dict.__init__(self)
+ self.all_cookies = []
+ self.cookie_class = kwargs.get(
+ "_cookie_class", self.DEFAULT_COOKIE_CLASS)
+ self.add(*args, **kwargs)
+
+ def add(self, *args, **kwargs):
+ """Add Cookie objects by their names, or create new ones under
+ specified names.
+
+ Any unnamed arguments are interpreted as existing cookies, and
+ are added under the value in their .name attribute. With keyword
+ arguments, the key is interpreted as the cookie name and the
+ value as the UNENCODED value stored in the cookie.
+ """
+        # Only the first cookie under a given name is accessible through the
+        # main dict interface; the others are available via get_all()
+        # (backed by all_cookies).
+ for cookie in args:
+ self.all_cookies.append(cookie)
+ if cookie.name in self:
+ continue
+ self[cookie.name] = cookie
+ for key, value in kwargs.items():
+ cookie = self.cookie_class(key, value)
+ self.all_cookies.append(cookie)
+ if key in self:
+ continue
+ self[key] = cookie
+
+ def get_all(self, key):
+ return [cookie for cookie in self.all_cookies
+ if cookie.name == key]
+
+ def parse_request(self, header_data, ignore_bad_cookies=False):
+ """Parse 'Cookie' header data into Cookie objects, and add them to
+ this Cookies object.
+
+ :arg header_data: string containing only 'Cookie:' request headers or
+ header values (as in CGI/WSGI HTTP_COOKIE); if more than one, they must
+ be separated by CRLF (\\r\\n).
+
+ :arg ignore_bad_cookies: if set, will log each syntactically invalid
+ cookie (at the granularity of semicolon-delimited blocks) rather than
+ raising an exception at the first bad cookie.
+
+ :returns: a Cookies instance containing Cookie objects parsed from
+ header_data.
+
+ .. note::
+ If you want to parse 'Set-Cookie:' response headers, please use
+ parse_response instead. parse_request will happily turn 'expires=frob'
+ into a separate cookie without complaining, according to the grammar.
+ """
+ cookies_dict = _parse_request(
+ header_data, ignore_bad_cookies=ignore_bad_cookies)
+ cookie_objects = []
+ for name, values in cookies_dict.items():
+ for value in values:
+ # Use from_dict to check name and parse value
+ cookie_dict = {'name': name, 'value': value}
+ try:
+ cookie = self.cookie_class.from_dict(cookie_dict)
+ except InvalidCookieError:
+ if not ignore_bad_cookies:
+ raise
+ else:
+ cookie_objects.append(cookie)
+ try:
+ self.add(*cookie_objects)
+ except InvalidCookieError:
+ if not ignore_bad_cookies:
+ raise
+ _report_invalid_cookie(header_data)
+ return self
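+
+    # Sketch (the environ dict and cookie data are invented): parse_request
+    # is convenient with WSGI/CGI, where the raw header value arrives as
+    # HTTP_COOKIE.
+    #
+    #     cookies = Cookies()
+    #     raw = environ.get('HTTP_COOKIE')
+    #     if raw:
+    #         cookies.parse_request(raw)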
+
+ def parse_response(self, header_data, ignore_bad_cookies=False,
+ ignore_bad_attributes=True):
+ """Parse 'Set-Cookie' header data into Cookie objects, and add them to
+ this Cookies object.
+
+ :arg header_data: string containing only 'Set-Cookie:' request headers
+ or their corresponding header values; if more than one, they must be
+ separated by CRLF (\\r\\n).
+
+ :arg ignore_bad_cookies: if set, will log each syntactically invalid
+ cookie rather than raising an exception at the first bad cookie. (This
+ includes cookies which have noncompliant characters in the attribute
+ section).
+
+ :arg ignore_bad_attributes: defaults to True, which means to log but
+ not raise an error when a particular attribute is unrecognized. (This
+ does not necessarily mean that the attribute is invalid, although that
+        would often be the case.) If unset, an error will be raised at the
+ first semicolon-delimited block which has an unknown attribute.
+
+ :returns: a Cookies instance containing Cookie objects parsed from
+ header_data, each with recognized attributes populated.
+
+ .. note::
+ If you want to parse 'Cookie:' headers (i.e., data like what's sent
+ with an HTTP request, which has only name=value pairs and no
+ attributes), then please use parse_request instead. Such lines often
+ contain multiple name=value pairs, and parse_response will throw away
+ the pairs after the first one, which will probably generate errors or
+ confusing behavior. (Since there's no perfect way to automatically
+        determine which kind of parsing to do, you have to tell it manually
+        by choosing correctly between parse_request and parse_response.)
+ """
+ cookie_dicts = _parse_response(
+ header_data,
+ ignore_bad_cookies=ignore_bad_cookies,
+ ignore_bad_attributes=ignore_bad_attributes)
+ cookie_objects = []
+ for cookie_dict in cookie_dicts:
+ cookie = self.cookie_class.from_dict(cookie_dict)
+ cookie_objects.append(cookie)
+ self.add(*cookie_objects)
+ return self
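+
+    # Sketch (header lines are invented): multiple Set-Cookie values can be
+    # handed in at once, separated by CRLF.
+    #
+    #     cookies = Cookies()
+    #     cookies.parse_response(
+    #         "sid=abc; Path=/; HttpOnly\r\ntheme=dark; Domain=example.com")
+    #     sorted(cookies.keys())    # -> ['sid', 'theme']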
+
+ @classmethod
+ def from_request(cls, header_data, ignore_bad_cookies=False):
+ "Construct a Cookies object from request header data."
+ cookies = cls()
+ cookies.parse_request(
+ header_data, ignore_bad_cookies=ignore_bad_cookies)
+ return cookies
+
+ @classmethod
+ def from_response(cls, header_data, ignore_bad_cookies=False,
+ ignore_bad_attributes=True):
+ "Construct a Cookies object from response header data."
+ cookies = cls()
+ cookies.parse_response(
+ header_data,
+ ignore_bad_cookies=ignore_bad_cookies,
+ ignore_bad_attributes=ignore_bad_attributes)
+ return cookies
+
+ def render_request(self, sort=True):
+ """Render the dict's Cookie objects into a string formatted for HTTP
+ request headers (simple 'Cookie: ' style).
+ """
+ if not sort:
+ return ("; ".join(
+ cookie.render_request() for cookie in self.values()))
+ return ("; ".join(sorted(
+ cookie.render_request() for cookie in self.values())))
+
+ def render_response(self, sort=True):
+ """Render the dict's Cookie objects into list of strings formatted for
+ HTTP response headers (detailed 'Set-Cookie: ' style).
+ """
+ rendered = [cookie.render_response() for cookie in self.values()]
+ return rendered if not sort else sorted(rendered)
+
+ def __repr__(self):
+ return "Cookies(%s)" % ', '.join("%s=%r" % (name, cookie.value) for
+ (name, cookie) in self.items())
+
+ def __eq__(self, other):
+ """Test if a Cookies object is globally 'equal' to another one by
+ seeing if it looks like a dict such that d[k] == self[k]. This depends
+ on each Cookie object reporting its equality correctly.
+ """
+ if not hasattr(other, "keys"):
+ return False
+ try:
+ keys = sorted(set(self.keys()) | set(other.keys()))
+ for key in keys:
+                if key not in self:
+                    return False
+                if key not in other:
+                    return False
+ if self[key] != other[key]:
+ return False
+ except (TypeError, KeyError):
+ raise
+ return True
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
diff --git a/third_party/python/cookies/setup.cfg b/third_party/python/cookies/setup.cfg
new file mode 100644
index 0000000000..9a777d0253
--- /dev/null
+++ b/third_party/python/cookies/setup.cfg
@@ -0,0 +1,8 @@
+[wheel]
+universal = 1
+
+[egg_info]
+tag_date = 0
+tag_build =
+tag_svn_revision = 0
+
diff --git a/third_party/python/cookies/setup.py b/third_party/python/cookies/setup.py
new file mode 100644
index 0000000000..d2554997e6
--- /dev/null
+++ b/third_party/python/cookies/setup.py
@@ -0,0 +1,45 @@
+from setuptools import setup, Command
+from cookies import __version__
+
+class Test(Command):
+ user_options = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ try:
+ import pytest
+ except ImportError:
+ raise AssertionError("Install py.test to run the tests")
+ import sys, subprocess
+ errno = subprocess.call([sys.executable, '-m', 'py.test'])
+ raise SystemExit(errno)
+
+setup(
+ name="cookies",
+ version=__version__,
+ author="Sasha Hart",
+ author_email="s@sashahart.net",
+ url="https://github.com/sashahart/cookies",
+ py_modules=['cookies', 'test_cookies'],
+ description="Friendlier RFC 6265-compliant cookie parser/renderer",
+ long_description=open('README').read(),
+ classifiers=[
+ "Development Status :: 4 - Beta",
+ "Environment :: Other Environment",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: MIT License",
+ "Programming Language :: Python :: 2.6",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.2",
+ "Programming Language :: Python :: 3.3",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ ],
+    cmdclass={'test': Test},
+)
diff --git a/third_party/python/cookies/test_cookies.py b/third_party/python/cookies/test_cookies.py
new file mode 100644
index 0000000000..2197916eff
--- /dev/null
+++ b/third_party/python/cookies/test_cookies.py
@@ -0,0 +1,2447 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""Tests for code in cookies.py.
+"""
+from __future__ import unicode_literals
+import re
+import sys
+import logging
+if sys.version_info < (3, 0, 0):
+ from urllib import quote, unquote
+else:
+ from urllib.parse import quote, unquote
+ unichr = chr
+ basestring = str
+from datetime import datetime, tzinfo, timedelta
+from pytest import raises
+
+from cookies import (
+ InvalidCookieError, InvalidCookieAttributeError,
+ Definitions,
+ Cookie, Cookies,
+ render_date, parse_date,
+ parse_string, parse_value, parse_domain, parse_path,
+ parse_one_response,
+ encode_cookie_value, encode_extension_av,
+ valid_value, valid_date, valid_domain, valid_path,
+ strip_spaces_and_quotes, _total_seconds,
+ )
+
+
+class RFC1034:
+ """Definitions from RFC 1034: 'DOMAIN NAMES - CONCEPTS AND FACILITIES'
+ section 3.5, as cited in RFC 6265 4.1.1.
+ """
+ digit = "[0-9]"
+ letter = "[A-Za-z]"
+ let_dig = "[0-9A-Za-z]"
+ let_dig_hyp = "[0-9A-Za-z\-]"
+ assert "\\" in let_dig_hyp
+ ldh_str = "%s+" % let_dig_hyp
+ label = "(?:%s|%s|%s)" % (
+ letter,
+ letter + let_dig,
+ letter + ldh_str + let_dig)
+ subdomain = "(?:%s\.)*(?:%s)" % (label, label)
+ domain = "( |%s)" % (subdomain)
+
+ def test_sanity(self):
+ "Basic smoke tests that definitions transcribed OK"
+ match = re.compile("^%s\Z" % self.domain).match
+ assert match("A.ISI.EDU")
+ assert match("XX.LCS.MIT.EDU")
+ assert match("SRI-NIC.ARPA")
+ assert not match("foo+bar")
+ assert match("foo.com")
+ assert match("foo9.com")
+ assert not match("9foo.com")
+ assert not match("26.0.0.73.COM")
+ assert not match(".woo.com")
+ assert not match("blop.foo.")
+ assert match("foo-bar.com")
+ assert not match("-foo.com")
+ assert not match("foo.com-")
+
+
+class RFC1123:
+ """Definitions from RFC 1123: "Requirements for Internet Hosts --
+ Application and Support" section 2.1, cited in RFC 6265 section
+ 4.1.1 as an update to RFC 1034.
+ Here this is really just used for testing Domain attribute values.
+ """
+ # Changed per 2.1 (similar to some changes in RFC 1101)
+ # this implementation is a bit simpler...
+ # n.b.: there are length limits in the real thing
+ label = "{let_dig}(?:(?:{let_dig_hyp}+)?{let_dig})?".format(
+ let_dig=RFC1034.let_dig, let_dig_hyp=RFC1034.let_dig_hyp)
+ subdomain = "(?:%s\.)*(?:%s)" % (label, label)
+ domain = "( |%s)" % (subdomain)
+
+ def test_sanity(self):
+ "Basic smoke tests that definitions transcribed OK"
+ match = re.compile("^%s\Z" % self.domain).match
+ assert match("A.ISI.EDU")
+ assert match("XX.LCS.MIT.EDU")
+ assert match("SRI-NIC.ARPA")
+ assert not match("foo+bar")
+ assert match("foo.com")
+ assert match("9foo.com")
+ assert match("3Com.COM")
+ assert match("3M.COM")
+
+
+class RFC2616:
+ """Definitions from RFC 2616 section 2.2, as cited in RFC 6265 4.1.1
+ """
+ SEPARATORS = '()<>@,;:\\"/[]?={} \t'
+
+
+class RFC5234:
+ """Basic definitions per RFC 5234: 'Augmented BNF for Syntax
+ Specifications'
+ """
+ CHAR = "".join([chr(i) for i in range(0, 127 + 1)])
+ CTL = "".join([chr(i) for i in range(0, 31 + 1)]) + "\x7f"
+ # this isn't in the RFC but it can be handy
+ NONCTL = "".join([chr(i) for i in range(32, 127)])
+ # this is what the RFC says about a token more or less verbatim
+ TOKEN = "".join(sorted(set(NONCTL) - set(RFC2616.SEPARATORS)))
+
+
+class FixedOffsetTz(tzinfo):
+ """A tzinfo subclass for attaching to datetime objects.
+
+ Used for various tests involving date parsing, since Python stdlib does not
+ obviously provide tzinfo subclasses and testing this module only requires
+ a very simple one.
+ """
+ def __init__(self, offset):
+ # tzinfo.utcoffset() throws an error for sub-minute amounts,
+ # so round
+ minutes = round(offset / 60.0, 0)
+ self.__offset = timedelta(minutes=minutes)
+
+ def utcoffset(self, dt):
+ return self.__offset
+
+ def tzname(self, dt):
+ return "FixedOffsetTz" + str(self.__offset.seconds)
+
+ def dst(self, dt):
+ return timedelta(0)
+
+
+class TestInvalidCookieError(object):
+ """Exercise the trivial behavior of the InvalidCookieError exception.
+ """
+ def test_simple(self):
+ "This be the test"
+ def exception(data):
+ "Gather an InvalidCookieError exception"
+ try:
+ raise InvalidCookieError(data)
+ except InvalidCookieError as exception:
+ return exception
+ # other exceptions will pass through
+ return None
+ assert exception("no donut").data == "no donut"
+
+ # Spot check for obvious junk in loggable representations.
+ e = exception("yay\x00whee")
+ assert "\x00" not in repr(e)
+ assert "\x00" not in str(e)
+ assert "yaywhee" not in repr(e)
+ assert "yaywhee" not in str(e)
+ assert "\n" not in repr(exception("foo\nbar"))
+
+
+class TestInvalidCookieAttributeError(object):
+ """Exercise the trivial behavior of InvalidCookieAttributeError.
+ """
+ def exception(self, *args, **kwargs):
+ "Generate an InvalidCookieAttributeError exception naturally"
+ try:
+ raise InvalidCookieAttributeError(*args, **kwargs)
+ except InvalidCookieAttributeError as exception:
+ return exception
+ return None
+
+ def test_simple(self):
+ e = self.exception("foo", "bar")
+ assert e.name == "foo"
+ assert e.value == "bar"
+
+ def test_junk_in_loggables(self):
+ # Spot check for obvious junk in loggable representations.
+ # This isn't completely idle: for example, nulls are ignored in
+ # %-formatted text, and this could be very misleading
+ e = self.exception("ya\x00y", "whee")
+ assert "\x00" not in repr(e)
+ assert "\x00" not in str(e)
+ assert "yay" not in repr(e)
+ assert "yay" not in str(e)
+
+ e = self.exception("whee", "ya\x00y")
+ assert "\x00" not in repr(e)
+ assert "\x00" not in str(e)
+ assert "yay" not in repr(e)
+ assert "yay" not in str(e)
+
+ assert "\n" not in repr(self.exception("yay", "foo\nbar"))
+ assert "\n" not in repr(self.exception("foo\nbar", "yay"))
+
+ def test_no_name(self):
+ # not recommended to do this, but we want to handle it if people do
+ e = self.exception(None, "stuff")
+ assert e.name == None
+ assert e.value == "stuff"
+ assert e.reason == None
+ assert 'stuff' in str(e)
+
+
+class TestDefinitions(object):
+ """Test the patterns in cookies.Definitions against specs.
+ """
+ def test_cookie_name(self, check_unicode=False):
+ """Check COOKIE_NAME against the token definition in RFC 2616 2.2 (as
+ cited in RFC 6265):
+
+ token = 1*<any CHAR except CTLs or separators>
+ separators = "(" | ")" | "<" | ">" | "@"
+ | "," | ";" | ":" | "\" | <">
+ | "/" | "[" | "]" | "?" | "="
+ | "{" | "}" | SP | HT
+
+ (Definitions.COOKIE_NAME is regex-ready while RFC5234.TOKEN is more
+ clearly related to the RFC; they should be functionally the same)
+ """
+ regex = Definitions.COOKIE_NAME_RE
+ assert regex.match(RFC5234.TOKEN)
+ assert not regex.match(RFC5234.NONCTL)
+ for c in RFC5234.CTL:
+ assert not regex.match(c)
+ for c in RFC2616.SEPARATORS:
+            # Skip one special case: a number of Java and PHP apps have used
+            # colons in cookie names. While this is unwise, we don't want to
+            # choke on it by default, since it may be the single biggest cause
+            # of bugs filed against Python's cookie libraries.
+ if c == ':':
+ continue
+ assert not regex.match(c)
+ # Unicode over 7 bit ASCII shouldn't match, but this takes a while
+ if check_unicode:
+ for i in range(127, 0x10FFFF + 1):
+ assert not regex.match(unichr(i))
+
+ def test_cookie_octet(self):
+ """Check COOKIE_OCTET against the definition in RFC 6265:
+
+ cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
+ ; US-ASCII characters excluding CTLs,
+ ; whitespace DQUOTE, comma, semicolon,
+ ; and backslash
+ """
+ match = re.compile("^[%s]+\Z" % Definitions.COOKIE_OCTET).match
+ for c in RFC5234.CTL:
+ assert not match(c)
+ assert not match("a%sb" % c)
+ # suspect RFC typoed 'whitespace, DQUOTE' as 'whitespace DQUOTE'
+ assert not match(' ')
+ assert not match('"')
+ assert not match(',')
+ assert not match(';')
+ assert not match('\\')
+ # the spec above DOES include =.-
+ assert match("=")
+ assert match(".")
+ assert match("-")
+
+ # Check that everything else in CHAR works.
+ safe_cookie_octet = "".join(sorted(
+ set(RFC5234.NONCTL) - set(' ",;\\')))
+ assert match(safe_cookie_octet)
+
+ def test_set_cookie_header(self):
+ """Smoke test SET_COOKIE_HEADER (used to compile SET_COOKIE_HEADER_RE)
+ against HEADER_CASES.
+ """
+ # should match if expectation is not an error, shouldn't match if it is
+ # an error. set-cookie-header is for responses not requests, so use
+ # response expectation rather than request expectation
+ match = re.compile(Definitions.SET_COOKIE_HEADER).match
+ for case in HEADER_CASES:
+ arg, kwargs, request_result, expected = case
+ this_match = match(arg)
+ if expected and not isinstance(expected, type):
+ assert this_match, "should match as response: " + repr(arg)
+ else:
+ if not request_result:
+ assert not this_match, \
+ "should not match as response: " + repr(arg)
+
+ def test_cookie_cases(self):
+ """Smoke test COOKIE_HEADER (used to compile COOKIE_HEADER_RE) against
+ HEADER_CASES.
+ """
+ # should match if expectation is not an error, shouldn't match if it is
+ # an error. cookie-header is for requests not responses, so use request
+ # expectation rather than response expectation
+ match = re.compile(Definitions.COOKIE).match
+ for case in HEADER_CASES:
+ arg, kwargs, expected, response_result = case
+ this_match = match(arg)
+ if expected and not isinstance(expected, type):
+ assert this_match, "should match as request: " + repr(arg)
+ else:
+ if not response_result:
+ assert not this_match, \
+ "should not match as request: " + repr(arg)
+
+ def test_cookie_pattern(self):
+ """Smoke test Definitions.COOKIE (used to compile COOKIE_RE) against
+ the grammar for cookie-header as in RFC 6265.
+
+ cookie-header = "Cookie:" OWS cookie-string OWS
+ cookie-string = cookie-pair *( ";" SP cookie-pair )
+ cookie-pair = cookie-name "=" cookie-value
+ cookie-name = token
+ cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
+
+ cookie-name and cookie-value are not broken apart for separate
+ testing, as the former is essentially just token and the latter
+ essentially just cookie-octet.
+ """
+ match = re.compile(Definitions.COOKIE).match
+ # cookie-pair behavior around =
+ assert match("foo").group('invalid')
+ assert match("foo=bar")
+ # Looks dumb, but this is legal because "=" is valid for cookie-octet.
+ assert match("a=b=c")
+ # DQUOTE *cookie-octet DQUOTE - allowed
+ assert match('foo="bar"')
+
+ # for testing on the contents of cookie name and cookie value,
+ # see test_cookie_name and test_cookie_octet.
+
+ regex = re.compile(Definitions.COOKIE)
+ correct = [
+ ('foo', 'yar', ''),
+ ('bar', 'eeg', ''),
+ ('baz', 'wog', ''),
+ ('frob', 'laz', '')]
+
+ def assert_correct(s):
+ #naive = re.findall(" *([^;]+)=([^;]+) *(?:;|\Z)", s)
+ result = regex.findall(s)
+ assert result == correct
+ # normal-looking case should work normally
+ assert_correct("foo=yar; bar=eeg; baz=wog; frob=laz")
+ # forgive lack of whitespace as long as semicolons are explicit
+ assert_correct("foo=yar;bar=eeg;baz=wog;frob=laz")
+ # forgive too much whitespace AROUND values
+ assert_correct(" foo=yar; bar=eeg; baz=wog; frob=laz ")
+
+ # Actually literal spaces are NOT allowed in cookie values per RFC 6265
+ # and it is UNWISE to put them in without escaping. But we want the
+ # flexibility to let this pass with a warning, because this is the kind
+ # of bad idea which is very common and results in loud complaining on
+ # issue trackers on the grounds that PHP does it or something. So the
+ # regex is weakened, but the presence of a space should still be at
+ # least noted, and an exception must be raised if = is also used
+ # - because that would often indicate the loss of cookies due to
+ # forgotten separator, as in "foo=yar bar=eeg baz=wog frob=laz".
+ assert regex.findall("foo=yar; bar=eeg; baz=wog; frob=l az") == [
+ ('foo', 'yar', ''),
+ ('bar', 'eeg', ''),
+ ('baz', 'wog', ''),
+ # handle invalid internal whitespace.
+ ('frob', 'l az', '')
+ ]
+
+ # Without semicolons or inside semicolon-delimited blocks, the part
+ # before the first = should be interpreted as a name, and the rest as
+ # a value (since = is not forbidden for cookie values). Thus:
+ result = regex.findall("foo=yarbar=eegbaz=wogfrob=laz")
+ assert result[0][0] == 'foo'
+ assert result[0][1] == 'yarbar=eegbaz=wogfrob=laz'
+ assert result[0][2] == ''
+
+ # Make some bad values and see that it's handled reasonably.
+ # (related to http://bugs.python.org/issue2988)
+ # don't test on semicolon because the regexp stops there, reasonably.
+ for c in '\x00",\\':
+ nasty = "foo=yar" + c + "bar"
+ result = regex.findall(nasty + "; baz=bam")
+ # whole bad pair reported in the 'invalid' group (the third one)
+ assert result[0][2] == nasty
+ # kept on truckin' and got the other one just fine.
+ assert result[1] == ('baz', 'bam', '')
+ # same thing if the good one is first and the bad one second
+ result = regex.findall("baz=bam; " + nasty)
+ assert result[0] == ('baz', 'bam', '')
+ assert result[1][2] == ' ' + nasty
+
+ def test_extension_av(self, check_unicode=False):
+ """Test Definitions.EXTENSION_AV against extension-av per RFC 6265.
+
+ extension-av = <any CHAR except CTLs or ";">
+ """
+ # This is how it's defined in RFC 6265, just about verbatim.
+ extension_av_explicit = "".join(sorted(
+ set(RFC5234.CHAR) - set(RFC5234.CTL + ";")))
+ # ... that should turn out to be the same as Definitions.EXTENSION_AV
+ match = re.compile("^([%s]+)\Z" % Definitions.EXTENSION_AV).match
+ # Verify I didn't mess up on escaping here first
+ assert match(r']')
+ assert match(r'[')
+ assert match(r"'")
+ assert match(r'"')
+ assert match("\\")
+ assert match(extension_av_explicit)
+ # There should be some CHAR not matched
+ assert not match(RFC5234.CHAR)
+ # Every single CTL should not match
+ for c in RFC5234.CTL + ";":
+ assert not match(c)
+ # Unicode over 7 bit ASCII shouldn't match, but this takes a while
+ if check_unicode:
+ for i in range(127, 0x10FFFF + 1):
+ assert not match(unichr(i))
+
+ def test_max_age_av(self):
+ "Smoke test Definitions.MAX_AGE_AV"
+ # Not a lot to this, it's just digits
+ match = re.compile("^%s\Z" % Definitions.MAX_AGE_AV).match
+ assert not match("")
+ assert not match("Whiskers")
+ assert not match("Max-Headroom=992")
+ for c in "123456789":
+ assert not match(c)
+ assert match("Max-Age=%s" % c)
+ assert match("Max-Age=0")
+ for c in RFC5234.CHAR:
+ assert not match(c)
+
+ def test_label(self, check_unicode=False):
+ "Test label, as used in Domain attribute"
+ match = re.compile("^(%s)\Z" % Definitions.LABEL).match
+ for i in range(0, 10):
+ assert match(str(i))
+ assert not match(".")
+ assert not match(",")
+ for c in RFC5234.CTL:
+ assert not match("a%sb" % c)
+ assert not match("%sb" % c)
+ assert not match("a%s" % c)
+ # Unicode over 7 bit ASCII shouldn't match, but this takes a while
+ if check_unicode:
+ for i in range(127, 0x10FFFF + 1):
+ assert not match(unichr(i))
+
+ def test_domain_av(self):
+ "Smoke test Definitions.DOMAIN_AV"
+ # This is basically just RFC1123.subdomain, which has its own
+ # assertions in the class definition
+ bad_domains = [
+ ""
+ ]
+ good_domains = [
+ "foobar.com",
+ "foo-bar.com",
+ "3Com.COM"
+ ]
+
+ # First test DOMAIN via DOMAIN_RE
+ match = Definitions.DOMAIN_RE.match
+ for domain in bad_domains:
+ assert not match(domain)
+ for domain in good_domains:
+ assert match(domain)
+
+ # Now same tests through DOMAIN_AV
+ match = re.compile("^%s\Z" % Definitions.DOMAIN_AV).match
+ for domain in bad_domains:
+ assert not match("Domain=%s" % domain)
+ for domain in good_domains:
+ assert not match(domain)
+ assert match("Domain=%s" % domain)
+ # This is NOT valid and shouldn't be tolerated in cookies we create,
+ # but it should be tolerated in existing cookies since people do it;
+ # interpreted by stripping the initial .
+ assert match("Domain=.foo.net")
+
+ def test_path_av(self):
+ "Smoke test PATH and PATH_AV"
+ # This is basically just EXTENSION_AV, see test_extension_av
+ bad_paths = [
+ ""
+ ]
+ good_paths = [
+ "/",
+ "/foo",
+ "/foo/bar"
+ ]
+ match = Definitions.PATH_RE.match
+ for path in bad_paths:
+ assert not match(path)
+ for path in good_paths:
+ assert match(path)
+
+ match = re.compile("^%s\Z" % Definitions.PATH_AV).match
+ for path in bad_paths:
+ assert not match("Path=%s" % path)
+ for path in good_paths:
+ assert not match(path)
+ assert match("Path=%s" % path)
+
+ def test_months(self):
+ """Sanity checks on MONTH_SHORT and MONTH_LONG month name recognizers.
+
+ The RFCs set these in stone, they aren't locale-dependent.
+ """
+ match = re.compile(Definitions.MONTH_SHORT).match
+ assert match("Jan")
+ assert match("Feb")
+ assert match("Mar")
+ assert match("Apr")
+ assert match("May")
+ assert match("Jun")
+ assert match("Jul")
+ assert match("Aug")
+ assert match("Sep")
+ assert match("Oct")
+ assert match("Nov")
+ assert match("Dec")
+
+ match = re.compile(Definitions.MONTH_LONG).match
+ assert match("January")
+ assert match("February")
+ assert match("March")
+ assert match("April")
+ assert match("May")
+ assert match("June")
+ assert match("July")
+ assert match("August")
+ assert match("September")
+ assert match("October")
+ assert match("November")
+ assert match("December")
+
+ def test_weekdays(self):
+ """Sanity check on WEEKDAY_SHORT and WEEKDAY_LONG weekday
+ recognizers.
+
+ The RFCs set these in stone, they aren't locale-dependent.
+ """
+ match = re.compile(Definitions.WEEKDAY_SHORT).match
+ assert match("Mon")
+ assert match("Tue")
+ assert match("Wed")
+ assert match("Thu")
+ assert match("Fri")
+ assert match("Sat")
+ assert match("Sun")
+
+ match = re.compile(Definitions.WEEKDAY_LONG).match
+ assert match("Monday")
+ assert match("Tuesday")
+ assert match("Wednesday")
+ assert match("Thursday")
+ assert match("Friday")
+ assert match("Saturday")
+ assert match("Sunday")
+
+ def test_day_of_month(self):
+ """Check that the DAY_OF_MONTH regex allows all actual days, but
+ excludes obviously wrong ones (so they are tossed in the first pass).
+ """
+ match = re.compile(Definitions.DAY_OF_MONTH).match
+ for day in ['01', '02', '03', '04', '05', '06', '07', '08', '09', ' 1',
+ ' 2', ' 3', ' 4', ' 5', ' 6', ' 7', ' 8', ' 9', '1', '2', '3',
+ '4', '5', '6', '7', '8', '9'] \
+ + [str(i) for i in range(10, 32)]:
+ assert match(day)
+ assert not match("0")
+ assert not match("00")
+ assert not match("000")
+ assert not match("111")
+ assert not match("99")
+ assert not match("41")
+
+ def test_expires_av(self):
+ "Smoke test the EXPIRES_AV regex pattern"
+        # Definitions.EXPIRES_AV is actually pretty bad, because it's a
+        # disaster to test three different date formats with lots of
+        # definition dependencies, and odds are good that other
+        # implementations are loose - so this parser is also loose.
+        # "Liberal in what you accept, conservative in what you produce."
+ match = re.compile("^%s\Z" % Definitions.EXPIRES_AV).match
+ assert not match("")
+ assert not match("Expires=")
+
+ assert match("Expires=Tue, 15-Jan-2013 21:47:38 GMT")
+ assert match("Expires=Sun, 06 Nov 1994 08:49:37 GMT")
+ assert match("Expires=Sunday, 06-Nov-94 08:49:37 GMT")
+ assert match("Expires=Sun Nov 6 08:49:37 1994")
+ # attributed to Netscape in RFC 2109 10.1.2
+ assert match("Expires=Mon, 13-Jun-93 10:00:00 GMT")
+
+ assert not match("Expires=S9n, 06 Nov 1994 08:49:37 GMT")
+ assert not match("Expires=Sun3ay, 06-Nov-94 08:49:37 GMT")
+ assert not match("Expires=S9n Nov 6 08:49:37 1994")
+
+ assert not match("Expires=Sun, A6 Nov 1994 08:49:37 GMT")
+ assert not match("Expires=Sunday, 0B-Nov-94 08:49:37 GMT")
+ assert not match("Expires=Sun No8 6 08:49:37 1994")
+
+ assert not match("Expires=Sun, 06 N3v 1994 08:49:37 GMT")
+ assert not match("Expires=Sunday, 06-N8v-94 08:49:37 GMT")
+ assert not match("Expires=Sun Nov A 08:49:37 1994")
+
+ assert not match("Expires=Sun, 06 Nov 1B94 08:49:37 GMT")
+ assert not match("Expires=Sunday, 06-Nov-C4 08:49:37 GMT")
+ assert not match("Expires=Sun Nov 6 08:49:37 1Z94")
+
+ def test_no_obvious_need_for_disjunctive_attr_pattern(self):
+ """Smoke test the assumption that extension-av is a reasonable set of
+ chars for all attrs (and thus that there is no reason to use a fancy
+ disjunctive pattern in the findall that splits out the attrs, freeing
+ us to use EXTENSION_AV instead).
+
+        If this works, then ATTR should work.
+ """
+ match = re.compile("^[%s]+\Z" % Definitions.EXTENSION_AV).match
+ assert match("Expires=Sun, 06 Nov 1994 08:49:37 GMT")
+ assert match("Expires=Sunday, 06-Nov-94 08:49:37 GMT")
+ assert match("Expires=Sun Nov 6 08:49:37 1994")
+ assert match("Max-Age=14658240962")
+ assert match("Domain=FoO.b9ar.baz")
+ assert match("Path=/flakes")
+ assert match("Secure")
+ assert match("HttpOnly")
+
+ def test_attr(self):
+ """Smoke test ATTR, used to compile ATTR_RE.
+ """
+ match = re.compile(Definitions.ATTR).match
+
+ def recognized(pattern):
+ "macro for seeing if ATTR recognized something"
+ this_match = match(pattern)
+ if not this_match:
+ return False
+ groupdict = this_match.groupdict()
+ if groupdict['unrecognized']:
+ return False
+ return True
+
+ # Quickly test that a batch of attributes matching the explicitly
+ # recognized patterns make it through without anything in the
+ # 'unrecognized' catchall capture group.
+ for pattern in [
+ "Secure",
+ "HttpOnly",
+ "Max-Age=9523052",
+ "Domain=frobble.com",
+ "Domain=3Com.COM",
+ "Path=/",
+ "Expires=Wed, 09 Jun 2021 10:18:14 GMT",
+ ]:
+ assert recognized(pattern)
+
+ # Anything else is in extension-av and that's very broad;
+ # see test_extension_av for that test.
+ # This is only about the recognized ones.
+ assert not recognized("Frob=mugmannary")
+ assert not recognized("Fqjewp@1j5j510923")
+ assert not recognized(";aqjwe")
+ assert not recognized("ETJpqw;fjw")
+ assert not recognized("fjq;")
+ assert not recognized("Expires=\x00")
+
+ # Verify interface from regexp for extracting values isn't changed;
+ # a little rigidity here is a good idea
+ expires = "Wed, 09 Jun 2021 10:18:14 GMT"
+ m = match("Expires=%s" % expires)
+ assert m.group("expires") == expires
+
+ max_age = "233951698"
+ m = match("Max-Age=%s" % max_age)
+ assert m.group("max_age") == max_age
+
+ domain = "flarp"
+ m = match("Domain=%s" % domain)
+ assert m.group("domain") == domain
+
+ path = "2903"
+ m = match("Path=%s" % path)
+ assert m.group("path") == path
+
+ m = match("Secure")
+ assert m.group("secure")
+ assert not m.group("httponly")
+
+ m = match("HttpOnly")
+ assert not m.group("secure")
+ assert m.group("httponly")
+
+ def test_date_accepts_formats(self):
+ """Check that DATE matches most formats used in Expires: headers,
+ and explain what the different formats are about.
+
+ The value extraction of this regexp is more comprehensively exercised
+ by test_date_parsing().
+ """
+ # Date formats vary widely in the wild. Even the standards vary widely.
+ # This series of tests does spot-checks with instances of formats that
+ # it makes sense to support. In the following comments, each format is
+ # discussed and the rationale for the overall regexp is developed.
+
+ match = re.compile(Definitions.DATE).match
+
+ # The most common formats, related to the old Netscape cookie spec
+ # (NCSP), are supposed to follow this template:
+ #
+ # Wdy, DD-Mon-YYYY HH:MM:SS GMT
+ #
+ # (where 'Wdy' is a short weekday, and 'Mon' is a named month).
+ assert match("Mon, 20-Jan-1994 00:00:00 GMT")
+
+ # Similarly, RFC 850 proposes this format:
+ #
+ # Weekday, DD-Mon-YY HH:MM:SS GMT
+ #
+ # (with a long-form weekday and a 2-digit year).
+ assert match("Tuesday, 12-Feb-92 23:25:42 GMT")
+
+ # RFC 1036 obsoleted the RFC 850 format:
+ #
+ # Wdy, DD Mon YY HH:MM:SS GMT
+ #
+ # (shortening the weekday format and changing dashes to spaces).
+ assert match("Wed, 30 Mar 92 13:16:12 GMT")
+
+ # RFC 6265 cites a definition from RFC 2616, which uses the RFC 1123
+ # definition but limits it to GMT (consonant with NCSP). RFC 1123
+ # expanded RFC 822 with 2-4 digit years (more permissive than NCSP);
+ # RFC 822 left weekday and seconds as optional, and a day of 1-2 digits
+ # (all more permissive than NCSP). Giving something like this:
+ #
+ # [Wdy, ][D]D Mon [YY]YY HH:MM[:SS] GMT
+ #
+ assert match("Thu, 3 Apr 91 12:46 GMT")
+ # No weekday, two digit year.
+ assert match("13 Apr 91 12:46 GMT")
+
+ # Similarly, there is RFC 2822:
+ #
+ # [Wdy, ][D]D Mon YYYY HH:MM[:SS] GMT
+ # (which only differs in requiring a 4-digit year, where RFC 1123
+ # permits 2 or 3 digit years).
+ assert match("13 Apr 1991 12:46 GMT")
+ assert match("Wed, 13 Apr 1991 12:46 GMT")
+
+ # The generalized format given above encompasses RFC 1036 and RFC 2822
+ # and would encompass NCSP except for the dashes; allowing long-form
+ # weekdays also encompasses the format proposed in RFC 850. Taken
+ # together, this should cover something like 99% of Expires values
+ # (see, e.g., https://bugzilla.mozilla.org/show_bug.cgi?id=610218)
+
+ # Finally, we also want to support asctime format, as mentioned in RFC
+ # 850 and RFC 2616 and occasionally seen in the wild:
+ # Wdy Mon DD HH:MM:SS YYYY
+ # e.g.: Sun Nov 6 08:49:37 1994
+ assert match("Sun Nov 6 08:49:37 1994")
+ assert match("Sun Nov 26 08:49:37 1994")
+ # Reportedly someone has tacked 'GMT' on to the end of an asctime -
+ # although this is not RFC valid, it is pretty harmless
+ assert match("Sun Nov 26 08:49:37 1994 GMT")
+
+        # The positive matches above don't count unless we also show that
+        # DATE isn't trivially matching .* or similar. This isn't intended to
+        # be a thorough test, just to rule out the obvious failure mode. See
+        # test_date() for a more thorough workout of the whole parse and
+        # render mechanisms.
+ assert not match("")
+ assert not match(" ")
+ assert not match("wobbly")
+ assert not match("Mon")
+ assert not match("Mon, 20")
+ assert not match("Mon, 20 Jan")
+ assert not match("Mon, 20,Jan,1994 00:00:00 GMT")
+ assert not match("Tuesday, 12-Feb-992 23:25:42 GMT")
+ assert not match("Wed, 30 Mar 92 13:16:1210 GMT")
+ assert not match("Wed, 30 Mar 92 13:16:12:10 GMT")
+ assert not match("Thu, 3 Apr 91 12:461 GMT")
+
+ def test_eol(self):
+ """Test that the simple EOL regex works basically as expected.
+ """
+ split = Definitions.EOL.split
+ assert split("foo\nbar") == ["foo", "bar"]
+ assert split("foo\r\nbar") == ["foo", "bar"]
+ letters = list("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
+ assert split("\n".join(letters)) == letters
+ assert split("\r\n".join(letters)) == letters
+
+ def test_compiled(self):
+ """Check that certain patterns are present as compiled regexps
+ """
+ re_type = type(re.compile(''))
+
+ def present(name):
+ "Macro for testing existence of an re in Definitions"
+ item = getattr(Definitions, name)
+ return item and isinstance(item, re_type)
+
+ assert present("COOKIE_NAME_RE")
+ assert present("COOKIE_RE")
+ assert present("SET_COOKIE_HEADER_RE")
+ assert present("ATTR_RE")
+ assert present("DATE_RE")
+ assert present("EOL")
+
+
+def _test_init(cls, args, kwargs, expected):
+ "Core instance test function for test_init"
+ print("test_init", cls, args, kwargs)
+ try:
+ instance = cls(*args, **kwargs)
+ except Exception as exception:
+ if type(exception) == expected:
+ return
+ logging.error("expected %s, got %s", expected, repr(exception))
+ raise
+ if isinstance(expected, type) and issubclass(expected, Exception):
+ raise AssertionError("No exception raised; "
+ "expected %s for %s/%s" % (
+ expected.__name__,
+ repr(args),
+ repr(kwargs)))
+ for attr_name, attr_value in expected.items():
+ assert getattr(instance, attr_name) == attr_value
+
+
+class TestCookie(object):
+ """Tests for the Cookie class.
+ """
+ # Test cases exercising different constructor calls to make a new Cookie
+ # from scratch. Each case is tuple:
+ # args, kwargs, exception or dict of expected attribute values
+ # this exercises the default validators as well.
+ creation_cases = [
+ # bad call gives TypeError
+ (("foo",), {}, TypeError),
+ (("a", "b", "c"), {}, TypeError),
+ # give un-ascii-able name - raises error due to likely
+ # compatibility problems (cookie ignored, etc.)
+ # in value it's fine, it'll be encoded and not inspected anyway.
+ (("ăŊĻ", "b"), {}, InvalidCookieError),
+ (("b", "ăŊĻ"), {}, {'name': 'b', 'value': "ăŊĻ"}),
+ # normal simple construction gives name and value
+ (("foo", "bar"), {}, {'name': 'foo', 'value': 'bar'}),
+ # add a valid attribute and get it set
+ (("baz", "bam"), {'max_age': 9},
+ {'name': 'baz', 'value': 'bam', 'max_age': 9}),
+ # multiple valid attributes
+ (("x", "y"), {'max_age': 9, 'comment': 'fruity'},
+ {'name': 'x', 'value': 'y',
+ 'max_age': 9, 'comment': 'fruity'}),
+ # invalid max-age
+ (("w", "m"), {'max_age': 'loopy'}, InvalidCookieAttributeError),
+ (("w", "m"), {'max_age': -1}, InvalidCookieAttributeError),
+ (("w", "m"), {'max_age': 1.2}, InvalidCookieAttributeError),
+ # invalid expires
+ (("w", "m"), {'expires': 0}, InvalidCookieAttributeError),
+ (("w", "m"), {'expires':
+ datetime(2010, 1, 1, tzinfo=FixedOffsetTz(600))},
+ InvalidCookieAttributeError),
+ # control: valid expires
+ (("w", "m"),
+ {'expires': datetime(2010, 1, 1)},
+ {'expires': datetime(2010, 1, 1)}),
+ # invalid domain
+ (("w", "m"), {'domain': ''}, InvalidCookieAttributeError),
+ (("w", "m"), {'domain': '@'}, InvalidCookieAttributeError),
+ (("w", "m"), {'domain': '.foo.net'}, {'domain': '.foo.net'}),
+ # control: valid domain
+ (("w", "m"),
+ {'domain': 'foo.net'},
+ {'domain': 'foo.net'},),
+ # invalid path
+ (("w", "m"), {'path': ''}, InvalidCookieAttributeError),
+ (("w", "m"), {'path': '""'}, InvalidCookieAttributeError),
+ (("w", "m"), {'path': 'foo'}, InvalidCookieAttributeError),
+ (("w", "m"), {'path': '"/foo"'}, InvalidCookieAttributeError),
+ (("w", "m"), {'path': ' /foo '}, InvalidCookieAttributeError),
+ # control: valid path
+ (("w", "m"), {'path': '/'},
+ {'path': '/'}),
+ (("w", "m"), {'path': '/axes'},
+ {'path': '/axes'}),
+ # invalid version per RFC 2109/RFC 2965
+ (("w", "m"), {'version': ''}, InvalidCookieAttributeError),
+ (("w", "m"), {'version': 'baa'}, InvalidCookieAttributeError),
+ (("w", "m"), {'version': -2}, InvalidCookieAttributeError),
+ (("w", "m"), {'version': 2.3}, InvalidCookieAttributeError),
+ # control: valid version
+ (("w", "m"), {'version': 0}, {'version': 0}),
+ (("w", "m"), {'version': 1}, {'version': 1}),
+ (("w", "m"), {'version': 3042}, {'version': 3042}),
+ # invalid secure, httponly
+ (("w", "m"), {'secure': ''}, InvalidCookieAttributeError),
+ (("w", "m"), {'secure': 0}, InvalidCookieAttributeError),
+ (("w", "m"), {'secure': 1}, InvalidCookieAttributeError),
+ (("w", "m"), {'secure': 'a'}, InvalidCookieAttributeError),
+ (("w", "m"), {'httponly': ''}, InvalidCookieAttributeError),
+ (("w", "m"), {'httponly': 0}, InvalidCookieAttributeError),
+ (("w", "m"), {'httponly': 1}, InvalidCookieAttributeError),
+ (("w", "m"), {'httponly': 'a'}, InvalidCookieAttributeError),
+ # valid comment
+ (("w", "m"), {'comment': 'a'}, {'comment': 'a'}),
+ # invalid names
+ # (unicode cases are done last because they mess with pytest print)
+ ((None, "m"), {}, InvalidCookieError),
+ (("", "m"), {}, InvalidCookieError),
+ (("ü", "m"), {}, InvalidCookieError),
+ # invalid values
+ (("w", None), {}, {'name': 'w'}),
+ # a control - unicode is valid value, just gets encoded on way out
+ (("w", "üm"), {}, {'value': "üm"}),
+ # comma
+ (('a', ','), {}, {'value': ','}),
+ # semicolons
+ (('a', ';'), {}, {'value': ';'}),
+ # spaces
+ (('a', ' '), {}, {'value': ' '}),
+ ]
+
+ def test_init(self):
+ """Exercise __init__ and validators.
+
+ This is important both because it is a user-facing API, and also
+ because the parse/render tests depend heavily on it.
+ """
+ creation_cases = self.creation_cases + [
+ (("a", "b"), {'frob': 10}, InvalidCookieAttributeError)
+ ]
+ counter = 0
+ for args, kwargs, expected in creation_cases:
+ counter += 1
+ logging.error("counter %d, %s, %s, %s", counter, args, kwargs,
+ expected)
+ _test_init(Cookie, args, kwargs, expected)
+
+ def test_set_attributes(self):
+ """Exercise setting, validation and getting of attributes without
+ much involving __init__. Also sets value and name.
+ """
+ for args, kwargs, expected in self.creation_cases:
+ if not kwargs:
+ continue
+ try:
+ cookie = Cookie("yarp", "flam")
+ for attr, value in kwargs.items():
+ setattr(cookie, attr, value)
+ if args:
+ cookie.name = args[0]
+ cookie.value = args[1]
+ except Exception as e:
+ if type(e) == expected:
+ continue
+ raise
+ if isinstance(expected, type) and issubclass(expected, Exception):
+ raise AssertionError("No exception raised; "
+ "expected %s for %s" % (
+ expected.__name__,
+ repr(kwargs)))
+ for attr_name, attr_value in expected.items():
+ assert getattr(cookie, attr_name) == attr_value
+
+ def test_get_defaults(self):
+ "Test that defaults are right for cookie attrs"
+ cookie = Cookie("foo", "bar")
+ for attr in (
+ "expires",
+ "max_age",
+ "domain",
+ "path",
+ "comment",
+ "version",
+ "secure",
+ "httponly"):
+ assert hasattr(cookie, attr)
+ assert getattr(cookie, attr) == None
+ # Verify that not every name is getting something
+ for attr in ("foo", "bar", "baz"):
+ assert not hasattr(cookie, attr)
+ with raises(AttributeError):
+ getattr(cookie, attr)
+
+ names_values = [
+ ("a", "b"),
+ ("foo", "bar"),
+ ("baz", "1234567890"),
+ ("!!#po99!", "blah"),
+ ("^_~`*", "foo"),
+ ("%s+|-.&$", "snah"),
+ ("lub", "!@#$%^&*()[]{}|/:'<>~.?`"),
+ ("woah", "====+-_"),
+ ]
+
+ def test_render_response(self):
+ "Test rendering Cookie object for Set-Cookie: header"
+ for name, value in self.names_values:
+ cookie = Cookie(name, value)
+ expected = "{name}={value}".format(
+ name=name, value=value)
+ assert cookie.render_response() == expected
+ for data, result in [
+ ({'name': 'a', 'value': 'b'}, "a=b"),
+ ({'name': 'foo', 'value': 'bar'}, "foo=bar"),
+ ({'name': 'baz', 'value': 'bam'}, "baz=bam"),
+ ({'name': 'baz', 'value': 'bam', 'max_age': 2},
+ "baz=bam; Max-Age=2"),
+ ({'name': 'baz', 'value': 'bam',
+ 'max_age': 2, 'comment': 'foobarbaz'},
+ "baz=bam; Max-Age=2; Comment=foobarbaz"),
+ ({'name': 'baz', 'value': 'bam',
+ 'max_age': 2,
+ 'expires': datetime(1970, 1, 1),
+ },
+ "baz=bam; Max-Age=2; "
+ "Expires=Thu, 01 Jan 1970 00:00:00 GMT"),
+ ({'name': 'baz', 'value': 'bam', 'path': '/yams',
+ 'domain': '3Com.COM'},
+ "baz=bam; Domain=3Com.COM; Path=/yams"),
+ ({'name': 'baz', 'value': 'bam', 'path': '/', 'secure': True,
+ 'httponly': True},
+ "baz=bam; Path=/; Secure; HttpOnly"),
+ ({'name': 'baz', 'value': 'bam', 'domain': '.domain'},
+ 'baz=bam; Domain=domain'),
+ ]:
+ cookie = Cookie(**data)
+ actual = sorted(cookie.render_response().split("; "))
+ ideal = sorted(result.split("; "))
+ assert actual == ideal
+
+ def test_render_encode(self):
+ """Test encoding of a few special characters.
+
+ as in http://bugs.python.org/issue9824
+ """
+ cases = {
+ ("x", "foo,bar;baz"): 'x=foo%2Cbar%3Bbaz',
+ ("y", 'yap"bip'): 'y=yap%22bip',
+ }
+ for args, ideal in cases.items():
+ cookie = Cookie(*args)
+ assert cookie.render_response() == ideal
+ assert cookie.render_request() == ideal
+
+ def test_legacy_quotes(self):
+ """Check that cookies which delimit values with quotes are understood
+ but that this non-6265 behavior is not repeated in the output
+ """
+ cookie = Cookie.from_string(
+ 'Set-Cookie: y="foo"; version="1"; Path="/foo"')
+ assert cookie.name == 'y'
+ assert cookie.value == 'foo'
+ assert cookie.version == 1
+ assert cookie.path == "/foo"
+ pieces = cookie.render_response().split("; ")
+ assert pieces[0] == 'y=foo'
+ assert set(pieces[1:]) == set([
+ 'Path=/foo', 'Version=1'
+ ])
+
+ def test_render_response_expires(self):
+ "Simple spot check of cookie expires rendering"
+ a = Cookie('a', 'blah')
+ a.expires = parse_date("Wed, 23-Jan-1992 00:01:02 GMT")
+ assert a.render_response() == \
+ 'a=blah; Expires=Thu, 23 Jan 1992 00:01:02 GMT'
+
+ b = Cookie('b', 'blr')
+ b.expires = parse_date("Sun Nov 6 08:49:37 1994")
+ assert b.render_response() == \
+ 'b=blr; Expires=Sun, 06 Nov 1994 08:49:37 GMT'
+
+ def test_eq(self):
+ "Smoke test equality/inequality with Cookie objects"
+ ref = Cookie('a', 'b')
+ # trivial cases
+ assert ref == ref
+ assert not (ref != ref)
+ assert None != ref
+ assert not (None == ref)
+ assert ref != None
+ assert not (ref == None)
+ # equivalence and nonequivalence
+ assert Cookie('a', 'b') is not ref
+ assert Cookie('a', 'b') == ref
+ assert Cookie('x', 'y') != ref
+ assert Cookie('a', 'y') != ref
+ assert Cookie('a', 'b', path='/') != ref
+ assert {'c': 'd'} != ref
+ assert ref != {'c': 'd'}
+ # unlike attribute values and sets of attributes
+ assert Cookie('a', 'b', path='/a') \
+ != Cookie('a', 'b', path='/')
+ assert Cookie('x', 'y', max_age=3) != \
+ Cookie('x', 'y', path='/b')
+ assert Cookie('yargo', 'z', max_age=5) != \
+ Cookie('yargo', 'z', max_age=6)
+ assert ref != Cookie('a', 'b', domain='yab')
+ # Exercise bytes conversion
+ assert Cookie(b'a', 'b') == Cookie('a', 'b')
+ assert Cookie(b'a', 'b') == Cookie(b'a', 'b')
+
+ def test_manifest(self):
+ "Test presence of important stuff on Cookie class"
+ for name in ("attribute_names", "attribute_renderers",
+ "attribute_parsers", "attribute_validators"):
+ dictionary = getattr(Cookie, name)
+ assert dictionary
+ assert isinstance(dictionary, dict)
+
+ def test_simple_extension(self):
+ "Trivial example/smoke test of extending Cookie"
+
+ count_state = [0]
+
+ def call_counter(item=None):
+ count_state[0] += 1
+ return True if item else False
+
+ class Cookie2(Cookie):
+ "Example Cookie subclass with new behavior"
+ attribute_names = {
+ 'foo': 'Foo',
+ 'bar': 'Bar',
+ 'baz': 'Baz',
+ 'ram': 'Ram',
+ }
+ attribute_parsers = {
+ 'foo': lambda s: "/".join(s),
+ 'bar': call_counter,
+ 'value': lambda s:
+ parse_value(s, allow_spaces=True),
+ }
+ attribute_validators = {
+ 'foo': lambda item: True,
+ 'bar': call_counter,
+ 'baz': lambda item: False,
+ }
+ attribute_renderers = {
+ 'foo': lambda s: "|".join(s) if s else None,
+ 'bar': call_counter,
+ 'name': lambda item: item,
+ }
+ cookie = Cookie2("a", "b")
+ for key in Cookie2.attribute_names:
+ assert hasattr(cookie, key)
+ assert getattr(cookie, key) == None
+ cookie.foo = "abc"
+ assert cookie.render_request() == "a=b"
+ assert cookie.render_response() == "a=b; Foo=a|b|c"
+ cookie.foo = None
+ # Setting it to None makes it drop from the listing
+ assert cookie.render_response() == "a=b"
+
+ cookie.bar = "what"
+ assert cookie.bar == "what"
+ assert cookie.render_request() == "a=b"
+ # bar's renderer returns a bool; if it's True we get Bar.
+ # that's a special case for flags like HttpOnly.
+ assert cookie.render_response() == "a=b; Bar"
+
+ with raises(InvalidCookieAttributeError):
+ cookie.baz = "anything"
+
+ Cookie2('a', 'b fog')
+ Cookie2('a', ' b=fo g')
+
+ def test_from_string(self):
+ with raises(InvalidCookieError):
+ Cookie.from_string("")
+ with raises(InvalidCookieError):
+ Cookie.from_string("", ignore_bad_attributes=True)
+ assert Cookie.from_string("", ignore_bad_cookies=True) == None
+
+ def test_from_dict(self):
+ assert Cookie.from_dict({'name': 'a', 'value': 'b'}) == \
+ Cookie('a', 'b')
+ assert Cookie.from_dict(
+ {'name': 'a', 'value': 'b', 'duh': 'no'},
+ ignore_bad_attributes=True) == \
+ Cookie('a', 'b')
+ with raises(InvalidCookieError):
+ Cookie.from_dict({}, ignore_bad_attributes=True)
+ with raises(InvalidCookieError):
+ Cookie.from_dict({}, ignore_bad_attributes=False)
+ with raises(InvalidCookieError):
+ Cookie.from_dict({'name': ''}, ignore_bad_attributes=False)
+ with raises(InvalidCookieError):
+ Cookie.from_dict({'name': None, 'value': 'b'},
+ ignore_bad_attributes=False)
+ assert Cookie.from_dict({'name': 'foo'}) == Cookie('foo', None)
+ assert Cookie.from_dict({'name': 'foo', 'value': ''}) == \
+ Cookie('foo', None)
+ with raises(InvalidCookieAttributeError):
+ assert Cookie.from_dict(
+ {'name': 'a', 'value': 'b', 'duh': 'no'},
+ ignore_bad_attributes=False)
+ assert Cookie.from_dict({'name': 'a', 'value': 'b', 'expires': 2},
+ ignore_bad_attributes=True) == Cookie('a', 'b')
+ with raises(InvalidCookieAttributeError):
+ assert Cookie.from_dict({'name': 'a', 'value': 'b', 'expires': 2},
+ ignore_bad_attributes=False)
+
+
+class Scone(object):
+ """Non-useful alternative to Cookie class for tests only.
+ """
+ def __init__(self, name, value):
+ self.name = name
+ self.value = value
+
+ @classmethod
+ def from_dict(cls, cookie_dict):
+ instance = cls(cookie_dict['name'], cookie_dict['value'])
+ return instance
+
+ def __eq__(self, other):
+ if type(self) != type(other):
+ return False
+ if self.name != other.name:
+ return False
+ if self.value != other.value:
+ return False
+ return True
+
+
+class Scones(Cookies):
+ """Non-useful alternative to Cookies class for tests only.
+ """
+ DEFAULT_COOKIE_CLASS = Scone
+
+
+class TestCookies(object):
+ """Tests for the Cookies class.
+ """
+ creation_cases = [
+ # Only args - simple
+ ((Cookie("a", "b"),), {}, 1),
+ # Only kwargs - simple
+ (tuple(), {'a': 'b'}, 1),
+ # Only kwargs - bigger
+ (tuple(),
+ {'axl': 'bosk',
+ 'x': 'y',
+ 'foo': 'bar',
+ 'baz': 'bam'}, 4),
+ # Sum between args/kwargs
+ ((Cookie('a', 'b'),),
+ {'axl': 'bosk',
+ 'x': 'y',
+ 'foo': 'bar',
+ 'baz': 'bam'}, 5),
+ # Redundant between args/kwargs
+ ((Cookie('a', 'b'),
+ Cookie('x', 'y')),
+ {'axl': 'bosk',
+ 'x': 'y',
+ 'foo': 'bar',
+ 'baz': 'bam'}, 5),
+ ]
+
+ def test_init(self):
+ """Create some Cookies objects with __init__, varying the constructor
+ arguments, and check on the results.
+
+ Exercises __init__, __repr__, render_request, render_response, and
+ simple cases of parse_response and parse_request.
+ """
+ def same(a, b):
+ keys = sorted(set(a.keys() + b.keys()))
+ for key in keys:
+ assert a[key] == b[key]
+
+ for args, kwargs, length in self.creation_cases:
+ # Make a Cookies object using the args.
+ cookies = Cookies(*args, **kwargs)
+ assert len(cookies) == length
+
+ # Render into various text formats.
+ rep = repr(cookies)
+ res = cookies.render_response()
+ req = cookies.render_request()
+
+ # Very basic sanity check on renders, fail fast and in a simple way
+ # if output is truly terrible
+ assert rep.count('=') == length
+ assert len(res) == length
+            assert all(item.count('=') == 1 for item in res)
+ assert req.count('=') == length
+ assert len(req.split(";")) == length
+
+ # Explicitly parse out the data (this can be simple since the
+ # output should be in a highly consistent format)
+ pairs = [item.split("=") for item in req.split("; ")]
+ assert len(pairs) == length
+ for name, value in pairs:
+ cookie = cookies[name]
+ assert cookie.name == name
+ assert cookie.value == value
+
+ # Parse the rendered output, check that result is equal to the
+ # originally produced object.
+
+ parsed = Cookies()
+ parsed.parse_request(req)
+ assert parsed == cookies
+
+ parsed = Cookies()
+ for item in res:
+ parsed.parse_response(item)
+ assert parsed == cookies
+
+ # Check that all the requested cookies were created correctly:
+ # indexed with correct names in dict, also with correctly set name
+ # and value attributes.
+ for cookie in args:
+ assert cookies[cookie.name] == cookie
+ for name, value in kwargs.items():
+ cookie = cookies[name]
+ assert cookie.name == name
+ assert cookie.value == value
+ assert name in rep
+ assert value in rep
+
+ # Spot check that setting an attribute still works
+ # with these particular parameters. Not a torture test.
+ for key in cookies:
+ cookies[key].max_age = 42
+ for line in cookies.render_response():
+ assert line.endswith("Max-Age=42")
+
+ # Spot check attribute deletion
+ assert cookies[key].max_age
+ del cookies[key].max_age
+ assert cookies[key].max_age is None
+
+ # Spot check cookie deletion
+ keys = [key for key in cookies.keys()]
+ for key in keys:
+ del cookies[key]
+ assert key not in cookies
+
+ def test_eq(self):
+ "Smoke test equality/inequality of Cookies objects"
+ ref = Cookies(a='b')
+ assert Cookies(a='b') == ref
+ assert Cookies(b='c') != ref
+ assert ref != Cookies(d='e')
+ assert Cookies(a='x') != ref
+
+ class Dummy(object):
+ "Just any old object"
+ pass
+ x = Dummy()
+ x.keys = True
+ with raises(TypeError):
+ assert ref != x
+
+ def test_add(self):
+ "Test the Cookies.add method"
+ for args, kwargs, length in self.creation_cases:
+ cookies = Cookies()
+ cookies.add(*args, **kwargs)
+ assert len(cookies) == length
+ for cookie in args:
+ assert cookies[cookie.name] == cookie
+ for name, value in kwargs.items():
+ cookie = cookies[name]
+ assert cookie.value == value
+ count = len(cookies)
+ assert 'w' not in cookies
+ cookies.add(w='m')
+ assert 'w' in cookies
+ assert count == len(cookies) - 1
+ assert cookies['w'].value == 'm'
+
+ def test_empty(self):
+ "Trivial test of behavior of empty Cookies object"
+ cookies = Cookies()
+ assert len(cookies) == 0
+ assert Cookies() == cookies
+
+ def test_parse_request(self):
+ """Test Cookies.parse_request.
+ """
+ def run(arg, **kwargs):
+ "run Cookies.parse_request on an instance"
+ cookies = Cookies()
+ result = runner(cookies.parse_request)(arg, **kwargs)
+ return result
+
+ for i, case in enumerate(HEADER_CASES):
+ arg, kwargs, expected, response_result = case
+
+ # parse_request doesn't take ignore_bad_attributes. remove it
+ # without changing original kwargs for further tests
+ kwargs = kwargs.copy()
+ if 'ignore_bad_attributes' in kwargs:
+ del kwargs['ignore_bad_attributes']
+
+ def expect(arg, kwargs):
+ "repeated complex assertion"
+ result = run(arg, **kwargs)
+ assert result == expected \
+ or isinstance(expected, type) \
+ and type(result) == expected, \
+ "unexpected result for (%s): %s. should be %s" \
+ % (repr(arg), repr(result), repr(expected))
+
+ # Check result - should be same with and without the prefix
+ expect("Cookie: " + arg, kwargs)
+ expect(arg, kwargs)
+
+ # But it should not match with the response prefix.
+ other_result = run("Set-Cookie: " + arg, **kwargs)
+ assert other_result != expected
+ assert other_result != response_result
+
+ # If case expects InvalidCookieError, verify that it is suppressed
+ # by ignore_bad_cookies.
+ if expected == InvalidCookieError:
+ kwargs2 = kwargs.copy()
+ kwargs2['ignore_bad_cookies'] = True
+ cookies = Cookies()
+ # Let natural exception raise, easier to figure out
+ cookies.parse_request(arg, **kwargs2)
+
+ # Spot check that exception is raised for clearly wrong format
+ assert not isinstance(run("Cookie: a=b"), InvalidCookieError)
+ assert isinstance(run("Set-Cookie: a=b"), InvalidCookieError)
+
+ def test_parse_response(self):
+ """Test Cookies.parse_response.
+ """
+ def run(arg, **kwargs):
+ "run parse_response method of a Cookies instance"
+ cookies = Cookies()
+ return runner(cookies.parse_response)(arg, **kwargs)
+
+ for case in HEADER_CASES:
+ arg, kwargs, request_result, expected = case
+ # If we expect InvalidCookieError or InvalidCookieAttributeError,
+ # telling the function to ignore those should result in no
+ # exception.
+ kwargs2 = kwargs.copy()
+ if expected == InvalidCookieError:
+ kwargs2['ignore_bad_cookies'] = True
+ assert not isinstance(
+ run(arg, **kwargs2),
+ Exception)
+ elif expected == InvalidCookieAttributeError:
+ kwargs2['ignore_bad_attributes'] = True
+ result = run(arg, **kwargs2)
+ if isinstance(result, InvalidCookieAttributeError):
+ raise AssertionError("InvalidCookieAttributeError "
+ "should have been silenced/logged")
+ else:
+ assert not isinstance(result, Exception)
+ # Check result - should be same with and without the prefix
+ sys.stdout.flush()
+ result = run(arg, **kwargs)
+ assert result == expected \
+ or isinstance(expected, type) \
+ and type(result) == expected, \
+ "unexpected result for (%s): %s. should be %s" \
+ % (repr(arg), repr(result), repr(expected))
+ result = run("Set-Cookie: " + arg, **kwargs)
+ assert result == expected \
+ or isinstance(expected, type) \
+ and type(result) == expected, \
+ "unexpected result for (%s): %s. should be %s" \
+ % (repr("Set-Cookie: " + arg),
+ repr(result), repr(expected))
+ # But it should not match with the request prefix.
+ other_result = run("Cookie: " + arg, **kwargs)
+ assert other_result != expected
+ assert other_result != request_result
+
+ assert not isinstance(run("Set-Cookie: a=b"), InvalidCookieError)
+ assert isinstance(run("Cookie: a=b"), InvalidCookieError)
+
+ def test_exercise_parse_one_response_asctime(self):
+ asctime = 'Sun Nov 6 08:49:37 1994'
+ line = "Set-Cookie: a=b; Expires=%s" % asctime
+ response_dict = parse_one_response(line)
+ assert response_dict == \
+ {'expires': 'Sun Nov 6 08:49:37 1994', 'name': 'a', 'value': 'b'}
+ assert Cookie.from_dict(response_dict) == \
+ Cookie('a', 'b', expires=parse_date(asctime))
+
+ def test_get_all(self):
+ cookies = Cookies.from_request('a=b; a=c; b=x')
+ assert cookies['a'].value == 'b'
+ assert cookies['b'].value == 'x'
+ values = [cookie.value for cookie in cookies.get_all('a')]
+ assert values == ['b', 'c']
+
+ def test_custom_cookie_class_on_instance(self):
+ cookies = Cookies(_cookie_class=Scone)
+ cookies.add(a="b")
+ assert cookies['a'] == Scone("a", "b")
+
+ def test_custom_cookie_class_on_subclass(self):
+ cookies = Scones()
+ cookies.add(a="b")
+ assert cookies['a'] == Scone("a", "b")
+
+ def test_custom_cookie_class_on_instance_parse_request(self):
+ cookies = Scones()
+ cookies.parse_request("Cookie: c=d")
+ assert cookies['c'] == Scone("c", "d")
+
+ def test_custom_cookie_class_on_instance_parse_response(self):
+ cookies = Scones()
+ cookies.parse_response("Set-Cookie: c=d")
+ assert cookies['c'] == Scone("c", "d")
+
+
+def test_parse_date():
+ """Throw a ton of dirty samples at the date parse/render and verify the
+ exact output of rendering the parsed version of the sample.
+ """
+ cases = [
+ # Obviously off format
+ ("", None),
+ (" ", None),
+ ("\t", None),
+ ("\n", None),
+ ("\x02\x03\x04", None),
+ ("froppity", None),
+ ("@@@@@%@#:%", None),
+ ("foo bar baz", None),
+ # We'll do a number of overall manglings.
+ # First, show that the baseline passes
+ ("Sat, 10 Oct 2009 13:47:21 GMT", "Sat, 10 Oct 2009 13:47:21 GMT"),
+ # Delete semantically important pieces
+ (" Oct 2009 13:47:21 GMT", None),
+ ("Fri, Oct 2009 13:47:21 GMT", None),
+ ("Fri, 10 2009 13:47:21 GMT", None),
+ ("Sat, 10 Oct 2009 :47:21 GMT", None),
+ ("Sat, 10 Oct 2009 13::21 GMT", None),
+ ("Sat, 10 Oct 2009 13:47: GMT", None),
+ # Replace single characters out of tokens with spaces - harder to
+ # do programmatically because some whitespace can reasonably be
+ # tolerated.
+ ("F i, 10 Oct 2009 13:47:21 GMT", None),
+ ("Fr , 10 Oct 2009 13:47:21 GMT", None),
+ ("Fri, 10 ct 2009 13:47:21 GMT", None),
+ ("Fri, 10 O t 2009 13:47:21 GMT", None),
+ ("Fri, 10 Oc 2009 13:47:21 GMT", None),
+ ("Sat, 10 Oct 009 13:47:21 GMT", None),
+ ("Sat, 10 Oct 2 09 13:47:21 GMT", None),
+ ("Sat, 10 Oct 20 9 13:47:21 GMT", None),
+ ("Sat, 10 Oct 200 13:47:21 GMT", None),
+ ("Sat, 10 Oct 2009 1 :47:21 GMT", None),
+ ("Sat, 10 Oct 2009 13 47:21 GMT", None),
+ ("Sat, 10 Oct 2009 13: 7:21 GMT", None),
+ ("Sat, 10 Oct 2009 13:4 :21 GMT", None),
+ ("Sat, 10 Oct 2009 13:47 21 GMT", None),
+ ("Sat, 10 Oct 2009 13:47: 1 GMT", None),
+ ("Sat, 10 Oct 2009 13:47:2 GMT", None),
+ ("Sat, 10 Oct 2009 13:47:21 MT", None),
+ ("Sat, 10 Oct 2009 13:47:21 G T", None),
+ ("Sat, 10 Oct 2009 13:47:21 GM ", None),
+ # Replace numeric elements with stuff that contains A-Z
+ ("Fri, Burp Oct 2009 13:47:21 GMT", None),
+ ("Fri, 10 Tabalqplar 2009 13:47:21 GMT", None),
+ ("Sat, 10 Oct Fruit 13:47:21 GMT", None),
+ ("Sat, 10 Oct 2009 13:47:21 Fruits", None),
+ # Weekday
+ (", Dec 31 00:00:00 2003", None),
+ ("T, Dec 31 00:00:00 2003", None),
+ ("Tu, Dec 31 00:00:00 2003", None),
+ ("Hi, Dec 31 00:00:00 2003", None),
+ ("Heretounforeseen, Dec 31 00:00:00 2003", None),
+ ("Wednesday2, Dec 31 00:00:00 2003", None),
+ ("Mon\x00frobs, Dec 31 00:00:00 2003", None),
+ ("Mon\x10day, Dec 31 00:00:00 2003", None),
+ # Day of month
+ ("Fri, Oct 2009 13:47:21 GMT", None),
+ ("Fri, 110 Oct 2009 13:47:21 GMT", None),
+ ("Fri, 0 Oct 2009 13:47:21 GMT", None),
+ ("Fri, 00 Oct 2009 13:47:21 GMT", None),
+ ("Fri, 0 Oct 2009 13:47:21 GMT", None),
+ ("Fri, 0 Oct 2009 13:47:21 GMT", None),
+ ("Fri, 00 Oct 2009 13:47:21 GMT", None),
+ ("Fri, 33 Oct 2009 13:47:21 GMT", None),
+ ("Fri, 40 Oct 2009 13:47:21 GMT", None),
+ ("Fri, A2 Oct 2009 13:47:21 GMT", None),
+ ("Fri, 2\x00 Oct 2009 13:47:21 GMT", None),
+ ("Fri, \t3 Oct 2009 13:47:21 GMT", None),
+ ("Fri, 3\t Oct 2009 13:47:21 GMT", None),
+ # Month
+ ("Fri, 10 2009 13:47:21 GMT", None),
+ ("Fri, 10 O 2009 13:47:21 GMT", None),
+ ("Fri, 10 Oc 2009 13:47:21 GMT", None),
+ ("Sat, 10 Octuarial 2009 13:47:21 GMT", None),
+ ("Sat, 10 Octuary 2009 13:47:21 GMT", None),
+ ("Sat, 10 Octubre 2009 13:47:21 GMT", None),
+ # Year
+ ("Sat, 10 Oct 009 13:47:21 GMT", None),
+ ("Sat, 10 Oct 200 13:47:21 GMT", None),
+ ("Sat, 10 Oct 209 13:47:21 GMT", None),
+ ("Sat, 10 Oct 20 9 13:47:21 GMT", None),
+ # Hour
+ ("Sat, 10 Oct 2009 25:47:21 GMT", None),
+ ("Sat, 10 Oct 2009 1@:47:21 GMT", None),
+ # Minute
+ ("Sat, 10 Oct 2009 13:71:21 GMT", None),
+ ("Sat, 10 Oct 2009 13:61:21 GMT", None),
+ ("Sat, 10 Oct 2009 13:60:21 GMT", None),
+ ("Sat, 10 Oct 2009 24:01:00 GMT", None),
+ # Second
+ ("Sat, 10 Oct 2009 13:47 GMT", "Sat, 10 Oct 2009 13:47:00 GMT"),
+ ("Sat, 10 Oct 2009 13:47:00 GMT", "Sat, 10 Oct 2009 13:47:00 GMT"),
+ ("Sat, 10 Oct 2009 24:00:01 GMT", None),
+ # Some reasonable cases (ignore weekday)
+ ("Mon Dec 24 16:32:39 1977 GMT", "Sat, 24 Dec 1977 16:32:39 GMT"),
+ ("Sat, 7 Dec 1991 13:56:05 GMT", "Sat, 07 Dec 1991 13:56:05 GMT"),
+ ("Saturday, 8-Mar-2012 21:35:09 GMT", "Thu, 08 Mar 2012 21:35:09 GMT"),
+ ("Sun, 1-Feb-1998 00:00:00 GMT", "Sun, 01 Feb 1998 00:00:00 GMT"),
+ ("Thursday, 01-Jan-1983 01:01:01 GMT",
+ "Sat, 01 Jan 1983 01:01:01 GMT"),
+ ("Tue, 15-Nov-1973 22:23:24 GMT", "Thu, 15 Nov 1973 22:23:24 GMT"),
+ ("Wed, 09 Dec 1999 23:59:59 GMT", "Thu, 09 Dec 1999 23:59:59 GMT"),
+ ("Mon, 12-May-05 20:25:03 GMT", "Thu, 12 May 2005 20:25:03 GMT"),
+ ("Thursday, 01-Jan-12 09:00:00 GMT", "Sun, 01 Jan 2012 09:00:00 GMT"),
+ # starts like asctime, but flips the time and year - nonsense
+ ("Wed Mar 12 2007 08:25:07 GMT", None),
+ # starts like RFC 1123, but flips the time and year - nonsense
+ ("Thu, 31 Dec 23:55:55 2107 GMT", None),
+ ('Fri, 21-May-2004 10:40:51 GMT', "Fri, 21 May 2004 10:40:51 GMT"),
+ # extra 2-digit year exercises
+ ("Sat, 10 Oct 11 13:47:21 GMT", "Mon, 10 Oct 2011 13:47:21 GMT"),
+ ("Sat, 10 Oct 09 13:47:22 GMT", "Sat, 10 Oct 2009 13:47:22 GMT"),
+ ("Sat, 10 Oct 93 13:47:23 GMT", "Sun, 10 Oct 1993 13:47:23 GMT"),
+ ("Sat, 10 Oct 85 13:47:24 GMT", "Thu, 10 Oct 1985 13:47:24 GMT"),
+ ("Sat, 10 Oct 70 13:47:25 GMT", "Sat, 10 Oct 1970 13:47:25 GMT"),
+ ("Sat, 10 Oct 69 13:47:26 GMT", "Thu, 10 Oct 2069 13:47:26 GMT"),
+ # dealing with 3-digit year is incredibly tedious, will do as needed
+ ("Sat, 10 Oct 969 13:47:26 GMT", None),
+ ("Sat, 10 Oct 9 13:47:26 GMT", None),
+ ("Fri, 10 Oct 19691 13:47:26 GMT", None),
+ ]
+
+ def change(string, position, new_value):
+ "Macro to change a string"
+ return string[:position] + new_value + string[position + 1:]
+
+ original = "Sat, 10 Oct 2009 13:47:21 GMT"
+
+ # Stuff garbage in every position - none of these characters should
+ # ever be allowed in a date string.
+ # not included because pytest chokes: "¿�␦"
+ bad_chars = "/<>()\\*$#&=;\x00\b\f\n\r\"\'`?"
+ for pos in range(0, len(original)):
+ for bad_char in bad_chars:
+ cases.append((change(original, pos, bad_char), None))
+
+ # Invalidate each letter
+ letter_positions = [i for (i, c) in enumerate(original) \
+ if re.match("[A-Za-z]", c)]
+ for pos in letter_positions:
+ cases.append((change(original, pos, 'q'), None))
+ cases.append((change(original, pos, '0'), None))
+ cases.append((change(original, pos, '-'), None))
+ cases.append((change(original, pos, ''), None))
+ # But do tolerate case changes.
+ c = original[pos]
+ if c.isupper():
+ c = c.lower()
+ else:
+ c = c.upper()
+ cases.append((change(original, pos, c), original))
+
+ # Invalidate each digit
+ digit_positions = [i for (i, c) in enumerate(original) \
+ if c in "0123456789"]
+ for pos in digit_positions:
+ c = original[pos]
+ cases.append((change(original, pos, 'q'), None))
+ cases.append((change(original, pos, '-' + c), None))
+ cases.append((change(original, pos, '+' + c), None))
+
+ # Invalidate each space
+ space_positions = [i for (i, c) in enumerate(original) \
+ if c in " \t\n\r"]
+ for pos in space_positions:
+ cases.append((change(original, pos, 'x'), None))
+ cases.append((change(original, pos, '\t'), None))
+ cases.append((change(original, pos, ' '), None))
+ cases.append((change(original, pos, ''), None))
+
+ # Invalidate each colon
+ colon_positions = [i for (i, c) in enumerate(original) \
+ if c == ":"]
+ for pos in colon_positions:
+ cases.append((change(original, pos, 'z'), None))
+ cases.append((change(original, pos, '0'), None))
+ cases.append((change(original, pos, ' '), None))
+ cases.append((change(original, pos, ''), None))
+
+ for data, ideal in cases:
+ actual = render_date(parse_date(data))
+ assert actual == ideal
+
+
+def runner(function):
+ """Generate a function which collects the result/exception from another
+ function, for easier assertions.
+ """
+ def run(*args, **kwargs):
+ "Function which collects result/exception"
+ actual_result, actual_exception = None, None
+ try:
+ actual_result = function(*args, **kwargs)
+ except Exception as exception:
+ actual_exception = exception
+ return actual_exception or actual_result
+ return run
+
+
+# Define cases for testing parsing and rendering.
+# Format: input, kwargs, expected parse_request result, expected parse_response
+# result.
+
+HEADER_CASES = [
+ # cases with nothing that can be parsed out result in
+ # InvalidCookieError. unless ignore_bad_cookies=True, then they give an
+ # empty Cookies().
+ ("", {},
+ InvalidCookieError,
+ InvalidCookieError),
+ ('a', {},
+ InvalidCookieError,
+ InvalidCookieError),
+ (" ", {},
+ InvalidCookieError,
+ InvalidCookieError),
+ (";;;;;", {},
+ InvalidCookieError,
+ InvalidCookieError),
+ ("qwejrkqlwjere", {},
+ InvalidCookieError,
+ InvalidCookieError),
+ # vacuous headers should give invalid
+ ('Cookie: ', {},
+ InvalidCookieError,
+ InvalidCookieError),
+ ('Set-Cookie: ', {},
+ InvalidCookieError,
+ InvalidCookieError),
+ # Single pair should work the same as request or response
+ ("foo=bar", {},
+ Cookies(foo='bar'),
+ Cookies(foo='bar')),
+ ("SID=242d96421d4e", {},
+ Cookies(SID='242d96421d4e'),
+ Cookies(SID='242d96421d4e')),
+ # Two pairs on SAME line should work with request, fail with response.
+    # If ignore_bad_attributes is set, the response parse should not raise,
+    # and ignoring bad attributes should be the default behavior.
+ ("a=b; c=dx", {'ignore_bad_attributes': True},
+ Cookies(a='b', c='dx'),
+ Cookies(a='b')),
+ ("a=b; c=d", {'ignore_bad_attributes': False},
+ Cookies(a='b', c='d'),
+ InvalidCookieAttributeError),
+ ('g=h;j=k', {},
+ Cookies(g='h', j='k'),
+ Cookies(g='h')),
+ # tolerance: response shouldn't barf on unrecognized attr by default,
+ # but request should recognize as malformed
+ ('a=b; brains', {},
+ InvalidCookieError,
+ Cookies(a='b')),
+ # tolerance: should strip quotes and spaces
+ ('A="BBB"', {},
+ Cookies(A='BBB'),
+ Cookies(A='BBB'),
+ ),
+ ('A= "BBB" ', {},
+ Cookies(A='BBB'),
+ Cookies(A='BBB'),
+ ),
+ # tolerance: should ignore dumb trailing ;
+ ('foo=bar;', {},
+ Cookies(foo='bar'),
+ Cookies(foo='bar'),
+ ),
+ ('A="BBB";', {},
+ Cookies(A='BBB'),
+ Cookies(A='BBB'),
+ ),
+ ('A= "BBB" ;', {},
+ Cookies(A='BBB'),
+ Cookies(A='BBB'),
+ ),
+ # empty value
+ ("lang=; Expires=Sun, 06 Nov 1994 08:49:37 GMT", {},
+ InvalidCookieError,
+ Cookies(
+ Cookie('lang', '',
+ expires=parse_date(
+ "Sun, 06 Nov 1994 08:49:37 GMT")))),
+ # normal examples of varying complexity
+ ("frob=varvels; Expires=Wed, 09 Jun 2021 10:18:14 GMT", {},
+ InvalidCookieError,
+ Cookies(
+ Cookie('frob', 'varvels',
+ expires=parse_date(
+ "Wed, 09 Jun 2021 10:18:14 GMT"
+ )))),
+ ("lang=en-US; Expires=Wed, 03 Jun 2019 10:18:14 GMT", {},
+ InvalidCookieError,
+ Cookies(
+ Cookie('lang', 'en-US',
+ expires=parse_date(
+ "Wed, 03 Jun 2019 10:18:14 GMT"
+ )))),
+ # easily interpretable as multiple request cookies!
+ ("CID=39b4d9be4d42; Path=/; Domain=example.com", {},
+ Cookies(CID="39b4d9be4d42", Path='/', Domain='example.com'),
+ Cookies(Cookie('CID', '39b4d9be4d42', path='/',
+ domain='example.com'))),
+ ("lang=en-US; Path=/; Domain=example.com", {},
+ Cookies(lang='en-US', Path='/', Domain='example.com'),
+ Cookies(Cookie('lang', 'en-US',
+ path='/', domain='example.com'))),
+ ("foo=bar; path=/; expires=Mon, 04-Dec-2001 12:43:00 GMT", {},
+ InvalidCookieError,
+ Cookies(
+ Cookie('foo', 'bar', path='/',
+ expires=parse_date("Mon, 04-Dec-2001 12:43:00 GMT")
+ ))),
+ ("SID=0fae49; Path=/; Secure; HttpOnly", {},
+ InvalidCookieError,
+ Cookies(Cookie('SID', '0fae49',
+ path='/', secure=True, httponly=True))),
+ ('TMID=DQAAXKEaeo_aYp; Domain=mail.nauk.com; '
+ 'Path=/accounts; Expires=Wed, 13-Jan-2021 22:23:01 GMT; '
+ 'Secure; HttpOnly', {},
+ InvalidCookieError,
+ Cookies(
+ Cookie('TMID', 'DQAAXKEaeo_aYp',
+ domain='mail.nauk.com',
+ path='/accounts', secure=True, httponly=True,
+ expires=parse_date("Wed, 13-Jan-2021 22:23:01 GMT")
+ ))),
+ ("test=some_value; expires=Sat, 01-Jan-2000 00:00:00 GMT; "
+ "path=/;", {},
+ InvalidCookieError,
+ Cookies(
+ Cookie('test', 'some_value', path='/',
+ expires=parse_date('Sat, 01 Jan 2000 00:00:00 GMT')
+ ))),
+ # From RFC 2109 - accept the lots-of-dquotes style but don't produce.
+ ('Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"; '
+ 'Part_Number="Rocket_Launcher_0001"', {},
+ Cookies(Customer='WILE_E_COYOTE', Version='1', Path='/acme',
+ Part_Number='Rocket_Launcher_0001'),
+ Cookies(Cookie('Customer', 'WILE_E_COYOTE',
+ version=1, path='/acme'))),
+ # However, we don't honor RFC 2109 type meta-attributes
+ ('Cookie: $Version="1"; Customer="WILE_E_COYOTE"; $Path="/acme"', {},
+ InvalidCookieError,
+ InvalidCookieError),
+ # degenerate Domain=. is common, so should be handled though invalid
+ ("lu=Qg3OHJZLehYLjVgAqiZbZbzo; Expires=Tue, 15-Jan-2013 "
+ "21:47:38 GMT; Path=/; Domain=.foo.com; HttpOnly", {},
+ InvalidCookieError,
+ Cookies(Cookie('lu', "Qg3OHJZLehYLjVgAqiZbZbzo",
+ expires=parse_date('Tue, 15 Jan 2013 21:47:38 GMT'),
+ path='/', domain='.foo.com', httponly=True,
+ ))),
+ ('ZQID=AYBEVnDKrdst; Domain=.nauk.com; Path=/; '
+ 'Expires=Wed, 13-Jan-2021 22:23:01 GMT; HttpOnly', {},
+ InvalidCookieError,
+ Cookies(Cookie('ZQID', "AYBEVnDKrdst",
+ httponly=True, domain='.nauk.com', path='/',
+ expires=parse_date('Wed, 13 Jan 2021 22:23:01 GMT'),
+ ))),
+ ("OMID=Ap4PQQEq; Domain=.nauk.com; Path=/; "
+ 'Expires=Wed, 13-Jan-2021 22:23:01 GMT; Secure; HttpOnly', {},
+ InvalidCookieError,
+ Cookies(Cookie('OMID', "Ap4PQQEq",
+ path='/', domain='.nauk.com', secure=True, httponly=True,
+ expires=parse_date('Wed, 13 Jan 2021 22:23:01 GMT')
+ ))),
+ # question mark in value
+ ('foo="?foo"; Path=/', {},
+ Cookies(foo='?foo', Path='/'),
+ Cookies(Cookie('foo', '?foo', path='/'))),
+ # unusual format for secure/httponly
+ ("a=b; Secure=true; HttpOnly=true;", {},
+ Cookies(a='b', Secure='true', HttpOnly='true'),
+ Cookies(Cookie('a', 'b', secure=True, httponly=True))),
+ # invalid per RFC to have spaces in value, but here they are
+ # URL-encoded by default. Extend the mechanism if this is no good
+ ('user=RJMmei IORqmD; expires=Wed, 3 Nov 2007 23:20:39 GMT; path=/',
+ {},
+ InvalidCookieError,
+ Cookies(
+ Cookie('user', 'RJMmei IORqmD', path='/',
+ expires=parse_date("Wed, 3 Nov 2007 23:20:39 GMT")))),
+    # Most characters from 32 (space) through 0x31 ('1') should be allowed in
+    # values - not including space (32), dquote (34), and comma (44).
+ ("x=!#$%&'()*+-./01", {},
+ Cookies(x="!#$%&'()*+-./01"),
+ Cookies(x="!#$%&'()*+-./01")),
+ # don't crash when value wrapped with quotes
+ # http://bugs.python.org/issue3924
+ ('a=b; version="1"', {},
+ Cookies(a='b', version='1'),
+ Cookies(Cookie('a', 'b', version=1))),
+ # cookie with name 'expires'. inadvisable, but valid.
+ # http://bugs.python.org/issue1117339
+ ('expires=foo', {},
+ Cookies(expires='foo'),
+ Cookies(expires='foo')),
+ # http://bugs.python.org/issue8826
+ # quick date parsing spot-check, see test_parse_date for a real workout
+ ('foo=bar; expires=Fri, 31-Dec-2010 23:59:59 GMT', {},
+ InvalidCookieError,
+ Cookies(
+ Cookie('foo', 'bar',
+ expires=datetime(2010, 12, 31, 23, 59, 59)))),
+ # allow VALID equals sign in values - not even an issue in RFC 6265 or
+ # this module, but very helpful for base64 and always worth checking.
+ # http://bugs.python.org/issue403473
+ ('a=Zm9vIGJhcg==', {},
+ Cookies(a='Zm9vIGJhcg=='),
+ Cookies(a='Zm9vIGJhcg==')),
+ ('blah="Foo=2"', {},
+ Cookies(blah='Foo=2'),
+ Cookies(blah='Foo=2')),
+ # take the first cookie in request parsing.
+ # (response parse ignores the second one as a bad attribute)
+ # http://bugs.python.org/issue1375011
+ # http://bugs.python.org/issue1372650
+ # http://bugs.python.org/issue7504
+ ('foo=33;foo=34', {},
+ Cookies(foo='33'),
+ Cookies(foo='33')),
+ # Colons in names (invalid!), as used by some dumb old Java/PHP code
+ # http://bugs.python.org/issue2988
+ # http://bugs.python.org/issue472646
+ # http://bugs.python.org/issue2193
+ ('a:b=c', {},
+ Cookies(
+ Cookie('a:b', 'c')),
+ Cookies(
+ Cookie('a:b', 'c'))),
+# # http://bugs.python.org/issue991266
+# # This module doesn't do the backslash quoting so this would
+# # effectively require allowing all possible characters inside arbitrary
+# # attributes, which does not seem reasonable.
+# ('foo=bar; Comment="\342\230\243"', {},
+# Cookies(foo='bar', Comment='\342\230\243'),
+# Cookies(
+# Cookie('foo', 'bar', comment='\342\230\243')
+# )),
+ ]
+
+
+def _cheap_request_parse(arg1, arg2):
+ """Really cheap parse like what client code often does, for
+ testing request rendering (determining order-insensitively whether two
+ cookies-as-text are equivalent). 'a=b; x=y' type format
+ """
+ def crumble(arg):
+ "Break down string into pieces"
+ pieces = [piece.strip('\r\n ;') for piece in re.split("(\r\n|;)", arg)]
+ pieces = [piece for piece in pieces if piece and '=' in piece]
+ pieces = [tuple(piece.split("=", 1)) for piece in pieces]
+ pieces = [(name.strip(), value.strip('" ')) for name, value in pieces]
+ # Keep the first one in front (can use set down the line);
+ # the rest are sorted
+ if len(pieces) > 1:
+ pieces = [pieces[0]] + sorted(pieces[1:])
+ return pieces
+
+ def dedupe(pieces):
+ "Eliminate duplicate pieces"
+ deduped = {}
+ for name, value in pieces:
+ if name in deduped:
+ continue
+ deduped[name] = value
+ return sorted(deduped.items(),
+ key=pieces.index)
+
+ return dedupe(crumble(arg1)), crumble(arg2)
+
+
+def _cheap_response_parse(arg1, arg2):
+ """Silly parser for 'name=value; attr=attrvalue' format,
+ to test out response renders
+ """
+ def crumble(arg):
+ "Break down string into pieces"
+ lines = [line for line in arg if line]
+ done = []
+ for line in lines:
+ clauses = [clause for clause in line.split(';')]
+ import logging
+ logging.error("clauses %r", clauses)
+ name, value = re.split(" *= *", clauses[0], 1)
+ value = unquote(value.strip(' "'))
+ attrs = [re.split(" *= *", clause, 1) \
+ for clause in clauses[1:] if clause]
+ attrs = [attr for attr in attrs \
+ if attr[0] in Cookie.attribute_names]
+ attrs = [(k, v.strip(' "')) for k, v in attrs]
+ done.append((name, value, tuple(attrs)))
+ return done
+ result1 = crumble([arg1])
+ result2 = crumble(arg2)
+ return result1, result2
+
+
+def test_render_request():
+ """Test the request renderer against HEADER_CASES.
+ Perhaps a wider range of values is tested in TestCookies.test_init.
+ """
+ for case in HEADER_CASES:
+ arg, kwargs, cookies, _ = case
+ # can't reproduce examples which are supposed to throw parse errors
+ if isinstance(cookies, type) and issubclass(cookies, Exception):
+ continue
+ rendered = cookies.render_request()
+ expected, actual = _cheap_request_parse(arg, rendered)
+ # we can only use set() here because requests aren't order sensitive.
+ assert set(actual) == set(expected)
+
+
+def test_render_response():
+ """Test the response renderer against HEADER_CASES.
+ Perhaps a wider range of values is tested in TestCookies.test_init.
+ """
+ def filter_attrs(items):
+ "Filter out the items which are Cookie attributes"
+ return [(name, value) for (name, value) in items \
+ if name.lower() in Cookie.attribute_names]
+
+ for case in HEADER_CASES:
+ arg, kwargs, _, cookies = case
+ # can't reproduce examples which are supposed to throw parse errors
+ if isinstance(cookies, type) and issubclass(cookies, Exception):
+ continue
+ rendered = cookies.render_response()
+ expected, actual = _cheap_response_parse(arg, rendered)
+ expected, actual = set(expected), set(actual)
+ assert actual == expected, \
+ "failed: %s -> %s | %s != %s" % (arg, repr(cookies), actual,
+ expected)
+
+
+def test_backslash_roundtrip():
+ """Check that backslash in input or value stays backslash internally but
+ goes out as %5C, and comes back in again as a backslash.
+ """
+ reference = Cookie('xx', '\\')
+ assert len(reference.value) == 1
+ reference_request = reference.render_request()
+ reference_response = reference.render_response()
+ assert '\\' not in reference_request
+ assert '\\' not in reference_response
+ assert '%5C' in reference_request
+ assert '%5C' in reference_response
+
+ # Parse from multiple entry points
+ raw_cookie = r'xx="\"'
+ parsed_cookies = [Cookie.from_string(raw_cookie),
+ Cookies.from_request(raw_cookie)['xx'],
+ Cookies.from_response(raw_cookie)['xx']]
+ for parsed_cookie in parsed_cookies:
+ assert parsed_cookie.name == reference.name
+ assert parsed_cookie.value == reference.value
+ # Renders should match exactly
+ request = parsed_cookie.render_request()
+ response = parsed_cookie.render_response()
+ assert request == reference_request
+ assert response == reference_response
+ # Reparses should too
+ rrequest = Cookies.from_request(request)['xx']
+ rresponse = Cookies.from_response(response)['xx']
+ assert rrequest.name == reference.name
+ assert rrequest.value == reference.value
+ assert rresponse.name == reference.name
+ assert rresponse.value == reference.value
+
+
+def _simple_test(function, case_dict):
+ "Macro for making simple case-based tests for a function call"
+ def actual_test():
+ "Test generated by _simple_test"
+ for arg, expected in case_dict.items():
+ logging.info("case for %s: %s %s",
+ repr(function), repr(arg), repr(expected))
+ result = function(arg)
+ assert result == expected, \
+ "%s(%s) != %s, rather %s" % (
+ function.__name__,
+ repr(arg),
+ repr(expected),
+ repr(result))
+ actual_test.cases = case_dict
+ return actual_test
+
+test_strip_spaces_and_quotes = _simple_test(strip_spaces_and_quotes, {
+ ' ': '',
+ '""': '',
+ '"': '"',
+ "''": "''",
+ ' foo ': 'foo',
+ 'foo ': 'foo',
+ ' foo': 'foo',
+ ' "" ': '',
+ ' " " ': ' ',
+ ' " ': '"',
+ 'foo bar': 'foo bar',
+ '"foo bar': '"foo bar',
+ 'foo bar"': 'foo bar"',
+ '"foo bar"': 'foo bar',
+ '"dquoted"': 'dquoted',
+ ' "dquoted"': 'dquoted',
+ '"dquoted" ': 'dquoted',
+ ' "dquoted" ': 'dquoted',
+ })
+
+test_parse_string = _simple_test(parse_string, {
+ None: None,
+ '': '',
+ b'': '',
+ })
+
+test_parse_domain = _simple_test(parse_domain, {
+ ' foo ': 'foo',
+ '"foo"': 'foo',
+ ' "foo" ': 'foo',
+ '.foo': '.foo',
+ })
+
+test_parse_path = _simple_test(parse_path, {
+ })
+
+
+def test_render_date():
+ "Test date render routine directly with raw datetime objects"
+ # Date rendering is also exercised pretty well in test_parse_date.
+
+ cases = {
+ # Error for anything which is not known UTC/GMT
+ datetime(2001, 10, 11, tzinfo=FixedOffsetTz(60 * 60)):
+ AssertionError,
+ # A couple of baseline tests
+ datetime(1970, 1, 1, 0, 0, 0):
+ 'Thu, 01 Jan 1970 00:00:00 GMT',
+ datetime(2007, 9, 2, 13, 59, 49):
+ 'Sun, 02 Sep 2007 13:59:49 GMT',
+ # Don't produce 1-digit hour
+ datetime(2007, 9, 2, 1, 59, 49):
+ "Sun, 02 Sep 2007 01:59:49 GMT",
+ # Don't produce 1-digit minute
+ datetime(2007, 9, 2, 1, 1, 49):
+ "Sun, 02 Sep 2007 01:01:49 GMT",
+ # Don't produce 1-digit second
+ datetime(2007, 9, 2, 1, 1, 2):
+ "Sun, 02 Sep 2007 01:01:02 GMT",
+ # Allow crazy past/future years for cookie delete/persist
+ datetime(1900, 9, 2, 1, 1, 2):
+ "Sun, 02 Sep 1900 01:01:02 GMT",
+ datetime(3000, 9, 2, 1, 1, 2):
+ "Tue, 02 Sep 3000 01:01:02 GMT"
+ }
+
+ for dt, expected in cases.items():
+ if isinstance(expected, type) and issubclass(expected, Exception):
+ try:
+ render_date(dt)
+ except expected:
+ continue
+ except Exception as exception:
+ raise AssertionError("expected %s, got %s"
+ % (expected, exception))
+ raise AssertionError("expected %s, got no exception"
+ % (expected))
+ else:
+ assert render_date(dt) == expected
+
+
+def test_encoding_assumptions(check_unicode=False):
+ "Document and test assumptions underlying URL encoding scheme"
+ # Use the RFC 6265 based character class to build a regexp matcher that
+ # will tell us whether or not a character is okay to put in cookie values.
+ cookie_value_re = re.compile("[%s]" % Definitions.COOKIE_OCTET)
+ # Figure out which characters are okay. (unichr doesn't exist in Python 3,
+ # in Python 2 it shouldn't be an issue)
+ cookie_value_safe1 = set(chr(i) for i in range(0, 256) \
+ if cookie_value_re.match(chr(i)))
+ cookie_value_safe2 = set(unichr(i) for i in range(0, 256) \
+ if cookie_value_re.match(unichr(i)))
+ # These two are NOT the same on Python3
+ assert cookie_value_safe1 == cookie_value_safe2
+ # Now which of these are quoted by urllib.quote?
+ # caveat: Python 2.6 crashes if chr(127) is passed to quote and safe="",
+ # so explicitly set it to b"" to avoid the issue
+ safe_but_quoted = set(c for c in cookie_value_safe1
+ if quote(c, safe=b"") != c)
+ # Produce a set of characters to give to urllib.quote for the safe parm.
+ dont_quote = "".join(sorted(safe_but_quoted))
+ # Make sure it works (and that it works because of what we passed)
+ for c in dont_quote:
+ assert quote(c, safe="") != c
+ assert quote(c, safe=dont_quote) == c
+
+ # Make sure that the result of using dont_quote as the safe characters for
+ # urllib.quote produces stuff which is safe as a cookie value, but not
+ # different unless it has to be.
+ for i in range(0, 255):
+ original = chr(i)
+ quoted = quote(original, safe=dont_quote)
+ # If it is a valid value for a cookie, that quoting should leave it
+ # alone.
+ if cookie_value_re.match(original):
+ assert original == quoted
+ # If it isn't a valid value, then the quoted value should be valid.
+ else:
+ assert cookie_value_re.match(quoted)
+
+ assert set(dont_quote) == set("!#$%&'()*+/:<=>?@[]^`{|}~")
+
+ # From 128 on urllib.quote will not work on a unichr() return value.
+ # We'll want to encode utf-8 values into ASCII, then do the quoting.
+ # Verify that this is reversible.
+ if check_unicode:
+ for c in (unichr(i) for i in range(0, 1114112)):
+ asc = c.encode('utf-8')
+ quoted = quote(asc, safe=dont_quote)
+ unquoted = unquote(asc)
+ unicoded = unquoted.decode('utf-8')
+ assert unicoded == c
+
+ # Now do the same for extension-av.
+ extension_av_re = re.compile("[%s]" % Definitions.EXTENSION_AV)
+ extension_av_safe = set(chr(i) for i in range(0, 256) \
+ if extension_av_re.match(chr(i)))
+ safe_but_quoted = set(c for c in extension_av_safe \
+ if quote(c, safe="") != c)
+ dont_quote = "".join(sorted(safe_but_quoted))
+ for c in dont_quote:
+ assert quote(c, safe="") != c
+ assert quote(c, safe=dont_quote) == c
+
+ for i in range(0, 255):
+ original = chr(i)
+ quoted = quote(original, safe=dont_quote)
+ if extension_av_re.match(original):
+ assert original == quoted
+ else:
+ assert extension_av_re.match(quoted)
+
+ assert set(dont_quote) == set(' !"#$%&\'()*+,/:<=>?@[\\]^`{|}~')
+
+
+test_encode_cookie_value = _simple_test(encode_cookie_value,
+ {
+ None: None,
+ ' ': '%20',
+ # let through
+ '!': '!',
+ '#': '#',
+ '$': '$',
+ '%': '%',
+ '&': '&',
+ "'": "'",
+ '(': '(',
+ ')': ')',
+ '*': '*',
+ '+': '+',
+ '/': '/',
+ ':': ':',
+ '<': '<',
+ '=': '=',
+ '>': '>',
+ '?': '?',
+ '@': '@',
+ '[': '[',
+ ']': ']',
+ '^': '^',
+ '`': '`',
+ '{': '{',
+ '|': '|',
+ '}': '}',
+ '~': '~',
+ # not let through
+ ' ': '%20',
+ '"': '%22',
+ ',': '%2C',
+ '\\': '%5C',
+ 'crud,': 'crud%2C',
+ })
+
+test_encode_extension_av = _simple_test(encode_extension_av,
+ {
+ None: '',
+ '': '',
+ 'foo': 'foo',
+ # stuff this lets through that cookie-value does not
+ ' ': ' ',
+ '"': '"',
+ ',': ',',
+ '\\': '\\',
+ 'yo\\b': 'yo\\b',
+ })
+
+test_valid_value = _simple_test(valid_value,
+ {
+ None: False,
+ '': True,
+ 'ಠ_ಠ': True,
+ 'μῆνιν ἄειδε θεὰ Πηληϊάδεω Ἀχιλῆος': True,
+ '这事情得搞好啊': True,
+ '宮崎 駿': True,
+ 'أم كلثوم': True,
+ 'ედუარდ შევარდნაძე': True,
+ 'Myötähäpeä': True,
+ 'Pedro Almodóvar': True,
+# b'': True,
+# b'ABCDEFGHIJKLMNOPQRSTUVWXYZ': True,
+ 'Pedro Almodóvar'.encode('utf-8'): False,
+ })
+
+test_valid_date = _simple_test(valid_date,
+ {
+ datetime(2011, 1, 1): True,
+ datetime(2011, 1, 1, tzinfo=FixedOffsetTz(1000)): False,
+ datetime(2011, 1, 1, tzinfo=FixedOffsetTz(0)): True,
+ })
+
+test_valid_domain = _simple_test(valid_domain,
+ {
+ '': False,
+ ' ': False,
+ '.': False,
+ '..': False,
+ '.foo': True,
+ '"foo"': False,
+ 'foo': True,
+ })
+
+test_valid_path = _simple_test(valid_path,
+ {
+ '': False,
+ ' ': False,
+ '/': True,
+ 'a': False,
+ '/a': True,
+ '\x00': False,
+ '/\x00': False,
+ })
+
+
+def test_many_pairs():
+ """Simple 'lots of pairs' test
+ """
+ from_request = Cookies.from_request
+ header = "a0=0"
+ for i in range(1, 100):
+ i_range = list(range(0, i))
+ cookies = from_request(header)
+ assert len(cookies) == i
+ for j in i_range:
+ key = 'a%d' % j
+ assert cookies[key].value == str(j * 10)
+ assert cookies[key].render_request() == \
+ "a%d=%d" % (j, j * 10)
+
+ # same test, different entry point
+ cookies = Cookies()
+ cookies.parse_request(header)
+ assert len(cookies) == i
+ for j in i_range:
+ key = 'a%d' % j
+ assert cookies[key].value == str(j * 10)
+ assert cookies[key].render_request() == \
+ "a%d=%d" % (j, j * 10)
+
+ # Add another piece to the header
+ header += "; a%d=%d" % (i, i * 10)
+
+
+def test_parse_value():
+ # this really just glues together strip_spaces_and_quotes
+ # and parse_string, so reuse their test cases
+ cases = {}
+ cases.update(test_strip_spaces_and_quotes.cases)
+ cases.update(test_parse_string.cases)
+ for inp, expected in cases.items():
+ print("case", inp, expected)
+ # Test with spaces allowed
+ obtained = parse_value(inp, allow_spaces=True)
+ assert obtained == expected
+
+ # Test with spaces disallowed, if it could do anything
+ if (isinstance(inp, bytes) and ' ' in inp.decode('utf-8').strip()) \
+ or (not isinstance(inp, bytes) and inp and ' ' in inp.strip()):
+ try:
+ obtained = parse_value(inp, allow_spaces=False)
+ except AssertionError:
+ pass
+ else:
+ raise AssertionError("parse_value(%s, allow_spaces=False) "
+ "did not raise" % repr(inp))
+
+
+def test_total_seconds():
+ """This wrapper probably doesn't need testing so much, and it's not
+ entirely trivial to fully exercise, but the coverage is nice to have
+ """
+ def basic_sanity(td_type):
+ assert _total_seconds(td_type(seconds=1)) == 1
+ assert _total_seconds(td_type(seconds=1, minutes=1)) == 1 + 60
+ assert _total_seconds(td_type(seconds=1, minutes=1, hours=1)) == \
+ 1 + 60 + 60 * 60
+
+ basic_sanity(timedelta)
+
+ class FakeTimeDelta(object):
+ def __init__(self, days=0, hours=0, minutes=0, seconds=0,
+ microseconds=0):
+ self.days = days
+ self.seconds = seconds + minutes * 60 + hours * 60 * 60
+ self.microseconds = microseconds
+
+ assert not hasattr(FakeTimeDelta, "total_seconds")
+ basic_sanity(FakeTimeDelta)
+
+ FakeTimeDelta.total_seconds = lambda: None.missing_attribute
+ try:
+ _total_seconds(None)
+ except AttributeError as e:
+ assert 'total_seconds' not in str(e)
+
+
+def test_valid_value_bad_quoter():
+ def bad_quote(s):
+ return "Frogs"
+
+ assert valid_value("eep", quote=bad_quote) == False
diff --git a/third_party/python/coverage/.editorconfig b/third_party/python/coverage/.editorconfig
new file mode 100644
index 0000000000..f560af7444
--- /dev/null
+++ b/third_party/python/coverage/.editorconfig
@@ -0,0 +1,44 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+# This file is for unifying the coding style for different editors and IDEs.
+# More information at http://EditorConfig.org
+
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+indent_size = 4
+indent_style = space
+insert_final_newline = true
+max_line_length = 80
+trim_trailing_whitespace = true
+
+[*.py]
+max_line_length = 100
+
+[*.c]
+max_line_length = 100
+
+[*.h]
+max_line_length = 100
+
+[*.yml]
+indent_size = 2
+
+[*.rst]
+max_line_length = 79
+
+[Makefile]
+indent_style = tab
+indent_size = 8
+
+[*,cover]
+trim_trailing_whitespace = false
+
+[*.diff]
+trim_trailing_whitespace = false
+
+[.git/*]
+trim_trailing_whitespace = false
diff --git a/third_party/python/coverage/.readthedocs.yml b/third_party/python/coverage/.readthedocs.yml
new file mode 100644
index 0000000000..ed3737fbe7
--- /dev/null
+++ b/third_party/python/coverage/.readthedocs.yml
@@ -0,0 +1,22 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+#
+# ReadTheDocs configuration.
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html
+
+version: 2
+
+sphinx:
+ builder: html
+ configuration: doc/conf.py
+
+# No other formats than HTML
+formats: []
+
+python:
+ version: 3.7
+ install:
+ - requirements: doc/requirements.pip
+ - method: pip
+ path: .
+ system_packages: false
diff --git a/third_party/python/coverage/.travis.yml b/third_party/python/coverage/.travis.yml
new file mode 100644
index 0000000000..f5e8fad19d
--- /dev/null
+++ b/third_party/python/coverage/.travis.yml
@@ -0,0 +1,52 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+#
+# Tell Travis what to do
+# https://travis-ci.com/nedbat/coveragepy
+
+dist: xenial
+language: python
+
+cache: pip
+
+python:
+ - '2.7'
+ - '3.5'
+ - '3.6'
+ - '3.7'
+ - '3.8'
+ - 'pypy2.7-6.0'
+ - 'pypy3.5-6.0'
+
+# Only test Python 3.8 on the aarch64 platform, since the matrix already has a
+# lot of jobs and takes a long time.
+matrix:
+ include:
+ - python: 3.8
+ arch: arm64
+ env:
+ - COVERAGE_COVERAGE=no
+ - python: 3.8
+ arch: arm64
+ env:
+ - COVERAGE_COVERAGE=yes
+
+env:
+ matrix:
+ - COVERAGE_COVERAGE=no
+ - COVERAGE_COVERAGE=yes
+
+install:
+ - pip install -r requirements/ci.pip
+ - pip freeze
+
+script:
+ - tox
+
+after_script:
+ - |
+ if [[ $COVERAGE_COVERAGE == 'yes' ]]; then
+ python igor.py combine_html
+ pip install codecov
+ codecov -X gcov --file coverage.xml
+ fi
diff --git a/third_party/python/coverage/CHANGES.rst b/third_party/python/coverage/CHANGES.rst
new file mode 100644
index 0000000000..cd34eab809
--- /dev/null
+++ b/third_party/python/coverage/CHANGES.rst
@@ -0,0 +1,2743 @@
+.. Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+.. For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+==============================
+Change history for coverage.py
+==============================
+
+These changes are listed in decreasing version number order. Note this can be
+different from a strict chronological order when there are two branches in
+development at the same time, such as 4.5.x and 5.0.
+
+This list is detailed and covers changes in each pre-release version. If you
+want to know what's different in 5.0 since 4.5.x, see :ref:`whatsnew5x`.
+
+
+ .. When updating the "Unreleased" header to a specific version, use this
+ .. format. Don't forget the jump target:
+ ..
+ .. .. _changes_981:
+ ..
+ .. Version 9.8.1 --- 2027-07-27
+ .. ----------------------------
+
+
+.. _changes_51:
+
+Version 5.1 --- 2020-04-12
+--------------------------
+
+- The JSON report now includes counts of covered and missing branches. Thanks,
+ Salvatore Zagaria.
+
+- On Python 3.8, try-finally-return reported wrong branch coverage with
+ decorated async functions (`issue 946`_). This is now fixed. Thanks, Kjell
+ Braden.
+
+- The :meth:`~coverage.Coverage.get_option` and
+ :meth:`~coverage.Coverage.set_option` methods can now manipulate the
+ ``[paths]`` configuration setting. Thanks to Bernát Gábor for the fix for
+ `issue 967`_.
+
+.. _issue 946: https://github.com/nedbat/coveragepy/issues/946
+.. _issue 967: https://github.com/nedbat/coveragepy/issues/967
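+
+For illustration, a minimal sketch of the :meth:`~coverage.Coverage.get_option`
+and :meth:`~coverage.Coverage.set_option` calls mentioned above.  The
+``"run:branch"`` name uses the documented ``"section:option"`` form; reading
+and writing ``"paths"`` as a whole section, and the shape assumed for its
+value, should be checked against the coverage.py docs.
+
+.. code-block:: python
+
+    import coverage
+
+    cov = coverage.Coverage()
+    # Read and change an ordinary option using the "section:option" form.
+    cov.set_option("run:branch", True)
+    assert cov.get_option("run:branch") is True
+    # As of 5.1 the [paths] setting can be manipulated the same way
+    # (value shape assumed here: mapping of alias -> list of path patterns).
+    cov.set_option("paths", {"source": ["src/", "*/site-packages/mypkg"]})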
+
+
+.. _changes_504:
+
+Version 5.0.4 --- 2020-03-16
+----------------------------
+
+- If using the ``[run] relative_files`` setting, the XML report will use
+ relative files in the ``<source>`` elements indicating the location of source
+ code. Closes `issue 948`_.
+
+- The textual summary report could report missing lines with negative line
+ numbers on PyPy3 7.1 (`issue 943`_). This is now fixed.
+
+- Windows wheels for Python 3.8 were incorrectly built, but are now fixed.
+ (`issue 949`_)
+
+- Updated Python 3.9 support to 3.9a4.
+
+- HTML reports couldn't be sorted if localStorage wasn't available. This is now
+ fixed: sorting works even though the sorting setting isn't retained. (`issue
+ 944`_ and `pull request 945`_). Thanks, Abdeali Kothari.
+
+.. _issue 943: https://github.com/nedbat/coveragepy/issues/943
+.. _issue 944: https://github.com/nedbat/coveragepy/issues/944
+.. _pull request 945: https://github.com/nedbat/coveragepy/pull/945
+.. _issue 948: https://github.com/nedbat/coveragepy/issues/948
+.. _issue 949: https://github.com/nedbat/coveragepy/issues/949
+
+
+.. _changes_503:
+
+Version 5.0.3 --- 2020-01-12
+----------------------------
+
+- A performance improvement in 5.0.2 didn't work for test suites that changed
+ directory before combining data, causing "Couldn't use data file: no such
+ table: meta" errors (`issue 916`_). This is now fixed.
+
+- Coverage could fail to run your program with some form of "ModuleNotFound" or
+ "ImportError" trying to import from the current directory. This would happen
+ if coverage had been packaged into a zip file (for example, on Windows), or
+ was found indirectly (for example, by pyenv-virtualenv). A number of
+ different scenarios were described in `issue 862`_ which is now fixed. Huge
+ thanks to Agbonze O. Jeremiah for reporting it, and Alexander Waters and
+ George-Cristian Bîrzan for protracted debugging sessions.
+
+- Added the "premain" debug option.
+
+- Added SQLite compile-time options to the "debug sys" output.
+
+.. _issue 862: https://github.com/nedbat/coveragepy/issues/862
+.. _issue 916: https://github.com/nedbat/coveragepy/issues/916
+
+
+.. _changes_502:
+
+Version 5.0.2 --- 2020-01-05
+----------------------------
+
+- Programs that used multiprocessing and changed directories would fail under
+ coverage. This is now fixed (`issue 890`_). A side effect is that debug
+ information about the config files read now shows absolute paths to the
+ files.
+
+- When running programs as modules (``coverage run -m``) with ``--source``,
+ some measured modules were imported before coverage starts. This resulted in
+ unwanted warnings ("Already imported a file that will be measured") and a
+ reduction in coverage totals (`issue 909`_). This is now fixed.
+
+- If no data was collected, an exception about "No data to report" could happen
+ instead of a 0% report being created (`issue 884`_). This is now fixed.
+
+- The handling of source files with non-encodable file names has changed.
+ Previously, if a file name could not be encoded as UTF-8, an error occurred,
+ as described in `issue 891`_. Now, those files will not be measured, since
+ their data would not be recordable.
+
+- A new warning ("dynamic-conflict") is issued if two mechanisms are trying to
+ change the dynamic context. Closes `issue 901`_.
+
+- ``coverage run --debug=sys`` would fail with an AttributeError. This is now
+ fixed (`issue 907`_).
+
+.. _issue 884: https://github.com/nedbat/coveragepy/issues/884
+.. _issue 890: https://github.com/nedbat/coveragepy/issues/890
+.. _issue 891: https://github.com/nedbat/coveragepy/issues/891
+.. _issue 901: https://github.com/nedbat/coveragepy/issues/901
+.. _issue 907: https://github.com/nedbat/coveragepy/issues/907
+.. _issue 909: https://github.com/nedbat/coveragepy/issues/909
+
+
+.. _changes_501:
+
+Version 5.0.1 --- 2019-12-22
+----------------------------
+
+- If a 4.x data file is the cause of a "file is not a database" error, then use
+ a more specific error message, "Looks like a coverage 4.x data file, are you
+ mixing versions of coverage?" Helps diagnose the problems described in
+ `issue 886`_.
+
+- Measurement contexts and relative file names didn't work together, as
+ reported in `issue 899`_ and `issue 900`_. This is now fixed, thanks to
+ David Szotten.
+
+- When using ``coverage run --concurrency=multiprocessing``, all data files
+ should be named with parallel-ready suffixes. 5.0 mistakenly named the main
+ process' file with no suffix when using ``--append``. This is now fixed,
+ closing `issue 880`_.
+
+- Fixed a problem on Windows when the current directory is changed to a
+ different drive (`issue 895`_). Thanks, Olivier Grisel.
+
+- Updated Python 3.9 support to 3.9a2.
+
+.. _issue 880: https://github.com/nedbat/coveragepy/issues/880
+.. _issue 886: https://github.com/nedbat/coveragepy/issues/886
+.. _issue 895: https://github.com/nedbat/coveragepy/issues/895
+.. _issue 899: https://github.com/nedbat/coveragepy/issues/899
+.. _issue 900: https://github.com/nedbat/coveragepy/issues/900
+
+
+.. _changes_50:
+
+Version 5.0 --- 2019-12-14
+--------------------------
+
+Nothing new beyond 5.0b2.
+
+
+.. _changes_50b2:
+
+Version 5.0b2 --- 2019-12-08
+----------------------------
+
+- An experimental ``[run] relative_files`` setting tells coverage to store
+ relative file names in the data file. This makes it easier to run tests in
+ one (or many) environments, and then report in another. It has not had much
+ real-world testing, so it may change in incompatible ways in the future.
+
+- When constructing a :class:`coverage.Coverage` object, `data_file` can be
+ specified as None to prevent writing any data file at all. In previous
+ versions, an explicit `data_file=None` argument would use the default of
+ ".coverage". Fixes `issue 871`_.
+
+- Python files run with ``-m`` now have ``__spec__`` defined properly. This
+ fixes `issue 745`_ (about not being able to run unittest tests that spawn
+ subprocesses), and `issue 838`_, which described the problem directly.
+
+- The ``[paths]`` configuration section is now ordered. If you specify more
+ than one list of patterns, the first one that matches will be used. Fixes
+ `issue 649`_.
+
+- The :func:`.coverage.numbits.register_sqlite_functions` function now also
+ registers `numbits_to_nums` for use in SQLite queries. Thanks, Simon
+ Willison.
+
+- Python 3.9a1 is supported.
+
+- Coverage.py has a mascot: :ref:`Sleepy Snake <sleepy>`.
+
+.. _issue 649: https://github.com/nedbat/coveragepy/issues/649
+.. _issue 745: https://github.com/nedbat/coveragepy/issues/745
+.. _issue 838: https://github.com/nedbat/coveragepy/issues/838
+.. _issue 871: https://github.com/nedbat/coveragepy/issues/871
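+
+For illustration, opting in to the experimental setting described above is a
+one-line configuration change, shown here in a standard .coveragerc (the same
+setting works in the other supported configuration files):
+
+.. code-block:: ini
+
+    # .coveragerc
+    [run]
+    relative_files = True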
+
+
+.. _changes_50b1:
+
+Version 5.0b1 --- 2019-11-11
+----------------------------
+
+- The HTML and textual reports now have a ``--skip-empty`` option that skips
+ files with no statements, notably ``__init__.py`` files. Thanks, Reya B.
+
+- Configuration can now be read from `TOML`_ files. This requires installing
+ coverage.py with the ``[toml]`` extra. The standard "pyproject.toml" file
+ will be read automatically if no other configuration file is found, with
+ settings in the ``[tool.coverage.]`` namespace. Thanks to Frazer McLean for
+ implementation and persistence. Finishes `issue 664`_.
+
+- The ``[run] note`` setting has been deprecated. Using it will result in a
+ warning, and the note will not be written to the data file. The
+ corresponding :class:`.CoverageData` methods have been removed.
+
+- The HTML report has been reimplemented (no more table around the source
+ code). This allowed for a better presentation of the context information,
+ hopefully resolving `issue 855`_.
+
+- Added sqlite3 module version information to ``coverage debug sys`` output.
+
+- Asking the HTML report to show contexts (``[html] show_contexts=True`` or
+ ``coverage html --show-contexts``) will issue a warning if there were no
+ contexts measured (`issue 851`_).
+
+.. _TOML: https://github.com/toml-lang/toml#readme
+.. _issue 664: https://github.com/nedbat/coveragepy/issues/664
+.. _issue 851: https://github.com/nedbat/coveragepy/issues/851
+.. _issue 855: https://github.com/nedbat/coveragepy/issues/855
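+
+As a sketch of the TOML support described above: with the ``[toml]`` extra
+installed, settings live under the ``[tool.coverage.]`` namespace in
+"pyproject.toml".  The specific option values shown are only placeholders.
+
+.. code-block:: toml
+
+    # pyproject.toml
+    [tool.coverage.run]
+    branch = true
+    source = ["mypackage"]
+
+    [tool.coverage.report]
+    show_missing = true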
+
+
+.. _changes_50a8:
+
+Version 5.0a8 --- 2019-10-02
+----------------------------
+
+- The :class:`.CoverageData` API has changed how queries are limited to
+ specific contexts. Now you use :meth:`.CoverageData.set_query_context` to
+ set a single exact-match string, or :meth:`.CoverageData.set_query_contexts`
+ to set a list of regular expressions to match contexts. This changes the
+ command-line ``--contexts`` option to use regular expressions instead of
+ filename-style wildcards.
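+
+A minimal sketch of the query-context calls named above; the context name and
+regular expressions are placeholders, and the data file is read from its
+default location.
+
+.. code-block:: python
+
+    from coverage import CoverageData
+
+    data = CoverageData()
+    data.read()
+    # Limit queries to one exact context...
+    data.set_query_context("test_login")
+    # ...or to any context matching one of these regular expressions.
+    data.set_query_contexts([r"test_.*", r"integration\..*"])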
+
+
+.. _changes_50a7:
+
+Version 5.0a7 --- 2019-09-21
+----------------------------
+
+- Data can now be "reported" in JSON format, for programmatic use, as requested
+ in `issue 720`_. The new ``coverage json`` command writes raw and summarized
+ data to a JSON file. Thanks, Matt Bachmann.
+
+- Dynamic contexts are now supported in the Python tracer, which is important
+ for PyPy users. Closes `issue 846`_.
+
+- The compact line number representation introduced in 5.0a6 is called a
+ "numbits." The :mod:`coverage.numbits` module provides functions for working
+ with them.
+
+- The reporting methods used to permanently apply their arguments to the
+ configuration of the Coverage object. Now they no longer do. The arguments
+ affect the operation of the method, but do not persist.
+
+- A class named "test_something" no longer confuses the ``test_function``
+ dynamic context setting. Fixes `issue 829`_.
+
+- Fixed an unusual tokenizing issue with backslashes in comments. Fixes
+ `issue 822`_.
+
+- ``debug=plugin`` didn't properly support configuration or dynamic context
+ plugins, but now it does, closing `issue 834`_.
+
+.. _issue 720: https://github.com/nedbat/coveragepy/issues/720
+.. _issue 822: https://github.com/nedbat/coveragepy/issues/822
+.. _issue 834: https://github.com/nedbat/coveragepy/issues/834
+.. _issue 829: https://github.com/nedbat/coveragepy/issues/829
+.. _issue 846: https://github.com/nedbat/coveragepy/issues/846
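+
+For example, the JSON report described above can be produced from the command
+line once data has been collected (the test command is a placeholder):
+
+.. code-block:: sh
+
+    coverage run -m pytest
+    coverage json        # writes coverage.json by default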
+
+
+.. _changes_50a6:
+
+Version 5.0a6 --- 2019-07-16
+----------------------------
+
+- Reporting on contexts. Big thanks to Stephan Richter and Albertas Agejevas
+ for the contribution.
+
+ - The ``--contexts`` option is available on the ``report`` and ``html``
+ commands. It's a comma-separated list of shell-style wildcards, selecting
+ the contexts to report on. Only contexts matching one of the wildcards
+ will be included in the report.
+
+ - The ``--show-contexts`` option for the ``html`` command adds context
+ information to each covered line. Hovering over the "ctx" marker at the
+ end of the line reveals a list of the contexts that covered the line.
+
+- Database changes:
+
+ - Line numbers are now stored in a much more compact way. For each file and
+ context, a single binary string is stored with a bit per line number. This
+ greatly improves memory use, but makes ad-hoc use difficult.
+
+ - Dynamic contexts with no data are no longer written to the database.
+
+ - SQLite data storage is now faster. There's no longer a reason to keep the
+ JSON data file code, so it has been removed.
+
+- Changes to the :class:`.CoverageData` interface:
+
+ - The new :meth:`.CoverageData.dumps` method serializes the data to a string,
+ and a corresponding :meth:`.CoverageData.loads` method reconstitutes this
+ data. The format of the data string is subject to change at any time, and
+ so should only be used between two installations of the same version of
+ coverage.py.
+
+ - The :meth:`CoverageData constructor<.CoverageData.__init__>` has a new
+ argument, `no_disk` (default: False). Setting it to True prevents writing
+ any data to the disk. This is useful for transient data objects.
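+
+  A minimal sketch combining both additions (purely illustrative)::
+
+      from coverage import CoverageData
+
+      original = CoverageData()
+      original.read()                    # read the existing data file
+      blob = original.dumps()            # serialize to a byte string
+
+      copy = CoverageData(no_disk=True)  # transient object, never written out
+      copy.loads(blob)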
+
+- Added the classmethod :meth:`.Coverage.current` to get the latest started
+ Coverage instance.
+
+- Multiprocessing support in Python 3.8 was broken, but is now fixed. Closes
+ `issue 828`_.
+
+- Error handling during reporting has changed slightly. All reporting methods
+ now behave the same. The ``--ignore-errors`` option keeps errors from
+  stopping the reporting, but files that couldn't be parsed as Python will
+  always be reported as warnings. As with other warnings, you can suppress
+  them with the ``[run] disable_warnings`` configuration setting.
+
+- Coverage.py no longer fails if the user program deletes its current
+ directory. Fixes `issue 806`_. Thanks, Dan Hemberger.
+
+- The scrollbar markers in the HTML report now accurately show the highlighted
+ lines, regardless of what categories of line are highlighted.
+
+- The hack to accommodate ShiningPanda_ looking for an obsolete internal data
+ file has been removed, since ShiningPanda 0.22 fixed it four years ago.
+
+- The deprecated `Reporter.file_reporters` property has been removed.
+
+.. _ShiningPanda: https://wiki.jenkins.io/display/JENKINS/ShiningPanda+Plugin
+.. _issue 806: https://github.com/nedbat/coveragepy/pull/806
+.. _issue 828: https://github.com/nedbat/coveragepy/issues/828
+
+
+.. _changes_50a5:
+
+Version 5.0a5 --- 2019-05-07
+----------------------------
+
+- Drop support for Python 3.4.
+
+- Dynamic contexts can now be set two new ways, both thanks to Justas
+ Sadzevičius.
+
+ - A plugin can implement a ``dynamic_context`` method to check frames for
+ whether a new context should be started. See
+ :ref:`dynamic_context_plugins` for more details.
+
+ - Another tool (such as a test runner) can use the new
+ :meth:`.Coverage.switch_context` method to explicitly change the context.
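+
+  For the second approach, a minimal sketch of what a test runner might do
+  (the test function and context label are illustrative)::
+
+      import coverage
+
+      def one_test():
+          assert 1 + 1 == 2
+
+      cov = coverage.Coverage()
+      cov.start()
+      cov.switch_context("test_addition")   # label everything measured from here
+      one_test()
+      cov.stop()
+      cov.save()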
+
+- The ``dynamic_context = test_function`` setting now works with Python 2
+ old-style classes, though it only reports the method name, not the class it
+ was defined on. Closes `issue 797`_.
+
+- ``fail_under`` values more than 100 are reported as errors. Thanks to Mike
+ Fiedler for closing `issue 746`_.
+
+- The "missing" values in the text output are now sorted by line number, so
+ that missing branches are reported near the other lines they affect. The
+ values used to show all missing lines, and then all missing branches.
+
+- Access to the SQLite database used for data storage is now thread-safe.
+ Thanks, Stephan Richter. This closes `issue 702`_.
+
+- Combining data stored in SQLite is now about twice as fast, fixing `issue
+ 761`_. Thanks, Stephan Richter.
+
+- The ``filename`` attribute on :class:`.CoverageData` objects has been made
+ private. You can use the ``data_filename`` method to get the actual file
+ name being used to store data, and the ``base_filename`` method to get the
+ original filename before parallelizing suffixes were added. This is part of
+ fixing `issue 708`_.
+
+- Line numbers in the HTML report now align properly with source lines, even
+ when Chrome's minimum font size is set, fixing `issue 748`_. Thanks Wen Ye.
+
+.. _issue 702: https://github.com/nedbat/coveragepy/issues/702
+.. _issue 708: https://github.com/nedbat/coveragepy/issues/708
+.. _issue 746: https://github.com/nedbat/coveragepy/issues/746
+.. _issue 748: https://github.com/nedbat/coveragepy/issues/748
+.. _issue 761: https://github.com/nedbat/coveragepy/issues/761
+.. _issue 797: https://github.com/nedbat/coveragepy/issues/797
+
+
+.. _changes_50a4:
+
+Version 5.0a4 --- 2018-11-25
+----------------------------
+
+- You can specify the command line to run your program with the ``[run]
+ command_line`` configuration setting, as requested in `issue 695`_.
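+
+  For example, in ``.coveragerc`` (the pytest invocation is only an
+  illustration)::
+
+      [run]
+      command_line = -m pytest -q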
+
+- Coverage will create directories as needed for the data file if they don't
+ exist, closing `issue 721`_.
+
+- The ``coverage run`` command has always adjusted the first entry in sys.path,
+ to properly emulate how Python runs your program. Now this adjustment is
+ skipped if sys.path[0] is already different than Python's default. This
+ fixes `issue 715`_.
+
+- Improvements to context support:
+
+ - The "no such table: meta" error is fixed.: `issue 716`_.
+
+ - Combining data files is now much faster.
+
+- Python 3.8 (as of today!) passes all tests.
+
+.. _issue 695: https://github.com/nedbat/coveragepy/issues/695
+.. _issue 715: https://github.com/nedbat/coveragepy/issues/715
+.. _issue 716: https://github.com/nedbat/coveragepy/issues/716
+.. _issue 721: https://github.com/nedbat/coveragepy/issues/721
+
+
+.. _changes_50a3:
+
+Version 5.0a3 --- 2018-10-06
+----------------------------
+
+- Context support: static contexts let you specify a label for a coverage run,
+ which is recorded in the data, and retained when you combine files. See
+ :ref:`contexts` for more information.
+
+- Dynamic contexts: specifying ``[run] dynamic_context = test_function`` in the
+ config file will record the test function name as a dynamic context during
+ execution. This is the core of "Who Tests What" (`issue 170`_). Things to
+ note:
+
+ - There is no reporting support yet. Use SQLite to query the .coverage file
+ for information. Ideas are welcome about how reporting could be extended
+ to use this data.
+
+ - There's a noticeable slow-down before any test is run.
+
+ - Data files will now be roughly N times larger, where N is the number of
+ tests you have. Combining data files is therefore also N times slower.
+
+ - No other values for ``dynamic_context`` are recognized yet. Let me know
+ what else would be useful. I'd like to use a pytest plugin to get better
+ information directly from pytest, for example.
+
+.. _issue 170: https://github.com/nedbat/coveragepy/issues/170
+
+- Environment variable substitution in configuration files now supports two
+ syntaxes for controlling the behavior of undefined variables: if ``VARNAME``
+ is not defined, ``${VARNAME?}`` will raise an error, and ``${VARNAME-default
+ value}`` will use "default value".
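+
+  For example (``DATA_DIR`` is a hypothetical variable name)::
+
+      [run]
+      # Uses /tmp if DATA_DIR is unset; ${DATA_DIR?} would raise an error instead.
+      data_file = ${DATA_DIR-/tmp}/.coverage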
+
+- Partial support for Python 3.8, which has not yet released an alpha. Fixes
+ `issue 707`_ and `issue 714`_.
+
+.. _issue 707: https://github.com/nedbat/coveragepy/issues/707
+.. _issue 714: https://github.com/nedbat/coveragepy/issues/714
+
+
+.. _changes_50a2:
+
+Version 5.0a2 --- 2018-09-03
+----------------------------
+
+- Coverage's data storage has changed. In version 4.x, .coverage files were
+ basically JSON. Now, they are SQLite databases. This means the data file
+  can be created earlier than it used to be. A large amount of code was
+ refactored to support this change.
+
+ - Because the data file is created differently than previous releases, you
+ may need ``parallel=true`` where you didn't before.
+
+ - The old data format is still available (for now) by setting the environment
+ variable COVERAGE_STORAGE=json. Please tell me if you think you need to
+ keep the JSON format.
+
+ - The database schema is guaranteed to change in the future, to support new
+ features. I'm looking for opinions about making the schema part of the
+ public API to coverage.py or not.
+
+- Development moved from `Bitbucket`_ to `GitHub`_.
+
+- HTML files no longer have trailing and extra whitespace.
+
+- The sort order in the HTML report is stored in local storage rather than
+ cookies, closing `issue 611`_. Thanks, Federico Bond.
+
+- pickle2json, for converting v3 data files to v4 data files, has been removed.
+
+.. _Bitbucket: https://bitbucket.org/ned/coveragepy
+.. _GitHub: https://github.com/nedbat/coveragepy
+
+.. _issue 611: https://github.com/nedbat/coveragepy/issues/611
+
+
+.. _changes_50a1:
+
+Version 5.0a1 --- 2018-06-05
+----------------------------
+
+- Coverage.py no longer supports Python 2.6 or 3.3.
+
+- The location of the configuration file can now be specified with a
+ ``COVERAGE_RCFILE`` environment variable, as requested in `issue 650`_.
+
+- Namespace packages are supported on Python 3.7, where they used to cause
+ TypeErrors about path being None. Fixes `issue 700`_.
+
+- A new warning (``already-imported``) is issued if measurable files have
+ already been imported before coverage.py started measurement. See
+ :ref:`cmd_warnings` for more information.
+
+- Running coverage many times for small runs in a single process should be
+ faster, closing `issue 625`_. Thanks, David MacIver.
+
+- Large HTML report pages load faster. Thanks, Pankaj Pandey.
+
+.. _issue 625: https://bitbucket.org/ned/coveragepy/issues/625/lstat-dominates-in-the-case-of-small
+.. _issue 650: https://bitbucket.org/ned/coveragepy/issues/650/allow-setting-configuration-file-location
+.. _issue 700: https://github.com/nedbat/coveragepy/issues/700
+
+
+.. _changes_454:
+
+Version 4.5.4 --- 2019-07-29
+----------------------------
+
+- Multiprocessing support in Python 3.8 was broken, but is now fixed. Closes
+ `issue 828`_.
+
+.. _issue 828: https://github.com/nedbat/coveragepy/issues/828
+
+
+.. _changes_453:
+
+Version 4.5.3 --- 2019-03-09
+----------------------------
+
+- Only packaging metadata changes.
+
+
+.. _changes_452:
+
+Version 4.5.2 --- 2018-11-12
+----------------------------
+
+- Namespace packages are supported on Python 3.7, where they used to cause
+ TypeErrors about path being None. Fixes `issue 700`_.
+
+- Python 3.8 (as of today!) passes all tests. Fixes `issue 707`_ and
+ `issue 714`_.
+
+- Development moved from `Bitbucket`_ to `GitHub`_.
+
+.. _issue 700: https://github.com/nedbat/coveragepy/issues/700
+.. _issue 707: https://github.com/nedbat/coveragepy/issues/707
+.. _issue 714: https://github.com/nedbat/coveragepy/issues/714
+
+.. _Bitbucket: https://bitbucket.org/ned/coveragepy
+.. _GitHub: https://github.com/nedbat/coveragepy
+
+
+.. _changes_451:
+
+Version 4.5.1 --- 2018-02-10
+----------------------------
+
+- Now that 4.5 properly separated the ``[run] omit`` and ``[report] omit``
+ settings, an old bug has become apparent. If you specified a package name
+ for ``[run] source``, then omit patterns weren't matched inside that package.
+ This bug (`issue 638`_) is now fixed.
+
+- On Python 3.7, reporting about a decorated function with no body other than a
+ docstring would crash coverage.py with an IndexError (`issue 640`_). This is
+ now fixed.
+
+- Configurer plugins are now reported in the output of ``--debug=sys``.
+
+.. _issue 638: https://bitbucket.org/ned/coveragepy/issues/638/run-omit-is-ignored-since-45
+.. _issue 640: https://bitbucket.org/ned/coveragepy/issues/640/indexerror-reporting-on-an-empty-decorated
+
+
+.. _changes_45:
+
+Version 4.5 --- 2018-02-03
+--------------------------
+
+- A new kind of plugin is supported: configurers are invoked at start-up to
+ allow more complex configuration than the .coveragerc file can easily do.
+ See :ref:`api_plugin` for details. This solves the complex configuration
+ problem described in `issue 563`_.
+
+- The ``fail_under`` option can now be a float. Note that you must specify the
+ ``[report] precision`` configuration option for the fractional part to be
+ used. Thanks to Lars Hupfeldt Nielsen for help with the implementation.
+ Fixes `issue 631`_.
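+
+  For example::
+
+      [report]
+      precision = 2
+      fail_under = 95.25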
+
+- The ``include`` and ``omit`` options can be specified for both the ``[run]``
+ and ``[report]`` phases of execution. 4.4.2 introduced some incorrect
+ interactions between those phases, where the options for one were confused
+ for the other. This is now corrected, fixing `issue 621`_ and `issue 622`_.
+ Thanks to Daniel Hahler for seeing more clearly than I could.
+
+- The ``coverage combine`` command used to always overwrite the data file, even
+ when no data had been read from apparently combinable files. Now, an error
+ is raised if we thought there were files to combine, but in fact none of them
+ could be used. Fixes `issue 629`_.
+
+- The ``coverage combine`` command could get confused about path separators
+ when combining data collected on Windows with data collected on Linux, as
+ described in `issue 618`_. This is now fixed: the result path always uses
+ the path separator specified in the ``[paths]`` result.
+
+- On Windows, the HTML report could fail when source trees are deeply nested,
+ due to attempting to create HTML filenames longer than the 250-character
+ maximum. Now filenames will never get much larger than 200 characters,
+ fixing `issue 627`_. Thanks to Alex Sandro for helping with the fix.
+
+.. _issue 563: https://bitbucket.org/ned/coveragepy/issues/563/platform-specific-configuration
+.. _issue 618: https://bitbucket.org/ned/coveragepy/issues/618/problem-when-combining-windows-generated
+.. _issue 621: https://bitbucket.org/ned/coveragepy/issues/621/include-ignored-warning-when-using
+.. _issue 622: https://bitbucket.org/ned/coveragepy/issues/622/report-omit-overwrites-run-omit
+.. _issue 627: https://bitbucket.org/ned/coveragepy/issues/627/failure-generating-html-reports-when-the
+.. _issue 629: https://bitbucket.org/ned/coveragepy/issues/629/multiple-use-of-combine-leads-to-empty
+.. _issue 631: https://bitbucket.org/ned/coveragepy/issues/631/precise-coverage-percentage-value
+
+
+.. _changes_442:
+
+Version 4.4.2 --- 2017-11-05
+----------------------------
+
+- Support for Python 3.7. In some cases, class and module docstrings are no
+ longer counted in statement totals, which could slightly change your total
+ results.
+
+- Specifying both ``--source`` and ``--include`` no longer silently ignores the
+  include setting; instead it displays a warning. Thanks, Loïc Dachary. Closes
+ `issue 265`_ and `issue 101`_.
+
+- Fixed a race condition when saving data and multiple threads are tracing
+ (`issue 581`_). It could produce a "dictionary changed size during iteration"
+ RuntimeError. I believe this mostly but not entirely fixes the race
+ condition. A true fix would likely be too expensive. Thanks, Peter Baughman
+ for the debugging, and Olivier Grisel for the fix with tests.
+
+- Configuration values which are file paths will now apply tilde-expansion,
+ closing `issue 589`_.
+
+- Now secondary config files like tox.ini and setup.cfg can be specified
+ explicitly, and prefixed sections like `[coverage:run]` will be read. Fixes
+ `issue 588`_.
+
+- Be more flexible about the command name displayed by help, fixing
+ `issue 600`_. Thanks, Ben Finney.
+
+.. _issue 101: https://bitbucket.org/ned/coveragepy/issues/101/settings-under-report-affect-running
+.. _issue 581: https://bitbucket.org/ned/coveragepy/issues/581/race-condition-when-saving-data-under
+.. _issue 588: https://bitbucket.org/ned/coveragepy/issues/588/using-rcfile-path-to-toxini-uses-run
+.. _issue 589: https://bitbucket.org/ned/coveragepy/issues/589/allow-expansion-in-coveragerc
+.. _issue 600: https://bitbucket.org/ned/coveragepy/issues/600/get-program-name-from-command-line-when
+
+
+.. _changes_441:
+
+Version 4.4.1 --- 2017-05-14
+----------------------------
+
+- No code changes: just corrected packaging for Python 2.7 Linux wheels.
+
+
+.. _changes_44:
+
+Version 4.4 --- 2017-05-07
+--------------------------
+
+- Reports could produce the wrong file names for packages, reporting ``pkg.py``
+ instead of the correct ``pkg/__init__.py``. This is now fixed. Thanks, Dirk
+ Thomas.
+
+- XML reports could produce ``<source>`` and ``<class>`` lines that together
+ didn't specify a valid source file path. This is now fixed. (`issue 526`_)
+
+- Namespace packages are no longer warned as having no code. (`issue 572`_)
+
+- Code that uses ``sys.settrace(sys.gettrace())`` in a file that wasn't being
+ coverage-measured would prevent correct coverage measurement in following
+ code. An example of this was running doctests programmatically. This is now
+ fixed. (`issue 575`_)
+
+- Errors printed by the ``coverage`` command now go to stderr instead of
+ stdout.
+
+- Running ``coverage xml`` in a directory named with non-ASCII characters would
+ fail under Python 2. This is now fixed. (`issue 573`_)
+
+.. _issue 526: https://bitbucket.org/ned/coveragepy/issues/526/generated-xml-invalid-paths-for-cobertura
+.. _issue 572: https://bitbucket.org/ned/coveragepy/issues/572/no-python-source-warning-for-namespace
+.. _issue 573: https://bitbucket.org/ned/coveragepy/issues/573/cant-generate-xml-report-if-some-source
+.. _issue 575: https://bitbucket.org/ned/coveragepy/issues/575/running-doctest-prevents-complete-coverage
+
+
+Version 4.4b1 --- 2017-04-04
+----------------------------
+
+- Some warnings can now be individually disabled. Warnings that can be
+ disabled have a short name appended. The ``[run] disable_warnings`` setting
+ takes a list of these warning names to disable. Closes both `issue 96`_ and
+ `issue 355`_.
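+
+  For example, to silence one such warning (use the short name shown in the
+  warning message itself; ``no-data-collected`` is one of them)::
+
+      [run]
+      disable_warnings = no-data-collected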
+
+- The XML report now includes attributes from version 4 of the Cobertura XML
+ format, fixing `issue 570`_.
+
+- In previous versions, calling a method that used collected data would prevent
+ further collection. For example, `save()`, `report()`, `html_report()`, and
+ others would all stop collection. An explicit `start()` was needed to get it
+ going again. This is no longer true. Now you can use the collected data and
+ also continue measurement. Both `issue 79`_ and `issue 448`_ described this
+ problem, and have been fixed.
+
+- Plugins can now find unexecuted files if they choose, by implementing the
+ `find_executable_files` method. Thanks, Emil Madsen.
+
+- Minimal IronPython support. You should be able to run IronPython programs
+ under ``coverage run``, though you will still have to do the reporting phase
+ with CPython.
+
+- Coverage.py has long had a special hack to support CPython's need to measure
+ the coverage of the standard library tests. This code was not installed by
+ kitted versions of coverage.py. Now it is.
+
+.. _issue 79: https://bitbucket.org/ned/coveragepy/issues/79/save-prevents-harvesting-on-stop
+.. _issue 96: https://bitbucket.org/ned/coveragepy/issues/96/unhelpful-warnings-produced-when-using
+.. _issue 355: https://bitbucket.org/ned/coveragepy/issues/355/warnings-should-be-suppressable
+.. _issue 448: https://bitbucket.org/ned/coveragepy/issues/448/save-and-html_report-prevent-further
+.. _issue 570: https://bitbucket.org/ned/coveragepy/issues/570/cobertura-coverage-04dtd-support
+
+
+.. _changes_434:
+
+Version 4.3.4 --- 2017-01-17
+----------------------------
+
+- Fixing 2.6 in version 4.3.3 broke other things, because the too-tricky
+ exception wasn't properly derived from Exception, described in `issue 556`_.
+ A newb mistake; it hasn't been a good few days.
+
+.. _issue 556: https://bitbucket.org/ned/coveragepy/issues/556/43-fails-if-there-are-html-files-in-the
+
+
+.. _changes_433:
+
+Version 4.3.3 --- 2017-01-17
+----------------------------
+
+- Python 2.6 support was broken due to a testing exception imported for the
+ benefit of the coverage.py test suite. Properly conditionalizing it fixed
+ `issue 554`_ so that Python 2.6 works again.
+
+.. _issue 554: https://bitbucket.org/ned/coveragepy/issues/554/traceback-on-python-26-starting-with-432
+
+
+.. _changes_432:
+
+Version 4.3.2 --- 2017-01-16
+----------------------------
+
+- Using the ``--skip-covered`` option on an HTML report with 100% coverage
+ would cause a "No data to report" error, as reported in `issue 549`_. This is
+ now fixed; thanks, Loïc Dachary.
+
+- If-statements can be optimized away during compilation, for example, `if 0:`
+ or `if __debug__:`. Coverage.py had problems properly understanding these
+ statements which existed in the source, but not in the compiled bytecode.
+ This problem, reported in `issue 522`_, is now fixed.
+
+- If you specified ``--source`` as a directory, then coverage.py would look for
+ importable Python files in that directory, and could identify ones that had
+ never been executed at all. But if you specified it as a package name, that
+ detection wasn't performed. Now it is, closing `issue 426`_. Thanks to Loïc
+ Dachary for the fix.
+
+- If you started and stopped coverage measurement thousands of times in your
+ process, you could crash Python with a "Fatal Python error: deallocating
+ None" error. This is now fixed. Thanks to Alex Groce for the bug report.
+
+- On PyPy, measuring coverage in subprocesses could produce a warning: "Trace
+ function changed, measurement is likely wrong: None". This was spurious, and
+ has been suppressed.
+
+- Previously, coverage.py couldn't start on Jython, due to that implementation
+ missing the multiprocessing module (`issue 551`_). This problem has now been
+ fixed. Also, `issue 322`_ about not being able to invoke coverage
+ conveniently, seems much better: ``jython -m coverage run myprog.py`` works
+ properly.
+
+- Let's say you ran the HTML report over and over again in the same output
+ directory, with ``--skip-covered``. And imagine due to your heroic
+ test-writing efforts, a file just achieved the goal of 100% coverage. With
+ coverage.py 4.3, the old HTML file with the less-than-100% coverage would be
+ left behind. This file is now properly deleted.
+
+.. _issue 322: https://bitbucket.org/ned/coveragepy/issues/322/cannot-use-coverage-with-jython
+.. _issue 426: https://bitbucket.org/ned/coveragepy/issues/426/difference-between-coverage-results-with
+.. _issue 522: https://bitbucket.org/ned/coveragepy/issues/522/incorrect-branch-reporting
+.. _issue 549: https://bitbucket.org/ned/coveragepy/issues/549/skip-covered-with-100-coverage-throws-a-no
+.. _issue 551: https://bitbucket.org/ned/coveragepy/issues/551/coveragepy-cannot-be-imported-in-jython27
+
+
+.. _changes_431:
+
+Version 4.3.1 --- 2016-12-28
+----------------------------
+
+- Some environments couldn't install 4.3, as described in `issue 540`_. This is
+ now fixed.
+
+- The check for conflicting ``--source`` and ``--include`` was too simple in a
+ few different ways, breaking a few perfectly reasonable use cases, described
+ in `issue 541`_. The check has been reverted while we re-think the fix for
+ `issue 265`_.
+
+.. _issue 540: https://bitbucket.org/ned/coveragepy/issues/540/cant-install-coverage-v43-into-under
+.. _issue 541: https://bitbucket.org/ned/coveragepy/issues/541/coverage-43-breaks-nosetest-with-coverage
+
+
+.. _changes_43:
+
+Version 4.3 --- 2016-12-27
+--------------------------
+
+Special thanks to **Loïc Dachary**, who took an extraordinary interest in
+coverage.py and contributed a number of improvements in this release.
+
+- Subprocesses that are measured with `automatic subprocess measurement`_ used
+ to read in any pre-existing data file. This meant data would be incorrectly
+ carried forward from run to run. Now those files are not read, so each
+ subprocess only writes its own data. Fixes `issue 510`_.
+
+- The ``coverage combine`` command will now fail if there are no data files to
+ combine. The combine changes in 4.2 meant that multiple combines could lose
+ data, leaving you with an empty .coverage data file. Fixes
+ `issue 525`_, `issue 412`_, `issue 516`_, and probably `issue 511`_.
+
+- Coverage.py wouldn't execute `sys.excepthook`_ when an exception happened in
+ your program. Now it does, thanks to Andrew Hoos. Closes `issue 535`_.
+
+- Branch coverage fixes:
+
+ - Branch coverage could misunderstand a finally clause on a try block that
+ never continued on to the following statement, as described in `issue
+ 493`_. This is now fixed. Thanks to Joe Doherty for the report and Loïc
+ Dachary for the fix.
+
+ - A while loop with a constant condition (while True) and a continue
+ statement would be mis-analyzed, as described in `issue 496`_. This is now
+ fixed, thanks to a bug report by Eli Skeggs and a fix by Loïc Dachary.
+
+ - While loops with constant conditions that were never executed could result
+ in a non-zero coverage report. Artem Dayneko reported this in `issue
+ 502`_, and Loïc Dachary provided the fix.
+
+- The HTML report now supports a ``--skip-covered`` option like the other
+ reporting commands. Thanks, Loïc Dachary for the implementation, closing
+ `issue 433`_.
+
+- Options can now be read from a tox.ini file, if any. Like setup.cfg, sections
+ are prefixed with "coverage:", so ``[run]`` options will be read from the
+ ``[coverage:run]`` section of tox.ini. Implements part of `issue 519`_.
+ Thanks, Stephen Finucane.
+
+- Specifying both ``--source`` and ``--include`` no longer silently ignores the
+  include setting; instead it fails with a message. Thanks, Nathan Land and
+ Loïc Dachary. Closes `issue 265`_.
+
+- The ``Coverage.combine`` method has a new parameter, ``strict=False``, to
+ support failing if there are no data files to combine.
+
+- When forking subprocesses, the coverage data files would have the same random
+ number appended to the file name. This didn't cause problems, because the
+ file names had the process id also, making collisions (nearly) impossible.
+ But it was disconcerting. This is now fixed.
+
+- The text report now properly sizes headers when skipping some files, fixing
+ `issue 524`_. Thanks, Anthony Sottile and Loïc Dachary.
+
+- Coverage.py can now search .pex files for source, just as it can .zip and
+ .egg. Thanks, Peter Ebden.
+
+- Data files are now about 15% smaller.
+
+- Improvements in the ``[run] debug`` setting:
+
+ - The "dataio" debug setting now also logs when data files are deleted during
+ combining or erasing.
+
+ - A new debug option, "multiproc", for logging the behavior of
+ ``concurrency=multiprocessing``.
+
+ - If you used the debug options "config" and "callers" together, you'd get a
+ call stack printed for every line in the multi-line config output. This is
+ now fixed.
+
+- Fixed an unusual bug involving multiple coding declarations in code
+  containing multi-line strings: `issue 529`_.
+
+- Coverage.py will no longer be misled into thinking that a plain file is a
+ package when interpreting ``--source`` options. Thanks, Cosimo Lupo.
+
+- If you try to run a non-Python file with coverage.py, you will now get a more
+ useful error message. `Issue 514`_.
+
+- The default pragma regex changed slightly, but this will only matter to you
+ if you are deranged and use mixed-case pragmas.
+
+- Deal properly with non-ASCII file names in an ASCII-only world, `issue 533`_.
+
+- Programs that set Unicode configuration values could cause UnicodeErrors when
+ generating HTML reports. Pytest-cov is one example. This is now fixed.
+
+- Prevented deprecation warnings from configparser that happened in some
+ circumstances, closing `issue 530`_.
+
+- Corrected the name of the jquery.ba-throttle-debounce.js library. Thanks,
+ Ben Finney. Closes `issue 505`_.
+
+- Testing against PyPy 5.6 and PyPy3 5.5.
+
+- Switched to pytest from nose for running the coverage.py tests.
+
+- Renamed AUTHORS.txt to CONTRIBUTORS.txt, since there are other ways to
+ contribute than by writing code. Also put the count of contributors into the
+ author string in setup.py, though this might be too cute.
+
+.. _sys.excepthook: https://docs.python.org/3/library/sys.html#sys.excepthook
+.. _issue 265: https://bitbucket.org/ned/coveragepy/issues/265/when-using-source-include-is-silently
+.. _issue 412: https://bitbucket.org/ned/coveragepy/issues/412/coverage-combine-should-error-if-no
+.. _issue 433: https://bitbucket.org/ned/coveragepy/issues/433/coverage-html-does-not-suport-skip-covered
+.. _issue 493: https://bitbucket.org/ned/coveragepy/issues/493/confusing-branching-failure
+.. _issue 496: https://bitbucket.org/ned/coveragepy/issues/496/incorrect-coverage-with-branching-and
+.. _issue 502: https://bitbucket.org/ned/coveragepy/issues/502/incorrect-coverage-report-with-cover
+.. _issue 505: https://bitbucket.org/ned/coveragepy/issues/505/use-canonical-filename-for-debounce
+.. _issue 514: https://bitbucket.org/ned/coveragepy/issues/514/path-to-problem-file-not-reported-when
+.. _issue 510: https://bitbucket.org/ned/coveragepy/issues/510/erase-still-needed-in-42
+.. _issue 511: https://bitbucket.org/ned/coveragepy/issues/511/version-42-coverage-combine-empties
+.. _issue 516: https://bitbucket.org/ned/coveragepy/issues/516/running-coverage-combine-twice-deletes-all
+.. _issue 519: https://bitbucket.org/ned/coveragepy/issues/519/coverage-run-sections-in-toxini-or-as
+.. _issue 524: https://bitbucket.org/ned/coveragepy/issues/524/coverage-report-with-skip-covered-column
+.. _issue 525: https://bitbucket.org/ned/coveragepy/issues/525/coverage-combine-when-not-in-parallel-mode
+.. _issue 529: https://bitbucket.org/ned/coveragepy/issues/529/encoding-marker-may-only-appear-on-the
+.. _issue 530: https://bitbucket.org/ned/coveragepy/issues/530/deprecationwarning-you-passed-a-bytestring
+.. _issue 533: https://bitbucket.org/ned/coveragepy/issues/533/exception-on-unencodable-file-name
+.. _issue 535: https://bitbucket.org/ned/coveragepy/issues/535/sysexcepthook-is-not-called
+
+
+.. _changes_42:
+
+Version 4.2 --- 2016-07-26
+--------------------------
+
+- Since ``concurrency=multiprocessing`` uses subprocesses, options specified on
+ the coverage.py command line will not be communicated down to them. Only
+ options in the configuration file will apply to the subprocesses.
+ Previously, the options didn't apply to the subprocesses, but there was no
+ indication. Now it is an error to use ``--concurrency=multiprocessing`` and
+ other run-affecting options on the command line. This prevents
+ failures like those reported in `issue 495`_.
+
+- Filtering the HTML report is now faster, thanks to Ville Skyttä.
+
+.. _issue 495: https://bitbucket.org/ned/coveragepy/issues/495/branch-and-concurrency-are-conflicting
+
+
+Version 4.2b1 --- 2016-07-04
+----------------------------
+
+Work from the PyCon 2016 Sprints!
+
+- BACKWARD INCOMPATIBILITY: the ``coverage combine`` command now ignores an
+ existing ``.coverage`` data file. It used to include that file in its
+ combining. This caused confusing results, and extra tox "clean" steps. If
+ you want the old behavior, use the new ``coverage combine --append`` option.
+
+- The ``concurrency`` option can now take multiple values, to support programs
+ using multiprocessing and another library such as eventlet. This is only
+ possible in the configuration file, not from the command line. The
+ configuration file is the only way for sub-processes to all run with the same
+ options. Fixes `issue 484`_. Thanks to Josh Williams for prototyping.
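+
+  For example, in the configuration file::
+
+      [run]
+      concurrency = multiprocessing, eventlet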
+
+- Using a ``concurrency`` setting of ``multiprocessing`` now implies
+ ``--parallel`` so that the main program is measured similarly to the
+ sub-processes.
+
+- When using `automatic subprocess measurement`_, running coverage commands
+ would create spurious data files. This is now fixed, thanks to diagnosis and
+ testing by Dan Riti. Closes `issue 492`_.
+
+- A new configuration option, ``report:sort``, controls what column of the
+ text report is used to sort the rows. Thanks to Dan Wandschneider, this
+ closes `issue 199`_.
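+
+  For example, to sort the rows by the coverage percentage column (assuming
+  the column name matches the report header)::
+
+      [report]
+      sort = Cover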
+
+- The HTML report has a more-visible indicator for which column is being
+ sorted. Closes `issue 298`_, thanks to Josh Williams.
+
+- If the HTML report cannot find the source for a file, the message now
+ suggests using the ``-i`` flag to allow the report to continue. Closes
+ `issue 231`_, thanks, Nathan Land.
+
+- When reports are ignoring errors, there's now a warning if a file cannot be
+ parsed, rather than being silently ignored. Closes `issue 396`_. Thanks,
+ Matthew Boehm.
+
+- A new option for ``coverage debug`` is available: ``coverage debug config``
+ shows the current configuration. Closes `issue 454`_, thanks to Matthew
+ Boehm.
+
+- Running coverage as a module (``python -m coverage``) no longer shows the
+ program name as ``__main__.py``. Fixes `issue 478`_. Thanks, Scott Belden.
+
+- The `test_helpers` module has been moved into a separate pip-installable
+ package: `unittest-mixins`_.
+
+.. _automatic subprocess measurement: https://coverage.readthedocs.io/en/latest/subprocess.html
+.. _issue 199: https://bitbucket.org/ned/coveragepy/issues/199/add-a-way-to-sort-the-text-report
+.. _issue 231: https://bitbucket.org/ned/coveragepy/issues/231/various-default-behavior-in-report-phase
+.. _issue 298: https://bitbucket.org/ned/coveragepy/issues/298/show-in-html-report-that-the-columns-are
+.. _issue 396: https://bitbucket.org/ned/coveragepy/issues/396/coverage-xml-shouldnt-bail-out-on-parse
+.. _issue 454: https://bitbucket.org/ned/coveragepy/issues/454/coverage-debug-config-should-be
+.. _issue 478: https://bitbucket.org/ned/coveragepy/issues/478/help-shows-silly-program-name-when-running
+.. _issue 484: https://bitbucket.org/ned/coveragepy/issues/484/multiprocessing-greenlet-concurrency
+.. _issue 492: https://bitbucket.org/ned/coveragepy/issues/492/subprocess-coverage-strange-detection-of
+.. _unittest-mixins: https://pypi.org/project/unittest-mixins/
+
+
+.. _changes_41:
+
+Version 4.1 --- 2016-05-21
+--------------------------
+
+- The internal attribute `Reporter.file_reporters` was removed in 4.1b3. It
+  should have come as no surprise that there were third-party tools out there
+ using that attribute. It has been restored, but with a deprecation warning.
+
+
+Version 4.1b3 --- 2016-05-10
+----------------------------
+
+- When running your program, execution can jump from an ``except X:`` line to
+ some other line when an exception other than ``X`` happens. This jump is no
+ longer considered a branch when measuring branch coverage.
+
+- When measuring branch coverage, ``yield`` statements that were never resumed
+ were incorrectly marked as missing, as reported in `issue 440`_. This is now
+ fixed.
+
+- During branch coverage of single-line callables like lambdas and generator
+ expressions, coverage.py can now distinguish between them never being called,
+ or being called but not completed. Fixes `issue 90`_, `issue 460`_ and
+ `issue 475`_.
+
+- The HTML report now has a map of the file along the rightmost edge of the
+ page, giving an overview of where the missed lines are. Thanks, Dmitry
+ Shishov.
+
+- The HTML report now uses different monospaced fonts, favoring Consolas over
+ Courier. Along the way, `issue 472`_ about not properly handling one-space
+ indents was fixed. The index page also has slightly different styling, to
+ try to make the clickable detail pages more apparent.
+
+- Missing branches reported with ``coverage report -m`` will now say ``->exit``
+ for missed branches to the exit of a function, rather than a negative number.
+ Fixes `issue 469`_.
+
+- ``coverage --help`` and ``coverage --version`` now mention which tracer is
+ installed, to help diagnose problems. The docs mention which features need
+ the C extension. (`issue 479`_)
+
+- Officially support PyPy 5.1, which required no changes, just updates to the
+ docs.
+
+- The `Coverage.report` function had two parameters with non-None defaults,
+ which have been changed. `show_missing` used to default to True, but now
+ defaults to None. If you had been calling `Coverage.report` without
+ specifying `show_missing`, you'll need to explicitly set it to True to keep
+ the same behavior. `skip_covered` used to default to False. It is now None,
+ which doesn't change the behavior. This fixes `issue 485`_.
+
+- It's never been possible to pass a namespace module to one of the analysis
+ functions, but now at least we raise a more specific error message, rather
+ than getting confused. (`issue 456`_)
+
+- The `coverage.process_startup` function now returns the `Coverage` instance
+ it creates, as suggested in `issue 481`_.
+
+- Make a small tweak to how we compare threads, to avoid buggy custom
+ comparison code in thread classes. (`issue 245`_)
+
+.. _issue 90: https://bitbucket.org/ned/coveragepy/issues/90/lambda-expression-confuses-branch
+.. _issue 245: https://bitbucket.org/ned/coveragepy/issues/245/change-solution-for-issue-164
+.. _issue 440: https://bitbucket.org/ned/coveragepy/issues/440/yielded-twisted-failure-marked-as-missed
+.. _issue 456: https://bitbucket.org/ned/coveragepy/issues/456/coverage-breaks-with-implicit-namespaces
+.. _issue 460: https://bitbucket.org/ned/coveragepy/issues/460/confusing-html-report-for-certain-partial
+.. _issue 469: https://bitbucket.org/ned/coveragepy/issues/469/strange-1-line-number-in-branch-coverage
+.. _issue 472: https://bitbucket.org/ned/coveragepy/issues/472/html-report-indents-incorrectly-for-one
+.. _issue 475: https://bitbucket.org/ned/coveragepy/issues/475/generator-expression-is-marked-as-not
+.. _issue 479: https://bitbucket.org/ned/coveragepy/issues/479/clarify-the-need-for-the-c-extension
+.. _issue 481: https://bitbucket.org/ned/coveragepy/issues/481/asyncioprocesspoolexecutor-tracing-not
+.. _issue 485: https://bitbucket.org/ned/coveragepy/issues/485/coveragereport-ignores-show_missing-and
+
+
+Version 4.1b2 --- 2016-01-23
+----------------------------
+
+- Problems with the new branch measurement in 4.1 beta 1 were fixed:
+
+ - Class docstrings were considered executable. Now they no longer are.
+
+ - ``yield from`` and ``await`` were considered returns from functions, since
+ they could transfer control to the caller. This produced unhelpful
+ "missing branch" reports in a number of circumstances. Now they no longer
+ are considered returns.
+
+ - In unusual situations, a missing branch to a negative number was reported.
+ This has been fixed, closing `issue 466`_.
+
+- The XML report now produces correct package names for modules found in
+ directories specified with ``source=``. Fixes `issue 465`_.
+
+- ``coverage report`` won't produce trailing whitespace.
+
+.. _issue 465: https://bitbucket.org/ned/coveragepy/issues/465/coveragexml-produces-package-names-with-an
+.. _issue 466: https://bitbucket.org/ned/coveragepy/issues/466/impossible-missed-branch-to-a-negative
+
+
+Version 4.1b1 --- 2016-01-10
+----------------------------
+
+- Branch analysis has been rewritten: it used to be based on bytecode, but now
+ uses AST analysis. This has changed a number of things:
+
+ - More code paths are now considered runnable, especially in
+ ``try``/``except`` structures. This may mean that coverage.py will
+ identify more code paths as uncovered. This could either raise or lower
+ your overall coverage number.
+
+ - Python 3.5's ``async`` and ``await`` keywords are properly supported,
+ fixing `issue 434`_.
+
+ - Some long-standing branch coverage bugs were fixed:
+
+ - `issue 129`_: functions with only a docstring for a body would
+ incorrectly report a missing branch on the ``def`` line.
+
+ - `issue 212`_: code in an ``except`` block could be incorrectly marked as
+ a missing branch.
+
+ - `issue 146`_: context managers (``with`` statements) in a loop or ``try``
+ block could confuse the branch measurement, reporting incorrect partial
+ branches.
+
+ - `issue 422`_: in Python 3.5, an actual partial branch could be marked as
+ complete.
+
+- Pragmas to disable coverage measurement can now be used on decorator lines,
+ and they will apply to the entire function or class being decorated. This
+ implements the feature requested in `issue 131`_.
+
+- Multiprocessing support is now available on Windows. Thanks, Rodrigue
+ Cloutier.
+
+- Files with two encoding declarations are properly supported, fixing
+ `issue 453`_. Thanks, Max Linke.
+
+- Non-ASCII characters in regexes in the configuration file worked in 3.7, but
+ stopped working in 4.0. Now they work again, closing `issue 455`_.
+
+- Form-feed characters would prevent accurate determination of the beginning of
+ statements in the rest of the file. This is now fixed, closing `issue 461`_.
+
+.. _issue 129: https://bitbucket.org/ned/coveragepy/issues/129/misleading-branch-coverage-of-empty
+.. _issue 131: https://bitbucket.org/ned/coveragepy/issues/131/pragma-on-a-decorator-line-should-affect
+.. _issue 146: https://bitbucket.org/ned/coveragepy/issues/146/context-managers-confuse-branch-coverage
+.. _issue 212: https://bitbucket.org/ned/coveragepy/issues/212/coverage-erroneously-reports-partial
+.. _issue 422: https://bitbucket.org/ned/coveragepy/issues/422/python35-partial-branch-marked-as-fully
+.. _issue 434: https://bitbucket.org/ned/coveragepy/issues/434/indexerror-in-python-35
+.. _issue 453: https://bitbucket.org/ned/coveragepy/issues/453/source-code-encoding-can-only-be-specified
+.. _issue 455: https://bitbucket.org/ned/coveragepy/issues/455/unusual-exclusions-stopped-working-in
+.. _issue 461: https://bitbucket.org/ned/coveragepy/issues/461/multiline-asserts-need-too-many-pragma
+
+
+.. _changes_403:
+
+Version 4.0.3 --- 2015-11-24
+----------------------------
+
+- Fixed a mysterious problem that manifested in different ways: sometimes
+ hanging the process (`issue 420`_), sometimes making database connections
+ fail (`issue 445`_).
+
+- The XML report now has correct ``<source>`` elements when using a
+ ``--source=`` option somewhere besides the current directory. This fixes
+ `issue 439`_. Thanks, Arcadiy Ivanov.
+
+- Fixed an unusual edge case of detecting source encodings, described in
+ `issue 443`_.
+
+- Help messages that mention the command to use now properly use the actual
+ command name, which might be different than "coverage". Thanks to Ben
+ Finney, this closes `issue 438`_.
+
+.. _issue 420: https://bitbucket.org/ned/coveragepy/issues/420/coverage-40-hangs-indefinitely-on-python27
+.. _issue 438: https://bitbucket.org/ned/coveragepy/issues/438/parameterise-coverage-command-name
+.. _issue 439: https://bitbucket.org/ned/coveragepy/issues/439/incorrect-cobertura-file-sources-generated
+.. _issue 443: https://bitbucket.org/ned/coveragepy/issues/443/coverage-gets-confused-when-encoding
+.. _issue 445: https://bitbucket.org/ned/coveragepy/issues/445/django-app-cannot-connect-to-cassandra
+
+
+.. _changes_402:
+
+Version 4.0.2 --- 2015-11-04
+----------------------------
+
+- More work on supporting unusually encoded source. Fixed `issue 431`_.
+
+- Files or directories with non-ASCII characters are now handled properly,
+ fixing `issue 432`_.
+
+- Setting a trace function with sys.settrace was broken by a change in 4.0.1,
+ as reported in `issue 436`_. This is now fixed.
+
+- Officially support PyPy 4.0, which required no changes, just updates to the
+ docs.
+
+.. _issue 431: https://bitbucket.org/ned/coveragepy/issues/431/couldnt-parse-python-file-with-cp1252
+.. _issue 432: https://bitbucket.org/ned/coveragepy/issues/432/path-with-unicode-characters-various
+.. _issue 436: https://bitbucket.org/ned/coveragepy/issues/436/disabled-coverage-ctracer-may-rise-from
+
+
+.. _changes_401:
+
+Version 4.0.1 --- 2015-10-13
+----------------------------
+
+- When combining data files, unreadable files will now generate a warning
+ instead of failing the command. This is more in line with the older
+ coverage.py v3.7.1 behavior, which silently ignored unreadable files.
+ Prompted by `issue 418`_.
+
+- The ``--skip-covered`` option would skip reporting on 100% covered files, but
+  also skipped them when calculating total coverage. This was wrong; it should
+ only remove lines from the report, not change the final answer. This is now
+ fixed, closing `issue 423`_.
+
+- In 4.0, the data file recorded a summary of the system on which it was run.
+ Combined data files would keep all of those summaries. This could lead to
+ enormous data files consisting of mostly repetitive useless information. That
+ summary is now gone, fixing `issue 415`_. If you want summary information,
+ get in touch, and we'll figure out a better way to do it.
+
+- Test suites that mocked os.path.exists would experience strange failures, due
+ to coverage.py using their mock inadvertently. This is now fixed, closing
+ `issue 416`_.
+
+- Importing a ``__init__`` module explicitly would lead to an error:
+ ``AttributeError: 'module' object has no attribute '__path__'``, as reported
+ in `issue 410`_. This is now fixed.
+
+- Code that uses ``sys.settrace(sys.gettrace())`` used to incur a more than 2x
+ speed penalty. Now there's no penalty at all. Fixes `issue 397`_.
+
+- Pyexpat C code will no longer be recorded as a source file, fixing
+ `issue 419`_.
+
+- The source kit now contains all of the files needed to have a complete source
+ tree, re-fixing `issue 137`_ and closing `issue 281`_.
+
+.. _issue 281: https://bitbucket.org/ned/coveragepy/issues/281/supply-scripts-for-testing-in-the
+.. _issue 397: https://bitbucket.org/ned/coveragepy/issues/397/stopping-and-resuming-coverage-with
+.. _issue 410: https://bitbucket.org/ned/coveragepy/issues/410/attributeerror-module-object-has-no
+.. _issue 415: https://bitbucket.org/ned/coveragepy/issues/415/repeated-coveragedataupdates-cause
+.. _issue 416: https://bitbucket.org/ned/coveragepy/issues/416/mocking-ospathexists-causes-failures
+.. _issue 418: https://bitbucket.org/ned/coveragepy/issues/418/json-parse-error
+.. _issue 419: https://bitbucket.org/ned/coveragepy/issues/419/nosource-no-source-for-code-path-to-c
+.. _issue 423: https://bitbucket.org/ned/coveragepy/issues/423/skip_covered-changes-reported-total
+
+
+.. _changes_40:
+
+Version 4.0 --- 2015-09-20
+--------------------------
+
+No changes from 4.0b3
+
+
+Version 4.0b3 --- 2015-09-07
+----------------------------
+
+- Reporting on an unmeasured file would fail with a traceback. This is now
+ fixed, closing `issue 403`_.
+
+- The Jenkins ShiningPanda_ plugin looks for an obsolete file name to find the
+ HTML reports to publish, so it was failing under coverage.py 4.0. Now we
+ create that file if we are running under Jenkins, to keep things working
+ smoothly. `issue 404`_.
+
+- Kits used to include tests and docs, but didn't install them anywhere, or
+ provide all of the supporting tools to make them useful. Kits no longer
+ include tests and docs. If you were using them from the older packages, get
+ in touch and help me understand how.
+
+.. _issue 403: https://bitbucket.org/ned/coveragepy/issues/403/hasherupdate-fails-with-typeerror-nonetype
+.. _issue 404: https://bitbucket.org/ned/coveragepy/issues/404/shiningpanda-jenkins-plugin-cant-find-html
+
+
+Version 4.0b2 --- 2015-08-22
+----------------------------
+
+- 4.0b1 broke ``--append`` creating new data files. This is now fixed, closing
+ `issue 392`_.
+
+- ``py.test --cov`` can write empty data, then touch files due to ``--source``,
+ which made coverage.py mistakenly force the data file to record lines instead
+ of arcs. This would lead to a "Can't combine line data with arc data" error
+ message. This is now fixed, and changed some method names in the
+ CoverageData interface. Fixes `issue 399`_.
+
+- `CoverageData.read_fileobj` and `CoverageData.write_fileobj` replace the
+ `.read` and `.write` methods, and are now properly inverses of each other.
+
+- When using ``report --skip-covered``, a message will now be included in the
+ report output indicating how many files were skipped, and if all files are
+ skipped, coverage.py won't accidentally scold you for having no data to
+ report. Thanks, Krystian Kichewko.
+
+- A new conversion utility has been added: ``python -m coverage.pickle2json``
+ will convert v3.x pickle data files to v4.x JSON data files. Thanks,
+ Alexander Todorov. Closes `issue 395`_.
+
+- A new version identifier is available, `coverage.version_info`, a plain tuple
+ of values similar to `sys.version_info`_.
+
+.. _issue 392: https://bitbucket.org/ned/coveragepy/issues/392/run-append-doesnt-create-coverage-file
+.. _issue 395: https://bitbucket.org/ned/coveragepy/issues/395/rfe-read-pickled-files-as-well-for
+.. _issue 399: https://bitbucket.org/ned/coveragepy/issues/399/coverageexception-cant-combine-line-data
+.. _sys.version_info: https://docs.python.org/3/library/sys.html#sys.version_info
+
+
+Version 4.0b1 --- 2015-08-02
+----------------------------
+
+- Coverage.py is now licensed under the Apache 2.0 license. See NOTICE.txt for
+ details. Closes `issue 313`_.
+
+- The data storage has been completely revamped. The data file is now
+ JSON-based instead of a pickle, closing `issue 236`_. The `CoverageData`
+ class is now a public supported documented API to the data file.
+
+- A new configuration option, ``[run] note``, lets you set a note that will be
+ stored in the `runs` section of the data file. You can use this to annotate
+ the data file with any information you like.
+
+- Unrecognized configuration options will now print an error message and stop
+ coverage.py. This should help prevent configuration mistakes from passing
+ silently. Finishes `issue 386`_.
+
+- In parallel mode, ``coverage erase`` will now delete all of the data files,
+ fixing `issue 262`_.
+
+- Coverage.py now accepts a directory name for ``coverage run`` and will run a
+ ``__main__.py`` found there, just like Python will. Fixes `issue 252`_.
+ Thanks, Dmitry Trofimov.
+
+- The XML report now includes a ``missing-branches`` attribute. Thanks, Steve
+ Peak. This is not a part of the Cobertura DTD, so the XML report no longer
+ references the DTD.
+
+- Missing branches in the HTML report now have a bit more information in the
+ right-hand annotations. Hopefully this will make their meaning clearer.
+
+- All the reporting functions now behave the same if no data had been
+ collected, exiting with a status code of 1. Fixed ``fail_under`` to be
+ applied even when the report is empty. Thanks, Ionel Cristian Mărieș.
+
+- Plugins are now initialized differently. Instead of looking for a class
+ called ``Plugin``, coverage.py looks for a function called ``coverage_init``.
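+
+  A minimal sketch of the new hook (the plugin class here is a placeholder)::
+
+      import coverage
+
+      class MyPlugin(coverage.CoveragePlugin):
+          pass
+
+      def coverage_init(reg, options):
+          reg.add_file_tracer(MyPlugin())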
+
+- A file-tracing plugin can now ask to have built-in Python reporting by
+ returning `"python"` from its `file_reporter()` method.
+
+- Code that was executed with `exec` would be mis-attributed to the file that
+ called it. This is now fixed, closing `issue 380`_.
+
+- The ability to use item access on `Coverage.config` (introduced in 4.0a2) has
+ been changed to a more explicit `Coverage.get_option` and
+ `Coverage.set_option` API.
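+
+  For example::
+
+      import coverage
+
+      cov = coverage.Coverage()
+      cov.set_option("run:branch", True)
+      print(cov.get_option("run:branch"))   # True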
+
+- The ``Coverage.use_cache`` method is no longer supported.
+
+- The private method ``Coverage._harvest_data`` is now called
+ ``Coverage.get_data``, and returns the ``CoverageData`` containing the
+ collected data.
+
+- The project is consistently referred to as "coverage.py" throughout the code
+ and the documentation, closing `issue 275`_.
+
+- Combining data files with an explicit configuration file was broken in 4.0a6,
+ but now works again, closing `issue 385`_.
+
+- ``coverage combine`` now accepts files as well as directories.
+
+- The speed is back to 3.7.1 levels, after having slowed down due to plugin
+ support, finishing up `issue 387`_.
+
+.. _issue 236: https://bitbucket.org/ned/coveragepy/issues/236/pickles-are-bad-and-you-should-feel-bad
+.. _issue 252: https://bitbucket.org/ned/coveragepy/issues/252/coverage-wont-run-a-program-with
+.. _issue 262: https://bitbucket.org/ned/coveragepy/issues/262/when-parallel-true-erase-should-erase-all
+.. _issue 275: https://bitbucket.org/ned/coveragepy/issues/275/refer-consistently-to-project-as-coverage
+.. _issue 313: https://bitbucket.org/ned/coveragepy/issues/313/add-license-file-containing-2-3-or-4
+.. _issue 380: https://bitbucket.org/ned/coveragepy/issues/380/code-executed-by-exec-excluded-from
+.. _issue 385: https://bitbucket.org/ned/coveragepy/issues/385/coverage-combine-doesnt-work-with-rcfile
+.. _issue 386: https://bitbucket.org/ned/coveragepy/issues/386/error-on-unrecognised-configuration
+.. _issue 387: https://bitbucket.org/ned/coveragepy/issues/387/performance-degradation-from-371-to-40
+
+.. 40 issues closed in 4.0 below here
+
+
+Version 4.0a6 --- 2015-06-21
+----------------------------
+
+- Python 3.5b2 and PyPy 2.6.0 are supported.
+
+- The original module-level function interface to coverage.py is no longer
+ supported. You must now create a ``coverage.Coverage`` object, and use
+ methods on it.
+
+- The ``coverage combine`` command now accepts any number of directories as
+ arguments, and will combine all the data files from those directories. This
+ means you don't have to copy the files to one directory before combining.
+ Thanks, Christine Lytwynec. Finishes `issue 354`_.
+
+- Branch coverage couldn't properly handle certain extremely long files. This
+ is now fixed (`issue 359`_).
+
+- Branch coverage didn't understand yield statements properly. Mickie Betz
+ persisted in pursuing this despite Ned's pessimism. Fixes `issue 308`_ and
+ `issue 324`_.
+
+- The COVERAGE_DEBUG environment variable can be used to set the
+ ``[run] debug`` configuration option to control what internal operations are
+ logged.
+
+- HTML reports were truncated at formfeed characters. This is now fixed
+ (`issue 360`_). It's always fun when the problem is due to a `bug in the
+ Python standard library <http://bugs.python.org/issue19035>`_.
+
+- Files with incorrect encoding declaration comments are no longer ignored by
+ the reporting commands, fixing `issue 351`_.
+
+- HTML reports now include a timestamp in the footer, closing `issue 299`_.
+ Thanks, Conrad Ho.
+
+- HTML reports now begrudgingly use double-quotes rather than single quotes,
+ because there are "software engineers" out there writing tools that read HTML
+ and somehow have no idea that single quotes exist. Capitulates to the absurd
+ `issue 361`_. Thanks, Jon Chappell.
+
+- The ``coverage annotate`` command now handles non-ASCII characters properly,
+ closing `issue 363`_. Thanks, Leonardo Pistone.
+
+- Drive letters on Windows were not normalized correctly, now they are. Thanks,
+ Ionel Cristian Mărieș.
+
+- Plugin support had some bugs fixed, closing `issue 374`_ and `issue 375`_.
+ Thanks, Stefan Behnel.
+
+.. _issue 299: https://bitbucket.org/ned/coveragepy/issues/299/inserted-created-on-yyyy-mm-dd-hh-mm-in
+.. _issue 308: https://bitbucket.org/ned/coveragepy/issues/308/yield-lambda-branch-coverage
+.. _issue 324: https://bitbucket.org/ned/coveragepy/issues/324/yield-in-loop-confuses-branch-coverage
+.. _issue 351: https://bitbucket.org/ned/coveragepy/issues/351/files-with-incorrect-encoding-are-ignored
+.. _issue 354: https://bitbucket.org/ned/coveragepy/issues/354/coverage-combine-should-take-a-list-of
+.. _issue 359: https://bitbucket.org/ned/coveragepy/issues/359/xml-report-chunk-error
+.. _issue 360: https://bitbucket.org/ned/coveragepy/issues/360/html-reports-get-confused-by-l-in-the-code
+.. _issue 361: https://bitbucket.org/ned/coveragepy/issues/361/use-double-quotes-in-html-output-to
+.. _issue 363: https://bitbucket.org/ned/coveragepy/issues/363/annotate-command-hits-unicode-happy-fun
+.. _issue 374: https://bitbucket.org/ned/coveragepy/issues/374/c-tracer-lookups-fail-in
+.. _issue 375: https://bitbucket.org/ned/coveragepy/issues/375/ctracer_handle_return-reads-byte-code
+
+
+Version 4.0a5 --- 2015-02-16
+----------------------------
+
+- Plugin support is now implemented in the C tracer instead of the Python
+ tracer. This greatly improves the speed of tracing projects using plugins.
+
+- Coverage.py now always adds the current directory to sys.path, so that
+ plugins can import files in the current directory (`issue 358`_).
+
+- If the `config_file` argument to the Coverage constructor is specified as
+ ".coveragerc", it is treated as if it were True. This means setup.cfg is
+ also examined, and a missing file is not considered an error (`issue 357`_).
+
+- Wildly experimental: support for measuring processes started by the
+ multiprocessing module. To use, set ``--concurrency=multiprocessing``,
+ either on the command line or in the .coveragerc file (`issue 117`_). Thanks,
+ Eduardo Schettino. Currently, this does not work on Windows.
+
+- A new warning is possible, if a desired file isn't measured because it was
+ imported before coverage.py was started (`issue 353`_).
+
+- The `coverage.process_startup` function now will start coverage measurement
+ only once, no matter how many times it is called. This fixes problems due
+ to unusual virtualenv configurations (`issue 340`_).
+
+- Added 3.5.0a1 to the list of supported CPython versions.
+
+.. _issue 117: https://bitbucket.org/ned/coveragepy/issues/117/enable-coverage-measurement-of-code-run-by
+.. _issue 340: https://bitbucket.org/ned/coveragepy/issues/340/keyerror-subpy
+.. _issue 353: https://bitbucket.org/ned/coveragepy/issues/353/40a3-introduces-an-unexpected-third-case
+.. _issue 357: https://bitbucket.org/ned/coveragepy/issues/357/behavior-changed-when-coveragerc-is
+.. _issue 358: https://bitbucket.org/ned/coveragepy/issues/358/all-coverage-commands-should-adjust
+
+
+Version 4.0a4 --- 2015-01-25
+----------------------------
+
+- Plugins can now provide sys_info for debugging output.
+
+- Started plugins documentation.
+
+- Prepared to move the docs to readthedocs.org.
+
+
+Version 4.0a3 --- 2015-01-20
+----------------------------
+
+- Reports now use file names with extensions. Previously, a report would
+ describe a/b/c.py as "a/b/c". Now it is shown as "a/b/c.py". This allows
+ for better support of non-Python files, and also fixed `issue 69`_.
+
+- The XML report now reports each directory as a package again. This was a bad
+ regression, I apologize. This was reported in `issue 235`_, which is now
+ fixed.
+
+- A new configuration option for the XML report: ``[xml] package_depth``
+ controls which directories are identified as packages in the report.
+ Directories deeper than this depth are not reported as packages.
+ The default is that all directories are reported as packages.
+ Thanks, Lex Berezhny.
+
+- When looking for the source for a frame, check if the file exists. On
+ Windows, .pyw files are no longer recorded as .py files. Along the way, this
+ fixed `issue 290`_.
+
+- Empty files are now reported as 100% covered in the XML report, not 0%
+ covered (`issue 345`_).
+
+- Regexes in the configuration file are now compiled as soon as they are read,
+ to provide error messages earlier (`issue 349`_).
+
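+A minimal sketch of the ``[xml] package_depth`` option described above; the
+depth value is only illustrative::
+
+    [xml]
+    package_depth = 2
+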
+.. _issue 69: https://bitbucket.org/ned/coveragepy/issues/69/coverage-html-overwrite-files-that-doesnt
+.. _issue 235: https://bitbucket.org/ned/coveragepy/issues/235/package-name-is-missing-in-xml-report
+.. _issue 290: https://bitbucket.org/ned/coveragepy/issues/290/running-programmatically-with-pyw-files
+.. _issue 345: https://bitbucket.org/ned/coveragepy/issues/345/xml-reports-line-rate-0-for-empty-files
+.. _issue 349: https://bitbucket.org/ned/coveragepy/issues/349/bad-regex-in-config-should-get-an-earlier
+
+
+Version 4.0a2 --- 2015-01-14
+----------------------------
+
+- Officially support PyPy 2.4, and PyPy3 2.4. Drop support for
+ CPython 3.2 and older versions of PyPy. The code won't work on CPython 3.2.
+ It will probably still work on older versions of PyPy, but I'm not testing
+ against them.
+
+- Plugins!
+
+- The original command line switches (`-x` to run a program, etc) are no
+ longer supported.
+
+- A new option: `coverage report --skip-covered` will reduce the number of
+ files reported by skipping files with 100% coverage. Thanks, Krystian
+ Kichewko. This means that empty `__init__.py` files will be skipped, since
+ they are 100% covered, closing `issue 315`_.
+
+- You can now specify the ``--fail-under`` option in the ``.coveragerc`` file
+ as the ``[report] fail_under`` option. This closes `issue 314`_.
+
+- The ``COVERAGE_OPTIONS`` environment variable is no longer supported. It was
+ a hack for ``--timid`` before configuration files were available.
+
+- The HTML report now has filtering. Type text into the Filter box on the
+ index page, and only modules with that text in the name will be shown.
+ Thanks, Danny Allen.
+
+- The textual report and the HTML report used to report partial branches
+ differently for no good reason. Now the text report's "missing branches"
+ column is a "partial branches" column so that both reports show the same
+ numbers. This closes `issue 342`_.
+
+- If you specify a ``--rcfile`` that cannot be read, you will get an error
+ message. Fixes `issue 343`_.
+
+- The ``--debug`` switch can now be used on any command.
+
+- You can now programmatically adjust the configuration of coverage.py by
+ setting items on `Coverage.config` after construction.
+
+- A module run with ``-m`` can be used as the argument to ``--source``, fixing
+ `issue 328`_. Thanks, Buck Evan.
+
+- The regex for matching exclusion pragmas has been fixed to allow more kinds
+ of whitespace, fixing `issue 334`_.
+
+- Made some PyPy-specific tweaks to improve speed under PyPy. Thanks, Alex
+ Gaynor.
+
+- In some cases, with a source file missing a final newline, coverage.py would
+ count statements incorrectly. This is now fixed, closing `issue 293`_.
+
+- The status.dat file that HTML reports use to avoid re-creating files that
+ haven't changed is now a JSON file instead of a pickle file. This obviates
+ `issue 287`_ and `issue 237`_.
+
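+A minimal sketch of the ``--skip-covered`` switch and the ``fail_under``
+setting described above; the threshold value is only illustrative::
+
+    # command line:
+    coverage report --skip-covered
+
+    # .coveragerc equivalent of --fail-under:
+    [report]
+    fail_under = 90
+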
+.. _issue 237: https://bitbucket.org/ned/coveragepy/issues/237/htmlcov-with-corrupt-statusdat
+.. _issue 287: https://bitbucket.org/ned/coveragepy/issues/287/htmlpy-doesnt-specify-pickle-protocol
+.. _issue 293: https://bitbucket.org/ned/coveragepy/issues/293/number-of-statement-detection-wrong-if-no
+.. _issue 314: https://bitbucket.org/ned/coveragepy/issues/314/fail_under-param-not-working-in-coveragerc
+.. _issue 315: https://bitbucket.org/ned/coveragepy/issues/315/option-to-omit-empty-files-eg-__init__py
+.. _issue 328: https://bitbucket.org/ned/coveragepy/issues/328/misbehavior-in-run-source
+.. _issue 334: https://bitbucket.org/ned/coveragepy/issues/334/pragma-not-recognized-if-tab-character
+.. _issue 342: https://bitbucket.org/ned/coveragepy/issues/342/console-and-html-coverage-reports-differ
+.. _issue 343: https://bitbucket.org/ned/coveragepy/issues/343/an-explicitly-named-non-existent-config
+
+
+Version 4.0a1 --- 2014-09-27
+----------------------------
+
+- Python versions supported are now CPython 2.6, 2.7, 3.2, 3.3, and 3.4, and
+ PyPy 2.2.
+
+- Gevent, eventlet, and greenlet are now supported, closing `issue 149`_.
+ The ``concurrency`` setting specifies the concurrency library in use. Huge
+ thanks to Peter Portante for initial implementation, and to Joe Jevnik for
+ the final insight that completed the work.
+
+- Options are now also read from a setup.cfg file, if any. Sections are
+ prefixed with "coverage:", so the ``[run]`` options will be read from the
+ ``[coverage:run]`` section of setup.cfg. Finishes `issue 304`_.
+
+- The ``report -m`` command can now show missing branches when reporting on
+ branch coverage. Thanks, Steve Leonard. Closes `issue 230`_.
+
+- The XML report now contains a <source> element, fixing `issue 94`_. Thanks
+ Stan Hu.
+
+- The class defined in the coverage module is now called ``Coverage`` instead
+ of ``coverage``, though the old name still works, for backward compatibility.
+
+- The ``fail-under`` value is now rounded the same as reported results,
+ preventing paradoxical results, fixing `issue 284`_.
+
+- The XML report will now create the output directory if need be, fixing
+ `issue 285`_. Thanks, Chris Rose.
+
+- HTML reports no longer raise UnicodeDecodeError if a Python file has
+ undecodable characters, fixing `issue 303`_ and `issue 331`_.
+
+- The annotate command will now annotate all files, not just ones relative to
+ the current directory, fixing `issue 57`_.
+
+- The coverage module no longer causes deprecation warnings on Python 3.4 by
+ importing the imp module, fixing `issue 305`_.
+
+- Encoding declarations in source files are only considered if they are truly
+ comments. Thanks, Anthony Sottile.
+
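+A minimal sketch of the setup.cfg support described above, using ``branch``
+as an illustrative ``[run]`` option::
+
+    # setup.cfg
+    [coverage:run]
+    branch = True
+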
+.. _issue 57: https://bitbucket.org/ned/coveragepy/issues/57/annotate-command-fails-to-annotate-many
+.. _issue 94: https://bitbucket.org/ned/coveragepy/issues/94/coverage-xml-doesnt-produce-sources
+.. _issue 149: https://bitbucket.org/ned/coveragepy/issues/149/coverage-gevent-looks-broken
+.. _issue 230: https://bitbucket.org/ned/coveragepy/issues/230/show-line-no-for-missing-branches-in
+.. _issue 284: https://bitbucket.org/ned/coveragepy/issues/284/fail-under-should-show-more-precision
+.. _issue 285: https://bitbucket.org/ned/coveragepy/issues/285/xml-report-fails-if-output-file-directory
+.. _issue 303: https://bitbucket.org/ned/coveragepy/issues/303/unicodedecodeerror
+.. _issue 304: https://bitbucket.org/ned/coveragepy/issues/304/attempt-to-get-configuration-from-setupcfg
+.. _issue 305: https://bitbucket.org/ned/coveragepy/issues/305/pendingdeprecationwarning-the-imp-module
+.. _issue 331: https://bitbucket.org/ned/coveragepy/issues/331/failure-of-encoding-detection-on-python2
+
+
+.. _changes_371:
+
+Version 3.7.1 --- 2013-12-13
+----------------------------
+
+- Improved the speed of HTML report generation by about 20%.
+
+- Fixed the mechanism for finding OS-installed static files for the HTML report
+ so that it will actually find OS-installed static files.
+
+
+.. _changes_37:
+
+Version 3.7 --- 2013-10-06
+--------------------------
+
+- Added the ``--debug`` switch to ``coverage run``. It accepts a list of
+ options indicating the type of internal activity to log to stderr.
+
+- Improved the branch coverage facility, fixing `issue 92`_ and `issue 175`_.
+
+- Running code with ``coverage run -m`` now behaves more like Python does,
+ setting sys.path properly, which fixes `issue 207`_ and `issue 242`_.
+
+- Coverage.py can now run .pyc files directly, closing `issue 264`_.
+
+- Coverage.py properly supports .pyw files, fixing `issue 261`_.
+
+- Omitting files within a tree specified with the ``source`` option would
+ cause them to be incorrectly marked as unexecuted, as described in
+ `issue 218`_. This is now fixed.
+
+- When specifying paths to alias together during data combining, you can now
+ specify relative paths, fixing `issue 267`_.
+
+- Most file paths can now be specified with username expansion (``~/src``, or
+ ``~build/src``, for example), and with environment variable expansion
+ (``build/$BUILDNUM/src``).
+
+- Trying to create an XML report with no files to report on would cause a
+  ZeroDivisionError, but no longer does, fixing `issue 250`_.
+
+- When running a threaded program under the Python tracer, coverage.py no
+ longer issues a spurious warning about the trace function changing: "Trace
+ function changed, measurement is likely wrong: None." This fixes `issue
+ 164`_.
+
+- Static files necessary for HTML reports are found in system-installed places,
+ to ease OS-level packaging of coverage.py. Closes `issue 259`_.
+
+- Source files with encoding declarations, but a blank first line, were not
+ decoded properly. Now they are. Thanks, Roger Hu.
+
+- The source kit now includes the ``__main__.py`` file in the root coverage
+ directory, fixing `issue 255`_.
+
+.. _issue 92: https://bitbucket.org/ned/coveragepy/issues/92/finally-clauses-arent-treated-properly-in
+.. _issue 164: https://bitbucket.org/ned/coveragepy/issues/164/trace-function-changed-warning-when-using
+.. _issue 175: https://bitbucket.org/ned/coveragepy/issues/175/branch-coverage-gets-confused-in-certain
+.. _issue 207: https://bitbucket.org/ned/coveragepy/issues/207/run-m-cannot-find-module-or-package-in
+.. _issue 242: https://bitbucket.org/ned/coveragepy/issues/242/running-a-two-level-package-doesnt-work
+.. _issue 218: https://bitbucket.org/ned/coveragepy/issues/218/run-command-does-not-respect-the-omit-flag
+.. _issue 250: https://bitbucket.org/ned/coveragepy/issues/250/uncaught-zerodivisionerror-when-generating
+.. _issue 255: https://bitbucket.org/ned/coveragepy/issues/255/directory-level-__main__py-not-included-in
+.. _issue 259: https://bitbucket.org/ned/coveragepy/issues/259/allow-use-of-system-installed-third-party
+.. _issue 261: https://bitbucket.org/ned/coveragepy/issues/261/pyw-files-arent-reported-properly
+.. _issue 264: https://bitbucket.org/ned/coveragepy/issues/264/coverage-wont-run-pyc-files
+.. _issue 267: https://bitbucket.org/ned/coveragepy/issues/267/relative-path-aliases-dont-work
+
+
+.. _changes_36:
+
+Version 3.6 --- 2013-01-05
+--------------------------
+
+- Added a page to the docs about troublesome situations, closing `issue 226`_,
+ and added some info to the TODO file, closing `issue 227`_.
+
+.. _issue 226: https://bitbucket.org/ned/coveragepy/issues/226/make-readme-section-to-describe-when
+.. _issue 227: https://bitbucket.org/ned/coveragepy/issues/227/update-todo
+
+
+Version 3.6b3 --- 2012-12-29
+----------------------------
+
+- Beta 2 broke the nose plugin. It's fixed again, closing `issue 224`_.
+
+.. _issue 224: https://bitbucket.org/ned/coveragepy/issues/224/36b2-breaks-nosexcover
+
+
+Version 3.6b2 --- 2012-12-23
+----------------------------
+
+- Coverage.py runs on Python 2.3 and 2.4 again. It was broken in 3.6b1.
+
+- The C extension is optionally compiled using a different, more widely-used
+  technique, taking another stab at fixing `issue 80`_ once and for all.
+
+- Combining data files would create entries for phantom files if used with
+ ``source`` and path aliases. It no longer does.
+
+- ``debug sys`` now shows the configuration file path that was read.
+
+- If an oddly-behaved package claims that code came from an empty-string
+ file name, coverage.py no longer associates it with the directory name,
+ fixing `issue 221`_.
+
+.. _issue 221: https://bitbucket.org/ned/coveragepy/issues/221/coveragepy-incompatible-with-pyratemp
+
+
+Version 3.6b1 --- 2012-11-28
+----------------------------
+
+- Wildcards in ``include=`` and ``omit=`` arguments were not handled properly
+ in reporting functions, though they were when running. Now they are handled
+ uniformly, closing `issue 143`_ and `issue 163`_. **NOTE**: it is possible
+ that your configurations may now be incorrect. If you use ``include`` or
+ ``omit`` during reporting, whether on the command line, through the API, or
+ in a configuration file, please check carefully that you were not relying on
+ the old broken behavior.
+
+- The **report**, **html**, and **xml** commands now accept a ``--fail-under``
+ switch that indicates in the exit status whether the coverage percentage was
+ less than a particular value. Closes `issue 139`_.
+
+- The reporting functions coverage.report(), coverage.html_report(), and
+ coverage.xml_report() now all return a float, the total percentage covered
+ measurement.
+
+- The HTML report's title can now be set in the configuration file, with the
+ ``--title`` switch on the command line, or via the API.
+
+- Configuration files now support substitution of environment variables, using
+ syntax like ``${WORD}``. Closes `issue 97`_.
+
+- Embarrassingly, the ``[xml] output=`` setting in the .coveragerc file simply
+ didn't work. Now it does.
+
+- The XML report now consistently uses file names for the file name attribute,
+ rather than sometimes using module names. Fixes `issue 67`_.
+ Thanks, Marcus Cobden.
+
+- Coverage percentage metrics are now computed slightly differently under
+ branch coverage. This means that completely unexecuted files will now
+ correctly have 0% coverage, fixing `issue 156`_. This also means that your
+ total coverage numbers will generally now be lower if you are measuring
+ branch coverage.
+
+- When installing, now in addition to creating a "coverage" command, two new
+ aliases are also installed. A "coverage2" or "coverage3" command will be
+ created, depending on whether you are installing in Python 2.x or 3.x.
+ A "coverage-X.Y" command will also be created corresponding to your specific
+ version of Python. Closes `issue 111`_.
+
+- The coverage.py installer no longer tries to bootstrap setuptools or
+ Distribute. You must have one of them installed first, as `issue 202`_
+ recommended.
+
+- The coverage.py kit now includes docs (closing `issue 137`_) and tests.
+
+- On Windows, files are now reported in their correct case, fixing `issue 89`_
+ and `issue 203`_.
+
+- If a file is missing during reporting, the path shown in the error message
+ is now correct, rather than an incorrect path in the current directory.
+ Fixes `issue 60`_.
+
+- Running an HTML report in Python 3 in the same directory as an old Python 2
+ HTML report would fail with a UnicodeDecodeError. This issue (`issue 193`_)
+ is now fixed.
+
+- Fixed yet another error trying to parse non-Python files as Python, this
+ time an IndentationError, closing `issue 82`_ for the fourth time...
+
+- If `coverage xml` fails because there is no data to report, it used to
+ create a zero-length XML file. Now it doesn't, fixing `issue 210`_.
+
+- Jython files now work with the ``--source`` option, fixing `issue 100`_.
+
+- Running coverage.py under a debugger is unlikely to work, but it shouldn't
+ fail with "TypeError: 'NoneType' object is not iterable". Fixes `issue
+ 201`_.
+
+- On some Linux distributions, when installed with the OS package manager,
+ coverage.py would report its own code as part of the results. Now it won't,
+ fixing `issue 214`_, though this will take some time to be repackaged by the
+ operating systems.
+
+- Docstrings for the legacy singleton methods are more helpful. Thanks Marius
+ Gedminas. Closes `issue 205`_.
+
+- The pydoc tool can now show documentation for the class `coverage.coverage`.
+ Closes `issue 206`_.
+
+- Added a page to the docs about contributing to coverage.py, closing
+ `issue 171`_.
+
+- When coverage.py ended unsuccessfully, it may have reported odd errors like
+ ``'NoneType' object has no attribute 'isabs'``. It no longer does,
+ so kiss `issue 153`_ goodbye.
+
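+A minimal sketch of the environment-variable substitution described above;
+``BUILD_DIR`` is a hypothetical variable, and ``data_file`` is just one
+setting it could be used with::
+
+    [run]
+    data_file = ${BUILD_DIR}/.coverage
+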
+.. _issue 60: https://bitbucket.org/ned/coveragepy/issues/60/incorrect-path-to-orphaned-pyc-files
+.. _issue 67: https://bitbucket.org/ned/coveragepy/issues/67/xml-report-filenames-may-be-generated
+.. _issue 89: https://bitbucket.org/ned/coveragepy/issues/89/on-windows-all-packages-are-reported-in
+.. _issue 97: https://bitbucket.org/ned/coveragepy/issues/97/allow-environment-variables-to-be
+.. _issue 100: https://bitbucket.org/ned/coveragepy/issues/100/source-directive-doesnt-work-for-packages
+.. _issue 111: https://bitbucket.org/ned/coveragepy/issues/111/when-installing-coverage-with-pip-not
+.. _issue 137: https://bitbucket.org/ned/coveragepy/issues/137/provide-docs-with-source-distribution
+.. _issue 139: https://bitbucket.org/ned/coveragepy/issues/139/easy-check-for-a-certain-coverage-in-tests
+.. _issue 143: https://bitbucket.org/ned/coveragepy/issues/143/omit-doesnt-seem-to-work-in-coverage
+.. _issue 153: https://bitbucket.org/ned/coveragepy/issues/153/non-existent-filename-triggers
+.. _issue 156: https://bitbucket.org/ned/coveragepy/issues/156/a-completely-unexecuted-file-shows-14
+.. _issue 163: https://bitbucket.org/ned/coveragepy/issues/163/problem-with-include-and-omit-filename
+.. _issue 171: https://bitbucket.org/ned/coveragepy/issues/171/how-to-contribute-and-run-tests
+.. _issue 193: https://bitbucket.org/ned/coveragepy/issues/193/unicodedecodeerror-on-htmlpy
+.. _issue 201: https://bitbucket.org/ned/coveragepy/issues/201/coverage-using-django-14-with-pydb-on
+.. _issue 202: https://bitbucket.org/ned/coveragepy/issues/202/get-rid-of-ez_setuppy-and
+.. _issue 203: https://bitbucket.org/ned/coveragepy/issues/203/duplicate-filenames-reported-when-filename
+.. _issue 205: https://bitbucket.org/ned/coveragepy/issues/205/make-pydoc-coverage-more-friendly
+.. _issue 206: https://bitbucket.org/ned/coveragepy/issues/206/pydoc-coveragecoverage-fails-with-an-error
+.. _issue 210: https://bitbucket.org/ned/coveragepy/issues/210/if-theres-no-coverage-data-coverage-xml
+.. _issue 214: https://bitbucket.org/ned/coveragepy/issues/214/coveragepy-measures-itself-on-precise
+
+
+.. _changes_353:
+
+Version 3.5.3 --- 2012-09-29
+----------------------------
+
+- Line numbers in the HTML report line up better with the source lines, fixing
+ `issue 197`_, thanks Marius Gedminas.
+
+- When specifying a directory as the source= option, the directory itself no
+  longer needs an ``__init__.py`` file for its files to be considered source
+  files, though its sub-directories still do.
+
+- Files encoded as UTF-8 with a BOM are now properly handled, fixing
+ `issue 179`_. Thanks, Pablo Carballo.
+
+- Fixed more cases of non-Python files being reported as Python source, and
+ then not being able to parse them as Python. Closes `issue 82`_ (again).
+ Thanks, Julian Berman.
+
+- Fixed memory leaks under Python 3, thanks, Brett Cannon. Closes `issue 147`_.
+
+- Optimized .pyo files may not have been handled correctly, `issue 195`_.
+ Thanks, Marius Gedminas.
+
+- Certain unusually named file paths could have been mangled during reporting,
+ `issue 194`_. Thanks, Marius Gedminas.
+
+- Try to do a better job of the impossible task of detecting when we can't
+ build the C extension, fixing `issue 183`_.
+
+- Testing is now done with `tox`_, thanks, Marc Abramowitz.
+
+.. _issue 147: https://bitbucket.org/ned/coveragepy/issues/147/massive-memory-usage-by-ctracer
+.. _issue 179: https://bitbucket.org/ned/coveragepy/issues/179/htmlreporter-fails-when-source-file-is
+.. _issue 183: https://bitbucket.org/ned/coveragepy/issues/183/install-fails-for-python-23
+.. _issue 194: https://bitbucket.org/ned/coveragepy/issues/194/filelocatorrelative_filename-could-mangle
+.. _issue 195: https://bitbucket.org/ned/coveragepy/issues/195/pyo-file-handling-in-codeunit
+.. _issue 197: https://bitbucket.org/ned/coveragepy/issues/197/line-numbers-in-html-report-do-not-align
+.. _tox: https://tox.readthedocs.io/
+
+
+.. _changes_352:
+
+Version 3.5.2 --- 2012-05-04
+----------------------------
+
+No changes since 3.5.2b1.
+
+
+Version 3.5.2b1 --- 2012-04-29
+------------------------------
+
+- The HTML report has slightly tweaked controls: the buttons at the top of
+ the page are color-coded to the source lines they affect.
+
+- Custom CSS can be applied to the HTML report by specifying a CSS file as
+ the ``extra_css`` configuration value in the ``[html]`` section.
+
+- Source files with custom encodings declared in a comment at the top are now
+ properly handled during reporting on Python 2. Python 3 always handled them
+ properly. This fixes `issue 157`_.
+
+- Backup files left behind by editors are no longer collected by the source=
+ option, fixing `issue 168`_.
+
+- If a file doesn't parse properly as Python, we don't report it as an error
+ if the file name seems like maybe it wasn't meant to be Python. This is a
+ pragmatic fix for `issue 82`_.
+
+- The ``-m`` switch on ``coverage report``, which includes missing line numbers
+ in the summary report, can now be specified as ``show_missing`` in the
+ config file. Closes `issue 173`_.
+
+- When running a module with ``coverage run -m <modulename>``, certain details
+ of the execution environment weren't the same as for
+ ``python -m <modulename>``. This had the unfortunate side-effect of making
+ ``coverage run -m unittest discover`` not work if you had tests in a
+ directory named "test". This fixes `issue 155`_ and `issue 142`_.
+
+- Now the exit status of your product code is properly used as the process
+ status when running ``python -m coverage run ...``. Thanks, JT Olds.
+
+- When installing into pypy, we no longer attempt (and fail) to compile
+ the C tracer function, closing `issue 166`_.
+
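+A minimal sketch of the ``extra_css`` and ``show_missing`` settings described
+above; the CSS file name is only illustrative::
+
+    [html]
+    extra_css = extra.css
+
+    [report]
+    show_missing = True
+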
+.. _issue 142: https://bitbucket.org/ned/coveragepy/issues/142/executing-python-file-syspath-is-replaced
+.. _issue 155: https://bitbucket.org/ned/coveragepy/issues/155/cant-use-coverage-run-m-unittest-discover
+.. _issue 157: https://bitbucket.org/ned/coveragepy/issues/157/chokes-on-source-files-with-non-utf-8
+.. _issue 166: https://bitbucket.org/ned/coveragepy/issues/166/dont-try-to-compile-c-extension-on-pypy
+.. _issue 168: https://bitbucket.org/ned/coveragepy/issues/168/dont-be-alarmed-by-emacs-droppings
+.. _issue 173: https://bitbucket.org/ned/coveragepy/issues/173/theres-no-way-to-specify-show-missing-in
+
+
+.. _changes_351:
+
+Version 3.5.1 --- 2011-09-23
+----------------------------
+
+- The ``[paths]`` feature unfortunately didn't work in real world situations
+ where you wanted to, you know, report on the combined data. Now all paths
+ stored in the combined file are canonicalized properly.
+
+
+Version 3.5.1b1 --- 2011-08-28
+------------------------------
+
+- When combining data files from parallel runs, you can now instruct
+ coverage.py about which directories are equivalent on different machines. A
+ ``[paths]`` section in the configuration file lists paths that are to be
+ considered equivalent. Finishes `issue 17`_.
+
+- for-else constructs are understood better, and don't cause erroneous partial
+ branch warnings. Fixes `issue 122`_.
+
+- Branch coverage for ``with`` statements is improved, fixing `issue 128`_.
+
+- The number of partial branches reported on the HTML summary page was
+ different than the number reported on the individual file pages. This is
+ now fixed.
+
+- An explicit include directive to measure files in the Python installation
+ wouldn't work because of the standard library exclusion. Now the include
+ directive takes precedence, and the files will be measured. Fixes
+ `issue 138`_.
+
+- The HTML report now handles Unicode characters in Python source files
+ properly. This fixes `issue 124`_ and `issue 144`_. Thanks, Devin
+ Jeanpierre.
+
+- In order to help the core developers measure the test coverage of the
+ standard library, Brandon Rhodes devised an aggressive hack to trick Python
+ into running some coverage.py code before anything else in the process.
+ See the coverage/fullcoverage directory if you are interested.
+
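+A minimal sketch of a ``[paths]`` section as described above; the directory
+names are only illustrative, and the first path is the canonical one the
+others are folded into::
+
+    [paths]
+    source =
+        src/
+        /ci/build/*/src
+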
+.. _issue 17: https://bitbucket.org/ned/coveragepy/issues/17/support-combining-coverage-data-from
+.. _issue 122: https://bitbucket.org/ned/coveragepy/issues/122/for-else-always-reports-missing-branch
+.. _issue 124: https://bitbucket.org/ned/coveragepy/issues/124/no-arbitrary-unicode-in-html-reports-in
+.. _issue 128: https://bitbucket.org/ned/coveragepy/issues/128/branch-coverage-of-with-statement-in-27
+.. _issue 138: https://bitbucket.org/ned/coveragepy/issues/138/include-should-take-precedence-over-is
+.. _issue 144: https://bitbucket.org/ned/coveragepy/issues/144/failure-generating-html-output-for
+
+
+.. _changes_35:
+
+Version 3.5 --- 2011-06-29
+--------------------------
+
+- The HTML report hotkeys now behave slightly differently when the current
+ chunk isn't visible at all: a chunk on the screen will be selected,
+ instead of the old behavior of jumping to the literal next chunk.
+ The hotkeys now work in Google Chrome. Thanks, Guido van Rossum.
+
+
+Version 3.5b1 --- 2011-06-05
+----------------------------
+
+- The HTML report now has hotkeys. Try ``n``, ``s``, ``m``, ``x``, ``b``,
+ ``p``, and ``c`` on the overview page to change the column sorting.
+ On a file page, ``r``, ``m``, ``x``, and ``p`` toggle the run, missing,
+ excluded, and partial line markings. You can navigate the highlighted
+ sections of code by using the ``j`` and ``k`` keys for next and previous.
+ The ``1`` (one) key jumps to the first highlighted section in the file,
+ and ``0`` (zero) scrolls to the top of the file.
+
+- The ``--omit`` and ``--include`` switches now interpret their values more
+ usefully. If the value starts with a wildcard character, it is used as-is.
+ If it does not, it is interpreted relative to the current directory.
+ Closes `issue 121`_.
+
+- Partial branch warnings can now be pragma'd away. The configuration option
+ ``partial_branches`` is a list of regular expressions. Lines matching any of
+ those expressions will never be marked as a partial branch. In addition,
+ there's a built-in list of regular expressions marking statements which
+ should never be marked as partial. This list includes ``while True:``,
+ ``while 1:``, ``if 1:``, and ``if 0:``.
+
+- The ``coverage()`` constructor accepts single strings for the ``omit=`` and
+ ``include=`` arguments, adapting to a common error in programmatic use.
+
+- Modules can now be run directly using ``coverage run -m modulename``, to
+ mirror Python's ``-m`` flag. Closes `issue 95`_, thanks, Brandon Rhodes.
+
+- ``coverage run`` didn't emulate Python accurately in one small detail: the
+ current directory inserted into ``sys.path`` was relative rather than
+ absolute. This is now fixed.
+
+- HTML reporting is now incremental: a record is kept of the data that
+ produced the HTML reports, and only files whose data has changed will
+ be generated. This should make most HTML reporting faster.
+
+- Pathological code execution could disable the trace function behind our
+ backs, leading to incorrect code measurement. Now if this happens,
+ coverage.py will issue a warning, at least alerting you to the problem.
+ Closes `issue 93`_. Thanks to Marius Gedminas for the idea.
+
+- The C-based trace function now behaves properly when saved and restored
+ with ``sys.gettrace()`` and ``sys.settrace()``. This fixes `issue 125`_
+ and `issue 123`_. Thanks, Devin Jeanpierre.
+
+- Source files are now opened with Python 3.2's ``tokenize.open()`` where
+ possible, to get the best handling of Python source files with encodings.
+ Closes `issue 107`_, thanks, Brett Cannon.
+
+- Syntax errors in supposed Python files can now be ignored during reporting
+ with the ``-i`` switch just like other source errors. Closes `issue 115`_.
+
+- Installation from source now succeeds on machines without a C compiler,
+ closing `issue 80`_.
+
+- Coverage.py can now be run directly from a working tree by specifying
+ the directory name to python: ``python coverage_py_working_dir run ...``.
+ Thanks, Brett Cannon.
+
+- A little bit of Jython support: `coverage run` can now measure Jython
+ execution by adapting when $py.class files are traced. Thanks, Adi Roiban.
+ Jython still doesn't provide the Python libraries needed to make
+ coverage reporting work, unfortunately.
+
+- Internally, files are now closed explicitly, fixing `issue 104`_. Thanks,
+ Brett Cannon.
+
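+A minimal sketch of the ``partial_branches`` setting described above; the
+regex is only an illustration of the kind of line you might exclude::
+
+    [report]
+    partial_branches =
+        if DEBUG:
+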
+.. _issue 80: https://bitbucket.org/ned/coveragepy/issues/80/is-there-a-duck-typing-way-to-know-we-cant
+.. _issue 93: https://bitbucket.org/ned/coveragepy/issues/93/copying-a-mock-object-breaks-coverage
+.. _issue 95: https://bitbucket.org/ned/coveragepy/issues/95/run-subcommand-should-take-a-module-name
+.. _issue 104: https://bitbucket.org/ned/coveragepy/issues/104/explicitly-close-files
+.. _issue 107: https://bitbucket.org/ned/coveragepy/issues/107/codeparser-not-opening-source-files-with
+.. _issue 115: https://bitbucket.org/ned/coveragepy/issues/115/fail-gracefully-when-reporting-on-file
+.. _issue 121: https://bitbucket.org/ned/coveragepy/issues/121/filename-patterns-are-applied-stupidly
+.. _issue 123: https://bitbucket.org/ned/coveragepy/issues/123/pyeval_settrace-used-in-way-that-breaks
+.. _issue 125: https://bitbucket.org/ned/coveragepy/issues/125/coverage-removes-decoratortoolss-tracing
+
+
+.. _changes_34:
+
+Version 3.4 --- 2010-09-19
+--------------------------
+
+- The XML report is now sorted by package name, fixing `issue 88`_.
+
+- Programs that exited with ``sys.exit()`` with no argument weren't handled
+ properly, producing a coverage.py stack trace. That is now fixed.
+
+.. _issue 88: https://bitbucket.org/ned/coveragepy/issues/88/xml-report-lists-packages-in-random-order
+
+
+Version 3.4b2 --- 2010-09-06
+----------------------------
+
+- Completely unexecuted files can now be included in coverage results, reported
+ as 0% covered. This only happens if the --source option is specified, since
+ coverage.py needs guidance about where to look for source files.
+
+- The XML report output now properly includes a percentage for branch coverage,
+ fixing `issue 65`_ and `issue 81`_.
+
+- Coverage percentages are now displayed uniformly across reporting methods.
+ Previously, different reports could round percentages differently. Also,
+ percentages are only reported as 0% or 100% if they are truly 0 or 100, and
+ are rounded otherwise. Fixes `issue 41`_ and `issue 70`_.
+
+- The precision of reported coverage percentages can be set with the
+ ``[report] precision`` config file setting. Completes `issue 16`_.
+
+- Threads derived from ``threading.Thread`` with an overridden `run` method
+ would report no coverage for the `run` method. This is now fixed, closing
+ `issue 85`_.
+
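+A minimal sketch of the ``[report] precision`` setting described above; the
+number of digits is only illustrative::
+
+    [report]
+    precision = 2
+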
+.. _issue 16: https://bitbucket.org/ned/coveragepy/issues/16/allow-configuration-of-accuracy-of-percentage-totals
+.. _issue 41: https://bitbucket.org/ned/coveragepy/issues/41/report-says-100-when-it-isnt-quite-there
+.. _issue 65: https://bitbucket.org/ned/coveragepy/issues/65/branch-option-not-reported-in-cobertura
+.. _issue 70: https://bitbucket.org/ned/coveragepy/issues/70/text-report-and-html-report-disagree-on-coverage
+.. _issue 81: https://bitbucket.org/ned/coveragepy/issues/81/xml-report-does-not-have-condition-coverage-attribute-for-lines-with-a
+.. _issue 85: https://bitbucket.org/ned/coveragepy/issues/85/threadrun-isnt-measured
+
+
+Version 3.4b1 --- 2010-08-21
+----------------------------
+
+- BACKWARD INCOMPATIBILITY: the ``--omit`` and ``--include`` switches now take
+ file patterns rather than file prefixes, closing `issue 34`_ and `issue 36`_.
+
+- BACKWARD INCOMPATIBILITY: the `omit_prefixes` argument is gone throughout
+ coverage.py, replaced with `omit`, a list of file name patterns suitable for
+ `fnmatch`. A parallel argument `include` controls what files are included.
+
+- The run command now has a ``--source`` switch, a list of directories or
+ module names. If provided, coverage.py will only measure execution in those
+ source files.
+
+- Various warnings are printed to stderr for problems encountered during data
+ measurement: if a ``--source`` module has no Python source to measure, or is
+ never encountered at all, or if no data is collected.
+
+- The reporting commands (report, annotate, html, and xml) now have an
+ ``--include`` switch to restrict reporting to modules matching those file
+ patterns, similar to the existing ``--omit`` switch. Thanks, Zooko.
+
+- The run command now supports ``--include`` and ``--omit`` to control what
+ modules it measures. This can speed execution and reduce the amount of data
+ during reporting. Thanks Zooko.
+
+- Since coverage.py 3.1, using the Python trace function has been slower than
+ it needs to be. A cache of tracing decisions was broken, but has now been
+ fixed.
+
+- Python 2.7 and 3.2 have introduced new opcodes that are now supported.
+
+- Python files with no statements, for example, empty ``__init__.py`` files,
+ are now reported as having zero statements instead of one. Fixes `issue 1`_.
+
+- Reports now have a column of missed line counts rather than executed line
+ counts, since developers should focus on reducing the missed lines to zero,
+ rather than increasing the executed lines to varying targets. Once
+ suggested, this seemed blindingly obvious.
+
+- Line numbers in HTML source pages are clickable, linking directly to that
+ line, which is highlighted on arrival. Added a link back to the index page
+ at the bottom of each HTML page.
+
+- Programs that call ``os.fork`` will properly collect data from both the child
+ and parent processes. Use ``coverage run -p`` to get two data files that can
+ be combined with ``coverage combine``. Fixes `issue 56`_.
+
+- Coverage.py is now runnable as a module: ``python -m coverage``. Thanks,
+ Brett Cannon.
+
+- When measuring code running in a virtualenv, most of the system library was
+ being measured when it shouldn't have been. This is now fixed.
+
+- Doctest text files are no longer recorded in the coverage data, since they
+ can't be reported anyway. Fixes `issue 52`_ and `issue 61`_.
+
+- Jinja HTML templates compile into Python code using the HTML file name,
+ which confused coverage.py. Now these files are no longer traced, fixing
+ `issue 82`_.
+
+- Source files can have more than one dot in them (foo.test.py), and will be
+ treated properly while reporting. Fixes `issue 46`_.
+
+- Source files with DOS line endings are now properly tokenized for syntax
+ coloring on non-DOS machines. Fixes `issue 53`_.
+
+- Unusual code structure that confused exits from methods with exits from
+ classes is now properly analyzed. See `issue 62`_.
+
+- Asking for an HTML report with no files now shows a nice error message rather
+ than a cryptic failure ('int' object is unsubscriptable). Fixes `issue 59`_.
+
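+A minimal sketch of the ``--source`` switch and the pattern-based ``--omit``
+switch described above; the project and file names are hypothetical::
+
+    coverage run --source=myproj --omit="*/tests/*" runner.py
+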
+.. _issue 1: https://bitbucket.org/ned/coveragepy/issues/1/empty-__init__py-files-are-reported-as-1-executable
+.. _issue 34: https://bitbucket.org/ned/coveragepy/issues/34/enhanced-omit-globbing-handling
+.. _issue 36: https://bitbucket.org/ned/coveragepy/issues/36/provide-regex-style-omit
+.. _issue 46: https://bitbucket.org/ned/coveragepy/issues/46
+.. _issue 53: https://bitbucket.org/ned/coveragepy/issues/53
+.. _issue 52: https://bitbucket.org/ned/coveragepy/issues/52/doctesttestfile-confuses-source-detection
+.. _issue 56: https://bitbucket.org/ned/coveragepy/issues/56
+.. _issue 61: https://bitbucket.org/ned/coveragepy/issues/61/annotate-i-doesnt-work
+.. _issue 62: https://bitbucket.org/ned/coveragepy/issues/62
+.. _issue 59: https://bitbucket.org/ned/coveragepy/issues/59/html-report-fails-with-int-object-is
+.. _issue 82: https://bitbucket.org/ned/coveragepy/issues/82/tokenerror-when-generating-html-report
+
+
+.. _changes_331:
+
+Version 3.3.1 --- 2010-03-06
+----------------------------
+
+- Using `parallel=True` in the .coveragerc file used to prevent reporting, but
+  no longer does, fixing `issue 49`_.
+
+- When running your code with "coverage run", if you call `sys.exit()`,
+ coverage.py will exit with that status code, fixing `issue 50`_.
+
+.. _issue 49: https://bitbucket.org/ned/coveragepy/issues/49
+.. _issue 50: https://bitbucket.org/ned/coveragepy/issues/50
+
+
+.. _changes_33:
+
+Version 3.3 --- 2010-02-24
+--------------------------
+
+- Settings are now read from a .coveragerc file. A specific file can be
+ specified on the command line with --rcfile=FILE. The name of the file can
+ be programmatically set with the `config_file` argument to the coverage()
+ constructor, or reading a config file can be disabled with
+ `config_file=False`.
+
+- Fixed a problem with nested loops having their branch possibilities
+ mischaracterized: `issue 39`_.
+
+- Added coverage.process_start to enable coverage measurement when Python
+ starts.
+
+- Parallel data file names now have a random number appended to them in
+ addition to the machine name and process id.
+
+- Parallel data files combined with "coverage combine" are deleted after
+ they're combined, to clean up unneeded files. Fixes `issue 40`_.
+
+- Exceptions thrown from product code run with "coverage run" are now displayed
+ without internal coverage.py frames, so the output is the same as when the
+ code is run without coverage.py.
+
+- The `data_suffix` argument to the coverage constructor is now appended with
+ an added dot rather than simply appended, so that .coveragerc files will not
+ be confused for data files.
+
+- Python source files that don't end with a newline can now be executed, fixing
+ `issue 47`_.
+
+- Added an AUTHORS.txt file.
+
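+A minimal sketch of the ``config_file`` argument described above, using the
+programmatic API of this era; the file path is hypothetical::
+
+    import coverage
+
+    # read settings from a specific file, or pass config_file=False to skip it
+    cov = coverage.coverage(config_file="ci/coveragerc")
+    cov.start()
+    # ... run the code to be measured ...
+    cov.stop()
+    cov.save()
+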
+.. _issue 39: https://bitbucket.org/ned/coveragepy/issues/39
+.. _issue 40: https://bitbucket.org/ned/coveragepy/issues/40
+.. _issue 47: https://bitbucket.org/ned/coveragepy/issues/47
+
+
+.. _changes_32:
+
+Version 3.2 --- 2009-12-05
+--------------------------
+
+- Added a ``--version`` option on the command line.
+
+
+Version 3.2b4 --- 2009-12-01
+----------------------------
+
+- Branch coverage improvements:
+
+ - The XML report now includes branch information.
+
+- Click-to-sort HTML report columns are now persisted in a cookie. Viewing
+  a report will initially sort it the same way you last had a coverage report
+  sorted. Thanks, `Chris Adams`_.
+
+- On Python 3.x, setuptools has been replaced by `Distribute`_.
+
+.. _Distribute: https://pypi.org/project/distribute/
+
+
+Version 3.2b3 --- 2009-11-23
+----------------------------
+
+- Fixed a memory leak in the C tracer that was introduced in 3.2b1.
+
+- Branch coverage improvements:
+
+ - Branches to excluded code are ignored.
+
+- The table of contents in the HTML report is now sortable: click the headers
+ on any column. Thanks, `Chris Adams`_.
+
+.. _Chris Adams: http://chris.improbable.org
+
+
+Version 3.2b2 --- 2009-11-19
+----------------------------
+
+- Branch coverage improvements:
+
+ - Classes are no longer incorrectly marked as branches: `issue 32`_.
+
+ - "except" clauses with types are no longer incorrectly marked as branches:
+ `issue 35`_.
+
+- Fixed some problems syntax coloring sources with line continuations and
+ source with tabs: `issue 30`_ and `issue 31`_.
+
+- The --omit option now works much better than before, fixing `issue 14`_ and
+ `issue 33`_. Thanks, Danek Duvall.
+
+.. _issue 14: https://bitbucket.org/ned/coveragepy/issues/14
+.. _issue 30: https://bitbucket.org/ned/coveragepy/issues/30
+.. _issue 31: https://bitbucket.org/ned/coveragepy/issues/31
+.. _issue 32: https://bitbucket.org/ned/coveragepy/issues/32
+.. _issue 33: https://bitbucket.org/ned/coveragepy/issues/33
+.. _issue 35: https://bitbucket.org/ned/coveragepy/issues/35
+
+
+Version 3.2b1 --- 2009-11-10
+----------------------------
+
+- Branch coverage!
+
+- XML reporting has file paths that let Cobertura find the source code.
+
+- The tracer code has changed; it's a few percent faster.
+
+- Some exceptions reported by the command line interface have been cleaned up
+ so that tracebacks inside coverage.py aren't shown. Fixes `issue 23`_.
+
+.. _issue 23: https://bitbucket.org/ned/coveragepy/issues/23
+
+
+.. _changes_31:
+
+Version 3.1 --- 2009-10-04
+--------------------------
+
+- Source code can now be read from eggs. Thanks, Ross Lawley. Fixes
+ `issue 25`_.
+
+.. _issue 25: https://bitbucket.org/ned/coveragepy/issues/25
+
+
+Version 3.1b1 --- 2009-09-27
+----------------------------
+
+- Python 3.1 is now supported.
+
+- Coverage.py has a new command line syntax with sub-commands. This expands
+ the possibilities for adding features and options in the future. The old
+ syntax is still supported. Try "coverage help" to see the new commands.
+ Thanks to Ben Finney for early help.
+
+- Added an experimental "coverage xml" command for producing coverage reports
+ in a Cobertura-compatible XML format. Thanks, Bill Hart.
+
+- Added the --timid option to enable a simpler slower trace function that works
+ for DecoratorTools projects, including TurboGears. Fixed `issue 12`_ and
+ `issue 13`_.
+
+- HTML reports show modules from other directories. Fixed `issue 11`_.
+
+- HTML reports now display syntax-colored Python source.
+
+- Programs that change directory will still write .coverage files in the
+ directory where execution started. Fixed `issue 24`_.
+
+- Added a "coverage debug" command for getting diagnostic information about the
+ coverage.py installation.
+
+.. _issue 11: https://bitbucket.org/ned/coveragepy/issues/11
+.. _issue 12: https://bitbucket.org/ned/coveragepy/issues/12
+.. _issue 13: https://bitbucket.org/ned/coveragepy/issues/13
+.. _issue 24: https://bitbucket.org/ned/coveragepy/issues/24
+
+
+.. _changes_301:
+
+Version 3.0.1 --- 2009-07-07
+----------------------------
+
+- Removed the recursion limit in the tracer function. Previously, code that
+ ran more than 500 frames deep would crash. Fixed `issue 9`_.
+
+- Fixed a bizarre problem involving pyexpat, whereby lines following XML parser
+ invocations could be overlooked. Fixed `issue 10`_.
+
+- On Python 2.3, coverage.py could mis-measure code with exceptions being
+ raised. This is now fixed.
+
+- The coverage.py code itself will now not be measured by coverage.py, and no
+ coverage.py modules will be mentioned in the nose --with-cover plug-in.
+ Fixed `issue 8`_.
+
+- When running source files, coverage.py now opens them in universal newline
+ mode just like Python does. This lets it run Windows files on Mac, for
+ example.
+
+.. _issue 9: https://bitbucket.org/ned/coveragepy/issues/9
+.. _issue 10: https://bitbucket.org/ned/coveragepy/issues/10
+.. _issue 8: https://bitbucket.org/ned/coveragepy/issues/8
+
+
+.. _changes_30:
+
+Version 3.0 --- 2009-06-13
+--------------------------
+
+- Fixed the way the Python library was ignored. Too much code was being
+ excluded the old way.
+
+- Tabs are now properly converted in HTML reports. Previously indentation was
+ lost. Fixed `issue 6`_.
+
+- Nested modules now get a proper flat_rootname. Thanks, Christian Heimes.
+
+.. _issue 6: https://bitbucket.org/ned/coveragepy/issues/6
+
+
+Version 3.0b3 --- 2009-05-16
+----------------------------
+
+- Added parameters to coverage.__init__ for options that had been set on the
+ coverage object itself.
+
+- Added clear_exclude() and get_exclude_list() methods for programmatic
+ manipulation of the exclude regexes.
+
+- Added coverage.load() to read previously-saved data from the data file.
+
+- Improved the finding of code files. For example, .pyc files that have been
+ installed after compiling are now located correctly. Thanks, Detlev
+ Offenbach.
+
+- When using the object API (that is, constructing a coverage() object), data
+ is no longer saved automatically on process exit. You can re-enable it with
+ the auto_data=True parameter on the coverage() constructor. The module-level
+ interface still uses automatic saving.
+
+
+Version 3.0b --- 2009-04-30
+---------------------------
+
+HTML reporting, and continued refactoring.
+
+- HTML reports and annotation of source files: use the new -b (browser) switch.
+ Thanks to George Song for code, inspiration and guidance.
+
+- Code in the Python standard library is not measured by default. If you need
+ to measure standard library code, use the -L command-line switch during
+ execution, or the cover_pylib=True argument to the coverage() constructor.
+
+- Source annotation into a directory (-a -d) behaves differently. The
+ annotated files are named with their hierarchy flattened so that same-named
+ files from different directories no longer collide. Also, only files in the
+ current tree are included.
+
+- coverage.annotate_file is no longer available.
+
+- Programs executed with -x now behave more as they should, for example,
+ __file__ has the correct value.
+
+- .coverage data files have a new pickle-based format designed for better
+ extensibility.
+
+- Removed the undocumented cache_file argument to coverage.usecache().
+
+
+Version 3.0b1 --- 2009-03-07
+----------------------------
+
+Major overhaul.
+
+- Coverage.py is now a package rather than a module. Functionality has been
+ split into classes.
+
+- The trace function is implemented in C for speed. Coverage.py runs are now
+ much faster. Thanks to David Christian for productive micro-sprints and
+ other encouragement.
+
+- Executable lines are identified by reading the line number tables in the
+ compiled code, removing a great deal of complicated analysis code.
+
+- Precisely which lines are considered executable has changed in some cases.
+ Therefore, your coverage stats may also change slightly.
+
+- The singleton coverage object is only created if the module-level functions
+ are used. This maintains the old interface while allowing better
+ programmatic use of coverage.py.
+
+- The minimum supported Python version is 2.3.
+
+
+Version 2.85 --- 2008-09-14
+---------------------------
+
+- Add support for finding source files in eggs. Don't check for morfs being
+  instances of ModuleType; instead, use duck typing so that pseudo-modules can
+  participate. Thanks, Imri Goldberg.
+
+- Use os.realpath as part of the fixing of file names so that symlinks won't
+ confuse things. Thanks, Patrick Mezard.
+
+
+Version 2.80 --- 2008-05-25
+---------------------------
+
+- Open files in rU mode to avoid line ending craziness. Thanks, Edward Loper.
+
+
+Version 2.78 --- 2007-09-30
+---------------------------
+
+- Don't try to predict whether a file is Python source based on the extension.
+  Extension-less files are often Python scripts. Instead, simply parse the
+  file and catch the syntax errors. Hat tip to Ben Finney.
+
+
+Version 2.77 --- 2007-07-29
+---------------------------
+
+- Better packaging.
+
+
+Version 2.76 --- 2007-07-23
+---------------------------
+
+- Now Python 2.5 is *really* fully supported: the body of the new with
+ statement is counted as executable.
+
+
+Version 2.75 --- 2007-07-22
+---------------------------
+
+- Python 2.5 now fully supported. The method of dealing with multi-line
+ statements is now less sensitive to the exact line that Python reports during
+ execution. Pass statements are handled specially so that their disappearance
+ during execution won't throw off the measurement.
+
+
+Version 2.7 --- 2007-07-21
+--------------------------
+
+- "#pragma: nocover" is excluded by default.
+
+- Properly ignore docstrings and other constant expressions that appear in the
+ middle of a function, a problem reported by Tim Leslie.
+
+- coverage.erase() shouldn't clobber the exclude regex. Change how parallel
+ mode is invoked, and fix erase() so that it erases the cache when called
+ programmatically.
+
+- In reports, ignore code executed from strings, since we can't do anything
+ useful with it anyway.
+
+- Better file handling on Linux, thanks Guillaume Chazarain.
+
+- Better shell support on Windows, thanks Noel O'Boyle.
+
+- Python 2.2 support maintained, thanks Catherine Proulx.
+
+- Minor changes to avoid lint warnings.
+
+
+Version 2.6 --- 2006-08-23
+--------------------------
+
+- Applied Joseph Tate's patch for function decorators.
+
+- Applied Sigve Tjora and Mark van der Wal's fixes for argument handling.
+
+- Applied Geoff Bache's parallel mode patch.
+
+- Refactorings to improve testability. Fixes to command-line logic for parallel
+ mode and collect.
+
+
+Version 2.5 --- 2005-12-04
+--------------------------
+
+- Call threading.settrace so that all threads are measured. Thanks Martin
+ Fuzzey.
+
+- Add a file argument to report so that reports can be captured to a different
+ destination.
+
+- Coverage.py can now measure itself.
+
+- Adapted Greg Rogers' patch for using relative file names, and sorting and
+ omitting files to report on.
+
+
+Version 2.2 --- 2004-12-31
+--------------------------
+
+- Allow for keyword arguments in the module global functions. Thanks, Allen.
+
+
+Version 2.1 --- 2004-12-14
+--------------------------
+
+- Return 'analysis' to its original behavior and add 'analysis2'. Add a global
+ for 'annotate', and factor it, adding 'annotate_file'.
+
+
+Version 2.0 --- 2004-12-12
+--------------------------
+
+Significant code changes.
+
+- Finding executable statements has been rewritten so that docstrings and
+ other quirks of Python execution aren't mistakenly identified as missing
+ lines.
+
+- Lines can be excluded from consideration, even entire suites of lines.
+
+- The file system cache of covered lines can be disabled programmatically.
+
+- Modernized the code.
+
+
+Earlier History
+---------------
+
+2001-12-04 GDR Created.
+
+2001-12-06 GDR Added command-line interface and source code annotation.
+
+2001-12-09 GDR Moved design and interface to separate documents.
+
+2001-12-10 GDR Open cache file as binary on Windows. Allow simultaneous -e and
+-x, or -a and -r.
+
+2001-12-12 GDR Added command-line help. Cache analysis so that it only needs to
+be done once when you specify -a and -r.
+
+2001-12-13 GDR Improved speed while recording. Portable between Python 1.5.2
+and 2.1.1.
+
+2002-01-03 GDR Module-level functions work correctly.
+
+2002-01-07 GDR Update sys.path when running a file with the -x option, so that
+it matches the value the program would get if it were run on its own.
diff --git a/third_party/python/coverage/CONTRIBUTORS.txt b/third_party/python/coverage/CONTRIBUTORS.txt
new file mode 100644
index 0000000000..a3cc9be769
--- /dev/null
+++ b/third_party/python/coverage/CONTRIBUTORS.txt
@@ -0,0 +1,136 @@
+Coverage.py was originally written by Gareth Rees, and since 2004 has been
+extended and maintained by Ned Batchelder.
+
+Other contributions, including writing code, updating docs, and submitting
+useful bug reports, have been made by:
+
+Abdeali Kothari
+Adi Roiban
+Agbonze O. Jeremiah
+Albertas Agejevas
+Aleksi Torhamo
+Alex Gaynor
+Alex Groce
+Alex Sandro
+Alexander Todorov
+Alexander Walters
+Andrew Hoos
+Anthony Sottile
+Arcadiy Ivanov
+Aron Griffis
+Artem Dayneko
+Ben Finney
+Bernát Gábor
+Bill Hart
+Brandon Rhodes
+Brett Cannon
+Bruno P. Kinoshita
+Buck Evan
+Calen Pennington
+Carl Gieringer
+Catherine Proulx
+Chris Adams
+Chris Jerdonek
+Chris Rose
+Chris Warrick
+Christian Heimes
+Christine Lytwynec
+Christoph Zwerschke
+Conrad Ho
+Cosimo Lupo
+Dan Hemberger
+Dan Riti
+Dan Wandschneider
+Danek Duvall
+Daniel Hahler
+Danny Allen
+David Christian
+David MacIver
+David Stanek
+David Szotten
+Detlev Offenbach
+Devin Jeanpierre
+Dirk Thomas
+Dmitry Shishov
+Dmitry Trofimov
+Eduardo Schettino
+Eli Skeggs
+Emil Madsen
+Edward Loper
+Federico Bond
+Frazer McLean
+Geoff Bache
+George Paci
+George Song
+George-Cristian Bîrzan
+Greg Rogers
+Guido van Rossum
+Guillaume Chazarain
+Hugo van Kemenade
+Ilia Meerovich
+Imri Goldberg
+Ionel Cristian Mărieș
+JT Olds
+Jessamyn Smith
+Joe Doherty
+Joe Jevnik
+Jon Chappell
+Jon Dufresne
+Joseph Tate
+Josh Williams
+Julian Berman
+Julien Voisin
+Justas Sadzevičius
+Kjell Braden
+Krystian Kichewko
+Kyle Altendorf
+Lars Hupfeldt Nielsen
+Leonardo Pistone
+Lex Berezhny
+Loïc Dachary
+Marc Abramowitz
+Marcus Cobden
+Marius Gedminas
+Mark van der Wal
+Martin Fuzzey
+Matt Bachmann
+Matthew Boehm
+Matthew Desmarais
+Max Linke
+Michał Bultrowicz
+Mickie Betz
+Mike Fiedler
+Nathan Land
+Noel O'Boyle
+Olivier Grisel
+Ori Avtalion
+Pankaj Pandey
+Pablo Carballo
+Patrick Mezard
+Peter Baughman
+Peter Ebden
+Peter Portante
+Reya B
+Rodrigue Cloutier
+Roger Hu
+Ross Lawley
+Roy Williams
+Salvatore Zagaria
+Sandra Martocchia
+Scott Belden
+Sigve Tjora
+Simon Willison
+Stan Hu
+Stefan Behnel
+Stephan Richter
+Stephen Finucane
+Steve Leonard
+Steve Peak
+S. Y. Lee
+Ted Wexler
+Thijs Triemstra
+Titus Brown
+Ville Skyttä
+Yury Selivanov
+Zac Hatfield-Dodds
+Zooko Wilcox-O'Hearn
diff --git a/third_party/python/coverage/LICENSE.txt b/third_party/python/coverage/LICENSE.txt
new file mode 100644
index 0000000000..f433b1a53f
--- /dev/null
+++ b/third_party/python/coverage/LICENSE.txt
@@ -0,0 +1,177 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/third_party/python/coverage/MANIFEST.in b/third_party/python/coverage/MANIFEST.in
new file mode 100644
index 0000000000..75257c6068
--- /dev/null
+++ b/third_party/python/coverage/MANIFEST.in
@@ -0,0 +1,49 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+# MANIFEST.in file for coverage.py
+
+# This file includes everything needed to recreate the entire project, even
+# though many of these files are not installed by setup.py. Unpacking the
+# .tar.gz source distribution would give you everything needed to continue
+# developing the project. "pip install" will not install many of these files.
+
+include CONTRIBUTORS.txt
+include CHANGES.rst
+include LICENSE.txt
+include MANIFEST.in
+include Makefile
+include NOTICE.txt
+include README.rst
+include __main__.py
+include .travis.yml
+include appveyor.yml
+include howto.txt
+include igor.py
+include metacov.ini
+include pylintrc
+include setup.py
+include tox.ini
+include tox_wheels.ini
+include .editorconfig
+include .readthedocs.yml
+
+recursive-include ci *
+exclude ci/*.token
+
+recursive-include coverage/fullcoverage *.py
+recursive-include coverage/ctracer *.c *.h
+
+recursive-include doc *.py *.pip *.rst *.txt *.png
+recursive-include doc/_static *
+prune doc/_build
+prune doc/_spell
+
+recursive-include requirements *.pip
+
+recursive-include tests *.py *.tok
+recursive-include tests/gold *
+recursive-include tests js/* qunit/*
+prune tests/eggsrc/build
+
+global-exclude *.py[co]
diff --git a/third_party/python/coverage/Makefile b/third_party/python/coverage/Makefile
new file mode 100644
index 0000000000..e1675d9b12
--- /dev/null
+++ b/third_party/python/coverage/Makefile
@@ -0,0 +1,162 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+# Makefile for utility work on coverage.py.
+
+help: ## Show this help.
+ @echo "Available targets:"
+ @grep '^[a-zA-Z]' $(MAKEFILE_LIST) | sort | awk -F ':.*?## ' 'NF==2 {printf " %-26s%s\n", $$1, $$2}'
+
+clean_platform: ## Remove files that clash across platforms.
+ rm -f *.so */*.so
+ rm -rf __pycache__ */__pycache__ */*/__pycache__ */*/*/__pycache__ */*/*/*/__pycache__ */*/*/*/*/__pycache__
+ rm -f *.pyc */*.pyc */*/*.pyc */*/*/*.pyc */*/*/*/*.pyc */*/*/*/*/*.pyc
+ rm -f *.pyo */*.pyo */*/*.pyo */*/*/*.pyo */*/*/*/*.pyo */*/*/*/*/*.pyo
+
+clean: clean_platform ## Remove artifacts of test execution, installation, etc.
+ -pip uninstall -y coverage
+ rm -f *.pyd */*.pyd
+ rm -rf build coverage.egg-info dist htmlcov
+ rm -f *.bak */*.bak */*/*.bak */*/*/*.bak */*/*/*/*.bak */*/*/*/*/*.bak
+ rm -f *$$py.class */*$$py.class */*/*$$py.class */*/*/*$$py.class */*/*/*/*$$py.class */*/*/*/*/*$$py.class
+ rm -f coverage/*,cover
+ rm -f MANIFEST
+ rm -f .coverage .coverage.* coverage.xml .metacov*
+ rm -f .tox/*/lib/*/site-packages/zzz_metacov.pth
+ rm -f */.coverage */*/.coverage */*/*/.coverage */*/*/*/.coverage */*/*/*/*/.coverage */*/*/*/*/*/.coverage
+ rm -f tests/covmain.zip tests/zipmods.zip
+ rm -rf tests/eggsrc/build tests/eggsrc/dist tests/eggsrc/*.egg-info
+ rm -f setuptools-*.egg distribute-*.egg distribute-*.tar.gz
+ rm -rf doc/_build doc/_spell doc/sample_html_beta
+ rm -rf .cache .pytest_cache .hypothesis
+ rm -rf $$TMPDIR/coverage_test
+ -make -C tests/gold/html clean
+
+sterile: clean ## Remove all non-controlled content, even if expensive.
+ rm -rf .tox
+ -docker image rm -f quay.io/pypa/manylinux1_i686 quay.io/pypa/manylinux1_x86_64
+
+
+CSS = coverage/htmlfiles/style.css
+SCSS = coverage/htmlfiles/style.scss
+
+css: $(CSS) ## Compile .scss into .css.
+$(CSS): $(SCSS)
+ sass --style=compact --sourcemap=none --no-cache $(SCSS) $@
+ cp $@ tests/gold/html/styled
+
+LINTABLE = coverage tests igor.py setup.py __main__.py
+
+lint: ## Run linters and checkers.
+ tox -e lint
+
+todo:
+ -grep -R --include=*.py TODO $(LINTABLE)
+
+pep8:
+ pycodestyle --filename=*.py --repeat $(LINTABLE)
+
+test:
+ tox -e py27,py35 $(ARGS)
+
+PYTEST_SMOKE_ARGS = -n 6 -m "not expensive" --maxfail=3 $(ARGS)
+
+smoke: ## Run tests quickly with the C tracer in the lowest supported Python versions.
+ COVERAGE_NO_PYTRACER=1 tox -q -e py27,py35 -- $(PYTEST_SMOKE_ARGS)
+
+pysmoke: ## Run tests quickly with the Python tracer in the lowest supported Python versions.
+ COVERAGE_NO_CTRACER=1 tox -q -e py27,py35 -- $(PYTEST_SMOKE_ARGS)
+
+DOCKER_RUN = docker run -it --init --rm -v `pwd`:/io
+RUN_MANYLINUX_X86 = $(DOCKER_RUN) quay.io/pypa/manylinux1_x86_64 /io/ci/manylinux.sh
+RUN_MANYLINUX_I686 = $(DOCKER_RUN) quay.io/pypa/manylinux1_i686 /io/ci/manylinux.sh
+
+test_linux: ## Run the tests in Linux under Docker.
+ # The Linux .pyc files clash with the host's because of file path
+ # changes, so clean them before and after running tests.
+ make clean_platform
+ $(RUN_MANYLINUX_X86) test $(ARGS)
+ make clean_platform
+
+meta_linux: ## Run meta-coverage in Linux under Docker.
+ ARGS="meta $(ARGS)" make test_linux
+
+# Coverage measurement of coverage.py itself (meta-coverage). See metacov.ini
+# for details.
+
+metacov: ## Run meta-coverage, measuring ourself.
+ COVERAGE_COVERAGE=yes tox $(ARGS)
+
+metahtml: ## Produce meta-coverage HTML reports.
+ python igor.py combine_html
+
+# Kitting
+
+kit: ## Make the source distribution.
+ python setup.py sdist
+
+wheel: ## Make the wheels for distribution.
+ tox -c tox_wheels.ini $(ARGS)
+
+kit_linux: ## Make the Linux wheels.
+ $(RUN_MANYLINUX_X86) build
+ $(RUN_MANYLINUX_I686) build
+
+kit_upload: ## Upload the built distributions to PyPI.
+ twine upload --verbose dist/*
+
+test_upload: ## Upload the distributions to PyPI's testing server.
+ twine upload --verbose --repository testpypi dist/*
+
+kit_local:
+ # pip.conf looks like this:
+ # [global]
+ # find-links = file:///Users/ned/Downloads/local_pypi
+ cp -v dist/* `awk -F "//" '/find-links/ {print $$2}' ~/.pip/pip.conf`
+ # pip caches wheels of things it has installed. Clean them out so we
+ # don't go crazy trying to figure out why our new code isn't installing.
+ find ~/Library/Caches/pip/wheels -name 'coverage-*' -delete
+
+download_appveyor: ## Download the latest Windows artifacts from AppVeyor.
+ python ci/download_appveyor.py nedbat/coveragepy
+
+build_ext:
+ python setup.py build_ext
+
+# Documentation
+
+DOCBIN = .tox/doc/bin
+SPHINXOPTS = -aE
+SPHINXBUILD = $(DOCBIN)/sphinx-build $(SPHINXOPTS)
+SPHINXAUTOBUILD = $(DOCBIN)/sphinx-autobuild -p 9876 --ignore '.git/**' --open-browser
+WEBHOME = ~/web/stellated/
+WEBSAMPLE = $(WEBHOME)/files/sample_coverage_html
+WEBSAMPLEBETA = $(WEBHOME)/files/sample_coverage_html_beta
+
+docreqs:
+ tox -q -e doc --notest
+
+dochtml: docreqs ## Build the docs HTML output.
+ $(DOCBIN)/python doc/check_copied_from.py doc/*.rst
+ $(SPHINXBUILD) -b html doc doc/_build/html
+
+docdev: dochtml ## Build docs, and auto-watch for changes.
+ PATH=$(DOCBIN):$(PATH) $(SPHINXAUTOBUILD) -b html doc doc/_build/html
+
+docspell: docreqs
+ $(SPHINXBUILD) -b spelling doc doc/_spell
+
+publish:
+ rm -f $(WEBSAMPLE)/*.*
+ mkdir -p $(WEBSAMPLE)
+ cp doc/sample_html/*.* $(WEBSAMPLE)
+
+publishbeta:
+ rm -f $(WEBSAMPLEBETA)/*.*
+ mkdir -p $(WEBSAMPLEBETA)
+ cp doc/sample_html_beta/*.* $(WEBSAMPLEBETA)
+
+upload_relnotes: docreqs ## Upload parsed release notes to Tidelift.
+ $(SPHINXBUILD) -b rst doc /tmp/rst_rst
+ pandoc -frst -tmarkdown_strict --atx-headers /tmp/rst_rst/changes.rst > /tmp/rst_rst/changes.md
+ python ci/upload_relnotes.py /tmp/rst_rst/changes.md pypi/coverage
diff --git a/third_party/python/coverage/NOTICE.txt b/third_party/python/coverage/NOTICE.txt
new file mode 100644
index 0000000000..2e7671024e
--- /dev/null
+++ b/third_party/python/coverage/NOTICE.txt
@@ -0,0 +1,14 @@
+Copyright 2001 Gareth Rees. All rights reserved.
+Copyright 2004-2020 Ned Batchelder. All rights reserved.
+
+Except where noted otherwise, this software is licensed under the Apache
+License, Version 2.0 (the "License"); you may not use this work except in
+compliance with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/python/coverage/PKG-INFO b/third_party/python/coverage/PKG-INFO
new file mode 100644
index 0000000000..181e84b15c
--- /dev/null
+++ b/third_party/python/coverage/PKG-INFO
@@ -0,0 +1,187 @@
+Metadata-Version: 2.1
+Name: coverage
+Version: 5.1
+Summary: Code coverage measurement for Python
+Home-page: https://github.com/nedbat/coveragepy
+Author: Ned Batchelder and 131 others
+Author-email: ned@nedbatchelder.com
+License: Apache 2.0
+Project-URL: Documentation, https://coverage.readthedocs.io
+Project-URL: Funding, https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=pypi
+Project-URL: Issues, https://github.com/nedbat/coveragepy/issues
+Description: .. Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+ .. For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+ ===========
+ Coverage.py
+ ===========
+
+ Code coverage testing for Python.
+
+ | |license| |versions| |status|
+ | |ci-status| |win-ci-status| |docs| |codecov|
+ | |kit| |format| |repos|
+ | |stars| |forks| |contributors|
+ | |tidelift| |twitter-coveragepy| |twitter-nedbat|
+
+ Coverage.py measures code coverage, typically during test execution. It uses
+ the code analysis tools and tracing hooks provided in the Python standard
+ library to determine which lines are executable, and which have been executed.
+
+ Coverage.py runs on many versions of Python:
+
+ * CPython 2.7.
+ * CPython 3.5 through 3.9 alpha 4.
+ * PyPy2 7.3.0 and PyPy3 7.3.0.
+
+ Documentation is on `Read the Docs`_. Code repository and issue tracker are on
+ `GitHub`_.
+
+ .. _Read the Docs: https://coverage.readthedocs.io/
+ .. _GitHub: https://github.com/nedbat/coveragepy
+
+
+ **New in 5.0:** SQLite data storage, JSON report, contexts, relative filenames,
+ dropped support for Python 2.6, 3.3 and 3.4.
+
+
+ For Enterprise
+ --------------
+
+ .. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logo_small.png
+ :width: 75
+ :alt: Tidelift
+ :target: https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=readme
+
+ .. list-table::
+ :widths: 10 100
+
+ * - |tideliftlogo|
+ - `Available as part of the Tidelift Subscription. <https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=readme>`_
+ Coverage and thousands of other packages are working with
+ Tidelift to deliver one enterprise subscription that covers all of the open
+ source you use. If you want the flexibility of open source and the confidence
+ of commercial-grade software, this is for you.
+ `Learn more. <https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=readme>`_
+
+
+ Getting Started
+ ---------------
+
+ See the `Quick Start section`_ of the docs.
+
+ .. _Quick Start section: https://coverage.readthedocs.io/#quick-start
+
+
+ Change history
+ --------------
+
+ The complete history of changes is on the `change history page`_.
+
+ .. _change history page: https://coverage.readthedocs.io/en/latest/changes.html
+
+
+ Contributing
+ ------------
+
+ See the `Contributing section`_ of the docs.
+
+ .. _Contributing section: https://coverage.readthedocs.io/en/latest/contributing.html
+
+
+ Security
+ --------
+
+ To report a security vulnerability, please use the `Tidelift security
+ contact`_. Tidelift will coordinate the fix and disclosure.
+
+ .. _Tidelift security contact: https://tidelift.com/security
+
+
+ License
+ -------
+
+ Licensed under the `Apache 2.0 License`_. For details, see `NOTICE.txt`_.
+
+ .. _Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
+ .. _NOTICE.txt: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+
+ .. |ci-status| image:: https://travis-ci.com/nedbat/coveragepy.svg?branch=master
+ :target: https://travis-ci.com/nedbat/coveragepy
+ :alt: Build status
+ .. |win-ci-status| image:: https://ci.appveyor.com/api/projects/status/kmeqpdje7h9r6vsf/branch/master?svg=true
+ :target: https://ci.appveyor.com/project/nedbat/coveragepy
+ :alt: Windows build status
+ .. |docs| image:: https://readthedocs.org/projects/coverage/badge/?version=latest&style=flat
+ :target: https://coverage.readthedocs.io/
+ :alt: Documentation
+ .. |reqs| image:: https://requires.io/github/nedbat/coveragepy/requirements.svg?branch=master
+ :target: https://requires.io/github/nedbat/coveragepy/requirements/?branch=master
+ :alt: Requirements status
+ .. |kit| image:: https://badge.fury.io/py/coverage.svg
+ :target: https://pypi.org/project/coverage/
+ :alt: PyPI status
+ .. |format| image:: https://img.shields.io/pypi/format/coverage.svg
+ :target: https://pypi.org/project/coverage/
+ :alt: Kit format
+ .. |downloads| image:: https://img.shields.io/pypi/dw/coverage.svg
+ :target: https://pypi.org/project/coverage/
+ :alt: Weekly PyPI downloads
+ .. |versions| image:: https://img.shields.io/pypi/pyversions/coverage.svg?logo=python&logoColor=FBE072
+ :target: https://pypi.org/project/coverage/
+ :alt: Python versions supported
+ .. |status| image:: https://img.shields.io/pypi/status/coverage.svg
+ :target: https://pypi.org/project/coverage/
+ :alt: Package stability
+ .. |license| image:: https://img.shields.io/pypi/l/coverage.svg
+ :target: https://pypi.org/project/coverage/
+ :alt: License
+ .. |codecov| image:: https://codecov.io/github/nedbat/coveragepy/coverage.svg?branch=master&precision=2
+ :target: https://codecov.io/github/nedbat/coveragepy?branch=master
+ :alt: Coverage!
+ .. |repos| image:: https://repology.org/badge/tiny-repos/python:coverage.svg
+ :target: https://repology.org/metapackage/python:coverage/versions
+ :alt: Packaging status
+ .. |tidelift| image:: https://tidelift.com/badges/package/pypi/coverage
+ :target: https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=readme
+ :alt: Tidelift
+ .. |stars| image:: https://img.shields.io/github/stars/nedbat/coveragepy.svg?logo=github
+ :target: https://github.com/nedbat/coveragepy/stargazers
+ :alt: Github stars
+ .. |forks| image:: https://img.shields.io/github/forks/nedbat/coveragepy.svg?logo=github
+ :target: https://github.com/nedbat/coveragepy/network/members
+ :alt: Github forks
+ .. |contributors| image:: https://img.shields.io/github/contributors/nedbat/coveragepy.svg?logo=github
+ :target: https://github.com/nedbat/coveragepy/graphs/contributors
+ :alt: Contributors
+ .. |twitter-coveragepy| image:: https://img.shields.io/twitter/follow/coveragepy.svg?label=coveragepy&style=flat&logo=twitter&logoColor=4FADFF
+ :target: https://twitter.com/coveragepy
+ :alt: coverage.py on Twitter
+ .. |twitter-nedbat| image:: https://img.shields.io/twitter/follow/nedbat.svg?label=nedbat&style=flat&logo=twitter&logoColor=4FADFF
+ :target: https://twitter.com/nedbat
+ :alt: nedbat on Twitter
+
+Keywords: code coverage testing
+Platform: UNKNOWN
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Quality Assurance
+Classifier: Topic :: Software Development :: Testing
+Classifier: Development Status :: 5 - Production/Stable
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
+Description-Content-Type: text/x-rst
+Provides-Extra: toml
diff --git a/third_party/python/coverage/README.rst b/third_party/python/coverage/README.rst
new file mode 100644
index 0000000000..4534bc92a6
--- /dev/null
+++ b/third_party/python/coverage/README.rst
@@ -0,0 +1,152 @@
+.. Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+.. For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+===========
+Coverage.py
+===========
+
+Code coverage testing for Python.
+
+| |license| |versions| |status|
+| |ci-status| |win-ci-status| |docs| |codecov|
+| |kit| |format| |repos|
+| |stars| |forks| |contributors|
+| |tidelift| |twitter-coveragepy| |twitter-nedbat|
+
+Coverage.py measures code coverage, typically during test execution. It uses
+the code analysis tools and tracing hooks provided in the Python standard
+library to determine which lines are executable, and which have been executed.
+
+Coverage.py runs on many versions of Python:
+
+* CPython 2.7.
+* CPython 3.5 through 3.9 alpha 4.
+* PyPy2 7.3.0 and PyPy3 7.3.0.
+
+Documentation is on `Read the Docs`_. Code repository and issue tracker are on
+`GitHub`_.
+
+.. _Read the Docs: https://coverage.readthedocs.io/
+.. _GitHub: https://github.com/nedbat/coveragepy
+
+
+**New in 5.0:** SQLite data storage, JSON report, contexts, relative filenames,
+dropped support for Python 2.6, 3.3 and 3.4.
+
+
+For Enterprise
+--------------
+
+.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logo_small.png
+ :width: 75
+ :alt: Tidelift
+ :target: https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=readme
+
+.. list-table::
+ :widths: 10 100
+
+ * - |tideliftlogo|
+ - `Available as part of the Tidelift Subscription. <https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=readme>`_
+ Coverage and thousands of other packages are working with
+ Tidelift to deliver one enterprise subscription that covers all of the open
+ source you use. If you want the flexibility of open source and the confidence
+ of commercial-grade software, this is for you.
+ `Learn more. <https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=readme>`_
+
+
+Getting Started
+---------------
+
+See the `Quick Start section`_ of the docs.
+
+.. _Quick Start section: https://coverage.readthedocs.io/#quick-start
+
+
+Change history
+--------------
+
+The complete history of changes is on the `change history page`_.
+
+.. _change history page: https://coverage.readthedocs.io/en/latest/changes.html
+
+
+Contributing
+------------
+
+See the `Contributing section`_ of the docs.
+
+.. _Contributing section: https://coverage.readthedocs.io/en/latest/contributing.html
+
+
+Security
+--------
+
+To report a security vulnerability, please use the `Tidelift security
+contact`_. Tidelift will coordinate the fix and disclosure.
+
+.. _Tidelift security contact: https://tidelift.com/security
+
+
+License
+-------
+
+Licensed under the `Apache 2.0 License`_. For details, see `NOTICE.txt`_.
+
+.. _Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
+.. _NOTICE.txt: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+
+.. |ci-status| image:: https://travis-ci.com/nedbat/coveragepy.svg?branch=master
+ :target: https://travis-ci.com/nedbat/coveragepy
+ :alt: Build status
+.. |win-ci-status| image:: https://ci.appveyor.com/api/projects/status/kmeqpdje7h9r6vsf/branch/master?svg=true
+ :target: https://ci.appveyor.com/project/nedbat/coveragepy
+ :alt: Windows build status
+.. |docs| image:: https://readthedocs.org/projects/coverage/badge/?version=latest&style=flat
+ :target: https://coverage.readthedocs.io/
+ :alt: Documentation
+.. |reqs| image:: https://requires.io/github/nedbat/coveragepy/requirements.svg?branch=master
+ :target: https://requires.io/github/nedbat/coveragepy/requirements/?branch=master
+ :alt: Requirements status
+.. |kit| image:: https://badge.fury.io/py/coverage.svg
+ :target: https://pypi.org/project/coverage/
+ :alt: PyPI status
+.. |format| image:: https://img.shields.io/pypi/format/coverage.svg
+ :target: https://pypi.org/project/coverage/
+ :alt: Kit format
+.. |downloads| image:: https://img.shields.io/pypi/dw/coverage.svg
+ :target: https://pypi.org/project/coverage/
+ :alt: Weekly PyPI downloads
+.. |versions| image:: https://img.shields.io/pypi/pyversions/coverage.svg?logo=python&logoColor=FBE072
+ :target: https://pypi.org/project/coverage/
+ :alt: Python versions supported
+.. |status| image:: https://img.shields.io/pypi/status/coverage.svg
+ :target: https://pypi.org/project/coverage/
+ :alt: Package stability
+.. |license| image:: https://img.shields.io/pypi/l/coverage.svg
+ :target: https://pypi.org/project/coverage/
+ :alt: License
+.. |codecov| image:: https://codecov.io/github/nedbat/coveragepy/coverage.svg?branch=master&precision=2
+ :target: https://codecov.io/github/nedbat/coveragepy?branch=master
+ :alt: Coverage!
+.. |repos| image:: https://repology.org/badge/tiny-repos/python:coverage.svg
+ :target: https://repology.org/metapackage/python:coverage/versions
+ :alt: Packaging status
+.. |tidelift| image:: https://tidelift.com/badges/package/pypi/coverage
+ :target: https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=readme
+ :alt: Tidelift
+.. |stars| image:: https://img.shields.io/github/stars/nedbat/coveragepy.svg?logo=github
+ :target: https://github.com/nedbat/coveragepy/stargazers
+ :alt: Github stars
+.. |forks| image:: https://img.shields.io/github/forks/nedbat/coveragepy.svg?logo=github
+ :target: https://github.com/nedbat/coveragepy/network/members
+ :alt: Github forks
+.. |contributors| image:: https://img.shields.io/github/contributors/nedbat/coveragepy.svg?logo=github
+ :target: https://github.com/nedbat/coveragepy/graphs/contributors
+ :alt: Contributors
+.. |twitter-coveragepy| image:: https://img.shields.io/twitter/follow/coveragepy.svg?label=coveragepy&style=flat&logo=twitter&logoColor=4FADFF
+ :target: https://twitter.com/coveragepy
+ :alt: coverage.py on Twitter
+.. |twitter-nedbat| image:: https://img.shields.io/twitter/follow/nedbat.svg?label=nedbat&style=flat&logo=twitter&logoColor=4FADFF
+ :target: https://twitter.com/nedbat
+ :alt: nedbat on Twitter
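As a minimal sketch of the measurement workflow the README above describes (not part of the vendored file; coverage must be importable and my_module is a placeholder), the programmatic API looks like this:

    import coverage

    cov = coverage.Coverage()
    cov.start()

    import my_module                       # code imported or called here is measured
    my_module.do_work()

    cov.stop()
    cov.save()                             # write the .coverage data file
    cov.report()                           # terminal summary of line coverage
    cov.html_report(directory="htmlcov")   # optional HTML report

The same flow is available on the command line as "coverage run -m pytest" followed by "coverage report".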
diff --git a/third_party/python/coverage/__main__.py b/third_party/python/coverage/__main__.py
new file mode 100644
index 0000000000..28ad7d2da4
--- /dev/null
+++ b/third_party/python/coverage/__main__.py
@@ -0,0 +1,12 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Be able to execute coverage.py by pointing Python at a working tree."""
+
+import runpy
+import os
+
+PKG = 'coverage'
+
+run_globals = runpy.run_module(PKG, run_name='__main__', alter_sys=True)
+executed = os.path.splitext(os.path.basename(run_globals['__file__']))[0]
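The root-level __main__.py above leans on runpy so that pointing Python at a source checkout runs the coverage package in place, much like "python -m coverage" does for an installed copy. A generic sketch of the same pattern (mypkg is a placeholder, not something shipped here):

    import runpy

    # Equivalent to "python -m mypkg": runs mypkg/__main__.py with
    # __name__ set to "__main__", and with sys.argv[0] adjusted because
    # alter_sys=True, so argument parsing behaves as usual.
    runpy.run_module("mypkg", run_name="__main__", alter_sys=True)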
diff --git a/third_party/python/coverage/appveyor.yml b/third_party/python/coverage/appveyor.yml
new file mode 100644
index 0000000000..76e21dad55
--- /dev/null
+++ b/third_party/python/coverage/appveyor.yml
@@ -0,0 +1,169 @@
+# Appveyor, continuous integration for Windows
+# https://ci.appveyor.com/project/nedbat/coveragepy
+
+version: '{branch}-{build}'
+
+shallow_clone: true
+
+cache:
+ - '%LOCALAPPDATA%\pip\Cache'
+
+environment:
+
+ CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\ci\\run_with_env.cmd"
+
+ PYTEST_ADDOPTS: "-n auto"
+
+ # Note: There is logic to install Python version $PYTHON_VERSION if the
+ # $PYTHON directory doesn't exist. $PYTHON_VERSION is visible in the job
+ # descriptions, but can be wrong in the minor version, since we use the
+ # version pre-installed on AppVeyor.
+ #
+ matrix:
+ - JOB: "2.7 64-bit"
+ TOXENV: "py27"
+ PYTHON: "C:\\Python27-x64"
+ PYTHON_VERSION: "2.7.17"
+ PYTHON_ARCH: "64"
+
+ - JOB: "3.5 64-bit"
+ TOXENV: "py35"
+ PYTHON: "C:\\Python35-x64"
+ PYTHON_VERSION: "3.5.9"
+ PYTHON_ARCH: "64"
+
+ - JOB: "3.6 64-bit"
+ TOXENV: "py36"
+ PYTHON: "C:\\Python36-x64"
+ PYTHON_VERSION: "3.6.9"
+ PYTHON_ARCH: "64"
+
+ - JOB: "3.7 64-bit"
+ TOXENV: "py37"
+ PYTHON: "C:\\Python37-x64"
+ PYTHON_VERSION: "3.7.5"
+ PYTHON_ARCH: "64"
+
+ - JOB: "3.8 64-bit"
+ TOXENV: "py38"
+ PYTHON: "C:\\Python38-x64"
+ PYTHON_VERSION: "3.8.0"
+ PYTHON_ARCH: "64"
+
+ - JOB: "3.9 64-bit"
+ TOXENV: "py39"
+ PYTHON: "C:\\Python39-x64"
+ PYTHON_VERSION: "3.9.0a3"
+ PYTHON_ARCH: "64"
+
+ # 32-bit jobs don't run the tests under the Python tracer, since that should
+ # be exactly the same as 64-bit.
+ - JOB: "2.7 32-bit"
+ TOXENV: "py27"
+ PYTHON: "C:\\Python27"
+ PYTHON_VERSION: "2.7.17"
+ PYTHON_ARCH: "32"
+ COVERAGE_NO_PYTRACER: "1"
+
+ - JOB: "3.5 32-bit"
+ TOXENV: "py35"
+ PYTHON: "C:\\Python35"
+ PYTHON_VERSION: "3.5.9"
+ PYTHON_ARCH: "32"
+ COVERAGE_NO_PYTRACER: "1"
+
+ - JOB: "3.6 32-bit"
+ TOXENV: "py36"
+ PYTHON: "C:\\Python36"
+ PYTHON_VERSION: "3.6.9"
+ PYTHON_ARCH: "32"
+ COVERAGE_NO_PYTRACER: "1"
+
+ - JOB: "3.7 32-bit"
+ TOXENV: "py37"
+ PYTHON: "C:\\Python37"
+ PYTHON_VERSION: "3.7.5"
+ PYTHON_ARCH: "32"
+ COVERAGE_NO_PYTRACER: "1"
+
+ - JOB: "3.8 32-bit"
+ TOXENV: "py38"
+ PYTHON: "C:\\Python38"
+ PYTHON_VERSION: "3.8.0"
+ PYTHON_ARCH: "32"
+ COVERAGE_NO_PYTRACER: "1"
+
+ - JOB: "3.9 32-bit"
+ TOXENV: "py39"
+ PYTHON: "C:\\Python39"
+ PYTHON_VERSION: "3.9.0a3"
+ PYTHON_ARCH: "32"
+ COVERAGE_NO_PYTRACER: "1"
+
+ # Meta coverage
+ - JOB: "Meta 2.7"
+ TOXENV: "py27"
+ PYTHON: "C:\\Python27"
+ PYTHON_VERSION: "2.7.17"
+ PYTHON_ARCH: "32"
+ COVERAGE_COVERAGE: "yes"
+
+ - JOB: "Meta 3.6"
+ TOXENV: "py36"
+ PYTHON: "C:\\Python36"
+ PYTHON_VERSION: "3.6.9"
+ PYTHON_ARCH: "32"
+ COVERAGE_COVERAGE: "yes"
+
+init:
+ - "ECHO %TOXENV%"
+
+install:
+ # Install Python (from the official .msi of http://python.org) and pip when
+ # not already installed.
+ - ps: if (-not(Test-Path($env:PYTHON))) { & ci\install.ps1 }
+
+ # Prepend newly installed Python to the PATH of this build (this cannot be
+ # done from inside the powershell script as it would require to restart
+ # the parent CMD process).
+ - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
+
+ # Check that we have the expected version and architecture for Python
+ - "python -c \"import struct, sys; print('{}\\n{}-bit'.format(sys.version, struct.calcsize('P') * 8))\""
+
+ # Upgrade to the latest version of pip to avoid it displaying warnings
+ # about it being out of date.
+ - "python -m pip install --disable-pip-version-check --upgrade pip"
+ # And upgrade virtualenv to get the latest pip inside .tox virtualenvs.
+ - "python -m pip install --disable-pip-version-check --upgrade virtualenv"
+
+ # Install requirements.
+ - "%CMD_IN_ENV% pip install -r requirements/ci.pip"
+
+ # Make a pythonX.Y.bat file in the current directory so that tox will find it
+ # and pythonX.Y will mean what we want it to.
+ - "python -c \"import os; open('python{}.{}.bat'.format(*os.environ['TOXENV'][2:]), 'w').write('@{}\\\\python \\x25*\\n'.format(os.environ['PYTHON']))\""
+
+build_script:
+ # If not a metacov job, then build wheel installers.
+ - if NOT "%COVERAGE_COVERAGE%" == "yes" %CMD_IN_ENV% %PYTHON%\python setup.py bdist_wheel
+
+ # Push everything in dist\ as an artifact.
+ - ps: if ( Test-Path 'dist' -PathType Container ) { Get-ChildItem dist\*.* | % { Push-AppveyorArtifact $_.FullName -FileName ('dist\' + $_.Name) } }
+
+test_script:
+ - "%CMD_IN_ENV% %PYTHON%\\Scripts\\tox"
+
+after_test:
+ - if "%COVERAGE_COVERAGE%" == "yes" 7z a metacov-win-%TOXENV%.zip %APPVEYOR_BUILD_FOLDER%\.metacov*
+ - if "%COVERAGE_COVERAGE%" == "yes" %CMD_IN_ENV% %PYTHON%\python igor.py combine_html
+ - if "%COVERAGE_COVERAGE%" == "yes" %CMD_IN_ENV% pip install codecov
+ - if "%COVERAGE_COVERAGE%" == "yes" %CMD_IN_ENV% codecov -X gcov --file coverage.xml
+
+# Uncomment this to enable RDP access to the build when done.
+# https://www.appveyor.com/docs/how-to/rdp-to-build-worker/
+# on_finish:
+# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
+
+artifacts:
+ - path: "metacov-*.zip"
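The install step above writes a pythonX.Y.bat shim with a dense one-liner so that tox can find "pythonX.Y" on the PATH. Written out long-hand as a sketch (not part of the config; the environment values are examples):

    import os

    toxenv = os.environ['TOXENV']         # e.g. "py37"
    major, minor = toxenv[2], toxenv[3:]  # "3", "7"
    python_home = os.environ['PYTHON']    # e.g. r"C:\Python37-x64"

    # The .bat file forwards all arguments (%*) to the real interpreter,
    # so tox's lookup of "python3.7" resolves to the AppVeyor install.
    with open('python{}.{}.bat'.format(major, minor), 'w') as f:
        f.write('@{}\\python %*\n'.format(python_home))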
diff --git a/third_party/python/coverage/ci/README.txt b/third_party/python/coverage/ci/README.txt
new file mode 100644
index 0000000000..a34d036bb0
--- /dev/null
+++ b/third_party/python/coverage/ci/README.txt
@@ -0,0 +1 @@
+Files to support continuous integration systems.
diff --git a/third_party/python/coverage/ci/download_appveyor.py b/third_party/python/coverage/ci/download_appveyor.py
new file mode 100644
index 0000000000..a3d814962d
--- /dev/null
+++ b/third_party/python/coverage/ci/download_appveyor.py
@@ -0,0 +1,95 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Use the Appveyor API to download Windows artifacts."""
+
+import os
+import os.path
+import sys
+import zipfile
+
+import requests
+
+
+def make_auth_headers():
+ """Make the authentication headers needed to use the Appveyor API."""
+ with open("ci/appveyor.token") as f:
+ token = f.read().strip()
+
+ headers = {
+ 'Authorization': 'Bearer {}'.format(token),
+ }
+ return headers
+
+
+def make_url(url, **kwargs):
+ """Build an Appveyor API url."""
+ return "https://ci.appveyor.com/api" + url.format(**kwargs)
+
+
+def get_project_build(account_project):
+ """Get the details of the latest Appveyor build."""
+ url = make_url("/projects/{account_project}", account_project=account_project)
+ response = requests.get(url, headers=make_auth_headers())
+ return response.json()
+
+
+def download_latest_artifacts(account_project):
+ """Download all the artifacts from the latest build."""
+ build = get_project_build(account_project)
+ jobs = build['build']['jobs']
+ print("Build {0[build][version]}, {1} jobs: {0[build][message]}".format(build, len(jobs)))
+ for job in jobs:
+ name = job['name'].partition(':')[2].split(',')[0].strip()
+ print(" {0}: {1[status]}, {1[artifactsCount]} artifacts".format(name, job))
+
+ url = make_url("/buildjobs/{jobid}/artifacts", jobid=job['jobId'])
+ response = requests.get(url, headers=make_auth_headers())
+ artifacts = response.json()
+
+ for artifact in artifacts:
+ is_zip = artifact['type'] == "Zip"
+ filename = artifact['fileName']
+ print(" {}, {} bytes".format(filename, artifact['size']))
+
+ url = make_url(
+ "/buildjobs/{jobid}/artifacts/{filename}",
+ jobid=job['jobId'],
+ filename=filename
+ )
+ download_url(url, filename, make_auth_headers())
+
+ if is_zip:
+ unpack_zipfile(filename)
+ os.remove(filename)
+
+
+def ensure_dirs(filename):
+ """Make sure the directories exist for `filename`."""
+ dirname, _ = os.path.split(filename)
+ if dirname and not os.path.exists(dirname):
+ os.makedirs(dirname)
+
+
+def download_url(url, filename, headers):
+ """Download a file from `url` to `filename`."""
+ ensure_dirs(filename)
+ response = requests.get(url, headers=headers, stream=True)
+ if response.status_code == 200:
+ with open(filename, 'wb') as f:
+ for chunk in response.iter_content(16*1024):
+ f.write(chunk)
+
+
+def unpack_zipfile(filename):
+ """Unpack a zipfile, using the names in the zip."""
+ with open(filename, 'rb') as fzip:
+ z = zipfile.ZipFile(fzip)
+ for name in z.namelist():
+ print(" extracting {}".format(name))
+ ensure_dirs(name)
+ z.extract(name)
+
+
+if __name__ == "__main__":
+ download_latest_artifacts(sys.argv[1])
diff --git a/third_party/python/coverage/ci/install.ps1 b/third_party/python/coverage/ci/install.ps1
new file mode 100644
index 0000000000..fd5ab22021
--- /dev/null
+++ b/third_party/python/coverage/ci/install.ps1
@@ -0,0 +1,203 @@
+# From: https://github.com/ogrisel/python-appveyor-demo/blob/master/appveyor/install.ps1
+#
+#
+# Sample script to install Python and pip under Windows
+# Authors: Olivier Grisel, Jonathan Helmus, Kyle Kastner, and Alex Willmer
+# License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/
+
+$MINICONDA_URL = "http://repo.continuum.io/miniconda/"
+$BASE_URL = "https://www.python.org/ftp/python/"
+$GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py"
+$GET_PIP_PATH = "C:\get-pip.py"
+
+$PYTHON_PRERELEASE_REGEX = @"
+(?x)
+(?<major>\d+)
+\.
+(?<minor>\d+)
+\.
+(?<micro>\d+)
+(?<prerelease>[a-z]{1,2}\d+)
+"@
+
+
+function Download ($filename, $url) {
+ $webclient = New-Object System.Net.WebClient
+
+ $basedir = $pwd.Path + "\"
+ $filepath = $basedir + $filename
+ if (Test-Path $filename) {
+ Write-Host "Reusing" $filepath
+ return $filepath
+ }
+
+ # Download and retry up to 3 times in case of network transient errors.
+ Write-Host "Downloading" $filename "from" $url
+ $retry_attempts = 2
+ for ($i = 0; $i -lt $retry_attempts; $i++) {
+ try {
+ $webclient.DownloadFile($url, $filepath)
+ break
+ }
+ Catch [Exception]{
+ Start-Sleep 1
+ }
+ }
+ if (Test-Path $filepath) {
+ Write-Host "File saved at" $filepath
+ } else {
+ # Retry once to get the error message if any at the last try
+ $webclient.DownloadFile($url, $filepath)
+ }
+ return $filepath
+}
+
+
+function ParsePythonVersion ($python_version) {
+ if ($python_version -match $PYTHON_PRERELEASE_REGEX) {
+ return ([int]$matches.major, [int]$matches.minor, [int]$matches.micro,
+ $matches.prerelease)
+ }
+ $version_obj = [version]$python_version
+ return ($version_obj.major, $version_obj.minor, $version_obj.build, "")
+}
+
+
+function DownloadPython ($python_version, $platform_suffix) {
+ $major, $minor, $micro, $prerelease = ParsePythonVersion $python_version
+
+ $dir = "$major.$minor.$micro"
+ $ext = "exe"
+ if ($platform_suffix) {
+ $platform_suffix = "-$platform_suffix"
+ }
+
+ $filename = "python-$python_version$platform_suffix.$ext"
+ $url = "$BASE_URL$dir/$filename"
+ $filepath = Download $filename $url
+ return $filepath
+}
+
+
+function InstallPython ($python_version, $architecture, $python_home) {
+ Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home
+ if (Test-Path $python_home) {
+ Write-Host $python_home "already exists, skipping."
+ return $false
+ }
+ if ($architecture -eq "32") {
+ $platform_suffix = ""
+ } else {
+ $platform_suffix = "amd64"
+ }
+ $installer_path = DownloadPython $python_version $platform_suffix
+ $installer_ext = [System.IO.Path]::GetExtension($installer_path)
+ Write-Host "Installing $installer_path to $python_home"
+ $install_log = $python_home + ".log"
+ if ($installer_ext -eq '.msi') {
+ InstallPythonMSI $installer_path $python_home $install_log
+ } else {
+ InstallPythonEXE $installer_path $python_home $install_log
+ }
+ if (Test-Path $python_home) {
+ Write-Host "Python $python_version ($architecture) installation complete"
+ } else {
+ Write-Host "Failed to install Python in $python_home"
+ Get-Content -Path $install_log
+ Exit 1
+ }
+}
+
+
+function InstallPythonEXE ($exepath, $python_home, $install_log) {
+ $install_args = "/quiet InstallAllUsers=1 TargetDir=$python_home"
+ RunCommand $exepath $install_args
+}
+
+
+function InstallPythonMSI ($msipath, $python_home, $install_log) {
+ $install_args = "/qn /log $install_log /i $msipath TARGETDIR=$python_home"
+ $uninstall_args = "/qn /x $msipath"
+ RunCommand "msiexec.exe" $install_args
+ if (-not(Test-Path $python_home)) {
+        Write-Host "Python seems to be installed elsewhere, reinstalling."
+ RunCommand "msiexec.exe" $uninstall_args
+ RunCommand "msiexec.exe" $install_args
+ }
+}
+
+function RunCommand ($command, $command_args) {
+ Write-Host $command $command_args
+ Start-Process -FilePath $command -ArgumentList $command_args -Wait -Passthru
+}
+
+
+function InstallPip ($python_home) {
+ $pip_path = $python_home + "\Scripts\pip.exe"
+ $python_path = $python_home + "\python.exe"
+ if (-not(Test-Path $pip_path)) {
+ Write-Host "Installing pip..."
+ $webclient = New-Object System.Net.WebClient
+ $webclient.DownloadFile($GET_PIP_URL, $GET_PIP_PATH)
+ Write-Host "Executing:" $python_path $GET_PIP_PATH
+ & $python_path $GET_PIP_PATH
+ } else {
+ Write-Host "pip already installed."
+ }
+}
+
+
+function DownloadMiniconda ($python_version, $platform_suffix) {
+ $filename = "Miniconda-3.5.5-Windows-" + $platform_suffix + ".exe"
+ $url = $MINICONDA_URL + $filename
+ $filepath = Download $filename $url
+ return $filepath
+}
+
+
+function InstallMiniconda ($python_version, $architecture, $python_home) {
+ Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home
+ if (Test-Path $python_home) {
+ Write-Host $python_home "already exists, skipping."
+ return $false
+ }
+ if ($architecture -eq "32") {
+ $platform_suffix = "x86"
+ } else {
+ $platform_suffix = "x86_64"
+ }
+ $filepath = DownloadMiniconda $python_version $platform_suffix
+ Write-Host "Installing" $filepath "to" $python_home
+ $install_log = $python_home + ".log"
+ $args = "/S /D=$python_home"
+ Write-Host $filepath $args
+ Start-Process -FilePath $filepath -ArgumentList $args -Wait -Passthru
+ if (Test-Path $python_home) {
+ Write-Host "Python $python_version ($architecture) installation complete"
+ } else {
+ Write-Host "Failed to install Python in $python_home"
+ Get-Content -Path $install_log
+ Exit 1
+ }
+}
+
+
+function InstallMinicondaPip ($python_home) {
+ $pip_path = $python_home + "\Scripts\pip.exe"
+ $conda_path = $python_home + "\Scripts\conda.exe"
+ if (-not(Test-Path $pip_path)) {
+ Write-Host "Installing pip..."
+ $args = "install --yes pip"
+ Write-Host $conda_path $args
+ Start-Process -FilePath "$conda_path" -ArgumentList $args -Wait -Passthru
+ } else {
+ Write-Host "pip already installed."
+ }
+}
+
+function main () {
+ InstallPython $env:PYTHON_VERSION $env:PYTHON_ARCH $env:PYTHON
+ InstallPip $env:PYTHON
+}
+
+main
diff --git a/third_party/python/coverage/ci/manylinux.sh b/third_party/python/coverage/ci/manylinux.sh
new file mode 100755
index 0000000000..1fafec9ded
--- /dev/null
+++ b/third_party/python/coverage/ci/manylinux.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+# From: https://github.com/pypa/python-manylinux-demo/blob/master/travis/build-wheels.sh
+# which is in the public domain.
+#
+# This is run inside a CentOS 5 virtual machine to build manylinux wheels:
+#
+# $ docker run -v `pwd`:/io quay.io/pypa/manylinux1_x86_64 /io/ci/manylinux.sh build
+#
+
+set -e -x
+
+action=$1
+shift
+
+if [[ $action == "build" ]]; then
+ # Compile wheels
+ cd /io
+ for PYBIN in /opt/python/*/bin; do
+ if [[ $PYBIN == *cp34* ]]; then
+ # manylinux docker images have Python 3.4, but we don't use it.
+ continue
+ fi
+ "$PYBIN/pip" install -r requirements/wheel.pip
+ "$PYBIN/python" setup.py clean -a
+ "$PYBIN/python" setup.py bdist_wheel -d ~/wheelhouse/
+ done
+ cd ~
+
+ # Bundle external shared libraries into the wheels
+ for whl in wheelhouse/*.whl; do
+ auditwheel repair "$whl" -w /io/dist/
+ done
+
+elif [[ $action == "test" ]]; then
+ # Create "pythonX.Y" links
+ for PYBIN in /opt/python/*/bin/; do
+ if [[ $PYBIN == *cp34* ]]; then
+ # manylinux docker images have Python 3.4, but we don't use it.
+ continue
+ fi
+ PYNAME=$("$PYBIN/python" -c "import sys; print('python{0[0]}.{0[1]}'.format(sys.version_info))")
+ ln -sf "$PYBIN/$PYNAME" /usr/local/bin/$PYNAME
+ done
+
+ # Install packages and test
+ TOXBIN=/opt/python/cp36-cp36m/bin
+ "$TOXBIN/pip" install -r /io/requirements/tox.pip
+
+ cd /io
+ export PYTHONPYCACHEPREFIX=/opt/pyc
+ if [[ $1 == "meta" ]]; then
+ shift
+ export COVERAGE_COVERAGE=yes
+ fi
+ TOXWORKDIR=.tox/linux "$TOXBIN/tox" "$@" || true
+ cd ~
+
+else
+ echo "Need an action to perform!"
+fi
diff --git a/third_party/python/coverage/ci/run_with_env.cmd b/third_party/python/coverage/ci/run_with_env.cmd
new file mode 100644
index 0000000000..66b9252efc
--- /dev/null
+++ b/third_party/python/coverage/ci/run_with_env.cmd
@@ -0,0 +1,91 @@
+:: From: https://github.com/ogrisel/python-appveyor-demo/blob/master/appveyor/run_with_env.cmd
+::
+::
+:: To build extensions for 64 bit Python 3, we need to configure environment
+:: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of:
+:: MS Windows SDK for Windows 7 and .NET Framework 4 (SDK v7.1)
+::
+:: To build extensions for 64 bit Python 2, we need to configure environment
+:: variables to use the MSVC 2008 C++ compilers from GRMSDKX_EN_DVD.iso of:
+:: MS Windows SDK for Windows 7 and .NET Framework 3.5 (SDK v7.0)
+::
+:: 32 bit builds, and 64-bit builds for 3.5 and beyond, do not require specific
+:: environment configurations.
+::
+:: Note: this script needs to be run with the /E:ON and /V:ON flags for the
+:: cmd interpreter, at least for (SDK v7.0)
+::
+:: More details at:
+:: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows
+:: http://stackoverflow.com/a/13751649/163740
+::
+:: Author: Olivier Grisel
+:: License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/
+::
+:: Notes about batch files for Python people:
+::
+:: Quotes in values are literally part of the values:
+:: SET FOO="bar"
+:: FOO is now five characters long: " b a r "
+:: If you don't want quotes, don't include them on the right-hand side.
+::
+:: The CALL lines at the end of this file look redundant, but if you move them
+:: outside of the IF clauses, they do not run properly in the SET_SDK_64==Y
+:: case, I don't know why.
+@ECHO OFF
+
+SET COMMAND_TO_RUN=%*
+SET WIN_SDK_ROOT=C:\Program Files\Microsoft SDKs\Windows
+SET WIN_WDK=c:\Program Files (x86)\Windows Kits\10\Include\wdf
+
+:: Extract the major and minor versions, and allow for the minor version to be
+:: more than 9. This requires the version number to have two dots in it.
+SET MAJOR_PYTHON_VERSION=%PYTHON_VERSION:~0,1%
+IF "%PYTHON_VERSION:~3,1%" == "." (
+ SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,1%
+) ELSE (
+ SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,2%
+)
+
+:: Based on the Python version, determine what SDK version to use, and whether
+:: to set the SDK for 64-bit.
+IF %MAJOR_PYTHON_VERSION% == 2 (
+ SET WINDOWS_SDK_VERSION="v7.0"
+ SET SET_SDK_64=Y
+) ELSE (
+ IF %MAJOR_PYTHON_VERSION% == 3 (
+ SET WINDOWS_SDK_VERSION="v7.1"
+ IF %MINOR_PYTHON_VERSION% LEQ 4 (
+ SET SET_SDK_64=Y
+ ) ELSE (
+ SET SET_SDK_64=N
+ IF EXIST "%WIN_WDK%" (
+ :: See: https://connect.microsoft.com/VisualStudio/feedback/details/1610302/
+ REN "%WIN_WDK%" 0wdf
+ )
+ )
+ ) ELSE (
+ ECHO Unsupported Python version: "%MAJOR_PYTHON_VERSION%"
+ EXIT 1
+ )
+)
+
+IF %PYTHON_ARCH% == 64 (
+ IF %SET_SDK_64% == Y (
+ ECHO Configuring Windows SDK %WINDOWS_SDK_VERSION% for Python %MAJOR_PYTHON_VERSION% on a 64 bit architecture
+ SET DISTUTILS_USE_SDK=1
+ SET MSSdk=1
+ "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Setup\WindowsSdkVer.exe" -q -version:%WINDOWS_SDK_VERSION%
+ "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Bin\SetEnv.cmd" /x64 /release
+ ECHO Executing: %COMMAND_TO_RUN%
+ call %COMMAND_TO_RUN% || EXIT 1
+ ) ELSE (
+ ECHO Using default MSVC build environment for 64 bit architecture
+ ECHO Executing: %COMMAND_TO_RUN%
+ call %COMMAND_TO_RUN% || EXIT 1
+ )
+) ELSE (
+ ECHO Using default MSVC build environment for 32 bit architecture
+ ECHO Executing: %COMMAND_TO_RUN%
+ call %COMMAND_TO_RUN% || EXIT 1
+)
diff --git a/third_party/python/coverage/ci/upload_relnotes.py b/third_party/python/coverage/ci/upload_relnotes.py
new file mode 100644
index 0000000000..630f4d0a3f
--- /dev/null
+++ b/third_party/python/coverage/ci/upload_relnotes.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python3
+"""
+Upload CHANGES.md to Tidelift as Markdown chunks
+
+Put your Tidelift API token in a file called tidelift.token alongside this
+program, for example:
+
+ user/n3IwOpxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxc2ZwE4
+
+Run with two arguments: the .md file to parse, and the Tidelift package name:
+
+ python upload_relnotes.py CHANGES.md pypi/coverage
+
+Every section that has something that looks like a version number in it will
+be uploaded as the release notes for that version.
+
+"""
+
+import os.path
+import re
+import sys
+
+import requests
+
+class TextChunkBuffer:
+ """Hold onto text chunks until needed."""
+ def __init__(self):
+ self.buffer = []
+
+ def append(self, text):
+ """Add `text` to the buffer."""
+ self.buffer.append(text)
+
+ def clear(self):
+ """Clear the buffer."""
+ self.buffer = []
+
+ def flush(self):
+ """Produce a ("text", text) tuple if there's anything here."""
+ buffered = "".join(self.buffer).strip()
+ if buffered:
+ yield ("text", buffered)
+ self.clear()
+
+
+def parse_md(lines):
+ """Parse markdown lines, producing (type, text) chunks."""
+ buffer = TextChunkBuffer()
+
+ for line in lines:
+ header_match = re.search(r"^(#+) (.+)$", line)
+ is_header = bool(header_match)
+ if is_header:
+ yield from buffer.flush()
+ hashes, text = header_match.groups()
+ yield (f"h{len(hashes)}", text)
+ else:
+ buffer.append(line)
+
+ yield from buffer.flush()
+
+
+def sections(parsed_data):
+ """Convert a stream of parsed tokens into sections with text and notes.
+
+ Yields a stream of:
+ ('h-level', 'header text', 'text')
+
+ """
+ header = None
+ text = []
+ for ttype, ttext in parsed_data:
+ if ttype.startswith('h'):
+ if header:
+ yield (*header, "\n".join(text))
+ text = []
+ header = (ttype, ttext)
+ elif ttype == "text":
+ text.append(ttext)
+ else:
+ raise Exception(f"Don't know ttype {ttype!r}")
+ yield (*header, "\n".join(text))
+
+
+def relnotes(mdlines):
+ r"""Yield (version, text) pairs from markdown lines.
+
+ Each tuple is a separate version mentioned in the release notes.
+
+ A version is any section with \d\.\d in the header text.
+
+ """
+ for _, htext, text in sections(parse_md(mdlines)):
+ m_version = re.search(r"\d+\.\d[^ ]*", htext)
+ if m_version:
+ version = m_version.group()
+ yield version, text
+
+def update_release_note(package, version, text):
+ """Update the release notes for one version of a package."""
+ url = f"https://api.tidelift.com/external-api/lifting/{package}/release-notes/{version}"
+ token_file = os.path.join(os.path.dirname(__file__), "tidelift.token")
+ with open(token_file) as ftoken:
+ token = ftoken.read().strip()
+ headers = {
+ "Authorization": f"Bearer: {token}",
+ }
+ req_args = dict(url=url, data=text.encode('utf8'), headers=headers)
+ result = requests.post(**req_args)
+ if result.status_code == 409:
+ result = requests.put(**req_args)
+ print(f"{version}: {result.status_code}")
+
+def parse_and_upload(md_filename, package):
+ """Main function: parse markdown and upload to Tidelift."""
+ with open(md_filename) as f:
+ markdown = f.read()
+ for version, text in relnotes(markdown.splitlines(True)):
+ update_release_note(package, version, text)
+
+if __name__ == "__main__":
+ parse_and_upload(*sys.argv[1:]) # pylint: disable=no-value-for-parameter
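A small sketch of how the parsing pipeline above behaves on an in-memory changelog (not part of the upstream script; it assumes the relnotes function above is importable):

    SAMPLE_LINES = [
        "# Change history\n",
        "\n",
        "## Version 5.1\n",
        "\n",
        "- Fixed a thing.\n",
        "\n",
        "## Version 5.0.4\n",
        "\n",
        "- Another fix.\n",
    ]

    for version, text in relnotes(SAMPLE_LINES):
        print(version, "->", text)
    # Prints roughly:
    #   5.1 -> - Fixed a thing.
    #   5.0.4 -> - Another fix.

Only the headers that contain something version-like ("5.1", "5.0.4") produce release notes; the top-level "Change history" section is skipped.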
diff --git a/third_party/python/coverage/coverage/__init__.py b/third_party/python/coverage/coverage/__init__.py
new file mode 100644
index 0000000000..331b304b68
--- /dev/null
+++ b/third_party/python/coverage/coverage/__init__.py
@@ -0,0 +1,36 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Code coverage measurement for Python.
+
+Ned Batchelder
+https://nedbatchelder.com/code/coverage
+
+"""
+
+import sys
+
+from coverage.version import __version__, __url__, version_info
+
+from coverage.control import Coverage, process_startup
+from coverage.data import CoverageData
+from coverage.misc import CoverageException
+from coverage.plugin import CoveragePlugin, FileTracer, FileReporter
+from coverage.pytracer import PyTracer
+
+# Backward compatibility.
+coverage = Coverage
+
+# On Windows, we encode and decode deep enough that something goes wrong and
+# the encodings.utf_8 module is loaded and then unloaded, I don't know why.
+# Adding a reference here prevents it from being unloaded. Yuk.
+import encodings.utf_8 # pylint: disable=wrong-import-position, wrong-import-order
+
+# Because of the "from coverage.control import fooey" lines at the top of the
+# file, there's an entry for coverage.coverage in sys.modules, mapped to None.
+# This makes some inspection tools (like pydoc) unable to find the class
+# coverage.coverage. So remove that entry.
+try:
+ del sys.modules['coverage.coverage']
+except KeyError:
+ pass
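The "coverage = Coverage" alias above keeps the pre-4.0 spelling working. A tiny sketch (not upstream code) of what that buys callers:

    import coverage

    new_style = coverage.Coverage()   # current API
    old_style = coverage.coverage()   # legacy spelling, kept by the alias
    assert type(new_style) is type(old_style)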
diff --git a/third_party/python/coverage/coverage/__main__.py b/third_party/python/coverage/coverage/__main__.py
new file mode 100644
index 0000000000..79aa4e2b35
--- /dev/null
+++ b/third_party/python/coverage/coverage/__main__.py
@@ -0,0 +1,8 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Coverage.py's main entry point."""
+
+import sys
+from coverage.cmdline import main
+sys.exit(main())
diff --git a/third_party/python/coverage/coverage/annotate.py b/third_party/python/coverage/coverage/annotate.py
new file mode 100644
index 0000000000..999ab6e557
--- /dev/null
+++ b/third_party/python/coverage/coverage/annotate.py
@@ -0,0 +1,108 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Source file annotation for coverage.py."""
+
+import io
+import os
+import re
+
+from coverage.files import flat_rootname
+from coverage.misc import ensure_dir, isolate_module
+from coverage.report import get_analysis_to_report
+
+os = isolate_module(os)
+
+
+class AnnotateReporter(object):
+ """Generate annotated source files showing line coverage.
+
+ This reporter creates annotated copies of the measured source files. Each
+ .py file is copied as a .py,cover file, with a left-hand margin annotating
+ each line::
+
+ > def h(x):
+ - if 0: #pragma: no cover
+ - pass
+ > if x == 1:
+ ! a = 1
+ > else:
+ > a = 2
+
+ > h(2)
+
+ Executed lines use '>', lines not executed use '!', lines excluded from
+ consideration use '-'.
+
+ """
+
+ def __init__(self, coverage):
+ self.coverage = coverage
+ self.config = self.coverage.config
+ self.directory = None
+
+ blank_re = re.compile(r"\s*(#|$)")
+ else_re = re.compile(r"\s*else\s*:\s*(#|$)")
+
+ def report(self, morfs, directory=None):
+ """Run the report.
+
+ See `coverage.report()` for arguments.
+
+ """
+ self.directory = directory
+ self.coverage.get_data()
+ for fr, analysis in get_analysis_to_report(self.coverage, morfs):
+ self.annotate_file(fr, analysis)
+
+ def annotate_file(self, fr, analysis):
+ """Annotate a single file.
+
+ `fr` is the FileReporter for the file to annotate.
+
+ """
+ statements = sorted(analysis.statements)
+ missing = sorted(analysis.missing)
+ excluded = sorted(analysis.excluded)
+
+ if self.directory:
+ ensure_dir(self.directory)
+ dest_file = os.path.join(self.directory, flat_rootname(fr.relative_filename()))
+ if dest_file.endswith("_py"):
+ dest_file = dest_file[:-3] + ".py"
+ dest_file += ",cover"
+ else:
+ dest_file = fr.filename + ",cover"
+
+ with io.open(dest_file, 'w', encoding='utf8') as dest:
+ i = 0
+ j = 0
+ covered = True
+ source = fr.source()
+ for lineno, line in enumerate(source.splitlines(True), start=1):
+ while i < len(statements) and statements[i] < lineno:
+ i += 1
+ while j < len(missing) and missing[j] < lineno:
+ j += 1
+ if i < len(statements) and statements[i] == lineno:
+ covered = j >= len(missing) or missing[j] > lineno
+ if self.blank_re.match(line):
+ dest.write(u' ')
+ elif self.else_re.match(line):
+ # Special logic for lines containing only 'else:'.
+ if i >= len(statements) and j >= len(missing):
+ dest.write(u'! ')
+ elif i >= len(statements) or j >= len(missing):
+ dest.write(u'> ')
+ elif statements[i] == missing[j]:
+ dest.write(u'! ')
+ else:
+ dest.write(u'> ')
+ elif lineno in excluded:
+ dest.write(u'- ')
+ elif covered:
+ dest.write(u'> ')
+ else:
+ dest.write(u'! ')
+
+ dest.write(line)
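A small sketch of producing the ",cover" files through the public API; the module name and output directory are illustrative:

    import coverage

    cov = coverage.Coverage()
    cov.start()
    import mymodule                      # hypothetical module to measure
    cov.stop()
    cov.annotate(directory="annotated")  # writes mymodule.py,cover with the >/!/- margin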
diff --git a/third_party/python/coverage/coverage/backunittest.py b/third_party/python/coverage/coverage/backunittest.py
new file mode 100644
index 0000000000..078f48ccac
--- /dev/null
+++ b/third_party/python/coverage/coverage/backunittest.py
@@ -0,0 +1,33 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Implementations of unittest features from the future."""
+
+import unittest
+
+
+def unittest_has(method):
+ """Does `unittest.TestCase` have `method` defined?"""
+ return hasattr(unittest.TestCase, method)
+
+
+class TestCase(unittest.TestCase):
+ """Just like unittest.TestCase, but with assert methods added.
+
+ Designed to be compatible with 3.1 unittest. Methods are only defined if
+ `unittest` doesn't have them.
+
+ """
+ # pylint: disable=arguments-differ, deprecated-method
+
+ if not unittest_has('assertCountEqual'):
+ def assertCountEqual(self, *args, **kwargs):
+ return self.assertItemsEqual(*args, **kwargs)
+
+ if not unittest_has('assertRaisesRegex'):
+ def assertRaisesRegex(self, *args, **kwargs):
+ return self.assertRaisesRegexp(*args, **kwargs)
+
+ if not unittest_has('assertRegex'):
+ def assertRegex(self, *args, **kwargs):
+ return self.assertRegexpMatches(*args, **kwargs)
diff --git a/third_party/python/coverage/coverage/backward.py b/third_party/python/coverage/coverage/backward.py
new file mode 100644
index 0000000000..37b4916761
--- /dev/null
+++ b/third_party/python/coverage/coverage/backward.py
@@ -0,0 +1,253 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Add things to old Pythons so I can pretend they are newer."""
+
+# This file's purpose is to provide modules to be imported from here.
+# pylint: disable=unused-import
+
+import os
+import sys
+
+from coverage import env
+
+
+# Pythons 2 and 3 differ on where to get StringIO.
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from io import StringIO
+
+# In py3, ConfigParser was renamed to the more-standard configparser.
+# But there's a py3 backport that installs "configparser" in py2, and I don't
+# want it because it has annoying deprecation warnings. So try the real py2
+# import first.
+try:
+ import ConfigParser as configparser
+except ImportError:
+ import configparser
+
+# What's a string called?
+try:
+ string_class = basestring
+except NameError:
+ string_class = str
+
+# What's a Unicode string called?
+try:
+ unicode_class = unicode
+except NameError:
+ unicode_class = str
+
+# range or xrange?
+try:
+ range = xrange # pylint: disable=redefined-builtin
+except NameError:
+ range = range
+
+try:
+ from itertools import zip_longest
+except ImportError:
+ from itertools import izip_longest as zip_longest
+
+# Where do we get the thread id from?
+try:
+ from thread import get_ident as get_thread_id
+except ImportError:
+ from threading import get_ident as get_thread_id
+
+try:
+ os.PathLike
+except AttributeError:
+ # This is Python 2 and 3
+ path_types = (bytes, string_class, unicode_class)
+else:
+ # 3.6+
+ path_types = (bytes, str, os.PathLike)
+
+# shlex.quote is new, but there's an undocumented implementation in "pipes",
+# who knew!?
+try:
+ from shlex import quote as shlex_quote
+except ImportError:
+ # Useful function, available under a different (undocumented) name
+ # in Python versions earlier than 3.3.
+ from pipes import quote as shlex_quote
+
+try:
+ import reprlib
+except ImportError:
+ import repr as reprlib
+
+# A function to iterate listlessly over a dict's items, and one to get the
+# items as a list.
+try:
+ {}.iteritems
+except AttributeError:
+ # Python 3
+ def iitems(d):
+ """Produce the items from dict `d`."""
+ return d.items()
+
+ def litems(d):
+ """Return a list of items from dict `d`."""
+ return list(d.items())
+else:
+ # Python 2
+ def iitems(d):
+ """Produce the items from dict `d`."""
+ return d.iteritems()
+
+ def litems(d):
+ """Return a list of items from dict `d`."""
+ return d.items()
+
+# Getting the `next` function from an iterator is different in 2 and 3.
+try:
+ iter([]).next
+except AttributeError:
+ def iternext(seq):
+ """Get the `next` function for iterating over `seq`."""
+ return iter(seq).__next__
+else:
+ def iternext(seq):
+ """Get the `next` function for iterating over `seq`."""
+ return iter(seq).next
+
+# Python 3.x is picky about bytes and strings, so provide methods to
+# get them right, and make them no-ops in 2.x
+if env.PY3:
+ def to_bytes(s):
+ """Convert string `s` to bytes."""
+ return s.encode('utf8')
+
+ def to_string(b):
+ """Convert bytes `b` to string."""
+ return b.decode('utf8')
+
+ def binary_bytes(byte_values):
+ """Produce a byte string with the ints from `byte_values`."""
+ return bytes(byte_values)
+
+ def byte_to_int(byte):
+ """Turn a byte indexed from a bytes object into an int."""
+ return byte
+
+ def bytes_to_ints(bytes_value):
+ """Turn a bytes object into a sequence of ints."""
+ # In Python 3, iterating bytes gives ints.
+ return bytes_value
+
+else:
+ def to_bytes(s):
+ """Convert string `s` to bytes (no-op in 2.x)."""
+ return s
+
+ def to_string(b):
+ """Convert bytes `b` to string."""
+ return b
+
+ def binary_bytes(byte_values):
+ """Produce a byte string with the ints from `byte_values`."""
+ return "".join(chr(b) for b in byte_values)
+
+ def byte_to_int(byte):
+ """Turn a byte indexed from a bytes object into an int."""
+ return ord(byte)
+
+ def bytes_to_ints(bytes_value):
+ """Turn a bytes object into a sequence of ints."""
+ for byte in bytes_value:
+ yield ord(byte)
+
+
+try:
+ # In Python 2.x, the builtins were in __builtin__
+ BUILTINS = sys.modules['__builtin__']
+except KeyError:
+ # In Python 3.x, they're in builtins
+ BUILTINS = sys.modules['builtins']
+
+
+# imp was deprecated in Python 3.3
+try:
+ import importlib
+ import importlib.util
+ imp = None
+except ImportError:
+ importlib = None
+
+# We only want to use importlib if it has everything we need.
+try:
+ importlib_util_find_spec = importlib.util.find_spec
+except Exception:
+ import imp
+ importlib_util_find_spec = None
+
+# What is the .pyc magic number for this version of Python?
+try:
+ PYC_MAGIC_NUMBER = importlib.util.MAGIC_NUMBER
+except AttributeError:
+ PYC_MAGIC_NUMBER = imp.get_magic()
+
+
+def code_object(fn):
+ """Get the code object from a function."""
+ try:
+ return fn.func_code
+ except AttributeError:
+ return fn.__code__
+
+
+try:
+ from types import SimpleNamespace
+except ImportError:
+ # The code from https://docs.python.org/3/library/types.html#types.SimpleNamespace
+ class SimpleNamespace:
+ """Python implementation of SimpleNamespace, for Python 2."""
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+ def __repr__(self):
+ keys = sorted(self.__dict__)
+ items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys)
+ return "{}({})".format(type(self).__name__, ", ".join(items))
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+
+def invalidate_import_caches():
+ """Invalidate any import caches that may or may not exist."""
+ if importlib and hasattr(importlib, "invalidate_caches"):
+ importlib.invalidate_caches()
+
+
+def import_local_file(modname, modfile=None):
+ """Import a local file as a module.
+
+ Opens a file in the current directory named `modname`.py, imports it
+ as `modname`, and returns the module object. `modfile` is the file to
+ import if it isn't in the current directory.
+
+ """
+ try:
+ from importlib.machinery import SourceFileLoader
+ except ImportError:
+ SourceFileLoader = None
+
+ if modfile is None:
+ modfile = modname + '.py'
+ if SourceFileLoader:
+ # pylint: disable=no-value-for-parameter, deprecated-method
+ mod = SourceFileLoader(modname, modfile).load_module()
+ else:
+ for suff in imp.get_suffixes(): # pragma: part covered
+ if suff[0] == '.py':
+ break
+
+ with open(modfile, 'r') as f:
+ # pylint: disable=undefined-loop-variable
+ mod = imp.load_module(modname, f, modfile, suff)
+
+ return mod
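A brief sketch of how these shims behave identically on Python 2 and 3:

    from coverage.backward import iitems, to_bytes, to_string, string_class

    d = {"a": 1, "b": 2}
    assert sorted(iitems(d)) == [("a", 1), ("b", 2)]  # items without building a list on py2
    assert to_string(to_bytes("hi")) == "hi"          # bytes/str round-trip on both versions
    assert isinstance("hi", string_class)             # basestring on py2, str on py3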
diff --git a/third_party/python/coverage/coverage/bytecode.py b/third_party/python/coverage/coverage/bytecode.py
new file mode 100644
index 0000000000..ceb18cf374
--- /dev/null
+++ b/third_party/python/coverage/coverage/bytecode.py
@@ -0,0 +1,19 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Bytecode manipulation for coverage.py"""
+
+import types
+
+
+def code_objects(code):
+ """Iterate over all the code objects in `code`."""
+ stack = [code]
+ while stack:
+ # We're going to return the code object on the stack, but first
+ # push its children for later returning.
+ code = stack.pop()
+ for c in code.co_consts:
+ if isinstance(c, types.CodeType):
+ stack.append(c)
+ yield code
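A quick sketch of what code_objects yields for nested definitions:

    from coverage.bytecode import code_objects

    src = "def outer():\n    def inner():\n        pass\n"
    names = [c.co_name for c in code_objects(compile(src, "<s>", "exec"))]
    # names == ["<module>", "outer", "inner"], following the stack walk above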
diff --git a/third_party/python/coverage/coverage/cmdline.py b/third_party/python/coverage/coverage/cmdline.py
new file mode 100644
index 0000000000..9fddb6bb85
--- /dev/null
+++ b/third_party/python/coverage/coverage/cmdline.py
@@ -0,0 +1,866 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Command-line support for coverage.py."""
+
+from __future__ import print_function
+
+import glob
+import optparse
+import os.path
+import shlex
+import sys
+import textwrap
+import traceback
+
+import coverage
+from coverage import Coverage
+from coverage import env
+from coverage.collector import CTracer
+from coverage.data import line_counts
+from coverage.debug import info_formatter, info_header, short_stack
+from coverage.execfile import PyRunner
+from coverage.misc import BaseCoverageException, ExceptionDuringRun, NoSource, output_encoding
+from coverage.results import should_fail_under
+
+
+class Opts(object):
+ """A namespace class for individual options we'll build parsers from."""
+
+ append = optparse.make_option(
+ '-a', '--append', action='store_true',
+ help="Append coverage data to .coverage, otherwise it starts clean each time.",
+ )
+ branch = optparse.make_option(
+ '', '--branch', action='store_true',
+ help="Measure branch coverage in addition to statement coverage.",
+ )
+ CONCURRENCY_CHOICES = [
+ "thread", "gevent", "greenlet", "eventlet", "multiprocessing",
+ ]
+ concurrency = optparse.make_option(
+ '', '--concurrency', action='store', metavar="LIB",
+ choices=CONCURRENCY_CHOICES,
+ help=(
+ "Properly measure code using a concurrency library. "
+ "Valid values are: %s."
+ ) % ", ".join(CONCURRENCY_CHOICES),
+ )
+ context = optparse.make_option(
+ '', '--context', action='store', metavar="LABEL",
+ help="The context label to record for this coverage run.",
+ )
+ debug = optparse.make_option(
+ '', '--debug', action='store', metavar="OPTS",
+ help="Debug options, separated by commas. [env: COVERAGE_DEBUG]",
+ )
+ directory = optparse.make_option(
+ '-d', '--directory', action='store', metavar="DIR",
+ help="Write the output files to DIR.",
+ )
+ fail_under = optparse.make_option(
+ '', '--fail-under', action='store', metavar="MIN", type="float",
+ help="Exit with a status of 2 if the total coverage is less than MIN.",
+ )
+ help = optparse.make_option(
+ '-h', '--help', action='store_true',
+ help="Get help on this command.",
+ )
+ ignore_errors = optparse.make_option(
+ '-i', '--ignore-errors', action='store_true',
+ help="Ignore errors while reading source files.",
+ )
+ include = optparse.make_option(
+ '', '--include', action='store',
+ metavar="PAT1,PAT2,...",
+ help=(
+ "Include only files whose paths match one of these patterns. "
+ "Accepts shell-style wildcards, which must be quoted."
+ ),
+ )
+ pylib = optparse.make_option(
+ '-L', '--pylib', action='store_true',
+ help=(
+ "Measure coverage even inside the Python installed library, "
+ "which isn't done by default."
+ ),
+ )
+ show_missing = optparse.make_option(
+ '-m', '--show-missing', action='store_true',
+ help="Show line numbers of statements in each module that weren't executed.",
+ )
+ skip_covered = optparse.make_option(
+ '--skip-covered', action='store_true',
+ help="Skip files with 100% coverage.",
+ )
+ skip_empty = optparse.make_option(
+ '--skip-empty', action='store_true',
+ help="Skip files with no code.",
+ )
+ show_contexts = optparse.make_option(
+ '--show-contexts', action='store_true',
+ help="Show contexts for covered lines.",
+ )
+ omit = optparse.make_option(
+ '', '--omit', action='store',
+ metavar="PAT1,PAT2,...",
+ help=(
+ "Omit files whose paths match one of these patterns. "
+ "Accepts shell-style wildcards, which must be quoted."
+ ),
+ )
+ contexts = optparse.make_option(
+ '', '--contexts', action='store',
+ metavar="REGEX1,REGEX2,...",
+ help=(
+ "Only display data from lines covered in the given contexts. "
+ "Accepts Python regexes, which must be quoted."
+ ),
+ )
+ output_xml = optparse.make_option(
+ '-o', '', action='store', dest="outfile",
+ metavar="OUTFILE",
+ help="Write the XML report to this file. Defaults to 'coverage.xml'",
+ )
+ output_json = optparse.make_option(
+ '-o', '', action='store', dest="outfile",
+ metavar="OUTFILE",
+ help="Write the JSON report to this file. Defaults to 'coverage.json'",
+ )
+ json_pretty_print = optparse.make_option(
+ '', '--pretty-print', action='store_true',
+ help="Format the JSON for human readers.",
+ )
+ parallel_mode = optparse.make_option(
+ '-p', '--parallel-mode', action='store_true',
+ help=(
+ "Append the machine name, process id and random number to the "
+ ".coverage data file name to simplify collecting data from "
+ "many processes."
+ ),
+ )
+ module = optparse.make_option(
+ '-m', '--module', action='store_true',
+ help=(
+ "<pyfile> is an importable Python module, not a script path, "
+ "to be run as 'python -m' would run it."
+ ),
+ )
+ rcfile = optparse.make_option(
+ '', '--rcfile', action='store',
+ help=(
+ "Specify configuration file. "
+ "By default '.coveragerc', 'setup.cfg', 'tox.ini', and "
+ "'pyproject.toml' are tried. [env: COVERAGE_RCFILE]"
+ ),
+ )
+ source = optparse.make_option(
+ '', '--source', action='store', metavar="SRC1,SRC2,...",
+ help="A list of packages or directories of code to be measured.",
+ )
+ timid = optparse.make_option(
+ '', '--timid', action='store_true',
+ help=(
+ "Use a simpler but slower trace method. Try this if you get "
+ "seemingly impossible results!"
+ ),
+ )
+ title = optparse.make_option(
+ '', '--title', action='store', metavar="TITLE",
+ help="A text string to use as the title on the HTML.",
+ )
+ version = optparse.make_option(
+ '', '--version', action='store_true',
+ help="Display version information and exit.",
+ )
+
+
+class CoverageOptionParser(optparse.OptionParser, object):
+ """Base OptionParser for coverage.py.
+
+ Problems don't exit the program.
+ Defaults are initialized for all options.
+
+ """
+
+ def __init__(self, *args, **kwargs):
+ super(CoverageOptionParser, self).__init__(
+ add_help_option=False, *args, **kwargs
+ )
+ self.set_defaults(
+ action=None,
+ append=None,
+ branch=None,
+ concurrency=None,
+ context=None,
+ debug=None,
+ directory=None,
+ fail_under=None,
+ help=None,
+ ignore_errors=None,
+ include=None,
+ module=None,
+ omit=None,
+ contexts=None,
+ parallel_mode=None,
+ pylib=None,
+ rcfile=True,
+ show_missing=None,
+ skip_covered=None,
+ skip_empty=None,
+ show_contexts=None,
+ source=None,
+ timid=None,
+ title=None,
+ version=None,
+ )
+
+ self.disable_interspersed_args()
+
+ class OptionParserError(Exception):
+ """Used to stop the optparse error handler ending the process."""
+ pass
+
+ def parse_args_ok(self, args=None, options=None):
+ """Call optparse.parse_args, but return a triple:
+
+ (ok, options, args)
+
+ """
+ try:
+ options, args = super(CoverageOptionParser, self).parse_args(args, options)
+ except self.OptionParserError:
+ return False, None, None
+ return True, options, args
+
+ def error(self, msg):
+ """Override optparse.error so sys.exit doesn't get called."""
+ show_help(msg)
+ raise self.OptionParserError
+
+
+class GlobalOptionParser(CoverageOptionParser):
+ """Command-line parser for coverage.py global option arguments."""
+
+ def __init__(self):
+ super(GlobalOptionParser, self).__init__()
+
+ self.add_options([
+ Opts.help,
+ Opts.version,
+ ])
+
+
+class CmdOptionParser(CoverageOptionParser):
+ """Parse one of the new-style commands for coverage.py."""
+
+ def __init__(self, action, options, defaults=None, usage=None, description=None):
+ """Create an OptionParser for a coverage.py command.
+
+ `action` is the slug to put into `options.action`.
+ `options` is a list of Option's for the command.
+ `defaults` is a dict of default values for options.
+ `usage` is the usage string to display in help.
+ `description` is the description of the command, for the help text.
+
+ """
+ if usage:
+ usage = "%prog " + usage
+ super(CmdOptionParser, self).__init__(
+ usage=usage,
+ description=description,
+ )
+ self.set_defaults(action=action, **(defaults or {}))
+ self.add_options(options)
+ self.cmd = action
+
+ def __eq__(self, other):
+ # A convenience equality, so that I can put strings in unit test
+ # results, and they will compare equal to objects.
+ return (other == "<CmdOptionParser:%s>" % self.cmd)
+
+ __hash__ = None # This object doesn't need to be hashed.
+
+ def get_prog_name(self):
+ """Override of an undocumented function in optparse.OptionParser."""
+ program_name = super(CmdOptionParser, self).get_prog_name()
+
+ # Include the sub-command for this parser as part of the command.
+ return "{command} {subcommand}".format(command=program_name, subcommand=self.cmd)
+
+
+GLOBAL_ARGS = [
+ Opts.debug,
+ Opts.help,
+ Opts.rcfile,
+ ]
+
+CMDS = {
+ 'annotate': CmdOptionParser(
+ "annotate",
+ [
+ Opts.directory,
+ Opts.ignore_errors,
+ Opts.include,
+ Opts.omit,
+ ] + GLOBAL_ARGS,
+ usage="[options] [modules]",
+ description=(
+ "Make annotated copies of the given files, marking statements that are executed "
+ "with > and statements that are missed with !."
+ ),
+ ),
+
+ 'combine': CmdOptionParser(
+ "combine",
+ [
+ Opts.append,
+ ] + GLOBAL_ARGS,
+ usage="[options] <path1> <path2> ... <pathN>",
+ description=(
+ "Combine data from multiple coverage files collected "
+ "with 'run -p'. The combined results are written to a single "
+ "file representing the union of the data. The positional "
+ "arguments are data files or directories containing data files. "
+ "If no paths are provided, data files in the default data file's "
+ "directory are combined."
+ ),
+ ),
+
+ 'debug': CmdOptionParser(
+ "debug", GLOBAL_ARGS,
+ usage="<topic>",
+ description=(
+ "Display information on the internals of coverage.py, "
+ "for diagnosing problems. "
+ "Topics are 'data' to show a summary of the collected data, "
+ "or 'sys' to show installation information."
+ ),
+ ),
+
+ 'erase': CmdOptionParser(
+ "erase", GLOBAL_ARGS,
+ description="Erase previously collected coverage data.",
+ ),
+
+ 'help': CmdOptionParser(
+ "help", GLOBAL_ARGS,
+ usage="[command]",
+ description="Describe how to use coverage.py",
+ ),
+
+ 'html': CmdOptionParser(
+ "html",
+ [
+ Opts.contexts,
+ Opts.directory,
+ Opts.fail_under,
+ Opts.ignore_errors,
+ Opts.include,
+ Opts.omit,
+ Opts.show_contexts,
+ Opts.skip_covered,
+ Opts.skip_empty,
+ Opts.title,
+ ] + GLOBAL_ARGS,
+ usage="[options] [modules]",
+ description=(
+ "Create an HTML report of the coverage of the files. "
+ "Each file gets its own page, with the source decorated to show "
+ "executed, excluded, and missed lines."
+ ),
+ ),
+
+ 'json': CmdOptionParser(
+ "json",
+ [
+ Opts.contexts,
+ Opts.fail_under,
+ Opts.ignore_errors,
+ Opts.include,
+ Opts.omit,
+ Opts.output_json,
+ Opts.json_pretty_print,
+ Opts.show_contexts,
+ ] + GLOBAL_ARGS,
+ usage="[options] [modules]",
+ description="Generate a JSON report of coverage results."
+ ),
+
+ 'report': CmdOptionParser(
+ "report",
+ [
+ Opts.contexts,
+ Opts.fail_under,
+ Opts.ignore_errors,
+ Opts.include,
+ Opts.omit,
+ Opts.show_missing,
+ Opts.skip_covered,
+ Opts.skip_empty,
+ ] + GLOBAL_ARGS,
+ usage="[options] [modules]",
+ description="Report coverage statistics on modules."
+ ),
+
+ 'run': CmdOptionParser(
+ "run",
+ [
+ Opts.append,
+ Opts.branch,
+ Opts.concurrency,
+ Opts.context,
+ Opts.include,
+ Opts.module,
+ Opts.omit,
+ Opts.pylib,
+ Opts.parallel_mode,
+ Opts.source,
+ Opts.timid,
+ ] + GLOBAL_ARGS,
+ usage="[options] <pyfile> [program options]",
+ description="Run a Python program, measuring code execution."
+ ),
+
+ 'xml': CmdOptionParser(
+ "xml",
+ [
+ Opts.fail_under,
+ Opts.ignore_errors,
+ Opts.include,
+ Opts.omit,
+ Opts.output_xml,
+ ] + GLOBAL_ARGS,
+ usage="[options] [modules]",
+ description="Generate an XML report of coverage results."
+ ),
+}
+
+
+def show_help(error=None, topic=None, parser=None):
+ """Display an error message, or the named topic."""
+ assert error or topic or parser
+
+ program_path = sys.argv[0]
+ if program_path.endswith(os.path.sep + '__main__.py'):
+ # The path is the main module of a package; get that path instead.
+ program_path = os.path.dirname(program_path)
+ program_name = os.path.basename(program_path)
+ if env.WINDOWS:
+ # entry_points={'console_scripts':...} on Windows makes files
+ # called coverage.exe, coverage3.exe, and coverage-3.5.exe. These
+ # invoke coverage-script.py, coverage3-script.py, and
+ # coverage-3.5-script.py. argv[0] is the .py file, but we want to
+ # get back to the original form.
+ auto_suffix = "-script.py"
+ if program_name.endswith(auto_suffix):
+ program_name = program_name[:-len(auto_suffix)]
+
+ help_params = dict(coverage.__dict__)
+ help_params['program_name'] = program_name
+ if CTracer is not None:
+ help_params['extension_modifier'] = 'with C extension'
+ else:
+ help_params['extension_modifier'] = 'without C extension'
+
+ if error:
+ print(error, file=sys.stderr)
+ print("Use '%s help' for help." % (program_name,), file=sys.stderr)
+ elif parser:
+ print(parser.format_help().strip())
+ print()
+ else:
+ help_msg = textwrap.dedent(HELP_TOPICS.get(topic, '')).strip()
+ if help_msg:
+ print(help_msg.format(**help_params))
+ else:
+ print("Don't know topic %r" % topic)
+ print("Full documentation is at {__url__}".format(**help_params))
+
+
+OK, ERR, FAIL_UNDER = 0, 1, 2
+
+
+class CoverageScript(object):
+ """The command-line interface to coverage.py."""
+
+ def __init__(self):
+ self.global_option = False
+ self.coverage = None
+
+ def command_line(self, argv):
+ """The bulk of the command line interface to coverage.py.
+
+ `argv` is the argument list to process.
+
+ Returns 0 if all is well, 1 if something went wrong, or 2 if total
+ coverage falls below the configured fail-under threshold.
+
+ """
+ # Collect the command-line options.
+ if not argv:
+ show_help(topic='minimum_help')
+ return OK
+
+ # The command syntax we parse depends on the first argument. Global
+ # switch syntax always starts with an option.
+ self.global_option = argv[0].startswith('-')
+ if self.global_option:
+ parser = GlobalOptionParser()
+ else:
+ parser = CMDS.get(argv[0])
+ if not parser:
+ show_help("Unknown command: '%s'" % argv[0])
+ return ERR
+ argv = argv[1:]
+
+ ok, options, args = parser.parse_args_ok(argv)
+ if not ok:
+ return ERR
+
+ # Handle help and version.
+ if self.do_help(options, args, parser):
+ return OK
+
+ # Listify the list options.
+ source = unshell_list(options.source)
+ omit = unshell_list(options.omit)
+ include = unshell_list(options.include)
+ debug = unshell_list(options.debug)
+ contexts = unshell_list(options.contexts)
+
+ # Do something.
+ self.coverage = Coverage(
+ data_suffix=options.parallel_mode,
+ cover_pylib=options.pylib,
+ timid=options.timid,
+ branch=options.branch,
+ config_file=options.rcfile,
+ source=source,
+ omit=omit,
+ include=include,
+ debug=debug,
+ concurrency=options.concurrency,
+ check_preimported=True,
+ context=options.context,
+ )
+
+ if options.action == "debug":
+ return self.do_debug(args)
+
+ elif options.action == "erase":
+ self.coverage.erase()
+ return OK
+
+ elif options.action == "run":
+ return self.do_run(options, args)
+
+ elif options.action == "combine":
+ if options.append:
+ self.coverage.load()
+ data_dirs = args or None
+ self.coverage.combine(data_dirs, strict=True)
+ self.coverage.save()
+ return OK
+
+ # Remaining actions are reporting, with some common options.
+ report_args = dict(
+ morfs=unglob_args(args),
+ ignore_errors=options.ignore_errors,
+ omit=omit,
+ include=include,
+ contexts=contexts,
+ )
+
+ # We need to be able to import from the current directory, because
+ plugins may try, for example, to read Django settings.
+ sys.path.insert(0, '')
+
+ self.coverage.load()
+
+ total = None
+ if options.action == "report":
+ total = self.coverage.report(
+ show_missing=options.show_missing,
+ skip_covered=options.skip_covered,
+ skip_empty=options.skip_empty,
+ **report_args
+ )
+ elif options.action == "annotate":
+ self.coverage.annotate(directory=options.directory, **report_args)
+ elif options.action == "html":
+ total = self.coverage.html_report(
+ directory=options.directory,
+ title=options.title,
+ skip_covered=options.skip_covered,
+ skip_empty=options.skip_empty,
+ show_contexts=options.show_contexts,
+ **report_args
+ )
+ elif options.action == "xml":
+ outfile = options.outfile
+ total = self.coverage.xml_report(outfile=outfile, **report_args)
+ elif options.action == "json":
+ outfile = options.outfile
+ total = self.coverage.json_report(
+ outfile=outfile,
+ pretty_print=options.pretty_print,
+ show_contexts=options.show_contexts,
+ **report_args
+ )
+
+ if total is not None:
+ # Apply the command line fail-under options, and then use the config
+ # value, so we can get fail_under from the config file.
+ if options.fail_under is not None:
+ self.coverage.set_option("report:fail_under", options.fail_under)
+
+ fail_under = self.coverage.get_option("report:fail_under")
+ precision = self.coverage.get_option("report:precision")
+ if should_fail_under(total, fail_under, precision):
+ return FAIL_UNDER
+
+ return OK
+
+ def do_help(self, options, args, parser):
+ """Deal with help requests.
+
+ Return True if it handled the request, False if not.
+
+ """
+ # Handle help.
+ if options.help:
+ if self.global_option:
+ show_help(topic='help')
+ else:
+ show_help(parser=parser)
+ return True
+
+ if options.action == "help":
+ if args:
+ for a in args:
+ parser = CMDS.get(a)
+ if parser:
+ show_help(parser=parser)
+ else:
+ show_help(topic=a)
+ else:
+ show_help(topic='help')
+ return True
+
+ # Handle version.
+ if options.version:
+ show_help(topic='version')
+ return True
+
+ return False
+
+ def do_run(self, options, args):
+ """Implementation of 'coverage run'."""
+
+ if not args:
+ if options.module:
+ # Specified -m with nothing else.
+ show_help("No module specified for -m")
+ return ERR
+ command_line = self.coverage.get_option("run:command_line")
+ if command_line is not None:
+ args = shlex.split(command_line)
+ if args and args[0] == "-m":
+ options.module = True
+ args = args[1:]
+ if not args:
+ show_help("Nothing to do.")
+ return ERR
+
+ if options.append and self.coverage.get_option("run:parallel"):
+ show_help("Can't append to data files in parallel mode.")
+ return ERR
+
+ if options.concurrency == "multiprocessing":
+ # Can't set other run-affecting command line options with
+ # multiprocessing.
+ for opt_name in ['branch', 'include', 'omit', 'pylib', 'source', 'timid']:
+ # As it happens, all of these options have no default, meaning
+ # they will be None if they have not been specified.
+ if getattr(options, opt_name) is not None:
+ show_help(
+ "Options affecting multiprocessing must only be specified "
+ "in a configuration file.\n"
+ "Remove --{} from the command line.".format(opt_name)
+ )
+ return ERR
+
+ runner = PyRunner(args, as_module=bool(options.module))
+ runner.prepare()
+
+ if options.append:
+ self.coverage.load()
+
+ # Run the script.
+ self.coverage.start()
+ code_ran = True
+ try:
+ runner.run()
+ except NoSource:
+ code_ran = False
+ raise
+ finally:
+ self.coverage.stop()
+ if code_ran:
+ self.coverage.save()
+
+ return OK
+
+ def do_debug(self, args):
+ """Implementation of 'coverage debug'."""
+
+ if not args:
+ show_help("What information would you like: config, data, sys, premain?")
+ return ERR
+
+ for info in args:
+ if info == 'sys':
+ sys_info = self.coverage.sys_info()
+ print(info_header("sys"))
+ for line in info_formatter(sys_info):
+ print(" %s" % line)
+ elif info == 'data':
+ self.coverage.load()
+ data = self.coverage.get_data()
+ print(info_header("data"))
+ print("path: %s" % self.coverage.get_data().data_filename())
+ if data:
+ print("has_arcs: %r" % data.has_arcs())
+ summary = line_counts(data, fullpath=True)
+ filenames = sorted(summary.keys())
+ print("\n%d files:" % len(filenames))
+ for f in filenames:
+ line = "%s: %d lines" % (f, summary[f])
+ plugin = data.file_tracer(f)
+ if plugin:
+ line += " [%s]" % plugin
+ print(line)
+ else:
+ print("No data collected")
+ elif info == 'config':
+ print(info_header("config"))
+ config_info = self.coverage.config.__dict__.items()
+ for line in info_formatter(config_info):
+ print(" %s" % line)
+ elif info == "premain":
+ print(info_header("premain"))
+ print(short_stack())
+ else:
+ show_help("Don't know what you mean by %r" % info)
+ return ERR
+
+ return OK
+
+
+def unshell_list(s):
+ """Turn a command-line argument into a list."""
+ if not s:
+ return None
+ if env.WINDOWS:
+ # When running coverage.py as coverage.exe, some of the behavior
+ # of the shell is emulated: wildcards are expanded into a list of
+ # file names. So you have to single-quote patterns on the command
+ # line, but (not) helpfully, the single quotes are included in the
+ # argument, so we have to strip them off here.
+ s = s.strip("'")
+ return s.split(',')
+
+
+def unglob_args(args):
+ """Interpret shell wildcards for platforms that need it."""
+ if env.WINDOWS:
+ globbed = []
+ for arg in args:
+ if '?' in arg or '*' in arg:
+ globbed.extend(glob.glob(arg))
+ else:
+ globbed.append(arg)
+ args = globbed
+ return args
+
+
+HELP_TOPICS = {
+ 'help': """\
+ Coverage.py, version {__version__} {extension_modifier}
+ Measure, collect, and report on code coverage in Python programs.
+
+ usage: {program_name} <command> [options] [args]
+
+ Commands:
+ annotate Annotate source files with execution information.
+ combine Combine a number of data files.
+ erase Erase previously collected coverage data.
+ help Get help on using coverage.py.
+ html Create an HTML report.
+ json Create a JSON report of coverage results.
+ report Report coverage stats on modules.
+ run Run a Python program and measure code execution.
+ xml Create an XML report of coverage results.
+
+ Use "{program_name} help <command>" for detailed help on any command.
+ """,
+
+ 'minimum_help': """\
+ Code coverage for Python. Use '{program_name} help' for help.
+ """,
+
+ 'version': """\
+ Coverage.py, version {__version__} {extension_modifier}
+ """,
+}
+
+
+def main(argv=None):
+ """The main entry point to coverage.py.
+
+ This is installed as the script entry point.
+
+ """
+ if argv is None:
+ argv = sys.argv[1:]
+ try:
+ status = CoverageScript().command_line(argv)
+ except ExceptionDuringRun as err:
+ # An exception was caught while running the product code. The
+ # sys.exc_info() return tuple is packed into an ExceptionDuringRun
+ # exception.
+ traceback.print_exception(*err.args) # pylint: disable=no-value-for-parameter
+ status = ERR
+ except BaseCoverageException as err:
+ # A controlled error inside coverage.py: print the message to the user.
+ msg = err.args[0]
+ if env.PY2:
+ msg = msg.encode(output_encoding())
+ print(msg)
+ status = ERR
+ except SystemExit as err:
+ # The user called `sys.exit()`. Exit with their argument, if any.
+ if err.args:
+ status = err.args[0]
+ else:
+ status = None
+ return status
+
+# Profiling using ox_profile. Install it from GitHub:
+# pip install git+https://github.com/emin63/ox_profile.git
+#
+# $set_env.py: COVERAGE_PROFILE - Set to use ox_profile.
+_profile = os.environ.get("COVERAGE_PROFILE", "")
+if _profile: # pragma: debugging
+ from ox_profile.core.launchers import SimpleLauncher # pylint: disable=import-error
+ original_main = main
+
+ def main(argv=None): # pylint: disable=function-redefined
+ """A wrapper around main that profiles."""
+ try:
+ profiler = SimpleLauncher.launch()
+ return original_main(argv)
+ finally:
+ data, _ = profiler.query(re_filter='coverage', max_records=100)
+ print(profiler.show(query=data, limit=100, sep='', col=''))
+ profiler.cancel()
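A typical command-line session built from the commands and options defined above; the program name is a placeholder:

    coverage run --branch my_program.py arg1 arg2
    coverage report -m            # terminal summary including missing line numbers
    coverage html -d htmlcov      # per-file HTML report
    coverage xml -o coverage.xml  # XML report written to coverage.xml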
diff --git a/third_party/python/coverage/coverage/collector.py b/third_party/python/coverage/coverage/collector.py
new file mode 100644
index 0000000000..a042357f67
--- /dev/null
+++ b/third_party/python/coverage/coverage/collector.py
@@ -0,0 +1,429 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Raw data collector for coverage.py."""
+
+import os
+import sys
+
+from coverage import env
+from coverage.backward import litems, range # pylint: disable=redefined-builtin
+from coverage.debug import short_stack
+from coverage.disposition import FileDisposition
+from coverage.misc import CoverageException, isolate_module
+from coverage.pytracer import PyTracer
+
+os = isolate_module(os)
+
+
+try:
+ # Use the C extension code when we can, for speed.
+ from coverage.tracer import CTracer, CFileDisposition
+except ImportError:
+ # Couldn't import the C extension, maybe it isn't built.
+ if os.getenv('COVERAGE_TEST_TRACER') == 'c':
+ # During testing, we use the COVERAGE_TEST_TRACER environment variable
+ # to indicate that we've fiddled with the environment to test this
+ # fallback code. If we thought we had a C tracer, but couldn't import
+ # it, then exit quickly and clearly instead of dribbling confusing
+ # errors. I'm using sys.exit here instead of an exception because an
+ # exception here causes all sorts of other noise in unittest.
+ sys.stderr.write("*** COVERAGE_TEST_TRACER is 'c' but can't import CTracer!\n")
+ sys.exit(1)
+ CTracer = None
+
+
+class Collector(object):
+ """Collects trace data.
+
+ Creates a Tracer object for each thread, since they track stack
+ information. Each Tracer points to the same shared data, contributing
+ traced data points.
+
+ When the Collector is started, it creates a Tracer for the current thread,
+ and installs a function to create Tracers for each new thread started.
+ When the Collector is stopped, all active Tracers are stopped.
+
+ Threads started while the Collector is stopped will never have Tracers
+ associated with them.
+
+ """
+
+ # The stack of active Collectors. Collectors are added here when started,
+ # and popped when stopped. Collectors on the stack are paused when not
+ # the top, and resumed when they become the top again.
+ _collectors = []
+
+ # The concurrency settings we support here.
+ SUPPORTED_CONCURRENCIES = set(["greenlet", "eventlet", "gevent", "thread"])
+
+ def __init__(
+ self, should_trace, check_include, should_start_context, file_mapper,
+ timid, branch, warn, concurrency,
+ ):
+ """Create a collector.
+
+ `should_trace` is a function, taking a file name and a frame, and
+ returning a `coverage.FileDisposition` object.
+
+ `check_include` is a function taking a file name and a frame. It returns
+ a boolean: True if the file should be traced, False if not.
+
+ `should_start_context` is a function taking a frame, and returning a
+ string. If the frame should be the start of a new context, the string
+ is the new context. If the frame should not be the start of a new
+ context, return None.
+
+ `file_mapper` is a function taking a filename, and returning a Unicode
+ filename. The result is the name that will be recorded in the data
+ file.
+
+ If `timid` is true, then a slower, simpler trace function will be
+ used. This is important for some environments where manipulation of
+ tracing functions makes the faster, more sophisticated trace function
+ not operate properly.
+
+ If `branch` is true, then branches will be measured. This involves
+ collecting data on which statements followed each other (arcs). Use
+ `get_arc_data` to get the arc data.
+
+ `warn` is a warning function, taking a single string message argument
+ and an optional slug argument which will be a string or None, to be
+ used if a warning needs to be issued.
+
+ `concurrency` is a list of strings indicating the concurrency libraries
+ in use. Valid values are "greenlet", "eventlet", "gevent", or "thread"
+ (the default). Of these four values, only one can be supplied. Other
+ values are ignored.
+
+ """
+ self.should_trace = should_trace
+ self.check_include = check_include
+ self.should_start_context = should_start_context
+ self.file_mapper = file_mapper
+ self.warn = warn
+ self.branch = branch
+ self.threading = None
+ self.covdata = None
+
+ self.static_context = None
+
+ self.origin = short_stack()
+
+ self.concur_id_func = None
+ self.mapped_file_cache = {}
+
+ # We can handle a few concurrency options here, but only one at a time.
+ these_concurrencies = self.SUPPORTED_CONCURRENCIES.intersection(concurrency)
+ if len(these_concurrencies) > 1:
+ raise CoverageException("Conflicting concurrency settings: %s" % concurrency)
+ self.concurrency = these_concurrencies.pop() if these_concurrencies else ''
+
+ try:
+ if self.concurrency == "greenlet":
+ import greenlet
+ self.concur_id_func = greenlet.getcurrent
+ elif self.concurrency == "eventlet":
+ import eventlet.greenthread # pylint: disable=import-error,useless-suppression
+ self.concur_id_func = eventlet.greenthread.getcurrent
+ elif self.concurrency == "gevent":
+ import gevent # pylint: disable=import-error,useless-suppression
+ self.concur_id_func = gevent.getcurrent
+ elif self.concurrency == "thread" or not self.concurrency:
+ # It's important to import threading only if we need it. If
+ # it's imported early, and the program being measured uses
+ # gevent, then gevent's monkey-patching won't work properly.
+ import threading
+ self.threading = threading
+ else:
+ raise CoverageException("Don't understand concurrency=%s" % concurrency)
+ except ImportError:
+ raise CoverageException(
+ "Couldn't trace with concurrency=%s, the module isn't installed." % (
+ self.concurrency,
+ )
+ )
+
+ self.reset()
+
+ if timid:
+ # Being timid: use the simple Python trace function.
+ self._trace_class = PyTracer
+ else:
+ # Being fast: use the C Tracer if it is available, else the Python
+ # trace function.
+ self._trace_class = CTracer or PyTracer
+
+ if self._trace_class is CTracer:
+ self.file_disposition_class = CFileDisposition
+ self.supports_plugins = True
+ else:
+ self.file_disposition_class = FileDisposition
+ self.supports_plugins = False
+
+ def __repr__(self):
+ return "<Collector at 0x%x: %s>" % (id(self), self.tracer_name())
+
+ def use_data(self, covdata, context):
+ """Use `covdata` for recording data."""
+ self.covdata = covdata
+ self.static_context = context
+ self.covdata.set_context(self.static_context)
+
+ def tracer_name(self):
+ """Return the class name of the tracer we're using."""
+ return self._trace_class.__name__
+
+ def _clear_data(self):
+ """Clear out existing data, but stay ready for more collection."""
+ # We used to use self.data.clear(), but that would remove filename
+ # keys and data values that were still in use higher up the stack
+ # when we are called as part of switch_context.
+ for d in self.data.values():
+ d.clear()
+
+ for tracer in self.tracers:
+ tracer.reset_activity()
+
+ def reset(self):
+ """Clear collected data, and prepare to collect more."""
+ # A dictionary mapping file names to dicts with line number keys (if not
+ # branch coverage), or mapping file names to dicts with line number
+ # pairs as keys (if branch coverage).
+ self.data = {}
+
+ # A dictionary mapping file names to file tracer plugin names that will
+ # handle them.
+ self.file_tracers = {}
+
+ # The .should_trace_cache attribute is a cache from file names to
+ # coverage.FileDisposition objects, or None. When a file is first
+ # considered for tracing, a FileDisposition is obtained from
+ # Coverage.should_trace. Its .trace attribute indicates whether the
+ # file should be traced or not. If it should be, a plugin with dynamic
+ # file names can decide not to trace it based on the dynamic file name
+ # being excluded by the inclusion rules, in which case the
+ # FileDisposition will be replaced by None in the cache.
+ if env.PYPY:
+ import __pypy__ # pylint: disable=import-error
+ # Alex Gaynor said:
+ # should_trace_cache is a strictly growing key: once a key is in
+ # it, it never changes. Further, the keys used to access it are
+ # generally constant, given sufficient context. That is to say, at
+ # any given point _trace() is called, pypy is able to know the key.
+ # This is because the key is determined by the physical source code
+ # line, and that's invariant with the call site.
+ #
+ # This property of a dict with immutable keys, combined with
+ # call-site-constant keys is a match for PyPy's module dict,
+ # which is optimized for such workloads.
+ #
+ # This gives a 20% benefit on the workload described at
+ # https://bitbucket.org/pypy/pypy/issue/1871/10x-slower-than-cpython-under-coverage
+ self.should_trace_cache = __pypy__.newdict("module")
+ else:
+ self.should_trace_cache = {}
+
+ # Our active Tracers.
+ self.tracers = []
+
+ self._clear_data()
+
+ def _start_tracer(self):
+ """Start a new Tracer object, and store it in self.tracers."""
+ tracer = self._trace_class()
+ tracer.data = self.data
+ tracer.trace_arcs = self.branch
+ tracer.should_trace = self.should_trace
+ tracer.should_trace_cache = self.should_trace_cache
+ tracer.warn = self.warn
+
+ if hasattr(tracer, 'concur_id_func'):
+ tracer.concur_id_func = self.concur_id_func
+ elif self.concur_id_func:
+ raise CoverageException(
+ "Can't support concurrency=%s with %s, only threads are supported" % (
+ self.concurrency, self.tracer_name(),
+ )
+ )
+
+ if hasattr(tracer, 'file_tracers'):
+ tracer.file_tracers = self.file_tracers
+ if hasattr(tracer, 'threading'):
+ tracer.threading = self.threading
+ if hasattr(tracer, 'check_include'):
+ tracer.check_include = self.check_include
+ if hasattr(tracer, 'should_start_context'):
+ tracer.should_start_context = self.should_start_context
+ tracer.switch_context = self.switch_context
+
+ fn = tracer.start()
+ self.tracers.append(tracer)
+
+ return fn
+
+ # The trace function has to be set individually on each thread before
+ # execution begins. Ironically, the only support the threading module has
+ # for running code before the thread main is the tracing function. So we
+ # install this as a trace function, and the first time it's called, it does
+ # the real trace installation.
+
+ def _installation_trace(self, frame, event, arg):
+ """Called on new threads, installs the real tracer."""
+ # Remove ourselves as the trace function.
+ sys.settrace(None)
+ # Install the real tracer.
+ fn = self._start_tracer()
+ # Invoke the real trace function with the current event, to be sure
+ # not to lose an event.
+ if fn:
+ fn = fn(frame, event, arg)
+ # Return the new trace function to continue tracing in this scope.
+ return fn
+
+ def start(self):
+ """Start collecting trace information."""
+ if self._collectors:
+ self._collectors[-1].pause()
+
+ self.tracers = []
+
+ # Check to see whether we had a fullcoverage tracer installed. If so,
+ # get the stack frames it stashed away for us.
+ traces0 = []
+ fn0 = sys.gettrace()
+ if fn0:
+ tracer0 = getattr(fn0, '__self__', None)
+ if tracer0:
+ traces0 = getattr(tracer0, 'traces', [])
+
+ try:
+ # Install the tracer on this thread.
+ fn = self._start_tracer()
+ except:
+ if self._collectors:
+ self._collectors[-1].resume()
+ raise
+
+ # If _start_tracer succeeded, then we add ourselves to the global
+ # stack of collectors.
+ self._collectors.append(self)
+
+ # Replay all the events from fullcoverage into the new trace function.
+ for args in traces0:
+ (frame, event, arg), lineno = args
+ try:
+ fn(frame, event, arg, lineno=lineno)
+ except TypeError:
+ raise Exception("fullcoverage must be run with the C trace function.")
+
+ # Install our installation tracer in threading, to jump-start other
+ # threads.
+ if self.threading:
+ self.threading.settrace(self._installation_trace)
+
+ def stop(self):
+ """Stop collecting trace information."""
+ assert self._collectors
+ if self._collectors[-1] is not self:
+ print("self._collectors:")
+ for c in self._collectors:
+ print(" {!r}\n{}".format(c, c.origin))
+ assert self._collectors[-1] is self, (
+ "Expected current collector to be %r, but it's %r" % (self, self._collectors[-1])
+ )
+
+ self.pause()
+
+ # Remove this Collector from the stack, and resume the one underneath
+ # (if any).
+ self._collectors.pop()
+ if self._collectors:
+ self._collectors[-1].resume()
+
+ def pause(self):
+ """Pause tracing, but be prepared to `resume`."""
+ for tracer in self.tracers:
+ tracer.stop()
+ stats = tracer.get_stats()
+ if stats:
+ print("\nCoverage.py tracer stats:")
+ for k in sorted(stats.keys()):
+ print("%20s: %s" % (k, stats[k]))
+ if self.threading:
+ self.threading.settrace(None)
+
+ def resume(self):
+ """Resume tracing after a `pause`."""
+ for tracer in self.tracers:
+ tracer.start()
+ if self.threading:
+ self.threading.settrace(self._installation_trace)
+ else:
+ self._start_tracer()
+
+ def _activity(self):
+ """Has any activity been traced?
+
+ Returns a boolean, True if any trace function was invoked.
+
+ """
+ return any(tracer.activity() for tracer in self.tracers)
+
+ def switch_context(self, new_context):
+ """Switch to a new dynamic context."""
+ self.flush_data()
+ if self.static_context:
+ context = self.static_context
+ if new_context:
+ context += "|" + new_context
+ else:
+ context = new_context
+ self.covdata.set_context(context)
+
+ def cached_mapped_file(self, filename):
+ """A locally cached version of file names mapped through file_mapper."""
+ key = (type(filename), filename)
+ try:
+ return self.mapped_file_cache[key]
+ except KeyError:
+ return self.mapped_file_cache.setdefault(key, self.file_mapper(filename))
+
+ def mapped_file_dict(self, d):
+ """Return a dict like d, but with keys modified by file_mapper."""
+ # The call to litems() ensures that the GIL protects the dictionary
+ # iterator against concurrent modifications by tracers running
+ # in other threads. We try three times in case of concurrent
+ # access, hoping to get a clean copy.
+ runtime_err = None
+ for _ in range(3):
+ try:
+ items = litems(d)
+ except RuntimeError as ex:
+ runtime_err = ex
+ else:
+ break
+ else:
+ raise runtime_err
+
+ return dict((self.cached_mapped_file(k), v) for k, v in items if v)
+
+ def flush_data(self):
+ """Save the collected data to our associated `CoverageData`.
+
+ Data may have also been saved along the way. This forces the
+ last of the data to be saved.
+
+ Returns True if there was data to save, False if not.
+ """
+ if not self._activity():
+ return False
+
+ if self.branch:
+ self.covdata.add_arcs(self.mapped_file_dict(self.data))
+ else:
+ self.covdata.add_lines(self.mapped_file_dict(self.data))
+ self.covdata.add_file_tracers(self.mapped_file_dict(self.file_tracers))
+
+ self._clear_data()
+ return True
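The collector's concurrency and context machinery is reached through the Coverage constructor; a small sketch with illustrative values:

    import coverage

    # Only one concurrency library may be named; plain threads are the default.
    cov = coverage.Coverage(concurrency="gevent", context="unit-tests")
    cov.start()
    # ... traced code; dynamic contexts are recorded as "unit-tests|<dynamic>" ...
    cov.stop()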
diff --git a/third_party/python/coverage/coverage/config.py b/third_party/python/coverage/coverage/config.py
new file mode 100644
index 0000000000..7876052b50
--- /dev/null
+++ b/third_party/python/coverage/coverage/config.py
@@ -0,0 +1,555 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Config file for coverage.py"""
+
+import collections
+import copy
+import os
+import os.path
+import re
+
+from coverage import env
+from coverage.backward import configparser, iitems, string_class
+from coverage.misc import contract, CoverageException, isolate_module
+from coverage.misc import substitute_variables
+
+from coverage.tomlconfig import TomlConfigParser, TomlDecodeError
+
+os = isolate_module(os)
+
+
+class HandyConfigParser(configparser.RawConfigParser):
+ """Our specialization of ConfigParser."""
+
+ def __init__(self, our_file):
+ """Create the HandyConfigParser.
+
+ `our_file` is True if this config file is specifically for coverage,
+ False if we are examining another config file (tox.ini, setup.cfg)
+ for possible settings.
+ """
+
+ configparser.RawConfigParser.__init__(self)
+ self.section_prefixes = ["coverage:"]
+ if our_file:
+ self.section_prefixes.append("")
+
+ def read(self, filenames, encoding=None):
+ """Read a file name as UTF-8 configuration data."""
+ kwargs = {}
+ if env.PYVERSION >= (3, 2):
+ kwargs['encoding'] = encoding or "utf-8"
+ return configparser.RawConfigParser.read(self, filenames, **kwargs)
+
+ def has_option(self, section, option):
+ for section_prefix in self.section_prefixes:
+ real_section = section_prefix + section
+ has = configparser.RawConfigParser.has_option(self, real_section, option)
+ if has:
+ return has
+ return False
+
+ def has_section(self, section):
+ for section_prefix in self.section_prefixes:
+ real_section = section_prefix + section
+ has = configparser.RawConfigParser.has_section(self, real_section)
+ if has:
+ return real_section
+ return False
+
+ def options(self, section):
+ for section_prefix in self.section_prefixes:
+ real_section = section_prefix + section
+ if configparser.RawConfigParser.has_section(self, real_section):
+ return configparser.RawConfigParser.options(self, real_section)
+ raise configparser.NoSectionError
+
+ def get_section(self, section):
+ """Get the contents of a section, as a dictionary."""
+ d = {}
+ for opt in self.options(section):
+ d[opt] = self.get(section, opt)
+ return d
+
+ def get(self, section, option, *args, **kwargs): # pylint: disable=arguments-differ
+ """Get a value, replacing environment variables also.
+
+ The arguments are the same as `RawConfigParser.get`, but in the found
+ value, ``$WORD`` or ``${WORD}`` are replaced by the value of the
+ environment variable ``WORD``.
+
+ Returns the finished value.
+
+ """
+ for section_prefix in self.section_prefixes:
+ real_section = section_prefix + section
+ if configparser.RawConfigParser.has_option(self, real_section, option):
+ break
+ else:
+ raise configparser.NoOptionError
+
+ v = configparser.RawConfigParser.get(self, real_section, option, *args, **kwargs)
+ v = substitute_variables(v, os.environ)
+ return v
+
+ def getlist(self, section, option):
+ """Read a list of strings.
+
+ The value of `section` and `option` is treated as a comma- and newline-
+ separated list of strings. Each value is stripped of whitespace.
+
+ Returns the list of strings.
+
+ """
+ value_list = self.get(section, option)
+ values = []
+ for value_line in value_list.split('\n'):
+ for value in value_line.split(','):
+ value = value.strip()
+ if value:
+ values.append(value)
+ return values
+
+ def getregexlist(self, section, option):
+ """Read a list of full-line regexes.
+
+ The value of `section` and `option` is treated as a newline-separated
+ list of regexes. Each value is stripped of whitespace.
+
+ Returns the list of strings.
+
+ """
+ line_list = self.get(section, option)
+ value_list = []
+ for value in line_list.splitlines():
+ value = value.strip()
+ try:
+ re.compile(value)
+ except re.error as e:
+ raise CoverageException(
+ "Invalid [%s].%s value %r: %s" % (section, option, value, e)
+ )
+ if value:
+ value_list.append(value)
+ return value_list
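A sketch of how this parser reads piggybacked settings, with environment-variable substitution and comma/newline lists; the file name and option values are illustrative:

    from coverage.config import HandyConfigParser

    # setup.cfg might contain:
    #   [coverage:run]
    #   omit =
    #       $HOME/scratch/*
    #       */vendored/*
    cp = HandyConfigParser(our_file=False)   # only "coverage:"-prefixed sections apply
    cp.read("setup.cfg")
    omit = cp.getlist("run", "omit")         # $HOME is expanded from the environment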
+
+
+# The default line exclusion regexes.
+DEFAULT_EXCLUDE = [
+ r'#\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(cover|COVER)',
+]
+
+# The default partial branch regexes, to be modified by the user.
+DEFAULT_PARTIAL = [
+ r'#\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(branch|BRANCH)',
+]
+
+# The default partial branch regexes, based on Python semantics.
+# These are any Python branching constructs that can't actually execute all
+# their branches.
+DEFAULT_PARTIAL_ALWAYS = [
+ 'while (True|1|False|0):',
+ 'if (True|1|False|0):',
+]
+
+
+class CoverageConfig(object):
+ """Coverage.py configuration.
+
+ The attributes of this class are the various settings that control the
+ operation of coverage.py.
+
+ """
+ # pylint: disable=too-many-instance-attributes
+
+ def __init__(self):
+ """Initialize the configuration attributes to their defaults."""
+ # Metadata about the config.
+ # We tried to read these config files.
+ self.attempted_config_files = []
+ # We did read these config files, but maybe didn't find any content for us.
+ self.config_files_read = []
+ # The file that gave us our configuration.
+ self.config_file = None
+ self._config_contents = None
+
+ # Defaults for [run] and [report]
+ self._include = None
+ self._omit = None
+
+ # Defaults for [run]
+ self.branch = False
+ self.command_line = None
+ self.concurrency = None
+ self.context = None
+ self.cover_pylib = False
+ self.data_file = ".coverage"
+ self.debug = []
+ self.disable_warnings = []
+ self.dynamic_context = None
+ self.note = None
+ self.parallel = False
+ self.plugins = []
+ self.relative_files = False
+ self.run_include = None
+ self.run_omit = None
+ self.source = None
+ self.timid = False
+ self._crash = None
+
+ # Defaults for [report]
+ self.exclude_list = DEFAULT_EXCLUDE[:]
+ self.fail_under = 0.0
+ self.ignore_errors = False
+ self.report_include = None
+ self.report_omit = None
+ self.partial_always_list = DEFAULT_PARTIAL_ALWAYS[:]
+ self.partial_list = DEFAULT_PARTIAL[:]
+ self.precision = 0
+ self.report_contexts = None
+ self.show_missing = False
+ self.skip_covered = False
+ self.skip_empty = False
+
+ # Defaults for [html]
+ self.extra_css = None
+ self.html_dir = "htmlcov"
+ self.html_title = "Coverage report"
+ self.show_contexts = False
+
+ # Defaults for [xml]
+ self.xml_output = "coverage.xml"
+ self.xml_package_depth = 99
+
+ # Defaults for [json]
+ self.json_output = "coverage.json"
+ self.json_pretty_print = False
+ self.json_show_contexts = False
+
+ # Defaults for [paths]
+ self.paths = collections.OrderedDict()
+
+ # Options for plugins
+ self.plugin_options = {}
+
+ MUST_BE_LIST = [
+ "debug", "concurrency", "plugins",
+ "report_omit", "report_include",
+ "run_omit", "run_include",
+ ]
+
+ def from_args(self, **kwargs):
+ """Read config values from `kwargs`."""
+ for k, v in iitems(kwargs):
+ if v is not None:
+ if k in self.MUST_BE_LIST and isinstance(v, string_class):
+ v = [v]
+ setattr(self, k, v)
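A small sketch of the MUST_BE_LIST coercion above, using the class defined in this file: a bare string passed for a list-valued setting is wrapped in a one-element list, while other values pass through unchanged.

    cfg = CoverageConfig()
    cfg.from_args(run_omit="*/tests/*", branch=True)
    assert cfg.run_omit == ["*/tests/*"]   # wrapped, because run_omit is in MUST_BE_LIST
    assert cfg.branch is True              # non-list settings pass through unchanged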
+
+ @contract(filename=str)
+ def from_file(self, filename, our_file):
+ """Read configuration from a .rc file.
+
+ `filename` is a file name to read.
+
+ `our_file` is True if this config file is specifically for coverage,
+ False if we are examining another config file (tox.ini, setup.cfg)
+ for possible settings.
+
+        Returns True or False: whether the file could be read, and whether it
+        had some coverage.py settings in it.
+
+ """
+ _, ext = os.path.splitext(filename)
+ if ext == '.toml':
+ cp = TomlConfigParser(our_file)
+ else:
+ cp = HandyConfigParser(our_file)
+
+ self.attempted_config_files.append(filename)
+
+ try:
+ files_read = cp.read(filename)
+ except (configparser.Error, TomlDecodeError) as err:
+ raise CoverageException("Couldn't read config file %s: %s" % (filename, err))
+ if not files_read:
+ return False
+
+ self.config_files_read.extend(map(os.path.abspath, files_read))
+
+ any_set = False
+ try:
+ for option_spec in self.CONFIG_FILE_OPTIONS:
+ was_set = self._set_attr_from_config_option(cp, *option_spec)
+ if was_set:
+ any_set = True
+ except ValueError as err:
+ raise CoverageException("Couldn't read config file %s: %s" % (filename, err))
+
+ # Check that there are no unrecognized options.
+ all_options = collections.defaultdict(set)
+ for option_spec in self.CONFIG_FILE_OPTIONS:
+ section, option = option_spec[1].split(":")
+ all_options[section].add(option)
+
+ for section, options in iitems(all_options):
+ real_section = cp.has_section(section)
+ if real_section:
+ for unknown in set(cp.options(section)) - options:
+ raise CoverageException(
+ "Unrecognized option '[%s] %s=' in config file %s" % (
+ real_section, unknown, filename
+ )
+ )
+
+ # [paths] is special
+ if cp.has_section('paths'):
+ for option in cp.options('paths'):
+ self.paths[option] = cp.getlist('paths', option)
+ any_set = True
+
+ # plugins can have options
+ for plugin in self.plugins:
+ if cp.has_section(plugin):
+ self.plugin_options[plugin] = cp.get_section(plugin)
+ any_set = True
+
+ # Was this file used as a config file? If it's specifically our file,
+ # then it was used. If we're piggybacking on someone else's file,
+ # then it was only used if we found some settings in it.
+ if our_file:
+ used = True
+ else:
+ used = any_set
+
+ if used:
+ self.config_file = os.path.abspath(filename)
+ with open(filename) as f:
+ self._config_contents = f.read()
+
+ return used
+
+ def copy(self):
+ """Return a copy of the configuration."""
+ return copy.deepcopy(self)
+
+ CONFIG_FILE_OPTIONS = [
+ # These are *args for _set_attr_from_config_option:
+ # (attr, where, type_="")
+ #
+ # attr is the attribute to set on the CoverageConfig object.
+ # where is the section:name to read from the configuration file.
+ # type_ is the optional type to apply, by using .getTYPE to read the
+ # configuration value from the file.
+
+ # [run]
+ ('branch', 'run:branch', 'boolean'),
+ ('command_line', 'run:command_line'),
+ ('concurrency', 'run:concurrency', 'list'),
+ ('context', 'run:context'),
+ ('cover_pylib', 'run:cover_pylib', 'boolean'),
+ ('data_file', 'run:data_file'),
+ ('debug', 'run:debug', 'list'),
+ ('disable_warnings', 'run:disable_warnings', 'list'),
+ ('dynamic_context', 'run:dynamic_context'),
+ ('note', 'run:note'),
+ ('parallel', 'run:parallel', 'boolean'),
+ ('plugins', 'run:plugins', 'list'),
+ ('relative_files', 'run:relative_files', 'boolean'),
+ ('run_include', 'run:include', 'list'),
+ ('run_omit', 'run:omit', 'list'),
+ ('source', 'run:source', 'list'),
+ ('timid', 'run:timid', 'boolean'),
+ ('_crash', 'run:_crash'),
+
+ # [report]
+ ('exclude_list', 'report:exclude_lines', 'regexlist'),
+ ('fail_under', 'report:fail_under', 'float'),
+ ('ignore_errors', 'report:ignore_errors', 'boolean'),
+ ('partial_always_list', 'report:partial_branches_always', 'regexlist'),
+ ('partial_list', 'report:partial_branches', 'regexlist'),
+ ('precision', 'report:precision', 'int'),
+ ('report_contexts', 'report:contexts', 'list'),
+ ('report_include', 'report:include', 'list'),
+ ('report_omit', 'report:omit', 'list'),
+ ('show_missing', 'report:show_missing', 'boolean'),
+ ('skip_covered', 'report:skip_covered', 'boolean'),
+ ('skip_empty', 'report:skip_empty', 'boolean'),
+ ('sort', 'report:sort'),
+
+ # [html]
+ ('extra_css', 'html:extra_css'),
+ ('html_dir', 'html:directory'),
+ ('html_title', 'html:title'),
+ ('show_contexts', 'html:show_contexts', 'boolean'),
+
+ # [xml]
+ ('xml_output', 'xml:output'),
+ ('xml_package_depth', 'xml:package_depth', 'int'),
+
+ # [json]
+ ('json_output', 'json:output'),
+ ('json_pretty_print', 'json:pretty_print', 'boolean'),
+ ('json_show_contexts', 'json:show_contexts', 'boolean'),
+ ]
+
+ def _set_attr_from_config_option(self, cp, attr, where, type_=''):
+ """Set an attribute on self if it exists in the ConfigParser.
+
+ Returns True if the attribute was set.
+
+ """
+ section, option = where.split(":")
+ if cp.has_option(section, option):
+ method = getattr(cp, 'get' + type_)
+ setattr(self, attr, method(section, option))
+ return True
+ return False
+
+ def get_plugin_options(self, plugin):
+ """Get a dictionary of options for the plugin named `plugin`."""
+ return self.plugin_options.get(plugin, {})
+
+ def set_option(self, option_name, value):
+ """Set an option in the configuration.
+
+ `option_name` is a colon-separated string indicating the section and
+ option name. For example, the ``branch`` option in the ``[run]``
+ section of the config file would be indicated with `"run:branch"`.
+
+ `value` is the new value for the option.
+
+ """
+ # Special-cased options.
+ if option_name == "paths":
+ self.paths = value
+ return
+
+ # Check all the hard-coded options.
+ for option_spec in self.CONFIG_FILE_OPTIONS:
+ attr, where = option_spec[:2]
+ if where == option_name:
+ setattr(self, attr, value)
+ return
+
+ # See if it's a plugin option.
+ plugin_name, _, key = option_name.partition(":")
+ if key and plugin_name in self.plugins:
+ self.plugin_options.setdefault(plugin_name, {})[key] = value
+ return
+
+ # If we get here, we didn't find the option.
+ raise CoverageException("No such option: %r" % option_name)
+
+ def get_option(self, option_name):
+ """Get an option from the configuration.
+
+ `option_name` is a colon-separated string indicating the section and
+ option name. For example, the ``branch`` option in the ``[run]``
+ section of the config file would be indicated with `"run:branch"`.
+
+ Returns the value of the option.
+
+ """
+ # Special-cased options.
+ if option_name == "paths":
+ return self.paths
+
+ # Check all the hard-coded options.
+ for option_spec in self.CONFIG_FILE_OPTIONS:
+ attr, where = option_spec[:2]
+ if where == option_name:
+ return getattr(self, attr)
+
+ # See if it's a plugin option.
+ plugin_name, _, key = option_name.partition(":")
+ if key and plugin_name in self.plugins:
+ return self.plugin_options.get(plugin_name, {}).get(key)
+
+ # If we get here, we didn't find the option.
+ raise CoverageException("No such option: %r" % option_name)
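A brief round-trip sketch of the colon-separated naming used by set_option and get_option (plugin options are addressed the same way, as "plugin_name:key"):

    cfg = CoverageConfig()
    cfg.set_option("run:branch", True)
    assert cfg.get_option("run:branch") is True
    assert cfg.get_option("report:precision") == 0    # untouched default
    try:
        cfg.set_option("no_such:thing", 1)
    except CoverageException:
        pass    # unknown section:option names are rejected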
+
+
+def config_files_to_try(config_file):
+ """What config files should we try to read?
+
+ Returns a list of tuples:
+ (filename, is_our_file, was_file_specified)
+ """
+
+ # Some API users were specifying ".coveragerc" to mean the same as
+ # True, so make it so.
+ if config_file == ".coveragerc":
+ config_file = True
+ specified_file = (config_file is not True)
+ if not specified_file:
+ # No file was specified. Check COVERAGE_RCFILE.
+ config_file = os.environ.get('COVERAGE_RCFILE')
+ if config_file:
+ specified_file = True
+ if not specified_file:
+ # Still no file specified. Default to .coveragerc
+ config_file = ".coveragerc"
+ files_to_try = [
+ (config_file, True, specified_file),
+ ("setup.cfg", False, False),
+ ("tox.ini", False, False),
+ ("pyproject.toml", False, False),
+ ]
+ return files_to_try
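For illustration (a sketch, not upstream test code), with no explicit file and COVERAGE_RCFILE unset, the search order works out as follows:

    files = config_files_to_try(True)    # no specific file given
    # Yields, in order:
    #   ('.coveragerc', True, False), ('setup.cfg', False, False),
    #   ('tox.ini', False, False), ('pyproject.toml', False, False)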
+
+
+def read_coverage_config(config_file, **kwargs):
+ """Read the coverage.py configuration.
+
+ Arguments:
+ config_file: a boolean or string, see the `Coverage` class for the
+ tricky details.
+ all others: keyword arguments from the `Coverage` class, used for
+ setting values in the configuration.
+
+ Returns:
+ config:
+ config is a CoverageConfig object read from the appropriate
+ configuration file.
+
+ """
+ # Build the configuration from a number of sources:
+ # 1) defaults:
+ config = CoverageConfig()
+
+ # 2) from a file:
+ if config_file:
+ files_to_try = config_files_to_try(config_file)
+
+ for fname, our_file, specified_file in files_to_try:
+ config_read = config.from_file(fname, our_file=our_file)
+ if config_read:
+ break
+ if specified_file:
+ raise CoverageException("Couldn't read '%s' as a config file" % fname)
+
+ # $set_env.py: COVERAGE_DEBUG - Options for --debug.
+ # 3) from environment variables:
+ env_data_file = os.environ.get('COVERAGE_FILE')
+ if env_data_file:
+ config.data_file = env_data_file
+ debugs = os.environ.get('COVERAGE_DEBUG')
+ if debugs:
+ config.debug.extend(d.strip() for d in debugs.split(","))
+
+ # 4) from constructor arguments:
+ config.from_args(**kwargs)
+
+ # Once all the config has been collected, there's a little post-processing
+ # to do.
+ config.data_file = os.path.expanduser(config.data_file)
+ config.html_dir = os.path.expanduser(config.html_dir)
+ config.xml_output = os.path.expanduser(config.xml_output)
+ config.paths = collections.OrderedDict(
+ (k, [os.path.expanduser(f) for f in v])
+ for k, v in config.paths.items()
+ )
+
+ return config
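Putting the layers together, later sources win: defaults, then the config file, then environment variables, then constructor keyword arguments. A hedged sketch of that precedence using the function above (the path is illustrative):

    import os

    os.environ["COVERAGE_FILE"] = "/tmp/.coverage.demo"   # illustrative path
    config = read_coverage_config(config_file=False, timid=True)
    assert config.data_file == "/tmp/.coverage.demo"   # 3) environment beats the default
    assert config.timid is True                        # 4) keyword arguments
    assert config.branch is False                      # 1) untouched default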
diff --git a/third_party/python/coverage/coverage/context.py b/third_party/python/coverage/coverage/context.py
new file mode 100644
index 0000000000..ea13da21ed
--- /dev/null
+++ b/third_party/python/coverage/coverage/context.py
@@ -0,0 +1,91 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Determine contexts for coverage.py"""
+
+
+def combine_context_switchers(context_switchers):
+ """Create a single context switcher from multiple switchers.
+
+ `context_switchers` is a list of functions that take a frame as an
+ argument and return a string to use as the new context label.
+
+    Returns a function that composes the `context_switchers` functions, or None
+ if `context_switchers` is an empty list.
+
+ When invoked, the combined switcher calls `context_switchers` one-by-one
+ until a string is returned. The combined switcher returns None if all
+ `context_switchers` return None.
+ """
+ if not context_switchers:
+ return None
+
+ if len(context_switchers) == 1:
+ return context_switchers[0]
+
+ def should_start_context(frame):
+ """The combiner for multiple context switchers."""
+ for switcher in context_switchers:
+ new_context = switcher(frame)
+ if new_context is not None:
+ return new_context
+ return None
+
+ return should_start_context
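A minimal illustration of the combining behavior, with two hypothetical switcher functions (not coverage.py's own) and plain strings standing in for frames, since the switchers are arbitrary one-argument callables:

    def by_doctest(frame):
        return "doctest" if frame == "doctest-frame" else None

    def by_test_name(frame):
        return "tests" if frame == "test-frame" else None

    switch = combine_context_switchers([by_doctest, by_test_name])
    assert switch("doctest-frame") == "doctest"
    assert switch("test-frame") == "tests"      # falls through to the second switcher
    assert switch("other-frame") is None        # no switcher matched
    assert combine_context_switchers([]) is None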
+
+
+def should_start_context_test_function(frame):
+ """Is this frame calling a test_* function?"""
+ co_name = frame.f_code.co_name
+ if co_name.startswith("test") or co_name == "runTest":
+ return qualname_from_frame(frame)
+ return None
+
+
+def qualname_from_frame(frame):
+ """Get a qualified name for the code running in `frame`."""
+ co = frame.f_code
+ fname = co.co_name
+ method = None
+ if co.co_argcount and co.co_varnames[0] == "self":
+ self = frame.f_locals["self"]
+ method = getattr(self, fname, None)
+
+ if method is None:
+ func = frame.f_globals.get(fname)
+ if func is None:
+ return None
+ return func.__module__ + '.' + fname
+
+ func = getattr(method, '__func__', None)
+ if func is None:
+ cls = self.__class__
+ return cls.__module__ + '.' + cls.__name__ + "." + fname
+
+ if hasattr(func, '__qualname__'):
+ qname = func.__module__ + '.' + func.__qualname__
+ else:
+ for cls in getattr(self.__class__, '__mro__', ()):
+ f = cls.__dict__.get(fname, None)
+ if f is None:
+ continue
+ if f is func:
+ qname = cls.__module__ + '.' + cls.__name__ + "." + fname
+ break
+ else:
+ # Support for old-style classes.
+ def mro(bases):
+ for base in bases:
+ f = base.__dict__.get(fname, None)
+ if f is func:
+ return base.__module__ + '.' + base.__name__ + "." + fname
+ for base in bases:
+ qname = mro(base.__bases__)
+ if qname is not None:
+ return qname
+ return None
+ qname = mro([self.__class__])
+ if qname is None:
+ qname = func.__module__ + '.' + fname
+
+ return qname
diff --git a/third_party/python/coverage/coverage/control.py b/third_party/python/coverage/coverage/control.py
new file mode 100644
index 0000000000..2b8c3d261d
--- /dev/null
+++ b/third_party/python/coverage/coverage/control.py
@@ -0,0 +1,1110 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Core control stuff for coverage.py."""
+
+import atexit
+import contextlib
+import os
+import os.path
+import platform
+import sys
+import time
+
+from coverage import env
+from coverage.annotate import AnnotateReporter
+from coverage.backward import string_class, iitems
+from coverage.collector import Collector, CTracer
+from coverage.config import read_coverage_config
+from coverage.context import should_start_context_test_function, combine_context_switchers
+from coverage.data import CoverageData, combine_parallel_data
+from coverage.debug import DebugControl, short_stack, write_formatted_info
+from coverage.disposition import disposition_debug_msg
+from coverage.files import PathAliases, abs_file, relative_filename, set_relative_directory
+from coverage.html import HtmlReporter
+from coverage.inorout import InOrOut
+from coverage.jsonreport import JsonReporter
+from coverage.misc import CoverageException, bool_or_none, join_regex
+from coverage.misc import DefaultValue, ensure_dir_for_file, isolate_module
+from coverage.plugin import FileReporter
+from coverage.plugin_support import Plugins
+from coverage.python import PythonFileReporter
+from coverage.report import render_report
+from coverage.results import Analysis, Numbers
+from coverage.summary import SummaryReporter
+from coverage.xmlreport import XmlReporter
+
+try:
+ from coverage.multiproc import patch_multiprocessing
+except ImportError: # pragma: only jython
+ # Jython has no multiprocessing module.
+ patch_multiprocessing = None
+
+os = isolate_module(os)
+
+@contextlib.contextmanager
+def override_config(cov, **kwargs):
+ """Temporarily tweak the configuration of `cov`.
+
+ The arguments are applied to `cov.config` with the `from_args` method.
+ At the end of the with-statement, the old configuration is restored.
+ """
+ original_config = cov.config
+ cov.config = cov.config.copy()
+ try:
+ cov.config.from_args(**kwargs)
+ yield
+ finally:
+ cov.config = original_config
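A short sketch of the intended use: any keyword accepted by CoverageConfig.from_args can be overridden for the duration of the block, and the previous configuration is restored on exit (assumes data has already been collected).

    cov = Coverage()
    # ... run and collect data ...
    with override_config(cov, show_missing=True, report_omit=["*/tests/*"]):
        cov.report()           # reporting sees the temporary settings
    # cov.config is back to its previous values here.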
+
+
+_DEFAULT_DATAFILE = DefaultValue("MISSING")
+
+class Coverage(object):
+ """Programmatic access to coverage.py.
+
+ To use::
+
+ from coverage import Coverage
+
+ cov = Coverage()
+ cov.start()
+ #.. call your code ..
+ cov.stop()
+ cov.html_report(directory='covhtml')
+
+ Note: in keeping with Python custom, names starting with underscore are
+ not part of the public API. They might stop working at any point. Please
+ limit yourself to documented methods to avoid problems.
+
+ """
+
+ # The stack of started Coverage instances.
+ _instances = []
+
+ @classmethod
+ def current(cls):
+ """Get the latest started `Coverage` instance, if any.
+
+ Returns: a `Coverage` instance, or None.
+
+ .. versionadded:: 5.0
+
+ """
+ if cls._instances:
+ return cls._instances[-1]
+ else:
+ return None
+
+ def __init__(
+ self, data_file=_DEFAULT_DATAFILE, data_suffix=None, cover_pylib=None,
+ auto_data=False, timid=None, branch=None, config_file=True,
+ source=None, omit=None, include=None, debug=None,
+ concurrency=None, check_preimported=False, context=None,
+ ):
+ """
+ Many of these arguments duplicate and override values that can be
+ provided in a configuration file. Parameters that are missing here
+ will use values from the config file.
+
+ `data_file` is the base name of the data file to use. The config value
+ defaults to ".coverage". None can be provided to prevent writing a data
+ file. `data_suffix` is appended (with a dot) to `data_file` to create
+ the final file name. If `data_suffix` is simply True, then a suffix is
+ created with the machine and process identity included.
+
+ `cover_pylib` is a boolean determining whether Python code installed
+ with the Python interpreter is measured. This includes the Python
+ standard library and any packages installed with the interpreter.
+
+ If `auto_data` is true, then any existing data file will be read when
+ coverage measurement starts, and data will be saved automatically when
+ measurement stops.
+
+ If `timid` is true, then a slower and simpler trace function will be
+ used. This is important for some environments where manipulation of
+ tracing functions breaks the faster trace function.
+
+ If `branch` is true, then branch coverage will be measured in addition
+ to the usual statement coverage.
+
+ `config_file` determines what configuration file to read:
+
+ * If it is ".coveragerc", it is interpreted as if it were True,
+ for backward compatibility.
+
+ * If it is a string, it is the name of the file to read. If the
+ file can't be read, it is an error.
+
+        * If it is True, then a few standard file names are tried
+ (".coveragerc", "setup.cfg", "tox.ini"). It is not an error for
+ these files to not be found.
+
+ * If it is False, then no configuration file is read.
+
+ `source` is a list of file paths or package names. Only code located
+ in the trees indicated by the file paths or package names will be
+ measured.
+
+ `include` and `omit` are lists of file name patterns. Files that match
+ `include` will be measured, files that match `omit` will not. Each
+ will also accept a single string argument.
+
+ `debug` is a list of strings indicating what debugging information is
+ desired.
+
+ `concurrency` is a string indicating the concurrency library being used
+ in the measured code. Without this, coverage.py will get incorrect
+ results if these libraries are in use. Valid strings are "greenlet",
+ "eventlet", "gevent", "multiprocessing", or "thread" (the default).
+ This can also be a list of these strings.
+
+ If `check_preimported` is true, then when coverage is started, the
+ already-imported files will be checked to see if they should be
+ measured by coverage. Importing measured files before coverage is
+ started can mean that code is missed.
+
+ `context` is a string to use as the :ref:`static context
+ <static_contexts>` label for collected data.
+
+ .. versionadded:: 4.0
+ The `concurrency` parameter.
+
+ .. versionadded:: 4.2
+ The `concurrency` parameter can now be a list of strings.
+
+ .. versionadded:: 5.0
+ The `check_preimported` and `context` parameters.
+
+ """
+ # data_file=None means no disk file at all. data_file missing means
+ # use the value from the config file.
+ self._no_disk = data_file is None
+ if data_file is _DEFAULT_DATAFILE:
+ data_file = None
+
+ # Build our configuration from a number of sources.
+ self.config = read_coverage_config(
+ config_file=config_file,
+ data_file=data_file, cover_pylib=cover_pylib, timid=timid,
+ branch=branch, parallel=bool_or_none(data_suffix),
+ source=source, run_omit=omit, run_include=include, debug=debug,
+ report_omit=omit, report_include=include,
+ concurrency=concurrency, context=context,
+ )
+
+ # This is injectable by tests.
+ self._debug_file = None
+
+ self._auto_load = self._auto_save = auto_data
+ self._data_suffix_specified = data_suffix
+
+ # Is it ok for no data to be collected?
+ self._warn_no_data = True
+ self._warn_unimported_source = True
+ self._warn_preimported_source = check_preimported
+ self._no_warn_slugs = None
+
+ # A record of all the warnings that have been issued.
+ self._warnings = []
+
+ # Other instance attributes, set later.
+ self._data = self._collector = None
+ self._plugins = None
+ self._inorout = None
+ self._inorout_class = InOrOut
+ self._data_suffix = self._run_suffix = None
+ self._exclude_re = None
+ self._debug = None
+ self._file_mapper = None
+
+ # State machine variables:
+ # Have we initialized everything?
+ self._inited = False
+ self._inited_for_start = False
+ # Have we started collecting and not stopped it?
+ self._started = False
+ # Should we write the debug output?
+ self._should_write_debug = True
+
+ # If we have sub-process measurement happening automatically, then we
+        # want any explicit creation of a Coverage object to mean that this
+        # process is already coverage-aware, so don't auto-measure it. By now, the
+ # auto-creation of a Coverage object has already happened. But we can
+ # find it and tell it not to save its data.
+ if not env.METACOV:
+ _prevent_sub_process_measurement()
+
+ def _init(self):
+ """Set all the initial state.
+
+ This is called by the public methods to initialize state. This lets us
+ construct a :class:`Coverage` object, then tweak its state before this
+ function is called.
+
+ """
+ if self._inited:
+ return
+
+ self._inited = True
+
+ # Create and configure the debugging controller. COVERAGE_DEBUG_FILE
+ # is an environment variable, the name of a file to append debug logs
+ # to.
+ self._debug = DebugControl(self.config.debug, self._debug_file)
+
+ if "multiprocessing" in (self.config.concurrency or ()):
+ # Multi-processing uses parallel for the subprocesses, so also use
+ # it for the main process.
+ self.config.parallel = True
+
+ # _exclude_re is a dict that maps exclusion list names to compiled regexes.
+ self._exclude_re = {}
+
+ set_relative_directory()
+ self._file_mapper = relative_filename if self.config.relative_files else abs_file
+
+ # Load plugins
+ self._plugins = Plugins.load_plugins(self.config.plugins, self.config, self._debug)
+
+ # Run configuring plugins.
+ for plugin in self._plugins.configurers:
+ # We need an object with set_option and get_option. Either self or
+ # self.config will do. Choosing randomly stops people from doing
+ # other things with those objects, against the public API. Yes,
+ # this is a bit childish. :)
+ plugin.configure([self, self.config][int(time.time()) % 2])
+
+ def _post_init(self):
+ """Stuff to do after everything is initialized."""
+ if self._should_write_debug:
+ self._should_write_debug = False
+ self._write_startup_debug()
+
+ # '[run] _crash' will raise an exception if the value is close by in
+ # the call stack, for testing error handling.
+ if self.config._crash and self.config._crash in short_stack(limit=4):
+ raise Exception("Crashing because called by {}".format(self.config._crash))
+
+ def _write_startup_debug(self):
+ """Write out debug info at startup if needed."""
+ wrote_any = False
+ with self._debug.without_callers():
+ if self._debug.should('config'):
+ config_info = sorted(self.config.__dict__.items())
+ config_info = [(k, v) for k, v in config_info if not k.startswith('_')]
+ write_formatted_info(self._debug, "config", config_info)
+ wrote_any = True
+
+ if self._debug.should('sys'):
+ write_formatted_info(self._debug, "sys", self.sys_info())
+ for plugin in self._plugins:
+ header = "sys: " + plugin._coverage_plugin_name
+ info = plugin.sys_info()
+ write_formatted_info(self._debug, header, info)
+ wrote_any = True
+
+ if wrote_any:
+ write_formatted_info(self._debug, "end", ())
+
+ def _should_trace(self, filename, frame):
+ """Decide whether to trace execution in `filename`.
+
+ Calls `_should_trace_internal`, and returns the FileDisposition.
+
+ """
+ disp = self._inorout.should_trace(filename, frame)
+ if self._debug.should('trace'):
+ self._debug.write(disposition_debug_msg(disp))
+ return disp
+
+ def _check_include_omit_etc(self, filename, frame):
+ """Check a file name against the include/omit/etc, rules, verbosely.
+
+ Returns a boolean: True if the file should be traced, False if not.
+
+ """
+ reason = self._inorout.check_include_omit_etc(filename, frame)
+ if self._debug.should('trace'):
+ if not reason:
+ msg = "Including %r" % (filename,)
+ else:
+ msg = "Not including %r: %s" % (filename, reason)
+ self._debug.write(msg)
+
+ return not reason
+
+ def _warn(self, msg, slug=None, once=False):
+ """Use `msg` as a warning.
+
+ For warning suppression, use `slug` as the shorthand.
+
+        If `once` is true, only show this warning once (determined by the
+        slug).
+
+ """
+ if self._no_warn_slugs is None:
+ self._no_warn_slugs = list(self.config.disable_warnings)
+
+ if slug in self._no_warn_slugs:
+ # Don't issue the warning
+ return
+
+ self._warnings.append(msg)
+ if slug:
+ msg = "%s (%s)" % (msg, slug)
+ if self._debug.should('pid'):
+ msg = "[%d] %s" % (os.getpid(), msg)
+ sys.stderr.write("Coverage.py warning: %s\n" % msg)
+
+ if once:
+ self._no_warn_slugs.append(slug)
+
+ def get_option(self, option_name):
+ """Get an option from the configuration.
+
+ `option_name` is a colon-separated string indicating the section and
+ option name. For example, the ``branch`` option in the ``[run]``
+ section of the config file would be indicated with `"run:branch"`.
+
+ Returns the value of the option. The type depends on the option
+ selected.
+
+ As a special case, an `option_name` of ``"paths"`` will return an
+ OrderedDict with the entire ``[paths]`` section value.
+
+ .. versionadded:: 4.0
+
+ """
+ return self.config.get_option(option_name)
+
+ def set_option(self, option_name, value):
+ """Set an option in the configuration.
+
+ `option_name` is a colon-separated string indicating the section and
+ option name. For example, the ``branch`` option in the ``[run]``
+ section of the config file would be indicated with ``"run:branch"``.
+
+ `value` is the new value for the option. This should be an
+ appropriate Python value. For example, use True for booleans, not the
+ string ``"True"``.
+
+ As an example, calling::
+
+ cov.set_option("run:branch", True)
+
+ has the same effect as this configuration file::
+
+ [run]
+ branch = True
+
+ As a special case, an `option_name` of ``"paths"`` will replace the
+ entire ``[paths]`` section. The value should be an OrderedDict.
+
+ .. versionadded:: 4.0
+
+ """
+ self.config.set_option(option_name, value)
+
+ def load(self):
+ """Load previously-collected coverage data from the data file."""
+ self._init()
+ if self._collector:
+ self._collector.reset()
+ should_skip = self.config.parallel and not os.path.exists(self.config.data_file)
+ if not should_skip:
+ self._init_data(suffix=None)
+ self._post_init()
+ if not should_skip:
+ self._data.read()
+
+ def _init_for_start(self):
+ """Initialization for start()"""
+ # Construct the collector.
+ concurrency = self.config.concurrency or ()
+ if "multiprocessing" in concurrency:
+ if not patch_multiprocessing:
+ raise CoverageException( # pragma: only jython
+ "multiprocessing is not supported on this Python"
+ )
+ patch_multiprocessing(rcfile=self.config.config_file)
+
+ dycon = self.config.dynamic_context
+ if not dycon or dycon == "none":
+ context_switchers = []
+ elif dycon == "test_function":
+ context_switchers = [should_start_context_test_function]
+ else:
+ raise CoverageException(
+ "Don't understand dynamic_context setting: {!r}".format(dycon)
+ )
+
+ context_switchers.extend(
+ plugin.dynamic_context for plugin in self._plugins.context_switchers
+ )
+
+ should_start_context = combine_context_switchers(context_switchers)
+
+ self._collector = Collector(
+ should_trace=self._should_trace,
+ check_include=self._check_include_omit_etc,
+ should_start_context=should_start_context,
+ file_mapper=self._file_mapper,
+ timid=self.config.timid,
+ branch=self.config.branch,
+ warn=self._warn,
+ concurrency=concurrency,
+ )
+
+ suffix = self._data_suffix_specified
+ if suffix or self.config.parallel:
+ if not isinstance(suffix, string_class):
+ # if data_suffix=True, use .machinename.pid.random
+ suffix = True
+ else:
+ suffix = None
+
+ self._init_data(suffix)
+
+ self._collector.use_data(self._data, self.config.context)
+
+ # Early warning if we aren't going to be able to support plugins.
+ if self._plugins.file_tracers and not self._collector.supports_plugins:
+ self._warn(
+ "Plugin file tracers (%s) aren't supported with %s" % (
+ ", ".join(
+ plugin._coverage_plugin_name
+ for plugin in self._plugins.file_tracers
+ ),
+ self._collector.tracer_name(),
+ )
+ )
+ for plugin in self._plugins.file_tracers:
+ plugin._coverage_enabled = False
+
+ # Create the file classifying substructure.
+ self._inorout = self._inorout_class(warn=self._warn)
+ self._inorout.configure(self.config)
+ self._inorout.plugins = self._plugins
+ self._inorout.disp_class = self._collector.file_disposition_class
+
+ # It's useful to write debug info after initing for start.
+ self._should_write_debug = True
+
+ atexit.register(self._atexit)
+
+ def _init_data(self, suffix):
+ """Create a data file if we don't have one yet."""
+ if self._data is None:
+ # Create the data file. We do this at construction time so that the
+ # data file will be written into the directory where the process
+ # started rather than wherever the process eventually chdir'd to.
+ ensure_dir_for_file(self.config.data_file)
+ self._data = CoverageData(
+ basename=self.config.data_file,
+ suffix=suffix,
+ warn=self._warn,
+ debug=self._debug,
+ no_disk=self._no_disk,
+ )
+
+ def start(self):
+ """Start measuring code coverage.
+
+ Coverage measurement only occurs in functions called after
+ :meth:`start` is invoked. Statements in the same scope as
+ :meth:`start` won't be measured.
+
+ Once you invoke :meth:`start`, you must also call :meth:`stop`
+ eventually, or your process might not shut down cleanly.
+
+ """
+ self._init()
+ if not self._inited_for_start:
+ self._inited_for_start = True
+ self._init_for_start()
+ self._post_init()
+
+ # Issue warnings for possible problems.
+ self._inorout.warn_conflicting_settings()
+
+ # See if we think some code that would eventually be measured has
+ # already been imported.
+ if self._warn_preimported_source:
+ self._inorout.warn_already_imported_files()
+
+ if self._auto_load:
+ self.load()
+
+ self._collector.start()
+ self._started = True
+ self._instances.append(self)
+
+ def stop(self):
+ """Stop measuring code coverage."""
+ if self._instances:
+ if self._instances[-1] is self:
+ self._instances.pop()
+ if self._started:
+ self._collector.stop()
+ self._started = False
+
+ def _atexit(self):
+ """Clean up on process shutdown."""
+ if self._debug.should("process"):
+ self._debug.write("atexit: pid: {}, instance: {!r}".format(os.getpid(), self))
+ if self._started:
+ self.stop()
+ if self._auto_save:
+ self.save()
+
+ def erase(self):
+ """Erase previously collected coverage data.
+
+ This removes the in-memory data collected in this session as well as
+ discarding the data file.
+
+ """
+ self._init()
+ self._post_init()
+ if self._collector:
+ self._collector.reset()
+ self._init_data(suffix=None)
+ self._data.erase(parallel=self.config.parallel)
+ self._data = None
+ self._inited_for_start = False
+
+ def switch_context(self, new_context):
+ """Switch to a new dynamic context.
+
+ `new_context` is a string to use as the :ref:`dynamic context
+ <dynamic_contexts>` label for collected data. If a :ref:`static
+ context <static_contexts>` is in use, the static and dynamic context
+ labels will be joined together with a pipe character.
+
+ Coverage collection must be started already.
+
+ .. versionadded:: 5.0
+
+ """
+ if not self._started: # pragma: part started
+ raise CoverageException(
+ "Cannot switch context, coverage is not started"
+ )
+
+ if self._collector.should_start_context:
+ self._warn("Conflicting dynamic contexts", slug="dynamic-conflict", once=True)
+
+ self._collector.switch_context(new_context)
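A brief usage sketch (collection must already be started, as the check above enforces; context labels are illustrative):

    cov = Coverage()
    cov.start()
    cov.switch_context("test_one")
    # ... code measured under the "test_one" dynamic context ...
    cov.switch_context("test_two")
    # ... code measured under "test_two" ...
    cov.stop()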
+
+ def clear_exclude(self, which='exclude'):
+ """Clear the exclude list."""
+ self._init()
+ setattr(self.config, which + "_list", [])
+ self._exclude_regex_stale()
+
+ def exclude(self, regex, which='exclude'):
+ """Exclude source lines from execution consideration.
+
+ A number of lists of regular expressions are maintained. Each list
+ selects lines that are treated differently during reporting.
+
+ `which` determines which list is modified. The "exclude" list selects
+ lines that are not considered executable at all. The "partial" list
+ indicates lines with branches that are not taken.
+
+ `regex` is a regular expression. The regex is added to the specified
+ list. If any of the regexes in the list is found in a line, the line
+ is marked for special treatment during reporting.
+
+ """
+ self._init()
+ excl_list = getattr(self.config, which + "_list")
+ excl_list.append(regex)
+ self._exclude_regex_stale()
+
+ def _exclude_regex_stale(self):
+ """Drop all the compiled exclusion regexes, a list was modified."""
+ self._exclude_re.clear()
+
+ def _exclude_regex(self, which):
+ """Return a compiled regex for the given exclusion list."""
+ if which not in self._exclude_re:
+ excl_list = getattr(self.config, which + "_list")
+ self._exclude_re[which] = join_regex(excl_list)
+ return self._exclude_re[which]
+
+ def get_exclude_list(self, which='exclude'):
+ """Return a list of excluded regex patterns.
+
+ `which` indicates which list is desired. See :meth:`exclude` for the
+ lists that are available, and their meaning.
+
+ """
+ self._init()
+ return getattr(self.config, which + "_list")
+
+ def save(self):
+ """Save the collected coverage data to the data file."""
+ data = self.get_data()
+ data.write()
+
+ def combine(self, data_paths=None, strict=False):
+ """Combine together a number of similarly-named coverage data files.
+
+ All coverage data files whose name starts with `data_file` (from the
+ coverage() constructor) will be read, and combined together into the
+ current measurements.
+
+ `data_paths` is a list of files or directories from which data should
+ be combined. If no list is passed, then the data files from the
+ directory indicated by the current data file (probably the current
+ directory) will be combined.
+
+ If `strict` is true, then it is an error to attempt to combine when
+ there are no data files to combine.
+
+ .. versionadded:: 4.0
+ The `data_paths` parameter.
+
+ .. versionadded:: 4.3
+ The `strict` parameter.
+
+ """
+ self._init()
+ self._init_data(suffix=None)
+ self._post_init()
+ self.get_data()
+
+ aliases = None
+ if self.config.paths:
+ aliases = PathAliases()
+ for paths in self.config.paths.values():
+ result = paths[0]
+ for pattern in paths[1:]:
+ aliases.add(pattern, result)
+
+ combine_parallel_data(self._data, aliases=aliases, data_paths=data_paths, strict=strict)
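A sketch of combining parallel data files after a parallel run, with a [paths]-style alias collapsing per-machine paths (the key and paths are illustrative):

    import collections

    cov = Coverage()
    cov.set_option("paths", collections.OrderedDict(
        source=["src/", "/ci/build/src/"],   # first entry is the canonical form
    ))
    cov.combine()   # reads .coverage.* files next to the data file, remapping via the alias
    cov.save()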
+
+ def get_data(self):
+ """Get the collected data.
+
+ Also warn about various problems collecting data.
+
+ Returns a :class:`coverage.CoverageData`, the collected coverage data.
+
+ .. versionadded:: 4.0
+
+ """
+ self._init()
+ self._init_data(suffix=None)
+ self._post_init()
+
+ if self._collector and self._collector.flush_data():
+ self._post_save_work()
+
+ return self._data
+
+ def _post_save_work(self):
+ """After saving data, look for warnings, post-work, etc.
+
+ Warn about things that should have happened but didn't.
+ Look for unexecuted files.
+
+ """
+ # If there are still entries in the source_pkgs_unmatched list,
+ # then we never encountered those packages.
+ if self._warn_unimported_source:
+ self._inorout.warn_unimported_source()
+
+ # Find out if we got any data.
+ if not self._data and self._warn_no_data:
+ self._warn("No data was collected.", slug="no-data-collected")
+
+ # Touch all the files that could have executed, so that we can
+ # mark completely unexecuted files as 0% covered.
+ if self._data is not None:
+ for file_path, plugin_name in self._inorout.find_possibly_unexecuted_files():
+ file_path = self._file_mapper(file_path)
+ self._data.touch_file(file_path, plugin_name)
+
+ if self.config.note:
+ self._warn("The '[run] note' setting is no longer supported.")
+
+ # Backward compatibility with version 1.
+ def analysis(self, morf):
+ """Like `analysis2` but doesn't return excluded line numbers."""
+ f, s, _, m, mf = self.analysis2(morf)
+ return f, s, m, mf
+
+ def analysis2(self, morf):
+ """Analyze a module.
+
+ `morf` is a module or a file name. It will be analyzed to determine
+ its coverage statistics. The return value is a 5-tuple:
+
+ * The file name for the module.
+ * A list of line numbers of executable statements.
+ * A list of line numbers of excluded statements.
+ * A list of line numbers of statements not run (missing from
+ execution).
+ * A readable formatted string of the missing line numbers.
+
+ The analysis uses the source file itself and the current measured
+ coverage data.
+
+ """
+ analysis = self._analyze(morf)
+ return (
+ analysis.filename,
+ sorted(analysis.statements),
+ sorted(analysis.excluded),
+ sorted(analysis.missing),
+ analysis.missing_formatted(),
+ )
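A hedged sketch of reading the 5-tuple; the module name and the resulting numbers are hypothetical and depend on what was measured:

    cov = Coverage()
    cov.start()
    import mymod                   # hypothetical module being measured
    cov.stop()
    fname, statements, excluded, missing, formatted = cov.analysis2(mymod)
    print("%d of %d statements missed: %s" % (len(missing), len(statements), formatted))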
+
+ def _analyze(self, it):
+ """Analyze a single morf or code unit.
+
+ Returns an `Analysis` object.
+
+ """
+ # All reporting comes through here, so do reporting initialization.
+ self._init()
+ Numbers.set_precision(self.config.precision)
+ self._post_init()
+
+ data = self.get_data()
+ if not isinstance(it, FileReporter):
+ it = self._get_file_reporter(it)
+
+ return Analysis(data, it, self._file_mapper)
+
+ def _get_file_reporter(self, morf):
+ """Get a FileReporter for a module or file name."""
+ plugin = None
+ file_reporter = "python"
+
+ if isinstance(morf, string_class):
+ mapped_morf = self._file_mapper(morf)
+ plugin_name = self._data.file_tracer(mapped_morf)
+ if plugin_name:
+ plugin = self._plugins.get(plugin_name)
+
+ if plugin:
+ file_reporter = plugin.file_reporter(mapped_morf)
+ if file_reporter is None:
+ raise CoverageException(
+ "Plugin %r did not provide a file reporter for %r." % (
+ plugin._coverage_plugin_name, morf
+ )
+ )
+
+ if file_reporter == "python":
+ file_reporter = PythonFileReporter(morf, self)
+
+ return file_reporter
+
+ def _get_file_reporters(self, morfs=None):
+ """Get a list of FileReporters for a list of modules or file names.
+
+ For each module or file name in `morfs`, find a FileReporter. Return
+ the list of FileReporters.
+
+ If `morfs` is a single module or file name, this returns a list of one
+ FileReporter. If `morfs` is empty or None, then the list of all files
+ measured is used to find the FileReporters.
+
+ """
+ if not morfs:
+ morfs = self._data.measured_files()
+
+ # Be sure we have a collection.
+ if not isinstance(morfs, (list, tuple, set)):
+ morfs = [morfs]
+
+ file_reporters = [self._get_file_reporter(morf) for morf in morfs]
+ return file_reporters
+
+ def report(
+ self, morfs=None, show_missing=None, ignore_errors=None,
+ file=None, omit=None, include=None, skip_covered=None,
+ contexts=None, skip_empty=None,
+ ):
+ """Write a textual summary report to `file`.
+
+ Each module in `morfs` is listed, with counts of statements, executed
+ statements, missing statements, and a list of lines missed.
+
+ If `show_missing` is true, then details of which lines or branches are
+ missing will be included in the report. If `ignore_errors` is true,
+ then a failure while reporting a single file will not stop the entire
+ report.
+
+ `file` is a file-like object, suitable for writing.
+
+ `include` is a list of file name patterns. Files that match will be
+ included in the report. Files matching `omit` will not be included in
+ the report.
+
+ If `skip_covered` is true, don't report on files with 100% coverage.
+
+ If `skip_empty` is true, don't report on empty files (those that have
+ no statements).
+
+ `contexts` is a list of regular expressions. Only data from
+ :ref:`dynamic contexts <dynamic_contexts>` that match one of those
+ expressions (using :func:`re.search <python:re.search>`) will be
+ included in the report.
+
+ All of the arguments default to the settings read from the
+ :ref:`configuration file <config>`.
+
+ Returns a float, the total percentage covered.
+
+ .. versionadded:: 4.0
+ The `skip_covered` parameter.
+
+ .. versionadded:: 5.0
+ The `contexts` and `skip_empty` parameters.
+
+ """
+ with override_config(
+ self,
+ ignore_errors=ignore_errors, report_omit=omit, report_include=include,
+ show_missing=show_missing, skip_covered=skip_covered,
+ report_contexts=contexts, skip_empty=skip_empty,
+ ):
+ reporter = SummaryReporter(self)
+ return reporter.report(morfs, outfile=file)
+
+ def annotate(
+ self, morfs=None, directory=None, ignore_errors=None,
+ omit=None, include=None, contexts=None,
+ ):
+ """Annotate a list of modules.
+
+ Each module in `morfs` is annotated. The source is written to a new
+ file, named with a ",cover" suffix, with each line prefixed with a
+ marker to indicate the coverage of the line. Covered lines have ">",
+ excluded lines have "-", and missing lines have "!".
+
+ See :meth:`report` for other arguments.
+
+ """
+ with override_config(self,
+ ignore_errors=ignore_errors, report_omit=omit,
+ report_include=include, report_contexts=contexts,
+ ):
+ reporter = AnnotateReporter(self)
+ reporter.report(morfs, directory=directory)
+
+ def html_report(self, morfs=None, directory=None, ignore_errors=None,
+ omit=None, include=None, extra_css=None, title=None,
+ skip_covered=None, show_contexts=None, contexts=None,
+ skip_empty=None):
+ """Generate an HTML report.
+
+ The HTML is written to `directory`. The file "index.html" is the
+ overview starting point, with links to more detailed pages for
+ individual modules.
+
+ `extra_css` is a path to a file of other CSS to apply on the page.
+ It will be copied into the HTML directory.
+
+ `title` is a text string (not HTML) to use as the title of the HTML
+ report.
+
+ See :meth:`report` for other arguments.
+
+ Returns a float, the total percentage covered.
+
+ .. note::
+ The HTML report files are generated incrementally based on the
+ source files and coverage results. If you modify the report files,
+ the changes will not be considered. You should be careful about
+ changing the files in the report folder.
+
+ """
+ with override_config(self,
+ ignore_errors=ignore_errors, report_omit=omit, report_include=include,
+ html_dir=directory, extra_css=extra_css, html_title=title,
+ skip_covered=skip_covered, show_contexts=show_contexts, report_contexts=contexts,
+ skip_empty=skip_empty,
+ ):
+ reporter = HtmlReporter(self)
+ return reporter.report(morfs)
+
+ def xml_report(
+ self, morfs=None, outfile=None, ignore_errors=None,
+ omit=None, include=None, contexts=None,
+ ):
+ """Generate an XML report of coverage results.
+
+ The report is compatible with Cobertura reports.
+
+ Each module in `morfs` is included in the report. `outfile` is the
+ path to write the file to, "-" will write to stdout.
+
+ See :meth:`report` for other arguments.
+
+ Returns a float, the total percentage covered.
+
+ """
+ with override_config(self,
+ ignore_errors=ignore_errors, report_omit=omit, report_include=include,
+ xml_output=outfile, report_contexts=contexts,
+ ):
+ return render_report(self.config.xml_output, XmlReporter(self), morfs)
+
+ def json_report(
+ self, morfs=None, outfile=None, ignore_errors=None,
+ omit=None, include=None, contexts=None, pretty_print=None,
+ show_contexts=None
+ ):
+ """Generate a JSON report of coverage results.
+
+ Each module in `morfs` is included in the report. `outfile` is the
+ path to write the file to, "-" will write to stdout.
+
+ See :meth:`report` for other arguments.
+
+ Returns a float, the total percentage covered.
+
+ .. versionadded:: 5.0
+
+ """
+ with override_config(self,
+ ignore_errors=ignore_errors, report_omit=omit, report_include=include,
+ json_output=outfile, report_contexts=contexts, json_pretty_print=pretty_print,
+ json_show_contexts=show_contexts
+ ):
+ return render_report(self.config.json_output, JsonReporter(self), morfs)
+
+ def sys_info(self):
+ """Return a list of (key, value) pairs showing internal information."""
+
+ import coverage as covmod
+
+ self._init()
+ self._post_init()
+
+ def plugin_info(plugins):
+ """Make an entry for the sys_info from a list of plug-ins."""
+ entries = []
+ for plugin in plugins:
+ entry = plugin._coverage_plugin_name
+ if not plugin._coverage_enabled:
+ entry += " (disabled)"
+ entries.append(entry)
+ return entries
+
+ info = [
+ ('version', covmod.__version__),
+ ('coverage', covmod.__file__),
+ ('tracer', self._collector.tracer_name() if self._collector else "-none-"),
+ ('CTracer', 'available' if CTracer else "unavailable"),
+ ('plugins.file_tracers', plugin_info(self._plugins.file_tracers)),
+ ('plugins.configurers', plugin_info(self._plugins.configurers)),
+ ('plugins.context_switchers', plugin_info(self._plugins.context_switchers)),
+ ('configs_attempted', self.config.attempted_config_files),
+ ('configs_read', self.config.config_files_read),
+ ('config_file', self.config.config_file),
+ ('config_contents',
+ repr(self.config._config_contents)
+ if self.config._config_contents
+ else '-none-'
+ ),
+ ('data_file', self._data.data_filename() if self._data is not None else "-none-"),
+ ('python', sys.version.replace('\n', '')),
+ ('platform', platform.platform()),
+ ('implementation', platform.python_implementation()),
+ ('executable', sys.executable),
+ ('def_encoding', sys.getdefaultencoding()),
+ ('fs_encoding', sys.getfilesystemencoding()),
+ ('pid', os.getpid()),
+ ('cwd', os.getcwd()),
+ ('path', sys.path),
+ ('environment', sorted(
+ ("%s = %s" % (k, v))
+ for k, v in iitems(os.environ)
+ if any(slug in k for slug in ("COV", "PY"))
+ )),
+ ('command_line', " ".join(getattr(sys, 'argv', ['-none-']))),
+ ]
+
+ if self._inorout:
+ info.extend(self._inorout.sys_info())
+
+ info.extend(CoverageData.sys_info())
+
+ return info
+
+
+# Mega debugging...
+# $set_env.py: COVERAGE_DEBUG_CALLS - Lots and lots of output about calls to Coverage.
+if int(os.environ.get("COVERAGE_DEBUG_CALLS", 0)): # pragma: debugging
+ from coverage.debug import decorate_methods, show_calls
+
+ Coverage = decorate_methods(show_calls(show_args=True), butnot=['get_data'])(Coverage)
+
+
+def process_startup():
+ """Call this at Python start-up to perhaps measure coverage.
+
+ If the environment variable COVERAGE_PROCESS_START is defined, coverage
+ measurement is started. The value of the variable is the config file
+ to use.
+
+ There are two ways to configure your Python installation to invoke this
+ function when Python starts:
+
+ #. Create or append to sitecustomize.py to add these lines::
+
+ import coverage
+ coverage.process_startup()
+
+ #. Create a .pth file in your Python installation containing::
+
+ import coverage; coverage.process_startup()
+
+ Returns the :class:`Coverage` instance that was started, or None if it was
+ not started by this call.
+
+ """
+ cps = os.environ.get("COVERAGE_PROCESS_START")
+ if not cps:
+ # No request for coverage, nothing to do.
+ return None
+
+ # This function can be called more than once in a process. This happens
+ # because some virtualenv configurations make the same directory visible
+ # twice in sys.path. This means that the .pth file will be found twice,
+ # and executed twice, executing this function twice. We set a global
+ # flag (an attribute on this function) to indicate that coverage.py has
+ # already been started, so we can avoid doing it twice.
+ #
+ # https://bitbucket.org/ned/coveragepy/issue/340/keyerror-subpy has more
+ # details.
+
+ if hasattr(process_startup, "coverage"):
+ # We've annotated this function before, so we must have already
+ # started coverage.py in this process. Nothing to do.
+ return None
+
+ cov = Coverage(config_file=cps)
+ process_startup.coverage = cov
+ cov._warn_no_data = False
+ cov._warn_unimported_source = False
+ cov._warn_preimported_source = False
+ cov._auto_save = True
+ cov.start()
+
+ return cov
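The other half of the handshake is the environment variable; a sketch of a process that sets it and triggers startup directly (the path is illustrative and must point at a readable config file):

    import os
    import coverage

    os.environ["COVERAGE_PROCESS_START"] = "/path/to/.coveragerc"   # illustrative path
    cov = coverage.process_startup()
    # cov is the started Coverage instance, or None if the variable was unset
    # or measurement had already been started in this process.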
+
+
+def _prevent_sub_process_measurement():
+ """Stop any subprocess auto-measurement from writing data."""
+ auto_created_coverage = getattr(process_startup, "coverage", None)
+ if auto_created_coverage is not None:
+ auto_created_coverage._auto_save = False
diff --git a/third_party/python/coverage/coverage/ctracer/datastack.c b/third_party/python/coverage/coverage/ctracer/datastack.c
new file mode 100644
index 0000000000..a9cfcc2cf2
--- /dev/null
+++ b/third_party/python/coverage/coverage/ctracer/datastack.c
@@ -0,0 +1,50 @@
+/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */
+/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */
+
+#include "util.h"
+#include "datastack.h"
+
+#define STACK_DELTA 20
+
+int
+DataStack_init(Stats *pstats, DataStack *pdata_stack)
+{
+ pdata_stack->depth = -1;
+ pdata_stack->stack = NULL;
+ pdata_stack->alloc = 0;
+ return RET_OK;
+}
+
+void
+DataStack_dealloc(Stats *pstats, DataStack *pdata_stack)
+{
+ int i;
+
+ for (i = 0; i < pdata_stack->alloc; i++) {
+ Py_XDECREF(pdata_stack->stack[i].file_data);
+ }
+ PyMem_Free(pdata_stack->stack);
+}
+
+int
+DataStack_grow(Stats *pstats, DataStack *pdata_stack)
+{
+ pdata_stack->depth++;
+ if (pdata_stack->depth >= pdata_stack->alloc) {
+ /* We've outgrown our data_stack array: make it bigger. */
+ int bigger = pdata_stack->alloc + STACK_DELTA;
+ DataStackEntry * bigger_data_stack = PyMem_Realloc(pdata_stack->stack, bigger * sizeof(DataStackEntry));
+ STATS( pstats->stack_reallocs++; )
+ if (bigger_data_stack == NULL) {
+ PyErr_NoMemory();
+ pdata_stack->depth--;
+ return RET_ERROR;
+ }
+ /* Zero the new entries. */
+ memset(bigger_data_stack + pdata_stack->alloc, 0, STACK_DELTA * sizeof(DataStackEntry));
+
+ pdata_stack->stack = bigger_data_stack;
+ pdata_stack->alloc = bigger;
+ }
+ return RET_OK;
+}
diff --git a/third_party/python/coverage/coverage/ctracer/datastack.h b/third_party/python/coverage/coverage/ctracer/datastack.h
new file mode 100644
index 0000000000..3b3078ba27
--- /dev/null
+++ b/third_party/python/coverage/coverage/ctracer/datastack.h
@@ -0,0 +1,45 @@
+/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */
+/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */
+
+#ifndef _COVERAGE_DATASTACK_H
+#define _COVERAGE_DATASTACK_H
+
+#include "util.h"
+#include "stats.h"
+
+/* An entry on the data stack. For each call frame, we need to record all
+ * the information needed for CTracer_handle_line to operate as quickly as
+ * possible.
+ */
+typedef struct DataStackEntry {
+ /* The current file_data dictionary. Owned. */
+ PyObject * file_data;
+
+ /* The disposition object for this frame. A borrowed instance of CFileDisposition. */
+ PyObject * disposition;
+
+ /* The FileTracer handling this frame, or None if it's Python. Borrowed. */
+ PyObject * file_tracer;
+
+ /* The line number of the last line recorded, for tracing arcs.
+ -1 means there was no previous line, as when entering a code object.
+ */
+ int last_line;
+
+ BOOL started_context;
+} DataStackEntry;
+
+/* A data stack is a dynamically allocated vector of DataStackEntry's. */
+typedef struct DataStack {
+ int depth; /* The index of the last-used entry in stack. */
+ int alloc; /* number of entries allocated at stack. */
+ /* The file data at each level, or NULL if not recording. */
+ DataStackEntry * stack;
+} DataStack;
+
+
+int DataStack_init(Stats * pstats, DataStack *pdata_stack);
+void DataStack_dealloc(Stats * pstats, DataStack *pdata_stack);
+int DataStack_grow(Stats * pstats, DataStack *pdata_stack);
+
+#endif /* _COVERAGE_DATASTACK_H */
diff --git a/third_party/python/coverage/coverage/ctracer/filedisp.c b/third_party/python/coverage/coverage/ctracer/filedisp.c
new file mode 100644
index 0000000000..47782ae090
--- /dev/null
+++ b/third_party/python/coverage/coverage/ctracer/filedisp.c
@@ -0,0 +1,85 @@
+/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */
+/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */
+
+#include "util.h"
+#include "filedisp.h"
+
+void
+CFileDisposition_dealloc(CFileDisposition *self)
+{
+ Py_XDECREF(self->original_filename);
+ Py_XDECREF(self->canonical_filename);
+ Py_XDECREF(self->source_filename);
+ Py_XDECREF(self->trace);
+ Py_XDECREF(self->reason);
+ Py_XDECREF(self->file_tracer);
+ Py_XDECREF(self->has_dynamic_filename);
+}
+
+static PyMemberDef
+CFileDisposition_members[] = {
+ { "original_filename", T_OBJECT, offsetof(CFileDisposition, original_filename), 0,
+ PyDoc_STR("") },
+
+ { "canonical_filename", T_OBJECT, offsetof(CFileDisposition, canonical_filename), 0,
+ PyDoc_STR("") },
+
+ { "source_filename", T_OBJECT, offsetof(CFileDisposition, source_filename), 0,
+ PyDoc_STR("") },
+
+ { "trace", T_OBJECT, offsetof(CFileDisposition, trace), 0,
+ PyDoc_STR("") },
+
+ { "reason", T_OBJECT, offsetof(CFileDisposition, reason), 0,
+ PyDoc_STR("") },
+
+ { "file_tracer", T_OBJECT, offsetof(CFileDisposition, file_tracer), 0,
+ PyDoc_STR("") },
+
+ { "has_dynamic_filename", T_OBJECT, offsetof(CFileDisposition, has_dynamic_filename), 0,
+ PyDoc_STR("") },
+
+ { NULL }
+};
+
+PyTypeObject
+CFileDispositionType = {
+ MyType_HEAD_INIT
+ "coverage.CFileDispositionType", /*tp_name*/
+ sizeof(CFileDisposition), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ (destructor)CFileDisposition_dealloc, /*tp_dealloc*/
+ 0, /*tp_print*/
+ 0, /*tp_getattr*/
+ 0, /*tp_setattr*/
+ 0, /*tp_compare*/
+ 0, /*tp_repr*/
+ 0, /*tp_as_number*/
+ 0, /*tp_as_sequence*/
+ 0, /*tp_as_mapping*/
+ 0, /*tp_hash */
+ 0, /*tp_call*/
+ 0, /*tp_str*/
+ 0, /*tp_getattro*/
+ 0, /*tp_setattro*/
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
+ "CFileDisposition objects", /* tp_doc */
+ 0, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
+ 0, /* tp_methods */
+ CFileDisposition_members, /* tp_members */
+ 0, /* tp_getset */
+ 0, /* tp_base */
+ 0, /* tp_dict */
+ 0, /* tp_descr_get */
+ 0, /* tp_descr_set */
+ 0, /* tp_dictoffset */
+ 0, /* tp_init */
+ 0, /* tp_alloc */
+ 0, /* tp_new */
+};
diff --git a/third_party/python/coverage/coverage/ctracer/filedisp.h b/third_party/python/coverage/coverage/ctracer/filedisp.h
new file mode 100644
index 0000000000..860f9a50b1
--- /dev/null
+++ b/third_party/python/coverage/coverage/ctracer/filedisp.h
@@ -0,0 +1,26 @@
+/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */
+/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */
+
+#ifndef _COVERAGE_FILEDISP_H
+#define _COVERAGE_FILEDISP_H
+
+#include "util.h"
+#include "structmember.h"
+
+typedef struct CFileDisposition {
+ PyObject_HEAD
+
+ PyObject * original_filename;
+ PyObject * canonical_filename;
+ PyObject * source_filename;
+ PyObject * trace;
+ PyObject * reason;
+ PyObject * file_tracer;
+ PyObject * has_dynamic_filename;
+} CFileDisposition;
+
+void CFileDisposition_dealloc(CFileDisposition *self);
+
+extern PyTypeObject CFileDispositionType;
+
+#endif /* _COVERAGE_FILEDISP_H */
diff --git a/third_party/python/coverage/coverage/ctracer/module.c b/third_party/python/coverage/coverage/ctracer/module.c
new file mode 100644
index 0000000000..f308902b69
--- /dev/null
+++ b/third_party/python/coverage/coverage/ctracer/module.c
@@ -0,0 +1,108 @@
+/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */
+/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */
+
+#include "util.h"
+#include "tracer.h"
+#include "filedisp.h"
+
+/* Module definition */
+
+#define MODULE_DOC PyDoc_STR("Fast coverage tracer.")
+
+#if PY_MAJOR_VERSION >= 3
+
+static PyModuleDef
+moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "coverage.tracer",
+ MODULE_DOC,
+ -1,
+ NULL, /* methods */
+ NULL,
+ NULL, /* traverse */
+ NULL, /* clear */
+ NULL
+};
+
+
+PyObject *
+PyInit_tracer(void)
+{
+ PyObject * mod = PyModule_Create(&moduledef);
+ if (mod == NULL) {
+ return NULL;
+ }
+
+ if (CTracer_intern_strings() < 0) {
+ return NULL;
+ }
+
+ /* Initialize CTracer */
+ CTracerType.tp_new = PyType_GenericNew;
+ if (PyType_Ready(&CTracerType) < 0) {
+ Py_DECREF(mod);
+ return NULL;
+ }
+
+ Py_INCREF(&CTracerType);
+ if (PyModule_AddObject(mod, "CTracer", (PyObject *)&CTracerType) < 0) {
+ Py_DECREF(mod);
+ Py_DECREF(&CTracerType);
+ return NULL;
+ }
+
+ /* Initialize CFileDisposition */
+ CFileDispositionType.tp_new = PyType_GenericNew;
+ if (PyType_Ready(&CFileDispositionType) < 0) {
+ Py_DECREF(mod);
+ Py_DECREF(&CTracerType);
+ return NULL;
+ }
+
+ Py_INCREF(&CFileDispositionType);
+ if (PyModule_AddObject(mod, "CFileDisposition", (PyObject *)&CFileDispositionType) < 0) {
+ Py_DECREF(mod);
+ Py_DECREF(&CTracerType);
+ Py_DECREF(&CFileDispositionType);
+ return NULL;
+ }
+
+ return mod;
+}
+
+#else
+
+void
+inittracer(void)
+{
+ PyObject * mod;
+
+ mod = Py_InitModule3("coverage.tracer", NULL, MODULE_DOC);
+ if (mod == NULL) {
+ return;
+ }
+
+ if (CTracer_intern_strings() < 0) {
+ return;
+ }
+
+ /* Initialize CTracer */
+ CTracerType.tp_new = PyType_GenericNew;
+ if (PyType_Ready(&CTracerType) < 0) {
+ return;
+ }
+
+ Py_INCREF(&CTracerType);
+ PyModule_AddObject(mod, "CTracer", (PyObject *)&CTracerType);
+
+ /* Initialize CFileDisposition */
+ CFileDispositionType.tp_new = PyType_GenericNew;
+ if (PyType_Ready(&CFileDispositionType) < 0) {
+ return;
+ }
+
+ Py_INCREF(&CFileDispositionType);
+ PyModule_AddObject(mod, "CFileDisposition", (PyObject *)&CFileDispositionType);
+}
+
+#endif /* Py3k */
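
Both interpreter branches register the same two types under the compiled module name coverage.tracer. A quick, hedged check (not part of the patch) that the extension built and exposes them; on installs without the C extension the import simply fails and coverage.py uses its pure-Python tracer instead.

# Sketch: confirm the compiled extension exposes both registered types.
try:
    from coverage import tracer
    print(tracer.CTracer, tracer.CFileDisposition)
except ImportError:
    print("no C extension; coverage.py will use its Python tracer")
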
diff --git a/third_party/python/coverage/coverage/ctracer/stats.h b/third_party/python/coverage/coverage/ctracer/stats.h
new file mode 100644
index 0000000000..05173369f7
--- /dev/null
+++ b/third_party/python/coverage/coverage/ctracer/stats.h
@@ -0,0 +1,31 @@
+/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */
+/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */
+
+#ifndef _COVERAGE_STATS_H
+#define _COVERAGE_STATS_H
+
+#include "util.h"
+
+#if COLLECT_STATS
+#define STATS(x) x
+#else
+#define STATS(x)
+#endif
+
+typedef struct Stats {
+ unsigned int calls; /* Need at least one member, but the rest only if needed. */
+#if COLLECT_STATS
+ unsigned int lines;
+ unsigned int returns;
+ unsigned int exceptions;
+ unsigned int others;
+ unsigned int files;
+ unsigned int missed_returns;
+ unsigned int stack_reallocs;
+ unsigned int errors;
+ unsigned int pycalls;
+ unsigned int start_context_calls;
+#endif
+} Stats;
+
+#endif /* _COVERAGE_STATS_H */
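
These counters only exist when the extension is compiled with COLLECT_STATS defined; CTracer.get_stats() (defined later in tracer.c) returns them as a dict, or None in a normal build. A hedged sketch, assuming the compiled extension is importable:

# Sketch: read the tracer's internal counters. get_stats() returns None
# unless the extension was built with COLLECT_STATS defined.
from coverage import tracer

t = tracer.CTracer()
stats = t.get_stats()
if stats is None:
    print("built without COLLECT_STATS")
else:
    for name, count in sorted(stats.items()):
        print("%-20s %d" % (name, count))
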
diff --git a/third_party/python/coverage/coverage/ctracer/tracer.c b/third_party/python/coverage/coverage/ctracer/tracer.c
new file mode 100644
index 0000000000..7d639112db
--- /dev/null
+++ b/third_party/python/coverage/coverage/ctracer/tracer.c
@@ -0,0 +1,1186 @@
+/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */
+/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */
+
+/* C-based Tracer for coverage.py. */
+
+#include "util.h"
+#include "datastack.h"
+#include "filedisp.h"
+#include "tracer.h"
+
+/* Python C API helpers. */
+
+static int
+pyint_as_int(PyObject * pyint, int *pint)
+{
+ int the_int = MyInt_AsInt(pyint);
+ if (the_int == -1 && PyErr_Occurred()) {
+ return RET_ERROR;
+ }
+
+ *pint = the_int;
+ return RET_OK;
+}
+
+
+/* Interned strings to speed GetAttr etc. */
+
+static PyObject *str_trace;
+static PyObject *str_file_tracer;
+static PyObject *str__coverage_enabled;
+static PyObject *str__coverage_plugin;
+static PyObject *str__coverage_plugin_name;
+static PyObject *str_dynamic_source_filename;
+static PyObject *str_line_number_range;
+
+int
+CTracer_intern_strings(void)
+{
+ int ret = RET_ERROR;
+
+#define INTERN_STRING(v, s) \
+ v = MyText_InternFromString(s); \
+ if (v == NULL) { \
+ goto error; \
+ }
+
+ INTERN_STRING(str_trace, "trace")
+ INTERN_STRING(str_file_tracer, "file_tracer")
+ INTERN_STRING(str__coverage_enabled, "_coverage_enabled")
+ INTERN_STRING(str__coverage_plugin, "_coverage_plugin")
+ INTERN_STRING(str__coverage_plugin_name, "_coverage_plugin_name")
+ INTERN_STRING(str_dynamic_source_filename, "dynamic_source_filename")
+ INTERN_STRING(str_line_number_range, "line_number_range")
+
+ ret = RET_OK;
+
+error:
+ return ret;
+}
+
+static void CTracer_disable_plugin(CTracer *self, PyObject * disposition);
+
+static int
+CTracer_init(CTracer *self, PyObject *args_unused, PyObject *kwds_unused)
+{
+ int ret = RET_ERROR;
+
+ if (DataStack_init(&self->stats, &self->data_stack) < 0) {
+ goto error;
+ }
+
+ self->pdata_stack = &self->data_stack;
+
+ self->context = Py_None;
+ Py_INCREF(self->context);
+
+ ret = RET_OK;
+ goto ok;
+
+error:
+ STATS( self->stats.errors++; )
+
+ok:
+ return ret;
+}
+
+static void
+CTracer_dealloc(CTracer *self)
+{
+ int i;
+
+ if (self->started) {
+ PyEval_SetTrace(NULL, NULL);
+ }
+
+ Py_XDECREF(self->should_trace);
+ Py_XDECREF(self->check_include);
+ Py_XDECREF(self->warn);
+ Py_XDECREF(self->concur_id_func);
+ Py_XDECREF(self->data);
+ Py_XDECREF(self->file_tracers);
+ Py_XDECREF(self->should_trace_cache);
+ Py_XDECREF(self->should_start_context);
+ Py_XDECREF(self->switch_context);
+ Py_XDECREF(self->context);
+
+ DataStack_dealloc(&self->stats, &self->data_stack);
+ if (self->data_stacks) {
+ for (i = 0; i < self->data_stacks_used; i++) {
+ DataStack_dealloc(&self->stats, self->data_stacks + i);
+ }
+ PyMem_Free(self->data_stacks);
+ }
+
+ Py_XDECREF(self->data_stack_index);
+
+ Py_TYPE(self)->tp_free((PyObject*)self);
+}
+
+#if TRACE_LOG
+static const char *
+indent(int n)
+{
+ static const char * spaces =
+ " "
+ " "
+ " "
+ " "
+ ;
+ return spaces + strlen(spaces) - n*2;
+}
+
+static BOOL logging = FALSE;
+/* Set these constants to be a file substring and line number to start logging. */
+static const char * start_file = "tests/views";
+static int start_line = 27;
+
+static void
+showlog(int depth, int lineno, PyObject * filename, const char * msg)
+{
+ if (logging) {
+ printf("%s%3d ", indent(depth), depth);
+ if (lineno) {
+ printf("%4d", lineno);
+ }
+ else {
+ printf(" ");
+ }
+ if (filename) {
+ PyObject *ascii = MyText_AS_BYTES(filename);
+ printf(" %s", MyBytes_AS_STRING(ascii));
+ Py_DECREF(ascii);
+ }
+ if (msg) {
+ printf(" %s", msg);
+ }
+ printf("\n");
+ }
+}
+
+#define SHOWLOG(a,b,c,d) showlog(a,b,c,d)
+#else
+#define SHOWLOG(a,b,c,d)
+#endif /* TRACE_LOG */
+
+#if WHAT_LOG
+static const char * what_sym[] = {"CALL", "EXC ", "LINE", "RET "};
+#endif
+
+/* Record a pair of integers in self->pcur_entry->file_data. */
+static int
+CTracer_record_pair(CTracer *self, int l1, int l2)
+{
+ int ret = RET_ERROR;
+
+ PyObject * t = NULL;
+
+ t = Py_BuildValue("(ii)", l1, l2);
+ if (t == NULL) {
+ goto error;
+ }
+
+ if (PyDict_SetItem(self->pcur_entry->file_data, t, Py_None) < 0) {
+ goto error;
+ }
+
+ ret = RET_OK;
+
+error:
+ Py_XDECREF(t);
+
+ return ret;
+}
+
+/* Set self->pdata_stack to the proper data_stack to use. */
+static int
+CTracer_set_pdata_stack(CTracer *self)
+{
+ int ret = RET_ERROR;
+ PyObject * co_obj = NULL;
+ PyObject * stack_index = NULL;
+
+ if (self->concur_id_func != Py_None) {
+ int the_index = 0;
+
+ if (self->data_stack_index == NULL) {
+ PyObject * weakref = NULL;
+
+ weakref = PyImport_ImportModule("weakref");
+ if (weakref == NULL) {
+ goto error;
+ }
+ STATS( self->stats.pycalls++; )
+ self->data_stack_index = PyObject_CallMethod(weakref, "WeakKeyDictionary", NULL);
+ Py_XDECREF(weakref);
+
+ if (self->data_stack_index == NULL) {
+ goto error;
+ }
+ }
+
+ STATS( self->stats.pycalls++; )
+ co_obj = PyObject_CallObject(self->concur_id_func, NULL);
+ if (co_obj == NULL) {
+ goto error;
+ }
+ stack_index = PyObject_GetItem(self->data_stack_index, co_obj);
+ if (stack_index == NULL) {
+ /* PyObject_GetItem sets an exception if the key isn't present. */
+ PyErr_Clear();
+
+ /* A new concurrency object. Make a new data stack. */
+ the_index = self->data_stacks_used;
+ stack_index = MyInt_FromInt(the_index);
+ if (stack_index == NULL) {
+ goto error;
+ }
+ if (PyObject_SetItem(self->data_stack_index, co_obj, stack_index) < 0) {
+ goto error;
+ }
+ self->data_stacks_used++;
+ if (self->data_stacks_used >= self->data_stacks_alloc) {
+ int bigger = self->data_stacks_alloc + 10;
+ DataStack * bigger_stacks = PyMem_Realloc(self->data_stacks, bigger * sizeof(DataStack));
+ if (bigger_stacks == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+ self->data_stacks = bigger_stacks;
+ self->data_stacks_alloc = bigger;
+ }
+ DataStack_init(&self->stats, &self->data_stacks[the_index]);
+ }
+ else {
+ if (pyint_as_int(stack_index, &the_index) < 0) {
+ goto error;
+ }
+ }
+
+ self->pdata_stack = &self->data_stacks[the_index];
+ }
+ else {
+ self->pdata_stack = &self->data_stack;
+ }
+
+ ret = RET_OK;
+
+error:
+
+ Py_XDECREF(co_obj);
+ Py_XDECREF(stack_index);
+
+ return ret;
+}
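
In effect, each concurrency object returned by concur_id_func (a greenlet, for example) gets its own data stack, looked up through a WeakKeyDictionary so stacks disappear with their owners. A rough pure-Python equivalent of the selection logic, with DataStack as a stand-in for the C struct rather than real coverage.py API:

# Pure-Python sketch of the stack selection above. DataStack is a stand-in
# for the C struct; concur_id_func would be something like greenlet.getcurrent.
import weakref

class DataStack(list):
    """Stand-in: one call stack's worth of per-file data."""

class StackSelector(object):
    def __init__(self, concur_id_func=None):
        self.concur_id_func = concur_id_func
        self.default_stack = DataStack()               # no concurrency: one stack
        self.stack_index = weakref.WeakKeyDictionary()

    def current_stack(self):
        if self.concur_id_func is None:
            return self.default_stack
        co_obj = self.concur_id_func()                 # identify the context
        stack = self.stack_index.get(co_obj)
        if stack is None:                              # first time we see it
            stack = self.stack_index[co_obj] = DataStack()
        return stack

selector = StackSelector()
print(selector.current_stack() is selector.default_stack)   # True
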
+
+/*
+ * Parts of the trace function.
+ */
+
+static int
+CTracer_check_missing_return(CTracer *self, PyFrameObject *frame)
+{
+ int ret = RET_ERROR;
+
+ if (self->last_exc_back) {
+ if (frame == self->last_exc_back) {
+ /* Looks like someone forgot to send a return event. We'll clear
+ the exception state and do the RETURN code here. Notice that the
+ frame we have in hand here is not the correct frame for the RETURN,
+ that frame is gone. Our handling for RETURN doesn't need the
+ actual frame, but we do log it, so that will look a little off if
+ you're looking at the detailed log.
+
+ If someday we need to examine the frame when doing RETURN, then
+ we'll need to keep more of the missed frame's state.
+ */
+ STATS( self->stats.missed_returns++; )
+ if (CTracer_set_pdata_stack(self) < 0) {
+ goto error;
+ }
+ if (self->pdata_stack->depth >= 0) {
+ if (self->tracing_arcs && self->pcur_entry->file_data) {
+ if (CTracer_record_pair(self, self->pcur_entry->last_line, -self->last_exc_firstlineno) < 0) {
+ goto error;
+ }
+ }
+ SHOWLOG(self->pdata_stack->depth, frame->f_lineno, frame->f_code->co_filename, "missedreturn");
+ self->pdata_stack->depth--;
+ self->pcur_entry = &self->pdata_stack->stack[self->pdata_stack->depth];
+ }
+ }
+ self->last_exc_back = NULL;
+ }
+
+ ret = RET_OK;
+
+error:
+
+ return ret;
+}
+
+static int
+CTracer_handle_call(CTracer *self, PyFrameObject *frame)
+{
+ int ret = RET_ERROR;
+ int ret2;
+
+ /* Owned references that we clean up at the very end of the function. */
+ PyObject * disposition = NULL;
+ PyObject * plugin = NULL;
+ PyObject * plugin_name = NULL;
+ PyObject * next_tracename = NULL;
+
+ /* Borrowed references. */
+ PyObject * filename = NULL;
+ PyObject * disp_trace = NULL;
+ PyObject * tracename = NULL;
+ PyObject * file_tracer = NULL;
+ PyObject * has_dynamic_filename = NULL;
+
+ CFileDisposition * pdisp = NULL;
+
+ STATS( self->stats.calls++; )
+
+ /* Grow the stack. */
+ if (CTracer_set_pdata_stack(self) < 0) {
+ goto error;
+ }
+ if (DataStack_grow(&self->stats, self->pdata_stack) < 0) {
+ goto error;
+ }
+ self->pcur_entry = &self->pdata_stack->stack[self->pdata_stack->depth];
+
+ /* See if this frame begins a new context. */
+ if (self->should_start_context != Py_None && self->context == Py_None) {
+ PyObject * context;
+ /* We're looking for our context, ask should_start_context if this is the start. */
+ STATS( self->stats.start_context_calls++; )
+ STATS( self->stats.pycalls++; )
+ context = PyObject_CallFunctionObjArgs(self->should_start_context, frame, NULL);
+ if (context == NULL) {
+ goto error;
+ }
+ if (context != Py_None) {
+ PyObject * val;
+ Py_DECREF(self->context);
+ self->context = context;
+ self->pcur_entry->started_context = TRUE;
+ STATS( self->stats.pycalls++; )
+ val = PyObject_CallFunctionObjArgs(self->switch_context, context, NULL);
+ if (val == NULL) {
+ goto error;
+ }
+ Py_DECREF(val);
+ }
+ else {
+ Py_DECREF(context);
+ self->pcur_entry->started_context = FALSE;
+ }
+ }
+ else {
+ self->pcur_entry->started_context = FALSE;
+ }
+
+ /* Check if we should trace this line. */
+ filename = frame->f_code->co_filename;
+ disposition = PyDict_GetItem(self->should_trace_cache, filename);
+ if (disposition == NULL) {
+ if (PyErr_Occurred()) {
+ goto error;
+ }
+ STATS( self->stats.files++; )
+
+ /* We've never considered this file before. */
+ /* Ask should_trace about it. */
+ STATS( self->stats.pycalls++; )
+ disposition = PyObject_CallFunctionObjArgs(self->should_trace, filename, frame, NULL);
+ if (disposition == NULL) {
+ /* An error occurred inside should_trace. */
+ goto error;
+ }
+ if (PyDict_SetItem(self->should_trace_cache, filename, disposition) < 0) {
+ goto error;
+ }
+ }
+ else {
+ Py_INCREF(disposition);
+ }
+
+ if (disposition == Py_None) {
+ /* A later check_include returned false, so don't trace it. */
+ disp_trace = Py_False;
+ }
+ else {
+ /* The object we got is a CFileDisposition, use it efficiently. */
+ pdisp = (CFileDisposition *) disposition;
+ disp_trace = pdisp->trace;
+ if (disp_trace == NULL) {
+ goto error;
+ }
+ }
+
+ if (disp_trace == Py_True) {
+ /* If tracename is a string, then we're supposed to trace. */
+ tracename = pdisp->source_filename;
+ if (tracename == NULL) {
+ goto error;
+ }
+ file_tracer = pdisp->file_tracer;
+ if (file_tracer == NULL) {
+ goto error;
+ }
+ if (file_tracer != Py_None) {
+ plugin = PyObject_GetAttr(file_tracer, str__coverage_plugin);
+ if (plugin == NULL) {
+ goto error;
+ }
+ plugin_name = PyObject_GetAttr(plugin, str__coverage_plugin_name);
+ if (plugin_name == NULL) {
+ goto error;
+ }
+ }
+ has_dynamic_filename = pdisp->has_dynamic_filename;
+ if (has_dynamic_filename == NULL) {
+ goto error;
+ }
+ if (has_dynamic_filename == Py_True) {
+ STATS( self->stats.pycalls++; )
+ next_tracename = PyObject_CallMethodObjArgs(
+ file_tracer, str_dynamic_source_filename,
+ tracename, frame, NULL
+ );
+ if (next_tracename == NULL) {
+ /* An exception from the function. Alert the user with a
+ * warning and a traceback.
+ */
+ CTracer_disable_plugin(self, disposition);
+ /* Because we handled the error, goto ok. */
+ goto ok;
+ }
+ tracename = next_tracename;
+
+ if (tracename != Py_None) {
+ /* Check the dynamic source filename against the include rules. */
+ PyObject * included = NULL;
+ int should_include;
+ included = PyDict_GetItem(self->should_trace_cache, tracename);
+ if (included == NULL) {
+ PyObject * should_include_bool;
+ if (PyErr_Occurred()) {
+ goto error;
+ }
+ STATS( self->stats.files++; )
+ STATS( self->stats.pycalls++; )
+ should_include_bool = PyObject_CallFunctionObjArgs(self->check_include, tracename, frame, NULL);
+ if (should_include_bool == NULL) {
+ goto error;
+ }
+ should_include = (should_include_bool == Py_True);
+ Py_DECREF(should_include_bool);
+ if (PyDict_SetItem(self->should_trace_cache, tracename, should_include ? disposition : Py_None) < 0) {
+ goto error;
+ }
+ }
+ else {
+ should_include = (included != Py_None);
+ }
+ if (!should_include) {
+ tracename = Py_None;
+ }
+ }
+ }
+ }
+ else {
+ tracename = Py_None;
+ }
+
+ if (tracename != Py_None) {
+ PyObject * file_data = PyDict_GetItem(self->data, tracename);
+
+ if (file_data == NULL) {
+ if (PyErr_Occurred()) {
+ goto error;
+ }
+ file_data = PyDict_New();
+ if (file_data == NULL) {
+ goto error;
+ }
+ ret2 = PyDict_SetItem(self->data, tracename, file_data);
+ if (ret2 < 0) {
+ goto error;
+ }
+
+ /* If the disposition mentions a plugin, record that. */
+ if (file_tracer != Py_None) {
+ ret2 = PyDict_SetItem(self->file_tracers, tracename, plugin_name);
+ if (ret2 < 0) {
+ goto error;
+ }
+ }
+ }
+ else {
+ /* PyDict_GetItem gives a borrowed reference. Own it. */
+ Py_INCREF(file_data);
+ }
+
+ Py_XDECREF(self->pcur_entry->file_data);
+ self->pcur_entry->file_data = file_data;
+ self->pcur_entry->file_tracer = file_tracer;
+
+ SHOWLOG(self->pdata_stack->depth, frame->f_lineno, filename, "traced");
+ }
+ else {
+ Py_XDECREF(self->pcur_entry->file_data);
+ self->pcur_entry->file_data = NULL;
+ self->pcur_entry->file_tracer = Py_None;
+ SHOWLOG(self->pdata_stack->depth, frame->f_lineno, filename, "skipped");
+ }
+
+ self->pcur_entry->disposition = disposition;
+
+ /* Make the frame right in case settrace(gettrace()) happens. */
+ Py_INCREF(self);
+ My_XSETREF(frame->f_trace, (PyObject*)self);
+
+ /* A call event is really a "start frame" event, and can happen for
+ * re-entering a generator also. f_lasti is -1 for a true call, and a
+ * real byte offset for a generator re-entry.
+ */
+ if (frame->f_lasti < 0) {
+ self->pcur_entry->last_line = -frame->f_code->co_firstlineno;
+ }
+ else {
+ self->pcur_entry->last_line = frame->f_lineno;
+ }
+
+ok:
+ ret = RET_OK;
+
+error:
+ Py_XDECREF(next_tracename);
+ Py_XDECREF(disposition);
+ Py_XDECREF(plugin);
+ Py_XDECREF(plugin_name);
+
+ return ret;
+}
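
The central trick in handle_call is the should_trace_cache: the first time a filename is seen, should_trace() produces a disposition that is cached, and a cached None means "never trace this file again". A small Python sketch of just that step; the policy function here is a made-up example, not coverage.py's real one.

# Sketch of the should_trace cache. The policy function is hypothetical.
_MISS = object()
should_trace_cache = {}

def should_trace(filename, frame=None):
    """Toy policy: only trace files under /src/."""
    if filename.startswith("/src/"):
        return {"trace": True, "source_filename": filename}
    return None                                  # None means: never trace it

def disposition_for(filename, frame=None):
    disp = should_trace_cache.get(filename, _MISS)
    if disp is _MISS:
        disp = should_trace(filename, frame)     # ask once per file...
        should_trace_cache[filename] = disp      # ...and remember the answer
    return disp

print(disposition_for("/src/app.py"))            # computed and cached
print(disposition_for("/usr/lib/os.py"))         # None: skipped from now on
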
+
+
+static void
+CTracer_disable_plugin(CTracer *self, PyObject * disposition)
+{
+ PyObject * file_tracer = NULL;
+ PyObject * plugin = NULL;
+ PyObject * plugin_name = NULL;
+ PyObject * msg = NULL;
+ PyObject * ignored = NULL;
+
+ PyErr_Print();
+
+ file_tracer = PyObject_GetAttr(disposition, str_file_tracer);
+ if (file_tracer == NULL) {
+ goto error;
+ }
+ if (file_tracer == Py_None) {
+ /* This shouldn't happen... */
+ goto ok;
+ }
+ plugin = PyObject_GetAttr(file_tracer, str__coverage_plugin);
+ if (plugin == NULL) {
+ goto error;
+ }
+ plugin_name = PyObject_GetAttr(plugin, str__coverage_plugin_name);
+ if (plugin_name == NULL) {
+ goto error;
+ }
+ msg = MyText_FromFormat(
+ "Disabling plug-in '%s' due to previous exception",
+ MyText_AsString(plugin_name)
+ );
+ if (msg == NULL) {
+ goto error;
+ }
+ STATS( self->stats.pycalls++; )
+ ignored = PyObject_CallFunctionObjArgs(self->warn, msg, NULL);
+ if (ignored == NULL) {
+ goto error;
+ }
+
+ /* Disable the plugin for future files, and stop tracing this file. */
+ if (PyObject_SetAttr(plugin, str__coverage_enabled, Py_False) < 0) {
+ goto error;
+ }
+ if (PyObject_SetAttr(disposition, str_trace, Py_False) < 0) {
+ goto error;
+ }
+
+ goto ok;
+
+error:
+ /* This function doesn't return a status, so if an error happens, print it,
+ * but don't interrupt the flow. */
+ /* PySys_WriteStderr is nicer, but is not in the public API. */
+ fprintf(stderr, "Error occurred while disabling plug-in:\n");
+ PyErr_Print();
+
+ok:
+ Py_XDECREF(file_tracer);
+ Py_XDECREF(plugin);
+ Py_XDECREF(plugin_name);
+ Py_XDECREF(msg);
+ Py_XDECREF(ignored);
+}
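
When a plugin raises, the tracer prints the traceback, warns once, and flips two flags so neither the plugin nor the offending file is consulted again. The same steps in Python, as a hedged sketch: disposition and warn are stand-ins, while the _coverage_plugin* attribute names match the interned strings above.

# Sketch of the disable-plugin steps, mirroring the C flow above.
# Meant to be called from inside an except block, like the C version.
import traceback

def disable_plugin(disposition, warn):
    traceback.print_exc()                        # like PyErr_Print()
    file_tracer = disposition.file_tracer
    if file_tracer is None:
        return                                   # shouldn't happen
    plugin = file_tracer._coverage_plugin
    warn("Disabling plug-in %r due to previous exception"
         % plugin._coverage_plugin_name)
    plugin._coverage_enabled = False             # no new files from this plugin
    disposition.trace = False                    # stop tracing this file too
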
+
+
+static int
+CTracer_unpack_pair(CTracer *self, PyObject *pair, int *p_one, int *p_two)
+{
+ int ret = RET_ERROR;
+ int the_int;
+ PyObject * pyint = NULL;
+ int index;
+
+ if (!PyTuple_Check(pair) || PyTuple_Size(pair) != 2) {
+ PyErr_SetString(
+ PyExc_TypeError,
+ "line_number_range must return 2-tuple"
+ );
+ goto error;
+ }
+
+ for (index = 0; index < 2; index++) {
+ pyint = PyTuple_GetItem(pair, index);
+ if (pyint == NULL) {
+ goto error;
+ }
+ if (pyint_as_int(pyint, &the_int) < 0) {
+ goto error;
+ }
+ *(index == 0 ? p_one : p_two) = the_int;
+ }
+
+ ret = RET_OK;
+
+error:
+ return ret;
+}
+
+static int
+CTracer_handle_line(CTracer *self, PyFrameObject *frame)
+{
+ int ret = RET_ERROR;
+ int ret2;
+
+ STATS( self->stats.lines++; )
+ if (self->pdata_stack->depth >= 0) {
+ SHOWLOG(self->pdata_stack->depth, frame->f_lineno, frame->f_code->co_filename, "line");
+ if (self->pcur_entry->file_data) {
+ int lineno_from = -1;
+ int lineno_to = -1;
+
+ /* We're tracing in this frame: record something. */
+ if (self->pcur_entry->file_tracer != Py_None) {
+ PyObject * from_to = NULL;
+ STATS( self->stats.pycalls++; )
+ from_to = PyObject_CallMethodObjArgs(self->pcur_entry->file_tracer, str_line_number_range, frame, NULL);
+ if (from_to == NULL) {
+ goto error;
+ }
+ ret2 = CTracer_unpack_pair(self, from_to, &lineno_from, &lineno_to);
+ Py_DECREF(from_to);
+ if (ret2 < 0) {
+ CTracer_disable_plugin(self, self->pcur_entry->disposition);
+ goto ok;
+ }
+ }
+ else {
+ lineno_from = lineno_to = frame->f_lineno;
+ }
+
+ if (lineno_from != -1) {
+ for (; lineno_from <= lineno_to; lineno_from++) {
+ if (self->tracing_arcs) {
+ /* Tracing arcs: key is (last_line,this_line). */
+ if (CTracer_record_pair(self, self->pcur_entry->last_line, lineno_from) < 0) {
+ goto error;
+ }
+ }
+ else {
+ /* Tracing lines: key is simply this_line. */
+ PyObject * this_line = MyInt_FromInt(lineno_from);
+ if (this_line == NULL) {
+ goto error;
+ }
+
+ ret2 = PyDict_SetItem(self->pcur_entry->file_data, this_line, Py_None);
+ Py_DECREF(this_line);
+ if (ret2 < 0) {
+ goto error;
+ }
+ }
+
+ self->pcur_entry->last_line = lineno_from;
+ }
+ }
+ }
+ }
+
+ok:
+ ret = RET_OK;
+
+error:
+
+ return ret;
+}
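
Depending on trace_arcs, each executed line ends up in the current file's dict either as a (last_line, this_line) pair or as a bare line number; the value is always None. A small sketch of the two shapes:

# Sketch of the two recording shapes used by handle_line.
def record_line(file_data, last_line, this_line, tracing_arcs):
    if tracing_arcs:
        file_data[(last_line, this_line)] = None   # arc: a pair of line numbers
    else:
        file_data[this_line] = None                # line: just the number
    return this_line                               # becomes the next last_line

file_data = {}
last = -1                                          # -co_firstlineno marks "entry"
for lineno in (1, 2, 5):
    last = record_line(file_data, last, lineno, tracing_arcs=True)
print(sorted(file_data))                           # [(-1, 1), (1, 2), (2, 5)]
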
+
+static int
+CTracer_handle_return(CTracer *self, PyFrameObject *frame)
+{
+ int ret = RET_ERROR;
+
+ STATS( self->stats.returns++; )
+ /* A near-copy of this code is above in the missing-return handler. */
+ if (CTracer_set_pdata_stack(self) < 0) {
+ goto error;
+ }
+ self->pcur_entry = &self->pdata_stack->stack[self->pdata_stack->depth];
+
+ if (self->pdata_stack->depth >= 0) {
+ if (self->tracing_arcs && self->pcur_entry->file_data) {
+ /* Need to distinguish between RETURN_VALUE and YIELD_VALUE. Read
+ * the current bytecode to see what it is. In unusual circumstances
+ * (Cython code), co_code can be the empty string, so range-check
+ * f_lasti before reading the byte.
+ */
+ int bytecode = RETURN_VALUE;
+ PyObject * pCode = frame->f_code->co_code;
+ int lasti = frame->f_lasti;
+
+ if (lasti < MyBytes_GET_SIZE(pCode)) {
+ bytecode = MyBytes_AS_STRING(pCode)[lasti];
+ }
+ if (bytecode != YIELD_VALUE) {
+ int first = frame->f_code->co_firstlineno;
+ if (CTracer_record_pair(self, self->pcur_entry->last_line, -first) < 0) {
+ goto error;
+ }
+ }
+ }
+
+ /* If this frame started a context, then returning from it ends the context. */
+ if (self->pcur_entry->started_context) {
+ PyObject * val;
+ Py_DECREF(self->context);
+ self->context = Py_None;
+ Py_INCREF(self->context);
+ STATS( self->stats.pycalls++; )
+
+ val = PyObject_CallFunctionObjArgs(self->switch_context, self->context, NULL);
+ if (val == NULL) {
+ goto error;
+ }
+ Py_DECREF(val);
+ }
+
+ /* Pop the stack. */
+ SHOWLOG(self->pdata_stack->depth, frame->f_lineno, frame->f_code->co_filename, "return");
+ self->pdata_stack->depth--;
+ self->pcur_entry = &self->pdata_stack->stack[self->pdata_stack->depth];
+ }
+
+ ret = RET_OK;
+
+error:
+
+ return ret;
+}
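
Translated to Python, the RETURN-vs-YIELD check reads the opcode at f_lasti; only a real return records the closing arc back to -co_firstlineno. A hedged sketch, assuming a CPython where f_lasti is a byte offset into co_code (true for the interpreters this tracer supports; newer releases have changed these internals):

# Sketch of the bytecode peek done by handle_return, in Python 3 terms.
import dis
import sys

def is_real_return(frame):
    code = frame.f_code.co_code
    lasti = frame.f_lasti
    if lasti >= len(code):                 # e.g. Cython frames: empty co_code
        return True
    return code[lasti] != dis.opmap["YIELD_VALUE"]

print(is_real_return(sys._getframe()))     # an ordinary frame: True
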
+
+static int
+CTracer_handle_exception(CTracer *self, PyFrameObject *frame)
+{
+ /* Some code (Python 2.3, and pyexpat anywhere) fires an exception event
+ without a return event. To detect that, we'll keep a copy of the
+ parent frame for an exception event. If the next event is in that
+ frame, then we must have returned without a return event. We can
+ synthesize the missing event then.
+
+ Python itself fixed this problem in 2.4. Pyexpat still has the bug.
+ I've reported the problem with pyexpat as http://bugs.python.org/issue6359 .
+ If it gets fixed, this code should still work properly. Maybe some day
+ the bug will be fixed everywhere coverage.py is supported, and we can
+ remove this missing-return detection.
+
+ More about this fix: https://nedbatchelder.com/blog/200907/a_nasty_little_bug.html
+ */
+ STATS( self->stats.exceptions++; )
+ self->last_exc_back = frame->f_back;
+ self->last_exc_firstlineno = frame->f_code->co_firstlineno;
+
+ return RET_OK;
+}
+
+/*
+ * The Trace Function
+ */
+static int
+CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unused)
+{
+ int ret = RET_ERROR;
+
+ #if DO_NOTHING
+ return RET_OK;
+ #endif
+
+ if (!self->started) {
+ /* If CTracer.stop() has been called from another thread, the tracer
+ is still active in the current thread. Let's deactivate ourselves
+ now. */
+ PyEval_SetTrace(NULL, NULL);
+ return RET_OK;
+ }
+
+ #if WHAT_LOG || TRACE_LOG
+ PyObject * ascii = NULL;
+ #endif
+
+ #if WHAT_LOG
+ if (what < (int)(sizeof(what_sym)/sizeof(const char *))) {
+ ascii = MyText_AS_BYTES(frame->f_code->co_filename);
+ printf("trace: %s @ %s %d\n", what_sym[what], MyBytes_AS_STRING(ascii), frame->f_lineno);
+ Py_DECREF(ascii);
+ }
+ #endif
+
+ #if TRACE_LOG
+ ascii = MyText_AS_BYTES(frame->f_code->co_filename);
+ if (strstr(MyBytes_AS_STRING(ascii), start_file) && frame->f_lineno == start_line) {
+ logging = TRUE;
+ }
+ Py_DECREF(ascii);
+ #endif
+
+ /* See CTracer_handle_exception above for details on missing-return detection. */
+ if (CTracer_check_missing_return(self, frame) < 0) {
+ goto error;
+ }
+
+ self->activity = TRUE;
+
+ switch (what) {
+ case PyTrace_CALL:
+ if (CTracer_handle_call(self, frame) < 0) {
+ goto error;
+ }
+ break;
+
+ case PyTrace_RETURN:
+ if (CTracer_handle_return(self, frame) < 0) {
+ goto error;
+ }
+ break;
+
+ case PyTrace_LINE:
+ if (CTracer_handle_line(self, frame) < 0) {
+ goto error;
+ }
+ break;
+
+ case PyTrace_EXCEPTION:
+ if (CTracer_handle_exception(self, frame) < 0) {
+ goto error;
+ }
+ break;
+
+ default:
+ STATS( self->stats.others++; )
+ break;
+ }
+
+ ret = RET_OK;
+ goto cleanup;
+
+error:
+ STATS( self->stats.errors++; )
+
+cleanup:
+ return ret;
+}
+
+
+/*
+ * Python has two ways to set the trace function: sys.settrace(fn), which
+ * takes a Python callable, and PyEval_SetTrace(func, obj), which takes
+ * a C function and a Python object. The way these work together is that
+ * sys.settrace(pyfn) calls PyEval_SetTrace(builtin_func, pyfn), using the
+ * Python callable as the object in PyEval_SetTrace. sys.gettrace() then
+ * simply returns the Python object used as the second argument to
+ * PyEval_SetTrace, which in our case is this CTracer instance. That means
+ * it must be callable if it is going to be passed back into sys.settrace().
+ *
+ * So we make ourselves callable; calling us invokes our C trace function.
+ *
+ * To help with the process of replaying stored frames, this function has an
+ * optional keyword argument:
+ *
+ * def CTracer_call(frame, event, arg, lineno=0)
+ *
+ * If provided, the lineno argument is used as the line number, and the
+ * frame's f_lineno member is ignored.
+ */
+static PyObject *
+CTracer_call(CTracer *self, PyObject *args, PyObject *kwds)
+{
+ PyFrameObject *frame;
+ PyObject *what_str;
+ PyObject *arg;
+ int lineno = 0;
+ int what;
+ int orig_lineno;
+ PyObject *ret = NULL;
+ PyObject * ascii = NULL;
+
+ #if DO_NOTHING
+ CRASH
+ #endif
+
+ static char *what_names[] = {
+ "call", "exception", "line", "return",
+ "c_call", "c_exception", "c_return",
+ NULL
+ };
+
+ static char *kwlist[] = {"frame", "event", "arg", "lineno", NULL};
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "O!O!O|i:Tracer_call", kwlist,
+ &PyFrame_Type, &frame, &MyText_Type, &what_str, &arg, &lineno)) {
+ goto done;
+ }
+
+ /* In Python, the what argument is a string, we need to find an int
+ for the C function. */
+ for (what = 0; what_names[what]; what++) {
+ int should_break;
+ ascii = MyText_AS_BYTES(what_str);
+ should_break = !strcmp(MyBytes_AS_STRING(ascii), what_names[what]);
+ Py_DECREF(ascii);
+ if (should_break) {
+ break;
+ }
+ }
+
+ #if WHAT_LOG
+ ascii = MyText_AS_BYTES(frame->f_code->co_filename);
+ printf("pytrace: %s @ %s %d\n", what_sym[what], MyBytes_AS_STRING(ascii), frame->f_lineno);
+ Py_DECREF(ascii);
+ #endif
+
+ /* Save off the frame's lineno, and use the forced one, if provided. */
+ orig_lineno = frame->f_lineno;
+ if (lineno > 0) {
+ frame->f_lineno = lineno;
+ }
+
+ /* Invoke the C function, and return ourselves. */
+ if (CTracer_trace(self, frame, what, arg) == RET_OK) {
+ Py_INCREF(self);
+ ret = (PyObject *)self;
+ }
+
+ /* Clean up. */
+ frame->f_lineno = orig_lineno;
+
+ /* For better speed, install ourselves the C way so that future calls go
+ directly to CTracer_trace, without this intermediate function.
+
+ Only do this if this is a CALL event, since new trace functions only
+ take effect then. If we don't condition it on CALL, then we'll clobber
+ the new trace function before it has a chance to get called. To
+ understand why, there are three internal values to track: frame.f_trace,
+ c_tracefunc, and c_traceobj. They are explained here:
+ https://nedbatchelder.com/text/trace-function.html
+
+ Without the conditional on PyTrace_CALL, this is what happens:
+
+ def func(): # f_trace c_tracefunc c_traceobj
+ # -------------- -------------- --------------
+ # CTracer CTracer.trace CTracer
+ sys.settrace(my_func)
+ # CTracer trampoline my_func
+ # Now Python calls trampoline(CTracer), which calls this function
+ # which calls PyEval_SetTrace below, setting us as the tracer again:
+ # CTracer CTracer.trace CTracer
+ # and it's as if the settrace never happened.
+ */
+ if (what == PyTrace_CALL) {
+ PyEval_SetTrace((Py_tracefunc)CTracer_trace, (PyObject*)self);
+ }
+
+done:
+ return ret;
+}
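
Because the tracer object is itself callable, it can be installed with plain sys.settrace() and it is exactly what sys.gettrace() hands back; the first CALL event then re-registers the fast C entry point as described above. A hedged sketch, assuming the compiled extension and deliberately making no Python-level calls while the tracer is installed (a real tracer needs its callbacks configured first):

# Sketch: install the tracer the slow, Python-level way.
import sys
from coverage import tracer

t = tracer.CTracer()
sys.settrace(t)               # legal because CTracer instances are callable
installed = sys.gettrace()    # hands back the very same object
sys.settrace(None)
print(installed is t)         # True
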
+
+static PyObject *
+CTracer_start(CTracer *self, PyObject *args_unused)
+{
+ PyEval_SetTrace((Py_tracefunc)CTracer_trace, (PyObject*)self);
+ self->started = TRUE;
+ self->tracing_arcs = self->trace_arcs && PyObject_IsTrue(self->trace_arcs);
+
+ /* start() returns a trace function usable with sys.settrace() */
+ Py_INCREF(self);
+ return (PyObject *)self;
+}
+
+static PyObject *
+CTracer_stop(CTracer *self, PyObject *args_unused)
+{
+ if (self->started) {
+ /* Set the started flag only. The actual call to
+ PyEval_SetTrace(NULL, NULL) is delegated to the callback
+ itself to ensure that it is called from the right thread.
+ */
+ self->started = FALSE;
+ }
+
+ Py_RETURN_NONE;
+}
+
+static PyObject *
+CTracer_activity(CTracer *self, PyObject *args_unused)
+{
+ if (self->activity) {
+ Py_RETURN_TRUE;
+ }
+ else {
+ Py_RETURN_FALSE;
+ }
+}
+
+static PyObject *
+CTracer_reset_activity(CTracer *self, PyObject *args_unused)
+{
+ self->activity = FALSE;
+ Py_RETURN_NONE;
+}
+
+static PyObject *
+CTracer_get_stats(CTracer *self, PyObject *args_unused)
+{
+#if COLLECT_STATS
+ return Py_BuildValue(
+ "{sI,sI,sI,sI,sI,sI,sI,sI,si,sI,sI,sI}",
+ "calls", self->stats.calls,
+ "lines", self->stats.lines,
+ "returns", self->stats.returns,
+ "exceptions", self->stats.exceptions,
+ "others", self->stats.others,
+ "files", self->stats.files,
+ "missed_returns", self->stats.missed_returns,
+ "stack_reallocs", self->stats.stack_reallocs,
+ "stack_alloc", self->pdata_stack->alloc,
+ "errors", self->stats.errors,
+ "pycalls", self->stats.pycalls,
+ "start_context_calls", self->stats.start_context_calls
+ );
+#else
+ Py_RETURN_NONE;
+#endif /* COLLECT_STATS */
+}
+
+static PyMemberDef
+CTracer_members[] = {
+ { "should_trace", T_OBJECT, offsetof(CTracer, should_trace), 0,
+ PyDoc_STR("Function indicating whether to trace a file.") },
+
+ { "check_include", T_OBJECT, offsetof(CTracer, check_include), 0,
+ PyDoc_STR("Function indicating whether to include a file.") },
+
+ { "warn", T_OBJECT, offsetof(CTracer, warn), 0,
+ PyDoc_STR("Function for issuing warnings.") },
+
+ { "concur_id_func", T_OBJECT, offsetof(CTracer, concur_id_func), 0,
+ PyDoc_STR("Function for determining concurrency context") },
+
+ { "data", T_OBJECT, offsetof(CTracer, data), 0,
+ PyDoc_STR("The raw dictionary of trace data.") },
+
+ { "file_tracers", T_OBJECT, offsetof(CTracer, file_tracers), 0,
+ PyDoc_STR("Mapping from file name to plugin name.") },
+
+ { "should_trace_cache", T_OBJECT, offsetof(CTracer, should_trace_cache), 0,
+ PyDoc_STR("Dictionary caching should_trace results.") },
+
+ { "trace_arcs", T_OBJECT, offsetof(CTracer, trace_arcs), 0,
+ PyDoc_STR("Should we trace arcs, or just lines?") },
+
+ { "should_start_context", T_OBJECT, offsetof(CTracer, should_start_context), 0,
+ PyDoc_STR("Function for starting contexts.") },
+
+ { "switch_context", T_OBJECT, offsetof(CTracer, switch_context), 0,
+ PyDoc_STR("Function for switching to a new context.") },
+
+ { NULL }
+};
+
+static PyMethodDef
+CTracer_methods[] = {
+ { "start", (PyCFunction) CTracer_start, METH_VARARGS,
+ PyDoc_STR("Start the tracer") },
+
+ { "stop", (PyCFunction) CTracer_stop, METH_VARARGS,
+ PyDoc_STR("Stop the tracer") },
+
+ { "get_stats", (PyCFunction) CTracer_get_stats, METH_VARARGS,
+ PyDoc_STR("Get statistics about the tracing") },
+
+ { "activity", (PyCFunction) CTracer_activity, METH_VARARGS,
+ PyDoc_STR("Has there been any activity?") },
+
+ { "reset_activity", (PyCFunction) CTracer_reset_activity, METH_VARARGS,
+ PyDoc_STR("Reset the activity flag") },
+
+ { NULL }
+};
+
+PyTypeObject
+CTracerType = {
+ MyType_HEAD_INIT
+ "coverage.CTracer", /*tp_name*/
+ sizeof(CTracer), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ (destructor)CTracer_dealloc, /*tp_dealloc*/
+ 0, /*tp_print*/
+ 0, /*tp_getattr*/
+ 0, /*tp_setattr*/
+ 0, /*tp_compare*/
+ 0, /*tp_repr*/
+ 0, /*tp_as_number*/
+ 0, /*tp_as_sequence*/
+ 0, /*tp_as_mapping*/
+ 0, /*tp_hash */
+ (ternaryfunc)CTracer_call, /*tp_call*/
+ 0, /*tp_str*/
+ 0, /*tp_getattro*/
+ 0, /*tp_setattro*/
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
+ "CTracer objects", /* tp_doc */
+ 0, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
+ CTracer_methods, /* tp_methods */
+ CTracer_members, /* tp_members */
+ 0, /* tp_getset */
+ 0, /* tp_base */
+ 0, /* tp_dict */
+ 0, /* tp_descr_get */
+ 0, /* tp_descr_set */
+ 0, /* tp_dictoffset */
+ (initproc)CTracer_init, /* tp_init */
+ 0, /* tp_alloc */
+ 0, /* tp_new */
+};
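
Taken together, the member and method tables above are the whole interface the Python-side Collector uses: it assigns the callbacks and dictionaries, then calls start() and stop(). Below is a minimal, heavily simplified end-to-end sketch; it assumes the compiled extension, the stand-in callbacks omit plugins, contexts and concurrency, and real measurement goes through coverage.Coverage() rather than this.

# Sketch: driving CTracer directly, roughly what coverage's Collector does.
from coverage import tracer

def make_disposition(filename):
    disp = tracer.CFileDisposition()
    disp.original_filename = filename
    disp.canonical_filename = filename
    disp.source_filename = filename          # trace and record under this name
    disp.trace = True
    disp.reason = ""
    disp.file_tracer = None                  # no plugin
    disp.has_dynamic_filename = False
    return disp

t = tracer.CTracer()
t.data = {}                                  # filename -> {line or arc: None}
t.file_tracers = {}
t.should_trace_cache = {}
t.should_trace = lambda filename, frame: make_disposition(filename)
t.check_include = lambda filename, frame: True
t.warn = lambda msg: None
t.concur_id_func = None                      # no concurrency support
t.trace_arcs = False                         # record lines, not arcs
t.should_start_context = None                # no dynamic contexts
t.switch_context = None

def demo():
    x = 1
    return x

t.start()
demo()
t.stop()
print(t.data)    # one entry for this file, mapping executed line numbers to None
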
diff --git a/third_party/python/coverage/coverage/ctracer/tracer.h b/third_party/python/coverage/coverage/ctracer/tracer.h
new file mode 100644
index 0000000000..a83742ddf3
--- /dev/null
+++ b/third_party/python/coverage/coverage/ctracer/tracer.h
@@ -0,0 +1,74 @@
+/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */
+/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */
+
+#ifndef _COVERAGE_TRACER_H
+#define _COVERAGE_TRACER_H
+
+#include "util.h"
+#include "structmember.h"
+#include "frameobject.h"
+#include "opcode.h"
+
+#include "datastack.h"
+
+/* The CTracer type. */
+
+typedef struct CTracer {
+ PyObject_HEAD
+
+ /* Python objects manipulated directly by the Collector class. */
+ PyObject * should_trace;
+ PyObject * check_include;
+ PyObject * warn;
+ PyObject * concur_id_func;
+ PyObject * data;
+ PyObject * file_tracers;
+ PyObject * should_trace_cache;
+ PyObject * trace_arcs;
+ PyObject * should_start_context;
+ PyObject * switch_context;
+
+ /* Has the tracer been started? */
+ BOOL started;
+ /* Are we tracing arcs, or just lines? */
+ BOOL tracing_arcs;
+ /* Have we had any activity? */
+ BOOL activity;
+ /* The current dynamic context. */
+ PyObject * context;
+
+ /*
+ The data stack is a stack of dictionaries. Each dictionary collects
+ data for a single source file. The data stack parallels the call stack:
+ each call pushes the new frame's file data onto the data stack, and each
+ return pops file data off.
+
+ The file data is a dictionary whose form depends on the tracing options.
+ If tracing arcs, the keys are line number pairs. If not tracing arcs,
+ the keys are line numbers. In both cases, the value is irrelevant
+ (None).
+ */
+
+ DataStack data_stack; /* Used if we aren't doing concurrency. */
+
+ PyObject * data_stack_index; /* Used if we are doing concurrency. */
+ DataStack * data_stacks;
+ int data_stacks_alloc;
+ int data_stacks_used;
+ DataStack * pdata_stack;
+
+ /* The current file's data stack entry. */
+ DataStackEntry * pcur_entry;
+
+ /* The parent frame for the last exception event, to fix missing returns. */
+ PyFrameObject * last_exc_back;
+ int last_exc_firstlineno;
+
+ Stats stats;
+} CTracer;
+
+int CTracer_intern_strings(void);
+
+extern PyTypeObject CTracerType;
+
+#endif /* _COVERAGE_TRACER_H */
diff --git a/third_party/python/coverage/coverage/ctracer/util.h b/third_party/python/coverage/coverage/ctracer/util.h
new file mode 100644
index 0000000000..5cba9b3096
--- /dev/null
+++ b/third_party/python/coverage/coverage/ctracer/util.h
@@ -0,0 +1,67 @@
+/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */
+/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */
+
+#ifndef _COVERAGE_UTIL_H
+#define _COVERAGE_UTIL_H
+
+#include <Python.h>
+
+/* Compile-time debugging helpers */
+#undef WHAT_LOG /* Define to log the WHAT params in the trace function. */
+#undef TRACE_LOG /* Define to log our bookkeeping. */
+#undef COLLECT_STATS /* Collect counters: stats are printed when tracer is stopped. */
+#undef DO_NOTHING /* Define this to make the tracer do nothing. */
+
+/* Py 2.x and 3.x compatibility */
+
+#if PY_MAJOR_VERSION >= 3
+
+#define MyText_Type PyUnicode_Type
+#define MyText_AS_BYTES(o) PyUnicode_AsASCIIString(o)
+#define MyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o)
+#define MyBytes_AS_STRING(o) PyBytes_AS_STRING(o)
+#define MyText_AsString(o) PyUnicode_AsUTF8(o)
+#define MyText_FromFormat PyUnicode_FromFormat
+#define MyInt_FromInt(i) PyLong_FromLong((long)i)
+#define MyInt_AsInt(o) (int)PyLong_AsLong(o)
+#define MyText_InternFromString(s) PyUnicode_InternFromString(s)
+
+#define MyType_HEAD_INIT PyVarObject_HEAD_INIT(NULL, 0)
+
+#else
+
+#define MyText_Type PyString_Type
+#define MyText_AS_BYTES(o) (Py_INCREF(o), o)
+#define MyBytes_GET_SIZE(o) PyString_GET_SIZE(o)
+#define MyBytes_AS_STRING(o) PyString_AS_STRING(o)
+#define MyText_AsString(o) PyString_AsString(o)
+#define MyText_FromFormat PyUnicode_FromFormat
+#define MyInt_FromInt(i) PyInt_FromLong((long)i)
+#define MyInt_AsInt(o) (int)PyInt_AsLong(o)
+#define MyText_InternFromString(s) PyString_InternFromString(s)
+
+#define MyType_HEAD_INIT PyObject_HEAD_INIT(NULL) 0,
+
+#endif /* Py3k */
+
+/* Undocumented, and not in all 2.7.x, so our own copy of it. */
+#define My_XSETREF(op, op2) \
+ do { \
+ PyObject *_py_tmp = (PyObject *)(op); \
+ (op) = (op2); \
+ Py_XDECREF(_py_tmp); \
+ } while (0)
+
+/* The values returned to indicate ok or error. */
+#define RET_OK 0
+#define RET_ERROR -1
+
+/* Nicer booleans */
+typedef int BOOL;
+#define FALSE 0
+#define TRUE 1
+
+/* Only for extreme machete-mode debugging! */
+#define CRASH { printf("*** CRASH! ***\n"); *((int*)1) = 1; }
+
+#endif /* _COVERAGE_UTIL_H */
diff --git a/third_party/python/coverage/coverage/data.py b/third_party/python/coverage/coverage/data.py
new file mode 100644
index 0000000000..82bf1d41c1
--- /dev/null
+++ b/third_party/python/coverage/coverage/data.py
@@ -0,0 +1,124 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Coverage data for coverage.py.
+
+This file had the 4.x JSON data support, which is now gone. This file still
+has storage-agnostic helpers, and is kept to avoid changing too many imports.
+CoverageData is now defined in sqldata.py, and imported here to keep the
+imports working.
+
+"""
+
+import glob
+import os.path
+
+from coverage.misc import CoverageException, file_be_gone
+from coverage.sqldata import CoverageData
+
+
+def line_counts(data, fullpath=False):
+ """Return a dict summarizing the line coverage data.
+
+ Keys are based on the file names, and values are the number of executed
+ lines. If `fullpath` is true, then the keys are the full pathnames of
+ the files, otherwise they are the basenames of the files.
+
+ Returns a dict mapping file names to counts of lines.
+
+ """
+ summ = {}
+ if fullpath:
+ filename_fn = lambda f: f
+ else:
+ filename_fn = os.path.basename
+ for filename in data.measured_files():
+ summ[filename_fn(filename)] = len(data.lines(filename))
+ return summ
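
A typical use of line_counts() is summarizing an existing measurement. A hedged sketch, assuming a project that already has a .coverage data file in the current directory:

# Sketch: summarize a finished run with line_counts().
import coverage
from coverage.data import line_counts

cov = coverage.Coverage()
cov.load()                                   # read the existing .coverage file
for name, count in sorted(line_counts(cov.get_data()).items()):
    print("%-30s %4d executed lines" % (name, count))
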
+
+
+def add_data_to_hash(data, filename, hasher):
+ """Contribute `filename`'s data to the `hasher`.
+
+ `hasher` is a `coverage.misc.Hasher` instance to be updated with
+ the file's data. It should only get the results data, not the run
+ data.
+
+ """
+ if data.has_arcs():
+ hasher.update(sorted(data.arcs(filename) or []))
+ else:
+ hasher.update(sorted(data.lines(filename) or []))
+ hasher.update(data.file_tracer(filename))
+
+
+def combine_parallel_data(data, aliases=None, data_paths=None, strict=False):
+ """Combine a number of data files together.
+
+ Treat `data.filename` as a file prefix, and combine the data from all
+ of the data files starting with that prefix plus a dot.
+
+ If `aliases` is provided, it's a `PathAliases` object that is used to
+ re-map paths to match the local machine's.
+
+ If `data_paths` is provided, it is a list of directories or files to
+ combine. Directories are searched for files that start with
+ `data.filename` plus dot as a prefix, and those files are combined.
+
+ If `data_paths` is not provided, then the directory portion of
+ `data.filename` is used as the directory to search for data files.
+
+ Every data file found and combined is then deleted from disk. If a file
+ cannot be read, a warning will be issued, and the file will not be
+ deleted.
+
+ If `strict` is true, and no files are found to combine, an error is
+ raised.
+
+ """
+ # Because of the os.path.abspath in the constructor, data_dir will
+ # never be an empty string.
+ data_dir, local = os.path.split(data.base_filename())
+ localdot = local + '.*'
+
+ data_paths = data_paths or [data_dir]
+ files_to_combine = []
+ for p in data_paths:
+ if os.path.isfile(p):
+ files_to_combine.append(os.path.abspath(p))
+ elif os.path.isdir(p):
+ pattern = os.path.join(os.path.abspath(p), localdot)
+ files_to_combine.extend(glob.glob(pattern))
+ else:
+ raise CoverageException("Couldn't combine from non-existent path '%s'" % (p,))
+
+ if strict and not files_to_combine:
+ raise CoverageException("No data to combine")
+
+ files_combined = 0
+ for f in files_to_combine:
+ if f == data.data_filename():
+ # Sometimes we are combining into a file which is one of the
+ # parallel files. Skip that file.
+ if data._debug.should('dataio'):
+ data._debug.write("Skipping combining ourself: %r" % (f,))
+ continue
+ if data._debug.should('dataio'):
+ data._debug.write("Combining data file %r" % (f,))
+ try:
+ new_data = CoverageData(f, debug=data._debug)
+ new_data.read()
+ except CoverageException as exc:
+ if data._warn:
+ # The CoverageException has the file name in it, so just
+ # use the message as the warning.
+ data._warn(str(exc))
+ else:
+ data.update(new_data, aliases=aliases)
+ files_combined += 1
+ if data._debug.should('dataio'):
+ data._debug.write("Deleting combined data file %r" % (f,))
+ file_be_gone(f)
+
+ if strict and not files_combined:
+ raise CoverageException("No usable data files")
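
This is the machinery behind "coverage combine". Calling it directly looks roughly like the sketch below; the paths are hypothetical, and as the docstring says, every data file successfully combined is deleted afterwards.

# Sketch: combining parallel data files by hand (normally `coverage combine`).
from coverage.data import CoverageData, combine_parallel_data

data = CoverageData(".coverage")             # the combined results land here
combine_parallel_data(
    data,
    data_paths=["run1/.coverage.host.1234.567", "run2/"],  # files or directories
    strict=False,                            # don't raise if nothing matched
)
print(sorted(data.measured_files()))
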
diff --git a/third_party/python/coverage/coverage/debug.py b/third_party/python/coverage/coverage/debug.py
new file mode 100644
index 0000000000..194f16f50d
--- /dev/null
+++ b/third_party/python/coverage/coverage/debug.py
@@ -0,0 +1,406 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Control of and utilities for debugging."""
+
+import contextlib
+import functools
+import inspect
+import itertools
+import os
+import pprint
+import sys
+try:
+ import _thread
+except ImportError:
+ import thread as _thread
+
+from coverage.backward import reprlib, StringIO
+from coverage.misc import isolate_module
+
+os = isolate_module(os)
+
+
+# When debugging, it can be helpful to force some options, especially when
+# debugging the configuration mechanisms you usually use to control debugging!
+# This is a list of forced debugging options.
+FORCED_DEBUG = []
+FORCED_DEBUG_FILE = None
+
+
+class DebugControl(object):
+ """Control and output for debugging."""
+
+ show_repr_attr = False # For SimpleReprMixin
+
+ def __init__(self, options, output):
+ """Configure the options and output file for debugging."""
+ self.options = list(options) + FORCED_DEBUG
+ self.suppress_callers = False
+
+ filters = []
+ if self.should('pid'):
+ filters.append(add_pid_and_tid)
+ self.output = DebugOutputFile.get_one(
+ output,
+ show_process=self.should('process'),
+ filters=filters,
+ )
+ self.raw_output = self.output.outfile
+
+ def __repr__(self):
+ return "<DebugControl options=%r raw_output=%r>" % (self.options, self.raw_output)
+
+ def should(self, option):
+ """Decide whether to output debug information in category `option`."""
+ if option == "callers" and self.suppress_callers:
+ return False
+ return (option in self.options)
+
+ @contextlib.contextmanager
+ def without_callers(self):
+ """A context manager to prevent call stacks from being logged."""
+ old = self.suppress_callers
+ self.suppress_callers = True
+ try:
+ yield
+ finally:
+ self.suppress_callers = old
+
+ def write(self, msg):
+ """Write a line of debug output.
+
+ `msg` is the line to write. A newline will be appended.
+
+ """
+ self.output.write(msg+"\n")
+ if self.should('self'):
+ caller_self = inspect.stack()[1][0].f_locals.get('self')
+ if caller_self is not None:
+ self.output.write("self: {!r}\n".format(caller_self))
+ if self.should('callers'):
+ dump_stack_frames(out=self.output, skip=1)
+ self.output.flush()
+
+
+class DebugControlString(DebugControl):
+ """A `DebugControl` that writes to a StringIO, for testing."""
+ def __init__(self, options):
+ super(DebugControlString, self).__init__(options, StringIO())
+
+ def get_output(self):
+ """Get the output text from the `DebugControl`."""
+ return self.raw_output.getvalue()
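
DebugControlString is the handle the test suite uses: same behavior as DebugControl, but everything lands in a StringIO that can be inspected afterwards. A small usage sketch:

# Sketch: capture debug output in memory, as the tests do.
from coverage.debug import DebugControlString

debug = DebugControlString(options=["dataio", "pid"])
if debug.should("dataio"):
    debug.write("combining data files")
print(debug.get_output())        # the line above, prefixed with pid.tid
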
+
+
+class NoDebugging(object):
+ """A replacement for DebugControl that will never try to do anything."""
+ def should(self, option): # pylint: disable=unused-argument
+ """Should we write debug messages? Never."""
+ return False
+
+
+def info_header(label):
+ """Make a nice header string."""
+ return "--{:-<60s}".format(" "+label+" ")
+
+
+def info_formatter(info):
+ """Produce a sequence of formatted lines from info.
+
+ `info` is a sequence of pairs (label, data). The produced lines are
+ nicely formatted, ready to print.
+
+ """
+ info = list(info)
+ if not info:
+ return
+ label_len = 30
+ assert all(len(l) < label_len for l, _ in info)
+ for label, data in info:
+ if data == []:
+ data = "-none-"
+ if isinstance(data, (list, set, tuple)):
+ prefix = "%*s:" % (label_len, label)
+ for e in data:
+ yield "%*s %s" % (label_len+1, prefix, e)
+ prefix = ""
+ else:
+ yield "%*s: %s" % (label_len, label, data)
+
+
+def write_formatted_info(writer, header, info):
+ """Write a sequence of (label,data) pairs nicely."""
+ writer.write(info_header(header))
+ for line in info_formatter(info):
+ writer.write(" %s" % line)
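
These two helpers produce the kind of aligned label/value listing printed by "coverage debug sys". A sketch with a minimal writer and example values:

# Sketch: the formatted-info helpers with a minimal writer.
import sys
from coverage.debug import write_formatted_info

class LineWriter(object):
    """Anything with a write(line) method will do; this one prints."""
    def write(self, line):
        sys.stdout.write(line + "\n")

write_formatted_info(LineWriter(), "versions", [
    ("python", sys.version.split()[0]),
    ("platform", sys.platform),
    ("plugins", ["example_plugin", "another_plugin"]),   # lists get one line each
])
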
+
+
+def short_stack(limit=None, skip=0):
+ """Return a string summarizing the call stack.
+
+ The string is multi-line, with one line per stack frame. Each line shows
+ the function name, the file name, and the line number:
+
+ ...
+ start_import_stop : /Users/ned/coverage/trunk/tests/coveragetest.py @95
+ import_local_file : /Users/ned/coverage/trunk/tests/coveragetest.py @81
+ import_local_file : /Users/ned/coverage/trunk/coverage/backward.py @159
+ ...
+
+ `limit` is the number of frames to include, defaulting to all of them.
+
+ `skip` is the number of frames to skip, so that debugging functions can
+ call this and not be included in the result.
+
+ """
+ stack = inspect.stack()[limit:skip:-1]
+ return "\n".join("%30s : %s:%d" % (t[3], t[1], t[2]) for t in stack)
+
+
+def dump_stack_frames(limit=None, out=None, skip=0):
+ """Print a summary of the stack to stdout, or someplace else."""
+ out = out or sys.stdout
+ out.write(short_stack(limit=limit, skip=skip+1))
+ out.write("\n")
+
+
+def clipped_repr(text, numchars=50):
+ """`repr(text)`, but limited to `numchars`."""
+ r = reprlib.Repr()
+ r.maxstring = numchars
+ return r.repr(text)
+
+
+def short_id(id64):
+ """Given a 64-bit id, make a shorter 16-bit one."""
+ id16 = 0
+ for offset in range(0, 64, 16):
+ id16 ^= id64 >> offset
+ return id16 & 0xFFFF
+
+
+def add_pid_and_tid(text):
+ """A filter to add pid and tid to debug messages."""
+ # Thread ids are useful, but too long. Make a shorter one.
+ tid = "{:04x}".format(short_id(_thread.get_ident()))
+ text = "{:5d}.{}: {}".format(os.getpid(), tid, text)
+ return text
+
+
+class SimpleReprMixin(object):
+ """A mixin implementing a simple __repr__."""
+ simple_repr_ignore = ['simple_repr_ignore', '$coverage.object_id']
+
+ def __repr__(self):
+ show_attrs = (
+ (k, v) for k, v in self.__dict__.items()
+ if getattr(v, "show_repr_attr", True)
+ and not callable(v)
+ and k not in self.simple_repr_ignore
+ )
+ return "<{klass} @0x{id:x} {attrs}>".format(
+ klass=self.__class__.__name__,
+ id=id(self),
+ attrs=" ".join("{}={!r}".format(k, v) for k, v in show_attrs),
+ )
+
+
+def simplify(v): # pragma: debugging
+ """Turn things which are nearly dict/list/etc into dict/list/etc."""
+ if isinstance(v, dict):
+ return {k:simplify(vv) for k, vv in v.items()}
+ elif isinstance(v, (list, tuple)):
+ return type(v)(simplify(vv) for vv in v)
+ elif hasattr(v, "__dict__"):
+ return simplify({'.'+k: v for k, v in v.__dict__.items()})
+ else:
+ return v
+
+
+def pp(v): # pragma: debugging
+ """Debug helper to pretty-print data, including SimpleNamespace objects."""
+ # Might not be needed in 3.9+
+ pprint.pprint(simplify(v))
+
+
+def filter_text(text, filters):
+ """Run `text` through a series of filters.
+
+ `filters` is a list of functions. Each takes a string and returns a
+ string. Each is run in turn.
+
+ Returns: the final string that results after all of the filters have
+ run.
+
+ """
+ clean_text = text.rstrip()
+ ending = text[len(clean_text):]
+ text = clean_text
+ for fn in filters:
+ lines = []
+ for line in text.splitlines():
+ lines.extend(fn(line).splitlines())
+ text = "\n".join(lines)
+ return text + ending
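
The filters are simple line-by-line string transformers; add_pid_and_tid above is one of them. For example:

# Sketch: run text through the message filters defined above.
from coverage.debug import add_pid_and_tid, filter_text

text = "combining data\nwriting report\n"
print(filter_text(text, [add_pid_and_tid]))   # each line gains a "pid.tid: " prefix
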
+
+
+class CwdTracker(object): # pragma: debugging
+ """A class to add cwd info to debug messages."""
+ def __init__(self):
+ self.cwd = None
+
+ def filter(self, text):
+ """Add a cwd message for each new cwd."""
+ cwd = os.getcwd()
+ if cwd != self.cwd:
+ text = "cwd is now {!r}\n".format(cwd) + text
+ self.cwd = cwd
+ return text
+
+
+class DebugOutputFile(object): # pragma: debugging
+ """A file-like object that includes pid and cwd information."""
+ def __init__(self, outfile, show_process, filters):
+ self.outfile = outfile
+ self.show_process = show_process
+ self.filters = list(filters)
+
+ if self.show_process:
+ self.filters.insert(0, CwdTracker().filter)
+ self.write("New process: executable: %r\n" % (sys.executable,))
+ self.write("New process: cmd: %r\n" % (getattr(sys, 'argv', None),))
+ if hasattr(os, 'getppid'):
+ self.write("New process: pid: %r, parent pid: %r\n" % (os.getpid(), os.getppid()))
+
+ SYS_MOD_NAME = '$coverage.debug.DebugOutputFile.the_one'
+
+ @classmethod
+ def get_one(cls, fileobj=None, show_process=True, filters=(), interim=False):
+ """Get a DebugOutputFile.
+
+ If `fileobj` is provided, then a new DebugOutputFile is made with it.
+
+ If `fileobj` isn't provided, then a file is chosen
+ (COVERAGE_DEBUG_FILE, or stderr), and a process-wide singleton
+ DebugOutputFile is made.
+
+ `show_process` controls whether the debug file adds process-level
+ information, and filters is a list of other message filters to apply.
+
+ `filters` are the text filters to apply to the stream to annotate with
+ pids, etc.
+
+ If `interim` is true, then a future `get_one` can replace this one.
+
+ """
+ if fileobj is not None:
+ # Make DebugOutputFile around the fileobj passed.
+ return cls(fileobj, show_process, filters)
+
+ # Because of the way igor.py deletes and re-imports modules,
+ # this class can be defined more than once. But we really want
+ # a process-wide singleton. So stash it in sys.modules instead of
+ # on a class attribute. Yes, this is aggressively gross.
+ the_one, is_interim = sys.modules.get(cls.SYS_MOD_NAME, (None, True))
+ if the_one is None or is_interim:
+ if fileobj is None:
+ debug_file_name = os.environ.get("COVERAGE_DEBUG_FILE", FORCED_DEBUG_FILE)
+ if debug_file_name:
+ fileobj = open(debug_file_name, "a")
+ else:
+ fileobj = sys.stderr
+ the_one = cls(fileobj, show_process, filters)
+ sys.modules[cls.SYS_MOD_NAME] = (the_one, interim)
+ return the_one
+
+ def write(self, text):
+ """Just like file.write, but filter through all our filters."""
+ self.outfile.write(filter_text(text, self.filters))
+ self.outfile.flush()
+
+ def flush(self):
+ """Flush our file."""
+ self.outfile.flush()
+
+
+def log(msg, stack=False): # pragma: debugging
+ """Write a log message as forcefully as possible."""
+ out = DebugOutputFile.get_one(interim=True)
+ out.write(msg+"\n")
+ if stack:
+ dump_stack_frames(out=out, skip=1)
+
+
+def decorate_methods(decorator, butnot=(), private=False): # pragma: debugging
+ """A class decorator to apply a decorator to methods."""
+ def _decorator(cls):
+ for name, meth in inspect.getmembers(cls, inspect.isroutine):
+ if name not in cls.__dict__:
+ continue
+ if name != "__init__":
+ if not private and name.startswith("_"):
+ continue
+ if name in butnot:
+ continue
+ setattr(cls, name, decorator(meth))
+ return cls
+ return _decorator
+
+
+def break_in_pudb(func): # pragma: debugging
+ """A function decorator to stop in the debugger for each call."""
+ @functools.wraps(func)
+ def _wrapper(*args, **kwargs):
+ import pudb
+ sys.stdout = sys.__stdout__
+ pudb.set_trace()
+ return func(*args, **kwargs)
+ return _wrapper
+
+
+OBJ_IDS = itertools.count()
+CALLS = itertools.count()
+OBJ_ID_ATTR = "$coverage.object_id"
+
+def show_calls(show_args=True, show_stack=False, show_return=False): # pragma: debugging
+ """A method decorator to debug-log each call to the function."""
+ def _decorator(func):
+ @functools.wraps(func)
+ def _wrapper(self, *args, **kwargs):
+ oid = getattr(self, OBJ_ID_ATTR, None)
+ if oid is None:
+ oid = "{:08d} {:04d}".format(os.getpid(), next(OBJ_IDS))
+ setattr(self, OBJ_ID_ATTR, oid)
+ extra = ""
+ if show_args:
+ eargs = ", ".join(map(repr, args))
+ ekwargs = ", ".join("{}={!r}".format(*item) for item in kwargs.items())
+ extra += "("
+ extra += eargs
+ if eargs and ekwargs:
+ extra += ", "
+ extra += ekwargs
+ extra += ")"
+ if show_stack:
+ extra += " @ "
+ extra += "; ".join(_clean_stack_line(l) for l in short_stack().splitlines())
+ callid = next(CALLS)
+ msg = "{} {:04d} {}{}\n".format(oid, callid, func.__name__, extra)
+ DebugOutputFile.get_one(interim=True).write(msg)
+ ret = func(self, *args, **kwargs)
+ if show_return:
+ msg = "{} {:04d} {} return {!r}\n".format(oid, callid, func.__name__, ret)
+ DebugOutputFile.get_one(interim=True).write(msg)
+ return ret
+ return _wrapper
+ return _decorator
+
+
+def _clean_stack_line(s): # pragma: debugging
+ """Simplify some paths in a stack trace, for compactness."""
+ s = s.strip()
+ s = s.replace(os.path.dirname(__file__) + '/', '')
+ s = s.replace(os.path.dirname(os.__file__) + '/', '')
+ s = s.replace(sys.prefix + '/', '')
+ return s
diff --git a/third_party/python/coverage/coverage/disposition.py b/third_party/python/coverage/coverage/disposition.py
new file mode 100644
index 0000000000..9b9a997d8a
--- /dev/null
+++ b/third_party/python/coverage/coverage/disposition.py
@@ -0,0 +1,37 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Simple value objects for tracking what to do with files."""
+
+
+class FileDisposition(object):
+ """A simple value type for recording what to do with a file."""
+ pass
+
+
+# FileDisposition "methods": FileDisposition is a pure value object, so it can
+# be implemented in either C or Python. Acting on them is done with these
+# functions.
+
+def disposition_init(cls, original_filename):
+ """Construct and initialize a new FileDisposition object."""
+ disp = cls()
+ disp.original_filename = original_filename
+ disp.canonical_filename = original_filename
+ disp.source_filename = None
+ disp.trace = False
+ disp.reason = ""
+ disp.file_tracer = None
+ disp.has_dynamic_filename = False
+ return disp
+
+
+def disposition_debug_msg(disp):
+ """Make a nice debug message of what the FileDisposition is doing."""
+ if disp.trace:
+ msg = "Tracing %r" % (disp.original_filename,)
+ if disp.file_tracer:
+ msg += ": will be traced by %r" % disp.file_tracer
+ else:
+ msg = "Not tracing %r: %s" % (disp.original_filename, disp.reason)
+ return msg
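
The pure-Python helpers in use; with the C extension loaded, the class passed in would be CFileDisposition instead:

# Sketch: the pure-Python disposition helpers in action.
from coverage.disposition import FileDisposition, disposition_init, disposition_debug_msg

disp = disposition_init(FileDisposition, "example.py")
disp.trace = True                      # normally decided by should_trace()
print(disposition_debug_msg(disp))     # "Tracing 'example.py'"
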
diff --git a/third_party/python/coverage/coverage/env.py b/third_party/python/coverage/coverage/env.py
new file mode 100644
index 0000000000..b5da3b4719
--- /dev/null
+++ b/third_party/python/coverage/coverage/env.py
@@ -0,0 +1,99 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Determine facts about the environment."""
+
+import os
+import platform
+import sys
+
+# Operating systems.
+WINDOWS = sys.platform == "win32"
+LINUX = sys.platform.startswith("linux")
+
+# Python versions. We amend version_info with one more value, a zero if an
+# official version, or 1 if built from source beyond an official version.
+PYVERSION = sys.version_info + (int(platform.python_version()[-1] == "+"),)
+PY2 = PYVERSION < (3, 0)
+PY3 = PYVERSION >= (3, 0)
+
+# Python implementations.
+PYPY = (platform.python_implementation() == 'PyPy')
+if PYPY:
+ PYPYVERSION = sys.pypy_version_info
+
+PYPY2 = PYPY and PY2
+PYPY3 = PYPY and PY3
+
+JYTHON = (platform.python_implementation() == 'Jython')
+IRONPYTHON = (platform.python_implementation() == 'IronPython')
+
+# Python behavior
+class PYBEHAVIOR(object):
+ """Flags indicating this Python's behavior."""
+
+ # Is "if __debug__" optimized away?
+ optimize_if_debug = (not PYPY)
+
+ # Is "if not __debug__" optimized away?
+ optimize_if_not_debug = (not PYPY) and (PYVERSION >= (3, 7, 0, 'alpha', 4))
+
+ # Is "if not __debug__" optimized away even better?
+ optimize_if_not_debug2 = (not PYPY) and (PYVERSION >= (3, 8, 0, 'beta', 1))
+
+ # Do we have yield-from?
+ yield_from = (PYVERSION >= (3, 3))
+
+ # Do we have PEP 420 namespace packages?
+ namespaces_pep420 = (PYVERSION >= (3, 3))
+
+ # Do .pyc files have the source file size recorded in them?
+ size_in_pyc = (PYVERSION >= (3, 3))
+
+ # Do we have async and await syntax?
+ async_syntax = (PYVERSION >= (3, 5))
+
+ # PEP 448 defined additional unpacking generalizations
+ unpackings_pep448 = (PYVERSION >= (3, 5))
+
+ # Can co_lnotab have negative deltas?
+ negative_lnotab = (PYVERSION >= (3, 6)) and not (PYPY and PYPYVERSION < (7, 2))
+
+ # Do .pyc files conform to PEP 552? Hash-based pyc's.
+ hashed_pyc_pep552 = (PYVERSION >= (3, 7, 0, 'alpha', 4))
+
+ # Python 3.7.0b3 changed the behavior of the sys.path[0] entry for -m. It
+ # used to be an empty string (meaning the current directory). It changed
+ # to be the actual path to the current directory, so that os.chdir wouldn't
+ # affect the outcome.
+ actual_syspath0_dash_m = (PYVERSION >= (3, 7, 0, 'beta', 3))
+
+ # When a break/continue/return statement in a try block jumps to a finally
+ # block, does the finally block do the break/continue/return (pre-3.8), or
+ # does the finally jump back to the break/continue/return (3.8) to do the
+ # work?
+ finally_jumps_back = (PYVERSION >= (3, 8))
+
+ # When a function is decorated, does the trace function get called for the
+ # @-line and also the def-line (new behavior in 3.8)? Or just the @-line
+ # (old behavior)?
+ trace_decorated_def = (PYVERSION >= (3, 8))
+
+ # Are while-true loops optimized into absolute jumps with no loop setup?
+ nix_while_true = (PYVERSION >= (3, 8))
+
+ # Python 3.9a1 made sys.argv[0] and other reported files absolute paths.
+ report_absolute_files = (PYVERSION >= (3, 9))
+
+# Coverage.py specifics.
+
+# Are we using the C-implemented trace function?
+C_TRACER = os.getenv('COVERAGE_TEST_TRACER', 'c') == 'c'
+
+# Are we coverage-measuring ourselves?
+METACOV = os.getenv('COVERAGE_COVERAGE', '') != ''
+
+# Are we running our test suite?
+# Even when running tests, you can use COVERAGE_TESTING=0 to disable the
+# test-specific behavior like contracts.
+TESTING = os.getenv('COVERAGE_TESTING', '') == 'True'
diff --git a/third_party/python/coverage/coverage/execfile.py b/third_party/python/coverage/coverage/execfile.py
new file mode 100644
index 0000000000..29409d517a
--- /dev/null
+++ b/third_party/python/coverage/coverage/execfile.py
@@ -0,0 +1,362 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Execute files of Python code."""
+
+import inspect
+import marshal
+import os
+import struct
+import sys
+import types
+
+from coverage import env
+from coverage.backward import BUILTINS
+from coverage.backward import PYC_MAGIC_NUMBER, imp, importlib_util_find_spec
+from coverage.files import canonical_filename, python_reported_file
+from coverage.misc import CoverageException, ExceptionDuringRun, NoCode, NoSource, isolate_module
+from coverage.phystokens import compile_unicode
+from coverage.python import get_python_source
+
+os = isolate_module(os)
+
+
+class DummyLoader(object):
+ """A shim for the PEP 302 __loader__, emulating pkgutil.ImpLoader.
+
+ Currently only implements the .fullname attribute
+ """
+ def __init__(self, fullname, *_args):
+ self.fullname = fullname
+
+
+if importlib_util_find_spec:
+ def find_module(modulename):
+ """Find the module named `modulename`.
+
+ Returns the file path of the module, the name of the enclosing
+ package, and the spec.
+ """
+ try:
+ spec = importlib_util_find_spec(modulename)
+ except ImportError as err:
+ raise NoSource(str(err))
+ if not spec:
+ raise NoSource("No module named %r" % (modulename,))
+ pathname = spec.origin
+ packagename = spec.name
+ if spec.submodule_search_locations:
+ mod_main = modulename + ".__main__"
+ spec = importlib_util_find_spec(mod_main)
+ if not spec:
+ raise NoSource(
+ "No module named %s; "
+ "%r is a package and cannot be directly executed"
+ % (mod_main, modulename)
+ )
+ pathname = spec.origin
+ packagename = spec.name
+ packagename = packagename.rpartition(".")[0]
+ return pathname, packagename, spec
+else:
+ def find_module(modulename):
+ """Find the module named `modulename`.
+
+ Returns the file path of the module, the name of the enclosing
+ package, and None (where a spec would have been).
+ """
+ openfile = None
+ glo, loc = globals(), locals()
+ try:
+ # Search for the module - inside its parent package, if any - using
+ # standard import mechanics.
+ if '.' in modulename:
+ packagename, name = modulename.rsplit('.', 1)
+ package = __import__(packagename, glo, loc, ['__path__'])
+ searchpath = package.__path__
+ else:
+ packagename, name = None, modulename
+ searchpath = None # "top-level search" in imp.find_module()
+ openfile, pathname, _ = imp.find_module(name, searchpath)
+
+ # Complain if this is a magic non-file module.
+ if openfile is None and pathname is None:
+ raise NoSource(
+ "module does not live in a file: %r" % modulename
+ )
+
+ # If `modulename` is actually a package, not a mere module, then we
+ # pretend to be Python 2.7 and try running its __main__.py script.
+ if openfile is None:
+ packagename = modulename
+ name = '__main__'
+ package = __import__(packagename, glo, loc, ['__path__'])
+ searchpath = package.__path__
+ openfile, pathname, _ = imp.find_module(name, searchpath)
+ except ImportError as err:
+ raise NoSource(str(err))
+ finally:
+ if openfile:
+ openfile.close()
+
+ return pathname, packagename, None
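+
+
+# A minimal sketch of how either find_module variant above might be used
+# (illustrative module name; not part of the upstream file):
+#
+#     pathname, packagename, spec = find_module("json.tool")
+#     # pathname    -> path to the stdlib json/tool.py
+#     # packagename -> "json"
+#     # spec        -> a ModuleSpec when importlib is available, else None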
+
+
+class PyRunner(object):
+ """Multi-stage execution of Python code.
+
+ This is meant to emulate real Python execution as closely as possible.
+
+ """
+ def __init__(self, args, as_module=False):
+ self.args = args
+ self.as_module = as_module
+
+ self.arg0 = args[0]
+ self.package = self.modulename = self.pathname = self.loader = self.spec = None
+
+ def prepare(self):
+ """Set sys.path properly.
+
+ This needs to happen before any importing, and without importing anything.
+ """
+ if self.as_module:
+ if env.PYBEHAVIOR.actual_syspath0_dash_m:
+ path0 = os.getcwd()
+ else:
+ path0 = ""
+ elif os.path.isdir(self.arg0):
+ # Running a directory means running the __main__.py file in that
+ # directory.
+ path0 = self.arg0
+ else:
+ path0 = os.path.abspath(os.path.dirname(self.arg0))
+
+ if os.path.isdir(sys.path[0]):
+ # sys.path fakery. If we are being run as a command, then sys.path[0]
+ # is the directory of the "coverage" script. If this is so, replace
+ # sys.path[0] with the directory of the file we're running, or the
+ # current directory when running modules. If it isn't so, then we
+ # don't know what's going on, and just leave it alone.
+ top_file = inspect.stack()[-1][0].f_code.co_filename
+ sys_path_0_abs = os.path.abspath(sys.path[0])
+ top_file_dir_abs = os.path.abspath(os.path.dirname(top_file))
+ sys_path_0_abs = canonical_filename(sys_path_0_abs)
+ top_file_dir_abs = canonical_filename(top_file_dir_abs)
+ if sys_path_0_abs != top_file_dir_abs:
+ path0 = None
+
+ else:
+ # sys.path[0] is a file. Is the next entry the directory containing
+ # that file?
+ if sys.path[1] == os.path.dirname(sys.path[0]):
+ # Can it be right to always remove that?
+ del sys.path[1]
+
+ if path0 is not None:
+ sys.path[0] = python_reported_file(path0)
+
+ def _prepare2(self):
+ """Do more preparation to run Python code.
+
+ Includes finding the module to run and adjusting sys.argv[0].
+ This method is allowed to import code.
+
+ """
+ if self.as_module:
+ self.modulename = self.arg0
+ pathname, self.package, self.spec = find_module(self.modulename)
+ if self.spec is not None:
+ self.modulename = self.spec.name
+ self.loader = DummyLoader(self.modulename)
+ self.pathname = os.path.abspath(pathname)
+ self.args[0] = self.arg0 = self.pathname
+ elif os.path.isdir(self.arg0):
+ # Running a directory means running the __main__.py file in that
+ # directory.
+ for ext in [".py", ".pyc", ".pyo"]:
+ try_filename = os.path.join(self.arg0, "__main__" + ext)
+ if os.path.exists(try_filename):
+ self.arg0 = try_filename
+ break
+ else:
+ raise NoSource("Can't find '__main__' module in '%s'" % self.arg0)
+
+ if env.PY2:
+ self.arg0 = os.path.abspath(self.arg0)
+
+ # Make a spec. I don't know if this is the right way to do it.
+ try:
+ import importlib.machinery
+ except ImportError:
+ pass
+ else:
+ try_filename = python_reported_file(try_filename)
+ self.spec = importlib.machinery.ModuleSpec("__main__", None, origin=try_filename)
+ self.spec.has_location = True
+ self.package = ""
+ self.loader = DummyLoader("__main__")
+ else:
+ if env.PY3:
+ self.loader = DummyLoader("__main__")
+
+ self.arg0 = python_reported_file(self.arg0)
+
+ def run(self):
+ """Run the Python code!"""
+
+ self._prepare2()
+
+ # Create a module to serve as __main__
+ main_mod = types.ModuleType('__main__')
+
+ from_pyc = self.arg0.endswith((".pyc", ".pyo"))
+ main_mod.__file__ = self.arg0
+ if from_pyc:
+ main_mod.__file__ = main_mod.__file__[:-1]
+ if self.package is not None:
+ main_mod.__package__ = self.package
+ main_mod.__loader__ = self.loader
+ if self.spec is not None:
+ main_mod.__spec__ = self.spec
+
+ main_mod.__builtins__ = BUILTINS
+
+ sys.modules['__main__'] = main_mod
+
+ # Set sys.argv properly.
+ sys.argv = self.args
+
+ try:
+ # Make a code object somehow.
+ if from_pyc:
+ code = make_code_from_pyc(self.arg0)
+ else:
+ code = make_code_from_py(self.arg0)
+ except CoverageException:
+ raise
+ except Exception as exc:
+ msg = "Couldn't run '{filename}' as Python code: {exc.__class__.__name__}: {exc}"
+ raise CoverageException(msg.format(filename=self.arg0, exc=exc))
+
+ # Execute the code object.
+ # Return to the original directory in case the test code exits in
+ # a non-existent directory.
+ cwd = os.getcwd()
+ try:
+ exec(code, main_mod.__dict__)
+ except SystemExit: # pylint: disable=try-except-raise
+ # The user called sys.exit(). Just pass it along to the upper
+ # layers, where it will be handled.
+ raise
+ except Exception:
+ # Something went wrong while executing the user code.
+ # Get the exc_info, and pack them into an exception that we can
+ # throw up to the outer loop. We peel one layer off the traceback
+ # so that the coverage.py code doesn't appear in the final printed
+ # traceback.
+ typ, err, tb = sys.exc_info()
+
+ # PyPy3 weirdness. If I don't access __context__, then somehow it
+ # is non-None when the exception is reported at the upper layer,
+ # and a nested exception is shown to the user. This getattr fixes
+ # it somehow? https://bitbucket.org/pypy/pypy/issue/1903
+ getattr(err, '__context__', None)
+
+ # Call the excepthook.
+ try:
+ if hasattr(err, "__traceback__"):
+ err.__traceback__ = err.__traceback__.tb_next
+ sys.excepthook(typ, err, tb.tb_next)
+ except SystemExit: # pylint: disable=try-except-raise
+ raise
+ except Exception:
+ # Getting the output right in the case of excepthook
+ # shenanigans is kind of involved.
+ sys.stderr.write("Error in sys.excepthook:\n")
+ typ2, err2, tb2 = sys.exc_info()
+ err2.__suppress_context__ = True
+ if hasattr(err2, "__traceback__"):
+ err2.__traceback__ = err2.__traceback__.tb_next
+ sys.__excepthook__(typ2, err2, tb2.tb_next)
+ sys.stderr.write("\nOriginal exception was:\n")
+ raise ExceptionDuringRun(typ, err, tb.tb_next)
+ else:
+ sys.exit(1)
+ finally:
+ os.chdir(cwd)
+
+
+def run_python_module(args):
+ """Run a Python module, as though with ``python -m name args...``.
+
+ `args` is the argument array to present as sys.argv, including the first
+ element naming the module being executed.
+
+ This is a helper for tests, to encapsulate how to use PyRunner.
+
+ """
+ runner = PyRunner(args, as_module=True)
+ runner.prepare()
+ runner.run()
+
+
+def run_python_file(args):
+ """Run a Python file as if it were the main program on the command line.
+
+ `args` is the argument array to present as sys.argv, including the first
+ element naming the file being executed.
+
+ This is a helper for tests, to encapsulate how to use PyRunner.
+
+ """
+ runner = PyRunner(args, as_module=False)
+ runner.prepare()
+ runner.run()
+
+
+def make_code_from_py(filename):
+ """Get source from `filename` and make a code object of it."""
+ # Open the source file.
+ try:
+ source = get_python_source(filename)
+ except (IOError, NoSource):
+ raise NoSource("No file to run: '%s'" % filename)
+
+ code = compile_unicode(source, filename, "exec")
+ return code
+
+
+def make_code_from_pyc(filename):
+ """Get a code object from a .pyc file."""
+ try:
+ fpyc = open(filename, "rb")
+ except IOError:
+ raise NoCode("No file to run: '%s'" % filename)
+
+ with fpyc:
+ # First four bytes are a version-specific magic number. It has to
+ # match or we won't run the file.
+ magic = fpyc.read(4)
+ if magic != PYC_MAGIC_NUMBER:
+ raise NoCode("Bad magic number in .pyc file: {} != {}".format(magic, PYC_MAGIC_NUMBER))
+
+ date_based = True
+ if env.PYBEHAVIOR.hashed_pyc_pep552:
+ flags = struct.unpack('<L', fpyc.read(4))[0]
+ hash_based = flags & 0x01
+ if hash_based:
+ fpyc.read(8) # Skip the hash.
+ date_based = False
+ if date_based:
+ # Skip the junk in the header that we don't need.
+ fpyc.read(4) # Skip the moddate.
+ if env.PYBEHAVIOR.size_in_pyc:
+ # 3.3 added another long to the header (size), skip it.
+ fpyc.read(4)
+
+ # The rest of the file is the code object we want.
+ code = marshal.load(fpyc)
+
+ return code
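+
+
+# For reference, the .pyc layouts handled above (a sketch; sizes in bytes):
+#
+#     pre-3.3   magic(4) + moddate(4)                                + marshalled code
+#     3.3-3.6   magic(4) + moddate(4) + size(4)                      + marshalled code
+#     3.7+      magic(4) + flags(4) + [hash(8) | moddate(4)+size(4)] + marshalled code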
diff --git a/third_party/python/coverage/coverage/files.py b/third_party/python/coverage/coverage/files.py
new file mode 100644
index 0000000000..5c2ff1ace4
--- /dev/null
+++ b/third_party/python/coverage/coverage/files.py
@@ -0,0 +1,432 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""File wrangling."""
+
+import hashlib
+import fnmatch
+import ntpath
+import os
+import os.path
+import posixpath
+import re
+import sys
+
+from coverage import env
+from coverage.backward import unicode_class
+from coverage.misc import contract, CoverageException, join_regex, isolate_module
+
+
+os = isolate_module(os)
+
+
+def set_relative_directory():
+ """Set the directory that `relative_filename` will be relative to."""
+ global RELATIVE_DIR, CANONICAL_FILENAME_CACHE
+
+ # The absolute path to our current directory.
+ RELATIVE_DIR = os.path.normcase(abs_file(os.curdir) + os.sep)
+
+ # Cache of results of calling the canonical_filename() method, to
+ # avoid duplicating work.
+ CANONICAL_FILENAME_CACHE = {}
+
+
+def relative_directory():
+ """Return the directory that `relative_filename` is relative to."""
+ return RELATIVE_DIR
+
+
+@contract(returns='unicode')
+def relative_filename(filename):
+ """Return the relative form of `filename`.
+
+ The file name will be relative to the directory that was current when
+ `set_relative_directory` was called.
+
+ """
+ fnorm = os.path.normcase(filename)
+ if fnorm.startswith(RELATIVE_DIR):
+ filename = filename[len(RELATIVE_DIR):]
+ return unicode_filename(filename)
+
+
+@contract(returns='unicode')
+def canonical_filename(filename):
+ """Return a canonical file name for `filename`.
+
+ An absolute path with no redundant components and normalized case.
+
+ """
+ if filename not in CANONICAL_FILENAME_CACHE:
+ cf = filename
+ if not os.path.isabs(filename):
+ for path in [os.curdir] + sys.path:
+ if path is None:
+ continue
+ f = os.path.join(path, filename)
+ try:
+ exists = os.path.exists(f)
+ except UnicodeError:
+ exists = False
+ if exists:
+ cf = f
+ break
+ cf = abs_file(cf)
+ CANONICAL_FILENAME_CACHE[filename] = cf
+ return CANONICAL_FILENAME_CACHE[filename]
+
+
+MAX_FLAT = 200
+
+@contract(filename='unicode', returns='unicode')
+def flat_rootname(filename):
+ """A base for a flat file name to correspond to this file.
+
+ Useful for writing files about the code where you want all the files in
+ the same directory, but need to differentiate same-named files from
+ different directories.
+
+ For example, the file a/b/c.py will return 'a_b_c_py'
+
+ """
+ name = ntpath.splitdrive(filename)[1]
+ name = re.sub(r"[\\/.:]", "_", name)
+ if len(name) > MAX_FLAT:
+ h = hashlib.sha1(name.encode('UTF-8')).hexdigest()
+ name = name[-(MAX_FLAT-len(h)-1):] + '_' + h
+ return name
+
+
+if env.WINDOWS:
+
+ _ACTUAL_PATH_CACHE = {}
+ _ACTUAL_PATH_LIST_CACHE = {}
+
+ def actual_path(path):
+ """Get the actual path of `path`, including the correct case."""
+ if env.PY2 and isinstance(path, unicode_class):
+ path = path.encode(sys.getfilesystemencoding())
+ if path in _ACTUAL_PATH_CACHE:
+ return _ACTUAL_PATH_CACHE[path]
+
+ head, tail = os.path.split(path)
+ if not tail:
+ # This means head is the drive spec: normalize it.
+ actpath = head.upper()
+ elif not head:
+ actpath = tail
+ else:
+ head = actual_path(head)
+ if head in _ACTUAL_PATH_LIST_CACHE:
+ files = _ACTUAL_PATH_LIST_CACHE[head]
+ else:
+ try:
+ files = os.listdir(head)
+ except Exception:
+ # This will raise OSError, or this bizarre TypeError:
+ # https://bugs.python.org/issue1776160
+ files = []
+ _ACTUAL_PATH_LIST_CACHE[head] = files
+ normtail = os.path.normcase(tail)
+ for f in files:
+ if os.path.normcase(f) == normtail:
+ tail = f
+ break
+ actpath = os.path.join(head, tail)
+ _ACTUAL_PATH_CACHE[path] = actpath
+ return actpath
+
+else:
+ def actual_path(filename):
+ """The actual path for non-Windows platforms."""
+ return filename
+
+
+if env.PY2:
+ @contract(returns='unicode')
+ def unicode_filename(filename):
+ """Return a Unicode version of `filename`."""
+ if isinstance(filename, str):
+ encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
+ filename = filename.decode(encoding, "replace")
+ return filename
+else:
+ @contract(filename='unicode', returns='unicode')
+ def unicode_filename(filename):
+ """Return a Unicode version of `filename`."""
+ return filename
+
+
+@contract(returns='unicode')
+def abs_file(path):
+ """Return the absolute normalized form of `path`."""
+ try:
+ path = os.path.realpath(path)
+ except UnicodeError:
+ pass
+ path = os.path.abspath(path)
+ path = actual_path(path)
+ path = unicode_filename(path)
+ return path
+
+
+def python_reported_file(filename):
+ """Return the string as Python would describe this file name."""
+ if env.PYBEHAVIOR.report_absolute_files:
+ filename = os.path.abspath(filename)
+ return filename
+
+
+RELATIVE_DIR = None
+CANONICAL_FILENAME_CACHE = None
+set_relative_directory()
+
+
+def isabs_anywhere(filename):
+ """Is `filename` an absolute path on any OS?"""
+ return ntpath.isabs(filename) or posixpath.isabs(filename)
+
+
+def prep_patterns(patterns):
+ """Prepare the file patterns for use in a `FnmatchMatcher`.
+
+ If a pattern starts with a wildcard, it is used as a pattern
+ as-is. If it does not start with a wildcard, then it is made
+ absolute with the current directory.
+
+ If `patterns` is None, an empty list is returned.
+
+ """
+ prepped = []
+ for p in patterns or []:
+ if p.startswith(("*", "?")):
+ prepped.append(p)
+ else:
+ prepped.append(abs_file(p))
+ return prepped
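+
+
+# A small illustrative sketch (paths are placeholders): with the current
+# directory at /work/proj,
+#
+#     prep_patterns(["*/tests/*", "src/app.py"])
+#     # -> ["*/tests/*", "/work/proj/src/app.py"]  (roughly; abs_file also
+#     #    resolves symlinks and normalizes the path)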
+
+
+class TreeMatcher(object):
+ """A matcher for files in a tree.
+
+ Construct with a list of paths, either files or directories. Paths match
+ with the `match` method if they are one of the files, or if they are
+ somewhere in a subtree rooted at one of the directories.
+
+ """
+ def __init__(self, paths):
+ self.paths = list(paths)
+
+ def __repr__(self):
+ return "<TreeMatcher %r>" % self.paths
+
+ def info(self):
+ """A list of strings for displaying when dumping state."""
+ return self.paths
+
+ def match(self, fpath):
+ """Does `fpath` indicate a file in one of our trees?"""
+ for p in self.paths:
+ if fpath.startswith(p):
+ if fpath == p:
+ # This is the same file!
+ return True
+ if fpath[len(p)] == os.sep:
+ # This is a file in the directory
+ return True
+ return False
+
+
+class ModuleMatcher(object):
+ """A matcher for modules in a tree."""
+ def __init__(self, module_names):
+ self.modules = list(module_names)
+
+ def __repr__(self):
+ return "<ModuleMatcher %r>" % (self.modules)
+
+ def info(self):
+ """A list of strings for displaying when dumping state."""
+ return self.modules
+
+ def match(self, module_name):
+ """Does `module_name` indicate a module in one of our packages?"""
+ if not module_name:
+ return False
+
+ for m in self.modules:
+ if module_name.startswith(m):
+ if module_name == m:
+ return True
+ if module_name[len(m)] == '.':
+ # This is a module in the package
+ return True
+
+ return False
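+
+
+# Illustrative behaviour of ModuleMatcher (not part of the upstream file):
+# ModuleMatcher(["pkg"]).match("pkg") and .match("pkg.sub") are True, while
+# .match("pkgother") is False, because the character after the matched prefix
+# must be a dot.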
+
+
+class FnmatchMatcher(object):
+ """A matcher for files by file name pattern."""
+ def __init__(self, pats):
+ self.pats = list(pats)
+ self.re = fnmatches_to_regex(self.pats, case_insensitive=env.WINDOWS)
+
+ def __repr__(self):
+ return "<FnmatchMatcher %r>" % self.pats
+
+ def info(self):
+ """A list of strings for displaying when dumping state."""
+ return self.pats
+
+ def match(self, fpath):
+ """Does `fpath` match one of our file name patterns?"""
+ return self.re.match(fpath) is not None
+
+
+def sep(s):
+ """Find the path separator used in this string, or os.sep if none."""
+ sep_match = re.search(r"[\\/]", s)
+ if sep_match:
+ the_sep = sep_match.group(0)
+ else:
+ the_sep = os.sep
+ return the_sep
+
+
+def fnmatches_to_regex(patterns, case_insensitive=False, partial=False):
+ """Convert fnmatch patterns to a compiled regex that matches any of them.
+
+ Slashes are always converted to match either slash or backslash, for
+ Windows support, even when running elsewhere.
+
+ If `partial` is true, then the pattern will match if the target string
+ starts with the pattern. Otherwise, it must match the entire string.
+
+ Returns: a compiled regex object. Use the .match method to compare target
+ strings.
+
+ """
+ regexes = (fnmatch.translate(pattern) for pattern in patterns)
+ # Python 3.7's fnmatch translates "/" as "/"; earlier versions translate it as
+ # "\/", so we have to allow for an optional backslash.
+ regexes = (re.sub(r"\\?/", r"[\\\\/]", regex) for regex in regexes)
+
+ if partial:
+ # fnmatch always adds a \Z to match the whole string, which we don't
+ # want, so we remove the \Z. While removing it, we only replace \Z if
+ # followed by paren (introducing flags), or at end, to keep from
+ # destroying a literal \Z in the pattern.
+ regexes = (re.sub(r'\\Z(\(\?|$)', r'\1', regex) for regex in regexes)
+
+ flags = 0
+ if case_insensitive:
+ flags |= re.IGNORECASE
+ compiled = re.compile(join_regex(regexes), flags=flags)
+
+ return compiled
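+
+
+# A minimal usage sketch (illustrative pattern and paths only):
+#
+#     rx = fnmatches_to_regex(["*/tests/*.py"], case_insensitive=True)
+#     rx.match(r"src\tests\test_x.py")  # matches: slash and backslash both work
+#     rx.match("src/tests/helper.txt")  # None: the extension does not match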
+
+
+class PathAliases(object):
+ """A collection of aliases for paths.
+
+ When combining data files from remote machines, often the paths to source
+ code are different, for example, due to OS differences, or because of
+ serialized checkouts on continuous integration machines.
+
+ A `PathAliases` object tracks a list of pattern/result pairs, and can
+ map a path through those aliases to produce a unified path.
+
+ """
+ def __init__(self):
+ self.aliases = []
+
+ def pprint(self): # pragma: debugging
+ """Dump the important parts of the PathAliases, for debugging."""
+ for regex, result in self.aliases:
+ print("{!r} --> {!r}".format(regex.pattern, result))
+
+ def add(self, pattern, result):
+ """Add the `pattern`/`result` pair to the list of aliases.
+
+ `pattern` is an `fnmatch`-style pattern. `result` is a simple
+ string. When mapping paths, if a path starts with a match against
+ `pattern`, then that match is replaced with `result`. This models
+ isomorphic source trees being rooted at different places on two
+ different machines.
+
+ `pattern` can't end with a wildcard component, since that would
+ match an entire tree, and not just its root.
+
+ """
+ if len(pattern) > 1:
+ pattern = pattern.rstrip(r"\/")
+
+ # The pattern can't end with a wildcard component.
+ if pattern.endswith("*"):
+ raise CoverageException("Pattern must not end with wildcards.")
+ pattern_sep = sep(pattern)
+
+ # The pattern is meant to match a filepath. Let's make it absolute
+ # unless it already is, or is meant to match any prefix.
+ if not pattern.startswith('*') and not isabs_anywhere(pattern):
+ pattern = abs_file(pattern)
+ if not pattern.endswith(pattern_sep):
+ pattern += pattern_sep
+
+ # Make a regex from the pattern.
+ regex = fnmatches_to_regex([pattern], case_insensitive=True, partial=True)
+
+ # Normalize the result: it must end with a path separator.
+ result_sep = sep(result)
+ result = result.rstrip(r"\/") + result_sep
+ self.aliases.append((regex, result))
+
+ def map(self, path):
+ """Map `path` through the aliases.
+
+ `path` is checked against all of the patterns. The first pattern to
+ match is used to replace the root of the path with the result root.
+ Only one pattern is ever used. If no patterns match, `path` is
+ returned unchanged.
+
+ The separator style in the result is made to match that of the result
+ in the alias.
+
+ Returns the mapped path. If a mapping has happened, this is a
+ canonical path. If no mapping has happened, it is the original value
+ of `path` unchanged.
+
+ """
+ for regex, result in self.aliases:
+ m = regex.match(path)
+ if m:
+ new = path.replace(m.group(0), result)
+ new = new.replace(sep(path), sep(result))
+ new = canonical_filename(new)
+ return new
+ return path
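+
+
+# A minimal usage sketch of PathAliases (illustrative paths, not part of the
+# upstream file):
+#
+#     aliases = PathAliases()
+#     aliases.add("/ci/build/src", "./src")
+#     aliases.map("/ci/build/src/pkg/mod.py")
+#     # -> the canonical local form of "./src/pkg/mod.py"; a path that matches
+#     #    no alias is returned unchanged.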
+
+
+def find_python_files(dirname):
+ """Yield all of the importable Python files in `dirname`, recursively.
+
+ To be importable, the files have to be in a directory with an __init__.py,
+ except for `dirname` itself, which isn't required to have one. The
+ assumption is that `dirname` was specified directly, so the user knows
+ best, but sub-directories are checked for an __init__.py to be sure we only
+ find the importable files.
+
+ """
+ for i, (dirpath, dirnames, filenames) in enumerate(os.walk(dirname)):
+ if i > 0 and '__init__.py' not in filenames:
+ # If a directory doesn't have __init__.py, then it isn't
+ # importable and neither are its files
+ del dirnames[:]
+ continue
+ for filename in filenames:
+ # We're only interested in files that look like reasonable Python
+ # files: Must end with .py or .pyw, and must not have certain funny
+ # characters that probably mean they are editor junk.
+ if re.match(r"^[^.#~!$@%^&*()+=,]+\.pyw?$", filename):
+ yield os.path.join(dirpath, filename)
diff --git a/third_party/python/coverage/coverage/fullcoverage/encodings.py b/third_party/python/coverage/coverage/fullcoverage/encodings.py
new file mode 100644
index 0000000000..aeb416e406
--- /dev/null
+++ b/third_party/python/coverage/coverage/fullcoverage/encodings.py
@@ -0,0 +1,60 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Imposter encodings module that installs a coverage-style tracer.
+
+This is NOT the encodings module; it is an imposter that sets up tracing
+instrumentation and then replaces itself with the real encodings module.
+
+If the directory that holds this file is placed first in the PYTHONPATH when
+using "coverage" to run Python's tests, then this file will become the very
+first module imported by the internals of Python 3. It installs a
+coverage.py-compatible trace function that can watch Standard Library modules
+execute from the very earliest stages of Python's own boot process. This fixes
+a problem with coverage.py - that it starts too late to trace the coverage of
+many of the most fundamental modules in the Standard Library.
+
+"""
+
+import sys
+
+class FullCoverageTracer(object):
+ def __init__(self):
+ # `traces` is a list of trace events. Frames are tricky: the same
+ # frame object is used for a whole scope, with new line numbers
+ # written into it. So in one scope, all the frame objects are the
+ # same object, and will eventually all point to the last line
+ # executed. So we keep the line numbers alongside the frames.
+ # The list looks like:
+ #
+ # traces = [
+ # ((frame, event, arg), lineno), ...
+ # ]
+ #
+ self.traces = []
+
+ def fullcoverage_trace(self, *args):
+ frame, event, arg = args
+ self.traces.append((args, frame.f_lineno))
+ return self.fullcoverage_trace
+
+sys.settrace(FullCoverageTracer().fullcoverage_trace)
+
+# In coverage/files.py is actual_filename(), which uses glob.glob. I don't
+# understand why, but that use of glob borks everything if fullcoverage is in
+# effect. So here we make an ugly hail-mary pass to switch off glob.glob over
+# there. This means when using fullcoverage, Windows path names will not be
+# their actual case.
+
+#sys.fullcoverage = True
+
+# Finally, remove our own directory from sys.path; remove ourselves from
+# sys.modules; and re-import "encodings", which will be the real package
+# this time. Note that the delete from sys.modules dictionary has to
+# happen last, since all of the symbols in this module will become None
+# at that exact moment, including "sys".
+
+parentdir = max(filter(__file__.startswith, sys.path), key=len)
+sys.path.remove(parentdir)
+del sys.modules['encodings']
+import encodings
diff --git a/third_party/python/coverage/coverage/html.py b/third_party/python/coverage/coverage/html.py
new file mode 100644
index 0000000000..596e114351
--- /dev/null
+++ b/third_party/python/coverage/coverage/html.py
@@ -0,0 +1,511 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""HTML reporting for coverage.py."""
+
+import datetime
+import json
+import os
+import re
+import shutil
+
+import coverage
+from coverage import env
+from coverage.backward import iitems, SimpleNamespace
+from coverage.data import add_data_to_hash
+from coverage.files import flat_rootname
+from coverage.misc import CoverageException, ensure_dir, file_be_gone, Hasher, isolate_module
+from coverage.report import get_analysis_to_report
+from coverage.results import Numbers
+from coverage.templite import Templite
+
+os = isolate_module(os)
+
+
+# Static files are looked for in a list of places.
+STATIC_PATH = [
+ # The place Debian puts system Javascript libraries.
+ "/usr/share/javascript",
+
+ # Our htmlfiles directory.
+ os.path.join(os.path.dirname(__file__), "htmlfiles"),
+]
+
+
+def data_filename(fname, pkgdir=""):
+ """Return the path to a data file of ours.
+
+ The file is searched for on `STATIC_PATH`, and the first place it's found
+ is returned.
+
+ Each directory in `STATIC_PATH` is searched as-is, and also, if `pkgdir`
+ is provided, at that sub-directory.
+
+ """
+ tried = []
+ for static_dir in STATIC_PATH:
+ static_filename = os.path.join(static_dir, fname)
+ if os.path.exists(static_filename):
+ return static_filename
+ else:
+ tried.append(static_filename)
+ if pkgdir:
+ static_filename = os.path.join(static_dir, pkgdir, fname)
+ if os.path.exists(static_filename):
+ return static_filename
+ else:
+ tried.append(static_filename)
+ raise CoverageException(
+ "Couldn't find static file %r from %r, tried: %r" % (fname, os.getcwd(), tried)
+ )
+
+
+def read_data(fname):
+ """Return the contents of a data file of ours."""
+ with open(data_filename(fname)) as data_file:
+ return data_file.read()
+
+
+def write_html(fname, html):
+ """Write `html` to `fname`, properly encoded."""
+ html = re.sub(r"(\A\s+)|(\s+$)", "", html, flags=re.MULTILINE) + "\n"
+ with open(fname, "wb") as fout:
+ fout.write(html.encode('ascii', 'xmlcharrefreplace'))
+
+
+class HtmlDataGeneration(object):
+ """Generate structured data to be turned into HTML reports."""
+
+ EMPTY = "(empty)"
+
+ def __init__(self, cov):
+ self.coverage = cov
+ self.config = self.coverage.config
+ data = self.coverage.get_data()
+ self.has_arcs = data.has_arcs()
+ if self.config.show_contexts:
+ if data.measured_contexts() == set([""]):
+ self.coverage._warn("No contexts were measured")
+ data.set_query_contexts(self.config.report_contexts)
+
+ def data_for_file(self, fr, analysis):
+ """Produce the data needed for one file's report."""
+ if self.has_arcs:
+ missing_branch_arcs = analysis.missing_branch_arcs()
+ arcs_executed = analysis.arcs_executed()
+
+ if self.config.show_contexts:
+ contexts_by_lineno = analysis.data.contexts_by_lineno(analysis.filename)
+
+ lines = []
+
+ for lineno, tokens in enumerate(fr.source_token_lines(), start=1):
+ # Figure out how to mark this line.
+ category = None
+ short_annotations = []
+ long_annotations = []
+
+ if lineno in analysis.excluded:
+ category = 'exc'
+ elif lineno in analysis.missing:
+ category = 'mis'
+ elif self.has_arcs and lineno in missing_branch_arcs:
+ category = 'par'
+ for b in missing_branch_arcs[lineno]:
+ if b < 0:
+ short_annotations.append("exit")
+ else:
+ short_annotations.append(b)
+ long_annotations.append(fr.missing_arc_description(lineno, b, arcs_executed))
+ elif lineno in analysis.statements:
+ category = 'run'
+
+ contexts = contexts_label = None
+ context_list = None
+ if category and self.config.show_contexts:
+ contexts = sorted(c or self.EMPTY for c in contexts_by_lineno[lineno])
+ if contexts == [self.EMPTY]:
+ contexts_label = self.EMPTY
+ else:
+ contexts_label = "{} ctx".format(len(contexts))
+ context_list = contexts
+
+ lines.append(SimpleNamespace(
+ tokens=tokens,
+ number=lineno,
+ category=category,
+ statement=(lineno in analysis.statements),
+ contexts=contexts,
+ contexts_label=contexts_label,
+ context_list=context_list,
+ short_annotations=short_annotations,
+ long_annotations=long_annotations,
+ ))
+
+ file_data = SimpleNamespace(
+ relative_filename=fr.relative_filename(),
+ nums=analysis.numbers,
+ lines=lines,
+ )
+
+ return file_data
+
+
+class HtmlReporter(object):
+ """HTML reporting."""
+
+ # These files will be copied from the htmlfiles directory to the output
+ # directory.
+ STATIC_FILES = [
+ ("style.css", ""),
+ ("jquery.min.js", "jquery"),
+ ("jquery.ba-throttle-debounce.min.js", "jquery-throttle-debounce"),
+ ("jquery.hotkeys.js", "jquery-hotkeys"),
+ ("jquery.isonscreen.js", "jquery-isonscreen"),
+ ("jquery.tablesorter.min.js", "jquery-tablesorter"),
+ ("coverage_html.js", ""),
+ ("keybd_closed.png", ""),
+ ("keybd_open.png", ""),
+ ]
+
+ def __init__(self, cov):
+ self.coverage = cov
+ self.config = self.coverage.config
+ self.directory = self.config.html_dir
+ title = self.config.html_title
+ if env.PY2:
+ title = title.decode("utf8")
+
+ if self.config.extra_css:
+ self.extra_css = os.path.basename(self.config.extra_css)
+ else:
+ self.extra_css = None
+
+ self.data = self.coverage.get_data()
+ self.has_arcs = self.data.has_arcs()
+
+ self.file_summaries = []
+ self.all_files_nums = []
+ self.incr = IncrementalChecker(self.directory)
+ self.datagen = HtmlDataGeneration(self.coverage)
+ self.totals = Numbers()
+
+ self.template_globals = {
+ # Functions available in the templates.
+ 'escape': escape,
+ 'pair': pair,
+ 'len': len,
+
+ # Constants for this report.
+ '__url__': coverage.__url__,
+ '__version__': coverage.__version__,
+ 'title': title,
+ 'time_stamp': datetime.datetime.now().strftime('%Y-%m-%d %H:%M'),
+ 'extra_css': self.extra_css,
+ 'has_arcs': self.has_arcs,
+ 'show_contexts': self.config.show_contexts,
+
+ # Constants for all reports.
+ # These css classes determine which lines are highlighted by default.
+ 'category': {
+ 'exc': 'exc show_exc',
+ 'mis': 'mis show_mis',
+ 'par': 'par run show_par',
+ 'run': 'run',
+ }
+ }
+ self.pyfile_html_source = read_data("pyfile.html")
+ self.source_tmpl = Templite(self.pyfile_html_source, self.template_globals)
+
+ def report(self, morfs):
+ """Generate an HTML report for `morfs`.
+
+ `morfs` is a list of modules or file names.
+
+ """
+ # Read the status data and check that this run used the same
+ # global data as the last run.
+ self.incr.read()
+ self.incr.check_global_data(self.config, self.pyfile_html_source)
+
+ # Process all the files.
+ for fr, analysis in get_analysis_to_report(self.coverage, morfs):
+ self.html_file(fr, analysis)
+
+ if not self.all_files_nums:
+ raise CoverageException("No data to report.")
+
+ self.totals = sum(self.all_files_nums)
+
+ # Write the index file.
+ self.index_file()
+
+ self.make_local_static_report_files()
+ return self.totals.n_statements and self.totals.pc_covered
+
+ def make_local_static_report_files(self):
+ """Make local instances of static files for HTML report."""
+ # The files we provide must always be copied.
+ for static, pkgdir in self.STATIC_FILES:
+ shutil.copyfile(
+ data_filename(static, pkgdir),
+ os.path.join(self.directory, static)
+ )
+
+ # The user may have extra CSS they want copied.
+ if self.extra_css:
+ shutil.copyfile(
+ self.config.extra_css,
+ os.path.join(self.directory, self.extra_css)
+ )
+
+ def html_file(self, fr, analysis):
+ """Generate an HTML file for one source file."""
+ rootname = flat_rootname(fr.relative_filename())
+ html_filename = rootname + ".html"
+ ensure_dir(self.directory)
+ html_path = os.path.join(self.directory, html_filename)
+
+ # Get the numbers for this file.
+ nums = analysis.numbers
+ self.all_files_nums.append(nums)
+
+ if self.config.skip_covered:
+ # Don't report on 100% files.
+ no_missing_lines = (nums.n_missing == 0)
+ no_missing_branches = (nums.n_partial_branches == 0)
+ if no_missing_lines and no_missing_branches:
+ # If there's an existing file, remove it.
+ file_be_gone(html_path)
+ return
+
+ if self.config.skip_empty:
+ # Don't report on empty files.
+ if nums.n_statements == 0:
+ file_be_gone(html_path)
+ return
+
+ # Find out if the file on disk is already correct.
+ if self.incr.can_skip_file(self.data, fr, rootname):
+ self.file_summaries.append(self.incr.index_info(rootname))
+ return
+
+ # Write the HTML page for this file.
+ file_data = self.datagen.data_for_file(fr, analysis)
+ for ldata in file_data.lines:
+ # Build the HTML for the line.
+ html = []
+ for tok_type, tok_text in ldata.tokens:
+ if tok_type == "ws":
+ html.append(escape(tok_text))
+ else:
+ tok_html = escape(tok_text) or '&nbsp;'
+ html.append(
+ u'<span class="{}">{}</span>'.format(tok_type, tok_html)
+ )
+ ldata.html = ''.join(html)
+
+ if ldata.short_annotations:
+ # 202F is NARROW NO-BREAK SPACE.
+ # 219B is RIGHTWARDS ARROW WITH STROKE.
+ ldata.annotate = u",&nbsp;&nbsp; ".join(
+ u"{}&#x202F;&#x219B;&#x202F;{}".format(ldata.number, d)
+ for d in ldata.short_annotations
+ )
+ else:
+ ldata.annotate = None
+
+ if ldata.long_annotations:
+ longs = ldata.long_annotations
+ if len(longs) == 1:
+ ldata.annotate_long = longs[0]
+ else:
+ ldata.annotate_long = u"{:d} missed branches: {}".format(
+ len(longs),
+ u", ".join(
+ u"{:d}) {}".format(num, ann_long)
+ for num, ann_long in enumerate(longs, start=1)
+ ),
+ )
+ else:
+ ldata.annotate_long = None
+
+ css_classes = []
+ if ldata.category:
+ css_classes.append(self.template_globals['category'][ldata.category])
+ ldata.css_class = ' '.join(css_classes) or "pln"
+
+ html = self.source_tmpl.render(file_data.__dict__)
+ write_html(html_path, html)
+
+ # Save this file's information for the index file.
+ index_info = {
+ 'nums': nums,
+ 'html_filename': html_filename,
+ 'relative_filename': fr.relative_filename(),
+ }
+ self.file_summaries.append(index_info)
+ self.incr.set_index_info(rootname, index_info)
+
+ def index_file(self):
+ """Write the index.html file for this report."""
+ index_tmpl = Templite(read_data("index.html"), self.template_globals)
+
+ html = index_tmpl.render({
+ 'files': self.file_summaries,
+ 'totals': self.totals,
+ })
+
+ write_html(os.path.join(self.directory, "index.html"), html)
+
+ # Write the latest hashes for next time.
+ self.incr.write()
+
+
+class IncrementalChecker(object):
+ """Logic and data to support incremental reporting."""
+
+ STATUS_FILE = "status.json"
+ STATUS_FORMAT = 2
+
+ # pylint: disable=wrong-spelling-in-comment,useless-suppression
+ # The data looks like:
+ #
+ # {
+ # "format": 2,
+ # "globals": "540ee119c15d52a68a53fe6f0897346d",
+ # "version": "4.0a1",
+ # "files": {
+ # "cogapp___init__": {
+ # "hash": "e45581a5b48f879f301c0f30bf77a50c",
+ # "index": {
+ # "html_filename": "cogapp___init__.html",
+ # "relative_filename": "cogapp/__init__",
+ # "nums": [ 1, 14, 0, 0, 0, 0, 0 ]
+ # }
+ # },
+ # ...
+ # "cogapp_whiteutils": {
+ # "hash": "8504bb427fc488c4176809ded0277d51",
+ # "index": {
+ # "html_filename": "cogapp_whiteutils.html",
+ # "relative_filename": "cogapp/whiteutils",
+ # "nums": [ 1, 59, 0, 1, 28, 2, 2 ]
+ # }
+ # }
+ # }
+ # }
+
+ def __init__(self, directory):
+ self.directory = directory
+ self.reset()
+
+ def reset(self):
+ """Initialize to empty. Causes all files to be reported."""
+ self.globals = ''
+ self.files = {}
+
+ def read(self):
+ """Read the information we stored last time."""
+ usable = False
+ try:
+ status_file = os.path.join(self.directory, self.STATUS_FILE)
+ with open(status_file) as fstatus:
+ status = json.load(fstatus)
+ except (IOError, ValueError):
+ usable = False
+ else:
+ usable = True
+ if status['format'] != self.STATUS_FORMAT:
+ usable = False
+ elif status['version'] != coverage.__version__:
+ usable = False
+
+ if usable:
+ self.files = {}
+ for filename, fileinfo in iitems(status['files']):
+ fileinfo['index']['nums'] = Numbers(*fileinfo['index']['nums'])
+ self.files[filename] = fileinfo
+ self.globals = status['globals']
+ else:
+ self.reset()
+
+ def write(self):
+ """Write the current status."""
+ status_file = os.path.join(self.directory, self.STATUS_FILE)
+ files = {}
+ for filename, fileinfo in iitems(self.files):
+ fileinfo['index']['nums'] = fileinfo['index']['nums'].init_args()
+ files[filename] = fileinfo
+
+ status = {
+ 'format': self.STATUS_FORMAT,
+ 'version': coverage.__version__,
+ 'globals': self.globals,
+ 'files': files,
+ }
+ with open(status_file, "w") as fout:
+ json.dump(status, fout, separators=(',', ':'))
+
+ def check_global_data(self, *data):
+ """Check the global data that can affect incremental reporting."""
+ m = Hasher()
+ for d in data:
+ m.update(d)
+ these_globals = m.hexdigest()
+ if self.globals != these_globals:
+ self.reset()
+ self.globals = these_globals
+
+ def can_skip_file(self, data, fr, rootname):
+ """Can we skip reporting this file?
+
+ `data` is a CoverageData object, `fr` is a `FileReporter`, and
+ `rootname` is the name being used for the file.
+ """
+ m = Hasher()
+ m.update(fr.source().encode('utf-8'))
+ add_data_to_hash(data, fr.filename, m)
+ this_hash = m.hexdigest()
+
+ that_hash = self.file_hash(rootname)
+
+ if this_hash == that_hash:
+ # Nothing has changed to require the file to be reported again.
+ return True
+ else:
+ self.set_file_hash(rootname, this_hash)
+ return False
+
+ def file_hash(self, fname):
+ """Get the hash of `fname`'s contents."""
+ return self.files.get(fname, {}).get('hash', '')
+
+ def set_file_hash(self, fname, val):
+ """Set the hash of `fname`'s contents."""
+ self.files.setdefault(fname, {})['hash'] = val
+
+ def index_info(self, fname):
+ """Get the information for index.html for `fname`."""
+ return self.files.get(fname, {}).get('index', {})
+
+ def set_index_info(self, fname, info):
+ """Set the information for index.html for `fname`."""
+ self.files.setdefault(fname, {})['index'] = info
+
+
+# Helpers for templates and generating HTML
+
+def escape(t):
+ """HTML-escape the text in `t`.
+
+ This is only suitable for HTML text, not attributes.
+
+ """
+ # Convert HTML special chars into HTML entities.
+ return t.replace("&", "&amp;").replace("<", "&lt;")
+
+
+def pair(ratio):
+ """Format a pair of numbers so JavaScript can read them in an attribute."""
+ return "%s %s" % ratio
diff --git a/third_party/python/coverage/coverage/htmlfiles/coverage_html.js b/third_party/python/coverage/coverage/htmlfiles/coverage_html.js
new file mode 100644
index 0000000000..3bf04bf927
--- /dev/null
+++ b/third_party/python/coverage/coverage/htmlfiles/coverage_html.js
@@ -0,0 +1,589 @@
+// Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+// For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+// Coverage.py HTML report browser code.
+/*jslint browser: true, sloppy: true, vars: true, plusplus: true, maxerr: 50, indent: 4 */
+/*global coverage: true, document, window, $ */
+
+coverage = {};
+
+// Find all the elements with shortkey_* class, and use them to assign a shortcut key.
+coverage.assign_shortkeys = function () {
+ $("*[class*='shortkey_']").each(function (i, e) {
+ $.each($(e).attr("class").split(" "), function (i, c) {
+ if (/^shortkey_/.test(c)) {
+ $(document).bind('keydown', c.substr(9), function () {
+ $(e).click();
+ });
+ }
+ });
+ });
+};
+
+// Create the events for the help panel.
+coverage.wire_up_help_panel = function () {
+ $("#keyboard_icon").click(function () {
+ // Show the help panel, and position it so the keyboard icon in the
+ // panel is in the same place as the keyboard icon in the header.
+ $(".help_panel").show();
+ var koff = $("#keyboard_icon").offset();
+ var poff = $("#panel_icon").position();
+ $(".help_panel").offset({
+ top: koff.top-poff.top,
+ left: koff.left-poff.left
+ });
+ });
+ $("#panel_icon").click(function () {
+ $(".help_panel").hide();
+ });
+};
+
+// Create the events for the filter box.
+coverage.wire_up_filter = function () {
+ // Cache elements.
+ var table = $("table.index");
+ var table_rows = table.find("tbody tr");
+ var table_row_names = table_rows.find("td.name a");
+ var no_rows = $("#no_rows");
+
+ // Create a duplicate table footer that we can modify with dynamic summed values.
+ var table_footer = $("table.index tfoot tr");
+ var table_dynamic_footer = table_footer.clone();
+ table_dynamic_footer.attr('class', 'total_dynamic hidden');
+ table_footer.after(table_dynamic_footer);
+
+ // Observe filter keyevents.
+ $("#filter").on("keyup change", $.debounce(150, function (event) {
+ var filter_value = $(this).val();
+
+ if (filter_value === "") {
+ // Filter box is empty, remove all filtering.
+ table_rows.removeClass("hidden");
+
+ // Show standard footer, hide dynamic footer.
+ table_footer.removeClass("hidden");
+ table_dynamic_footer.addClass("hidden");
+
+ // Hide placeholder, show table.
+ if (no_rows.length > 0) {
+ no_rows.hide();
+ }
+ table.show();
+
+ }
+ else {
+ // Filter table items by value.
+ var hidden = 0;
+ var shown = 0;
+
+ // Hide / show elements.
+ $.each(table_row_names, function () {
+ var element = $(this).parents("tr");
+
+ if ($(this).text().indexOf(filter_value) === -1) {
+ // hide
+ element.addClass("hidden");
+ hidden++;
+ }
+ else {
+ // show
+ element.removeClass("hidden");
+ shown++;
+ }
+ });
+
+ // Show placeholder if no rows will be displayed.
+ if (no_rows.length > 0) {
+ if (shown === 0) {
+ // Show placeholder, hide table.
+ no_rows.show();
+ table.hide();
+ }
+ else {
+ // Hide placeholder, show table.
+ no_rows.hide();
+ table.show();
+ }
+ }
+
+ // Manage dynamic header:
+ if (hidden > 0) {
+ // Calculate new dynamic sum values based on visible rows.
+ for (var column = 2; column < 20; column++) {
+ // Calculate summed value.
+ var cells = table_rows.find('td:nth-child(' + column + ')');
+ if (!cells.length) {
+ // No more columns...!
+ break;
+ }
+
+ var sum = 0, numer = 0, denom = 0;
+ $.each(cells.filter(':visible'), function () {
+ var ratio = $(this).data("ratio");
+ if (ratio) {
+ var splitted = ratio.split(" ");
+ numer += parseInt(splitted[0], 10);
+ denom += parseInt(splitted[1], 10);
+ }
+ else {
+ sum += parseInt(this.innerHTML, 10);
+ }
+ });
+
+ // Get footer cell element.
+ var footer_cell = table_dynamic_footer.find('td:nth-child(' + column + ')');
+
+ // Set value into dynamic footer cell element.
+ if (cells[0].innerHTML.indexOf('%') > -1) {
+ // Percentage columns use the numerator and denominator,
+ // and adapt to the number of decimal places.
+ var match = /\.([0-9]+)/.exec(cells[0].innerHTML);
+ var places = 0;
+ if (match) {
+ places = match[1].length;
+ }
+ var pct = numer * 100 / denom;
+ footer_cell.text(pct.toFixed(places) + '%');
+ }
+ else {
+ footer_cell.text(sum);
+ }
+ }
+
+ // Hide standard footer, show dynamic footer.
+ table_footer.addClass("hidden");
+ table_dynamic_footer.removeClass("hidden");
+ }
+ else {
+ // Show standard footer, hide dynamic footer.
+ table_footer.removeClass("hidden");
+ table_dynamic_footer.addClass("hidden");
+ }
+ }
+ }));
+
+ // Trigger change event on setup, to force filter on page refresh
+ // (filter value may still be present).
+ $("#filter").trigger("change");
+};
+
+// Loaded on index.html
+coverage.index_ready = function ($) {
+ // Look for a localStorage item containing previous sort settings:
+ var sort_list = [];
+ var storage_name = "COVERAGE_INDEX_SORT";
+ var stored_list = undefined;
+ try {
+ stored_list = localStorage.getItem(storage_name);
+ } catch(err) {}
+
+ if (stored_list) {
+ sort_list = JSON.parse('[[' + stored_list + ']]');
+ }
+
+ // Create a new widget which exists only to save and restore
+ // the sort order:
+ $.tablesorter.addWidget({
+ id: "persistentSort",
+
+ // Format is called by the widget before displaying:
+ format: function (table) {
+ if (table.config.sortList.length === 0 && sort_list.length > 0) {
+ // This table hasn't been sorted before - we'll use
+ // our stored settings:
+ $(table).trigger('sorton', [sort_list]);
+ }
+ else {
+ // This is not the first load - something has
+ // already defined sorting so we'll just update
+ // our stored value to match:
+ sort_list = table.config.sortList;
+ }
+ }
+ });
+
+ // Configure our tablesorter to handle the variable number of
+ // columns produced depending on report options:
+ var headers = [];
+ var col_count = $("table.index > thead > tr > th").length;
+
+ headers[0] = { sorter: 'text' };
+ for (i = 1; i < col_count-1; i++) {
+ headers[i] = { sorter: 'digit' };
+ }
+ headers[col_count-1] = { sorter: 'percent' };
+
+ // Enable the table sorter:
+ $("table.index").tablesorter({
+ widgets: ['persistentSort'],
+ headers: headers
+ });
+
+ coverage.assign_shortkeys();
+ coverage.wire_up_help_panel();
+ coverage.wire_up_filter();
+
+ // Watch for page unload events so we can save the final sort settings:
+ $(window).unload(function () {
+ try {
+ localStorage.setItem(storage_name, sort_list.toString())
+ } catch(err) {}
+ });
+};
+
+// -- pyfile stuff --
+
+coverage.pyfile_ready = function ($) {
+ // If we're directed to a particular line number, highlight the line.
+ var frag = location.hash;
+ if (frag.length > 2 && frag[1] === 't') {
+ $(frag).addClass('highlight');
+ coverage.set_sel(parseInt(frag.substr(2), 10));
+ }
+ else {
+ coverage.set_sel(0);
+ }
+
+ $(document)
+ .bind('keydown', 'j', coverage.to_next_chunk_nicely)
+ .bind('keydown', 'k', coverage.to_prev_chunk_nicely)
+ .bind('keydown', '0', coverage.to_top)
+ .bind('keydown', '1', coverage.to_first_chunk)
+ ;
+
+ $(".button_toggle_run").click(function (evt) {coverage.toggle_lines(evt.target, "run");});
+ $(".button_toggle_exc").click(function (evt) {coverage.toggle_lines(evt.target, "exc");});
+ $(".button_toggle_mis").click(function (evt) {coverage.toggle_lines(evt.target, "mis");});
+ $(".button_toggle_par").click(function (evt) {coverage.toggle_lines(evt.target, "par");});
+
+ coverage.assign_shortkeys();
+ coverage.wire_up_help_panel();
+
+ coverage.init_scroll_markers();
+
+ // Rebuild scroll markers when the window height changes.
+ $(window).resize(coverage.build_scroll_markers);
+};
+
+coverage.toggle_lines = function (btn, cls) {
+ btn = $(btn);
+ var show = "show_"+cls;
+ if (btn.hasClass(show)) {
+ $("#source ." + cls).removeClass(show);
+ btn.removeClass(show);
+ }
+ else {
+ $("#source ." + cls).addClass(show);
+ btn.addClass(show);
+ }
+ coverage.build_scroll_markers();
+};
+
+// Return the nth line div.
+coverage.line_elt = function (n) {
+ return $("#t" + n);
+};
+
+// Return the nth line number div.
+coverage.num_elt = function (n) {
+ return $("#n" + n);
+};
+
+// Set the selection. b and e are line numbers.
+coverage.set_sel = function (b, e) {
+ // The first line selected.
+ coverage.sel_begin = b;
+ // The next line not selected.
+ coverage.sel_end = (e === undefined) ? b+1 : e;
+};
+
+coverage.to_top = function () {
+ coverage.set_sel(0, 1);
+ coverage.scroll_window(0);
+};
+
+coverage.to_first_chunk = function () {
+ coverage.set_sel(0, 1);
+ coverage.to_next_chunk();
+};
+
+// Return a string indicating what kind of chunk this line belongs to,
+// or null if not a chunk.
+coverage.chunk_indicator = function (line_elt) {
+ var klass = line_elt.attr('class');
+ if (klass) {
+ var m = klass.match(/\bshow_\w+\b/);
+ if (m) {
+ return m[0];
+ }
+ }
+ return null;
+};
+
+coverage.to_next_chunk = function () {
+ var c = coverage;
+
+ // Find the start of the next colored chunk.
+ var probe = c.sel_end;
+ var chunk_indicator, probe_line;
+ while (true) {
+ probe_line = c.line_elt(probe);
+ if (probe_line.length === 0) {
+ return;
+ }
+ chunk_indicator = c.chunk_indicator(probe_line);
+ if (chunk_indicator) {
+ break;
+ }
+ probe++;
+ }
+
+ // There's a next chunk, `probe` points to it.
+ var begin = probe;
+
+ // Find the end of this chunk.
+ var next_indicator = chunk_indicator;
+ while (next_indicator === chunk_indicator) {
+ probe++;
+ probe_line = c.line_elt(probe);
+ next_indicator = c.chunk_indicator(probe_line);
+ }
+ c.set_sel(begin, probe);
+ c.show_selection();
+};
+
+coverage.to_prev_chunk = function () {
+ var c = coverage;
+
+ // Find the end of the prev colored chunk.
+ var probe = c.sel_begin-1;
+ var probe_line = c.line_elt(probe);
+ if (probe_line.length === 0) {
+ return;
+ }
+ var chunk_indicator = c.chunk_indicator(probe_line);
+ while (probe > 0 && !chunk_indicator) {
+ probe--;
+ probe_line = c.line_elt(probe);
+ if (probe_line.length === 0) {
+ return;
+ }
+ chunk_indicator = c.chunk_indicator(probe_line);
+ }
+
+ // There's a prev chunk, `probe` points to its last line.
+ var end = probe+1;
+
+ // Find the beginning of this chunk.
+ var prev_indicator = chunk_indicator;
+ while (prev_indicator === chunk_indicator) {
+ probe--;
+ probe_line = c.line_elt(probe);
+ prev_indicator = c.chunk_indicator(probe_line);
+ }
+ c.set_sel(probe+1, end);
+ c.show_selection();
+};
+
+// Return the line number of the line nearest pixel position pos
+coverage.line_at_pos = function (pos) {
+ var l1 = coverage.line_elt(1),
+ l2 = coverage.line_elt(2),
+ result;
+ if (l1.length && l2.length) {
+ var l1_top = l1.offset().top,
+ line_height = l2.offset().top - l1_top,
+ nlines = (pos - l1_top) / line_height;
+ if (nlines < 1) {
+ result = 1;
+ }
+ else {
+ result = Math.ceil(nlines);
+ }
+ }
+ else {
+ result = 1;
+ }
+ return result;
+};
+
+// Returns 0, 1, or 2: how many of the two ends of the selection are on
+// the screen right now?
+coverage.selection_ends_on_screen = function () {
+ if (coverage.sel_begin === 0) {
+ return 0;
+ }
+
+ var top = coverage.line_elt(coverage.sel_begin);
+ var next = coverage.line_elt(coverage.sel_end-1);
+
+ return (
+ (top.isOnScreen() ? 1 : 0) +
+ (next.isOnScreen() ? 1 : 0)
+ );
+};
+
+coverage.to_next_chunk_nicely = function () {
+ coverage.finish_scrolling();
+ if (coverage.selection_ends_on_screen() === 0) {
+ // The selection is entirely off the screen: select the top line on
+ // the screen.
+ var win = $(window);
+ coverage.select_line_or_chunk(coverage.line_at_pos(win.scrollTop()));
+ }
+ coverage.to_next_chunk();
+};
+
+coverage.to_prev_chunk_nicely = function () {
+ coverage.finish_scrolling();
+ if (coverage.selection_ends_on_screen() === 0) {
+ var win = $(window);
+ coverage.select_line_or_chunk(coverage.line_at_pos(win.scrollTop() + win.height()));
+ }
+ coverage.to_prev_chunk();
+};
+
+// Select line number lineno, or if it is in a colored chunk, select the
+// entire chunk
+coverage.select_line_or_chunk = function (lineno) {
+ var c = coverage;
+ var probe_line = c.line_elt(lineno);
+ if (probe_line.length === 0) {
+ return;
+ }
+ var the_indicator = c.chunk_indicator(probe_line);
+ if (the_indicator) {
+ // The line is in a highlighted chunk.
+ // Search backward for the first line.
+ var probe = lineno;
+ var indicator = the_indicator;
+ while (probe > 0 && indicator === the_indicator) {
+ probe--;
+ probe_line = c.line_elt(probe);
+ if (probe_line.length === 0) {
+ break;
+ }
+ indicator = c.chunk_indicator(probe_line);
+ }
+ var begin = probe + 1;
+
+ // Search forward for the last line.
+ probe = lineno;
+ indicator = the_indicator;
+ while (indicator === the_indicator) {
+ probe++;
+ probe_line = c.line_elt(probe);
+ indicator = c.chunk_indicator(probe_line);
+ }
+
+ coverage.set_sel(begin, probe);
+ }
+ else {
+ coverage.set_sel(lineno);
+ }
+};
+
+coverage.show_selection = function () {
+ var c = coverage;
+
+ // Highlight the lines in the chunk
+ $(".linenos .highlight").removeClass("highlight");
+ for (var probe = c.sel_begin; probe > 0 && probe < c.sel_end; probe++) {
+ c.num_elt(probe).addClass("highlight");
+ }
+
+ c.scroll_to_selection();
+};
+
+coverage.scroll_to_selection = function () {
+ // Scroll the page if the chunk isn't fully visible.
+ if (coverage.selection_ends_on_screen() < 2) {
+ // Need to move the page. The html,body trick makes it scroll in all
+ // browsers; got it from http://stackoverflow.com/questions/3042651
+ var top = coverage.line_elt(coverage.sel_begin);
+ var top_pos = parseInt(top.offset().top, 10);
+ coverage.scroll_window(top_pos - 30);
+ }
+};
+
+coverage.scroll_window = function (to_pos) {
+ $("html,body").animate({scrollTop: to_pos}, 200);
+};
+
+coverage.finish_scrolling = function () {
+ $("html,body").stop(true, true);
+};
+
+coverage.init_scroll_markers = function () {
+ var c = coverage;
+ // Init some variables
+ c.lines_len = $('#source p').length;
+ c.body_h = $('body').height();
+ c.header_h = $('div#header').height();
+
+ // Build html
+ c.build_scroll_markers();
+};
+
+coverage.build_scroll_markers = function () {
+ var c = coverage,
+ min_line_height = 3,
+ max_line_height = 10,
+ visible_window_h = $(window).height();
+
+ c.lines_to_mark = $('#source').find('p.show_run, p.show_mis, p.show_exc, p.show_par');
+ $('#scroll_marker').remove();
+ // Don't build markers if the window has no scroll bar.
+ if (c.body_h <= visible_window_h) {
+ return;
+ }
+
+ $("body").append("<div id='scroll_marker'>&nbsp;</div>");
+ var scroll_marker = $('#scroll_marker'),
+ marker_scale = scroll_marker.height() / c.body_h,
+ line_height = scroll_marker.height() / c.lines_len;
+
+ // Line height must be between the extremes.
+ if (line_height > min_line_height) {
+ if (line_height > max_line_height) {
+ line_height = max_line_height;
+ }
+ }
+ else {
+ line_height = min_line_height;
+ }
+
+ var previous_line = -99,
+ last_mark,
+ last_top,
+ offsets = {};
+
+ // Calculate line offsets outside loop to prevent relayouts
+ c.lines_to_mark.each(function() {
+ offsets[this.id] = $(this).offset().top;
+ });
+ c.lines_to_mark.each(function () {
+ var id_name = $(this).attr('id'),
+ line_top = Math.round(offsets[id_name] * marker_scale),
+ line_number = parseInt(id_name.substring(1, id_name.length));
+
+ if (line_number === previous_line + 1) {
+ // This line is contiguous with the previous marked line, so just make the previous mark taller.
+ last_mark.css({
+ 'height': line_top + line_height - last_top
+ });
+ }
+ else {
+ // Add colored line in scroll_marker block.
+ scroll_marker.append('<div id="m' + line_number + '" class="marker"></div>');
+ last_mark = $('#m' + line_number);
+ last_mark.css({
+ 'height': line_height,
+ 'top': line_top
+ });
+ last_top = line_top;
+ }
+
+ previous_line = line_number;
+ });
+};
diff --git a/third_party/python/coverage/coverage/htmlfiles/index.html b/third_party/python/coverage/coverage/htmlfiles/index.html
new file mode 100644
index 0000000000..4129bc31b9
--- /dev/null
+++ b/third_party/python/coverage/coverage/htmlfiles/index.html
@@ -0,0 +1,118 @@
+{# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 #}
+{# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt #}
+
+<!DOCTYPE html>
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+ <title>{{ title|escape }}</title>
+ <link rel="stylesheet" href="style.css" type="text/css">
+ {% if extra_css %}
+ <link rel="stylesheet" href="{{ extra_css }}" type="text/css">
+ {% endif %}
+ <script type="text/javascript" src="jquery.min.js"></script>
+ <script type="text/javascript" src="jquery.ba-throttle-debounce.min.js"></script>
+ <script type="text/javascript" src="jquery.tablesorter.min.js"></script>
+ <script type="text/javascript" src="jquery.hotkeys.js"></script>
+ <script type="text/javascript" src="coverage_html.js"></script>
+ <script type="text/javascript">
+ jQuery(document).ready(coverage.index_ready);
+ </script>
+</head>
+<body class="indexfile">
+
+<div id="header">
+ <div class="content">
+ <h1>{{ title|escape }}:
+ <span class="pc_cov">{{totals.pc_covered_str}}%</span>
+ </h1>
+
+ <img id="keyboard_icon" src="keybd_closed.png" alt="Show keyboard shortcuts" />
+
+ <form id="filter_container">
+ <input id="filter" type="text" value="" placeholder="filter..." />
+ </form>
+ </div>
+</div>
+
+<div class="help_panel">
+ <img id="panel_icon" src="keybd_open.png" alt="Hide keyboard shortcuts" />
+ <p class="legend">Hot-keys on this page</p>
+ <div>
+ <p class="keyhelp">
+ <span class="key">n</span>
+ <span class="key">s</span>
+ <span class="key">m</span>
+ <span class="key">x</span>
+ {% if has_arcs %}
+ <span class="key">b</span>
+ <span class="key">p</span>
+ {% endif %}
+ <span class="key">c</span> &nbsp; change column sorting
+ </p>
+ </div>
+</div>
+
+<div id="index">
+ <table class="index">
+ <thead>
+ {# The title="" attr doesn't work in Safari. #}
+ <tr class="tablehead" title="Click to sort">
+ <th class="name left headerSortDown shortkey_n">Module</th>
+ <th class="shortkey_s">statements</th>
+ <th class="shortkey_m">missing</th>
+ <th class="shortkey_x">excluded</th>
+ {% if has_arcs %}
+ <th class="shortkey_b">branches</th>
+ <th class="shortkey_p">partial</th>
+ {% endif %}
+ <th class="right shortkey_c">coverage</th>
+ </tr>
+ </thead>
+ {# HTML syntax requires thead, tfoot, tbody #}
+ <tfoot>
+ <tr class="total">
+ <td class="name left">Total</td>
+ <td>{{totals.n_statements}}</td>
+ <td>{{totals.n_missing}}</td>
+ <td>{{totals.n_excluded}}</td>
+ {% if has_arcs %}
+ <td>{{totals.n_branches}}</td>
+ <td>{{totals.n_partial_branches}}</td>
+ {% endif %}
+ <td class="right" data-ratio="{{totals.ratio_covered|pair}}">{{totals.pc_covered_str}}%</td>
+ </tr>
+ </tfoot>
+ <tbody>
+ {% for file in files %}
+ <tr class="file">
+ <td class="name left"><a href="{{file.html_filename}}">{{file.relative_filename}}</a></td>
+ <td>{{file.nums.n_statements}}</td>
+ <td>{{file.nums.n_missing}}</td>
+ <td>{{file.nums.n_excluded}}</td>
+ {% if has_arcs %}
+ <td>{{file.nums.n_branches}}</td>
+ <td>{{file.nums.n_partial_branches}}</td>
+ {% endif %}
+ <td class="right" data-ratio="{{file.nums.ratio_covered|pair}}">{{file.nums.pc_covered_str}}%</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+
+ <p id="no_rows">
+ No items found using the specified filter.
+ </p>
+</div>
+
+<div id="footer">
+ <div class="content">
+ <p>
+ <a class="nav" href="{{__url__}}">coverage.py v{{__version__}}</a>,
+ created at {{ time_stamp }}
+ </p>
+ </div>
+</div>
+
+</body>
+</html>
diff --git a/third_party/python/coverage/coverage/htmlfiles/jquery.ba-throttle-debounce.min.js b/third_party/python/coverage/coverage/htmlfiles/jquery.ba-throttle-debounce.min.js
new file mode 100644
index 0000000000..648fe5d3c2
--- /dev/null
+++ b/third_party/python/coverage/coverage/htmlfiles/jquery.ba-throttle-debounce.min.js
@@ -0,0 +1,9 @@
+/*
+ * jQuery throttle / debounce - v1.1 - 3/7/2010
+ * http://benalman.com/projects/jquery-throttle-debounce-plugin/
+ *
+ * Copyright (c) 2010 "Cowboy" Ben Alman
+ * Dual licensed under the MIT and GPL licenses.
+ * http://benalman.com/about/license/
+ */
+(function(b,c){var $=b.jQuery||b.Cowboy||(b.Cowboy={}),a;$.throttle=a=function(e,f,j,i){var h,d=0;if(typeof f!=="boolean"){i=j;j=f;f=c}function g(){var o=this,m=+new Date()-d,n=arguments;function l(){d=+new Date();j.apply(o,n)}function k(){h=c}if(i&&!h){l()}h&&clearTimeout(h);if(i===c&&m>e){l()}else{if(f!==true){h=setTimeout(i?k:l,i===c?e-m:e)}}}if($.guid){g.guid=j.guid=j.guid||$.guid++}return g};$.debounce=function(d,e,f){return f===c?a(d,e,false):a(d,f,e!==false)}})(this);
diff --git a/third_party/python/coverage/coverage/htmlfiles/jquery.hotkeys.js b/third_party/python/coverage/coverage/htmlfiles/jquery.hotkeys.js
new file mode 100644
index 0000000000..09b21e03c7
--- /dev/null
+++ b/third_party/python/coverage/coverage/htmlfiles/jquery.hotkeys.js
@@ -0,0 +1,99 @@
+/*
+ * jQuery Hotkeys Plugin
+ * Copyright 2010, John Resig
+ * Dual licensed under the MIT or GPL Version 2 licenses.
+ *
+ * Based upon the plugin by Tzury Bar Yochay:
+ * http://github.com/tzuryby/hotkeys
+ *
+ * Original idea by:
+ * Binny V A, http://www.openjs.com/scripts/events/keyboard_shortcuts/
+*/
+
+(function(jQuery){
+
+ jQuery.hotkeys = {
+ version: "0.8",
+
+ specialKeys: {
+ 8: "backspace", 9: "tab", 13: "return", 16: "shift", 17: "ctrl", 18: "alt", 19: "pause",
+ 20: "capslock", 27: "esc", 32: "space", 33: "pageup", 34: "pagedown", 35: "end", 36: "home",
+ 37: "left", 38: "up", 39: "right", 40: "down", 45: "insert", 46: "del",
+ 96: "0", 97: "1", 98: "2", 99: "3", 100: "4", 101: "5", 102: "6", 103: "7",
+ 104: "8", 105: "9", 106: "*", 107: "+", 109: "-", 110: ".", 111 : "/",
+ 112: "f1", 113: "f2", 114: "f3", 115: "f4", 116: "f5", 117: "f6", 118: "f7", 119: "f8",
+ 120: "f9", 121: "f10", 122: "f11", 123: "f12", 144: "numlock", 145: "scroll", 191: "/", 224: "meta"
+ },
+
+ shiftNums: {
+ "`": "~", "1": "!", "2": "@", "3": "#", "4": "$", "5": "%", "6": "^", "7": "&",
+ "8": "*", "9": "(", "0": ")", "-": "_", "=": "+", ";": ": ", "'": "\"", ",": "<",
+ ".": ">", "/": "?", "\\": "|"
+ }
+ };
+
+ function keyHandler( handleObj ) {
+ // Only care when a possible input has been specified
+ if ( typeof handleObj.data !== "string" ) {
+ return;
+ }
+
+ var origHandler = handleObj.handler,
+ keys = handleObj.data.toLowerCase().split(" ");
+
+ handleObj.handler = function( event ) {
+ // Don't fire in text-accepting inputs that we didn't directly bind to
+ if ( this !== event.target && (/textarea|select/i.test( event.target.nodeName ) ||
+ event.target.type === "text") ) {
+ return;
+ }
+
+ // Keypress represents characters, not special keys
+ var special = event.type !== "keypress" && jQuery.hotkeys.specialKeys[ event.which ],
+ character = String.fromCharCode( event.which ).toLowerCase(),
+ key, modif = "", possible = {};
+
+ // check combinations (alt|ctrl|shift+anything)
+ if ( event.altKey && special !== "alt" ) {
+ modif += "alt+";
+ }
+
+ if ( event.ctrlKey && special !== "ctrl" ) {
+ modif += "ctrl+";
+ }
+
+ // TODO: Need to make sure this works consistently across platforms
+ if ( event.metaKey && !event.ctrlKey && special !== "meta" ) {
+ modif += "meta+";
+ }
+
+ if ( event.shiftKey && special !== "shift" ) {
+ modif += "shift+";
+ }
+
+ if ( special ) {
+ possible[ modif + special ] = true;
+
+ } else {
+ possible[ modif + character ] = true;
+ possible[ modif + jQuery.hotkeys.shiftNums[ character ] ] = true;
+
+ // "$" can be triggered as "Shift+4" or "Shift+$" or just "$"
+ if ( modif === "shift+" ) {
+ possible[ jQuery.hotkeys.shiftNums[ character ] ] = true;
+ }
+ }
+
+ for ( var i = 0, l = keys.length; i < l; i++ ) {
+ if ( possible[ keys[i] ] ) {
+ return origHandler.apply( this, arguments );
+ }
+ }
+ };
+ }
+
+ jQuery.each([ "keydown", "keyup", "keypress" ], function() {
+ jQuery.event.special[ this ] = { add: keyHandler };
+ });
+
+})( jQuery );
diff --git a/third_party/python/coverage/coverage/htmlfiles/jquery.isonscreen.js b/third_party/python/coverage/coverage/htmlfiles/jquery.isonscreen.js
new file mode 100644
index 0000000000..0182ebd213
--- /dev/null
+++ b/third_party/python/coverage/coverage/htmlfiles/jquery.isonscreen.js
@@ -0,0 +1,53 @@
+/* Copyright (c) 2010
+ * @author Laurence Wheway
+ * Dual licensed under the MIT (http://www.opensource.org/licenses/mit-license.php)
+ * and GPL (http://www.opensource.org/licenses/gpl-license.php) licenses.
+ *
+ * @version 1.2.0
+ */
+(function($) {
+ jQuery.extend({
+ isOnScreen: function(box, container) {
+ // Ensure numbers come in as integers (not strings) and remove 'px' if it's there.
+ for(var i in box){box[i] = parseFloat(box[i])};
+ for(var i in container){container[i] = parseFloat(container[i])};
+
+ if(!container){
+ container = {
+ left: $(window).scrollLeft(),
+ top: $(window).scrollTop(),
+ width: $(window).width(),
+ height: $(window).height()
+ }
+ }
+
+ if( box.left+box.width-container.left > 0 &&
+ box.left < container.width+container.left &&
+ box.top+box.height-container.top > 0 &&
+ box.top < container.height+container.top
+ ) return true;
+ return false;
+ }
+ })
+
+
+ jQuery.fn.isOnScreen = function (container) {
+ for(var i in container){container[i] = parseFloat(container[i])};
+
+ if(!container){
+ container = {
+ left: $(window).scrollLeft(),
+ top: $(window).scrollTop(),
+ width: $(window).width(),
+ height: $(window).height()
+ }
+ }
+
+ if( $(this).offset().left+$(this).width()-container.left > 0 &&
+ $(this).offset().left < container.width+container.left &&
+ $(this).offset().top+$(this).height()-container.top > 0 &&
+ $(this).offset().top < container.height+container.top
+ ) return true;
+ return false;
+ }
+})(jQuery);
diff --git a/third_party/python/coverage/coverage/htmlfiles/jquery.min.js b/third_party/python/coverage/coverage/htmlfiles/jquery.min.js
new file mode 100644
index 0000000000..d1608e37ff
--- /dev/null
+++ b/third_party/python/coverage/coverage/htmlfiles/jquery.min.js
@@ -0,0 +1,4 @@
+/*! jQuery v1.11.1 | (c) 2005, 2014 jQuery Foundation, Inc. | jquery.org/license */
+!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=c.slice,e=c.concat,f=c.push,g=c.indexOf,h={},i=h.toString,j=h.hasOwnProperty,k={},l="1.11.1",m=function(a,b){return new m.fn.init(a,b)},n=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,o=/^-ms-/,p=/-([\da-z])/gi,q=function(a,b){return b.toUpperCase()};m.fn=m.prototype={jquery:l,constructor:m,selector:"",length:0,toArray:function(){return d.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:d.call(this)},pushStack:function(a){var b=m.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a,b){return m.each(this,a,b)},map:function(a){return this.pushStack(m.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(d.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:c.sort,splice:c.splice},m.extend=m.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||m.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(e=arguments[h]))for(d in e)a=g[d],c=e[d],g!==c&&(j&&c&&(m.isPlainObject(c)||(b=m.isArray(c)))?(b?(b=!1,f=a&&m.isArray(a)?a:[]):f=a&&m.isPlainObject(a)?a:{},g[d]=m.extend(j,f,c)):void 0!==c&&(g[d]=c));return g},m.extend({expando:"jQuery"+(l+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===m.type(a)},isArray:Array.isArray||function(a){return"array"===m.type(a)},isWindow:function(a){return null!=a&&a==a.window},isNumeric:function(a){return!m.isArray(a)&&a-parseFloat(a)>=0},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},isPlainObject:function(a){var b;if(!a||"object"!==m.type(a)||a.nodeType||m.isWindow(a))return!1;try{if(a.constructor&&!j.call(a,"constructor")&&!j.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}if(k.ownLast)for(b in a)return j.call(a,b);for(b in a);return void 0===b||j.call(a,b)},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?h[i.call(a)]||"object":typeof a},globalEval:function(b){b&&m.trim(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(o,"ms-").replace(p,q)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b,c){var d,e=0,f=a.length,g=r(a);if(c){if(g){for(;f>e;e++)if(d=b.apply(a[e],c),d===!1)break}else for(e in a)if(d=b.apply(a[e],c),d===!1)break}else if(g){for(;f>e;e++)if(d=b.call(a[e],e,a[e]),d===!1)break}else for(e in a)if(d=b.call(a[e],e,a[e]),d===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(n,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(r(Object(a))?m.merge(c,"string"==typeof a?[a]:a):f.call(c,a)),c},inArray:function(a,b,c){var d;if(b){if(g)return g.call(b,a,c);for(d=b.length,c=c?0>c?Math.max(0,d+c):c:0;d>c;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,b){var c=+b.length,d=0,e=a.length;while(c>d)a[e++]=b[d++];if(c!==c)while(void 0!==b[d])a[e++]=b[d++];return 
a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,c){var d,f=0,g=a.length,h=r(a),i=[];if(h)for(;g>f;f++)d=b(a[f],f,c),null!=d&&i.push(d);else for(f in a)d=b(a[f],f,c),null!=d&&i.push(d);return e.apply([],i)},guid:1,proxy:function(a,b){var c,e,f;return"string"==typeof b&&(f=a[b],b=a,a=f),m.isFunction(a)?(c=d.call(arguments,2),e=function(){return a.apply(b||this,c.concat(d.call(arguments)))},e.guid=a.guid=a.guid||m.guid++,e):void 0},now:function(){return+new Date},support:k}),m.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(a,b){h["[object "+b+"]"]=b.toLowerCase()});function r(a){var b=a.length,c=m.type(a);return"function"===c||m.isWindow(a)?!1:1===a.nodeType&&b?!0:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var s=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+-new Date,v=a.document,w=0,x=0,y=gb(),z=gb(),A=gb(),B=function(a,b){return a===b&&(l=!0),0},C="undefined",D=1<<31,E={}.hasOwnProperty,F=[],G=F.pop,H=F.push,I=F.push,J=F.slice,K=F.indexOf||function(a){for(var b=0,c=this.length;c>b;b++)if(this[b]===a)return b;return-1},L="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",M="[\\x20\\t\\r\\n\\f]",N="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",O=N.replace("w","w#"),P="\\["+M+"*("+N+")(?:"+M+"*([*^$|!~]?=)"+M+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+O+"))|)"+M+"*\\]",Q=":("+N+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+P+")*)|.*)\\)|)",R=new RegExp("^"+M+"+|((?:^|[^\\\\])(?:\\\\.)*)"+M+"+$","g"),S=new RegExp("^"+M+"*,"+M+"*"),T=new RegExp("^"+M+"*([>+~]|"+M+")"+M+"*"),U=new RegExp("="+M+"*([^\\]'\"]*?)"+M+"*\\]","g"),V=new RegExp(Q),W=new RegExp("^"+O+"$"),X={ID:new RegExp("^#("+N+")"),CLASS:new RegExp("^\\.("+N+")"),TAG:new RegExp("^("+N.replace("w","w*")+")"),ATTR:new RegExp("^"+P),PSEUDO:new RegExp("^"+Q),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+L+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/^(?:input|select|textarea|button)$/i,Z=/^h\d$/i,$=/^[^{]+\{\s*\[native \w/,_=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ab=/[+~]/,bb=/'|\\/g,cb=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),db=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)};try{I.apply(F=J.call(v.childNodes),v.childNodes),F[v.childNodes.length].nodeType}catch(eb){I={apply:F.length?function(a,b){H.apply(a,J.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function fb(a,b,d,e){var f,h,j,k,l,o,r,s,w,x;if((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,d=d||[],!a||"string"!=typeof a)return d;if(1!==(k=b.nodeType)&&9!==k)return[];if(p&&!e){if(f=_.exec(a))if(j=f[1]){if(9===k){if(h=b.getElementById(j),!h||!h.parentNode)return d;if(h.id===j)return d.push(h),d}else if(b.ownerDocument&&(h=b.ownerDocument.getElementById(j))&&t(b,h)&&h.id===j)return d.push(h),d}else{if(f[2])return I.apply(d,b.getElementsByTagName(a)),d;if((j=f[3])&&c.getElementsByClassName&&b.getElementsByClassName)return 
I.apply(d,b.getElementsByClassName(j)),d}if(c.qsa&&(!q||!q.test(a))){if(s=r=u,w=b,x=9===k&&a,1===k&&"object"!==b.nodeName.toLowerCase()){o=g(a),(r=b.getAttribute("id"))?s=r.replace(bb,"\\$&"):b.setAttribute("id",s),s="[id='"+s+"'] ",l=o.length;while(l--)o[l]=s+qb(o[l]);w=ab.test(a)&&ob(b.parentNode)||b,x=o.join(",")}if(x)try{return I.apply(d,w.querySelectorAll(x)),d}catch(y){}finally{r||b.removeAttribute("id")}}}return i(a.replace(R,"$1"),b,d,e)}function gb(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function hb(a){return a[u]=!0,a}function ib(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function jb(a,b){var c=a.split("|"),e=a.length;while(e--)d.attrHandle[c[e]]=b}function kb(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||D)-(~a.sourceIndex||D);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function lb(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function mb(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function nb(a){return hb(function(b){return b=+b,hb(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function ob(a){return a&&typeof a.getElementsByTagName!==C&&a}c=fb.support={},f=fb.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=fb.setDocument=function(a){var b,e=a?a.ownerDocument||a:v,g=e.defaultView;return e!==n&&9===e.nodeType&&e.documentElement?(n=e,o=e.documentElement,p=!f(e),g&&g!==g.top&&(g.addEventListener?g.addEventListener("unload",function(){m()},!1):g.attachEvent&&g.attachEvent("onunload",function(){m()})),c.attributes=ib(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=ib(function(a){return a.appendChild(e.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=$.test(e.getElementsByClassName)&&ib(function(a){return a.innerHTML="<div class='a'></div><div class='a i'></div>",a.firstChild.className="i",2===a.getElementsByClassName("i").length}),c.getById=ib(function(a){return o.appendChild(a).id=u,!e.getElementsByName||!e.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if(typeof b.getElementById!==C&&p){var c=b.getElementById(a);return c&&c.parentNode?[c]:[]}},d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){var c=typeof a.getAttributeNode!==C&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return typeof b.getElementsByTagName!==C?b.getElementsByTagName(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return typeof b.getElementsByClassName!==C&&p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=$.test(e.querySelectorAll))&&(ib(function(a){a.innerHTML="<select msallowclip=''><option selected=''></option></select>",a.querySelectorAll("[msallowclip^='']").length&&q.push("[*^$]="+M+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+M+"*(?:value|"+L+")"),a.querySelectorAll(":checked").length||q.push(":checked")}),ib(function(a){var 
b=e.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+M+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=$.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&ib(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",Q)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=$.test(o.compareDocumentPosition),t=b||$.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===e||a.ownerDocument===v&&t(v,a)?-1:b===e||b.ownerDocument===v&&t(v,b)?1:k?K.call(k,a)-K.call(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,f=a.parentNode,g=b.parentNode,h=[a],i=[b];if(!f||!g)return a===e?-1:b===e?1:f?-1:g?1:k?K.call(k,a)-K.call(k,b):0;if(f===g)return kb(a,b);c=a;while(c=c.parentNode)h.unshift(c);c=b;while(c=c.parentNode)i.unshift(c);while(h[d]===i[d])d++;return d?kb(h[d],i[d]):h[d]===v?-1:i[d]===v?1:0},e):n},fb.matches=function(a,b){return fb(a,null,null,b)},fb.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(U,"='$1']"),!(!c.matchesSelector||!p||r&&r.test(b)||q&&q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return fb(b,n,null,[a]).length>0},fb.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},fb.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&E.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},fb.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},fb.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=fb.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=fb.selectors={cacheLength:50,createPseudo:hb,match:X,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(cb,db),a[3]=(a[3]||a[4]||a[5]||"").replace(cb,db),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||fb.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&fb.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return 
X.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&V.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(cb,db).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+M+")"+a+"("+M+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||typeof a.getAttribute!==C&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=fb.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h;if(q){if(f){while(p){l=b;while(l=l[p])if(h?l.nodeName.toLowerCase()===r:1===l.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){k=q[u]||(q[u]={}),j=k[a]||[],n=j[0]===w&&j[1],m=j[0]===w&&j[2],l=n&&q.childNodes[n];while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if(1===l.nodeType&&++m&&l===b){k[a]=[w,n,m];break}}else if(s&&(j=(b[u]||(b[u]={}))[a])&&j[0]===w)m=j[1];else while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if((h?l.nodeName.toLowerCase()===r:1===l.nodeType)&&++m&&(s&&((l[u]||(l[u]={}))[a]=[w,m]),l===b))break;return m-=e,m===d||m%d===0&&m/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||fb.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?hb(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=K.call(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:hb(function(a){var b=[],c=[],d=h(a.replace(R,"$1"));return d[u]?hb(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),!c.pop()}}),has:hb(function(a){return function(b){return fb(a,b).length>0}}),contains:hb(function(a){return function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:hb(function(a){return W.test(a||"")||fb.error("unsupported lang: "+a),a=a.replace(cb,db).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Z.test(a.nodeName)},input:function(a){return Y.test(a.nodeName)},button:function(a){var 
b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:nb(function(){return[0]}),last:nb(function(a,b){return[b-1]}),eq:nb(function(a,b,c){return[0>c?c+b:c]}),even:nb(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:nb(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:nb(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:nb(function(a,b,c){for(var d=0>c?c+b:c;++d<b;)a.push(d);return a})}},d.pseudos.nth=d.pseudos.eq;for(b in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})d.pseudos[b]=lb(b);for(b in{submit:!0,reset:!0})d.pseudos[b]=mb(b);function pb(){}pb.prototype=d.filters=d.pseudos,d.setFilters=new pb,g=fb.tokenize=function(a,b){var c,e,f,g,h,i,j,k=z[a+" "];if(k)return b?0:k.slice(0);h=a,i=[],j=d.preFilter;while(h){(!c||(e=S.exec(h)))&&(e&&(h=h.slice(e[0].length)||h),i.push(f=[])),c=!1,(e=T.exec(h))&&(c=e.shift(),f.push({value:c,type:e[0].replace(R," ")}),h=h.slice(c.length));for(g in d.filter)!(e=X[g].exec(h))||j[g]&&!(e=j[g](e))||(c=e.shift(),f.push({value:c,type:g,matches:e}),h=h.slice(c.length));if(!c)break}return b?h.length:h?fb.error(a):z(a,i).slice(0)};function qb(a){for(var b=0,c=a.length,d="";c>b;b++)d+=a[b].value;return d}function rb(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(i=b[u]||(b[u]={}),(h=i[d])&&h[0]===w&&h[1]===f)return j[2]=h[2];if(i[d]=j,j[2]=a(b,c,g))return!0}}}function sb(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function tb(a,b,c){for(var d=0,e=b.length;e>d;d++)fb(a,b[d],c);return c}function ub(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(!c||c(f,d,e))&&(g.push(f),j&&b.push(h));return g}function vb(a,b,c,d,e,f){return d&&!d[u]&&(d=vb(d)),e&&!e[u]&&(e=vb(e,f)),hb(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||tb(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:ub(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=ub(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?K.call(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=ub(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):I.apply(g,r)})}function wb(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=rb(function(a){return a===b},h,!0),l=rb(function(a){return K.call(b,a)>-1},h,!0),m=[function(a,c,d){return!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d))}];f>i;i++)if(c=d.relative[a[i].type])m=[rb(sb(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return vb(i>1&&sb(m),i>1&&qb(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(R,"$1"),c,e>i&&wb(a.slice(i,e)),f>e&&wb(a=a.slice(e)),f>e&&qb(a))}m.push(c)}return sb(m)}function xb(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var 
l,m,o,p=0,q="0",r=f&&[],s=[],t=j,u=f||e&&d.find.TAG("*",k),v=w+=null==t?1:Math.random()||.1,x=u.length;for(k&&(j=g!==n&&g);q!==x&&null!=(l=u[q]);q++){if(e&&l){m=0;while(o=a[m++])if(o(l,g,h)){i.push(l);break}k&&(w=v)}c&&((l=!o&&l)&&p--,f&&r.push(l))}if(p+=q,c&&q!==p){m=0;while(o=b[m++])o(r,s,g,h);if(f){if(p>0)while(q--)r[q]||s[q]||(s[q]=G.call(i));s=ub(s)}I.apply(i,s),k&&!f&&s.length>0&&p+b.length>1&&fb.uniqueSort(i)}return k&&(w=v,j=t),r};return c?hb(f):f}return h=fb.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=wb(b[c]),f[u]?d.push(f):e.push(f);f=A(a,xb(e,d)),f.selector=a}return f},i=fb.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(cb,db),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=X.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(cb,db),ab.test(j[0].type)&&ob(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&qb(j),!a)return I.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,ab.test(a)&&ob(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=ib(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),ib(function(a){return a.innerHTML="<a href='#'></a>","#"===a.firstChild.getAttribute("href")})||jb("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&ib(function(a){return a.innerHTML="<input/>",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||jb("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),ib(function(a){return null==a.getAttribute("disabled")})||jb(L,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),fb}(a);m.find=s,m.expr=s.selectors,m.expr[":"]=m.expr.pseudos,m.unique=s.uniqueSort,m.text=s.getText,m.isXMLDoc=s.isXML,m.contains=s.contains;var t=m.expr.match.needsContext,u=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,v=/^.[^:#\[\.,]*$/;function w(a,b,c){if(m.isFunction(b))return m.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return m.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(v.test(b))return m.filter(b,a,c);b=m.filter(b,a)}return m.grep(a,function(a){return m.inArray(a,b)>=0!==c})}m.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?m.find.matchesSelector(d,a)?[d]:[]:m.find.matches(a,m.grep(b,function(a){return 1===a.nodeType}))},m.fn.extend({find:function(a){var b,c=[],d=this,e=d.length;if("string"!=typeof a)return this.pushStack(m(a).filter(function(){for(b=0;e>b;b++)if(m.contains(d[b],this))return!0}));for(b=0;e>b;b++)m.find(a,d[b],c);return c=this.pushStack(e>1?m.unique(c):c),c.selector=this.selector?this.selector+" "+a:a,c},filter:function(a){return this.pushStack(w(this,a||[],!1))},not:function(a){return this.pushStack(w(this,a||[],!0))},is:function(a){return!!w(this,"string"==typeof a&&t.test(a)?m(a):a||[],!1).length}});var x,y=a.document,z=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,A=m.fn.init=function(a,b){var c,d;if(!a)return this;if("string"==typeof 
a){if(c="<"===a.charAt(0)&&">"===a.charAt(a.length-1)&&a.length>=3?[null,a,null]:z.exec(a),!c||!c[1]&&b)return!b||b.jquery?(b||x).find(a):this.constructor(b).find(a);if(c[1]){if(b=b instanceof m?b[0]:b,m.merge(this,m.parseHTML(c[1],b&&b.nodeType?b.ownerDocument||b:y,!0)),u.test(c[1])&&m.isPlainObject(b))for(c in b)m.isFunction(this[c])?this[c](b[c]):this.attr(c,b[c]);return this}if(d=y.getElementById(c[2]),d&&d.parentNode){if(d.id!==c[2])return x.find(a);this.length=1,this[0]=d}return this.context=y,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):m.isFunction(a)?"undefined"!=typeof x.ready?x.ready(a):a(m):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),m.makeArray(a,this))};A.prototype=m.fn,x=m(y);var B=/^(?:parents|prev(?:Until|All))/,C={children:!0,contents:!0,next:!0,prev:!0};m.extend({dir:function(a,b,c){var d=[],e=a[b];while(e&&9!==e.nodeType&&(void 0===c||1!==e.nodeType||!m(e).is(c)))1===e.nodeType&&d.push(e),e=e[b];return d},sibling:function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c}}),m.fn.extend({has:function(a){var b,c=m(a,this),d=c.length;return this.filter(function(){for(b=0;d>b;b++)if(m.contains(this,c[b]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=t.test(a)||"string"!=typeof a?m(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&m.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?m.unique(f):f)},index:function(a){return a?"string"==typeof a?m.inArray(this[0],m(a)):m.inArray(a.jquery?a[0]:a,this):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(m.unique(m.merge(this.get(),m(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function D(a,b){do a=a[b];while(a&&1!==a.nodeType);return a}m.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return m.dir(a,"parentNode")},parentsUntil:function(a,b,c){return m.dir(a,"parentNode",c)},next:function(a){return D(a,"nextSibling")},prev:function(a){return D(a,"previousSibling")},nextAll:function(a){return m.dir(a,"nextSibling")},prevAll:function(a){return m.dir(a,"previousSibling")},nextUntil:function(a,b,c){return m.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return m.dir(a,"previousSibling",c)},siblings:function(a){return m.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return m.sibling(a.firstChild)},contents:function(a){return m.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:m.merge([],a.childNodes)}},function(a,b){m.fn[a]=function(c,d){var e=m.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=m.filter(d,e)),this.length>1&&(C[a]||(e=m.unique(e)),B.test(a)&&(e=e.reverse())),this.pushStack(e)}});var E=/\S+/g,F={};function G(a){var b=F[a]={};return m.each(a.match(E)||[],function(a,c){b[c]=!0}),b}m.Callbacks=function(a){a="string"==typeof a?F[a]||G(a):m.extend({},a);var b,c,d,e,f,g,h=[],i=!a.once&&[],j=function(l){for(c=a.memory&&l,d=!0,f=g||0,g=0,e=h.length,b=!0;h&&e>f;f++)if(h[f].apply(l[0],l[1])===!1&&a.stopOnFalse){c=!1;break}b=!1,h&&(i?i.length&&j(i.shift()):c?h=[]:k.disable())},k={add:function(){if(h){var d=h.length;!function f(b){m.each(b,function(b,c){var d=m.type(c);"function"===d?a.unique&&k.has(c)||h.push(c):c&&c.length&&"string"!==d&&f(c)})}(arguments),b?e=h.length:c&&(g=d,j(c))}return 
this},remove:function(){return h&&m.each(arguments,function(a,c){var d;while((d=m.inArray(c,h,d))>-1)h.splice(d,1),b&&(e>=d&&e--,f>=d&&f--)}),this},has:function(a){return a?m.inArray(a,h)>-1:!(!h||!h.length)},empty:function(){return h=[],e=0,this},disable:function(){return h=i=c=void 0,this},disabled:function(){return!h},lock:function(){return i=void 0,c||k.disable(),this},locked:function(){return!i},fireWith:function(a,c){return!h||d&&!i||(c=c||[],c=[a,c.slice?c.slice():c],b?i.push(c):j(c)),this},fire:function(){return k.fireWith(this,arguments),this},fired:function(){return!!d}};return k},m.extend({Deferred:function(a){var b=[["resolve","done",m.Callbacks("once memory"),"resolved"],["reject","fail",m.Callbacks("once memory"),"rejected"],["notify","progress",m.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return m.Deferred(function(c){m.each(b,function(b,f){var g=m.isFunction(a[b])&&a[b];e[f[1]](function(){var a=g&&g.apply(this,arguments);a&&m.isFunction(a.promise)?a.promise().done(c.resolve).fail(c.reject).progress(c.notify):c[f[0]+"With"](this===d?c.promise():this,g?[a]:arguments)})}),a=null}).promise()},promise:function(a){return null!=a?m.extend(a,d):d}},e={};return d.pipe=d.then,m.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[1^a][2].disable,b[2][2].lock),e[f[0]]=function(){return e[f[0]+"With"](this===e?d:this,arguments),this},e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=d.call(arguments),e=c.length,f=1!==e||a&&m.isFunction(a.promise)?e:0,g=1===f?a:m.Deferred(),h=function(a,b,c){return function(e){b[a]=this,c[a]=arguments.length>1?d.call(arguments):e,c===i?g.notifyWith(b,c):--f||g.resolveWith(b,c)}},i,j,k;if(e>1)for(i=new Array(e),j=new Array(e),k=new Array(e);e>b;b++)c[b]&&m.isFunction(c[b].promise)?c[b].promise().done(h(b,k,c)).fail(g.reject).progress(h(b,j,i)):--f;return f||g.resolveWith(k,c),g.promise()}});var H;m.fn.ready=function(a){return m.ready.promise().done(a),this},m.extend({isReady:!1,readyWait:1,holdReady:function(a){a?m.readyWait++:m.ready(!0)},ready:function(a){if(a===!0?!--m.readyWait:!m.isReady){if(!y.body)return setTimeout(m.ready);m.isReady=!0,a!==!0&&--m.readyWait>0||(H.resolveWith(y,[m]),m.fn.triggerHandler&&(m(y).triggerHandler("ready"),m(y).off("ready")))}}});function I(){y.addEventListener?(y.removeEventListener("DOMContentLoaded",J,!1),a.removeEventListener("load",J,!1)):(y.detachEvent("onreadystatechange",J),a.detachEvent("onload",J))}function J(){(y.addEventListener||"load"===event.type||"complete"===y.readyState)&&(I(),m.ready())}m.ready.promise=function(b){if(!H)if(H=m.Deferred(),"complete"===y.readyState)setTimeout(m.ready);else if(y.addEventListener)y.addEventListener("DOMContentLoaded",J,!1),a.addEventListener("load",J,!1);else{y.attachEvent("onreadystatechange",J),a.attachEvent("onload",J);var c=!1;try{c=null==a.frameElement&&y.documentElement}catch(d){}c&&c.doScroll&&!function e(){if(!m.isReady){try{c.doScroll("left")}catch(a){return setTimeout(e,50)}I(),m.ready()}}()}return H.promise(b)};var K="undefined",L;for(L in m(k))break;k.ownLast="0"!==L,k.inlineBlockNeedsLayout=!1,m(function(){var a,b,c,d;c=y.getElementsByTagName("body")[0],c&&c.style&&(b=y.createElement("div"),d=y.createElement("div"),d.style.cssText="position:absolute;border:0;width:0;height:0;top:0;left:-9999px",c.appendChild(d).appendChild(b),typeof 
b.style.zoom!==K&&(b.style.cssText="display:inline;margin:0;border:0;padding:1px;width:1px;zoom:1",k.inlineBlockNeedsLayout=a=3===b.offsetWidth,a&&(c.style.zoom=1)),c.removeChild(d))}),function(){var a=y.createElement("div");if(null==k.deleteExpando){k.deleteExpando=!0;try{delete a.test}catch(b){k.deleteExpando=!1}}a=null}(),m.acceptData=function(a){var b=m.noData[(a.nodeName+" ").toLowerCase()],c=+a.nodeType||1;return 1!==c&&9!==c?!1:!b||b!==!0&&a.getAttribute("classid")===b};var M=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,N=/([A-Z])/g;function O(a,b,c){if(void 0===c&&1===a.nodeType){var d="data-"+b.replace(N,"-$1").toLowerCase();if(c=a.getAttribute(d),"string"==typeof c){try{c="true"===c?!0:"false"===c?!1:"null"===c?null:+c+""===c?+c:M.test(c)?m.parseJSON(c):c}catch(e){}m.data(a,b,c)}else c=void 0}return c}function P(a){var b;for(b in a)if(("data"!==b||!m.isEmptyObject(a[b]))&&"toJSON"!==b)return!1;return!0}function Q(a,b,d,e){if(m.acceptData(a)){var f,g,h=m.expando,i=a.nodeType,j=i?m.cache:a,k=i?a[h]:a[h]&&h;
+if(k&&j[k]&&(e||j[k].data)||void 0!==d||"string"!=typeof b)return k||(k=i?a[h]=c.pop()||m.guid++:h),j[k]||(j[k]=i?{}:{toJSON:m.noop}),("object"==typeof b||"function"==typeof b)&&(e?j[k]=m.extend(j[k],b):j[k].data=m.extend(j[k].data,b)),g=j[k],e||(g.data||(g.data={}),g=g.data),void 0!==d&&(g[m.camelCase(b)]=d),"string"==typeof b?(f=g[b],null==f&&(f=g[m.camelCase(b)])):f=g,f}}function R(a,b,c){if(m.acceptData(a)){var d,e,f=a.nodeType,g=f?m.cache:a,h=f?a[m.expando]:m.expando;if(g[h]){if(b&&(d=c?g[h]:g[h].data)){m.isArray(b)?b=b.concat(m.map(b,m.camelCase)):b in d?b=[b]:(b=m.camelCase(b),b=b in d?[b]:b.split(" ")),e=b.length;while(e--)delete d[b[e]];if(c?!P(d):!m.isEmptyObject(d))return}(c||(delete g[h].data,P(g[h])))&&(f?m.cleanData([a],!0):k.deleteExpando||g!=g.window?delete g[h]:g[h]=null)}}}m.extend({cache:{},noData:{"applet ":!0,"embed ":!0,"object ":"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000"},hasData:function(a){return a=a.nodeType?m.cache[a[m.expando]]:a[m.expando],!!a&&!P(a)},data:function(a,b,c){return Q(a,b,c)},removeData:function(a,b){return R(a,b)},_data:function(a,b,c){return Q(a,b,c,!0)},_removeData:function(a,b){return R(a,b,!0)}}),m.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=m.data(f),1===f.nodeType&&!m._data(f,"parsedAttrs"))){c=g.length;while(c--)g[c]&&(d=g[c].name,0===d.indexOf("data-")&&(d=m.camelCase(d.slice(5)),O(f,d,e[d])));m._data(f,"parsedAttrs",!0)}return e}return"object"==typeof a?this.each(function(){m.data(this,a)}):arguments.length>1?this.each(function(){m.data(this,a,b)}):f?O(f,a,m.data(f,a)):void 0},removeData:function(a){return this.each(function(){m.removeData(this,a)})}}),m.extend({queue:function(a,b,c){var d;return a?(b=(b||"fx")+"queue",d=m._data(a,b),c&&(!d||m.isArray(c)?d=m._data(a,b,m.makeArray(c)):d.push(c)),d||[]):void 0},dequeue:function(a,b){b=b||"fx";var c=m.queue(a,b),d=c.length,e=c.shift(),f=m._queueHooks(a,b),g=function(){m.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return m._data(a,c)||m._data(a,c,{empty:m.Callbacks("once memory").add(function(){m._removeData(a,b+"queue"),m._removeData(a,c)})})}}),m.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.length<c?m.queue(this[0],a):void 0===b?this:this.each(function(){var c=m.queue(this,a,b);m._queueHooks(this,a),"fx"===a&&"inprogress"!==c[0]&&m.dequeue(this,a)})},dequeue:function(a){return this.each(function(){m.dequeue(this,a)})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,b){var c,d=1,e=m.Deferred(),f=this,g=this.length,h=function(){--d||e.resolveWith(f,[f])};"string"!=typeof a&&(b=a,a=void 0),a=a||"fx";while(g--)c=m._data(f[g],a+"queueHooks"),c&&c.empty&&(d++,c.empty.add(h));return h(),e.promise(b)}});var S=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,T=["Top","Right","Bottom","Left"],U=function(a,b){return a=b||a,"none"===m.css(a,"display")||!m.contains(a.ownerDocument,a)},V=m.access=function(a,b,c,d,e,f,g){var h=0,i=a.length,j=null==c;if("object"===m.type(c)){e=!0;for(h in c)m.access(a,b,h,c[h],!0,f,g)}else if(void 0!==d&&(e=!0,m.isFunction(d)||(g=!0),j&&(g?(b.call(a,d),b=null):(j=b,b=function(a,b,c){return j.call(m(a),c)})),b))for(;i>h;h++)b(a[h],c,g?d:d.call(a[h],h,b(a[h],c)));return e?a:j?b.call(a):i?b(a[0],c):f},W=/^(?:checkbox|radio)$/i;!function(){var 
a=y.createElement("input"),b=y.createElement("div"),c=y.createDocumentFragment();if(b.innerHTML=" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>",k.leadingWhitespace=3===b.firstChild.nodeType,k.tbody=!b.getElementsByTagName("tbody").length,k.htmlSerialize=!!b.getElementsByTagName("link").length,k.html5Clone="<:nav></:nav>"!==y.createElement("nav").cloneNode(!0).outerHTML,a.type="checkbox",a.checked=!0,c.appendChild(a),k.appendChecked=a.checked,b.innerHTML="<textarea>x</textarea>",k.noCloneChecked=!!b.cloneNode(!0).lastChild.defaultValue,c.appendChild(b),b.innerHTML="<input type='radio' checked='checked' name='t'/>",k.checkClone=b.cloneNode(!0).cloneNode(!0).lastChild.checked,k.noCloneEvent=!0,b.attachEvent&&(b.attachEvent("onclick",function(){k.noCloneEvent=!1}),b.cloneNode(!0).click()),null==k.deleteExpando){k.deleteExpando=!0;try{delete b.test}catch(d){k.deleteExpando=!1}}}(),function(){var b,c,d=y.createElement("div");for(b in{submit:!0,change:!0,focusin:!0})c="on"+b,(k[b+"Bubbles"]=c in a)||(d.setAttribute(c,"t"),k[b+"Bubbles"]=d.attributes[c].expando===!1);d=null}();var X=/^(?:input|select|textarea)$/i,Y=/^key/,Z=/^(?:mouse|pointer|contextmenu)|click/,$=/^(?:focusinfocus|focusoutblur)$/,_=/^([^.]*)(?:\.(.+)|)$/;function ab(){return!0}function bb(){return!1}function cb(){try{return y.activeElement}catch(a){}}m.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,n,o,p,q,r=m._data(a);if(r){c.handler&&(i=c,c=i.handler,e=i.selector),c.guid||(c.guid=m.guid++),(g=r.events)||(g=r.events={}),(k=r.handle)||(k=r.handle=function(a){return typeof m===K||a&&m.event.triggered===a.type?void 0:m.event.dispatch.apply(k.elem,arguments)},k.elem=a),b=(b||"").match(E)||[""],h=b.length;while(h--)f=_.exec(b[h])||[],o=q=f[1],p=(f[2]||"").split(".").sort(),o&&(j=m.event.special[o]||{},o=(e?j.delegateType:j.bindType)||o,j=m.event.special[o]||{},l=m.extend({type:o,origType:q,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&m.expr.match.needsContext.test(e),namespace:p.join(".")},i),(n=g[o])||(n=g[o]=[],n.delegateCount=0,j.setup&&j.setup.call(a,d,p,k)!==!1||(a.addEventListener?a.addEventListener(o,k,!1):a.attachEvent&&a.attachEvent("on"+o,k))),j.add&&(j.add.call(a,l),l.handler.guid||(l.handler.guid=c.guid)),e?n.splice(n.delegateCount++,0,l):n.push(l),m.event.global[o]=!0);a=null}},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,n,o,p,q,r=m.hasData(a)&&m._data(a);if(r&&(k=r.events)){b=(b||"").match(E)||[""],j=b.length;while(j--)if(h=_.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o){l=m.event.special[o]||{},o=(d?l.delegateType:l.bindType)||o,n=k[o]||[],h=h[2]&&new RegExp("(^|\\.)"+p.join("\\.(?:.*\\.|)")+"(\\.|$)"),i=f=n.length;while(f--)g=n[f],!e&&q!==g.origType||c&&c.guid!==g.guid||h&&!h.test(g.namespace)||d&&d!==g.selector&&("**"!==d||!g.selector)||(n.splice(f,1),g.selector&&n.delegateCount--,l.remove&&l.remove.call(a,g));i&&!n.length&&(l.teardown&&l.teardown.call(a,p,r.handle)!==!1||m.removeEvent(a,o,r.handle),delete k[o])}else for(o in k)m.event.remove(a,o+b[j],c,d,!0);m.isEmptyObject(k)&&(delete r.handle,m._removeData(a,"events"))}},trigger:function(b,c,d,e){var f,g,h,i,k,l,n,o=[d||y],p=j.call(b,"type")?b.type:b,q=j.call(b,"namespace")?b.namespace.split("."):[];if(h=l=d=d||y,3!==d.nodeType&&8!==d.nodeType&&!$.test(p+m.event.triggered)&&(p.indexOf(".")>=0&&(q=p.split("."),p=q.shift(),q.sort()),g=p.indexOf(":")<0&&"on"+p,b=b[m.expando]?b:new m.Event(p,"object"==typeof b&&b),b.isTrigger=e?2:3,b.namespace=q.join("."),b.namespace_re=b.namespace?new 
RegExp("(^|\\.)"+q.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,b.result=void 0,b.target||(b.target=d),c=null==c?[b]:m.makeArray(c,[b]),k=m.event.special[p]||{},e||!k.trigger||k.trigger.apply(d,c)!==!1)){if(!e&&!k.noBubble&&!m.isWindow(d)){for(i=k.delegateType||p,$.test(i+p)||(h=h.parentNode);h;h=h.parentNode)o.push(h),l=h;l===(d.ownerDocument||y)&&o.push(l.defaultView||l.parentWindow||a)}n=0;while((h=o[n++])&&!b.isPropagationStopped())b.type=n>1?i:k.bindType||p,f=(m._data(h,"events")||{})[b.type]&&m._data(h,"handle"),f&&f.apply(h,c),f=g&&h[g],f&&f.apply&&m.acceptData(h)&&(b.result=f.apply(h,c),b.result===!1&&b.preventDefault());if(b.type=p,!e&&!b.isDefaultPrevented()&&(!k._default||k._default.apply(o.pop(),c)===!1)&&m.acceptData(d)&&g&&d[p]&&!m.isWindow(d)){l=d[g],l&&(d[g]=null),m.event.triggered=p;try{d[p]()}catch(r){}m.event.triggered=void 0,l&&(d[g]=l)}return b.result}},dispatch:function(a){a=m.event.fix(a);var b,c,e,f,g,h=[],i=d.call(arguments),j=(m._data(this,"events")||{})[a.type]||[],k=m.event.special[a.type]||{};if(i[0]=a,a.delegateTarget=this,!k.preDispatch||k.preDispatch.call(this,a)!==!1){h=m.event.handlers.call(this,a,j),b=0;while((f=h[b++])&&!a.isPropagationStopped()){a.currentTarget=f.elem,g=0;while((e=f.handlers[g++])&&!a.isImmediatePropagationStopped())(!a.namespace_re||a.namespace_re.test(e.namespace))&&(a.handleObj=e,a.data=e.data,c=((m.event.special[e.origType]||{}).handle||e.handler).apply(f.elem,i),void 0!==c&&(a.result=c)===!1&&(a.preventDefault(),a.stopPropagation()))}return k.postDispatch&&k.postDispatch.call(this,a),a.result}},handlers:function(a,b){var c,d,e,f,g=[],h=b.delegateCount,i=a.target;if(h&&i.nodeType&&(!a.button||"click"!==a.type))for(;i!=this;i=i.parentNode||this)if(1===i.nodeType&&(i.disabled!==!0||"click"!==a.type)){for(e=[],f=0;h>f;f++)d=b[f],c=d.selector+" ",void 0===e[c]&&(e[c]=d.needsContext?m(c,this).index(i)>=0:m.find(c,this,null,[i]).length),e[c]&&e.push(d);e.length&&g.push({elem:i,handlers:e})}return h<b.length&&g.push({elem:this,handlers:b.slice(h)}),g},fix:function(a){if(a[m.expando])return a;var b,c,d,e=a.type,f=a,g=this.fixHooks[e];g||(this.fixHooks[e]=g=Z.test(e)?this.mouseHooks:Y.test(e)?this.keyHooks:{}),d=g.props?this.props.concat(g.props):this.props,a=new m.Event(f),b=d.length;while(b--)c=d[b],a[c]=f[c];return a.target||(a.target=f.srcElement||y),3===a.target.nodeType&&(a.target=a.target.parentNode),a.metaKey=!!a.metaKey,g.filter?g.filter(a,f):a},props:"altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(a,b){return null==a.which&&(a.which=null!=b.charCode?b.charCode:b.keyCode),a}},mouseHooks:{props:"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(a,b){var c,d,e,f=b.button,g=b.fromElement;return null==a.pageX&&null!=b.clientX&&(d=a.target.ownerDocument||y,e=d.documentElement,c=d.body,a.pageX=b.clientX+(e&&e.scrollLeft||c&&c.scrollLeft||0)-(e&&e.clientLeft||c&&c.clientLeft||0),a.pageY=b.clientY+(e&&e.scrollTop||c&&c.scrollTop||0)-(e&&e.clientTop||c&&c.clientTop||0)),!a.relatedTarget&&g&&(a.relatedTarget=g===a.target?b.toElement:g),a.which||void 0===f||(a.which=1&f?1:2&f?3:4&f?2:0),a}},special:{load:{noBubble:!0},focus:{trigger:function(){if(this!==cb()&&this.focus)try{return this.focus(),!1}catch(a){}},delegateType:"focusin"},blur:{trigger:function(){return this===cb()&&this.blur?(this.blur(),!1):void 
0},delegateType:"focusout"},click:{trigger:function(){return m.nodeName(this,"input")&&"checkbox"===this.type&&this.click?(this.click(),!1):void 0},_default:function(a){return m.nodeName(a.target,"a")}},beforeunload:{postDispatch:function(a){void 0!==a.result&&a.originalEvent&&(a.originalEvent.returnValue=a.result)}}},simulate:function(a,b,c,d){var e=m.extend(new m.Event,c,{type:a,isSimulated:!0,originalEvent:{}});d?m.event.trigger(e,null,b):m.event.dispatch.call(b,e),e.isDefaultPrevented()&&c.preventDefault()}},m.removeEvent=y.removeEventListener?function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c,!1)}:function(a,b,c){var d="on"+b;a.detachEvent&&(typeof a[d]===K&&(a[d]=null),a.detachEvent(d,c))},m.Event=function(a,b){return this instanceof m.Event?(a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||void 0===a.defaultPrevented&&a.returnValue===!1?ab:bb):this.type=a,b&&m.extend(this,b),this.timeStamp=a&&a.timeStamp||m.now(),void(this[m.expando]=!0)):new m.Event(a,b)},m.Event.prototype={isDefaultPrevented:bb,isPropagationStopped:bb,isImmediatePropagationStopped:bb,preventDefault:function(){var a=this.originalEvent;this.isDefaultPrevented=ab,a&&(a.preventDefault?a.preventDefault():a.returnValue=!1)},stopPropagation:function(){var a=this.originalEvent;this.isPropagationStopped=ab,a&&(a.stopPropagation&&a.stopPropagation(),a.cancelBubble=!0)},stopImmediatePropagation:function(){var a=this.originalEvent;this.isImmediatePropagationStopped=ab,a&&a.stopImmediatePropagation&&a.stopImmediatePropagation(),this.stopPropagation()}},m.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(a,b){m.event.special[a]={delegateType:b,bindType:b,handle:function(a){var c,d=this,e=a.relatedTarget,f=a.handleObj;return(!e||e!==d&&!m.contains(d,e))&&(a.type=f.origType,c=f.handler.apply(this,arguments),a.type=b),c}}}),k.submitBubbles||(m.event.special.submit={setup:function(){return m.nodeName(this,"form")?!1:void m.event.add(this,"click._submit keypress._submit",function(a){var b=a.target,c=m.nodeName(b,"input")||m.nodeName(b,"button")?b.form:void 0;c&&!m._data(c,"submitBubbles")&&(m.event.add(c,"submit._submit",function(a){a._submit_bubble=!0}),m._data(c,"submitBubbles",!0))})},postDispatch:function(a){a._submit_bubble&&(delete a._submit_bubble,this.parentNode&&!a.isTrigger&&m.event.simulate("submit",this.parentNode,a,!0))},teardown:function(){return m.nodeName(this,"form")?!1:void m.event.remove(this,"._submit")}}),k.changeBubbles||(m.event.special.change={setup:function(){return X.test(this.nodeName)?(("checkbox"===this.type||"radio"===this.type)&&(m.event.add(this,"propertychange._change",function(a){"checked"===a.originalEvent.propertyName&&(this._just_changed=!0)}),m.event.add(this,"click._change",function(a){this._just_changed&&!a.isTrigger&&(this._just_changed=!1),m.event.simulate("change",this,a,!0)})),!1):void m.event.add(this,"beforeactivate._change",function(a){var b=a.target;X.test(b.nodeName)&&!m._data(b,"changeBubbles")&&(m.event.add(b,"change._change",function(a){!this.parentNode||a.isSimulated||a.isTrigger||m.event.simulate("change",this.parentNode,a,!0)}),m._data(b,"changeBubbles",!0))})},handle:function(a){var b=a.target;return this!==b||a.isSimulated||a.isTrigger||"radio"!==b.type&&"checkbox"!==b.type?a.handleObj.handler.apply(this,arguments):void 0},teardown:function(){return 
m.event.remove(this,"._change"),!X.test(this.nodeName)}}),k.focusinBubbles||m.each({focus:"focusin",blur:"focusout"},function(a,b){var c=function(a){m.event.simulate(b,a.target,m.event.fix(a),!0)};m.event.special[b]={setup:function(){var d=this.ownerDocument||this,e=m._data(d,b);e||d.addEventListener(a,c,!0),m._data(d,b,(e||0)+1)},teardown:function(){var d=this.ownerDocument||this,e=m._data(d,b)-1;e?m._data(d,b,e):(d.removeEventListener(a,c,!0),m._removeData(d,b))}}}),m.fn.extend({on:function(a,b,c,d,e){var f,g;if("object"==typeof a){"string"!=typeof b&&(c=c||b,b=void 0);for(f in a)this.on(f,b,c,a[f],e);return this}if(null==c&&null==d?(d=b,c=b=void 0):null==d&&("string"==typeof b?(d=c,c=void 0):(d=c,c=b,b=void 0)),d===!1)d=bb;else if(!d)return this;return 1===e&&(g=d,d=function(a){return m().off(a),g.apply(this,arguments)},d.guid=g.guid||(g.guid=m.guid++)),this.each(function(){m.event.add(this,a,d,c,b)})},one:function(a,b,c,d){return this.on(a,b,c,d,1)},off:function(a,b,c){var d,e;if(a&&a.preventDefault&&a.handleObj)return d=a.handleObj,m(a.delegateTarget).off(d.namespace?d.origType+"."+d.namespace:d.origType,d.selector,d.handler),this;if("object"==typeof a){for(e in a)this.off(e,b,a[e]);return this}return(b===!1||"function"==typeof b)&&(c=b,b=void 0),c===!1&&(c=bb),this.each(function(){m.event.remove(this,a,c,b)})},trigger:function(a,b){return this.each(function(){m.event.trigger(a,b,this)})},triggerHandler:function(a,b){var c=this[0];return c?m.event.trigger(a,b,c,!0):void 0}});function db(a){var b=eb.split("|"),c=a.createDocumentFragment();if(c.createElement)while(b.length)c.createElement(b.pop());return c}var eb="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",fb=/ jQuery\d+="(?:null|\d+)"/g,gb=new RegExp("<(?:"+eb+")[\\s/>]","i"),hb=/^\s+/,ib=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,jb=/<([\w:]+)/,kb=/<tbody/i,lb=/<|&#?\w+;/,mb=/<(?:script|style|link)/i,nb=/checked\s*(?:[^=]|=\s*.checked.)/i,ob=/^$|\/(?:java|ecma)script/i,pb=/^true\/(.*)/,qb=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,rb={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],area:[1,"<map>","</map>"],param:[1,"<object>","</object>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:k.htmlSerialize?[0,"",""]:[1,"X<div>","</div>"]},sb=db(y),tb=sb.appendChild(y.createElement("div"));rb.optgroup=rb.option,rb.tbody=rb.tfoot=rb.colgroup=rb.caption=rb.thead,rb.th=rb.td;function ub(a,b){var c,d,e=0,f=typeof a.getElementsByTagName!==K?a.getElementsByTagName(b||"*"):typeof a.querySelectorAll!==K?a.querySelectorAll(b||"*"):void 0;if(!f)for(f=[],c=a.childNodes||a;null!=(d=c[e]);e++)!b||m.nodeName(d,b)?f.push(d):m.merge(f,ub(d,b));return void 0===b||b&&m.nodeName(a,b)?m.merge([a],f):f}function vb(a){W.test(a.type)&&(a.defaultChecked=a.checked)}function wb(a,b){return m.nodeName(a,"table")&&m.nodeName(11!==b.nodeType?b:b.firstChild,"tr")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function xb(a){return a.type=(null!==m.find.attr(a,"type"))+"/"+a.type,a}function yb(a){var b=pb.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function zb(a,b){for(var c,d=0;null!=(c=a[d]);d++)m._data(c,"globalEval",!b||m._data(b[d],"globalEval"))}function 
Ab(a,b){if(1===b.nodeType&&m.hasData(a)){var c,d,e,f=m._data(a),g=m._data(b,f),h=f.events;if(h){delete g.handle,g.events={};for(c in h)for(d=0,e=h[c].length;e>d;d++)m.event.add(b,c,h[c][d])}g.data&&(g.data=m.extend({},g.data))}}function Bb(a,b){var c,d,e;if(1===b.nodeType){if(c=b.nodeName.toLowerCase(),!k.noCloneEvent&&b[m.expando]){e=m._data(b);for(d in e.events)m.removeEvent(b,d,e.handle);b.removeAttribute(m.expando)}"script"===c&&b.text!==a.text?(xb(b).text=a.text,yb(b)):"object"===c?(b.parentNode&&(b.outerHTML=a.outerHTML),k.html5Clone&&a.innerHTML&&!m.trim(b.innerHTML)&&(b.innerHTML=a.innerHTML)):"input"===c&&W.test(a.type)?(b.defaultChecked=b.checked=a.checked,b.value!==a.value&&(b.value=a.value)):"option"===c?b.defaultSelected=b.selected=a.defaultSelected:("input"===c||"textarea"===c)&&(b.defaultValue=a.defaultValue)}}m.extend({clone:function(a,b,c){var d,e,f,g,h,i=m.contains(a.ownerDocument,a);if(k.html5Clone||m.isXMLDoc(a)||!gb.test("<"+a.nodeName+">")?f=a.cloneNode(!0):(tb.innerHTML=a.outerHTML,tb.removeChild(f=tb.firstChild)),!(k.noCloneEvent&&k.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||m.isXMLDoc(a)))for(d=ub(f),h=ub(a),g=0;null!=(e=h[g]);++g)d[g]&&Bb(e,d[g]);if(b)if(c)for(h=h||ub(a),d=d||ub(f),g=0;null!=(e=h[g]);g++)Ab(e,d[g]);else Ab(a,f);return d=ub(f,"script"),d.length>0&&zb(d,!i&&ub(a,"script")),d=h=e=null,f},buildFragment:function(a,b,c,d){for(var e,f,g,h,i,j,l,n=a.length,o=db(b),p=[],q=0;n>q;q++)if(f=a[q],f||0===f)if("object"===m.type(f))m.merge(p,f.nodeType?[f]:f);else if(lb.test(f)){h=h||o.appendChild(b.createElement("div")),i=(jb.exec(f)||["",""])[1].toLowerCase(),l=rb[i]||rb._default,h.innerHTML=l[1]+f.replace(ib,"<$1></$2>")+l[2],e=l[0];while(e--)h=h.lastChild;if(!k.leadingWhitespace&&hb.test(f)&&p.push(b.createTextNode(hb.exec(f)[0])),!k.tbody){f="table"!==i||kb.test(f)?"<table>"!==l[1]||kb.test(f)?0:h:h.firstChild,e=f&&f.childNodes.length;while(e--)m.nodeName(j=f.childNodes[e],"tbody")&&!j.childNodes.length&&f.removeChild(j)}m.merge(p,h.childNodes),h.textContent="";while(h.firstChild)h.removeChild(h.firstChild);h=o.lastChild}else p.push(b.createTextNode(f));h&&o.removeChild(h),k.appendChecked||m.grep(ub(p,"input"),vb),q=0;while(f=p[q++])if((!d||-1===m.inArray(f,d))&&(g=m.contains(f.ownerDocument,f),h=ub(o.appendChild(f),"script"),g&&zb(h),c)){e=0;while(f=h[e++])ob.test(f.type||"")&&c.push(f)}return h=null,o},cleanData:function(a,b){for(var d,e,f,g,h=0,i=m.expando,j=m.cache,l=k.deleteExpando,n=m.event.special;null!=(d=a[h]);h++)if((b||m.acceptData(d))&&(f=d[i],g=f&&j[f])){if(g.events)for(e in g.events)n[e]?m.event.remove(d,e):m.removeEvent(d,e,g.handle);j[f]&&(delete j[f],l?delete d[i]:typeof d.removeAttribute!==K?d.removeAttribute(i):d[i]=null,c.push(f))}}}),m.fn.extend({text:function(a){return V(this,function(a){return void 0===a?m.text(this):this.empty().append((this[0]&&this[0].ownerDocument||y).createTextNode(a))},null,a,arguments.length)},append:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=wb(this,a);b.appendChild(a)}})},prepend:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=wb(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},remove:function(a,b){for(var 
c,d=a?m.filter(a,this):this,e=0;null!=(c=d[e]);e++)b||1!==c.nodeType||m.cleanData(ub(c)),c.parentNode&&(b&&m.contains(c.ownerDocument,c)&&zb(ub(c,"script")),c.parentNode.removeChild(c));return this},empty:function(){for(var a,b=0;null!=(a=this[b]);b++){1===a.nodeType&&m.cleanData(ub(a,!1));while(a.firstChild)a.removeChild(a.firstChild);a.options&&m.nodeName(a,"select")&&(a.options.length=0)}return this},clone:function(a,b){return a=null==a?!1:a,b=null==b?a:b,this.map(function(){return m.clone(this,a,b)})},html:function(a){return V(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a)return 1===b.nodeType?b.innerHTML.replace(fb,""):void 0;if(!("string"!=typeof a||mb.test(a)||!k.htmlSerialize&&gb.test(a)||!k.leadingWhitespace&&hb.test(a)||rb[(jb.exec(a)||["",""])[1].toLowerCase()])){a=a.replace(ib,"<$1></$2>");try{for(;d>c;c++)b=this[c]||{},1===b.nodeType&&(m.cleanData(ub(b,!1)),b.innerHTML=a);b=0}catch(e){}}b&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(){var a=arguments[0];return this.domManip(arguments,function(b){a=this.parentNode,m.cleanData(ub(this)),a&&a.replaceChild(b,this)}),a&&(a.length||a.nodeType)?this:this.remove()},detach:function(a){return this.remove(a,!0)},domManip:function(a,b){a=e.apply([],a);var c,d,f,g,h,i,j=0,l=this.length,n=this,o=l-1,p=a[0],q=m.isFunction(p);if(q||l>1&&"string"==typeof p&&!k.checkClone&&nb.test(p))return this.each(function(c){var d=n.eq(c);q&&(a[0]=p.call(this,c,d.html())),d.domManip(a,b)});if(l&&(i=m.buildFragment(a,this[0].ownerDocument,!1,this),c=i.firstChild,1===i.childNodes.length&&(i=c),c)){for(g=m.map(ub(i,"script"),xb),f=g.length;l>j;j++)d=i,j!==o&&(d=m.clone(d,!0,!0),f&&m.merge(g,ub(d,"script"))),b.call(this[j],d,j);if(f)for(h=g[g.length-1].ownerDocument,m.map(g,yb),j=0;f>j;j++)d=g[j],ob.test(d.type||"")&&!m._data(d,"globalEval")&&m.contains(h,d)&&(d.src?m._evalUrl&&m._evalUrl(d.src):m.globalEval((d.text||d.textContent||d.innerHTML||"").replace(qb,"")));i=c=null}return this}}),m.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){m.fn[a]=function(a){for(var c,d=0,e=[],g=m(a),h=g.length-1;h>=d;d++)c=d===h?this:this.clone(!0),m(g[d])[b](c),f.apply(e,c.get());return this.pushStack(e)}});var Cb,Db={};function Eb(b,c){var d,e=m(c.createElement(b)).appendTo(c.body),f=a.getDefaultComputedStyle&&(d=a.getDefaultComputedStyle(e[0]))?d.display:m.css(e[0],"display");return e.detach(),f}function Fb(a){var b=y,c=Db[a];return c||(c=Eb(a,b),"none"!==c&&c||(Cb=(Cb||m("<iframe frameborder='0' width='0' height='0'/>")).appendTo(b.documentElement),b=(Cb[0].contentWindow||Cb[0].contentDocument).document,b.write(),b.close(),c=Eb(a,b),Cb.detach()),Db[a]=c),c}!function(){var a;k.shrinkWrapBlocks=function(){if(null!=a)return a;a=!1;var b,c,d;return c=y.getElementsByTagName("body")[0],c&&c.style?(b=y.createElement("div"),d=y.createElement("div"),d.style.cssText="position:absolute;border:0;width:0;height:0;top:0;left:-9999px",c.appendChild(d).appendChild(b),typeof b.style.zoom!==K&&(b.style.cssText="-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;display:block;margin:0;border:0;padding:1px;width:1px;zoom:1",b.appendChild(y.createElement("div")).style.width="5px",a=3!==b.offsetWidth),c.removeChild(d),a):void 0}}();var Gb=/^margin/,Hb=new RegExp("^("+S+")(?!px)[a-z%]+$","i"),Ib,Jb,Kb=/^(top|right|bottom|left)$/;a.getComputedStyle?(Ib=function(a){return 
a.ownerDocument.defaultView.getComputedStyle(a,null)},Jb=function(a,b,c){var d,e,f,g,h=a.style;return c=c||Ib(a),g=c?c.getPropertyValue(b)||c[b]:void 0,c&&(""!==g||m.contains(a.ownerDocument,a)||(g=m.style(a,b)),Hb.test(g)&&Gb.test(b)&&(d=h.width,e=h.minWidth,f=h.maxWidth,h.minWidth=h.maxWidth=h.width=g,g=c.width,h.width=d,h.minWidth=e,h.maxWidth=f)),void 0===g?g:g+""}):y.documentElement.currentStyle&&(Ib=function(a){return a.currentStyle},Jb=function(a,b,c){var d,e,f,g,h=a.style;return c=c||Ib(a),g=c?c[b]:void 0,null==g&&h&&h[b]&&(g=h[b]),Hb.test(g)&&!Kb.test(b)&&(d=h.left,e=a.runtimeStyle,f=e&&e.left,f&&(e.left=a.currentStyle.left),h.left="fontSize"===b?"1em":g,g=h.pixelLeft+"px",h.left=d,f&&(e.left=f)),void 0===g?g:g+""||"auto"});function Lb(a,b){return{get:function(){var c=a();if(null!=c)return c?void delete this.get:(this.get=b).apply(this,arguments)}}}!function(){var b,c,d,e,f,g,h;if(b=y.createElement("div"),b.innerHTML=" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>",d=b.getElementsByTagName("a")[0],c=d&&d.style){c.cssText="float:left;opacity:.5",k.opacity="0.5"===c.opacity,k.cssFloat=!!c.cssFloat,b.style.backgroundClip="content-box",b.cloneNode(!0).style.backgroundClip="",k.clearCloneStyle="content-box"===b.style.backgroundClip,k.boxSizing=""===c.boxSizing||""===c.MozBoxSizing||""===c.WebkitBoxSizing,m.extend(k,{reliableHiddenOffsets:function(){return null==g&&i(),g},boxSizingReliable:function(){return null==f&&i(),f},pixelPosition:function(){return null==e&&i(),e},reliableMarginRight:function(){return null==h&&i(),h}});function i(){var b,c,d,i;c=y.getElementsByTagName("body")[0],c&&c.style&&(b=y.createElement("div"),d=y.createElement("div"),d.style.cssText="position:absolute;border:0;width:0;height:0;top:0;left:-9999px",c.appendChild(d).appendChild(b),b.style.cssText="-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;display:block;margin-top:1%;top:1%;border:1px;padding:1px;width:4px;position:absolute",e=f=!1,h=!0,a.getComputedStyle&&(e="1%"!==(a.getComputedStyle(b,null)||{}).top,f="4px"===(a.getComputedStyle(b,null)||{width:"4px"}).width,i=b.appendChild(y.createElement("div")),i.style.cssText=b.style.cssText="-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;display:block;margin:0;border:0;padding:0",i.style.marginRight=i.style.width="0",b.style.width="1px",h=!parseFloat((a.getComputedStyle(i,null)||{}).marginRight)),b.innerHTML="<table><tr><td></td><td>t</td></tr></table>",i=b.getElementsByTagName("td"),i[0].style.cssText="margin:0;border:0;padding:0;display:none",g=0===i[0].offsetHeight,g&&(i[0].style.display="",i[1].style.display="none",g=0===i[0].offsetHeight),c.removeChild(d))}}}(),m.swap=function(a,b,c,d){var e,f,g={};for(f in b)g[f]=a.style[f],a.style[f]=b[f];e=c.apply(a,d||[]);for(f in b)a.style[f]=g[f];return e};var Mb=/alpha\([^)]*\)/i,Nb=/opacity\s*=\s*([^)]*)/,Ob=/^(none|table(?!-c[ea]).+)/,Pb=new RegExp("^("+S+")(.*)$","i"),Qb=new RegExp("^([+-])=("+S+")","i"),Rb={position:"absolute",visibility:"hidden",display:"block"},Sb={letterSpacing:"0",fontWeight:"400"},Tb=["Webkit","O","Moz","ms"];function Ub(a,b){if(b in a)return b;var c=b.charAt(0).toUpperCase()+b.slice(1),d=b,e=Tb.length;while(e--)if(b=Tb[e]+c,b in a)return b;return d}function Vb(a,b){for(var 
c,d,e,f=[],g=0,h=a.length;h>g;g++)d=a[g],d.style&&(f[g]=m._data(d,"olddisplay"),c=d.style.display,b?(f[g]||"none"!==c||(d.style.display=""),""===d.style.display&&U(d)&&(f[g]=m._data(d,"olddisplay",Fb(d.nodeName)))):(e=U(d),(c&&"none"!==c||!e)&&m._data(d,"olddisplay",e?c:m.css(d,"display"))));for(g=0;h>g;g++)d=a[g],d.style&&(b&&"none"!==d.style.display&&""!==d.style.display||(d.style.display=b?f[g]||"":"none"));return a}function Wb(a,b,c){var d=Pb.exec(b);return d?Math.max(0,d[1]-(c||0))+(d[2]||"px"):b}function Xb(a,b,c,d,e){for(var f=c===(d?"border":"content")?4:"width"===b?1:0,g=0;4>f;f+=2)"margin"===c&&(g+=m.css(a,c+T[f],!0,e)),d?("content"===c&&(g-=m.css(a,"padding"+T[f],!0,e)),"margin"!==c&&(g-=m.css(a,"border"+T[f]+"Width",!0,e))):(g+=m.css(a,"padding"+T[f],!0,e),"padding"!==c&&(g+=m.css(a,"border"+T[f]+"Width",!0,e)));return g}function Yb(a,b,c){var d=!0,e="width"===b?a.offsetWidth:a.offsetHeight,f=Ib(a),g=k.boxSizing&&"border-box"===m.css(a,"boxSizing",!1,f);if(0>=e||null==e){if(e=Jb(a,b,f),(0>e||null==e)&&(e=a.style[b]),Hb.test(e))return e;d=g&&(k.boxSizingReliable()||e===a.style[b]),e=parseFloat(e)||0}return e+Xb(a,b,c||(g?"border":"content"),d,f)+"px"}m.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=Jb(a,"opacity");return""===c?"1":c}}}},cssNumber:{columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":k.cssFloat?"cssFloat":"styleFloat"},style:function(a,b,c,d){if(a&&3!==a.nodeType&&8!==a.nodeType&&a.style){var e,f,g,h=m.camelCase(b),i=a.style;if(b=m.cssProps[h]||(m.cssProps[h]=Ub(i,h)),g=m.cssHooks[b]||m.cssHooks[h],void 0===c)return g&&"get"in g&&void 0!==(e=g.get(a,!1,d))?e:i[b];if(f=typeof c,"string"===f&&(e=Qb.exec(c))&&(c=(e[1]+1)*e[2]+parseFloat(m.css(a,b)),f="number"),null!=c&&c===c&&("number"!==f||m.cssNumber[h]||(c+="px"),k.clearCloneStyle||""!==c||0!==b.indexOf("background")||(i[b]="inherit"),!(g&&"set"in g&&void 0===(c=g.set(a,c,d)))))try{i[b]=c}catch(j){}}},css:function(a,b,c,d){var e,f,g,h=m.camelCase(b);return b=m.cssProps[h]||(m.cssProps[h]=Ub(a.style,h)),g=m.cssHooks[b]||m.cssHooks[h],g&&"get"in g&&(f=g.get(a,!0,c)),void 0===f&&(f=Jb(a,b,d)),"normal"===f&&b in Sb&&(f=Sb[b]),""===c||c?(e=parseFloat(f),c===!0||m.isNumeric(e)?e||0:f):f}}),m.each(["height","width"],function(a,b){m.cssHooks[b]={get:function(a,c,d){return c?Ob.test(m.css(a,"display"))&&0===a.offsetWidth?m.swap(a,Rb,function(){return Yb(a,b,d)}):Yb(a,b,d):void 0},set:function(a,c,d){var e=d&&Ib(a);return Wb(a,c,d?Xb(a,b,d,k.boxSizing&&"border-box"===m.css(a,"boxSizing",!1,e),e):0)}}}),k.opacity||(m.cssHooks.opacity={get:function(a,b){return Nb.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":b?"1":""},set:function(a,b){var c=a.style,d=a.currentStyle,e=m.isNumeric(b)?"alpha(opacity="+100*b+")":"",f=d&&d.filter||c.filter||"";c.zoom=1,(b>=1||""===b)&&""===m.trim(f.replace(Mb,""))&&c.removeAttribute&&(c.removeAttribute("filter"),""===b||d&&!d.filter)||(c.filter=Mb.test(f)?f.replace(Mb,e):f+" "+e)}}),m.cssHooks.marginRight=Lb(k.reliableMarginRight,function(a,b){return b?m.swap(a,{display:"inline-block"},Jb,[a,"marginRight"]):void 0}),m.each({margin:"",padding:"",border:"Width"},function(a,b){m.cssHooks[a+b]={expand:function(c){for(var d=0,e={},f="string"==typeof c?c.split(" "):[c];4>d;d++)e[a+T[d]+b]=f[d]||f[d-2]||f[0];return e}},Gb.test(a)||(m.cssHooks[a+b].set=Wb)}),m.fn.extend({css:function(a,b){return V(this,function(a,b,c){var 
d,e,f={},g=0;if(m.isArray(b)){for(d=Ib(a),e=b.length;e>g;g++)f[b[g]]=m.css(a,b[g],!1,d);return f}return void 0!==c?m.style(a,b,c):m.css(a,b)},a,b,arguments.length>1)},show:function(){return Vb(this,!0)},hide:function(){return Vb(this)},toggle:function(a){return"boolean"==typeof a?a?this.show():this.hide():this.each(function(){U(this)?m(this).show():m(this).hide()})}});function Zb(a,b,c,d,e){return new Zb.prototype.init(a,b,c,d,e)}m.Tween=Zb,Zb.prototype={constructor:Zb,init:function(a,b,c,d,e,f){this.elem=a,this.prop=c,this.easing=e||"swing",this.options=b,this.start=this.now=this.cur(),this.end=d,this.unit=f||(m.cssNumber[c]?"":"px")
+},cur:function(){var a=Zb.propHooks[this.prop];return a&&a.get?a.get(this):Zb.propHooks._default.get(this)},run:function(a){var b,c=Zb.propHooks[this.prop];return this.pos=b=this.options.duration?m.easing[this.easing](a,this.options.duration*a,0,1,this.options.duration):a,this.now=(this.end-this.start)*b+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),c&&c.set?c.set(this):Zb.propHooks._default.set(this),this}},Zb.prototype.init.prototype=Zb.prototype,Zb.propHooks={_default:{get:function(a){var b;return null==a.elem[a.prop]||a.elem.style&&null!=a.elem.style[a.prop]?(b=m.css(a.elem,a.prop,""),b&&"auto"!==b?b:0):a.elem[a.prop]},set:function(a){m.fx.step[a.prop]?m.fx.step[a.prop](a):a.elem.style&&(null!=a.elem.style[m.cssProps[a.prop]]||m.cssHooks[a.prop])?m.style(a.elem,a.prop,a.now+a.unit):a.elem[a.prop]=a.now}}},Zb.propHooks.scrollTop=Zb.propHooks.scrollLeft={set:function(a){a.elem.nodeType&&a.elem.parentNode&&(a.elem[a.prop]=a.now)}},m.easing={linear:function(a){return a},swing:function(a){return.5-Math.cos(a*Math.PI)/2}},m.fx=Zb.prototype.init,m.fx.step={};var $b,_b,ac=/^(?:toggle|show|hide)$/,bc=new RegExp("^(?:([+-])=|)("+S+")([a-z%]*)$","i"),cc=/queueHooks$/,dc=[ic],ec={"*":[function(a,b){var c=this.createTween(a,b),d=c.cur(),e=bc.exec(b),f=e&&e[3]||(m.cssNumber[a]?"":"px"),g=(m.cssNumber[a]||"px"!==f&&+d)&&bc.exec(m.css(c.elem,a)),h=1,i=20;if(g&&g[3]!==f){f=f||g[3],e=e||[],g=+d||1;do h=h||".5",g/=h,m.style(c.elem,a,g+f);while(h!==(h=c.cur()/d)&&1!==h&&--i)}return e&&(g=c.start=+g||+d||0,c.unit=f,c.end=e[1]?g+(e[1]+1)*e[2]:+e[2]),c}]};function fc(){return setTimeout(function(){$b=void 0}),$b=m.now()}function gc(a,b){var c,d={height:a},e=0;for(b=b?1:0;4>e;e+=2-b)c=T[e],d["margin"+c]=d["padding"+c]=a;return b&&(d.opacity=d.width=a),d}function hc(a,b,c){for(var d,e=(ec[b]||[]).concat(ec["*"]),f=0,g=e.length;g>f;f++)if(d=e[f].call(c,b,a))return d}function ic(a,b,c){var d,e,f,g,h,i,j,l,n=this,o={},p=a.style,q=a.nodeType&&U(a),r=m._data(a,"fxshow");c.queue||(h=m._queueHooks(a,"fx"),null==h.unqueued&&(h.unqueued=0,i=h.empty.fire,h.empty.fire=function(){h.unqueued||i()}),h.unqueued++,n.always(function(){n.always(function(){h.unqueued--,m.queue(a,"fx").length||h.empty.fire()})})),1===a.nodeType&&("height"in b||"width"in b)&&(c.overflow=[p.overflow,p.overflowX,p.overflowY],j=m.css(a,"display"),l="none"===j?m._data(a,"olddisplay")||Fb(a.nodeName):j,"inline"===l&&"none"===m.css(a,"float")&&(k.inlineBlockNeedsLayout&&"inline"!==Fb(a.nodeName)?p.zoom=1:p.display="inline-block")),c.overflow&&(p.overflow="hidden",k.shrinkWrapBlocks()||n.always(function(){p.overflow=c.overflow[0],p.overflowX=c.overflow[1],p.overflowY=c.overflow[2]}));for(d in b)if(e=b[d],ac.exec(e)){if(delete b[d],f=f||"toggle"===e,e===(q?"hide":"show")){if("show"!==e||!r||void 0===r[d])continue;q=!0}o[d]=r&&r[d]||m.style(a,d)}else j=void 0;if(m.isEmptyObject(o))"inline"===("none"===j?Fb(a.nodeName):j)&&(p.display=j);else{r?"hidden"in r&&(q=r.hidden):r=m._data(a,"fxshow",{}),f&&(r.hidden=!q),q?m(a).show():n.done(function(){m(a).hide()}),n.done(function(){var b;m._removeData(a,"fxshow");for(b in o)m.style(a,b,o[b])});for(d in o)g=hc(q?r[d]:0,d,n),d in r||(r[d]=g.start,q&&(g.end=g.start,g.start="width"===d||"height"===d?1:0))}}function jc(a,b){var c,d,e,f,g;for(c in a)if(d=m.camelCase(c),e=b[d],f=a[c],m.isArray(f)&&(e=f[1],f=a[c]=f[0]),c!==d&&(a[d]=f,delete a[c]),g=m.cssHooks[d],g&&"expand"in g){f=g.expand(f),delete a[d];for(c in f)c in a||(a[c]=f[c],b[c]=e)}else b[d]=e}function kc(a,b,c){var 
d,e,f=0,g=dc.length,h=m.Deferred().always(function(){delete i.elem}),i=function(){if(e)return!1;for(var b=$b||fc(),c=Math.max(0,j.startTime+j.duration-b),d=c/j.duration||0,f=1-d,g=0,i=j.tweens.length;i>g;g++)j.tweens[g].run(f);return h.notifyWith(a,[j,f,c]),1>f&&i?c:(h.resolveWith(a,[j]),!1)},j=h.promise({elem:a,props:m.extend({},b),opts:m.extend(!0,{specialEasing:{}},c),originalProperties:b,originalOptions:c,startTime:$b||fc(),duration:c.duration,tweens:[],createTween:function(b,c){var d=m.Tween(a,j.opts,b,c,j.opts.specialEasing[b]||j.opts.easing);return j.tweens.push(d),d},stop:function(b){var c=0,d=b?j.tweens.length:0;if(e)return this;for(e=!0;d>c;c++)j.tweens[c].run(1);return b?h.resolveWith(a,[j,b]):h.rejectWith(a,[j,b]),this}}),k=j.props;for(jc(k,j.opts.specialEasing);g>f;f++)if(d=dc[f].call(j,a,k,j.opts))return d;return m.map(k,hc,j),m.isFunction(j.opts.start)&&j.opts.start.call(a,j),m.fx.timer(m.extend(i,{elem:a,anim:j,queue:j.opts.queue})),j.progress(j.opts.progress).done(j.opts.done,j.opts.complete).fail(j.opts.fail).always(j.opts.always)}m.Animation=m.extend(kc,{tweener:function(a,b){m.isFunction(a)?(b=a,a=["*"]):a=a.split(" ");for(var c,d=0,e=a.length;e>d;d++)c=a[d],ec[c]=ec[c]||[],ec[c].unshift(b)},prefilter:function(a,b){b?dc.unshift(a):dc.push(a)}}),m.speed=function(a,b,c){var d=a&&"object"==typeof a?m.extend({},a):{complete:c||!c&&b||m.isFunction(a)&&a,duration:a,easing:c&&b||b&&!m.isFunction(b)&&b};return d.duration=m.fx.off?0:"number"==typeof d.duration?d.duration:d.duration in m.fx.speeds?m.fx.speeds[d.duration]:m.fx.speeds._default,(null==d.queue||d.queue===!0)&&(d.queue="fx"),d.old=d.complete,d.complete=function(){m.isFunction(d.old)&&d.old.call(this),d.queue&&m.dequeue(this,d.queue)},d},m.fn.extend({fadeTo:function(a,b,c,d){return this.filter(U).css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){var e=m.isEmptyObject(a),f=m.speed(b,c,d),g=function(){var b=kc(this,m.extend({},a),f);(e||m._data(this,"finish"))&&b.stop(!0)};return g.finish=g,e||f.queue===!1?this.each(g):this.queue(f.queue,g)},stop:function(a,b,c){var d=function(a){var b=a.stop;delete a.stop,b(c)};return"string"!=typeof a&&(c=b,b=a,a=void 0),b&&a!==!1&&this.queue(a||"fx",[]),this.each(function(){var b=!0,e=null!=a&&a+"queueHooks",f=m.timers,g=m._data(this);if(e)g[e]&&g[e].stop&&d(g[e]);else for(e in g)g[e]&&g[e].stop&&cc.test(e)&&d(g[e]);for(e=f.length;e--;)f[e].elem!==this||null!=a&&f[e].queue!==a||(f[e].anim.stop(c),b=!1,f.splice(e,1));(b||!c)&&m.dequeue(this,a)})},finish:function(a){return a!==!1&&(a=a||"fx"),this.each(function(){var b,c=m._data(this),d=c[a+"queue"],e=c[a+"queueHooks"],f=m.timers,g=d?d.length:0;for(c.finish=!0,m.queue(this,a,[]),e&&e.stop&&e.stop.call(this,!0),b=f.length;b--;)f[b].elem===this&&f[b].queue===a&&(f[b].anim.stop(!0),f.splice(b,1));for(b=0;g>b;b++)d[b]&&d[b].finish&&d[b].finish.call(this);delete c.finish})}}),m.each(["toggle","show","hide"],function(a,b){var c=m.fn[b];m.fn[b]=function(a,d,e){return null==a||"boolean"==typeof a?c.apply(this,arguments):this.animate(gc(b,!0),a,d,e)}}),m.each({slideDown:gc("show"),slideUp:gc("hide"),slideToggle:gc("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){m.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),m.timers=[],m.fx.tick=function(){var a,b=m.timers,c=0;for($b=m.now();c<b.length;c++)a=b[c],a()||b[c]!==a||b.splice(c--,1);b.length||m.fx.stop(),$b=void 
0},m.fx.timer=function(a){m.timers.push(a),a()?m.fx.start():m.timers.pop()},m.fx.interval=13,m.fx.start=function(){_b||(_b=setInterval(m.fx.tick,m.fx.interval))},m.fx.stop=function(){clearInterval(_b),_b=null},m.fx.speeds={slow:600,fast:200,_default:400},m.fn.delay=function(a,b){return a=m.fx?m.fx.speeds[a]||a:a,b=b||"fx",this.queue(b,function(b,c){var d=setTimeout(b,a);c.stop=function(){clearTimeout(d)}})},function(){var a,b,c,d,e;b=y.createElement("div"),b.setAttribute("className","t"),b.innerHTML=" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>",d=b.getElementsByTagName("a")[0],c=y.createElement("select"),e=c.appendChild(y.createElement("option")),a=b.getElementsByTagName("input")[0],d.style.cssText="top:1px",k.getSetAttribute="t"!==b.className,k.style=/top/.test(d.getAttribute("style")),k.hrefNormalized="/a"===d.getAttribute("href"),k.checkOn=!!a.value,k.optSelected=e.selected,k.enctype=!!y.createElement("form").enctype,c.disabled=!0,k.optDisabled=!e.disabled,a=y.createElement("input"),a.setAttribute("value",""),k.input=""===a.getAttribute("value"),a.value="t",a.setAttribute("type","radio"),k.radioValue="t"===a.value}();var lc=/\r/g;m.fn.extend({val:function(a){var b,c,d,e=this[0];{if(arguments.length)return d=m.isFunction(a),this.each(function(c){var e;1===this.nodeType&&(e=d?a.call(this,c,m(this).val()):a,null==e?e="":"number"==typeof e?e+="":m.isArray(e)&&(e=m.map(e,function(a){return null==a?"":a+""})),b=m.valHooks[this.type]||m.valHooks[this.nodeName.toLowerCase()],b&&"set"in b&&void 0!==b.set(this,e,"value")||(this.value=e))});if(e)return b=m.valHooks[e.type]||m.valHooks[e.nodeName.toLowerCase()],b&&"get"in b&&void 0!==(c=b.get(e,"value"))?c:(c=e.value,"string"==typeof c?c.replace(lc,""):null==c?"":c)}}}),m.extend({valHooks:{option:{get:function(a){var b=m.find.attr(a,"value");return null!=b?b:m.trim(m.text(a))}},select:{get:function(a){for(var b,c,d=a.options,e=a.selectedIndex,f="select-one"===a.type||0>e,g=f?null:[],h=f?e+1:d.length,i=0>e?h:f?e:0;h>i;i++)if(c=d[i],!(!c.selected&&i!==e||(k.optDisabled?c.disabled:null!==c.getAttribute("disabled"))||c.parentNode.disabled&&m.nodeName(c.parentNode,"optgroup"))){if(b=m(c).val(),f)return b;g.push(b)}return g},set:function(a,b){var c,d,e=a.options,f=m.makeArray(b),g=e.length;while(g--)if(d=e[g],m.inArray(m.valHooks.option.get(d),f)>=0)try{d.selected=c=!0}catch(h){d.scrollHeight}else d.selected=!1;return c||(a.selectedIndex=-1),e}}}}),m.each(["radio","checkbox"],function(){m.valHooks[this]={set:function(a,b){return m.isArray(b)?a.checked=m.inArray(m(a).val(),b)>=0:void 0}},k.checkOn||(m.valHooks[this].get=function(a){return null===a.getAttribute("value")?"on":a.value})});var mc,nc,oc=m.expr.attrHandle,pc=/^(?:checked|selected)$/i,qc=k.getSetAttribute,rc=k.input;m.fn.extend({attr:function(a,b){return V(this,m.attr,a,b,arguments.length>1)},removeAttr:function(a){return this.each(function(){m.removeAttr(this,a)})}}),m.extend({attr:function(a,b,c){var d,e,f=a.nodeType;if(a&&3!==f&&8!==f&&2!==f)return typeof a.getAttribute===K?m.prop(a,b,c):(1===f&&m.isXMLDoc(a)||(b=b.toLowerCase(),d=m.attrHooks[b]||(m.expr.match.bool.test(b)?nc:mc)),void 0===c?d&&"get"in d&&null!==(e=d.get(a,b))?e:(e=m.find.attr(a,b),null==e?void 0:e):null!==c?d&&"set"in d&&void 0!==(e=d.set(a,c,b))?e:(a.setAttribute(b,c+""),c):void m.removeAttr(a,b))},removeAttr:function(a,b){var 
c,d,e=0,f=b&&b.match(E);if(f&&1===a.nodeType)while(c=f[e++])d=m.propFix[c]||c,m.expr.match.bool.test(c)?rc&&qc||!pc.test(c)?a[d]=!1:a[m.camelCase("default-"+c)]=a[d]=!1:m.attr(a,c,""),a.removeAttribute(qc?c:d)},attrHooks:{type:{set:function(a,b){if(!k.radioValue&&"radio"===b&&m.nodeName(a,"input")){var c=a.value;return a.setAttribute("type",b),c&&(a.value=c),b}}}}}),nc={set:function(a,b,c){return b===!1?m.removeAttr(a,c):rc&&qc||!pc.test(c)?a.setAttribute(!qc&&m.propFix[c]||c,c):a[m.camelCase("default-"+c)]=a[c]=!0,c}},m.each(m.expr.match.bool.source.match(/\w+/g),function(a,b){var c=oc[b]||m.find.attr;oc[b]=rc&&qc||!pc.test(b)?function(a,b,d){var e,f;return d||(f=oc[b],oc[b]=e,e=null!=c(a,b,d)?b.toLowerCase():null,oc[b]=f),e}:function(a,b,c){return c?void 0:a[m.camelCase("default-"+b)]?b.toLowerCase():null}}),rc&&qc||(m.attrHooks.value={set:function(a,b,c){return m.nodeName(a,"input")?void(a.defaultValue=b):mc&&mc.set(a,b,c)}}),qc||(mc={set:function(a,b,c){var d=a.getAttributeNode(c);return d||a.setAttributeNode(d=a.ownerDocument.createAttribute(c)),d.value=b+="","value"===c||b===a.getAttribute(c)?b:void 0}},oc.id=oc.name=oc.coords=function(a,b,c){var d;return c?void 0:(d=a.getAttributeNode(b))&&""!==d.value?d.value:null},m.valHooks.button={get:function(a,b){var c=a.getAttributeNode(b);return c&&c.specified?c.value:void 0},set:mc.set},m.attrHooks.contenteditable={set:function(a,b,c){mc.set(a,""===b?!1:b,c)}},m.each(["width","height"],function(a,b){m.attrHooks[b]={set:function(a,c){return""===c?(a.setAttribute(b,"auto"),c):void 0}}})),k.style||(m.attrHooks.style={get:function(a){return a.style.cssText||void 0},set:function(a,b){return a.style.cssText=b+""}});var sc=/^(?:input|select|textarea|button|object)$/i,tc=/^(?:a|area)$/i;m.fn.extend({prop:function(a,b){return V(this,m.prop,a,b,arguments.length>1)},removeProp:function(a){return a=m.propFix[a]||a,this.each(function(){try{this[a]=void 0,delete this[a]}catch(b){}})}}),m.extend({propFix:{"for":"htmlFor","class":"className"},prop:function(a,b,c){var d,e,f,g=a.nodeType;if(a&&3!==g&&8!==g&&2!==g)return f=1!==g||!m.isXMLDoc(a),f&&(b=m.propFix[b]||b,e=m.propHooks[b]),void 0!==c?e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:a[b]=c:e&&"get"in e&&null!==(d=e.get(a,b))?d:a[b]},propHooks:{tabIndex:{get:function(a){var b=m.find.attr(a,"tabindex");return b?parseInt(b,10):sc.test(a.nodeName)||tc.test(a.nodeName)&&a.href?0:-1}}}}),k.hrefNormalized||m.each(["href","src"],function(a,b){m.propHooks[b]={get:function(a){return a.getAttribute(b,4)}}}),k.optSelected||(m.propHooks.selected={get:function(a){var b=a.parentNode;return b&&(b.selectedIndex,b.parentNode&&b.parentNode.selectedIndex),null}}),m.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){m.propFix[this.toLowerCase()]=this}),k.enctype||(m.propFix.enctype="encoding");var uc=/[\t\r\n\f]/g;m.fn.extend({addClass:function(a){var b,c,d,e,f,g,h=0,i=this.length,j="string"==typeof a&&a;if(m.isFunction(a))return this.each(function(b){m(this).addClass(a.call(this,b,this.className))});if(j)for(b=(a||"").match(E)||[];i>h;h++)if(c=this[h],d=1===c.nodeType&&(c.className?(" "+c.className+" ").replace(uc," "):" ")){f=0;while(e=b[f++])d.indexOf(" "+e+" ")<0&&(d+=e+" ");g=m.trim(d),c.className!==g&&(c.className=g)}return this},removeClass:function(a){var b,c,d,e,f,g,h=0,i=this.length,j=0===arguments.length||"string"==typeof a&&a;if(m.isFunction(a))return 
this.each(function(b){m(this).removeClass(a.call(this,b,this.className))});if(j)for(b=(a||"").match(E)||[];i>h;h++)if(c=this[h],d=1===c.nodeType&&(c.className?(" "+c.className+" ").replace(uc," "):"")){f=0;while(e=b[f++])while(d.indexOf(" "+e+" ")>=0)d=d.replace(" "+e+" "," ");g=a?m.trim(d):"",c.className!==g&&(c.className=g)}return this},toggleClass:function(a,b){var c=typeof a;return"boolean"==typeof b&&"string"===c?b?this.addClass(a):this.removeClass(a):this.each(m.isFunction(a)?function(c){m(this).toggleClass(a.call(this,c,this.className,b),b)}:function(){if("string"===c){var b,d=0,e=m(this),f=a.match(E)||[];while(b=f[d++])e.hasClass(b)?e.removeClass(b):e.addClass(b)}else(c===K||"boolean"===c)&&(this.className&&m._data(this,"__className__",this.className),this.className=this.className||a===!1?"":m._data(this,"__className__")||"")})},hasClass:function(a){for(var b=" "+a+" ",c=0,d=this.length;d>c;c++)if(1===this[c].nodeType&&(" "+this[c].className+" ").replace(uc," ").indexOf(b)>=0)return!0;return!1}}),m.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){m.fn[b]=function(a,c){return arguments.length>0?this.on(b,null,a,c):this.trigger(b)}}),m.fn.extend({hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)},bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},delegate:function(a,b,c,d){return this.on(b,a,c,d)},undelegate:function(a,b,c){return 1===arguments.length?this.off(a,"**"):this.off(b,a||"**",c)}});var vc=m.now(),wc=/\?/,xc=/(,)|(\[|{)|(}|])|"(?:[^"\\\r\n]|\\["\\\/bfnrt]|\\u[\da-fA-F]{4})*"\s*:?|true|false|null|-?(?!0\d)\d+(?:\.\d+|)(?:[eE][+-]?\d+|)/g;m.parseJSON=function(b){if(a.JSON&&a.JSON.parse)return a.JSON.parse(b+"");var c,d=null,e=m.trim(b+"");return e&&!m.trim(e.replace(xc,function(a,b,e,f){return c&&b&&(d=0),0===d?a:(c=e||b,d+=!f-!e,"")}))?Function("return "+e)():m.error("Invalid JSON: "+b)},m.parseXML=function(b){var c,d;if(!b||"string"!=typeof b)return null;try{a.DOMParser?(d=new DOMParser,c=d.parseFromString(b,"text/xml")):(c=new ActiveXObject("Microsoft.XMLDOM"),c.async="false",c.loadXML(b))}catch(e){c=void 0}return c&&c.documentElement&&!c.getElementsByTagName("parsererror").length||m.error("Invalid XML: "+b),c};var yc,zc,Ac=/#.*$/,Bc=/([?&])_=[^&]*/,Cc=/^(.*?):[ \t]*([^\r\n]*)\r?$/gm,Dc=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,Ec=/^(?:GET|HEAD)$/,Fc=/^\/\//,Gc=/^([\w.+-]+:)(?:\/\/(?:[^\/?#]*@|)([^\/?#:]*)(?::(\d+)|)|)/,Hc={},Ic={},Jc="*/".concat("*");try{zc=location.href}catch(Kc){zc=y.createElement("a"),zc.href="",zc=zc.href}yc=Gc.exec(zc.toLowerCase())||[];function Lc(a){return function(b,c){"string"!=typeof b&&(c=b,b="*");var d,e=0,f=b.toLowerCase().match(E)||[];if(m.isFunction(c))while(d=f[e++])"+"===d.charAt(0)?(d=d.slice(1)||"*",(a[d]=a[d]||[]).unshift(c)):(a[d]=a[d]||[]).push(c)}}function Mc(a,b,c,d){var e={},f=a===Ic;function g(h){var i;return e[h]=!0,m.each(a[h]||[],function(a,h){var j=h(b,c,d);return"string"!=typeof j||f||e[j]?f?!(i=j):void 0:(b.dataTypes.unshift(j),g(j),!1)}),i}return g(b.dataTypes[0])||!e["*"]&&g("*")}function Nc(a,b){var c,d,e=m.ajaxSettings.flatOptions||{};for(d in b)void 0!==b[d]&&((e[d]?a:c||(c={}))[d]=b[d]);return c&&m.extend(!0,a,c),a}function Oc(a,b,c){var d,e,f,g,h=a.contents,i=a.dataTypes;while("*"===i[0])i.shift(),void 
0===e&&(e=a.mimeType||b.getResponseHeader("Content-Type"));if(e)for(g in h)if(h[g]&&h[g].test(e)){i.unshift(g);break}if(i[0]in c)f=i[0];else{for(g in c){if(!i[0]||a.converters[g+" "+i[0]]){f=g;break}d||(d=g)}f=f||d}return f?(f!==i[0]&&i.unshift(f),c[f]):void 0}function Pc(a,b,c,d){var e,f,g,h,i,j={},k=a.dataTypes.slice();if(k[1])for(g in a.converters)j[g.toLowerCase()]=a.converters[g];f=k.shift();while(f)if(a.responseFields[f]&&(c[a.responseFields[f]]=b),!i&&d&&a.dataFilter&&(b=a.dataFilter(b,a.dataType)),i=f,f=k.shift())if("*"===f)f=i;else if("*"!==i&&i!==f){if(g=j[i+" "+f]||j["* "+f],!g)for(e in j)if(h=e.split(" "),h[1]===f&&(g=j[i+" "+h[0]]||j["* "+h[0]])){g===!0?g=j[e]:j[e]!==!0&&(f=h[0],k.unshift(h[1]));break}if(g!==!0)if(g&&a["throws"])b=g(b);else try{b=g(b)}catch(l){return{state:"parsererror",error:g?l:"No conversion from "+i+" to "+f}}}return{state:"success",data:b}}m.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:zc,type:"GET",isLocal:Dc.test(yc[1]),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":Jc,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":m.parseJSON,"text xml":m.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(a,b){return b?Nc(Nc(a,m.ajaxSettings),b):Nc(m.ajaxSettings,a)},ajaxPrefilter:Lc(Hc),ajaxTransport:Lc(Ic),ajax:function(a,b){"object"==typeof a&&(b=a,a=void 0),b=b||{};var c,d,e,f,g,h,i,j,k=m.ajaxSetup({},b),l=k.context||k,n=k.context&&(l.nodeType||l.jquery)?m(l):m.event,o=m.Deferred(),p=m.Callbacks("once memory"),q=k.statusCode||{},r={},s={},t=0,u="canceled",v={readyState:0,getResponseHeader:function(a){var b;if(2===t){if(!j){j={};while(b=Cc.exec(f))j[b[1].toLowerCase()]=b[2]}b=j[a.toLowerCase()]}return null==b?null:b},getAllResponseHeaders:function(){return 2===t?f:null},setRequestHeader:function(a,b){var c=a.toLowerCase();return t||(a=s[c]=s[c]||a,r[a]=b),this},overrideMimeType:function(a){return t||(k.mimeType=a),this},statusCode:function(a){var b;if(a)if(2>t)for(b in a)q[b]=[q[b],a[b]];else v.always(a[v.status]);return this},abort:function(a){var b=a||u;return i&&i.abort(b),x(0,b),this}};if(o.promise(v).complete=p.add,v.success=v.done,v.error=v.fail,k.url=((a||k.url||zc)+"").replace(Ac,"").replace(Fc,yc[1]+"//"),k.type=b.method||b.type||k.method||k.type,k.dataTypes=m.trim(k.dataType||"*").toLowerCase().match(E)||[""],null==k.crossDomain&&(c=Gc.exec(k.url.toLowerCase()),k.crossDomain=!(!c||c[1]===yc[1]&&c[2]===yc[2]&&(c[3]||("http:"===c[1]?"80":"443"))===(yc[3]||("http:"===yc[1]?"80":"443")))),k.data&&k.processData&&"string"!=typeof k.data&&(k.data=m.param(k.data,k.traditional)),Mc(Hc,k,b,v),2===t)return v;h=k.global,h&&0===m.active++&&m.event.trigger("ajaxStart"),k.type=k.type.toUpperCase(),k.hasContent=!Ec.test(k.type),e=k.url,k.hasContent||(k.data&&(e=k.url+=(wc.test(e)?"&":"?")+k.data,delete 
k.data),k.cache===!1&&(k.url=Bc.test(e)?e.replace(Bc,"$1_="+vc++):e+(wc.test(e)?"&":"?")+"_="+vc++)),k.ifModified&&(m.lastModified[e]&&v.setRequestHeader("If-Modified-Since",m.lastModified[e]),m.etag[e]&&v.setRequestHeader("If-None-Match",m.etag[e])),(k.data&&k.hasContent&&k.contentType!==!1||b.contentType)&&v.setRequestHeader("Content-Type",k.contentType),v.setRequestHeader("Accept",k.dataTypes[0]&&k.accepts[k.dataTypes[0]]?k.accepts[k.dataTypes[0]]+("*"!==k.dataTypes[0]?", "+Jc+"; q=0.01":""):k.accepts["*"]);for(d in k.headers)v.setRequestHeader(d,k.headers[d]);if(k.beforeSend&&(k.beforeSend.call(l,v,k)===!1||2===t))return v.abort();u="abort";for(d in{success:1,error:1,complete:1})v[d](k[d]);if(i=Mc(Ic,k,b,v)){v.readyState=1,h&&n.trigger("ajaxSend",[v,k]),k.async&&k.timeout>0&&(g=setTimeout(function(){v.abort("timeout")},k.timeout));try{t=1,i.send(r,x)}catch(w){if(!(2>t))throw w;x(-1,w)}}else x(-1,"No Transport");function x(a,b,c,d){var j,r,s,u,w,x=b;2!==t&&(t=2,g&&clearTimeout(g),i=void 0,f=d||"",v.readyState=a>0?4:0,j=a>=200&&300>a||304===a,c&&(u=Oc(k,v,c)),u=Pc(k,u,v,j),j?(k.ifModified&&(w=v.getResponseHeader("Last-Modified"),w&&(m.lastModified[e]=w),w=v.getResponseHeader("etag"),w&&(m.etag[e]=w)),204===a||"HEAD"===k.type?x="nocontent":304===a?x="notmodified":(x=u.state,r=u.data,s=u.error,j=!s)):(s=x,(a||!x)&&(x="error",0>a&&(a=0))),v.status=a,v.statusText=(b||x)+"",j?o.resolveWith(l,[r,x,v]):o.rejectWith(l,[v,x,s]),v.statusCode(q),q=void 0,h&&n.trigger(j?"ajaxSuccess":"ajaxError",[v,k,j?r:s]),p.fireWith(l,[v,x]),h&&(n.trigger("ajaxComplete",[v,k]),--m.active||m.event.trigger("ajaxStop")))}return v},getJSON:function(a,b,c){return m.get(a,b,c,"json")},getScript:function(a,b){return m.get(a,void 0,b,"script")}}),m.each(["get","post"],function(a,b){m[b]=function(a,c,d,e){return m.isFunction(c)&&(e=e||d,d=c,c=void 0),m.ajax({url:a,type:b,dataType:e,data:c,success:d})}}),m.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(a,b){m.fn[b]=function(a){return this.on(b,a)}}),m._evalUrl=function(a){return m.ajax({url:a,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0})},m.fn.extend({wrapAll:function(a){if(m.isFunction(a))return this.each(function(b){m(this).wrapAll(a.call(this,b))});if(this[0]){var b=m(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&1===a.firstChild.nodeType)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){return this.each(m.isFunction(a)?function(b){m(this).wrapInner(a.call(this,b))}:function(){var b=m(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=m.isFunction(a);return this.each(function(c){m(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){m.nodeName(this,"body")||m(this).replaceWith(this.childNodes)}).end()}}),m.expr.filters.hidden=function(a){return a.offsetWidth<=0&&a.offsetHeight<=0||!k.reliableHiddenOffsets()&&"none"===(a.style&&a.style.display||m.css(a,"display"))},m.expr.filters.visible=function(a){return!m.expr.filters.hidden(a)};var Qc=/%20/g,Rc=/\[\]$/,Sc=/\r?\n/g,Tc=/^(?:submit|button|image|reset|file)$/i,Uc=/^(?:input|select|textarea|keygen)/i;function Vc(a,b,c,d){var e;if(m.isArray(b))m.each(b,function(b,e){c||Rc.test(a)?d(a,e):Vc(a+"["+("object"==typeof e?b:"")+"]",e,c,d)});else if(c||"object"!==m.type(b))d(a,b);else for(e in b)Vc(a+"["+e+"]",b[e],c,d)}m.param=function(a,b){var 
c,d=[],e=function(a,b){b=m.isFunction(b)?b():null==b?"":b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};if(void 0===b&&(b=m.ajaxSettings&&m.ajaxSettings.traditional),m.isArray(a)||a.jquery&&!m.isPlainObject(a))m.each(a,function(){e(this.name,this.value)});else for(c in a)Vc(c,a[c],b,e);return d.join("&").replace(Qc,"+")},m.fn.extend({serialize:function(){return m.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var a=m.prop(this,"elements");return a?m.makeArray(a):this}).filter(function(){var a=this.type;return this.name&&!m(this).is(":disabled")&&Uc.test(this.nodeName)&&!Tc.test(a)&&(this.checked||!W.test(a))}).map(function(a,b){var c=m(this).val();return null==c?null:m.isArray(c)?m.map(c,function(a){return{name:b.name,value:a.replace(Sc,"\r\n")}}):{name:b.name,value:c.replace(Sc,"\r\n")}}).get()}}),m.ajaxSettings.xhr=void 0!==a.ActiveXObject?function(){return!this.isLocal&&/^(get|post|head|put|delete|options)$/i.test(this.type)&&Zc()||$c()}:Zc;var Wc=0,Xc={},Yc=m.ajaxSettings.xhr();a.ActiveXObject&&m(a).on("unload",function(){for(var a in Xc)Xc[a](void 0,!0)}),k.cors=!!Yc&&"withCredentials"in Yc,Yc=k.ajax=!!Yc,Yc&&m.ajaxTransport(function(a){if(!a.crossDomain||k.cors){var b;return{send:function(c,d){var e,f=a.xhr(),g=++Wc;if(f.open(a.type,a.url,a.async,a.username,a.password),a.xhrFields)for(e in a.xhrFields)f[e]=a.xhrFields[e];a.mimeType&&f.overrideMimeType&&f.overrideMimeType(a.mimeType),a.crossDomain||c["X-Requested-With"]||(c["X-Requested-With"]="XMLHttpRequest");for(e in c)void 0!==c[e]&&f.setRequestHeader(e,c[e]+"");f.send(a.hasContent&&a.data||null),b=function(c,e){var h,i,j;if(b&&(e||4===f.readyState))if(delete Xc[g],b=void 0,f.onreadystatechange=m.noop,e)4!==f.readyState&&f.abort();else{j={},h=f.status,"string"==typeof f.responseText&&(j.text=f.responseText);try{i=f.statusText}catch(k){i=""}h||!a.isLocal||a.crossDomain?1223===h&&(h=204):h=j.text?200:404}j&&d(h,i,j,f.getAllResponseHeaders())},a.async?4===f.readyState?setTimeout(b):f.onreadystatechange=Xc[g]=b:b()},abort:function(){b&&b(void 0,!0)}}}});function Zc(){try{return new a.XMLHttpRequest}catch(b){}}function $c(){try{return new a.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}}m.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/(?:java|ecma)script/},converters:{"text script":function(a){return m.globalEval(a),a}}}),m.ajaxPrefilter("script",function(a){void 0===a.cache&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),m.ajaxTransport("script",function(a){if(a.crossDomain){var b,c=y.head||m("head")[0]||y.documentElement;return{send:function(d,e){b=y.createElement("script"),b.async=!0,a.scriptCharset&&(b.charset=a.scriptCharset),b.src=a.url,b.onload=b.onreadystatechange=function(a,c){(c||!b.readyState||/loaded|complete/.test(b.readyState))&&(b.onload=b.onreadystatechange=null,b.parentNode&&b.parentNode.removeChild(b),b=null,c||e(200,"success"))},c.insertBefore(b,c.firstChild)},abort:function(){b&&b.onload(void 0,!0)}}}});var _c=[],ad=/(=)\?(?=&|$)|\?\?/;m.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var a=_c.pop()||m.expando+"_"+vc++;return this[a]=!0,a}}),m.ajaxPrefilter("json jsonp",function(b,c,d){var e,f,g,h=b.jsonp!==!1&&(ad.test(b.url)?"url":"string"==typeof b.data&&!(b.contentType||"").indexOf("application/x-www-form-urlencoded")&&ad.test(b.data)&&"data");return 
h||"jsonp"===b.dataTypes[0]?(e=b.jsonpCallback=m.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,h?b[h]=b[h].replace(ad,"$1"+e):b.jsonp!==!1&&(b.url+=(wc.test(b.url)?"&":"?")+b.jsonp+"="+e),b.converters["script json"]=function(){return g||m.error(e+" was not called"),g[0]},b.dataTypes[0]="json",f=a[e],a[e]=function(){g=arguments},d.always(function(){a[e]=f,b[e]&&(b.jsonpCallback=c.jsonpCallback,_c.push(e)),g&&m.isFunction(f)&&f(g[0]),g=f=void 0}),"script"):void 0}),m.parseHTML=function(a,b,c){if(!a||"string"!=typeof a)return null;"boolean"==typeof b&&(c=b,b=!1),b=b||y;var d=u.exec(a),e=!c&&[];return d?[b.createElement(d[1])]:(d=m.buildFragment([a],b,e),e&&e.length&&m(e).remove(),m.merge([],d.childNodes))};var bd=m.fn.load;m.fn.load=function(a,b,c){if("string"!=typeof a&&bd)return bd.apply(this,arguments);var d,e,f,g=this,h=a.indexOf(" ");return h>=0&&(d=m.trim(a.slice(h,a.length)),a=a.slice(0,h)),m.isFunction(b)?(c=b,b=void 0):b&&"object"==typeof b&&(f="POST"),g.length>0&&m.ajax({url:a,type:f,dataType:"html",data:b}).done(function(a){e=arguments,g.html(d?m("<div>").append(m.parseHTML(a)).find(d):a)}).complete(c&&function(a,b){g.each(c,e||[a.responseText,b,a])}),this},m.expr.filters.animated=function(a){return m.grep(m.timers,function(b){return a===b.elem}).length};var cd=a.document.documentElement;function dd(a){return m.isWindow(a)?a:9===a.nodeType?a.defaultView||a.parentWindow:!1}m.offset={setOffset:function(a,b,c){var d,e,f,g,h,i,j,k=m.css(a,"position"),l=m(a),n={};"static"===k&&(a.style.position="relative"),h=l.offset(),f=m.css(a,"top"),i=m.css(a,"left"),j=("absolute"===k||"fixed"===k)&&m.inArray("auto",[f,i])>-1,j?(d=l.position(),g=d.top,e=d.left):(g=parseFloat(f)||0,e=parseFloat(i)||0),m.isFunction(b)&&(b=b.call(a,c,h)),null!=b.top&&(n.top=b.top-h.top+g),null!=b.left&&(n.left=b.left-h.left+e),"using"in b?b.using.call(a,n):l.css(n)}},m.fn.extend({offset:function(a){if(arguments.length)return void 0===a?this:this.each(function(b){m.offset.setOffset(this,a,b)});var b,c,d={top:0,left:0},e=this[0],f=e&&e.ownerDocument;if(f)return b=f.documentElement,m.contains(b,e)?(typeof e.getBoundingClientRect!==K&&(d=e.getBoundingClientRect()),c=dd(f),{top:d.top+(c.pageYOffset||b.scrollTop)-(b.clientTop||0),left:d.left+(c.pageXOffset||b.scrollLeft)-(b.clientLeft||0)}):d},position:function(){if(this[0]){var a,b,c={top:0,left:0},d=this[0];return"fixed"===m.css(d,"position")?b=d.getBoundingClientRect():(a=this.offsetParent(),b=this.offset(),m.nodeName(a[0],"html")||(c=a.offset()),c.top+=m.css(a[0],"borderTopWidth",!0),c.left+=m.css(a[0],"borderLeftWidth",!0)),{top:b.top-c.top-m.css(d,"marginTop",!0),left:b.left-c.left-m.css(d,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||cd;while(a&&!m.nodeName(a,"html")&&"static"===m.css(a,"position"))a=a.offsetParent;return a||cd})}}),m.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(a,b){var c=/Y/.test(b);m.fn[a]=function(d){return V(this,function(a,d,e){var f=dd(a);return void 0===e?f?b in f?f[b]:f.document.documentElement[d]:a[d]:void(f?f.scrollTo(c?m(f).scrollLeft():e,c?e:m(f).scrollTop()):a[d]=e)},a,d,arguments.length,null)}}),m.each(["top","left"],function(a,b){m.cssHooks[b]=Lb(k.pixelPosition,function(a,c){return c?(c=Jb(a,b),Hb.test(c)?m(a).position()[b]+"px":c):void 0})}),m.each({Height:"height",Width:"width"},function(a,b){m.each({padding:"inner"+a,content:b,"":"outer"+a},function(c,d){m.fn[d]=function(d,e){var f=arguments.length&&(c||"boolean"!=typeof 
d),g=c||(d===!0||e===!0?"margin":"border");return V(this,function(b,c,d){var e;return m.isWindow(b)?b.document.documentElement["client"+a]:9===b.nodeType?(e=b.documentElement,Math.max(b.body["scroll"+a],e["scroll"+a],b.body["offset"+a],e["offset"+a],e["client"+a])):void 0===d?m.css(b,c,g):m.style(b,c,d,g)},b,f?d:void 0,f,null)}})}),m.fn.size=function(){return this.length},m.fn.andSelf=m.fn.addBack,"function"==typeof define&&define.amd&&define("jquery",[],function(){return m});var ed=a.jQuery,fd=a.$;return m.noConflict=function(b){return a.$===m&&(a.$=fd),b&&a.jQuery===m&&(a.jQuery=ed),m},typeof b===K&&(a.jQuery=a.$=m),m}); \ No newline at end of file
diff --git a/third_party/python/coverage/coverage/htmlfiles/jquery.tablesorter.min.js b/third_party/python/coverage/coverage/htmlfiles/jquery.tablesorter.min.js
new file mode 100644
index 0000000000..64c7007129
--- /dev/null
+++ b/third_party/python/coverage/coverage/htmlfiles/jquery.tablesorter.min.js
@@ -0,0 +1,2 @@
+
+(function($){$.extend({tablesorter:new function(){var parsers=[],widgets=[];this.defaults={cssHeader:"header",cssAsc:"headerSortUp",cssDesc:"headerSortDown",sortInitialOrder:"asc",sortMultiSortKey:"shiftKey",sortForce:null,sortAppend:null,textExtraction:"simple",parsers:{},widgets:[],widgetZebra:{css:["even","odd"]},headers:{},widthFixed:false,cancelSelection:true,sortList:[],headerList:[],dateFormat:"us",decimal:'.',debug:false};function benchmark(s,d){log(s+","+(new Date().getTime()-d.getTime())+"ms");}this.benchmark=benchmark;function log(s){if(typeof console!="undefined"&&typeof console.debug!="undefined"){console.log(s);}else{alert(s);}}function buildParserCache(table,$headers){if(table.config.debug){var parsersDebug="";}var rows=table.tBodies[0].rows;if(table.tBodies[0].rows[0]){var list=[],cells=rows[0].cells,l=cells.length;for(var i=0;i<l;i++){var p=false;if($.metadata&&($($headers[i]).metadata()&&$($headers[i]).metadata().sorter)){p=getParserById($($headers[i]).metadata().sorter);}else if((table.config.headers[i]&&table.config.headers[i].sorter)){p=getParserById(table.config.headers[i].sorter);}if(!p){p=detectParserForColumn(table,cells[i]);}if(table.config.debug){parsersDebug+="column:"+i+" parser:"+p.id+"\n";}list.push(p);}}if(table.config.debug){log(parsersDebug);}return list;};function detectParserForColumn(table,node){var l=parsers.length;for(var i=1;i<l;i++){if(parsers[i].is($.trim(getElementText(table.config,node)),table,node)){return parsers[i];}}return parsers[0];}function getParserById(name){var l=parsers.length;for(var i=0;i<l;i++){if(parsers[i].id.toLowerCase()==name.toLowerCase()){return parsers[i];}}return false;}function buildCache(table){if(table.config.debug){var cacheTime=new Date();}var totalRows=(table.tBodies[0]&&table.tBodies[0].rows.length)||0,totalCells=(table.tBodies[0].rows[0]&&table.tBodies[0].rows[0].cells.length)||0,parsers=table.config.parsers,cache={row:[],normalized:[]};for(var i=0;i<totalRows;++i){var c=table.tBodies[0].rows[i],cols=[];cache.row.push($(c));for(var j=0;j<totalCells;++j){cols.push(parsers[j].format(getElementText(table.config,c.cells[j]),table,c.cells[j]));}cols.push(i);cache.normalized.push(cols);cols=null;};if(table.config.debug){benchmark("Building cache for "+totalRows+" rows:",cacheTime);}return cache;};function getElementText(config,node){if(!node)return"";var t="";if(config.textExtraction=="simple"){if(node.childNodes[0]&&node.childNodes[0].hasChildNodes()){t=node.childNodes[0].innerHTML;}else{t=node.innerHTML;}}else{if(typeof(config.textExtraction)=="function"){t=config.textExtraction(node);}else{t=$(node).text();}}return t;}function appendToTable(table,cache){if(table.config.debug){var appendTime=new Date()}var c=cache,r=c.row,n=c.normalized,totalRows=n.length,checkCell=(n[0].length-1),tableBody=$(table.tBodies[0]),rows=[];for(var i=0;i<totalRows;i++){rows.push(r[n[i][checkCell]]);if(!table.config.appender){var o=r[n[i][checkCell]];var l=o.length;for(var j=0;j<l;j++){tableBody[0].appendChild(o[j]);}}}if(table.config.appender){table.config.appender(table,rows);}rows=null;if(table.config.debug){benchmark("Rebuilt table:",appendTime);}applyWidget(table);setTimeout(function(){$(table).trigger("sortEnd");},0);};function buildHeaders(table){if(table.config.debug){var time=new Date();}var meta=($.metadata)?true:false,tableHeadersRows=[];for(var i=0;i<table.tHead.rows.length;i++){tableHeadersRows[i]=0;};$tableHeaders=$("thead 
th",table);$tableHeaders.each(function(index){this.count=0;this.column=index;this.order=formatSortingOrder(table.config.sortInitialOrder);if(checkHeaderMetadata(this)||checkHeaderOptions(table,index))this.sortDisabled=true;if(!this.sortDisabled){$(this).addClass(table.config.cssHeader);}table.config.headerList[index]=this;});if(table.config.debug){benchmark("Built headers:",time);log($tableHeaders);}return $tableHeaders;};function checkCellColSpan(table,rows,row){var arr=[],r=table.tHead.rows,c=r[row].cells;for(var i=0;i<c.length;i++){var cell=c[i];if(cell.colSpan>1){arr=arr.concat(checkCellColSpan(table,headerArr,row++));}else{if(table.tHead.length==1||(cell.rowSpan>1||!r[row+1])){arr.push(cell);}}}return arr;};function checkHeaderMetadata(cell){if(($.metadata)&&($(cell).metadata().sorter===false)){return true;};return false;}function checkHeaderOptions(table,i){if((table.config.headers[i])&&(table.config.headers[i].sorter===false)){return true;};return false;}function applyWidget(table){var c=table.config.widgets;var l=c.length;for(var i=0;i<l;i++){getWidgetById(c[i]).format(table);}}function getWidgetById(name){var l=widgets.length;for(var i=0;i<l;i++){if(widgets[i].id.toLowerCase()==name.toLowerCase()){return widgets[i];}}};function formatSortingOrder(v){if(typeof(v)!="Number"){i=(v.toLowerCase()=="desc")?1:0;}else{i=(v==(0||1))?v:0;}return i;}function isValueInArray(v,a){var l=a.length;for(var i=0;i<l;i++){if(a[i][0]==v){return true;}}return false;}function setHeadersCss(table,$headers,list,css){$headers.removeClass(css[0]).removeClass(css[1]);var h=[];$headers.each(function(offset){if(!this.sortDisabled){h[this.column]=$(this);}});var l=list.length;for(var i=0;i<l;i++){h[list[i][0]].addClass(css[list[i][1]]);}}function fixColumnWidth(table,$headers){var c=table.config;if(c.widthFixed){var colgroup=$('<colgroup>');$("tr:first td",table.tBodies[0]).each(function(){colgroup.append($('<col>').css('width',$(this).width()));});$(table).prepend(colgroup);};}function updateHeaderSortCount(table,sortList){var c=table.config,l=sortList.length;for(var i=0;i<l;i++){var s=sortList[i],o=c.headerList[s[0]];o.count=s[1];o.count++;}}function multisort(table,sortList,cache){if(table.config.debug){var sortTime=new Date();}var dynamicExp="var sortWrapper = function(a,b) {",l=sortList.length;for(var i=0;i<l;i++){var c=sortList[i][0];var order=sortList[i][1];var s=(getCachedSortType(table.config.parsers,c)=="text")?((order==0)?"sortText":"sortTextDesc"):((order==0)?"sortNumeric":"sortNumericDesc");var e="e"+i;dynamicExp+="var "+e+" = "+s+"(a["+c+"],b["+c+"]); ";dynamicExp+="if("+e+") { return "+e+"; } ";dynamicExp+="else { ";}var orgOrderCol=cache.normalized[0].length-1;dynamicExp+="return a["+orgOrderCol+"]-b["+orgOrderCol+"];";for(var i=0;i<l;i++){dynamicExp+="}; ";}dynamicExp+="return 0; ";dynamicExp+="}; ";eval(dynamicExp);cache.normalized.sort(sortWrapper);if(table.config.debug){benchmark("Sorting on "+sortList.toString()+" and dir "+order+" time:",sortTime);}return cache;};function sortText(a,b){return((a<b)?-1:((a>b)?1:0));};function sortTextDesc(a,b){return((b<a)?-1:((b>a)?1:0));};function sortNumeric(a,b){return a-b;};function sortNumericDesc(a,b){return b-a;};function getCachedSortType(parsers,i){return parsers[i].type;};this.construct=function(settings){return this.each(function(){if(!this.tHead||!this.tBodies)return;var 
$this,$document,$headers,cache,config,shiftDown=0,sortOrder;this.config={};config=$.extend(this.config,$.tablesorter.defaults,settings);$this=$(this);$headers=buildHeaders(this);this.config.parsers=buildParserCache(this,$headers);cache=buildCache(this);var sortCSS=[config.cssDesc,config.cssAsc];fixColumnWidth(this);$headers.click(function(e){$this.trigger("sortStart");var totalRows=($this[0].tBodies[0]&&$this[0].tBodies[0].rows.length)||0;if(!this.sortDisabled&&totalRows>0){var $cell=$(this);var i=this.column;this.order=this.count++%2;if(!e[config.sortMultiSortKey]){config.sortList=[];if(config.sortForce!=null){var a=config.sortForce;for(var j=0;j<a.length;j++){if(a[j][0]!=i){config.sortList.push(a[j]);}}}config.sortList.push([i,this.order]);}else{if(isValueInArray(i,config.sortList)){for(var j=0;j<config.sortList.length;j++){var s=config.sortList[j],o=config.headerList[s[0]];if(s[0]==i){o.count=s[1];o.count++;s[1]=o.count%2;}}}else{config.sortList.push([i,this.order]);}};setTimeout(function(){setHeadersCss($this[0],$headers,config.sortList,sortCSS);appendToTable($this[0],multisort($this[0],config.sortList,cache));},1);return false;}}).mousedown(function(){if(config.cancelSelection){this.onselectstart=function(){return false};return false;}});$this.bind("update",function(){this.config.parsers=buildParserCache(this,$headers);cache=buildCache(this);}).bind("sorton",function(e,list){$(this).trigger("sortStart");config.sortList=list;var sortList=config.sortList;updateHeaderSortCount(this,sortList);setHeadersCss(this,$headers,sortList,sortCSS);appendToTable(this,multisort(this,sortList,cache));}).bind("appendCache",function(){appendToTable(this,cache);}).bind("applyWidgetId",function(e,id){getWidgetById(id).format(this);}).bind("applyWidgets",function(){applyWidget(this);});if($.metadata&&($(this).metadata()&&$(this).metadata().sortlist)){config.sortList=$(this).metadata().sortlist;}if(config.sortList.length>0){$this.trigger("sorton",[config.sortList]);}applyWidget(this);});};this.addParser=function(parser){var l=parsers.length,a=true;for(var i=0;i<l;i++){if(parsers[i].id.toLowerCase()==parser.id.toLowerCase()){a=false;}}if(a){parsers.push(parser);};};this.addWidget=function(widget){widgets.push(widget);};this.formatFloat=function(s){var i=parseFloat(s);return(isNaN(i))?0:i;};this.formatInt=function(s){var i=parseInt(s);return(isNaN(i))?0:i;};this.isDigit=function(s,config){var DECIMAL='\\'+config.decimal;var exp='/(^[+]?0('+DECIMAL+'0+)?$)|(^([-+]?[1-9][0-9]*)$)|(^([-+]?((0?|[1-9][0-9]*)'+DECIMAL+'(0*[1-9][0-9]*)))$)|(^[-+]?[1-9]+[0-9]*'+DECIMAL+'0+$)/';return RegExp(exp).test($.trim(s));};this.clearTableBody=function(table){if($.browser.msie){function empty(){while(this.firstChild)this.removeChild(this.firstChild);}empty.apply(table.tBodies[0]);}else{table.tBodies[0].innerHTML="";}};}});$.fn.extend({tablesorter:$.tablesorter.construct});var ts=$.tablesorter;ts.addParser({id:"text",is:function(s){return true;},format:function(s){return $.trim(s.toLowerCase());},type:"text"});ts.addParser({id:"digit",is:function(s,table){var c=table.config;return $.tablesorter.isDigit(s,c);},format:function(s){return $.tablesorter.formatFloat(s);},type:"numeric"});ts.addParser({id:"currency",is:function(s){return/^[£$€?.]/.test(s);},format:function(s){return $.tablesorter.formatFloat(s.replace(new RegExp(/[^0-9.]/g),""));},type:"numeric"});ts.addParser({id:"ipAddress",is:function(s){return/^\d{2,3}[\.]\d{2,3}[\.]\d{2,3}[\.]\d{2,3}$/.test(s);},format:function(s){var a=s.split("."),r="",l=a.length;for(var 
i=0;i<l;i++){var item=a[i];if(item.length==2){r+="0"+item;}else{r+=item;}}return $.tablesorter.formatFloat(r);},type:"numeric"});ts.addParser({id:"url",is:function(s){return/^(https?|ftp|file):\/\/$/.test(s);},format:function(s){return jQuery.trim(s.replace(new RegExp(/(https?|ftp|file):\/\//),''));},type:"text"});ts.addParser({id:"isoDate",is:function(s){return/^\d{4}[\/-]\d{1,2}[\/-]\d{1,2}$/.test(s);},format:function(s){return $.tablesorter.formatFloat((s!="")?new Date(s.replace(new RegExp(/-/g),"/")).getTime():"0");},type:"numeric"});ts.addParser({id:"percent",is:function(s){return/\%$/.test($.trim(s));},format:function(s){return $.tablesorter.formatFloat(s.replace(new RegExp(/%/g),""));},type:"numeric"});ts.addParser({id:"usLongDate",is:function(s){return s.match(new RegExp(/^[A-Za-z]{3,10}\.? [0-9]{1,2}, ([0-9]{4}|'?[0-9]{2}) (([0-2]?[0-9]:[0-5][0-9])|([0-1]?[0-9]:[0-5][0-9]\s(AM|PM)))$/));},format:function(s){return $.tablesorter.formatFloat(new Date(s).getTime());},type:"numeric"});ts.addParser({id:"shortDate",is:function(s){return/\d{1,2}[\/\-]\d{1,2}[\/\-]\d{2,4}/.test(s);},format:function(s,table){var c=table.config;s=s.replace(/\-/g,"/");if(c.dateFormat=="us"){s=s.replace(/(\d{1,2})[\/\-](\d{1,2})[\/\-](\d{4})/,"$3/$1/$2");}else if(c.dateFormat=="uk"){s=s.replace(/(\d{1,2})[\/\-](\d{1,2})[\/\-](\d{4})/,"$3/$2/$1");}else if(c.dateFormat=="dd/mm/yy"||c.dateFormat=="dd-mm-yy"){s=s.replace(/(\d{1,2})[\/\-](\d{1,2})[\/\-](\d{2})/,"$1/$2/$3");}return $.tablesorter.formatFloat(new Date(s).getTime());},type:"numeric"});ts.addParser({id:"time",is:function(s){return/^(([0-2]?[0-9]:[0-5][0-9])|([0-1]?[0-9]:[0-5][0-9]\s(am|pm)))$/.test(s);},format:function(s){return $.tablesorter.formatFloat(new Date("2000/01/01 "+s).getTime());},type:"numeric"});ts.addParser({id:"metadata",is:function(s){return false;},format:function(s,table,cell){var c=table.config,p=(!c.parserMetadataName)?'sortValue':c.parserMetadataName;return $(cell).metadata()[p];},type:"numeric"});ts.addWidget({id:"zebra",format:function(table){if(table.config.debug){var time=new Date();}$("tr:visible",table.tBodies[0]).filter(':even').removeClass(table.config.widgetZebra.css[1]).addClass(table.config.widgetZebra.css[0]).end().filter(':odd').removeClass(table.config.widgetZebra.css[0]).addClass(table.config.widgetZebra.css[1]);if(table.config.debug){$.tablesorter.benchmark("Applying Zebra widget",time);}}});})(jQuery); \ No newline at end of file
diff --git a/third_party/python/coverage/coverage/htmlfiles/keybd_closed.png b/third_party/python/coverage/coverage/htmlfiles/keybd_closed.png
new file mode 100644
index 0000000000..db114023f0
--- /dev/null
+++ b/third_party/python/coverage/coverage/htmlfiles/keybd_closed.png
Binary files differ
diff --git a/third_party/python/coverage/coverage/htmlfiles/keybd_open.png b/third_party/python/coverage/coverage/htmlfiles/keybd_open.png
new file mode 100644
index 0000000000..db114023f0
--- /dev/null
+++ b/third_party/python/coverage/coverage/htmlfiles/keybd_open.png
Binary files differ
diff --git a/third_party/python/coverage/coverage/htmlfiles/pyfile.html b/third_party/python/coverage/coverage/htmlfiles/pyfile.html
new file mode 100644
index 0000000000..eb0f99c812
--- /dev/null
+++ b/third_party/python/coverage/coverage/htmlfiles/pyfile.html
@@ -0,0 +1,112 @@
+{# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 #}
+{# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt #}
+
+<!DOCTYPE html>
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+ {# IE8 rounds line-height incorrectly, and adding this emulateIE7 line makes it right! #}
+ {# http://social.msdn.microsoft.com/Forums/en-US/iewebdevelopment/thread/7684445e-f080-4d8f-8529-132763348e21 #}
+ <meta http-equiv="X-UA-Compatible" content="IE=emulateIE7" />
+ <title>Coverage for {{relative_filename|escape}}: {{nums.pc_covered_str}}%</title>
+ <link rel="stylesheet" href="style.css" type="text/css">
+ {% if extra_css %}
+ <link rel="stylesheet" href="{{ extra_css }}" type="text/css">
+ {% endif %}
+ <script type="text/javascript" src="jquery.min.js"></script>
+ <script type="text/javascript" src="jquery.hotkeys.js"></script>
+ <script type="text/javascript" src="jquery.isonscreen.js"></script>
+ <script type="text/javascript" src="coverage_html.js"></script>
+ <script type="text/javascript">
+ jQuery(document).ready(coverage.pyfile_ready);
+ </script>
+</head>
+<body class="pyfile">
+
+<div id="header">
+ <div class="content">
+ <h1>Coverage for <b>{{relative_filename|escape}}</b> :
+ <span class="pc_cov">{{nums.pc_covered_str}}%</span>
+ </h1>
+
+ <img id="keyboard_icon" src="keybd_closed.png" alt="Show keyboard shortcuts" />
+
+ <h2 class="stats">
+ {{nums.n_statements}} statements &nbsp;
+ <span class="{{category.run}} shortkey_r button_toggle_run">{{nums.n_executed}} run</span>
+ <span class="{{category.mis}} shortkey_m button_toggle_mis">{{nums.n_missing}} missing</span>
+ <span class="{{category.exc}} shortkey_x button_toggle_exc">{{nums.n_excluded}} excluded</span>
+
+ {% if has_arcs %}
+ <span class="{{category.par}} shortkey_p button_toggle_par">{{nums.n_partial_branches}} partial</span>
+ {% endif %}
+ </h2>
+ </div>
+</div>
+
+<div class="help_panel">
+ <img id="panel_icon" src="keybd_open.png" alt="Hide keyboard shortcuts" />
+ <p class="legend">Hot-keys on this page</p>
+ <div>
+ <p class="keyhelp">
+ <span class="key">r</span>
+ <span class="key">m</span>
+ <span class="key">x</span>
+ <span class="key">p</span> &nbsp; toggle line displays
+ </p>
+ <p class="keyhelp">
+ <span class="key">j</span>
+ <span class="key">k</span> &nbsp; next/prev highlighted chunk
+ </p>
+ <p class="keyhelp">
+ <span class="key">0</span> &nbsp; (zero) top of page
+ </p>
+ <p class="keyhelp">
+ <span class="key">1</span> &nbsp; (one) first highlighted chunk
+ </p>
+ </div>
+</div>
+
+<div id="source">
+ {% for line in lines -%}
+ {% joined %}
+ <p id="t{{line.number}}" class="{{line.css_class}}">
+ <span class="n"><a href="#t{{line.number}}">{{line.number}}</a></span>
+ <span class="t">{{line.html}}&nbsp;</span>
+ {% if line.context_list %}
+ <input type="checkbox" id="ctxs{{line.number}}" />
+ {% endif %}
+ {# Things that should float right in the line. #}
+ <span class="r">
+ {% if line.annotate %}
+ <span class="annotate short">{{line.annotate}}</span>
+ <span class="annotate long">{{line.annotate_long}}</span>
+ {% endif %}
+ {% if line.contexts %}
+ <label for="ctxs{{line.number}}" class="ctx">{{ line.contexts_label }}</label>
+ {% endif %}
+ </span>
+ {# Things that should appear below the line. #}
+ {% if line.context_list %}
+ <span class="ctxs">
+ {% for context in line.context_list %}
+ <span>{{context}}</span>
+ {% endfor %}
+ </span>
+ {% endif %}
+ </p>
+ {% endjoined %}
+ {% endfor %}
+</div>
+
+<div id="footer">
+ <div class="content">
+ <p>
+ <a class="nav" href="index.html">&#xab; index</a> &nbsp; &nbsp; <a class="nav" href="{{__url__}}">coverage.py v{{__version__}}</a>,
+ created at {{ time_stamp }}
+ </p>
+ </div>
+</div>
+
+</body>
+</html>
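The template above is filled in once per measured source file by coverage.py's HTML reporter. A minimal sketch of producing the report that uses it through the public API (the measured module name and the "htmlcov" output directory below are illustrative placeholders):

    import coverage

    cov = coverage.Coverage()
    cov.start()
    import my_module                      # hypothetical code under measurement
    cov.stop()
    cov.save()
    cov.html_report(directory="htmlcov")  # writes index.html plus one page per source file
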
diff --git a/third_party/python/coverage/coverage/htmlfiles/style.css b/third_party/python/coverage/coverage/htmlfiles/style.css
new file mode 100644
index 0000000000..e8ff57657f
--- /dev/null
+++ b/third_party/python/coverage/coverage/htmlfiles/style.css
@@ -0,0 +1,124 @@
+@charset "UTF-8";
+/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */
+/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */
+/* Don't edit this .css file. Edit the .scss file instead! */
+html, body, h1, h2, h3, p, table, td, th { margin: 0; padding: 0; border: 0; font-weight: inherit; font-style: inherit; font-size: 100%; font-family: inherit; vertical-align: baseline; }
+
+body { font-family: georgia, serif; font-size: 1em; }
+
+html > body { font-size: 16px; }
+
+p { font-size: .75em; line-height: 1.33333333em; }
+
+table { border-collapse: collapse; }
+
+td { vertical-align: top; }
+
+table tr.hidden { display: none !important; }
+
+p#no_rows { display: none; font-size: 1.2em; }
+
+a.nav { text-decoration: none; color: inherit; }
+a.nav:hover { text-decoration: underline; color: inherit; }
+
+#header { background: #f8f8f8; width: 100%; border-bottom: 1px solid #eee; }
+
+.indexfile #footer { margin: 1em 3em; }
+
+.pyfile #footer { margin: 1em 1em; }
+
+#footer .content { padding: 0; font-size: 85%; font-family: verdana, sans-serif; color: #666666; font-style: italic; }
+
+#index { margin: 1em 0 0 3em; }
+
+#header .content { padding: 1em 3rem; }
+
+h1 { font-size: 1.25em; display: inline-block; }
+
+#filter_container { display: inline-block; float: right; margin: 0 2em 0 0; }
+#filter_container input { width: 10em; }
+
+h2.stats { margin-top: .5em; font-size: 1em; }
+
+.stats span { border: 1px solid; border-radius: .1em; padding: .1em .5em; margin: 0 .1em; cursor: pointer; border-color: #ccc #999 #999 #ccc; }
+.stats span.run { background: #eeffee; }
+.stats span.run.show_run { border-color: #999 #ccc #ccc #999; background: #ddffdd; }
+.stats span.mis { background: #ffeeee; }
+.stats span.mis.show_mis { border-color: #999 #ccc #ccc #999; background: #ffdddd; }
+.stats span.exc { background: #f7f7f7; }
+.stats span.exc.show_exc { border-color: #999 #ccc #ccc #999; background: #eeeeee; }
+.stats span.par { background: #ffffd5; }
+.stats span.par.show_par { border-color: #999 #ccc #ccc #999; background: #ffffaa; }
+
+#source p .annotate.long, .help_panel { display: none; position: absolute; z-index: 999; background: #ffffcc; border: 1px solid #888; border-radius: .2em; box-shadow: #cccccc .2em .2em .2em; color: #333; padding: .25em .5em; }
+
+#source p .annotate.long { white-space: normal; float: right; top: 1.75em; right: 1em; height: auto; }
+
+#keyboard_icon { float: right; margin: 5px; cursor: pointer; }
+
+.help_panel { padding: .5em; border: 1px solid #883; }
+.help_panel .legend { font-style: italic; margin-bottom: 1em; }
+.indexfile .help_panel { width: 20em; height: 4em; }
+.pyfile .help_panel { width: 16em; height: 8em; }
+
+#panel_icon { float: right; cursor: pointer; }
+
+.keyhelp { margin: .75em; }
+.keyhelp .key { border: 1px solid black; border-color: #888 #333 #333 #888; padding: .1em .35em; font-family: monospace; font-weight: bold; background: #eee; }
+
+#source { padding: 1em 0 1em 3rem; font-family: Consolas, "Liberation Mono", Menlo, Courier, monospace; }
+#source p { position: relative; white-space: pre; }
+#source p * { box-sizing: border-box; }
+#source p .n { float: left; text-align: right; width: 3rem; box-sizing: border-box; margin-left: -3rem; padding-right: 1em; color: #999999; font-family: verdana, sans-serif; }
+#source p .n a { text-decoration: none; color: #999999; font-size: .8333em; line-height: 1em; }
+#source p .n a:hover { text-decoration: underline; color: #999999; }
+#source p.highlight .n { background: #ffdd00; }
+#source p .t { display: inline-block; width: 100%; box-sizing: border-box; margin-left: -.5em; padding-left: 0.3em; border-left: 0.2em solid white; }
+#source p .t:hover { background: #f2f2f2; }
+#source p .t:hover ~ .r .annotate.long { display: block; }
+#source p .t .com { color: green; font-style: italic; line-height: 1px; }
+#source p .t .key { font-weight: bold; line-height: 1px; }
+#source p .t .str { color: #000080; }
+#source p.mis .t { border-left: 0.2em solid #ff0000; }
+#source p.mis.show_mis .t { background: #ffdddd; }
+#source p.mis.show_mis .t:hover { background: #f2d2d2; }
+#source p.run .t { border-left: 0.2em solid #00ff00; }
+#source p.run.show_run .t { background: #ddffdd; }
+#source p.run.show_run .t:hover { background: #d2f2d2; }
+#source p.exc .t { border-left: 0.2em solid #808080; }
+#source p.exc.show_exc .t { background: #eeeeee; }
+#source p.exc.show_exc .t:hover { background: #e2e2e2; }
+#source p.par .t { border-left: 0.2em solid #eeee99; }
+#source p.par.show_par .t { background: #ffffaa; }
+#source p.par.show_par .t:hover { background: #f2f2a2; }
+#source p .r { position: absolute; top: 0; right: 2.5em; font-family: verdana, sans-serif; }
+#source p .annotate { font-family: georgia; color: #666; padding-right: .5em; }
+#source p .annotate.short:hover ~ .long { display: block; }
+#source p .annotate.long { width: 30em; right: 2.5em; }
+#source p input { display: none; }
+#source p input ~ .r label.ctx { cursor: pointer; border-radius: .25em; }
+#source p input ~ .r label.ctx::before { content: "▶ "; }
+#source p input ~ .r label.ctx:hover { background: #d5f7ff; color: #666; }
+#source p input:checked ~ .r label.ctx { background: #aaeeff; color: #666; border-radius: .75em .75em 0 0; padding: 0 .5em; margin: -.25em 0; }
+#source p input:checked ~ .r label.ctx::before { content: "▼ "; }
+#source p input:checked ~ .ctxs { padding: .25em .5em; overflow-y: scroll; max-height: 10.5em; }
+#source p label.ctx { color: #999; display: inline-block; padding: 0 .5em; font-size: .8333em; }
+#source p .ctxs { display: block; max-height: 0; overflow-y: hidden; transition: all .2s; padding: 0 .5em; font-family: verdana, sans-serif; white-space: nowrap; background: #aaeeff; border-radius: .25em; margin-right: 1.75em; }
+#source p .ctxs span { display: block; text-align: right; }
+
+#index td, #index th { text-align: right; width: 5em; padding: .25em .5em; border-bottom: 1px solid #eee; }
+#index td.left, #index th.left { padding-left: 0; }
+#index td.right, #index th.right { padding-right: 0; }
+#index td.name, #index th.name { text-align: left; width: auto; }
+#index th { font-style: italic; color: #333; border-bottom: 1px solid #ccc; cursor: pointer; }
+#index th:hover { background: #eee; border-bottom: 1px solid #999; }
+#index th.headerSortDown, #index th.headerSortUp { border-bottom: 1px solid #000; white-space: nowrap; background: #eee; }
+#index th.headerSortDown:after { content: " ↓"; }
+#index th.headerSortUp:after { content: " ↑"; }
+#index td.name a { text-decoration: none; color: #000; }
+#index tr.total td, #index tr.total_dynamic td { font-weight: bold; border-top: 1px solid #ccc; border-bottom: none; }
+#index tr.file:hover { background: #eeeeee; }
+#index tr.file:hover td.name { text-decoration: underline; color: #000; }
+
+#scroll_marker { position: fixed; right: 0; top: 0; width: 16px; height: 100%; background: white; border-left: 1px solid #eee; will-change: transform; }
+#scroll_marker .marker { background: #ddd; position: absolute; min-height: 3px; width: 100%; }
diff --git a/third_party/python/coverage/coverage/htmlfiles/style.scss b/third_party/python/coverage/coverage/htmlfiles/style.scss
new file mode 100644
index 0000000000..901cccc4ed
--- /dev/null
+++ b/third_party/python/coverage/coverage/htmlfiles/style.scss
@@ -0,0 +1,537 @@
+/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */
+/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */
+
+// CSS styles for coverage.py HTML reports.
+
+// When you edit this file, you need to run "make css" to get the CSS file
+// generated, and then check in both the .scss and the .css files.
+
+// When working on the file, this command is useful:
+// sass --watch --style=compact --sourcemap=none --no-cache coverage/htmlfiles/style.scss:htmlcov/style.css
+
+// Ignore this comment, it's for the CSS output file:
+/* Don't edit this .css file. Edit the .scss file instead! */
+
+// Dimensions
+$left-gutter: 3rem;
+
+// Page-wide styles
+html, body, h1, h2, h3, p, table, td, th {
+ margin: 0;
+ padding: 0;
+ border: 0;
+ font-weight: inherit;
+ font-style: inherit;
+ font-size: 100%;
+ font-family: inherit;
+ vertical-align: baseline;
+}
+
+// Set baseline grid to 16 pt.
+body {
+ font-family: georgia, serif;
+ font-size: 1em;
+}
+
+html>body {
+ font-size: 16px;
+}
+
+// Set base font size to 12/16
+p {
+ font-size: .75em; // 12/16
+ line-height: 1.33333333em; // 16/12
+}
+
+table {
+ border-collapse: collapse;
+}
+td {
+ vertical-align: top;
+}
+table tr.hidden {
+ display: none !important;
+}
+
+p#no_rows {
+ display: none;
+ font-size: 1.2em;
+}
+
+a.nav {
+ text-decoration: none;
+ color: inherit;
+
+ &:hover {
+ text-decoration: underline;
+ color: inherit;
+ }
+}
+
+// Page structure
+#header {
+ background: #f8f8f8;
+ width: 100%;
+ border-bottom: 1px solid #eee;
+}
+
+.indexfile #footer {
+ margin: 1em 3em;
+}
+
+.pyfile #footer {
+ margin: 1em 1em;
+}
+
+#footer .content {
+ padding: 0;
+ font-size: 85%;
+ font-family: verdana, sans-serif;
+ color: #666666;
+ font-style: italic;
+}
+
+#index {
+ margin: 1em 0 0 3em;
+}
+
+// Header styles
+#header .content {
+ padding: 1em $left-gutter;
+}
+
+h1 {
+ font-size: 1.25em;
+ display: inline-block;
+}
+
+#filter_container {
+ display: inline-block;
+ float: right;
+ margin: 0 2em 0 0;
+
+ input {
+ width: 10em;
+ }
+}
+
+$pln-color: #ffffff;
+$mis-color: #ffdddd;
+$run-color: #ddffdd;
+$exc-color: #eeeeee;
+$par-color: #ffffaa;
+
+$off-button-lighten: 50%;
+
+h2.stats {
+ margin-top: .5em;
+ font-size: 1em;
+}
+.stats span {
+ border: 1px solid;
+ border-radius: .1em;
+ padding: .1em .5em;
+ margin: 0 .1em;
+ cursor: pointer;
+ border-color: #ccc #999 #999 #ccc;
+
+ &.run {
+ background: mix($run-color, #fff, $off-button-lighten);
+ &.show_run {
+ border-color: #999 #ccc #ccc #999;
+ background: $run-color;
+ }
+ }
+ &.mis {
+ background: mix($mis-color, #fff, $off-button-lighten);
+ &.show_mis {
+ border-color: #999 #ccc #ccc #999;
+ background: $mis-color;
+ }
+ }
+ &.exc {
+ background: mix($exc-color, #fff, $off-button-lighten);
+ &.show_exc {
+ border-color: #999 #ccc #ccc #999;
+ background: $exc-color;
+ }
+ }
+ &.par {
+ background: mix($par-color, #fff, $off-button-lighten);
+ &.show_par {
+ border-color: #999 #ccc #ccc #999;
+ background: $par-color;
+ }
+ }
+}
+
+// Yellow post-it things.
+%popup {
+ display: none;
+ position: absolute;
+ z-index: 999;
+ background: #ffffcc;
+ border: 1px solid #888;
+ border-radius: .2em;
+ box-shadow: #cccccc .2em .2em .2em;
+ color: #333;
+ padding: .25em .5em;
+}
+
+// Yellow post-it's in the text listings.
+%in-text-popup {
+ @extend %popup;
+ white-space: normal;
+ float: right;
+ top: 1.75em;
+ right: 1em;
+ height: auto;
+}
+
+// Help panel
+#keyboard_icon {
+ float: right;
+ margin: 5px;
+ cursor: pointer;
+}
+
+.help_panel {
+ @extend %popup;
+ padding: .5em;
+ border: 1px solid #883;
+
+ .legend {
+ font-style: italic;
+ margin-bottom: 1em;
+ }
+
+ .indexfile & {
+ width: 20em;
+ height: 4em;
+ }
+
+ .pyfile & {
+ width: 16em;
+ height: 8em;
+ }
+}
+
+#panel_icon {
+ float: right;
+ cursor: pointer;
+}
+
+.keyhelp {
+ margin: .75em;
+
+ .key {
+ border: 1px solid black;
+ border-color: #888 #333 #333 #888;
+ padding: .1em .35em;
+ font-family: monospace;
+ font-weight: bold;
+ background: #eee;
+ }
+}
+
+// Source file styles
+
+$hover-dark-amt: 95%;
+$pln-hover-color: mix($pln-color, #000, $hover-dark-amt);
+$mis-hover-color: mix($mis-color, #000, $hover-dark-amt);
+$run-hover-color: mix($run-color, #000, $hover-dark-amt);
+$exc-hover-color: mix($exc-color, #000, $hover-dark-amt);
+$par-hover-color: mix($par-color, #000, $hover-dark-amt);
+
+// The slim bar at the left edge of the source lines, colored by coverage.
+$border-indicator-width: .2em;
+
+$context-panel-color: #aaeeff;
+
+#source {
+ padding: 1em 0 1em $left-gutter;
+ font-family: Consolas, "Liberation Mono", Menlo, Courier, monospace;
+
+ p {
+ // position relative makes position:absolute pop-ups appear in the right place.
+ position: relative;
+ white-space: pre;
+
+ * {
+ box-sizing: border-box;
+ }
+
+ .n {
+ float: left;
+ text-align: right;
+ width: $left-gutter;
+ box-sizing: border-box;
+ margin-left: -$left-gutter;
+ padding-right: 1em;
+ color: #999999;
+ font-family: verdana, sans-serif;
+
+ a {
+ text-decoration: none;
+ color: #999999;
+ font-size: .8333em; // 10/12
+ line-height: 1em;
+ &:hover {
+ text-decoration: underline;
+ color: #999999;
+ }
+ }
+ }
+
+ &.highlight .n {
+ background: #ffdd00;
+ }
+
+ .t {
+ display: inline-block;
+ width: 100%;
+ box-sizing: border-box;
+ margin-left: -.5em;
+ padding-left: .5em - $border-indicator-width;
+ border-left: $border-indicator-width solid white;
+
+ &:hover {
+ background: $pln-hover-color;
+
+ & ~ .r .annotate.long {
+ display: block;
+ }
+ }
+
+ // Syntax coloring
+ .com {
+ color: green;
+ font-style: italic;
+ line-height: 1px;
+ }
+ .key {
+ font-weight: bold;
+ line-height: 1px;
+ }
+ .str {
+ color: #000080;
+ }
+ }
+
+ &.mis {
+ .t {
+ border-left: $border-indicator-width solid #ff0000;
+ }
+
+ &.show_mis .t {
+ background: $mis-color;
+
+ &:hover {
+ background: $mis-hover-color;
+ }
+ }
+ }
+
+ &.run {
+ .t {
+ border-left: $border-indicator-width solid #00ff00;
+ }
+
+ &.show_run .t {
+ background: $run-color;
+
+ &:hover {
+ background: $run-hover-color;
+ }
+ }
+ }
+
+ &.exc {
+ .t {
+ border-left: $border-indicator-width solid #808080;
+ }
+
+ &.show_exc .t {
+ background: $exc-color;
+
+ &:hover {
+ background: $exc-hover-color;
+ }
+ }
+ }
+
+ &.par {
+ .t {
+ border-left: $border-indicator-width solid #eeee99;
+ }
+
+ &.show_par .t {
+ background: $par-color;
+
+ &:hover {
+ background: $par-hover-color;
+ }
+ }
+
+ }
+
+ .r {
+ position: absolute;
+ top: 0;
+ right: 2.5em;
+ font-family: verdana, sans-serif;
+ }
+
+ .annotate {
+ font-family: georgia;
+ color: #666;
+ padding-right: .5em;
+
+ &.short:hover ~ .long {
+ display: block;
+ }
+
+ &.long {
+ @extend %in-text-popup;
+ width: 30em;
+ right: 2.5em;
+ }
+ }
+
+ input {
+ display: none;
+
+ & ~ .r label.ctx {
+ cursor: pointer;
+ border-radius: .25em;
+ &::before {
+ content: "▶ ";
+ }
+ &:hover {
+ background: mix($context-panel-color, #fff, 50%);
+ color: #666;
+ }
+ }
+
+ &:checked ~ .r label.ctx {
+ background: $context-panel-color;
+ color: #666;
+ border-radius: .75em .75em 0 0;
+ padding: 0 .5em;
+ margin: -.25em 0;
+ &::before {
+ content: "▼ ";
+ }
+ }
+
+ &:checked ~ .ctxs {
+ padding: .25em .5em;
+ overflow-y: scroll;
+ max-height: 10.5em;
+ }
+ }
+
+ label.ctx {
+ color: #999;
+ display: inline-block;
+ padding: 0 .5em;
+ font-size: .8333em; // 10/12
+ }
+
+ .ctxs {
+ display: block;
+ max-height: 0;
+ overflow-y: hidden;
+ transition: all .2s;
+ padding: 0 .5em;
+ font-family: verdana, sans-serif;
+ white-space: nowrap;
+ background: $context-panel-color;
+ border-radius: .25em;
+ margin-right: 1.75em;
+ span {
+ display: block;
+ text-align: right;
+ }
+ }
+ }
+}
+
+
+// index styles
+#index {
+ td, th {
+ text-align: right;
+ width: 5em;
+ padding: .25em .5em;
+ border-bottom: 1px solid #eee;
+ &.left {
+ padding-left: 0;
+ }
+ &.right {
+ padding-right: 0;
+ }
+ &.name {
+ text-align: left;
+ width: auto;
+ }
+ }
+ th {
+ font-style: italic;
+ color: #333;
+ border-bottom: 1px solid #ccc;
+ cursor: pointer;
+ &:hover {
+ background: #eee;
+ border-bottom: 1px solid #999;
+ }
+ &.headerSortDown, &.headerSortUp {
+ border-bottom: 1px solid #000;
+ white-space: nowrap;
+ background: #eee;
+ }
+ &.headerSortDown:after {
+ content: " ↓";
+ }
+ &.headerSortUp:after {
+ content: " ↑";
+ }
+ }
+ td.name a {
+ text-decoration: none;
+ color: #000;
+ }
+
+ tr.total td,
+ tr.total_dynamic td {
+ font-weight: bold;
+ border-top: 1px solid #ccc;
+ border-bottom: none;
+ }
+ tr.file:hover {
+ background: #eeeeee;
+ td.name {
+ text-decoration: underline;
+ color: #000;
+ }
+ }
+}
+
+// scroll marker styles
+#scroll_marker {
+ position: fixed;
+ right: 0;
+ top: 0;
+ width: 16px;
+ height: 100%;
+ background: white;
+ border-left: 1px solid #eee;
+ will-change: transform; // for faster scrolling of fixed element in Chrome
+
+ .marker {
+ background: #ddd;
+ position: absolute;
+ min-height: 3px;
+ width: 100%;
+ }
+}
diff --git a/third_party/python/coverage/coverage/inorout.py b/third_party/python/coverage/coverage/inorout.py
new file mode 100644
index 0000000000..d5e8b22692
--- /dev/null
+++ b/third_party/python/coverage/coverage/inorout.py
@@ -0,0 +1,469 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Determining whether files are being measured/reported or not."""
+
+# For finding the stdlib
+import atexit
+import inspect
+import itertools
+import os
+import platform
+import re
+import sys
+import traceback
+
+from coverage import env
+from coverage.backward import code_object
+from coverage.disposition import FileDisposition, disposition_init
+from coverage.files import TreeMatcher, FnmatchMatcher, ModuleMatcher
+from coverage.files import prep_patterns, find_python_files, canonical_filename
+from coverage.misc import CoverageException
+from coverage.python import source_for_file, source_for_morf
+
+
+# Pypy has some unusual stuff in the "stdlib". Consider those locations
+# when deciding where the stdlib is. These modules are not used for anything;
+# they are modules importable from the pypy lib directories, so that we can
+# find those directories.
+_structseq = _pypy_irc_topic = None
+if env.PYPY:
+ try:
+ import _structseq
+ except ImportError:
+ pass
+
+ try:
+ import _pypy_irc_topic
+ except ImportError:
+ pass
+
+
+def canonical_path(morf, directory=False):
+ """Return the canonical path of the module or file `morf`.
+
+ If the module is a package, then return its directory. If it is a
+ module, then return its file, unless `directory` is True, in which
+ case return its enclosing directory.
+
+ """
+ morf_path = canonical_filename(source_for_morf(morf))
+ if morf_path.endswith("__init__.py") or directory:
+ morf_path = os.path.split(morf_path)[0]
+ return morf_path
+
+
+def name_for_module(filename, frame):
+ """Get the name of the module for a filename and frame.
+
+ For configurability's sake, we allow __main__ modules to be matched by
+ their importable name.
+
+ If loaded via runpy (aka -m), we can usually recover the "original"
+    full dotted module name; otherwise, we resort to interpreting the
+ file name to get the module's name. In the case that the module name
+ can't be determined, None is returned.
+
+ """
+ module_globals = frame.f_globals if frame is not None else {}
+ if module_globals is None: # pragma: only ironpython
+ # IronPython doesn't provide globals: https://github.com/IronLanguages/main/issues/1296
+ module_globals = {}
+
+ dunder_name = module_globals.get('__name__', None)
+
+ if isinstance(dunder_name, str) and dunder_name != '__main__':
+ # This is the usual case: an imported module.
+ return dunder_name
+
+ loader = module_globals.get('__loader__', None)
+ for attrname in ('fullname', 'name'): # attribute renamed in py3.2
+ if hasattr(loader, attrname):
+ fullname = getattr(loader, attrname)
+ else:
+ continue
+
+ if isinstance(fullname, str) and fullname != '__main__':
+ # Module loaded via: runpy -m
+ return fullname
+
+ # Script as first argument to Python command line.
+ inspectedname = inspect.getmodulename(filename)
+ if inspectedname is not None:
+ return inspectedname
+ else:
+ return dunder_name
+
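
A worked example of the fallback chain described above, assuming no frame is available so only the file name can be consulted:

    # With frame=None there are no globals and no loader, so
    # inspect.getmodulename() decides the result.
    name_for_module("/tmp/foo.py", None)      # -> "foo"
    name_for_module("/tmp/notes.txt", None)   # -> None (not recognizable as a module file)
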
+
+def module_is_namespace(mod):
+ """Is the module object `mod` a PEP420 namespace module?"""
+ return hasattr(mod, '__path__') and getattr(mod, '__file__', None) is None
+
+
+def module_has_file(mod):
+ """Does the module object `mod` have an existing __file__ ?"""
+ mod__file__ = getattr(mod, '__file__', None)
+ if mod__file__ is None:
+ return False
+ return os.path.exists(mod__file__)
+
+
+class InOrOut(object):
+ """Machinery for determining what files to measure."""
+
+ def __init__(self, warn):
+ self.warn = warn
+
+ # The matchers for should_trace.
+ self.source_match = None
+ self.source_pkgs_match = None
+ self.pylib_paths = self.cover_paths = None
+ self.pylib_match = self.cover_match = None
+ self.include_match = self.omit_match = None
+ self.plugins = []
+ self.disp_class = FileDisposition
+
+ # The source argument can be directories or package names.
+ self.source = []
+ self.source_pkgs = []
+ self.source_pkgs_unmatched = []
+ self.omit = self.include = None
+
+ def configure(self, config):
+ """Apply the configuration to get ready for decision-time."""
+ for src in config.source or []:
+ if os.path.isdir(src):
+ self.source.append(canonical_filename(src))
+ else:
+ self.source_pkgs.append(src)
+ self.source_pkgs_unmatched = self.source_pkgs[:]
+
+ self.omit = prep_patterns(config.run_omit)
+ self.include = prep_patterns(config.run_include)
+
+ # The directories for files considered "installed with the interpreter".
+ self.pylib_paths = set()
+ if not config.cover_pylib:
+ # Look at where some standard modules are located. That's the
+ # indication for "installed with the interpreter". In some
+ # environments (virtualenv, for example), these modules may be
+ # spread across a few locations. Look at all the candidate modules
+ # we've imported, and take all the different ones.
+ for m in (atexit, inspect, os, platform, _pypy_irc_topic, re, _structseq, traceback):
+ if m is not None and hasattr(m, "__file__"):
+ self.pylib_paths.add(canonical_path(m, directory=True))
+
+ if _structseq and not hasattr(_structseq, '__file__'):
+ # PyPy 2.4 has no __file__ in the builtin modules, but the code
+ # objects still have the file names. So dig into one to find
+ # the path to exclude. The "filename" might be synthetic,
+ # don't be fooled by those.
+ structseq_file = code_object(_structseq.structseq_new).co_filename
+ if not structseq_file.startswith("<"):
+ self.pylib_paths.add(canonical_path(structseq_file))
+
+ # To avoid tracing the coverage.py code itself, we skip anything
+ # located where we are.
+ self.cover_paths = [canonical_path(__file__, directory=True)]
+ if env.TESTING:
+ # Don't include our own test code.
+ self.cover_paths.append(os.path.join(self.cover_paths[0], "tests"))
+
+ # When testing, we use PyContracts, which should be considered
+ # part of coverage.py, and it uses six. Exclude those directories
+ # just as we exclude ourselves.
+ import contracts
+ import six
+ for mod in [contracts, six]:
+ self.cover_paths.append(canonical_path(mod))
+
+ # Create the matchers we need for should_trace
+ if self.source or self.source_pkgs:
+ self.source_match = TreeMatcher(self.source)
+ self.source_pkgs_match = ModuleMatcher(self.source_pkgs)
+ else:
+ if self.cover_paths:
+ self.cover_match = TreeMatcher(self.cover_paths)
+ if self.pylib_paths:
+ self.pylib_match = TreeMatcher(self.pylib_paths)
+ if self.include:
+ self.include_match = FnmatchMatcher(self.include)
+ if self.omit:
+ self.omit_match = FnmatchMatcher(self.omit)
+
+ def should_trace(self, filename, frame=None):
+ """Decide whether to trace execution in `filename`, with a reason.
+
+ This function is called from the trace function. As each new file name
+ is encountered, this function determines whether it is traced or not.
+
+ Returns a FileDisposition object.
+
+ """
+ original_filename = filename
+ disp = disposition_init(self.disp_class, filename)
+
+ def nope(disp, reason):
+ """Simple helper to make it easy to return NO."""
+ disp.trace = False
+ disp.reason = reason
+ return disp
+
+ if frame is not None:
+ # Compiled Python files have two file names: frame.f_code.co_filename is
+ # the file name at the time the .pyc was compiled. The second name is
+ # __file__, which is where the .pyc was actually loaded from. Since
+ # .pyc files can be moved after compilation (for example, by being
+ # installed), we look for __file__ in the frame and prefer it to the
+ # co_filename value.
+ dunder_file = frame.f_globals and frame.f_globals.get('__file__')
+ if dunder_file:
+ filename = source_for_file(dunder_file)
+ if original_filename and not original_filename.startswith('<'):
+ orig = os.path.basename(original_filename)
+ if orig != os.path.basename(filename):
+ # Files shouldn't be renamed when moved. This happens when
+ # exec'ing code. If it seems like something is wrong with
+ # the frame's file name, then just use the original.
+ filename = original_filename
+
+ if not filename:
+ # Empty string is pretty useless.
+ return nope(disp, "empty string isn't a file name")
+
+ if filename.startswith('memory:'):
+ return nope(disp, "memory isn't traceable")
+
+ if filename.startswith('<'):
+ # Lots of non-file execution is represented with artificial
+ # file names like "<string>", "<doctest readme.txt[0]>", or
+ # "<exec_function>". Don't ever trace these executions, since we
+ # can't do anything with the data later anyway.
+ return nope(disp, "not a real file name")
+
+ # pyexpat does a dumb thing, calling the trace function explicitly from
+ # C code with a C file name.
+ if re.search(r"[/\\]Modules[/\\]pyexpat.c", filename):
+ return nope(disp, "pyexpat lies about itself")
+
+ # Jython reports the .class file to the tracer, use the source file.
+ if filename.endswith("$py.class"):
+ filename = filename[:-9] + ".py"
+
+ canonical = canonical_filename(filename)
+ disp.canonical_filename = canonical
+
+ # Try the plugins, see if they have an opinion about the file.
+ plugin = None
+ for plugin in self.plugins.file_tracers:
+ if not plugin._coverage_enabled:
+ continue
+
+ try:
+ file_tracer = plugin.file_tracer(canonical)
+ if file_tracer is not None:
+ file_tracer._coverage_plugin = plugin
+ disp.trace = True
+ disp.file_tracer = file_tracer
+ if file_tracer.has_dynamic_source_filename():
+ disp.has_dynamic_filename = True
+ else:
+ disp.source_filename = canonical_filename(
+ file_tracer.source_filename()
+ )
+ break
+ except Exception:
+ self.warn(
+ "Disabling plug-in %r due to an exception:" % (plugin._coverage_plugin_name)
+ )
+ traceback.print_exc()
+ plugin._coverage_enabled = False
+ continue
+ else:
+ # No plugin wanted it: it's Python.
+ disp.trace = True
+ disp.source_filename = canonical
+
+ if not disp.has_dynamic_filename:
+ if not disp.source_filename:
+ raise CoverageException(
+ "Plugin %r didn't set source_filename for %r" %
+ (plugin, disp.original_filename)
+ )
+ reason = self.check_include_omit_etc(disp.source_filename, frame)
+ if reason:
+ nope(disp, reason)
+
+ return disp
+
+ def check_include_omit_etc(self, filename, frame):
+ """Check a file name against the include, omit, etc, rules.
+
+ Returns a string or None. String means, don't trace, and is the reason
+ why. None means no reason found to not trace.
+
+ """
+ modulename = name_for_module(filename, frame)
+
+ # If the user specified source or include, then that's authoritative
+ # about the outer bound of what to measure and we don't have to apply
+ # any canned exclusions. If they didn't, then we have to exclude the
+ # stdlib and coverage.py directories.
+ if self.source_match:
+ if self.source_pkgs_match.match(modulename):
+ if modulename in self.source_pkgs_unmatched:
+ self.source_pkgs_unmatched.remove(modulename)
+ elif not self.source_match.match(filename):
+ return "falls outside the --source trees"
+ elif self.include_match:
+ if not self.include_match.match(filename):
+ return "falls outside the --include trees"
+ else:
+ # If we aren't supposed to trace installed code, then check if this
+ # is near the Python standard library and skip it if so.
+ if self.pylib_match and self.pylib_match.match(filename):
+ return "is in the stdlib"
+
+ # We exclude the coverage.py code itself, since a little of it
+ # will be measured otherwise.
+ if self.cover_match and self.cover_match.match(filename):
+ return "is part of coverage.py"
+
+ # Check the file against the omit pattern.
+ if self.omit_match and self.omit_match.match(filename):
+ return "is inside an --omit pattern"
+
+ # No point tracing a file we can't later write to SQLite.
+ try:
+ filename.encode("utf8")
+ except UnicodeEncodeError:
+ return "non-encodable filename"
+
+ # No reason found to skip this file.
+ return None
+
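
These rules mirror the public configuration knobs; a hedged example of setting them from code (the package name and omit pattern are invented):

    import coverage

    cov = coverage.Coverage(
        source=["mypkg"],      # authoritative outer bound; stdlib and coverage.py are skipped implicitly
        omit=["*/tests/*"],    # omit patterns are applied on top of source/include
    )
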
+ def warn_conflicting_settings(self):
+ """Warn if there are settings that conflict."""
+ if self.include:
+ if self.source or self.source_pkgs:
+ self.warn("--include is ignored because --source is set", slug="include-ignored")
+
+ def warn_already_imported_files(self):
+ """Warn if files have already been imported that we will be measuring."""
+ if self.include or self.source or self.source_pkgs:
+ warned = set()
+ for mod in list(sys.modules.values()):
+ filename = getattr(mod, "__file__", None)
+ if filename is None:
+ continue
+ if filename in warned:
+ continue
+
+ disp = self.should_trace(filename)
+ if disp.trace:
+ msg = "Already imported a file that will be measured: {}".format(filename)
+ self.warn(msg, slug="already-imported")
+ warned.add(filename)
+
+ def warn_unimported_source(self):
+ """Warn about source packages that were of interest, but never traced."""
+ for pkg in self.source_pkgs_unmatched:
+ self._warn_about_unmeasured_code(pkg)
+
+ def _warn_about_unmeasured_code(self, pkg):
+ """Warn about a package or module that we never traced.
+
+ `pkg` is a string, the name of the package or module.
+
+ """
+ mod = sys.modules.get(pkg)
+ if mod is None:
+ self.warn("Module %s was never imported." % pkg, slug="module-not-imported")
+ return
+
+ if module_is_namespace(mod):
+ # A namespace package. It's OK for this not to have been traced,
+ # since there is no code directly in it.
+ return
+
+ if not module_has_file(mod):
+ self.warn("Module %s has no Python source." % pkg, slug="module-not-python")
+ return
+
+ # The module was in sys.modules, and seems like a module with code, but
+ # we never measured it. I guess that means it was imported before
+ # coverage even started.
+ self.warn(
+ "Module %s was previously imported, but not measured" % pkg,
+ slug="module-not-measured",
+ )
+
+ def find_possibly_unexecuted_files(self):
+ """Find files in the areas of interest that might be untraced.
+
+ Yields pairs: file path, and responsible plug-in name.
+ """
+ for pkg in self.source_pkgs:
+            if (pkg not in sys.modules or
+ not module_has_file(sys.modules[pkg])):
+ continue
+ pkg_file = source_for_file(sys.modules[pkg].__file__)
+ for ret in self._find_executable_files(canonical_path(pkg_file)):
+ yield ret
+
+ for src in self.source:
+ for ret in self._find_executable_files(src):
+ yield ret
+
+ def _find_plugin_files(self, src_dir):
+ """Get executable files from the plugins."""
+ for plugin in self.plugins.file_tracers:
+ for x_file in plugin.find_executable_files(src_dir):
+ yield x_file, plugin._coverage_plugin_name
+
+ def _find_executable_files(self, src_dir):
+ """Find executable files in `src_dir`.
+
+ Search for files in `src_dir` that can be executed because they
+ are probably importable. Don't include ones that have been omitted
+ by the configuration.
+
+ Yield the file path, and the plugin name that handles the file.
+
+ """
+ py_files = ((py_file, None) for py_file in find_python_files(src_dir))
+ plugin_files = self._find_plugin_files(src_dir)
+
+ for file_path, plugin_name in itertools.chain(py_files, plugin_files):
+ file_path = canonical_filename(file_path)
+ if self.omit_match and self.omit_match.match(file_path):
+ # Turns out this file was omitted, so don't pull it back
+ # in as unexecuted.
+ continue
+ yield file_path, plugin_name
+
+ def sys_info(self):
+ """Our information for Coverage.sys_info.
+
+ Returns a list of (key, value) pairs.
+ """
+ info = [
+ ('cover_paths', self.cover_paths),
+ ('pylib_paths', self.pylib_paths),
+ ]
+
+ matcher_names = [
+ 'source_match', 'source_pkgs_match',
+ 'include_match', 'omit_match',
+ 'cover_match', 'pylib_match',
+ ]
+
+ for matcher_name in matcher_names:
+ matcher = getattr(self, matcher_name)
+ if matcher:
+ matcher_info = matcher.info()
+ else:
+ matcher_info = '-none-'
+ info.append((matcher_name, matcher_info))
+
+ return info
diff --git a/third_party/python/coverage/coverage/jsonreport.py b/third_party/python/coverage/coverage/jsonreport.py
new file mode 100644
index 0000000000..4287bc79a3
--- /dev/null
+++ b/third_party/python/coverage/coverage/jsonreport.py
@@ -0,0 +1,103 @@
+# coding: utf-8
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Json reporting for coverage.py"""
+import datetime
+import json
+import sys
+
+from coverage import __version__
+from coverage.report import get_analysis_to_report
+from coverage.results import Numbers
+
+
+class JsonReporter(object):
+ """A reporter for writing JSON coverage results."""
+
+ def __init__(self, coverage):
+ self.coverage = coverage
+ self.config = self.coverage.config
+ self.total = Numbers()
+ self.report_data = {}
+
+ def report(self, morfs, outfile=None):
+ """Generate a json report for `morfs`.
+
+ `morfs` is a list of modules or file names.
+
+ `outfile` is a file object to write the json to
+
+ """
+ outfile = outfile or sys.stdout
+ coverage_data = self.coverage.get_data()
+ coverage_data.set_query_contexts(self.config.report_contexts)
+ self.report_data["meta"] = {
+ "version": __version__,
+ "timestamp": datetime.datetime.now().isoformat(),
+ "branch_coverage": coverage_data.has_arcs(),
+ "show_contexts": self.config.json_show_contexts,
+ }
+
+ measured_files = {}
+ for file_reporter, analysis in get_analysis_to_report(self.coverage, morfs):
+ measured_files[file_reporter.relative_filename()] = self.report_one_file(
+ coverage_data,
+ analysis
+ )
+
+ self.report_data["files"] = measured_files
+
+ self.report_data["totals"] = {
+ 'covered_lines': self.total.n_executed,
+ 'num_statements': self.total.n_statements,
+ 'percent_covered': self.total.pc_covered,
+ 'missing_lines': self.total.n_missing,
+ 'excluded_lines': self.total.n_excluded,
+ }
+
+ if coverage_data.has_arcs():
+ self.report_data["totals"].update({
+ 'num_branches': self.total.n_branches,
+ 'num_partial_branches': self.total.n_partial_branches,
+ 'covered_branches': self.total.n_executed_branches,
+ 'missing_branches': self.total.n_missing_branches,
+ })
+
+ json.dump(
+ self.report_data,
+ outfile,
+ indent=4 if self.config.json_pretty_print else None
+ )
+
+ return self.total.n_statements and self.total.pc_covered
+
+ def report_one_file(self, coverage_data, analysis):
+ """Extract the relevant report data for a single file"""
+ nums = analysis.numbers
+ self.total += nums
+ summary = {
+ 'covered_lines': nums.n_executed,
+ 'num_statements': nums.n_statements,
+ 'percent_covered': nums.pc_covered,
+ 'missing_lines': nums.n_missing,
+ 'excluded_lines': nums.n_excluded,
+ }
+ reported_file = {
+ 'executed_lines': sorted(analysis.executed),
+ 'summary': summary,
+ 'missing_lines': sorted(analysis.missing),
+ 'excluded_lines': sorted(analysis.excluded)
+ }
+ if self.config.json_show_contexts:
+ reported_file['contexts'] = analysis.data.contexts_by_lineno(
+ analysis.filename,
+ )
+ if coverage_data.has_arcs():
+ reported_file['summary'].update({
+ 'num_branches': nums.n_branches,
+ 'num_partial_branches': nums.n_partial_branches,
+ 'covered_branches': nums.n_executed_branches,
+ 'missing_branches': nums.n_missing_branches,
+ })
+ return reported_file
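
For orientation, the data this reporter serializes ends up shaped roughly like the sketch below; the key names follow the assignments above, the values are invented, and the branch-related keys appear only when arc data is present:

    report_data = {
        "meta": {"version": "5.0", "timestamp": "2020-01-01T00:00:00",
                 "branch_coverage": False, "show_contexts": False},
        "files": {
            "pkg/mod.py": {
                "executed_lines": [1, 2, 5],
                "summary": {"covered_lines": 3, "num_statements": 4, "percent_covered": 75.0,
                            "missing_lines": 1, "excluded_lines": 0},
                "missing_lines": [7],
                "excluded_lines": [],
            },
        },
        "totals": {"covered_lines": 3, "num_statements": 4, "percent_covered": 75.0,
                   "missing_lines": 1, "excluded_lines": 0},
    }
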
diff --git a/third_party/python/coverage/coverage/misc.py b/third_party/python/coverage/coverage/misc.py
new file mode 100644
index 0000000000..5c4381ab65
--- /dev/null
+++ b/third_party/python/coverage/coverage/misc.py
@@ -0,0 +1,361 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Miscellaneous stuff for coverage.py."""
+
+import errno
+import hashlib
+import inspect
+import locale
+import os
+import os.path
+import random
+import re
+import socket
+import sys
+import types
+
+from coverage import env
+from coverage.backward import to_bytes, unicode_class
+
+ISOLATED_MODULES = {}
+
+
+def isolate_module(mod):
+ """Copy a module so that we are isolated from aggressive mocking.
+
+ If a test suite mocks os.path.exists (for example), and then we need to use
+ it during the test, everything will get tangled up if we use their mock.
+ Making a copy of the module when we import it will isolate coverage.py from
+ those complications.
+ """
+ if mod not in ISOLATED_MODULES:
+ new_mod = types.ModuleType(mod.__name__)
+ ISOLATED_MODULES[mod] = new_mod
+ for name in dir(mod):
+ value = getattr(mod, name)
+ if isinstance(value, types.ModuleType):
+ value = isolate_module(value)
+ setattr(new_mod, name, value)
+ return ISOLATED_MODULES[mod]
+
+os = isolate_module(os)
+
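
A small sketch of the isolation in action; the mock below stands in for hypothetical test-suite behaviour:

    import os as real_os

    safe_os = isolate_module(real_os)          # same cached copy as the module-level `os` above
    real_os.path.exists = lambda path: False   # an aggressive mock applied by a test
    safe_os.path.exists("/")                   # still the original implementation -> True
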
+
+def dummy_decorator_with_args(*args_unused, **kwargs_unused):
+ """Dummy no-op implementation of a decorator with arguments."""
+ def _decorator(func):
+ return func
+ return _decorator
+
+
+# Environment COVERAGE_NO_CONTRACTS=1 can turn off contracts while debugging
+# tests to remove noise from stack traces.
+# $set_env.py: COVERAGE_NO_CONTRACTS - Disable PyContracts to simplify stack traces.
+USE_CONTRACTS = env.TESTING and not bool(int(os.environ.get("COVERAGE_NO_CONTRACTS", 0)))
+
+# Use PyContracts for assertion testing on parameters and returns, but only if
+# we are running our own test suite.
+if USE_CONTRACTS:
+ from contracts import contract # pylint: disable=unused-import
+ from contracts import new_contract as raw_new_contract
+
+ def new_contract(*args, **kwargs):
+ """A proxy for contracts.new_contract that doesn't mind happening twice."""
+ try:
+ return raw_new_contract(*args, **kwargs)
+ except ValueError:
+ # During meta-coverage, this module is imported twice, and
+ # PyContracts doesn't like redefining contracts. It's OK.
+ pass
+
+ # Define contract words that PyContract doesn't have.
+ new_contract('bytes', lambda v: isinstance(v, bytes))
+ if env.PY3:
+ new_contract('unicode', lambda v: isinstance(v, unicode_class))
+
+ def one_of(argnames):
+ """Ensure that only one of the argnames is non-None."""
+ def _decorator(func):
+ argnameset = set(name.strip() for name in argnames.split(","))
+ def _wrapper(*args, **kwargs):
+ vals = [kwargs.get(name) for name in argnameset]
+ assert sum(val is not None for val in vals) == 1
+ return func(*args, **kwargs)
+ return _wrapper
+ return _decorator
+else: # pragma: not testing
+ # We aren't using real PyContracts, so just define our decorators as
+ # stunt-double no-ops.
+ contract = dummy_decorator_with_args
+ one_of = dummy_decorator_with_args
+
+ def new_contract(*args_unused, **kwargs_unused):
+ """Dummy no-op implementation of `new_contract`."""
+ pass
+
+
+def nice_pair(pair):
+ """Make a nice string representation of a pair of numbers.
+
+ If the numbers are equal, just return the number, otherwise return the pair
+ with a dash between them, indicating the range.
+
+ """
+ start, end = pair
+ if start == end:
+ return "%d" % start
+ else:
+ return "%d-%d" % (start, end)
+
+
+def expensive(fn):
+ """A decorator to indicate that a method shouldn't be called more than once.
+
+ Normally, this does nothing. During testing, this raises an exception if
+ called more than once.
+
+ """
+ if env.TESTING:
+ attr = "_once_" + fn.__name__
+
+ def _wrapper(self):
+ if hasattr(self, attr):
+ raise AssertionError("Shouldn't have called %s more than once" % fn.__name__)
+ setattr(self, attr, True)
+ return fn(self)
+ return _wrapper
+ else:
+ return fn # pragma: not testing
+
+
+def bool_or_none(b):
+ """Return bool(b), but preserve None."""
+ if b is None:
+ return None
+ else:
+ return bool(b)
+
+
+def join_regex(regexes):
+ """Combine a list of regexes into one that matches any of them."""
+ return "|".join("(?:%s)" % r for r in regexes)
+
+
+def file_be_gone(path):
+ """Remove a file, and don't get annoyed if it doesn't exist."""
+ try:
+ os.remove(path)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+
+def ensure_dir(directory):
+ """Make sure the directory exists.
+
+ If `directory` is None or empty, do nothing.
+ """
+ if directory and not os.path.isdir(directory):
+ os.makedirs(directory)
+
+
+def ensure_dir_for_file(path):
+ """Make sure the directory for the path exists."""
+ ensure_dir(os.path.dirname(path))
+
+
+def output_encoding(outfile=None):
+ """Determine the encoding to use for output written to `outfile` or stdout."""
+ if outfile is None:
+ outfile = sys.stdout
+ encoding = (
+ getattr(outfile, "encoding", None) or
+ getattr(sys.__stdout__, "encoding", None) or
+ locale.getpreferredencoding()
+ )
+ return encoding
+
+
+def filename_suffix(suffix):
+ """Compute a filename suffix for a data file.
+
+ If `suffix` is a string or None, simply return it. If `suffix` is True,
+ then build a suffix incorporating the hostname, process id, and a random
+ number.
+
+ Returns a string or None.
+
+ """
+ if suffix is True:
+ # If data_suffix was a simple true value, then make a suffix with
+ # plenty of distinguishing information. We do this here in
+ # `save()` at the last minute so that the pid will be correct even
+ # if the process forks.
+ dice = random.Random(os.urandom(8)).randint(0, 999999)
+ suffix = "%s.%s.%06d" % (socket.gethostname(), os.getpid(), dice)
+ return suffix
+
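
Illustrative results; the hostname, pid, and random digits in the last case are machine-dependent:

    filename_suffix(None)    # -> None (no suffix)
    filename_suffix("ci")    # -> "ci" (used verbatim)
    filename_suffix(True)    # -> e.g. "buildhost.12345.067831"
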
+
+class Hasher(object):
+ """Hashes Python data into md5."""
+ def __init__(self):
+ self.md5 = hashlib.md5()
+
+ def update(self, v):
+ """Add `v` to the hash, recursively if needed."""
+ self.md5.update(to_bytes(str(type(v))))
+ if isinstance(v, unicode_class):
+ self.md5.update(v.encode('utf8'))
+ elif isinstance(v, bytes):
+ self.md5.update(v)
+ elif v is None:
+ pass
+ elif isinstance(v, (int, float)):
+ self.md5.update(to_bytes(str(v)))
+ elif isinstance(v, (tuple, list)):
+ for e in v:
+ self.update(e)
+ elif isinstance(v, dict):
+ keys = v.keys()
+ for k in sorted(keys):
+ self.update(k)
+ self.update(v[k])
+ else:
+ for k in dir(v):
+ if k.startswith('__'):
+ continue
+ a = getattr(v, k)
+ if inspect.isroutine(a):
+ continue
+ self.update(k)
+ self.update(a)
+ self.md5.update(b'.')
+
+ def hexdigest(self):
+ """Retrieve the hex digest of the hash."""
+ return self.md5.hexdigest()
+
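
A short usage sketch; the digest reflects the types and structure of the value, not just a string rendering:

    h = Hasher()
    h.update({"name": "coverage", "lines": [1, 2, 3]})
    key = h.hexdigest()      # 32-character hex string, stable for the same input across processes
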
+
+def _needs_to_implement(that, func_name):
+ """Helper to raise NotImplementedError in interface stubs."""
+ if hasattr(that, "_coverage_plugin_name"):
+ thing = "Plugin"
+ name = that._coverage_plugin_name
+ else:
+ thing = "Class"
+ klass = that.__class__
+ name = "{klass.__module__}.{klass.__name__}".format(klass=klass)
+
+ raise NotImplementedError(
+ "{thing} {name!r} needs to implement {func_name}()".format(
+ thing=thing, name=name, func_name=func_name
+ )
+ )
+
+
+class DefaultValue(object):
+ """A sentinel object to use for unusual default-value needs.
+
+ Construct with a string that will be used as the repr, for display in help
+ and Sphinx output.
+
+ """
+ def __init__(self, display_as):
+ self.display_as = display_as
+
+ def __repr__(self):
+ return self.display_as
+
+
+def substitute_variables(text, variables):
+ """Substitute ``${VAR}`` variables in `text` with their values.
+
+ Variables in the text can take a number of shell-inspired forms::
+
+ $VAR
+ ${VAR}
+ ${VAR?} strict: an error if VAR isn't defined.
+ ${VAR-missing} defaulted: "missing" if VAR isn't defined.
+ $$ just a dollar sign.
+
+ `variables` is a dictionary of variable values.
+
+ Returns the resulting text with values substituted.
+
+ """
+ dollar_pattern = r"""(?x) # Use extended regex syntax
+ \$ # A dollar sign,
+ (?: # then
+ (?P<dollar>\$) | # a dollar sign, or
+ (?P<word1>\w+) | # a plain word, or
+ { # a {-wrapped
+ (?P<word2>\w+) # word,
+ (?:
+ (?P<strict>\?) | # with a strict marker
+ -(?P<defval>[^}]*) # or a default value
+ )? # maybe.
+ }
+ )
+ """
+
+ def dollar_replace(match):
+ """Called for each $replacement."""
+ # Only one of the groups will have matched, just get its text.
+ word = next(g for g in match.group('dollar', 'word1', 'word2') if g)
+ if word == "$":
+ return "$"
+ elif word in variables:
+ return variables[word]
+ elif match.group('strict'):
+ msg = "Variable {} is undefined: {!r}".format(word, text)
+ raise CoverageException(msg)
+ else:
+ return match.group('defval')
+
+ text = re.sub(dollar_pattern, dollar_replace, text)
+ return text
+
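
A worked example of the substitution forms listed in the docstring:

    text = "host=$HOST port=${PORT-8080} cache=${CACHE?} cost=$$5"
    substitute_variables(text, {"HOST": "localhost", "CACHE": "on"})
    # -> "host=localhost port=8080 cache=on cost=$5"
    # ${CACHE?} would raise CoverageException if CACHE were not defined.
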
+
+class BaseCoverageException(Exception):
+ """The base of all Coverage exceptions."""
+ pass
+
+
+class CoverageException(BaseCoverageException):
+ """An exception raised by a coverage.py function."""
+ pass
+
+
+class NoSource(CoverageException):
+ """We couldn't find the source for a module."""
+ pass
+
+
+class NoCode(NoSource):
+ """We couldn't find any code at all."""
+ pass
+
+
+class NotPython(CoverageException):
+ """A source file turned out not to be parsable Python."""
+ pass
+
+
+class ExceptionDuringRun(CoverageException):
+ """An exception happened while running customer code.
+
+ Construct it with three arguments, the values from `sys.exc_info`.
+
+ """
+ pass
+
+
+class StopEverything(BaseCoverageException):
+ """An exception that means everything should stop.
+
+ The CoverageTest class converts these to SkipTest, so that when running
+ tests, raising this exception will automatically skip the test.
+
+ """
+ pass
diff --git a/third_party/python/coverage/coverage/multiproc.py b/third_party/python/coverage/coverage/multiproc.py
new file mode 100644
index 0000000000..2931b3be0e
--- /dev/null
+++ b/third_party/python/coverage/coverage/multiproc.py
@@ -0,0 +1,111 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Monkey-patching to add multiprocessing support for coverage.py"""
+
+import multiprocessing
+import multiprocessing.process
+import os
+import os.path
+import sys
+import traceback
+
+from coverage import env
+from coverage.misc import contract
+
+# An attribute that will be set on the module to indicate that it has been
+# monkey-patched.
+PATCHED_MARKER = "_coverage$patched"
+
+
+if env.PYVERSION >= (3, 4):
+ OriginalProcess = multiprocessing.process.BaseProcess
+else:
+ OriginalProcess = multiprocessing.Process
+
+original_bootstrap = OriginalProcess._bootstrap
+
+class ProcessWithCoverage(OriginalProcess): # pylint: disable=abstract-method
+ """A replacement for multiprocess.Process that starts coverage."""
+
+ def _bootstrap(self, *args, **kwargs): # pylint: disable=arguments-differ
+ """Wrapper around _bootstrap to start coverage."""
+ try:
+ from coverage import Coverage # avoid circular import
+ cov = Coverage(data_suffix=True)
+ cov._warn_preimported_source = False
+ cov.start()
+ debug = cov._debug
+ if debug.should("multiproc"):
+ debug.write("Calling multiprocessing bootstrap")
+ except Exception:
+ print("Exception during multiprocessing bootstrap init:")
+ traceback.print_exc(file=sys.stdout)
+ sys.stdout.flush()
+ raise
+ try:
+ return original_bootstrap(self, *args, **kwargs)
+ finally:
+ if debug.should("multiproc"):
+ debug.write("Finished multiprocessing bootstrap")
+ cov.stop()
+ cov.save()
+ if debug.should("multiproc"):
+ debug.write("Saved multiprocessing data")
+
+class Stowaway(object):
+ """An object to pickle, so when it is unpickled, it can apply the monkey-patch."""
+ def __init__(self, rcfile):
+ self.rcfile = rcfile
+
+ def __getstate__(self):
+ return {'rcfile': self.rcfile}
+
+ def __setstate__(self, state):
+ patch_multiprocessing(state['rcfile'])
+
+
+@contract(rcfile=str)
+def patch_multiprocessing(rcfile):
+ """Monkey-patch the multiprocessing module.
+
+ This enables coverage measurement of processes started by multiprocessing.
+ This involves aggressive monkey-patching.
+
+ `rcfile` is the path to the rcfile being used.
+
+ """
+
+ if hasattr(multiprocessing, PATCHED_MARKER):
+ return
+
+ if env.PYVERSION >= (3, 4):
+ OriginalProcess._bootstrap = ProcessWithCoverage._bootstrap
+ else:
+ multiprocessing.Process = ProcessWithCoverage
+
+ # Set the value in ProcessWithCoverage that will be pickled into the child
+ # process.
+ os.environ["COVERAGE_RCFILE"] = os.path.abspath(rcfile)
+
+ # When spawning processes rather than forking them, we have no state in the
+ # new process. We sneak in there with a Stowaway: we stuff one of our own
+ # objects into the data that gets pickled and sent to the sub-process. When
+    # the Stowaway is unpickled, its __setstate__ method is called, which
+ # re-applies the monkey-patch.
+ # Windows only spawns, so this is needed to keep Windows working.
+ try:
+ from multiprocessing import spawn
+ original_get_preparation_data = spawn.get_preparation_data
+ except (ImportError, AttributeError):
+ pass
+ else:
+ def get_preparation_data_with_stowaway(name):
+ """Get the original preparation data, and also insert our stowaway."""
+ d = original_get_preparation_data(name)
+ d['stowaway'] = Stowaway(rcfile)
+ return d
+
+ spawn.get_preparation_data = get_preparation_data_with_stowaway
+
+ setattr(multiprocessing, PATCHED_MARKER, True)
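
A hedged sketch of the patch in use. In normal operation it is applied by coverage's own startup code rather than called directly, and the rcfile path here is only a placeholder:

    import multiprocessing
    from coverage.multiproc import patch_multiprocessing

    def work():
        print("this child process is measured too")

    if __name__ == "__main__":
        patch_multiprocessing(rcfile=".coveragerc")   # illustrative path
        p = multiprocessing.Process(target=work)      # its bootstrap now starts Coverage
        p.start()
        p.join()
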
diff --git a/third_party/python/coverage/coverage/numbits.py b/third_party/python/coverage/coverage/numbits.py
new file mode 100644
index 0000000000..6ca96fbcf7
--- /dev/null
+++ b/third_party/python/coverage/coverage/numbits.py
@@ -0,0 +1,163 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""
+Functions to manipulate packed binary representations of number sets.
+
+To save space, coverage stores sets of line numbers in SQLite using a packed
+binary representation called a numbits. A numbits is a set of positive
+integers.
+
+A numbits is stored as a blob in the database. The exact meaning of the bytes
+in the blobs should be considered an implementation detail that might change in
+the future. Use these functions to work with those binary blobs of data.
+
+"""
+import json
+
+from coverage import env
+from coverage.backward import byte_to_int, bytes_to_ints, binary_bytes, zip_longest
+from coverage.misc import contract, new_contract
+
+if env.PY3:
+ def _to_blob(b):
+ """Convert a bytestring into a type SQLite will accept for a blob."""
+ return b
+
+ new_contract('blob', lambda v: isinstance(v, bytes))
+else:
+ def _to_blob(b):
+ """Convert a bytestring into a type SQLite will accept for a blob."""
+ return buffer(b) # pylint: disable=undefined-variable
+
+ new_contract('blob', lambda v: isinstance(v, buffer)) # pylint: disable=undefined-variable
+
+
+@contract(nums='Iterable', returns='blob')
+def nums_to_numbits(nums):
+ """Convert `nums` into a numbits.
+
+ Arguments:
+ nums: a reusable iterable of integers, the line numbers to store.
+
+ Returns:
+ A binary blob.
+ """
+ try:
+ nbytes = max(nums) // 8 + 1
+ except ValueError:
+ # nums was empty.
+ return _to_blob(b'')
+ b = bytearray(nbytes)
+ for num in nums:
+ b[num//8] |= 1 << num % 8
+ return _to_blob(bytes(b))
+
+
+@contract(numbits='blob', returns='list[int]')
+def numbits_to_nums(numbits):
+ """Convert a numbits into a list of numbers.
+
+ Arguments:
+ numbits: a binary blob, the packed number set.
+
+ Returns:
+ A list of ints.
+
+ When registered as a SQLite function by :func:`register_sqlite_functions`,
+ this returns a string, a JSON-encoded list of ints.
+
+ """
+ nums = []
+ for byte_i, byte in enumerate(bytes_to_ints(numbits)):
+ for bit_i in range(8):
+ if (byte & (1 << bit_i)):
+ nums.append(byte_i * 8 + bit_i)
+ return nums
+
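+# Illustrative sketch (not part of the original module): line numbers 1, 2
+# and 11 pack into two bytes, and unpack back to the same list.
+#
+# nums_to_numbits([1, 2, 11]) # == _to_blob(b'\x06\x08')
+# numbits_to_nums(nums_to_numbits([1, 2, 11])) # == [1, 2, 11]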
+
+@contract(numbits1='blob', numbits2='blob', returns='blob')
+def numbits_union(numbits1, numbits2):
+ """Compute the union of two numbits.
+
+ Returns:
+ A new numbits, the union of `numbits1` and `numbits2`.
+ """
+ byte_pairs = zip_longest(bytes_to_ints(numbits1), bytes_to_ints(numbits2), fillvalue=0)
+ return _to_blob(binary_bytes(b1 | b2 for b1, b2 in byte_pairs))
+
+
+@contract(numbits1='blob', numbits2='blob', returns='blob')
+def numbits_intersection(numbits1, numbits2):
+ """Compute the intersection of two numbits.
+
+ Returns:
+ A new numbits, the intersection of `numbits1` and `numbits2`.
+ """
+ byte_pairs = zip_longest(bytes_to_ints(numbits1), bytes_to_ints(numbits2), fillvalue=0)
+ intersection_bytes = binary_bytes(b1 & b2 for b1, b2 in byte_pairs)
+ return _to_blob(intersection_bytes.rstrip(b'\0'))
+
+
+@contract(numbits1='blob', numbits2='blob', returns='bool')
+def numbits_any_intersection(numbits1, numbits2):
+ """Is there any number that appears in both numbits?
+
+ Determine whether two number sets have a non-empty intersection. This is
+ faster than computing the intersection.
+
+ Returns:
+ A bool, True if there is any number in both `numbits1` and `numbits2`.
+ """
+ byte_pairs = zip_longest(bytes_to_ints(numbits1), bytes_to_ints(numbits2), fillvalue=0)
+ return any(b1 & b2 for b1, b2 in byte_pairs)
+
+
+@contract(num='int', numbits='blob', returns='bool')
+def num_in_numbits(num, numbits):
+ """Does the integer `num` appear in `numbits`?
+
+ Returns:
+ A bool, True if `num` is a member of `numbits`.
+ """
+ nbyte, nbit = divmod(num, 8)
+ if nbyte >= len(numbits):
+ return False
+ return bool(byte_to_int(numbits[nbyte]) & (1 << nbit))
+
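+# Illustrative sketch (not part of the original module): membership tests
+# against the packed form, using nums_to_numbits from above.
+#
+# nb = nums_to_numbits([1, 2, 11])
+# num_in_numbits(11, nb) # True
+# num_in_numbits(3, nb) # False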
+
+def register_sqlite_functions(connection):
+ """
+ Define numbits functions in a SQLite connection.
+
+ This defines these functions for use in SQLite statements:
+
+ * :func:`numbits_union`
+ * :func:`numbits_intersection`
+ * :func:`numbits_any_intersection`
+ * :func:`num_in_numbits`
+ * :func:`numbits_to_nums`
+
+ `connection` is a :class:`sqlite3.Connection <python:sqlite3.Connection>`
+ object. After creating the connection, pass it to this function to
+ register the numbits functions. Then you can use numbits functions in your
+ queries::
+
+ import sqlite3
+ from coverage.numbits import register_sqlite_functions
+
+ conn = sqlite3.connect('example.db')
+ register_sqlite_functions(conn)
+ c = conn.cursor()
+ # Kind of a nonsense query: find all the files and contexts that
+ # executed line 47 in any file:
+ c.execute(
+ "select file_id, context_id from line_bits where num_in_numbits(?, numbits)",
+ (47,)
+ )
+ """
+ connection.create_function("numbits_union", 2, numbits_union)
+ connection.create_function("numbits_intersection", 2, numbits_intersection)
+ connection.create_function("numbits_any_intersection", 2, numbits_any_intersection)
+ connection.create_function("num_in_numbits", 2, num_in_numbits)
+ connection.create_function("numbits_to_nums", 1, lambda b: json.dumps(numbits_to_nums(b)))
diff --git a/third_party/python/coverage/coverage/optional.py b/third_party/python/coverage/coverage/optional.py
new file mode 100644
index 0000000000..ee617b625b
--- /dev/null
+++ b/third_party/python/coverage/coverage/optional.py
@@ -0,0 +1,68 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""
+Imports that we need at runtime, but might not be present.
+
+When importing one of these modules, always do it in the function where you
+need the module. Some tests will need to remove the module. If you import
+it at the top level of your module, then the test won't be able to simulate
+the module being unimportable.
+
+The import will always succeed, but the value will be None if the module is
+unavailable.
+
+Bad::
+
+ # MyModule.py
+ from coverage.optional import unsure
+
+ def use_unsure():
+ unsure.something()
+
+Good::
+
+ # MyModule.py
+
+ def use_unsure():
+ from coverage.optional import unsure
+ if unsure is None:
+ raise Exception("Module unsure isn't available!")
+
+ unsure.something()
+
+"""
+
+import contextlib
+
+# This file's purpose is to provide modules to be imported from here.
+# pylint: disable=unused-import
+
+# TOML support is an install-time extra option.
+try:
+ import toml
+except ImportError: # pragma: not covered
+ toml = None
+
+
+@contextlib.contextmanager
+def without(modname):
+ """Hide a module for testing.
+
+ Use this in a test function to make an optional module unavailable during
+ the test::
+
+ with coverage.optional.without('toml'):
+ use_toml_somehow()
+
+ Arguments:
+ modname (str): the name of a module importable from
+ `coverage.optional`.
+
+ """
+ real_module = globals()[modname]
+ try:
+ globals()[modname] = None
+ yield
+ finally:
+ globals()[modname] = real_module
diff --git a/third_party/python/coverage/coverage/parser.py b/third_party/python/coverage/coverage/parser.py
new file mode 100644
index 0000000000..e3e4314902
--- /dev/null
+++ b/third_party/python/coverage/coverage/parser.py
@@ -0,0 +1,1251 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Code parsing for coverage.py."""
+
+import ast
+import collections
+import os
+import re
+import token
+import tokenize
+
+from coverage import env
+from coverage.backward import range # pylint: disable=redefined-builtin
+from coverage.backward import bytes_to_ints, string_class
+from coverage.bytecode import code_objects
+from coverage.debug import short_stack
+from coverage.misc import contract, join_regex, new_contract, nice_pair, one_of
+from coverage.misc import NoSource, NotPython, StopEverything
+from coverage.phystokens import compile_unicode, generate_tokens, neuter_encoding_declaration
+
+
+class PythonParser(object):
+ """Parse code to find executable lines, excluded lines, etc.
+
+ This information is all based on static analysis: no code execution is
+ involved.
+
+ """
+ @contract(text='unicode|None')
+ def __init__(self, text=None, filename=None, exclude=None):
+ """
+ Source can be provided as `text`, the text itself, or `filename`, from
+ which the text will be read. Excluded lines are those that match
+ `exclude`, a regex.
+
+ """
+ assert text or filename, "PythonParser needs either text or filename"
+ self.filename = filename or "<code>"
+ self.text = text
+ if not self.text:
+ from coverage.python import get_python_source
+ try:
+ self.text = get_python_source(self.filename)
+ except IOError as err:
+ raise NoSource(
+ "No source for code: '%s': %s" % (self.filename, err)
+ )
+
+ self.exclude = exclude
+
+ # The text lines of the parsed code.
+ self.lines = self.text.split('\n')
+
+ # The normalized line numbers of the statements in the code. Exclusions
+ # are taken into account, and statements are adjusted to their first
+ # lines.
+ self.statements = set()
+
+ # The normalized line numbers of the excluded lines in the code,
+ # adjusted to their first lines.
+ self.excluded = set()
+
+ # The raw_* attributes are only used in this class, and in
+ # lab/parser.py to show how this class is working.
+
+ # The line numbers that start statements, as reported by the line
+ # number table in the bytecode.
+ self.raw_statements = set()
+
+ # The raw line numbers of excluded lines of code, as marked by pragmas.
+ self.raw_excluded = set()
+
+ # The line numbers of class and function definitions.
+ self.raw_classdefs = set()
+
+ # The line numbers of docstring lines.
+ self.raw_docstrings = set()
+
+ # Internal detail, used by lab/parser.py.
+ self.show_tokens = False
+
+ # A dict mapping line numbers to lexical statement starts for
+ # multi-line statements.
+ self._multiline = {}
+
+ # Lazily-created ByteParser, arc data, and missing arc descriptions.
+ self._byte_parser = None
+ self._all_arcs = None
+ self._missing_arc_fragments = None
+
+ @property
+ def byte_parser(self):
+ """Create a ByteParser on demand."""
+ if not self._byte_parser:
+ self._byte_parser = ByteParser(self.text, filename=self.filename)
+ return self._byte_parser
+
+ def lines_matching(self, *regexes):
+ """Find the lines matching one of a list of regexes.
+
+ Returns a set of line numbers, the lines that contain a match for one
+ of the regexes in `regexes`. The entire line needn't match, just a
+ part of it.
+
+ """
+ combined = join_regex(regexes)
+ if env.PY2:
+ combined = combined.decode("utf8")
+ regex_c = re.compile(combined)
+ matches = set()
+ for i, ltext in enumerate(self.lines, start=1):
+ if regex_c.search(ltext):
+ matches.add(i)
+ return matches
+
+ def _raw_parse(self):
+ """Parse the source to find the interesting facts about its lines.
+
+ A handful of attributes are updated.
+
+ """
+ # Find lines which match an exclusion pattern.
+ if self.exclude:
+ self.raw_excluded = self.lines_matching(self.exclude)
+
+ # Tokenize, to find excluded suites, to find docstrings, and to find
+ # multi-line statements.
+ indent = 0
+ exclude_indent = 0
+ excluding = False
+ excluding_decorators = False
+ prev_toktype = token.INDENT
+ first_line = None
+ empty = True
+ first_on_line = True
+
+ tokgen = generate_tokens(self.text)
+ for toktype, ttext, (slineno, _), (elineno, _), ltext in tokgen:
+ if self.show_tokens: # pragma: debugging
+ print("%10s %5s %-20r %r" % (
+ tokenize.tok_name.get(toktype, toktype),
+ nice_pair((slineno, elineno)), ttext, ltext
+ ))
+ if toktype == token.INDENT:
+ indent += 1
+ elif toktype == token.DEDENT:
+ indent -= 1
+ elif toktype == token.NAME:
+ if ttext == 'class':
+ # Class definitions look like branches in the bytecode, so
+ # we need to exclude them. The simplest way is to note the
+ # lines with the 'class' keyword.
+ self.raw_classdefs.add(slineno)
+ elif toktype == token.OP:
+ if ttext == ':':
+ should_exclude = (elineno in self.raw_excluded) or excluding_decorators
+ if not excluding and should_exclude:
+ # Start excluding a suite. We trigger off of the colon
+ # token so that the #pragma comment will be recognized on
+ # the same line as the colon.
+ self.raw_excluded.add(elineno)
+ exclude_indent = indent
+ excluding = True
+ excluding_decorators = False
+ elif ttext == '@' and first_on_line:
+ # A decorator.
+ if elineno in self.raw_excluded:
+ excluding_decorators = True
+ if excluding_decorators:
+ self.raw_excluded.add(elineno)
+ elif toktype == token.STRING and prev_toktype == token.INDENT:
+ # Strings that are first on an indented line are docstrings.
+ # (a trick from trace.py in the stdlib.) This works for
+ # 99.9999% of cases. For the rest (!) see:
+ # http://stackoverflow.com/questions/1769332/x/1769794#1769794
+ self.raw_docstrings.update(range(slineno, elineno+1))
+ elif toktype == token.NEWLINE:
+ if first_line is not None and elineno != first_line:
+ # We're at the end of a line, and we've ended on a
+ # different line than the first line of the statement,
+ # so record a multi-line range.
+ for l in range(first_line, elineno+1):
+ self._multiline[l] = first_line
+ first_line = None
+ first_on_line = True
+
+ if ttext.strip() and toktype != tokenize.COMMENT:
+ # A non-whitespace token.
+ empty = False
+ if first_line is None:
+ # The token is not whitespace, and is the first in a
+ # statement.
+ first_line = slineno
+ # Check whether to end an excluded suite.
+ if excluding and indent <= exclude_indent:
+ excluding = False
+ if excluding:
+ self.raw_excluded.add(elineno)
+ first_on_line = False
+
+ prev_toktype = toktype
+
+ # Find the starts of the executable statements.
+ if not empty:
+ self.raw_statements.update(self.byte_parser._find_statements())
+
+ def first_line(self, line):
+ """Return the first line number of the statement including `line`."""
+ if line < 0:
+ line = -self._multiline.get(-line, -line)
+ else:
+ line = self._multiline.get(line, line)
+ return line
+
+ def first_lines(self, lines):
+ """Map the line numbers in `lines` to the correct first line of the
+ statement.
+
+ Returns a set of the first lines.
+
+ """
+ return set(self.first_line(l) for l in lines)
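+
+ # Illustrative sketch (not part of the original class): if a statement
+ # spans lines 3-5, self._multiline maps 3, 4 and 5 to 3, so
+ # first_line(4) == 3 and first_lines([4, 5, 7]) == {3, 7}.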
+
+ def translate_lines(self, lines):
+ """Implement `FileReporter.translate_lines`."""
+ return self.first_lines(lines)
+
+ def translate_arcs(self, arcs):
+ """Implement `FileReporter.translate_arcs`."""
+ return [(self.first_line(a), self.first_line(b)) for (a, b) in arcs]
+
+ def parse_source(self):
+ """Parse source text to find executable lines, excluded lines, etc.
+
+ Sets the .excluded and .statements attributes, normalized to the first
+ line of multi-line statements.
+
+ """
+ try:
+ self._raw_parse()
+ except (tokenize.TokenError, IndentationError) as err:
+ if hasattr(err, "lineno"):
+ lineno = err.lineno # IndentationError
+ else:
+ lineno = err.args[1][0] # TokenError
+ raise NotPython(
+ u"Couldn't parse '%s' as Python source: '%s' at line %d" % (
+ self.filename, err.args[0], lineno
+ )
+ )
+
+ self.excluded = self.first_lines(self.raw_excluded)
+
+ ignore = self.excluded | self.raw_docstrings
+ starts = self.raw_statements - ignore
+ self.statements = self.first_lines(starts) - ignore
+
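+ # Illustrative usage sketch (not part of the original class); the source
+ # text here is hypothetical:
+ #
+ # p = PythonParser(text=u"a = 1\nb = 2\n")
+ # p.parse_source()
+ # p.statements # == {1, 2}
+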
+ def arcs(self):
+ """Get information about the arcs available in the code.
+
+ Returns a set of line number pairs. Line numbers have been normalized
+ to the first line of multi-line statements.
+
+ """
+ if self._all_arcs is None:
+ self._analyze_ast()
+ return self._all_arcs
+
+ def _analyze_ast(self):
+ """Run the AstArcAnalyzer and save its results.
+
+ `_all_arcs` is the set of arcs in the code.
+
+ """
+ aaa = AstArcAnalyzer(self.text, self.raw_statements, self._multiline)
+ aaa.analyze()
+
+ self._all_arcs = set()
+ for l1, l2 in aaa.arcs:
+ fl1 = self.first_line(l1)
+ fl2 = self.first_line(l2)
+ if fl1 != fl2:
+ self._all_arcs.add((fl1, fl2))
+
+ self._missing_arc_fragments = aaa.missing_arc_fragments
+
+ def exit_counts(self):
+ """Get a count of exits from that each line.
+
+ Excluded lines are excluded.
+
+ """
+ exit_counts = collections.defaultdict(int)
+ for l1, l2 in self.arcs():
+ if l1 < 0:
+ # Don't ever report -1 as a line number
+ continue
+ if l1 in self.excluded:
+ # Don't report excluded lines as line numbers.
+ continue
+ if l2 in self.excluded:
+ # Arcs to excluded lines shouldn't count.
+ continue
+ exit_counts[l1] += 1
+
+ # Class definitions have one extra exit, so remove one for each:
+ for l in self.raw_classdefs:
+ # Ensure key is there: class definitions can include excluded lines.
+ if l in exit_counts:
+ exit_counts[l] -= 1
+
+ return exit_counts
+
+ def missing_arc_description(self, start, end, executed_arcs=None):
+ """Provide an English sentence describing a missing arc."""
+ if self._missing_arc_fragments is None:
+ self._analyze_ast()
+
+ actual_start = start
+
+ if (
+ executed_arcs and
+ end < 0 and end == -start and
+ (end, start) not in executed_arcs and
+ (end, start) in self._missing_arc_fragments
+ ):
+ # It's a one-line callable, and we never even started it,
+ # and we have a message about not starting it.
+ start, end = end, start
+
+ fragment_pairs = self._missing_arc_fragments.get((start, end), [(None, None)])
+
+ msgs = []
+ for fragment_pair in fragment_pairs:
+ smsg, emsg = fragment_pair
+
+ if emsg is None:
+ if end < 0:
+ # Hmm, maybe we have a one-line callable, let's check.
+ if (-end, end) in self._missing_arc_fragments:
+ return self.missing_arc_description(-end, end)
+ emsg = "didn't jump to the function exit"
+ else:
+ emsg = "didn't jump to line {lineno}"
+ emsg = emsg.format(lineno=end)
+
+ msg = "line {start} {emsg}".format(start=actual_start, emsg=emsg)
+ if smsg is not None:
+ msg += ", because {smsg}".format(smsg=smsg.format(lineno=actual_start))
+
+ msgs.append(msg)
+
+ return " or ".join(msgs)
+
+
+class ByteParser(object):
+ """Parse bytecode to understand the structure of code."""
+
+ @contract(text='unicode')
+ def __init__(self, text, code=None, filename=None):
+ self.text = text
+ if code:
+ self.code = code
+ else:
+ try:
+ self.code = compile_unicode(text, filename, "exec")
+ except SyntaxError as synerr:
+ raise NotPython(
+ u"Couldn't parse '%s' as Python source: '%s' at line %d" % (
+ filename, synerr.msg, synerr.lineno
+ )
+ )
+
+ # Alternative Python implementations don't always provide all the
+ # attributes on code objects that we need to do the analysis.
+ for attr in ['co_lnotab', 'co_firstlineno']:
+ if not hasattr(self.code, attr):
+ raise StopEverything( # pragma: only jython
+ "This implementation of Python doesn't support code analysis.\n"
+ "Run coverage.py under another Python for this command."
+ )
+
+ def child_parsers(self):
+ """Iterate over all the code objects nested within this one.
+
+ The iteration includes `self` as its first value.
+
+ """
+ return (ByteParser(self.text, code=c) for c in code_objects(self.code))
+
+ def _bytes_lines(self):
+ """Map byte offsets to line numbers in `code`.
+
+ Uses co_lnotab described in Python/compile.c to map byte offsets to
+ line numbers. Produces a sequence: (b0, l0), (b1, l1), ...
+
+ Only byte offsets that correspond to line numbers are included in the
+ results.
+
+ """
+ # Adapted from dis.py in the standard library.
+ byte_increments = bytes_to_ints(self.code.co_lnotab[0::2])
+ line_increments = bytes_to_ints(self.code.co_lnotab[1::2])
+
+ last_line_num = None
+ line_num = self.code.co_firstlineno
+ byte_num = 0
+ for byte_incr, line_incr in zip(byte_increments, line_increments):
+ if byte_incr:
+ if line_num != last_line_num:
+ yield (byte_num, line_num)
+ last_line_num = line_num
+ byte_num += byte_incr
+ if env.PYBEHAVIOR.negative_lnotab and line_incr >= 0x80:
+ line_incr -= 0x100
+ line_num += line_incr
+ if line_num != last_line_num:
+ yield (byte_num, line_num)
+
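+ # Worked sketch (not part of the original class): with co_firstlineno == 3
+ # and co_lnotab == b'\x06\x01\x08\x02', _bytes_lines yields (0, 3), (6, 4)
+ # and (14, 6): byte offsets 0, 6 and 14 begin lines 3, 4 and 6.
+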
+ def _find_statements(self):
+ """Find the statements in `self.code`.
+
+ Produce a sequence of line numbers that start statements. Recurses
+ into all code objects reachable from `self.code`.
+
+ """
+ for bp in self.child_parsers():
+ # Get all of the lineno information from this code.
+ for _, l in bp._bytes_lines():
+ yield l
+
+
+#
+# AST analysis
+#
+
+class LoopBlock(object):
+ """A block on the block stack representing a `for` or `while` loop."""
+ @contract(start=int)
+ def __init__(self, start):
+ # The line number where the loop starts.
+ self.start = start
+ # A set of ArcStarts, the arcs from break statements exiting this loop.
+ self.break_exits = set()
+
+
+class FunctionBlock(object):
+ """A block on the block stack representing a function definition."""
+ @contract(start=int, name=str)
+ def __init__(self, start, name):
+ # The line number where the function starts.
+ self.start = start
+ # The name of the function.
+ self.name = name
+
+
+class TryBlock(object):
+ """A block on the block stack representing a `try` block."""
+ @contract(handler_start='int|None', final_start='int|None')
+ def __init__(self, handler_start, final_start):
+ # The line number of the first "except" handler, if any.
+ self.handler_start = handler_start
+ # The line number of the "finally:" clause, if any.
+ self.final_start = final_start
+
+ # The ArcStarts for breaks/continues/returns/raises inside the "try:"
+ # that need to route through the "finally:" clause.
+ self.break_from = set()
+ self.continue_from = set()
+ self.return_from = set()
+ self.raise_from = set()
+
+
+class ArcStart(collections.namedtuple("Arc", "lineno, cause")):
+ """The information needed to start an arc.
+
+ `lineno` is the line number the arc starts from.
+
+ `cause` is an English text fragment used as the `startmsg` for
+ AstArcAnalyzer.missing_arc_fragments. It will be used to describe why an
+ arc wasn't executed, so should fit well into a sentence of the form,
+ "Line 17 didn't run because {cause}." The fragment can include "{lineno}"
+ to have `lineno` interpolated into it.
+
+ """
+ def __new__(cls, lineno, cause=None):
+ return super(ArcStart, cls).__new__(cls, lineno, cause)
+
+
+# Define contract words that PyContract doesn't have.
+# ArcStarts is for a list or set of ArcStart's.
+new_contract('ArcStarts', lambda seq: all(isinstance(x, ArcStart) for x in seq))
+
+
+# Turn on AST dumps with an environment variable.
+# $set_env.py: COVERAGE_AST_DUMP - Dump the AST nodes when parsing code.
+AST_DUMP = bool(int(os.environ.get("COVERAGE_AST_DUMP", 0)))
+
+class NodeList(object):
+ """A synthetic fictitious node, containing a sequence of nodes.
+
+ This is used when collapsing optimized if-statements, to represent the
+ unconditional execution of one of the clauses.
+
+ """
+ def __init__(self, body):
+ self.body = body
+ self.lineno = body[0].lineno
+
+
+# TODO: some add_arcs methods here don't add arcs, they return them. Rename them.
+# TODO: the cause messages have too many commas.
+# TODO: Shouldn't the cause messages join with "and" instead of "or"?
+
+class AstArcAnalyzer(object):
+ """Analyze source text with an AST to find executable code paths."""
+
+ @contract(text='unicode', statements=set)
+ def __init__(self, text, statements, multiline):
+ self.root_node = ast.parse(neuter_encoding_declaration(text))
+ # TODO: I think this is happening in too many places.
+ self.statements = set(multiline.get(l, l) for l in statements)
+ self.multiline = multiline
+
+ if AST_DUMP: # pragma: debugging
+ # Dump the AST so that failing tests have helpful output.
+ print("Statements: {}".format(self.statements))
+ print("Multiline map: {}".format(self.multiline))
+ ast_dump(self.root_node)
+
+ self.arcs = set()
+
+ # A map from arc pairs to a list of pairs of sentence fragments:
+ # { (start, end): [(startmsg, endmsg), ...], }
+ #
+ # For an arc from line 17, they should be usable like:
+ # "Line 17 {endmsg}, because {startmsg}"
+ self.missing_arc_fragments = collections.defaultdict(list)
+ self.block_stack = []
+
+ # $set_env.py: COVERAGE_TRACK_ARCS - Trace every arc added while parsing code.
+ self.debug = bool(int(os.environ.get("COVERAGE_TRACK_ARCS", 0)))
+
+ def analyze(self):
+ """Examine the AST tree from `root_node` to determine possible arcs.
+
+ This sets the `arcs` attribute to be a set of (from, to) line number
+ pairs.
+
+ """
+ for node in ast.walk(self.root_node):
+ node_name = node.__class__.__name__
+ code_object_handler = getattr(self, "_code_object__" + node_name, None)
+ if code_object_handler is not None:
+ code_object_handler(node)
+
+ @contract(start=int, end=int)
+ def add_arc(self, start, end, smsg=None, emsg=None):
+ """Add an arc, including message fragments to use if it is missing."""
+ if self.debug: # pragma: debugging
+ print("\nAdding arc: ({}, {}): {!r}, {!r}".format(start, end, smsg, emsg))
+ print(short_stack(limit=6))
+ self.arcs.add((start, end))
+
+ if smsg is not None or emsg is not None:
+ self.missing_arc_fragments[(start, end)].append((smsg, emsg))
+
+ def nearest_blocks(self):
+ """Yield the blocks in nearest-to-farthest order."""
+ return reversed(self.block_stack)
+
+ @contract(returns=int)
+ def line_for_node(self, node):
+ """What is the right line number to use for this node?
+
+ This dispatches to _line__Node functions where needed.
+
+ """
+ node_name = node.__class__.__name__
+ handler = getattr(self, "_line__" + node_name, None)
+ if handler is not None:
+ return handler(node)
+ else:
+ return node.lineno
+
+ def _line_decorated(self, node):
+ """Compute first line number for things that can be decorated (classes and functions)."""
+ lineno = node.lineno
+ if env.PYBEHAVIOR.trace_decorated_def:
+ if node.decorator_list:
+ lineno = node.decorator_list[0].lineno
+ return lineno
+
+ def _line__Assign(self, node):
+ return self.line_for_node(node.value)
+
+ _line__ClassDef = _line_decorated
+
+ def _line__Dict(self, node):
+ # Python 3.5 changed how dict literals are made.
+ if env.PYVERSION >= (3, 5) and node.keys:
+ if node.keys[0] is not None:
+ return node.keys[0].lineno
+ else:
+ # Unpacked dict literals `{**{'a':1}}` have None as the key,
+ # use the value in that case.
+ return node.values[0].lineno
+ else:
+ return node.lineno
+
+ _line__FunctionDef = _line_decorated
+ _line__AsyncFunctionDef = _line_decorated
+
+ def _line__List(self, node):
+ if node.elts:
+ return self.line_for_node(node.elts[0])
+ else:
+ return node.lineno
+
+ def _line__Module(self, node):
+ if node.body:
+ return self.line_for_node(node.body[0])
+ else:
+ # Empty modules have no line number; they always start at 1.
+ return 1
+
+ # The node types that just flow to the next node with no complications.
+ OK_TO_DEFAULT = set([
+ "Assign", "Assert", "AugAssign", "Delete", "Exec", "Expr", "Global",
+ "Import", "ImportFrom", "Nonlocal", "Pass", "Print",
+ ])
+
+ @contract(returns='ArcStarts')
+ def add_arcs(self, node):
+ """Add the arcs for `node`.
+
+ Return a set of ArcStarts, exits from this node to the next. Because a
+ node represents an entire sub-tree (including its children), the exits
+ from a node can be arbitrarily complex::
+
+ if something(1):
+ if other(2):
+ doit(3)
+ else:
+ doit(5)
+
+ There are two exits from line 1: they start at line 3 and line 5.
+
+ """
+ node_name = node.__class__.__name__
+ handler = getattr(self, "_handle__" + node_name, None)
+ if handler is not None:
+ return handler(node)
+ else:
+ # No handler: either it's something that's ok to default (a simple
+ # statement), or it's something we overlooked. Change this 0 to 1
+ # to see if it's overlooked.
+ if 0:
+ if node_name not in self.OK_TO_DEFAULT:
+ print("*** Unhandled: {}".format(node))
+
+ # Default for simple statements: one exit from this node.
+ return set([ArcStart(self.line_for_node(node))])
+
+ @one_of("from_start, prev_starts")
+ @contract(returns='ArcStarts')
+ def add_body_arcs(self, body, from_start=None, prev_starts=None):
+ """Add arcs for the body of a compound statement.
+
+ `body` is the body node. `from_start` is a single `ArcStart` that can
+ be the previous line in flow before this body. `prev_starts` is a set
+ of ArcStarts that can be the previous line. Only one of them should be
+ given.
+
+ Returns a set of ArcStarts, the exits from this body.
+
+ """
+ if prev_starts is None:
+ prev_starts = set([from_start])
+ for body_node in body:
+ lineno = self.line_for_node(body_node)
+ first_line = self.multiline.get(lineno, lineno)
+ if first_line not in self.statements:
+ body_node = self.find_non_missing_node(body_node)
+ if body_node is None:
+ continue
+ lineno = self.line_for_node(body_node)
+ for prev_start in prev_starts:
+ self.add_arc(prev_start.lineno, lineno, prev_start.cause)
+ prev_starts = self.add_arcs(body_node)
+ return prev_starts
+
+ def find_non_missing_node(self, node):
+ """Search `node` looking for a child that has not been optimized away.
+
+ This might return the node you started with, or it will work recursively
+ to find a child node in self.statements.
+
+ Returns a node, or None if none of the node remains.
+
+ """
+ # This repeats work just done in add_body_arcs, but this duplication
+ # means we can avoid a function call in the 99.9999% case of not
+ # optimizing away statements.
+ lineno = self.line_for_node(node)
+ first_line = self.multiline.get(lineno, lineno)
+ if first_line in self.statements:
+ return node
+
+ missing_fn = getattr(self, "_missing__" + node.__class__.__name__, None)
+ if missing_fn:
+ node = missing_fn(node)
+ else:
+ node = None
+ return node
+
+ # Missing nodes: _missing__*
+ #
+ # Entire statements can be optimized away by Python. They will appear in
+ # the AST, but not the bytecode. These functions are called (by
+ # find_non_missing_node) to find a node to use instead of the missing
+ # node. They can return None if the node should truly be gone.
+
+ def _missing__If(self, node):
+ # If the if-node is missing, then one of its children might still be
+ # here, but not both. So return the first of the two that isn't missing.
+ # Use a NodeList to hold the clauses as a single node.
+ non_missing = self.find_non_missing_node(NodeList(node.body))
+ if non_missing:
+ return non_missing
+ if node.orelse:
+ return self.find_non_missing_node(NodeList(node.orelse))
+ return None
+
+ def _missing__NodeList(self, node):
+ # A NodeList might be a mixture of missing and present nodes. Find the
+ # ones that are present.
+ non_missing_children = []
+ for child in node.body:
+ child = self.find_non_missing_node(child)
+ if child is not None:
+ non_missing_children.append(child)
+
+ # Return the simplest representation of the present children.
+ if not non_missing_children:
+ return None
+ if len(non_missing_children) == 1:
+ return non_missing_children[0]
+ return NodeList(non_missing_children)
+
+ def _missing__While(self, node):
+ body_nodes = self.find_non_missing_node(NodeList(node.body))
+ if not body_nodes:
+ return None
+ # Make a synthetic While-true node.
+ new_while = ast.While()
+ new_while.lineno = body_nodes.lineno
+ new_while.test = ast.Name()
+ new_while.test.lineno = body_nodes.lineno
+ new_while.test.id = "True"
+ new_while.body = body_nodes.body
+ new_while.orelse = None
+ return new_while
+
+ def is_constant_expr(self, node):
+ """Is this a compile-time constant?"""
+ node_name = node.__class__.__name__
+ if node_name in ["Constant", "NameConstant", "Num"]:
+ return "Num"
+ elif node_name == "Name":
+ if node.id in ["True", "False", "None", "__debug__"]:
+ return "Name"
+ return None
+
+ # In the fullness of time, these might be good tests to write:
+ # while EXPR:
+ # while False:
+ # listcomps hidden deep in other expressions
+ # listcomps hidden in lists: x = [[i for i in range(10)]]
+ # nested function definitions
+
+
+ # Exit processing: process_*_exits
+ #
+ # These functions process the four kinds of jump exits: break, continue,
+ # raise, and return. To figure out where an exit goes, we have to look at
+ # the block stack context. For example, a break will jump to the nearest
+ # enclosing loop block, or the nearest enclosing finally block, whichever
+ # is nearer.
+
+ @contract(exits='ArcStarts')
+ def process_break_exits(self, exits):
+ """Add arcs due to jumps from `exits` being breaks."""
+ for block in self.nearest_blocks():
+ if isinstance(block, LoopBlock):
+ block.break_exits.update(exits)
+ break
+ elif isinstance(block, TryBlock) and block.final_start is not None:
+ block.break_from.update(exits)
+ break
+
+ @contract(exits='ArcStarts')
+ def process_continue_exits(self, exits):
+ """Add arcs due to jumps from `exits` being continues."""
+ for block in self.nearest_blocks():
+ if isinstance(block, LoopBlock):
+ for xit in exits:
+ self.add_arc(xit.lineno, block.start, xit.cause)
+ break
+ elif isinstance(block, TryBlock) and block.final_start is not None:
+ block.continue_from.update(exits)
+ break
+
+ @contract(exits='ArcStarts')
+ def process_raise_exits(self, exits):
+ """Add arcs due to jumps from `exits` being raises."""
+ for block in self.nearest_blocks():
+ if isinstance(block, TryBlock):
+ if block.handler_start is not None:
+ for xit in exits:
+ self.add_arc(xit.lineno, block.handler_start, xit.cause)
+ break
+ elif block.final_start is not None:
+ block.raise_from.update(exits)
+ break
+ elif isinstance(block, FunctionBlock):
+ for xit in exits:
+ self.add_arc(
+ xit.lineno, -block.start, xit.cause,
+ "didn't except from function {!r}".format(block.name),
+ )
+ break
+
+ @contract(exits='ArcStarts')
+ def process_return_exits(self, exits):
+ """Add arcs due to jumps from `exits` being returns."""
+ for block in self.nearest_blocks():
+ if isinstance(block, TryBlock) and block.final_start is not None:
+ block.return_from.update(exits)
+ break
+ elif isinstance(block, FunctionBlock):
+ for xit in exits:
+ self.add_arc(
+ xit.lineno, -block.start, xit.cause,
+ "didn't return from function {!r}".format(block.name),
+ )
+ break
+
+
+ # Handlers: _handle__*
+ #
+ # Each handler deals with a specific AST node type, dispatched from
+ # add_arcs. Handlers return the set of exits from that node, and can
+ # also call self.add_arc to record arcs they find. These functions mirror
+ # the Python semantics of each syntactic construct. See the docstring
+ # for add_arcs to understand the concept of exits from a node.
+
+ @contract(returns='ArcStarts')
+ def _handle__Break(self, node):
+ here = self.line_for_node(node)
+ break_start = ArcStart(here, cause="the break on line {lineno} wasn't executed")
+ self.process_break_exits([break_start])
+ return set()
+
+ @contract(returns='ArcStarts')
+ def _handle_decorated(self, node):
+ """Add arcs for things that can be decorated (classes and functions)."""
+ main_line = last = node.lineno
+ if node.decorator_list:
+ if env.PYBEHAVIOR.trace_decorated_def:
+ last = None
+ for dec_node in node.decorator_list:
+ dec_start = self.line_for_node(dec_node)
+ if last is not None and dec_start != last:
+ self.add_arc(last, dec_start)
+ last = dec_start
+ if env.PYBEHAVIOR.trace_decorated_def:
+ self.add_arc(last, main_line)
+ last = main_line
+ # The definition line may have been missed, but we should have it
+ # in `self.statements`. For some constructs, `line_for_node` is
+ # not what we'd think of as the first line in the statement, so map
+ # it to the first one.
+ if node.body:
+ body_start = self.line_for_node(node.body[0])
+ body_start = self.multiline.get(body_start, body_start)
+ for lineno in range(last+1, body_start):
+ if lineno in self.statements:
+ self.add_arc(last, lineno)
+ last = lineno
+ # The body is handled in collect_arcs.
+ return set([ArcStart(last)])
+
+ _handle__ClassDef = _handle_decorated
+
+ @contract(returns='ArcStarts')
+ def _handle__Continue(self, node):
+ here = self.line_for_node(node)
+ continue_start = ArcStart(here, cause="the continue on line {lineno} wasn't executed")
+ self.process_continue_exits([continue_start])
+ return set()
+
+ @contract(returns='ArcStarts')
+ def _handle__For(self, node):
+ start = self.line_for_node(node.iter)
+ self.block_stack.append(LoopBlock(start=start))
+ from_start = ArcStart(start, cause="the loop on line {lineno} never started")
+ exits = self.add_body_arcs(node.body, from_start=from_start)
+ # Any exit from the body will go back to the top of the loop.
+ for xit in exits:
+ self.add_arc(xit.lineno, start, xit.cause)
+ my_block = self.block_stack.pop()
+ exits = my_block.break_exits
+ from_start = ArcStart(start, cause="the loop on line {lineno} didn't complete")
+ if node.orelse:
+ else_exits = self.add_body_arcs(node.orelse, from_start=from_start)
+ exits |= else_exits
+ else:
+ # No else clause: exit from the for line.
+ exits.add(from_start)
+ return exits
+
+ _handle__AsyncFor = _handle__For
+
+ _handle__FunctionDef = _handle_decorated
+ _handle__AsyncFunctionDef = _handle_decorated
+
+ @contract(returns='ArcStarts')
+ def _handle__If(self, node):
+ start = self.line_for_node(node.test)
+ from_start = ArcStart(start, cause="the condition on line {lineno} was never true")
+ exits = self.add_body_arcs(node.body, from_start=from_start)
+ from_start = ArcStart(start, cause="the condition on line {lineno} was never false")
+ exits |= self.add_body_arcs(node.orelse, from_start=from_start)
+ return exits
+
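+ # Illustrative sketch (not part of the original class): for
+ #
+ # 1 if cond:
+ # 2 a()
+ # 3 else:
+ # 4 b()
+ #
+ # _handle__If adds arcs (1, 2) and (1, 4), and returns ArcStarts for
+ # lines 2 and 4 as the exits of the whole statement.
+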
+ @contract(returns='ArcStarts')
+ def _handle__NodeList(self, node):
+ start = self.line_for_node(node)
+ exits = self.add_body_arcs(node.body, from_start=ArcStart(start))
+ return exits
+
+ @contract(returns='ArcStarts')
+ def _handle__Raise(self, node):
+ here = self.line_for_node(node)
+ raise_start = ArcStart(here, cause="the raise on line {lineno} wasn't executed")
+ self.process_raise_exits([raise_start])
+ # `raise` statement jumps away, no exits from here.
+ return set()
+
+ @contract(returns='ArcStarts')
+ def _handle__Return(self, node):
+ here = self.line_for_node(node)
+ return_start = ArcStart(here, cause="the return on line {lineno} wasn't executed")
+ self.process_return_exits([return_start])
+ # `return` statement jumps away, no exits from here.
+ return set()
+
+ @contract(returns='ArcStarts')
+ def _handle__Try(self, node):
+ if node.handlers:
+ handler_start = self.line_for_node(node.handlers[0])
+ else:
+ handler_start = None
+
+ if node.finalbody:
+ final_start = self.line_for_node(node.finalbody[0])
+ else:
+ final_start = None
+
+ try_block = TryBlock(handler_start, final_start)
+ self.block_stack.append(try_block)
+
+ start = self.line_for_node(node)
+ exits = self.add_body_arcs(node.body, from_start=ArcStart(start))
+
+ # We're done with the `try` body, so this block no longer handles
+ # exceptions. We keep the block so the `finally` clause can pick up
+ # flows from the handlers and `else` clause.
+ if node.finalbody:
+ try_block.handler_start = None
+ if node.handlers:
+ # If there are `except` clauses, then raises in the try body
+ # will already jump to them. Start this set over for raises in
+ # `except` and `else`.
+ try_block.raise_from = set([])
+ else:
+ self.block_stack.pop()
+
+ handler_exits = set()
+
+ if node.handlers:
+ last_handler_start = None
+ for handler_node in node.handlers:
+ handler_start = self.line_for_node(handler_node)
+ if last_handler_start is not None:
+ self.add_arc(last_handler_start, handler_start)
+ last_handler_start = handler_start
+ from_cause = "the exception caught by line {lineno} didn't happen"
+ from_start = ArcStart(handler_start, cause=from_cause)
+ handler_exits |= self.add_body_arcs(handler_node.body, from_start=from_start)
+
+ if node.orelse:
+ exits = self.add_body_arcs(node.orelse, prev_starts=exits)
+
+ exits |= handler_exits
+
+ if node.finalbody:
+ self.block_stack.pop()
+ final_from = ( # You can get to the `finally` clause from:
+ exits | # the exits of the body or `else` clause,
+ try_block.break_from | # or a `break`,
+ try_block.continue_from | # or a `continue`,
+ try_block.raise_from | # or a `raise`,
+ try_block.return_from # or a `return`.
+ )
+
+ final_exits = self.add_body_arcs(node.finalbody, prev_starts=final_from)
+
+ if try_block.break_from:
+ if env.PYBEHAVIOR.finally_jumps_back:
+ for break_line in try_block.break_from:
+ lineno = break_line.lineno
+ cause = break_line.cause.format(lineno=lineno)
+ for final_exit in final_exits:
+ self.add_arc(final_exit.lineno, lineno, cause)
+ breaks = try_block.break_from
+ else:
+ breaks = self._combine_finally_starts(try_block.break_from, final_exits)
+ self.process_break_exits(breaks)
+
+ if try_block.continue_from:
+ if env.PYBEHAVIOR.finally_jumps_back:
+ for continue_line in try_block.continue_from:
+ lineno = continue_line.lineno
+ cause = continue_line.cause.format(lineno=lineno)
+ for final_exit in final_exits:
+ self.add_arc(final_exit.lineno, lineno, cause)
+ continues = try_block.continue_from
+ else:
+ continues = self._combine_finally_starts(try_block.continue_from, final_exits)
+ self.process_continue_exits(continues)
+
+ if try_block.raise_from:
+ self.process_raise_exits(
+ self._combine_finally_starts(try_block.raise_from, final_exits)
+ )
+
+ if try_block.return_from:
+ if env.PYBEHAVIOR.finally_jumps_back:
+ for return_line in try_block.return_from:
+ lineno = return_line.lineno
+ cause = return_line.cause.format(lineno=lineno)
+ for final_exit in final_exits:
+ self.add_arc(final_exit.lineno, lineno, cause)
+ returns = try_block.return_from
+ else:
+ returns = self._combine_finally_starts(try_block.return_from, final_exits)
+ self.process_return_exits(returns)
+
+ if exits:
+ # The finally clause's exits are only exits for the try block
+ # as a whole if the try block had some exits to begin with.
+ exits = final_exits
+
+ return exits
+
+ @contract(starts='ArcStarts', exits='ArcStarts', returns='ArcStarts')
+ def _combine_finally_starts(self, starts, exits):
+ """Helper for building the cause of `finally` branches.
+
+ "finally" clauses might not execute their exits, and the causes could
+ be due to a failure to execute any of the exits in the try block. So
+ we use the causes from `starts` as the causes for `exits`.
+ """
+ causes = []
+ for start in sorted(starts):
+ if start.cause is not None:
+ causes.append(start.cause.format(lineno=start.lineno))
+ cause = " or ".join(causes)
+ exits = set(ArcStart(xit.lineno, cause) for xit in exits)
+ return exits
+
+ @contract(returns='ArcStarts')
+ def _handle__TryExcept(self, node):
+ # Python 2.7 uses separate TryExcept and TryFinally nodes. If we get
+ # TryExcept, it means there was no finally clause, so fake an empty
+ # finalbody and treat it as a general Try node.
+ node.finalbody = []
+ return self._handle__Try(node)
+
+ @contract(returns='ArcStarts')
+ def _handle__TryFinally(self, node):
+ # Python 2.7 uses separate TryExcept and TryFinally nodes. If we get
+ # TryFinally, see if there's a TryExcept nested inside. If so, merge
+ # them. Otherwise, fake fields to complete a Try node.
+ node.handlers = []
+ node.orelse = []
+
+ first = node.body[0]
+ if first.__class__.__name__ == "TryExcept" and node.lineno == first.lineno:
+ assert len(node.body) == 1
+ node.body = first.body
+ node.handlers = first.handlers
+ node.orelse = first.orelse
+
+ return self._handle__Try(node)
+
+ @contract(returns='ArcStarts')
+ def _handle__While(self, node):
+ constant_test = self.is_constant_expr(node.test)
+ start = to_top = self.line_for_node(node.test)
+ if constant_test and (env.PY3 or constant_test == "Num"):
+ to_top = self.line_for_node(node.body[0])
+ self.block_stack.append(LoopBlock(start=to_top))
+ from_start = ArcStart(start, cause="the condition on line {lineno} was never true")
+ exits = self.add_body_arcs(node.body, from_start=from_start)
+ for xit in exits:
+ self.add_arc(xit.lineno, to_top, xit.cause)
+ exits = set()
+ my_block = self.block_stack.pop()
+ exits.update(my_block.break_exits)
+ from_start = ArcStart(start, cause="the condition on line {lineno} was never false")
+ if node.orelse:
+ else_exits = self.add_body_arcs(node.orelse, from_start=from_start)
+ exits |= else_exits
+ else:
+ # No `else` clause: you can exit from the start.
+ if not constant_test:
+ exits.add(from_start)
+ return exits
+
+ @contract(returns='ArcStarts')
+ def _handle__With(self, node):
+ start = self.line_for_node(node)
+ exits = self.add_body_arcs(node.body, from_start=ArcStart(start))
+ return exits
+
+ _handle__AsyncWith = _handle__With
+
+ def _code_object__Module(self, node):
+ start = self.line_for_node(node)
+ if node.body:
+ exits = self.add_body_arcs(node.body, from_start=ArcStart(-start))
+ for xit in exits:
+ self.add_arc(xit.lineno, -start, xit.cause, "didn't exit the module")
+ else:
+ # Empty module.
+ self.add_arc(-start, start)
+ self.add_arc(start, -start)
+
+ def _code_object__FunctionDef(self, node):
+ start = self.line_for_node(node)
+ self.block_stack.append(FunctionBlock(start=start, name=node.name))
+ exits = self.add_body_arcs(node.body, from_start=ArcStart(-start))
+ self.process_return_exits(exits)
+ self.block_stack.pop()
+
+ _code_object__AsyncFunctionDef = _code_object__FunctionDef
+
+ def _code_object__ClassDef(self, node):
+ start = self.line_for_node(node)
+ self.add_arc(-start, start)
+ exits = self.add_body_arcs(node.body, from_start=ArcStart(start))
+ for xit in exits:
+ self.add_arc(
+ xit.lineno, -start, xit.cause,
+ "didn't exit the body of class {!r}".format(node.name),
+ )
+
+ def _make_oneline_code_method(noun): # pylint: disable=no-self-argument
+ """A function to make methods for online callable _code_object__ methods."""
+ def _code_object__oneline_callable(self, node):
+ start = self.line_for_node(node)
+ self.add_arc(-start, start, None, "didn't run the {} on line {}".format(noun, start))
+ self.add_arc(
+ start, -start, None,
+ "didn't finish the {} on line {}".format(noun, start),
+ )
+ return _code_object__oneline_callable
+
+ _code_object__Lambda = _make_oneline_code_method("lambda")
+ _code_object__GeneratorExp = _make_oneline_code_method("generator expression")
+ _code_object__DictComp = _make_oneline_code_method("dictionary comprehension")
+ _code_object__SetComp = _make_oneline_code_method("set comprehension")
+ if env.PY3:
+ _code_object__ListComp = _make_oneline_code_method("list comprehension")
+
+
+if AST_DUMP: # pragma: debugging
+ # Code only used when dumping the AST for debugging.
+
+ SKIP_DUMP_FIELDS = ["ctx"]
+
+ def _is_simple_value(value):
+ """Is `value` simple enough to be displayed on a single line?"""
+ return (
+ value in [None, [], (), {}, set()] or
+ isinstance(value, (string_class, int, float))
+ )
+
+ def ast_dump(node, depth=0):
+ """Dump the AST for `node`.
+
+ This recursively walks the AST, printing a readable version.
+
+ """
+ indent = " " * depth
+ if not isinstance(node, ast.AST):
+ print("{}<{} {!r}>".format(indent, node.__class__.__name__, node))
+ return
+
+ lineno = getattr(node, "lineno", None)
+ if lineno is not None:
+ linemark = " @ {}".format(node.lineno)
+ else:
+ linemark = ""
+ head = "{}<{}{}".format(indent, node.__class__.__name__, linemark)
+
+ named_fields = [
+ (name, value)
+ for name, value in ast.iter_fields(node)
+ if name not in SKIP_DUMP_FIELDS
+ ]
+ if not named_fields:
+ print("{}>".format(head))
+ elif len(named_fields) == 1 and _is_simple_value(named_fields[0][1]):
+ field_name, value = named_fields[0]
+ print("{} {}: {!r}>".format(head, field_name, value))
+ else:
+ print(head)
+ if 0:
+ print("{}# mro: {}".format(
+ indent, ", ".join(c.__name__ for c in node.__class__.__mro__[1:]),
+ ))
+ next_indent = indent + " "
+ for field_name, value in named_fields:
+ prefix = "{}{}:".format(next_indent, field_name)
+ if _is_simple_value(value):
+ print("{} {!r}".format(prefix, value))
+ elif isinstance(value, list):
+ print("{} [".format(prefix))
+ for n in value:
+ ast_dump(n, depth + 8)
+ print("{}]".format(next_indent))
+ else:
+ print(prefix)
+ ast_dump(value, depth + 8)
+
+ print("{}>".format(indent))
diff --git a/third_party/python/coverage/coverage/phystokens.py b/third_party/python/coverage/coverage/phystokens.py
new file mode 100644
index 0000000000..b6866e7dd0
--- /dev/null
+++ b/third_party/python/coverage/coverage/phystokens.py
@@ -0,0 +1,297 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Better tokenizing for coverage.py."""
+
+import codecs
+import keyword
+import re
+import sys
+import token
+import tokenize
+
+from coverage import env
+from coverage.backward import iternext, unicode_class
+from coverage.misc import contract
+
+
+def phys_tokens(toks):
+ """Return all physical tokens, even line continuations.
+
+ tokenize.generate_tokens() doesn't return a token for the backslash that
+ continues lines. This wrapper provides those tokens so that we can
+ re-create a faithful representation of the original source.
+
+ Returns the same values as generate_tokens()
+
+ """
+ last_line = None
+ last_lineno = -1
+ last_ttext = None
+ for ttype, ttext, (slineno, scol), (elineno, ecol), ltext in toks:
+ if last_lineno != elineno:
+ if last_line and last_line.endswith("\\\n"):
+ # We are at the beginning of a new line, and the last line
+ # ended with a backslash. We probably have to inject a
+ # backslash token into the stream. Unfortunately, there's more
+ # to figure out. This code::
+ #
+ # usage = """\
+ # HEY THERE
+ # """
+ #
+ # triggers this condition, but the token text is::
+ #
+ # '"""\\\nHEY THERE\n"""'
+ #
+ # so we need to figure out if the backslash is already in the
+ # string token or not.
+ inject_backslash = True
+ if last_ttext.endswith("\\"):
+ inject_backslash = False
+ elif ttype == token.STRING:
+ if "\n" in ttext and ttext.split('\n', 1)[0][-1] == '\\':
+ # It's a multi-line string and the first line ends with
+ # a backslash, so we don't need to inject another.
+ inject_backslash = False
+ if inject_backslash:
+ # Figure out what column the backslash is in.
+ ccol = len(last_line.split("\n")[-2]) - 1
+ # Yield the token, with a fake token type.
+ yield (
+ 99999, "\\\n",
+ (slineno, ccol), (slineno, ccol+2),
+ last_line
+ )
+ last_line = ltext
+ if ttype not in (tokenize.NEWLINE, tokenize.NL):
+ last_ttext = ttext
+ yield ttype, ttext, (slineno, scol), (elineno, ecol), ltext
+ last_lineno = elineno
+
+
+@contract(source='unicode')
+def source_token_lines(source):
+ """Generate a series of lines, one for each line in `source`.
+
+ Each line is a list of pairs, each pair is a token::
+
+ [('key', 'def'), ('ws', ' '), ('nam', 'hello'), ('op', '('), ... ]
+
+ Each pair has a token class, and the token text.
+
+ If you concatenate all the token texts, and then join them with newlines,
+ you should have your original `source` back, with two differences:
+ trailing whitespace is not preserved, and a final line with no newline
+ is indistinguishable from a final line with a newline.
+
+ """
+
+ ws_tokens = set([token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL])
+ line = []
+ col = 0
+
+ source = source.expandtabs(8).replace('\r\n', '\n')
+ tokgen = generate_tokens(source)
+
+ for ttype, ttext, (_, scol), (_, ecol), _ in phys_tokens(tokgen):
+ mark_start = True
+ for part in re.split('(\n)', ttext):
+ if part == '\n':
+ yield line
+ line = []
+ col = 0
+ mark_end = False
+ elif part == '':
+ mark_end = False
+ elif ttype in ws_tokens:
+ mark_end = False
+ else:
+ if mark_start and scol > col:
+ line.append(("ws", u" " * (scol - col)))
+ mark_start = False
+ tok_class = tokenize.tok_name.get(ttype, 'xx').lower()[:3]
+ if ttype == token.NAME and keyword.iskeyword(ttext):
+ tok_class = "key"
+ line.append((tok_class, part))
+ mark_end = True
+ scol = 0
+ if mark_end:
+ col = ecol
+
+ if line:
+ yield line
+
+
+class CachedTokenizer(object):
+ """A one-element cache around tokenize.generate_tokens.
+
+ When reporting, coverage.py tokenizes files twice, once to find the
+ structure of the file, and once to syntax-color it. Tokenizing is
+ expensive, and easily cached.
+
+ This is a one-element cache so that our twice-in-a-row tokenizing doesn't
+ actually tokenize twice.
+
+ """
+ def __init__(self):
+ self.last_text = None
+ self.last_tokens = None
+
+ @contract(text='unicode')
+ def generate_tokens(self, text):
+ """A stand-in for `tokenize.generate_tokens`."""
+ if text != self.last_text:
+ self.last_text = text
+ readline = iternext(text.splitlines(True))
+ self.last_tokens = list(tokenize.generate_tokens(readline))
+ return self.last_tokens
+
+# Create our generate_tokens cache as a callable replacement function.
+generate_tokens = CachedTokenizer().generate_tokens
+
+
+COOKIE_RE = re.compile(r"^[ \t]*#.*coding[:=][ \t]*([-\w.]+)", flags=re.MULTILINE)
+
+@contract(source='bytes')
+def _source_encoding_py2(source):
+ """Determine the encoding for `source`, according to PEP 263.
+
+ `source` is a byte string, the text of the program.
+
+ Returns a string, the name of the encoding.
+
+ """
+ assert isinstance(source, bytes)
+
+ # Do this so the detect_encode code we copied will work.
+ readline = iternext(source.splitlines(True))
+
+ # This is mostly code adapted from Py3.2's tokenize module.
+
+ def _get_normal_name(orig_enc):
+ """Imitates get_normal_name in tokenizer.c."""
+ # Only care about the first 12 characters.
+ enc = orig_enc[:12].lower().replace("_", "-")
+ if re.match(r"^utf-8($|-)", enc):
+ return "utf-8"
+ if re.match(r"^(latin-1|iso-8859-1|iso-latin-1)($|-)", enc):
+ return "iso-8859-1"
+ return orig_enc
+
+ # From detect_encode():
+ # It detects the encoding from the presence of a UTF-8 BOM or an encoding
+ # cookie as specified in PEP-0263. If both a BOM and a cookie are present,
+ # but disagree, a SyntaxError will be raised. If the encoding cookie is an
+ # invalid charset, raise a SyntaxError. Note that if a UTF-8 BOM is found,
+ # 'utf-8-sig' is returned.
+
+ # If no encoding is specified, then the default will be returned.
+ default = 'ascii'
+
+ bom_found = False
+ encoding = None
+
+ def read_or_stop():
+ """Get the next source line, or ''."""
+ try:
+ return readline()
+ except StopIteration:
+ return ''
+
+ def find_cookie(line):
+ """Find an encoding cookie in `line`."""
+ try:
+ line_string = line.decode('ascii')
+ except UnicodeDecodeError:
+ return None
+
+ matches = COOKIE_RE.findall(line_string)
+ if not matches:
+ return None
+ encoding = _get_normal_name(matches[0])
+ try:
+ codec = codecs.lookup(encoding)
+ except LookupError:
+ # This behavior mimics the Python interpreter
+ raise SyntaxError("unknown encoding: " + encoding)
+
+ if bom_found:
+ # codecs in 2.3 were raw tuples of functions, assume the best.
+ codec_name = getattr(codec, 'name', encoding)
+ if codec_name != 'utf-8':
+ # This behavior mimics the Python interpreter
+ raise SyntaxError('encoding problem: utf-8')
+ encoding += '-sig'
+ return encoding
+
+ first = read_or_stop()
+ if first.startswith(codecs.BOM_UTF8):
+ bom_found = True
+ first = first[3:]
+ default = 'utf-8-sig'
+ if not first:
+ return default
+
+ encoding = find_cookie(first)
+ if encoding:
+ return encoding
+
+ second = read_or_stop()
+ if not second:
+ return default
+
+ encoding = find_cookie(second)
+ if encoding:
+ return encoding
+
+ return default
+
+
+@contract(source='bytes')
+def _source_encoding_py3(source):
+ """Determine the encoding for `source`, according to PEP 263.
+
+ `source` is a byte string: the text of the program.
+
+ Returns a string, the name of the encoding.
+
+ """
+ readline = iternext(source.splitlines(True))
+ return tokenize.detect_encoding(readline)[0]
+
+
+if env.PY3:
+ source_encoding = _source_encoding_py3
+else:
+ source_encoding = _source_encoding_py2
+
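+# Illustrative sketch (not part of the original module): both implementations
+# normalize PEP 263 cookies, e.g.
+#
+# source_encoding(b"# coding: latin-1\nx = 1\n") # -> 'iso-8859-1'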
+
+@contract(source='unicode')
+def compile_unicode(source, filename, mode):
+ """Just like the `compile` builtin, but works on any Unicode string.
+
+ Python 2's compile() builtin has a stupid restriction: if the source string
+ is Unicode, then it may not have an encoding declaration in it. Why not?
+ Who knows! It also decodes to utf8, and then tries to interpret those utf8
+ bytes according to the encoding declaration. Why? Who knows!
+
+ This function neuters the encoding declaration and compiles the source.
+
+ """
+ source = neuter_encoding_declaration(source)
+ if env.PY2 and isinstance(filename, unicode_class):
+ filename = filename.encode(sys.getfilesystemencoding(), "replace")
+ code = compile(source, filename, mode)
+ return code
+
+
+@contract(source='unicode', returns='unicode')
+def neuter_encoding_declaration(source):
+ """Return `source`, with any encoding declaration neutered."""
+ if COOKIE_RE.search(source):
+ source_lines = source.splitlines(True)
+ for lineno in range(min(2, len(source_lines))):
+ source_lines[lineno] = COOKIE_RE.sub("# (deleted declaration)", source_lines[lineno])
+ source = "".join(source_lines)
+ return source
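+
+
+# Illustrative sketch (not part of the original module):
+#
+# neuter_encoding_declaration(u"# coding: utf-8\nx = 1\n")
+# # -> u"# (deleted declaration)\nx = 1\n"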
diff --git a/third_party/python/coverage/coverage/plugin.py b/third_party/python/coverage/coverage/plugin.py
new file mode 100644
index 0000000000..6997b489bb
--- /dev/null
+++ b/third_party/python/coverage/coverage/plugin.py
@@ -0,0 +1,533 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""
+.. versionadded:: 4.0
+
+Plug-in interfaces for coverage.py.
+
+Coverage.py supports a few different kinds of plug-ins that change its
+behavior:
+
+* File tracers implement tracing of non-Python file types.
+
+* Configurers add custom configuration, using Python code to change the
+ configuration.
+
+* Dynamic context switchers decide when the dynamic context has changed, for
+ example, to record what test function produced the coverage.
+
+To write a coverage.py plug-in, create a module with a subclass of
+:class:`~coverage.CoveragePlugin`. You will override methods in your class to
+participate in various aspects of coverage.py's processing.
+Different types of plug-ins have to override different methods.
+
+Any plug-in can optionally implement :meth:`~coverage.CoveragePlugin.sys_info`
+to provide debugging information about its operation.
+
+Your module must also contain a ``coverage_init`` function that registers an
+instance of your plug-in class::
+
+ import coverage
+
+ class MyPlugin(coverage.CoveragePlugin):
+ ...
+
+ def coverage_init(reg, options):
+ reg.add_file_tracer(MyPlugin())
+
+You use the `reg` parameter passed to your ``coverage_init`` function to
+register your plug-in object. The registration method you call depends on
+what kind of plug-in it is.
+
+If your plug-in takes options, the `options` parameter is a dictionary of your
+plug-in's options from the coverage.py configuration file. Use them however
+you want to configure your object before registering it.
+
+Coverage.py will store its own information on your plug-in object, using
+attributes whose names start with ``_coverage_``. Don't be startled.
+
+.. warning::
+ Plug-ins are imported by coverage.py before it begins measuring code.
+ If you write a plugin in your own project, it might import your product
+ code before coverage.py can start measuring. This can result in your
+ own code being reported as missing.
+
+ One solution is to put your plugins in your project tree, but not in
+ your importable Python package.
+
+
+.. _file_tracer_plugins:
+
+File Tracers
+============
+
+File tracers implement measurement support for non-Python files. File tracers
+implement the :meth:`~coverage.CoveragePlugin.file_tracer` method to claim
+files and the :meth:`~coverage.CoveragePlugin.file_reporter` method to report
+on those files.
+
+In your ``coverage_init`` function, use the ``add_file_tracer`` method to
+register your file tracer.
+
+
+.. _configurer_plugins:
+
+Configurers
+===========
+
+.. versionadded:: 4.5
+
+Configurers modify the configuration of coverage.py during start-up.
+Configurers implement the :meth:`~coverage.CoveragePlugin.configure` method to
+change the configuration.
+
+In your ``coverage_init`` function, use the ``add_configurer`` method to
+register your configurer.
+
+
+.. _dynamic_context_plugins:
+
+Dynamic Context Switchers
+=========================
+
+.. versionadded:: 5.0
+
+Dynamic context switcher plugins implement the
+:meth:`~coverage.CoveragePlugin.dynamic_context` method to dynamically compute
+the context label for each measured frame.
+
+Computed context labels are useful when you want to group measured data without
+modifying the source code.
+
+For example, you could write a plugin that checks `frame.f_code` to inspect
+the currently executed method, and sets the context label to a fully qualified
+method name if it's an instance method of `unittest.TestCase` and the method
+name starts with 'test'.  Such a plugin would provide basic coverage grouping
+by test and could be used with test runners that have no built-in coverage.py
+support (a minimal sketch of such a plugin appears at the end of this module).
+
+In your ``coverage_init`` function, use the ``add_dynamic_context`` method to
+register your dynamic context switcher.
+
+"""
+
+from coverage import files
+from coverage.misc import contract, _needs_to_implement
+
+
+class CoveragePlugin(object):
+ """Base class for coverage.py plug-ins."""
+
+ def file_tracer(self, filename): # pylint: disable=unused-argument
+ """Get a :class:`FileTracer` object for a file.
+
+ Plug-in type: file tracer.
+
+ Every Python source file is offered to your plug-in to give it a chance
+ to take responsibility for tracing the file. If your plug-in can
+ handle the file, it should return a :class:`FileTracer` object.
+ Otherwise return None.
+
+ There is no way to register your plug-in for particular files.
+ Instead, this method is invoked for all files as they are executed,
+ and the plug-in decides whether it can trace the file or not.
+ Be prepared for `filename` to refer to all kinds of files that have
+ nothing to do with your plug-in.
+
+ The file name will be a Python file being executed. There are two
+ broad categories of behavior for a plug-in, depending on the kind of
+ files your plug-in supports:
+
+ * Static file names: each of your original source files has been
+ converted into a distinct Python file. Your plug-in is invoked with
+ the Python file name, and it maps it back to its original source
+ file.
+
+ * Dynamic file names: all of your source files are executed by the same
+ Python file. In this case, your plug-in implements
+ :meth:`FileTracer.dynamic_source_filename` to provide the actual
+ source file for each execution frame.
+
+ `filename` is a string, the path to the file being considered. This is
+ the absolute real path to the file. If you are comparing to other
+ paths, be sure to take this into account.
+
+ Returns a :class:`FileTracer` object to use to trace `filename`, or
+ None if this plug-in cannot trace this file.
+
+ """
+ return None
+
+ def file_reporter(self, filename): # pylint: disable=unused-argument
+ """Get the :class:`FileReporter` class to use for a file.
+
+ Plug-in type: file tracer.
+
+ This will only be invoked if `filename` returns non-None from
+ :meth:`file_tracer`. It's an error to return None from this method.
+
+ Returns a :class:`FileReporter` object to use to report on `filename`,
+ or the string `"python"` to have coverage.py treat the file as Python.
+
+ """
+ _needs_to_implement(self, "file_reporter")
+
+ def dynamic_context(self, frame): # pylint: disable=unused-argument
+ """Get the dynamically computed context label for `frame`.
+
+ Plug-in type: dynamic context.
+
+ This method is invoked for each frame when outside of a dynamic
+ context, to see if a new dynamic context should be started. If it
+ returns a string, a new context label is set for this and deeper
+ frames. The dynamic context ends when this frame returns.
+
+ Returns a string to start a new dynamic context, or None if no new
+ context should be started.
+
+ """
+ return None
+
+ def find_executable_files(self, src_dir): # pylint: disable=unused-argument
+ """Yield all of the executable files in `src_dir`, recursively.
+
+ Plug-in type: file tracer.
+
+ Executability is a plug-in-specific property, but generally means files
+ which would have been considered for coverage analysis, had they been
+ included automatically.
+
+ Returns or yields a sequence of strings, the paths to files that could
+ have been executed, including files that had been executed.
+
+ """
+ return []
+
+ def configure(self, config):
+ """Modify the configuration of coverage.py.
+
+ Plug-in type: configurer.
+
+ This method is called during coverage.py start-up, to give your plug-in
+ a chance to change the configuration. The `config` parameter is an
+ object with :meth:`~coverage.Coverage.get_option` and
+ :meth:`~coverage.Coverage.set_option` methods. Do not call any other
+ methods on the `config` object.
+
+ """
+ pass
+
+ def sys_info(self):
+ """Get a list of information useful for debugging.
+
+ Plug-in type: any.
+
+ This method will be invoked for ``--debug=sys``. Your
+ plug-in can return any information it wants to be displayed.
+
+ Returns a list of pairs: `[(name, value), ...]`.
+
+ """
+ return []
+
+
+class FileTracer(object):
+ """Support needed for files during the execution phase.
+
+ File tracer plug-ins implement subclasses of FileTracer to return from
+ their :meth:`~CoveragePlugin.file_tracer` method.
+
+ You may construct this object from :meth:`CoveragePlugin.file_tracer` any
+ way you like. A natural choice would be to pass the file name given to
+ `file_tracer`.
+
+ `FileTracer` objects should only be created in the
+ :meth:`CoveragePlugin.file_tracer` method.
+
+ See :ref:`howitworks` for details of the different coverage.py phases.
+
+ """
+
+ def source_filename(self):
+ """The source file name for this file.
+
+ This may be any file name you like. A key responsibility of a plug-in
+ is to own the mapping from Python execution back to whatever source
+ file name was originally the source of the code.
+
+ See :meth:`CoveragePlugin.file_tracer` for details about static and
+ dynamic file names.
+
+ Returns the file name to credit with this execution.
+
+ """
+ _needs_to_implement(self, "source_filename")
+
+ def has_dynamic_source_filename(self):
+ """Does this FileTracer have dynamic source file names?
+
+ FileTracers can provide dynamically determined file names by
+        implementing :meth:`dynamic_source_filename`.  Invoking that method is
+        expensive, so coverage.py uses the result of this method to decide
+        whether it needs to call :meth:`dynamic_source_filename` at all.
+
+ See :meth:`CoveragePlugin.file_tracer` for details about static and
+ dynamic file names.
+
+ Returns True if :meth:`dynamic_source_filename` should be called to get
+ dynamic source file names.
+
+ """
+ return False
+
+ def dynamic_source_filename(self, filename, frame): # pylint: disable=unused-argument
+ """Get a dynamically computed source file name.
+
+ Some plug-ins need to compute the source file name dynamically for each
+ frame.
+
+ This function will not be invoked if
+ :meth:`has_dynamic_source_filename` returns False.
+
+ Returns the source file name for this frame, or None if this frame
+ shouldn't be measured.
+
+ """
+ return None
+
+ def line_number_range(self, frame):
+ """Get the range of source line numbers for a given a call frame.
+
+ The call frame is examined, and the source line number in the original
+ file is returned. The return value is a pair of numbers, the starting
+ line number and the ending line number, both inclusive. For example,
+ returning (5, 7) means that lines 5, 6, and 7 should be considered
+ executed.
+
+ This function might decide that the frame doesn't indicate any lines
+ from the source file were executed. Return (-1, -1) in this case to
+ tell coverage.py that no lines should be recorded for this frame.
+
+ """
+ lineno = frame.f_lineno
+ return lineno, lineno
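+
+
+# --- Illustrative sketch (not part of the upstream module) ------------------
+# A minimal "static file name" pairing, as described in
+# CoveragePlugin.file_tracer() above.  The "_tmpl.py" naming convention and
+# the ".tmpl" source files are hypothetical.
+
+class _DemoFileTracer(FileTracer):
+    """Map an executed Python file back to the template it was generated from."""
+
+    def __init__(self, py_filename):
+        self.py_filename = py_filename
+
+    def source_filename(self):
+        # e.g. "page_tmpl.py" was generated from "page.tmpl"
+        return self.py_filename.replace("_tmpl.py", ".tmpl")
+
+
+class _DemoTracerPlugin(CoveragePlugin):
+    """Claim only the generated files this hypothetical tool produces."""
+
+    def file_tracer(self, filename):
+        if filename.endswith("_tmpl.py"):
+            return _DemoFileTracer(filename)
+        return None
+
+    def file_reporter(self, filename):
+        # A real plug-in would return a FileReporter subclass here; returning
+        # "python" tells coverage.py to treat the file as ordinary Python.
+        return "python"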
+
+
+class FileReporter(object):
+ """Support needed for files during the analysis and reporting phases.
+
+ File tracer plug-ins implement a subclass of `FileReporter`, and return
+ instances from their :meth:`CoveragePlugin.file_reporter` method.
+
+ There are many methods here, but only :meth:`lines` is required, to provide
+ the set of executable lines in the file.
+
+ See :ref:`howitworks` for details of the different coverage.py phases.
+
+ """
+
+ def __init__(self, filename):
+ """Simple initialization of a `FileReporter`.
+
+ The `filename` argument is the path to the file being reported. This
+ will be available as the `.filename` attribute on the object. Other
+ method implementations on this base class rely on this attribute.
+
+ """
+ self.filename = filename
+
+ def __repr__(self):
+ return "<{0.__class__.__name__} filename={0.filename!r}>".format(self)
+
+ def relative_filename(self):
+ """Get the relative file name for this file.
+
+ This file path will be displayed in reports. The default
+ implementation will supply the actual project-relative file path. You
+ only need to supply this method if you have an unusual syntax for file
+ paths.
+
+ """
+ return files.relative_filename(self.filename)
+
+ @contract(returns='unicode')
+ def source(self):
+ """Get the source for the file.
+
+ Returns a Unicode string.
+
+ The base implementation simply reads the `self.filename` file and
+ decodes it as UTF8. Override this method if your file isn't readable
+ as a text file, or if you need other encoding support.
+
+ """
+ with open(self.filename, "rb") as f:
+ return f.read().decode("utf8")
+
+ def lines(self):
+ """Get the executable lines in this file.
+
+ Your plug-in must determine which lines in the file were possibly
+ executable. This method returns a set of those line numbers.
+
+ Returns a set of line numbers.
+
+ """
+ _needs_to_implement(self, "lines")
+
+ def excluded_lines(self):
+ """Get the excluded executable lines in this file.
+
+ Your plug-in can use any method it likes to allow the user to exclude
+ executable lines from consideration.
+
+ Returns a set of line numbers.
+
+ The base implementation returns the empty set.
+
+ """
+ return set()
+
+ def translate_lines(self, lines):
+ """Translate recorded lines into reported lines.
+
+ Some file formats will want to report lines slightly differently than
+ they are recorded. For example, Python records the last line of a
+ multi-line statement, but reports are nicer if they mention the first
+ line.
+
+ Your plug-in can optionally define this method to perform these kinds
+        of adjustments.
+
+ `lines` is a sequence of integers, the recorded line numbers.
+
+ Returns a set of integers, the adjusted line numbers.
+
+ The base implementation returns the numbers unchanged.
+
+ """
+ return set(lines)
+
+ def arcs(self):
+ """Get the executable arcs in this file.
+
+ To support branch coverage, your plug-in needs to be able to indicate
+ possible execution paths, as a set of line number pairs. Each pair is
+ a `(prev, next)` pair indicating that execution can transition from the
+ `prev` line number to the `next` line number.
+
+ Returns a set of pairs of line numbers. The default implementation
+ returns an empty set.
+
+ """
+ return set()
+
+ def no_branch_lines(self):
+ """Get the lines excused from branch coverage in this file.
+
+ Your plug-in can use any method it likes to allow the user to exclude
+ lines from consideration of branch coverage.
+
+ Returns a set of line numbers.
+
+ The base implementation returns the empty set.
+
+ """
+ return set()
+
+ def translate_arcs(self, arcs):
+ """Translate recorded arcs into reported arcs.
+
+ Similar to :meth:`translate_lines`, but for arcs. `arcs` is a set of
+ line number pairs.
+
+ Returns a set of line number pairs.
+
+ The default implementation returns `arcs` unchanged.
+
+ """
+ return arcs
+
+ def exit_counts(self):
+ """Get a count of exits from that each line.
+
+ To determine which lines are branches, coverage.py looks for lines that
+ have more than one exit. This function creates a dict mapping each
+ executable line number to a count of how many exits it has.
+
+ To be honest, this feels wrong, and should be refactored. Let me know
+ if you attempt to implement this method in your plug-in...
+
+ """
+ return {}
+
+ def missing_arc_description(self, start, end, executed_arcs=None): # pylint: disable=unused-argument
+ """Provide an English sentence describing a missing arc.
+
+ The `start` and `end` arguments are the line numbers of the missing
+ arc. Negative numbers indicate entering or exiting code objects.
+
+ The `executed_arcs` argument is a set of line number pairs, the arcs
+ that were executed in this file.
+
+        By default, this simply returns the string "Line {start} didn't jump
+        to line {end}".
+
+ """
+ return "Line {start} didn't jump to line {end}".format(start=start, end=end)
+
+ def source_token_lines(self):
+ """Generate a series of tokenized lines, one for each line in `source`.
+
+ These tokens are used for syntax-colored reports.
+
+ Each line is a list of pairs, each pair is a token::
+
+ [('key', 'def'), ('ws', ' '), ('nam', 'hello'), ('op', '('), ... ]
+
+ Each pair has a token class, and the token text. The token classes
+ are:
+
+ * ``'com'``: a comment
+ * ``'key'``: a keyword
+ * ``'nam'``: a name, or identifier
+ * ``'num'``: a number
+ * ``'op'``: an operator
+ * ``'str'``: a string literal
+ * ``'ws'``: some white space
+ * ``'txt'``: some other kind of text
+
+ If you concatenate all the token texts, and then join them with
+ newlines, you should have your original source back.
+
+ The default implementation simply returns each line tagged as
+ ``'txt'``.
+
+ """
+ for line in self.source().splitlines():
+ yield [('txt', line)]
+
+ # Annoying comparison operators. Py3k wants __lt__ etc, and Py2k needs all
+ # of them defined.
+
+ def __eq__(self, other):
+ return isinstance(other, FileReporter) and self.filename == other.filename
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __lt__(self, other):
+ return self.filename < other.filename
+
+ def __le__(self, other):
+ return self.filename <= other.filename
+
+ def __gt__(self, other):
+ return self.filename > other.filename
+
+ def __ge__(self, other):
+ return self.filename >= other.filename
+
+ __hash__ = None # This object doesn't need to be hashed.
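+
+
+# --- Illustrative sketch (not part of the upstream module) ------------------
+# The module docstring above describes a dynamic context switcher that labels
+# coverage by test method.  A minimal version might look like this; the class
+# and registration function are hypothetical, shown only to make the calling
+# convention concrete.
+
+import unittest
+
+
+class _TestMethodContexts(CoveragePlugin):
+    """Label frames that are 'test*' methods of a unittest.TestCase."""
+
+    def dynamic_context(self, frame):
+        who = frame.f_locals.get("self")
+        if isinstance(who, unittest.TestCase) and frame.f_code.co_name.startswith("test"):
+            return "%s.%s" % (type(who).__name__, frame.f_code.co_name)
+        return None
+
+
+def _demo_coverage_init(reg, options):
+    """What a plug-in module's ``coverage_init`` would do with this plug-in."""
+    reg.add_dynamic_context(_TestMethodContexts())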
diff --git a/third_party/python/coverage/coverage/plugin_support.py b/third_party/python/coverage/coverage/plugin_support.py
new file mode 100644
index 0000000000..89c1c7658f
--- /dev/null
+++ b/third_party/python/coverage/coverage/plugin_support.py
@@ -0,0 +1,281 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Support for plugins."""
+
+import os
+import os.path
+import sys
+
+from coverage.misc import CoverageException, isolate_module
+from coverage.plugin import CoveragePlugin, FileTracer, FileReporter
+
+os = isolate_module(os)
+
+
+class Plugins(object):
+ """The currently loaded collection of coverage.py plugins."""
+
+ def __init__(self):
+ self.order = []
+ self.names = {}
+ self.file_tracers = []
+ self.configurers = []
+ self.context_switchers = []
+
+ self.current_module = None
+ self.debug = None
+
+ @classmethod
+ def load_plugins(cls, modules, config, debug=None):
+ """Load plugins from `modules`.
+
+ Returns a Plugins object with the loaded and configured plugins.
+
+ """
+ plugins = cls()
+ plugins.debug = debug
+
+ for module in modules:
+ plugins.current_module = module
+ __import__(module)
+ mod = sys.modules[module]
+
+ coverage_init = getattr(mod, "coverage_init", None)
+ if not coverage_init:
+ raise CoverageException(
+ "Plugin module %r didn't define a coverage_init function" % module
+ )
+
+ options = config.get_plugin_options(module)
+ coverage_init(plugins, options)
+
+ plugins.current_module = None
+ return plugins
+
+ def add_file_tracer(self, plugin):
+ """Add a file tracer plugin.
+
+ `plugin` is an instance of a third-party plugin class. It must
+ implement the :meth:`CoveragePlugin.file_tracer` method.
+
+ """
+ self._add_plugin(plugin, self.file_tracers)
+
+ def add_configurer(self, plugin):
+ """Add a configuring plugin.
+
+ `plugin` is an instance of a third-party plugin class. It must
+ implement the :meth:`CoveragePlugin.configure` method.
+
+ """
+ self._add_plugin(plugin, self.configurers)
+
+ def add_dynamic_context(self, plugin):
+ """Add a dynamic context plugin.
+
+ `plugin` is an instance of a third-party plugin class. It must
+ implement the :meth:`CoveragePlugin.dynamic_context` method.
+
+ """
+ self._add_plugin(plugin, self.context_switchers)
+
+ def add_noop(self, plugin):
+ """Add a plugin that does nothing.
+
+ This is only useful for testing the plugin support.
+
+ """
+ self._add_plugin(plugin, None)
+
+ def _add_plugin(self, plugin, specialized):
+ """Add a plugin object.
+
+ `plugin` is a :class:`CoveragePlugin` instance to add. `specialized`
+ is a list to append the plugin to.
+
+ """
+ plugin_name = "%s.%s" % (self.current_module, plugin.__class__.__name__)
+ if self.debug and self.debug.should('plugin'):
+ self.debug.write("Loaded plugin %r: %r" % (self.current_module, plugin))
+ labelled = LabelledDebug("plugin %r" % (self.current_module,), self.debug)
+ plugin = DebugPluginWrapper(plugin, labelled)
+
+ # pylint: disable=attribute-defined-outside-init
+ plugin._coverage_plugin_name = plugin_name
+ plugin._coverage_enabled = True
+ self.order.append(plugin)
+ self.names[plugin_name] = plugin
+ if specialized is not None:
+ specialized.append(plugin)
+
+ def __nonzero__(self):
+ return bool(self.order)
+
+ __bool__ = __nonzero__
+
+ def __iter__(self):
+ return iter(self.order)
+
+ def get(self, plugin_name):
+ """Return a plugin by name."""
+ return self.names[plugin_name]
+
+
+class LabelledDebug(object):
+ """A Debug writer, but with labels for prepending to the messages."""
+
+ def __init__(self, label, debug, prev_labels=()):
+ self.labels = list(prev_labels) + [label]
+ self.debug = debug
+
+ def add_label(self, label):
+ """Add a label to the writer, and return a new `LabelledDebug`."""
+ return LabelledDebug(label, self.debug, self.labels)
+
+ def message_prefix(self):
+ """The prefix to use on messages, combining the labels."""
+ prefixes = self.labels + ['']
+ return ":\n".join(" "*i+label for i, label in enumerate(prefixes))
+
+ def write(self, message):
+ """Write `message`, but with the labels prepended."""
+ self.debug.write("%s%s" % (self.message_prefix(), message))
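+
+    # A quick illustration (not upstream documentation), derived from
+    # message_prefix() above: with labels ["plugin 'p'", "file 'f.py'"],
+    #
+    #     LabelledDebug("plugin 'p'", debug).add_label("file 'f.py'").write("hi")
+    #
+    # writes "plugin 'p':\n file 'f.py':\n  hi" to the underlying debug output,
+    # so each nesting level is indented by one extra space.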
+
+
+class DebugPluginWrapper(CoveragePlugin):
+ """Wrap a plugin, and use debug to report on what it's doing."""
+
+ def __init__(self, plugin, debug):
+ super(DebugPluginWrapper, self).__init__()
+ self.plugin = plugin
+ self.debug = debug
+
+ def file_tracer(self, filename):
+ tracer = self.plugin.file_tracer(filename)
+ self.debug.write("file_tracer(%r) --> %r" % (filename, tracer))
+ if tracer:
+ debug = self.debug.add_label("file %r" % (filename,))
+ tracer = DebugFileTracerWrapper(tracer, debug)
+ return tracer
+
+ def file_reporter(self, filename):
+ reporter = self.plugin.file_reporter(filename)
+ self.debug.write("file_reporter(%r) --> %r" % (filename, reporter))
+ if reporter:
+ debug = self.debug.add_label("file %r" % (filename,))
+ reporter = DebugFileReporterWrapper(filename, reporter, debug)
+ return reporter
+
+ def dynamic_context(self, frame):
+ context = self.plugin.dynamic_context(frame)
+ self.debug.write("dynamic_context(%r) --> %r" % (frame, context))
+ return context
+
+ def find_executable_files(self, src_dir):
+ executable_files = self.plugin.find_executable_files(src_dir)
+ self.debug.write("find_executable_files(%r) --> %r" % (src_dir, executable_files))
+ return executable_files
+
+ def configure(self, config):
+ self.debug.write("configure(%r)" % (config,))
+ self.plugin.configure(config)
+
+ def sys_info(self):
+ return self.plugin.sys_info()
+
+
+class DebugFileTracerWrapper(FileTracer):
+ """A debugging `FileTracer`."""
+
+ def __init__(self, tracer, debug):
+ self.tracer = tracer
+ self.debug = debug
+
+ def _show_frame(self, frame):
+ """A short string identifying a frame, for debug messages."""
+ return "%s@%d" % (
+ os.path.basename(frame.f_code.co_filename),
+ frame.f_lineno,
+ )
+
+ def source_filename(self):
+ sfilename = self.tracer.source_filename()
+ self.debug.write("source_filename() --> %r" % (sfilename,))
+ return sfilename
+
+ def has_dynamic_source_filename(self):
+ has = self.tracer.has_dynamic_source_filename()
+ self.debug.write("has_dynamic_source_filename() --> %r" % (has,))
+ return has
+
+ def dynamic_source_filename(self, filename, frame):
+ dyn = self.tracer.dynamic_source_filename(filename, frame)
+ self.debug.write("dynamic_source_filename(%r, %s) --> %r" % (
+ filename, self._show_frame(frame), dyn,
+ ))
+ return dyn
+
+ def line_number_range(self, frame):
+ pair = self.tracer.line_number_range(frame)
+ self.debug.write("line_number_range(%s) --> %r" % (self._show_frame(frame), pair))
+ return pair
+
+
+class DebugFileReporterWrapper(FileReporter):
+ """A debugging `FileReporter`."""
+
+ def __init__(self, filename, reporter, debug):
+ super(DebugFileReporterWrapper, self).__init__(filename)
+ self.reporter = reporter
+ self.debug = debug
+
+ def relative_filename(self):
+ ret = self.reporter.relative_filename()
+ self.debug.write("relative_filename() --> %r" % (ret,))
+ return ret
+
+ def lines(self):
+ ret = self.reporter.lines()
+ self.debug.write("lines() --> %r" % (ret,))
+ return ret
+
+ def excluded_lines(self):
+ ret = self.reporter.excluded_lines()
+ self.debug.write("excluded_lines() --> %r" % (ret,))
+ return ret
+
+ def translate_lines(self, lines):
+ ret = self.reporter.translate_lines(lines)
+ self.debug.write("translate_lines(%r) --> %r" % (lines, ret))
+ return ret
+
+ def translate_arcs(self, arcs):
+ ret = self.reporter.translate_arcs(arcs)
+ self.debug.write("translate_arcs(%r) --> %r" % (arcs, ret))
+ return ret
+
+ def no_branch_lines(self):
+ ret = self.reporter.no_branch_lines()
+ self.debug.write("no_branch_lines() --> %r" % (ret,))
+ return ret
+
+ def exit_counts(self):
+ ret = self.reporter.exit_counts()
+ self.debug.write("exit_counts() --> %r" % (ret,))
+ return ret
+
+ def arcs(self):
+ ret = self.reporter.arcs()
+ self.debug.write("arcs() --> %r" % (ret,))
+ return ret
+
+ def source(self):
+ ret = self.reporter.source()
+ self.debug.write("source() --> %d chars" % (len(ret),))
+ return ret
+
+ def source_token_lines(self):
+ ret = list(self.reporter.source_token_lines())
+ self.debug.write("source_token_lines() --> %d tokens" % (len(ret),))
+ return ret
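+
+
+# --- Illustrative sketch (not part of the upstream module) ------------------
+# How the registry above is normally driven: load_plugins() imports each
+# configured module and calls its coverage_init(reg, options).  Building a
+# registry by hand makes that flow visible; the plug-in class is hypothetical.
+
+class _NoopDemoPlugin(CoveragePlugin):
+    """A do-nothing plug-in, useful only for demonstrating registration."""
+
+
+def _demo_registry():
+    """Return a Plugins registry populated the way coverage_init() would."""
+    plugins = Plugins()
+    plugins.current_module = "demo_module"   # load_plugins() sets this per module
+    plugins.add_noop(_NoopDemoPlugin())      # registered, but not specialized
+    # Iteration yields plug-ins in registration order; get() looks them up by
+    # the "module.ClassName" name assigned during registration.
+    assert plugins.get("demo_module._NoopDemoPlugin") is list(plugins)[0]
+    return plugins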
diff --git a/third_party/python/coverage/coverage/python.py b/third_party/python/coverage/coverage/python.py
new file mode 100644
index 0000000000..81aa66ba16
--- /dev/null
+++ b/third_party/python/coverage/coverage/python.py
@@ -0,0 +1,249 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Python source expertise for coverage.py"""
+
+import os.path
+import types
+import zipimport
+
+from coverage import env, files
+from coverage.misc import contract, expensive, isolate_module, join_regex
+from coverage.misc import CoverageException, NoSource
+from coverage.parser import PythonParser
+from coverage.phystokens import source_token_lines, source_encoding
+from coverage.plugin import FileReporter
+
+os = isolate_module(os)
+
+
+@contract(returns='bytes')
+def read_python_source(filename):
+ """Read the Python source text from `filename`.
+
+ Returns bytes.
+
+ """
+ with open(filename, "rb") as f:
+ source = f.read()
+
+ if env.IRONPYTHON:
+ # IronPython reads Unicode strings even for "rb" files.
+ source = bytes(source)
+
+ return source.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
+
+
+@contract(returns='unicode')
+def get_python_source(filename):
+ """Return the source code, as unicode."""
+ base, ext = os.path.splitext(filename)
+ if ext == ".py" and env.WINDOWS:
+ exts = [".py", ".pyw"]
+ else:
+ exts = [ext]
+
+ for ext in exts:
+ try_filename = base + ext
+ if os.path.exists(try_filename):
+ # A regular text file: open it.
+ source = read_python_source(try_filename)
+ break
+
+ # Maybe it's in a zip file?
+ source = get_zip_bytes(try_filename)
+ if source is not None:
+ break
+ else:
+ # Couldn't find source.
+ exc_msg = "No source for code: '%s'.\n" % (filename,)
+ exc_msg += "Aborting report output, consider using -i."
+ raise NoSource(exc_msg)
+
+ # Replace \f because of http://bugs.python.org/issue19035
+ source = source.replace(b'\f', b' ')
+ source = source.decode(source_encoding(source), "replace")
+
+ # Python code should always end with a line with a newline.
+ if source and source[-1] != '\n':
+ source += '\n'
+
+ return source
+
+
+@contract(returns='bytes|None')
+def get_zip_bytes(filename):
+ """Get data from `filename` if it is a zip file path.
+
+ Returns the bytestring data read from the zip file, or None if no zip file
+ could be found or `filename` isn't in it. The data returned will be
+ an empty string if the file is empty.
+
+ """
+ markers = ['.zip'+os.sep, '.egg'+os.sep, '.pex'+os.sep]
+ for marker in markers:
+ if marker in filename:
+ parts = filename.split(marker)
+ try:
+ zi = zipimport.zipimporter(parts[0]+marker[:-1])
+ except zipimport.ZipImportError:
+ continue
+ try:
+ data = zi.get_data(parts[1])
+ except IOError:
+ continue
+ return data
+ return None
+
+
+def source_for_file(filename):
+ """Return the source filename for `filename`.
+
+ Given a file name being traced, return the best guess as to the source
+ file to attribute it to.
+
+ """
+ if filename.endswith(".py"):
+ # .py files are themselves source files.
+ return filename
+
+ elif filename.endswith((".pyc", ".pyo")):
+ # Bytecode files probably have source files near them.
+ py_filename = filename[:-1]
+ if os.path.exists(py_filename):
+ # Found a .py file, use that.
+ return py_filename
+ if env.WINDOWS:
+ # On Windows, it could be a .pyw file.
+ pyw_filename = py_filename + "w"
+ if os.path.exists(pyw_filename):
+ return pyw_filename
+ # Didn't find source, but it's probably the .py file we want.
+ return py_filename
+
+ elif filename.endswith("$py.class"):
+ # Jython is easy to guess.
+ return filename[:-9] + ".py"
+
+ # No idea, just use the file name as-is.
+ return filename
+
+
+def source_for_morf(morf):
+ """Get the source filename for the module-or-file `morf`."""
+ if hasattr(morf, '__file__') and morf.__file__:
+ filename = morf.__file__
+ elif isinstance(morf, types.ModuleType):
+ # A module should have had .__file__, otherwise we can't use it.
+ # This could be a PEP-420 namespace package.
+ raise CoverageException("Module {} has no file".format(morf))
+ else:
+ filename = morf
+
+ filename = source_for_file(files.unicode_filename(filename))
+ return filename
+
+
+class PythonFileReporter(FileReporter):
+ """Report support for a Python file."""
+
+ def __init__(self, morf, coverage=None):
+ self.coverage = coverage
+
+ filename = source_for_morf(morf)
+
+ super(PythonFileReporter, self).__init__(files.canonical_filename(filename))
+
+ if hasattr(morf, '__name__'):
+ name = morf.__name__.replace(".", os.sep)
+ if os.path.basename(filename).startswith('__init__.'):
+ name += os.sep + "__init__"
+ name += ".py"
+ name = files.unicode_filename(name)
+ else:
+ name = files.relative_filename(filename)
+ self.relname = name
+
+ self._source = None
+ self._parser = None
+ self._excluded = None
+
+ def __repr__(self):
+ return "<PythonFileReporter {!r}>".format(self.filename)
+
+ @contract(returns='unicode')
+ def relative_filename(self):
+ return self.relname
+
+ @property
+ def parser(self):
+ """Lazily create a :class:`PythonParser`."""
+ if self._parser is None:
+ self._parser = PythonParser(
+ filename=self.filename,
+ exclude=self.coverage._exclude_regex('exclude'),
+ )
+ self._parser.parse_source()
+ return self._parser
+
+ def lines(self):
+ """Return the line numbers of statements in the file."""
+ return self.parser.statements
+
+ def excluded_lines(self):
+ """Return the line numbers of statements in the file."""
+ return self.parser.excluded
+
+ def translate_lines(self, lines):
+ return self.parser.translate_lines(lines)
+
+ def translate_arcs(self, arcs):
+ return self.parser.translate_arcs(arcs)
+
+ @expensive
+ def no_branch_lines(self):
+ no_branch = self.parser.lines_matching(
+ join_regex(self.coverage.config.partial_list),
+ join_regex(self.coverage.config.partial_always_list)
+ )
+ return no_branch
+
+ @expensive
+ def arcs(self):
+ return self.parser.arcs()
+
+ @expensive
+ def exit_counts(self):
+ return self.parser.exit_counts()
+
+ def missing_arc_description(self, start, end, executed_arcs=None):
+ return self.parser.missing_arc_description(start, end, executed_arcs)
+
+ @contract(returns='unicode')
+ def source(self):
+ if self._source is None:
+ self._source = get_python_source(self.filename)
+ return self._source
+
+ def should_be_python(self):
+ """Does it seem like this file should contain Python?
+
+ This is used to decide if a file reported as part of the execution of
+ a program was really likely to have contained Python in the first
+ place.
+
+ """
+ # Get the file extension.
+ _, ext = os.path.splitext(self.filename)
+
+ # Anything named *.py* should be Python.
+ if ext.startswith('.py'):
+ return True
+ # A file with no extension should be Python.
+ if not ext:
+ return True
+ # Everything else is probably not Python.
+ return False
+
+ def source_token_lines(self):
+ return source_token_lines(self.source())
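+
+
+# --- Illustrative sketch (not part of the upstream module) ------------------
+# Typical mappings performed by source_for_file() above; the paths are
+# hypothetical.
+
+def _source_for_file_examples():
+    """Illustration only."""
+    # .py files are already source files.
+    assert source_for_file("pkg/mod.py") == "pkg/mod.py"
+    # Jython bytecode maps straight back to the .py name.
+    assert source_for_file("pkg/mod$py.class") == "pkg/mod.py"
+    # CPython bytecode maps to the neighbouring .py file (or a .pyw file on
+    # Windows if only that exists); the .py name is the best guess either way.
+    return source_for_file("pkg/mod.pyc")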
diff --git a/third_party/python/coverage/coverage/pytracer.py b/third_party/python/coverage/coverage/pytracer.py
new file mode 100644
index 0000000000..44bfc8d6a8
--- /dev/null
+++ b/third_party/python/coverage/coverage/pytracer.py
@@ -0,0 +1,245 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Raw data collector for coverage.py."""
+
+import atexit
+import dis
+import sys
+
+from coverage import env
+
+# We need the YIELD_VALUE opcode below, in a comparison-friendly form.
+YIELD_VALUE = dis.opmap['YIELD_VALUE']
+if env.PY2:
+ YIELD_VALUE = chr(YIELD_VALUE)
+
+
+class PyTracer(object):
+ """Python implementation of the raw data tracer."""
+
+ # Because of poor implementations of trace-function-manipulating tools,
+ # the Python trace function must be kept very simple. In particular, there
+ # must be only one function ever set as the trace function, both through
+ # sys.settrace, and as the return value from the trace function. Put
+ # another way, the trace function must always return itself. It cannot
+ # swap in other functions, or return None to avoid tracing a particular
+ # frame.
+ #
+ # The trace manipulator that introduced this restriction is DecoratorTools,
+ # which sets a trace function, and then later restores the pre-existing one
+ # by calling sys.settrace with a function it found in the current frame.
+ #
+ # Systems that use DecoratorTools (or similar trace manipulations) must use
+ # PyTracer to get accurate results. The command-line --timid argument is
+ # used to force the use of this tracer.
+
+ def __init__(self):
+ # Attributes set from the collector:
+ self.data = None
+ self.trace_arcs = False
+ self.should_trace = None
+ self.should_trace_cache = None
+ self.should_start_context = None
+ self.warn = None
+ # The threading module to use, if any.
+ self.threading = None
+
+ self.cur_file_dict = None
+ self.last_line = 0 # int, but uninitialized.
+ self.cur_file_name = None
+ self.context = None
+ self.started_context = False
+
+ self.data_stack = []
+ self.last_exc_back = None
+ self.last_exc_firstlineno = 0
+ self.thread = None
+ self.stopped = False
+ self._activity = False
+
+ self.in_atexit = False
+ # On exit, self.in_atexit = True
+ atexit.register(setattr, self, 'in_atexit', True)
+
+ def __repr__(self):
+ return "<PyTracer at {}: {} lines in {} files>".format(
+ id(self),
+ sum(len(v) for v in self.data.values()),
+ len(self.data),
+ )
+
+ def log(self, marker, *args):
+ """For hard-core logging of what this tracer is doing."""
+ with open("/tmp/debug_trace.txt", "a") as f:
+ f.write("{} {:x}.{:x}[{}] {:x} {}\n".format(
+ marker,
+ id(self),
+ self.thread.ident,
+ len(self.data_stack),
+ self.threading.currentThread().ident,
+ " ".join(map(str, args))
+ ))
+
+ def _trace(self, frame, event, arg_unused):
+ """The trace function passed to sys.settrace."""
+
+ #self.log(":", frame.f_code.co_filename, frame.f_lineno, event)
+
+ if (self.stopped and sys.gettrace() == self._trace): # pylint: disable=comparison-with-callable
+            # The PyTracer.stop() method has been called, possibly by another
+            # thread; let's deactivate ourselves now.
+ #self.log("X", frame.f_code.co_filename, frame.f_lineno)
+ sys.settrace(None)
+ return None
+
+ if self.last_exc_back:
+ if frame == self.last_exc_back:
+ # Someone forgot a return event.
+ if self.trace_arcs and self.cur_file_dict:
+ pair = (self.last_line, -self.last_exc_firstlineno)
+ self.cur_file_dict[pair] = None
+ self.cur_file_dict, self.cur_file_name, self.last_line, self.started_context = (
+ self.data_stack.pop()
+ )
+ self.last_exc_back = None
+
+ if event == 'call':
+ # Should we start a new context?
+ if self.should_start_context and self.context is None:
+ context_maybe = self.should_start_context(frame)
+ if context_maybe is not None:
+ self.context = context_maybe
+ self.started_context = True
+ self.switch_context(self.context)
+ else:
+ self.started_context = False
+ else:
+ self.started_context = False
+
+ # Entering a new frame. Decide if we should trace
+ # in this file.
+ self._activity = True
+ self.data_stack.append(
+ (
+ self.cur_file_dict,
+ self.cur_file_name,
+ self.last_line,
+ self.started_context,
+ )
+ )
+ filename = frame.f_code.co_filename
+ self.cur_file_name = filename
+ disp = self.should_trace_cache.get(filename)
+ if disp is None:
+ disp = self.should_trace(filename, frame)
+ self.should_trace_cache[filename] = disp
+
+ self.cur_file_dict = None
+ if disp.trace:
+ tracename = disp.source_filename
+ if tracename not in self.data:
+ self.data[tracename] = {}
+ self.cur_file_dict = self.data[tracename]
+ # The call event is really a "start frame" event, and happens for
+ # function calls and re-entering generators. The f_lasti field is
+ # -1 for calls, and a real offset for generators. Use <0 as the
+ # line number for calls, and the real line number for generators.
+ if getattr(frame, 'f_lasti', -1) < 0:
+ self.last_line = -frame.f_code.co_firstlineno
+ else:
+ self.last_line = frame.f_lineno
+ elif event == 'line':
+ # Record an executed line.
+ if self.cur_file_dict is not None:
+ lineno = frame.f_lineno
+ #if frame.f_code.co_filename != self.cur_file_name:
+ # self.log("*", frame.f_code.co_filename, self.cur_file_name, lineno)
+ if self.trace_arcs:
+ self.cur_file_dict[(self.last_line, lineno)] = None
+ else:
+ self.cur_file_dict[lineno] = None
+ self.last_line = lineno
+ elif event == 'return':
+ if self.trace_arcs and self.cur_file_dict:
+ # Record an arc leaving the function, but beware that a
+ # "return" event might just mean yielding from a generator.
+ # Jython seems to have an empty co_code, so just assume return.
+ code = frame.f_code.co_code
+ if (not code) or code[frame.f_lasti] != YIELD_VALUE:
+ first = frame.f_code.co_firstlineno
+ self.cur_file_dict[(self.last_line, -first)] = None
+ # Leaving this function, pop the filename stack.
+ self.cur_file_dict, self.cur_file_name, self.last_line, self.started_context = (
+ self.data_stack.pop()
+ )
+ # Leaving a context?
+ if self.started_context:
+ self.context = None
+ self.switch_context(None)
+ elif event == 'exception':
+ self.last_exc_back = frame.f_back
+ self.last_exc_firstlineno = frame.f_code.co_firstlineno
+ return self._trace
+
+ def start(self):
+ """Start this Tracer.
+
+ Return a Python function suitable for use with sys.settrace().
+
+ """
+ self.stopped = False
+ if self.threading:
+ if self.thread is None:
+ self.thread = self.threading.currentThread()
+ else:
+ if self.thread.ident != self.threading.currentThread().ident:
+ # Re-starting from a different thread!? Don't set the trace
+ # function, but we are marked as running again, so maybe it
+ # will be ok?
+ #self.log("~", "starting on different threads")
+ return self._trace
+
+ sys.settrace(self._trace)
+ return self._trace
+
+ def stop(self):
+ """Stop this Tracer."""
+ # Get the active tracer callback before setting the stop flag to be
+ # able to detect if the tracer was changed prior to stopping it.
+ tf = sys.gettrace()
+
+ # Set the stop flag. The actual call to sys.settrace(None) will happen
+ # in the self._trace callback itself to make sure to call it from the
+ # right thread.
+ self.stopped = True
+
+ if self.threading and self.thread.ident != self.threading.currentThread().ident:
+            # Called on a different thread than the one that started us: we
+            # can't unhook
+ # ourselves, but we've set the flag that we should stop, so we
+ # won't do any more tracing.
+ #self.log("~", "stopping on different threads")
+ return
+
+ if self.warn:
+ # PyPy clears the trace function before running atexit functions,
+ # so don't warn if we are in atexit on PyPy and the trace function
+ # has changed to None.
+ dont_warn = (env.PYPY and env.PYPYVERSION >= (5, 4) and self.in_atexit and tf is None)
+ if (not dont_warn) and tf != self._trace: # pylint: disable=comparison-with-callable
+ self.warn(
+ "Trace function changed, measurement is likely wrong: %r" % (tf,),
+ slug="trace-changed",
+ )
+
+ def activity(self):
+ """Has there been any activity?"""
+ return self._activity
+
+ def reset_activity(self):
+ """Reset the activity() flag."""
+ self._activity = False
+
+ def get_stats(self):
+ """Return a dictionary of statistics, or None."""
+ return None
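+
+
+# --- Illustrative sketch (not part of the upstream module) ------------------
+# PyTracer is normally wired up by the collector.  This hand-built harness
+# shows which attributes it needs before start() is called; the attribute
+# values here are minimal stand-ins, not the real collector objects.
+
+def _demo_pytracer(fn):
+    """Run `fn` under a bare PyTracer and return its raw {file: {line: None}} data."""
+    import collections
+    Disp = collections.namedtuple("Disp", "trace source_filename")
+
+    tracer = PyTracer()
+    tracer.data = {}                      # filled in by _trace()
+    tracer.trace_arcs = False             # record lines, not arcs
+    tracer.should_trace = lambda filename, frame: Disp(True, filename)
+    tracer.should_trace_cache = {}
+    tracer.warn = lambda msg, slug=None: None
+
+    tracer.start()
+    try:
+        fn()
+    finally:
+        tracer.stop()
+        # stop() only sets a flag; the trace function normally removes itself
+        # on its next call.  Clear it here so the demo leaves nothing behind.
+        sys.settrace(None)
+    return tracer.data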
diff --git a/third_party/python/coverage/coverage/report.py b/third_party/python/coverage/coverage/report.py
new file mode 100644
index 0000000000..64678ff95d
--- /dev/null
+++ b/third_party/python/coverage/coverage/report.py
@@ -0,0 +1,86 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Reporter foundation for coverage.py."""
+import sys
+
+from coverage import env
+from coverage.files import prep_patterns, FnmatchMatcher
+from coverage.misc import CoverageException, NoSource, NotPython, ensure_dir_for_file, file_be_gone
+
+
+def render_report(output_path, reporter, morfs):
+ """Run the provided reporter ensuring any required setup and cleanup is done
+
+ At a high level this method ensures the output file is ready to be written to. Then writes the
+ report to it. Then closes the file and deletes any garbage created if necessary.
+ """
+ file_to_close = None
+ delete_file = False
+ if output_path:
+ if output_path == '-':
+ outfile = sys.stdout
+ else:
+ # Ensure that the output directory is created; done here
+ # because this report pre-opens the output file.
+ # HTMLReport does this using the Report plumbing because
+ # its task is more complex, being multiple files.
+ ensure_dir_for_file(output_path)
+ open_kwargs = {}
+ if env.PY3:
+ open_kwargs['encoding'] = 'utf8'
+ outfile = open(output_path, "w", **open_kwargs)
+ file_to_close = outfile
+ try:
+ return reporter.report(morfs, outfile=outfile)
+ except CoverageException:
+ delete_file = True
+ raise
+ finally:
+ if file_to_close:
+ file_to_close.close()
+ if delete_file:
+ file_be_gone(output_path)
+
+
+def get_analysis_to_report(coverage, morfs):
+ """Get the files to report on.
+
+ For each morf in `morfs`, if it should be reported on (based on the omit
+ and include configuration options), yield a pair, the `FileReporter` and
+ `Analysis` for the morf.
+
+ """
+ file_reporters = coverage._get_file_reporters(morfs)
+ config = coverage.config
+
+ if config.report_include:
+ matcher = FnmatchMatcher(prep_patterns(config.report_include))
+ file_reporters = [fr for fr in file_reporters if matcher.match(fr.filename)]
+
+ if config.report_omit:
+ matcher = FnmatchMatcher(prep_patterns(config.report_omit))
+ file_reporters = [fr for fr in file_reporters if not matcher.match(fr.filename)]
+
+ if not file_reporters:
+ raise CoverageException("No data to report.")
+
+ for fr in sorted(file_reporters):
+ try:
+ analysis = coverage._analyze(fr)
+ except NoSource:
+ if not config.ignore_errors:
+ raise
+ except NotPython:
+ # Only report errors for .py files, and only if we didn't
+ # explicitly suppress those errors.
+ # NotPython is only raised by PythonFileReporter, which has a
+ # should_be_python() method.
+ if fr.should_be_python():
+ if config.ignore_errors:
+ msg = "Couldn't parse Python file '{}'".format(fr.filename)
+ coverage._warn(msg, slug="couldnt-parse")
+ else:
+ raise
+ else:
+ yield (fr, analysis)
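+
+
+# --- Illustrative sketch (not part of the upstream module) ------------------
+# render_report() only needs an object exposing report(morfs, outfile=...).
+# A hypothetical minimal reporter, to make the calling convention concrete:
+
+class _EchoReporter(object):
+    """Write one line per morf and return a fake total percentage."""
+
+    def report(self, morfs, outfile):
+        for morf in morfs or ():
+            outfile.write("would report on %r\n" % (morf,))
+        return 100.0
+
+# render_report("-", _EchoReporter(), ["mod.py"]) writes to sys.stdout and
+# returns the reporter's total; with a real path, the enclosing directory is
+# created first and a partially written file is removed if reporting raises
+# CoverageException.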
diff --git a/third_party/python/coverage/coverage/results.py b/third_party/python/coverage/coverage/results.py
new file mode 100644
index 0000000000..ae8366bf5a
--- /dev/null
+++ b/third_party/python/coverage/coverage/results.py
@@ -0,0 +1,346 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Results of coverage measurement."""
+
+import collections
+
+from coverage.backward import iitems
+from coverage.debug import SimpleReprMixin
+from coverage.misc import contract, CoverageException, nice_pair
+
+
+class Analysis(object):
+ """The results of analyzing a FileReporter."""
+
+ def __init__(self, data, file_reporter, file_mapper):
+ self.data = data
+ self.file_reporter = file_reporter
+ self.filename = file_mapper(self.file_reporter.filename)
+ self.statements = self.file_reporter.lines()
+ self.excluded = self.file_reporter.excluded_lines()
+
+ # Identify missing statements.
+ executed = self.data.lines(self.filename) or []
+ executed = self.file_reporter.translate_lines(executed)
+ self.executed = executed
+ self.missing = self.statements - self.executed
+
+ if self.data.has_arcs():
+ self._arc_possibilities = sorted(self.file_reporter.arcs())
+ self.exit_counts = self.file_reporter.exit_counts()
+ self.no_branch = self.file_reporter.no_branch_lines()
+ n_branches = self._total_branches()
+ mba = self.missing_branch_arcs()
+ n_partial_branches = sum(len(v) for k,v in iitems(mba) if k not in self.missing)
+ n_missing_branches = sum(len(v) for k,v in iitems(mba))
+ else:
+ self._arc_possibilities = []
+ self.exit_counts = {}
+ self.no_branch = set()
+ n_branches = n_partial_branches = n_missing_branches = 0
+
+ self.numbers = Numbers(
+ n_files=1,
+ n_statements=len(self.statements),
+ n_excluded=len(self.excluded),
+ n_missing=len(self.missing),
+ n_branches=n_branches,
+ n_partial_branches=n_partial_branches,
+ n_missing_branches=n_missing_branches,
+ )
+
+ def missing_formatted(self, branches=False):
+ """The missing line numbers, formatted nicely.
+
+ Returns a string like "1-2, 5-11, 13-14".
+
+ If `branches` is true, includes the missing branch arcs also.
+
+ """
+ if branches and self.has_arcs():
+ arcs = iitems(self.missing_branch_arcs())
+ else:
+ arcs = None
+
+ return format_lines(self.statements, self.missing, arcs=arcs)
+
+ def has_arcs(self):
+ """Were arcs measured in this result?"""
+ return self.data.has_arcs()
+
+ @contract(returns='list(tuple(int, int))')
+ def arc_possibilities(self):
+ """Returns a sorted list of the arcs in the code."""
+ return self._arc_possibilities
+
+ @contract(returns='list(tuple(int, int))')
+ def arcs_executed(self):
+ """Returns a sorted list of the arcs actually executed in the code."""
+ executed = self.data.arcs(self.filename) or []
+ executed = self.file_reporter.translate_arcs(executed)
+ return sorted(executed)
+
+ @contract(returns='list(tuple(int, int))')
+ def arcs_missing(self):
+ """Returns a sorted list of the arcs in the code not executed."""
+ possible = self.arc_possibilities()
+ executed = self.arcs_executed()
+ missing = (
+ p for p in possible
+ if p not in executed
+ and p[0] not in self.no_branch
+ )
+ return sorted(missing)
+
+ @contract(returns='list(tuple(int, int))')
+ def arcs_unpredicted(self):
+ """Returns a sorted list of the executed arcs missing from the code."""
+ possible = self.arc_possibilities()
+ executed = self.arcs_executed()
+ # Exclude arcs here which connect a line to itself. They can occur
+ # in executed data in some cases. This is where they can cause
+ # trouble, and here is where it's the least burden to remove them.
+ # Also, generators can somehow cause arcs from "enter" to "exit", so
+ # make sure we have at least one positive value.
+ unpredicted = (
+ e for e in executed
+ if e not in possible
+ and e[0] != e[1]
+ and (e[0] > 0 or e[1] > 0)
+ )
+ return sorted(unpredicted)
+
+ def _branch_lines(self):
+ """Returns a list of line numbers that have more than one exit."""
+ return [l1 for l1,count in iitems(self.exit_counts) if count > 1]
+
+ def _total_branches(self):
+ """How many total branches are there?"""
+ return sum(count for count in self.exit_counts.values() if count > 1)
+
+ @contract(returns='dict(int: list(int))')
+ def missing_branch_arcs(self):
+ """Return arcs that weren't executed from branch lines.
+
+ Returns {l1:[l2a,l2b,...], ...}
+
+ """
+ missing = self.arcs_missing()
+ branch_lines = set(self._branch_lines())
+ mba = collections.defaultdict(list)
+ for l1, l2 in missing:
+ if l1 in branch_lines:
+ mba[l1].append(l2)
+ return mba
+
+ @contract(returns='dict(int: tuple(int, int))')
+ def branch_stats(self):
+ """Get stats about branches.
+
+ Returns a dict mapping line numbers to a tuple:
+ (total_exits, taken_exits).
+ """
+
+ missing_arcs = self.missing_branch_arcs()
+ stats = {}
+ for lnum in self._branch_lines():
+ exits = self.exit_counts[lnum]
+ try:
+ missing = len(missing_arcs[lnum])
+ except KeyError:
+ missing = 0
+ stats[lnum] = (exits, exits - missing)
+ return stats
+
+
+class Numbers(SimpleReprMixin):
+ """The numerical results of measuring coverage.
+
+ This holds the basic statistics from `Analysis`, and is used to roll
+ up statistics across files.
+
+ """
+ # A global to determine the precision on coverage percentages, the number
+ # of decimal places.
+ _precision = 0
+ _near0 = 1.0 # These will change when _precision is changed.
+ _near100 = 99.0
+
+ def __init__(self, n_files=0, n_statements=0, n_excluded=0, n_missing=0,
+ n_branches=0, n_partial_branches=0, n_missing_branches=0
+ ):
+ self.n_files = n_files
+ self.n_statements = n_statements
+ self.n_excluded = n_excluded
+ self.n_missing = n_missing
+ self.n_branches = n_branches
+ self.n_partial_branches = n_partial_branches
+ self.n_missing_branches = n_missing_branches
+
+ def init_args(self):
+ """Return a list for __init__(*args) to recreate this object."""
+ return [
+ self.n_files, self.n_statements, self.n_excluded, self.n_missing,
+ self.n_branches, self.n_partial_branches, self.n_missing_branches,
+ ]
+
+ @classmethod
+ def set_precision(cls, precision):
+ """Set the number of decimal places used to report percentages."""
+ assert 0 <= precision < 10
+ cls._precision = precision
+ cls._near0 = 1.0 / 10**precision
+ cls._near100 = 100.0 - cls._near0
+
+ @property
+ def n_executed(self):
+ """Returns the number of executed statements."""
+ return self.n_statements - self.n_missing
+
+ @property
+ def n_executed_branches(self):
+ """Returns the number of executed branches."""
+ return self.n_branches - self.n_missing_branches
+
+ @property
+ def pc_covered(self):
+ """Returns a single percentage value for coverage."""
+ if self.n_statements > 0:
+ numerator, denominator = self.ratio_covered
+ pc_cov = (100.0 * numerator) / denominator
+ else:
+ pc_cov = 100.0
+ return pc_cov
+
+ @property
+ def pc_covered_str(self):
+ """Returns the percent covered, as a string, without a percent sign.
+
+ Note that "0" is only returned when the value is truly zero, and "100"
+ is only returned when the value is truly 100. Rounding can never
+ result in either "0" or "100".
+
+ """
+ pc = self.pc_covered
+ if 0 < pc < self._near0:
+ pc = self._near0
+ elif self._near100 < pc < 100:
+ pc = self._near100
+ else:
+ pc = round(pc, self._precision)
+ return "%.*f" % (self._precision, pc)
+
+ @classmethod
+ def pc_str_width(cls):
+ """How many characters wide can pc_covered_str be?"""
+ width = 3 # "100"
+ if cls._precision > 0:
+ width += 1 + cls._precision
+ return width
+
+ @property
+ def ratio_covered(self):
+ """Return a numerator and denominator for the coverage ratio."""
+ numerator = self.n_executed + self.n_executed_branches
+ denominator = self.n_statements + self.n_branches
+ return numerator, denominator
+
+ def __add__(self, other):
+ nums = Numbers()
+ nums.n_files = self.n_files + other.n_files
+ nums.n_statements = self.n_statements + other.n_statements
+ nums.n_excluded = self.n_excluded + other.n_excluded
+ nums.n_missing = self.n_missing + other.n_missing
+ nums.n_branches = self.n_branches + other.n_branches
+ nums.n_partial_branches = (
+ self.n_partial_branches + other.n_partial_branches
+ )
+ nums.n_missing_branches = (
+ self.n_missing_branches + other.n_missing_branches
+ )
+ return nums
+
+ def __radd__(self, other):
+ # Implementing 0+Numbers allows us to sum() a list of Numbers.
+ if other == 0:
+ return self
+ return NotImplemented
+
+
+def _line_ranges(statements, lines):
+ """Produce a list of ranges for `format_lines`."""
+ statements = sorted(statements)
+ lines = sorted(lines)
+
+ pairs = []
+ start = None
+ lidx = 0
+ for stmt in statements:
+ if lidx >= len(lines):
+ break
+ if stmt == lines[lidx]:
+ lidx += 1
+ if not start:
+ start = stmt
+ end = stmt
+ elif start:
+ pairs.append((start, end))
+ start = None
+ if start:
+ pairs.append((start, end))
+ return pairs
+
+
+def format_lines(statements, lines, arcs=None):
+ """Nicely format a list of line numbers.
+
+ Format a list of line numbers for printing by coalescing groups of lines as
+ long as the lines represent consecutive statements. This will coalesce
+ even if there are gaps between statements.
+
+ For example, if `statements` is [1,2,3,4,5,10,11,12,13,14] and
+ `lines` is [1,2,5,10,11,13,14] then the result will be "1-2, 5-11, 13-14".
+
+ Both `lines` and `statements` can be any iterable. All of the elements of
+ `lines` must be in `statements`, and all of the values must be positive
+ integers.
+
+ If `arcs` is provided, they are (start,[end,end,end]) pairs that will be
+ included in the output as long as start isn't in `lines`.
+
+ """
+ line_items = [(pair[0], nice_pair(pair)) for pair in _line_ranges(statements, lines)]
+ if arcs:
+ line_exits = sorted(arcs)
+ for line, exits in line_exits:
+ for ex in sorted(exits):
+ if line not in lines:
+ dest = (ex if ex > 0 else "exit")
+ line_items.append((line, "%d->%s" % (line, dest)))
+
+ ret = ', '.join(t[-1] for t in sorted(line_items))
+ return ret
+
+
+@contract(total='number', fail_under='number', precision=int, returns=bool)
+def should_fail_under(total, fail_under, precision):
+ """Determine if a total should fail due to fail-under.
+
+ `total` is a float, the coverage measurement total. `fail_under` is the
+ fail_under setting to compare with. `precision` is the number of digits
+ to consider after the decimal point.
+
+ Returns True if the total should fail.
+
+ """
+ # We can never achieve higher than 100% coverage, or less than zero.
+ if not (0 <= fail_under <= 100.0):
+ msg = "fail_under={} is invalid. Must be between 0 and 100.".format(fail_under)
+ raise CoverageException(msg)
+
+ # Special case for fail_under=100, it must really be 100.
+ if fail_under == 100.0 and total != 100.0:
+ return True
+
+ return round(total, precision) < fail_under
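+
+
+# --- Illustrative self-check (not part of the upstream module) --------------
+# Tiny concrete cases for the helpers above, matching their docstrings.
+
+def _results_examples():
+    """Illustration only."""
+    assert format_lines(
+        [1, 2, 3, 4, 5, 10, 11, 12, 13, 14],
+        [1, 2, 5, 10, 11, 13, 14],
+    ) == "1-2, 5-11, 13-14"
+    # fail_under=100 means the total must really be 100, regardless of rounding.
+    assert should_fail_under(99.999, 100.0, 2) is True
+    # Otherwise the total is rounded to `precision` digits before comparing.
+    assert should_fail_under(84.996, 85.0, 2) is False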
diff --git a/third_party/python/coverage/coverage/sqldata.py b/third_party/python/coverage/coverage/sqldata.py
new file mode 100644
index 0000000000..b8ee885327
--- /dev/null
+++ b/third_party/python/coverage/coverage/sqldata.py
@@ -0,0 +1,1106 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Sqlite coverage data."""
+
+# TODO: factor out dataop debugging to a wrapper class?
+# TODO: make sure all dataop debugging is in place somehow
+
+import collections
+import datetime
+import glob
+import itertools
+import os
+import re
+import sqlite3
+import sys
+import zlib
+
+from coverage import env
+from coverage.backward import get_thread_id, iitems, to_bytes, to_string
+from coverage.debug import NoDebugging, SimpleReprMixin, clipped_repr
+from coverage.files import PathAliases
+from coverage.misc import CoverageException, contract, file_be_gone, filename_suffix, isolate_module
+from coverage.numbits import numbits_to_nums, numbits_union, nums_to_numbits
+from coverage.version import __version__
+
+os = isolate_module(os)
+
+# If you change the schema, increment the SCHEMA_VERSION, and update the
+# docs in docs/dbschema.rst also.
+
+SCHEMA_VERSION = 7
+
+# Schema versions:
+# 1: Released in 5.0a2
+# 2: Added contexts in 5.0a3.
+# 3: Replaced line table with line_map table.
+# 4: Changed line_map.bitmap to line_map.numbits.
+# 5: Added foreign key declarations.
+# 6: Key-value in meta.
+# 7: line_map -> line_bits
+
+SCHEMA = """\
+CREATE TABLE coverage_schema (
+ -- One row, to record the version of the schema in this db.
+ version integer
+);
+
+CREATE TABLE meta (
+ -- Key-value pairs, to record metadata about the data
+ key text,
+ value text,
+ unique (key)
+ -- Keys:
+ -- 'has_arcs' boolean -- Is this data recording branches?
+ -- 'sys_argv' text -- The coverage command line that recorded the data.
+ -- 'version' text -- The version of coverage.py that made the file.
+ -- 'when' text -- Datetime when the file was created.
+);
+
+CREATE TABLE file (
+ -- A row per file measured.
+ id integer primary key,
+ path text,
+ unique (path)
+);
+
+CREATE TABLE context (
+ -- A row per context measured.
+ id integer primary key,
+ context text,
+ unique (context)
+);
+
+CREATE TABLE line_bits (
+ -- If recording lines, a row per context per file executed.
+ -- All of the line numbers for that file/context are in one numbits.
+ file_id integer, -- foreign key to `file`.
+ context_id integer, -- foreign key to `context`.
+ numbits blob, -- see the numbits functions in coverage.numbits
+ foreign key (file_id) references file (id),
+ foreign key (context_id) references context (id),
+ unique (file_id, context_id)
+);
+
+CREATE TABLE arc (
+ -- If recording branches, a row per context per from/to line transition executed.
+ file_id integer, -- foreign key to `file`.
+ context_id integer, -- foreign key to `context`.
+ fromno integer, -- line number jumped from.
+ tono integer, -- line number jumped to.
+ foreign key (file_id) references file (id),
+ foreign key (context_id) references context (id),
+ unique (file_id, context_id, fromno, tono)
+);
+
+CREATE TABLE tracer (
+ -- A row per file indicating the tracer used for that file.
+ file_id integer primary key,
+ tracer text,
+ foreign key (file_id) references file (id)
+);
+"""
+
+class CoverageData(SimpleReprMixin):
+ """Manages collected coverage data, including file storage.
+
+ This class is the public supported API to the data that coverage.py
+ collects during program execution. It includes information about what code
+ was executed. It does not include information from the analysis phase, to
+ determine what lines could have been executed, or what lines were not
+ executed.
+
+ .. note::
+
+ The data file is currently a SQLite database file, with a
+ :ref:`documented schema <dbschema>`. The schema is subject to change
+ though, so be careful about querying it directly. Use this API if you
+ can to isolate yourself from changes.
+
+ There are a number of kinds of data that can be collected:
+
+ * **lines**: the line numbers of source lines that were executed.
+ These are always available.
+
+ * **arcs**: pairs of source and destination line numbers for transitions
+ between source lines. These are only available if branch coverage was
+ used.
+
+ * **file tracer names**: the module names of the file tracer plugins that
+ handled each file in the data.
+
+ Lines, arcs, and file tracer names are stored for each source file. File
+ names in this API are case-sensitive, even on platforms with
+ case-insensitive file systems.
+
+    A data file stores either lines or arcs, but not both.
+
+ A data file is associated with the data when the :class:`CoverageData`
+ is created, using the parameters `basename`, `suffix`, and `no_disk`. The
+ base name can be queried with :meth:`base_filename`, and the actual file
+ name being used is available from :meth:`data_filename`.
+
+ To read an existing coverage.py data file, use :meth:`read`. You can then
+ access the line, arc, or file tracer data with :meth:`lines`, :meth:`arcs`,
+ or :meth:`file_tracer`.
+
+ The :meth:`has_arcs` method indicates whether arc data is available. You
+ can get a set of the files in the data with :meth:`measured_files`. As
+ with most Python containers, you can determine if there is any data at all
+ by using this object as a boolean value.
+
+ The contexts for each line in a file can be read with
+ :meth:`contexts_by_lineno`.
+
+ To limit querying to certain contexts, use :meth:`set_query_context` or
+ :meth:`set_query_contexts`. These will narrow the focus of subsequent
+ :meth:`lines`, :meth:`arcs`, and :meth:`contexts_by_lineno` calls. The set
+ of all measured context names can be retrieved with
+ :meth:`measured_contexts`.
+
+ Most data files will be created by coverage.py itself, but you can use
+ methods here to create data files if you like. The :meth:`add_lines`,
+ :meth:`add_arcs`, and :meth:`add_file_tracers` methods add data, in ways
+ that are convenient for coverage.py.
+
+ To record data for contexts, use :meth:`set_context` to set a context to
+ be used for subsequent :meth:`add_lines` and :meth:`add_arcs` calls.
+
+ To add a source file without any measured data, use :meth:`touch_file`.
+
+ Write the data to its file with :meth:`write`.
+
+ You can clear the data in memory with :meth:`erase`. Two data collections
+ can be combined by using :meth:`update` on one :class:`CoverageData`,
+ passing it the other.
+
+ Data in a :class:`CoverageData` can be serialized and deserialized with
+ :meth:`dumps` and :meth:`loads`.
+
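+    A minimal reading sketch (the module name here is illustrative)::
+
+        data = CoverageData()
+        data.read()
+        print(data.measured_files())
+        print(data.lines("mod.py"))
+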
+ """
+
+ def __init__(self, basename=None, suffix=None, no_disk=False, warn=None, debug=None):
+ """Create a :class:`CoverageData` object to hold coverage-measured data.
+
+ Arguments:
+ basename (str): the base name of the data file, defaulting to
+ ".coverage".
+ suffix (str or bool): has the same meaning as the `data_suffix`
+ argument to :class:`coverage.Coverage`.
+ no_disk (bool): if True, keep all data in memory, and don't
+ write any disk file.
+ warn: a warning callback function, accepting a warning message
+ argument.
+ debug: a `DebugControl` object (optional)
+
+ """
+ self._no_disk = no_disk
+ self._basename = os.path.abspath(basename or ".coverage")
+ self._suffix = suffix
+ self._warn = warn
+ self._debug = debug or NoDebugging()
+
+ self._choose_filename()
+ self._file_map = {}
+ # Maps thread ids to SqliteDb objects.
+ self._dbs = {}
+ self._pid = os.getpid()
+
+ # Are we in sync with the data file?
+ self._have_used = False
+
+ self._has_lines = False
+ self._has_arcs = False
+
+ self._current_context = None
+ self._current_context_id = None
+ self._query_context_ids = None
+
+ def _choose_filename(self):
+ """Set self._filename based on inited attributes."""
+ if self._no_disk:
+ self._filename = ":memory:"
+ else:
+ self._filename = self._basename
+ suffix = filename_suffix(self._suffix)
+ if suffix:
+ self._filename += "." + suffix
+
+ def _reset(self):
+ """Reset our attributes."""
+ if self._dbs:
+ for db in self._dbs.values():
+ db.close()
+ self._dbs = {}
+ self._file_map = {}
+ self._have_used = False
+ self._current_context_id = None
+
+ def _create_db(self):
+ """Create a db file that doesn't exist yet.
+
+ Initializes the schema and certain metadata.
+ """
+ if self._debug.should('dataio'):
+ self._debug.write("Creating data file {!r}".format(self._filename))
+ self._dbs[get_thread_id()] = db = SqliteDb(self._filename, self._debug)
+ with db:
+ db.executescript(SCHEMA)
+ db.execute("insert into coverage_schema (version) values (?)", (SCHEMA_VERSION,))
+ db.executemany(
+ "insert into meta (key, value) values (?, ?)",
+ [
+ ('sys_argv', str(getattr(sys, 'argv', None))),
+ ('version', __version__),
+ ('when', datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')),
+ ]
+ )
+
+ def _open_db(self):
+ """Open an existing db file, and read its metadata."""
+ if self._debug.should('dataio'):
+ self._debug.write("Opening data file {!r}".format(self._filename))
+ self._dbs[get_thread_id()] = SqliteDb(self._filename, self._debug)
+ self._read_db()
+
+ def _read_db(self):
+ """Read the metadata from a database so that we are ready to use it."""
+ with self._dbs[get_thread_id()] as db:
+ try:
+ schema_version, = db.execute_one("select version from coverage_schema")
+ except Exception as exc:
+ raise CoverageException(
+ "Data file {!r} doesn't seem to be a coverage data file: {}".format(
+ self._filename, exc
+ )
+ )
+ else:
+ if schema_version != SCHEMA_VERSION:
+ raise CoverageException(
+ "Couldn't use data file {!r}: wrong schema: {} instead of {}".format(
+ self._filename, schema_version, SCHEMA_VERSION
+ )
+ )
+
+ for row in db.execute("select value from meta where key = 'has_arcs'"):
+ self._has_arcs = bool(int(row[0]))
+ self._has_lines = not self._has_arcs
+
+ for path, file_id in db.execute("select path, id from file"):
+ self._file_map[path] = file_id
+
+ def _connect(self):
+ """Get the SqliteDb object to use."""
+ if get_thread_id() not in self._dbs:
+ if os.path.exists(self._filename):
+ self._open_db()
+ else:
+ self._create_db()
+ return self._dbs[get_thread_id()]
+
+ def __nonzero__(self):
+ if (get_thread_id() not in self._dbs and not os.path.exists(self._filename)):
+ return False
+ try:
+ with self._connect() as con:
+ rows = con.execute("select * from file limit 1")
+ return bool(list(rows))
+ except CoverageException:
+ return False
+
+ __bool__ = __nonzero__
+
+ @contract(returns='bytes')
+ def dumps(self):
+ """Serialize the current data to a byte string.
+
+ The format of the serialized data is not documented. It is only
+ suitable for use with :meth:`loads` in the same version of
+ coverage.py.
+
+ Returns:
+ A byte string of serialized data.
+
+ .. versionadded:: 5.0
+
+ """
+ if self._debug.should('dataio'):
+ self._debug.write("Dumping data from data file {!r}".format(self._filename))
+ with self._connect() as con:
+ return b'z' + zlib.compress(to_bytes(con.dump()))
+
+ @contract(data='bytes')
+ def loads(self, data):
+ """Deserialize data from :meth:`dumps`
+
+ Use with a newly-created empty :class:`CoverageData` object. It's
+ undefined what happens if the object already has data in it.
+
+ Arguments:
+ data: A byte string of serialized data produced by :meth:`dumps`.
+
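+        A round-trip sketch (the variable names are illustrative)::
+
+            suspended = data.dumps()
+            revived = CoverageData(no_disk=True)
+            revived.loads(suspended)
+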
+ .. versionadded:: 5.0
+
+ """
+ if self._debug.should('dataio'):
+ self._debug.write("Loading data into data file {!r}".format(self._filename))
+ if data[:1] != b'z':
+ raise CoverageException(
+ "Unrecognized serialization: {!r} (head of {} bytes)".format(data[:40], len(data))
+ )
+ script = to_string(zlib.decompress(data[1:]))
+ self._dbs[get_thread_id()] = db = SqliteDb(self._filename, self._debug)
+ with db:
+ db.executescript(script)
+ self._read_db()
+ self._have_used = True
+
+ def _file_id(self, filename, add=False):
+ """Get the file id for `filename`.
+
+ If filename is not in the database yet, add it if `add` is True.
+ If `add` is not True, return None.
+ """
+ if filename not in self._file_map:
+ if add:
+ with self._connect() as con:
+ cur = con.execute("insert or replace into file (path) values (?)", (filename,))
+ self._file_map[filename] = cur.lastrowid
+ return self._file_map.get(filename)
+
+ def _context_id(self, context):
+ """Get the id for a context."""
+ assert context is not None
+ self._start_using()
+ with self._connect() as con:
+ row = con.execute_one("select id from context where context = ?", (context,))
+ if row is not None:
+ return row[0]
+ else:
+ return None
+
+ def set_context(self, context):
+ """Set the current context for future :meth:`add_lines` etc.
+
+ `context` is a str, the name of the context to use for the next data
+ additions. The context persists until the next :meth:`set_context`.
+
+ .. versionadded:: 5.0
+
+ """
+ if self._debug.should('dataop'):
+ self._debug.write("Setting context: %r" % (context,))
+ self._current_context = context
+ self._current_context_id = None
+
+ def _set_context_id(self):
+ """Use the _current_context to set _current_context_id."""
+ context = self._current_context or ""
+ context_id = self._context_id(context)
+ if context_id is not None:
+ self._current_context_id = context_id
+ else:
+ with self._connect() as con:
+ cur = con.execute("insert into context (context) values (?)", (context,))
+ self._current_context_id = cur.lastrowid
+
+ def base_filename(self):
+ """The base filename for storing data.
+
+ .. versionadded:: 5.0
+
+ """
+ return self._basename
+
+ def data_filename(self):
+ """Where is the data stored?
+
+ .. versionadded:: 5.0
+
+ """
+ return self._filename
+
+ def add_lines(self, line_data):
+ """Add measured line data.
+
+ `line_data` is a dictionary mapping file names to dictionaries::
+
+ { filename: { lineno: None, ... }, ...}
+
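+        For example (the file name and line numbers are illustrative)::
+
+            data.add_lines({"mod.py": {3: None, 4: None, 7: None}})
+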
+ """
+ if self._debug.should('dataop'):
+ self._debug.write("Adding lines: %d files, %d lines total" % (
+ len(line_data), sum(len(lines) for lines in line_data.values())
+ ))
+ self._start_using()
+ self._choose_lines_or_arcs(lines=True)
+ if not line_data:
+ return
+ with self._connect() as con:
+ self._set_context_id()
+ for filename, linenos in iitems(line_data):
+ linemap = nums_to_numbits(linenos)
+ file_id = self._file_id(filename, add=True)
+ query = "select numbits from line_bits where file_id = ? and context_id = ?"
+ existing = list(con.execute(query, (file_id, self._current_context_id)))
+ if existing:
+ linemap = numbits_union(linemap, existing[0][0])
+
+ con.execute(
+ "insert or replace into line_bits "
+ " (file_id, context_id, numbits) values (?, ?, ?)",
+ (file_id, self._current_context_id, linemap),
+ )
+
+ def add_arcs(self, arc_data):
+ """Add measured arc data.
+
+ `arc_data` is a dictionary mapping file names to dictionaries::
+
+ { filename: { (l1,l2): None, ... }, ...}
+
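+        For example (illustrative; negative numbers mark entries and exits)::
+
+            data.add_arcs({"mod.py": {(-1, 1): None, (1, 2): None, (2, -1): None}})
+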
+ """
+ if self._debug.should('dataop'):
+ self._debug.write("Adding arcs: %d files, %d arcs total" % (
+ len(arc_data), sum(len(arcs) for arcs in arc_data.values())
+ ))
+ self._start_using()
+ self._choose_lines_or_arcs(arcs=True)
+ if not arc_data:
+ return
+ with self._connect() as con:
+ self._set_context_id()
+ for filename, arcs in iitems(arc_data):
+ file_id = self._file_id(filename, add=True)
+ data = [(file_id, self._current_context_id, fromno, tono) for fromno, tono in arcs]
+ con.executemany(
+ "insert or ignore into arc "
+ "(file_id, context_id, fromno, tono) values (?, ?, ?, ?)",
+ data,
+ )
+
+ def _choose_lines_or_arcs(self, lines=False, arcs=False):
+ """Force the data file to choose between lines and arcs."""
+ assert lines or arcs
+ assert not (lines and arcs)
+ if lines and self._has_arcs:
+ raise CoverageException("Can't add lines to existing arc data")
+ if arcs and self._has_lines:
+ raise CoverageException("Can't add arcs to existing line data")
+ if not self._has_arcs and not self._has_lines:
+ self._has_lines = lines
+ self._has_arcs = arcs
+ with self._connect() as con:
+ con.execute(
+ "insert into meta (key, value) values (?, ?)",
+ ('has_arcs', str(int(arcs)))
+ )
+
+ def add_file_tracers(self, file_tracers):
+ """Add per-file plugin information.
+
+ `file_tracers` is { filename: plugin_name, ... }
+
+ """
+ if self._debug.should('dataop'):
+ self._debug.write("Adding file tracers: %d files" % (len(file_tracers),))
+ if not file_tracers:
+ return
+ self._start_using()
+ with self._connect() as con:
+ for filename, plugin_name in iitems(file_tracers):
+ file_id = self._file_id(filename)
+ if file_id is None:
+ raise CoverageException(
+ "Can't add file tracer data for unmeasured file '%s'" % (filename,)
+ )
+
+ existing_plugin = self.file_tracer(filename)
+ if existing_plugin:
+ if existing_plugin != plugin_name:
+ raise CoverageException(
+ "Conflicting file tracer name for '%s': %r vs %r" % (
+ filename, existing_plugin, plugin_name,
+ )
+ )
+ elif plugin_name:
+ con.execute(
+ "insert into tracer (file_id, tracer) values (?, ?)",
+ (file_id, plugin_name)
+ )
+
+ def touch_file(self, filename, plugin_name=""):
+ """Ensure that `filename` appears in the data, empty if needed.
+
+ `plugin_name` is the name of the plugin responsible for this file. It is used
+ to associate the right filereporter, etc.
+ """
+ if self._debug.should('dataop'):
+ self._debug.write("Touching %r" % (filename,))
+ self._start_using()
+ if not self._has_arcs and not self._has_lines:
+ raise CoverageException("Can't touch files in an empty CoverageData")
+
+ self._file_id(filename, add=True)
+ if plugin_name:
+ # Set the tracer for this file
+ self.add_file_tracers({filename: plugin_name})
+
+ def update(self, other_data, aliases=None):
+ """Update this data with data from several other :class:`CoverageData` instances.
+
+ If `aliases` is provided, it's a `PathAliases` object that is used to
+ re-map paths to match the local machine's.
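+
+        A combining sketch (the suffixes are illustrative)::
+
+            combined = CoverageData()
+            combined.update(CoverageData(suffix="machine1"))
+            combined.update(CoverageData(suffix="machine2"))
+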
+ """
+ if self._debug.should('dataop'):
+ self._debug.write("Updating with data from %r" % (
+ getattr(other_data, '_filename', '???'),
+ ))
+ if self._has_lines and other_data._has_arcs:
+ raise CoverageException("Can't combine arc data with line data")
+ if self._has_arcs and other_data._has_lines:
+ raise CoverageException("Can't combine line data with arc data")
+
+ aliases = aliases or PathAliases()
+
+ # Force the database we're writing to to exist before we start nesting
+ # contexts.
+ self._start_using()
+
+        # Collect all arcs, lines, and tracers from the other data file.
+ other_data.read()
+ with other_data._connect() as conn:
+ # Get files data.
+ cur = conn.execute('select path from file')
+ files = {path: aliases.map(path) for (path,) in cur}
+ cur.close()
+
+ # Get contexts data.
+ cur = conn.execute('select context from context')
+ contexts = [context for (context,) in cur]
+ cur.close()
+
+ # Get arc data.
+ cur = conn.execute(
+ 'select file.path, context.context, arc.fromno, arc.tono '
+ 'from arc '
+ 'inner join file on file.id = arc.file_id '
+ 'inner join context on context.id = arc.context_id'
+ )
+ arcs = [(files[path], context, fromno, tono) for (path, context, fromno, tono) in cur]
+ cur.close()
+
+ # Get line data.
+ cur = conn.execute(
+ 'select file.path, context.context, line_bits.numbits '
+ 'from line_bits '
+ 'inner join file on file.id = line_bits.file_id '
+ 'inner join context on context.id = line_bits.context_id'
+ )
+ lines = {
+ (files[path], context): numbits
+ for (path, context, numbits) in cur
+ }
+ cur.close()
+
+ # Get tracer data.
+ cur = conn.execute(
+ 'select file.path, tracer '
+ 'from tracer '
+ 'inner join file on file.id = tracer.file_id'
+ )
+ tracers = {files[path]: tracer for (path, tracer) in cur}
+ cur.close()
+
+ with self._connect() as conn:
+ conn.con.isolation_level = 'IMMEDIATE'
+
+ # Get all tracers in the DB. Files not in the tracers are assumed
+ # to have an empty string tracer. Since Sqlite does not support
+ # full outer joins, we have to make two queries to fill the
+ # dictionary.
+ this_tracers = {path: '' for path, in conn.execute('select path from file')}
+ this_tracers.update({
+ aliases.map(path): tracer
+ for path, tracer in conn.execute(
+ 'select file.path, tracer from tracer '
+ 'inner join file on file.id = tracer.file_id'
+ )
+ })
+
+ # Create all file and context rows in the DB.
+ conn.executemany(
+ 'insert or ignore into file (path) values (?)',
+ ((file,) for file in files.values())
+ )
+ file_ids = {
+ path: id
+ for id, path in conn.execute('select id, path from file')
+ }
+ conn.executemany(
+ 'insert or ignore into context (context) values (?)',
+ ((context,) for context in contexts)
+ )
+ context_ids = {
+ context: id
+ for id, context in conn.execute('select id, context from context')
+ }
+
+            # Prepare tracers and fail if a conflict is found.
+            # this_tracers is used to check consistency with the existing
+            # tracer data, and tracer_map tracks the tracers to be inserted.
+ tracer_map = {}
+ for path in files.values():
+ this_tracer = this_tracers.get(path)
+ other_tracer = tracers.get(path, '')
+ # If there is no tracer, there is always the None tracer.
+ if this_tracer is not None and this_tracer != other_tracer:
+ raise CoverageException(
+ "Conflicting file tracer name for '%s': %r vs %r" % (
+ path, this_tracer, other_tracer
+ )
+ )
+ tracer_map[path] = other_tracer
+
+ # Prepare arc and line rows to be inserted by converting the file
+ # and context strings with integer ids. Then use the efficient
+ # `executemany()` to insert all rows at once.
+ arc_rows = (
+ (file_ids[file], context_ids[context], fromno, tono)
+ for file, context, fromno, tono in arcs
+ )
+
+ # Get line data.
+ cur = conn.execute(
+ 'select file.path, context.context, line_bits.numbits '
+ 'from line_bits '
+ 'inner join file on file.id = line_bits.file_id '
+ 'inner join context on context.id = line_bits.context_id'
+ )
+ for path, context, numbits in cur:
+ key = (aliases.map(path), context)
+ if key in lines:
+ numbits = numbits_union(lines[key], numbits)
+ lines[key] = numbits
+ cur.close()
+
+ if arcs:
+ self._choose_lines_or_arcs(arcs=True)
+
+ # Write the combined data.
+ conn.executemany(
+ 'insert or ignore into arc '
+ '(file_id, context_id, fromno, tono) values (?, ?, ?, ?)',
+ arc_rows
+ )
+
+ if lines:
+ self._choose_lines_or_arcs(lines=True)
+ conn.execute("delete from line_bits")
+ conn.executemany(
+ "insert into line_bits "
+ "(file_id, context_id, numbits) values (?, ?, ?)",
+ [
+ (file_ids[file], context_ids[context], numbits)
+ for (file, context), numbits in lines.items()
+ ]
+ )
+ conn.executemany(
+ 'insert or ignore into tracer (file_id, tracer) values (?, ?)',
+ ((file_ids[filename], tracer) for filename, tracer in tracer_map.items())
+ )
+
+ # Update all internal cache data.
+ self._reset()
+ self.read()
+
+ def erase(self, parallel=False):
+ """Erase the data in this object.
+
+ If `parallel` is true, then also deletes data files created from the
+ basename by parallel-mode.
+
+ """
+ self._reset()
+ if self._no_disk:
+ return
+ if self._debug.should('dataio'):
+ self._debug.write("Erasing data file {!r}".format(self._filename))
+ file_be_gone(self._filename)
+ if parallel:
+ data_dir, local = os.path.split(self._filename)
+ localdot = local + '.*'
+ pattern = os.path.join(os.path.abspath(data_dir), localdot)
+ for filename in glob.glob(pattern):
+ if self._debug.should('dataio'):
+ self._debug.write("Erasing parallel data file {!r}".format(filename))
+ file_be_gone(filename)
+
+ def read(self):
+ """Start using an existing data file."""
+ with self._connect(): # TODO: doesn't look right
+ self._have_used = True
+
+ def write(self):
+ """Ensure the data is written to the data file."""
+ pass
+
+ def _start_using(self):
+ """Call this before using the database at all."""
+ if self._pid != os.getpid():
+ # Looks like we forked! Have to start a new data file.
+ self._reset()
+ self._choose_filename()
+ self._pid = os.getpid()
+ if not self._have_used:
+ self.erase()
+ self._have_used = True
+
+ def has_arcs(self):
+ """Does the database have arcs (True) or lines (False)."""
+ return bool(self._has_arcs)
+
+ def measured_files(self):
+ """A set of all files that had been measured."""
+ return set(self._file_map)
+
+ def measured_contexts(self):
+ """A set of all contexts that have been measured.
+
+ .. versionadded:: 5.0
+
+ """
+ self._start_using()
+ with self._connect() as con:
+ contexts = set(row[0] for row in con.execute("select distinct(context) from context"))
+ return contexts
+
+ def file_tracer(self, filename):
+ """Get the plugin name of the file tracer for a file.
+
+ Returns the name of the plugin that handles this file. If the file was
+ measured, but didn't use a plugin, then "" is returned. If the file
+ was not measured, then None is returned.
+
+ """
+ self._start_using()
+ with self._connect() as con:
+ file_id = self._file_id(filename)
+ if file_id is None:
+ return None
+ row = con.execute_one("select tracer from tracer where file_id = ?", (file_id,))
+ if row is not None:
+ return row[0] or ""
+ return "" # File was measured, but no tracer associated.
+
+ def set_query_context(self, context):
+ """Set a context for subsequent querying.
+
+ The next :meth:`lines`, :meth:`arcs`, or :meth:`contexts_by_lineno`
+ calls will be limited to only one context. `context` is a string which
+ must match a context exactly. If it does not, no exception is raised,
+ but queries will return no data.
+
+ .. versionadded:: 5.0
+
+ """
+ self._start_using()
+ with self._connect() as con:
+ cur = con.execute("select id from context where context = ?", (context,))
+ self._query_context_ids = [row[0] for row in cur.fetchall()]
+
+ def set_query_contexts(self, contexts):
+ """Set a number of contexts for subsequent querying.
+
+ The next :meth:`lines`, :meth:`arcs`, or :meth:`contexts_by_lineno`
+ calls will be limited to the specified contexts. `contexts` is a list
+ of Python regular expressions. Contexts will be matched using
+ :func:`re.search <python:re.search>`. Data will be included in query
+ results if they are part of any of the contexts matched.
+
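+        For example (the patterns are illustrative)::
+
+            data.set_query_contexts(["test_one", "test_.*_slow"])
+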
+ .. versionadded:: 5.0
+
+ """
+ self._start_using()
+ if contexts:
+ with self._connect() as con:
+ context_clause = ' or '.join(['context regexp ?'] * len(contexts))
+ cur = con.execute("select id from context where " + context_clause, contexts)
+ self._query_context_ids = [row[0] for row in cur.fetchall()]
+ else:
+ self._query_context_ids = None
+
+ def lines(self, filename):
+ """Get the list of lines executed for a file.
+
+ If the file was not measured, returns None. A file might be measured,
+ and have no lines executed, in which case an empty list is returned.
+
+ If the file was executed, returns a list of integers, the line numbers
+ executed in the file. The list is in no particular order.
+
+ """
+ self._start_using()
+ if self.has_arcs():
+ arcs = self.arcs(filename)
+ if arcs is not None:
+ all_lines = itertools.chain.from_iterable(arcs)
+ return list(set(l for l in all_lines if l > 0))
+
+ with self._connect() as con:
+ file_id = self._file_id(filename)
+ if file_id is None:
+ return None
+ else:
+ query = "select numbits from line_bits where file_id = ?"
+ data = [file_id]
+ if self._query_context_ids is not None:
+ ids_array = ', '.join('?' * len(self._query_context_ids))
+ query += " and context_id in (" + ids_array + ")"
+ data += self._query_context_ids
+ bitmaps = list(con.execute(query, data))
+ nums = set()
+ for row in bitmaps:
+ nums.update(numbits_to_nums(row[0]))
+ return list(nums)
+
+ def arcs(self, filename):
+ """Get the list of arcs executed for a file.
+
+ If the file was not measured, returns None. A file might be measured,
+ and have no arcs executed, in which case an empty list is returned.
+
+ If the file was executed, returns a list of 2-tuples of integers. Each
+ pair is a starting line number and an ending line number for a
+ transition from one line to another. The list is in no particular
+ order.
+
+ Negative numbers have special meaning. If the starting line number is
+ -N, it represents an entry to the code object that starts at line N.
+    If the ending line number is -N, it's an exit from the code object that
+ starts at line N.
+
+ """
+ self._start_using()
+ with self._connect() as con:
+ file_id = self._file_id(filename)
+ if file_id is None:
+ return None
+ else:
+ query = "select distinct fromno, tono from arc where file_id = ?"
+ data = [file_id]
+ if self._query_context_ids is not None:
+ ids_array = ', '.join('?' * len(self._query_context_ids))
+ query += " and context_id in (" + ids_array + ")"
+ data += self._query_context_ids
+ arcs = con.execute(query, data)
+ return list(arcs)
+
+ def contexts_by_lineno(self, filename):
+ """Get the contexts for each line in a file.
+
+ Returns:
+ A dict mapping line numbers to a list of context names.
+
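+            For example (illustrative): ``{1: [""], 7: ["test_a", "test_b"]}``,
+            where "" is the default (empty) context.
+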
+ .. versionadded:: 5.0
+
+ """
+ lineno_contexts_map = collections.defaultdict(list)
+ self._start_using()
+ with self._connect() as con:
+ file_id = self._file_id(filename)
+ if file_id is None:
+ return lineno_contexts_map
+ if self.has_arcs():
+ query = (
+ "select arc.fromno, arc.tono, context.context "
+ "from arc, context "
+ "where arc.file_id = ? and arc.context_id = context.id"
+ )
+ data = [file_id]
+ if self._query_context_ids is not None:
+ ids_array = ', '.join('?' * len(self._query_context_ids))
+ query += " and arc.context_id in (" + ids_array + ")"
+ data += self._query_context_ids
+ for fromno, tono, context in con.execute(query, data):
+ if context not in lineno_contexts_map[fromno]:
+ lineno_contexts_map[fromno].append(context)
+ if context not in lineno_contexts_map[tono]:
+ lineno_contexts_map[tono].append(context)
+ else:
+ query = (
+ "select l.numbits, c.context from line_bits l, context c "
+ "where l.context_id = c.id "
+ "and file_id = ?"
+ )
+ data = [file_id]
+ if self._query_context_ids is not None:
+ ids_array = ', '.join('?' * len(self._query_context_ids))
+ query += " and l.context_id in (" + ids_array + ")"
+ data += self._query_context_ids
+ for numbits, context in con.execute(query, data):
+ for lineno in numbits_to_nums(numbits):
+ lineno_contexts_map[lineno].append(context)
+ return lineno_contexts_map
+
+ @classmethod
+ def sys_info(cls):
+ """Our information for `Coverage.sys_info`.
+
+ Returns a list of (key, value) pairs.
+
+ """
+ with SqliteDb(":memory:", debug=NoDebugging()) as db:
+ temp_store = [row[0] for row in db.execute("pragma temp_store")]
+ compile_options = [row[0] for row in db.execute("pragma compile_options")]
+
+ return [
+ ('sqlite3_version', sqlite3.version),
+ ('sqlite3_sqlite_version', sqlite3.sqlite_version),
+ ('sqlite3_temp_store', temp_store),
+ ('sqlite3_compile_options', compile_options),
+ ]
+
+
+class SqliteDb(SimpleReprMixin):
+ """A simple abstraction over a SQLite database.
+
+ Use as a context manager, then you can use it like a
+ :class:`python:sqlite3.Connection` object::
+
+ with SqliteDb(filename, debug_control) as db:
+ db.execute("insert into schema (version) values (?)", (SCHEMA_VERSION,))
+
+ """
+ def __init__(self, filename, debug):
+ self.debug = debug if debug.should('sql') else None
+ self.filename = filename
+ self.nest = 0
+ self.con = None
+
+ def _connect(self):
+ """Connect to the db and do universal initialization."""
+ if self.con is not None:
+ return
+
+ # SQLite on Windows on py2 won't open a file if the filename argument
+ # has non-ascii characters in it. Opening a relative file name avoids
+ # a problem if the current directory has non-ascii.
+ filename = self.filename
+ if env.WINDOWS and env.PY2:
+ try:
+ filename = os.path.relpath(self.filename)
+ except ValueError:
+ # ValueError can be raised under Windows when os.getcwd() returns a
+ # folder from a different drive than the drive of self.filename in
+ # which case we keep the original value of self.filename unchanged,
+ # hoping that we won't face the non-ascii directory problem.
+ pass
+
+ # It can happen that Python switches threads while the tracer writes
+ # data. The second thread will also try to write to the data,
+ # effectively causing a nested context. However, given the idempotent
+ # nature of the tracer operations, sharing a connection among threads
+ # is not a problem.
+ if self.debug:
+ self.debug.write("Connecting to {!r}".format(self.filename))
+ self.con = sqlite3.connect(filename, check_same_thread=False)
+ self.con.create_function('REGEXP', 2, _regexp)
+
+ # This pragma makes writing faster. It disables rollbacks, but we never need them.
+ # PyPy needs the .close() calls here, or sqlite gets twisted up:
+ # https://bitbucket.org/pypy/pypy/issues/2872/default-isolation-mode-is-different-on
+ self.execute("pragma journal_mode=off").close()
+ # This pragma makes writing faster.
+ self.execute("pragma synchronous=off").close()
+
+ def close(self):
+ """If needed, close the connection."""
+ if self.con is not None and self.filename != ":memory:":
+ self.con.close()
+ self.con = None
+
+ def __enter__(self):
+ if self.nest == 0:
+ self._connect()
+ self.con.__enter__()
+ self.nest += 1
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.nest -= 1
+ if self.nest == 0:
+ try:
+ self.con.__exit__(exc_type, exc_value, traceback)
+ self.close()
+ except Exception as exc:
+ if self.debug:
+ self.debug.write("EXCEPTION from __exit__: {}".format(exc))
+ raise
+
+ def execute(self, sql, parameters=()):
+ """Same as :meth:`python:sqlite3.Connection.execute`."""
+ if self.debug:
+ tail = " with {!r}".format(parameters) if parameters else ""
+ self.debug.write("Executing {!r}{}".format(sql, tail))
+ try:
+ return self.con.execute(sql, parameters)
+ except sqlite3.Error as exc:
+ msg = str(exc)
+ try:
+ # `execute` is the first thing we do with the database, so try
+ # hard to provide useful hints if something goes wrong now.
+ with open(self.filename, "rb") as bad_file:
+ cov4_sig = b"!coverage.py: This is a private format"
+ if bad_file.read(len(cov4_sig)) == cov4_sig:
+ msg = (
+ "Looks like a coverage 4.x data file. "
+ "Are you mixing versions of coverage?"
+ )
+ except Exception:
+ pass
+ if self.debug:
+ self.debug.write("EXCEPTION from execute: {}".format(msg))
+ raise CoverageException("Couldn't use data file {!r}: {}".format(self.filename, msg))
+
+ def execute_one(self, sql, parameters=()):
+ """Execute a statement and return the one row that results.
+
+ This is like execute(sql, parameters).fetchone(), except it is
+ correct in reading the entire result set. This will raise an
+ exception if more than one row results.
+
+ Returns a row, or None if there were no rows.
+ """
+ rows = list(self.execute(sql, parameters))
+ if len(rows) == 0:
+ return None
+ elif len(rows) == 1:
+ return rows[0]
+ else:
+ raise CoverageException("Sql {!r} shouldn't return {} rows".format(sql, len(rows)))
+
+ def executemany(self, sql, data):
+ """Same as :meth:`python:sqlite3.Connection.executemany`."""
+ if self.debug:
+ data = list(data)
+ self.debug.write("Executing many {!r} with {} rows".format(sql, len(data)))
+ return self.con.executemany(sql, data)
+
+ def executescript(self, script):
+ """Same as :meth:`python:sqlite3.Connection.executescript`."""
+ if self.debug:
+ self.debug.write("Executing script with {} chars: {}".format(
+ len(script), clipped_repr(script, 100),
+ ))
+ self.con.executescript(script)
+
+ def dump(self):
+ """Return a multi-line string, the SQL dump of the database."""
+ return "\n".join(self.con.iterdump())
+
+
+def _regexp(pattern, text):
+    """A regexp function for SQLite; ``X REGEXP Y`` calls ``regexp(Y, X)``."""
+    return re.search(pattern, text) is not None
diff --git a/third_party/python/coverage/coverage/summary.py b/third_party/python/coverage/coverage/summary.py
new file mode 100644
index 0000000000..97d9fff075
--- /dev/null
+++ b/third_party/python/coverage/coverage/summary.py
@@ -0,0 +1,155 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Summary reporting"""
+
+import sys
+
+from coverage import env
+from coverage.report import get_analysis_to_report
+from coverage.results import Numbers
+from coverage.misc import NotPython, CoverageException, output_encoding
+
+
+class SummaryReporter(object):
+ """A reporter for writing the summary report."""
+
+ def __init__(self, coverage):
+ self.coverage = coverage
+ self.config = self.coverage.config
+ self.branches = coverage.get_data().has_arcs()
+ self.outfile = None
+ self.fr_analysis = []
+ self.skipped_count = 0
+ self.empty_count = 0
+ self.total = Numbers()
+ self.fmt_err = u"%s %s: %s"
+
+ def writeout(self, line):
+ """Write a line to the output, adding a newline."""
+ if env.PY2:
+ line = line.encode(output_encoding())
+ self.outfile.write(line.rstrip())
+ self.outfile.write("\n")
+
+ def report(self, morfs, outfile=None):
+ """Writes a report summarizing coverage statistics per module.
+
+ `outfile` is a file object to write the summary to. It must be opened
+ for native strings (bytes on Python 2, Unicode on Python 3).
+
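+        The finished report has this general shape (the numbers here are
+        illustrative)::
+
+            Name       Stmts   Miss  Cover
+            ------------------------------
+            mod_a.py      20      4    80%
+            mod_b.py      10      0   100%
+            ------------------------------
+            TOTAL         30      4    87%
+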
+ """
+ self.outfile = outfile or sys.stdout
+
+ self.coverage.get_data().set_query_contexts(self.config.report_contexts)
+ for fr, analysis in get_analysis_to_report(self.coverage, morfs):
+ self.report_one_file(fr, analysis)
+
+ # Prepare the formatting strings, header, and column sorting.
+ max_name = max([len(fr.relative_filename()) for (fr, analysis) in self.fr_analysis] + [5])
+ fmt_name = u"%%- %ds " % max_name
+ fmt_skip_covered = u"\n%s file%s skipped due to complete coverage."
+ fmt_skip_empty = u"\n%s empty file%s skipped."
+
+ header = (fmt_name % "Name") + u" Stmts Miss"
+ fmt_coverage = fmt_name + u"%6d %6d"
+ if self.branches:
+ header += u" Branch BrPart"
+ fmt_coverage += u" %6d %6d"
+ width100 = Numbers.pc_str_width()
+ header += u"%*s" % (width100+4, "Cover")
+ fmt_coverage += u"%%%ds%%%%" % (width100+3,)
+ if self.config.show_missing:
+ header += u" Missing"
+ fmt_coverage += u" %s"
+ rule = u"-" * len(header)
+
+ column_order = dict(name=0, stmts=1, miss=2, cover=-1)
+ if self.branches:
+ column_order.update(dict(branch=3, brpart=4))
+
+ # Write the header
+ self.writeout(header)
+ self.writeout(rule)
+
+ # `lines` is a list of pairs, (line text, line values). The line text
+ # is a string that will be printed, and line values is a tuple of
+ # sortable values.
+ lines = []
+
+ for (fr, analysis) in self.fr_analysis:
+ try:
+ nums = analysis.numbers
+
+ args = (fr.relative_filename(), nums.n_statements, nums.n_missing)
+ if self.branches:
+ args += (nums.n_branches, nums.n_partial_branches)
+ args += (nums.pc_covered_str,)
+ if self.config.show_missing:
+ args += (analysis.missing_formatted(branches=True),)
+ text = fmt_coverage % args
+ # Add numeric percent coverage so that sorting makes sense.
+ args += (nums.pc_covered,)
+ lines.append((text, args))
+ except Exception:
+ report_it = not self.config.ignore_errors
+ if report_it:
+ typ, msg = sys.exc_info()[:2]
+ # NotPython is only raised by PythonFileReporter, which has a
+ # should_be_python() method.
+ if typ is NotPython and not fr.should_be_python():
+ report_it = False
+ if report_it:
+ self.writeout(self.fmt_err % (fr.relative_filename(), typ.__name__, msg))
+
+ # Sort the lines and write them out.
+ if getattr(self.config, 'sort', None):
+ position = column_order.get(self.config.sort.lower())
+ if position is None:
+ raise CoverageException("Invalid sorting option: {!r}".format(self.config.sort))
+ lines.sort(key=lambda l: (l[1][position], l[0]))
+
+ for line in lines:
+ self.writeout(line[0])
+
+        # Write a TOTAL line if we had more than one file.
+ if self.total.n_files > 1:
+ self.writeout(rule)
+ args = ("TOTAL", self.total.n_statements, self.total.n_missing)
+ if self.branches:
+ args += (self.total.n_branches, self.total.n_partial_branches)
+ args += (self.total.pc_covered_str,)
+ if self.config.show_missing:
+ args += ("",)
+ self.writeout(fmt_coverage % args)
+
+ # Write other final lines.
+ if not self.total.n_files and not self.skipped_count:
+ raise CoverageException("No data to report.")
+
+ if self.config.skip_covered and self.skipped_count:
+ self.writeout(
+ fmt_skip_covered % (self.skipped_count, 's' if self.skipped_count > 1 else '')
+ )
+ if self.config.skip_empty and self.empty_count:
+ self.writeout(
+ fmt_skip_empty % (self.empty_count, 's' if self.empty_count > 1 else '')
+ )
+
+ return self.total.n_statements and self.total.pc_covered
+
+ def report_one_file(self, fr, analysis):
+ """Report on just one file, the callback from report()."""
+ nums = analysis.numbers
+ self.total += nums
+
+ no_missing_lines = (nums.n_missing == 0)
+ no_missing_branches = (nums.n_partial_branches == 0)
+ if self.config.skip_covered and no_missing_lines and no_missing_branches:
+ # Don't report on 100% files.
+ self.skipped_count += 1
+ elif self.config.skip_empty and nums.n_statements == 0:
+ # Don't report on empty files.
+ self.empty_count += 1
+ else:
+ self.fr_analysis.append((fr, analysis))
diff --git a/third_party/python/coverage/coverage/templite.py b/third_party/python/coverage/coverage/templite.py
new file mode 100644
index 0000000000..7d4024e0af
--- /dev/null
+++ b/third_party/python/coverage/coverage/templite.py
@@ -0,0 +1,302 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""A simple Python template renderer, for a nano-subset of Django syntax.
+
+For a detailed discussion of this code, see this chapter from 500 Lines:
+http://aosabook.org/en/500L/a-template-engine.html
+
+"""
+
+# Coincidentally named the same as http://code.activestate.com/recipes/496702/
+
+import re
+
+from coverage import env
+
+
+class TempliteSyntaxError(ValueError):
+ """Raised when a template has a syntax error."""
+ pass
+
+
+class TempliteValueError(ValueError):
+ """Raised when an expression won't evaluate in a template."""
+ pass
+
+
+class CodeBuilder(object):
+ """Build source code conveniently."""
+
+ def __init__(self, indent=0):
+ self.code = []
+ self.indent_level = indent
+
+ def __str__(self):
+ return "".join(str(c) for c in self.code)
+
+ def add_line(self, line):
+ """Add a line of source to the code.
+
+        Indentation and newline will be added for you; don't provide them.
+
+ """
+ self.code.extend([" " * self.indent_level, line, "\n"])
+
+ def add_section(self):
+ """Add a section, a sub-CodeBuilder."""
+ section = CodeBuilder(self.indent_level)
+ self.code.append(section)
+ return section
+
+ INDENT_STEP = 4 # PEP8 says so!
+
+ def indent(self):
+ """Increase the current indent for following lines."""
+ self.indent_level += self.INDENT_STEP
+
+ def dedent(self):
+ """Decrease the current indent for following lines."""
+ self.indent_level -= self.INDENT_STEP
+
+ def get_globals(self):
+ """Execute the code, and return a dict of globals it defines."""
+ # A check that the caller really finished all the blocks they started.
+ assert self.indent_level == 0
+ # Get the Python source as a single string.
+ python_source = str(self)
+ # Execute the source, defining globals, and return them.
+ global_namespace = {}
+ exec(python_source, global_namespace)
+ return global_namespace
+
+
+class Templite(object):
+ """A simple template renderer, for a nano-subset of Django syntax.
+
+ Supported constructs are extended variable access::
+
+ {{var.modifier.modifier|filter|filter}}
+
+ loops::
+
+ {% for var in list %}...{% endfor %}
+
+ and ifs::
+
+ {% if var %}...{% endif %}
+
+ Comments are within curly-hash markers::
+
+ {# This will be ignored #}
+
+ Lines between `{% joined %}` and `{% endjoined %}` will have lines stripped
+ and joined. Be careful, this could join words together!
+
+ Any of these constructs can have a hyphen at the end (`-}}`, `-%}`, `-#}`),
+ which will collapse the whitespace following the tag.
+
+ Construct a Templite with the template text, then use `render` against a
+ dictionary context to create a finished string::
+
+ templite = Templite('''
+ <h1>Hello {{name|upper}}!</h1>
+ {% for topic in topics %}
+ <p>You are interested in {{topic}}.</p>
+            {% endfor %}
+ ''',
+ {'upper': str.upper},
+ )
+ text = templite.render({
+ 'name': "Ned",
+ 'topics': ['Python', 'Geometry', 'Juggling'],
+ })
+
+ """
+ def __init__(self, text, *contexts):
+ """Construct a Templite with the given `text`.
+
+ `contexts` are dictionaries of values to use for future renderings.
+ These are good for filters and global values.
+
+ """
+ self.context = {}
+ for context in contexts:
+ self.context.update(context)
+
+ self.all_vars = set()
+ self.loop_vars = set()
+
+ # We construct a function in source form, then compile it and hold onto
+ # it, and execute it to render the template.
+ code = CodeBuilder()
+
+ code.add_line("def render_function(context, do_dots):")
+ code.indent()
+ vars_code = code.add_section()
+ code.add_line("result = []")
+ code.add_line("append_result = result.append")
+ code.add_line("extend_result = result.extend")
+ if env.PY2:
+ code.add_line("to_str = unicode")
+ else:
+ code.add_line("to_str = str")
+
+ buffered = []
+
+ def flush_output():
+ """Force `buffered` to the code builder."""
+ if len(buffered) == 1:
+ code.add_line("append_result(%s)" % buffered[0])
+ elif len(buffered) > 1:
+ code.add_line("extend_result([%s])" % ", ".join(buffered))
+ del buffered[:]
+
+ ops_stack = []
+
+ # Split the text to form a list of tokens.
+ tokens = re.split(r"(?s)({{.*?}}|{%.*?%}|{#.*?#})", text)
+
+ squash = in_joined = False
+
+ for token in tokens:
+ if token.startswith('{'):
+ start, end = 2, -2
+ squash = (token[-3] == '-')
+ if squash:
+ end = -3
+
+ if token.startswith('{#'):
+ # Comment: ignore it and move on.
+ continue
+ elif token.startswith('{{'):
+ # An expression to evaluate.
+ expr = self._expr_code(token[start:end].strip())
+ buffered.append("to_str(%s)" % expr)
+ else:
+ # token.startswith('{%')
+ # Action tag: split into words and parse further.
+ flush_output()
+
+ words = token[start:end].strip().split()
+ if words[0] == 'if':
+ # An if statement: evaluate the expression to determine if.
+ if len(words) != 2:
+ self._syntax_error("Don't understand if", token)
+ ops_stack.append('if')
+ code.add_line("if %s:" % self._expr_code(words[1]))
+ code.indent()
+ elif words[0] == 'for':
+ # A loop: iterate over expression result.
+ if len(words) != 4 or words[2] != 'in':
+ self._syntax_error("Don't understand for", token)
+ ops_stack.append('for')
+ self._variable(words[1], self.loop_vars)
+ code.add_line(
+ "for c_%s in %s:" % (
+ words[1],
+ self._expr_code(words[3])
+ )
+ )
+ code.indent()
+ elif words[0] == 'joined':
+ ops_stack.append('joined')
+ in_joined = True
+ elif words[0].startswith('end'):
+ # Endsomething. Pop the ops stack.
+ if len(words) != 1:
+ self._syntax_error("Don't understand end", token)
+ end_what = words[0][3:]
+ if not ops_stack:
+ self._syntax_error("Too many ends", token)
+ start_what = ops_stack.pop()
+ if start_what != end_what:
+ self._syntax_error("Mismatched end tag", end_what)
+ if end_what == 'joined':
+ in_joined = False
+ else:
+ code.dedent()
+ else:
+ self._syntax_error("Don't understand tag", words[0])
+ else:
+ # Literal content. If it isn't empty, output it.
+ if in_joined:
+ token = re.sub(r"\s*\n\s*", "", token.strip())
+ elif squash:
+ token = token.lstrip()
+ if token:
+ buffered.append(repr(token))
+
+ if ops_stack:
+ self._syntax_error("Unmatched action tag", ops_stack[-1])
+
+ flush_output()
+
+ for var_name in self.all_vars - self.loop_vars:
+ vars_code.add_line("c_%s = context[%r]" % (var_name, var_name))
+
+ code.add_line('return "".join(result)')
+ code.dedent()
+ self._render_function = code.get_globals()['render_function']
+
+ def _expr_code(self, expr):
+ """Generate a Python expression for `expr`."""
+ if "|" in expr:
+ pipes = expr.split("|")
+ code = self._expr_code(pipes[0])
+ for func in pipes[1:]:
+ self._variable(func, self.all_vars)
+ code = "c_%s(%s)" % (func, code)
+ elif "." in expr:
+ dots = expr.split(".")
+ code = self._expr_code(dots[0])
+ args = ", ".join(repr(d) for d in dots[1:])
+ code = "do_dots(%s, %s)" % (code, args)
+ else:
+ self._variable(expr, self.all_vars)
+ code = "c_%s" % expr
+ return code
+
+ def _syntax_error(self, msg, thing):
+ """Raise a syntax error using `msg`, and showing `thing`."""
+ raise TempliteSyntaxError("%s: %r" % (msg, thing))
+
+ def _variable(self, name, vars_set):
+ """Track that `name` is used as a variable.
+
+ Adds the name to `vars_set`, a set of variable names.
+
+        Raises a syntax error if `name` is not a valid name.
+
+ """
+ if not re.match(r"[_a-zA-Z][_a-zA-Z0-9]*$", name):
+ self._syntax_error("Not a valid name", name)
+ vars_set.add(name)
+
+ def render(self, context=None):
+ """Render this template by applying it to `context`.
+
+ `context` is a dictionary of values to use in this rendering.
+
+ """
+ # Make the complete context we'll use.
+ render_context = dict(self.context)
+ if context:
+ render_context.update(context)
+ return self._render_function(render_context, self._do_dots)
+
+ def _do_dots(self, value, *dots):
+ """Evaluate dotted expressions at run-time."""
+ for dot in dots:
+ try:
+ value = getattr(value, dot)
+ except AttributeError:
+ try:
+ value = value[dot]
+ except (TypeError, KeyError):
+ raise TempliteValueError(
+ "Couldn't evaluate %r.%s" % (value, dot)
+ )
+ if callable(value):
+ value = value()
+ return value
diff --git a/third_party/python/coverage/coverage/tomlconfig.py b/third_party/python/coverage/coverage/tomlconfig.py
new file mode 100644
index 0000000000..25542f99ef
--- /dev/null
+++ b/third_party/python/coverage/coverage/tomlconfig.py
@@ -0,0 +1,164 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""TOML configuration support for coverage.py"""
+
+import io
+import os
+import re
+
+from coverage import env
+from coverage.backward import configparser, path_types
+from coverage.misc import CoverageException, substitute_variables
+
+
+class TomlDecodeError(Exception):
+ """An exception class that exists even when toml isn't installed."""
+ pass
+
+
+class TomlConfigParser:
+ """TOML file reading with the interface of HandyConfigParser."""
+
+ # This class has the same interface as config.HandyConfigParser, no
+ # need for docstrings.
+ # pylint: disable=missing-function-docstring
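+    #
+    # Settings are looked up under the "tool.coverage." prefix, so an
+    # illustrative pyproject.toml fragment such as:
+    #
+    #   [tool.coverage.run]
+    #   branch = true
+    #
+    # corresponds to the [run] section of a .coveragerc file.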
+
+ def __init__(self, our_file):
+ self.our_file = our_file
+ self.data = None
+
+ def read(self, filenames):
+ from coverage.optional import toml
+
+ # RawConfigParser takes a filename or list of filenames, but we only
+ # ever call this with a single filename.
+ assert isinstance(filenames, path_types)
+ filename = filenames
+ if env.PYVERSION >= (3, 6):
+ filename = os.fspath(filename)
+
+ try:
+ with io.open(filename, encoding='utf-8') as fp:
+ toml_text = fp.read()
+ except IOError:
+ return []
+ if toml:
+ toml_text = substitute_variables(toml_text, os.environ)
+ try:
+ self.data = toml.loads(toml_text)
+ except toml.TomlDecodeError as err:
+ raise TomlDecodeError(*err.args)
+ return [filename]
+ else:
+ has_toml = re.search(r"^\[tool\.coverage\.", toml_text, flags=re.MULTILINE)
+ if self.our_file or has_toml:
+ # Looks like they meant to read TOML, but we can't read it.
+ msg = "Can't read {!r} without TOML support. Install with [toml] extra"
+ raise CoverageException(msg.format(filename))
+ return []
+
+ def _get_section(self, section):
+ """Get a section from the data.
+
+ Arguments:
+ section (str): A section name, which can be dotted.
+
+ Returns:
+ name (str): the actual name of the section that was found, if any,
+ or None.
+ data (str): the dict of data in the section, or None if not found.
+
+ """
+ prefixes = ["tool.coverage."]
+ if self.our_file:
+ prefixes.append("")
+ for prefix in prefixes:
+ real_section = prefix + section
+ parts = real_section.split(".")
+ try:
+ data = self.data[parts[0]]
+ for part in parts[1:]:
+ data = data[part]
+ except KeyError:
+ continue
+ break
+ else:
+ return None, None
+ return real_section, data
+
+ def _get(self, section, option):
+ """Like .get, but returns the real section name and the value."""
+ name, data = self._get_section(section)
+ if data is None:
+ raise configparser.NoSectionError(section)
+ try:
+ return name, data[option]
+ except KeyError:
+ raise configparser.NoOptionError(option, name)
+
+ def has_option(self, section, option):
+ _, data = self._get_section(section)
+ if data is None:
+ return False
+ return option in data
+
+ def has_section(self, section):
+ name, _ = self._get_section(section)
+ return name
+
+ def options(self, section):
+ _, data = self._get_section(section)
+ if data is None:
+ raise configparser.NoSectionError(section)
+ return list(data.keys())
+
+ def get_section(self, section):
+ _, data = self._get_section(section)
+ return data
+
+ def get(self, section, option):
+ _, value = self._get(section, option)
+ return value
+
+ def _check_type(self, section, option, value, type_, type_desc):
+ if not isinstance(value, type_):
+ raise ValueError(
+ 'Option {!r} in section {!r} is not {}: {!r}'
+ .format(option, section, type_desc, value)
+ )
+
+ def getboolean(self, section, option):
+ name, value = self._get(section, option)
+ self._check_type(name, option, value, bool, "a boolean")
+ return value
+
+ def getlist(self, section, option):
+ name, values = self._get(section, option)
+ self._check_type(name, option, values, list, "a list")
+ return values
+
+ def getregexlist(self, section, option):
+ name, values = self._get(section, option)
+ self._check_type(name, option, values, list, "a list")
+ for value in values:
+ value = value.strip()
+ try:
+ re.compile(value)
+ except re.error as e:
+ raise CoverageException(
+ "Invalid [%s].%s value %r: %s" % (name, option, value, e)
+ )
+ return values
+
+ def getint(self, section, option):
+ name, value = self._get(section, option)
+ self._check_type(name, option, value, int, "an integer")
+ return value
+
+ def getfloat(self, section, option):
+ name, value = self._get(section, option)
+ if isinstance(value, int):
+ value = float(value)
+ self._check_type(name, option, value, float, "a float")
+ return value
diff --git a/third_party/python/coverage/coverage/version.py b/third_party/python/coverage/coverage/version.py
new file mode 100644
index 0000000000..8e72165d3b
--- /dev/null
+++ b/third_party/python/coverage/coverage/version.py
@@ -0,0 +1,33 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""The version and URL for coverage.py"""
+# This file is exec'ed in setup.py, don't import anything!
+
+# Same semantics as sys.version_info.
+version_info = (5, 1, 0, 'final', 0)
+
+
+def _make_version(major, minor, micro, releaselevel, serial):
+ """Create a readable version string from version_info tuple components."""
+ assert releaselevel in ['alpha', 'beta', 'candidate', 'final']
+ version = "%d.%d" % (major, minor)
+ if micro:
+ version += ".%d" % (micro,)
+ if releaselevel != 'final':
+ short = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}[releaselevel]
+ version += "%s%d" % (short, serial)
+ return version
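+
+# Illustrative values (not part of the upstream module):
+#   _make_version(5, 1, 0, 'final', 0) == "5.1"
+#   _make_version(5, 0, 2, 'beta', 3)  == "5.0.2b3"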
+
+
+def _make_url(major, minor, micro, releaselevel, serial):
+ """Make the URL people should start at for this version of coverage.py."""
+ url = "https://coverage.readthedocs.io"
+ if releaselevel != 'final':
+ # For pre-releases, use a version-specific URL.
+ url += "/en/coverage-" + _make_version(major, minor, micro, releaselevel, serial)
+ return url
+
+
+__version__ = _make_version(*version_info)
+__url__ = _make_url(*version_info)
diff --git a/third_party/python/coverage/coverage/xmlreport.py b/third_party/python/coverage/coverage/xmlreport.py
new file mode 100644
index 0000000000..ad44775f2f
--- /dev/null
+++ b/third_party/python/coverage/coverage/xmlreport.py
@@ -0,0 +1,230 @@
+# coding: utf-8
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""XML reporting for coverage.py"""
+
+import os
+import os.path
+import sys
+import time
+import xml.dom.minidom
+
+from coverage import env
+from coverage import __url__, __version__, files
+from coverage.backward import iitems
+from coverage.misc import isolate_module
+from coverage.report import get_analysis_to_report
+
+os = isolate_module(os)
+
+
+DTD_URL = 'https://raw.githubusercontent.com/cobertura/web/master/htdocs/xml/coverage-04.dtd'
+
+
+def rate(hit, num):
+ """Return the fraction of `hit`/`num`, as a string."""
+ if num == 0:
+ return "1"
+ else:
+ return "%.4g" % (float(hit) / num)
+
+
+class XmlReporter(object):
+ """A reporter for writing Cobertura-style XML coverage results."""
+
+ def __init__(self, coverage):
+ self.coverage = coverage
+ self.config = self.coverage.config
+
+ self.source_paths = set()
+ if self.config.source:
+ for src in self.config.source:
+ if os.path.exists(src):
+ if not self.config.relative_files:
+ src = files.canonical_filename(src)
+ self.source_paths.add(src)
+ self.packages = {}
+ self.xml_out = None
+
+ def report(self, morfs, outfile=None):
+ """Generate a Cobertura-compatible XML report for `morfs`.
+
+ `morfs` is a list of modules or file names.
+
+ `outfile` is a file object to write the XML to.
+
+ """
+ # Initial setup.
+ outfile = outfile or sys.stdout
+ has_arcs = self.coverage.get_data().has_arcs()
+
+ # Create the DOM that will store the data.
+ impl = xml.dom.minidom.getDOMImplementation()
+ self.xml_out = impl.createDocument(None, "coverage", None)
+
+ # Write header stuff.
+ xcoverage = self.xml_out.documentElement
+ xcoverage.setAttribute("version", __version__)
+ xcoverage.setAttribute("timestamp", str(int(time.time()*1000)))
+ xcoverage.appendChild(self.xml_out.createComment(
+ " Generated by coverage.py: %s " % __url__
+ ))
+ xcoverage.appendChild(self.xml_out.createComment(" Based on %s " % DTD_URL))
+
+ # Call xml_file for each file in the data.
+ for fr, analysis in get_analysis_to_report(self.coverage, morfs):
+ self.xml_file(fr, analysis, has_arcs)
+
+ xsources = self.xml_out.createElement("sources")
+ xcoverage.appendChild(xsources)
+
+ # Populate the XML DOM with the source info.
+ for path in sorted(self.source_paths):
+ xsource = self.xml_out.createElement("source")
+ xsources.appendChild(xsource)
+ txt = self.xml_out.createTextNode(path)
+ xsource.appendChild(txt)
+
+ lnum_tot, lhits_tot = 0, 0
+ bnum_tot, bhits_tot = 0, 0
+
+ xpackages = self.xml_out.createElement("packages")
+ xcoverage.appendChild(xpackages)
+
+ # Populate the XML DOM with the package info.
+ for pkg_name, pkg_data in sorted(iitems(self.packages)):
+ class_elts, lhits, lnum, bhits, bnum = pkg_data
+ xpackage = self.xml_out.createElement("package")
+ xpackages.appendChild(xpackage)
+ xclasses = self.xml_out.createElement("classes")
+ xpackage.appendChild(xclasses)
+ for _, class_elt in sorted(iitems(class_elts)):
+ xclasses.appendChild(class_elt)
+ xpackage.setAttribute("name", pkg_name.replace(os.sep, '.'))
+ xpackage.setAttribute("line-rate", rate(lhits, lnum))
+ if has_arcs:
+ branch_rate = rate(bhits, bnum)
+ else:
+ branch_rate = "0"
+ xpackage.setAttribute("branch-rate", branch_rate)
+ xpackage.setAttribute("complexity", "0")
+
+ lnum_tot += lnum
+ lhits_tot += lhits
+ bnum_tot += bnum
+ bhits_tot += bhits
+
+ xcoverage.setAttribute("lines-valid", str(lnum_tot))
+ xcoverage.setAttribute("lines-covered", str(lhits_tot))
+ xcoverage.setAttribute("line-rate", rate(lhits_tot, lnum_tot))
+ if has_arcs:
+ xcoverage.setAttribute("branches-valid", str(bnum_tot))
+ xcoverage.setAttribute("branches-covered", str(bhits_tot))
+ xcoverage.setAttribute("branch-rate", rate(bhits_tot, bnum_tot))
+ else:
+ xcoverage.setAttribute("branches-covered", "0")
+ xcoverage.setAttribute("branches-valid", "0")
+ xcoverage.setAttribute("branch-rate", "0")
+ xcoverage.setAttribute("complexity", "0")
+
+ # Write the output file.
+ outfile.write(serialize_xml(self.xml_out))
+
+ # Return the total percentage.
+ denom = lnum_tot + bnum_tot
+ if denom == 0:
+ pct = 0.0
+ else:
+ pct = 100.0 * (lhits_tot + bhits_tot) / denom
+ return pct
+
+ def xml_file(self, fr, analysis, has_arcs):
+ """Add to the XML report for a single file."""
+
+ # Create the 'lines' and 'package' XML elements, which
+ # are populated later. Note that a package == a directory.
+ filename = fr.filename.replace("\\", "/")
+ for source_path in self.source_paths:
+ source_path = files.canonical_filename(source_path)
+ if filename.startswith(source_path.replace("\\", "/") + "/"):
+ rel_name = filename[len(source_path)+1:]
+ break
+ else:
+ rel_name = fr.relative_filename()
+ self.source_paths.add(fr.filename[:-len(rel_name)].rstrip(r"\/"))
+
+ dirname = os.path.dirname(rel_name) or u"."
+ dirname = "/".join(dirname.split("/")[:self.config.xml_package_depth])
+ package_name = dirname.replace("/", ".")
+
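+ # Each package entry is [class_elts, lhits, lnum, bhits, bnum]: the class
+ # elements keyed by relative file name, plus the line/branch hit and total
+ # counts that report() rolls up per directory.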
+ package = self.packages.setdefault(package_name, [{}, 0, 0, 0, 0])
+
+ xclass = self.xml_out.createElement("class")
+
+ xclass.appendChild(self.xml_out.createElement("methods"))
+
+ xlines = self.xml_out.createElement("lines")
+ xclass.appendChild(xlines)
+
+ xclass.setAttribute("name", os.path.relpath(rel_name, dirname))
+ xclass.setAttribute("filename", rel_name.replace("\\", "/"))
+ xclass.setAttribute("complexity", "0")
+
+ branch_stats = analysis.branch_stats()
+ missing_branch_arcs = analysis.missing_branch_arcs()
+
+ # For each statement, create an XML 'line' element.
+ for line in sorted(analysis.statements):
+ xline = self.xml_out.createElement("line")
+ xline.setAttribute("number", str(line))
+
+ # Q: can we get info about the number of times a statement is
+ # executed? If so, that should be recorded here.
+ xline.setAttribute("hits", str(int(line not in analysis.missing)))
+
+ if has_arcs:
+ if line in branch_stats:
+ total, taken = branch_stats[line]
+ xline.setAttribute("branch", "true")
+ xline.setAttribute(
+ "condition-coverage",
+ "%d%% (%d/%d)" % (100*taken//total, taken, total)
+ )
+ if line in missing_branch_arcs:
+ annlines = ["exit" if b < 0 else str(b) for b in missing_branch_arcs[line]]
+ xline.setAttribute("missing-branches", ",".join(annlines))
+ xlines.appendChild(xline)
+
+ class_lines = len(analysis.statements)
+ class_hits = class_lines - len(analysis.missing)
+
+ if has_arcs:
+ class_branches = sum(t for t, k in branch_stats.values())
+ missing_branches = sum(t - k for t, k in branch_stats.values())
+ class_br_hits = class_branches - missing_branches
+ else:
+ class_branches = 0.0
+ class_br_hits = 0.0
+
+ # Finalize the statistics that are collected in the XML DOM.
+ xclass.setAttribute("line-rate", rate(class_hits, class_lines))
+ if has_arcs:
+ branch_rate = rate(class_br_hits, class_branches)
+ else:
+ branch_rate = "0"
+ xclass.setAttribute("branch-rate", branch_rate)
+
+ package[0][rel_name] = xclass
+ package[1] += class_hits
+ package[2] += class_lines
+ package[3] += class_br_hits
+ package[4] += class_branches
+
+
+def serialize_xml(dom):
+ """Serialize a minidom node to XML."""
+ out = dom.toprettyxml()
+ if env.PY2:
+ out = out.encode("utf8")
+ return out
diff --git a/third_party/python/coverage/howto.txt b/third_party/python/coverage/howto.txt
new file mode 100644
index 0000000000..3653e830a7
--- /dev/null
+++ b/third_party/python/coverage/howto.txt
@@ -0,0 +1,122 @@
+* Release checklist
+
+- Check that the current virtualenv matches the current coverage branch.
+- Version number in coverage/version.py
+ version_info = (4, 0, 2, 'alpha', 1)
+ version_info = (4, 0, 2, 'beta', 1)
+ version_info = (4, 0, 2, 'candidate', 1)
+ version_info = (4, 0, 2, 'final', 0)
+- Python version number in classifiers in setup.py
+- Copyright date in NOTICE.txt
+- Update CHANGES.rst, including release date.
+ - don't forget the jump target
+- Update README.rst
+ - "New in x.y:"
+ - Python versions supported
+- Update docs
+ - Python versions in doc/index.rst
+ - Version of latest stable release in doc/index.rst
+ - Version, release, release_date and copyright date in doc/conf.py
+ - Look for CHANGEME comments
+ - Don't forget the man page: doc/python-coverage.1.txt
+ - Check that the docs build correctly:
+ $ tox -e doc
+ there will be warnings about the readthedocs links being broken,
+ because this version's docs haven't been published yet.
+ - Done with changes to source files, check them in.
+ - git push
+ - Generate new sample_html to get the latest, incl footer version number:
+ make clean
+ pip install -e .
+ cd ~/cog/trunk
+ rm -rf htmlcov
+ coverage run --branch --source=cogapp -m pytest -k CogTestsInMemory; coverage combine; coverage html
+ - IF PRE-RELEASE:
+ rm -f ~/coverage/trunk/doc/sample_html_beta/*.*
+ cp -r htmlcov/ ~/coverage/trunk/doc/sample_html_beta/
+ - IF NOT PRE-RELEASE:
+ rm -f ~/coverage/trunk/doc/sample_html/*.*
+ cp -r htmlcov/ ~/coverage/trunk/doc/sample_html/
+ cd ~/coverage/trunk
+ - IF NOT PRE-RELEASE:
+ check in the new sample html
+ - Build and publish docs:
+ - IF PRE-RELEASE:
+ $ make publishbeta
+ - ELSE:
+ $ make publish
+- Kits:
+ - Start fresh:
+ - $ make sterile
+ - Source kit and wheels:
+ - $ make kit wheel
+ - Linux wheels:
+ - $ make kit_linux
+ - Windows kits
+ - wait for over an hour for Appveyor to build kits.
+ - https://ci.appveyor.com/project/nedbat/coveragepy
+ - $ make download_appveyor
+ - examine the dist directory, and remove anything that looks malformed.
+ - test the pypi upload:
+ - $ make test_upload
+- Update PyPI:
+ - upload kits:
+ - $ make kit_upload
+- Tag the tree
+ - git tag coverage-3.0.1
+ - git push --tags
+- Bump version:
+ - coverage/version.py
+ - increment version number
+ - IF NOT PRE-RELEASE:
+ - set to alpha-0 if just released.
+ - CHANGES.rst
+ - add an "Unreleased" section to the top.
+ - git push
+- Update Tidelift:
+ - make upload_relnotes
+- Update readthedocs
+ - IF NOT PRE-RELEASE:
+ - update git "stable" branch to point to latest release
+ - git branch -f stable <latest-tag>
+ - git push --all
+ - visit https://readthedocs.org/projects/coverage/builds/
+ - wait for the new tag build to finish successfully.
+ - visit https://readthedocs.org/dashboard/coverage/advanced/
+ - change the default version to the new version
+ - visit https://readthedocs.org/projects/coverage/versions/
+ - find the latest tag in the inactive list, edit it, make it active.
+ - readthedocs won't find the tag until a commit is made on master.
+ - keep just the latest version of each x.y release, make the rest inactive.
+- Visit the fixed issues on GitHub and mention the version they were fixed in.
+ - make a milestone for the next release and move open issues into it.
+- Announce:
+ - twitter @coveragepy
+ - nedbatchelder.com blog post?
+ - testing-in-python mailing list?
+
+
+* Testing
+
+- Testing of Python code is handled by tox.
+ - Create and activate a virtualenv
+ - pip install -r requirements/dev.pip
+ - $ tox
+
+- Testing on Linux:
+ - $ make test_linux
+
+- For complete coverage testing:
+
+ $ make metacov
+
+ This will run coverage.py under its own measurement. You can do this in
+ different environments (Linux vs. Windows, for example), then copy the data
+ files (.metacov.*) to one machine for combination and reporting. To
+ combine and report:
+
+ $ make metahtml
+
+- To run the JavaScript tests:
+
+ open tests/js/index.html in a variety of browsers.
diff --git a/third_party/python/coverage/igor.py b/third_party/python/coverage/igor.py
new file mode 100644
index 0000000000..a742cb8e19
--- /dev/null
+++ b/third_party/python/coverage/igor.py
@@ -0,0 +1,395 @@
+# coding: utf-8
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Helper for building, testing, and linting coverage.py.
+
+For portability, all these operations are written in Python here instead
+of in shell scripts, batch files, or Makefiles.
+
+"""
+
+import contextlib
+import fnmatch
+import glob
+import inspect
+import os
+import platform
+import sys
+import textwrap
+import warnings
+import zipfile
+
+import pytest
+
+
+@contextlib.contextmanager
+def ignore_warnings():
+ """Context manager to ignore warning within the with statement."""
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ yield
+
+
+# Functions named do_* are executable from the command line: do_blah is run
+# by "python igor.py blah".
+
+
+def do_show_env():
+ """Show the environment variables."""
+ print("Environment:")
+ for env in sorted(os.environ):
+ print(" %s = %r" % (env, os.environ[env]))
+
+
+def do_remove_extension():
+ """Remove the compiled C extension, no matter what its name."""
+
+ so_patterns = """
+ tracer.so
+ tracer.*.so
+ tracer.pyd
+ tracer.*.pyd
+ """.split()
+
+ for pattern in so_patterns:
+ pattern = os.path.join("coverage", pattern)
+ for filename in glob.glob(pattern):
+ try:
+ os.remove(filename)
+ except OSError:
+ pass
+
+
+def label_for_tracer(tracer):
+ """Get the label for these tests."""
+ if tracer == "py":
+ label = "with Python tracer"
+ else:
+ label = "with C tracer"
+
+ return label
+
+
+def should_skip(tracer):
+ """Is there a reason to skip these tests?"""
+ if tracer == "py":
+ # $set_env.py: COVERAGE_NO_PYTRACER - Don't run the tests under the Python tracer.
+ skipper = os.environ.get("COVERAGE_NO_PYTRACER")
+ else:
+ # $set_env.py: COVERAGE_NO_CTRACER - Don't run the tests under the C tracer.
+ skipper = os.environ.get("COVERAGE_NO_CTRACER")
+
+ if skipper:
+ msg = "Skipping tests " + label_for_tracer(tracer)
+ if len(skipper) > 1:
+ msg += ": " + skipper
+ else:
+ msg = ""
+
+ return msg
+
+
+def make_env_id(tracer):
+ """An environment id that will keep all the test runs distinct."""
+ impl = platform.python_implementation().lower()
+ version = "%s%s" % sys.version_info[:2]
+ if '__pypy__' in sys.builtin_module_names:
+ version += "_%s%s" % sys.pypy_version_info[:2]
+ env_id = "%s%s_%s" % (impl, version, tracer)
+ return env_id
+
+
+def run_tests(tracer, *runner_args):
+ """The actual running of tests."""
+ if 'COVERAGE_TESTING' not in os.environ:
+ os.environ['COVERAGE_TESTING'] = "True"
+ # $set_env.py: COVERAGE_ENV_ID - Use environment-specific test directories.
+ if 'COVERAGE_ENV_ID' in os.environ:
+ os.environ['COVERAGE_ENV_ID'] = make_env_id(tracer)
+ print_banner(label_for_tracer(tracer))
+ return pytest.main(list(runner_args))
+
+
+def run_tests_with_coverage(tracer, *runner_args):
+ """Run tests, but with coverage."""
+ # Need to define this early enough that the first import of env.py sees it.
+ os.environ['COVERAGE_TESTING'] = "True"
+ os.environ['COVERAGE_PROCESS_START'] = os.path.abspath('metacov.ini')
+ os.environ['COVERAGE_HOME'] = os.getcwd()
+
+ # Create the .pth file that will let us measure coverage in sub-processes.
+ # The .pth file seems to have to be alphabetically after easy-install.pth
+ # or the sys.path entries aren't created right?
+ # There's an entry in "make clean" to get rid of this file.
+ pth_dir = os.path.dirname(pytest.__file__)
+ pth_path = os.path.join(pth_dir, "zzz_metacov.pth")
+ with open(pth_path, "w") as pth_file:
+ pth_file.write("import coverage; coverage.process_startup()\n")
+
+ suffix = "%s_%s" % (make_env_id(tracer), platform.platform())
+ os.environ['COVERAGE_METAFILE'] = os.path.abspath(".metacov."+suffix)
+
+ import coverage
+ cov = coverage.Coverage(config_file="metacov.ini")
+ cov._warn_unimported_source = False
+ cov._warn_preimported_source = False
+ cov.start()
+
+ try:
+ # Re-import coverage to get it coverage tested! I don't understand all
+ # the mechanics here, but if I don't carry over the imported modules
+ # (in covmods), then things go haywire (os == None, eventually).
+ covmods = {}
+ covdir = os.path.split(coverage.__file__)[0]
+ # We have to make a list since we'll be deleting in the loop.
+ modules = list(sys.modules.items())
+ for name, mod in modules:
+ if name.startswith('coverage'):
+ if getattr(mod, '__file__', "??").startswith(covdir):
+ covmods[name] = mod
+ del sys.modules[name]
+ import coverage # pylint: disable=reimported
+ sys.modules.update(covmods)
+
+ # Run tests, with the arguments from our command line.
+ status = run_tests(tracer, *runner_args)
+
+ finally:
+ cov.stop()
+ os.remove(pth_path)
+
+ cov.combine()
+ cov.save()
+
+ return status
+
+
+def do_combine_html():
+ """Combine data from a meta-coverage run, and make the HTML and XML reports."""
+ import coverage
+ os.environ['COVERAGE_HOME'] = os.getcwd()
+ os.environ['COVERAGE_METAFILE'] = os.path.abspath(".metacov")
+ cov = coverage.Coverage(config_file="metacov.ini")
+ cov.load()
+ cov.combine()
+ cov.save()
+ show_contexts = bool(os.environ.get('COVERAGE_CONTEXT'))
+ cov.html_report(show_contexts=show_contexts)
+ cov.xml_report()
+
+
+def do_test_with_tracer(tracer, *runner_args):
+ """Run tests with a particular tracer."""
+ # If we should skip these tests, skip them.
+ skip_msg = should_skip(tracer)
+ if skip_msg:
+ print(skip_msg)
+ return None
+
+ os.environ["COVERAGE_TEST_TRACER"] = tracer
+ if os.environ.get("COVERAGE_COVERAGE", "no") == "yes":
+ return run_tests_with_coverage(tracer, *runner_args)
+ else:
+ return run_tests(tracer, *runner_args)
+
+
+def do_zip_mods():
+ """Build the zipmods.zip file."""
+ zf = zipfile.ZipFile("tests/zipmods.zip", "w")
+
+ # Take one file from disk.
+ zf.write("tests/covmodzip1.py", "covmodzip1.py")
+
+ # The others will be various encodings.
+ source = textwrap.dedent(u"""\
+ # coding: {encoding}
+ text = u"{text}"
+ ords = {ords}
+ assert [ord(c) for c in text] == ords
+ print(u"All OK with {encoding}")
+ """)
+ # These encodings should match the list in tests/test_python.py
+ details = [
+ (u'utf8', u'ⓗⓔⓛⓛⓞ, ⓦⓞⓡⓛⓓ'),
+ (u'gb2312', u'你好,世界'),
+ (u'hebrew', u'שלום, עולם'),
+ (u'shift_jis', u'こんにちは世界'),
+ (u'cp1252', u'“hi”'),
+ ]
+ for encoding, text in details:
+ filename = 'encoded_{}.py'.format(encoding)
+ ords = [ord(c) for c in text]
+ source_text = source.format(encoding=encoding, text=text, ords=ords)
+ zf.writestr(filename, source_text.encode(encoding))
+
+ zf.close()
+
+ zf = zipfile.ZipFile("tests/covmain.zip", "w")
+ zf.write("coverage/__main__.py", "__main__.py")
+ zf.close()
+
+
+def do_install_egg():
+ """Install the egg1 egg for tests."""
+ # I am pretty certain there are easier ways to install eggs...
+ cur_dir = os.getcwd()
+ os.chdir("tests/eggsrc")
+ with ignore_warnings():
+ import distutils.core
+ distutils.core.run_setup("setup.py", ["--quiet", "bdist_egg"])
+ egg = glob.glob("dist/*.egg")[0]
+ distutils.core.run_setup(
+ "setup.py", ["--quiet", "easy_install", "--no-deps", "--zip-ok", egg]
+ )
+ os.chdir(cur_dir)
+
+
+def do_check_eol():
+ """Check files for incorrect newlines and trailing whitespace."""
+
+ ignore_dirs = [
+ '.svn', '.hg', '.git',
+ '.tox*',
+ '*.egg-info',
+ '_build',
+ '_spell',
+ ]
+ checked = set()
+
+ def check_file(fname, crlf=True, trail_white=True):
+ """Check a single file for whitespace abuse."""
+ fname = os.path.relpath(fname)
+ if fname in checked:
+ return
+ checked.add(fname)
+
+ line = None
+ with open(fname, "rb") as f:
+ for n, line in enumerate(f, start=1):
+ if crlf:
+ if b"\r" in line:
+ print("%s@%d: CR found" % (fname, n))
+ return
+ if trail_white:
+ line = line[:-1]
+ if not crlf:
+ line = line.rstrip(b'\r')
+ if line.rstrip() != line:
+ print("%s@%d: trailing whitespace found" % (fname, n))
+ return
+
+ if line is not None and not line.strip():
+ print("%s: final blank line" % (fname,))
+
+ def check_files(root, patterns, **kwargs):
+ """Check a number of files for whitespace abuse."""
+ for where, dirs, files in os.walk(root):
+ for f in files:
+ fname = os.path.join(where, f)
+ for p in patterns:
+ if fnmatch.fnmatch(fname, p):
+ check_file(fname, **kwargs)
+ break
+ for ignore_dir in ignore_dirs:
+ ignored = []
+ for dir_name in dirs:
+ if fnmatch.fnmatch(dir_name, ignore_dir):
+ ignored.append(dir_name)
+ for dir_name in ignored:
+ dirs.remove(dir_name)
+
+ check_files("coverage", ["*.py"])
+ check_files("coverage/ctracer", ["*.c", "*.h"])
+ check_files("coverage/htmlfiles", ["*.html", "*.scss", "*.css", "*.js"])
+ check_files("tests", ["*.py"])
+ check_files("tests", ["*,cover"], trail_white=False)
+ check_files("tests/js", ["*.js", "*.html"])
+ check_file("setup.py")
+ check_file("igor.py")
+ check_file("Makefile")
+ check_file(".travis.yml")
+ check_files(".", ["*.rst", "*.txt"])
+ check_files(".", ["*.pip"])
+
+
+def print_banner(label):
+ """Print the version of Python."""
+ try:
+ impl = platform.python_implementation()
+ except AttributeError:
+ impl = "Python"
+
+ version = platform.python_version()
+
+ if '__pypy__' in sys.builtin_module_names:
+ version += " (pypy %s)" % ".".join(str(v) for v in sys.pypy_version_info)
+
+ try:
+ which_python = os.path.relpath(sys.executable)
+ except ValueError:
+ # On Windows, a Python executable on a different drive
+ # than the sources cannot be expressed as a relative path.
+ which_python = sys.executable
+ print('=== %s %s %s (%s) ===' % (impl, version, label, which_python))
+ sys.stdout.flush()
+
+
+def do_help():
+ """List the available commands"""
+ items = list(globals().items())
+ items.sort()
+ for name, value in items:
+ if name.startswith('do_'):
+ print("%-20s%s" % (name[3:], value.__doc__))
+
+
+def analyze_args(function):
+ """What kind of args does `function` expect?
+
+ Returns:
+ star, num_args:
+ star(boolean): Does `function` accept *args?
+ num_args(int): How many positional arguments does `function` have?
+ """
+ try:
+ getargspec = inspect.getfullargspec
+ except AttributeError:
+ getargspec = inspect.getargspec
+ with ignore_warnings():
+ # DeprecationWarning: Use inspect.signature() instead of inspect.getfullargspec()
+ argspec = getargspec(function)
+ return bool(argspec[1]), len(argspec[0])
+
+
+def main(args):
+ """Main command-line execution for igor.
+
+ Verbs are taken from the command line, and extra words taken as directed
+ by the arguments needed by the handler.
+
+ """
+ while args:
+ verb = args.pop(0)
+ handler = globals().get('do_'+verb)
+ if handler is None:
+ print("*** No handler for %r" % verb)
+ return 1
+ star, num_args = analyze_args(handler)
+ if star:
+ # Handler has *args, give it all the rest of the command line.
+ handler_args = args
+ args = []
+ else:
+ # Handler has specific arguments, give it only what it needs.
+ handler_args = args[:num_args]
+ args = args[num_args:]
+ ret = handler(*handler_args)
+ # If a handler returns a failure-like value, stop.
+ if ret:
+ return ret
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/third_party/python/coverage/metacov.ini b/third_party/python/coverage/metacov.ini
new file mode 100644
index 0000000000..daabbf82f0
--- /dev/null
+++ b/third_party/python/coverage/metacov.ini
@@ -0,0 +1,88 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+# Settings to use when using coverage.py to measure itself, known as
+# meta-coverage. This gets intricate because we need to keep the coverage
+# measurement happening in the tests separate from our own coverage measurement
+# of coverage.py itself.
+
+[run]
+branch = true
+data_file = ${COVERAGE_METAFILE?}
+parallel = true
+source =
+ ${COVERAGE_HOME-.}/coverage
+ ${COVERAGE_HOME-.}/tests
+# $set_env.py: COVERAGE_CONTEXT - Set to 'test_function' for who-tests-what
+dynamic_context = ${COVERAGE_CONTEXT-none}
+
+[report]
+# We set different pragmas so our code won't be confused with test code, and
+# we use distinct pragmas for the different reasons that lines won't be
+# measured.
+exclude_lines =
+ pragma: not covered
+
+ # Lines in test code that aren't covered: we are nested inside ourselves.
+ # Sometimes this is used as a comment:
+ #
+ # cov.start()
+ # blah() # pragma: nested
+ # cov.stop() # pragma: nested
+ #
+ # In order to exclude a series of lines, sometimes it's used as a constant
+ # condition, which might be too cute:
+ #
+ # cov.start()
+ # if "pragma: nested":
+ # blah()
+ # cov.stop()
+ #
+ pragma: nested
+
+ # Lines that are only executed when we are debugging coverage.py.
+ def __repr__
+ pragma: debugging
+
+ # Lines that are only executed when we are not testing coverage.py.
+ pragma: not testing
+
+ # Lines that we can't run during metacov.
+ pragma: no metacov
+
+ # These lines only happen if tests fail.
+ raise AssertionError
+ pragma: only failure
+
+ # OS error conditions that we can't (or don't care to) replicate.
+ pragma: cant happen
+
+ # Obscure bugs in specific versions of interpreters, and so probably no
+ # longer tested.
+ pragma: obscure
+
+ # Jython needs special care.
+ pragma: only jython
+ skip.*Jython
+
+ # IronPython isn't included in metacoverage.
+ pragma: only ironpython
+
+partial_branches =
+ pragma: part covered
+ pragma: if failure
+ pragma: part started
+ if env.TESTING:
+ if .* env.JYTHON
+ if .* env.IRONPYTHON
+
+ignore_errors = true
+precision = 1
+
+[paths]
+source =
+ .
+ *\coverage\trunk
+ */coverage/trunk
+ *\coveragepy
+ /io
diff --git a/third_party/python/coverage/pylintrc b/third_party/python/coverage/pylintrc
new file mode 100644
index 0000000000..d250e9b920
--- /dev/null
+++ b/third_party/python/coverage/pylintrc
@@ -0,0 +1,335 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+# lint Python modules using external checkers.
+#
+# This is the main checker controlling the other ones and the reports
+# generation. It is itself both a raw checker and an astng checker in order
+# to:
+# * handle message activation / deactivation at the module level
+# * handle some basic but necessary stats data (number of classes, methods...)
+#
+[MASTER]
+
+# Specify a configuration file.
+#rcfile=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Add <file or directory> to the black list. It should be a base name, not a
+# path. You may set this option multiple times.
+ignore=
+
+# Pickle collected data for later comparisons.
+persistent=no
+
+# Set the cache size for astng objects.
+cache-size=500
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+extension-pkg-whitelist=
+ greenlet
+
+[MESSAGES CONTROL]
+
+# Enable only checker(s) with the given id(s). This option conflicts with the
+# disable-checker option
+#enable-checker=
+
+# Enable all checker(s) except those with the given id(s). This option
+# conflicts with the enable-checker option
+#disable-checker=
+
+# Enable all messages in the listed categories.
+#enable-msg-cat=
+
+# Disable all messages in the listed categories.
+#disable-msg-cat=
+
+# Enable the message(s) with the given id(s).
+enable=
+ useless-suppression
+
+# Disable the message(s) with the given id(s).
+disable=
+ spelling,
+# Messages that are just silly:
+ locally-disabled,
+ exec-used,
+ no-init,
+ bad-whitespace,
+ global-statement,
+ broad-except,
+ no-else-return,
+# Messages that may be silly:
+ no-self-use,
+ no-member,
+ using-constant-test,
+ too-many-nested-blocks,
+ too-many-ancestors,
+ unnecessary-pass,
+ no-else-break,
+ no-else-continue,
+# Questionable things, but it's ok, I don't need to be told:
+ import-outside-toplevel,
+ self-assigning-variable,
+# Formatting stuff
+ superfluous-parens,
+ bad-continuation,
+# Disable while we still support Python 2:
+ useless-object-inheritance,
+# Messages that are noisy for now, eventually maybe we'll turn them on:
+ invalid-name,
+ protected-access,
+ duplicate-code,
+ cyclic-import
+
+msg-template={path}:{line}: {msg} ({symbol})
+
+[REPORTS]
+
+# set the output format. Available formats are text, parseable, colorized, msvs
+# (visual studio) and html
+output-format=text
+
+# Put messages in a separate file for each module / package specified on the
+# command line instead of printing them on stdout. Reports (if any) will be
+# written in a file name "pylint_global.[txt|html]".
+files-output=no
+
+# Tells whether to display a full report or only the messages
+reports=no
+
+# I don't need a score, thanks.
+score=no
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors, warning and statement, which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (R0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Enable the report(s) with the given id(s).
+#enable-report=
+
+# Disable the report(s) with the given id(s).
+#disable-report=
+
+
+# checks for :
+# * doc strings
+# * modules / classes / functions / methods / arguments / variables name
+# * number of arguments, local variables, branches, returns and statements in
+# functions, methods
+# * required module attributes
+# * dangerous default values as arguments
+# * redefinition of function / method / class
+# * uses of the global statement
+#
+[BASIC]
+
+# Regular expression which should only match function or class names which do
+# not require a docstring
+# Special methods don't: __foo__
+# Test methods don't: testXXXX
+# TestCase overrides don't: setUp, tearDown
+# Nested decorator implementations: _decorator, _wrapper
+# Dispatched methods don't: _xxx__Xxxx
+no-docstring-rgx=__.*__|test[A-Z_].*|setUp|tearDown|_decorator|_wrapper|_.*__.*
+
+# Regular expression which should only match correct module names
+module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Regular expression which should only match correct module level names
+const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Regular expression which should only match correct class names
+class-rgx=[A-Z_][a-zA-Z0-9]+$
+
+# Regular expression which should only match correct function names
+function-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct method names
+method-rgx=[a-z_][a-z0-9_]{2,30}$|setUp|tearDown|test_.*
+
+# Regular expression which should only match correct instance attribute names
+attr-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct argument names
+argument-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct variable names
+variable-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct list comprehension /
+# generator expression variable names
+inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=i,j,k,ex,Run,_
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=foo,bar,baz,toto,tutu,tata
+
+# List of builtins function names that should not be used, separated by a comma
+bad-functions=
+
+
+# try to find bugs in the code using type inference
+#
+[TYPECHECK]
+
+# Tells whether missing members accessed in a mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# List of classes names for which member attributes should not be checked
+# (useful for classes with attributes dynamically set).
+ignored-classes=SQLObject
+
+# List of members which are usually obtained through zope's acquisition mechanism and
+# so shouldn't trigger E0201 when accessed (need zope=yes to be considered).
+acquired-members=REQUEST,acl_users,aq_parent
+
+
+# checks for
+# * unused variables / imports
+# * undefined variables
+# * redefinition of variable from builtins or from an outer scope
+# * use of variable before assignment
+#
+[VARIABLES]
+
+# Tells whether we should check for unused imports in __init__ files.
+init-import=no
+
+# A regular expression matching names of unused arguments.
+ignored-argument-names=_|unused|.*_unused
+dummy-variables-rgx=_|unused|.*_unused
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid defining new builtins when possible.
+additional-builtins=
+
+
+# checks for :
+# * methods without self as first argument
+# * overridden methods signature
+# * access only to existent members via self
+# * attributes not defined in the __init__ method
+# * supported interfaces implementation
+# * unreachable code
+#
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,__new__,setUp,reset
+
+
+# checks for signs of poor design / misdesign:
+# * number of methods, attributes, local variables...
+# * size, complexity of functions, methods
+#
+[DESIGN]
+
+# Maximum number of arguments for function / method
+max-args=15
+
+# Maximum number of locals for function / method body
+max-locals=50
+
+# Maximum number of return / yield for function / method body
+max-returns=20
+
+# Maximum number of branch for function / method body
+max-branches=50
+
+# Maximum number of statements in function / method body
+max-statements=150
+
+# Maximum number of parents for a class (see R0901).
+max-parents=12
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=40
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=0
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=500
+
+
+# checks for
+# * external modules dependencies
+# * relative / wildcard imports
+# * cyclic imports
+# * uses of deprecated modules
+#
+[IMPORTS]
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
+
+# Create a graph of all (i.e. internal and external) dependencies in the
+# given file (report R0402 must not be disabled)
+import-graph=
+
+# Create a graph of external dependencies in the given file (report R0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of internal dependencies in the given file (report R0402 must
+# not be disabled)
+int-import-graph=
+
+
+# checks for :
+# * unauthorized constructions
+# * strict indentation
+# * line length
+# * use of <> instead of !=
+#
+[FORMAT]
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module
+max-module-lines=10000
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+
+# checks for:
+# * warning notes in the code like FIXME, XXX
+# * PEP 263: source code with non ascii character but no encoding declaration
+#
+[MISCELLANEOUS]
+
+# List of note tags to take into consideration, separated by a comma.
+notes=FIXME,XXX,TODO
+
+
+# checks for similarities and duplicated code. This computation may be
+# memory / CPU intensive, so you should disable it if you experience
+# problems.
+#
+[SIMILARITIES]
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
diff --git a/third_party/python/coverage/requirements/ci.pip b/third_party/python/coverage/requirements/ci.pip
new file mode 100644
index 0000000000..c36045685c
--- /dev/null
+++ b/third_party/python/coverage/requirements/ci.pip
@@ -0,0 +1,8 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+# Things CI servers need to succeed.
+-r tox.pip
+-r pytest.pip
+-r wheel.pip
+tox-travis==0.12
diff --git a/third_party/python/coverage/requirements/dev.pip b/third_party/python/coverage/requirements/dev.pip
new file mode 100644
index 0000000000..a11729cdb2
--- /dev/null
+++ b/third_party/python/coverage/requirements/dev.pip
@@ -0,0 +1,24 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+# Requirements for doing local development work on coverage.py.
+# https://requires.io/github/nedbat/coveragepy/requirements/
+
+pip==20.0.2
+virtualenv==16.7.9
+
+pluggy==0.13.1
+
+# PyPI requirements for running tests.
+-r tox.pip
+-r pytest.pip
+
+# for linting.
+greenlet==0.4.15
+pylint==2.4.4
+check-manifest==0.40
+readme_renderer==24.0
+
+# for kitting.
+requests==2.22.0
+twine==2.0.0
diff --git a/third_party/python/coverage/requirements/pytest.pip b/third_party/python/coverage/requirements/pytest.pip
new file mode 100644
index 0000000000..3b5499748e
--- /dev/null
+++ b/third_party/python/coverage/requirements/pytest.pip
@@ -0,0 +1,21 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+# The pytest specifics used by coverage.py
+
+# 4.x is last to support py2
+pytest==4.6.6
+pytest-xdist==1.30.0
+flaky==3.6.1
+mock==3.0.5
+# Use a fork of PyContracts that supports Python 3.9
+#PyContracts==1.8.12
+git+https://github.com/slorg1/contracts@collections_and_validator
+hypothesis==4.41.2
+
+# Our testing mixins
+unittest-mixins==1.6
+#-e/Users/ned/unittest_mixins
+
+# Just so I have a debugger if I want it
+pudb==2019.1
diff --git a/third_party/python/coverage/requirements/tox.pip b/third_party/python/coverage/requirements/tox.pip
new file mode 100644
index 0000000000..a6279c325c
--- /dev/null
+++ b/third_party/python/coverage/requirements/tox.pip
@@ -0,0 +1,7 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+# The version of tox used by coverage.py
+tox==3.14.3
+# Adds env recreation on requirements file changes.
+tox-battery==0.5.2
diff --git a/third_party/python/coverage/requirements/wheel.pip b/third_party/python/coverage/requirements/wheel.pip
new file mode 100644
index 0000000000..abef9db4d6
--- /dev/null
+++ b/third_party/python/coverage/requirements/wheel.pip
@@ -0,0 +1,7 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+# Things needed to make wheels for coverage.py
+
+setuptools==41.4.0
+wheel==0.34.2
diff --git a/third_party/python/coverage/setup.cfg b/third_party/python/coverage/setup.cfg
new file mode 100644
index 0000000000..e76069e01d
--- /dev/null
+++ b/third_party/python/coverage/setup.cfg
@@ -0,0 +1,19 @@
+[tool:pytest]
+addopts = -q -n3 --strict --no-flaky-report -rfe --failed-first
+markers =
+ expensive: too slow to run during "make smoke"
+filterwarnings =
+ ignore:dns.hash module will be removed:DeprecationWarning
+ ignore:Using or importing the ABCs:DeprecationWarning
+
+[pep8]
+ignore = E265,E266,E123,E133,E226,E241,E242,E301,E401
+max-line-length = 100
+
+[metadata]
+license_file = LICENSE.txt
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/coverage/setup.py b/third_party/python/coverage/setup.py
new file mode 100644
index 0000000000..8c837d72cf
--- /dev/null
+++ b/third_party/python/coverage/setup.py
@@ -0,0 +1,217 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Code coverage measurement for Python"""
+
+# Distutils setup for coverage.py
+# This file is used unchanged under all versions of Python, 2.x and 3.x.
+
+import os
+import sys
+
+# Setuptools has to be imported before distutils, or things break.
+from setuptools import setup
+from distutils.core import Extension # pylint: disable=wrong-import-order
+from distutils.command.build_ext import build_ext # pylint: disable=wrong-import-order
+from distutils import errors # pylint: disable=wrong-import-order
+
+
+# Get or massage our metadata. We exec coverage/version.py so we can avoid
+# importing the product code into setup.py.
+
+classifiers = """\
+Environment :: Console
+Intended Audience :: Developers
+License :: OSI Approved :: Apache Software License
+Operating System :: OS Independent
+Programming Language :: Python
+Programming Language :: Python :: 2
+Programming Language :: Python :: 2.7
+Programming Language :: Python :: 3
+Programming Language :: Python :: 3.5
+Programming Language :: Python :: 3.6
+Programming Language :: Python :: 3.7
+Programming Language :: Python :: 3.8
+Programming Language :: Python :: 3.9
+Programming Language :: Python :: Implementation :: CPython
+Programming Language :: Python :: Implementation :: PyPy
+Topic :: Software Development :: Quality Assurance
+Topic :: Software Development :: Testing
+"""
+
+cov_ver_py = os.path.join(os.path.split(__file__)[0], "coverage/version.py")
+with open(cov_ver_py) as version_file:
+ # __doc__ will be overwritten by version.py.
+ doc = __doc__
+ # Keep pylint happy.
+ __version__ = __url__ = version_info = ""
+ # Execute the code in version.py.
+ exec(compile(version_file.read(), cov_ver_py, 'exec'))
+
+with open("README.rst") as readme:
+ long_description = readme.read().replace("https://coverage.readthedocs.io", __url__)
+
+with open("CONTRIBUTORS.txt", "rb") as contributors:
+ paras = contributors.read().split(b"\n\n")
+ num_others = len(paras[-1].splitlines())
+ num_others += 1 # Count Gareth Rees, who is mentioned in the top paragraph.
+
+classifier_list = classifiers.splitlines()
+
+if version_info[3] == 'alpha':
+ devstat = "3 - Alpha"
+elif version_info[3] in ['beta', 'candidate']:
+ devstat = "4 - Beta"
+else:
+ assert version_info[3] == 'final'
+ devstat = "5 - Production/Stable"
+classifier_list.append("Development Status :: " + devstat)
+
+# Create the keyword arguments for setup()
+
+setup_args = dict(
+ name='coverage',
+ version=__version__,
+
+ packages=[
+ 'coverage',
+ ],
+
+ package_data={
+ 'coverage': [
+ 'htmlfiles/*.*',
+ 'fullcoverage/*.*',
+ ]
+ },
+
+ entry_points={
+ # Install a script as "coverage", and as "coverage[23]", and as
+ # "coverage-2.7" (or whatever).
+ 'console_scripts': [
+ 'coverage = coverage.cmdline:main',
+ 'coverage%d = coverage.cmdline:main' % sys.version_info[:1],
+ 'coverage-%d.%d = coverage.cmdline:main' % sys.version_info[:2],
+ ],
+ },
+
+ extras_require={
+ # Enable pyproject.toml support.
+ 'toml': ['toml'],
+ },
+
+ # We need to get HTML assets from our htmlfiles directory.
+ zip_safe=False,
+
+ author='Ned Batchelder and {} others'.format(num_others),
+ author_email='ned@nedbatchelder.com',
+ description=doc,
+ long_description=long_description,
+ long_description_content_type='text/x-rst',
+ keywords='code coverage testing',
+ license='Apache 2.0',
+ classifiers=classifier_list,
+ url="https://github.com/nedbat/coveragepy",
+ project_urls={
+ 'Documentation': __url__,
+ 'Funding': (
+ 'https://tidelift.com/subscription/pkg/pypi-coverage'
+ '?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=pypi'
+ ),
+ 'Issues': 'https://github.com/nedbat/coveragepy/issues',
+ },
+ python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4",
+)
+
+# A replacement for the build_ext command which raises a single exception
+# if the build fails, so we can fall back nicely.
+
+ext_errors = (
+ errors.CCompilerError,
+ errors.DistutilsExecError,
+ errors.DistutilsPlatformError,
+)
+if sys.platform == 'win32':
+ # distutils.msvc9compiler can raise an IOError when failing to
+ # find the compiler
+ ext_errors += (IOError,)
+
+
+class BuildFailed(Exception):
+ """Raise this to indicate the C extension wouldn't build."""
+ def __init__(self):
+ Exception.__init__(self)
+ self.cause = sys.exc_info()[1] # work around py 2/3 different syntax
+
+
+class ve_build_ext(build_ext):
+ """Build C extensions, but fail with a straightforward exception."""
+
+ def run(self):
+ """Wrap `run` with `BuildFailed`."""
+ try:
+ build_ext.run(self)
+ except errors.DistutilsPlatformError:
+ raise BuildFailed()
+
+ def build_extension(self, ext):
+ """Wrap `build_extension` with `BuildFailed`."""
+ try:
+ # Uncomment to test compile failure handling:
+ # raise errors.CCompilerError("OOPS")
+ build_ext.build_extension(self, ext)
+ except ext_errors:
+ raise BuildFailed()
+ except ValueError as err:
+ # this can happen on Windows 64 bit, see Python issue 7511
+ if "'path'" in str(err): # works with both py 2/3
+ raise BuildFailed()
+ raise
+
+# There are a few reasons we might not be able to compile the C extension.
+# Figure out if we should attempt the C extension or not.
+
+compile_extension = True
+
+if sys.platform.startswith('java'):
+ # Jython can't compile C extensions
+ compile_extension = False
+
+if '__pypy__' in sys.builtin_module_names:
+ # Pypy can't compile C extensions
+ compile_extension = False
+
+if compile_extension:
+ setup_args.update(dict(
+ ext_modules=[
+ Extension(
+ "coverage.tracer",
+ sources=[
+ "coverage/ctracer/datastack.c",
+ "coverage/ctracer/filedisp.c",
+ "coverage/ctracer/module.c",
+ "coverage/ctracer/tracer.c",
+ ],
+ ),
+ ],
+ cmdclass={
+ 'build_ext': ve_build_ext,
+ },
+ ))
+
+
+def main():
+ """Actually invoke setup() with the arguments we built above."""
+ # For a variety of reasons, it might not be possible to install the C
+ # extension. Try it with, and if it fails, try it without.
+ try:
+ setup(**setup_args)
+ except BuildFailed as exc:
+ msg = "Couldn't install with extension module, trying without it..."
+ exc_msg = "%s: %s" % (exc.__class__.__name__, exc.cause)
+ print("**\n** %s\n** %s\n**" % (msg, exc_msg))
+
+ del setup_args['ext_modules']
+ setup(**setup_args)
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/python/coverage/tox.ini b/third_party/python/coverage/tox.ini
new file mode 100644
index 0000000000..57c4d4bca0
--- /dev/null
+++ b/third_party/python/coverage/tox.ini
@@ -0,0 +1,95 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+[tox]
+envlist = py{27,35,36,37,38,39}, pypy{2,3}, doc, lint
+skip_missing_interpreters = {env:COVERAGE_SKIP_MISSING_INTERPRETERS:True}
+toxworkdir = {env:TOXWORKDIR:.tox}
+
+[testenv]
+usedevelop = True
+extras =
+ toml
+
+deps =
+ # Check here for what might be out of date:
+ # https://requires.io/github/nedbat/coveragepy/requirements/
+ -r requirements/pytest.pip
+ pip==20.0.2
+ setuptools==41.4.0
+ # gevent 1.3 causes a failure: https://github.com/nedbat/coveragepy/issues/663
+ py{27,35,36}: gevent==1.2.2
+ py{27,35,36,37,38}: eventlet==0.25.1
+ py{27,35,36,37,38}: greenlet==0.4.15
+
+# Windows can't update the pip version with pip running, so use Python
+# to install things.
+install_command = python -m pip install -U {opts} {packages}
+
+passenv = *
+setenv =
+ pypy,pypy{2,3}: COVERAGE_NO_CTRACER=no C extension under PyPy
+ jython: COVERAGE_NO_CTRACER=no C extension under Jython
+ jython: PYTEST_ADDOPTS=-n 0
+
+commands =
+ python setup.py --quiet clean develop
+
+ # Create tests/zipmods.zip
+ # Install the egg1 egg
+ # Remove the C extension so that we can test the PyTracer
+ python igor.py zip_mods install_egg remove_extension
+
+ # Test with the PyTracer
+ python igor.py test_with_tracer py {posargs}
+
+ # Build the C extension and test with the CTracer
+ python setup.py --quiet build_ext --inplace
+ python igor.py test_with_tracer c {posargs}
+
+[testenv:py39]
+basepython = python3.9
+
+[testenv:anypy]
+# For running against my own builds of CPython, or any other specific Python.
+basepython = {env:COVERAGE_PYTHON}
+
+[testenv:doc]
+# Build the docs so we know if they are successful. We build twice: once with
+# -q to get all warnings, and once with -QW to get a success/fail status
+# return.
+deps =
+ -r doc/requirements.pip
+commands =
+ python doc/check_copied_from.py doc/*.rst
+ doc8 -q --ignore-path 'doc/_*' doc CHANGES.rst README.rst
+ sphinx-build -b html -aqE doc doc/_build/html
+ rst2html.py --strict README.rst doc/_build/trash
+ - sphinx-build -b html -b linkcheck -aEnq doc doc/_build/html
+ - sphinx-build -b html -b linkcheck -aEnQW doc doc/_build/html
+
+[testenv:lint]
+deps =
+ -r requirements/dev.pip
+ -r doc/requirements.pip
+
+setenv =
+ LINTABLE = coverage tests doc ci igor.py setup.py __main__.py
+
+commands =
+ python -m tabnanny {env:LINTABLE}
+ python igor.py check_eol
+ check-manifest --ignore 'lab*,perf*,doc/sample_html*,.treerc,.github*'
+ python setup.py -q sdist bdist_wheel
+ twine check dist/*
+ python -m pylint --notes= -j 4 {env:LINTABLE}
+
+[travis]
+#2.7: py27, lint
+python =
+ 2.7: py27
+ 3.5: py35
+ 3.6: py36
+ 3.7: py37
+ pypy: pypy
+ pypy3.5: pypy3
diff --git a/third_party/python/coverage/tox_wheels.ini b/third_party/python/coverage/tox_wheels.ini
new file mode 100644
index 0000000000..92a1ddf419
--- /dev/null
+++ b/third_party/python/coverage/tox_wheels.ini
@@ -0,0 +1,21 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+[tox]
+envlist = py{27,35,36,37,38,sys}
+toxworkdir = {toxinidir}/.tox/wheels
+
+[testenv]
+deps =
+ -rrequirements/wheel.pip
+
+commands =
+ python -c "import sys; print(sys.real_prefix)"
+ python setup.py bdist_wheel {posargs}
+
+[testenv:py27]
+basepython = python2.7
+
+[testenv:pysys]
+# For building with the Mac Framework Python.
+basepython = /usr/bin/python
diff --git a/third_party/python/cram/cram/__init__.py b/third_party/python/cram/cram/__init__.py
new file mode 100644
index 0000000000..4b626c4027
--- /dev/null
+++ b/third_party/python/cram/cram/__init__.py
@@ -0,0 +1,6 @@
+"""Functional testing framework for command line applications"""
+
+from cram._main import main
+from cram._test import test, testfile
+
+__all__ = ['main', 'test', 'testfile']
diff --git a/third_party/python/cram/cram/__main__.py b/third_party/python/cram/cram/__main__.py
new file mode 100644
index 0000000000..e6b0aef978
--- /dev/null
+++ b/third_party/python/cram/cram/__main__.py
@@ -0,0 +1,10 @@
+"""Main module (invoked by "python -m cram")"""
+
+import sys
+
+import cram
+
+try:
+ sys.exit(cram.main(sys.argv[1:]))
+except KeyboardInterrupt:
+ pass
diff --git a/third_party/python/cram/cram/_cli.py b/third_party/python/cram/cram/_cli.py
new file mode 100644
index 0000000000..8333b6b951
--- /dev/null
+++ b/third_party/python/cram/cram/_cli.py
@@ -0,0 +1,134 @@
+"""The command line interface implementation"""
+
+import os
+import sys
+
+from cram._encoding import b, bytestype, stdoutb
+from cram._process import execute
+
+__all__ = ['runcli']
+
+def _prompt(question, answers, auto=None):
+ """Write a prompt to stdout and ask for answer in stdin.
+
+ answers should be a string, with each character a single
+ answer. An uppercase letter is considered the default answer.
+
+ If an invalid answer is given, this asks again until it gets a
+ valid one.
+
+ If auto is set, the question is answered automatically with the
+ specified value.
+ """
+ default = [c for c in answers if c.isupper()]
+ while True:
+ sys.stdout.write('%s [%s] ' % (question, answers))
+ sys.stdout.flush()
+ if auto is not None:
+ sys.stdout.write(auto + '\n')
+ sys.stdout.flush()
+ return auto
+
+ answer = sys.stdin.readline().strip().lower()
+ if not answer and default:
+ return default[0]
+ elif answer and answer in answers.lower():
+ return answer
+
+def _log(msg=None, verbosemsg=None, verbose=False):
+ """Write msg to standard out and flush.
+
+ If verbose is True, write verbosemsg instead.
+ """
+ if verbose:
+ msg = verbosemsg
+ if msg:
+ if isinstance(msg, bytestype):
+ stdoutb.write(msg)
+ else: # pragma: nocover
+ sys.stdout.write(msg)
+ sys.stdout.flush()
+
+def _patch(cmd, diff):
+ """Run echo [lines from diff] | cmd -p0"""
+ out, retcode = execute([cmd, '-p0'], stdin=b('').join(diff))
+ return retcode == 0
+
+def runcli(tests, quiet=False, verbose=False, patchcmd=None, answer=None):
+ """Run tests with command line interface input/output.
+
+ tests should be a sequence of 2-tuples containing the following:
+
+ (test path, test function)
+
+ This function yields a new sequence where each test function is wrapped
+ with a function that handles CLI input/output.
+
+ If quiet is True, diffs aren't printed. If verbose is True,
+ filenames and status information are printed.
+
+ If patchcmd is set, a prompt is written to stdout asking if
+ changed output should be merged back into the original test. The
+ answer is read from stdin. If 'y', the test is patched using patch
+ based on the changed output.
+ """
+ total, skipped, failed = [0], [0], [0]
+
+ for path, test in tests:
+ def testwrapper():
+ """Test function that adds CLI output"""
+ total[0] += 1
+ _log(None, path + b(': '), verbose)
+
+ refout, postout, diff = test()
+ if refout is None:
+ skipped[0] += 1
+ _log('s', 'empty\n', verbose)
+ return refout, postout, diff
+
+ abspath = os.path.abspath(path)
+ errpath = abspath + b('.err')
+
+ if postout is None:
+ skipped[0] += 1
+ _log('s', 'skipped\n', verbose)
+ elif not diff:
+ _log('.', 'passed\n', verbose)
+ if os.path.exists(errpath):
+ os.remove(errpath)
+ else:
+ failed[0] += 1
+ _log('!', 'failed\n', verbose)
+ if not quiet:
+ _log('\n', None, verbose)
+
+ errfile = open(errpath, 'wb')
+ try:
+ for line in postout:
+ errfile.write(line)
+ finally:
+ errfile.close()
+
+ if not quiet:
+ origdiff = diff
+ diff = []
+ for line in origdiff:
+ stdoutb.write(line)
+ diff.append(line)
+
+ if (patchcmd and
+ _prompt('Accept this change?', 'yN', answer) == 'y'):
+ if _patch(patchcmd, diff):
+ _log(None, path + b(': merged output\n'), verbose)
+ os.remove(errpath)
+ else:
+ _log(path + b(': merge failed\n'))
+
+ return refout, postout, diff
+
+ yield (path, testwrapper)
+
+ if total[0] > 0:
+ _log('\n', None, verbose)
+ _log('# Ran %s tests, %s skipped, %s failed.\n'
+ % (total[0], skipped[0], failed[0]))
diff --git a/third_party/python/cram/cram/_diff.py b/third_party/python/cram/cram/_diff.py
new file mode 100644
index 0000000000..4877305082
--- /dev/null
+++ b/third_party/python/cram/cram/_diff.py
@@ -0,0 +1,158 @@
+"""Utilities for diffing test files and their output"""
+
+import codecs
+import difflib
+import re
+
+from cram._encoding import b
+
+__all__ = ['esc', 'glob', 'regex', 'unified_diff']
+
+def _regex(pattern, s):
+ """Match a regular expression or return False if invalid.
+
+ >>> from cram._encoding import b
+ >>> [bool(_regex(r, b('foobar'))) for r in (b('foo.*'), b('***'))]
+ [True, False]
+ """
+ try:
+ return re.match(pattern + b(r'\Z'), s)
+ except re.error:
+ return False
+
+def _glob(el, l):
+ r"""Match a glob-like pattern.
+
+ The only supported special characters are * and ?. Escaping is
+ supported.
+
+ >>> from cram._encoding import b
+ >>> bool(_glob(b(r'\* \\ \? fo?b*'), b('* \\ ? foobar')))
+ True
+ """
+ i, n = 0, len(el)
+ res = b('')
+ while i < n:
+ c = el[i:i + 1]
+ i += 1
+ if c == b('\\') and el[i] in b('*?\\'):
+ res += el[i - 1:i + 1]
+ i += 1
+ elif c == b('*'):
+ res += b('.*')
+ elif c == b('?'):
+ res += b('.')
+ else:
+ res += re.escape(c)
+ return _regex(res, l)
+
+def _matchannotation(keyword, matchfunc, el, l):
+ """Apply match function based on annotation keyword"""
+ ann = b(' (%s)\n' % keyword)
+ return el.endswith(ann) and matchfunc(el[:-len(ann)], l[:-1])
+
+def regex(el, l):
+ """Apply a regular expression match to a line annotated with '(re)'"""
+ return _matchannotation('re', _regex, el, l)
+
+def glob(el, l):
+ """Apply a glob match to a line annotated with '(glob)'"""
+ return _matchannotation('glob', _glob, el, l)
+
+def esc(el, l):
+ """Apply an escape match to a line annotated with '(esc)'"""
+ ann = b(' (esc)\n')
+
+ if el.endswith(ann):
+ el = codecs.escape_decode(el[:-len(ann)])[0] + b('\n')
+ if el == l:
+ return True
+
+ if l.endswith(ann):
+ l = codecs.escape_decode(l[:-len(ann)])[0] + b('\n')
+ return el == l
+
+class _SequenceMatcher(difflib.SequenceMatcher, object):
+ """Like difflib.SequenceMatcher, but supports custom match functions"""
+ def __init__(self, *args, **kwargs):
+ self._matchers = kwargs.pop('matchers', [])
+ super(_SequenceMatcher, self).__init__(*args, **kwargs)
+
+ def _match(self, el, l):
+ """Tests for matching lines using custom matchers"""
+ for matcher in self._matchers:
+ if matcher(el, l):
+ return True
+ return False
+
+ def find_longest_match(self, alo, ahi, blo, bhi):
+ """Find longest matching block in a[alo:ahi] and b[blo:bhi]"""
+ # SequenceMatcher uses find_longest_match() to slowly whittle down
+ # the differences between a and b until it has each matching block.
+ # Because of this, we can end up doing the same matches many times.
+ matches = []
+ for n, (el, line) in enumerate(zip(self.a[alo:ahi], self.b[blo:bhi])):
+ if el != line and self._match(el, line):
+ # This fools the superclass's method into thinking that the
+ # regex/glob in a is identical to b by replacing a's line (the
+ # expected output) with b's line (the actual output).
+ self.a[alo + n] = line
+ matches.append((n, el))
+ ret = super(_SequenceMatcher, self).find_longest_match(alo, ahi,
+ blo, bhi)
+ # Restore the lines replaced above. Otherwise, the diff output
+ # would seem to imply that the tests never had any regexes/globs.
+ for n, el in matches:
+ self.a[alo + n] = el
+ return ret
+
+def unified_diff(l1, l2, fromfile=b(''), tofile=b(''), fromfiledate=b(''),
+ tofiledate=b(''), n=3, lineterm=b('\n'), matchers=None):
+ r"""Compare two sequences of lines; generate the delta as a unified diff.
+
+ This is like difflib.unified_diff(), but allows custom matchers.
+
+ >>> from cram._encoding import b
+ >>> l1 = [b('a\n'), b('? (glob)\n')]
+ >>> l2 = [b('a\n'), b('b\n')]
+ >>> (list(unified_diff(l1, l2, b('f1'), b('f2'), b('1970-01-01'),
+ ... b('1970-01-02'))) ==
+ ... [b('--- f1\t1970-01-01\n'), b('+++ f2\t1970-01-02\n'),
+ ... b('@@ -1,2 +1,2 @@\n'), b(' a\n'), b('-? (glob)\n'), b('+b\n')])
+ True
+
+ >>> from cram._diff import glob
+ >>> list(unified_diff(l1, l2, matchers=[glob]))
+ []
+ """
+ if matchers is None:
+ matchers = []
+ started = False
+ matcher = _SequenceMatcher(None, l1, l2, matchers=matchers)
+ for group in matcher.get_grouped_opcodes(n):
+ if not started:
+ if fromfiledate:
+ fromdate = b('\t') + fromfiledate
+ else:
+ fromdate = b('')
+ if tofiledate:
+ todate = b('\t') + tofiledate
+ else:
+ todate = b('')
+ yield b('--- ') + fromfile + fromdate + lineterm
+ yield b('+++ ') + tofile + todate + lineterm
+ started = True
+ i1, i2, j1, j2 = group[0][1], group[-1][2], group[0][3], group[-1][4]
+ yield (b("@@ -%d,%d +%d,%d @@" % (i1 + 1, i2 - i1, j1 + 1, j2 - j1)) +
+ lineterm)
+ for tag, i1, i2, j1, j2 in group:
+ if tag == 'equal':
+ for line in l1[i1:i2]:
+ yield b(' ') + line
+ continue
+ if tag == 'replace' or tag == 'delete':
+ for line in l1[i1:i2]:
+ yield b('-') + line
+ if tag == 'replace' or tag == 'insert':
+ for line in l2[j1:j2]:
+ yield b('+') + line
diff --git a/third_party/python/cram/cram/_encoding.py b/third_party/python/cram/cram/_encoding.py
new file mode 100644
index 0000000000..d639ccee19
--- /dev/null
+++ b/third_party/python/cram/cram/_encoding.py
@@ -0,0 +1,106 @@
+"""Encoding utilities"""
+
+import os
+import sys
+
+try:
+ import builtins
+except ImportError:
+ import __builtin__ as builtins
+
+__all__ = ['b', 'bchr', 'bytestype', 'envencode', 'fsdecode', 'fsencode',
+ 'stdoutb', 'stderrb', 'u', 'ul', 'unicodetype']
+
+bytestype = getattr(builtins, 'bytes', str)
+unicodetype = getattr(builtins, 'unicode', str)
+
+if getattr(os, 'fsdecode', None) is not None:
+ fsdecode = os.fsdecode
+ fsencode = os.fsencode
+elif bytestype is not str:
+ if sys.platform == 'win32':
+ def fsdecode(s):
+ """Decode a filename from the filesystem encoding"""
+ if isinstance(s, unicodetype):
+ return s
+ encoding = sys.getfilesystemencoding()
+ if encoding == 'mbcs':
+ return s.decode(encoding)
+ else:
+ return s.decode(encoding, 'surrogateescape')
+
+ def fsencode(s):
+ """Encode a filename to the filesystem encoding"""
+ if isinstance(s, bytestype):
+ return s
+ encoding = sys.getfilesystemencoding()
+ if encoding == 'mbcs':
+ return s.encode(encoding)
+ else:
+ return s.encode(encoding, 'surrogateescape')
+ else:
+ def fsdecode(s):
+ """Decode a filename from the filesystem encoding"""
+ if isinstance(s, unicodetype):
+ return s
+ return s.decode(sys.getfilesystemencoding(), 'surrogateescape')
+
+ def fsencode(s):
+ """Encode a filename to the filesystem encoding"""
+ if isinstance(s, bytestype):
+ return s
+ return s.encode(sys.getfilesystemencoding(), 'surrogateescape')
+else:
+ def fsdecode(s):
+ """Decode a filename from the filesystem encoding"""
+ return s
+
+ def fsencode(s):
+ """Encode a filename to the filesystem encoding"""
+ return s
+
+if bytestype is str:
+ def envencode(s):
+ """Encode a byte string to the os.environ encoding"""
+ return s
+else:
+ envencode = fsdecode
+
+if getattr(sys.stdout, 'buffer', None) is not None:
+ stdoutb = sys.stdout.buffer
+ stderrb = sys.stderr.buffer
+else:
+ stdoutb = sys.stdout
+ stderrb = sys.stderr
+
+if bytestype is str:
+ def b(s):
+ """Convert an ASCII string literal into a bytes object"""
+ return s
+
+ bchr = chr
+
+ def u(s):
+ """Convert an ASCII string literal into a unicode object"""
+ return s.decode('ascii')
+else:
+ def b(s):
+ """Convert an ASCII string literal into a bytes object"""
+ return s.encode('ascii')
+
+ def bchr(i):
+ """Return a bytes character for a given integer value"""
+ return bytestype([i])
+
+ def u(s):
+ """Convert an ASCII string literal into a unicode object"""
+ return s
+
+try:
+ eval(r'u""')
+except SyntaxError:
+ ul = eval
+else:
+ def ul(e):
+ """Evaluate e as a unicode string literal"""
+ return eval('u' + e)
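+
+# Illustrative sketch of the helpers above. On Python 3, where bytestype is
+# bytes and unicodetype is str:
+#
+#   b('hi')  -> b'hi'   # ASCII str literal encoded to bytes
+#   bchr(65) -> b'A'    # one-byte bytes object
+#   u('hi')  -> 'hi'    # already unicode, returned unchanged
+#
+# On Python 2, where bytestype is str, the helpers mirror this: b('hi')
+# returns the str unchanged and u('hi') decodes it to u'hi'.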
diff --git a/third_party/python/cram/cram/_main.py b/third_party/python/cram/cram/_main.py
new file mode 100644
index 0000000000..11d457bb16
--- /dev/null
+++ b/third_party/python/cram/cram/_main.py
@@ -0,0 +1,211 @@
+"""Main entry point"""
+
+import optparse
+import os
+import shlex
+import shutil
+import sys
+import tempfile
+
+try:
+ import configparser
+except ImportError: # pragma: nocover
+ import ConfigParser as configparser
+
+from cram._cli import runcli
+from cram._encoding import b, fsencode, stderrb, stdoutb
+from cram._run import runtests
+from cram._xunit import runxunit
+
+def _which(cmd):
+ """Return the path to cmd or None if not found"""
+ cmd = fsencode(cmd)
+ for p in os.environ['PATH'].split(os.pathsep):
+ path = os.path.join(fsencode(p), cmd)
+ if os.path.isfile(path) and os.access(path, os.X_OK):
+ return os.path.abspath(path)
+ return None
+
+def _expandpath(path):
+ """Expands ~ and environment variables in path"""
+ return os.path.expanduser(os.path.expandvars(path))
+
+class _OptionParser(optparse.OptionParser):
+ """Like optparse.OptionParser, but supports setting values through
+ CRAM= and .cramrc."""
+
+ def __init__(self, *args, **kwargs):
+ self._config_opts = {}
+ optparse.OptionParser.__init__(self, *args, **kwargs)
+
+ def add_option(self, *args, **kwargs):
+ option = optparse.OptionParser.add_option(self, *args, **kwargs)
+ if option.dest and option.dest != 'version':
+ key = option.dest.replace('_', '-')
+ self._config_opts[key] = option.action == 'store_true'
+ return option
+
+ def parse_args(self, args=None, values=None):
+ config = configparser.RawConfigParser()
+ config.read(_expandpath(os.environ.get('CRAMRC', '.cramrc')))
+ defaults = {}
+ for key, isbool in self._config_opts.items():
+ try:
+ if isbool:
+ try:
+ value = config.getboolean('cram', key)
+ except ValueError:
+ value = config.get('cram', key)
+ self.error('--%s: invalid boolean value: %r'
+ % (key, value))
+ else:
+ value = config.get('cram', key)
+ except (configparser.NoSectionError, configparser.NoOptionError):
+ pass
+ else:
+ defaults[key] = value
+ self.set_defaults(**defaults)
+
+ eargs = os.environ.get('CRAM', '').strip()
+ if eargs:
+ args = args or []
+ args += shlex.split(eargs)
+
+ try:
+ return optparse.OptionParser.parse_args(self, args, values)
+ except optparse.OptionValueError:
+ self.error(str(sys.exc_info()[1]))
+
+def _parseopts(args):
+ """Parse command line arguments"""
+ p = _OptionParser(usage='cram [OPTIONS] TESTS...', prog='cram')
+ p.add_option('-V', '--version', action='store_true',
+ help='show version information and exit')
+ p.add_option('-q', '--quiet', action='store_true',
+ help="don't print diffs")
+ p.add_option('-v', '--verbose', action='store_true',
+ help='show filenames and test status')
+ p.add_option('-i', '--interactive', action='store_true',
+ help='interactively merge changed test output')
+ p.add_option('-d', '--debug', action='store_true',
+ help='write script output directly to the terminal')
+ p.add_option('-y', '--yes', action='store_true',
+ help='answer yes to all questions')
+ p.add_option('-n', '--no', action='store_true',
+ help='answer no to all questions')
+ p.add_option('-E', '--preserve-env', action='store_true',
+ help="don't reset common environment variables")
+ p.add_option('--keep-tmpdir', action='store_true',
+ help='keep temporary directories')
+ p.add_option('--shell', action='store', default='/bin/sh', metavar='PATH',
+ help='shell to use for running tests (default: %default)')
+ p.add_option('--shell-opts', action='store', metavar='OPTS',
+ help='arguments to invoke shell with')
+ p.add_option('--indent', action='store', default=2, metavar='NUM',
+ type='int', help=('number of spaces to use for indentation '
+ '(default: %default)'))
+ p.add_option('--xunit-file', action='store', metavar='PATH',
+ help='path to write xUnit XML output')
+ opts, paths = p.parse_args(args)
+ paths = [fsencode(path) for path in paths]
+ return opts, paths, p.get_usage
+
+def main(args):
+ """Main entry point.
+
+ If you're thinking of using Cram in other Python code (e.g., unit tests),
+ consider using the test() or testfile() functions instead.
+
+ :param args: Script arguments (excluding script name)
+ :type args: list[str]
+ :return: Exit code (non-zero on failure)
+ :rtype: int
+ """
+ opts, paths, getusage = _parseopts(args)
+ if opts.version:
+ sys.stdout.write("""Cram CLI testing framework (version 0.7)
+
+Copyright (C) 2010-2016 Brodie Rao <brodie@bitheap.org> and others
+This is free software; see the source for copying conditions. There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+""")
+ return
+
+ conflicts = [('--yes', opts.yes, '--no', opts.no),
+ ('--quiet', opts.quiet, '--interactive', opts.interactive),
+ ('--debug', opts.debug, '--quiet', opts.quiet),
+ ('--debug', opts.debug, '--interactive', opts.interactive),
+ ('--debug', opts.debug, '--verbose', opts.verbose),
+ ('--debug', opts.debug, '--xunit-file', opts.xunit_file)]
+ for s1, o1, s2, o2 in conflicts:
+ if o1 and o2:
+ sys.stderr.write('options %s and %s are mutually exclusive\n'
+ % (s1, s2))
+ return 2
+
+ shellcmd = _which(opts.shell)
+ if not shellcmd:
+ stderrb.write(b('shell not found: ') + fsencode(opts.shell) + b('\n'))
+ return 2
+ shell = [shellcmd]
+ if opts.shell_opts:
+ shell += shlex.split(opts.shell_opts)
+
+ patchcmd = None
+ if opts.interactive:
+ patchcmd = _which('patch')
+ if not patchcmd:
+ sys.stderr.write('patch(1) required for -i\n')
+ return 2
+
+ if not paths:
+ sys.stdout.write(getusage())
+ return 2
+
+ badpaths = [path for path in paths if not os.path.exists(path)]
+ if badpaths:
+ stderrb.write(b('no such file: ') + badpaths[0] + b('\n'))
+ return 2
+
+ if opts.yes:
+ answer = 'y'
+ elif opts.no:
+ answer = 'n'
+ else:
+ answer = None
+
+ tmpdir = os.environ['CRAMTMP'] = tempfile.mkdtemp('', 'cramtests-')
+ tmpdirb = fsencode(tmpdir)
+ proctmp = os.path.join(tmpdir, 'tmp')
+ for s in ('TMPDIR', 'TEMP', 'TMP'):
+ os.environ[s] = proctmp
+
+ os.mkdir(proctmp)
+ try:
+ tests = runtests(paths, tmpdirb, shell, indent=opts.indent,
+ cleanenv=not opts.preserve_env, debug=opts.debug)
+ if not opts.debug:
+ tests = runcli(tests, quiet=opts.quiet, verbose=opts.verbose,
+ patchcmd=patchcmd, answer=answer)
+ if opts.xunit_file is not None:
+ tests = runxunit(tests, opts.xunit_file)
+
+ hastests = False
+ failed = False
+ for path, test in tests:
+ hastests = True
+ refout, postout, diff = test()
+ if diff:
+ failed = True
+
+ if not hastests:
+ sys.stderr.write('no tests found\n')
+ return 2
+
+ return int(failed)
+ finally:
+ if opts.keep_tmpdir:
+ stdoutb.write(b('# Kept temporary directory: ') + tmpdirb +
+ b('\n'))
+ else:
+ shutil.rmtree(tmpdir)
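+
+# Illustrative configuration sketch for the parser above (values are
+# examples only). Defaults can come from .cramrc, or the file named by the
+# CRAMRC environment variable, in a [cram] section keyed by option name:
+#
+#   [cram]
+#   verbose = True
+#   indent = 4
+#   shell = /bin/bash
+#
+# Extra command line arguments can also be supplied through the CRAM
+# environment variable, e.g. CRAM='--verbose --keep-tmpdir'.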
diff --git a/third_party/python/cram/cram/_process.py b/third_party/python/cram/cram/_process.py
new file mode 100644
index 0000000000..decdfbc3a7
--- /dev/null
+++ b/third_party/python/cram/cram/_process.py
@@ -0,0 +1,54 @@
+"""Utilities for running subprocesses"""
+
+import os
+import signal
+import subprocess
+import sys
+
+from cram._encoding import fsdecode
+
+__all__ = ['PIPE', 'STDOUT', 'execute']
+
+PIPE = subprocess.PIPE
+STDOUT = subprocess.STDOUT
+
+def _makeresetsigpipe():
+ """Make a function to reset SIGPIPE to SIG_DFL (for use in subprocesses).
+
+ Doing subprocess.Popen(..., preexec_fn=makeresetsigpipe()) will prevent
+ Python's SIGPIPE handler (SIG_IGN) from being inherited by the
+ child process.
+ """
+ if (sys.platform == 'win32' or
+ getattr(signal, 'SIGPIPE', None) is None): # pragma: nocover
+ return None
+ return lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+
+def execute(args, stdin=None, stdout=None, stderr=None, cwd=None, env=None):
+ """Run a process and return its output and return code.
+
+ stdin may either be None or a string to send to the process.
+
+ stdout may either be None or PIPE. If set to PIPE, the process's output
+ is returned as a string.
+
+ stderr may either be None or STDOUT. If stdout is set to PIPE and stderr
+ is set to STDOUT, the process's stderr output will be interleaved with
+ stdout and returned as a string.
+
+ cwd sets the process's current working directory.
+
+ env can be set to a dictionary to override the process's environment
+ variables.
+
+ This function returns a 2-tuple of (output, returncode).
+ """
+ if sys.platform == 'win32': # pragma: nocover
+ args = [fsdecode(arg) for arg in args]
+
+ p = subprocess.Popen(args, stdin=PIPE, stdout=stdout, stderr=stderr,
+ cwd=cwd, env=env, bufsize=-1,
+ preexec_fn=_makeresetsigpipe(),
+ close_fds=os.name == 'posix')
+ out, err = p.communicate(stdin)
+ return out, p.returncode
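+
+# Illustrative usage sketch (the command and expected output are examples):
+#
+#   out, ret = execute([b'/bin/sh', b'-c', b'echo hi'],
+#                      stdout=PIPE, stderr=STDOUT)
+#   # On a POSIX system: out == b'hi\n' and ret == 0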
diff --git a/third_party/python/cram/cram/_run.py b/third_party/python/cram/cram/_run.py
new file mode 100644
index 0000000000..9111c0f686
--- /dev/null
+++ b/third_party/python/cram/cram/_run.py
@@ -0,0 +1,77 @@
+"""The test runner"""
+
+import os
+import sys
+
+from cram._encoding import b, fsdecode, fsencode
+from cram._test import testfile
+
+__all__ = ['runtests']
+
+if sys.platform == 'win32': # pragma: nocover
+ def _walk(top):
+ top = fsdecode(top)
+ for root, dirs, files in os.walk(top):
+ yield (fsencode(root),
+ [fsencode(p) for p in dirs],
+ [fsencode(p) for p in files])
+else:
+ _walk = os.walk
+
+def _findtests(paths):
+ """Yield tests in paths in sorted order"""
+ for p in paths:
+ if os.path.isdir(p):
+ for root, dirs, files in _walk(p):
+ if os.path.basename(root).startswith(b('.')):
+ continue
+ for f in sorted(files):
+ if not f.startswith(b('.')) and f.endswith(b('.t')):
+ yield os.path.normpath(os.path.join(root, f))
+ else:
+ yield os.path.normpath(p)
+
+def runtests(paths, tmpdir, shell, indent=2, cleanenv=True, debug=False):
+ """Run tests and yield results.
+
+ This yields a sequence of 2-tuples containing the following:
+
+ (test path, test function)
+
+ The test function, when called, runs the test in a temporary directory
+ and returns a 3-tuple:
+
+ (list of lines in the test, same list with actual output, diff)
+ """
+ cwd = os.getcwd()
+ seen = set()
+ basenames = set()
+ for i, path in enumerate(_findtests(paths)):
+ abspath = os.path.abspath(path)
+ if abspath in seen:
+ continue
+ seen.add(abspath)
+
+ if not os.stat(path).st_size:
+ yield (path, lambda: (None, None, None))
+ continue
+
+ basename = os.path.basename(path)
+ if basename in basenames:
+ basename = basename + b('-%s' % i)
+ else:
+ basenames.add(basename)
+
+ def test():
+ """Run test file"""
+ testdir = os.path.join(tmpdir, basename)
+ os.mkdir(testdir)
+ try:
+ os.chdir(testdir)
+ return testfile(abspath, shell, indent=indent,
+ cleanenv=cleanenv, debug=debug,
+ testname=path)
+ finally:
+ os.chdir(cwd)
+
+ yield (path, test)
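+
+# Illustrative sketch of consuming the generator above (paths, tmpdir, and
+# shell are example values; this mirrors how cram._main drives it):
+#
+#   for path, test in runtests([b'tests'], b'/tmp/cramtests', [b'/bin/sh']):
+#       refout, postout, diff = test()
+#       if diff:
+#           pass  # test failed; diff yields unified diff lines as bytes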
diff --git a/third_party/python/cram/cram/_test.py b/third_party/python/cram/cram/_test.py
new file mode 100644
index 0000000000..27ef99c597
--- /dev/null
+++ b/third_party/python/cram/cram/_test.py
@@ -0,0 +1,230 @@
+"""Utilities for running individual tests"""
+
+import itertools
+import os
+import re
+import time
+
+from cram._encoding import b, bchr, bytestype, envencode, unicodetype
+from cram._diff import esc, glob, regex, unified_diff
+from cram._process import PIPE, STDOUT, execute
+
+__all__ = ['test', 'testfile']
+
+_needescape = re.compile(b(r'[\x00-\x09\x0b-\x1f\x7f-\xff]')).search
+_escapesub = re.compile(b(r'[\x00-\x09\x0b-\x1f\\\x7f-\xff]')).sub
+_escapemap = dict((bchr(i), b(r'\x%02x' % i)) for i in range(256))
+_escapemap.update({b('\\'): b('\\\\'), b('\r'): b(r'\r'), b('\t'): b(r'\t')})
+
+def _escape(s):
+ """Like the string-escape codec, but doesn't escape quotes"""
+ return (_escapesub(lambda m: _escapemap[m.group(0)], s[:-1]) +
+ b(' (esc)\n'))
+
+def test(lines, shell='/bin/sh', indent=2, testname=None, env=None,
+ cleanenv=True, debug=False):
+ r"""Run test lines and return input, output, and diff.
+
+ This returns a 3-tuple containing the following:
+
+ (list of lines in test, same list with actual output, diff)
+
+ diff is a generator that yields the diff between the two lists.
+
+ If a test exits with return code 80, the actual output is set to
+ None and diff is set to [].
+
+ Note that the TESTSHELL environment variable is available in the
+ test (set to the specified shell). However, the TESTDIR and
+ TESTFILE environment variables are not available. To run actual
+ test files, see testfile().
+
+ Example usage:
+
+ >>> from cram._encoding import b
+ >>> refout, postout, diff = test([b(' $ echo hi\n'),
+ ... b(' [a-z]{2} (re)\n')])
+ >>> refout == [b(' $ echo hi\n'), b(' [a-z]{2} (re)\n')]
+ True
+ >>> postout == [b(' $ echo hi\n'), b(' hi\n')]
+ True
+ >>> bool(diff)
+ False
+
+ lines may also be a single bytes string:
+
+ >>> refout, postout, diff = test(b(' $ echo hi\n bye\n'))
+ >>> refout == [b(' $ echo hi\n'), b(' bye\n')]
+ True
+ >>> postout == [b(' $ echo hi\n'), b(' hi\n')]
+ True
+ >>> bool(diff)
+ True
+ >>> (b('').join(diff) ==
+ ... b('--- \n+++ \n@@ -1,2 +1,2 @@\n $ echo hi\n- bye\n+ hi\n'))
+ True
+
+ Note that the b() function is internal to Cram. If you're using Python 2,
+ use normal string literals instead. If you're using Python 3, use bytes
+ literals.
+
+ :param lines: Test input
+ :type lines: bytes or collections.Iterable[bytes]
+ :param shell: Shell to run test in
+ :type shell: bytes or str or list[bytes] or list[str]
+ :param indent: Amount of indentation to use for shell commands
+ :type indent: int
+ :param testname: Optional test file name (used in diff output)
+ :type testname: bytes or None
+ :param env: Optional environment variables for the test shell
+ :type env: dict or None
+ :param cleanenv: Whether or not to sanitize the environment
+ :type cleanenv: bool
+ :param debug: Whether or not to run in debug mode (don't capture stdout)
+ :type debug: bool
+ :return: Input, output, and diff iterables
+ :rtype: (list[bytes], list[bytes], collections.Iterable[bytes])
+ """
+ indent = b(' ') * indent
+ cmdline = indent + b('$ ')
+ conline = indent + b('> ')
+ usalt = 'CRAM%s' % time.time()
+ salt = b(usalt)
+
+ if env is None:
+ env = os.environ.copy()
+
+ if cleanenv:
+ for s in ('LANG', 'LC_ALL', 'LANGUAGE'):
+ env[s] = 'C'
+ env['TZ'] = 'GMT'
+ env['CDPATH'] = ''
+ env['COLUMNS'] = '80'
+ env['GREP_OPTIONS'] = ''
+
+ if isinstance(lines, bytestype):
+ lines = lines.splitlines(True)
+
+ if isinstance(shell, (bytestype, unicodetype)):
+ shell = [shell]
+ env['TESTSHELL'] = shell[0]
+
+ if debug:
+ stdin = []
+ for line in lines:
+ if not line.endswith(b('\n')):
+ line += b('\n')
+ if line.startswith(cmdline):
+ stdin.append(line[len(cmdline):])
+ elif line.startswith(conline):
+ stdin.append(line[len(conline):])
+
+ execute(shell + ['-'], stdin=b('').join(stdin), env=env)
+ return ([], [], [])
+
+ after = {}
+ refout, postout = [], []
+ i = pos = prepos = -1
+ stdin = []
+ for i, line in enumerate(lines):
+ if not line.endswith(b('\n')):
+ line += b('\n')
+ refout.append(line)
+ if line.startswith(cmdline):
+ after.setdefault(pos, []).append(line)
+ prepos = pos
+ pos = i
+ stdin.append(b('echo %s %s $?\n' % (usalt, i)))
+ stdin.append(line[len(cmdline):])
+ elif line.startswith(conline):
+ after.setdefault(prepos, []).append(line)
+ stdin.append(line[len(conline):])
+ elif not line.startswith(indent):
+ after.setdefault(pos, []).append(line)
+ stdin.append(b('echo %s %s $?\n' % (usalt, i + 1)))
+
+ output, retcode = execute(shell + ['-'], stdin=b('').join(stdin),
+ stdout=PIPE, stderr=STDOUT, env=env)
+ if retcode == 80:
+ return (refout, None, [])
+
+ pos = -1
+ ret = 0
+ for i, line in enumerate(output[:-1].splitlines(True)):
+ out, cmd = line, None
+ if salt in line:
+ out, cmd = line.split(salt, 1)
+
+ if out:
+ if not out.endswith(b('\n')):
+ out += b(' (no-eol)\n')
+
+ if _needescape(out):
+ out = _escape(out)
+ postout.append(indent + out)
+
+ if cmd:
+ ret = int(cmd.split()[1])
+ if ret != 0:
+ postout.append(indent + b('[%s]\n' % (ret)))
+ postout += after.pop(pos, [])
+ pos = int(cmd.split()[0])
+
+ postout += after.pop(pos, [])
+
+ if testname:
+ diffpath = testname
+ errpath = diffpath + b('.err')
+ else:
+ diffpath = errpath = b('')
+ diff = unified_diff(refout, postout, diffpath, errpath,
+ matchers=[esc, glob, regex])
+ for firstline in diff:
+ return refout, postout, itertools.chain([firstline], diff)
+ return refout, postout, []
+
+def testfile(path, shell='/bin/sh', indent=2, env=None, cleanenv=True,
+ debug=False, testname=None):
+ """Run test at path and return input, output, and diff.
+
+ This returns a 3-tuple containing the following:
+
+ (list of lines in test, same list with actual output, diff)
+
+ diff is a generator that yields the diff between the two lists.
+
+ If a test exits with return code 80, the actual output is set to
+ None and diff is set to [].
+
+ Note that the TESTDIR, TESTFILE, and TESTSHELL environment
+ variables are available to use in the test.
+
+ :param path: Path to test file
+ :type path: bytes or str
+ :param shell: Shell to run test in
+ :type shell: bytes or str or list[bytes] or list[str]
+ :param indent: Amount of indentation to use for shell commands
+ :type indent: int
+ :param env: Optional environment variables for the test shell
+ :type env: dict or None
+ :param cleanenv: Whether or not to sanitize the environment
+ :type cleanenv: bool
+ :param debug: Whether or not to run in debug mode (don't capture stdout)
+ :type debug: bool
+ :param testname: Optional test file name (used in diff output)
+ :type testname: bytes or None
+ :return: Input, output, and diff iterables
+ :rtype: (list[bytes], list[bytes], collections.Iterable[bytes])
+ """
+ f = open(path, 'rb')
+ try:
+ abspath = os.path.abspath(path)
+ env = env or os.environ.copy()
+ env['TESTDIR'] = envencode(os.path.dirname(abspath))
+ env['TESTFILE'] = envencode(os.path.basename(abspath))
+ if testname is None: # pragma: nocover
+ testname = os.path.basename(abspath)
+ return test(f, shell, indent=indent, testname=testname, env=env,
+ cleanenv=cleanenv, debug=debug)
+ finally:
+ f.close()
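+
+# Illustrative usage sketch (the file name is an example):
+#
+#   refout, postout, diff = testfile(b'example.t')
+#   if diff:
+#       pass  # test failed; diff yields unified diff lines as bytes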
diff --git a/third_party/python/cram/cram/_xunit.py b/third_party/python/cram/cram/_xunit.py
new file mode 100644
index 0000000000..0b3cb49cfc
--- /dev/null
+++ b/third_party/python/cram/cram/_xunit.py
@@ -0,0 +1,173 @@
+"""xUnit XML output"""
+
+import locale
+import os
+import re
+import socket
+import sys
+import time
+
+from cram._encoding import u, ul
+
+__all__ = ['runxunit']
+
+_widecdataregex = ul(r"'(?:[^\x09\x0a\x0d\x20-\ud7ff\ue000-\ufffd"
+ r"\U00010000-\U0010ffff]|]]>)'")
+_narrowcdataregex = ul(r"'(?:[^\x09\x0a\x0d\x20-\ud7ff\ue000-\ufffd]"
+ r"|]]>)'")
+_widequoteattrregex = ul(r"'[^\x20\x21\x23-\x25\x27-\x3b\x3d"
+ r"\x3f-\ud7ff\ue000-\ufffd"
+ r"\U00010000-\U0010ffff]'")
+_narrowquoteattrregex = ul(r"'[^\x20\x21\x23-\x25\x27-\x3b\x3d"
+ r"\x3f-\ud7ff\ue000-\ufffd]'")
+_replacementchar = ul(r"'\N{REPLACEMENT CHARACTER}'")
+
+if sys.maxunicode >= 0x10ffff: # pragma: nocover
+ _cdatasub = re.compile(_widecdataregex).sub
+ _quoteattrsub = re.compile(_widequoteattrregex).sub
+else: # pragma: nocover
+ _cdatasub = re.compile(_narrowcdataregex).sub
+ _quoteattrsub = re.compile(_narrowquoteattrregex).sub
+
+def _cdatareplace(m):
+ """Replace _cdatasub() regex match"""
+ if m.group(0) == u(']]>'):
+ return u(']]>]]&gt;<![CDATA[')
+ else:
+ return _replacementchar
+
+def _cdata(s):
+ r"""Escape a string as an XML CDATA block.
+
+ >>> from cram._encoding import ul
+ >>> (_cdata('1<\'2\'>&"3\x00]]>\t\r\n') ==
+ ... ul(r"'<![CDATA[1<\'2\'>&\"3\ufffd]]>]]&gt;<![CDATA[\t\r\n]]>'"))
+ True
+ """
+ return u('<![CDATA[%s]]>') % _cdatasub(_cdatareplace, s)
+
+def _quoteattrreplace(m):
+ """Replace _quoteattrsub() regex match"""
+ return {u('\t'): u('&#9;'),
+ u('\n'): u('&#10;'),
+ u('\r'): u('&#13;'),
+ u('"'): u('&quot;'),
+ u('&'): u('&amp;'),
+ u('<'): u('&lt;'),
+ u('>'): u('&gt;')}.get(m.group(0), _replacementchar)
+
+def _quoteattr(s):
+ r"""Escape a string for use as an XML attribute value.
+
+ >>> from cram._encoding import ul
+ >>> (_quoteattr('1<\'2\'>&"3\x00]]>\t\r\n') ==
+ ... ul(r"'\"1&lt;\'2\'&gt;&amp;&quot;3\ufffd]]&gt;&#9;&#13;&#10;\"'"))
+ True
+ """
+ return u('"%s"') % _quoteattrsub(_quoteattrreplace, s)
+
+def _timestamp():
+ """Return the current time in ISO 8601 format"""
+ tm = time.localtime()
+ if tm.tm_isdst == 1: # pragma: nocover
+ tz = time.altzone
+ else: # pragma: nocover
+ tz = time.timezone
+
+ timestamp = time.strftime('%Y-%m-%dT%H:%M:%S', tm)
+ tzhours = int(-tz / 60 / 60)
+ tzmins = int(abs(tz) / 60 % 60)
+ timestamp += u('%+03d:%02d') % (tzhours, tzmins)
+ return timestamp
+
+def runxunit(tests, xmlpath):
+ """Run tests with xUnit XML output.
+
+ tests should be a sequence of 2-tuples containing the following:
+
+ (test path, test function)
+
+ This function yields a new sequence where each test function is wrapped
+ with a function that writes test results to an xUnit XML file.
+ """
+ suitestart = time.time()
+ timestamp = _timestamp()
+ hostname = socket.gethostname()
+ total, skipped, failed = [0], [0], [0]
+ testcases = []
+
+ for path, test in tests:
+ def testwrapper():
+ """Run test and collect XML output"""
+ total[0] += 1
+
+ start = time.time()
+ refout, postout, diff = test()
+ testtime = time.time() - start
+
+ classname = path.decode(locale.getpreferredencoding(), 'replace')
+ name = os.path.basename(classname)
+
+ if postout is None:
+ skipped[0] += 1
+ testcase = (u(' <testcase classname=%(classname)s\n'
+ ' name=%(name)s\n'
+ ' time="%(time).6f">\n'
+ ' <skipped/>\n'
+ ' </testcase>\n') %
+ {'classname': _quoteattr(classname),
+ 'name': _quoteattr(name),
+ 'time': testtime})
+ elif diff:
+ failed[0] += 1
+ diff = list(diff)
+ diffu = u('').join(l.decode(locale.getpreferredencoding(),
+ 'replace')
+ for l in diff)
+ testcase = (u(' <testcase classname=%(classname)s\n'
+ ' name=%(name)s\n'
+ ' time="%(time).6f">\n'
+ ' <failure>%(diff)s</failure>\n'
+ ' </testcase>\n') %
+ {'classname': _quoteattr(classname),
+ 'name': _quoteattr(name),
+ 'time': testtime,
+ 'diff': _cdata(diffu)})
+ else:
+ testcase = (u(' <testcase classname=%(classname)s\n'
+ ' name=%(name)s\n'
+ ' time="%(time).6f"/>\n') %
+ {'classname': _quoteattr(classname),
+ 'name': _quoteattr(name),
+ 'time': testtime})
+ testcases.append(testcase)
+
+ return refout, postout, diff
+
+ yield path, testwrapper
+
+ suitetime = time.time() - suitestart
+ header = (u('<?xml version="1.0" encoding="utf-8"?>\n'
+ '<testsuite name="cram"\n'
+ ' tests="%(total)d"\n'
+ ' failures="%(failed)d"\n'
+ ' skipped="%(skipped)d"\n'
+ ' timestamp=%(timestamp)s\n'
+ ' hostname=%(hostname)s\n'
+ ' time="%(time).6f">\n') %
+ {'total': total[0],
+ 'failed': failed[0],
+ 'skipped': skipped[0],
+ 'timestamp': _quoteattr(timestamp),
+ 'hostname': _quoteattr(hostname),
+ 'time': suitetime})
+ footer = u('</testsuite>\n')
+
+ xmlfile = open(xmlpath, 'wb')
+ try:
+ xmlfile.write(header.encode('utf-8'))
+ for testcase in testcases:
+ xmlfile.write(testcase.encode('utf-8'))
+ xmlfile.write(footer.encode('utf-8'))
+ finally:
+ xmlfile.close()
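+
+# Illustrative sketch of the XML written above (attribute values are
+# examples; the real output places each attribute on its own line):
+#
+#   <?xml version="1.0" encoding="utf-8"?>
+#   <testsuite name="cram" tests="2" failures="1" skipped="0"
+#              timestamp="2021-01-01T00:00:00+00:00" hostname="example"
+#              time="0.120000">
+#     <testcase classname="examples/fail.t" name="fail.t" time="0.050000">
+#       <failure><![CDATA[...]]></failure>
+#     </testcase>
+#     <testcase classname="examples/ok.t" name="ok.t" time="0.040000"/>
+#   </testsuite>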
diff --git a/third_party/python/diskcache/LICENSE b/third_party/python/diskcache/LICENSE
new file mode 100644
index 0000000000..3259b989fd
--- /dev/null
+++ b/third_party/python/diskcache/LICENSE
@@ -0,0 +1,12 @@
+Copyright 2016-2019 Grant Jenks
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use
+this file except in compliance with the License. You may obtain a copy of the
+License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software distributed
+under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
diff --git a/third_party/python/diskcache/MANIFEST.in b/third_party/python/diskcache/MANIFEST.in
new file mode 100644
index 0000000000..0c738421d6
--- /dev/null
+++ b/third_party/python/diskcache/MANIFEST.in
@@ -0,0 +1 @@
+include README.rst LICENSE
diff --git a/third_party/python/diskcache/PKG-INFO b/third_party/python/diskcache/PKG-INFO
new file mode 100644
index 0000000000..12d0977c7d
--- /dev/null
+++ b/third_party/python/diskcache/PKG-INFO
@@ -0,0 +1,428 @@
+Metadata-Version: 1.1
+Name: diskcache
+Version: 4.1.0
+Summary: Disk Cache -- Disk and file backed persistent cache.
+Home-page: http://www.grantjenks.com/docs/diskcache/
+Author: Grant Jenks
+Author-email: contact@grantjenks.com
+License: Apache 2.0
+Description: DiskCache: Disk Backed Cache
+ ============================
+
+ `DiskCache`_ is an Apache2 licensed disk and file backed cache library, written
+ in pure-Python, and compatible with Django.
+
+ The cloud-based computing of 2019 puts a premium on memory. Gigabytes of empty
+ space is left on disks as processes vie for memory. Among these processes is
+ Memcached (and sometimes Redis) which is used as a cache. Wouldn't it be nice
+ to leverage empty disk space for caching?
+
+ Django is Python's most popular web framework and ships with several caching
+ backends. Unfortunately the file-based cache in Django is essentially
+ broken. The culling method is random and large caches repeatedly scan a cache
+ directory which slows linearly with growth. Can you really allow it to take
+ sixty milliseconds to store a key in a cache with a thousand items?
+
+ In Python, we can do better. And we can do it in pure-Python!
+
+ ::
+
+ In [1]: import pylibmc
+ In [2]: client = pylibmc.Client(['127.0.0.1'], binary=True)
+ In [3]: client[b'key'] = b'value'
+ In [4]: %timeit client[b'key']
+
+ 10000 loops, best of 3: 25.4 µs per loop
+
+ In [5]: import diskcache as dc
+ In [6]: cache = dc.Cache('tmp')
+ In [7]: cache[b'key'] = b'value'
+ In [8]: %timeit cache[b'key']
+
+ 100000 loops, best of 3: 11.8 µs per loop
+
+ **Note:** Micro-benchmarks have their place but are not a substitute for real
+ measurements. DiskCache offers cache benchmarks to defend its performance
+ claims. Micro-optimizations are avoided but your mileage may vary.
+
+ DiskCache efficiently makes gigabytes of storage space available for
+ caching. By leveraging rock-solid database libraries and memory-mapped files,
+ cache performance can match and exceed industry-standard solutions. There's no
+ need for a C compiler or running another process. Performance is a feature and
+ testing has 100% coverage with unit tests and hours of stress.
+
+ Testimonials
+ ------------
+
+ `Daren Hasenkamp`_, Founder --
+
+ "It's a useful, simple API, just like I love about Redis. It has reduced
+ the amount of queries hitting my Elasticsearch cluster by over 25% for a
+ website that gets over a million users/day (100+ hits/second)."
+
+ `Mathias Petermann`_, Senior Linux System Engineer --
+
+ "I implemented it into a wrapper for our Ansible lookup modules and we were
+ able to speed up some Ansible runs by almost 3 times. DiskCache is saving
+ us a ton of time."
+
+ Does your company or website use `DiskCache`_? Send us a `message
+ <mailto:contact@grantjenks.com>`_ and let us know.
+
+ .. _`Daren Hasenkamp`: https://www.linkedin.com/in/daren-hasenkamp-93006438/
+ .. _`Mathias Petermann`: https://www.linkedin.com/in/mathias-petermann-a8aa273b/
+
+ Features
+ --------
+
+ - Pure-Python
+ - Fully Documented
+ - Benchmark comparisons (alternatives, Django cache backends)
+ - 100% test coverage
+ - Hours of stress testing
+ - Performance matters
+ - Django compatible API
+ - Thread-safe and process-safe
+ - Supports multiple eviction policies (LRU and LFU included)
+ - Keys support "tag" metadata and eviction
+ - Developed on Python 3.7
+ - Tested on CPython 2.7, 3.4, 3.5, 3.6, 3.7 and PyPy
+ - Tested on Linux, Mac OS X, and Windows
+ - Tested using Travis CI and AppVeyor CI
+
+ .. image:: https://api.travis-ci.org/grantjenks/python-diskcache.svg?branch=master
+ :target: http://www.grantjenks.com/docs/diskcache/
+
+ .. image:: https://ci.appveyor.com/api/projects/status/github/grantjenks/python-diskcache?branch=master&svg=true
+ :target: http://www.grantjenks.com/docs/diskcache/
+
+ Quickstart
+ ----------
+
+ Installing `DiskCache`_ is simple with `pip <http://www.pip-installer.org/>`_::
+
+ $ pip install diskcache
+
+ You can access documentation in the interpreter with Python's built-in help
+ function::
+
+ >>> import diskcache
+ >>> help(diskcache)
+
+ The core of `DiskCache`_ is three data types intended for caching. `Cache`_
+ objects manage a SQLite database and filesystem directory to store key and
+ value pairs. `FanoutCache`_ provides a sharding layer to utilize multiple
+ caches and `DjangoCache`_ integrates that with `Django`_::
+
+ >>> from diskcache import Cache, FanoutCache, DjangoCache
+ >>> help(Cache)
+ >>> help(FanoutCache)
+ >>> help(DjangoCache)
+
+ Built atop the caching data types are `Deque`_ and `Index`_, which work as
+ cross-process, persistent replacements for Python's ``collections.deque`` and
+ ``dict``. These implement the sequence and mapping container base classes::
+
+ >>> from diskcache import Deque, Index
+ >>> help(Deque)
+ >>> help(Index)
+
+ Finally, a number of `recipes`_ for cross-process synchronization are provided
+ using an underlying cache. Features like memoization with cache stampede
+ prevention, cross-process locking, and cross-process throttling are available::
+
+ >>> from diskcache import memoize_stampede, Lock, throttle
+ >>> help(memoize_stampede)
+ >>> help(Lock)
+ >>> help(throttle)
+
+ Python's docstrings are a quick way to get started but not intended as a
+ replacement for the `DiskCache Tutorial`_ and `DiskCache API Reference`_.
+
+ .. _`Cache`: http://www.grantjenks.com/docs/diskcache/tutorial.html#cache
+ .. _`FanoutCache`: http://www.grantjenks.com/docs/diskcache/tutorial.html#fanoutcache
+ .. _`DjangoCache`: http://www.grantjenks.com/docs/diskcache/tutorial.html#djangocache
+ .. _`Django`: https://www.djangoproject.com/
+ .. _`Deque`: http://www.grantjenks.com/docs/diskcache/tutorial.html#deque
+ .. _`Index`: http://www.grantjenks.com/docs/diskcache/tutorial.html#index
+ .. _`recipes`: http://www.grantjenks.com/docs/diskcache/tutorial.html#recipes
+
+ User Guide
+ ----------
+
+ For those wanting more details, this part of the documentation describes
+ tutorial, benchmarks, API, and development.
+
+ * `DiskCache Tutorial`_
+ * `DiskCache Cache Benchmarks`_
+ * `DiskCache DjangoCache Benchmarks`_
+ * `Case Study: Web Crawler`_
+ * `Case Study: Landing Page Caching`_
+ * `Talk: All Things Cached - SF Python 2017 Meetup`_
+ * `DiskCache API Reference`_
+ * `DiskCache Development`_
+
+ .. _`DiskCache Tutorial`: http://www.grantjenks.com/docs/diskcache/tutorial.html
+ .. _`DiskCache Cache Benchmarks`: http://www.grantjenks.com/docs/diskcache/cache-benchmarks.html
+ .. _`DiskCache DjangoCache Benchmarks`: http://www.grantjenks.com/docs/diskcache/djangocache-benchmarks.html
+ .. _`Talk: All Things Cached - SF Python 2017 Meetup`: http://www.grantjenks.com/docs/diskcache/sf-python-2017-meetup-talk.html
+ .. _`Case Study: Web Crawler`: http://www.grantjenks.com/docs/diskcache/case-study-web-crawler.html
+ .. _`Case Study: Landing Page Caching`: http://www.grantjenks.com/docs/diskcache/case-study-landing-page-caching.html
+ .. _`DiskCache API Reference`: http://www.grantjenks.com/docs/diskcache/api.html
+ .. _`DiskCache Development`: http://www.grantjenks.com/docs/diskcache/development.html
+
+ Comparisons
+ -----------
+
+ Comparisons to popular projects related to `DiskCache`_.
+
+ Key-Value Stores
+ ................
+
+ `DiskCache`_ is mostly a simple key-value store. Feature comparisons with four
+ other projects are shown in the tables below.
+
+ * `dbm`_ is part of Python's standard library and implements a generic
+ interface to variants of the DBM database — dbm.gnu or dbm.ndbm. If none of
+ these modules is installed, the slow-but-simple dbm.dumb is used.
+ * `shelve`_ is part of Python's standard library and implements a “shelf” as a
+ persistent, dictionary-like object. The difference with “dbm” databases is
+ that the values can be anything that the pickle module can handle.
+ * `sqlitedict`_ is a lightweight wrapper around Python's sqlite3 database with
+ a simple, Pythonic dict-like interface and support for multi-thread
+ access. Keys are arbitrary strings, values arbitrary pickle-able objects.
+ * `pickleDB`_ is a lightweight and simple key-value store. It is built upon
+ Python's simplejson module and was inspired by Redis. It is licensed with the
+ BSD three-clause license.
+
+ .. _`dbm`: https://docs.python.org/3/library/dbm.html
+ .. _`shelve`: https://docs.python.org/3/library/shelve.html
+ .. _`sqlitedict`: https://github.com/RaRe-Technologies/sqlitedict
+ .. _`pickleDB`: https://pythonhosted.org/pickleDB/
+
+ **Features**
+
+ ================ ============= ========= ========= ============ ============
+ Feature diskcache dbm shelve sqlitedict pickleDB
+ ================ ============= ========= ========= ============ ============
+ Atomic? Always Maybe Maybe Maybe No
+ Persistent? Yes Yes Yes Yes Yes
+ Thread-safe? Yes No No Yes No
+ Process-safe? Yes No No Maybe No
+ Backend? SQLite DBM DBM SQLite File
+ Serialization? Customizable None Pickle Customizable JSON
+ Data Types? Mapping/Deque Mapping Mapping Mapping Mapping
+ Ordering? Insert/Sorted None None None None
+ Eviction? LRU/LFU/more None None None None
+ Vacuum? Automatic Maybe Maybe Manual Automatic
+ Transactions? Yes No No Maybe No
+ Multiprocessing? Yes No No No No
+ Forkable? Yes No No No No
+ Metadata? Yes No No No No
+ ================ ============= ========= ========= ============ ============
+
+ **Quality**
+
+ ================ ============= ========= ========= ============ ============
+ Project diskcache dbm shelve sqlitedict pickleDB
+ ================ ============= ========= ========= ============ ============
+ Tests? Yes Yes Yes Yes Yes
+ Coverage? Yes Yes Yes Yes No
+ Stress? Yes No No No No
+ CI Tests? Linux/Windows Yes Yes Linux No
+ Python? 2/3/PyPy All All 2/3 2/3
+ License? Apache2 Python Python Apache2 3-Clause BSD
+ Docs? Extensive Summary Summary Readme Summary
+ Benchmarks? Yes No No No No
+ Sources? GitHub GitHub GitHub GitHub GitHub
+ Pure-Python? Yes Yes Yes Yes Yes
+ Server? No No No No No
+ Integrations? Django None None None None
+ ================ ============= ========= ========= ============ ============
+
+ **Timings**
+
+ These are rough measurements. See `DiskCache Cache Benchmarks`_ for more
+ rigorous data.
+
+ ================ ============= ========= ========= ============ ============
+ Project diskcache dbm shelve sqlitedict pickleDB
+ ================ ============= ========= ========= ============ ============
+ get 25 µs 36 µs 41 µs 513 µs 92 µs
+ set 198 µs 900 µs 928 µs 697 µs 1,020 µs
+ delete 248 µs 740 µs 702 µs 1,717 µs 1,020 µs
+ ================ ============= ========= ========= ============ ============
+
+ Caching Libraries
+ .................
+
+ * `joblib.Memory`_ provides caching functions and works by explicitly saving
+ the inputs and outputs to files. It is designed to work with non-hashable and
+ potentially large input and output data types such as numpy arrays.
+ * `klepto`_ extends Python’s `lru_cache` to utilize different keymaps and
+ alternate caching algorithms, such as `lfu_cache` and `mru_cache`. Klepto
+ uses a simple dictionary-style interface for all caches and archives.
+
+ .. _`klepto`: https://pypi.org/project/klepto/
+ .. _`joblib.Memory`: https://joblib.readthedocs.io/en/latest/memory.html
+
+ Data Structures
+ ...............
+
+ * `dict`_ is a mapping object that maps hashable keys to arbitrary
+ values. Mappings are mutable objects. There is currently only one standard
+ Python mapping type, the dictionary.
+ * `pandas`_ is a Python package providing fast, flexible, and expressive data
+ structures designed to make working with “relational” or “labeled” data both
+ easy and intuitive.
+ * `Sorted Containers`_ is an Apache2 licensed sorted collections library,
+ written in pure-Python, and fast as C-extensions. Sorted Containers
+ implements sorted list, sorted dictionary, and sorted set data types.
+
+ .. _`dict`: https://docs.python.org/3/library/stdtypes.html#typesmapping
+ .. _`pandas`: https://pandas.pydata.org/
+ .. _`Sorted Containers`: http://www.grantjenks.com/docs/sortedcontainers/
+
+ Pure-Python Databases
+ .....................
+
+ * `ZODB`_ supports an isomorphic interface for database operations which means
+ there's little impact on your code to make objects persistent and there's no
+ database mapper that partially hides the database.
+ * `CodernityDB`_ is an open source, pure-Python, multi-platform, schema-less,
+ NoSQL database and includes an HTTP server version, and a Python client
+ library that aims to be 100% compatible with the embedded version.
+ * `TinyDB`_ is a tiny, document oriented database optimized for your
+ happiness. If you need a simple database with a clean API that just works
+ without lots of configuration, TinyDB might be the right choice for you.
+
+ .. _`ZODB`: http://www.zodb.org/
+ .. _`CodernityDB`: https://pypi.org/project/CodernityDB/
+ .. _`TinyDB`: https://tinydb.readthedocs.io/
+
+ Object Relational Mappings (ORM)
+ ................................
+
+ * `Django ORM`_ provides models that are the single, definitive source of
+ information about data and contains the essential fields and behaviors of the
+ stored data. Generally, each model maps to a single SQL database table.
+ * `SQLAlchemy`_ is the Python SQL toolkit and Object Relational Mapper that
+ gives application developers the full power and flexibility of SQL. It
+ provides a full suite of well known enterprise-level persistence patterns.
+ * `Peewee`_ is a simple and small ORM. It has few (but expressive) concepts,
+ making it easy to learn and intuitive to use. Peewee supports Sqlite, MySQL,
+ and PostgreSQL with tons of extensions.
+ * `SQLObject`_ is a popular Object Relational Manager for providing an object
+ interface to your database, with tables as classes, rows as instances, and
+ columns as attributes.
+ * `Pony ORM`_ is a Python ORM with beautiful query syntax. Use Python syntax
+ for interacting with the database. Pony translates such queries into SQL and
+ executes them in the database in the most efficient way.
+
+ .. _`Django ORM`: https://docs.djangoproject.com/en/dev/topics/db/
+ .. _`SQLAlchemy`: https://www.sqlalchemy.org/
+ .. _`Peewee`: http://docs.peewee-orm.com/
+ .. _`dataset`: https://dataset.readthedocs.io/
+ .. _`SQLObject`: http://sqlobject.org/
+ .. _`Pony ORM`: https://ponyorm.com/
+
+ SQL Databases
+ .............
+
+ * `SQLite`_ is part of Python's standard library and provides a lightweight
+ disk-based database that doesn’t require a separate server process and allows
+ accessing the database using a nonstandard variant of the SQL query language.
+ * `MySQL`_ is one of the world’s most popular open source databases and has
+ become a leading database choice for web-based applications. MySQL includes a
+ standardized database driver for Python platforms and development.
+ * `PostgreSQL`_ is a powerful, open source object-relational database system
+ with over 30 years of active development. Psycopg is the most popular
+ PostgreSQL adapter for the Python programming language.
+ * `Oracle DB`_ is a relational database management system (RDBMS) from the
+ Oracle Corporation. Originally developed in 1977, Oracle DB is one of the
+ most trusted and widely used enterprise relational database engines.
+ * `Microsoft SQL Server`_ is a relational database management system developed
+ by Microsoft. As a database server, it stores and retrieves data as requested
+ by other software applications.
+
+ .. _`SQLite`: https://docs.python.org/3/library/sqlite3.html
+ .. _`MySQL`: https://dev.mysql.com/downloads/connector/python/
+ .. _`PostgreSQL`: http://initd.org/psycopg/
+ .. _`Oracle DB`: https://pypi.org/project/cx_Oracle/
+ .. _`Microsoft SQL Server`: https://pypi.org/project/pyodbc/
+
+ Other Databases
+ ...............
+
+ * `Memcached`_ is free and open source, high-performance, distributed memory
+ object caching system, generic in nature, but intended for use in speeding up
+ dynamic web applications by alleviating database load.
+ * `Redis`_ is an open source, in-memory data structure store, used as a
+ database, cache and message broker. It supports data structures such as
+ strings, hashes, lists, sets, sorted sets with range queries, and more.
+ * `MongoDB`_ is a cross-platform document-oriented database program. Classified
+ as a NoSQL database program, MongoDB uses JSON-like documents with
+ schema. PyMongo is the recommended way to work with MongoDB from Python.
+ * `LMDB`_ is a lightning-fast, memory-mapped database. With memory-mapped
+ files, it has the read performance of a pure in-memory database while
+ retaining the persistence of standard disk-based databases.
+ * `BerkeleyDB`_ is a software library intended to provide a high-performance
+ embedded database for key/value data. Berkeley DB is a programmatic toolkit
+ that provides built-in database support for desktop and server applications.
+ * `LevelDB`_ is a fast key-value storage library written at Google that
+ provides an ordered mapping from string keys to string values. Data is stored
+ sorted by key and users can provide a custom comparison function.
+
+ .. _`Memcached`: https://pypi.org/project/python-memcached/
+ .. _`MongoDB`: https://api.mongodb.com/python/current/
+ .. _`Redis`: https://redis.io/clients#python
+ .. _`LMDB`: https://lmdb.readthedocs.io/
+ .. _`BerkeleyDB`: https://pypi.org/project/bsddb3/
+ .. _`LevelDB`: https://plyvel.readthedocs.io/
+
+ Reference
+ ---------
+
+ * `DiskCache Documentation`_
+ * `DiskCache at PyPI`_
+ * `DiskCache at GitHub`_
+ * `DiskCache Issue Tracker`_
+
+ .. _`DiskCache Documentation`: http://www.grantjenks.com/docs/diskcache/
+ .. _`DiskCache at PyPI`: https://pypi.python.org/pypi/diskcache/
+ .. _`DiskCache at GitHub`: https://github.com/grantjenks/python-diskcache/
+ .. _`DiskCache Issue Tracker`: https://github.com/grantjenks/python-diskcache/issues/
+
+ License
+ -------
+
+ Copyright 2016-2019 Grant Jenks
+
+ Licensed under the Apache License, Version 2.0 (the "License"); you may not use
+ this file except in compliance with the License. You may obtain a copy of the
+ License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software distributed
+ under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+ CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations under the License.
+
+ .. _`DiskCache`: http://www.grantjenks.com/docs/diskcache/
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
diff --git a/third_party/python/diskcache/README.rst b/third_party/python/diskcache/README.rst
new file mode 100644
index 0000000000..57b7a2e21d
--- /dev/null
+++ b/third_party/python/diskcache/README.rst
@@ -0,0 +1,404 @@
+DiskCache: Disk Backed Cache
+============================
+
+`DiskCache`_ is an Apache2 licensed disk and file backed cache library, written
+in pure-Python, and compatible with Django.
+
+The cloud-based computing of 2019 puts a premium on memory. Gigabytes of empty
+space is left on disks as processes vie for memory. Among these processes is
+Memcached (and sometimes Redis) which is used as a cache. Wouldn't it be nice
+to leverage empty disk space for caching?
+
+Django is Python's most popular web framework and ships with several caching
+backends. Unfortunately the file-based cache in Django is essentially
+broken. The culling method is random and large caches repeatedly scan a cache
+directory which slows linearly with growth. Can you really allow it to take
+sixty milliseconds to store a key in a cache with a thousand items?
+
+In Python, we can do better. And we can do it in pure-Python!
+
+::
+
+ In [1]: import pylibmc
+ In [2]: client = pylibmc.Client(['127.0.0.1'], binary=True)
+ In [3]: client[b'key'] = b'value'
+ In [4]: %timeit client[b'key']
+
+ 10000 loops, best of 3: 25.4 µs per loop
+
+ In [5]: import diskcache as dc
+ In [6]: cache = dc.Cache('tmp')
+ In [7]: cache[b'key'] = b'value'
+ In [8]: %timeit cache[b'key']
+
+ 100000 loops, best of 3: 11.8 µs per loop
+
+**Note:** Micro-benchmarks have their place but are not a substitute for real
+measurements. DiskCache offers cache benchmarks to defend its performance
+claims. Micro-optimizations are avoided but your mileage may vary.
+
+DiskCache efficiently makes gigabytes of storage space available for
+caching. By leveraging rock-solid database libraries and memory-mapped files,
+cache performance can match and exceed industry-standard solutions. There's no
+need for a C compiler or running another process. Performance is a feature and
+testing has 100% coverage with unit tests and hours of stress.
+
+Testimonials
+------------
+
+`Daren Hasenkamp`_, Founder --
+
+ "It's a useful, simple API, just like I love about Redis. It has reduced
+ the amount of queries hitting my Elasticsearch cluster by over 25% for a
+ website that gets over a million users/day (100+ hits/second)."
+
+`Mathias Petermann`_, Senior Linux System Engineer --
+
+ "I implemented it into a wrapper for our Ansible lookup modules and we were
+ able to speed up some Ansible runs by almost 3 times. DiskCache is saving
+ us a ton of time."
+
+Does your company or website use `DiskCache`_? Send us a `message
+<mailto:contact@grantjenks.com>`_ and let us know.
+
+.. _`Daren Hasenkamp`: https://www.linkedin.com/in/daren-hasenkamp-93006438/
+.. _`Mathias Petermann`: https://www.linkedin.com/in/mathias-petermann-a8aa273b/
+
+Features
+--------
+
+- Pure-Python
+- Fully Documented
+- Benchmark comparisons (alternatives, Django cache backends)
+- 100% test coverage
+- Hours of stress testing
+- Performance matters
+- Django compatible API
+- Thread-safe and process-safe
+- Supports multiple eviction policies (LRU and LFU included)
+- Keys support "tag" metadata and eviction
+- Developed on Python 3.7
+- Tested on CPython 2.7, 3.4, 3.5, 3.6, 3.7 and PyPy
+- Tested on Linux, Mac OS X, and Windows
+- Tested using Travis CI and AppVeyor CI
+
+.. image:: https://api.travis-ci.org/grantjenks/python-diskcache.svg?branch=master
+ :target: http://www.grantjenks.com/docs/diskcache/
+
+.. image:: https://ci.appveyor.com/api/projects/status/github/grantjenks/python-diskcache?branch=master&svg=true
+ :target: http://www.grantjenks.com/docs/diskcache/
+
+Quickstart
+----------
+
+Installing `DiskCache`_ is simple with `pip <http://www.pip-installer.org/>`_::
+
+ $ pip install diskcache
+
+You can access documentation in the interpreter with Python's built-in help
+function::
+
+ >>> import diskcache
+ >>> help(diskcache)
+
+The core of `DiskCache`_ is three data types intended for caching. `Cache`_
+objects manage a SQLite database and filesystem directory to store key and
+value pairs. `FanoutCache`_ provides a sharding layer to utilize multiple
+caches and `DjangoCache`_ integrates that with `Django`_::
+
+ >>> from diskcache import Cache, FanoutCache, DjangoCache
+ >>> help(Cache)
+ >>> help(FanoutCache)
+ >>> help(DjangoCache)
+
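+For example, a `FanoutCache`_ behaves like a dictionary backed by disk (a
+minimal sketch; the directory name and shard count are illustrative)::
+
+ >>> from diskcache import FanoutCache
+ >>> cache = FanoutCache('tmp/fanout', shards=4)
+ >>> cache['key'] = 'value'
+ >>> cache['key']
+ 'value'
+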
+Built atop the caching data types are `Deque`_ and `Index`_, which work as
+cross-process, persistent replacements for Python's ``collections.deque`` and
+``dict``. These implement the sequence and mapping container base classes::
+
+ >>> from diskcache import Deque, Index
+ >>> help(Deque)
+ >>> help(Index)
+
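+For example, an `Index`_ offers a persistent, dict-like mapping (a minimal
+sketch; the directory name is illustrative)::
+
+ >>> from diskcache import Index
+ >>> index = Index('tmp/index')
+ >>> index['key'] = 'value'
+ >>> index['key']
+ 'value'
+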
+Finally, a number of `recipes`_ for cross-process synchronization are provided
+using an underlying cache. Features like memoization with cache stampede
+prevention, cross-process locking, and cross-process throttling are available::
+
+ >>> from diskcache import memoize_stampede, Lock, throttle
+ >>> help(memoize_stampede)
+ >>> help(Lock)
+ >>> help(throttle)
+
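+For instance, memoization with stampede prevention decorates a function with
+a cache-backed wrapper (a minimal sketch; the expiration time is
+illustrative)::
+
+ >>> from diskcache import Cache, memoize_stampede
+ >>> cache = Cache('tmp')
+ >>> @memoize_stampede(cache, expire=60)
+ ... def fibonacci(number):
+ ...     if number < 2:
+ ...         return number
+ ...     return fibonacci(number - 1) + fibonacci(number - 2)
+ >>> fibonacci(10)
+ 55
+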
+Python's docstrings are a quick way to get started but not intended as a
+replacement for the `DiskCache Tutorial`_ and `DiskCache API Reference`_.
+
+.. _`Cache`: http://www.grantjenks.com/docs/diskcache/tutorial.html#cache
+.. _`FanoutCache`: http://www.grantjenks.com/docs/diskcache/tutorial.html#fanoutcache
+.. _`DjangoCache`: http://www.grantjenks.com/docs/diskcache/tutorial.html#djangocache
+.. _`Django`: https://www.djangoproject.com/
+.. _`Deque`: http://www.grantjenks.com/docs/diskcache/tutorial.html#deque
+.. _`Index`: http://www.grantjenks.com/docs/diskcache/tutorial.html#index
+.. _`recipes`: http://www.grantjenks.com/docs/diskcache/tutorial.html#recipes
+
+User Guide
+----------
+
+For those wanting more details, this part of the documentation describes
+tutorial, benchmarks, API, and development.
+
+* `DiskCache Tutorial`_
+* `DiskCache Cache Benchmarks`_
+* `DiskCache DjangoCache Benchmarks`_
+* `Case Study: Web Crawler`_
+* `Case Study: Landing Page Caching`_
+* `Talk: All Things Cached - SF Python 2017 Meetup`_
+* `DiskCache API Reference`_
+* `DiskCache Development`_
+
+.. _`DiskCache Tutorial`: http://www.grantjenks.com/docs/diskcache/tutorial.html
+.. _`DiskCache Cache Benchmarks`: http://www.grantjenks.com/docs/diskcache/cache-benchmarks.html
+.. _`DiskCache DjangoCache Benchmarks`: http://www.grantjenks.com/docs/diskcache/djangocache-benchmarks.html
+.. _`Talk: All Things Cached - SF Python 2017 Meetup`: http://www.grantjenks.com/docs/diskcache/sf-python-2017-meetup-talk.html
+.. _`Case Study: Web Crawler`: http://www.grantjenks.com/docs/diskcache/case-study-web-crawler.html
+.. _`Case Study: Landing Page Caching`: http://www.grantjenks.com/docs/diskcache/case-study-landing-page-caching.html
+.. _`DiskCache API Reference`: http://www.grantjenks.com/docs/diskcache/api.html
+.. _`DiskCache Development`: http://www.grantjenks.com/docs/diskcache/development.html
+
+Comparisons
+-----------
+
+Comparisons to popular projects related to `DiskCache`_.
+
+Key-Value Stores
+................
+
+`DiskCache`_ is mostly a simple key-value store. Feature comparisons with four
+other projects are shown in the tables below.
+
+* `dbm`_ is part of Python's standard library and implements a generic
+ interface to variants of the DBM database — dbm.gnu or dbm.ndbm. If none of
+ these modules is installed, the slow-but-simple dbm.dumb is used.
+* `shelve`_ is part of Python's standard library and implements a “shelf” as a
+ persistent, dictionary-like object. The difference with “dbm” databases is
+ that the values can be anything that the pickle module can handle.
+* `sqlitedict`_ is a lightweight wrapper around Python's sqlite3 database with
+ a simple, Pythonic dict-like interface and support for multi-thread
+ access. Keys are arbitrary strings, values arbitrary pickle-able objects.
+* `pickleDB`_ is a lightweight and simple key-value store. It is built upon
+ Python's simplejson module and was inspired by Redis. It is licensed with the
+ BSD three-clause license.
+
+.. _`dbm`: https://docs.python.org/3/library/dbm.html
+.. _`shelve`: https://docs.python.org/3/library/shelve.html
+.. _`sqlitedict`: https://github.com/RaRe-Technologies/sqlitedict
+.. _`pickleDB`: https://pythonhosted.org/pickleDB/
+
+**Features**
+
+================ ============= ========= ========= ============ ============
+Feature diskcache dbm shelve sqlitedict pickleDB
+================ ============= ========= ========= ============ ============
+Atomic? Always Maybe Maybe Maybe No
+Persistent? Yes Yes Yes Yes Yes
+Thread-safe? Yes No No Yes No
+Process-safe? Yes No No Maybe No
+Backend? SQLite DBM DBM SQLite File
+Serialization? Customizable None Pickle Customizable JSON
+Data Types? Mapping/Deque Mapping Mapping Mapping Mapping
+Ordering? Insert/Sorted None None None None
+Eviction? LRU/LFU/more None None None None
+Vacuum? Automatic Maybe Maybe Manual Automatic
+Transactions? Yes No No Maybe No
+Multiprocessing? Yes No No No No
+Forkable? Yes No No No No
+Metadata? Yes No No No No
+================ ============= ========= ========= ============ ============
+
+**Quality**
+
+================ ============= ========= ========= ============ ============
+Project diskcache dbm shelve sqlitedict pickleDB
+================ ============= ========= ========= ============ ============
+Tests? Yes Yes Yes Yes Yes
+Coverage? Yes Yes Yes Yes No
+Stress? Yes No No No No
+CI Tests? Linux/Windows Yes Yes Linux No
+Python? 2/3/PyPy All All 2/3 2/3
+License? Apache2 Python Python Apache2 3-Clause BSD
+Docs? Extensive Summary Summary Readme Summary
+Benchmarks? Yes No No No No
+Sources? GitHub GitHub GitHub GitHub GitHub
+Pure-Python? Yes Yes Yes Yes Yes
+Server? No No No No No
+Integrations? Django None None None None
+================ ============= ========= ========= ============ ============
+
+**Timings**
+
+These are rough measurements. See `DiskCache Cache Benchmarks`_ for more
+rigorous data.
+
+================ ============= ========= ========= ============ ============
+Project diskcache dbm shelve sqlitedict pickleDB
+================ ============= ========= ========= ============ ============
+get 25 µs 36 µs 41 µs 513 µs 92 µs
+set 198 µs 900 µs 928 µs 697 µs 1,020 µs
+delete 248 µs 740 µs 702 µs 1,717 µs 1,020 µs
+================ ============= ========= ========= ============ ============
+
+Caching Libraries
+.................
+
+* `joblib.Memory`_ provides caching functions and works by explicitly saving
+ the inputs and outputs to files. It is designed to work with non-hashable and
+ potentially large input and output data types such as numpy arrays.
+* `klepto`_ extends Python’s `lru_cache` to utilize different keymaps and
+ alternate caching algorithms, such as `lfu_cache` and `mru_cache`. Klepto
+ uses a simple dictionary-style interface for all caches and archives.
+
+.. _`klepto`: https://pypi.org/project/klepto/
+.. _`joblib.Memory`: https://joblib.readthedocs.io/en/latest/memory.html
+
+Data Structures
+...............
+
+* `dict`_ is a mapping object that maps hashable keys to arbitrary
+ values. Mappings are mutable objects. There is currently only one standard
+ Python mapping type, the dictionary.
+* `pandas`_ is a Python package providing fast, flexible, and expressive data
+ structures designed to make working with “relational” or “labeled” data both
+ easy and intuitive.
+* `Sorted Containers`_ is an Apache2 licensed sorted collections library,
+ written in pure-Python, and fast as C-extensions. Sorted Containers
+ implements sorted list, sorted dictionary, and sorted set data types.
+
+.. _`dict`: https://docs.python.org/3/library/stdtypes.html#typesmapping
+.. _`pandas`: https://pandas.pydata.org/
+.. _`Sorted Containers`: http://www.grantjenks.com/docs/sortedcontainers/
+
+Pure-Python Databases
+.....................
+
+* `ZODB`_ supports an isomorphic interface for database operations, which
+  means there's little impact on your code to make objects persistent and
+  there's no database mapper that partially hides the database.
+* `CodernityDB`_ is an open source, pure-Python, multi-platform, schema-less,
+ NoSQL database and includes an HTTP server version, and a Python client
+ library that aims to be 100% compatible with the embedded version.
+* `TinyDB`_ is a tiny, document oriented database optimized for your
+ happiness. If you need a simple database with a clean API that just works
+ without lots of configuration, TinyDB might be the right choice for you.
+
+.. _`ZODB`: http://www.zodb.org/
+.. _`CodernityDB`: https://pypi.org/project/CodernityDB/
+.. _`TinyDB`: https://tinydb.readthedocs.io/
+
+Object Relational Mappings (ORM)
+................................
+
+* `Django ORM`_ provides models that are the single, definitive source of
+  information about data and contain the essential fields and behaviors of
+  the stored data. Generally, each model maps to a single SQL database table.
+* `SQLAlchemy`_ is the Python SQL toolkit and Object Relational Mapper that
+ gives application developers the full power and flexibility of SQL. It
+ provides a full suite of well known enterprise-level persistence patterns.
+* `Peewee`_ is a simple and small ORM. It has few (but expressive) concepts,
+ making it easy to learn and intuitive to use. Peewee supports Sqlite, MySQL,
+ and PostgreSQL with tons of extensions.
+* `SQLObject`_ is a popular Object Relational Manager for providing an object
+ interface to your database, with tables as classes, rows as instances, and
+ columns as attributes.
+* `Pony ORM`_ is a Python ORM with beautiful query syntax. Use Python syntax
+ for interacting with the database. Pony translates such queries into SQL and
+ executes them in the database in the most efficient way.
+
+.. _`Django ORM`: https://docs.djangoproject.com/en/dev/topics/db/
+.. _`SQLAlchemy`: https://www.sqlalchemy.org/
+.. _`Peewee`: http://docs.peewee-orm.com/
+.. _`dataset`: https://dataset.readthedocs.io/
+.. _`SQLObject`: http://sqlobject.org/
+.. _`Pony ORM`: https://ponyorm.com/
+
+SQL Databases
+.............
+
+* `SQLite`_ is part of Python's standard library and provides a lightweight
+ disk-based database that doesn’t require a separate server process and allows
+ accessing the database using a nonstandard variant of the SQL query language.
+* `MySQL`_ is one of the world’s most popular open source databases and has
+ become a leading database choice for web-based applications. MySQL includes a
+ standardized database driver for Python platforms and development.
+* `PostgreSQL`_ is a powerful, open source object-relational database system
+ with over 30 years of active development. Psycopg is the most popular
+ PostgreSQL adapter for the Python programming language.
+* `Oracle DB`_ is a relational database management system (RDBMS) from the
+ Oracle Corporation. Originally developed in 1977, Oracle DB is one of the
+ most trusted and widely used enterprise relational database engines.
+* `Microsoft SQL Server`_ is a relational database management system developed
+ by Microsoft. As a database server, it stores and retrieves data as requested
+ by other software applications.
+
+.. _`SQLite`: https://docs.python.org/3/library/sqlite3.html
+.. _`MySQL`: https://dev.mysql.com/downloads/connector/python/
+.. _`PostgreSQL`: http://initd.org/psycopg/
+.. _`Oracle DB`: https://pypi.org/project/cx_Oracle/
+.. _`Microsoft SQL Server`: https://pypi.org/project/pyodbc/
+
+Other Databases
+...............
+
+* `Memcached`_ is a free and open source, high-performance, distributed memory
+  object caching system, generic in nature, but intended for use in speeding
+  up dynamic web applications by alleviating database load.
+* `Redis`_ is an open source, in-memory data structure store, used as a
+ database, cache and message broker. It supports data structures such as
+ strings, hashes, lists, sets, sorted sets with range queries, and more.
+* `MongoDB`_ is a cross-platform document-oriented database program. Classified
+  as a NoSQL database program, MongoDB uses JSON-like documents with optional
+  schemas. PyMongo is the recommended way to work with MongoDB from Python.
+* `LMDB`_ is a lightning-fast, memory-mapped database. With memory-mapped
+ files, it has the read performance of a pure in-memory database while
+ retaining the persistence of standard disk-based databases.
+* `BerkeleyDB`_ is a software library intended to provide a high-performance
+ embedded database for key/value data. Berkeley DB is a programmatic toolkit
+ that provides built-in database support for desktop and server applications.
+* `LevelDB`_ is a fast key-value storage library written at Google that
+ provides an ordered mapping from string keys to string values. Data is stored
+ sorted by key and users can provide a custom comparison function.
+
+.. _`Memcached`: https://pypi.org/project/python-memcached/
+.. _`MongoDB`: https://api.mongodb.com/python/current/
+.. _`Redis`: https://redis.io/clients#python
+.. _`LMDB`: https://lmdb.readthedocs.io/
+.. _`BerkeleyDB`: https://pypi.org/project/bsddb3/
+.. _`LevelDB`: https://plyvel.readthedocs.io/
+
+Reference
+---------
+
+* `DiskCache Documentation`_
+* `DiskCache at PyPI`_
+* `DiskCache at GitHub`_
+* `DiskCache Issue Tracker`_
+
+.. _`DiskCache Documentation`: http://www.grantjenks.com/docs/diskcache/
+.. _`DiskCache at PyPI`: https://pypi.python.org/pypi/diskcache/
+.. _`DiskCache at GitHub`: https://github.com/grantjenks/python-diskcache/
+.. _`DiskCache Issue Tracker`: https://github.com/grantjenks/python-diskcache/issues/
+
+License
+-------
+
+Copyright 2016-2019 Grant Jenks
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use
+this file except in compliance with the License. You may obtain a copy of the
+License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software distributed
+under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+
+.. _`DiskCache`: http://www.grantjenks.com/docs/diskcache/
diff --git a/third_party/python/diskcache/diskcache/__init__.py b/third_party/python/diskcache/diskcache/__init__.py
new file mode 100644
index 0000000000..192524e6ce
--- /dev/null
+++ b/third_party/python/diskcache/diskcache/__init__.py
@@ -0,0 +1,51 @@
+"""
+DiskCache API Reference
+=======================
+
+The :doc:`tutorial` provides a helpful walkthrough of most methods.
+
+"""
+
+from .core import Cache, Disk, EmptyDirWarning, JSONDisk, UnknownFileWarning, Timeout
+from .core import DEFAULT_SETTINGS, ENOVAL, EVICTION_POLICY, UNKNOWN
+from .fanout import FanoutCache
+from .persistent import Deque, Index
+from .recipes import Averager, BoundedSemaphore, Lock, RLock
+from .recipes import barrier, memoize_stampede, throttle
+
+__all__ = [
+ 'Averager',
+ 'BoundedSemaphore',
+ 'Cache',
+ 'DEFAULT_SETTINGS',
+ 'Deque',
+ 'Disk',
+ 'ENOVAL',
+ 'EVICTION_POLICY',
+ 'EmptyDirWarning',
+ 'FanoutCache',
+ 'Index',
+ 'JSONDisk',
+ 'Lock',
+ 'RLock',
+ 'Timeout',
+ 'UNKNOWN',
+ 'UnknownFileWarning',
+ 'barrier',
+ 'memoize_stampede',
+ 'throttle',
+]
+
+try:
+ from .djangocache import DjangoCache # pylint: disable=wrong-import-position
+ __all__.append('DjangoCache')
+except Exception: # pylint: disable=broad-except
+ # Django not installed or not setup so ignore.
+ pass
+
+__title__ = 'diskcache'
+__version__ = '4.1.0'
+__build__ = 0x040100
+__author__ = 'Grant Jenks'
+__license__ = 'Apache 2.0'
+__copyright__ = 'Copyright 2016-2018 Grant Jenks'
diff --git a/third_party/python/diskcache/diskcache/cli.py b/third_party/python/diskcache/diskcache/cli.py
new file mode 100644
index 0000000000..44bffebfcc
--- /dev/null
+++ b/third_party/python/diskcache/diskcache/cli.py
@@ -0,0 +1 @@
+"Command line interface to disk cache."
diff --git a/third_party/python/diskcache/diskcache/core.py b/third_party/python/diskcache/diskcache/core.py
new file mode 100644
index 0000000000..0c8fd2c745
--- /dev/null
+++ b/third_party/python/diskcache/diskcache/core.py
@@ -0,0 +1,2481 @@
+"""Core disk and file backed cache API.
+
+"""
+
+import codecs
+import contextlib as cl
+import errno
+import functools as ft
+import io
+import json
+import os
+import os.path as op
+import pickletools
+import sqlite3
+import struct
+import sys
+import tempfile
+import threading
+import time
+import warnings
+import zlib
+
+############################################################################
+# BEGIN Python 2/3 Shims
+############################################################################
+
+if sys.hexversion < 0x03000000:
+ import cPickle as pickle # pylint: disable=import-error
+ # ISSUE #25 Fix for http://bugs.python.org/issue10211
+ from cStringIO import StringIO as BytesIO # pylint: disable=import-error
+ from thread import get_ident # pylint: disable=import-error,no-name-in-module
+ TextType = unicode # pylint: disable=invalid-name,undefined-variable
+ BytesType = str
+ INT_TYPES = int, long # pylint: disable=undefined-variable
+ range = xrange # pylint: disable=redefined-builtin,invalid-name,undefined-variable
+ io_open = io.open # pylint: disable=invalid-name
+else:
+ import pickle
+ from io import BytesIO # pylint: disable=ungrouped-imports
+ from threading import get_ident
+ TextType = str
+ BytesType = bytes
+ INT_TYPES = (int,)
+ io_open = open # pylint: disable=invalid-name
+
+def full_name(func):
+ "Return full name of `func` by adding the module and function name."
+ try:
+ # The __qualname__ attribute is only available in Python 3.3 and later.
+ # GrantJ 2019-03-29 Remove after support for Python 2 is dropped.
+ name = func.__qualname__
+ except AttributeError:
+ name = func.__name__
+ return func.__module__ + '.' + name
+
+############################################################################
+# END Python 2/3 Shims
+############################################################################
+
+try:
+ WindowsError
+except NameError:
+ class WindowsError(Exception):
+ "Windows error place-holder on platforms without support."
+
+class Constant(tuple):
+ "Pretty display of immutable constant."
+ def __new__(cls, name):
+ return tuple.__new__(cls, (name,))
+
+ def __repr__(self):
+ return '%s' % self[0]
+
+DBNAME = 'cache.db'
+ENOVAL = Constant('ENOVAL')
+UNKNOWN = Constant('UNKNOWN')
+
+MODE_NONE = 0
+MODE_RAW = 1
+MODE_BINARY = 2
+MODE_TEXT = 3
+MODE_PICKLE = 4
+
+DEFAULT_SETTINGS = {
+ u'statistics': 0, # False
+ u'tag_index': 0, # False
+ u'eviction_policy': u'least-recently-stored',
+ u'size_limit': 2 ** 30, # 1gb
+ u'cull_limit': 10,
+ u'sqlite_auto_vacuum': 1, # FULL
+ u'sqlite_cache_size': 2 ** 13, # 8,192 pages
+ u'sqlite_journal_mode': u'wal',
+ u'sqlite_mmap_size': 2 ** 26, # 64mb
+ u'sqlite_synchronous': 1, # NORMAL
+ u'disk_min_file_size': 2 ** 15, # 32kb
+ u'disk_pickle_protocol': pickle.HIGHEST_PROTOCOL,
+}
+
+METADATA = {
+ u'count': 0,
+ u'size': 0,
+ u'hits': 0,
+ u'misses': 0,
+}
+
+EVICTION_POLICY = {
+ 'none': {
+ 'init': None,
+ 'get': None,
+ 'cull': None,
+ },
+ 'least-recently-stored': {
+ 'init': (
+ 'CREATE INDEX IF NOT EXISTS Cache_store_time ON'
+ ' Cache (store_time)'
+ ),
+ 'get': None,
+ 'cull': 'SELECT {fields} FROM Cache ORDER BY store_time LIMIT ?',
+ },
+ 'least-recently-used': {
+ 'init': (
+ 'CREATE INDEX IF NOT EXISTS Cache_access_time ON'
+ ' Cache (access_time)'
+ ),
+ 'get': 'access_time = {now}',
+ 'cull': 'SELECT {fields} FROM Cache ORDER BY access_time LIMIT ?',
+ },
+ 'least-frequently-used': {
+ 'init': (
+ 'CREATE INDEX IF NOT EXISTS Cache_access_count ON'
+ ' Cache (access_count)'
+ ),
+ 'get': 'access_count = access_count + 1',
+ 'cull': 'SELECT {fields} FROM Cache ORDER BY access_count LIMIT ?',
+ },
+}
+
+
+class Disk(object):
+ "Cache key and value serialization for SQLite database and files."
+ def __init__(self, directory, min_file_size=0, pickle_protocol=0):
+ """Initialize disk instance.
+
+ :param str directory: directory path
+ :param int min_file_size: minimum size for file use
+ :param int pickle_protocol: pickle protocol for serialization
+
+ """
+ self._directory = directory
+ self.min_file_size = min_file_size
+ self.pickle_protocol = pickle_protocol
+
+
+ def hash(self, key):
+ """Compute portable hash for `key`.
+
+ :param key: key to hash
+ :return: hash value
+
+ """
+ mask = 0xFFFFFFFF
+ disk_key, _ = self.put(key)
+ type_disk_key = type(disk_key)
+
+ if type_disk_key is sqlite3.Binary:
+ return zlib.adler32(disk_key) & mask
+ elif type_disk_key is TextType:
+ return zlib.adler32(disk_key.encode('utf-8')) & mask # pylint: disable=no-member
+ elif type_disk_key in INT_TYPES:
+ return disk_key % mask
+ else:
+ assert type_disk_key is float
+ return zlib.adler32(struct.pack('!d', disk_key)) & mask
+
+
+ def put(self, key):
+ """Convert `key` to fields key and raw for Cache table.
+
+ :param key: key to convert
+ :return: (database key, raw boolean) pair
+
+ """
+ # pylint: disable=bad-continuation,unidiomatic-typecheck
+ type_key = type(key)
+
+ if type_key is BytesType:
+ return sqlite3.Binary(key), True
+ elif ((type_key is TextType)
+ or (type_key in INT_TYPES
+ and -9223372036854775808 <= key <= 9223372036854775807)
+ or (type_key is float)):
+ return key, True
+ else:
+ data = pickle.dumps(key, protocol=self.pickle_protocol)
+ result = pickletools.optimize(data)
+ return sqlite3.Binary(result), False
+
+
+ def get(self, key, raw):
+ """Convert fields `key` and `raw` from Cache table to key.
+
+ :param key: database key to convert
+ :param bool raw: flag indicating raw database storage
+ :return: corresponding Python key
+
+ """
+ # pylint: disable=no-self-use,unidiomatic-typecheck
+ if raw:
+ return BytesType(key) if type(key) is sqlite3.Binary else key
+ else:
+ return pickle.load(BytesIO(key))
+
+
+ def store(self, value, read, key=UNKNOWN):
+ """Convert `value` to fields size, mode, filename, and value for Cache
+ table.
+
+ :param value: value to convert
+ :param bool read: True when value is file-like object
+ :param key: key for item (default UNKNOWN)
+ :return: (size, mode, filename, value) tuple for Cache table
+
+ """
+ # pylint: disable=unidiomatic-typecheck
+ type_value = type(value)
+ min_file_size = self.min_file_size
+
+ if ((type_value is TextType and len(value) < min_file_size)
+ or (type_value in INT_TYPES
+ and -9223372036854775808 <= value <= 9223372036854775807)
+ or (type_value is float)):
+ return 0, MODE_RAW, None, value
+ elif type_value is BytesType:
+ if len(value) < min_file_size:
+ return 0, MODE_RAW, None, sqlite3.Binary(value)
+ else:
+ filename, full_path = self.filename(key, value)
+
+ with open(full_path, 'wb') as writer:
+ writer.write(value)
+
+ return len(value), MODE_BINARY, filename, None
+ elif type_value is TextType:
+ filename, full_path = self.filename(key, value)
+
+ with io_open(full_path, 'w', encoding='UTF-8') as writer:
+ writer.write(value)
+
+ size = op.getsize(full_path)
+ return size, MODE_TEXT, filename, None
+ elif read:
+ size = 0
+ reader = ft.partial(value.read, 2 ** 22)
+ filename, full_path = self.filename(key, value)
+
+ with open(full_path, 'wb') as writer:
+ for chunk in iter(reader, b''):
+ size += len(chunk)
+ writer.write(chunk)
+
+ return size, MODE_BINARY, filename, None
+ else:
+ result = pickle.dumps(value, protocol=self.pickle_protocol)
+
+ if len(result) < min_file_size:
+ return 0, MODE_PICKLE, None, sqlite3.Binary(result)
+ else:
+ filename, full_path = self.filename(key, value)
+
+ with open(full_path, 'wb') as writer:
+ writer.write(result)
+
+ return len(result), MODE_PICKLE, filename, None
+
+
+ def fetch(self, mode, filename, value, read):
+ """Convert fields `mode`, `filename`, and `value` from Cache table to
+ value.
+
+ :param int mode: value mode raw, binary, text, or pickle
+ :param str filename: filename of corresponding value
+ :param value: database value
+ :param bool read: when True, return an open file handle
+ :return: corresponding Python value
+
+ """
+ # pylint: disable=no-self-use,unidiomatic-typecheck
+ if mode == MODE_RAW:
+ return BytesType(value) if type(value) is sqlite3.Binary else value
+ elif mode == MODE_BINARY:
+ if read:
+ return open(op.join(self._directory, filename), 'rb')
+ else:
+ with open(op.join(self._directory, filename), 'rb') as reader:
+ return reader.read()
+ elif mode == MODE_TEXT:
+ full_path = op.join(self._directory, filename)
+ with io_open(full_path, 'r', encoding='UTF-8') as reader:
+ return reader.read()
+ elif mode == MODE_PICKLE:
+ if value is None:
+ with open(op.join(self._directory, filename), 'rb') as reader:
+ return pickle.load(reader)
+ else:
+ return pickle.load(BytesIO(value))
+
+
+ def filename(self, key=UNKNOWN, value=UNKNOWN):
+ """Return filename and full-path tuple for file storage.
+
+ Filename will be a randomly generated 28 character hexadecimal string
+ with ".val" suffixed. Two levels of sub-directories will be used to
+ reduce the size of directories. On older filesystems, lookups in
+ directories with many files may be slow.
+
+ The default implementation ignores the `key` and `value` parameters.
+
+ In some scenarios, for example :meth:`Cache.push
+ <diskcache.Cache.push>`, the `key` or `value` may not be known when the
+ item is stored in the cache.
+
+ :param key: key for item (default UNKNOWN)
+ :param value: value for item (default UNKNOWN)
+
+ """
+ # pylint: disable=unused-argument
+ hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8')
+ sub_dir = op.join(hex_name[:2], hex_name[2:4])
+ name = hex_name[4:] + '.val'
+ directory = op.join(self._directory, sub_dir)
+
+ try:
+ os.makedirs(directory)
+ except OSError as error:
+ if error.errno != errno.EEXIST:
+ raise
+
+ filename = op.join(sub_dir, name)
+ full_path = op.join(self._directory, filename)
+ return filename, full_path
+
+
+ def remove(self, filename):
+ """Remove a file given by `filename`.
+
+ This method is cross-thread and cross-process safe. If an "error no
+ entry" occurs, it is suppressed.
+
+ :param str filename: relative path to file
+
+ """
+ full_path = op.join(self._directory, filename)
+
+ try:
+ os.remove(full_path)
+ except WindowsError:
+ pass
+ except OSError as error:
+ if error.errno != errno.ENOENT:
+ # ENOENT may occur if two caches attempt to delete the same
+ # file at the same time.
+ raise
+
+
+class JSONDisk(Disk):
+ "Cache key and value using JSON serialization with zlib compression."
+ def __init__(self, directory, compress_level=1, **kwargs):
+ """Initialize JSON disk instance.
+
+ Keys and values are compressed using the zlib library. The
+ `compress_level` is an integer from 0 to 9 controlling the level of
+ compression; 1 is fastest and produces the least compression, 9 is
+ slowest and produces the most compression, and 0 is no compression.
+
+ :param str directory: directory path
+ :param int compress_level: zlib compression level (default 1)
+ :param kwargs: super class arguments
+
+ """
+ self.compress_level = compress_level
+ super(JSONDisk, self).__init__(directory, **kwargs)
+
+
+ def put(self, key):
+ json_bytes = json.dumps(key).encode('utf-8')
+ data = zlib.compress(json_bytes, self.compress_level)
+ return super(JSONDisk, self).put(data)
+
+
+ def get(self, key, raw):
+ data = super(JSONDisk, self).get(key, raw)
+ return json.loads(zlib.decompress(data).decode('utf-8'))
+
+
+ def store(self, value, read, key=UNKNOWN):
+ if not read:
+ json_bytes = json.dumps(value).encode('utf-8')
+ value = zlib.compress(json_bytes, self.compress_level)
+ return super(JSONDisk, self).store(value, read, key=key)
+
+
+ def fetch(self, mode, filename, value, read):
+ data = super(JSONDisk, self).fetch(mode, filename, value, read)
+ if not read:
+ data = json.loads(zlib.decompress(data).decode('utf-8'))
+ return data
+
+
+class Timeout(Exception):
+ "Database timeout expired."
+
+
+class UnknownFileWarning(UserWarning):
+ "Warning used by Cache.check for unknown files."
+
+
+class EmptyDirWarning(UserWarning):
+ "Warning used by Cache.check for empty directories."
+
+
+def args_to_key(base, args, kwargs, typed):
+ """Create cache key out of function arguments.
+
+ :param tuple base: base of key
+ :param tuple args: function arguments
+ :param dict kwargs: function keyword arguments
+ :param bool typed: include types in cache key
+ :return: cache key tuple
+
+ """
+ key = base + args
+
+ if kwargs:
+ key += (ENOVAL,)
+ sorted_items = sorted(kwargs.items())
+
+ for item in sorted_items:
+ key += item
+
+ if typed:
+ key += tuple(type(arg) for arg in args)
+
+ if kwargs:
+ key += tuple(type(value) for _, value in sorted_items)
+
+ return key
+
+
+class Cache(object):
+ "Disk and file backed cache."
+ # pylint: disable=bad-continuation
+ def __init__(self, directory=None, timeout=60, disk=Disk, **settings):
+ """Initialize cache instance.
+
+ :param str directory: cache directory
+ :param float timeout: SQLite connection timeout
+ :param disk: Disk type or subclass for serialization
+ :param settings: any of DEFAULT_SETTINGS
+
+ """
+ try:
+ assert issubclass(disk, Disk)
+ except (TypeError, AssertionError):
+ raise ValueError('disk must subclass diskcache.Disk')
+
+ if directory is None:
+ directory = tempfile.mkdtemp(prefix='diskcache-')
+ directory = op.expanduser(directory)
+ directory = op.expandvars(directory)
+
+ self._directory = directory
+ self._timeout = 0 # Manually handle retries during initialization.
+ self._local = threading.local()
+ self._txn_id = None
+
+ if not op.isdir(directory):
+ try:
+ os.makedirs(directory, 0o755)
+ except OSError as error:
+ if error.errno != errno.EEXIST:
+ raise EnvironmentError(
+ error.errno,
+ 'Cache directory "%s" does not exist'
+ ' and could not be created' % self._directory
+ )
+
+ sql = self._sql_retry
+
+ # Setup Settings table.
+
+ try:
+ current_settings = dict(sql(
+ 'SELECT key, value FROM Settings'
+ ).fetchall())
+ except sqlite3.OperationalError:
+ current_settings = {}
+
+ sets = DEFAULT_SETTINGS.copy()
+ sets.update(current_settings)
+ sets.update(settings)
+
+ for key in METADATA:
+ sets.pop(key, None)
+
+ # Chance to set pragmas before any tables are created.
+
+ for key, value in sorted(sets.items()):
+ if key.startswith('sqlite_'):
+ self.reset(key, value, update=False)
+
+ sql('CREATE TABLE IF NOT EXISTS Settings ('
+ ' key TEXT NOT NULL UNIQUE,'
+ ' value)'
+ )
+
+ # Setup Disk object (must happen after settings initialized).
+
+ kwargs = {
+ key[5:]: value for key, value in sets.items()
+ if key.startswith('disk_')
+ }
+ self._disk = disk(directory, **kwargs)
+
+ # Set cached attributes: updates settings and sets pragmas.
+
+ for key, value in sets.items():
+ query = 'INSERT OR REPLACE INTO Settings VALUES (?, ?)'
+ sql(query, (key, value))
+ self.reset(key, value)
+
+ for key, value in METADATA.items():
+ query = 'INSERT OR IGNORE INTO Settings VALUES (?, ?)'
+ sql(query, (key, value))
+ self.reset(key)
+
+ (self._page_size,), = sql('PRAGMA page_size').fetchall()
+
+ # Setup Cache table.
+
+ sql('CREATE TABLE IF NOT EXISTS Cache ('
+ ' rowid INTEGER PRIMARY KEY,'
+ ' key BLOB,'
+ ' raw INTEGER,'
+ ' store_time REAL,'
+ ' expire_time REAL,'
+ ' access_time REAL,'
+ ' access_count INTEGER DEFAULT 0,'
+ ' tag BLOB,'
+ ' size INTEGER DEFAULT 0,'
+ ' mode INTEGER DEFAULT 0,'
+ ' filename TEXT,'
+ ' value BLOB)'
+ )
+
+ sql('CREATE UNIQUE INDEX IF NOT EXISTS Cache_key_raw ON'
+ ' Cache(key, raw)'
+ )
+
+ sql('CREATE INDEX IF NOT EXISTS Cache_expire_time ON'
+ ' Cache (expire_time)'
+ )
+
+ query = EVICTION_POLICY[self.eviction_policy]['init']
+
+ if query is not None:
+ sql(query)
+
+ # Use triggers to keep Metadata updated.
+
+ sql('CREATE TRIGGER IF NOT EXISTS Settings_count_insert'
+ ' AFTER INSERT ON Cache FOR EACH ROW BEGIN'
+ ' UPDATE Settings SET value = value + 1'
+ ' WHERE key = "count"; END'
+ )
+
+ sql('CREATE TRIGGER IF NOT EXISTS Settings_count_delete'
+ ' AFTER DELETE ON Cache FOR EACH ROW BEGIN'
+ ' UPDATE Settings SET value = value - 1'
+ ' WHERE key = "count"; END'
+ )
+
+ sql('CREATE TRIGGER IF NOT EXISTS Settings_size_insert'
+ ' AFTER INSERT ON Cache FOR EACH ROW BEGIN'
+ ' UPDATE Settings SET value = value + NEW.size'
+ ' WHERE key = "size"; END'
+ )
+
+ sql('CREATE TRIGGER IF NOT EXISTS Settings_size_update'
+ ' AFTER UPDATE ON Cache FOR EACH ROW BEGIN'
+ ' UPDATE Settings'
+ ' SET value = value + NEW.size - OLD.size'
+ ' WHERE key = "size"; END'
+ )
+
+ sql('CREATE TRIGGER IF NOT EXISTS Settings_size_delete'
+ ' AFTER DELETE ON Cache FOR EACH ROW BEGIN'
+ ' UPDATE Settings SET value = value - OLD.size'
+ ' WHERE key = "size"; END'
+ )
+
+ # Create tag index if requested.
+
+ if self.tag_index: # pylint: disable=no-member
+ self.create_tag_index()
+ else:
+ self.drop_tag_index()
+
+ # Close and re-open database connection with given timeout.
+
+ self.close()
+ self._timeout = timeout
+ self._sql # pylint: disable=pointless-statement
+
+
+ @property
+ def directory(self):
+ """Cache directory."""
+ return self._directory
+
+
+ @property
+ def timeout(self):
+ """SQLite connection timeout value in seconds."""
+ return self._timeout
+
+
+ @property
+ def disk(self):
+ """Disk used for serialization."""
+ return self._disk
+
+
+ @property
+ def _con(self):
+ # Check process ID to support process forking. If the process
+ # ID changes, close the connection and update the process ID.
+
+ local_pid = getattr(self._local, 'pid', None)
+ pid = os.getpid()
+
+ if local_pid != pid:
+ self.close()
+ self._local.pid = pid
+
+ con = getattr(self._local, 'con', None)
+
+ if con is None:
+ con = self._local.con = sqlite3.connect(
+ op.join(self._directory, DBNAME),
+ timeout=self._timeout,
+ isolation_level=None,
+ )
+
+ # Some SQLite pragmas work on a per-connection basis so
+ # query the Settings table and reset the pragmas. The
+ # Settings table may not exist so catch and ignore the
+ # OperationalError that may occur.
+
+ try:
+ select = 'SELECT key, value FROM Settings'
+ settings = con.execute(select).fetchall()
+ except sqlite3.OperationalError:
+ pass
+ else:
+ for key, value in settings:
+ if key.startswith('sqlite_'):
+ self.reset(key, value, update=False)
+
+ return con
+
+
+ @property
+ def _sql(self):
+ return self._con.execute
+
+
+ @property
+ def _sql_retry(self):
+ sql = self._sql
+
+ # 2018-11-01 GrantJ - Some SQLite builds/versions handle
+ # the SQLITE_BUSY return value and connection parameter
+ # "timeout" differently. For a more reliable duration,
+ # manually retry the statement for 60 seconds. Only used
+ # by statements which modify the database and do not use
+ # a transaction (like those in ``__init__`` or ``reset``).
+        # See Issue #85 and tests/issue_85.py for more details.
+
+ def _execute_with_retry(statement, *args, **kwargs):
+ start = time.time()
+ while True:
+ try:
+ return sql(statement, *args, **kwargs)
+ except sqlite3.OperationalError as exc:
+ if str(exc) != 'database is locked':
+ raise
+ diff = time.time() - start
+ if diff > 60:
+ raise
+ time.sleep(0.001)
+
+ return _execute_with_retry
+
+
+ @cl.contextmanager
+ def transact(self, retry=False):
+ """Context manager to perform a transaction by locking the cache.
+
+ While the cache is locked, no other write operation is permitted.
+ Transactions should therefore be as short as possible. Read and write
+ operations performed in a transaction are atomic. Read operations may
+ occur concurrent to a transaction.
+
+ Transactions may be nested and may not be shared between threads.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ >>> cache = Cache()
+ >>> with cache.transact(): # Atomically increment two keys.
+ ... _ = cache.incr('total', 123.4)
+ ... _ = cache.incr('count', 1)
+ >>> with cache.transact(): # Atomically calculate average.
+ ... average = cache['total'] / cache['count']
+ >>> average
+ 123.4
+
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: context manager for use in `with` statement
+ :raises Timeout: if database timeout occurs
+
+ """
+ with self._transact(retry=retry):
+ yield
+
+
+ @cl.contextmanager
+ def _transact(self, retry=False, filename=None):
+ sql = self._sql
+ filenames = []
+ _disk_remove = self._disk.remove
+ tid = get_ident()
+ txn_id = self._txn_id
+
+ if tid == txn_id:
+ begin = False
+ else:
+ while True:
+ try:
+ sql('BEGIN IMMEDIATE')
+ begin = True
+ self._txn_id = tid
+ break
+ except sqlite3.OperationalError:
+ if retry:
+ continue
+ if filename is not None:
+ _disk_remove(filename)
+ raise Timeout
+
+ try:
+ yield sql, filenames.append
+ except BaseException:
+ if begin:
+ assert self._txn_id == tid
+ self._txn_id = None
+ sql('ROLLBACK')
+ raise
+ else:
+ if begin:
+ assert self._txn_id == tid
+ self._txn_id = None
+ sql('COMMIT')
+ for name in filenames:
+ if name is not None:
+ _disk_remove(name)
+
+
+ def set(self, key, value, expire=None, read=False, tag=None, retry=False):
+ """Set `key` and `value` item in cache.
+
+ When `read` is `True`, `value` should be a file-like object opened
+ for reading in binary mode.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param key: key for item
+ :param value: value for item
+ :param float expire: seconds until item expires
+ (default None, no expiry)
+ :param bool read: read value as bytes from file (default False)
+ :param str tag: text to associate with key (default None)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: True if item was set
+ :raises Timeout: if database timeout occurs
+
+ """
+ now = time.time()
+ db_key, raw = self._disk.put(key)
+ expire_time = None if expire is None else now + expire
+ size, mode, filename, db_value = self._disk.store(value, read, key=key)
+ columns = (expire_time, tag, size, mode, filename, db_value)
+
+ # The order of SELECT, UPDATE, and INSERT is important below.
+ #
+ # Typical cache usage pattern is:
+ #
+ # value = cache.get(key)
+ # if value is None:
+ # value = expensive_calculation()
+ # cache.set(key, value)
+ #
+ # Cache.get does not evict expired keys to avoid writes during lookups.
+ # Commonly used/expired keys will therefore remain in the cache making
+ # an UPDATE the preferred path.
+ #
+ # The alternative is to assume the key is not present by first trying
+ # to INSERT and then handling the IntegrityError that occurs from
+ # violating the UNIQUE constraint. This optimistic approach was
+ # rejected based on the common cache usage pattern.
+ #
+ # INSERT OR REPLACE aka UPSERT is not used because the old filename may
+ # need cleanup.
+
+ with self._transact(retry, filename) as (sql, cleanup):
+ rows = sql(
+ 'SELECT rowid, filename FROM Cache'
+ ' WHERE key = ? AND raw = ?',
+ (db_key, raw),
+ ).fetchall()
+
+ if rows:
+ (rowid, old_filename), = rows
+ cleanup(old_filename)
+ self._row_update(rowid, now, columns)
+ else:
+ self._row_insert(db_key, raw, now, columns)
+
+ self._cull(now, sql, cleanup)
+
+ return True
+
+
+ def __setitem__(self, key, value):
+ """Set corresponding `value` for `key` in cache.
+
+ :param key: key for item
+ :param value: value for item
+ :return: corresponding value
+ :raises KeyError: if key is not found
+
+ """
+ self.set(key, value, retry=True)
+
+
+ def _row_update(self, rowid, now, columns):
+ sql = self._sql
+ expire_time, tag, size, mode, filename, value = columns
+ sql('UPDATE Cache SET'
+ ' store_time = ?,'
+ ' expire_time = ?,'
+ ' access_time = ?,'
+ ' access_count = ?,'
+ ' tag = ?,'
+ ' size = ?,'
+ ' mode = ?,'
+ ' filename = ?,'
+ ' value = ?'
+ ' WHERE rowid = ?', (
+ now, # store_time
+ expire_time,
+ now, # access_time
+ 0, # access_count
+ tag,
+ size,
+ mode,
+ filename,
+ value,
+ rowid,
+ ),
+ )
+
+
+ def _row_insert(self, key, raw, now, columns):
+ sql = self._sql
+ expire_time, tag, size, mode, filename, value = columns
+ sql('INSERT INTO Cache('
+ ' key, raw, store_time, expire_time, access_time,'
+ ' access_count, tag, size, mode, filename, value'
+ ') VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', (
+ key,
+ raw,
+ now, # store_time
+ expire_time,
+ now, # access_time
+ 0, # access_count
+ tag,
+ size,
+ mode,
+ filename,
+ value,
+ ),
+ )
+
+
+ def _cull(self, now, sql, cleanup, limit=None):
+ cull_limit = self.cull_limit if limit is None else limit
+
+ if cull_limit == 0:
+ return
+
+ # Evict expired keys.
+
+ select_expired_template = (
+ 'SELECT %s FROM Cache'
+ ' WHERE expire_time IS NOT NULL AND expire_time < ?'
+ ' ORDER BY expire_time LIMIT ?'
+ )
+
+ select_expired = select_expired_template % 'filename'
+ rows = sql(select_expired, (now, cull_limit)).fetchall()
+
+ if rows:
+ delete_expired = (
+ 'DELETE FROM Cache WHERE rowid IN (%s)'
+ % (select_expired_template % 'rowid')
+ )
+ sql(delete_expired, (now, cull_limit))
+
+ for filename, in rows:
+ cleanup(filename)
+
+ cull_limit -= len(rows)
+
+ if cull_limit == 0:
+ return
+
+ # Evict keys by policy.
+
+ select_policy = EVICTION_POLICY[self.eviction_policy]['cull']
+
+ if select_policy is None or self.volume() < self.size_limit:
+ return
+
+ select_filename = select_policy.format(fields='filename', now=now)
+ rows = sql(select_filename, (cull_limit,)).fetchall()
+
+ if rows:
+ delete = (
+ 'DELETE FROM Cache WHERE rowid IN (%s)'
+ % (select_policy.format(fields='rowid', now=now))
+ )
+ sql(delete, (cull_limit,))
+
+ for filename, in rows:
+ cleanup(filename)
+
+
+ def touch(self, key, expire=None, retry=False):
+ """Touch `key` in cache and update `expire` time.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param key: key for item
+ :param float expire: seconds until item expires
+ (default None, no expiry)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: True if key was touched
+ :raises Timeout: if database timeout occurs
+
+ """
+ now = time.time()
+ db_key, raw = self._disk.put(key)
+ expire_time = None if expire is None else now + expire
+
+ with self._transact(retry) as (sql, _):
+ rows = sql(
+ 'SELECT rowid, expire_time FROM Cache'
+ ' WHERE key = ? AND raw = ?',
+ (db_key, raw),
+ ).fetchall()
+
+ if rows:
+ (rowid, old_expire_time), = rows
+
+ if old_expire_time is None or old_expire_time > now:
+ sql('UPDATE Cache SET expire_time = ? WHERE rowid = ?',
+ (expire_time, rowid),
+ )
+ return True
+
+ return False
+
+
+ def add(self, key, value, expire=None, read=False, tag=None, retry=False):
+ """Add `key` and `value` item to cache.
+
+ Similar to `set`, but only add to cache if key not present.
+
+ Operation is atomic. Only one concurrent add operation for a given key
+ will succeed.
+
+ When `read` is `True`, `value` should be a file-like object opened
+ for reading in binary mode.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param key: key for item
+ :param value: value for item
+ :param float expire: seconds until the key expires
+ (default None, no expiry)
+ :param bool read: read value as bytes from file (default False)
+ :param str tag: text to associate with key (default None)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: True if item was added
+ :raises Timeout: if database timeout occurs
+
+ """
+ now = time.time()
+ db_key, raw = self._disk.put(key)
+ expire_time = None if expire is None else now + expire
+ size, mode, filename, db_value = self._disk.store(value, read, key=key)
+ columns = (expire_time, tag, size, mode, filename, db_value)
+
+ with self._transact(retry, filename) as (sql, cleanup):
+ rows = sql(
+ 'SELECT rowid, filename, expire_time FROM Cache'
+ ' WHERE key = ? AND raw = ?',
+ (db_key, raw),
+ ).fetchall()
+
+ if rows:
+ (rowid, old_filename, old_expire_time), = rows
+
+ if old_expire_time is None or old_expire_time > now:
+ cleanup(filename)
+ return False
+
+ cleanup(old_filename)
+ self._row_update(rowid, now, columns)
+ else:
+ self._row_insert(db_key, raw, now, columns)
+
+ self._cull(now, sql, cleanup)
+
+ return True
+
+
+ def incr(self, key, delta=1, default=0, retry=False):
+ """Increment value by delta for item with key.
+
+ If key is missing and default is None then raise KeyError. Else if key
+ is missing and default is not None then use default for value.
+
+ Operation is atomic. All concurrent increment operations will be
+ counted individually.
+
+ Assumes value may be stored in a SQLite column. Most builds that target
+ machines with 64-bit pointer widths will support 64-bit signed
+ integers.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param key: key for item
+ :param int delta: amount to increment (default 1)
+ :param int default: value if key is missing (default 0)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: new value for item
+ :raises KeyError: if key is not found and default is None
+ :raises Timeout: if database timeout occurs
+
+ """
+ now = time.time()
+ db_key, raw = self._disk.put(key)
+ select = (
+ 'SELECT rowid, expire_time, filename, value FROM Cache'
+ ' WHERE key = ? AND raw = ?'
+ )
+
+ with self._transact(retry) as (sql, cleanup):
+ rows = sql(select, (db_key, raw)).fetchall()
+
+ if not rows:
+ if default is None:
+ raise KeyError(key)
+
+ value = default + delta
+ columns = (None, None) + self._disk.store(value, False, key=key)
+ self._row_insert(db_key, raw, now, columns)
+ self._cull(now, sql, cleanup)
+ return value
+
+ (rowid, expire_time, filename, value), = rows
+
+ if expire_time is not None and expire_time < now:
+ if default is None:
+ raise KeyError(key)
+
+ value = default + delta
+ columns = (None, None) + self._disk.store(value, False, key=key)
+ self._row_update(rowid, now, columns)
+ self._cull(now, sql, cleanup)
+ cleanup(filename)
+ return value
+
+ value += delta
+
+ columns = 'store_time = ?, value = ?'
+ update_column = EVICTION_POLICY[self.eviction_policy]['get']
+
+ if update_column is not None:
+ columns += ', ' + update_column.format(now=now)
+
+ update = 'UPDATE Cache SET %s WHERE rowid = ?' % columns
+ sql(update, (now, value, rowid))
+
+ return value
+
+
+ def decr(self, key, delta=1, default=0, retry=False):
+ """Decrement value by delta for item with key.
+
+ If key is missing and default is None then raise KeyError. Else if key
+ is missing and default is not None then use default for value.
+
+ Operation is atomic. All concurrent decrement operations will be
+ counted individually.
+
+ Unlike Memcached, negative values are supported. Value may be
+ decremented below zero.
+
+ Assumes value may be stored in a SQLite column. Most builds that target
+ machines with 64-bit pointer widths will support 64-bit signed
+ integers.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param key: key for item
+ :param int delta: amount to decrement (default 1)
+ :param int default: value if key is missing (default 0)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: new value for item
+ :raises KeyError: if key is not found and default is None
+ :raises Timeout: if database timeout occurs
+
+ """
+ return self.incr(key, -delta, default, retry)
+
+
+ def get(self, key, default=None, read=False, expire_time=False, tag=False,
+ retry=False):
+ """Retrieve value from cache. If `key` is missing, return `default`.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param key: key for item
+ :param default: value to return if key is missing (default None)
+ :param bool read: if True, return file handle to value
+ (default False)
+ :param bool expire_time: if True, return expire_time in tuple
+ (default False)
+ :param bool tag: if True, return tag in tuple (default False)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: value for item or default if key not found
+ :raises Timeout: if database timeout occurs
+
+ """
+ db_key, raw = self._disk.put(key)
+ update_column = EVICTION_POLICY[self.eviction_policy]['get']
+ select = (
+ 'SELECT rowid, expire_time, tag, mode, filename, value'
+ ' FROM Cache WHERE key = ? AND raw = ?'
+ ' AND (expire_time IS NULL OR expire_time > ?)'
+ )
+
+ if expire_time and tag:
+ default = (default, None, None)
+ elif expire_time or tag:
+ default = (default, None)
+
+ if not self.statistics and update_column is None:
+ # Fast path, no transaction necessary.
+
+ rows = self._sql(select, (db_key, raw, time.time())).fetchall()
+
+ if not rows:
+ return default
+
+ (rowid, db_expire_time, db_tag, mode, filename, db_value), = rows
+
+ try:
+ value = self._disk.fetch(mode, filename, db_value, read)
+ except IOError:
+ # Key was deleted before we could retrieve result.
+ return default
+
+ else: # Slow path, transaction required.
+ cache_hit = (
+ 'UPDATE Settings SET value = value + 1 WHERE key = "hits"'
+ )
+ cache_miss = (
+ 'UPDATE Settings SET value = value + 1 WHERE key = "misses"'
+ )
+
+ with self._transact(retry) as (sql, _):
+ rows = sql(select, (db_key, raw, time.time())).fetchall()
+
+ if not rows:
+ if self.statistics:
+ sql(cache_miss)
+ return default
+
+ (rowid, db_expire_time, db_tag,
+ mode, filename, db_value), = rows
+
+ try:
+ value = self._disk.fetch(mode, filename, db_value, read)
+ except IOError as error:
+ if error.errno == errno.ENOENT:
+ # Key was deleted before we could retrieve result.
+ if self.statistics:
+ sql(cache_miss)
+ return default
+ else:
+ raise
+
+ if self.statistics:
+ sql(cache_hit)
+
+ now = time.time()
+ update = 'UPDATE Cache SET %s WHERE rowid = ?'
+
+ if update_column is not None:
+ sql(update % update_column.format(now=now), (rowid,))
+
+ if expire_time and tag:
+ return (value, db_expire_time, db_tag)
+ elif expire_time:
+ return (value, db_expire_time)
+ elif tag:
+ return (value, db_tag)
+ else:
+ return value
+
+
+ def __getitem__(self, key):
+ """Return corresponding value for `key` from cache.
+
+ :param key: key matching item
+ :return: corresponding value
+ :raises KeyError: if key is not found
+
+ """
+ value = self.get(key, default=ENOVAL, retry=True)
+ if value is ENOVAL:
+ raise KeyError(key)
+ return value
+
+
+ def read(self, key, retry=False):
+ """Return file handle value corresponding to `key` from cache.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param key: key matching item
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: file open for reading in binary mode
+ :raises KeyError: if key is not found
+ :raises Timeout: if database timeout occurs
+
+ """
+ handle = self.get(key, default=ENOVAL, read=True, retry=retry)
+ if handle is ENOVAL:
+ raise KeyError(key)
+ return handle
+
+
+ def __contains__(self, key):
+ """Return `True` if `key` matching item is found in cache.
+
+ :param key: key matching item
+ :return: True if key matching item
+
+ """
+ sql = self._sql
+ db_key, raw = self._disk.put(key)
+ select = (
+ 'SELECT rowid FROM Cache'
+ ' WHERE key = ? AND raw = ?'
+ ' AND (expire_time IS NULL OR expire_time > ?)'
+ )
+
+ rows = sql(select, (db_key, raw, time.time())).fetchall()
+
+ return bool(rows)
+
+
+ def pop(self, key, default=None, expire_time=False, tag=False, retry=False):
+ """Remove corresponding item for `key` from cache and return value.
+
+ If `key` is missing, return `default`.
+
+ Operation is atomic. Concurrent operations will be serialized.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param key: key for item
+ :param default: value to return if key is missing (default None)
+ :param bool expire_time: if True, return expire_time in tuple
+ (default False)
+ :param bool tag: if True, return tag in tuple (default False)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: value for item or default if key not found
+ :raises Timeout: if database timeout occurs
+
+ """
+ db_key, raw = self._disk.put(key)
+ select = (
+ 'SELECT rowid, expire_time, tag, mode, filename, value'
+ ' FROM Cache WHERE key = ? AND raw = ?'
+ ' AND (expire_time IS NULL OR expire_time > ?)'
+ )
+
+ if expire_time and tag:
+ default = default, None, None
+ elif expire_time or tag:
+ default = default, None
+
+ with self._transact(retry) as (sql, _):
+ rows = sql(select, (db_key, raw, time.time())).fetchall()
+
+ if not rows:
+ return default
+
+ (rowid, db_expire_time, db_tag, mode, filename, db_value), = rows
+
+ sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
+
+ try:
+ value = self._disk.fetch(mode, filename, db_value, False)
+ except IOError as error:
+ if error.errno == errno.ENOENT:
+ # Key was deleted before we could retrieve result.
+ return default
+ else:
+ raise
+ finally:
+ if filename is not None:
+ self._disk.remove(filename)
+
+ if expire_time and tag:
+ return value, db_expire_time, db_tag
+ elif expire_time:
+ return value, db_expire_time
+ elif tag:
+ return value, db_tag
+ else:
+ return value
+
+
+ def __delitem__(self, key, retry=True):
+ """Delete corresponding item for `key` from cache.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default `True`).
+
+ :param key: key matching item
+ :param bool retry: retry if database timeout occurs (default True)
+ :raises KeyError: if key is not found
+ :raises Timeout: if database timeout occurs
+
+ """
+ db_key, raw = self._disk.put(key)
+
+ with self._transact(retry) as (sql, cleanup):
+ rows = sql(
+ 'SELECT rowid, filename FROM Cache'
+ ' WHERE key = ? AND raw = ?'
+ ' AND (expire_time IS NULL OR expire_time > ?)',
+ (db_key, raw, time.time()),
+ ).fetchall()
+
+ if not rows:
+ raise KeyError(key)
+
+ (rowid, filename), = rows
+ sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
+ cleanup(filename)
+
+ return True
+
+
+ def delete(self, key, retry=False):
+ """Delete corresponding item for `key` from cache.
+
+ Missing keys are ignored.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param key: key matching item
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: True if item was deleted
+ :raises Timeout: if database timeout occurs
+
+ """
+ try:
+ return self.__delitem__(key, retry=retry)
+ except KeyError:
+ return False
+
+
+ def push(self, value, prefix=None, side='back', expire=None, read=False,
+ tag=None, retry=False):
+ """Push `value` onto `side` of queue identified by `prefix` in cache.
+
+ When prefix is None, integer keys are used. Otherwise, string keys are
+ used in the format "prefix-integer". Integer starts at 500 trillion.
+
+ Defaults to pushing value on back of queue. Set side to 'front' to push
+ value on front of queue. Side must be one of 'back' or 'front'.
+
+ Operation is atomic. Concurrent operations will be serialized.
+
+ When `read` is `True`, `value` should be a file-like object opened
+ for reading in binary mode.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ See also `Cache.pull`.
+
+ >>> cache = Cache()
+ >>> print(cache.push('first value'))
+ 500000000000000
+ >>> cache.get(500000000000000)
+ 'first value'
+ >>> print(cache.push('second value'))
+ 500000000000001
+ >>> print(cache.push('third value', side='front'))
+ 499999999999999
+ >>> cache.push(1234, prefix='userids')
+ 'userids-500000000000000'
+
+ :param value: value for item
+ :param str prefix: key prefix (default None, key is integer)
+ :param str side: either 'back' or 'front' (default 'back')
+ :param float expire: seconds until the key expires
+ (default None, no expiry)
+ :param bool read: read value as bytes from file (default False)
+ :param str tag: text to associate with key (default None)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: key for item in cache
+ :raises Timeout: if database timeout occurs
+
+ """
+ if prefix is None:
+ min_key = 0
+ max_key = 999999999999999
+ else:
+ min_key = prefix + '-000000000000000'
+ max_key = prefix + '-999999999999999'
+
+ now = time.time()
+ raw = True
+ expire_time = None if expire is None else now + expire
+ size, mode, filename, db_value = self._disk.store(value, read)
+ columns = (expire_time, tag, size, mode, filename, db_value)
+ order = {'back': 'DESC', 'front': 'ASC'}
+ select = (
+ 'SELECT key FROM Cache'
+ ' WHERE ? < key AND key < ? AND raw = ?'
+ ' ORDER BY key %s LIMIT 1'
+ ) % order[side]
+
+ with self._transact(retry, filename) as (sql, cleanup):
+ rows = sql(select, (min_key, max_key, raw)).fetchall()
+
+ if rows:
+ (key,), = rows
+
+ if prefix is not None:
+ num = int(key[(key.rfind('-') + 1):])
+ else:
+ num = key
+
+ if side == 'back':
+ num += 1
+ else:
+ assert side == 'front'
+ num -= 1
+ else:
+ num = 500000000000000
+
+ if prefix is not None:
+ db_key = '{0}-{1:015d}'.format(prefix, num)
+ else:
+ db_key = num
+
+ self._row_insert(db_key, raw, now, columns)
+ self._cull(now, sql, cleanup)
+
+ return db_key
+
+
+ def pull(self, prefix=None, default=(None, None), side='front',
+ expire_time=False, tag=False, retry=False):
+ """Pull key and value item pair from `side` of queue in cache.
+
+ When prefix is None, integer keys are used. Otherwise, string keys are
+ used in the format "prefix-integer". Integer starts at 500 trillion.
+
+ If queue is empty, return default.
+
+ Defaults to pulling key and value item pairs from front of queue. Set
+ side to 'back' to pull from back of queue. Side must be one of 'front'
+ or 'back'.
+
+ Operation is atomic. Concurrent operations will be serialized.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ See also `Cache.push` and `Cache.get`.
+
+ >>> cache = Cache()
+ >>> cache.pull()
+ (None, None)
+ >>> for letter in 'abc':
+ ... print(cache.push(letter))
+ 500000000000000
+ 500000000000001
+ 500000000000002
+ >>> key, value = cache.pull()
+ >>> print(key)
+ 500000000000000
+ >>> value
+ 'a'
+ >>> _, value = cache.pull(side='back')
+ >>> value
+ 'c'
+ >>> cache.push(1234, 'userids')
+ 'userids-500000000000000'
+ >>> _, value = cache.pull('userids')
+ >>> value
+ 1234
+
+ :param str prefix: key prefix (default None, key is integer)
+ :param default: value to return if key is missing
+ (default (None, None))
+ :param str side: either 'front' or 'back' (default 'front')
+ :param bool expire_time: if True, return expire_time in tuple
+ (default False)
+ :param bool tag: if True, return tag in tuple (default False)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: key and value item pair or default if queue is empty
+ :raises Timeout: if database timeout occurs
+
+ """
+ # Caution: Nearly identical code exists in Cache.peek
+ if prefix is None:
+ min_key = 0
+ max_key = 999999999999999
+ else:
+ min_key = prefix + '-000000000000000'
+ max_key = prefix + '-999999999999999'
+
+ order = {'front': 'ASC', 'back': 'DESC'}
+ select = (
+ 'SELECT rowid, key, expire_time, tag, mode, filename, value'
+ ' FROM Cache WHERE ? < key AND key < ? AND raw = 1'
+ ' ORDER BY key %s LIMIT 1'
+ ) % order[side]
+
+ if expire_time and tag:
+ default = default, None, None
+ elif expire_time or tag:
+ default = default, None
+
+ while True:
+ while True:
+ with self._transact(retry) as (sql, cleanup):
+ rows = sql(select, (min_key, max_key)).fetchall()
+
+ if not rows:
+ return default
+
+ (rowid, key, db_expire, db_tag, mode, name,
+ db_value), = rows
+
+ sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
+
+ if db_expire is not None and db_expire < time.time():
+ cleanup(name)
+ else:
+ break
+
+ try:
+ value = self._disk.fetch(mode, name, db_value, False)
+ except IOError as error:
+ if error.errno == errno.ENOENT:
+ # Key was deleted before we could retrieve result.
+ continue
+ else:
+ raise
+ finally:
+ if name is not None:
+ self._disk.remove(name)
+ break
+
+ if expire_time and tag:
+ return (key, value), db_expire, db_tag
+ elif expire_time:
+ return (key, value), db_expire
+ elif tag:
+ return (key, value), db_tag
+ else:
+ return key, value
+
+
+ def peek(self, prefix=None, default=(None, None), side='front',
+ expire_time=False, tag=False, retry=False):
+ """Peek at key and value item pair from `side` of queue in cache.
+
+ When prefix is None, integer keys are used. Otherwise, string keys are
+ used in the format "prefix-integer". Integer starts at 500 trillion.
+
+ If queue is empty, return default.
+
+ Defaults to peeking at key and value item pairs from front of queue.
+ Set side to 'back' to pull from back of queue. Side must be one of
+ 'front' or 'back'.
+
+ Expired items are deleted from cache. Operation is atomic. Concurrent
+ operations will be serialized.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ See also `Cache.pull` and `Cache.push`.
+
+ >>> cache = Cache()
+ >>> for letter in 'abc':
+ ... print(cache.push(letter))
+ 500000000000000
+ 500000000000001
+ 500000000000002
+ >>> key, value = cache.peek()
+ >>> print(key)
+ 500000000000000
+ >>> value
+ 'a'
+ >>> key, value = cache.peek(side='back')
+ >>> print(key)
+ 500000000000002
+ >>> value
+ 'c'
+
+ :param str prefix: key prefix (default None, key is integer)
+ :param default: value to return if key is missing
+ (default (None, None))
+ :param str side: either 'front' or 'back' (default 'front')
+ :param bool expire_time: if True, return expire_time in tuple
+ (default False)
+ :param bool tag: if True, return tag in tuple (default False)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: key and value item pair or default if queue is empty
+ :raises Timeout: if database timeout occurs
+
+ """
+ # Caution: Nearly identical code exists in Cache.pull
+ if prefix is None:
+ min_key = 0
+ max_key = 999999999999999
+ else:
+ min_key = prefix + '-000000000000000'
+ max_key = prefix + '-999999999999999'
+
+ order = {'front': 'ASC', 'back': 'DESC'}
+ select = (
+ 'SELECT rowid, key, expire_time, tag, mode, filename, value'
+ ' FROM Cache WHERE ? < key AND key < ? AND raw = 1'
+ ' ORDER BY key %s LIMIT 1'
+ ) % order[side]
+
+ if expire_time and tag:
+ default = default, None, None
+ elif expire_time or tag:
+ default = default, None
+
+ while True:
+ while True:
+ with self._transact(retry) as (sql, cleanup):
+ rows = sql(select, (min_key, max_key)).fetchall()
+
+ if not rows:
+ return default
+
+ (rowid, key, db_expire, db_tag, mode, name,
+ db_value), = rows
+
+ if db_expire is not None and db_expire < time.time():
+ sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
+ cleanup(name)
+ else:
+ break
+
+ try:
+ value = self._disk.fetch(mode, name, db_value, False)
+ except IOError as error:
+ if error.errno == errno.ENOENT:
+ # Key was deleted before we could retrieve result.
+ continue
+ else:
+ raise
+ finally:
+ if name is not None:
+ self._disk.remove(name)
+ break
+
+ if expire_time and tag:
+ return (key, value), db_expire, db_tag
+ elif expire_time:
+ return (key, value), db_expire
+ elif tag:
+ return (key, value), db_tag
+ else:
+ return key, value
+
+
+ def peekitem(self, last=True, expire_time=False, tag=False, retry=False):
+ """Peek at key and value item pair in cache based on iteration order.
+
+ Expired items are deleted from cache. Operation is atomic. Concurrent
+ operations will be serialized.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ >>> cache = Cache()
+ >>> for num, letter in enumerate('abc'):
+ ... cache[letter] = num
+ >>> cache.peekitem()
+ ('c', 2)
+ >>> cache.peekitem(last=False)
+ ('a', 0)
+
+ :param bool last: last item in iteration order (default True)
+ :param bool expire_time: if True, return expire_time in tuple
+ (default False)
+ :param bool tag: if True, return tag in tuple (default False)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: key and value item pair
+ :raises KeyError: if cache is empty
+ :raises Timeout: if database timeout occurs
+
+ """
+ order = ('ASC', 'DESC')
+ select = (
+ 'SELECT rowid, key, raw, expire_time, tag, mode, filename, value'
+ ' FROM Cache ORDER BY rowid %s LIMIT 1'
+ ) % order[last]
+
+ while True:
+ while True:
+ with self._transact(retry) as (sql, cleanup):
+ rows = sql(select).fetchall()
+
+ if not rows:
+ raise KeyError('dictionary is empty')
+
+ (rowid, db_key, raw, db_expire, db_tag, mode, name,
+ db_value), = rows
+
+ if db_expire is not None and db_expire < time.time():
+ sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
+ cleanup(name)
+ else:
+ break
+
+ key = self._disk.get(db_key, raw)
+
+ try:
+ value = self._disk.fetch(mode, name, db_value, False)
+ except IOError as error:
+ if error.errno == errno.ENOENT:
+ # Key was deleted before we could retrieve result.
+ continue
+ else:
+ raise
+ break
+
+ if expire_time and tag:
+ return (key, value), db_expire, db_tag
+ elif expire_time:
+ return (key, value), db_expire
+ elif tag:
+ return (key, value), db_tag
+ else:
+ return key, value
+
+
+ def memoize(self, name=None, typed=False, expire=None, tag=None):
+ """Memoizing cache decorator.
+
+ Decorator to wrap callable with memoizing function using cache.
+ Repeated calls with the same arguments will lookup result in cache and
+ avoid function evaluation.
+
+ If name is set to None (default), the callable name will be determined
+ automatically.
+
+ When expire is set to zero, function results will not be set in the
+ cache. Cache lookups still occur, however. Read
+ :doc:`case-study-landing-page-caching` for example usage.
+
+ If typed is set to True, function arguments of different types will be
+ cached separately. For example, f(3) and f(3.0) will be treated as
+ distinct calls with distinct results.
+
+ The original underlying function is accessible through the __wrapped__
+ attribute. This is useful for introspection, for bypassing the cache,
+ or for rewrapping the function with a different cache.
+
+ >>> from diskcache import Cache
+ >>> cache = Cache()
+ >>> @cache.memoize(expire=1, tag='fib')
+ ... def fibonacci(number):
+ ... if number == 0:
+ ... return 0
+ ... elif number == 1:
+ ... return 1
+ ... else:
+ ... return fibonacci(number - 1) + fibonacci(number - 2)
+ >>> print(fibonacci(100))
+ 354224848179261915075
+
+ An additional `__cache_key__` attribute can be used to generate the
+ cache key used for the given arguments.
+
+ >>> key = fibonacci.__cache_key__(100)
+ >>> print(cache[key])
+ 354224848179261915075
+
+ Remember to call memoize when decorating a callable. If you forget,
+ then a TypeError will occur. Note the lack of parentheses after
+ memoize below:
+
+ >>> @cache.memoize
+ ... def test():
+ ... pass
+ Traceback (most recent call last):
+ ...
+ TypeError: name cannot be callable
+
+ :param str name: name given for callable (default None, automatic)
+ :param bool typed: cache different types separately (default False)
+ :param float expire: seconds until arguments expire
+ (default None, no expiry)
+ :param str tag: text to associate with arguments (default None)
+ :return: callable decorator
+
+ """
+ # Caution: Nearly identical code exists in DjangoCache.memoize
+ if callable(name):
+ raise TypeError('name cannot be callable')
+
+ def decorator(func):
+ "Decorator created by memoize() for callable `func`."
+ base = (full_name(func),) if name is None else (name,)
+
+ @ft.wraps(func)
+ def wrapper(*args, **kwargs):
+ "Wrapper for callable to cache arguments and return values."
+ key = wrapper.__cache_key__(*args, **kwargs)
+ result = self.get(key, default=ENOVAL, retry=True)
+
+ if result is ENOVAL:
+ result = func(*args, **kwargs)
+ if expire is None or expire > 0:
+ self.set(key, result, expire, tag=tag, retry=True)
+
+ return result
+
+ def __cache_key__(*args, **kwargs):
+ "Make key for cache given function arguments."
+ return args_to_key(base, args, kwargs, typed)
+
+ wrapper.__cache_key__ = __cache_key__
+ return wrapper
+
+ return decorator
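+
+ # A minimal usage sketch (illustrative only), reusing the `fibonacci` and
+ # `cache` names from the doctest above:
+ #
+ #     result = fibonacci.__wrapped__(10)            # bypass the cache
+ #     key = fibonacci.__cache_key__(10)             # key for these arguments
+ #     cache.set(key, result, expire=1, tag='fib')   # prime the cache manually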
+
+
+ def check(self, fix=False, retry=False):
+ """Check database and file system consistency.
+
+ Intended for use in testing and post-mortem error analysis.
+
+ While checking the Cache table for consistency, a writer lock is held
+ on the database. The lock blocks other cache clients from writing to
+ the database. For caches with many file references, the lock may be
+ held for a long time. For example, local benchmarking shows that a
+ cache with 1,000 file references takes ~60ms to check.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param bool fix: correct inconsistencies
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: list of warnings
+ :raises Timeout: if database timeout occurs
+
+ """
+ # pylint: disable=access-member-before-definition,W0201
+ with warnings.catch_warnings(record=True) as warns:
+ sql = self._sql
+
+ # Check integrity of database.
+
+ rows = sql('PRAGMA integrity_check').fetchall()
+
+ if len(rows) != 1 or rows[0][0] != u'ok':
+ for message, in rows:
+ warnings.warn(message)
+
+ if fix:
+ sql('VACUUM')
+
+ with self._transact(retry) as (sql, _):
+
+ # Check Cache.filename against file system.
+
+ filenames = set()
+ select = (
+ 'SELECT rowid, size, filename FROM Cache'
+ ' WHERE filename IS NOT NULL'
+ )
+
+ rows = sql(select).fetchall()
+
+ for rowid, size, filename in rows:
+ full_path = op.join(self._directory, filename)
+ filenames.add(full_path)
+
+ if op.exists(full_path):
+ real_size = op.getsize(full_path)
+
+ if size != real_size:
+ message = 'wrong file size: %s, %d != %d'
+ args = full_path, real_size, size
+ warnings.warn(message % args)
+
+ if fix:
+ sql('UPDATE Cache SET size = ?'
+ ' WHERE rowid = ?',
+ (real_size, rowid),
+ )
+
+ continue
+
+ warnings.warn('file not found: %s' % full_path)
+
+ if fix:
+ sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
+
+ # Check file system against Cache.filename.
+
+ for dirpath, _, files in os.walk(self._directory):
+ paths = [op.join(dirpath, filename) for filename in files]
+ error = set(paths) - filenames
+
+ for full_path in error:
+ if DBNAME in full_path:
+ continue
+
+ message = 'unknown file: %s' % full_path
+ warnings.warn(message, UnknownFileWarning)
+
+ if fix:
+ os.remove(full_path)
+
+ # Check for empty directories.
+
+ for dirpath, dirs, files in os.walk(self._directory):
+ if not (dirs or files):
+ message = 'empty directory: %s' % dirpath
+ warnings.warn(message, EmptyDirWarning)
+
+ if fix:
+ os.rmdir(dirpath)
+
+ # Check Settings.count against count of Cache rows.
+
+ self.reset('count')
+ (count,), = sql('SELECT COUNT(key) FROM Cache').fetchall()
+
+ if self.count != count:
+ message = 'Settings.count != COUNT(Cache.key); %d != %d'
+ warnings.warn(message % (self.count, count))
+
+ if fix:
+ sql('UPDATE Settings SET value = ? WHERE key = ?',
+ (count, 'count'),
+ )
+
+ # Check Settings.size against sum of Cache.size column.
+
+ self.reset('size')
+ select_size = 'SELECT COALESCE(SUM(size), 0) FROM Cache'
+ (size,), = sql(select_size).fetchall()
+
+ if self.size != size:
+ message = 'Settings.size != SUM(Cache.size); %d != %d'
+ warnings.warn(message % (self.size, size))
+
+ if fix:
+ sql('UPDATE Settings SET value = ? WHERE key = ?',
+ (size, 'size'),
+ )
+
+ return warns
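+
+ # A minimal usage sketch (illustrative only); a consistent cache produces
+ # no warnings, so the returned list is empty:
+ #
+ #     cache = Cache()
+ #     assert cache.check(fix=True) == []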
+
+
+ def create_tag_index(self):
+ """Create tag index on cache database.
+
+ It is better to initialize cache with `tag_index=True` than use this.
+
+ :raises Timeout: if database timeout occurs
+
+ """
+ sql = self._sql
+ sql('CREATE INDEX IF NOT EXISTS Cache_tag_rowid ON Cache(tag, rowid)')
+ self.reset('tag_index', 1)
+
+
+ def drop_tag_index(self):
+ """Drop tag index on cache database.
+
+ :raises Timeout: if database timeout occurs
+
+ """
+ sql = self._sql
+ sql('DROP INDEX IF EXISTS Cache_tag_rowid')
+ self.reset('tag_index', 0)
+
+
+ def evict(self, tag, retry=False):
+ """Remove items with matching `tag` from cache.
+
+ Removing items is an iterative process. In each iteration, a subset of
+ items is removed. Concurrent writes may occur between iterations.
+
+ If a :exc:`Timeout` occurs, the first element of the exception's
+ `args` attribute will be the number of items removed before the
+ exception occurred.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param str tag: tag identifying items
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: count of rows removed
+ :raises Timeout: if database timeout occurs
+
+ """
+ select = (
+ 'SELECT rowid, filename FROM Cache'
+ ' WHERE tag = ? AND rowid > ?'
+ ' ORDER BY rowid LIMIT ?'
+ )
+ args = [tag, 0, 100]
+ return self._select_delete(select, args, arg_index=1, retry=retry)
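+
+ # A minimal usage sketch (illustrative only): items stored with the same
+ # tag can be removed together.
+ #
+ #     cache.set('alpha', 1, tag='group')
+ #     cache.set('beta', 2, tag='group')
+ #     assert cache.evict('group') == 2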
+
+
+ def expire(self, now=None, retry=False):
+ """Remove expired items from cache.
+
+ Removing items is an iterative process. In each iteration, a subset of
+ items is removed. Concurrent writes may occur between iterations.
+
+ If a :exc:`Timeout` occurs, the first element of the exception's
+ `args` attribute will be the number of items removed before the
+ exception occurred.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param float now: current time (default None, ``time.time()`` used)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: count of items removed
+ :raises Timeout: if database timeout occurs
+
+ """
+ select = (
+ 'SELECT rowid, expire_time, filename FROM Cache'
+ ' WHERE ? < expire_time AND expire_time < ?'
+ ' ORDER BY expire_time LIMIT ?'
+ )
+ args = [0, now or time.time(), 100]
+ return self._select_delete(select, args, row_index=1, retry=retry)
+
+
+ def cull(self, retry=False):
+ """Cull items from cache until volume is less than size limit.
+
+ Removing items is an iterative process. In each iteration, a subset of
+ items is removed. Concurrent writes may occur between iterations.
+
+ If a :exc:`Timeout` occurs, the first element of the exception's
+ `args` attribute will be the number of items removed before the
+ exception occurred.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: count of items removed
+ :raises Timeout: if database timeout occurs
+
+ """
+ now = time.time()
+
+ # Remove expired items.
+
+ count = self.expire(now)
+
+ # Remove items by policy.
+
+ select_policy = EVICTION_POLICY[self.eviction_policy]['cull']
+
+ if select_policy is None:
+ return count
+
+ select_filename = select_policy.format(fields='filename', now=now)
+
+ try:
+ while self.volume() > self.size_limit:
+ with self._transact(retry) as (sql, cleanup):
+ rows = sql(select_filename, (10,)).fetchall()
+
+ if not rows:
+ break
+
+ count += len(rows)
+ delete = (
+ 'DELETE FROM Cache WHERE rowid IN (%s)'
+ % select_policy.format(fields='rowid', now=now)
+ )
+ sql(delete, (10,))
+
+ for filename, in rows:
+ cleanup(filename)
+ except Timeout:
+ raise Timeout(count)
+
+ return count
+
+
+ def clear(self, retry=False):
+ """Remove all items from cache.
+
+ Removing items is an iterative process. In each iteration, a subset of
+ items is removed. Concurrent writes may occur between iterations.
+
+ If a :exc:`Timeout` occurs, the first element of the exception's
+ `args` attribute will be the number of items removed before the
+ exception occurred.
+
+ Raises :exc:`Timeout` error when database timeout occurs and `retry` is
+ `False` (default).
+
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: count of rows removed
+ :raises Timeout: if database timeout occurs
+
+ """
+ select = (
+ 'SELECT rowid, filename FROM Cache'
+ ' WHERE rowid > ?'
+ ' ORDER BY rowid LIMIT ?'
+ )
+ args = [0, 100]
+ return self._select_delete(select, args, retry=retry)
+
+
+ def _select_delete(self, select, args, row_index=0, arg_index=0,
+ retry=False):
+ count = 0
+ delete = 'DELETE FROM Cache WHERE rowid IN (%s)'
+
+ try:
+ while True:
+ with self._transact(retry) as (sql, cleanup):
+ rows = sql(select, args).fetchall()
+
+ if not rows:
+ break
+
+ count += len(rows)
+ sql(delete % ','.join(str(row[0]) for row in rows))
+
+ for row in rows:
+ args[arg_index] = row[row_index]
+ cleanup(row[-1])
+
+ except Timeout:
+ raise Timeout(count)
+
+ return count
+
+
+ def iterkeys(self, reverse=False):
+ """Iterate Cache keys in database sort order.
+
+ >>> cache = Cache()
+ >>> for key in [4, 1, 3, 0, 2]:
+ ... cache[key] = key
+ >>> list(cache.iterkeys())
+ [0, 1, 2, 3, 4]
+ >>> list(cache.iterkeys(reverse=True))
+ [4, 3, 2, 1, 0]
+
+ :param bool reverse: reverse sort order (default False)
+ :return: iterator of Cache keys
+
+ """
+ sql = self._sql
+ limit = 100
+ _disk_get = self._disk.get
+
+ if reverse:
+ select = (
+ 'SELECT key, raw FROM Cache'
+ ' ORDER BY key DESC, raw DESC LIMIT 1'
+ )
+ iterate = (
+ 'SELECT key, raw FROM Cache'
+ ' WHERE key = ? AND raw < ? OR key < ?'
+ ' ORDER BY key DESC, raw DESC LIMIT ?'
+ )
+ else:
+ select = (
+ 'SELECT key, raw FROM Cache'
+ ' ORDER BY key ASC, raw ASC LIMIT 1'
+ )
+ iterate = (
+ 'SELECT key, raw FROM Cache'
+ ' WHERE key = ? AND raw > ? OR key > ?'
+ ' ORDER BY key ASC, raw ASC LIMIT ?'
+ )
+
+ row = sql(select).fetchall()
+
+ if row:
+ (key, raw), = row
+ else:
+ return
+
+ yield _disk_get(key, raw)
+
+ while True:
+ rows = sql(iterate, (key, raw, key, limit)).fetchall()
+
+ if not rows:
+ break
+
+ for key, raw in rows:
+ yield _disk_get(key, raw)
+
+
+ def _iter(self, ascending=True):
+ sql = self._sql
+ rows = sql('SELECT MAX(rowid) FROM Cache').fetchall()
+ (max_rowid,), = rows
+ yield # Signal ready.
+
+ if max_rowid is None:
+ return
+
+ bound = max_rowid + 1
+ limit = 100
+ _disk_get = self._disk.get
+ rowid = 0 if ascending else bound
+ select = (
+ 'SELECT rowid, key, raw FROM Cache'
+ ' WHERE ? < rowid AND rowid < ?'
+ ' ORDER BY rowid %s LIMIT ?'
+ ) % ('ASC' if ascending else 'DESC')
+
+ while True:
+ if ascending:
+ args = (rowid, bound, limit)
+ else:
+ args = (0, rowid, limit)
+
+ rows = sql(select, args).fetchall()
+
+ if not rows:
+ break
+
+ for rowid, key, raw in rows:
+ yield _disk_get(key, raw)
+
+
+ def __iter__(self):
+ "Iterate keys in cache including expired items."
+ iterator = self._iter()
+ next(iterator)
+ return iterator
+
+
+ def __reversed__(self):
+ "Reverse iterate keys in cache including expired items."
+ iterator = self._iter(ascending=False)
+ next(iterator)
+ return iterator
+
+
+ def stats(self, enable=True, reset=False):
+ """Return cache statistics hits and misses.
+
+ :param bool enable: enable collecting statistics (default True)
+ :param bool reset: reset hits and misses to 0 (default False)
+ :return: (hits, misses)
+
+ """
+ # pylint: disable=E0203,W0201
+ result = (self.reset('hits'), self.reset('misses'))
+
+ if reset:
+ self.reset('hits', 0)
+ self.reset('misses', 0)
+
+ self.reset('statistics', enable)
+
+ return result
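+
+ # A minimal usage sketch (illustrative only): once statistics are enabled,
+ # get() tallies hits and misses.
+ #
+ #     cache.stats(enable=True)
+ #     cache.get('alpha')            # hit, assuming 'alpha' was set earlier
+ #     cache.get('missing-key')      # miss
+ #     hits, misses = cache.stats()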
+
+
+ def volume(self):
+ """Return estimated total size of cache on disk.
+
+ :return: size in bytes
+
+ """
+ (page_count,), = self._sql('PRAGMA page_count').fetchall()
+ total_size = self._page_size * page_count + self.reset('size')
+ return total_size
+
+
+ def close(self):
+ """Close database connection.
+
+ """
+ con = getattr(self._local, 'con', None)
+
+ if con is None:
+ return
+
+ con.close()
+
+ try:
+ delattr(self._local, 'con')
+ except AttributeError:
+ pass
+
+
+ def __enter__(self):
+ # Create connection in thread.
+ connection = self._con # pylint: disable=unused-variable
+ return self
+
+
+ def __exit__(self, *exception):
+ self.close()
+
+
+ def __len__(self):
+ "Count of items in cache including expired items."
+ return self.reset('count')
+
+
+ def __getstate__(self):
+ return (self.directory, self.timeout, type(self.disk))
+
+
+ def __setstate__(self, state):
+ self.__init__(*state)
+
+
+ def reset(self, key, value=ENOVAL, update=True):
+ """Reset `key` and `value` item from Settings table.
+
+ Use `reset` to update the value of Cache settings correctly. Cache
+ settings are stored in the Settings table of the SQLite database. If
+ `update` is ``False`` then no attempt is made to update the database.
+
+ If `value` is not given, it is reloaded from the Settings
+ table. Otherwise, the Settings table is updated.
+
+ Settings with the ``disk_`` prefix correspond to Disk
+ attributes. Updating the value will change the unprefixed attribute on
+ the associated Disk instance.
+
+ Settings with the ``sqlite_`` prefix correspond to SQLite
+ pragmas. Updating the value will execute the corresponding PRAGMA
+ statement.
+
+ SQLite PRAGMA statements may be executed before the Settings table
+ exists in the database by setting `update` to ``False``.
+
+ :param str key: Settings key for item
+ :param value: value for item (optional)
+ :param bool update: update database Settings table (default True)
+ :return: updated value for item
+ :raises Timeout: if database timeout occurs
+
+ """
+ sql = self._sql
+ sql_retry = self._sql_retry
+
+ if value is ENOVAL:
+ select = 'SELECT value FROM Settings WHERE key = ?'
+ (value,), = sql_retry(select, (key,)).fetchall()
+ setattr(self, key, value)
+ return value
+
+ if update:
+ statement = 'UPDATE Settings SET value = ? WHERE key = ?'
+ sql_retry(statement, (value, key))
+
+ if key.startswith('sqlite_'):
+ pragma = key[7:]
+
+ # 2016-02-17 GrantJ - PRAGMA and isolation_level=None
+ # don't always play nicely together. Retry setting the
+ # PRAGMA. I think some PRAGMA statements expect to
+ # immediately take an EXCLUSIVE lock on the database. I
+ # can't find any documentation for this but without the
+ # retry, stress will intermittently fail with multiple
+ # processes.
+
+ # 2018-11-05 GrantJ - Avoid setting pragma values that
+ # are already set. Pragma settings like auto_vacuum and
+ # journal_mode can take a long time or may not work after
+ # tables have been created.
+
+ start = time.time()
+ while True:
+ try:
+ try:
+ (old_value,), = sql('PRAGMA %s' % (pragma)).fetchall()
+ update = old_value != value
+ except ValueError:
+ update = True
+ if update:
+ sql('PRAGMA %s = %s' % (pragma, value)).fetchall()
+ break
+ except sqlite3.OperationalError as exc:
+ if str(exc) != 'database is locked':
+ raise
+ diff = time.time() - start
+ if diff > 60:
+ raise
+ time.sleep(0.001)
+ elif key.startswith('disk_'):
+ attr = key[5:]
+ setattr(self._disk, attr, value)
+
+ setattr(self, key, value)
+ return value
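+
+ # A minimal usage sketch (illustrative only); the setting names below are
+ # assumed examples of the ``sqlite_`` and ``disk_`` prefixes described above:
+ #
+ #     cache.reset('sqlite_mmap_size', 2 ** 26)      # runs PRAGMA mmap_size
+ #     cache.reset('disk_min_file_size', 2 ** 15)    # sets Disk.min_file_size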
diff --git a/third_party/python/diskcache/diskcache/djangocache.py b/third_party/python/diskcache/diskcache/djangocache.py
new file mode 100644
index 0000000000..997b852406
--- /dev/null
+++ b/third_party/python/diskcache/diskcache/djangocache.py
@@ -0,0 +1,433 @@
+"Django-compatible disk and file backed cache."
+
+from functools import wraps
+from django.core.cache.backends.base import BaseCache
+
+try:
+ from django.core.cache.backends.base import DEFAULT_TIMEOUT
+except ImportError:
+ # For older versions of Django simply use 300 seconds.
+ DEFAULT_TIMEOUT = 300
+
+from .core import ENOVAL, args_to_key, full_name
+from .fanout import FanoutCache
+
+
+class DjangoCache(BaseCache):
+ "Django-compatible disk and file backed cache."
+ def __init__(self, directory, params):
+ """Initialize DjangoCache instance.
+
+ :param str directory: cache directory
+ :param dict params: cache parameters
+
+ """
+ super(DjangoCache, self).__init__(params)
+ shards = params.get('SHARDS', 8)
+ timeout = params.get('DATABASE_TIMEOUT', 0.010)
+ options = params.get('OPTIONS', {})
+ self._cache = FanoutCache(directory, shards, timeout, **options)
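+
+ # A hypothetical Django settings entry (illustrative only) showing how the
+ # `params` keys read above are typically supplied:
+ #
+ #     CACHES = {
+ #         'default': {
+ #             'BACKEND': 'diskcache.DjangoCache',
+ #             'LOCATION': '/path/to/cache-directory',
+ #             'SHARDS': 8,
+ #             'DATABASE_TIMEOUT': 0.010,
+ #             'OPTIONS': {'size_limit': 2 ** 30},
+ #         },
+ #     }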
+
+
+ @property
+ def directory(self):
+ """Cache directory."""
+ return self._cache.directory
+
+
+ def cache(self, name):
+ """Return Cache with given `name` in subdirectory.
+
+ :param str name: subdirectory name for Cache
+ :return: Cache with given name
+
+ """
+ return self._cache.cache(name)
+
+
+ def deque(self, name):
+ """Return Deque with given `name` in subdirectory.
+
+ :param str name: subdirectory name for Deque
+ :return: Deque with given name
+
+ """
+ return self._cache.deque(name)
+
+
+ def index(self, name):
+ """Return Index with given `name` in subdirectory.
+
+ :param str name: subdirectory name for Index
+ :return: Index with given name
+
+ """
+ return self._cache.index(name)
+
+
+ def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None,
+ read=False, tag=None, retry=True):
+ """Set a value in the cache if the key does not already exist. If
+ timeout is given, that timeout will be used for the key; otherwise the
+ default cache timeout will be used.
+
+ Return True if the value was stored, False otherwise.
+
+ :param key: key for item
+ :param value: value for item
+ :param float timeout: seconds until the item expires
+ (default 300 seconds)
+ :param int version: key version number (default None, cache parameter)
+ :param bool read: read value as bytes from file (default False)
+ :param str tag: text to associate with key (default None)
+ :param bool retry: retry if database timeout occurs (default True)
+ :return: True if item was added
+
+ """
+ # pylint: disable=arguments-differ
+ key = self.make_key(key, version=version)
+ timeout = self.get_backend_timeout(timeout=timeout)
+ return self._cache.add(key, value, timeout, read, tag, retry)
+
+
+ def get(self, key, default=None, version=None, read=False,
+ expire_time=False, tag=False, retry=False):
+ """Fetch a given key from the cache. If the key does not exist, return
+ default, which itself defaults to None.
+
+ :param key: key for item
+ :param default: return value if key is missing (default None)
+ :param int version: key version number (default None, cache parameter)
+ :param bool read: if True, return file handle to value
+ (default False)
+ :param bool expire_time: if True, return expire_time in tuple
+ (default False)
+ :param tag: if True, return tag in tuple (default False)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: value for item if key is found else default
+
+ """
+ # pylint: disable=arguments-differ
+ key = self.make_key(key, version=version)
+ return self._cache.get(key, default, read, expire_time, tag, retry)
+
+
+ def read(self, key, version=None):
+ """Return file handle corresponding to `key` from Cache.
+
+ :param key: Python key to retrieve
+ :param int version: key version number (default None, cache parameter)
+ :return: file open for reading in binary mode
+ :raises KeyError: if key is not found
+
+ """
+ key = self.make_key(key, version=version)
+ return self._cache.read(key)
+
+
+ def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None,
+ read=False, tag=None, retry=True):
+ """Set a value in the cache. If timeout is given, that timeout will be
+ used for the key; otherwise the default cache timeout will be used.
+
+ :param key: key for item
+ :param value: value for item
+ :param float timeout: seconds until the item expires
+ (default 300 seconds)
+ :param int version: key version number (default None, cache parameter)
+ :param bool read: read value as bytes from file (default False)
+ :param str tag: text to associate with key (default None)
+ :param bool retry: retry if database timeout occurs (default True)
+ :return: True if item was set
+
+ """
+ # pylint: disable=arguments-differ
+ key = self.make_key(key, version=version)
+ timeout = self.get_backend_timeout(timeout=timeout)
+ return self._cache.set(key, value, timeout, read, tag, retry)
+
+
+ def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None, retry=True):
+ """Touch a key in the cache. If timeout is given, that timeout will be
+ used for the key; otherwise the default cache timeout will be used.
+
+ :param key: key for item
+ :param float timeout: seconds until the item expires
+ (default 300 seconds)
+ :param int version: key version number (default None, cache parameter)
+ :param bool retry: retry if database timeout occurs (default True)
+ :return: True if key was touched
+
+ """
+ # pylint: disable=arguments-differ
+ key = self.make_key(key, version=version)
+ timeout = self.get_backend_timeout(timeout=timeout)
+ return self._cache.touch(key, timeout, retry)
+
+
+ def pop(self, key, default=None, version=None, expire_time=False,
+ tag=False, retry=True):
+ """Remove corresponding item for `key` from cache and return value.
+
+ If `key` is missing, return `default`.
+
+ Operation is atomic. Concurrent operations will be serialized.
+
+ :param key: key for item
+ :param default: return value if key is missing (default None)
+ :param int version: key version number (default None, cache parameter)
+ :param bool expire_time: if True, return expire_time in tuple
+ (default False)
+ :param tag: if True, return tag in tuple (default False)
+ :param bool retry: retry if database timeout occurs (default True)
+ :return: value for item if key is found else default
+
+ """
+ key = self.make_key(key, version=version)
+ return self._cache.pop(key, default, expire_time, tag, retry)
+
+
+ def delete(self, key, version=None, retry=True):
+ """Delete a key from the cache, failing silently.
+
+ :param key: key for item
+ :param int version: key version number (default None, cache parameter)
+ :param bool retry: retry if database timeout occurs (default True)
+ :return: True if item was deleted
+
+ """
+ # pylint: disable=arguments-differ
+ key = self.make_key(key, version=version)
+ return self._cache.delete(key, retry)
+
+
+ def incr(self, key, delta=1, version=None, default=None, retry=True):
+ """Increment value by delta for item with key.
+
+ If key is missing and default is None then raise ValueError. Else if
+ key is missing and default is not None then use default for value.
+
+ Operation is atomic. All concurrent increment operations will be
+ counted individually.
+
+ Assumes value may be stored in a SQLite column. Most builds that target
+ machines with 64-bit pointer widths will support 64-bit signed
+ integers.
+
+ :param key: key for item
+ :param int delta: amount to increment (default 1)
+ :param int version: key version number (default None, cache parameter)
+ :param int default: value if key is missing (default None)
+ :param bool retry: retry if database timeout occurs (default True)
+ :return: new value for item on success else None
+ :raises ValueError: if key is not found and default is None
+
+ """
+ # pylint: disable=arguments-differ
+ key = self.make_key(key, version=version)
+ try:
+ return self._cache.incr(key, delta, default, retry)
+ except KeyError:
+ raise ValueError("Key '%s' not found" % key)
+
+
+ def decr(self, key, delta=1, version=None, default=None, retry=True):
+ """Decrement value by delta for item with key.
+
+ If key is missing and default is None then raise ValueError. Else if
+ key is missing and default is not None then use default for value.
+
+ Operation is atomic. All concurrent decrement operations will be
+ counted individually.
+
+ Unlike Memcached, negative values are supported. Value may be
+ decremented below zero.
+
+ Assumes value may be stored in a SQLite column. Most builds that target
+ machines with 64-bit pointer widths will support 64-bit signed
+ integers.
+
+ :param key: key for item
+ :param int delta: amount to decrement (default 1)
+ :param int version: key version number (default None, cache parameter)
+ :param int default: value if key is missing (default None)
+ :param bool retry: retry if database timeout occurs (default True)
+ :return: new value for item on success else None
+ :raises ValueError: if key is not found and default is None
+
+ """
+ # pylint: disable=arguments-differ
+ return self.incr(key, -delta, version, default, retry)
+
+
+ def has_key(self, key, version=None):
+ """Returns True if the key is in the cache and has not expired.
+
+ :param key: key for item
+ :param int version: key version number (default None, cache parameter)
+ :return: True if key is found
+
+ """
+ key = self.make_key(key, version=version)
+ return key in self._cache
+
+
+ def expire(self):
+ """Remove expired items from cache.
+
+ :return: count of items removed
+
+ """
+ return self._cache.expire()
+
+
+ def stats(self, enable=True, reset=False):
+ """Return cache statistics hits and misses.
+
+ :param bool enable: enable collecting statistics (default True)
+ :param bool reset: reset hits and misses to 0 (default False)
+ :return: (hits, misses)
+
+ """
+ return self._cache.stats(enable=enable, reset=reset)
+
+
+ def create_tag_index(self):
+ """Create tag index on cache database.
+
+ Better to initialize cache with `tag_index=True` than use this.
+
+ :raises Timeout: if database timeout occurs
+
+ """
+ self._cache.create_tag_index()
+
+
+ def drop_tag_index(self):
+ """Drop tag index on cache database.
+
+ :raises Timeout: if database timeout occurs
+
+ """
+ self._cache.drop_tag_index()
+
+
+ def evict(self, tag):
+ """Remove items with matching `tag` from cache.
+
+ :param str tag: tag identifying items
+ :return: count of items removed
+
+ """
+ return self._cache.evict(tag)
+
+
+ def cull(self):
+ """Cull items from cache until volume is less than size limit.
+
+ :return: count of items removed
+
+ """
+ return self._cache.cull()
+
+
+ def clear(self):
+ "Remove *all* values from the cache at once."
+ return self._cache.clear()
+
+
+ def close(self, **kwargs):
+ "Close the cache connection."
+ # pylint: disable=unused-argument
+ self._cache.close()
+
+
+ def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
+ """Return seconds to expiration.
+
+ :param float timeout: seconds until the item expires
+ (default 300 seconds)
+
+ """
+ if timeout == DEFAULT_TIMEOUT:
+ timeout = self.default_timeout
+ elif timeout == 0:
+ # ticket 21147 - avoid time.time() related precision issues
+ timeout = -1
+ return None if timeout is None else timeout
+
+
+ def memoize(self, name=None, timeout=DEFAULT_TIMEOUT, version=None,
+ typed=False, tag=None):
+ """Memoizing cache decorator.
+
+ Decorator to wrap callable with memoizing function using cache.
+ Repeated calls with the same arguments will lookup result in cache and
+ avoid function evaluation.
+
+ If name is set to None (default), the callable name will be determined
+ automatically.
+
+ When timeout is set to zero, function results will not be set in the
+ cache. Cache lookups still occur, however. Read
+ :doc:`case-study-landing-page-caching` for example usage.
+
+ If typed is set to True, function arguments of different types will be
+ cached separately. For example, f(3) and f(3.0) will be treated as
+ distinct calls with distinct results.
+
+ The original underlying function is accessible through the __wrapped__
+ attribute. This is useful for introspection, for bypassing the cache,
+ or for rewrapping the function with a different cache.
+
+ An additional `__cache_key__` attribute can be used to generate the
+ cache key used for the given arguments.
+
+ Remember to call memoize when decorating a callable. If you forget,
+ then a TypeError will occur.
+
+ :param str name: name given for callable (default None, automatic)
+ :param float timeout: seconds until the item expires
+ (default 300 seconds)
+ :param int version: key version number (default None, cache parameter)
+ :param bool typed: cache different types separately (default False)
+ :param str tag: text to associate with arguments (default None)
+ :return: callable decorator
+
+ """
+ # Caution: Nearly identical code exists in Cache.memoize
+ if callable(name):
+ raise TypeError('name cannot be callable')
+
+ def decorator(func):
+ "Decorator created by memoize() for callable `func`."
+ base = (full_name(func),) if name is None else (name,)
+
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ "Wrapper for callable to cache arguments and return values."
+ key = wrapper.__cache_key__(*args, **kwargs)
+ result = self.get(key, ENOVAL, version, retry=True)
+
+ if result is ENOVAL:
+ result = func(*args, **kwargs)
+ valid_timeout = (
+ timeout is None
+ or timeout == DEFAULT_TIMEOUT
+ or timeout > 0
+ )
+ if valid_timeout:
+ self.set(
+ key, result, timeout, version, tag=tag, retry=True,
+ )
+
+ return result
+
+ def __cache_key__(*args, **kwargs):
+ "Make key for cache given function arguments."
+ return args_to_key(base, args, kwargs, typed)
+
+ wrapper.__cache_key__ = __cache_key__
+ return wrapper
+
+ return decorator
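+
+ # A minimal usage sketch (illustrative only, hypothetical view helper),
+ # assuming this backend is configured as Django's default cache:
+ #
+ #     from django.core.cache import cache
+ #
+ #     @cache.memoize(timeout=60, tag='reports')
+ #     def daily_report(day):
+ #         return compute_report(day)   # hypothetical expensive function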
diff --git a/third_party/python/diskcache/diskcache/fanout.py b/third_party/python/diskcache/diskcache/fanout.py
new file mode 100644
index 0000000000..8a0a722ae6
--- /dev/null
+++ b/third_party/python/diskcache/diskcache/fanout.py
@@ -0,0 +1,677 @@
+"Fanout cache automatically shards keys and values."
+
+import itertools as it
+import operator
+import os.path as op
+import sqlite3
+import sys
+import tempfile
+import time
+
+from .core import ENOVAL, DEFAULT_SETTINGS, Cache, Disk, Timeout
+from .persistent import Deque, Index
+
+############################################################################
+# BEGIN Python 2/3 Shims
+############################################################################
+
+if sys.hexversion >= 0x03000000:
+ from functools import reduce
+
+############################################################################
+# END Python 2/3 Shims
+############################################################################
+
+
+class FanoutCache(object):
+ "Cache that shards keys and values."
+ def __init__(self, directory=None, shards=8, timeout=0.010, disk=Disk,
+ **settings):
+ """Initialize cache instance.
+
+ :param str directory: cache directory
+ :param int shards: number of shards to distribute writes
+ :param float timeout: SQLite connection timeout
+ :param disk: `Disk` instance for serialization
+ :param settings: any of `DEFAULT_SETTINGS`
+
+ """
+ if directory is None:
+ directory = tempfile.mkdtemp(prefix='diskcache-')
+ directory = op.expanduser(directory)
+ directory = op.expandvars(directory)
+
+ default_size_limit = DEFAULT_SETTINGS['size_limit']
+ size_limit = settings.pop('size_limit', default_size_limit) / shards
+
+ self._count = shards
+ self._directory = directory
+ self._shards = tuple(
+ Cache(
+ directory=op.join(directory, '%03d' % num),
+ timeout=timeout,
+ disk=disk,
+ size_limit=size_limit,
+ **settings
+ )
+ for num in range(shards)
+ )
+ self._hash = self._shards[0].disk.hash
+ self._caches = {}
+ self._deques = {}
+ self._indexes = {}
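+
+ # A minimal sizing sketch (illustrative only): the size limit is divided
+ # evenly across shards, so for example
+ #
+ #     cache = FanoutCache(shards=4, size_limit=2 ** 32)
+ #
+ # creates four Cache shards in subdirectories '000'..'003', each limited
+ # to roughly one gibibyte.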
+
+
+ @property
+ def directory(self):
+ """Cache directory."""
+ return self._directory
+
+
+ def __getattr__(self, name):
+ return getattr(self._shards[0], name)
+
+
+ def set(self, key, value, expire=None, read=False, tag=None, retry=False):
+ """Set `key` and `value` item in cache.
+
+ When `read` is `True`, `value` should be a file-like object opened
+ for reading in binary mode.
+
+ If database timeout occurs then fails silently unless `retry` is set to
+ `True` (default `False`).
+
+ :param key: key for item
+ :param value: value for item
+ :param float expire: seconds until the key expires
+ (default None, no expiry)
+ :param bool read: read value as raw bytes from file (default False)
+ :param str tag: text to associate with key (default None)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: True if item was set
+
+ """
+ index = self._hash(key) % self._count
+ shard = self._shards[index]
+ try:
+ return shard.set(key, value, expire, read, tag, retry)
+ except Timeout:
+ return False
+
+
+ def __setitem__(self, key, value):
+ """Set `key` and `value` item in cache.
+
+ Calls :func:`FanoutCache.set` internally with `retry` set to `True`.
+
+ :param key: key for item
+ :param value: value for item
+
+ """
+ index = self._hash(key) % self._count
+ shard = self._shards[index]
+ shard[key] = value
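+
+ # A minimal usage sketch (illustrative only) contrasting the two write
+ # paths above:
+ #
+ #     ok = cache.set('key', 'value')   # returns False if the shard times out
+ #     cache['key'] = 'value'           # mapping interface retries instead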
+
+
+ def touch(self, key, expire=None, retry=False):
+ """Touch `key` in cache and update `expire` time.
+
+ If database timeout occurs then fails silently unless `retry` is set to
+ `True` (default `False`).
+
+ :param key: key for item
+ :param float expire: seconds until the key expires
+ (default None, no expiry)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: True if key was touched
+
+ """
+ index = self._hash(key) % self._count
+ shard = self._shards[index]
+ try:
+ return shard.touch(key, expire, retry)
+ except Timeout:
+ return False
+
+
+ def add(self, key, value, expire=None, read=False, tag=None, retry=False):
+ """Add `key` and `value` item to cache.
+
+ Similar to `set`, but only add to cache if key not present.
+
+ This operation is atomic. Only one concurrent add operation for given
+ key from separate threads or processes will succeed.
+
+ When `read` is `True`, `value` should be a file-like object opened
+ for reading in binary mode.
+
+ If database timeout occurs then fails silently unless `retry` is set to
+ `True` (default `False`).
+
+ :param key: key for item
+ :param value: value for item
+ :param float expire: seconds until the key expires
+ (default None, no expiry)
+ :param bool read: read value as bytes from file (default False)
+ :param str tag: text to associate with key (default None)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: True if item was added
+
+ """
+ index = self._hash(key) % self._count
+ shard = self._shards[index]
+ try:
+ return shard.add(key, value, expire, read, tag, retry)
+ except Timeout:
+ return False
+
+
+ def incr(self, key, delta=1, default=0, retry=False):
+ """Increment value by delta for item with key.
+
+ If key is missing and default is None then raise KeyError. Else if key
+ is missing and default is not None then use default for value.
+
+ Operation is atomic. All concurrent increment operations will be
+ counted individually.
+
+ Assumes value may be stored in a SQLite column. Most builds that target
+ machines with 64-bit pointer widths will support 64-bit signed
+ integers.
+
+ If database timeout occurs then fails silently unless `retry` is set to
+ `True` (default `False`).
+
+ :param key: key for item
+ :param int delta: amount to increment (default 1)
+ :param int default: value if key is missing (default 0)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: new value for item on success else None
+ :raises KeyError: if key is not found and default is None
+
+ """
+ index = self._hash(key) % self._count
+ shard = self._shards[index]
+ try:
+ return shard.incr(key, delta, default, retry)
+ except Timeout:
+ return None
+
+
+ def decr(self, key, delta=1, default=0, retry=False):
+ """Decrement value by delta for item with key.
+
+ If key is missing and default is None then raise KeyError. Else if key
+ is missing and default is not None then use default for value.
+
+ Operation is atomic. All concurrent decrement operations will be
+ counted individually.
+
+ Unlike Memcached, negative values are supported. Value may be
+ decremented below zero.
+
+ Assumes value may be stored in a SQLite column. Most builds that target
+ machines with 64-bit pointer widths will support 64-bit signed
+ integers.
+
+ If database timeout occurs then fails silently unless `retry` is set to
+ `True` (default `False`).
+
+ :param key: key for item
+ :param int delta: amount to decrement (default 1)
+ :param int default: value if key is missing (default 0)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: new value for item on success else None
+ :raises KeyError: if key is not found and default is None
+
+ """
+ index = self._hash(key) % self._count
+ shard = self._shards[index]
+ try:
+ return shard.decr(key, delta, default, retry)
+ except Timeout:
+ return None
+
+
+ def get(self, key, default=None, read=False, expire_time=False, tag=False,
+ retry=False):
+ """Retrieve value from cache. If `key` is missing, return `default`.
+
+ If database timeout occurs then returns `default` unless `retry` is set
+ to `True` (default `False`).
+
+ :param key: key for item
+ :param default: return value if key is missing (default None)
+ :param bool read: if True, return file handle to value
+ (default False)
+ :param bool expire_time: if True, return expire_time in tuple
+ (default False)
+ :param tag: if True, return tag in tuple (default False)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: value for item if key is found else default
+
+ """
+ index = self._hash(key) % self._count
+ shard = self._shards[index]
+ try:
+ return shard.get(key, default, read, expire_time, tag, retry)
+ except (Timeout, sqlite3.OperationalError):
+ return default
+
+
+ def __getitem__(self, key):
+ """Return corresponding value for `key` from cache.
+
+ Calls :func:`FanoutCache.get` internally with `retry` set to `True`.
+
+ :param key: key for item
+ :return: value for item
+ :raises KeyError: if key is not found
+
+ """
+ index = self._hash(key) % self._count
+ shard = self._shards[index]
+ return shard[key]
+
+
+ def read(self, key):
+ """Return file handle corresponding to `key` from cache.
+
+ :param key: key for item
+ :return: file open for reading in binary mode
+ :raises KeyError: if key is not found
+
+ """
+ handle = self.get(key, default=ENOVAL, read=True, retry=True)
+ if handle is ENOVAL:
+ raise KeyError(key)
+ return handle
+
+
+ def __contains__(self, key):
+ """Return `True` if `key` matching item is found in cache.
+
+ :param key: key for item
+ :return: True if key is found
+
+ """
+ index = self._hash(key) % self._count
+ shard = self._shards[index]
+ return key in shard
+
+
+ def pop(self, key, default=None, expire_time=False, tag=False, retry=False):
+ """Remove corresponding item for `key` from cache and return value.
+
+ If `key` is missing, return `default`.
+
+ Operation is atomic. Concurrent operations will be serialized.
+
+ If database timeout occurs then fails silently unless `retry` is set to
+ `True` (default `False`).
+
+ :param key: key for item
+ :param default: return value if key is missing (default None)
+ :param bool expire_time: if True, return expire_time in tuple
+ (default False)
+ :param tag: if True, return tag in tuple (default False)
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: value for item if key is found else default
+
+ """
+ index = self._hash(key) % self._count
+ shard = self._shards[index]
+ try:
+ return shard.pop(key, default, expire_time, tag, retry)
+ except Timeout:
+ return default
+
+
+ def delete(self, key, retry=False):
+ """Delete corresponding item for `key` from cache.
+
+ Missing keys are ignored.
+
+ If database timeout occurs then fails silently unless `retry` is set to
+ `True` (default `False`).
+
+ :param key: key for item
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: True if item was deleted
+
+ """
+ index = self._hash(key) % self._count
+ shard = self._shards[index]
+ try:
+ return shard.delete(key, retry)
+ except Timeout:
+ return False
+
+
+ def __delitem__(self, key):
+ """Delete corresponding item for `key` from cache.
+
+ Calls :func:`FanoutCache.delete` internally with `retry` set to `True`.
+
+ :param key: key for item
+ :raises KeyError: if key is not found
+
+ """
+ index = self._hash(key) % self._count
+ shard = self._shards[index]
+ del shard[key]
+
+
+ def check(self, fix=False, retry=False):
+ """Check database and file system consistency.
+
+ Intended for use in testing and post-mortem error analysis.
+
+ While checking the cache table for consistency, a writer lock is held
+ on the database. The lock blocks other cache clients from writing to
+ the database. For caches with many file references, the lock may be
+ held for a long time. For example, local benchmarking shows that a
+ cache with 1,000 file references takes ~60ms to check.
+
+ If database timeout occurs then fails silently unless `retry` is set to
+ `True` (default `False`).
+
+ :param bool fix: correct inconsistencies
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: list of warnings
+ :raises Timeout: if database timeout occurs
+
+ """
+ warnings = (shard.check(fix, retry) for shard in self._shards)
+ return reduce(operator.iadd, warnings, [])
+
+
+ def expire(self, retry=False):
+ """Remove expired items from cache.
+
+ If database timeout occurs then fails silently unless `retry` is set to
+ `True` (default `False`).
+
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: count of items removed
+
+ """
+ return self._remove('expire', args=(time.time(),), retry=retry)
+
+
+ def create_tag_index(self):
+ """Create tag index on cache database.
+
+ Better to initialize cache with `tag_index=True` than use this.
+
+ :raises Timeout: if database timeout occurs
+
+ """
+ for shard in self._shards:
+ shard.create_tag_index()
+
+
+ def drop_tag_index(self):
+ """Drop tag index on cache database.
+
+ :raises Timeout: if database timeout occurs
+
+ """
+ for shard in self._shards:
+ shard.drop_tag_index()
+
+
+ def evict(self, tag, retry=False):
+ """Remove items with matching `tag` from cache.
+
+ If database timeout occurs then fails silently unless `retry` is set to
+ `True` (default `False`).
+
+ :param str tag: tag identifying items
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: count of items removed
+
+ """
+ return self._remove('evict', args=(tag,), retry=retry)
+
+
+ def cull(self, retry=False):
+ """Cull items from cache until volume is less than size limit.
+
+ If database timeout occurs then fails silently unless `retry` is set to
+ `True` (default `False`).
+
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: count of items removed
+
+ """
+ return self._remove('cull', retry=retry)
+
+
+ def clear(self, retry=False):
+ """Remove all items from cache.
+
+ If database timeout occurs then fails silently unless `retry` is set to
+ `True` (default `False`).
+
+ :param bool retry: retry if database timeout occurs (default False)
+ :return: count of items removed
+
+ """
+ return self._remove('clear', retry=retry)
+
+
+ def _remove(self, name, args=(), retry=False):
+ total = 0
+ for shard in self._shards:
+ method = getattr(shard, name)
+ while True:
+ try:
+ count = method(*args, retry=retry)
+ total += count
+ except Timeout as timeout:
+ total += timeout.args[0]
+ else:
+ break
+ return total
+
+
+ def stats(self, enable=True, reset=False):
+ """Return cache statistics hits and misses.
+
+ :param bool enable: enable collecting statistics (default True)
+ :param bool reset: reset hits and misses to 0 (default False)
+ :return: (hits, misses)
+
+ """
+ results = [shard.stats(enable, reset) for shard in self._shards]
+ total_hits = sum(hits for hits, _ in results)
+ total_misses = sum(misses for _, misses in results)
+ return total_hits, total_misses
+
+
+ def volume(self):
+ """Return estimated total size of cache on disk.
+
+ :return: size in bytes
+
+ """
+ return sum(shard.volume() for shard in self._shards)
+
+
+ def close(self):
+ "Close database connection."
+ for shard in self._shards:
+ shard.close()
+ self._caches.clear()
+ self._deques.clear()
+ self._indexes.clear()
+
+
+ def __enter__(self):
+ return self
+
+
+ def __exit__(self, *exception):
+ self.close()
+
+
+ def __getstate__(self):
+ return (self._directory, self._count, self.timeout, type(self.disk))
+
+
+ def __setstate__(self, state):
+ self.__init__(*state)
+
+
+ def __iter__(self):
+ "Iterate keys in cache including expired items."
+ iterators = (iter(shard) for shard in self._shards)
+ return it.chain.from_iterable(iterators)
+
+
+ def __reversed__(self):
+ "Reverse iterate keys in cache including expired items."
+ iterators = (reversed(shard) for shard in reversed(self._shards))
+ return it.chain.from_iterable(iterators)
+
+
+ def __len__(self):
+ "Count of items in cache including expired items."
+ return sum(len(shard) for shard in self._shards)
+
+
+ def reset(self, key, value=ENOVAL):
+ """Reset `key` and `value` item from Settings table.
+
+ If `value` is not given, it is reloaded from the Settings
+ table. Otherwise, the Settings table is updated.
+
+ Settings attributes on cache objects are lazy-loaded and
+ read-only. Use `reset` to update the value.
+
+ Settings with the ``sqlite_`` prefix correspond to SQLite
+ pragmas. Updating the value will execute the corresponding PRAGMA
+ statement.
+
+ :param str key: Settings key for item
+ :param value: value for item (optional)
+ :return: updated value for item
+
+ """
+ for shard in self._shards:
+ while True:
+ try:
+ result = shard.reset(key, value)
+ except Timeout:
+ pass
+ else:
+ break
+ return result
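+
+ # A minimal usage sketch (illustrative only); 'cull_limit' is an assumed
+ # example of a settings key applied to every shard:
+ #
+ #     cache.reset('cull_limit', 0)   # disable automatic culling during set()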
+
+
+ def cache(self, name):
+ """Return Cache with given `name` in subdirectory.
+
+ >>> fanout_cache = FanoutCache()
+ >>> cache = fanout_cache.cache('test')
+ >>> cache.set('abc', 123)
+ True
+ >>> cache.get('abc')
+ 123
+ >>> len(cache)
+ 1
+ >>> cache.delete('abc')
+ True
+
+ :param str name: subdirectory name for Cache
+ :return: Cache with given name
+
+ """
+ _caches = self._caches
+
+ try:
+ return _caches[name]
+ except KeyError:
+ parts = name.split('/')
+ directory = op.join(self._directory, 'cache', *parts)
+ temp = Cache(directory=directory)
+ _caches[name] = temp
+ return temp
+
+
+ def deque(self, name):
+ """Return Deque with given `name` in subdirectory.
+
+ >>> cache = FanoutCache()
+ >>> deque = cache.deque('test')
+ >>> deque.extend('abc')
+ >>> deque.popleft()
+ 'a'
+ >>> deque.pop()
+ 'c'
+ >>> len(deque)
+ 1
+
+ :param str name: subdirectory name for Deque
+ :return: Deque with given name
+
+ """
+ _deques = self._deques
+
+ try:
+ return _deques[name]
+ except KeyError:
+ parts = name.split('/')
+ directory = op.join(self._directory, 'deque', *parts)
+ temp = Deque(directory=directory)
+ _deques[name] = temp
+ return temp
+
+
+ def index(self, name):
+ """Return Index with given `name` in subdirectory.
+
+ >>> cache = FanoutCache()
+ >>> index = cache.index('test')
+ >>> index['abc'] = 123
+ >>> index['def'] = 456
+ >>> index['ghi'] = 789
+ >>> index.popitem()
+ ('ghi', 789)
+ >>> del index['abc']
+ >>> len(index)
+ 1
+ >>> index['def']
+ 456
+
+ :param str name: subdirectory name for Index
+ :return: Index with given name
+
+ """
+ _indexes = self._indexes
+
+ try:
+ return _indexes[name]
+ except KeyError:
+ parts = name.split('/')
+ directory = op.join(self._directory, 'index', *parts)
+ temp = Index(directory)
+ _indexes[name] = temp
+ return temp
+
+
+############################################################################
+# BEGIN Python 2/3 Shims
+############################################################################
+
+if sys.hexversion < 0x03000000:
+ import types
+ memoize_func = Cache.__dict__['memoize'] # pylint: disable=invalid-name
+ FanoutCache.memoize = types.MethodType(memoize_func, None, FanoutCache)
+else:
+ FanoutCache.memoize = Cache.memoize
+
+############################################################################
+# END Python 2/3 Shims
+############################################################################
diff --git a/third_party/python/diskcache/diskcache/persistent.py b/third_party/python/diskcache/diskcache/persistent.py
new file mode 100644
index 0000000000..961f77361f
--- /dev/null
+++ b/third_party/python/diskcache/diskcache/persistent.py
@@ -0,0 +1,1403 @@
+"""Persistent Data Types
+
+"""
+
+import operator as op
+import sys
+
+from collections import OrderedDict
+from contextlib import contextmanager
+from shutil import rmtree
+
+from .core import BytesType, Cache, ENOVAL, TextType
+
+############################################################################
+# BEGIN Python 2/3 Shims
+############################################################################
+
+try:
+ from collections.abc import MutableMapping, Sequence
+ from collections.abc import KeysView, ValuesView, ItemsView
+except ImportError:
+ from collections import MutableMapping, Sequence
+ from collections import KeysView, ValuesView, ItemsView
+
+if sys.hexversion < 0x03000000:
+ from itertools import izip as zip # pylint: disable=redefined-builtin,no-name-in-module,ungrouped-imports
+ range = xrange # pylint: disable=redefined-builtin,invalid-name,undefined-variable
+
+############################################################################
+# END Python 2/3 Shims
+############################################################################
+
+
+def _make_compare(seq_op, doc):
+ "Make compare method with Sequence semantics."
+ def compare(self, that):
+ "Compare method for deque and sequence."
+ if not isinstance(that, Sequence):
+ return NotImplemented
+
+ len_self = len(self)
+ len_that = len(that)
+
+ if len_self != len_that:
+ if seq_op is op.eq:
+ return False
+ if seq_op is op.ne:
+ return True
+
+ for alpha, beta in zip(self, that):
+ if alpha != beta:
+ return seq_op(alpha, beta)
+
+ return seq_op(len_self, len_that)
+
+ compare.__name__ = '__{0}__'.format(seq_op.__name__)
+ doc_str = 'Return True if and only if deque is {0} `that`.'
+ compare.__doc__ = doc_str.format(doc)
+
+ return compare
+
+
+class Deque(Sequence):
+ """Persistent sequence with double-ended queue semantics.
+
+ Double-ended queue is an ordered collection with optimized access at its
+ endpoints.
+
+ Items are serialized to disk. Deque may be initialized from directory path
+ where items are stored.
+
+ >>> deque = Deque()
+ >>> deque += range(5)
+ >>> list(deque)
+ [0, 1, 2, 3, 4]
+ >>> for value in range(5):
+ ... deque.appendleft(-value)
+ >>> len(deque)
+ 10
+ >>> list(deque)
+ [-4, -3, -2, -1, 0, 0, 1, 2, 3, 4]
+ >>> deque.pop()
+ 4
+ >>> deque.popleft()
+ -4
+ >>> deque.reverse()
+ >>> list(deque)
+ [3, 2, 1, 0, 0, -1, -2, -3]
+
+ """
+ def __init__(self, iterable=(), directory=None):
+ """Initialize deque instance.
+
+ If directory is None then a temporary directory is created. The
+ directory will *not* be automatically removed.
+
+ :param iterable: iterable of items to append to deque
+ :param directory: deque directory (default None)
+
+ """
+ self._cache = Cache(directory, eviction_policy='none')
+ with self.transact():
+ self.extend(iterable)
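+
+ # A minimal usage sketch (illustrative only, hypothetical path): passing a
+ # directory makes the deque reusable across processes:
+ #
+ #     deque = Deque(directory='/path/to/deque-directory')
+ #     deque.append('job-1')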
+
+
+ @classmethod
+ def fromcache(cls, cache, iterable=()):
+ """Initialize deque using `cache`.
+
+ >>> cache = Cache()
+ >>> deque = Deque.fromcache(cache, [5, 6, 7, 8])
+ >>> deque.cache is cache
+ True
+ >>> len(deque)
+ 4
+ >>> 7 in deque
+ True
+ >>> deque.popleft()
+ 5
+
+ :param Cache cache: cache to use
+ :param iterable: iterable of items
+ :return: initialized Deque
+
+ """
+ # pylint: disable=no-member,protected-access
+ self = cls.__new__(cls)
+ self._cache = cache
+ self.extend(iterable)
+ return self
+
+
+ @property
+ def cache(self):
+ "Cache used by deque."
+ return self._cache
+
+
+ @property
+ def directory(self):
+ "Directory path where deque is stored."
+ return self._cache.directory
+
+
+ def _index(self, index, func):
+ len_self = len(self)
+
+ if index >= 0:
+ if index >= len_self:
+ raise IndexError('deque index out of range')
+
+ for key in self._cache.iterkeys():
+ if index == 0:
+ try:
+ return func(key)
+ except KeyError:
+ continue
+ index -= 1
+ else:
+ if index < -len_self:
+ raise IndexError('deque index out of range')
+
+ index += 1
+
+ for key in self._cache.iterkeys(reverse=True):
+ if index == 0:
+ try:
+ return func(key)
+ except KeyError:
+ continue
+ index += 1
+
+ raise IndexError('deque index out of range')
+
+
+ def __getitem__(self, index):
+ """deque.__getitem__(index) <==> deque[index]
+
+ Return corresponding item for `index` in deque.
+
+ See also `Deque.peekleft` and `Deque.peek` for indexing deque at index
+ ``0`` or ``-1``.
+
+ >>> deque = Deque()
+ >>> deque.extend('abcde')
+ >>> deque[1]
+ 'b'
+ >>> deque[-2]
+ 'd'
+
+ :param int index: index of item
+ :return: corresponding item
+ :raises IndexError: if index out of range
+
+ """
+ return self._index(index, self._cache.__getitem__)
+
+
+ def __setitem__(self, index, value):
+ """deque.__setitem__(index, value) <==> deque[index] = value
+
+ Store `value` in deque at `index`.
+
+ >>> deque = Deque()
+ >>> deque.extend([None] * 3)
+ >>> deque[0] = 'a'
+ >>> deque[1] = 'b'
+ >>> deque[-1] = 'c'
+ >>> ''.join(deque)
+ 'abc'
+
+ :param int index: index of value
+ :param value: value to store
+ :raises IndexError: if index out of range
+
+ """
+ set_value = lambda key: self._cache.__setitem__(key, value)
+ self._index(index, set_value)
+
+
+ def __delitem__(self, index):
+ """deque.__delitem__(index) <==> del deque[index]
+
+ Delete item in deque at `index`.
+
+ >>> deque = Deque()
+ >>> deque.extend([None] * 3)
+ >>> del deque[0]
+ >>> del deque[1]
+ >>> del deque[-1]
+ >>> len(deque)
+ 0
+
+ :param int index: index of item
+ :raises IndexError: if index out of range
+
+ """
+ self._index(index, self._cache.__delitem__)
+
+
+ def __repr__(self):
+ """deque.__repr__() <==> repr(deque)
+
+ Return string with printable representation of deque.
+
+ """
+ name = type(self).__name__
+ return '{0}(directory={1!r})'.format(name, self.directory)
+
+
+ __eq__ = _make_compare(op.eq, 'equal to')
+ __ne__ = _make_compare(op.ne, 'not equal to')
+ __lt__ = _make_compare(op.lt, 'less than')
+ __gt__ = _make_compare(op.gt, 'greater than')
+ __le__ = _make_compare(op.le, 'less than or equal to')
+ __ge__ = _make_compare(op.ge, 'greater than or equal to')
+
+
+ def __iadd__(self, iterable):
+ """deque.__iadd__(iterable) <==> deque += iterable
+
+ Extend back side of deque with items from iterable.
+
+ :param iterable: iterable of items to append to deque
+ :return: deque with added items
+
+ """
+ self.extend(iterable)
+ return self
+
+
+ def __iter__(self):
+ """deque.__iter__() <==> iter(deque)
+
+ Return iterator of deque from front to back.
+
+ """
+ _cache = self._cache
+
+ for key in _cache.iterkeys():
+ try:
+ yield _cache[key]
+ except KeyError:
+ pass
+
+
+ def __len__(self):
+ """deque.__len__() <==> len(deque)
+
+ Return length of deque.
+
+ """
+ return len(self._cache)
+
+
+ def __reversed__(self):
+ """deque.__reversed__() <==> reversed(deque)
+
+ Return iterator of deque from back to front.
+
+ >>> deque = Deque()
+ >>> deque.extend('abcd')
+ >>> iterator = reversed(deque)
+ >>> next(iterator)
+ 'd'
+ >>> list(iterator)
+ ['c', 'b', 'a']
+
+ """
+ _cache = self._cache
+
+ for key in _cache.iterkeys(reverse=True):
+ try:
+ yield _cache[key]
+ except KeyError:
+ pass
+
+
+ def __getstate__(self):
+ return self.directory
+
+
+ def __setstate__(self, state):
+ self.__init__(directory=state)
+
+
+ def append(self, value):
+ """Add `value` to back of deque.
+
+ >>> deque = Deque()
+ >>> deque.append('a')
+ >>> deque.append('b')
+ >>> deque.append('c')
+ >>> list(deque)
+ ['a', 'b', 'c']
+
+ :param value: value to add to back of deque
+
+ """
+ self._cache.push(value, retry=True)
+
+
+ def appendleft(self, value):
+ """Add `value` to front of deque.
+
+ >>> deque = Deque()
+ >>> deque.appendleft('a')
+ >>> deque.appendleft('b')
+ >>> deque.appendleft('c')
+ >>> list(deque)
+ ['c', 'b', 'a']
+
+ :param value: value to add to front of deque
+
+ """
+ self._cache.push(value, side='front', retry=True)
+
+
+ def clear(self):
+ """Remove all elements from deque.
+
+ >>> deque = Deque('abc')
+ >>> len(deque)
+ 3
+ >>> deque.clear()
+ >>> list(deque)
+ []
+
+ """
+ self._cache.clear(retry=True)
+
+
+ def count(self, value):
+ """Return number of occurrences of `value` in deque.
+
+ >>> deque = Deque()
+ >>> deque += [num for num in range(1, 5) for _ in range(num)]
+ >>> deque.count(0)
+ 0
+ >>> deque.count(1)
+ 1
+ >>> deque.count(4)
+ 4
+
+ :param value: value to count in deque
+ :return: count of items equal to value in deque
+
+ """
+ return sum(1 for item in self if value == item)
+
+
+ def extend(self, iterable):
+ """Extend back side of deque with values from `iterable`.
+
+ :param iterable: iterable of values
+
+ """
+ for value in iterable:
+ self.append(value)
+
+
+ def extendleft(self, iterable):
+ """Extend front side of deque with value from `iterable`.
+
+ >>> deque = Deque()
+ >>> deque.extendleft('abc')
+ >>> list(deque)
+ ['c', 'b', 'a']
+
+ :param iterable: iterable of values
+
+ """
+ for value in iterable:
+ self.appendleft(value)
+
+
+ def peek(self):
+ """Peek at value at back of deque.
+
+ Faster than indexing deque at -1.
+
+ If deque is empty then raise IndexError.
+
+ >>> deque = Deque()
+ >>> deque.peek()
+ Traceback (most recent call last):
+ ...
+ IndexError: peek from an empty deque
+ >>> deque += 'abc'
+ >>> deque.peek()
+ 'c'
+
+ :return: value at back of deque
+ :raises IndexError: if deque is empty
+
+ """
+ default = None, ENOVAL
+ _, value = self._cache.peek(default=default, side='back', retry=True)
+ if value is ENOVAL:
+ raise IndexError('peek from an empty deque')
+ return value
+
+
+ def peekleft(self):
+ """Peek at value at back of deque.
+
+ Faster than indexing deque at 0.
+
+ If deque is empty then raise IndexError.
+
+ >>> deque = Deque()
+ >>> deque.peekleft()
+ Traceback (most recent call last):
+ ...
+ IndexError: peek from an empty deque
+ >>> deque += 'abc'
+ >>> deque.peekleft()
+ 'a'
+
+ :return: value at front of deque
+ :raises IndexError: if deque is empty
+
+ """
+ default = None, ENOVAL
+ _, value = self._cache.peek(default=default, side='front', retry=True)
+ if value is ENOVAL:
+ raise IndexError('peek from an empty deque')
+ return value
+
+
+ def pop(self):
+ """Remove and return value at back of deque.
+
+ If deque is empty then raise IndexError.
+
+ >>> deque = Deque()
+ >>> deque += 'ab'
+ >>> deque.pop()
+ 'b'
+ >>> deque.pop()
+ 'a'
+ >>> deque.pop()
+ Traceback (most recent call last):
+ ...
+ IndexError: pop from an empty deque
+
+ :return: value at back of deque
+ :raises IndexError: if deque is empty
+
+ """
+ default = None, ENOVAL
+ _, value = self._cache.pull(default=default, side='back', retry=True)
+ if value is ENOVAL:
+ raise IndexError('pop from an empty deque')
+ return value
+
+
+ def popleft(self):
+ """Remove and return value at front of deque.
+
+ >>> deque = Deque()
+ >>> deque += 'ab'
+ >>> deque.popleft()
+ 'a'
+ >>> deque.popleft()
+ 'b'
+ >>> deque.popleft()
+ Traceback (most recent call last):
+ ...
+ IndexError: pop from an empty deque
+
+ :return: value at front of deque
+ :raises IndexError: if deque is empty
+
+ """
+ default = None, ENOVAL
+ _, value = self._cache.pull(default=default, retry=True)
+ if value is ENOVAL:
+ raise IndexError('pop from an empty deque')
+ return value
+
+
+ def remove(self, value):
+ """Remove first occurrence of `value` in deque.
+
+ >>> deque = Deque()
+ >>> deque += 'aab'
+ >>> deque.remove('a')
+ >>> list(deque)
+ ['a', 'b']
+ >>> deque.remove('b')
+ >>> list(deque)
+ ['a']
+ >>> deque.remove('c')
+ Traceback (most recent call last):
+ ...
+ ValueError: deque.remove(value): value not in deque
+
+ :param value: value to remove
+ :raises ValueError: if value not in deque
+
+ """
+ _cache = self._cache
+
+ for key in _cache.iterkeys():
+ try:
+ item = _cache[key]
+ except KeyError:
+ continue
+ else:
+ if value == item:
+ try:
+ del _cache[key]
+ except KeyError:
+ continue
+ return
+
+ raise ValueError('deque.remove(value): value not in deque')
+
+
+ def reverse(self):
+ """Reverse deque in place.
+
+ >>> deque = Deque()
+ >>> deque += 'abc'
+ >>> deque.reverse()
+ >>> list(deque)
+ ['c', 'b', 'a']
+
+ """
+ # GrantJ 2019-03-22 Consider using an algorithm that swaps the values
+ # at two keys. Like self._cache.swap(key1, key2, retry=True) The swap
+ # method would exchange the values at two given keys. Then, using a
+ # forward iterator and a reverse iterator, the reverse method could
+ # avoid making copies of the values.
+ temp = Deque(iterable=reversed(self))
+ self.clear()
+ self.extend(temp)
+ directory = temp.directory
+ del temp
+ rmtree(directory)
+
+
+ def rotate(self, steps=1):
+ """Rotate deque right by `steps`.
+
+ If steps is negative then rotate left.
+
+ >>> deque = Deque()
+ >>> deque += range(5)
+ >>> deque.rotate(2)
+ >>> list(deque)
+ [3, 4, 0, 1, 2]
+ >>> deque.rotate(-1)
+ >>> list(deque)
+ [4, 0, 1, 2, 3]
+
+ :param int steps: number of steps to rotate (default 1)
+
+ """
+ if not isinstance(steps, int):
+ type_name = type(steps).__name__
+ raise TypeError('integer argument expected, got %s' % type_name)
+
+ len_self = len(self)
+
+ if not len_self:
+ return
+
+ if steps >= 0:
+ steps %= len_self
+
+ for _ in range(steps):
+ try:
+ value = self.pop()
+ except IndexError:
+ return
+ else:
+ self.appendleft(value)
+ else:
+ steps *= -1
+ steps %= len_self
+
+ for _ in range(steps):
+ try:
+ value = self.popleft()
+ except IndexError:
+ return
+ else:
+ self.append(value)
+
+
+ __hash__ = None
+
+
+ @contextmanager
+ def transact(self):
+ """Context manager to perform a transaction by locking the deque.
+
+ While the deque is locked, no other write operation is permitted.
+ Transactions should therefore be as short as possible. Read and write
+ operations performed in a transaction are atomic. Read operations may
+ occur concurrently with a transaction.
+
+ Transactions may be nested and may not be shared between threads.
+
+ >>> from diskcache import Deque
+ >>> deque = Deque()
+ >>> deque += range(5)
+ >>> with deque.transact(): # Atomically rotate elements.
+ ... value = deque.pop()
+ ... deque.appendleft(value)
+ >>> list(deque)
+ [4, 0, 1, 2, 3]
+
+ :return: context manager for use in `with` statement
+
+ """
+ with self._cache.transact(retry=True):
+ yield
+
+
+class Index(MutableMapping):
+ """Persistent mutable mapping with insertion order iteration.
+
+ Items are serialized to disk. Index may be initialized from a directory path
+ where items are stored.
+
+ Hashing protocol is not used. Keys are looked up by their serialized
+ format. See ``diskcache.Disk`` for details.
+
+ >>> index = Index()
+ >>> index.update([('a', 1), ('b', 2), ('c', 3)])
+ >>> index['a']
+ 1
+ >>> list(index)
+ ['a', 'b', 'c']
+ >>> len(index)
+ 3
+ >>> del index['b']
+ >>> index.popitem()
+ ('c', 3)
+
+ """
+ def __init__(self, *args, **kwargs):
+ """Initialize index in directory and update items.
+
+ Optional first argument may be a string specifying the directory where
+ items are stored. When None or not given, a temporary directory is created.
+
+ >>> index = Index({'a': 1, 'b': 2, 'c': 3})
+ >>> len(index)
+ 3
+ >>> directory = index.directory
+ >>> inventory = Index(directory, d=4)
+ >>> inventory['b']
+ 2
+ >>> len(inventory)
+ 4
+
+ """
+ if args and isinstance(args[0], (BytesType, TextType)):
+ directory = args[0]
+ args = args[1:]
+ else:
+ if args and args[0] is None:
+ args = args[1:]
+ directory = None
+ self._cache = Cache(directory, eviction_policy='none')
+ self.update(*args, **kwargs)
+
+
+ @classmethod
+ def fromcache(cls, cache, *args, **kwargs):
+ """Initialize index using `cache` and update items.
+
+ >>> cache = Cache()
+ >>> index = Index.fromcache(cache, {'a': 1, 'b': 2, 'c': 3})
+ >>> index.cache is cache
+ True
+ >>> len(index)
+ 3
+ >>> 'b' in index
+ True
+ >>> index['c']
+ 3
+
+ :param Cache cache: cache to use
+ :param args: mapping or sequence of items
+ :param kwargs: mapping of items
+ :return: initialized Index
+
+ """
+ # pylint: disable=no-member,protected-access
+ self = cls.__new__(cls)
+ self._cache = cache
+ self.update(*args, **kwargs)
+ return self
+
+
+ @property
+ def cache(self):
+ "Cache used by index."
+ return self._cache
+
+
+ @property
+ def directory(self):
+ "Directory path where items are stored."
+ return self._cache.directory
+
+
+ def __getitem__(self, key):
+ """index.__getitem__(key) <==> index[key]
+
+ Return corresponding value for `key` in index.
+
+ >>> index = Index()
+ >>> index.update({'a': 1, 'b': 2})
+ >>> index['a']
+ 1
+ >>> index['b']
+ 2
+ >>> index['c']
+ Traceback (most recent call last):
+ ...
+ KeyError: 'c'
+
+ :param key: key for item
+ :return: value for item in index with given key
+ :raises KeyError: if key is not found
+
+ """
+ return self._cache[key]
+
+
+ def __setitem__(self, key, value):
+ """index.__setitem__(key, value) <==> index[key] = value
+
+ Set `key` and `value` item in index.
+
+ >>> index = Index()
+ >>> index['a'] = 1
+ >>> index[0] = None
+ >>> len(index)
+ 2
+
+ :param key: key for item
+ :param value: value for item
+
+ """
+ self._cache[key] = value
+
+
+ def __delitem__(self, key):
+ """index.__delitem__(key) <==> del index[key]
+
+ Delete corresponding item for `key` from index.
+
+ >>> index = Index()
+ >>> index.update({'a': 1, 'b': 2})
+ >>> del index['a']
+ >>> del index['b']
+ >>> len(index)
+ 0
+ >>> del index['c']
+ Traceback (most recent call last):
+ ...
+ KeyError: 'c'
+
+ :param key: key for item
+ :raises KeyError: if key is not found
+
+ """
+ del self._cache[key]
+
+
+ def setdefault(self, key, default=None):
+ """Set and get value for `key` in index using `default`.
+
+ If `key` is not in index then set corresponding value to `default`. If
+ `key` is in index then ignore `default` and return existing value.
+
+ >>> index = Index()
+ >>> index.setdefault('a', 0)
+ 0
+ >>> index.setdefault('a', 1)
+ 0
+
+ :param key: key for item
+ :param default: value if key is missing (default None)
+ :return: value for item in index with given key
+
+ """
+ _cache = self._cache
+ while True:
+ try:
+ return _cache[key]
+ except KeyError:
+ _cache.add(key, default, retry=True)
+
+
+ def peekitem(self, last=True):
+ """Peek at key and value item pair in index based on iteration order.
+
+ >>> index = Index()
+ >>> for num, letter in enumerate('xyz'):
+ ... index[letter] = num
+ >>> index.peekitem()
+ ('z', 2)
+ >>> index.peekitem(last=False)
+ ('x', 0)
+
+ :param bool last: last item in iteration order (default True)
+ :return: key and value item pair
+ :raises KeyError: if cache is empty
+
+ """
+ return self._cache.peekitem(last, retry=True)
+
+
+ def pop(self, key, default=ENOVAL):
+ """Remove corresponding item for `key` from index and return value.
+
+ If `key` is missing then return `default`. If `default` is `ENOVAL`
+ then raise KeyError.
+
+ >>> index = Index({'a': 1, 'b': 2})
+ >>> index.pop('a')
+ 1
+ >>> index.pop('b')
+ 2
+ >>> index.pop('c', default=3)
+ 3
+ >>> index.pop('d')
+ Traceback (most recent call last):
+ ...
+ KeyError: 'd'
+
+ :param key: key for item
+ :param default: return value if key is missing (default ENOVAL)
+ :return: value for item if key is found else default
+ :raises KeyError: if key is not found and default is ENOVAL
+
+ """
+ _cache = self._cache
+ value = _cache.pop(key, default=default, retry=True)
+ if value is ENOVAL:
+ raise KeyError(key)
+ return value
+
+
+ def popitem(self, last=True):
+ """Remove and return item pair.
+
+ Item pairs are returned in last-in-first-out (LIFO) order if last is
+ True else first-in-first-out (FIFO) order. LIFO order imitates a stack
+ and FIFO order imitates a queue.
+
+ >>> index = Index()
+ >>> index.update([('a', 1), ('b', 2), ('c', 3)])
+ >>> index.popitem()
+ ('c', 3)
+ >>> index.popitem(last=False)
+ ('a', 1)
+ >>> index.popitem()
+ ('b', 2)
+ >>> index.popitem()
+ Traceback (most recent call last):
+ ...
+ KeyError: 'dictionary is empty'
+
+ :param bool last: pop last item pair (default True)
+ :return: key and value item pair
+ :raises KeyError: if index is empty
+
+ """
+ # pylint: disable=arguments-differ
+ _cache = self._cache
+
+ with _cache.transact(retry=True):
+ key, value = _cache.peekitem(last=last)
+ del _cache[key]
+
+ return key, value
+
+
+ def push(self, value, prefix=None, side='back'):
+ """Push `value` onto `side` of queue in index identified by `prefix`.
+
+ When prefix is None, integer keys are used. Otherwise, string keys are
+ used in the format "prefix-integer". Integer starts at 500 trillion.
+
+ Defaults to pushing value on back of queue. Set side to 'front' to push
+ value on front of queue. Side must be one of 'back' or 'front'.
+
+ See also `Index.pull`.
+
+ >>> index = Index()
+ >>> print(index.push('apples'))
+ 500000000000000
+ >>> print(index.push('beans'))
+ 500000000000001
+ >>> print(index.push('cherries', side='front'))
+ 499999999999999
+ >>> index[500000000000001]
+ 'beans'
+ >>> index.push('dates', prefix='fruit')
+ 'fruit-500000000000000'
+
+ :param value: value for item
+ :param str prefix: key prefix (default None, key is integer)
+ :param str side: either 'back' or 'front' (default 'back')
+ :return: key for item in cache
+
+ """
+ return self._cache.push(value, prefix, side, retry=True)
+
+
+ def pull(self, prefix=None, default=(None, None), side='front'):
+ """Pull key and value item pair from `side` of queue in index.
+
+ When prefix is None, integer keys are used. Otherwise, string keys are
+ used in the format "prefix-integer". Integer starts at 500 trillion.
+
+ If queue is empty, return default.
+
+ Defaults to pulling key and value item pairs from front of queue. Set
+ side to 'back' to pull from back of queue. Side must be one of 'front'
+ or 'back'.
+
+ See also `Index.push`.
+
+ >>> index = Index()
+ >>> for letter in 'abc':
+ ... print(index.push(letter))
+ 500000000000000
+ 500000000000001
+ 500000000000002
+ >>> key, value = index.pull()
+ >>> print(key)
+ 500000000000000
+ >>> value
+ 'a'
+ >>> _, value = index.pull(side='back')
+ >>> value
+ 'c'
+ >>> index.pull(prefix='fruit')
+ (None, None)
+
+ :param str prefix: key prefix (default None, key is integer)
+ :param default: value to return if key is missing
+ (default (None, None))
+ :param str side: either 'front' or 'back' (default 'front')
+ :return: key and value item pair or default if queue is empty
+
+ """
+ return self._cache.pull(prefix, default, side, retry=True)
+
+
+ def clear(self):
+ """Remove all items from index.
+
+ >>> index = Index({'a': 0, 'b': 1, 'c': 2})
+ >>> len(index)
+ 3
+ >>> index.clear()
+ >>> dict(index)
+ {}
+
+ """
+ self._cache.clear(retry=True)
+
+
+ def __iter__(self):
+ """index.__iter__() <==> iter(index)
+
+ Return iterator of index keys in insertion order.
+
+ """
+ return iter(self._cache)
+
+
+ def __reversed__(self):
+ """index.__reversed__() <==> reversed(index)
+
+ Return iterator of index keys in reversed insertion order.
+
+ >>> index = Index()
+ >>> index.update([('a', 1), ('b', 2), ('c', 3)])
+ >>> iterator = reversed(index)
+ >>> next(iterator)
+ 'c'
+ >>> list(iterator)
+ ['b', 'a']
+
+ """
+ return reversed(self._cache)
+
+
+ def __len__(self):
+ """index.__len__() <==> len(index)
+
+ Return length of index.
+
+ """
+ return len(self._cache)
+
+
+ if sys.hexversion < 0x03000000:
+ def keys(self):
+ """List of index keys.
+
+ >>> index = Index()
+ >>> index.update([('a', 1), ('b', 2), ('c', 3)])
+ >>> index.keys()
+ ['a', 'b', 'c']
+
+ :return: list of keys
+
+ """
+ return list(self._cache)
+
+
+ def values(self):
+ """List of index values.
+
+ >>> index = Index()
+ >>> index.update([('a', 1), ('b', 2), ('c', 3)])
+ >>> index.values()
+ [1, 2, 3]
+
+ :return: list of values
+
+ """
+ return list(self.itervalues())
+
+
+ def items(self):
+ """List of index items.
+
+ >>> index = Index()
+ >>> index.update([('a', 1), ('b', 2), ('c', 3)])
+ >>> index.items()
+ [('a', 1), ('b', 2), ('c', 3)]
+
+ :return: list of items
+
+ """
+ return list(self.iteritems())
+
+
+ def iterkeys(self):
+ """Iterator of index keys.
+
+ >>> index = Index()
+ >>> index.update([('a', 1), ('b', 2), ('c', 3)])
+ >>> list(index.iterkeys())
+ ['a', 'b', 'c']
+
+ :return: iterator of keys
+
+ """
+ return iter(self._cache)
+
+
+ def itervalues(self):
+ """Iterator of index values.
+
+ >>> index = Index()
+ >>> index.update([('a', 1), ('b', 2), ('c', 3)])
+ >>> list(index.itervalues())
+ [1, 2, 3]
+
+ :return: iterator of values
+
+ """
+ _cache = self._cache
+
+ for key in _cache:
+ while True:
+ try:
+ yield _cache[key]
+ except KeyError:
+ pass
+ break
+
+
+ def iteritems(self):
+ """Iterator of index items.
+
+ >>> index = Index()
+ >>> index.update([('a', 1), ('b', 2), ('c', 3)])
+ >>> list(index.iteritems())
+ [('a', 1), ('b', 2), ('c', 3)]
+
+ :return: iterator of items
+
+ """
+ _cache = self._cache
+
+ for key in _cache:
+ while True:
+ try:
+ yield key, _cache[key]
+ except KeyError:
+ pass
+ break
+
+
+ def viewkeys(self):
+ """Set-like object providing a view of index keys.
+
+ >>> index = Index()
+ >>> index.update({'a': 1, 'b': 2, 'c': 3})
+ >>> keys_view = index.viewkeys()
+ >>> 'b' in keys_view
+ True
+
+ :return: keys view
+
+ """
+ return KeysView(self)
+
+
+ def viewvalues(self):
+ """Set-like object providing a view of index values.
+
+ >>> index = Index()
+ >>> index.update({'a': 1, 'b': 2, 'c': 3})
+ >>> values_view = index.viewvalues()
+ >>> 2 in values_view
+ True
+
+ :return: values view
+
+ """
+ return ValuesView(self)
+
+
+ def viewitems(self):
+ """Set-like object providing a view of index items.
+
+ >>> index = Index()
+ >>> index.update({'a': 1, 'b': 2, 'c': 3})
+ >>> items_view = index.viewitems()
+ >>> ('b', 2) in items_view
+ True
+
+ :return: items view
+
+ """
+ return ItemsView(self)
+
+
+ else:
+ def keys(self):
+ """Set-like object providing a view of index keys.
+
+ >>> index = Index()
+ >>> index.update({'a': 1, 'b': 2, 'c': 3})
+ >>> keys_view = index.keys()
+ >>> 'b' in keys_view
+ True
+
+ :return: keys view
+
+ """
+ return KeysView(self)
+
+
+ def values(self):
+ """Set-like object providing a view of index values.
+
+ >>> index = Index()
+ >>> index.update({'a': 1, 'b': 2, 'c': 3})
+ >>> values_view = index.values()
+ >>> 2 in values_view
+ True
+
+ :return: values view
+
+ """
+ return ValuesView(self)
+
+
+ def items(self):
+ """Set-like object providing a view of index items.
+
+ >>> index = Index()
+ >>> index.update({'a': 1, 'b': 2, 'c': 3})
+ >>> items_view = index.items()
+ >>> ('b', 2) in items_view
+ True
+
+ :return: items view
+
+ """
+ return ItemsView(self)
+
+
+ __hash__ = None
+
+
+ def __getstate__(self):
+ return self.directory
+
+
+ def __setstate__(self, state):
+ self.__init__(state)
+
+
+ def __eq__(self, other):
+ """index.__eq__(other) <==> index == other
+
+ Compare equality for index and `other`.
+
+ Comparison to another index or ordered dictionary is
+ order-sensitive. Comparison to all other mappings is order-insensitive.
+
+ >>> index = Index()
+ >>> pairs = [('a', 1), ('b', 2), ('c', 3)]
+ >>> index.update(pairs)
+ >>> from collections import OrderedDict
+ >>> od = OrderedDict(pairs)
+ >>> index == od
+ True
+ >>> index == {'c': 3, 'b': 2, 'a': 1}
+ True
+
+ :param other: other mapping in equality comparison
+ :return: True if index equals other
+
+ """
+ if len(self) != len(other):
+ return False
+
+ if isinstance(other, (Index, OrderedDict)):
+ alpha = ((key, self[key]) for key in self)
+ beta = ((key, other[key]) for key in other)
+ pairs = zip(alpha, beta)
+ return not any(a != x or b != y for (a, b), (x, y) in pairs)
+ else:
+ return all(self[key] == other.get(key, ENOVAL) for key in self)
+
+
+ def __ne__(self, other):
+ """index.__ne__(other) <==> index != other
+
+ Compare inequality for index and `other`.
+
+ Comparison to another index or ordered dictionary is
+ order-sensitive. Comparison to all other mappings is order-insensitive.
+
+ >>> index = Index()
+ >>> index.update([('a', 1), ('b', 2), ('c', 3)])
+ >>> from collections import OrderedDict
+ >>> od = OrderedDict([('c', 3), ('b', 2), ('a', 1)])
+ >>> index != od
+ True
+ >>> index != {'a': 1, 'b': 2}
+ True
+
+ :param other: other mapping in inequality comparison
+ :return: True if index does not equal other
+
+ """
+ return not self == other
+
+
+ def memoize(self, name=None, typed=False):
+ """Memoizing cache decorator.
+
+ Decorator to wrap callable with memoizing function using cache.
+ Repeated calls with the same arguments will look up the result in the
+ cache and avoid function evaluation.
+
+ If name is set to None (default), the callable name will be determined
+ automatically.
+
+ If typed is set to True, function arguments of different types will be
+ cached separately. For example, f(3) and f(3.0) will be treated as
+ distinct calls with distinct results.
+
+ The original underlying function is accessible through the __wrapped__
+ attribute. This is useful for introspection, for bypassing the cache,
+ or for rewrapping the function with a different cache.
+
+ >>> from diskcache import Index
+ >>> mapping = Index()
+ >>> @mapping.memoize()
+ ... def fibonacci(number):
+ ... if number == 0:
+ ... return 0
+ ... elif number == 1:
+ ... return 1
+ ... else:
+ ... return fibonacci(number - 1) + fibonacci(number - 2)
+ >>> print(fibonacci(100))
+ 354224848179261915075
+
+ An additional `__cache_key__` attribute can be used to generate the
+ cache key used for the given arguments.
+
+ >>> key = fibonacci.__cache_key__(100)
+ >>> print(mapping[key])
+ 354224848179261915075
+
+ Remember to call memoize when decorating a callable. If you forget,
+ then a TypeError will occur. Note the lack of parentheses after
+ memoize below:
+
+ >>> @mapping.memoize
+ ... def test():
+ ... pass
+ Traceback (most recent call last):
+ ...
+ TypeError: name cannot be callable
+
+ :param str name: name given for callable (default None, automatic)
+ :param bool typed: cache different types separately (default False)
+ :return: callable decorator
+
+ """
+ return self._cache.memoize(name, typed)
+
+
+ @contextmanager
+ def transact(self):
+ """Context manager to perform a transaction by locking the index.
+
+ While the index is locked, no other write operation is permitted.
+ Transactions should therefore be as short as possible. Read and write
+ operations performed in a transaction are atomic. Read operations may
+ occur concurrently with a transaction.
+
+ Transactions may be nested and may not be shared between threads.
+
+ >>> from diskcache import Index
+ >>> mapping = Index()
+ >>> with mapping.transact(): # Atomically increment two keys.
+ ... mapping['total'] = mapping.get('total', 0) + 123.4
+ ... mapping['count'] = mapping.get('count', 0) + 1
+ >>> with mapping.transact(): # Atomically calculate average.
+ ... average = mapping['total'] / mapping['count']
+ >>> average
+ 123.4
+
+ :return: context manager for use in `with` statement
+
+ """
+ with self._cache.transact(retry=True):
+ yield
+
+
+ def __repr__(self):
+ """index.__repr__() <==> repr(index)
+
+ Return string with printable representation of index.
+
+ """
+ name = type(self).__name__
+ return '{0}({1!r})'.format(name, self.directory)
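The Deque and Index classes above both persist their items under a cache directory and can be reopened from that directory later. A minimal usage sketch of that behaviour, assuming the diskcache package added in this commit is importable (the variable names and asserted values are illustrative only, not part of the vendored file):

```python
# Sketch: persistence of Deque and Index via their backing directory.
from diskcache import Deque, Index

deque = Deque('abc')              # items are serialized into a new temp directory
path = deque.directory            # directory backing this deque

reopened = Deque(directory=path)  # reopening by path sees the same items
assert list(reopened) == ['a', 'b', 'c']

index = Index(None, a=1, b=2)        # None (or no argument) -> temp directory
same_index = Index(index.directory)  # a string first argument reopens that directory
assert same_index['b'] == 2
```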
diff --git a/third_party/python/diskcache/diskcache/recipes.py b/third_party/python/diskcache/diskcache/recipes.py
new file mode 100644
index 0000000000..fb6425090a
--- /dev/null
+++ b/third_party/python/diskcache/diskcache/recipes.py
@@ -0,0 +1,437 @@
+"""Disk Cache Recipes
+
+"""
+
+import functools
+import math
+import os
+import random
+import sys
+import threading
+import time
+
+from .core import ENOVAL, args_to_key, full_name
+
+############################################################################
+# BEGIN Python 2/3 Shims
+############################################################################
+
+if sys.hexversion < 0x03000000:
+ from thread import get_ident # pylint: disable=import-error
+else:
+ from threading import get_ident
+
+############################################################################
+# END Python 2/3 Shims
+############################################################################
+
+
+class Averager(object):
+ """Recipe for calculating a running average.
+
+ Sometimes known as "online statistics," the running average maintains the
+ total and count. The average can then be calculated at any time.
+
+ >>> import diskcache
+ >>> cache = diskcache.FanoutCache()
+ >>> ave = Averager(cache, 'latency')
+ >>> ave.add(0.080)
+ >>> ave.add(0.120)
+ >>> ave.get()
+ 0.1
+ >>> ave.add(0.160)
+ >>> ave.pop()
+ 0.12
+ >>> print(ave.get())
+ None
+
+ """
+ def __init__(self, cache, key, expire=None, tag=None):
+ self._cache = cache
+ self._key = key
+ self._expire = expire
+ self._tag = tag
+
+ def add(self, value):
+ "Add `value` to average."
+ with self._cache.transact(retry=True):
+ total, count = self._cache.get(self._key, default=(0.0, 0))
+ total += value
+ count += 1
+ self._cache.set(
+ self._key, (total, count), expire=self._expire, tag=self._tag,
+ )
+
+ def get(self):
+ "Get current average or return `None` if count equals zero."
+ total, count = self._cache.get(self._key, default=(0.0, 0), retry=True)
+ return None if count == 0 else total / count
+
+ def pop(self):
+ "Return current average and delete key."
+ total, count = self._cache.pop(self._key, default=(0.0, 0), retry=True)
+ return None if count == 0 else total / count
+
+
+class Lock(object):
+ """Recipe for cross-process and cross-thread lock.
+
+ >>> import diskcache
+ >>> cache = diskcache.Cache()
+ >>> lock = Lock(cache, 'report-123')
+ >>> lock.acquire()
+ >>> lock.release()
+ >>> with lock:
+ ... pass
+
+ """
+ def __init__(self, cache, key, expire=None, tag=None):
+ self._cache = cache
+ self._key = key
+ self._expire = expire
+ self._tag = tag
+
+ def acquire(self):
+ "Acquire lock using spin-lock algorithm."
+ while True:
+ added = self._cache.add(
+ self._key, None, expire=self._expire, tag=self._tag, retry=True,
+ )
+ if added:
+ break
+ time.sleep(0.001)
+
+ def release(self):
+ "Release lock by deleting key."
+ self._cache.delete(self._key, retry=True)
+
+ def __enter__(self):
+ self.acquire()
+
+ def __exit__(self, *exc_info):
+ self.release()
+
+
+class RLock(object):
+ """Recipe for cross-process and cross-thread re-entrant lock.
+
+ >>> import diskcache
+ >>> cache = diskcache.Cache()
+ >>> rlock = RLock(cache, 'user-123')
+ >>> rlock.acquire()
+ >>> rlock.acquire()
+ >>> rlock.release()
+ >>> with rlock:
+ ... pass
+ >>> rlock.release()
+ >>> rlock.release()
+ Traceback (most recent call last):
+ ...
+ AssertionError: cannot release un-acquired lock
+
+ """
+ def __init__(self, cache, key, expire=None, tag=None):
+ self._cache = cache
+ self._key = key
+ self._expire = expire
+ self._tag = tag
+
+ def acquire(self):
+ "Acquire lock by incrementing count using spin-lock algorithm."
+ pid = os.getpid()
+ tid = get_ident()
+ pid_tid = '{}-{}'.format(pid, tid)
+
+ while True:
+ with self._cache.transact(retry=True):
+ value, count = self._cache.get(self._key, default=(None, 0))
+ if pid_tid == value or count == 0:
+ self._cache.set(
+ self._key, (pid_tid, count + 1),
+ expire=self._expire, tag=self._tag,
+ )
+ return
+ time.sleep(0.001)
+
+ def release(self):
+ "Release lock by decrementing count."
+ pid = os.getpid()
+ tid = get_ident()
+ pid_tid = '{}-{}'.format(pid, tid)
+
+ with self._cache.transact(retry=True):
+ value, count = self._cache.get(self._key, default=(None, 0))
+ is_owned = pid_tid == value and count > 0
+ assert is_owned, 'cannot release un-acquired lock'
+ self._cache.set(
+ self._key, (value, count - 1),
+ expire=self._expire, tag=self._tag,
+ )
+
+ def __enter__(self):
+ self.acquire()
+
+ def __exit__(self, *exc_info):
+ self.release()
+
+
+class BoundedSemaphore(object):
+ """Recipe for cross-process and cross-thread bounded semaphore.
+
+ >>> import diskcache
+ >>> cache = diskcache.Cache()
+ >>> semaphore = BoundedSemaphore(cache, 'max-cons', value=2)
+ >>> semaphore.acquire()
+ >>> semaphore.acquire()
+ >>> semaphore.release()
+ >>> with semaphore:
+ ... pass
+ >>> semaphore.release()
+ >>> semaphore.release()
+ Traceback (most recent call last):
+ ...
+ AssertionError: cannot release un-acquired semaphore
+
+ """
+ def __init__(self, cache, key, value=1, expire=None, tag=None):
+ self._cache = cache
+ self._key = key
+ self._value = value
+ self._expire = expire
+ self._tag = tag
+
+ def acquire(self):
+ "Acquire semaphore by decrementing value using spin-lock algorithm."
+ while True:
+ with self._cache.transact(retry=True):
+ value = self._cache.get(self._key, default=self._value)
+ if value > 0:
+ self._cache.set(
+ self._key, value - 1,
+ expire=self._expire, tag=self._tag,
+ )
+ return
+ time.sleep(0.001)
+
+ def release(self):
+ "Release semaphore by incrementing value."
+ with self._cache.transact(retry=True):
+ value = self._cache.get(self._key, default=self._value)
+ assert self._value > value, 'cannot release un-acquired semaphore'
+ value += 1
+ self._cache.set(
+ self._key, value, expire=self._expire, tag=self._tag,
+ )
+
+ def __enter__(self):
+ self.acquire()
+
+ def __exit__(self, *exc_info):
+ self.release()
+
+
+def throttle(cache, count, seconds, name=None, expire=None, tag=None,
+ time_func=time.time, sleep_func=time.sleep):
+ """Decorator to throttle calls to function.
+
+ >>> import diskcache, time
+ >>> cache = diskcache.Cache()
+ >>> count = 0
+ >>> @throttle(cache, 2, 1) # 2 calls per 1 second
+ ... def increment():
+ ... global count
+ ... count += 1
+ >>> start = time.time()
+ >>> while (time.time() - start) <= 2:
+ ... increment()
+ >>> count in (6, 7) # 6 or 7 calls depending on CPU load
+ True
+
+ """
+ def decorator(func):
+ rate = count / float(seconds)
+ key = full_name(func) if name is None else name
+ now = time_func()
+ cache.set(key, (now, count), expire=expire, tag=tag, retry=True)
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ while True:
+ with cache.transact(retry=True):
+ last, tally = cache.get(key)
+ now = time_func()
+ tally += (now - last) * rate
+ delay = 0
+
+ if tally > count:
+ cache.set(key, (now, count - 1), expire)
+ elif tally >= 1:
+ cache.set(key, (now, tally - 1), expire)
+ else:
+ delay = (1 - tally) / rate
+
+ if delay:
+ sleep_func(delay)
+ else:
+ break
+
+ return func(*args, **kwargs)
+
+ return wrapper
+
+ return decorator
+
+
+def barrier(cache, lock_factory, name=None, expire=None, tag=None):
+ """Barrier to calling decorated function.
+
+ Supports different kinds of locks: Lock, RLock, BoundedSemaphore.
+
+ >>> import diskcache, time
+ >>> cache = diskcache.Cache()
+ >>> @barrier(cache, Lock)
+ ... def work(num):
+ ... print('worker started')
+ ... time.sleep(1)
+ ... print('worker finished')
+ >>> import multiprocessing.pool
+ >>> pool = multiprocessing.pool.ThreadPool(2)
+ >>> _ = pool.map(work, range(2))
+ worker started
+ worker finished
+ worker started
+ worker finished
+ >>> pool.terminate()
+
+ """
+ def decorator(func):
+ key = full_name(func) if name is None else name
+ lock = lock_factory(cache, key, expire=expire, tag=tag)
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ with lock:
+ return func(*args, **kwargs)
+
+ return wrapper
+
+ return decorator
+
+
+def memoize_stampede(cache, expire, name=None, typed=False, tag=None, beta=1):
+ """Memoizing cache decorator with cache stampede protection.
+
+ Cache stampedes are a type of system overload that can occur when parallel
+ computing systems using memoization come under heavy load. This behaviour
+ is sometimes also called dog-piling, cache miss storm, cache choking, or
+ the thundering herd problem.
+
+ The memoization decorator implements cache stampede protection through
+ early recomputation. Early recomputation of function results will occur
+ probabilistically before expiration in a background thread of
+ execution. Early probabilistic recomputation is based on research by
+ Vattani, A.; Chierichetti, F.; Lowenstein, K. (2015), Optimal Probabilistic
+ Cache Stampede Prevention, VLDB, pp. 886-897, ISSN 2150-8097
+
+ If name is set to None (default), the callable name will be determined
+ automatically.
+
+ If typed is set to True, function arguments of different types will be
+ cached separately. For example, f(3) and f(3.0) will be treated as distinct
+ calls with distinct results.
+
+ The original underlying function is accessible through the `__wrapped__`
+ attribute. This is useful for introspection, for bypassing the cache, or
+ for rewrapping the function with a different cache.
+
+ >>> from diskcache import Cache
+ >>> cache = Cache()
+ >>> @memoize_stampede(cache, expire=1)
+ ... def fib(number):
+ ... if number == 0:
+ ... return 0
+ ... elif number == 1:
+ ... return 1
+ ... else:
+ ... return fib(number - 1) + fib(number - 2)
+ >>> print(fib(100))
+ 354224848179261915075
+
+ An additional `__cache_key__` attribute can be used to generate the cache
+ key used for the given arguments.
+
+ >>> key = fib.__cache_key__(100)
+ >>> del cache[key]
+
+ Remember to call memoize when decorating a callable. If you forget, then a
+ TypeError will occur.
+
+ :param cache: cache to store callable arguments and return values
+ :param float expire: seconds until arguments expire
+ :param str name: name given for callable (default None, automatic)
+ :param bool typed: cache different types separately (default False)
+ :param str tag: text to associate with arguments (default None)
+ :return: callable decorator
+
+ """
+ # Caution: Nearly identical code exists in Cache.memoize
+ def decorator(func):
+ "Decorator created by memoize call for callable."
+ base = (full_name(func),) if name is None else (name,)
+
+ def timer(*args, **kwargs):
+ "Time execution of `func` and return result and time delta."
+ start = time.time()
+ result = func(*args, **kwargs)
+ delta = time.time() - start
+ return result, delta
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ "Wrapper for callable to cache arguments and return values."
+ key = wrapper.__cache_key__(*args, **kwargs)
+ pair, expire_time = cache.get(
+ key, default=ENOVAL, expire_time=True, retry=True,
+ )
+
+ if pair is not ENOVAL:
+ result, delta = pair
+ now = time.time()
+ ttl = expire_time - now
+
+ if (-delta * beta * math.log(random.random())) < ttl:
+ return result # Cache hit.
+
+ # Check whether a thread has started for early recomputation.
+
+ thread_key = key + (ENOVAL,)
+ thread_added = cache.add(
+ thread_key, None, expire=delta, retry=True,
+ )
+
+ if thread_added:
+ # Start thread for early recomputation.
+ def recompute():
+ with cache:
+ pair = timer(*args, **kwargs)
+ cache.set(
+ key, pair, expire=expire, tag=tag, retry=True,
+ )
+ thread = threading.Thread(target=recompute)
+ thread.daemon = True
+ thread.start()
+
+ return result
+
+ pair = timer(*args, **kwargs)
+ cache.set(key, pair, expire=expire, tag=tag, retry=True)
+ return pair[0]
+
+ def __cache_key__(*args, **kwargs):
+ "Make key for cache given function arguments."
+ return args_to_key(base, args, kwargs, typed)
+
+ wrapper.__cache_key__ = __cache_key__
+ return wrapper
+
+ return decorator
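The cache-hit branch of `memoize_stampede` above applies the probabilistic early-expiration rule from the cited Vattani et al. paper: a cached value is served only while `-delta * beta * log(random())` stays below the remaining time-to-live, so recomputation becomes more likely as expiration nears and for functions that took longer to compute. A standalone sketch of just that check, with made-up timing values (the helper name is not part of the library):

```python
import math
import random

def should_recompute_early(delta, ttl, beta=1.0):
    """Probabilistic early-expiration check used for stampede protection.

    delta -- seconds the original computation took
    ttl   -- seconds remaining before the cached value expires
    beta  -- eagerness factor; larger values recompute sooner
    """
    # Mirrors the condition in memoize_stampede's wrapper: the cached
    # result is reused while -delta * beta * log(random()) < ttl.
    return (-delta * beta * math.log(random.random())) >= ttl

trials = 10000
# With 0.5s of compute time and only 0.1s of ttl left, early recomputation
# triggers on most calls; with 30s of ttl left it almost never does.
print(sum(should_recompute_early(0.5, ttl=0.1) for _ in range(trials)) / trials)
print(sum(should_recompute_early(0.5, ttl=30.0) for _ in range(trials)) / trials)
```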
diff --git a/third_party/python/diskcache/setup.cfg b/third_party/python/diskcache/setup.cfg
new file mode 100644
index 0000000000..8bfd5a12f8
--- /dev/null
+++ b/third_party/python/diskcache/setup.cfg
@@ -0,0 +1,4 @@
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/diskcache/setup.py b/third_party/python/diskcache/setup.py
new file mode 100644
index 0000000000..90dc280b27
--- /dev/null
+++ b/third_party/python/diskcache/setup.py
@@ -0,0 +1,51 @@
+from io import open
+from setuptools import setup
+from setuptools.command.test import test as TestCommand
+
+import diskcache
+
+
+class Tox(TestCommand):
+ def finalize_options(self):
+ TestCommand.finalize_options(self)
+ self.test_args = []
+ self.test_suite = True
+ def run_tests(self):
+ import tox
+ errno = tox.cmdline(self.test_args)
+ exit(errno)
+
+
+with open('README.rst', encoding='utf-8') as reader:
+ readme = reader.read()
+
+setup(
+ name=diskcache.__title__,
+ version=diskcache.__version__,
+ description='Disk Cache -- Disk and file backed persistent cache.',
+ long_description=readme,
+ author='Grant Jenks',
+ author_email='contact@grantjenks.com',
+ url='http://www.grantjenks.com/docs/diskcache/',
+ license='Apache 2.0',
+ packages=['diskcache'],
+ tests_require=['tox'],
+ cmdclass={'test': Tox},
+ install_requires=[],
+ classifiers=(
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Natural Language :: English',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+ ),
+)
diff --git a/third_party/python/distro/CHANGELOG.md b/third_party/python/distro/CHANGELOG.md
new file mode 100644
index 0000000000..fd32d61f4f
--- /dev/null
+++ b/third_party/python/distro/CHANGELOG.md
@@ -0,0 +1,147 @@
+## 1.4.0 (2019.2.4)
+
+BACKWARD COMPATIBILITY:
+* Prefer the VERSION_CODENAME field of os-release to parsing it from VERSION [[#230](https://github.com/nir0s/distro/pull/230)]
+
+BUG FIXES:
+* Return _uname_info from the uname_info() method [[#233](https://github.com/nir0s/distro/pull/233)]
+* Fixed CloudLinux id discovery [[#234](https://github.com/nir0s/distro/pull/234)]
+* Update Oracle matching [[#224](https://github.com/nir0s/distro/pull/224)]
+
+DOCS:
+* Update Fedora package link [[#225](https://github.com/nir0s/distro/pull/225)]
+* Distro is the recommended replacement for platform.linux_distribution [[#220](https://github.com/nir0s/distro/pull/220)]
+
+RELEASE:
+* Use Markdown for long description in setup.py [[#219](https://github.com/nir0s/distro/pull/219)]
+
+Additionally, the Python 2.6 branch was fixed and rebased on top of master. It is now passing all tests. Thanks [abadger](https://github.com/abadger)!
+
+## 1.3.0 (2018.05.09)
+
+ENHANCEMENTS:
+* Added support for OpenBSD, FreeBSD, and NetBSD [[#207](https://github.com/nir0s/distro/issues/207)]
+
+TESTS:
+* Add test for Kali Linux Rolling [[#214](https://github.com/nir0s/distro/issues/214)]
+
+DOCS:
+* Update docs with regards to #207 [[#209](https://github.com/nir0s/distro/issues/209)]
+* Add Ansible reference implementation and fix arch-linux link [[#213](https://github.com/nir0s/distro/issues/213)]
+* Add facter reference implementation [[#213](https://github.com/nir0s/distro/issues/213)]
+
+## 1.2.0 (2017.12.24)
+
+BACKWARD COMPATIBILITY:
+* Don't raise ImportError on non-linux platforms [[#202](https://github.com/nir0s/distro/issues/202)]
+
+ENHANCEMENTS:
+* Lazily load the LinuxDistribution data [[#201](https://github.com/nir0s/distro/issues/201)]
+
+BUG FIXES:
+* Stdout of shell should be decoded with sys.getfilesystemencoding() [[#203](https://github.com/nir0s/distro/issues/203)]
+
+TESTS:
+* Explicitly set Python versions on Travis for flake [[#204](https://github.com/nir0s/distro/issues/204)]
+
+
+## 1.1.0 (2017.11.28)
+
+BACKWARD COMPATIBILITY:
+* Drop python3.3 support [[#199](https://github.com/nir0s/distro/issues/199)]
+* Remove Official Python26 support [[#195](https://github.com/nir0s/distro/issues/195)]
+
+TESTS:
+* Add MandrivaLinux test case [[#181](https://github.com/nir0s/distro/issues/181)]
+* Add test cases for CloudLinux 5, 6, and 7 [[#180](https://github.com/nir0s/distro/issues/180)]
+
+RELEASE:
+* Modify MANIFEST to include resources for tests and docs in source tarballs [[97c91a1](97c91a1)]
+
+## 1.0.4 (2017.04.01)
+
+BUG FIXES:
+* Guess common *-release files if /etc not readable [[#175](https://github.com/nir0s/distro/issues/175)]
+
+## 1.0.3 (2017.03.19)
+
+ENHANCEMENTS:
+* Show keys for empty values when running distro from the CLI [[#160](https://github.com/nir0s/distro/issues/160)]
+* Add manual mapping for `redhatenterpriseserver` (previously only redhatenterpriseworkstation was mapped) [[#148](https://github.com/nir0s/distro/issues/148)]
+* Race condition in `_parse_distro_release_file` [[#163](https://github.com/nir0s/distro/issues/163)]
+
+TESTS:
+* Add RHEL5 test case [[#165](https://github.com/nir0s/distro/issues/165)]
+* Add OpenELEC test case [[#166](https://github.com/nir0s/distro/issues/166)]
+* Replace nose with pytest [[#158](https://github.com/nir0s/distro/issues/158)]
+
+RELEASE:
+* Update classifiers
+* Update supported Python versions (with py36)
+
+## 1.0.2 (2017.01.12)
+
+TESTS:
+* Test on py33, py36 and py3 based flake8
+
+RELEASE:
+* Add MANIFEST file (which also includes the LICENSE as part of Issue [[#139](https://github.com/nir0s/distro/issues/139)])
+* Default to releasing using Twine [[#121](https://github.com/nir0s/distro/issues/121)]
+* Add setup.cfg file [[#145](https://github.com/nir0s/distro/issues/145)]
+* Update license in setup.py
+
+## 1.0.1 (2016-11-03)
+
+ENHANCEMENTS:
+* Prettify distro -j's output and add more elaborate docs [[#147](https://github.com/nir0s/distro/issues/147)]
+* Decode output of `lsb_release` as utf-8 [[#144](https://github.com/nir0s/distro/issues/144)]
+* Logger now uses `message %s, string` form to avoid evaluating log messages unnecessarily [[#145](https://github.com/nir0s/distro/issues/145)]
+
+TESTS:
+* Increase code-coverage [[#146](https://github.com/nir0s/distro/issues/146)]
+* Fix landscape code-quality warnings [[#145](https://github.com/nir0s/distro/issues/145)]
+
+RELEASE:
+* Add CONTRIBUTING.md
+
+## 1.0.0 (2016-09-25)
+
+BACKWARD COMPATIBILITY:
+* raise exception when importing on non-supported platforms [[#129](https://github.com/nir0s/distro/issues/129)]
+
+ENHANCEMENTS:
+* Use `bytes` invariantly [[#135](https://github.com/nir0s/distro/issues/135)]
+* Some minor code adjustments plus a CLI [[#134](https://github.com/nir0s/distro/issues/134)]
+* Emit stderr if `lsb_release` fails
+
+BUG FIXES:
+* Fix some encoding related issues
+
+TESTS:
+* Add many test cases (e.g. Raspbian 8, CoreOS, Amazon Linux, Scientific Linux, Gentoo, Manjaro)
+* Completely redo the testing framework to make it easier to add tests
+* Test on pypy
+
+RELEASE:
+* Remove six as a dependency
+
+## 0.6.0 (2016-04-21)
+
+This is the first release of `distro`.
+All previous work was done on `ld` and therefore unmentioned here. See the release log in GitHub if you want the entire log.
+
+BACKWARD COMPATIBILITY:
+* No longer a package. constants.py has been removed and distro is now a single module
+
+ENHANCEMENTS:
+* distro.info() now receives best and pretty flags
+* Removed get_ prefix from get_*_release_attr functions
+* Codename is now passed in distro.info()
+
+TESTS:
+* Added Linux Mint test case
+* Now testing on Python 3.4
+
+DOCS:
+* Documentation fixes
+
diff --git a/third_party/python/distro/CONTRIBUTING.md b/third_party/python/distro/CONTRIBUTING.md
new file mode 100644
index 0000000000..4948ef24e9
--- /dev/null
+++ b/third_party/python/distro/CONTRIBUTING.md
@@ -0,0 +1,54 @@
+# General
+
+* Contributing to distro identification currently doesn't have any specific standards and rather depends on the specific implementation.
+* 100% coverage is expected for each PR unless explicitly authorized by the reviewer.
+* Please try to maintain maximum code-health (via landscape.io).
+
+# Contributing distro specific tests
+
+Distro's tests are implemented via a standardized framework under `tests/test_distro.py`
+
+For each distribution, tests should be added to the relevant class according to which distribution file(s) exist on it. For example, tests should be added under `TestOSRelease` when `/etc/os-release` is available.
+
+The tests must be self-contained, meaning that the release files for the distribution should be maintained in the repository under `tests/resources/distros/distribution_name+distribution_version`.
+
+A test method would look somewhat like this:
+
+```python
+def test_centos7_os_release(self):
+ desired_outcome = {
+ 'id': 'centos',
+ 'name': 'CentOS Linux',
+ 'pretty_name': 'CentOS Linux 7 (Core)',
+ 'version': '7',
+ 'pretty_version': '7 (Core)',
+ 'best_version': '7',
+ 'like': 'rhel fedora',
+ 'codename': 'Core'
+ }
+ self._test_outcome(desired_outcome)
+```
+
+The framework will automatically try to pick up the relevant file according to the method's name (`centos7` meaning the folder should be named `centos7` as well) and compare the `desired_outcome` with the parsed files found under the test dir.
+
+The exception to the rule is under the `TestDistroRelease` test class which should look somewhat like this:
+
+```python
+def test_centos5_dist_release(self):
+ desired_outcome = {
+ 'id': 'centos',
+ 'name': 'CentOS',
+ 'pretty_name': 'CentOS 5.11 (Final)',
+ 'version': '5.11',
+ 'pretty_version': '5.11 (Final)',
+ 'best_version': '5.11',
+ 'codename': 'Final',
+ 'major_version': '5',
+ 'minor_version': '11'
+ }
+ self._test_outcome(desired_outcome, 'centos', '5')
+```
+
+Here the name of the method is not indicative of the lookup folder; rather, the last two arguments to `_test_outcome` are.
+
+A test case is mandatory under `TestOverall` for a PR to be complete.
\ No newline at end of file
diff --git a/third_party/python/distro/CONTRIBUTORS.md b/third_party/python/distro/CONTRIBUTORS.md
new file mode 100644
index 0000000000..922f6533e2
--- /dev/null
+++ b/third_party/python/distro/CONTRIBUTORS.md
@@ -0,0 +1,13 @@
+Thanks!
+
+* https://github.com/andy-maier
+* https://github.com/SethMichaelLarson
+* https://github.com/asottile
+* https://github.com/MartijnBraam
+* https://github.com/funkyfuture
+* https://github.com/adamjstewart
+* https://github.com/xavfernandez
+* https://github.com/xsuchy
+* https://github.com/marcoceppi
+* https://github.com/tgamblin
+* https://github.com/sebix
diff --git a/third_party/python/distro/LICENSE b/third_party/python/distro/LICENSE
new file mode 100644
index 0000000000..e06d208186
--- /dev/null
+++ b/third_party/python/distro/LICENSE
@@ -0,0 +1,202 @@
+Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/third_party/python/distro/MANIFEST.in b/third_party/python/distro/MANIFEST.in
new file mode 100644
index 0000000000..4d95f6ce8e
--- /dev/null
+++ b/third_party/python/distro/MANIFEST.in
@@ -0,0 +1,12 @@
+include *.md
+include *.py
+include *.txt
+include LICENSE
+include CHANGES
+include Makefile
+
+graft tests
+
+include docs/*
+
+global-exclude *.py[co]
diff --git a/third_party/python/distro/Makefile b/third_party/python/distro/Makefile
new file mode 100644
index 0000000000..97eaba7251
--- /dev/null
+++ b/third_party/python/distro/Makefile
@@ -0,0 +1,145 @@
+# Copyright 2015,2016 Nir Cohen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Name of this package
+PACKAGENAME = distro
+
+# Additional options for Sphinx
+SPHINXOPTS = -v
+
+# Paper format for the Sphinx LaTex/PDF builder.
+# Valid values: a4, letter
+SPHINXPAPER = a4
+
+# Sphinx build subtree.
+SPHINXBUILDDIR = build_docs
+
+# Directory where conf.py is located
+SPHINXCONFDIR = docs
+
+# Directory where input files for Sphinx are located
+SPHINXSOURCEDIR = .
+
+# Sphinx build command (Use 'pip install sphinx' to get it)
+SPHINXBUILD = sphinx-build
+
+# Internal variables for Sphinx
+SPHINXPAPEROPT_a4 = -D latex_paper_size=a4
+SPHINXPAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(SPHINXBUILDDIR)/doctrees -c $(SPHINXCONFDIR) \
+ $(SPHINXPAPEROPT_$(SPHINXPAPER)) $(SPHINXOPTS) \
+ $(SPHINXSOURCEDIR)
+
+.PHONY: help
+help:
+ @echo 'Please use "make <target>" where <target> is one of'
+ @echo " release - build a release and publish it"
+ @echo " dev - prepare a development environment (includes tests)"
+ @echo " instdev - prepare a development environment (no tests)"
+ @echo " install - install into current Python environment"
+ @echo " html - generate docs as standalone HTML files in: $(SPHINXBUILDDIR)/html"
+ @echo " pdf - generate docs as PDF (via LaTeX) for paper format: $(SPHINXPAPER) in: $(SPHINXBUILDDIR)/pdf"
+ @echo " man - generate docs as manual pages in: $(SPHINXBUILDDIR)/man"
+ @echo " docchanges - generate an overview of all changed/added/deprecated items in docs"
+ @echo " doclinkcheck - check all external links in docs for integrity"
+ @echo " doccoverage - run coverage check of the documentation"
+ @echo " clobber - remove any build products"
+ @echo " build - build the package"
+ @echo " test - test from this directory using tox, including test coverage"
+ @echo " publish - upload to PyPI"
+ @echo " clean - remove any temporary build products"
+ @echo " dry-run - perform all action required for a release without actually releasing"
+
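+# Illustrative workflow using only the targets listed above: "make instdev"
+# to set up a development environment, "make test" to run the test suite via
+# tox, and "make html" to build the HTML documentation.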
+.PHONY: release
+release: test clean build publish
+ @echo "$@ done."
+
+.PHONY: test
+test:
+ pip install 'tox>=1.7.2'
+ tox
+ @echo "$@ done."
+
+.PHONY: clean
+clean:
+ rm -rf dist build $(PACKAGENAME).egg-info
+ @echo "$@ done."
+
+.PHONY: build
+build:
+ python setup.py sdist bdist_wheel
+
+.PHONY: publish
+publish:
+ twine upload -r pypi dist/$(PACKAGENAME)-*
+ @echo "$@ done."
+
+.PHONY: dry-run
+dry-run: test clean build
+ @echo "$@ done."
+
+.PHONY: dev
+dev: instdev test
+ @echo "$@ done."
+
+.PHONY: instdev
+instdev:
+ pip install -r dev-requirements.txt
+ python setup.py develop
+ @echo "$@ done."
+
+.PHONY: install
+install:
+ python setup.py install
+ @echo "$@ done."
+
+.PHONY: html
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(SPHINXBUILDDIR)/html
+ @echo "$@ done; the HTML pages are in $(SPHINXBUILDDIR)/html."
+
+.PHONY: pdf
+pdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(SPHINXBUILDDIR)/pdf
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(SPHINXBUILDDIR)/pdf all-pdf
+ @echo "$@ done; the PDF files are in $(SPHINXBUILDDIR)/pdf."
+
+.PHONY: man
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(SPHINXBUILDDIR)/man
+ @echo "$@ done; the manual pages are in $(SPHINXBUILDDIR)/man."
+
+.PHONY: docchanges
+docchanges:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(SPHINXBUILDDIR)/changes
+ @echo
+ @echo "$@ done; the doc changes overview file is in $(SPHINXBUILDDIR)/changes."
+
+.PHONY: doclinkcheck
+doclinkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(SPHINXBUILDDIR)/linkcheck
+ @echo
+ @echo "$@ done; look for any errors in the above output " \
+ "or in $(SPHINXBUILDDIR)/linkcheck/output.txt."
+
+.PHONY: doccoverage
+doccoverage:
+ $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(SPHINXBUILDDIR)/coverage
+ @echo "$@ done; the doc coverage results are in $(SPHINXBUILDDIR)/coverage/python.txt."
+
+.PHONY: clobber
+clobber: clean
+ rm -rf $(SPHINXBUILDDIR)
+ @echo "$@ done."
diff --git a/third_party/python/distro/PKG-INFO b/third_party/python/distro/PKG-INFO
new file mode 100644
index 0000000000..eacc868d57
--- /dev/null
+++ b/third_party/python/distro/PKG-INFO
@@ -0,0 +1,168 @@
+Metadata-Version: 2.1
+Name: distro
+Version: 1.4.0
+Summary: Distro - an OS platform information API
+Home-page: https://github.com/nir0s/distro
+Author: Nir Cohen
+Author-email: nir36g@gmail.com
+License: Apache License, Version 2.0
+Description: Distro - an OS platform information API
+ =======================================
+
+ [![Build Status](https://travis-ci.org/nir0s/distro.svg?branch=master)](https://travis-ci.org/nir0s/distro)
+ [![Build status](https://ci.appveyor.com/api/projects/status/e812qjk1gf0f74r5/branch/master?svg=true)](https://ci.appveyor.com/project/nir0s/distro/branch/master)
+ [![PyPI version](http://img.shields.io/pypi/v/distro.svg)](https://pypi.python.org/pypi/distro)
+ [![Supported Python Versions](https://img.shields.io/pypi/pyversions/distro.svg)](https://img.shields.io/pypi/pyversions/distro.svg)
+ [![Requirements Status](https://requires.io/github/nir0s/distro/requirements.svg?branch=master)](https://requires.io/github/nir0s/distro/requirements/?branch=master)
+ [![Code Coverage](https://codecov.io/github/nir0s/distro/coverage.svg?branch=master)](https://codecov.io/github/nir0s/distro?branch=master)
+ [![Code Quality](https://landscape.io/github/nir0s/distro/master/landscape.svg?style=flat)](https://landscape.io/github/nir0s/distro)
+ [![Is Wheel](https://img.shields.io/pypi/wheel/distro.svg?style=flat)](https://pypi.python.org/pypi/distro)
+ [![Latest Github Release](https://readthedocs.org/projects/distro/badge/?version=stable)](http://distro.readthedocs.io/en/latest/)
+ [![Join the chat at https://gitter.im/nir0s/distro](https://badges.gitter.im/nir0s/distro.svg)](https://gitter.im/nir0s/distro?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
+
+ `distro` provides information about the
+ OS distribution it runs on, such as a reliable machine-readable ID, or
+ version information.
+
+ It is the recommended replacement for Python's original
+ [`platform.linux_distribution`](https://docs.python.org/3.7/library/platform.html#platform.linux_distribution)
+ function (which will be removed in Python 3.8).
+ It also provides much more functionality which isn't necessarily Python bound,
+ like a command-line interface.
+
+ Distro currently supports Linux and BSD based systems but [Windows and OS X support](https://github.com/nir0s/distro/issues/177) is also planned.
+
+ For Python 2.6 support, see https://github.com/nir0s/distro/tree/python2.6-support
+
+ ## Installation
+
+ Installation of the latest released version from PyPI:
+
+ ```shell
+ pip install distro
+ ```
+
+ Installation of the latest development version:
+
+ ```shell
+ pip install https://github.com/nir0s/distro/archive/master.tar.gz
+ ```
+
+
+ ## Usage
+
+ ```bash
+ $ distro
+ Name: Antergos Linux
+ Version: 2015.10 (ISO-Rolling)
+ Codename: ISO-Rolling
+
+ $ distro -j
+ {
+ "codename": "ISO-Rolling",
+ "id": "antergos",
+ "like": "arch",
+ "version": "16.9",
+ "version_parts": {
+ "build_number": "",
+ "major": "16",
+ "minor": "9"
+ }
+ }
+
+
+ $ python
+ >>> import distro
+ >>> distro.linux_distribution(full_distribution_name=False)
+ ('centos', '7.1.1503', 'Core')
+ ```
+
+
+ ## Documentation
+
+ On top of the aforementioned API, several more functions are available. For a complete description of the
+ API, see the [latest API documentation](http://distro.readthedocs.org/en/latest/).
+
+ ## Background
+
+ An alternative implementation became necessary because Python 3.5 deprecated
+ this function, and Python 3.8 will remove it altogether.
+ Its predecessor function `platform.dist` was already deprecated since
+ Python 2.6 and will also be removed in Python 3.8.
+ Still, there are many cases in which access to that information is needed.
+ See [Python issue 1322](https://bugs.python.org/issue1322) for more
+ information.
+
+ The `distro` package implements a robust and inclusive way of retrieving the
+ information about a distribution based on new standards and old methods,
+ namely from these data sources (from high to low precedence):
+
+ * The os-release file `/etc/os-release`, if present.
+ * The output of the `lsb_release` command, if available.
+ * The distro release file (`/etc/*(-|_)(release|version)`), if present.
+        * The `uname` command for BSD-based distributions.
+
+
+ ## Python and Distribution Support
+
+ `distro` is supported and tested on Python 2.7, 3.4+ and PyPy and on
+ any distribution that provides one or more of the data sources
+ covered.
+
+ This package is tested with test data that mimics the exact behavior of the data sources of [a number of Linux distributions](https://github.com/nir0s/distro/tree/master/tests/resources/distros).
+
+
+ ## Testing
+
+ ```shell
+ git clone git@github.com:nir0s/distro.git
+ cd distro
+ pip install tox
+ tox
+ ```
+
+
+ ## Contributions
+
+ Pull requests are always welcome to deal with specific distributions or just
+ for general merriment.
+
+ See [CONTRIBUTIONS](https://github.com/nir0s/distro/blob/master/CONTRIBUTING.md) for contribution info.
+
+ Reference implementations for supporting additional distributions and file
+ formats can be found here:
+
+ * https://github.com/saltstack/salt/blob/develop/salt/grains/core.py#L1172
+ * https://github.com/chef/ohai/blob/master/lib/ohai/plugins/linux/platform.rb
+ * https://github.com/ansible/ansible/blob/devel/lib/ansible/module_utils/facts/system/distribution.py
+ * https://github.com/puppetlabs/facter/blob/master/lib/src/facts/linux/os_linux.cc
+
+ ## Package manager distributions
+
+ * https://src.fedoraproject.org/rpms/python-distro
+ * https://www.archlinux.org/packages/community/any/python-distro/
+ * https://launchpad.net/ubuntu/+source/python-distro
+ * https://packages.debian.org/sid/python-distro
+ * https://packages.gentoo.org/packages/dev-python/distro
+ * https://pkgs.org/download/python2-distro
+ * https://slackbuilds.org/repository/14.2/python/python-distro/
+
+Platform: All
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Operating System :: POSIX :: BSD
+Classifier: Operating System :: POSIX :: BSD :: FreeBSD
+Classifier: Operating System :: POSIX :: BSD :: NetBSD
+Classifier: Operating System :: POSIX :: BSD :: OpenBSD
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: System :: Operating System
+Description-Content-Type: text/markdown
diff --git a/third_party/python/distro/README.md b/third_party/python/distro/README.md
new file mode 100644
index 0000000000..57d681c5c1
--- /dev/null
+++ b/third_party/python/distro/README.md
@@ -0,0 +1,140 @@
+Distro - an OS platform information API
+=======================================
+
+[![Build Status](https://travis-ci.org/nir0s/distro.svg?branch=master)](https://travis-ci.org/nir0s/distro)
+[![Build status](https://ci.appveyor.com/api/projects/status/e812qjk1gf0f74r5/branch/master?svg=true)](https://ci.appveyor.com/project/nir0s/distro/branch/master)
+[![PyPI version](http://img.shields.io/pypi/v/distro.svg)](https://pypi.python.org/pypi/distro)
+[![Supported Python Versions](https://img.shields.io/pypi/pyversions/distro.svg)](https://img.shields.io/pypi/pyversions/distro.svg)
+[![Requirements Status](https://requires.io/github/nir0s/distro/requirements.svg?branch=master)](https://requires.io/github/nir0s/distro/requirements/?branch=master)
+[![Code Coverage](https://codecov.io/github/nir0s/distro/coverage.svg?branch=master)](https://codecov.io/github/nir0s/distro?branch=master)
+[![Code Quality](https://landscape.io/github/nir0s/distro/master/landscape.svg?style=flat)](https://landscape.io/github/nir0s/distro)
+[![Is Wheel](https://img.shields.io/pypi/wheel/distro.svg?style=flat)](https://pypi.python.org/pypi/distro)
+[![Latest Github Release](https://readthedocs.org/projects/distro/badge/?version=stable)](http://distro.readthedocs.io/en/latest/)
+[![Join the chat at https://gitter.im/nir0s/distro](https://badges.gitter.im/nir0s/distro.svg)](https://gitter.im/nir0s/distro?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
+
+`distro` provides information about the
+OS distribution it runs on, such as a reliable machine-readable ID, or
+version information.
+
+It is the recommended replacement for Python's original
+[`platform.linux_distribution`](https://docs.python.org/3.7/library/platform.html#platform.linux_distribution)
+function (which will be removed in Python 3.8).
+It also provides much more functionality which isn't necessarily Python bound,
+like a command-line interface.
+
+Distro currently supports Linux and BSD based systems but [Windows and OS X support](https://github.com/nir0s/distro/issues/177) is also planned.
+
+For Python 2.6 support, see https://github.com/nir0s/distro/tree/python2.6-support
+
+## Installation
+
+Installation of the latest released version from PyPI:
+
+```shell
+pip install distro
+```
+
+Installation of the latest development version:
+
+```shell
+pip install https://github.com/nir0s/distro/archive/master.tar.gz
+```
+
+
+## Usage
+
+```bash
+$ distro
+Name: Antergos Linux
+Version: 2015.10 (ISO-Rolling)
+Codename: ISO-Rolling
+
+$ distro -j
+{
+ "codename": "ISO-Rolling",
+ "id": "antergos",
+ "like": "arch",
+ "version": "16.9",
+ "version_parts": {
+ "build_number": "",
+ "major": "16",
+ "minor": "9"
+ }
+}
+
+
+$ python
+>>> import distro
+>>> distro.linux_distribution(full_distribution_name=False)
+('centos', '7.1.1503', 'Core')
+```
+
+
+## Documentation
+
+On top of the aforementioned API, several more functions are available. For a complete description of the
+API, see the [latest API documentation](http://distro.readthedocs.org/en/latest/).
+
+## Background
+
+An alternative implementation became necessary because Python 3.5 deprecated
+this function, and Python 3.8 will remove it altogether.
+Its predecessor function `platform.dist` was already deprecated since
+Python 2.6 and will also be removed in Python 3.8.
+Still, there are many cases in which access to that information is needed.
+See [Python issue 1322](https://bugs.python.org/issue1322) for more
+information.
+
+The `distro` package implements a robust and inclusive way of retrieving the
+information about a distribution based on new standards and old methods,
+namely from these data sources (from high to low precedence):
+
+* The os-release file `/etc/os-release`, if present.
+* The output of the `lsb_release` command, if available.
+* The distro release file (`/etc/*(-|_)(release|version)`), if present.
+* The `uname` command for BSD-based distributions.
+
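+Each of these sources can also be inspected on its own, which is handy when
+checking where a particular value came from (illustrative example; the
+accessor functions used here are part of the `distro` API):
+
+```python
+import distro
+
+# Highest precedence first; an empty dict means that source is unavailable.
+print(distro.os_release_info())      # parsed /etc/os-release
+print(distro.lsb_release_info())     # parsed `lsb_release -a` output
+print(distro.distro_release_info())  # parsed /etc/*-release style file
+print(distro.uname_info())           # `uname` fallback (mainly BSD)
+```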
+
+## Python and Distribution Support
+
+`distro` is supported and tested on Python 2.7, 3.4+ and PyPy and on
+any distribution that provides one or more of the data sources
+covered.
+
+This package is tested with test data that mimics the exact behavior of the data sources of [a number of Linux distributions](https://github.com/nir0s/distro/tree/master/tests/resources/distros).
+
+
+## Testing
+
+```shell
+git clone git@github.com:nir0s/distro.git
+cd distro
+pip install tox
+tox
+```
+
+
+## Contributions
+
+Pull requests are always welcome to deal with specific distributions or just
+for general merriment.
+
+See [CONTRIBUTIONS](https://github.com/nir0s/distro/blob/master/CONTRIBUTING.md) for contribution info.
+
+Reference implementations for supporting additional distributions and file
+formats can be found here:
+
+* https://github.com/saltstack/salt/blob/develop/salt/grains/core.py#L1172
+* https://github.com/chef/ohai/blob/master/lib/ohai/plugins/linux/platform.rb
+* https://github.com/ansible/ansible/blob/devel/lib/ansible/module_utils/facts/system/distribution.py
+* https://github.com/puppetlabs/facter/blob/master/lib/src/facts/linux/os_linux.cc
+
+## Package manager distributions
+
+* https://src.fedoraproject.org/rpms/python-distro
+* https://www.archlinux.org/packages/community/any/python-distro/
+* https://launchpad.net/ubuntu/+source/python-distro
+* https://packages.debian.org/sid/python-distro
+* https://packages.gentoo.org/packages/dev-python/distro
+* https://pkgs.org/download/python2-distro
+* https://slackbuilds.org/repository/14.2/python/python-distro/
diff --git a/third_party/python/distro/dev-requirements.txt b/third_party/python/distro/dev-requirements.txt
new file mode 100644
index 0000000000..738958e01f
--- /dev/null
+++ b/third_party/python/distro/dev-requirements.txt
@@ -0,0 +1,3 @@
+pytest
+pytest-cov
+sphinx>=1.1
\ No newline at end of file
diff --git a/third_party/python/distro/distro.py b/third_party/python/distro/distro.py
new file mode 100755
index 0000000000..33061633ef
--- /dev/null
+++ b/third_party/python/distro/distro.py
@@ -0,0 +1,1216 @@
+# Copyright 2015,2016,2017 Nir Cohen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+The ``distro`` package (``distro`` stands for Linux Distribution) provides
+information about the Linux distribution it runs on, such as a reliable
+machine-readable distro ID, or version information.
+
+It is the recommended replacement for Python's original
+:py:func:`platform.linux_distribution` function, but it provides much more
+functionality. An alternative implementation became necessary because Python
+3.5 deprecated this function, and Python 3.8 will remove it altogether.
+Its predecessor function :py:func:`platform.dist` was already
+deprecated since Python 2.6 and will also be removed in Python 3.8.
+Still, there are many cases in which access to OS distribution information
+is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for
+more information.
+"""
+
+import os
+import re
+import sys
+import json
+import shlex
+import logging
+import argparse
+import subprocess
+
+
+_UNIXCONFDIR = os.environ.get('UNIXCONFDIR', '/etc')
+_OS_RELEASE_BASENAME = 'os-release'
+
+#: Translation table for normalizing the "ID" attribute defined in os-release
+#: files, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as defined in the os-release file, translated to lower case,
+#: with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_OS_ID = {
+ 'ol': 'oracle', # Oracle Enterprise Linux
+}
+
+#: Translation table for normalizing the "Distributor ID" attribute returned by
+#: the lsb_release command, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as returned by the lsb_release command, translated to lower
+#: case, with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_LSB_ID = {
+ 'enterpriseenterprise': 'oracle', # Oracle Enterprise Linux
+ 'redhatenterpriseworkstation': 'rhel', # RHEL 6, 7 Workstation
+ 'redhatenterpriseserver': 'rhel', # RHEL 6, 7 Server
+}
+
+#: Translation table for normalizing the distro ID derived from the file name
+#: of distro release files, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as derived from the file name of a distro release file,
+#: translated to lower case, with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_DISTRO_ID = {
+ 'redhat': 'rhel', # RHEL 6.x, 7.x
+}
+
+# Pattern for content of distro release file (reversed)
+_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
+ r'(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)')
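+# The pattern above is matched against the *reversed* release line (see
+# _parse_distro_release_content below): "STL " and "esaeler" are "LTS" and
+# "release" reversed. E.g. a line such as "CentOS Linux release 7.1.1503 (Core)"
+# is reversed, matched, and the groups reversed back into name, version_id
+# and codename.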
+
+# Pattern for base file name of distro release file
+_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(
+ r'(\w+)[-_](release|version)$')
+
+# Base file names to be ignored when searching for distro release file
+_DISTRO_RELEASE_IGNORE_BASENAMES = (
+ 'debian_version',
+ 'lsb-release',
+ 'oem-release',
+ _OS_RELEASE_BASENAME,
+ 'system-release'
+)
+
+
+def linux_distribution(full_distribution_name=True):
+ """
+ Return information about the current OS distribution as a tuple
+ ``(id_name, version, codename)`` with items as follows:
+
+ * ``id_name``: If *full_distribution_name* is false, the result of
+ :func:`distro.id`. Otherwise, the result of :func:`distro.name`.
+
+ * ``version``: The result of :func:`distro.version`.
+
+ * ``codename``: The result of :func:`distro.codename`.
+
+ The interface of this function is compatible with the original
+ :py:func:`platform.linux_distribution` function, supporting a subset of
+ its parameters.
+
+ The data it returns may not exactly be the same, because it uses more data
+ sources than the original function, and that may lead to different data if
+ the OS distribution is not consistent across multiple data sources it
+ provides (there are indeed such distributions ...).
+
+ Another reason for differences is the fact that the :func:`distro.id`
+ method normalizes the distro ID string to a reliable machine-readable value
+ for a number of popular OS distributions.
+ """
+ return _distro.linux_distribution(full_distribution_name)
+
+
+def id():
+ """
+ Return the distro ID of the current distribution, as a
+ machine-readable string.
+
+ For a number of OS distributions, the returned distro ID value is
+ *reliable*, in the sense that it is documented and that it does not change
+ across releases of the distribution.
+
+ This package maintains the following reliable distro ID values:
+
+ ============== =========================================
+ Distro ID Distribution
+ ============== =========================================
+ "ubuntu" Ubuntu
+ "debian" Debian
+ "rhel" RedHat Enterprise Linux
+ "centos" CentOS
+ "fedora" Fedora
+ "sles" SUSE Linux Enterprise Server
+ "opensuse" openSUSE
+ "amazon" Amazon Linux
+ "arch" Arch Linux
+ "cloudlinux" CloudLinux OS
+ "exherbo" Exherbo Linux
+    "gentoo"       Gentoo Linux
+ "ibm_powerkvm" IBM PowerKVM
+ "kvmibm" KVM for IBM z Systems
+ "linuxmint" Linux Mint
+ "mageia" Mageia
+ "mandriva" Mandriva Linux
+ "parallels" Parallels
+ "pidora" Pidora
+ "raspbian" Raspbian
+ "oracle" Oracle Linux (and Oracle Enterprise Linux)
+ "scientific" Scientific Linux
+ "slackware" Slackware
+ "xenserver" XenServer
+ "openbsd" OpenBSD
+ "netbsd" NetBSD
+ "freebsd" FreeBSD
+ ============== =========================================
+
+    If you need additional distros with reliable IDs added to this set,
+ or if you find that the :func:`distro.id` function returns a different
+ distro ID for one of the listed distros, please create an issue in the
+ `distro issue tracker`_.
+
+ **Lookup hierarchy and transformations:**
+
+ First, the ID is obtained from the following sources, in the specified
+ order. The first available and non-empty value is used:
+
+ * the value of the "ID" attribute of the os-release file,
+
+ * the value of the "Distributor ID" attribute returned by the lsb_release
+ command,
+
+ * the first part of the file name of the distro release file,
+
+    The ID value determined in this way then passes through the following
+    transformations before it is returned by this method:
+
+ * it is translated to lower case,
+
+ * blanks (which should not be there anyway) are translated to underscores,
+
+ * a normalization of the ID is performed, based upon
+ `normalization tables`_. The purpose of this normalization is to ensure
+ that the ID is as reliable as possible, even across incompatible changes
+ in the OS distributions. A common reason for an incompatible change is
+ the addition of an os-release file, or the addition of the lsb_release
+ command, with ID values that differ from what was previously determined
+ from the distro release file name.
+ """
+ return _distro.id()
+
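+# Illustration of the normalization described in id() above: an lsb_release
+# "Distributor ID" of "RedHatEnterpriseServer" is lower-cased, blanks become
+# underscores, and NORMALIZED_LSB_ID maps "redhatenterpriseserver" to "rhel".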
+
+def name(pretty=False):
+ """
+ Return the name of the current OS distribution, as a human-readable
+ string.
+
+ If *pretty* is false, the name is returned without version or codename.
+ (e.g. "CentOS Linux")
+
+ If *pretty* is true, the version and codename are appended.
+ (e.g. "CentOS Linux 7.1.1503 (Core)")
+
+ **Lookup hierarchy:**
+
+ The name is obtained from the following sources, in the specified order.
+ The first available and non-empty value is used:
+
+ * If *pretty* is false:
+
+ - the value of the "NAME" attribute of the os-release file,
+
+ - the value of the "Distributor ID" attribute returned by the lsb_release
+ command,
+
+ - the value of the "<name>" field of the distro release file.
+
+ * If *pretty* is true:
+
+ - the value of the "PRETTY_NAME" attribute of the os-release file,
+
+ - the value of the "Description" attribute returned by the lsb_release
+ command,
+
+ - the value of the "<name>" field of the distro release file, appended
+ with the value of the pretty version ("<version_id>" and "<codename>"
+ fields) of the distro release file, if available.
+ """
+ return _distro.name(pretty)
+
+
+def version(pretty=False, best=False):
+ """
+ Return the version of the current OS distribution, as a human-readable
+ string.
+
+ If *pretty* is false, the version is returned without codename (e.g.
+ "7.0").
+
+    If *pretty* is true, the codename in parentheses is appended, if the
+ codename is non-empty (e.g. "7.0 (Maipo)").
+
+ Some distributions provide version numbers with different precisions in
+ the different sources of distribution information. Examining the different
+ sources in a fixed priority order does not always yield the most precise
+ version (e.g. for Debian 8.2, or CentOS 7.1).
+
+ The *best* parameter can be used to control the approach for the returned
+ version:
+
+ If *best* is false, the first non-empty version number in priority order of
+ the examined sources is returned.
+
+ If *best* is true, the most precise version number out of all examined
+ sources is returned.
+
+ **Lookup hierarchy:**
+
+ In all cases, the version number is obtained from the following sources.
+ If *best* is false, this order represents the priority order:
+
+ * the value of the "VERSION_ID" attribute of the os-release file,
+ * the value of the "Release" attribute returned by the lsb_release
+ command,
+ * the version number parsed from the "<version_id>" field of the first line
+ of the distro release file,
+ * the version number parsed from the "PRETTY_NAME" attribute of the
+ os-release file, if it follows the format of the distro release files.
+ * the version number parsed from the "Description" attribute returned by
+ the lsb_release command, if it follows the format of the distro release
+ files.
+ """
+ return _distro.version(pretty, best)
+
+
+def version_parts(best=False):
+ """
+ Return the version of the current OS distribution as a tuple
+ ``(major, minor, build_number)`` with items as follows:
+
+ * ``major``: The result of :func:`distro.major_version`.
+
+ * ``minor``: The result of :func:`distro.minor_version`.
+
+ * ``build_number``: The result of :func:`distro.build_number`.
+
+ For a description of the *best* parameter, see the :func:`distro.version`
+ method.
+ """
+ return _distro.version_parts(best)
+
+
+def major_version(best=False):
+ """
+ Return the major version of the current OS distribution, as a string,
+ if provided.
+ Otherwise, the empty string is returned. The major version is the first
+ part of the dot-separated version string.
+
+ For a description of the *best* parameter, see the :func:`distro.version`
+ method.
+ """
+ return _distro.major_version(best)
+
+
+def minor_version(best=False):
+ """
+ Return the minor version of the current OS distribution, as a string,
+ if provided.
+ Otherwise, the empty string is returned. The minor version is the second
+ part of the dot-separated version string.
+
+ For a description of the *best* parameter, see the :func:`distro.version`
+ method.
+ """
+ return _distro.minor_version(best)
+
+
+def build_number(best=False):
+ """
+ Return the build number of the current OS distribution, as a string,
+ if provided.
+ Otherwise, the empty string is returned. The build number is the third part
+ of the dot-separated version string.
+
+ For a description of the *best* parameter, see the :func:`distro.version`
+ method.
+ """
+ return _distro.build_number(best)
+
+
+def like():
+ """
+ Return a space-separated list of distro IDs of distributions that are
+ closely related to the current OS distribution in regards to packaging
+ and programming interfaces, for example distributions the current
+ distribution is a derivative from.
+
+ **Lookup hierarchy:**
+
+ This information item is only provided by the os-release file.
+ For details, see the description of the "ID_LIKE" attribute in the
+ `os-release man page
+ <http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
+ """
+ return _distro.like()
+
+
+def codename():
+ """
+ Return the codename for the release of the current OS distribution,
+ as a string.
+
+ If the distribution does not have a codename, an empty string is returned.
+
+ Note that the returned codename is not always really a codename. For
+ example, openSUSE returns "x86_64". This function does not handle such
+ cases in any special way and just returns the string it finds, if any.
+
+ **Lookup hierarchy:**
+
+ * the codename within the "VERSION" attribute of the os-release file, if
+ provided,
+
+ * the value of the "Codename" attribute returned by the lsb_release
+ command,
+
+ * the value of the "<codename>" field of the distro release file.
+ """
+ return _distro.codename()
+
+
+def info(pretty=False, best=False):
+ """
+ Return certain machine-readable information items about the current OS
+ distribution in a dictionary, as shown in the following example:
+
+ .. sourcecode:: python
+
+ {
+ 'id': 'rhel',
+ 'version': '7.0',
+ 'version_parts': {
+ 'major': '7',
+ 'minor': '0',
+ 'build_number': ''
+ },
+ 'like': 'fedora',
+ 'codename': 'Maipo'
+ }
+
+ The dictionary structure and keys are always the same, regardless of which
+ information items are available in the underlying data sources. The values
+ for the various keys are as follows:
+
+ * ``id``: The result of :func:`distro.id`.
+
+ * ``version``: The result of :func:`distro.version`.
+
+ * ``version_parts -> major``: The result of :func:`distro.major_version`.
+
+ * ``version_parts -> minor``: The result of :func:`distro.minor_version`.
+
+ * ``version_parts -> build_number``: The result of
+ :func:`distro.build_number`.
+
+ * ``like``: The result of :func:`distro.like`.
+
+ * ``codename``: The result of :func:`distro.codename`.
+
+ For a description of the *pretty* and *best* parameters, see the
+ :func:`distro.version` method.
+ """
+ return _distro.info(pretty, best)
+
+
+def os_release_info():
+ """
+ Return a dictionary containing key-value pairs for the information items
+ from the os-release file data source of the current OS distribution.
+
+ See `os-release file`_ for details about these information items.
+ """
+ return _distro.os_release_info()
+
+
+def lsb_release_info():
+ """
+ Return a dictionary containing key-value pairs for the information items
+ from the lsb_release command data source of the current OS distribution.
+
+ See `lsb_release command output`_ for details about these information
+ items.
+ """
+ return _distro.lsb_release_info()
+
+
+def distro_release_info():
+ """
+ Return a dictionary containing key-value pairs for the information items
+ from the distro release file data source of the current OS distribution.
+
+ See `distro release file`_ for details about these information items.
+ """
+ return _distro.distro_release_info()
+
+
+def uname_info():
+ """
+ Return a dictionary containing key-value pairs for the information items
+    from the uname command data source of the current OS distribution.
+ """
+ return _distro.uname_info()
+
+
+def os_release_attr(attribute):
+ """
+ Return a single named information item from the os-release file data source
+ of the current OS distribution.
+
+ Parameters:
+
+ * ``attribute`` (string): Key of the information item.
+
+ Returns:
+
+ * (string): Value of the information item, if the item exists.
+ The empty string, if the item does not exist.
+
+ See `os-release file`_ for details about these information items.
+ """
+ return _distro.os_release_attr(attribute)
+
+
+def lsb_release_attr(attribute):
+ """
+ Return a single named information item from the lsb_release command output
+ data source of the current OS distribution.
+
+ Parameters:
+
+ * ``attribute`` (string): Key of the information item.
+
+ Returns:
+
+ * (string): Value of the information item, if the item exists.
+ The empty string, if the item does not exist.
+
+ See `lsb_release command output`_ for details about these information
+ items.
+ """
+ return _distro.lsb_release_attr(attribute)
+
+
+def distro_release_attr(attribute):
+ """
+ Return a single named information item from the distro release file
+ data source of the current OS distribution.
+
+ Parameters:
+
+ * ``attribute`` (string): Key of the information item.
+
+ Returns:
+
+ * (string): Value of the information item, if the item exists.
+ The empty string, if the item does not exist.
+
+ See `distro release file`_ for details about these information items.
+ """
+ return _distro.distro_release_attr(attribute)
+
+
+def uname_attr(attribute):
+ """
+    Return a single named information item from the uname command output
+    data source of the current OS distribution.
+
+ Parameters:
+
+ * ``attribute`` (string): Key of the information item.
+
+ Returns:
+
+ * (string): Value of the information item, if the item exists.
+ The empty string, if the item does not exist.
+ """
+ return _distro.uname_attr(attribute)
+
+
+class cached_property(object):
+ """A version of @property which caches the value. On access, it calls the
+ underlying function and sets the value in `__dict__` so future accesses
+ will not re-call the property.
+ """
+ def __init__(self, f):
+ self._fname = f.__name__
+ self._f = f
+
+ def __get__(self, obj, owner):
+ assert obj is not None, 'call {} on an instance'.format(self._fname)
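+        # Because this descriptor defines no __set__, the value stored in
+        # obj.__dict__ shadows it on subsequent lookups (non-data descriptor
+        # precedence), so the wrapped function runs at most once per instance.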
+ ret = obj.__dict__[self._fname] = self._f(obj)
+ return ret
+
+
+class LinuxDistribution(object):
+ """
+    Provides information about an OS distribution.
+
+ This package creates a private module-global instance of this class with
+    default initialization arguments, which is used by the
+ `consolidated accessor functions`_ and `single source accessor functions`_.
+ By using default initialization arguments, that module-global instance
+ returns data about the current OS distribution (i.e. the distro this
+ package runs on).
+
+ Normally, it is not necessary to create additional instances of this class.
+ However, in situations where control is needed over the exact data sources
+ that are used, instances of this class can be created with a specific
+ distro release file, or a specific os-release file, or without invoking the
+ lsb_release command.
+ """
+
+ def __init__(self,
+ include_lsb=True,
+ os_release_file='',
+ distro_release_file='',
+ include_uname=True):
+ """
+ The initialization method of this class gathers information from the
+ available data sources, and stores that in private instance attributes.
+ Subsequent access to the information items uses these private instance
+ attributes, so that the data sources are read only once.
+
+ Parameters:
+
+ * ``include_lsb`` (bool): Controls whether the
+ `lsb_release command output`_ is included as a data source.
+
+ If the lsb_release command is not available in the program execution
+ path, the data source for the lsb_release command will be empty.
+
+ * ``os_release_file`` (string): The path name of the
+ `os-release file`_ that is to be used as a data source.
+
+ An empty string (the default) will cause the default path name to
+ be used (see `os-release file`_ for details).
+
+ If the specified or defaulted os-release file does not exist, the
+ data source for the os-release file will be empty.
+
+ * ``distro_release_file`` (string): The path name of the
+ `distro release file`_ that is to be used as a data source.
+
+ An empty string (the default) will cause a default search algorithm
+ to be used (see `distro release file`_ for details).
+
+ If the specified distro release file does not exist, or if no default
+ distro release file can be found, the data source for the distro
+ release file will be empty.
+
+        * ``include_uname`` (bool): Controls whether uname command output is
+ included as a data source. If the uname command is not available in
+          the program execution path, the data source for the uname command will
+ be empty.
+
+ Public instance attributes:
+
+ * ``os_release_file`` (string): The path name of the
+ `os-release file`_ that is actually used as a data source. The
+ empty string if no distro release file is used as a data source.
+
+ * ``distro_release_file`` (string): The path name of the
+ `distro release file`_ that is actually used as a data source. The
+ empty string if no distro release file is used as a data source.
+
+ * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter.
+ This controls whether the lsb information will be loaded.
+
+ * ``include_uname`` (bool): The result of the ``include_uname``
+ parameter. This controls whether the uname information will
+ be loaded.
+
+ Raises:
+
+ * :py:exc:`IOError`: Some I/O issue with an os-release file or distro
+ release file.
+
+ * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
+ some issue (other than not being available in the program execution
+ path).
+
+ * :py:exc:`UnicodeError`: A data source has unexpected characters or
+ uses an unexpected encoding.
+ """
+ self.os_release_file = os_release_file or \
+ os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME)
+ self.distro_release_file = distro_release_file or '' # updated later
+ self.include_lsb = include_lsb
+ self.include_uname = include_uname
+
+ def __repr__(self):
+ """Return repr of all info
+ """
+ return \
+ "LinuxDistribution(" \
+ "os_release_file={self.os_release_file!r}, " \
+ "distro_release_file={self.distro_release_file!r}, " \
+ "include_lsb={self.include_lsb!r}, " \
+ "include_uname={self.include_uname!r}, " \
+ "_os_release_info={self._os_release_info!r}, " \
+ "_lsb_release_info={self._lsb_release_info!r}, " \
+ "_distro_release_info={self._distro_release_info!r}, " \
+ "_uname_info={self._uname_info!r})".format(
+ self=self)
+
+ def linux_distribution(self, full_distribution_name=True):
+ """
+ Return information about the OS distribution that is compatible
+ with Python's :func:`platform.linux_distribution`, supporting a subset
+ of its parameters.
+
+ For details, see :func:`distro.linux_distribution`.
+ """
+ return (
+ self.name() if full_distribution_name else self.id(),
+ self.version(),
+ self.codename()
+ )
+
+ def id(self):
+ """Return the distro ID of the OS distribution, as a string.
+
+ For details, see :func:`distro.id`.
+ """
+ def normalize(distro_id, table):
+ distro_id = distro_id.lower().replace(' ', '_')
+ return table.get(distro_id, distro_id)
+
+ distro_id = self.os_release_attr('id')
+ if distro_id:
+ return normalize(distro_id, NORMALIZED_OS_ID)
+
+ distro_id = self.lsb_release_attr('distributor_id')
+ if distro_id:
+ return normalize(distro_id, NORMALIZED_LSB_ID)
+
+ distro_id = self.distro_release_attr('id')
+ if distro_id:
+ return normalize(distro_id, NORMALIZED_DISTRO_ID)
+
+ distro_id = self.uname_attr('id')
+ if distro_id:
+ return normalize(distro_id, NORMALIZED_DISTRO_ID)
+
+ return ''
+
+ def name(self, pretty=False):
+ """
+ Return the name of the OS distribution, as a string.
+
+ For details, see :func:`distro.name`.
+ """
+ name = self.os_release_attr('name') \
+ or self.lsb_release_attr('distributor_id') \
+ or self.distro_release_attr('name') \
+ or self.uname_attr('name')
+ if pretty:
+ name = self.os_release_attr('pretty_name') \
+ or self.lsb_release_attr('description')
+ if not name:
+ name = self.distro_release_attr('name') \
+ or self.uname_attr('name')
+ version = self.version(pretty=True)
+ if version:
+ name = name + ' ' + version
+ return name or ''
+
+ def version(self, pretty=False, best=False):
+ """
+ Return the version of the OS distribution, as a string.
+
+ For details, see :func:`distro.version`.
+ """
+ versions = [
+ self.os_release_attr('version_id'),
+ self.lsb_release_attr('release'),
+ self.distro_release_attr('version_id'),
+ self._parse_distro_release_content(
+ self.os_release_attr('pretty_name')).get('version_id', ''),
+ self._parse_distro_release_content(
+ self.lsb_release_attr('description')).get('version_id', ''),
+ self.uname_attr('release')
+ ]
+ version = ''
+ if best:
+ # This algorithm uses the last version in priority order that has
+ # the best precision. If the versions are not in conflict, that
+ # does not matter; otherwise, using the last one instead of the
+ # first one might be considered a surprise.
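+            # For example (illustrative): if os-release reports "8" while the
+            # distro release file reports "8.2", best=True selects "8.2".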
+ for v in versions:
+ if v.count(".") > version.count(".") or version == '':
+ version = v
+ else:
+ for v in versions:
+ if v != '':
+ version = v
+ break
+ if pretty and version and self.codename():
+ version = u'{0} ({1})'.format(version, self.codename())
+ return version
+
+ def version_parts(self, best=False):
+ """
+ Return the version of the OS distribution, as a tuple of version
+ numbers.
+
+ For details, see :func:`distro.version_parts`.
+ """
+ version_str = self.version(best=best)
+ if version_str:
+ version_regex = re.compile(r'(\d+)\.?(\d+)?\.?(\d+)?')
+ matches = version_regex.match(version_str)
+ if matches:
+ major, minor, build_number = matches.groups()
+ return major, minor or '', build_number or ''
+ return '', '', ''
+
+ def major_version(self, best=False):
+ """
+ Return the major version number of the current distribution.
+
+ For details, see :func:`distro.major_version`.
+ """
+ return self.version_parts(best)[0]
+
+ def minor_version(self, best=False):
+ """
+ Return the minor version number of the current distribution.
+
+ For details, see :func:`distro.minor_version`.
+ """
+ return self.version_parts(best)[1]
+
+ def build_number(self, best=False):
+ """
+ Return the build number of the current distribution.
+
+ For details, see :func:`distro.build_number`.
+ """
+ return self.version_parts(best)[2]
+
+ def like(self):
+ """
+ Return the IDs of distributions that are like the OS distribution.
+
+ For details, see :func:`distro.like`.
+ """
+ return self.os_release_attr('id_like') or ''
+
+ def codename(self):
+ """
+ Return the codename of the OS distribution.
+
+ For details, see :func:`distro.codename`.
+ """
+ try:
+ # Handle os_release specially since distros might purposefully set
+ # this to empty string to have no codename
+ return self._os_release_info['codename']
+ except KeyError:
+ return self.lsb_release_attr('codename') \
+ or self.distro_release_attr('codename') \
+ or ''
+
+ def info(self, pretty=False, best=False):
+ """
+ Return certain machine-readable information about the OS
+ distribution.
+
+ For details, see :func:`distro.info`.
+ """
+ return dict(
+ id=self.id(),
+ version=self.version(pretty, best),
+ version_parts=dict(
+ major=self.major_version(best),
+ minor=self.minor_version(best),
+ build_number=self.build_number(best)
+ ),
+ like=self.like(),
+ codename=self.codename(),
+ )
+
+ def os_release_info(self):
+ """
+ Return a dictionary containing key-value pairs for the information
+ items from the os-release file data source of the OS distribution.
+
+ For details, see :func:`distro.os_release_info`.
+ """
+ return self._os_release_info
+
+ def lsb_release_info(self):
+ """
+ Return a dictionary containing key-value pairs for the information
+ items from the lsb_release command data source of the OS
+ distribution.
+
+ For details, see :func:`distro.lsb_release_info`.
+ """
+ return self._lsb_release_info
+
+ def distro_release_info(self):
+ """
+ Return a dictionary containing key-value pairs for the information
+ items from the distro release file data source of the OS
+ distribution.
+
+ For details, see :func:`distro.distro_release_info`.
+ """
+ return self._distro_release_info
+
+ def uname_info(self):
+ """
+ Return a dictionary containing key-value pairs for the information
+ items from the uname command data source of the OS distribution.
+
+ For details, see :func:`distro.uname_info`.
+ """
+ return self._uname_info
+
+ def os_release_attr(self, attribute):
+ """
+ Return a single named information item from the os-release file data
+ source of the OS distribution.
+
+ For details, see :func:`distro.os_release_attr`.
+ """
+ return self._os_release_info.get(attribute, '')
+
+ def lsb_release_attr(self, attribute):
+ """
+ Return a single named information item from the lsb_release command
+ output data source of the OS distribution.
+
+ For details, see :func:`distro.lsb_release_attr`.
+ """
+ return self._lsb_release_info.get(attribute, '')
+
+ def distro_release_attr(self, attribute):
+ """
+ Return a single named information item from the distro release file
+ data source of the OS distribution.
+
+ For details, see :func:`distro.distro_release_attr`.
+ """
+ return self._distro_release_info.get(attribute, '')
+
+ def uname_attr(self, attribute):
+ """
+ Return a single named information item from the uname command
+ output data source of the OS distribution.
+
+        For details, see :func:`distro.uname_attr`.
+ """
+ return self._uname_info.get(attribute, '')
+
+ @cached_property
+ def _os_release_info(self):
+ """
+ Get the information items from the specified os-release file.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ if os.path.isfile(self.os_release_file):
+ with open(self.os_release_file) as release_file:
+ return self._parse_os_release_content(release_file)
+ return {}
+
+ @staticmethod
+ def _parse_os_release_content(lines):
+ """
+ Parse the lines of an os-release file.
+
+ Parameters:
+
+ * lines: Iterable through the lines in the os-release file.
+ Each line must be a unicode string or a UTF-8 encoded byte
+ string.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ props = {}
+ lexer = shlex.shlex(lines, posix=True)
+ lexer.whitespace_split = True
+
+ # The shlex module defines its `wordchars` variable using literals,
+ # making it dependent on the encoding of the Python source file.
+ # In Python 2.6 and 2.7, the shlex source file is encoded in
+ # 'iso-8859-1', and the `wordchars` variable is defined as a byte
+ # string. This causes a UnicodeDecodeError to be raised when the
+ # parsed content is a unicode object. The following fix resolves that
+ # (... but it should be fixed in shlex...):
+ if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
+ lexer.wordchars = lexer.wordchars.decode('iso-8859-1')
+
+ tokens = list(lexer)
+ for token in tokens:
+ # At this point, all shell-like parsing has been done (i.e.
+ # comments processed, quotes and backslash escape sequences
+ # processed, multi-line values assembled, trailing newlines
+ # stripped, etc.), so the tokens are now either:
+ # * variable assignments: var=value
+ # * commands or their arguments (not allowed in os-release)
+ if '=' in token:
+ k, v = token.split('=', 1)
+ if isinstance(v, bytes):
+ v = v.decode('utf-8')
+ props[k.lower()] = v
+ else:
+ # Ignore any tokens that are not variable assignments
+ pass
+
+ if 'version_codename' in props:
+ # os-release added a version_codename field. Use that in
+            # preference to anything else. Note that some distros purposefully
+ # do not have code names. They should be setting
+ # version_codename=""
+ props['codename'] = props['version_codename']
+ elif 'ubuntu_codename' in props:
+ # Same as above but a non-standard field name used on older Ubuntus
+ props['codename'] = props['ubuntu_codename']
+ elif 'version' in props:
+ # If there is no version_codename, parse it from the version
+ codename = re.search(r'(\(\D+\))|,(\s+)?\D+', props['version'])
+ if codename:
+ codename = codename.group()
+ codename = codename.strip('()')
+ codename = codename.strip(',')
+ codename = codename.strip()
+                # codename appears within parentheses.
+ props['codename'] = codename
+
+ return props
+
+ @cached_property
+ def _lsb_release_info(self):
+ """
+ Get the information items from the lsb_release command output.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ if not self.include_lsb:
+ return {}
+ with open(os.devnull, 'w') as devnull:
+ try:
+ cmd = ('lsb_release', '-a')
+ stdout = subprocess.check_output(cmd, stderr=devnull)
+ except OSError: # Command not found
+ return {}
+ content = stdout.decode(sys.getfilesystemencoding()).splitlines()
+ return self._parse_lsb_release_content(content)
+
+ @staticmethod
+ def _parse_lsb_release_content(lines):
+ """
+ Parse the output of the lsb_release command.
+
+ Parameters:
+
+ * lines: Iterable through the lines of the lsb_release output.
+ Each line must be a unicode string or a UTF-8 encoded byte
+ string.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ props = {}
+ for line in lines:
+ kv = line.strip('\n').split(':', 1)
+ if len(kv) != 2:
+ # Ignore lines without colon.
+ continue
+ k, v = kv
+ props.update({k.replace(' ', '_').lower(): v.strip()})
+ return props
+
+ @cached_property
+ def _uname_info(self):
+ with open(os.devnull, 'w') as devnull:
+ try:
+ cmd = ('uname', '-rs')
+ stdout = subprocess.check_output(cmd, stderr=devnull)
+ except OSError:
+ return {}
+ content = stdout.decode(sys.getfilesystemencoding()).splitlines()
+ return self._parse_uname_content(content)
+
+ @staticmethod
+ def _parse_uname_content(lines):
+ props = {}
+ match = re.search(r'^([^\s]+)\s+([\d\.]+)', lines[0].strip())
+ if match:
+ name, version = match.groups()
+
+ # This is to prevent the Linux kernel version from
+ # appearing as the 'best' version on otherwise
+ # identifiable distributions.
+ if name == 'Linux':
+ return {}
+ props['id'] = name.lower()
+ props['name'] = name
+ props['release'] = version
+ return props
+
+ @cached_property
+ def _distro_release_info(self):
+ """
+ Get the information items from the specified distro release file.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ if self.distro_release_file:
+ # If it was specified, we use it and parse what we can, even if
+ # its file name or content does not match the expected pattern.
+ distro_info = self._parse_distro_release_file(
+ self.distro_release_file)
+ basename = os.path.basename(self.distro_release_file)
+ # The file name pattern for user-specified distro release files
+ # is somewhat more tolerant (compared to when searching for the
+ # file), because we want to use what was specified as best as
+ # possible.
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+ if 'name' in distro_info \
+ and 'cloudlinux' in distro_info['name'].lower():
+ distro_info['id'] = 'cloudlinux'
+ elif match:
+ distro_info['id'] = match.group(1)
+ return distro_info
+ else:
+ try:
+ basenames = os.listdir(_UNIXCONFDIR)
+ # We sort for repeatability in cases where there are multiple
+ # distro specific files; e.g. CentOS, Oracle, Enterprise all
+ # containing `redhat-release` on top of their own.
+ basenames.sort()
+ except OSError:
+ # This may occur when /etc is not readable but we can't be
+ # sure about the *-release files. Check common entries of
+ # /etc for information. If they turn out to not be there the
+ # error is handled in `_parse_distro_release_file()`.
+ basenames = ['SuSE-release',
+ 'arch-release',
+ 'base-release',
+ 'centos-release',
+ 'fedora-release',
+ 'gentoo-release',
+ 'mageia-release',
+ 'mandrake-release',
+ 'mandriva-release',
+ 'mandrivalinux-release',
+ 'manjaro-release',
+ 'oracle-release',
+ 'redhat-release',
+ 'sl-release',
+ 'slackware-version']
+ for basename in basenames:
+ if basename in _DISTRO_RELEASE_IGNORE_BASENAMES:
+ continue
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+ if match:
+ filepath = os.path.join(_UNIXCONFDIR, basename)
+ distro_info = self._parse_distro_release_file(filepath)
+ if 'name' in distro_info:
+ # The name is always present if the pattern matches
+ self.distro_release_file = filepath
+ distro_info['id'] = match.group(1)
+ if 'cloudlinux' in distro_info['name'].lower():
+ distro_info['id'] = 'cloudlinux'
+ return distro_info
+ return {}
+
+ def _parse_distro_release_file(self, filepath):
+ """
+ Parse a distro release file.
+
+ Parameters:
+
+ * filepath: Path name of the distro release file.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ try:
+ with open(filepath) as fp:
+ # Only parse the first line. For instance, on SLES there
+ # are multiple lines. We don't want them...
+ return self._parse_distro_release_content(fp.readline())
+ except (OSError, IOError):
+ # Ignore not being able to read a specific, seemingly version
+ # related file.
+ # See https://github.com/nir0s/distro/issues/162
+ return {}
+
+ @staticmethod
+ def _parse_distro_release_content(line):
+ """
+ Parse a line from a distro release file.
+
+ Parameters:
+ * line: Line from the distro release file. Must be a unicode string
+ or a UTF-8 encoded byte string.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ if isinstance(line, bytes):
+ line = line.decode('utf-8')
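+        # The line is matched in reverse so that the trailing, optional
+        # items (codename and version) are captured first; the groups are
+        # reversed back below.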
+ matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(
+ line.strip()[::-1])
+ distro_info = {}
+ if matches:
+ # regexp ensures non-None
+ distro_info['name'] = matches.group(3)[::-1]
+ if matches.group(2):
+ distro_info['version_id'] = matches.group(2)[::-1]
+ if matches.group(1):
+ distro_info['codename'] = matches.group(1)[::-1]
+ elif line:
+ distro_info['name'] = line.strip()
+ return distro_info
+
+
+_distro = LinuxDistribution()
+
+
+def main():
+ logger = logging.getLogger(__name__)
+ logger.setLevel(logging.DEBUG)
+ logger.addHandler(logging.StreamHandler(sys.stdout))
+
+ parser = argparse.ArgumentParser(description="OS distro info tool")
+ parser.add_argument(
+ '--json',
+ '-j',
+ help="Output in machine readable format",
+ action="store_true")
+ args = parser.parse_args()
+
+ if args.json:
+ logger.info(json.dumps(info(), indent=4, sort_keys=True))
+ else:
+ logger.info('Name: %s', name(pretty=True))
+ distribution_version = version(pretty=True)
+ logger.info('Version: %s', distribution_version)
+ distribution_codename = codename()
+ logger.info('Codename: %s', distribution_codename)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/python/distro/query_local_distro.py b/third_party/python/distro/query_local_distro.py
new file mode 100755
index 0000000000..5c5ed9ef6f
--- /dev/null
+++ b/third_party/python/distro/query_local_distro.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright 2015,2016 Nir Cohen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+from pprint import pformat
+
+import distro
+
+
+def pprint(obj):
+ for line in pformat(obj).split('\n'):
+ print(4 * ' ' + line)
+
+
+print('os_release_info:')
+pprint(distro.os_release_info())
+print('lsb_release_info:')
+pprint(distro.lsb_release_info())
+print('distro_release_info:')
+pprint(distro.distro_release_info())
+print('id: {0}'.format(distro.id()))
+print('name: {0}'.format(distro.name()))
+print('name_pretty: {0}'.format(distro.name(True)))
+print('version: {0}'.format(distro.version()))
+print('version_pretty: {0}'.format(distro.version(True)))
+print('like: {0}'.format(distro.like()))
+print('codename: {0}'.format(distro.codename()))
+print('linux_distribution_full: {0}'.format(distro.linux_distribution()))
+print('linux_distribution: {0}'.format(distro.linux_distribution(False)))
+print('major_version: {0}'.format(distro.major_version()))
+print('minor_version: {0}'.format(distro.minor_version()))
+print('build_number: {0}'.format(distro.build_number()))
diff --git a/third_party/python/distro/setup.cfg b/third_party/python/distro/setup.cfg
new file mode 100644
index 0000000000..51b5f83b3d
--- /dev/null
+++ b/third_party/python/distro/setup.cfg
@@ -0,0 +1,10 @@
+[bdist_wheel]
+universal = 1
+
+[metadata]
+license_file = LICENSE
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/distro/setup.py b/third_party/python/distro/setup.py
new file mode 100644
index 0000000000..0657449267
--- /dev/null
+++ b/third_party/python/distro/setup.py
@@ -0,0 +1,67 @@
+# Copyright 2015,2016 Nir Cohen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import codecs
+from setuptools import setup
+
+# The following version is parsed by other parts of this package.
+# Don't change the format of the line, or the variable name.
+package_version = "1.4.0"
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+def read(*parts):
+ # intentionally *not* adding an encoding option to open
+ return codecs.open(os.path.join(here, *parts), 'r').read()
+
+
+setup(
+ name='distro',
+ version=package_version,
+ url='https://github.com/nir0s/distro',
+ author='Nir Cohen',
+ author_email='nir36g@gmail.com',
+ license='Apache License, Version 2.0',
+ platforms='All',
+ description='Distro - an OS platform information API',
+ long_description=read('README.md'),
+ long_description_content_type='text/markdown',
+ py_modules=['distro'],
+ entry_points={
+ 'console_scripts': [
+ 'distro = distro:main',
+ ]
+ },
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: System Administrators',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Operating System :: POSIX :: Linux',
+ 'Operating System :: POSIX :: BSD',
+ 'Operating System :: POSIX :: BSD :: FreeBSD',
+ 'Operating System :: POSIX :: BSD :: NetBSD',
+ 'Operating System :: POSIX :: BSD :: OpenBSD',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+ 'Topic :: System :: Operating System',
+ ]
+)
diff --git a/third_party/python/dlmanager/README.rst b/third_party/python/dlmanager/README.rst
new file mode 100644
index 0000000000..e8db528fa2
--- /dev/null
+++ b/third_party/python/dlmanager/README.rst
@@ -0,0 +1,59 @@
+.. image:: https://badge.fury.io/py/dlmanager.svg
+ :target: https://pypi.python.org/pypi/dlmanager
+
+.. image:: https://readthedocs.org/projects/dlmanager/badge/?version=latest
+ :target: http://dlmanager.readthedocs.org/en/latest/?badge=latest
+ :alt: Documentation Status
+
+.. image:: https://travis-ci.org/parkouss/dlmanager.svg?branch=master
+ :target: https://travis-ci.org/parkouss/dlmanager
+
+.. image:: https://codecov.io/github/parkouss/dlmanager/coverage.svg?branch=master
+ :target: https://codecov.io/github/parkouss/dlmanager?branch=master
+
+dlmanager
+=========
+
+**dlmanager** is a Python 2 and 3 download manager library, with the following
+features:
+
+- Download files in background and in parallel
+- Cancel downloads
+- Store downloads in a given directory, avoiding re-downloading files
+- Limit the size of this directory, removing the oldest files
+
+
+Example
+-------
+
+.. code-block:: python
+
+ from dlmanager import DownloadManager, PersistLimit
+
+ manager = DownloadManager(
+ "dlmanager-destir",
+ persist_limit=PersistLimit(
+ size_limit=1073741824, # 1 GB max
+ file_limit=10, # force to keep 10 files even if size_limit is reached
+ )
+ )
+
+ # Start downloads in background
+ # Note that if files are already present, this is a no-op.
+ manager.download(url1)
+ manager.download(url2)
+
+ # Wait for completion
+ try:
+ manager.wait()
+ except:
+ manager.cancel()
+ raise
+
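+A progress callback can also be attached to a download (a minimal sketch;
+``on_progress`` and the printed format are illustrative only):
+
+.. code-block:: python
+
+    def on_progress(download, current, total):
+        if total:
+            print("%s: %d/%d bytes" % (download.get_url(), current, total))
+
+    manager.download(url1, progress=on_progress)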
+
+Installation
+------------
+
+Use pip: ::
+
+ pip install -U dlmanager
diff --git a/third_party/python/dlmanager/check.py b/third_party/python/dlmanager/check.py
new file mode 100755
index 0000000000..bcc842305e
--- /dev/null
+++ b/third_party/python/dlmanager/check.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+"""
+Run flake8 checks and tests.
+"""
+
+import os
+import argparse
+import pipes
+import shutil
+import tempfile
+
+from subprocess import check_call
+
+
+def parse_args():
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument('-C', '--with-coverage', action='store_true',
+ help="Generate coverage data from the tests run")
+ parser.add_argument('-H', '--cover-html', action='store_true',
+ help='generate html files to see test coverage')
+ return parser.parse_args()
+
+
+def run(cmd, **kwargs):
+ msg = 'Running: |%s|' % ' '.join(pipes.quote(c) for c in cmd)
+ if kwargs.get('cwd'):
+ msg += ' in %s' % kwargs['cwd']
+ print(msg)
+ check_call(cmd, **kwargs)
+
+
+def rm(path):
+ if os.path.isfile(path):
+ os.unlink(path)
+ elif os.path.isdir(path):
+ shutil.rmtree(path)
+
+
+if __name__ == '__main__':
+ options = parse_args()
+
+ here = os.path.dirname(os.path.abspath(__file__))
+ os.chdir(here)
+
+ run(['flake8', 'dlmanager', 'tests', 'setup.py', __file__])
+
+ if options.with_coverage:
+ rm('.coverage')
+ test_run_cmd = ['coverage', 'run']
+ else:
+ test_run_cmd = ['python']
+
+ tmpdir = tempfile.gettempdir()
+ tmpfiles = set(os.listdir(tmpdir))
+ run(test_run_cmd + ['setup.py', 'test'])
+
+    # new files that appeared in the temp dir during the test run
+    remaining_tmpfiles = set(os.listdir(tmpdir)) - tmpfiles
+ assert not remaining_tmpfiles, "tests leaked some temp files: %s" % (
+ ", ".join("`%s`" % os.path.join(tmpdir, f) for f in remaining_tmpfiles)
+ )
+
+ if options.with_coverage and options.cover_html:
+ rm('htmlcov')
+ run(['coverage', 'html'])
+ print("See coverage: |firefox %s|"
+ % os.path.join(here, 'htmlcov', 'index.html'))
diff --git a/third_party/python/dlmanager/dlmanager/__init__.py b/third_party/python/dlmanager/dlmanager/__init__.py
new file mode 100644
index 0000000000..0890af484a
--- /dev/null
+++ b/third_party/python/dlmanager/dlmanager/__init__.py
@@ -0,0 +1,18 @@
+import logging
+
+__version__ = "0.1.1"
+
+
+try: # Python 2.7+
+ from logging import NullHandler
+except ImportError:
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+# Set default logging handler to avoid "No handler found" warnings.
+logging.getLogger(__name__).addHandler(NullHandler())
+
+# exported api
+from dlmanager.manager import Download, DownloadInterrupt, DownloadManager # noqa
+from dlmanager.persist_limit import PersistLimit # noqa
diff --git a/third_party/python/dlmanager/dlmanager/fs.py b/third_party/python/dlmanager/dlmanager/fs.py
new file mode 100644
index 0000000000..8908b5efce
--- /dev/null
+++ b/third_party/python/dlmanager/dlmanager/fs.py
@@ -0,0 +1,116 @@
+import errno
+import logging
+import os
+import shutil
+import stat
+import time
+
+"""
+File system utilities, copied from mozfile.
+"""
+
+LOG = logging.getLogger(__name__)
+
+
+def _call_windows_retry(func, args=(), retry_max=5, retry_delay=0.5):
+ """
+ It's possible to see spurious errors on Windows due to various things
+    keeping a handle to the directory open (explorer, virus scanners, etc.),
+    so we try a few times if it fails with a known error.
+ """
+ retry_count = 0
+ while True:
+ try:
+ func(*args)
+ except OSError as e:
+ # Error codes are defined in:
+ # http://docs.python.org/2/library/errno.html#module-errno
+ if e.errno not in (errno.EACCES, errno.ENOTEMPTY):
+ raise
+
+ if retry_count == retry_max:
+ raise
+
+ retry_count += 1
+
+ LOG.info('%s() failed for "%s". Reason: %s (%s). Retrying...',
+ func.__name__, args, e.strerror, e.errno)
+ time.sleep(retry_delay)
+ else:
+ # If no exception has been thrown it should be done
+ break
+
+
+def remove(path):
+ """Removes the specified file, link, or directory tree.
+
+ This is a replacement for shutil.rmtree that works better under
+    Windows. It does the following things:
+
+ - check path access for the current user before trying to remove
+ - retry operations on some known errors due to various things keeping
+ a handle on file paths - like explorer, virus scanners, etc. The
+ known errors are errno.EACCES and errno.ENOTEMPTY, and it will
+       retry up to five times with a delay of 0.5 seconds between each
+ attempt.
+
+    Note that no error will be raised if the given path does not exist.
+
+ :param path: path to be removed
+ """
+
+ def _call_with_windows_retry(*args, **kwargs):
+ try:
+ _call_windows_retry(*args, **kwargs)
+ except OSError as e:
+ # The file or directory to be removed doesn't exist anymore
+ if e.errno != errno.ENOENT:
+ raise
+
+ def _update_permissions(path):
+ """Sets specified pemissions depending on filetype"""
+ if os.path.islink(path):
+ # Path is a symlink which we don't have to modify
+ # because it should already have all the needed permissions
+ return
+
+ stats = os.stat(path)
+
+ if os.path.isfile(path):
+ mode = stats.st_mode | stat.S_IWUSR
+ elif os.path.isdir(path):
+ mode = stats.st_mode | stat.S_IWUSR | stat.S_IXUSR
+ else:
+ # Not supported type
+ return
+
+ _call_with_windows_retry(os.chmod, (path, mode))
+
+ if not os.path.exists(path):
+ return
+
+ if os.path.isfile(path) or os.path.islink(path):
+ # Verify the file or link is read/write for the current user
+ _update_permissions(path)
+ _call_with_windows_retry(os.remove, (path,))
+
+ elif os.path.isdir(path):
+ # Verify the directory is read/write/execute for the current user
+ _update_permissions(path)
+
+ # We're ensuring that every nested item has writable permission.
+ for root, dirs, files in os.walk(path):
+ for entry in dirs + files:
+ _update_permissions(os.path.join(root, entry))
+ _call_with_windows_retry(shutil.rmtree, (path,))
+
+
+def move(src, dst):
+ """
+ Move a file or directory path.
+
+    This is a replacement for shutil.move that works better under Windows,
+ retrying operations on some known errors due to various things keeping
+ a handle on file paths.
+ """
+ _call_windows_retry(shutil.move, (src, dst))
diff --git a/third_party/python/dlmanager/dlmanager/manager.py b/third_party/python/dlmanager/dlmanager/manager.py
new file mode 100644
index 0000000000..3dce3b7838
--- /dev/null
+++ b/third_party/python/dlmanager/dlmanager/manager.py
@@ -0,0 +1,323 @@
+import os
+import requests
+import six
+import sys
+import tempfile
+import threading
+
+from contextlib import closing
+from six.moves.urllib.parse import urlparse
+
+from dlmanager import fs
+from dlmanager.persist_limit import PersistLimit
+
+
+class DownloadInterrupt(Exception):
+ "Raised when a download is interrupted."
+
+
+class Download(object):
+ """
+    Download is responsible for downloading one file in the background.
+
+ Example of use: ::
+
+ dl = Download(url, dest)
+ dl.start()
+ dl.wait() # this will block until completion / cancel / error
+
+    If a download fails or is canceled, the temporary dest is removed from
+ the disk.
+
+ Usually, Downloads are created by using :meth:`DownloadManager.download`.
+
+ :param url: the url of the file to download
+ :param dest: the local file path destination
+ :param finished_callback: a callback that will be called in the thread
+ when the thread work is done. Takes the download
+ instance as a parameter.
+    :param chunk_size: size of each chunk that will be read. The thread
+                       cannot be stopped while a chunk is being read.
+    :param session: a requests.Session instance that will do the real
+                     downloading work. If None, the `requests` module is used.
+    :param progress: a callable to report the progress (defaults to None).
+                     See :meth:`set_progress`.
+ """
+ def __init__(self, url, dest, finished_callback=None,
+ chunk_size=16 * 1024, session=None, progress=None):
+ self.thread = threading.Thread(
+ target=self._download,
+ args=(url, dest, finished_callback, chunk_size,
+ session or requests)
+ )
+ self._lock = threading.Lock()
+ self.__url = url
+ self.__dest = dest
+ self.__progress = progress
+ self.__canceled = False
+ self.__error = None
+
+ def start(self):
+ """
+ Start the thread that will do the download.
+ """
+ self.thread.start()
+
+ def cancel(self):
+ """
+ Cancel a previously started download.
+ """
+ self.__canceled = True
+
+ def is_canceled(self):
+ """
+ Returns True if we canceled this download.
+ """
+ return self.__canceled
+
+ def is_running(self):
+ """
+ Returns True if the downloading thread is running.
+ """
+ return self.thread.is_alive()
+
+ def wait(self, raise_if_error=True):
+ """
+ Block until the downloading thread is finished.
+
+ :param raise_if_error: if True (the default), :meth:`raise_if_error`
+ will be called and raise an error if any.
+ """
+ while self.thread.is_alive():
+ try:
+ # in case of exception here (like KeyboardInterrupt),
+ # cancel the task.
+ self.thread.join(0.02)
+ except:
+ self.cancel()
+ raise
+ # this will raise exception that may happen inside the thread.
+ if raise_if_error:
+ self.raise_if_error()
+
+ def error(self):
+ """
+ Returns None or a tuple of three values (type, value, traceback)
+ that give information about the exception.
+ """
+ return self.__error
+
+ def raise_if_error(self):
+ """
+ Raise an error if any. If the download was canceled, raise
+ :class:`DownloadInterrupt`.
+ """
+ if self.__error:
+ six.reraise(*self.__error)
+ if self.__canceled:
+ raise DownloadInterrupt()
+
+ def set_progress(self, progress):
+ """
+        Set a callable to report the progress of the download, or None to
+ disable any report.
+
+ The callable must take three parameters (download, current, total).
+        Note that this method is thread-safe; you can call it during a
+ download.
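+
+        Example (a minimal sketch; ``dl`` is assumed to be a started
+        :class:`Download` instance): ::
+
+            def on_progress(download, current, total):
+                if total:
+                    print("%d / %d bytes" % (current, total))
+
+            dl.set_progress(on_progress)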
+ """
+ with self._lock:
+ self.__progress = progress
+
+ def get_dest(self):
+ """
+ Returns the dest.
+ """
+ return self.__dest
+
+ def get_url(self):
+ """
+ Returns the url.
+ """
+ return self.__url
+
+ def _update_progress(self, current, total):
+ with self._lock:
+ if self.__progress:
+ self.__progress(self, current, total)
+
+ def _download(self, url, dest, finished_callback, chunk_size, session):
+ # save the file under a temporary name
+        # this avoids using a broken file in case things go really bad
+        # while downloading the file (e.g. the Python interpreter is killed
+        # abruptly)
+ temp = None
+ bytes_so_far = 0
+ try:
+ with closing(session.get(url, stream=True)) as response:
+ total_size = response.headers.get('Content-length', '').strip()
+ total_size = int(total_size) if total_size else None
+ self._update_progress(bytes_so_far, total_size)
+ # we use NamedTemporaryFile as raw open() call was causing
+ # issues on windows - see:
+ # https://bugzilla.mozilla.org/show_bug.cgi?id=1185756
+ with tempfile.NamedTemporaryFile(
+ delete=False,
+ suffix='.tmp',
+ dir=os.path.dirname(dest)) as temp:
+ for chunk in response.iter_content(chunk_size):
+ if self.is_canceled():
+ break
+ if chunk:
+ temp.write(chunk)
+ bytes_so_far += len(chunk)
+ self._update_progress(bytes_so_far, total_size)
+ response.raise_for_status()
+ except:
+ self.__error = sys.exc_info()
+ try:
+ if temp is None:
+ pass # not even opened the temp file, nothing to do
+ elif self.is_canceled() or self.__error:
+ fs.remove(temp.name)
+ else:
+ # if all goes well, then rename the file to the real dest
+ fs.remove(dest) # just in case it already existed
+ fs.move(temp.name, dest)
+ finally:
+ if finished_callback:
+ finished_callback(self)
+
+
+class DownloadManager(object):
+ """
+    DownloadManager is responsible for starting and managing downloads inside
+ a given directory. It will download a file only if a given filename
+ is not already there.
+
+    Note that background downloads need to be stopped. For example, if
+    an exception occurs while a download is running, Python will only
+    exit once the download finishes. To avoid that, a possible idiom
+    is: ::
+
+ def download_things(manager):
+ # do things with the manager
+ manager.download(url1, f1)
+ manager.download(url2, f2)
+ ...
+
+ manager = DownloadManager(destdir)
+ try:
+ download_things(manager)
+ finally:
+        # ensure we cancel all background downloads so that any
+        # remaining threads can end
+ manager.cancel()
+
+ :param destdir: a directory where files are downloaded. It will be created
+                    if it does not exist.
+ :param session: a requests session. If None, one will be created for you.
+ :param persist_limit: an instance of :class:`PersistLimit`, to allow
+ limiting the size of the download dir. Defaults
+ to None, meaning no limit.
+ """
+ def __init__(self, destdir, session=None, persist_limit=None):
+ self.destdir = destdir
+ self.session = session or requests.Session()
+ self._downloads = {}
+ self._lock = threading.Lock()
+ self.persist_limit = persist_limit or PersistLimit(0)
+ self.persist_limit.register_dir_content(self.destdir)
+
+ # if persist folder does not exist, create it
+ if not os.path.isdir(destdir):
+ os.makedirs(destdir)
+
+ def get_dest(self, fname):
+ return os.path.join(self.destdir, fname)
+
+ def cancel(self, cancel_if=None):
+ """
+ Cancel downloads, if any.
+
+        If cancel_if is given, it must be a callable that takes the download
+        instance as a parameter and returns True if the download needs to be
+        canceled.
+
+ Note that download threads won't be stopped directly.
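+
+        Example (a minimal sketch; ``manager`` is assumed to be a
+        :class:`DownloadManager` instance): ::
+
+            manager.cancel(cancel_if=lambda dl: "nightly" in dl.get_url())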
+ """
+ with self._lock:
+ for download in six.itervalues(self._downloads):
+ if cancel_if is None or cancel_if(download):
+ if download.is_running():
+ download.cancel()
+
+ def wait(self, raise_if_error=True):
+ """
+ Wait for all downloads to be finished.
+ """
+ for download in self._downloads.values():
+ download.wait(raise_if_error=raise_if_error)
+
+ def download(self, url, fname=None, progress=None):
+ """
+ Returns a started :class:`Download` instance, or None if fname is
+ already present in destdir.
+
+        If a download is already running for the given fname, it is simply
+        returned. Otherwise the download is created, started and returned.
+
+ :param url: url of the file to download.
+        :param fname: name to give the downloaded file. If None, it will
+                      be the name extracted from the url.
+ :param progress: a callable to report the download progress, or None.
+ See :meth:`Download.set_progress`.
+ """
+ if fname is None:
+ fname = urlparse(url).path.split('/')[-1]
+ dest = self.get_dest(fname)
+ with self._lock:
+            # if we are already downloading it, return the instance
+ if dest in self._downloads:
+ dl = self._downloads[dest]
+ if progress:
+ dl.set_progress(progress)
+ return dl
+
+ if os.path.exists(dest):
+ return None
+
+        # else create the download (it will be automatically removed from
+        # the list on completion), start it, and return it.
+ with self._lock:
+ download = Download(url, dest,
+ session=self.session,
+ finished_callback=self._download_finished,
+ progress=progress)
+ self._downloads[dest] = download
+ download.start()
+ self._download_started(download)
+ return download
+
+ def _download_started(self, dl):
+ """
+ Useful when sub-classing. Report the start event of a download.
+
+ :param dl: The :class:`Download` instance.
+ """
+ pass
+
+ def _download_finished(self, dl):
+ """
+ Useful when sub-classing. Report the end of a download.
+
+ Note that this is executed in the download thread. Also, you should
+ make sure to call the base implementation.
+
+ :param dl: The :class:`Download` instance.
+ """
+ with self._lock:
+ dest = dl.get_dest()
+ del self._downloads[dest]
+ self.persist_limit.register_file(dest)
+ self.persist_limit.remove_old_files()
diff --git a/third_party/python/dlmanager/dlmanager/persist_limit.py b/third_party/python/dlmanager/dlmanager/persist_limit.py
new file mode 100644
index 0000000000..03a1829f70
--- /dev/null
+++ b/third_party/python/dlmanager/dlmanager/persist_limit.py
@@ -0,0 +1,65 @@
+import os
+import stat
+
+from collections import namedtuple
+from glob import glob
+
+from dlmanager import fs
+
+
+File = namedtuple('File', ('path', 'stat'))
+
+
+class PersistLimit(object):
+ """
+ Keep a list of files, removing the oldest ones when the size_limit
+ is reached.
+
+    The access time of a file is used to determine the oldest ones, i.e. the
+ last time a file was read.
+
+ :param size_limit: the size limit in bytes. A value of 0 means no limit.
+    :param file_limit: even if the size limit is reached, this forces
+                       keeping at least *file_limit* files.
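+
+    Example (a minimal sketch; the directory path is illustrative): ::
+
+        limit = PersistLimit(size_limit=50 * 1024 * 1024, file_limit=5)
+        limit.register_dir_content("/path/to/downloads")
+        limit.remove_old_files()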
+ """
+ def __init__(self, size_limit, file_limit=5):
+ self.size_limit = size_limit
+ self.file_limit = file_limit
+ self.files = []
+ self._files_size = 0
+
+ def register_file(self, path):
+ """
+        Register a single file.
+ """
+ try:
+ fstat = os.stat(path)
+ except OSError:
+            # the file probably does not exist, just skip it
+            # note this happens when background downloads are canceled
+ return
+ if stat.S_ISREG(fstat.st_mode):
+ self.files.append(File(path=path, stat=fstat))
+ self._files_size += fstat.st_size
+
+ def register_dir_content(self, directory, pattern="*"):
+ """
+        Register every file in a directory that matches *pattern*.
+ """
+ for path in glob(os.path.join(directory, pattern)):
+ self.register_file(path)
+
+ def remove_old_files(self):
+ """
+        Remove the oldest registered files.
+ """
+ if self.size_limit <= 0 or self.file_limit <= 0:
+ return
+        # sort by access time, oldest first
+ files = sorted(self.files, key=lambda f: f.stat.st_atime)
+ while len(files) > self.file_limit and \
+ self._files_size >= self.size_limit:
+ f = files.pop(0)
+ fs.remove(f.path)
+ self._files_size -= f.stat.st_size
+ self.files = files
diff --git a/third_party/python/dlmanager/doc/Makefile b/third_party/python/dlmanager/doc/Makefile
new file mode 100644
index 0000000000..6b477bf459
--- /dev/null
+++ b/third_party/python/dlmanager/doc/Makefile
@@ -0,0 +1,216 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " applehelp to make an Apple Help Book"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+ @echo " coverage to run coverage check of the documentation (if enabled)"
+
+.PHONY: clean
+clean:
+ rm -rf $(BUILDDIR)/*
+
+.PHONY: html
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+.PHONY: dirhtml
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+.PHONY: singlehtml
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+.PHONY: pickle
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+.PHONY: json
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+.PHONY: htmlhelp
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+.PHONY: qthelp
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/dlmanager.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/dlmanager.qhc"
+
+.PHONY: applehelp
+applehelp:
+ $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
+ @echo
+ @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
+ @echo "N.B. You won't be able to view it unless you put it in" \
+ "~/Library/Documentation/Help or install it in your application" \
+ "bundle."
+
+.PHONY: devhelp
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/dlmanager"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/dlmanager"
+ @echo "# devhelp"
+
+.PHONY: epub
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+.PHONY: latex
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+.PHONY: latexpdf
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+.PHONY: latexpdfja
+latexpdfja:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+.PHONY: text
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+.PHONY: man
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+.PHONY: texinfo
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+.PHONY: info
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+.PHONY: gettext
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+.PHONY: changes
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+.PHONY: linkcheck
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+.PHONY: doctest
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+.PHONY: coverage
+coverage:
+ $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
+ @echo "Testing of coverage in the sources finished, look at the " \
+ "results in $(BUILDDIR)/coverage/python.txt."
+
+.PHONY: xml
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+.PHONY: pseudoxml
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/third_party/python/dlmanager/doc/api.rst b/third_party/python/dlmanager/doc/api.rst
new file mode 100644
index 0000000000..295ce7c1fa
--- /dev/null
+++ b/third_party/python/dlmanager/doc/api.rst
@@ -0,0 +1,25 @@
+API
+===
+
+DownloadManager
+---------------
+
+.. currentmodule:: dlmanager
+
+.. autoclass:: DownloadManager
+ :members:
+
+Download
+--------
+
+.. autoclass:: Download
+ :members:
+
+.. autoclass:: DownloadInterrupt
+ :members:
+
+PersistLimit
+------------
+
+.. autoclass:: PersistLimit
+ :members:
diff --git a/third_party/python/dlmanager/doc/conf.py b/third_party/python/dlmanager/doc/conf.py
new file mode 100644
index 0000000000..80bb5172d2
--- /dev/null
+++ b/third_party/python/dlmanager/doc/conf.py
@@ -0,0 +1,289 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# dlmanager documentation build configuration file, created by
+# sphinx-quickstart on Fri Feb 19 11:22:21 2016.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('..'))
+
+from dlmanager import __version__
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.viewcode',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'dlmanager'
+copyright = u'2016, Julien Pagès'
+author = u'Julien Pagès'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = __version__
+# The full version, including alpha/beta/rc tags.
+release = version
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'sphinx_rtd_theme'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
+# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
+#html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+#html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+#html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'dlmanagerdoc'
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+
+# Latex figure (float) alignment
+#'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (master_doc, 'dlmanager.tex', 'dlmanager Documentation',
+ 'Julien Pagès', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (master_doc, 'dlmanager', 'dlmanager Documentation',
+ [author], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (master_doc, 'dlmanager', 'dlmanager Documentation',
+ author, 'dlmanager', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
diff --git a/third_party/python/dlmanager/doc/index.rst b/third_party/python/dlmanager/doc/index.rst
new file mode 100644
index 0000000000..c585e573ad
--- /dev/null
+++ b/third_party/python/dlmanager/doc/index.rst
@@ -0,0 +1,26 @@
+.. dlmanager documentation master file, created by
+ sphinx-quickstart on Fri Feb 19 11:22:21 2016.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Welcome to dlmanager's documentation!
+=====================================
+
+**dlmanager** is a Python 2 and 3 download manager library. It is hosted
+`on github <https://github.com/parkouss/dlmanager>`_.
+
+Contents:
+
+.. toctree::
+ :maxdepth: 2
+
+ api
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/third_party/python/dlmanager/doc/make.bat b/third_party/python/dlmanager/doc/make.bat
new file mode 100644
index 0000000000..5bcee17fab
--- /dev/null
+++ b/third_party/python/dlmanager/doc/make.bat
@@ -0,0 +1,263 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. texinfo to make Texinfo files
+ echo. gettext to make PO message catalogs
+ echo. changes to make an overview over all changed/added/deprecated items
+ echo. xml to make Docutils-native XML files
+ echo. pseudoxml to make pseudoxml-XML files for display purposes
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ echo. coverage to run coverage check of the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+
+REM Check if sphinx-build is available and fallback to Python version if any
+%SPHINXBUILD% 1>NUL 2>NUL
+if errorlevel 9009 goto sphinx_python
+goto sphinx_ok
+
+:sphinx_python
+
+set SPHINXBUILD=python -m sphinx.__init__
+%SPHINXBUILD% 2> nul
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.http://sphinx-doc.org/
+ exit /b 1
+)
+
+:sphinx_ok
+
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\dlmanager.qhcp
+ echo.To view the help file:
+ echo.^> assistant -collectionFile %BUILDDIR%\qthelp\dlmanager.ghc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "latexpdf" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ cd %BUILDDIR%/latex
+ make all-pdf
+ cd %~dp0
+ echo.
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "latexpdfja" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ cd %BUILDDIR%/latex
+ make all-pdf-ja
+ cd %~dp0
+ echo.
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "texinfo" (
+ %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+ goto end
+)
+
+if "%1" == "gettext" (
+ %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+if "%1" == "coverage" (
+ %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of coverage in the sources finished, look at the ^
+results in %BUILDDIR%/coverage/python.txt.
+ goto end
+)
+
+if "%1" == "xml" (
+ %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The XML files are in %BUILDDIR%/xml.
+ goto end
+)
+
+if "%1" == "pseudoxml" (
+ %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
+ goto end
+)
+
+:end
diff --git a/third_party/python/dlmanager/examples/dl_progressbar.py b/third_party/python/dlmanager/examples/dl_progressbar.py
new file mode 100644
index 0000000000..98c36d55b6
--- /dev/null
+++ b/third_party/python/dlmanager/examples/dl_progressbar.py
@@ -0,0 +1,41 @@
+import argparse
+
+# for python 3, use https://github.com/coagulant/progressbar-python3
+from progressbar import ProgressBar, Percentage, RotatingMarker, ETA, \
+ FileTransferSpeed, Bar
+
+from six.moves.urllib.parse import urlparse
+
+from dlmanager import Download
+
+
+def parse_args(argv=None):
+ parser = argparse.ArgumentParser()
+ parser.add_argument("url", help="url to download")
+ return parser.parse_args(argv)
+
+
+def download_file(url, dest=None):
+ if dest is None:
+ dest = urlparse(url).path.split('/')[-1]
+
+ widgets = ['Download: ', Percentage(), ' ', Bar(marker=RotatingMarker()),
+ ' ', ETA(), ' ', FileTransferSpeed()]
+ bar = ProgressBar(widgets=widgets).start()
+
+ def download_progress(_, current, total):
+ bar.maxval = total
+ bar.update(current)
+
+ dl = Download(url, dest, progress=download_progress)
+ dl.start()
+ dl.wait()
+ bar.finish()
+
+
+if __name__ == '__main__':
+ options = parse_args()
+ try:
+ download_file(options.url)
+ except KeyboardInterrupt:
+ print("\nInterrupted.")
diff --git a/third_party/python/dlmanager/examples/dl_tqdm.py b/third_party/python/dlmanager/examples/dl_tqdm.py
new file mode 100644
index 0000000000..a4e458a415
--- /dev/null
+++ b/third_party/python/dlmanager/examples/dl_tqdm.py
@@ -0,0 +1,45 @@
+import argparse
+import tqdm
+
+from six.moves.urllib.parse import urlparse
+
+from dlmanager import Download
+
+
+def parse_args(argv=None):
+ parser = argparse.ArgumentParser()
+ parser.add_argument("url", help="url to download")
+ return parser.parse_args(argv)
+
+
+def download_progress(bar):
+ last_b = [0]
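+    # last_b is a one-element list so the nested callback below can update
+    # it in place (Python 2 has no 'nonlocal' statement).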
+
+ def inner(_, current, total):
+ if total is not None:
+ bar.total = total
+ delta = current - last_b[0]
+ last_b[0] = current
+
+ if delta > 0:
+ bar.update(delta)
+ return inner
+
+
+def download_file(url, dest=None):
+ if dest is None:
+ dest = urlparse(url).path.split('/')[-1]
+
+ with tqdm.tqdm(unit='B', unit_scale=True, miniters=1, dynamic_ncols=True,
+ desc=dest) as bar:
+ dl = Download(url, dest, progress=download_progress(bar))
+ dl.start()
+ dl.wait()
+
+
+if __name__ == '__main__':
+ options = parse_args()
+ try:
+ download_file(options.url)
+ except KeyboardInterrupt:
+ print("\nInterrupted.")
diff --git a/third_party/python/dlmanager/requirements.txt b/third_party/python/dlmanager/requirements.txt
new file mode 100644
index 0000000000..640e3d44a6
--- /dev/null
+++ b/third_party/python/dlmanager/requirements.txt
@@ -0,0 +1,2 @@
+requests
+six
diff --git a/third_party/python/dlmanager/setup.cfg b/third_party/python/dlmanager/setup.cfg
new file mode 100644
index 0000000000..3c6e79cf31
--- /dev/null
+++ b/third_party/python/dlmanager/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal=1
diff --git a/third_party/python/dlmanager/setup.py b/third_party/python/dlmanager/setup.py
new file mode 100644
index 0000000000..b2a8fd392d
--- /dev/null
+++ b/third_party/python/dlmanager/setup.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+
+import os
+import re
+import sys
+from setuptools import setup
+from setuptools.command.test import test as TestCommand
+
+HERE = os.path.dirname(os.path.realpath(__file__))
+
+
+class PyTest(TestCommand):
+ """
+ Run py.test with the "python setup.py test command"
+ """
+ user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
+
+ def initialize_options(self):
+ TestCommand.initialize_options(self)
+ self.pytest_args = ''
+
+ def finalize_options(self):
+ TestCommand.finalize_options(self)
+ self.pytest_args += (' ' + self.distribution.test_suite)
+
+ def run_tests(self):
+ import pytest
+ errno = pytest.main(self.pytest_args)
+ sys.exit(errno)
+
+
+def read(*parts):
+ with open(os.path.join(HERE, *parts)) as f:
+ return f.read()
+
+
+def parse_requirements(data, exclude=()):
+ return [line for line in data.splitlines()
+ if line and not line.startswith("#") and line not in exclude]
+
+
+def version():
+ return re.findall(r"__version__ = \"([\d.]+)\"",
+ read("dlmanager", "__init__.py"))[0]
+
+setup(
+ name="dlmanager",
+ version=version(),
+ description="download manager library",
+ long_description=read("README.rst"),
+ author="Julien Pagès",
+ author_email="j.parkouss@gmail.com",
+ url="http://github.com/parkouss/dlmanager",
+ license="GPL/LGPL",
+ install_requires=parse_requirements(read("requirements.txt")),
+ cmdclass={'test': PyTest},
+ tests_require=parse_requirements(read("requirements.txt"),
+ exclude=("-e .",)),
+ test_suite='tests',
+)
diff --git a/third_party/python/dlmanager/test-requirements.txt b/third_party/python/dlmanager/test-requirements.txt
new file mode 100644
index 0000000000..a4db4b7672
--- /dev/null
+++ b/third_party/python/dlmanager/test-requirements.txt
@@ -0,0 +1,7 @@
+-e .
+mock
+pytest
+pytest-mock
+flake8
+coverage
+unittest2; python_version < '2.7'
diff --git a/third_party/python/dlmanager/tests/__init__.py b/third_party/python/dlmanager/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/dlmanager/tests/__init__.py
diff --git a/third_party/python/dlmanager/tests/test_manager.py b/third_party/python/dlmanager/tests/test_manager.py
new file mode 100644
index 0000000000..f0ade9021f
--- /dev/null
+++ b/third_party/python/dlmanager/tests/test_manager.py
@@ -0,0 +1,251 @@
+try:
+ import unittest2 as unittest # python < 2.7 compat
+except ImportError:
+ import unittest
+import tempfile
+import shutil
+import os
+import time
+import six
+from mock import Mock
+
+from dlmanager import manager as download_manager
+
+
+def mock_session():
+ response = Mock()
+ session = Mock(get=Mock(return_value=response))
+ return session, response
+
+
+def mock_response(response, data, wait=0):
+ data = six.b(data)
+
+ def iter_content(chunk_size=4):
+ rest = data
+ while rest:
+ time.sleep(wait)
+ chunk = rest[:chunk_size]
+ rest = rest[chunk_size:]
+ yield chunk
+
+ response.headers = {'Content-length': str(len(data))}
+ response.iter_content = iter_content
+
+
+class TestDownload(unittest.TestCase):
+ def setUp(self):
+ self.tempdir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.tempdir)
+ self.finished = Mock()
+ self.session, self.session_response = mock_session()
+ self.tempfile = os.path.join(self.tempdir, 'dest')
+ self.dl = download_manager.Download('http://url', self.tempfile,
+ finished_callback=self.finished,
+ chunk_size=4,
+ session=self.session)
+
+ def test_creation(self):
+ self.assertFalse(self.dl.is_canceled())
+ self.assertFalse(self.dl.is_running())
+ self.assertIsNone(self.dl.error())
+ self.assertEquals(self.dl.get_url(), 'http://url')
+ self.assertEquals(self.dl.get_dest(), self.tempfile)
+
+ def create_response(self, data, wait=0):
+ mock_response(self.session_response, data, wait)
+
+ def test_download(self):
+ self.create_response('1234' * 4, 0.01)
+
+ # no file present yet
+ self.assertFalse(os.path.exists(self.tempfile))
+
+ self.dl.start()
+ self.assertTrue(self.dl.is_running())
+ self.dl.wait()
+
+ self.assertFalse(self.dl.is_running())
+ self.finished.assert_called_with(self.dl)
+ # file has been downloaded
+ with open(self.tempfile) as f:
+ self.assertEquals(f.read(), '1234' * 4)
+
+ def test_download_cancel(self):
+ self.create_response('1234' * 1000, wait=0.01)
+
+ start = time.time()
+ self.dl.start()
+ time.sleep(0.1)
+ self.dl.cancel()
+
+ with self.assertRaises(download_manager.DownloadInterrupt):
+ self.dl.wait()
+
+ self.assertTrue(self.dl.is_canceled())
+
+ # response generation should have taken 1000 * 0.01 = 10 seconds.
+ # since we canceled, this must be lower.
+ self.assertTrue((time.time() - start) < 1.0)
+
+ # file was deleted
+ self.assertFalse(os.path.exists(self.tempfile))
+ # finished callback was called
+ self.finished.assert_called_with(self.dl)
+
+ def test_download_with_progress(self):
+ data = []
+
+ def update_progress(_dl, current, total):
+ data.append((_dl, current, total))
+
+ self.create_response('1234' * 4)
+
+ self.dl.set_progress(update_progress)
+ self.dl.start()
+ self.dl.wait()
+
+ self.assertEquals(data, [
+ (self.dl, 0, 16),
+ (self.dl, 4, 16),
+ (self.dl, 8, 16),
+ (self.dl, 12, 16),
+ (self.dl, 16, 16),
+ ])
+ # file has been downloaded
+ with open(self.tempfile) as f:
+ self.assertEquals(f.read(), '1234' * 4)
+ # finished callback was called
+ self.finished.assert_called_with(self.dl)
+
+ def test_download_error_in_thread(self):
+ self.session_response.headers = {'Content-length': '24'}
+ self.session_response.iter_content.side_effect = IOError
+
+ self.dl.start()
+ with self.assertRaises(IOError):
+ self.dl.wait()
+
+ self.assertEquals(self.dl.error()[0], IOError)
+ # finished callback was called
+ self.finished.assert_called_with(self.dl)
+
+ def test_wait_does_not_block_on_exception(self):
+        # this tests the case where a user may hit CTRL-C, for example,
+        # during a dl.wait() call.
+ self.create_response('1234' * 1000, wait=0.01)
+
+ original_join = self.dl.thread.join
+ it = iter('123')
+
+ def join(timeout=None):
+ next(it) # will throw StopIteration after a few calls
+ original_join(timeout)
+
+ self.dl.thread.join = join
+
+ start = time.time()
+ self.dl.start()
+
+ with self.assertRaises(StopIteration):
+ self.dl.wait()
+
+ self.assertTrue(self.dl.is_canceled())
+ # wait for the thread to finish
+ original_join()
+
+ # response generation should have taken 1000 * 0.01 = 10 seconds.
+ # since we got an error, this must be lower.
+ self.assertTrue((time.time() - start) < 1.0)
+
+ # file was deleted
+ self.assertFalse(os.path.exists(self.tempfile))
+ # finished callback was called
+ self.finished.assert_called_with(self.dl)
+
+
+class TestDownloadManager(unittest.TestCase):
+ def setUp(self):
+ self.tempdir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, self.tempdir)
+
+ self.dl_manager = download_manager.DownloadManager(self.tempdir)
+
+ def do_download(self, url, fname, data, wait=0):
+ session, response = mock_session()
+ mock_response(response, data, wait)
+ # patch the session, so the download will use that
+ self.dl_manager.session = session
+ return self.dl_manager.download(url, fname)
+
+ def test_download(self):
+ dl1 = self.do_download('http://foo', 'foo', 'hello' * 4, wait=0.02)
+ self.assertIsInstance(dl1, download_manager.Download)
+ self.assertTrue(dl1.is_running())
+
+ # with the same fname, no new download is started. The same instance
+ # is returned since the download is running.
+ dl2 = self.do_download('http://bar', 'foo', 'hello2' * 4, wait=0.02)
+ self.assertEquals(dl1, dl2)
+
+ # starting a download with another fname will trigger a new download
+ dl3 = self.do_download('http://bar', 'foo2', 'hello you' * 4)
+ self.assertIsInstance(dl3, download_manager.Download)
+ self.assertNotEquals(dl3, dl1)
+
+ # let's wait for the downloads to finish
+ dl3.wait()
+ dl1.wait()
+
+ # now if we try to download a fname that exists, None is returned
+ dl4 = self.do_download('http://bar', 'foo', 'hello2' * 4, wait=0.02)
+ self.assertIsNone(dl4)
+
+ # downloaded files are what is expected
+ def content(fname):
+ with open(os.path.join(self.tempdir, fname)) as f:
+ return f.read()
+ self.assertEquals(content('foo'), 'hello' * 4)
+ self.assertEquals(content('foo2'), 'hello you' * 4)
+
+ # download instances are removed from the manager (internal test)
+ self.assertEquals(self.dl_manager._downloads, {})
+
+ def test_cancel(self):
+ dl1 = self.do_download('http://foo', 'foo', 'foo' * 50000, wait=0.02)
+ dl2 = self.do_download('http://foo', 'bar', 'bar' * 50000, wait=0.02)
+ dl3 = self.do_download('http://foo', 'foobar', 'foobar' * 4)
+
+ # let's cancel only one
+ def cancel_if(dl):
+ if os.path.basename(dl.get_dest()) == 'foo':
+ return True
+ self.dl_manager.cancel(cancel_if=cancel_if)
+
+ self.assertTrue(dl1.is_canceled())
+ self.assertFalse(dl2.is_canceled())
+ self.assertFalse(dl3.is_canceled())
+
+ # wait for dl3
+ dl3.wait()
+
+ # cancel everything
+ self.dl_manager.cancel()
+
+ self.assertTrue(dl1.is_canceled())
+ self.assertTrue(dl2.is_canceled())
+ # dl3 is not canceled since it finished before
+ self.assertFalse(dl3.is_canceled())
+
+ # wait for the completion of dl1 and dl2 threads
+ dl1.wait(raise_if_error=False)
+ dl2.wait(raise_if_error=False)
+
+ # at the end, only dl3 has been downloaded
+ self.assertEquals(os.listdir(self.tempdir), ["foobar"])
+
+ with open(os.path.join(self.tempdir, 'foobar')) as f:
+ self.assertEquals(f.read(), 'foobar' * 4)
+
+ # download instances are removed from the manager (internal test)
+ self.assertEquals(self.dl_manager._downloads, {})
diff --git a/third_party/python/dlmanager/tests/test_persist_limit.py b/third_party/python/dlmanager/tests/test_persist_limit.py
new file mode 100644
index 0000000000..1d899a46f2
--- /dev/null
+++ b/third_party/python/dlmanager/tests/test_persist_limit.py
@@ -0,0 +1,56 @@
+import pytest
+import os
+import tempfile
+import time
+import six
+
+from dlmanager import fs
+from dlmanager.persist_limit import PersistLimit
+
+
+class TempCreator(object):
+ def __init__(self):
+ self.tempdir = tempfile.mkdtemp()
+
+ def list(self):
+ return os.listdir(self.tempdir)
+
+ def create_file(self, name, size, delay):
+ fname = os.path.join(self.tempdir, name)
+ with open(fname, 'wb') as f:
+ f.write(six.b('a' * size))
+ # equivalent to touch, but we apply a delay for the test
+ atime = time.time() + delay
+ os.utime(fname, (atime, atime))
+
+
+@pytest.yield_fixture
+def temp():
+ tmp = TempCreator()
+ yield tmp
+ fs.remove(tmp.tempdir)
+
+
+@pytest.mark.parametrize("size_limit,file_limit,files", [
+    # file_limit is always respected
+ (10, 5, "bcdef"),
+ (10, 3, "def"),
+ # if size_limit or file_limit is 0, nothing is removed
+ (0, 5, "abcdef"),
+ (5, 0, "abcdef"),
+    # size_limit works
+ (35, 1, "def"),
+])
+def test_persist_limit(temp, size_limit, file_limit, files):
+ temp.create_file("a", 10, -6)
+ temp.create_file("b", 10, -5)
+ temp.create_file("c", 10, -4)
+ temp.create_file("d", 10, -3)
+ temp.create_file("e", 10, -2)
+ temp.create_file("f", 10, -1)
+
+ persist_limit = PersistLimit(size_limit, file_limit)
+ persist_limit.register_dir_content(temp.tempdir)
+ persist_limit.remove_old_files()
+
+ assert ''.join(sorted(temp.list())) == ''.join(sorted(files))
diff --git a/third_party/python/ecdsa/LICENSE b/third_party/python/ecdsa/LICENSE
new file mode 100644
index 0000000000..474479a2ce
--- /dev/null
+++ b/third_party/python/ecdsa/LICENSE
@@ -0,0 +1,24 @@
+"python-ecdsa" Copyright (c) 2010 Brian Warner
+
+Portions written in 2005 by Peter Pearson and placed in the public domain.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/third_party/python/ecdsa/MANIFEST.in b/third_party/python/ecdsa/MANIFEST.in
new file mode 100644
index 0000000000..a728ebd8f3
--- /dev/null
+++ b/third_party/python/ecdsa/MANIFEST.in
@@ -0,0 +1,3 @@
+# basic metadata
+include MANIFEST.in LICENSE NEWS README.md versioneer.py
+include src/ecdsa/_version.py
diff --git a/third_party/python/ecdsa/NEWS b/third_party/python/ecdsa/NEWS
new file mode 100644
index 0000000000..682fd5f9de
--- /dev/null
+++ b/third_party/python/ecdsa/NEWS
@@ -0,0 +1,213 @@
+* Release 0.15 (02 Jan 2020)
+
+Bug fixes:
+`from curves import *` will now correctly import `BRAINPOOLP256r1` and
+`BRAINPOOLP320r1` curves.
+
+New features:
+ECDH operations have a public explicit API.
+Large hashes are now supported with small curves (e.g. SHA-256 can be used
+with NIST192p).
+`VerifyingKey` now supports the `precompute()` method to further speed up
+signature verification with the given instance of the key.
+
+New API:
+`VerifyingKey`, `SigningKey`, `Public_key`, `Private_key` and
+`CurveFp` now have `__eq__` methods.
+`ecdsa.ecdh` module and `ECDH` class.
+`PointJacobi` added.
+`VerifyingKey.verify_digest`, `SigningKey.sign_digest` and
+`SigningKey.sign_digest_deterministic` methods now accept `allow_truncate`
+argument to enable use of hashes larger than the curve order.
+`VerifyingKey` `from_pem` and `from_der` now accept `hashfunc` parameter
+like other `from*` methods.
+`VerifyingKey` has `precompute` method now.
+`VerifyingKey.from_public_point` can now skip validation of the public
+point when the `validate_point=False` argument is passed to the method.
+`CurveFp` constructor now accepts the `h` parameter - the cofactor of the
+elliptic curve; it's used to select the algorithm for public point
+verification.
+
+Performance:
+`randrange` will now perform far fewer calls to the system random number
+generator.
+`PointJacobi` introduced and used as the underlying implementation; speeds up
+the library by a factor of about 20.
+The library now has optional dependencies on `gmpy` and `gmpy2`. When they are
+available, the elliptic curve calculations will be about 3 times faster.
+
+Maintenance:
+expected minimum version of `six` module (1.9.0) is now specified explicitly
+in `setup.py` and tested against.
+Significantly faster test suite execution.
+
+* Release 0.14.1 (06 Nov 2019)
+
+Remove the obsolete `six.py` file from wheel
+
+* Release 0.14 (06 Nov 2019)
+
+Bug fixes:
+Strict checking of DER requirements when parsing SEQUENCE, INTEGER,
+OBJECT IDENTIFIER and BITSTRING objects.
+DER parsers now consistently raise `UnexpectedDER` exception on malformed DER
+encoded byte strings.
+Make sure that both malformed and invalid signatures raise `BadSignatureError`.
+Ensure that all `SigningKey` and `VerifyingKey` methods that should accept
+bytes-like objects actually do accept them (also avoid copying input strings).
+Make `SigningKey.sign_digest_deterministic` use default object hashfunc when
+none was provided.
+`encode_integer` now works for large integers.
+Make `encode_oid` and `remove_object` correctly handle OBJECT IDENTIFIERs
+with large second subidentifier and padding in encoded subidentifiers.
+
+New features:
+Deterministic signature methods now accept `extra_entropy` parameter to further
+randomise the selection of `k` (the nonce) for signature, as specified in
+RFC6979.
+Recovery of public key from signature is now supported.
+Support for SEC1/X9.62 formatted keys; all three encodings are supported:
+"uncompressed", "compressed" and "hybrid". Both string and PEM/DER decoding
+will automatically accept them, if the size of the key matches the curve.
+Benchmarking application now provides performance numbers that are easier to
+compare against OpenSSL.
+Support for all Brainpool curves (non-twisted).
+
+New API:
+`CurveFp`: `__str__` is now supported.
+`SigningKey.sign_deterministic`, `SigningKey.sign_digest_deterministic` and
+`generate_k`: extra_entropy parameter was added
+`Signature.recover_public_keys` was added
+`VerifyingKey.from_public_key_recovery` and
+`VerifyingKey.from_public_key_recovery_with_digest` were added
+`VerifyingKey.to_string`: `encoding` parameter was added
+`VerifyingKey.to_der` and `SigningKey.to_der`: `point_encoding` parameter was
+added.
+`encode_bitstring`: `unused` parameter was added
+`remove_bitstring`: `expect_unused` parameter was added
+`SECP256k1` is now part of `curves` `*` import
+`Curves`: `__repr__` is now supported
+`VerifyingKey`: `__repr__` is now supported
+
+Deprecations:
+Python 2.5 is not supported any more - dead code removal.
+`from ecdsa.keys import *` will now import only objects defined in that module.
+Trying to decode a malformed point using `VerifyingKey.from_string`
+will now raise the `MalformedPointError` exception (which inherits from
+`AssertionError` but is distinct from it).
+Multiple functions in `numbertheory` are considered deprecated: `phi`,
+`carmichael`, `carmichael_of_factorized`, `carmichael_of_ppower`,
+`order_mod`, `largest_factor_relatively_prime`, `kinda_order_mod`. They will
+now emit `DeprecationWarning` when used. Run the application or test suite
+with `-Wd` option or with `PYTHONWARNINGS=default` environment variable to
+verify if those methods are not used. They will be removed completely in a
+future release.
+`encode_bitstring` and `decode_bitstring` expect the number of unused
+bits to be passed as an argument now. They will emit `DeprecationWarning`
+if they are used in the deprecated way.
+modular_exp: will emit `DeprecationWarning`
+
+Hardening:
+Deterministic signatures now verify that the signature won't leak private
+key through very unlikely selection of `k` value (the nonce).
+Nonce bit size hiding was added (hardening against Minerva attack). Please
+note that it DOES NOT make library secure against side channel attacks (timing
+attacks).
+
+Performance:
+The public key in key generation is no longer verified twice, making key
+generation and private key reading about 33% faster.
+Microoptimisation to `inverse_mod` function, increasing performance by about
+40% for all operations.
+
+Maintenance:
+Extended test coverage to newer python versions.
+Fixes to examples in README.md: correct commands, more correct code (now works
+on Python 3).
+Stopped bundling `six`
+Moved sources into `src` subdirectory
+Made benchmarking script standalone (runnable either with `tox -e speed`, or
+after installation, with `python speed.py`)
+Now test coverage reported to coveralls is branch coverage, not line coverage
+Autodetection of curves supported by OpenSSL (test suite compatibility with
+Fedora OpenSSL package).
+More readable error messages (exceptions) in `der` module.
+Documentation to `VerifyingKey`, `SigningKey` and signature encoder/decoder
+functions added.
+Added measuring and verifying condition coverage to Continuous Integration.
+Big clean-up of the test suite, use pytest parametrisation and hypothesis
+for better test coverage and more precise failure reporting.
+Use platform-provided `math.gcd`, when provided.
+
+* Release 0.13.3 (07 Oct 2019)
+
+Fix CVE-2019-14853 - possible DoS caused by malformed signature decoding and
+signature malleability.
+
+Also harden key decoding from string and DER encodings.
+
+* Release 0.13.2 (17 Apr 2019)
+
+Restore compatibility of setup.py with Python 2.6 and 2.7.
+
+* Release 0.13.1 (17 Apr 2019)
+
+Fix the PyPI wheel - the old version included .pyc files.
+
+* Release 0.13 (07 Feb 2015)
+
+Fix the argument order for Curve constructor (put openssl_name= at the end,
+with a default value) to unbreak compatibility with external callers who used
+the 0.11 convention.
+
+* Release 0.12 (06 Feb 2015)
+
+Switch to Versioneer for version-string management (fixing the broken
+`ecdsa.__version__` attribute). Add Curve.openssl_name property. Mention
+secp256k1 in README, test against OpenSSL. Produce "wheel" distributions. Add
+py3.4 and pypy3 compatibility testing. Other minor fixes.
+
+* Release 0.11 (10 Mar 2014)
+
+Add signature-encoding functions "sigencode_{strings,string,der}_canonize"
+which canonicalize the S value (using the smaller of the two possible
+values). Add "validate_point=" argument to VerifyingKey.from_string()
+constructor (defaults to True) which can be used to disable time-consuming
+point validation when importing a pre-validated verifying key. Drop python2.5
+support (untested but not explicitly broken yet), update trove classifiers.
+
+* Release 0.10 (23 Oct 2013)
+
+Make the secp256k1 available in __init__.py too (thanks to Scott Bannert).
+
+* Release 0.9 (01 Oct 2013)
+
+Add secp256k1 curve (thanks to Benjamin Dauvergne). Add deterministic (no
+entropy needed) signatures (thanks to slush). Added py3.2/py3.3 compatibility
+(thanks to Elizabeth Myers).
+
+* Release 0.8 (04 Oct 2011)
+
+Small API addition: accept a hashfunc= argument in the constructors for
+SigningKey and VerifyingKey. This makes it easier to write wrappers that e.g.
+use NIST256p and SHA256 without obligating callers to pass
+hashfunc=sha256 each time they call sign() or verify().
+
+* Release 0.7 (28 Nov 2010)
+
+Fix test failure against OpenSSL-1.0.0 (previous versions only worked against
+openssl-0.9.8 or earlier). Increase python requirement to py2.5 or later
+(still no py3 compatibility, but work is underway). Replace use of obsolete
+'sha' library with modern 'hashlib'. Clean up unit test runner (stop using
+subprocesses).
+
+* Release 0.6 (15 Oct 2010)
+
+Small packaging changes: extract version number from git, add 'setup.py test'
+command, set exit code correctly on test failure. Fix pyflakes warnings.
+
+* Release 0.5 (27 Apr 2010)
+
+Initial release. EC-DSA signature for five NIST "Suite B" GF(p) curves:
+prime192v1, secp224r1, prime256v1, secp384r1, and secp521r1. DER/PEM
+input/output functions, seed-to-randrange helper functions.
diff --git a/third_party/python/ecdsa/PKG-INFO b/third_party/python/ecdsa/PKG-INFO
new file mode 100644
index 0000000000..06619f9663
--- /dev/null
+++ b/third_party/python/ecdsa/PKG-INFO
@@ -0,0 +1,620 @@
+Metadata-Version: 2.1
+Name: ecdsa
+Version: 0.15
+Summary: ECDSA cryptographic signature library (pure python)
+Home-page: http://github.com/warner/python-ecdsa
+Author: Brian Warner
+Author-email: warner@lothar.com
+License: MIT
+Description: # Pure-Python ECDSA
+
+ [![build status](https://travis-ci.org/warner/python-ecdsa.png)](http://travis-ci.org/warner/python-ecdsa)
+ [![Coverage Status](https://coveralls.io/repos/warner/python-ecdsa/badge.svg)](https://coveralls.io/r/warner/python-ecdsa)
+ [![condition coverage](https://img.shields.io/badge/condition%20coverage-81%25-yellow)](https://travis-ci.org/warner/python-ecdsa/jobs/626479178#L776)
+ [![Latest Version](https://img.shields.io/pypi/v/ecdsa.svg?style=flat)](https://pypi.python.org/pypi/ecdsa/)
+
+
+ This is an easy-to-use implementation of ECDSA cryptography (Elliptic Curve
+ Digital Signature Algorithm), implemented purely in Python, released under
+ the MIT license. With this library, you can quickly create keypairs (signing
+ key and verifying key), sign messages, and verify the signatures. The keys
+ and signatures are very short, making them easy to handle and incorporate
+ into other protocols.
+
+ ## Features
+
+ This library provides key generation, signing, and verifying, for five
+ popular NIST "Suite B" GF(p) (_prime field_) curves, with key lengths of 192,
+ 224, 256, 384, and 521 bits. The "short names" for these curves, as known by
+ the OpenSSL tool (`openssl ecparam -list_curves`), are: `prime192v1`,
+ `secp224r1`, `prime256v1`, `secp384r1`, and `secp521r1`. It includes the
+ 256-bit curve `secp256k1` used by Bitcoin. There is also support for the
+ regular (non-twisted) variants of Brainpool curves from 160 to 512 bits. The
+ "short names" of those curves are: `brainpoolP160r1`, `brainpoolP192r1`,
+ `brainpoolP224r1`, `brainpoolP256r1`, `brainpoolP320r1`, `brainpoolP384r1`,
+ `brainpoolP512r1`.
+ No other curves are included, but it is not too hard to add support for more
+ curves over prime fields.
+
+ ## Dependencies
+
+ This library uses only Python and the 'six' package. It is compatible with
+ Python 2.6, 2.7 and 3.3+. It also supports execution on the alternative
+ implementations like pypy and pypy3.
+
+ If `gmpy2` or `gmpy` is installed, they will be used for faster arithmetic.
+ Either of them can be installed after this library is installed,
+ `python-ecdsa` will detect their presence on start-up and use them
+ automatically.
+
+ To run the OpenSSL compatibility tests, the 'openssl' tool must be in your
+ `PATH`. This release has been tested successfully against OpenSSL 0.9.8o,
+ 1.0.0a, 1.0.2f and 1.1.1d (among others).
+
+
+ ## Installation
+
+ This library is available on PyPI; it's recommended to install it using `pip`:
+
+ ```
+ pip install ecdsa
+ ```
+
+ If higher performance is wanted and using native code is not a problem,
+ it's possible to install the library together with `gmpy2`:
+
+ ```
+ pip install ecdsa[gmpy2]
+ ```
+
+ or (slower, legacy option):
+ ```
+ pip install ecdsa[gmpy]
+ ```
+
+ ## Speed
+
+ The following table shows how long this library takes to generate keypairs
+ (`keygen`), to sign data (`sign`), and to verify those signatures (`verify`).
+ All those values are in seconds.
+ For convenience, the inverses of those values are also provided:
+ how many keys per second can be generated (`keygen/s`), how many signatures
+ can be made per second (`sign/s`) and how many signatures can be verified
+ per second (`verify/s`). The size of raw signature (generally the smallest
+ way a signature can be encoded) is also provided in the `siglen` column.
+ Use `tox -e speed` to generate this table on your own computer.
+ On an Intel Core i7 4790K @ 4.0GHz I'm getting the following performance:
+
+ ```
+ siglen keygen keygen/s sign sign/s verify verify/s
+ NIST192p: 48 0.00035s 2893.02 0.00038s 2620.53 0.00069s 1458.92
+ NIST224p: 56 0.00043s 2307.11 0.00048s 2092.00 0.00088s 1131.33
+ NIST256p: 64 0.00056s 1793.70 0.00061s 1639.87 0.00113s 883.79
+ NIST384p: 96 0.00116s 864.33 0.00124s 806.29 0.00233s 429.87
+ NIST521p: 132 0.00221s 452.16 0.00234s 427.31 0.00460s 217.19
+ SECP256k1: 64 0.00056s 1772.65 0.00061s 1628.73 0.00110s 912.13
+ BRAINPOOLP160r1: 40 0.00026s 3801.86 0.00029s 3401.11 0.00052s 1930.47
+ BRAINPOOLP192r1: 48 0.00034s 2925.73 0.00038s 2634.34 0.00070s 1438.06
+ BRAINPOOLP224r1: 56 0.00044s 2287.98 0.00048s 2083.87 0.00088s 1137.52
+ BRAINPOOLP256r1: 64 0.00056s 1774.11 0.00061s 1628.25 0.00112s 890.71
+ BRAINPOOLP320r1: 80 0.00081s 1238.18 0.00087s 1146.71 0.00151s 661.95
+ BRAINPOOLP384r1: 96 0.00117s 855.47 0.00124s 804.56 0.00241s 414.83
+ BRAINPOOLP512r1: 128 0.00223s 447.99 0.00234s 427.49 0.00437s 229.09
+
+ ecdh ecdh/s
+ NIST192p: 0.00110s 910.70
+ NIST224p: 0.00143s 701.17
+ NIST256p: 0.00178s 560.44
+ NIST384p: 0.00383s 261.03
+ NIST521p: 0.00745s 134.23
+ SECP256k1: 0.00168s 596.23
+ BRAINPOOLP160r1: 0.00085s 1174.02
+ BRAINPOOLP192r1: 0.00113s 883.47
+ BRAINPOOLP224r1: 0.00145s 687.82
+ BRAINPOOLP256r1: 0.00195s 514.03
+ BRAINPOOLP320r1: 0.00277s 360.80
+ BRAINPOOLP384r1: 0.00412s 242.58
+ BRAINPOOLP512r1: 0.00787s 127.12
+ ```
+
+ To test performance with `gmpy2` loaded, use `tox -e speedgmpy2`.
+ On the same machine I'm getting the following performance with `gmpy2`:
+ ```
+ siglen keygen keygen/s sign sign/s verify verify/s
+ NIST192p: 48 0.00017s 5945.50 0.00018s 5544.66 0.00033s 3002.54
+ NIST224p: 56 0.00021s 4742.14 0.00022s 4463.52 0.00044s 2248.59
+ NIST256p: 64 0.00024s 4155.73 0.00025s 3994.28 0.00047s 2105.34
+ NIST384p: 96 0.00041s 2415.06 0.00043s 2316.41 0.00085s 1177.18
+ NIST521p: 132 0.00072s 1391.14 0.00074s 1359.63 0.00140s 716.31
+ SECP256k1: 64 0.00024s 4216.50 0.00025s 3994.52 0.00047s 2120.57
+ BRAINPOOLP160r1: 40 0.00014s 7038.99 0.00015s 6501.55 0.00029s 3397.79
+ BRAINPOOLP192r1: 48 0.00017s 5983.18 0.00018s 5626.08 0.00035s 2843.62
+ BRAINPOOLP224r1: 56 0.00021s 4727.54 0.00022s 4464.86 0.00043s 2326.84
+ BRAINPOOLP256r1: 64 0.00024s 4221.00 0.00025s 4010.26 0.00049s 2046.40
+ BRAINPOOLP320r1: 80 0.00032s 3142.14 0.00033s 3009.15 0.00061s 1652.88
+ BRAINPOOLP384r1: 96 0.00041s 2415.98 0.00043s 2340.35 0.00083s 1198.77
+ BRAINPOOLP512r1: 128 0.00064s 1567.27 0.00066s 1526.33 0.00127s 788.51
+
+ ecdh ecdh/s
+ NIST192p: 0.00051s 1960.26
+ NIST224p: 0.00067s 1502.97
+ NIST256p: 0.00073s 1376.12
+ NIST384p: 0.00132s 758.68
+ NIST521p: 0.00231s 433.23
+ SECP256k1: 0.00072s 1387.18
+ BRAINPOOLP160r1: 0.00042s 2366.60
+ BRAINPOOLP192r1: 0.00049s 2026.80
+ BRAINPOOLP224r1: 0.00067s 1486.52
+ BRAINPOOLP256r1: 0.00076s 1310.31
+ BRAINPOOLP320r1: 0.00101s 986.16
+ BRAINPOOLP384r1: 0.00131s 761.35
+ BRAINPOOLP512r1: 0.00211s 473.30
+ ```
+
+ (there's also `gmpy` version, execute it using `tox -e speedgmpy`)
+
+ For comparison, a highly optimised implementation (including curve-specific
+ assembly for some curves), like the one in OpenSSL 1.1.1d, provides the following
+ performance numbers on the same machine.
+ Run `openssl speed ecdsa` and `openssl speed ecdh` to reproduce it:
+ ```
+ sign verify sign/s verify/s
+ 192 bits ecdsa (nistp192) 0.0002s 0.0002s 4785.6 5380.7
+ 224 bits ecdsa (nistp224) 0.0000s 0.0001s 22475.6 9822.0
+ 256 bits ecdsa (nistp256) 0.0000s 0.0001s 45069.6 14166.6
+ 384 bits ecdsa (nistp384) 0.0008s 0.0006s 1265.6 1648.1
+ 521 bits ecdsa (nistp521) 0.0003s 0.0005s 3753.1 1819.5
+ 256 bits ecdsa (brainpoolP256r1) 0.0003s 0.0003s 2983.5 3333.2
+ 384 bits ecdsa (brainpoolP384r1) 0.0008s 0.0007s 1258.8 1528.1
+ 512 bits ecdsa (brainpoolP512r1) 0.0015s 0.0012s 675.1 860.1
+
+ op op/s
+ 192 bits ecdh (nistp192) 0.0002s 4853.4
+ 224 bits ecdh (nistp224) 0.0001s 15252.1
+ 256 bits ecdh (nistp256) 0.0001s 18436.3
+ 384 bits ecdh (nistp384) 0.0008s 1292.7
+ 521 bits ecdh (nistp521) 0.0003s 2884.7
+ 256 bits ecdh (brainpoolP256r1) 0.0003s 3066.5
+ 384 bits ecdh (brainpoolP384r1) 0.0008s 1298.0
+ 512 bits ecdh (brainpoolP512r1) 0.0014s 694.8
+ ```
+
+ Keys and signature can be serialized in different ways (see Usage, below).
+ For a NIST192p key, the three basic representations require strings of the
+ following lengths (in bytes):
+
+ to_string: signkey= 24, verifykey= 48, signature=48
+ compressed: signkey=n/a, verifykey= 25, signature=n/a
+ DER: signkey=106, verifykey= 80, signature=55
+ PEM: signkey=278, verifykey=162, (no support for PEM signatures)
+
+ ## History
+
+ In 2006, Peter Pearson announced his pure-python implementation of ECDSA in a
+ [message to sci.crypt][1], available from his [download site][2]. In 2010,
+ Brian Warner wrote a wrapper around this code, to make it a bit easier and
+ safer to use. Hubert Kario then included an implementation of elliptic curve
+ cryptography that uses Jacobian coordinates internally, improving performance
+ about 20-fold. You are looking at the README for this wrapper.
+
+ [1]: http://www.derkeiler.com/Newsgroups/sci.crypt/2006-01/msg00651.html
+ [2]: http://webpages.charter.net/curryfans/peter/downloads.html
+
+ ## Testing
+
+ To run the full test suite, do this:
+
+ tox -e coverage
+
+ On an Intel Core i7 4790K @ 4.0GHz, the tests take about 16 seconds to execute.
+ The test suite uses
+ [`hypothesis`](https://github.com/HypothesisWorks/hypothesis) so there is some
+ inherent variability in the test suite execution time.
+
+ One part of `test_pyecdsa.py` checks compatibility with OpenSSL by
+ running the "openssl" CLI tool; make sure it's in your `PATH` if you want
+ to test compatibility with it.
+
+ ## Security
+
+ This library was not designed with security in mind. If you are processing
+ data that needs to be protected we suggest you use a quality wrapper around
+ OpenSSL. [pyca/cryptography](https://cryptography.io) is one example of such
+ a wrapper. The primary use-case of this library is as a portable library for
+ interoperability testing and as a teaching tool.
+
+ **This library does not protect against side channel attacks.**
+
+ Do not allow attackers to measure how long it takes you to generate a keypair
+ or sign a message. Do not allow attackers to run code on the same physical
+ machine when keypair generation or signing is taking place (this includes
+ virtual machines). Do not allow attackers to measure how much power your
+ computer uses while generating the keypair or signing a message. Do not allow
+ attackers to measure RF interference coming from your computer while generating
+ a keypair or signing a message. Note: just loading the private key will cause
+ keypair generation. Other operations or attack vectors may also be
+ vulnerable to attacks. **For a sophisticated attacker, observing just one
+ operation with a private key will be sufficient to completely
+ reconstruct the private key**.
+
+ Please also note that any Pure-python cryptographic library will be vulnerable
+ to the same side channel attacks. This is because Python does not provide
+ side-channel secure primitives (with the exception of
+ [`hmac.compare_digest()`][3]), making side-channel secure programming
+ impossible.
+
+ This library depends upon a strong source of random numbers. Do not use it on
+ a system where `os.urandom()` does not provide cryptographically secure
+ random numbers.
+
+ [3]: https://docs.python.org/3/library/hmac.html#hmac.compare_digest
+
+ ## Usage
+
+ You start by creating a `SigningKey`. You can use this to sign data, by passing
+ in data as a byte string and getting back the signature (also a byte string).
+ You can also ask a `SigningKey` to give you the corresponding `VerifyingKey`.
+ The `VerifyingKey` can be used to verify a signature, by passing it both the
+ data string and the signature byte string: it either returns True or raises
+ `BadSignatureError`.
+
+ ```python
+ from ecdsa import SigningKey
+ sk = SigningKey.generate() # uses NIST192p
+ vk = sk.verifying_key
+ signature = sk.sign(b"message")
+ assert vk.verify(signature, b"message")
+ ```
+
+ Each `SigningKey`/`VerifyingKey` is associated with a specific curve, like
+ NIST192p (the default one). Longer curves are more secure, but take longer to
+ use, and result in longer keys and signatures.
+
+ ```python
+ from ecdsa import SigningKey, NIST384p
+ sk = SigningKey.generate(curve=NIST384p)
+ vk = sk.verifying_key
+ signature = sk.sign(b"message")
+ assert vk.verify(signature, b"message")
+ ```
+
+ The `SigningKey` can be serialized into several different formats: the shortest
+ is to call `s=sk.to_string()`, and then re-create it with
+ `SigningKey.from_string(s, curve)`. This short form does not record the
+ curve, so you must be sure to pass to `from_string()` the same curve you used
+ for the original key. The short form of a NIST192p-based signing key is just 24
+ bytes long. If a point encoding is invalid or it does not lie on the specified
+ curve, `from_string()` will raise `MalformedPointError`.
+
+ ```python
+ from ecdsa import SigningKey, NIST384p
+ sk = SigningKey.generate(curve=NIST384p)
+ sk_string = sk.to_string()
+ sk2 = SigningKey.from_string(sk_string, curve=NIST384p)
+ print(sk_string.hex())
+ print(sk2.to_string().hex())
+ ```
+
+ Note: while the methods are called `to_string()` the type they return is
+ actually `bytes`, the "string" part is leftover from Python 2.
+
+ `sk.to_pem()` and `sk.to_der()` will serialize the signing key into the same
+ formats that OpenSSL uses. The PEM file looks like the familiar ASCII-armored
+ `"-----BEGIN EC PRIVATE KEY-----"` base64-encoded format, and the DER format
+ is a shorter binary form of the same data.
+ `SigningKey.from_pem()/.from_der()` will undo this serialization. These
+ formats include the curve name, so you do not need to pass in a curve
+ identifier to the deserializer. In case the file is malformed `from_der()`
+ and `from_pem()` will raise `UnexpectedDER` or `MalformedPointError`.
+
+ ```python
+ from ecdsa import SigningKey, NIST384p
+ sk = SigningKey.generate(curve=NIST384p)
+ sk_pem = sk.to_pem()
+ sk2 = SigningKey.from_pem(sk_pem)
+ # sk and sk2 are the same key
+ ```
+
+ Likewise, the `VerifyingKey` can be serialized in the same way:
+ `vk.to_string()/VerifyingKey.from_string()`, `to_pem()/from_pem()`, and
+ `to_der()/from_der()`. The same `curve=` argument is needed for
+ `VerifyingKey.from_string()`.
+
+ ```python
+ from ecdsa import SigningKey, VerifyingKey, NIST384p
+ sk = SigningKey.generate(curve=NIST384p)
+ vk = sk.verifying_key
+ vk_string = vk.to_string()
+ vk2 = VerifyingKey.from_string(vk_string, curve=NIST384p)
+ # vk and vk2 are the same key
+
+ from ecdsa import SigningKey, VerifyingKey, NIST384p
+ sk = SigningKey.generate(curve=NIST384p)
+ vk = sk.verifying_key
+ vk_pem = vk.to_pem()
+ vk2 = VerifyingKey.from_pem(vk_pem)
+ # vk and vk2 are the same key
+ ```
+
+ There are a couple of different ways to compute a signature. Fundamentally,
+ ECDSA takes a number that represents the data being signed, and returns a
+ pair of numbers that represent the signature. The `hashfunc=` argument to
+ `sk.sign()` and `vk.verify()` is used to turn an arbitrary string into
+ fixed-length digest, which is then turned into a number that ECDSA can sign,
+ and both sign and verify must use the same approach. The default value is
+ `hashlib.sha1`, but if you use NIST256p or a longer curve, you can use
+ `hashlib.sha256` instead.
+
+ There are also multiple ways to represent a signature. The default
+ `sk.sign()` and `vk.verify()` methods present it as a short string, for
+ simplicity and minimal overhead. To use a different scheme, use the
+ `sk.sign(sigencode=)` and `vk.verify(sigdecode=)` arguments. There are helper
+ functions in the `ecdsa.util` module that can be useful here.
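+
+ As an illustration of those two arguments (this sketch is not part of the
+ upstream text; it assumes a curve long enough for SHA-256, such as NIST256p),
+ a signature can be created with an explicit hash function and encoded as DER:
+
+ ```python
+ import hashlib
+ from ecdsa import SigningKey, NIST256p
+ from ecdsa.util import sigencode_der, sigdecode_der
+
+ sk = SigningKey.generate(curve=NIST256p)
+ vk = sk.verifying_key
+ # hash with SHA-256 and encode the resulting signature as DER
+ signature = sk.sign(b"message", hashfunc=hashlib.sha256,
+                     sigencode=sigencode_der)
+ assert vk.verify(signature, b"message", hashfunc=hashlib.sha256,
+                  sigdecode=sigdecode_der)
+ ```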
+
+ It is also possible to create a `SigningKey` from a "seed", which is
+ deterministic. This can be used in protocols where you want to derive
+ consistent signing keys from some other secret, for example when you want
+ three separate keys and only want to store a single master secret. You should
+ start with a uniformly-distributed unguessable seed with about `curve.baselen`
+ bytes of entropy, and then use one of the helper functions in `ecdsa.util` to
+ convert it into an integer in the correct range, and then finally pass it
+ into `SigningKey.from_secret_exponent()`, like this:
+
+ ```python
+ import os
+ from ecdsa import NIST384p, SigningKey
+ from ecdsa.util import randrange_from_seed__trytryagain
+
+ def make_key(seed):
+ secexp = randrange_from_seed__trytryagain(seed, NIST384p.order)
+ return SigningKey.from_secret_exponent(secexp, curve=NIST384p)
+
+ seed = os.urandom(NIST384p.baselen) # or other starting point
+ sk1a = make_key(seed)
+ sk1b = make_key(seed)
+ # note: sk1a and sk1b are the same key
+ assert sk1a.to_string() == sk1b.to_string()
+ sk2 = make_key(b"2-"+seed) # different key
+ assert sk1a.to_string() != sk2.to_string()
+ ```
+
+ In case the application will verify a lot of signatures made with a single
+ key, it's possible to precompute some of the internal values to make
+ signature verification significantly faster. The break-even point occurs at
+ about 100 signatures verified.
+
+ To perform precomputation, you can call the `precompute()` method
+ on `VerifyingKey` instance:
+ ```python
+ from ecdsa import SigningKey, NIST384p
+ sk = SigningKey.generate(curve=NIST384p)
+ vk = sk.verifying_key
+ vk.precompute()
+ signature = sk.sign(b"message")
+ assert vk.verify(signature, b"message")
+ ```
+
+ Once `precompute()` has been called, all signature verifications with this key
+ will execute faster.
+
+ ## OpenSSL Compatibility
+
+ To produce signatures that can be verified by OpenSSL tools, or to verify
+ signatures that were produced by those tools, use:
+
+ ```python
+ # openssl ecparam -name prime256v1 -genkey -out sk.pem
+ # openssl ec -in sk.pem -pubout -out vk.pem
+ # echo "data for signing" > data
+ # openssl dgst -sha256 -sign sk.pem -out data.sig data
+ # openssl dgst -sha256 -verify vk.pem -signature data.sig data
+ # openssl dgst -sha256 -prverify sk.pem -signature data.sig data
+
+ import hashlib
+ from ecdsa import SigningKey, VerifyingKey
+ from ecdsa.util import sigencode_der, sigdecode_der
+
+ with open("vk.pem") as f:
+ vk = VerifyingKey.from_pem(f.read())
+
+ with open("data", "rb") as f:
+ data = f.read()
+
+ with open("data.sig", "rb") as f:
+ signature = f.read()
+
+ assert vk.verify(signature, data, hashlib.sha256, sigdecode=sigdecode_der)
+
+ with open("sk.pem") as f:
+ sk = SigningKey.from_pem(f.read(), hashlib.sha256)
+
+ new_signature = sk.sign_deterministic(data, sigencode=sigencode_der)
+
+ with open("data.sig2", "wb") as f:
+ f.write(new_signature)
+
+ # openssl dgst -sha256 -verify vk.pem -signature data.sig2 data
+ ```
+
+ Note: if compatibility with OpenSSL 1.0.0 or earlier is necessary, the
+ `sigencode_string` and `sigdecode_string` from `ecdsa.util` can be used for
+ writing and reading the signatures, respectively.
+
+ The keys can also be written in a format that OpenSSL can handle:
+
+ ```python
+ from ecdsa import SigningKey, VerifyingKey
+
+ with open("sk.pem") as f:
+ sk = SigningKey.from_pem(f.read())
+ with open("sk.pem", "wb") as f:
+ f.write(sk.to_pem())
+
+ with open("vk.pem") as f:
+ vk = VerifyingKey.from_pem(f.read())
+ with open("vk.pem", "wb") as f:
+ f.write(vk.to_pem())
+ ```
+
+ ## Entropy
+
+ Creating a signing key with `SigningKey.generate()` requires some form of
+ entropy (as opposed to
+ `from_secret_exponent`/`from_string`/`from_der`/`from_pem`,
+ which are deterministic and do not require an entropy source). The default
+ source is `os.urandom()`, but you can pass any other function that behaves
+ like `os.urandom` as the `entropy=` argument to do something different. This
+ may be useful in unit tests, where you want to achieve repeatable results. The
+ `ecdsa.util.PRNG` utility is handy here: it takes a seed and produces a strong
+ pseudo-random stream from it:
+
+ ```python
+ from ecdsa.util import PRNG
+ from ecdsa import SigningKey
+ rng1 = PRNG(b"seed")
+ sk1 = SigningKey.generate(entropy=rng1)
+ rng2 = PRNG(b"seed")
+ sk2 = SigningKey.generate(entropy=rng2)
+ # sk1 and sk2 are the same key
+ ```
+
+ Likewise, ECDSA signature generation requires a random number, and each
+ signature must use a different one (using the same number twice will
+ immediately reveal the private signing key). The `sk.sign()` method takes an
+ `entropy=` argument which behaves the same as `SigningKey.generate(entropy=)`.
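+
+ A small sketch of that (not part of the upstream text; only useful for
+ repeatable tests, never for real keys):
+
+ ```python
+ from ecdsa import SigningKey
+ from ecdsa.util import PRNG
+
+ sk = SigningKey.generate(entropy=PRNG(b"key-seed"))
+ # deterministic nonce source; reusing a nonce leaks the private key,
+ # so this pattern is strictly for tests
+ sig = sk.sign(b"message", entropy=PRNG(b"signature-seed"))
+ ```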
+
+ ## Deterministic Signatures
+
+ If you call `SigningKey.sign_deterministic(data)` instead of `.sign(data)`,
+ the code will generate a deterministic signature instead of a random one.
+ This uses the algorithm from RFC6979 to safely generate a unique `k` value,
+ derived from the private key and the message being signed. Each time you sign
+ the same message with the same key, you will get the same signature (using
+ the same `k`).
+
+ This may become the default in a future version, as it is not vulnerable to
+ failures of the entropy source.
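+
+ A minimal sketch (not part of the upstream text; it assumes a curve long
+ enough for SHA-256, such as NIST256p):
+
+ ```python
+ import hashlib
+ from ecdsa import SigningKey, NIST256p
+
+ sk = SigningKey.generate(curve=NIST256p)
+ sig1 = sk.sign_deterministic(b"message", hashfunc=hashlib.sha256)
+ sig2 = sk.sign_deterministic(b"message", hashfunc=hashlib.sha256)
+ # the same key and the same message always produce the same signature
+ assert sig1 == sig2
+ ```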
+
+ ## Examples
+
+ Create a NIST192p keypair and immediately save both to disk:
+
+ ```python
+ from ecdsa import SigningKey
+ sk = SigningKey.generate()
+ vk = sk.verifying_key
+ with open("private.pem", "wb") as f:
+ f.write(sk.to_pem())
+ with open("public.pem", "wb") as f:
+ f.write(vk.to_pem())
+ ```
+
+ Load a signing key from disk, use it to sign a message (using SHA-1), and write
+ the signature to disk:
+
+ ```python
+ from ecdsa import SigningKey
+ with open("private.pem") as f:
+ sk = SigningKey.from_pem(f.read())
+ with open("message", "rb") as f:
+ message = f.read()
+ sig = sk.sign(message)
+ with open("signature", "wb") as f:
+ f.write(sig)
+ ```
+
+ Load the verifying key, message, and signature from disk, and verify the
+ signature (assume SHA-1 hash):
+
+ ```python
+ from ecdsa import VerifyingKey, BadSignatureError
+ vk = VerifyingKey.from_pem(open("public.pem").read())
+ with open("message", "rb") as f:
+ message = f.read()
+ with open("signature", "rb") as f:
+ sig = f.read()
+ try:
+ vk.verify(sig, message)
+ print "good signature"
+ except BadSignatureError:
+ print "BAD SIGNATURE"
+ ```
+
+ Create a NIST521p keypair:
+
+ ```python
+ from ecdsa import SigningKey, NIST521p
+ sk = SigningKey.generate(curve=NIST521p)
+ vk = sk.verifying_key
+ ```
+
+ Create three independent signing keys from a master seed:
+
+ ```python
+ import os
+ from ecdsa import NIST192p, SigningKey
+ from ecdsa.util import randrange_from_seed__trytryagain
+
+ def make_key_from_seed(seed, curve=NIST192p):
+     secexp = randrange_from_seed__trytryagain(seed, curve.order)
+     return SigningKey.from_secret_exponent(secexp, curve)
+
+ seed = os.urandom(NIST192p.baselen)  # the master secret
+ sk1 = make_key_from_seed("1:%s" % seed)
+ sk2 = make_key_from_seed("2:%s" % seed)
+ sk3 = make_key_from_seed("3:%s" % seed)
+ ```
+
+ Load a verifying key from disk and print it using hex encoding in
+ uncompressed and compressed format (defined in X9.62 and SEC1 standards):
+
+ ```python
+ from ecdsa import VerifyingKey
+
+ with open("public.pem") as f:
+ vk = VerifyingKey.from_pem(f.read())
+
+ print("uncompressed: {0}".format(vk.to_string("uncompressed").hex()))
+ print("compressed: {0}".format(vk.to_string("compressed").hex()))
+ ```
+
+ Load a verifying key from a hex string from compressed format, output
+ uncompressed:
+
+ ```python
+ from ecdsa import VerifyingKey, NIST256p
+
+ comp_str = '022799c0d0ee09772fdd337d4f28dc155581951d07082fb19a38aa396b67e77759'
+ vk = VerifyingKey.from_string(bytearray.fromhex(comp_str), curve=NIST256p)
+ print(vk.to_string("uncompressed").hex())
+ ```
+
+ ECDH key exchange with a remote party:
+
+ ```python
+ from ecdsa import ECDH, NIST256p
+
+ ecdh = ECDH(curve=NIST256p)
+ ecdh.generate_private_key()
+ local_public_key = ecdh.get_public_key()
+ #send `local_public_key` to remote party and receive `remote_public_key` from remote party
+ with open("remote_public_key.pem") as e:
+ remote_public_key = e.read()
+ ecdh.load_received_public_key_pem(remote_public_key)
+ secret = ecdh.generate_sharedsecret_bytes()
+ ```
+
+Platform: UNKNOWN
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.*
+Description-Content-Type: text/markdown
+Provides-Extra: gmpy2
+Provides-Extra: gmpy
diff --git a/third_party/python/ecdsa/README.md b/third_party/python/ecdsa/README.md
new file mode 100644
index 0000000000..0d310b90a0
--- /dev/null
+++ b/third_party/python/ecdsa/README.md
@@ -0,0 +1,595 @@
+# Pure-Python ECDSA
+
+[![build status](https://travis-ci.org/warner/python-ecdsa.png)](http://travis-ci.org/warner/python-ecdsa)
+[![Coverage Status](https://coveralls.io/repos/warner/python-ecdsa/badge.svg)](https://coveralls.io/r/warner/python-ecdsa)
+[![condition coverage](https://img.shields.io/badge/condition%20coverage-81%25-yellow)](https://travis-ci.org/warner/python-ecdsa/jobs/626479178#L776)
+[![Latest Version](https://img.shields.io/pypi/v/ecdsa.svg?style=flat)](https://pypi.python.org/pypi/ecdsa/)
+
+
+This is an easy-to-use implementation of ECDSA cryptography (Elliptic Curve
+Digital Signature Algorithm), implemented purely in Python, released under
+the MIT license. With this library, you can quickly create keypairs (signing
+key and verifying key), sign messages, and verify the signatures. The keys
+and signatures are very short, making them easy to handle and incorporate
+into other protocols.
+
+## Features
+
+This library provides key generation, signing, and verifying, for five
+popular NIST "Suite B" GF(p) (_prime field_) curves, with key lengths of 192,
+224, 256, 384, and 521 bits. The "short names" for these curves, as known by
+the OpenSSL tool (`openssl ecparam -list_curves`), are: `prime192v1`,
+`secp224r1`, `prime256v1`, `secp384r1`, and `secp521r1`. It includes the
+256-bit curve `secp256k1` used by Bitcoin. There is also support for the
+regular (non-twisted) variants of Brainpool curves from 160 to 512 bits. The
+"short names" of those curves are: `brainpoolP160r1`, `brainpoolP192r1`,
+`brainpoolP224r1`, `brainpoolP256r1`, `brainpoolP320r1`, `brainpoolP384r1`,
+`brainpoolP512r1`.
+No other curves are included, but it is not too hard to add support for more
+curves over prime fields.
+
+## Dependencies
+
+This library uses only Python and the 'six' package. It is compatible with
+Python 2.6, 2.7 and 3.3+. It also supports execution on the alternative
+implementations like pypy and pypy3.
+
+If `gmpy2` or `gmpy` is installed, they will be used for faster arithmetic.
+Either of them can be installed after this library is installed,
+`python-ecdsa` will detect their presence on start-up and use them
+automatically.
+
+To run the OpenSSL compatibility tests, the 'openssl' tool must be in your
+`PATH`. This release has been tested successfully against OpenSSL 0.9.8o,
+1.0.0a, 1.0.2f and 1.1.1d (among others).
+
+
+## Installation
+
+This library is available on PyPI; it's recommended to install it using `pip`:
+
+```
+pip install ecdsa
+```
+
+If higher performance is wanted and using native code is not a problem,
+it's possible to install the library together with `gmpy2`:
+
+```
+pip install ecdsa[gmpy2]
+```
+
+or (slower, legacy option):
+```
+pip install ecdsa[gmpy]
+```
+
+## Speed
+
+The following table shows how long this library takes to generate keypairs
+(`keygen`), to sign data (`sign`), and to verify those signatures (`verify`).
+All those values are in seconds.
+For convenience, the inverses of those values are also provided:
+how many keys per second can be generated (`keygen/s`), how many signatures
+can be made per second (`sign/s`) and how many signatures can be verified
+per second (`verify/s`). The size of raw signature (generally the smallest
+way a signature can be encoded) is also provided in the `siglen` column.
+Use `tox -e speed` to generate this table on your own computer.
+On an Intel Core i7 4790K @ 4.0GHz I'm getting the following performance:
+
+```
+ siglen keygen keygen/s sign sign/s verify verify/s
+ NIST192p: 48 0.00035s 2893.02 0.00038s 2620.53 0.00069s 1458.92
+ NIST224p: 56 0.00043s 2307.11 0.00048s 2092.00 0.00088s 1131.33
+ NIST256p: 64 0.00056s 1793.70 0.00061s 1639.87 0.00113s 883.79
+ NIST384p: 96 0.00116s 864.33 0.00124s 806.29 0.00233s 429.87
+ NIST521p: 132 0.00221s 452.16 0.00234s 427.31 0.00460s 217.19
+ SECP256k1: 64 0.00056s 1772.65 0.00061s 1628.73 0.00110s 912.13
+ BRAINPOOLP160r1: 40 0.00026s 3801.86 0.00029s 3401.11 0.00052s 1930.47
+ BRAINPOOLP192r1: 48 0.00034s 2925.73 0.00038s 2634.34 0.00070s 1438.06
+ BRAINPOOLP224r1: 56 0.00044s 2287.98 0.00048s 2083.87 0.00088s 1137.52
+ BRAINPOOLP256r1: 64 0.00056s 1774.11 0.00061s 1628.25 0.00112s 890.71
+ BRAINPOOLP320r1: 80 0.00081s 1238.18 0.00087s 1146.71 0.00151s 661.95
+ BRAINPOOLP384r1: 96 0.00117s 855.47 0.00124s 804.56 0.00241s 414.83
+ BRAINPOOLP512r1: 128 0.00223s 447.99 0.00234s 427.49 0.00437s 229.09
+
+ ecdh ecdh/s
+ NIST192p: 0.00110s 910.70
+ NIST224p: 0.00143s 701.17
+ NIST256p: 0.00178s 560.44
+ NIST384p: 0.00383s 261.03
+ NIST521p: 0.00745s 134.23
+ SECP256k1: 0.00168s 596.23
+ BRAINPOOLP160r1: 0.00085s 1174.02
+ BRAINPOOLP192r1: 0.00113s 883.47
+ BRAINPOOLP224r1: 0.00145s 687.82
+ BRAINPOOLP256r1: 0.00195s 514.03
+ BRAINPOOLP320r1: 0.00277s 360.80
+ BRAINPOOLP384r1: 0.00412s 242.58
+ BRAINPOOLP512r1: 0.00787s 127.12
+```
+
+To test performance with `gmpy2` loaded, use `tox -e speedgmpy2`.
+On the same machine I'm getting the following performance with `gmpy2`:
+```
+ siglen keygen keygen/s sign sign/s verify verify/s
+ NIST192p: 48 0.00017s 5945.50 0.00018s 5544.66 0.00033s 3002.54
+ NIST224p: 56 0.00021s 4742.14 0.00022s 4463.52 0.00044s 2248.59
+ NIST256p: 64 0.00024s 4155.73 0.00025s 3994.28 0.00047s 2105.34
+ NIST384p: 96 0.00041s 2415.06 0.00043s 2316.41 0.00085s 1177.18
+ NIST521p: 132 0.00072s 1391.14 0.00074s 1359.63 0.00140s 716.31
+ SECP256k1: 64 0.00024s 4216.50 0.00025s 3994.52 0.00047s 2120.57
+ BRAINPOOLP160r1: 40 0.00014s 7038.99 0.00015s 6501.55 0.00029s 3397.79
+ BRAINPOOLP192r1: 48 0.00017s 5983.18 0.00018s 5626.08 0.00035s 2843.62
+ BRAINPOOLP224r1: 56 0.00021s 4727.54 0.00022s 4464.86 0.00043s 2326.84
+ BRAINPOOLP256r1: 64 0.00024s 4221.00 0.00025s 4010.26 0.00049s 2046.40
+ BRAINPOOLP320r1: 80 0.00032s 3142.14 0.00033s 3009.15 0.00061s 1652.88
+ BRAINPOOLP384r1: 96 0.00041s 2415.98 0.00043s 2340.35 0.00083s 1198.77
+ BRAINPOOLP512r1: 128 0.00064s 1567.27 0.00066s 1526.33 0.00127s 788.51
+
+ ecdh ecdh/s
+ NIST192p: 0.00051s 1960.26
+ NIST224p: 0.00067s 1502.97
+ NIST256p: 0.00073s 1376.12
+ NIST384p: 0.00132s 758.68
+ NIST521p: 0.00231s 433.23
+ SECP256k1: 0.00072s 1387.18
+ BRAINPOOLP160r1: 0.00042s 2366.60
+ BRAINPOOLP192r1: 0.00049s 2026.80
+ BRAINPOOLP224r1: 0.00067s 1486.52
+ BRAINPOOLP256r1: 0.00076s 1310.31
+ BRAINPOOLP320r1: 0.00101s 986.16
+ BRAINPOOLP384r1: 0.00131s 761.35
+ BRAINPOOLP512r1: 0.00211s 473.30
+```
+
+(there's also `gmpy` version, execute it using `tox -e speedgmpy`)
+
+For comparison, a highly optimised implementation (including curve-specific
+assembly for some curves), like the one in OpenSSL 1.1.1d, provides the following
+performance numbers on the same machine.
+Run `openssl speed ecdsa` and `openssl speed ecdh` to reproduce it:
+```
+ sign verify sign/s verify/s
+ 192 bits ecdsa (nistp192) 0.0002s 0.0002s 4785.6 5380.7
+ 224 bits ecdsa (nistp224) 0.0000s 0.0001s 22475.6 9822.0
+ 256 bits ecdsa (nistp256) 0.0000s 0.0001s 45069.6 14166.6
+ 384 bits ecdsa (nistp384) 0.0008s 0.0006s 1265.6 1648.1
+ 521 bits ecdsa (nistp521) 0.0003s 0.0005s 3753.1 1819.5
+ 256 bits ecdsa (brainpoolP256r1) 0.0003s 0.0003s 2983.5 3333.2
+ 384 bits ecdsa (brainpoolP384r1) 0.0008s 0.0007s 1258.8 1528.1
+ 512 bits ecdsa (brainpoolP512r1) 0.0015s 0.0012s 675.1 860.1
+
+ op op/s
+ 192 bits ecdh (nistp192) 0.0002s 4853.4
+ 224 bits ecdh (nistp224) 0.0001s 15252.1
+ 256 bits ecdh (nistp256) 0.0001s 18436.3
+ 384 bits ecdh (nistp384) 0.0008s 1292.7
+ 521 bits ecdh (nistp521) 0.0003s 2884.7
+ 256 bits ecdh (brainpoolP256r1) 0.0003s 3066.5
+ 384 bits ecdh (brainpoolP384r1) 0.0008s 1298.0
+ 512 bits ecdh (brainpoolP512r1) 0.0014s 694.8
+```
+
+Keys and signature can be serialized in different ways (see Usage, below).
+For a NIST192p key, the three basic representations require strings of the
+following lengths (in bytes):
+
+ to_string: signkey= 24, verifykey= 48, signature=48
+ compressed: signkey=n/a, verifykey= 25, signature=n/a
+ DER: signkey=106, verifykey= 80, signature=55
+ PEM: signkey=278, verifykey=162, (no support for PEM signatures)
+
+## History
+
+In 2006, Peter Pearson announced his pure-python implementation of ECDSA in a
+[message to sci.crypt][1], available from his [download site][2]. In 2010,
+Brian Warner wrote a wrapper around this code, to make it a bit easier and
+safer to use. Hubert Kario then included an implementation of elliptic curve
+cryptography that uses Jacobian coordinates internally, improving performance
+about 20-fold. You are looking at the README for this wrapper.
+
+[1]: http://www.derkeiler.com/Newsgroups/sci.crypt/2006-01/msg00651.html
+[2]: http://webpages.charter.net/curryfans/peter/downloads.html
+
+## Testing
+
+To run the full test suite, do this:
+
+ tox -e coverage
+
+On an Intel Core i7 4790K @ 4.0GHz, the tests take about 16 seconds to execute.
+The test suite uses
+[`hypothesis`](https://github.com/HypothesisWorks/hypothesis) so there is some
+inherent variability in the test suite execution time.
+
+One part of `test_pyecdsa.py` checks compatibility with OpenSSL by
+running the "openssl" CLI tool; make sure it's in your `PATH` if you want
+to test compatibility with it.
+
+## Security
+
+This library was not designed with security in mind. If you are processing
+data that needs to be protected we suggest you use a quality wrapper around
+OpenSSL. [pyca/cryptography](https://cryptography.io) is one example of such
+a wrapper. The primary use-case of this library is as a portable library for
+interoperability testing and as a teaching tool.
+
+**This library does not protect against side channel attacks.**
+
+Do not allow attackers to measure how long it takes you to generate a keypair
+or sign a message. Do not allow attackers to run code on the same physical
+machine when keypair generation or signing is taking place (this includes
+virtual machines). Do not allow attackers to measure how much power your
+computer uses while generating the keypair or signing a message. Do not allow
+attackers to measure RF interference coming from your computer while generating
+a keypair or signing a message. Note: just loading the private key will cause
+keypair generation. Other operations or attack vectors may also be
+vulnerable to attacks. **For a sophisticated attacker, observing just one
+operation with a private key will be sufficient to completely
+reconstruct the private key**.
+
+Please also note that any Pure-python cryptographic library will be vulnerable
+to the same side channel attacks. This is because Python does not provide
+side-channel secure primitives (with the exception of
+[`hmac.compare_digest()`][3]), making side-channel secure programming
+impossible.
+
+This library depends upon a strong source of random numbers. Do not use it on
+a system where `os.urandom()` does not provide cryptographically secure
+random numbers.
+
+[3]: https://docs.python.org/3/library/hmac.html#hmac.compare_digest
+
+## Usage
+
+You start by creating a `SigningKey`. You can use this to sign data by passing
+in the data as a byte string and getting back the signature (also a byte
+string). You can also ask a `SigningKey` to give you the corresponding
+`VerifyingKey`. The `VerifyingKey` can be used to verify a signature by
+passing it both the data string and the signature byte string: it either
+returns `True` or raises `BadSignatureError`.
+
+```python
+from ecdsa import SigningKey
+sk = SigningKey.generate() # uses NIST192p
+vk = sk.verifying_key
+signature = sk.sign(b"message")
+assert vk.verify(signature, b"message")
+```
+
+Each `SigningKey`/`VerifyingKey` is associated with a specific curve, like
+NIST192p (the default one). Longer curves are more secure, but take longer to
+use, and result in longer keys and signatures.
+
+```python
+from ecdsa import SigningKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+vk = sk.verifying_key
+signature = sk.sign(b"message")
+assert vk.verify(signature, b"message")
+```
+
+The `SigningKey` can be serialized into several different formats: the shortest
+is to call `s=sk.to_string()`, and then re-create it with
+`SigningKey.from_string(s, curve)`. This short form does not record the
+curve, so you must be sure to pass to `from_string()` the same curve you used
+for the original key. The short form of a NIST192p-based signing key is just 24
+bytes long. If a point encoding is invalid or it does not lie on the specified
+curve, `from_string()` will raise `MalformedPointError`.
+
+```python
+from ecdsa import SigningKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+sk_string = sk.to_string()
+sk2 = SigningKey.from_string(sk_string, curve=NIST384p)
+print(sk_string.hex())
+print(sk2.to_string().hex())
+```
+
+Note: while the methods are called `to_string()`, the type they return is
+actually `bytes`; the "string" part is leftover from Python 2.
+
+`sk.to_pem()` and `sk.to_der()` will serialize the signing key into the same
+formats that OpenSSL uses. The PEM file looks like the familiar ASCII-armored
+`"-----BEGIN EC PRIVATE KEY-----"` base64-encoded format, and the DER format
+is a shorter binary form of the same data.
+`SigningKey.from_pem()/.from_der()` will undo this serialization. These
+formats include the curve name, so you do not need to pass in a curve
+identifier to the deserializer. In case the file is malformed, `from_der()`
+and `from_pem()` will raise `UnexpectedDER` or `MalformedPointError`.
+
+```python
+from ecdsa import SigningKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+sk_pem = sk.to_pem()
+sk2 = SigningKey.from_pem(sk_pem)
+# sk and sk2 are the same key
+```
+
+Likewise, the `VerifyingKey` can be serialized in the same way:
+`vk.to_string()/VerifyingKey.from_string()`, `to_pem()/from_pem()`, and
+`to_der()/from_der()`. The same `curve=` argument is needed for
+`VerifyingKey.from_string()`.
+
+```python
+from ecdsa import SigningKey, VerifyingKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+vk = sk.verifying_key
+vk_string = vk.to_string()
+vk2 = VerifyingKey.from_string(vk_string, curve=NIST384p)
+# vk and vk2 are the same key
+
+from ecdsa import SigningKey, VerifyingKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+vk = sk.verifying_key
+vk_pem = vk.to_pem()
+vk2 = VerifyingKey.from_pem(vk_pem)
+# vk and vk2 are the same key
+```
+
+There are a couple of different ways to compute a signature. Fundamentally,
+ECDSA takes a number that represents the data being signed, and returns a
+pair of numbers that represent the signature. The `hashfunc=` argument to
+`sk.sign()` and `vk.verify()` is used to turn an arbitrary string into a
+fixed-length digest, which is then turned into a number that ECDSA can sign,
+and both sign and verify must use the same approach. The default value is
+`hashlib.sha1`, but if you use NIST256p or a longer curve, you can use
+`hashlib.sha256` instead.
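+
+For example, a minimal sketch signing with SHA-256 over NIST256p (the same
+`hashfunc=` must be given to both calls):
+
+```python
+import hashlib
+from ecdsa import SigningKey, NIST256p
+
+sk = SigningKey.generate(curve=NIST256p)
+vk = sk.verifying_key
+signature = sk.sign(b"message", hashfunc=hashlib.sha256)
+assert vk.verify(signature, b"message", hashfunc=hashlib.sha256)
+```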
+
+There are also multiple ways to represent a signature. The default
+`sk.sign()` and `vk.verify()` methods present it as a short string, for
+simplicity and minimal overhead. To use a different scheme, use the
+`sk.sign(sigencode=)` and `vk.verify(sigdecode=)` arguments. There are helper
+functions in the `ecdsa.util` module that can be useful here.
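+
+As a sketch, the DER helpers from `ecdsa.util` (also used in the OpenSSL
+example below) can be plugged in like this:
+
+```python
+from ecdsa import SigningKey, NIST256p
+from ecdsa.util import sigencode_der, sigdecode_der
+
+sk = SigningKey.generate(curve=NIST256p)
+vk = sk.verifying_key
+signature = sk.sign(b"message", sigencode=sigencode_der)
+assert vk.verify(signature, b"message", sigdecode=sigdecode_der)
+```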
+
+It is also possible to create a `SigningKey` from a "seed", which is
+deterministic. This can be used in protocols where you want to derive
+consistent signing keys from some other secret, for example when you want
+three separate keys and only want to store a single master secret. You should
+start with a uniformly-distributed unguessable seed with about `curve.baselen`
+bytes of entropy, and then use one of the helper functions in `ecdsa.util` to
+convert it into an integer in the correct range, and then finally pass it
+into `SigningKey.from_secret_exponent()`, like this:
+
+```python
+import os
+from ecdsa import NIST384p, SigningKey
+from ecdsa.util import randrange_from_seed__trytryagain
+
+def make_key(seed):
+ secexp = randrange_from_seed__trytryagain(seed, NIST384p.order)
+ return SigningKey.from_secret_exponent(secexp, curve=NIST384p)
+
+seed = os.urandom(NIST384p.baselen) # or other starting point
+sk1a = make_key(seed)
+sk1b = make_key(seed)
+# note: sk1a and sk1b are the same key
+assert sk1a.to_string() == sk1b.to_string()
+sk2 = make_key(b"2-"+seed) # different key
+assert sk1a.to_string() != sk2.to_string()
+```
+
+If the application will verify a lot of signatures made with a single key,
+it's possible to precompute some of the internal values to make signature
+verification significantly faster. The break-even point occurs at about 100
+verified signatures.
+
+To perform precomputation, call the `precompute()` method
+on a `VerifyingKey` instance:
+```python
+from ecdsa import SigningKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+vk = sk.verifying_key
+vk.precompute()
+signature = sk.sign(b"message")
+assert vk.verify(signature, b"message")
+```
+
+Once `precompute()` has been called, all signature verifications with this
+key will be faster to execute.
+
+## OpenSSL Compatibility
+
+To produce signatures that can be verified by OpenSSL tools, or to verify
+signatures that were produced by those tools, use:
+
+```python
+# openssl ecparam -name prime256v1 -genkey -out sk.pem
+# openssl ec -in sk.pem -pubout -out vk.pem
+# echo "data for signing" > data
+# openssl dgst -sha256 -sign sk.pem -out data.sig data
+# openssl dgst -sha256 -verify vk.pem -signature data.sig data
+# openssl dgst -sha256 -prverify sk.pem -signature data.sig data
+
+import hashlib
+from ecdsa import SigningKey, VerifyingKey
+from ecdsa.util import sigencode_der, sigdecode_der
+
+with open("vk.pem") as f:
+ vk = VerifyingKey.from_pem(f.read())
+
+with open("data", "rb") as f:
+ data = f.read()
+
+with open("data.sig", "rb") as f:
+ signature = f.read()
+
+assert vk.verify(signature, data, hashlib.sha256, sigdecode=sigdecode_der)
+
+with open("sk.pem") as f:
+ sk = SigningKey.from_pem(f.read(), hashlib.sha256)
+
+new_signature = sk.sign_deterministic(data, sigencode=sigencode_der)
+
+with open("data.sig2", "wb") as f:
+ f.write(new_signature)
+
+# openssl dgst -sha256 -verify vk.pem -signature data.sig2 data
+```
+
+Note: if compatibility with OpenSSL 1.0.0 or earlier is necessary, the
+`sigencode_string` and `sigdecode_string` helpers from `ecdsa.util` can be
+used for writing and reading the signatures, respectively.
+
+The keys can also be written in a format that OpenSSL can handle:
+
+```python
+from ecdsa import SigningKey, VerifyingKey
+
+with open("sk.pem") as f:
+ sk = SigningKey.from_pem(f.read())
+with open("sk.pem", "wb") as f:
+ f.write(sk.to_pem())
+
+with open("vk.pem") as f:
+ vk = VerifyingKey.from_pem(f.read())
+with open("vk.pem", "wb") as f:
+ f.write(vk.to_pem())
+```
+
+## Entropy
+
+Creating a signing key with `SigningKey.generate()` requires some form of
+entropy (as opposed to
+`from_secret_exponent`/`from_string`/`from_der`/`from_pem`,
+which are deterministic and do not require an entropy source). The default
+source is `os.urandom()`, but you can pass any other function that behaves
+like `os.urandom` as the `entropy=` argument to do something different. This
+may be useful in unit tests, where you want to achieve repeatable results. The
+`ecdsa.util.PRNG` utility is handy here: it takes a seed and produces a strong
+pseudo-random stream from it:
+
+```python
+from ecdsa.util import PRNG
+from ecdsa import SigningKey
+rng1 = PRNG(b"seed")
+sk1 = SigningKey.generate(entropy=rng1)
+rng2 = PRNG(b"seed")
+sk2 = SigningKey.generate(entropy=rng2)
+# sk1 and sk2 are the same key
+```
+
+Likewise, ECDSA signature generation requires a random number, and each
+signature must use a different one (using the same number twice will
+immediately reveal the private signing key). The `sk.sign()` method takes an
+`entropy=` argument which behaves the same as `SigningKey.generate(entropy=)`.
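+
+A sketch for repeatable test fixtures only (never reuse an entropy stream to
+sign *different* messages, for the reason given above):
+
+```python
+from ecdsa import SigningKey
+from ecdsa.util import PRNG
+
+sk = SigningKey.generate(entropy=PRNG(b"key seed"))
+sig1 = sk.sign(b"message", entropy=PRNG(b"sig seed"))
+sig2 = sk.sign(b"message", entropy=PRNG(b"sig seed"))
+# same key, same message, same entropy stream -> identical signatures
+assert sig1 == sig2
+```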
+
+## Deterministic Signatures
+
+If you call `SigningKey.sign_deterministic(data)` instead of `.sign(data)`,
+the code will generate a deterministic signature instead of a random one.
+This uses the algorithm from RFC6979 to safely generate a unique `k` value,
+derived from the private key and the message being signed. Each time you sign
+the same message with the same key, you will get the same signature (using
+the same `k`).
+
+This may become the default in a future version, as it is not vulnerable to
+failures of the entropy source.
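+
+A minimal sketch (the default hash here is SHA-1, as with `sign()`):
+
+```python
+from ecdsa import SigningKey, NIST256p
+
+sk = SigningKey.generate(curve=NIST256p)
+vk = sk.verifying_key
+sig1 = sk.sign_deterministic(b"message")
+sig2 = sk.sign_deterministic(b"message")
+assert sig1 == sig2  # same key + same message => same signature
+assert vk.verify(sig1, b"message")
+```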
+
+## Examples
+
+Create a NIST192p keypair and immediately save both to disk:
+
+```python
+from ecdsa import SigningKey
+sk = SigningKey.generate()
+vk = sk.verifying_key
+with open("private.pem", "wb") as f:
+ f.write(sk.to_pem())
+with open("public.pem", "wb") as f:
+ f.write(vk.to_pem())
+```
+
+Load a signing key from disk, use it to sign a message (using SHA-1), and write
+the signature to disk:
+
+```python
+from ecdsa import SigningKey
+with open("private.pem") as f:
+ sk = SigningKey.from_pem(f.read())
+with open("message", "rb") as f:
+ message = f.read()
+sig = sk.sign(message)
+with open("signature", "wb") as f:
+ f.write(sig)
+```
+
+Load the verifying key, message, and signature from disk, and verify the
+signature (assume SHA-1 hash):
+
+```python
+from ecdsa import VerifyingKey, BadSignatureError
+with open("public.pem") as f:
+    vk = VerifyingKey.from_pem(f.read())
+with open("message", "rb") as f:
+    message = f.read()
+with open("signature", "rb") as f:
+    sig = f.read()
+try:
+    vk.verify(sig, message)
+    print("good signature")
+except BadSignatureError:
+    print("BAD SIGNATURE")
+```
+
+Create a NIST521p keypair:
+
+```python
+from ecdsa import SigningKey, NIST521p
+sk = SigningKey.generate(curve=NIST521p)
+vk = sk.verifying_key
+```
+
+Create three independent signing keys from a master seed:
+
+```python
+import os
+from ecdsa import NIST192p, SigningKey
+from ecdsa.util import randrange_from_seed__trytryagain
+
+def make_key_from_seed(seed, curve=NIST192p):
+    secexp = randrange_from_seed__trytryagain(seed, curve.order)
+    return SigningKey.from_secret_exponent(secexp, curve)
+
+seed = os.urandom(NIST192p.baselen)  # the single master secret
+sk1 = make_key_from_seed(b"1:" + seed)
+sk2 = make_key_from_seed(b"2:" + seed)
+sk3 = make_key_from_seed(b"3:" + seed)
+```
+
+Load a verifying key from disk and print it using hex encoding in the
+uncompressed and compressed formats (defined in the X9.62 and SEC 1 standards):
+
+```python
+from ecdsa import VerifyingKey
+
+with open("public.pem") as f:
+ vk = VerifyingKey.from_pem(f.read())
+
+print("uncompressed: {0}".format(vk.to_string("uncompressed").hex()))
+print("compressed: {0}".format(vk.to_string("compressed").hex()))
+```
+
+Load a verifying key from a hex string in compressed format and output it in
+uncompressed format:
+
+```python
+from ecdsa import VerifyingKey, NIST256p
+
+comp_str = '022799c0d0ee09772fdd337d4f28dc155581951d07082fb19a38aa396b67e77759'
+vk = VerifyingKey.from_string(bytearray.fromhex(comp_str), curve=NIST256p)
+print(vk.to_string("uncompressed").hex())
+```
+
+ECDH key exchange with a remote party:
+
+```python
+from ecdsa import ECDH, NIST256p
+
+ecdh = ECDH(curve=NIST256p)
+ecdh.generate_private_key()
+local_public_key = ecdh.get_public_key()
+# send `local_public_key` to the remote party and receive `remote_public_key` from them
+with open("remote_public_key.pem") as e:
+ remote_public_key = e.read()
+ecdh.load_received_public_key_pem(remote_public_key)
+secret = ecdh.generate_sharedsecret_bytes()
+```
diff --git a/third_party/python/ecdsa/setup.cfg b/third_party/python/ecdsa/setup.cfg
new file mode 100644
index 0000000000..218838f637
--- /dev/null
+++ b/third_party/python/ecdsa/setup.cfg
@@ -0,0 +1,15 @@
+[wheel]
+universal = 1
+
+[versioneer]
+vcs = git
+style = pep440
+versionfile_source = src/ecdsa/_version.py
+versionfile_build = ecdsa/_version.py
+tag_prefix = python-ecdsa-
+parentdir_prefix = ecdsa-
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/ecdsa/setup.py b/third_party/python/ecdsa/setup.py
new file mode 100755
index 0000000000..a6abc4ecc6
--- /dev/null
+++ b/third_party/python/ecdsa/setup.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+import io
+import os
+
+from setuptools import setup
+import versioneer
+
+commands = versioneer.get_cmdclass().copy()
+
+# Use README.md to set markdown long_description
+directory = os.path.abspath(os.path.dirname(__file__))
+readme_path = os.path.join(directory, "README.md")
+with io.open(readme_path, encoding="utf-8") as read_file:
+ long_description = read_file.read()
+
+setup(name="ecdsa",
+ version=versioneer.get_version(),
+ description="ECDSA cryptographic signature library (pure python)",
+ long_description=long_description,
+ long_description_content_type='text/markdown',
+ author="Brian Warner",
+ author_email="warner@lothar.com",
+ url="http://github.com/warner/python-ecdsa",
+ packages=["ecdsa"],
+ package_dir={"": "src"},
+ license="MIT",
+ cmdclass=commands,
+ python_requires=">=2.6, !=3.0.*, !=3.1.*, !=3.2.*",
+ classifiers=[
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.6",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ ],
+ install_requires=['six>=1.9.0'],
+ extras_require={
+ 'gmpy2': 'gmpy2',
+ 'gmpy': 'gmpy',
+ },
+ )
diff --git a/third_party/python/ecdsa/src/ecdsa/__init__.py b/third_party/python/ecdsa/src/ecdsa/__init__.py
new file mode 100644
index 0000000000..eef5fe38c4
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/__init__.py
@@ -0,0 +1,25 @@
+from .keys import SigningKey, VerifyingKey, BadSignatureError, BadDigestError,\
+ MalformedPointError
+from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p,\
+ SECP256k1, BRAINPOOLP160r1, BRAINPOOLP192r1, BRAINPOOLP224r1,\
+ BRAINPOOLP256r1, BRAINPOOLP320r1, BRAINPOOLP384r1, BRAINPOOLP512r1
+from .ecdh import ECDH, NoKeyError, NoCurveError, InvalidCurveError, \
+ InvalidSharedSecretError
+from .der import UnexpectedDER
+
+# This code comes from http://github.com/warner/python-ecdsa
+from ._version import get_versions
+__version__ = get_versions()['version']
+del get_versions
+
+__all__ = ["curves", "der", "ecdsa", "ellipticcurve", "keys", "numbertheory",
+ "test_pyecdsa", "util", "six"]
+
+_hush_pyflakes = [SigningKey, VerifyingKey, BadSignatureError, BadDigestError,
+ MalformedPointError, UnexpectedDER, InvalidCurveError,
+ NoKeyError, InvalidSharedSecretError, ECDH, NoCurveError,
+ NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1,
+ BRAINPOOLP160r1, BRAINPOOLP192r1, BRAINPOOLP224r1,
+ BRAINPOOLP256r1, BRAINPOOLP320r1, BRAINPOOLP384r1,
+ BRAINPOOLP512r1]
+del _hush_pyflakes
diff --git a/third_party/python/ecdsa/src/ecdsa/_compat.py b/third_party/python/ecdsa/src/ecdsa/_compat.py
new file mode 100644
index 0000000000..965d8c47b5
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/_compat.py
@@ -0,0 +1,39 @@
+"""
+Common functions for providing cross-python version compatibility.
+"""
+import sys
+from six import integer_types
+
+
+def str_idx_as_int(string, index):
+ """Take index'th byte from string, return as integer"""
+ val = string[index]
+ if isinstance(val, integer_types):
+ return val
+ return ord(val)
+
+
+if sys.version_info < (3, 0):
+ def normalise_bytes(buffer_object):
+ """Cast the input into array of bytes."""
+ # flake8 runs on py3 where `buffer` indeed doesn't exist...
+ return buffer(buffer_object) # noqa: F821
+
+ def hmac_compat(ret):
+ return ret
+
+else:
+ if sys.version_info < (3, 4):
+ # on python 3.3 hmac.hmac.update() accepts only bytes, on newer
+ # versions it does accept memoryview() also
+ def hmac_compat(data):
+ if not isinstance(data, bytes):
+ return bytes(data)
+ return data
+ else:
+ def hmac_compat(data):
+ return data
+
+ def normalise_bytes(buffer_object):
+ """Cast the input into array of bytes."""
+ return memoryview(buffer_object).cast('B')
diff --git a/third_party/python/ecdsa/src/ecdsa/_rwlock.py b/third_party/python/ecdsa/src/ecdsa/_rwlock.py
new file mode 100644
index 0000000000..e4ef78dcfc
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/_rwlock.py
@@ -0,0 +1,85 @@
+# Copyright Mateusz Kobos, (c) 2011
+# https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/
+# released under the MIT licence
+
+import threading
+
+
+__author__ = "Mateusz Kobos"
+
+
+class RWLock:
+ """
+ Read-Write locking primitive
+
+ Synchronization object used in a solution of so-called second
+ readers-writers problem. In this problem, many readers can simultaneously
+ access a share, and a writer has an exclusive access to this share.
+ Additionally, the following constraints should be met:
+ 1) no reader should be kept waiting if the share is currently opened for
+ reading unless a writer is also waiting for the share,
+ 2) no writer should be kept waiting for the share longer than absolutely
+ necessary.
+
+ The implementation is based on [1, secs. 4.2.2, 4.2.6, 4.2.7]
+ with a modification -- adding an additional lock (C{self.__readers_queue})
+ -- in accordance with [2].
+
+ Sources:
+ [1] A.B. Downey: "The little book of semaphores", Version 2.1.5, 2008
+ [2] P.J. Courtois, F. Heymans, D.L. Parnas:
+ "Concurrent Control with 'Readers' and 'Writers'",
+ Communications of the ACM, 1971 (via [3])
+ [3] http://en.wikipedia.org/wiki/Readers-writers_problem
+ """
+
+ def __init__(self):
+ """
+ A lock giving an even higher priority to the writer in certain
+ cases (see [2] for a discussion).
+ """
+ self.__read_switch = _LightSwitch()
+ self.__write_switch = _LightSwitch()
+ self.__no_readers = threading.Lock()
+ self.__no_writers = threading.Lock()
+ self.__readers_queue = threading.Lock()
+
+ def reader_acquire(self):
+ self.__readers_queue.acquire()
+ self.__no_readers.acquire()
+ self.__read_switch.acquire(self.__no_writers)
+ self.__no_readers.release()
+ self.__readers_queue.release()
+
+ def reader_release(self):
+ self.__read_switch.release(self.__no_writers)
+
+ def writer_acquire(self):
+ self.__write_switch.acquire(self.__no_readers)
+ self.__no_writers.acquire()
+
+ def writer_release(self):
+ self.__no_writers.release()
+ self.__write_switch.release(self.__no_readers)
+
+
+class _LightSwitch:
+ """An auxiliary "light switch"-like object. The first thread turns on the
+ "switch", the last one turns it off (see [1, sec. 4.2.2] for details)."""
+ def __init__(self):
+ self.__counter = 0
+ self.__mutex = threading.Lock()
+
+ def acquire(self, lock):
+ self.__mutex.acquire()
+ self.__counter += 1
+ if self.__counter == 1:
+ lock.acquire()
+ self.__mutex.release()
+
+ def release(self, lock):
+ self.__mutex.acquire()
+ self.__counter -= 1
+ if self.__counter == 0:
+ lock.release()
+ self.__mutex.release()
diff --git a/third_party/python/ecdsa/src/ecdsa/_version.py b/third_party/python/ecdsa/src/ecdsa/_version.py
new file mode 100644
index 0000000000..038d62af2c
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/_version.py
@@ -0,0 +1,21 @@
+
+# This file was generated by 'versioneer.py' (0.17) from
+# revision-control system data, or from the parent directory name of an
+# unpacked source archive. Distribution tarballs contain a pre-generated copy
+# of this file.
+
+import json
+
+version_json = '''
+{
+ "date": "2020-01-02T17:05:04+0100",
+ "dirty": false,
+ "error": null,
+ "full-revisionid": "93b04ba3ddb7c2716e07761393a179c061718c34",
+ "version": "0.15"
+}
+''' # END VERSION_JSON
+
+
+def get_versions():
+ return json.loads(version_json)
diff --git a/third_party/python/ecdsa/src/ecdsa/curves.py b/third_party/python/ecdsa/src/ecdsa/curves.py
new file mode 100644
index 0000000000..173a2cda88
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/curves.py
@@ -0,0 +1,128 @@
+from __future__ import division
+
+from . import der, ecdsa
+from .util import orderlen
+
+
+# orderlen was defined in this module previously, so keep it in __all__,
+# will need to mark it as deprecated later
+__all__ = ["UnknownCurveError", "orderlen", "Curve", "NIST192p",
+ "NIST224p", "NIST256p", "NIST384p", "NIST521p", "curves",
+ "find_curve", "SECP256k1", "BRAINPOOLP160r1", "BRAINPOOLP192r1",
+ "BRAINPOOLP224r1", "BRAINPOOLP256r1", "BRAINPOOLP320r1",
+ "BRAINPOOLP384r1", "BRAINPOOLP512r1"]
+
+
+class UnknownCurveError(Exception):
+ pass
+
+
+class Curve:
+ def __init__(self, name, curve, generator, oid, openssl_name=None):
+ self.name = name
+ self.openssl_name = openssl_name # maybe None
+ self.curve = curve
+ self.generator = generator
+ self.order = generator.order()
+ self.baselen = orderlen(self.order)
+ self.verifying_key_length = 2*self.baselen
+ self.signature_length = 2*self.baselen
+ self.oid = oid
+ self.encoded_oid = der.encode_oid(*oid)
+
+ def __repr__(self):
+ return self.name
+
+
+# the NIST curves
+NIST192p = Curve("NIST192p", ecdsa.curve_192,
+ ecdsa.generator_192,
+ (1, 2, 840, 10045, 3, 1, 1), "prime192v1")
+
+
+NIST224p = Curve("NIST224p", ecdsa.curve_224,
+ ecdsa.generator_224,
+ (1, 3, 132, 0, 33), "secp224r1")
+
+
+NIST256p = Curve("NIST256p", ecdsa.curve_256,
+ ecdsa.generator_256,
+ (1, 2, 840, 10045, 3, 1, 7), "prime256v1")
+
+
+NIST384p = Curve("NIST384p", ecdsa.curve_384,
+ ecdsa.generator_384,
+ (1, 3, 132, 0, 34), "secp384r1")
+
+
+NIST521p = Curve("NIST521p", ecdsa.curve_521,
+ ecdsa.generator_521,
+ (1, 3, 132, 0, 35), "secp521r1")
+
+
+SECP256k1 = Curve("SECP256k1", ecdsa.curve_secp256k1,
+ ecdsa.generator_secp256k1,
+ (1, 3, 132, 0, 10), "secp256k1")
+
+
+BRAINPOOLP160r1 = Curve("BRAINPOOLP160r1",
+ ecdsa.curve_brainpoolp160r1,
+ ecdsa.generator_brainpoolp160r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 1),
+ "brainpoolP160r1")
+
+
+BRAINPOOLP192r1 = Curve("BRAINPOOLP192r1",
+ ecdsa.curve_brainpoolp192r1,
+ ecdsa.generator_brainpoolp192r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 3),
+ "brainpoolP192r1")
+
+
+BRAINPOOLP224r1 = Curve("BRAINPOOLP224r1",
+ ecdsa.curve_brainpoolp224r1,
+ ecdsa.generator_brainpoolp224r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 5),
+ "brainpoolP224r1")
+
+
+BRAINPOOLP256r1 = Curve("BRAINPOOLP256r1",
+ ecdsa.curve_brainpoolp256r1,
+ ecdsa.generator_brainpoolp256r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 7),
+ "brainpoolP256r1")
+
+
+BRAINPOOLP320r1 = Curve("BRAINPOOLP320r1",
+ ecdsa.curve_brainpoolp320r1,
+ ecdsa.generator_brainpoolp320r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 9),
+ "brainpoolP320r1")
+
+
+BRAINPOOLP384r1 = Curve("BRAINPOOLP384r1",
+ ecdsa.curve_brainpoolp384r1,
+ ecdsa.generator_brainpoolp384r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 11),
+ "brainpoolP384r1")
+
+
+BRAINPOOLP512r1 = Curve("BRAINPOOLP512r1",
+ ecdsa.curve_brainpoolp512r1,
+ ecdsa.generator_brainpoolp512r1,
+ (1, 3, 36, 3, 3, 2, 8, 1, 1, 13),
+ "brainpoolP512r1")
+
+
+curves = [NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1,
+ BRAINPOOLP160r1, BRAINPOOLP192r1, BRAINPOOLP224r1, BRAINPOOLP256r1,
+ BRAINPOOLP320r1, BRAINPOOLP384r1, BRAINPOOLP512r1]
+
+
+def find_curve(oid_curve):
+ for c in curves:
+ if c.oid == oid_curve:
+ return c
+ raise UnknownCurveError("I don't know about the curve with oid %s."
+ "I only know about these: %s" %
+ (oid_curve, [c.name for c in curves]))
diff --git a/third_party/python/ecdsa/src/ecdsa/der.py b/third_party/python/ecdsa/src/ecdsa/der.py
new file mode 100644
index 0000000000..ad75b37b56
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/der.py
@@ -0,0 +1,384 @@
+from __future__ import division
+
+import binascii
+import base64
+import warnings
+from itertools import chain
+from six import int2byte, b, text_type
+from ._compat import str_idx_as_int
+
+
+class UnexpectedDER(Exception):
+ pass
+
+
+def encode_constructed(tag, value):
+ return int2byte(0xa0+tag) + encode_length(len(value)) + value
+
+
+def encode_integer(r):
+ assert r >= 0 # can't support negative numbers yet
+ h = ("%x" % r).encode()
+ if len(h) % 2:
+ h = b("0") + h
+ s = binascii.unhexlify(h)
+ num = str_idx_as_int(s, 0)
+ if num <= 0x7f:
+ return b("\x02") + encode_length(len(s)) + s
+ else:
+ # DER integers are two's complement, so if the first byte is
+ # 0x80-0xff then we need an extra 0x00 byte to prevent it from
+ # looking negative.
+ return b("\x02") + encode_length(len(s)+1) + b("\x00") + s
+
+
+# sentry object to check if an argument was specified (used to detect
+# deprecated calling convention)
+_sentry = object()
+
+
+def encode_bitstring(s, unused=_sentry):
+ """
+ Encode a binary string as a BIT STRING using :term:`DER` encoding.
+
+ Note, because there is no native Python object that can encode an actual
+ bit string, this function only accepts byte strings as the `s` argument.
+ The byte string is the actual bit string that will be encoded, padded
+ on the right (least significant bits, looking from big endian perspective)
+ to the first full byte. If the bit string has a bit length that is multiple
+ of 8, then the padding should not be included. For correct DER encoding
+ the padding bits MUST be set to 0.
+
+ Number of bits of padding need to be provided as the `unused` parameter.
+ In case they are specified as None, it means the number of unused bits
+ is already encoded in the string as the first byte.
+
+ The deprecated call convention specifies just the `s` parameters and
+ encodes the number of unused bits as first parameter (same convention
+ as with None).
+
+ Empty string must be encoded with `unused` specified as 0.
+
+ Future version of python-ecdsa will make specifying the `unused` argument
+ mandatory.
+
+ :param s: bytes to encode
+ :type s: bytes like object
+ :param unused: number of bits at the end of `s` that are unused, must be
+ between 0 and 7 (inclusive)
+ :type unused: int or None
+
+ :raises ValueError: when `unused` is too large or too small
+
+ :return: `s` encoded using DER
+ :rtype: bytes
+ """
+ encoded_unused = b''
+ len_extra = 0
+ if unused is _sentry:
+ warnings.warn("Legacy call convention used, unused= needs to be "
+ "specified",
+ DeprecationWarning)
+ elif unused is not None:
+ if not 0 <= unused <= 7:
+ raise ValueError("unused must be integer between 0 and 7")
+ if unused:
+ if not s:
+ raise ValueError("unused is non-zero but s is empty")
+ last = str_idx_as_int(s, -1)
+ if last & (2 ** unused - 1):
+ raise ValueError("unused bits must be zeros in DER")
+ encoded_unused = int2byte(unused)
+ len_extra = 1
+ return b("\x03") + encode_length(len(s) + len_extra) + encoded_unused + s
+
+
+def encode_octet_string(s):
+ return b("\x04") + encode_length(len(s)) + s
+
+
+def encode_oid(first, second, *pieces):
+ assert 0 <= first < 2 and 0 <= second <= 39 or first == 2 and 0 <= second
+ body = b''.join(chain([encode_number(40*first+second)],
+ (encode_number(p) for p in pieces)))
+ return b'\x06' + encode_length(len(body)) + body
+
+
+def encode_sequence(*encoded_pieces):
+ total_len = sum([len(p) for p in encoded_pieces])
+ return b('\x30') + encode_length(total_len) + b('').join(encoded_pieces)
+
+
+def encode_number(n):
+ b128_digits = []
+ while n:
+ b128_digits.insert(0, (n & 0x7f) | 0x80)
+ n = n >> 7
+ if not b128_digits:
+ b128_digits.append(0)
+ b128_digits[-1] &= 0x7f
+ return b('').join([int2byte(d) for d in b128_digits])
+
+
+def remove_constructed(string):
+ s0 = str_idx_as_int(string, 0)
+ if (s0 & 0xe0) != 0xa0:
+ raise UnexpectedDER("wanted type 'constructed tag' (0xa0-0xbf), "
+ "got 0x%02x" % s0)
+ tag = s0 & 0x1f
+ length, llen = read_length(string[1:])
+ body = string[1+llen:1+llen+length]
+ rest = string[1+llen+length:]
+ return tag, body, rest
+
+
+def remove_sequence(string):
+ if not string:
+ raise UnexpectedDER("Empty string does not encode a sequence")
+ if string[:1] != b"\x30":
+ n = str_idx_as_int(string, 0)
+ raise UnexpectedDER("wanted type 'sequence' (0x30), got 0x%02x" % n)
+ length, lengthlength = read_length(string[1:])
+ if length > len(string) - 1 - lengthlength:
+ raise UnexpectedDER("Length longer than the provided buffer")
+ endseq = 1+lengthlength+length
+ return string[1+lengthlength:endseq], string[endseq:]
+
+
+def remove_octet_string(string):
+ if string[:1] != b"\x04":
+ n = str_idx_as_int(string, 0)
+ raise UnexpectedDER("wanted type 'octetstring' (0x04), got 0x%02x" % n)
+ length, llen = read_length(string[1:])
+ body = string[1+llen:1+llen+length]
+ rest = string[1+llen+length:]
+ return body, rest
+
+
+def remove_object(string):
+ if not string:
+ raise UnexpectedDER(
+ "Empty string does not encode an object identifier")
+ if string[:1] != b"\x06":
+ n = str_idx_as_int(string, 0)
+ raise UnexpectedDER("wanted type 'object' (0x06), got 0x%02x" % n)
+ length, lengthlength = read_length(string[1:])
+ body = string[1+lengthlength:1+lengthlength+length]
+ rest = string[1+lengthlength+length:]
+ if not body:
+ raise UnexpectedDER("Empty object identifier")
+ if len(body) != length:
+ raise UnexpectedDER(
+ "Length of object identifier longer than the provided buffer")
+ numbers = []
+ while body:
+ n, ll = read_number(body)
+ numbers.append(n)
+ body = body[ll:]
+ n0 = numbers.pop(0)
+ if n0 < 80:
+ first = n0 // 40
+ else:
+ first = 2
+ second = n0 - (40 * first)
+ numbers.insert(0, first)
+ numbers.insert(1, second)
+ return tuple(numbers), rest
+
+
+def remove_integer(string):
+ if not string:
+ raise UnexpectedDER("Empty string is an invalid encoding of an "
+ "integer")
+ if string[:1] != b"\x02":
+ n = str_idx_as_int(string, 0)
+ raise UnexpectedDER("wanted type 'integer' (0x02), got 0x%02x" % n)
+ length, llen = read_length(string[1:])
+ if length > len(string) - 1 - llen:
+ raise UnexpectedDER("Length longer than provided buffer")
+ if length == 0:
+ raise UnexpectedDER("0-byte long encoding of integer")
+ numberbytes = string[1+llen:1+llen+length]
+ rest = string[1+llen+length:]
+ msb = str_idx_as_int(numberbytes, 0)
+ if not msb < 0x80:
+ raise UnexpectedDER("Negative integers are not supported")
+ # check if the encoding is the minimal one (DER requirement)
+ if length > 1 and not msb:
+ # leading zero byte is allowed if the integer would have been
+ # considered a negative number otherwise
+ smsb = str_idx_as_int(numberbytes, 1)
+ if smsb < 0x80:
+ raise UnexpectedDER("Invalid encoding of integer, unnecessary "
+ "zero padding bytes")
+ return int(binascii.hexlify(numberbytes), 16), rest
+
+
+def read_number(string):
+ number = 0
+ llen = 0
+ if str_idx_as_int(string, 0) == 0x80:
+ raise UnexpectedDER("Non minimal encoding of OID subidentifier")
+ # base-128 big endian, with most significant bit set in all but the last
+ # byte
+ while True:
+ if llen >= len(string):
+ raise UnexpectedDER("ran out of length bytes")
+ number = number << 7
+ d = str_idx_as_int(string, llen)
+ number += (d & 0x7f)
+ llen += 1
+ if not d & 0x80:
+ break
+ return number, llen
+
+
+def encode_length(l):
+ assert l >= 0
+ if l < 0x80:
+ return int2byte(l)
+ s = ("%x" % l).encode()
+ if len(s) % 2:
+ s = b("0") + s
+ s = binascii.unhexlify(s)
+ llen = len(s)
+ return int2byte(0x80 | llen) + s
+
+
+def read_length(string):
+ if not string:
+ raise UnexpectedDER("Empty string can't encode valid length value")
+ num = str_idx_as_int(string, 0)
+ if not (num & 0x80):
+ # short form
+ return (num & 0x7f), 1
+ # else long-form: b0&0x7f is number of additional base256 length bytes,
+ # big-endian
+ llen = num & 0x7f
+ if not llen:
+ raise UnexpectedDER("Invalid length encoding, length of length is 0")
+ if llen > len(string)-1:
+ raise UnexpectedDER("Length of length longer than provided buffer")
+ # verify that the encoding is minimal possible (DER requirement)
+ msb = str_idx_as_int(string, 1)
+ if not msb or llen == 1 and msb < 0x80:
+ raise UnexpectedDER("Not minimal encoding of length")
+ return int(binascii.hexlify(string[1:1+llen]), 16), 1+llen
+
+
+def remove_bitstring(string, expect_unused=_sentry):
+ """
+ Remove a BIT STRING object from `string` following :term:`DER`.
+
+ The `expect_unused` can be used to specify if the bit string should
+ have the amount of unused bits decoded or not. If it's an integer, any
+ read BIT STRING that has number of unused bits different from specified
+ value will cause UnexpectedDER exception to be raised (this is especially
+ useful when decoding BIT STRINGS that have DER encoded object in them;
+ DER encoding is byte oriented, so the unused bits will always equal 0).
+
+ If the `expect_unused` is specified as None, the first element returned
+ will be a tuple, with the first value being the extracted bit string
+ while the second value will be the decoded number of unused bits.
+
+ If the `expect_unused` is unspecified, the decoding of byte with
+ number of unused bits will not be attempted and the bit string will be
+ returned as-is, the callee will be required to decode it and verify its
+ correctness.
+
+ Future version of python will require the `expected_unused` parameter
+ to be specified.
+
+ :param string: string of bytes to extract the BIT STRING from
+ :type string: bytes like object
+ :param expect_unused: number of bits that should be unused in the BIT
+ STRING, or None, to return it to caller
+ :type expect_unused: int or None
+
+ :raises UnexpectedDER: when the encoding does not follow DER.
+
+ :return: a tuple with first element being the extracted bit string and
+ the second being the remaining bytes in the string (if any); if the
+ `expect_unused` is specified as None, the first element of the returned
+ tuple will be a tuple itself, with first element being the bit string
+ as bytes and the second element being the number of unused bits at the
+ end of the byte array as an integer
+ :rtype: tuple
+ """
+ if not string:
+ raise UnexpectedDER("Empty string does not encode a bitstring")
+ if expect_unused is _sentry:
+ warnings.warn("Legacy call convention used, expect_unused= needs to be"
+ " specified",
+ DeprecationWarning)
+ num = str_idx_as_int(string, 0)
+ if string[:1] != b"\x03":
+ raise UnexpectedDER("wanted bitstring (0x03), got 0x%02x" % num)
+ length, llen = read_length(string[1:])
+ if not length:
+ raise UnexpectedDER("Invalid length of bit string, can't be 0")
+ body = string[1+llen:1+llen+length]
+ rest = string[1+llen+length:]
+ if expect_unused is not _sentry:
+ unused = str_idx_as_int(body, 0)
+ if not 0 <= unused <= 7:
+ raise UnexpectedDER("Invalid encoding of unused bits")
+ if expect_unused is not None and expect_unused != unused:
+ raise UnexpectedDER("Unexpected number of unused bits")
+ body = body[1:]
+ if unused:
+ if not body:
+ raise UnexpectedDER("Invalid encoding of empty bit string")
+ last = str_idx_as_int(body, -1)
+ # verify that all the unused bits are set to zero (DER requirement)
+ if last & (2 ** unused - 1):
+ raise UnexpectedDER("Non zero padding bits in bit string")
+ if expect_unused is None:
+ body = (body, unused)
+ return body, rest
+
+# SEQUENCE([1, STRING(secexp), cont[0], OBJECT(curvename), cont[1], BINTSTRING)
+
+
+# signatures: (from RFC3279)
+# ansi-X9-62 OBJECT IDENTIFIER ::= {
+# iso(1) member-body(2) us(840) 10045 }
+#
+# id-ecSigType OBJECT IDENTIFIER ::= {
+# ansi-X9-62 signatures(4) }
+# ecdsa-with-SHA1 OBJECT IDENTIFIER ::= {
+# id-ecSigType 1 }
+## so 1,2,840,10045,4,1
+## so 0x42, .. ..
+
+# Ecdsa-Sig-Value ::= SEQUENCE {
+# r INTEGER,
+# s INTEGER }
+
+# id-public-key-type OBJECT IDENTIFIER ::= { ansi-X9.62 2 }
+#
+# id-ecPublicKey OBJECT IDENTIFIER ::= { id-publicKeyType 1 }
+
+# I think the secp224r1 identifier is (t=06,l=05,v=2b81040021)
+# secp224r1 OBJECT IDENTIFIER ::= {
+# iso(1) identified-organization(3) certicom(132) curve(0) 33 }
+# and the secp384r1 is (t=06,l=05,v=2b81040022)
+# secp384r1 OBJECT IDENTIFIER ::= {
+# iso(1) identified-organization(3) certicom(132) curve(0) 34 }
+
+def unpem(pem):
+ if isinstance(pem, text_type):
+ pem = pem.encode()
+
+ d = b("").join([l.strip() for l in pem.split(b("\n"))
+ if l and not l.startswith(b("-----"))])
+ return base64.b64decode(d)
+
+
+def topem(der, name):
+ b64 = base64.b64encode(der)
+ lines = [("-----BEGIN %s-----\n" % name).encode()]
+ lines.extend([b64[start:start+64]+b("\n")
+ for start in range(0, len(b64), 64)])
+ lines.append(("-----END %s-----\n" % name).encode())
+ return b("").join(lines)
diff --git a/third_party/python/ecdsa/src/ecdsa/ecdh.py b/third_party/python/ecdsa/src/ecdsa/ecdh.py
new file mode 100644
index 0000000000..88848f5503
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/ecdh.py
@@ -0,0 +1,306 @@
+"""
+Class for performing Elliptic-curve Diffie-Hellman (ECDH) operations.
+"""
+
+from .util import number_to_string
+from .ellipticcurve import INFINITY
+from .keys import SigningKey, VerifyingKey
+
+
+__all__ = ["ECDH", "NoKeyError", "NoCurveError", "InvalidCurveError",
+ "InvalidSharedSecretError"]
+
+
+class NoKeyError(Exception):
+ """ECDH. Key not found but it is needed for operation."""
+
+ pass
+
+
+class NoCurveError(Exception):
+ """ECDH. Curve not set but it is needed for operation."""
+
+ pass
+
+
+class InvalidCurveError(Exception):
+ """ECDH. Raised in case the public and private keys use different curves."""
+
+ pass
+
+
+class InvalidSharedSecretError(Exception):
+ """ECDH. Raised in case the shared secret we obtained is an INFINITY."""
+
+ pass
+
+
+class ECDH(object):
+ """
+ Elliptic-curve Diffie-Hellman (ECDH). A key agreement protocol.
+
+ Allows two parties, each having an elliptic-curve public-private key
+ pair, to establish a shared secret over an insecure channel
+    """
+
+ def __init__(self, curve=None, private_key=None, public_key=None):
+ """
+ ECDH init.
+
+ Call can be initialised without parameters, then the first operation
+ (loading either key) will set the used curve.
+ All parameters must be ultimately set before shared secret
+ calculation will be allowed.
+
+ :param curve: curve for operations
+ :type curve: Curve
+ :param private_key: `my` private key for ECDH
+ :type private_key: SigningKey
+ :param public_key: `their` public key for ECDH
+ :type public_key: VerifyingKey
+ """
+ self.curve = curve
+ self.private_key = None
+ self.public_key = None
+ if private_key:
+ self.load_private_key(private_key)
+ if public_key:
+ self.load_received_public_key(public_key)
+
+ def _get_shared_secret(self, remote_public_key):
+ if not self.private_key:
+ raise NoKeyError(
+ "Private key needs to be set to create shared secret")
+ if not self.public_key:
+ raise NoKeyError(
+ "Public key needs to be set to create shared secret")
+ if not (self.private_key.curve == self.curve == remote_public_key.curve):
+ raise InvalidCurveError(
+ "Curves for public key and private key is not equal.")
+
+ # shared secret = PUBKEYtheirs * PRIVATEKEYours
+ result = remote_public_key.pubkey.point * self.private_key.privkey.secret_multiplier
+ if result == INFINITY:
+ raise InvalidSharedSecretError(
+ "Invalid shared secret (INFINITY).")
+
+ return result.x()
+
+ def set_curve(self, key_curve):
+ """
+ Set the working curve for ecdh operations.
+
+ :param key_curve: curve from `curves` module
+ :type key_curve: Curve
+ """
+ self.curve = key_curve
+
+ def generate_private_key(self):
+ """
+ Generate local private key for ecdh operation with curve that was set.
+
+ :raises NoCurveError: Curve must be set before key generation.
+
+ :return: public (verifying) key from this private key.
+ :rtype: VerifyingKey object
+ """
+ if not self.curve:
+ raise NoCurveError("Curve must be set prior to key generation.")
+ return self.load_private_key(SigningKey.generate(curve=self.curve))
+
+ def load_private_key(self, private_key):
+ """
+ Load private key from SigningKey (keys.py) object.
+
+ Needs to have the same curve as was set with set_curve method.
+ If curve is not set - it sets from this SigningKey
+
+ :param private_key: Initialised SigningKey class
+ :type private_key: SigningKey
+
+ :raises InvalidCurveError: private_key curve not the same as self.curve
+
+ :return: public (verifying) key from this private key.
+ :rtype: VerifyingKey object
+ """
+ if not self.curve:
+ self.curve = private_key.curve
+ if self.curve != private_key.curve:
+ raise InvalidCurveError("Curve mismatch.")
+ self.private_key = private_key
+ return self.private_key.get_verifying_key()
+
+ def load_private_key_bytes(self, private_key):
+ """
+ Load private key from byte string.
+
+ Uses current curve and checks if the provided key matches
+ the curve of ECDH key agreement.
+ Key loads via from_string method of SigningKey class
+
+ :param private_key: private key in bytes string format
+ :type private_key: :term:`bytes-like object`
+
+ :raises NoCurveError: Curve must be set before loading.
+
+ :return: public (verifying) key from this private key.
+ :rtype: VerifyingKey object
+ """
+ if not self.curve:
+ raise NoCurveError("Curve must be set prior to key load.")
+ return self.load_private_key(
+ SigningKey.from_string(private_key, curve=self.curve))
+
+ def load_private_key_der(self, private_key_der):
+ """
+ Load private key from DER byte string.
+
+ Compares the curve of the DER-encoded key with the ECDH set curve,
+ uses the former if unset.
+
+ Note, the only DER format supported is the RFC5915
+ Look at keys.py:SigningKey.from_der()
+
+ :param private_key_der: string with the DER encoding of private ECDSA key
+ :type private_key_der: string
+
+ :raises InvalidCurveError: private_key curve not the same as self.curve
+
+ :return: public (verifying) key from this private key.
+ :rtype: VerifyingKey object
+ """
+ return self.load_private_key(SigningKey.from_der(private_key_der))
+
+ def load_private_key_pem(self, private_key_pem):
+ """
+ Load private key from PEM string.
+
+ Compares the curve of the DER-encoded key with the ECDH set curve,
+ uses the former if unset.
+
+ Note, the only PEM format supported is the RFC5915
+ Look at keys.py:SigningKey.from_pem()
+ it needs to have `EC PRIVATE KEY` section
+
+ :param private_key_pem: string with PEM-encoded private ECDSA key
+ :type private_key_pem: string
+
+ :raises InvalidCurveError: private_key curve not the same as self.curve
+
+ :return: public (verifying) key from this private key.
+ :rtype: VerifyingKey object
+ """
+ return self.load_private_key(SigningKey.from_pem(private_key_pem))
+
+ def get_public_key(self):
+ """
+ Provides a public key that matches the local private key.
+
+ Needs to be sent to the remote party.
+
+ :return: public (verifying) key from local private key.
+ :rtype: VerifyingKey object
+ """
+ return self.private_key.get_verifying_key()
+
+ def load_received_public_key(self, public_key):
+ """
+ Load public key from VerifyingKey (keys.py) object.
+
+ Needs to have the same curve as set as current for ecdh operation.
+ If curve is not set - it sets it from VerifyingKey.
+
+ :param public_key: Initialised VerifyingKey class
+ :type public_key: VerifyingKey
+
+ :raises InvalidCurveError: public_key curve not the same as self.curve
+ """
+ if not self.curve:
+ self.curve = public_key.curve
+ if self.curve != public_key.curve:
+ raise InvalidCurveError("Curve mismatch.")
+ self.public_key = public_key
+
+ def load_received_public_key_bytes(self, public_key_str):
+ """
+ Load public key from byte string.
+
+ Uses current curve and checks if key length corresponds to
+ the current curve.
+ Key loads via from_string method of VerifyingKey class
+
+ :param public_key_str: public key in bytes string format
+ :type public_key_str: :term:`bytes-like object`
+ """
+ return self.load_received_public_key(
+ VerifyingKey.from_string(public_key_str, self.curve))
+
+ def load_received_public_key_der(self, public_key_der):
+ """
+ Load public key from DER byte string.
+
+ Compares the curve of the DER-encoded key with the ECDH set curve,
+ uses the former if unset.
+
+ Note, the only DER format supported is the RFC5912
+ Look at keys.py:VerifyingKey.from_der()
+
+ :param public_key_der: string with the DER encoding of public ECDSA key
+ :type public_key_der: string
+
+ :raises InvalidCurveError: public_key curve not the same as self.curve
+ """
+ return self.load_received_public_key(VerifyingKey.from_der(public_key_der))
+
+ def load_received_public_key_pem(self, public_key_pem):
+ """
+ Load public key from PEM string.
+
+ Compares the curve of the PEM-encoded key with the ECDH set curve,
+ uses the former if unset.
+
+ Note, the only PEM format supported is the RFC5912
+ Look at keys.py:VerifyingKey.from_pem()
+
+ :param public_key_pem: string with PEM-encoded public ECDSA key
+ :type public_key_pem: string
+
+ :raises InvalidCurveError: public_key curve not the same as self.curve
+ """
+ return self.load_received_public_key(VerifyingKey.from_pem(public_key_pem))
+
+ def generate_sharedsecret_bytes(self):
+ """
+ Generate shared secret from local private key and remote public key.
+
+ The objects needs to have both private key and received public key
+ before generation is allowed.
+
+ :raises InvalidCurveError: public_key curve not the same as self.curve
+ :raises NoKeyError: public_key or private_key is not set
+
+ :return: shared secret
+ :rtype: byte string
+ """
+ return number_to_string(
+ self.generate_sharedsecret(),
+ self.private_key.curve.order)
+
+ def generate_sharedsecret(self):
+ """
+ Generate shared secret from local private key and remote public key.
+
+ The objects needs to have both private key and received public key
+ before generation is allowed.
+
+ It's the same for local and remote party.
+ shared secret(local private key, remote public key ) ==
+ shared secret (local public key, remote private key)
+
+ :raises InvalidCurveError: public_key curve not the same as self.curve
+ :raises NoKeyError: public_key or private_key is not set
+
+ :return: shared secret
+ :rtype: int
+ """
+ return self._get_shared_secret(self.public_key)
diff --git a/third_party/python/ecdsa/src/ecdsa/ecdsa.py b/third_party/python/ecdsa/src/ecdsa/ecdsa.py
new file mode 100644
index 0000000000..4e9bab0898
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/ecdsa.py
@@ -0,0 +1,446 @@
+#! /usr/bin/env python
+
+"""
+Implementation of Elliptic-Curve Digital Signatures.
+
+Classes and methods for elliptic-curve signatures:
+private keys, public keys, signatures,
+NIST prime-modulus curves with modulus lengths of
+192, 224, 256, 384, and 521 bits.
+
+Example:
+
+ # (In real-life applications, you would probably want to
+ # protect against defects in SystemRandom.)
+ from random import SystemRandom
+ randrange = SystemRandom().randrange
+
+ # Generate a public/private key pair using the NIST Curve P-192:
+
+ g = generator_192
+ n = g.order()
+ secret = randrange( 1, n )
+ pubkey = Public_key( g, g * secret )
+ privkey = Private_key( pubkey, secret )
+
+ # Signing a hash value:
+
+ hash = randrange( 1, n )
+ signature = privkey.sign( hash, randrange( 1, n ) )
+
+ # Verifying a signature for a hash value:
+
+ if pubkey.verifies( hash, signature ):
+ print_("Demo verification succeeded.")
+ else:
+ print_("*** Demo verification failed.")
+
+ # Verification fails if the hash value is modified:
+
+ if pubkey.verifies( hash-1, signature ):
+ print_("**** Demo verification failed to reject tampered hash.")
+ else:
+ print_("Demo verification correctly rejected tampered hash.")
+
+Version of 2009.05.16.
+
+Revision history:
+ 2005.12.31 - Initial version.
+ 2008.11.25 - Substantial revisions introducing new classes.
+ 2009.05.16 - Warn against using random.randrange in real applications.
+ 2009.05.17 - Use random.SystemRandom by default.
+
+Written in 2005 by Peter Pearson and placed in the public domain.
+"""
+
+from six import int2byte, b
+from . import ellipticcurve
+from . import numbertheory
+from .util import bit_length
+
+
+class RSZeroError(RuntimeError):
+ pass
+
+
+class InvalidPointError(RuntimeError):
+ pass
+
+
+class Signature(object):
+ """ECDSA signature.
+ """
+ def __init__(self, r, s):
+ self.r = r
+ self.s = s
+
+ def recover_public_keys(self, hash, generator):
+ """Returns two public keys for which the signature is valid
+ hash is signed hash
+ generator is the used generator of the signature
+ """
+ curve = generator.curve()
+ n = generator.order()
+ r = self.r
+ s = self.s
+ e = hash
+ x = r
+
+ # Compute the curve point with x as x-coordinate
+ alpha = (pow(x, 3, curve.p()) + (curve.a() * x) + curve.b()) % curve.p()
+ beta = numbertheory.square_root_mod_prime(alpha, curve.p())
+ y = beta if beta % 2 == 0 else curve.p() - beta
+
+ # Compute the public key
+ R1 = ellipticcurve.PointJacobi(curve, x, y, 1, n)
+ Q1 = numbertheory.inverse_mod(r, n) * (s * R1 + (-e % n) * generator)
+ Pk1 = Public_key(generator, Q1)
+
+ # And the second solution
+ R2 = ellipticcurve.PointJacobi(curve, x, -y, 1, n)
+ Q2 = numbertheory.inverse_mod(r, n) * (s * R2 + (-e % n) * generator)
+ Pk2 = Public_key(generator, Q2)
+
+ return [Pk1, Pk2]
+
+
+class Public_key(object):
+ """Public key for ECDSA.
+ """
+
+ def __init__(self, generator, point, verify=True):
+ """
+ Low level ECDSA public key object.
+
+ :param generator: the Point that generates the group (the base point)
+ :param point: the Point that defines the public key
+ :param bool verify: if True check if point is valid point on curve
+
+ :raises InvalidPointError: if the point parameters are invalid or
+ point does not lie on the curve
+ """
+
+ self.curve = generator.curve()
+ self.generator = generator
+ self.point = point
+ n = generator.order()
+ p = self.curve.p()
+ if not (0 <= point.x() < p) or not (0 <= point.y() < p):
+ raise InvalidPointError("The public point has x or y out of range.")
+ if verify and not self.curve.contains_point(point.x(), point.y()):
+ raise InvalidPointError("Point does not lie on the curve")
+ if not n:
+ raise InvalidPointError("Generator point must have order.")
+ # for curve parameters with base point with cofactor 1, all points
+ # that are on the curve are scalar multiples of the base point, so
+ # verifying that is not necessary. See Section 3.2.2.1 of SEC 1 v2
+ if verify and self.curve.cofactor() != 1 and \
+ not n * point == ellipticcurve.INFINITY:
+ raise InvalidPointError("Generator point order is bad.")
+
+ def __eq__(self, other):
+ if isinstance(other, Public_key):
+ """Return True if the points are identical, False otherwise."""
+ return self.curve == other.curve \
+ and self.point == other.point
+ return NotImplemented
+
+ def verifies(self, hash, signature):
+ """Verify that signature is a valid signature of hash.
+ Return True if the signature is valid.
+ """
+
+ # From X9.62 J.3.1.
+
+ G = self.generator
+ n = G.order()
+ r = signature.r
+ s = signature.s
+ if r < 1 or r > n - 1:
+ return False
+ if s < 1 or s > n - 1:
+ return False
+ c = numbertheory.inverse_mod(s, n)
+ u1 = (hash * c) % n
+ u2 = (r * c) % n
+ if hasattr(G, "mul_add"):
+ xy = G.mul_add(u1, self.point, u2)
+ else:
+ xy = u1 * G + u2 * self.point
+ v = xy.x() % n
+ return v == r
+
+
+class Private_key(object):
+ """Private key for ECDSA.
+ """
+
+ def __init__(self, public_key, secret_multiplier):
+ """public_key is of class Public_key;
+ secret_multiplier is a large integer.
+ """
+
+ self.public_key = public_key
+ self.secret_multiplier = secret_multiplier
+
+ def __eq__(self, other):
+ if isinstance(other, Private_key):
+ """Return True if the points are identical, False otherwise."""
+ return self.public_key == other.public_key \
+ and self.secret_multiplier == other.secret_multiplier
+ return NotImplemented
+
+ def sign(self, hash, random_k):
+ """Return a signature for the provided hash, using the provided
+ random nonce. It is absolutely vital that random_k be an unpredictable
+ number in the range [1, self.public_key.point.order()-1]. If
+ an attacker can guess random_k, he can compute our private key from a
+ single signature. Also, if an attacker knows a few high-order
+ bits (or a few low-order bits) of random_k, he can compute our private
+ key from many signatures. The generation of nonces with adequate
+ cryptographic strength is very difficult and far beyond the scope
+ of this comment.
+
+ May raise RuntimeError, in which case retrying with a new
+ random value k is in order.
+ """
+
+ G = self.public_key.generator
+ n = G.order()
+ k = random_k % n
+ # Fix the bit-length of the random nonce,
+ # so that it doesn't leak via timing.
+ # This does not change that ks = k mod n
+ ks = k + n
+ kt = ks + n
+ if bit_length(ks) == bit_length(n):
+ p1 = kt * G
+ else:
+ p1 = ks * G
+ r = p1.x() % n
+ if r == 0:
+ raise RSZeroError("amazingly unlucky random number r")
+ s = (numbertheory.inverse_mod(k, n)
+ * (hash + (self.secret_multiplier * r) % n)) % n
+ if s == 0:
+ raise RSZeroError("amazingly unlucky random number s")
+ return Signature(r, s)
+
+
+def int_to_string(x):
+ """Convert integer x into a string of bytes, as per X9.62."""
+ assert x >= 0
+ if x == 0:
+ return b('\0')
+ result = []
+ while x:
+ ordinal = x & 0xFF
+ result.append(int2byte(ordinal))
+ x >>= 8
+
+ result.reverse()
+ return b('').join(result)
+
+
+def string_to_int(s):
+ """Convert a string of bytes into an integer, as per X9.62."""
+ result = 0
+ for c in s:
+ if not isinstance(c, int):
+ c = ord(c)
+ result = 256 * result + c
+ return result
+
+
+def digest_integer(m):
+ """Convert an integer into a string of bytes, compute
+ its SHA-1 hash, and convert the result to an integer."""
+ #
+ # I don't expect this function to be used much. I wrote
+ # it in order to be able to duplicate the examples
+ # in ECDSAVS.
+ #
+ from hashlib import sha1
+ return string_to_int(sha1(int_to_string(m)).digest())
+
+
+def point_is_valid(generator, x, y):
+ """Is (x,y) a valid public key based on the specified generator?"""
+
+ # These are the tests specified in X9.62.
+
+ n = generator.order()
+ curve = generator.curve()
+ p = curve.p()
+ if not (0 <= x < p) or not (0 <= y < p):
+ return False
+ if not curve.contains_point(x, y):
+ return False
+ if curve.cofactor() != 1 and \
+ not n * ellipticcurve.PointJacobi(curve, x, y, 1)\
+ == ellipticcurve.INFINITY:
+ return False
+ return True
+
+
+# NIST Curve P-192:
+_p = 6277101735386680763835789423207666416083908700390324961279
+_r = 6277101735386680763835789423176059013767194773182842284081
+# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L
+# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65L
+_b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
+_Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
+_Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811
+
+curve_192 = ellipticcurve.CurveFp(_p, -3, _b, 1)
+generator_192 = ellipticcurve.PointJacobi(
+ curve_192, _Gx, _Gy, 1, _r, generator=True)
+
+
+# NIST Curve P-224:
+_p = 26959946667150639794667015087019630673557916260026308143510066298881
+_r = 26959946667150639794667015087019625940457807714424391721682722368061
+# s = 0xbd71344799d5c7fcdc45b59fa3b9ab8f6a948bc5L
+# c = 0x5b056c7e11dd68f40469ee7f3c7a7d74f7d121116506d031218291fbL
+_b = 0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4
+_Gx = 0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21
+_Gy = 0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34
+
+curve_224 = ellipticcurve.CurveFp(_p, -3, _b, 1)
+generator_224 = ellipticcurve.PointJacobi(
+ curve_224, _Gx, _Gy, 1, _r, generator=True)
+
+# NIST Curve P-256:
+_p = 115792089210356248762697446949407573530086143415290314195533631308867097853951
+_r = 115792089210356248762697446949407573529996955224135760342422259061068512044369
+# s = 0xc49d360886e704936a6678e1139d26b7819f7e90L
+# c = 0x7efba1662985be9403cb055c75d4f7e0ce8d84a9c5114abcaf3177680104fa0dL
+_b = 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b
+_Gx = 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296
+_Gy = 0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5
+
+curve_256 = ellipticcurve.CurveFp(_p, -3, _b, 1)
+generator_256 = ellipticcurve.PointJacobi(
+ curve_256, _Gx, _Gy, 1, _r, generator=True)
+
+# NIST Curve P-384:
+_p = 39402006196394479212279040100143613805079739270465446667948293404245721771496870329047266088258938001861606973112319
+_r = 39402006196394479212279040100143613805079739270465446667946905279627659399113263569398956308152294913554433653942643
+# s = 0xa335926aa319a27a1d00896a6773a4827acdac73L
+# c = 0x79d1e655f868f02fff48dcdee14151ddb80643c1406d0ca10dfe6fc52009540a495e8042ea5f744f6e184667cc722483L
+_b = 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef
+_Gx = 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760ab7
+_Gy = 0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5f
+
+curve_384 = ellipticcurve.CurveFp(_p, -3, _b, 1)
+generator_384 = ellipticcurve.PointJacobi(
+ curve_384, _Gx, _Gy, 1, _r, generator=True)
+
+# NIST Curve P-521:
+_p = 6864797660130609714981900799081393217269435300143305409394463459185543183397656052122559640661454554977296311391480858037121987999716643812574028291115057151
+_r = 6864797660130609714981900799081393217269435300143305409394463459185543183397655394245057746333217197532963996371363321113864768612440380340372808892707005449
+# s = 0xd09e8800291cb85396cc6717393284aaa0da64baL
+# c = 0x0b48bfa5f420a34949539d2bdfc264eeeeb077688e44fbf0ad8f6d0edb37bd6b533281000518e19f1b9ffbe0fe9ed8a3c2200b8f875e523868c70c1e5bf55bad637L
+_b = 0x051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00
+_Gx = 0xc6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66
+_Gy = 0x11839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650
+
+curve_521 = ellipticcurve.CurveFp(_p, -3, _b, 1)
+generator_521 = ellipticcurve.PointJacobi(
+ curve_521, _Gx, _Gy, 1, _r, generator=True)
+
+# Certicom secp256-k1
+_a = 0x0000000000000000000000000000000000000000000000000000000000000000
+_b = 0x0000000000000000000000000000000000000000000000000000000000000007
+_p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f
+_Gx = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
+_Gy = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
+_r = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
+
+curve_secp256k1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_secp256k1 = ellipticcurve.PointJacobi(
+ curve_secp256k1, _Gx, _Gy, 1, _r, generator=True)
+
+# Brainpool P-160-r1
+_a = 0x340E7BE2A280EB74E2BE61BADA745D97E8F7C300
+_b = 0x1E589A8595423412134FAA2DBDEC95C8D8675E58
+_p = 0xE95E4A5F737059DC60DFC7AD95B3D8139515620F
+_Gx = 0xBED5AF16EA3F6A4F62938C4631EB5AF7BDBCDBC3
+_Gy = 0x1667CB477A1A8EC338F94741669C976316DA6321
+_q = 0xE95E4A5F737059DC60DF5991D45029409E60FC09
+
+curve_brainpoolp160r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp160r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp160r1, _Gx, _Gy, 1, _q, generator=True)
+
+# Brainpool P-192-r1
+_a = 0x6A91174076B1E0E19C39C031FE8685C1CAE040E5C69A28EF
+_b = 0x469A28EF7C28CCA3DC721D044F4496BCCA7EF4146FBF25C9
+_p = 0xC302F41D932A36CDA7A3463093D18DB78FCE476DE1A86297
+_Gx = 0xC0A0647EAAB6A48753B033C56CB0F0900A2F5C4853375FD6
+_Gy = 0x14B690866ABD5BB88B5F4828C1490002E6773FA2FA299B8F
+_q = 0xC302F41D932A36CDA7A3462F9E9E916B5BE8F1029AC4ACC1
+
+curve_brainpoolp192r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp192r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp192r1, _Gx, _Gy, 1, _q, generator=True)
+
+# Brainpool P-224-r1
+_a = 0x68A5E62CA9CE6C1C299803A6C1530B514E182AD8B0042A59CAD29F43
+_b = 0x2580F63CCFE44138870713B1A92369E33E2135D266DBB372386C400B
+_p = 0xD7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FF
+_Gx = 0x0D9029AD2C7E5CF4340823B2A87DC68C9E4CE3174C1E6EFDEE12C07D
+_Gy = 0x58AA56F772C0726F24C6B89E4ECDAC24354B9E99CAA3F6D3761402CD
+_q = 0xD7C134AA264366862A18302575D0FB98D116BC4B6DDEBCA3A5A7939F
+
+curve_brainpoolp224r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp224r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp224r1, _Gx, _Gy, 1, _q, generator=True)
+
+# Brainpool P-256-r1
+_a = 0x7D5A0975FC2C3057EEF67530417AFFE7FB8055C126DC5C6CE94A4B44F330B5D9
+_b = 0x26DC5C6CE94A4B44F330B5D9BBD77CBF958416295CF7E1CE6BCCDC18FF8C07B6
+_p = 0xA9FB57DBA1EEA9BC3E660A909D838D726E3BF623D52620282013481D1F6E5377
+_Gx = 0x8BD2AEB9CB7E57CB2C4B482FFC81B7AFB9DE27E1E3BD23C23A4453BD9ACE3262
+_Gy = 0x547EF835C3DAC4FD97F8461A14611DC9C27745132DED8E545C1D54C72F046997
+_q = 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7
+
+curve_brainpoolp256r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp256r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp256r1, _Gx, _Gy, 1, _q, generator=True)
+
+# Brainpool P-320-r1
+_a = 0x3EE30B568FBAB0F883CCEBD46D3F3BB8A2A73513F5EB79DA66190EB085FFA9F492F375A97D860EB4
+_b = 0x520883949DFDBC42D3AD198640688A6FE13F41349554B49ACC31DCCD884539816F5EB4AC8FB1F1A6
+_p = 0xD35E472036BC4FB7E13C785ED201E065F98FCFA6F6F40DEF4F92B9EC7893EC28FCD412B1F1B32E27
+_Gx = 0x43BD7E9AFB53D8B85289BCC48EE5BFE6F20137D10A087EB6E7871E2A10A599C710AF8D0D39E20611
+_Gy = 0x14FDD05545EC1CC8AB4093247F77275E0743FFED117182EAA9C77877AAAC6AC7D35245D1692E8EE1
+_q = 0xD35E472036BC4FB7E13C785ED201E065F98FCFA5B68F12A32D482EC7EE8658E98691555B44C59311
+
+curve_brainpoolp320r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp320r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp320r1, _Gx, _Gy, 1, _q, generator=True)
+
+# Brainpool P-384-r1
+_a = 0x7BC382C63D8C150C3C72080ACE05AFA0C2BEA28E4FB22787139165EFBA91F90F8AA5814A503AD4EB04A8C7DD22CE2826
+_b = 0x04A8C7DD22CE28268B39B55416F0447C2FB77DE107DCD2A62E880EA53EEB62D57CB4390295DBC9943AB78696FA504C11
+_p = 0x8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B412B1DA197FB71123ACD3A729901D1A71874700133107EC53
+_Gx = 0x1D1C64F068CF45FFA2A63A81B7C13F6B8847A3E77EF14FE3DB7FCAFE0CBD10E8E826E03436D646AAEF87B2E247D4AF1E
+_Gy = 0x8ABE1D7520F9C2A45CB1EB8E95CFD55262B70B29FEEC5864E19C054FF99129280E4646217791811142820341263C5315
+_q = 0x8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425A7CF3AB6AF6B7FC3103B883202E9046565
+
+curve_brainpoolp384r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp384r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp384r1, _Gx, _Gy, 1, _q, generator=True)
+
+# Brainpool P-512-r1
+_a = 0x7830A3318B603B89E2327145AC234CC594CBDD8D3DF91610A83441CAEA9863BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117A72BF2C7B9E7C1AC4D77FC94CA
+_b = 0x3DF91610A83441CAEA9863BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117A72BF2C7B9E7C1AC4D77FC94CADC083E67984050B75EBAE5DD2809BD638016F723
+_p = 0xAADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308717D4D9B009BC66842AECDA12AE6A380E62881FF2F2D82C68528AA6056583A48F3
+_Gx = 0x81AEE4BDD82ED9645A21322E9C4C6A9385ED9F70B5D916C1B43B62EEF4D0098EFF3B1F78E2D0D48D50D1687B93B97D5F7C6D5047406A5E688B352209BCB9F822
+_Gy = 0x7DDE385D566332ECC0EABFA9CF7822FDF209F70024A57B1AA000C55B881F8111B2DCDE494A5F485E5BCA4BD88A2763AED1CA2B2FA8F0540678CD1E0F3AD80892
+_q = 0xAADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA70330870553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069
+
+curve_brainpoolp512r1 = ellipticcurve.CurveFp(_p, _a, _b, 1)
+generator_brainpoolp512r1 = ellipticcurve.PointJacobi(
+ curve_brainpoolp512r1, _Gx, _Gy, 1, _q, generator=True)
diff --git a/third_party/python/ecdsa/src/ecdsa/ellipticcurve.py b/third_party/python/ecdsa/src/ecdsa/ellipticcurve.py
new file mode 100644
index 0000000000..3420454db4
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/ellipticcurve.py
@@ -0,0 +1,780 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Implementation of elliptic curves, for cryptographic applications.
+#
+# This module doesn't provide any way to choose a random elliptic
+# curve, nor to verify that an elliptic curve was chosen randomly,
+# because one can simply use NIST's standard curves.
+#
+# Notes from X9.62-1998 (draft):
+# Nomenclature:
+# - Q is a public key.
+# The "Elliptic Curve Domain Parameters" include:
+# - q is the "field size", which in our case equals p.
+# - p is a big prime.
+# - G is a point of prime order (5.1.1.1).
+# - n is the order of G (5.1.1.1).
+# Public-key validation (5.2.2):
+# - Verify that Q is not the point at infinity.
+# - Verify that X_Q and Y_Q are in [0,p-1].
+# - Verify that Q is on the curve.
+# - Verify that nQ is the point at infinity.
+# Signature generation (5.3):
+# - Pick random k from [1,n-1].
+# Signature checking (5.4.2):
+# - Verify that r and s are in [1,n-1].
+#
+# Version of 2008.11.25.
+#
+# Revision history:
+# 2005.12.31 - Initial version.
+# 2008.11.25 - Change CurveFp.is_on to contains_point.
+#
+# Written in 2005 by Peter Pearson and placed in the public domain.
+
+from __future__ import division
+
+try:
+ from gmpy2 import mpz
+ GMPY = True
+except ImportError:
+ try:
+ from gmpy import mpz
+ GMPY = True
+ except ImportError:
+ GMPY = False
+
+
+from six import python_2_unicode_compatible
+from . import numbertheory
+from ._rwlock import RWLock
+
+
+@python_2_unicode_compatible
+class CurveFp(object):
+ """Elliptic Curve over the field of integers modulo a prime."""
+
+ if GMPY:
+ def __init__(self, p, a, b, h=None):
+ """
+ The curve of points satisfying y^2 = x^3 + a*x + b (mod p).
+
+ h is an integer that is the cofactor of the elliptic curve domain
+ parameters; it is the number of points satisfying the elliptic curve
+            equation divided by the order of the base point. It is used to select
+            an efficient algorithm for public point verification.
+ """
+ self.__p = mpz(p)
+ self.__a = mpz(a)
+ self.__b = mpz(b)
+ # h is not used in calculations and it can be None, so don't use
+ # gmpy with it
+ self.__h = h
+ else:
+ def __init__(self, p, a, b, h=None):
+ """
+ The curve of points satisfying y^2 = x^3 + a*x + b (mod p).
+
+ h is an integer that is the cofactor of the elliptic curve domain
+ parameters; it is the number of points satisfying the elliptic curve
+            equation divided by the order of the base point. It is used to select
+            an efficient algorithm for public point verification.
+ """
+ self.__p = p
+ self.__a = a
+ self.__b = b
+ self.__h = h
+
+    def __eq__(self, other):
+        """Return True if the curves are identical, False otherwise."""
+        if isinstance(other, CurveFp):
+            return self.__p == other.__p \
+                and self.__a == other.__a \
+                and self.__b == other.__b
+        return NotImplemented
+
+ def __hash__(self):
+ return hash((self.__p, self.__a, self.__b))
+
+ def p(self):
+ return self.__p
+
+ def a(self):
+ return self.__a
+
+ def b(self):
+ return self.__b
+
+ def cofactor(self):
+ return self.__h
+
+ def contains_point(self, x, y):
+ """Is the point (x,y) on this curve?"""
+ return (y * y - ((x * x + self.__a) * x + self.__b)) % self.__p == 0
+
+ def __str__(self):
+ return "CurveFp(p=%d, a=%d, b=%d, h=%d)" % (
+ self.__p, self.__a, self.__b, self.__h)
+
+
+class PointJacobi(object):
+ """
+ Point on an elliptic curve. Uses Jacobi coordinates.
+
+ In Jacobian coordinates, there are three parameters, X, Y and Z.
+ They correspond to affine parameters 'x' and 'y' like so:
+
+ x = X / Z²
+ y = Y / Z³
+ """
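+    # Note: the benefit of the Jacobi (projective) representation is that
+    # point addition and doubling need no modular inversions; a single
+    # inversion is performed only when converting back to affine coordinates
+    # (see scale() and to_affine() below).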
+ def __init__(self, curve, x, y, z, order=None, generator=False):
+ """
+ Initialise a point that uses Jacobi representation internally.
+
+ :param CurveFp curve: curve on which the point resides
+        :param int x: the X parameter of Jacobi representation (equal to x when
+          converting from affine coordinates)
+        :param int y: the Y parameter of Jacobi representation (equal to y when
+          converting from affine coordinates)
+        :param int z: the Z parameter of Jacobi representation (equal to 1 when
+          converting from affine coordinates)
+        :param int order: the point order, must be non-zero when using
+          generator=True
+        :param bool generator: the point provided is a curve generator, as
+          such, it will be commonly used with scalar multiplication. This will
+          cause a multiplication table to be precomputed for it
+ """
+ self.__curve = curve
+ # since it's generally better (faster) to use scaled points vs unscaled
+ # ones, use writer-biased RWLock for locking:
+ self._scale_lock = RWLock()
+ if GMPY:
+ self.__x = mpz(x)
+ self.__y = mpz(y)
+ self.__z = mpz(z)
+ self.__order = order and mpz(order)
+ else:
+ self.__x = x
+ self.__y = y
+ self.__z = z
+ self.__order = order
+ self.__precompute = []
+ if generator:
+ assert order
+ i = 1
+ order *= 2
+ doubler = PointJacobi(curve, x, y, z, order)
+ order *= 2
+ self.__precompute.append((doubler.x(), doubler.y()))
+
+ while i < order:
+ i *= 2
+ doubler = doubler.double().scale()
+ self.__precompute.append((doubler.x(), doubler.y()))
+
+ def __eq__(self, other):
+        """Compare two points with each other."""
+ try:
+ self._scale_lock.reader_acquire()
+ if other is INFINITY:
+ return not self.__y or not self.__z
+ x1, y1, z1 = self.__x, self.__y, self.__z
+ finally:
+ self._scale_lock.reader_release()
+ if isinstance(other, Point):
+ x2, y2, z2 = other.x(), other.y(), 1
+ elif isinstance(other, PointJacobi):
+ try:
+ other._scale_lock.reader_acquire()
+ x2, y2, z2 = other.__x, other.__y, other.__z
+ finally:
+ other._scale_lock.reader_release()
+ else:
+ return NotImplemented
+ if self.__curve != other.curve():
+ return False
+ p = self.__curve.p()
+
+ zz1 = z1 * z1 % p
+ zz2 = z2 * z2 % p
+
+ # compare the fractions by bringing them to the same denominator
+        # depend on short-circuit evaluation to save 4 multiplications in case of inequality
+ return (x1 * zz2 - x2 * zz1) % p == 0 and \
+ (y1 * zz2 * z2 - y2 * zz1 * z1) % p == 0
+
+ def order(self):
+ """Return the order of the point.
+
+ None if it is undefined.
+ """
+ return self.__order
+
+ def curve(self):
+ """Return curve over which the point is defined."""
+ return self.__curve
+
+ def x(self):
+ """
+ Return affine x coordinate.
+
+ This method should be used only when the 'y' coordinate is not needed.
+ It's computationally more efficient to use `to_affine()` and then
+ call x() and y() on the returned instance. Or call `scale()`
+ and then x() and y() on the returned instance.
+ """
+ try:
+ self._scale_lock.reader_acquire()
+ if self.__z == 1:
+ return self.__x
+ x = self.__x
+ z = self.__z
+ finally:
+ self._scale_lock.reader_release()
+ p = self.__curve.p()
+ z = numbertheory.inverse_mod(z, p)
+ return x * z**2 % p
+
+ def y(self):
+ """
+ Return affine y coordinate.
+
+ This method should be used only when the 'x' coordinate is not needed.
+ It's computationally more efficient to use `to_affine()` and then
+ call x() and y() on the returned instance. Or call `scale()`
+ and then x() and y() on the returned instance.
+ """
+ try:
+ self._scale_lock.reader_acquire()
+ if self.__z == 1:
+ return self.__y
+ y = self.__y
+ z = self.__z
+ finally:
+ self._scale_lock.reader_release()
+ p = self.__curve.p()
+ z = numbertheory.inverse_mod(z, p)
+ return y * z**3 % p
+
+ def scale(self):
+ """
+ Return point scaled so that z == 1.
+
+ Modifies point in place, returns self.
+ """
+ try:
+ self._scale_lock.reader_acquire()
+ if self.__z == 1:
+ return self
+ finally:
+ self._scale_lock.reader_release()
+
+ try:
+ self._scale_lock.writer_acquire()
+ # scaling already scaled point is safe (as inverse of 1 is 1) and
+ # quick so we don't need to optimise for the unlikely event when
+ # two threads hit the lock at the same time
+ p = self.__curve.p()
+ z_inv = numbertheory.inverse_mod(self.__z, p)
+ zz_inv = z_inv * z_inv % p
+ self.__x = self.__x * zz_inv % p
+ self.__y = self.__y * zz_inv * z_inv % p
+ # we are setting the z last so that the check above will return true
+ # only after all values were already updated
+ self.__z = 1
+ finally:
+ self._scale_lock.writer_release()
+ return self
+
+ def to_affine(self):
+ """Return point in affine form."""
+ if not self.__y or not self.__z:
+ return INFINITY
+ self.scale()
+ # after point is scaled, it's immutable, so no need to perform locking
+ return Point(self.__curve, self.__x,
+ self.__y, self.__order)
+
+ @staticmethod
+ def from_affine(point, generator=False):
+ """Create from an affine point.
+
+        :param bool generator: set to True to make the point precalculate a
+          multiplication table - useful for a public point when verifying many
+          signatures (around 100 or so) or for generator points of a curve.
+ """
+ return PointJacobi(point.curve(), point.x(), point.y(), 1,
+ point.order(), generator)
+
+    # please note that all the methods that use the equations from
+    # hyperelliptic.org are formatted in a way to maximise performance.
+ # Things that make code faster: multiplying instead of taking to the power
+ # (`xx = x * x; xxxx = xx * xx % p` is faster than `xxxx = x**4 % p` and
+ # `pow(x, 4, p)`),
+ # multiple assignments at the same time (`x1, x2 = self.x1, self.x2` is
+ # faster than `x1 = self.x1; x2 = self.x2`),
+ # similarly, sometimes the `% p` is skipped if it makes the calculation
+ # faster and the result of calculation is later reduced modulo `p`
+
+ def _double_with_z_1(self, X1, Y1, p, a):
+ """Add a point to itself with z == 1."""
+ # after:
+ # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-mdbl-2007-bl
+ XX, YY = X1 * X1 % p, Y1 * Y1 % p
+ if not YY:
+ return 0, 0, 1
+ YYYY = YY * YY % p
+ S = 2 * ((X1 + YY)**2 - XX - YYYY) % p
+ M = 3 * XX + a
+ T = (M * M - 2 * S) % p
+ # X3 = T
+ Y3 = (M * (S - T) - 8 * YYYY) % p
+ Z3 = 2 * Y1 % p
+ return T, Y3, Z3
+
+ def _double(self, X1, Y1, Z1, p, a):
+ """Add a point to itself, arbitrary z."""
+ if Z1 == 1:
+ return self._double_with_z_1(X1, Y1, p, a)
+ if not Z1:
+ return 0, 0, 1
+ # after:
+ # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-dbl-2007-bl
+ XX, YY = X1 * X1 % p, Y1 * Y1 % p
+ if not YY:
+ return 0, 0, 1
+ YYYY = YY * YY % p
+ ZZ = Z1 * Z1 % p
+ S = 2 * ((X1 + YY)**2 - XX - YYYY) % p
+ M = (3 * XX + a * ZZ * ZZ) % p
+ T = (M * M - 2 * S) % p
+ # X3 = T
+ Y3 = (M * (S - T) - 8 * YYYY) % p
+ Z3 = ((Y1 + Z1)**2 - YY - ZZ) % p
+
+ return T, Y3, Z3
+
+ def double(self):
+ """Add a point to itself."""
+ if not self.__y:
+ return INFINITY
+
+ p, a = self.__curve.p(), self.__curve.a()
+
+ try:
+ self._scale_lock.reader_acquire()
+ X1, Y1, Z1 = self.__x, self.__y, self.__z
+ finally:
+ self._scale_lock.reader_release()
+
+ X3, Y3, Z3 = self._double(X1, Y1, Z1, p, a)
+
+ if not Y3 or not Z3:
+ return INFINITY
+ return PointJacobi(self.__curve, X3, Y3, Z3, self.__order)
+
+ def _add_with_z_1(self, X1, Y1, X2, Y2, p):
+ """add points when both Z1 and Z2 equal 1"""
+ # after:
+ # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-mmadd-2007-bl
+ H = X2 - X1
+ HH = H * H
+ I = 4 * HH % p
+ J = H * I
+ r = 2 * (Y2 - Y1)
+ if not H and not r:
+ return self._double_with_z_1(X1, Y1, p, self.__curve.a())
+ V = X1 * I
+ X3 = (r**2 - J - 2 * V) % p
+ Y3 = (r * (V - X3) - 2 * Y1 * J) % p
+ Z3 = 2 * H % p
+ return X3, Y3, Z3
+
+ def _add_with_z_eq(self, X1, Y1, Z1, X2, Y2, p):
+ """add points when Z1 == Z2"""
+ # after:
+ # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-zadd-2007-m
+ A = (X2 - X1)**2 % p
+ B = X1 * A % p
+ C = X2 * A
+ D = (Y2 - Y1)**2 % p
+ if not A and not D:
+ return self._double(X1, Y1, Z1, p, self.__curve.a())
+ X3 = (D - B - C) % p
+ Y3 = ((Y2 - Y1) * (B - X3) - Y1 * (C - B)) % p
+ Z3 = Z1 * (X2 - X1) % p
+ return X3, Y3, Z3
+
+ def _add_with_z2_1(self, X1, Y1, Z1, X2, Y2, p):
+ """add points when Z2 == 1"""
+ # after:
+ # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-madd-2007-bl
+ Z1Z1 = Z1 * Z1 % p
+ U2, S2 = X2 * Z1Z1 % p, Y2 * Z1 * Z1Z1 % p
+ H = (U2 - X1) % p
+ HH = H * H % p
+ I = 4 * HH % p
+ J = H * I
+ r = 2 * (S2 - Y1) % p
+ if not r and not H:
+ return self._double_with_z_1(X2, Y2, p, self.__curve.a())
+ V = X1 * I
+ X3 = (r * r - J - 2 * V) % p
+ Y3 = (r * (V - X3) - 2 * Y1 * J) % p
+ Z3 = ((Z1 + H)**2 - Z1Z1 - HH) % p
+ return X3, Y3, Z3
+
+ def _add_with_z_ne(self, X1, Y1, Z1, X2, Y2, Z2, p):
+ """add points with arbitrary z"""
+ # after:
+ # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-add-2007-bl
+ Z1Z1 = Z1 * Z1 % p
+ Z2Z2 = Z2 * Z2 % p
+ U1 = X1 * Z2Z2 % p
+ U2 = X2 * Z1Z1 % p
+ S1 = Y1 * Z2 * Z2Z2 % p
+ S2 = Y2 * Z1 * Z1Z1 % p
+ H = U2 - U1
+ I = 4 * H * H % p
+ J = H * I % p
+ r = 2 * (S2 - S1) % p
+ if not H and not r:
+ return self._double(X1, Y1, Z1, p, self.__curve.a())
+ V = U1 * I
+ X3 = (r * r - J - 2 * V) % p
+ Y3 = (r * (V - X3) - 2 * S1 * J) % p
+ Z3 = ((Z1 + Z2)**2 - Z1Z1 - Z2Z2) * H % p
+
+ return X3, Y3, Z3
+
+ def __radd__(self, other):
+ """Add other to self."""
+ return self + other
+
+ def _add(self, X1, Y1, Z1, X2, Y2, Z2, p):
+ """add two points, select fastest method."""
+ if not Y1 or not Z1:
+ return X2, Y2, Z2
+ if not Y2 or not Z2:
+ return X1, Y1, Z1
+ if Z1 == Z2:
+ if Z1 == 1:
+ return self._add_with_z_1(X1, Y1, X2, Y2, p)
+ return self._add_with_z_eq(X1, Y1, Z1, X2, Y2, p)
+ if Z1 == 1:
+ return self._add_with_z2_1(X2, Y2, Z2, X1, Y1, p)
+ if Z2 == 1:
+ return self._add_with_z2_1(X1, Y1, Z1, X2, Y2, p)
+ return self._add_with_z_ne(X1, Y1, Z1, X2, Y2, Z2, p)
+
+ def __add__(self, other):
+ """Add two points on elliptic curve."""
+ if self == INFINITY:
+ return other
+ if other == INFINITY:
+ return self
+ if isinstance(other, Point):
+ other = PointJacobi.from_affine(other)
+ if self.__curve != other.__curve:
+ raise ValueError("The other point is on different curve")
+
+ p = self.__curve.p()
+ try:
+ self._scale_lock.reader_acquire()
+ X1, Y1, Z1 = self.__x, self.__y, self.__z
+ finally:
+ self._scale_lock.reader_release()
+ try:
+ other._scale_lock.reader_acquire()
+ X2, Y2, Z2 = other.__x, other.__y, other.__z
+ finally:
+ other._scale_lock.reader_release()
+ X3, Y3, Z3 = self._add(X1, Y1, Z1, X2, Y2, Z2, p)
+
+ if not Y3 or not Z3:
+ return INFINITY
+ return PointJacobi(self.__curve, X3, Y3, Z3, self.__order)
+
+ def __rmul__(self, other):
+ """Multiply point by an integer."""
+ return self * other
+
+ def _mul_precompute(self, other):
+ """Multiply point by integer with precomputation table."""
+ X3, Y3, Z3, p = 0, 0, 1, self.__curve.p()
+ _add = self._add
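+        # self.__precompute[k] holds the affine coordinates of 2**k times this
+        # point; the loop below consumes `other` one bit at a time, using a
+        # signed-digit recoding so that runs of set bits become a single
+        # subtraction followed by an addition higher up.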
+ for X2, Y2 in self.__precompute:
+ if other % 2:
+ if other % 4 >= 2:
+ other = (other + 1)//2
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, 1, p)
+ else:
+ other = (other - 1)//2
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p)
+ else:
+ other //= 2
+
+ if not Y3 or not Z3:
+ return INFINITY
+ return PointJacobi(self.__curve, X3, Y3, Z3, self.__order)
+
+ @staticmethod
+ def _naf(mult):
+ """Calculate non-adjacent form of number."""
+ ret = []
+ while mult:
+ if mult % 2:
+ nd = mult % 4
+ if nd >= 2:
+ nd = nd - 4
+ ret += [nd]
+ mult -= nd
+ else:
+ ret += [0]
+ mult //= 2
+ return ret
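+        # Example (illustrative): _naf(7) == [-1, 0, 0, 1], i.e.
+        # 7 == -1*2**0 + 0*2**1 + 0*2**2 + 1*2**3; digits are returned least
+        # significant first, which is why __mul__ iterates over them reversed.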
+
+ def __mul__(self, other):
+ """Multiply point by an integer."""
+ if not self.__y or not other:
+ return INFINITY
+ if other == 1:
+ return self
+ if self.__order:
+            # order*2 as protection against the Minerva attack
+ other = other % (self.__order*2)
+ if self.__precompute:
+ return self._mul_precompute(other)
+
+ self = self.scale()
+        # once scaled, the point is immutable, no need to lock
+ X2, Y2 = self.__x, self.__y
+ X3, Y3, Z3 = 0, 0, 1
+ p, a = self.__curve.p(), self.__curve.a()
+ _double = self._double
+ _add = self._add
+ # since adding points when at least one of them is scaled
+ # is quicker, reverse the NAF order
+ for i in reversed(self._naf(other)):
+ X3, Y3, Z3 = _double(X3, Y3, Z3, p, a)
+ if i < 0:
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, 1, p)
+ elif i > 0:
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p)
+
+ if not Y3 or not Z3:
+ return INFINITY
+
+ return PointJacobi(self.__curve, X3, Y3, Z3, self.__order)
+
+ @staticmethod
+ def _leftmost_bit(x):
+        """Return an integer with the same bit length as x but a Hamming weight of 1."""
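+        # e.g. _leftmost_bit(0b1011) == 0b1000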
+ assert x > 0
+ result = 1
+ while result <= x:
+ result = 2 * result
+ return result // 2
+
+ def mul_add(self, self_mul, other, other_mul):
+ """
+ Do two multiplications at the same time, add results.
+
+ calculates self*self_mul + other*other_mul
+ """
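+        # The computation below uses interleaved ("Shamir's trick" style)
+        # double-and-add: one shared doubling per bit of the longer scalar,
+        # plus at most one addition of self, other, or their precomputed sum.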
+ if other is INFINITY or other_mul == 0:
+ return self * self_mul
+ if self_mul == 0:
+ return other * other_mul
+ if not isinstance(other, PointJacobi):
+ other = PointJacobi.from_affine(other)
+        # when both points have precomputed multiplication tables, multiplying
+        # them separately and adding the results is faster (as it uses NAF)
+ if self.__precompute and other.__precompute:
+ return self * self_mul + other * other_mul
+
+ if self.__order:
+ self_mul = self_mul % self.__order
+ other_mul = other_mul % self.__order
+
+ i = self._leftmost_bit(max(self_mul, other_mul))*2
+ X3, Y3, Z3 = 0, 0, 1
+ p, a = self.__curve.p(), self.__curve.a()
+ self = self.scale()
+ # after scaling, point is immutable, no need for locking
+ X1, Y1 = self.__x, self.__y
+ other = other.scale()
+ X2, Y2 = other.__x, other.__y
+ both = (self + other).scale()
+ X4, Y4 = both.__x, both.__y
+ _double = self._double
+ _add = self._add
+ while i > 1:
+ X3, Y3, Z3 = _double(X3, Y3, Z3, p, a)
+ i = i // 2
+
+ if self_mul & i and other_mul & i:
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X4, Y4, 1, p)
+ elif self_mul & i:
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X1, Y1, 1, p)
+ elif other_mul & i:
+ X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p)
+
+ if not Y3 or not Z3:
+ return INFINITY
+
+ return PointJacobi(self.__curve, X3, Y3, Z3, self.__order)
+
+ def __neg__(self):
+ """Return negated point."""
+ try:
+ self._scale_lock.reader_acquire()
+ return PointJacobi(self.__curve, self.__x, -self.__y, self.__z,
+ self.__order)
+ finally:
+ self._scale_lock.reader_release()
+
+
+class Point(object):
+    """A point on an elliptic curve. Altering x and y is forbidden,
+ but they can be read by the x() and y() methods."""
+ def __init__(self, curve, x, y, order=None):
+ """curve, x, y, order; order (optional) is the order of this point."""
+ self.__curve = curve
+ if GMPY:
+ self.__x = x and mpz(x)
+ self.__y = y and mpz(y)
+ self.__order = order and mpz(order)
+ else:
+ self.__x = x
+ self.__y = y
+ self.__order = order
+ # self.curve is allowed to be None only for INFINITY:
+ if self.__curve:
+ assert self.__curve.contains_point(x, y)
+ # for curves with cofactor 1, all points that are on the curve are scalar
+ # multiples of the base point, so performing multiplication is not
+ # necessary to verify that. See Section 3.2.2.1 of SEC 1 v2
+ if curve and curve.cofactor() != 1 and order:
+ assert self * order == INFINITY
+
+ def __eq__(self, other):
+ """Return True if the points are identical, False otherwise."""
+ if isinstance(other, Point):
+ return self.__curve == other.__curve \
+ and self.__x == other.__x \
+ and self.__y == other.__y
+ return NotImplemented
+
+ def __neg__(self):
+ return Point(self.__curve, self.__x, self.__curve.p() - self.__y)
+
+ def __add__(self, other):
+ """Add one point to another point."""
+
+ # X9.62 B.3:
+
+ if not isinstance(other, Point):
+ return NotImplemented
+ if other == INFINITY:
+ return self
+ if self == INFINITY:
+ return other
+ assert self.__curve == other.__curve
+ if self.__x == other.__x:
+ if (self.__y + other.__y) % self.__curve.p() == 0:
+ return INFINITY
+ else:
+ return self.double()
+
+ p = self.__curve.p()
+
+ l = ((other.__y - self.__y) * \
+ numbertheory.inverse_mod(other.__x - self.__x, p)) % p
+
+ x3 = (l * l - self.__x - other.__x) % p
+ y3 = (l * (self.__x - x3) - self.__y) % p
+
+ return Point(self.__curve, x3, y3)
+
+ def __mul__(self, other):
+ """Multiply a point by an integer."""
+
+ def leftmost_bit(x):
+ assert x > 0
+ result = 1
+ while result <= x:
+ result = 2 * result
+ return result // 2
+
+ e = other
+ if e == 0 or (self.__order and e % self.__order == 0):
+ return INFINITY
+ if self == INFINITY:
+ return INFINITY
+ if e < 0:
+ return (-self) * (-e)
+
+ # From X9.62 D.3.2:
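+        # (scan the bits of e3 = 3*e and e together from the top; where they
+        # differ, add self or its negation - this walks a signed-digit
+        # expansion of e without computing a separate NAF)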
+
+ e3 = 3 * e
+ negative_self = Point(self.__curve, self.__x, -self.__y, self.__order)
+ i = leftmost_bit(e3) // 2
+ result = self
+ # print_("Multiplying %s by %d (e3 = %d):" % (self, other, e3))
+ while i > 1:
+ result = result.double()
+ if (e3 & i) != 0 and (e & i) == 0:
+ result = result + self
+ if (e3 & i) == 0 and (e & i) != 0:
+ result = result + negative_self
+ # print_(". . . i = %d, result = %s" % ( i, result ))
+ i = i // 2
+
+ return result
+
+ def __rmul__(self, other):
+ """Multiply a point by an integer."""
+
+ return self * other
+
+ def __str__(self):
+ if self == INFINITY:
+ return "infinity"
+ return "(%d,%d)" % (self.__x, self.__y)
+
+ def double(self):
+ """Return a new point that is twice the old."""
+
+ if self == INFINITY:
+ return INFINITY
+
+ # X9.62 B.3:
+
+ p = self.__curve.p()
+ a = self.__curve.a()
+
+ l = ((3 * self.__x * self.__x + a) * \
+ numbertheory.inverse_mod(2 * self.__y, p)) % p
+
+ x3 = (l * l - 2 * self.__x) % p
+ y3 = (l * (self.__x - x3) - self.__y) % p
+
+ return Point(self.__curve, x3, y3)
+
+ def x(self):
+ return self.__x
+
+ def y(self):
+ return self.__y
+
+ def curve(self):
+ return self.__curve
+
+ def order(self):
+ return self.__order
+
+
+# This one point is the Point At Infinity for all purposes:
+INFINITY = Point(None, None, None)
diff --git a/third_party/python/ecdsa/src/ecdsa/keys.py b/third_party/python/ecdsa/src/ecdsa/keys.py
new file mode 100644
index 0000000000..172fdf5874
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/keys.py
@@ -0,0 +1,1219 @@
+"""
+Primary classes for performing signing and verification operations.
+
+.. glossary::
+
+ raw encoding
+ Conversion of public, private keys and signatures (which in
+ mathematical sense are integers or pairs of integers) to strings of
+ bytes that does not use any special tags or encoding rules.
+ For any given curve, all keys of the same type or signatures will be
+      encoded to byte strings of the same length. In a more formal sense,
+ the integers are encoded as big-endian, constant length byte strings,
+ where the string length is determined by the curve order (e.g.
+ for NIST256p the order is 256 bits long, so the private key will be 32
+      bytes long while the public key will be 64 bytes long). The encoding of a
+ single integer is zero-padded on the left if the numerical value is
+ low. In case of public keys and signatures, which are comprised of two
+ integers, the integers are simply concatenated.
+
+ uncompressed
+ The most common formatting specified in PKIX standards. Specified in
+ X9.62 and SEC1 standards. The only difference between it and
+ :term:`raw encoding` is the prepending of a 0x04 byte. Thus an
+ uncompressed NIST256p public key encoding will be 65 bytes long.
+
+ compressed
+      The public point representation that uses half of the bytes of the
+ :term:`uncompressed` encoding (rounded up). It uses the first byte of
+ the encoding to specify the sign of the y coordinate and encodes the
+ x coordinate as-is. The first byte of the encoding is equal to
+ 0x02 or 0x03. Compressed encoding of NIST256p public key will be 33
+ bytes long.
+
+ hybrid
+ A combination of :term:`uncompressed` and :term:`compressed` encodings.
+ Both x and y coordinates are stored just as in :term:`compressed`
+ encoding, but the first byte reflects the sign of the y coordinate. The
+      first byte of the encoding will be equal to 0x06 or 0x07. Hybrid
+ encoding of NIST256p public key will be 65 bytes long.
+
+ PEM
+      The acronym stands for Privacy Enhanced Mail, but currently it is used
+ primarily as the way to encode :term:`DER` objects into text that can
+ be either easily copy-pasted or transferred over email.
+ It uses headers like ``-----BEGIN <type of contents>-----`` and footers
+ like ``-----END <type of contents>-----`` to separate multiple
+ types of objects in the same file or the object from the surrounding
+ comments. The actual object stored is base64 encoded.
+
+ DER
+ Distinguished Encoding Rules, the way to encode :term:`ASN.1` objects
+ deterministically and uniquely into byte strings.
+
+ ASN.1
+ Abstract Syntax Notation 1 is a standard description language for
+ specifying serialisation and deserialisation of data structures in a
+ portable and cross-platform way.
+
+ bytes-like object
+ All the types that implement the buffer protocol. That includes
+      ``str`` (only on python2), ``bytes``, ``bytearray``, ``array.array``
+      and ``memoryview`` of those objects.
+      Please note that ``array.array`` serialisation (converting it to a byte
+      string) is endianness dependent! A signature computed over an
+      ``array.array`` of integers on a big-endian system will not verify on a
+      little-endian system and vice versa.
+"""
+
+import binascii
+from hashlib import sha1
+from six import PY3, b
+from . import ecdsa
+from . import der
+from . import rfc6979
+from . import ellipticcurve
+from .curves import NIST192p, find_curve
+from .numbertheory import square_root_mod_prime, SquareRootError
+from .ecdsa import RSZeroError
+from .util import string_to_number, number_to_string, randrange
+from .util import sigencode_string, sigdecode_string
+from .util import oid_ecPublicKey, encoded_oid_ecPublicKey, MalformedSignature
+from ._compat import normalise_bytes
+
+
+__all__ = ["BadSignatureError", "BadDigestError", "VerifyingKey", "SigningKey",
+ "MalformedPointError"]
+
+
+class BadSignatureError(Exception):
+ """
+    Raised when verification of a signature fails.
+
+    Will be raised irrespective of the reason for the failure:
+
+ * the calculated or provided hash does not match the signature
+ * the signature does not match the curve/public key
+ * the encoding of the signature is malformed
+ * the size of the signature does not match the curve of the VerifyingKey
+ """
+
+ pass
+
+
+class BadDigestError(Exception):
+ """Raised in case the selected hash is too large for the curve."""
+
+ pass
+
+
+class MalformedPointError(AssertionError):
+ """Raised in case the encoding of private or public key is malformed."""
+
+ pass
+
+
+class VerifyingKey(object):
+ """
+ Class for handling keys that can verify signatures (public keys).
+
+ :ivar ecdsa.curves.Curve curve: The Curve over which all the cryptographic
+ operations will take place
+ :ivar default_hashfunc: the function that will be used for hashing the
+ data. Should implement the same API as hashlib.sha1
+ :vartype default_hashfunc: callable
+ :ivar pubkey: the actual public key
+ :vartype pubkey: ecdsa.ecdsa.Public_key
+ """
+
+ def __init__(self, _error__please_use_generate=None):
+ """Unsupported, please use one of the classmethods to initialise."""
+ if not _error__please_use_generate:
+ raise TypeError("Please use VerifyingKey.generate() to "
+ "construct me")
+ self.curve = None
+ self.default_hashfunc = None
+ self.pubkey = None
+
+ def __repr__(self):
+ pub_key = self.to_string("compressed")
+ return "VerifyingKey.from_string({0!r}, {1!r}, {2})".format(
+ pub_key, self.curve, self.default_hashfunc().name)
+
+ def __eq__(self, other):
+        """Return True if the keys are identical, False otherwise."""
+ if isinstance(other, VerifyingKey):
+ return self.curve == other.curve \
+ and self.pubkey == other.pubkey
+ return NotImplemented
+
+ @classmethod
+ def from_public_point(cls, point, curve=NIST192p, hashfunc=sha1,
+ validate_point=True):
+ """
+ Initialise the object from a Point object.
+
+ This is a low-level method, generally you will not want to use it.
+
+ :param point: The point to wrap around, the actual public key
+ :type point: ecdsa.ellipticcurve.Point
+ :param curve: The curve on which the point needs to reside, defaults
+ to NIST192p
+ :type curve: ecdsa.curves.Curve
+ :param hashfunc: The default hash function that will be used for
+ verification, needs to implement the same interface
+ as hashlib.sha1
+ :type hashfunc: callable
+        :param bool validate_point: whether to check if the point lies on the
+            curve; should always be used if the public point is not a result
+            of our own calculation
+
+ :raises MalformedPointError: if the public point does not lie on the
+ curve
+
+ :return: Initialised VerifyingKey object
+ :rtype: VerifyingKey
+ """
+ self = cls(_error__please_use_generate=True)
+ if not isinstance(point, ellipticcurve.PointJacobi):
+ point = ellipticcurve.PointJacobi.from_affine(point)
+ self.curve = curve
+ self.default_hashfunc = hashfunc
+ try:
+ self.pubkey = ecdsa.Public_key(curve.generator, point,
+ validate_point)
+ except ecdsa.InvalidPointError:
+ raise MalformedPointError("Point does not lie on the curve")
+ self.pubkey.order = curve.order
+ return self
+
+ def precompute(self):
+ self.pubkey.point = ellipticcurve.PointJacobi.from_affine(
+ self.pubkey.point, True)
+
+ @staticmethod
+ def _from_raw_encoding(string, curve):
+ """
+ Decode public point from :term:`raw encoding`.
+
+ :term:`raw encoding` is the same as the :term:`uncompressed` encoding,
+ but without the 0x04 byte at the beginning.
+ """
+ order = curve.order
+ # real assert, from_string() should not call us with different length
+ assert len(string) == curve.verifying_key_length
+ xs = string[:curve.baselen]
+ ys = string[curve.baselen:]
+ if len(xs) != curve.baselen:
+ raise MalformedPointError("Unexpected length of encoded x")
+ if len(ys) != curve.baselen:
+ raise MalformedPointError("Unexpected length of encoded y")
+ x = string_to_number(xs)
+ y = string_to_number(ys)
+
+ return ellipticcurve.PointJacobi(curve.curve, x, y, 1, order)
+
+ @staticmethod
+ def _from_compressed(string, curve):
+ """Decode public point from compressed encoding."""
+ if string[:1] not in (b('\x02'), b('\x03')):
+ raise MalformedPointError("Malformed compressed point encoding")
+
+ is_even = string[:1] == b('\x02')
+ x = string_to_number(string[1:])
+ order = curve.order
+ p = curve.curve.p()
+ alpha = (pow(x, 3, p) + (curve.curve.a() * x) + curve.curve.b()) % p
+ try:
+ beta = square_root_mod_prime(alpha, p)
+ except SquareRootError as e:
+ raise MalformedPointError(
+ "Encoding does not correspond to a point on curve", e)
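+        # the 0x02/0x03 prefix encodes the parity of y: if the parity of the
+        # computed square root does not match the prefix, the other root
+        # (p - beta) is the wanted y coordinate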
+ if is_even == bool(beta & 1):
+ y = p - beta
+ else:
+ y = beta
+ return ellipticcurve.PointJacobi(curve.curve, x, y, 1, order)
+
+ @classmethod
+ def _from_hybrid(cls, string, curve, validate_point):
+ """Decode public point from hybrid encoding."""
+ # real assert, from_string() should not call us with different types
+ assert string[:1] in (b('\x06'), b('\x07'))
+
+ # primarily use the uncompressed as it's easiest to handle
+ point = cls._from_raw_encoding(string[1:], curve)
+
+ # but validate if it's self-consistent if we're asked to do that
+ if validate_point \
+ and (point.y() & 1 and string[:1] != b('\x07')
+ or (not point.y() & 1) and string[:1] != b('\x06')):
+ raise MalformedPointError("Inconsistent hybrid point encoding")
+
+ return point
+
+ @classmethod
+ def from_string(cls, string, curve=NIST192p, hashfunc=sha1,
+ validate_point=True):
+ """
+ Initialise the object from byte encoding of public key.
+
+        The method accepts and automatically detects the type of point
+        encoding used. It supports the :term:`raw encoding`,
+ :term:`uncompressed`, :term:`compressed` and :term:`hybrid` encodings.
+
+        Note: while the method is named "from_string", it's a misnomer from
+        Python 2 days, when character strings and byte strings shared a type.
+        In Python 3 the input needs to be a bytes-like object.
+
+ :param string: single point encoding of the public key
+ :type string: :term:`bytes-like object`
+ :param curve: the curve on which the public key is expected to lie
+ :type curve: ecdsa.curves.Curve
+ :param hashfunc: The default hash function that will be used for
+ verification, needs to implement the same interface as hashlib.sha1
+ :type hashfunc: callable
+ :param validate_point: whether to verify that the point lies on the
+ provided curve or not, defaults to True
+ :type validate_point: bool
+
+ :raises MalformedPointError: if the public point does not lie on the
+ curve or the encoding is invalid
+
+ :return: Initialised VerifyingKey object
+ :rtype: VerifyingKey
+ """
+ string = normalise_bytes(string)
+ sig_len = len(string)
+ if sig_len == curve.verifying_key_length:
+ point = cls._from_raw_encoding(string, curve)
+ elif sig_len == curve.verifying_key_length + 1:
+ if string[:1] in (b('\x06'), b('\x07')):
+ point = cls._from_hybrid(string, curve, validate_point)
+ elif string[:1] == b('\x04'):
+ point = cls._from_raw_encoding(string[1:], curve)
+ else:
+ raise MalformedPointError(
+ "Invalid X9.62 encoding of the public point")
+ elif sig_len == curve.baselen + 1:
+ point = cls._from_compressed(string, curve)
+ else:
+ raise MalformedPointError(
+ "Length of string does not match lengths of "
+ "any of the supported encodings of {0} "
+ "curve.".format(curve.name))
+ return cls.from_public_point(point, curve, hashfunc,
+ validate_point)
+
+ @classmethod
+ def from_pem(cls, string, hashfunc=sha1):
+ """
+ Initialise from public key stored in :term:`PEM` format.
+
+ The PEM header of the key should be ``BEGIN PUBLIC KEY``.
+
+ See the :func:`~VerifyingKey.from_der()` method for details of the
+ format supported.
+
+        Note: only a single PEM object encoding is supported in the provided
+        string.
+
+ :param string: text with PEM-encoded public ECDSA key
+ :type string: str
+
+ :return: Initialised VerifyingKey object
+ :rtype: VerifyingKey
+ """
+ return cls.from_der(der.unpem(string), hashfunc=hashfunc)
+
+ @classmethod
+ def from_der(cls, string, hashfunc=sha1):
+ """
+ Initialise the key stored in :term:`DER` format.
+
+ The expected format of the key is the SubjectPublicKeyInfo structure
+ from RFC5912 (for RSA keys, it's known as the PKCS#1 format)::
+
+ SubjectPublicKeyInfo {PUBLIC-KEY: IOSet} ::= SEQUENCE {
+ algorithm AlgorithmIdentifier {PUBLIC-KEY, {IOSet}},
+ subjectPublicKey BIT STRING
+ }
+
+ Note: only public EC keys are supported by this method. The
+ SubjectPublicKeyInfo.algorithm.algorithm field must specify
+ id-ecPublicKey (see RFC3279).
+
+ Only the named curve encoding is supported, thus the
+ SubjectPublicKeyInfo.algorithm.parameters field needs to be an
+        object identifier. A sequence in that field indicates an explicit
+        parameter curve encoding; this format is not supported. A NULL object
+        in that field indicates an "implicitlyCA" encoding, where the curve
+        parameters come from the CA certificate; those, again, are not supported.
+
+ :param string: binary string with the DER encoding of public ECDSA key
+ :type string: bytes-like object
+
+ :return: Initialised VerifyingKey object
+ :rtype: VerifyingKey
+ """
+ string = normalise_bytes(string)
+ # [[oid_ecPublicKey,oid_curve], point_str_bitstring]
+ s1, empty = der.remove_sequence(string)
+ if empty != b"":
+ raise der.UnexpectedDER("trailing junk after DER pubkey: %s" %
+ binascii.hexlify(empty))
+ s2, point_str_bitstring = der.remove_sequence(s1)
+ # s2 = oid_ecPublicKey,oid_curve
+ oid_pk, rest = der.remove_object(s2)
+ oid_curve, empty = der.remove_object(rest)
+ if empty != b"":
+ raise der.UnexpectedDER("trailing junk after DER pubkey objects: %s" %
+ binascii.hexlify(empty))
+ if not oid_pk == oid_ecPublicKey:
+ raise der.UnexpectedDER("Unexpected object identifier in DER "
+ "encoding: {0!r}".format(oid_pk))
+ curve = find_curve(oid_curve)
+ point_str, empty = der.remove_bitstring(point_str_bitstring, 0)
+ if empty != b"":
+ raise der.UnexpectedDER("trailing junk after pubkey pointstring: %s" %
+ binascii.hexlify(empty))
+ # raw encoding of point is invalid in DER files
+ if len(point_str) == curve.verifying_key_length:
+ raise der.UnexpectedDER("Malformed encoding of public point")
+ return cls.from_string(point_str, curve, hashfunc=hashfunc)
+
+ @classmethod
+ def from_public_key_recovery(cls, signature, data, curve, hashfunc=sha1,
+ sigdecode=sigdecode_string):
+ """
+ Return keys that can be used as verifiers of the provided signature.
+
+ Tries to recover the public key that can be used to verify the
+        signature; usually returns two such keys.
+
+ :param signature: the byte string with the encoded signature
+ :type signature: bytes-like object
+ :param data: the data to be hashed for signature verification
+ :type data: bytes-like object
+ :param curve: the curve over which the signature was performed
+ :type curve: ecdsa.curves.Curve
+ :param hashfunc: The default hash function that will be used for
+ verification, needs to implement the same interface as hashlib.sha1
+ :type hashfunc: callable
+ :param sigdecode: Callable to define the way the signature needs to
+ be decoded to an object, needs to handle `signature` as the
+ first parameter, the curve order (an int) as the second and return
+ a tuple with two integers, "r" as the first one and "s" as the
+ second one. See :func:`ecdsa.util.sigdecode_string` and
+ :func:`ecdsa.util.sigdecode_der` for examples.
+ :type sigdecode: callable
+
+ :return: Initialised VerifyingKey objects
+ :rtype: list of VerifyingKey
+ """
+ data = normalise_bytes(data)
+ digest = hashfunc(data).digest()
+ return cls.from_public_key_recovery_with_digest(
+ signature, digest, curve, hashfunc=hashfunc,
+ sigdecode=sigdecode)
+
+ @classmethod
+ def from_public_key_recovery_with_digest(
+ cls, signature, digest, curve,
+ hashfunc=sha1, sigdecode=sigdecode_string):
+ """
+ Return keys that can be used as verifiers of the provided signature.
+
+ Tries to recover the public key that can be used to verify the
+        signature; usually returns two such keys.
+
+ :param signature: the byte string with the encoded signature
+ :type signature: bytes-like object
+ :param digest: the hash value of the message signed by the signature
+ :type digest: bytes-like object
+ :param curve: the curve over which the signature was performed
+ :type curve: ecdsa.curves.Curve
+ :param hashfunc: The default hash function that will be used for
+ verification, needs to implement the same interface as hashlib.sha1
+ :type hashfunc: callable
+ :param sigdecode: Callable to define the way the signature needs to
+ be decoded to an object, needs to handle `signature` as the
+ first parameter, the curve order (an int) as the second and return
+ a tuple with two integers, "r" as the first one and "s" as the
+ second one. See :func:`ecdsa.util.sigdecode_string` and
+ :func:`ecdsa.util.sigdecode_der` for examples.
+ :type sigdecode: callable
+
+
+        :return: Initialised VerifyingKey objects
+        :rtype: list of VerifyingKey
+ """
+ generator = curve.generator
+ r, s = sigdecode(signature, generator.order())
+ sig = ecdsa.Signature(r, s)
+
+ digest = normalise_bytes(digest)
+ digest_as_number = string_to_number(digest)
+ pks = sig.recover_public_keys(digest_as_number, generator)
+
+ # Transforms the ecdsa.Public_key object into a VerifyingKey
+ verifying_keys = [cls.from_public_point(pk.point, curve, hashfunc)
+ for pk in pks]
+ return verifying_keys
+
+ def _raw_encode(self):
+ """Convert the public key to the :term:`raw encoding`."""
+ order = self.pubkey.order
+ x_str = number_to_string(self.pubkey.point.x(), order)
+ y_str = number_to_string(self.pubkey.point.y(), order)
+ return x_str + y_str
+
+ def _compressed_encode(self):
+ """Encode the public point into the compressed form."""
+ order = self.pubkey.order
+ x_str = number_to_string(self.pubkey.point.x(), order)
+ if self.pubkey.point.y() & 1:
+ return b('\x03') + x_str
+ else:
+ return b('\x02') + x_str
+
+ def _hybrid_encode(self):
+ """Encode the public point into the hybrid form."""
+ raw_enc = self._raw_encode()
+ if self.pubkey.point.y() & 1:
+ return b('\x07') + raw_enc
+ else:
+ return b('\x06') + raw_enc
+
+ def to_string(self, encoding="raw"):
+ """
+ Convert the public key to a byte string.
+
+        The method by default uses the :term:`raw encoding` (specified
+        by `encoding="raw"`). It can also output keys in :term:`uncompressed`,
+ :term:`compressed` and :term:`hybrid` formats.
+
+ Remember that the curve identification is not part of the encoding
+ so to decode the point using :func:`~VerifyingKey.from_string`, curve
+ needs to be specified.
+
+ Note: while the method is called "to_string", it's a misnomer from
+        Python 2 days when character strings and byte strings shared a type.
+ On Python 3 the returned type will be `bytes`.
+
+ :return: :term:`raw encoding` of the public key (public point) on the
+ curve
+ :rtype: bytes
+ """
+ assert encoding in ("raw", "uncompressed", "compressed", "hybrid")
+ if encoding == "raw":
+ return self._raw_encode()
+ elif encoding == "uncompressed":
+ return b('\x04') + self._raw_encode()
+ elif encoding == "hybrid":
+ return self._hybrid_encode()
+ else:
+ return self._compressed_encode()
+
+ def to_pem(self, point_encoding="uncompressed"):
+ """
+ Convert the public key to the :term:`PEM` format.
+
+ The PEM header of the key will be ``BEGIN PUBLIC KEY``.
+
+ The format of the key is described in the
+ :func:`~VerifyingKey.from_der()` method.
+ This method supports only "named curve" encoding of keys.
+
+ :param str point_encoding: specification of the encoding format
+ of public keys. "uncompressed" is most portable, "compressed" is
+ smallest. "hybrid" is uncommon and unsupported by most
+          implementations; it is as big as "uncompressed".
+
+ :return: portable encoding of the public key
+ :rtype: str
+ """
+ return der.topem(self.to_der(point_encoding), "PUBLIC KEY")
+
+ def to_der(self, point_encoding="uncompressed"):
+ """
+ Convert the public key to the :term:`DER` format.
+
+ The format of the key is described in the
+ :func:`~VerifyingKey.from_der()` method.
+ This method supports only "named curve" encoding of keys.
+
+ :param str point_encoding: specification of the encoding format
+ of public keys. "uncompressed" is most portable, "compressed" is
+ smallest. "hybrid" is uncommon and unsupported by most
+          implementations; it is as big as "uncompressed".
+
+ :return: DER encoding of the public key
+ :rtype: bytes
+ """
+ if point_encoding == "raw":
+ raise ValueError("raw point_encoding not allowed in DER")
+ point_str = self.to_string(point_encoding)
+ return der.encode_sequence(der.encode_sequence(encoded_oid_ecPublicKey,
+ self.curve.encoded_oid),
+ # 0 is the number of unused bits in the
+ # bit string
+ der.encode_bitstring(point_str, 0))
+
+ def verify(self, signature, data, hashfunc=None,
+ sigdecode=sigdecode_string):
+ """
+ Verify a signature made over provided data.
+
+ Will hash `data` to verify the signature.
+
+ By default expects signature in :term:`raw encoding`. Can also be used
+ to verify signatures in ASN.1 DER encoding by using
+ :func:`ecdsa.util.sigdecode_der`
+ as the `sigdecode` parameter.
+
+ :param signature: encoding of the signature
+        :type signature: sigdecode method dependent
+        :param data: data signed by the `signature`, will be hashed using
+            `hashfunc`, if specified, or the default hash function
+        :type data: bytes-like object
+ :param hashfunc: The default hash function that will be used for
+ verification, needs to implement the same interface as hashlib.sha1
+ :type hashfunc: callable
+ :param sigdecode: Callable to define the way the signature needs to
+ be decoded to an object, needs to handle `signature` as the
+ first parameter, the curve order (an int) as the second and return
+ a tuple with two integers, "r" as the first one and "s" as the
+ second one. See :func:`ecdsa.util.sigdecode_string` and
+ :func:`ecdsa.util.sigdecode_der` for examples.
+ :type sigdecode: callable
+
+ :raises BadSignatureError: if the signature is invalid or malformed
+
+ :return: True if the verification was successful
+ :rtype: bool
+ """
+ # signature doesn't have to be a bytes-like-object so don't normalise
+ # it, the decoders will do that
+ data = normalise_bytes(data)
+
+ hashfunc = hashfunc or self.default_hashfunc
+ digest = hashfunc(data).digest()
+ return self.verify_digest(signature, digest, sigdecode, True)
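+    # Typical usage sketch (illustrative; assumes SigningKey.generate() and
+    # the SigningKey.sign() method defined further down in this module):
+    #
+    #   sk = SigningKey.generate(curve=NIST192p)
+    #   vk = sk.verifying_key
+    #   signature = sk.sign(b"message")
+    #   assert vk.verify(signature, b"message")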
+
+ def verify_digest(self, signature, digest, sigdecode=sigdecode_string,
+ allow_truncate=False):
+ """
+ Verify a signature made over provided hash value.
+
+ By default expects signature in :term:`raw encoding`. Can also be used
+ to verify signatures in ASN.1 DER encoding by using
+ :func:`ecdsa.util.sigdecode_der`
+ as the `sigdecode` parameter.
+
+ :param signature: encoding of the signature
+        :type signature: sigdecode method dependent
+        :param digest: raw hash value that the signature authenticates.
+        :type digest: bytes-like object
+ :param sigdecode: Callable to define the way the signature needs to
+ be decoded to an object, needs to handle `signature` as the
+ first parameter, the curve order (an int) as the second and return
+ a tuple with two integers, "r" as the first one and "s" as the
+ second one. See :func:`ecdsa.util.sigdecode_string` and
+ :func:`ecdsa.util.sigdecode_der` for examples.
+ :type sigdecode: callable
+ :param bool allow_truncate: if True, the provided digest can have
+ bigger bit-size than the order of the curve, the extra bits (at
+ the end of the digest) will be truncated. Use it when verifying
+ SHA-384 output using NIST256p or in similar situations.
+
+ :raises BadSignatureError: if the signature is invalid or malformed
+ :raises BadDigestError: if the provided digest is too big for the curve
+ associated with this VerifyingKey and allow_truncate was not set
+
+ :return: True if the verification was successful
+ :rtype: bool
+ """
+ # signature doesn't have to be a bytes-like-object so don't normalise
+ # it, the decoders will do that
+ digest = normalise_bytes(digest)
+ if allow_truncate:
+ digest = digest[:self.curve.baselen]
+ if len(digest) > self.curve.baselen:
+ raise BadDigestError("this curve (%s) is too short "
+ "for your digest (%d)" % (self.curve.name,
+ 8 * len(digest)))
+ number = string_to_number(digest)
+ try:
+ r, s = sigdecode(signature, self.pubkey.order)
+ except (der.UnexpectedDER, MalformedSignature) as e:
+ raise BadSignatureError("Malformed formatting of signature", e)
+ sig = ecdsa.Signature(r, s)
+ if self.pubkey.verifies(number, sig):
+ return True
+ raise BadSignatureError("Signature verification failed")
+
+
+class SigningKey(object):
+ """
+ Class for handling keys that can create signatures (private keys).
+
+ :ivar ecdsa.curves.Curve curve: The Curve over which all the cryptographic
+ operations will take place
+ :ivar default_hashfunc: the function that will be used for hashing the
+ data. Should implement the same API as hashlib.sha1
+ :ivar int baselen: the length of a :term:`raw encoding` of private key
+ :ivar ecdsa.keys.VerifyingKey verifying_key: the public key
+ associated with this private key
+ :ivar ecdsa.ecdsa.Private_key privkey: the actual private key
+ """
+
+ def __init__(self, _error__please_use_generate=None):
+ """Unsupported, please use one of the classmethods to initialise."""
+ if not _error__please_use_generate:
+ raise TypeError("Please use SigningKey.generate() to construct me")
+ self.curve = None
+ self.default_hashfunc = None
+ self.baselen = None
+ self.verifying_key = None
+ self.privkey = None
+
+ def __eq__(self, other):
+        """Return True if the keys are identical, False otherwise."""
+ if isinstance(other, SigningKey):
+ return self.curve == other.curve \
+ and self.verifying_key == other.verifying_key \
+ and self.privkey == other.privkey
+ return NotImplemented
+
+ @classmethod
+ def generate(cls, curve=NIST192p, entropy=None, hashfunc=sha1):
+ """
+ Generate a random private key.
+
+ :param curve: The curve on which the point needs to reside, defaults
+ to NIST192p
+ :type curve: ecdsa.curves.Curve
+ :param entropy: Source of randomness for generating the private keys,
+ should provide cryptographically secure random numbers if the keys
+ need to be secure. Uses os.urandom() by default.
+ :type entropy: callable
+ :param hashfunc: The default hash function that will be used for
+ signing, needs to implement the same interface
+ as hashlib.sha1
+ :type hashfunc: callable
+
+ :return: Initialised SigningKey object
+ :rtype: SigningKey
+ """
+ secexp = randrange(curve.order, entropy)
+ return cls.from_secret_exponent(secexp, curve, hashfunc)
+
+ @classmethod
+ def from_secret_exponent(cls, secexp, curve=NIST192p, hashfunc=sha1):
+ """
+ Create a private key from a random integer.
+
+ Note: it's a low level method, it's recommended to use the
+ :func:`~SigningKey.generate` method to create private keys.
+
+ :param int secexp: secret multiplier (the actual private key in ECDSA).
+ Needs to be an integer between 1 and the curve order.
+ :param curve: The curve on which the point needs to reside
+ :type curve: ecdsa.curves.Curve
+ :param hashfunc: The default hash function that will be used for
+ signing, needs to implement the same interface
+ as hashlib.sha1
+ :type hashfunc: callable
+
+ :raises MalformedPointError: when the provided secexp is too large
+ or too small for the curve selected
+ :raises RuntimeError: if the generation of public key from private
+ key failed
+
+ :return: Initialised SigningKey object
+ :rtype: SigningKey
+ """
+ self = cls(_error__please_use_generate=True)
+ self.curve = curve
+ self.default_hashfunc = hashfunc
+ self.baselen = curve.baselen
+ n = curve.order
+ if not 1 <= secexp < n:
+ raise MalformedPointError(
+ "Invalid value for secexp, expected integer between 1 and {0}"
+ .format(n))
+ pubkey_point = curve.generator * secexp
+ if hasattr(pubkey_point, "scale"):
+ pubkey_point = pubkey_point.scale()
+ self.verifying_key = VerifyingKey.from_public_point(pubkey_point, curve,
+ hashfunc, False)
+ pubkey = self.verifying_key.pubkey
+ self.privkey = ecdsa.Private_key(pubkey, secexp)
+ self.privkey.order = n
+ return self
+
+ @classmethod
+ def from_string(cls, string, curve=NIST192p, hashfunc=sha1):
+ """
+ Decode the private key from :term:`raw encoding`.
+
+ Note: the name of this method is a misnomer coming from days of
+ Python 2, when binary strings and character strings shared a type.
+ In Python 3, the expected type is `bytes`.
+
+ :param string: the raw encoding of the private key
+ :type string: bytes like object
+ :param curve: The curve on which the point needs to reside
+ :type curve: ecdsa.curves.Curve
+ :param hashfunc: The default hash function that will be used for
+ signing, needs to implement the same interface
+ as hashlib.sha1
+ :type hashfunc: callable
+
+        :raises MalformedPointError: if the length of the encoding doesn't match
+            the provided curve or the encoded values are too large
+ :raises RuntimeError: if the generation of public key from private
+ key failed
+
+ :return: Initialised SigningKey object
+ :rtype: SigningKey
+ """
+ string = normalise_bytes(string)
+ if len(string) != curve.baselen:
+ raise MalformedPointError(
+ "Invalid length of private key, received {0}, expected {1}"
+ .format(len(string), curve.baselen))
+ secexp = string_to_number(string)
+ return cls.from_secret_exponent(secexp, curve, hashfunc)
+
+ @classmethod
+ def from_pem(cls, string, hashfunc=sha1):
+ """
+ Initialise from key stored in :term:`PEM` format.
+
+ Note, the only PEM format supported is the un-encrypted RFC5915
+        (the SSLeay format) supported by OpenSSL, the more common PKCS#8 format
+ is NOT supported (see:
+ https://github.com/warner/python-ecdsa/issues/113 )
+
+        ``openssl ec -in pkcs8.pem -out ssleay.pem`` can be used to
+        convert a PKCS#8 file to this legacy format.
+
+ The legacy format files have the header with the string
+ ``BEGIN EC PRIVATE KEY``.
+ Encrypted files (ones that include the string
+ ``Proc-Type: 4,ENCRYPTED``
+ right after the PEM header) are not supported.
+
+ See :func:`~SigningKey.from_der` for ASN.1 syntax of the objects in
+        these files.
+
+ :param string: text with PEM-encoded private ECDSA key
+ :type string: str
+
+        :raises MalformedPointError: if the length of the encoding doesn't match
+            the provided curve or the encoded values are too large
+ :raises RuntimeError: if the generation of public key from private
+ key failed
+ :raises UnexpectedDER: if the encoding of the PEM file is incorrect
+
+        :return: Initialised SigningKey object
+        :rtype: SigningKey
+ """
+ # the privkey pem may have multiple sections, commonly it also has
+ # "EC PARAMETERS", we need just "EC PRIVATE KEY".
+ if PY3 and isinstance(string, str):
+ string = string.encode()
+ privkey_pem = string[string.index(b("-----BEGIN EC PRIVATE KEY-----")):]
+ return cls.from_der(der.unpem(privkey_pem), hashfunc)
+
+ @classmethod
+ def from_der(cls, string, hashfunc=sha1):
+ """
+ Initialise from key stored in :term:`DER` format.
+
+ Note, the only DER format supported is the RFC5915
+        (the SSLeay format) supported by OpenSSL, the more common PKCS#8 format
+ is NOT supported (see:
+ https://github.com/warner/python-ecdsa/issues/113 )
+
+        ``openssl ec -in pkcs8.pem -outform der -out ssleay.der`` can be
+        used to convert a PKCS#8 file to this legacy format.
+
+        The encoding of the ASN.1 object in those files follows the
+        syntax specified in RFC5915::
+
+ ECPrivateKey ::= SEQUENCE {
+ version INTEGER { ecPrivkeyVer1(1) }} (ecPrivkeyVer1),
+ privateKey OCTET STRING,
+ parameters [0] ECParameters {{ NamedCurve }} OPTIONAL,
+ publicKey [1] BIT STRING OPTIONAL
+ }
+
+ The only format supported for the `parameters` field is the named
+ curve method. Explicit encoding of curve parameters is not supported.
+
+        While the `parameters` field is defined as optional, this implementation
+        requires its presence for correct parsing of the keys.
+
+        The `publicKey` field is ignored completely (any errors in it will
+        go undetected).
+
+ :param string: binary string with DER-encoded private ECDSA key
+ :type string: bytes like object
+
+        :raises MalformedPointError: if the length of the encoding doesn't match
+            the provided curve or the encoded values are too large
+ :raises RuntimeError: if the generation of public key from private
+ key failed
+ :raises UnexpectedDER: if the encoding of the DER file is incorrect
+
+        :return: Initialised SigningKey object
+        :rtype: SigningKey
+ """
+ string = normalise_bytes(string)
+ s, empty = der.remove_sequence(string)
+ if empty != b(""):
+ raise der.UnexpectedDER("trailing junk after DER privkey: %s" %
+ binascii.hexlify(empty))
+ one, s = der.remove_integer(s)
+ if one != 1:
+ raise der.UnexpectedDER("expected '1' at start of DER privkey,"
+ " got %d" % one)
+ privkey_str, s = der.remove_octet_string(s)
+ tag, curve_oid_str, s = der.remove_constructed(s)
+ if tag != 0:
+ raise der.UnexpectedDER("expected tag 0 in DER privkey,"
+ " got %d" % tag)
+ curve_oid, empty = der.remove_object(curve_oid_str)
+ if empty != b(""):
+ raise der.UnexpectedDER("trailing junk after DER privkey "
+ "curve_oid: %s" % binascii.hexlify(empty))
+ curve = find_curve(curve_oid)
+
+ # we don't actually care about the following fields
+ #
+ # tag, pubkey_bitstring, s = der.remove_constructed(s)
+ # if tag != 1:
+ # raise der.UnexpectedDER("expected tag 1 in DER privkey, got %d"
+ # % tag)
+ # pubkey_str = der.remove_bitstring(pubkey_bitstring, 0)
+ # if empty != "":
+ # raise der.UnexpectedDER("trailing junk after DER privkey "
+ # "pubkeystr: %s" % binascii.hexlify(empty))
+
+ # our from_string method likes fixed-length privkey strings
+ if len(privkey_str) < curve.baselen:
+ privkey_str = b("\x00") * (curve.baselen - len(privkey_str)) + privkey_str
+ return cls.from_string(privkey_str, curve, hashfunc)
+
+ def to_string(self):
+ """
+ Convert the private key to :term:`raw encoding`.
+
+ Note: while the method is named "to_string", its name comes from
+ Python 2 days, when binary and character strings used the same type.
+ The type used in Python 3 is `bytes`.
+
+ :return: raw encoding of private key
+ :rtype: bytes
+ """
+ secexp = self.privkey.secret_multiplier
+ s = number_to_string(secexp, self.privkey.order)
+ return s
+
+ def to_pem(self, point_encoding="uncompressed"):
+ """
+ Convert the private key to the :term:`PEM` format.
+
+ See :func:`~SigningKey.from_pem` method for format description.
+
+ Only the named curve format is supported.
+ The public key will be included in generated string.
+
+ The PEM header will specify ``BEGIN EC PRIVATE KEY``
+
+ :param str point_encoding: format to use for encoding public point
+
+ :return: PEM encoded private key
+ :rtype: str
+ """
+ # TODO: "BEGIN ECPARAMETERS"
+ return der.topem(self.to_der(point_encoding), "EC PRIVATE KEY")
+
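+    # Illustrative usage sketch (assumes the default NIST192p curve and
+    # SHA-1 hash): the raw and PEM encodings round-trip through
+    # from_string()/from_pem().
+    #
+    #     sk = SigningKey.generate()
+    #     assert SigningKey.from_string(sk.to_string()).to_string() == sk.to_string()
+    #     assert SigningKey.from_pem(sk.to_pem()).to_string() == sk.to_string()
+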
+ def to_der(self, point_encoding="uncompressed"):
+ """
+ Convert the private key to the :term:`DER` format.
+
+ See :func:`~SigningKey.from_der` method for format specification.
+
+ Only the named curve format is supported.
+ The public key will be included in the generated string.
+
+ :param str point_encoding: format to use for encoding public point
+
+ :return: DER encoded private key
+ :rtype: bytes
+ """
+ # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1),
+ # cont[1],bitstring])
+ if point_encoding == "raw":
+ raise ValueError("raw encoding not allowed in DER")
+ encoded_vk = self.get_verifying_key().to_string(point_encoding)
+ # the 0 in encode_bitstring specifies the number of unused bits
+ # in the `encoded_vk` string
+ return der.encode_sequence(
+ der.encode_integer(1),
+ der.encode_octet_string(self.to_string()),
+ der.encode_constructed(0, self.curve.encoded_oid),
+ der.encode_constructed(1, der.encode_bitstring(encoded_vk, 0)))
+
+ def get_verifying_key(self):
+ """
+ Return the VerifyingKey associated with this private key.
+
+ Equivalent to reading the `verifying_key` field of an instance.
+
+ :return: a public key that can be used to verify the signatures made
+ with this SigningKey
+ :rtype: VerifyingKey
+ """
+ return self.verifying_key
+
+ def sign_deterministic(self, data, hashfunc=None,
+ sigencode=sigencode_string,
+ extra_entropy=b''):
+ """
+        Create signature over data using the deterministic RFC6979 algorithm.
+
+ The data will be hashed using the `hashfunc` function before signing.
+
+ This is the recommended method for performing signatures when hashing
+ of data is necessary.
+
+ :param data: data to be hashed and computed signature over
+ :type data: bytes like object
+ :param hashfunc: hash function to use for computing the signature,
+ if unspecified, the default hash function selected during
+ object initialisation will be used (see
+            `SigningKey.default_hashfunc`). The object needs to implement
+ the same interface as hashlib.sha1.
+ :type hashfunc: callable
+ :param sigencode: function used to encode the signature.
+ The function needs to accept three parameters: the two integers
+ that are the signature and the order of the curve over which the
+ signature was computed. It needs to return an encoded signature.
+ See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der`
+ as examples of such functions.
+ :type sigencode: callable
+ :param extra_entropy: additional data that will be fed into the random
+ number generator used in the RFC6979 process. Entirely optional.
+ :type extra_entropy: bytes like object
+
+ :return: encoded signature over `data`
+        :rtype: bytes or sigencode function dependent type
+ """
+ hashfunc = hashfunc or self.default_hashfunc
+ data = normalise_bytes(data)
+ extra_entropy = normalise_bytes(extra_entropy)
+ digest = hashfunc(data).digest()
+
+ return self.sign_digest_deterministic(
+ digest, hashfunc=hashfunc, sigencode=sigencode,
+ extra_entropy=extra_entropy, allow_truncate=True)
+
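+    # Illustrative usage sketch (assumes the default NIST192p curve, SHA-1
+    # hash and sigencode_string encoding; the message is arbitrary):
+    #
+    #     sk = SigningKey.generate()
+    #     vk = sk.get_verifying_key()
+    #     signature = sk.sign_deterministic(b"message")
+    #     assert vk.verify(signature, b"message")
+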
+ def sign_digest_deterministic(self, digest, hashfunc=None,
+ sigencode=sigencode_string,
+ extra_entropy=b'', allow_truncate=False):
+ """
+        Create signature for digest using the deterministic RFC6979 algorithm.
+
+        `digest` should be the output of a cryptographically secure hash function
+        like SHA256 or SHA3-256.
+
+ This is the recommended method for performing signatures when no
+ hashing of data is necessary.
+
+ :param digest: hash of data that will be signed
+ :type digest: bytes like object
+ :param hashfunc: hash function to use for computing the random "k"
+ value from RFC6979 process,
+ if unspecified, the default hash function selected during
+ object initialisation will be used (see
+            `SigningKey.default_hashfunc`). The object needs to implement
+ the same interface as hashlib.sha1.
+ :type hashfunc: callable
+ :param sigencode: function used to encode the signature.
+ The function needs to accept three parameters: the two integers
+ that are the signature and the order of the curve over which the
+ signature was computed. It needs to return an encoded signature.
+ See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der`
+ as examples of such functions.
+ :type sigencode: callable
+ :param extra_entropy: additional data that will be fed into the random
+ number generator used in the RFC6979 process. Entirely optional.
+ :type extra_entropy: bytes like object
+ :param bool allow_truncate: if True, the provided digest can have
+ bigger bit-size than the order of the curve, the extra bits (at
+ the end of the digest) will be truncated. Use it when signing
+ SHA-384 output using NIST256p or in similar situations.
+
+ :return: encoded signature for the `digest` hash
+        :rtype: bytes or sigencode function dependent type
+ """
+ secexp = self.privkey.secret_multiplier
+ hashfunc = hashfunc or self.default_hashfunc
+ digest = normalise_bytes(digest)
+ extra_entropy = normalise_bytes(extra_entropy)
+
+ def simple_r_s(r, s, order):
+ return r, s, order
+
+ retry_gen = 0
+ while True:
+ k = rfc6979.generate_k(
+ self.curve.generator.order(), secexp, hashfunc, digest,
+ retry_gen=retry_gen, extra_entropy=extra_entropy)
+ try:
+ r, s, order = self.sign_digest(digest,
+ sigencode=simple_r_s,
+ k=k,
+ allow_truncate=allow_truncate)
+ break
+ except RSZeroError:
+ retry_gen += 1
+
+ return sigencode(r, s, order)
+
+ def sign(self, data, entropy=None, hashfunc=None,
+ sigencode=sigencode_string, k=None):
+ """
+ Create signature over data using the probabilistic ECDSA algorithm.
+
+ This method uses the standard ECDSA algorithm that requires a
+ cryptographically secure random number generator.
+
+ It's recommended to use the :func:`~SigningKey.sign_deterministic`
+ method instead of this one.
+
+ :param data: data that will be hashed for signing
+ :type data: bytes like object
+ :param callable entropy: randomness source, os.urandom by default
+ :param hashfunc: hash function to use for hashing the provided `data`.
+ If unspecified the default hash function selected during
+ object initialisation will be used (see
+            `SigningKey.default_hashfunc`).
+ Should behave like hashlib.sha1. The output length of the
+ hash (in bytes) must not be longer than the length of the curve
+ order (rounded up to the nearest byte), so using SHA256 with
+ NIST256p is ok, but SHA256 with NIST192p is not. (In the 2**-96ish
+ unlikely event of a hash output larger than the curve order, the
+ hash will effectively be wrapped mod n).
+ Use hashfunc=hashlib.sha1 to match openssl's -ecdsa-with-SHA1 mode,
+ or hashfunc=hashlib.sha256 for openssl-1.0.0's -ecdsa-with-SHA256.
+ :type hashfunc: callable
+ :param sigencode: function used to encode the signature.
+ The function needs to accept three parameters: the two integers
+ that are the signature and the order of the curve over which the
+ signature was computed. It needs to return an encoded signature.
+ See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der`
+ as examples of such functions.
+ :type sigencode: callable
+ :param int k: a pre-selected nonce for calculating the signature.
+ In typical use cases, it should be set to None (the default) to
+ allow its generation from an entropy source.
+
+        :raises RSZeroError: in the unlikely event that the "r" or "s" parameter
+            is equal to 0, as that would leak the key. The caller should
+            try a better entropy source or a different 'k' in such a case.
+
+ :return: encoded signature of the hash of `data`
+        :rtype: bytes or sigencode function dependent type
+ """
+ hashfunc = hashfunc or self.default_hashfunc
+ data = normalise_bytes(data)
+ h = hashfunc(data).digest()
+ return self.sign_digest(h, entropy, sigencode, k, allow_truncate=True)
+
+ def sign_digest(self, digest, entropy=None, sigencode=sigencode_string,
+ k=None, allow_truncate=False):
+ """
+ Create signature over digest using the probabilistic ECDSA algorithm.
+
+ This method uses the standard ECDSA algorithm that requires a
+ cryptographically secure random number generator.
+
+ This method does not hash the input.
+
+ It's recommended to use the
+ :func:`~SigningKey.sign_digest_deterministic` method
+ instead of this one.
+
+ :param digest: hash value that will be signed
+ :type digest: bytes like object
+ :param callable entropy: randomness source, os.urandom by default
+ :param sigencode: function used to encode the signature.
+ The function needs to accept three parameters: the two integers
+ that are the signature and the order of the curve over which the
+ signature was computed. It needs to return an encoded signature.
+ See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der`
+ as examples of such functions.
+ :type sigencode: callable
+ :param int k: a pre-selected nonce for calculating the signature.
+ In typical use cases, it should be set to None (the default) to
+ allow its generation from an entropy source.
+ :param bool allow_truncate: if True, the provided digest can have
+ bigger bit-size than the order of the curve, the extra bits (at
+ the end of the digest) will be truncated. Use it when signing
+ SHA-384 output using NIST256p or in similar situations.
+
+        :raises RSZeroError: in the unlikely event that the "r" or "s" parameter
+            is equal to 0, as that would leak the key. The caller should
+            try a better entropy source in such a case.
+
+ :return: encoded signature for the `digest` hash
+        :rtype: bytes or sigencode function dependent type
+ """
+ digest = normalise_bytes(digest)
+ if allow_truncate:
+ digest = digest[:self.curve.baselen]
+ if len(digest) > self.curve.baselen:
+ raise BadDigestError("this curve (%s) is too short "
+ "for your digest (%d)" % (self.curve.name,
+ 8 * len(digest)))
+ number = string_to_number(digest)
+ r, s = self.sign_number(number, entropy, k)
+ return sigencode(r, s, self.privkey.order)
+
+ def sign_number(self, number, entropy=None, k=None):
+ """
+ Sign an integer directly.
+
+ Note, this is a low level method, usually you will want to use
+ :func:`~SigningKey.sign_deterministic` or
+ :func:`~SigningKey.sign_digest_deterministic`.
+
+ :param int number: number to sign using the probabilistic ECDSA
+ algorithm.
+ :param callable entropy: entropy source, os.urandom by default
+ :param int k: pre-selected nonce for signature operation. If unset
+ it will be selected at random using the entropy source.
+
+        :raises RSZeroError: in the unlikely event that the "r" or "s" parameter
+            is equal to 0, as that would leak the key. The caller should
+            try a different 'k' in such a case.
+
+ :return: the "r" and "s" parameters of the signature
+ :rtype: tuple of ints
+ """
+ order = self.privkey.order
+
+ if k is not None:
+ _k = k
+ else:
+ _k = randrange(order, entropy)
+
+ assert 1 <= _k < order
+ sig = self.privkey.sign(number, _k)
+ return sig.r, sig.s
diff --git a/third_party/python/ecdsa/src/ecdsa/numbertheory.py b/third_party/python/ecdsa/src/ecdsa/numbertheory.py
new file mode 100644
index 0000000000..b300440c59
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/numbertheory.py
@@ -0,0 +1,600 @@
+#! /usr/bin/env python
+#
+# Provide some simple capabilities from number theory.
+#
+# Version of 2008.11.14.
+#
+# Written in 2005 and 2006 by Peter Pearson and placed in the public domain.
+# Revision history:
+# 2008.11.14: Use pow(base, exponent, modulus) for modular_exp.
+# Make gcd and lcm accept arbitrarily many arguments.
+
+from __future__ import division
+
+from six import integer_types, PY3
+from six.moves import reduce
+try:
+ xrange
+except NameError:
+ xrange = range
+try:
+ from gmpy2 import powmod
+ GMPY2 = True
+ GMPY = False
+except ImportError:
+ GMPY2 = False
+ try:
+ from gmpy import mpz
+ GMPY = True
+ except ImportError:
+ GMPY = False
+
+import math
+import warnings
+
+
+class Error(Exception):
+ """Base class for exceptions in this module."""
+ pass
+
+
+class SquareRootError(Error):
+ pass
+
+
+class NegativeExponentError(Error):
+ pass
+
+
+def modular_exp(base, exponent, modulus): # pragma: no cover
+ """Raise base to exponent, reducing by modulus"""
+ # deprecated in 0.14
+ warnings.warn("Function is unused in library code. If you use this code, "
+ "change to pow() builtin.", DeprecationWarning)
+ if exponent < 0:
+ raise NegativeExponentError("Negative exponents (%d) not allowed"
+ % exponent)
+ return pow(base, exponent, modulus)
+
+
+def polynomial_reduce_mod(poly, polymod, p):
+ """Reduce poly by polymod, integer arithmetic modulo p.
+
+ Polynomials are represented as lists of coefficients
+ of increasing powers of x."""
+
+ # This module has been tested only by extensive use
+ # in calculating modular square roots.
+
+ # Just to make this easy, require a monic polynomial:
+ assert polymod[-1] == 1
+
+ assert len(polymod) > 1
+
+ while len(poly) >= len(polymod):
+ if poly[-1] != 0:
+ for i in xrange(2, len(polymod) + 1):
+ poly[-i] = (poly[-i] - poly[-1] * polymod[-i]) % p
+ poly = poly[0:-1]
+
+ return poly
+
+
+def polynomial_multiply_mod(m1, m2, polymod, p):
+ """Polynomial multiplication modulo a polynomial over ints mod p.
+
+ Polynomials are represented as lists of coefficients
+ of increasing powers of x."""
+
+ # This is just a seat-of-the-pants implementation.
+
+ # This module has been tested only by extensive use
+ # in calculating modular square roots.
+
+ # Initialize the product to zero:
+
+ prod = (len(m1) + len(m2) - 1) * [0]
+
+ # Add together all the cross-terms:
+
+ for i in xrange(len(m1)):
+ for j in xrange(len(m2)):
+ prod[i + j] = (prod[i + j] + m1[i] * m2[j]) % p
+
+ return polynomial_reduce_mod(prod, polymod, p)
+
+
+def polynomial_exp_mod(base, exponent, polymod, p):
+ """Polynomial exponentiation modulo a polynomial over ints mod p.
+
+ Polynomials are represented as lists of coefficients
+ of increasing powers of x."""
+
+ # Based on the Handbook of Applied Cryptography, algorithm 2.227.
+
+ # This module has been tested only by extensive use
+ # in calculating modular square roots.
+
+ assert exponent < p
+
+ if exponent == 0:
+ return [1]
+
+ G = base
+ k = exponent
+ if k % 2 == 1:
+ s = G
+ else:
+ s = [1]
+
+ while k > 1:
+ k = k // 2
+ G = polynomial_multiply_mod(G, G, polymod, p)
+ if k % 2 == 1:
+ s = polynomial_multiply_mod(G, s, polymod, p)
+
+ return s
+
+
+def jacobi(a, n):
+ """Jacobi symbol"""
+
+ # Based on the Handbook of Applied Cryptography (HAC), algorithm 2.149.
+
+ # This function has been tested by comparison with a small
+ # table printed in HAC, and by extensive use in calculating
+ # modular square roots.
+
+ assert n >= 3
+ assert n % 2 == 1
+ a = a % n
+ if a == 0:
+ return 0
+ if a == 1:
+ return 1
+ a1, e = a, 0
+ while a1 % 2 == 0:
+ a1, e = a1 // 2, e + 1
+ if e % 2 == 0 or n % 8 == 1 or n % 8 == 7:
+ s = 1
+ else:
+ s = -1
+ if a1 == 1:
+ return s
+ if n % 4 == 3 and a1 % 4 == 3:
+ s = -s
+ return s * jacobi(n % a1, a1)
+
+
+def square_root_mod_prime(a, p):
+ """Modular square root of a, mod p, p prime."""
+
+ # Based on the Handbook of Applied Cryptography, algorithms 3.34 to 3.39.
+
+ # This module has been tested for all values in [0,p-1] for
+ # every prime p from 3 to 1229.
+
+ assert 0 <= a < p
+ assert 1 < p
+
+ if a == 0:
+ return 0
+ if p == 2:
+ return a
+
+ jac = jacobi(a, p)
+ if jac == -1:
+ raise SquareRootError("%d has no square root modulo %d" \
+ % (a, p))
+
+ if p % 4 == 3:
+ return pow(a, (p + 1) // 4, p)
+
+ if p % 8 == 5:
+ d = pow(a, (p - 1) // 4, p)
+ if d == 1:
+ return pow(a, (p + 3) // 8, p)
+ if d == p - 1:
+ return (2 * a * pow(4 * a, (p - 5) // 8, p)) % p
+ raise RuntimeError("Shouldn't get here.")
+
+ if PY3:
+ range_top = p
+ else:
+ # xrange on python2 can take integers representable as C long only
+ range_top = min(0x7fffffff, p)
+ for b in xrange(2, range_top):
+ if jacobi(b * b - 4 * a, p) == -1:
+ f = (a, -b, 1)
+ ff = polynomial_exp_mod((0, 1), (p + 1) // 2, f, p)
+ assert ff[1] == 0
+ return ff[0]
+ raise RuntimeError("No b found.")
+
+
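+# Illustrative usage sketch: 2 is a quadratic residue modulo 7
+# (3*3 = 9 = 2 mod 7), so jacobi() reports 1 and square_root_mod_prime()
+# returns one of the two roots (3 or 4).
+#
+#     assert jacobi(2, 7) == 1
+#     assert square_root_mod_prime(2, 7) in (3, 4)
+
+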
+if GMPY2:
+ def inverse_mod(a, m):
+ """Inverse of a mod m."""
+ if a == 0:
+ return 0
+ return powmod(a, -1, m)
+elif GMPY:
+ def inverse_mod(a, m):
+ """Inverse of a mod m."""
+ # while libgmp likely does support inverses modulo, it is accessible
+ # only using the native `pow()` function, and `pow()` sanity checks
+ # the parameters before passing them on to underlying implementation
+ # on Python2
+ if a == 0:
+ return 0
+ a = mpz(a)
+ m = mpz(m)
+
+ lm, hm = mpz(1), mpz(0)
+ low, high = a % m, m
+ while low > 1:
+ r = high // low
+ lm, low, hm, high = hm - lm * r, high - low * r, lm, low
+
+ return lm % m
+else:
+ def inverse_mod(a, m):
+ """Inverse of a mod m."""
+
+ if a == 0:
+ return 0
+
+ lm, hm = 1, 0
+ low, high = a % m, m
+ while low > 1:
+ r = high // low
+ lm, low, hm, high = hm - lm * r, high - low * r, lm, low
+
+ return lm % m
+
+
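+# Illustrative usage sketch: 3 * 5 = 15 = 1 mod 7, so whichever backend
+# (gmpy2, gmpy or pure Python) provided inverse_mod, the inverse of 3 mod 7
+# is 5.
+#
+#     assert inverse_mod(3, 7) == 5
+#     assert (3 * inverse_mod(3, 7)) % 7 == 1
+
+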
+try:
+ gcd2 = math.gcd
+except AttributeError:
+ def gcd2(a, b):
+ """Greatest common divisor using Euclid's algorithm."""
+ while a:
+ a, b = b % a, a
+ return b
+
+
+def gcd(*a):
+ """Greatest common divisor.
+
+ Usage: gcd([ 2, 4, 6 ])
+ or: gcd(2, 4, 6)
+ """
+
+ if len(a) > 1:
+ return reduce(gcd2, a)
+ if hasattr(a[0], "__iter__"):
+ return reduce(gcd2, a[0])
+ return a[0]
+
+
+def lcm2(a, b):
+ """Least common multiple of two integers."""
+
+ return (a * b) // gcd(a, b)
+
+
+def lcm(*a):
+ """Least common multiple.
+
+ Usage: lcm([ 3, 4, 5 ])
+ or: lcm(3, 4, 5)
+ """
+
+ if len(a) > 1:
+ return reduce(lcm2, a)
+ if hasattr(a[0], "__iter__"):
+ return reduce(lcm2, a[0])
+ return a[0]
+
+
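+# Illustrative usage sketch: both call conventions give the same result.
+#
+#     assert gcd(12, 18, 24) == gcd([12, 18, 24]) == 6
+#     assert lcm(3, 4, 5) == lcm([3, 4, 5]) == 60
+
+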
+def factorization(n):
+ """Decompose n into a list of (prime,exponent) pairs."""
+
+ assert isinstance(n, integer_types)
+
+ if n < 2:
+ return []
+
+ result = []
+ d = 2
+
+ # Test the small primes:
+
+ for d in smallprimes:
+ if d > n:
+ break
+ q, r = divmod(n, d)
+ if r == 0:
+ count = 1
+ while d <= n:
+ n = q
+ q, r = divmod(n, d)
+ if r != 0:
+ break
+ count = count + 1
+ result.append((d, count))
+
+ # If n is still greater than the last of our small primes,
+ # it may require further work:
+
+ if n > smallprimes[-1]:
+ if is_prime(n): # If what's left is prime, it's easy:
+ result.append((n, 1))
+ else: # Ugh. Search stupidly for a divisor:
+ d = smallprimes[-1]
+ while 1:
+ d = d + 2 # Try the next divisor.
+ q, r = divmod(n, d)
+ if q < d: # n < d*d means we're done, n = 1 or prime.
+ break
+ if r == 0: # d divides n. How many times?
+ count = 1
+ n = q
+ while d <= n: # As long as d might still divide n,
+ q, r = divmod(n, d) # see if it does.
+ if r != 0:
+ break
+ n = q # It does. Reduce n, increase count.
+ count = count + 1
+ result.append((d, count))
+ if n > 1:
+ result.append((n, 1))
+
+ return result
+
+
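+# Illustrative usage sketch: 360 = 2**3 * 3**2 * 5.
+#
+#     assert factorization(360) == [(2, 3), (3, 2), (5, 1)]
+
+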
+def phi(n): # pragma: no cover
+ """Return the Euler totient function of n."""
+ # deprecated in 0.14
+ warnings.warn("Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning)
+
+ assert isinstance(n, integer_types)
+
+ if n < 3:
+ return 1
+
+ result = 1
+ ff = factorization(n)
+ for f in ff:
+ e = f[1]
+ if e > 1:
+ result = result * f[0] ** (e - 1) * (f[0] - 1)
+ else:
+ result = result * (f[0] - 1)
+ return result
+
+
+def carmichael(n): # pragma: no cover
+ """Return Carmichael function of n.
+
+ Carmichael(n) is the smallest integer x such that
+ m**x = 1 mod n for all m relatively prime to n.
+ """
+ # deprecated in 0.14
+ warnings.warn("Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning)
+
+ return carmichael_of_factorized(factorization(n))
+
+
+def carmichael_of_factorized(f_list): # pragma: no cover
+ """Return the Carmichael function of a number that is
+ represented as a list of (prime,exponent) pairs.
+ """
+ # deprecated in 0.14
+ warnings.warn("Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning)
+
+ if len(f_list) < 1:
+ return 1
+
+ result = carmichael_of_ppower(f_list[0])
+ for i in xrange(1, len(f_list)):
+ result = lcm(result, carmichael_of_ppower(f_list[i]))
+
+ return result
+
+
+def carmichael_of_ppower(pp): # pragma: no cover
+ """Carmichael function of the given power of the given prime.
+ """
+ # deprecated in 0.14
+ warnings.warn("Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning)
+
+ p, a = pp
+ if p == 2 and a > 2:
+ return 2**(a - 2)
+ else:
+ return (p - 1) * p**(a - 1)
+
+
+def order_mod(x, m): # pragma: no cover
+ """Return the order of x in the multiplicative group mod m.
+ """
+ # deprecated in 0.14
+ warnings.warn("Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning)
+
+ # Warning: this implementation is not very clever, and will
+ # take a long time if m is very large.
+
+ if m <= 1:
+ return 0
+
+ assert gcd(x, m) == 1
+
+ z = x
+ result = 1
+ while z != 1:
+ z = (z * x) % m
+ result = result + 1
+ return result
+
+
+def largest_factor_relatively_prime(a, b): # pragma: no cover
+ """Return the largest factor of a relatively prime to b.
+ """
+ # deprecated in 0.14
+ warnings.warn("Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning)
+
+ while 1:
+ d = gcd(a, b)
+ if d <= 1:
+ break
+ b = d
+ while 1:
+ q, r = divmod(a, d)
+ if r > 0:
+ break
+ a = q
+ return a
+
+
+def kinda_order_mod(x, m): # pragma: no cover
+ """Return the order of x in the multiplicative group mod m',
+ where m' is the largest factor of m relatively prime to x.
+ """
+ # deprecated in 0.14
+ warnings.warn("Function is unused by library code. If you use this code, "
+ "please open an issue in "
+ "https://github.com/warner/python-ecdsa",
+ DeprecationWarning)
+
+ return order_mod(x, largest_factor_relatively_prime(m, x))
+
+
+def is_prime(n):
+ """Return True if x is prime, False otherwise.
+
+ We use the Miller-Rabin test, as given in Menezes et al. p. 138.
+ This test is not exact: there are composite values n for which
+ it returns True.
+
+ In testing the odd numbers from 10000001 to 19999999,
+ about 66 composites got past the first test,
+ 5 got past the second test, and none got past the third.
+ Since factors of 2, 3, 5, 7, and 11 were detected during
+ preliminary screening, the number of numbers tested by
+ Miller-Rabin was (19999999 - 10000001)*(2/3)*(4/5)*(6/7)
+ = 4.57 million.
+ """
+
+ # (This is used to study the risk of false positives:)
+ global miller_rabin_test_count
+
+ miller_rabin_test_count = 0
+
+ if n <= smallprimes[-1]:
+ if n in smallprimes:
+ return True
+ else:
+ return False
+
+ if gcd(n, 2 * 3 * 5 * 7 * 11) != 1:
+ return False
+
+ # Choose a number of iterations sufficient to reduce the
+ # probability of accepting a composite below 2**-80
+ # (from Menezes et al. Table 4.4):
+
+ t = 40
+ n_bits = 1 + int(math.log(n, 2))
+ for k, tt in ((100, 27),
+ (150, 18),
+ (200, 15),
+ (250, 12),
+ (300, 9),
+ (350, 8),
+ (400, 7),
+ (450, 6),
+ (550, 5),
+ (650, 4),
+ (850, 3),
+ (1300, 2),
+ ):
+ if n_bits < k:
+ break
+ t = tt
+
+ # Run the test t times:
+
+ s = 0
+ r = n - 1
+ while (r % 2) == 0:
+ s = s + 1
+ r = r // 2
+ for i in xrange(t):
+ a = smallprimes[i]
+ y = pow(a, r, n)
+ if y != 1 and y != n - 1:
+ j = 1
+ while j <= s - 1 and y != n - 1:
+ y = pow(y, 2, n)
+ if y == 1:
+ miller_rabin_test_count = i + 1
+ return False
+ j = j + 1
+ if y != n - 1:
+ miller_rabin_test_count = i + 1
+ return False
+ return True
+
+
+def next_prime(starting_value):
+ "Return the smallest prime larger than the starting value."
+
+ if starting_value < 2:
+ return 2
+ result = (starting_value + 1) | 1
+ while not is_prime(result):
+ result = result + 2
+ return result
+
+
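+# Illustrative usage sketch: values up to smallprimes[-1] are answered from
+# the table below, larger ones go through the Miller-Rabin rounds.
+#
+#     assert is_prime(1009)
+#     assert not is_prime(1007)       # 19 * 53
+#     assert next_prime(1000) == 1009
+
+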
+smallprimes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41,
+ 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97,
+ 101, 103, 107, 109, 113, 127, 131, 137, 139, 149,
+ 151, 157, 163, 167, 173, 179, 181, 191, 193, 197,
+ 199, 211, 223, 227, 229, 233, 239, 241, 251, 257,
+ 263, 269, 271, 277, 281, 283, 293, 307, 311, 313,
+ 317, 331, 337, 347, 349, 353, 359, 367, 373, 379,
+ 383, 389, 397, 401, 409, 419, 421, 431, 433, 439,
+ 443, 449, 457, 461, 463, 467, 479, 487, 491, 499,
+ 503, 509, 521, 523, 541, 547, 557, 563, 569, 571,
+ 577, 587, 593, 599, 601, 607, 613, 617, 619, 631,
+ 641, 643, 647, 653, 659, 661, 673, 677, 683, 691,
+ 701, 709, 719, 727, 733, 739, 743, 751, 757, 761,
+ 769, 773, 787, 797, 809, 811, 821, 823, 827, 829,
+ 839, 853, 857, 859, 863, 877, 881, 883, 887, 907,
+ 911, 919, 929, 937, 941, 947, 953, 967, 971, 977,
+ 983, 991, 997, 1009, 1013, 1019, 1021, 1031, 1033,
+ 1039, 1049, 1051, 1061, 1063, 1069, 1087, 1091, 1093,
+ 1097, 1103, 1109, 1117, 1123, 1129, 1151, 1153, 1163,
+ 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, 1229]
+
+miller_rabin_test_count = 0
diff --git a/third_party/python/ecdsa/src/ecdsa/rfc6979.py b/third_party/python/ecdsa/src/ecdsa/rfc6979.py
new file mode 100644
index 0000000000..a48938123d
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/rfc6979.py
@@ -0,0 +1,107 @@
+'''
+RFC 6979:
+ Deterministic Usage of the Digital Signature Algorithm (DSA) and
+ Elliptic Curve Digital Signature Algorithm (ECDSA)
+
+ http://tools.ietf.org/html/rfc6979
+
+Many thanks to Coda Hale for his implementation in Go language:
+ https://github.com/codahale/rfc6979
+'''
+
+import hmac
+from binascii import hexlify
+from .util import number_to_string, number_to_string_crop, bit_length
+from ._compat import hmac_compat
+
+
+# bit_length was defined in this module previously so keep it for backwards
+# compatibility, will need to deprecate and remove it later
+__all__ = ["bit_length", "bits2int", "bits2octets", "generate_k"]
+
+
+def bits2int(data, qlen):
+ x = int(hexlify(data), 16)
+ l = len(data) * 8
+
+ if l > qlen:
+ return x >> (l - qlen)
+ return x
+
+
+def bits2octets(data, order):
+ z1 = bits2int(data, bit_length(order))
+ z2 = z1 - order
+
+ if z2 < 0:
+ z2 = z1
+
+ return number_to_string_crop(z2, order)
+
+
+# https://tools.ietf.org/html/rfc6979#section-3.2
+def generate_k(order, secexp, hash_func, data, retry_gen=0, extra_entropy=b''):
+ '''
+ order - order of the DSA generator used in the signature
+        secexp - secret exponent (private key) in numeric form
+ hash_func - reference to the same hash function used for generating hash
+ data - hash in binary form of the signing data
+ retry_gen - int - how many good 'k' values to skip before returning
+ extra_entropy - extra added data in binary form as per section-3.6 of
+ rfc6979
+ '''
+
+ qlen = bit_length(order)
+ holen = hash_func().digest_size
+ rolen = (qlen + 7) / 8
+ bx = (hmac_compat(number_to_string(secexp, order)),
+ hmac_compat(bits2octets(data, order)),
+ hmac_compat(extra_entropy))
+
+ # Step B
+ v = b'\x01' * holen
+
+ # Step C
+ k = b'\x00' * holen
+
+ # Step D
+
+ k = hmac.new(k, digestmod=hash_func)
+ k.update(v + b'\x00')
+ for i in bx:
+ k.update(i)
+ k = k.digest()
+
+ # Step E
+ v = hmac.new(k, v, hash_func).digest()
+
+ # Step F
+ k = hmac.new(k, digestmod=hash_func)
+ k.update(v + b'\x01')
+ for i in bx:
+ k.update(i)
+ k = k.digest()
+
+ # Step G
+ v = hmac.new(k, v, hash_func).digest()
+
+ # Step H
+ while True:
+ # Step H1
+ t = b''
+
+ # Step H2
+ while len(t) < rolen:
+ v = hmac.new(k, v, hash_func).digest()
+ t += v
+
+ # Step H3
+ secret = bits2int(t, qlen)
+
+ if 1 <= secret < order:
+ if retry_gen <= 0:
+ return secret
+ retry_gen -= 1
+
+ k = hmac.new(k, v + b'\x00', hash_func).digest()
+ v = hmac.new(k, v, hash_func).digest()
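+
+
+# Illustrative usage sketch (the private scalar 12345 and the message are
+# made-up values for the sketch; the order is the NIST P-256 group order,
+# as a caller like SigningKey.sign_digest_deterministic would pass it):
+#
+#     import hashlib
+#     order = 0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551
+#     k = generate_k(order, secexp=12345, hash_func=hashlib.sha256,
+#                    data=hashlib.sha256(b"message").digest())
+#     assert 1 <= k < order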
diff --git a/third_party/python/ecdsa/src/ecdsa/test_der.py b/third_party/python/ecdsa/src/ecdsa/test_der.py
new file mode 100644
index 0000000000..e6cd593d3e
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/test_der.py
@@ -0,0 +1,384 @@
+
+# compatibility with Python 2.6, for that we need unittest2 package,
+# which is not available on 3.3 or 3.4
+import warnings
+from binascii import hexlify
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+from six import b
+import hypothesis.strategies as st
+from hypothesis import given, example
+import pytest
+from ._compat import str_idx_as_int
+from .curves import NIST256p, NIST224p
+from .der import remove_integer, UnexpectedDER, read_length, encode_bitstring,\
+ remove_bitstring, remove_object, encode_oid
+
+
+class TestRemoveInteger(unittest.TestCase):
+ # DER requires the integers to be 0-padded only if they would be
+ # interpreted as negative, check if those errors are detected
+ def test_non_minimal_encoding(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_integer(b('\x02\x02\x00\x01'))
+
+ def test_negative_with_high_bit_set(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_integer(b('\x02\x01\x80'))
+
+ def test_minimal_with_high_bit_set(self):
+ val, rem = remove_integer(b('\x02\x02\x00\x80'))
+
+ self.assertEqual(val, 0x80)
+ self.assertFalse(rem)
+
+ def test_two_zero_bytes_with_high_bit_set(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_integer(b('\x02\x03\x00\x00\xff'))
+
+ def test_zero_length_integer(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_integer(b('\x02\x00'))
+
+ def test_empty_string(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_integer(b(''))
+
+ def test_encoding_of_zero(self):
+ val, rem = remove_integer(b('\x02\x01\x00'))
+
+ self.assertEqual(val, 0)
+ self.assertFalse(rem)
+
+ def test_encoding_of_127(self):
+ val, rem = remove_integer(b('\x02\x01\x7f'))
+
+ self.assertEqual(val, 127)
+ self.assertFalse(rem)
+
+ def test_encoding_of_128(self):
+ val, rem = remove_integer(b('\x02\x02\x00\x80'))
+
+ self.assertEqual(val, 128)
+ self.assertFalse(rem)
+
+
+class TestReadLength(unittest.TestCase):
+ # DER requires the lengths between 0 and 127 to be encoded using the short
+ # form and lengths above that encoded with minimal number of bytes
+ # necessary
+ def test_zero_length(self):
+ self.assertEqual((0, 1), read_length(b('\x00')))
+
+ def test_two_byte_zero_length(self):
+ with self.assertRaises(UnexpectedDER):
+ read_length(b('\x81\x00'))
+
+ def test_two_byte_small_length(self):
+ with self.assertRaises(UnexpectedDER):
+ read_length(b('\x81\x7f'))
+
+ def test_long_form_with_zero_length(self):
+ with self.assertRaises(UnexpectedDER):
+ read_length(b('\x80'))
+
+ def test_smallest_two_byte_length(self):
+ self.assertEqual((128, 2), read_length(b('\x81\x80')))
+
+ def test_zero_padded_length(self):
+ with self.assertRaises(UnexpectedDER):
+ read_length(b('\x82\x00\x80'))
+
+ def test_two_three_byte_length(self):
+ self.assertEqual((256, 3), read_length(b'\x82\x01\x00'))
+
+ def test_empty_string(self):
+ with self.assertRaises(UnexpectedDER):
+ read_length(b(''))
+
+ def test_length_overflow(self):
+ with self.assertRaises(UnexpectedDER):
+ read_length(b('\x83\x01\x00'))
+
+
+class TestEncodeBitstring(unittest.TestCase):
+ # DER requires BIT STRINGS to include a number of padding bits in the
+ # encoded byte string, that padding must be between 0 and 7
+
+ def test_old_call_convention(self):
+ """This is the old way to use the function."""
+ warnings.simplefilter('always')
+ with pytest.warns(DeprecationWarning) as warns:
+ der = encode_bitstring(b'\x00\xff')
+
+ self.assertEqual(len(warns), 1)
+ self.assertIn("unused= needs to be specified",
+ warns[0].message.args[0])
+
+ self.assertEqual(der, b'\x03\x02\x00\xff')
+
+ def test_new_call_convention(self):
+ """This is how it should be called now."""
+ warnings.simplefilter('always')
+ with pytest.warns(None) as warns:
+ der = encode_bitstring(b'\xff', 0)
+
+ # verify that new call convention doesn't raise Warnings
+ self.assertEqual(len(warns), 0)
+
+ self.assertEqual(der, b'\x03\x02\x00\xff')
+
+ def test_implicit_unused_bits(self):
+ """
+ Writing bit string with already included the number of unused bits.
+ """
+ warnings.simplefilter('always')
+ with pytest.warns(None) as warns:
+ der = encode_bitstring(b'\x00\xff', None)
+
+ # verify that new call convention doesn't raise Warnings
+ self.assertEqual(len(warns), 0)
+
+ self.assertEqual(der, b'\x03\x02\x00\xff')
+
+ def test_explicit_unused_bits(self):
+ der = encode_bitstring(b'\xff\xf0', 4)
+
+ self.assertEqual(der, b'\x03\x03\x04\xff\xf0')
+
+ def test_empty_string(self):
+ self.assertEqual(encode_bitstring(b'', 0), b'\x03\x01\x00')
+
+ def test_invalid_unused_count(self):
+ with self.assertRaises(ValueError):
+ encode_bitstring(b'\xff\x00', 8)
+
+ def test_invalid_unused_with_empty_string(self):
+ with self.assertRaises(ValueError):
+ encode_bitstring(b'', 1)
+
+ def test_non_zero_padding_bits(self):
+ with self.assertRaises(ValueError):
+ encode_bitstring(b'\xff', 2)
+
+
+class TestRemoveBitstring(unittest.TestCase):
+ def test_old_call_convention(self):
+ """This is the old way to call the function."""
+ warnings.simplefilter('always')
+ with pytest.warns(DeprecationWarning) as warns:
+ bits, rest = remove_bitstring(b'\x03\x02\x00\xff')
+
+ self.assertEqual(len(warns), 1)
+ self.assertIn("expect_unused= needs to be specified",
+ warns[0].message.args[0])
+
+ self.assertEqual(bits, b'\x00\xff')
+ self.assertEqual(rest, b'')
+
+ def test_new_call_convention(self):
+ warnings.simplefilter('always')
+ with pytest.warns(None) as warns:
+ bits, rest = remove_bitstring(b'\x03\x02\x00\xff', 0)
+
+ self.assertEqual(len(warns), 0)
+
+ self.assertEqual(bits, b'\xff')
+ self.assertEqual(rest, b'')
+
+ def test_implicit_unexpected_unused(self):
+ warnings.simplefilter('always')
+ with pytest.warns(None) as warns:
+ bits, rest = remove_bitstring(b'\x03\x02\x00\xff', None)
+
+ self.assertEqual(len(warns), 0)
+
+ self.assertEqual(bits, (b'\xff', 0))
+ self.assertEqual(rest, b'')
+
+ def test_with_padding(self):
+ ret, rest = remove_bitstring(b'\x03\x02\x04\xf0', None)
+
+ self.assertEqual(ret, (b'\xf0', 4))
+ self.assertEqual(rest, b'')
+
+ def test_not_a_bitstring(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b'\x02\x02\x00\xff', None)
+
+ def test_empty_encoding(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b'\x03\x00', None)
+
+ def test_empty_string(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b'', None)
+
+ def test_no_length(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b'\x03', None)
+
+ def test_unexpected_number_of_unused_bits(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b'\x03\x02\x00\xff', 1)
+
+ def test_invalid_encoding_of_unused_bits(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b'\x03\x03\x08\xff\x00', None)
+
+ def test_invalid_encoding_of_empty_string(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b'\x03\x01\x01', None)
+
+ def test_invalid_padding_bits(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_bitstring(b'\x03\x02\x01\xff', None)
+
+
+class TestStrIdxAsInt(unittest.TestCase):
+ def test_str(self):
+ self.assertEqual(115, str_idx_as_int('str', 0))
+
+ def test_bytes(self):
+ self.assertEqual(115, str_idx_as_int(b'str', 0))
+
+ def test_bytearray(self):
+ self.assertEqual(115, str_idx_as_int(bytearray(b'str'), 0))
+
+
+class TestEncodeOid(unittest.TestCase):
+ def test_pub_key_oid(self):
+ oid_ecPublicKey = encode_oid(1, 2, 840, 10045, 2, 1)
+ self.assertEqual(hexlify(oid_ecPublicKey), b("06072a8648ce3d0201"))
+
+ def test_nist224p_oid(self):
+ self.assertEqual(hexlify(NIST224p.encoded_oid), b("06052b81040021"))
+
+ def test_nist256p_oid(self):
+ self.assertEqual(hexlify(NIST256p.encoded_oid),
+ b"06082a8648ce3d030107")
+
+ def test_large_second_subid(self):
+ # from X.690, section 8.19.5
+ oid = encode_oid(2, 999, 3)
+ self.assertEqual(oid, b'\x06\x03\x88\x37\x03')
+
+ def test_with_two_subids(self):
+ oid = encode_oid(2, 999)
+ self.assertEqual(oid, b'\x06\x02\x88\x37')
+
+ def test_zero_zero(self):
+ oid = encode_oid(0, 0)
+ self.assertEqual(oid, b'\x06\x01\x00')
+
+ def test_with_wrong_types(self):
+ with self.assertRaises((TypeError, AssertionError)):
+ encode_oid(0, None)
+
+ def test_with_small_first_large_second(self):
+ with self.assertRaises(AssertionError):
+ encode_oid(1, 40)
+
+ def test_small_first_max_second(self):
+ oid = encode_oid(1, 39)
+ self.assertEqual(oid, b'\x06\x01\x4f')
+
+ def test_with_invalid_first(self):
+ with self.assertRaises(AssertionError):
+ encode_oid(3, 39)
+
+
+class TestRemoveObject(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.oid_ecPublicKey = encode_oid(1, 2, 840, 10045, 2, 1)
+
+ def test_pub_key_oid(self):
+ oid, rest = remove_object(self.oid_ecPublicKey)
+ self.assertEqual(rest, b'')
+ self.assertEqual(oid, (1, 2, 840, 10045, 2, 1))
+
+ def test_with_extra_bytes(self):
+ oid, rest = remove_object(self.oid_ecPublicKey + b'more')
+ self.assertEqual(rest, b'more')
+ self.assertEqual(oid, (1, 2, 840, 10045, 2, 1))
+
+ def test_with_large_second_subid(self):
+ # from X.690, section 8.19.5
+ oid, rest = remove_object(b'\x06\x03\x88\x37\x03')
+ self.assertEqual(rest, b'')
+ self.assertEqual(oid, (2, 999, 3))
+
+ def test_with_padded_first_subid(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b'\x06\x02\x80\x00')
+
+ def test_with_padded_second_subid(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b'\x06\x04\x88\x37\x80\x01')
+
+ def test_with_missing_last_byte_of_multi_byte(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b'\x06\x03\x88\x37\x83')
+
+ def test_with_two_subids(self):
+ oid, rest = remove_object(b'\x06\x02\x88\x37')
+ self.assertEqual(rest, b'')
+ self.assertEqual(oid, (2, 999))
+
+ def test_zero_zero(self):
+ oid, rest = remove_object(b'\x06\x01\x00')
+ self.assertEqual(rest, b'')
+ self.assertEqual(oid, (0, 0))
+
+ def test_empty_string(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b'')
+
+ def test_missing_length(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b'\x06')
+
+ def test_empty_oid(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b'\x06\x00')
+
+ def test_empty_oid_overflow(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b'\x06\x01')
+
+ def test_with_wrong_type(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b'\x04\x02\x88\x37')
+
+ def test_with_too_long_length(self):
+ with self.assertRaises(UnexpectedDER):
+ remove_object(b'\x06\x03\x88\x37')
+
+
+@st.composite
+def st_oid(draw, max_value=2**512, max_size=50):
+ """
+ Hypothesis strategy that returns valid OBJECT IDENTIFIERs as tuples
+
+ :param max_value: maximum value of any single sub-identifier
+ :param max_size: maximum length of the generated OID
+ """
+ first = draw(st.integers(min_value=0, max_value=2))
+ if first < 2:
+ second = draw(st.integers(min_value=0, max_value=39))
+ else:
+ second = draw(st.integers(min_value=0, max_value=max_value))
+ rest = draw(st.lists(st.integers(min_value=0, max_value=max_value),
+ max_size=max_size))
+ return (first, second) + tuple(rest)
+
+
+@given(st_oid())
+def test_oids(ids):
+ encoded_oid = encode_oid(*ids)
+ decoded_oid, rest = remove_object(encoded_oid)
+ assert rest == b''
+ assert decoded_oid == ids
diff --git a/third_party/python/ecdsa/src/ecdsa/test_ecdh.py b/third_party/python/ecdsa/src/ecdsa/test_ecdh.py
new file mode 100644
index 0000000000..74c8bbab64
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/test_ecdh.py
@@ -0,0 +1,350 @@
+
+import os
+import shutil
+import subprocess
+import pytest
+from binascii import hexlify, unhexlify
+
+from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p
+from .curves import curves
+from .ecdh import ECDH, InvalidCurveError, \
+ InvalidSharedSecretError, NoKeyError
+from .keys import SigningKey, VerifyingKey
+
+
+@pytest.mark.parametrize("vcurve", curves, ids=[curve.name for curve in curves])
+def test_ecdh_each(vcurve):
+ ecdh1 = ECDH(curve=vcurve)
+ ecdh2 = ECDH(curve=vcurve)
+
+ ecdh2.generate_private_key()
+ ecdh1.load_received_public_key(ecdh2.get_public_key())
+ ecdh2.load_received_public_key(ecdh1.generate_private_key())
+
+ secret1 = ecdh1.generate_sharedsecret_bytes()
+ secret2 = ecdh2.generate_sharedsecret_bytes()
+ assert secret1 == secret2
+
+
+def test_ecdh_no_public_key():
+ ecdh1 = ECDH(curve=NIST192p)
+
+ with pytest.raises(NoKeyError):
+ ecdh1.generate_sharedsecret_bytes()
+
+ ecdh1.generate_private_key()
+
+ with pytest.raises(NoKeyError):
+ ecdh1.generate_sharedsecret_bytes()
+
+
+def test_ecdh_wrong_public_key_curve():
+ ecdh1 = ECDH(curve=NIST192p)
+ ecdh1.generate_private_key()
+ ecdh2 = ECDH(curve=NIST256p)
+ ecdh2.generate_private_key()
+
+ with pytest.raises(InvalidCurveError):
+ ecdh1.load_received_public_key(ecdh2.get_public_key())
+
+ with pytest.raises(InvalidCurveError):
+ ecdh2.load_received_public_key(ecdh1.get_public_key())
+
+ ecdh1.public_key = ecdh2.get_public_key()
+ ecdh2.public_key = ecdh1.get_public_key()
+
+ with pytest.raises(InvalidCurveError):
+ ecdh1.generate_sharedsecret_bytes()
+
+ with pytest.raises(InvalidCurveError):
+ ecdh2.generate_sharedsecret_bytes()
+
+
+def test_ecdh_invalid_shared_secret_curve():
+ ecdh1 = ECDH(curve=NIST256p)
+ ecdh1.generate_private_key()
+
+ ecdh1.load_received_public_key(SigningKey.generate(NIST256p).get_verifying_key())
+
+ ecdh1.private_key.privkey.secret_multiplier = ecdh1.private_key.curve.order
+
+ with pytest.raises(InvalidSharedSecretError):
+ ecdh1.generate_sharedsecret_bytes()
+
+
+# https://github.com/scogliani/ecc-test-vectors/blob/master/ecdh_kat/secp192r1.txt
+# https://github.com/scogliani/ecc-test-vectors/blob/master/ecdh_kat/secp256r1.txt
+# https://github.com/coruus/nist-testvectors/blob/master/csrc.nist.gov/groups/STM/cavp/documents/components/ecccdhtestvectors/KAS_ECC_CDH_PrimitiveTest.txt
+@pytest.mark.parametrize(
+ "curve,privatekey,pubkey,secret",
+ [
+ pytest.param(
+ NIST192p,
+ "f17d3fea367b74d340851ca4270dcb24c271f445bed9d527",
+ "42ea6dd9969dd2a61fea1aac7f8e98edcc896c6e55857cc0"
+ "dfbe5d7c61fac88b11811bde328e8a0d12bf01a9d204b523",
+ "803d8ab2e5b6e6fca715737c3a82f7ce3c783124f6d51cd0",
+ id="NIST192p-1"
+ ),
+ pytest.param(
+ NIST192p,
+ "56e853349d96fe4c442448dacb7cf92bb7a95dcf574a9bd5",
+ "deb5712fa027ac8d2f22c455ccb73a91e17b6512b5e030e7"
+ "7e2690a02cc9b28708431a29fb54b87b1f0c14e011ac2125",
+ "c208847568b98835d7312cef1f97f7aa298283152313c29d",
+ id="NIST192p-2"
+ ),
+ pytest.param(
+ NIST192p,
+ "c6ef61fe12e80bf56f2d3f7d0bb757394519906d55500949",
+ "4edaa8efc5a0f40f843663ec5815e7762dddc008e663c20f"
+ "0a9f8dc67a3e60ef6d64b522185d03df1fc0adfd42478279",
+ "87229107047a3b611920d6e3b2c0c89bea4f49412260b8dd",
+ id="NIST192p-3"
+ ),
+ pytest.param(
+ NIST192p,
+ "e6747b9c23ba7044f38ff7e62c35e4038920f5a0163d3cda",
+ "8887c276edeed3e9e866b46d58d895c73fbd80b63e382e88"
+ "04c5097ba6645e16206cfb70f7052655947dd44a17f1f9d5",
+ "eec0bed8fc55e1feddc82158fd6dc0d48a4d796aaf47d46c",
+ id="NIST192p-4"
+ ),
+ pytest.param(
+ NIST192p,
+ "beabedd0154a1afcfc85d52181c10f5eb47adc51f655047d",
+ "0d045f30254adc1fcefa8a5b1f31bf4e739dd327cd18d594"
+ "542c314e41427c08278a08ce8d7305f3b5b849c72d8aff73",
+ "716e743b1b37a2cd8479f0a3d5a74c10ba2599be18d7e2f4",
+ id="NIST192p-5"
+ ),
+ pytest.param(
+ NIST192p,
+ "cf70354226667321d6e2baf40999e2fd74c7a0f793fa8699",
+ "fb35ca20d2e96665c51b98e8f6eb3d79113508d8bccd4516"
+ "368eec0d5bfb847721df6aaff0e5d48c444f74bf9cd8a5a7",
+ "f67053b934459985a315cb017bf0302891798d45d0e19508",
+ id="NIST192p-6"
+ ),
+ pytest.param(
+ NIST224p,
+ "8346a60fc6f293ca5a0d2af68ba71d1dd389e5e40837942df3e43cbd",
+ "af33cd0629bc7e996320a3f40368f74de8704fa37b8fab69abaae280"
+ "882092ccbba7930f419a8a4f9bb16978bbc3838729992559a6f2e2d7",
+ "7d96f9a3bd3c05cf5cc37feb8b9d5209d5c2597464dec3e9983743e8",
+ id="NIST224p"
+ ),
+ pytest.param(
+ NIST256p,
+ "7d7dc5f71eb29ddaf80d6214632eeae03d9058af1fb6d22ed80badb62bc1a534",
+ "700c48f77f56584c5cc632ca65640db91b6bacce3a4df6b42ce7cc838833d287"
+ "db71e509e3fd9b060ddb20ba5c51dcc5948d46fbf640dfe0441782cab85fa4ac",
+ "46fc62106420ff012e54a434fbdd2d25ccc5852060561e68040dd7778997bd7b",
+ id="NIST256p-1"
+ ),
+ pytest.param(
+ NIST256p,
+ "38f65d6dce47676044d58ce5139582d568f64bb16098d179dbab07741dd5caf5",
+ "809f04289c64348c01515eb03d5ce7ac1a8cb9498f5caa50197e58d43a86a7ae"
+ "b29d84e811197f25eba8f5194092cb6ff440e26d4421011372461f579271cda3",
+ "057d636096cb80b67a8c038c890e887d1adfa4195e9b3ce241c8a778c59cda67",
+ id="NIST256p-2"
+ ),
+ pytest.param(
+ NIST256p,
+ "1accfaf1b97712b85a6f54b148985a1bdc4c9bec0bd258cad4b3d603f49f32c8",
+ "a2339c12d4a03c33546de533268b4ad667debf458b464d77443636440ee7fec3"
+ "ef48a3ab26e20220bcda2c1851076839dae88eae962869a497bf73cb66faf536",
+ "2d457b78b4614132477618a5b077965ec90730a8c81a1c75d6d4ec68005d67ec",
+ id="NIST256p-3"
+ ),
+ pytest.param(
+ NIST256p,
+ "207c43a79bfee03db6f4b944f53d2fb76cc49ef1c9c4d34d51b6c65c4db6932d",
+ "df3989b9fa55495719b3cf46dccd28b5153f7808191dd518eff0c3cff2b705ed"
+ "422294ff46003429d739a33206c8752552c8ba54a270defc06e221e0feaf6ac4",
+ "96441259534b80f6aee3d287a6bb17b5094dd4277d9e294f8fe73e48bf2a0024",
+ id="NIST256p-4"
+ ),
+ pytest.param(
+ NIST256p,
+ "59137e38152350b195c9718d39673d519838055ad908dd4757152fd8255c09bf",
+ "41192d2813e79561e6a1d6f53c8bc1a433a199c835e141b05a74a97b0faeb922"
+ "1af98cc45e98a7e041b01cf35f462b7562281351c8ebf3ffa02e33a0722a1328",
+ "19d44c8d63e8e8dd12c22a87b8cd4ece27acdde04dbf47f7f27537a6999a8e62",
+ id="NIST256p-5"
+ ),
+ pytest.param(
+ NIST256p,
+ "f5f8e0174610a661277979b58ce5c90fee6c9b3bb346a90a7196255e40b132ef",
+ "33e82092a0f1fb38f5649d5867fba28b503172b7035574bf8e5b7100a3052792"
+ "f2cf6b601e0a05945e335550bf648d782f46186c772c0f20d3cd0d6b8ca14b2f",
+ "664e45d5bba4ac931cd65d52017e4be9b19a515f669bea4703542a2c525cd3d3",
+ id="NIST256p-6"
+ ),
+ pytest.param(
+ NIST384p,
+ "3cc3122a68f0d95027ad38c067916ba0eb8c38894d22e1b1"
+ "5618b6818a661774ad463b205da88cf699ab4d43c9cf98a1",
+ "a7c76b970c3b5fe8b05d2838ae04ab47697b9eaf52e76459"
+ "2efda27fe7513272734466b400091adbf2d68c58e0c50066"
+ "ac68f19f2e1cb879aed43a9969b91a0839c4c38a49749b66"
+ "1efedf243451915ed0905a32b060992b468c64766fc8437a",
+ "5f9d29dc5e31a163060356213669c8ce132e22f57c9a04f4"
+ "0ba7fcead493b457e5621e766c40a2e3d4d6a04b25e533f1",
+ id="NIST384p"
+ ),
+ pytest.param(
+ NIST521p,
+ "017eecc07ab4b329068fba65e56a1f8890aa935e57134ae0ffcce802735151f4ea"
+ "c6564f6ee9974c5e6887a1fefee5743ae2241bfeb95d5ce31ddcb6f9edb4d6fc47",
+ "00685a48e86c79f0f0875f7bc18d25eb5fc8c0b07e5da4f4370f3a949034085433"
+ "4b1e1b87fa395464c60626124a4e70d0f785601d37c09870ebf176666877a2046d"
+ "01ba52c56fc8776d9e8f5db4f0cc27636d0b741bbe05400697942e80b739884a83"
+ "bde99e0f6716939e632bc8986fa18dccd443a348b6c3e522497955a4f3c302f676",
+ "005fc70477c3e63bc3954bd0df3ea0d1f41ee21746ed95fc5e1fdf90930d5e1366"
+ "72d72cc770742d1711c3c3a4c334a0ad9759436a4d3c5bf6e74b9578fac148c831",
+ id="NIST521p"
+ ),
+ ],
+)
+def test_ecdh_NIST(curve,privatekey,pubkey,secret):
+ ecdh = ECDH(curve=curve)
+ ecdh.load_private_key_bytes(unhexlify(privatekey))
+ ecdh.load_received_public_key_bytes(unhexlify(pubkey))
+
+ sharedsecret = ecdh.generate_sharedsecret_bytes()
+
+ assert sharedsecret == unhexlify(secret)
+
+
+pem_local_private_key = (
+ "-----BEGIN EC PRIVATE KEY-----\n"
+ "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n"
+ "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n"
+ "bA==\n"
+ "-----END EC PRIVATE KEY-----\n")
+der_local_private_key = (
+ "305f02010104185ec8420bd6ef9252a942e989043ca29f561fa525770eb1c5a00a06082a864"
+ "8ce3d030101a13403320004b88177d084ef17f5e45639408028360f9f59b4a4d7264e62da06"
+ "51dce47a35a4c5b45cf51593423a8b557b9c2099f36c")
+pem_remote_public_key = (
+ "-----BEGIN PUBLIC KEY-----\n"
+ "MEkwEwYHKoZIzj0CAQYIKoZIzj0DAQEDMgAEuIF30ITvF/XkVjlAgCg2D59ZtKTX\n"
+ "Jk5i2gZR3OR6NaTFtFz1FZNCOotVe5wgmfNs\n"
+ "-----END PUBLIC KEY-----\n")
+der_remote_public_key = (
+ "3049301306072a8648ce3d020106082a8648ce3d03010103320004b88177d084ef17f5e4563"
+ "9408028360f9f59b4a4d7264e62da0651dce47a35a4c5b45cf51593423a8b557b9c2099f36c")
+gshared_secret = "8f457e34982478d1c34b9cd2d0c15911b72dd60d869e2cea"
+
+
+def test_ecdh_pem():
+ ecdh = ECDH()
+ ecdh.load_private_key_pem(pem_local_private_key)
+ ecdh.load_received_public_key_pem(pem_remote_public_key)
+
+ sharedsecret = ecdh.generate_sharedsecret_bytes()
+
+ assert sharedsecret == unhexlify(gshared_secret)
+
+
+def test_ecdh_der():
+ ecdh = ECDH()
+ ecdh.load_private_key_der(unhexlify(der_local_private_key))
+ ecdh.load_received_public_key_der(unhexlify(der_remote_public_key))
+
+ sharedsecret = ecdh.generate_sharedsecret_bytes()
+
+ assert sharedsecret == unhexlify(gshared_secret)
+
+
+# Exception classes used by run_openssl.
+class RunOpenSslError(Exception):
+ pass
+
+
+def run_openssl(cmd):
+ OPENSSL = "openssl"
+ p = subprocess.Popen([OPENSSL] + cmd.split(),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ stdout, ignored = p.communicate()
+ if p.returncode != 0:
+ raise RunOpenSslError(
+ "cmd '%s %s' failed: rc=%s, stdout/err was %s" %
+ (OPENSSL, cmd, p.returncode, stdout))
+ return stdout.decode()
+
+
+OPENSSL_SUPPORTED_CURVES = set(c.split(':')[0].strip() for c in
+ run_openssl("ecparam -list_curves")
+ .split('\n'))
+
+
+@pytest.mark.parametrize("vcurve", curves, ids=[curve.name for curve in curves])
+def test_ecdh_with_openssl(vcurve):
+ assert vcurve.openssl_name
+
+ if vcurve.openssl_name not in OPENSSL_SUPPORTED_CURVES:
+ pytest.skip("system openssl does not support " + vcurve.openssl_name)
+ return
+
+ try:
+ hlp = run_openssl("pkeyutl -help")
+ if hlp.find("-derive") == 0:
+ pytest.skip("system openssl does not support `pkeyutl -derive`")
+ return
+ except RunOpenSslError:
+ pytest.skip("system openssl does not support `pkeyutl -derive`")
+ return
+
+ if os.path.isdir("t"):
+ shutil.rmtree("t")
+ os.mkdir("t")
+ run_openssl("ecparam -name %s -genkey -out t/privkey1.pem" % vcurve.openssl_name)
+ run_openssl("ecparam -name %s -genkey -out t/privkey2.pem" % vcurve.openssl_name)
+ run_openssl("ec -in t/privkey1.pem -pubout -out t/pubkey1.pem")
+
+ ecdh1 = ECDH(curve=vcurve)
+ ecdh2 = ECDH(curve=vcurve)
+ with open("t/privkey1.pem") as e:
+ key = e.read()
+ ecdh1.load_private_key_pem(key)
+ with open("t/privkey2.pem") as e:
+ key = e.read()
+ ecdh2.load_private_key_pem(key)
+
+ with open("t/pubkey1.pem") as e:
+ key = e.read()
+ vk1 = VerifyingKey.from_pem(key)
+ assert vk1.to_string() == ecdh1.get_public_key().to_string()
+ vk2 = ecdh2.get_public_key()
+ with open("t/pubkey2.pem", "wb") as e:
+ e.write(vk2.to_pem())
+
+ ecdh1.load_received_public_key(vk2)
+ ecdh2.load_received_public_key(vk1)
+ secret1 = ecdh1.generate_sharedsecret_bytes()
+ secret2 = ecdh2.generate_sharedsecret_bytes()
+
+ assert secret1 == secret2
+
+ try:
+ run_openssl("pkeyutl -derive -inkey t/privkey1.pem -peerkey t/pubkey2.pem -out t/secret1")
+ run_openssl("pkeyutl -derive -inkey t/privkey2.pem -peerkey t/pubkey1.pem -out t/secret2")
+ except RunOpenSslError:
+ pytest.skip("system openssl does not support `pkeyutl -derive`")
+ return
+
+ with open("t/secret1", "rb") as e:
+ ssl_secret1 = e.read()
+ with open("t/secret1", "rb") as e:
+ ssl_secret2 = e.read()
+
+ if len(ssl_secret1) != vk1.curve.baselen:
+ pytest.skip("system openssl does not support `pkeyutl -derive`")
+ return
+
+ assert ssl_secret1 == ssl_secret2
+ assert secret1 == ssl_secret1
diff --git a/third_party/python/ecdsa/src/ecdsa/test_ecdsa.py b/third_party/python/ecdsa/src/ecdsa/test_ecdsa.py
new file mode 100644
index 0000000000..71c68913ac
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/test_ecdsa.py
@@ -0,0 +1,448 @@
+from __future__ import print_function
+import sys
+import hypothesis.strategies as st
+from hypothesis import given, settings, note, example
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+import pytest
+from .ecdsa import Private_key, Public_key, Signature, \
+ generator_192, digest_integer, ellipticcurve, point_is_valid, \
+ generator_224, generator_256, generator_384, generator_521, \
+ generator_secp256k1
+
+
+HYP_SETTINGS = {}
+# old hypothesis doesn't have the "deadline" setting
+if sys.version_info > (2, 7):  # pragma: no branch
+    # NIST521p is slow, so allow long execution time for it
+    HYP_SETTINGS["deadline"] = 5000
+
+
+class TestP192FromX9_62(unittest.TestCase):
+ """Check test vectors from X9.62"""
+ @classmethod
+ def setUpClass(cls):
+ cls.d = 651056770906015076056810763456358567190100156695615665659
+ cls.Q = cls.d * generator_192
+ cls.k = 6140507067065001063065065565667405560006161556565665656654
+ cls.R = cls.k * generator_192
+
+ cls.msg = 968236873715988614170569073515315707566766479517
+ cls.pubk = Public_key(generator_192, generator_192 * cls.d)
+ cls.privk = Private_key(cls.pubk, cls.d)
+ cls.sig = cls.privk.sign(cls.msg, cls.k)
+
+ def test_point_multiplication(self):
+ assert self.Q.x() == 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5
+
+ def test_point_multiplication_2(self):
+ assert self.R.x() == 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD
+ assert self.R.y() == 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835
+
+ def test_mult_and_addition(self):
+ u1 = 2563697409189434185194736134579731015366492496392189760599
+ u2 = 6266643813348617967186477710235785849136406323338782220568
+ temp = u1 * generator_192 + u2 * self.Q
+ assert temp.x() == 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD
+ assert temp.y() == 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835
+
+ def test_signature(self):
+ r, s = self.sig.r, self.sig.s
+ assert r == 3342403536405981729393488334694600415596881826869351677613
+ assert s == 5735822328888155254683894997897571951568553642892029982342
+
+ def test_verification(self):
+ assert self.pubk.verifies(self.msg, self.sig)
+
+ def test_rejection(self):
+ assert not self.pubk.verifies(self.msg - 1, self.sig)
+
+
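+# For context: the verification exercised above is the standard ECDSA check.
+# With e the message digest, w = s^-1 mod n, u1 = e*w and u2 = r*w, the point
+# u1*G + u2*Q must have an x-coordinate congruent to r mod n; this is the same
+# computation that test_mult_and_addition performs with the X9.62 sample values.
+
+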
+class TestPublicKey(unittest.TestCase):
+
+ def test_equality_public_keys(self):
+ gen = generator_192
+ x = 0xc58d61f88d905293bcd4cd0080bcb1b7f811f2ffa41979f6
+ y = 0x8804dc7a7c4c7f8b5d437f5156f3312ca7d6de8a0e11867f
+ point = ellipticcurve.Point(gen.curve(), x, y)
+ pub_key1 = Public_key(gen, point)
+ pub_key2 = Public_key(gen, point)
+ self.assertEqual(pub_key1, pub_key2)
+
+ def test_inequality_public_key(self):
+ gen = generator_192
+ x1 = 0xc58d61f88d905293bcd4cd0080bcb1b7f811f2ffa41979f6
+ y1 = 0x8804dc7a7c4c7f8b5d437f5156f3312ca7d6de8a0e11867f
+ point1 = ellipticcurve.Point(gen.curve(), x1, y1)
+
+ x2 = 0x6a223d00bd22c52833409a163e057e5b5da1def2a197dd15
+ y2 = 0x7b482604199367f1f303f9ef627f922f97023e90eae08abf
+ point2 = ellipticcurve.Point(gen.curve(), x2, y2)
+
+ pub_key1 = Public_key(gen, point1)
+ pub_key2 = Public_key(gen, point2)
+ self.assertNotEqual(pub_key1, pub_key2)
+
+ def test_inequality_public_key_not_implemented(self):
+ gen = generator_192
+ x = 0xc58d61f88d905293bcd4cd0080bcb1b7f811f2ffa41979f6
+ y = 0x8804dc7a7c4c7f8b5d437f5156f3312ca7d6de8a0e11867f
+ point = ellipticcurve.Point(gen.curve(), x, y)
+ pub_key = Public_key(gen, point)
+ self.assertNotEqual(pub_key, None)
+
+
+class TestPrivateKey(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ gen = generator_192
+ x = 0xc58d61f88d905293bcd4cd0080bcb1b7f811f2ffa41979f6
+ y = 0x8804dc7a7c4c7f8b5d437f5156f3312ca7d6de8a0e11867f
+ point = ellipticcurve.Point(gen.curve(), x, y)
+ cls.pub_key = Public_key(gen, point)
+
+ def test_equality_private_keys(self):
+ pr_key1 = Private_key(self.pub_key, 100)
+ pr_key2 = Private_key(self.pub_key, 100)
+ self.assertEqual(pr_key1, pr_key2)
+
+ def test_inequality_private_keys(self):
+ pr_key1 = Private_key(self.pub_key, 100)
+ pr_key2 = Private_key(self.pub_key, 200)
+ self.assertNotEqual(pr_key1, pr_key2)
+
+ def test_inequality_private_keys_not_implemented(self):
+ pr_key = Private_key(self.pub_key, 100)
+ self.assertNotEqual(pr_key, None)
+
+
+# Testing point validity, as per ECDSAVS.pdf B.2.2:
+P192_POINTS = [
+ (generator_192,
+ 0xcd6d0f029a023e9aaca429615b8f577abee685d8257cc83a,
+ 0x00019c410987680e9fb6c0b6ecc01d9a2647c8bae27721bacdfc,
+ False),
+
+ (generator_192,
+ 0x00017f2fce203639e9eaf9fb50b81fc32776b30e3b02af16c73b,
+ 0x95da95c5e72dd48e229d4748d4eee658a9a54111b23b2adb,
+ False),
+
+ (generator_192,
+ 0x4f77f8bc7fccbadd5760f4938746d5f253ee2168c1cf2792,
+ 0x000147156ff824d131629739817edb197717c41aab5c2a70f0f6,
+ False),
+
+ (generator_192,
+ 0xc58d61f88d905293bcd4cd0080bcb1b7f811f2ffa41979f6,
+ 0x8804dc7a7c4c7f8b5d437f5156f3312ca7d6de8a0e11867f,
+ True),
+
+ (generator_192,
+ 0xcdf56c1aa3d8afc53c521adf3ffb96734a6a630a4a5b5a70,
+ 0x97c1c44a5fb229007b5ec5d25f7413d170068ffd023caa4e,
+ True),
+
+ (generator_192,
+ 0x89009c0dc361c81e99280c8e91df578df88cdf4b0cdedced,
+ 0x27be44a529b7513e727251f128b34262a0fd4d8ec82377b9,
+ True),
+
+ (generator_192,
+ 0x6a223d00bd22c52833409a163e057e5b5da1def2a197dd15,
+ 0x7b482604199367f1f303f9ef627f922f97023e90eae08abf,
+ True),
+
+ (generator_192,
+ 0x6dccbde75c0948c98dab32ea0bc59fe125cf0fb1a3798eda,
+ 0x0001171a3e0fa60cf3096f4e116b556198de430e1fbd330c8835,
+ False),
+
+ (generator_192,
+ 0xd266b39e1f491fc4acbbbc7d098430931cfa66d55015af12,
+ 0x193782eb909e391a3148b7764e6b234aa94e48d30a16dbb2,
+ False),
+
+ (generator_192,
+ 0x9d6ddbcd439baa0c6b80a654091680e462a7d1d3f1ffeb43,
+ 0x6ad8efc4d133ccf167c44eb4691c80abffb9f82b932b8caa,
+ False),
+
+ (generator_192,
+ 0x146479d944e6bda87e5b35818aa666a4c998a71f4e95edbc,
+ 0xa86d6fe62bc8fbd88139693f842635f687f132255858e7f6,
+ False),
+
+ (generator_192,
+ 0xe594d4a598046f3598243f50fd2c7bd7d380edb055802253,
+ 0x509014c0c4d6b536e3ca750ec09066af39b4c8616a53a923,
+ False)]
+
+
+@pytest.mark.parametrize("generator,x,y,expected", P192_POINTS)
+def test_point_validity(generator, x, y, expected):
+ """
+ `generator` defines the curve; is `(x, y)` a point on
+ this curve? `expected` is True if the right answer is Yes.
+ """
+ assert point_is_valid(generator, x, y) == expected
+
+
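+# For reference, point_is_valid() above checks, among other conditions, that
+# (x, y) satisfies the curve equation y^2 = x^3 + a*x + b (mod p).  A rough
+# sketch of just that part (a hypothetical helper, not used by the tests):
+def _satisfies_curve_equation(generator, x, y):
+    curve = generator.curve()
+    modulus = curve.p()
+    return (y * y - (x * x * x + curve.a() * x + curve.b())) % modulus == 0
+
+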
+# Trying signature-verification tests from ECDSAVS.pdf B.2.4:
+CURVE_192_KATS = [
+ (generator_192,
+ int("0x84ce72aa8699df436059f052ac51b6398d2511e49631bcb7e71f89c499b9ee"
+ "425dfbc13a5f6d408471b054f2655617cbbaf7937b7c80cd8865cf02c8487d30"
+ "d2b0fbd8b2c4e102e16d828374bbc47b93852f212d5043c3ea720f086178ff79"
+ "8cc4f63f787b9c2e419efa033e7644ea7936f54462dc21a6c4580725f7f0e7d1"
+ "58", 16),
+ 0xd9dbfb332aa8e5ff091e8ce535857c37c73f6250ffb2e7ac,
+ 0x282102e364feded3ad15ddf968f88d8321aa268dd483ebc4,
+ 0x64dca58a20787c488d11d6dd96313f1b766f2d8efe122916,
+ 0x1ecba28141e84ab4ecad92f56720e2cc83eb3d22dec72479,
+ True),
+
+ (generator_192,
+ int("0x94bb5bacd5f8ea765810024db87f4224ad71362a3c28284b2b9f39fab86db1"
+ "2e8beb94aae899768229be8fdb6c4f12f28912bb604703a79ccff769c1607f5a"
+ "91450f30ba0460d359d9126cbd6296be6d9c4bb96c0ee74cbb44197c207f6db3"
+ "26ab6f5a659113a9034e54be7b041ced9dcf6458d7fb9cbfb2744d999f7dfd63"
+ "f4", 16),
+ 0x3e53ef8d3112af3285c0e74842090712cd324832d4277ae7,
+ 0xcc75f8952d30aec2cbb719fc6aa9934590b5d0ff5a83adb7,
+ 0x8285261607283ba18f335026130bab31840dcfd9c3e555af,
+ 0x356d89e1b04541afc9704a45e9c535ce4a50929e33d7e06c,
+ True),
+
+ (generator_192,
+ int("0xf6227a8eeb34afed1621dcc89a91d72ea212cb2f476839d9b4243c66877911"
+ "b37b4ad6f4448792a7bbba76c63bdd63414b6facab7dc71c3396a73bd7ee14cd"
+ "d41a659c61c99b779cecf07bc51ab391aa3252386242b9853ea7da67fd768d30"
+ "3f1b9b513d401565b6f1eb722dfdb96b519fe4f9bd5de67ae131e64b40e78c42"
+ "dd", 16),
+ 0x16335dbe95f8e8254a4e04575d736befb258b8657f773cb7,
+ 0x421b13379c59bc9dce38a1099ca79bbd06d647c7f6242336,
+ 0x4141bd5d64ea36c5b0bd21ef28c02da216ed9d04522b1e91,
+ 0x159a6aa852bcc579e821b7bb0994c0861fb08280c38daa09,
+ False),
+
+ (generator_192,
+ int("0x16b5f93afd0d02246f662761ed8e0dd9504681ed02a253006eb36736b56309"
+ "7ba39f81c8e1bce7a16c1339e345efabbc6baa3efb0612948ae51103382a8ee8"
+ "bc448e3ef71e9f6f7a9676694831d7f5dd0db5446f179bcb737d4a526367a447"
+ "bfe2c857521c7f40b6d7d7e01a180d92431fb0bbd29c04a0c420a57b3ed26ccd"
+ "8a", 16),
+ 0xfd14cdf1607f5efb7b1793037b15bdf4baa6f7c16341ab0b,
+ 0x83fa0795cc6c4795b9016dac928fd6bac32f3229a96312c4,
+ 0x8dfdb832951e0167c5d762a473c0416c5c15bc1195667dc1,
+ 0x1720288a2dc13fa1ec78f763f8fe2ff7354a7e6fdde44520,
+ False),
+
+ (generator_192,
+ int("0x08a2024b61b79d260e3bb43ef15659aec89e5b560199bc82cf7c65c77d3919"
+ "2e03b9a895d766655105edd9188242b91fbde4167f7862d4ddd61e5d4ab55196"
+ "683d4f13ceb90d87aea6e07eb50a874e33086c4a7cb0273a8e1c4408f4b846bc"
+ "eae1ebaac1b2b2ea851a9b09de322efe34cebe601653efd6ddc876ce8c2f2072"
+ "fb", 16),
+ 0x674f941dc1a1f8b763c9334d726172d527b90ca324db8828,
+ 0x65adfa32e8b236cb33a3e84cf59bfb9417ae7e8ede57a7ff,
+ 0x9508b9fdd7daf0d8126f9e2bc5a35e4c6d800b5b804d7796,
+ 0x36f2bf6b21b987c77b53bb801b3435a577e3d493744bfab0,
+ False),
+
+ (generator_192,
+ int("0x1843aba74b0789d4ac6b0b8923848023a644a7b70afa23b1191829bbe4397c"
+ "e15b629bf21a8838298653ed0c19222b95fa4f7390d1b4c844d96e645537e0aa"
+ "e98afb5c0ac3bd0e4c37f8daaff25556c64e98c319c52687c904c4de7240a1cc"
+ "55cd9756b7edaef184e6e23b385726e9ffcba8001b8f574987c1a3fedaaa83ca"
+ "6d", 16),
+ 0x10ecca1aad7220b56a62008b35170bfd5e35885c4014a19f,
+ 0x04eb61984c6c12ade3bc47f3c629ece7aa0a033b9948d686,
+ 0x82bfa4e82c0dfe9274169b86694e76ce993fd83b5c60f325,
+ 0xa97685676c59a65dbde002fe9d613431fb183e8006d05633,
+ False),
+
+ (generator_192,
+ int("0x5a478f4084ddd1a7fea038aa9732a822106385797d02311aeef4d0264f824f"
+ "698df7a48cfb6b578cf3da416bc0799425bb491be5b5ecc37995b85b03420a98"
+ "f2c4dc5c31a69a379e9e322fbe706bbcaf0f77175e05cbb4fa162e0da82010a2"
+ "78461e3e974d137bc746d1880d6eb02aa95216014b37480d84b87f717bb13f76"
+ "e1", 16),
+ 0x6636653cb5b894ca65c448277b29da3ad101c4c2300f7c04,
+ 0xfdf1cbb3fc3fd6a4f890b59e554544175fa77dbdbeb656c1,
+ 0xeac2ddecddfb79931a9c3d49c08de0645c783a24cb365e1c,
+ 0x3549fee3cfa7e5f93bc47d92d8ba100e881a2a93c22f8d50,
+ False),
+
+ (generator_192,
+ int("0xc598774259a058fa65212ac57eaa4f52240e629ef4c310722088292d1d4af6"
+ "c39b49ce06ba77e4247b20637174d0bd67c9723feb57b5ead232b47ea452d5d7"
+ "a089f17c00b8b6767e434a5e16c231ba0efa718a340bf41d67ea2d295812ff1b"
+ "9277daacb8bc27b50ea5e6443bcf95ef4e9f5468fe78485236313d53d1c68f6b"
+ "a2", 16),
+ 0xa82bd718d01d354001148cd5f69b9ebf38ff6f21898f8aaa,
+ 0xe67ceede07fc2ebfafd62462a51e4b6c6b3d5b537b7caf3e,
+ 0x4d292486c620c3de20856e57d3bb72fcde4a73ad26376955,
+ 0xa85289591a6081d5728825520e62ff1c64f94235c04c7f95,
+ False),
+
+ (generator_192,
+ int("0xca98ed9db081a07b7557f24ced6c7b9891269a95d2026747add9e9eb80638a"
+ "961cf9c71a1b9f2c29744180bd4c3d3db60f2243c5c0b7cc8a8d40a3f9a7fc91"
+ "0250f2187136ee6413ffc67f1a25e1c4c204fa9635312252ac0e0481d89b6d53"
+ "808f0c496ba87631803f6c572c1f61fa049737fdacce4adff757afed4f05beb6"
+ "58", 16),
+ 0x7d3b016b57758b160c4fca73d48df07ae3b6b30225126c2f,
+ 0x4af3790d9775742bde46f8da876711be1b65244b2b39e7ec,
+ 0x95f778f5f656511a5ab49a5d69ddd0929563c29cbc3a9e62,
+ 0x75c87fc358c251b4c83d2dd979faad496b539f9f2ee7a289,
+ False),
+
+ (generator_192,
+ int("0x31dd9a54c8338bea06b87eca813d555ad1850fac9742ef0bbe40dad400e102"
+ "88acc9c11ea7dac79eb16378ebea9490e09536099f1b993e2653cd50240014c9"
+ "0a9c987f64545abc6a536b9bd2435eb5e911fdfde2f13be96ea36ad38df4ae9e"
+ "a387b29cced599af777338af2794820c9cce43b51d2112380a35802ab7e396c9"
+ "7a", 16),
+ 0x9362f28c4ef96453d8a2f849f21e881cd7566887da8beb4a,
+ 0xe64d26d8d74c48a024ae85d982ee74cd16046f4ee5333905,
+ 0xf3923476a296c88287e8de914b0b324ad5a963319a4fe73b,
+ 0xf0baeed7624ed00d15244d8ba2aede085517dbdec8ac65f5,
+ True),
+
+ (generator_192,
+ int("0xb2b94e4432267c92f9fdb9dc6040c95ffa477652761290d3c7de312283f645"
+ "0d89cc4aabe748554dfb6056b2d8e99c7aeaad9cdddebdee9dbc099839562d90"
+ "64e68e7bb5f3a6bba0749ca9a538181fc785553a4000785d73cc207922f63e8c"
+ "e1112768cb1de7b673aed83a1e4a74592f1268d8e2a4e9e63d414b5d442bd045"
+ "6d", 16),
+ 0xcc6fc032a846aaac25533eb033522824f94e670fa997ecef,
+ 0xe25463ef77a029eccda8b294fd63dd694e38d223d30862f1,
+ 0x066b1d07f3a40e679b620eda7f550842a35c18b80c5ebe06,
+ 0xa0b0fb201e8f2df65e2c4508ef303bdc90d934016f16b2dc,
+ False),
+
+ (generator_192,
+ int("0x4366fcadf10d30d086911de30143da6f579527036937007b337f7282460eae"
+ "5678b15cccda853193ea5fc4bc0a6b9d7a31128f27e1214988592827520b214e"
+ "ed5052f7775b750b0c6b15f145453ba3fee24a085d65287e10509eb5d5f602c4"
+ "40341376b95c24e5c4727d4b859bfe1483d20538acdd92c7997fa9c614f0f839"
+ "d7", 16),
+ 0x955c908fe900a996f7e2089bee2f6376830f76a19135e753,
+ 0xba0c42a91d3847de4a592a46dc3fdaf45a7cc709b90de520,
+ 0x1f58ad77fc04c782815a1405b0925e72095d906cbf52a668,
+ 0xf2e93758b3af75edf784f05a6761c9b9a6043c66b845b599,
+ False),
+
+ (generator_192,
+ int("0x543f8af57d750e33aa8565e0cae92bfa7a1ff78833093421c2942cadf99866"
+ "70a5ff3244c02a8225e790fbf30ea84c74720abf99cfd10d02d34377c3d3b412"
+ "69bea763384f372bb786b5846f58932defa68023136cd571863b304886e95e52"
+ "e7877f445b9364b3f06f3c28da12707673fecb4b8071de06b6e0a3c87da160ce"
+ "f3", 16),
+ 0x31f7fa05576d78a949b24812d4383107a9a45bb5fccdd835,
+ 0x8dc0eb65994a90f02b5e19bd18b32d61150746c09107e76b,
+ 0xbe26d59e4e883dde7c286614a767b31e49ad88789d3a78ff,
+ 0x8762ca831c1ce42df77893c9b03119428e7a9b819b619068,
+ False),
+
+ (generator_192,
+ int("0xd2e8454143ce281e609a9d748014dcebb9d0bc53adb02443a6aac2ffe6cb009f"
+ "387c346ecb051791404f79e902ee333ad65e5c8cb38dc0d1d39a8dc90add502357"
+ "2720e5b94b190d43dd0d7873397504c0c7aef2727e628eb6a74411f2e400c65670"
+ "716cb4a815dc91cbbfeb7cfe8c929e93184c938af2c078584da045e8f8d1", 16),
+ 0x66aa8edbbdb5cf8e28ceb51b5bda891cae2df84819fe25c0,
+ 0x0c6bc2f69030a7ce58d4a00e3b3349844784a13b8936f8da,
+ 0xa4661e69b1734f4a71b788410a464b71e7ffe42334484f23,
+ 0x738421cf5e049159d69c57a915143e226cac8355e149afe9,
+ False),
+
+ (generator_192,
+ int("0x6660717144040f3e2f95a4e25b08a7079c702a8b29babad5a19a87654bc5c5af"
+ "a261512a11b998a4fb36b5d8fe8bd942792ff0324b108120de86d63f65855e5461"
+ "184fc96a0a8ffd2ce6d5dfb0230cbbdd98f8543e361b3205f5da3d500fdc8bac6d"
+ "b377d75ebef3cb8f4d1ff738071ad0938917889250b41dd1d98896ca06fb", 16),
+ 0xbcfacf45139b6f5f690a4c35a5fffa498794136a2353fc77,
+ 0x6f4a6c906316a6afc6d98fe1f0399d056f128fe0270b0f22,
+ 0x9db679a3dafe48f7ccad122933acfe9da0970b71c94c21c1,
+ 0x984c2db99827576c0a41a5da41e07d8cc768bc82f18c9da9,
+ False)
+ ]
+
+
+@pytest.mark.parametrize("gen,msg,qx,qy,r,s,expected", CURVE_192_KATS)
+def test_signature_validity(gen, msg, qx, qy, r, s, expected):
+ """
+ `msg` = message, `qx` and `qy` represent the base point on
+ elliptic curve of `gen`, `r` and `s` are the signature, and
+ `expected` is True iff the signature is expected to be valid."""
+ pubk = Public_key(gen,
+ ellipticcurve.Point(gen.curve(), qx, qy))
+ assert expected == pubk.verifies(digest_integer(msg), Signature(r, s))
+
+
+@pytest.mark.parametrize("gen,msg,qx,qy,r,s,expected",
+ [x for x in CURVE_192_KATS if x[6]])
+def test_pk_recovery(gen, msg, r, s, qx, qy, expected):
+ del expected
+ sign = Signature(r, s)
+ pks = sign.recover_public_keys(digest_integer(msg), gen)
+
+ assert pks
+
+ # Test if the signature is valid for all found public keys
+ for pk in pks:
+ q = pk.point
+ test_signature_validity(gen, msg, q.x(), q.y(), r, s, True)
+
+ # Test if the original public key is in the set of found keys
+ original_q = ellipticcurve.Point(gen.curve(), qx, qy)
+ points = [pk.point for pk in pks]
+ assert original_q in points
+
+
+@st.composite
+def st_random_gen_key_msg_nonce(draw):
+ """Hypothesis strategy for test_sig_verify()."""
+ name_gen = {
+ "generator_192": generator_192,
+ "generator_224": generator_224,
+ "generator_256": generator_256,
+ "generator_secp256k1": generator_secp256k1,
+ "generator_384": generator_384,
+ "generator_521": generator_521}
+ name = draw(st.sampled_from(sorted(name_gen.keys())))
+ note("Generator used: {0}".format(name))
+ generator = name_gen[name]
+ order = int(generator.order())
+
+ key = draw(st.integers(min_value=1, max_value=order))
+ msg = draw(st.integers(min_value=1, max_value=order))
+ nonce = draw(st.integers(min_value=1, max_value=order+1) |
+ st.integers(min_value=order>>1, max_value=order))
+ return generator, key, msg, nonce
+
+
+SIG_VER_SETTINGS = dict(HYP_SETTINGS)
+SIG_VER_SETTINGS["max_examples"] = 10
+@settings(**SIG_VER_SETTINGS)
+@example((generator_224, 4, 1, 1))
+@given(st_random_gen_key_msg_nonce())
+def test_sig_verify(args):
+ """
+ Check if signing and verification works for arbitrary messages and
+ that signatures for other messages are rejected.
+ """
+ generator, sec_mult, msg, nonce = args
+
+ pubkey = Public_key(generator, generator * sec_mult)
+ privkey = Private_key(pubkey, sec_mult)
+
+ signature = privkey.sign(msg, nonce)
+
+ assert pubkey.verifies(msg, signature)
+
+ assert not pubkey.verifies(msg - 1, signature)
diff --git a/third_party/python/ecdsa/src/ecdsa/test_ellipticcurve.py b/third_party/python/ecdsa/src/ecdsa/test_ellipticcurve.py
new file mode 100644
index 0000000000..924134cecd
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/test_ellipticcurve.py
@@ -0,0 +1,188 @@
+import pytest
+from six import print_
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+from hypothesis import given, settings
+import hypothesis.strategies as st
+try:
+    from hypothesis import HealthCheck
+    HC_PRESENT = True
+except ImportError:  # pragma: no cover
+    HC_PRESENT = False
+from .numbertheory import inverse_mod
+from .ellipticcurve import CurveFp, INFINITY, Point
+
+
+HYP_SETTINGS = {}
+if HC_PRESENT:  # pragma: no branch
+    HYP_SETTINGS['suppress_health_check'] = [HealthCheck.too_slow]
+    HYP_SETTINGS['deadline'] = 5000
+
+
+# NIST Curve P-192:
+p = 6277101735386680763835789423207666416083908700390324961279
+r = 6277101735386680763835789423176059013767194773182842284081
+# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5
+# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65
+b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
+Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
+Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811
+
+c192 = CurveFp(p, -3, b)
+p192 = Point(c192, Gx, Gy, r)
+
+c_23 = CurveFp(23, 1, 1)
+g_23 = Point(c_23, 13, 7, 7)
+
+
+HYP_SLOW_SETTINGS = dict(HYP_SETTINGS)
+HYP_SLOW_SETTINGS["max_examples"] = 10
+
+
+@settings(**HYP_SLOW_SETTINGS)
+@given(st.integers(min_value=1, max_value=r+1))
+def test_p192_mult_tests(multiple):
+ inv_m = inverse_mod(multiple, r)
+
+ p1 = p192 * multiple
+ assert p1 * inv_m == p192
+
+
+def add_n_times(point, n):
+ ret = INFINITY
+ i = 0
+ while i <= n:
+ yield ret
+ ret = ret + point
+ i += 1
+
+
+# From X9.62 I.1 (p. 96):
+@pytest.mark.parametrize(
+ "p, m, check",
+ [(g_23, n, exp) for n, exp in enumerate(add_n_times(g_23, 8))],
+ ids=["g_23 test with mult {0}".format(i) for i in range(9)])
+def test_add_and_mult_equivalence(p, m, check):
+ assert p * m == check
+
+
+class TestCurve(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ cls.c_23 = CurveFp(23, 1, 1)
+
+ def test_equality_curves(self):
+ self.assertEqual(self.c_23, CurveFp(23, 1, 1))
+
+ def test_inequality_curves(self):
+ c192 = CurveFp(p, -3, b)
+ self.assertNotEqual(self.c_23, c192)
+
+ def test_usability_in_a_hashed_collection_curves(self):
+ {self.c_23: None}
+
+ def test_hashability_curves(self):
+ hash(self.c_23)
+
+ def test_conflation_curves(self):
+ ne1, ne2, ne3 = CurveFp(24, 1, 1), CurveFp(23, 2, 1), CurveFp(23, 1, 2)
+ eq1, eq2, eq3 = CurveFp(23, 1, 1), CurveFp(23, 1, 1), self.c_23
+ self.assertEqual(len(set((c_23, eq1, eq2, eq3))), 1)
+ self.assertEqual(len(set((c_23, ne1, ne2, ne3))), 4)
+ self.assertDictEqual({c_23: None}, {eq1: None})
+ self.assertTrue(eq2 in {eq3: None})
+
+
+class TestPoint(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ cls.c_23 = CurveFp(23, 1, 1)
+ cls.g_23 = Point(cls.c_23, 13, 7, 7)
+
+ p = 6277101735386680763835789423207666416083908700390324961279
+ r = 6277101735386680763835789423176059013767194773182842284081
+ # s = 0x3045ae6fc8422f64ed579528d38120eae12196d5
+ # c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65
+ b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
+ Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
+ Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811
+
+ cls.c192 = CurveFp(p, -3, b)
+ cls.p192 = Point(cls.c192, Gx, Gy, r)
+
+ def test_p192(self):
+ # Checking against some sample computations presented
+ # in X9.62:
+ d = 651056770906015076056810763456358567190100156695615665659
+ Q = d * self.p192
+ self.assertEqual(Q.x(), 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5)
+
+ k = 6140507067065001063065065565667405560006161556565665656654
+ R = k * self.p192
+ self.assertEqual(R.x(), 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD)
+ self.assertEqual(R.y(), 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835)
+
+ u1 = 2563697409189434185194736134579731015366492496392189760599
+ u2 = 6266643813348617967186477710235785849136406323338782220568
+ temp = u1 * self.p192 + u2 * Q
+ self.assertEqual(temp.x(), 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD)
+ self.assertEqual(temp.y(), 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835)
+
+ def test_double_infinity(self):
+ p1 = INFINITY
+ p3 = p1.double()
+ self.assertEqual(p1, p3)
+ self.assertEqual(p3.x(), p1.x())
+        self.assertEqual(p3.y(), p1.y())
+
+ def test_double(self):
+ x1, y1, x3, y3 = (3, 10, 7, 12)
+
+ p1 = Point(self.c_23, x1, y1)
+ p3 = p1.double()
+ self.assertEqual(p3.x(), x3)
+ self.assertEqual(p3.y(), y3)
+
+ def test_multiply(self):
+ x1, y1, m, x3, y3 = (3, 10, 2, 7, 12)
+ p1 = Point(self.c_23, x1, y1)
+ p3 = p1 * m
+ self.assertEqual(p3.x(), x3)
+ self.assertEqual(p3.y(), y3)
+
+ # Trivial tests from X9.62 B.3:
+ def test_add(self):
+ """We expect that on curve c, (x1,y1) + (x2, y2 ) = (x3, y3)."""
+
+ x1, y1, x2, y2, x3, y3 = (3, 10, 9, 7, 17, 20)
+ p1 = Point(self.c_23, x1, y1)
+ p2 = Point(self.c_23, x2, y2)
+ p3 = p1 + p2
+ self.assertEqual(p3.x(), x3)
+ self.assertEqual(p3.y(), y3)
+
+ def test_add_as_double(self):
+ """We expect that on curve c, (x1,y1) + (x2, y2 ) = (x3, y3)."""
+
+ x1, y1, x2, y2, x3, y3 = (3, 10, 3, 10, 7, 12)
+ p1 = Point(self.c_23, x1, y1)
+ p2 = Point(self.c_23, x2, y2)
+ p3 = p1 + p2
+ self.assertEqual(p3.x(), x3)
+ self.assertEqual(p3.y(), y3)
+
+ def test_equality_points(self):
+ self.assertEqual(self.g_23, Point(self.c_23, 13, 7, 7))
+
+ def test_inequality_points(self):
+ c = CurveFp(100, -3, 100)
+ p = Point(c, 100, 100, 100)
+ self.assertNotEqual(self.g_23, p)
+
+    def test_inequality_points_diff_types(self):
+ c = CurveFp(100, -3, 100)
+ self.assertNotEqual(self.g_23, c)
diff --git a/third_party/python/ecdsa/src/ecdsa/test_jacobi.py b/third_party/python/ecdsa/src/ecdsa/test_jacobi.py
new file mode 100644
index 0000000000..35e524212a
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/test_jacobi.py
@@ -0,0 +1,365 @@
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+import hypothesis.strategies as st
+from hypothesis import given, assume, settings, example
+
+from .ellipticcurve import Point, PointJacobi, INFINITY
+from .ecdsa import generator_256, curve_256, generator_224
+from .numbertheory import inverse_mod
+
+class TestJacobi(unittest.TestCase):
+ def test___init__(self):
+ curve = object()
+ x = 2
+ y = 3
+ z = 1
+ order = 4
+ pj = PointJacobi(curve, x, y, z, order)
+
+ self.assertEqual(pj.order(), order)
+ self.assertIs(pj.curve(), curve)
+ self.assertEqual(pj.x(), x)
+ self.assertEqual(pj.y(), y)
+
+ def test_add_with_different_curves(self):
+ p_a = PointJacobi.from_affine(generator_256)
+ p_b = PointJacobi.from_affine(generator_224)
+
+ with self.assertRaises(ValueError):
+ p_a + p_b
+
+ def test_compare_different_curves(self):
+ self.assertNotEqual(generator_256, generator_224)
+
+ def test_equality_with_non_point(self):
+ pj = PointJacobi.from_affine(generator_256)
+
+ self.assertNotEqual(pj, "value")
+
+ def test_conversion(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pw = pj.to_affine()
+
+ self.assertEqual(generator_256, pw)
+
+ def test_single_double(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pw = generator_256.double()
+
+ pj = pj.double()
+
+ self.assertEqual(pj.x(), pw.x())
+ self.assertEqual(pj.y(), pw.y())
+
+ def test_double_with_zero_point(self):
+ pj = PointJacobi(curve_256, 0, 0, 1)
+
+ pj = pj.double()
+
+ self.assertIs(pj, INFINITY)
+
+ def test_double_with_zero_equivalent_point(self):
+ pj = PointJacobi(curve_256, 0, curve_256.p(), 1)
+
+ pj = pj.double()
+
+ self.assertIs(pj, INFINITY)
+
+ def test_double_with_zero_equivalent_point_non_1_z(self):
+ pj = PointJacobi(curve_256, 0, curve_256.p(), 2)
+
+ pj = pj.double()
+
+ self.assertIs(pj, INFINITY)
+
+ def test_compare_with_affine_point(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pa = pj.to_affine()
+
+ self.assertEqual(pj, pa)
+ self.assertEqual(pa, pj)
+
+ def test_to_affine_with_zero_point(self):
+ pj = PointJacobi(curve_256, 0, 0, 1)
+
+ pa = pj.to_affine()
+
+ self.assertIs(pa, INFINITY)
+
+ def test_add_with_affine_point(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pa = pj.to_affine()
+
+ s = pj + pa
+
+ self.assertEqual(s, pj.double())
+
+ def test_radd_with_affine_point(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pa = pj.to_affine()
+
+ s = pa + pj
+
+ self.assertEqual(s, pj.double())
+
+ def test_add_with_infinity(self):
+ pj = PointJacobi.from_affine(generator_256)
+
+ s = pj + INFINITY
+
+ self.assertEqual(s, pj)
+
+ def test_add_zero_point_to_affine(self):
+ pa = PointJacobi.from_affine(generator_256).to_affine()
+ pj = PointJacobi(curve_256, 0, 0, 1)
+
+ s = pj + pa
+
+ self.assertIs(s, pa)
+
+ def test_multiply_by_zero(self):
+ pj = PointJacobi.from_affine(generator_256)
+
+ pj = pj * 0
+
+ self.assertIs(pj, INFINITY)
+
+ def test_zero_point_multiply_by_one(self):
+ pj = PointJacobi(curve_256, 0, 0, 1)
+
+ pj = pj * 1
+
+ self.assertIs(pj, INFINITY)
+
+ def test_multiply_by_one(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pw = generator_256 * 1
+
+ pj = pj * 1
+
+ self.assertEqual(pj.x(), pw.x())
+ self.assertEqual(pj.y(), pw.y())
+
+ def test_multiply_by_two(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pw = generator_256 * 2
+
+ pj = pj * 2
+
+ self.assertEqual(pj.x(), pw.x())
+ self.assertEqual(pj.y(), pw.y())
+
+ def test_rmul_by_two(self):
+ pj = PointJacobi.from_affine(generator_256)
+ pw = generator_256 * 2
+
+ pj = 2 * pj
+
+ self.assertEqual(pj, pw)
+
+ def test_compare_non_zero_with_infinity(self):
+ pj = PointJacobi.from_affine(generator_256)
+
+ self.assertNotEqual(pj, INFINITY)
+
+ def test_compare_zero_point_with_infinity(self):
+ pj = PointJacobi(curve_256, 0, 0, 1)
+
+ self.assertEqual(pj, INFINITY)
+
+ def test_compare_double_with_multiply(self):
+ pj = PointJacobi.from_affine(generator_256)
+ dbl = pj.double()
+ mlpl = pj * 2
+
+ self.assertEqual(dbl, mlpl)
+
+ @settings(max_examples=10)
+ @given(st.integers(min_value=0, max_value=int(generator_256.order())))
+ def test_multiplications(self, mul):
+ pj = PointJacobi.from_affine(generator_256)
+ pw = pj.to_affine() * mul
+
+ pj = pj * mul
+
+ self.assertEqual((pj.x(), pj.y()), (pw.x(), pw.y()))
+ self.assertEqual(pj, pw)
+
+ @settings(max_examples=10)
+ @given(st.integers(min_value=0, max_value=int(generator_256.order())))
+ @example(0)
+ @example(int(generator_256.order()))
+ def test_precompute(self, mul):
+ precomp = PointJacobi.from_affine(generator_256, True)
+ pj = PointJacobi.from_affine(generator_256)
+
+ a = precomp * mul
+ b = pj * mul
+
+ self.assertEqual(a, b)
+
+ @settings(max_examples=10)
+ @given(st.integers(min_value=1, max_value=int(generator_256.order())),
+ st.integers(min_value=1, max_value=int(generator_256.order())))
+ @example(3, 3)
+ def test_add_scaled_points(self, a_mul, b_mul):
+ j_g = PointJacobi.from_affine(generator_256)
+ a = PointJacobi.from_affine(j_g * a_mul)
+ b = PointJacobi.from_affine(j_g * b_mul)
+
+ c = a + b
+
+ self.assertEqual(c, j_g * (a_mul + b_mul))
+
+ @settings(max_examples=10)
+ @given(st.integers(min_value=1, max_value=int(generator_256.order())),
+ st.integers(min_value=1, max_value=int(generator_256.order())),
+ st.integers(min_value=1, max_value=int(curve_256.p()-1)))
+ def test_add_one_scaled_point(self, a_mul, b_mul, new_z):
+ j_g = PointJacobi.from_affine(generator_256)
+ a = PointJacobi.from_affine(j_g * a_mul)
+ b = PointJacobi.from_affine(j_g * b_mul)
+
+ p = curve_256.p()
+
+ assume(inverse_mod(new_z, p))
+
+ new_zz = new_z * new_z % p
+
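+        # Jacobian coordinates represent the affine point (X/Z^2, Y/Z^3), so
+        # rescaling (X, Y, Z) to (X*z^2, Y*z^3, z) describes the same point;
+        # the addition below must therefore give the same affine result.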
+ b = PointJacobi(
+ curve_256, b.x() * new_zz % p, b.y() * new_zz * new_z % p, new_z)
+
+ c = a + b
+
+ self.assertEqual(c, j_g * (a_mul + b_mul))
+
+ @settings(max_examples=10)
+ @given(st.integers(min_value=1, max_value=int(generator_256.order())),
+ st.integers(min_value=1, max_value=int(generator_256.order())),
+ st.integers(min_value=1, max_value=int(curve_256.p()-1)))
+ @example(1, 1, 1)
+ @example(3, 3, 3)
+ @example(2, int(generator_256.order()-2), 1)
+ @example(2, int(generator_256.order()-2), 3)
+ def test_add_same_scale_points(self, a_mul, b_mul, new_z):
+ j_g = PointJacobi.from_affine(generator_256)
+ a = PointJacobi.from_affine(j_g * a_mul)
+ b = PointJacobi.from_affine(j_g * b_mul)
+
+ p = curve_256.p()
+
+ assume(inverse_mod(new_z, p))
+
+ new_zz = new_z * new_z % p
+
+ a = PointJacobi(
+ curve_256, a.x() * new_zz % p, a.y() * new_zz * new_z % p, new_z)
+ b = PointJacobi(
+ curve_256, b.x() * new_zz % p, b.y() * new_zz * new_z % p, new_z)
+
+ c = a + b
+
+ self.assertEqual(c, j_g * (a_mul + b_mul))
+
+ @settings(max_examples=14)
+ @given(st.integers(min_value=1, max_value=int(generator_256.order())),
+ st.integers(min_value=1, max_value=int(generator_256.order())),
+ st.lists(st.integers(min_value=1, max_value=int(curve_256.p()-1)),
+ min_size=2, max_size=2, unique=True))
+ @example(2, 2, [2, 1])
+ @example(2, 2, [2, 3])
+ @example(2, int(generator_256.order()-2), [2, 3])
+ @example(2, int(generator_256.order()-2), [2, 1])
+ def test_add_different_scale_points(self, a_mul, b_mul, new_z):
+ j_g = PointJacobi.from_affine(generator_256)
+ a = PointJacobi.from_affine(j_g * a_mul)
+ b = PointJacobi.from_affine(j_g * b_mul)
+
+ p = curve_256.p()
+
+ assume(inverse_mod(new_z[0], p))
+ assume(inverse_mod(new_z[1], p))
+
+ new_zz0 = new_z[0] * new_z[0] % p
+ new_zz1 = new_z[1] * new_z[1] % p
+
+ a = PointJacobi(
+ curve_256,
+ a.x() * new_zz0 % p,
+ a.y() * new_zz0 * new_z[0] % p,
+ new_z[0])
+ b = PointJacobi(
+ curve_256,
+ b.x() * new_zz1 % p,
+ b.y() * new_zz1 * new_z[1] % p,
+ new_z[1])
+
+ c = a + b
+
+ self.assertEqual(c, j_g * (a_mul + b_mul))
+
+ def test_add_point_3_times(self):
+ j_g = PointJacobi.from_affine(generator_256)
+
+ self.assertEqual(j_g * 3, j_g + j_g + j_g)
+
+ def test_mul_add_inf(self):
+ j_g = PointJacobi.from_affine(generator_256)
+
+ self.assertEqual(j_g, j_g.mul_add(1, INFINITY, 1))
+
+ def test_mul_add_same(self):
+ j_g = PointJacobi.from_affine(generator_256)
+
+ self.assertEqual(j_g * 2, j_g.mul_add(1, j_g, 1))
+
+ def test_mul_add_precompute(self):
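+        # from_affine(point, True) requests precomputation of generator
+        # multiples, so the operations below exercise the table-based path.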
+ j_g = PointJacobi.from_affine(generator_256, True)
+ b = PointJacobi.from_affine(j_g * 255, True)
+
+ self.assertEqual(j_g * 256, j_g + b)
+ self.assertEqual(j_g * (5 + 255 * 7), j_g * 5 + b * 7)
+ self.assertEqual(j_g * (5 + 255 * 7), j_g.mul_add(5, b, 7))
+
+ def test_mul_add_precompute_large(self):
+ j_g = PointJacobi.from_affine(generator_256, True)
+ b = PointJacobi.from_affine(j_g * 255, True)
+
+ self.assertEqual(j_g * 256, j_g + b)
+ self.assertEqual(j_g * (0xff00 + 255 * 0xf0f0),
+ j_g * 0xff00 + b * 0xf0f0)
+ self.assertEqual(j_g * (0xff00 + 255 * 0xf0f0),
+ j_g.mul_add(0xff00, b, 0xf0f0))
+
+ def test_mul_add_to_mul(self):
+ j_g = PointJacobi.from_affine(generator_256)
+
+ a = j_g * 3
+ b = j_g.mul_add(2, j_g, 1)
+
+ self.assertEqual(a, b)
+
+ def test_mul_add(self):
+ j_g = PointJacobi.from_affine(generator_256)
+
+ w_a = generator_256 * 255
+ w_b = generator_256 * (0xa8*0xf0)
+ j_b = j_g * 0xa8
+
+ ret = j_g.mul_add(255, j_b, 0xf0)
+
+ self.assertEqual(ret.to_affine(), w_a + w_b)
+
+ def test_mul_add_large(self):
+ j_g = PointJacobi.from_affine(generator_256)
+ b = PointJacobi.from_affine(j_g * 255)
+
+ self.assertEqual(j_g * 256, j_g + b)
+ self.assertEqual(j_g * (0xff00 + 255 * 0xf0f0),
+ j_g * 0xff00 + b * 0xf0f0)
+ self.assertEqual(j_g * (0xff00 + 255 * 0xf0f0),
+ j_g.mul_add(0xff00, b, 0xf0f0))
diff --git a/third_party/python/ecdsa/src/ecdsa/test_keys.py b/third_party/python/ecdsa/src/ecdsa/test_keys.py
new file mode 100644
index 0000000000..56e128421e
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/test_keys.py
@@ -0,0 +1,373 @@
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
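+# Python 3 removed the ``buffer`` builtin; fall back to memoryview so the
+# same bytes-like wrapper can be used on both Python 2 and 3.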
+try:
+ buffer
+except NameError:
+ buffer = memoryview
+
+import array
+import six
+import sys
+import pytest
+import hashlib
+
+from .keys import VerifyingKey, SigningKey
+from .der import unpem
+from .util import sigencode_string, sigencode_der, sigencode_strings, \
+ sigdecode_string, sigdecode_der, sigdecode_strings
+
+
+class TestVerifyingKeyFromString(unittest.TestCase):
+ """
+ Verify that ecdsa.keys.VerifyingKey.from_string() can be used with
+ bytes-like objects
+ """
+
+ @classmethod
+ def setUpClass(cls):
+ cls.key_bytes = (b'\x04L\xa2\x95\xdb\xc7Z\xd7\x1f\x93\nz\xcf\x97\xcf'
+ b'\xd7\xc2\xd9o\xfe8}X!\xae\xd4\xfah\xfa^\rpI\xba\xd1'
+ b'Y\xfb\x92xa\xebo+\x9cG\xfav\xca')
+ cls.vk = VerifyingKey.from_string(cls.key_bytes)
+
+ def test_bytes(self):
+ self.assertIsNotNone(self.vk)
+ self.assertIsInstance(self.vk, VerifyingKey)
+ self.assertEqual(
+ self.vk.pubkey.point.x(),
+ 105419898848891948935835657980914000059957975659675736097)
+ self.assertEqual(
+ self.vk.pubkey.point.y(),
+ 4286866841217412202667522375431381222214611213481632495306)
+
+ def test_bytes_memoryview(self):
+ vk = VerifyingKey.from_string(buffer(self.key_bytes))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytearray(self):
+ vk = VerifyingKey.from_string(bytearray(self.key_bytes))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytesarray_memoryview(self):
+ vk = VerifyingKey.from_string(buffer(bytearray(self.key_bytes)))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_array_array_of_bytes(self):
+ arr = array.array('B', self.key_bytes)
+ vk = VerifyingKey.from_string(arr)
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_array_array_of_bytes_memoryview(self):
+ arr = array.array('B', self.key_bytes)
+ vk = VerifyingKey.from_string(buffer(arr))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_array_array_of_ints(self):
+ arr = array.array('I', self.key_bytes)
+ vk = VerifyingKey.from_string(arr)
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_array_array_of_ints_memoryview(self):
+ arr = array.array('I', self.key_bytes)
+ vk = VerifyingKey.from_string(buffer(arr))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytes_uncompressed(self):
+ vk = VerifyingKey.from_string(b'\x04' + self.key_bytes)
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytearray_uncompressed(self):
+ vk = VerifyingKey.from_string(bytearray(b'\x04' + self.key_bytes))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytes_compressed(self):
+ vk = VerifyingKey.from_string(b'\x02' + self.key_bytes[:24])
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytearray_compressed(self):
+ vk = VerifyingKey.from_string(bytearray(b'\x02' + self.key_bytes[:24]))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+
+class TestVerifyingKeyFromDer(unittest.TestCase):
+ """
+ Verify that ecdsa.keys.VerifyingKey.from_der() can be used with
+ bytes-like objects.
+ """
+ @classmethod
+ def setUpClass(cls):
+ prv_key_str = (
+ "-----BEGIN EC PRIVATE KEY-----\n"
+ "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n"
+ "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n"
+ "bA==\n"
+ "-----END EC PRIVATE KEY-----\n")
+ key_str = (
+ "-----BEGIN PUBLIC KEY-----\n"
+ "MEkwEwYHKoZIzj0CAQYIKoZIzj0DAQEDMgAEuIF30ITvF/XkVjlAgCg2D59ZtKTX\n"
+ "Jk5i2gZR3OR6NaTFtFz1FZNCOotVe5wgmfNs\n"
+ "-----END PUBLIC KEY-----\n")
+ cls.key_pem = key_str
+
+ cls.key_bytes = unpem(key_str)
+ assert isinstance(cls.key_bytes, bytes)
+ cls.vk = VerifyingKey.from_pem(key_str)
+ cls.sk = SigningKey.from_pem(prv_key_str)
+
+ key_str = (
+ "-----BEGIN PUBLIC KEY-----\n"
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4H3iRbG4TSrsSRb/gusPQB/4YcN8\n"
+ "Poqzgjau4kfxBPyZimeRfuY/9g/wMmPuhGl4BUve51DsnKJFRr8psk0ieA==\n"
+ "-----END PUBLIC KEY-----\n"
+ )
+ cls.vk2 = VerifyingKey.from_pem(key_str)
+
+ def test_custom_hashfunc(self):
+ vk = VerifyingKey.from_der(self.key_bytes, hashlib.sha256)
+
+ self.assertIs(vk.default_hashfunc, hashlib.sha256)
+
+ def test_from_pem_with_custom_hashfunc(self):
+ vk = VerifyingKey.from_pem(self.key_pem, hashlib.sha256)
+
+ self.assertIs(vk.default_hashfunc, hashlib.sha256)
+
+ def test_bytes(self):
+ vk = VerifyingKey.from_der(self.key_bytes)
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytes_memoryview(self):
+ vk = VerifyingKey.from_der(buffer(self.key_bytes))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytearray(self):
+ vk = VerifyingKey.from_der(bytearray(self.key_bytes))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_bytesarray_memoryview(self):
+ vk = VerifyingKey.from_der(buffer(bytearray(self.key_bytes)))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_array_array_of_bytes(self):
+ arr = array.array('B', self.key_bytes)
+ vk = VerifyingKey.from_der(arr)
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_array_array_of_bytes_memoryview(self):
+ arr = array.array('B', self.key_bytes)
+ vk = VerifyingKey.from_der(buffer(arr))
+
+ self.assertEqual(self.vk.to_string(), vk.to_string())
+
+ def test_equality_on_verifying_keys(self):
+ self.assertEqual(self.vk, self.sk.get_verifying_key())
+
+ def test_inequality_on_verifying_keys(self):
+ self.assertNotEqual(self.vk, self.vk2)
+
+ def test_inequality_on_verifying_keys_not_implemented(self):
+ self.assertNotEqual(self.vk, None)
+
+
+class TestSigningKey(unittest.TestCase):
+ """
+ Verify that ecdsa.keys.SigningKey.from_der() can be used with
+ bytes-like objects.
+ """
+ @classmethod
+ def setUpClass(cls):
+ prv_key_str = (
+ "-----BEGIN EC PRIVATE KEY-----\n"
+ "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n"
+ "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n"
+ "bA==\n"
+ "-----END EC PRIVATE KEY-----\n")
+ cls.sk1 = SigningKey.from_pem(prv_key_str)
+
+ prv_key_str = (
+ "-----BEGIN EC PRIVATE KEY-----\n"
+ "MHcCAQEEIKlL2EAm5NPPZuXwxRf4nXMk0A80y6UUbiQ17be/qFhRoAoGCCqGSM49\n"
+ "AwEHoUQDQgAE4H3iRbG4TSrsSRb/gusPQB/4YcN8Poqzgjau4kfxBPyZimeRfuY/\n"
+ "9g/wMmPuhGl4BUve51DsnKJFRr8psk0ieA==\n"
+ "-----END EC PRIVATE KEY-----\n")
+ cls.sk2 = SigningKey.from_pem(prv_key_str)
+
+ def test_equality_on_signing_keys(self):
+ sk = SigningKey.from_secret_exponent(self.sk1.privkey.secret_multiplier, self.sk1.curve)
+ self.assertEqual(self.sk1, sk)
+
+ def test_inequality_on_signing_keys(self):
+ self.assertNotEqual(self.sk1, self.sk2)
+
+ def test_inequality_on_signing_keys_not_implemented(self):
+ self.assertNotEqual(self.sk1, None)
+
+# test VerifyingKey.verify()
+prv_key_str = (
+ "-----BEGIN EC PRIVATE KEY-----\n"
+ "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n"
+ "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n"
+ "bA==\n"
+ "-----END EC PRIVATE KEY-----\n")
+key_bytes = unpem(prv_key_str)
+assert isinstance(key_bytes, bytes)
+sk = SigningKey.from_der(key_bytes)
+vk = sk.verifying_key
+
+data = (b"some string for signing"
+ b"contents don't really matter"
+ b"but do include also some crazy values: "
+ b"\x00\x01\t\r\n\x00\x00\x00\xff\xf0")
+assert len(data) % 4 == 0
+sha1 = hashlib.sha1()
+sha1.update(data)
+data_hash = sha1.digest()
+assert isinstance(data_hash, bytes)
+sig_raw = sk.sign(data, sigencode=sigencode_string)
+assert isinstance(sig_raw, bytes)
+sig_der = sk.sign(data, sigencode=sigencode_der)
+assert isinstance(sig_der, bytes)
+sig_strings = sk.sign(data, sigencode=sigencode_strings)
+assert isinstance(sig_strings[0], bytes)
+
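+# Build the cross-product of test cases: every bytes-like wrapper below is
+# combined with every signature encoding (raw, DER, strings) and with both
+# verification methods (verify and verify_digest).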
+verifiers = []
+for modifier, fun in [
+ ("bytes", lambda x: x),
+ ("bytes memoryview", lambda x: buffer(x)),
+ ("bytearray", lambda x: bytearray(x)),
+ ("bytearray memoryview", lambda x: buffer(bytearray(x))),
+ ("array.array of bytes", lambda x: array.array('B', x)),
+ ("array.array of bytes memoryview", lambda x: buffer(array.array('B', x))),
+ ("array.array of ints", lambda x: array.array('I', x)),
+ ("array.array of ints memoryview", lambda x: buffer(array.array('I', x)))
+ ]:
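+    # array.array('I') requires data whose length is a multiple of 4; the
+    # signatures generally are not, so for the "ints" cases the signature is
+    # left unconverted and only the verified data (asserted above to be a
+    # multiple of 4) is wrapped.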
+ if "ints" in modifier:
+ conv = lambda x: x
+ else:
+ conv = fun
+ for sig_format, signature, decoder, mod_apply in [
+ ("raw", sig_raw, sigdecode_string, lambda x: conv(x)),
+ ("der", sig_der, sigdecode_der, lambda x: conv(x)),
+ ("strings", sig_strings, sigdecode_strings, lambda x:
+ tuple(conv(i) for i in x))
+ ]:
+ for method_name, vrf_mthd, vrf_data in [
+ ("verify", vk.verify, data),
+ ("verify_digest", vk.verify_digest, data_hash)
+ ]:
+ verifiers.append(pytest.param(
+ signature, decoder, mod_apply, fun, vrf_mthd, vrf_data,
+ id="{2}-{0}-{1}".format(modifier, sig_format, method_name)))
+
+@pytest.mark.parametrize(
+ "signature,decoder,mod_apply,fun,vrf_mthd,vrf_data",
+ verifiers)
+def test_VerifyingKey_verify(
+ signature, decoder, mod_apply, fun, vrf_mthd, vrf_data):
+ sig = mod_apply(signature)
+
+ assert vrf_mthd(sig, fun(vrf_data), sigdecode=decoder)
+
+
+# test SigningKey.from_string()
+prv_key_bytes = (b'^\xc8B\x0b\xd6\xef\x92R\xa9B\xe9\x89\x04<\xa2'
+ b'\x9fV\x1f\xa5%w\x0e\xb1\xc5')
+assert len(prv_key_bytes) == 24
+converters = []
+for modifier, convert in [
+ ("bytes", lambda x: x),
+ ("bytes memoryview", buffer),
+ ("bytearray", bytearray),
+ ("bytearray memoryview", lambda x: buffer(bytearray(x))),
+ ("array.array of bytes", lambda x: array.array('B', x)),
+ ("array.array of bytes memoryview",
+ lambda x: buffer(array.array('B', x))),
+ ("array.array of ints", lambda x: array.array('I', x)),
+ ("array.array of ints memoryview",
+ lambda x: buffer(array.array('I', x)))
+ ]:
+ converters.append(pytest.param(
+ convert,
+ id=modifier))
+
+@pytest.mark.parametrize("convert", converters)
+def test_SigningKey_from_string(convert):
+ key = convert(prv_key_bytes)
+ sk = SigningKey.from_string(key)
+
+ assert sk.to_string() == prv_key_bytes
+
+
+# test SigningKey.from_der()
+prv_key_str = (
+ "-----BEGIN EC PRIVATE KEY-----\n"
+ "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n"
+ "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n"
+ "bA==\n"
+ "-----END EC PRIVATE KEY-----\n")
+key_bytes = unpem(prv_key_str)
+assert isinstance(key_bytes, bytes)
+
+# last two converters are for array.array of ints, those require input
+# that's multiple of 4, which no curve we support produces
+@pytest.mark.parametrize("convert", converters[:-2])
+def test_SigningKey_from_der(convert):
+ key = convert(key_bytes)
+ sk = SigningKey.from_der(key)
+
+ assert sk.to_string() == prv_key_bytes
+
+
+# test SigningKey.sign_deterministic()
+extra_entropy = b'\x0a\x0b\x0c\x0d\x0e\x0f\x10\x11'
+
+@pytest.mark.parametrize("convert", converters)
+def test_SigningKey_sign_deterministic(convert):
+ sig = sk.sign_deterministic(
+ convert(data),
+ extra_entropy=convert(extra_entropy))
+
+ vk.verify(sig, data)
+
+
+# test SigningKey.sign_digest_deterministic()
+@pytest.mark.parametrize("convert", converters)
+def test_SigningKey_sign_digest_deterministic(convert):
+ sig = sk.sign_digest_deterministic(
+ convert(data_hash),
+ extra_entropy=convert(extra_entropy))
+
+ vk.verify(sig, data)
+
+
+@pytest.mark.parametrize("convert", converters)
+def test_SigningKey_sign(convert):
+ sig = sk.sign(convert(data))
+
+ vk.verify(sig, data)
+
+
+@pytest.mark.parametrize("convert", converters)
+def test_SigningKey_sign_digest(convert):
+ sig = sk.sign_digest(convert(data_hash))
+
+ vk.verify(sig, data)
diff --git a/third_party/python/ecdsa/src/ecdsa/test_malformed_sigs.py b/third_party/python/ecdsa/src/ecdsa/test_malformed_sigs.py
new file mode 100644
index 0000000000..c1dca44a0e
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/test_malformed_sigs.py
@@ -0,0 +1,306 @@
+from __future__ import with_statement, division
+
+import hashlib
+try:
+ from hashlib import algorithms_available
+except ImportError: # pragma: no cover
+ algorithms_available = [
+ "md5", "sha1", "sha224", "sha256", "sha384", "sha512"]
+from functools import partial
+import pytest
+import sys
+from six import binary_type
+import hypothesis.strategies as st
+from hypothesis import note, assume, given, settings, example
+
+from .keys import SigningKey
+from .keys import BadSignatureError
+from .util import sigencode_der, sigencode_string
+from .util import sigdecode_der, sigdecode_string
+from .curves import curves, NIST256p
+from .der import encode_integer, encode_bitstring, encode_octet_string, \
+ encode_oid, encode_sequence, encode_constructed
+
+
+example_data = b"some data to sign"
+"""Since the data is hashed for processing, really any string will do."""
+
+
+hash_and_size = [(name, hashlib.new(name).digest_size)
+ for name in algorithms_available]
+"""Pairs of hash names and their output sizes.
+Needed for pairing with curves as we don't support hashes
+bigger than order sizes of curves."""
+
+
+keys_and_sigs = []
+"""Name of the curve+hash combination, VerifyingKey and DER signature."""
+
+
+# for hypothesis strategy shrinking we want smallest curves and hashes first
+for curve in sorted(curves, key=lambda x: x.baselen):
+ for hash_alg in [name for name, size in
+ sorted(hash_and_size, key=lambda x: x[1])
+ if 0 < size <= curve.baselen]:
+ sk = SigningKey.generate(
+ curve,
+ hashfunc=partial(hashlib.new, hash_alg))
+
+ keys_and_sigs.append(
+ ("{0} {1}".format(curve, hash_alg),
+ sk.verifying_key,
+ sk.sign(example_data, sigencode=sigencode_der)))
+
+
+# first make sure that the signatures can be verified
+@pytest.mark.parametrize(
+ "verifying_key,signature",
+ [pytest.param(vk, sig, id=name) for name, vk, sig in keys_and_sigs])
+def test_signatures(verifying_key, signature):
+ assert verifying_key.verify(signature, example_data,
+ sigdecode=sigdecode_der)
+
+
+@st.composite
+def st_fuzzed_sig(draw, keys_and_sigs):
+ """
+ Hypothesis strategy that generates pairs of VerifyingKey and malformed
+ signatures created by fuzzing of a valid signature.
+ """
+ name, verifying_key, old_sig = draw(st.sampled_from(keys_and_sigs))
+ note("Configuration: {0}".format(name))
+
+ sig = bytearray(old_sig)
+
+ # decide which bytes should be removed
+ to_remove = draw(st.lists(
+ st.integers(min_value=0, max_value=len(sig)-1),
+ unique=True))
+ to_remove.sort()
+ for i in reversed(to_remove):
+ del sig[i]
+ note("Remove bytes: {0}".format(to_remove))
+
+ # decide which bytes of the original signature should be changed
+ if sig: # pragma: no branch
+ xors = draw(st.dictionaries(
+ st.integers(min_value=0, max_value=len(sig)-1),
+ st.integers(min_value=1, max_value=255)))
+ for i, val in xors.items():
+ sig[i] ^= val
+ note("xors: {0}".format(xors))
+
+ # decide where new data should be inserted
+ insert_pos = draw(st.integers(min_value=0, max_value=len(sig)))
+ # NIST521p signature is about 140 bytes long, test slightly longer
+ insert_data = draw(st.binary(max_size=256))
+
+ sig = sig[:insert_pos] + insert_data + sig[insert_pos:]
+ note("Inserted at position {0} bytes: {1!r}"
+ .format(insert_pos, insert_data))
+
+ sig = bytes(sig)
+    # make sure that at least one mutation was performed on the data
+    assume(to_remove or xors or insert_data)
+    # and that the mutations didn't cancel each other out
+    assume(sig != old_sig)
+
+ return verifying_key, sig
+
+
+params = {}
+# not supported in hypothesis 2.0.0
+if sys.version_info >= (2, 7): # pragma: no branch
+ from hypothesis import HealthCheck
+ # deadline=5s because NIST521p are slow to verify
+ params["deadline"] = 5000
+ params["suppress_health_check"] = [HealthCheck.data_too_large,
+ HealthCheck.filter_too_much,
+ HealthCheck.too_slow]
+
+slow_params = dict(params)
+slow_params["max_examples"] = 10
+
+
+@settings(**params)
+@given(st_fuzzed_sig(keys_and_sigs))
+def test_fuzzed_der_signatures(args):
+ verifying_key, sig = args
+
+ with pytest.raises(BadSignatureError):
+ verifying_key.verify(sig, example_data, sigdecode=sigdecode_der)
+
+
+@st.composite
+def st_random_der_ecdsa_sig_value(draw):
+ """
+ Hypothesis strategy for selecting random values and encoding them
+ to ECDSA-Sig-Value object::
+
+ ECDSA-Sig-Value ::= SEQUENCE {
+ r INTEGER,
+ s INTEGER
+ }
+ """
+ name, verifying_key, _ = draw(st.sampled_from(keys_and_sigs))
+ note("Configuration: {0}".format(name))
+ order = int(verifying_key.curve.order)
+
+    # encode_integer() doesn't support negative numbers; it would be nice
+    # to generate them too, but we have coverage for remove_integer()
+    # verifying that it doesn't accept them, so meh.
+    # Test numbers around the values that can legitimately show up (around
+    # the order), as well as ones way smaller and slightly bigger.
+ r = draw(st.integers(min_value=0, max_value=order << 4) |
+ st.integers(min_value=order >> 2, max_value=order+1))
+ s = draw(st.integers(min_value=0, max_value=order << 4) |
+ st.integers(min_value=order >> 2, max_value=order+1))
+
+ sig = encode_sequence(encode_integer(r), encode_integer(s))
+
+ return verifying_key, sig
+
+
+@settings(**slow_params)
+@given(st_random_der_ecdsa_sig_value())
+def test_random_der_ecdsa_sig_value(params):
+ """
+ Check if random values encoded in ECDSA-Sig-Value structure are rejected
+ as signature.
+ """
+ verifying_key, sig = params
+
+ with pytest.raises(BadSignatureError):
+ verifying_key.verify(sig, example_data, sigdecode=sigdecode_der)
+
+
+def st_der_integer(*args, **kwargs):
+ """
+ Hypothesis strategy that returns a random positive integer as DER
+ INTEGER.
+    Parameters are passed to hypothesis.strategies.integers().
+ """
+ if "min_value" not in kwargs: # pragma: no branch
+ kwargs["min_value"] = 0
+ return st.builds(encode_integer, st.integers(*args, **kwargs))
+
+
+@st.composite
+def st_der_bit_string(draw, *args, **kwargs):
+ """
+ Hypothesis strategy that returns a random DER BIT STRING.
+    Parameters are passed to hypothesis.strategies.binary().
+ """
+ data = draw(st.binary(*args, **kwargs))
+ if data:
+ unused = draw(st.integers(min_value=0, max_value=7))
+ data = bytearray(data)
+ data[-1] &= - (2**unused)
+ data = bytes(data)
+ else:
+ unused = 0
+ return encode_bitstring(data, unused)
+
+
+def st_der_octet_string(*args, **kwargs):
+ """
+ Hypothesis strategy that returns a random DER OCTET STRING object.
+    Parameters are passed to hypothesis.strategies.binary().
+ """
+ return st.builds(encode_octet_string, st.binary(*args, **kwargs))
+
+
+def st_der_null():
+ """
+ Hypothesis strategy that returns DER NULL object.
+ """
+ return st.just(b'\x05\x00')
+
+
+@st.composite
+def st_der_oid(draw):
+ """
+ Hypothesis strategy that returns DER OBJECT IDENTIFIER objects.
+ """
+ first = draw(st.integers(min_value=0, max_value=2))
+ if first < 2:
+ second = draw(st.integers(min_value=0, max_value=39))
+ else:
+ second = draw(st.integers(min_value=0, max_value=2**512))
+ rest = draw(st.lists(st.integers(min_value=0, max_value=2**512),
+ max_size=50))
+ return encode_oid(first, second, *rest)
+
+
+def st_der():
+ """
+ Hypothesis strategy that returns random DER structures.
+
+    A valid DER structure is any primitive object, an octet encoding
+    of a valid DER structure, a sequence of valid DER objects, or a
+    constructed encoding of any of the above.
+ """
+ return st.recursive(
+ st.just(b'') | st_der_integer(max_value=2**4096) |
+ st_der_bit_string(max_size=1024**2) |
+ st_der_octet_string(max_size=1024**2) | st_der_null() | st_der_oid(),
+ lambda children:
+ st.builds(lambda x: encode_octet_string(x), st.one_of(children)) |
+ st.builds(lambda x: encode_bitstring(x, 0), st.one_of(children)) |
+ st.builds(lambda x: encode_sequence(*x),
+ st.lists(children, max_size=200)) |
+ st.builds(lambda tag, x:
+ encode_constructed(tag, x),
+ st.integers(min_value=0, max_value=0x3f),
+ st.one_of(children)),
+ max_leaves=40
+ )
+
+
+@settings(**params)
+@given(st.sampled_from(keys_and_sigs), st_der())
+def test_random_der_as_signature(params, der):
+ """Check if random DER structures are rejected as signature"""
+ name, verifying_key, _ = params
+
+ with pytest.raises(BadSignatureError):
+ verifying_key.verify(der, example_data, sigdecode=sigdecode_der)
+
+
+@settings(**params)
+@given(st.sampled_from(keys_and_sigs), st.binary(max_size=1024**2))
+@example(
+ keys_and_sigs[0],
+ encode_sequence(encode_integer(0), encode_integer(0)))
+@example(
+ keys_and_sigs[0],
+ encode_sequence(encode_integer(1), encode_integer(1)) + b'\x00')
+@example(
+ keys_and_sigs[0],
+ encode_sequence(*[encode_integer(1)] * 3))
+def test_random_bytes_as_signature(params, der):
+ """Check if random bytes are rejected as signature"""
+ name, verifying_key, _ = params
+
+ with pytest.raises(BadSignatureError):
+ verifying_key.verify(der, example_data, sigdecode=sigdecode_der)
+
+
+keys_and_string_sigs = [
+ (name, verifying_key,
+ sigencode_string(*sigdecode_der(sig, verifying_key.curve.order),
+ order=verifying_key.curve.order))
+ for name, verifying_key, sig in keys_and_sigs]
+"""
+Name of the curve+hash combination, VerifyingKey and signature as a
+byte string.
+"""
+
+
+@settings(**params)
+@given(st_fuzzed_sig(keys_and_string_sigs))
+def test_fuzzed_string_signatures(params):
+ verifying_key, sig = params
+
+ with pytest.raises(BadSignatureError):
+ verifying_key.verify(sig, example_data, sigdecode=sigdecode_string)
diff --git a/third_party/python/ecdsa/src/ecdsa/test_numbertheory.py b/third_party/python/ecdsa/src/ecdsa/test_numbertheory.py
new file mode 100644
index 0000000000..4cec4fd6a7
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/test_numbertheory.py
@@ -0,0 +1,275 @@
+import operator
+from six import print_
+from functools import reduce
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+import hypothesis.strategies as st
+import pytest
+from hypothesis import given, settings, example
+try:
+    from hypothesis import HealthCheck
+    HC_PRESENT = True
+except ImportError:  # pragma: no cover
+    HC_PRESENT = False
+from .numbertheory import (SquareRootError, factorization, gcd, lcm,
+ jacobi, inverse_mod,
+ is_prime, next_prime, smallprimes,
+ square_root_mod_prime)
+
+
+BIGPRIMES = (999671,
+ 999683,
+ 999721,
+ 999727,
+ 999749,
+ 999763,
+ 999769,
+ 999773,
+ 999809,
+ 999853,
+ 999863,
+ 999883,
+ 999907,
+ 999917,
+ 999931,
+ 999953,
+ 999959,
+ 999961,
+ 999979,
+ 999983)
+
+
+@pytest.mark.parametrize(
+ "prime, next_p",
+ [(p, q) for p, q in zip(BIGPRIMES[:-1], BIGPRIMES[1:])])
+def test_next_prime(prime, next_p):
+ assert next_prime(prime) == next_p
+
+
+@pytest.mark.parametrize(
+ "val",
+ [-1, 0, 1])
+def test_next_prime_with_nums_less_2(val):
+ assert next_prime(val) == 2
+
+
+@pytest.mark.parametrize("prime", smallprimes)
+def test_square_root_mod_prime_for_small_primes(prime):
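+    # Worked example for prime=7: the quadratic residues are {0, 1, 2, 4}
+    # (0*0=0, 1*1=1, 2*2=4, 3*3=2 mod 7), so square_root_mod_prime() must
+    # succeed for those values and raise SquareRootError for 3, 5 and 6.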
+ squares = set()
+ for num in range(0, 1 + prime // 2):
+ sq = num * num % prime
+ squares.add(sq)
+ root = square_root_mod_prime(sq, prime)
+ # tested for real with TestNumbertheory.test_square_root_mod_prime
+ assert root * root % prime == sq
+
+ for nonsquare in range(0, prime):
+ if nonsquare in squares:
+ continue
+ with pytest.raises(SquareRootError):
+ square_root_mod_prime(nonsquare, prime)
+
+
+@st.composite
+def st_two_nums_rel_prime(draw):
+ # 521-bit is the biggest curve we operate on, use 1024 for a bit
+ # of breathing space
+ mod = draw(st.integers(min_value=2, max_value=2**1024))
+ num = draw(st.integers(min_value=1, max_value=mod-1)
+ .filter(lambda x: gcd(x, mod) == 1))
+ return num, mod
+
+
+@st.composite
+def st_primes(draw, *args, **kwargs):
+ if "min_value" not in kwargs: # pragma: no branch
+ kwargs["min_value"] = 1
+ prime = draw(st.sampled_from(smallprimes) |
+ st.integers(*args, **kwargs)
+ .filter(is_prime))
+ return prime
+
+
+@st.composite
+def st_num_square_prime(draw):
+ prime = draw(st_primes(max_value=2**1024))
+ num = draw(st.integers(min_value=0, max_value=1 + prime // 2))
+ sq = num * num % prime
+ return sq, prime
+
+
+@st.composite
+def st_comp_with_com_fac(draw):
+ """
+ Strategy that returns lists of numbers, all having a common factor.
+ """
+ primes = draw(st.lists(st_primes(max_value=2**512), min_size=1,
+ max_size=10))
+    # select random prime(s) that will form the common factor of the composites
+ com_fac_primes = draw(st.lists(st.sampled_from(primes),
+ min_size=1, max_size=20))
+ com_fac = reduce(operator.mul, com_fac_primes, 1)
+
+ # select at most 20 lists (returned numbers),
+ # each having at most 30 primes (factors) including none (then the number
+ # will be 1)
+ comp_primes = draw(
+ st.integers(min_value=1, max_value=20).
+ flatmap(lambda n: st.lists(st.lists(st.sampled_from(primes),
+ max_size=30),
+ min_size=1, max_size=n)))
+
+ return [reduce(operator.mul, nums, 1) * com_fac for nums in comp_primes]
+
+
+@st.composite
+def st_comp_no_com_fac(draw):
+ """
+ Strategy that returns lists of numbers that don't have a common factor.
+ """
+ primes = draw(st.lists(st_primes(max_value=2**512),
+ min_size=2, max_size=10, unique=True))
+ # first select the primes that will create the uncommon factor
+ # between returned numbers
+ uncom_fac_primes = draw(st.lists(
+ st.sampled_from(primes),
+ min_size=1, max_size=len(primes)-1, unique=True))
+ uncom_fac = reduce(operator.mul, uncom_fac_primes, 1)
+
+ # then build composites from leftover primes
+ leftover_primes = [i for i in primes if i not in uncom_fac_primes]
+
+ assert leftover_primes
+ assert uncom_fac_primes
+
+ # select at most 20 lists, each having at most 30 primes
+ # selected from the leftover_primes list
+ number_primes = draw(
+ st.integers(min_value=1, max_value=20).
+ flatmap(lambda n: st.lists(st.lists(st.sampled_from(leftover_primes),
+ max_size=30),
+ min_size=1, max_size=n)))
+
+ numbers = [reduce(operator.mul, nums, 1) for nums in number_primes]
+
+ insert_at = draw(st.integers(min_value=0, max_value=len(numbers)))
+ numbers.insert(insert_at, uncom_fac)
+ return numbers
+
+
+HYP_SETTINGS = {}
+if HC_PRESENT:  # pragma: no branch
+    HYP_SETTINGS['suppress_health_check'] = [HealthCheck.filter_too_much,
+                                             HealthCheck.too_slow]
+    # factorization() sometimes takes a long time to finish
+    HYP_SETTINGS['deadline'] = 5000
+
+
+HYP_SLOW_SETTINGS = dict(HYP_SETTINGS)
+HYP_SLOW_SETTINGS["max_examples"] = 10
+
+
+class TestNumbertheory(unittest.TestCase):
+ def test_gcd(self):
+ assert gcd(3 * 5 * 7, 3 * 5 * 11, 3 * 5 * 13) == 3 * 5
+ assert gcd([3 * 5 * 7, 3 * 5 * 11, 3 * 5 * 13]) == 3 * 5
+ assert gcd(3) == 3
+
+ @unittest.skipUnless(HC_PRESENT,
+ "Hypothesis 2.0.0 can't be made tolerant of hard to "
+ "meet requirements (like `is_prime()`), the test "
+ "case times-out on it")
+ @settings(**HYP_SLOW_SETTINGS)
+ @given(st_comp_with_com_fac())
+ def test_gcd_with_com_factor(self, numbers):
+ n = gcd(numbers)
+ assert 1 in numbers or n != 1
+ for i in numbers:
+ assert i % n == 0
+
+ @unittest.skipUnless(HC_PRESENT,
+ "Hypothesis 2.0.0 can't be made tolerant of hard to "
+ "meet requirements (like `is_prime()`), the test "
+ "case times-out on it")
+ @settings(**HYP_SLOW_SETTINGS)
+ @given(st_comp_no_com_fac())
+ def test_gcd_with_uncom_factor(self, numbers):
+ n = gcd(numbers)
+ assert n == 1
+
+ @given(st.lists(st.integers(min_value=1, max_value=2**8192),
+ min_size=1, max_size=20))
+ def test_gcd_with_random_numbers(self, numbers):
+ n = gcd(numbers)
+ for i in numbers:
+            # check that n is at least a common divisor
+ assert i % n == 0
+
+ def test_lcm(self):
+ assert lcm(3, 5 * 3, 7 * 3) == 3 * 5 * 7
+ assert lcm([3, 5 * 3, 7 * 3]) == 3 * 5 * 7
+ assert lcm(3) == 3
+
+ @given(st.lists(st.integers(min_value=1, max_value=2**8192),
+ min_size=1, max_size=20))
+ def test_lcm_with_random_numbers(self, numbers):
+ n = lcm(numbers)
+ for i in numbers:
+ assert n % i == 0
+
+ @unittest.skipUnless(HC_PRESENT,
+ "Hypothesis 2.0.0 can't be made tolerant of hard to "
+ "meet requirements (like `is_prime()`), the test "
+ "case times-out on it")
+ @settings(**HYP_SETTINGS)
+ @given(st_num_square_prime())
+ def test_square_root_mod_prime(self, vals):
+ square, prime = vals
+
+ calc = square_root_mod_prime(square, prime)
+ assert calc * calc % prime == square
+
+ @settings(**HYP_SETTINGS)
+ @given(st.integers(min_value=1, max_value=10**12))
+ @example(265399 * 1526929)
+ @example(373297 ** 2 * 553991)
+ def test_factorization(self, num):
+ factors = factorization(num)
+ mult = 1
+ for i in factors:
+ mult *= i[0] ** i[1]
+ assert mult == num
+
+ @settings(**HYP_SETTINGS)
+ @given(st.integers(min_value=3, max_value=1000).filter(lambda x: x % 2))
+ def test_jacobi(self, mod):
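+        # Worked example: jacobi(2, 15) = jacobi(2, 3) * jacobi(2, 5)
+        # = (-1) * (-1) = 1, even though 2 is not a quadratic residue
+        # mod 15; only for prime moduli does the Jacobi symbol (then equal
+        # to the Legendre symbol) decide quadratic residuosity.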
+ if is_prime(mod):
+ squares = set()
+ for root in range(1, mod):
+ assert jacobi(root * root, mod) == 1
+ squares.add(root * root % mod)
+ for i in range(1, mod):
+ if i not in squares:
+ assert jacobi(i, mod) == -1
+ else:
+ factors = factorization(mod)
+ for a in range(1, mod):
+ c = 1
+ for i in factors:
+ c *= jacobi(a, i[0]) ** i[1]
+ assert c == jacobi(a, mod)
+
+ @given(st_two_nums_rel_prime())
+ def test_inverse_mod(self, nums):
+ num, mod = nums
+
+ inv = inverse_mod(num, mod)
+
+ assert 0 < inv < mod
+ assert num * inv % mod == 1
+
+ def test_inverse_mod_with_zero(self):
+ assert 0 == inverse_mod(0, 11)
diff --git a/third_party/python/ecdsa/src/ecdsa/test_pyecdsa.py b/third_party/python/ecdsa/src/ecdsa/test_pyecdsa.py
new file mode 100644
index 0000000000..d83eb01d10
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/test_pyecdsa.py
@@ -0,0 +1,1445 @@
+from __future__ import with_statement, division
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+import os
+import time
+import shutil
+import subprocess
+import pytest
+from binascii import hexlify, unhexlify
+from hashlib import sha1, sha256, sha384, sha512
+import hashlib
+from functools import partial
+
+from hypothesis import given
+import hypothesis.strategies as st
+
+from six import b, print_, binary_type
+from .keys import SigningKey, VerifyingKey
+from .keys import BadSignatureError, MalformedPointError, BadDigestError
+from . import util
+from .util import sigencode_der, sigencode_strings
+from .util import sigdecode_der, sigdecode_strings
+from .util import number_to_string, encoded_oid_ecPublicKey, \
+ MalformedSignature
+from .curves import Curve, UnknownCurveError
+from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, \
+ SECP256k1, BRAINPOOLP160r1, BRAINPOOLP192r1, BRAINPOOLP224r1, \
+ BRAINPOOLP256r1, BRAINPOOLP320r1, BRAINPOOLP384r1, BRAINPOOLP512r1, \
+ curves
+from .ecdsa import curve_brainpoolp224r1, curve_brainpoolp256r1, \
+ curve_brainpoolp384r1, curve_brainpoolp512r1
+from .ellipticcurve import Point
+from . import der
+from . import rfc6979
+from . import ecdsa
+
+
+class SubprocessError(Exception):
+ pass
+
+
+def run_openssl(cmd):
+ OPENSSL = "openssl"
+ p = subprocess.Popen([OPENSSL] + cmd.split(),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ stdout, ignored = p.communicate()
+ if p.returncode != 0:
+ raise SubprocessError("cmd '%s %s' failed: rc=%s, stdout/err was %s" %
+ (OPENSSL, cmd, p.returncode, stdout))
+ return stdout.decode()
+
+
+class ECDSA(unittest.TestCase):
+ def test_basic(self):
+ priv = SigningKey.generate()
+ pub = priv.get_verifying_key()
+
+ data = b("blahblah")
+ sig = priv.sign(data)
+
+ self.assertTrue(pub.verify(sig, data))
+ self.assertRaises(BadSignatureError, pub.verify, sig, data + b("bad"))
+
+ pub2 = VerifyingKey.from_string(pub.to_string())
+ self.assertTrue(pub2.verify(sig, data))
+
+ def test_deterministic(self):
+ data = b("blahblah")
+ secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16)
+
+ priv = SigningKey.from_secret_exponent(secexp, SECP256k1, sha256)
+ pub = priv.get_verifying_key()
+
+ k = rfc6979.generate_k(
+ SECP256k1.generator.order(), secexp, sha256, sha256(data).digest())
+
+ sig1 = priv.sign(data, k=k)
+ self.assertTrue(pub.verify(sig1, data))
+
+ sig2 = priv.sign(data, k=k)
+ self.assertTrue(pub.verify(sig2, data))
+
+ sig3 = priv.sign_deterministic(data, sha256)
+ self.assertTrue(pub.verify(sig3, data))
+
+ self.assertEqual(sig1, sig2)
+ self.assertEqual(sig1, sig3)
+
+ def test_bad_usage(self):
+ # sk=SigningKey() is wrong
+ self.assertRaises(TypeError, SigningKey)
+ self.assertRaises(TypeError, VerifyingKey)
+
+ def test_lengths(self):
+ default = NIST192p
+ priv = SigningKey.generate()
+ pub = priv.get_verifying_key()
+ self.assertEqual(len(pub.to_string()), default.verifying_key_length)
+ sig = priv.sign(b("data"))
+ self.assertEqual(len(sig), default.signature_length)
+ for curve in (NIST192p, NIST224p, NIST256p, NIST384p, NIST521p,
+ BRAINPOOLP160r1, BRAINPOOLP192r1, BRAINPOOLP224r1,
+ BRAINPOOLP256r1, BRAINPOOLP320r1, BRAINPOOLP384r1,
+ BRAINPOOLP512r1):
+ start = time.time()
+ priv = SigningKey.generate(curve=curve)
+ pub1 = priv.get_verifying_key()
+ keygen_time = time.time() - start
+ pub2 = VerifyingKey.from_string(pub1.to_string(), curve)
+ self.assertEqual(pub1.to_string(), pub2.to_string())
+ self.assertEqual(len(pub1.to_string()),
+ curve.verifying_key_length)
+ start = time.time()
+ sig = priv.sign(b("data"))
+ sign_time = time.time() - start
+ self.assertEqual(len(sig), curve.signature_length)
+
+ def test_serialize(self):
+ seed = b("secret")
+ curve = NIST192p
+ secexp1 = util.randrange_from_seed__trytryagain(seed, curve.order)
+ secexp2 = util.randrange_from_seed__trytryagain(seed, curve.order)
+ self.assertEqual(secexp1, secexp2)
+ priv1 = SigningKey.from_secret_exponent(secexp1, curve)
+ priv2 = SigningKey.from_secret_exponent(secexp2, curve)
+ self.assertEqual(hexlify(priv1.to_string()),
+ hexlify(priv2.to_string()))
+ self.assertEqual(priv1.to_pem(), priv2.to_pem())
+ pub1 = priv1.get_verifying_key()
+ pub2 = priv2.get_verifying_key()
+ data = b("data")
+ sig1 = priv1.sign(data)
+ sig2 = priv2.sign(data)
+ self.assertTrue(pub1.verify(sig1, data))
+ self.assertTrue(pub2.verify(sig1, data))
+ self.assertTrue(pub1.verify(sig2, data))
+ self.assertTrue(pub2.verify(sig2, data))
+ self.assertEqual(hexlify(pub1.to_string()),
+ hexlify(pub2.to_string()))
+
+ def test_nonrandom(self):
+ s = b("all the entropy in the entire world, compressed into one line")
+
+ def not_much_entropy(numbytes):
+ return s[:numbytes]
+
+ # we control the entropy source, these two keys should be identical:
+ priv1 = SigningKey.generate(entropy=not_much_entropy)
+ priv2 = SigningKey.generate(entropy=not_much_entropy)
+ self.assertEqual(hexlify(priv1.get_verifying_key().to_string()),
+ hexlify(priv2.get_verifying_key().to_string()))
+ # likewise, signatures should be identical. Obviously you'd never
+ # want to do this with keys you care about, because the secrecy of
+ # the private key depends upon using different random numbers for
+ # each signature
+ sig1 = priv1.sign(b("data"), entropy=not_much_entropy)
+ sig2 = priv2.sign(b("data"), entropy=not_much_entropy)
+ self.assertEqual(hexlify(sig1), hexlify(sig2))
+
+ def assertTruePrivkeysEqual(self, priv1, priv2):
+ self.assertEqual(priv1.privkey.secret_multiplier,
+ priv2.privkey.secret_multiplier)
+ self.assertEqual(priv1.privkey.public_key.generator,
+ priv2.privkey.public_key.generator)
+
+ def test_privkey_creation(self):
+ s = b("all the entropy in the entire world, compressed into one line")
+
+ def not_much_entropy(numbytes):
+ return s[:numbytes]
+
+ priv1 = SigningKey.generate()
+ self.assertEqual(priv1.baselen, NIST192p.baselen)
+
+ priv1 = SigningKey.generate(curve=NIST224p)
+ self.assertEqual(priv1.baselen, NIST224p.baselen)
+
+ priv1 = SigningKey.generate(entropy=not_much_entropy)
+ self.assertEqual(priv1.baselen, NIST192p.baselen)
+ priv2 = SigningKey.generate(entropy=not_much_entropy)
+ self.assertEqual(priv2.baselen, NIST192p.baselen)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ priv1 = SigningKey.from_secret_exponent(secexp=3)
+ self.assertEqual(priv1.baselen, NIST192p.baselen)
+ priv2 = SigningKey.from_secret_exponent(secexp=3)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ priv1 = SigningKey.from_secret_exponent(secexp=4, curve=NIST224p)
+ self.assertEqual(priv1.baselen, NIST224p.baselen)
+
+ def test_privkey_strings(self):
+ priv1 = SigningKey.generate()
+ s1 = priv1.to_string()
+ self.assertEqual(type(s1), binary_type)
+ self.assertEqual(len(s1), NIST192p.baselen)
+ priv2 = SigningKey.from_string(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ s1 = priv1.to_pem()
+ self.assertEqual(type(s1), binary_type)
+ self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
+ self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
+ priv2 = SigningKey.from_pem(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ s1 = priv1.to_der()
+ self.assertEqual(type(s1), binary_type)
+ priv2 = SigningKey.from_der(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ priv1 = SigningKey.generate(curve=NIST256p)
+ s1 = priv1.to_pem()
+ self.assertEqual(type(s1), binary_type)
+ self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
+ self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
+ priv2 = SigningKey.from_pem(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ s1 = priv1.to_der()
+ self.assertEqual(type(s1), binary_type)
+ priv2 = SigningKey.from_der(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ def test_privkey_strings_brainpool(self):
+ priv1 = SigningKey.generate(curve=BRAINPOOLP512r1)
+ s1 = priv1.to_pem()
+ self.assertEqual(type(s1), binary_type)
+ self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
+ self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
+ priv2 = SigningKey.from_pem(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ s1 = priv1.to_der()
+ self.assertEqual(type(s1), binary_type)
+ priv2 = SigningKey.from_der(s1)
+ self.assertTruePrivkeysEqual(priv1, priv2)
+
+ def assertTruePubkeysEqual(self, pub1, pub2):
+ self.assertEqual(pub1.pubkey.point, pub2.pubkey.point)
+ self.assertEqual(pub1.pubkey.generator, pub2.pubkey.generator)
+ self.assertEqual(pub1.curve, pub2.curve)
+
+ def test_pubkey_strings(self):
+ priv1 = SigningKey.generate()
+ pub1 = priv1.get_verifying_key()
+ s1 = pub1.to_string()
+ self.assertEqual(type(s1), binary_type)
+ self.assertEqual(len(s1), NIST192p.verifying_key_length)
+ pub2 = VerifyingKey.from_string(s1)
+ self.assertTruePubkeysEqual(pub1, pub2)
+
+ priv1 = SigningKey.generate(curve=NIST256p)
+ pub1 = priv1.get_verifying_key()
+ s1 = pub1.to_string()
+ self.assertEqual(type(s1), binary_type)
+ self.assertEqual(len(s1), NIST256p.verifying_key_length)
+ pub2 = VerifyingKey.from_string(s1, curve=NIST256p)
+ self.assertTruePubkeysEqual(pub1, pub2)
+
+ pub1_der = pub1.to_der()
+ self.assertEqual(type(pub1_der), binary_type)
+ pub2 = VerifyingKey.from_der(pub1_der)
+ self.assertTruePubkeysEqual(pub1, pub2)
+
+ self.assertRaises(der.UnexpectedDER,
+ VerifyingKey.from_der, pub1_der + b("junk"))
+ badpub = VerifyingKey.from_der(pub1_der)
+
+ class FakeGenerator:
+ def order(self):
+ return 123456789
+
+ badcurve = Curve("unknown", None, FakeGenerator(), (1, 2, 3, 4, 5, 6), None)
+ badpub.curve = badcurve
+ badder = badpub.to_der()
+ self.assertRaises(UnknownCurveError, VerifyingKey.from_der, badder)
+
+ pem = pub1.to_pem()
+ self.assertEqual(type(pem), binary_type)
+ self.assertTrue(pem.startswith(b("-----BEGIN PUBLIC KEY-----")), pem)
+ self.assertTrue(pem.strip().endswith(b("-----END PUBLIC KEY-----")), pem)
+ pub2 = VerifyingKey.from_pem(pem)
+ self.assertTruePubkeysEqual(pub1, pub2)
+
+ def test_pubkey_strings_brainpool(self):
+ priv1 = SigningKey.generate(curve=BRAINPOOLP512r1)
+ pub1 = priv1.get_verifying_key()
+ s1 = pub1.to_string()
+ self.assertEqual(type(s1), binary_type)
+ self.assertEqual(len(s1), BRAINPOOLP512r1.verifying_key_length)
+ pub2 = VerifyingKey.from_string(s1, curve=BRAINPOOLP512r1)
+ self.assertTruePubkeysEqual(pub1, pub2)
+
+ pub1_der = pub1.to_der()
+ self.assertEqual(type(pub1_der), binary_type)
+ pub2 = VerifyingKey.from_der(pub1_der)
+ self.assertTruePubkeysEqual(pub1, pub2)
+
+ def test_vk_to_der_with_invalid_point_encoding(self):
+ sk = SigningKey.generate()
+ vk = sk.verifying_key
+
+ with self.assertRaises(ValueError):
+ vk.to_der("raw")
+
+ def test_sk_to_der_with_invalid_point_encoding(self):
+ sk = SigningKey.generate()
+
+ with self.assertRaises(ValueError):
+ sk.to_der("raw")
+
+ def test_vk_from_der_garbage_after_curve_oid(self):
+ type_oid_der = encoded_oid_ecPublicKey
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + \
+ b('garbage')
+ enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
+ point_der = der.encode_bitstring(b'\x00\xff', None)
+ to_decode = der.encode_sequence(enc_type_der, point_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ VerifyingKey.from_der(to_decode)
+
+ def test_vk_from_der_invalid_key_type(self):
+ type_oid_der = der.encode_oid(*(1, 2, 3))
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
+ enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
+ point_der = der.encode_bitstring(b'\x00\xff', None)
+ to_decode = der.encode_sequence(enc_type_der, point_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ VerifyingKey.from_der(to_decode)
+
+ def test_vk_from_der_garbage_after_point_string(self):
+ type_oid_der = encoded_oid_ecPublicKey
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
+ enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
+ point_der = der.encode_bitstring(b'\x00\xff', None) + b('garbage')
+ to_decode = der.encode_sequence(enc_type_der, point_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ VerifyingKey.from_der(to_decode)
+
+ def test_vk_from_der_invalid_bitstring(self):
+ type_oid_der = encoded_oid_ecPublicKey
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
+ enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
+ point_der = der.encode_bitstring(b'\x08\xff', None)
+ to_decode = der.encode_sequence(enc_type_der, point_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ VerifyingKey.from_der(to_decode)
+
+ def test_vk_from_der_with_invalid_length_of_encoding(self):
+ type_oid_der = encoded_oid_ecPublicKey
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
+ enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
+ point_der = der.encode_bitstring(b'\xff'*64, 0)
+ to_decode = der.encode_sequence(enc_type_der, point_der)
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_der(to_decode)
+
+ def test_vk_from_der_with_raw_encoding(self):
+ type_oid_der = encoded_oid_ecPublicKey
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
+ enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
+ point_der = der.encode_bitstring(b'\xff'*48, 0)
+ to_decode = der.encode_sequence(enc_type_der, point_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ VerifyingKey.from_der(to_decode)
+
+ def test_signature_strings(self):
+ priv1 = SigningKey.generate()
+ pub1 = priv1.get_verifying_key()
+ data = b("data")
+
+ sig = priv1.sign(data)
+ self.assertEqual(type(sig), binary_type)
+ self.assertEqual(len(sig), NIST192p.signature_length)
+ self.assertTrue(pub1.verify(sig, data))
+
+ sig = priv1.sign(data, sigencode=sigencode_strings)
+ self.assertEqual(type(sig), tuple)
+ self.assertEqual(len(sig), 2)
+ self.assertEqual(type(sig[0]), binary_type)
+ self.assertEqual(type(sig[1]), binary_type)
+ self.assertEqual(len(sig[0]), NIST192p.baselen)
+ self.assertEqual(len(sig[1]), NIST192p.baselen)
+ self.assertTrue(pub1.verify(sig, data, sigdecode=sigdecode_strings))
+
+ sig_der = priv1.sign(data, sigencode=sigencode_der)
+ self.assertEqual(type(sig_der), binary_type)
+ self.assertTrue(pub1.verify(sig_der, data, sigdecode=sigdecode_der))
+
+ def test_sig_decode_strings_with_invalid_count(self):
+ with self.assertRaises(MalformedSignature):
+ sigdecode_strings([b('one'), b('two'), b('three')], 0xff)
+
+ def test_sig_decode_strings_with_wrong_r_len(self):
+ with self.assertRaises(MalformedSignature):
+ sigdecode_strings([b('one'), b('two')], 0xff)
+
+ def test_sig_decode_strings_with_wrong_s_len(self):
+ with self.assertRaises(MalformedSignature):
+ sigdecode_strings([b('\xa0'), b('\xb0\xff')], 0xff)
+
+ def test_verify_with_too_long_input(self):
+ sk = SigningKey.generate()
+ vk = sk.verifying_key
+
+ with self.assertRaises(BadDigestError):
+ vk.verify_digest(None, b('\x00') * 128)
+
+ def test_sk_from_secret_exponent_with_wrong_sec_exponent(self):
+ with self.assertRaises(MalformedPointError):
+ SigningKey.from_secret_exponent(0)
+
+ def test_sk_from_string_with_wrong_len_string(self):
+ with self.assertRaises(MalformedPointError):
+ SigningKey.from_string(b('\x01'))
+
+ def test_sk_from_der_with_junk_after_sequence(self):
+ ver_der = der.encode_integer(1)
+ to_decode = der.encode_sequence(ver_der) + b('garbage')
+
+ with self.assertRaises(der.UnexpectedDER):
+ SigningKey.from_der(to_decode)
+
+ def test_sk_from_der_with_wrong_version(self):
+ ver_der = der.encode_integer(0)
+ to_decode = der.encode_sequence(ver_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ SigningKey.from_der(to_decode)
+
+ def test_sk_from_der_invalid_const_tag(self):
+ ver_der = der.encode_integer(1)
+ privkey_der = der.encode_octet_string(b('\x00\xff'))
+ curve_oid_der = der.encode_oid(*(1, 2, 3))
+ const_der = der.encode_constructed(1, curve_oid_der)
+ to_decode = der.encode_sequence(ver_der, privkey_der, const_der,
+ curve_oid_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ SigningKey.from_der(to_decode)
+
+ def test_sk_from_der_garbage_after_privkey_oid(self):
+ ver_der = der.encode_integer(1)
+ privkey_der = der.encode_octet_string(b('\x00\xff'))
+ curve_oid_der = der.encode_oid(*(1, 2, 3)) + b('garbage')
+ const_der = der.encode_constructed(0, curve_oid_der)
+ to_decode = der.encode_sequence(ver_der, privkey_der, const_der,
+ curve_oid_der)
+
+ with self.assertRaises(der.UnexpectedDER):
+ SigningKey.from_der(to_decode)
+
+ def test_sk_from_der_with_short_privkey(self):
+ ver_der = der.encode_integer(1)
+ privkey_der = der.encode_octet_string(b('\x00\xff'))
+ curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
+ const_der = der.encode_constructed(0, curve_oid_der)
+ to_decode = der.encode_sequence(ver_der, privkey_der, const_der,
+ curve_oid_der)
+
+ sk = SigningKey.from_der(to_decode)
+ self.assertEqual(sk.privkey.secret_multiplier, 255)
+
+ def test_sign_with_too_long_hash(self):
+ sk = SigningKey.from_secret_exponent(12)
+
+ with self.assertRaises(BadDigestError):
+ sk.sign_digest(b('\xff') * 64)
+
+ def test_hashfunc(self):
+ sk = SigningKey.generate(curve=NIST256p, hashfunc=sha256)
+ data = b("security level is 128 bits")
+ sig = sk.sign(data)
+ vk = VerifyingKey.from_string(sk.get_verifying_key().to_string(),
+ curve=NIST256p, hashfunc=sha256)
+ self.assertTrue(vk.verify(sig, data))
+
+ sk2 = SigningKey.generate(curve=NIST256p)
+ sig2 = sk2.sign(data, hashfunc=sha256)
+ vk2 = VerifyingKey.from_string(sk2.get_verifying_key().to_string(),
+ curve=NIST256p, hashfunc=sha256)
+ self.assertTrue(vk2.verify(sig2, data))
+
+ vk3 = VerifyingKey.from_string(sk.get_verifying_key().to_string(),
+ curve=NIST256p)
+ self.assertTrue(vk3.verify(sig, data, hashfunc=sha256))
+
+ def test_public_key_recovery(self):
+ # Create keys
+ curve = NIST256p
+
+ sk = SigningKey.generate(curve=curve)
+ vk = sk.get_verifying_key()
+
+ # Sign a message
+ data = b("blahblah")
+ signature = sk.sign(data)
+
+ # Recover verifying keys
+ recovered_vks = VerifyingKey.from_public_key_recovery(signature, data, curve)
+
+ # Test if each pk is valid
+ for recovered_vk in recovered_vks:
+ # Test if recovered vk is valid for the data
+ self.assertTrue(recovered_vk.verify(signature, data))
+
+ # Test if properties are equal
+ self.assertEqual(vk.curve, recovered_vk.curve)
+ self.assertEqual(vk.default_hashfunc, recovered_vk.default_hashfunc)
+
+        # Test if the original vk is in the list of recovered keys
+ self.assertTrue(
+ vk.pubkey.point in [recovered_vk.pubkey.point for recovered_vk in recovered_vks])
+
+ def test_public_key_recovery_with_custom_hash(self):
+ # Create keys
+ curve = NIST256p
+
+ sk = SigningKey.generate(curve=curve, hashfunc=sha256)
+ vk = sk.get_verifying_key()
+
+ # Sign a message
+ data = b("blahblah")
+ signature = sk.sign(data)
+
+ # Recover verifying keys
+ recovered_vks = VerifyingKey.\
+ from_public_key_recovery(signature, data, curve,
+ hashfunc=sha256)
+
+ # Test if each pk is valid
+ for recovered_vk in recovered_vks:
+ # Test if recovered vk is valid for the data
+ self.assertTrue(recovered_vk.verify(signature, data))
+
+ # Test if properties are equal
+ self.assertEqual(vk.curve, recovered_vk.curve)
+ self.assertEqual(sha256, recovered_vk.default_hashfunc)
+
+        # Test if the original vk is in the list of recovered keys
+ self.assertTrue(vk.pubkey.point in
+ [recovered_vk.pubkey.point for recovered_vk in recovered_vks])
+
+ def test_encoding(self):
+ sk = SigningKey.from_secret_exponent(123456789)
+ vk = sk.verifying_key
+
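+        # SEC 1 point-encoding prefixes: 0x04 = uncompressed, 0x02/0x03 =
+        # compressed (even/odd y), 0x06/0x07 = hybrid (even/odd y); the
+        # "raw" encoding is the bare x||y concatenation with no prefix byte.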
+ exp = b('\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3'
+ '\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4'
+ 'z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*')
+ self.assertEqual(vk.to_string(), exp)
+ self.assertEqual(vk.to_string('raw'), exp)
+ self.assertEqual(vk.to_string('uncompressed'), b('\x04') + exp)
+ self.assertEqual(vk.to_string('compressed'), b('\x02') + exp[:24])
+ self.assertEqual(vk.to_string('hybrid'), b('\x06') + exp)
+
+ def test_decoding(self):
+ sk = SigningKey.from_secret_exponent(123456789)
+ vk = sk.verifying_key
+
+ enc = b('\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3'
+ '\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4'
+ 'z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*')
+
+ from_raw = VerifyingKey.from_string(enc)
+ self.assertEqual(from_raw.pubkey.point, vk.pubkey.point)
+
+ from_uncompressed = VerifyingKey.from_string(b('\x04') + enc)
+ self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point)
+
+ from_compressed = VerifyingKey.from_string(b('\x02') + enc[:24])
+ self.assertEqual(from_compressed.pubkey.point, vk.pubkey.point)
+
+ from_uncompressed = VerifyingKey.from_string(b('\x06') + enc)
+ self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point)
+
+ def test_decoding_with_malformed_uncompressed(self):
+ enc = b('\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3'
+ '\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4'
+ 'z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*')
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b('\x02') + enc)
+
+ def test_decoding_with_malformed_compressed(self):
+ enc = b('\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3'
+ '\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4'
+ 'z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*')
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b('\x01') + enc[:24])
+
+ def test_decoding_with_inconsistent_hybrid(self):
+ enc = b('\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3'
+ '\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4'
+ 'z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*')
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b('\x07') + enc)
+
+ def test_decoding_with_point_not_on_curve(self):
+ enc = b('\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3'
+ '\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4'
+ 'z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*')
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(enc[:47] + b('\x00'))
+
+ def test_decoding_with_point_at_infinity(self):
+        # decoding the point at infinity is unsupported, as it never needs
+        # to be encoded
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b('\x00'))
+
+ def test_not_lying_on_curve(self):
+ enc = number_to_string(NIST192p.curve.p(), NIST192p.curve.p()+1)
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b('\x02') + enc)
+
+ def test_from_string_with_invalid_curve_too_short_ver_key_len(self):
+ # both verifying_key_length and baselen are calculated internally
+        # by the Curve constructor, but since we depend on them, verify
+ # that inconsistent values are detected
+ curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2))
+ curve.verifying_key_length = 16
+ curve.baselen = 32
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b('\x00')*16, curve)
+
+ def test_from_string_with_invalid_curve_too_long_ver_key_len(self):
+ # both verifying_key_length and baselen are calculated internally
+        # by the Curve constructor, but since we depend on them, verify
+ # that inconsistent values are detected
+ curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2))
+ curve.verifying_key_length = 16
+ curve.baselen = 16
+
+ with self.assertRaises(MalformedPointError):
+ VerifyingKey.from_string(b('\x00')*16, curve)
+
+
+@pytest.mark.parametrize("val,even",
+ [(i, j) for i in range(256) for j in [True, False]])
+def test_VerifyingKey_decode_with_small_values(val, even):
+ enc = number_to_string(val, NIST192p.order)
+
+ if even:
+ enc = b('\x02') + enc
+ else:
+ enc = b('\x03') + enc
+
+    # small values may or may not be valid public keys; verify that only
+    # the expected exception is raised when they are not
+ try:
+ vk = VerifyingKey.from_string(enc)
+ assert isinstance(vk, VerifyingKey)
+ except MalformedPointError:
+ assert True
+
+
+params = []
+for curve in curves:
+ for enc in ["raw", "uncompressed", "compressed", "hybrid"]:
+ params.append(pytest.param(curve, enc, id="{0}-{1}".format(
+ curve.name, enc)))
+
+
+@pytest.mark.parametrize("curve,encoding", params)
+def test_VerifyingKey_encode_decode(curve, encoding):
+ sk = SigningKey.generate(curve=curve)
+ vk = sk.verifying_key
+
+ encoded = vk.to_string(encoding)
+
+ from_enc = VerifyingKey.from_string(encoded, curve=curve)
+
+ assert vk.pubkey.point == from_enc.pubkey.point
+
+
+class OpenSSL(unittest.TestCase):
+ # test interoperability with OpenSSL tools. Note that openssl's ECDSA
+ # sign/verify arguments changed between 0.9.8 and 1.0.0: the early
+ # versions require "-ecdsa-with-SHA1", the later versions want just
+ # "-SHA1" (or to leave out that argument entirely, which means the
+ # signature will use some default digest algorithm, probably determined
+ # by the key, probably always SHA1).
+ #
+ # openssl ecparam -name secp224r1 -genkey -out privkey.pem
+ # openssl ec -in privkey.pem -text -noout # get the priv/pub keys
+ # openssl dgst -ecdsa-with-SHA1 -sign privkey.pem -out data.sig data.txt
+ # openssl asn1parse -in data.sig -inform DER
+ # data.sig is 64 bytes, probably 56b plus ASN1 overhead
+ # openssl dgst -ecdsa-with-SHA1 -prverify privkey.pem -signature data.sig data.txt ; echo $?
+ # openssl ec -in privkey.pem -pubout -out pubkey.pem
+ # openssl ec -in privkey.pem -pubout -outform DER -out pubkey.der
+
+ OPENSSL_SUPPORTED_CURVES = set(c.split(':')[0].strip() for c in
+ run_openssl("ecparam -list_curves")
+ .split('\n'))
+
+ def get_openssl_messagedigest_arg(self, hash_name):
+ v = run_openssl("version")
+ # e.g. "OpenSSL 1.0.0 29 Mar 2010", or "OpenSSL 1.0.0a 1 Jun 2010",
+ # or "OpenSSL 0.9.8o 01 Jun 2010"
+ vs = v.split()[1].split(".")
+ if vs >= ["1", "0", "0"]: # pragma: no cover
+ return "-{0}".format(hash_name)
+ else: # pragma: no cover
+ return "-ecdsa-with-{0}".format(hash_name)
+
+ # sk: 1:OpenSSL->python 2:python->OpenSSL
+ # vk: 3:OpenSSL->python 4:python->OpenSSL
+ # sig: 5:OpenSSL->python 6:python->OpenSSL
+
+ @pytest.mark.skipif("prime192v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime192v1")
+ def test_from_openssl_nist192p(self):
+ return self.do_test_from_openssl(NIST192p)
+
+ @pytest.mark.skipif("prime192v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime192v1")
+ def test_from_openssl_nist192p_sha256(self):
+ return self.do_test_from_openssl(NIST192p, "SHA256")
+
+ @pytest.mark.skipif("secp224r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp224r1")
+ def test_from_openssl_nist224p(self):
+ return self.do_test_from_openssl(NIST224p)
+
+ @pytest.mark.skipif("prime256v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime256v1")
+ def test_from_openssl_nist256p(self):
+ return self.do_test_from_openssl(NIST256p)
+
+ @pytest.mark.skipif("prime256v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime256v1")
+ def test_from_openssl_nist256p_sha384(self):
+ return self.do_test_from_openssl(NIST256p, "SHA384")
+
+ @pytest.mark.skipif("prime256v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime256v1")
+ def test_from_openssl_nist256p_sha512(self):
+ return self.do_test_from_openssl(NIST256p, "SHA512")
+
+ @pytest.mark.skipif("secp384r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp384r1")
+ def test_from_openssl_nist384p(self):
+ return self.do_test_from_openssl(NIST384p)
+
+ @pytest.mark.skipif("secp521r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp521r1")
+ def test_from_openssl_nist521p(self):
+ return self.do_test_from_openssl(NIST521p)
+
+ @pytest.mark.skipif("secp256k1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp256k1")
+ def test_from_openssl_secp256k1(self):
+ return self.do_test_from_openssl(SECP256k1)
+
+ @pytest.mark.skipif("brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP160r1")
+ def test_from_openssl_brainpoolp160r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP160r1)
+
+ @pytest.mark.skipif("brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP192r1")
+ def test_from_openssl_brainpoolp192r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP192r1)
+
+ @pytest.mark.skipif("brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP224r1")
+ def test_from_openssl_brainpoolp224r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP224r1)
+
+ @pytest.mark.skipif("brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP256r1")
+ def test_from_openssl_brainpoolp256r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP256r1)
+
+ @pytest.mark.skipif("brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP320r1")
+ def test_from_openssl_brainpoolp320r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP320r1)
+
+ @pytest.mark.skipif("brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP384r1")
+ def test_from_openssl_brainpoolp384r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP384r1)
+
+ @pytest.mark.skipif("brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP512r1")
+ def test_from_openssl_brainpoolp512r1(self):
+ return self.do_test_from_openssl(BRAINPOOLP512r1)
+
+ def do_test_from_openssl(self, curve, hash_name="SHA1"):
+ curvename = curve.openssl_name
+ assert curvename
+ # OpenSSL: create sk, vk, sign.
+ # Python: read vk(3), checksig(5), read sk(1), sign, check
+ mdarg = self.get_openssl_messagedigest_arg(hash_name)
+ if os.path.isdir("t"): # pragma: no cover
+ shutil.rmtree("t")
+ os.mkdir("t")
+ run_openssl("ecparam -name %s -genkey -out t/privkey.pem" % curvename)
+ run_openssl("ec -in t/privkey.pem -pubout -out t/pubkey.pem")
+ data = b("data")
+ with open("t/data.txt", "wb") as e:
+ e.write(data)
+ run_openssl("dgst %s -sign t/privkey.pem -out t/data.sig t/data.txt" % mdarg)
+ run_openssl("dgst %s -verify t/pubkey.pem -signature t/data.sig t/data.txt" % mdarg)
+ with open("t/pubkey.pem", "rb") as e:
+ pubkey_pem = e.read()
+ vk = VerifyingKey.from_pem(pubkey_pem) # 3
+ with open("t/data.sig", "rb") as e:
+ sig_der = e.read()
+ self.assertTrue(vk.verify(sig_der, data, # 5
+ hashfunc=partial(hashlib.new, hash_name),
+ sigdecode=sigdecode_der))
+
+ with open("t/privkey.pem") as e:
+ fp = e.read()
+ sk = SigningKey.from_pem(fp) # 1
+ sig = sk.sign(
+ data,
+ hashfunc=partial(hashlib.new, hash_name),
+ )
+ self.assertTrue(vk.verify(sig,
+ data,
+ hashfunc=partial(hashlib.new, hash_name)))
+
+ @pytest.mark.skipif("prime192v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime192v1")
+ def test_to_openssl_nist192p(self):
+ self.do_test_to_openssl(NIST192p)
+
+ @pytest.mark.skipif("prime192v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime192v1")
+ def test_to_openssl_nist192p_sha256(self):
+ self.do_test_to_openssl(NIST192p, "SHA256")
+
+ @pytest.mark.skipif("secp224r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp224r1")
+ def test_to_openssl_nist224p(self):
+ self.do_test_to_openssl(NIST224p)
+
+ @pytest.mark.skipif("prime256v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime256v1")
+ def test_to_openssl_nist256p(self):
+ self.do_test_to_openssl(NIST256p)
+
+ @pytest.mark.skipif("prime256v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime256v1")
+ def test_to_openssl_nist256p_sha384(self):
+ self.do_test_to_openssl(NIST256p, "SHA384")
+
+ @pytest.mark.skipif("prime256v1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support prime256v1")
+ def test_to_openssl_nist256p_sha512(self):
+ self.do_test_to_openssl(NIST256p, "SHA512")
+
+ @pytest.mark.skipif("secp384r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp384r1")
+ def test_to_openssl_nist384p(self):
+ self.do_test_to_openssl(NIST384p)
+
+ @pytest.mark.skipif("secp521r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp521r1")
+ def test_to_openssl_nist521p(self):
+ self.do_test_to_openssl(NIST521p)
+
+ @pytest.mark.skipif("secp256k1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support secp256k1")
+ def test_to_openssl_secp256k1(self):
+ self.do_test_to_openssl(SECP256k1)
+
+ @pytest.mark.skipif("brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP160r1")
+ def test_to_openssl_brainpoolp160r1(self):
+ self.do_test_to_openssl(BRAINPOOLP160r1)
+
+ @pytest.mark.skipif("brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP192r1")
+ def test_to_openssl_brainpoolp192r1(self):
+ self.do_test_to_openssl(BRAINPOOLP192r1)
+
+ @pytest.mark.skipif("brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP224r1")
+ def test_to_openssl_brainpoolp224r1(self):
+ self.do_test_to_openssl(BRAINPOOLP224r1)
+
+ @pytest.mark.skipif("brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP256r1")
+ def test_to_openssl_brainpoolp256r1(self):
+ self.do_test_to_openssl(BRAINPOOLP256r1)
+
+ @pytest.mark.skipif("brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP320r1")
+ def test_to_openssl_brainpoolp320r1(self):
+ self.do_test_to_openssl(BRAINPOOLP320r1)
+
+ @pytest.mark.skipif("brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP384r1")
+ def test_to_openssl_brainpoolp384r1(self):
+ self.do_test_to_openssl(BRAINPOOLP384r1)
+
+ @pytest.mark.skipif("brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES,
+ reason="system openssl does not support brainpoolP512r1")
+ def test_to_openssl_brainpoolp512r1(self):
+ self.do_test_to_openssl(BRAINPOOLP512r1)
+
+ def do_test_to_openssl(self, curve, hash_name="SHA1"):
+ curvename = curve.openssl_name
+ assert curvename
+ # Python: create sk, vk, sign.
+ # OpenSSL: read vk(4), checksig(6), read sk(2), sign, check
+ mdarg = self.get_openssl_messagedigest_arg(hash_name)
+ if os.path.isdir("t"): # pragma: no cover
+ shutil.rmtree("t")
+ os.mkdir("t")
+ sk = SigningKey.generate(curve=curve)
+ vk = sk.get_verifying_key()
+ data = b("data")
+ with open("t/pubkey.der", "wb") as e:
+ e.write(vk.to_der()) # 4
+ with open("t/pubkey.pem", "wb") as e:
+ e.write(vk.to_pem()) # 4
+ sig_der = sk.sign(data, hashfunc=partial(hashlib.new, hash_name),
+ sigencode=sigencode_der)
+
+ with open("t/data.sig", "wb") as e:
+ e.write(sig_der) # 6
+ with open("t/data.txt", "wb") as e:
+ e.write(data)
+ with open("t/baddata.txt", "wb") as e:
+ e.write(data + b("corrupt"))
+
+ self.assertRaises(SubprocessError, run_openssl,
+ "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/baddata.txt" % mdarg)
+ run_openssl("dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/data.txt" % mdarg)
+
+ with open("t/privkey.pem", "wb") as e:
+ e.write(sk.to_pem()) # 2
+ run_openssl("dgst %s -sign t/privkey.pem -out t/data.sig2 t/data.txt" % mdarg)
+ run_openssl("dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" % mdarg)
+
+
+class DER(unittest.TestCase):
+ def test_integer(self):
+ self.assertEqual(der.encode_integer(0), b("\x02\x01\x00"))
+ self.assertEqual(der.encode_integer(1), b("\x02\x01\x01"))
+ self.assertEqual(der.encode_integer(127), b("\x02\x01\x7f"))
+ self.assertEqual(der.encode_integer(128), b("\x02\x02\x00\x80"))
+ self.assertEqual(der.encode_integer(256), b("\x02\x02\x01\x00"))
+ # self.assertEqual(der.encode_integer(-1), b("\x02\x01\xff"))
+
+ def s(n):
+ return der.remove_integer(der.encode_integer(n) + b("junk"))
+ self.assertEqual(s(0), (0, b("junk")))
+ self.assertEqual(s(1), (1, b("junk")))
+ self.assertEqual(s(127), (127, b("junk")))
+ self.assertEqual(s(128), (128, b("junk")))
+ self.assertEqual(s(256), (256, b("junk")))
+ self.assertEqual(s(1234567890123456789012345678901234567890),
+ (1234567890123456789012345678901234567890, b("junk")))
+
+ def test_number(self):
+ self.assertEqual(der.encode_number(0), b("\x00"))
+ self.assertEqual(der.encode_number(127), b("\x7f"))
+ self.assertEqual(der.encode_number(128), b("\x81\x00"))
+ self.assertEqual(der.encode_number(3 * 128 + 7), b("\x83\x07"))
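+        # e.g. 840 is 6 * 128 + 72, so encode_number(840) yields
+        # b"\x86\x48": the continuation (high) bit is set on every byte but
+        # the last. These are the same two bytes that appear inside the DER
+        # encoding of the 1.2.840.10045... OIDs used elsewhere in this file.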
+ # self.assertEqual(der.read_number("\x81\x9b" + "more"), (155, 2))
+ # self.assertEqual(der.encode_number(155), b("\x81\x9b"))
+ for n in (0, 1, 2, 127, 128, 3 * 128 + 7, 840, 10045): # , 155):
+ x = der.encode_number(n) + b("more")
+ n1, llen = der.read_number(x)
+ self.assertEqual(n1, n)
+ self.assertEqual(x[llen:], b("more"))
+
+ def test_length(self):
+ self.assertEqual(der.encode_length(0), b("\x00"))
+ self.assertEqual(der.encode_length(127), b("\x7f"))
+ self.assertEqual(der.encode_length(128), b("\x81\x80"))
+ self.assertEqual(der.encode_length(255), b("\x81\xff"))
+ self.assertEqual(der.encode_length(256), b("\x82\x01\x00"))
+ self.assertEqual(der.encode_length(3 * 256 + 7), b("\x82\x03\x07"))
+ self.assertEqual(der.read_length(b("\x81\x9b") + b("more")), (155, 2))
+ self.assertEqual(der.encode_length(155), b("\x81\x9b"))
+ for n in (0, 1, 2, 127, 128, 255, 256, 3 * 256 + 7, 155):
+ x = der.encode_length(n) + b("more")
+ n1, llen = der.read_length(x)
+ self.assertEqual(n1, n)
+ self.assertEqual(x[llen:], b("more"))
+
+ def test_sequence(self):
+ x = der.encode_sequence(b("ABC"), b("DEF")) + b("GHI")
+ self.assertEqual(x, b("\x30\x06ABCDEFGHI"))
+ x1, rest = der.remove_sequence(x)
+ self.assertEqual(x1, b("ABCDEF"))
+ self.assertEqual(rest, b("GHI"))
+
+ def test_constructed(self):
+ x = der.encode_constructed(0, NIST224p.encoded_oid)
+ self.assertEqual(hexlify(x), b("a007") + b("06052b81040021"))
+ x = der.encode_constructed(1, unhexlify(b("0102030a0b0c")))
+ self.assertEqual(hexlify(x), b("a106") + b("0102030a0b0c"))
+
+
+class Util(unittest.TestCase):
+ def test_trytryagain(self):
+ tta = util.randrange_from_seed__trytryagain
+ for i in range(1000):
+ seed = "seed-%d" % i
+ for order in (2**8 - 2, 2**8 - 1, 2**8, 2**8 + 1, 2**8 + 2,
+ 2**16 - 1, 2**16 + 1):
+ n = tta(seed, order)
+ self.assertTrue(1 <= n < order, (1, n, order))
+ # this trytryagain *does* provide long-term stability
+ self.assertEqual(("%x" % (tta("seed", NIST224p.order))).encode(),
+ b("6fa59d73bf0446ae8743cf748fc5ac11d5585a90356417e97155c3bc"))
+
+ @given(st.integers(min_value=0, max_value=10**200))
+ def test_randrange(self, i):
+ # util.randrange does not provide long-term stability: we might
+ # change the algorithm in the future.
+ entropy = util.PRNG("seed-%d" % i)
+ for order in (2**8 - 2, 2**8 - 1, 2**8,
+ 2**16 - 1, 2**16 + 1,
+ ):
+ # that oddball 2**16+1 takes half our runtime
+ n = util.randrange(order, entropy=entropy)
+ self.assertTrue(1 <= n < order, (1, n, order))
+
+ def OFF_test_prove_uniformity(self): # pragma: no cover
+ order = 2**8 - 2
+ counts = dict([(i, 0) for i in range(1, order)])
+ assert 0 not in counts
+ assert order not in counts
+ for i in range(1000000):
+ seed = "seed-%d" % i
+ n = util.randrange_from_seed__trytryagain(seed, order)
+ counts[n] += 1
+ # this technique should use the full range
+ self.assertTrue(counts[order - 1])
+ for i in range(1, order):
+ print_("%3d: %s" % (i, "*" * (counts[i] // 100)))
+
+
+class RFC6979(unittest.TestCase):
+ # https://tools.ietf.org/html/rfc6979#appendix-A.1
+ def _do(self, generator, secexp, hsh, hash_func, expected):
+ actual = rfc6979.generate_k(generator.order(), secexp, hash_func, hsh)
+ self.assertEqual(expected, actual)
+
+ def test_SECP256k1(self):
+        '''The RFC doesn't contain test vectors for SECP256k1, the curve used
+        in Bitcoin; this vector was computed with the Go reference
+        implementation instead.'''
+ self._do(
+ generator=SECP256k1.generator,
+ secexp=int("9d0219792467d7d37b4d43298a7d0c05", 16),
+ hsh=sha256(b("sample")).digest(),
+ hash_func=sha256,
+ expected=int("8fa1f95d514760e498f28957b824ee6ec39ed64826ff4fecc2b5739ec45b91cd", 16))
+
+ def test_SECP256k1_2(self):
+ self._do(
+ generator=SECP256k1.generator,
+ secexp=int("cca9fbcc1b41e5a95d369eaa6ddcff73b61a4efaa279cfc6567e8daa39cbaf50", 16),
+ hsh=sha256(b("sample")).digest(),
+ hash_func=sha256,
+ expected=int("2df40ca70e639d89528a6b670d9d48d9165fdc0febc0974056bdce192b8e16a3", 16))
+
+ def test_SECP256k1_3(self):
+ self._do(
+ generator=SECP256k1.generator,
+ secexp=0x1,
+ hsh=sha256(b("Satoshi Nakamoto")).digest(),
+ hash_func=sha256,
+ expected=0x8F8A276C19F4149656B280621E358CCE24F5F52542772691EE69063B74F15D15)
+
+ def test_SECP256k1_4(self):
+ self._do(
+ generator=SECP256k1.generator,
+ secexp=0x1,
+ hsh=sha256(b("All those moments will be lost in time, like tears in rain. Time to die...")).digest(),
+ hash_func=sha256,
+ expected=0x38AA22D72376B4DBC472E06C3BA403EE0A394DA63FC58D88686C611ABA98D6B3)
+
+ def test_SECP256k1_5(self):
+ self._do(
+ generator=SECP256k1.generator,
+ secexp=0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364140,
+ hsh=sha256(b("Satoshi Nakamoto")).digest(),
+ hash_func=sha256,
+ expected=0x33A19B60E25FB6F4435AF53A3D42D493644827367E6453928554F43E49AA6F90)
+
+ def test_SECP256k1_6(self):
+ self._do(
+ generator=SECP256k1.generator,
+ secexp=0xf8b8af8ce3c7cca5e300d33939540c10d45ce001b8f252bfbc57ba0342904181,
+ hsh=sha256(b("Alan Turing")).digest(),
+ hash_func=sha256,
+ expected=0x525A82B70E67874398067543FD84C83D30C175FDC45FDEEE082FE13B1D7CFDF1)
+
+ def test_1(self):
+        # Basic example from the RFC; it also exercises the 'try-try-again'
+        # loop from Step H of RFC 6979
+ self._do(
+ generator=Point(None, 0, 0, int("4000000000000000000020108A2E0CC0D99F8A5EF", 16)),
+ secexp=int("09A4D6792295A7F730FC3F2B49CBC0F62E862272F", 16),
+ hsh=unhexlify(b("AF2BDBE1AA9B6EC1E2ADE1D694F41FC71A831D0268E9891562113D8A62ADD1BF")),
+ hash_func=sha256,
+ expected=int("23AF4074C90A02B3FE61D286D5C87F425E6BDD81B", 16))
+
+ def test_2(self):
+ self._do(
+ generator=NIST192p.generator,
+ secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
+ hsh=sha1(b("sample")).digest(),
+ hash_func=sha1,
+ expected=int("37D7CA00D2C7B0E5E412AC03BD44BA837FDD5B28CD3B0021", 16))
+
+ def test_3(self):
+ self._do(
+ generator=NIST192p.generator,
+ secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
+ hsh=sha256(b("sample")).digest(),
+ hash_func=sha256,
+ expected=int("32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", 16))
+
+ def test_4(self):
+ self._do(
+ generator=NIST192p.generator,
+ secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
+ hsh=sha512(b("sample")).digest(),
+ hash_func=sha512,
+ expected=int("A2AC7AB055E4F20692D49209544C203A7D1F2C0BFBC75DB1", 16))
+
+ def test_5(self):
+ self._do(
+ generator=NIST192p.generator,
+ secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
+ hsh=sha1(b("test")).digest(),
+ hash_func=sha1,
+ expected=int("D9CF9C3D3297D3260773A1DA7418DB5537AB8DD93DE7FA25", 16))
+
+ def test_6(self):
+ self._do(
+ generator=NIST192p.generator,
+ secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
+ hsh=sha256(b("test")).digest(),
+ hash_func=sha256,
+ expected=int("5C4CE89CF56D9E7C77C8585339B006B97B5F0680B4306C6C", 16))
+
+ def test_7(self):
+ self._do(
+ generator=NIST192p.generator,
+ secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
+ hsh=sha512(b("test")).digest(),
+ hash_func=sha512,
+ expected=int("0758753A5254759C7CFBAD2E2D9B0792EEE44136C9480527", 16))
+
+ def test_8(self):
+ self._do(
+ generator=NIST521p.generator,
+ secexp=int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
+ hsh=sha1(b("sample")).digest(),
+ hash_func=sha1,
+ expected=int("089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9", 16))
+
+ def test_9(self):
+ self._do(
+ generator=NIST521p.generator,
+ secexp=int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
+ hsh=sha256(b("sample")).digest(),
+ hash_func=sha256,
+ expected=int("0EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0", 16))
+
+ def test_10(self):
+ self._do(
+ generator=NIST521p.generator,
+ secexp=int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
+ hsh=sha512(b("test")).digest(),
+ hash_func=sha512,
+ expected=int("16200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D", 16))
+
+
+class ECDH(unittest.TestCase):
+ def _do(self, curve, generator, dA, x_qA, y_qA, dB, x_qB, y_qB, x_Z, y_Z):
+ qA = dA * generator
+ qB = dB * generator
+ Z = dA * qB
+ self.assertEqual(Point(curve, x_qA, y_qA), qA)
+ self.assertEqual(Point(curve, x_qB, y_qB), qB)
+ self.assertTrue((dA * qB) ==
+ (dA * dB * generator) ==
+ (dB * dA * generator) ==
+ (dB * qA))
+ self.assertEqual(Point(curve, x_Z, y_Z), Z)
+
+
+class RFC6932(ECDH):
+ # https://tools.ietf.org/html/rfc6932#appendix-A.1
+
+ def test_brainpoolP224r1(self):
+ self._do(
+ curve=curve_brainpoolp224r1,
+ generator=BRAINPOOLP224r1.generator,
+ dA=int("7C4B7A2C8A4BAD1FBB7D79CC0955DB7C6A4660CA64CC4778159B495E",
+ 16),
+ x_qA=int("B104A67A6F6E85E14EC1825E1539E8ECDBBF584922367DD88C6BDCF2",
+ 16),
+ y_qA=int("46D782E7FDB5F60CD8404301AC5949C58EDB26BC68BA07695B750A94",
+ 16),
+ dB=int("63976D4AAE6CD0F6DD18DEFEF55D96569D0507C03E74D6486FFA28FB",
+ 16),
+ x_qB=int("2A97089A9296147B71B21A4B574E1278245B536F14D8C2B9D07A874E",
+ 16),
+ y_qB=int("9B900D7C77A709A797276B8CA1BA61BB95B546FC29F862E44D59D25B",
+ 16),
+ x_Z=int("312DFD98783F9FB77B9704945A73BEB6DCCBE3B65D0F967DCAB574EB",
+ 16),
+ y_Z=int("6F800811D64114B1C48C621AB3357CF93F496E4238696A2A012B3C98",
+ 16))
+
+ def test_brainpoolP256r1(self):
+ self._do(
+ curve=curve_brainpoolp256r1,
+ generator=BRAINPOOLP256r1.generator,
+ dA=int("041EB8B1E2BC681BCE8E39963B2E9FC415B05283313DD1A8BCC055F11AE"
+ "49699", 16),
+ x_qA=int("78028496B5ECAAB3C8B6C12E45DB1E02C9E4D26B4113BC4F015F60C5C"
+ "CC0D206", 16),
+ y_qA=int("A2AE1762A3831C1D20F03F8D1E3C0C39AFE6F09B4D44BBE80CD100987"
+ "B05F92B", 16),
+ dB=int("06F5240EACDB9837BC96D48274C8AA834B6C87BA9CC3EEDD81F99A16B8D"
+ "804D3", 16),
+ x_qB=int("8E07E219BA588916C5B06AA30A2F464C2F2ACFC1610A3BE2FB240B635"
+ "341F0DB", 16),
+ y_qB=int("148EA1D7D1E7E54B9555B6C9AC90629C18B63BEE5D7AA6949EBBF47B2"
+ "4FDE40D", 16),
+ x_Z=int("05E940915549E9F6A4A75693716E37466ABA79B4BF2919877A16DD2CC2"
+ "E23708", 16),
+ y_Z=int("6BC23B6702BC5A019438CEEA107DAAD8B94232FFBBC350F3B137628FE6"
+ "FD134C", 16))
+
+ def test_brainpoolP384r1(self):
+ self._do(
+ curve=curve_brainpoolp384r1,
+ generator=BRAINPOOLP384r1.generator,
+ dA=int("014EC0755B78594BA47FB0A56F6173045B4331E74BA1A6F47322E70D79D"
+ "828D97E095884CA72B73FDABD5910DF0FA76A", 16),
+ x_qA=int("45CB26E4384DAF6FB776885307B9A38B7AD1B5C692E0C32F012533277"
+ "8F3B8D3F50CA358099B30DEB5EE69A95C058B4E", 16),
+ y_qA=int("8173A1C54AFFA7E781D0E1E1D12C0DC2B74F4DF58E4A4E3AF7026C5D3"
+ "2DC530A2CD89C859BB4B4B768497F49AB8CC859", 16),
+ dB=int("6B461CB79BD0EA519A87D6828815D8CE7CD9B3CAA0B5A8262CBCD550A01"
+ "5C90095B976F3529957506E1224A861711D54", 16),
+ x_qB=int("01BF92A92EE4BE8DED1A911125C209B03F99E3161CFCC986DC7711383"
+ "FC30AF9CE28CA3386D59E2C8D72CE1E7B4666E8", 16),
+ y_qB=int("3289C4A3A4FEE035E39BDB885D509D224A142FF9FBCC5CFE5CCBB3026"
+ "8EE47487ED8044858D31D848F7A95C635A347AC", 16),
+ x_Z=int("04CC4FF3DCCCB07AF24E0ACC529955B36D7C807772B92FCBE48F3AFE9A"
+ "2F370A1F98D3FA73FD0C0747C632E12F1423EC", 16),
+ y_Z=int("7F465F90BD69AFB8F828A214EB9716D66ABC59F17AF7C75EE7F1DE22AB"
+ "5D05085F5A01A9382D05BF72D96698FE3FF64E", 16))
+
+ def test_brainpoolP512r1(self):
+ self._do(
+ curve=curve_brainpoolp512r1,
+ generator=BRAINPOOLP512r1.generator,
+ dA=int("636B6BE0482A6C1C41AA7AE7B245E983392DB94CECEA2660A379CFE1595"
+ "59E357581825391175FC195D28BAC0CF03A7841A383B95C262B98378287"
+ "4CCE6FE333", 16),
+ x_qA=int("0562E68B9AF7CBFD5565C6B16883B777FF11C199161ECC427A39D17EC"
+ "2166499389571D6A994977C56AD8252658BA8A1B72AE42F4FB7532151"
+ "AFC3EF0971CCDA", 16),
+ y_qA=int("A7CA2D8191E21776A89860AFBC1F582FAA308D551C1DC6133AF9F9C3C"
+ "AD59998D70079548140B90B1F311AFB378AA81F51B275B2BE6B7DEE97"
+ "8EFC7343EA642E", 16),
+ dB=int("0AF4E7F6D52EDD52907BB8DBAB3992A0BB696EC10DF11892FF205B66D38"
+ "1ECE72314E6A6EA079CEA06961DBA5AE6422EF2E9EE803A1F236FB96A17"
+ "99B86E5C8B", 16),
+ x_qB=int("5A7954E32663DFF11AE24712D87419F26B708AC2B92877D6BFEE2BFC4"
+ "3714D89BBDB6D24D807BBD3AEB7F0C325F862E8BADE4F74636B97EAAC"
+ "E739E11720D323", 16),
+ y_qB=int("96D14621A9283A1BED84DE8DD64836B2C0758B11441179DC0C54C0D49"
+ "A47C03807D171DD544B72CAAEF7B7CE01C7753E2CAD1A861ECA55A719"
+ "54EE1BA35E04BE", 16),
+ x_Z=int("1EE8321A4BBF93B9CF8921AB209850EC9B7066D1984EF08C2BB7232362"
+ "08AC8F1A483E79461A00E0D5F6921CE9D360502F85C812BEDEE23AC5B2"
+ "10E5811B191E", 16),
+ y_Z=int("2632095B7B936174B41FD2FAF369B1D18DCADEED7E410A7E251F083109"
+ "7C50D02CFED02607B6A2D5ADB4C0006008562208631875B58B54ECDA5A"
+ "4F9FE9EAABA6", 16))
+
+
+class RFC7027(ECDH):
+ # https://tools.ietf.org/html/rfc7027#appendix-A
+
+ def test_brainpoolP256r1(self):
+ self._do(
+ curve=curve_brainpoolp256r1,
+ generator=BRAINPOOLP256r1.generator,
+ dA=int("81DB1EE100150FF2EA338D708271BE38300CB54241D79950F77B0630398"
+ "04F1D", 16),
+ x_qA=int("44106E913F92BC02A1705D9953A8414DB95E1AAA49E81D9E85F929A8E"
+ "3100BE5", 16),
+ y_qA=int("8AB4846F11CACCB73CE49CBDD120F5A900A69FD32C272223F789EF10E"
+ "B089BDC", 16),
+ dB=int("55E40BC41E37E3E2AD25C3C6654511FFA8474A91A0032087593852D3E7D"
+ "76BD3", 16),
+ x_qB=int("8D2D688C6CF93E1160AD04CC4429117DC2C41825E1E9FCA0ADDD34E6F"
+ "1B39F7B", 16),
+ y_qB=int("990C57520812BE512641E47034832106BC7D3E8DD0E4C7F1136D70065"
+ "47CEC6A", 16),
+ x_Z=int("89AFC39D41D3B327814B80940B042590F96556EC91E6AE7939BCE31F3A"
+ "18BF2B", 16),
+ y_Z=int("49C27868F4ECA2179BFD7D59B1E3BF34C1DBDE61AE12931648F43E5963"
+ "2504DE", 16))
+
+ def test_brainpoolP384r1(self):
+ self._do(
+ curve=curve_brainpoolp384r1,
+ generator=BRAINPOOLP384r1.generator,
+ dA=int("1E20F5E048A5886F1F157C74E91BDE2B98C8B52D58E5003D57053FC4B0B"
+ "D65D6F15EB5D1EE1610DF870795143627D042", 16),
+ x_qA=int("68B665DD91C195800650CDD363C625F4E742E8134667B767B1B476793"
+ "588F885AB698C852D4A6E77A252D6380FCAF068", 16),
+ y_qA=int("55BC91A39C9EC01DEE36017B7D673A931236D2F1F5C83942D049E3FA2"
+ "0607493E0D038FF2FD30C2AB67D15C85F7FAA59", 16),
+ dB=int("032640BC6003C59260F7250C3DB58CE647F98E1260ACCE4ACDA3DD869F7"
+ "4E01F8BA5E0324309DB6A9831497ABAC96670", 16),
+ x_qB=int("4D44326F269A597A5B58BBA565DA5556ED7FD9A8A9EB76C25F46DB69D"
+ "19DC8CE6AD18E404B15738B2086DF37E71D1EB4", 16),
+ y_qB=int("62D692136DE56CBE93BF5FA3188EF58BC8A3A0EC6C1E151A21038A42E"
+ "9185329B5B275903D192F8D4E1F32FE9CC78C48", 16),
+ x_Z=int("0BD9D3A7EA0B3D519D09D8E48D0785FB744A6B355E6304BC51C229FBBC"
+ "E239BBADF6403715C35D4FB2A5444F575D4F42", 16),
+ y_Z=int("0DF213417EBE4D8E40A5F76F66C56470C489A3478D146DECF6DF0D94BA"
+ "E9E598157290F8756066975F1DB34B2324B7BD", 16))
+
+ def test_brainpoolP512r1(self):
+ self._do(
+ curve=curve_brainpoolp512r1,
+ generator=BRAINPOOLP512r1.generator,
+ dA=int("16302FF0DBBB5A8D733DAB7141C1B45ACBC8715939677F6A56850A38BD8"
+ "7BD59B09E80279609FF333EB9D4C061231FB26F92EEB04982A5F1D1764C"
+ "AD57665422", 16),
+ x_qA=int("0A420517E406AAC0ACDCE90FCD71487718D3B953EFD7FBEC5F7F27E28"
+ "C6149999397E91E029E06457DB2D3E640668B392C2A7E737A7F0BF044"
+ "36D11640FD09FD", 16),
+ y_qA=int("72E6882E8DB28AAD36237CD25D580DB23783961C8DC52DFA2EC138AD4"
+ "72A0FCEF3887CF62B623B2A87DE5C588301EA3E5FC269B373B60724F5"
+ "E82A6AD147FDE7", 16),
+ dB=int("230E18E1BCC88A362FA54E4EA3902009292F7F8033624FD471B5D8ACE49"
+ "D12CFABBC19963DAB8E2F1EBA00BFFB29E4D72D13F2224562F405CB8050"
+ "3666B25429", 16),
+ x_qB=int("9D45F66DE5D67E2E6DB6E93A59CE0BB48106097FF78A081DE781CDB31"
+ "FCE8CCBAAEA8DD4320C4119F1E9CD437A2EAB3731FA9668AB268D871D"
+ "EDA55A5473199F", 16),
+ y_qB=int("2FDC313095BCDD5FB3A91636F07A959C8E86B5636A1E930E8396049CB"
+ "481961D365CC11453A06C719835475B12CB52FC3C383BCE35E27EF194"
+ "512B71876285FA", 16),
+ x_Z=int("A7927098655F1F9976FA50A9D566865DC530331846381C87256BAF3226"
+ "244B76D36403C024D7BBF0AA0803EAFF405D3D24F11A9B5C0BEF679FE1"
+ "454B21C4CD1F", 16),
+ y_Z=int("7DB71C3DEF63212841C463E881BDCF055523BD368240E6C3143BD8DEF8"
+ "B3B3223B95E0F53082FF5E412F4222537A43DF1C6D25729DDB51620A83"
+ "2BE6A26680A2", 16))
+
+
+# https://tools.ietf.org/html/rfc4754#page-5
+@pytest.mark.parametrize("w, gwx, gwy, k, msg, md, r, s, curve",
+ [pytest.param(
+ "DC51D3866A15BACDE33D96F992FCA99DA7E6EF0934E7097559C27F1614C88A7F",
+ "2442A5CC0ECD015FA3CA31DC8E2BBC70BF42D60CBCA20085E0822CB04235E970",
+ "6FC98BD7E50211A4A27102FA3549DF79EBCB4BF246B80945CDDFE7D509BBFD7D",
+ "9E56F509196784D963D1C0A401510EE7ADA3DCC5DEE04B154BF61AF1D5A6DECE",
+ b"abc",
+ sha256,
+ "CB28E0999B9C7715FD0A80D8E47A77079716CBBF917DD72E97566EA1C066957C",
+ "86FA3BB4E26CAD5BF90B7F81899256CE7594BB1EA0C89212748BFF3B3D5B0315",
+ NIST256p,
+ id="ECDSA-256"),
+ pytest.param(
+ "0BEB646634BA87735D77AE4809A0EBEA865535DE4C1E1DCB692E84708E81A5AF"
+ "62E528C38B2A81B35309668D73524D9F",
+ "96281BF8DD5E0525CA049C048D345D3082968D10FEDF5C5ACA0C64E6465A97EA"
+ "5CE10C9DFEC21797415710721F437922",
+ "447688BA94708EB6E2E4D59F6AB6D7EDFF9301D249FE49C33096655F5D502FAD"
+ "3D383B91C5E7EDAA2B714CC99D5743CA",
+ "B4B74E44D71A13D568003D7489908D564C7761E229C58CBFA18950096EB7463B"
+ "854D7FA992F934D927376285E63414FA",
+ b'abc',
+ sha384,
+ "FB017B914E29149432D8BAC29A514640B46F53DDAB2C69948084E2930F1C8F7E"
+ "08E07C9C63F2D21A07DCB56A6AF56EB3",
+ "B263A1305E057F984D38726A1B46874109F417BCA112674C528262A40A629AF1"
+ "CBB9F516CE0FA7D2FF630863A00E8B9F",
+ NIST384p,
+ id="ECDSA-384"),
+ pytest.param(
+ "0065FDA3409451DCAB0A0EAD45495112A3D813C17BFD34BDF8C1209D7DF58491"
+ "20597779060A7FF9D704ADF78B570FFAD6F062E95C7E0C5D5481C5B153B48B37"
+ "5FA1",
+ "0151518F1AF0F563517EDD5485190DF95A4BF57B5CBA4CF2A9A3F6474725A35F"
+ "7AFE0A6DDEB8BEDBCD6A197E592D40188901CECD650699C9B5E456AEA5ADD190"
+ "52A8",
+ "006F3B142EA1BFFF7E2837AD44C9E4FF6D2D34C73184BBAD90026DD5E6E85317"
+ "D9DF45CAD7803C6C20035B2F3FF63AFF4E1BA64D1C077577DA3F4286C58F0AEA"
+ "E643",
+ "00C1C2B305419F5A41344D7E4359933D734096F556197A9B244342B8B62F46F9"
+ "373778F9DE6B6497B1EF825FF24F42F9B4A4BD7382CFC3378A540B1B7F0C1B95"
+ "6C2F",
+ b'abc',
+ sha512,
+ "0154FD3836AF92D0DCA57DD5341D3053988534FDE8318FC6AAAAB68E2E6F4339"
+ "B19F2F281A7E0B22C269D93CF8794A9278880ED7DBB8D9362CAEACEE54432055"
+ "2251",
+ "017705A7030290D1CEB605A9A1BB03FF9CDD521E87A696EC926C8C10C8362DF4"
+ "975367101F67D1CF9BCCBF2F3D239534FA509E70AAC851AE01AAC68D62F86647"
+ "2660",
+ NIST521p,
+ id="ECDSA-521")
+ ])
+def test_RFC4754_vectors(w, gwx, gwy, k, msg, md, r, s, curve):
+ sk = SigningKey.from_string(unhexlify(w), curve)
+ vk = VerifyingKey.from_string(unhexlify(gwx + gwy), curve)
+ assert sk.verifying_key == vk
+ sig = sk.sign(msg, hashfunc=md, sigencode=sigencode_strings, k=int(k, 16))
+
+ assert sig == (unhexlify(r), unhexlify(s))
+
+ assert vk.verify(sig, msg, md, sigdecode_strings)
diff --git a/third_party/python/ecdsa/src/ecdsa/test_rw_lock.py b/third_party/python/ecdsa/src/ecdsa/test_rw_lock.py
new file mode 100644
index 0000000000..de11d15622
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/test_rw_lock.py
@@ -0,0 +1,175 @@
+# Copyright Mateusz Kobos, (c) 2011
+# https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/
+# released under the MIT licence
+
+import unittest
+import threading
+import time
+import copy
+from ._rwlock import RWLock
+
+
+class Writer(threading.Thread):
+ def __init__(self, buffer_, rw_lock, init_sleep_time, sleep_time, to_write):
+ """
+ @param buffer_: common buffer_ shared by the readers and writers
+ @type buffer_: list
+ @type rw_lock: L{RWLock}
+ @param init_sleep_time: sleep time before doing any action
+ @type init_sleep_time: C{float}
+ @param sleep_time: sleep time while in critical section
+ @type sleep_time: C{float}
+ @param to_write: data that will be appended to the buffer
+ """
+ threading.Thread.__init__(self)
+ self.__buffer = buffer_
+ self.__rw_lock = rw_lock
+ self.__init_sleep_time = init_sleep_time
+ self.__sleep_time = sleep_time
+ self.__to_write = to_write
+ self.entry_time = None
+ """Time of entry to the critical section"""
+ self.exit_time = None
+ """Time of exit from the critical section"""
+
+ def run(self):
+ time.sleep(self.__init_sleep_time)
+ self.__rw_lock.writer_acquire()
+ self.entry_time = time.time()
+ time.sleep(self.__sleep_time)
+ self.__buffer.append(self.__to_write)
+ self.exit_time = time.time()
+ self.__rw_lock.writer_release()
+
+
+class Reader(threading.Thread):
+ def __init__(self, buffer_, rw_lock, init_sleep_time, sleep_time):
+ """
+ @param buffer_: common buffer shared by the readers and writers
+ @type buffer_: list
+ @type rw_lock: L{RWLock}
+ @param init_sleep_time: sleep time before doing any action
+ @type init_sleep_time: C{float}
+ @param sleep_time: sleep time while in critical section
+ @type sleep_time: C{float}
+ """
+ threading.Thread.__init__(self)
+ self.__buffer = buffer_
+ self.__rw_lock = rw_lock
+ self.__init_sleep_time = init_sleep_time
+ self.__sleep_time = sleep_time
+ self.buffer_read = None
+        """a copy of the buffer read while in the critical section"""
+ self.entry_time = None
+ """Time of entry to the critical section"""
+ self.exit_time = None
+ """Time of exit from the critical section"""
+
+ def run(self):
+ time.sleep(self.__init_sleep_time)
+ self.__rw_lock.reader_acquire()
+ self.entry_time = time.time()
+ time.sleep(self.__sleep_time)
+ self.buffer_read = copy.deepcopy(self.__buffer)
+ self.exit_time = time.time()
+ self.__rw_lock.reader_release()
+
+
+class RWLockTestCase(unittest.TestCase):
+ def test_readers_nonexclusive_access(self):
+ (buffer_, rw_lock, threads) = self.__init_variables()
+
+ threads.append(Reader(buffer_, rw_lock, 0, 0))
+ threads.append(Writer(buffer_, rw_lock, 0.2, 0.4, 1))
+ threads.append(Reader(buffer_, rw_lock, 0.3, 0.3))
+ threads.append(Reader(buffer_, rw_lock, 0.5, 0))
+
+ self.__start_and_join_threads(threads)
+
+ ## The third reader should enter after the second one but it should
+ ## exit before the second one exits
+ ## (i.e. the readers should be in the critical section
+ ## at the same time)
+
+ self.assertEqual([], threads[0].buffer_read)
+ self.assertEqual([1], threads[2].buffer_read)
+ self.assertEqual([1], threads[3].buffer_read)
+ self.assert_(threads[1].exit_time <= threads[2].entry_time)
+ self.assert_(threads[2].entry_time <= threads[3].entry_time)
+ self.assert_(threads[3].exit_time < threads[2].exit_time)
+
+ def test_writers_exclusive_access(self):
+ (buffer_, rw_lock, threads) = self.__init_variables()
+
+ threads.append(Writer(buffer_, rw_lock, 0, 0.4, 1))
+ threads.append(Writer(buffer_, rw_lock, 0.1, 0, 2))
+ threads.append(Reader(buffer_, rw_lock, 0.2, 0))
+
+ self.__start_and_join_threads(threads)
+
+ ## The second writer should wait for the first one to exit
+
+ self.assertEqual([1, 2], threads[2].buffer_read)
+ self.assert_(threads[0].exit_time <= threads[1].entry_time)
+ self.assert_(threads[1].exit_time <= threads[2].exit_time)
+
+ def test_writer_priority(self):
+ (buffer_, rw_lock, threads) = self.__init_variables()
+
+ threads.append(Writer(buffer_, rw_lock, 0, 0, 1))
+ threads.append(Reader(buffer_, rw_lock, 0.1, 0.4))
+ threads.append(Writer(buffer_, rw_lock, 0.2, 0, 2))
+ threads.append(Reader(buffer_, rw_lock, 0.3, 0))
+ threads.append(Reader(buffer_, rw_lock, 0.3, 0))
+
+ self.__start_and_join_threads(threads)
+
+ ## The second writer should go before the second and the third reader
+
+ self.assertEqual([1], threads[1].buffer_read)
+ self.assertEqual([1, 2], threads[3].buffer_read)
+ self.assertEqual([1, 2], threads[4].buffer_read)
+ self.assert_(threads[0].exit_time < threads[1].entry_time)
+ self.assert_(threads[1].exit_time <= threads[2].entry_time)
+ self.assert_(threads[2].exit_time <= threads[3].entry_time)
+ self.assert_(threads[2].exit_time <= threads[4].entry_time)
+
+ def test_many_writers_priority(self):
+ (buffer_, rw_lock, threads) = self.__init_variables()
+
+ threads.append(Writer(buffer_, rw_lock, 0, 0, 1))
+ threads.append(Reader(buffer_, rw_lock, 0.1, 0.6))
+ threads.append(Writer(buffer_, rw_lock, 0.2, 0.1, 2))
+ threads.append(Reader(buffer_, rw_lock, 0.3, 0))
+ threads.append(Reader(buffer_, rw_lock, 0.4, 0))
+ threads.append(Writer(buffer_, rw_lock, 0.5, 0.1, 3))
+
+ self.__start_and_join_threads(threads)
+
+        ## The last two writers should go first -- after the first reader and
+        ## before the second and the third reader
+
+ self.assertEqual([1], threads[1].buffer_read)
+ self.assertEqual([1, 2, 3], threads[3].buffer_read)
+ self.assertEqual([1, 2, 3], threads[4].buffer_read)
+ self.assert_(threads[0].exit_time < threads[1].entry_time)
+ self.assert_(threads[1].exit_time <= threads[2].entry_time)
+ self.assert_(threads[1].exit_time <= threads[5].entry_time)
+ self.assert_(threads[2].exit_time <= threads[3].entry_time)
+ self.assert_(threads[2].exit_time <= threads[4].entry_time)
+ self.assert_(threads[5].exit_time <= threads[3].entry_time)
+ self.assert_(threads[5].exit_time <= threads[4].entry_time)
+
+ @staticmethod
+ def __init_variables():
+ buffer_ = []
+ rw_lock = RWLock()
+ threads = []
+ return (buffer_, rw_lock, threads)
+
+ @staticmethod
+ def __start_and_join_threads(threads):
+ for t in threads:
+ t.start()
+ for t in threads:
+ t.join()
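+
+
+# The thread classes above exercise RWLock under contention; the function
+# below is a minimal single-threaded usage sketch of the same
+# acquire/release pairing (writer_acquire/writer_release for exclusive
+# access, reader_acquire/reader_release for shared access).  The helper name
+# is illustrative only and is not used by the tests.
+def _example_rw_lock_usage():
+    shared = []
+    rw_lock = RWLock()
+
+    rw_lock.writer_acquire()
+    try:
+        # writers get exclusive access to the shared buffer
+        shared.append(1)
+    finally:
+        rw_lock.writer_release()
+
+    rw_lock.reader_acquire()
+    try:
+        # readers may hold the lock concurrently with other readers
+        snapshot = copy.deepcopy(shared)
+    finally:
+        rw_lock.reader_release()
+
+    return snapshot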
diff --git a/third_party/python/ecdsa/src/ecdsa/util.py b/third_party/python/ecdsa/src/ecdsa/util.py
new file mode 100644
index 0000000000..5f1c7500b6
--- /dev/null
+++ b/third_party/python/ecdsa/src/ecdsa/util.py
@@ -0,0 +1,401 @@
+from __future__ import division
+
+import os
+import math
+import binascii
+import sys
+from hashlib import sha256
+from six import PY3, int2byte, b, next
+from . import der
+from ._compat import normalise_bytes
+
+# RFC5480:
+# The "unrestricted" algorithm identifier is:
+# id-ecPublicKey OBJECT IDENTIFIER ::= {
+# iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 }
+
+oid_ecPublicKey = (1, 2, 840, 10045, 2, 1)
+encoded_oid_ecPublicKey = der.encode_oid(*oid_ecPublicKey)
+
+if sys.version > '3':
+ def entropy_to_bits(ent_256):
+ """Convert a bytestring to string of 0's and 1's"""
+ return bin(int.from_bytes(ent_256, 'big'))[2:].zfill(len(ent_256)*8)
+else:
+ def entropy_to_bits(ent_256):
+ """Convert a bytestring to string of 0's and 1's"""
+ return ''.join(bin(ord(x))[2:].zfill(8) for x in ent_256)
+
+
+if sys.version < '2.7':
+ # Can't add a method to a built-in type so we are stuck with this
+ def bit_length(x):
+ return len(bin(x)) - 2
+else:
+ def bit_length(x):
+ return x.bit_length() or 1
+
+
+def orderlen(order):
+ return (1+len("%x" % order))//2 # bytes
+
+
+def randrange(order, entropy=None):
+ """Return a random integer k such that 1 <= k < order, uniformly
+ distributed across that range. Worst case should be a mean of 2 loops at
+ (2**k)+2.
+
+ Note that this function is not declared to be forwards-compatible: we may
+ change the behavior in future releases. The entropy= argument (which
+ should get a callable that behaves like os.urandom) can be used to
+ achieve stability within a given release (for repeatable unit tests), but
+ should not be used as a long-term-compatible key generation algorithm.
+ """
+ assert order > 1
+ if entropy is None:
+ entropy = os.urandom
+ upper_2 = bit_length(order-2)
+ upper_256 = upper_2//8 + 1
+ while True: # I don't think this needs a counter with bit-wise randrange
+ ent_256 = entropy(upper_256)
+ ent_2 = entropy_to_bits(ent_256)
+ rand_num = int(ent_2[:upper_2], base=2) + 1
+ if 0 < rand_num < order:
+ return rand_num
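+
+
+# A minimal sketch of the entropy= hook described in the docstring above: any
+# callable that, like os.urandom, maps a byte count to that many bytes can be
+# supplied.  The helper below derives the bytes from a fixed seed so the draw
+# is repeatable (useful for unit tests); it is illustrative only and not part
+# of the upstream API.
+def _example_repeatable_randrange(order, seed=b"fixed test seed"):
+    """Return the same value in [1, order) every time for a given seed."""
+    state = {"counter": 0}
+
+    def fixed_entropy(numbytes):
+        # stretch the seed with a counter so any number of bytes is available
+        out = b""
+        while len(out) < numbytes:
+            out += sha256(seed + str(state["counter"]).encode()).digest()
+            state["counter"] += 1
+        return out[:numbytes]
+
+    return randrange(order, entropy=fixed_entropy)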
+
+
+class PRNG:
+ # this returns a callable which, when invoked with an integer N, will
+ # return N pseudorandom bytes. Note: this is a short-term PRNG, meant
+ # primarily for the needs of randrange_from_seed__trytryagain(), which
+ # only needs to run it a few times per seed. It does not provide
+ # protection against state compromise (forward security).
+ def __init__(self, seed):
+ self.generator = self.block_generator(seed)
+
+ def __call__(self, numbytes):
+ a = [next(self.generator) for i in range(numbytes)]
+
+ if PY3:
+ return bytes(a)
+ else:
+ return "".join(a)
+
+ def block_generator(self, seed):
+ counter = 0
+ while True:
+ for byte in sha256(("prng-%d-%s" % (counter, seed)).encode()).digest():
+ yield byte
+ counter += 1
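+
+
+# A small sketch of the PRNG contract spelled out in the comment above: an
+# instance is a callable mapping a byte count to that many pseudorandom
+# bytes, and the stream is a pure function of the seed.  Illustrative helper
+# only, not part of the upstream API.
+def _example_prng_is_deterministic(seed="example seed", numbytes=32):
+    """Return True: two PRNGs built from the same seed emit the same bytes."""
+    return PRNG(seed)(numbytes) == PRNG(seed)(numbytes)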
+
+
+def randrange_from_seed__overshoot_modulo(seed, order):
+ # hash the data, then turn the digest into a number in [1,order).
+ #
+ # We use David-Sarah Hopwood's suggestion: turn it into a number that's
+ # sufficiently larger than the group order, then modulo it down to fit.
+ # This should give adequate (but not perfect) uniformity, and simple
+ # code. There are other choices: try-try-again is the main one.
+ base = PRNG(seed)(2 * orderlen(order))
+ number = (int(binascii.hexlify(base), 16) % (order - 1)) + 1
+ assert 1 <= number < order, (1, number, order)
+ return number
+
+
+def lsb_of_ones(numbits):
+ return (1 << numbits) - 1
+
+
+def bits_and_bytes(order):
+ bits = int(math.log(order - 1, 2) + 1)
+ bytes = bits // 8
+ extrabits = bits % 8
+ return bits, bytes, extrabits
+
+
+# the following randrange_from_seed__METHOD() functions take an
+# arbitrarily-sized secret seed and turn it into a number that obeys the same
+# range limits as randrange() above. They are meant for deriving consistent
+# signing keys from a secret rather than generating them randomly, for
+# example a protocol in which three signing keys are derived from a master
+# secret. You should use a uniformly-distributed unguessable seed with about
+# curve.baselen bytes of entropy. To use one, do this:
+# seed = os.urandom(curve.baselen) # or other starting point
+#  secexp = ecdsa.util.randrange_from_seed__trytryagain(seed, curve.order)
+# sk = SigningKey.from_secret_exponent(secexp, curve)
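+#
+# A hedged, runnable version of that recipe is sketched below; the helper
+# name is illustrative only, and SigningKey (from ecdsa.keys) is imported
+# locally to avoid a circular import at module load time.
+
+
+def _example_signing_key_from_seed(seed, curve):
+    """Derive a repeatable SigningKey from `seed` on `curve` (sketch only)."""
+    from .keys import SigningKey
+
+    secexp = randrange_from_seed__trytryagain(seed, curve.order)
+    return SigningKey.from_secret_exponent(secexp, curve)
+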
+
+def randrange_from_seed__truncate_bytes(seed, order, hashmod=sha256):
+ # hash the seed, then turn the digest into a number in [1,order), but
+ # don't worry about trying to uniformly fill the range. This will lose,
+ # on average, four bits of entropy.
+ bits, _bytes, extrabits = bits_and_bytes(order)
+ if extrabits:
+ _bytes += 1
+ base = hashmod(seed).digest()[:_bytes]
+ base = "\x00" * (_bytes - len(base)) + base
+ number = 1 + int(binascii.hexlify(base), 16)
+ assert 1 <= number < order
+ return number
+
+
+def randrange_from_seed__truncate_bits(seed, order, hashmod=sha256):
+    # like randrange_from_seed__truncate_bytes, but only lose an average of
+ # half a bit
+ bits = int(math.log(order - 1, 2) + 1)
+ maxbytes = (bits + 7) // 8
+ base = hashmod(seed).digest()[:maxbytes]
+ base = "\x00" * (maxbytes - len(base)) + base
+ topbits = 8 * maxbytes - bits
+ if topbits:
+ base = int2byte(ord(base[0]) & lsb_of_ones(topbits)) + base[1:]
+ number = 1 + int(binascii.hexlify(base), 16)
+ assert 1 <= number < order
+ return number
+
+
+def randrange_from_seed__trytryagain(seed, order):
+ # figure out exactly how many bits we need (rounded up to the nearest
+ # bit), so we can reduce the chance of looping to less than 0.5 . This is
+ # specified to feed from a byte-oriented PRNG, and discards the
+ # high-order bits of the first byte as necessary to get the right number
+ # of bits. The average number of loops will range from 1.0 (when
+ # order=2**k-1) to 2.0 (when order=2**k+1).
+ assert order > 1
+ bits, bytes, extrabits = bits_and_bytes(order)
+ generate = PRNG(seed)
+ while True:
+ extrabyte = b("")
+ if extrabits:
+ extrabyte = int2byte(ord(generate(1)) & lsb_of_ones(extrabits))
+ guess = string_to_number(extrabyte + generate(bytes)) + 1
+ if 1 <= guess < order:
+ return guess
+
+
+def number_to_string(num, order):
+ l = orderlen(order)
+ fmt_str = "%0" + str(2 * l) + "x"
+ string = binascii.unhexlify((fmt_str % num).encode())
+ assert len(string) == l, (len(string), l)
+ return string
+
+
+def number_to_string_crop(num, order):
+ l = orderlen(order)
+ fmt_str = "%0" + str(2 * l) + "x"
+ string = binascii.unhexlify((fmt_str % num).encode())
+ return string[:l]
+
+
+def string_to_number(string):
+ return int(binascii.hexlify(string), 16)
+
+
+def string_to_number_fixedlen(string, order):
+ l = orderlen(order)
+ assert len(string) == l, (len(string), l)
+ return int(binascii.hexlify(string), 16)
+
+
+# these methods are useful for the sigencode= argument to SK.sign() and the
+# sigdecode= argument to VK.verify(), and control how the signature is packed
+# or unpacked.
+
+def sigencode_strings(r, s, order):
+ r_str = number_to_string(r, order)
+ s_str = number_to_string(s, order)
+ return (r_str, s_str)
+
+
+def sigencode_string(r, s, order):
+ """
+ Encode the signature to raw format (:term:`raw encoding`)
+
+    It's expected that this function will be used as the `sigencode=`
+    parameter to the :func:`ecdsa.keys.SigningKey.sign` method.
+
+ :param int r: first parameter of the signature
+ :param int s: second parameter of the signature
+ :param int order: the order of the curve over which the signature was
+ computed
+
+ :return: raw encoding of ECDSA signature
+ :rtype: bytes
+ """
+ # for any given curve, the size of the signature numbers is
+ # fixed, so just use simple concatenation
+ r_str, s_str = sigencode_strings(r, s, order)
+ return r_str + s_str
+
+
+def sigencode_der(r, s, order):
+ """
+ Encode the signature into the ECDSA-Sig-Value structure using :term:`DER`.
+
+ Encodes the signature to the following :term:`ASN.1` structure::
+
+ Ecdsa-Sig-Value ::= SEQUENCE {
+ r INTEGER,
+ s INTEGER
+ }
+
+    It's expected that this function will be used as the `sigencode=`
+    parameter to the :func:`ecdsa.keys.SigningKey.sign` method.
+
+ :param int r: first parameter of the signature
+ :param int s: second parameter of the signature
+ :param int order: the order of the curve over which the signature was
+ computed
+
+ :return: DER encoding of ECDSA signature
+ :rtype: bytes
+ """
+ return der.encode_sequence(der.encode_integer(r), der.encode_integer(s))
+
+
+# canonical versions of sigencode methods
+# these enforce low S values, by negating the value (modulo the order) if above order/2
+# see CECKey::Sign() https://github.com/bitcoin/bitcoin/blob/master/src/key.cpp#L214
+def sigencode_strings_canonize(r, s, order):
+ if s > order / 2:
+ s = order - s
+ return sigencode_strings(r, s, order)
+
+
+def sigencode_string_canonize(r, s, order):
+ if s > order / 2:
+ s = order - s
+ return sigencode_string(r, s, order)
+
+
+def sigencode_der_canonize(r, s, order):
+ if s > order / 2:
+ s = order - s
+ return sigencode_der(r, s, order)
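+
+
+# A minimal sketch of the low-S property enforced by the *_canonize helpers
+# above: assuming 0 <= r, s < order, decoding a canonicalized raw signature
+# gives back r unchanged and the smaller of s and order - s.  The helper name
+# is illustrative only; sigdecode_string is defined later in this module and
+# is resolved at call time.
+def _example_low_s_roundtrip(r, s, order):
+    """Return the (r, s) pair recovered from a canonicalized raw signature."""
+    canonical = sigencode_string_canonize(r, s, order)
+    return sigdecode_string(canonical, order)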
+
+
+class MalformedSignature(Exception):
+ """
+ Raised by decoding functions when the signature is malformed.
+
+ Malformed in this context means that the relevant strings or integers
+    do not match what a signature over the provided curve would create, either
+    because the byte strings have incorrect lengths or because the encoded
+    values are too large.
+ """
+
+ pass
+
+
+def sigdecode_string(signature, order):
+ """
+ Decoder for :term:`raw encoding` of ECDSA signatures.
+
+    Raw encoding is a simple concatenation of the two integers that comprise
+    the signature, with each encoded using the same number of bytes, which
+    depends on the curve size/order.
+
+ It's expected that this function will be used as the `sigdecode=`
+ parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method.
+
+ :param signature: encoded signature
+ :type signature: bytes like object
+ :param order: order of the curve over which the signature was computed
+ :type order: int
+
+ :raises MalformedSignature: when the encoding of the signature is invalid
+
+ :return: tuple with decoded 'r' and 's' values of signature
+ :rtype: tuple of ints
+ """
+ signature = normalise_bytes(signature)
+ l = orderlen(order)
+ if not len(signature) == 2 * l:
+ raise MalformedSignature(
+ "Invalid length of signature, expected {0} bytes long, "
+ "provided string is {1} bytes long"
+ .format(2 * l, len(signature)))
+ r = string_to_number_fixedlen(signature[:l], order)
+ s = string_to_number_fixedlen(signature[l:], order)
+ return r, s
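+
+
+# A tiny round-trip sketch tying sigencode_string and sigdecode_string
+# together; for 0 <= r, s < order the encode/decode pair is lossless.  The
+# helper name is illustrative only, not part of the upstream API.
+def _example_raw_roundtrip(r, s, order):
+    """Encode (r, s) with the raw encoding and decode it straight back."""
+    return sigdecode_string(sigencode_string(r, s, order), order)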
+
+
+def sigdecode_strings(rs_strings, order):
+ """
+ Decode the signature from two strings.
+
+    The first string needs to be a big-endian encoding of 'r', the second a
+    big-endian encoding of the 's' parameter of an ECDSA signature.
+
+ It's expected that this function will be used as the `sigdecode=`
+ parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method.
+
+ :param list rs_strings: list of two bytes-like objects, each encoding one
+ parameter of signature
+ :param int order: order of the curve over which the signature was computed
+
+ :raises MalformedSignature: when the encoding of the signature is invalid
+
+ :return: tuple with decoded 'r' and 's' values of signature
+ :rtype: tuple of ints
+ """
+ if not len(rs_strings) == 2:
+ raise MalformedSignature(
+ "Invalid number of strings provided: {0}, expected 2"
+ .format(len(rs_strings)))
+ (r_str, s_str) = rs_strings
+ r_str = normalise_bytes(r_str)
+ s_str = normalise_bytes(s_str)
+ l = orderlen(order)
+ if not len(r_str) == l:
+ raise MalformedSignature(
+ "Invalid length of first string ('r' parameter), "
+ "expected {0} bytes long, provided string is {1} bytes long"
+ .format(l, len(r_str)))
+ if not len(s_str) == l:
+ raise MalformedSignature(
+ "Invalid length of second string ('s' parameter), "
+ "expected {0} bytes long, provided string is {1} bytes long"
+ .format(l, len(s_str)))
+ r = string_to_number_fixedlen(r_str, order)
+ s = string_to_number_fixedlen(s_str, order)
+ return r, s
+
+
+def sigdecode_der(sig_der, order):
+ """
+ Decoder for DER format of ECDSA signatures.
+    The DER format of a signature uses the :term:`ASN.1` :term:`DER`
+    rules to encode it as a sequence of two integers::
+ rules to encode it as a sequence of two integers::
+
+ Ecdsa-Sig-Value ::= SEQUENCE {
+ r INTEGER,
+ s INTEGER
+ }
+
+    It's expected that this function will be used as the `sigdecode=`
+ parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method.
+
+ :param sig_der: encoded signature
+ :type sig_der: bytes like object
+ :param order: order of the curve over which the signature was computed
+ :type order: int
+
+ :raises UnexpectedDER: when the encoding of signature is invalid
+
+ :return: tuple with decoded 'r' and 's' values of signature
+ :rtype: tuple of ints
+ """
+ sig_der = normalise_bytes(sig_der)
+ # return der.encode_sequence(der.encode_integer(r), der.encode_integer(s))
+ rs_strings, empty = der.remove_sequence(sig_der)
+ if empty != b"":
+ raise der.UnexpectedDER("trailing junk after DER sig: %s" %
+ binascii.hexlify(empty))
+ r, rest = der.remove_integer(rs_strings)
+ s, empty = der.remove_integer(rest)
+ if empty != b"":
+ raise der.UnexpectedDER("trailing junk after DER numbers: %s" %
+ binascii.hexlify(empty))
+ return r, s
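+
+
+# A matching sketch for the DER helpers: sigencode_der and sigdecode_der form
+# a lossless pair for non-negative r and s.  The order argument is accepted
+# only for interface compatibility; the DER codec ignores it.  Illustrative
+# helper only, not part of the upstream API.
+def _example_der_roundtrip(r, s, order=None):
+    """DER-encode (r, s) and decode the result back to the same pair."""
+    return sigdecode_der(sigencode_der(r, s, order), order)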
diff --git a/third_party/python/ecdsa/versioneer.py b/third_party/python/ecdsa/versioneer.py
new file mode 100644
index 0000000000..f250cde55b
--- /dev/null
+++ b/third_party/python/ecdsa/versioneer.py
@@ -0,0 +1,1817 @@
+
+# Version: 0.17
+
+"""The Versioneer - like a rocketeer, but for versions.
+
+The Versioneer
+==============
+
+* like a rocketeer, but for versions!
+* https://github.com/warner/python-versioneer
+* Brian Warner
+* License: Public Domain
+* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, and pypy
+* [![Latest Version]
+(https://pypip.in/version/versioneer/badge.svg?style=flat)
+](https://pypi.python.org/pypi/versioneer/)
+* [![Build Status]
+(https://travis-ci.org/warner/python-versioneer.png?branch=master)
+](https://travis-ci.org/warner/python-versioneer)
+
+This is a tool for managing a recorded version number in distutils-based
+python projects. The goal is to remove the tedious and error-prone "update
+the embedded version string" step from your release process. Making a new
+release should be as easy as recording a new tag in your version-control
+system, and maybe making new tarballs.
+
+
+## Quick Install
+
+* `pip install versioneer` to somewhere in your $PATH
+* add a `[versioneer]` section to your setup.cfg (see below)
+* run `versioneer install` in your source tree, commit the results
+
+## Version Identifiers
+
+Source trees come from a variety of places:
+
+* a version-control system checkout (mostly used by developers)
+* a nightly tarball, produced by build automation
+* a snapshot tarball, produced by a web-based VCS browser, like github's
+ "tarball from tag" feature
+* a release tarball, produced by "setup.py sdist", distributed through PyPI
+
+Within each source tree, the version identifier (either a string or a number,
+this tool is format-agnostic) can come from a variety of places:
+
+* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
+ about recent "tags" and an absolute revision-id
+* the name of the directory into which the tarball was unpacked
+* an expanded VCS keyword ($Id$, etc)
+* a `_version.py` created by some earlier build step
+
+For released software, the version identifier is closely related to a VCS
+tag. Some projects use tag names that include more than just the version
+string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
+needs to strip the tag prefix to extract the version identifier. For
+unreleased software (between tags), the version identifier should provide
+enough information to help developers recreate the same tree, while also
+giving them an idea of roughly how old the tree is (after version 1.2, before
+version 1.3). Many VCS systems can report a description that captures this,
+for example `git describe --tags --dirty --always` reports things like
+"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
+0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
+uncommitted changes).
+
+The version identifier is used for multiple purposes:
+
+* to allow the module to self-identify its version: `myproject.__version__`
+* to choose a name and prefix for a 'setup.py sdist' tarball
+
+## Theory of Operation
+
+Versioneer works by adding a special `_version.py` file into your source
+tree, where your `__init__.py` can import it. This `_version.py` knows how to
+dynamically ask the VCS tool for version information at import time.
+
+`_version.py` also contains `$Revision$` markers, and the installation
+process marks `_version.py` to have this marker rewritten with a tag name
+during the `git archive` command. As a result, generated tarballs will
+contain enough information to get the proper version.
+
+To allow `setup.py` to compute a version too, a `versioneer.py` is added to
+the top level of your source tree, next to `setup.py` and the `setup.cfg`
+that configures it. This overrides several distutils/setuptools commands to
+compute the version when invoked, and changes `setup.py build` and `setup.py
+sdist` to replace `_version.py` with a small static file that contains just
+the generated version data.
+
+## Installation
+
+See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
+
+## Version-String Flavors
+
+Code which uses Versioneer can learn about its version string at runtime by
+importing `_version` from your main `__init__.py` file and running the
+`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
+import the top-level `versioneer.py` and run `get_versions()`.
+
+Both functions return a dictionary with different flavors of version
+information:
+
+* `['version']`: A condensed version string, rendered using the selected
+ style. This is the most commonly used value for the project's version
+ string. The default "pep440" style yields strings like `0.11`,
+ `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
+ below for alternative styles.
+
+* `['full-revisionid']`: detailed revision identifier. For Git, this is the
+ full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
+
+* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
+ commit date in ISO 8601 format. This will be None if the date is not
+ available.
+
+* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
+ this is only accurate if run in a VCS checkout, otherwise it is likely to
+ be False or None
+
+* `['error']`: if the version string could not be computed, this will be set
+ to a string describing the problem, otherwise it will be None. It may be
+ useful to throw an exception in setup.py if this is set, to avoid e.g.
+ creating tarballs with a version string of "unknown".
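+
+  For example, a `setup.py` can refuse to build in that case (a minimal
+  sketch, using the top-level `get_versions()` call described above):
+
+      import versioneer
+      versions = versioneer.get_versions()
+      if versions['error']:
+          raise SystemExit("could not compute version: %s" % versions['error'])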
+
+Some variants are more useful than others. Including `full-revisionid` in a
+bug report should allow developers to reconstruct the exact code being tested
+(or indicate the presence of local changes that should be shared with the
+developers). `version` is suitable for display in an "about" box or a CLI
+`--version` output: it can be easily compared against release notes and lists
+of bugs fixed in various releases.
+
+The installer adds the following text to your `__init__.py` to place a basic
+version in `YOURPROJECT.__version__`:
+
+ from ._version import get_versions
+ __version__ = get_versions()['version']
+ del get_versions
+
+## Styles
+
+The setup.cfg `style=` configuration controls how the VCS information is
+rendered into a version string.
+
+The default style, "pep440", produces a PEP440-compliant string, equal to the
+un-prefixed tag name for actual releases, and containing an additional "local
+version" section with more detail for in-between builds. For Git, this is
+TAG[+DISTANCE.gHEX[.dirty]], using information from `git describe --tags
+--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
+tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
+that this commit is two revisions ("+2") beyond the "0.11" tag. For released
+software (exactly equal to a known tag), the identifier will only contain the
+stripped tag, e.g. "0.11".
+
+Other styles are available. See details.md in the Versioneer source tree for
+descriptions.
+
+## Debugging
+
+Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
+to return a version of "0+unknown". To investigate the problem, run `setup.py
+version`, which will run the version-lookup code in a verbose mode, and will
+display the full contents of `get_versions()` (including the `error` string,
+which may help identify what went wrong).
+
+## Known Limitations
+
+Some situations are known to cause problems for Versioneer. This section
+details the most significant ones. More can be found on the GitHub
+[issues page](https://github.com/warner/python-versioneer/issues).
+
+### Subprojects
+
+Versioneer has limited support for source trees in which `setup.py` is not in
+the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
+two common reasons why `setup.py` might not be in the root:
+
+* Source trees which contain multiple subprojects, such as
+ [Buildbot](https://github.com/buildbot/buildbot), which contains both
+ "master" and "slave" subprojects, each with their own `setup.py`,
+ `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
+ distributions (and upload multiple independently-installable tarballs).
+* Source trees whose main purpose is to contain a C library, but which also
+  provide bindings to Python (and perhaps other languages) in subdirectories.
+
+Versioneer will look for `.git` in parent directories, and most operations
+should get the right version string. However `pip` and `setuptools` have bugs
+and implementation details which frequently cause `pip install .` from a
+subproject directory to fail to find a correct version string (so it usually
+defaults to `0+unknown`).
+
+`pip install --editable .` should work correctly. `setup.py install` might
+work too.
+
+Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
+some later version.
+
+[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
+this issue. The discussion in
+[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
+issue from the Versioneer side in more detail.
+[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
+[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
+pip to let Versioneer work correctly.
+
+Versioneer-0.16 and earlier only looked for a `.git` directory next to the
+`setup.cfg`, so subprojects were completely unsupported with those releases.
+
+### Editable installs with setuptools <= 18.5
+
+`setup.py develop` and `pip install --editable .` allow you to install a
+project into a virtualenv once, then continue editing the source code (and
+test) without re-installing after every change.
+
+"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
+convenient way to specify executable scripts that should be installed along
+with the python package.
+
+These both work as expected when using modern setuptools. When using
+setuptools-18.5 or earlier, however, certain operations will cause
+`pkg_resources.DistributionNotFound` errors when running the entrypoint
+script, which must be resolved by re-installing the package. This happens
+when the install happens with one version, then the egg_info data is
+regenerated while a different version is checked out. Many setup.py commands
+cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
+a different virtualenv), so this can be surprising.
+
+[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
+this one, but upgrading to a newer version of setuptools should probably
+resolve it.
+
+### Unicode version strings
+
+While Versioneer works (and is continually tested) with both Python 2 and
+Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
+Newer releases probably generate unicode version strings on py2. It's not
+clear that this is wrong, but it may be surprising for applications when they
+write these strings to a network connection or include them in bytes-oriented
+APIs like cryptographic checksums.
+
+[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
+this question.
+
+
+## Updating Versioneer
+
+To upgrade your project to a new release of Versioneer, do the following:
+
+* install the new Versioneer (`pip install -U versioneer` or equivalent)
+* edit `setup.cfg`, if necessary, to include any new configuration settings
+ indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
+* re-run `versioneer install` in your source tree, to replace
+ `SRC/_version.py`
+* commit any changed files
+
+## Future Directions
+
+This tool is designed to be easily extended to other version-control
+systems: all VCS-specific components are in separate directories like
+src/git/ . The top-level `versioneer.py` script is assembled from these
+components by running make-versioneer.py . In the future, make-versioneer.py
+will take a VCS name as an argument, and will construct a version of
+`versioneer.py` that is specific to the given VCS. It might also take the
+configuration arguments that are currently provided manually during
+installation by editing setup.py . Alternatively, it might go the other
+direction and include code from all supported VCS systems, reducing the
+number of intermediate scripts.
+
+
+## License
+
+To make Versioneer easier to embed, all its code is dedicated to the public
+domain. The `_version.py` that it creates is also in the public domain.
+Specifically, both are released under the Creative Commons "Public Domain
+Dedication" license (CC0-1.0), as described in
+https://creativecommons.org/publicdomain/zero/1.0/ .
+
+"""
+
+from __future__ import print_function
+try:
+ import configparser
+except ImportError:
+ import ConfigParser as configparser
+import errno
+import json
+import os
+import re
+import subprocess
+import sys
+
+
+class VersioneerConfig:
+ """Container for Versioneer configuration parameters."""
+
+
+def get_root():
+ """Get the project root directory.
+
+ We require that all commands are run from the project root, i.e. the
+ directory that contains setup.py, setup.cfg, and versioneer.py .
+ """
+ root = os.path.realpath(os.path.abspath(os.getcwd()))
+ setup_py = os.path.join(root, "setup.py")
+ versioneer_py = os.path.join(root, "versioneer.py")
+ if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
+ # allow 'python path/to/setup.py COMMAND'
+ root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
+ setup_py = os.path.join(root, "setup.py")
+ versioneer_py = os.path.join(root, "versioneer.py")
+ if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
+        err = ("Versioneer was unable to find the project root directory. "
+ "Versioneer requires setup.py to be executed from "
+ "its immediate directory (like 'python setup.py COMMAND'), "
+ "or in a way that lets it use sys.argv[0] to find the root "
+ "(like 'python path/to/setup.py COMMAND').")
+ raise VersioneerBadRootError(err)
+ try:
+ # Certain runtime workflows (setup.py install/develop in a setuptools
+ # tree) execute all dependencies in a single python process, so
+ # "versioneer" may be imported multiple times, and python's shared
+ # module-import table will cache the first one. So we can't use
+ # os.path.dirname(__file__), as that will find whichever
+ # versioneer.py was first imported, even in later projects.
+ me = os.path.realpath(os.path.abspath(__file__))
+ me_dir = os.path.normcase(os.path.splitext(me)[0])
+ vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
+ if me_dir != vsr_dir:
+ print("Warning: build in %s is using versioneer.py from %s"
+ % (os.path.dirname(me), versioneer_py))
+ except NameError:
+ pass
+ return root
+
+
+def get_config_from_root(root):
+ """Read the project setup.cfg file to determine Versioneer config."""
+ # This might raise EnvironmentError (if setup.cfg is missing), or
+ # configparser.NoSectionError (if it lacks a [versioneer] section), or
+ # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
+ # the top of versioneer.py for instructions on writing your setup.cfg .
+ setup_cfg = os.path.join(root, "setup.cfg")
+ parser = configparser.SafeConfigParser()
+ with open(setup_cfg, "r") as f:
+ parser.readfp(f)
+ VCS = parser.get("versioneer", "VCS") # mandatory
+
+ def get(parser, name):
+ if parser.has_option("versioneer", name):
+ return parser.get("versioneer", name)
+ return None
+ cfg = VersioneerConfig()
+ cfg.VCS = VCS
+ cfg.style = get(parser, "style") or ""
+ cfg.versionfile_source = get(parser, "versionfile_source")
+ cfg.versionfile_build = get(parser, "versionfile_build")
+ cfg.tag_prefix = get(parser, "tag_prefix")
+ if cfg.tag_prefix in ("''", '""'):
+ cfg.tag_prefix = ""
+ cfg.parentdir_prefix = get(parser, "parentdir_prefix")
+ cfg.verbose = get(parser, "verbose")
+ return cfg
+
+
+class NotThisMethod(Exception):
+ """Exception raised if a method is not valid for the current scenario."""
+
+# these dictionaries contain VCS-specific tools
+LONG_VERSION_PY = {}
+HANDLERS = {}
+
+
+def register_vcs_handler(vcs, method): # decorator
+ """Decorator to mark a method as the handler for a particular VCS."""
+ def decorate(f):
+ """Store f in HANDLERS[vcs][method]."""
+ if vcs not in HANDLERS:
+ HANDLERS[vcs] = {}
+ HANDLERS[vcs][method] = f
+ return f
+ return decorate
+
+
+def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
+ env=None):
+ """Call the given command(s)."""
+ assert isinstance(commands, list)
+ p = None
+ for c in commands:
+ try:
+ dispcmd = str([c] + args)
+ # remember shell=False, so use git.cmd on windows, not just git
+ p = subprocess.Popen([c] + args, cwd=cwd, env=env,
+ stdout=subprocess.PIPE,
+ stderr=(subprocess.PIPE if hide_stderr
+ else None))
+ break
+ except EnvironmentError:
+ e = sys.exc_info()[1]
+ if e.errno == errno.ENOENT:
+ continue
+ if verbose:
+ print("unable to run %s" % dispcmd)
+ print(e)
+ return None, None
+ else:
+ if verbose:
+ print("unable to find command, tried %s" % (commands,))
+ return None, None
+ stdout = p.communicate()[0].strip()
+ if sys.version_info[0] >= 3:
+ stdout = stdout.decode()
+ if p.returncode != 0:
+ if verbose:
+ print("unable to run %s (error)" % dispcmd)
+ print("stdout was %s" % stdout)
+ return None, p.returncode
+ return stdout, p.returncode
+LONG_VERSION_PY['git'] = '''
+# This file helps to compute a version number in source trees obtained from
+# git-archive tarball (such as those provided by githubs download-from-tag
+# feature). Distribution tarballs (built by setup.py sdist) and build
+# directories (produced by setup.py build) will contain a much shorter file
+# that just contains the computed version number.
+
+# This file is released into the public domain. Generated by
+# versioneer-0.17 (https://github.com/warner/python-versioneer)
+
+"""Git implementation of _version.py."""
+
+import errno
+import os
+import re
+import subprocess
+import sys
+
+
+def get_keywords():
+ """Get the keywords needed to look up the version information."""
+ # these strings will be replaced by git during git-archive.
+ # setup.py/versioneer.py will grep for the variable names, so they must
+ # each be defined on a line of their own. _version.py will just call
+ # get_keywords().
+ git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
+ git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
+ git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
+ keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
+ return keywords
+
+
+class VersioneerConfig:
+ """Container for Versioneer configuration parameters."""
+
+
+def get_config():
+ """Create, populate and return the VersioneerConfig() object."""
+ # these strings are filled in when 'setup.py versioneer' creates
+ # _version.py
+ cfg = VersioneerConfig()
+ cfg.VCS = "git"
+ cfg.style = "%(STYLE)s"
+ cfg.tag_prefix = "%(TAG_PREFIX)s"
+ cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
+ cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
+ cfg.verbose = False
+ return cfg
+
+
+class NotThisMethod(Exception):
+ """Exception raised if a method is not valid for the current scenario."""
+
+
+LONG_VERSION_PY = {}
+HANDLERS = {}
+
+
+def register_vcs_handler(vcs, method): # decorator
+ """Decorator to mark a method as the handler for a particular VCS."""
+ def decorate(f):
+ """Store f in HANDLERS[vcs][method]."""
+ if vcs not in HANDLERS:
+ HANDLERS[vcs] = {}
+ HANDLERS[vcs][method] = f
+ return f
+ return decorate
+
+
+def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
+ env=None):
+ """Call the given command(s)."""
+ assert isinstance(commands, list)
+ p = None
+ for c in commands:
+ try:
+ dispcmd = str([c] + args)
+ # remember shell=False, so use git.cmd on windows, not just git
+ p = subprocess.Popen([c] + args, cwd=cwd, env=env,
+ stdout=subprocess.PIPE,
+ stderr=(subprocess.PIPE if hide_stderr
+ else None))
+ break
+ except EnvironmentError:
+ e = sys.exc_info()[1]
+ if e.errno == errno.ENOENT:
+ continue
+ if verbose:
+ print("unable to run %%s" %% dispcmd)
+ print(e)
+ return None, None
+ else:
+ if verbose:
+ print("unable to find command, tried %%s" %% (commands,))
+ return None, None
+ stdout = p.communicate()[0].strip()
+ if sys.version_info[0] >= 3:
+ stdout = stdout.decode()
+ if p.returncode != 0:
+ if verbose:
+ print("unable to run %%s (error)" %% dispcmd)
+ print("stdout was %%s" %% stdout)
+ return None, p.returncode
+ return stdout, p.returncode
+
+
+def versions_from_parentdir(parentdir_prefix, root, verbose):
+ """Try to determine the version from the parent directory name.
+
+ Source tarballs conventionally unpack into a directory that includes both
+ the project name and a version string. We will also support searching up
+ two directory levels for an appropriately named parent directory
+ """
+ rootdirs = []
+
+ for i in range(3):
+ dirname = os.path.basename(root)
+ if dirname.startswith(parentdir_prefix):
+ return {"version": dirname[len(parentdir_prefix):],
+ "full-revisionid": None,
+ "dirty": False, "error": None, "date": None}
+ else:
+ rootdirs.append(root)
+ root = os.path.dirname(root) # up a level
+
+ if verbose:
+ print("Tried directories %%s but none started with prefix %%s" %%
+ (str(rootdirs), parentdir_prefix))
+ raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
+
+
+@register_vcs_handler("git", "get_keywords")
+def git_get_keywords(versionfile_abs):
+ """Extract version information from the given file."""
+ # the code embedded in _version.py can just fetch the value of these
+ # keywords. When used from setup.py, we don't want to import _version.py,
+ # so we do it with a regexp instead. This function is not used from
+ # _version.py.
+ keywords = {}
+ try:
+ f = open(versionfile_abs, "r")
+ for line in f.readlines():
+ if line.strip().startswith("git_refnames ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["refnames"] = mo.group(1)
+ if line.strip().startswith("git_full ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["full"] = mo.group(1)
+ if line.strip().startswith("git_date ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["date"] = mo.group(1)
+ f.close()
+ except EnvironmentError:
+ pass
+ return keywords
+
+
+@register_vcs_handler("git", "keywords")
+def git_versions_from_keywords(keywords, tag_prefix, verbose):
+ """Get version information from git keywords."""
+ if not keywords:
+ raise NotThisMethod("no keywords at all, weird")
+ date = keywords.get("date")
+ if date is not None:
+ # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
+ # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
+ # -like" string, which we must then edit to make compliant), because
+ # it's been around since git-1.5.3, and it's too difficult to
+ # discover which version we're using, or to work around using an
+ # older one.
+ date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+ refnames = keywords["refnames"].strip()
+ if refnames.startswith("$Format"):
+ if verbose:
+ print("keywords are unexpanded, not using")
+ raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
+ refs = set([r.strip() for r in refnames.strip("()").split(",")])
+ # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
+ # just "foo-1.0". If we see a "tag: " prefix, prefer those.
+ TAG = "tag: "
+ tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ if not tags:
+ # Either we're using git < 1.8.3, or there really are no tags. We use
+ # a heuristic: assume all version tags have a digit. The old git %%d
+ # expansion behaves like git log --decorate=short and strips out the
+ # refs/heads/ and refs/tags/ prefixes that would let us distinguish
+ # between branches and tags. By ignoring refnames without digits, we
+ # filter out many common branch names like "release" and
+ # "stabilization", as well as "HEAD" and "master".
+ tags = set([r for r in refs if re.search(r'\d', r)])
+ if verbose:
+ print("discarding '%%s', no digits" %% ",".join(refs - tags))
+ if verbose:
+ print("likely tags: %%s" %% ",".join(sorted(tags)))
+ for ref in sorted(tags):
+ # sorting will prefer e.g. "2.0" over "2.0rc1"
+ if ref.startswith(tag_prefix):
+ r = ref[len(tag_prefix):]
+ if verbose:
+ print("picking %%s" %% r)
+ return {"version": r,
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False, "error": None,
+ "date": date}
+ # no suitable tags, so version is "0+unknown", but full hex is still there
+ if verbose:
+ print("no suitable tags, using unknown + full revision id")
+ return {"version": "0+unknown",
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False, "error": "no suitable tags", "date": None}
+
+
+@register_vcs_handler("git", "pieces_from_vcs")
+def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+ """Get version from 'git describe' in the root of the source tree.
+
+ This only gets called if the git-archive 'subst' keywords were *not*
+ expanded, and _version.py hasn't already been rewritten with a short
+ version string, meaning we're inside a checked out source tree.
+ """
+ GITS = ["git"]
+ if sys.platform == "win32":
+ GITS = ["git.cmd", "git.exe"]
+
+ out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
+ hide_stderr=True)
+ if rc != 0:
+ if verbose:
+ print("Directory %%s not under git control" %% root)
+ raise NotThisMethod("'git rev-parse --git-dir' returned error")
+
+ # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
+ # if there isn't one, this yields HEX[-dirty] (no NUM)
+ describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
+ "--always", "--long",
+ "--match", "%%s*" %% tag_prefix],
+ cwd=root)
+ # --long was added in git-1.5.5
+ if describe_out is None:
+ raise NotThisMethod("'git describe' failed")
+ describe_out = describe_out.strip()
+ full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+ if full_out is None:
+ raise NotThisMethod("'git rev-parse' failed")
+ full_out = full_out.strip()
+
+ pieces = {}
+ pieces["long"] = full_out
+ pieces["short"] = full_out[:7] # maybe improved later
+ pieces["error"] = None
+
+ # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
+ # TAG might have hyphens.
+ git_describe = describe_out
+
+ # look for -dirty suffix
+ dirty = git_describe.endswith("-dirty")
+ pieces["dirty"] = dirty
+ if dirty:
+ git_describe = git_describe[:git_describe.rindex("-dirty")]
+
+ # now we have TAG-NUM-gHEX or HEX
+
+ if "-" in git_describe:
+ # TAG-NUM-gHEX
+ mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
+ if not mo:
+ # unparseable. Maybe git-describe is misbehaving?
+ pieces["error"] = ("unable to parse git-describe output: '%%s'"
+ %% describe_out)
+ return pieces
+
+ # tag
+ full_tag = mo.group(1)
+ if not full_tag.startswith(tag_prefix):
+ if verbose:
+ fmt = "tag '%%s' doesn't start with prefix '%%s'"
+ print(fmt %% (full_tag, tag_prefix))
+ pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
+ %% (full_tag, tag_prefix))
+ return pieces
+ pieces["closest-tag"] = full_tag[len(tag_prefix):]
+
+ # distance: number of commits since tag
+ pieces["distance"] = int(mo.group(2))
+
+ # commit: short hex revision ID
+ pieces["short"] = mo.group(3)
+
+ else:
+ # HEX: no tags
+ pieces["closest-tag"] = None
+ count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
+ cwd=root)
+ pieces["distance"] = int(count_out) # total number of commits
+
+ # commit date: see ISO-8601 comment in git_versions_from_keywords()
+ date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
+ cwd=root)[0].strip()
+ pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+
+ return pieces
+
+
+def plus_or_dot(pieces):
+ """Return a + if we don't already have one, else return a ."""
+ if "+" in pieces.get("closest-tag", ""):
+ return "."
+ return "+"
+
+
+def render_pep440(pieces):
+ """Build up version string, with post-release "local version identifier".
+
+ Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
+ get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
+
+ Exceptions:
+ 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += plus_or_dot(pieces)
+ rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
+ pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def render_pep440_pre(pieces):
+ """TAG[.post.devDISTANCE] -- No -dirty.
+
+ Exceptions:
+ 1: no tags. 0.post.devDISTANCE
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"]:
+ rendered += ".post.dev%%d" %% pieces["distance"]
+ else:
+ # exception #1
+ rendered = "0.post.dev%%d" %% pieces["distance"]
+ return rendered
+
+
+def render_pep440_post(pieces):
+ """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+ The ".dev0" means dirty. Note that .dev0 sorts backwards
+ (a dirty tree will appear "older" than the corresponding clean one),
+ but you shouldn't be releasing software with -dirty anyways.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%%d" %% pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "g%%s" %% pieces["short"]
+ else:
+ # exception #1
+ rendered = "0.post%%d" %% pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ rendered += "+g%%s" %% pieces["short"]
+ return rendered
+
+
+def render_pep440_old(pieces):
+ """TAG[.postDISTANCE[.dev0]] .
+
+ The ".dev0" means dirty.
+
+    Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%%d" %% pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ else:
+ # exception #1
+ rendered = "0.post%%d" %% pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ return rendered
+
+
+def render_git_describe(pieces):
+ """TAG[-DISTANCE-gHEX][-dirty].
+
+ Like 'git describe --tags --dirty --always'.
+
+ Exceptions:
+ 1: no tags. HEX[-dirty] (note: no 'g' prefix)
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"]:
+ rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
+ else:
+ # exception #1
+ rendered = pieces["short"]
+ if pieces["dirty"]:
+ rendered += "-dirty"
+ return rendered
+
+
+def render_git_describe_long(pieces):
+ """TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always --long'.
+ The distance/hash is unconditional.
+
+ Exceptions:
+ 1: no tags. HEX[-dirty] (note: no 'g' prefix)
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
+ else:
+ # exception #1
+ rendered = pieces["short"]
+ if pieces["dirty"]:
+ rendered += "-dirty"
+ return rendered
+
+
+def render(pieces, style):
+ """Render the given version pieces into the requested style."""
+ if pieces["error"]:
+ return {"version": "unknown",
+ "full-revisionid": pieces.get("long"),
+ "dirty": None,
+ "error": pieces["error"],
+ "date": None}
+
+ if not style or style == "default":
+ style = "pep440" # the default
+
+ if style == "pep440":
+ rendered = render_pep440(pieces)
+ elif style == "pep440-pre":
+ rendered = render_pep440_pre(pieces)
+ elif style == "pep440-post":
+ rendered = render_pep440_post(pieces)
+ elif style == "pep440-old":
+ rendered = render_pep440_old(pieces)
+ elif style == "git-describe":
+ rendered = render_git_describe(pieces)
+ elif style == "git-describe-long":
+ rendered = render_git_describe_long(pieces)
+ else:
+ raise ValueError("unknown style '%%s'" %% style)
+
+ return {"version": rendered, "full-revisionid": pieces["long"],
+ "dirty": pieces["dirty"], "error": None,
+ "date": pieces.get("date")}
+
+
+def get_versions():
+ """Get version information or return default if unable to do so."""
+ # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
+ # __file__, we can work backwards from there to the root. Some
+ # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
+ # case we can only use expanded keywords.
+
+ cfg = get_config()
+ verbose = cfg.verbose
+
+ try:
+ return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
+ verbose)
+ except NotThisMethod:
+ pass
+
+ try:
+ root = os.path.realpath(__file__)
+ # versionfile_source is the relative path from the top of the source
+ # tree (where the .git directory might live) to this file. Invert
+ # this to find the root from __file__.
+ for i in cfg.versionfile_source.split('/'):
+ root = os.path.dirname(root)
+ except NameError:
+ return {"version": "0+unknown", "full-revisionid": None,
+ "dirty": None,
+ "error": "unable to find root of source tree",
+ "date": None}
+
+ try:
+ pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
+ return render(pieces, cfg.style)
+ except NotThisMethod:
+ pass
+
+ try:
+ if cfg.parentdir_prefix:
+ return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
+ except NotThisMethod:
+ pass
+
+ return {"version": "0+unknown", "full-revisionid": None,
+ "dirty": None,
+ "error": "unable to compute version", "date": None}
+'''
+
+
+@register_vcs_handler("git", "get_keywords")
+def git_get_keywords(versionfile_abs):
+ """Extract version information from the given file."""
+ # the code embedded in _version.py can just fetch the value of these
+ # keywords. When used from setup.py, we don't want to import _version.py,
+ # so we do it with a regexp instead. This function is not used from
+ # _version.py.
+ keywords = {}
+ try:
+ f = open(versionfile_abs, "r")
+ for line in f.readlines():
+ if line.strip().startswith("git_refnames ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["refnames"] = mo.group(1)
+ if line.strip().startswith("git_full ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["full"] = mo.group(1)
+ if line.strip().startswith("git_date ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["date"] = mo.group(1)
+ f.close()
+ except EnvironmentError:
+ pass
+ return keywords
+
+
+@register_vcs_handler("git", "keywords")
+def git_versions_from_keywords(keywords, tag_prefix, verbose):
+ """Get version information from git keywords."""
+ if not keywords:
+ raise NotThisMethod("no keywords at all, weird")
+ date = keywords.get("date")
+ if date is not None:
+ # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
+ # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
+ # -like" string, which we must then edit to make compliant), because
+ # it's been around since git-1.5.3, and it's too difficult to
+ # discover which version we're using, or to work around using an
+ # older one.
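+        # For example, an embedded date like "2019-02-27 16:00:00 -0500" would
+        # become "2019-02-27T16:00:00-0500" after this fixup (illustration only).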
+ date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+ refnames = keywords["refnames"].strip()
+ if refnames.startswith("$Format"):
+ if verbose:
+ print("keywords are unexpanded, not using")
+ raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
+ refs = set([r.strip() for r in refnames.strip("()").split(",")])
+ # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
+ # just "foo-1.0". If we see a "tag: " prefix, prefer those.
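+    # (Illustration only: expanded refnames typically look something like
+    # " (HEAD -> master, tag: v1.2.1, origin/master)".)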
+ TAG = "tag: "
+ tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ if not tags:
+ # Either we're using git < 1.8.3, or there really are no tags. We use
+ # a heuristic: assume all version tags have a digit. The old git %d
+ # expansion behaves like git log --decorate=short and strips out the
+ # refs/heads/ and refs/tags/ prefixes that would let us distinguish
+ # between branches and tags. By ignoring refnames without digits, we
+ # filter out many common branch names like "release" and
+ # "stabilization", as well as "HEAD" and "master".
+ tags = set([r for r in refs if re.search(r'\d', r)])
+ if verbose:
+ print("discarding '%s', no digits" % ",".join(refs - tags))
+ if verbose:
+ print("likely tags: %s" % ",".join(sorted(tags)))
+ for ref in sorted(tags):
+ # sorting will prefer e.g. "2.0" over "2.0rc1"
+ if ref.startswith(tag_prefix):
+ r = ref[len(tag_prefix):]
+ if verbose:
+ print("picking %s" % r)
+ return {"version": r,
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False, "error": None,
+ "date": date}
+ # no suitable tags, so version is "0+unknown", but full hex is still there
+ if verbose:
+ print("no suitable tags, using unknown + full revision id")
+ return {"version": "0+unknown",
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False, "error": "no suitable tags", "date": None}
+
+
+@register_vcs_handler("git", "pieces_from_vcs")
+def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+ """Get version from 'git describe' in the root of the source tree.
+
+ This only gets called if the git-archive 'subst' keywords were *not*
+ expanded, and _version.py hasn't already been rewritten with a short
+ version string, meaning we're inside a checked out source tree.
+ """
+ GITS = ["git"]
+ if sys.platform == "win32":
+ GITS = ["git.cmd", "git.exe"]
+
+ out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
+ hide_stderr=True)
+ if rc != 0:
+ if verbose:
+ print("Directory %s not under git control" % root)
+ raise NotThisMethod("'git rev-parse --git-dir' returned error")
+
+ # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
+ # if there isn't one, this yields HEX[-dirty] (no NUM)
+ describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
+ "--always", "--long",
+ "--match", "%s*" % tag_prefix],
+ cwd=root)
+ # --long was added in git-1.5.5
+ if describe_out is None:
+ raise NotThisMethod("'git describe' failed")
+ describe_out = describe_out.strip()
+ full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+ if full_out is None:
+ raise NotThisMethod("'git rev-parse' failed")
+ full_out = full_out.strip()
+
+ pieces = {}
+ pieces["long"] = full_out
+ pieces["short"] = full_out[:7] # maybe improved later
+ pieces["error"] = None
+
+ # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
+ # TAG might have hyphens.
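+    # (Illustration only: e.g. "v1.2.1-3-gdeadbeef" when a matching tag
+    # exists, or just "deadbeef" when none does.)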
+ git_describe = describe_out
+
+ # look for -dirty suffix
+ dirty = git_describe.endswith("-dirty")
+ pieces["dirty"] = dirty
+ if dirty:
+ git_describe = git_describe[:git_describe.rindex("-dirty")]
+
+ # now we have TAG-NUM-gHEX or HEX
+
+ if "-" in git_describe:
+ # TAG-NUM-gHEX
+ mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
+ if not mo:
+ # unparseable. Maybe git-describe is misbehaving?
+ pieces["error"] = ("unable to parse git-describe output: '%s'"
+ % describe_out)
+ return pieces
+
+ # tag
+ full_tag = mo.group(1)
+ if not full_tag.startswith(tag_prefix):
+ if verbose:
+ fmt = "tag '%s' doesn't start with prefix '%s'"
+ print(fmt % (full_tag, tag_prefix))
+ pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
+ % (full_tag, tag_prefix))
+ return pieces
+ pieces["closest-tag"] = full_tag[len(tag_prefix):]
+
+ # distance: number of commits since tag
+ pieces["distance"] = int(mo.group(2))
+
+ # commit: short hex revision ID
+ pieces["short"] = mo.group(3)
+
+ else:
+ # HEX: no tags
+ pieces["closest-tag"] = None
+ count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
+ cwd=root)
+ pieces["distance"] = int(count_out) # total number of commits
+
+ # commit date: see ISO-8601 comment in git_versions_from_keywords()
+ date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
+ cwd=root)[0].strip()
+ pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+
+ return pieces
+
+
+def do_vcs_install(manifest_in, versionfile_source, ipy):
+ """Git-specific installation logic for Versioneer.
+
+ For Git, this means creating/changing .gitattributes to mark _version.py
+ for export-subst keyword substitution.
+ """
+ GITS = ["git"]
+ if sys.platform == "win32":
+ GITS = ["git.cmd", "git.exe"]
+ files = [manifest_in, versionfile_source]
+ if ipy:
+ files.append(ipy)
+ try:
+ me = __file__
+ if me.endswith(".pyc") or me.endswith(".pyo"):
+ me = os.path.splitext(me)[0] + ".py"
+ versioneer_file = os.path.relpath(me)
+ except NameError:
+ versioneer_file = "versioneer.py"
+ files.append(versioneer_file)
+ present = False
+ try:
+ f = open(".gitattributes", "r")
+ for line in f.readlines():
+ if line.strip().startswith(versionfile_source):
+ if "export-subst" in line.strip().split()[1:]:
+ present = True
+ f.close()
+ except EnvironmentError:
+ pass
+ if not present:
+ f = open(".gitattributes", "a+")
+ f.write("%s export-subst\n" % versionfile_source)
+ f.close()
+ files.append(".gitattributes")
+ run_command(GITS, ["add", "--"] + files)
+
+
+def versions_from_parentdir(parentdir_prefix, root, verbose):
+ """Try to determine the version from the parent directory name.
+
+ Source tarballs conventionally unpack into a directory that includes both
+ the project name and a version string. We will also support searching up
+ two directory levels for an appropriately named parent directory
+ """
+ rootdirs = []
+
+ for i in range(3):
+ dirname = os.path.basename(root)
+ if dirname.startswith(parentdir_prefix):
+ return {"version": dirname[len(parentdir_prefix):],
+ "full-revisionid": None,
+ "dirty": False, "error": None, "date": None}
+ else:
+ rootdirs.append(root)
+ root = os.path.dirname(root) # up a level
+
+ if verbose:
+ print("Tried directories %s but none started with prefix %s" %
+ (str(rootdirs), parentdir_prefix))
+ raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
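+
+# Illustration only (hypothetical names): with parentdir_prefix "myproject-"
+# and an unpacked tree at ".../myproject-1.2.0", versions_from_parentdir()
+# would report version "1.2.0".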
+
+SHORT_VERSION_PY = """
+# This file was generated by 'versioneer.py' (0.17) from
+# revision-control system data, or from the parent directory name of an
+# unpacked source archive. Distribution tarballs contain a pre-generated copy
+# of this file.
+
+import json
+
+version_json = '''
+%s
+''' # END VERSION_JSON
+
+
+def get_versions():
+ return json.loads(version_json)
+"""
+
+
+def versions_from_file(filename):
+ """Try to determine the version from _version.py if present."""
+ try:
+ with open(filename) as f:
+ contents = f.read()
+ except EnvironmentError:
+ raise NotThisMethod("unable to read _version.py")
+ mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
+ contents, re.M | re.S)
+ if not mo:
+ mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON",
+ contents, re.M | re.S)
+ if not mo:
+ raise NotThisMethod("no version_json in _version.py")
+ return json.loads(mo.group(1))
+
+
+def write_to_version_file(filename, versions):
+ """Write the given version number to the given _version.py file."""
+ os.unlink(filename)
+ contents = json.dumps(versions, sort_keys=True,
+ indent=1, separators=(",", ": "))
+ with open(filename, "w") as f:
+ f.write(SHORT_VERSION_PY % contents)
+
+ print("set %s to '%s'" % (filename, versions["version"]))
+
+
+def plus_or_dot(pieces):
+ """Return a + if we don't already have one, else return a ."""
+ if "+" in pieces.get("closest-tag", ""):
+ return "."
+ return "+"
+
+
+def render_pep440(pieces):
+ """Build up version string, with post-release "local version identifier".
+
+ Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
+ get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
+
+ Exceptions:
+ 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += plus_or_dot(pieces)
+ rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0+untagged.%d.g%s" % (pieces["distance"],
+ pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
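+
+# Illustration only: given pieces with closest-tag "1.2.1", distance 3,
+# short "abc1234" and dirty True, render_pep440() would produce
+# "1.2.1+3.gabc1234.dirty".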
+
+
+def render_pep440_pre(pieces):
+ """TAG[.post.devDISTANCE] -- No -dirty.
+
+ Exceptions:
+ 1: no tags. 0.post.devDISTANCE
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"]:
+ rendered += ".post.dev%d" % pieces["distance"]
+ else:
+ # exception #1
+ rendered = "0.post.dev%d" % pieces["distance"]
+ return rendered
+
+
+def render_pep440_post(pieces):
+ """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+ The ".dev0" means dirty. Note that .dev0 sorts backwards
+ (a dirty tree will appear "older" than the corresponding clean one),
+ but you shouldn't be releasing software with -dirty anyways.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%d" % pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "g%s" % pieces["short"]
+ else:
+ # exception #1
+ rendered = "0.post%d" % pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ rendered += "+g%s" % pieces["short"]
+ return rendered
+
+
+def render_pep440_old(pieces):
+ """TAG[.postDISTANCE[.dev0]] .
+
+ The ".dev0" means dirty.
+
+    Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%d" % pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ else:
+ # exception #1
+ rendered = "0.post%d" % pieces["distance"]
+ if pieces["dirty"]:
+ rendered += ".dev0"
+ return rendered
+
+
+def render_git_describe(pieces):
+ """TAG[-DISTANCE-gHEX][-dirty].
+
+ Like 'git describe --tags --dirty --always'.
+
+ Exceptions:
+ 1: no tags. HEX[-dirty] (note: no 'g' prefix)
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"]:
+ rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+ else:
+ # exception #1
+ rendered = pieces["short"]
+ if pieces["dirty"]:
+ rendered += "-dirty"
+ return rendered
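+
+# Illustration only: the same hypothetical pieces would render here as
+# "1.2.1-3-gabc1234-dirty" (git-describe style rather than PEP 440).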
+
+
+def render_git_describe_long(pieces):
+ """TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always --long'.
+ The distance/hash is unconditional.
+
+ Exceptions:
+ 1: no tags. HEX[-dirty] (note: no 'g' prefix)
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+ else:
+ # exception #1
+ rendered = pieces["short"]
+ if pieces["dirty"]:
+ rendered += "-dirty"
+ return rendered
+
+
+def render(pieces, style):
+ """Render the given version pieces into the requested style."""
+ if pieces["error"]:
+ return {"version": "unknown",
+ "full-revisionid": pieces.get("long"),
+ "dirty": None,
+ "error": pieces["error"],
+ "date": None}
+
+ if not style or style == "default":
+ style = "pep440" # the default
+
+ if style == "pep440":
+ rendered = render_pep440(pieces)
+ elif style == "pep440-pre":
+ rendered = render_pep440_pre(pieces)
+ elif style == "pep440-post":
+ rendered = render_pep440_post(pieces)
+ elif style == "pep440-old":
+ rendered = render_pep440_old(pieces)
+ elif style == "git-describe":
+ rendered = render_git_describe(pieces)
+ elif style == "git-describe-long":
+ rendered = render_git_describe_long(pieces)
+ else:
+ raise ValueError("unknown style '%s'" % style)
+
+ return {"version": rendered, "full-revisionid": pieces["long"],
+ "dirty": pieces["dirty"], "error": None,
+ "date": pieces.get("date")}
+
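+# Illustration only: render(pieces, "pep440") for the hypothetical pieces
+# above would return something like
+#     {"version": "1.2.1+3.gabc1234.dirty", "full-revisionid": "<full sha>",
+#      "dirty": True, "error": None, "date": "2019-02-27T16:00:00-0500"}
+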
+
+class VersioneerBadRootError(Exception):
+ """The project root directory is unknown or missing key files."""
+
+
+def get_versions(verbose=False):
+ """Get the project version from whatever source is available.
+
+    Returns a dict with 'version', 'full-revisionid', 'dirty', 'error',
+    and 'date' keys.
+ """
+ if "versioneer" in sys.modules:
+ # see the discussion in cmdclass.py:get_cmdclass()
+ del sys.modules["versioneer"]
+
+ root = get_root()
+ cfg = get_config_from_root(root)
+
+ assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
+ handlers = HANDLERS.get(cfg.VCS)
+ assert handlers, "unrecognized VCS '%s'" % cfg.VCS
+ verbose = verbose or cfg.verbose
+ assert cfg.versionfile_source is not None, \
+ "please set versioneer.versionfile_source"
+ assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
+
+ versionfile_abs = os.path.join(root, cfg.versionfile_source)
+
+ # extract version from first of: _version.py, VCS command (e.g. 'git
+ # describe'), parentdir. This is meant to work for developers using a
+ # source checkout, for users of a tarball created by 'setup.py sdist',
+ # and for users of a tarball/zipball created by 'git archive' or github's
+ # download-from-tag feature or the equivalent in other VCSes.
+
+ get_keywords_f = handlers.get("get_keywords")
+ from_keywords_f = handlers.get("keywords")
+ if get_keywords_f and from_keywords_f:
+ try:
+ keywords = get_keywords_f(versionfile_abs)
+ ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
+ if verbose:
+ print("got version from expanded keyword %s" % ver)
+ return ver
+ except NotThisMethod:
+ pass
+
+ try:
+ ver = versions_from_file(versionfile_abs)
+ if verbose:
+ print("got version from file %s %s" % (versionfile_abs, ver))
+ return ver
+ except NotThisMethod:
+ pass
+
+ from_vcs_f = handlers.get("pieces_from_vcs")
+ if from_vcs_f:
+ try:
+ pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
+ ver = render(pieces, cfg.style)
+ if verbose:
+ print("got version from VCS %s" % ver)
+ return ver
+ except NotThisMethod:
+ pass
+
+ try:
+ if cfg.parentdir_prefix:
+ ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
+ if verbose:
+ print("got version from parentdir %s" % ver)
+ return ver
+ except NotThisMethod:
+ pass
+
+ if verbose:
+ print("unable to compute version")
+
+ return {"version": "0+unknown", "full-revisionid": None,
+ "dirty": None, "error": "unable to compute version",
+ "date": None}
+
+
+def get_version():
+ """Get the short version string for this project."""
+ return get_versions()["version"]
+
+
+def get_cmdclass():
+ """Get the custom setuptools/distutils subclasses used by Versioneer."""
+ if "versioneer" in sys.modules:
+ del sys.modules["versioneer"]
+ # this fixes the "python setup.py develop" case (also 'install' and
+ # 'easy_install .'), in which subdependencies of the main project are
+ # built (using setup.py bdist_egg) in the same python process. Assume
+ # a main project A and a dependency B, which use different versions
+ # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
+ # sys.modules by the time B's setup.py is executed, causing B to run
+ # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
+    # sandbox that restores sys.modules to its pre-build state, so the
+ # parent is protected against the child's "import versioneer". By
+ # removing ourselves from sys.modules here, before the child build
+ # happens, we protect the child from the parent's versioneer too.
+ # Also see https://github.com/warner/python-versioneer/issues/52
+
+ cmds = {}
+
+ # we add "version" to both distutils and setuptools
+ from distutils.core import Command
+
+ class cmd_version(Command):
+ description = "report generated version string"
+ user_options = []
+ boolean_options = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ vers = get_versions(verbose=True)
+ print("Version: %s" % vers["version"])
+ print(" full-revisionid: %s" % vers.get("full-revisionid"))
+ print(" dirty: %s" % vers.get("dirty"))
+ print(" date: %s" % vers.get("date"))
+ if vers["error"]:
+ print(" error: %s" % vers["error"])
+ cmds["version"] = cmd_version
+
+ # we override "build_py" in both distutils and setuptools
+ #
+ # most invocation pathways end up running build_py:
+ # distutils/build -> build_py
+ # distutils/install -> distutils/build ->..
+ # setuptools/bdist_wheel -> distutils/install ->..
+ # setuptools/bdist_egg -> distutils/install_lib -> build_py
+ # setuptools/install -> bdist_egg ->..
+ # setuptools/develop -> ?
+ # pip install:
+ # copies source tree to a tempdir before running egg_info/etc
+ # if .git isn't copied too, 'git describe' will fail
+ # then does setup.py bdist_wheel, or sometimes setup.py install
+ # setup.py egg_info -> ?
+
+ # we override different "build_py" commands for both environments
+ if "setuptools" in sys.modules:
+ from setuptools.command.build_py import build_py as _build_py
+ else:
+ from distutils.command.build_py import build_py as _build_py
+
+ class cmd_build_py(_build_py):
+ def run(self):
+ root = get_root()
+ cfg = get_config_from_root(root)
+ versions = get_versions()
+ _build_py.run(self)
+ # now locate _version.py in the new build/ directory and replace
+ # it with an updated value
+ if cfg.versionfile_build:
+ target_versionfile = os.path.join(self.build_lib,
+ cfg.versionfile_build)
+ print("UPDATING %s" % target_versionfile)
+ write_to_version_file(target_versionfile, versions)
+ cmds["build_py"] = cmd_build_py
+
+ if "cx_Freeze" in sys.modules: # cx_freeze enabled?
+ from cx_Freeze.dist import build_exe as _build_exe
+ # nczeczulin reports that py2exe won't like the pep440-style string
+ # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
+ # setup(console=[{
+ # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
+ # "product_version": versioneer.get_version(),
+ # ...
+
+ class cmd_build_exe(_build_exe):
+ def run(self):
+ root = get_root()
+ cfg = get_config_from_root(root)
+ versions = get_versions()
+ target_versionfile = cfg.versionfile_source
+ print("UPDATING %s" % target_versionfile)
+ write_to_version_file(target_versionfile, versions)
+
+ _build_exe.run(self)
+ os.unlink(target_versionfile)
+ with open(cfg.versionfile_source, "w") as f:
+ LONG = LONG_VERSION_PY[cfg.VCS]
+ f.write(LONG %
+ {"DOLLAR": "$",
+ "STYLE": cfg.style,
+ "TAG_PREFIX": cfg.tag_prefix,
+ "PARENTDIR_PREFIX": cfg.parentdir_prefix,
+ "VERSIONFILE_SOURCE": cfg.versionfile_source,
+ })
+ cmds["build_exe"] = cmd_build_exe
+ del cmds["build_py"]
+
+ if 'py2exe' in sys.modules: # py2exe enabled?
+ try:
+ from py2exe.distutils_buildexe import py2exe as _py2exe # py3
+ except ImportError:
+ from py2exe.build_exe import py2exe as _py2exe # py2
+
+ class cmd_py2exe(_py2exe):
+ def run(self):
+ root = get_root()
+ cfg = get_config_from_root(root)
+ versions = get_versions()
+ target_versionfile = cfg.versionfile_source
+ print("UPDATING %s" % target_versionfile)
+ write_to_version_file(target_versionfile, versions)
+
+ _py2exe.run(self)
+ os.unlink(target_versionfile)
+ with open(cfg.versionfile_source, "w") as f:
+ LONG = LONG_VERSION_PY[cfg.VCS]
+ f.write(LONG %
+ {"DOLLAR": "$",
+ "STYLE": cfg.style,
+ "TAG_PREFIX": cfg.tag_prefix,
+ "PARENTDIR_PREFIX": cfg.parentdir_prefix,
+ "VERSIONFILE_SOURCE": cfg.versionfile_source,
+ })
+ cmds["py2exe"] = cmd_py2exe
+
+ # we override different "sdist" commands for both environments
+ if "setuptools" in sys.modules:
+ from setuptools.command.sdist import sdist as _sdist
+ else:
+ from distutils.command.sdist import sdist as _sdist
+
+ class cmd_sdist(_sdist):
+ def run(self):
+ versions = get_versions()
+ self._versioneer_generated_versions = versions
+ # unless we update this, the command will keep using the old
+ # version
+ self.distribution.metadata.version = versions["version"]
+ return _sdist.run(self)
+
+ def make_release_tree(self, base_dir, files):
+ root = get_root()
+ cfg = get_config_from_root(root)
+ _sdist.make_release_tree(self, base_dir, files)
+ # now locate _version.py in the new base_dir directory
+ # (remembering that it may be a hardlink) and replace it with an
+ # updated value
+ target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
+ print("UPDATING %s" % target_versionfile)
+ write_to_version_file(target_versionfile,
+ self._versioneer_generated_versions)
+ cmds["sdist"] = cmd_sdist
+
+ return cmds
+
+
+CONFIG_ERROR = """
+setup.cfg is missing the necessary Versioneer configuration. You need
+a section like:
+
+ [versioneer]
+ VCS = git
+ style = pep440
+ versionfile_source = src/myproject/_version.py
+ versionfile_build = myproject/_version.py
+ tag_prefix =
+ parentdir_prefix = myproject-
+
+You will also need to edit your setup.py to use the results:
+
+ import versioneer
+ setup(version=versioneer.get_version(),
+ cmdclass=versioneer.get_cmdclass(), ...)
+
+Please read the docstring in ./versioneer.py for configuration instructions,
+edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
+"""
+
+SAMPLE_CONFIG = """
+# See the docstring in versioneer.py for instructions. Note that you must
+# re-run 'versioneer.py setup' after changing this section, and commit the
+# resulting files.
+
+[versioneer]
+#VCS = git
+#style = pep440
+#versionfile_source =
+#versionfile_build =
+#tag_prefix =
+#parentdir_prefix =
+
+"""
+
+INIT_PY_SNIPPET = """
+from ._version import get_versions
+__version__ = get_versions()['version']
+del get_versions
+"""
+
+
+def do_setup():
+ """Main VCS-independent setup function for installing Versioneer."""
+ root = get_root()
+ try:
+ cfg = get_config_from_root(root)
+ except (EnvironmentError, configparser.NoSectionError,
+ configparser.NoOptionError) as e:
+ if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
+ print("Adding sample versioneer config to setup.cfg",
+ file=sys.stderr)
+ with open(os.path.join(root, "setup.cfg"), "a") as f:
+ f.write(SAMPLE_CONFIG)
+ print(CONFIG_ERROR, file=sys.stderr)
+ return 1
+
+ print(" creating %s" % cfg.versionfile_source)
+ with open(cfg.versionfile_source, "w") as f:
+ LONG = LONG_VERSION_PY[cfg.VCS]
+ f.write(LONG % {"DOLLAR": "$",
+ "STYLE": cfg.style,
+ "TAG_PREFIX": cfg.tag_prefix,
+ "PARENTDIR_PREFIX": cfg.parentdir_prefix,
+ "VERSIONFILE_SOURCE": cfg.versionfile_source,
+ })
+
+ ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
+ "__init__.py")
+ if os.path.exists(ipy):
+ try:
+ with open(ipy, "r") as f:
+ old = f.read()
+ except EnvironmentError:
+ old = ""
+ if INIT_PY_SNIPPET not in old:
+ print(" appending to %s" % ipy)
+ with open(ipy, "a") as f:
+ f.write(INIT_PY_SNIPPET)
+ else:
+ print(" %s unmodified" % ipy)
+ else:
+ print(" %s doesn't exist, ok" % ipy)
+ ipy = None
+
+ # Make sure both the top-level "versioneer.py" and versionfile_source
+ # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
+ # they'll be copied into source distributions. Pip won't be able to
+ # install the package without this.
+ manifest_in = os.path.join(root, "MANIFEST.in")
+ simple_includes = set()
+ try:
+ with open(manifest_in, "r") as f:
+ for line in f:
+ if line.startswith("include "):
+ for include in line.split()[1:]:
+ simple_includes.add(include)
+ except EnvironmentError:
+ pass
+ # That doesn't cover everything MANIFEST.in can do
+ # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
+ # it might give some false negatives. Appending redundant 'include'
+ # lines is safe, though.
+ if "versioneer.py" not in simple_includes:
+ print(" appending 'versioneer.py' to MANIFEST.in")
+ with open(manifest_in, "a") as f:
+ f.write("include versioneer.py\n")
+ else:
+ print(" 'versioneer.py' already in MANIFEST.in")
+ if cfg.versionfile_source not in simple_includes:
+ print(" appending versionfile_source ('%s') to MANIFEST.in" %
+ cfg.versionfile_source)
+ with open(manifest_in, "a") as f:
+ f.write("include %s\n" % cfg.versionfile_source)
+ else:
+ print(" versionfile_source already in MANIFEST.in")
+
+ # Make VCS-specific changes. For git, this means creating/changing
+ # .gitattributes to mark _version.py for export-subst keyword
+ # substitution.
+ do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
+ return 0
+
+
+def scan_setup_py():
+ """Validate the contents of setup.py against Versioneer's expectations."""
+ found = set()
+ setters = False
+ errors = 0
+ with open("setup.py", "r") as f:
+ for line in f.readlines():
+ if "import versioneer" in line:
+ found.add("import")
+ if "versioneer.get_cmdclass()" in line:
+ found.add("cmdclass")
+ if "versioneer.get_version()" in line:
+ found.add("get_version")
+ if "versioneer.VCS" in line:
+ setters = True
+ if "versioneer.versionfile_source" in line:
+ setters = True
+ if len(found) != 3:
+ print("")
+ print("Your setup.py appears to be missing some important items")
+ print("(but I might be wrong). Please make sure it has something")
+ print("roughly like the following:")
+ print("")
+ print(" import versioneer")
+ print(" setup( version=versioneer.get_version(),")
+ print(" cmdclass=versioneer.get_cmdclass(), ...)")
+ print("")
+ errors += 1
+ if setters:
+ print("You should remove lines like 'versioneer.VCS = ' and")
+ print("'versioneer.versionfile_source = ' . This configuration")
+ print("now lives in setup.cfg, and should be removed from setup.py")
+ print("")
+ errors += 1
+ return errors
+
+if __name__ == "__main__":
+ cmd = sys.argv[1]
+ if cmd == "setup":
+ errors = do_setup()
+ errors += scan_setup_py()
+ if errors:
+ sys.exit(1)
diff --git a/third_party/python/enum34/MANIFEST.in b/third_party/python/enum34/MANIFEST.in
new file mode 100644
index 0000000000..98fe77f55a
--- /dev/null
+++ b/third_party/python/enum34/MANIFEST.in
@@ -0,0 +1,9 @@
+exclude enum/*
+include setup.py
+include README
+include enum/__init__.py
+include enum/test.py
+include enum/LICENSE
+include enum/README
+include enum/doc/enum.pdf
+include enum/doc/enum.rst
diff --git a/third_party/python/enum34/PKG-INFO b/third_party/python/enum34/PKG-INFO
new file mode 100644
index 0000000000..98927c4d99
--- /dev/null
+++ b/third_party/python/enum34/PKG-INFO
@@ -0,0 +1,62 @@
+Metadata-Version: 1.1
+Name: enum34
+Version: 1.1.6
+Summary: Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4
+Home-page: https://bitbucket.org/stoneleaf/enum34
+Author: Ethan Furman
+Author-email: ethan@stoneleaf.us
+License: BSD License
+Description: enum --- support for enumerations
+ ========================================
+
+ An enumeration is a set of symbolic names (members) bound to unique, constant
+ values. Within an enumeration, the members can be compared by identity, and
+ the enumeration itself can be iterated over.
+
+ from enum import Enum
+
+ class Fruit(Enum):
+ apple = 1
+ banana = 2
+ orange = 3
+
+ list(Fruit)
+ # [<Fruit.apple: 1>, <Fruit.banana: 2>, <Fruit.orange: 3>]
+
+ len(Fruit)
+ # 3
+
+ Fruit.banana
+ # <Fruit.banana: 2>
+
+ Fruit['banana']
+ # <Fruit.banana: 2>
+
+ Fruit(2)
+ # <Fruit.banana: 2>
+
+ Fruit.banana is Fruit['banana'] is Fruit(2)
+ # True
+
+ Fruit.banana.name
+ # 'banana'
+
+ Fruit.banana.value
+ # 2
+
+ Repository and Issue Tracker at https://bitbucket.org/stoneleaf/enum34.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Provides: enum
diff --git a/third_party/python/enum34/README b/third_party/python/enum34/README
new file mode 100644
index 0000000000..aa2333d8df
--- /dev/null
+++ b/third_party/python/enum34/README
@@ -0,0 +1,3 @@
+enum34 is the new Python stdlib enum module available in Python 3.4
+backported for previous versions of Python from 2.4 to 3.3.
+Tested on 2.6, 2.7, and 3.3+.
diff --git a/third_party/python/enum34/enum/LICENSE b/third_party/python/enum34/enum/LICENSE
new file mode 100644
index 0000000000..9003b8850e
--- /dev/null
+++ b/third_party/python/enum34/enum/LICENSE
@@ -0,0 +1,32 @@
+Copyright (c) 2013, Ethan Furman.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+ Redistributions of source code must retain the above
+ copyright notice, this list of conditions and the
+ following disclaimer.
+
+ Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials
+ provided with the distribution.
+
+ Neither the name Ethan Furman nor the names of any
+ contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/enum34/enum/README b/third_party/python/enum34/enum/README
new file mode 100644
index 0000000000..aa2333d8df
--- /dev/null
+++ b/third_party/python/enum34/enum/README
@@ -0,0 +1,3 @@
+enum34 is the new Python stdlib enum module available in Python 3.4
+backported for previous versions of Python from 2.4 to 3.3.
+Tested on 2.6, 2.7, and 3.3+.
diff --git a/third_party/python/enum34/enum/__init__.py b/third_party/python/enum34/enum/__init__.py
new file mode 100644
index 0000000000..d6ffb3a40f
--- /dev/null
+++ b/third_party/python/enum34/enum/__init__.py
@@ -0,0 +1,837 @@
+"""Python Enumerations"""
+
+import sys as _sys
+
+__all__ = ['Enum', 'IntEnum', 'unique']
+
+version = 1, 1, 6
+
+pyver = float('%s.%s' % _sys.version_info[:2])
+
+try:
+ any
+except NameError:
+ def any(iterable):
+ for element in iterable:
+ if element:
+ return True
+ return False
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ OrderedDict = None
+
+try:
+ basestring
+except NameError:
+ # In Python 2 basestring is the ancestor of both str and unicode
+ # in Python 3 it's just str, but was missing in 3.1
+ basestring = str
+
+try:
+ unicode
+except NameError:
+ # In Python 3 unicode no longer exists (it's just str)
+ unicode = str
+
+class _RouteClassAttributeToGetattr(object):
+ """Route attribute access on a class to __getattr__.
+
+ This is a descriptor, used to define attributes that act differently when
+ accessed through an instance and through a class. Instance access remains
+ normal, but access to an attribute through a class will be routed to the
+ class's __getattr__ method; this is done by raising AttributeError.
+
+ """
+ def __init__(self, fget=None):
+ self.fget = fget
+
+ def __get__(self, instance, ownerclass=None):
+ if instance is None:
+ raise AttributeError()
+ return self.fget(instance)
+
+ def __set__(self, instance, value):
+ raise AttributeError("can't set attribute")
+
+ def __delete__(self, instance):
+ raise AttributeError("can't delete attribute")
+
+
+def _is_descriptor(obj):
+ """Returns True if obj is a descriptor, False otherwise."""
+ return (
+ hasattr(obj, '__get__') or
+ hasattr(obj, '__set__') or
+ hasattr(obj, '__delete__'))
+
+
+def _is_dunder(name):
+ """Returns True if a __dunder__ name, False otherwise."""
+ return (name[:2] == name[-2:] == '__' and
+ name[2:3] != '_' and
+ name[-3:-2] != '_' and
+ len(name) > 4)
+
+
+def _is_sunder(name):
+ """Returns True if a _sunder_ name, False otherwise."""
+ return (name[0] == name[-1] == '_' and
+ name[1:2] != '_' and
+ name[-2:-1] != '_' and
+ len(name) > 2)
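+
+# (Illustration: _is_dunder("__repr__") and _is_sunder("_order_") are True,
+# while _is_dunder("_order_") is False.)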
+
+
+def _make_class_unpicklable(cls):
+ """Make the given class un-picklable."""
+ def _break_on_call_reduce(self, protocol=None):
+ raise TypeError('%r cannot be pickled' % self)
+ cls.__reduce_ex__ = _break_on_call_reduce
+ cls.__module__ = '<unknown>'
+
+
+class _EnumDict(dict):
+ """Track enum member order and ensure member names are not reused.
+
+ EnumMeta will use the names found in self._member_names as the
+ enumeration member names.
+
+ """
+ def __init__(self):
+ super(_EnumDict, self).__init__()
+ self._member_names = []
+
+ def __setitem__(self, key, value):
+ """Changes anything not dundered or not a descriptor.
+
+ If a descriptor is added with the same name as an enum member, the name
+ is removed from _member_names (this may leave a hole in the numerical
+ sequence of values).
+
+ If an enum member name is used twice, an error is raised; duplicate
+ values are not checked for.
+
+ Single underscore (sunder) names are reserved.
+
+        Note: in 3.x __order__ is simply discarded as an unnecessary
+        leftover from 2.x
+
+ """
+ if pyver >= 3.0 and key in ('_order_', '__order__'):
+ return
+ elif key == '__order__':
+ key = '_order_'
+ if _is_sunder(key):
+ if key != '_order_':
+ raise ValueError('_names_ are reserved for future Enum use')
+ elif _is_dunder(key):
+ pass
+ elif key in self._member_names:
+ # descriptor overwriting an enum?
+ raise TypeError('Attempted to reuse key: %r' % key)
+ elif not _is_descriptor(value):
+ if key in self:
+ # enum overwriting a descriptor?
+ raise TypeError('Key already defined as: %r' % self[key])
+ self._member_names.append(key)
+ super(_EnumDict, self).__setitem__(key, value)
+
+
+# Dummy value for Enum as EnumMeta explicitly checks for it, but of course until
+# EnumMeta finishes running the first time the Enum class doesn't exist. This
+# is also why there are checks in EnumMeta like `if Enum is not None`
+Enum = None
+
+
+class EnumMeta(type):
+ """Metaclass for Enum"""
+ @classmethod
+ def __prepare__(metacls, cls, bases):
+ return _EnumDict()
+
+ def __new__(metacls, cls, bases, classdict):
+ # an Enum class is final once enumeration items have been defined; it
+ # cannot be mixed with other types (int, float, etc.) if it has an
+ # inherited __new__ unless a new __new__ is defined (or the resulting
+ # class will fail).
+ if type(classdict) is dict:
+ original_dict = classdict
+ classdict = _EnumDict()
+ for k, v in original_dict.items():
+ classdict[k] = v
+
+ member_type, first_enum = metacls._get_mixins_(bases)
+ __new__, save_new, use_args = metacls._find_new_(classdict, member_type,
+ first_enum)
+ # save enum items into separate mapping so they don't get baked into
+ # the new class
+ members = dict((k, classdict[k]) for k in classdict._member_names)
+ for name in classdict._member_names:
+ del classdict[name]
+
+ # py2 support for definition order
+ _order_ = classdict.get('_order_')
+ if _order_ is None:
+ if pyver < 3.0:
+ try:
+ _order_ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])]
+ except TypeError:
+ _order_ = [name for name in sorted(members.keys())]
+ else:
+ _order_ = classdict._member_names
+ else:
+ del classdict['_order_']
+ if pyver < 3.0:
+ _order_ = _order_.replace(',', ' ').split()
+ aliases = [name for name in members if name not in _order_]
+ _order_ += aliases
+
+ # check for illegal enum names (any others?)
+ invalid_names = set(members) & set(['mro'])
+ if invalid_names:
+ raise ValueError('Invalid enum member name(s): %s' % (
+ ', '.join(invalid_names), ))
+
+ # save attributes from super classes so we know if we can take
+ # the shortcut of storing members in the class dict
+ base_attributes = set([a for b in bases for a in b.__dict__])
+ # create our new Enum type
+ enum_class = super(EnumMeta, metacls).__new__(metacls, cls, bases, classdict)
+ enum_class._member_names_ = [] # names in random order
+ if OrderedDict is not None:
+ enum_class._member_map_ = OrderedDict()
+ else:
+ enum_class._member_map_ = {} # name->value map
+ enum_class._member_type_ = member_type
+
+ # Reverse value->name map for hashable values.
+ enum_class._value2member_map_ = {}
+
+ # instantiate them, checking for duplicates as we go
+ # we instantiate first instead of checking for duplicates first in case
+ # a custom __new__ is doing something funky with the values -- such as
+ # auto-numbering ;)
+ if __new__ is None:
+ __new__ = enum_class.__new__
+ for member_name in _order_:
+ value = members[member_name]
+ if not isinstance(value, tuple):
+ args = (value, )
+ else:
+ args = value
+ if member_type is tuple: # special case for tuple enums
+ args = (args, ) # wrap it one more time
+ if not use_args or not args:
+ enum_member = __new__(enum_class)
+ if not hasattr(enum_member, '_value_'):
+ enum_member._value_ = value
+ else:
+ enum_member = __new__(enum_class, *args)
+ if not hasattr(enum_member, '_value_'):
+ enum_member._value_ = member_type(*args)
+ value = enum_member._value_
+ enum_member._name_ = member_name
+ enum_member.__objclass__ = enum_class
+ enum_member.__init__(*args)
+ # If another member with the same value was already defined, the
+ # new member becomes an alias to the existing one.
+ for name, canonical_member in enum_class._member_map_.items():
+ if canonical_member.value == enum_member._value_:
+ enum_member = canonical_member
+ break
+ else:
+ # Aliases don't appear in member names (only in __members__).
+ enum_class._member_names_.append(member_name)
+ # performance boost for any member that would not shadow
+ # a DynamicClassAttribute (aka _RouteClassAttributeToGetattr)
+ if member_name not in base_attributes:
+ setattr(enum_class, member_name, enum_member)
+ # now add to _member_map_
+ enum_class._member_map_[member_name] = enum_member
+ try:
+ # This may fail if value is not hashable. We can't add the value
+ # to the map, and by-value lookups for this value will be
+ # linear.
+ enum_class._value2member_map_[value] = enum_member
+ except TypeError:
+ pass
+
+
+ # If a custom type is mixed into the Enum, and it does not know how
+ # to pickle itself, pickle.dumps will succeed but pickle.loads will
+ # fail. Rather than have the error show up later and possibly far
+ # from the source, sabotage the pickle protocol for this class so
+ # that pickle.dumps also fails.
+ #
+ # However, if the new class implements its own __reduce_ex__, do not
+ # sabotage -- it's on them to make sure it works correctly. We use
+ # __reduce_ex__ instead of any of the others as it is preferred by
+ # pickle over __reduce__, and it handles all pickle protocols.
+ unpicklable = False
+ if '__reduce_ex__' not in classdict:
+ if member_type is not object:
+ methods = ('__getnewargs_ex__', '__getnewargs__',
+ '__reduce_ex__', '__reduce__')
+ if not any(m in member_type.__dict__ for m in methods):
+ _make_class_unpicklable(enum_class)
+ unpicklable = True
+
+
+ # double check that repr and friends are not the mixin's or various
+ # things break (such as pickle)
+ for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
+ class_method = getattr(enum_class, name)
+ obj_method = getattr(member_type, name, None)
+ enum_method = getattr(first_enum, name, None)
+ if name not in classdict and class_method is not enum_method:
+ if name == '__reduce_ex__' and unpicklable:
+ continue
+ setattr(enum_class, name, enum_method)
+
+        # method resolution and ints are not playing nicely;
+        # Python versions before 2.6 use __cmp__
+
+ if pyver < 2.6:
+
+ if issubclass(enum_class, int):
+ setattr(enum_class, '__cmp__', getattr(int, '__cmp__'))
+
+ elif pyver < 3.0:
+
+ if issubclass(enum_class, int):
+ for method in (
+ '__le__',
+ '__lt__',
+ '__gt__',
+ '__ge__',
+ '__eq__',
+ '__ne__',
+ '__hash__',
+ ):
+ setattr(enum_class, method, getattr(int, method))
+
+ # replace any other __new__ with our own (as long as Enum is not None,
+ # anyway) -- again, this is to support pickle
+ if Enum is not None:
+ # if the user defined their own __new__, save it before it gets
+ # clobbered in case they subclass later
+ if save_new:
+ setattr(enum_class, '__member_new__', enum_class.__dict__['__new__'])
+ setattr(enum_class, '__new__', Enum.__dict__['__new__'])
+ return enum_class
+
+ def __bool__(cls):
+ """
+ classes/types should always be True.
+ """
+ return True
+
+ def __call__(cls, value, names=None, module=None, type=None, start=1):
+ """Either returns an existing member, or creates a new enum class.
+
+ This method is used both when an enum class is given a value to match
+ to an enumeration member (i.e. Color(3)) and for the functional API
+ (i.e. Color = Enum('Color', names='red green blue')).
+
+ When used for the functional API: `module`, if set, will be stored in
+ the new class' __module__ attribute; `type`, if set, will be mixed in
+ as the first base class.
+
+ Note: if `module` is not set this routine will attempt to discover the
+ calling module by walking the frame stack; if this is unsuccessful
+ the resulting class will not be pickleable.
+
+ """
+ if names is None: # simple value lookup
+ return cls.__new__(cls, value)
+ # otherwise, functional API: we're creating a new Enum type
+ return cls._create_(value, names, module=module, type=type, start=start)
+
+ def __contains__(cls, member):
+ return isinstance(member, cls) and member.name in cls._member_map_
+
+ def __delattr__(cls, attr):
+ # nicer error message when someone tries to delete an attribute
+ # (see issue19025).
+ if attr in cls._member_map_:
+ raise AttributeError(
+ "%s: cannot delete Enum member." % cls.__name__)
+ super(EnumMeta, cls).__delattr__(attr)
+
+ def __dir__(self):
+ return (['__class__', '__doc__', '__members__', '__module__'] +
+ self._member_names_)
+
+ @property
+ def __members__(cls):
+ """Returns a mapping of member name->value.
+
+ This mapping lists all enum members, including aliases. Note that this
+ is a copy of the internal mapping.
+
+ """
+ return cls._member_map_.copy()
+
+ def __getattr__(cls, name):
+ """Return the enum member matching `name`
+
+ We use __getattr__ instead of descriptors or inserting into the enum
+ class' __dict__ in order to support `name` and `value` being both
+ properties for enum members (which live in the class' __dict__) and
+ enum members themselves.
+
+ """
+ if _is_dunder(name):
+ raise AttributeError(name)
+ try:
+ return cls._member_map_[name]
+ except KeyError:
+ raise AttributeError(name)
+
+ def __getitem__(cls, name):
+ return cls._member_map_[name]
+
+ def __iter__(cls):
+ return (cls._member_map_[name] for name in cls._member_names_)
+
+ def __reversed__(cls):
+ return (cls._member_map_[name] for name in reversed(cls._member_names_))
+
+ def __len__(cls):
+ return len(cls._member_names_)
+
+ __nonzero__ = __bool__
+
+ def __repr__(cls):
+ return "<enum %r>" % cls.__name__
+
+ def __setattr__(cls, name, value):
+ """Block attempts to reassign Enum members.
+
+ A simple assignment to the class namespace only changes one of the
+ several possible ways to get an Enum member from the Enum class,
+ resulting in an inconsistent Enumeration.
+
+ """
+ member_map = cls.__dict__.get('_member_map_', {})
+ if name in member_map:
+ raise AttributeError('Cannot reassign members.')
+ super(EnumMeta, cls).__setattr__(name, value)
+
+ def _create_(cls, class_name, names=None, module=None, type=None, start=1):
+ """Convenience method to create a new Enum class.
+
+ `names` can be:
+
+ * A string containing member names, separated either with spaces or
+ commas. Values are auto-numbered from 1.
+ * An iterable of member names. Values are auto-numbered from 1.
+ * An iterable of (member name, value) pairs.
+ * A mapping of member name -> value.
+
+ """
+ if pyver < 3.0:
+ # if class_name is unicode, attempt a conversion to ASCII
+ if isinstance(class_name, unicode):
+ try:
+ class_name = class_name.encode('ascii')
+ except UnicodeEncodeError:
+ raise TypeError('%r is not representable in ASCII' % class_name)
+ metacls = cls.__class__
+ if type is None:
+ bases = (cls, )
+ else:
+ bases = (type, cls)
+ classdict = metacls.__prepare__(class_name, bases)
+ _order_ = []
+
+ # special processing needed for names?
+ if isinstance(names, basestring):
+ names = names.replace(',', ' ').split()
+ if isinstance(names, (tuple, list)) and isinstance(names[0], basestring):
+ names = [(e, i+start) for (i, e) in enumerate(names)]
+
+ # Here, names is either an iterable of (name, value) or a mapping.
+ item = None # in case names is empty
+ for item in names:
+ if isinstance(item, basestring):
+ member_name, member_value = item, names[item]
+ else:
+ member_name, member_value = item
+ classdict[member_name] = member_value
+ _order_.append(member_name)
+ # only set _order_ in classdict if name/value was not from a mapping
+ if not isinstance(item, basestring):
+ classdict['_order_'] = ' '.join(_order_)
+ enum_class = metacls.__new__(metacls, class_name, bases, classdict)
+
+ # TODO: replace the frame hack if a blessed way to know the calling
+ # module is ever developed
+ if module is None:
+ try:
+ module = _sys._getframe(2).f_globals['__name__']
+ except (AttributeError, ValueError):
+ pass
+ if module is None:
+ _make_class_unpicklable(enum_class)
+ else:
+ enum_class.__module__ = module
+
+ return enum_class
+
+ @staticmethod
+ def _get_mixins_(bases):
+ """Returns the type for creating enum members, and the first inherited
+ enum class.
+
+ bases: the tuple of bases that was given to __new__
+
+ """
+ if not bases or Enum is None:
+ return object, Enum
+
+
+ # double check that we are not subclassing a class with existing
+ # enumeration members; while we're at it, see if any other data
+ # type has been mixed in so we can use the correct __new__
+ member_type = first_enum = None
+ for base in bases:
+ if (base is not Enum and
+ issubclass(base, Enum) and
+ base._member_names_):
+ raise TypeError("Cannot extend enumerations")
+ # base is now the last base in bases
+ if not issubclass(base, Enum):
+ raise TypeError("new enumerations must be created as "
+ "`ClassName([mixin_type,] enum_type)`")
+
+ # get correct mix-in type (either mix-in type of Enum subclass, or
+ # first base if last base is Enum)
+ if not issubclass(bases[0], Enum):
+ member_type = bases[0] # first data type
+ first_enum = bases[-1] # enum type
+ else:
+ for base in bases[0].__mro__:
+ # most common: (IntEnum, int, Enum, object)
+ # possible: (<Enum 'AutoIntEnum'>, <Enum 'IntEnum'>,
+ # <class 'int'>, <Enum 'Enum'>,
+ # <class 'object'>)
+ if issubclass(base, Enum):
+ if first_enum is None:
+ first_enum = base
+ else:
+ if member_type is None:
+ member_type = base
+
+ return member_type, first_enum
+
+ if pyver < 3.0:
+ @staticmethod
+ def _find_new_(classdict, member_type, first_enum):
+ """Returns the __new__ to be used for creating the enum members.
+
+ classdict: the class dictionary given to __new__
+ member_type: the data type whose __new__ will be used by default
+ first_enum: enumeration to check for an overriding __new__
+
+ """
+            # now find the correct __new__, checking to see if one was defined
+ # by the user; also check earlier enum classes in case a __new__ was
+ # saved as __member_new__
+ __new__ = classdict.get('__new__', None)
+ if __new__:
+ return None, True, True # __new__, save_new, use_args
+
+ N__new__ = getattr(None, '__new__')
+ O__new__ = getattr(object, '__new__')
+ if Enum is None:
+ E__new__ = N__new__
+ else:
+ E__new__ = Enum.__dict__['__new__']
+ # check all possibles for __member_new__ before falling back to
+ # __new__
+ for method in ('__member_new__', '__new__'):
+ for possible in (member_type, first_enum):
+ try:
+ target = possible.__dict__[method]
+ except (AttributeError, KeyError):
+ target = getattr(possible, method, None)
+ if target not in [
+ None,
+ N__new__,
+ O__new__,
+ E__new__,
+ ]:
+ if method == '__member_new__':
+ classdict['__new__'] = target
+ return None, False, True
+ if isinstance(target, staticmethod):
+ target = target.__get__(member_type)
+ __new__ = target
+ break
+ if __new__ is not None:
+ break
+ else:
+ __new__ = object.__new__
+
+ # if a non-object.__new__ is used then whatever value/tuple was
+ # assigned to the enum member name will be passed to __new__ and to the
+ # new enum member's __init__
+ if __new__ is object.__new__:
+ use_args = False
+ else:
+ use_args = True
+
+ return __new__, False, use_args
+ else:
+ @staticmethod
+ def _find_new_(classdict, member_type, first_enum):
+ """Returns the __new__ to be used for creating the enum members.
+
+ classdict: the class dictionary given to __new__
+ member_type: the data type whose __new__ will be used by default
+ first_enum: enumeration to check for an overriding __new__
+
+ """
+            # now find the correct __new__, checking to see if one was defined
+ # by the user; also check earlier enum classes in case a __new__ was
+ # saved as __member_new__
+ __new__ = classdict.get('__new__', None)
+
+ # should __new__ be saved as __member_new__ later?
+ save_new = __new__ is not None
+
+ if __new__ is None:
+ # check all possibles for __member_new__ before falling back to
+ # __new__
+ for method in ('__member_new__', '__new__'):
+ for possible in (member_type, first_enum):
+ target = getattr(possible, method, None)
+ if target not in (
+ None,
+ None.__new__,
+ object.__new__,
+ Enum.__new__,
+ ):
+ __new__ = target
+ break
+ if __new__ is not None:
+ break
+ else:
+ __new__ = object.__new__
+
+ # if a non-object.__new__ is used then whatever value/tuple was
+ # assigned to the enum member name will be passed to __new__ and to the
+ # new enum member's __init__
+ if __new__ is object.__new__:
+ use_args = False
+ else:
+ use_args = True
+
+ return __new__, save_new, use_args
+
+
+########################################################
+# In order to support Python 2 and 3 with a single
+# codebase we have to create the Enum methods separately
+# and then use the `type(name, bases, dict)` method to
+# create the class.
+########################################################
+temp_enum_dict = {}
+temp_enum_dict['__doc__'] = "Generic enumeration.\n\n Derive from this class to define new enumerations.\n\n"
+
+def __new__(cls, value):
+ # all enum instances are actually created during class construction
+ # without calling this method; this method is called by the metaclass'
+ # __call__ (i.e. Color(3) ), and by pickle
+ if type(value) is cls:
+ # For lookups like Color(Color.red)
+ value = value.value
+ #return value
+ # by-value search for a matching enum member
+ # see if it's in the reverse mapping (for hashable values)
+ try:
+ if value in cls._value2member_map_:
+ return cls._value2member_map_[value]
+ except TypeError:
+ # not there, now do long search -- O(n) behavior
+ for member in cls._member_map_.values():
+ if member.value == value:
+ return member
+ raise ValueError("%s is not a valid %s" % (value, cls.__name__))
+temp_enum_dict['__new__'] = __new__
+del __new__
+
+def __repr__(self):
+ return "<%s.%s: %r>" % (
+ self.__class__.__name__, self._name_, self._value_)
+temp_enum_dict['__repr__'] = __repr__
+del __repr__
+
+def __str__(self):
+ return "%s.%s" % (self.__class__.__name__, self._name_)
+temp_enum_dict['__str__'] = __str__
+del __str__
+
+if pyver >= 3.0:
+ def __dir__(self):
+ added_behavior = [
+ m
+ for cls in self.__class__.mro()
+ for m in cls.__dict__
+ if m[0] != '_' and m not in self._member_map_
+ ]
+ return (['__class__', '__doc__', '__module__', ] + added_behavior)
+ temp_enum_dict['__dir__'] = __dir__
+ del __dir__
+
+def __format__(self, format_spec):
+ # mixed-in Enums should use the mixed-in type's __format__, otherwise
+ # we can get strange results with the Enum name showing up instead of
+ # the value
+
+ # pure Enum branch
+ if self._member_type_ is object:
+ cls = str
+ val = str(self)
+ # mix-in branch
+ else:
+ cls = self._member_type_
+ val = self.value
+ return cls.__format__(val, format_spec)
+temp_enum_dict['__format__'] = __format__
+del __format__
+
+
+####################################
+# Python versions earlier than 2.6 use __cmp__
+
+if pyver < 2.6:
+
+ def __cmp__(self, other):
+ if type(other) is self.__class__:
+ if self is other:
+ return 0
+ return -1
+ return NotImplemented
+ temp_enum_dict['__cmp__'] = __cmp__
+ del __cmp__
+
+else:
+
+ def __le__(self, other):
+ raise TypeError("unorderable types: %s() <= %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__le__'] = __le__
+ del __le__
+
+ def __lt__(self, other):
+ raise TypeError("unorderable types: %s() < %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__lt__'] = __lt__
+ del __lt__
+
+ def __ge__(self, other):
+ raise TypeError("unorderable types: %s() >= %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__ge__'] = __ge__
+ del __ge__
+
+ def __gt__(self, other):
+ raise TypeError("unorderable types: %s() > %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__gt__'] = __gt__
+ del __gt__
+
+
+def __eq__(self, other):
+ if type(other) is self.__class__:
+ return self is other
+ return NotImplemented
+temp_enum_dict['__eq__'] = __eq__
+del __eq__
+
+def __ne__(self, other):
+ if type(other) is self.__class__:
+ return self is not other
+ return NotImplemented
+temp_enum_dict['__ne__'] = __ne__
+del __ne__
+
+def __hash__(self):
+ return hash(self._name_)
+temp_enum_dict['__hash__'] = __hash__
+del __hash__
+
+def __reduce_ex__(self, proto):
+ return self.__class__, (self._value_, )
+temp_enum_dict['__reduce_ex__'] = __reduce_ex__
+del __reduce_ex__
+
+# _RouteClassAttributeToGetattr is used to provide access to the `name`
+# and `value` properties of enum members while keeping some measure of
+# protection from modification, and still allowing an enumeration
+# to have members named `name` and `value`. This works because enumeration
+# members are not set directly on the enum class -- __getattr__ is
+# used to look them up.
+
+@_RouteClassAttributeToGetattr
+def name(self):
+ return self._name_
+temp_enum_dict['name'] = name
+del name
+
+@_RouteClassAttributeToGetattr
+def value(self):
+ return self._value_
+temp_enum_dict['value'] = value
+del value
+
+@classmethod
+def _convert(cls, name, module, filter, source=None):
+ """
+ Create a new Enum subclass that replaces a collection of global constants
+ """
+ # convert all constants from source (or module) that pass filter() to
+ # a new Enum called name, and export the enum and its members back to
+ # module;
+ # also, replace the __reduce_ex__ method so unpickling works in
+ # previous Python versions
+ module_globals = vars(_sys.modules[module])
+ if source:
+ source = vars(source)
+ else:
+ source = module_globals
+ members = dict((name, value) for name, value in source.items() if filter(name))
+ cls = cls(name, members, module=module)
+ cls.__reduce_ex__ = _reduce_ex_by_name
+ module_globals.update(cls.__members__)
+ module_globals[name] = cls
+ return cls
+temp_enum_dict['_convert'] = _convert
+del _convert
+
+Enum = EnumMeta('Enum', (object, ), temp_enum_dict)
+del temp_enum_dict
+
+# Enum has now been created
+###########################
+
+class IntEnum(int, Enum):
+ """Enum where members are also (and must be) ints"""
+
+def _reduce_ex_by_name(self, proto):
+ return self.name
+
+def unique(enumeration):
+ """Class decorator that ensures only unique members exist in an enumeration."""
+ duplicates = []
+ for name, member in enumeration.__members__.items():
+ if name != member.name:
+ duplicates.append((name, member.name))
+ if duplicates:
+ duplicate_names = ', '.join(
+ ["%s -> %s" % (alias, name) for (alias, name) in duplicates]
+ )
+ raise ValueError('duplicate names found in %r: %s' %
+ (enumeration, duplicate_names)
+ )
+ return enumeration
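
Taken together, the pieces assembled into temp_enum_dict above (by-value lookup in __new__, the name/value attributes routed through _RouteClassAttributeToGetattr, and the unique decorator) produce the familiar enum behaviour. The following is a minimal usage sketch, assuming the vendored package is importable as `enum`; the Color and Mistake classes are illustrative examples, not part of the patch:

    from enum import Enum, unique

    class Color(Enum):
        red = 1
        green = 2
        blue = 3

    # by-value lookup goes through the __new__ defined above
    assert Color(2) is Color.green
    # by-name lookup is handled by EnumMeta (defined earlier in this file)
    assert Color['green'] is Color.green

    # name and value are routed through _RouteClassAttributeToGetattr
    assert Color.blue.name == 'blue' and Color.blue.value == 3

    # unique() raises ValueError as soon as it finds an alias
    try:
        @unique
        class Mistake(Enum):
            one = 1
            two = 2
            dos = 2  # alias of 'two'
    except ValueError as exc:
        print(exc)  # duplicate names found in <enum 'Mistake'>: dos -> two
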
diff --git a/third_party/python/enum34/enum/doc/enum.pdf b/third_party/python/enum34/enum/doc/enum.pdf
new file mode 100644
index 0000000000..8c1383a495
--- /dev/null
+++ b/third_party/python/enum34/enum/doc/enum.pdf
@@ -0,0 +1,2237 @@
[Raw PDF stream data for enum/doc/enum.pdf omitted: a ReportLab-generated rendering of the enum34 documentation, "enum --- support for enumerations", covering Module Contents, Creating an Enum, programmatic access to members, duplicating members and values, comparisons, allowed members and attributes, restricted subclassing, pickling, the functional API, derived enumerations (IntEnum), the unique decorator, and recipes such as AutoNumber, UniqueEnum, OrderedEnum, and Planet.]
+Q
+Q
+q
+1 0 0 1 62.69291 83.42362 cm
+q
+q
+1 0 0 1 0 0 cm
+q
+1 0 0 1 6.6 6.6 cm
+q
+.662745 .662745 .662745 RG
+.5 w
+.960784 .960784 .862745 rg
+n -6 -6 468.6898 216 re B*
+Q
+q
+BT 1 0 0 1 0 194 Tm 12 TL /F4 10 Tf 0 0 0 rg (>) Tj (>) Tj (>) Tj ( class OrderedEnum\(Enum\):) Tj T* (... def __ge__\(self, other\):) Tj T* (... if self.__class__ is other.__class__:) Tj T* (... return self._value_ ) Tj (>) Tj (= other._value_) Tj T* (... return NotImplemented) Tj T* (... def __gt__\(self, other\):) Tj T* (... if self.__class__ is other.__class__:) Tj T* (... return self._value_ ) Tj (>) Tj ( other._value_) Tj T* (... return NotImplemented) Tj T* (... def __le__\(self, other\):) Tj T* (... if self.__class__ is other.__class__:) Tj T* (... return self._value_ ) Tj (<) Tj (= other._value_) Tj T* (... return NotImplemented) Tj T* (... def __lt__\(self, other\):) Tj T* (... if self.__class__ is other.__class__:) Tj T* (... return self._value_ ) Tj (<) Tj ( other._value_) Tj T* (... return NotImplemented) Tj T* ET
+Q
+Q
+Q
+Q
+Q
+
+endstream
+endobj
+66 0 obj
+<< /Length 4039 >>
+stream
+1 0 0 1 0 0 cm BT /F1 12 Tf 14.4 TL ET
+q
+1 0 0 1 62.69291 619.8236 cm
+q
+q
+1 0 0 1 0 0 cm
+q
+1 0 0 1 6.6 6.6 cm
+q
+.662745 .662745 .662745 RG
+.5 w
+.960784 .960784 .862745 rg
+n -6 -6 468.6898 144 re B*
+Q
+q
+BT 1 0 0 1 0 122 Tm 12 TL /F4 10 Tf 0 0 0 rg (...) Tj T* (>) Tj (>) Tj (>) Tj ( class Grade\(OrderedEnum\):) Tj T* (... __ordered__ = 'A B C D F') Tj T* (... A = 5) Tj T* (... B = 4) Tj T* (... C = 3) Tj T* (... D = 2) Tj T* (... F = 1) Tj T* (...) Tj T* (>) Tj (>) Tj (>) Tj ( Grade.C ) Tj (<) Tj ( Grade.A) Tj T* (True) Tj T* ET
+Q
+Q
+Q
+Q
+Q
+q
+1 0 0 1 62.69291 589.8236 cm
+q
+BT 1 0 0 1 0 3 Tm 18 TL /F3 15 Tf 0 0 0 rg (Planet) Tj T* ET
+Q
+Q
+q
+1 0 0 1 62.69291 571.8236 cm
+q
+BT 1 0 0 1 0 2 Tm 12 TL /F1 10 Tf 0 0 0 rg (If ) Tj /F4 10 Tf 0 0 0 rg (__new__ ) Tj /F1 10 Tf 0 0 0 rg (or ) Tj /F4 10 Tf 0 0 0 rg (__init__ ) Tj /F1 10 Tf 0 0 0 rg (is defined the value of the enum member will be passed to those methods:) Tj T* ET
+Q
+Q
+q
+1 0 0 1 62.69291 286.6236 cm
+q
+q
+1 0 0 1 0 0 cm
+q
+1 0 0 1 6.6 6.6 cm
+q
+.662745 .662745 .662745 RG
+.5 w
+.960784 .960784 .862745 rg
+n -6 -6 468.6898 276 re B*
+Q
+q
+BT 1 0 0 1 0 254 Tm 12 TL /F4 10 Tf 0 0 0 rg (>) Tj (>) Tj (>) Tj ( class Planet\(Enum\):) Tj T* (... MERCURY = \(3.303e+23, 2.4397e6\)) Tj T* (... VENUS = \(4.869e+24, 6.0518e6\)) Tj T* (... EARTH = \(5.976e+24, 6.37814e6\)) Tj T* (... MARS = \(6.421e+23, 3.3972e6\)) Tj T* (... JUPITER = \(1.9e+27, 7.1492e7\)) Tj T* (... SATURN = \(5.688e+26, 6.0268e7\)) Tj T* (... URANUS = \(8.686e+25, 2.5559e7\)) Tj T* (... NEPTUNE = \(1.024e+26, 2.4746e7\)) Tj T* (... def __init__\(self, mass, radius\):) Tj T* (... self.mass = mass # in kilograms) Tj T* (... self.radius = radius # in meters) Tj T* (... @property) Tj T* (... def surface_gravity\(self\):) Tj T* (... # universal gravitational constant \(m3 kg-1 s-2\)) Tj T* (... G = 6.67300E-11) Tj T* (... return G * self.mass / \(self.radius * self.radius\)) Tj T* (...) Tj T* (>) Tj (>) Tj (>) Tj ( Planet.EARTH.value) Tj T* (\(5.976e+24, 6378140.0\)) Tj T* (>) Tj (>) Tj (>) Tj ( Planet.EARTH.surface_gravity) Tj T* (9.802652743337129) Tj T* ET
+Q
+Q
+Q
+Q
+Q
+q
+1 0 0 1 62.69291 253.6236 cm
+q
+BT 1 0 0 1 0 3.5 Tm 21 TL /F3 17.5 Tf 0 0 0 rg (How are Enums different?) Tj T* ET
+Q
+Q
+q
+1 0 0 1 62.69291 223.6236 cm
+q
+0 0 0 rg
+BT 1 0 0 1 0 14 Tm /F1 10 Tf 12 TL 2.090651 Tw (Enums have a custom metaclass that affects many aspects of both derived Enum classes and their) Tj T* 0 Tw (instances \(members\).) Tj T* ET
+Q
+Q
+q
+1 0 0 1 62.69291 193.6236 cm
+q
+BT 1 0 0 1 0 3 Tm 18 TL /F3 15 Tf 0 0 0 rg (Enum Classes) Tj T* ET
+Q
+Q
+q
+1 0 0 1 62.69291 127.6236 cm
+q
+BT 1 0 0 1 0 50 Tm 1.263615 Tw 12 TL /F1 10 Tf 0 0 0 rg (The ) Tj /F4 10 Tf 0 0 0 rg (EnumMeta ) Tj /F1 10 Tf 0 0 0 rg (metaclass is responsible for providing the ) Tj /F4 10 Tf 0 0 0 rg (__contains__) Tj /F1 10 Tf 0 0 0 rg (, ) Tj /F4 10 Tf 0 0 0 rg (__dir__) Tj /F1 10 Tf 0 0 0 rg (, ) Tj /F4 10 Tf 0 0 0 rg (__iter__ ) Tj /F1 10 Tf 0 0 0 rg (and) Tj T* 0 Tw 2.264724 Tw (other methods that allow one to do things with an ) Tj /F4 10 Tf 0 0 0 rg (Enum ) Tj /F1 10 Tf 0 0 0 rg (class that fail on a typical class, such as) Tj T* 0 Tw 2.594147 Tw /F4 10 Tf 0 0 0 rg (list\(Color\) ) Tj /F1 10 Tf 0 0 0 rg (or ) Tj /F4 10 Tf 0 0 0 rg (some_var) Tj ( ) Tj (in) Tj ( ) Tj (Color) Tj /F1 10 Tf 0 0 0 rg (. ) Tj /F4 10 Tf 0 0 0 rg (EnumMeta ) Tj /F1 10 Tf 0 0 0 rg (is responsible for ensuring that various other) Tj T* 0 Tw 2.196905 Tw (methods on the final ) Tj /F4 10 Tf 0 0 0 rg (Enum ) Tj /F1 10 Tf 0 0 0 rg (class are correct \(such as ) Tj /F4 10 Tf 0 0 0 rg (__new__) Tj /F1 10 Tf 0 0 0 rg (, ) Tj /F4 10 Tf 0 0 0 rg (__getnewargs__) Tj /F1 10 Tf 0 0 0 rg (, ) Tj /F4 10 Tf 0 0 0 rg (__str__ ) Tj /F1 10 Tf 0 0 0 rg (and) Tj T* 0 Tw /F4 10 Tf 0 0 0 rg (__repr__) Tj /F1 10 Tf 0 0 0 rg (\).) Tj T* ET
+Q
+Q
+q
+1 0 0 1 62.69291 115.6236 cm
+Q
+
+endstream
+endobj
+67 0 obj
+<< /Length 5453 >>
+stream
+1 0 0 1 0 0 cm BT /F1 12 Tf 14.4 TL ET
+q
+1 0 0 1 62.69291 682.0236 cm
+.960784 .960784 .862745 rg
+n 0 83 469.8898 -83 re f*
+0 0 0 rg
+BT /F1 10 Tf 12 TL ET
+BT 1 0 0 1 6 57 Tm T* ET
+q
+1 0 0 1 16 52 cm
+q
+0 0 0 rg
+BT 1 0 0 1 0 2.5 Tm /F6 12.5 Tf 15 TL (Note) Tj T* ET
+Q
+Q
+q
+1 0 0 1 16 16 cm
+q
+BT 1 0 0 1 0 14 Tm .686654 Tw 12 TL /F4 10 Tf 0 0 0 rg (__dir__ ) Tj /F1 10 Tf 0 0 0 rg (is not changed in the Python 2 line as it messes up some of the decorators included in) Tj T* 0 Tw (the stdlib.) Tj T* ET
+Q
+Q
+q
+1 J
+1 j
+.662745 .662745 .662745 RG
+.5 w
+n 0 83 m 469.8898 83 l S
+n 0 0 m 469.8898 0 l S
+n 0 0 m 0 83 l S
+n 469.8898 0 m 469.8898 83 l S
+Q
+Q
+q
+1 0 0 1 62.69291 676.0236 cm
+Q
+q
+1 0 0 1 62.69291 646.0236 cm
+q
+BT 1 0 0 1 0 3 Tm 18 TL /F3 15 Tf 0 0 0 rg (Enum Members \(aka instances\)) Tj T* ET
+Q
+Q
+q
+1 0 0 1 62.69291 604.0236 cm
+q
+BT 1 0 0 1 0 26 Tm .491984 Tw 12 TL /F1 10 Tf 0 0 0 rg (The most interesting thing about Enum members is that they are singletons. ) Tj /F4 10 Tf 0 0 0 rg (EnumMeta ) Tj /F1 10 Tf 0 0 0 rg (creates them all) Tj T* 0 Tw .084988 Tw (while it is creating the ) Tj /F4 10 Tf 0 0 0 rg (Enum ) Tj /F1 10 Tf 0 0 0 rg (class itself, and then puts a custom ) Tj /F4 10 Tf 0 0 0 rg (__new__ ) Tj /F1 10 Tf 0 0 0 rg (in place to ensure that no new) Tj T* 0 Tw (ones are ever instantiated by returning only the existing member instances.) Tj T* ET
+Q
+Q
+q
+1 0 0 1 62.69291 574.0236 cm
+q
+BT 1 0 0 1 0 3 Tm 18 TL /F3 15 Tf 0 0 0 rg (Finer Points) Tj T* ET
+Q
+Q
+q
+1 0 0 1 62.69291 520.0236 cm
+q
+BT 1 0 0 1 0 38 Tm 5.488555 Tw 12 TL /F4 10 Tf 0 0 0 rg (Enum ) Tj /F1 10 Tf 0 0 0 rg (members are instances of an ) Tj /F4 10 Tf 0 0 0 rg (Enum ) Tj /F1 10 Tf 0 0 0 rg (class, and even though they are accessible as) Tj T* 0 Tw 1.504147 Tw /F5 10 Tf 0 0 0 rg (EnumClass.member1.member2) Tj /F1 10 Tf 0 0 0 rg (, they should not be accessed directly from the member as that lookup) Tj T* 0 Tw .329985 Tw (may fail or, worse, return something besides the ) Tj /F4 10 Tf 0 0 0 rg (Enum ) Tj /F1 10 Tf 0 0 0 rg (member you were looking for \(changed in version) Tj T* 0 Tw (1.1.1\):) Tj T* ET
+Q
+Q
+q
+1 0 0 1 62.69291 390.8236 cm
+q
+q
+1 0 0 1 0 0 cm
+q
+1 0 0 1 6.6 6.6 cm
+q
+.662745 .662745 .662745 RG
+.5 w
+.960784 .960784 .862745 rg
+n -6 -6 468.6898 120 re B*
+Q
+q
+BT 1 0 0 1 0 98 Tm 12 TL /F4 10 Tf 0 0 0 rg (>) Tj (>) Tj (>) Tj ( class FieldTypes\(Enum\):) Tj T* (... name = 1) Tj T* (... value = 2) Tj T* (... size = 3) Tj T* (...) Tj T* (>) Tj (>) Tj (>) Tj ( FieldTypes.value.size) Tj T* (<) Tj (FieldTypes.size: 3) Tj (>) Tj T* (>) Tj (>) Tj (>) Tj ( FieldTypes.size.value) Tj T* (3) Tj T* ET
+Q
+Q
+Q
+Q
+Q
+q
+1 0 0 1 62.69291 370.8236 cm
+q
+BT 1 0 0 1 0 2 Tm 12 TL /F1 10 Tf 0 0 0 rg (The ) Tj /F4 10 Tf 0 0 0 rg (__members__ ) Tj /F1 10 Tf 0 0 0 rg (attribute is only available on the class.) Tj T* ET
+Q
+Q
+q
+1 0 0 1 62.69291 316.8236 cm
+q
+BT 1 0 0 1 0 38 Tm 1.374651 Tw 12 TL /F1 10 Tf 0 0 0 rg (In Python 3.x ) Tj /F4 10 Tf 0 0 0 rg (__members__ ) Tj /F1 10 Tf 0 0 0 rg (is always an ) Tj /F4 10 Tf 0 0 0 rg (OrderedDict) Tj /F1 10 Tf 0 0 0 rg (, with the order being the definition order. In) Tj T* 0 Tw 3.009213 Tw (Python 2.7 ) Tj /F4 10 Tf 0 0 0 rg (__members__ ) Tj /F1 10 Tf 0 0 0 rg (is an ) Tj /F4 10 Tf 0 0 0 rg (OrderedDict ) Tj /F1 10 Tf 0 0 0 rg (if ) Tj /F4 10 Tf 0 0 0 rg (__order__ ) Tj /F1 10 Tf 0 0 0 rg (was specified, and a plain ) Tj /F4 10 Tf 0 0 0 rg (dict) Tj T* 0 Tw 1.851318 Tw /F1 10 Tf 0 0 0 rg (otherwise. In all other Python 2.x versions ) Tj /F4 10 Tf 0 0 0 rg (__members__ ) Tj /F1 10 Tf 0 0 0 rg (is a plain ) Tj /F4 10 Tf 0 0 0 rg (dict ) Tj /F1 10 Tf 0 0 0 rg (even if ) Tj /F4 10 Tf 0 0 0 rg (__order__ ) Tj /F1 10 Tf 0 0 0 rg (was) Tj T* 0 Tw (specified as the ) Tj /F4 10 Tf 0 0 0 rg (OrderedDict ) Tj /F1 10 Tf 0 0 0 rg (type didn't exist yet.) Tj T* ET
+Q
+Q
+q
+1 0 0 1 62.69291 286.8236 cm
+q
+BT 1 0 0 1 0 14 Tm .106654 Tw 12 TL /F1 10 Tf 0 0 0 rg (If you give your ) Tj /F4 10 Tf 0 0 0 rg (Enum ) Tj /F1 10 Tf 0 0 0 rg (subclass extra methods, like the ) Tj 0 0 .501961 rg (Planet ) Tj 0 0 0 rg (class above, those methods will show up in) Tj T* 0 Tw (a ) Tj /F5 10 Tf 0 0 0 rg (dir ) Tj /F1 10 Tf 0 0 0 rg (of the member, but not of the class:) Tj T* ET
+Q
+Q
+q
+1 0 0 1 62.69291 205.6236 cm
+q
+q
+1 0 0 1 0 0 cm
+q
+1 0 0 1 6.6 6.6 cm
+q
+.662745 .662745 .662745 RG
+.5 w
+.960784 .960784 .862745 rg
+n -6 -6 468.6898 72 re B*
+Q
+q
+BT 1 0 0 1 0 50 Tm 12 TL /F4 10 Tf 0 0 0 rg (>) Tj (>) Tj (>) Tj ( dir\(Planet\)) Tj T* (['EARTH', 'JUPITER', 'MARS', 'MERCURY', 'NEPTUNE', 'SATURN', 'URANUS',) Tj T* ('VENUS', '__class__', '__doc__', '__members__', '__module__']) Tj T* (>) Tj (>) Tj (>) Tj ( dir\(Planet.EARTH\)) Tj T* (['__class__', '__doc__', '__module__', 'name', 'surface_gravity', 'value']) Tj T* ET
+Q
+Q
+Q
+Q
+Q
+q
+1 0 0 1 62.69291 161.6236 cm
+q
+BT 1 0 0 1 0 26 Tm .938935 Tw 12 TL /F1 10 Tf 0 0 0 rg (A ) Tj /F4 10 Tf 0 0 0 rg (__new__ ) Tj /F1 10 Tf 0 0 0 rg (method will only be used for the creation of the ) Tj /F4 10 Tf 0 0 0 rg (Enum ) Tj /F1 10 Tf 0 0 0 rg (members -- after that it is replaced.) Tj T* 0 Tw .949461 Tw (This means if you wish to change how ) Tj /F4 10 Tf 0 0 0 rg (Enum ) Tj /F1 10 Tf 0 0 0 rg (members are looked up you either have to write a helper) Tj T* 0 Tw (function or a ) Tj /F4 10 Tf 0 0 0 rg (classmethod) Tj /F1 10 Tf 0 0 0 rg (.) Tj T* ET
+Q
+Q
+
+endstream
+endobj
+68 0 obj
+<< /Nums [ 0 69 0 R 1 70 0 R 2 71 0 R 3 72 0 R 4 73 0 R
+ 5 74 0 R 6 75 0 R 7 76 0 R 8 77 0 R 9 78 0 R
+ 10 79 0 R 11 80 0 R ] >>
+endobj
+69 0 obj
+<< /S /D /St 1 >>
+endobj
+70 0 obj
+<< /S /D /St 2 >>
+endobj
+71 0 obj
+<< /S /D /St 3 >>
+endobj
+72 0 obj
+<< /S /D /St 4 >>
+endobj
+73 0 obj
+<< /S /D /St 5 >>
+endobj
+74 0 obj
+<< /S /D /St 6 >>
+endobj
+75 0 obj
+<< /S /D /St 7 >>
+endobj
+76 0 obj
+<< /S /D /St 8 >>
+endobj
+77 0 obj
+<< /S /D /St 9 >>
+endobj
+78 0 obj
+<< /S /D /St 10 >>
+endobj
+79 0 obj
+<< /S /D /St 11 >>
+endobj
+80 0 obj
+<< /S /D /St 12 >>
+endobj
+xref
+0 81
+0000000000 65535 f
+0000000075 00000 n
+0000000160 00000 n
+0000000270 00000 n
+0000000383 00000 n
+0000000498 00000 n
+0000000606 00000 n
+0000000777 00000 n
+0000000948 00000 n
+0000001066 00000 n
+0000001237 00000 n
+0000001477 00000 n
+0000001687 00000 n
+0000001897 00000 n
+0000002107 00000 n
+0000002279 00000 n
+0000002402 00000 n
+0000002574 00000 n
+0000002746 00000 n
+0000002989 00000 n
+0000003161 00000 n
+0000003333 00000 n
+0000003569 00000 n
+0000003779 00000 n
+0000003989 00000 n
+0000004199 00000 n
+0000004409 00000 n
+0000004619 00000 n
+0000004791 00000 n
+0000005020 00000 n
+0000005129 00000 n
+0000005373 00000 n
+0000005451 00000 n
+0000005571 00000 n
+0000005705 00000 n
+0000005886 00000 n
+0000006039 00000 n
+0000006168 00000 n
+0000006332 00000 n
+0000006488 00000 n
+0000006614 00000 n
+0000006746 00000 n
+0000006924 00000 n
+0000007036 00000 n
+0000007147 00000 n
+0000007315 00000 n
+0000007413 00000 n
+0000007591 00000 n
+0000007706 00000 n
+0000007834 00000 n
+0000007963 00000 n
+0000008074 00000 n
+0000008243 00000 n
+0000008360 00000 n
+0000008508 00000 n
+0000008625 00000 n
+0000008771 00000 n
+0000015286 00000 n
+0000019517 00000 n
+0000023365 00000 n
+0000027828 00000 n
+0000032406 00000 n
+0000037090 00000 n
+0000042519 00000 n
+0000046717 00000 n
+0000053882 00000 n
+0000058097 00000 n
+0000062193 00000 n
+0000067703 00000 n
+0000067856 00000 n
+0000067893 00000 n
+0000067930 00000 n
+0000067967 00000 n
+0000068004 00000 n
+0000068041 00000 n
+0000068078 00000 n
+0000068115 00000 n
+0000068152 00000 n
+0000068189 00000 n
+0000068227 00000 n
+0000068265 00000 n
+trailer
+<< /ID
+ % ReportLab generated PDF document -- digest (http://www.reportlab.com)
+ [(<}|~gm\352\320\235=\001p\220v\224\336) (<}|~gm\352\320\235=\001p\220v\224\336)]
+ /Info 30 0 R /Root 29 0 R /Size 81 >>
+startxref
+68303
+%%EOF
diff --git a/third_party/python/enum34/enum/doc/enum.rst b/third_party/python/enum34/enum/doc/enum.rst
new file mode 100644
index 0000000000..3afc238210
--- /dev/null
+++ b/third_party/python/enum34/enum/doc/enum.rst
@@ -0,0 +1,735 @@
+``enum`` --- support for enumerations
+========================================
+
+.. :synopsis: enumerations are sets of symbolic names bound to unique, constant
+ values.
+.. :moduleauthor:: Ethan Furman <ethan@stoneleaf.us>
+.. :sectionauthor:: Barry Warsaw <barry@python.org>,
+.. :sectionauthor:: Eli Bendersky <eliben@gmail.com>,
+.. :sectionauthor:: Ethan Furman <ethan@stoneleaf.us>
+
+----------------
+
+An enumeration is a set of symbolic names (members) bound to unique, constant
+values. Within an enumeration, the members can be compared by identity, and
+the enumeration itself can be iterated over.
+
+
+Module Contents
+---------------
+
+This module defines two enumeration classes that can be used to define unique
+sets of names and values: ``Enum`` and ``IntEnum``. It also defines
+one decorator, ``unique``.
+
+``Enum``
+
+Base class for creating enumerated constants. See section `Functional API`_
+for an alternate construction syntax.
+
+``IntEnum``
+
+Base class for creating enumerated constants that are also subclasses of ``int``.
+
+``unique``
+
+Enum class decorator that ensures only one name is bound to any one value.
+
+
+Creating an Enum
+----------------
+
+Enumerations are created using the ``class`` syntax, which makes them
+easy to read and write. An alternative creation method is described in
+`Functional API`_. To define an enumeration, subclass ``Enum`` as
+follows::
+
+ >>> from enum import Enum
+ >>> class Color(Enum):
+ ... red = 1
+ ... green = 2
+ ... blue = 3
+
+Note: Nomenclature
+
+ - The class ``Color`` is an *enumeration* (or *enum*)
+ - The attributes ``Color.red``, ``Color.green``, etc., are
+ *enumeration members* (or *enum members*).
+ - The enum members have *names* and *values* (the name of
+ ``Color.red`` is ``red``, the value of ``Color.blue`` is
+ ``3``, etc.)
+
+Note:
+
+ Even though we use the ``class`` syntax to create Enums, Enums
+ are not normal Python classes. See `How are Enums different?`_ for
+ more details.
+
+Enumeration members have human readable string representations::
+
+ >>> print(Color.red)
+ Color.red
+
+...while their ``repr`` has more information::
+
+ >>> print(repr(Color.red))
+ <Color.red: 1>
+
+The *type* of an enumeration member is the enumeration it belongs to::
+
+ >>> type(Color.red)
+ <enum 'Color'>
+ >>> isinstance(Color.green, Color)
+ True
+ >>>
+
+Enum members also have a property that contains just their item name::
+
+ >>> print(Color.red.name)
+ red
+
+Enumerations support iteration. In Python 3.x definition order is used; in
+Python 2.x the definition order is not available, so the class attribute
+``__order__`` can be used to supply it; otherwise, value order is used::
+
+ >>> class Shake(Enum):
+ ... __order__ = 'vanilla chocolate cookies mint' # only needed in 2.x
+ ... vanilla = 7
+ ... chocolate = 4
+ ... cookies = 9
+ ... mint = 3
+ ...
+ >>> for shake in Shake:
+ ... print(shake)
+ ...
+ Shake.vanilla
+ Shake.chocolate
+ Shake.cookies
+ Shake.mint
+
+In this package the ``__order__`` attribute is always removed from the class
+(and in 3.x it is also ignored, since definition order is used); the stdlib
+version likewise ignores it, but does not remove it.
+
+Enumeration members are hashable, so they can be used in dictionaries and sets::
+
+ >>> apples = {}
+ >>> apples[Color.red] = 'red delicious'
+ >>> apples[Color.green] = 'granny smith'
+ >>> apples == {Color.red: 'red delicious', Color.green: 'granny smith'}
+ True
+
+
+Programmatic access to enumeration members and their attributes
+---------------------------------------------------------------
+
+Sometimes it's useful to access members in enumerations programmatically (i.e.
+situations where ``Color.red`` won't do because the exact color is not known
+at program-writing time). ``Enum`` allows such access::
+
+ >>> Color(1)
+ <Color.red: 1>
+ >>> Color(3)
+ <Color.blue: 3>
+
+If you want to access enum members by *name*, use item access::
+
+ >>> Color['red']
+ <Color.red: 1>
+ >>> Color['green']
+ <Color.green: 2>
+
+If you have an enum member and need its ``name`` or ``value``::
+
+ >>> member = Color.red
+ >>> member.name
+ 'red'
+ >>> member.value
+ 1
+
+
+Duplicating enum members and values
+-----------------------------------
+
+Having two enum members (or any other attribute) with the same name is invalid;
+in Python 3.x this would raise an error, but in Python 2.x the second member
+simply overwrites the first::
+
+ >>> # python 2.x
+ >>> class Shape(Enum):
+ ... square = 2
+ ... square = 3
+ ...
+ >>> Shape.square
+ <Shape.square: 3>
+
+ >>> # python 3.x
+ >>> class Shape(Enum):
+ ... square = 2
+ ... square = 3
+ Traceback (most recent call last):
+ ...
+ TypeError: Attempted to reuse key: 'square'
+
+However, two enum members are allowed to have the same value. Given two members
+A and B with the same value (and A defined first), B is an alias to A. By-value
+lookup of the value of A and B will return A. By-name lookup of B will also
+return A::
+
+ >>> class Shape(Enum):
+ ... __order__ = 'square diamond circle alias_for_square' # only needed in 2.x
+ ... square = 2
+ ... diamond = 1
+ ... circle = 3
+ ... alias_for_square = 2
+ ...
+ >>> Shape.square
+ <Shape.square: 2>
+ >>> Shape.alias_for_square
+ <Shape.square: 2>
+ >>> Shape(2)
+ <Shape.square: 2>
+
+
+Allowing aliases is not always desirable. ``unique`` can be used to ensure
+that none exist in a particular enumeration::
+
+ >>> from enum import unique
+ >>> @unique
+ ... class Mistake(Enum):
+ ... __order__ = 'one two three four' # only needed in 2.x
+ ... one = 1
+ ... two = 2
+ ... three = 3
+ ... four = 3
+ Traceback (most recent call last):
+ ...
+ ValueError: duplicate names found in <enum 'Mistake'>: four -> three
+
+Iterating over the members of an enum does not provide the aliases::
+
+ >>> list(Shape)
+ [<Shape.square: 2>, <Shape.diamond: 1>, <Shape.circle: 3>]
+
+The special attribute ``__members__`` is a dictionary mapping names to members.
+It includes all names defined in the enumeration, including the aliases::
+
+ >>> for name, member in sorted(Shape.__members__.items()):
+ ... name, member
+ ...
+ ('alias_for_square', <Shape.square: 2>)
+ ('circle', <Shape.circle: 3>)
+ ('diamond', <Shape.diamond: 1>)
+ ('square', <Shape.square: 2>)
+
+The ``__members__`` attribute can be used for detailed programmatic access to
+the enumeration members. For example, finding all the aliases::
+
+ >>> [name for name, member in Shape.__members__.items() if member.name != name]
+ ['alias_for_square']
+
+Comparisons
+-----------
+
+Enumeration members are compared by identity::
+
+ >>> Color.red is Color.red
+ True
+ >>> Color.red is Color.blue
+ False
+ >>> Color.red is not Color.blue
+ True
+
+Ordered comparisons between enumeration values are *not* supported. Enum
+members are not integers (but see `IntEnum`_ below)::
+
+ >>> Color.red < Color.blue
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ TypeError: unorderable types: Color() < Color()
+
+.. warning::
+
+ In Python 2 *everything* is ordered, even though the ordering may not
+ make sense. If you want your enumerations to have a sensible ordering
+ check out the `OrderedEnum`_ recipe below.
+
+
+Equality comparisons are defined though::
+
+ >>> Color.blue == Color.red
+ False
+ >>> Color.blue != Color.red
+ True
+ >>> Color.blue == Color.blue
+ True
+
+Comparisons against non-enumeration values will always compare not equal
+(again, ``IntEnum`` was explicitly designed to behave differently, see
+below)::
+
+ >>> Color.blue == 2
+ False
+
+
+Allowed members and attributes of enumerations
+----------------------------------------------
+
+The examples above use integers for enumeration values. Using integers is
+short and handy (and provided by default by the `Functional API`_), but not
+strictly enforced. In the vast majority of use-cases, one doesn't care what
+the actual value of an enumeration is. But if the value *is* important,
+enumerations can have arbitrary values.
+
+Enumerations are Python classes, and can have methods and special methods as
+usual. If we have this enumeration::
+
+ >>> class Mood(Enum):
+ ... funky = 1
+ ... happy = 3
+ ...
+ ... def describe(self):
+ ... # self is the member here
+ ... return self.name, self.value
+ ...
+ ... def __str__(self):
+ ... return 'my custom str! {0}'.format(self.value)
+ ...
+ ... @classmethod
+ ... def favorite_mood(cls):
+ ... # cls here is the enumeration
+ ... return cls.happy
+
+Then::
+
+ >>> Mood.favorite_mood()
+ <Mood.happy: 3>
+ >>> Mood.happy.describe()
+ ('happy', 3)
+ >>> str(Mood.funky)
+ 'my custom str! 1'
+
+The rules for what is allowed are as follows: ``_sunder_`` names (starting and
+ending with a single underscore) are reserved by enum and cannot be used;
+all other attributes defined within an enumeration will become members of this
+enumeration, with the exception of ``__dunder__`` names and descriptors (methods
+are also descriptors).
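+
+For example (an informal sketch; the ``Animal`` class is invented just for
+this illustration), a method defined in the class body stays a method and
+does not become a member::
+
+    >>> class Animal(Enum):
+    ...     cat = 1
+    ...     dog = 2
+    ...     def speak(self):
+    ...         return 'I am a %s' % self.name
+    ...
+    >>> list(Animal)
+    [<Animal.cat: 1>, <Animal.dog: 2>]
+    >>> Animal.dog.speak()
+    'I am a dog'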
+
+Note:
+
+ If your enumeration defines ``__new__`` and/or ``__init__`` then
+ whatever value(s) were given to the enum member will be passed into
+ those methods. See `Planet`_ for an example.
+
+
+Restricted subclassing of enumerations
+--------------------------------------
+
+Subclassing an enumeration is allowed only if the enumeration does not define
+any members. So this is forbidden::
+
+ >>> class MoreColor(Color):
+ ... pink = 17
+ Traceback (most recent call last):
+ ...
+ TypeError: Cannot extend enumerations
+
+But this is allowed::
+
+ >>> class Foo(Enum):
+ ... def some_behavior(self):
+ ... pass
+ ...
+ >>> class Bar(Foo):
+ ... happy = 1
+ ... sad = 2
+ ...
+
+Allowing subclassing of enums that define members would lead to a violation of
+some important invariants of types and instances. On the other hand, it makes
+sense to allow sharing some common behavior between a group of enumerations.
+(See `OrderedEnum`_ for an example.)
+
+
+Pickling
+--------
+
+Enumerations can be pickled and unpickled::
+
+ >>> from enum.test_enum import Fruit
+ >>> from pickle import dumps, loads
+ >>> Fruit.tomato is loads(dumps(Fruit.tomato, 2))
+ True
+
+The usual restrictions for pickling apply: picklable enums must be defined in
+the top level of a module, since unpickling requires them to be importable
+from that module.
+
+Note:
+
+ With pickle protocol version 4 (introduced in Python 3.4) it is possible
+ to easily pickle enums nested in other classes.
+
+
+
+Functional API
+--------------
+
+The ``Enum`` class is callable, providing the following functional API::
+
+ >>> Animal = Enum('Animal', 'ant bee cat dog')
+ >>> Animal
+ <enum 'Animal'>
+ >>> Animal.ant
+ <Animal.ant: 1>
+ >>> Animal.ant.value
+ 1
+ >>> list(Animal)
+ [<Animal.ant: 1>, <Animal.bee: 2>, <Animal.cat: 3>, <Animal.dog: 4>]
+
+The semantics of this API resemble ``namedtuple``. The first argument
+of the call to ``Enum`` is the name of the enumeration.
+
+The second argument is the *source* of enumeration member names. It can be a
+whitespace-separated string of names, a sequence of names, a sequence of
+2-tuples with key/value pairs, or a mapping (e.g. dictionary) of names to
+values. The last two options enable assigning arbitrary values to
+enumerations; the others auto-assign increasing integers starting with 1. A
+new class derived from ``Enum`` is returned. In other words, the above
+assignment to ``Animal`` is equivalent to::
+
+ >>> class Animals(Enum):
+ ... ant = 1
+ ... bee = 2
+ ... cat = 3
+ ... dog = 4
+
+Pickling enums created with the functional API can be tricky as frame stack
+implementation details are used to try and figure out which module the
+enumeration is being created in (e.g. it will fail if you use a utility
+function in separate module, and also may not work on IronPython or Jython).
+The solution is to specify the module name explicitly as follows::
+
+ >>> Animals = Enum('Animals', 'ant bee cat dog', module=__name__)
+
+Derived Enumerations
+--------------------
+
+IntEnum
+^^^^^^^
+
+A variation of ``Enum`` is provided which is also a subclass of
+``int``. Members of an ``IntEnum`` can be compared to integers;
+by extension, integer enumerations of different types can also be compared
+to each other::
+
+ >>> from enum import IntEnum
+ >>> class Shape(IntEnum):
+ ... circle = 1
+ ... square = 2
+ ...
+ >>> class Request(IntEnum):
+ ... post = 1
+ ... get = 2
+ ...
+ >>> Shape == 1
+ False
+ >>> Shape.circle == 1
+ True
+ >>> Shape.circle == Request.post
+ True
+
+However, they still can't be compared to standard ``Enum`` enumerations::
+
+ >>> class Shape(IntEnum):
+ ... circle = 1
+ ... square = 2
+ ...
+ >>> class Color(Enum):
+ ... red = 1
+ ... green = 2
+ ...
+ >>> Shape.circle == Color.red
+ False
+
+``IntEnum`` values behave like integers in other ways you'd expect::
+
+ >>> int(Shape.circle)
+ 1
+ >>> ['a', 'b', 'c'][Shape.circle]
+ 'b'
+ >>> [i for i in range(Shape.square)]
+ [0, 1]
+
+For the vast majority of code, ``Enum`` is strongly recommended,
+since ``IntEnum`` breaks some semantic promises of an enumeration (by
+being comparable to integers, and thus by transitivity to other
+unrelated enumerations). It should be used only in special cases where
+there's no other choice; for example, when integer constants are
+replaced with enumerations and backwards compatibility is required with code
+that still expects integers.
+
+
+Others
+^^^^^^
+
+While ``IntEnum`` is part of the ``enum`` module, it would be very
+simple to implement independently::
+
+ class IntEnum(int, Enum):
+ pass
+
+This demonstrates how similar derived enumerations can be defined; for example
+a ``StrEnum`` that mixes in ``str`` instead of ``int``.
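+
+A minimal sketch of such a ``StrEnum`` (the ``Direction`` class below is made
+up purely for illustration)::
+
+    >>> class StrEnum(str, Enum):
+    ...     pass
+    ...
+    >>> class Direction(StrEnum):
+    ...     north = 'north'
+    ...     south = 'south'
+    ...
+    >>> Direction.north == 'north'
+    True
+    >>> Direction.north.upper()
+    'NORTH'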
+
+Some rules:
+
+1. When subclassing ``Enum``, mix-in types must appear before
+ ``Enum`` itself in the sequence of bases, as in the ``IntEnum``
+ example above.
+2. While ``Enum`` can have members of any type, once you mix in an
+ additional type, all the members must have values of that type, e.g.
+ ``int`` above. This restriction does not apply to mix-ins which only
+ add methods and don't specify another data type such as ``int`` or
+ ``str``.
+3. When another data type is mixed in, the ``value`` attribute is *not the
+   same* as the enum member itself, although it is equivalent and will compare
+   equal (see the sketch after this list).
+4. %-style formatting: ``%s`` and ``%r`` call ``Enum``'s ``__str__`` and
+ ``__repr__`` respectively; other codes (such as ``%i`` or ``%h`` for
+ IntEnum) treat the enum member as its mixed-in type.
+
+ Note: Prior to Python 3.4 there is a bug in ``str``'s %-formatting: ``int``
+ subclasses are printed as strings and not numbers when the ``%d``, ``%i``,
+ or ``%u`` codes are used.
+5. ``str.__format__`` (or ``format``) will use the mixed-in
+ type's ``__format__``. If the ``Enum``'s ``str`` or
+ ``repr`` is desired use the ``!s`` or ``!r`` ``str`` format codes.
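+
+As a small illustration of rule 3 (the ``Number`` class is invented for this
+example)::
+
+    >>> from enum import IntEnum
+    >>> class Number(IntEnum):
+    ...     one = 1
+    ...     two = 2
+    ...
+    >>> Number.one.value == Number.one    # equal...
+    True
+    >>> Number.one.value is Number.one    # ...but not the member itself
+    False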
+
+
+Decorators
+----------
+
+unique
+^^^^^^
+
+A ``class`` decorator specifically for enumerations. It searches an
+enumeration's ``__members__`` gathering any aliases it finds; if any are
+found ``ValueError`` is raised with the details::
+
+ >>> @unique
+ ... class NoDupes(Enum):
+ ... first = 'one'
+ ... second = 'two'
+ ... third = 'two'
+ Traceback (most recent call last):
+ ...
+ ValueError: duplicate names found in <enum 'NoDupes'>: third -> second
+
+
+Interesting examples
+--------------------
+
+While ``Enum`` and ``IntEnum`` are expected to cover the majority of
+use-cases, they cannot cover them all. Here are recipes for some different
+types of enumerations that can be used directly, or as examples for creating
+one's own.
+
+
+AutoNumber
+^^^^^^^^^^
+
+Avoids having to specify the value for each enumeration member::
+
+ >>> class AutoNumber(Enum):
+ ... def __new__(cls):
+ ... value = len(cls.__members__) + 1
+ ... obj = object.__new__(cls)
+ ... obj._value_ = value
+ ... return obj
+ ...
+ >>> class Color(AutoNumber):
+ ... __order__ = "red green blue" # only needed in 2.x
+ ... red = ()
+ ... green = ()
+ ... blue = ()
+ ...
+ >>> Color.green.value == 2
+ True
+
+Note:
+
+    The ``__new__`` method, if defined, is used during creation of the Enum
+    members; it is then replaced by Enum's ``__new__`` which is used after
+    class creation for lookup of existing members. Due to the way Enums are
+    supposed to behave, there is no way to customize Enum's ``__new__``.
+
+
+UniqueEnum
+^^^^^^^^^^
+
+Raises an error if a duplicate member name is found instead of creating an
+alias::
+
+ >>> class UniqueEnum(Enum):
+ ... def __init__(self, *args):
+ ... cls = self.__class__
+ ... if any(self.value == e.value for e in cls):
+ ... a = self.name
+ ... e = cls(self.value).name
+ ... raise ValueError(
+ ... "aliases not allowed in UniqueEnum: %r --> %r"
+ ... % (a, e))
+ ...
+ >>> class Color(UniqueEnum):
+ ... red = 1
+ ... green = 2
+ ... blue = 3
+ ... grene = 2
+ Traceback (most recent call last):
+ ...
+ ValueError: aliases not allowed in UniqueEnum: 'grene' --> 'green'
+
+
+OrderedEnum
+^^^^^^^^^^^
+
+An ordered enumeration that is not based on ``IntEnum`` and so maintains
+the normal ``Enum`` invariants (such as not being comparable to other
+enumerations)::
+
+ >>> class OrderedEnum(Enum):
+ ... def __ge__(self, other):
+ ... if self.__class__ is other.__class__:
+ ... return self._value_ >= other._value_
+ ... return NotImplemented
+ ... def __gt__(self, other):
+ ... if self.__class__ is other.__class__:
+ ... return self._value_ > other._value_
+ ... return NotImplemented
+ ... def __le__(self, other):
+ ... if self.__class__ is other.__class__:
+ ... return self._value_ <= other._value_
+ ... return NotImplemented
+ ... def __lt__(self, other):
+ ... if self.__class__ is other.__class__:
+ ... return self._value_ < other._value_
+ ... return NotImplemented
+ ...
+ >>> class Grade(OrderedEnum):
+ ... __ordered__ = 'A B C D F'
+ ... A = 5
+ ... B = 4
+ ... C = 3
+ ... D = 2
+ ... F = 1
+ ...
+ >>> Grade.C < Grade.A
+ True
+
+
+Planet
+^^^^^^
+
+If ``__new__`` or ``__init__`` is defined the value of the enum member
+will be passed to those methods::
+
+ >>> class Planet(Enum):
+ ... MERCURY = (3.303e+23, 2.4397e6)
+ ... VENUS = (4.869e+24, 6.0518e6)
+ ... EARTH = (5.976e+24, 6.37814e6)
+ ... MARS = (6.421e+23, 3.3972e6)
+ ... JUPITER = (1.9e+27, 7.1492e7)
+ ... SATURN = (5.688e+26, 6.0268e7)
+ ... URANUS = (8.686e+25, 2.5559e7)
+ ... NEPTUNE = (1.024e+26, 2.4746e7)
+ ... def __init__(self, mass, radius):
+ ... self.mass = mass # in kilograms
+ ... self.radius = radius # in meters
+ ... @property
+ ... def surface_gravity(self):
+ ... # universal gravitational constant (m3 kg-1 s-2)
+ ... G = 6.67300E-11
+ ... return G * self.mass / (self.radius * self.radius)
+ ...
+ >>> Planet.EARTH.value
+ (5.976e+24, 6378140.0)
+ >>> Planet.EARTH.surface_gravity
+ 9.802652743337129
+
+
+How are Enums different?
+------------------------
+
+Enums have a custom metaclass that affects many aspects of both derived Enum
+classes and their instances (members).
+
+
+Enum Classes
+^^^^^^^^^^^^
+
+The ``EnumMeta`` metaclass is responsible for providing the
+``__contains__``, ``__dir__``, ``__iter__`` and other methods that
+allow one to do things with an ``Enum`` class that fail on a typical
+class, such as ``list(Color)`` or ``some_var in Color``. ``EnumMeta`` is
+responsible for ensuring that various other methods on the final ``Enum``
+class are correct (such as ``__new__``, ``__getnewargs__``,
+``__str__`` and ``__repr__``).
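+
+For instance (a quick sketch; ``Toggle`` is an invented example class)::
+
+    >>> class Toggle(Enum):
+    ...     on = 1
+    ...     off = 2
+    ...
+    >>> list(Toggle)
+    [<Toggle.on: 1>, <Toggle.off: 2>]
+    >>> Toggle.off in Toggle
+    True
+    >>> len(Toggle)
+    2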
+
+.. note::
+
+ ``__dir__`` is not changed in the Python 2 line as it messes up some
+ of the decorators included in the stdlib.
+
+
+Enum Members (aka instances)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The most interesting thing about Enum members is that they are singletons.
+``EnumMeta`` creates them all while it is creating the ``Enum``
+class itself, and then puts a custom ``__new__`` in place to ensure
+that no new ones are ever instantiated by returning only the existing
+member instances.
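+
+A short demonstration (``Coin`` is a made-up class for this sketch)::
+
+    >>> class Coin(Enum):
+    ...     heads = 1
+    ...     tails = 2
+    ...
+    >>> Coin(1) is Coin.heads
+    True
+    >>> Coin(Coin.tails) is Coin.tails
+    True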
+
+
+Finer Points
+^^^^^^^^^^^^
+
+``Enum`` members are instances of an ``Enum`` class, and even though they
+are accessible as ``EnumClass.member1.member2``, they should not be
+accessed directly from the member as that lookup may fail or, worse,
+return something besides the ``Enum`` member you were looking for
+(changed in version 1.1.1)::
+
+ >>> class FieldTypes(Enum):
+ ... name = 1
+ ... value = 2
+ ... size = 3
+ ...
+ >>> FieldTypes.value.size
+ <FieldTypes.size: 3>
+ >>> FieldTypes.size.value
+ 3
+
+The ``__members__`` attribute is only available on the class.
+
+In Python 3.x ``__members__`` is always an ``OrderedDict``, with the order being
+the definition order. In Python 2.7 ``__members__`` is an ``OrderedDict`` if
+``__order__`` was specified, and a plain ``dict`` otherwise. In all other Python
+2.x versions ``__members__`` is a plain ``dict`` even if ``__order__`` was specified
+as the ``OrderedDict`` type didn't exist yet.
+
+If you give your ``Enum`` subclass extra methods, like the `Planet`_
+class above, those methods will show up in a ``dir`` of the member,
+but not of the class::
+
+ >>> dir(Planet)
+ ['EARTH', 'JUPITER', 'MARS', 'MERCURY', 'NEPTUNE', 'SATURN', 'URANUS',
+ 'VENUS', '__class__', '__doc__', '__members__', '__module__']
+ >>> dir(Planet.EARTH)
+ ['__class__', '__doc__', '__module__', 'name', 'surface_gravity', 'value']
+
+A ``__new__`` method will only be used for the creation of the
+``Enum`` members -- after that it is replaced. This means if you wish to
+change how ``Enum`` members are looked up you either have to write a
+helper function or a ``classmethod``.
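+
+A possible sketch of such a helper (the ``Signal`` class and its
+``from_anything`` classmethod are invented for this example)::
+
+    >>> class Signal(Enum):
+    ...     red = 1
+    ...     green = 2
+    ...     @classmethod
+    ...     def from_anything(cls, arg):
+    ...         # accept an existing member, a value, or a name
+    ...         if isinstance(arg, cls):
+    ...             return arg
+    ...         try:
+    ...             return cls(arg)
+    ...         except ValueError:
+    ...             return cls[arg]
+    ...
+    >>> Signal.from_anything(1) is Signal.red
+    True
+    >>> Signal.from_anything('green') is Signal.green
+    True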
diff --git a/third_party/python/enum34/enum/test.py b/third_party/python/enum34/enum/test.py
new file mode 100644
index 0000000000..d9edfaee40
--- /dev/null
+++ b/third_party/python/enum34/enum/test.py
@@ -0,0 +1,1820 @@
+from pickle import dumps, loads, PicklingError, HIGHEST_PROTOCOL
+import sys
+import unittest
+pyver = float('%s.%s' % sys.version_info[:2])
+if pyver < 2.5:
+ sys.path.insert(0, '.')
+import enum
+from enum import Enum, IntEnum, unique, EnumMeta
+
+if pyver < 2.6:
+ from __builtin__ import enumerate as bltin_enumerate
+ def enumerate(thing, start=0):
+ result = []
+ for i, item in bltin_enumerate(thing):
+ i = i + start
+ result.append((i, item))
+ return result
+
+try:
+ any
+except NameError:
+ def any(iterable):
+ for element in iterable:
+ if element:
+ return True
+ return False
+
+try:
+ unicode
+except NameError:
+ unicode = str
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ OrderedDict = None
+
+# for pickle tests
+try:
+ class Stooges(Enum):
+ LARRY = 1
+ CURLY = 2
+ MOE = 3
+except Exception:
+ Stooges = sys.exc_info()[1]
+
+try:
+ class IntStooges(int, Enum):
+ LARRY = 1
+ CURLY = 2
+ MOE = 3
+except Exception:
+ IntStooges = sys.exc_info()[1]
+
+try:
+ class FloatStooges(float, Enum):
+ LARRY = 1.39
+ CURLY = 2.72
+ MOE = 3.142596
+except Exception:
+ FloatStooges = sys.exc_info()[1]
+
+# for pickle test and subclass tests
+try:
+ class StrEnum(str, Enum):
+ 'accepts only string values'
+ class Name(StrEnum):
+ BDFL = 'Guido van Rossum'
+ FLUFL = 'Barry Warsaw'
+except Exception:
+ Name = sys.exc_info()[1]
+
+try:
+ Question = Enum('Question', 'who what when where why', module=__name__)
+except Exception:
+ Question = sys.exc_info()[1]
+
+try:
+ Answer = Enum('Answer', 'him this then there because')
+except Exception:
+ Answer = sys.exc_info()[1]
+
+try:
+ Theory = Enum('Theory', 'rule law supposition', qualname='spanish_inquisition')
+except Exception:
+ Theory = sys.exc_info()[1]
+
+# for doctests
+try:
+ class Fruit(Enum):
+ tomato = 1
+ banana = 2
+ cherry = 3
+except Exception:
+ pass
+
+def test_pickle_dump_load(assertion, source, target=None,
+ protocol=(0, HIGHEST_PROTOCOL)):
+ start, stop = protocol
+ failures = []
+ for protocol in range(start, stop+1):
+ try:
+ if target is None:
+ assertion(loads(dumps(source, protocol=protocol)) is source)
+ else:
+ assertion(loads(dumps(source, protocol=protocol)), target)
+ except Exception:
+ exc, tb = sys.exc_info()[1:]
+ failures.append('%2d: %s' %(protocol, exc))
+ if failures:
+ raise ValueError('Failed with protocols: %s' % ', '.join(failures))
+
+def test_pickle_exception(assertion, exception, obj,
+ protocol=(0, HIGHEST_PROTOCOL)):
+ start, stop = protocol
+ failures = []
+ for protocol in range(start, stop+1):
+ try:
+ assertion(exception, dumps, obj, protocol=protocol)
+ except Exception:
+ exc = sys.exc_info()[1]
+ failures.append('%d: %s %s' % (protocol, exc.__class__.__name__, exc))
+ if failures:
+ raise ValueError('Failed with protocols: %s' % ', '.join(failures))
+
+
+class TestHelpers(unittest.TestCase):
+ # _is_descriptor, _is_sunder, _is_dunder
+
+ def test_is_descriptor(self):
+ class foo:
+ pass
+ for attr in ('__get__','__set__','__delete__'):
+ obj = foo()
+ self.assertFalse(enum._is_descriptor(obj))
+ setattr(obj, attr, 1)
+ self.assertTrue(enum._is_descriptor(obj))
+
+ def test_is_sunder(self):
+ for s in ('_a_', '_aa_'):
+ self.assertTrue(enum._is_sunder(s))
+
+ for s in ('a', 'a_', '_a', '__a', 'a__', '__a__', '_a__', '__a_', '_',
+ '__', '___', '____', '_____',):
+ self.assertFalse(enum._is_sunder(s))
+
+ def test_is_dunder(self):
+ for s in ('__a__', '__aa__'):
+ self.assertTrue(enum._is_dunder(s))
+ for s in ('a', 'a_', '_a', '__a', 'a__', '_a_', '_a__', '__a_', '_',
+ '__', '___', '____', '_____',):
+ self.assertFalse(enum._is_dunder(s))
+
+
+class TestEnum(unittest.TestCase):
+ def setUp(self):
+ class Season(Enum):
+ SPRING = 1
+ SUMMER = 2
+ AUTUMN = 3
+ WINTER = 4
+ self.Season = Season
+
+ class Konstants(float, Enum):
+ E = 2.7182818
+ PI = 3.1415926
+ TAU = 2 * PI
+ self.Konstants = Konstants
+
+ class Grades(IntEnum):
+ A = 5
+ B = 4
+ C = 3
+ D = 2
+ F = 0
+ self.Grades = Grades
+
+ class Directional(str, Enum):
+ EAST = 'east'
+ WEST = 'west'
+ NORTH = 'north'
+ SOUTH = 'south'
+ self.Directional = Directional
+
+ from datetime import date
+ class Holiday(date, Enum):
+ NEW_YEAR = 2013, 1, 1
+ IDES_OF_MARCH = 2013, 3, 15
+ self.Holiday = Holiday
+
+ if pyver >= 3.0: # do not specify custom `dir` on previous versions
+ def test_dir_on_class(self):
+ Season = self.Season
+ self.assertEqual(
+ set(dir(Season)),
+ set(['__class__', '__doc__', '__members__', '__module__',
+ 'SPRING', 'SUMMER', 'AUTUMN', 'WINTER']),
+ )
+
+ def test_dir_on_item(self):
+ Season = self.Season
+ self.assertEqual(
+ set(dir(Season.WINTER)),
+ set(['__class__', '__doc__', '__module__', 'name', 'value']),
+ )
+
+ def test_dir_with_added_behavior(self):
+ class Test(Enum):
+ this = 'that'
+ these = 'those'
+ def wowser(self):
+ return ("Wowser! I'm %s!" % self.name)
+ self.assertEqual(
+ set(dir(Test)),
+ set(['__class__', '__doc__', '__members__', '__module__', 'this', 'these']),
+ )
+ self.assertEqual(
+ set(dir(Test.this)),
+ set(['__class__', '__doc__', '__module__', 'name', 'value', 'wowser']),
+ )
+
+ def test_dir_on_sub_with_behavior_on_super(self):
+ # see issue22506
+ class SuperEnum(Enum):
+ def invisible(self):
+ return "did you see me?"
+ class SubEnum(SuperEnum):
+ sample = 5
+ self.assertEqual(
+ set(dir(SubEnum.sample)),
+ set(['__class__', '__doc__', '__module__', 'name', 'value', 'invisible']),
+ )
+
+ if pyver >= 2.7: # OrderedDict first available here
+ def test_members_is_ordereddict_if_ordered(self):
+ class Ordered(Enum):
+ __order__ = 'first second third'
+ first = 'bippity'
+ second = 'boppity'
+ third = 'boo'
+ self.assertTrue(type(Ordered.__members__) is OrderedDict)
+
+ def test_members_is_ordereddict_if_not_ordered(self):
+ class Unordered(Enum):
+ this = 'that'
+ these = 'those'
+ self.assertTrue(type(Unordered.__members__) is OrderedDict)
+
+ if pyver >= 3.0: # all objects are ordered in Python 2.x
+ def test_members_is_always_ordered(self):
+ class AlwaysOrdered(Enum):
+ first = 1
+ second = 2
+ third = 3
+ self.assertTrue(type(AlwaysOrdered.__members__) is OrderedDict)
+
+ def test_comparisons(self):
+ def bad_compare():
+ Season.SPRING > 4
+ Season = self.Season
+ self.assertNotEqual(Season.SPRING, 1)
+ self.assertRaises(TypeError, bad_compare)
+
+ class Part(Enum):
+ SPRING = 1
+ CLIP = 2
+ BARREL = 3
+
+ self.assertNotEqual(Season.SPRING, Part.SPRING)
+ def bad_compare():
+ Season.SPRING < Part.CLIP
+ self.assertRaises(TypeError, bad_compare)
+
+ def test_enum_in_enum_out(self):
+ Season = self.Season
+ self.assertTrue(Season(Season.WINTER) is Season.WINTER)
+
+ def test_enum_value(self):
+ Season = self.Season
+ self.assertEqual(Season.SPRING.value, 1)
+
+ def test_intenum_value(self):
+ self.assertEqual(IntStooges.CURLY.value, 2)
+
+ def test_enum(self):
+ Season = self.Season
+ lst = list(Season)
+ self.assertEqual(len(lst), len(Season))
+ self.assertEqual(len(Season), 4, Season)
+ self.assertEqual(
+ [Season.SPRING, Season.SUMMER, Season.AUTUMN, Season.WINTER], lst)
+
+ for i, season in enumerate('SPRING SUMMER AUTUMN WINTER'.split()):
+ i += 1
+ e = Season(i)
+ self.assertEqual(e, getattr(Season, season))
+ self.assertEqual(e.value, i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, season)
+ self.assertTrue(e in Season)
+ self.assertTrue(type(e) is Season)
+ self.assertTrue(isinstance(e, Season))
+ self.assertEqual(str(e), 'Season.' + season)
+ self.assertEqual(
+ repr(e),
+ '<Season.%s: %s>' % (season, i),
+ )
+
+ def test_value_name(self):
+ Season = self.Season
+ self.assertEqual(Season.SPRING.name, 'SPRING')
+ self.assertEqual(Season.SPRING.value, 1)
+ def set_name(obj, new_value):
+ obj.name = new_value
+ def set_value(obj, new_value):
+ obj.value = new_value
+ self.assertRaises(AttributeError, set_name, Season.SPRING, 'invierno', )
+ self.assertRaises(AttributeError, set_value, Season.SPRING, 2)
+
+ def test_attribute_deletion(self):
+ class Season(Enum):
+ SPRING = 1
+ SUMMER = 2
+ AUTUMN = 3
+ WINTER = 4
+
+ def spam(cls):
+ pass
+
+ self.assertTrue(hasattr(Season, 'spam'))
+ del Season.spam
+ self.assertFalse(hasattr(Season, 'spam'))
+
+ self.assertRaises(AttributeError, delattr, Season, 'SPRING')
+ self.assertRaises(AttributeError, delattr, Season, 'DRY')
+ self.assertRaises(AttributeError, delattr, Season.SPRING, 'name')
+
+ def test_bool_of_class(self):
+ class Empty(Enum):
+ pass
+ self.assertTrue(bool(Empty))
+
+ def test_bool_of_member(self):
+ class Count(Enum):
+ zero = 0
+ one = 1
+ two = 2
+ for member in Count:
+ self.assertTrue(bool(member))
+
+ def test_invalid_names(self):
+ def create_bad_class_1():
+ class Wrong(Enum):
+ mro = 9
+ def create_bad_class_2():
+ class Wrong(Enum):
+ _reserved_ = 3
+ self.assertRaises(ValueError, create_bad_class_1)
+ self.assertRaises(ValueError, create_bad_class_2)
+
+ def test_contains(self):
+ Season = self.Season
+ self.assertTrue(Season.AUTUMN in Season)
+ self.assertTrue(3 not in Season)
+
+ val = Season(3)
+ self.assertTrue(val in Season)
+
+ class OtherEnum(Enum):
+ one = 1; two = 2
+ self.assertTrue(OtherEnum.two not in Season)
+
+ if pyver >= 2.6: # when `format` came into being
+
+ def test_format_enum(self):
+ Season = self.Season
+ self.assertEqual('{0}'.format(Season.SPRING),
+ '{0}'.format(str(Season.SPRING)))
+ self.assertEqual( '{0:}'.format(Season.SPRING),
+ '{0:}'.format(str(Season.SPRING)))
+ self.assertEqual('{0:20}'.format(Season.SPRING),
+ '{0:20}'.format(str(Season.SPRING)))
+ self.assertEqual('{0:^20}'.format(Season.SPRING),
+ '{0:^20}'.format(str(Season.SPRING)))
+ self.assertEqual('{0:>20}'.format(Season.SPRING),
+ '{0:>20}'.format(str(Season.SPRING)))
+ self.assertEqual('{0:<20}'.format(Season.SPRING),
+ '{0:<20}'.format(str(Season.SPRING)))
+
+ def test_format_enum_custom(self):
+ class TestFloat(float, Enum):
+ one = 1.0
+ two = 2.0
+ def __format__(self, spec):
+ return 'TestFloat success!'
+ self.assertEqual('{0}'.format(TestFloat.one), 'TestFloat success!')
+
+ def assertFormatIsValue(self, spec, member):
+ self.assertEqual(spec.format(member), spec.format(member.value))
+
+ def test_format_enum_date(self):
+ Holiday = self.Holiday
+ self.assertFormatIsValue('{0}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:20}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:^20}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:>20}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:<20}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:%Y %m}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:%Y %m %M:00}', Holiday.IDES_OF_MARCH)
+
+ def test_format_enum_float(self):
+ Konstants = self.Konstants
+ self.assertFormatIsValue('{0}', Konstants.TAU)
+ self.assertFormatIsValue('{0:}', Konstants.TAU)
+ self.assertFormatIsValue('{0:20}', Konstants.TAU)
+ self.assertFormatIsValue('{0:^20}', Konstants.TAU)
+ self.assertFormatIsValue('{0:>20}', Konstants.TAU)
+ self.assertFormatIsValue('{0:<20}', Konstants.TAU)
+ self.assertFormatIsValue('{0:n}', Konstants.TAU)
+ self.assertFormatIsValue('{0:5.2}', Konstants.TAU)
+ self.assertFormatIsValue('{0:f}', Konstants.TAU)
+
+ def test_format_enum_int(self):
+ Grades = self.Grades
+ self.assertFormatIsValue('{0}', Grades.C)
+ self.assertFormatIsValue('{0:}', Grades.C)
+ self.assertFormatIsValue('{0:20}', Grades.C)
+ self.assertFormatIsValue('{0:^20}', Grades.C)
+ self.assertFormatIsValue('{0:>20}', Grades.C)
+ self.assertFormatIsValue('{0:<20}', Grades.C)
+ self.assertFormatIsValue('{0:+}', Grades.C)
+ self.assertFormatIsValue('{0:08X}', Grades.C)
+ self.assertFormatIsValue('{0:b}', Grades.C)
+
+ def test_format_enum_str(self):
+ Directional = self.Directional
+ self.assertFormatIsValue('{0}', Directional.WEST)
+ self.assertFormatIsValue('{0:}', Directional.WEST)
+ self.assertFormatIsValue('{0:20}', Directional.WEST)
+ self.assertFormatIsValue('{0:^20}', Directional.WEST)
+ self.assertFormatIsValue('{0:>20}', Directional.WEST)
+ self.assertFormatIsValue('{0:<20}', Directional.WEST)
+
+ def test_hash(self):
+ Season = self.Season
+ dates = {}
+ dates[Season.WINTER] = '1225'
+ dates[Season.SPRING] = '0315'
+ dates[Season.SUMMER] = '0704'
+ dates[Season.AUTUMN] = '1031'
+ self.assertEqual(dates[Season.AUTUMN], '1031')
+
+ def test_enum_duplicates(self):
+ _order_ = "SPRING SUMMER AUTUMN WINTER"
+ class Season(Enum):
+ SPRING = 1
+ SUMMER = 2
+ AUTUMN = FALL = 3
+ WINTER = 4
+ ANOTHER_SPRING = 1
+ lst = list(Season)
+ self.assertEqual(
+ lst,
+ [Season.SPRING, Season.SUMMER,
+ Season.AUTUMN, Season.WINTER,
+ ])
+ self.assertTrue(Season.FALL is Season.AUTUMN)
+ self.assertEqual(Season.FALL.value, 3)
+ self.assertEqual(Season.AUTUMN.value, 3)
+ self.assertTrue(Season(3) is Season.AUTUMN)
+ self.assertTrue(Season(1) is Season.SPRING)
+ self.assertEqual(Season.FALL.name, 'AUTUMN')
+ self.assertEqual(
+ set([k for k,v in Season.__members__.items() if v.name != k]),
+ set(['FALL', 'ANOTHER_SPRING']),
+ )
+
+ if pyver >= 3.0:
+ cls = vars()
+ result = {'Enum':Enum}
+ exec("""def test_duplicate_name(self):
+ with self.assertRaises(TypeError):
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ red = 4
+
+ with self.assertRaises(TypeError):
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ def red(self):
+ return 'red'
+
+ with self.assertRaises(TypeError):
+ class Color(Enum):
+ @property
+
+ def red(self):
+ return 'redder'
+ red = 1
+ green = 2
+ blue = 3""",
+ result)
+ cls['test_duplicate_name'] = result['test_duplicate_name']
+
+ def test_enum_with_value_name(self):
+ class Huh(Enum):
+ name = 1
+ value = 2
+ self.assertEqual(
+ list(Huh),
+ [Huh.name, Huh.value],
+ )
+ self.assertTrue(type(Huh.name) is Huh)
+ self.assertEqual(Huh.name.name, 'name')
+ self.assertEqual(Huh.name.value, 1)
+
+ def test_intenum_from_scratch(self):
+ class phy(int, Enum):
+ pi = 3
+ tau = 2 * pi
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_intenum_inherited(self):
+ class IntEnum(int, Enum):
+ pass
+ class phy(IntEnum):
+ pi = 3
+ tau = 2 * pi
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_floatenum_from_scratch(self):
+ class phy(float, Enum):
+ pi = 3.1415926
+ tau = 2 * pi
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_floatenum_inherited(self):
+ class FloatEnum(float, Enum):
+ pass
+ class phy(FloatEnum):
+ pi = 3.1415926
+ tau = 2 * pi
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_strenum_from_scratch(self):
+ class phy(str, Enum):
+ pi = 'Pi'
+ tau = 'Tau'
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_strenum_inherited(self):
+ class StrEnum(str, Enum):
+ pass
+ class phy(StrEnum):
+ pi = 'Pi'
+ tau = 'Tau'
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_intenum(self):
+ class WeekDay(IntEnum):
+ SUNDAY = 1
+ MONDAY = 2
+ TUESDAY = 3
+ WEDNESDAY = 4
+ THURSDAY = 5
+ FRIDAY = 6
+ SATURDAY = 7
+
+ self.assertEqual(['a', 'b', 'c'][WeekDay.MONDAY], 'c')
+ self.assertEqual([i for i in range(WeekDay.TUESDAY)], [0, 1, 2])
+
+ lst = list(WeekDay)
+ self.assertEqual(len(lst), len(WeekDay))
+ self.assertEqual(len(WeekDay), 7)
+ target = 'SUNDAY MONDAY TUESDAY WEDNESDAY THURSDAY FRIDAY SATURDAY'
+ target = target.split()
+ for i, weekday in enumerate(target):
+ i += 1
+ e = WeekDay(i)
+ self.assertEqual(e, i)
+ self.assertEqual(int(e), i)
+ self.assertEqual(e.name, weekday)
+ self.assertTrue(e in WeekDay)
+ self.assertEqual(lst.index(e)+1, i)
+ self.assertTrue(0 < e < 8)
+ self.assertTrue(type(e) is WeekDay)
+ self.assertTrue(isinstance(e, int))
+ self.assertTrue(isinstance(e, Enum))
+
+ def test_intenum_duplicates(self):
+ class WeekDay(IntEnum):
+ __order__ = 'SUNDAY MONDAY TUESDAY WEDNESDAY THURSDAY FRIDAY SATURDAY'
+ SUNDAY = 1
+ MONDAY = 2
+ TUESDAY = TEUSDAY = 3
+ WEDNESDAY = 4
+ THURSDAY = 5
+ FRIDAY = 6
+ SATURDAY = 7
+ self.assertTrue(WeekDay.TEUSDAY is WeekDay.TUESDAY)
+ self.assertEqual(WeekDay(3).name, 'TUESDAY')
+ self.assertEqual([k for k,v in WeekDay.__members__.items()
+ if v.name != k], ['TEUSDAY', ])
+
+ def test_pickle_enum(self):
+ if isinstance(Stooges, Exception):
+ raise Stooges
+ test_pickle_dump_load(self.assertTrue, Stooges.CURLY)
+ test_pickle_dump_load(self.assertTrue, Stooges)
+
+ def test_pickle_int(self):
+ if isinstance(IntStooges, Exception):
+ raise IntStooges
+ test_pickle_dump_load(self.assertTrue, IntStooges.CURLY)
+ test_pickle_dump_load(self.assertTrue, IntStooges)
+
+ def test_pickle_float(self):
+ if isinstance(FloatStooges, Exception):
+ raise FloatStooges
+ test_pickle_dump_load(self.assertTrue, FloatStooges.CURLY)
+ test_pickle_dump_load(self.assertTrue, FloatStooges)
+
+ def test_pickle_enum_function(self):
+ if isinstance(Answer, Exception):
+ raise Answer
+ test_pickle_dump_load(self.assertTrue, Answer.him)
+ test_pickle_dump_load(self.assertTrue, Answer)
+
+ def test_pickle_enum_function_with_module(self):
+ if isinstance(Question, Exception):
+ raise Question
+ test_pickle_dump_load(self.assertTrue, Question.who)
+ test_pickle_dump_load(self.assertTrue, Question)
+
+ if pyver == 3.4:
+ def test_class_nested_enum_and_pickle_protocol_four(self):
+ # would normally just have this directly in the class namespace
+ class NestedEnum(Enum):
+ twigs = 'common'
+ shiny = 'rare'
+
+ self.__class__.NestedEnum = NestedEnum
+ self.NestedEnum.__qualname__ = '%s.NestedEnum' % self.__class__.__name__
+ test_pickle_exception(
+ self.assertRaises, PicklingError, self.NestedEnum.twigs,
+ protocol=(0, 3))
+ test_pickle_dump_load(self.assertTrue, self.NestedEnum.twigs,
+ protocol=(4, HIGHEST_PROTOCOL))
+
+ elif pyver == 3.5:
+ def test_class_nested_enum_and_pickle_protocol_four(self):
+ # would normally just have this directly in the class namespace
+ class NestedEnum(Enum):
+ twigs = 'common'
+ shiny = 'rare'
+
+ self.__class__.NestedEnum = NestedEnum
+ self.NestedEnum.__qualname__ = '%s.NestedEnum' % self.__class__.__name__
+ test_pickle_dump_load(self.assertTrue, self.NestedEnum.twigs,
+ protocol=(0, HIGHEST_PROTOCOL))
+
+ def test_exploding_pickle(self):
+ BadPickle = Enum('BadPickle', 'dill sweet bread-n-butter')
+ enum._make_class_unpicklable(BadPickle)
+ globals()['BadPickle'] = BadPickle
+ test_pickle_exception(self.assertRaises, TypeError, BadPickle.dill)
+ test_pickle_exception(self.assertRaises, PicklingError, BadPickle)
+
+ def test_string_enum(self):
+ class SkillLevel(str, Enum):
+ master = 'what is the sound of one hand clapping?'
+ journeyman = 'why did the chicken cross the road?'
+ apprentice = 'knock, knock!'
+ self.assertEqual(SkillLevel.apprentice, 'knock, knock!')
+
+ def test_getattr_getitem(self):
+ class Period(Enum):
+ morning = 1
+ noon = 2
+ evening = 3
+ night = 4
+ self.assertTrue(Period(2) is Period.noon)
+ self.assertTrue(getattr(Period, 'night') is Period.night)
+ self.assertTrue(Period['morning'] is Period.morning)
+
+ def test_getattr_dunder(self):
+ Season = self.Season
+ self.assertTrue(getattr(Season, '__hash__'))
+
+ def test_iteration_order(self):
+ class Season(Enum):
+ _order_ = 'SUMMER WINTER AUTUMN SPRING'
+ SUMMER = 2
+ WINTER = 4
+ AUTUMN = 3
+ SPRING = 1
+ self.assertEqual(
+ list(Season),
+ [Season.SUMMER, Season.WINTER, Season.AUTUMN, Season.SPRING],
+ )
+
+ def test_iteration_order_reversed(self):
+ self.assertEqual(
+ list(reversed(self.Season)),
+ [self.Season.WINTER, self.Season.AUTUMN, self.Season.SUMMER,
+ self.Season.SPRING]
+ )
+
+ def test_iteration_order_with_unorderable_values(self):
+ class Complex(Enum):
+ a = complex(7, 9)
+ b = complex(3.14, 2)
+ c = complex(1, -1)
+ d = complex(-77, 32)
+ self.assertEqual(
+ list(Complex),
+ [Complex.a, Complex.b, Complex.c, Complex.d],
+ )
+
+ def test_programatic_function_string(self):
+ SummerMonth = Enum('SummerMonth', 'june july august')
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_string_with_start(self):
+ SummerMonth = Enum('SummerMonth', 'june july august', start=10)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split(), 10):
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_string_list(self):
+ SummerMonth = Enum('SummerMonth', ['june', 'july', 'august'])
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_string_list_with_start(self):
+ SummerMonth = Enum('SummerMonth', ['june', 'july', 'august'], start=20)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split(), 20):
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_iterable(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ (('june', 1), ('july', 2), ('august', 3))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_from_dict(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ dict((('june', 1), ('july', 2), ('august', 3)))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ if pyver < 3.0:
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_type(self):
+ SummerMonth = Enum('SummerMonth', 'june july august', type=int)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_type_with_start(self):
+ SummerMonth = Enum('SummerMonth', 'june july august', type=int, start=30)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split(), 30):
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_type_from_subclass(self):
+ SummerMonth = IntEnum('SummerMonth', 'june july august')
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_type_from_subclass_with_start(self):
+ SummerMonth = IntEnum('SummerMonth', 'june july august', start=40)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split(), 40):
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_unicode(self):
+ SummerMonth = Enum('SummerMonth', unicode('june july august'))
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_unicode_list(self):
+ SummerMonth = Enum('SummerMonth', [unicode('june'), unicode('july'), unicode('august')])
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_unicode_iterable(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ ((unicode('june'), 1), (unicode('july'), 2), (unicode('august'), 3))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_from_unicode_dict(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ dict(((unicode('june'), 1), (unicode('july'), 2), (unicode('august'), 3)))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ if pyver < 3.0:
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_unicode_type(self):
+ SummerMonth = Enum('SummerMonth', unicode('june july august'), type=int)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_unicode_type_from_subclass(self):
+ SummerMonth = IntEnum('SummerMonth', unicode('june july august'))
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_unicode_class(self):
+ if pyver < 3.0:
+ class_names = unicode('SummerMonth'), 'S\xfcmm\xe9rM\xf6nth'.decode('latin1')
+ else:
+ class_names = 'SummerMonth', 'S\xfcmm\xe9rM\xf6nth'
+ for i, class_name in enumerate(class_names):
+ if pyver < 3.0 and i == 1:
+ self.assertRaises(TypeError, Enum, class_name, unicode('june july august'))
+ else:
+ SummerMonth = Enum(class_name, unicode('june july august'))
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e.value, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_subclassing(self):
+ if isinstance(Name, Exception):
+ raise Name
+ self.assertEqual(Name.BDFL, 'Guido van Rossum')
+ self.assertTrue(Name.BDFL, Name('Guido van Rossum'))
+ self.assertTrue(Name.BDFL is getattr(Name, 'BDFL'))
+ test_pickle_dump_load(self.assertTrue, Name.BDFL)
+
+ def test_extending(self):
+ def bad_extension():
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ class MoreColor(Color):
+ cyan = 4
+ magenta = 5
+ yellow = 6
+ self.assertRaises(TypeError, bad_extension)
+
+ def test_exclude_methods(self):
+ class whatever(Enum):
+ this = 'that'
+ these = 'those'
+ def really(self):
+ return 'no, not %s' % self.value
+ self.assertFalse(type(whatever.really) is whatever)
+ self.assertEqual(whatever.this.really(), 'no, not that')
+
+ def test_wrong_inheritance_order(self):
+ def wrong_inherit():
+ class Wrong(Enum, str):
+ NotHere = 'error before this point'
+ self.assertRaises(TypeError, wrong_inherit)
+
+ def test_intenum_transitivity(self):
+ class number(IntEnum):
+ one = 1
+ two = 2
+ three = 3
+ class numero(IntEnum):
+ uno = 1
+ dos = 2
+ tres = 3
+ self.assertEqual(number.one, numero.uno)
+ self.assertEqual(number.two, numero.dos)
+ self.assertEqual(number.three, numero.tres)
+
+ def test_introspection(self):
+ class Number(IntEnum):
+ one = 100
+ two = 200
+ self.assertTrue(Number.one._member_type_ is int)
+ self.assertTrue(Number._member_type_ is int)
+ class String(str, Enum):
+ yarn = 'soft'
+ rope = 'rough'
+ wire = 'hard'
+ self.assertTrue(String.yarn._member_type_ is str)
+ self.assertTrue(String._member_type_ is str)
+ class Plain(Enum):
+ vanilla = 'white'
+ one = 1
+ self.assertTrue(Plain.vanilla._member_type_ is object)
+ self.assertTrue(Plain._member_type_ is object)
+
+ def test_wrong_enum_in_call(self):
+ class Monochrome(Enum):
+ black = 0
+ white = 1
+ class Gender(Enum):
+ male = 0
+ female = 1
+ self.assertRaises(ValueError, Monochrome, Gender.male)
+
+ def test_wrong_enum_in_mixed_call(self):
+ class Monochrome(IntEnum):
+ black = 0
+ white = 1
+ class Gender(Enum):
+ male = 0
+ female = 1
+ self.assertRaises(ValueError, Monochrome, Gender.male)
+
+ def test_mixed_enum_in_call_1(self):
+ class Monochrome(IntEnum):
+ black = 0
+ white = 1
+ class Gender(IntEnum):
+ male = 0
+ female = 1
+ self.assertTrue(Monochrome(Gender.female) is Monochrome.white)
+
+ def test_mixed_enum_in_call_2(self):
+ class Monochrome(Enum):
+ black = 0
+ white = 1
+ class Gender(IntEnum):
+ male = 0
+ female = 1
+ self.assertTrue(Monochrome(Gender.male) is Monochrome.black)
+
+ def test_flufl_enum(self):
+ class Fluflnum(Enum):
+ def __int__(self):
+ return int(self.value)
+ class MailManOptions(Fluflnum):
+ option1 = 1
+ option2 = 2
+ option3 = 3
+ self.assertEqual(int(MailManOptions.option1), 1)
+
+ def test_no_such_enum_member(self):
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ self.assertRaises(ValueError, Color, 4)
+ self.assertRaises(KeyError, Color.__getitem__, 'chartreuse')
+
+ def test_new_repr(self):
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ def __repr__(self):
+ return "don't you just love shades of %s?" % self.name
+ self.assertEqual(
+ repr(Color.blue),
+ "don't you just love shades of blue?",
+ )
+
+ def test_inherited_repr(self):
+ class MyEnum(Enum):
+ def __repr__(self):
+ return "My name is %s." % self.name
+ class MyIntEnum(int, MyEnum):
+ this = 1
+ that = 2
+ theother = 3
+ self.assertEqual(repr(MyIntEnum.that), "My name is that.")
+
+ def test_multiple_mixin_mro(self):
+ class auto_enum(EnumMeta):
+ def __new__(metacls, cls, bases, classdict):
+ original_dict = classdict
+ classdict = enum._EnumDict()
+ for k, v in original_dict.items():
+ classdict[k] = v
+ temp = type(classdict)()
+ names = set(classdict._member_names)
+ i = 0
+ for k in classdict._member_names:
+ v = classdict[k]
+ if v == ():
+ v = i
+ else:
+ i = v
+ i += 1
+ temp[k] = v
+ for k, v in classdict.items():
+ if k not in names:
+ temp[k] = v
+ return super(auto_enum, metacls).__new__(
+ metacls, cls, bases, temp)
+
+ AutoNumberedEnum = auto_enum('AutoNumberedEnum', (Enum,), {})
+
+ AutoIntEnum = auto_enum('AutoIntEnum', (IntEnum,), {})
+
+ class TestAutoNumber(AutoNumberedEnum):
+ a = ()
+ b = 3
+ c = ()
+
+ class TestAutoInt(AutoIntEnum):
+ a = ()
+ b = 3
+ c = ()
+
+ def test_subclasses_with_getnewargs(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt' # needed for pickle protocol 4
+ def __new__(cls, *args):
+ _args = args
+ if len(args) < 1:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ def __getnewargs__(self):
+ return self._args
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int( other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp )
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI' # needed for pickle protocol 4
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ test_pickle_dump_load(self.assertTrue, NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ test_pickle_dump_load(self.assertTrue, NEI.y)
+
+ if pyver >= 3.4:
+ def test_subclasses_with_getnewargs_ex(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt' # needed for pickle protocol 4
+ def __new__(cls, *args):
+ _args = args
+ if len(args) < 2:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ def __getnewargs_ex__(self):
+ return self._args, {}
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "{}({!r}, {})".format(type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int( other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '({0} + {1})'.format(self.__name__, other.__name__),
+ temp )
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI' # needed for pickle protocol 4
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+
+ self.assertIs(NEI.__new__, Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ test_pickle_dump_load(self.assertEqual, NI5, 5, protocol=(4, HIGHEST_PROTOCOL))
+ self.assertEqual(NEI.y.value, 2)
+ test_pickle_dump_load(self.assertTrue, NEI.y, protocol=(4, HIGHEST_PROTOCOL))
+
+ def test_subclasses_with_reduce(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt' # needed for pickle protocol 4
+ def __new__(cls, *args):
+ _args = args
+ if len(args) < 1:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ def __reduce__(self):
+ return self.__class__, self._args
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int( other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp )
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI' # needed for pickle protocol 4
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ test_pickle_dump_load(self.assertEqual, NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ test_pickle_dump_load(self.assertTrue, NEI.y)
+
+ def test_subclasses_with_reduce_ex(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt' # needed for pickle protocol 4
+ def __new__(cls, *args):
+ _args = args
+ if len(args) < 1:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ def __reduce_ex__(self, proto):
+ return self.__class__, self._args
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int( other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp )
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI' # needed for pickle protocol 4
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ test_pickle_dump_load(self.assertEqual, NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ test_pickle_dump_load(self.assertTrue, NEI.y)
+
+ def test_subclasses_without_direct_pickle_support(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt'
+ def __new__(cls, *args):
+ _args = args
+ name, args = args[0], args[1:]
+ if len(args) == 0:
+ raise TypeError("name and value must be specified")
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int( other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp )
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI'
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ test_pickle_exception(self.assertRaises, TypeError, NEI.x)
+ test_pickle_exception(self.assertRaises, PicklingError, NEI)
+
+ def test_subclasses_without_direct_pickle_support_using_name(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt'
+ def __new__(cls, *args):
+ _args = args
+ name, args = args[0], args[1:]
+ if len(args) == 0:
+ raise TypeError("name and value must be specified")
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int( other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp )
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI'
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+ def __reduce_ex__(self, proto):
+ return getattr, (self.__class__, self._name_)
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ test_pickle_dump_load(self.assertTrue, NEI.y)
+ test_pickle_dump_load(self.assertTrue, NEI)
+
+ def test_tuple_subclass(self):
+ class SomeTuple(tuple, Enum):
+ __qualname__ = 'SomeTuple'
+ first = (1, 'for the money')
+ second = (2, 'for the show')
+ third = (3, 'for the music')
+ self.assertTrue(type(SomeTuple.first) is SomeTuple)
+ self.assertTrue(isinstance(SomeTuple.second, tuple))
+ self.assertEqual(SomeTuple.third, (3, 'for the music'))
+ globals()['SomeTuple'] = SomeTuple
+ test_pickle_dump_load(self.assertTrue, SomeTuple.first)
+
+ def test_duplicate_values_give_unique_enum_items(self):
+ class AutoNumber(Enum):
+ __order__ = 'enum_m enum_d enum_y'
+ enum_m = ()
+ enum_d = ()
+ enum_y = ()
+ def __new__(cls):
+ value = len(cls.__members__) + 1
+ obj = object.__new__(cls)
+ obj._value_ = value
+ return obj
+ def __int__(self):
+ return int(self._value_)
+ self.assertEqual(int(AutoNumber.enum_d), 2)
+ self.assertEqual(AutoNumber.enum_y.value, 3)
+ self.assertTrue(AutoNumber(1) is AutoNumber.enum_m)
+ self.assertEqual(
+ list(AutoNumber),
+ [AutoNumber.enum_m, AutoNumber.enum_d, AutoNumber.enum_y],
+ )
+
+ def test_inherited_new_from_enhanced_enum(self):
+ class AutoNumber2(Enum):
+ def __new__(cls):
+ value = len(cls.__members__) + 1
+ obj = object.__new__(cls)
+ obj._value_ = value
+ return obj
+ def __int__(self):
+ return int(self._value_)
+ class Color(AutoNumber2):
+ _order_ = 'red green blue'
+ red = ()
+ green = ()
+ blue = ()
+ self.assertEqual(len(Color), 3, "wrong number of elements: %d (should be %d)" % (len(Color), 3))
+ self.assertEqual(list(Color), [Color.red, Color.green, Color.blue])
+ if pyver >= 3.0:
+ self.assertEqual(list(map(int, Color)), [1, 2, 3])
+
+ def test_inherited_new_from_mixed_enum(self):
+ class AutoNumber3(IntEnum):
+ def __new__(cls):
+ value = len(cls.__members__) + 1
+ obj = int.__new__(cls, value)
+ obj._value_ = value
+ return obj
+ class Color(AutoNumber3):
+ red = ()
+ green = ()
+ blue = ()
+ self.assertEqual(len(Color), 3, "wrong number of elements: %d (should be %d)" % (len(Color), 3))
+ Color.red
+ Color.green
+ Color.blue
+
+ def test_equality(self):
+ class AlwaysEqual:
+ def __eq__(self, other):
+ return True
+ class OrdinaryEnum(Enum):
+ a = 1
+ self.assertEqual(AlwaysEqual(), OrdinaryEnum.a)
+ self.assertEqual(OrdinaryEnum.a, AlwaysEqual())
+
+ def test_ordered_mixin(self):
+ class OrderedEnum(Enum):
+ def __ge__(self, other):
+ if self.__class__ is other.__class__:
+ return self._value_ >= other._value_
+ return NotImplemented
+ def __gt__(self, other):
+ if self.__class__ is other.__class__:
+ return self._value_ > other._value_
+ return NotImplemented
+ def __le__(self, other):
+ if self.__class__ is other.__class__:
+ return self._value_ <= other._value_
+ return NotImplemented
+ def __lt__(self, other):
+ if self.__class__ is other.__class__:
+ return self._value_ < other._value_
+ return NotImplemented
+ class Grade(OrderedEnum):
+ __order__ = 'A B C D F'
+ A = 5
+ B = 4
+ C = 3
+ D = 2
+ F = 1
+ self.assertEqual(list(Grade), [Grade.A, Grade.B, Grade.C, Grade.D, Grade.F])
+ self.assertTrue(Grade.A > Grade.B)
+ self.assertTrue(Grade.F <= Grade.C)
+ self.assertTrue(Grade.D < Grade.A)
+ self.assertTrue(Grade.B >= Grade.B)
+
+ def test_extending2(self):
+ def bad_extension():
+ class Shade(Enum):
+ def shade(self):
+ print(self.name)
+ class Color(Shade):
+ red = 1
+ green = 2
+ blue = 3
+ class MoreColor(Color):
+ cyan = 4
+ magenta = 5
+ yellow = 6
+ self.assertRaises(TypeError, bad_extension)
+
+ def test_extending3(self):
+ class Shade(Enum):
+ def shade(self):
+ return self.name
+ class Color(Shade):
+ def hex(self):
+ return '%s hexlified!' % self.value
+ class MoreColor(Color):
+ cyan = 4
+ magenta = 5
+ yellow = 6
+ self.assertEqual(MoreColor.magenta.hex(), '5 hexlified!')
+
+ def test_no_duplicates(self):
+ def bad_duplicates():
+ class UniqueEnum(Enum):
+ def __init__(self, *args):
+ cls = self.__class__
+ if any(self.value == e.value for e in cls):
+ a = self.name
+ e = cls(self.value).name
+ raise ValueError(
+ "aliases not allowed in UniqueEnum: %r --> %r"
+ % (a, e)
+ )
+ class Color(UniqueEnum):
+ red = 1
+ green = 2
+ blue = 3
+ class Color(UniqueEnum):
+ red = 1
+ green = 2
+ blue = 3
+ grene = 2
+ self.assertRaises(ValueError, bad_duplicates)
+
+ def test_init(self):
+ class Planet(Enum):
+ MERCURY = (3.303e+23, 2.4397e6)
+ VENUS = (4.869e+24, 6.0518e6)
+ EARTH = (5.976e+24, 6.37814e6)
+ MARS = (6.421e+23, 3.3972e6)
+ JUPITER = (1.9e+27, 7.1492e7)
+ SATURN = (5.688e+26, 6.0268e7)
+ URANUS = (8.686e+25, 2.5559e7)
+ NEPTUNE = (1.024e+26, 2.4746e7)
+ def __init__(self, mass, radius):
+ self.mass = mass # in kilograms
+ self.radius = radius # in meters
+ @property
+ def surface_gravity(self):
+ # universal gravitational constant (m3 kg-1 s-2)
+ G = 6.67300E-11
+ return G * self.mass / (self.radius * self.radius)
+ self.assertEqual(round(Planet.EARTH.surface_gravity, 2), 9.80)
+ self.assertEqual(Planet.EARTH.value, (5.976e+24, 6.37814e6))
+
+ def test_nonhash_value(self):
+ class AutoNumberInAList(Enum):
+ def __new__(cls):
+ value = [len(cls.__members__) + 1]
+ obj = object.__new__(cls)
+ obj._value_ = value
+ return obj
+ class ColorInAList(AutoNumberInAList):
+ _order_ = 'red green blue'
+ red = ()
+ green = ()
+ blue = ()
+ self.assertEqual(list(ColorInAList), [ColorInAList.red, ColorInAList.green, ColorInAList.blue])
+ self.assertEqual(ColorInAList.red.value, [1])
+ self.assertEqual(ColorInAList([1]), ColorInAList.red)
+
+ def test_conflicting_types_resolved_in_new(self):
+ class LabelledIntEnum(int, Enum):
+ def __new__(cls, *args):
+ value, label = args
+ obj = int.__new__(cls, value)
+ obj.label = label
+ obj._value_ = value
+ return obj
+
+ class LabelledList(LabelledIntEnum):
+ unprocessed = (1, "Unprocessed")
+ payment_complete = (2, "Payment Complete")
+
+ self.assertEqual(list(LabelledList), [LabelledList.unprocessed, LabelledList.payment_complete])
+ self.assertEqual(LabelledList.unprocessed, 1)
+ self.assertEqual(LabelledList(1), LabelledList.unprocessed)
+
+ def test_empty_with_functional_api(self):
+ empty = enum.IntEnum('Foo', {})
+ self.assertEqual(len(empty), 0)
+
+
+class TestUnique(unittest.TestCase):
+ """2.4 doesn't allow class decorators, use function syntax."""
+
+ def test_unique_clean(self):
+ class Clean(Enum):
+ one = 1
+ two = 'dos'
+ tres = 4.0
+ unique(Clean)
+ class Cleaner(IntEnum):
+ single = 1
+ double = 2
+ triple = 3
+ unique(Cleaner)
+
+ def test_unique_dirty(self):
+ try:
+ class Dirty(Enum):
+ __order__ = 'one two tres'
+ one = 1
+ two = 'dos'
+ tres = 1
+ unique(Dirty)
+ except ValueError:
+ exc = sys.exc_info()[1]
+ message = exc.args[0]
+ self.assertTrue('tres -> one' in message)
+
+ try:
+ class Dirtier(IntEnum):
+ _order_ = 'single double triple turkey'
+ single = 1
+ double = 1
+ triple = 3
+ turkey = 3
+ unique(Dirtier)
+ except ValueError:
+ exc = sys.exc_info()[1]
+ message = exc.args[0]
+ self.assertTrue('double -> single' in message)
+ self.assertTrue('turkey -> triple' in message)
+
+
+class TestMe(unittest.TestCase):
+
+ pass
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/enum34/setup.cfg b/third_party/python/enum34/setup.cfg
new file mode 100644
index 0000000000..861a9f5542
--- /dev/null
+++ b/third_party/python/enum34/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/third_party/python/enum34/setup.py b/third_party/python/enum34/setup.py
new file mode 100644
index 0000000000..8b28400348
--- /dev/null
+++ b/third_party/python/enum34/setup.py
@@ -0,0 +1,99 @@
+import os
+import sys
+import setuptools
+from distutils.core import setup
+
+
+if sys.version_info[:2] < (2, 7):
+ required = ['ordereddict']
+else:
+ required = []
+
+long_desc = '''\
+enum --- support for enumerations
+========================================
+
+An enumeration is a set of symbolic names (members) bound to unique, constant
+values. Within an enumeration, the members can be compared by identity, and
+the enumeration itself can be iterated over.
+
+ from enum import Enum
+
+ class Fruit(Enum):
+ apple = 1
+ banana = 2
+ orange = 3
+
+ list(Fruit)
+ # [<Fruit.apple: 1>, <Fruit.banana: 2>, <Fruit.orange: 3>]
+
+ len(Fruit)
+ # 3
+
+ Fruit.banana
+ # <Fruit.banana: 2>
+
+ Fruit['banana']
+ # <Fruit.banana: 2>
+
+ Fruit(2)
+ # <Fruit.banana: 2>
+
+ Fruit.banana is Fruit['banana'] is Fruit(2)
+ # True
+
+ Fruit.banana.name
+ # 'banana'
+
+ Fruit.banana.value
+ # 2
+
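+The backport also provides IntEnum, whose members can be compared with, and
+used as, plain integers (an illustrative sketch; the class name is made up):
+
+    from enum import IntEnum
+
+    class Priority(IntEnum):
+        low = 1
+        high = 2
+
+    Priority.high > Priority.low
+    # True
+
+    Priority.low + 1 == Priority.high
+    # True
+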
+Repository and Issue Tracker at https://bitbucket.org/stoneleaf/enum34.
+'''
+
+py2_only = ()
+py3_only = ()
+make = [
+ 'rst2pdf enum/doc/enum.rst --output=enum/doc/enum.pdf',
+ ]
+
+
+data = dict(
+ name='enum34',
+ version='1.1.6',
+ url='https://bitbucket.org/stoneleaf/enum34',
+ packages=['enum'],
+ package_data={
+ 'enum' : [
+ 'LICENSE',
+ 'README',
+ 'doc/enum.rst',
+ 'doc/enum.pdf',
+ 'test.py',
+ ]
+ },
+ license='BSD License',
+ description='Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4',
+ long_description=long_desc,
+ provides=['enum'],
+ install_requires=required,
+ author='Ethan Furman',
+ author_email='ethan@stoneleaf.us',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: BSD License',
+ 'Programming Language :: Python',
+ 'Topic :: Software Development',
+ 'Programming Language :: Python :: 2.4',
+ 'Programming Language :: Python :: 2.5',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ ],
+ )
+
+if __name__ == '__main__':
+ setup(**data)
diff --git a/third_party/python/esprima/PKG-INFO b/third_party/python/esprima/PKG-INFO
new file mode 100644
index 0000000000..c2fee6ace6
--- /dev/null
+++ b/third_party/python/esprima/PKG-INFO
@@ -0,0 +1,143 @@
+Metadata-Version: 1.1
+Name: esprima
+Version: 4.0.1
+Summary: ECMAScript parsing infrastructure for multipurpose analysis in Python
+Home-page: https://github.com/Kronuz/esprima-python
+Author: German M. Bravo (Kronuz)
+Author-email: german.mb@gmail.com
+License: BSD License
+Description: |Donate| |PyPI Version| |PyPI License| |PyPI Format| |PyPI Status|
+
+ **Esprima** (`esprima.org <http://esprima.org>`__, BSD license) is a
+ high performance, standard-compliant
+ `ECMAScript <http://www.ecma-international.org/publications/standards/Ecma-262.htm>`__
+ parser officially written in ECMAScript (also popularly known as
+ `JavaScript <https://en.wikipedia.org/wiki/JavaScript>`__) and ported to
+ Python. Esprima is created and maintained by `Ariya
+ Hidayat <https://twitter.com/ariyahidayat>`__, with the help of `many
+ contributors <https://github.com/jquery/esprima/contributors>`__.
+
+        The Python port is a line-by-line manual translation and was created and is
+ maintained by `German Mendez Bravo
+ (Kronuz) <https://twitter.com/germbravo>`__.
+
+ Features
+ ~~~~~~~~
+
+ - Full support for ECMAScript 2017 (`ECMA-262 8th
+ Edition <http://www.ecma-international.org/publications/standards/Ecma-262.htm>`__)
+ - Sensible `syntax tree
+ format <https://github.com/estree/estree/blob/master/es5.md>`__ as
+ standardized by `ESTree project <https://github.com/estree/estree>`__
+ - Experimental support for `JSX <https://facebook.github.io/jsx/>`__, a
+ syntax extension for `React <https://facebook.github.io/react/>`__
+ - Optional tracking of syntax node location (index-based and
+ line-column)
+ - `Heavily tested <http://esprima.org/test/ci.html>`__ (~1500 `unit
+ tests <https://github.com/jquery/esprima/tree/master/test/fixtures>`__
+ with `full code
+ coverage <https://codecov.io/github/jquery/esprima>`__)
+
+ Installation
+ ~~~~~~~~~~~~
+
+ .. code:: shell
+
+ pip install esprima
+
+ API
+ ~~~
+
+ Esprima can be used to perform `lexical
+ analysis <https://en.wikipedia.org/wiki/Lexical_analysis>`__
+ (tokenization) or `syntactic
+ analysis <https://en.wikipedia.org/wiki/Parsing>`__ (parsing) of a
+ JavaScript program.
+
+ A simple example:
+
+ .. code:: javascript
+
+ >>> import esprima
+ >>> program = 'const answer = 42'
+
+ >>> esprima.tokenize(program)
+ [{
+ type: "Keyword",
+ value: "const"
+ }, {
+ type: "Identifier",
+ value: "answer"
+ }, {
+ type: "Punctuator",
+ value: "="
+ }, {
+ type: "Numeric",
+ value: "42"
+ }]
+
+ >>> esprima.parseScript(program)
+ {
+ body: [
+ {
+ kind: "const",
+ declarations: [
+ {
+ init: {
+ raw: "42",
+ type: "Literal",
+ value: 42
+ },
+ type: "VariableDeclarator",
+ id: {
+ type: "Identifier",
+ name: "answer"
+ }
+ }
+ ],
+ type: "VariableDeclaration"
+ }
+ ],
+ type: "Program",
+ sourceType: "script"
+ }
+
+ For more information, please read the `complete
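+        Module sources (import/export syntax) go through ``parseModule``
+        instead; a brief sketch along the same lines:
+
+        .. code:: python
+
+            >>> esprima.parseModule('export const answer = 42').sourceType
+            'module'
+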
+ documentation <http://esprima.org/doc>`__.
+
+ .. |Donate| image:: https://img.shields.io/badge/Donate-PayPal-green.svg
+ :target: https://www.paypal.me/Kronuz/25
+ .. |PyPI Version| image:: https://img.shields.io/pypi/v/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+ .. |PyPI License| image:: https://img.shields.io/pypi/l/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+ .. |PyPI Wheel| image:: https://img.shields.io/pypi/wheel/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+ .. |PyPI Format| image:: https://img.shields.io/pypi/format/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+ .. |PyPI Python Version| image:: https://img.shields.io/pypi/pyversions/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+ .. |PyPI Implementation| image:: https://img.shields.io/pypi/implementation/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+ .. |PyPI Status| image:: https://img.shields.io/pypi/status/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+ .. |PyPI Downloads| image:: https://img.shields.io/pypi/dm/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+Keywords: esprima ecmascript javascript parser ast
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Software Development :: Code Generators
+Classifier: Topic :: Software Development :: Compilers
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: General
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
diff --git a/third_party/python/esprima/README b/third_party/python/esprima/README
new file mode 100644
index 0000000000..442fbc7b11
--- /dev/null
+++ b/third_party/python/esprima/README
@@ -0,0 +1,117 @@
+|Donate| |PyPI Version| |PyPI License| |PyPI Format| |PyPI Status|
+
+**Esprima** (`esprima.org <http://esprima.org>`__, BSD license) is a
+high performance, standard-compliant
+`ECMAScript <http://www.ecma-international.org/publications/standards/Ecma-262.htm>`__
+parser officially written in ECMAScript (also popularly known as
+`JavaScript <https://en.wikipedia.org/wiki/JavaScript>`__) and ported to
+Python. Esprima is created and maintained by `Ariya
+Hidayat <https://twitter.com/ariyahidayat>`__, with the help of `many
+contributors <https://github.com/jquery/esprima/contributors>`__.
+
+The Python port is a line-by-line manual translation and was created and is
+maintained by `German Mendez Bravo
+(Kronuz) <https://twitter.com/germbravo>`__.
+
+Features
+~~~~~~~~
+
+- Full support for ECMAScript 2017 (`ECMA-262 8th
+ Edition <http://www.ecma-international.org/publications/standards/Ecma-262.htm>`__)
+- Sensible `syntax tree
+ format <https://github.com/estree/estree/blob/master/es5.md>`__ as
+ standardized by `ESTree project <https://github.com/estree/estree>`__
+- Experimental support for `JSX <https://facebook.github.io/jsx/>`__, a
+ syntax extension for `React <https://facebook.github.io/react/>`__
+- Optional tracking of syntax node location (index-based and
+ line-column)
+- `Heavily tested <http://esprima.org/test/ci.html>`__ (~1500 `unit
+ tests <https://github.com/jquery/esprima/tree/master/test/fixtures>`__
+ with `full code
+ coverage <https://codecov.io/github/jquery/esprima>`__)
+
+Installation
+~~~~~~~~~~~~
+
+.. code:: shell
+
+ pip install esprima
+
+API
+~~~
+
+Esprima can be used to perform `lexical
+analysis <https://en.wikipedia.org/wiki/Lexical_analysis>`__
+(tokenization) or `syntactic
+analysis <https://en.wikipedia.org/wiki/Parsing>`__ (parsing) of a
+JavaScript program.
+
+A simple example:
+
+.. code:: javascript
+
+ >>> import esprima
+ >>> program = 'const answer = 42'
+
+ >>> esprima.tokenize(program)
+ [{
+ type: "Keyword",
+ value: "const"
+ }, {
+ type: "Identifier",
+ value: "answer"
+ }, {
+ type: "Punctuator",
+ value: "="
+ }, {
+ type: "Numeric",
+ value: "42"
+ }]
+
+ >>> esprima.parseScript(program)
+ {
+ body: [
+ {
+ kind: "const",
+ declarations: [
+ {
+ init: {
+ raw: "42",
+ type: "Literal",
+ value: 42
+ },
+ type: "VariableDeclarator",
+ id: {
+ type: "Identifier",
+ name: "answer"
+ }
+ }
+ ],
+ type: "VariableDeclaration"
+ }
+ ],
+ type: "Program",
+ sourceType: "script"
+ }
+
+For more information, please read the `complete
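+Additional options are passed as a dict; for instance, line/column tracking
+can be switched on with ``loc`` (a brief illustrative sketch):
+
+.. code:: python
+
+    >>> import esprima
+    >>> ast = esprima.parseScript('const answer = 42', options={'loc': True})
+    >>> ast.body[0].type
+    'VariableDeclaration'
+    >>> ast.body[0].loc.start.line
+    1
+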
+documentation <http://esprima.org/doc>`__.
+
+.. |Donate| image:: https://img.shields.io/badge/Donate-PayPal-green.svg
+ :target: https://www.paypal.me/Kronuz/25
+.. |PyPI Version| image:: https://img.shields.io/pypi/v/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+.. |PyPI License| image:: https://img.shields.io/pypi/l/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+.. |PyPI Wheel| image:: https://img.shields.io/pypi/wheel/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+.. |PyPI Format| image:: https://img.shields.io/pypi/format/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+.. |PyPI Python Version| image:: https://img.shields.io/pypi/pyversions/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+.. |PyPI Implementation| image:: https://img.shields.io/pypi/implementation/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+.. |PyPI Status| image:: https://img.shields.io/pypi/status/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
+.. |PyPI Downloads| image:: https://img.shields.io/pypi/dm/esprima.svg
+ :target: https://pypi.python.org/pypi/esprima
diff --git a/third_party/python/esprima/esprima/__init__.py b/third_party/python/esprima/esprima/__init__.py
new file mode 100644
index 0000000000..0dcdf99e5e
--- /dev/null
+++ b/third_party/python/esprima/esprima/__init__.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import
+
+version = '4.0.1'
+__version__ = (4, 0, 1)
+
+from .esprima import * # NOQA
diff --git a/third_party/python/esprima/esprima/__main__.py b/third_party/python/esprima/esprima/__main__.py
new file mode 100644
index 0000000000..92f2aa2ec5
--- /dev/null
+++ b/third_party/python/esprima/esprima/__main__.py
@@ -0,0 +1,105 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, unicode_literals, print_function, division
+
+import sys
+
+from .esprima import parse, tokenize, Error, toDict
+from . import version
+
+
+def main():
+ import json
+ import time
+ import optparse
+
+ usage = "usage: %prog [options] [file.js]"
+ parser = optparse.OptionParser(usage=usage, version=version)
+ parser.add_option("--comment", dest="comment",
+ action="store_true", default=False,
+ help="Gather all line and block comments in an array")
+ parser.add_option("--attachComment", dest="attachComment",
+ action="store_true", default=False,
+ help="Attach comments to nodes")
+ parser.add_option("--loc", dest="loc", default=False,
+ action="store_true",
+ help="Include line-column location info for each syntax node")
+ parser.add_option("--range", dest="range", default=False,
+ action="store_true",
+ help="Include index-based range for each syntax node")
+ parser.add_option("--raw", dest="raw", default=False,
+ action="store_true",
+ help="Display the raw value of literals")
+ parser.add_option("--tokens", dest="tokens", default=False,
+ action="store_true",
+ help="List all tokens in an array")
+ parser.add_option("--tolerant", dest="tolerant", default=False,
+ action="store_true",
+ help="Tolerate errors on a best-effort basis (experimental)")
+ parser.add_option("--tokenize", dest="tokenize", default=False,
+ action="store_true",
+ help="Only tokenize, do not parse.")
+ parser.add_option("--module", dest="sourceType", default='string',
+ action="store_const", const='module',
+ help="Tolerate errors on a best-effort basis (experimental)")
+ parser.set_defaults(jsx=True, classProperties=True)
+ opts, args = parser.parse_args()
+
+ if len(args) == 1:
+ with open(args[0], 'rb') as f:
+ code = f.read().decode('utf-8')
+ elif sys.stdin.isatty():
+ parser.print_help()
+ return 64
+ else:
+ code = sys.stdin.read().decode('utf-8')
+
+ options = opts.__dict__
+ do_tokenize = options.pop('tokenize')
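+    # Whatever is left in 'options' is passed straight through to the
+    # esprima tokenize()/parse() calls below as their options dict.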
+
+ t = time.time()
+ try:
+ if do_tokenize:
+ del options['sourceType']
+ del options['tokens']
+ del options['raw']
+ del options['jsx']
+ res = toDict(tokenize(code, options=options))
+ else:
+ res = toDict(parse(code, options=options))
+ except Error as e:
+ res = e.toDict()
+ dt = time.time() - t + 0.000000001
+
+ print(json.dumps(res, indent=4))
+ print()
+    print('Parsed everything in', round(dt, 5), 'seconds.')
+    print("That's %d characters per second" % (len(code) // dt))
+
+ return 0
+
+
+if __name__ == '__main__':
+ retval = main()
+ sys.exit(retval)
diff --git a/third_party/python/esprima/esprima/character.py b/third_party/python/esprima/esprima/character.py
new file mode 100644
index 0000000000..a650a714a9
--- /dev/null
+++ b/third_party/python/esprima/esprima/character.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, unicode_literals
+
+import sys
+
+import unicodedata
+from collections import defaultdict
+
+from .compat import uchr, xrange
+
+# http://stackoverflow.com/questions/14245893/efficiently-list-all-characters-in-a-given-unicode-category
+U_CATEGORIES = defaultdict(list)
+for c in map(uchr, xrange(sys.maxunicode + 1)):
+ U_CATEGORIES[unicodedata.category(c)].append(c)
+UNICODE_LETTER = set(
+ U_CATEGORIES['Lu'] + U_CATEGORIES['Ll'] +
+ U_CATEGORIES['Lt'] + U_CATEGORIES['Lm'] +
+ U_CATEGORIES['Lo'] + U_CATEGORIES['Nl']
+)
+UNICODE_OTHER_ID_START = set((
+ # Other_ID_Start
+ '\u1885', '\u1886', '\u2118', '\u212E', '\u309B', '\u309C',
+ # New in Unicode 8.0
+ '\u08B3', '\u0AF9', '\u13F8', '\u9FCD', '\uAB60', '\U00010CC0', '\U000108E0', '\U0002B820',
+ # New in Unicode 9.0
+ '\u1C80', '\U000104DB', '\U0001E922',
+ '\U0001EE00', '\U0001EE06', '\U0001EE0A',
+))
+UNICODE_OTHER_ID_CONTINUE = set((
+ # Other_ID_Continue
+ '\xB7', '\u0387', '\u1369', '\u136A', '\u136B', '\u136C',
+ '\u136D', '\u136E', '\u136F', '\u1370', '\u1371', '\u19DA',
+ # New in Unicode 8.0
+ '\u08E3', '\uA69E', '\U00011730',
+ # New in Unicode 9.0
+ '\u08D4', '\u1DFB', '\uA8C5', '\U00011450',
+ '\U0001EE03', '\U0001EE0B',
+))
+UNICODE_COMBINING_MARK = set(U_CATEGORIES['Mn'] + U_CATEGORIES['Mc'])
+UNICODE_DIGIT = set(U_CATEGORIES['Nd'])
+UNICODE_CONNECTOR_PUNCTUATION = set(U_CATEGORIES['Pc'])
+IDENTIFIER_START = UNICODE_LETTER.union(UNICODE_OTHER_ID_START).union(set(('$', '_', '\\')))
+IDENTIFIER_PART = IDENTIFIER_START.union(UNICODE_COMBINING_MARK).union(UNICODE_DIGIT).union(UNICODE_CONNECTOR_PUNCTUATION).union(set(('\u200D', '\u200C'))).union(UNICODE_OTHER_ID_CONTINUE)
+
+WHITE_SPACE = set((
+ '\x09', '\x0B', '\x0C', '\x20', '\xA0',
+ '\u1680', '\u180E', '\u2000', '\u2001', '\u2002',
+ '\u2003', '\u2004', '\u2005', '\u2006', '\u2007',
+ '\u2008', '\u2009', '\u200A', '\u202F', '\u205F',
+ '\u3000', '\uFEFF',
+))
+LINE_TERMINATOR = set(('\x0A', '\x0D', '\u2028', '\u2029'))
+
+DECIMAL_CONV = dict((c, n) for n, c in enumerate('0123456789'))
+OCTAL_CONV = dict((c, n) for n, c in enumerate('01234567'))
+HEX_CONV = dict((c, n) for n, c in enumerate('0123456789abcdef'))
+for n, c in enumerate('ABCDEF', 10):
+ HEX_CONV[c] = n
+DECIMAL_DIGIT = set(DECIMAL_CONV.keys())
+OCTAL_DIGIT = set(OCTAL_CONV.keys())
+HEX_DIGIT = set(HEX_CONV.keys())
+
+
+class Character:
+ @staticmethod
+ def fromCodePoint(code):
+ return uchr(code)
+
+ # https://tc39.github.io/ecma262/#sec-white-space
+
+ @staticmethod
+ def isWhiteSpace(ch):
+ return ch in WHITE_SPACE
+
+ # https://tc39.github.io/ecma262/#sec-line-terminators
+
+ @staticmethod
+ def isLineTerminator(ch):
+ return ch in LINE_TERMINATOR
+
+ # https://tc39.github.io/ecma262/#sec-names-and-keywords
+
+ @staticmethod
+ def isIdentifierStart(ch):
+ return ch in IDENTIFIER_START
+
+ @staticmethod
+ def isIdentifierPart(ch):
+ return ch in IDENTIFIER_PART
+
+ # https://tc39.github.io/ecma262/#sec-literals-numeric-literals
+
+ @staticmethod
+ def isDecimalDigit(ch):
+ return ch in DECIMAL_DIGIT
+
+ @staticmethod
+ def isHexDigit(ch):
+ return ch in HEX_DIGIT
+
+ @staticmethod
+ def isOctalDigit(ch):
+ return ch in OCTAL_DIGIT
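+
+
+# A minimal usage sketch: the predicates above are plain set-membership
+# tests, so they can be exercised directly.
+if __name__ == '__main__':
+    assert Character.isIdentifierStart('a') and Character.isIdentifierStart('$')
+    assert not Character.isIdentifierStart('1')
+    assert Character.isIdentifierPart('1')
+    assert Character.isWhiteSpace('\xA0')
+    assert Character.isLineTerminator('\u2028')
+    assert Character.isHexDigit('f') and not Character.isOctalDigit('8')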
diff --git a/third_party/python/esprima/esprima/comment_handler.py b/third_party/python/esprima/esprima/comment_handler.py
new file mode 100644
index 0000000000..09a37a5fd2
--- /dev/null
+++ b/third_party/python/esprima/esprima/comment_handler.py
@@ -0,0 +1,176 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES
+# LOSS OF USE, DATA, OR PROFITS OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# self.SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, unicode_literals
+
+from .objects import Object
+from .nodes import Node
+from .syntax import Syntax
+
+
+class Comment(Node):
+ def __init__(self, type, value, range=None, loc=None):
+ self.type = type
+ self.value = value
+ self.range = range
+ self.loc = loc
+
+
+class Entry(Object):
+ def __init__(self, comment, start):
+ self.comment = comment
+ self.start = start
+
+
+class NodeInfo(Object):
+ def __init__(self, node, start):
+ self.node = node
+ self.start = start
+
+
+class CommentHandler(object):
+ def __init__(self):
+ self.attach = False
+ self.comments = []
+ self.stack = []
+ self.leading = []
+ self.trailing = []
+
+ def insertInnerComments(self, node, metadata):
+ # innerComments for an empty block body,
+ # e.g. `function a() {/** comments **/}`
+ if node.type is Syntax.BlockStatement and not node.body:
+ innerComments = []
+ for i, entry in enumerate(self.leading):
+ if metadata.end.offset >= entry.start:
+ innerComments.append(entry.comment)
+ self.leading[i] = None
+ self.trailing[i] = None
+ if innerComments:
+ node.innerComments = innerComments
+ self.leading = [v for v in self.leading if v is not None]
+ self.trailing = [v for v in self.trailing if v is not None]
+
+ def findTrailingComments(self, metadata):
+ trailingComments = []
+
+ if self.trailing:
+ for i, entry in enumerate(self.trailing):
+ if entry.start >= metadata.end.offset:
+ trailingComments.append(entry.comment)
+ if trailingComments:
+ self.trailing = []
+ return trailingComments
+
+ last = self.stack and self.stack[-1]
+ if last and last.node.trailingComments:
+ firstComment = last.node.trailingComments[0]
+ if firstComment and firstComment.range[0] >= metadata.end.offset:
+ trailingComments = last.node.trailingComments
+ del last.node.trailingComments
+ return trailingComments
+
+ def findLeadingComments(self, metadata):
+ leadingComments = []
+
+ target = None
+ while self.stack:
+ entry = self.stack and self.stack[-1]
+ if entry and entry.start >= metadata.start.offset:
+ target = entry.node
+ self.stack.pop()
+ else:
+ break
+
+ if target:
+ if target.leadingComments:
+ for i, comment in enumerate(target.leadingComments):
+ if comment.range[1] <= metadata.start.offset:
+ leadingComments.append(comment)
+ target.leadingComments[i] = None
+ if leadingComments:
+ target.leadingComments = [v for v in target.leadingComments if v is not None]
+ if not target.leadingComments:
+ del target.leadingComments
+ return leadingComments
+
+ for i, entry in enumerate(self.leading):
+ if entry.start <= metadata.start.offset:
+ leadingComments.append(entry.comment)
+ self.leading[i] = None
+ if leadingComments:
+ self.leading = [v for v in self.leading if v is not None]
+
+ return leadingComments
+
+ def visitNode(self, node, metadata):
+ if node.type is Syntax.Program and node.body:
+ return
+
+ self.insertInnerComments(node, metadata)
+ trailingComments = self.findTrailingComments(metadata)
+ leadingComments = self.findLeadingComments(metadata)
+ if leadingComments:
+ node.leadingComments = leadingComments
+ if trailingComments:
+ node.trailingComments = trailingComments
+
+ self.stack.append(NodeInfo(
+ node=node,
+ start=metadata.start.offset
+ ))
+
+ def visitComment(self, node, metadata):
+ type = 'Line' if node.type[0] == 'L' else 'Block'
+ comment = Comment(
+ type=type,
+ value=node.value
+ )
+ if node.range:
+ comment.range = node.range
+ if node.loc:
+ comment.loc = node.loc
+ self.comments.append(comment)
+
+ if self.attach:
+ entry = Entry(
+ comment=Comment(
+ type=type,
+ value=node.value,
+ range=[metadata.start.offset, metadata.end.offset]
+ ),
+ start=metadata.start.offset
+ )
+ if node.loc:
+ entry.comment.loc = node.loc
+ node.type = type
+ self.leading.append(entry)
+ self.trailing.append(entry)
+
+ def visit(self, node, metadata):
+ if node.type == 'LineComment':
+ self.visitComment(node, metadata)
+ elif node.type == 'BlockComment':
+ self.visitComment(node, metadata)
+ elif self.attach:
+ self.visitNode(node, metadata)
diff --git a/third_party/python/esprima/esprima/compat.py b/third_party/python/esprima/esprima/compat.py
new file mode 100644
index 0000000000..79543255e3
--- /dev/null
+++ b/third_party/python/esprima/esprima/compat.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, unicode_literals
+
+import sys
+
+PY3 = sys.version_info >= (3, 0)
+
+if PY3:
+ # Python 3:
+ basestring = str
+ long = int
+ xrange = range
+ unicode = str
+ uchr = chr
+
+ def uord(ch):
+ return ord(ch[0])
+
+else:
+ basestring = basestring
+ long = long
+ xrange = xrange
+ unicode = unicode
+
+ try:
+ # Python 2 UCS4:
+ unichr(0x10000)
+ uchr = unichr
+
+ def uord(ch):
+ return ord(ch[0])
+
+ except ValueError:
+ # Python 2 UCS2:
+ def uchr(code):
+ # UTF-16 Encoding
+ if code <= 0xFFFF:
+ return unichr(code)
+ cu1 = ((code - 0x10000) >> 10) + 0xD800
+ cu2 = ((code - 0x10000) & 1023) + 0xDC00
+ return unichr(cu1) + unichr(cu2)
+
+ def uord(ch):
+ cp = ord(ch[0])
+ if cp >= 0xD800 and cp <= 0xDBFF:
+ second = ord(ch[1])
+ if second >= 0xDC00 and second <= 0xDFFF:
+ first = cp
+ cp = (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000
+ return cp
diff --git a/third_party/python/esprima/esprima/error_handler.py b/third_party/python/esprima/esprima/error_handler.py
new file mode 100644
index 0000000000..9b0f5cb843
--- /dev/null
+++ b/third_party/python/esprima/esprima/error_handler.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import unicode_literals
+
+from .compat import unicode
+
+
+class Error(Exception):
+ def __init__(self, message, name=None, index=None, lineNumber=None, column=None, description=None):
+ super(Error, self).__init__(message)
+ self.message = message
+ self.name = name
+ self.index = index
+ self.lineNumber = lineNumber
+ self.column = column
+ # self.description = description
+
+ def toString(self):
+ return '%s: %s' % (self.__class__.__name__, self)
+
+ def toDict(self):
+ d = dict((unicode(k), v) for k, v in self.__dict__.items() if v is not None)
+ d['message'] = self.toString()
+ return d
+
+
+class ErrorHandler:
+ def __init__(self):
+ self.errors = []
+ self.tolerant = False
+
+ def recordError(self, error):
+ self.errors.append(error.toDict())
+
+ def tolerate(self, error):
+ if self.tolerant:
+ self.recordError(error)
+ else:
+ raise error
+
+ def createError(self, index, line, col, description):
+ msg = 'Line %s: %s' % (line, description)
+ return Error(msg, index=index, lineNumber=line, column=col, description=description)
+
+ def throwError(self, index, line, col, description):
+ raise self.createError(index, line, col, description)
+
+ def tolerateError(self, index, line, col, description):
+ error = self.createError(index, line, col, description)
+ if self.tolerant:
+ self.recordError(error)
+ else:
+ raise error
diff --git a/third_party/python/esprima/esprima/esprima.py b/third_party/python/esprima/esprima/esprima.py
new file mode 100644
index 0000000000..faea0c2dda
--- /dev/null
+++ b/third_party/python/esprima/esprima/esprima.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, unicode_literals
+
+from .comment_handler import CommentHandler
+from .error_handler import Error
+from .jsx_parser import JSXParser
+from .jsx_syntax import JSXSyntax
+from .objects import Array, toDict
+from .parser import Parser
+from .syntax import Syntax
+from .tokenizer import Tokenizer
+from .visitor import NodeVisitor
+from . import nodes
+from . import jsx_nodes
+
+
+__all__ = ['Syntax', 'JSXSyntax', 'Error', 'NodeVisitor', 'nodes', 'jsx_nodes',
+ 'parse', 'parseModule', 'parseScript', 'tokenize', 'toDict']
+
+
+def parse(code, options=None, delegate=None, **kwargs):
+ options = {} if options is None else options.copy()
+ options.update(kwargs)
+
+ # ESNext preset:
+ if options.get('esnext', False):
+ options['jsx'] = True
+ options['classProperties'] = True
+
+ commentHandler = None
+
+ def proxyDelegate(node, metadata):
+ if delegate:
+ new_node = delegate(node, metadata)
+ if new_node is not None:
+ node = new_node
+ if commentHandler:
+ commentHandler.visit(node, metadata)
+ return node
+
+ parserDelegate = None if delegate is None else proxyDelegate
+ collectComment = options.get('comment', False)
+ attachComment = options.get('attachComment', False)
+ if collectComment or attachComment:
+ commentHandler = CommentHandler()
+ commentHandler.attach = attachComment
+ options['comment'] = True
+ parserDelegate = proxyDelegate
+
+ isModule = options.get('sourceType', 'script') == 'module'
+
+ if options.get('jsx', False):
+ parser = JSXParser(code, options=options, delegate=parserDelegate)
+ else:
+ parser = Parser(code, options=options, delegate=parserDelegate)
+
+ ast = parser.parseModule() if isModule else parser.parseScript()
+
+ if collectComment and commentHandler:
+ ast.comments = commentHandler.comments
+
+ if parser.config.tokens:
+ ast.tokens = parser.tokens
+
+ if parser.config.tolerant:
+ ast.errors = parser.errorHandler.errors
+
+ return ast
+
+
+def parseModule(code, options=None, delegate=None, **kwargs):
+ kwargs['sourceType'] = 'module'
+ return parse(code, options, delegate, **kwargs)
+
+
+def parseScript(code, options=None, delegate=None, **kwargs):
+ kwargs['sourceType'] = 'script'
+ return parse(code, options, delegate, **kwargs)
+
+
+def tokenize(code, options=None, delegate=None, **kwargs):
+ options = {} if options is None else options.copy()
+ options.update(kwargs)
+
+ tokenizer = Tokenizer(code, options)
+
+ tokens = Array()
+
+ try:
+ while True:
+ token = tokenizer.getNextToken()
+ if not token:
+ break
+ if delegate:
+ token = delegate(token)
+ tokens.append(token)
+ except Error as e:
+ tokenizer.errorHandler.tolerate(e)
+
+ if tokenizer.errorHandler.tolerant:
+ tokens.errors = tokenizer.errors()
+
+ return tokens
diff --git a/third_party/python/esprima/esprima/jsx_nodes.py b/third_party/python/esprima/esprima/jsx_nodes.py
new file mode 100644
index 0000000000..f195653ab5
--- /dev/null
+++ b/third_party/python/esprima/esprima/jsx_nodes.py
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, unicode_literals
+
+from .nodes import Node
+from .jsx_syntax import JSXSyntax
+
+
+class JSXClosingElement(Node):
+ def __init__(self, name):
+ self.type = JSXSyntax.JSXClosingElement
+ self.name = name
+
+
+class JSXElement(Node):
+ def __init__(self, openingElement, children, closingElement):
+ self.type = JSXSyntax.JSXElement
+ self.openingElement = openingElement
+ self.children = children
+ self.closingElement = closingElement
+
+
+class JSXEmptyExpression(Node):
+ def __init__(self):
+ self.type = JSXSyntax.JSXEmptyExpression
+
+
+class JSXExpressionContainer(Node):
+ def __init__(self, expression):
+ self.type = JSXSyntax.JSXExpressionContainer
+ self.expression = expression
+
+
+class JSXIdentifier(Node):
+ def __init__(self, name):
+ self.type = JSXSyntax.JSXIdentifier
+ self.name = name
+
+
+class JSXMemberExpression(Node):
+ def __init__(self, object, property):
+ self.type = JSXSyntax.JSXMemberExpression
+ self.object = object
+ self.property = property
+
+
+class JSXAttribute(Node):
+ def __init__(self, name, value):
+ self.type = JSXSyntax.JSXAttribute
+ self.name = name
+ self.value = value
+
+
+class JSXNamespacedName(Node):
+ def __init__(self, namespace, name):
+ self.type = JSXSyntax.JSXNamespacedName
+ self.namespace = namespace
+ self.name = name
+
+
+class JSXOpeningElement(Node):
+ def __init__(self, name, selfClosing, attributes):
+ self.type = JSXSyntax.JSXOpeningElement
+ self.name = name
+ self.selfClosing = selfClosing
+ self.attributes = attributes
+
+
+class JSXSpreadAttribute(Node):
+ def __init__(self, argument):
+ self.type = JSXSyntax.JSXSpreadAttribute
+ self.argument = argument
+
+
+class JSXText(Node):
+ def __init__(self, value, raw):
+ self.type = JSXSyntax.JSXText
+ self.value = value
+ self.raw = raw
diff --git a/third_party/python/esprima/esprima/jsx_parser.py b/third_party/python/esprima/esprima/jsx_parser.py
new file mode 100644
index 0000000000..ec71b9251d
--- /dev/null
+++ b/third_party/python/esprima/esprima/jsx_parser.py
@@ -0,0 +1,584 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, unicode_literals
+
+from .compat import uchr
+from .character import Character
+from . import jsx_nodes as JSXNode
+from .jsx_syntax import JSXSyntax
+from . import nodes as Node
+from .parser import Marker, Parser
+from .token import Token, TokenName
+from .xhtml_entities import XHTMLEntities
+
+
+class MetaJSXElement(object):
+ def __init__(self, node=None, opening=None, closing=None, children=None):
+ self.node = node
+ self.opening = opening
+ self.closing = closing
+ self.children = children
+
+
+class JSXToken(object):
+ Identifier = 100
+ Text = 101
+
+
+class RawJSXToken(object):
+ def __init__(self, type=None, value=None, lineNumber=None, lineStart=None, start=None, end=None):
+ self.type = type
+ self.value = value
+ self.lineNumber = lineNumber
+ self.lineStart = lineStart
+ self.start = start
+ self.end = end
+
+
+TokenName[JSXToken.Identifier] = "JSXIdentifier"
+TokenName[JSXToken.Text] = "JSXText"
+
+
+# Return the fully qualified element name, e.g. for <svg:path> it is "svg:path".
+def getQualifiedElementName(elementName):
+ typ = elementName.type
+ if typ is JSXSyntax.JSXIdentifier:
+ id = elementName
+ qualifiedName = id.name
+ elif typ is JSXSyntax.JSXNamespacedName:
+ ns = elementName
+ qualifiedName = getQualifiedElementName(ns.namespace) + ':' + getQualifiedElementName(ns.name)
+ elif typ is JSXSyntax.JSXMemberExpression:
+ expr = elementName
+ qualifiedName = getQualifiedElementName(expr.object) + '.' + getQualifiedElementName(expr.property)
+
+ return qualifiedName
+
+
+class JSXParser(Parser):
+ def __init__(self, code, options, delegate):
+ super(JSXParser, self).__init__(code, options, delegate)
+
+ def parsePrimaryExpression(self):
+ return self.parseJSXRoot() if self.match('<') else super(JSXParser, self).parsePrimaryExpression()
+
+ def startJSX(self):
+ # Unwind the scanner before the lookahead token.
+ self.scanner.index = self.startMarker.index
+ self.scanner.lineNumber = self.startMarker.line
+ self.scanner.lineStart = self.startMarker.index - self.startMarker.column
+
+ def finishJSX(self):
+ # Prime the next lookahead.
+ self.nextToken()
+
+ def reenterJSX(self):
+ self.startJSX()
+ self.expectJSX('}')
+
+ # Pop the closing '}' added from the lookahead.
+ if self.config.tokens:
+ self.tokens.pop()
+
+ def createJSXNode(self):
+ self.collectComments()
+ return Marker(
+ index=self.scanner.index,
+ line=self.scanner.lineNumber,
+ column=self.scanner.index - self.scanner.lineStart
+ )
+
+ def createJSXChildNode(self):
+ return Marker(
+ index=self.scanner.index,
+ line=self.scanner.lineNumber,
+ column=self.scanner.index - self.scanner.lineStart
+ )
+
+ def scanXHTMLEntity(self, quote):
+ result = '&'
+
+ valid = True
+ terminated = False
+ numeric = False
+ hex = False
+
+ while not self.scanner.eof() and valid and not terminated:
+ ch = self.scanner.source[self.scanner.index]
+ if ch == quote:
+ break
+
+ terminated = (ch == ';')
+ result += ch
+ self.scanner.index += 1
+ if not terminated:
+ length = len(result)
+ if length == 2:
+ # e.g. '&#123;'
+ numeric = (ch == '#')
+ elif length == 3:
+ if numeric:
+ # e.g. '&#x41;'
+ hex = ch == 'x'
+ valid = hex or Character.isDecimalDigit(ch)
+ numeric = numeric and not hex
+ else:
+ valid = valid and not (numeric and not Character.isDecimalDigit(ch))
+ valid = valid and not (hex and not Character.isHexDigit(ch))
+
+ if valid and terminated and len(result) > 2:
+ # e.g. '&#x41;' becomes just '#x41'
+ st = result[1:-1]
+ if numeric and len(st) > 1:
+ result = uchr(int(st[1:], 10))
+ elif hex and len(st) > 2:
+ result = uchr(int(st[2:], 16))
+ elif not numeric and not hex and st in XHTMLEntities:
+ result = XHTMLEntities[st]
+
+ return result
+
+ # Scan the next JSX token. This replaces Scanner#lex when in JSX mode.
+
+ def lexJSX(self):
+ ch = self.scanner.source[self.scanner.index]
+
+ # < > / : = { }
+ if ch in ('<', '>', '/', ':', '=', '{', '}'):
+ value = self.scanner.source[self.scanner.index]
+ self.scanner.index += 1
+ return RawJSXToken(
+ type=Token.Punctuator,
+ value=value,
+ lineNumber=self.scanner.lineNumber,
+ lineStart=self.scanner.lineStart,
+ start=self.scanner.index - 1,
+ end=self.scanner.index
+ )
+
+ # " '
+ if ch in ('\'', '"'):
+ start = self.scanner.index
+ quote = self.scanner.source[self.scanner.index]
+ self.scanner.index += 1
+ str = ''
+ while not self.scanner.eof():
+ ch = self.scanner.source[self.scanner.index]
+ self.scanner.index += 1
+ if ch == quote:
+ break
+ elif ch == '&':
+ str += self.scanXHTMLEntity(quote)
+ else:
+ str += ch
+
+ return RawJSXToken(
+ type=Token.StringLiteral,
+ value=str,
+ lineNumber=self.scanner.lineNumber,
+ lineStart=self.scanner.lineStart,
+ start=start,
+ end=self.scanner.index
+ )
+
+ # ... or .
+ if ch == '.':
+ start = self.scanner.index
+ if self.scanner.source[start + 1:start + 3] == '..':
+ value = '...'
+ self.scanner.index += 3
+ else:
+ value = '.'
+ self.scanner.index += 1
+ return RawJSXToken(
+ type=Token.Punctuator,
+ value=value,
+ lineNumber=self.scanner.lineNumber,
+ lineStart=self.scanner.lineStart,
+ start=start,
+ end=self.scanner.index
+ )
+
+ # `
+ if ch == '`':
+ # Only placeholder, since it will be rescanned as a real assignment expression.
+ return RawJSXToken(
+ type=Token.Template,
+ value='',
+ lineNumber=self.scanner.lineNumber,
+ lineStart=self.scanner.lineStart,
+ start=self.scanner.index,
+ end=self.scanner.index
+ )
+
+ # An identifier cannot contain a backslash (char code 92).
+ if Character.isIdentifierStart(ch) and ch != '\\':
+ start = self.scanner.index
+ self.scanner.index += 1
+ while not self.scanner.eof():
+ ch = self.scanner.source[self.scanner.index]
+ if Character.isIdentifierPart(ch) and ch != '\\':
+ self.scanner.index += 1
+ elif ch == '-':
+ # Hyphen (char code 45) can be part of an identifier.
+ self.scanner.index += 1
+ else:
+ break
+
+ id = self.scanner.source[start:self.scanner.index]
+ return RawJSXToken(
+ type=JSXToken.Identifier,
+ value=id,
+ lineNumber=self.scanner.lineNumber,
+ lineStart=self.scanner.lineStart,
+ start=start,
+ end=self.scanner.index
+ )
+
+ return self.scanner.lex()
+
+ def nextJSXToken(self):
+ self.collectComments()
+
+ self.startMarker.index = self.scanner.index
+ self.startMarker.line = self.scanner.lineNumber
+ self.startMarker.column = self.scanner.index - self.scanner.lineStart
+ token = self.lexJSX()
+ self.lastMarker.index = self.scanner.index
+ self.lastMarker.line = self.scanner.lineNumber
+ self.lastMarker.column = self.scanner.index - self.scanner.lineStart
+
+ if self.config.tokens:
+ self.tokens.append(self.convertToken(token))
+
+ return token
+
+ def nextJSXText(self):
+ self.startMarker.index = self.scanner.index
+ self.startMarker.line = self.scanner.lineNumber
+ self.startMarker.column = self.scanner.index - self.scanner.lineStart
+
+ start = self.scanner.index
+
+ text = ''
+ while not self.scanner.eof():
+ ch = self.scanner.source[self.scanner.index]
+ if ch in ('{', '<'):
+ break
+
+ self.scanner.index += 1
+ text += ch
+ if Character.isLineTerminator(ch):
+ self.scanner.lineNumber += 1
+ if ch == '\r' and self.scanner.source[self.scanner.index] == '\n':
+ self.scanner.index += 1
+
+ self.scanner.lineStart = self.scanner.index
+
+ self.lastMarker.index = self.scanner.index
+ self.lastMarker.line = self.scanner.lineNumber
+ self.lastMarker.column = self.scanner.index - self.scanner.lineStart
+
+ token = RawJSXToken(
+ type=JSXToken.Text,
+ value=text,
+ lineNumber=self.scanner.lineNumber,
+ lineStart=self.scanner.lineStart,
+ start=start,
+ end=self.scanner.index
+ )
+
+ if text and self.config.tokens:
+ self.tokens.append(self.convertToken(token))
+
+ return token
+
+ def peekJSXToken(self):
+ state = self.scanner.saveState()
+ self.scanner.scanComments()
+ next = self.lexJSX()
+ self.scanner.restoreState(state)
+
+ return next
+
+ # Expect the next JSX token to match the specified punctuator.
+ # If not, an exception will be thrown.
+
+ def expectJSX(self, value):
+ token = self.nextJSXToken()
+ if token.type is not Token.Punctuator or token.value != value:
+ self.throwUnexpectedToken(token)
+
+ # Return True if the next JSX token matches the specified punctuator.
+
+ def matchJSX(self, *value):
+ next = self.peekJSXToken()
+ return next.type is Token.Punctuator and next.value in value
+
+ def parseJSXIdentifier(self):
+ node = self.createJSXNode()
+ token = self.nextJSXToken()
+ if token.type is not JSXToken.Identifier:
+ self.throwUnexpectedToken(token)
+
+ return self.finalize(node, JSXNode.JSXIdentifier(token.value))
+
+ def parseJSXElementName(self):
+ node = self.createJSXNode()
+ elementName = self.parseJSXIdentifier()
+
+ if self.matchJSX(':'):
+ namespace = elementName
+ self.expectJSX(':')
+ name = self.parseJSXIdentifier()
+ elementName = self.finalize(node, JSXNode.JSXNamespacedName(namespace, name))
+ elif self.matchJSX('.'):
+ while self.matchJSX('.'):
+ object = elementName
+ self.expectJSX('.')
+ property = self.parseJSXIdentifier()
+ elementName = self.finalize(node, JSXNode.JSXMemberExpression(object, property))
+
+ return elementName
+
+ def parseJSXAttributeName(self):
+ node = self.createJSXNode()
+
+ identifier = self.parseJSXIdentifier()
+ if self.matchJSX(':'):
+ namespace = identifier
+ self.expectJSX(':')
+ name = self.parseJSXIdentifier()
+ attributeName = self.finalize(node, JSXNode.JSXNamespacedName(namespace, name))
+ else:
+ attributeName = identifier
+
+ return attributeName
+
+ def parseJSXStringLiteralAttribute(self):
+ node = self.createJSXNode()
+ token = self.nextJSXToken()
+ if token.type is not Token.StringLiteral:
+ self.throwUnexpectedToken(token)
+
+ raw = self.getTokenRaw(token)
+ return self.finalize(node, Node.Literal(token.value, raw))
+
+ def parseJSXExpressionAttribute(self):
+ node = self.createJSXNode()
+
+ self.expectJSX('{')
+ self.finishJSX()
+
+ if self.match('}'):
+ self.tolerateError('JSX attributes must only be assigned a non-empty expression')
+
+ expression = self.parseAssignmentExpression()
+ self.reenterJSX()
+
+ return self.finalize(node, JSXNode.JSXExpressionContainer(expression))
+
+ def parseJSXAttributeValue(self):
+ if self.matchJSX('{'):
+ return self.parseJSXExpressionAttribute()
+ if self.matchJSX('<'):
+ return self.parseJSXElement()
+
+ return self.parseJSXStringLiteralAttribute()
+
+ def parseJSXNameValueAttribute(self):
+ node = self.createJSXNode()
+ name = self.parseJSXAttributeName()
+ value = None
+ if self.matchJSX('='):
+ self.expectJSX('=')
+ value = self.parseJSXAttributeValue()
+
+ return self.finalize(node, JSXNode.JSXAttribute(name, value))
+
+ def parseJSXSpreadAttribute(self):
+ node = self.createJSXNode()
+ self.expectJSX('{')
+ self.expectJSX('...')
+
+ self.finishJSX()
+ argument = self.parseAssignmentExpression()
+ self.reenterJSX()
+
+ return self.finalize(node, JSXNode.JSXSpreadAttribute(argument))
+
+ def parseJSXAttributes(self):
+ attributes = []
+
+ while not self.matchJSX('/', '>'):
+ attribute = self.parseJSXSpreadAttribute() if self.matchJSX('{') else self.parseJSXNameValueAttribute()
+ attributes.append(attribute)
+
+ return attributes
+
+ def parseJSXOpeningElement(self):
+ node = self.createJSXNode()
+
+ self.expectJSX('<')
+ name = self.parseJSXElementName()
+ attributes = self.parseJSXAttributes()
+ selfClosing = self.matchJSX('/')
+ if selfClosing:
+ self.expectJSX('/')
+
+ self.expectJSX('>')
+
+ return self.finalize(node, JSXNode.JSXOpeningElement(name, selfClosing, attributes))
+
+ def parseJSXBoundaryElement(self):
+ node = self.createJSXNode()
+
+ self.expectJSX('<')
+ if self.matchJSX('/'):
+ self.expectJSX('/')
+ elementName = self.parseJSXElementName()
+ self.expectJSX('>')
+ return self.finalize(node, JSXNode.JSXClosingElement(elementName))
+
+ name = self.parseJSXElementName()
+ attributes = self.parseJSXAttributes()
+ selfClosing = self.matchJSX('/')
+ if selfClosing:
+ self.expectJSX('/')
+
+ self.expectJSX('>')
+
+ return self.finalize(node, JSXNode.JSXOpeningElement(name, selfClosing, attributes))
+
+ def parseJSXEmptyExpression(self):
+ node = self.createJSXChildNode()
+ self.collectComments()
+ self.lastMarker.index = self.scanner.index
+ self.lastMarker.line = self.scanner.lineNumber
+ self.lastMarker.column = self.scanner.index - self.scanner.lineStart
+ return self.finalize(node, JSXNode.JSXEmptyExpression())
+
+ def parseJSXExpressionContainer(self):
+ node = self.createJSXNode()
+ self.expectJSX('{')
+
+ if self.matchJSX('}'):
+ expression = self.parseJSXEmptyExpression()
+ self.expectJSX('}')
+ else:
+ self.finishJSX()
+ expression = self.parseAssignmentExpression()
+ self.reenterJSX()
+
+ return self.finalize(node, JSXNode.JSXExpressionContainer(expression))
+
+ def parseJSXChildren(self):
+ children = []
+
+ while not self.scanner.eof():
+ node = self.createJSXChildNode()
+ token = self.nextJSXText()
+ if token.start < token.end:
+ raw = self.getTokenRaw(token)
+ child = self.finalize(node, JSXNode.JSXText(token.value, raw))
+ children.append(child)
+
+ if self.scanner.source[self.scanner.index] == '{':
+ container = self.parseJSXExpressionContainer()
+ children.append(container)
+ else:
+ break
+
+ return children
+
+ def parseComplexJSXElement(self, el):
+ stack = []
+
+ while not self.scanner.eof():
+ el.children.extend(self.parseJSXChildren())
+ node = self.createJSXChildNode()
+ element = self.parseJSXBoundaryElement()
+ if element.type is JSXSyntax.JSXOpeningElement:
+ opening = element
+ if opening.selfClosing:
+ child = self.finalize(node, JSXNode.JSXElement(opening, [], None))
+ el.children.append(child)
+ else:
+ stack.append(el)
+ el = MetaJSXElement(
+ node=node,
+ opening=opening,
+ closing=None,
+ children=[],
+ )
+
+ if element.type is JSXSyntax.JSXClosingElement:
+ el.closing = element
+ open = getQualifiedElementName(el.opening.name)
+ close = getQualifiedElementName(el.closing.name)
+ if open != close:
+ self.tolerateError('Expected corresponding JSX closing tag for %0', open)
+
+ if stack:
+ child = self.finalize(el.node, JSXNode.JSXElement(el.opening, el.children, el.closing))
+ el = stack[-1]
+ el.children.append(child)
+ stack.pop()
+ else:
+ break
+
+ return el
+
+ def parseJSXElement(self):
+ node = self.createJSXNode()
+
+ opening = self.parseJSXOpeningElement()
+ children = []
+ closing = None
+
+ if not opening.selfClosing:
+ el = self.parseComplexJSXElement(MetaJSXElement(
+ node=node,
+ opening=opening,
+ closing=closing,
+ children=children
+ ))
+ children = el.children
+ closing = el.closing
+
+ return self.finalize(node, JSXNode.JSXElement(opening, children, closing))
+
+ def parseJSXRoot(self):
+ # Pop the opening '<' added from the lookahead.
+ if self.config.tokens:
+ self.tokens.pop()
+
+ self.startJSX()
+ element = self.parseJSXElement()
+ self.finishJSX()
+
+ return element
+
+ def isStartOfExpression(self):
+ return super(JSXParser, self).isStartOfExpression() or self.match('<')
diff --git a/third_party/python/esprima/esprima/jsx_syntax.py b/third_party/python/esprima/esprima/jsx_syntax.py
new file mode 100644
index 0000000000..808cc8b027
--- /dev/null
+++ b/third_party/python/esprima/esprima/jsx_syntax.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import unicode_literals
+
+
+class JSXSyntax:
+ JSXAttribute = "JSXAttribute"
+ JSXClosingElement = "JSXClosingElement"
+ JSXElement = "JSXElement"
+ JSXEmptyExpression = "JSXEmptyExpression"
+ JSXExpressionContainer = "JSXExpressionContainer"
+ JSXIdentifier = "JSXIdentifier"
+ JSXMemberExpression = "JSXMemberExpression"
+ JSXNamespacedName = "JSXNamespacedName"
+ JSXOpeningElement = "JSXOpeningElement"
+ JSXSpreadAttribute = "JSXSpreadAttribute"
+ JSXText = "JSXText"
diff --git a/third_party/python/esprima/esprima/messages.py b/third_party/python/esprima/esprima/messages.py
new file mode 100644
index 0000000000..bb6314e1ea
--- /dev/null
+++ b/third_party/python/esprima/esprima/messages.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import unicode_literals
+
+
+# Error messages should be identical to V8.
+class Messages:
+ ObjectPatternAsRestParameter = "Unexpected token {"
+ BadImportCallArity = "Unexpected token"
+ BadGetterArity = "Getter must not have any formal parameters"
+ BadSetterArity = "Setter must have exactly one formal parameter"
+ BadSetterRestParameter = "Setter function argument must not be a rest parameter"
+ ConstructorIsAsync = "Class constructor may not be an async method"
+ ConstructorSpecialMethod = "Class constructor may not be an accessor"
+ DeclarationMissingInitializer = "Missing initializer in %0 declaration"
+ DefaultRestParameter = "Unexpected token ="
+ DefaultRestProperty = "Unexpected token ="
+ DuplicateBinding = "Duplicate binding %0"
+ DuplicateConstructor = "A class may only have one constructor"
+ DuplicateProtoProperty = "Duplicate __proto__ fields are not allowed in object literals"
+ ForInOfLoopInitializer = "%0 loop variable declaration may not have an initializer"
+ GeneratorInLegacyContext = "Generator declarations are not allowed in legacy contexts"
+ IllegalBreak = "Illegal break statement"
+ IllegalContinue = "Illegal continue statement"
+ IllegalExportDeclaration = "Unexpected token"
+ IllegalImportDeclaration = "Unexpected token"
+ IllegalLanguageModeDirective = "Illegal 'use strict' directive in function with non-simple parameter list"
+ IllegalReturn = "Illegal return statement"
+ InvalidEscapedReservedWord = "Keyword must not contain escaped characters"
+ InvalidHexEscapeSequence = "Invalid hexadecimal escape sequence"
+ InvalidLHSInAssignment = "Invalid left-hand side in assignment"
+ InvalidLHSInForIn = "Invalid left-hand side in for-in"
+ InvalidLHSInForLoop = "Invalid left-hand side in for-loop"
+ InvalidModuleSpecifier = "Unexpected token"
+ InvalidRegExp = "Invalid regular expression"
+ LetInLexicalBinding = "let is disallowed as a lexically bound name"
+ MissingFromClause = "Unexpected token"
+ MultipleDefaultsInSwitch = "More than one default clause in switch statement"
+ NewlineAfterThrow = "Illegal newline after throw"
+ NoAsAfterImportNamespace = "Unexpected token"
+ NoCatchOrFinally = "Missing catch or finally after try"
+ ParameterAfterRestParameter = "Rest parameter must be last formal parameter"
+ PropertyAfterRestProperty = "Unexpected token"
+ Redeclaration = "%0 '%1' has already been declared"
+ StaticPrototype = "Classes may not have static property named prototype"
+ StrictCatchVariable = "Catch variable may not be eval or arguments in strict mode"
+ StrictDelete = "Delete of an unqualified identifier in strict mode."
+ StrictFunction = "In strict mode code, functions can only be declared at top level or inside a block"
+ StrictFunctionName = "Function name may not be eval or arguments in strict mode"
+ StrictLHSAssignment = "Assignment to eval or arguments is not allowed in strict mode"
+ StrictLHSPostfix = "Postfix increment/decrement may not have eval or arguments operand in strict mode"
+ StrictLHSPrefix = "Prefix increment/decrement may not have eval or arguments operand in strict mode"
+ StrictModeWith = "Strict mode code may not include a with statement"
+ StrictOctalLiteral = "Octal literals are not allowed in strict mode."
+ StrictParamDupe = "Strict mode function may not have duplicate parameter names"
+ StrictParamName = "Parameter name eval or arguments is not allowed in strict mode"
+ StrictReservedWord = "Use of future reserved word in strict mode"
+ StrictVarName = "Variable name may not be eval or arguments in strict mode"
+ TemplateOctalLiteral = "Octal literals are not allowed in template strings."
+ UnexpectedEOS = "Unexpected end of input"
+ UnexpectedIdentifier = "Unexpected identifier"
+ UnexpectedNumber = "Unexpected number"
+ UnexpectedReserved = "Unexpected reserved word"
+ UnexpectedString = "Unexpected string"
+ UnexpectedTemplate = "Unexpected quasi %0"
+ UnexpectedToken = "Unexpected token %0"
+ UnexpectedTokenIllegal = "Unexpected token ILLEGAL"
+ UnknownLabel = "Undefined label '%0'"
+ UnterminatedRegExp = "Invalid regular expression: missing /"
diff --git a/third_party/python/esprima/esprima/nodes.py b/third_party/python/esprima/esprima/nodes.py
new file mode 100644
index 0000000000..bbbbdb893b
--- /dev/null
+++ b/third_party/python/esprima/esprima/nodes.py
@@ -0,0 +1,620 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, unicode_literals
+
+from .objects import Object
+from .syntax import Syntax
+from .scanner import RegExp
+
+
+class Node(Object):
+ def __dir__(self):
+ return list(self.__dict__.keys())
+
+ def __iter__(self):
+ # Iterate over the node's attribute names.
+ return iter(self.__dict__)
+
+ def keys(self):
+ return self.__dict__.keys()
+
+ def items(self):
+ return self.__dict__.items()
+
+
+class ArrayExpression(Node):
+ def __init__(self, elements):
+ self.type = Syntax.ArrayExpression
+ self.elements = elements
+
+
+class ArrayPattern(Node):
+ def __init__(self, elements):
+ self.type = Syntax.ArrayPattern
+ self.elements = elements
+
+
+class ArrowFunctionExpression(Node):
+ def __init__(self, params, body, expression):
+ self.type = Syntax.ArrowFunctionExpression
+ self.generator = False
+ self.isAsync = False
+ self.params = params
+ self.body = body
+ self.expression = expression
+
+
+class AssignmentExpression(Node):
+ def __init__(self, operator, left, right):
+ self.type = Syntax.AssignmentExpression
+ self.operator = operator
+ self.left = left
+ self.right = right
+
+
+class AssignmentPattern(Node):
+ def __init__(self, left, right):
+ self.type = Syntax.AssignmentPattern
+ self.left = left
+ self.right = right
+
+
+class AsyncArrowFunctionExpression(Node):
+ def __init__(self, params, body, expression):
+ self.type = Syntax.ArrowFunctionExpression
+ self.generator = False
+ self.isAsync = True
+ self.params = params
+ self.body = body
+ self.expression = expression
+
+
+class AsyncFunctionDeclaration(Node):
+ def __init__(self, id, params, body):
+ self.type = Syntax.FunctionDeclaration
+ self.generator = False
+ self.expression = False
+ self.isAsync = True
+ self.id = id
+ self.params = params
+ self.body = body
+
+
+class AsyncFunctionExpression(Node):
+ def __init__(self, id, params, body):
+ self.type = Syntax.FunctionExpression
+ self.generator = False
+ self.expression = False
+ self.isAsync = True
+ self.id = id
+ self.params = params
+ self.body = body
+
+
+class AwaitExpression(Node):
+ def __init__(self, argument):
+ self.type = Syntax.AwaitExpression
+ self.argument = argument
+
+
+class BinaryExpression(Node):
+ def __init__(self, operator, left, right):
+ self.type = Syntax.LogicalExpression if operator in ('||', '&&') else Syntax.BinaryExpression
+ self.operator = operator
+ self.left = left
+ self.right = right
+
+
+class BlockStatement(Node):
+ def __init__(self, body):
+ self.type = Syntax.BlockStatement
+ self.body = body
+
+
+class BreakStatement(Node):
+ def __init__(self, label):
+ self.type = Syntax.BreakStatement
+ self.label = label
+
+
+class CallExpression(Node):
+ def __init__(self, callee, args):
+ self.type = Syntax.CallExpression
+ self.callee = callee
+ self.arguments = args
+
+
+class CatchClause(Node):
+ def __init__(self, param, body):
+ self.type = Syntax.CatchClause
+ self.param = param
+ self.body = body
+
+
+class ClassBody(Node):
+ def __init__(self, body):
+ self.type = Syntax.ClassBody
+ self.body = body
+
+
+class ClassDeclaration(Node):
+ def __init__(self, id, superClass, body):
+ self.type = Syntax.ClassDeclaration
+ self.id = id
+ self.superClass = superClass
+ self.body = body
+
+
+class ClassExpression(Node):
+ def __init__(self, id, superClass, body):
+ self.type = Syntax.ClassExpression
+ self.id = id
+ self.superClass = superClass
+ self.body = body
+
+
+class ComputedMemberExpression(Node):
+ def __init__(self, object, property):
+ self.type = Syntax.MemberExpression
+ self.computed = True
+ self.object = object
+ self.property = property
+
+
+class ConditionalExpression(Node):
+ def __init__(self, test, consequent, alternate):
+ self.type = Syntax.ConditionalExpression
+ self.test = test
+ self.consequent = consequent
+ self.alternate = alternate
+
+
+class ContinueStatement(Node):
+ def __init__(self, label):
+ self.type = Syntax.ContinueStatement
+ self.label = label
+
+
+class DebuggerStatement(Node):
+ def __init__(self):
+ self.type = Syntax.DebuggerStatement
+
+
+class Directive(Node):
+ def __init__(self, expression, directive):
+ self.type = Syntax.ExpressionStatement
+ self.expression = expression
+ self.directive = directive
+
+
+class DoWhileStatement(Node):
+ def __init__(self, body, test):
+ self.type = Syntax.DoWhileStatement
+ self.body = body
+ self.test = test
+
+
+class EmptyStatement(Node):
+ def __init__(self):
+ self.type = Syntax.EmptyStatement
+
+
+class ExportAllDeclaration(Node):
+ def __init__(self, source):
+ self.type = Syntax.ExportAllDeclaration
+ self.source = source
+
+
+class ExportDefaultDeclaration(Node):
+ def __init__(self, declaration):
+ self.type = Syntax.ExportDefaultDeclaration
+ self.declaration = declaration
+
+
+class ExportNamedDeclaration(Node):
+ def __init__(self, declaration, specifiers, source):
+ self.type = Syntax.ExportNamedDeclaration
+ self.declaration = declaration
+ self.specifiers = specifiers
+ self.source = source
+
+
+class ExportSpecifier(Node):
+ def __init__(self, local, exported):
+ self.type = Syntax.ExportSpecifier
+ self.exported = exported
+ self.local = local
+
+
+class ExportDefaultSpecifier(Node):
+ def __init__(self, local):
+ self.type = Syntax.ExportDefaultSpecifier
+ self.local = local
+
+
+class ExpressionStatement(Node):
+ def __init__(self, expression):
+ self.type = Syntax.ExpressionStatement
+ self.expression = expression
+
+
+class ForInStatement(Node):
+ def __init__(self, left, right, body):
+ self.type = Syntax.ForInStatement
+ self.each = False
+ self.left = left
+ self.right = right
+ self.body = body
+
+
+class ForOfStatement(Node):
+ def __init__(self, left, right, body):
+ self.type = Syntax.ForOfStatement
+ self.left = left
+ self.right = right
+ self.body = body
+
+
+class ForStatement(Node):
+ def __init__(self, init, test, update, body):
+ self.type = Syntax.ForStatement
+ self.init = init
+ self.test = test
+ self.update = update
+ self.body = body
+
+
+class FunctionDeclaration(Node):
+ def __init__(self, id, params, body, generator):
+ self.type = Syntax.FunctionDeclaration
+ self.expression = False
+ self.isAsync = False
+ self.id = id
+ self.params = params
+ self.body = body
+ self.generator = generator
+
+
+class FunctionExpression(Node):
+ def __init__(self, id, params, body, generator):
+ self.type = Syntax.FunctionExpression
+ self.expression = False
+ self.isAsync = False
+ self.id = id
+ self.params = params
+ self.body = body
+ self.generator = generator
+
+
+class Identifier(Node):
+ def __init__(self, name):
+ self.type = Syntax.Identifier
+ self.name = name
+
+
+class IfStatement(Node):
+ def __init__(self, test, consequent, alternate):
+ self.type = Syntax.IfStatement
+ self.test = test
+ self.consequent = consequent
+ self.alternate = alternate
+
+
+class Import(Node):
+ def __init__(self):
+ self.type = Syntax.Import
+
+
+class ImportDeclaration(Node):
+ def __init__(self, specifiers, source):
+ self.type = Syntax.ImportDeclaration
+ self.specifiers = specifiers
+ self.source = source
+
+
+class ImportDefaultSpecifier(Node):
+ def __init__(self, local):
+ self.type = Syntax.ImportDefaultSpecifier
+ self.local = local
+
+
+class ImportNamespaceSpecifier(Node):
+ def __init__(self, local):
+ self.type = Syntax.ImportNamespaceSpecifier
+ self.local = local
+
+
+class ImportSpecifier(Node):
+ def __init__(self, local, imported):
+ self.type = Syntax.ImportSpecifier
+ self.local = local
+ self.imported = imported
+
+
+class LabeledStatement(Node):
+ def __init__(self, label, body):
+ self.type = Syntax.LabeledStatement
+ self.label = label
+ self.body = body
+
+
+class Literal(Node):
+ def __init__(self, value, raw):
+ self.type = Syntax.Literal
+ self.value = value
+ self.raw = raw
+
+
+class MetaProperty(Node):
+ def __init__(self, meta, property):
+ self.type = Syntax.MetaProperty
+ self.meta = meta
+ self.property = property
+
+
+class MethodDefinition(Node):
+ def __init__(self, key, computed, value, kind, isStatic):
+ self.type = Syntax.MethodDefinition
+ self.key = key
+ self.computed = computed
+ self.value = value
+ self.kind = kind
+ self.static = isStatic
+
+
+class FieldDefinition(Node):
+ def __init__(self, key, computed, value, kind, isStatic):
+ self.type = Syntax.FieldDefinition
+ self.key = key
+ self.computed = computed
+ self.value = value
+ self.kind = kind
+ self.static = isStatic
+
+
+class Module(Node):
+ def __init__(self, body):
+ self.type = Syntax.Program
+ self.sourceType = 'module'
+ self.body = body
+
+
+class NewExpression(Node):
+ def __init__(self, callee, args):
+ self.type = Syntax.NewExpression
+ self.callee = callee
+ self.arguments = args
+
+
+class ObjectExpression(Node):
+ def __init__(self, properties):
+ self.type = Syntax.ObjectExpression
+ self.properties = properties
+
+
+class ObjectPattern(Node):
+ def __init__(self, properties):
+ self.type = Syntax.ObjectPattern
+ self.properties = properties
+
+
+class Property(Node):
+ def __init__(self, kind, key, computed, value, method, shorthand):
+ self.type = Syntax.Property
+ self.key = key
+ self.computed = computed
+ self.value = value
+ self.kind = kind
+ self.method = method
+ self.shorthand = shorthand
+
+
+class RegexLiteral(Node):
+ def __init__(self, value, raw, pattern, flags):
+ self.type = Syntax.Literal
+ self.value = value
+ self.raw = raw
+ self.regex = RegExp(
+ pattern=pattern,
+ flags=flags,
+ )
+
+
+class RestElement(Node):
+ def __init__(self, argument):
+ self.type = Syntax.RestElement
+ self.argument = argument
+
+
+class ReturnStatement(Node):
+ def __init__(self, argument):
+ self.type = Syntax.ReturnStatement
+ self.argument = argument
+
+
+class Script(Node):
+ def __init__(self, body):
+ self.type = Syntax.Program
+ self.sourceType = 'script'
+ self.body = body
+
+
+class SequenceExpression(Node):
+ def __init__(self, expressions):
+ self.type = Syntax.SequenceExpression
+ self.expressions = expressions
+
+
+class SpreadElement(Node):
+ def __init__(self, argument):
+ self.type = Syntax.SpreadElement
+ self.argument = argument
+
+
+class StaticMemberExpression(Node):
+ def __init__(self, object, property):
+ self.type = Syntax.MemberExpression
+ self.computed = False
+ self.object = object
+ self.property = property
+
+
+class Super(Node):
+ def __init__(self):
+ self.type = Syntax.Super
+
+
+class SwitchCase(Node):
+ def __init__(self, test, consequent):
+ self.type = Syntax.SwitchCase
+ self.test = test
+ self.consequent = consequent
+
+
+class SwitchStatement(Node):
+ def __init__(self, discriminant, cases):
+ self.type = Syntax.SwitchStatement
+ self.discriminant = discriminant
+ self.cases = cases
+
+
+class TaggedTemplateExpression(Node):
+ def __init__(self, tag, quasi):
+ self.type = Syntax.TaggedTemplateExpression
+ self.tag = tag
+ self.quasi = quasi
+
+
+class TemplateElement(Node):
+ class Value(Object):
+ def __init__(self, raw, cooked):
+ self.raw = raw
+ self.cooked = cooked
+
+ def __init__(self, raw, cooked, tail):
+ self.type = Syntax.TemplateElement
+ self.value = TemplateElement.Value(raw, cooked)
+ self.tail = tail
+
+
+class TemplateLiteral(Node):
+ def __init__(self, quasis, expressions):
+ self.type = Syntax.TemplateLiteral
+ self.quasis = quasis
+ self.expressions = expressions
+
+
+class ThisExpression(Node):
+ def __init__(self):
+ self.type = Syntax.ThisExpression
+
+
+class ThrowStatement(Node):
+ def __init__(self, argument):
+ self.type = Syntax.ThrowStatement
+ self.argument = argument
+
+
+class TryStatement(Node):
+ def __init__(self, block, handler, finalizer):
+ self.type = Syntax.TryStatement
+ self.block = block
+ self.handler = handler
+ self.finalizer = finalizer
+
+
+class UnaryExpression(Node):
+ def __init__(self, operator, argument):
+ self.type = Syntax.UnaryExpression
+ self.prefix = True
+ self.operator = operator
+ self.argument = argument
+
+
+class UpdateExpression(Node):
+ def __init__(self, operator, argument, prefix):
+ self.type = Syntax.UpdateExpression
+ self.operator = operator
+ self.argument = argument
+ self.prefix = prefix
+
+
+class VariableDeclaration(Node):
+ def __init__(self, declarations, kind):
+ self.type = Syntax.VariableDeclaration
+ self.declarations = declarations
+ self.kind = kind
+
+
+class VariableDeclarator(Node):
+ def __init__(self, id, init):
+ self.type = Syntax.VariableDeclarator
+ self.id = id
+ self.init = init
+
+
+class WhileStatement(Node):
+ def __init__(self, test, body):
+ self.type = Syntax.WhileStatement
+ self.test = test
+ self.body = body
+
+
+class WithStatement(Node):
+ def __init__(self, object, body):
+ self.type = Syntax.WithStatement
+ self.object = object
+ self.body = body
+
+
+class YieldExpression(Node):
+ def __init__(self, argument, delegate):
+ self.type = Syntax.YieldExpression
+ self.argument = argument
+ self.delegate = delegate
+
+
+class ArrowParameterPlaceHolder(Node):
+ def __init__(self, params):
+ self.type = Syntax.ArrowParameterPlaceHolder
+ self.params = params
+ self.isAsync = False
+
+
+class AsyncArrowParameterPlaceHolder(Node):
+ def __init__(self, params):
+ self.type = Syntax.ArrowParameterPlaceHolder
+ self.params = params
+ self.isAsync = True
+
+
+class BlockComment(Node):
+ def __init__(self, value):
+ self.type = Syntax.BlockComment
+ self.value = value
+
+
+class LineComment(Node):
+ def __init__(self, value):
+ self.type = Syntax.LineComment
+ self.value = value
diff --git a/third_party/python/esprima/esprima/objects.py b/third_party/python/esprima/esprima/objects.py
new file mode 100644
index 0000000000..a8acca1b63
--- /dev/null
+++ b/third_party/python/esprima/esprima/objects.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, unicode_literals
+
+
+def toDict(value):
+ from .visitor import ToDictVisitor
+ return ToDictVisitor().visit(value)
+
+
+class Array(list):
+ pass
+
+
+class Object(object):
+ def toDict(self):
+ from .visitor import ToDictVisitor
+ return ToDictVisitor().visit(self)
+
+ def __repr__(self):
+ from .visitor import ReprVisitor
+ return ReprVisitor().visit(self)
+
+ def __getattr__(self, name):
+ return None
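+
+
+# Note: because __getattr__ returns None for any attribute that was never set,
+# missing fields on an Object (or a subclass of it) simply read as None, much
+# like JavaScript's undefined, instead of raising AttributeError.
+# For example (illustrative): Object().range is None.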
diff --git a/third_party/python/esprima/esprima/parser.py b/third_party/python/esprima/esprima/parser.py
new file mode 100644
index 0000000000..2309e7b6fb
--- /dev/null
+++ b/third_party/python/esprima/esprima/parser.py
@@ -0,0 +1,3104 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, unicode_literals
+
+from .objects import Object
+from .compat import basestring, unicode
+from .utils import format
+from .error_handler import ErrorHandler
+from .messages import Messages
+from .scanner import RawToken, Scanner, SourceLocation, Position, RegExp
+from .token import Token, TokenName
+from .syntax import Syntax
+from . import nodes as Node
+
+
+class Value(object):
+ def __init__(self, value):
+ self.value = value
+
+
+class Params(object):
+ def __init__(self, simple=None, message=None, stricted=None, firstRestricted=None, inFor=None, paramSet=None, params=None, get=None):
+ self.simple = simple
+ self.message = message
+ self.stricted = stricted
+ self.firstRestricted = firstRestricted
+ self.inFor = inFor
+ self.paramSet = paramSet
+ self.params = params
+ self.get = get
+
+
+class Config(Object):
+ def __init__(self, range=False, loc=False, source=None, tokens=False, comment=False, tolerant=False, **options):
+ self.range = range
+ self.loc = loc
+ self.source = source
+ self.tokens = tokens
+ self.comment = comment
+ self.tolerant = tolerant
+ for k, v in options.items():
+ setattr(self, k, v)
+
+
+class Context(object):
+ def __init__(self, isModule=False, allowAwait=False, allowIn=True, allowStrictDirective=True, allowYield=True, firstCoverInitializedNameError=None, isAssignmentTarget=False, isBindingElement=False, inFunctionBody=False, inIteration=False, inSwitch=False, labelSet=None, strict=False):
+ self.isModule = isModule
+ self.allowAwait = allowAwait
+ self.allowIn = allowIn
+ self.allowStrictDirective = allowStrictDirective
+ self.allowYield = allowYield
+ self.firstCoverInitializedNameError = firstCoverInitializedNameError
+ self.isAssignmentTarget = isAssignmentTarget
+ self.isBindingElement = isBindingElement
+ self.inFunctionBody = inFunctionBody
+ self.inIteration = inIteration
+ self.inSwitch = inSwitch
+ self.labelSet = {} if labelSet is None else labelSet
+ self.strict = strict
+
+
+class Marker(object):
+ def __init__(self, index=None, line=None, column=None):
+ self.index = index
+ self.line = line
+ self.column = column
+
+
+class TokenEntry(Object):
+ def __init__(self, type=None, value=None, regex=None, range=None, loc=None):
+ self.type = type
+ self.value = value
+ self.regex = regex
+ self.range = range
+ self.loc = loc
+
+
+class Parser(object):
+ def __init__(self, code, options={}, delegate=None):
+ self.config = Config(**options)
+
+ self.delegate = delegate
+
+ self.errorHandler = ErrorHandler()
+ self.errorHandler.tolerant = self.config.tolerant
+ self.scanner = Scanner(code, self.errorHandler)
+ self.scanner.trackComment = self.config.comment
+
+ self.operatorPrecedence = {
+ '||': 1,
+ '&&': 2,
+ '|': 3,
+ '^': 4,
+ '&': 5,
+ '==': 6,
+ '!=': 6,
+ '===': 6,
+ '!==': 6,
+ '<': 7,
+ '>': 7,
+ '<=': 7,
+ '>=': 7,
+ 'instanceof': 7,
+ 'in': 7,
+ '<<': 8,
+ '>>': 8,
+ '>>>': 8,
+ '+': 9,
+ '-': 9,
+ '*': 11,
+ '/': 11,
+ '%': 11,
+ }
+
+ self.lookahead = RawToken(
+ type=Token.EOF,
+ value='',
+ lineNumber=self.scanner.lineNumber,
+ lineStart=0,
+ start=0,
+ end=0
+ )
+ self.hasLineTerminator = False
+
+ self.context = Context(
+ isModule=False,
+ allowAwait=False,
+ allowIn=True,
+ allowStrictDirective=True,
+ allowYield=True,
+ firstCoverInitializedNameError=None,
+ isAssignmentTarget=False,
+ isBindingElement=False,
+ inFunctionBody=False,
+ inIteration=False,
+ inSwitch=False,
+ labelSet={},
+ strict=False
+ )
+ self.tokens = []
+
+ self.startMarker = Marker(
+ index=0,
+ line=self.scanner.lineNumber,
+ column=0
+ )
+ self.lastMarker = Marker(
+ index=0,
+ line=self.scanner.lineNumber,
+ column=0
+ )
+ self.nextToken()
+ self.lastMarker = Marker(
+ index=self.scanner.index,
+ line=self.scanner.lineNumber,
+ column=self.scanner.index - self.scanner.lineStart
+ )
+
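+ # Illustrative usage sketch (not part of the upstream sources): the options
+ # dict passed above is forwarded to Config, so flags such as 'range', 'loc',
+ # 'tokens', 'comment' and 'tolerant' switch on the corresponding features,
+ # e.g.
+ #
+ #     parser = Parser('answer = 42', options={'loc': True, 'tokens': True})
+ #
+ # With tolerant=True, recoverable errors are handed to the ErrorHandler via
+ # tolerateError()/tolerate() below; the handler (defined elsewhere) decides
+ # whether to record or raise them.
+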
+ def throwError(self, messageFormat, *args):
+ msg = format(messageFormat, *args)
+ index = self.lastMarker.index
+ line = self.lastMarker.line
+ column = self.lastMarker.column + 1
+ raise self.errorHandler.createError(index, line, column, msg)
+
+ def tolerateError(self, messageFormat, *args):
+ msg = format(messageFormat, *args)
+ index = self.lastMarker.index
+ line = self.scanner.lineNumber
+ column = self.lastMarker.column + 1
+ self.errorHandler.tolerateError(index, line, column, msg)
+
+ # Throw an exception because of the token.
+
+ def unexpectedTokenError(self, token=None, message=None):
+ msg = message or Messages.UnexpectedToken
+ if token:
+ if not message:
+ typ = token.type
+ if typ is Token.EOF:
+ msg = Messages.UnexpectedEOS
+ elif typ is Token.Identifier:
+ msg = Messages.UnexpectedIdentifier
+ elif typ is Token.NumericLiteral:
+ msg = Messages.UnexpectedNumber
+ elif typ is Token.StringLiteral:
+ msg = Messages.UnexpectedString
+ elif typ is Token.Template:
+ msg = Messages.UnexpectedTemplate
+ elif typ is Token.Keyword:
+ if self.scanner.isFutureReservedWord(token.value):
+ msg = Messages.UnexpectedReserved
+ elif self.context.strict and self.scanner.isStrictModeReservedWord(token.value):
+ msg = Messages.StrictReservedWord
+ else:
+ msg = Messages.UnexpectedToken
+ value = token.value
+ else:
+ value = 'ILLEGAL'
+
+ msg = msg.replace('%0', unicode(value), 1)
+
+ if token and isinstance(token.lineNumber, int):
+ index = token.start
+ line = token.lineNumber
+ lastMarkerLineStart = self.lastMarker.index - self.lastMarker.column
+ column = token.start - lastMarkerLineStart + 1
+ return self.errorHandler.createError(index, line, column, msg)
+ else:
+ index = self.lastMarker.index
+ line = self.lastMarker.line
+ column = self.lastMarker.column + 1
+ return self.errorHandler.createError(index, line, column, msg)
+
+ def throwUnexpectedToken(self, token=None, message=None):
+ raise self.unexpectedTokenError(token, message)
+
+ def tolerateUnexpectedToken(self, token=None, message=None):
+ self.errorHandler.tolerate(self.unexpectedTokenError(token, message))
+
+ def collectComments(self):
+ if not self.config.comment:
+ self.scanner.scanComments()
+ else:
+ comments = self.scanner.scanComments()
+ if comments:
+ for e in comments:
+ if e.multiLine:
+ node = Node.BlockComment(self.scanner.source[e.slice[0]:e.slice[1]])
+ else:
+ node = Node.LineComment(self.scanner.source[e.slice[0]:e.slice[1]])
+ if self.config.range:
+ node.range = e.range
+ if self.config.loc:
+ node.loc = e.loc
+ if self.delegate:
+ metadata = SourceLocation(
+ start=Position(
+ line=e.loc.start.line,
+ column=e.loc.start.column,
+ offset=e.range[0],
+ ),
+ end=Position(
+ line=e.loc.end.line,
+ column=e.loc.end.column,
+ offset=e.range[1],
+ )
+ )
+ new_node = self.delegate(node, metadata)
+ if new_node is not None:
+ node = new_node
+
+ # From internal representation to an external structure
+
+ def getTokenRaw(self, token):
+ return self.scanner.source[token.start:token.end]
+
+ def convertToken(self, token):
+ t = TokenEntry(
+ type=TokenName[token.type],
+ value=self.getTokenRaw(token),
+ )
+ if self.config.range:
+ t.range = [token.start, token.end]
+ if self.config.loc:
+ t.loc = SourceLocation(
+ start=Position(
+ line=self.startMarker.line,
+ column=self.startMarker.column,
+ ),
+ end=Position(
+ line=self.scanner.lineNumber,
+ column=self.scanner.index - self.scanner.lineStart,
+ ),
+ )
+ if token.type is Token.RegularExpression:
+ t.regex = RegExp(
+ pattern=token.pattern,
+ flags=token.flags,
+ )
+
+ return t
+
+ def nextToken(self):
+ token = self.lookahead
+
+ self.lastMarker.index = self.scanner.index
+ self.lastMarker.line = self.scanner.lineNumber
+ self.lastMarker.column = self.scanner.index - self.scanner.lineStart
+
+ self.collectComments()
+
+ if self.scanner.index != self.startMarker.index:
+ self.startMarker.index = self.scanner.index
+ self.startMarker.line = self.scanner.lineNumber
+ self.startMarker.column = self.scanner.index - self.scanner.lineStart
+
+ next = self.scanner.lex()
+ self.hasLineTerminator = token.lineNumber != next.lineNumber
+
+ if next and self.context.strict and next.type is Token.Identifier:
+ if self.scanner.isStrictModeReservedWord(next.value):
+ next.type = Token.Keyword
+ self.lookahead = next
+
+ if self.config.tokens and next.type is not Token.EOF:
+ self.tokens.append(self.convertToken(next))
+
+ return token
+
+ def nextRegexToken(self):
+ self.collectComments()
+
+ token = self.scanner.scanRegExp()
+ if self.config.tokens:
+ # Pop the previous token, '/' or '/='
+ # it was added from the lookahead token.
+ self.tokens.pop()
+
+ self.tokens.append(self.convertToken(token))
+
+ # Prime the next lookahead.
+ self.lookahead = token
+ self.nextToken()
+
+ return token
+
+ def createNode(self):
+ return Marker(
+ index=self.startMarker.index,
+ line=self.startMarker.line,
+ column=self.startMarker.column,
+ )
+
+ def startNode(self, token, lastLineStart=0):
+ column = token.start - token.lineStart
+ line = token.lineNumber
+ if column < 0:
+ column += lastLineStart
+ line -= 1
+
+ return Marker(
+ index=token.start,
+ line=line,
+ column=column,
+ )
+
+ def finalize(self, marker, node):
+ if self.config.range:
+ node.range = [marker.index, self.lastMarker.index]
+
+ if self.config.loc:
+ node.loc = SourceLocation(
+ start=Position(
+ line=marker.line,
+ column=marker.column,
+ ),
+ end=Position(
+ line=self.lastMarker.line,
+ column=self.lastMarker.column,
+ ),
+ )
+ if self.config.source:
+ node.loc.source = self.config.source
+
+ if self.delegate:
+ metadata = SourceLocation(
+ start=Position(
+ line=marker.line,
+ column=marker.column,
+ offset=marker.index,
+ ),
+ end=Position(
+ line=self.lastMarker.line,
+ column=self.lastMarker.column,
+ offset=self.lastMarker.index,
+ )
+ )
+ new_node = self.delegate(node, metadata)
+ if new_node is not None:
+ node = new_node
+
+ return node
+
+ # Expect the next token to match the specified punctuator.
+ # If not, an exception will be thrown.
+
+ def expect(self, value):
+ token = self.nextToken()
+ if token.type is not Token.Punctuator or token.value != value:
+ self.throwUnexpectedToken(token)
+
+ # Quietly expect a comma when in tolerant mode, otherwise delegates to expect().
+
+ def expectCommaSeparator(self):
+ if self.config.tolerant:
+ token = self.lookahead
+ if token.type is Token.Punctuator and token.value == ',':
+ self.nextToken()
+ elif token.type is Token.Punctuator and token.value == ';':
+ self.nextToken()
+ self.tolerateUnexpectedToken(token)
+ else:
+ self.tolerateUnexpectedToken(token, Messages.UnexpectedToken)
+ else:
+ self.expect(',')
+
+ # Expect the next token to match the specified keyword.
+ # If not, an exception will be thrown.
+
+ def expectKeyword(self, keyword):
+ token = self.nextToken()
+ if token.type is not Token.Keyword or token.value != keyword:
+ self.throwUnexpectedToken(token)
+
+ # Return true if the next token matches the specified punctuator.
+
+ def match(self, *value):
+ return self.lookahead.type is Token.Punctuator and self.lookahead.value in value
+
+ # Return true if the next token matches the specified keyword
+
+ def matchKeyword(self, *keyword):
+ return self.lookahead.type is Token.Keyword and self.lookahead.value in keyword
+
+ # Return true if the next token matches the specified contextual keyword
+ # (where an identifier is sometimes a keyword depending on the context)
+
+ def matchContextualKeyword(self, *keyword):
+ return self.lookahead.type is Token.Identifier and self.lookahead.value in keyword
+
+ # Return true if the next token is an assignment operator
+
+ def matchAssign(self):
+ if self.lookahead.type is not Token.Punctuator:
+ return False
+
+ op = self.lookahead.value
+ return op in ('=', '*=', '**=', '/=', '%=', '+=', '-=', '<<=', '>>=', '>>>=', '&=', '^=', '|=')
+
+ # Cover grammar support.
+ #
+ # When an assignment expression position starts with a left parenthesis, the determination of the type
+ # of the syntax is to be deferred arbitrarily long until the end of the parentheses pair (plus a lookahead)
+ # or the first comma. This situation also defers the determination of all the expressions nested in the pair.
+ #
+ # There are three productions that can be parsed in a parentheses pair that need to be determined
+ # after the outermost pair is closed. They are:
+ #
+ # 1. AssignmentExpression
+ # 2. BindingElements
+ # 3. AssignmentTargets
+ #
+ # In order to avoid exponential backtracking, we use two flags to denote whether the production can be
+ # a binding element or an assignment target.
+ #
+ # The three productions have the relationship:
+ #
+ # BindingElements ⊆ AssignmentTargets ⊆ AssignmentExpression
+ #
+ # with the single exception that a CoverInitializedName, when used directly in an Expression, generates
+ # an early error. Therefore, we need a third state, firstCoverInitializedNameError, to track the
+ # first usage of CoverInitializedName and report it when we reach the end of the parentheses pair.
+ #
+ # The isolateCoverGrammar function runs the given parse function with a new cover grammar context, and it does not
+ # affect the current flags. This means the production the parser parses is only used as an expression. Therefore
+ # the CoverInitializedName check is conducted.
+ #
+ # The inheritCoverGrammar function runs the given parse function with a new cover grammar context, and it propagates
+ # the flags outside of the parser. This means the production the parser parses is used as a part of a potential
+ # pattern. The CoverInitializedName check is deferred.
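+ #
+ # For example (illustrative, not part of the upstream comment):
+ #
+ #     (a = 1)            -- a parenthesized assignment expression
+ #     (a = 1) => a       -- the same tokens read as an arrow parameter list
+ #     ({x = 1})          -- CoverInitializedName: an early error as an expression
+ #     ({x = 1} = obj)    -- but valid as a destructuring assignment target
+ #
+ # The parser cannot tell which production applies until it sees what follows
+ # the closing parenthesis, so the two flags and firstCoverInitializedNameError
+ # record what is still possible while the contents are parsed only once.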
+
+ def isolateCoverGrammar(self, parseFunction):
+ previousIsBindingElement = self.context.isBindingElement
+ previousIsAssignmentTarget = self.context.isAssignmentTarget
+ previousFirstCoverInitializedNameError = self.context.firstCoverInitializedNameError
+
+ self.context.isBindingElement = True
+ self.context.isAssignmentTarget = True
+ self.context.firstCoverInitializedNameError = None
+
+ result = parseFunction()
+ if self.context.firstCoverInitializedNameError is not None:
+ self.throwUnexpectedToken(self.context.firstCoverInitializedNameError)
+
+ self.context.isBindingElement = previousIsBindingElement
+ self.context.isAssignmentTarget = previousIsAssignmentTarget
+ self.context.firstCoverInitializedNameError = previousFirstCoverInitializedNameError
+
+ return result
+
+ def inheritCoverGrammar(self, parseFunction):
+ previousIsBindingElement = self.context.isBindingElement
+ previousIsAssignmentTarget = self.context.isAssignmentTarget
+ previousFirstCoverInitializedNameError = self.context.firstCoverInitializedNameError
+
+ self.context.isBindingElement = True
+ self.context.isAssignmentTarget = True
+ self.context.firstCoverInitializedNameError = None
+
+ result = parseFunction()
+
+ self.context.isBindingElement = self.context.isBindingElement and previousIsBindingElement
+ self.context.isAssignmentTarget = self.context.isAssignmentTarget and previousIsAssignmentTarget
+ self.context.firstCoverInitializedNameError = previousFirstCoverInitializedNameError or self.context.firstCoverInitializedNameError
+
+ return result
+
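+ # Consume a ';' if one is present; otherwise accept the end of a statement
+ # only at a line terminator, EOF or '}' (automatic semicolon insertion).
+ # Illustrative: "x = 1; y = 2" and "x = 1\ny = 2" both parse, "x = 1 y = 2"
+ # does not.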
+ def consumeSemicolon(self):
+ if self.match(';'):
+ self.nextToken()
+ elif not self.hasLineTerminator:
+ if self.lookahead.type is not Token.EOF and not self.match('}'):
+ self.throwUnexpectedToken(self.lookahead)
+ self.lastMarker.index = self.startMarker.index
+ self.lastMarker.line = self.startMarker.line
+ self.lastMarker.column = self.startMarker.column
+
+ # https://tc39.github.io/ecma262/#sec-primary-expression
+
+ def parsePrimaryExpression(self):
+ node = self.createNode()
+
+ typ = self.lookahead.type
+ if typ is Token.Identifier:
+ if (self.context.isModule or self.context.allowAwait) and self.lookahead.value == 'await':
+ self.tolerateUnexpectedToken(self.lookahead)
+ expr = self.parseFunctionExpression() if self.matchAsyncFunction() else self.finalize(node, Node.Identifier(self.nextToken().value))
+
+ elif typ in (
+ Token.NumericLiteral,
+ Token.StringLiteral,
+ ):
+ if self.context.strict and self.lookahead.octal:
+ self.tolerateUnexpectedToken(self.lookahead, Messages.StrictOctalLiteral)
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+ token = self.nextToken()
+ raw = self.getTokenRaw(token)
+ expr = self.finalize(node, Node.Literal(token.value, raw))
+
+ elif typ is Token.BooleanLiteral:
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+ token = self.nextToken()
+ raw = self.getTokenRaw(token)
+ expr = self.finalize(node, Node.Literal(token.value == 'true', raw))
+
+ elif typ is Token.NullLiteral:
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+ token = self.nextToken()
+ raw = self.getTokenRaw(token)
+ expr = self.finalize(node, Node.Literal(None, raw))
+
+ elif typ is Token.Template:
+ expr = self.parseTemplateLiteral()
+
+ elif typ is Token.Punctuator:
+ value = self.lookahead.value
+ if value == '(':
+ self.context.isBindingElement = False
+ expr = self.inheritCoverGrammar(self.parseGroupExpression)
+ elif value == '[':
+ expr = self.inheritCoverGrammar(self.parseArrayInitializer)
+ elif value == '{':
+ expr = self.inheritCoverGrammar(self.parseObjectInitializer)
+ elif value in ('/', '/='):
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+ self.scanner.index = self.startMarker.index
+ token = self.nextRegexToken()
+ raw = self.getTokenRaw(token)
+ expr = self.finalize(node, Node.RegexLiteral(token.regex, raw, token.pattern, token.flags))
+ else:
+ expr = self.throwUnexpectedToken(self.nextToken())
+
+ elif typ is Token.Keyword:
+ if not self.context.strict and self.context.allowYield and self.matchKeyword('yield'):
+ expr = self.parseIdentifierName()
+ elif not self.context.strict and self.matchKeyword('let'):
+ expr = self.finalize(node, Node.Identifier(self.nextToken().value))
+ else:
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+ if self.matchKeyword('function'):
+ expr = self.parseFunctionExpression()
+ elif self.matchKeyword('this'):
+ self.nextToken()
+ expr = self.finalize(node, Node.ThisExpression())
+ elif self.matchKeyword('class'):
+ expr = self.parseClassExpression()
+ elif self.matchImportCall():
+ expr = self.parseImportCall()
+ else:
+ expr = self.throwUnexpectedToken(self.nextToken())
+
+ else:
+ expr = self.throwUnexpectedToken(self.nextToken())
+
+ return expr
+
+ # https://tc39.github.io/ecma262/#sec-array-initializer
+
+ def parseSpreadElement(self):
+ node = self.createNode()
+ self.expect('...')
+ arg = self.inheritCoverGrammar(self.parseAssignmentExpression)
+ return self.finalize(node, Node.SpreadElement(arg))
+
+ def parseArrayInitializer(self):
+ node = self.createNode()
+ elements = []
+
+ self.expect('[')
+ while not self.match(']'):
+ if self.match(','):
+ self.nextToken()
+ elements.append(None)
+ elif self.match('...'):
+ element = self.parseSpreadElement()
+ if not self.match(']'):
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+ self.expect(',')
+ elements.append(element)
+ else:
+ elements.append(self.inheritCoverGrammar(self.parseAssignmentExpression))
+ if not self.match(']'):
+ self.expect(',')
+ self.expect(']')
+
+ return self.finalize(node, Node.ArrayExpression(elements))
+
+ # https://tc39.github.io/ecma262/#sec-object-initializer
+
+ def parsePropertyMethod(self, params):
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+
+ previousStrict = self.context.strict
+ previousAllowStrictDirective = self.context.allowStrictDirective
+ self.context.allowStrictDirective = params.simple
+ body = self.isolateCoverGrammar(self.parseFunctionSourceElements)
+ if self.context.strict and params.firstRestricted:
+ self.tolerateUnexpectedToken(params.firstRestricted, params.message)
+ if self.context.strict and params.stricted:
+ self.tolerateUnexpectedToken(params.stricted, params.message)
+ self.context.strict = previousStrict
+ self.context.allowStrictDirective = previousAllowStrictDirective
+
+ return body
+
+ def parsePropertyMethodFunction(self):
+ isGenerator = False
+ node = self.createNode()
+
+ previousAllowYield = self.context.allowYield
+ self.context.allowYield = True
+ params = self.parseFormalParameters()
+ method = self.parsePropertyMethod(params)
+ self.context.allowYield = previousAllowYield
+
+ return self.finalize(node, Node.FunctionExpression(None, params.params, method, isGenerator))
+
+ def parsePropertyMethodAsyncFunction(self):
+ node = self.createNode()
+
+ previousAllowYield = self.context.allowYield
+ previousAwait = self.context.allowAwait
+ self.context.allowYield = False
+ self.context.allowAwait = True
+ params = self.parseFormalParameters()
+ method = self.parsePropertyMethod(params)
+ self.context.allowYield = previousAllowYield
+ self.context.allowAwait = previousAwait
+
+ return self.finalize(node, Node.AsyncFunctionExpression(None, params.params, method))
+
+ def parseObjectPropertyKey(self):
+ node = self.createNode()
+ token = self.nextToken()
+
+ typ = token.type
+ if typ in (
+ Token.StringLiteral,
+ Token.NumericLiteral,
+ ):
+ if self.context.strict and token.octal:
+ self.tolerateUnexpectedToken(token, Messages.StrictOctalLiteral)
+ raw = self.getTokenRaw(token)
+ key = self.finalize(node, Node.Literal(token.value, raw))
+
+ elif typ in (
+ Token.Identifier,
+ Token.BooleanLiteral,
+ Token.NullLiteral,
+ Token.Keyword,
+ ):
+ key = self.finalize(node, Node.Identifier(token.value))
+
+ elif typ is Token.Punctuator:
+ if token.value == '[':
+ key = self.isolateCoverGrammar(self.parseAssignmentExpression)
+ self.expect(']')
+ else:
+ key = self.throwUnexpectedToken(token)
+
+ else:
+ key = self.throwUnexpectedToken(token)
+
+ return key
+
+ def isPropertyKey(self, key, value):
+ return (
+ (key.type is Syntax.Identifier and key.name == value) or
+ (key.type is Syntax.Literal and key.value == value)
+ )
+
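+ # hasProto is a mutable Value box shared with parseObjectInitializer so a
+ # duplicate non-computed '__proto__' key, e.g. (illustrative)
+ # "{__proto__: a, __proto__: b}", is reported as DuplicateProtoProperty.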
+ def parseObjectProperty(self, hasProto):
+ node = self.createNode()
+ token = self.lookahead
+
+ key = None
+ value = None
+
+ computed = False
+ method = False
+ shorthand = False
+ isAsync = False
+
+ if token.type is Token.Identifier:
+ id = token.value
+ self.nextToken()
+ computed = self.match('[')
+ isAsync = not self.hasLineTerminator and (id == 'async') and not (self.match(':', '(', '*', ','))
+ key = self.parseObjectPropertyKey() if isAsync else self.finalize(node, Node.Identifier(id))
+ elif self.match('*'):
+ self.nextToken()
+ else:
+ computed = self.match('[')
+ key = self.parseObjectPropertyKey()
+
+ lookaheadPropertyKey = self.qualifiedPropertyName(self.lookahead)
+ if token.type is Token.Identifier and not isAsync and token.value == 'get' and lookaheadPropertyKey:
+ kind = 'get'
+ computed = self.match('[')
+ key = self.parseObjectPropertyKey()
+ self.context.allowYield = False
+ value = self.parseGetterMethod()
+
+ elif token.type is Token.Identifier and not isAsync and token.value == 'set' and lookaheadPropertyKey:
+ kind = 'set'
+ computed = self.match('[')
+ key = self.parseObjectPropertyKey()
+ value = self.parseSetterMethod()
+
+ elif token.type is Token.Punctuator and token.value == '*' and lookaheadPropertyKey:
+ kind = 'init'
+ computed = self.match('[')
+ key = self.parseObjectPropertyKey()
+ value = self.parseGeneratorMethod()
+ method = True
+
+ else:
+ if not key:
+ self.throwUnexpectedToken(self.lookahead)
+
+ kind = 'init'
+ if self.match(':') and not isAsync:
+ if not computed and self.isPropertyKey(key, '__proto__'):
+ if hasProto.value:
+ self.tolerateError(Messages.DuplicateProtoProperty)
+ hasProto.value = True
+ self.nextToken()
+ value = self.inheritCoverGrammar(self.parseAssignmentExpression)
+
+ elif self.match('('):
+ value = self.parsePropertyMethodAsyncFunction() if isAsync else self.parsePropertyMethodFunction()
+ method = True
+
+ elif token.type is Token.Identifier:
+ id = self.finalize(node, Node.Identifier(token.value))
+ if self.match('='):
+ self.context.firstCoverInitializedNameError = self.lookahead
+ self.nextToken()
+ shorthand = True
+ init = self.isolateCoverGrammar(self.parseAssignmentExpression)
+ value = self.finalize(node, Node.AssignmentPattern(id, init))
+ else:
+ shorthand = True
+ value = id
+ else:
+ self.throwUnexpectedToken(self.nextToken())
+
+ return self.finalize(node, Node.Property(kind, key, computed, value, method, shorthand))
+
+ def parseObjectInitializer(self):
+ node = self.createNode()
+
+ self.expect('{')
+ properties = []
+ hasProto = Value(False)
+ while not self.match('}'):
+ properties.append(self.parseSpreadElement() if self.match('...') else self.parseObjectProperty(hasProto))
+ if not self.match('}'):
+ self.expectCommaSeparator()
+ self.expect('}')
+
+ return self.finalize(node, Node.ObjectExpression(properties))
+
+ # https://tc39.github.io/ecma262/#sec-template-literals
+
+ def parseTemplateHead(self):
+ assert self.lookahead.head, 'Template literal must start with a template head'
+
+ node = self.createNode()
+ token = self.nextToken()
+ raw = token.value
+ cooked = token.cooked
+
+ return self.finalize(node, Node.TemplateElement(raw, cooked, token.tail))
+
+ def parseTemplateElement(self):
+ if self.lookahead.type is not Token.Template:
+ self.throwUnexpectedToken()
+
+ node = self.createNode()
+ token = self.nextToken()
+ raw = token.value
+ cooked = token.cooked
+
+ return self.finalize(node, Node.TemplateElement(raw, cooked, token.tail))
+
+ def parseTemplateLiteral(self):
+ node = self.createNode()
+
+ expressions = []
+ quasis = []
+
+ quasi = self.parseTemplateHead()
+ quasis.append(quasi)
+ while not quasi.tail:
+ expressions.append(self.parseExpression())
+ quasi = self.parseTemplateElement()
+ quasis.append(quasi)
+
+ return self.finalize(node, Node.TemplateLiteral(quasis, expressions))
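+
+ # Illustrative: for a template such as `a${b}c${d}e` the loop above collects
+ # template elements for 'a', 'c' and 'e' (the last one with tail=True) in
+ # quasis, and the embedded expressions b and d in expressions.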
+
+ # https://tc39.github.io/ecma262/#sec-grouping-operator
+
+ def reinterpretExpressionAsPattern(self, expr):
+ typ = expr.type
+ if typ in (
+ Syntax.Identifier,
+ Syntax.MemberExpression,
+ Syntax.RestElement,
+ Syntax.AssignmentPattern,
+ ):
+ pass
+ elif typ is Syntax.SpreadElement:
+ expr.type = Syntax.RestElement
+ self.reinterpretExpressionAsPattern(expr.argument)
+ elif typ is Syntax.ArrayExpression:
+ expr.type = Syntax.ArrayPattern
+ for elem in expr.elements:
+ if elem is not None:
+ self.reinterpretExpressionAsPattern(elem)
+ elif typ is Syntax.ObjectExpression:
+ expr.type = Syntax.ObjectPattern
+ for prop in expr.properties:
+ self.reinterpretExpressionAsPattern(prop if prop.type is Syntax.SpreadElement else prop.value)
+ elif typ is Syntax.AssignmentExpression:
+ expr.type = Syntax.AssignmentPattern
+ del expr.operator
+ self.reinterpretExpressionAsPattern(expr.left)
+ else:
+ # Allow other node type for tolerant parsing.
+ pass
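+
+ # Illustrative: for "[a, ...rest] = value" the left-hand side is first parsed
+ # as an ArrayExpression and then rewritten in place by the method above into
+ # an ArrayPattern whose SpreadElement becomes a RestElement.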
+
+ def parseGroupExpression(self):
+ self.expect('(')
+ if self.match(')'):
+ self.nextToken()
+ if not self.match('=>'):
+ self.expect('=>')
+ expr = Node.ArrowParameterPlaceHolder([])
+ else:
+ startToken = self.lookahead
+ params = []
+ if self.match('...'):
+ expr = self.parseRestElement(params)
+ self.expect(')')
+ if not self.match('=>'):
+ self.expect('=>')
+ expr = Node.ArrowParameterPlaceHolder([expr])
+ else:
+ arrow = False
+ self.context.isBindingElement = True
+ expr = self.inheritCoverGrammar(self.parseAssignmentExpression)
+
+ if self.match(','):
+ expressions = []
+
+ self.context.isAssignmentTarget = False
+ expressions.append(expr)
+ while self.lookahead.type is not Token.EOF:
+ if not self.match(','):
+ break
+ self.nextToken()
+ if self.match(')'):
+ self.nextToken()
+ for expression in expressions:
+ self.reinterpretExpressionAsPattern(expression)
+ arrow = True
+ expr = Node.ArrowParameterPlaceHolder(expressions)
+ elif self.match('...'):
+ if not self.context.isBindingElement:
+ self.throwUnexpectedToken(self.lookahead)
+ expressions.append(self.parseRestElement(params))
+ self.expect(')')
+ if not self.match('=>'):
+ self.expect('=>')
+ self.context.isBindingElement = False
+ for expression in expressions:
+ self.reinterpretExpressionAsPattern(expression)
+ arrow = True
+ expr = Node.ArrowParameterPlaceHolder(expressions)
+ else:
+ expressions.append(self.inheritCoverGrammar(self.parseAssignmentExpression))
+ if arrow:
+ break
+ if not arrow:
+ expr = self.finalize(self.startNode(startToken), Node.SequenceExpression(expressions))
+
+ if not arrow:
+ self.expect(')')
+ if self.match('=>'):
+ if expr.type is Syntax.Identifier and expr.name == 'yield':
+ arrow = True
+ expr = Node.ArrowParameterPlaceHolder([expr])
+ if not arrow:
+ if not self.context.isBindingElement:
+ self.throwUnexpectedToken(self.lookahead)
+
+ if expr.type is Syntax.SequenceExpression:
+ for expression in expr.expressions:
+ self.reinterpretExpressionAsPattern(expression)
+ else:
+ self.reinterpretExpressionAsPattern(expr)
+
+ if expr.type is Syntax.SequenceExpression:
+ parameters = expr.expressions
+ else:
+ parameters = [expr]
+ expr = Node.ArrowParameterPlaceHolder(parameters)
+ self.context.isBindingElement = False
+
+ return expr
+
+ # https://tc39.github.io/ecma262/#sec-left-hand-side-expressions
+
+ def parseArguments(self):
+ self.expect('(')
+ args = []
+ if not self.match(')'):
+ while True:
+ if self.match('...'):
+ expr = self.parseSpreadElement()
+ else:
+ expr = self.isolateCoverGrammar(self.parseAssignmentExpression)
+ args.append(expr)
+ if self.match(')'):
+ break
+ self.expectCommaSeparator()
+ if self.match(')'):
+ break
+ self.expect(')')
+
+ return args
+
+ def isIdentifierName(self, token):
+ return (
+ token.type is Token.Identifier or
+ token.type is Token.Keyword or
+ token.type is Token.BooleanLiteral or
+ token.type is Token.NullLiteral
+ )
+
+ def parseIdentifierName(self):
+ node = self.createNode()
+ token = self.nextToken()
+ if not self.isIdentifierName(token):
+ self.throwUnexpectedToken(token)
+ return self.finalize(node, Node.Identifier(token.value))
+
+ def parseNewExpression(self):
+ node = self.createNode()
+
+ id = self.parseIdentifierName()
+ assert id.name == 'new', 'New expression must start with `new`'
+
+ if self.match('.'):
+ self.nextToken()
+ if self.lookahead.type is Token.Identifier and self.context.inFunctionBody and self.lookahead.value == 'target':
+ property = self.parseIdentifierName()
+ expr = Node.MetaProperty(id, property)
+ else:
+ self.throwUnexpectedToken(self.lookahead)
+ elif self.matchKeyword('import'):
+ self.throwUnexpectedToken(self.lookahead)
+ else:
+ callee = self.isolateCoverGrammar(self.parseLeftHandSideExpression)
+ args = self.parseArguments() if self.match('(') else []
+ expr = Node.NewExpression(callee, args)
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+
+ return self.finalize(node, expr)
+
+ def parseAsyncArgument(self):
+ arg = self.parseAssignmentExpression()
+ self.context.firstCoverInitializedNameError = None
+ return arg
+
+ def parseAsyncArguments(self):
+ self.expect('(')
+ args = []
+ if not self.match(')'):
+ while True:
+ if self.match('...'):
+ expr = self.parseSpreadElement()
+ else:
+ expr = self.isolateCoverGrammar(self.parseAsyncArgument)
+ args.append(expr)
+ if self.match(')'):
+ break
+ self.expectCommaSeparator()
+ if self.match(')'):
+ break
+ self.expect(')')
+
+ return args
+
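+ # Peek one token past 'import' (saving and restoring the scanner state) to
+ # tell an import() call expression apart from an import declaration.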
+ def matchImportCall(self):
+ match = self.matchKeyword('import')
+ if match:
+ state = self.scanner.saveState()
+ self.scanner.scanComments()
+ next = self.scanner.lex()
+ self.scanner.restoreState(state)
+ match = (next.type is Token.Punctuator) and (next.value == '(')
+
+ return match
+
+ def parseImportCall(self):
+ node = self.createNode()
+ self.expectKeyword('import')
+ return self.finalize(node, Node.Import())
+
+ def parseLeftHandSideExpressionAllowCall(self):
+ startToken = self.lookahead
+ maybeAsync = self.matchContextualKeyword('async')
+
+ previousAllowIn = self.context.allowIn
+ self.context.allowIn = True
+
+ if self.matchKeyword('super') and self.context.inFunctionBody:
+ expr = self.createNode()
+ self.nextToken()
+ expr = self.finalize(expr, Node.Super())
+ if not self.match('(') and not self.match('.') and not self.match('['):
+ self.throwUnexpectedToken(self.lookahead)
+ else:
+ expr = self.inheritCoverGrammar(self.parseNewExpression if self.matchKeyword('new') else self.parsePrimaryExpression)
+
+ while True:
+ if self.match('.'):
+ self.context.isBindingElement = False
+ self.context.isAssignmentTarget = True
+ self.expect('.')
+ property = self.parseIdentifierName()
+ expr = self.finalize(self.startNode(startToken), Node.StaticMemberExpression(expr, property))
+
+ elif self.match('('):
+ asyncArrow = maybeAsync and (startToken.lineNumber == self.lookahead.lineNumber)
+ self.context.isBindingElement = False
+ self.context.isAssignmentTarget = False
+ if asyncArrow:
+ args = self.parseAsyncArguments()
+ else:
+ args = self.parseArguments()
+ if expr.type is Syntax.Import and len(args) != 1:
+ self.tolerateError(Messages.BadImportCallArity)
+ expr = self.finalize(self.startNode(startToken), Node.CallExpression(expr, args))
+ if asyncArrow and self.match('=>'):
+ for arg in args:
+ self.reinterpretExpressionAsPattern(arg)
+ expr = Node.AsyncArrowParameterPlaceHolder(args)
+ elif self.match('['):
+ self.context.isBindingElement = False
+ self.context.isAssignmentTarget = True
+ self.expect('[')
+ property = self.isolateCoverGrammar(self.parseExpression)
+ self.expect(']')
+ expr = self.finalize(self.startNode(startToken), Node.ComputedMemberExpression(expr, property))
+
+ elif self.lookahead.type is Token.Template and self.lookahead.head:
+ quasi = self.parseTemplateLiteral()
+ expr = self.finalize(self.startNode(startToken), Node.TaggedTemplateExpression(expr, quasi))
+
+ else:
+ break
+
+ self.context.allowIn = previousAllowIn
+
+ return expr
+
+ def parseSuper(self):
+ node = self.createNode()
+
+ self.expectKeyword('super')
+ if not self.match('[') and not self.match('.'):
+ self.throwUnexpectedToken(self.lookahead)
+
+ return self.finalize(node, Node.Super())
+
+ def parseLeftHandSideExpression(self):
+ assert self.context.allowIn, 'callee of new expression always allows in keyword.'
+
+ node = self.startNode(self.lookahead)
+ if self.matchKeyword('super') and self.context.inFunctionBody:
+ expr = self.parseSuper()
+ else:
+ expr = self.inheritCoverGrammar(self.parseNewExpression if self.matchKeyword('new') else self.parsePrimaryExpression)
+
+ while True:
+ if self.match('['):
+ self.context.isBindingElement = False
+ self.context.isAssignmentTarget = True
+ self.expect('[')
+ property = self.isolateCoverGrammar(self.parseExpression)
+ self.expect(']')
+ expr = self.finalize(node, Node.ComputedMemberExpression(expr, property))
+
+ elif self.match('.'):
+ self.context.isBindingElement = False
+ self.context.isAssignmentTarget = True
+ self.expect('.')
+ property = self.parseIdentifierName()
+ expr = self.finalize(node, Node.StaticMemberExpression(expr, property))
+
+ elif self.lookahead.type is Token.Template and self.lookahead.head:
+ quasi = self.parseTemplateLiteral()
+ expr = self.finalize(node, Node.TaggedTemplateExpression(expr, quasi))
+
+ else:
+ break
+
+ return expr
+
+ # https://tc39.github.io/ecma262/#sec-update-expressions
+
+ def parseUpdateExpression(self):
+ startToken = self.lookahead
+
+ if self.match('++', '--'):
+ node = self.startNode(startToken)
+ token = self.nextToken()
+ expr = self.inheritCoverGrammar(self.parseUnaryExpression)
+ if self.context.strict and expr.type is Syntax.Identifier and self.scanner.isRestrictedWord(expr.name):
+ self.tolerateError(Messages.StrictLHSPrefix)
+ if not self.context.isAssignmentTarget:
+ self.tolerateError(Messages.InvalidLHSInAssignment)
+ prefix = True
+ expr = self.finalize(node, Node.UpdateExpression(token.value, expr, prefix))
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+ else:
+ expr = self.inheritCoverGrammar(self.parseLeftHandSideExpressionAllowCall)
+ if not self.hasLineTerminator and self.lookahead.type is Token.Punctuator:
+ if self.match('++', '--'):
+ if self.context.strict and expr.type is Syntax.Identifier and self.scanner.isRestrictedWord(expr.name):
+ self.tolerateError(Messages.StrictLHSPostfix)
+ if not self.context.isAssignmentTarget:
+ self.tolerateError(Messages.InvalidLHSInAssignment)
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+ operator = self.nextToken().value
+ prefix = False
+ expr = self.finalize(self.startNode(startToken), Node.UpdateExpression(operator, expr, prefix))
+
+ return expr
+
+ # https://tc39.github.io/ecma262/#sec-unary-operators
+
+ def parseAwaitExpression(self):
+ node = self.createNode()
+ self.nextToken()
+ argument = self.parseUnaryExpression()
+ return self.finalize(node, Node.AwaitExpression(argument))
+
+ def parseUnaryExpression(self):
+ if (
+ self.match('+', '-', '~', '!') or
+ self.matchKeyword('delete', 'void', 'typeof')
+ ):
+ node = self.startNode(self.lookahead)
+ token = self.nextToken()
+ expr = self.inheritCoverGrammar(self.parseUnaryExpression)
+ expr = self.finalize(node, Node.UnaryExpression(token.value, expr))
+ if self.context.strict and expr.operator == 'delete' and expr.argument.type is Syntax.Identifier:
+ self.tolerateError(Messages.StrictDelete)
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+ elif self.context.allowAwait and self.matchContextualKeyword('await'):
+ expr = self.parseAwaitExpression()
+ else:
+ expr = self.parseUpdateExpression()
+
+ return expr
+
+ def parseExponentiationExpression(self):
+ startToken = self.lookahead
+
+ expr = self.inheritCoverGrammar(self.parseUnaryExpression)
+ if expr.type is not Syntax.UnaryExpression and self.match('**'):
+ self.nextToken()
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+ left = expr
+ right = self.isolateCoverGrammar(self.parseExponentiationExpression)
+ expr = self.finalize(self.startNode(startToken), Node.BinaryExpression('**', left, right))
+
+ return expr
+
+ # https://tc39.github.io/ecma262/#sec-exp-operator
+ # https://tc39.github.io/ecma262/#sec-multiplicative-operators
+ # https://tc39.github.io/ecma262/#sec-additive-operators
+ # https://tc39.github.io/ecma262/#sec-bitwise-shift-operators
+ # https://tc39.github.io/ecma262/#sec-relational-operators
+ # https://tc39.github.io/ecma262/#sec-equality-operators
+ # https://tc39.github.io/ecma262/#sec-binary-bitwise-operators
+ # https://tc39.github.io/ecma262/#sec-binary-logical-operators
+
+ def binaryPrecedence(self, token):
+ op = token.value
+ if token.type is Token.Punctuator:
+ precedence = self.operatorPrecedence.get(op, 0)
+ elif token.type is Token.Keyword:
+ precedence = 7 if (op == 'instanceof' or (self.context.allowIn and op == 'in')) else 0
+ else:
+ precedence = 0
+ return precedence
+
+ def parseBinaryExpression(self):
+ startToken = self.lookahead
+
+ expr = self.inheritCoverGrammar(self.parseExponentiationExpression)
+
+ token = self.lookahead
+ prec = self.binaryPrecedence(token)
+ if prec > 0:
+ self.nextToken()
+
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+
+ markers = [startToken, self.lookahead]
+ left = expr
+ right = self.isolateCoverGrammar(self.parseExponentiationExpression)
+
+ stack = [left, token.value, right]
+ precedences = [prec]
+ while True:
+ prec = self.binaryPrecedence(self.lookahead)
+ if prec <= 0:
+ break
+
+ # Reduce: make a binary expression from the three topmost entries.
+ while len(stack) > 2 and prec <= precedences[-1]:
+ right = stack.pop()
+ operator = stack.pop()
+ precedences.pop()
+ left = stack.pop()
+ markers.pop()
+ node = self.startNode(markers[-1])
+ stack.append(self.finalize(node, Node.BinaryExpression(operator, left, right)))
+
+ # Shift.
+ stack.append(self.nextToken().value)
+ precedences.append(prec)
+ markers.append(self.lookahead)
+ stack.append(self.isolateCoverGrammar(self.parseExponentiationExpression))
+
+ # Final reduce to clean-up the stack.
+ i = len(stack) - 1
+ expr = stack[i]
+
+ lastMarker = markers.pop()
+ while i > 1:
+ marker = markers.pop()
+ lastLineStart = lastMarker.lineStart if lastMarker else 0
+ node = self.startNode(marker, lastLineStart)
+ operator = stack[i - 1]
+ expr = self.finalize(node, Node.BinaryExpression(operator, stack[i - 2], expr))
+ i -= 2
+ lastMarker = marker
+
+ return expr
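+
+ # Illustrative trace (not part of the upstream sources): for "1 + 2 * 3 - 4"
+ # the operand/operator stack evolves as
+ #
+ #     [1, '+', 2]              precedences [9]
+ #     [1, '+', 2, '*', 3]      precedences [9, 11]   ('*' binds tighter: shift)
+ #     [(1 + (2 * 3)), '-', 4]  '-' (prec 9) first reduces '*' and then '+'
+ #
+ # and the final reduce loop yields ((1 + (2 * 3)) - 4), following the
+ # operatorPrecedence table defined in __init__.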
+
+ # https://tc39.github.io/ecma262/#sec-conditional-operator
+
+ def parseConditionalExpression(self):
+ startToken = self.lookahead
+
+ expr = self.inheritCoverGrammar(self.parseBinaryExpression)
+ if self.match('?'):
+ self.nextToken()
+
+ previousAllowIn = self.context.allowIn
+ self.context.allowIn = True
+ consequent = self.isolateCoverGrammar(self.parseAssignmentExpression)
+ self.context.allowIn = previousAllowIn
+
+ self.expect(':')
+ alternate = self.isolateCoverGrammar(self.parseAssignmentExpression)
+
+ expr = self.finalize(self.startNode(startToken), Node.ConditionalExpression(expr, consequent, alternate))
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+
+ return expr
+
+ # https://tc39.github.io/ecma262/#sec-assignment-operators
+
+ def checkPatternParam(self, options, param):
+ typ = param.type
+ if typ is Syntax.Identifier:
+ self.validateParam(options, param, param.name)
+ elif typ is Syntax.RestElement:
+ self.checkPatternParam(options, param.argument)
+ elif typ is Syntax.AssignmentPattern:
+ self.checkPatternParam(options, param.left)
+ elif typ is Syntax.ArrayPattern:
+ for element in param.elements:
+ if element is not None:
+ self.checkPatternParam(options, element)
+ elif typ is Syntax.ObjectPattern:
+ for prop in param.properties:
+ self.checkPatternParam(options, prop if prop.type is Syntax.RestElement else prop.value)
+
+ options.simple = options.simple and isinstance(param, Node.Identifier)
+
+ def reinterpretAsCoverFormalsList(self, expr):
+ params = [expr]
+
+ asyncArrow = False
+ typ = expr.type
+ if typ is Syntax.Identifier:
+ pass
+ elif typ is Syntax.ArrowParameterPlaceHolder:
+ params = expr.params
+ asyncArrow = expr.isAsync
+ else:
+ return None
+
+ options = Params(
+ simple=True,
+ paramSet={},
+ )
+
+ for param in params:
+ if param.type is Syntax.AssignmentPattern:
+ if param.right.type is Syntax.YieldExpression:
+ if param.right.argument:
+ self.throwUnexpectedToken(self.lookahead)
+ param.right.type = Syntax.Identifier
+ param.right.name = 'yield'
+ del param.right.argument
+ del param.right.delegate
+ elif asyncArrow and param.type is Syntax.Identifier and param.name == 'await':
+ self.throwUnexpectedToken(self.lookahead)
+ self.checkPatternParam(options, param)
+
+ if self.context.strict or not self.context.allowYield:
+ for param in params:
+ if param.type is Syntax.YieldExpression:
+ self.throwUnexpectedToken(self.lookahead)
+
+ if options.message is Messages.StrictParamDupe:
+ token = options.stricted if self.context.strict else options.firstRestricted
+ self.throwUnexpectedToken(token, options.message)
+
+ return Params(
+ simple=options.simple,
+ params=params,
+ stricted=options.stricted,
+ firstRestricted=options.firstRestricted,
+ message=options.message
+ )
+
+ def parseAssignmentExpression(self):
+ if not self.context.allowYield and self.matchKeyword('yield'):
+ expr = self.parseYieldExpression()
+ else:
+ startToken = self.lookahead
+ token = startToken
+ expr = self.parseConditionalExpression()
+
+ if token.type is Token.Identifier and (token.lineNumber == self.lookahead.lineNumber) and token.value == 'async':
+ if self.lookahead.type is Token.Identifier or self.matchKeyword('yield'):
+ arg = self.parsePrimaryExpression()
+ self.reinterpretExpressionAsPattern(arg)
+ expr = Node.AsyncArrowParameterPlaceHolder([arg])
+
+ if expr.type is Syntax.ArrowParameterPlaceHolder or self.match('=>'):
+
+ # https://tc39.github.io/ecma262/#sec-arrow-function-definitions
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+ isAsync = expr.isAsync
+ list = self.reinterpretAsCoverFormalsList(expr)
+
+ if list:
+ if self.hasLineTerminator:
+ self.tolerateUnexpectedToken(self.lookahead)
+ self.context.firstCoverInitializedNameError = None
+
+ previousStrict = self.context.strict
+ previousAllowStrictDirective = self.context.allowStrictDirective
+ self.context.allowStrictDirective = list.simple
+
+ previousAllowYield = self.context.allowYield
+ previousAwait = self.context.allowAwait
+ self.context.allowYield = True
+ self.context.allowAwait = isAsync
+
+ node = self.startNode(startToken)
+ self.expect('=>')
+ if self.match('{'):
+ previousAllowIn = self.context.allowIn
+ self.context.allowIn = True
+ body = self.parseFunctionSourceElements()
+ self.context.allowIn = previousAllowIn
+ else:
+ body = self.isolateCoverGrammar(self.parseAssignmentExpression)
+ expression = body.type is not Syntax.BlockStatement
+
+ if self.context.strict and list.firstRestricted:
+ self.throwUnexpectedToken(list.firstRestricted, list.message)
+ if self.context.strict and list.stricted:
+ self.tolerateUnexpectedToken(list.stricted, list.message)
+ if isAsync:
+ expr = self.finalize(node, Node.AsyncArrowFunctionExpression(list.params, body, expression))
+ else:
+ expr = self.finalize(node, Node.ArrowFunctionExpression(list.params, body, expression))
+
+ self.context.strict = previousStrict
+ self.context.allowStrictDirective = previousAllowStrictDirective
+ self.context.allowYield = previousAllowYield
+ self.context.allowAwait = previousAwait
+ else:
+ if self.matchAssign():
+ if not self.context.isAssignmentTarget:
+ self.tolerateError(Messages.InvalidLHSInAssignment)
+
+ if self.context.strict and expr.type is Syntax.Identifier:
+ id = expr
+ if self.scanner.isRestrictedWord(id.name):
+ self.tolerateUnexpectedToken(token, Messages.StrictLHSAssignment)
+ if self.scanner.isStrictModeReservedWord(id.name):
+ self.tolerateUnexpectedToken(token, Messages.StrictReservedWord)
+
+ if not self.match('='):
+ self.context.isAssignmentTarget = False
+ self.context.isBindingElement = False
+ else:
+ self.reinterpretExpressionAsPattern(expr)
+
+ token = self.nextToken()
+ operator = token.value
+ right = self.isolateCoverGrammar(self.parseAssignmentExpression)
+ expr = self.finalize(self.startNode(startToken), Node.AssignmentExpression(operator, expr, right))
+ self.context.firstCoverInitializedNameError = None
+
+ return expr
+
+ # https://tc39.github.io/ecma262/#sec-comma-operator
+
+ def parseExpression(self):
+ startToken = self.lookahead
+ expr = self.isolateCoverGrammar(self.parseAssignmentExpression)
+
+ if self.match(','):
+ expressions = []
+ expressions.append(expr)
+ while self.lookahead.type is not Token.EOF:
+ if not self.match(','):
+ break
+ self.nextToken()
+ expressions.append(self.isolateCoverGrammar(self.parseAssignmentExpression))
+
+ expr = self.finalize(self.startNode(startToken), Node.SequenceExpression(expressions))
+
+ return expr
+
+ # https://tc39.github.io/ecma262/#sec-block
+
+ def parseStatementListItem(self):
+ self.context.isAssignmentTarget = True
+ self.context.isBindingElement = True
+ if self.lookahead.type is Token.Keyword:
+ value = self.lookahead.value
+ if value == 'export':
+ if not self.context.isModule:
+ self.tolerateUnexpectedToken(self.lookahead, Messages.IllegalExportDeclaration)
+ statement = self.parseExportDeclaration()
+ elif value == 'import':
+ if self.matchImportCall():
+ statement = self.parseExpressionStatement()
+ else:
+ if not self.context.isModule:
+ self.tolerateUnexpectedToken(self.lookahead, Messages.IllegalImportDeclaration)
+ statement = self.parseImportDeclaration()
+ elif value == 'const':
+ statement = self.parseLexicalDeclaration(Params(inFor=False))
+ elif value == 'function':
+ statement = self.parseFunctionDeclaration()
+ elif value == 'class':
+ statement = self.parseClassDeclaration()
+ elif value == 'let':
+ statement = self.parseLexicalDeclaration(Params(inFor=False)) if self.isLexicalDeclaration() else self.parseStatement()
+ else:
+ statement = self.parseStatement()
+ else:
+ statement = self.parseStatement()
+
+ return statement
+
+ def parseBlock(self):
+ node = self.createNode()
+
+ self.expect('{')
+ block = []
+ while True:
+ if self.match('}'):
+ break
+ block.append(self.parseStatementListItem())
+ self.expect('}')
+
+ return self.finalize(node, Node.BlockStatement(block))
+
+ # https://tc39.github.io/ecma262/#sec-let-and-const-declarations
+
+ def parseLexicalBinding(self, kind, options):
+ node = self.createNode()
+ params = []
+ id = self.parsePattern(params, kind)
+
+ if self.context.strict and id.type is Syntax.Identifier:
+ if self.scanner.isRestrictedWord(id.name):
+ self.tolerateError(Messages.StrictVarName)
+
+ init = None
+ if kind == 'const':
+ if not self.matchKeyword('in') and not self.matchContextualKeyword('of'):
+ if self.match('='):
+ self.nextToken()
+ init = self.isolateCoverGrammar(self.parseAssignmentExpression)
+ else:
+ self.throwError(Messages.DeclarationMissingInitializer, 'const')
+ elif (not options.inFor and id.type is not Syntax.Identifier) or self.match('='):
+ self.expect('=')
+ init = self.isolateCoverGrammar(self.parseAssignmentExpression)
+
+ return self.finalize(node, Node.VariableDeclarator(id, init))
+
+ def parseBindingList(self, kind, options):
+ lst = [self.parseLexicalBinding(kind, options)]
+
+ while self.match(','):
+ self.nextToken()
+ lst.append(self.parseLexicalBinding(kind, options))
+
+ return lst
+
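+ # Peek at the token after 'let' to decide whether it starts a lexical
+ # declaration or is just an identifier. Illustrative: "let x" and "let [a] = b"
+ # begin declarations, while a bare "let" or "let + 1" is parsed as an ordinary
+ # identifier expression in non-strict code.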
+ def isLexicalDeclaration(self):
+ state = self.scanner.saveState()
+ self.scanner.scanComments()
+ next = self.scanner.lex()
+ self.scanner.restoreState(state)
+
+ return (
+ (next.type is Token.Identifier) or
+ (next.type is Token.Punctuator and next.value == '[') or
+ (next.type is Token.Punctuator and next.value == '{') or
+ (next.type is Token.Keyword and next.value == 'let') or
+ (next.type is Token.Keyword and next.value == 'yield')
+ )
+
+ def parseLexicalDeclaration(self, options):
+ node = self.createNode()
+ kind = self.nextToken().value
+ assert kind == 'let' or kind == 'const', 'Lexical declaration must be either let or const'
+
+ declarations = self.parseBindingList(kind, options)
+ self.consumeSemicolon()
+
+ return self.finalize(node, Node.VariableDeclaration(declarations, kind))
+
+ # https://tc39.github.io/ecma262/#sec-destructuring-binding-patterns
+
+ def parseBindingRestElement(self, params, kind=None):
+ node = self.createNode()
+
+ self.expect('...')
+ arg = self.parsePattern(params, kind)
+
+ return self.finalize(node, Node.RestElement(arg))
+
+ def parseArrayPattern(self, params, kind=None):
+ node = self.createNode()
+
+ self.expect('[')
+ elements = []
+ while not self.match(']'):
+ if self.match(','):
+ self.nextToken()
+ elements.append(None)
+ else:
+ if self.match('...'):
+ elements.append(self.parseBindingRestElement(params, kind))
+ break
+ else:
+ elements.append(self.parsePatternWithDefault(params, kind))
+ if not self.match(']'):
+ self.expect(',')
+ self.expect(']')
+
+ return self.finalize(node, Node.ArrayPattern(elements))
+
+ def parsePropertyPattern(self, params, kind=None):
+ node = self.createNode()
+
+ computed = False
+ shorthand = False
+ method = False
+
+ key = None
+
+ if self.lookahead.type is Token.Identifier:
+ keyToken = self.lookahead
+ key = self.parseVariableIdentifier()
+ init = self.finalize(node, Node.Identifier(keyToken.value))
+ if self.match('='):
+ params.append(keyToken)
+ shorthand = True
+ self.nextToken()
+ expr = self.parseAssignmentExpression()
+ value = self.finalize(self.startNode(keyToken), Node.AssignmentPattern(init, expr))
+ elif not self.match(':'):
+ params.append(keyToken)
+ shorthand = True
+ value = init
+ else:
+ self.expect(':')
+ value = self.parsePatternWithDefault(params, kind)
+ else:
+ computed = self.match('[')
+ key = self.parseObjectPropertyKey()
+ self.expect(':')
+ value = self.parsePatternWithDefault(params, kind)
+
+ return self.finalize(node, Node.Property('init', key, computed, value, method, shorthand))
+
+ def parseRestProperty(self, params, kind):
+ node = self.createNode()
+ self.expect('...')
+ arg = self.parsePattern(params)
+ if self.match('='):
+ self.throwError(Messages.DefaultRestProperty)
+ if not self.match('}'):
+ self.throwError(Messages.PropertyAfterRestProperty)
+ return self.finalize(node, Node.RestElement(arg))
+
+ def parseObjectPattern(self, params, kind=None):
+ node = self.createNode()
+ properties = []
+
+ self.expect('{')
+ while not self.match('}'):
+ properties.append(self.parseRestProperty(params, kind) if self.match('...') else self.parsePropertyPattern(params, kind))
+ if not self.match('}'):
+ self.expect(',')
+ self.expect('}')
+
+ return self.finalize(node, Node.ObjectPattern(properties))
+
+ def parsePattern(self, params, kind=None):
+ if self.match('['):
+ pattern = self.parseArrayPattern(params, kind)
+ elif self.match('{'):
+ pattern = self.parseObjectPattern(params, kind)
+ else:
+ if self.matchKeyword('let') and (kind in ('const', 'let')):
+ self.tolerateUnexpectedToken(self.lookahead, Messages.LetInLexicalBinding)
+ params.append(self.lookahead)
+ pattern = self.parseVariableIdentifier(kind)
+
+ return pattern
+
+ def parsePatternWithDefault(self, params, kind=None):
+ startToken = self.lookahead
+
+ pattern = self.parsePattern(params, kind)
+ if self.match('='):
+ self.nextToken()
+ previousAllowYield = self.context.allowYield
+ self.context.allowYield = True
+ right = self.isolateCoverGrammar(self.parseAssignmentExpression)
+ self.context.allowYield = previousAllowYield
+ pattern = self.finalize(self.startNode(startToken), Node.AssignmentPattern(pattern, right))
+
+ return pattern
+
+ # https://tc39.github.io/ecma262/#sec-variable-statement
+
+ def parseVariableIdentifier(self, kind=None):
+ node = self.createNode()
+
+ token = self.nextToken()
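+ # Names that are only conditionally valid: 'yield' is a tolerated error in
+ # strict mode and a hard error when self.context.allowYield is off; other
+ # keywords are rejected except sloppy-mode 'let' bound by 'var'; 'await' is
+ # reported when parsing a module or when allowAwait is set.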
+ if token.type is Token.Keyword and token.value == 'yield':
+ if self.context.strict:
+ self.tolerateUnexpectedToken(token, Messages.StrictReservedWord)
+ elif not self.context.allowYield:
+ self.throwUnexpectedToken(token)
+ elif token.type is not Token.Identifier:
+ if self.context.strict and token.type is Token.Keyword and self.scanner.isStrictModeReservedWord(token.value):
+ self.tolerateUnexpectedToken(token, Messages.StrictReservedWord)
+ else:
+ if self.context.strict or token.value != 'let' or kind != 'var':
+ self.throwUnexpectedToken(token)
+ elif (self.context.isModule or self.context.allowAwait) and token.type is Token.Identifier and token.value == 'await':
+ self.tolerateUnexpectedToken(token)
+
+ return self.finalize(node, Node.Identifier(token.value))
+
+ def parseVariableDeclaration(self, options):
+ node = self.createNode()
+
+ params = []
+ id = self.parsePattern(params, 'var')
+
+ if self.context.strict and id.type is Syntax.Identifier:
+ if self.scanner.isRestrictedWord(id.name):
+ self.tolerateError(Messages.StrictVarName)
+
+ init = None
+ if self.match('='):
+ self.nextToken()
+ init = self.isolateCoverGrammar(self.parseAssignmentExpression)
+ elif id.type is not Syntax.Identifier and not options.inFor:
+ self.expect('=')
+
+ return self.finalize(node, Node.VariableDeclarator(id, init))
+
+ def parseVariableDeclarationList(self, options):
+ opt = Params(inFor=options.inFor)
+
+ lst = []
+ lst.append(self.parseVariableDeclaration(opt))
+ while self.match(','):
+ self.nextToken()
+ lst.append(self.parseVariableDeclaration(opt))
+
+ return lst
+
+ def parseVariableStatement(self):
+ node = self.createNode()
+ self.expectKeyword('var')
+ declarations = self.parseVariableDeclarationList(Params(inFor=False))
+ self.consumeSemicolon()
+
+ return self.finalize(node, Node.VariableDeclaration(declarations, 'var'))
+
+ # https://tc39.github.io/ecma262/#sec-empty-statement
+
+ def parseEmptyStatement(self):
+ node = self.createNode()
+ self.expect(';')
+ return self.finalize(node, Node.EmptyStatement())
+
+ # https://tc39.github.io/ecma262/#sec-expression-statement
+
+ def parseExpressionStatement(self):
+ node = self.createNode()
+ expr = self.parseExpression()
+ self.consumeSemicolon()
+ return self.finalize(node, Node.ExpressionStatement(expr))
+
+ # https://tc39.github.io/ecma262/#sec-if-statement
+
+ def parseIfClause(self):
+ if self.context.strict and self.matchKeyword('function'):
+ self.tolerateError(Messages.StrictFunction)
+ return self.parseStatement()
+
+ def parseIfStatement(self):
+ node = self.createNode()
+ alternate = None
+
+ self.expectKeyword('if')
+ self.expect('(')
+ test = self.parseExpression()
+
+ if not self.match(')') and self.config.tolerant:
+ self.tolerateUnexpectedToken(self.nextToken())
+ consequent = self.finalize(self.createNode(), Node.EmptyStatement())
+ else:
+ self.expect(')')
+ consequent = self.parseIfClause()
+ if self.matchKeyword('else'):
+ self.nextToken()
+ alternate = self.parseIfClause()
+
+ return self.finalize(node, Node.IfStatement(test, consequent, alternate))
+
+ # https://tc39.github.io/ecma262/#sec-do-while-statement
+
+ def parseDoWhileStatement(self):
+ node = self.createNode()
+ self.expectKeyword('do')
+
+ previousInIteration = self.context.inIteration
+ self.context.inIteration = True
+ body = self.parseStatement()
+ self.context.inIteration = previousInIteration
+
+ self.expectKeyword('while')
+ self.expect('(')
+ test = self.parseExpression()
+
+ if not self.match(')') and self.config.tolerant:
+ self.tolerateUnexpectedToken(self.nextToken())
+ else:
+ self.expect(')')
+ if self.match(';'):
+ self.nextToken()
+
+ return self.finalize(node, Node.DoWhileStatement(body, test))
+
+ # https://tc39.github.io/ecma262/#sec-while-statement
+
+ def parseWhileStatement(self):
+ node = self.createNode()
+
+ self.expectKeyword('while')
+ self.expect('(')
+ test = self.parseExpression()
+
+ if not self.match(')') and self.config.tolerant:
+ self.tolerateUnexpectedToken(self.nextToken())
+ body = self.finalize(self.createNode(), Node.EmptyStatement())
+ else:
+ self.expect(')')
+
+ previousInIteration = self.context.inIteration
+ self.context.inIteration = True
+ body = self.parseStatement()
+ self.context.inIteration = previousInIteration
+
+ return self.finalize(node, Node.WhileStatement(test, body))
+
+ # https://tc39.github.io/ecma262/#sec-for-statement
+ # https://tc39.github.io/ecma262/#sec-for-in-and-for-of-statements
+
+ def parseForStatement(self):
+ init = None
+ test = None
+ update = None
+ forIn = True
+ left = None
+ right = None
+
+ node = self.createNode()
+ self.expectKeyword('for')
+ self.expect('(')
+
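+ # A classic for(;;) loop fills init/test/update; for-in and for-of instead
+ # fill left/right, with forIn distinguishing the two. left stays None for a
+ # classic for loop.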
+ if self.match(';'):
+ self.nextToken()
+ else:
+ if self.matchKeyword('var'):
+ init = self.createNode()
+ self.nextToken()
+
+ previousAllowIn = self.context.allowIn
+ self.context.allowIn = False
+ declarations = self.parseVariableDeclarationList(Params(inFor=True))
+ self.context.allowIn = previousAllowIn
+
+ if len(declarations) == 1 and self.matchKeyword('in'):
+ decl = declarations[0]
+ if decl.init and (decl.id.type is Syntax.ArrayPattern or decl.id.type is Syntax.ObjectPattern or self.context.strict):
+ self.tolerateError(Messages.ForInOfLoopInitializer, 'for-in')
+ init = self.finalize(init, Node.VariableDeclaration(declarations, 'var'))
+ self.nextToken()
+ left = init
+ right = self.parseExpression()
+ init = None
+ elif len(declarations) == 1 and declarations[0].init is None and self.matchContextualKeyword('of'):
+ init = self.finalize(init, Node.VariableDeclaration(declarations, 'var'))
+ self.nextToken()
+ left = init
+ right = self.parseAssignmentExpression()
+ init = None
+ forIn = False
+ else:
+ init = self.finalize(init, Node.VariableDeclaration(declarations, 'var'))
+ self.expect(';')
+ elif self.matchKeyword('const', 'let'):
+ init = self.createNode()
+ kind = self.nextToken().value
+
+ if not self.context.strict and self.lookahead.value == 'in':
+ init = self.finalize(init, Node.Identifier(kind))
+ self.nextToken()
+ left = init
+ right = self.parseExpression()
+ init = None
+ else:
+ previousAllowIn = self.context.allowIn
+ self.context.allowIn = False
+ declarations = self.parseBindingList(kind, Params(inFor=True))
+ self.context.allowIn = previousAllowIn
+
+ if len(declarations) == 1 and declarations[0].init is None and self.matchKeyword('in'):
+ init = self.finalize(init, Node.VariableDeclaration(declarations, kind))
+ self.nextToken()
+ left = init
+ right = self.parseExpression()
+ init = None
+ elif len(declarations) == 1 and declarations[0].init is None and self.matchContextualKeyword('of'):
+ init = self.finalize(init, Node.VariableDeclaration(declarations, kind))
+ self.nextToken()
+ left = init
+ right = self.parseAssignmentExpression()
+ init = None
+ forIn = False
+ else:
+ self.consumeSemicolon()
+ init = self.finalize(init, Node.VariableDeclaration(declarations, kind))
+ else:
+ initStartToken = self.lookahead
+ previousAllowIn = self.context.allowIn
+ self.context.allowIn = False
+ init = self.inheritCoverGrammar(self.parseAssignmentExpression)
+ self.context.allowIn = previousAllowIn
+
+ if self.matchKeyword('in'):
+ if not self.context.isAssignmentTarget or init.type is Syntax.AssignmentExpression:
+ self.tolerateError(Messages.InvalidLHSInForIn)
+
+ self.nextToken()
+ self.reinterpretExpressionAsPattern(init)
+ left = init
+ right = self.parseExpression()
+ init = None
+ elif self.matchContextualKeyword('of'):
+ if not self.context.isAssignmentTarget or init.type is Syntax.AssignmentExpression:
+ self.tolerateError(Messages.InvalidLHSInForLoop)
+
+ self.nextToken()
+ self.reinterpretExpressionAsPattern(init)
+ left = init
+ right = self.parseAssignmentExpression()
+ init = None
+ forIn = False
+ else:
+ if self.match(','):
+ initSeq = [init]
+ while self.match(','):
+ self.nextToken()
+ initSeq.append(self.isolateCoverGrammar(self.parseAssignmentExpression))
+ init = self.finalize(self.startNode(initStartToken), Node.SequenceExpression(initSeq))
+ self.expect(';')
+
+ if left is None:
+ if not self.match(';'):
+ test = self.parseExpression()
+ self.expect(';')
+ if not self.match(')'):
+ update = self.parseExpression()
+
+ if not self.match(')') and self.config.tolerant:
+ self.tolerateUnexpectedToken(self.nextToken())
+ body = self.finalize(self.createNode(), Node.EmptyStatement())
+ else:
+ self.expect(')')
+
+ previousInIteration = self.context.inIteration
+ self.context.inIteration = True
+ body = self.isolateCoverGrammar(self.parseStatement)
+ self.context.inIteration = previousInIteration
+
+ if left is None:
+ return self.finalize(node, Node.ForStatement(init, test, update, body))
+
+ if forIn:
+ return self.finalize(node, Node.ForInStatement(left, right, body))
+
+ return self.finalize(node, Node.ForOfStatement(left, right, body))
+
+ # https://tc39.github.io/ecma262/#sec-continue-statement
+
+ def parseContinueStatement(self):
+ node = self.createNode()
+ self.expectKeyword('continue')
+
+ label = None
+ if self.lookahead.type is Token.Identifier and not self.hasLineTerminator:
+ id = self.parseVariableIdentifier()
+ label = id
+
+ key = '$' + id.name
+ if key not in self.context.labelSet:
+ self.throwError(Messages.UnknownLabel, id.name)
+
+ self.consumeSemicolon()
+ if label is None and not self.context.inIteration:
+ self.throwError(Messages.IllegalContinue)
+
+ return self.finalize(node, Node.ContinueStatement(label))
+
+ # https://tc39.github.io/ecma262/#sec-break-statement
+
+ def parseBreakStatement(self):
+ node = self.createNode()
+ self.expectKeyword('break')
+
+ label = None
+ if self.lookahead.type is Token.Identifier and not self.hasLineTerminator:
+ id = self.parseVariableIdentifier()
+
+ key = '$' + id.name
+ if key not in self.context.labelSet:
+ self.throwError(Messages.UnknownLabel, id.name)
+ label = id
+
+ self.consumeSemicolon()
+ if label is None and not self.context.inIteration and not self.context.inSwitch:
+ self.throwError(Messages.IllegalBreak)
+
+ return self.finalize(node, Node.BreakStatement(label))
+
+ # https://tc39.github.io/ecma262/#sec-return-statement
+
+ def parseReturnStatement(self):
+ if not self.context.inFunctionBody:
+ self.tolerateError(Messages.IllegalReturn)
+
+ node = self.createNode()
+ self.expectKeyword('return')
+
+ hasArgument = (
+ (
+ not self.match(';') and not self.match('}') and
+ not self.hasLineTerminator and self.lookahead.type is not Token.EOF
+ ) or
+ self.lookahead.type is Token.StringLiteral or
+ self.lookahead.type is Token.Template
+ )
+ argument = self.parseExpression() if hasArgument else None
+ self.consumeSemicolon()
+
+ return self.finalize(node, Node.ReturnStatement(argument))
+
+ # https://tc39.github.io/ecma262/#sec-with-statement
+
+ def parseWithStatement(self):
+ if self.context.strict:
+ self.tolerateError(Messages.StrictModeWith)
+
+ node = self.createNode()
+
+ self.expectKeyword('with')
+ self.expect('(')
+ object = self.parseExpression()
+
+ if not self.match(')') and self.config.tolerant:
+ self.tolerateUnexpectedToken(self.nextToken())
+ body = self.finalize(self.createNode(), Node.EmptyStatement())
+ else:
+ self.expect(')')
+ body = self.parseStatement()
+
+ return self.finalize(node, Node.WithStatement(object, body))
+
+ # https://tc39.github.io/ecma262/#sec-switch-statement
+
+ def parseSwitchCase(self):
+ node = self.createNode()
+
+ if self.matchKeyword('default'):
+ self.nextToken()
+ test = None
+ else:
+ self.expectKeyword('case')
+ test = self.parseExpression()
+ self.expect(':')
+
+ consequent = []
+ while True:
+ if self.match('}') or self.matchKeyword('default', 'case'):
+ break
+ consequent.append(self.parseStatementListItem())
+
+ return self.finalize(node, Node.SwitchCase(test, consequent))
+
+ def parseSwitchStatement(self):
+ node = self.createNode()
+ self.expectKeyword('switch')
+
+ self.expect('(')
+ discriminant = self.parseExpression()
+ self.expect(')')
+
+ previousInSwitch = self.context.inSwitch
+ self.context.inSwitch = True
+
+ cases = []
+ defaultFound = False
+ self.expect('{')
+ while True:
+ if self.match('}'):
+ break
+ clause = self.parseSwitchCase()
+ if clause.test is None:
+ if defaultFound:
+ self.throwError(Messages.MultipleDefaultsInSwitch)
+ defaultFound = True
+ cases.append(clause)
+ self.expect('}')
+
+ self.context.inSwitch = previousInSwitch
+
+ return self.finalize(node, Node.SwitchStatement(discriminant, cases))
+
+ # https://tc39.github.io/ecma262/#sec-labelled-statements
+
+ def parseLabelledStatement(self):
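+ # An expression statement becomes a labelled statement when an identifier
+ # is followed by ':'; label names are tracked in context.labelSet (keys
+ # prefixed with '$') so redeclared labels can be reported.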
+ node = self.createNode()
+ expr = self.parseExpression()
+
+ if expr.type is Syntax.Identifier and self.match(':'):
+ self.nextToken()
+
+ id = expr
+ key = '$' + id.name
+ if key in self.context.labelSet:
+ self.throwError(Messages.Redeclaration, 'Label', id.name)
+
+ self.context.labelSet[key] = True
+ if self.matchKeyword('class'):
+ self.tolerateUnexpectedToken(self.lookahead)
+ body = self.parseClassDeclaration()
+ elif self.matchKeyword('function'):
+ token = self.lookahead
+ declaration = self.parseFunctionDeclaration()
+ if self.context.strict:
+ self.tolerateUnexpectedToken(token, Messages.StrictFunction)
+ elif declaration.generator:
+ self.tolerateUnexpectedToken(token, Messages.GeneratorInLegacyContext)
+ body = declaration
+ else:
+ body = self.parseStatement()
+ del self.context.labelSet[key]
+
+ statement = Node.LabeledStatement(id, body)
+ else:
+ self.consumeSemicolon()
+ statement = Node.ExpressionStatement(expr)
+
+ return self.finalize(node, statement)
+
+ # https://tc39.github.io/ecma262/#sec-throw-statement
+
+ def parseThrowStatement(self):
+ node = self.createNode()
+ self.expectKeyword('throw')
+
+ if self.hasLineTerminator:
+ self.throwError(Messages.NewlineAfterThrow)
+
+ argument = self.parseExpression()
+ self.consumeSemicolon()
+
+ return self.finalize(node, Node.ThrowStatement(argument))
+
+ # https://tc39.github.io/ecma262/#sec-try-statement
+
+ def parseCatchClause(self):
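+ # The catch parameter may be a pattern; the binding tokens it introduces
+ # are checked for duplicates, and strict mode additionally reports
+ # 'eval'/'arguments' as catch variables.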
+ node = self.createNode()
+
+ self.expectKeyword('catch')
+
+ self.expect('(')
+ if self.match(')'):
+ self.throwUnexpectedToken(self.lookahead)
+
+ params = []
+ param = self.parsePattern(params)
+ paramMap = {}
+ for p in params:
+ key = '$' + p.value
+ if key in paramMap:
+ self.tolerateError(Messages.DuplicateBinding, p.value)
+ paramMap[key] = True
+
+ if self.context.strict and param.type is Syntax.Identifier:
+ if self.scanner.isRestrictedWord(param.name):
+ self.tolerateError(Messages.StrictCatchVariable)
+
+ self.expect(')')
+ body = self.parseBlock()
+
+ return self.finalize(node, Node.CatchClause(param, body))
+
+ def parseFinallyClause(self):
+ self.expectKeyword('finally')
+ return self.parseBlock()
+
+ def parseTryStatement(self):
+ node = self.createNode()
+ self.expectKeyword('try')
+
+ block = self.parseBlock()
+ handler = self.parseCatchClause() if self.matchKeyword('catch') else None
+ finalizer = self.parseFinallyClause() if self.matchKeyword('finally') else None
+
+ if not handler and not finalizer:
+ self.throwError(Messages.NoCatchOrFinally)
+
+ return self.finalize(node, Node.TryStatement(block, handler, finalizer))
+
+ # https://tc39.github.io/ecma262/#sec-debugger-statement
+
+ def parseDebuggerStatement(self):
+ node = self.createNode()
+ self.expectKeyword('debugger')
+ self.consumeSemicolon()
+ return self.finalize(node, Node.DebuggerStatement())
+
+ # https://tc39.github.io/ecma262/#sec-ecmascript-language-statements-and-declarations
+
+ def parseStatement(self):
+ typ = self.lookahead.type
+ if typ in (
+ Token.BooleanLiteral,
+ Token.NullLiteral,
+ Token.NumericLiteral,
+ Token.StringLiteral,
+ Token.Template,
+ Token.RegularExpression,
+ ):
+ statement = self.parseExpressionStatement()
+
+ elif typ is Token.Punctuator:
+ value = self.lookahead.value
+ if value == '{':
+ statement = self.parseBlock()
+ elif value == '(':
+ statement = self.parseExpressionStatement()
+ elif value == ';':
+ statement = self.parseEmptyStatement()
+ else:
+ statement = self.parseExpressionStatement()
+
+ elif typ is Token.Identifier:
+ statement = self.parseFunctionDeclaration() if self.matchAsyncFunction() else self.parseLabelledStatement()
+
+ elif typ is Token.Keyword:
+ value = self.lookahead.value
+ if value == 'break':
+ statement = self.parseBreakStatement()
+ elif value == 'continue':
+ statement = self.parseContinueStatement()
+ elif value == 'debugger':
+ statement = self.parseDebuggerStatement()
+ elif value == 'do':
+ statement = self.parseDoWhileStatement()
+ elif value == 'for':
+ statement = self.parseForStatement()
+ elif value == 'function':
+ statement = self.parseFunctionDeclaration()
+ elif value == 'if':
+ statement = self.parseIfStatement()
+ elif value == 'return':
+ statement = self.parseReturnStatement()
+ elif value == 'switch':
+ statement = self.parseSwitchStatement()
+ elif value == 'throw':
+ statement = self.parseThrowStatement()
+ elif value == 'try':
+ statement = self.parseTryStatement()
+ elif value == 'var':
+ statement = self.parseVariableStatement()
+ elif value == 'while':
+ statement = self.parseWhileStatement()
+ elif value == 'with':
+ statement = self.parseWithStatement()
+ else:
+ statement = self.parseExpressionStatement()
+
+ else:
+ statement = self.throwUnexpectedToken(self.lookahead)
+
+ return statement
+
+ # https://tc39.github.io/ecma262/#sec-function-definitions
+
+ def parseFunctionSourceElements(self):
+ node = self.createNode()
+
+ self.expect('{')
+ body = self.parseDirectivePrologues()
+
+ previousLabelSet = self.context.labelSet
+ previousInIteration = self.context.inIteration
+ previousInSwitch = self.context.inSwitch
+ previousInFunctionBody = self.context.inFunctionBody
+
+ self.context.labelSet = {}
+ self.context.inIteration = False
+ self.context.inSwitch = False
+ self.context.inFunctionBody = True
+
+ while self.lookahead.type is not Token.EOF:
+ if self.match('}'):
+ break
+ body.append(self.parseStatementListItem())
+
+ self.expect('}')
+
+ self.context.labelSet = previousLabelSet
+ self.context.inIteration = previousInIteration
+ self.context.inSwitch = previousInSwitch
+ self.context.inFunctionBody = previousInFunctionBody
+
+ return self.finalize(node, Node.BlockStatement(body))
+
+ def validateParam(self, options, param, name):
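+ # Record restricted words and duplicate parameter names on the options
+ # object; the caller reports them later, once the function body shows
+ # whether strict mode applies.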
+ key = '$' + name
+ if self.context.strict:
+ if self.scanner.isRestrictedWord(name):
+ options.stricted = param
+ options.message = Messages.StrictParamName
+ if key in options.paramSet:
+ options.stricted = param
+ options.message = Messages.StrictParamDupe
+ elif not options.firstRestricted:
+ if self.scanner.isRestrictedWord(name):
+ options.firstRestricted = param
+ options.message = Messages.StrictParamName
+ elif self.scanner.isStrictModeReservedWord(name):
+ options.firstRestricted = param
+ options.message = Messages.StrictReservedWord
+ elif key in options.paramSet:
+ options.stricted = param
+ options.message = Messages.StrictParamDupe
+
+ options.paramSet[key] = True
+
+ def parseRestElement(self, params):
+ node = self.createNode()
+
+ self.expect('...')
+ arg = self.parsePattern(params)
+ if self.match('='):
+ self.throwError(Messages.DefaultRestParameter)
+ if not self.match(')'):
+ self.throwError(Messages.ParameterAfterRestParameter)
+
+ return self.finalize(node, Node.RestElement(arg))
+
+ def parseFormalParameter(self, options):
+ params = []
+ param = self.parseRestElement(params) if self.match('...') else self.parsePatternWithDefault(params)
+ for p in params:
+ self.validateParam(options, p, p.value)
+ options.simple = options.simple and isinstance(param, Node.Identifier)
+ options.params.append(param)
+
+ def parseFormalParameters(self, firstRestricted=None):
+ options = Params(
+ simple=True,
+ params=[],
+ firstRestricted=firstRestricted
+ )
+
+ self.expect('(')
+ if not self.match(')'):
+ options.paramSet = {}
+ while self.lookahead.type is not Token.EOF:
+ self.parseFormalParameter(options)
+ if self.match(')'):
+ break
+ self.expect(',')
+ if self.match(')'):
+ break
+ self.expect(')')
+
+ return Params(
+ simple=options.simple,
+ params=options.params,
+ stricted=options.stricted,
+ firstRestricted=options.firstRestricted,
+ message=options.message
+ )
+
+ def matchAsyncFunction(self):
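+ # 'async' only introduces an async function when 'function' follows on the
+ # same line; scanner state is saved and restored around the lookahead.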
+ match = self.matchContextualKeyword('async')
+ if match:
+ state = self.scanner.saveState()
+ self.scanner.scanComments()
+ next = self.scanner.lex()
+ self.scanner.restoreState(state)
+
+ match = (state.lineNumber == next.lineNumber) and (next.type is Token.Keyword) and (next.value == 'function')
+
+ return match
+
+ def parseFunctionDeclaration(self, identifierIsOptional=False):
+ node = self.createNode()
+
+ isAsync = self.matchContextualKeyword('async')
+ if isAsync:
+ self.nextToken()
+
+ self.expectKeyword('function')
+
+ isGenerator = False if isAsync else self.match('*')
+ if isGenerator:
+ self.nextToken()
+
+ id = None
+ firstRestricted = None
+
+ if not identifierIsOptional or not self.match('('):
+ token = self.lookahead
+ id = self.parseVariableIdentifier()
+ if self.context.strict:
+ if self.scanner.isRestrictedWord(token.value):
+ self.tolerateUnexpectedToken(token, Messages.StrictFunctionName)
+ else:
+ if self.scanner.isRestrictedWord(token.value):
+ firstRestricted = token
+ message = Messages.StrictFunctionName
+ elif self.scanner.isStrictModeReservedWord(token.value):
+ firstRestricted = token
+ message = Messages.StrictReservedWord
+
+ previousAllowAwait = self.context.allowAwait
+ previousAllowYield = self.context.allowYield
+ self.context.allowAwait = isAsync
+ self.context.allowYield = not isGenerator
+
+ formalParameters = self.parseFormalParameters(firstRestricted)
+ params = formalParameters.params
+ stricted = formalParameters.stricted
+ firstRestricted = formalParameters.firstRestricted
+ if formalParameters.message:
+ message = formalParameters.message
+
+ previousStrict = self.context.strict
+ previousAllowStrictDirective = self.context.allowStrictDirective
+ self.context.allowStrictDirective = formalParameters.simple
+ body = self.parseFunctionSourceElements()
+ if self.context.strict and firstRestricted:
+ self.throwUnexpectedToken(firstRestricted, message)
+ if self.context.strict and stricted:
+ self.tolerateUnexpectedToken(stricted, message)
+
+ self.context.strict = previousStrict
+ self.context.allowStrictDirective = previousAllowStrictDirective
+ self.context.allowAwait = previousAllowAwait
+ self.context.allowYield = previousAllowYield
+
+ if isAsync:
+ return self.finalize(node, Node.AsyncFunctionDeclaration(id, params, body))
+
+ return self.finalize(node, Node.FunctionDeclaration(id, params, body, isGenerator))
+
+ def parseFunctionExpression(self):
+ node = self.createNode()
+
+ isAsync = self.matchContextualKeyword('async')
+ if isAsync:
+ self.nextToken()
+
+ self.expectKeyword('function')
+
+ isGenerator = False if isAsync else self.match('*')
+ if isGenerator:
+ self.nextToken()
+
+ id = None
+ firstRestricted = None
+
+ previousAllowAwait = self.context.allowAwait
+ previousAllowYield = self.context.allowYield
+ self.context.allowAwait = isAsync
+ self.context.allowYield = not isGenerator
+
+ if not self.match('('):
+ token = self.lookahead
+ id = self.parseIdentifierName() if not self.context.strict and not isGenerator and self.matchKeyword('yield') else self.parseVariableIdentifier()
+ if self.context.strict:
+ if self.scanner.isRestrictedWord(token.value):
+ self.tolerateUnexpectedToken(token, Messages.StrictFunctionName)
+ else:
+ if self.scanner.isRestrictedWord(token.value):
+ firstRestricted = token
+ message = Messages.StrictFunctionName
+ elif self.scanner.isStrictModeReservedWord(token.value):
+ firstRestricted = token
+ message = Messages.StrictReservedWord
+
+ formalParameters = self.parseFormalParameters(firstRestricted)
+ params = formalParameters.params
+ stricted = formalParameters.stricted
+ firstRestricted = formalParameters.firstRestricted
+ if formalParameters.message:
+ message = formalParameters.message
+
+ previousStrict = self.context.strict
+ previousAllowStrictDirective = self.context.allowStrictDirective
+ self.context.allowStrictDirective = formalParameters.simple
+ body = self.parseFunctionSourceElements()
+ if self.context.strict and firstRestricted:
+ self.throwUnexpectedToken(firstRestricted, message)
+ if self.context.strict and stricted:
+ self.tolerateUnexpectedToken(stricted, message)
+ self.context.strict = previousStrict
+ self.context.allowStrictDirective = previousAllowStrictDirective
+ self.context.allowAwait = previousAllowAwait
+ self.context.allowYield = previousAllowYield
+
+ if isAsync:
+ return self.finalize(node, Node.AsyncFunctionExpression(id, params, body))
+
+ return self.finalize(node, Node.FunctionExpression(id, params, body, isGenerator))
+
+ # https://tc39.github.io/ecma262/#sec-directive-prologues-and-the-use-strict-directive
+
+ def parseDirective(self):
+ token = self.lookahead
+
+ node = self.createNode()
+ expr = self.parseExpression()
+ directive = self.getTokenRaw(token)[1:-1] if expr.type is Syntax.Literal else None
+ self.consumeSemicolon()
+
+ return self.finalize(node, Node.Directive(expr, directive) if directive else Node.ExpressionStatement(expr))
+
+ def parseDirectivePrologues(self):
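+ # Collect the leading string-literal statements (the directive prologue).
+ # A 'use strict' directive switches the parser to strict mode and causes any
+ # earlier octal-escaped string literal in the prologue to be reported.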
+ firstRestricted = None
+
+ body = []
+ while True:
+ token = self.lookahead
+ if token.type is not Token.StringLiteral:
+ break
+
+ statement = self.parseDirective()
+ body.append(statement)
+ directive = statement.directive
+ if not isinstance(directive, basestring):
+ break
+
+ if directive == 'use strict':
+ self.context.strict = True
+ if firstRestricted:
+ self.tolerateUnexpectedToken(firstRestricted, Messages.StrictOctalLiteral)
+ if not self.context.allowStrictDirective:
+ self.tolerateUnexpectedToken(token, Messages.IllegalLanguageModeDirective)
+ else:
+ if not firstRestricted and token.octal:
+ firstRestricted = token
+
+ return body
+
+ # https://tc39.github.io/ecma262/#sec-method-definitions
+
+ def qualifiedPropertyName(self, token):
+ typ = token.type
+ if typ in (
+ Token.Identifier,
+ Token.StringLiteral,
+ Token.BooleanLiteral,
+ Token.NullLiteral,
+ Token.NumericLiteral,
+ Token.Keyword,
+ ):
+ return True
+ elif typ is Token.Punctuator:
+ return token.value == '['
+ return False
+
+ def parseGetterMethod(self):
+ node = self.createNode()
+
+ isGenerator = False
+ previousAllowYield = self.context.allowYield
+ self.context.allowYield = not isGenerator
+ formalParameters = self.parseFormalParameters()
+ if len(formalParameters.params) > 0:
+ self.tolerateError(Messages.BadGetterArity)
+ method = self.parsePropertyMethod(formalParameters)
+ self.context.allowYield = previousAllowYield
+
+ return self.finalize(node, Node.FunctionExpression(None, formalParameters.params, method, isGenerator))
+
+ def parseSetterMethod(self):
+ node = self.createNode()
+
+ isGenerator = False
+ previousAllowYield = self.context.allowYield
+ self.context.allowYield = not isGenerator
+ formalParameters = self.parseFormalParameters()
+ if len(formalParameters.params) != 1:
+ self.tolerateError(Messages.BadSetterArity)
+ elif isinstance(formalParameters.params[0], Node.RestElement):
+ self.tolerateError(Messages.BadSetterRestParameter)
+ method = self.parsePropertyMethod(formalParameters)
+ self.context.allowYield = previousAllowYield
+
+ return self.finalize(node, Node.FunctionExpression(None, formalParameters.params, method, isGenerator))
+
+ def parseGeneratorMethod(self):
+ node = self.createNode()
+
+ isGenerator = True
+ previousAllowYield = self.context.allowYield
+
+ self.context.allowYield = True
+ params = self.parseFormalParameters()
+ self.context.allowYield = False
+ method = self.parsePropertyMethod(params)
+ self.context.allowYield = previousAllowYield
+
+ return self.finalize(node, Node.FunctionExpression(None, params.params, method, isGenerator))
+
+ # https://tc39.github.io/ecma262/#sec-generator-function-definitions
+
+ def isStartOfExpression(self):
+ start = True
+
+ value = self.lookahead.value
+ typ = self.lookahead.type
+ if typ is Token.Punctuator:
+ start = value in ('[', '(', '{', '+', '-', '!', '~', '++', '--', '/', '/=')  # regular expression literal
+
+ elif typ is Token.Keyword:
+ start = value in ('class', 'delete', 'function', 'let', 'new', 'super', 'this', 'typeof', 'void', 'yield')
+
+ return start
+
+ def parseYieldExpression(self):
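+ # A yield argument is only parsed when no line terminator follows: 'yield*'
+ # (delegate) always takes an expression, plain 'yield' only when the next
+ # token can start an expression.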
+ node = self.createNode()
+ self.expectKeyword('yield')
+
+ argument = None
+ delegate = False
+ if not self.hasLineTerminator:
+ previousAllowYield = self.context.allowYield
+ self.context.allowYield = False
+ delegate = self.match('*')
+ if delegate:
+ self.nextToken()
+ argument = self.parseAssignmentExpression()
+ elif self.isStartOfExpression():
+ argument = self.parseAssignmentExpression()
+ self.context.allowYield = previousAllowYield
+
+ return self.finalize(node, Node.YieldExpression(argument, delegate))
+
+ # https://tc39.github.io/ecma262/#sec-class-definitions
+
+ def parseClassElement(self, hasConstructor):
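+ # Distinguish constructors, static members, getters/setters, generator and
+ # async methods, and (when config.classProperties is set) field definitions,
+ # enforcing the single-constructor and static 'prototype' restrictions.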
+ token = self.lookahead
+ node = self.createNode()
+
+ kind = ''
+ key = None
+ value = None
+ computed = False
+ isStatic = False
+ isAsync = False
+
+ if self.match('*'):
+ self.nextToken()
+
+ else:
+ computed = self.match('[')
+ key = self.parseObjectPropertyKey()
+ id = key
+ if id.name == 'static' and (self.qualifiedPropertyName(self.lookahead) or self.match('*')):
+ token = self.lookahead
+ isStatic = True
+ computed = self.match('[')
+ if self.match('*'):
+ self.nextToken()
+ else:
+ key = self.parseObjectPropertyKey()
+ if token.type is Token.Identifier and not self.hasLineTerminator and token.value == 'async':
+ punctuator = self.lookahead.value
+ if punctuator != ':' and punctuator != '(' and punctuator != '*':
+ isAsync = True
+ token = self.lookahead
+ key = self.parseObjectPropertyKey()
+ if token.type is Token.Identifier and token.value == 'constructor':
+ self.tolerateUnexpectedToken(token, Messages.ConstructorIsAsync)
+
+ lookaheadPropertyKey = self.qualifiedPropertyName(self.lookahead)
+ if token.type is Token.Identifier:
+ if token.value == 'get' and lookaheadPropertyKey:
+ kind = 'get'
+ computed = self.match('[')
+ key = self.parseObjectPropertyKey()
+ self.context.allowYield = False
+ value = self.parseGetterMethod()
+ elif token.value == 'set' and lookaheadPropertyKey:
+ kind = 'set'
+ computed = self.match('[')
+ key = self.parseObjectPropertyKey()
+ value = self.parseSetterMethod()
+ elif self.config.classProperties and not self.match('('):
+ kind = 'init'
+ id = self.finalize(node, Node.Identifier(token.value))
+ if self.match('='):
+ self.nextToken()
+ value = self.parseAssignmentExpression()
+
+ elif token.type is Token.Punctuator and token.value == '*' and lookaheadPropertyKey:
+ kind = 'method'
+ computed = self.match('[')
+ key = self.parseObjectPropertyKey()
+ value = self.parseGeneratorMethod()
+
+ if not kind and key and self.match('('):
+ kind = 'method'
+ value = self.parsePropertyMethodAsyncFunction() if isAsync else self.parsePropertyMethodFunction()
+
+ if not kind:
+ self.throwUnexpectedToken(self.lookahead)
+
+ if not computed:
+ if isStatic and self.isPropertyKey(key, 'prototype'):
+ self.throwUnexpectedToken(token, Messages.StaticPrototype)
+ if not isStatic and self.isPropertyKey(key, 'constructor'):
+ if kind != 'method' or (value and value.generator):
+ self.throwUnexpectedToken(token, Messages.ConstructorSpecialMethod)
+ if hasConstructor.value:
+ self.throwUnexpectedToken(token, Messages.DuplicateConstructor)
+ else:
+ hasConstructor.value = True
+ kind = 'constructor'
+
+ if kind in ('constructor', 'method', 'get', 'set'):
+ return self.finalize(node, Node.MethodDefinition(key, computed, value, kind, isStatic))
+
+ else:
+ return self.finalize(node, Node.FieldDefinition(key, computed, value, kind, isStatic))
+
+ def parseClassElementList(self):
+ body = []
+ hasConstructor = Value(False)
+
+ self.expect('{')
+ while not self.match('}'):
+ if self.match(';'):
+ self.nextToken()
+ else:
+ body.append(self.parseClassElement(hasConstructor))
+ self.expect('}')
+
+ return body
+
+ def parseClassBody(self):
+ node = self.createNode()
+ elementList = self.parseClassElementList()
+
+ return self.finalize(node, Node.ClassBody(elementList))
+
+ def parseClassDeclaration(self, identifierIsOptional=False):
+ node = self.createNode()
+
+ previousStrict = self.context.strict
+ self.context.strict = True
+ self.expectKeyword('class')
+
+ id = None if identifierIsOptional and self.lookahead.type is not Token.Identifier else self.parseVariableIdentifier()
+ superClass = None
+ if self.matchKeyword('extends'):
+ self.nextToken()
+ superClass = self.isolateCoverGrammar(self.parseLeftHandSideExpressionAllowCall)
+ classBody = self.parseClassBody()
+ self.context.strict = previousStrict
+
+ return self.finalize(node, Node.ClassDeclaration(id, superClass, classBody))
+
+ def parseClassExpression(self):
+ node = self.createNode()
+
+ previousStrict = self.context.strict
+ self.context.strict = True
+ self.expectKeyword('class')
+ id = self.parseVariableIdentifier() if self.lookahead.type is Token.Identifier else None
+ superClass = None
+ if self.matchKeyword('extends'):
+ self.nextToken()
+ superClass = self.isolateCoverGrammar(self.parseLeftHandSideExpressionAllowCall)
+ classBody = self.parseClassBody()
+ self.context.strict = previousStrict
+
+ return self.finalize(node, Node.ClassExpression(id, superClass, classBody))
+
+ # https://tc39.github.io/ecma262/#sec-scripts
+ # https://tc39.github.io/ecma262/#sec-modules
+
+ def parseModule(self):
+ self.context.strict = True
+ self.context.isModule = True
+ self.scanner.isModule = True
+ node = self.createNode()
+ body = self.parseDirectivePrologues()
+ while self.lookahead.type is not Token.EOF:
+ body.append(self.parseStatementListItem())
+ return self.finalize(node, Node.Module(body))
+
+ def parseScript(self):
+ node = self.createNode()
+ body = self.parseDirectivePrologues()
+ while self.lookahead.type is not Token.EOF:
+ body.append(self.parseStatementListItem())
+ return self.finalize(node, Node.Script(body))
+
+ # https://tc39.github.io/ecma262/#sec-imports
+
+ def parseModuleSpecifier(self):
+ node = self.createNode()
+
+ if self.lookahead.type is not Token.StringLiteral:
+ self.throwError(Messages.InvalidModuleSpecifier)
+
+ token = self.nextToken()
+ raw = self.getTokenRaw(token)
+ return self.finalize(node, Node.Literal(token.value, raw))
+
+ # import {<foo as bar>} ...
+ def parseImportSpecifier(self):
+ node = self.createNode()
+
+ if self.lookahead.type is Token.Identifier:
+ imported = self.parseVariableIdentifier()
+ local = imported
+ if self.matchContextualKeyword('as'):
+ self.nextToken()
+ local = self.parseVariableIdentifier()
+ else:
+ imported = self.parseIdentifierName()
+ local = imported
+ if self.matchContextualKeyword('as'):
+ self.nextToken()
+ local = self.parseVariableIdentifier()
+ else:
+ self.throwUnexpectedToken(self.nextToken())
+
+ return self.finalize(node, Node.ImportSpecifier(local, imported))
+
+ # {foo, bar as bas}
+ def parseNamedImports(self):
+ self.expect('{')
+ specifiers = []
+ while not self.match('}'):
+ specifiers.append(self.parseImportSpecifier())
+ if not self.match('}'):
+ self.expect(',')
+ self.expect('}')
+
+ return specifiers
+
+ # import <foo> ...
+ def parseImportDefaultSpecifier(self):
+ node = self.createNode()
+ local = self.parseIdentifierName()
+ return self.finalize(node, Node.ImportDefaultSpecifier(local))
+
+ # import <* as foo> ...
+ def parseImportNamespaceSpecifier(self):
+ node = self.createNode()
+
+ self.expect('*')
+ if not self.matchContextualKeyword('as'):
+ self.throwError(Messages.NoAsAfterImportNamespace)
+ self.nextToken()
+ local = self.parseIdentifierName()
+
+ return self.finalize(node, Node.ImportNamespaceSpecifier(local))
+
+ def parseImportDeclaration(self):
+ if self.context.inFunctionBody:
+ self.throwError(Messages.IllegalImportDeclaration)
+
+ node = self.createNode()
+ self.expectKeyword('import')
+
+ specifiers = []
+ if self.lookahead.type is Token.StringLiteral:
+ # import 'foo'
+ src = self.parseModuleSpecifier()
+ else:
+ if self.match('{'):
+ # import {bar
+ specifiers.extend(self.parseNamedImports())
+ elif self.match('*'):
+ # import * as foo
+ specifiers.append(self.parseImportNamespaceSpecifier())
+ elif self.isIdentifierName(self.lookahead) and not self.matchKeyword('default'):
+ # import foo
+ specifiers.append(self.parseImportDefaultSpecifier())
+ if self.match(','):
+ self.nextToken()
+ if self.match('*'):
+ # import foo, * as foo
+ specifiers.append(self.parseImportNamespaceSpecifier())
+ elif self.match('{'):
+ # import foo, {bar
+ specifiers.extend(self.parseNamedImports())
+ else:
+ self.throwUnexpectedToken(self.lookahead)
+ else:
+ self.throwUnexpectedToken(self.nextToken())
+
+ if not self.matchContextualKeyword('from'):
+ message = Messages.UnexpectedToken if self.lookahead.value else Messages.MissingFromClause
+ self.throwError(message, self.lookahead.value)
+ self.nextToken()
+ src = self.parseModuleSpecifier()
+ self.consumeSemicolon()
+
+ return self.finalize(node, Node.ImportDeclaration(specifiers, src))
+
+ # https://tc39.github.io/ecma262/#sec-exports
+
+ def parseExportSpecifier(self):
+ node = self.createNode()
+
+ local = self.parseIdentifierName()
+ exported = local
+ if self.matchContextualKeyword('as'):
+ self.nextToken()
+ exported = self.parseIdentifierName()
+
+ return self.finalize(node, Node.ExportSpecifier(local, exported))
+
+ def parseExportDefaultSpecifier(self):
+ node = self.createNode()
+ local = self.parseIdentifierName()
+ return self.finalize(node, Node.ExportDefaultSpecifier(local))
+
+ def parseExportDeclaration(self):
+ if self.context.inFunctionBody:
+ self.throwError(Messages.IllegalExportDeclaration)
+
+ node = self.createNode()
+ self.expectKeyword('export')
+
+ if self.matchKeyword('default'):
+ # export default ...
+ self.nextToken()
+ if self.matchKeyword('function'):
+ # export default function foo (:
+ # export default function (:
+ declaration = self.parseFunctionDeclaration(True)
+ exportDeclaration = self.finalize(node, Node.ExportDefaultDeclaration(declaration))
+ elif self.matchKeyword('class'):
+ # export default class foo {
+ declaration = self.parseClassDeclaration(True)
+ exportDeclaration = self.finalize(node, Node.ExportDefaultDeclaration(declaration))
+ elif self.matchContextualKeyword('async'):
+ # export default async function f (:
+ # export default async function (:
+ # export default async x => x
+ declaration = self.parseFunctionDeclaration(True) if self.matchAsyncFunction() else self.parseAssignmentExpression()
+ exportDeclaration = self.finalize(node, Node.ExportDefaultDeclaration(declaration))
+ else:
+ if self.matchContextualKeyword('from'):
+ self.throwError(Messages.UnexpectedToken, self.lookahead.value)
+ # export default {}
+ # export default []
+ # export default (1 + 2)
+ if self.match('{'):
+ declaration = self.parseObjectInitializer()
+ elif self.match('['):
+ declaration = self.parseArrayInitializer()
+ else:
+ declaration = self.parseAssignmentExpression()
+ self.consumeSemicolon()
+ exportDeclaration = self.finalize(node, Node.ExportDefaultDeclaration(declaration))
+
+ elif self.match('*'):
+ # export * from 'foo'
+ self.nextToken()
+ if not self.matchContextualKeyword('from'):
+ message = Messages.UnexpectedToken if self.lookahead.value else Messages.MissingFromClause
+ self.throwError(message, self.lookahead.value)
+ self.nextToken()
+ src = self.parseModuleSpecifier()
+ self.consumeSemicolon()
+ exportDeclaration = self.finalize(node, Node.ExportAllDeclaration(src))
+
+ elif self.lookahead.type is Token.Keyword:
+ # export var f = 1
+ value = self.lookahead.value
+ if value in (
+ 'let',
+ 'const',
+ ):
+ declaration = self.parseLexicalDeclaration(Params(inFor=False))
+ elif value in (
+ 'var',
+ 'class',
+ 'function',
+ ):
+ declaration = self.parseStatementListItem()
+ else:
+ self.throwUnexpectedToken(self.lookahead)
+ exportDeclaration = self.finalize(node, Node.ExportNamedDeclaration(declaration, [], None))
+
+ elif self.matchAsyncFunction():
+ declaration = self.parseFunctionDeclaration()
+ exportDeclaration = self.finalize(node, Node.ExportNamedDeclaration(declaration, [], None))
+
+ else:
+ specifiers = []
+ source = None
+ isExportFromIdentifier = False
+
+ expectSpecifiers = True
+ if self.lookahead.type is Token.Identifier:
+ specifiers.append(self.parseExportDefaultSpecifier())
+ if self.match(','):
+ self.nextToken()
+ else:
+ expectSpecifiers = False
+
+ if expectSpecifiers:
+ self.expect('{')
+ while not self.match('}'):
+ isExportFromIdentifier = isExportFromIdentifier or self.matchKeyword('default')
+ specifiers.append(self.parseExportSpecifier())
+ if not self.match('}'):
+ self.expect(',')
+ self.expect('}')
+
+ if self.matchContextualKeyword('from'):
+ # export {default} from 'foo'
+ # export {foo} from 'foo'
+ self.nextToken()
+ source = self.parseModuleSpecifier()
+ self.consumeSemicolon()
+ elif isExportFromIdentifier:
+ # export {default}; # missing fromClause
+ message = Messages.UnexpectedToken if self.lookahead.value else Messages.MissingFromClause
+ self.throwError(message, self.lookahead.value)
+ else:
+ # export {foo}
+ self.consumeSemicolon()
+ exportDeclaration = self.finalize(node, Node.ExportNamedDeclaration(None, specifiers, source))
+
+ return exportDeclaration
diff --git a/third_party/python/esprima/esprima/scanner.py b/third_party/python/esprima/esprima/scanner.py
new file mode 100644
index 0000000000..53502a51d3
--- /dev/null
+++ b/third_party/python/esprima/esprima/scanner.py
@@ -0,0 +1,1189 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, unicode_literals
+
+import re
+
+from .objects import Object
+from .compat import xrange, unicode, uchr, uord
+from .character import Character, HEX_CONV, OCTAL_CONV
+from .messages import Messages
+from .token import Token
+
+
+def hexValue(ch):
+ return HEX_CONV[ch]
+
+
+def octalValue(ch):
+ return OCTAL_CONV[ch]
+
+
+class RegExp(Object):
+ def __init__(self, pattern=None, flags=None):
+ self.pattern = pattern
+ self.flags = flags
+
+
+class Position(Object):
+ def __init__(self, line=None, column=None, offset=None):
+ self.line = line
+ self.column = column
+ self.offset = offset
+
+
+class SourceLocation(Object):
+ def __init__(self, start=None, end=None, source=None):
+ self.start = start
+ self.end = end
+ self.source = source
+
+
+class Comment(Object):
+ def __init__(self, multiLine=None, slice=None, range=None, loc=None):
+ self.multiLine = multiLine
+ self.slice = slice
+ self.range = range
+ self.loc = loc
+
+
+class RawToken(Object):
+ def __init__(self, type=None, value=None, pattern=None, flags=None, regex=None, octal=None, cooked=None, head=None, tail=None, lineNumber=None, lineStart=None, start=None, end=None):
+ self.type = type
+ self.value = value
+ self.pattern = pattern
+ self.flags = flags
+ self.regex = regex
+ self.octal = octal
+ self.cooked = cooked
+ self.head = head
+ self.tail = tail
+ self.lineNumber = lineNumber
+ self.lineStart = lineStart
+ self.start = start
+ self.end = end
+
+
+class ScannerState(Object):
+ def __init__(self, index=None, lineNumber=None, lineStart=None):
+ self.index = index
+ self.lineNumber = lineNumber
+ self.lineStart = lineStart
+
+
+class Octal(object):
+ def __init__(self, octal, code):
+ self.octal = octal
+ self.code = code
+
+
+class Scanner(object):
+ def __init__(self, code, handler):
+ self.source = unicode(code) + '\x00'
+ self.errorHandler = handler
+ self.trackComment = False
+ self.isModule = False
+
+ self.length = len(code)
+ self.index = 0
+ self.lineNumber = 1 if self.length > 0 else 0
+ self.lineStart = 0
+ self.curlyStack = []
+
+ def saveState(self):
+ return ScannerState(
+ index=self.index,
+ lineNumber=self.lineNumber,
+ lineStart=self.lineStart
+ )
+
+ def restoreState(self, state):
+ self.index = state.index
+ self.lineNumber = state.lineNumber
+ self.lineStart = state.lineStart
+
+ def eof(self):
+ return self.index >= self.length
+
+ def throwUnexpectedToken(self, message=Messages.UnexpectedTokenIllegal):
+ return self.errorHandler.throwError(self.index, self.lineNumber,
+ self.index - self.lineStart + 1, message)
+
+ def tolerateUnexpectedToken(self, message=Messages.UnexpectedTokenIllegal):
+ self.errorHandler.tolerateError(self.index, self.lineNumber,
+ self.index - self.lineStart + 1, message)
+
+ # https://tc39.github.io/ecma262/#sec-comments
+
+ def skipSingleLineComment(self, offset):
+ comments = []
+
+ if self.trackComment:
+ start = self.index - offset
+ loc = SourceLocation(
+ start=Position(
+ line=self.lineNumber,
+ column=self.index - self.lineStart - offset
+ ),
+ end=Position()
+ )
+
+ while not self.eof():
+ ch = self.source[self.index]
+ self.index += 1
+ if Character.isLineTerminator(ch):
+ if self.trackComment:
+ loc.end = Position(
+ line=self.lineNumber,
+ column=self.index - self.lineStart - 1
+ )
+ entry = Comment(
+ multiLine=False,
+ slice=[start + offset, self.index - 1],
+ range=[start, self.index - 1],
+ loc=loc
+ )
+ comments.append(entry)
+
+ if ch == '\r' and self.source[self.index] == '\n':
+ self.index += 1
+
+ self.lineNumber += 1
+ self.lineStart = self.index
+ return comments
+
+ if self.trackComment:
+ loc.end = Position(
+ line=self.lineNumber,
+ column=self.index - self.lineStart
+ )
+ entry = Comment(
+ multiLine=False,
+ slice=[start + offset, self.index],
+ range=[start, self.index],
+ loc=loc
+ )
+ comments.append(entry)
+
+ return comments
+
+ def skipMultiLineComment(self):
+ comments = []
+
+ if self.trackComment:
+ comments = []
+ start = self.index - 2
+ loc = SourceLocation(
+ start=Position(
+ line=self.lineNumber,
+ column=self.index - self.lineStart - 2
+ ),
+ end=Position()
+ )
+
+ while not self.eof():
+ ch = self.source[self.index]
+ if Character.isLineTerminator(ch):
+ if ch == '\r' and self.source[self.index + 1] == '\n':
+ self.index += 1
+
+ self.lineNumber += 1
+ self.index += 1
+ self.lineStart = self.index
+ elif ch == '*':
+ # Block comment ends with '*/'.
+ if self.source[self.index + 1] == '/':
+ self.index += 2
+ if self.trackComment:
+ loc.end = Position(
+ line=self.lineNumber,
+ column=self.index - self.lineStart
+ )
+ entry = Comment(
+ multiLine=True,
+ slice=[start + 2, self.index - 2],
+ range=[start, self.index],
+ loc=loc
+ )
+ comments.append(entry)
+
+ return comments
+
+ self.index += 1
+ else:
+ self.index += 1
+
+ # Ran off the end of the file - the whole thing is a comment
+ if self.trackComment:
+ loc.end = Position(
+ line=self.lineNumber,
+ column=self.index - self.lineStart
+ )
+ entry = Comment(
+ multiLine=True,
+ slice=[start + 2, self.index],
+ range=[start, self.index],
+ loc=loc
+ )
+ comments.append(entry)
+
+ self.tolerateUnexpectedToken()
+ return comments
+
+ def scanComments(self):
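+ # Skip whitespace, line terminators, '//' and '/* */' comments, plus the
+ # Annex B HTML-like forms: '<!--' (outside modules) and a line-leading '-->'.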
+ comments = []
+
+ start = self.index == 0
+ while not self.eof():
+ ch = self.source[self.index]
+
+ if Character.isWhiteSpace(ch):
+ self.index += 1
+ elif Character.isLineTerminator(ch):
+ self.index += 1
+ if ch == '\r' and self.source[self.index] == '\n':
+ self.index += 1
+
+ self.lineNumber += 1
+ self.lineStart = self.index
+ start = True
+ elif ch == '/': # U+002F is '/'
+ ch = self.source[self.index + 1]
+ if ch == '/':
+ self.index += 2
+ comment = self.skipSingleLineComment(2)
+ if self.trackComment:
+ comments.extend(comment)
+
+ start = True
+ elif ch == '*': # U+002A is '*'
+ self.index += 2
+ comment = self.skipMultiLineComment()
+ if self.trackComment:
+ comments.extend(comment)
+
+ else:
+ break
+
+ elif start and ch == '-': # U+002D is '-'
+ # U+003E is '>'
+ if self.source[self.index + 1:self.index + 3] == '->':
+ # '-->' is a single-line comment
+ self.index += 3
+ comment = self.skipSingleLineComment(3)
+ if self.trackComment:
+ comments.extend(comment)
+
+ else:
+ break
+
+ elif ch == '<' and not self.isModule: # U+003C is '<'
+ if self.source[self.index + 1:self.index + 4] == '!--':
+ self.index += 4 # `<!--`
+ comment = self.skipSingleLineComment(4)
+ if self.trackComment:
+ comments.extend(comment)
+
+ else:
+ break
+
+ else:
+ break
+
+ return comments
+
+ # https://tc39.github.io/ecma262/#sec-future-reserved-words
+
+ def isFutureReservedWord(self, id):
+ return id in self.isFutureReservedWord.set
+ isFutureReservedWord.set = set((
+ 'enum',
+ 'export',
+ 'import',
+ 'super',
+ ))
+
+ def isStrictModeReservedWord(self, id):
+ return id in self.isStrictModeReservedWord.set
+ isStrictModeReservedWord.set = set((
+ 'implements',
+ 'interface',
+ 'package',
+ 'private',
+ 'protected',
+ 'public',
+ 'static',
+ 'yield',
+ 'let',
+ ))
+
+ def isRestrictedWord(self, id):
+ return id in self.isRestrictedWord.set
+ isRestrictedWord.set = set((
+ 'eval', 'arguments',
+ ))
+
+ # https://tc39.github.io/ecma262/#sec-keywords
+
+ def isKeyword(self, id):
+ return id in self.isKeyword.set
+ isKeyword.set = set((
+ 'if', 'in', 'do',
+
+ 'var', 'for', 'new',
+ 'try', 'let',
+
+ 'this', 'else', 'case',
+ 'void', 'with', 'enum',
+
+ 'while', 'break', 'catch',
+ 'throw', 'const', 'yield',
+ 'class', 'super',
+
+ 'return', 'typeof', 'delete',
+ 'switch', 'export', 'import',
+
+ 'default', 'finally', 'extends',
+
+ 'function', 'continue', 'debugger',
+
+ 'instanceof',
+ ))
+
+ def codePointAt(self, i):
+ return uord(self.source[i:i + 2])
+
+ def scanHexEscape(self, prefix):
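+ # Read exactly four hex digits for a 'u' escape or two for an 'x' escape;
+ # return None when a digit is missing so the caller can report the invalid
+ # escape sequence.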
+ length = 4 if prefix == 'u' else 2
+ code = 0
+
+ for i in xrange(length):
+ if not self.eof() and Character.isHexDigit(self.source[self.index]):
+ ch = self.source[self.index]
+ self.index += 1
+ code = code * 16 + hexValue(ch)
+ else:
+ return None
+
+ return uchr(code)
+
+ def scanUnicodeCodePointEscape(self):
+ ch = self.source[self.index]
+ code = 0
+
+ # At least one hex digit is required.
+ if ch == '}':
+ self.throwUnexpectedToken()
+
+ while not self.eof():
+ ch = self.source[self.index]
+ self.index += 1
+ if not Character.isHexDigit(ch):
+ break
+
+ code = code * 16 + hexValue(ch)
+
+ if code > 0x10FFFF or ch != '}':
+ self.throwUnexpectedToken()
+
+ return Character.fromCodePoint(code)
+
+ def getIdentifier(self):
+ start = self.index
+ self.index += 1
+ while not self.eof():
+ ch = self.source[self.index]
+ if ch == '\\':
+ # Backslash (U+005C) marks a Unicode escape sequence.
+ self.index = start
+ return self.getComplexIdentifier()
+ else:
+ cp = ord(ch)
+ if cp >= 0xD800 and cp < 0xDFFF:
+ # Need to handle surrogate pairs.
+ self.index = start
+ return self.getComplexIdentifier()
+
+ if Character.isIdentifierPart(ch):
+ self.index += 1
+ else:
+ break
+
+ return self.source[start:self.index]
+
+ def getComplexIdentifier(self):
+ cp = self.codePointAt(self.index)
+ id = Character.fromCodePoint(cp)
+ self.index += len(id)
+
+ # '\u' (U+005C, U+0075) denotes an escaped character.
+ if cp == 0x5C:
+ if self.source[self.index] != 'u':
+ self.throwUnexpectedToken()
+
+ self.index += 1
+ if self.source[self.index] == '{':
+ self.index += 1
+ ch = self.scanUnicodeCodePointEscape()
+ else:
+ ch = self.scanHexEscape('u')
+ if not ch or ch == '\\' or not Character.isIdentifierStart(ch[0]):
+ self.throwUnexpectedToken()
+
+ id = ch
+
+ while not self.eof():
+ cp = self.codePointAt(self.index)
+ ch = Character.fromCodePoint(cp)
+ if not Character.isIdentifierPart(ch):
+ break
+
+ id += ch
+ self.index += len(ch)
+
+ # '\u' (U+005C, U+0075) denotes an escaped character.
+ if cp == 0x5C:
+ id = id[:-1]
+ if self.source[self.index] != 'u':
+ self.throwUnexpectedToken()
+
+ self.index += 1
+ if self.source[self.index] == '{':
+ self.index += 1
+ ch = self.scanUnicodeCodePointEscape()
+ else:
+ ch = self.scanHexEscape('u')
+ if not ch or ch == '\\' or not Character.isIdentifierPart(ch[0]):
+ self.throwUnexpectedToken()
+
+ id += ch
+
+ return id
+
+ def octalToDecimal(self, ch):
+ # \0 is not octal escape sequence
+ octal = ch != '0'
+ code = octalValue(ch)
+
+ if not self.eof() and Character.isOctalDigit(self.source[self.index]):
+ octal = True
+ code = code * 8 + octalValue(self.source[self.index])
+ self.index += 1
+
+ # 3 digits are only allowed when string starts
+ # with 0, 1, 2, 3
+ if ch in '0123' and not self.eof() and Character.isOctalDigit(self.source[self.index]):
+ code = code * 8 + octalValue(self.source[self.index])
+ self.index += 1
+
+ return Octal(octal, code)
+
+ # https://tc39.github.io/ecma262/#sec-names-and-keywords
+
+ def scanIdentifier(self):
+ start = self.index
+
+ # Backslash (U+005C) starts an escaped character.
+ id = self.getComplexIdentifier() if self.source[start] == '\\' else self.getIdentifier()
+
+ # There is no keyword or literal with only one character.
+ # Thus, it must be an identifier.
+ if len(id) == 1:
+ type = Token.Identifier
+ elif self.isKeyword(id):
+ type = Token.Keyword
+ elif id == 'null':
+ type = Token.NullLiteral
+ elif id == 'true' or id == 'false':
+ type = Token.BooleanLiteral
+ else:
+ type = Token.Identifier
+
+ if type is not Token.Identifier and start + len(id) != self.index:
+ restore = self.index
+ self.index = start
+ self.tolerateUnexpectedToken(Messages.InvalidEscapedReservedWord)
+ self.index = restore
+
+ return RawToken(
+ type=type,
+ value=id,
+ lineNumber=self.lineNumber,
+ lineStart=self.lineStart,
+ start=start,
+ end=self.index
+ )
+
+ # https://tc39.github.io/ecma262/#sec-punctuators
+
+ def scanPunctuator(self):
+ start = self.index
+
+ # Check for most common single-character punctuators.
+ str = self.source[self.index]
+ if str in (
+ '(',
+ '{',
+ ):
+ if str == '{':
+ self.curlyStack.append('{')
+
+ self.index += 1
+
+ elif str == '.':
+ self.index += 1
+ if self.source[self.index] == '.' and self.source[self.index + 1] == '.':
+ # Spread operator: ...
+ self.index += 2
+ str = '...'
+
+ elif str == '}':
+ self.index += 1
+ if self.curlyStack:
+ self.curlyStack.pop()
+
+ elif str in (
+ ')',
+ ';',
+ ',',
+ '[',
+ ']',
+ ':',
+ '?',
+ '~',
+ ):
+ self.index += 1
+
+ else:
+ # 4-character punctuator.
+ str = self.source[self.index:self.index + 4]
+ if str == '>>>=':
+ self.index += 4
+ else:
+
+ # 3-character punctuators.
+ str = str[:3]
+ if str in (
+ '===', '!==', '>>>',
+ '<<=', '>>=', '**='
+ ):
+ self.index += 3
+ else:
+
+ # 2-character punctuators.
+ str = str[:2]
+ if str in (
+ '&&', '||', '==', '!=',
+ '+=', '-=', '*=', '/=',
+ '++', '--', '<<', '>>',
+ '&=', '|=', '^=', '%=',
+ '<=', '>=', '=>', '**',
+ ):
+ self.index += 2
+ else:
+
+ # 1-character punctuators.
+ str = self.source[self.index]
+ if str in '<>=!+-*%&|^/':
+ self.index += 1
+
+ if self.index == start:
+ self.throwUnexpectedToken()
+
+ return RawToken(
+ type=Token.Punctuator,
+ value=str,
+ lineNumber=self.lineNumber,
+ lineStart=self.lineStart,
+ start=start,
+ end=self.index
+ )
+
+ # https://tc39.github.io/ecma262/#sec-literals-numeric-literals
+
+ def scanHexLiteral(self, start):
+ num = ''
+
+ while not self.eof():
+ if not Character.isHexDigit(self.source[self.index]):
+ break
+
+ num += self.source[self.index]
+ self.index += 1
+
+ if len(num) == 0:
+ self.throwUnexpectedToken()
+
+ if Character.isIdentifierStart(self.source[self.index]):
+ self.throwUnexpectedToken()
+
+ return RawToken(
+ type=Token.NumericLiteral,
+ value=int(num, 16),
+ lineNumber=self.lineNumber,
+ lineStart=self.lineStart,
+ start=start,
+ end=self.index
+ )
+
+ def scanBinaryLiteral(self, start):
+ num = ''
+
+ while not self.eof():
+ ch = self.source[self.index]
+ if ch != '0' and ch != '1':
+ break
+
+ num += self.source[self.index]
+ self.index += 1
+
+ if len(num) == 0:
+ # only 0b or 0B
+ self.throwUnexpectedToken()
+
+ if not self.eof():
+ ch = self.source[self.index]
+ if Character.isIdentifierStart(ch) or Character.isDecimalDigit(ch):
+ self.throwUnexpectedToken()
+
+ return RawToken(
+ type=Token.NumericLiteral,
+ value=int(num, 2),
+ lineNumber=self.lineNumber,
+ lineStart=self.lineStart,
+ start=start,
+ end=self.index
+ )
+
+ def scanOctalLiteral(self, prefix, start):
+ num = ''
+ octal = False
+
+ if Character.isOctalDigit(prefix[0]):
+ octal = True
+ num = '0' + self.source[self.index]
+ self.index += 1
+
+ while not self.eof():
+ if not Character.isOctalDigit(self.source[self.index]):
+ break
+
+ num += self.source[self.index]
+ self.index += 1
+
+ if not octal and len(num) == 0:
+ # only 0o or 0O
+ self.throwUnexpectedToken()
+
+ if Character.isIdentifierStart(self.source[self.index]) or Character.isDecimalDigit(self.source[self.index]):
+ self.throwUnexpectedToken()
+
+ return RawToken(
+ type=Token.NumericLiteral,
+ value=int(num, 8),
+ octal=octal,
+ lineNumber=self.lineNumber,
+ lineStart=self.lineStart,
+ start=start,
+ end=self.index
+ )
+
+ def isImplicitOctalLiteral(self):
+ # Implicit octal, unless there is a non-octal digit.
+ # (Annex B.1.1 on Numeric Literals)
+ for i in xrange(self.index + 1, self.length):
+ ch = self.source[i]
+ if ch in '89':
+ return False
+ if not Character.isOctalDigit(ch):
+ return True
+ return True
+
+ def scanNumericLiteral(self):
+ start = self.index
+ ch = self.source[start]
+ assert Character.isDecimalDigit(ch) or ch == '.', 'Numeric literal must start with a decimal digit or a decimal point'
+
+ num = ''
+ if ch != '.':
+ num = self.source[self.index]
+ self.index += 1
+ ch = self.source[self.index]
+
+ # Hex number starts with '0x'.
+ # Octal number starts with '0'.
+ # Octal number in ES6 starts with '0o'.
+ # Binary number in ES6 starts with '0b'.
+ if num == '0':
+ if ch in ('x', 'X'):
+ self.index += 1
+ return self.scanHexLiteral(start)
+
+ if ch in ('b', 'B'):
+ self.index += 1
+ return self.scanBinaryLiteral(start)
+
+ if ch in ('o', 'O'):
+ return self.scanOctalLiteral(ch, start)
+
+ if ch and Character.isOctalDigit(ch):
+ if self.isImplicitOctalLiteral():
+ return self.scanOctalLiteral(ch, start)
+
+ while Character.isDecimalDigit(self.source[self.index]):
+ num += self.source[self.index]
+ self.index += 1
+
+ ch = self.source[self.index]
+
+ if ch == '.':
+ num += self.source[self.index]
+ self.index += 1
+ while Character.isDecimalDigit(self.source[self.index]):
+ num += self.source[self.index]
+ self.index += 1
+
+ ch = self.source[self.index]
+
+ if ch in ('e', 'E'):
+ num += self.source[self.index]
+ self.index += 1
+
+ ch = self.source[self.index]
+ if ch in ('+', '-'):
+ num += self.source[self.index]
+ self.index += 1
+
+ if Character.isDecimalDigit(self.source[self.index]):
+ while Character.isDecimalDigit(self.source[self.index]):
+ num += self.source[self.index]
+ self.index += 1
+
+ else:
+ self.throwUnexpectedToken()
+
+ if Character.isIdentifierStart(self.source[self.index]):
+ self.throwUnexpectedToken()
+
+ value = float(num)
+ return RawToken(
+ type=Token.NumericLiteral,
+ value=int(value) if value.is_integer() else value,
+ lineNumber=self.lineNumber,
+ lineStart=self.lineStart,
+ start=start,
+ end=self.index
+ )
+
+ # https://tc39.github.io/ecma262/#sec-literals-string-literals
+
+ def scanStringLiteral(self):
+ start = self.index
+ quote = self.source[start]
+ assert quote in ('\'', '"'), 'String literal must start with a quote'
+
+ self.index += 1
+ octal = False
+ str = ''
+
+ while not self.eof():
+ ch = self.source[self.index]
+ self.index += 1
+
+ if ch == quote:
+ quote = ''
+ break
+ elif ch == '\\':
+ ch = self.source[self.index]
+ self.index += 1
+ if not ch or not Character.isLineTerminator(ch):
+ if ch == 'u':
+ if self.source[self.index] == '{':
+ self.index += 1
+ str += self.scanUnicodeCodePointEscape()
+ else:
+ unescapedChar = self.scanHexEscape(ch)
+ if not unescapedChar:
+ self.throwUnexpectedToken()
+
+ str += unescapedChar
+
+ elif ch == 'x':
+ unescaped = self.scanHexEscape(ch)
+ if not unescaped:
+ self.throwUnexpectedToken(Messages.InvalidHexEscapeSequence)
+
+ str += unescaped
+ elif ch == 'n':
+ str += '\n'
+ elif ch == 'r':
+ str += '\r'
+ elif ch == 't':
+ str += '\t'
+ elif ch == 'b':
+ str += '\b'
+ elif ch == 'f':
+ str += '\f'
+ elif ch == 'v':
+ str += '\x0B'
+ elif ch in (
+ '8',
+ '9',
+ ):
+ str += ch
+ self.tolerateUnexpectedToken()
+
+ else:
+ if ch and Character.isOctalDigit(ch):
+ octToDec = self.octalToDecimal(ch)
+
+ octal = octToDec.octal or octal
+ str += uchr(octToDec.code)
+ else:
+ str += ch
+
+ else:
+ self.lineNumber += 1
+ if ch == '\r' and self.source[self.index] == '\n':
+ self.index += 1
+
+ self.lineStart = self.index
+
+ elif Character.isLineTerminator(ch):
+ break
+ else:
+ str += ch
+
+ if quote != '':
+ self.index = start
+ self.throwUnexpectedToken()
+
+ return RawToken(
+ type=Token.StringLiteral,
+ value=str,
+ octal=octal,
+ lineNumber=self.lineNumber,
+ lineStart=self.lineStart,
+ start=start,
+ end=self.index
+ )
+
+ # https://tc39.github.io/ecma262/#sec-template-literal-lexical-components
+
+ def scanTemplate(self):
+ cooked = ''
+ terminated = False
+ start = self.index
+
+ head = self.source[start] == '`'
+ tail = False
+ rawOffset = 2
+
+ self.index += 1
+
+ while not self.eof():
+ ch = self.source[self.index]
+ self.index += 1
+ if ch == '`':
+ rawOffset = 1
+ tail = True
+ terminated = True
+ break
+ elif ch == '$':
+ if self.source[self.index] == '{':
+ self.curlyStack.append('${')
+ self.index += 1
+ terminated = True
+ break
+
+ cooked += ch
+ elif ch == '\\':
+ ch = self.source[self.index]
+ self.index += 1
+ if not Character.isLineTerminator(ch):
+ if ch == 'n':
+ cooked += '\n'
+ elif ch == 'r':
+ cooked += '\r'
+ elif ch == 't':
+ cooked += '\t'
+ elif ch == 'u':
+ if self.source[self.index] == '{':
+ self.index += 1
+ cooked += self.scanUnicodeCodePointEscape()
+ else:
+ restore = self.index
+ unescapedChar = self.scanHexEscape(ch)
+ if unescapedChar:
+ cooked += unescapedChar
+ else:
+ self.index = restore
+ cooked += ch
+
+ elif ch == 'x':
+ unescaped = self.scanHexEscape(ch)
+ if not unescaped:
+ self.throwUnexpectedToken(Messages.InvalidHexEscapeSequence)
+
+ cooked += unescaped
+ elif ch == 'b':
+ cooked += '\b'
+ elif ch == 'f':
+ cooked += '\f'
+ elif ch == 'v':
+ cooked += '\v'
+
+ else:
+ if ch == '0':
+ if Character.isDecimalDigit(self.source[self.index]):
+ # Illegal: \01 \02 and so on
+ self.throwUnexpectedToken(Messages.TemplateOctalLiteral)
+
+ cooked += '\0'
+ elif Character.isOctalDigit(ch):
+ # Illegal: \1 \2
+ self.throwUnexpectedToken(Messages.TemplateOctalLiteral)
+ else:
+ cooked += ch
+
+ else:
+ self.lineNumber += 1
+ if ch == '\r' and self.source[self.index] == '\n':
+ self.index += 1
+
+ self.lineStart = self.index
+
+ elif Character.isLineTerminator(ch):
+ self.lineNumber += 1
+ if ch == '\r' and self.source[self.index] == '\n':
+ self.index += 1
+
+ self.lineStart = self.index
+ cooked += '\n'
+ else:
+ cooked += ch
+
+ if not terminated:
+ self.throwUnexpectedToken()
+
+ if not head:
+ if self.curlyStack:
+ self.curlyStack.pop()
+
+ return RawToken(
+ type=Token.Template,
+ value=self.source[start + 1:self.index - rawOffset],
+ cooked=cooked,
+ head=head,
+ tail=tail,
+ lineNumber=self.lineNumber,
+ lineStart=self.lineStart,
+ start=start,
+ end=self.index
+ )
+
+ # https://tc39.github.io/ecma262/#sec-literals-regular-expression-literals
+
+ def testRegExp(self, pattern, flags):
+ # The BMP character to use as a replacement for astral symbols when
+ # translating an ES6 "u"-flagged pattern to an ES5-compatible
+ # approximation.
+ # Note: replacing with '\uFFFF' enables false positives in unlikely
+ # scenarios. For example, `[\u{1044f}-\u{10440}]` is an invalid
+ # pattern that would not be detected by this substitution.
+ astralSubstitute = '\uFFFF'
+
+ # Replace every Unicode escape sequence with the equivalent
+ # BMP character or a constant ASCII code point in the case of
+ # astral symbols. (See the above note on `astralSubstitute`
+ # for more information.)
+ def astralSub(m):
+ codePoint = int(m.group(1) or m.group(2), 16)
+ if codePoint > 0x10FFFF:
+ self.tolerateUnexpectedToken(Messages.InvalidRegExp)
+ elif codePoint <= 0xFFFF:
+ return uchr(codePoint)
+ return astralSubstitute
+ pattern = re.sub(r'\\u\{([0-9a-fA-F]+)\}|\\u([a-fA-F0-9]{4})', astralSub, pattern)
+
+ # Replace each paired surrogate with a single ASCII symbol to
+ # avoid throwing on regular expressions that are only valid in
+ # combination with the "u" flag.
+ pattern = re.sub(r'[\uD800-\uDBFF][\uDC00-\uDFFF]', astralSubstitute, pattern)
+
+ # Return a regular expression object for this pattern-flag pair, or
+ # `null` in case the current environment doesn't support the flags it
+ # uses.
+ pyflags = (re.M if 'm' in flags else 0) | (re.I if 'i' in flags else 0)
+ try:
+ return re.compile(pattern, pyflags)
+ except Exception:
+ self.tolerateUnexpectedToken(Messages.InvalidRegExp)
+
+ def scanRegExpBody(self):
+ ch = self.source[self.index]
+ assert ch == '/', 'Regular expression literal must start with a slash'
+
+ str = self.source[self.index]
+ self.index += 1
+ classMarker = False
+ terminated = False
+
+ while not self.eof():
+ ch = self.source[self.index]
+ self.index += 1
+ str += ch
+ if ch == '\\':
+ ch = self.source[self.index]
+ self.index += 1
+ # https://tc39.github.io/ecma262/#sec-literals-regular-expression-literals
+ if Character.isLineTerminator(ch):
+ self.throwUnexpectedToken(Messages.UnterminatedRegExp)
+
+ str += ch
+ elif Character.isLineTerminator(ch):
+ self.throwUnexpectedToken(Messages.UnterminatedRegExp)
+ elif classMarker:
+ if ch == ']':
+ classMarker = False
+
+ else:
+ if ch == '/':
+ terminated = True
+ break
+ elif ch == '[':
+ classMarker = True
+
+ if not terminated:
+ self.throwUnexpectedToken(Messages.UnterminatedRegExp)
+
+ # Exclude leading and trailing slash.
+ return str[1:-1]
+
+ def scanRegExpFlags(self):
+ str = ''
+ flags = ''
+ while not self.eof():
+ ch = self.source[self.index]
+ if not Character.isIdentifierPart(ch):
+ break
+
+ self.index += 1
+ if ch == '\\' and not self.eof():
+ ch = self.source[self.index]
+ if ch == 'u':
+ self.index += 1
+ restore = self.index
+ char = self.scanHexEscape('u')
+ if char:
+ flags += char
+ str += '\\u'
+ while restore < self.index:
+ str += self.source[restore]
+ restore += 1
+
+ else:
+ self.index = restore
+ flags += 'u'
+ str += '\\u'
+
+ self.tolerateUnexpectedToken()
+ else:
+ str += '\\'
+ self.tolerateUnexpectedToken()
+
+ else:
+ flags += ch
+ str += ch
+
+ return flags
+
+ def scanRegExp(self):
+ start = self.index
+
+ pattern = self.scanRegExpBody()
+ flags = self.scanRegExpFlags()
+ value = self.testRegExp(pattern, flags)
+
+ return RawToken(
+ type=Token.RegularExpression,
+ value='',
+ pattern=pattern,
+ flags=flags,
+ regex=value,
+ lineNumber=self.lineNumber,
+ lineStart=self.lineStart,
+ start=start,
+ end=self.index
+ )
+
+ def lex(self):
+ if self.eof():
+ return RawToken(
+ type=Token.EOF,
+ value='',
+ lineNumber=self.lineNumber,
+ lineStart=self.lineStart,
+ start=self.index,
+ end=self.index
+ )
+
+ ch = self.source[self.index]
+
+ if Character.isIdentifierStart(ch):
+ return self.scanIdentifier()
+
+ # Very common: ( and ) and ;
+ if ch in ('(', ')', ';'):
+ return self.scanPunctuator()
+
+ # String literal starts with single quote (U+0027) or double quote (U+0022).
+ if ch in ('\'', '"'):
+ return self.scanStringLiteral()
+
+ # Dot (.) U+002E can also start a floating-point number, hence the need
+ # to check the next character.
+ if ch == '.':
+ if Character.isDecimalDigit(self.source[self.index + 1]):
+ return self.scanNumericLiteral()
+
+ return self.scanPunctuator()
+
+ if Character.isDecimalDigit(ch):
+ return self.scanNumericLiteral()
+
+ # Template literals start with ` (U+0060) for template head
+ # or } (U+007D) for template middle or template tail.
+ if ch == '`' or (ch == '}' and self.curlyStack and self.curlyStack[-1] == '${'):
+ return self.scanTemplate()
+
+ # Possible identifier start in a surrogate pair.
+ cp = ord(ch)
+ if cp >= 0xD800 and cp < 0xDFFF:
+ cp = self.codePointAt(self.index)
+ ch = Character.fromCodePoint(cp)
+ if Character.isIdentifierStart(ch):
+ return self.scanIdentifier()
+
+ return self.scanPunctuator()
diff --git a/third_party/python/esprima/esprima/syntax.py b/third_party/python/esprima/esprima/syntax.py
new file mode 100644
index 0000000000..001b641e25
--- /dev/null
+++ b/third_party/python/esprima/esprima/syntax.py
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import unicode_literals
+
+
+class Syntax:
+ AssignmentExpression = "AssignmentExpression"
+ AssignmentPattern = "AssignmentPattern"
+ ArrayExpression = "ArrayExpression"
+ ArrayPattern = "ArrayPattern"
+ ArrowFunctionExpression = "ArrowFunctionExpression"
+ AwaitExpression = "AwaitExpression"
+ BlockStatement = "BlockStatement"
+ BinaryExpression = "BinaryExpression"
+ BreakStatement = "BreakStatement"
+ CallExpression = "CallExpression"
+ CatchClause = "CatchClause"
+ ClassBody = "ClassBody"
+ ClassDeclaration = "ClassDeclaration"
+ ClassExpression = "ClassExpression"
+ ConditionalExpression = "ConditionalExpression"
+ ContinueStatement = "ContinueStatement"
+ DoWhileStatement = "DoWhileStatement"
+ DebuggerStatement = "DebuggerStatement"
+ EmptyStatement = "EmptyStatement"
+ ExportAllDeclaration = "ExportAllDeclaration"
+ ExportDefaultDeclaration = "ExportDefaultDeclaration"
+ ExportNamedDeclaration = "ExportNamedDeclaration"
+ ExportSpecifier = "ExportSpecifier"
+ ExportDefaultSpecifier = "ExportDefaultSpecifier"
+ ExpressionStatement = "ExpressionStatement"
+ ForStatement = "ForStatement"
+ ForOfStatement = "ForOfStatement"
+ ForInStatement = "ForInStatement"
+ FunctionDeclaration = "FunctionDeclaration"
+ FunctionExpression = "FunctionExpression"
+ Identifier = "Identifier"
+ IfStatement = "IfStatement"
+ Import = "Import"
+ ImportDeclaration = "ImportDeclaration"
+ ImportDefaultSpecifier = "ImportDefaultSpecifier"
+ ImportNamespaceSpecifier = "ImportNamespaceSpecifier"
+ ImportSpecifier = "ImportSpecifier"
+ Literal = "Literal"
+ LabeledStatement = "LabeledStatement"
+ LogicalExpression = "LogicalExpression"
+ MemberExpression = "MemberExpression"
+ MetaProperty = "MetaProperty"
+ MethodDefinition = "MethodDefinition"
+ FieldDefinition = "FieldDefinition"
+ NewExpression = "NewExpression"
+ ObjectExpression = "ObjectExpression"
+ ObjectPattern = "ObjectPattern"
+ Program = "Program"
+ Property = "Property"
+ RestElement = "RestElement"
+ ReturnStatement = "ReturnStatement"
+ SequenceExpression = "SequenceExpression"
+ SpreadElement = "SpreadElement"
+ Super = "Super"
+ SwitchCase = "SwitchCase"
+ SwitchStatement = "SwitchStatement"
+ TaggedTemplateExpression = "TaggedTemplateExpression"
+ TemplateElement = "TemplateElement"
+ TemplateLiteral = "TemplateLiteral"
+ ThisExpression = "ThisExpression"
+ ThrowStatement = "ThrowStatement"
+ TryStatement = "TryStatement"
+ UnaryExpression = "UnaryExpression"
+ UpdateExpression = "UpdateExpression"
+ VariableDeclaration = "VariableDeclaration"
+ VariableDeclarator = "VariableDeclarator"
+ WhileStatement = "WhileStatement"
+ WithStatement = "WithStatement"
+ YieldExpression = "YieldExpression"
+
+ ArrowParameterPlaceHolder = "ArrowParameterPlaceHolder"
+ BlockComment = "BlockComment"
+ LineComment = "LineComment"
diff --git a/third_party/python/esprima/esprima/token.py b/third_party/python/esprima/esprima/token.py
new file mode 100644
index 0000000000..846ced6002
--- /dev/null
+++ b/third_party/python/esprima/esprima/token.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import unicode_literals
+
+
+class Token:
+ BooleanLiteral = 1
+ EOF = 2
+ Identifier = 3
+ Keyword = 4
+ NullLiteral = 5
+ NumericLiteral = 6
+ Punctuator = 7
+ StringLiteral = 8
+ RegularExpression = 9
+ Template = 10
+
+
+TokenName = {}
+TokenName[Token.BooleanLiteral] = "Boolean"
+TokenName[Token.EOF] = "<end>"
+TokenName[Token.Identifier] = "Identifier"
+TokenName[Token.Keyword] = "Keyword"
+TokenName[Token.NullLiteral] = "Null"
+TokenName[Token.NumericLiteral] = "Numeric"
+TokenName[Token.Punctuator] = "Punctuator"
+TokenName[Token.StringLiteral] = "String"
+TokenName[Token.RegularExpression] = "RegularExpression"
+TokenName[Token.Template] = "Template"
diff --git a/third_party/python/esprima/esprima/tokenizer.py b/third_party/python/esprima/esprima/tokenizer.py
new file mode 100644
index 0000000000..288193965d
--- /dev/null
+++ b/third_party/python/esprima/esprima/tokenizer.py
@@ -0,0 +1,193 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, unicode_literals
+
+from collections import deque
+
+from .objects import Object
+from .error_handler import ErrorHandler
+from .scanner import Scanner, SourceLocation, Position, RegExp
+from .token import Token, TokenName
+
+
+class BufferEntry(Object):
+ def __init__(self, type, value, regex=None, range=None, loc=None):
+ self.type = type
+ self.value = value
+ self.regex = regex
+ self.range = range
+ self.loc = loc
+
+
+class Reader(object):
+ def __init__(self):
+ self.values = []
+ self.curly = self.paren = -1
+
+ # A function following one of those tokens is an expression.
+ def beforeFunctionExpression(self, t):
+ return t in (
+ '(', '{', '[', 'in', 'typeof', 'instanceof', 'new',
+ 'return', 'case', 'delete', 'throw', 'void',
+ # assignment operators
+ '=', '+=', '-=', '*=', '**=', '/=', '%=', '<<=', '>>=', '>>>=',
+ '&=', '|=', '^=', ',',
+ # binary/unary operators
+ '+', '-', '*', '**', '/', '%', '++', '--', '<<', '>>', '>>>', '&',
+ '|', '^', '!', '~', '&&', '||', '?', ':', '===', '==', '>=',
+ '<=', '<', '>', '!=', '!=='
+ )
+
+ # Determine if forward slash (/) is an operator or part of a regular expression
+ # https://github.com/mozilla/sweet.js/wiki/design
+ def isRegexStart(self):
+ if not self.values:
+ return True
+
+ previous = self.values[-1]
+ regex = previous is not None
+
+ if previous in (
+ 'this',
+ ']',
+ ):
+ regex = False
+ elif previous == ')':
+ keyword = self.values[self.paren - 1]
+ regex = keyword in ('if', 'while', 'for', 'with')
+
+ elif previous == '}':
+ # Dividing a function by anything makes little sense,
+ # but we have to check for that.
+ regex = True
+ if len(self.values) >= 3 and self.values[self.curly - 3] == 'function':
+ # Anonymous function, e.g. function(){} /42
+ check = self.values[self.curly - 4]
+ regex = not self.beforeFunctionExpression(check) if check else False
+ elif len(self.values) >= 4 and self.values[self.curly - 4] == 'function':
+ # Named function, e.g. function f(){} /42/
+ check = self.values[self.curly - 5]
+ regex = not self.beforeFunctionExpression(check) if check else True
+
+ return regex
+
+ def append(self, token):
+ if token.type in (Token.Punctuator, Token.Keyword):
+ if token.value == '{':
+ self.curly = len(self.values)
+ elif token.value == '(':
+ self.paren = len(self.values)
+ self.values.append(token.value)
+ else:
+ self.values.append(None)
+
+
+class Config(Object):
+ def __init__(self, tolerant=None, comment=None, range=None, loc=None, **options):
+ self.tolerant = tolerant
+ self.comment = comment
+ self.range = range
+ self.loc = loc
+ for k, v in options.items():
+ setattr(self, k, v)
+
+
+class Tokenizer(object):
+ def __init__(self, code, options):
+ self.config = Config(**options)
+
+ self.errorHandler = ErrorHandler()
+ self.errorHandler.tolerant = self.config.tolerant
+ self.scanner = Scanner(code, self.errorHandler)
+ self.scanner.trackComment = self.config.comment
+
+ self.trackRange = self.config.range
+ self.trackLoc = self.config.loc
+ self.buffer = deque()
+ self.reader = Reader()
+
+ def errors(self):
+ return self.errorHandler.errors
+
+ def getNextToken(self):
+ if not self.buffer:
+
+ comments = self.scanner.scanComments()
+ if self.scanner.trackComment:
+ for e in comments:
+ value = self.scanner.source[e.slice[0]:e.slice[1]]
+ comment = BufferEntry(
+ type='BlockComment' if e.multiLine else 'LineComment',
+ value=value
+ )
+ if self.trackRange:
+ comment.range = e.range
+ if self.trackLoc:
+ comment.loc = e.loc
+ self.buffer.append(comment)
+
+ if not self.scanner.eof():
+ if self.trackLoc:
+ loc = SourceLocation(
+ start=Position(
+ line=self.scanner.lineNumber,
+ column=self.scanner.index - self.scanner.lineStart
+ ),
+ end=Position(),
+ )
+
+ maybeRegex = self.scanner.source[self.scanner.index] == '/' and self.reader.isRegexStart()
+ if maybeRegex:
+ state = self.scanner.saveState()
+ try:
+ token = self.scanner.scanRegExp()
+ except Exception:
+ self.scanner.restoreState(state)
+ token = self.scanner.lex()
+ else:
+ token = self.scanner.lex()
+
+ self.reader.append(token)
+
+ entry = BufferEntry(
+ type=TokenName[token.type],
+ value=self.scanner.source[token.start:token.end]
+ )
+ if self.trackRange:
+ entry.range = [token.start, token.end]
+ if self.trackLoc:
+ loc.end = Position(
+ line=self.scanner.lineNumber,
+ column=self.scanner.index - self.scanner.lineStart
+ )
+ entry.loc = loc
+ if token.type is Token.RegularExpression:
+ entry.regex = RegExp(
+ pattern=token.pattern,
+ flags=token.flags,
+ )
+
+ self.buffer.append(entry)
+
+ return self.buffer.popleft() if self.buffer else None
diff --git a/third_party/python/esprima/esprima/utils.py b/third_party/python/esprima/esprima/utils.py
new file mode 100644
index 0000000000..62cbe9e91b
--- /dev/null
+++ b/third_party/python/esprima/esprima/utils.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, unicode_literals
+
+import re
+
+from .compat import unicode
+
+
+def format(messageFormat, *args):
+ def formatter(m):
+ formatter.idx += 1
+ assert formatter.idx < len(args), 'Message reference must be in range'
+ return unicode(args[formatter.idx])
+ formatter.idx = -1
+ return format.re.sub(formatter, messageFormat)
+
+
+format.re = re.compile(r'%(\d)')
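+
+# Illustrative use (not part of the original module):
+#   format('Unexpected token %0', '}')  ->  'Unexpected token }'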
diff --git a/third_party/python/esprima/esprima/visitor.py b/third_party/python/esprima/esprima/visitor.py
new file mode 100644
index 0000000000..c508eb6b37
--- /dev/null
+++ b/third_party/python/esprima/esprima/visitor.py
@@ -0,0 +1,288 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import unicode_literals
+
+import json
+import types
+from collections import deque
+
+from .objects import Object
+from .compat import PY3, unicode
+
+
+class VisitRecursionError(Exception):
+ pass
+
+
+class Visited(object):
+ def __init__(self, result):
+ if isinstance(result, Visited):
+ result = result.result
+ self.result = result
+
+
+class Visitor(object):
+ """
+ An Object visitor base class that walks the abstract syntax tree and calls a
+ visitor function for every Object found. This function may return a value
+ which is forwarded by the `visit` method.
+
+ This class is meant to be subclassed, with the subclass adding visitor
+ methods.
+
+ By default the visitor function for a node is ``'visit_'`` + the
+ class name of the Object. So a `Module` Object visit function would
+ be `visit_Module`. This behavior can be changed by overriding
+ the `visit` method. If no explicit visitor function exists for an
+ Object, the `visit_Object` visitor is used instead.
+ """
+
+ def __call__(self, obj, metadata):
+ return self.transform(obj, metadata)
+
+ def transform(self, obj, metadata):
+ """Transform an Object."""
+ if isinstance(obj, Object):
+ method = 'transform_' + obj.__class__.__name__
+ transformer = getattr(self, method, self.transform_Object)
+ new_obj = transformer(obj, metadata)
+ if new_obj is not None and obj is not new_obj:
+ obj = new_obj
+ return obj
+
+ def transform_Object(self, obj, metadata):
+ """Called if no explicit transform function exists for an Object."""
+ return obj
+
+ def generic_visit(self, obj):
+ return self.visit(self.visit_Object(obj))
+
+ def visit(self, obj):
+ """Visit a Object."""
+ if not hasattr(self, 'visitors'):
+ self._visit_context = {}
+ self._visit_count = 0
+ try:
+ self._visit_count += 1
+ stack = deque()
+ stack.append((obj, None))
+ last_result = None
+ while stack:
+ try:
+ last, visited = stack[-1]
+ if isinstance(last, types.GeneratorType):
+ stack.append((last.send(last_result), None))
+ last_result = None
+ elif isinstance(last, Visited):
+ stack.pop()
+ last_result = last.result
+ elif isinstance(last, Object):
+ if last in self._visit_context:
+ if self._visit_context[last] == self.visit_Object:
+ visitor = self.visit_RecursionError
+ else:
+ visitor = self.visit_Object
+ else:
+ method = 'visit_' + last.__class__.__name__
+ visitor = getattr(self, method, self.visit_Object)
+ self._visit_context[last] = visitor
+ stack.pop()
+ stack.append((visitor(last), last))
+ else:
+ method = 'visit_' + last.__class__.__name__
+ visitor = getattr(self, method, self.visit_Generic)
+ stack.pop()
+ stack.append((visitor(last), None))
+ except StopIteration:
+ stack.pop()
+ if visited and visited in self._visit_context:
+ del self._visit_context[visited]
+ return last_result
+ finally:
+ self._visit_count -= 1
+ if self._visit_count <= 0:
+ self._visit_context = {}
+
+ def visit_RecursionError(self, obj):
+ raise VisitRecursionError
+
+ def visit_Object(self, obj):
+ """Called if no explicit visitor function exists for an Object."""
+ yield obj.__dict__
+ yield Visited(obj)
+
+ def visit_Generic(self, obj):
+ """Called if no explicit visitor function exists for an object."""
+ yield Visited(obj)
+
+ def visit_list(self, obj):
+ for item in obj:
+ yield item
+ yield Visited(obj)
+
+ visit_Array = visit_list
+
+ def visit_dict(self, obj):
+ for field, value in list(obj.items()):
+ if not field.startswith('_'):
+ yield value
+ yield Visited(obj)
+
+
+class NodeVisitor(Visitor):
+ pass
+
+
+class ReprVisitor(Visitor):
+ def visit(self, obj, indent=4, nl="\n", sp="", skip=()):
+ self.level = 0
+ if isinstance(indent, int):
+ indent = " " * indent
+ self.indent = indent
+ self.nl = nl
+ self.sp = sp
+ self.skip = skip
+ return super(ReprVisitor, self).visit(obj)
+
+ def visit_RecursionError(self, obj):
+ yield Visited("...")
+
+ def visit_Object(self, obj):
+ value_repr = yield obj.__dict__
+ yield Visited(value_repr)
+
+ def visit_Generic(self, obj):
+ yield Visited(repr(obj))
+
+ def visit_list(self, obj):
+ indent1 = self.indent * self.level
+ indent2 = indent1 + self.indent
+ self.level += 1
+ try:
+ items = []
+ for item in obj:
+ v = yield item
+ items.append(v)
+ if items:
+ value_repr = "[%s%s%s%s%s%s%s]" % (
+ self.sp,
+ self.nl,
+ indent2,
+ (",%s%s%s" % (self.nl, self.sp, indent2)).join(items),
+ self.nl,
+ indent1,
+ self.sp,
+ )
+ else:
+ value_repr = "[]"
+ finally:
+ self.level -= 1
+
+ yield Visited(value_repr)
+
+ visit_Array = visit_list
+
+ def visit_dict(self, obj):
+ indent1 = self.indent * self.level
+ indent2 = indent1 + self.indent
+ self.level += 1
+ try:
+ items = []
+ for k, item in obj.items():
+ if item is not None and not k.startswith('_') and k not in self.skip:
+ v = yield item
+ items.append("%s: %s" % (k, v))
+ if items:
+ value_repr = "{%s%s%s%s%s%s%s}" % (
+ self.sp,
+ self.nl,
+ indent2,
+ (",%s%s%s" % (self.nl, self.sp, indent2)).join(items),
+ self.nl,
+ indent1,
+ self.sp,
+ )
+ else:
+ value_repr = "{}"
+ finally:
+ self.level -= 1
+
+ yield Visited(value_repr)
+
+ if PY3:
+ def visit_str(self, obj):
+ value_repr = json.dumps(obj)
+ yield Visited(value_repr)
+ else:
+ def visit_unicode(self, obj):
+ value_repr = json.dumps(obj)
+ yield Visited(value_repr)
+
+ def visit_SourceLocation(self, obj):
+ old_indent, self.indent = self.indent, ""
+ old_nl, self.nl = self.nl, ""
+ old_sp, self.sp = self.sp, ""
+ try:
+ yield obj
+ finally:
+ self.indent = old_indent
+ self.nl = old_nl
+ self.sp = old_sp
+
+
+class ToDictVisitor(Visitor):
+ map = {
+ 'isAsync': 'async',
+ 'allowAwait': 'await',
+ }
+
+ def visit_RecursionError(self, obj):
+ yield Visited({
+ 'error': "Infinite recursion detected...",
+ })
+
+ def visit_Object(self, obj):
+ obj = yield obj.__dict__
+ yield Visited(obj)
+
+ def visit_list(self, obj):
+ items = []
+ for item in obj:
+ v = yield item
+ items.append(v)
+ yield Visited(items)
+
+ visit_Array = visit_list
+
+ def visit_dict(self, obj):
+ items = []
+ for k, item in obj.items():
+ if item is not None and not k.startswith('_'):
+ v = yield item
+ k = unicode(k)
+ items.append((self.map.get(k, k), v))
+ yield Visited(dict(items))
+
+ def visit_SRE_Pattern(self, obj):
+ yield Visited({})
diff --git a/third_party/python/esprima/esprima/xhtml_entities.py b/third_party/python/esprima/esprima/xhtml_entities.py
new file mode 100644
index 0000000000..7d487bbda8
--- /dev/null
+++ b/third_party/python/esprima/esprima/xhtml_entities.py
@@ -0,0 +1,281 @@
+# -*- coding: utf-8 -*-
+# Copyright JS Foundation and other contributors, https://js.foundation/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import unicode_literals
+
+# Generated by generate-xhtml-entities.js. DO NOT MODIFY!
+
+XHTMLEntities = {
+ 'quot': "\u0022",
+ 'amp': "\u0026",
+ 'apos': "\u0027",
+ 'gt': "\u003E",
+ 'nbsp': "\u00A0",
+ 'iexcl': "\u00A1",
+ 'cent': "\u00A2",
+ 'pound': "\u00A3",
+ 'curren': "\u00A4",
+ 'yen': "\u00A5",
+ 'brvbar': "\u00A6",
+ 'sect': "\u00A7",
+ 'uml': "\u00A8",
+ 'copy': "\u00A9",
+ 'ordf': "\u00AA",
+ 'laquo': "\u00AB",
+ 'not': "\u00AC",
+ 'shy': "\u00AD",
+ 'reg': "\u00AE",
+ 'macr': "\u00AF",
+ 'deg': "\u00B0",
+ 'plusmn': "\u00B1",
+ 'sup2': "\u00B2",
+ 'sup3': "\u00B3",
+ 'acute': "\u00B4",
+ 'micro': "\u00B5",
+ 'para': "\u00B6",
+ 'middot': "\u00B7",
+ 'cedil': "\u00B8",
+ 'sup1': "\u00B9",
+ 'ordm': "\u00BA",
+ 'raquo': "\u00BB",
+ 'frac14': "\u00BC",
+ 'frac12': "\u00BD",
+ 'frac34': "\u00BE",
+ 'iquest': "\u00BF",
+ 'Agrave': "\u00C0",
+ 'Aacute': "\u00C1",
+ 'Acirc': "\u00C2",
+ 'Atilde': "\u00C3",
+ 'Auml': "\u00C4",
+ 'Aring': "\u00C5",
+ 'AElig': "\u00C6",
+ 'Ccedil': "\u00C7",
+ 'Egrave': "\u00C8",
+ 'Eacute': "\u00C9",
+ 'Ecirc': "\u00CA",
+ 'Euml': "\u00CB",
+ 'Igrave': "\u00CC",
+ 'Iacute': "\u00CD",
+ 'Icirc': "\u00CE",
+ 'Iuml': "\u00CF",
+ 'ETH': "\u00D0",
+ 'Ntilde': "\u00D1",
+ 'Ograve': "\u00D2",
+ 'Oacute': "\u00D3",
+ 'Ocirc': "\u00D4",
+ 'Otilde': "\u00D5",
+ 'Ouml': "\u00D6",
+ 'times': "\u00D7",
+ 'Oslash': "\u00D8",
+ 'Ugrave': "\u00D9",
+ 'Uacute': "\u00DA",
+ 'Ucirc': "\u00DB",
+ 'Uuml': "\u00DC",
+ 'Yacute': "\u00DD",
+ 'THORN': "\u00DE",
+ 'szlig': "\u00DF",
+ 'agrave': "\u00E0",
+ 'aacute': "\u00E1",
+ 'acirc': "\u00E2",
+ 'atilde': "\u00E3",
+ 'auml': "\u00E4",
+ 'aring': "\u00E5",
+ 'aelig': "\u00E6",
+ 'ccedil': "\u00E7",
+ 'egrave': "\u00E8",
+ 'eacute': "\u00E9",
+ 'ecirc': "\u00EA",
+ 'euml': "\u00EB",
+ 'igrave': "\u00EC",
+ 'iacute': "\u00ED",
+ 'icirc': "\u00EE",
+ 'iuml': "\u00EF",
+ 'eth': "\u00F0",
+ 'ntilde': "\u00F1",
+ 'ograve': "\u00F2",
+ 'oacute': "\u00F3",
+ 'ocirc': "\u00F4",
+ 'otilde': "\u00F5",
+ 'ouml': "\u00F6",
+ 'divide': "\u00F7",
+ 'oslash': "\u00F8",
+ 'ugrave': "\u00F9",
+ 'uacute': "\u00FA",
+ 'ucirc': "\u00FB",
+ 'uuml': "\u00FC",
+ 'yacute': "\u00FD",
+ 'thorn': "\u00FE",
+ 'yuml': "\u00FF",
+ 'OElig': "\u0152",
+ 'oelig': "\u0153",
+ 'Scaron': "\u0160",
+ 'scaron': "\u0161",
+ 'Yuml': "\u0178",
+ 'fnof': "\u0192",
+ 'circ': "\u02C6",
+ 'tilde': "\u02DC",
+ 'Alpha': "\u0391",
+ 'Beta': "\u0392",
+ 'Gamma': "\u0393",
+ 'Delta': "\u0394",
+ 'Epsilon': "\u0395",
+ 'Zeta': "\u0396",
+ 'Eta': "\u0397",
+ 'Theta': "\u0398",
+ 'Iota': "\u0399",
+ 'Kappa': "\u039A",
+ 'Lambda': "\u039B",
+ 'Mu': "\u039C",
+ 'Nu': "\u039D",
+ 'Xi': "\u039E",
+ 'Omicron': "\u039F",
+ 'Pi': "\u03A0",
+ 'Rho': "\u03A1",
+ 'Sigma': "\u03A3",
+ 'Tau': "\u03A4",
+ 'Upsilon': "\u03A5",
+ 'Phi': "\u03A6",
+ 'Chi': "\u03A7",
+ 'Psi': "\u03A8",
+ 'Omega': "\u03A9",
+ 'alpha': "\u03B1",
+ 'beta': "\u03B2",
+ 'gamma': "\u03B3",
+ 'delta': "\u03B4",
+ 'epsilon': "\u03B5",
+ 'zeta': "\u03B6",
+ 'eta': "\u03B7",
+ 'theta': "\u03B8",
+ 'iota': "\u03B9",
+ 'kappa': "\u03BA",
+ 'lambda': "\u03BB",
+ 'mu': "\u03BC",
+ 'nu': "\u03BD",
+ 'xi': "\u03BE",
+ 'omicron': "\u03BF",
+ 'pi': "\u03C0",
+ 'rho': "\u03C1",
+ 'sigmaf': "\u03C2",
+ 'sigma': "\u03C3",
+ 'tau': "\u03C4",
+ 'upsilon': "\u03C5",
+ 'phi': "\u03C6",
+ 'chi': "\u03C7",
+ 'psi': "\u03C8",
+ 'omega': "\u03C9",
+ 'thetasym': "\u03D1",
+ 'upsih': "\u03D2",
+ 'piv': "\u03D6",
+ 'ensp': "\u2002",
+ 'emsp': "\u2003",
+ 'thinsp': "\u2009",
+ 'zwnj': "\u200C",
+ 'zwj': "\u200D",
+ 'lrm': "\u200E",
+ 'rlm': "\u200F",
+ 'ndash': "\u2013",
+ 'mdash': "\u2014",
+ 'lsquo': "\u2018",
+ 'rsquo': "\u2019",
+ 'sbquo': "\u201A",
+ 'ldquo': "\u201C",
+ 'rdquo': "\u201D",
+ 'bdquo': "\u201E",
+ 'dagger': "\u2020",
+ 'Dagger': "\u2021",
+ 'bull': "\u2022",
+ 'hellip': "\u2026",
+ 'permil': "\u2030",
+ 'prime': "\u2032",
+ 'Prime': "\u2033",
+ 'lsaquo': "\u2039",
+ 'rsaquo': "\u203A",
+ 'oline': "\u203E",
+ 'frasl': "\u2044",
+ 'euro': "\u20AC",
+ 'image': "\u2111",
+ 'weierp': "\u2118",
+ 'real': "\u211C",
+ 'trade': "\u2122",
+ 'alefsym': "\u2135",
+ 'larr': "\u2190",
+ 'uarr': "\u2191",
+ 'rarr': "\u2192",
+ 'darr': "\u2193",
+ 'harr': "\u2194",
+ 'crarr': "\u21B5",
+ 'lArr': "\u21D0",
+ 'uArr': "\u21D1",
+ 'rArr': "\u21D2",
+ 'dArr': "\u21D3",
+ 'hArr': "\u21D4",
+ 'forall': "\u2200",
+ 'part': "\u2202",
+ 'exist': "\u2203",
+ 'empty': "\u2205",
+ 'nabla': "\u2207",
+ 'isin': "\u2208",
+ 'notin': "\u2209",
+ 'ni': "\u220B",
+ 'prod': "\u220F",
+ 'sum': "\u2211",
+ 'minus': "\u2212",
+ 'lowast': "\u2217",
+ 'radic': "\u221A",
+ 'prop': "\u221D",
+ 'infin': "\u221E",
+ 'ang': "\u2220",
+ 'and': "\u2227",
+ 'or': "\u2228",
+ 'cap': "\u2229",
+ 'cup': "\u222A",
+ 'int': "\u222B",
+ 'there4': "\u2234",
+ 'sim': "\u223C",
+ 'cong': "\u2245",
+ 'asymp': "\u2248",
+ 'ne': "\u2260",
+ 'equiv': "\u2261",
+ 'le': "\u2264",
+ 'ge': "\u2265",
+ 'sub': "\u2282",
+ 'sup': "\u2283",
+ 'nsub': "\u2284",
+ 'sube': "\u2286",
+ 'supe': "\u2287",
+ 'oplus': "\u2295",
+ 'otimes': "\u2297",
+ 'perp': "\u22A5",
+ 'sdot': "\u22C5",
+ 'lceil': "\u2308",
+ 'rceil': "\u2309",
+ 'lfloor': "\u230A",
+ 'rfloor': "\u230B",
+ 'loz': "\u25CA",
+ 'spades': "\u2660",
+ 'clubs': "\u2663",
+ 'hearts': "\u2665",
+ 'diams': "\u2666",
+ 'lang': "\u27E8",
+ 'rang': "\u27E9",
+}
diff --git a/third_party/python/esprima/setup.cfg b/third_party/python/esprima/setup.cfg
new file mode 100644
index 0000000000..8bfd5a12f8
--- /dev/null
+++ b/third_party/python/esprima/setup.cfg
@@ -0,0 +1,4 @@
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/esprima/setup.py b/third_party/python/esprima/setup.py
new file mode 100644
index 0000000000..a28ff7e127
--- /dev/null
+++ b/third_party/python/esprima/setup.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+import os
+
+from esprima import version
+
+
+def read(fname):
+ try:
+ with open(os.path.join(os.path.dirname(__file__), fname), "r") as fp:
+ return fp.read().strip()
+ except IOError:
+ return ''
+
+
+setup(
+ name="esprima",
+ version=version,
+ author="German M. Bravo (Kronuz)",
+ author_email="german.mb@gmail.com",
+ url="https://github.com/Kronuz/esprima-python",
+ license="BSD License",
+ keywords="esprima ecmascript javascript parser ast",
+ description="ECMAScript parsing infrastructure for multipurpose analysis in Python",
+ long_description=read("README.rst"),
+ packages=["esprima"],
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: BSD License",
+ "Operating System :: OS Independent",
+ "Topic :: Software Development :: Code Generators",
+ "Topic :: Software Development :: Compilers",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ "Topic :: Text Processing :: General",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ ],
+ entry_points={
+ 'console_scripts': [
+ 'esprima = esprima.__main__:main',
+ ]
+ },
+)
diff --git a/third_party/python/fluent.migrate/PKG-INFO b/third_party/python/fluent.migrate/PKG-INFO
new file mode 100644
index 0000000000..9112b5ec94
--- /dev/null
+++ b/third_party/python/fluent.migrate/PKG-INFO
@@ -0,0 +1,62 @@
+Metadata-Version: 2.1
+Name: fluent.migrate
+Version: 0.10
+Summary: Toolchain to migrate legacy translation to Fluent.
+Home-page: https://hg.mozilla.org/l10n/fluent-migration/
+Author: Mozilla
+Author-email: l10n-drivers@mozilla.org
+License: APL 2
+Description: Fluent Migration Tools
+ ======================
+
+ Programmatically create Fluent files from existing content in both legacy
+ and Fluent formats. Use recipes written in Python to migrate content for each
+ of your localizations.
+
+ `migrate-l10n` is a CLI script which uses the `fluent.migrate` module under
+ the hood to run migrations on existing translations.
+
+ `validate-l10n-recipe` is a CLI script to test a migration recipe for common
+ errors, without trying to apply it.
+
+ Installation
+ ------------
+
+ Install from PyPI:
+
+ pip install fluent.migrate[hg]
+
+ If you only want to use the `MigrationContext` API, you can drop the
+ requirement on `python-hglib`:
+
+ pip install fluent.migrate
+
+ Usage
+ -----
+
+ Migrations consist of _recipes_, which are applied to a _localization repository_, based on _template files_.
+ You can find recipes for Firefox in `mozilla-central/python/l10n/fluent_migrations/`,
+ the reference repository is [gecko-strings](https://hg.mozilla.org/l10n/gecko-strings/) or _quarantine_.
+ You apply those migrations to l10n repositories in [l10n-central](https://hg.mozilla.org/l10n-central/), or to `gecko-strings` for testing.
+
+ The migrations are run as python modules, so you need to have their file location in `PYTHONPATH`.
+
+ An example would look like
+
+ $ migrate-l10n --lang it --reference-dir gecko-strings --localization-dir l10n-central/it bug_1451992_preferences_sitedata bug_1451992_preferences_translation
+
+ Contact
+ -------
+
+ - mailing list: https://lists.mozilla.org/listinfo/tools-l10n
+ - bugzilla: [Open Bugs](https://bugzilla.mozilla.org/buglist.cgi?component=Fluent%20Migration&product=Localization%20Infrastructure%20and%20Tools&bug_status=__open__) - [New Bug](https://bugzilla.mozilla.org/enter_bug.cgi?product=Localization%20Infrastructure%20and%20Tools&component=Fluent%20Migration)
+
+Keywords: fluent,localization,l10n
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.7
+Description-Content-Type: text/markdown
+Provides-Extra: hg
diff --git a/third_party/python/fluent.migrate/README.md b/third_party/python/fluent.migrate/README.md
new file mode 100644
index 0000000000..5d925eece9
--- /dev/null
+++ b/third_party/python/fluent.migrate/README.md
@@ -0,0 +1,44 @@
+Fluent Migration Tools
+======================
+
+Programmatically create Fluent files from existing content in both legacy
+and Fluent formats. Use recipes written in Python to migrate content for each
+of your localizations.
+
+`migrate-l10n` is a CLI script which uses the `fluent.migrate` module under
+the hood to run migrations on existing translations.
+
+`validate-l10n-recipe` is a CLI script to test a migration recipe for common
+errors, without trying to apply it.
+
+Installation
+------------
+
+Install from PyPI:
+
+ pip install fluent.migrate[hg]
+
+If you only want to use the `MigrationContext` API, you can drop the
+requirement on `python-hglib`:
+
+ pip install fluent.migrate
+
+Usage
+-----
+
+Migrations consist of _recipes_, which are applied to a _localization repository_, based on _template files_.
+You can find recipes for Firefox in `mozilla-central/python/l10n/fluent_migrations/`,
+the reference repository is [gecko-strings](https://hg.mozilla.org/l10n/gecko-strings/) or _quarantine_.
+You apply those migrations to l10n repositories in [l10n-central](https://hg.mozilla.org/l10n-central/), or to `gecko-strings` for testing.
+
+The migrations are run as python modules, so you need to have their file location in `PYTHONPATH`.
+
+An example would look like
+
+ $ migrate-l10n --lang it --reference-dir gecko-strings --localization-dir l10n-central/it bug_1451992_preferences_sitedata bug_1451992_preferences_translation
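+
+A migration recipe is an ordinary Python module that defines a `migrate(ctx)`
+function and registers transforms on the context. A rough sketch, for
+illustration only (the bug number, paths and message ids below are invented;
+real recipes live in `mozilla-central/python/l10n/fluent_migrations/`):
+
+    # bug_NNNNNNN_example.py -- hypothetical recipe
+    from fluent.migrate.helpers import transforms_from
+
+    def migrate(ctx):
+        """Bug NNNNNNN - Migrate example strings to Fluent, part {index}."""
+        ctx.add_transforms(
+            "browser/browser/example.ftl",    # target resource
+            "browser/browser/example.ftl",    # reference resource
+            transforms_from(
+                """
+example-title = { COPY(source, "example.title") }
+""",
+                source="browser/chrome/browser/example.properties",
+            ),
+        )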
+
+Contact
+-------
+
+ - mailing list: https://lists.mozilla.org/listinfo/tools-l10n
+ - bugzilla: [Open Bugs](https://bugzilla.mozilla.org/buglist.cgi?component=Fluent%20Migration&product=Localization%20Infrastructure%20and%20Tools&bug_status=__open__) - [New Bug](https://bugzilla.mozilla.org/enter_bug.cgi?product=Localization%20Infrastructure%20and%20Tools&component=Fluent%20Migration)
diff --git a/third_party/python/fluent.migrate/fluent/__init__.py b/third_party/python/fluent.migrate/fluent/__init__.py
new file mode 100644
index 0000000000..69e3be50da
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/__init__.py
@@ -0,0 +1 @@
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/third_party/python/fluent.migrate/fluent/migrate/__init__.py b/third_party/python/fluent.migrate/fluent/migrate/__init__.py
new file mode 100644
index 0000000000..fead341500
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/migrate/__init__.py
@@ -0,0 +1,5 @@
+# coding=utf8
+
+from .transforms import ( # noqa: F401
+ CONCAT, COPY, COPY_PATTERN, PLURALS, REPLACE, REPLACE_IN_TEXT
+)
diff --git a/third_party/python/fluent.migrate/fluent/migrate/_context.py b/third_party/python/fluent.migrate/fluent/migrate/_context.py
new file mode 100644
index 0000000000..53a771d58a
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/migrate/_context.py
@@ -0,0 +1,333 @@
+# coding=utf8
+from __future__ import unicode_literals
+from __future__ import absolute_import
+
+import os
+import codecs
+from functools import partial
+import logging
+from six.moves import zip_longest
+
+import fluent.syntax.ast as FTL
+from fluent.syntax.parser import FluentParser
+from fluent.syntax.serializer import FluentSerializer
+from compare_locales.parser import getParser
+from compare_locales.plurals import get_plural
+
+from .evaluator import Evaluator
+from .merge import merge_resource
+from .errors import (
+ UnreadableReferenceError,
+)
+
+
+class InternalContext(object):
+ """Internal context for merging translation resources.
+
+ For the public interface, see `context.MigrationContext`.
+ """
+
+ def __init__(
+ self, lang, reference_dir, localization_dir, enforce_translated=False
+ ):
+ self.fluent_parser = FluentParser(with_spans=False)
+ self.fluent_serializer = FluentSerializer()
+
+ # An iterable of plural category names relevant to the context's
+ # language. E.g. ('one', 'other') for English.
+ self.plural_categories = get_plural(lang)
+ if self.plural_categories is None:
+ logger = logging.getLogger('migrate')
+ logger.warning(
+ 'Plural rule for "{}" is not defined in '
+ 'compare-locales'.format(lang))
+ self.plural_categories = ('one', 'other')
+
+ self.enforce_translated = enforce_translated
+ # Parsed input resources stored by resource path.
+ self.reference_resources = {}
+ self.localization_resources = {}
+ self.target_resources = {}
+
+ # An iterable of `FTL.Message` objects some of whose nodes can be the
+ # transform operations.
+ self.transforms = {}
+
+ # The evaluator instance is an AST transformer capable of walking an
+ # AST hierarchy and evaluating nodes which are migration Transforms.
+ self.evaluator = Evaluator(self)
+
+ def read_ftl_resource(self, path):
+ """Read an FTL resource and parse it into an AST."""
+ f = codecs.open(path, 'r', 'utf8')
+ try:
+ contents = f.read()
+ except UnicodeDecodeError as err:
+ logger = logging.getLogger('migrate')
+ logger.warning('Unable to read file {}: {}'.format(path, err))
+ raise err
+ finally:
+ f.close()
+
+ ast = self.fluent_parser.parse(contents)
+
+ annots = [
+ annot
+ for entry in ast.body
+ if isinstance(entry, FTL.Junk)
+ for annot in entry.annotations
+ ]
+
+ if len(annots):
+ logger = logging.getLogger('migrate')
+ for annot in annots:
+ msg = annot.message
+ logger.warning('Syntax error in {}: {}'.format(path, msg))
+
+ return ast
+
+ def read_legacy_resource(self, path):
+ """Read a legacy resource and parse it into a dict."""
+ parser = getParser(path)
+ parser.readFile(path)
+ # Transform the parsed result which is an iterator into a dict.
+ return {
+ entity.key: entity.val for entity in parser
+ if entity.localized or self.enforce_translated
+ }
+
+ def read_reference_ftl(self, path):
+ """Read and parse a reference FTL file.
+
+ A missing resource file is a fatal error and will raise an
+ UnreadableReferenceError.
+ """
+ fullpath = os.path.join(self.reference_dir, path)
+ try:
+ return self.read_ftl_resource(fullpath)
+ except IOError:
+ error_message = 'Missing reference file: {}'.format(fullpath)
+ logging.getLogger('migrate').error(error_message)
+ raise UnreadableReferenceError(error_message)
+ except UnicodeDecodeError as err:
+ error_message = 'Error reading file {}: {}'.format(fullpath, err)
+ logging.getLogger('migrate').error(error_message)
+ raise UnreadableReferenceError(error_message)
+
+ def read_localization_ftl(self, path):
+ """Read and parse an existing localization FTL file.
+
+ Create a new FTL.Resource if the file doesn't exist or can't be
+ decoded.
+ """
+ fullpath = os.path.join(self.localization_dir, path)
+ try:
+ return self.read_ftl_resource(fullpath)
+ except IOError:
+ logger = logging.getLogger('migrate')
+ logger.info(
+ 'Localization file {} does not exist and '
+ 'it will be created'.format(path))
+ return FTL.Resource()
+ except UnicodeDecodeError:
+ logger = logging.getLogger('migrate')
+ logger.warning(
+ 'Localization file {} has broken encoding. '
+ 'It will be re-created and some translations '
+ 'may be lost'.format(path))
+ return FTL.Resource()
+
+ def maybe_add_localization(self, path):
+ """Add a localization resource to migrate translations from.
+
+ For legacy sources, uses a compare-locales parser to create a dict
+ mapping each entity key to its string value.
+ For Fluent sources, we store the parsed AST.
+ """
+ try:
+ fullpath = os.path.join(self.localization_dir, path)
+ if not fullpath.endswith('.ftl'):
+ collection = self.read_legacy_resource(fullpath)
+ else:
+ collection = self.read_ftl_resource(fullpath)
+ except IOError:
+ logger = logging.getLogger('migrate')
+ logger.warning('Missing localization file: {}'.format(path))
+ else:
+ self.localization_resources[path] = collection
+
+ def get_legacy_source(self, path, key):
+ """Get an entity value from a localized legacy source.
+
+ Used by the `Source` transform.
+ """
+ resource = self.localization_resources[path]
+ return resource.get(key, None)
+
+ def get_fluent_source_pattern(self, path, key):
+ """Get a pattern from a localized Fluent source.
+
+ If the key contains a `.`, does an attribute lookup.
+ Used by the `COPY_PATTERN` transform.
+ """
+ resource = self.localization_resources[path]
+ msg_key, _, attr_key = key.partition('.')
+ found = None
+ for entry in resource.body:
+ if isinstance(entry, (FTL.Message, FTL.Term)):
+ if entry.id.name == msg_key:
+ found = entry
+ break
+ if found is None:
+ return None
+ if not attr_key:
+ return found.value
+ for attribute in found.attributes:
+ if attribute.id.name == attr_key:
+ return attribute.value
+ return None
+
+ def messages_equal(self, res1, res2):
+ """Compare messages and terms of two FTL resources.
+
+ Uses FTL.BaseNode.equals to compare all messages/terms
+ in two FTL resources.
+ If the order or number of messages differ, the result is also False.
+ """
+ def message_id(message):
+ "Return the message's identifer name for sorting purposes."
+ return message.id.name
+
+ messages1 = sorted(
+ (entry for entry in res1.body
+ if isinstance(entry, FTL.Message)
+ or isinstance(entry, FTL.Term)),
+ key=message_id)
+ messages2 = sorted(
+ (entry for entry in res2.body
+ if isinstance(entry, FTL.Message)
+ or isinstance(entry, FTL.Term)),
+ key=message_id)
+ for msg1, msg2 in zip_longest(messages1, messages2):
+ if msg1 is None or msg2 is None:
+ return False
+ if not msg1.equals(msg2):
+ return False
+ return True
+
+ def merge_changeset(self, changeset=None, known_translations=None):
+ """Return a generator of FTL ASTs for the changeset.
+
+ The input data must be configured earlier using the `add_*` methods.
+        If given, `changeset` must be a set of (path, key) tuples describing
+        which legacy translations are to be merged. If `changeset` is None,
+        all legacy translations are migrated in a single changeset.
+
+ We use the `in_changeset` method to determine if a message should be
+ migrated for the given changeset.
+
+        For each resource that changed, yield a `(path, FTL.Resource)` tuple.
+        The yielded resources are also used to update this context's existing
+        localization resources.
+ """
+
+ if changeset is None:
+ # Merge all known legacy translations. Used in tests.
+ changeset = {
+ (path, key)
+ for path, strings in self.localization_resources.items()
+ if not path.endswith('.ftl')
+ for key in strings.keys()
+ }
+
+ if known_translations is None:
+ known_translations = changeset
+
+ for path, reference in self.reference_resources.items():
+ current = self.target_resources[path]
+ transforms = self.transforms.get(path, [])
+ in_changeset = partial(
+ self.in_changeset, changeset, known_translations, path)
+
+ # Merge legacy translations with the existing ones using the
+ # reference as a template.
+ snapshot = merge_resource(
+ self, reference, current, transforms, in_changeset
+ )
+
+ # Skip this path if the messages in the merged snapshot are
+ # identical to those in the current state of the localization file.
+ # This may happen when:
+ #
+        #   - none of the transforms is in the changeset, or
+ # - all messages which would be migrated by the context's
+ # transforms already exist in the current state.
+ if self.messages_equal(current, snapshot):
+ continue
+
+ # Store the merged snapshot on the context so that the next merge
+ # already takes it into account as the existing localization.
+ self.target_resources[path] = snapshot
+
+ # The result for this path is a complete `FTL.Resource`.
+ yield path, snapshot
+
+ def in_changeset(self, changeset, known_translations, path, ident):
+ """Check if a message should be migrated in this changeset.
+
+ The message is identified by path and ident.
+
+ A message will be migrated only if all of its dependencies
+ are present in the currently processed changeset.
+
+ If a transform defined for this message points to a missing
+ legacy translation, this message will not be merged. The
+ missing legacy dependency won't be present in the changeset.
+
+ This also means that partially translated messages (e.g.
+ constructed from two legacy strings out of which only one is
+        available) will never be migrated.
+ """
+ message_deps = self.dependencies.get((path, ident), None)
+
+ # Don't merge if we don't have a transform for this message.
+ if message_deps is None:
+ return False
+
+ # As a special case, if a transform exists but has no
+        # dependencies, it's a hardcoded `FTL.Node` which doesn't
+ # migrate any existing translation but rather creates a new
+ # one. Merge it.
+ if len(message_deps) == 0:
+ return True
+
+ # Make sure all the dependencies are present in the current
+ # changeset. Partial migrations are not currently supported.
+ # See https://bugzilla.mozilla.org/show_bug.cgi?id=1321271
+ # We only return True if our current changeset touches
+ # the transform, and we have all of the dependencies.
+ active_deps = message_deps & changeset
+ available_deps = message_deps & known_translations
+ return active_deps and message_deps == available_deps
+
+ def serialize_changeset(self, changeset, known_translations=None):
+ """Return a dict of serialized FTLs for the changeset.
+
+ Given `changeset`, return a dict whose keys are resource paths and
+ values are serialized FTL snapshots.
+ """
+
+ return {
+ path: self.fluent_serializer.serialize(snapshot)
+ for path, snapshot in self.merge_changeset(
+ changeset, known_translations
+ )
+ }
+
+ def evaluate(self, node):
+ return self.evaluator.visit(node)
+
+
+logging.basicConfig()
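+
+# A hypothetical usage sketch (not part of the upstream module) of the merge
+# machinery defined above: once transforms have been added to a context `ctx`,
+# a changeset is a set of (path, key) tuples, and `serialize_changeset` drives
+# `merge_changeset` and `in_changeset` for every transform. The path and key
+# below are made up.
+#
+#     changes = {('browser/chrome/old.properties', 'old.key')}
+#     for path, source in ctx.serialize_changeset(changes).items():
+#         print(path)
+#         print(source)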
diff --git a/third_party/python/fluent.migrate/fluent/migrate/blame.py b/third_party/python/fluent.migrate/fluent/migrate/blame.py
new file mode 100644
index 0000000000..9cc3ba5ab0
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/migrate/blame.py
@@ -0,0 +1,84 @@
+# coding=utf8
+from __future__ import unicode_literals
+from __future__ import absolute_import
+
+import argparse
+import json
+import os
+
+from compare_locales.parser import getParser, Junk
+from compare_locales.parser.fluent import FluentEntity
+from compare_locales import mozpath
+import hglib
+from hglib.util import b, cmdbuilder
+
+
+class Blame(object):
+ def __init__(self, client):
+ self.client = client
+ self.users = []
+ self.blame = {}
+
+ def attribution(self, file_paths):
+ args = cmdbuilder(
+ b('annotate'), *[b(p) for p in file_paths], template='json',
+ date=True, user=True, cwd=self.client.root())
+ blame_json = self.client.rawcommand(args)
+ file_blames = json.loads(blame_json)
+
+ for file_blame in file_blames:
+ self.handleFile(file_blame)
+
+ return {'authors': self.users,
+ 'blame': self.blame}
+
+ def handleFile(self, file_blame):
+ path = mozpath.normsep(file_blame['path'])
+
+ try:
+ parser = getParser(path)
+ except UserWarning:
+ return
+
+ self.blame[path] = {}
+
+ self.readFile(parser, path)
+ entities = parser.parse()
+ for e in entities:
+ if isinstance(e, Junk):
+ continue
+ if e.val_span:
+ key_vals = [(e.key, e.val_span)]
+ else:
+ key_vals = []
+ if isinstance(e, FluentEntity):
+ key_vals += [
+ ('{}.{}'.format(e.key, attr.key), attr.val_span)
+ for attr in e.attributes
+ ]
+ for key, (val_start, val_end) in key_vals:
+ entity_lines = file_blame['lines'][
+ (e.ctx.linecol(val_start)[0] - 1):e.ctx.linecol(val_end)[0]
+ ]
+ # ignore timezone
+ entity_lines.sort(key=lambda blame: -blame['date'][0])
+ line_blame = entity_lines[0]
+ user = line_blame['user']
+ timestamp = line_blame['date'][0] # ignore timezone
+ if user not in self.users:
+ self.users.append(user)
+ userid = self.users.index(user)
+ self.blame[path][key] = [userid, timestamp]
+
+ def readFile(self, parser, path):
+ parser.readFile(os.path.join(self.client.root().decode('utf-8'), path))
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('repo_path')
+ parser.add_argument('file_path', nargs='+')
+ args = parser.parse_args()
+ blame = Blame(hglib.open(args.repo_path))
+ attrib = blame.attribution(args.file_path)
+ print(json.dumps(attrib, indent=4, separators=(',', ': ')))
diff --git a/third_party/python/fluent.migrate/fluent/migrate/changesets.py b/third_party/python/fluent.migrate/fluent/migrate/changesets.py
new file mode 100644
index 0000000000..e4ad95f2d1
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/migrate/changesets.py
@@ -0,0 +1,59 @@
+# coding=utf8
+from __future__ import absolute_import
+
+import time
+
+
+def by_first_commit(item):
+ """Order two changesets by their first commit date."""
+ return item['first_commit']
+
+
+def convert_blame_to_changesets(blame_json):
+ """Convert a blame dict into a list of changesets.
+
+ The blame information in `blame_json` should be a dict of the following
+ structure:
+
+ {
+ 'authors': [
+ 'A.N. Author <author@example.com>',
+ ],
+ 'blame': {
+ 'path/one': {
+ 'key1': [0, 1346095921.0],
+ },
+ }
+ }
+
+ It will be transformed into a list of changesets which can be fed into
+ `InternalContext.serialize_changeset`:
+
+ [
+ {
+ 'author': 'A.N. Author <author@example.com>',
+ 'first_commit': 1346095921.0,
+ 'changes': {
+ ('path/one', 'key1'),
+ }
+ },
+ ]
+
+ """
+ now = time.time()
+ changesets = [
+ {
+ 'author': author,
+ 'first_commit': now,
+ 'changes': set()
+ } for author in blame_json['authors']
+ ]
+
+ for path, keys_info in blame_json['blame'].items():
+ for key, (author_index, timestamp) in keys_info.items():
+ changeset = changesets[author_index]
+ changeset['changes'].add((path, key))
+ if timestamp < changeset['first_commit']:
+ changeset['first_commit'] = timestamp
+
+ return sorted(changesets, key=by_first_commit)
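+
+# A hypothetical end-to-end sketch (not part of the upstream module): blame
+# data produced by fluent.migrate.blame.Blame feeds directly into
+# convert_blame_to_changesets. The repository and file paths are made up.
+#
+#     import hglib
+#     from fluent.migrate.blame import Blame
+#
+#     client = hglib.open('/path/to/l10n/repo')
+#     blame = Blame(client).attribution(['browser/chrome/old.properties'])
+#     for changeset in convert_blame_to_changesets(blame):
+#         print(changeset['author'], sorted(changeset['changes']))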
diff --git a/third_party/python/fluent.migrate/fluent/migrate/context.py b/third_party/python/fluent.migrate/fluent/migrate/context.py
new file mode 100644
index 0000000000..251a0ca206
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/migrate/context.py
@@ -0,0 +1,152 @@
+# coding=utf8
+from __future__ import unicode_literals
+from __future__ import absolute_import
+
+import logging
+
+import fluent.syntax.ast as FTL
+from fluent.migrate.util import fold
+
+from .transforms import Source
+from .util import get_message, skeleton
+from .errors import (
+ EmptyLocalizationError,
+ UnreadableReferenceError,
+)
+from ._context import InternalContext
+
+
+__all__ = [
+ 'EmptyLocalizationError',
+ 'UnreadableReferenceError',
+ 'MigrationContext',
+]
+
+
+class MigrationContext(InternalContext):
+ """Stateful context for merging translation resources.
+
+ `MigrationContext` must be configured with the target locale and the
+ directory locations of the input data.
+
+ The transformation takes four types of input data:
+
+ - The en-US FTL reference files which will be used as templates for
+ message order, comments and sections. If the reference_dir is None,
+ the migration will create Messages and Terms in the order given by
+ the transforms.
+
+ - The current FTL files for the given locale.
+
+ - A list of `FTL.Message` or `FTL.Term` objects some of whose nodes
+ are special helper or transform nodes:
+
+ helpers: VARIABLE_REFERENCE, MESSAGE_REFERENCE, TERM_REFERENCE
+ transforms: COPY, REPLACE_IN_TEXT, REPLACE, PLURALS, CONCAT
+ fluent value helper: COPY_PATTERN
+
+ The legacy (DTD, properties) translation files are deduced by the
+ dependencies in the transforms. The translations from these files will be
+ read from the localization_dir and transformed into FTL and merged
+ into the existing FTL files for the given language.
+ """
+
+ def __init__(
+ self, locale, reference_dir, localization_dir, enforce_translated=False
+ ):
+ super(MigrationContext, self).__init__(
+ locale, reference_dir, localization_dir,
+ enforce_translated=enforce_translated
+ )
+ self.locale = locale
+ # Paths to directories with input data, relative to CWD.
+ self.reference_dir = reference_dir
+ self.localization_dir = localization_dir
+
+ # A dict whose keys are `(path, key)` tuples corresponding to target
+ # FTL translations, and values are sets of `(path, key)` tuples
+ # corresponding to localized entities which will be migrated.
+ self.dependencies = {}
+
+ def add_transforms(self, target, reference, transforms):
+ """Define transforms for target using reference as template.
+
+ `target` is a path of the destination FTL file relative to the
+ localization directory. `reference` is a path to the template FTL
+ file relative to the reference directory.
+
+ Each transform is an extended FTL node with `Transform` nodes as some
+ values. Transforms are stored in their lazy AST form until
+ `merge_changeset` is called, at which point they are evaluated to real
+ FTL nodes with migrated translations.
+
+ Each transform is scanned for `Source` nodes which will be used to
+ build the list of dependencies for the transformed message.
+
+ For transforms that merely copy legacy messages or Fluent patterns,
+ using `fluent.migrate.helpers.transforms_from` is recommended.
+ """
+ def get_sources(acc, cur):
+ if isinstance(cur, Source):
+ acc.add((cur.path, cur.key))
+ return acc
+
+ if self.reference_dir is None:
+ # Add skeletons to resource body for each transform
+ # if there's no reference.
+ reference_ast = self.reference_resources.get(target)
+ if reference_ast is None:
+ reference_ast = FTL.Resource()
+ reference_ast.body.extend(
+ skeleton(transform) for transform in transforms
+ )
+ else:
+ reference_ast = self.read_reference_ftl(reference)
+ self.reference_resources[target] = reference_ast
+
+ for node in transforms:
+ ident = node.id.name
+ # Scan `node` for `Source` nodes and collect the information they
+ # store into a set of dependencies.
+ dependencies = fold(get_sources, node, set())
+ # Set these sources as dependencies for the current transform.
+ self.dependencies[(target, ident)] = dependencies
+
+            # The target Fluent message should exist in the reference file. If
+            # it doesn't, it's probably a typo. This check only applies when
+            # a reference directory is configured.
+ if self.reference_dir is None:
+ continue
+ if get_message(reference_ast.body, ident) is None:
+ logger = logging.getLogger('migrate')
+ logger.warning(
+ '{} "{}" was not found in {}'.format(
+ type(node).__name__, ident, reference))
+
+ # Keep track of localization resource paths which were defined as
+ # sources in the transforms.
+ expected_paths = set()
+
+ # Read all legacy translation files defined in Source transforms. This
+ # may fail but a single missing legacy resource doesn't mean that the
+ # migration can't succeed.
+ for dependencies in self.dependencies.values():
+ for path in set(path for path, _ in dependencies):
+ expected_paths.add(path)
+ self.maybe_add_localization(path)
+
+ # However, if all legacy resources are missing, bail out early. There
+ # are no translations to migrate. We'd also get errors in hg annotate.
+ if len(expected_paths) > 0 and len(self.localization_resources) == 0:
+ error_message = 'No localization files were found'
+ logging.getLogger('migrate').error(error_message)
+ raise EmptyLocalizationError(error_message)
+
+ # Add the current transforms to any other transforms added earlier for
+ # this path.
+ path_transforms = self.transforms.setdefault(target, [])
+ path_transforms += transforms
+
+ if target not in self.target_resources:
+ target_ast = self.read_localization_ftl(target)
+ self.target_resources[target] = target_ast
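+
+# A minimal, hypothetical migration recipe (not part of the upstream module)
+# showing the `add_transforms` API documented above together with
+# `transforms_from`. The bug number, paths and keys are illustrative only.
+#
+#     from fluent.migrate.helpers import transforms_from
+#
+#     def migrate(ctx):
+#         """Bug 0000000 - Migrate example strings to Fluent, part {index}."""
+#         ctx.add_transforms(
+#             'browser/browser/example.ftl',
+#             'browser/browser/example.ftl',
+#             transforms_from("""
+#     example-title = { COPY("browser/chrome/example.dtd", "exampleTitle") }
+#     """))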
diff --git a/third_party/python/fluent.migrate/fluent/migrate/errors.py b/third_party/python/fluent.migrate/fluent/migrate/errors.py
new file mode 100644
index 0000000000..dcc3025377
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/migrate/errors.py
@@ -0,0 +1,22 @@
+class SkipTransform(RuntimeError):
+ pass
+
+
+class MigrationError(ValueError):
+ pass
+
+
+class EmptyLocalizationError(MigrationError):
+ pass
+
+
+class NotSupportedError(MigrationError):
+ pass
+
+
+class UnreadableReferenceError(MigrationError):
+ pass
+
+
+class InvalidTransformError(MigrationError):
+ pass
diff --git a/third_party/python/fluent.migrate/fluent/migrate/evaluator.py b/third_party/python/fluent.migrate/fluent/migrate/evaluator.py
new file mode 100644
index 0000000000..90c626f933
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/migrate/evaluator.py
@@ -0,0 +1,28 @@
+from fluent.syntax import ast as FTL
+from fluent.syntax.visitor import Transformer
+
+from .transforms import Transform
+
+
+class Evaluator(Transformer):
+ """An AST transformer for evaluating migration Transforms.
+
+ An AST transformer (i.e. a visitor capable of modifying the AST) which
+ walks an AST hierarchy and evaluates nodes which are migration Transforms.
+ """
+
+ def __init__(self, ctx):
+ self.ctx = ctx
+
+ def visit(self, node):
+ if not isinstance(node, FTL.BaseNode):
+ return node
+
+ if isinstance(node, Transform):
+ # Some transforms don't expect other transforms as children.
+ # Evaluate the children first.
+ transform = self.generic_visit(node)
+ # Then, evaluate this transform.
+ return transform(self.ctx)
+
+ return self.generic_visit(node)
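+
+# A hypothetical sketch (not part of the upstream module): within the
+# migration context, Transforms are resolved to plain Fluent AST by this
+# visitor, e.g.
+#
+#     evaluator = Evaluator(ctx)
+#     pattern = evaluator.visit(COPY('browser/chrome/old.dtd', 'old.key'))
+#
+# where `pattern` is an FTL.Pattern containing the migrated text.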
diff --git a/third_party/python/fluent.migrate/fluent/migrate/helpers.py b/third_party/python/fluent.migrate/fluent/migrate/helpers.py
new file mode 100644
index 0000000000..6a8133661a
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/migrate/helpers.py
@@ -0,0 +1,150 @@
+# coding=utf8
+"""Fluent AST helpers.
+
+The functions defined in this module offer a shorthand for defining common AST
+nodes.
+
+They take a string argument and immediately return a corresponding AST node.
+(As opposed to Transforms which are AST nodes on their own and only return the
+migrated AST nodes when they are evaluated by a MigrationContext.) """
+
+from __future__ import unicode_literals
+from __future__ import absolute_import
+
+from fluent.syntax import FluentParser, ast as FTL
+from fluent.syntax.visitor import Transformer
+from .transforms import Transform, CONCAT, COPY, COPY_PATTERN
+from .errors import NotSupportedError, InvalidTransformError
+
+
+def VARIABLE_REFERENCE(name):
+ """Create an ExternalArgument expression."""
+
+ return FTL.VariableReference(
+ id=FTL.Identifier(name)
+ )
+
+
+def MESSAGE_REFERENCE(name):
+ """Create a MessageReference expression.
+
+ If the passed name contains a `.`, we're generating
+ a message reference with an attribute.
+ """
+ if '.' in name:
+ name, attribute = name.split('.')
+ attribute = FTL.Identifier(attribute)
+ else:
+ attribute = None
+
+ return FTL.MessageReference(
+ id=FTL.Identifier(name),
+ attribute=attribute,
+ )
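+
+# For example (illustrative identifier, not part of the upstream module):
+#
+#     MESSAGE_REFERENCE("history-panelmenu.label")
+#
+# yields a reference to the `label` attribute of the `history-panelmenu`
+# message, serialized as { history-panelmenu.label }.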
+
+
+def TERM_REFERENCE(name):
+ """Create a TermReference expression."""
+
+ return FTL.TermReference(
+ id=FTL.Identifier(name)
+ )
+
+
+class IntoTranforms(Transformer):
+ IMPLICIT_TRANSFORMS = ("CONCAT",)
+ FORBIDDEN_TRANSFORMS = ("PLURALS", "REPLACE", "REPLACE_IN_TEXT")
+
+ def __init__(self, substitutions):
+ self.substitutions = substitutions
+
+ def visit_Junk(self, node):
+ anno = node.annotations[0]
+ raise InvalidTransformError(
+ "Transform contains parse error: {}, at {}".format(
+ anno.message, anno.span.start))
+
+ def visit_FunctionReference(self, node):
+ name = node.id.name
+ if name in self.IMPLICIT_TRANSFORMS:
+ raise NotSupportedError(
+ "{} may not be used with transforms_from(). It runs "
+ "implicitly on all Patterns anyways.".format(name))
+ if name in self.FORBIDDEN_TRANSFORMS:
+ raise NotSupportedError(
+ "{} may not be used with transforms_from(). It requires "
+ "additional logic in Python code.".format(name))
+ if name in ('COPY', 'COPY_PATTERN'):
+ args = (
+ self.into_argument(arg) for arg in node.arguments.positional
+ )
+ kwargs = {
+ arg.name.name: self.into_argument(arg.value)
+ for arg in node.arguments.named}
+ if name == 'COPY':
+ return COPY(*args, **kwargs)
+ return COPY_PATTERN(*args, **kwargs)
+ return self.generic_visit(node)
+
+ def visit_Placeable(self, node):
+ """If the expression is a Transform, replace this Placeable
+ with the Transform it's holding.
+ Transforms evaluate to Patterns, which are flattened as
+ elements of Patterns in Transform.pattern_of, but only
+ one level deep.
+ """
+ node = self.generic_visit(node)
+ if isinstance(node.expression, Transform):
+ return node.expression
+ return node
+
+ def visit_Pattern(self, node):
+ """Replace the Pattern with CONCAT which is more accepting of its
+ elements. CONCAT takes PatternElements, Expressions and other
+ Patterns (e.g. returned from evaluating transforms).
+ """
+ node = self.generic_visit(node)
+ return CONCAT(*node.elements)
+
+ def into_argument(self, node):
+ """Convert AST node into an argument to migration transforms."""
+ if isinstance(node, FTL.StringLiteral):
+ # Special cases for booleans which don't exist in Fluent.
+ if node.value == "True":
+ return True
+ if node.value == "False":
+ return False
+ return node.value
+ if isinstance(node, FTL.MessageReference):
+ try:
+ return self.substitutions[node.id.name]
+ except KeyError:
+ raise InvalidTransformError(
+ "Unknown substitution in COPY: {}".format(
+ node.id.name))
+ else:
+ raise InvalidTransformError(
+ "Invalid argument passed to COPY: {}".format(
+ type(node).__name__))
+
+
+def transforms_from(ftl, **substitutions):
+ """Parse FTL code into a list of Message nodes with Transforms.
+
+ The FTL may use a fabricated COPY function inside of placeables which
+    will be converted into an actual COPY migration transform.
+
+ new-key = Hardcoded text { COPY("filepath.dtd", "string.key") }
+
+ For convenience, COPY may also refer to transforms_from's keyword
+ arguments via the MessageReference syntax:
+
+ transforms_from(\"""
+ new-key = Hardcoded text { COPY(file_dtd, "string.key") }
+ \""", file_dtd="very/long/path/to/a/file.dtd")
+
+ """
+
+ parser = FluentParser(with_spans=False)
+ resource = parser.parse(ftl)
+ return IntoTranforms(substitutions).visit(resource).body
diff --git a/third_party/python/fluent.migrate/fluent/migrate/merge.py b/third_party/python/fluent.migrate/fluent/migrate/merge.py
new file mode 100644
index 0000000000..b4575f0ca7
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/migrate/merge.py
@@ -0,0 +1,59 @@
+# coding=utf8
+from __future__ import unicode_literals
+from __future__ import absolute_import
+
+import fluent.syntax.ast as FTL
+
+from .errors import SkipTransform
+from .util import get_message, get_transform
+
+
+def merge_resource(ctx, reference, current, transforms, in_changeset):
+ """Transform legacy translations into FTL.
+
+ Use the `reference` FTL AST as a template. For each en-US string in the
+ reference, first check for an existing translation in the current FTL
+ `localization` and use it if it's present; then if the string has
+ a transform defined in the migration specification and if it's in the
+ currently processed changeset, evaluate the transform.
+ """
+
+ def merge_body(body):
+ return [
+ entry
+ for entry in map(merge_entry, body)
+ if entry is not None
+ ]
+
+ def merge_entry(entry):
+ # All standalone comments will be merged.
+ if isinstance(entry, FTL.BaseComment):
+ return entry
+
+ # Ignore Junk
+ if isinstance(entry, FTL.Junk):
+ return None
+
+ ident = entry.id.name
+
+ # If the message is present in the existing localization, we add it to
+ # the resulting resource. This ensures consecutive merges don't remove
+ # translations but rather create supersets of them.
+ existing = get_message(current.body, ident)
+ if existing is not None:
+ return existing
+
+ transform = get_transform(transforms, ident)
+
+ # Make sure this message is supposed to be migrated as part of the
+ # current changeset.
+ if transform is not None and in_changeset(ident):
+ if transform.comment is None:
+ transform.comment = entry.comment
+ try:
+ return ctx.evaluate(transform)
+ except SkipTransform:
+ return None
+
+ body = merge_body(reference.body)
+ return FTL.Resource(body)
diff --git a/third_party/python/fluent.migrate/fluent/migrate/tool.py b/third_party/python/fluent.migrate/fluent/migrate/tool.py
new file mode 100755
index 0000000000..555a44f024
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/migrate/tool.py
@@ -0,0 +1,184 @@
+# coding=utf8
+
+import os
+import logging
+import argparse
+from contextlib import contextmanager
+import importlib
+import sys
+
+import hglib
+import six
+
+from fluent.migrate.context import MigrationContext
+from fluent.migrate.errors import MigrationError
+from fluent.migrate.changesets import convert_blame_to_changesets
+from fluent.migrate.blame import Blame
+
+
+@contextmanager
+def dont_write_bytecode():
+ _dont_write_bytecode = sys.dont_write_bytecode
+ sys.dont_write_bytecode = True
+ yield
+ sys.dont_write_bytecode = _dont_write_bytecode
+
+
+class Migrator(object):
+ def __init__(self, locale, reference_dir, localization_dir, dry_run):
+ self.locale = locale
+ self.reference_dir = reference_dir
+ self.localization_dir = localization_dir
+ self.dry_run = dry_run
+ self._client = None
+
+ @property
+ def client(self):
+ if self._client is None:
+ self._client = hglib.open(self.localization_dir, 'utf-8')
+ return self._client
+
+ def close(self):
+ # close hglib.client, if we cached one.
+ if self._client is not None:
+ self._client.close()
+
+ def run(self, migration):
+ print('\nRunning migration {} for {}'.format(
+ migration.__name__, self.locale))
+
+ # For each migration create a new context.
+ ctx = MigrationContext(
+ self.locale, self.reference_dir, self.localization_dir
+ )
+
+ try:
+ # Add the migration spec.
+ migration.migrate(ctx)
+ except MigrationError as e:
+ print(' Skipping migration {} for {}:\n {}'.format(
+ migration.__name__, self.locale, e))
+ return
+
+ # Keep track of how many changesets we're committing.
+ index = 0
+ description_template = migration.migrate.__doc__
+
+ # Annotate localization files used as sources by this migration
+ # to preserve attribution of translations.
+ files = ctx.localization_resources.keys()
+ blame = Blame(self.client).attribution(files)
+ changesets = convert_blame_to_changesets(blame)
+ known_legacy_translations = set()
+
+ for changeset in changesets:
+ snapshot = self.snapshot(
+ ctx, changeset['changes'], known_legacy_translations
+ )
+ if not snapshot:
+ continue
+ self.serialize_changeset(snapshot)
+ index += 1
+ self.commit_changeset(
+ description_template, changeset['author'], index
+ )
+
+ def snapshot(self, ctx, changes_in_changeset, known_legacy_translations):
+ '''Run the migration for the changeset, with the set of
+ this and all prior legacy translations.
+ '''
+ known_legacy_translations.update(changes_in_changeset)
+ return ctx.serialize_changeset(
+ changes_in_changeset,
+ known_legacy_translations
+ )
+
+ def serialize_changeset(self, snapshot):
+ '''Write serialized FTL files to disk.'''
+ for path, content in six.iteritems(snapshot):
+ fullpath = os.path.join(self.localization_dir, path)
+ print(' Writing to {}'.format(fullpath))
+ if not self.dry_run:
+ fulldir = os.path.dirname(fullpath)
+ if not os.path.isdir(fulldir):
+ os.makedirs(fulldir)
+                with open(fullpath, 'wb') as f:
+                    f.write(content.encode('utf8'))
+
+ def commit_changeset(
+ self, description_template, author, index
+ ):
+ message = description_template.format(
+ index=index,
+ author=author
+ )
+
+ print(' Committing changeset: {}'.format(message))
+ if self.dry_run:
+ return
+ try:
+ self.client.commit(
+ message, user=author.encode('utf-8'), addremove=True
+ )
+ except hglib.error.CommandError as err:
+ print(' WARNING: hg commit failed ({})'.format(err))
+
+
+def main(locale, reference_dir, localization_dir, migrations, dry_run):
+ """Run migrations and commit files with the result."""
+ migrator = Migrator(locale, reference_dir, localization_dir, dry_run)
+
+ for migration in migrations:
+ migrator.run(migration)
+
+ migrator.close()
+
+
+def cli():
+ parser = argparse.ArgumentParser(
+ description='Migrate translations to FTL.'
+ )
+ parser.add_argument(
+ 'migrations', metavar='MIGRATION', type=str, nargs='+',
+ help='migrations to run (Python modules)'
+ )
+ parser.add_argument(
+ '--locale', '--lang', type=str,
+ help='target locale code (--lang is deprecated)'
+ )
+ parser.add_argument(
+ '--reference-dir', type=str,
+ help='directory with reference FTL files'
+ )
+ parser.add_argument(
+ '--localization-dir', type=str,
+ help='directory for localization files'
+ )
+ parser.add_argument(
+ '--dry-run', action='store_true',
+ help='do not write to disk nor commit any changes'
+ )
+ parser.set_defaults(dry_run=False)
+
+ logger = logging.getLogger('migrate')
+ logger.setLevel(logging.INFO)
+
+ args = parser.parse_args()
+
+    # Don't byte-compile migrations.
+    # They're not our code, and they run infrequently.
+    with dont_write_bytecode():
+        # list() forces the imports to happen while bytecode writing is
+        # still disabled; on Python 3, map() alone would import lazily later.
+        migrations = list(map(importlib.import_module, args.migrations))
+
+ main(
+ locale=args.locale,
+ reference_dir=args.reference_dir,
+ localization_dir=args.localization_dir,
+ migrations=migrations,
+ dry_run=args.dry_run
+ )
+
+
+if __name__ == '__main__':
+ cli()
diff --git a/third_party/python/fluent.migrate/fluent/migrate/transforms.py b/third_party/python/fluent.migrate/fluent/migrate/transforms.py
new file mode 100644
index 0000000000..8df5744294
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/migrate/transforms.py
@@ -0,0 +1,572 @@
+# coding=utf8
+"""Migration Transforms.
+
+Transforms are AST nodes which describe how legacy translations should be
+migrated. They are created inert and only return the migrated AST nodes when
+they are evaluated by a MigrationContext.
+
+All Transforms evaluate to Fluent Patterns. This makes them suitable for
+defining migrations of values of message, attributes and variants. The special
+CONCAT Transform is capable of joining multiple Patterns returned by evaluating
+other Transforms into a single Pattern. It can also concatenate Pattern
+elements: TextElements and Placeables.
+
+The COPY, REPLACE and PLURALS Transforms inherit from Source which is a special
+AST Node defining the location (the file path and the id) of the legacy
+translation. During the migration, the current MigrationContext scans the
+migration spec for Source nodes and extracts the information about all legacy
+translations being migrated. For instance,
+
+ COPY('file.dtd', 'hello')
+
+is equivalent to:
+
+ FTL.Pattern([
+ Source('file.dtd', 'hello')
+ ])
+
+Sometimes it's useful to work with text rather than (path, key) source
+definitions. This is the case when the migrated translation requires some
+hardcoded text, e.g. <a> and </a> when multiple translations become a single
+one with a DOM overlay. In such cases it's best to use FTL.TextElements:
+
+ FTL.Message(
+ id=FTL.Identifier('update-failed'),
+ value=CONCAT(
+ COPY('aboutDialog.dtd', 'update.failed.start'),
+ FTL.TextElement('<a>'),
+ COPY('aboutDialog.dtd', 'update.failed.linkText'),
+ FTL.TextElement('</a>'),
+ COPY('aboutDialog.dtd', 'update.failed.end'),
+ )
+ )
+
+The REPLACE_IN_TEXT Transform also takes TextElements as input, making it
+possible to pass it as the foreach function of the PLURALS Transform. In the
+example below, each slice of the plural string is converted into a
+TextElement by PLURALS and then run through the REPLACE_IN_TEXT transform.
+
+ FTL.Message(
+ FTL.Identifier('delete-all'),
+ value=PLURALS(
+ 'aboutDownloads.dtd',
+ 'deleteAll',
+ VARIABLE_REFERENCE('num'),
+ lambda text: REPLACE_IN_TEXT(
+ text,
+ {
+ '#1': VARIABLE_REFERENCE('num')
+ }
+ )
+ )
+ )
+"""
+
+from __future__ import unicode_literals
+from __future__ import absolute_import
+import re
+
+from fluent.syntax import ast as FTL
+from fluent.syntax.visitor import Transformer
+from .errors import NotSupportedError
+
+
+def chain_elements(elements):
+ '''Flatten a list of FTL nodes into an iterator over PatternElements.'''
+ for element in elements:
+ if isinstance(element, FTL.Pattern):
+ # PY3 yield from element.elements
+ for child in element.elements:
+ yield child
+ elif isinstance(element, FTL.PatternElement):
+ yield element
+ elif isinstance(element, FTL.Expression):
+ yield FTL.Placeable(element)
+ else:
+ raise RuntimeError(
+ 'Expected Pattern, PatternElement or Expression')
+
+
+re_leading_ws = re.compile(
+ r'\A(?:(?P<whitespace> +)(?P<text>.*?)|(?P<block_text>\n.*?))\Z',
+ re.S,
+)
+re_trailing_ws = re.compile(
+ r'\A(?:(?P<text>.*?)(?P<whitespace> +)|(?P<block_text>.*\n))\Z',
+ re.S
+)
+
+
+def extract_whitespace(regex, element):
+ '''Extract leading or trailing whitespace from a TextElement.
+
+ Return a tuple of (Placeable, TextElement) in which the Placeable
+ encodes the extracted whitespace as a StringLiteral and the
+ TextElement has the same amount of whitespace removed. The
+ Placeable with the extracted whitespace is always returned first.
+ If the element starts or ends with a newline, add an empty
+ StringLiteral.
+ '''
+ match = re.search(regex, element.value)
+ if match:
+        # If the whitespace group is None, the block_text alternative
+        # matched, i.e. the element starts or ends with a newline. Add an
+        # empty { "" } placeable.
+ whitespace = match.group('whitespace') or ''
+ placeable = FTL.Placeable(FTL.StringLiteral(whitespace))
+ if whitespace == element.value:
+ return placeable, None
+ else:
+ # Either text or block_text matched the rest.
+ text = match.group('text') or match.group('block_text')
+ return placeable, FTL.TextElement(text)
+ else:
+ return None, element
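+
+# For example (illustrative value, not part of the upstream module):
+#
+#     extract_whitespace(re_leading_ws, FTL.TextElement('  Download'))
+#
+# returns a (Placeable, TextElement) pair: a { "  " } placeable holding the
+# two leading spaces and a TextElement with the value 'Download'.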
+
+
+class Transform(FTL.BaseNode):
+ def __call__(self, ctx):
+ raise NotImplementedError
+
+ @staticmethod
+ def pattern_of(*elements):
+ normalized = []
+
+ # Normalize text content: convert text content to TextElements, join
+ # adjacent text and prune empty. Text content is either existing
+ # TextElements or whitespace-only StringLiterals. This may result in
+ # leading and trailing whitespace being put back into TextElements if
+ # the new Pattern is built from existing Patterns (CONCAT(COPY...)).
+ # The leading and trailing whitespace of the new Pattern will be
+ # extracted later into new StringLiterals.
+ for element in chain_elements(elements):
+ if isinstance(element, FTL.TextElement):
+ text_content = element.value
+ elif isinstance(element, FTL.Placeable) \
+ and isinstance(element.expression, FTL.StringLiteral) \
+ and re.match(r'^ *$', element.expression.value):
+ text_content = element.expression.value
+ else:
+ # The element does not contain text content which should be
+ # normalized. It may be a number, a reference, or
+ # a StringLiteral which should be preserved in the Pattern.
+ normalized.append(element)
+ continue
+
+ previous = normalized[-1] if len(normalized) else None
+ if isinstance(previous, FTL.TextElement):
+ # Join adjacent TextElements.
+ previous.value += text_content
+ elif len(text_content) > 0:
+ # Normalize non-empty text to a TextElement.
+ normalized.append(FTL.TextElement(text_content))
+ else:
+ # Prune empty text.
+ pass
+
+ # Store empty values explicitly as {""}.
+ if len(normalized) == 0:
+ empty = FTL.Placeable(FTL.StringLiteral(''))
+ return FTL.Pattern([empty])
+
+ # Extract explicit leading whitespace into a StringLiteral.
+ if isinstance(normalized[0], FTL.TextElement):
+ ws, text = extract_whitespace(re_leading_ws, normalized[0])
+ normalized[:1] = [ws, text]
+
+ # Extract explicit trailing whitespace into a StringLiteral.
+ if isinstance(normalized[-1], FTL.TextElement):
+ ws, text = extract_whitespace(re_trailing_ws, normalized[-1])
+ normalized[-1:] = [text, ws]
+
+ return FTL.Pattern([
+ element
+ for element in normalized
+ if element is not None
+ ])
+
+
+class Source(Transform):
+ """Base class for Transforms that get translations from source files.
+
+ The contract is that the first argument is the source path, and the
+ second is a key representing legacy string IDs, or Fluent id.attr.
+ """
+ def __init__(self, path, key):
+ self.path = path
+ self.key = key
+
+
+class FluentSource(Source):
+ """Declare a Fluent source translation to be copied over.
+
+ When evaluated, it clones the Pattern of the parsed source.
+ """
+ def __init__(self, path, key):
+ if not path.endswith('.ftl'):
+ raise NotSupportedError(
+ 'Please use COPY to migrate from legacy files '
+ '({})'.format(path)
+ )
+ if key[0] == '-' and '.' in key:
+ raise NotSupportedError(
+                'Cannot migrate from Term Attributes, as they are '
+ 'locale-dependent ({})'.format(path)
+ )
+ super(FluentSource, self).__init__(path, key)
+
+ def __call__(self, ctx):
+ pattern = ctx.get_fluent_source_pattern(self.path, self.key)
+ return pattern.clone()
+
+
+class COPY_PATTERN(FluentSource):
+ """Create a Pattern with the translation value from the given source.
+
+ The given key can be a Message ID, Message ID.attribute_name, or
+ Term ID. Accessing Term attributes is not supported, as they're internal
+ to the localization.
+ """
+ pass
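+
+# For example (illustrative path and term, not part of the upstream module):
+#
+#     COPY_PATTERN('browser/branding/official/brand.ftl', '-brand-short-name')
+#
+# clones the value of the -brand-short-name term from the localized file.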
+
+
+class TransformPattern(FluentSource, Transformer):
+ """Base class for modifying a Fluent pattern as part of a migration.
+
+ Implement visit_* methods of the Transformer pattern to do the
+ actual modifications.
+ """
+ def __call__(self, ctx):
+ pattern = super(TransformPattern, self).__call__(ctx)
+ return self.visit(pattern)
+
+ def visit_Pattern(self, node):
+ # Make sure we're creating valid Patterns after restructuring
+ # transforms.
+ node = self.generic_visit(node)
+ pattern = Transform.pattern_of(*node.elements)
+ return pattern
+
+ def visit_Placeable(self, node):
+ # Ensure we have a Placeable with an expression still.
+ # Transforms could have replaced the expression with
+ # a Pattern or PatternElement, in which case we
+ # just pass that through.
+ # Patterns then get flattened by visit_Pattern.
+ node = self.generic_visit(node)
+ if isinstance(node.expression, (FTL.Pattern, FTL.PatternElement)):
+ return node.expression
+ return node
+
+
+class LegacySource(Source):
+ """Declare the source translation to be migrated with other transforms.
+
+ When evaluated, `Source` returns a TextElement with the content from the
+ source translation. Escaped characters are unescaped by the
+ compare-locales parser according to the file format:
+
+ - in properties files: \\uXXXX,
+ - in DTD files: known named, decimal, and hexadecimal HTML entities.
+
+ Consult the following files for the list of known named HTML entities:
+
+ https://github.com/python/cpython/blob/2.7/Lib/htmlentitydefs.py
+ https://github.com/python/cpython/blob/3.6/Lib/html/entities.py
+
+ By default, leading and trailing whitespace on each line as well as
+ leading and trailing empty lines will be stripped from the source
+ translation's content. Set `trim=False` to disable this behavior.
+ """
+
+ def __init__(self, path, key, trim=None):
+ if path.endswith('.ftl'):
+ raise NotSupportedError(
+ 'Please use COPY_PATTERN to migrate from Fluent files '
+ '({})'.format(path))
+
+ super(LegacySource, self).__init__(path, key)
+ self.trim = trim
+
+ def get_text(self, ctx):
+ return ctx.get_legacy_source(self.path, self.key)
+
+ @staticmethod
+ def trim_text(text):
+ # strip leading white-space from each line
+ text = re.sub('^[ \t]+', '', text, flags=re.M)
+ # strip trailing white-space from each line
+ text = re.sub('[ \t]+$', '', text, flags=re.M)
+ # strip leading and trailing empty lines
+ text = text.strip('\r\n')
+ return text
+
+ def __call__(self, ctx):
+ text = self.get_text(ctx)
+ if self.trim is not False:
+ text = self.trim_text(text)
+ return FTL.TextElement(text)
+
+
+class COPY(LegacySource):
+ """Create a Pattern with the translation value from the given source."""
+
+ def __call__(self, ctx):
+ element = super(COPY, self).__call__(ctx)
+ return Transform.pattern_of(element)
+
+
+PRINTF = re.compile(
+ r'%(?P<good>%|'
+ r'(?:(?P<number>[1-9][0-9]*)\$)?'
+ r'(?P<width>\*|[0-9]+)?'
+ r'(?P<prec>\.(?:\*|[0-9]+)?)?'
+ r'(?P<spec>[duxXosScpfg]))'
+)
+
+
+def number():
+ i = 1
+ while True:
+ yield i
+ i += 1
+
+
+def normalize_printf(text):
+ """Normalize printf arguments so that they're all numbered.
+ Gecko forbids mixing unnumbered and numbered ones, so
+ we just need to convert unnumbered to numbered ones.
+ Also remove ones that have zero width, as they're intended
+ to be removed from the output by the localizer.
+ """
+ next_number = number()
+
+ def normalized(match):
+ if match.group('good') == '%':
+ return '%'
+ hidden = match.group('width') == '0'
+ if match.group('number'):
+ return '' if hidden else match.group()
+ num = next(next_number)
+ return '' if hidden else '%{}${}'.format(num, match.group('spec'))
+
+ return PRINTF.sub(normalized, text)
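+
+# For example (illustrative string, not part of the upstream module):
+#
+#     normalize_printf('Downloading %S of %S')
+#
+# returns 'Downloading %1$S of %2$S', while a zero-width argument such as
+# '%0.S' is dropped from the output entirely.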
+
+
+class REPLACE_IN_TEXT(Transform):
+ """Create a Pattern from a TextElement and replace legacy placeables.
+
+ The original placeables are defined as keys on the `replacements` dict.
+ For each key the value must be defined as a FTL Pattern, Placeable,
+ TextElement or Expression to be interpolated.
+ """
+
+ def __init__(self, element, replacements, normalize_printf=False):
+ self.element = element
+ self.replacements = replacements
+ self.normalize_printf = normalize_printf
+
+ def __call__(self, ctx):
+ # For each specified replacement, find all indices of the original
+ # placeable in the source translation. If missing, the list of indices
+ # will be empty.
+ value = self.element.value
+ if self.normalize_printf:
+ value = normalize_printf(value)
+ key_indices = {
+ key: [m.start() for m in re.finditer(re.escape(key), value)]
+ for key in self.replacements.keys()
+ }
+
+ # Build a dict of indices to replacement keys.
+ keys_indexed = {}
+ for key, indices in key_indices.items():
+ for index in indices:
+ keys_indexed[index] = key
+
+ # Order the replacements by the position of the original placeable in
+ # the translation.
+ replacements = (
+ (key, ctx.evaluate(self.replacements[key]))
+ for index, key
+ in sorted(keys_indexed.items(), key=lambda x: x[0])
+ )
+
+ # A list of PatternElements built from the legacy translation and the
+ # FTL replacements. It may contain empty or adjacent TextElements.
+ elements = []
+ tail = value
+
+ # Convert original placeables and text into FTL Nodes. For each
+ # original placeable the translation will be partitioned around it and
+ # the text before it will be converted into an `FTL.TextElement` and
+ # the placeable will be replaced with its replacement.
+ for key, node in replacements:
+ before, key, tail = tail.partition(key)
+ elements.append(FTL.TextElement(before))
+ elements.append(node)
+
+ # Don't forget about the tail after the loop ends.
+ elements.append(FTL.TextElement(tail))
+ return Transform.pattern_of(*elements)
+
+
+class REPLACE(LegacySource):
+ """Create a Pattern with interpolations from given source.
+
+ Interpolations in the translation value from the given source will be
+ replaced with FTL placeables using the `REPLACE_IN_TEXT` transform.
+ """
+
+ def __init__(
+ self, path, key, replacements,
+ normalize_printf=False, **kwargs
+ ):
+ super(REPLACE, self).__init__(path, key, **kwargs)
+ self.replacements = replacements
+ self.normalize_printf = normalize_printf
+
+ def __call__(self, ctx):
+ element = super(REPLACE, self).__call__(ctx)
+ return REPLACE_IN_TEXT(
+ element, self.replacements,
+ normalize_printf=self.normalize_printf
+ )(ctx)
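+
+# For example (illustrative path and key, not part of the upstream module),
+# REPLACE combines a legacy source with REPLACE_IN_TEXT; VARIABLE_REFERENCE
+# comes from fluent.migrate.helpers:
+#
+#     REPLACE(
+#         'browser/chrome/downloads.properties',
+#         'download.status',
+#         {'%1$S': VARIABLE_REFERENCE('fileName')},
+#         normalize_printf=True,
+#     )
+#
+# migrates a legacy value of "Downloading %S" into the Fluent pattern
+# "Downloading { $fileName }".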
+
+
+class PLURALS(LegacySource):
+ """Create a Pattern with plurals from given source.
+
+ Build an `FTL.SelectExpression` with the supplied `selector` and variants
+ extracted from the source. The original translation should be a
+ semicolon-separated list of plural forms. Each form will be converted
+ into a TextElement and run through the `foreach` function, which should
+ return an `FTL.Node` or a `Transform`. By default, the `foreach` function
+ creates a valid Pattern from the TextElement passed into it.
+ """
+ DEFAULT_ORDER = ('zero', 'one', 'two', 'few', 'many', 'other')
+
+ def __init__(self, path, key, selector, foreach=Transform.pattern_of,
+ **kwargs):
+ super(PLURALS, self).__init__(path, key, **kwargs)
+ self.selector = selector
+ self.foreach = foreach
+
+ def __call__(self, ctx):
+ element = super(PLURALS, self).__call__(ctx)
+ selector = ctx.evaluate(self.selector)
+ keys = ctx.plural_categories
+ forms = [
+ FTL.TextElement(part)
+ for part in element.value.split(';')
+ ]
+
+        # The default CLDR form should be the last we have in DEFAULT_ORDER,
+        # usually `other`, but in some cases `many`. If we don't have a variant
+        # for that category, we append one, reusing the variant that comes last
+        # in CLDR order among those present in the legacy translation. That may
+        # or may not be the last variant as written.
+ default_key = [
+ key for key in reversed(self.DEFAULT_ORDER) if key in keys
+ ][0]
+
+ # Match keys to legacy forms in the order they are defined in Gecko's
+ # PluralForm.jsm. Filter out empty forms.
+ pairs = [
+ (key, var)
+ for key, var in zip(keys, forms)
+ if var.value
+ ]
+
+ # A special case for legacy translations which don't define any
+ # plural forms.
+ if len(pairs) == 0:
+ return Transform.pattern_of()
+
+ # A special case for languages with one plural category or one legacy
+ # variant. We don't need to insert a SelectExpression for them.
+ if len(pairs) == 1:
+ _, only_form = pairs[0]
+ only_variant = ctx.evaluate(self.foreach(only_form))
+ return Transform.pattern_of(only_variant)
+
+ # Make sure the default key is defined. If it's missing, use the last
+ # form (in CLDR order) found in the legacy translation.
+ pairs.sort(key=lambda pair: self.DEFAULT_ORDER.index(pair[0]))
+ last_key, last_form = pairs[-1]
+ if last_key != default_key:
+ pairs.append((default_key, last_form))
+
+ def createVariant(key, form):
+ # Run the legacy plural form through `foreach` which returns an
+ # `FTL.Node` describing the transformation required for each
+ # variant. Then evaluate it to a migrated FTL node.
+ value = ctx.evaluate(self.foreach(form))
+ return FTL.Variant(
+ key=FTL.Identifier(key),
+ value=value,
+ default=key == default_key
+ )
+
+ select = FTL.SelectExpression(
+ selector=selector,
+ variants=[
+ createVariant(key, form)
+ for key, form in pairs
+ ]
+ )
+
+ return Transform.pattern_of(select)
+
+
+class CONCAT(Transform):
+ """Create a new Pattern from Patterns, PatternElements and Expressions.
+
+ When called with at least two elements, `CONCAT` disables the trimming
+ behavior of the elements which are subclasses of `LegacySource` by
+ setting `trim=False`, unless `trim` has already been set explicitly. The
+ following two `CONCAT` calls are equivalent:
+
+ CONCAT(
+ FTL.TextElement("Hello"),
+ COPY("file.properties", "hello")
+ )
+
+ CONCAT(
+ FTL.TextElement("Hello"),
+ COPY("file.properties", "hello", trim=False)
+ )
+
+ Set `trim=True` explicitly to force trimming:
+
+ CONCAT(
+ FTL.TextElement("Hello "),
+ COPY("file.properties", "hello", trim=True)
+ )
+
+ When called with a single element and when the element is a subclass of
+ `LegacySource`, the trimming behavior is not changed. The following two
+ transforms are equivalent:
+
+ CONCAT(COPY("file.properties", "hello"))
+
+ COPY("file.properties", "hello")
+ """
+
+ def __init__(self, *elements, **kwargs):
+ # We want to support both passing elements as *elements in the
+ # migration specs and as elements=[]. The latter is used by
+ # FTL.BaseNode.traverse when it recreates the traversed node using its
+ # attributes as kwargs.
+ self.elements = list(kwargs.get('elements', elements))
+
+ # We want to make CONCAT(COPY()) equivalent to COPY() so that it's
+ # always safe (no-op) to wrap transforms in a CONCAT. This is used by
+ # the implementation of transforms_from.
+ if len(self.elements) > 1:
+ for elem in self.elements:
+ # Only change trim if it hasn't been set explicitly.
+ if isinstance(elem, LegacySource) and elem.trim is None:
+ elem.trim = False
+
+ def __call__(self, ctx):
+ return Transform.pattern_of(*self.elements)
diff --git a/third_party/python/fluent.migrate/fluent/migrate/util.py b/third_party/python/fluent.migrate/fluent/migrate/util.py
new file mode 100644
index 0000000000..7fcd1c1b5c
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/migrate/util.py
@@ -0,0 +1,114 @@
+# coding=utf8
+from __future__ import unicode_literals
+from __future__ import absolute_import
+
+import textwrap
+
+import fluent.syntax.ast as FTL
+from fluent.syntax.parser import FluentParser, FluentParserStream
+
+
+fluent_parser = FluentParser(with_spans=False)
+
+
+def parse(Parser, string):
+ if Parser is FluentParser:
+ return fluent_parser.parse(string)
+
+ # Parsing a legacy resource.
+
+ # Parse the string into the internal Context.
+ parser = Parser()
+    # compare-locales expects encoded byte strings.
+ parser.readContents(string.encode('utf8'))
+ # Transform the parsed result which is an iterator into a dict.
+ return {ent.key: ent for ent in parser}
+
+
+def ftl_resource_to_ast(code):
+ return fluent_parser.parse(ftl(code))
+
+
+def ftl_resource_to_json(code):
+ return fluent_parser.parse(ftl(code)).to_json()
+
+
+def ftl_pattern_to_json(code):
+ ps = FluentParserStream(ftl(code))
+ return fluent_parser.maybe_get_pattern(ps).to_json()
+
+
+def to_json(merged_iter):
+ return {
+ path: resource.to_json()
+ for path, resource in merged_iter
+ }
+
+
+LOCALIZABLE_ENTRIES = (FTL.Message, FTL.Term)
+
+
+def get_message(body, ident):
+ """Get message called `ident` from the `body` iterable."""
+ for entity in body:
+ if isinstance(entity, LOCALIZABLE_ENTRIES) and entity.id.name == ident:
+ return entity
+
+
+def get_transform(body, ident):
+ """Get entity called `ident` from the `body` iterable."""
+ for transform in body:
+ if transform.id.name == ident:
+ return transform
+
+
+def skeleton(node):
+ """Create a skeleton copy of the given node.
+
+ For localizable entries, the value is None and the attributes are {}.
+ That's not a valid Fluent entry, so it requires further manipulation to
+ set values and/or attributes.
+ """
+ if isinstance(node, LOCALIZABLE_ENTRIES):
+ return type(node)(id=node.id.clone(), value=None)
+ return node.clone()
+
+
+def ftl(code):
+ """Nicer indentation for FTL code.
+
+ The code returned by this function is meant to be compared against the
+ output of the FTL Serializer. The input code will end with a newline to
+ match the output of the serializer.
+ """
+
+ # The code might be triple-quoted.
+ code = code.lstrip('\n')
+
+ return textwrap.dedent(code)
+
+
+def fold(fun, node, init):
+ """Reduce `node` to a single value using `fun`.
+
+ Apply `fun` against an accumulator and each subnode of `node` (in postorder
+ traversal) to reduce it to a single value.
+ """
+
+ def fold_(vals, acc):
+ if not vals:
+ return acc
+
+ head = list(vals)[0]
+ tail = list(vals)[1:]
+
+ if isinstance(head, FTL.BaseNode):
+ acc = fold(fun, head, acc)
+ if isinstance(head, list):
+ acc = fold_(head, acc)
+ if isinstance(head, dict):
+ acc = fold_(head.values(), acc)
+
+ return fold_(tail, fun(acc, head))
+
+ return fold_(vars(node).values(), init)
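+
+# A hypothetical sketch (not part of the upstream module) of how `fold` is
+# used elsewhere in this package: collecting the (path, key) pairs of every
+# Source transform referenced by a migration node.
+#
+#     from fluent.migrate.transforms import Source
+#
+#     def get_sources(acc, cur):
+#         if isinstance(cur, Source):
+#             acc.add((cur.path, cur.key))
+#         return acc
+#
+#     dependencies = fold(get_sources, node, set())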
diff --git a/third_party/python/fluent.migrate/fluent/migrate/validator.py b/third_party/python/fluent.migrate/fluent/migrate/validator.py
new file mode 100644
index 0000000000..8189613c31
--- /dev/null
+++ b/third_party/python/fluent.migrate/fluent/migrate/validator.py
@@ -0,0 +1,339 @@
+# coding=utf8
+from __future__ import absolute_import
+
+import argparse
+import ast
+import six
+from six.moves import zip_longest
+
+from fluent.migrate import transforms
+from fluent.migrate.errors import MigrationError
+from fluent.migrate.helpers import transforms_from
+from fluent.syntax import ast as FTL
+from fluent.syntax.visitor import Visitor
+from compare_locales import mozpath
+
+
+class MigrateNotFoundException(Exception):
+ pass
+
+
+class BadContextAPIException(Exception):
+ pass
+
+
+def process_assign(node, context):
+ if isinstance(node.value, ast.Str):
+ val = node.value.s
+ elif isinstance(node.value, ast.Name):
+ val = context.get(node.value.id)
+ elif isinstance(node.value, ast.Call):
+ val = node.value
+ if val is None:
+ return
+ for target in node.targets:
+ if isinstance(target, ast.Name):
+ context[target.id] = val
+
+
+class Validator(object):
+ """Validate a migration recipe
+
+ Extract information from the migration recipe about which files to
+ migrate from, and which files to migrate to.
+ Also check for errors in the recipe, or bad API usage.
+ """
+
+ @classmethod
+ def validate(cls, path, code=None):
+ if code is None:
+ with open(path) as fh:
+ code = fh.read()
+ validator = cls(code, path)
+ return validator.inspect()
+
+ def __init__(self, code, path):
+ self.ast = ast.parse(code, path)
+
+ def inspect(self):
+ migrate_func = None
+ global_assigns = {}
+ for top_level in ast.iter_child_nodes(self.ast):
+ if (
+ isinstance(top_level, ast.FunctionDef)
+ and top_level.name == 'migrate'
+ ):
+ if migrate_func:
+ raise MigrateNotFoundException(
+ 'Duplicate definition of migrate'
+ )
+ migrate_func = top_level
+ details = self.inspect_migrate(migrate_func, global_assigns)
+ if isinstance(top_level, ast.Assign):
+ process_assign(top_level, global_assigns)
+ if isinstance(top_level, (ast.Import, ast.ImportFrom)):
+ if 'module' in top_level._fields:
+ module = top_level.module
+ else:
+ module = None
+ for alias in top_level.names:
+ asname = alias.asname or alias.name
+ dotted = alias.name
+ if module:
+ dotted = '{}.{}'.format(module, dotted)
+ global_assigns[asname] = dotted
+ if not migrate_func:
+ raise MigrateNotFoundException(
+ 'migrate function not found'
+ )
+ return details
+
+ def inspect_migrate(self, migrate_func, global_assigns):
+ if (
+ len(migrate_func.args.args) != 1 or
+ any(
+ getattr(migrate_func.args, arg_field)
+ for arg_field in migrate_func.args._fields
+ if arg_field != 'args'
+ )
+ ):
+ raise MigrateNotFoundException(
+ 'migrate takes only one positional argument'
+ )
+ arg = migrate_func.args.args[0]
+ if isinstance(arg, ast.Name):
+ ctx_var = arg.id # python 2
+ else:
+ ctx_var = arg.arg # python 3
+ visitor = MigrateAnalyzer(ctx_var, global_assigns)
+ visitor.visit(migrate_func)
+ return {
+ 'references': visitor.references,
+ 'issues': visitor.issues,
+ }
+
+
+def full_name(node, global_assigns):
+ leafs = []
+ while isinstance(node, ast.Attribute):
+ leafs.append(node.attr)
+ node = node.value
+ if isinstance(node, ast.Name):
+ leafs.append(global_assigns.get(node.id, node.id))
+ return '.'.join(reversed(leafs))
+
+
+PATH_TYPES = six.string_types + (ast.Call,)
+
+
+class MigrateAnalyzer(ast.NodeVisitor):
+ def __init__(self, ctx_var, global_assigns):
+ super(MigrateAnalyzer, self).__init__()
+ self.ctx_var = ctx_var
+ self.global_assigns = global_assigns
+ self.depth = 0
+ self.issues = []
+ self.references = set()
+
+ def generic_visit(self, node):
+ self.depth += 1
+ super(MigrateAnalyzer, self).generic_visit(node)
+ self.depth -= 1
+
+ def visit_Assign(self, node):
+ if self.depth == 1:
+ process_assign(node, self.global_assigns)
+ self.generic_visit(node)
+
+ def visit_Attribute(self, node):
+ if isinstance(node.value, ast.Name) and node.value.id == self.ctx_var:
+ if node.attr not in (
+ 'add_transforms',
+ 'locale',
+ ):
+ raise BadContextAPIException(
+ 'Unexpected attribute access on {}.{}'.format(
+ self.ctx_var, node.attr
+ )
+ )
+ self.generic_visit(node)
+
+ def visit_Call(self, node):
+ if (
+ isinstance(node.func, ast.Attribute) and
+ isinstance(node.func.value, ast.Name) and
+ node.func.value.id == self.ctx_var
+ ):
+ return self.call_ctx(node)
+ dotted = full_name(node.func, self.global_assigns)
+ if dotted == 'fluent.migrate.helpers.transforms_from':
+ return self.call_helpers_transforms_from(node)
+ if dotted.startswith('fluent.migrate.'):
+ return self.call_transform(node, dotted)
+ self.generic_visit(node)
+
+ def call_ctx(self, node):
+ if node.func.attr == 'add_transforms':
+ return self.call_add_transforms(node)
+ raise BadContextAPIException(
+ 'Unexpected call on {}.{}'.format(
+ self.ctx_var, node.func.attr
+ )
+ )
+
+ def call_add_transforms(self, node):
+ args_msg = (
+ 'Expected arguments to {}.add_transforms: '
+ 'target_ftl_path, reference_ftl_path, list_of_transforms'
+ ).format(self.ctx_var)
+ ref_msg = (
+ 'Expected second argument to {}.add_transforms: '
+ 'reference should be string or variable with string value'
+ ).format(self.ctx_var)
+ # Just check call signature here, check actual types below
+ if not self.check_arguments(node, (ast.AST, ast.AST, ast.AST)):
+ self.issues.append({
+ 'msg': args_msg,
+ 'line': node.lineno,
+ })
+ return
+ in_reference = node.args[1]
+ if isinstance(in_reference, ast.Name):
+ in_reference = self.global_assigns.get(in_reference.id)
+ if isinstance(in_reference, ast.Str):
+ in_reference = in_reference.s
+ if not isinstance(in_reference, six.string_types):
+ self.issues.append({
+ 'msg': ref_msg,
+ 'line': node.args[1].lineno,
+ })
+ return
+ self.references.add(in_reference)
+        # node.args[1] has been checked above. There's not much we can
+        # validate about the target path in node.args[0], so it is ignored.
+        # The list of transforms in node.args[2] gets a deeper inspection.
+ self.generic_visit(node.args[2])
+
+ def call_transform(self, node, dotted):
+ module, called = dotted.rsplit('.', 1)
+ if module not in ('fluent.migrate', 'fluent.migrate.transforms'):
+ return
+ transform = getattr(transforms, called)
+ if not issubclass(transform, transforms.Source):
+ return
+ bad_args = '{} takes path and key as first two params'.format(called)
+ if not self.check_arguments(
+ node, ((ast.Str, ast.Name), (ast.Str, ast.Name),),
+ allow_more=True, check_kwargs=False
+ ):
+ self.issues.append({
+ 'msg': bad_args,
+ 'line': node.lineno
+ })
+ return
+ path = node.args[0]
+ if isinstance(path, ast.Str):
+ path = path.s
+ if isinstance(path, ast.Name):
+ path = self.global_assigns.get(path.id)
+ if not isinstance(path, PATH_TYPES):
+ self.issues.append({
+ 'msg': bad_args,
+ 'line': node.lineno
+ })
+
+ def call_helpers_transforms_from(self, node):
+ args_msg = (
+ 'Expected arguments to transforms_from: '
+            'str, **substitutions'
+ )
+ if not self.check_arguments(
+ node, (ast.Str,), check_kwargs=False
+ ):
+ self.issues.append({
+ 'msg': args_msg,
+ 'line': node.lineno,
+ })
+ return
+ kwargs = {}
+ found_bad_keywords = False
+ for keyword in node.keywords:
+ v = keyword.value
+ if isinstance(v, ast.Str):
+ v = v.s
+ if isinstance(v, ast.Name):
+ v = self.global_assigns.get(v.id)
+ if isinstance(v, ast.Call):
+ v = 'determined at runtime'
+ if not isinstance(v, PATH_TYPES):
+ msg = 'Bad keyword arg {} to transforms_from'.format(
+ keyword.arg
+ )
+ self.issues.append({
+ 'msg': msg,
+ 'line': node.lineno,
+ })
+ found_bad_keywords = True
+ else:
+ kwargs[keyword.arg] = v
+ if found_bad_keywords:
+ return
+ try:
+ transforms = transforms_from(node.args[0].s, **kwargs)
+ except MigrationError as e:
+ self.issues.append({
+ 'msg': str(e),
+ 'line': node.lineno,
+ })
+ return
+ ti = TransformsInspector()
+ ti.visit(transforms)
+ self.issues.extend({
+ 'msg': issue,
+ 'line': node.lineno,
+ } for issue in set(ti.issues))
+
+ def check_arguments(
+ self, node, argspec, check_kwargs=True, allow_more=False
+ ):
+ if check_kwargs and (
+ node.keywords or
+ (hasattr(node, 'kwargs') and node.kwargs)
+ ):
+ return False
+ if hasattr(node, 'starargs') and node.starargs:
+ return False
+ for arg, NODE_TYPE in zip_longest(node.args, argspec):
+ if NODE_TYPE is None:
+ return True if allow_more else False
+ if not (isinstance(arg, NODE_TYPE)):
+ return False
+ return True
+
+
+class TransformsInspector(Visitor):
+ def __init__(self):
+ super(TransformsInspector, self).__init__()
+ self.issues = []
+
+ def generic_visit(self, node):
+ if isinstance(node, transforms.Source):
+ src = node.path
+ # Source needs paths to be normalized
+ # https://bugzilla.mozilla.org/show_bug.cgi?id=1568199
+ if src != mozpath.normpath(src):
+ self.issues.append(
+ 'Source "{}" needs to be a normalized path'.format(src)
+ )
+ super(TransformsInspector, self).generic_visit(node)
+
+
+def cli():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('migration')
+ args = parser.parse_args()
+ issues = Validator.validate(args.migration)['issues']
+ for issue in issues:
+ print(issue['msg'], 'at line', issue['line'])
+ return 1 if issues else 0
diff --git a/third_party/python/fluent.migrate/setup.cfg b/third_party/python/fluent.migrate/setup.cfg
new file mode 100644
index 0000000000..957dd1650b
--- /dev/null
+++ b/third_party/python/fluent.migrate/setup.cfg
@@ -0,0 +1,16 @@
+[options.entry_points]
+console_scripts =
+ migrate-l10n=fluent.migrate.tool:cli
+ validate-l10n-recipe=fluent.migrate.validator:cli
+
+[metadata]
+long_description = file: README.md
+long_description_content_type = text/markdown
+
+[bdist_wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/fluent.migrate/setup.py b/third_party/python/fluent.migrate/setup.py
new file mode 100644
index 0000000000..9d7bcfa9ab
--- /dev/null
+++ b/third_party/python/fluent.migrate/setup.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+from setuptools import setup
+
+setup(
+ name='fluent.migrate',
+ version='0.10',
+ description='Toolchain to migrate legacy translation to Fluent.',
+ author='Mozilla',
+ author_email='l10n-drivers@mozilla.org',
+ license='APL 2',
+ url='https://hg.mozilla.org/l10n/fluent-migration/',
+ keywords=['fluent', 'localization', 'l10n'],
+ classifiers=[
+ 'Development Status :: 3 - Alpha',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3.7',
+ ],
+ packages=['fluent', 'fluent.migrate'],
+ install_requires=[
+ 'compare-locales >=8.1, <9.0',
+ 'fluent.syntax >=0.18.0, <0.19',
+ 'six',
+ ],
+ extras_require={
+ 'hg': ['python-hglib',],
+ },
+ tests_require=[
+ 'mock',
+ ],
+ test_suite='tests.migrate'
+)
diff --git a/third_party/python/fluent.syntax/PKG-INFO b/third_party/python/fluent.syntax/PKG-INFO
new file mode 100644
index 0000000000..7ce7dd4745
--- /dev/null
+++ b/third_party/python/fluent.syntax/PKG-INFO
@@ -0,0 +1,39 @@
+Metadata-Version: 2.1
+Name: fluent.syntax
+Version: 0.18.1
+Summary: Localization library for expressive translations.
+Home-page: https://github.com/projectfluent/python-fluent
+Author: Mozilla
+Author-email: l10n-drivers@mozilla.org
+License: APL 2
+Description: ``fluent.syntax`` |fluent.syntax|
+ ---------------------------------
+
+ Read, write, and transform `Fluent`_ files.
+
+ This package includes the parser, serializer, and traversal
+ utilities like Visitor and Transformer. You’re looking for this package
+ if you work on tooling for Fluent in Python.
+
+ .. code-block:: python
+
+ >>> from fluent.syntax import parse, ast, serialize
+ >>> resource = parse("a-key = String to localize")
+ >>> resource.body[0].value.elements[0].value = "Localized string"
+ >>> serialize(resource)
+ 'a-key = Localized string\n'
+
+
+ Find the full documentation on https://projectfluent.org/python-fluent/fluent.syntax/.
+
+ .. _fluent: https://projectfluent.org/
+ .. |fluent.syntax| image:: https://github.com/projectfluent/python-fluent/workflows/fluent.syntax/badge.svg
+
+Keywords: fluent,localization,l10n
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.5
+Description-Content-Type: text/x-rst
diff --git a/third_party/python/fluent.syntax/README.rst b/third_party/python/fluent.syntax/README.rst
new file mode 100644
index 0000000000..c847f5e6aa
--- /dev/null
+++ b/third_party/python/fluent.syntax/README.rst
@@ -0,0 +1,22 @@
+``fluent.syntax`` |fluent.syntax|
+---------------------------------
+
+Read, write, and transform `Fluent`_ files.
+
+This package includes the parser, serializer, and traversal
+utilities like Visitor and Transformer. You’re looking for this package
+if you work on tooling for Fluent in Python.
+
+.. code-block:: python
+
+ >>> from fluent.syntax import parse, ast, serialize
+ >>> resource = parse("a-key = String to localize")
+ >>> resource.body[0].value.elements[0].value = "Localized string"
+ >>> serialize(resource)
+ 'a-key = Localized string\n'
+
+
+Find the full documentation on https://projectfluent.org/python-fluent/fluent.syntax/.
+
+.. _fluent: https://projectfluent.org/
+.. |fluent.syntax| image:: https://github.com/projectfluent/python-fluent/workflows/fluent.syntax/badge.svg
diff --git a/third_party/python/fluent.syntax/fluent/__init__.py b/third_party/python/fluent.syntax/fluent/__init__.py
new file mode 100644
index 0000000000..69e3be50da
--- /dev/null
+++ b/third_party/python/fluent.syntax/fluent/__init__.py
@@ -0,0 +1 @@
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/third_party/python/fluent.syntax/fluent/syntax/__init__.py b/third_party/python/fluent.syntax/fluent/syntax/__init__.py
new file mode 100644
index 0000000000..0975b110b9
--- /dev/null
+++ b/third_party/python/fluent.syntax/fluent/syntax/__init__.py
@@ -0,0 +1,16 @@
+from .parser import FluentParser
+from .serializer import FluentSerializer
+
+
+def parse(source, **kwargs):
+ """Create an ast.Resource from a Fluent Syntax source.
+ """
+ parser = FluentParser(**kwargs)
+ return parser.parse(source)
+
+
+def serialize(resource, **kwargs):
+ """Serialize an ast.Resource to a unicode string.
+ """
+ serializer = FluentSerializer(**kwargs)
+ return serializer.serialize(resource)
diff --git a/third_party/python/fluent.syntax/fluent/syntax/ast.py b/third_party/python/fluent.syntax/fluent/syntax/ast.py
new file mode 100644
index 0000000000..7ad5d611d6
--- /dev/null
+++ b/third_party/python/fluent.syntax/fluent/syntax/ast.py
@@ -0,0 +1,349 @@
+# coding=utf-8
+from __future__ import unicode_literals
+import re
+import sys
+import json
+import six
+
+
+def to_json(value, fn=None):
+ if isinstance(value, BaseNode):
+ return value.to_json(fn)
+ if isinstance(value, list):
+ return list(to_json(item, fn) for item in value)
+ if isinstance(value, tuple):
+ return list(to_json(item, fn) for item in value)
+ else:
+ return value
+
+
+def from_json(value):
+ if isinstance(value, dict):
+ cls = getattr(sys.modules[__name__], value['type'])
+ args = {
+ k: from_json(v)
+ for k, v in value.items()
+ if k != 'type'
+ }
+ return cls(**args)
+ if isinstance(value, list):
+ return list(map(from_json, value))
+ else:
+ return value
+
+
+def scalars_equal(node1, node2, ignored_fields):
+ """Compare two nodes which are not lists."""
+
+ if type(node1) != type(node2):
+ return False
+
+ if isinstance(node1, BaseNode):
+ return node1.equals(node2, ignored_fields)
+
+ return node1 == node2
+
+
+class BaseNode(object):
+ """Base class for all Fluent AST nodes.
+
+ All productions described in the ASDL subclass BaseNode, including Span and
+    Annotation. Implements __str__, to_json, equals, and clone.
+ """
+
+ def clone(self):
+ """Create a deep clone of the current node."""
+ def visit(value):
+ """Clone node and its descendants."""
+ if isinstance(value, BaseNode):
+ return value.clone()
+ if isinstance(value, list):
+ return [visit(child) for child in value]
+ if isinstance(value, tuple):
+ return tuple(visit(child) for child in value)
+ return value
+
+ # Use all attributes found on the node as kwargs to the constructor.
+ return self.__class__(
+ **{name: visit(value) for name, value in vars(self).items()}
+ )
+
+ def equals(self, other, ignored_fields=['span']):
+ """Compare two nodes.
+
+        Nodes are deeply compared on a field by field basis. If possible, False
+        is returned early. Lists are compared item by item, in order. By
+        default, spans are not taken into account.
+ """
+
+ self_keys = set(vars(self).keys())
+ other_keys = set(vars(other).keys())
+
+ if ignored_fields:
+ for key in ignored_fields:
+ self_keys.discard(key)
+ other_keys.discard(key)
+
+ if self_keys != other_keys:
+ return False
+
+ for key in self_keys:
+ field1 = getattr(self, key)
+ field2 = getattr(other, key)
+
+            # List-typed fields are compared item-by-item, in order.
+ if isinstance(field1, list) and isinstance(field2, list):
+ if len(field1) != len(field2):
+ return False
+
+ for elem1, elem2 in zip(field1, field2):
+ if not scalars_equal(elem1, elem2, ignored_fields):
+ return False
+
+ elif not scalars_equal(field1, field2, ignored_fields):
+ return False
+
+ return True
+
+ def to_json(self, fn=None):
+ obj = {
+ name: to_json(value, fn)
+ for name, value in vars(self).items()
+ }
+ obj.update(
+ {'type': self.__class__.__name__}
+ )
+ return fn(obj) if fn else obj
+
+ def __str__(self):
+ return json.dumps(self.to_json())
+
+
+class SyntaxNode(BaseNode):
+ """Base class for AST nodes which can have Spans."""
+
+ def __init__(self, span=None, **kwargs):
+ super(SyntaxNode, self).__init__(**kwargs)
+ self.span = span
+
+ def add_span(self, start, end):
+ self.span = Span(start, end)
+
+
+class Resource(SyntaxNode):
+ def __init__(self, body=None, **kwargs):
+ super(Resource, self).__init__(**kwargs)
+ self.body = body or []
+
+
+class Entry(SyntaxNode):
+ """An abstract base class for useful elements of Resource.body."""
+
+
+class Message(Entry):
+ def __init__(self, id, value=None, attributes=None,
+ comment=None, **kwargs):
+ super(Message, self).__init__(**kwargs)
+ self.id = id
+ self.value = value
+ self.attributes = attributes or []
+ self.comment = comment
+
+
+class Term(Entry):
+ def __init__(self, id, value, attributes=None,
+ comment=None, **kwargs):
+ super(Term, self).__init__(**kwargs)
+ self.id = id
+ self.value = value
+ self.attributes = attributes or []
+ self.comment = comment
+
+
+class Pattern(SyntaxNode):
+ def __init__(self, elements, **kwargs):
+ super(Pattern, self).__init__(**kwargs)
+ self.elements = elements
+
+
+class PatternElement(SyntaxNode):
+ """An abstract base class for elements of Patterns."""
+
+
+class TextElement(PatternElement):
+ def __init__(self, value, **kwargs):
+ super(TextElement, self).__init__(**kwargs)
+ self.value = value
+
+
+class Placeable(PatternElement):
+ def __init__(self, expression, **kwargs):
+ super(Placeable, self).__init__(**kwargs)
+ self.expression = expression
+
+
+class Expression(SyntaxNode):
+ """An abstract base class for expressions."""
+
+
+class Literal(Expression):
+ """An abstract base class for literals."""
+ def __init__(self, value, **kwargs):
+ super(Literal, self).__init__(**kwargs)
+ self.value = value
+
+ def parse(self):
+ return {'value': self.value}
+
+
+class StringLiteral(Literal):
+ def parse(self):
+ def from_escape_sequence(matchobj):
+ c, codepoint4, codepoint6 = matchobj.groups()
+ if c:
+ return c
+ codepoint = int(codepoint4 or codepoint6, 16)
+ if codepoint <= 0xD7FF or 0xE000 <= codepoint:
+ return six.unichr(codepoint)
+            # Escape sequences representing surrogate code points are
+ # well-formed but invalid in Fluent. Replace them with U+FFFD
+ # REPLACEMENT CHARACTER.
+ return '�'
+
+ value = re.sub(
+ r'\\(?:(\\|")|u([0-9a-fA-F]{4})|U([0-9a-fA-F]{6}))',
+ from_escape_sequence,
+ self.value
+ )
+ return {'value': value}
+
+
+class NumberLiteral(Literal):
+ def parse(self):
+ value = float(self.value)
+ decimal_position = self.value.find('.')
+ precision = 0
+ if decimal_position >= 0:
+ precision = len(self.value) - decimal_position - 1
+ return {
+ 'value': value,
+ 'precision': precision
+ }
+
+
+class MessageReference(Expression):
+ def __init__(self, id, attribute=None, **kwargs):
+ super(MessageReference, self).__init__(**kwargs)
+ self.id = id
+ self.attribute = attribute
+
+
+class TermReference(Expression):
+ def __init__(self, id, attribute=None, arguments=None, **kwargs):
+ super(TermReference, self).__init__(**kwargs)
+ self.id = id
+ self.attribute = attribute
+ self.arguments = arguments
+
+
+class VariableReference(Expression):
+ def __init__(self, id, **kwargs):
+ super(VariableReference, self).__init__(**kwargs)
+ self.id = id
+
+
+class FunctionReference(Expression):
+ def __init__(self, id, arguments, **kwargs):
+ super(FunctionReference, self).__init__(**kwargs)
+ self.id = id
+ self.arguments = arguments
+
+
+class SelectExpression(Expression):
+ def __init__(self, selector, variants, **kwargs):
+ super(SelectExpression, self).__init__(**kwargs)
+ self.selector = selector
+ self.variants = variants
+
+
+class CallArguments(SyntaxNode):
+ def __init__(self, positional=None, named=None, **kwargs):
+ super(CallArguments, self).__init__(**kwargs)
+ self.positional = [] if positional is None else positional
+ self.named = [] if named is None else named
+
+
+class Attribute(SyntaxNode):
+ def __init__(self, id, value, **kwargs):
+ super(Attribute, self).__init__(**kwargs)
+ self.id = id
+ self.value = value
+
+
+class Variant(SyntaxNode):
+ def __init__(self, key, value, default=False, **kwargs):
+ super(Variant, self).__init__(**kwargs)
+ self.key = key
+ self.value = value
+ self.default = default
+
+
+class NamedArgument(SyntaxNode):
+ def __init__(self, name, value, **kwargs):
+ super(NamedArgument, self).__init__(**kwargs)
+ self.name = name
+ self.value = value
+
+
+class Identifier(SyntaxNode):
+ def __init__(self, name, **kwargs):
+ super(Identifier, self).__init__(**kwargs)
+ self.name = name
+
+
+class BaseComment(Entry):
+ def __init__(self, content=None, **kwargs):
+ super(BaseComment, self).__init__(**kwargs)
+ self.content = content
+
+
+class Comment(BaseComment):
+ def __init__(self, content=None, **kwargs):
+ super(Comment, self).__init__(content, **kwargs)
+
+
+class GroupComment(BaseComment):
+ def __init__(self, content=None, **kwargs):
+ super(GroupComment, self).__init__(content, **kwargs)
+
+
+class ResourceComment(BaseComment):
+ def __init__(self, content=None, **kwargs):
+ super(ResourceComment, self).__init__(content, **kwargs)
+
+
+class Junk(SyntaxNode):
+ def __init__(self, content=None, annotations=None, **kwargs):
+ super(Junk, self).__init__(**kwargs)
+ self.content = content
+ self.annotations = annotations or []
+
+ def add_annotation(self, annot):
+ self.annotations.append(annot)
+
+
+class Span(BaseNode):
+ def __init__(self, start, end, **kwargs):
+ super(Span, self).__init__(**kwargs)
+ self.start = start
+ self.end = end
+
+
+class Annotation(SyntaxNode):
+ def __init__(self, code, arguments=None, message=None, **kwargs):
+ super(Annotation, self).__init__(**kwargs)
+ self.code = code
+ self.arguments = arguments or []
+ self.message = message
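+
+
+# Illustrative sketch, not part of upstream python-fluent: round-trip a node
+# through to_json()/from_json() and compare the copy with equals(), which
+# ignores spans by default.
+if __name__ == '__main__':
+    message = Message(
+        id=Identifier('hello'),
+        value=Pattern([TextElement('Hello, world!')]),
+    )
+    copy = from_json(message.to_json())
+    print(copy.equals(message))  # expected: True
+    print(message)  # the JSON form, via BaseNode.__str__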
diff --git a/third_party/python/fluent.syntax/fluent/syntax/errors.py b/third_party/python/fluent.syntax/fluent/syntax/errors.py
new file mode 100644
index 0000000000..cd137871b8
--- /dev/null
+++ b/third_party/python/fluent.syntax/fluent/syntax/errors.py
@@ -0,0 +1,70 @@
+from __future__ import unicode_literals
+
+
+class ParseError(Exception):
+ def __init__(self, code, *args):
+ self.code = code
+ self.args = args
+ self.message = get_error_message(code, args)
+
+
+def get_error_message(code, args):
+ if code == 'E00001':
+ return 'Generic error'
+ if code == 'E0002':
+ return 'Expected an entry start'
+ if code == 'E0003':
+ return 'Expected token: "{}"'.format(args[0])
+ if code == 'E0004':
+ return 'Expected a character from range: "{}"'.format(args[0])
+ if code == 'E0005':
+ msg = 'Expected message "{}" to have a value or attributes'
+ return msg.format(args[0])
+ if code == 'E0006':
+ msg = 'Expected term "-{}" to have a value'
+ return msg.format(args[0])
+ if code == 'E0007':
+ return 'Keyword cannot end with a whitespace'
+ if code == 'E0008':
+ return 'The callee has to be an upper-case identifier or a term'
+ if code == 'E0009':
+ return 'The argument name has to be a simple identifier'
+ if code == 'E0010':
+ return 'Expected one of the variants to be marked as default (*)'
+ if code == 'E0011':
+ return 'Expected at least one variant after "->"'
+ if code == 'E0012':
+ return 'Expected value'
+ if code == 'E0013':
+ return 'Expected variant key'
+ if code == 'E0014':
+ return 'Expected literal'
+ if code == 'E0015':
+ return 'Only one variant can be marked as default (*)'
+ if code == 'E0016':
+ return 'Message references cannot be used as selectors'
+ if code == 'E0017':
+ return 'Terms cannot be used as selectors'
+ if code == 'E0018':
+ return 'Attributes of messages cannot be used as selectors'
+ if code == 'E0019':
+ return 'Attributes of terms cannot be used as placeables'
+ if code == 'E0020':
+ return 'Unterminated string expression'
+ if code == 'E0021':
+ return 'Positional arguments must not follow named arguments'
+ if code == 'E0022':
+ return 'Named arguments must be unique'
+ if code == 'E0024':
+ return 'Cannot access variants of a message.'
+ if code == 'E0025':
+ return 'Unknown escape sequence: \\{}.'.format(args[0])
+ if code == 'E0026':
+ return 'Invalid Unicode escape sequence: {}.'.format(args[0])
+ if code == 'E0027':
+ return 'Unbalanced closing brace in TextElement.'
+ if code == 'E0028':
+ return 'Expected an inline expression'
+ if code == 'E0029':
+ return 'Expected simple expression as selector'
+ return code
diff --git a/third_party/python/fluent.syntax/fluent/syntax/parser.py b/third_party/python/fluent.syntax/fluent/syntax/parser.py
new file mode 100644
index 0000000000..6731136cef
--- /dev/null
+++ b/third_party/python/fluent.syntax/fluent/syntax/parser.py
@@ -0,0 +1,683 @@
+from __future__ import unicode_literals
+import re
+from . import ast
+from .stream import EOF, EOL, FluentParserStream
+from .errors import ParseError
+
+
+def with_span(fn):
+ def decorated(self, ps, *args, **kwargs):
+ if not self.with_spans:
+ return fn(self, ps, *args, **kwargs)
+
+ start = ps.index
+ node = fn(self, ps, *args, **kwargs)
+
+ # Don't re-add the span if the node already has it. This may happen
+ # when one decorated function calls another decorated function.
+ if node.span is not None:
+ return node
+
+ end = ps.index
+ node.add_span(start, end)
+ return node
+
+ return decorated
+
+
+class FluentParser(object):
+ """This class is used to parse Fluent source content.
+
+ ``with_spans`` enables source information in the form of
+ :class:`.ast.Span` objects for each :class:`.ast.SyntaxNode`.
+ """
+ def __init__(self, with_spans=True):
+ self.with_spans = with_spans
+
+ def parse(self, source):
+ """Create a :class:`.ast.Resource` from a Fluent source.
+ """
+ ps = FluentParserStream(source)
+ ps.skip_blank_block()
+
+ entries = []
+ last_comment = None
+
+ while ps.current_char:
+ entry = self.get_entry_or_junk(ps)
+ blank_lines = ps.skip_blank_block()
+
+ # Regular Comments require special logic. Comments may be attached
+ # to Messages or Terms if they are followed immediately by them.
+ # However they should parse as standalone when they're followed by
+ # Junk. Consequently, we only attach Comments once we know that the
+ # Message or the Term parsed successfully.
+ if isinstance(entry, ast.Comment) and len(blank_lines) == 0 \
+ and ps.current_char:
+ # Stash the comment and decide what to do with it
+ # in the next pass.
+ last_comment = entry
+ continue
+
+ if last_comment is not None:
+ if isinstance(entry, (ast.Message, ast.Term)):
+ entry.comment = last_comment
+ if self.with_spans:
+ entry.span.start = entry.comment.span.start
+ else:
+ entries.append(last_comment)
+ # In either case, the stashed comment has been dealt with;
+ # clear it.
+ last_comment = None
+
+ entries.append(entry)
+
+ res = ast.Resource(entries)
+
+ if self.with_spans:
+ res.add_span(0, ps.index)
+
+ return res
+
+ def parse_entry(self, source):
+ """Parse the first :class:`.ast.Entry` in source.
+
+ Skip all encountered comments and start parsing at the first :class:`.ast.Message`
+ or :class:`.ast.Term` start. Return :class:`.ast.Junk` if the parsing is not successful.
+
+ Preceding comments are ignored unless they contain syntax errors
+ themselves, in which case :class:`.ast.Junk` for the invalid comment is returned.
+ """
+ ps = FluentParserStream(source)
+ ps.skip_blank_block()
+
+ while ps.current_char == '#':
+ skipped = self.get_entry_or_junk(ps)
+ if isinstance(skipped, ast.Junk):
+ # Don't skip Junk comments.
+ return skipped
+ ps.skip_blank_block()
+
+ return self.get_entry_or_junk(ps)
+
+ def get_entry_or_junk(self, ps):
+ entry_start_pos = ps.index
+
+ try:
+ entry = self.get_entry(ps)
+ ps.expect_line_end()
+ return entry
+ except ParseError as err:
+ error_index = ps.index
+ ps.skip_to_next_entry_start(entry_start_pos)
+ next_entry_start = ps.index
+ if next_entry_start < error_index:
+ # The position of the error must be inside of the Junk's span.
+ error_index = next_entry_start
+
+ # Create a Junk instance
+ slice = ps.string[entry_start_pos:next_entry_start]
+ junk = ast.Junk(slice)
+ if self.with_spans:
+ junk.add_span(entry_start_pos, next_entry_start)
+ annot = ast.Annotation(err.code, err.args, err.message)
+ annot.add_span(error_index, error_index)
+ junk.add_annotation(annot)
+ return junk
+
+ def get_entry(self, ps):
+ if ps.current_char == '#':
+ return self.get_comment(ps)
+
+ if ps.current_char == '-':
+ return self.get_term(ps)
+
+ if ps.is_identifier_start():
+ return self.get_message(ps)
+
+ raise ParseError('E0002')
+
+ @with_span
+ def get_comment(self, ps):
+ # 0 - comment
+ # 1 - group comment
+ # 2 - resource comment
+ level = -1
+ content = ''
+
+ while True:
+ i = -1
+ while ps.current_char == '#' \
+ and (i < (2 if level == -1 else level)):
+ ps.next()
+ i += 1
+
+ if level == -1:
+ level = i
+
+ if ps.current_char != EOL:
+ ps.expect_char(' ')
+ ch = ps.take_char(lambda x: x != EOL)
+ while ch:
+ content += ch
+ ch = ps.take_char(lambda x: x != EOL)
+
+ if ps.is_next_line_comment(level=level):
+ content += ps.current_char
+ ps.next()
+ else:
+ break
+
+ if level == 0:
+ return ast.Comment(content)
+ elif level == 1:
+ return ast.GroupComment(content)
+ elif level == 2:
+ return ast.ResourceComment(content)
+
+ @with_span
+ def get_message(self, ps):
+ id = self.get_identifier(ps)
+ ps.skip_blank_inline()
+ ps.expect_char('=')
+
+ value = self.maybe_get_pattern(ps)
+ attrs = self.get_attributes(ps)
+
+ if value is None and len(attrs) == 0:
+ raise ParseError('E0005', id.name)
+
+ return ast.Message(id, value, attrs)
+
+ @with_span
+ def get_term(self, ps):
+ ps.expect_char('-')
+ id = self.get_identifier(ps)
+
+ ps.skip_blank_inline()
+ ps.expect_char('=')
+
+ value = self.maybe_get_pattern(ps)
+ if value is None:
+ raise ParseError('E0006', id.name)
+
+ attrs = self.get_attributes(ps)
+ return ast.Term(id, value, attrs)
+
+ @with_span
+ def get_attribute(self, ps):
+ ps.expect_char('.')
+
+ key = self.get_identifier(ps)
+
+ ps.skip_blank_inline()
+ ps.expect_char('=')
+
+ value = self.maybe_get_pattern(ps)
+ if value is None:
+ raise ParseError('E0012')
+
+ return ast.Attribute(key, value)
+
+ def get_attributes(self, ps):
+ attrs = []
+ ps.peek_blank()
+
+ while ps.is_attribute_start():
+ ps.skip_to_peek()
+ attr = self.get_attribute(ps)
+ attrs.append(attr)
+ ps.peek_blank()
+
+ return attrs
+
+ @with_span
+ def get_identifier(self, ps):
+ name = ps.take_id_start()
+ ch = ps.take_id_char()
+ while ch:
+ name += ch
+ ch = ps.take_id_char()
+
+ return ast.Identifier(name)
+
+ def get_variant_key(self, ps):
+ ch = ps.current_char
+
+ if ch is EOF:
+ raise ParseError('E0013')
+
+ cc = ord(ch)
+ if ((cc >= 48 and cc <= 57) or cc == 45): # 0-9, -
+ return self.get_number(ps)
+
+ return self.get_identifier(ps)
+
+ @with_span
+ def get_variant(self, ps, has_default):
+ default_index = False
+
+ if ps.current_char == '*':
+ if has_default:
+ raise ParseError('E0015')
+ ps.next()
+ default_index = True
+
+ ps.expect_char('[')
+ ps.skip_blank()
+
+ key = self.get_variant_key(ps)
+
+ ps.skip_blank()
+ ps.expect_char(']')
+
+ value = self.maybe_get_pattern(ps)
+ if value is None:
+ raise ParseError('E0012')
+
+ return ast.Variant(key, value, default_index)
+
+ def get_variants(self, ps):
+ variants = []
+ has_default = False
+
+ ps.skip_blank()
+ while ps.is_variant_start():
+ variant = self.get_variant(ps, has_default)
+
+ if variant.default:
+ has_default = True
+
+ variants.append(variant)
+ ps.expect_line_end()
+ ps.skip_blank()
+
+ if len(variants) == 0:
+ raise ParseError('E0011')
+
+ if not has_default:
+ raise ParseError('E0010')
+
+ return variants
+
+ def get_digits(self, ps):
+ num = ''
+
+ ch = ps.take_digit()
+ while ch:
+ num += ch
+ ch = ps.take_digit()
+
+ if len(num) == 0:
+ raise ParseError('E0004', '0-9')
+
+ return num
+
+ @with_span
+ def get_number(self, ps):
+ num = ''
+
+ if ps.current_char == '-':
+ num += '-'
+ ps.next()
+
+ num += self.get_digits(ps)
+
+ if ps.current_char == '.':
+ num += '.'
+ ps.next()
+ num += self.get_digits(ps)
+
+ return ast.NumberLiteral(num)
+
+ def maybe_get_pattern(self, ps):
+ '''Parse an inline or a block Pattern, or None
+
+ maybe_get_pattern distinguishes between patterns which start on the
+ same line as the indentifier (aka inline singleline patterns and inline
+ multiline patterns), and patterns which start on a new line (aka block
+ patterns). The distinction is important for the dedentation logic: the
+ indent of the first line of a block pattern must be taken into account
+ when calculating the maximum common indent.
+ '''
+ ps.peek_blank_inline()
+ if ps.is_value_start():
+ ps.skip_to_peek()
+ return self.get_pattern(ps, is_block=False)
+
+ ps.peek_blank_block()
+ if ps.is_value_continuation():
+ ps.skip_to_peek()
+ return self.get_pattern(ps, is_block=True)
+
+ return None
+
+ @with_span
+ def get_pattern(self, ps, is_block):
+ elements = []
+ if is_block:
+ # A block pattern is a pattern which starts on a new line. Measure
+ # the indent of this first line for the dedentation logic.
+ blank_start = ps.index
+ first_indent = ps.skip_blank_inline()
+ elements.append(self.Indent(first_indent, blank_start, ps.index))
+ common_indent_length = len(first_indent)
+ else:
+ common_indent_length = float('infinity')
+
+ while ps.current_char:
+ if ps.current_char == EOL:
+ blank_start = ps.index
+ blank_lines = ps.peek_blank_block()
+ if ps.is_value_continuation():
+ ps.skip_to_peek()
+ indent = ps.skip_blank_inline()
+ common_indent_length = min(common_indent_length, len(indent))
+ elements.append(self.Indent(blank_lines + indent, blank_start, ps.index))
+ continue
+
+ # The end condition for get_pattern's while loop is a newline
+ # which is not followed by a valid pattern continuation.
+ ps.reset_peek()
+ break
+
+ if ps.current_char == '}':
+ raise ParseError('E0027')
+
+ if ps.current_char == '{':
+ element = self.get_placeable(ps)
+ else:
+ element = self.get_text_element(ps)
+
+ elements.append(element)
+
+ dedented = self.dedent(elements, common_indent_length)
+ return ast.Pattern(dedented)
+
+ class Indent(ast.SyntaxNode):
+ def __init__(self, value, start, end):
+ super(FluentParser.Indent, self).__init__()
+ self.value = value
+ self.add_span(start, end)
+
+ def dedent(self, elements, common_indent):
+ '''Dedent a list of elements by removing the maximum common indent from
+ the beginning of text lines. The common indent is calculated in
+ get_pattern.
+ '''
+ trimmed = []
+
+ for element in elements:
+ if isinstance(element, ast.Placeable):
+ trimmed.append(element)
+ continue
+
+ if isinstance(element, self.Indent):
+ # Strip the common indent.
+ element.value = element.value[:len(element.value) - common_indent]
+ if len(element.value) == 0:
+ continue
+
+ prev = trimmed[-1] if len(trimmed) > 0 else None
+ if isinstance(prev, ast.TextElement):
+ # Join adjacent TextElements by replacing them with their sum.
+ sum = ast.TextElement(prev.value + element.value)
+ if self.with_spans:
+ sum.add_span(prev.span.start, element.span.end)
+ trimmed[-1] = sum
+ continue
+
+ if isinstance(element, self.Indent):
+ # If the indent hasn't been merged into a preceding
+ # TextElements, convert it into a new TextElement.
+ text_element = ast.TextElement(element.value)
+ if self.with_spans:
+ text_element.add_span(element.span.start, element.span.end)
+ element = text_element
+
+ trimmed.append(element)
+
+ # Trim trailing whitespace from the Pattern.
+ last_element = trimmed[-1] if len(trimmed) > 0 else None
+ if isinstance(last_element, ast.TextElement):
+ last_element.value = last_element.value.rstrip(' \t\n\r')
+ if last_element.value == "":
+ trimmed.pop()
+
+ return trimmed
+
+ @with_span
+ def get_text_element(self, ps):
+ buf = ''
+
+ while ps.current_char:
+ ch = ps.current_char
+
+ if ch == '{' or ch == '}':
+ return ast.TextElement(buf)
+
+ if ch == EOL:
+ return ast.TextElement(buf)
+
+ buf += ch
+ ps.next()
+
+ return ast.TextElement(buf)
+
+ def get_escape_sequence(self, ps):
+ next = ps.current_char
+
+ if next == '\\' or next == '"':
+ ps.next()
+ return '\\{}'.format(next)
+
+ if next == 'u':
+ return self.get_unicode_escape_sequence(ps, next, 4)
+
+ if next == 'U':
+ return self.get_unicode_escape_sequence(ps, next, 6)
+
+ raise ParseError('E0025', next)
+
+ def get_unicode_escape_sequence(self, ps, u, digits):
+ ps.expect_char(u)
+ sequence = ''
+ for _ in range(digits):
+ ch = ps.take_hex_digit()
+ if not ch:
+ raise ParseError('E0026', '\\{}{}{}'.format(u, sequence, ps.current_char))
+ sequence += ch
+
+ return '\\{}{}'.format(u, sequence)
+
+ @with_span
+ def get_placeable(self, ps):
+ ps.expect_char('{')
+ ps.skip_blank()
+ expression = self.get_expression(ps)
+ ps.expect_char('}')
+ return ast.Placeable(expression)
+
+ @with_span
+ def get_expression(self, ps):
+ selector = self.get_inline_expression(ps)
+
+ ps.skip_blank()
+
+ if ps.current_char == '-':
+ if ps.peek() != '>':
+ ps.reset_peek()
+ return selector
+
+ if isinstance(selector, ast.MessageReference):
+ if selector.attribute is None:
+ raise ParseError('E0016')
+ else:
+ raise ParseError('E0018')
+
+ elif (
+ isinstance(selector, ast.TermReference)
+ ):
+ if selector.attribute is None:
+ raise ParseError('E0017')
+ elif not (
+ isinstance(selector, (
+ ast.StringLiteral,
+ ast.NumberLiteral,
+ ast.VariableReference,
+ ast.FunctionReference,
+ ))
+ ):
+ raise ParseError('E0029')
+
+ ps.next()
+ ps.next()
+
+ ps.skip_blank_inline()
+ ps.expect_line_end()
+
+ variants = self.get_variants(ps)
+ return ast.SelectExpression(selector, variants)
+
+ if (
+ isinstance(selector, ast.TermReference)
+ and selector.attribute is not None
+ ):
+ raise ParseError('E0019')
+
+ return selector
+
+ @with_span
+ def get_inline_expression(self, ps):
+ if ps.current_char == '{':
+ return self.get_placeable(ps)
+
+ if ps.is_number_start():
+ return self.get_number(ps)
+
+ if ps.current_char == '"':
+ return self.get_string(ps)
+
+ if ps.current_char == '$':
+ ps.next()
+ id = self.get_identifier(ps)
+ return ast.VariableReference(id)
+
+ if ps.current_char == '-':
+ ps.next()
+ id = self.get_identifier(ps)
+ attribute = None
+ if ps.current_char == '.':
+ ps.next()
+ attribute = self.get_identifier(ps)
+ arguments = None
+ ps.peek_blank()
+ if ps.current_peek == '(':
+ ps.skip_to_peek()
+ arguments = self.get_call_arguments(ps)
+ return ast.TermReference(id, attribute, arguments)
+
+ if ps.is_identifier_start():
+ id = self.get_identifier(ps)
+ ps.peek_blank()
+
+ if ps.current_peek == '(':
+ # It's a Function. Ensure it's all upper-case.
+ if not re.match('^[A-Z][A-Z0-9_-]*$', id.name):
+ raise ParseError('E0008')
+ ps.skip_to_peek()
+ args = self.get_call_arguments(ps)
+ return ast.FunctionReference(id, args)
+
+ attribute = None
+ if ps.current_char == '.':
+ ps.next()
+ attribute = self.get_identifier(ps)
+
+ return ast.MessageReference(id, attribute)
+
+ raise ParseError('E0028')
+
+ @with_span
+ def get_call_argument(self, ps):
+ exp = self.get_inline_expression(ps)
+
+ ps.skip_blank()
+
+ if ps.current_char != ':':
+ return exp
+
+ if isinstance(exp, ast.MessageReference) and exp.attribute is None:
+ ps.next()
+ ps.skip_blank()
+
+ value = self.get_literal(ps)
+ return ast.NamedArgument(exp.id, value)
+
+ raise ParseError('E0009')
+
+ @with_span
+ def get_call_arguments(self, ps):
+ positional = []
+ named = []
+ argument_names = set()
+
+ ps.expect_char('(')
+ ps.skip_blank()
+
+ while True:
+ if ps.current_char == ')':
+ break
+
+ arg = self.get_call_argument(ps)
+ if isinstance(arg, ast.NamedArgument):
+ if arg.name.name in argument_names:
+ raise ParseError('E0022')
+ named.append(arg)
+ argument_names.add(arg.name.name)
+ elif len(argument_names) > 0:
+ raise ParseError('E0021')
+ else:
+ positional.append(arg)
+
+ ps.skip_blank()
+
+ if ps.current_char == ',':
+ ps.next()
+ ps.skip_blank()
+ continue
+
+ break
+
+ ps.expect_char(')')
+ return ast.CallArguments(positional, named)
+
+ @with_span
+ def get_string(self, ps):
+ value = ''
+
+ ps.expect_char('"')
+
+ while True:
+ ch = ps.take_char(lambda x: x != '"' and x != EOL)
+ if not ch:
+ break
+ if ch == '\\':
+ value += self.get_escape_sequence(ps)
+ else:
+ value += ch
+
+ if ps.current_char == EOL:
+ raise ParseError('E0020')
+
+ ps.expect_char('"')
+
+ return ast.StringLiteral(value)
+
+ @with_span
+ def get_literal(self, ps):
+ if ps.is_number_start():
+ return self.get_number(ps)
+ if ps.current_char == '"':
+ return self.get_string(ps)
+ raise ParseError('E0014')
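+
+
+# Illustrative sketch, not part of upstream python-fluent: parse a single
+# entry without attaching span information to the resulting nodes.
+if __name__ == '__main__':
+    parser = FluentParser(with_spans=False)
+    entry = parser.parse_entry('# A greeting.\nhello = Hello, world!\n')
+    print(type(entry).__name__)  # expected: Message
+    print(entry.span)  # expected: None, because with_spans is False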
diff --git a/third_party/python/fluent.syntax/fluent/syntax/serializer.py b/third_party/python/fluent.syntax/fluent/syntax/serializer.py
new file mode 100644
index 0000000000..7c1bb08727
--- /dev/null
+++ b/third_party/python/fluent.syntax/fluent/syntax/serializer.py
@@ -0,0 +1,233 @@
+from __future__ import unicode_literals
+from . import ast
+
+
+def indent_except_first_line(content):
+ return " ".join(
+ content.splitlines(True)
+ )
+
+
+def includes_new_line(elem):
+ return isinstance(elem, ast.TextElement) and "\n" in elem.value
+
+
+def is_select_expr(elem):
+ return (
+ isinstance(elem, ast.Placeable) and
+ isinstance(elem.expression, ast.SelectExpression))
+
+
+def should_start_on_new_line(pattern):
+ is_multiline = any(is_select_expr(elem) for elem in pattern.elements) \
+ or any(includes_new_line(elem) for elem in pattern.elements)
+
+ if is_multiline:
+ first_element = pattern.elements[0]
+ if isinstance(first_element, ast.TextElement):
+ first_char = first_element.value[0]
+ if first_char in ("[", ".", "*"):
+ return False
+ return True
+ return False
+
+
+class FluentSerializer(object):
+ """FluentSerializer converts :class:`.ast.SyntaxNode` objects to unicode strings.
+
+    ``with_junk`` controls whether Junk entries (parse errors) are serialized as well.
+ """
+ HAS_ENTRIES = 1
+
+ def __init__(self, with_junk=False):
+ self.with_junk = with_junk
+
+ def serialize(self, resource):
+ "Serialize a :class:`.ast.Resource` to a string."
+ if not isinstance(resource, ast.Resource):
+ raise Exception('Unknown resource type: {}'.format(type(resource)))
+
+ state = 0
+
+ parts = []
+ for entry in resource.body:
+ if not isinstance(entry, ast.Junk) or self.with_junk:
+ parts.append(self.serialize_entry(entry, state))
+ if not state & self.HAS_ENTRIES:
+ state |= self.HAS_ENTRIES
+
+ return "".join(parts)
+
+ def serialize_entry(self, entry, state=0):
+ "Serialize an :class:`.ast.Entry` to a string."
+ if isinstance(entry, ast.Message):
+ return serialize_message(entry)
+ if isinstance(entry, ast.Term):
+ return serialize_term(entry)
+ if isinstance(entry, ast.Comment):
+ if state & self.HAS_ENTRIES:
+ return "\n{}\n".format(serialize_comment(entry, "#"))
+ return "{}\n".format(serialize_comment(entry, "#"))
+ if isinstance(entry, ast.GroupComment):
+ if state & self.HAS_ENTRIES:
+ return "\n{}\n".format(serialize_comment(entry, "##"))
+ return "{}\n".format(serialize_comment(entry, "##"))
+ if isinstance(entry, ast.ResourceComment):
+ if state & self.HAS_ENTRIES:
+ return "\n{}\n".format(serialize_comment(entry, "###"))
+ return "{}\n".format(serialize_comment(entry, "###"))
+ if isinstance(entry, ast.Junk):
+ return serialize_junk(entry)
+ raise Exception('Unknown entry type: {}'.format(type(entry)))
+
+
+def serialize_comment(comment, prefix="#"):
+ prefixed = "\n".join([
+ prefix if len(line) == 0 else "{} {}".format(prefix, line)
+ for line in comment.content.split("\n")
+ ])
+ # Add the trailing line break.
+ return '{}\n'.format(prefixed)
+
+
+def serialize_junk(junk):
+ return junk.content
+
+
+def serialize_message(message):
+ parts = []
+
+ if message.comment:
+ parts.append(serialize_comment(message.comment))
+
+ parts.append("{} =".format(message.id.name))
+
+ if message.value:
+ parts.append(serialize_pattern(message.value))
+
+ if message.attributes:
+ for attribute in message.attributes:
+ parts.append(serialize_attribute(attribute))
+
+ parts.append("\n")
+ return ''.join(parts)
+
+
+def serialize_term(term):
+ parts = []
+
+ if term.comment:
+ parts.append(serialize_comment(term.comment))
+
+ parts.append("-{} =".format(term.id.name))
+ parts.append(serialize_pattern(term.value))
+
+ if term.attributes:
+ for attribute in term.attributes:
+ parts.append(serialize_attribute(attribute))
+
+ parts.append("\n")
+ return ''.join(parts)
+
+
+def serialize_attribute(attribute):
+ return "\n .{} ={}".format(
+ attribute.id.name,
+ indent_except_first_line(serialize_pattern(attribute.value))
+ )
+
+
+def serialize_pattern(pattern):
+ content = "".join(serialize_element(elem) for elem in pattern.elements)
+ content = indent_except_first_line(content)
+
+ if should_start_on_new_line(pattern):
+ return '\n {}'.format(content)
+
+ return ' {}'.format(content)
+
+
+def serialize_element(element):
+ if isinstance(element, ast.TextElement):
+ return element.value
+ if isinstance(element, ast.Placeable):
+ return serialize_placeable(element)
+ raise Exception('Unknown element type: {}'.format(type(element)))
+
+
+def serialize_placeable(placeable):
+ expr = placeable.expression
+ if isinstance(expr, ast.Placeable):
+ return "{{{}}}".format(serialize_placeable(expr))
+ if isinstance(expr, ast.SelectExpression):
+    # Special-case select expressions to control the whitespace around the
+ # opening and the closing brace.
+ return "{{ {}}}".format(serialize_expression(expr))
+ if isinstance(expr, ast.Expression):
+ return "{{ {} }}".format(serialize_expression(expr))
+
+
+def serialize_expression(expression):
+ if isinstance(expression, ast.StringLiteral):
+ return '"{}"'.format(expression.value)
+ if isinstance(expression, ast.NumberLiteral):
+ return expression.value
+ if isinstance(expression, ast.VariableReference):
+ return "${}".format(expression.id.name)
+ if isinstance(expression, ast.TermReference):
+ out = "-{}".format(expression.id.name)
+ if expression.attribute is not None:
+ out += ".{}".format(expression.attribute.name)
+ if expression.arguments is not None:
+ out += serialize_call_arguments(expression.arguments)
+ return out
+ if isinstance(expression, ast.MessageReference):
+ out = expression.id.name
+ if expression.attribute is not None:
+ out += ".{}".format(expression.attribute.name)
+ return out
+ if isinstance(expression, ast.FunctionReference):
+ args = serialize_call_arguments(expression.arguments)
+ return "{}{}".format(expression.id.name, args)
+ if isinstance(expression, ast.SelectExpression):
+ out = "{} ->".format(
+ serialize_expression(expression.selector))
+ for variant in expression.variants:
+ out += serialize_variant(variant)
+ return "{}\n".format(out)
+ if isinstance(expression, ast.Placeable):
+ return serialize_placeable(expression)
+ raise Exception('Unknown expression type: {}'.format(type(expression)))
+
+
+def serialize_variant(variant):
+ return "\n{}[{}]{}".format(
+ " *" if variant.default else " ",
+ serialize_variant_key(variant.key),
+ indent_except_first_line(serialize_pattern(variant.value))
+ )
+
+
+def serialize_call_arguments(expr):
+ positional = ", ".join(
+ serialize_expression(arg) for arg in expr.positional)
+ named = ", ".join(
+ serialize_named_argument(arg) for arg in expr.named)
+ if len(expr.positional) > 0 and len(expr.named) > 0:
+ return '({}, {})'.format(positional, named)
+ return '({})'.format(positional or named)
+
+
+def serialize_named_argument(arg):
+ return "{}: {}".format(
+ arg.name.name,
+ serialize_expression(arg.value)
+ )
+
+
+def serialize_variant_key(key):
+ if isinstance(key, ast.Identifier):
+ return key.name
+ if isinstance(key, ast.NumberLiteral):
+ return key.value
+ raise Exception('Unknown variant key type: {}'.format(type(key)))
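+
+
+# Illustrative sketch, not part of upstream python-fluent: serialize a
+# programmatically built Resource back to Fluent text.
+if __name__ == '__main__':
+    resource = ast.Resource([
+        ast.Message(
+            ast.Identifier('hello'),
+            ast.Pattern([ast.TextElement('Hello, world!')]),
+        ),
+    ])
+    print(FluentSerializer().serialize(resource))  # hello = Hello, world!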
diff --git a/third_party/python/fluent.syntax/fluent/syntax/stream.py b/third_party/python/fluent.syntax/fluent/syntax/stream.py
new file mode 100644
index 0000000000..1f3852c8c2
--- /dev/null
+++ b/third_party/python/fluent.syntax/fluent/syntax/stream.py
@@ -0,0 +1,282 @@
+from __future__ import unicode_literals
+from .errors import ParseError
+
+
+class ParserStream(object):
+ def __init__(self, string):
+ self.string = string
+ self.index = 0
+ self.peek_offset = 0
+
+ def get(self, offset):
+ try:
+ return self.string[offset]
+ except IndexError:
+ return None
+
+ def char_at(self, offset):
+ # When the cursor is at CRLF, return LF but don't move the cursor. The
+ # cursor still points to the EOL position, which in this case is the
+ # beginning of the compound CRLF sequence. This ensures slices of
+ # [inclusive, exclusive) continue to work properly.
+ if self.get(offset) == '\r' \
+ and self.get(offset + 1) == '\n':
+ return '\n'
+
+ return self.get(offset)
+
+ @property
+ def current_char(self):
+ return self.char_at(self.index)
+
+ @property
+ def current_peek(self):
+ return self.char_at(self.index + self.peek_offset)
+
+ def next(self):
+ self.peek_offset = 0
+ # Skip over CRLF as if it was a single character.
+ if self.get(self.index) == '\r' \
+ and self.get(self.index + 1) == '\n':
+ self.index += 1
+ self.index += 1
+ return self.get(self.index)
+
+ def peek(self):
+ # Skip over CRLF as if it was a single character.
+ if self.get(self.index + self.peek_offset) == '\r' \
+ and self.get(self.index + self.peek_offset + 1) == '\n':
+ self.peek_offset += 1
+ self.peek_offset += 1
+ return self.get(self.index + self.peek_offset)
+
+ def reset_peek(self, offset=0):
+ self.peek_offset = offset
+
+ def skip_to_peek(self):
+ self.index += self.peek_offset
+ self.peek_offset = 0
+
+
+EOL = '\n'
+EOF = None
+SPECIAL_LINE_START_CHARS = ('}', '.', '[', '*')
+
+
+class FluentParserStream(ParserStream):
+
+ def peek_blank_inline(self):
+ start = self.index + self.peek_offset
+ while self.current_peek == ' ':
+ self.peek()
+ return self.string[start:self.index + self.peek_offset]
+
+ def skip_blank_inline(self):
+ blank = self.peek_blank_inline()
+ self.skip_to_peek()
+ return blank
+
+ def peek_blank_block(self):
+ blank = ""
+ while True:
+ line_start = self.peek_offset
+ self.peek_blank_inline()
+
+ if self.current_peek == EOL:
+ blank += EOL
+ self.peek()
+ continue
+
+ if self.current_peek is EOF:
+ # Treat the blank line at EOF as a blank block.
+ return blank
+
+ # Any other char; reset to column 1 on this line.
+ self.reset_peek(line_start)
+ return blank
+
+ def skip_blank_block(self):
+ blank = self.peek_blank_block()
+ self.skip_to_peek()
+ return blank
+
+ def peek_blank(self):
+ while self.current_peek in (" ", EOL):
+ self.peek()
+
+ def skip_blank(self):
+ self.peek_blank()
+ self.skip_to_peek()
+
+ def expect_char(self, ch):
+ if self.current_char == ch:
+ self.next()
+ return True
+
+ raise ParseError('E0003', ch)
+
+ def expect_line_end(self):
+ if self.current_char is EOF:
+ # EOF is a valid line end in Fluent.
+ return True
+
+ if self.current_char == EOL:
+ self.next()
+ return True
+
+ # Unicode Character 'SYMBOL FOR NEWLINE' (U+2424)
+ raise ParseError('E0003', '\u2424')
+
+ def take_char(self, f):
+ ch = self.current_char
+ if ch is EOF:
+ return EOF
+ if f(ch):
+ self.next()
+ return ch
+ return False
+
+ def is_char_id_start(self, ch):
+ if ch is EOF:
+ return False
+
+ cc = ord(ch)
+ return (cc >= 97 and cc <= 122) or \
+ (cc >= 65 and cc <= 90)
+
+ def is_identifier_start(self):
+ return self.is_char_id_start(self.current_peek)
+
+ def is_number_start(self):
+ ch = self.peek() if self.current_char == '-' else self.current_char
+ if ch is EOF:
+ self.reset_peek()
+ return False
+
+ cc = ord(ch)
+ is_digit = cc >= 48 and cc <= 57
+ self.reset_peek()
+ return is_digit
+
+ def is_char_pattern_continuation(self, ch):
+ if ch is EOF:
+ return False
+
+ return ch not in SPECIAL_LINE_START_CHARS
+
+ def is_value_start(self):
+ # Inline Patterns may start with any char.
+ return self.current_peek is not EOF and self.current_peek != EOL
+
+ def is_value_continuation(self):
+ column1 = self.peek_offset
+ self.peek_blank_inline()
+
+ if self.current_peek == '{':
+ self.reset_peek(column1)
+ return True
+
+ if self.peek_offset - column1 == 0:
+ return False
+
+ if self.is_char_pattern_continuation(self.current_peek):
+ self.reset_peek(column1)
+ return True
+
+ return False
+
+ # -1 - any
+ # 0 - comment
+ # 1 - group comment
+ # 2 - resource comment
+ def is_next_line_comment(self, level=-1):
+ if self.current_peek != EOL:
+ return False
+
+ i = 0
+
+ while (i <= level or (level == -1 and i < 3)):
+ if self.peek() != '#':
+ if i <= level and level != -1:
+ self.reset_peek()
+ return False
+ break
+ i += 1
+
+ # The first char after #, ## or ###.
+ if self.peek() in (' ', EOL):
+ self.reset_peek()
+ return True
+
+ self.reset_peek()
+ return False
+
+ def is_variant_start(self):
+ current_peek_offset = self.peek_offset
+ if self.current_peek == '*':
+ self.peek()
+ if self.current_peek == '[' and self.peek() != '[':
+ self.reset_peek(current_peek_offset)
+ return True
+
+ self.reset_peek(current_peek_offset)
+ return False
+
+ def is_attribute_start(self):
+ return self.current_peek == '.'
+
+ def skip_to_next_entry_start(self, junk_start):
+ last_newline = self.string.rfind(EOL, 0, self.index)
+ if junk_start < last_newline:
+ # Last seen newline is _after_ the junk start. It's safe to rewind
+ # without the risk of resuming at the same broken entry.
+ self.index = last_newline
+
+ while self.current_char:
+ # We're only interested in beginnings of line.
+ if self.current_char != EOL:
+ self.next()
+ continue
+
+ # Break if the first char in this line looks like an entry start.
+ first = self.next()
+ if self.is_char_id_start(first) or first == '-' or first == '#':
+ break
+
+ # Syntax 0.4 compatibility
+ peek = self.peek()
+ self.reset_peek()
+ if (first, peek) == ('/', '/') or (first, peek) == ('[', '['):
+ break
+
+ def take_id_start(self):
+ if self.is_char_id_start(self.current_char):
+ ret = self.current_char
+ self.next()
+ return ret
+
+ raise ParseError('E0004', 'a-zA-Z')
+
+ def take_id_char(self):
+ def closure(ch):
+ cc = ord(ch)
+ return ((cc >= 97 and cc <= 122) or
+ (cc >= 65 and cc <= 90) or
+ (cc >= 48 and cc <= 57) or
+ cc == 95 or cc == 45)
+ return self.take_char(closure)
+
+ def take_digit(self):
+ def closure(ch):
+ cc = ord(ch)
+ return (cc >= 48 and cc <= 57)
+ return self.take_char(closure)
+
+ def take_hex_digit(self):
+ def closure(ch):
+ cc = ord(ch)
+ return (
+ (cc >= 48 and cc <= 57) # 0-9
+ or (cc >= 65 and cc <= 70) # A-F
+ or (cc >= 97 and cc <= 102)) # a-f
+ return self.take_char(closure)
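+
+
+# Illustrative sketch, not part of upstream python-fluent: the stream folds a
+# CRLF pair into a single logical EOL character, as described in char_at().
+if __name__ == '__main__':
+    ps = FluentParserStream('key = value\r\n')
+    while ps.current_char and ps.current_char != EOL:
+        ps.next()
+    print(ps.current_char == EOL)  # expected: True
+    ps.next()
+    print(ps.current_char)  # expected: None (EOF)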
diff --git a/third_party/python/fluent.syntax/fluent/syntax/visitor.py b/third_party/python/fluent.syntax/fluent/syntax/visitor.py
new file mode 100644
index 0000000000..491a79597c
--- /dev/null
+++ b/third_party/python/fluent.syntax/fluent/syntax/visitor.py
@@ -0,0 +1,65 @@
+# coding=utf-8
+from __future__ import unicode_literals, absolute_import
+
+from .ast import BaseNode
+
+
+class Visitor(object):
+ '''Read-only visitor pattern.
+
+ Subclass this to gather information from an AST.
+    To generally define which nodes not to descend into, overload
+ `generic_visit`.
+ To handle specific node types, add methods like `visit_Pattern`.
+ If you want to still descend into the children of the node, call
+ `generic_visit` of the superclass.
+ '''
+ def visit(self, node):
+ if isinstance(node, list):
+ for child in node:
+ self.visit(child)
+ return
+ if not isinstance(node, BaseNode):
+ return
+ nodename = type(node).__name__
+ visit = getattr(self, 'visit_{}'.format(nodename), self.generic_visit)
+ visit(node)
+
+ def generic_visit(self, node):
+ for propname, propvalue in vars(node).items():
+ self.visit(propvalue)
+
+
+class Transformer(Visitor):
+ '''In-place AST Transformer pattern.
+
+ Subclass this to create an in-place modified variant
+ of the given AST.
+ If you need to keep the original AST around, pass
+ a `node.clone()` to the transformer.
+ '''
+ def visit(self, node):
+ if not isinstance(node, BaseNode):
+ return node
+
+ nodename = type(node).__name__
+ visit = getattr(self, 'visit_{}'.format(nodename), self.generic_visit)
+ return visit(node)
+
+ def generic_visit(self, node):
+ for propname, propvalue in vars(node).items():
+ if isinstance(propvalue, list):
+ new_vals = []
+ for child in propvalue:
+ new_val = self.visit(child)
+ if new_val is not None:
+ new_vals.append(new_val)
+ # in-place manipulation
+ propvalue[:] = new_vals
+ elif isinstance(propvalue, BaseNode):
+ new_val = self.visit(propvalue)
+ if new_val is None:
+ delattr(node, propname)
+ else:
+ setattr(node, propname, new_val)
+ return node
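+
+
+# Illustrative sketch, not part of upstream python-fluent: a minimal Visitor
+# subclass that counts Message nodes in a parsed Resource, as suggested by the
+# Visitor docstring. Run with `python -m fluent.syntax.visitor`.
+if __name__ == '__main__':
+    from .parser import FluentParser
+
+    class MessageCounter(Visitor):
+        def __init__(self):
+            self.count = 0
+
+        def visit_Message(self, node):
+            self.count += 1
+            # Keep descending into the message's value and attributes.
+            self.generic_visit(node)
+
+    counter = MessageCounter()
+    counter.visit(FluentParser().parse('hello = Hello\nbye = Bye\n'))
+    print(counter.count)  # expected: 2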
diff --git a/third_party/python/fluent.syntax/setup.cfg b/third_party/python/fluent.syntax/setup.cfg
new file mode 100644
index 0000000000..4fb7b37a15
--- /dev/null
+++ b/third_party/python/fluent.syntax/setup.cfg
@@ -0,0 +1,19 @@
+[metadata]
+version = 0.18.1
+
+[bdist_wheel]
+universal = 1
+
+[flake8]
+exclude = .tox
+max-line-length = 120
+
+[isort]
+line_length = 120
+skip_glob = .tox
+not_skip = __init__.py
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/fluent.syntax/setup.py b/third_party/python/fluent.syntax/setup.py
new file mode 100755
index 0000000000..411e912f52
--- /dev/null
+++ b/third_party/python/fluent.syntax/setup.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+from setuptools import setup
+import os
+
+this_directory = os.path.abspath(os.path.dirname(__file__))
+with open(os.path.join(this_directory, 'README.rst'), 'rb') as f:
+ long_description = f.read().decode('utf-8')
+
+setup(name='fluent.syntax',
+ description='Localization library for expressive translations.',
+ long_description=long_description,
+ long_description_content_type='text/x-rst',
+ author='Mozilla',
+ author_email='l10n-drivers@mozilla.org',
+ license='APL 2',
+ url='https://github.com/projectfluent/python-fluent',
+ keywords=['fluent', 'localization', 'l10n'],
+ classifiers=[
+ 'Development Status :: 3 - Alpha',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3.5',
+ ],
+ packages=['fluent', 'fluent.syntax'],
+ # These should also be duplicated in tox.ini and /.github/workflow/fluent.syntax.yml
+ tests_require=['six'],
+ test_suite='tests.syntax'
+ )
diff --git a/third_party/python/funcsigs/CHANGELOG b/third_party/python/funcsigs/CHANGELOG
new file mode 100644
index 0000000000..e1366d2668
--- /dev/null
+++ b/third_party/python/funcsigs/CHANGELOG
@@ -0,0 +1,24 @@
+Changelog
+---------
+
+0.5
+```
+
+* Fix binding with self as a kwarg. (Robert Collins #14)
+
+0.4 (2013-12-20)
+````````````````
+* Fix unbound methods getting their first parameter curried
+* Publish Python wheel packages
+
+0.3 (2013-05-29)
+````````````````
+* Fix annotation formatting of builtin types on Python 2.x
+
+0.2 (2012-01-07)
+````````````````
+* PyPy compatibility
+
+0.1 (2012-01-06)
+````````````````
+* Initial release
diff --git a/third_party/python/funcsigs/LICENSE b/third_party/python/funcsigs/LICENSE
new file mode 100644
index 0000000000..3e563d6fbd
--- /dev/null
+++ b/third_party/python/funcsigs/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2013 Aaron Iles
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/python/funcsigs/MANIFEST.in b/third_party/python/funcsigs/MANIFEST.in
new file mode 100644
index 0000000000..f0abb42f04
--- /dev/null
+++ b/third_party/python/funcsigs/MANIFEST.in
@@ -0,0 +1,7 @@
+recursive-include docs *
+recursive-include tests *.py
+include *.py
+include CHANGELOG
+include LICENSE
+include MANIFEST.in
+include README.rst
diff --git a/third_party/python/funcsigs/PKG-INFO b/third_party/python/funcsigs/PKG-INFO
new file mode 100644
index 0000000000..e262a8d1b9
--- /dev/null
+++ b/third_party/python/funcsigs/PKG-INFO
@@ -0,0 +1,378 @@
+Metadata-Version: 1.1
+Name: funcsigs
+Version: 1.0.2
+Summary: Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+
+Home-page: http://funcsigs.readthedocs.org
+Author: Testing Cabal
+Author-email: testing-in-python@lists.idyll.org
+License: ASL
+Description: .. funcsigs documentation master file, created by
+ sphinx-quickstart on Fri Apr 20 20:27:52 2012.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+ Introducing funcsigs
+ ====================
+
+ The Funcsigs Package
+ --------------------
+
+ ``funcsigs`` is a backport of the `PEP 362`_ function signature features from
+ Python 3.3's `inspect`_ module. The backport is compatible with Python 2.6, 2.7
+ as well as 3.3 and up. 3.2 was supported by version 0.4, but with setuptools and
+ pip no longer supporting 3.2, we cannot make any statement about 3.2
+ compatibility.
+
+ Compatibility
+ `````````````
+
+ The ``funcsigs`` backport has been tested against:
+
+ * CPython 2.6
+ * CPython 2.7
+ * CPython 3.3
+ * CPython 3.4
+ * CPython 3.5
+ * CPython nightlies
+    * PyPy and PyPy3 (currently failing CI)
+
+ Continuous integration testing is provided by `Travis CI`_.
+
+ Under Python 2.x there is a compatibility issue when a function is assigned to
+ the ``__wrapped__`` property of a class after it has been constructed.
+        Similarly, under PyPy, directly passing the ``__call__`` method of a
+        builtin is also a compatibility issue. Otherwise the functionality is
+        believed to be uniform between Python 2 and Python 3.
+
+ Issues
+ ``````
+
+ Source code for ``funcsigs`` is hosted on `GitHub`_. Any bug reports or feature
+ requests can be made using GitHub's `issues system`_. |build_status| |coverage|
+
+ Example
+ -------
+
+ To obtain a `Signature` object, pass the target function to the
+ ``funcsigs.signature`` function.
+
+ .. code-block:: python
+
+ >>> from funcsigs import signature
+ >>> def foo(a, b=None, *args, **kwargs):
+ ... pass
+ ...
+ >>> sig = signature(foo)
+ >>> sig
+ <funcsigs.Signature object at 0x...>
+ >>> sig.parameters
+ OrderedDict([('a', <Parameter at 0x... 'a'>), ('b', <Parameter at 0x... 'b'>), ('args', <Parameter at 0x... 'args'>), ('kwargs', <Parameter at 0x... 'kwargs'>)])
+ >>> sig.return_annotation
+ <class 'funcsigs._empty'>
+
+ Introspecting callables with the Signature object
+ -------------------------------------------------
+
+ .. note::
+
+ This section of documentation is a direct reproduction of the Python
+ standard library documentation for the inspect module.
+
+ The Signature object represents the call signature of a callable object and its
+ return annotation. To retrieve a Signature object, use the :func:`signature`
+ function.
+
+ .. function:: signature(callable)
+
+ Return a :class:`Signature` object for the given ``callable``::
+
+ >>> from funcsigs import signature
+ >>> def foo(a, *, b:int, **kwargs):
+ ... pass
+
+ >>> sig = signature(foo)
+
+ >>> str(sig)
+ '(a, *, b:int, **kwargs)'
+
+ >>> str(sig.parameters['b'])
+ 'b:int'
+
+ >>> sig.parameters['b'].annotation
+ <class 'int'>
+
+ Accepts a wide range of python callables, from plain functions and classes to
+ :func:`functools.partial` objects.
+
+ .. note::
+
+ Some callables may not be introspectable in certain implementations of
+ Python. For example, in CPython, built-in functions defined in C provide
+ no metadata about their arguments.
+
+
+ .. class:: Signature
+
+ A Signature object represents the call signature of a function and its return
+ annotation. For each parameter accepted by the function it stores a
+ :class:`Parameter` object in its :attr:`parameters` collection.
+
+ Signature objects are *immutable*. Use :meth:`Signature.replace` to make a
+ modified copy.
+
+ .. attribute:: Signature.empty
+
+ A special class-level marker to specify absence of a return annotation.
+
+ .. attribute:: Signature.parameters
+
+ An ordered mapping of parameters' names to the corresponding
+ :class:`Parameter` objects.
+
+ .. attribute:: Signature.return_annotation
+
+ The "return" annotation for the callable. If the callable has no "return"
+ annotation, this attribute is set to :attr:`Signature.empty`.
+
+ .. method:: Signature.bind(*args, **kwargs)
+
+ Create a mapping from positional and keyword arguments to parameters.
+ Returns :class:`BoundArguments` if ``*args`` and ``**kwargs`` match the
+ signature, or raises a :exc:`TypeError`.
+
+ .. method:: Signature.bind_partial(*args, **kwargs)
+
+ Works the same way as :meth:`Signature.bind`, but allows the omission of
+ some required arguments (mimics :func:`functools.partial` behavior.)
+ Returns :class:`BoundArguments`, or raises a :exc:`TypeError` if the
+ passed arguments do not match the signature.
+
+ .. method:: Signature.replace(*[, parameters][, return_annotation])
+
+ Create a new Signature instance based on the instance replace was invoked
+ on. It is possible to pass different ``parameters`` and/or
+ ``return_annotation`` to override the corresponding properties of the base
+ signature. To remove return_annotation from the copied Signature, pass in
+ :attr:`Signature.empty`.
+
+ ::
+
+ >>> def test(a, b):
+ ... pass
+ >>> sig = signature(test)
+ >>> new_sig = sig.replace(return_annotation="new return anno")
+ >>> str(new_sig)
+ "(a, b) -> 'new return anno'"
+
+
+ .. class:: Parameter
+
+ Parameter objects are *immutable*. Instead of modifying a Parameter object,
+ you can use :meth:`Parameter.replace` to create a modified copy.
+
+ .. attribute:: Parameter.empty
+
+ A special class-level marker to specify absence of default values and
+ annotations.
+
+ .. attribute:: Parameter.name
+
+ The name of the parameter as a string. Must be a valid python identifier
+ name (with the exception of ``POSITIONAL_ONLY`` parameters, which can have
+ it set to ``None``).
+
+ .. attribute:: Parameter.default
+
+ The default value for the parameter. If the parameter has no default
+ value, this attribute is set to :attr:`Parameter.empty`.
+
+ .. attribute:: Parameter.annotation
+
+ The annotation for the parameter. If the parameter has no annotation,
+ this attribute is set to :attr:`Parameter.empty`.
+
+ .. attribute:: Parameter.kind
+
+ Describes how argument values are bound to the parameter. Possible values
+ (accessible via :class:`Parameter`, like ``Parameter.KEYWORD_ONLY``):
+
+ +------------------------+----------------------------------------------+
+ | Name | Meaning |
+ +========================+==============================================+
+ | *POSITIONAL_ONLY* | Value must be supplied as a positional |
+ | | argument. |
+ | | |
+ | | Python has no explicit syntax for defining |
+ | | positional-only parameters, but many built-in|
+ | | and extension module functions (especially |
+ | | those that accept only one or two parameters)|
+ | | accept them. |
+ +------------------------+----------------------------------------------+
+ | *POSITIONAL_OR_KEYWORD*| Value may be supplied as either a keyword or |
+ | | positional argument (this is the standard |
+ | | binding behaviour for functions implemented |
+ | | in Python.) |
+ +------------------------+----------------------------------------------+
+ | *VAR_POSITIONAL* | A tuple of positional arguments that aren't |
+ | | bound to any other parameter. This |
+ | | corresponds to a ``*args`` parameter in a |
+ | | Python function definition. |
+ +------------------------+----------------------------------------------+
+ | *KEYWORD_ONLY* | Value must be supplied as a keyword argument.|
+ | | Keyword only parameters are those which |
+ | | appear after a ``*`` or ``*args`` entry in a |
+ | | Python function definition. |
+ +------------------------+----------------------------------------------+
+ | *VAR_KEYWORD* | A dict of keyword arguments that aren't bound|
+ | | to any other parameter. This corresponds to a|
+ | | ``**kwargs`` parameter in a Python function |
+ | | definition. |
+ +------------------------+----------------------------------------------+
+
+ Example: print all keyword-only arguments without default values::
+
+ >>> def foo(a, b, *, c, d=10):
+ ... pass
+
+ >>> sig = signature(foo)
+ >>> for param in sig.parameters.values():
+ ... if (param.kind == param.KEYWORD_ONLY and
+ ... param.default is param.empty):
+ ... print('Parameter:', param)
+ Parameter: c
+
+ .. method:: Parameter.replace(*[, name][, kind][, default][, annotation])
+
+              Create a new Parameter instance based on the instance replace was invoked
+ on. To override a :class:`Parameter` attribute, pass the corresponding
+ argument. To remove a default value or/and an annotation from a
+ Parameter, pass :attr:`Parameter.empty`.
+
+ ::
+
+ >>> from funcsigs import Parameter
+ >>> param = Parameter('foo', Parameter.KEYWORD_ONLY, default=42)
+ >>> str(param)
+ 'foo=42'
+
+ >>> str(param.replace()) # Will create a shallow copy of 'param'
+ 'foo=42'
+
+ >>> str(param.replace(default=Parameter.empty, annotation='spam'))
+ "foo:'spam'"
+
+
+ .. class:: BoundArguments
+
+ Result of a :meth:`Signature.bind` or :meth:`Signature.bind_partial` call.
+ Holds the mapping of arguments to the function's parameters.
+
+ .. attribute:: BoundArguments.arguments
+
+ An ordered, mutable mapping (:class:`collections.OrderedDict`) of
+ parameters' names to arguments' values. Contains only explicitly bound
+ arguments. Changes in :attr:`arguments` will reflect in :attr:`args` and
+ :attr:`kwargs`.
+
+ Should be used in conjunction with :attr:`Signature.parameters` for any
+ argument processing purposes.
+
+ .. note::
+
+ Arguments for which :meth:`Signature.bind` or
+ :meth:`Signature.bind_partial` relied on a default value are skipped.
+ However, if needed, it is easy to include them.
+
+ ::
+
+ >>> def foo(a, b=10):
+ ... pass
+
+ >>> sig = signature(foo)
+ >>> ba = sig.bind(5)
+
+ >>> ba.args, ba.kwargs
+ ((5,), {})
+
+ >>> for param in sig.parameters.values():
+ ... if param.name not in ba.arguments:
+ ... ba.arguments[param.name] = param.default
+
+ >>> ba.args, ba.kwargs
+ ((5, 10), {})
+
+
+ .. attribute:: BoundArguments.args
+
+ A tuple of positional arguments values. Dynamically computed from the
+ :attr:`arguments` attribute.
+
+ .. attribute:: BoundArguments.kwargs
+
+ A dict of keyword arguments values. Dynamically computed from the
+ :attr:`arguments` attribute.
+
+ The :attr:`args` and :attr:`kwargs` properties can be used to invoke
+ functions::
+
+ def test(a, *, b):
+ ...
+
+ sig = signature(test)
+ ba = sig.bind(10, b=20)
+ test(*ba.args, **ba.kwargs)
+
+
+ .. seealso::
+
+ :pep:`362` - Function Signature Object.
+ The detailed specification, implementation details and examples.
+
+ Copyright
+ ---------
+
+ *funcsigs* is a derived work of CPython under the terms of the `PSF License
+ Agreement`_. The original CPython inspect module, its unit tests and
+ documentation are the copyright of the Python Software Foundation. The derived
+ work is distributed under the `Apache License Version 2.0`_.
+
+ .. _PSF License Agreement: http://docs.python.org/3/license.html#terms-and-conditions-for-accessing-or-otherwise-using-python
+ .. _Apache License Version 2.0: http://opensource.org/licenses/Apache-2.0
+ .. _GitHub: https://github.com/testing-cabal/funcsigs
+ .. _Travis CI: http://travis-ci.org/
+ .. _Read The Docs: http://funcsigs.readthedocs.org/
+ .. _PEP 362: http://www.python.org/dev/peps/pep-0362/
+ .. _inspect: http://docs.python.org/3/library/inspect.html#introspecting-callables-with-the-signature-object
+ .. _issues system: https://github.com/testing-cabal/funcsigs/issues
+
+ .. |build_status| image:: https://secure.travis-ci.org/aliles/funcsigs.png?branch=master
+ :target: http://travis-ci.org/#!/aliles/funcsigs
+ :alt: Current build status
+
+ .. |coverage| image:: https://coveralls.io/repos/aliles/funcsigs/badge.png?branch=master
+ :target: https://coveralls.io/r/aliles/funcsigs?branch=master
+ :alt: Coverage status
+
+ .. |pypi_version| image:: https://pypip.in/v/funcsigs/badge.png
+ :target: https://crate.io/packages/funcsigs/
+ :alt: Latest PyPI version
+
+
+
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff --git a/third_party/python/funcsigs/README.rst b/third_party/python/funcsigs/README.rst
new file mode 100644
index 0000000000..5fbca27e6e
--- /dev/null
+++ b/third_party/python/funcsigs/README.rst
@@ -0,0 +1,353 @@
+.. funcsigs documentation master file, created by
+ sphinx-quickstart on Fri Apr 20 20:27:52 2012.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Introducing funcsigs
+====================
+
+The Funcsigs Package
+--------------------
+
+``funcsigs`` is a backport of the `PEP 362`_ function signature features from
+Python 3.3's `inspect`_ module. The backport is compatible with Python 2.6 and
+2.7 as well as 3.3 and up. Python 3.2 was supported by version 0.4, but since
+setuptools and pip no longer support 3.2, we cannot make any statement about
+3.2 compatibility.
+
+Compatibility
+`````````````
+
+The ``funcsigs`` backport has been tested against:
+
+* CPython 2.6
+* CPython 2.7
+* CPython 3.3
+* CPython 3.4
+* CPython 3.5
+* CPython nightlies
+* PyPy and PyPy3 (currently failing CI)
+
+Continuous integration testing is provided by `Travis CI`_.
+
+Under Python 2.x there is a compatibility issue when a function is assigned to
+the ``__wrapped__`` property of a class after it has been constructed.
+Similarly, under PyPy, directly passing the ``__call__`` method of a builtin
+is also a compatibility issue. Otherwise the functionality is believed to be
+uniform between Python 2 and Python 3.
+
+Issues
+``````
+
+Source code for ``funcsigs`` is hosted on `GitHub`_. Any bug reports or feature
+requests can be made using GitHub's `issues system`_. |build_status| |coverage|
+
+Example
+-------
+
+To obtain a `Signature` object, pass the target function to the
+``funcsigs.signature`` function.
+
+.. code-block:: python
+
+ >>> from funcsigs import signature
+ >>> def foo(a, b=None, *args, **kwargs):
+ ... pass
+ ...
+ >>> sig = signature(foo)
+ >>> sig
+ <funcsigs.Signature object at 0x...>
+ >>> sig.parameters
+ OrderedDict([('a', <Parameter at 0x... 'a'>), ('b', <Parameter at 0x... 'b'>), ('args', <Parameter at 0x... 'args'>), ('kwargs', <Parameter at 0x... 'kwargs'>)])
+ >>> sig.return_annotation
+ <class 'funcsigs._empty'>
+
+Introspecting callables with the Signature object
+-------------------------------------------------
+
+.. note::
+
+ This section of documentation is a direct reproduction of the Python
+ standard library documentation for the inspect module.
+
+The Signature object represents the call signature of a callable object and its
+return annotation. To retrieve a Signature object, use the :func:`signature`
+function.
+
+.. function:: signature(callable)
+
+ Return a :class:`Signature` object for the given ``callable``::
+
+ >>> from funcsigs import signature
+ >>> def foo(a, *, b:int, **kwargs):
+ ... pass
+
+ >>> sig = signature(foo)
+
+ >>> str(sig)
+ '(a, *, b:int, **kwargs)'
+
+ >>> str(sig.parameters['b'])
+ 'b:int'
+
+ >>> sig.parameters['b'].annotation
+ <class 'int'>
+
+ Accepts a wide range of python callables, from plain functions and classes to
+ :func:`functools.partial` objects.
+
+ .. note::
+
+ Some callables may not be introspectable in certain implementations of
+ Python. For example, in CPython, built-in functions defined in C provide
+ no metadata about their arguments.
+
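+   For instance, a :func:`functools.partial` object reports only the
+   parameters that remain unbound (a minimal sketch using a made-up ``add``
+   function)::
+
+      >>> import functools
+      >>> from funcsigs import signature
+      >>> def add(a, b, c):
+      ...     pass
+      >>> str(signature(functools.partial(add, 1)))
+      '(b, c)'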
+
+.. class:: Signature
+
+ A Signature object represents the call signature of a function and its return
+ annotation. For each parameter accepted by the function it stores a
+ :class:`Parameter` object in its :attr:`parameters` collection.
+
+ Signature objects are *immutable*. Use :meth:`Signature.replace` to make a
+ modified copy.
+
+ .. attribute:: Signature.empty
+
+ A special class-level marker to specify absence of a return annotation.
+
+ .. attribute:: Signature.parameters
+
+ An ordered mapping of parameters' names to the corresponding
+ :class:`Parameter` objects.
+
+ .. attribute:: Signature.return_annotation
+
+ The "return" annotation for the callable. If the callable has no "return"
+ annotation, this attribute is set to :attr:`Signature.empty`.
+
+ .. method:: Signature.bind(*args, **kwargs)
+
+ Create a mapping from positional and keyword arguments to parameters.
+ Returns :class:`BoundArguments` if ``*args`` and ``**kwargs`` match the
+ signature, or raises a :exc:`TypeError`.
+
+ .. method:: Signature.bind_partial(*args, **kwargs)
+
+ Works the same way as :meth:`Signature.bind`, but allows the omission of
+ some required arguments (mimics :func:`functools.partial` behavior.)
+ Returns :class:`BoundArguments`, or raises a :exc:`TypeError` if the
+ passed arguments do not match the signature.
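+
+      For example (a minimal sketch using a made-up ``pow_`` function),
+      binding only the first of two required arguments::
+
+         >>> from funcsigs import signature
+         >>> def pow_(base, exponent):
+         ...     pass
+         >>> ba = signature(pow_).bind_partial(2)
+         >>> ba.arguments
+         OrderedDict([('base', 2)])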
+
+ .. method:: Signature.replace(*[, parameters][, return_annotation])
+
+ Create a new Signature instance based on the instance replace was invoked
+ on. It is possible to pass different ``parameters`` and/or
+ ``return_annotation`` to override the corresponding properties of the base
+ signature. To remove return_annotation from the copied Signature, pass in
+ :attr:`Signature.empty`.
+
+ ::
+
+ >>> def test(a, b):
+ ... pass
+ >>> sig = signature(test)
+ >>> new_sig = sig.replace(return_annotation="new return anno")
+ >>> str(new_sig)
+ "(a, b) -> 'new return anno'"
+
+
+.. class:: Parameter
+
+ Parameter objects are *immutable*. Instead of modifying a Parameter object,
+ you can use :meth:`Parameter.replace` to create a modified copy.
+
+ .. attribute:: Parameter.empty
+
+ A special class-level marker to specify absence of default values and
+ annotations.
+
+ .. attribute:: Parameter.name
+
+ The name of the parameter as a string. Must be a valid python identifier
+ name (with the exception of ``POSITIONAL_ONLY`` parameters, which can have
+ it set to ``None``).
+
+ .. attribute:: Parameter.default
+
+ The default value for the parameter. If the parameter has no default
+ value, this attribute is set to :attr:`Parameter.empty`.
+
+ .. attribute:: Parameter.annotation
+
+ The annotation for the parameter. If the parameter has no annotation,
+ this attribute is set to :attr:`Parameter.empty`.
+
+ .. attribute:: Parameter.kind
+
+ Describes how argument values are bound to the parameter. Possible values
+ (accessible via :class:`Parameter`, like ``Parameter.KEYWORD_ONLY``):
+
+ +------------------------+----------------------------------------------+
+ | Name | Meaning |
+ +========================+==============================================+
+ | *POSITIONAL_ONLY* | Value must be supplied as a positional |
+ | | argument. |
+ | | |
+ | | Python has no explicit syntax for defining |
+ | | positional-only parameters, but many built-in|
+ | | and extension module functions (especially |
+ | | those that accept only one or two parameters)|
+ | | accept them. |
+ +------------------------+----------------------------------------------+
+ | *POSITIONAL_OR_KEYWORD*| Value may be supplied as either a keyword or |
+ | | positional argument (this is the standard |
+ | | binding behaviour for functions implemented |
+ | | in Python.) |
+ +------------------------+----------------------------------------------+
+ | *VAR_POSITIONAL* | A tuple of positional arguments that aren't |
+ | | bound to any other parameter. This |
+ | | corresponds to a ``*args`` parameter in a |
+ | | Python function definition. |
+ +------------------------+----------------------------------------------+
+ | *KEYWORD_ONLY* | Value must be supplied as a keyword argument.|
+ | | Keyword only parameters are those which |
+ | | appear after a ``*`` or ``*args`` entry in a |
+ | | Python function definition. |
+ +------------------------+----------------------------------------------+
+ | *VAR_KEYWORD* | A dict of keyword arguments that aren't bound|
+ | | to any other parameter. This corresponds to a|
+ | | ``**kwargs`` parameter in a Python function |
+ | | definition. |
+ +------------------------+----------------------------------------------+
+
+ Example: print all keyword-only arguments without default values::
+
+ >>> def foo(a, b, *, c, d=10):
+ ... pass
+
+ >>> sig = signature(foo)
+ >>> for param in sig.parameters.values():
+ ... if (param.kind == param.KEYWORD_ONLY and
+ ... param.default is param.empty):
+ ... print('Parameter:', param)
+ Parameter: c
+
+ .. method:: Parameter.replace(*[, name][, kind][, default][, annotation])
+
+      Create a new Parameter instance based on the instance replace was invoked
+ on. To override a :class:`Parameter` attribute, pass the corresponding
+ argument. To remove a default value or/and an annotation from a
+ Parameter, pass :attr:`Parameter.empty`.
+
+ ::
+
+ >>> from funcsigs import Parameter
+ >>> param = Parameter('foo', Parameter.KEYWORD_ONLY, default=42)
+ >>> str(param)
+ 'foo=42'
+
+ >>> str(param.replace()) # Will create a shallow copy of 'param'
+ 'foo=42'
+
+ >>> str(param.replace(default=Parameter.empty, annotation='spam'))
+ "foo:'spam'"
+
+
+.. class:: BoundArguments
+
+ Result of a :meth:`Signature.bind` or :meth:`Signature.bind_partial` call.
+ Holds the mapping of arguments to the function's parameters.
+
+ .. attribute:: BoundArguments.arguments
+
+ An ordered, mutable mapping (:class:`collections.OrderedDict`) of
+ parameters' names to arguments' values. Contains only explicitly bound
+ arguments. Changes in :attr:`arguments` will reflect in :attr:`args` and
+ :attr:`kwargs`.
+
+ Should be used in conjunction with :attr:`Signature.parameters` for any
+ argument processing purposes.
+
+ .. note::
+
+ Arguments for which :meth:`Signature.bind` or
+ :meth:`Signature.bind_partial` relied on a default value are skipped.
+ However, if needed, it is easy to include them.
+
+ ::
+
+ >>> def foo(a, b=10):
+ ... pass
+
+ >>> sig = signature(foo)
+ >>> ba = sig.bind(5)
+
+ >>> ba.args, ba.kwargs
+ ((5,), {})
+
+ >>> for param in sig.parameters.values():
+ ... if param.name not in ba.arguments:
+ ... ba.arguments[param.name] = param.default
+
+ >>> ba.args, ba.kwargs
+ ((5, 10), {})
+
+
+ .. attribute:: BoundArguments.args
+
+ A tuple of positional arguments values. Dynamically computed from the
+ :attr:`arguments` attribute.
+
+ .. attribute:: BoundArguments.kwargs
+
+ A dict of keyword arguments values. Dynamically computed from the
+ :attr:`arguments` attribute.
+
+ The :attr:`args` and :attr:`kwargs` properties can be used to invoke
+ functions::
+
+ def test(a, *, b):
+ ...
+
+ sig = signature(test)
+ ba = sig.bind(10, b=20)
+ test(*ba.args, **ba.kwargs)
+
+
+.. seealso::
+
+ :pep:`362` - Function Signature Object.
+ The detailed specification, implementation details and examples.
+
+Copyright
+---------
+
+*funcsigs* is a derived work of CPython under the terms of the `PSF License
+Agreement`_. The original CPython inspect module, its unit tests and
+documentation are the copyright of the Python Software Foundation. The derived
+work is distributed under the `Apache License Version 2.0`_.
+
+.. _PSF License Agreement: http://docs.python.org/3/license.html#terms-and-conditions-for-accessing-or-otherwise-using-python
+.. _Apache License Version 2.0: http://opensource.org/licenses/Apache-2.0
+.. _GitHub: https://github.com/testing-cabal/funcsigs
+.. _Travis CI: http://travis-ci.org/
+.. _Read The Docs: http://funcsigs.readthedocs.org/
+.. _PEP 362: http://www.python.org/dev/peps/pep-0362/
+.. _inspect: http://docs.python.org/3/library/inspect.html#introspecting-callables-with-the-signature-object
+.. _issues system: https://github.com/testing-cabal/funcsigs/issues
+
+.. |build_status| image:: https://secure.travis-ci.org/aliles/funcsigs.png?branch=master
+ :target: http://travis-ci.org/#!/aliles/funcsigs
+ :alt: Current build status
+
+.. |coverage| image:: https://coveralls.io/repos/aliles/funcsigs/badge.png?branch=master
+ :target: https://coveralls.io/r/aliles/funcsigs?branch=master
+ :alt: Coverage status
+
+.. |pypi_version| image:: https://pypip.in/v/funcsigs/badge.png
+ :target: https://crate.io/packages/funcsigs/
+ :alt: Latest PyPI version
+
+
diff --git a/third_party/python/funcsigs/docs/Makefile b/third_party/python/funcsigs/docs/Makefile
new file mode 100644
index 0000000000..f7ab3d16b4
--- /dev/null
+++ b/third_party/python/funcsigs/docs/Makefile
@@ -0,0 +1,153 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/funcsigs.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/funcsigs.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/funcsigs"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/funcsigs"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/third_party/python/funcsigs/docs/_templates/page.html b/third_party/python/funcsigs/docs/_templates/page.html
new file mode 100644
index 0000000000..5e1e00bcaf
--- /dev/null
+++ b/third_party/python/funcsigs/docs/_templates/page.html
@@ -0,0 +1,9 @@
+{% extends "!page.html" %}
+{% block extrahead %}
+ <a href="https://github.com/aliles/funcsigs">
+ <img style="position: absolute; top: 0; right: 0; border: 0;"
+ src="https://s3.amazonaws.com/github/ribbons/forkme_right_red_aa0000.png"
+ alt="Fork me on GitHub">
+ </a>
+ {{ super() }}
+{% endblock %}
diff --git a/third_party/python/funcsigs/docs/conf.py b/third_party/python/funcsigs/docs/conf.py
new file mode 100644
index 0000000000..c6e4194cc0
--- /dev/null
+++ b/third_party/python/funcsigs/docs/conf.py
@@ -0,0 +1,251 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# funcsigs documentation build configuration file, created by
+# sphinx-quickstart on Fri Apr 20 20:27:52 2012.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('..'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'funcsigs'
+copyright = '2013, Aaron Iles'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+from funcsigs import __version__
+version = '.'.join(__version__.split('.')[:2])
+# The full version, including alpha/beta/rc tags.
+release = __version__
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'agogo'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'funcsigsdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'funcsigs.tex', 'funcsigs Documentation',
+ 'Aaron Iles', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'funcsigs', 'funcsigs Documentation',
+ ['Aaron Iles'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'funcsigs', 'funcsigs Documentation',
+ 'Aaron Iles', 'funcsigs', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+ 'python3': ('http://docs.python.org/py3k', None),
+ 'python': ('http://docs.python.org/', None)
+}
diff --git a/third_party/python/funcsigs/docs/index.rst b/third_party/python/funcsigs/docs/index.rst
new file mode 100644
index 0000000000..5fbca27e6e
--- /dev/null
+++ b/third_party/python/funcsigs/docs/index.rst
@@ -0,0 +1,353 @@
+.. funcsigs documentation master file, created by
+ sphinx-quickstart on Fri Apr 20 20:27:52 2012.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Introducing funcsigs
+====================
+
+The Funcsigs Package
+--------------------
+
+``funcsigs`` is a backport of the `PEP 362`_ function signature features from
+Python 3.3's `inspect`_ module. The backport is compatible with Python 2.6 and
+2.7 as well as 3.3 and up. Python 3.2 was supported by version 0.4, but since
+setuptools and pip no longer support 3.2, we cannot make any statement about
+3.2 compatibility.
+
+Compatibility
+`````````````
+
+The ``funcsigs`` backport has been tested against:
+
+* CPython 2.6
+* CPython 2.7
+* CPython 3.3
+* CPython 3.4
+* CPython 3.5
+* CPython nightlies
+* PyPy and PyPy3 (currently failing CI)
+
+Continuous integration testing is provided by `Travis CI`_.
+
+Under Python 2.x there is a compatibility issue when a function is assigned to
+the ``__wrapped__`` property of a class after it has been constructed.
+Similarly, under PyPy, directly passing the ``__call__`` method of a builtin
+is also a compatibility issue. Otherwise the functionality is believed to be
+uniform between Python 2 and Python 3.
+
+Issues
+``````
+
+Source code for ``funcsigs`` is hosted on `GitHub`_. Any bug reports or feature
+requests can be made using GitHub's `issues system`_. |build_status| |coverage|
+
+Example
+-------
+
+To obtain a `Signature` object, pass the target function to the
+``funcsigs.signature`` function.
+
+.. code-block:: python
+
+ >>> from funcsigs import signature
+ >>> def foo(a, b=None, *args, **kwargs):
+ ... pass
+ ...
+ >>> sig = signature(foo)
+ >>> sig
+ <funcsigs.Signature object at 0x...>
+ >>> sig.parameters
+ OrderedDict([('a', <Parameter at 0x... 'a'>), ('b', <Parameter at 0x... 'b'>), ('args', <Parameter at 0x... 'args'>), ('kwargs', <Parameter at 0x... 'kwargs'>)])
+ >>> sig.return_annotation
+ <class 'funcsigs._empty'>
+
+Introspecting callables with the Signature object
+-------------------------------------------------
+
+.. note::
+
+ This section of documentation is a direct reproduction of the Python
+ standard library documentation for the inspect module.
+
+The Signature object represents the call signature of a callable object and its
+return annotation. To retrieve a Signature object, use the :func:`signature`
+function.
+
+.. function:: signature(callable)
+
+ Return a :class:`Signature` object for the given ``callable``::
+
+ >>> from funcsigs import signature
+ >>> def foo(a, *, b:int, **kwargs):
+ ... pass
+
+ >>> sig = signature(foo)
+
+ >>> str(sig)
+ '(a, *, b:int, **kwargs)'
+
+ >>> str(sig.parameters['b'])
+ 'b:int'
+
+ >>> sig.parameters['b'].annotation
+ <class 'int'>
+
+ Accepts a wide range of python callables, from plain functions and classes to
+ :func:`functools.partial` objects.
+
+ .. note::
+
+ Some callables may not be introspectable in certain implementations of
+ Python. For example, in CPython, built-in functions defined in C provide
+ no metadata about their arguments.
+
+
+.. class:: Signature
+
+ A Signature object represents the call signature of a function and its return
+ annotation. For each parameter accepted by the function it stores a
+ :class:`Parameter` object in its :attr:`parameters` collection.
+
+ Signature objects are *immutable*. Use :meth:`Signature.replace` to make a
+ modified copy.
+
+ .. attribute:: Signature.empty
+
+ A special class-level marker to specify absence of a return annotation.
+
+ .. attribute:: Signature.parameters
+
+ An ordered mapping of parameters' names to the corresponding
+ :class:`Parameter` objects.
+
+ .. attribute:: Signature.return_annotation
+
+ The "return" annotation for the callable. If the callable has no "return"
+ annotation, this attribute is set to :attr:`Signature.empty`.
+
+ .. method:: Signature.bind(*args, **kwargs)
+
+ Create a mapping from positional and keyword arguments to parameters.
+ Returns :class:`BoundArguments` if ``*args`` and ``**kwargs`` match the
+ signature, or raises a :exc:`TypeError`.
+
+ .. method:: Signature.bind_partial(*args, **kwargs)
+
+ Works the same way as :meth:`Signature.bind`, but allows the omission of
+ some required arguments (mimics :func:`functools.partial` behavior.)
+ Returns :class:`BoundArguments`, or raises a :exc:`TypeError` if the
+ passed arguments do not match the signature.
+
+ .. method:: Signature.replace(*[, parameters][, return_annotation])
+
+ Create a new Signature instance based on the instance replace was invoked
+ on. It is possible to pass different ``parameters`` and/or
+ ``return_annotation`` to override the corresponding properties of the base
+ signature. To remove return_annotation from the copied Signature, pass in
+ :attr:`Signature.empty`.
+
+ ::
+
+ >>> def test(a, b):
+ ... pass
+ >>> sig = signature(test)
+ >>> new_sig = sig.replace(return_annotation="new return anno")
+ >>> str(new_sig)
+ "(a, b) -> 'new return anno'"
+
+
+.. class:: Parameter
+
+ Parameter objects are *immutable*. Instead of modifying a Parameter object,
+ you can use :meth:`Parameter.replace` to create a modified copy.
+
+ .. attribute:: Parameter.empty
+
+ A special class-level marker to specify absence of default values and
+ annotations.
+
+ .. attribute:: Parameter.name
+
+ The name of the parameter as a string. Must be a valid python identifier
+ name (with the exception of ``POSITIONAL_ONLY`` parameters, which can have
+ it set to ``None``).
+
+ .. attribute:: Parameter.default
+
+ The default value for the parameter. If the parameter has no default
+ value, this attribute is set to :attr:`Parameter.empty`.
+
+ .. attribute:: Parameter.annotation
+
+ The annotation for the parameter. If the parameter has no annotation,
+ this attribute is set to :attr:`Parameter.empty`.
+
+ .. attribute:: Parameter.kind
+
+ Describes how argument values are bound to the parameter. Possible values
+ (accessible via :class:`Parameter`, like ``Parameter.KEYWORD_ONLY``):
+
+ +------------------------+----------------------------------------------+
+ | Name | Meaning |
+ +========================+==============================================+
+ | *POSITIONAL_ONLY* | Value must be supplied as a positional |
+ | | argument. |
+ | | |
+ | | Python has no explicit syntax for defining |
+ | | positional-only parameters, but many built-in|
+ | | and extension module functions (especially |
+ | | those that accept only one or two parameters)|
+ | | accept them. |
+ +------------------------+----------------------------------------------+
+ | *POSITIONAL_OR_KEYWORD*| Value may be supplied as either a keyword or |
+ | | positional argument (this is the standard |
+ | | binding behaviour for functions implemented |
+ | | in Python.) |
+ +------------------------+----------------------------------------------+
+ | *VAR_POSITIONAL* | A tuple of positional arguments that aren't |
+ | | bound to any other parameter. This |
+ | | corresponds to a ``*args`` parameter in a |
+ | | Python function definition. |
+ +------------------------+----------------------------------------------+
+ | *KEYWORD_ONLY* | Value must be supplied as a keyword argument.|
+ | | Keyword only parameters are those which |
+ | | appear after a ``*`` or ``*args`` entry in a |
+ | | Python function definition. |
+ +------------------------+----------------------------------------------+
+ | *VAR_KEYWORD* | A dict of keyword arguments that aren't bound|
+ | | to any other parameter. This corresponds to a|
+ | | ``**kwargs`` parameter in a Python function |
+ | | definition. |
+ +------------------------+----------------------------------------------+
+
+ Example: print all keyword-only arguments without default values::
+
+ >>> def foo(a, b, *, c, d=10):
+ ... pass
+
+ >>> sig = signature(foo)
+ >>> for param in sig.parameters.values():
+ ... if (param.kind == param.KEYWORD_ONLY and
+ ... param.default is param.empty):
+ ... print('Parameter:', param)
+ Parameter: c
+
+ .. method:: Parameter.replace(*[, name][, kind][, default][, annotation])
+
+      Create a new Parameter instance based on the instance replace was invoked
+ on. To override a :class:`Parameter` attribute, pass the corresponding
+ argument. To remove a default value or/and an annotation from a
+ Parameter, pass :attr:`Parameter.empty`.
+
+ ::
+
+ >>> from funcsigs import Parameter
+ >>> param = Parameter('foo', Parameter.KEYWORD_ONLY, default=42)
+ >>> str(param)
+ 'foo=42'
+
+ >>> str(param.replace()) # Will create a shallow copy of 'param'
+ 'foo=42'
+
+ >>> str(param.replace(default=Parameter.empty, annotation='spam'))
+ "foo:'spam'"
+
+
+.. class:: BoundArguments
+
+ Result of a :meth:`Signature.bind` or :meth:`Signature.bind_partial` call.
+ Holds the mapping of arguments to the function's parameters.
+
+ .. attribute:: BoundArguments.arguments
+
+ An ordered, mutable mapping (:class:`collections.OrderedDict`) of
+ parameters' names to arguments' values. Contains only explicitly bound
+ arguments. Changes in :attr:`arguments` will reflect in :attr:`args` and
+ :attr:`kwargs`.
+
+ Should be used in conjunction with :attr:`Signature.parameters` for any
+ argument processing purposes.
+
+ .. note::
+
+ Arguments for which :meth:`Signature.bind` or
+ :meth:`Signature.bind_partial` relied on a default value are skipped.
+ However, if needed, it is easy to include them.
+
+ ::
+
+ >>> def foo(a, b=10):
+ ... pass
+
+ >>> sig = signature(foo)
+ >>> ba = sig.bind(5)
+
+ >>> ba.args, ba.kwargs
+ ((5,), {})
+
+ >>> for param in sig.parameters.values():
+ ... if param.name not in ba.arguments:
+ ... ba.arguments[param.name] = param.default
+
+ >>> ba.args, ba.kwargs
+ ((5, 10), {})
+
+
+ .. attribute:: BoundArguments.args
+
+ A tuple of positional arguments values. Dynamically computed from the
+ :attr:`arguments` attribute.
+
+ .. attribute:: BoundArguments.kwargs
+
+ A dict of keyword arguments values. Dynamically computed from the
+ :attr:`arguments` attribute.
+
+ The :attr:`args` and :attr:`kwargs` properties can be used to invoke
+ functions::
+
+ def test(a, *, b):
+ ...
+
+ sig = signature(test)
+ ba = sig.bind(10, b=20)
+ test(*ba.args, **ba.kwargs)
+
+
+.. seealso::
+
+ :pep:`362` - Function Signature Object.
+ The detailed specification, implementation details and examples.
+
+Copyright
+---------
+
+*funcsigs* is a derived work of CPython under the terms of the `PSF License
+Agreement`_. The original CPython inspect module, its unit tests and
+documentation are the copyright of the Python Software Foundation. The derived
+work is distributed under the `Apache License Version 2.0`_.
+
+.. _PSF License Agreement: http://docs.python.org/3/license.html#terms-and-conditions-for-accessing-or-otherwise-using-python
+.. _Apache License Version 2.0: http://opensource.org/licenses/Apache-2.0
+.. _GitHub: https://github.com/testing-cabal/funcsigs
+.. _Travis CI: http://travis-ci.org/
+.. _Read The Docs: http://funcsigs.readthedocs.org/
+.. _PEP 362: http://www.python.org/dev/peps/pep-0362/
+.. _inspect: http://docs.python.org/3/library/inspect.html#introspecting-callables-with-the-signature-object
+.. _issues system: https://github.com/testing-cabal/funcsigs/issues
+
+.. |build_status| image:: https://secure.travis-ci.org/aliles/funcsigs.png?branch=master
+ :target: http://travis-ci.org/#!/aliles/funcsigs
+ :alt: Current build status
+
+.. |coverage| image:: https://coveralls.io/repos/aliles/funcsigs/badge.png?branch=master
+ :target: https://coveralls.io/r/aliles/funcsigs?branch=master
+ :alt: Coverage status
+
+.. |pypi_version| image:: https://pypip.in/v/funcsigs/badge.png
+ :target: https://crate.io/packages/funcsigs/
+ :alt: Latest PyPI version
+
+
diff --git a/third_party/python/funcsigs/funcsigs/__init__.py b/third_party/python/funcsigs/funcsigs/__init__.py
new file mode 100644
index 0000000000..5f5378b42a
--- /dev/null
+++ b/third_party/python/funcsigs/funcsigs/__init__.py
@@ -0,0 +1,829 @@
+# Copyright 2001-2013 Python Software Foundation; All Rights Reserved
+"""Function signature objects for callables
+
+Backport of Python 3.3's function signature tools from the inspect module,
+modified to be compatible with Python 2.6, 2.7 and 3.3+.
+"""
+from __future__ import absolute_import, division, print_function
+import itertools
+import functools
+import re
+import types
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ from ordereddict import OrderedDict
+
+from funcsigs.version import __version__
+
+__all__ = ['BoundArguments', 'Parameter', 'Signature', 'signature']
+
+
+_WrapperDescriptor = type(type.__call__)
+_MethodWrapper = type(all.__call__)
+
+_NonUserDefinedCallables = (_WrapperDescriptor,
+ _MethodWrapper,
+ types.BuiltinFunctionType)
+
+
+def formatannotation(annotation, base_module=None):
+ if isinstance(annotation, type):
+ if annotation.__module__ in ('builtins', '__builtin__', base_module):
+ return annotation.__name__
+ return annotation.__module__+'.'+annotation.__name__
+ return repr(annotation)
+
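+# Illustrative behaviour (informal sketch): builtins are shown bare, e.g.
+# formatannotation(int) == 'int'; other types are qualified, e.g.
+# formatannotation(decimal.Decimal) == 'decimal.Decimal'; non-type
+# annotations fall back to repr().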
+
+def _get_user_defined_method(cls, method_name, *nested):
+ try:
+ if cls is type:
+ return
+ meth = getattr(cls, method_name)
+ for name in nested:
+ meth = getattr(meth, name, meth)
+ except AttributeError:
+ return
+ else:
+ if not isinstance(meth, _NonUserDefinedCallables):
+ # Once '__signature__' will be added to 'C'-level
+ # callables, this check won't be necessary
+ return meth
+
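+# For example, _get_user_defined_method(C, '__call__') returns the method only
+# when it is implemented in Python; C-level slot wrappers (e.g. type.__call__)
+# are filtered out via _NonUserDefinedCallables and None is returned instead.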
+
+def signature(obj):
+ '''Get a signature object for the passed callable.'''
+
+ if not callable(obj):
+ raise TypeError('{0!r} is not a callable object'.format(obj))
+
+ if isinstance(obj, types.MethodType):
+ sig = signature(obj.__func__)
+ if obj.__self__ is None:
+ # Unbound method - preserve as-is.
+ return sig
+ else:
+ # Bound method. Eat self - if we can.
+ params = tuple(sig.parameters.values())
+
+ if not params or params[0].kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
+ raise ValueError('invalid method signature')
+
+ kind = params[0].kind
+ if kind in (_POSITIONAL_OR_KEYWORD, _POSITIONAL_ONLY):
+ # Drop first parameter:
+ # '(p1, p2[, ...])' -> '(p2[, ...])'
+ params = params[1:]
+ else:
+ if kind is not _VAR_POSITIONAL:
+ # Unless we add a new parameter type we never
+ # get here
+ raise ValueError('invalid argument type')
+ # It's a var-positional parameter.
+ # Do nothing. '(*args[, ...])' -> '(*args[, ...])'
+
+ return sig.replace(parameters=params)
+
+ try:
+ sig = obj.__signature__
+ except AttributeError:
+ pass
+ else:
+ if sig is not None:
+ return sig
+
+ try:
+ # Was this function wrapped by a decorator?
+ wrapped = obj.__wrapped__
+ except AttributeError:
+ pass
+ else:
+ return signature(wrapped)
+
+ if isinstance(obj, types.FunctionType):
+ return Signature.from_function(obj)
+
+ if isinstance(obj, functools.partial):
+ sig = signature(obj.func)
+
+ new_params = OrderedDict(sig.parameters.items())
+
+ partial_args = obj.args or ()
+ partial_keywords = obj.keywords or {}
+ try:
+ ba = sig.bind_partial(*partial_args, **partial_keywords)
+ except TypeError as ex:
+ msg = 'partial object {0!r} has incorrect arguments'.format(obj)
+ raise ValueError(msg)
+
+ for arg_name, arg_value in ba.arguments.items():
+ param = new_params[arg_name]
+ if arg_name in partial_keywords:
+ # We set a new default value, because the following code
+ # is correct:
+ #
+ # >>> def foo(a): print(a)
+ # >>> print(partial(partial(foo, a=10), a=20)())
+ # 20
+ # >>> print(partial(partial(foo, a=10), a=20)(a=30))
+ # 30
+ #
+ # So, with 'partial' objects, passing a keyword argument is
+ # like setting a new default value for the corresponding
+ # parameter
+ #
+ # We also mark this parameter with '_partial_kwarg'
+ # flag. Later, in '_bind', the 'default' value of this
+ # parameter will be added to 'kwargs', to simulate
+ # the 'functools.partial' real call.
+ new_params[arg_name] = param.replace(default=arg_value,
+ _partial_kwarg=True)
+
+ elif (param.kind not in (_VAR_KEYWORD, _VAR_POSITIONAL) and
+ not param._partial_kwarg):
+ new_params.pop(arg_name)
+
+ return sig.replace(parameters=new_params.values())
+
+ sig = None
+ if isinstance(obj, type):
+ # obj is a class or a metaclass
+
+ # First, let's see if it has an overloaded __call__ defined
+ # in its metaclass
+ call = _get_user_defined_method(type(obj), '__call__')
+ if call is not None:
+ sig = signature(call)
+ else:
+ # Now we check if the 'obj' class has a '__new__' method
+ new = _get_user_defined_method(obj, '__new__')
+ if new is not None:
+ sig = signature(new)
+ else:
+ # Finally, we should have at least __init__ implemented
+ init = _get_user_defined_method(obj, '__init__')
+ if init is not None:
+ sig = signature(init)
+ elif not isinstance(obj, _NonUserDefinedCallables):
+ # An object with __call__
+ # We also check that the 'obj' is not an instance of
+ # _WrapperDescriptor or _MethodWrapper to avoid
+ # infinite recursion (and even potential segfault)
+ call = _get_user_defined_method(type(obj), '__call__', 'im_func')
+ if call is not None:
+ sig = signature(call)
+
+ if sig is not None:
+ # For classes and objects we skip the first parameter of their
+ # __call__, __new__, or __init__ methods
+ return sig.replace(parameters=tuple(sig.parameters.values())[1:])
+
+ if isinstance(obj, types.BuiltinFunctionType):
+ # Raise a nicer error message for builtins
+ msg = 'no signature found for builtin function {0!r}'.format(obj)
+ raise ValueError(msg)
+
+ raise ValueError('callable {0!r} is not supported by signature'.format(obj))
+
+
+class _void(object):
+ '''A private marker - used in Parameter & Signature'''
+
+
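+# Sentinel meaning "no value was supplied"; exposed publicly as
+# Signature.empty and Parameter.empty for missing defaults, annotations and
+# return annotations.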
+class _empty(object):
+ pass
+
+
+class _ParameterKind(int):
+ def __new__(self, *args, **kwargs):
+ obj = int.__new__(self, *args)
+ obj._name = kwargs['name']
+ return obj
+
+ def __str__(self):
+ return self._name
+
+ def __repr__(self):
+ return '<_ParameterKind: {0!r}>'.format(self._name)
+
+
+_POSITIONAL_ONLY = _ParameterKind(0, name='POSITIONAL_ONLY')
+_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name='POSITIONAL_OR_KEYWORD')
+_VAR_POSITIONAL = _ParameterKind(2, name='VAR_POSITIONAL')
+_KEYWORD_ONLY = _ParameterKind(3, name='KEYWORD_ONLY')
+_VAR_KEYWORD = _ParameterKind(4, name='VAR_KEYWORD')
+
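+# Informal sketch of how the kind markers behave: each is an int subclass, so
+# str(_KEYWORD_ONLY) == 'KEYWORD_ONLY', int(_KEYWORD_ONLY) == 3, and kinds
+# compare and order as plain integers (_POSITIONAL_ONLY < _VAR_KEYWORD).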
+
+class Parameter(object):
+ '''Represents a parameter in a function signature.
+
+ Has the following public attributes:
+
+ * name : str
+ The name of the parameter as a string.
+ * default : object
+ The default value for the parameter if specified. If the
+ parameter has no default value, this attribute is not set.
+ * annotation
+ The annotation for the parameter if specified. If the
+ parameter has no annotation, this attribute is not set.
+ * kind : str
+ Describes how argument values are bound to the parameter.
+ Possible values: `Parameter.POSITIONAL_ONLY`,
+ `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`,
+ `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`.
+ '''
+
+ __slots__ = ('_name', '_kind', '_default', '_annotation', '_partial_kwarg')
+
+ POSITIONAL_ONLY = _POSITIONAL_ONLY
+ POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD
+ VAR_POSITIONAL = _VAR_POSITIONAL
+ KEYWORD_ONLY = _KEYWORD_ONLY
+ VAR_KEYWORD = _VAR_KEYWORD
+
+ empty = _empty
+
+ def __init__(self, name, kind, default=_empty, annotation=_empty,
+ _partial_kwarg=False):
+
+ if kind not in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD,
+ _VAR_POSITIONAL, _KEYWORD_ONLY, _VAR_KEYWORD):
+ raise ValueError("invalid value for 'Parameter.kind' attribute")
+ self._kind = kind
+
+ if default is not _empty:
+ if kind in (_VAR_POSITIONAL, _VAR_KEYWORD):
+ msg = '{0} parameters cannot have default values'.format(kind)
+ raise ValueError(msg)
+ self._default = default
+ self._annotation = annotation
+
+ if name is None:
+ if kind != _POSITIONAL_ONLY:
+ raise ValueError("None is not a valid name for a "
+ "non-positional-only parameter")
+ self._name = name
+ else:
+ name = str(name)
+ if kind != _POSITIONAL_ONLY and not re.match(r'[a-z_]\w*$', name, re.I):
+ msg = '{0!r} is not a valid parameter name'.format(name)
+ raise ValueError(msg)
+ self._name = name
+
+ self._partial_kwarg = _partial_kwarg
+
+ @property
+ def name(self):
+ return self._name
+
+ @property
+ def default(self):
+ return self._default
+
+ @property
+ def annotation(self):
+ return self._annotation
+
+ @property
+ def kind(self):
+ return self._kind
+
+ def replace(self, name=_void, kind=_void, annotation=_void,
+ default=_void, _partial_kwarg=_void):
+ '''Creates a customized copy of the Parameter.'''
+
+ if name is _void:
+ name = self._name
+
+ if kind is _void:
+ kind = self._kind
+
+ if annotation is _void:
+ annotation = self._annotation
+
+ if default is _void:
+ default = self._default
+
+ if _partial_kwarg is _void:
+ _partial_kwarg = self._partial_kwarg
+
+ return type(self)(name, kind, default=default, annotation=annotation,
+ _partial_kwarg=_partial_kwarg)
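+
+    # Illustrative sketch: replace() leaves the original untouched and returns
+    # a new Parameter with only the overridden fields changed, e.g.
+    #   >>> p = Parameter('a', Parameter.POSITIONAL_OR_KEYWORD)
+    #   >>> p.replace(default=42).default
+    #   42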
+
+ def __str__(self):
+ kind = self.kind
+
+ formatted = self._name
+ if kind == _POSITIONAL_ONLY:
+ if formatted is None:
+ formatted = ''
+ formatted = '<{0}>'.format(formatted)
+
+ # Add annotation and default value
+ if self._annotation is not _empty:
+ formatted = '{0}:{1}'.format(formatted,
+ formatannotation(self._annotation))
+
+ if self._default is not _empty:
+ formatted = '{0}={1}'.format(formatted, repr(self._default))
+
+ if kind == _VAR_POSITIONAL:
+ formatted = '*' + formatted
+ elif kind == _VAR_KEYWORD:
+ formatted = '**' + formatted
+
+ return formatted
+
+ def __repr__(self):
+ return '<{0} at {1:#x} {2!r}>'.format(self.__class__.__name__,
+ id(self), self.name)
+
+ def __hash__(self):
+ msg = "unhashable type: '{0}'".format(self.__class__.__name__)
+ raise TypeError(msg)
+
+ def __eq__(self, other):
+ return (issubclass(other.__class__, Parameter) and
+ self._name == other._name and
+ self._kind == other._kind and
+ self._default == other._default and
+ self._annotation == other._annotation)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
+class BoundArguments(object):
+ '''Result of `Signature.bind` call. Holds the mapping of arguments
+ to the function's parameters.
+
+ Has the following public attributes:
+
+ * arguments : OrderedDict
+ An ordered mutable mapping of parameters' names to arguments' values.
+ Does not contain arguments' default values.
+ * signature : Signature
+ The Signature object that created this instance.
+ * args : tuple
+ Tuple of positional arguments values.
+ * kwargs : dict
+ Dict of keyword arguments values.
+ '''
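+
+    # Illustrative sketch ('f' is a hypothetical function, not part of this
+    # module):
+    #   >>> def f(a, b=10, *args, **kwargs): pass
+    #   >>> ba = signature(f).bind(1, 2, 3, z=4)
+    #   >>> ba.args, ba.kwargs
+    #   ((1, 2, 3), {'z': 4})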
+
+ def __init__(self, signature, arguments):
+ self.arguments = arguments
+ self._signature = signature
+
+ @property
+ def signature(self):
+ return self._signature
+
+ @property
+ def args(self):
+ args = []
+ for param_name, param in self._signature.parameters.items():
+ if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
+ param._partial_kwarg):
+ # Keyword arguments mapped by 'functools.partial'
+ # (Parameter._partial_kwarg is True) are mapped
+ # in 'BoundArguments.kwargs', along with VAR_KEYWORD &
+ # KEYWORD_ONLY
+ break
+
+ try:
+ arg = self.arguments[param_name]
+ except KeyError:
+ # We're done here. Other arguments
+ # will be mapped in 'BoundArguments.kwargs'
+ break
+ else:
+ if param.kind == _VAR_POSITIONAL:
+ # *args
+ args.extend(arg)
+ else:
+ # plain argument
+ args.append(arg)
+
+ return tuple(args)
+
+ @property
+ def kwargs(self):
+ kwargs = {}
+ kwargs_started = False
+ for param_name, param in self._signature.parameters.items():
+ if not kwargs_started:
+ if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
+ param._partial_kwarg):
+ kwargs_started = True
+ else:
+ if param_name not in self.arguments:
+ kwargs_started = True
+ continue
+
+ if not kwargs_started:
+ continue
+
+ try:
+ arg = self.arguments[param_name]
+ except KeyError:
+ pass
+ else:
+ if param.kind == _VAR_KEYWORD:
+ # **kwargs
+ kwargs.update(arg)
+ else:
+ # plain keyword argument
+ kwargs[param_name] = arg
+
+ return kwargs
+
+ def __hash__(self):
+ msg = "unhashable type: '{0}'".format(self.__class__.__name__)
+ raise TypeError(msg)
+
+ def __eq__(self, other):
+ return (issubclass(other.__class__, BoundArguments) and
+ self.signature == other.signature and
+ self.arguments == other.arguments)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
+class Signature(object):
+ '''A Signature object represents the overall signature of a function.
+ It stores a Parameter object for each parameter accepted by the
+ function, as well as information specific to the function itself.
+
+ A Signature object has the following public attributes and methods:
+
+ * parameters : OrderedDict
+ An ordered mapping of parameters' names to the corresponding
+ Parameter objects (keyword-only arguments are in the same order
+ as listed in `code.co_varnames`).
+    * return_annotation : object
+        The annotation for the return type of the function if specified.
+        If the function has no annotation for its return type, this
+        attribute is set to `Signature.empty`.
+ * bind(*args, **kwargs) -> BoundArguments
+ Creates a mapping from positional and keyword arguments to
+ parameters.
+ * bind_partial(*args, **kwargs) -> BoundArguments
+ Creates a partial mapping from positional and keyword arguments
+        to parameters (simulating 'functools.partial' behavior).
+ '''
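+
+    # Illustrative sketch ('f' is a hypothetical function):
+    #   >>> def f(a, b=10, *args, **kwargs): pass
+    #   >>> str(signature(f))
+    #   '(a, b=10, *args, **kwargs)'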
+
+ __slots__ = ('_return_annotation', '_parameters')
+
+ _parameter_cls = Parameter
+ _bound_arguments_cls = BoundArguments
+
+ empty = _empty
+
+ def __init__(self, parameters=None, return_annotation=_empty,
+ __validate_parameters__=True):
+ '''Constructs Signature from the given list of Parameter
+ objects and 'return_annotation'. All arguments are optional.
+ '''
+
+ if parameters is None:
+ params = OrderedDict()
+ else:
+ if __validate_parameters__:
+ params = OrderedDict()
+ top_kind = _POSITIONAL_ONLY
+
+ for idx, param in enumerate(parameters):
+ kind = param.kind
+ if kind < top_kind:
+ msg = 'wrong parameter order: {0} before {1}'
+ msg = msg.format(top_kind, param.kind)
+ raise ValueError(msg)
+ else:
+ top_kind = kind
+
+ name = param.name
+ if name is None:
+ name = str(idx)
+ param = param.replace(name=name)
+
+ if name in params:
+ msg = 'duplicate parameter name: {0!r}'.format(name)
+ raise ValueError(msg)
+ params[name] = param
+ else:
+ params = OrderedDict(((param.name, param)
+ for param in parameters))
+
+ self._parameters = params
+ self._return_annotation = return_annotation
+
+ @classmethod
+ def from_function(cls, func):
+ '''Constructs Signature for the given python function'''
+
+ if not isinstance(func, types.FunctionType):
+ raise TypeError('{0!r} is not a Python function'.format(func))
+
+ Parameter = cls._parameter_cls
+
+ # Parameter information.
+ func_code = func.__code__
+ pos_count = func_code.co_argcount
+ arg_names = func_code.co_varnames
+ positional = tuple(arg_names[:pos_count])
+ keyword_only_count = getattr(func_code, 'co_kwonlyargcount', 0)
+ keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)]
+ annotations = getattr(func, '__annotations__', {})
+ defaults = func.__defaults__
+ kwdefaults = getattr(func, '__kwdefaults__', None)
+
+ if defaults:
+ pos_default_count = len(defaults)
+ else:
+ pos_default_count = 0
+
+ parameters = []
+
+ # Non-keyword-only parameters w/o defaults.
+ non_default_count = pos_count - pos_default_count
+ for name in positional[:non_default_count]:
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_POSITIONAL_OR_KEYWORD))
+
+ # ... w/ defaults.
+ for offset, name in enumerate(positional[non_default_count:]):
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_POSITIONAL_OR_KEYWORD,
+ default=defaults[offset]))
+
+        # *args (co_flags bit 0x04 is CO_VARARGS)
+ if func_code.co_flags & 0x04:
+ name = arg_names[pos_count + keyword_only_count]
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_VAR_POSITIONAL))
+
+ # Keyword-only parameters.
+ for name in keyword_only:
+ default = _empty
+ if kwdefaults is not None:
+ default = kwdefaults.get(name, _empty)
+
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_KEYWORD_ONLY,
+ default=default))
+        # **kwargs (co_flags bit 0x08 is CO_VARKEYWORDS)
+ if func_code.co_flags & 0x08:
+ index = pos_count + keyword_only_count
+ if func_code.co_flags & 0x04:
+ index += 1
+
+ name = arg_names[index]
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_VAR_KEYWORD))
+
+ return cls(parameters,
+ return_annotation=annotations.get('return', _empty),
+ __validate_parameters__=False)
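+
+    # Illustrative sketch (assumes a plain module-level function; other
+    # callables should go through the top-level signature() helper instead):
+    #   >>> def f(a, b=2, *args, **kw): pass
+    #   >>> str(Signature.from_function(f))
+    #   '(a, b=2, *args, **kw)'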
+
+ @property
+ def parameters(self):
+ try:
+ return types.MappingProxyType(self._parameters)
+ except AttributeError:
+ return OrderedDict(self._parameters.items())
+
+ @property
+ def return_annotation(self):
+ return self._return_annotation
+
+ def replace(self, parameters=_void, return_annotation=_void):
+ '''Creates a customized copy of the Signature.
+ Pass 'parameters' and/or 'return_annotation' arguments
+ to override them in the new copy.
+ '''
+
+ if parameters is _void:
+ parameters = self.parameters.values()
+
+ if return_annotation is _void:
+ return_annotation = self._return_annotation
+
+ return type(self)(parameters,
+ return_annotation=return_annotation)
+
+ def __hash__(self):
+ msg = "unhashable type: '{0}'".format(self.__class__.__name__)
+ raise TypeError(msg)
+
+ def __eq__(self, other):
+ if (not issubclass(type(other), Signature) or
+ self.return_annotation != other.return_annotation or
+ len(self.parameters) != len(other.parameters)):
+ return False
+
+ other_positions = dict((param, idx)
+ for idx, param in enumerate(other.parameters.keys()))
+
+ for idx, (param_name, param) in enumerate(self.parameters.items()):
+ if param.kind == _KEYWORD_ONLY:
+ try:
+ other_param = other.parameters[param_name]
+ except KeyError:
+ return False
+ else:
+ if param != other_param:
+ return False
+ else:
+ try:
+ other_idx = other_positions[param_name]
+ except KeyError:
+ return False
+ else:
+ if (idx != other_idx or
+ param != other.parameters[param_name]):
+ return False
+
+ return True
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def _bind(self, args, kwargs, partial=False):
+ '''Private method. Don't use directly.'''
+
+ arguments = OrderedDict()
+
+ parameters = iter(self.parameters.values())
+ parameters_ex = ()
+ arg_vals = iter(args)
+
+ if partial:
+ # Support for binding arguments to 'functools.partial' objects.
+ # See 'functools.partial' case in 'signature()' implementation
+ # for details.
+ for param_name, param in self.parameters.items():
+ if (param._partial_kwarg and param_name not in kwargs):
+ # Simulating 'functools.partial' behavior
+ kwargs[param_name] = param.default
+
+ while True:
+ # Let's iterate through the positional arguments and corresponding
+ # parameters
+ try:
+ arg_val = next(arg_vals)
+ except StopIteration:
+ # No more positional arguments
+ try:
+ param = next(parameters)
+ except StopIteration:
+ # No more parameters. That's it. Just need to check that
+ # we have no `kwargs` after this while loop
+ break
+ else:
+ if param.kind == _VAR_POSITIONAL:
+ # That's OK, just empty *args. Let's start parsing
+ # kwargs
+ break
+ elif param.name in kwargs:
+ if param.kind == _POSITIONAL_ONLY:
+ msg = '{arg!r} parameter is positional only, ' \
+ 'but was passed as a keyword'
+ msg = msg.format(arg=param.name)
+ raise TypeError(msg)
+ parameters_ex = (param,)
+ break
+ elif (param.kind == _VAR_KEYWORD or
+ param.default is not _empty):
+ # That's fine too - we have a default value for this
+                        # parameter. So, let's start parsing `kwargs`, starting
+ # with the current parameter
+ parameters_ex = (param,)
+ break
+ else:
+ if partial:
+ parameters_ex = (param,)
+ break
+ else:
+ msg = '{arg!r} parameter lacking default value'
+ msg = msg.format(arg=param.name)
+ raise TypeError(msg)
+ else:
+ # We have a positional argument to process
+ try:
+ param = next(parameters)
+ except StopIteration:
+ raise TypeError('too many positional arguments')
+ else:
+ if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
+ # Looks like we have no parameter for this positional
+ # argument
+ raise TypeError('too many positional arguments')
+
+ if param.kind == _VAR_POSITIONAL:
+ # We have an '*args'-like argument, let's fill it with
+ # all positional arguments we have left and move on to
+ # the next phase
+ values = [arg_val]
+ values.extend(arg_vals)
+ arguments[param.name] = tuple(values)
+ break
+
+ if param.name in kwargs:
+ raise TypeError('multiple values for argument '
+ '{arg!r}'.format(arg=param.name))
+
+ arguments[param.name] = arg_val
+
+ # Now, we iterate through the remaining parameters to process
+ # keyword arguments
+ kwargs_param = None
+ for param in itertools.chain(parameters_ex, parameters):
+ if param.kind == _POSITIONAL_ONLY:
+ # This should never happen in case of a properly built
+ # Signature object (but let's have this check here
+ # to ensure correct behaviour just in case)
+ raise TypeError('{arg!r} parameter is positional only, '
+ 'but was passed as a keyword'. \
+ format(arg=param.name))
+
+ if param.kind == _VAR_KEYWORD:
+ # Memorize that we have a '**kwargs'-like parameter
+ kwargs_param = param
+ continue
+
+ param_name = param.name
+ try:
+ arg_val = kwargs.pop(param_name)
+ except KeyError:
+ # We have no value for this parameter. It's fine though,
+ # if it has a default value, or it is an '*args'-like
+ # parameter, left alone by the processing of positional
+ # arguments.
+ if (not partial and param.kind != _VAR_POSITIONAL and
+ param.default is _empty):
+ raise TypeError('{arg!r} parameter lacking default value'. \
+ format(arg=param_name))
+
+ else:
+ arguments[param_name] = arg_val
+
+ if kwargs:
+ if kwargs_param is not None:
+ # Process our '**kwargs'-like parameter
+ arguments[kwargs_param.name] = kwargs
+ else:
+ raise TypeError('too many keyword arguments %r' % kwargs)
+
+ return self._bound_arguments_cls(self, arguments)
+
+ def bind(*args, **kwargs):
+ '''Get a BoundArguments object, that maps the passed `args`
+ and `kwargs` to the function's signature. Raises `TypeError`
+ if the passed arguments can not be bound.
+ '''
+ return args[0]._bind(args[1:], kwargs)
+
+ def bind_partial(self, *args, **kwargs):
+ '''Get a BoundArguments object, that partially maps the
+ passed `args` and `kwargs` to the function's signature.
+ Raises `TypeError` if the passed arguments can not be bound.
+ '''
+ return self._bind(args, kwargs, partial=True)
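+
+    # Illustrative sketch of the difference between the two methods: bind()
+    # insists on a value for every parameter without a default, while
+    # bind_partial() does not.
+    #   >>> sig = signature(lambda a, b=2: None)
+    #   >>> sig.bind(1).arguments
+    #   OrderedDict([('a', 1)])
+    #   >>> sig.bind_partial().arguments
+    #   OrderedDict()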
+
+ def __str__(self):
+ result = []
+ render_kw_only_separator = True
+ for idx, param in enumerate(self.parameters.values()):
+ formatted = str(param)
+
+ kind = param.kind
+ if kind == _VAR_POSITIONAL:
+ # OK, we have an '*args'-like parameter, so we won't need
+ # a '*' to separate keyword-only arguments
+ render_kw_only_separator = False
+ elif kind == _KEYWORD_ONLY and render_kw_only_separator:
+ # We have a keyword-only parameter to render and we haven't
+ # rendered an '*args'-like parameter before, so add a '*'
+ # separator to the parameters list ("foo(arg1, *, arg2)" case)
+ result.append('*')
+ # This condition should be only triggered once, so
+ # reset the flag
+ render_kw_only_separator = False
+
+ result.append(formatted)
+
+ rendered = '({0})'.format(', '.join(result))
+
+ if self.return_annotation is not _empty:
+ anno = formatannotation(self.return_annotation)
+ rendered += ' -> {0}'.format(anno)
+
+ return rendered
diff --git a/third_party/python/funcsigs/funcsigs/version.py b/third_party/python/funcsigs/funcsigs/version.py
new file mode 100644
index 0000000000..7863915fa5
--- /dev/null
+++ b/third_party/python/funcsigs/funcsigs/version.py
@@ -0,0 +1 @@
+__version__ = "1.0.2"
diff --git a/third_party/python/funcsigs/setup.cfg b/third_party/python/funcsigs/setup.cfg
new file mode 100644
index 0000000000..6c71b612d8
--- /dev/null
+++ b/third_party/python/funcsigs/setup.cfg
@@ -0,0 +1,8 @@
+[wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/third_party/python/funcsigs/setup.py b/third_party/python/funcsigs/setup.py
new file mode 100644
index 0000000000..f3696888f9
--- /dev/null
+++ b/third_party/python/funcsigs/setup.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+from setuptools import setup
+import re
+import sys
+
+def load_version(filename='funcsigs/version.py'):
+ "Parse a __version__ number from a source file"
+ with open(filename) as source:
+ text = source.read()
+ match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", text)
+ if not match:
+ msg = "Unable to find version number in {}".format(filename)
+ raise RuntimeError(msg)
+ version = match.group(1)
+ return version
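+
+# Illustrative note: with funcsigs/version.py containing __version__ = "1.0.2"
+# (as it does in this tree), load_version() returns the string "1.0.2".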
+
+
+setup(
+ name="funcsigs",
+ version=load_version(),
+ packages=['funcsigs'],
+ zip_safe=False,
+ author="Testing Cabal",
+ author_email="testing-in-python@lists.idyll.org",
+ url="http://funcsigs.readthedocs.org",
+ description="Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+",
+ long_description=open('README.rst').read(),
+ license="ASL",
+ extras_require = {
+ ':python_version<"2.7"': ['ordereddict'],
+ },
+ setup_requires = ["setuptools>=17.1"],
+ classifiers = [
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+ 'Topic :: Software Development :: Libraries :: Python Modules'
+ ],
+ tests_require = ['unittest2'],
+ test_suite = 'unittest2.collector',
+)
diff --git a/third_party/python/funcsigs/tests/__init__.py b/third_party/python/funcsigs/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/funcsigs/tests/__init__.py
diff --git a/third_party/python/funcsigs/tests/test_formatannotation.py b/third_party/python/funcsigs/tests/test_formatannotation.py
new file mode 100644
index 0000000000..4b98e6037d
--- /dev/null
+++ b/third_party/python/funcsigs/tests/test_formatannotation.py
@@ -0,0 +1,17 @@
+import funcsigs
+
+import unittest2 as unittest
+
+class TestFormatAnnotation(unittest.TestCase):
+ def test_string (self):
+ self.assertEqual(funcsigs.formatannotation("annotation"),
+ "'annotation'")
+
+ def test_builtin_type (self):
+ self.assertEqual(funcsigs.formatannotation(int),
+ "int")
+
+ def test_user_type (self):
+ class dummy (object): pass
+ self.assertEqual(funcsigs.formatannotation(dummy),
+ "tests.test_formatannotation.dummy")
diff --git a/third_party/python/funcsigs/tests/test_funcsigs.py b/third_party/python/funcsigs/tests/test_funcsigs.py
new file mode 100644
index 0000000000..a7b9cca767
--- /dev/null
+++ b/third_party/python/funcsigs/tests/test_funcsigs.py
@@ -0,0 +1,91 @@
+import unittest2 as unittest
+
+import doctest
+import sys
+
+import funcsigs as inspect
+
+
+class TestFunctionSignatures(unittest.TestCase):
+
+ @staticmethod
+ def signature(func):
+ sig = inspect.signature(func)
+ return (tuple((param.name,
+ (Ellipsis if param.default is param.empty else param.default),
+ (Ellipsis if param.annotation is param.empty
+ else param.annotation),
+ str(param.kind).lower())
+ for param in sig.parameters.values()),
+ (Ellipsis if sig.return_annotation is sig.empty
+ else sig.return_annotation))
+
+ def test_zero_arguments(self):
+ def test():
+ pass
+ self.assertEqual(self.signature(test),
+ ((), Ellipsis))
+
+ def test_single_positional_argument(self):
+ def test(a):
+ pass
+ self.assertEqual(self.signature(test),
+ (((('a', Ellipsis, Ellipsis, "positional_or_keyword")),), Ellipsis))
+
+ def test_single_keyword_argument(self):
+ def test(a=None):
+ pass
+ self.assertEqual(self.signature(test),
+ (((('a', None, Ellipsis, "positional_or_keyword")),), Ellipsis))
+
+ def test_var_args(self):
+ def test(*args):
+ pass
+ self.assertEqual(self.signature(test),
+ (((('args', Ellipsis, Ellipsis, "var_positional")),), Ellipsis))
+
+ def test_keywords_args(self):
+ def test(**kwargs):
+ pass
+ self.assertEqual(self.signature(test),
+ (((('kwargs', Ellipsis, Ellipsis, "var_keyword")),), Ellipsis))
+
+ def test_multiple_arguments(self):
+ def test(a, b=None, *args, **kwargs):
+ pass
+ self.assertEqual(self.signature(test), ((
+ ('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', None, Ellipsis, "positional_or_keyword"),
+ ('args', Ellipsis, Ellipsis, "var_positional"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword"),
+ ), Ellipsis))
+
+ def test_has_version(self):
+ self.assertTrue(inspect.__version__)
+
+ def test_readme(self):
+        # XXX: This fails but doesn't fail the build (and the doctest syntax
+        # isn't valid on all Pythons, so it seems a little hard to get right).
+ doctest.testfile('../README.rst')
+
+ def test_unbound_method(self):
+ self_kind = "positional_or_keyword"
+ class Test(object):
+ def method(self):
+ pass
+ def method_with_args(self, a):
+ pass
+ def method_with_varargs(*args):
+ pass
+ self.assertEqual(
+ self.signature(Test.method),
+ (((('self', Ellipsis, Ellipsis, self_kind)),), Ellipsis))
+ self.assertEqual(
+ self.signature(Test.method_with_args),
+ ((('self', Ellipsis, Ellipsis, self_kind),
+ ('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ), Ellipsis))
+ self.assertEqual(
+ self.signature(Test.method_with_varargs),
+ ((('args', Ellipsis, Ellipsis, "var_positional"),), Ellipsis))
diff --git a/third_party/python/funcsigs/tests/test_inspect.py b/third_party/python/funcsigs/tests/test_inspect.py
new file mode 100644
index 0000000000..98d6592fcc
--- /dev/null
+++ b/third_party/python/funcsigs/tests/test_inspect.py
@@ -0,0 +1,1002 @@
+# Copyright 2001-2013 Python Software Foundation; All Rights Reserved
+from __future__ import absolute_import, division, print_function
+import collections
+import functools
+import sys
+
+import unittest2 as unittest
+
+import funcsigs as inspect
+
+
+class TestSignatureObject(unittest.TestCase):
+ @staticmethod
+ def signature(func):
+ sig = inspect.signature(func)
+ return (tuple((param.name,
+ (Ellipsis if param.default is param.empty else param.default),
+ (Ellipsis if param.annotation is param.empty
+ else param.annotation),
+ str(param.kind).lower())
+ for param in sig.parameters.values()),
+ (Ellipsis if sig.return_annotation is sig.empty
+ else sig.return_annotation))
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_object(self):
+ S = inspect.Signature
+ P = inspect.Parameter
+
+ self.assertEqual(str(S()), '()')
+
+ def test(po, pk, *args, ko, **kwargs):
+ pass
+ sig = inspect.signature(test)
+ po = sig.parameters['po'].replace(kind=P.POSITIONAL_ONLY)
+ pk = sig.parameters['pk']
+ args = sig.parameters['args']
+ ko = sig.parameters['ko']
+ kwargs = sig.parameters['kwargs']
+
+ S((po, pk, args, ko, kwargs))
+
+ with self.assertRaisesRegex(ValueError, 'wrong parameter order'):
+ S((pk, po, args, ko, kwargs))
+
+ with self.assertRaisesRegex(ValueError, 'wrong parameter order'):
+ S((po, args, pk, ko, kwargs))
+
+ with self.assertRaisesRegex(ValueError, 'wrong parameter order'):
+ S((args, po, pk, ko, kwargs))
+
+ with self.assertRaisesRegex(ValueError, 'wrong parameter order'):
+ S((po, pk, args, kwargs, ko))
+
+ kwargs2 = kwargs.replace(name='args')
+ with self.assertRaisesRegex(ValueError, 'duplicate parameter name'):
+ S((po, pk, args, kwargs2, ko))
+""")
+
+ def test_signature_immutability(self):
+ def test(a):
+ pass
+ sig = inspect.signature(test)
+
+ with self.assertRaises(AttributeError):
+ sig.foo = 'bar'
+
+ # Python2 does not have MappingProxyType class
+ if sys.version_info[:2] < (3, 3):
+ return
+
+ with self.assertRaises(TypeError):
+ sig.parameters['a'] = None
+
+ def test_signature_on_noarg(self):
+ def test():
+ pass
+ self.assertEqual(self.signature(test), ((), Ellipsis))
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_wargs(self):
+ def test(a, b:'foo') -> 123:
+ pass
+ self.assertEqual(self.signature(test),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', Ellipsis, 'foo', "positional_or_keyword")),
+ 123))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_wkwonly(self):
+ def test(*, a:float, b:str) -> int:
+ pass
+ self.assertEqual(self.signature(test),
+ ((('a', Ellipsis, float, "keyword_only"),
+ ('b', Ellipsis, str, "keyword_only")),
+ int))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_complex_args(self):
+ def test(a, b:'foo'=10, *args:'bar', spam:'baz', ham=123, **kwargs:int):
+ pass
+ self.assertEqual(self.signature(test),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', 10, 'foo', "positional_or_keyword"),
+ ('args', Ellipsis, 'bar', "var_positional"),
+ ('spam', Ellipsis, 'baz', "keyword_only"),
+ ('ham', 123, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, int, "var_keyword")),
+ Ellipsis))
+""")
+
+ def test_signature_on_builtin_function(self):
+ with self.assertRaisesRegex(ValueError, 'not supported by signature'):
+ inspect.signature(type)
+ with self.assertRaisesRegex(ValueError, 'not supported by signature'):
+ # support for 'wrapper_descriptor'
+ inspect.signature(type.__call__)
+ if hasattr(sys, 'pypy_version_info'):
+ raise ValueError('not supported by signature')
+ with self.assertRaisesRegex(ValueError, 'not supported by signature'):
+ # support for 'method-wrapper'
+ inspect.signature(min.__call__)
+ if hasattr(sys, 'pypy_version_info'):
+ raise ValueError('not supported by signature')
+ with self.assertRaisesRegex(ValueError,
+ 'no signature found for builtin function'):
+            # builtin functions like 'min' do not expose signature metadata
+ inspect.signature(min)
+
+ def test_signature_on_non_function(self):
+ with self.assertRaisesRegex(TypeError, 'is not a callable object'):
+ inspect.signature(42)
+
+ with self.assertRaisesRegex(TypeError, 'is not a Python function'):
+ inspect.Signature.from_function(42)
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_method(self):
+ class Test:
+ def foo(self, arg1, arg2=1) -> int:
+ pass
+
+ meth = Test().foo
+
+ self.assertEqual(self.signature(meth),
+ ((('arg1', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('arg2', 1, Ellipsis, "positional_or_keyword")),
+ int))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_classmethod(self):
+ class Test:
+ @classmethod
+ def foo(cls, arg1, *, arg2=1):
+ pass
+
+ meth = Test().foo
+ self.assertEqual(self.signature(meth),
+ ((('arg1', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('arg2', 1, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ meth = Test.foo
+ self.assertEqual(self.signature(meth),
+ ((('arg1', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('arg2', 1, Ellipsis, "keyword_only")),
+ Ellipsis))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_staticmethod(self):
+ class Test:
+ @staticmethod
+ def foo(cls, *, arg):
+ pass
+
+ meth = Test().foo
+ self.assertEqual(self.signature(meth),
+ ((('cls', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('arg', Ellipsis, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ meth = Test.foo
+ self.assertEqual(self.signature(meth),
+ ((('cls', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('arg', Ellipsis, Ellipsis, "keyword_only")),
+ Ellipsis))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_partial(self):
+ from functools import partial
+
+ def test():
+ pass
+
+ self.assertEqual(self.signature(partial(test)), ((), Ellipsis))
+
+ with self.assertRaisesRegex(ValueError, "has incorrect arguments"):
+ inspect.signature(partial(test, 1))
+
+ with self.assertRaisesRegex(ValueError, "has incorrect arguments"):
+ inspect.signature(partial(test, a=1))
+
+ def test(a, b, *, c, d):
+ pass
+
+ self.assertEqual(self.signature(partial(test)),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('c', Ellipsis, Ellipsis, "keyword_only"),
+ ('d', Ellipsis, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, 1)),
+ ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('c', Ellipsis, Ellipsis, "keyword_only"),
+ ('d', Ellipsis, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, 1, c=2)),
+ ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('c', 2, Ellipsis, "keyword_only"),
+ ('d', Ellipsis, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, b=1, c=2)),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', 1, Ellipsis, "positional_or_keyword"),
+ ('c', 2, Ellipsis, "keyword_only"),
+ ('d', Ellipsis, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, 0, b=1, c=2)),
+ ((('b', 1, Ellipsis, "positional_or_keyword"),
+ ('c', 2, Ellipsis, "keyword_only"),
+ ('d', Ellipsis, Ellipsis, "keyword_only"),),
+ Ellipsis))
+
+ def test(a, *args, b, **kwargs):
+ pass
+
+ self.assertEqual(self.signature(partial(test, 1)),
+ ((('args', Ellipsis, Ellipsis, "var_positional"),
+ ('b', Ellipsis, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, 1, 2, 3)),
+ ((('args', Ellipsis, Ellipsis, "var_positional"),
+ ('b', Ellipsis, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword")),
+ Ellipsis))
+
+
+ self.assertEqual(self.signature(partial(test, 1, 2, 3, test=True)),
+ ((('args', Ellipsis, Ellipsis, "var_positional"),
+ ('b', Ellipsis, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, 1, 2, 3, test=1, b=0)),
+ ((('args', Ellipsis, Ellipsis, "var_positional"),
+ ('b', 0, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, b=0)),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('args', Ellipsis, Ellipsis, "var_positional"),
+ ('b', 0, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, b=0, test=1)),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('args', Ellipsis, Ellipsis, "var_positional"),
+ ('b', 0, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword")),
+ Ellipsis))
+
+ def test(a, b, c:int) -> 42:
+ pass
+
+ sig = test.__signature__ = inspect.signature(test)
+
+ self.assertEqual(self.signature(partial(partial(test, 1))),
+ ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('c', Ellipsis, int, "positional_or_keyword")),
+ 42))
+
+ self.assertEqual(self.signature(partial(partial(test, 1), 2)),
+ ((('c', Ellipsis, int, "positional_or_keyword"),),
+ 42))
+
+ psig = inspect.signature(partial(partial(test, 1), 2))
+
+ def foo(a):
+ return a
+ _foo = partial(partial(foo, a=10), a=20)
+ self.assertEqual(self.signature(_foo),
+ ((('a', 20, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+ # check that we don't have any side-effects in signature(),
+ # and the partial object is still functioning
+ self.assertEqual(_foo(), 20)
+
+ def foo(a, b, c):
+ return a, b, c
+ _foo = partial(partial(foo, 1, b=20), b=30)
+ self.assertEqual(self.signature(_foo),
+ ((('b', 30, Ellipsis, "positional_or_keyword"),
+ ('c', Ellipsis, Ellipsis, "positional_or_keyword")),
+ Ellipsis))
+ self.assertEqual(_foo(c=10), (1, 30, 10))
+ _foo = partial(_foo, 2) # now 'b' has two values -
+ # positional and keyword
+ with self.assertRaisesRegex(ValueError, "has incorrect arguments"):
+ inspect.signature(_foo)
+
+ def foo(a, b, c, *, d):
+ return a, b, c, d
+ _foo = partial(partial(foo, d=20, c=20), b=10, d=30)
+ self.assertEqual(self.signature(_foo),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', 10, Ellipsis, "positional_or_keyword"),
+ ('c', 20, Ellipsis, "positional_or_keyword"),
+ ('d', 30, Ellipsis, "keyword_only")),
+ Ellipsis))
+ ba = inspect.signature(_foo).bind(a=200, b=11)
+ self.assertEqual(_foo(*ba.args, **ba.kwargs), (200, 11, 20, 30))
+
+ def foo(a=1, b=2, c=3):
+ return a, b, c
+ _foo = partial(foo, a=10, c=13)
+ ba = inspect.signature(_foo).bind(11)
+ self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 2, 13))
+ ba = inspect.signature(_foo).bind(11, 12)
+ self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 12, 13))
+ ba = inspect.signature(_foo).bind(11, b=12)
+ self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 12, 13))
+ ba = inspect.signature(_foo).bind(b=12)
+ self.assertEqual(_foo(*ba.args, **ba.kwargs), (10, 12, 13))
+ _foo = partial(_foo, b=10)
+ ba = inspect.signature(_foo).bind(12, 14)
+ self.assertEqual(_foo(*ba.args, **ba.kwargs), (12, 14, 13))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_decorated(self):
+ import functools
+
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs) -> int:
+ return func(*args, **kwargs)
+ return wrapper
+
+ class Foo:
+ @decorator
+ def bar(self, a, b):
+ pass
+
+ self.assertEqual(self.signature(Foo.bar),
+ ((('self', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', Ellipsis, Ellipsis, "positional_or_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(Foo().bar),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', Ellipsis, Ellipsis, "positional_or_keyword")),
+ Ellipsis))
+
+ # Test that we handle method wrappers correctly
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs) -> int:
+ return func(42, *args, **kwargs)
+ sig = inspect.signature(func)
+ new_params = tuple(sig.parameters.values())[1:]
+ wrapper.__signature__ = sig.replace(parameters=new_params)
+ return wrapper
+
+ class Foo:
+ @decorator
+ def __call__(self, a, b):
+ pass
+
+ self.assertEqual(self.signature(Foo.__call__),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', Ellipsis, Ellipsis, "positional_or_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(Foo().__call__),
+ ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_class(self):
+ class C:
+ def __init__(self, a):
+ pass
+
+ self.assertEqual(self.signature(C),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ class CM(type):
+ def __call__(cls, a):
+ pass
+ class C(metaclass=CM):
+ def __init__(self, b):
+ pass
+
+ self.assertEqual(self.signature(C),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ class CM(type):
+ def __new__(mcls, name, bases, dct, *, foo=1):
+ return super().__new__(mcls, name, bases, dct)
+ class C(metaclass=CM):
+ def __init__(self, b):
+ pass
+
+ self.assertEqual(self.signature(C),
+ ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ self.assertEqual(self.signature(CM),
+ ((('name', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('bases', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('dct', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('foo', 1, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ class CMM(type):
+ def __new__(mcls, name, bases, dct, *, foo=1):
+ return super().__new__(mcls, name, bases, dct)
+ def __call__(cls, nm, bs, dt):
+ return type(nm, bs, dt)
+ class CM(type, metaclass=CMM):
+ def __new__(mcls, name, bases, dct, *, bar=2):
+ return super().__new__(mcls, name, bases, dct)
+ class C(metaclass=CM):
+ def __init__(self, b):
+ pass
+
+ self.assertEqual(self.signature(CMM),
+ ((('name', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('bases', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('dct', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('foo', 1, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(CM),
+ ((('nm', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('bs', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('dt', Ellipsis, Ellipsis, "positional_or_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(C),
+ ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ class CM(type):
+ def __init__(cls, name, bases, dct, *, bar=2):
+ return super().__init__(name, bases, dct)
+ class C(metaclass=CM):
+ def __init__(self, b):
+ pass
+
+ self.assertEqual(self.signature(CM),
+ ((('name', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('bases', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('dct', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('bar', 2, Ellipsis, "keyword_only")),
+ Ellipsis))
+""")
+
+ def test_signature_on_callable_objects(self):
+ class Foo(object):
+ def __call__(self, a):
+ pass
+
+ self.assertEqual(self.signature(Foo()),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ class Spam(object):
+ pass
+ with self.assertRaisesRegex(TypeError, "is not a callable object"):
+ inspect.signature(Spam())
+
+ class Bar(Spam, Foo):
+ pass
+
+ self.assertEqual(self.signature(Bar()),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ class ToFail(object):
+ __call__ = type
+ with self.assertRaisesRegex(ValueError, "not supported by signature"):
+ inspect.signature(ToFail())
+
+ if sys.version_info[0] < 3:
+ return
+
+ class Wrapped(object):
+ pass
+ Wrapped.__wrapped__ = lambda a: None
+ self.assertEqual(self.signature(Wrapped),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ def test_signature_on_lambdas(self):
+ self.assertEqual(self.signature((lambda a=10: a)),
+ ((('a', 10, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_equality(self):
+ def foo(a, *, b:int) -> float: pass
+ self.assertNotEqual(inspect.signature(foo), 42)
+
+ def bar(a, *, b:int) -> float: pass
+ self.assertEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def bar(a, *, b:int) -> int: pass
+ self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def bar(a, *, b:int): pass
+ self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def bar(a, *, b:int=42) -> float: pass
+ self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def bar(a, *, c) -> float: pass
+ self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def bar(a, b:int) -> float: pass
+ self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
+ def spam(b:int, a) -> float: pass
+ self.assertNotEqual(inspect.signature(spam), inspect.signature(bar))
+
+ def foo(*, a, b, c): pass
+ def bar(*, c, b, a): pass
+ self.assertEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def foo(*, a=1, b, c): pass
+ def bar(*, c, b, a=1): pass
+ self.assertEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def foo(pos, *, a=1, b, c): pass
+ def bar(pos, *, c, b, a=1): pass
+ self.assertEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def foo(pos, *, a, b, c): pass
+ def bar(pos, *, c, b, a=1): pass
+ self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def foo(pos, *args, a=42, b, c, **kwargs:int): pass
+ def bar(pos, *args, c, b, a=42, **kwargs:int): pass
+ self.assertEqual(inspect.signature(foo), inspect.signature(bar))
+""")
+
+ def test_signature_unhashable(self):
+ def foo(a): pass
+ sig = inspect.signature(foo)
+ with self.assertRaisesRegex(TypeError, 'unhashable type'):
+ hash(sig)
+
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_str(self):
+ def foo(a:int=1, *, b, c=None, **kwargs) -> 42:
+ pass
+ self.assertEqual(str(inspect.signature(foo)),
+ '(a:int=1, *, b, c=None, **kwargs) -> 42')
+
+ def foo(a:int=1, *args, b, c=None, **kwargs) -> 42:
+ pass
+ self.assertEqual(str(inspect.signature(foo)),
+ '(a:int=1, *args, b, c=None, **kwargs) -> 42')
+
+ def foo():
+ pass
+ self.assertEqual(str(inspect.signature(foo)), '()')
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_str_positional_only(self):
+ P = inspect.Parameter
+
+ def test(a_po, *, b, **kwargs):
+ return a_po, kwargs
+
+ sig = inspect.signature(test)
+ new_params = list(sig.parameters.values())
+ new_params[0] = new_params[0].replace(kind=P.POSITIONAL_ONLY)
+ test.__signature__ = sig.replace(parameters=new_params)
+
+ self.assertEqual(str(inspect.signature(test)),
+ '(<a_po>, *, b, **kwargs)')
+
+ sig = inspect.signature(test)
+ new_params = list(sig.parameters.values())
+ new_params[0] = new_params[0].replace(name=None)
+ test.__signature__ = sig.replace(parameters=new_params)
+ self.assertEqual(str(inspect.signature(test)),
+ '(<0>, *, b, **kwargs)')
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_replace_anno(self):
+ def test() -> 42:
+ pass
+
+ sig = inspect.signature(test)
+ sig = sig.replace(return_annotation=None)
+ self.assertIs(sig.return_annotation, None)
+ sig = sig.replace(return_annotation=sig.empty)
+ self.assertIs(sig.return_annotation, sig.empty)
+ sig = sig.replace(return_annotation=42)
+ self.assertEqual(sig.return_annotation, 42)
+ self.assertEqual(sig, inspect.signature(test))
+""")
+
+
+class TestParameterObject(unittest.TestCase):
+
+ def test_signature_parameter_kinds(self):
+ P = inspect.Parameter
+ self.assertTrue(P.POSITIONAL_ONLY < P.POSITIONAL_OR_KEYWORD < \
+ P.VAR_POSITIONAL < P.KEYWORD_ONLY < P.VAR_KEYWORD)
+
+ self.assertEqual(str(P.POSITIONAL_ONLY), 'POSITIONAL_ONLY')
+ self.assertTrue('POSITIONAL_ONLY' in repr(P.POSITIONAL_ONLY))
+
+ def test_signature_parameter_object(self):
+ p = inspect.Parameter('foo', default=10,
+ kind=inspect.Parameter.POSITIONAL_ONLY)
+ self.assertEqual(p.name, 'foo')
+ self.assertEqual(p.default, 10)
+ self.assertIs(p.annotation, p.empty)
+ self.assertEqual(p.kind, inspect.Parameter.POSITIONAL_ONLY)
+
+ with self.assertRaisesRegex(ValueError, 'invalid value'):
+ inspect.Parameter('foo', default=10, kind='123')
+
+ with self.assertRaisesRegex(ValueError, 'not a valid parameter name'):
+ inspect.Parameter('1', kind=inspect.Parameter.VAR_KEYWORD)
+
+ with self.assertRaisesRegex(ValueError,
+ 'non-positional-only parameter'):
+ inspect.Parameter(None, kind=inspect.Parameter.VAR_KEYWORD)
+
+ with self.assertRaisesRegex(ValueError, 'cannot have default values'):
+ inspect.Parameter('a', default=42,
+ kind=inspect.Parameter.VAR_KEYWORD)
+
+ with self.assertRaisesRegex(ValueError, 'cannot have default values'):
+ inspect.Parameter('a', default=42,
+ kind=inspect.Parameter.VAR_POSITIONAL)
+
+ p = inspect.Parameter('a', default=42,
+ kind=inspect.Parameter.POSITIONAL_OR_KEYWORD)
+ with self.assertRaisesRegex(ValueError, 'cannot have default values'):
+ p.replace(kind=inspect.Parameter.VAR_POSITIONAL)
+
+ self.assertTrue(repr(p).startswith('<Parameter'))
+
+ def test_signature_parameter_equality(self):
+ P = inspect.Parameter
+ p = P('foo', default=42, kind=inspect.Parameter.KEYWORD_ONLY)
+
+ self.assertEqual(p, p)
+ self.assertNotEqual(p, 42)
+
+ self.assertEqual(p, P('foo', default=42,
+ kind=inspect.Parameter.KEYWORD_ONLY))
+
+ def test_signature_parameter_unhashable(self):
+ p = inspect.Parameter('foo', default=42,
+ kind=inspect.Parameter.KEYWORD_ONLY)
+
+ with self.assertRaisesRegex(TypeError, 'unhashable type'):
+ hash(p)
+
+ def test_signature_parameter_replace(self):
+ p = inspect.Parameter('foo', default=42,
+ kind=inspect.Parameter.KEYWORD_ONLY)
+
+ self.assertIsNot(p, p.replace())
+ self.assertEqual(p, p.replace())
+
+ p2 = p.replace(annotation=1)
+ self.assertEqual(p2.annotation, 1)
+ p2 = p2.replace(annotation=p2.empty)
+ self.assertEqual(p, p2)
+
+ p2 = p2.replace(name='bar')
+ self.assertEqual(p2.name, 'bar')
+ self.assertNotEqual(p2, p)
+
+ with self.assertRaisesRegex(ValueError, 'not a valid parameter name'):
+ p2 = p2.replace(name=p2.empty)
+
+ p2 = p2.replace(name='foo', default=None)
+ self.assertIs(p2.default, None)
+ self.assertNotEqual(p2, p)
+
+ p2 = p2.replace(name='foo', default=p2.empty)
+ self.assertIs(p2.default, p2.empty)
+
+
+ p2 = p2.replace(default=42, kind=p2.POSITIONAL_OR_KEYWORD)
+ self.assertEqual(p2.kind, p2.POSITIONAL_OR_KEYWORD)
+ self.assertNotEqual(p2, p)
+
+ with self.assertRaisesRegex(ValueError, 'invalid value for'):
+ p2 = p2.replace(kind=p2.empty)
+
+ p2 = p2.replace(kind=p2.KEYWORD_ONLY)
+ self.assertEqual(p2, p)
+
+ def test_signature_parameter_positional_only(self):
+ p = inspect.Parameter(None, kind=inspect.Parameter.POSITIONAL_ONLY)
+ self.assertEqual(str(p), '<>')
+
+ p = p.replace(name='1')
+ self.assertEqual(str(p), '<1>')
+
+ def test_signature_parameter_immutability(self):
+ p = inspect.Parameter(None, kind=inspect.Parameter.POSITIONAL_ONLY)
+
+ with self.assertRaises(AttributeError):
+ p.foo = 'bar'
+
+ with self.assertRaises(AttributeError):
+ p.kind = 123
+
+
+class TestSignatureBind(unittest.TestCase):
+ @staticmethod
+ def call(func, *args, **kwargs):
+ sig = inspect.signature(func)
+ ba = sig.bind(*args, **kwargs)
+ return func(*ba.args, **ba.kwargs)
+
+ def test_signature_bind_empty(self):
+ def test():
+ return 42
+
+ self.assertEqual(self.call(test), 42)
+ with self.assertRaisesRegex(TypeError, 'too many positional arguments'):
+ self.call(test, 1)
+ with self.assertRaisesRegex(TypeError, 'too many positional arguments'):
+ self.call(test, 1, spam=10)
+ with self.assertRaisesRegex(TypeError, 'too many keyword arguments'):
+ self.call(test, spam=1)
+
+ def test_signature_bind_var(self):
+ def test(*args, **kwargs):
+ return args, kwargs
+
+ self.assertEqual(self.call(test), ((), {}))
+ self.assertEqual(self.call(test, 1), ((1,), {}))
+ self.assertEqual(self.call(test, 1, 2), ((1, 2), {}))
+ self.assertEqual(self.call(test, foo='bar'), ((), {'foo': 'bar'}))
+ self.assertEqual(self.call(test, 1, foo='bar'), ((1,), {'foo': 'bar'}))
+ self.assertEqual(self.call(test, args=10), ((), {'args': 10}))
+ self.assertEqual(self.call(test, 1, 2, foo='bar'),
+ ((1, 2), {'foo': 'bar'}))
+
+ def test_signature_bind_just_args(self):
+ def test(a, b, c):
+ return a, b, c
+
+ self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3))
+
+ with self.assertRaisesRegex(TypeError, 'too many positional arguments'):
+ self.call(test, 1, 2, 3, 4)
+
+ with self.assertRaisesRegex(TypeError, "'b' parameter lacking default"):
+ self.call(test, 1)
+
+ with self.assertRaisesRegex(TypeError, "'a' parameter lacking default"):
+ self.call(test)
+
+ def test(a, b, c=10):
+ return a, b, c
+ self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3))
+ self.assertEqual(self.call(test, 1, 2), (1, 2, 10))
+
+ def test(a=1, b=2, c=3):
+ return a, b, c
+ self.assertEqual(self.call(test, a=10, c=13), (10, 2, 13))
+ self.assertEqual(self.call(test, a=10), (10, 2, 3))
+ self.assertEqual(self.call(test, b=10), (1, 10, 3))
+
+ def test_signature_bind_varargs_order(self):
+ def test(*args):
+ return args
+
+ self.assertEqual(self.call(test), ())
+ self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3))
+
+ def test_signature_bind_args_and_varargs(self):
+ def test(a, b, c=3, *args):
+ return a, b, c, args
+
+ self.assertEqual(self.call(test, 1, 2, 3, 4, 5), (1, 2, 3, (4, 5)))
+ self.assertEqual(self.call(test, 1, 2), (1, 2, 3, ()))
+ self.assertEqual(self.call(test, b=1, a=2), (2, 1, 3, ()))
+ self.assertEqual(self.call(test, 1, b=2), (1, 2, 3, ()))
+
+ with self.assertRaisesRegex(TypeError,
+ "multiple values for argument 'c'"):
+ self.call(test, 1, 2, 3, c=4)
+
+ def test_signature_bind_just_kwargs(self):
+ def test(**kwargs):
+ return kwargs
+
+ self.assertEqual(self.call(test), {})
+ self.assertEqual(self.call(test, foo='bar', spam='ham'),
+ {'foo': 'bar', 'spam': 'ham'})
+
+ def test_signature_bind_args_and_kwargs(self):
+ def test(a, b, c=3, **kwargs):
+ return a, b, c, kwargs
+
+ self.assertEqual(self.call(test, 1, 2), (1, 2, 3, {}))
+ self.assertEqual(self.call(test, 1, 2, foo='bar', spam='ham'),
+ (1, 2, 3, {'foo': 'bar', 'spam': 'ham'}))
+ self.assertEqual(self.call(test, b=2, a=1, foo='bar', spam='ham'),
+ (1, 2, 3, {'foo': 'bar', 'spam': 'ham'}))
+ self.assertEqual(self.call(test, a=1, b=2, foo='bar', spam='ham'),
+ (1, 2, 3, {'foo': 'bar', 'spam': 'ham'}))
+ self.assertEqual(self.call(test, 1, b=2, foo='bar', spam='ham'),
+ (1, 2, 3, {'foo': 'bar', 'spam': 'ham'}))
+ self.assertEqual(self.call(test, 1, b=2, c=4, foo='bar', spam='ham'),
+ (1, 2, 4, {'foo': 'bar', 'spam': 'ham'}))
+ self.assertEqual(self.call(test, 1, 2, 4, foo='bar'),
+ (1, 2, 4, {'foo': 'bar'}))
+ self.assertEqual(self.call(test, c=5, a=4, b=3),
+ (4, 3, 5, {}))
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_bind_kwonly(self):
+ def test(*, foo):
+ return foo
+ with self.assertRaisesRegex(TypeError,
+ 'too many positional arguments'):
+ self.call(test, 1)
+ self.assertEqual(self.call(test, foo=1), 1)
+
+ def test(a, *, foo=1, bar):
+ return foo
+ with self.assertRaisesRegex(TypeError,
+ "'bar' parameter lacking default value"):
+ self.call(test, 1)
+
+ def test(foo, *, bar):
+ return foo, bar
+ self.assertEqual(self.call(test, 1, bar=2), (1, 2))
+ self.assertEqual(self.call(test, bar=2, foo=1), (1, 2))
+
+ with self.assertRaisesRegex(TypeError,
+ 'too many keyword arguments'):
+ self.call(test, bar=2, foo=1, spam=10)
+
+ with self.assertRaisesRegex(TypeError,
+ 'too many positional arguments'):
+ self.call(test, 1, 2)
+
+ with self.assertRaisesRegex(TypeError,
+ 'too many positional arguments'):
+ self.call(test, 1, 2, bar=2)
+
+ with self.assertRaisesRegex(TypeError,
+ 'too many keyword arguments'):
+ self.call(test, 1, bar=2, spam='ham')
+
+ with self.assertRaisesRegex(TypeError,
+ "'bar' parameter lacking default value"):
+ self.call(test, 1)
+
+ def test(foo, *, bar, **bin):
+ return foo, bar, bin
+ self.assertEqual(self.call(test, 1, bar=2), (1, 2, {}))
+ self.assertEqual(self.call(test, foo=1, bar=2), (1, 2, {}))
+ self.assertEqual(self.call(test, 1, bar=2, spam='ham'),
+ (1, 2, {'spam': 'ham'}))
+ self.assertEqual(self.call(test, spam='ham', foo=1, bar=2),
+ (1, 2, {'spam': 'ham'}))
+ with self.assertRaisesRegex(TypeError,
+ "'foo' parameter lacking default value"):
+ self.call(test, spam='ham', bar=2)
+ self.assertEqual(self.call(test, 1, bar=2, bin=1, spam=10),
+ (1, 2, {'bin': 1, 'spam': 10}))
+""")
+#
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_bind_arguments(self):
+ def test(a, *args, b, z=100, **kwargs):
+ pass
+ sig = inspect.signature(test)
+ ba = sig.bind(10, 20, b=30, c=40, args=50, kwargs=60)
+    # we won't have a 'z' argument in the bound arguments object, as we didn't
+    # pass it to 'bind'
+ self.assertEqual(tuple(ba.arguments.items()),
+ (('a', 10), ('args', (20,)), ('b', 30),
+ ('kwargs', {'c': 40, 'args': 50, 'kwargs': 60})))
+ self.assertEqual(ba.kwargs,
+ {'b': 30, 'c': 40, 'args': 50, 'kwargs': 60})
+ self.assertEqual(ba.args, (10, 20))
+""")
+#
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_bind_positional_only(self):
+ P = inspect.Parameter
+
+ def test(a_po, b_po, c_po=3, foo=42, *, bar=50, **kwargs):
+ return a_po, b_po, c_po, foo, bar, kwargs
+
+ sig = inspect.signature(test)
+ new_params = collections.OrderedDict(tuple(sig.parameters.items()))
+ for name in ('a_po', 'b_po', 'c_po'):
+ new_params[name] = new_params[name].replace(kind=P.POSITIONAL_ONLY)
+ new_sig = sig.replace(parameters=new_params.values())
+ test.__signature__ = new_sig
+
+ self.assertEqual(self.call(test, 1, 2, 4, 5, bar=6),
+ (1, 2, 4, 5, 6, {}))
+
+ with self.assertRaisesRegex(TypeError, "parameter is positional only"):
+ self.call(test, 1, 2, c_po=4)
+
+ with self.assertRaisesRegex(TypeError, "parameter is positional only"):
+ self.call(test, a_po=1, b_po=2)
+""")
+
+ def test_bind_self(self):
+ class F:
+ def f(a, self):
+ return a, self
+ an_f = F()
+ partial_f = functools.partial(F.f, an_f)
+ ba = inspect.signature(partial_f).bind(self=10)
+ self.assertEqual((an_f, 10), partial_f(*ba.args, **ba.kwargs))
+
+
+class TestBoundArguments(unittest.TestCase):
+
+ def test_signature_bound_arguments_unhashable(self):
+ def foo(a): pass
+ ba = inspect.signature(foo).bind(1)
+
+ with self.assertRaisesRegex(TypeError, 'unhashable type'):
+ hash(ba)
+
+ def test_signature_bound_arguments_equality(self):
+ def foo(a): pass
+ ba = inspect.signature(foo).bind(1)
+ self.assertEqual(ba, ba)
+
+ ba2 = inspect.signature(foo).bind(1)
+ self.assertEqual(ba, ba2)
+
+ ba3 = inspect.signature(foo).bind(2)
+ self.assertNotEqual(ba, ba3)
+ ba3.arguments['a'] = 1
+ self.assertEqual(ba, ba3)
+
+ def bar(b): pass
+ ba4 = inspect.signature(bar).bind(1)
+ self.assertNotEqual(ba, ba4)
diff --git a/third_party/python/futures/CHANGES b/third_party/python/futures/CHANGES
new file mode 100644
index 0000000000..7dd051447b
--- /dev/null
+++ b/third_party/python/futures/CHANGES
@@ -0,0 +1,107 @@
+3.0.5
+=====
+
+- Fixed OverflowError with ProcessPoolExecutor on Windows (regression introduced in 3.0.4)
+
+
+3.0.4
+=====
+
+- Fixed inability to forcibly terminate the process if there are pending workers
+
+
+3.0.3
+=====
+
+- Fixed AttributeErrors on exit on Python 2.x
+
+
+3.0.2
+=====
+
+- Made multiprocessing optional again on implementations other than just Jython
+
+
+3.0.1
+=====
+
+- Made Executor.map() non-greedy
+
+
+3.0.0
+=====
+
+- Dropped Python 2.5 and 3.1 support
+- Removed the deprecated "futures" top level package
+- Applied patch for issue 11777 (Executor.map does not submit futures until
+ iter.next() is called)
+- Applied patch for issue 15015 (accessing a non-existent attribute)
+- Applied patch for issue 16284 (memory leak)
+- Applied patch for issue 20367 (behavior of concurrent.futures.as_completed()
+ for duplicate arguments)
+
+2.2.0
+=====
+
+- Added the set_exception_info() and exception_info() methods to Future
+ to enable extraction of tracebacks on Python 2.x
+- Added support for Future.set_exception_info() to ThreadPoolExecutor
+
+
+2.1.6
+=====
+
+- Fixed a problem with files missing from the source distribution
+
+
+2.1.5
+=====
+
+- Fixed Jython compatibility
+- Added metadata for wheel support
+
+
+2.1.4
+=====
+
+- Ported the library again from Python 3.2.5 to get the latest bug fixes
+
+
+2.1.3
+=====
+
+- Fixed race condition in wait(return_when=ALL_COMPLETED)
+ (http://bugs.python.org/issue14406) -- thanks Ralf Schmitt
+- Added missing setUp() methods to several test classes
+
+
+2.1.2
+=====
+
+- Fixed installation problem on Python 3.1
+
+
+2.1.1
+=====
+
+- Fixed missing 'concurrent' package declaration in setup.py
+
+
+2.1
+===
+
+- Moved the code from the 'futures' package to 'concurrent.futures' to provide
+  a drop-in backport that matches the code in the Python 3.2 standard library
+- Deprecated the old 'futures' package
+
+
+2.0
+===
+
+- Changed implementation to match PEP 3148
+
+
+1.0
+===
+
+Initial release.
diff --git a/third_party/python/futures/LICENSE b/third_party/python/futures/LICENSE
new file mode 100644
index 0000000000..c430db0f17
--- /dev/null
+++ b/third_party/python/futures/LICENSE
@@ -0,0 +1,21 @@
+Copyright 2009 Brian Quinlan. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY BRIAN QUINLAN "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+SHALL THE FREEBSD PROJECT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file
diff --git a/third_party/python/futures/MANIFEST.in b/third_party/python/futures/MANIFEST.in
new file mode 100644
index 0000000000..52860d0435
--- /dev/null
+++ b/third_party/python/futures/MANIFEST.in
@@ -0,0 +1,5 @@
+recursive-include docs *
+include *.py
+include tox.ini
+include CHANGES
+include LICENSE
diff --git a/third_party/python/futures/PKG-INFO b/third_party/python/futures/PKG-INFO
new file mode 100644
index 0000000000..00e3a99767
--- /dev/null
+++ b/third_party/python/futures/PKG-INFO
@@ -0,0 +1,16 @@
+Metadata-Version: 1.0
+Name: futures
+Version: 3.0.5
+Summary: Backport of the concurrent.futures package from Python 3.2
+Home-page: https://github.com/agronholm/pythonfutures
+Author: Alex Gronholm
+Author-email: alex.gronholm+pypi@nextday.fi
+License: BSD
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 2 :: Only
diff --git a/third_party/python/futures/concurrent/__init__.py b/third_party/python/futures/concurrent/__init__.py
new file mode 100644
index 0000000000..b36383a610
--- /dev/null
+++ b/third_party/python/futures/concurrent/__init__.py
@@ -0,0 +1,3 @@
+from pkgutil import extend_path
+
+__path__ = extend_path(__path__, __name__)
diff --git a/third_party/python/futures/concurrent/futures/__init__.py b/third_party/python/futures/concurrent/futures/__init__.py
new file mode 100644
index 0000000000..428b14bdfe
--- /dev/null
+++ b/third_party/python/futures/concurrent/futures/__init__.py
@@ -0,0 +1,23 @@
+# Copyright 2009 Brian Quinlan. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Execute computations asynchronously using threads or processes."""
+
+__author__ = 'Brian Quinlan (brian@sweetapp.com)'
+
+from concurrent.futures._base import (FIRST_COMPLETED,
+ FIRST_EXCEPTION,
+ ALL_COMPLETED,
+ CancelledError,
+ TimeoutError,
+ Future,
+ Executor,
+ wait,
+ as_completed)
+from concurrent.futures.thread import ThreadPoolExecutor
+
+try:
+ from concurrent.futures.process import ProcessPoolExecutor
+except ImportError:
+ # some platforms don't have multiprocessing
+ pass
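
The package re-exports the same public names as Python 3.2's concurrent.futures, so code written against the backport carries over unchanged. A minimal usage sketch, assuming only the backport is installed (the square helper is illustrative):

    from concurrent.futures import ThreadPoolExecutor

    def square(x):
        return x * x

    # The with-block calls shutdown(wait=True) on exit.
    with ThreadPoolExecutor(max_workers=2) as executor:
        future = executor.submit(square, 7)
        print(future.result())  # 49
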
diff --git a/third_party/python/futures/concurrent/futures/_base.py b/third_party/python/futures/concurrent/futures/_base.py
new file mode 100644
index 0000000000..2936c46b16
--- /dev/null
+++ b/third_party/python/futures/concurrent/futures/_base.py
@@ -0,0 +1,607 @@
+# Copyright 2009 Brian Quinlan. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+import collections
+import logging
+import threading
+import itertools
+import time
+
+__author__ = 'Brian Quinlan (brian@sweetapp.com)'
+
+FIRST_COMPLETED = 'FIRST_COMPLETED'
+FIRST_EXCEPTION = 'FIRST_EXCEPTION'
+ALL_COMPLETED = 'ALL_COMPLETED'
+_AS_COMPLETED = '_AS_COMPLETED'
+
+# Possible future states (for internal use by the futures package).
+PENDING = 'PENDING'
+RUNNING = 'RUNNING'
+# The future was cancelled by the user...
+CANCELLED = 'CANCELLED'
+# ...and _Waiter.add_cancelled() was called by a worker.
+CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED'
+FINISHED = 'FINISHED'
+
+_FUTURE_STATES = [
+ PENDING,
+ RUNNING,
+ CANCELLED,
+ CANCELLED_AND_NOTIFIED,
+ FINISHED
+]
+
+_STATE_TO_DESCRIPTION_MAP = {
+ PENDING: "pending",
+ RUNNING: "running",
+ CANCELLED: "cancelled",
+ CANCELLED_AND_NOTIFIED: "cancelled",
+ FINISHED: "finished"
+}
+
+# Logger for internal use by the futures package.
+LOGGER = logging.getLogger("concurrent.futures")
+
+class Error(Exception):
+ """Base class for all future-related exceptions."""
+ pass
+
+class CancelledError(Error):
+ """The Future was cancelled."""
+ pass
+
+class TimeoutError(Error):
+ """The operation exceeded the given deadline."""
+ pass
+
+class _Waiter(object):
+ """Provides the event that wait() and as_completed() block on."""
+ def __init__(self):
+ self.event = threading.Event()
+ self.finished_futures = []
+
+ def add_result(self, future):
+ self.finished_futures.append(future)
+
+ def add_exception(self, future):
+ self.finished_futures.append(future)
+
+ def add_cancelled(self, future):
+ self.finished_futures.append(future)
+
+class _AsCompletedWaiter(_Waiter):
+ """Used by as_completed()."""
+
+ def __init__(self):
+ super(_AsCompletedWaiter, self).__init__()
+ self.lock = threading.Lock()
+
+ def add_result(self, future):
+ with self.lock:
+ super(_AsCompletedWaiter, self).add_result(future)
+ self.event.set()
+
+ def add_exception(self, future):
+ with self.lock:
+ super(_AsCompletedWaiter, self).add_exception(future)
+ self.event.set()
+
+ def add_cancelled(self, future):
+ with self.lock:
+ super(_AsCompletedWaiter, self).add_cancelled(future)
+ self.event.set()
+
+class _FirstCompletedWaiter(_Waiter):
+ """Used by wait(return_when=FIRST_COMPLETED)."""
+
+ def add_result(self, future):
+ super(_FirstCompletedWaiter, self).add_result(future)
+ self.event.set()
+
+ def add_exception(self, future):
+ super(_FirstCompletedWaiter, self).add_exception(future)
+ self.event.set()
+
+ def add_cancelled(self, future):
+ super(_FirstCompletedWaiter, self).add_cancelled(future)
+ self.event.set()
+
+class _AllCompletedWaiter(_Waiter):
+ """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED)."""
+
+ def __init__(self, num_pending_calls, stop_on_exception):
+ self.num_pending_calls = num_pending_calls
+ self.stop_on_exception = stop_on_exception
+ self.lock = threading.Lock()
+ super(_AllCompletedWaiter, self).__init__()
+
+ def _decrement_pending_calls(self):
+ with self.lock:
+ self.num_pending_calls -= 1
+ if not self.num_pending_calls:
+ self.event.set()
+
+ def add_result(self, future):
+ super(_AllCompletedWaiter, self).add_result(future)
+ self._decrement_pending_calls()
+
+ def add_exception(self, future):
+ super(_AllCompletedWaiter, self).add_exception(future)
+ if self.stop_on_exception:
+ self.event.set()
+ else:
+ self._decrement_pending_calls()
+
+ def add_cancelled(self, future):
+ super(_AllCompletedWaiter, self).add_cancelled(future)
+ self._decrement_pending_calls()
+
+class _AcquireFutures(object):
+ """A context manager that does an ordered acquire of Future conditions."""
+
+ def __init__(self, futures):
+ self.futures = sorted(futures, key=id)
+
+ def __enter__(self):
+ for future in self.futures:
+ future._condition.acquire()
+
+ def __exit__(self, *args):
+ for future in self.futures:
+ future._condition.release()
+
+def _create_and_install_waiters(fs, return_when):
+ if return_when == _AS_COMPLETED:
+ waiter = _AsCompletedWaiter()
+ elif return_when == FIRST_COMPLETED:
+ waiter = _FirstCompletedWaiter()
+ else:
+ pending_count = sum(
+ f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] for f in fs)
+
+ if return_when == FIRST_EXCEPTION:
+ waiter = _AllCompletedWaiter(pending_count, stop_on_exception=True)
+ elif return_when == ALL_COMPLETED:
+ waiter = _AllCompletedWaiter(pending_count, stop_on_exception=False)
+ else:
+ raise ValueError("Invalid return condition: %r" % return_when)
+
+ for f in fs:
+ f._waiters.append(waiter)
+
+ return waiter
+
+def as_completed(fs, timeout=None):
+ """An iterator over the given futures that yields each as it completes.
+
+ Args:
+ fs: The sequence of Futures (possibly created by different Executors) to
+ iterate over.
+ timeout: The maximum number of seconds to wait. If None, then there
+ is no limit on the wait time.
+
+ Returns:
+ An iterator that yields the given Futures as they complete (finished or
+ cancelled). If any given Futures are duplicated, they will be returned
+ once.
+
+ Raises:
+ TimeoutError: If the entire result iterator could not be generated
+ before the given timeout.
+ """
+ if timeout is not None:
+ end_time = timeout + time.time()
+
+ fs = set(fs)
+ with _AcquireFutures(fs):
+ finished = set(
+ f for f in fs
+ if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
+ pending = fs - finished
+ waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
+
+ try:
+ for future in finished:
+ yield future
+
+ while pending:
+ if timeout is None:
+ wait_timeout = None
+ else:
+ wait_timeout = end_time - time.time()
+ if wait_timeout < 0:
+ raise TimeoutError(
+ '%d (of %d) futures unfinished' % (
+ len(pending), len(fs)))
+
+ waiter.event.wait(wait_timeout)
+
+ with waiter.lock:
+ finished = waiter.finished_futures
+ waiter.finished_futures = []
+ waiter.event.clear()
+
+ for future in finished:
+ yield future
+ pending.remove(future)
+
+ finally:
+ for f in fs:
+ with f._condition:
+ f._waiters.remove(waiter)
+
+DoneAndNotDoneFutures = collections.namedtuple(
+ 'DoneAndNotDoneFutures', 'done not_done')
+def wait(fs, timeout=None, return_when=ALL_COMPLETED):
+ """Wait for the futures in the given sequence to complete.
+
+ Args:
+ fs: The sequence of Futures (possibly created by different Executors) to
+ wait upon.
+ timeout: The maximum number of seconds to wait. If None, then there
+ is no limit on the wait time.
+ return_when: Indicates when this function should return. The options
+ are:
+
+ FIRST_COMPLETED - Return when any future finishes or is
+ cancelled.
+ FIRST_EXCEPTION - Return when any future finishes by raising an
+ exception. If no future raises an exception
+ then it is equivalent to ALL_COMPLETED.
+ ALL_COMPLETED - Return when all futures finish or are cancelled.
+
+ Returns:
+ A named 2-tuple of sets. The first set, named 'done', contains the
+ futures that completed (finished or were cancelled) before the wait
+ completed. The second set, named 'not_done', contains uncompleted
+ futures.
+ """
+ with _AcquireFutures(fs):
+ done = set(f for f in fs
+ if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
+ not_done = set(fs) - done
+
+ if (return_when == FIRST_COMPLETED) and done:
+ return DoneAndNotDoneFutures(done, not_done)
+ elif (return_when == FIRST_EXCEPTION) and done:
+ if any(f for f in done
+ if not f.cancelled() and f.exception() is not None):
+ return DoneAndNotDoneFutures(done, not_done)
+
+ if len(done) == len(fs):
+ return DoneAndNotDoneFutures(done, not_done)
+
+ waiter = _create_and_install_waiters(fs, return_when)
+
+ waiter.event.wait(timeout)
+ for f in fs:
+ with f._condition:
+ f._waiters.remove(waiter)
+
+ done.update(waiter.finished_futures)
+ return DoneAndNotDoneFutures(done, set(fs) - done)
+
+class Future(object):
+ """Represents the result of an asynchronous computation."""
+
+ def __init__(self):
+ """Initializes the future. Should not be called by clients."""
+ self._condition = threading.Condition()
+ self._state = PENDING
+ self._result = None
+ self._exception = None
+ self._traceback = None
+ self._waiters = []
+ self._done_callbacks = []
+
+ def _invoke_callbacks(self):
+ for callback in self._done_callbacks:
+ try:
+ callback(self)
+ except Exception:
+ LOGGER.exception('exception calling callback for %r', self)
+
+ def __repr__(self):
+ with self._condition:
+ if self._state == FINISHED:
+ if self._exception:
+ return '<Future at %s state=%s raised %s>' % (
+ hex(id(self)),
+ _STATE_TO_DESCRIPTION_MAP[self._state],
+ self._exception.__class__.__name__)
+ else:
+ return '<Future at %s state=%s returned %s>' % (
+ hex(id(self)),
+ _STATE_TO_DESCRIPTION_MAP[self._state],
+ self._result.__class__.__name__)
+ return '<Future at %s state=%s>' % (
+ hex(id(self)),
+ _STATE_TO_DESCRIPTION_MAP[self._state])
+
+ def cancel(self):
+ """Cancel the future if possible.
+
+ Returns True if the future was cancelled, False otherwise. A future
+ cannot be cancelled if it is running or has already completed.
+ """
+ with self._condition:
+ if self._state in [RUNNING, FINISHED]:
+ return False
+
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ return True
+
+ self._state = CANCELLED
+ self._condition.notify_all()
+
+ self._invoke_callbacks()
+ return True
+
+ def cancelled(self):
+ """Return True if the future was cancelled."""
+ with self._condition:
+ return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]
+
+ def running(self):
+ """Return True if the future is currently executing."""
+ with self._condition:
+ return self._state == RUNNING
+
+ def done(self):
+ """Return True if the future was cancelled or finished executing."""
+ with self._condition:
+ return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]
+
+ def __get_result(self):
+ if self._exception:
+ raise type(self._exception), self._exception, self._traceback
+ else:
+ return self._result
+
+ def add_done_callback(self, fn):
+ """Attaches a callable that will be called when the future finishes.
+
+ Args:
+ fn: A callable that will be called with this future as its only
+ argument when the future completes or is cancelled. The callable
+ will always be called by a thread in the same process in which
+ it was added. If the future has already completed or been
+ cancelled then the callable will be called immediately. These
+ callables are called in the order that they were added.
+ """
+ with self._condition:
+ if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
+ self._done_callbacks.append(fn)
+ return
+ fn(self)
+
+ def result(self, timeout=None):
+ """Return the result of the call that the future represents.
+
+ Args:
+ timeout: The number of seconds to wait for the result if the future
+ isn't done. If None, then there is no limit on the wait time.
+
+ Returns:
+ The result of the call that the future represents.
+
+ Raises:
+ CancelledError: If the future was cancelled.
+ TimeoutError: If the future didn't finish executing before the given
+ timeout.
+ Exception: If the call raised then that exception will be raised.
+ """
+ with self._condition:
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self.__get_result()
+
+ self._condition.wait(timeout)
+
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self.__get_result()
+ else:
+ raise TimeoutError()
+
+ def exception_info(self, timeout=None):
+ """Return a tuple of (exception, traceback) raised by the call that the
+ future represents.
+
+ Args:
+ timeout: The number of seconds to wait for the exception if the
+ future isn't done. If None, then there is no limit on the wait
+ time.
+
+ Returns:
+ A tuple of the exception and the traceback raised by the call that
+ the future represents, or (None, None) if the call completed without
+ raising.
+
+ Raises:
+ CancelledError: If the future was cancelled.
+ TimeoutError: If the future didn't finish executing before the given
+ timeout.
+ """
+ with self._condition:
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self._exception, self._traceback
+
+ self._condition.wait(timeout)
+
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self._exception, self._traceback
+ else:
+ raise TimeoutError()
+
+ def exception(self, timeout=None):
+ """Return the exception raised by the call that the future represents.
+
+ Args:
+ timeout: The number of seconds to wait for the exception if the
+ future isn't done. If None, then there is no limit on the wait
+ time.
+
+ Returns:
+ The exception raised by the call that the future represents or None
+ if the call completed without raising.
+
+ Raises:
+ CancelledError: If the future was cancelled.
+ TimeoutError: If the future didn't finish executing before the given
+ timeout.
+ """
+ return self.exception_info(timeout)[0]
+
+ # The following methods should only be used by Executors and in tests.
+ def set_running_or_notify_cancel(self):
+ """Mark the future as running or process any cancel notifications.
+
+ Should only be used by Executor implementations and unit tests.
+
+ If the future has been cancelled (cancel() was called and returned
+ True) then any threads waiting on the future completing (through calls
+ to as_completed() or wait()) are notified and False is returned.
+
+ If the future was not cancelled then it is put in the running state
+ (future calls to running() will return True) and True is returned.
+
+ This method should be called by Executor implementations before
+ executing the work associated with this future. If this method returns
+ False then the work should not be executed.
+
+ Returns:
+ False if the Future was cancelled, True otherwise.
+
+ Raises:
+ RuntimeError: if this method was already called or if set_result()
+ or set_exception() was called.
+ """
+ with self._condition:
+ if self._state == CANCELLED:
+ self._state = CANCELLED_AND_NOTIFIED
+ for waiter in self._waiters:
+ waiter.add_cancelled(self)
+ # self._condition.notify_all() is not necessary because
+ # self.cancel() triggers a notification.
+ return False
+ elif self._state == PENDING:
+ self._state = RUNNING
+ return True
+ else:
+ LOGGER.critical('Future %s in unexpected state: %s',
+ id(self),
+ self._state)
+ raise RuntimeError('Future in unexpected state')
+
+ def set_result(self, result):
+ """Sets the return value of work associated with the future.
+
+ Should only be used by Executor implementations and unit tests.
+ """
+ with self._condition:
+ self._result = result
+ self._state = FINISHED
+ for waiter in self._waiters:
+ waiter.add_result(self)
+ self._condition.notify_all()
+ self._invoke_callbacks()
+
+ def set_exception_info(self, exception, traceback):
+ """Sets the result of the future as being the given exception
+ and traceback.
+
+ Should only be used by Executor implementations and unit tests.
+ """
+ with self._condition:
+ self._exception = exception
+ self._traceback = traceback
+ self._state = FINISHED
+ for waiter in self._waiters:
+ waiter.add_exception(self)
+ self._condition.notify_all()
+ self._invoke_callbacks()
+
+ def set_exception(self, exception):
+ """Sets the result of the future as being the given exception.
+
+ Should only be used by Executor implementations and unit tests.
+ """
+ self.set_exception_info(exception, None)
+
+class Executor(object):
+ """This is an abstract base class for concrete asynchronous executors."""
+
+ def submit(self, fn, *args, **kwargs):
+ """Submits a callable to be executed with the given arguments.
+
+ Schedules the callable to be executed as fn(*args, **kwargs) and returns
+ a Future instance representing the execution of the callable.
+
+ Returns:
+ A Future representing the given call.
+ """
+ raise NotImplementedError()
+
+ def map(self, fn, *iterables, **kwargs):
+ """Returns an iterator equivalent to map(fn, iter).
+
+ Args:
+ fn: A callable that will take as many arguments as there are
+ passed iterables.
+ timeout: The maximum number of seconds to wait. If None, then there
+ is no limit on the wait time.
+
+ Returns:
+ An iterator equivalent to: map(func, *iterables) but the calls may
+ be evaluated out-of-order.
+
+ Raises:
+ TimeoutError: If the entire result iterator could not be generated
+ before the given timeout.
+ Exception: If fn(*args) raises for any values.
+ """
+ timeout = kwargs.get('timeout')
+ if timeout is not None:
+ end_time = timeout + time.time()
+
+ fs = [self.submit(fn, *args) for args in itertools.izip(*iterables)]
+
+ # Yield must be hidden in closure so that the futures are submitted
+ # before the first iterator value is required.
+ def result_iterator():
+ try:
+ for future in fs:
+ if timeout is None:
+ yield future.result()
+ else:
+ yield future.result(end_time - time.time())
+ finally:
+ for future in fs:
+ future.cancel()
+ return result_iterator()
+
+ def shutdown(self, wait=True):
+ """Clean-up the resources associated with the Executor.
+
+ It is safe to call this method several times, but no other methods can
+ be called after this one.
+
+ Args:
+ wait: If True then shutdown will not return until all running
+ futures have finished executing and the resources used by the
+ executor have been reclaimed.
+ """
+ pass
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.shutdown(wait=True)
+ return False
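
The wait() and as_completed() helpers defined above are the module-level entry points for blocking on groups of futures: wait() returns a DoneAndNotDoneFutures named tuple, while as_completed() yields futures in completion order. A minimal sketch of both, assuming a small thread pool (the sleepy helper is illustrative):

    import time
    from concurrent.futures import (ThreadPoolExecutor, wait, as_completed,
                                    FIRST_COMPLETED)

    def sleepy(seconds):
        time.sleep(seconds)
        return seconds

    executor = ThreadPoolExecutor(max_workers=3)
    futures = [executor.submit(sleepy, s) for s in (0.3, 0.1, 0.2)]

    # Block until the quickest future finishes.
    done, not_done = wait(futures, return_when=FIRST_COMPLETED)

    # Drain the rest in the order they complete, not the order submitted.
    for future in as_completed(futures):
        print(future.result())

    executor.shutdown()
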
diff --git a/third_party/python/futures/concurrent/futures/process.py b/third_party/python/futures/concurrent/futures/process.py
new file mode 100644
index 0000000000..72528410c1
--- /dev/null
+++ b/third_party/python/futures/concurrent/futures/process.py
@@ -0,0 +1,359 @@
+# Copyright 2009 Brian Quinlan. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Implements ProcessPoolExecutor.
+
+The follow diagram and text describe the data-flow through the system:
+
+|======================= In-process =====================|== Out-of-process ==|
+
++----------+ +----------+ +--------+ +-----------+ +---------+
+| | => | Work Ids | => | | => | Call Q | => | |
+| | +----------+ | | +-----------+ | |
+| | | ... | | | | ... | | |
+| | | 6 | | | | 5, call() | | |
+| | | 7 | | | | ... | | |
+| Process | | ... | | Local | +-----------+ | Process |
+| Pool | +----------+ | Worker | | #1..n |
+| Executor | | Thread | | |
+| | +----------- + | | +-----------+ | |
+| | <=> | Work Items | <=> | | <= | Result Q | <= | |
+| | +------------+ | | +-----------+ | |
+| | | 6: call() | | | | ... | | |
+| | | future | | | | 4, result | | |
+| | | ... | | | | 3, except | | |
++----------+ +------------+ +--------+ +-----------+ +---------+
+
+Executor.submit() called:
+- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict
+- adds the id of the _WorkItem to the "Work Ids" queue
+
+Local worker thread:
+- reads work ids from the "Work Ids" queue and looks up the corresponding
+ WorkItem from the "Work Items" dict: if the work item has been cancelled then
+ it is simply removed from the dict, otherwise it is repackaged as a
+ _CallItem and put in the "Call Q". New _CallItems are put in the "Call Q"
+ until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because
+ calls placed in the "Call Q" can no longer be cancelled with Future.cancel().
+- reads _ResultItems from "Result Q", updates the future stored in the
+ "Work Items" dict and deletes the dict entry
+
+Process #1..n:
+- reads _CallItems from "Call Q", executes the calls, and puts the resulting
+ _ResultItems in "Result Q"
+"""
+
+import atexit
+from concurrent.futures import _base
+import Queue as queue
+import multiprocessing
+import threading
+import weakref
+import sys
+
+__author__ = 'Brian Quinlan (brian@sweetapp.com)'
+
+# Workers are created as daemon threads and processes. This is done to allow the
+# interpreter to exit when there are still idle processes in a
+# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However,
+# allowing workers to die with the interpreter has two undesirable properties:
+# - The workers would still be running during interpreter shutdown,
+# meaning that they would fail in unpredictable ways.
+# - The workers could be killed while evaluating a work item, which could
+# be bad if the callable being evaluated has external side-effects e.g.
+# writing to a file.
+#
+# To work around this problem, an exit handler is installed which tells the
+# workers to exit when their work queues are empty and then waits until the
+# threads/processes finish.
+
+_threads_queues = weakref.WeakKeyDictionary()
+_shutdown = False
+
+def _python_exit():
+ global _shutdown
+ _shutdown = True
+ items = list(_threads_queues.items()) if _threads_queues else ()
+ for t, q in items:
+ q.put(None)
+ for t, q in items:
+ t.join(sys.maxint)
+
+# Controls how many more calls than processes will be queued in the call queue.
+# A smaller number will mean that processes spend more time idle waiting for
+# work while a larger number will make Future.cancel() succeed less frequently
+# (Futures in the call queue cannot be cancelled).
+EXTRA_QUEUED_CALLS = 1
+
+class _WorkItem(object):
+ def __init__(self, future, fn, args, kwargs):
+ self.future = future
+ self.fn = fn
+ self.args = args
+ self.kwargs = kwargs
+
+class _ResultItem(object):
+ def __init__(self, work_id, exception=None, result=None):
+ self.work_id = work_id
+ self.exception = exception
+ self.result = result
+
+class _CallItem(object):
+ def __init__(self, work_id, fn, args, kwargs):
+ self.work_id = work_id
+ self.fn = fn
+ self.args = args
+ self.kwargs = kwargs
+
+def _process_worker(call_queue, result_queue):
+ """Evaluates calls from call_queue and places the results in result_queue.
+
+ This worker is run in a separate process.
+
+ Args:
+ call_queue: A multiprocessing.Queue of _CallItems that will be read and
+ evaluated by the worker.
+ result_queue: A multiprocessing.Queue of _ResultItems that will be written
+ to by the worker.
+ """
+ while True:
+ call_item = call_queue.get(block=True)
+ if call_item is None:
+ # Wake up queue management thread
+ result_queue.put(None)
+ return
+ try:
+ r = call_item.fn(*call_item.args, **call_item.kwargs)
+ except BaseException:
+ e = sys.exc_info()[1]
+ result_queue.put(_ResultItem(call_item.work_id,
+ exception=e))
+ else:
+ result_queue.put(_ResultItem(call_item.work_id,
+ result=r))
+
+def _add_call_item_to_queue(pending_work_items,
+ work_ids,
+ call_queue):
+ """Fills call_queue with _WorkItems from pending_work_items.
+
+ This function never blocks.
+
+ Args:
+ pending_work_items: A dict mapping work ids to _WorkItems e.g.
+ {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
+ work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
+ are consumed and the corresponding _WorkItems from
+ pending_work_items are transformed into _CallItems and put in
+ call_queue.
+ call_queue: A multiprocessing.Queue that will be filled with _CallItems
+ derived from _WorkItems.
+ """
+ while True:
+ if call_queue.full():
+ return
+ try:
+ work_id = work_ids.get(block=False)
+ except queue.Empty:
+ return
+ else:
+ work_item = pending_work_items[work_id]
+
+ if work_item.future.set_running_or_notify_cancel():
+ call_queue.put(_CallItem(work_id,
+ work_item.fn,
+ work_item.args,
+ work_item.kwargs),
+ block=True)
+ else:
+ del pending_work_items[work_id]
+ continue
+
+def _queue_management_worker(executor_reference,
+ processes,
+ pending_work_items,
+ work_ids_queue,
+ call_queue,
+ result_queue):
+ """Manages the communication between this process and the worker processes.
+
+ This function is run in a local thread.
+
+ Args:
+ executor_reference: A weakref.ref to the ProcessPoolExecutor that owns
+ this thread. Used to determine if the ProcessPoolExecutor has been
+ garbage collected and that this function can exit.
+ processes: A list of the multiprocessing.Process instances used as
+ workers.
+ pending_work_items: A dict mapping work ids to _WorkItems e.g.
+ {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
+ work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]).
+ call_queue: A multiprocessing.Queue that will be filled with _CallItems
+ derived from _WorkItems for processing by the process workers.
+ result_queue: A multiprocessing.Queue of _ResultItems generated by the
+ process workers.
+ """
+ nb_shutdown_processes = [0]
+ def shutdown_one_process():
+ """Tell a worker to terminate, which will in turn wake us again"""
+ call_queue.put(None)
+ nb_shutdown_processes[0] += 1
+ while True:
+ _add_call_item_to_queue(pending_work_items,
+ work_ids_queue,
+ call_queue)
+
+ result_item = result_queue.get(block=True)
+ if result_item is not None:
+ work_item = pending_work_items[result_item.work_id]
+ del pending_work_items[result_item.work_id]
+
+ if result_item.exception:
+ work_item.future.set_exception(result_item.exception)
+ else:
+ work_item.future.set_result(result_item.result)
+ # Delete references to object. See issue16284
+ del work_item
+ # Check whether we should start shutting down.
+ executor = executor_reference()
+ # No more work items can be added if:
+ # - The interpreter is shutting down OR
+ # - The executor that owns this worker has been collected OR
+ # - The executor that owns this worker has been shutdown.
+ if _shutdown or executor is None or executor._shutdown_thread:
+ # Since no new work items can be added, it is safe to shutdown
+ # this thread if there are no pending work items.
+ if not pending_work_items:
+ while nb_shutdown_processes[0] < len(processes):
+ shutdown_one_process()
+ # If .join() is not called on the created processes then
+ # some multiprocessing.Queue methods may deadlock on Mac OS
+ # X.
+ for p in processes:
+ p.join()
+ call_queue.close()
+ return
+ del executor
+
+_system_limits_checked = False
+_system_limited = None
+def _check_system_limits():
+ global _system_limits_checked, _system_limited
+ if _system_limits_checked:
+ if _system_limited:
+ raise NotImplementedError(_system_limited)
+ _system_limits_checked = True
+ try:
+ import os
+ nsems_max = os.sysconf("SC_SEM_NSEMS_MAX")
+ except (AttributeError, ValueError):
+ # sysconf not available or setting not available
+ return
+ if nsems_max == -1:
+ # indeterminate limit, assume that limit is determined
+ # by available memory only
+ return
+ if nsems_max >= 256:
+ # minimum number of semaphores available
+ # according to POSIX
+ return
+ _system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max
+ raise NotImplementedError(_system_limited)
+
+class ProcessPoolExecutor(_base.Executor):
+ def __init__(self, max_workers=None):
+ """Initializes a new ProcessPoolExecutor instance.
+
+ Args:
+ max_workers: The maximum number of processes that can be used to
+ execute the given calls. If None or not given then as many
+ worker processes will be created as the machine has processors.
+ """
+ _check_system_limits()
+
+ if max_workers is None:
+ self._max_workers = multiprocessing.cpu_count()
+ else:
+ self._max_workers = max_workers
+
+ # Make the call queue slightly larger than the number of processes to
+ # prevent the worker processes from idling. But don't make it too big
+ # because futures in the call queue cannot be cancelled.
+ self._call_queue = multiprocessing.Queue(self._max_workers +
+ EXTRA_QUEUED_CALLS)
+ self._result_queue = multiprocessing.Queue()
+ self._work_ids = queue.Queue()
+ self._queue_management_thread = None
+ self._processes = set()
+
+ # Shutdown is a two-step process.
+ self._shutdown_thread = False
+ self._shutdown_lock = threading.Lock()
+ self._queue_count = 0
+ self._pending_work_items = {}
+
+ def _start_queue_management_thread(self):
+ # When the executor gets lost, the weakref callback will wake up
+ # the queue management thread.
+ def weakref_cb(_, q=self._result_queue):
+ q.put(None)
+ if self._queue_management_thread is None:
+ self._queue_management_thread = threading.Thread(
+ target=_queue_management_worker,
+ args=(weakref.ref(self, weakref_cb),
+ self._processes,
+ self._pending_work_items,
+ self._work_ids,
+ self._call_queue,
+ self._result_queue))
+ self._queue_management_thread.daemon = True
+ self._queue_management_thread.start()
+ _threads_queues[self._queue_management_thread] = self._result_queue
+
+ def _adjust_process_count(self):
+ for _ in range(len(self._processes), self._max_workers):
+ p = multiprocessing.Process(
+ target=_process_worker,
+ args=(self._call_queue,
+ self._result_queue))
+ p.start()
+ self._processes.add(p)
+
+ def submit(self, fn, *args, **kwargs):
+ with self._shutdown_lock:
+ if self._shutdown_thread:
+ raise RuntimeError('cannot schedule new futures after shutdown')
+
+ f = _base.Future()
+ w = _WorkItem(f, fn, args, kwargs)
+
+ self._pending_work_items[self._queue_count] = w
+ self._work_ids.put(self._queue_count)
+ self._queue_count += 1
+ # Wake up queue management thread
+ self._result_queue.put(None)
+
+ self._start_queue_management_thread()
+ self._adjust_process_count()
+ return f
+ submit.__doc__ = _base.Executor.submit.__doc__
+
+ def shutdown(self, wait=True):
+ with self._shutdown_lock:
+ self._shutdown_thread = True
+ if self._queue_management_thread:
+ # Wake up queue management thread
+ self._result_queue.put(None)
+ if wait:
+ self._queue_management_thread.join(sys.maxint)
+ # To reduce the risk of opening too many files, remove references to
+ # objects that use file descriptors.
+ self._queue_management_thread = None
+ self._call_queue = None
+ self._result_queue = None
+ self._processes = None
+ shutdown.__doc__ = _base.Executor.shutdown.__doc__
+
+atexit.register(_python_exit)
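
As the data-flow description at the top of this file implies, work items travel to the worker processes over multiprocessing queues, so the callable and its arguments must be picklable, and on platforms that spawn rather than fork, the pool should be created behind a __main__ guard so child processes can import the module safely. A minimal sketch (the cube helper is illustrative):

    from concurrent.futures import ProcessPoolExecutor

    def cube(x):
        return x * x * x

    def main():
        with ProcessPoolExecutor(max_workers=2) as executor:
            # map() preserves input order even though calls run out of order.
            for value in executor.map(cube, range(5)):
                print(value)

    if __name__ == '__main__':
        main()
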
diff --git a/third_party/python/futures/concurrent/futures/thread.py b/third_party/python/futures/concurrent/futures/thread.py
new file mode 100644
index 0000000000..85ab4b7432
--- /dev/null
+++ b/third_party/python/futures/concurrent/futures/thread.py
@@ -0,0 +1,134 @@
+# Copyright 2009 Brian Quinlan. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Implements ThreadPoolExecutor."""
+
+import atexit
+from concurrent.futures import _base
+import Queue as queue
+import threading
+import weakref
+import sys
+
+__author__ = 'Brian Quinlan (brian@sweetapp.com)'
+
+# Workers are created as daemon threads. This is done to allow the interpreter
+# to exit when there are still idle threads in a ThreadPoolExecutor's thread
+# pool (i.e. shutdown() was not called). However, allowing workers to die with
+# the interpreter has two undesirable properties:
+# - The workers would still be running during interpreter shutdown,
+# meaning that they would fail in unpredictable ways.
+# - The workers could be killed while evaluating a work item, which could
+# be bad if the callable being evaluated has external side-effects e.g.
+# writing to a file.
+#
+# To work around this problem, an exit handler is installed which tells the
+# workers to exit when their work queues are empty and then waits until the
+# threads finish.
+
+_threads_queues = weakref.WeakKeyDictionary()
+_shutdown = False
+
+def _python_exit():
+ global _shutdown
+ _shutdown = True
+ items = list(_threads_queues.items()) if _threads_queues else ()
+ for t, q in items:
+ q.put(None)
+ for t, q in items:
+ t.join(sys.maxint)
+
+atexit.register(_python_exit)
+
+class _WorkItem(object):
+ def __init__(self, future, fn, args, kwargs):
+ self.future = future
+ self.fn = fn
+ self.args = args
+ self.kwargs = kwargs
+
+ def run(self):
+ if not self.future.set_running_or_notify_cancel():
+ return
+
+ try:
+ result = self.fn(*self.args, **self.kwargs)
+ except BaseException:
+ e, tb = sys.exc_info()[1:]
+ self.future.set_exception_info(e, tb)
+ else:
+ self.future.set_result(result)
+
+def _worker(executor_reference, work_queue):
+ try:
+ while True:
+ work_item = work_queue.get(block=True)
+ if work_item is not None:
+ work_item.run()
+ # Delete references to object. See issue16284
+ del work_item
+ continue
+ executor = executor_reference()
+ # Exit if:
+ # - The interpreter is shutting down OR
+ # - The executor that owns the worker has been collected OR
+ # - The executor that owns the worker has been shutdown.
+ if _shutdown or executor is None or executor._shutdown:
+ # Notify other workers
+ work_queue.put(None)
+ return
+ del executor
+ except BaseException:
+ _base.LOGGER.critical('Exception in worker', exc_info=True)
+
+class ThreadPoolExecutor(_base.Executor):
+ def __init__(self, max_workers):
+ """Initializes a new ThreadPoolExecutor instance.
+
+ Args:
+ max_workers: The maximum number of threads that can be used to
+ execute the given calls.
+ """
+ self._max_workers = max_workers
+ self._work_queue = queue.Queue()
+ self._threads = set()
+ self._shutdown = False
+ self._shutdown_lock = threading.Lock()
+
+ def submit(self, fn, *args, **kwargs):
+ with self._shutdown_lock:
+ if self._shutdown:
+ raise RuntimeError('cannot schedule new futures after shutdown')
+
+ f = _base.Future()
+ w = _WorkItem(f, fn, args, kwargs)
+
+ self._work_queue.put(w)
+ self._adjust_thread_count()
+ return f
+ submit.__doc__ = _base.Executor.submit.__doc__
+
+ def _adjust_thread_count(self):
+ # When the executor gets lost, the weakref callback will wake up
+ # the worker threads.
+ def weakref_cb(_, q=self._work_queue):
+ q.put(None)
+ # TODO(bquinlan): Should avoid creating new threads if there are more
+ # idle threads than items in the work queue.
+ if len(self._threads) < self._max_workers:
+ t = threading.Thread(target=_worker,
+ args=(weakref.ref(self, weakref_cb),
+ self._work_queue))
+ t.daemon = True
+ t.start()
+ self._threads.add(t)
+ _threads_queues[t] = self._work_queue
+
+ def shutdown(self, wait=True):
+ with self._shutdown_lock:
+ self._shutdown = True
+ self._work_queue.put(None)
+ if wait:
+ for t in self._threads:
+ t.join(sys.maxint)
+ shutdown.__doc__ = _base.Executor.shutdown.__doc__
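
When a submitted callable raises, the worker thread above stores both the exception and its traceback via Future.set_exception_info(), which is what makes Future.exception_info() useful on Python 2. A minimal sketch (the failing helper is illustrative):

    from concurrent.futures import ThreadPoolExecutor

    def fail():
        raise ValueError('boom')

    executor = ThreadPoolExecutor(max_workers=1)
    future = executor.submit(fail)

    # exception_info() returns the (exception, traceback) pair captured by
    # the worker; exception() returns only the exception object.
    exc, tb = future.exception_info()
    print(exc)

    executor.shutdown()
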
diff --git a/third_party/python/futures/crawl.py b/third_party/python/futures/crawl.py
new file mode 100644
index 0000000000..86e0af7fe6
--- /dev/null
+++ b/third_party/python/futures/crawl.py
@@ -0,0 +1,74 @@
+"""Compare the speed of downloading URLs sequentially vs. using futures."""
+
+import functools
+import time
+import timeit
+import sys
+
+try:
+ from urllib2 import urlopen
+except ImportError:
+ from urllib.request import urlopen
+
+from concurrent.futures import (as_completed, ThreadPoolExecutor,
+ ProcessPoolExecutor)
+
+URLS = ['http://www.google.com/',
+ 'http://www.apple.com/',
+ 'http://www.ibm.com',
+ 'http://www.thisurlprobablydoesnotexist.com',
+ 'http://www.slashdot.org/',
+ 'http://www.python.org/',
+ 'http://www.bing.com/',
+ 'http://www.facebook.com/',
+ 'http://www.yahoo.com/',
+ 'http://www.youtube.com/',
+ 'http://www.blogger.com/']
+
+def load_url(url, timeout):
+ kwargs = {'timeout': timeout} if sys.version_info >= (2, 6) else {}
+ return urlopen(url, **kwargs).read()
+
+def download_urls_sequential(urls, timeout=60):
+ url_to_content = {}
+ for url in urls:
+ try:
+ url_to_content[url] = load_url(url, timeout=timeout)
+ except:
+ pass
+ return url_to_content
+
+def download_urls_with_executor(urls, executor, timeout=60):
+ try:
+ url_to_content = {}
+ future_to_url = dict((executor.submit(load_url, url, timeout), url)
+ for url in urls)
+
+ for future in as_completed(future_to_url):
+ try:
+ url_to_content[future_to_url[future]] = future.result()
+ except:
+ pass
+ return url_to_content
+ finally:
+ executor.shutdown()
+
+def main():
+ for name, fn in [('sequential',
+ functools.partial(download_urls_sequential, URLS)),
+ ('processes',
+ functools.partial(download_urls_with_executor,
+ URLS,
+ ProcessPoolExecutor(10))),
+ ('threads',
+ functools.partial(download_urls_with_executor,
+ URLS,
+ ThreadPoolExecutor(10)))]:
+ sys.stdout.write('%s: ' % name.ljust(12))
+ start = time.time()
+ url_map = fn()
+ sys.stdout.write('%.2f seconds (%d of %d downloaded)\n' %
+ (time.time() - start, len(url_map), len(URLS)))
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/python/futures/docs/Makefile b/third_party/python/futures/docs/Makefile
new file mode 100644
index 0000000000..f69d840353
--- /dev/null
+++ b/third_party/python/futures/docs/Makefile
@@ -0,0 +1,88 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d _build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf _build/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) _build/html
+ @echo
+ @echo "Build finished. The HTML pages are in _build/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) _build/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in _build/dirhtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) _build/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) _build/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) _build/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in _build/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) _build/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in _build/qthelp, like this:"
+ @echo "# qcollectiongenerator _build/qthelp/futures.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile _build/qthelp/futures.qhc"
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in _build/latex."
+ @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
+ "run these through (pdf)latex."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) _build/changes
+ @echo
+ @echo "The overview file is in _build/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) _build/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in _build/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) _build/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in _build/doctest/output.txt."
diff --git a/third_party/python/futures/docs/conf.py b/third_party/python/futures/docs/conf.py
new file mode 100644
index 0000000000..5cea44c881
--- /dev/null
+++ b/third_party/python/futures/docs/conf.py
@@ -0,0 +1,194 @@
+# -*- coding: utf-8 -*-
+#
+# futures documentation build configuration file, created by
+# sphinx-quickstart on Wed Jun 3 19:35:34 2009.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.append(os.path.abspath('.'))
+
+# -- General configuration -----------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = []
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'futures'
+copyright = u'2009-2011, Brian Quinlan'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '2.1.6'
+# The full version, including alpha/beta/rc tags.
+release = '2.1.6'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+#unused_docs = []
+
+# List of directories, relative to source directory, that shouldn't be searched
+# for source files.
+exclude_trees = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. Major themes that come with
+# Sphinx are currently 'default' and 'sphinxdoc'.
+html_theme = 'default'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'futuresdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'futures.tex', u'futures Documentation',
+ u'Brian Quinlan', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
diff --git a/third_party/python/futures/docs/index.rst b/third_party/python/futures/docs/index.rst
new file mode 100644
index 0000000000..f53bc04cf5
--- /dev/null
+++ b/third_party/python/futures/docs/index.rst
@@ -0,0 +1,347 @@
+:mod:`concurrent.futures` --- Asynchronous computation
+======================================================
+
+.. module:: concurrent.futures
+ :synopsis: Execute computations asynchronously using threads or processes.
+
+The :mod:`concurrent.futures` module provides a high-level interface for
+asynchronously executing callables.
+
+The asynchronous execution can be performed by threads using
+:class:`ThreadPoolExecutor` or separate processes using
+:class:`ProcessPoolExecutor`. Both implement the same interface, which is
+defined by the abstract :class:`Executor` class.
+
+Executor Objects
+----------------
+
+:class:`Executor` is an abstract class that provides methods to execute calls
+asynchronously. It should not be used directly, but through its two
+subclasses: :class:`ThreadPoolExecutor` and :class:`ProcessPoolExecutor`.
+
+.. method:: Executor.submit(fn, *args, **kwargs)
+
+ Schedules the callable to be executed as *fn*(*\*args*, *\*\*kwargs*) and
+ returns a :class:`Future` representing the execution of the callable.
+
+::
+
+ with ThreadPoolExecutor(max_workers=1) as executor:
+ future = executor.submit(pow, 323, 1235)
+ print(future.result())
+
+.. method:: Executor.map(func, *iterables, timeout=None)
+
+ Equivalent to map(*func*, *\*iterables*) but func is executed asynchronously
+ and several calls to *func* may be made concurrently. The returned iterator
+ raises a :exc:`TimeoutError` if :meth:`__next__()` is called and the result
+ isn't available after *timeout* seconds from the original call to
+ :meth:`map()`. *timeout* can be an int or float. If *timeout* is not
+ specified or ``None`` then there is no limit to the wait time. If a call
+ raises an exception then that exception will be raised when its value is
+ retrieved from the iterator.
+
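+A minimal usage sketch (the pool size and the arguments to ``pow`` are
+illustrative)::
+
+    with ThreadPoolExecutor(max_workers=3) as executor:
+        for result in executor.map(pow, [2, 3, 4], [5, 6, 7]):
+            print(result)
+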
+.. method:: Executor.shutdown(wait=True)
+
+ Signal the executor that it should free any resources that it is using when
+ the currently pending futures are done executing. Calls to
+ :meth:`Executor.submit` and :meth:`Executor.map` made after shutdown will
+ raise :exc:`RuntimeError`.
+
+ If *wait* is `True` then this method will not return until all the pending
+ futures are done executing and the resources associated with the executor
+ have been freed. If *wait* is `False` then this method will return
+ immediately and the resources associated with the executor will be freed
+ when all pending futures are done executing. Regardless of the value of
+ *wait*, the entire Python program will not exit until all pending futures
+ are done executing.
+
+ You can avoid having to call this method explicitly if you use the `with`
+ statement, which will shutdown the `Executor` (waiting as if
+ `Executor.shutdown` were called with *wait* set to `True`):
+
+::
+
+ import shutil
+ with ThreadPoolExecutor(max_workers=4) as e:
+ e.submit(shutil.copy, 'src1.txt', 'dest1.txt')
+ e.submit(shutil.copy, 'src2.txt', 'dest2.txt')
+ e.submit(shutil.copy, 'src3.txt', 'dest3.txt')
+ e.submit(shutil.copy, 'src3.txt', 'dest4.txt')
+
+
+ThreadPoolExecutor Objects
+--------------------------
+
+The :class:`ThreadPoolExecutor` class is an :class:`Executor` subclass that uses
+a pool of threads to execute calls asynchronously.
+
+Deadlock can occur when the callable associated with a :class:`Future` waits on
+the results of another :class:`Future`. For example:
+
+::
+
+ import time
+ def wait_on_b():
+ time.sleep(5)
+ print(b.result()) # b will never complete because it is waiting on a.
+ return 5
+
+ def wait_on_a():
+ time.sleep(5)
+ print(a.result()) # a will never complete because it is waiting on b.
+ return 6
+
+
+ executor = ThreadPoolExecutor(max_workers=2)
+ a = executor.submit(wait_on_b)
+ b = executor.submit(wait_on_a)
+
+And:
+
+::
+
+ def wait_on_future():
+ f = executor.submit(pow, 5, 2)
+ # This will never complete because there is only one worker thread and
+ # it is executing this function.
+ print(f.result())
+
+ executor = ThreadPoolExecutor(max_workers=1)
+ executor.submit(wait_on_future)
+
+.. class:: ThreadPoolExecutor(max_workers)
+
+ Executes calls asynchronously using a pool of at most *max_workers* threads.
+
+.. _threadpoolexecutor-example:
+
+ThreadPoolExecutor Example
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+::
+
+ from concurrent import futures
+ import urllib.request
+
+ URLS = ['http://www.foxnews.com/',
+ 'http://www.cnn.com/',
+ 'http://europe.wsj.com/',
+ 'http://www.bbc.co.uk/',
+ 'http://some-made-up-domain.com/']
+
+ def load_url(url, timeout):
+ return urllib.request.urlopen(url, timeout=timeout).read()
+
+ with futures.ThreadPoolExecutor(max_workers=5) as executor:
+ future_to_url = dict((executor.submit(load_url, url, 60), url)
+ for url in URLS)
+
+ for future in futures.as_completed(future_to_url):
+ url = future_to_url[future]
+ if future.exception() is not None:
+ print('%r generated an exception: %s' % (url,
+ future.exception()))
+ else:
+ print('%r page is %d bytes' % (url, len(future.result())))
+
+ProcessPoolExecutor Objects
+---------------------------
+
+The :class:`ProcessPoolExecutor` class is an :class:`Executor` subclass that
+uses a pool of processes to execute calls asynchronously.
+:class:`ProcessPoolExecutor` uses the :mod:`multiprocessing` module, which
+allows it to side-step the :term:`Global Interpreter Lock` but also means that
+only picklable objects can be executed and returned.
+
+Calling :class:`Executor` or :class:`Future` methods from a callable submitted
+to a :class:`ProcessPoolExecutor` will result in deadlock.
+
+.. class:: ProcessPoolExecutor(max_workers=None)
+
+ Executes calls asynchronously using a pool of at most *max_workers*
+ processes. If *max_workers* is ``None`` or not given then as many worker
+ processes will be created as the machine has processors.
+
+.. _processpoolexecutor-example:
+
+ProcessPoolExecutor Example
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+::
+
+ from concurrent import futures
+ import math
+
+ PRIMES = [
+ 112272535095293,
+ 112582705942171,
+ 112272535095293,
+ 115280095190773,
+ 115797848077099,
+ 1099726899285419]
+
+ def is_prime(n):
+ if n < 2:
+ return False
+ if n == 2:
+ return True
+ if n % 2 == 0:
+ return False
+
+ sqrt_n = int(math.floor(math.sqrt(n)))
+ for i in range(3, sqrt_n + 1, 2):
+ if n % i == 0:
+ return False
+ return True
+
+ def main():
+ with futures.ProcessPoolExecutor() as executor:
+ for number, prime in zip(PRIMES, executor.map(is_prime, PRIMES)):
+ print('%d is prime: %s' % (number, prime))
+
+ if __name__ == '__main__':
+ main()
+
+Future Objects
+--------------
+
+The :class:`Future` class encapsulates the asynchronous execution of a callable.
+:class:`Future` instances are created by :meth:`Executor.submit`.
+
+.. method:: Future.cancel()
+
+ Attempt to cancel the call. If the call is currently being executed then
+ it cannot be cancelled and the method will return `False`, otherwise the call
+ will be cancelled and the method will return `True`.
+
+.. method:: Future.cancelled()
+
+ Return `True` if the call was successfully cancelled.
+
+.. method:: Future.running()
+
+ Return `True` if the call is currently being executed and cannot be
+ cancelled.
+
+.. method:: Future.done()
+
+ Return `True` if the call was successfully cancelled or finished running.
+
+.. method:: Future.result(timeout=None)
+
+ Return the value returned by the call. If the call hasn't yet completed then
+ this method will wait up to *timeout* seconds. If the call hasn't completed
+ in *timeout* seconds then a :exc:`TimeoutError` will be raised. *timeout* can
+   be an int or float. If *timeout* is not specified or ``None`` then there is no
+ limit to the wait time.
+
+ If the future is cancelled before completing then :exc:`CancelledError` will
+ be raised.
+
+ If the call raised then this method will raise the same exception.
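+
+   For example, a minimal sketch of handling a slow call (the one-second
+   timeout is arbitrary)::
+
+      import time
+      from concurrent import futures
+
+      executor = futures.ThreadPoolExecutor(max_workers=1)
+      future = executor.submit(time.sleep, 10)
+      try:
+          future.result(timeout=1)
+      except futures.TimeoutError:
+          # The call is still running after one second.
+          print('result not ready yet')
+      executor.shutdown(wait=False)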
+
+.. method:: Future.exception(timeout=None)
+
+ Return the exception raised by the call. If the call hasn't yet completed
+ then this method will wait up to *timeout* seconds. If the call hasn't
+ completed in *timeout* seconds then a :exc:`TimeoutError` will be raised.
+ *timeout* can be an int or float. If *timeout* is not specified or ``None``
+ then there is no limit to the wait time.
+
+ If the future is cancelled before completing then :exc:`CancelledError` will
+ be raised.
+
+ If the call completed without raising then ``None`` is returned.
+
+.. method:: Future.add_done_callback(fn)
+
+ Attaches the callable *fn* to the future. *fn* will be called, with the
+ future as its only argument, when the future is cancelled or finishes
+ running.
+
+ Added callables are called in the order that they were added and are always
+ called in a thread belonging to the process that added them. If the callable
+ raises an :exc:`Exception` then it will be logged and ignored. If the
+ callable raises another :exc:`BaseException` then the behavior is not
+ defined.
+
+ If the future has already completed or been cancelled then *fn* will be
+ called immediately.
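+
+   A minimal sketch of attaching a callback::
+
+      from concurrent import futures
+
+      def report(future):
+          # Called with the finished future as its only argument.
+          print('call finished, result: %r' % future.result())
+
+      with futures.ThreadPoolExecutor(max_workers=1) as executor:
+          f = executor.submit(pow, 2, 8)
+          f.add_done_callback(report)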
+
+Internal Future Methods
+^^^^^^^^^^^^^^^^^^^^^^^
+
+The following :class:`Future` methods are meant for use in unit tests and
+:class:`Executor` implementations.
+
+.. method:: Future.set_running_or_notify_cancel()
+
+ This method should only be called by :class:`Executor` implementations before
+ executing the work associated with the :class:`Future` and by unit tests.
+
+ If the method returns `False` then the :class:`Future` was cancelled i.e.
+ :meth:`Future.cancel` was called and returned `True`. Any threads waiting
+ on the :class:`Future` completing (i.e. through :func:`as_completed` or
+ :func:`wait`) will be woken up.
+
+ If the method returns `True` then the :class:`Future` was not cancelled
+ and has been put in the running state i.e. calls to
+ :meth:`Future.running` will return `True`.
+
+ This method can only be called once and cannot be called after
+ :meth:`Future.set_result` or :meth:`Future.set_exception` have been
+ called.
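+
+   For illustration, a simplified sketch of how an :class:`Executor`
+   implementation might use this method before running a submitted call (the
+   ``work_item`` structure here is hypothetical, not part of the public
+   API)::
+
+      import sys
+
+      def run_work_item(work_item):
+          # Skip the call entirely if the future was cancelled while pending.
+          if not work_item.future.set_running_or_notify_cancel():
+              return
+          try:
+              result = work_item.fn(*work_item.args, **work_item.kwargs)
+          except BaseException:
+              work_item.future.set_exception(sys.exc_info()[1])
+          else:
+              work_item.future.set_result(result)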
+
+.. method:: Future.set_result(result)
+
+ Sets the result of the work associated with the :class:`Future` to *result*.
+
+ This method should only be used by Executor implementations and unit tests.
+
+.. method:: Future.set_exception(exception)
+
+ Sets the result of the work associated with the :class:`Future` to the
+ :class:`Exception` *exception*.
+
+ This method should only be used by Executor implementations and unit tests.
+
+Module Functions
+----------------
+
+.. function:: wait(fs, timeout=None, return_when=ALL_COMPLETED)
+
+ Wait for the :class:`Future` instances (possibly created by different
+ :class:`Executor` instances) given by *fs* to complete. Returns a named
+ 2-tuple of sets. The first set, named "done", contains the futures that
+ completed (finished or were cancelled) before the wait completed. The second
+ set, named "not_done", contains uncompleted futures.
+
+ *timeout* can be used to control the maximum number of seconds to wait before
+ returning. *timeout* can be an int or float. If *timeout* is not specified or
+ ``None`` then there is no limit to the wait time.
+
+ *return_when* indicates when this function should return. It must be one of
+ the following constants:
+
+ +-----------------------------+----------------------------------------+
+ | Constant | Description |
+ +=============================+========================================+
+ | :const:`FIRST_COMPLETED` | The function will return when any |
+ | | future finishes or is cancelled. |
+ +-----------------------------+----------------------------------------+
+ | :const:`FIRST_EXCEPTION` | The function will return when any |
+ | | future finishes by raising an |
+ | | exception. If no future raises an |
+ | | exception then it is equivalent to |
+ | | `ALL_COMPLETED`. |
+ +-----------------------------+----------------------------------------+
+ | :const:`ALL_COMPLETED` | The function will return when all |
+ | | futures finish or are cancelled. |
+ +-----------------------------+----------------------------------------+
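+
+   For example, a minimal sketch that waits for the first of two calls to
+   finish::
+
+      import time
+      from concurrent import futures
+
+      with futures.ThreadPoolExecutor(max_workers=2) as executor:
+          fast = executor.submit(time.sleep, 0.1)
+          slow = executor.submit(time.sleep, 2)
+          done, not_done = futures.wait([fast, slow],
+                                        return_when=futures.FIRST_COMPLETED)
+          # Typically prints: 1 done, 1 not done
+          print('%d done, %d not done' % (len(done), len(not_done)))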
+
+.. function:: as_completed(fs, timeout=None)
+
+ Returns an iterator over the :class:`Future` instances (possibly created by
+ different :class:`Executor` instances) given by *fs* that yields futures as
+ they complete (finished or were cancelled). Any futures given by *fs* that
+ are duplicated will be returned once. Any futures that completed
+ before :func:`as_completed` is called will be yielded first. The returned
+ iterator raises a :exc:`TimeoutError` if :meth:`~iterator.__next__` is
+ called and the result isn't available after *timeout* seconds from the
+ original call to :func:`as_completed`. *timeout* can be an int or float.
+ If *timeout* is not specified or ``None``, there is no limit to the wait
+ time.
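+
+   For example, a minimal sketch that stops waiting after one second::
+
+      import time
+      from concurrent import futures
+
+      with futures.ThreadPoolExecutor(max_workers=2) as executor:
+          fs = [executor.submit(time.sleep, t) for t in (0.1, 5)]
+          try:
+              for future in futures.as_completed(fs, timeout=1):
+                  print('a call finished')
+          except futures.TimeoutError:
+              print('gave up waiting after one second')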
diff --git a/third_party/python/futures/docs/make.bat b/third_party/python/futures/docs/make.bat
new file mode 100644
index 0000000000..3e8021b56e
--- /dev/null
+++ b/third_party/python/futures/docs/make.bat
@@ -0,0 +1,112 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+set SPHINXBUILD=sphinx-build
+set ALLSPHINXOPTS=-d _build/doctrees %SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. changes to make an overview over all changed/added/deprecated items
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (_build\*) do rmdir /q /s %%i
+ del /q /s _build\*
+ goto end
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% _build/html
+ echo.
+ echo.Build finished. The HTML pages are in _build/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% _build/dirhtml
+ echo.
+ echo.Build finished. The HTML pages are in _build/dirhtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% _build/pickle
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% _build/json
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% _build/htmlhelp
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in _build/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% _build/qthelp
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in _build/qthelp, like this:
+ echo.^> qcollectiongenerator _build\qthelp\futures.qhcp
+ echo.To view the help file:
+ echo.^> assistant -collectionFile _build\qthelp\futures.ghc
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% _build/latex
+ echo.
+ echo.Build finished; the LaTeX files are in _build/latex.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% _build/changes
+ echo.
+ echo.The overview file is in _build/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% _build/linkcheck
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in _build/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% _build/doctest
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in _build/doctest/output.txt.
+ goto end
+)
+
+:end
diff --git a/third_party/python/futures/futures.egg-info/PKG-INFO b/third_party/python/futures/futures.egg-info/PKG-INFO
new file mode 100644
index 0000000000..00e3a99767
--- /dev/null
+++ b/third_party/python/futures/futures.egg-info/PKG-INFO
@@ -0,0 +1,16 @@
+Metadata-Version: 1.0
+Name: futures
+Version: 3.0.5
+Summary: Backport of the concurrent.futures package from Python 3.2
+Home-page: https://github.com/agronholm/pythonfutures
+Author: Alex Gronholm
+Author-email: alex.gronholm+pypi@nextday.fi
+License: BSD
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 2 :: Only
diff --git a/third_party/python/futures/futures.egg-info/SOURCES.txt b/third_party/python/futures/futures.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..31d473be10
--- /dev/null
+++ b/third_party/python/futures/futures.egg-info/SOURCES.txt
@@ -0,0 +1,24 @@
+CHANGES
+LICENSE
+MANIFEST.in
+crawl.py
+primes.py
+setup.cfg
+setup.py
+test_futures.py
+tox.ini
+concurrent/__init__.py
+concurrent/futures/__init__.py
+concurrent/futures/_base.py
+concurrent/futures/process.py
+concurrent/futures/thread.py
+docs/Makefile
+docs/conf.py
+docs/index.rst
+docs/make.bat
+futures.egg-info/PKG-INFO
+futures.egg-info/SOURCES.txt
+futures.egg-info/dependency_links.txt
+futures.egg-info/not-zip-safe
+futures.egg-info/pbr.json
+futures.egg-info/top_level.txt \ No newline at end of file
diff --git a/third_party/python/futures/futures.egg-info/dependency_links.txt b/third_party/python/futures/futures.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/futures/futures.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/third_party/python/futures/futures.egg-info/not-zip-safe b/third_party/python/futures/futures.egg-info/not-zip-safe
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/futures/futures.egg-info/not-zip-safe
@@ -0,0 +1 @@
+
diff --git a/third_party/python/futures/futures.egg-info/pbr.json b/third_party/python/futures/futures.egg-info/pbr.json
new file mode 100644
index 0000000000..6937c11cb5
--- /dev/null
+++ b/third_party/python/futures/futures.egg-info/pbr.json
@@ -0,0 +1 @@
+{"is_release": false, "git_version": "6532a74"} \ No newline at end of file
diff --git a/third_party/python/futures/futures.egg-info/top_level.txt b/third_party/python/futures/futures.egg-info/top_level.txt
new file mode 100644
index 0000000000..e4d7bdcbdb
--- /dev/null
+++ b/third_party/python/futures/futures.egg-info/top_level.txt
@@ -0,0 +1 @@
+concurrent
diff --git a/third_party/python/futures/primes.py b/third_party/python/futures/primes.py
new file mode 100644
index 0000000000..0da2b3e64c
--- /dev/null
+++ b/third_party/python/futures/primes.py
@@ -0,0 +1,50 @@
+from __future__ import with_statement
+import math
+import time
+import sys
+
+from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
+
+PRIMES = [
+ 112272535095293,
+ 112582705942171,
+ 112272535095293,
+ 115280095190773,
+ 115797848077099,
+ 117450548693743,
+ 993960000099397]
+
+def is_prime(n):
+ if n % 2 == 0:
+ return False
+
+ sqrt_n = int(math.floor(math.sqrt(n)))
+ for i in range(3, sqrt_n + 1, 2):
+ if n % i == 0:
+ return False
+ return True
+
+def sequential():
+ return list(map(is_prime, PRIMES))
+
+def with_process_pool_executor():
+ with ProcessPoolExecutor(10) as executor:
+ return list(executor.map(is_prime, PRIMES))
+
+def with_thread_pool_executor():
+ with ThreadPoolExecutor(10) as executor:
+ return list(executor.map(is_prime, PRIMES))
+
+def main():
+ for name, fn in [('sequential', sequential),
+ ('processes', with_process_pool_executor),
+ ('threads', with_thread_pool_executor)]:
+ sys.stdout.write('%s: ' % name.ljust(12))
+ start = time.time()
+ if fn() != [True] * len(PRIMES):
+ sys.stdout.write('failed\n')
+ else:
+ sys.stdout.write('%.2f seconds\n' % (time.time() - start))
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/python/futures/setup.cfg b/third_party/python/futures/setup.cfg
new file mode 100644
index 0000000000..e04dbabe38
--- /dev/null
+++ b/third_party/python/futures/setup.cfg
@@ -0,0 +1,12 @@
+[build_sphinx]
+build-dir = build/sphinx
+source-dir = docs
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
+[upload_docs]
+upload-dir = build/sphinx/html
+
diff --git a/third_party/python/futures/setup.py b/third_party/python/futures/setup.py
new file mode 100755
index 0000000000..6961f35306
--- /dev/null
+++ b/third_party/python/futures/setup.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+from warnings import warn
+import sys
+
+if sys.version_info[0] > 2:
+ warn('This backport is meant only for Python 2.\n'
+ 'Python 3 users do not need it, as the concurrent.futures '
+ 'package is available in the standard library.')
+
+extras = {}
+try:
+ from setuptools import setup
+ extras['zip_safe'] = False
+except ImportError:
+ from distutils.core import setup
+
+setup(name='futures',
+ version='3.0.5',
+ description='Backport of the concurrent.futures package from Python 3.2',
+ author='Brian Quinlan',
+ author_email='brian@sweetapp.com',
+ maintainer='Alex Gronholm',
+ maintainer_email='alex.gronholm+pypi@nextday.fi',
+ url='https://github.com/agronholm/pythonfutures',
+ packages=['concurrent', 'concurrent.futures'],
+ license='BSD',
+ classifiers=['License :: OSI Approved :: BSD License',
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 2 :: Only'],
+ **extras
+ )
diff --git a/third_party/python/futures/test_futures.py b/third_party/python/futures/test_futures.py
new file mode 100644
index 0000000000..301cd0a934
--- /dev/null
+++ b/third_party/python/futures/test_futures.py
@@ -0,0 +1,727 @@
+import os
+import subprocess
+import sys
+import threading
+import functools
+import contextlib
+import logging
+import re
+import time
+import gc
+from StringIO import StringIO
+from test import test_support
+
+from concurrent import futures
+from concurrent.futures._base import (
+ PENDING, RUNNING, CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED, Future)
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+def reap_threads(func):
+ """Use this function when threads are being used. This will
+ ensure that the threads are cleaned up even when the test fails.
+ If threading is unavailable this function does nothing.
+ """
+ @functools.wraps(func)
+ def decorator(*args):
+ key = test_support.threading_setup()
+ try:
+ return func(*args)
+ finally:
+ test_support.threading_cleanup(*key)
+ return decorator
+
+
+# Executing the interpreter in a subprocess
+def _assert_python(expected_success, *args, **env_vars):
+ cmd_line = [sys.executable]
+ if not env_vars:
+ cmd_line.append('-E')
+ # Need to preserve the original environment, for in-place testing of
+ # shared library builds.
+ env = os.environ.copy()
+ # But a special flag that can be set to override -- in this case, the
+ # caller is responsible to pass the full environment.
+ if env_vars.pop('__cleanenv', None):
+ env = {}
+ env.update(env_vars)
+ cmd_line.extend(args)
+ p = subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ env=env)
+ try:
+ out, err = p.communicate()
+ finally:
+ subprocess._cleanup()
+ p.stdout.close()
+ p.stderr.close()
+ rc = p.returncode
+ err = strip_python_stderr(err)
+ if (rc and expected_success) or (not rc and not expected_success):
+ raise AssertionError(
+ "Process return code is %d, "
+ "stderr follows:\n%s" % (rc, err.decode('ascii', 'ignore')))
+ return rc, out, err
+
+
+def assert_python_ok(*args, **env_vars):
+ """
+ Assert that running the interpreter with `args` and optional environment
+ variables `env_vars` is ok and return a (return code, stdout, stderr) tuple.
+ """
+ return _assert_python(True, *args, **env_vars)
+
+
+def strip_python_stderr(stderr):
+ """Strip the stderr of a Python process from potential debug output
+ emitted by the interpreter.
+
+ This will typically be run on the result of the communicate() method
+ of a subprocess.Popen object.
+ """
+ stderr = re.sub(r"\[\d+ refs\]\r?\n?$".encode(), "".encode(), stderr).strip()
+ return stderr
+
+
+@contextlib.contextmanager
+def captured_stderr():
+ """Return a context manager used by captured_stdout/stdin/stderr
+ that temporarily replaces the sys stream *stream_name* with a StringIO."""
+ logging_stream = StringIO()
+ handler = logging.StreamHandler(logging_stream)
+ logging.root.addHandler(handler)
+
+ try:
+ yield logging_stream
+ finally:
+ logging.root.removeHandler(handler)
+
+
+def create_future(state=PENDING, exception=None, result=None):
+ f = Future()
+ f._state = state
+ f._exception = exception
+ f._result = result
+ return f
+
+
+PENDING_FUTURE = create_future(state=PENDING)
+RUNNING_FUTURE = create_future(state=RUNNING)
+CANCELLED_FUTURE = create_future(state=CANCELLED)
+CANCELLED_AND_NOTIFIED_FUTURE = create_future(state=CANCELLED_AND_NOTIFIED)
+EXCEPTION_FUTURE = create_future(state=FINISHED, exception=IOError())
+SUCCESSFUL_FUTURE = create_future(state=FINISHED, result=42)
+
+
+def mul(x, y):
+ return x * y
+
+
+def sleep_and_raise(t):
+ time.sleep(t)
+ raise Exception('this is an exception')
+
+def sleep_and_print(t, msg):
+ time.sleep(t)
+ print(msg)
+ sys.stdout.flush()
+
+
+class ExecutorMixin:
+ worker_count = 5
+
+ def setUp(self):
+ self.t1 = time.time()
+ try:
+ self.executor = self.executor_type(max_workers=self.worker_count)
+ except NotImplementedError:
+ e = sys.exc_info()[1]
+ self.skipTest(str(e))
+ self._prime_executor()
+
+ def tearDown(self):
+ self.executor.shutdown(wait=True)
+ dt = time.time() - self.t1
+ if test_support.verbose:
+ print("%.2fs" % dt)
+ self.assertLess(dt, 60, "synchronization issue: test lasted too long")
+
+ def _prime_executor(self):
+ # Make sure that the executor is ready to do work before running the
+ # tests. This should reduce the probability of timeouts in the tests.
+ futures = [self.executor.submit(time.sleep, 0.1)
+ for _ in range(self.worker_count)]
+
+ for f in futures:
+ f.result()
+
+
+class ThreadPoolMixin(ExecutorMixin):
+ executor_type = futures.ThreadPoolExecutor
+
+
+class ProcessPoolMixin(ExecutorMixin):
+ executor_type = futures.ProcessPoolExecutor
+
+
+class ExecutorShutdownTest(unittest.TestCase):
+ def test_run_after_shutdown(self):
+ self.executor.shutdown()
+ self.assertRaises(RuntimeError,
+ self.executor.submit,
+ pow, 2, 5)
+
+ def test_interpreter_shutdown(self):
+ # Test the atexit hook for shutdown of worker threads and processes
+ rc, out, err = assert_python_ok('-c', """if 1:
+ from concurrent.futures import %s
+ from time import sleep
+ from test_futures import sleep_and_print
+ t = %s(5)
+ t.submit(sleep_and_print, 1.0, "apple")
+ """ % (self.executor_type.__name__, self.executor_type.__name__))
+ # Errors in atexit hooks don't change the process exit code, check
+ # stderr manually.
+ self.assertFalse(err)
+ self.assertEqual(out.strip(), "apple".encode())
+
+ def test_hang_issue12364(self):
+ fs = [self.executor.submit(time.sleep, 0.1) for _ in range(50)]
+ self.executor.shutdown()
+ for f in fs:
+ f.result()
+
+
+class ThreadPoolShutdownTest(ThreadPoolMixin, ExecutorShutdownTest):
+ def _prime_executor(self):
+ pass
+
+ def test_threads_terminate(self):
+ self.executor.submit(mul, 21, 2)
+ self.executor.submit(mul, 6, 7)
+ self.executor.submit(mul, 3, 14)
+ self.assertEqual(len(self.executor._threads), 3)
+ self.executor.shutdown()
+ for t in self.executor._threads:
+ t.join()
+
+ def test_context_manager_shutdown(self):
+ with futures.ThreadPoolExecutor(max_workers=5) as e:
+ executor = e
+ self.assertEqual(list(e.map(abs, range(-5, 5))),
+ [5, 4, 3, 2, 1, 0, 1, 2, 3, 4])
+
+ for t in executor._threads:
+ t.join()
+
+ def test_del_shutdown(self):
+ executor = futures.ThreadPoolExecutor(max_workers=5)
+ executor.map(abs, range(-5, 5))
+ threads = executor._threads
+ del executor
+ gc.collect()
+
+ for t in threads:
+ t.join()
+
+
+class ProcessPoolShutdownTest(ProcessPoolMixin, ExecutorShutdownTest):
+ def _prime_executor(self):
+ pass
+
+ def test_processes_terminate(self):
+ self.executor.submit(mul, 21, 2)
+ self.executor.submit(mul, 6, 7)
+ self.executor.submit(mul, 3, 14)
+ self.assertEqual(len(self.executor._processes), 5)
+ processes = self.executor._processes
+ self.executor.shutdown()
+
+ for p in processes:
+ p.join()
+
+ def test_context_manager_shutdown(self):
+ with futures.ProcessPoolExecutor(max_workers=5) as e:
+ processes = e._processes
+ self.assertEqual(list(e.map(abs, range(-5, 5))),
+ [5, 4, 3, 2, 1, 0, 1, 2, 3, 4])
+
+ for p in processes:
+ p.join()
+
+ def test_del_shutdown(self):
+ executor = futures.ProcessPoolExecutor(max_workers=5)
+ list(executor.map(abs, range(-5, 5)))
+ queue_management_thread = executor._queue_management_thread
+ processes = executor._processes
+ del executor
+ gc.collect()
+
+ queue_management_thread.join()
+ for p in processes:
+ p.join()
+
+
+class WaitTests(unittest.TestCase):
+
+ def test_first_completed(self):
+ future1 = self.executor.submit(mul, 21, 2)
+ future2 = self.executor.submit(time.sleep, 1.5)
+
+ done, not_done = futures.wait(
+ [CANCELLED_FUTURE, future1, future2],
+ return_when=futures.FIRST_COMPLETED)
+
+ self.assertEqual(set([future1]), done)
+ self.assertEqual(set([CANCELLED_FUTURE, future2]), not_done)
+
+ def test_first_completed_some_already_completed(self):
+ future1 = self.executor.submit(time.sleep, 1.5)
+
+ finished, pending = futures.wait(
+ [CANCELLED_AND_NOTIFIED_FUTURE, SUCCESSFUL_FUTURE, future1],
+ return_when=futures.FIRST_COMPLETED)
+
+ self.assertEqual(
+ set([CANCELLED_AND_NOTIFIED_FUTURE, SUCCESSFUL_FUTURE]),
+ finished)
+ self.assertEqual(set([future1]), pending)
+
+ def test_first_exception(self):
+ future1 = self.executor.submit(mul, 2, 21)
+ future2 = self.executor.submit(sleep_and_raise, 1.5)
+ future3 = self.executor.submit(time.sleep, 3)
+
+ finished, pending = futures.wait(
+ [future1, future2, future3],
+ return_when=futures.FIRST_EXCEPTION)
+
+ self.assertEqual(set([future1, future2]), finished)
+ self.assertEqual(set([future3]), pending)
+
+ def test_first_exception_some_already_complete(self):
+ future1 = self.executor.submit(divmod, 21, 0)
+ future2 = self.executor.submit(time.sleep, 1.5)
+
+ finished, pending = futures.wait(
+ [SUCCESSFUL_FUTURE,
+ CANCELLED_FUTURE,
+ CANCELLED_AND_NOTIFIED_FUTURE,
+ future1, future2],
+ return_when=futures.FIRST_EXCEPTION)
+
+ self.assertEqual(set([SUCCESSFUL_FUTURE,
+ CANCELLED_AND_NOTIFIED_FUTURE,
+ future1]), finished)
+ self.assertEqual(set([CANCELLED_FUTURE, future2]), pending)
+
+ def test_first_exception_one_already_failed(self):
+ future1 = self.executor.submit(time.sleep, 2)
+
+ finished, pending = futures.wait(
+ [EXCEPTION_FUTURE, future1],
+ return_when=futures.FIRST_EXCEPTION)
+
+ self.assertEqual(set([EXCEPTION_FUTURE]), finished)
+ self.assertEqual(set([future1]), pending)
+
+ def test_all_completed(self):
+ future1 = self.executor.submit(divmod, 2, 0)
+ future2 = self.executor.submit(mul, 2, 21)
+
+ finished, pending = futures.wait(
+ [SUCCESSFUL_FUTURE,
+ CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ future1,
+ future2],
+ return_when=futures.ALL_COMPLETED)
+
+ self.assertEqual(set([SUCCESSFUL_FUTURE,
+ CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ future1,
+ future2]), finished)
+ self.assertEqual(set(), pending)
+
+ def test_timeout(self):
+ future1 = self.executor.submit(mul, 6, 7)
+ future2 = self.executor.submit(time.sleep, 3)
+
+ finished, pending = futures.wait(
+ [CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ SUCCESSFUL_FUTURE,
+ future1, future2],
+ timeout=1.5,
+ return_when=futures.ALL_COMPLETED)
+
+ self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ SUCCESSFUL_FUTURE,
+ future1]), finished)
+ self.assertEqual(set([future2]), pending)
+
+
+class ThreadPoolWaitTests(ThreadPoolMixin, WaitTests):
+
+ def test_pending_calls_race(self):
+ # Issue #14406: multi-threaded race condition when waiting on all
+ # futures.
+ event = threading.Event()
+ def future_func():
+ event.wait()
+ oldswitchinterval = sys.getcheckinterval()
+ sys.setcheckinterval(1)
+ try:
+ fs = set(self.executor.submit(future_func) for i in range(100))
+ event.set()
+ futures.wait(fs, return_when=futures.ALL_COMPLETED)
+ finally:
+ sys.setcheckinterval(oldswitchinterval)
+
+
+class ProcessPoolWaitTests(ProcessPoolMixin, WaitTests):
+ pass
+
+
+class AsCompletedTests(unittest.TestCase):
+ # TODO(brian@sweetapp.com): Should have a test with a non-zero timeout.
+ def test_no_timeout(self):
+ future1 = self.executor.submit(mul, 2, 21)
+ future2 = self.executor.submit(mul, 7, 6)
+
+ completed = set(futures.as_completed(
+ [CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ SUCCESSFUL_FUTURE,
+ future1, future2]))
+ self.assertEqual(set(
+ [CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ SUCCESSFUL_FUTURE,
+ future1, future2]),
+ completed)
+
+ def test_zero_timeout(self):
+ future1 = self.executor.submit(time.sleep, 2)
+ completed_futures = set()
+ try:
+ for future in futures.as_completed(
+ [CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ SUCCESSFUL_FUTURE,
+ future1],
+ timeout=0):
+ completed_futures.add(future)
+ except futures.TimeoutError:
+ pass
+
+ self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ SUCCESSFUL_FUTURE]),
+ completed_futures)
+
+ def test_duplicate_futures(self):
+ # Issue 20367. Duplicate futures should not raise exceptions or give
+ # duplicate responses.
+ future1 = self.executor.submit(time.sleep, 2)
+ completed = [f for f in futures.as_completed([future1,future1])]
+ self.assertEqual(len(completed), 1)
+
+
+class ThreadPoolAsCompletedTests(ThreadPoolMixin, AsCompletedTests):
+ pass
+
+
+class ProcessPoolAsCompletedTests(ProcessPoolMixin, AsCompletedTests):
+ pass
+
+
+class ExecutorTest(unittest.TestCase):
+ # Executor.shutdown() and context manager usage is tested by
+ # ExecutorShutdownTest.
+ def test_submit(self):
+ future = self.executor.submit(pow, 2, 8)
+ self.assertEqual(256, future.result())
+
+ def test_submit_keyword(self):
+ future = self.executor.submit(mul, 2, y=8)
+ self.assertEqual(16, future.result())
+
+ def test_map(self):
+ self.assertEqual(
+ list(self.executor.map(pow, range(10), range(10))),
+ list(map(pow, range(10), range(10))))
+
+ def test_map_exception(self):
+ i = self.executor.map(divmod, [1, 1, 1, 1], [2, 3, 0, 5])
+ self.assertEqual(next(i), (0, 1))
+ self.assertEqual(next(i), (0, 1))
+ self.assertRaises(ZeroDivisionError, next, i)
+
+ def test_map_timeout(self):
+ results = []
+ try:
+ for i in self.executor.map(time.sleep,
+ [0, 0, 3],
+ timeout=1.5):
+ results.append(i)
+ except futures.TimeoutError:
+ pass
+ else:
+ self.fail('expected TimeoutError')
+
+ self.assertEqual([None, None], results)
+
+
+class ThreadPoolExecutorTest(ThreadPoolMixin, ExecutorTest):
+ def test_map_submits_without_iteration(self):
+ """Tests verifying issue 11777."""
+ finished = []
+ def record_finished(n):
+ finished.append(n)
+
+ self.executor.map(record_finished, range(10))
+ self.executor.shutdown(wait=True)
+ self.assertEqual(len(finished), 10)
+
+
+class ProcessPoolExecutorTest(ProcessPoolMixin, ExecutorTest):
+ pass
+
+
+class FutureTests(unittest.TestCase):
+ def test_done_callback_with_result(self):
+ callback_result = [None]
+ def fn(callback_future):
+ callback_result[0] = callback_future.result()
+
+ f = Future()
+ f.add_done_callback(fn)
+ f.set_result(5)
+ self.assertEqual(5, callback_result[0])
+
+ def test_done_callback_with_exception(self):
+ callback_exception = [None]
+ def fn(callback_future):
+ callback_exception[0] = callback_future.exception()
+
+ f = Future()
+ f.add_done_callback(fn)
+ f.set_exception(Exception('test'))
+ self.assertEqual(('test',), callback_exception[0].args)
+
+ def test_done_callback_with_cancel(self):
+ was_cancelled = [None]
+ def fn(callback_future):
+ was_cancelled[0] = callback_future.cancelled()
+
+ f = Future()
+ f.add_done_callback(fn)
+ self.assertTrue(f.cancel())
+ self.assertTrue(was_cancelled[0])
+
+ def test_done_callback_raises(self):
+ with captured_stderr() as stderr:
+ raising_was_called = [False]
+ fn_was_called = [False]
+
+ def raising_fn(callback_future):
+ raising_was_called[0] = True
+ raise Exception('doh!')
+
+ def fn(callback_future):
+ fn_was_called[0] = True
+
+ f = Future()
+ f.add_done_callback(raising_fn)
+ f.add_done_callback(fn)
+ f.set_result(5)
+ self.assertTrue(raising_was_called)
+ self.assertTrue(fn_was_called)
+ self.assertIn('Exception: doh!', stderr.getvalue())
+
+ def test_done_callback_already_successful(self):
+ callback_result = [None]
+ def fn(callback_future):
+ callback_result[0] = callback_future.result()
+
+ f = Future()
+ f.set_result(5)
+ f.add_done_callback(fn)
+ self.assertEqual(5, callback_result[0])
+
+ def test_done_callback_already_failed(self):
+ callback_exception = [None]
+ def fn(callback_future):
+ callback_exception[0] = callback_future.exception()
+
+ f = Future()
+ f.set_exception(Exception('test'))
+ f.add_done_callback(fn)
+ self.assertEqual(('test',), callback_exception[0].args)
+
+ def test_done_callback_already_cancelled(self):
+ was_cancelled = [None]
+ def fn(callback_future):
+ was_cancelled[0] = callback_future.cancelled()
+
+ f = Future()
+ self.assertTrue(f.cancel())
+ f.add_done_callback(fn)
+ self.assertTrue(was_cancelled[0])
+
+ def test_repr(self):
+ self.assertRegexpMatches(repr(PENDING_FUTURE),
+ '<Future at 0x[0-9a-f]+L? state=pending>')
+ self.assertRegexpMatches(repr(RUNNING_FUTURE),
+ '<Future at 0x[0-9a-f]+L? state=running>')
+ self.assertRegexpMatches(repr(CANCELLED_FUTURE),
+ '<Future at 0x[0-9a-f]+L? state=cancelled>')
+ self.assertRegexpMatches(repr(CANCELLED_AND_NOTIFIED_FUTURE),
+ '<Future at 0x[0-9a-f]+L? state=cancelled>')
+ self.assertRegexpMatches(
+ repr(EXCEPTION_FUTURE),
+ '<Future at 0x[0-9a-f]+L? state=finished raised IOError>')
+ self.assertRegexpMatches(
+ repr(SUCCESSFUL_FUTURE),
+ '<Future at 0x[0-9a-f]+L? state=finished returned int>')
+
+ def test_cancel(self):
+ f1 = create_future(state=PENDING)
+ f2 = create_future(state=RUNNING)
+ f3 = create_future(state=CANCELLED)
+ f4 = create_future(state=CANCELLED_AND_NOTIFIED)
+ f5 = create_future(state=FINISHED, exception=IOError())
+ f6 = create_future(state=FINISHED, result=5)
+
+ self.assertTrue(f1.cancel())
+ self.assertEqual(f1._state, CANCELLED)
+
+ self.assertFalse(f2.cancel())
+ self.assertEqual(f2._state, RUNNING)
+
+ self.assertTrue(f3.cancel())
+ self.assertEqual(f3._state, CANCELLED)
+
+ self.assertTrue(f4.cancel())
+ self.assertEqual(f4._state, CANCELLED_AND_NOTIFIED)
+
+ self.assertFalse(f5.cancel())
+ self.assertEqual(f5._state, FINISHED)
+
+ self.assertFalse(f6.cancel())
+ self.assertEqual(f6._state, FINISHED)
+
+ def test_cancelled(self):
+ self.assertFalse(PENDING_FUTURE.cancelled())
+ self.assertFalse(RUNNING_FUTURE.cancelled())
+ self.assertTrue(CANCELLED_FUTURE.cancelled())
+ self.assertTrue(CANCELLED_AND_NOTIFIED_FUTURE.cancelled())
+ self.assertFalse(EXCEPTION_FUTURE.cancelled())
+ self.assertFalse(SUCCESSFUL_FUTURE.cancelled())
+
+ def test_done(self):
+ self.assertFalse(PENDING_FUTURE.done())
+ self.assertFalse(RUNNING_FUTURE.done())
+ self.assertTrue(CANCELLED_FUTURE.done())
+ self.assertTrue(CANCELLED_AND_NOTIFIED_FUTURE.done())
+ self.assertTrue(EXCEPTION_FUTURE.done())
+ self.assertTrue(SUCCESSFUL_FUTURE.done())
+
+ def test_running(self):
+ self.assertFalse(PENDING_FUTURE.running())
+ self.assertTrue(RUNNING_FUTURE.running())
+ self.assertFalse(CANCELLED_FUTURE.running())
+ self.assertFalse(CANCELLED_AND_NOTIFIED_FUTURE.running())
+ self.assertFalse(EXCEPTION_FUTURE.running())
+ self.assertFalse(SUCCESSFUL_FUTURE.running())
+
+ def test_result_with_timeout(self):
+ self.assertRaises(futures.TimeoutError,
+ PENDING_FUTURE.result, timeout=0)
+ self.assertRaises(futures.TimeoutError,
+ RUNNING_FUTURE.result, timeout=0)
+ self.assertRaises(futures.CancelledError,
+ CANCELLED_FUTURE.result, timeout=0)
+ self.assertRaises(futures.CancelledError,
+ CANCELLED_AND_NOTIFIED_FUTURE.result, timeout=0)
+ self.assertRaises(IOError, EXCEPTION_FUTURE.result, timeout=0)
+ self.assertEqual(SUCCESSFUL_FUTURE.result(timeout=0), 42)
+
+ def test_result_with_success(self):
+        # TODO(brian@sweetapp.com): This test is timing dependent.
+ def notification():
+ # Wait until the main thread is waiting for the result.
+ time.sleep(1)
+ f1.set_result(42)
+
+ f1 = create_future(state=PENDING)
+ t = threading.Thread(target=notification)
+ t.start()
+
+ self.assertEqual(f1.result(timeout=5), 42)
+
+ def test_result_with_cancel(self):
+        # TODO(brian@sweetapp.com): This test is timing dependent.
+ def notification():
+ # Wait until the main thread is waiting for the result.
+ time.sleep(1)
+ f1.cancel()
+
+ f1 = create_future(state=PENDING)
+ t = threading.Thread(target=notification)
+ t.start()
+
+ self.assertRaises(futures.CancelledError, f1.result, timeout=5)
+
+ def test_exception_with_timeout(self):
+ self.assertRaises(futures.TimeoutError,
+ PENDING_FUTURE.exception, timeout=0)
+ self.assertRaises(futures.TimeoutError,
+ RUNNING_FUTURE.exception, timeout=0)
+ self.assertRaises(futures.CancelledError,
+ CANCELLED_FUTURE.exception, timeout=0)
+ self.assertRaises(futures.CancelledError,
+ CANCELLED_AND_NOTIFIED_FUTURE.exception, timeout=0)
+ self.assertTrue(isinstance(EXCEPTION_FUTURE.exception(timeout=0),
+ IOError))
+ self.assertEqual(SUCCESSFUL_FUTURE.exception(timeout=0), None)
+
+ def test_exception_with_success(self):
+ def notification():
+ # Wait until the main thread is waiting for the exception.
+ time.sleep(1)
+ with f1._condition:
+ f1._state = FINISHED
+ f1._exception = IOError()
+ f1._condition.notify_all()
+
+ f1 = create_future(state=PENDING)
+ t = threading.Thread(target=notification)
+ t.start()
+
+ self.assertTrue(isinstance(f1.exception(timeout=5), IOError))
+
+@reap_threads
+def test_main():
+ try:
+ test_support.run_unittest(ProcessPoolExecutorTest,
+ ThreadPoolExecutorTest,
+ ProcessPoolWaitTests,
+ ThreadPoolWaitTests,
+ ProcessPoolAsCompletedTests,
+ ThreadPoolAsCompletedTests,
+ FutureTests,
+ ProcessPoolShutdownTest,
+ ThreadPoolShutdownTest)
+ finally:
+ test_support.reap_children()
+
+if __name__ == "__main__":
+ test_main()
diff --git a/third_party/python/futures/tox.ini b/third_party/python/futures/tox.ini
new file mode 100644
index 0000000000..2b35e7ddb6
--- /dev/null
+++ b/third_party/python/futures/tox.ini
@@ -0,0 +1,8 @@
+[tox]
+envlist = py26,py27,pypy,jython
+
+[testenv]
+commands={envpython} test_futures.py []
+
+[testenv:py26]
+deps=unittest2
diff --git a/third_party/python/glean_parser/.circleci/config.yml b/third_party/python/glean_parser/.circleci/config.yml
new file mode 100644
index 0000000000..8cd7f41930
--- /dev/null
+++ b/third_party/python/glean_parser/.circleci/config.yml
@@ -0,0 +1,201 @@
+version: 2.1
+
+commands:
+ test-start:
+ steps:
+ - checkout
+ - run:
+ name: environment
+ command: |
+ echo 'export PATH=.:$HOME/.local/bin:$PATH' >> $BASH_ENV
+ - run:
+ name: Upgrade pip
+ command: |
+ pip install --upgrade --user pip
+
+ test-min-requirements:
+ steps:
+ - run:
+ name: install minimum requirements
+ command: |
+ # Use requirements-builder to determine the minimum versions of
+ # all requirements and test those
+ # We install requirements-builder itself into its own venv, since
+ # otherwise its dependencies might install newer versions of
+ # glean_parser's dependencies.
+ python3 -m venv .rb
+ .rb/bin/pip install requirements-builder
+ .rb/bin/requirements-builder --level=min setup.py > min_requirements.txt
+
+ pip install --use-feature=2020-resolver --progress-bar off --user -U -r min_requirements.txt
+
+ test-python-version:
+ parameters:
+ requirements-file:
+ type: string
+ default: "requirements_dev.txt"
+ steps:
+ - run:
+ name: install
+ command: |
+ pip install --use-feature=2020-resolver --progress-bar off --user -U -r <<parameters.requirements-file>>
+ sudo apt update -q
+ sudo apt upgrade -q
+ sudo apt install openjdk-11-jdk-headless
+ make install-kotlin-linters
+ - run:
+ name: lint
+ command: make lint
+ - run:
+ name: install
+ # Set CC to something that isn't a working compiler so we
+ # can detect if any of the dependencies require a compiler
+ # to be installed. We can't count on a working compiler
+ # being available to pip on all of the platforms we need to
+ # support, so we need to make sure the dependencies are all
+ # pure Python or provide pre-built wheels.
+ command: CC=broken_compiler python setup.py install --user
+ - run:
+ name: test
+ command: make test
+
+jobs:
+ build-36:
+ docker:
+ - image: circleci/python:3.6.12
+ steps:
+ - test-start
+ - test-python-version
+
+ build-36-min:
+ docker:
+ - image: circleci/python:3.6.12
+ steps:
+ - test-start
+ - test-min-requirements
+ - test-python-version
+
+ build-37:
+ docker:
+ - image: circleci/python:3.7.9
+ steps:
+ - test-start
+ - test-python-version
+ - run:
+ name: make-docs
+ command: |
+ make docs
+ touch docs/_build/html/.nojekyll
+ - persist_to_workspace:
+ root: docs/_build
+ paths: html
+
+ build-38:
+ docker:
+ - image: circleci/python:3.8.5
+ steps:
+ - test-start
+ - test-python-version
+
+ build-38-min:
+ docker:
+ - image: circleci/python:3.8.5
+ steps:
+ - test-start
+ - test-min-requirements
+ - test-python-version
+
+ build-39:
+ docker:
+ - image: circleci/python:3.9.0rc1
+ steps:
+ - test-start
+ - test-python-version
+
+ docs-deploy:
+ docker:
+ - image: node:8.10.0
+ steps:
+ - checkout
+ - add_ssh_keys:
+ fingerprints:
+ - "9b:25:aa:bf:39:b6:4a:e7:c3:52:cf:ab:23:81:3d:52"
+ - attach_workspace:
+ at: docs/_build
+ - run:
+ name: install
+ command: |
+ npm install -g --silent gh-pages@2.0.1
+ git config user.email "glean-ci@nowhere.com"
+ git config user.name "glean-ci"
+ - run:
+ name: deploy
+ command: |
+ gh-pages --dotfiles --message "[ci skip] updates" --dist docs/_build/html
+
+ pypi-deploy:
+ docker:
+ - image: circleci/python:3.7.5
+ steps:
+ - checkout
+ - run:
+ name: environment
+ command: |
+ echo 'export PATH=.:$HOME/.local/bin:$PATH' >> $BASH_ENV
+ - run:
+ name: Upgrade pip
+ command: |
+ pip install --upgrade --user pip
+ - run:
+ name: install
+ command: |
+ pip install --use-feature=2020-resolver --user -U -r requirements_dev.txt
+ - run:
+ name: deploy
+ # Requires that the TWINE_USERNAME and TWINE_PASSWORD environment
+ # variables are configured in CircleCI's environment variables.
+ command: |
+ make release
+
+workflows:
+ version: 2
+ build:
+ jobs:
+ - build-36:
+ filters:
+ tags:
+ only: /.*/
+ - build-36-min:
+ filters:
+ tags:
+ only: /.*/
+ - build-37:
+ filters:
+ tags:
+ only: /.*/
+ - build-38:
+ filters:
+ tags:
+ only: /.*/
+ - build-38-min:
+ filters:
+ tags:
+ only: /.*/
+ - build-39:
+ filters:
+ tags:
+ only: /.*/
+ - docs-deploy:
+ requires:
+ - build-37
+ filters:
+ branches:
+ only: main
+ - pypi-deploy:
+ requires:
+ - build-37
+ filters:
+ branches:
+ ignore: /.*/
+ tags:
+ only: /v[0-9]+(\.[0-9]+)*/
diff --git a/third_party/python/glean_parser/.editorconfig b/third_party/python/glean_parser/.editorconfig
new file mode 100644
index 0000000000..d4a2c4405e
--- /dev/null
+++ b/third_party/python/glean_parser/.editorconfig
@@ -0,0 +1,21 @@
+# http://editorconfig.org
+
+root = true
+
+[*]
+indent_style = space
+indent_size = 4
+trim_trailing_whitespace = true
+insert_final_newline = true
+charset = utf-8
+end_of_line = lf
+
+[*.bat]
+indent_style = tab
+end_of_line = crlf
+
+[LICENSE]
+insert_final_newline = false
+
+[Makefile]
+indent_style = tab
diff --git a/third_party/python/glean_parser/.flake8 b/third_party/python/glean_parser/.flake8
new file mode 100644
index 0000000000..2bcd70e390
--- /dev/null
+++ b/third_party/python/glean_parser/.flake8
@@ -0,0 +1,2 @@
+[flake8]
+max-line-length = 88
diff --git a/third_party/python/glean_parser/.github/ISSUE_TEMPLATE.md b/third_party/python/glean_parser/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000000..ac2c091d74
--- /dev/null
+++ b/third_party/python/glean_parser/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,15 @@
+* Glean Parser version:
+* Python version:
+* Operating System:
+
+### Description
+
+Describe what you were trying to get done.
+Tell us what happened, what went wrong, and what you expected to happen.
+
+### What I Did
+
+```
+Paste the command(s) you ran and the output.
+If there was a crash, please include the traceback here.
+```
diff --git a/third_party/python/glean_parser/.github/dependabot.yml b/third_party/python/glean_parser/.github/dependabot.yml
new file mode 100644
index 0000000000..b38df29f46
--- /dev/null
+++ b/third_party/python/glean_parser/.github/dependabot.yml
@@ -0,0 +1,6 @@
+version: 2
+updates:
+ - package-ecosystem: "pip"
+ directory: "/"
+ schedule:
+ interval: "daily"
diff --git a/third_party/python/glean_parser/.github/pull_request_template.md b/third_party/python/glean_parser/.github/pull_request_template.md
new file mode 100644
index 0000000000..0a47d59fdf
--- /dev/null
+++ b/third_party/python/glean_parser/.github/pull_request_template.md
@@ -0,0 +1,8 @@
+### Pull Request checklist ###
+<!-- Before submitting the PR, please address each item -->
+- [ ] **Quality**: This PR builds and tests run cleanly
+ - `make test` runs without emitting any warnings
+ - `make lint` runs without emitting any errors
+- [ ] **Tests**: This PR includes thorough tests or an explanation of why it does not
+- [ ] **Changelog**: This PR includes a changelog entry to `HISTORY.rst` or an explanation of why it does not need one
+ - Any breaking changes to language binding APIs are noted explicitly
diff --git a/third_party/python/glean_parser/.gitignore b/third_party/python/glean_parser/.gitignore
new file mode 100644
index 0000000000..120a8343f1
--- /dev/null
+++ b/third_party/python/glean_parser/.gitignore
@@ -0,0 +1,110 @@
+docs/glean_parser.rst
+docs/modules.rst
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# dotenv
+.env
+
+# virtualenv
+.venv
+venv/
+ENV/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+
+.vscode/
+
+detekt*.jar
+ktlint
diff --git a/third_party/python/glean_parser/.swiftlint.yml b/third_party/python/glean_parser/.swiftlint.yml
new file mode 100644
index 0000000000..fa7b621a5d
--- /dev/null
+++ b/third_party/python/glean_parser/.swiftlint.yml
@@ -0,0 +1,6 @@
+identifier_name:
+  # Stop it complaining about short names like `id` or `let t = title`, but
+  # keep warnings around e.g. enum names.
+ min_length:
+ warning: 0
+ error: 0
diff --git a/third_party/python/glean_parser/AUTHORS.rst b/third_party/python/glean_parser/AUTHORS.rst
new file mode 100644
index 0000000000..8ea4597f29
--- /dev/null
+++ b/third_party/python/glean_parser/AUTHORS.rst
@@ -0,0 +1,23 @@
+=======
+Credits
+=======
+
+Development Lead
+----------------
+
+* Michael Droettboom <mdroettboom@mozilla.com>
+* Alessio Placitelli <aplacitelli@mozilla.com>
+
+Contributors
+------------
+
+* Frank Bertsch <frank@mozilla.com>
+* Travis Long <tlong@mozilla.com>
+
+Acknowledgements
+----------------
+
+This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
+
+.. _Cookiecutter: https://github.com/audreyr/cookiecutter
+.. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
diff --git a/third_party/python/glean_parser/CODE_OF_CONDUCT.md b/third_party/python/glean_parser/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000..498baa3fb0
--- /dev/null
+++ b/third_party/python/glean_parser/CODE_OF_CONDUCT.md
@@ -0,0 +1,15 @@
+# Community Participation Guidelines
+
+This repository is governed by Mozilla's code of conduct and etiquette guidelines.
+For more details, please read the
+[Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/).
+
+## How to Report
+For more information on how to report violations of the Community Participation Guidelines, please read our '[How to Report](https://www.mozilla.org/about/governance/policies/participation/reporting/)' page.
+
+<!--
+## Project Specific Etiquette
+
+In some cases, there will be additional project etiquette i.e.: (https://bugzilla.mozilla.org/page.cgi?id=etiquette.html).
+Please update for your project.
+-->
diff --git a/third_party/python/glean_parser/CONTRIBUTING.rst b/third_party/python/glean_parser/CONTRIBUTING.rst
new file mode 100644
index 0000000000..28a1f95fea
--- /dev/null
+++ b/third_party/python/glean_parser/CONTRIBUTING.rst
@@ -0,0 +1,160 @@
+.. highlight:: shell
+
+.. _bugzilla: https://bugzilla.mozilla.org/enter_bug.cgi?assigned_to=nobody%40mozilla.org&bug_ignored=0&bug_severity=normal&bug_status=NEW&cf_fission_milestone=---&cf_fx_iteration=---&cf_fx_points=---&cf_status_firefox65=---&cf_status_firefox66=---&cf_status_firefox67=---&cf_status_firefox_esr60=---&cf_status_thunderbird_esr60=---&cf_tracking_firefox65=---&cf_tracking_firefox66=---&cf_tracking_firefox67=---&cf_tracking_firefox_esr60=---&cf_tracking_firefox_relnote=---&cf_tracking_thunderbird_esr60=---&product=Data%20Platform%20and%20Tools&component=Glean%3A%20SDK&contenttypemethod=list&contenttypeselection=text%2Fplain&defined_groups=1&flag_type-203=X&flag_type-37=X&flag_type-41=X&flag_type-607=X&flag_type-721=X&flag_type-737=X&flag_type-787=X&flag_type-799=X&flag_type-800=X&flag_type-803=X&flag_type-835=X&flag_type-846=X&flag_type-855=X&flag_type-864=X&flag_type-916=X&flag_type-929=X&flag_type-930=X&flag_type-935=X&flag_type-936=X&flag_type-937=X&form_name=enter_bug&maketemplate=Remember%20values%20as%20bookmarkable%20template&op_sys=Unspecified&priority=P3&&rep_platform=Unspecified&status_whiteboard=%5Btelemetry%3Aglean-rs%3Am%3F%5D&target_milestone=---&version=unspecified
+
+
+============
+Contributing
+============
+
+Contributions are welcome, and they are greatly appreciated! Every little bit
+helps, and credit will always be given.
+
+You can contribute in many ways:
+
+Types of Contributions
+----------------------
+
+Report Bugs
+~~~~~~~~~~~
+
+Report bugs at bugzilla_.
+
+If you are reporting a bug, please include:
+
+* Your operating system name and version.
+* Any details about your local setup that might be helpful in troubleshooting.
+* Detailed steps to reproduce the bug.
+
+Fix Bugs
+~~~~~~~~
+
+Look through the GitHub issues for bugs. Anything tagged with "bug" and "help
+wanted" is open to whoever wants to implement it.
+
+Implement Features
+~~~~~~~~~~~~~~~~~~
+
+Look through the GitHub issues for features. Anything tagged with "enhancement"
+and "help wanted" is open to whoever wants to implement it.
+
+Write Documentation
+~~~~~~~~~~~~~~~~~~~
+
+Glean Parser could always use more documentation, whether as part of the
+official Glean Parser docs, in docstrings, or even on the web in blog posts,
+articles, and such.
+
+Submit Feedback
+~~~~~~~~~~~~~~~
+
+The best way to send feedback is to file an issue at TODO
+
+If you are proposing a feature:
+
+* Explain in detail how it would work.
+* Keep the scope as narrow as possible, to make it easier to implement.
+* Remember that this is a volunteer-driven project, and that contributions
+ are welcome :)
+
+Get Started!
+------------
+
+Ready to contribute? Here's how to set up `glean_parser` for local development.
+
+1. Fork the `glean_parser` repo on GitHub.
+2. Clone your fork locally::
+
+ $ git clone git@github.com:your_name_here/glean_parser.git
+
+3. Install your local copy into a virtualenv. Assuming you have
+ virtualenvwrapper installed, this is how you set up your fork for local
+ development::
+
+ $ mkvirtualenv glean_parser
+ $ cd glean_parser/
+ $ python setup.py develop
+
+4. Create a branch for local development::
+
+ $ git checkout -b name-of-your-bugfix-or-feature
+
+ Now you can make your changes locally.
+
+5. To test your changes to `glean_parser`:
+
+ Install the testing dependencies::
+
+ $ pip install -r requirements_dev.txt
+
+ Optionally, if you want to ensure that the generated Kotlin code lints correctly, install a Java SDK, and then run::
+
+ $ make install-kotlin-linters
+
+ Then make sure that all lints and tests are passing::
+
+ $ make lint
+ $ make test
+
+6. Commit your changes and push your branch to GitHub::
+
+ $ git add .
+ $ git commit -m "Your detailed description of your changes."
+ $ git push origin name-of-your-bugfix-or-feature
+
+7. Submit a pull request through the GitHub website.
+
+Pull Request Guidelines
+-----------------------
+
+Before you submit a pull request, check that it meets these guidelines:
+
+1. The pull request should include tests.
+2. If the pull request adds functionality, the docs should be updated. Put
+ your new functionality into a function with a docstring, and add the
+ feature to the list in README.rst.
+3. The pull request should work for Python 3.6, 3.7 and 3.8 (The CI system will take care of testing all of these Python versions).
+4. The pull request should update the changelog in `HISTORY.rst`.
+
+Tips
+----
+
+To run a subset of tests::
+
+$ py.test tests.test_glean_parser
+
+
+Deploying
+---------
+
+A reminder for the maintainers on how to deploy.
+
+Get a clean main branch with all of the changes from `upstream`::
+
+ $ git checkout main
+ $ git fetch upstream
+ $ git rebase upstream/main
+
+- Update the header with the new version and date in HISTORY.rst.
+
+- (By using the setuptools-scm package, there is no need to update the version anywhere else).
+
+- Make sure all your changes are committed.
+
+- Push the changes upstream. (Normally pushing directly without review is frowned upon, but the `main` branch is protected from force pushes and release tagging requires the same permissions as pushing to `main`)::
+
+ $ git push upstream main
+
+- Wait for `continuous integration to pass <https://circleci.com/gh/mozilla/glean/tree/main>`__ on main.
+
+- Make the release on GitHub using `this link <https://github.com/mozilla/glean_parser/releases/new>`__
+
+- Both the tag and the release title should be in the form `vX.Y.Z`.
+
+- Copy and paste the relevant part of the `HISTORY.rst` file into the description.
+
+- Tagging the release will trigger a CI workflow which will build the distribution of `glean_parser` and publish it to PyPI.
+
+The continuous integration system will then automatically deploy to PyPI.
+
+See also the `instructions for updating the version of glean_parser used by the Glean SDK <https://mozilla.github.io/glean/book/dev/upgrading-glean-parser.html>`__.
diff --git a/third_party/python/glean_parser/HISTORY.rst b/third_party/python/glean_parser/HISTORY.rst
new file mode 100644
index 0000000000..48630541b0
--- /dev/null
+++ b/third_party/python/glean_parser/HISTORY.rst
@@ -0,0 +1,415 @@
+=======
+History
+=======
+
+Unreleased
+----------
+
+1.29.0 (2020-10-07)
+-------------------
+
+* **Breaking change:** `glean_parser` will now return an error code when any of the input files do not exist (unless the `--allow-missing-files` flag is passed).
+* Generated code now includes a comment next to each metric containing the name of the metric in its original `snake_case` form.
+* When metrics don't provide a `unit` parameter, it is not included in the output (as provided by probe-scraper).
+
+1.28.6 (2020-09-24)
+-------------------
+
+* BUGFIX: Ensure Kotlin arguments are deterministically ordered
+
+1.28.5 (2020-09-14)
+-------------------
+
+* Fix deploy step to update pip before deploying to pypi.
+
+1.28.4 (2020-09-14)
+-------------------
+
+* The `SUPERFLUOUS_NO_LINT` warning has been removed from the glinter.
+ It likely did more harm than good, and makes it hard to make
+ `metrics.yaml` files that pass across different versions of `glean_parser`.
+* Expired metrics will now produce a linter warning, `EXPIRED_METRIC`.
+* Expiry dates that are more than 730 days (~2 years) in the future will produce a linter warning,
+ `EXPIRATION_DATE_TOO_FAR`.
+* Allow using the Quantity metric type outside of Gecko.
+* New parser configs `custom_is_expired` and `custom_validate_expires` added.
+ These are both functions that take the `expires` value of the metric and return a bool.
+ (See `Metric.is_expired` and `Metric.validate_expires`).
+ These will allow FOG to provide custom validation for its version-based `expires` values.
+
+1.28.3 (2020-07-28)
+-------------------
+
+* BUGFIX: Support HashSet and Dictionary in the C# generated code.
+
+1.28.2 (2020-07-28)
+-------------------
+
+* BUGFIX: Generate valid C# code when using Labeled metric types.
+
+1.28.1 (2020-07-24)
+-------------------
+
+* BUGFIX: Add missing column to correctly render markdown tables in generated documentation.
+
+1.28.0 (2020-07-23)
+-------------------
+
+* **Breaking change:** The internal ping `deletion-request` was misnamed in pings.py, causing the linter to reject use of the correctly named ping when adding legacy ids. After updating, consuming apps will need to change any `send_in_pings` entries in their metrics.yaml from `deletion_request` to `deletion-request`.
+
+1.27.0 (2020-07-21)
+-------------------
+
+* Rename the `data_category` field to `data_sensitivity` to be clearer.
+
+1.26.0 (2020-07-21)
+-------------------
+
+* Add support for JWE metric types.
+* Add a `data_sensitivity` field to all metrics for specifying the type of data collected in the field.
+
+1.25.0 (2020-07-17)
+-------------------
+
+* Add support for generating C# code.
+* BUGFIX: The memory unit is now correctly passed to the MemoryDistribution
+ metric type in Swift.
+
+1.24.0 (2020-06-30)
+-------------------
+
+* BUGFIX: look for metrics in send_if_empty pings. Metrics for these kinds of pings were being ignored.
+
+1.23.0 (2020-06-27)
+-------------------
+
+* Support for Python 3.5 has been dropped.
+* BUGFIX: The ordering of event extra keys will now match with their enum, fixing a serious bug where keys of extras may not match the correct values in the data payload. See https://bugzilla.mozilla.org/show_bug.cgi?id=1648768.
+
+1.22.0 (2020-05-28)
+-------------------
+
+* **Breaking change:** (Swift only) Combine all metrics and pings into a single generated file `Metrics.swift`.
+
+1.21.0 (2020-05-25)
+-------------------
+
+* `glinter` messages have been improved with more details and to be more
+ actionable.
+* A maximum of 10 `extra_keys` is now enforced for `event` metric types.
+* BUGFIX: the `Lifetime` enum values now match the values of the implementation in mozilla/glean.
+
+1.20.4 (2020-05-07)
+-------------------
+
+* BUGFIX: yamllint errors are now reported using the correct file name.
+
+1.20.3 (2020-05-06)
+-------------------
+
+* Support for using `timing_distribution`'s `time_unit` parameter to control the range of acceptable values is documented. The default unit for this use case is `nanosecond` to avoid creating a breaking change. See [bug 1630997](https://bugzilla.mozilla.org/show_bug.cgi?id=1630997) for more information.
+
+1.20.2 (2020-04-24)
+-------------------
+
+* Dependencies that depend on the version of Python being used are now specified using the `Declaring platform specific dependencies syntax in setuptools <https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies>`__. This means that more recent versions of dependencies are likely to be installed on Python 3.6 and later, and unnecessary backport libraries won't be installed on more recent Python versions.
+
+1.20.1 (2020-04-21)
+-------------------
+
+* The minimum versions of the runtime dependencies have been lowered to increase compatibility with other tools. These minimum versions are now tested in CI, in addition to the latest versions of the dependencies, which were already tested in CI.
+
+1.20.0 (2020-04-15)
+-------------------
+
+* **Breaking change:** glinter errors found during the `translate` command will now return an error code. glinter warnings will be displayed, but not return an error code.
+* `glean_parser` now produces a linter warning when `user` lifetime metrics are
+ set to expire. See [bug 1604854](https://bugzilla.mozilla.org/show_bug.cgi?id=1604854)
+ for additional context.
+
+1.19.0 (2020-03-18)
+-------------------
+
+* **Breaking change:** The regular expression used to validate labels is
+ stricter and more correct.
+* Add more information about pings to markdown documentation:
+ * State whether the ping includes client id;
+ * Add list of data review links;
+ * Add list of related bugs links.
+* `glean_parser` now makes it easier to write external translation functions for
+ different language targets.
+* BUGFIX: glean_parser now works on 32-bit Windows.
+
+1.18.3 (2020-02-24)
+-------------------
+
+* Dropped the 'inflection' dependency.
+* Constrained the 'zipp' and 'MarkupSafe' transitive dependencies to versions that
+ support Python 3.5.
+
+1.18.2 (2020-02-14)
+-------------------
+
+* BUGFIX: Fix rendering of first element of reason list.
+
+1.18.1 (2020-02-14)
+-------------------
+
+* BUGFIX: Reason codes are displayed in markdown output for built-in pings as
+ well.
+* BUGFIX: Reason descriptions are indented correctly in markdown output.
+* BUGFIX: To avoid a compiler error, the @JvmName annotation isn't added to
+ private members.
+
+1.18.0 (2020-02-13)
+-------------------
+
+* **Breaking Change (Java API)** Have the metrics names in Java match the names in Kotlin.
+ See [Bug 1588060](https://bugzilla.mozilla.org/show_bug.cgi?id=1588060).
+* The reasons a ping is sent are now included in the generated markdown documentation.
+
+1.17.3 (2020-02-05)
+-------------------
+
+* BUGFIX: The version of Jinja2 now specifies < 3.0, since that version no
+ longer supports Python 3.5.
+
+1.17.2 (2020-02-05)
+-------------------
+
+* BUGFIX: Fixes an import error in generated Kotlin code.
+
+1.17.1 (2020-02-05)
+-------------------
+
+* BUGFIX: Generated Swift code now includes `import Glean`, unless generating
+ for a Glean-internal build.
+
+1.17.0 (2020-02-03)
+-------------------
+
+* Remove default schema URL from `validate_ping`
+* Make `schema` argument required for CLI
+* BUGFIX: Avoid default import in Swift code for Glean itself
+* BUGFIX: Restore order of fields in generated Swift code
+
+1.16.0 (2020-01-15)
+-------------------
+
+* Support for `reason` codes on pings was added.
+
+1.15.6 (2020-02-06)
+-------------------
+
+* BUGFIX: The version of Jinja2 now specifies < 3.0, since that version no
+ longer supports Python 3.5 (backported from 1.17.3).
+
+1.15.5 (2019-12-19)
+-------------------
+
+* BUGFIX: Also allow the legacy name `all_pings` for `send_in_pings` parameter on metrics
+
+1.15.4 (2019-12-19)
+-------------------
+
+* BUGFIX: Also allow the legacy name `all_pings`
+
+1.15.3 (2019-12-13)
+-------------------
+
+* Add project title to markdown template.
+* Remove "Sorry about that" from markdown template.
+* BUGFIX: Replace dashes in variable names to force proper naming
+
+1.15.2 (2019-12-12)
+-------------------
+
+* BUGFIX: Use a pure Python library for iso8601 so there is no compilation required.
+
+1.15.1 (2019-12-12)
+-------------------
+
+* BUGFIX: Add some additional ping names to the non-kebab-case allow list.
+
+1.15.0 (2019-12-12)
+-------------------
+
+* Restrict new ping names to kebab-case and change `all_pings` to `all-pings`
+
+1.14.0 (2019-12-06)
+-------------------
+
+* glean_parser now supports Python versions 3.5, 3.6, 3.7 and 3.8.
+
+1.13.0 (2019-12-04)
+-------------------
+
+* The `translate` command will no longer clear extra files in the output directory.
+* BUGFIX: Ensure all newlines in comments are prefixed with comment markers
+* BUGFIX: Escape Swift keywords in variable names in generated code
+* Generate documentation for pings that are sent if empty
+
+1.12.0 (2019-11-27)
+-------------------
+
+* Reserve the `deletion_request` ping name
+* Added a new flag `send_if_empty` for pings
+
+1.11.0 (2019-11-13)
+-------------------
+
+* The `glinter` command now performs `yamllint` validation on registry files.
+
+1.10.0 (2019-11-11)
+-------------------
+
+* The Kotlin linter `detekt` is now run during CI, and for local
+ testing if installed.
+
+* Python 3.8 is now tested in CI (in addition to Python 3.7).
+ Using `tox` for this doesn't work in modern versions of CircleCI, so
+ the `tox` configuration has been removed.
+
+* `yamllint` has been added to test the YAML files on CI.
+
+* ⚠ Metric types that don't yet have implementations in glean-core have been
+ removed. This includes `enumeration`, `rate`, `usage`, and `use_counter`, as
+ well as many labeled metrics that don't exist.
+
+1.9.5 (2019-10-22)
+------------------
+
+* Allow a Swift lint for generated code
+
+* New lint: Restrict what metric can go into the 'baseline' ping
+
+* New lint: Warn for slight misspellings in ping names
+
+* BUGFIX: change Labeled types labels from lists to sets.
+
+1.9.4 (2019-10-16)
+------------------
+
+* Use lists instead of sets for Labeled types' labels to ensure that
+  the order of the labels given in the `metrics.yaml` is kept.
+
+* `glinter` will now check for duplicate labels and error if there are any.
+
+1.9.3 (2019-10-09)
+------------------
+
+* Add labels from Labeled types to the Extra column in the Markdown template.
+
+1.9.2 (2019-10-08)
+------------------
+
+* BUGFIX: Don't call `is_internal_metric` on `Ping` objects.
+
+1.9.1 (2019-10-07)
+------------------
+
+* Don't include Glean internal metrics in the generated markdown.
+
+1.9.0 (2019-10-04)
+------------------
+
+* Glinter now warns when bug numbers (rather than URLs) are used.
+
+* BUGFIX: add `HistogramType` and `MemoryUnit` imports in Kotlin generated code.
+
+1.8.4 (2019-10-02)
+------------------
+
+* Removed unsupported labeled metric types.
+
+1.8.3 (2019-10-02)
+------------------
+
+* Fix indentation for generated Swift code
+
+1.8.2 (2019-10-01)
+------------------
+
+* Create labeled metrics and events in Swift code and wrap them in a configured namespace
+
+1.8.1 (2019-09-27)
+------------------
+
+* BUGFIX: `memory_unit` is now passed to the Kotlin generator.
+
+1.8.0 (2019-09-26)
+------------------
+
+* A new parser config, `do_not_disable_expired`, was added to turn off the
+ feature that expired metrics are automatically disabled. This is useful if you
+ want to retain the disabled value that is explicitly in the `metrics.yaml`
+ file.
+
+* `glinter` will now report about superfluous `no_lint` entries.
+
+1.7.0 (2019-09-24)
+------------------
+
+* A "`glinter`" tool is now included to find common mistakes in metric naming and setup.
+ This check is run during `translate` and warnings will be displayed.
+ ⚠ These warnings will be treated as errors in a future revision.
+
+1.6.1 (2019-09-17)
+------------------
+
+* BUGFIX: `GleanGeckoMetricsMapping` must include `LabeledMetricType` and `CounterMetricType`.
+
+1.6.0 (2019-09-17)
+------------------
+
+* NEW: Support for outputting metrics in Swift.
+
+* BUGFIX: Provides a helpful error message when `geckoview_datapoint` is used on a metric type that doesn't support GeckoView exfiltration.
+
+* Generate a lookup table for Gecko categorical histograms in `GleanGeckoMetricsMapping`.
+
+* Introduce a 'Swift' output generator.
+
+1.4.1 (2019-08-28)
+------------------
+
+* Documentation only.
+
+1.4.0 (2019-08-27)
+------------------
+
+* Added support for generating markdown documentation from `metrics.yaml` files.
+
+1.3.0 (2019-08-22)
+------------------
+
+* `quantity` metric type has been added.
+
+1.2.1 (2019-08-13)
+------------------
+
+* BUGFIX: `includeClientId` was not being output for PingType.
+
+1.2.0 (2019-08-13)
+------------------
+
+* `memory_distribution` metric type has been added.
+
+* `custom_distribution` metric type has been added.
+
+* `labeled_timespan` is no longer an allowed metric type.
+
+1.1.0 (2019-08-05)
+------------------
+
+* Add a special `all_pings` value to `send_in_pings`.
+
+1.0.0 (2019-07-29)
+------------------
+
+* First release to start following strict semver.
+
+0.1.0 (2018-10-15)
+------------------
+
+* First release on PyPI.
diff --git a/third_party/python/glean_parser/LICENSE b/third_party/python/glean_parser/LICENSE
new file mode 100644
index 0000000000..a612ad9813
--- /dev/null
+++ b/third_party/python/glean_parser/LICENSE
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+ means each individual or legal entity that creates, contributes to
+ the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+ means the combination of the Contributions of others (if any) used
+ by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+ means Source Code Form to which the initial Contributor has attached
+ the notice in Exhibit A, the Executable Form of such Source Code
+ Form, and Modifications of such Source Code Form, in each case
+ including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ (a) that the initial Contributor has attached the notice described
+ in Exhibit B to the Covered Software; or
+
+ (b) that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the
+ terms of a Secondary License.
+
+1.6. "Executable Form"
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+ means a work that combines Covered Software with other material, in
+ a separate file or files, that is not Covered Software.
+
+1.8. "License"
+ means this document.
+
+1.9. "Licensable"
+ means having the right to grant, to the maximum extent possible,
+ whether at the time of the initial grant or subsequently, any and
+ all of the rights conveyed by this License.
+
+1.10. "Modifications"
+ means any of the following:
+
+ (a) any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered
+ Software; or
+
+ (b) any new file in Source Code Form that contains any Covered
+ Software.
+
+1.11. "Patent Claims" of a Contributor
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the
+ License, by the making, using, selling, offering for sale, having
+ made, import, or transfer of either its Contributions or its
+ Contributor Version.
+
+1.12. "Secondary License"
+ means either the GNU General Public License, Version 2.0, the GNU
+ Lesser General Public License, Version 2.1, the GNU Affero General
+ Public License, Version 3.0, or any later versions of those
+ licenses.
+
+1.13. "Source Code Form"
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that
+ controls, is controlled by, or is under common control with You. For
+ purposes of this definition, "control" means (a) the power, direct
+ or indirect, to cause the direction or management of such entity,
+ whether by contract or otherwise, or (b) ownership of more than
+ fifty percent (50%) of the outstanding shares or beneficial
+ ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+ for sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+ or
+
+(b) for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+ Form, as described in Section 3.1, and You must inform recipients of
+ the Executable Form how they can obtain a copy of such Source Code
+ Form by reasonable means in a timely manner, at a charge no more
+ than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter
+ the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+* *
+* 6. Disclaimer of Warranty *
+* ------------------------- *
+* *
+* Covered Software is provided under this License on an "as is" *
+* basis, without warranty of any kind, either expressed, implied, or *
+* statutory, including, without limitation, warranties that the *
+* Covered Software is free of defects, merchantable, fit for a *
+* particular purpose or non-infringing. The entire risk as to the *
+* quality and performance of the Covered Software is with You. *
+* Should any Covered Software prove defective in any respect, You *
+* (not any Contributor) assume the cost of any necessary servicing, *
+* repair, or correction. This disclaimer of warranty constitutes an *
+* essential part of this License. No use of any Covered Software is *
+* authorized under this License except under this disclaimer. *
+* *
+************************************************************************
+
+************************************************************************
+* *
+* 7. Limitation of Liability *
+* -------------------------- *
+* *
+* Under no circumstances and under no legal theory, whether tort *
+* (including negligence), contract, or otherwise, shall any *
+* Contributor, or anyone who distributes Covered Software as *
+* permitted above, be liable to You for any direct, indirect, *
+* special, incidental, or consequential damages of any character *
+* including, without limitation, damages for lost profits, loss of *
+* goodwill, work stoppage, computer failure or malfunction, or any *
+* and all other commercial damages or losses, even if such party *
+* shall have been informed of the possibility of such damages. This *
+* limitation of liability shall not apply to liability for death or *
+* personal injury resulting from such party's negligence to the *
+* extent applicable law prohibits such limitation. Some *
+* jurisdictions do not allow the exclusion or limitation of *
+* incidental or consequential damages, so this exclusion and *
+* limitation may not apply to You. *
+* *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+ This Source Code Form is "Incompatible With Secondary Licenses", as
+ defined by the Mozilla Public License, v. 2.0.
diff --git a/third_party/python/glean_parser/MANIFEST.in b/third_party/python/glean_parser/MANIFEST.in
new file mode 100644
index 0000000000..9580ab203b
--- /dev/null
+++ b/third_party/python/glean_parser/MANIFEST.in
@@ -0,0 +1,14 @@
+include AUTHORS.rst
+include CONTRIBUTING.rst
+include HISTORY.rst
+include LICENSE
+include README.rst
+
+recursive-include tests *
+recursive-exclude * __pycache__
+recursive-exclude * *.py[co]
+
+recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif
+
+recursive-include glean_parser/schemas *.yaml
+recursive-include glean_parser/templates *
diff --git a/third_party/python/glean_parser/Makefile b/third_party/python/glean_parser/Makefile
new file mode 100644
index 0000000000..d88a0eef0a
--- /dev/null
+++ b/third_party/python/glean_parser/Makefile
@@ -0,0 +1,75 @@
+.PHONY: clean clean-test clean-pyc clean-build docs help
+
+define PRINT_HELP_PYSCRIPT
+import re, sys
+
+for line in sys.stdin:
+ match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line)
+ if match:
+ target, help = match.groups()
+ print("%-20s %s" % (target, help))
+endef
+export PRINT_HELP_PYSCRIPT
+
+help:
+ @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST)
+
+clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts
+
+clean-build: ## remove build artifacts
+ rm -fr build/
+ rm -fr dist/
+ rm -fr .eggs/
+ find . -name '*.egg-info' -exec rm -fr {} +
+ find . -name '*.egg' -exec rm -fr {} +
+
+clean-pyc: ## remove Python file artifacts
+ find . -name '*.pyc' -exec rm -f {} +
+ find . -name '*.pyo' -exec rm -f {} +
+ find . -name '*~' -exec rm -f {} +
+ find . -name '__pycache__' -exec rm -fr {} +
+
+clean-test: ## remove test and coverage artifacts
+ rm -f .coverage
+ rm -fr htmlcov/
+ rm -fr .pytest_cache
+
+lint: ## check style with flake8
+ python3 -m flake8 glean_parser tests
+ python3 -m black --check glean_parser tests setup.py
+ python3 -m yamllint glean_parser tests
+ python3 -m mypy glean_parser
+
+test: ## run tests quickly with the default Python
+ py.test
+
+coverage: ## check code coverage quickly with the default Python
+ coverage run --source glean_parser -m pytest
+ coverage report -m
+ coverage html
+
+docs: ## generate Sphinx HTML documentation, including API docs
+ rm -f docs/glean_parser.rst
+ rm -f docs/modules.rst
+ sphinx-apidoc -o docs/ glean_parser
+ $(MAKE) -C docs clean
+ $(MAKE) -C docs html
+
+servedocs: docs ## compile the docs watching for changes
+ watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D .
+
+release: dist ## package and upload a release
+ twine upload dist/*
+
+dist: clean ## builds source and wheel package
+ python setup.py sdist
+ python setup.py bdist_wheel
+ ls -l dist
+
+install: clean ## install the package to the active Python's site-packages
+ python setup.py install
+
+install-kotlin-linters: ## install ktlint and detekt for linting Kotlin output
+ curl -sSLO https://github.com/shyiko/ktlint/releases/download/0.29.0/ktlint
+ chmod a+x ktlint
+ curl -sSL --output "detekt-cli-1.1.1-all.jar" https://bintray.com/arturbosch/code-analysis/download_file?file_path=io%2Fgitlab%2Farturbosch%2Fdetekt%2Fdetekt-cli%2F1.1.1%2Fdetekt-cli-1.1.1-all.jar
diff --git a/third_party/python/glean_parser/PKG-INFO b/third_party/python/glean_parser/PKG-INFO
new file mode 100644
index 0000000000..9b1f7381bc
--- /dev/null
+++ b/third_party/python/glean_parser/PKG-INFO
@@ -0,0 +1,490 @@
+Metadata-Version: 1.1
+Name: glean_parser
+Version: 1.29.0
+Summary: Parser tools for Mozilla's Glean telemetry
+Home-page: https://github.com/mozilla/glean_parser
+Author: Michael Droettboom
+Author-email: mdroettboom@mozilla.com
+License: UNKNOWN
+Description: ============
+ Glean Parser
+ ============
+
+ Parser tools for Mozilla's Glean telemetry.
+
+ Features
+ --------
+
+ Parses the ``metrics.yaml`` files for the Glean telemetry SDK and produces
+ output for various integrations.
+
+ Documentation
+ -------------
+
+ The full documentation is available `here <https://mozilla.github.io/glean_parser/>`__.
+
+ Requirements
+ ------------
+
+ - Python 3.6 (or later)
+
+ The following library requirements are installed automatically when glean_parser
+ is installed by `pip`.
+
+ - appdirs
+ - Click
+ - diskcache
+ - Jinja2
+ - jsonschema
+ - PyYAML
+
+ Additionally on Python 3.6:
+
+ - iso8601
+
+ Usage
+ -----
+
+ .. code-block:: console
+
+ $ glean_parser --help
+
+ Read in `metrics.yaml`, translate to kotlin format, and output to `output_dir`:
+
+ .. code-block:: console
+
+ $ glean_parser translate -o output_dir -f kotlin metrics.yaml
+
+ Check a Glean ping against the ping schema:
+
+ .. code-block:: console
+
+ $ glean_parser check < ping.json
+
+
+ =======
+ History
+ =======
+
+ Unreleased
+ ----------
+
+ 1.29.0 (2020-10-07)
+ -------------------
+
+ * **Breaking change:** `glean_parser` will now return an error code when any of the input files do not exist (unless the `--allow-missing-files` flag is passed).
+ * Generated code now includes a comment next to each metric containing the name of the metric in its original `snake_case` form.
+ * When metrics don't provide a `unit` parameter, it is not included in the output (as provided by probe-scraper).
+
+ 1.28.6 (2020-09-24)
+ -------------------
+
+ * BUGFIX: Ensure Kotlin arguments are deterministically ordered
+
+ 1.28.5 (2020-09-14)
+ -------------------
+
+ * Fix deploy step to update pip before deploying to pypi.
+
+ 1.28.4 (2020-09-14)
+ -------------------
+
+ * The `SUPERFLUOUS_NO_LINT` warning has been removed from the glinter.
+          It likely did more harm than good, and made it hard to write
+          `metrics.yaml` files that pass the linter across different versions of `glean_parser`.
+ * Expired metrics will now produce a linter warning, `EXPIRED_METRIC`.
+ * Expiry dates that are more than 730 days (~2 years) in the future will produce a linter warning,
+ `EXPIRATION_DATE_TOO_FAR`.
+ * Allow using the Quantity metric type outside of Gecko.
+ * New parser configs `custom_is_expired` and `custom_validate_expires` added.
+ These are both functions that take the `expires` value of the metric and return a bool.
+ (See `Metric.is_expired` and `Metric.validate_expires`).
+ These will allow FOG to provide custom validation for its version-based `expires` values.
+
+ 1.28.3 (2020-07-28)
+ -------------------
+
+ * BUGFIX: Support HashSet and Dictionary in the C# generated code.
+
+ 1.28.2 (2020-07-28)
+ -------------------
+
+ * BUGFIX: Generate valid C# code when using Labeled metric types.
+
+ 1.28.1 (2020-07-24)
+ -------------------
+
+ * BUGFIX: Add missing column to correctly render markdown tables in generated documentation.
+
+ 1.28.0 (2020-07-23)
+ -------------------
+
+        * **Breaking change:** The internal ping `deletion-request` was misnamed in pings.py, causing the linter to reject use of the correctly named ping when adding legacy ids to it. After updating, consuming apps that use `deletion_request` in any `send_in_pings` will need to change it to `deletion-request` in their metrics.yaml.
+
+ 1.27.0 (2020-07-21)
+ -------------------
+
+ * Rename the `data_category` field to `data_sensitivity` to be clearer.
+
+ 1.26.0 (2020-07-21)
+ -------------------
+
+ * Add support for JWE metric types.
+ * Add a `data_sensitivity` field to all metrics for specifying the type of data collected in the field.
+
+ 1.25.0 (2020-07-17)
+ -------------------
+
+ * Add support for generating C# code.
+ * BUGFIX: The memory unit is now correctly passed to the MemoryDistribution
+ metric type in Swift.
+
+ 1.24.0 (2020-06-30)
+ -------------------
+
+ * BUGFIX: look for metrics in send_if_empty pings. Metrics for these kinds of pings were being ignored.
+
+ 1.23.0 (2020-06-27)
+ -------------------
+
+ * Support for Python 3.5 has been dropped.
+ * BUGFIX: The ordering of event extra keys will now match with their enum, fixing a serious bug where keys of extras may not match the correct values in the data payload. See https://bugzilla.mozilla.org/show_bug.cgi?id=1648768.
+
+ 1.22.0 (2020-05-28)
+ -------------------
+
+ * **Breaking change:** (Swift only) Combine all metrics and pings into a single generated file `Metrics.swift`.
+
+ 1.21.0 (2020-05-25)
+ -------------------
+
+ * `glinter` messages have been improved with more details and to be more
+ actionable.
+ * A maximum of 10 `extra_keys` is now enforced for `event` metric types.
+ * BUGFIX: the `Lifetime` enum values now match the values of the implementation in mozilla/glean.
+
+ 1.20.4 (2020-05-07)
+ -------------------
+
+ * BUGFIX: yamllint errors are now reported using the correct file name.
+
+ 1.20.3 (2020-05-06)
+ -------------------
+
+ * Support for using `timing_distribution`'s `time_unit` parameter to control the range of acceptable values is documented. The default unit for this use case is `nanosecond` to avoid creating a breaking change. See [bug 1630997](https://bugzilla.mozilla.org/show_bug.cgi?id=1630997) for more information.
+
+ 1.20.2 (2020-04-24)
+ -------------------
+
+ * Dependencies that depend on the version of Python being used are now specified using the `Declaring platform specific dependencies syntax in setuptools <https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies>`__. This means that more recent versions of dependencies are likely to be installed on Python 3.6 and later, and unnecessary backport libraries won't be installed on more recent Python versions.
+
+ 1.20.1 (2020-04-21)
+ -------------------
+
+        * The minimum versions of the runtime dependencies have been lowered to increase compatibility with other tools. These minimum versions are now tested in CI, in addition to the latest versions of the dependencies, which were already tested in CI.
+
+ 1.20.0 (2020-04-15)
+ -------------------
+
+ * **Breaking change:** glinter errors found during the `translate` command will now return an error code. glinter warnings will be displayed, but not return an error code.
+ * `glean_parser` now produces a linter warning when `user` lifetime metrics are
+ set to expire. See [bug 1604854](https://bugzilla.mozilla.org/show_bug.cgi?id=1604854)
+ for additional context.
+
+ 1.19.0 (2020-03-18)
+ -------------------
+
+ * **Breaking change:** The regular expression used to validate labels is
+ stricter and more correct.
+ * Add more information about pings to markdown documentation:
+ * State whether the ping includes client id;
+ * Add list of data review links;
+ * Add list of related bugs links.
+ * `glean_parser` now makes it easier to write external translation functions for
+ different language targets.
+ * BUGFIX: glean_parser now works on 32-bit Windows.
+
+ 1.18.3 (2020-02-24)
+ -------------------
+
+ * Dropped the 'inflection' dependency.
+ * Constrained the 'zipp' and 'MarkupSafe' transitive dependencies to versions that
+ support Python 3.5.
+
+ 1.18.2 (2020-02-14)
+ -------------------
+
+ * BUGFIX: Fix rendering of first element of reason list.
+
+ 1.18.1 (2020-02-14)
+ -------------------
+
+ * BUGFIX: Reason codes are displayed in markdown output for built-in pings as
+ well.
+ * BUGFIX: Reason descriptions are indented correctly in markdown output.
+ * BUGFIX: To avoid a compiler error, the @JvmName annotation isn't added to
+ private members.
+
+ 1.18.0 (2020-02-13)
+ -------------------
+
+ * **Breaking Change (Java API)** Have the metrics names in Java match the names in Kotlin.
+ See [Bug 1588060](https://bugzilla.mozilla.org/show_bug.cgi?id=1588060).
+        * The reasons a ping is sent are now included in the generated markdown documentation.
+
+ 1.17.3 (2020-02-05)
+ -------------------
+
+ * BUGFIX: The version of Jinja2 now specifies < 3.0, since that version no
+ longer supports Python 3.5.
+
+ 1.17.2 (2020-02-05)
+ -------------------
+
+ * BUGFIX: Fixes an import error in generated Kotlin code.
+
+ 1.17.1 (2020-02-05)
+ -------------------
+
+ * BUGFIX: Generated Swift code now includes `import Glean`, unless generating
+ for a Glean-internal build.
+
+ 1.17.0 (2020-02-03)
+ -------------------
+
+ * Remove default schema URL from `validate_ping`
+ * Make `schema` argument required for CLI
+ * BUGFIX: Avoid default import in Swift code for Glean itself
+ * BUGFIX: Restore order of fields in generated Swift code
+
+ 1.16.0 (2020-01-15)
+ -------------------
+
+ * Support for `reason` codes on pings was added.
+
+ 1.15.6 (2020-02-06)
+ -------------------
+
+ * BUGFIX: The version of Jinja2 now specifies < 3.0, since that version no
+ longer supports Python 3.5 (backported from 1.17.3).
+
+ 1.15.5 (2019-12-19)
+ -------------------
+
+ * BUGFIX: Also allow the legacy name `all_pings` for `send_in_pings` parameter on metrics
+
+ 1.15.4 (2019-12-19)
+ -------------------
+
+ * BUGFIX: Also allow the legacy name `all_pings`
+
+ 1.15.3 (2019-12-13)
+ -------------------
+
+ * Add project title to markdown template.
+ * Remove "Sorry about that" from markdown template.
+ * BUGFIX: Replace dashes in variable names to force proper naming
+
+ 1.15.2 (2019-12-12)
+ -------------------
+
+ * BUGFIX: Use a pure Python library for iso8601 so there is no compilation required.
+
+ 1.15.1 (2019-12-12)
+ -------------------
+
+ * BUGFIX: Add some additional ping names to the non-kebab-case allow list.
+
+ 1.15.0 (2019-12-12)
+ -------------------
+
+        * Restrict new ping names to kebab-case and change `all_pings` to `all-pings`
+
+ 1.14.0 (2019-12-06)
+ -------------------
+
+ * glean_parser now supports Python versions 3.5, 3.6, 3.7 and 3.8.
+
+ 1.13.0 (2019-12-04)
+ -------------------
+
+ * The `translate` command will no longer clear extra files in the output directory.
+ * BUGFIX: Ensure all newlines in comments are prefixed with comment markers
+ * BUGFIX: Escape Swift keywords in variable names in generated code
+ * Generate documentation for pings that are sent if empty
+
+ 1.12.0 (2019-11-27)
+ -------------------
+
+ * Reserve the `deletion_request` ping name
+ * Added a new flag `send_if_empty` for pings
+
+ 1.11.0 (2019-11-13)
+ -------------------
+
+ * The `glinter` command now performs `yamllint` validation on registry files.
+
+ 1.10.0 (2019-11-11)
+ -------------------
+
+ * The Kotlin linter `detekt` is now run during CI, and for local
+ testing if installed.
+
+ * Python 3.8 is now tested in CI (in addition to Python 3.7).
+ Using `tox` for this doesn't work in modern versions of CircleCI, so
+ the `tox` configuration has been removed.
+
+ * `yamllint` has been added to test the YAML files on CI.
+
+ * ⚠ Metric types that don't yet have implementations in glean-core have been
+ removed. This includes `enumeration`, `rate`, `usage`, and `use_counter`, as
+ well as many labeled metrics that don't exist.
+
+ 1.9.5 (2019-10-22)
+ ------------------
+
+ * Allow a Swift lint for generated code
+
+ * New lint: Restrict what metric can go into the 'baseline' ping
+
+ * New lint: Warn for slight misspellings in ping names
+
+ * BUGFIX: change Labeled types labels from lists to sets.
+
+ 1.9.4 (2019-10-16)
+ ------------------
+
+        * Use lists instead of sets for Labeled types' labels to ensure that
+          the order of the labels given in the `metrics.yaml` is kept.
+
+ * `glinter` will now check for duplicate labels and error if there are any.
+
+ 1.9.3 (2019-10-09)
+ ------------------
+
+ * Add labels from Labeled types to the Extra column in the Markdown template.
+
+ 1.9.2 (2019-10-08)
+ ------------------
+
+ * BUGFIX: Don't call `is_internal_metric` on `Ping` objects.
+
+ 1.9.1 (2019-10-07)
+ ------------------
+
+ * Don't include Glean internal metrics in the generated markdown.
+
+ 1.9.0 (2019-10-04)
+ ------------------
+
+ * Glinter now warns when bug numbers (rather than URLs) are used.
+
+ * BUGFIX: add `HistogramType` and `MemoryUnit` imports in Kotlin generated code.
+
+ 1.8.4 (2019-10-02)
+ ------------------
+
+ * Removed unsupported labeled metric types.
+
+ 1.8.3 (2019-10-02)
+ ------------------
+
+ * Fix indentation for generated Swift code
+
+ 1.8.2 (2019-10-01)
+ ------------------
+
+        * Create labeled metrics and events in Swift code and wrap them in a configured namespace
+
+ 1.8.1 (2019-09-27)
+ ------------------
+
+ * BUGFIX: `memory_unit` is now passed to the Kotlin generator.
+
+ 1.8.0 (2019-09-26)
+ ------------------
+
+ * A new parser config, `do_not_disable_expired`, was added to turn off the
+ feature that expired metrics are automatically disabled. This is useful if you
+ want to retain the disabled value that is explicitly in the `metrics.yaml`
+ file.
+
+ * `glinter` will now report about superfluous `no_lint` entries.
+
+ 1.7.0 (2019-09-24)
+ ------------------
+
+ * A "`glinter`" tool is now included to find common mistakes in metric naming and setup.
+ This check is run during `translate` and warnings will be displayed.
+ ⚠ These warnings will be treated as errors in a future revision.
+
+ 1.6.1 (2019-09-17)
+ ------------------
+
+ * BUGFIX: `GleanGeckoMetricsMapping` must include `LabeledMetricType` and `CounterMetricType`.
+
+ 1.6.0 (2019-09-17)
+ ------------------
+
+ * NEW: Support for outputting metrics in Swift.
+
+        * BUGFIX: Provides a helpful error message when `geckoview_datapoint` is used on a metric type that doesn't support GeckoView exfiltration.
+
+ * Generate a lookup table for Gecko categorical histograms in `GleanGeckoMetricsMapping`.
+
+ * Introduce a 'Swift' output generator.
+
+ 1.4.1 (2019-08-28)
+ ------------------
+
+ * Documentation only.
+
+ 1.4.0 (2019-08-27)
+ ------------------
+
+ * Added support for generating markdown documentation from `metrics.yaml` files.
+
+ 1.3.0 (2019-08-22)
+ ------------------
+
+ * `quantity` metric type has been added.
+
+ 1.2.1 (2019-08-13)
+ ------------------
+
+ * BUGFIX: `includeClientId` was not being output for PingType.
+
+ 1.2.0 (2019-08-13)
+ ------------------
+
+ * `memory_distribution` metric type has been added.
+
+ * `custom_distribution` metric type has been added.
+
+ * `labeled_timespan` is no longer an allowed metric type.
+
+ 1.1.0 (2019-08-05)
+ ------------------
+
+ * Add a special `all_pings` value to `send_in_pings`.
+
+ 1.0.0 (2019-07-29)
+ ------------------
+
+ * First release to start following strict semver.
+
+ 0.1.0 (2018-10-15)
+ ------------------
+
+ * First release on PyPI.
+
+Keywords: glean_parser
+Platform: UNKNOWN
+Classifier: Development Status :: 2 - Pre-Alpha
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
diff --git a/third_party/python/glean_parser/README.rst b/third_party/python/glean_parser/README.rst
new file mode 100644
index 0000000000..b81b892a13
--- /dev/null
+++ b/third_party/python/glean_parser/README.rst
@@ -0,0 +1,54 @@
+============
+Glean Parser
+============
+
+Parser tools for Mozilla's Glean telemetry.
+
+Features
+--------
+
+Parses the ``metrics.yaml`` files for the Glean telemetry SDK and produces
+output for various integrations.
+
+Documentation
+-------------
+
+The full documentation is available `here <https://mozilla.github.io/glean_parser/>`__.
+
+Requirements
+------------
+
+- Python 3.6 (or later)
+
+The following library requirements are installed automatically when glean_parser
+is installed by `pip`.
+
+- appdirs
+- Click
+- diskcache
+- Jinja2
+- jsonschema
+- PyYAML
+
+Additionally on Python 3.6:
+
+- iso8601
+
+Usage
+-----
+
+.. code-block:: console
+
+ $ glean_parser --help
+
+Read in `metrics.yaml`, translate to kotlin format, and output to `output_dir`:
+
+.. code-block:: console
+
+ $ glean_parser translate -o output_dir -f kotlin metrics.yaml
+
+Check a Glean ping against the ping schema:
+
+.. code-block:: console
+
+ $ glean_parser check < ping.json
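+
+Run the `glinter` checks over one or more registry files (a usage sketch; the
+same checks are also run as part of `translate`):
+
+.. code-block:: console
+
+    $ glean_parser glinter metrics.yaml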
diff --git a/third_party/python/glean_parser/glean_parser/__init__.py b/third_party/python/glean_parser/glean_parser/__init__.py
new file mode 100644
index 0000000000..22d8b82d45
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/__init__.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""Top-level package for Glean parser."""
+
+from pkg_resources import get_distribution, DistributionNotFound
+
+try:
+ __version__ = get_distribution(__name__).version
+except DistributionNotFound:
+ # package is not installed
+ pass
+
+__author__ = """Michael Droettboom"""
+__email__ = "mdroettboom@mozilla.com"
diff --git a/third_party/python/glean_parser/glean_parser/__main__.py b/third_party/python/glean_parser/glean_parser/__main__.py
new file mode 100644
index 0000000000..844fc15419
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/__main__.py
@@ -0,0 +1,169 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""Console script for glean_parser."""
+
+import io
+from pathlib import Path
+import sys
+
+import click
+
+
+import glean_parser
+
+
+from . import lint
+from . import translate as mod_translate
+from . import validate_ping
+
+
+@click.command()
+@click.argument(
+ "input",
+ type=click.Path(exists=False, dir_okay=False, file_okay=True, readable=True),
+ nargs=-1,
+)
+@click.option(
+ "--output",
+ "-o",
+ type=click.Path(dir_okay=True, file_okay=False, writable=True),
+ nargs=1,
+ required=True,
+)
+@click.option(
+ "--format", "-f", type=click.Choice(mod_translate.OUTPUTTERS.keys()), required=True
+)
+@click.option(
+ "--option",
+ "-s",
+ help="backend-specific option. Must be of the form key=value",
+ type=str,
+ multiple=True,
+ required=False,
+)
+@click.option(
+ "--allow-reserved",
+ is_flag=True,
+ help=(
+ "If provided, allow the use of reserved fields. "
+ "Should only be set when building the Glean library itself."
+ ),
+)
+@click.option(
+ "--allow-missing-files",
+ is_flag=True,
+ help=("Do not treat missing input files as an error."),
+)
+def translate(input, format, output, option, allow_reserved, allow_missing_files):
+ """
+ Translate metrics.yaml and pings.yaml files to other formats.
+ """
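+    # Collect repeated `-s key=value` backend options into a dict, e.g.
+    # `-s namespace=Foo -s glean_namespace=Bar` becomes
+    # {"namespace": "Foo", "glean_namespace": "Bar"} (illustrative values).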
+ option_dict = {}
+ for opt in option:
+ key, val = opt.split("=", 1)
+ option_dict[key] = val
+
+ sys.exit(
+ mod_translate.translate(
+ [Path(x) for x in input],
+ format,
+ Path(output),
+ option_dict,
+ {
+ "allow_reserved": allow_reserved,
+ "allow_missing_files": allow_missing_files,
+ },
+ )
+ )
+
+
+@click.command()
+@click.option(
+ "--schema",
+ "-s",
+ type=str,
+ nargs=1,
+ required=True,
+ help=("HTTP url or file path to Glean ping schema. If remote, will cache to disk."),
+)
+def check(schema):
+ """
+ Validate the contents of a Glean ping.
+
+ The ping contents are read from stdin, and the validation errors are
+ written to stdout.
+ """
+ sys.exit(
+ validate_ping.validate_ping(
+ io.TextIOWrapper(sys.stdin.buffer, encoding="utf-8"),
+ io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8"),
+ schema_url=schema,
+ )
+ )
+
+
+@click.command()
+@click.argument(
+ "input",
+ type=click.Path(exists=True, dir_okay=False, file_okay=True, readable=True),
+ nargs=-1,
+)
+@click.option(
+ "--allow-reserved",
+ is_flag=True,
+ help=(
+ "If provided, allow the use of reserved fields. "
+ "Should only be set when building the Glean library itself."
+ ),
+)
+@click.option(
+ "--allow-missing-files",
+ is_flag=True,
+ help=("Do not treat missing input files as an error."),
+)
+def glinter(input, allow_reserved, allow_missing_files):
+ """
+ Runs a linter over the metrics.
+ """
+ sys.exit(
+ lint.glinter(
+ [Path(x) for x in input],
+ {
+ "allow_reserved": allow_reserved,
+ "allow_missing_files": allow_missing_files,
+ },
+ )
+ )
+
+
+@click.group()
+@click.version_option(glean_parser.__version__, prog_name="glean_parser")
+def main(args=None):
+ """Command line utility for glean_parser."""
+ pass
+
+
+main.add_command(translate)
+main.add_command(check)
+main.add_command(glinter)
+
+
+def main_wrapper(args=None):
+ """
+ A simple wrapper around click's `main` to display the glean_parser version
+ when there is an error.
+ """
+ try:
+ main(args=args)
+ except SystemExit as e:
+ if e.code != 0:
+ print(f"ERROR running glean_parser v{glean_parser.__version__}")
+ raise
+
+
+if __name__ == "__main__":
+ main_wrapper() # pragma: no cover
diff --git a/third_party/python/glean_parser/glean_parser/csharp.py b/third_party/python/glean_parser/glean_parser/csharp.py
new file mode 100644
index 0000000000..6ed8cb0338
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/csharp.py
@@ -0,0 +1,153 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Outputter to generate C# code for metrics.
+"""
+
+import enum
+import json
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Union # noqa
+
+from . import metrics
+from . import pings
+from . import util
+
+
+def csharp_datatypes_filter(value: util.JSONType) -> str:
+ """
+ A Jinja2 filter that renders C# literals.
+
+ Based on Python's JSONEncoder, but overrides:
+ - lists to use `new string[] {}` (only strings)
+ - dicts to use `new Dictionary<string, string> { ...}` (string, string)
+ - sets to use `new HashSet<string>() {}` (only strings)
+ - enums to use the like-named C# enum
+ """
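+    # Example renderings (illustrative values):
+    #   ["a", "b"] -> new string[] {"a", "b"}
+    #   {"k": "v"} -> new Dictionary<string, string> {{"k", "v"}}
+    #   {"x", "y"} -> new HashSet<string>() {"x", "y"}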
+
+ class CSharpEncoder(json.JSONEncoder):
+ def iterencode(self, value):
+ if isinstance(value, list):
+ assert all(isinstance(x, str) for x in value)
+ yield "new string[] {"
+ first = True
+ for subvalue in value:
+ if not first:
+ yield ", "
+ yield from self.iterencode(subvalue)
+ first = False
+ yield "}"
+ elif isinstance(value, dict):
+ yield "new Dictionary<string, string> {"
+ first = True
+ for key, subvalue in value.items():
+ if not first:
+ yield ", "
+ yield "{"
+ yield from self.iterencode(key)
+ yield ", "
+ yield from self.iterencode(subvalue)
+ yield "}"
+ first = False
+ yield "}"
+ elif isinstance(value, enum.Enum):
+ yield (value.__class__.__name__ + "." + util.Camelize(value.name))
+ elif isinstance(value, set):
+ yield "new HashSet<string>() {"
+ first = True
+ for subvalue in sorted(list(value)):
+ if not first:
+ yield ", "
+ yield from self.iterencode(subvalue)
+ first = False
+ yield "}"
+ else:
+ yield from super().iterencode(value)
+
+ return "".join(CSharpEncoder().iterencode(value))
+
+
+def type_name(obj: Union[metrics.Metric, pings.Ping]) -> str:
+ """
+ Returns the C# type to use for a given metric or ping object.
+ """
+ generate_enums = getattr(obj, "_generate_enums", [])
+ if len(generate_enums):
+ template_args = []
+ for member, suffix in generate_enums:
+ if len(getattr(obj, member)):
+ template_args.append(util.camelize(obj.name) + suffix)
+ else:
+ if suffix == "Keys":
+ template_args.append("NoExtraKeys")
+ else:
+ template_args.append("No" + suffix)
+
+ return "{}<{}>".format(class_name(obj.type), ", ".join(template_args))
+
+ return class_name(obj.type)
+
+
+def class_name(obj_type: str) -> str:
+ """
+ Returns the C# class name for a given metric or ping type.
+ """
+ if obj_type == "ping":
+ return "PingType"
+ if obj_type.startswith("labeled_"):
+ obj_type = obj_type[8:]
+ return util.Camelize(obj_type) + "MetricType"
+
+
+def output_csharp(
+ objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]] = None
+) -> None:
+ """
+ Given a tree of objects, output C# code to `output_dir`.
+
+    :param objs: A tree of objects (metrics and pings) as returned from
+ `parser.parse_objects`.
+ :param output_dir: Path to an output directory to write to.
+ :param options: options dictionary, with the following optional keys:
+
+ - `namespace`: The package namespace to declare at the top of the
+ generated files. Defaults to `GleanMetrics`.
+ - `glean_namespace`: The package namespace of the glean library itself.
+ This is where glean objects will be imported from in the generated
+ code.
+ """
+ if options is None:
+ options = {}
+
+ template = util.get_jinja2_template(
+ "csharp.jinja2",
+ filters=(
+ ("csharp", csharp_datatypes_filter),
+ ("type_name", type_name),
+ ("class_name", class_name),
+ ),
+ )
+
+ namespace = options.get("namespace", "GleanMetrics")
+ glean_namespace = options.get("glean_namespace", "Mozilla.Glean")
+
+ for category_key, category_val in objs.items():
+ filename = util.Camelize(category_key) + ".cs"
+ filepath = output_dir / filename
+
+ with filepath.open("w", encoding="utf-8") as fd:
+ fd.write(
+ template.render(
+ category_name=category_key,
+ objs=category_val,
+ extra_args=util.extra_args,
+ namespace=namespace,
+ glean_namespace=glean_namespace,
+ )
+ )
+ # Jinja2 squashes the final newline, so we explicitly add it
+ fd.write("\n")
diff --git a/third_party/python/glean_parser/glean_parser/kotlin.py b/third_party/python/glean_parser/glean_parser/kotlin.py
new file mode 100644
index 0000000000..566049a892
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/kotlin.py
@@ -0,0 +1,261 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Outputter to generate Kotlin code for metrics.
+"""
+
+from collections import OrderedDict
+import enum
+import json
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Union # noqa
+
+from . import metrics
+from . import pings
+from . import util
+
+
+def kotlin_datatypes_filter(value: util.JSONType) -> str:
+ """
+ A Jinja2 filter that renders Kotlin literals.
+
+ Based on Python's JSONEncoder, but overrides:
+ - lists to use listOf
+ - dicts to use mapOf
+ - sets to use setOf
+ - enums to use the like-named Kotlin enum
+ """
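+    # Example renderings (illustrative values):
+    #   ["a", "b"] -> listOf("a", "b")
+    #   {"k": "v"} -> mapOf("k" to "v")
+    #   {"x", "y"} -> setOf("x", "y")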
+
+ class KotlinEncoder(json.JSONEncoder):
+ def iterencode(self, value):
+ if isinstance(value, list):
+ yield "listOf("
+ first = True
+ for subvalue in value:
+ if not first:
+ yield ", "
+ yield from self.iterencode(subvalue)
+ first = False
+ yield ")"
+ elif isinstance(value, dict):
+ yield "mapOf("
+ first = True
+ for key, subvalue in value.items():
+ if not first:
+ yield ", "
+ yield from self.iterencode(key)
+ yield " to "
+ yield from self.iterencode(subvalue)
+ first = False
+ yield ")"
+ elif isinstance(value, enum.Enum):
+ yield (value.__class__.__name__ + "." + util.Camelize(value.name))
+ elif isinstance(value, set):
+ yield "setOf("
+ first = True
+ for subvalue in sorted(list(value)):
+ if not first:
+ yield ", "
+ yield from self.iterencode(subvalue)
+ first = False
+ yield ")"
+ else:
+ yield from super().iterencode(value)
+
+ return "".join(KotlinEncoder().iterencode(value))
+
+
+def type_name(obj: Union[metrics.Metric, pings.Ping]) -> str:
+ """
+ Returns the Kotlin type to use for a given metric or ping object.
+ """
+ generate_enums = getattr(obj, "_generate_enums", [])
+ if len(generate_enums):
+ template_args = []
+ for member, suffix in generate_enums:
+ if len(getattr(obj, member)):
+ template_args.append(util.camelize(obj.name) + suffix)
+ else:
+ if suffix == "Keys":
+ template_args.append("NoExtraKeys")
+ else:
+ template_args.append("No" + suffix)
+
+ return "{}<{}>".format(class_name(obj.type), ", ".join(template_args))
+
+ return class_name(obj.type)
+
+
+def class_name(obj_type: str) -> str:
+ """
+ Returns the Kotlin class name for a given metric or ping type.
+ """
+ if obj_type == "ping":
+ return "PingType"
+ if obj_type.startswith("labeled_"):
+ obj_type = obj_type[8:]
+ return util.Camelize(obj_type) + "MetricType"
+
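+# Illustrative mappings produced by class_name() above:
+#
+#   class_name("ping")                -> "PingType"
+#   class_name("labeled_counter")     -> "CounterMetricType"
+#   class_name("memory_distribution") -> "MemoryDistributionMetricType"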
+
+def output_gecko_lookup(
+ objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]] = None
+) -> None:
+ """
+ Given a tree of objects, generate a Kotlin map between Gecko histograms and
+ Glean SDK metric types.
+
+ :param objs: A tree of objects (metrics and pings) as returned from
+ `parser.parse_objects`.
+ :param output_dir: Path to an output directory to write to.
+ :param options: options dictionary, with the following optional keys:
+
+ - `namespace`: The package namespace to declare at the top of the
+ generated files. Defaults to `GleanMetrics`.
+ - `glean_namespace`: The package namespace of the glean library itself.
+ This is where glean objects will be imported from in the generated
+ code.
+ """
+ if options is None:
+ options = {}
+
+ template = util.get_jinja2_template(
+ "kotlin.geckoview.jinja2",
+ filters=(
+ ("kotlin", kotlin_datatypes_filter),
+ ("type_name", type_name),
+ ("class_name", class_name),
+ ),
+ )
+
+ namespace = options.get("namespace", "GleanMetrics")
+ glean_namespace = options.get("glean_namespace", "mozilla.components.service.glean")
+
+ # Build a dictionary that contains data for metrics that are
+ # histogram-like/scalar-like and contain a gecko_datapoint, with this format:
+ #
+ # {
+ # "histograms": {
+ # "category": [
+ # {"gecko_datapoint": "the-datapoint", "name": "the-metric-name"},
+ # ...
+ # ],
+ # ...
+ # },
+ # "other-type": {}
+ # }
+ gecko_metrics: OrderedDict[
+ str, OrderedDict[str, List[Dict[str, str]]]
+ ] = OrderedDict()
+
+ # Define scalar-like types.
+ SCALAR_LIKE_TYPES = ["boolean", "string", "quantity"]
+
+ for category_key, category_val in objs.items():
+ # Support exfiltration of Gecko metrics from products using both the
+ # Glean SDK and GeckoView. See bug 1566356 for more context.
+ for metric in category_val.values():
+ # This is not a Gecko metric, skip it.
+ if isinstance(metric, pings.Ping) or not getattr(
+ metric, "gecko_datapoint", False
+ ):
+ continue
+
+ # Put scalars in their own categories, histogram-like in "histograms" and
+ # categorical histograms in "categoricals".
+ type_category = "histograms"
+ if metric.type in SCALAR_LIKE_TYPES:
+ type_category = metric.type
+ elif metric.type == "labeled_counter":
+ # Labeled counters with a 'gecko_datapoint' property
+ # are categorical histograms.
+ type_category = "categoricals"
+
+ gecko_metrics.setdefault(type_category, OrderedDict())
+ gecko_metrics[type_category].setdefault(category_key, [])
+
+ gecko_metrics[type_category][category_key].append(
+ {"gecko_datapoint": metric.gecko_datapoint, "name": metric.name}
+ )
+
+ if not gecko_metrics:
+ # Bail out and don't create a file if no gecko metrics
+ # are found.
+ return
+
+ filepath = output_dir / "GleanGeckoMetricsMapping.kt"
+ with filepath.open("w", encoding="utf-8") as fd:
+ fd.write(
+ template.render(
+ gecko_metrics=gecko_metrics,
+ namespace=namespace,
+ glean_namespace=glean_namespace,
+ )
+ )
+ # Jinja2 squashes the final newline, so we explicitly add it
+ fd.write("\n")
+
+
+def output_kotlin(
+ objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]] = None
+) -> None:
+ """
+ Given a tree of objects, output Kotlin code to `output_dir`.
+
+ :param objs: A tree of objects (metrics and pings) as returned from
+ `parser.parse_objects`.
+ :param output_dir: Path to an output directory to write to.
+ :param options: options dictionary, with the following optional keys:
+
+ - `namespace`: The package namespace to declare at the top of the
+ generated files. Defaults to `GleanMetrics`.
+ - `glean_namespace`: The package namespace of the glean library itself.
+ This is where glean objects will be imported from in the generated
+ code.
+ """
+ if options is None:
+ options = {}
+
+ template = util.get_jinja2_template(
+ "kotlin.jinja2",
+ filters=(
+ ("kotlin", kotlin_datatypes_filter),
+ ("type_name", type_name),
+ ("class_name", class_name),
+ ),
+ )
+
+ namespace = options.get("namespace", "GleanMetrics")
+ glean_namespace = options.get("glean_namespace", "mozilla.components.service.glean")
+
+ for category_key, category_val in objs.items():
+ filename = util.Camelize(category_key) + ".kt"
+ filepath = output_dir / filename
+
+ obj_types = sorted(
+ list(set(class_name(obj.type) for obj in category_val.values()))
+ )
+ has_labeled_metrics = any(
+ getattr(metric, "labeled", False) for metric in category_val.values()
+ )
+
+ with filepath.open("w", encoding="utf-8") as fd:
+ fd.write(
+ template.render(
+ category_name=category_key,
+ objs=category_val,
+ obj_types=obj_types,
+ extra_args=util.extra_args,
+ namespace=namespace,
+ has_labeled_metrics=has_labeled_metrics,
+ glean_namespace=glean_namespace,
+ )
+ )
+ # Jinja2 squashes the final newline, so we explicitly add it
+ fd.write("\n")
+
+ # TODO: Maybe this should just be a separate outputter?
+ output_gecko_lookup(objs, output_dir, options)
diff --git a/third_party/python/glean_parser/glean_parser/lint.py b/third_party/python/glean_parser/glean_parser/lint.py
new file mode 100644
index 0000000000..facb632d5e
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/lint.py
@@ -0,0 +1,442 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import enum
+from pathlib import Path
+import re
+import sys
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Generator,
+ List,
+ Iterable,
+ Optional,
+ Tuple,
+) # noqa
+
+
+from . import metrics
+from . import parser
+from . import pings
+from . import util
+
+
+from yamllint.config import YamlLintConfig # type: ignore
+from yamllint import linter # type: ignore
+
+
+LintGenerator = Generator[str, None, None]
+
+
+class CheckType(enum.Enum):
+ warning = 0
+ error = 1
+
+
+def _split_words(name: str) -> List[str]:
+ """
+ Helper function to split words on either `.` or `_`.
+ """
+ return re.split("[._]", name)
+
+
+def _english_list(items: List[str]) -> str:
+ """
+ Helper function to format a list [A, B, C] as "'A', 'B', or 'C'".
+ """
+ if len(items) == 0:
+ return ""
+ elif len(items) == 1:
+ return f"'{items[0]}'"
+ else:
+ return "{}, or '{}'".format(
+ ", ".join([f"'{x}'" for x in items[:-1]]), items[-1]
+ )
+
+
+def _hamming_distance(str1: str, str2: str) -> int:
+ """
+ Count the # of differences between strings str1 and str2,
+ padding the shorter one with whitespace
+ """
+
+ diffs = 0
+ if len(str1) < len(str2):
+ str1, str2 = str2, str1
+ len_dist = len(str1) - len(str2)
+ str2 += " " * len_dist
+
+ for ch1, ch2 in zip(str1, str2):
+ if ch1 != ch2:
+ diffs += 1
+ return diffs
+
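+# Illustrative: _hamming_distance("events", "evants") == 1, and
+# _hamming_distance("metric", "metrics") == 1 because the shorter string is
+# padded with a trailing space before comparison.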
+
+def check_common_prefix(
+ category_name: str, metrics: Iterable[metrics.Metric]
+) -> LintGenerator:
+ """
+ Check if all metrics begin with a common prefix.
+ """
+ metric_words = sorted([_split_words(metric.name) for metric in metrics])
+
+ if len(metric_words) < 2:
+ return
+
+ first = metric_words[0]
+ last = metric_words[-1]
+
+ for i in range(min(len(first), len(last))):
+ if first[i] != last[i]:
+ break
+
+ if i > 0:
+ common_prefix = "_".join(first[:i])
+ yield (
+ f"Within category '{category_name}', all metrics begin with "
+ f"prefix '{common_prefix}'."
+ "Remove the prefixes on the metric names and (possibly) "
+ "rename the category."
+ )
+
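+# For example (illustrative), a category containing only the metrics
+# "engine_default" and "engine_count" is flagged with the common prefix
+# "engine".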
+
+def check_unit_in_name(
+ metric: metrics.Metric, parser_config: Dict[str, Any]
+) -> LintGenerator:
+ """
+ The metric name ends in a unit.
+ """
+ TIME_UNIT_ABBREV = {
+ "nanosecond": "ns",
+ "microsecond": "us",
+ "millisecond": "ms",
+ "second": "s",
+ "minute": "m",
+ "hour": "h",
+ "day": "d",
+ }
+
+ MEMORY_UNIT_ABBREV = {
+ "byte": "b",
+ "kilobyte": "kb",
+ "megabyte": "mb",
+ "gigabyte": "gb",
+ }
+
+ name_words = _split_words(metric.name)
+ unit_in_name = name_words[-1]
+
+ time_unit = getattr(metric, "time_unit", None)
+ memory_unit = getattr(metric, "memory_unit", None)
+ unit = getattr(metric, "unit", None)
+
+ if time_unit is not None:
+ if (
+ unit_in_name == TIME_UNIT_ABBREV.get(time_unit.name)
+ or unit_in_name == time_unit.name
+ ):
+ yield (
+ f"Suffix '{unit_in_name}' is redundant with time_unit "
+ f"'{time_unit.name}'. Only include time_unit."
+ )
+ elif (
+ unit_in_name in TIME_UNIT_ABBREV.keys()
+ or unit_in_name in TIME_UNIT_ABBREV.values()
+ ):
+ yield (
+ f"Suffix '{unit_in_name}' doesn't match time_unit "
+ f"'{time_unit.name}'. "
+ "Confirm the unit is correct and only include time_unit."
+ )
+
+ elif memory_unit is not None:
+ if (
+ unit_in_name == MEMORY_UNIT_ABBREV.get(memory_unit.name)
+ or unit_in_name == memory_unit.name
+ ):
+ yield (
+ f"Suffix '{unit_in_name}' is redundant with memory_unit "
+ f"'{memory_unit.name}'. "
+ "Only include memory_unit."
+ )
+ elif (
+ unit_in_name in MEMORY_UNIT_ABBREV.keys()
+ or unit_in_name in MEMORY_UNIT_ABBREV.values()
+ ):
+ yield (
+ f"Suffix '{unit_in_name}' doesn't match memory_unit "
+ f"{memory_unit.name}'. "
+ "Confirm the unit is correct and only include memory_unit."
+ )
+
+ elif unit is not None:
+ if unit_in_name == unit:
+ yield (
+ f"Suffix '{unit_in_name}' is redundant with unit param "
+ f"'{unit}'. "
+ "Only include unit."
+ )
+
+
+def check_category_generic(
+ category_name: str, metrics: Iterable[metrics.Metric]
+) -> LintGenerator:
+ """
+ The category name is too generic.
+ """
+ GENERIC_CATEGORIES = ["metrics", "events"]
+
+ if category_name in GENERIC_CATEGORIES:
+ yield (
+ f"Category '{category_name}' is too generic. "
+ f"Don't use {_english_list(GENERIC_CATEGORIES)} for category names"
+ )
+
+
+def check_bug_number(
+ metric: metrics.Metric, parser_config: Dict[str, Any]
+) -> LintGenerator:
+ number_bugs = [str(bug) for bug in metric.bugs if isinstance(bug, int)]
+
+ if len(number_bugs):
+ yield (
+ f"For bugs {', '.join(number_bugs)}: "
+ "Bug numbers are deprecated and should be changed to full URLs. "
+ "For example, use 'http://bugzilla.mozilla.org/12345' instead of '12345'."
+ )
+
+
+def check_valid_in_baseline(
+ metric: metrics.Metric, parser_config: Dict[str, Any]
+) -> LintGenerator:
+ allow_reserved = parser_config.get("allow_reserved", False)
+
+ if not allow_reserved and "baseline" in metric.send_in_pings:
+ yield (
+ "The baseline ping is Glean-internal. "
+ "Remove 'baseline' from the send_in_pings array."
+ )
+
+
+def check_misspelled_pings(
+ metric: metrics.Metric, parser_config: Dict[str, Any]
+) -> LintGenerator:
+ for ping in metric.send_in_pings:
+ for builtin in pings.RESERVED_PING_NAMES:
+ distance = _hamming_distance(ping, builtin)
+ if distance == 1:
+ yield f"Ping '{ping}' seems misspelled. Did you mean '{builtin}'?"
+
+
+def check_user_lifetime_expiration(
+ metric: metrics.Metric, parser_config: Dict[str, Any]
+) -> LintGenerator:
+
+ if metric.lifetime == metrics.Lifetime.user and metric.expires != "never":
+ yield (
+ "Metrics with 'user' lifetime cannot have an expiration date. "
+ "They live as long as the user profile does. "
+ "Set expires to 'never'."
+ )
+
+
+def check_expired_date(
+ metric: metrics.Metric, parser_config: Dict[str, Any]
+) -> LintGenerator:
+ try:
+ metric.validate_expires()
+ except ValueError as e:
+ yield (str(e))
+
+
+def check_expired_metric(
+ metric: metrics.Metric, parser_config: Dict[str, Any]
+) -> LintGenerator:
+ if metric.is_expired():
+ yield ("Metric has expired. Please consider removing it.")
+
+
+# The checks that operate on an entire category of metrics:
+# {NAME: (function, is_error)}
+CATEGORY_CHECKS: Dict[
+ str, Tuple[Callable[[str, Iterable[metrics.Metric]], LintGenerator], CheckType]
+] = {
+ "COMMON_PREFIX": (check_common_prefix, CheckType.error),
+ "CATEGORY_GENERIC": (check_category_generic, CheckType.error),
+}
+
+
+# The checks that operate on individual metrics:
+# {NAME: (function, is_error)}
+INDIVIDUAL_CHECKS: Dict[
+ str, Tuple[Callable[[metrics.Metric, dict], LintGenerator], CheckType]
+] = {
+ "UNIT_IN_NAME": (check_unit_in_name, CheckType.error),
+ "BUG_NUMBER": (check_bug_number, CheckType.error),
+ "BASELINE_PING": (check_valid_in_baseline, CheckType.error),
+ "MISSPELLED_PING": (check_misspelled_pings, CheckType.error),
+ "EXPIRATION_DATE_TOO_FAR": (check_expired_date, CheckType.warning),
+ "USER_LIFETIME_EXPIRATION": (check_user_lifetime_expiration, CheckType.warning),
+ "EXPIRED": (check_expired_metric, CheckType.warning),
+}
+
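+# Illustrative only: a metric can opt out of any of the checks above by
+# listing the check name under `no_lint` in metrics.yaml (no_lint can also
+# be set at the top level of the file), e.g.
+#
+#   search:
+#     default_engine:
+#       type: string
+#       no_lint:
+#         - COMMON_PREFIX
+#         - MISSPELLED_PING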
+
+class GlinterNit:
+ def __init__(self, check_name: str, name: str, msg: str, check_type: CheckType):
+ self.check_name = check_name
+ self.name = name
+ self.msg = msg
+ self.check_type = check_type
+
+ def format(self):
+ return (
+ f"{self.check_type.name.upper()}: {self.check_name}: "
+ f"{self.name}: {self.msg}"
+ )
+
+
+def lint_metrics(
+ objs: metrics.ObjectTree,
+ parser_config: Optional[Dict[str, Any]] = None,
+ file=sys.stderr,
+) -> List[GlinterNit]:
+ """
+ Performs glinter checks on a set of metrics objects.
+
+ :param objs: Tree of metric objects, as returned by `parser.parse_objects`.
+ :param file: The stream to write errors to.
+ :returns: List of nits.
+ """
+ if parser_config is None:
+ parser_config = {}
+
+ nits: List[GlinterNit] = []
+ for (category_name, category) in sorted(list(objs.items())):
+ if category_name == "pings":
+ continue
+
+ # Make sure the category has only Metrics, not Pings
+ category_metrics = dict(
+ (name, metric)
+ for (name, metric) in category.items()
+ if isinstance(metric, metrics.Metric)
+ )
+
+ for (cat_check_name, (cat_check_func, check_type)) in CATEGORY_CHECKS.items():
+ if any(
+ cat_check_name in metric.no_lint for metric in category_metrics.values()
+ ):
+ continue
+ nits.extend(
+ GlinterNit(cat_check_name, category_name, msg, check_type)
+ for msg in cat_check_func(category_name, category_metrics.values())
+ )
+
+ for (_metric_name, metric) in sorted(list(category_metrics.items())):
+ for (check_name, (check_func, check_type)) in INDIVIDUAL_CHECKS.items():
+ new_nits = list(check_func(metric, parser_config))
+ if len(new_nits):
+ if check_name not in metric.no_lint:
+ nits.extend(
+ GlinterNit(
+ check_name,
+ ".".join([metric.category, metric.name]),
+ msg,
+ check_type,
+ )
+ for msg in new_nits
+ )
+
+ if len(nits):
+ print("Sorry, Glean found some glinter nits:", file=file)
+ for nit in nits:
+ print(nit.format(), file=file)
+ print("", file=file)
+ print("Please fix the above nits to continue.", file=file)
+ print(
+ "To disable a check, add a `no_lint` parameter "
+ "with a list of check names to disable.\n"
+ "This parameter can appear with each individual metric, or at the "
+ "top-level to affect the entire file.",
+ file=file,
+ )
+
+ return nits
+
+
+def lint_yaml_files(
+ input_filepaths: Iterable[Path],
+ file=sys.stderr,
+ parser_config: Optional[Dict[str, Any]] = None,
+) -> List:
+ """
+ Performs glinter YAML lint on a set of files.
+
+ :param input_filepaths: List of input files to lint.
+ :param file: The stream to write errors to.
+ :returns: List of nits.
+ """
+
+ if parser_config is None:
+ parser_config = {}
+
+ # Generic type since the actual type comes from yamllint, which we don't
+ # control.
+ nits: List = []
+ for path in input_filepaths:
+ if not path.is_file() and parser_config.get("allow_missing_files", False):
+ continue
+
+ # yamllint needs both the file content and the path.
+ file_content = None
+ with path.open("r", encoding="utf-8") as fd:
+ file_content = fd.read()
+
+ problems = linter.run(file_content, YamlLintConfig("extends: default"), path)
+ nits.extend((path, p) for p in problems)
+
+ if len(nits):
+ print("Sorry, Glean found some glinter nits:", file=file)
+ for (path, p) in nits:
+ print(f"{path} ({p.line}:{p.column}) - {p.message}")
+ print("", file=file)
+ print("Please fix the above nits to continue.", file=file)
+
+ return [x[1] for x in nits]
+
+
+def glinter(
+ input_filepaths: Iterable[Path],
+ parser_config: Optional[Dict[str, Any]] = None,
+ file=sys.stderr,
+) -> int:
+ """
+ Commandline helper for glinter.
+
+ :param input_filepaths: List of Path objects to load metrics from.
+ :param parser_config: Parser configuration object, passed to
+ `parser.parse_objects`.
+ :param file: The stream to write the errors to.
+ :return: Non-zero if there were any glinter errors.
+ """
+ if parser_config is None:
+ parser_config = {}
+
+ if lint_yaml_files(input_filepaths, file=file, parser_config=parser_config):
+ return 1
+
+ objs = parser.parse_objects(input_filepaths, parser_config)
+
+ if util.report_validation_errors(objs):
+ return 1
+
+ nits = lint_metrics(objs.value, parser_config=parser_config, file=file)
+ if any(nit.check_type == CheckType.error for nit in nits):
+ return 1
+ if len(nits) == 0:
+ print("✨ Your metrics are Glean! ✨", file=file)
+ return 0
diff --git a/third_party/python/glean_parser/glean_parser/markdown.py b/third_party/python/glean_parser/glean_parser/markdown.py
new file mode 100644
index 0000000000..49d9580041
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/markdown.py
@@ -0,0 +1,244 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Outputter to generate Markdown documentation for metrics.
+"""
+
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+
+from . import metrics
+from . import pings
+from . import util
+from collections import defaultdict
+
+
+def extra_info(obj: Union[metrics.Metric, pings.Ping]) -> List[Tuple[str, str]]:
+ """
+ Returns a list of string to string tuples with extra information for the type
+ (e.g. extra keys for events) or an empty list if nothing is available.
+ """
+ extra_info = []
+
+ if isinstance(obj, metrics.Event):
+ for key in obj.allowed_extra_keys:
+ extra_info.append((key, obj.extra_keys[key]["description"]))
+
+ if isinstance(obj, metrics.Labeled) and obj.ordered_labels is not None:
+ for label in obj.ordered_labels:
+ extra_info.append((label, None))
+
+ if isinstance(obj, metrics.Jwe):
+ extra_info.append(("decrypted_name", obj.decrypted_name))
+
+ if isinstance(obj, metrics.Quantity):
+ extra_info.append(("unit", obj.unit))
+
+ return extra_info
+
+
+def ping_desc(
+ ping_name: str, custom_pings_cache: Optional[Dict[str, pings.Ping]] = None
+) -> str:
+ """
+ Return a text description of the ping. If a custom_pings_cache
+ is available, look in there for non-reserved ping names description.
+ """
+ desc = ""
+
+ if ping_name in pings.RESERVED_PING_NAMES:
+ desc = (
+ "This is a built-in ping that is assembled out of the "
+ "box by the Glean SDK."
+ )
+ elif ping_name == "all-pings":
+ desc = "These metrics are sent in every ping."
+ elif custom_pings_cache is not None and ping_name in custom_pings_cache:
+ desc = custom_pings_cache[ping_name].description
+
+ return desc
+
+
+def metrics_docs(obj_name: str) -> str:
+ """
+ Return a link to the documentation entry for the Glean SDK metric of the
+ requested type.
+ """
+ # We need to fix up labeled metric types, as the type names are singular
+ # while the docs refer to them in the plural.
+ fixedup_name = obj_name
+ if obj_name.startswith("labeled_"):
+ fixedup_name += "s"
+
+ return f"https://mozilla.github.io/glean/book/user/metrics/{fixedup_name}.html"
+
+
+def ping_docs(ping_name: str) -> str:
+ """
+ Return a link to the documentation entry for the requested Glean SDK
+ built-in ping.
+ """
+ if ping_name not in pings.RESERVED_PING_NAMES:
+ return ""
+
+ return f"https://mozilla.github.io/glean/book/user/pings/{ping_name}.html"
+
+
+def if_empty(
+ ping_name: str, custom_pings_cache: Optional[Dict[str, pings.Ping]] = None
+) -> bool:
+ if custom_pings_cache is not None and ping_name in custom_pings_cache:
+ return custom_pings_cache[ping_name].send_if_empty
+ else:
+ return False
+
+
+def ping_reasons(
+ ping_name: str, custom_pings_cache: Dict[str, pings.Ping]
+) -> Dict[str, str]:
+ """
+ Returns the reasons dictionary for the ping.
+ """
+ if ping_name == "all-pings":
+ return {}
+ elif ping_name in custom_pings_cache:
+ return custom_pings_cache[ping_name].reasons
+
+ return {}
+
+
+def ping_data_reviews(
+ ping_name: str, custom_pings_cache: Optional[Dict[str, pings.Ping]] = None
+) -> Optional[List[str]]:
+ if custom_pings_cache is not None and ping_name in custom_pings_cache:
+ return custom_pings_cache[ping_name].data_reviews
+ else:
+ return None
+
+
+def ping_bugs(
+ ping_name: str, custom_pings_cache: Optional[Dict[str, pings.Ping]] = None
+) -> Optional[List[str]]:
+ if custom_pings_cache is not None and ping_name in custom_pings_cache:
+ return custom_pings_cache[ping_name].bugs
+ else:
+ return None
+
+
+def ping_include_client_id(
+ ping_name: str, custom_pings_cache: Optional[Dict[str, pings.Ping]] = None
+) -> bool:
+ if custom_pings_cache is not None and ping_name in custom_pings_cache:
+ return custom_pings_cache[ping_name].include_client_id
+ else:
+ return False
+
+
+def data_sensitivity_numbers(
+ data_sensitivity: Optional[List[metrics.DataSensitivity]],
+) -> str:
+ if data_sensitivity is None:
+ return "unknown"
+ else:
+ return ", ".join(str(x.value) for x in data_sensitivity)
+
+
+def output_markdown(
+ objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]] = None
+) -> None:
+ """
+ Given a tree of objects, output Markdown docs to `output_dir`.
+
+ This produces a single `metrics.md`. The file contains a table of
+ contents and a section for each ping metrics are collected for.
+
+ :param objs: A tree of objects (metrics and pings) as returned from
+ `parser.parse_objects`.
+ :param output_dir: Path to an output directory to write to.
+ :param options: options dictionary, with the following optional key:
+ - `project_title`: The project's title.
+ """
+ if options is None:
+ options = {}
+
+ # Build a dictionary that associates pings with their metrics.
+ #
+ # {
+ # "baseline": [
+ # { ... metric data ... },
+ # ...
+ # ],
+ # "metrics": [
+ # { ... metric data ... },
+ # ...
+ # ],
+ # ...
+ # }
+ #
+ # This also builds a dictionary of custom pings, if available.
+ custom_pings_cache: Dict[str, pings.Ping] = defaultdict()
+ metrics_by_pings: Dict[str, List[metrics.Metric]] = defaultdict(list)
+ for _category_key, category_val in objs.items():
+ for obj in category_val.values():
+ # Filter out custom pings. We will need them for extracting
+ # the description
+ if isinstance(obj, pings.Ping):
+ custom_pings_cache[obj.name] = obj
+ # Pings that have `send_if_empty` set to true
+ # might not have any metrics. They need to at least have an
+ # empty array of metrics to show up on the template.
+ if obj.send_if_empty and not metrics_by_pings[obj.name]:
+ metrics_by_pings[obj.name] = []
+
+ # Skip internal Glean metrics; we don't generate docs
+ # for them.
+ if isinstance(obj, metrics.Metric) and not obj.is_internal_metric():
+ # If we get here, obj is definitely a metric we want
+ # docs for.
+ for ping_name in obj.send_in_pings:
+ metrics_by_pings[ping_name].append(obj)
+
+ # Sort the metrics by their identifier, to make them show up nicely
+ # in the docs and to make generated docs reproducible.
+ for ping_name in metrics_by_pings:
+ metrics_by_pings[ping_name] = sorted(
+ metrics_by_pings[ping_name], key=lambda x: x.identifier()
+ )
+
+ project_title = options.get("project_title", "this project")
+
+ template = util.get_jinja2_template(
+ "markdown.jinja2",
+ filters=(
+ ("extra_info", extra_info),
+ ("metrics_docs", metrics_docs),
+ ("ping_desc", lambda x: ping_desc(x, custom_pings_cache)),
+ ("ping_send_if_empty", lambda x: if_empty(x, custom_pings_cache)),
+ ("ping_docs", ping_docs),
+ ("ping_reasons", lambda x: ping_reasons(x, custom_pings_cache)),
+ ("ping_data_reviews", lambda x: ping_data_reviews(x, custom_pings_cache)),
+ ("ping_bugs", lambda x: ping_bugs(x, custom_pings_cache)),
+ (
+ "ping_include_client_id",
+ lambda x: ping_include_client_id(x, custom_pings_cache),
+ ),
+ ("data_sensitivity_numbers", data_sensitivity_numbers),
+ ),
+ )
+
+ filename = "metrics.md"
+ filepath = output_dir / filename
+
+ with filepath.open("w", encoding="utf-8") as fd:
+ fd.write(
+ template.render(
+ metrics_by_pings=metrics_by_pings, project_title=project_title
+ )
+ )
+ # Jinja2 squashes the final newline, so we explicitly add it
+ fd.write("\n")
diff --git a/third_party/python/glean_parser/glean_parser/metrics.py b/third_party/python/glean_parser/glean_parser/metrics.py
new file mode 100644
index 0000000000..8fcb96e3fb
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/metrics.py
@@ -0,0 +1,356 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Classes for each of the high-level metric types.
+"""
+
+import enum
+from typing import Any, Dict, List, Optional, Type, Union # noqa
+
+
+from . import pings
+from . import util
+
+
+# Important: if the values are ever changing here, make sure
+# to also fix mozilla/glean. Otherwise language bindings may
+# break there.
+class Lifetime(enum.Enum):
+ ping = 0
+ application = 1
+ user = 2
+
+
+class DataSensitivity(enum.Enum):
+ technical = 1
+ interaction = 2
+ web_activity = 3
+ highly_sensitive = 4
+
+
+class Metric:
+ typename: str = "ERROR"
+ glean_internal_metric_cat: str = "glean.internal.metrics"
+ metric_types: Dict[str, Any] = {}
+ default_store_names: List[str] = ["metrics"]
+
+ def __init__(
+ self,
+ type: str,
+ category: str,
+ name: str,
+ bugs: List[str],
+ description: str,
+ notification_emails: List[str],
+ expires: Any,
+ data_reviews: Optional[List[str]] = None,
+ version: int = 0,
+ disabled: bool = False,
+ lifetime: str = "ping",
+ send_in_pings: Optional[List[str]] = None,
+ unit: Optional[str] = None,
+ gecko_datapoint: str = "",
+ no_lint: Optional[List[str]] = None,
+ data_sensitivity: Optional[List[str]] = None,
+ _config: Optional[Dict[str, Any]] = None,
+ _validated: bool = False,
+ ):
+ # Avoid cyclical import
+ from . import parser
+
+ self.type = type
+ self.category = category
+ self.name = name
+ self.bugs = bugs
+ self.description = description
+ self.notification_emails = notification_emails
+ self.expires = expires
+ if data_reviews is None:
+ data_reviews = []
+ self.data_reviews = data_reviews
+ self.version = version
+ self.disabled = disabled
+ self.lifetime = getattr(Lifetime, lifetime)
+ if send_in_pings is None:
+ send_in_pings = ["default"]
+ self.send_in_pings = send_in_pings
+ if unit is not None:
+ self.unit = unit
+ self.gecko_datapoint = gecko_datapoint
+ if no_lint is None:
+ no_lint = []
+ self.no_lint = no_lint
+ if data_sensitivity is not None:
+ self.data_sensitivity = [
+ getattr(DataSensitivity, x) for x in data_sensitivity
+ ]
+
+ # _validated indicates whether this metric has already been jsonschema
+ # validated (but not any of the Python-level validation).
+ if not _validated:
+ data = {
+ "$schema": parser.METRICS_ID,
+ self.category: {self.name: self.serialize()},
+ } # type: Dict[str, util.JSONType]
+ for error in parser.validate(data):
+ raise ValueError(error)
+
+ # Store the config, but only after validation.
+ if _config is None:
+ _config = {}
+ self._config = _config
+
+ # Metrics in the special category "glean.internal.metrics" need to have
+ # an empty category string when identifying the metrics in the ping.
+ if self.category == Metric.glean_internal_metric_cat:
+ self.category = ""
+
+ def __init_subclass__(cls, **kwargs):
+ # Create a mapping of all of the subclasses of this class
+ if cls not in Metric.metric_types and hasattr(cls, "typename"):
+ Metric.metric_types[cls.typename] = cls
+ super().__init_subclass__(**kwargs)
+
+ @classmethod
+ def make_metric(
+ cls,
+ category: str,
+ name: str,
+ metric_info: Dict[str, util.JSONType],
+ config: Optional[Dict[str, Any]] = None,
+ validated: bool = False,
+ ):
+ """
+ Given a metric_info dictionary from metrics.yaml, return a metric
+ instance.
+
+ :param category: The category the metric lives in
+ :param name: The name of the metric
+ :param metric_info: A dictionary of the remaining metric parameters
+ :param config: A dictionary containing commandline configuration
+ parameters
+ :param validated: True if the metric has already gone through
+ jsonschema validation
+ :return: A new Metric instance.
+ """
+ if config is None:
+ config = {}
+
+ metric_type = metric_info["type"]
+ if not isinstance(metric_type, str):
+ raise TypeError(f"Unknown metric type {metric_type}")
+ return cls.metric_types[metric_type](
+ category=category,
+ name=name,
+ _validated=validated,
+ _config=config,
+ **metric_info,
+ )
+
+ def serialize(self) -> Dict[str, util.JSONType]:
+ """
+ Serialize the metric back to JSON object model.
+ """
+ d = self.__dict__.copy()
+ # Convert enum fields back to strings
+ for key, val in d.items():
+ if isinstance(val, enum.Enum):
+ d[key] = d[key].name
+ if isinstance(val, set):
+ d[key] = sorted(list(val))
+ if isinstance(val, list) and len(val) and isinstance(val[0], enum.Enum):
+ d[key] = [x.name for x in val]
+ del d["name"]
+ del d["category"]
+ return d
+
+ def identifier(self) -> str:
+ """
+ Create an identifier unique for this metric.
+ Generally, category.name; however, Glean internal
+ metrics only use name.
+ """
+ if not self.category:
+ return self.name
+ return ".".join((self.category, self.name))
+
+ def is_disabled(self) -> bool:
+ return self.disabled or self.is_expired()
+
+ def is_expired(self) -> bool:
+ return self._config.get("custom_is_expired", util.is_expired)(self.expires)
+
+ def validate_expires(self):
+ return self._config.get("custom_validate_expires", util.validate_expires)(
+ self.expires
+ )
+
+ def is_internal_metric(self) -> bool:
+ return self.category in (Metric.glean_internal_metric_cat, "")
+
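+# Illustrative sketch (all values below are made up): building a metric from a
+# metrics.yaml-style dictionary. Passing validated=True skips the jsonschema
+# validation step.
+#
+#   metric = Metric.make_metric(
+#       "browser.search",
+#       "ad_clicks",
+#       {
+#           "type": "counter",
+#           "description": "Number of clicks on ads.",
+#           "bugs": ["https://bugzilla.mozilla.org/show_bug.cgi?id=1234567"],
+#           "notification_emails": ["nobody@example.com"],
+#           "expires": "never",
+#       },
+#       validated=True,
+#   )
+#   metric.identifier()  # -> "browser.search.ad_clicks"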
+
+class Boolean(Metric):
+ typename = "boolean"
+
+
+class String(Metric):
+ typename = "string"
+
+
+class StringList(Metric):
+ typename = "string_list"
+
+
+class Counter(Metric):
+ typename = "counter"
+
+
+class Quantity(Metric):
+ typename = "quantity"
+
+
+class TimeUnit(enum.Enum):
+ nanosecond = 0
+ microsecond = 1
+ millisecond = 2
+ second = 3
+ minute = 4
+ hour = 5
+ day = 6
+
+
+class TimeBase(Metric):
+ def __init__(self, *args, **kwargs):
+ self.time_unit = getattr(TimeUnit, kwargs.pop("time_unit", "millisecond"))
+ super().__init__(*args, **kwargs)
+
+
+class Timespan(TimeBase):
+ typename = "timespan"
+
+
+class TimingDistribution(TimeBase):
+ typename = "timing_distribution"
+
+ def __init__(self, *args, **kwargs):
+ self.time_unit = getattr(TimeUnit, kwargs.pop("time_unit", "nanosecond"))
+ Metric.__init__(self, *args, **kwargs)
+
+
+class MemoryUnit(enum.Enum):
+ byte = 0
+ kilobyte = 1
+ megabyte = 2
+ gigabyte = 3
+
+
+class MemoryDistribution(Metric):
+ typename = "memory_distribution"
+
+ def __init__(self, *args, **kwargs):
+ self.memory_unit = getattr(MemoryUnit, kwargs.pop("memory_unit", "byte"))
+ super().__init__(*args, **kwargs)
+
+
+class HistogramType(enum.Enum):
+ linear = 0
+ exponential = 1
+
+
+class CustomDistribution(Metric):
+ typename = "custom_distribution"
+
+ def __init__(self, *args, **kwargs):
+ self.range_min = kwargs.pop("range_min", 1)
+ self.range_max = kwargs.pop("range_max")
+ self.bucket_count = kwargs.pop("bucket_count")
+ self.histogram_type = getattr(
+ HistogramType, kwargs.pop("histogram_type", "exponential")
+ )
+ super().__init__(*args, **kwargs)
+
+
+class Datetime(TimeBase):
+ typename = "datetime"
+
+
+class Event(Metric):
+ typename = "event"
+
+ default_store_names = ["events"]
+
+ _generate_enums = [("allowed_extra_keys", "Keys")]
+
+ def __init__(self, *args, **kwargs):
+ self.extra_keys = kwargs.pop("extra_keys", {})
+ self.validate_extra_keys(self.extra_keys, kwargs.get("_config", {}))
+ super().__init__(*args, **kwargs)
+
+ @property
+ def allowed_extra_keys(self):
+ # Sort keys so that output is deterministic
+ return sorted(list(self.extra_keys.keys()))
+
+ @staticmethod
+ def validate_extra_keys(extra_keys: Dict[str, str], config: Dict[str, Any]) -> None:
+ if not config.get("allow_reserved") and any(
+ k.startswith("glean.") for k in extra_keys.keys()
+ ):
+ raise ValueError(
+ "Extra keys beginning with 'glean.' are reserved for "
+ "Glean internal use."
+ )
+
+
+class Uuid(Metric):
+ typename = "uuid"
+
+
+class Jwe(Metric):
+ typename = "jwe"
+
+ def __init__(self, *args, **kwargs):
+ self.decrypted_name = kwargs.pop("decrypted_name")
+ super().__init__(*args, **kwargs)
+
+
+class Labeled(Metric):
+ labeled = True
+
+ def __init__(self, *args, **kwargs):
+ labels = kwargs.pop("labels", None)
+ if labels is not None:
+ self.ordered_labels = labels
+ self.labels = set(labels)
+ else:
+ self.ordered_labels = None
+ self.labels = None
+ super().__init__(*args, **kwargs)
+
+ def serialize(self) -> Dict[str, util.JSONType]:
+ """
+ Serialize the metric back to JSON object model.
+ """
+ d = super().serialize()
+ d["labels"] = self.ordered_labels
+ del d["ordered_labels"]
+ return d
+
+
+class LabeledBoolean(Labeled, Boolean):
+ typename = "labeled_boolean"
+
+
+class LabeledString(Labeled, String):
+ typename = "labeled_string"
+
+
+class LabeledCounter(Labeled, Counter):
+ typename = "labeled_counter"
+
+
+ObjectTree = Dict[str, Dict[str, Union[Metric, pings.Ping]]]
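+
+# Illustrative shape of an ObjectTree (category name -> object name -> object):
+#
+#   {
+#       "browser.search": {"ad_clicks": <Counter>},
+#       "pings": {"search": <Ping>},
+#   }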
diff --git a/third_party/python/glean_parser/glean_parser/parser.py b/third_party/python/glean_parser/glean_parser/parser.py
new file mode 100644
index 0000000000..dc6caff907
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/parser.py
@@ -0,0 +1,373 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Code for parsing metrics.yaml files.
+"""
+
+from collections import OrderedDict
+import functools
+from pathlib import Path
+import textwrap
+from typing import Any, Dict, Generator, Iterable, Optional, Tuple, Union
+
+import jsonschema # type: ignore
+from jsonschema.exceptions import ValidationError # type: ignore
+
+from .metrics import Metric, ObjectTree
+from .pings import Ping, RESERVED_PING_NAMES
+from . import util
+
+
+ROOT_DIR = Path(__file__).parent
+SCHEMAS_DIR = ROOT_DIR / "schemas"
+
+METRICS_ID = "moz://mozilla.org/schemas/glean/metrics/1-0-0"
+PINGS_ID = "moz://mozilla.org/schemas/glean/pings/1-0-0"
+
+FILE_TYPES = {METRICS_ID: "metrics", PINGS_ID: "pings"}
+
+
+def _update_validator(validator):
+ """
+ Adds some custom validators to the jsonschema validator that produce
+ nicer error messages.
+ """
+
+ def required(validator, required, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+ missing_properties = set(
+ property for property in required if property not in instance
+ )
+ if len(missing_properties):
+ missing_properties = sorted(list(missing_properties))
+ yield ValidationError(
+ f"Missing required properties: {', '.join(missing_properties)}"
+ )
+
+ validator.VALIDATORS["required"] = required
+
+
+def _load_file(
+ filepath: Path, parser_config: Dict[str, Any]
+) -> Generator[str, None, Tuple[Dict[str, util.JSONType], Optional[str]]]:
+ """
+ Load a metrics.yaml or pings.yaml format file.
+
+ If the `filepath` does not exist, raises `FileNotFoundError`, unless
+ `parser_config["allow_missing_files"]` is `True`.
+ """
+ try:
+ content = util.load_yaml_or_json(filepath, ordered_dict=True)
+ except FileNotFoundError:
+ if not parser_config.get("allow_missing_files", False):
+ raise
+ else:
+ return {}, None
+ except Exception as e:
+ yield util.format_error(filepath, "", textwrap.fill(str(e)))
+ return {}, None
+
+ if content is None:
+ yield util.format_error(filepath, "", f"'{filepath}' file can not be empty.")
+ return {}, None
+
+ if not isinstance(content, dict):
+ return {}, None
+
+ if content == {}:
+ return {}, None
+
+ schema_key = content.get("$schema")
+ if not isinstance(schema_key, str):
+ raise TypeError(f"Invalid schema key {schema_key}")
+
+ filetype = FILE_TYPES.get(schema_key)
+
+ for error in validate(content, filepath):
+ content = {}
+ yield error
+
+ return content, filetype
+
+
+@functools.lru_cache(maxsize=1)
+def _load_schemas() -> Dict[str, Tuple[Any, Any]]:
+ """
+ Load all of the known schemas from disk, and put them in a map based on the
+ schema's $id.
+ """
+ schemas = {}
+ for schema_path in SCHEMAS_DIR.glob("*.yaml"):
+ schema = util.load_yaml_or_json(schema_path)
+ resolver = util.get_null_resolver(schema)
+ validator_class = jsonschema.validators.validator_for(schema)
+ _update_validator(validator_class)
+ validator_class.check_schema(schema)
+ validator = validator_class(schema, resolver=resolver)
+ schemas[schema["$id"]] = (schema, validator)
+ return schemas
+
+
+def _get_schema(
+ schema_id: str, filepath: Union[str, Path] = "<input>"
+) -> Tuple[Any, Any]:
+ """
+ Get the schema for the given schema $id.
+ """
+ schemas = _load_schemas()
+ if schema_id not in schemas:
+ raise ValueError(
+ util.format_error(
+ filepath,
+ "",
+ f"$schema key must be one of {', '.join(schemas.keys())}",
+ )
+ )
+ return schemas[schema_id]
+
+
+def _get_schema_for_content(
+ content: Dict[str, util.JSONType], filepath: Union[str, Path]
+) -> Tuple[Any, Any]:
+ """
+ Get the appropriate schema for the given JSON content.
+ """
+ schema_url = content.get("$schema")
+ if not isinstance(schema_url, str):
+ raise TypeError("Invalid $schema type {schema_url}")
+ return _get_schema(schema_url, filepath)
+
+
+def get_parameter_doc(key: str) -> str:
+ """
+ Returns documentation about a specific metric parameter.
+ """
+ schema, _ = _get_schema(METRICS_ID)
+ return schema["definitions"]["metric"]["properties"][key]["description"]
+
+
+def get_ping_parameter_doc(key: str) -> str:
+ """
+ Returns documentation about a specific ping parameter.
+ """
+ schema, _ = _get_schema(PINGS_ID)
+ return schema["additionalProperties"]["properties"][key]["description"]
+
+
+def validate(
+ content: Dict[str, util.JSONType], filepath: Union[str, Path] = "<input>"
+) -> Generator[str, None, None]:
+ """
+ Validate the given content against the appropriate schema.
+ """
+ try:
+ schema, validator = _get_schema_for_content(content, filepath)
+ except ValueError as e:
+ yield str(e)
+ else:
+ yield from (
+ util.format_error(filepath, "", util.pprint_validation_error(e))
+ for e in validator.iter_errors(content)
+ )
+
+
+def _instantiate_metrics(
+ all_objects: ObjectTree,
+ sources: Dict[Any, Path],
+ content: Dict[str, util.JSONType],
+ filepath: Path,
+ config: Dict[str, Any],
+) -> Generator[str, None, None]:
+ """
+ Load a list of metrics.yaml files, convert the JSON information into Metric
+ objects, and merge them into a single tree.
+ """
+ global_no_lint = content.get("no_lint", [])
+
+ for category_key, category_val in content.items():
+ if category_key.startswith("$"):
+ continue
+ if category_key == "no_lint":
+ continue
+ if not config.get("allow_reserved") and category_key.split(".")[0] == "glean":
+ yield util.format_error(
+ filepath,
+ f"For category '{category_key}'",
+ "Categories beginning with 'glean' are reserved for "
+ "Glean internal use.",
+ )
+ continue
+ all_objects.setdefault(category_key, OrderedDict())
+
+ if not isinstance(category_val, dict):
+ raise TypeError(f"Invalid content for {category_key}")
+
+ for metric_key, metric_val in category_val.items():
+ try:
+ metric_obj = Metric.make_metric(
+ category_key, metric_key, metric_val, validated=True, config=config
+ )
+ except Exception as e:
+ yield util.format_error(
+ filepath,
+ f"On instance {category_key}.{metric_key}",
+ str(e),
+ )
+ metric_obj = None
+ else:
+ if (
+ not config.get("allow_reserved")
+ and "all-pings" in metric_obj.send_in_pings
+ ):
+ yield util.format_error(
+ filepath,
+ f"On instance {category_key}.{metric_key}",
+ 'Only internal metrics may specify "all-pings" '
+ 'in "send_in_pings"',
+ )
+ metric_obj = None
+
+ if metric_obj is not None:
+ metric_obj.no_lint = list(set(metric_obj.no_lint + global_no_lint))
+
+ already_seen = sources.get((category_key, metric_key))
+ if already_seen is not None:
+ # We've seen this metric name already
+ yield util.format_error(
+ filepath,
+ "",
+ (
+ f"Duplicate metric name '{category_key}.{metric_key}' "
+ f"already defined in '{already_seen}'"
+ ),
+ )
+ else:
+ all_objects[category_key][metric_key] = metric_obj
+ sources[(category_key, metric_key)] = filepath
+
+
+def _instantiate_pings(
+ all_objects: ObjectTree,
+ sources: Dict[Any, Path],
+ content: Dict[str, util.JSONType],
+ filepath: Path,
+ config: Dict[str, Any],
+) -> Generator[str, None, None]:
+ """
+ Load a list of pings.yaml files, convert the JSON information into Ping
+ objects.
+ """
+ for ping_key, ping_val in content.items():
+ if ping_key.startswith("$"):
+ continue
+ if not config.get("allow_reserved"):
+ if ping_key in RESERVED_PING_NAMES:
+ yield util.format_error(
+ filepath,
+ f"For ping '{ping_key}'",
+ f"Ping uses a reserved name ({RESERVED_PING_NAMES})",
+ )
+ continue
+ if not isinstance(ping_val, dict):
+ raise TypeError(f"Invalid content for ping {ping_key}")
+ ping_val["name"] = ping_key
+ try:
+ ping_obj = Ping(**ping_val)
+ except Exception as e:
+ yield util.format_error(filepath, f"On instance '{ping_key}'", str(e))
+ continue
+
+ already_seen = sources.get(ping_key)
+ if already_seen is not None:
+ # We've seen this ping name already
+ yield util.format_error(
+ filepath,
+ "",
+ f"Duplicate ping name '{ping_key}' "
+ f"already defined in '{already_seen}'",
+ )
+ else:
+ all_objects.setdefault("pings", {})[ping_key] = ping_obj
+ sources[ping_key] = filepath
+
+
+def _preprocess_objects(objs: ObjectTree, config: Dict[str, Any]) -> ObjectTree:
+ """
+ Preprocess the object tree to better set defaults.
+ """
+ for category in objs.values():
+ for obj in category.values():
+ if not isinstance(obj, Metric):
+ continue
+
+ if not config.get("do_not_disable_expired", False) and hasattr(
+ obj, "is_disabled"
+ ):
+ obj.disabled = obj.is_disabled()
+
+ if hasattr(obj, "send_in_pings"):
+ if "default" in obj.send_in_pings:
+ obj.send_in_pings = obj.default_store_names + [
+ x for x in obj.send_in_pings if x != "default"
+ ]
+ obj.send_in_pings = sorted(list(set(obj.send_in_pings)))
+ return objs
+
+
+@util.keep_value
+def parse_objects(
+ filepaths: Iterable[Path], config: Optional[Dict[str, Any]] = None
+) -> Generator[str, None, ObjectTree]:
+ """
+ Parse one or more metrics.yaml and/or pings.yaml files, returning a tree of
+ `metrics.Metric` and `pings.Ping` instances.
+
+ The result is a generator over any errors. If there are no errors, the
+ actual metrics can be obtained from `result.value`. For example::
+
+ result = parser.parse_objects(filepaths)
+ for err in result:
+ print(err)
+ all_metrics = result.value
+
+ The result value is a dictionary of category names to categories, where
+ each category is a dictionary from metric name to `metrics.Metric`
+ instances. There is also the special category `pings` containing all
+ of the `pings.Ping` instances.
+
+ :param filepaths: list of Path objects to metrics.yaml and/or pings.yaml
+ files
+ :param config: A dictionary of options that change parsing behavior.
+ Supported keys are:
+
+ - `allow_reserved`: Allow values reserved for internal Glean use.
+ - `do_not_disable_expired`: Don't mark expired metrics as disabled.
+ This is useful when you want to retain the original "disabled"
+ value from the `metrics.yaml`, rather than having it overridden when
+ the metric expires.
+ - `allow_missing_files`: Do not raise a `FileNotFoundError` if any of
+ the input `filepaths` do not exist.
+ """
+ if config is None:
+ config = {}
+
+ all_objects: ObjectTree = OrderedDict()
+ sources: Dict[Any, Path] = {}
+ filepaths = util.ensure_list(filepaths)
+ for filepath in filepaths:
+ content, filetype = yield from _load_file(filepath, config)
+ if filetype == "metrics":
+ yield from _instantiate_metrics(
+ all_objects, sources, content, filepath, config
+ )
+ elif filetype == "pings":
+ yield from _instantiate_pings(
+ all_objects, sources, content, filepath, config
+ )
+
+ return _preprocess_objects(all_objects, config)
diff --git a/third_party/python/glean_parser/glean_parser/pings.py b/third_party/python/glean_parser/glean_parser/pings.py
new file mode 100644
index 0000000000..cff2f48edb
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/pings.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Classes for managing the description of pings.
+"""
+
+from typing import Dict, List, Optional
+
+
+from . import util
+
+
+RESERVED_PING_NAMES = ["baseline", "metrics", "events", "deletion-request"]
+
+
+class Ping:
+ def __init__(
+ self,
+ name: str,
+ description: str,
+ bugs: List[str],
+ notification_emails: List[str],
+ data_reviews: Optional[List[str]] = None,
+ include_client_id: bool = False,
+ send_if_empty: bool = False,
+ reasons: Optional[Dict[str, str]] = None,
+ _validated: bool = False,
+ ):
+ # Avoid cyclical import
+ from . import parser
+
+ self.name = name
+ self.description = description
+ self.bugs = bugs
+ self.notification_emails = notification_emails
+ if data_reviews is None:
+ data_reviews = []
+ self.data_reviews = data_reviews
+ self.include_client_id = include_client_id
+ self.send_if_empty = send_if_empty
+ if reasons is None:
+ reasons = {}
+ self.reasons = reasons
+
+ # _validated indicates whether this ping has already been jsonschema
+ # validated (but not any of the Python-level validation).
+ if not _validated:
+ data: Dict[str, util.JSONType] = {
+ "$schema": parser.PINGS_ID,
+ self.name: self.serialize(),
+ }
+ for error in parser.validate(data):
+ raise ValueError(error)
+
+ _generate_enums = [("reason_codes", "ReasonCodes")]
+
+ @property
+ def type(self) -> str:
+ return "ping"
+
+ @property
+ def reason_codes(self) -> List[str]:
+ return sorted(list(self.reasons.keys()))
+
+ def serialize(self) -> Dict[str, util.JSONType]:
+ """
+ Serialize the ping back to JSON object model.
+ """
+ d = self.__dict__.copy()
+ del d["name"]
+ return d
+
+ def identifier(self) -> str:
+ """
+ Used for the "generated from ..." comment in the output.
+ """
+ return self.name
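+
+# Illustrative only (all values are made up): instantiating a custom ping the
+# way the parser does from pings.yaml; _validated=True skips jsonschema
+# validation.
+#
+#   ping = Ping(
+#       name="search",
+#       description="Sent when the user performs a search.",
+#       bugs=["https://bugzilla.mozilla.org/show_bug.cgi?id=1234567"],
+#       notification_emails=["nobody@example.com"],
+#       reasons={"startup": "The application started."},
+#       _validated=True,
+#   )
+#   ping.reason_codes  # -> ["startup"]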
diff --git a/third_party/python/glean_parser/glean_parser/schemas/metrics.1-0-0.schema.yaml b/third_party/python/glean_parser/glean_parser/schemas/metrics.1-0-0.schema.yaml
new file mode 100644
index 0000000000..fd789a62b7
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/schemas/metrics.1-0-0.schema.yaml
@@ -0,0 +1,600 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+---
+$schema: http://json-schema.org/draft-07/schema#
+title: Metrics
+description: |
+ Schema for the metrics.yaml files for Mozilla's Glean telemetry SDK.
+
+ The top-level of the `metrics.yaml` file has a key defining each category of
+ metrics. Categories must be snake_case, and they may also have dots `.` to
+ define subcategories.
+
+$id: moz://mozilla.org/schemas/glean/metrics/1-0-0
+
+definitions:
+ token:
+ type: string
+ pattern: "^[A-Za-z_][A-Za-z0-9_\\.]*$"
+
+ snake_case:
+ type: string
+ pattern: "^[a-z_][a-z0-9_]*$"
+
+ dotted_snake_case:
+ type: string
+ pattern: "^[a-z_][a-z0-9_]{0,29}(\\.[a-z_][a-z0-9_]{0,29})*$"
+ maxLength: 40
+
+ kebab_case:
+ type: string
+ # Bug 1601270; we allow 3 specific existing snake_cased ping names for now,
+ # but these special cases can be removed once the number of legacy clients
+ # sufficiently dwindles, likely in 2020H2.
+ pattern: "^[a-z][a-z0-9-]{0,29}$\
+ |^deletion_request$|^bookmarks_sync$|^history_sync$|^session_end$|^all_pings$|^glean_.*$"
+
+ long_id:
+ allOf:
+ - $ref: "#/definitions/snake_case"
+ - maxLength: 40
+
+ short_id:
+ allOf:
+ - $ref: "#/definitions/snake_case"
+ - maxLength: 30
+
+ labeled_metric_id:
+ type: string
+ pattern: "^[a-z_][a-z0-9_-]{0,29}(\\.[a-z_][a-z0-9_-]{0,29})*$"
+ maxLength: 71 # Note: this should be category + metric + 1
+
+ metric:
+ description: |
+ Describes a single metric.
+
+ See https://mozilla.github.io/glean_parser/metrics-yaml.html
+
+ type: object
+
+ additionalProperties: false
+
+ properties:
+ type:
+ title: Metric type
+ description: |
+ **Required.**
+
+ Specifies the type of a metric, like "counter" or "event". This
+ defines which operations are valid for the metric, how it is stored
+ and how data analysis tooling displays it.
+
+ The supported types are:
+ - `event`: Record a specific event (with optional metadata).
+ Additional properties: `extra_keys`.
+
+ - `boolean`: A metric storing values of true or false.
+
+ - `string`: A metric storing Unicode string values.
+
+ - `string_list`: a list of Unicode strings.
+
+ - `counter`: A numeric value that can only be incremented.
+
+ - `quantity`: A numeric value that is set directly.
+
+ - `timespan`: Represents a time interval. Additional properties:
+ `time_unit`.
+
+ - `timing_distribution`: Record the distribution of multiple
+ timings. Additional properties: `time_unit`.
+
+ - `datetime`: A date/time value. Represented as an ISO datetime in
+ UTC. Additional properties: `time_unit`.
+
+ - `uuid`: Record a UUID v4.
+
+ - `jwe`: Record a [JWE](https://tools.ietf.org/html/rfc7516) value.
+
+ - `memory_distribution`: A histogram for recording memory usage
+ values. Additional properties: `memory_unit`.
+
+ - `custom_distribution`: A histogram with a custom range and number
+ of buckets. This metric type is for legacy support only and is
+ only allowed for metrics coming from GeckoView. Additional
+ properties: `range_min`, `range_max`, `bucket_count`,
+ `histogram_type`.
+
+ - Additionally, labeled versions of many metric types are supported.
+ These support the `labels`_ parameter, allowing multiple instances
+ of the metric to be stored at a given set of labels. The labeled
+ metric types include:
+
+ `labeled_boolean`, `labeled_string`, `labeled_counter`.
+
+ type: string
+ enum:
+ - event
+ - boolean
+ - string
+ - string_list
+ - counter
+ - quantity
+ - timespan
+ - timing_distribution
+ - custom_distribution
+ - memory_distribution
+ - datetime
+ - uuid
+ - jwe
+ - labeled_boolean
+ - labeled_string
+ - labeled_counter
+
+ description:
+ title: Description
+ description: |
+ **Required.**
+
+ A textual description of what this metric does, what it means, and its
+ edge cases or any other helpful information.
+
+ Descriptions may contain [markdown
+ syntax](https://www.markdownguide.org/basic-syntax/).
+ type: string
+
+ lifetime:
+ title: Lifetime
+ description: |
+ Defines the lifetime of the metric. It must be one of the following
+ values:
+
+ - `ping` (default): The metric is reset each time it is sent in a
+ ping.
+
+ - `user`: The metric contains a property that is part of the user's
+ profile and is never reset.
+
+ - `application`: The metric contains a property that is related to the
+ application, and is reset only at application restarts.
+ enum:
+ - ping
+ - user
+ - application
+ default: ping
+
+ send_in_pings:
+ title: Send in pings
+ description: |
+ Which pings the metric should be sent on. If not specified, the metric
+ is sent on the "default ping", which is the `events` ping for events,
+ and the `metrics` ping for everything else. Most metrics don't need to
+ specify this.
+
+ (There is an additional special value of `all-pings` for internal
+ Glean metrics only that is used to indicate that a metric may appear
+ in any ping.)
+ type: array
+ items:
+ $ref: "#/definitions/kebab_case"
+ default:
+ - default
+
+ notification_emails:
+ title: Notification emails
+ description: |
+ **Required.**
+
+ A list of email addresses to notify for important events with the
+ metric or when people with context or ownership for the metric need to
+ be contacted.
+ type: array
+ minItems: 1
+ items:
+ type: string
+ format: email
+
+ bugs:
+ title: Related bugs
+ description: |
+ **Required.**
+
+ A list of bug URLs (e.g. Bugzilla and Github) that are relevant to
+ this metric, e.g., tracking its original implementation or later
+ changes to it.
+
+ Using bug numbers alone is deprecated and will be an error in the
+ future. Each entry should be a full URL to the bug in its tracker.
+ type: array
+ minItems: 1
+ items:
+ anyOf:
+ - type: integer
+ - type: string
+ format: uri
+
+ data_reviews:
+ title: Review references
+ description: |
+ **Required.**
+
+ A list of URIs to any data collection reviews relevant to the metric.
+ type: array
+ items:
+ type: string
+ format: uri
+
+ disabled:
+ title: Disabled
+ description: |
+ If `true`, the metric is disabled, and any metric collection on it
+ will be silently ignored at runtime.
+ type: boolean
+ default: false
+
+ expires:
+ title: Expires
+ description: |
+ **Required.**
+
+ By default it may be one of the following values:
+ - `<build date>`: An ISO date `yyyy-mm-dd` in UTC on which the
+ metric expires. For example, `2019-03-13`. This date is checked at
+ build time. Except in special cases, this form should be used so
+ that the metric automatically "sunsets" after a period of time.
+ - `never`: This metric never expires.
+ - `expired`: This metric is manually expired.
+
+ The default may be overridden in certain applications by the
+ `custom_validate_expires` and `custom_is_expired` configs.
+ type: string
+
+ version:
+ title: Metric version
+ description: |
+ The version of the metric. A monotonically increasing value. If not
+ provided, defaults to 0.
+
+ time_unit:
+ title: Time unit
+ description: |
+ For timespans and datetimes, specifies the unit that the metric will
+ be stored and displayed in. If not provided, it defaults to
+ "millisecond". Time values are sent to the backend as integers, so
+ `time_unit`_ determines the maximum resolution at which timespans are
+ recorded. Times are always truncated, not rounded, to the nearest time
+ unit. For example, a measurement of 25 ns will be returned as 0 ms if
+ `time_unit` is `"millisecond"`.
+
+ For timing distributions, times are always recorded and sent in
+ nanoseconds, but `time_unit` controls the minimum and maximum values.
+ If not provided, it defaults to "nanosecond".
+
+ - nanosecond: 1ns <= x <= 10 minutes
+ - microsecond: 1μs <= x <= ~6.94 days
+ - millisecond: 1ms <= x <= ~19 years
+
+ Valid when `type`_ is `timespan`, `timing_distribution` or `datetime`.
+ enum:
+ - nanosecond
+ - microsecond
+ - millisecond
+ - second
+ - minute
+ - hour
+ - day
+
+ memory_unit:
+ title: Memory unit
+ description: |
+ The unit that the incoming memory size values are recorded in.
+
+ The units are the power-of-2 units, so "kilobyte" is correctly a
+ "kibibyte".
+
+ - kilobyte == 2^10 == 1,024 bytes
+ - megabyte == 2^20 == 1,048,576 bytes
+ - gigabyte == 2^30 == 1,073,741,824 bytes
+
+ Values are automatically converted to and transmitted as bytes.
+
+ Valid when `type`_ is `memory_distribution`.
+ enum:
+ - byte
+ - kilobyte
+ - megabyte
+ - gigabyte
+
+ labels:
+ title: Labels
+ description: |
+ A list of labels for a labeled metric. If provided, the labels are
+ enforced at run time, and recording to an unknown label is recorded
+ to the special label `__other__`. If not provided, the labels
+ may be anything, but using too many unique labels will put some
+ labels in the special label `__other__`.
+
+ Valid with any of the labeled metric types.
+ anyOf:
+ - type: array
+ uniqueItems: true
+ items:
+ $ref: "#/definitions/labeled_metric_id"
+ maxItems: 16
+ - type: "null"
+
+ extra_keys:
+ title: Extra keys
+ description: |
+ The acceptable keys on the "extra" object sent with events. This is an
+ object mapping the key to an object containing metadata about the key.
+ A maximum of 10 extra keys is allowed.
+ This metadata object has the following keys:
+
+ - `description`: **Required.** A description of the key.
+
+ Valid when `type`_ is `event`.
+ type: object
+ propertyNames:
+ $ref: "#/definitions/dotted_snake_case"
+ additionalProperties:
+ type: object
+ properties:
+ description:
+ type: string
+ required:
+ - description
+ maxProperties: 10
+ default: {}
+
+ gecko_datapoint:
+ title: Gecko Datapoint
+ description: |
+ This is a Gecko-specific property. It is the name of the Gecko metric
+ to accumulate the data from, when using the Glean SDK in a product
+ using GeckoView. See bug 1566356 for more context.
+
+ type: string
+
+ range_min:
+ title: Range minimum
+ description: |
+ The minimum value of a custom distribution.
+
+ Valid when `type`_ is `custom_distribution`.
+ type: number
+ default: 1
+
+ range_max:
+ title: Range maximum
+ description: |
+ The maximum value of a custom distribution.
+
+ Required when `type`_ is `custom_distribution`.
+ type: number
+
+ bucket_count:
+ title: Bucket count
+ description: |
+ The number of buckets to include in a custom distribution.
+
+ Required when `type`_ is `custom_distribution`.
+ type: number
+ minimum: 1
+ maximum: 100
+
+ histogram_type:
+ title: Histogram type
+ description: |
+ The type of histogram bucketing to use:
+ - `linear`: The buckets are linearly spaced within the range.
+        - `exponential`: The buckets use the natural logarithm, so the
+ smaller-valued buckets are smaller in size than the higher-valued
+ buckets.
+
+ Required when `type`_ is `custom_distribution`.
+ enum:
+ - linear
+ - exponential
+
+ unit:
+ title: Unit
+ description: |
+ The unit of the metric, for metrics that don't already require a
+ meaningful unit, such as `time_unit`.
+ This is provided for informational purposes only and doesn't have any
+ effect on data collection.
+ type: string
+
+ no_lint:
+ title: Lint checks to skip
+ description: |
+ This parameter lists any lint checks to skip for this metric only.
+ type: array
+ items:
+ type: string
+
+ decrypted_name:
+ title: Decrypted name
+ description: |
+      Name of the column in which to persist the decrypted value
+      stored in the JWE after processing.
+
+ Required when `type`_ is `jwe`.
+ type: string
+ pattern: "^[a-z_][a-z0-9_]{0,29}(\\.[a-z_][a-z0-9_]{0,29})*$"
+
+ data_sensitivity:
+ title: The level of data sensitivity
+ description: |
+ There are four data collection categories related to data sensitivity
+ [defined here](https://wiki.mozilla.org/Firefox/Data_Collection):
+
+ - **Category 1: Technical Data:** (`technical`) Information about the
+ machine or Firefox itself. Examples include OS, available memory,
+ crashes and errors, outcome of automated processes like updates,
+ safebrowsing, activation, version \#s, and buildid. This also
+ includes compatibility information about features and APIs used by
+ websites, addons, and other 3rd-party software that interact with
+ Firefox during usage.
+
+ - **Category 2: Interaction Data:** (`interaction`) Information about
+ the user’s direct engagement with Firefox. Examples include how many
+ tabs, addons, or windows a user has open; uses of specific Firefox
+ features; session length, scrolls and clicks; and the status of
+ discrete user preferences.
+
+ - **Category 3: Web activity data:** (`web_activity`) Information
+ about user web browsing that could be considered sensitive. Examples
+ include users’ specific web browsing history; general information
+ about their web browsing history (such as TLDs or categories of
+ webpages visited over time); and potentially certain types of
+ interaction data about specific webpages visited.
+
+ - **Category 4: Highly sensitive data:** (`highly_sensitive`)
+ Information that directly identifies a person, or if combined with
+ other data could identify a person. Examples include e-mail,
+ usernames, identifiers such as google ad id, apple id, fxaccount,
+ city or country (unless small ones are explicitly filtered out), or
+ certain cookies. It may be embedded within specific website content,
+ such as memory contents, dumps, captures of screen data, or DOM
+ data.
+ type: array
+ items:
+ enum:
+ - technical
+ - interaction
+ - web_activity
+ - highly_sensitive
+ type: string
+ minLength: 1
+ uniqueItems: true
+
+ required:
+ - type
+ - bugs
+ - description
+ - notification_emails
+ - data_reviews
+ - expires
+
+type: object
+
+propertyNames:
+ anyOf:
+ - allOf:
+ - $ref: "#/definitions/dotted_snake_case"
+ - not:
+ description: "'pings' is reserved as a category name."
+ const: pings
+ - enum: ['$schema']
+
+properties:
+ $schema:
+ type: string
+ format: url
+
+ no_lint:
+ title: Lint checks to skip globally
+ description: |
+ This parameter lists any lint checks to skip for this whole file.
+ type: array
+ items:
+ type: string
+
+additionalProperties:
+ type: object
+ propertyNames:
+ anyOf:
+ - $ref: "#/definitions/short_id"
+ additionalProperties:
+ allOf:
+ - $ref: "#/definitions/metric"
+ -
+ if:
+ properties:
+ type:
+ const: event
+ then:
+ properties:
+ lifetime:
+ description: |
+ Event metrics must have ping lifetime.
+ const: ping
+ - if:
+ not:
+ properties:
+ type:
+ enum:
+ - timing_distribution
+ - custom_distribution
+ - memory_distribution
+ - quantity
+ - boolean
+ - string
+ - labeled_counter
+ then:
+ properties:
+ gecko_datapoint:
+ description: |
+ `gecko_datapoint` is only allowed for `timing_distribution`,
+ `custom_distribution`, `memory_distribution`, `quantity`,
+ `boolean`, `string` and `labeled_counter`.
+ maxLength: 0
+ -
+ if:
+ properties:
+ type:
+ enum:
+ - custom_distribution
+ then:
+ required:
+ - gecko_datapoint
+ description: |
+ `custom_distribution` is only allowed for Gecko
+ metrics.
+ -
+ if:
+ properties:
+ type:
+ const: custom_distribution
+ then:
+ required:
+ - range_max
+ - bucket_count
+ - histogram_type
+ description: |
+ `custom_distribution` is missing required parameters `range_max`,
+ `bucket_count` and `histogram_type`.
+ -
+ if:
+ properties:
+ type:
+ const: memory_distribution
+ then:
+ required:
+ - memory_unit
+ description: |
+ `memory_distribution` is missing required parameter `memory_unit`.
+ -
+ if:
+ properties:
+ type:
+ const: quantity
+ then:
+ required:
+ - unit
+ description: |
+ `quantity` is missing required parameter `unit`.
+ -
+ if:
+ properties:
+ type:
+ const: jwe
+ then:
+ required:
+ - decrypted_name
+ description: |
+ `jwe` is missing required parameter `decrypted_name`.
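To make the if/then clauses above concrete, here is a sketch of a `custom_distribution` entry that satisfies them, written as the Python structure the YAML parses into. The category, metric name, URLs and schema file path are made up for illustration; the optional jsonschema check assumes the schema file is available locally:

```python
import yaml
import jsonschema

metrics = {
    "gfx_content": {                       # hypothetical category
        "checkerboard_severity": {         # hypothetical metric name
            "type": "custom_distribution",
            "description": "Severity of checkerboarding events.",
            "bugs": [1566356],
            "data_reviews": ["https://example.com/data-review"],
            "notification_emails": ["nobody@example.com"],
            "expires": "never",
            # Required by the conditional clauses above:
            "gecko_datapoint": "CHECKERBOARD_SEVERITY",
            "range_max": 100000,
            "bucket_count": 50,
            "histogram_type": "exponential",
        }
    }
}

with open("metrics.1-0-0.schema.yaml", encoding="utf-8") as fd:
    schema = yaml.safe_load(fd)
jsonschema.validate(metrics, schema)  # raises ValidationError if a clause fails
```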
diff --git a/third_party/python/glean_parser/glean_parser/schemas/pings.1-0-0.schema.yaml b/third_party/python/glean_parser/glean_parser/schemas/pings.1-0-0.schema.yaml
new file mode 100644
index 0000000000..a6a486ab0e
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/schemas/pings.1-0-0.schema.yaml
@@ -0,0 +1,141 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+---
+$schema: http://json-schema.org/draft-07/schema#
+title: Pings
+description: |
+ Schema for the pings.yaml files for Mozilla's Glean telemetry SDK.
+
+ The top-level of the `pings.yaml` file has a key defining the name of each
+ ping. The values contain metadata about that ping. Ping names must be
+ kebab-case per https://docs.telemetry.mozilla.org/cookbooks/new_ping.html
+
+$id: moz://mozilla.org/schemas/glean/pings/1-0-0
+
+definitions:
+ dotted_snake_case:
+ type: string
+ pattern: "^[a-z_][a-z0-9_]{0,29}(\\.[a-z_][a-z0-9_]{0,29})*$"
+ maxLength: 40
+ kebab_case:
+ type: string
+    # Bug 1601270; we allow a few specific existing snake_cased ping names for
+    # now, but these special cases can be removed once the number of legacy
+    # clients sufficiently dwindles, likely in 2020H2.
+ pattern: "^[a-z][a-z0-9-]{0,29}$\
+ |^deletion_request$|^bookmarks_sync$|^history_sync$|^session_end$|^all_pings$|^glean_.*$"
+
+type: object
+
+propertyNames:
+ allOf:
+ - anyOf:
+ - $ref: "#/definitions/kebab_case"
+ - enum: ['$schema']
+ - not:
+ enum: ['all-pings']
+
+properties:
+ $schema:
+ type: string
+ format: url
+
+additionalProperties:
+ type: object
+ properties:
+ description:
+ title: Description
+ description: |
+ **Required.**
+
+ A textual description of the purpose of this ping and what it contains.
+
+ Descriptions may contain [markdown
+ syntax](https://www.markdownguide.org/basic-syntax/).
+ type: string
+
+ include_client_id:
+ title: Include client id
+ description: |
+ **Required.**
+
+ When `true`, include the `client_id` value in the ping.
+ type: boolean
+
+ send_if_empty:
+ title: Send if empty
+ description: |
+ When `false` a ping is sent only if it contains data (the default).
+ When `true` a ping is sent even if it contains no data.
+ type: boolean
+
+ notification_emails:
+ title: Notification emails
+ description: |
+ **Required.**
+
+ A list of email addresses to notify for important events with the
+ ping or when people with context or ownership for the ping need to
+ be contacted.
+ type: array
+ minItems: 1
+ items:
+ type: string
+ format: email
+
+ bugs:
+ title: Related bugs
+ description: |
+ **Required.**
+
+ A list of bugs (e.g. Bugzilla and Github) that are relevant to this
+ ping, e.g., tracking its original implementation or later changes to
+ it.
+
+ If a number, it is an ID to an issue in the default tracker (e.g.
+ Mozilla's Bugzilla instance). If a string, it must be a URI to a bug
+ page in a tracker.
+ type: array
+ minItems: 1
+ items:
+ anyOf:
+ - type: integer
+ - type: string
+ format: uri
+
+ data_reviews:
+ title: Review references
+ description: |
+ **Required.**
+
+ A list of URIs to any data collection reviews relevant to the ping.
+ type: array
+ items:
+ type: string
+ format: uri
+
+ reasons:
+ title: The reasons this ping can be sent.
+ description: |
+ A list of reasons that the ping might be triggered. Sent in the ping's
+ `ping_info.reason` field.
+
+ Specified as a mapping from reason codes (which are short strings), to
+ a textual description of the reason.
+ type: object
+ propertyNames:
+ type: string
+ maxLength: 30
+ additionalProperties:
+ type: string
+
+ required:
+ - description
+ - include_client_id
+ - bugs
+ - notification_emails
+ - data_reviews
+
+ additionalProperties: false
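A small sketch checking candidate ping names against the `kebab_case` pattern defined above, including the legacy snake_case exceptions; the names are examples only:

```python
import re

KEBAB_CASE = re.compile(
    r"^[a-z][a-z0-9-]{0,29}$"
    r"|^deletion_request$|^bookmarks_sync$|^history_sync$"
    r"|^session_end$|^all_pings$|^glean_.*$"
)

assert KEBAB_CASE.match("baseline")
assert KEBAB_CASE.match("deletion_request")  # legacy exception from bug 1601270
assert not KEBAB_CASE.match("MyPing")        # not kebab-case
# Note: "all-pings" matches the pattern but is still rejected by the `not`
# clause in propertyNames above.
```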
diff --git a/third_party/python/glean_parser/glean_parser/swift.py b/third_party/python/glean_parser/glean_parser/swift.py
new file mode 100644
index 0000000000..22a48d9086
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/swift.py
@@ -0,0 +1,176 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Outputter to generate Swift code for metrics.
+"""
+
+import enum
+import json
+from pathlib import Path
+from typing import Any, Dict, Optional, Union
+
+from . import metrics
+from . import pings
+from . import util
+
+# An (incomplete) list of reserved keywords in Swift.
+# These will be replaced in generated code by their escaped form.
+SWIFT_RESERVED_NAMES = ["internal", "typealias"]
+
+
+def swift_datatypes_filter(value: util.JSONType) -> str:
+ """
+ A Jinja2 filter that renders Swift literals.
+
+ Based on Python's JSONEncoder, but overrides:
+ - dicts to use `[key: value]`
+ - sets to use `[...]`
+ - enums to use the like-named Swift enum
+ """
+
+ class SwiftEncoder(json.JSONEncoder):
+ def iterencode(self, value):
+ if isinstance(value, dict):
+ yield "["
+ first = True
+ for key, subvalue in value.items():
+ if not first:
+ yield ", "
+ yield from self.iterencode(key)
+ yield ": "
+ yield from self.iterencode(subvalue)
+ first = False
+ yield "]"
+ elif isinstance(value, enum.Enum):
+ yield ("." + util.camelize(value.name))
+ elif isinstance(value, set):
+ yield "["
+ first = True
+ for subvalue in sorted(list(value)):
+ if not first:
+ yield ", "
+ yield from self.iterencode(subvalue)
+ first = False
+ yield "]"
+ elif value is None:
+ yield "nil"
+ else:
+ yield from super().iterencode(value)
+
+ return "".join(SwiftEncoder().iterencode(value))
+
+
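For illustration only, the filter renders Python values as Swift literal text roughly as follows (expected results shown as comments):

```python
from glean_parser.swift import swift_datatypes_filter

swift_datatypes_filter({"category": "metrics"})  # -> '["category": "metrics"]'
swift_datatypes_filter(["baseline", "events"])   # -> '["baseline", "events"]'
swift_datatypes_filter(None)                     # -> 'nil'
```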
+def type_name(obj: Union[metrics.Metric, pings.Ping]) -> str:
+ """
+ Returns the Swift type to use for a given metric or ping object.
+ """
+ generate_enums = getattr(obj, "_generate_enums", [])
+ if len(generate_enums):
+ template_args = []
+ for member, suffix in generate_enums:
+ if len(getattr(obj, member)):
+ template_args.append(util.Camelize(obj.name) + suffix)
+ else:
+ if suffix == "Keys":
+ template_args.append("NoExtraKeys")
+ else:
+ template_args.append("No" + suffix)
+
+ return "{}<{}>".format(class_name(obj.type), ", ".join(template_args))
+
+ return class_name(obj.type)
+
+
+def class_name(obj_type: str) -> str:
+ """
+ Returns the Swift class name for a given metric or ping type.
+ """
+ if obj_type == "ping":
+ return "Ping"
+ if obj_type.startswith("labeled_"):
+ obj_type = obj_type[8:]
+ return util.Camelize(obj_type) + "MetricType"
+
+
+def variable_name(var: str) -> str:
+ """
+ Returns a valid Swift variable name, escaping keywords if necessary.
+ """
+ if var in SWIFT_RESERVED_NAMES:
+ return "`" + var + "`"
+ else:
+ return var
+
+
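Illustrative outputs of `class_name` and `variable_name`, assuming the module is importable as `glean_parser.swift`:

```python
from glean_parser.swift import class_name, variable_name

assert class_name("ping") == "Ping"
assert class_name("labeled_counter") == "CounterMetricType"
assert class_name("memory_distribution") == "MemoryDistributionMetricType"
assert variable_name("internal") == "`internal`"  # reserved word, escaped
assert variable_name("duration") == "duration"
```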
+class Category:
+ """
+ Data struct holding information about a metric to be used in the template.
+ """
+
+ name: str
+ objs: Dict[str, Union[metrics.Metric, pings.Ping]]
+ contains_pings: bool
+
+
+def output_swift(
+ objs: metrics.ObjectTree, output_dir: Path, options: Optional[Dict[str, Any]] = None
+) -> None:
+ """
+ Given a tree of objects, output Swift code to `output_dir`.
+
+    :param objs: A tree of objects (metrics and pings) as returned from
+ `parser.parse_objects`.
+ :param output_dir: Path to an output directory to write to.
+ :param options: options dictionary, with the following optional keys:
+ - namespace: The namespace to generate metrics in
+ - glean_namespace: The namespace to import Glean from
+ - allow_reserved: When True, this is a Glean-internal build
+ """
+ if options is None:
+ options = {}
+
+ template = util.get_jinja2_template(
+ "swift.jinja2",
+ filters=(
+ ("swift", swift_datatypes_filter),
+ ("type_name", type_name),
+ ("class_name", class_name),
+ ("variable_name", variable_name),
+ ),
+ )
+
+ namespace = options.get("namespace", "GleanMetrics")
+ glean_namespace = options.get("glean_namespace", "Glean")
+
+ filename = "Metrics.swift"
+ filepath = output_dir / filename
+ categories = []
+
+ for category_key, category_val in objs.items():
+ contains_pings = any(
+ isinstance(obj, pings.Ping) for obj in category_val.values()
+ )
+
+ cat = Category()
+ cat.name = category_key
+ cat.objs = category_val
+ cat.contains_pings = contains_pings
+
+ categories.append(cat)
+
+ with filepath.open("w", encoding="utf-8") as fd:
+ fd.write(
+ template.render(
+ categories=categories,
+ extra_args=util.extra_metric_args,
+ namespace=namespace,
+ glean_namespace=glean_namespace,
+ allow_reserved=options.get("allow_reserved", False),
+ )
+ )
+ # Jinja2 squashes the final newline, so we explicitly add it
+ fd.write("\n")
diff --git a/third_party/python/glean_parser/glean_parser/templates/csharp.jinja2 b/third_party/python/glean_parser/glean_parser/templates/csharp.jinja2
new file mode 100644
index 0000000000..9f5e67aeb9
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/templates/csharp.jinja2
@@ -0,0 +1,99 @@
+// -*- mode: csharp -*-
+
+/*
+ * AUTOGENERATED BY glean_parser. DO NOT EDIT.
+ */
+{# The rendered code is autogenerated, but this
+Jinja2 template is not. Please file bugs! #}
+
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+{% macro obj_declaration(obj, suffix='', access='', lazy=False) %}
+{{ access }} {% if lazy %} Lazy<{{ obj|type_name }}>{%- else %} {{ obj|type_name }}{% endif %} {{ obj.name|camelize }}{{ suffix }}
+{%- if lazy %} = new Lazy<{{ obj|type_name }}>(() => {%- else %} = // generated from {{ obj.identifier() }}{% endif %}
+
+ new {{ obj|type_name }}(
+ {% for arg_name in extra_args if obj[arg_name] is defined %}
+ {{ arg_name|camelize }}: {{ obj[arg_name]|csharp }}{{ "," if not loop.last }}
+ {% endfor %}
+ ){% if lazy %});{% else %};{% endif %}{% endmacro %}
+
+using System;
+using System.Collections.Generic;
+using {{ glean_namespace }}.Private;
+
+{# The C# metrics design requires the class name to have a 'Definition'
+suffix, so that the metrics can be referenced nicely from consumer code.
+The user code will be interested in the Value of the lazy instance, so
+that's where the real class name should be used. #}
+{% set metrics_class_name = category_name|Camelize + 'Definition' %}
+namespace {{ namespace }}
+{
+ internal sealed class {{ metrics_class_name }}
+ {
+ private static readonly Lazy<{{ metrics_class_name }}>
+ lazyInstance = new Lazy<{{ metrics_class_name }}>(() => new {{ metrics_class_name }}());
+ public static {{ metrics_class_name }} {{ category_name|Camelize }} => lazyInstance.Value;
+
+ // Private constructor to disallow instantiation from external callers.
+ private {{ metrics_class_name }}() { }
+
+#pragma warning disable IDE1006 // Naming Styles
+ {% for obj in objs.values() %}
+ {% if obj|attr("_generate_enums") %}
+ {% for name, suffix in obj["_generate_enums"] %}
+ {% if obj|attr(name)|length %}
+ internal enum {{ obj.name|camelize }}{{ suffix }} {
+ {% for key in obj|attr(name) %}
+ {{ key|camelize }}{{ "," if not loop.last }}
+ {% endfor %}
+ }
+ {% endif %}
+ {% endfor %}
+ {% endif %}
+ {% endfor %}
+
+ {% for obj in objs.values() %}
+ {% if obj.labeled %}
+ {{ obj_declaration(obj, 'Label', 'private ') }}
+ private readonly Lazy<LabeledMetricType<{{ obj|type_name }}>> {{ obj.name|camelize }}Lazy = new Lazy<LabeledMetricType<{{ obj|type_name }}>>(() => new LabeledMetricType<{{ obj|type_name }}>( // generated from {{ obj.identifier() }}
+ category: {{ obj.category|csharp }},
+ name: {{ obj.name|csharp }},
+ submetric: {{ category_name|Camelize }}.{{ obj.name|camelize }}Label,
+ disabled: {{ obj.is_disabled()|csharp }},
+ lifetime: {{ obj.lifetime|csharp }},
+ sendInPings: {{ obj.send_in_pings|csharp }},
+ labels: {{ obj.labels|csharp }}
+ )
+ );
+
+ /// <summary>
+ /// {{ obj.description|wordwrap() | replace('\n', '\n /// ') }}
+ /// </summary>
+ public LabeledMetricType<{{ obj|type_name }}> {{ obj.name|camelize }} => {{ obj.name|camelize }}Lazy.Value;
+
+ {% else %}
+ {# Deal with non-ping objects first. We need them to be lazy and we
+ want their description to stick on an accessor object. #}
+ {% if obj.type != 'ping' %}
+ {{ obj_declaration(obj, access='private readonly', suffix='Lazy', lazy=True) }}
+
+ /// <summary>
+ /// {{ obj.description|wordwrap() | replace('\n', '\n /// ') }}
+ /// </summary>
+ internal {{ obj|type_name }} {{ obj.name|camelize }} => {{ obj.name|camelize }}Lazy.Value; // generated from {{ obj.identifier() }}
+
+ {% else %}
+ {# Finally handle pings. #}
+ /// <summary>
+ /// {{ obj.description|wordwrap() | replace('\n', '\n /// ') }}
+ /// </summary>
+ {{ obj_declaration(obj, access='internal readonly', lazy=False) }}
+
+ {% endif %}
+ {% endif %}
+ {%- endfor %}
+#pragma warning restore IDE1006 // Naming Styles
+ }
+}
diff --git a/third_party/python/glean_parser/glean_parser/templates/kotlin.geckoview.jinja2 b/third_party/python/glean_parser/glean_parser/templates/kotlin.geckoview.jinja2
new file mode 100644
index 0000000000..6d2dca5a3e
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/templates/kotlin.geckoview.jinja2
@@ -0,0 +1,124 @@
+// -*- mode: kotlin -*-
+
+/*
+ * AUTOGENERATED BY glean_parser. DO NOT EDIT.
+ */
+{# The rendered code is autogenerated, but this
+Jinja2 template is not. Please file bugs! #}
+
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+@file:Suppress("PackageNaming", "MaxLineLength")
+package {{ namespace }}
+
+import {{ glean_namespace }}.private.BooleanMetricType // ktlint-disable import-ordering no-unused-imports
+import {{ glean_namespace }}.private.CounterMetricType // ktlint-disable import-ordering no-unused-imports
+import {{ glean_namespace }}.private.HistogramMetricBase // ktlint-disable import-ordering no-unused-imports
+import {{ glean_namespace }}.private.LabeledMetricType // ktlint-disable import-ordering no-unused-imports
+import {{ glean_namespace }}.private.QuantityMetricType // ktlint-disable import-ordering no-unused-imports
+import {{ glean_namespace }}.private.StringMetricType // ktlint-disable import-ordering no-unused-imports
+
+/*
+ * This class performs the mapping between Gecko metrics and Glean SDK
+ * metric types.
+ */
+internal object GleanGeckoMetricsMapping {
+ // Support exfiltration of Gecko histograms from products using both the
+ // Glean SDK and GeckoView. See bug 1566356 for more context.
+ @Suppress("UNUSED_PARAMETER")
+ fun getHistogram(geckoMetricName: String): HistogramMetricBase? {
+ {% if 'histograms' in gecko_metrics %}
+ return when (geckoMetricName) {
+ {% for category in gecko_metrics['histograms'].keys()|sort %}
+ // From {{ category|Camelize }}.kt
+ {% for metric in gecko_metrics['histograms'][category] %}
+ "{{ metric.gecko_datapoint }}" -> {{ category|Camelize }}.{{ metric.name|camelize }}
+ {% endfor %}
+ {%- endfor %}
+ else -> null
+ }
+ {% else %}
+ return null
+ {% endif %}
+ }
+
+ // Support exfiltration of Gecko categorical histograms from products using
+ // both the Glean SDK and GeckoView. See bug 1571740 for more context.
+ @Suppress("UNUSED_PARAMETER")
+ fun getCategoricalMetric(
+ geckoMetricName: String
+ ): LabeledMetricType<CounterMetricType>? {
+ {% if 'categoricals' in gecko_metrics %}
+ return when (geckoMetricName) {
+ {% for category in gecko_metrics['categoricals'].keys()|sort %}
+ // From {{ category|Camelize }}.kt
+ {% for metric in gecko_metrics['categoricals'][category] %}
+ "{{ metric.gecko_datapoint }}" -> {{ category|Camelize }}.{{ metric.name|camelize }}
+ {% endfor %}
+ {%- endfor %}
+ else -> null
+ }
+ {% else %}
+ return null
+ {% endif %}
+ }
+
+ // Support exfiltration of Gecko boolean scalars from products using both the
+ // Glean SDK and GeckoView. See bug 1579365 for more context.
+ @Suppress("UNUSED_PARAMETER")
+ fun getBooleanScalar(geckoMetricName: String): BooleanMetricType? {
+ {% if 'boolean' in gecko_metrics %}
+ return when (geckoMetricName) {
+ {% for category in gecko_metrics['boolean'].keys()|sort %}
+ // From {{ category|Camelize }}.kt
+ {% for metric in gecko_metrics['boolean'][category] %}
+ "{{ metric.gecko_datapoint }}" -> {{ category|Camelize }}.{{ metric.name|camelize }}
+ {% endfor %}
+ {%- endfor %}
+ else -> null
+ }
+ {% else %}
+ return null
+ {% endif %}
+ }
+
+ // Support exfiltration of Gecko string scalars from products using both the
+ // Glean SDK and GeckoView. See bug 1579365 for more context.
+ @Suppress("UNUSED_PARAMETER")
+ fun getStringScalar(geckoMetricName: String): StringMetricType? {
+ {% if 'string' in gecko_metrics %}
+ return when (geckoMetricName) {
+ {% for category in gecko_metrics['string'].keys()|sort %}
+ // From {{ category|Camelize }}.kt
+ {% for metric in gecko_metrics['string'][category] %}
+ "{{ metric.gecko_datapoint }}" -> {{ category|Camelize }}.{{ metric.name|camelize }}
+ {% endfor %}
+ {%- endfor %}
+ else -> null
+ }
+ {% else %}
+ return null
+ {% endif %}
+ }
+
+ // Support exfiltration of Gecko quantity scalars from products using both the
+ // Glean SDK and GeckoView. See bug 1579365 for more context.
+ @Suppress("UNUSED_PARAMETER")
+ fun getQuantityScalar(geckoMetricName: String): QuantityMetricType? {
+ {% if 'quantity' in gecko_metrics %}
+ return when (geckoMetricName) {
+ {% for category in gecko_metrics['quantity'].keys()|sort %}
+ // From {{ category|Camelize }}.kt
+ {% for metric in gecko_metrics['quantity'][category] %}
+ "{{ metric.gecko_datapoint }}" -> {{ category|Camelize }}.{{ metric.name|camelize }}
+ {% endfor %}
+ {%- endfor %}
+ else -> null
+ }
+ {% else %}
+ return null
+ {% endif %}
+ }
+}
diff --git a/third_party/python/glean_parser/glean_parser/templates/kotlin.jinja2 b/third_party/python/glean_parser/glean_parser/templates/kotlin.jinja2
new file mode 100644
index 0000000000..7fd1bc6cdf
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/templates/kotlin.jinja2
@@ -0,0 +1,81 @@
+// -*- mode: kotlin -*-
+
+/*
+ * AUTOGENERATED BY glean_parser. DO NOT EDIT.
+ */
+{# The rendered code is autogenerated, but this
+Jinja2 template is not. Please file bugs! #}
+
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+{% macro obj_declaration(obj, suffix='', access='', lazy=False) %}
+{% if (access != "private ") -%}
+@get:JvmName("{{ obj.name|camelize }}{{ suffix }}")
+{% endif -%}
+{{ access }}val {{ obj.name|camelize }}{{ suffix }}: {{ obj|type_name }}{% if lazy %} by lazy { {%- else %} ={% endif %} // generated from {{ obj.identifier() }}
+
+ {{ obj|type_name }}(
+ {% for arg_name in extra_args if obj[arg_name] is defined %}
+ {{ arg_name|camelize }} = {{ obj[arg_name]|kotlin }}{{ "," if not loop.last }}
+ {% endfor %}
+ )
+{% if lazy %} }{% endif %}{% endmacro %}
+
+/* ktlint-disable no-blank-line-before-rbrace */
+@file:Suppress("PackageNaming", "MaxLineLength")
+package {{ namespace }}
+
+import {{ glean_namespace }}.private.HistogramType // ktlint-disable import-ordering no-unused-imports
+import {{ glean_namespace }}.private.Lifetime // ktlint-disable import-ordering no-unused-imports
+import {{ glean_namespace }}.private.MemoryUnit // ktlint-disable import-ordering no-unused-imports
+import {{ glean_namespace }}.private.NoExtraKeys // ktlint-disable import-ordering no-unused-imports
+import {{ glean_namespace }}.private.NoReasonCodes // ktlint-disable import-ordering no-unused-imports
+import {{ glean_namespace }}.private.TimeUnit // ktlint-disable import-ordering no-unused-imports
+{% for obj_type in obj_types %}
+import {{ glean_namespace }}.private.{{ obj_type }} // ktlint-disable import-ordering
+{% endfor %}
+{% if has_labeled_metrics %}
+import {{ glean_namespace }}.private.LabeledMetricType // ktlint-disable import-ordering
+{% endif %}
+
+internal object {{ category_name|Camelize }} {
+{% for obj in objs.values() %}
+ {% if obj|attr("_generate_enums") %}
+ {% for name, suffix in obj["_generate_enums"] %}
+ {% if obj|attr(name)|length %}
+ @Suppress("ClassNaming", "EnumNaming")
+ enum class {{ obj.name|camelize }}{{ suffix }} {
+ {% for key in obj|attr(name) %}
+ {{ key|camelize }}{{ "," if not loop.last }}
+ {% endfor %}
+ }
+ {% endif %}
+ {% endfor %}
+ {% endif %}
+{% endfor %}
+{% for obj in objs.values() %}
+ {% if obj.labeled %}
+ {{ obj_declaration(obj, 'Label', 'private ') }}
+ /**
+ * {{ obj.description|wordwrap() | replace('\n', '\n * ') }}
+ */
+ val {{ obj.name|camelize }}: LabeledMetricType<{{ obj|type_name }}> by lazy { // generated from {{ obj.identifier() }}
+ LabeledMetricType(
+ category = {{ obj.category|kotlin }},
+ name = {{ obj.name|kotlin }},
+ subMetric = {{ obj.name|camelize }}Label,
+ disabled = {{ obj.is_disabled()|kotlin }},
+ lifetime = {{ obj.lifetime|kotlin }},
+ sendInPings = {{ obj.send_in_pings|kotlin }},
+ labels = {{ obj.labels|kotlin }}
+ )
+ }
+ {% else %}
+ /**
+ * {{ obj.description|wordwrap() | replace('\n', '\n * ') }}
+ */
+ {{ obj_declaration(obj, lazy=obj.type != 'ping') }}
+ {% endif %}
+{%- endfor %}
+}
diff --git a/third_party/python/glean_parser/glean_parser/templates/markdown.jinja2 b/third_party/python/glean_parser/glean_parser/templates/markdown.jinja2
new file mode 100644
index 0000000000..f5b2cd6392
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/templates/markdown.jinja2
@@ -0,0 +1,93 @@
+<!-- AUTOGENERATED BY glean_parser. DO NOT EDIT. -->
+{# The rendered markdown is autogenerated, but this
+Jinja2 template is not. Please file bugs! #}
+
+# Metrics
+This document enumerates the metrics collected by {{ project_title }} using the [Glean SDK](https://mozilla.github.io/glean/book/index.html).
+This project may depend on other projects which also collect metrics.
+This means you might have to go searching through the dependency tree to get a full picture of everything collected by this project.
+
+# Pings
+
+{% for ping_name in metrics_by_pings.keys()|sort %}
+ - [{{ ping_name }}]({{ '#' }}{{ ping_name|replace(" ","-") }})
+{% endfor %}
+
+
+{% for ping_name in metrics_by_pings.keys()|sort %}
+{% raw %}##{% endraw %} {{ ping_name }}
+
+{% if ping_name|ping_desc and ping_name|ping_desc|length > 0 %}
+{{ ping_name|ping_desc }}
+
+{% if ping_name|ping_docs|length > 0 %}
+See the Glean SDK documentation for the [`{{ ping_name }}` ping]({{ ping_name|ping_docs }}).
+
+{% endif %}
+{% endif %}
+{% if ping_name|ping_send_if_empty %}
+This ping is sent if empty.
+
+{% endif %}
+{% if ping_name|ping_include_client_id %}
+This ping includes the [client id](https://mozilla.github.io/glean/book/user/pings/index.html#the-client_info-section).
+
+{% endif %}
+{% if ping_name|ping_data_reviews %}
+**Data reviews for this ping:**
+
+{% for review in ping_name|ping_data_reviews %}
+- <{{review}}>
+{% endfor %}
+
+{% endif %}
+{% if ping_name|ping_bugs %}
+**Bugs related to this ping:**
+
+{% for bug in ping_name|ping_bugs %}
+- {% if bug|int != 0 %}{{bug}}{% else %}<{{bug}}>{% endif %}
+
+{% endfor %}
+
+{% endif %}
+{% if ping_name|ping_reasons %}
+**Reasons this ping may be sent:**
+
+{% for (reason, desc) in ping_name|ping_reasons|dictsort %}
+- `{{ reason }}`: {{ desc|indent(6, first=False) }}
+{% endfor %}
+
+{% endif %}
+{% if metrics_by_pings[ping_name] %}
+The following metrics are added to the ping:
+
+| Name | Type | Description | Data reviews | Extras | Expiration | [Data Sensitivity](https://wiki.mozilla.org/Firefox/Data_Collection) |
+| --- | --- | --- | --- | --- | --- | --- |
+{% for metric in metrics_by_pings[ping_name] %}
+| {{ metric.identifier() }} |
+{{- '['}}{{ metric.type }}]({{ metric.type|metrics_docs }}) |
+{{- metric.description|replace("\n", " ") }} |
+{%- for data_review in metric.data_reviews %}
+[{{ loop.index }}]({{ data_review }}){{ ", " if not loop.last }}
+{%- endfor -%} |
+{%- if metric|extra_info -%}
+<ul>
+{%- for property, desc in metric|extra_info %}
+<li>{{ property }}{%- if desc is not none -%}: {{ desc|replace("\n", " ") }}{%- endif -%}</li>
+{%- endfor -%}
+</ul>
+{%- endif -%} |
+{{- metric.expires }} |
+{{- metric.data_sensitivity|data_sensitivity_numbers }} |
+{% endfor %}
+{% else %}
+This ping contains no metrics.
+{% endif %}
+
+{% endfor %}
+
+Data categories are [defined here](https://wiki.mozilla.org/Firefox/Data_Collection).
+
+<!-- AUTOGENERATED BY glean_parser. DO NOT EDIT. -->
+{# The rendered markdown is autogenerated, but this
+Jinja2 template is not. Please file bugs! #}
diff --git a/third_party/python/glean_parser/glean_parser/templates/swift.jinja2 b/third_party/python/glean_parser/glean_parser/templates/swift.jinja2
new file mode 100644
index 0000000000..2f6fa51fb5
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/templates/swift.jinja2
@@ -0,0 +1,108 @@
+// -*- mode: Swift -*-
+
+// AUTOGENERATED BY glean_parser. DO NOT EDIT.
+{# The rendered code is autogenerated, but this
+Jinja2 template is not. Please file bugs! #}
+
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+{% macro obj_declaration(obj, suffix='', access='') %}
+{{ access }}static let {{ obj.name|camelize|variable_name }}{{ suffix }} = {{ obj|type_name }}( // generated from {{ obj.identifier() }}
+ {% for arg_name in extra_args if obj[arg_name] is defined %}
+ {{ arg_name|camelize }}: {{ obj[arg_name]|swift }}{{ "," if not loop.last }}
+ {% endfor %}
+ )
+{% endmacro %}
+
+{% if not allow_reserved %}
+import {{ glean_namespace }}
+
+{% endif %}
+// swiftlint:disable superfluous_disable_command
+// swiftlint:disable nesting
+// swiftlint:disable line_length
+// swiftlint:disable identifier_name
+// swiftlint:disable force_try
+
+extension {{ namespace }} {
+ {% for category in categories %}
+ {% if category.contains_pings %}
+ class {{ category.name|Camelize }} {
+ public static let shared = {{ category.name|Camelize }}()
+ private init() {
+ // Intentionally left private, no external user can instantiate a new global object.
+ }
+
+ {% for obj in category.objs.values() %}
+ {% if obj|attr("_generate_enums") %}
+ {% for name, suffix in obj["_generate_enums"] %}
+ {% if obj|attr(name)|length %}
+ enum {{ obj.name|Camelize }}{{ suffix }}: Int, ReasonCodes {
+ {% for key in obj|attr(name) %}
+ case {{ key|camelize|variable_name }} = {{ loop.index-1 }}
+ {% endfor %}
+
+ public func index() -> Int {
+ return self.rawValue
+ }
+ }
+
+ {% endif %}
+ {% endfor %}
+ {% endif %}
+ /// {{ obj.description|wordwrap() | replace('\n', '\n /// ') }}
+ let {{ obj.name|camelize|variable_name }} = {{obj|type_name}}(
+ name: {{ obj.name|swift }},
+ includeClientId: {{obj.include_client_id|swift}},
+ sendIfEmpty: {{obj.send_if_empty|swift}},
+ reasonCodes: {{obj.reason_codes|swift}}
+ )
+
+ {% endfor %}
+ }
+
+ {% else %}
+ enum {{ category.name|Camelize }} {
+ {% for obj in category.objs.values() %}
+ {% if obj|attr("_generate_enums") %}
+ {% for name, suffix in obj["_generate_enums"] %}
+ {% if obj|attr(name)|length %}
+ enum {{ obj.name|Camelize }}{{ suffix }}: Int32, ExtraKeys {
+ {% for key in obj|attr(name) %}
+ case {{ key|camelize|variable_name }} = {{ loop.index-1 }}
+ {% endfor %}
+
+ public func index() -> Int32 {
+ return self.rawValue
+ }
+ }
+
+ {% endif %}
+ {% endfor %}
+ {% endif %}
+ {% endfor %}
+ {% for obj in category.objs.values() %}
+ {% if obj.labeled %}
+ {{ obj_declaration(obj, 'Label', 'private ') }}
+ /// {{ obj.description|wordwrap() | replace('\n', '\n /// ') }}
+ static let {{ obj.name|camelize|variable_name }} = try! LabeledMetricType<{{ obj|type_name }}>( // generated from {{ obj.identifier() }}
+ category: {{ obj.category|swift }},
+ name: {{ obj.name|swift }},
+ sendInPings: {{ obj.send_in_pings|swift }},
+ lifetime: {{ obj.lifetime|swift }},
+ disabled: {{ obj.is_disabled()|swift }},
+ subMetric: {{ obj.name|camelize }}Label,
+ labels: {{ obj.labels|swift }}
+ )
+
+ {% else %}
+ /// {{ obj.description|wordwrap() | replace('\n', '\n /// ') }}
+ {{ obj_declaration(obj) }}
+ {% endif %}
+ {% endfor %}
+ }
+
+ {% endif %}
+ {% endfor %}
+}
diff --git a/third_party/python/glean_parser/glean_parser/translate.py b/third_party/python/glean_parser/glean_parser/translate.py
new file mode 100644
index 0000000000..224da1eb5e
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/translate.py
@@ -0,0 +1,176 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+High-level interface for translating `metrics.yaml` into other formats.
+"""
+
+from pathlib import Path
+import os
+import shutil
+import tempfile
+from typing import Any, Callable, Dict, Iterable, List, Optional
+
+from . import lint
+from . import parser
+from . import csharp
+from . import kotlin
+from . import markdown
+from . import metrics
+from . import swift
+from . import util
+
+
+class Outputter:
+ """
+ Class to define an output format.
+
+ Each outputter in the table has the following member values:
+
+ - output_func: the main function of the outputter, the one which
+ does the actual translation.
+
+ - clear_patterns: A list of glob patterns to clear in the directory before
+ writing new results to it.
+ """
+
+ def __init__(
+ self,
+ output_func: Callable[[metrics.ObjectTree, Path, Dict[str, Any]], None],
+ clear_patterns: Optional[List[str]] = None,
+ ):
+ if clear_patterns is None:
+ clear_patterns = []
+
+ self.output_func = output_func
+ self.clear_patterns = clear_patterns
+
+
+OUTPUTTERS = {
+ "csharp": Outputter(csharp.output_csharp, ["*.cs"]),
+ "kotlin": Outputter(kotlin.output_kotlin, ["*.kt"]),
+ "markdown": Outputter(markdown.output_markdown, []),
+ "swift": Outputter(swift.output_swift, ["*.swift"]),
+}
+
+
+def translate_metrics(
+ input_filepaths: Iterable[Path],
+ output_dir: Path,
+ translation_func: Callable[[metrics.ObjectTree, Path, Dict[str, Any]], None],
+ clear_patterns: Optional[List[str]] = None,
+ options: Optional[Dict[str, Any]] = None,
+ parser_config: Optional[Dict[str, Any]] = None,
+):
+ """
+ Translate the files in `input_filepaths` by running the metrics through a
+ translation function and writing the results in `output_dir`.
+
+ :param input_filepaths: list of paths to input metrics.yaml files
+ :param output_dir: the path to the output directory
+ :param translation_func: the function that actually performs the translation.
+ It is passed the following arguments:
+
+        - metrics_objects: The tree of metrics and pings as returned by
+ `parser.parse_objects`.
+ - output_dir: The path to the output directory.
+ - options: A dictionary of output format-specific options.
+
+ Examples of translation functions are in `kotlin.py` and `swift.py`.
+ :param clear_patterns: a list of glob patterns of files to clear before
+ generating the output files. By default, no files will be cleared (i.e.
+ the directory should be left alone).
+ :param options: dictionary of options. The available options are backend
+ format specific. These are passed unchanged to `translation_func`.
+ :param parser_config: A dictionary of options that change parsing behavior.
+ See `parser.parse_metrics` for more info.
+ """
+ if clear_patterns is None:
+ clear_patterns = []
+
+ if options is None:
+ options = {}
+
+ if parser_config is None:
+ parser_config = {}
+
+ input_filepaths = util.ensure_list(input_filepaths)
+
+ if lint.glinter(input_filepaths, parser_config):
+ return 1
+
+ all_objects = parser.parse_objects(input_filepaths, parser_config)
+
+ if util.report_validation_errors(all_objects):
+ return 1
+
+ # allow_reserved is also relevant to the translators, so copy it there
+ if parser_config.get("allow_reserved"):
+ options["allow_reserved"] = True
+
+ # Write everything out to a temporary directory, and then move it to the
+ # real directory, for transactional integrity.
+ with tempfile.TemporaryDirectory() as tempdir:
+ tempdir_path = Path(tempdir)
+ translation_func(all_objects.value, tempdir_path, options)
+
+ if output_dir.is_file():
+ output_dir.unlink()
+ elif output_dir.is_dir() and len(clear_patterns):
+ for clear_pattern in clear_patterns:
+ for filepath in output_dir.glob(clear_pattern):
+ filepath.unlink()
+ if len(list(output_dir.iterdir())):
+ print(f"Extra contents found in '{output_dir}'.")
+
+ # We can't use shutil.copytree alone if the directory already exists.
+ # However, if it doesn't exist, make sure to create one otherwise
+ # shutil.copy will fail.
+ os.makedirs(str(output_dir), exist_ok=True)
+ for filename in tempdir_path.glob("*"):
+ shutil.copy(str(filename), str(output_dir))
+
+ return 0
+
+
+def translate(
+ input_filepaths: Iterable[Path],
+ output_format: str,
+ output_dir: Path,
+ options: Optional[Dict[str, Any]] = None,
+ parser_config: Optional[Dict[str, Any]] = None,
+):
+ """
+ Translate the files in `input_filepaths` to the given `output_format` and
+ put the results in `output_dir`.
+
+ :param input_filepaths: list of paths to input metrics.yaml files
+ :param output_format: the name of the output format
+ :param output_dir: the path to the output directory
+ :param options: dictionary of options. The available options are backend
+ format specific.
+ :param parser_config: A dictionary of options that change parsing behavior.
+ See `parser.parse_metrics` for more info.
+ """
+ if options is None:
+ options = {}
+
+ if parser_config is None:
+ parser_config = {}
+
+ format_desc = OUTPUTTERS.get(output_format, None)
+
+ if format_desc is None:
+ raise ValueError(f"Unknown output format '{output_format}'")
+
+ return translate_metrics(
+ input_filepaths,
+ output_dir,
+ format_desc.output_func,
+ format_desc.clear_patterns,
+ options,
+ parser_config,
+ )
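A minimal usage sketch of the high-level entry point; the file paths, output directory, and namespace below are placeholders, and the call only succeeds if the input files parse and lint cleanly:

```python
from pathlib import Path
from glean_parser import translate

exit_code = translate.translate(
    [Path("metrics.yaml"), Path("pings.yaml")],
    "kotlin",
    Path("generated"),
    options={"namespace": "org.example.GleanMetrics"},
)
# 0 on success; 1 if the glinter or schema validation reported errors.
```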
diff --git a/third_party/python/glean_parser/glean_parser/util.py b/third_party/python/glean_parser/glean_parser/util.py
new file mode 100644
index 0000000000..f3d478ed82
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/util.py
@@ -0,0 +1,438 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from collections import OrderedDict
+import datetime
+import functools
+import json
+from pathlib import Path
+import sys
+import textwrap
+from typing import Any, Callable, Iterable, Sequence, Tuple, Union
+import urllib.request
+
+import appdirs # type: ignore
+import diskcache # type: ignore
+import jinja2
+import jsonschema # type: ignore
+from jsonschema import _utils # type: ignore
+import yaml
+
+if sys.version_info < (3, 7):
+ import iso8601 # type: ignore
+
+
+TESTING_MODE = "pytest" in sys.modules
+
+
+JSONType = Union[list, dict, str, int, float, None]
+"""
+The types supported by JSON.
+
+This is only an approximation -- this should really be a recursive type.
+"""
+
+# Adapted from
+# https://stackoverflow.com/questions/34667108/ignore-dates-and-times-while-parsing-yaml
+
+
+class _NoDatesSafeLoader(yaml.SafeLoader):
+ @classmethod
+ def remove_implicit_resolver(cls, tag_to_remove):
+ """
+ Remove implicit resolvers for a particular tag
+
+ Takes care not to modify resolvers in super classes.
+
+ We want to load datetimes as strings, not dates, because we
+ go on to serialise as json which doesn't have the advanced types
+ of yaml, and leads to incompatibilities down the track.
+ """
+ if "yaml_implicit_resolvers" not in cls.__dict__:
+ cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
+
+ for first_letter, mappings in cls.yaml_implicit_resolvers.items():
+ cls.yaml_implicit_resolvers[first_letter] = [
+ (tag, regexp) for tag, regexp in mappings if tag != tag_to_remove
+ ]
+
+
+# Since we use JSON schema to validate, and JSON schema doesn't support
+# datetimes, we don't want the YAML loader to give us datetimes -- just
+# strings.
+_NoDatesSafeLoader.remove_implicit_resolver("tag:yaml.org,2002:timestamp")
+
+
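The effect of removing the resolver can be seen directly: the stock SafeLoader turns a bare date into `datetime.date`, while this loader keeps it as a string. `_NoDatesSafeLoader` is a private helper, imported here for illustration only:

```python
import datetime
import yaml
from glean_parser.util import _NoDatesSafeLoader

doc = "expires: 2019-03-13"
assert yaml.safe_load(doc) == {"expires": datetime.date(2019, 3, 13)}
assert yaml.load(doc, Loader=_NoDatesSafeLoader) == {"expires": "2019-03-13"}
```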
+if sys.version_info < (3, 7):
+ # In Python prior to 3.7, dictionary order is not preserved. However, we
+ # want the metrics to appear in the output in the same order as they are in
+ # the metrics.yaml file, so on earlier versions of Python we must use an
+ # OrderedDict object.
+ def ordered_yaml_load(stream):
+ class OrderedLoader(_NoDatesSafeLoader):
+ pass
+
+ def construct_mapping(loader, node):
+ loader.flatten_mapping(node)
+ return OrderedDict(loader.construct_pairs(node))
+
+ OrderedLoader.add_constructor(
+ yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping
+ )
+ return yaml.load(stream, OrderedLoader)
+
+ def ordered_yaml_dump(data, **kwargs):
+ class OrderedDumper(yaml.Dumper):
+ pass
+
+ def _dict_representer(dumper, data):
+ return dumper.represent_mapping(
+ yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items()
+ )
+
+ OrderedDumper.add_representer(OrderedDict, _dict_representer)
+ return yaml.dump(data, Dumper=OrderedDumper, **kwargs)
+
+
+else:
+
+ def ordered_yaml_load(stream):
+ return yaml.load(stream, Loader=_NoDatesSafeLoader)
+
+ def ordered_yaml_dump(data, **kwargs):
+ return yaml.dump(data, **kwargs)
+
+
+def load_yaml_or_json(path: Path, ordered_dict: bool = False):
+ """
+ Load the content from either a .json or .yaml file, based on the filename
+ extension.
+
+ :param path: `pathlib.Path` object
+ :rtype object: The tree of objects as a result of parsing the file.
+ :raises ValueError: The file is neither a .json, .yml or .yaml file.
+ :raises FileNotFoundError: The file does not exist.
+ """
+ # If in py.test, support bits of literal JSON/YAML content
+ if TESTING_MODE and isinstance(path, dict):
+ return path
+
+ if path.suffix == ".json":
+ with path.open("r", encoding="utf-8") as fd:
+ return json.load(fd)
+ elif path.suffix in (".yml", ".yaml", ".yamlx"):
+ with path.open("r", encoding="utf-8") as fd:
+ if ordered_dict:
+ return ordered_yaml_load(fd)
+ else:
+ return yaml.load(fd, Loader=_NoDatesSafeLoader)
+ else:
+ raise ValueError(f"Unknown file extension {path.suffix}")
+
+
+def ensure_list(value: Any) -> Sequence[Any]:
+ """
+ Ensures that the value is a list. If it is anything but a list or tuple, a
+ list with a single element containing only value is returned.
+ """
+ if not isinstance(value, (list, tuple)):
+ return [value]
+ return value
+
+
+def to_camel_case(input: str, capitalize_first_letter: bool) -> str:
+ """
+ Convert the value to camelCase.
+
+ This additionally replaces any '.' with '_'. The first letter is capitalized
+ depending on `capitalize_first_letter`.
+ """
+ sanitized_input = input.replace(".", "_").replace("-", "_")
+ # Filter out any empty token. This could happen due to leading '_' or
+ # consecutive '__'.
+ tokens = [s.capitalize() for s in sanitized_input.split("_") if len(s) != 0]
+ # If we're not meant to capitalize the first letter, then lowercase it.
+ if not capitalize_first_letter:
+ tokens[0] = tokens[0].lower()
+ # Finally join the tokens and capitalize.
+ return "".join(tokens)
+
+
+def camelize(value: str) -> str:
+ """
+ Convert the value to camelCase (with a lower case first letter).
+
+    This is a thin wrapper around to_camel_case that handles dots in
+ addition to underscores.
+ """
+ return to_camel_case(value, False)
+
+
+def Camelize(value: str) -> str:
+ """
+ Convert the value to CamelCase (with an upper case first letter).
+
+    This is a thin wrapper around to_camel_case that handles dots in
+ addition to underscores.
+ """
+ return to_camel_case(value, True)
+
+
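Illustrative conversions; the inputs are arbitrary examples:

```python
from glean_parser.util import camelize, Camelize

assert camelize("glean.baseline.duration") == "gleanBaselineDuration"
assert camelize("memory_unit") == "memoryUnit"
assert Camelize("timing_distribution") == "TimingDistribution"
```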
+@functools.lru_cache()
+def get_jinja2_template(
+ template_name: str, filters: Iterable[Tuple[str, Callable]] = ()
+):
+ """
+ Get a Jinja2 template that ships with glean_parser.
+
+ The template has extra filters for camel-casing identifiers.
+
+ :param template_name: Name of a file in ``glean_parser/templates``
+ :param filters: tuple of 2-tuple. A tuple of (name, func) pairs defining
+ additional filters.
+ """
+ env = jinja2.Environment(
+ loader=jinja2.PackageLoader("glean_parser", "templates"),
+ trim_blocks=True,
+ lstrip_blocks=True,
+ )
+
+ env.filters["camelize"] = camelize
+ env.filters["Camelize"] = Camelize
+ for filter_name, filter_func in filters:
+ env.filters[filter_name] = filter_func
+
+ return env.get_template(template_name)
+
+
+def keep_value(f):
+ """
+    Wrap a generator so that the value it returns (rather than yields) will be
+ accessible on the .value attribute when the generator is exhausted.
+ """
+
+ class ValueKeepingGenerator(object):
+ def __init__(self, g):
+ self.g = g
+ self.value = None
+
+ def __iter__(self):
+ self.value = yield from self.g
+
+ @functools.wraps(f)
+ def g(*args, **kwargs):
+ return ValueKeepingGenerator(f(*args, **kwargs))
+
+ return g
+
+
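A hypothetical example of the wrapper in action; the decorated generator below exists only for illustration:

```python
from glean_parser.util import keep_value

@keep_value
def count_to(n):
    yield from range(n)
    return "finished"  # ordinarily lost once the generator is exhausted

gen = count_to(3)
assert list(gen) == [0, 1, 2]
assert gen.value == "finished"
```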
+def get_null_resolver(schema):
+ """
+ Returns a JSON Pointer resolver that does nothing.
+
+ This lets us handle the moz: URLs in our schemas.
+ """
+
+ class NullResolver(jsonschema.RefResolver):
+ def resolve_remote(self, uri):
+ if uri in self.store:
+ return self.store[uri]
+ if uri == "":
+ return self.referrer
+
+ return NullResolver.from_schema(schema)
+
+
+def fetch_remote_url(url: str, cache: bool = True):
+ """
+ Fetches the contents from an HTTP url or local file path, and optionally
+ caches it to disk.
+ """
+ # Include the Python version in the cache key, since caches aren't
+ # sharable across Python versions.
+ key = (url, str(sys.version_info))
+
+ is_http = url.startswith("http")
+
+ if not is_http:
+ with open(url, "r", encoding="utf-8") as fd:
+ return fd.read()
+
+ if cache:
+ cache_dir = appdirs.user_cache_dir("glean_parser", "mozilla")
+ with diskcache.Cache(cache_dir) as dc:
+ if key in dc:
+ return dc[key]
+
+ contents: str = urllib.request.urlopen(url).read()
+
+ if cache:
+ with diskcache.Cache(cache_dir) as dc:
+ dc[key] = contents
+
+ return contents
+
+
+_unset = _utils.Unset()
+
+
+def pprint_validation_error(error) -> str:
+ """
+ A version of jsonschema's ValidationError __str__ method that doesn't
+ include the schema fragment that failed. This makes the error messages
+ much more succinct.
+
+ It also shows any subschemas of anyOf/allOf that failed, if any (what
+ jsonschema calls "context").
+ """
+ essential_for_verbose = (
+ error.validator,
+ error.validator_value,
+ error.instance,
+ error.schema,
+ )
+ if any(m is _unset for m in essential_for_verbose):
+ return textwrap.fill(error.message)
+
+ instance = error.instance
+ for path in list(error.relative_path)[::-1]:
+ if isinstance(path, str):
+ instance = {path: instance}
+ else:
+ instance = [instance]
+
+ yaml_instance = ordered_yaml_dump(instance, width=72, default_flow_style=False)
+
+ parts = ["```", yaml_instance.rstrip(), "```", "", textwrap.fill(error.message)]
+ if error.context:
+ parts.extend(
+ textwrap.fill(x.message, initial_indent=" ", subsequent_indent=" ")
+ for x in error.context
+ )
+
+ description = error.schema.get("description")
+ if description:
+ parts.extend(["", "Documentation for this node:", _utils.indent(description)])
+
+ return "\n".join(parts)
+
+
+def format_error(filepath: Union[str, Path], header: str, content: str) -> str:
+ """
+    Format a jsonschema validation error.
+ """
+ if isinstance(filepath, Path):
+ filepath = filepath.resolve()
+ else:
+ filepath = "<string>"
+ if header:
+ return f"{filepath}: {header}\n{_utils.indent(content)}"
+ else:
+ return f"{filepath}:\n{_utils.indent(content)}"
+
+
+def parse_expires(expires: str) -> datetime.date:
+ """
+    Parses the `expires` field date (yyyy-mm-dd) as a date.
+ Raises a ValueError in case the string is not properly formatted.
+ """
+ try:
+ if sys.version_info < (3, 7):
+ try:
+ return iso8601.parse_date(expires).date()
+ except iso8601.ParseError:
+ raise ValueError()
+ else:
+ return datetime.date.fromisoformat(expires)
+ except ValueError:
+ raise ValueError(
+ f"Invalid expiration date '{expires}'. "
+ "Must be of the form yyyy-mm-dd in UTC."
+ )
+
+
+def is_expired(expires: str) -> bool:
+ """
+ Parses the `expires` field in a metric or ping and returns whether
+ the object should be considered expired.
+ """
+ if expires == "never":
+ return False
+ elif expires == "expired":
+ return True
+ else:
+ date = parse_expires(expires)
+ return date <= datetime.datetime.utcnow().date()
+
+
+def validate_expires(expires: str) -> None:
+ """
+    Raises a ValueError in case the `expires` value is not ISO8601-parseable,
+ or in case the date is more than 730 days (~2 years) in the future.
+ """
+ if expires in ("never", "expired"):
+ return
+
+ date = parse_expires(expires)
+ max_date = datetime.datetime.now() + datetime.timedelta(days=730)
+ if date > max_date.date():
+        raise ValueError(
+            f"'{expires}' is more than 730 days (~2 years) in the future.",
+            "Please make sure this is intentional.",
+            "You can suppress this warning by adding EXPIRATION_DATE_TOO_FAR to no_lint",
+ "See: https://mozilla.github.io/glean_parser/metrics-yaml.html#no_lint",
+ )
+
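Illustrative behaviour of the three expiry helpers; the dates are arbitrary examples:

```python
import datetime
from glean_parser.util import is_expired, parse_expires, validate_expires

assert parse_expires("2019-03-13") == datetime.date(2019, 3, 13)
assert is_expired("expired") is True
assert is_expired("never") is False
validate_expires("never")  # no-op for the special values
# validate_expires("2999-01-01") would raise ValueError: more than 730 days out.
```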
+
+def report_validation_errors(all_objects):
+ """
+ Report any validation errors found to the console.
+ """
+ found_error = False
+ for error in all_objects:
+ found_error = True
+ print("=" * 78, file=sys.stderr)
+ print(error, file=sys.stderr)
+ return found_error
+
+
+# Names of metric parameters to pass to constructors.
+# This includes only things that the language bindings care about, not things
+# that are metadata-only or are resolved into other parameters at parse time.
+# **CAUTION**: This list needs to be in the order the Swift type constructors
+# expects them. (The other language bindings don't care about the order). The
+# `test_order_of_fields` test checks that the generated code is valid.
+# **DO NOT CHANGE THE ORDER OR ADD NEW FIELDS IN THE MIDDLE**
+extra_metric_args = [
+ "category",
+ "name",
+ "send_in_pings",
+ "lifetime",
+ "disabled",
+ "time_unit",
+ "memory_unit",
+ "allowed_extra_keys",
+ "reason_codes",
+ "bucket_count",
+ "range_max",
+ "range_min",
+ "histogram_type",
+]
+
+
+# Names of ping parameters to pass to constructors.
+extra_ping_args = [
+ "include_client_id",
+ "send_if_empty",
+ "name",
+ "reason_codes",
+]
+
+
+# Names of parameters to pass to both metric and ping constructors (no duplicates).
+extra_args = extra_metric_args + [
+ v for v in extra_ping_args if v not in extra_metric_args
+]
diff --git a/third_party/python/glean_parser/glean_parser/validate_ping.py b/third_party/python/glean_parser/glean_parser/validate_ping.py
new file mode 100644
index 0000000000..33598149eb
--- /dev/null
+++ b/third_party/python/glean_parser/glean_parser/validate_ping.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Validates the contents of a Glean ping against the schema.
+"""
+
+import functools
+import io
+import json
+from pathlib import Path
+import sys
+
+import jsonschema # type: ignore
+
+from . import util
+
+
+ROOT_DIR = Path(__file__).parent
+SCHEMAS_DIR = ROOT_DIR / "schemas"
+
+
+@functools.lru_cache(maxsize=1)
+def _get_ping_schema(schema_url):
+ contents = util.fetch_remote_url(schema_url)
+ return json.loads(contents)
+
+
+def _validate_ping(ins, outs, schema_url):
+ schema = _get_ping_schema(schema_url)
+
+ resolver = util.get_null_resolver(schema)
+
+ document = json.load(ins)
+
+ validator_class = jsonschema.validators.validator_for(schema)
+ validator = validator_class(schema, resolver=resolver)
+
+ has_error = 0
+ for error in validator.iter_errors(document):
+ outs.write("=" * 76)
+ outs.write("\n")
+ outs.write(util.format_error("", "", util.pprint_validation_error(error)))
+ outs.write("\n")
+ has_error = 1
+
+ return has_error
+
+
+def validate_ping(ins, outs=None, schema_url=None):
+ """
+ Validates the contents of a Glean ping.
+
+ :param ins: Input stream or file path to the ping contents to validate
+ :param outs: Output stream to write errors to. (Defaults to stdout)
+ :param schema_url: HTTP URL or local filesystem path to Glean ping schema.
+ Defaults to the current version of the schema in
+ mozilla-pipeline-schemas.
+ :rtype: int 1 if any errors occurred, otherwise 0.
+ """
+ if schema_url is None:
+ raise TypeError("Missing required argument 'schema_url'")
+
+ if outs is None:
+ outs = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8")
+
+ if isinstance(ins, (str, bytes, Path)):
+ with open(ins, "r", encoding="utf-8") as fd:
+ return _validate_ping(fd, outs, schema_url=schema_url)
+ else:
+ return _validate_ping(ins, outs, schema_url=schema_url)
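A sketch of calling the validator programmatically; the input file path and schema URL are placeholders rather than the real mozilla-pipeline-schemas location:

```python
from glean_parser.validate_ping import validate_ping

exit_code = validate_ping(
    "ping.json",
    schema_url="https://example.com/glean/baseline.1.schema.json",
)
print("valid" if exit_code == 0 else "invalid")
```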
diff --git a/third_party/python/glean_parser/requirements_dev.txt b/third_party/python/glean_parser/requirements_dev.txt
new file mode 100644
index 0000000000..2bd318c045
--- /dev/null
+++ b/third_party/python/glean_parser/requirements_dev.txt
@@ -0,0 +1,14 @@
+black==20.8b1
+coverage==5.3
+flake8==3.8.4
+flake8-bugbear==20.1.4
+m2r==0.2.1
+mypy==0.782
+pip
+pytest-runner==5.2
+pytest==6.1.1
+Sphinx==3.2.1
+twine==3.2.0
+watchdog==0.10.3
+wheel
+yamllint==1.25.0
diff --git a/third_party/python/glean_parser/setup.cfg b/third_party/python/glean_parser/setup.cfg
new file mode 100644
index 0000000000..ec207ff74a
--- /dev/null
+++ b/third_party/python/glean_parser/setup.cfg
@@ -0,0 +1,16 @@
+[bdist_wheel]
+python_tag = py3
+
+[flake8]
+exclude = docs
+
+[aliases]
+test = pytest
+
+[tool:pytest]
+collect_ignore = ['setup.py']
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/glean_parser/setup.py b/third_party/python/glean_parser/setup.py
new file mode 100755
index 0000000000..a44af27297
--- /dev/null
+++ b/third_party/python/glean_parser/setup.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""The setup script."""
+
+import sys
+
+from setuptools import setup, find_packages
+
+
+if sys.version_info < (3, 6):
+ print("glean_parser requires at least Python 3.6", file=sys.stderr)
+ sys.exit(1)
+
+
+with open("README.rst", encoding="utf-8") as readme_file:
+ readme = readme_file.read()
+
+with open("HISTORY.rst", encoding="utf-8") as history_file:
+ history = history_file.read()
+
+requirements = [
+ "appdirs>=1.4",
+ "Click>=7",
+ "diskcache>=4",
+ "iso8601>=0.1.10; python_version<='3.6'",
+ "Jinja2>=2.10.1",
+ "jsonschema>=3.0.2",
+ "PyYAML>=3.13",
+ "yamllint>=1.18.0",
+]
+
+setup_requirements = ["pytest-runner", "setuptools-scm"]
+
+test_requirements = [
+ "pytest",
+]
+
+setup(
+ author="Michael Droettboom",
+ author_email="mdroettboom@mozilla.com",
+ classifiers=[
+ "Development Status :: 2 - Pre-Alpha",
+ "Intended Audience :: Developers",
+ "Natural Language :: English",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ ],
+ description="Parser tools for Mozilla's Glean telemetry",
+ entry_points={
+ "console_scripts": [
+ "glean_parser=glean_parser.__main__:main_wrapper",
+ ],
+ },
+ install_requires=requirements,
+ long_description=readme + "\n\n" + history,
+ include_package_data=True,
+ keywords="glean_parser",
+ name="glean_parser",
+ packages=find_packages(include=["glean_parser"]),
+ setup_requires=setup_requirements,
+ test_suite="tests",
+ tests_require=test_requirements,
+ url="https://github.com/mozilla/glean_parser",
+ zip_safe=False,
+ use_scm_version=True,
+)
diff --git a/third_party/python/glean_parser/tools/extract_data_categories.py b/third_party/python/glean_parser/tools/extract_data_categories.py
new file mode 100755
index 0000000000..9e7d9efcc6
--- /dev/null
+++ b/third_party/python/glean_parser/tools/extract_data_categories.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python3
+
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Usage:
+ python extract_data_categories.py metrics.yaml
+
+Automatically extract the data collection categories for all the metrics in a
+metrics.yaml file by consulting the linked data reviews.
+
+This script reads a metrics.yaml file, visits all of the associated data
+reviews to determine the data categories for each metric, and inserts them
+(in place) into the original metrics.yaml file.
+
+A very simple heuristic is used: look for the question about data categories
+that appears in every data review, then collect any numbers between it and the
+next question. When this heuristic fails, comments marked with "!!!" are
+inserted in the output as a recommendation to manually investigate and enter
+the data categories.
+
+Requirements from PyPI: BeautifulSoup4, PyYAML
+"""
+
+import dbm
+import functools
+import re
+import sys
+import time
+from typing import List, Set
+from urllib.request import urlopen
+
+
+from bs4 import BeautifulSoup
+import yaml
+
+
+cache = dbm.open("bugzilla-cache.db", "c")
+
+
+QUESTION = "what collection type of data do the requested measurements fall under?"
+
+
+CATEGORY_MAP = {
+ 1: "technical",
+ 2: "interaction",
+ 3: "web_activity",
+ 4: "highly_sensitive",
+}
+
+
+def fetch_url(url: str) -> str:
+ """
+ Fetch a web page containing a data review, caching it to avoid
+ over-fetching.
+ """
+ content = cache.get(url)
+ if content is not None:
+ return content
+
+ print(f"Fetching {url}")
+ content = urlopen(url).read()
+ cache[url] = content
+ time.sleep(0.5)
+ return content
+
+
+@functools.lru_cache(1000)
+def parse_data_review(html: str) -> Set[int]:
+ """
+ Parse a single data review.
+ """
+ soup = BeautifulSoup(html, features="html.parser")
+ text = soup.get_text()
+ lines = iter(text.splitlines())
+ for line in lines:
+ if QUESTION in line.strip():
+ break
+
+ categories: Set[int] = set()
+ for line in lines:
+ if "?" in line:
+ break
+ categories.update(int(x) for x in re.findall("[0-9]+", line))
+
+ return categories
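+
+# Illustrative example (assumed review text, not from the upstream repo): if
+# the answer section after the data-category question contains a line such as
+# "Category 2, interaction data" before the next question mark,
+# parse_data_review() returns {2}.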
+
+
+def categories_as_strings(categories: Set[int]) -> List[str]:
+ """
+ From a set of numeric categories, return the strings used in a metrics.yaml
+ file. This may contain strings representing errors.
+ """
+ if len(categories):
+ return [
+ CATEGORY_MAP.get(x, f"!!!UNKNOWN CATEGORY {x}")
+ for x in sorted(list(categories))
+ ]
+ else:
+ return ["!!! NO DATA CATEGORIES FOUND"]
+
+
+def update_lines(
+ lines: List[str],
+ category_name: str,
+ metric_name: str,
+ data_sensitivity_values: List[str],
+) -> List[str]:
+ """
+ Update the lines of a YAML file in place to include the data_sensitivity
+ for the given metric, returning the lines of the result.
+ """
+ output = []
+ lines_iter = iter(lines)
+
+ for line in lines_iter:
+ output.append(line)
+ if line.startswith(f"{category_name}:"):
+ break
+
+ for line in lines_iter:
+ output.append(line)
+ if line.startswith(f" {metric_name}:"):
+ break
+
+ for line in lines_iter:
+ output.append(line)
+ if line.startswith(f" data_reviews:"):
+ break
+
+ for line in lines_iter:
+ if not line.strip().startswith("- "):
+ output.append(" data_sensitivity:\n")
+ for data_sensitivity in data_sensitivity_values:
+ output.append(f" - {data_sensitivity}\n")
+ output.append(line)
+ break
+ else:
+ output.append(line)
+
+ for line in lines_iter:
+ output.append(line)
+
+ return output
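+
+# Illustrative result (assumed metric snippet, not from the upstream repo):
+# given a metric whose data_reviews list ends with a "- https://bugzilla..."
+# bullet, update_lines() emits, immediately after that bullet:
+#
+#     data_sensitivity:
+#       - technical
+#
+# with the key at four spaces and items at six, matching the appends above.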
+
+
+def parse_yaml(yamlpath: str):
+ with open(yamlpath) as fd:
+ content = yaml.safe_load(fd)
+
+ with open(yamlpath) as fd:
+ lines = list(fd.readlines())
+
+ for category_name, category in content.items():
+ if category_name.startswith("$") or category_name == "no_lint":
+ continue
+ for metric_name, metric in category.items():
+ categories = set()
+ for data_review_url in metric["data_reviews"]:
+ html = fetch_url(data_review_url)
+ categories.update(parse_data_review(html))
+ lines = update_lines(
+ lines, category_name, metric_name, categories_as_strings(categories)
+ )
+
+ with open(yamlpath, "w") as fd:
+ for line in lines:
+ fd.write(line)
+
+
+if __name__ == "__main__":
+ parse_yaml(sys.argv[-1])
diff --git a/third_party/python/gyp/.gitignore b/third_party/python/gyp/.gitignore
new file mode 100644
index 0000000000..0d20b6487c
--- /dev/null
+++ b/third_party/python/gyp/.gitignore
@@ -0,0 +1 @@
+*.pyc
diff --git a/third_party/python/gyp/.travis.yml b/third_party/python/gyp/.travis.yml
new file mode 100644
index 0000000000..4733130059
--- /dev/null
+++ b/third_party/python/gyp/.travis.yml
@@ -0,0 +1,23 @@
+language: cpp
+matrix:
+ include:
+ - os: linux
+ compiler: clang
+ - os: osx
+ compiler: clang
+ - python: 2.7
+ language: python
+ before_install: pip install flake8
+ script: flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics
+ - python: 3.7
+ language: python
+ before_install: pip install flake8
+ script: flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics
+ dist: xenial # required for Python >= 3.7 (travis-ci/travis-ci#9069)
+
+before_install: ./buildbot/travis-checkout.sh
+script: ./buildbot/travis-test.sh
+
+branches:
+ only:
+ - master
diff --git a/third_party/python/gyp/AUTHORS b/third_party/python/gyp/AUTHORS
new file mode 100644
index 0000000000..9e742f2966
--- /dev/null
+++ b/third_party/python/gyp/AUTHORS
@@ -0,0 +1,17 @@
+# Names should be added to this file like so:
+# Name or Organization <email address>
+
+Google Inc. <*@google.com>
+Bloomberg Finance L.P. <*@bloomberg.net>
+IBM Inc. <*@*.ibm.com>
+Yandex LLC <*@yandex-team.ru>
+
+Steven Knight <knight@baldmt.com>
+Ryan Norton <rnorton10@gmail.com>
+David J. Sankel <david@sankelsoftware.com>
+Eric N. Vander Weele <ericvw@gmail.com>
+Tom Freudenberg <th.freudenberg@gmail.com>
+Julien Brianceau <jbriance@cisco.com>
+Refael Ackermann <refack@gmail.com>
+Jiajie Hu <jiajie.hu@intel.com>
+Philip Nery <pbfnery@gmail.com>
diff --git a/third_party/python/gyp/DEPS b/third_party/python/gyp/DEPS
new file mode 100644
index 0000000000..167fb779b0
--- /dev/null
+++ b/third_party/python/gyp/DEPS
@@ -0,0 +1,23 @@
+# DEPS file for gclient use in buildbot execution of gyp tests.
+#
+# (You don't need to use gclient for normal GYP development work.)
+
+vars = {
+ "chromium_git": "https://chromium.googlesource.com/",
+}
+
+deps = {
+}
+
+deps_os = {
+ "win": {
+ "third_party/cygwin":
+ Var("chromium_git") + "chromium/deps/cygwin@4fbd5b9",
+
+ "third_party/python_26":
+ Var("chromium_git") + "chromium/deps/python_26@5bb4080",
+
+ "src/third_party/pefile":
+ Var("chromium_git") + "external/pefile@72c6ae4",
+ },
+}
diff --git a/third_party/python/gyp/LICENSE b/third_party/python/gyp/LICENSE
new file mode 100644
index 0000000000..ab6b011a10
--- /dev/null
+++ b/third_party/python/gyp/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009 Google Inc. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/gyp/OWNERS b/third_party/python/gyp/OWNERS
new file mode 100644
index 0000000000..72e8ffc0db
--- /dev/null
+++ b/third_party/python/gyp/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/third_party/python/gyp/PRESUBMIT.py b/third_party/python/gyp/PRESUBMIT.py
new file mode 100644
index 0000000000..5ee669b595
--- /dev/null
+++ b/third_party/python/gyp/PRESUBMIT.py
@@ -0,0 +1,125 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Top-level presubmit script for GYP.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details about the presubmit API built into gcl.
+"""
+
+
+PYLINT_BLACKLIST = [
+ # TODO: fix me.
+ # From SCons, not done in google style.
+ 'test/lib/TestCmd.py',
+ 'test/lib/TestCommon.py',
+ 'test/lib/TestGyp.py',
+]
+
+
+PYLINT_DISABLED_WARNINGS = [
+ # TODO: fix me.
+ # Many tests include modules they don't use.
+ 'W0611',
+ # Possible unbalanced tuple unpacking with sequence.
+ 'W0632',
+ # Attempting to unpack a non-sequence.
+ 'W0633',
+ # Include order doesn't properly include local files?
+ 'F0401',
+ # Some use of built-in names.
+ 'W0622',
+ # Some unused variables.
+ 'W0612',
+ # Operator not preceded/followed by space.
+ 'C0323',
+ 'C0322',
+ # Unnecessary semicolon.
+ 'W0301',
+ # Unused argument.
+ 'W0613',
+ # String has no effect (docstring in wrong place).
+ 'W0105',
+ # map/filter on lambda could be replaced by comprehension.
+ 'W0110',
+ # Use of eval.
+ 'W0123',
+ # Comma not followed by space.
+ 'C0324',
+ # Access to a protected member.
+ 'W0212',
+ # Bad indent.
+ 'W0311',
+ # Line too long.
+ 'C0301',
+ # Undefined variable.
+ 'E0602',
+  # No exception type specified.
+ 'W0702',
+ # No member of that name.
+ 'E1101',
+ # Dangerous default {}.
+ 'W0102',
+ # Cyclic import.
+ 'R0401',
+ # Others, too many to sort.
+ 'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231',
+ 'R0201', 'E0101', 'C0321',
+ # ************* Module copy
+ # W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect
+ 'W0104',
+]
+
+
+def _LicenseHeader(input_api):
+ # Accept any year number from 2009 to the current year.
+ current_year = int(input_api.time.strftime('%Y'))
+ allowed_years = (str(s) for s in reversed(range(2009, current_year + 1)))
+ years_re = '(' + '|'.join(allowed_years) + ')'
+
+ # The (c) is deprecated, but tolerate it until it's removed from all files.
+ return (
+ r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n'
+ r'.*? Use of this source code is governed by a BSD-style license that '
+ r'can be\n'
+ r'.*? found in the LICENSE file\.\n'
+ ) % {
+ 'year': years_re,
+ }
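+
+# Illustrative header matched by the pattern above (assuming the current year
+# is 2012 or later):
+#
+#   # Copyright (c) 2012 Google Inc. All rights reserved.
+#   # Use of this source code is governed by a BSD-style license that can be
+#   # found in the LICENSE file.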
+
+def CheckChangeOnUpload(input_api, output_api):
+ report = []
+ report.extend(input_api.canned_checks.PanProjectChecks(
+ input_api, output_api, license_header=_LicenseHeader(input_api)))
+ return report
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ report = []
+
+ report.extend(input_api.canned_checks.PanProjectChecks(
+ input_api, output_api, license_header=_LicenseHeader(input_api)))
+ report.extend(input_api.canned_checks.CheckTreeIsOpen(
+ input_api, output_api,
+ 'http://gyp-status.appspot.com/status',
+ 'http://gyp-status.appspot.com/current'))
+
+ import os
+ import sys
+ old_sys_path = sys.path
+ try:
+ sys.path = ['pylib', 'test/lib'] + sys.path
+ blacklist = PYLINT_BLACKLIST
+ if sys.platform == 'win32':
+ blacklist = [os.path.normpath(x).replace('\\', '\\\\')
+ for x in PYLINT_BLACKLIST]
+ report.extend(input_api.canned_checks.RunPylint(
+ input_api,
+ output_api,
+ black_list=blacklist,
+ disabled_warnings=PYLINT_DISABLED_WARNINGS))
+ finally:
+ sys.path = old_sys_path
+ return report
diff --git a/third_party/python/gyp/README.md b/third_party/python/gyp/README.md
new file mode 100644
index 0000000000..b4766c9d63
--- /dev/null
+++ b/third_party/python/gyp/README.md
@@ -0,0 +1,5 @@
+GYP can Generate Your Projects.
+===================================
+
+Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can
+check out the ```md-pages``` branch to read those documents offline.
diff --git a/third_party/python/gyp/buildbot/buildbot_run.py b/third_party/python/gyp/buildbot/buildbot_run.py
new file mode 100755
index 0000000000..89416520d3
--- /dev/null
+++ b/third_party/python/gyp/buildbot/buildbot_run.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Argument-less script to select what to run on the buildbots."""
+
+from __future__ import print_function
+
+import os
+import shutil
+import subprocess
+import sys
+
+
+BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__))
+TRUNK_DIR = os.path.dirname(BUILDBOT_DIR)
+ROOT_DIR = os.path.dirname(TRUNK_DIR)
+CMAKE_DIR = os.path.join(ROOT_DIR, 'cmake')
+CMAKE_BIN_DIR = os.path.join(CMAKE_DIR, 'bin')
+OUT_DIR = os.path.join(TRUNK_DIR, 'out')
+
+
+def CallSubProcess(*args, **kwargs):
+ """Wrapper around subprocess.call which treats errors as build exceptions."""
+ with open(os.devnull) as devnull_fd:
+ retcode = subprocess.call(stdin=devnull_fd, *args, **kwargs)
+ if retcode != 0:
+ print('@@@STEP_EXCEPTION@@@')
+ sys.exit(1)
+
+
+def PrepareCmake():
+ """Build CMake 2.8.8 since the version in Precise is 2.8.7."""
+ if os.environ['BUILDBOT_CLOBBER'] == '1':
+ print('@@@BUILD_STEP Clobber CMake checkout@@@')
+ shutil.rmtree(CMAKE_DIR)
+
+ # We always build CMake 2.8.8, so no need to do anything
+ # if the directory already exists.
+ if os.path.isdir(CMAKE_DIR):
+ return
+
+ print('@@@BUILD_STEP Initialize CMake checkout@@@')
+ os.mkdir(CMAKE_DIR)
+
+ print('@@@BUILD_STEP Sync CMake@@@')
+ CallSubProcess(
+ ['git', 'clone',
+ '--depth', '1',
+ '--single-branch',
+ '--branch', 'v2.8.8',
+ '--',
+ 'git://cmake.org/cmake.git',
+ CMAKE_DIR],
+ cwd=CMAKE_DIR)
+
+ print('@@@BUILD_STEP Build CMake@@@')
+ CallSubProcess(
+ ['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR],
+ cwd=CMAKE_DIR)
+
+  CallSubProcess(['make', 'cmake'], cwd=CMAKE_DIR)
+
+
+def GypTestFormat(title, format=None, msvs_version=None, tests=[]):
+ """Run the gyp tests for a given format, emitting annotator tags.
+
+ See annotator docs at:
+ https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations
+  Args:
+    title: Build step title to emit in the annotator output.
+    format: gyp format to test; defaults to the title.
+    msvs_version: if set, exported to the test run as GYP_MSVS_VERSION.
+    tests: optional list of specific test scripts to run.
+  Returns:
+    0 for success, 1 for failure.
+ """
+ if not format:
+ format = title
+
+ print('@@@BUILD_STEP ' + title + '@@@')
+ sys.stdout.flush()
+ env = os.environ.copy()
+ if msvs_version:
+ env['GYP_MSVS_VERSION'] = msvs_version
+ command = ' '.join(
+ [sys.executable, 'gyp/gyptest.py',
+ '--all',
+ '--passed',
+ '--format', format,
+ '--path', CMAKE_BIN_DIR,
+ '--chdir', 'gyp'] + tests)
+ retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True)
+ if retcode:
+ # Emit failure tag, and keep going.
+ print('@@@STEP_FAILURE@@@')
+ return 1
+ return 0
+
+
+def GypBuild():
+ # Dump out/ directory.
+ print('@@@BUILD_STEP cleanup@@@')
+ print('Removing %s...' % OUT_DIR)
+ shutil.rmtree(OUT_DIR, ignore_errors=True)
+ print('Done.')
+
+ retcode = 0
+ if sys.platform.startswith('linux'):
+ retcode += GypTestFormat('ninja')
+ retcode += GypTestFormat('make')
+ PrepareCmake()
+ retcode += GypTestFormat('cmake')
+ elif sys.platform == 'darwin':
+ retcode += GypTestFormat('ninja')
+ retcode += GypTestFormat('xcode')
+ retcode += GypTestFormat('make')
+ elif sys.platform == 'win32':
+ retcode += GypTestFormat('ninja')
+ if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
+ retcode += GypTestFormat('msvs-ninja-2013', format='msvs-ninja',
+ msvs_version='2013',
+ tests=[
+ r'test\generator-output\gyptest-actions.py',
+ r'test\generator-output\gyptest-relocate.py',
+ r'test\generator-output\gyptest-rules.py'])
+ retcode += GypTestFormat('msvs-2013', format='msvs', msvs_version='2013')
+ else:
+ raise Exception('Unknown platform')
+ if retcode:
+ # TODO(bradnelson): once the annotator supports a postscript (section for
+ # after the build proper that could be used for cumulative failures),
+ # use that instead of this. This isolates the final return value so
+ # that it isn't misattributed to the last stage.
+ print('@@@BUILD_STEP failures@@@')
+ sys.exit(retcode)
+
+
+if __name__ == '__main__':
+ GypBuild()
diff --git a/third_party/python/gyp/buildbot/commit_queue/OWNERS b/third_party/python/gyp/buildbot/commit_queue/OWNERS
new file mode 100644
index 0000000000..b269c198b4
--- /dev/null
+++ b/third_party/python/gyp/buildbot/commit_queue/OWNERS
@@ -0,0 +1,6 @@
+set noparent
+bradnelson@chromium.org
+bradnelson@google.com
+iannucci@chromium.org
+scottmg@chromium.org
+thakis@chromium.org
diff --git a/third_party/python/gyp/buildbot/commit_queue/README b/third_party/python/gyp/buildbot/commit_queue/README
new file mode 100644
index 0000000000..9428497883
--- /dev/null
+++ b/third_party/python/gyp/buildbot/commit_queue/README
@@ -0,0 +1,3 @@
+cq_config.json describes the trybots that must pass in order
+to land a change through the commit queue.
+Comments are here as the file is strictly JSON.
diff --git a/third_party/python/gyp/buildbot/commit_queue/cq_config.json b/third_party/python/gyp/buildbot/commit_queue/cq_config.json
new file mode 100644
index 0000000000..656c21e54f
--- /dev/null
+++ b/third_party/python/gyp/buildbot/commit_queue/cq_config.json
@@ -0,0 +1,15 @@
+{
+ "trybots": {
+ "launched": {
+ "tryserver.nacl": {
+ "gyp-presubmit": ["defaulttests"],
+ "gyp-linux": ["defaulttests"],
+ "gyp-mac": ["defaulttests"],
+ "gyp-win32": ["defaulttests"],
+ "gyp-win64": ["defaulttests"]
+ }
+ },
+ "triggered": {
+ }
+ }
+}
diff --git a/third_party/python/gyp/buildbot/travis-checkout.sh b/third_party/python/gyp/buildbot/travis-checkout.sh
new file mode 100755
index 0000000000..bc42f43754
--- /dev/null
+++ b/third_party/python/gyp/buildbot/travis-checkout.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+# Copyright 2018 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -ex
+
+get_depot_tools() {
+ cd
+ git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git
+ export PATH="$HOME/depot_tools:$PATH"
+}
+
+gclient_sync() {
+ cd "${TRAVIS_BUILD_DIR}"/..
+ gclient config --unmanaged https://github.com/chromium/gyp.git
+ gclient sync
+ cd gyp
+}
+
+main() {
+ get_depot_tools
+ gclient_sync
+}
+
+main "$@"
diff --git a/third_party/python/gyp/buildbot/travis-test.sh b/third_party/python/gyp/buildbot/travis-test.sh
new file mode 100755
index 0000000000..4bd69df244
--- /dev/null
+++ b/third_party/python/gyp/buildbot/travis-test.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+# Copyright 2018 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+main() {
+ export PATH="$HOME/depot_tools:$PATH"
+ ./gyptest.py -a -f ninja
+}
+
+main "$@"
diff --git a/third_party/python/gyp/codereview.settings b/third_party/python/gyp/codereview.settings
new file mode 100644
index 0000000000..27fb9f99e2
--- /dev/null
+++ b/third_party/python/gyp/codereview.settings
@@ -0,0 +1,6 @@
+# This file is used by git cl to get repository specific information.
+CC_LIST: gyp-developer@googlegroups.com
+CODE_REVIEW_SERVER: codereview.chromium.org
+GERRIT_HOST: True
+PROJECT: gyp
+VIEW_VC: https://chromium.googlesource.com/external/gyp/+/
diff --git a/third_party/python/gyp/data/win/large-pdb-shim.cc b/third_party/python/gyp/data/win/large-pdb-shim.cc
new file mode 100644
index 0000000000..8bca510815
--- /dev/null
+++ b/third_party/python/gyp/data/win/large-pdb-shim.cc
@@ -0,0 +1,12 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
+// then used during the final link for modules that have large PDBs. Otherwise,
+// the linker will generate a pdb with a page size of 1KB, which imposes a limit
+// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
+// (rather than the linker), this limit is avoided. With this in place PDBs may
+// grow to 2GB.
+//
+// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.
diff --git a/third_party/python/gyp/gyp b/third_party/python/gyp/gyp
new file mode 100755
index 0000000000..1b8b9bdfb0
--- /dev/null
+++ b/third_party/python/gyp/gyp
@@ -0,0 +1,8 @@
+#!/bin/sh
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+base=$(dirname "$0")
+exec python "${base}/gyp_main.py" "$@"
diff --git a/third_party/python/gyp/gyp.bat b/third_party/python/gyp/gyp.bat
new file mode 100755
index 0000000000..c0b4ca24e5
--- /dev/null
+++ b/third_party/python/gyp/gyp.bat
@@ -0,0 +1,5 @@
+@rem Copyright (c) 2009 Google Inc. All rights reserved.
+@rem Use of this source code is governed by a BSD-style license that can be
+@rem found in the LICENSE file.
+
+@python "%~dp0gyp_main.py" %*
diff --git a/third_party/python/gyp/gyp_main.py b/third_party/python/gyp/gyp_main.py
new file mode 100755
index 0000000000..25a6eba94a
--- /dev/null
+++ b/third_party/python/gyp/gyp_main.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+# Make sure we're using the version of pylib in this repo, not one installed
+# elsewhere on the system.
+sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
+import gyp
+
+if __name__ == '__main__':
+ sys.exit(gyp.script_main())
diff --git a/third_party/python/gyp/gyptest.py b/third_party/python/gyp/gyptest.py
new file mode 100755
index 0000000000..1a9ffca7a1
--- /dev/null
+++ b/third_party/python/gyp/gyptest.py
@@ -0,0 +1,243 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""gyptest.py -- test runner for GYP tests."""
+
+from __future__ import print_function
+
+import argparse
+import math
+import os
+import platform
+import subprocess
+import sys
+import time
+
+
+def is_test_name(f):
+ return f.startswith('gyptest') and f.endswith('.py')
+
+
+def find_all_gyptest_files(directory):
+ result = []
+ for root, dirs, files in os.walk(directory):
+ result.extend([ os.path.join(root, f) for f in files if is_test_name(f) ])
+ result.sort()
+ return result
+
+
+def main(argv=None):
+ if argv is None:
+ argv = sys.argv
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-a", "--all", action="store_true",
+ help="run all tests")
+ parser.add_argument("-C", "--chdir", action="store",
+ help="change to directory")
+ parser.add_argument("-f", "--format", action="store", default='',
+ help="run tests with the specified formats")
+ parser.add_argument("-G", '--gyp_option', action="append", default=[],
+ help="Add -G options to the gyp command line")
+ parser.add_argument("-l", "--list", action="store_true",
+ help="list available tests and exit")
+ parser.add_argument("-n", "--no-exec", action="store_true",
+ help="no execute, just print the command line")
+ parser.add_argument("--path", action="append", default=[],
+ help="additional $PATH directory")
+ parser.add_argument("-q", "--quiet", action="store_true",
+ help="quiet, don't print anything unless there are failures")
+ parser.add_argument("-v", "--verbose", action="store_true",
+ help="print configuration info and test results.")
+ parser.add_argument('tests', nargs='*')
+ args = parser.parse_args(argv[1:])
+
+ if args.chdir:
+ os.chdir(args.chdir)
+
+ if args.path:
+ extra_path = [os.path.abspath(p) for p in args.path]
+ extra_path = os.pathsep.join(extra_path)
+ os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH']
+
+ if not args.tests:
+ if not args.all:
+ sys.stderr.write('Specify -a to get all tests.\n')
+ return 1
+ args.tests = ['test']
+
+ tests = []
+ for arg in args.tests:
+ if os.path.isdir(arg):
+ tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
+ else:
+ if not is_test_name(os.path.basename(arg)):
+ print(arg, 'is not a valid gyp test name.', file=sys.stderr)
+ sys.exit(1)
+ tests.append(arg)
+
+ if args.list:
+ for test in tests:
+ print(test)
+ sys.exit(0)
+
+ os.environ['PYTHONPATH'] = os.path.abspath('test/lib')
+
+ if args.verbose:
+ print_configuration_info()
+
+ if args.gyp_option and not args.quiet:
+ print('Extra Gyp options: %s\n' % args.gyp_option)
+
+ if args.format:
+ format_list = args.format.split(',')
+ else:
+ format_list = {
+ 'aix5': ['make'],
+ 'freebsd7': ['make'],
+ 'freebsd8': ['make'],
+ 'openbsd5': ['make'],
+ 'cygwin': ['msvs'],
+ 'win32': ['msvs', 'ninja'],
+ 'linux': ['make', 'ninja'],
+ 'linux2': ['make', 'ninja'],
+ 'linux3': ['make', 'ninja'],
+
+ # TODO: Re-enable xcode-ninja.
+ # https://bugs.chromium.org/p/gyp/issues/detail?id=530
+ # 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'],
+ 'darwin': ['make', 'ninja', 'xcode'],
+ }[sys.platform]
+
+ gyp_options = []
+ for option in args.gyp_option:
+ gyp_options += ['-G', option]
+
+ runner = Runner(format_list, tests, gyp_options, args.verbose)
+ runner.run()
+
+ if not args.quiet:
+ runner.print_results()
+
+ if runner.failures:
+ return 1
+ else:
+ return 0
+
+
+def print_configuration_info():
+ print('Test configuration:')
+ if sys.platform == 'darwin':
+ sys.path.append(os.path.abspath('test/lib'))
+ import TestMac
+ print(' Mac %s %s' % (platform.mac_ver()[0], platform.mac_ver()[2]))
+ print(' Xcode %s' % TestMac.Xcode.Version())
+ elif sys.platform == 'win32':
+ sys.path.append(os.path.abspath('pylib'))
+ import gyp.MSVSVersion
+ print(' Win %s %s\n' % platform.win32_ver()[0:2])
+ print(' MSVS %s' %
+ gyp.MSVSVersion.SelectVisualStudioVersion().Description())
+ elif sys.platform in ('linux', 'linux2'):
+ print(' Linux %s' % ' '.join(platform.linux_distribution()))
+ print(' Python %s' % platform.python_version())
+ print(' PYTHONPATH=%s' % os.environ['PYTHONPATH'])
+ print()
+
+
+class Runner(object):
+ def __init__(self, formats, tests, gyp_options, verbose):
+ self.formats = formats
+ self.tests = tests
+ self.verbose = verbose
+ self.gyp_options = gyp_options
+ self.failures = []
+ self.num_tests = len(formats) * len(tests)
+ num_digits = len(str(self.num_tests))
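+    # e.g. with 12 tests num_digits is 2, so fmt_str below becomes
+    # '[%2d/%2d] (%s) %s' and renders lines like '[ 3/12] (ninja) gyptest-foo.py'.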
+ self.fmt_str = '[%%%dd/%%%dd] (%%s) %%s' % (num_digits, num_digits)
+ self.isatty = sys.stdout.isatty() and not self.verbose
+ self.env = os.environ.copy()
+ self.hpos = 0
+
+ def run(self):
+ run_start = time.time()
+
+ i = 1
+ for fmt in self.formats:
+ for test in self.tests:
+ self.run_test(test, fmt, i)
+ i += 1
+
+ if self.isatty:
+ self.erase_current_line()
+
+ self.took = time.time() - run_start
+
+ def run_test(self, test, fmt, i):
+ if self.isatty:
+ self.erase_current_line()
+
+ msg = self.fmt_str % (i, self.num_tests, fmt, test)
+ self.print_(msg)
+
+ start = time.time()
+ cmd = [sys.executable, test] + self.gyp_options
+ self.env['TESTGYP_FORMAT'] = fmt
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT, env=self.env)
+ proc.wait()
+ took = time.time() - start
+
+ stdout = proc.stdout.read().decode('utf8')
+ if proc.returncode == 2:
+ res = 'skipped'
+ elif proc.returncode:
+ res = 'failed'
+ self.failures.append('(%s) %s' % (test, fmt))
+ else:
+ res = 'passed'
+ res_msg = ' %s %.3fs' % (res, took)
+ self.print_(res_msg)
+
+ if (stdout and
+ not stdout.endswith('PASSED\n') and
+ not (stdout.endswith('NO RESULT\n'))):
+ print()
+ for l in stdout.splitlines():
+ print(' %s' % l)
+ elif not self.isatty:
+ print()
+
+ def print_(self, msg):
+ print(msg, end='')
+ index = msg.rfind('\n')
+ if index == -1:
+ self.hpos += len(msg)
+ else:
+ self.hpos = len(msg) - index
+ sys.stdout.flush()
+
+ def erase_current_line(self):
+ print('\b' * self.hpos + ' ' * self.hpos + '\b' * self.hpos, end='')
+ sys.stdout.flush()
+ self.hpos = 0
+
+ def print_results(self):
+ num_failures = len(self.failures)
+ if num_failures:
+ print()
+ if num_failures == 1:
+ print("Failed the following test:")
+ else:
+ print("Failed the following %d tests:" % num_failures)
+ print("\t" + "\n\t".join(sorted(self.failures)))
+ print()
+ print('Ran %d tests in %.3fs, %d failed.' % (self.num_tests, self.took,
+ num_failures))
+ print()
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/third_party/python/gyp/pylib/gyp/MSVSNew.py b/third_party/python/gyp/pylib/gyp/MSVSNew.py
new file mode 100644
index 0000000000..73182ec880
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/MSVSNew.py
@@ -0,0 +1,353 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""New implementation of Visual Studio project generation."""
+
+import os
+import random
+import sys
+
+import gyp.common
+
+# hashlib is supplied as of Python 2.5 as the replacement interface for md5
+# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
+# available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
+# preserving 2.4 compatibility.
+try:
+ import hashlib
+ _new_md5 = hashlib.md5
+except ImportError:
+ import md5
+ _new_md5 = md5.new
+
+
+try:
+ # cmp was removed in python3.
+ cmp
+except NameError:
+ def cmp(a, b):
+ return (a > b) - (a < b)
+
+# Initialize random number generator
+random.seed()
+
+# GUIDs for project types
+ENTRY_TYPE_GUIDS = {
+ 'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
+ 'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
+}
+
+#------------------------------------------------------------------------------
+# Helper functions
+
+
+def MakeGuid(name, seed='msvs_new'):
+ """Returns a GUID for the specified target name.
+
+ Args:
+ name: Target name.
+ seed: Seed for MD5 hash.
+ Returns:
+    A GUID-like string calculated from the name and seed.
+
+ This generates something which looks like a GUID, but depends only on the
+ name and seed. This means the same name/seed will always generate the same
+  GUID, so that projects and solutions which refer to each other can determine
+  the GUID to refer to explicitly. It also means that the GUID will
+ not change when the project for a target is rebuilt.
+ """
+
+ to_hash = str(seed) + str(name)
+ to_hash = to_hash.encode('utf-8')
+ # Calculate a MD5 signature for the seed and name.
+ d = _new_md5(to_hash).hexdigest().upper()
+ # Convert most of the signature to GUID form (discard the rest)
+ guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
+ + '-' + d[20:32] + '}')
+ return guid
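+
+# Illustrative property (not an upstream doctest): because the GUID is an MD5
+# of seed + name, MakeGuid('base') returns the same '{...}' string on every
+# run, while MakeGuid('base', seed='other') gives a different but equally
+# stable value.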
+
+#------------------------------------------------------------------------------
+
+
+class MSVSSolutionEntry(object):
+ def __cmp__(self, other):
+ # Sort by name then guid (so things are in order on vs2008).
+ return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))
+
+ def __lt__(self, other):
+ return (self.name, self.get_guid()) < (other.name, other.get_guid())
+
+class MSVSFolder(MSVSSolutionEntry):
+ """Folder in a Visual Studio project or solution."""
+
+ def __init__(self, path, name = None, entries = None,
+ guid = None, items = None):
+ """Initializes the folder.
+
+ Args:
+ path: Full path to the folder.
+ name: Name of the folder.
+ entries: List of folder entries to nest inside this folder. May contain
+ Folder or Project objects. May be None, if the folder is empty.
+ guid: GUID to use for folder, if not None.
+ items: List of solution items to include in the folder project. May be
+ None, if the folder does not directly contain items.
+ """
+ if name:
+ self.name = name
+ else:
+      # Use the last path component.
+ self.name = os.path.basename(path)
+
+ self.path = path
+ self.guid = guid
+
+ # Copy passed lists (or set to empty lists)
+ self.entries = sorted(list(entries or []))
+ self.items = list(items or [])
+
+ self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']
+
+ def get_guid(self):
+ if self.guid is None:
+ # Use consistent guids for folders (so things don't regenerate).
+ self.guid = MakeGuid(self.path, seed='msvs_folder')
+ return self.guid
+
+
+#------------------------------------------------------------------------------
+
+
+class MSVSProject(MSVSSolutionEntry):
+ """Visual Studio project."""
+
+ def __init__(self, path, name = None, dependencies = None, guid = None,
+ spec = None, build_file = None, config_platform_overrides = None,
+ fixpath_prefix = None):
+ """Initializes the project.
+
+ Args:
+ path: Absolute path to the project file.
+ name: Name of project. If None, the name will be the same as the base
+ name of the project file.
+ dependencies: List of other Project objects this project is dependent
+ upon, if not None.
+ guid: GUID to use for project, if not None.
+ spec: Dictionary specifying how to build this project.
+ build_file: Filename of the .gyp file that the vcproj file comes from.
+ config_platform_overrides: optional dict of configuration platforms to
+ used in place of the default for this target.
+ fixpath_prefix: the path used to adjust the behavior of _fixpath
+ """
+ self.path = path
+ self.guid = guid
+ self.spec = spec
+ self.build_file = build_file
+ # Use project filename if name not specified
+ self.name = name or os.path.splitext(os.path.basename(path))[0]
+
+ # Copy passed lists (or set to empty lists)
+ self.dependencies = list(dependencies or [])
+
+ self.entry_type_guid = ENTRY_TYPE_GUIDS['project']
+
+ if config_platform_overrides:
+ self.config_platform_overrides = config_platform_overrides
+ else:
+ self.config_platform_overrides = {}
+ self.fixpath_prefix = fixpath_prefix
+ self.msbuild_toolset = None
+
+ def set_dependencies(self, dependencies):
+ self.dependencies = list(dependencies or [])
+
+ def get_guid(self):
+ if self.guid is None:
+ # Set GUID from path
+ # TODO(rspangler): This is fragile.
+ # 1. We can't just use the project filename sans path, since there could
+ # be multiple projects with the same base name (for example,
+ # foo/unittest.vcproj and bar/unittest.vcproj).
+ # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
+ # GUID is the same whether it's included from base/base.sln or
+ # foo/bar/baz/baz.sln.
+ # 3. The GUID needs to be the same each time this builder is invoked, so
+ # that we don't need to rebuild the solution when the project changes.
+ # 4. We should be able to handle pre-built project files by reading the
+ # GUID from the files.
+ self.guid = MakeGuid(self.name)
+ return self.guid
+
+ def set_msbuild_toolset(self, msbuild_toolset):
+ self.msbuild_toolset = msbuild_toolset
+
+#------------------------------------------------------------------------------
+
+
+class MSVSSolution(object):
+ """Visual Studio solution."""
+
+ def __init__(self, path, version, entries=None, variants=None,
+ websiteProperties=True):
+ """Initializes the solution.
+
+ Args:
+ path: Path to solution file.
+ version: Format version to emit.
+ entries: List of entries in solution. May contain Folder or Project
+        objects. May be None, if the solution is empty.
+ variants: List of build variant strings. If none, a default list will
+ be used.
+ websiteProperties: Flag to decide if the website properties section
+ is generated.
+ """
+ self.path = path
+ self.websiteProperties = websiteProperties
+ self.version = version
+
+ # Copy passed lists (or set to empty lists)
+ self.entries = list(entries or [])
+
+ if variants:
+ # Copy passed list
+ self.variants = variants[:]
+ else:
+ # Use default
+ self.variants = ['Debug|Win32', 'Release|Win32']
+ # TODO(rspangler): Need to be able to handle a mapping of solution config
+ # to project config. Should we be able to handle variants being a dict,
+ # or add a separate variant_map variable? If it's a dict, we can't
+ # guarantee the order of variants since dict keys aren't ordered.
+
+
+ # TODO(rspangler): Automatically write to disk for now; should delay until
+ # node-evaluation time.
+ self.Write()
+
+
+ def Write(self, writer=gyp.common.WriteOnDiff):
+ """Writes the solution file to disk.
+
+ Raises:
+ IndexError: An entry appears multiple times.
+ """
+ # Walk the entry tree and collect all the folders and projects.
+ all_entries = set()
+ entries_to_check = self.entries[:]
+ while entries_to_check:
+ e = entries_to_check.pop(0)
+
+ # If this entry has been visited, nothing to do.
+ if e in all_entries:
+ continue
+
+ all_entries.add(e)
+
+ # If this is a folder, check its entries too.
+ if isinstance(e, MSVSFolder):
+ entries_to_check += e.entries
+
+ all_entries = sorted(all_entries)
+
+ # Open file and print header
+ f = writer(self.path)
+ f.write('Microsoft Visual Studio Solution File, '
+ 'Format Version %s\r\n' % self.version.SolutionVersion())
+ f.write('# %s\r\n' % self.version.Description())
+
+ # Project entries
+ sln_root = os.path.split(self.path)[0]
+ for e in all_entries:
+ relative_path = gyp.common.RelativePath(e.path, sln_root)
+ # msbuild does not accept an empty folder_name.
+ # use '.' in case relative_path is empty.
+ folder_name = relative_path.replace('/', '\\') or '.'
+ f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
+ e.entry_type_guid, # Entry type GUID
+ e.name, # Folder name
+ folder_name, # Folder name (again)
+ e.get_guid(), # Entry GUID
+ ))
+
+ # TODO(rspangler): Need a way to configure this stuff
+ if self.websiteProperties:
+ f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
+ '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
+ '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
+ '\tEndProjectSection\r\n')
+
+ if isinstance(e, MSVSFolder):
+ if e.items:
+ f.write('\tProjectSection(SolutionItems) = preProject\r\n')
+ for i in e.items:
+ f.write('\t\t%s = %s\r\n' % (i, i))
+ f.write('\tEndProjectSection\r\n')
+
+ if isinstance(e, MSVSProject):
+ if e.dependencies:
+ f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
+ for d in e.dependencies:
+ f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
+ f.write('\tEndProjectSection\r\n')
+
+ f.write('EndProject\r\n')
+
+ # Global section
+ f.write('Global\r\n')
+
+ # Configurations (variants)
+ f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
+ for v in self.variants:
+ f.write('\t\t%s = %s\r\n' % (v, v))
+ f.write('\tEndGlobalSection\r\n')
+
+ # Sort config guids for easier diffing of solution changes.
+ config_guids = []
+ config_guids_overrides = {}
+ for e in all_entries:
+ if isinstance(e, MSVSProject):
+ config_guids.append(e.get_guid())
+ config_guids_overrides[e.get_guid()] = e.config_platform_overrides
+ config_guids.sort()
+
+ f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
+ for g in config_guids:
+ for v in self.variants:
+ nv = config_guids_overrides[g].get(v, v)
+ # Pick which project configuration to build for this solution
+ # configuration.
+ f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
+ g, # Project GUID
+ v, # Solution build configuration
+ nv, # Project build config for that solution config
+ ))
+
+ # Enable project in this solution configuration.
+ f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
+ g, # Project GUID
+ v, # Solution build configuration
+ nv, # Project build config for that solution config
+ ))
+ f.write('\tEndGlobalSection\r\n')
+
+ # TODO(rspangler): Should be able to configure this stuff too (though I've
+ # never seen this be any different)
+ f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
+ f.write('\t\tHideSolutionNode = FALSE\r\n')
+ f.write('\tEndGlobalSection\r\n')
+
+ # Folder mappings
+ # Omit this section if there are no folders
+ if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
+ f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
+ for e in all_entries:
+ if not isinstance(e, MSVSFolder):
+ continue # Does not apply to projects, only folders
+ for subentry in e.entries:
+ f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
+ f.write('\tEndGlobalSection\r\n')
+
+ f.write('EndGlobal\r\n')
+
+ f.close()
diff --git a/third_party/python/gyp/pylib/gyp/MSVSProject.py b/third_party/python/gyp/pylib/gyp/MSVSProject.py
new file mode 100644
index 0000000000..db1ceede34
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/MSVSProject.py
@@ -0,0 +1,208 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Visual Studio project reader/writer."""
+
+import gyp.common
+import gyp.easy_xml as easy_xml
+
+#------------------------------------------------------------------------------
+
+
+class Tool(object):
+ """Visual Studio tool."""
+
+ def __init__(self, name, attrs=None):
+ """Initializes the tool.
+
+ Args:
+ name: Tool name.
+ attrs: Dict of tool attributes; may be None.
+ """
+ self._attrs = attrs or {}
+ self._attrs['Name'] = name
+
+ def _GetSpecification(self):
+ """Creates an element for the tool.
+
+ Returns:
+ A new xml.dom.Element for the tool.
+ """
+ return ['Tool', self._attrs]
+
+class Filter(object):
+ """Visual Studio filter - that is, a virtual folder."""
+
+ def __init__(self, name, contents=None):
+ """Initializes the folder.
+
+ Args:
+ name: Filter (folder) name.
+ contents: List of filenames and/or Filter objects contained.
+ """
+ self.name = name
+ self.contents = list(contents or [])
+
+
+#------------------------------------------------------------------------------
+
+
+class Writer(object):
+ """Visual Studio XML project writer."""
+
+ def __init__(self, project_path, version, name, guid=None, platforms=None):
+ """Initializes the project.
+
+ Args:
+ project_path: Path to the project file.
+ version: Format version to emit.
+ name: Name of the project.
+ guid: GUID to use for project, if not None.
+ platforms: Array of string, the supported platforms. If null, ['Win32']
+ """
+ self.project_path = project_path
+ self.version = version
+ self.name = name
+ self.guid = guid
+
+ # Default to Win32 for platforms.
+ if not platforms:
+ platforms = ['Win32']
+
+ # Initialize the specifications of the various sections.
+ self.platform_section = ['Platforms']
+ for platform in platforms:
+ self.platform_section.append(['Platform', {'Name': platform}])
+ self.tool_files_section = ['ToolFiles']
+ self.configurations_section = ['Configurations']
+ self.files_section = ['Files']
+
+ # Keep a dict keyed on filename to speed up access.
+ self.files_dict = dict()
+
+ def AddToolFile(self, path):
+ """Adds a tool file to the project.
+
+ Args:
+ path: Relative path from project to tool file.
+ """
+ self.tool_files_section.append(['ToolFile', {'RelativePath': path}])
+
+ def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
+ """Returns the specification for a configuration.
+
+ Args:
+ config_type: Type of configuration node.
+ config_name: Configuration name.
+ attrs: Dict of configuration attributes; may be None.
+ tools: List of tools (strings or Tool objects); may be None.
+    Returns:
+      The configuration node specification as a nested list, suitable for
+      consumption by easy_xml.
+    """
+ # Handle defaults
+ if not attrs:
+ attrs = {}
+ if not tools:
+ tools = []
+
+ # Add configuration node and its attributes
+ node_attrs = attrs.copy()
+ node_attrs['Name'] = config_name
+ specification = [config_type, node_attrs]
+
+ # Add tool nodes and their attributes
+ if tools:
+ for t in tools:
+ if isinstance(t, Tool):
+ specification.append(t._GetSpecification())
+ else:
+ specification.append(Tool(t)._GetSpecification())
+ return specification
+
+
+ def AddConfig(self, name, attrs=None, tools=None):
+ """Adds a configuration to the project.
+
+ Args:
+ name: Configuration name.
+ attrs: Dict of configuration attributes; may be None.
+ tools: List of tools (strings or Tool objects); may be None.
+ """
+ spec = self._GetSpecForConfiguration('Configuration', name, attrs, tools)
+ self.configurations_section.append(spec)
+
+ def _AddFilesToNode(self, parent, files):
+ """Adds files and/or filters to the parent node.
+
+ Args:
+ parent: Destination node
+ files: A list of Filter objects and/or relative paths to files.
+
+ Will call itself recursively, if the files list contains Filter objects.
+ """
+ for f in files:
+ if isinstance(f, Filter):
+ node = ['Filter', {'Name': f.name}]
+ self._AddFilesToNode(node, f.contents)
+ else:
+ node = ['File', {'RelativePath': f}]
+ self.files_dict[f] = node
+ parent.append(node)
+
+ def AddFiles(self, files):
+ """Adds files to the project.
+
+ Args:
+ files: A list of Filter objects and/or relative paths to files.
+
+ This makes a copy of the file/filter tree at the time of this call. If you
+ later add files to a Filter object which was passed into a previous call
+ to AddFiles(), it will not be reflected in this project.
+ """
+ self._AddFilesToNode(self.files_section, files)
+ # TODO(rspangler) This also doesn't handle adding files to an existing
+ # filter. That is, it doesn't merge the trees.
+
+ def AddFileConfig(self, path, config, attrs=None, tools=None):
+ """Adds a configuration to a file.
+
+ Args:
+ path: Relative path to the file.
+ config: Name of configuration to add.
+ attrs: Dict of configuration attributes; may be None.
+ tools: List of tools (strings or Tool objects); may be None.
+
+ Raises:
+ ValueError: Relative path does not match any file added via AddFiles().
+ """
+ # Find the file node with the right relative path
+ parent = self.files_dict.get(path)
+ if not parent:
+ raise ValueError('AddFileConfig: file "%s" not in project.' % path)
+
+ # Add the config to the file node
+ spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs,
+ tools)
+ parent.append(spec)
+
+ def WriteIfChanged(self):
+ """Writes the project file."""
+ # First create XML content definition
+ content = [
+ 'VisualStudioProject',
+ {'ProjectType': 'Visual C++',
+ 'Version': self.version.ProjectVersion(),
+ 'Name': self.name,
+ 'ProjectGUID': self.guid,
+ 'RootNamespace': self.name,
+ 'Keyword': 'Win32Proj'
+ },
+ self.platform_section,
+ self.tool_files_section,
+ self.configurations_section,
+ ['References'], # empty section
+ self.files_section,
+ ['Globals'] # empty section
+ ]
+ easy_xml.WriteXmlIfChanged(content, self.project_path,
+ encoding="Windows-1252")
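+
+
+# Illustrative usage sketch (hypothetical names, not part of the upstream
+# module); 'version' is assumed to be an MSVS version object providing
+# ProjectVersion():
+#
+#     writer = Writer('foo.vcproj', version, 'foo')
+#     writer.AddConfig('Debug|Win32')
+#     writer.AddFiles(['main.cc', Filter('src', ['util.cc'])])
+#     writer.AddFileConfig('main.cc', 'Debug|Win32')
+#     writer.WriteIfChanged()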
diff --git a/third_party/python/gyp/pylib/gyp/MSVSSettings.py b/third_party/python/gyp/pylib/gyp/MSVSSettings.py
new file mode 100644
index 0000000000..1d2e25ab90
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/MSVSSettings.py
@@ -0,0 +1,1106 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+r"""Code to validate and convert settings of the Microsoft build tools.
+
+This file contains code to validate and convert settings of the Microsoft
+build tools. The functions ConvertToMSBuildSettings(), ValidateMSVSSettings(),
+and ValidateMSBuildSettings() are the entry points.
+
+This file was created by comparing the projects created by Visual Studio 2008
+and Visual Studio 2010 for all available settings through the user interface.
+The MSBuild schemas were also considered. They are typically found in the
+MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild
+"""
+
+from __future__ import print_function
+
+import sys
+import re
+
+try:
+ # basestring was removed in python3.
+ basestring
+except NameError:
+ basestring = str
+
+# Dictionaries of settings validators. The key is the tool name, the value is
+# a dictionary mapping setting names to validation functions.
+_msvs_validators = {}
+_msbuild_validators = {}
+
+
+# A dictionary of settings converters. The key is the tool name, the value is
+# a dictionary mapping setting names to conversion functions.
+_msvs_to_msbuild_converters = {}
+
+
+# Tool name mapping from MSVS to MSBuild.
+_msbuild_name_of_tool = {}
+
+
+class _Tool(object):
+ """Represents a tool used by MSVS or MSBuild.
+
+ Attributes:
+ msvs_name: The name of the tool in MSVS.
+ msbuild_name: The name of the tool in MSBuild.
+ """
+
+ def __init__(self, msvs_name, msbuild_name):
+ self.msvs_name = msvs_name
+ self.msbuild_name = msbuild_name
+
+
+def _AddTool(tool):
+ """Adds a tool to the four dictionaries used to process settings.
+
+ This only defines the tool. Each setting also needs to be added.
+
+ Args:
+ tool: The _Tool object to be added.
+ """
+ _msvs_validators[tool.msvs_name] = {}
+ _msbuild_validators[tool.msbuild_name] = {}
+ _msvs_to_msbuild_converters[tool.msvs_name] = {}
+ _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name
+
+
+def _GetMSBuildToolSettings(msbuild_settings, tool):
+ """Returns an MSBuild tool dictionary. Creates it if needed."""
+ return msbuild_settings.setdefault(tool.msbuild_name, {})
+
+
+class _Type(object):
+ """Type of settings (Base class)."""
+
+ def ValidateMSVS(self, value):
+ """Verifies that the value is legal for MSVS.
+
+ Args:
+ value: the value to check for this type.
+
+ Raises:
+ ValueError if value is not valid for MSVS.
+ """
+
+ def ValidateMSBuild(self, value):
+ """Verifies that the value is legal for MSBuild.
+
+ Args:
+ value: the value to check for this type.
+
+ Raises:
+ ValueError if value is not valid for MSBuild.
+ """
+
+ def ConvertToMSBuild(self, value):
+ """Returns the MSBuild equivalent of the MSVS value given.
+
+ Args:
+ value: the MSVS value to convert.
+
+ Returns:
+ the MSBuild equivalent.
+
+ Raises:
+ ValueError if value is not valid.
+ """
+ return value
+
+
+class _String(_Type):
+ """A setting that's just a string."""
+
+ def ValidateMSVS(self, value):
+ if not isinstance(value, basestring):
+ raise ValueError('expected string; got %r' % value)
+
+ def ValidateMSBuild(self, value):
+ if not isinstance(value, basestring):
+ raise ValueError('expected string; got %r' % value)
+
+ def ConvertToMSBuild(self, value):
+ # Convert the macros
+ return ConvertVCMacrosToMSBuild(value)
+
+
+class _StringList(_Type):
+ """A settings that's a list of strings."""
+
+ def ValidateMSVS(self, value):
+ if not isinstance(value, basestring) and not isinstance(value, list):
+ raise ValueError('expected string list; got %r' % value)
+
+ def ValidateMSBuild(self, value):
+ if not isinstance(value, basestring) and not isinstance(value, list):
+ raise ValueError('expected string list; got %r' % value)
+
+ def ConvertToMSBuild(self, value):
+ # Convert the macros
+ if isinstance(value, list):
+ return [ConvertVCMacrosToMSBuild(i) for i in value]
+ else:
+ return ConvertVCMacrosToMSBuild(value)
+
+
+class _Boolean(_Type):
+ """Boolean settings, can have the values 'false' or 'true'."""
+
+ def _Validate(self, value):
+ if value != 'true' and value != 'false':
+ raise ValueError('expected bool; got %r' % value)
+
+ def ValidateMSVS(self, value):
+ self._Validate(value)
+
+ def ValidateMSBuild(self, value):
+ self._Validate(value)
+
+ def ConvertToMSBuild(self, value):
+ self._Validate(value)
+ return value
+
+
+class _Integer(_Type):
+ """Integer settings."""
+
+ def __init__(self, msbuild_base=10):
+ _Type.__init__(self)
+ self._msbuild_base = msbuild_base
+
+ def ValidateMSVS(self, value):
+ # Try to convert, this will raise ValueError if invalid.
+ self.ConvertToMSBuild(value)
+
+ def ValidateMSBuild(self, value):
+ # Try to convert, this will raise ValueError if invalid.
+ int(value, self._msbuild_base)
+
+ def ConvertToMSBuild(self, value):
+ msbuild_format = (self._msbuild_base == 10) and '%d' or '0x%04x'
+ return msbuild_format % int(value)
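+
+  # Illustrative conversions (not upstream doctests):
+  #   _Integer().ConvertToMSBuild('7')                  -> '7'
+  #   _Integer(msbuild_base=16).ConvertToMSBuild('255') -> '0x00ff'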
+
+
+class _Enumeration(_Type):
+ """Type of settings that is an enumeration.
+
+ In MSVS, the values are indexes like '0', '1', and '2'.
+ MSBuild uses text labels that are more representative, like 'Win32'.
+
+ Constructor args:
+ label_list: an array of MSBuild labels that correspond to the MSVS index.
+ In the rare cases where MSVS has skipped an index value, None is
+ used in the array to indicate the unused spot.
+ new: an array of labels that are new to MSBuild.
+ """
+
+ def __init__(self, label_list, new=None):
+ _Type.__init__(self)
+ self._label_list = label_list
+ self._msbuild_values = set(value for value in label_list
+ if value is not None)
+ if new is not None:
+ self._msbuild_values.update(new)
+
+ def ValidateMSVS(self, value):
+ # Try to convert. It will raise an exception if not valid.
+ self.ConvertToMSBuild(value)
+
+ def ValidateMSBuild(self, value):
+ if value not in self._msbuild_values:
+ raise ValueError('unrecognized enumerated value %s' % value)
+
+ def ConvertToMSBuild(self, value):
+ index = int(value)
+ if index < 0 or index >= len(self._label_list):
+ raise ValueError('index value (%d) not in expected range [0, %d)' %
+ (index, len(self._label_list)))
+ label = self._label_list[index]
+ if label is None:
+ raise ValueError('converted value for %s not specified.' % value)
+ return label
+
+
+# Instantiate the various generic types.
+_boolean = _Boolean()
+_integer = _Integer()
+# For now, we don't do any special validation on these types:
+_string = _String()
+_file_name = _String()
+_folder_name = _String()
+_file_list = _StringList()
+_folder_list = _StringList()
+_string_list = _StringList()
+# Some boolean settings went from numerical values to boolean. The
+# mapping is 0: default, 1: false, 2: true.
+_newly_boolean = _Enumeration(['', 'false', 'true'])
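+# Illustrative conversions (not upstream doctests): with the mapping above,
+# _newly_boolean.ConvertToMSBuild('2') returns 'true',
+# _newly_boolean.ConvertToMSBuild('1') returns 'false', and an out-of-range
+# index such as '3' raises ValueError.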
+
+
+def _Same(tool, name, setting_type):
+ """Defines a setting that has the same name in MSVS and MSBuild.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ name: the name of the setting.
+ setting_type: the type of this setting.
+ """
+ _Renamed(tool, name, name, setting_type)
+
+
+def _Renamed(tool, msvs_name, msbuild_name, setting_type):
+ """Defines a setting for which the name has changed.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ msvs_name: the name of the MSVS setting.
+ msbuild_name: the name of the MSBuild setting.
+ setting_type: the type of this setting.
+ """
+
+ def _Translate(value, msbuild_settings):
+ msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
+ msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value)
+
+ _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS
+ _msbuild_validators[tool.msbuild_name][msbuild_name] = (
+ setting_type.ValidateMSBuild)
+ _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
+
+
+def _Moved(tool, settings_name, msbuild_tool_name, setting_type):
+ _MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name,
+ setting_type)
+
+
+def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name,
+ msbuild_settings_name, setting_type):
+ """Defines a setting that may have moved to a new section.
+
+ Args:
+    tool: a _Tool instance that gives the MSVS and MSBuild names of the tool.
+ msvs_settings_name: the MSVS name of the setting.
+ msbuild_tool_name: the name of the MSBuild tool to place the setting under.
+ msbuild_settings_name: the MSBuild name of the setting.
+ setting_type: the type of this setting.
+ """
+
+ def _Translate(value, msbuild_settings):
+ tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {})
+ tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value)
+
+ _msvs_validators[tool.msvs_name][msvs_settings_name] = (
+ setting_type.ValidateMSVS)
+ validator = setting_type.ValidateMSBuild
+ _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator
+ _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate
+
+
+def _MSVSOnly(tool, name, setting_type):
+ """Defines a setting that is only found in MSVS.
+
+ Args:
+    tool: a _Tool instance that gives the MSVS and MSBuild names of the tool.
+ name: the name of the setting.
+ setting_type: the type of this setting.
+ """
+
+ def _Translate(unused_value, unused_msbuild_settings):
+ # Since this is for MSVS only settings, no translation will happen.
+ pass
+
+ _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS
+ _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
+
+
+def _MSBuildOnly(tool, name, setting_type):
+ """Defines a setting that is only found in MSBuild.
+
+ Args:
+    tool: a _Tool instance that gives the MSVS and MSBuild names of the tool.
+ name: the name of the setting.
+ setting_type: the type of this setting.
+ """
+
+ def _Translate(value, msbuild_settings):
+ # Let msbuild-only properties get translated as-is from msvs_settings.
+ tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {})
+ tool_settings[name] = value
+
+ _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild
+ _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
+
+
+def _ConvertedToAdditionalOption(tool, msvs_name, flag):
+ """Defines a setting that's handled via a command line option in MSBuild.
+
+ Args:
+    tool: a _Tool instance that gives the MSVS and MSBuild names of the tool.
+    msvs_name: the name of the MSVS setting that, when 'true', adds the flag.
+    flag: the flag to append to the MSBuild AdditionalOptions setting.
+ """
+
+ def _Translate(value, msbuild_settings):
+ if value == 'true':
+ tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
+ if 'AdditionalOptions' in tool_settings:
+ new_flags = '%s %s' % (tool_settings['AdditionalOptions'], flag)
+ else:
+ new_flags = flag
+ tool_settings['AdditionalOptions'] = new_flags
+ _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS
+ _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
+
+
+def _CustomGeneratePreprocessedFile(tool, msvs_name):
+ def _Translate(value, msbuild_settings):
+ tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
+ if value == '0':
+ tool_settings['PreprocessToFile'] = 'false'
+ tool_settings['PreprocessSuppressLineNumbers'] = 'false'
+ elif value == '1': # /P
+ tool_settings['PreprocessToFile'] = 'true'
+ tool_settings['PreprocessSuppressLineNumbers'] = 'false'
+ elif value == '2': # /EP /P
+ tool_settings['PreprocessToFile'] = 'true'
+ tool_settings['PreprocessSuppressLineNumbers'] = 'true'
+ else:
+ raise ValueError('value must be one of [0, 1, 2]; got %s' % value)
+ # Create a bogus validator that looks for '0', '1', or '2'
+ msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS
+ _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator
+ msbuild_validator = _boolean.ValidateMSBuild
+ msbuild_tool_validators = _msbuild_validators[tool.msbuild_name]
+ msbuild_tool_validators['PreprocessToFile'] = msbuild_validator
+ msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator
+ _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
+
+
+fix_vc_macro_slashes_regex_list = ('IntDir', 'OutDir')
+fix_vc_macro_slashes_regex = re.compile(
+ r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
+)
+
+# Regular expression to detect keys that were generated by exclusion lists
+_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')
+
+
+def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
+ """Verify that 'setting' is valid if it is generated from an exclusion list.
+
+ If the setting appears to be generated from an exclusion list, the root name
+ is checked.
+
+ Args:
+ setting: A string that is the setting name to validate
+ settings: A dictionary where the keys are valid settings
+ error_msg: The message to emit in the event of error
+ stderr: The stream receiving the error messages.
+ """
+ # This may be unrecognized because it's an exclusion list. If the
+ # setting name has the _excluded suffix, then check the root name.
+ unrecognized = True
+ m = re.match(_EXCLUDED_SUFFIX_RE, setting)
+ if m:
+ root_setting = m.group(1)
+ unrecognized = root_setting not in settings
+
+ if unrecognized:
+ # We don't know this setting. Give a warning.
+ print(error_msg, file=stderr)
+
+
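+# Illustrative sketch only (not part of upstream gyp): keys ending in
+# '_excluded' are accepted by the check above when the root setting name is
+# known; otherwise the supplied warning message is printed.
+def _ExampleExclusionCheck(stderr=sys.stderr):
+  """Runs _ValidateExclusionSetting against a hypothetical settings dict."""
+  known_settings = {'AdditionalDependencies': None}
+  # Recognized root name: nothing is written to stderr.
+  _ValidateExclusionSetting('AdditionalDependencies_excluded', known_settings,
+                            'unused warning text', stderr)
+  # Unknown root name: the warning text is written to stderr.
+  _ValidateExclusionSetting('Bogus_excluded', known_settings,
+                            'Warning: unrecognized setting Bogus_excluded',
+                            stderr)
+
+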
+def FixVCMacroSlashes(s):
+ """Replace macros which have excessive following slashes.
+
+ These macros are known to have a built-in trailing slash. Furthermore, many
+ scripts hiccup on processing paths with extra slashes in the middle.
+
+ This list is probably not exhaustive. Add as needed.
+ """
+ if '$' in s:
+ s = fix_vc_macro_slashes_regex.sub(r'\1', s)
+ return s
+
+
+def ConvertVCMacrosToMSBuild(s):
+ """Convert the the MSVS macros found in the string to the MSBuild equivalent.
+
+ This list is probably not exhaustive. Add as needed.
+ """
+ if '$' in s:
+ replace_map = {
+ '$(ConfigurationName)': '$(Configuration)',
+ '$(InputDir)': '%(RelativeDir)',
+ '$(InputExt)': '%(Extension)',
+ '$(InputFileName)': '%(Filename)%(Extension)',
+ '$(InputName)': '%(Filename)',
+ '$(InputPath)': '%(Identity)',
+ '$(ParentName)': '$(ProjectFileName)',
+ '$(PlatformName)': '$(Platform)',
+ '$(SafeInputName)': '%(Filename)',
+ }
+ for old, new in replace_map.items():
+ s = s.replace(old, new)
+ s = FixVCMacroSlashes(s)
+ return s
+
+
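+# Illustrative sketch only (not part of upstream gyp): a sample run of the
+# macro helpers above. '$(InputFileName)' becomes item metadata, and the
+# redundant slash after '$(IntDir)' is collapsed.
+def _ExampleMacroConversion():
+  """Returns the MSBuild form of a small MSVS-style path expression."""
+  return ConvertVCMacrosToMSBuild('$(IntDir)\\$(InputFileName)')
+  # -> '$(IntDir)%(Filename)%(Extension)'
+
+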
+def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
+ """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).
+
+ Args:
+ msvs_settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+
+ Returns:
+ A dictionary of MSBuild settings. The key is either the MSBuild tool name
+ or the empty string (for the global settings). The values are themselves
+ dictionaries of settings and their values.
+ """
+ msbuild_settings = {}
+ for msvs_tool_name, msvs_tool_settings in msvs_settings.items():
+ if msvs_tool_name in _msvs_to_msbuild_converters:
+ msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name]
+ for msvs_setting, msvs_value in msvs_tool_settings.items():
+ if msvs_setting in msvs_tool:
+ # Invoke the translation function.
+ try:
+ msvs_tool[msvs_setting](msvs_value, msbuild_settings)
+ except ValueError as e:
+ print(('Warning: while converting %s/%s to MSBuild, '
+ '%s' % (msvs_tool_name, msvs_setting, e)),
+ file=stderr)
+ else:
+ _ValidateExclusionSetting(msvs_setting,
+ msvs_tool,
+ ('Warning: unrecognized setting %s/%s '
+ 'while converting to MSBuild.' %
+ (msvs_tool_name, msvs_setting)),
+ stderr)
+ else:
+ print(('Warning: unrecognized tool %s while converting to '
+ 'MSBuild.' % msvs_tool_name), file=stderr)
+ return msbuild_settings
+
+
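+# Illustrative sketch only (not part of upstream gyp): a minimal conversion
+# driven by the directive tables defined later in this module; the unit tests
+# in MSVSSettings_test.py exercise the same translations in more depth.
+def _ExampleConvertToMSBuild():
+  """Converts a tiny MSVS settings dictionary to its MSBuild form."""
+  msvs = {'VCCLCompilerTool': {'WarnAsError': 'true'},
+          'VCLinkerTool': {'ErrorReporting': '1'}}
+  return ConvertToMSBuildSettings(msvs)
+  # -> {'ClCompile': {'TreatWarningAsError': 'true'},
+  #     'Link': {'LinkErrorReporting': 'PromptImmediately'}}
+
+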
+def ValidateMSVSSettings(settings, stderr=sys.stderr):
+ """Validates that the names of the settings are valid for MSVS.
+
+ Args:
+ settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+ """
+ _ValidateSettings(_msvs_validators, settings, stderr)
+
+
+def ValidateMSBuildSettings(settings, stderr=sys.stderr):
+ """Validates that the names of the settings are valid for MSBuild.
+
+ Args:
+ settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+ """
+ _ValidateSettings(_msbuild_validators, settings, stderr)
+
+
+def _ValidateSettings(validators, settings, stderr):
+ """Validates that the settings are valid for MSBuild or MSVS.
+
+ We currently only validate the names of the settings, not their values.
+
+ Args:
+ validators: A dictionary of tools and their validators.
+ settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+ """
+ for tool_name in settings:
+ if tool_name in validators:
+ tool_validators = validators[tool_name]
+ for setting, value in settings[tool_name].items():
+ if setting in tool_validators:
+ try:
+ tool_validators[setting](value)
+ except ValueError as e:
+ print(('Warning: for %s/%s, %s' %
+ (tool_name, setting, e)), file=stderr)
+ else:
+ _ValidateExclusionSetting(setting,
+ tool_validators,
+ ('Warning: unrecognized setting %s/%s' %
+ (tool_name, setting)),
+ stderr)
+
+ else:
+ print(('Warning: unrecognized tool %s' % tool_name), file=stderr)
+
+
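+# Illustrative sketch only (not part of upstream gyp): validation is
+# warning-based, so unknown tools or settings are reported on stderr rather
+# than raising exceptions.
+def _ExampleValidate(stderr=sys.stderr):
+  """Warns about the unknown 'foo' tool; the valid setting passes silently."""
+  ValidateMSVSSettings({'VCCLCompilerTool': {'WarnAsError': 'true'},
+                        'foo': {}}, stderr)
+  # Writes: "Warning: unrecognized tool foo"
+
+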
+# MSVS and MSBuild names of the tools.
+_compile = _Tool('VCCLCompilerTool', 'ClCompile')
+_link = _Tool('VCLinkerTool', 'Link')
+_midl = _Tool('VCMIDLTool', 'Midl')
+_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
+_lib = _Tool('VCLibrarianTool', 'Lib')
+_manifest = _Tool('VCManifestTool', 'Manifest')
+_masm = _Tool('MASM', 'MASM')
+
+
+_AddTool(_compile)
+_AddTool(_link)
+_AddTool(_midl)
+_AddTool(_rc)
+_AddTool(_lib)
+_AddTool(_manifest)
+_AddTool(_masm)
+# Add sections only found in the MSBuild settings.
+_msbuild_validators[''] = {}
+_msbuild_validators['ProjectReference'] = {}
+_msbuild_validators['ManifestResourceCompile'] = {}
+
+# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and
+# ClCompile in MSBuild.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for
+# the schema of the MSBuild ClCompile settings.
+
+# Options that have the same name in MSVS and MSBuild
+_Same(_compile, 'AdditionalIncludeDirectories', _folder_list) # /I
+_Same(_compile, 'AdditionalOptions', _string_list)
+_Same(_compile, 'AdditionalUsingDirectories', _folder_list) # /AI
+_Same(_compile, 'AssemblerListingLocation', _file_name) # /Fa
+_Same(_compile, 'BrowseInformationFile', _file_name)
+_Same(_compile, 'BufferSecurityCheck', _boolean) # /GS
+_Same(_compile, 'DisableLanguageExtensions', _boolean) # /Za
+_Same(_compile, 'DisableSpecificWarnings', _string_list) # /wd
+_Same(_compile, 'EnableFiberSafeOptimizations', _boolean) # /GT
+_Same(_compile, 'EnablePREfast', _boolean) # /analyze Visible='false'
+_Same(_compile, 'ExpandAttributedSource', _boolean) # /Fx
+_Same(_compile, 'FloatingPointExceptions', _boolean) # /fp:except
+_Same(_compile, 'ForceConformanceInForLoopScope', _boolean) # /Zc:forScope
+_Same(_compile, 'ForcedIncludeFiles', _file_list) # /FI
+_Same(_compile, 'ForcedUsingFiles', _file_list) # /FU
+_Same(_compile, 'GenerateXMLDocumentationFiles', _boolean) # /doc
+_Same(_compile, 'IgnoreStandardIncludePath', _boolean) # /X
+_Same(_compile, 'MinimalRebuild', _boolean) # /Gm
+_Same(_compile, 'OmitDefaultLibName', _boolean) # /Zl
+_Same(_compile, 'OmitFramePointers', _boolean) # /Oy
+_Same(_compile, 'PreprocessorDefinitions', _string_list) # /D
+_Same(_compile, 'ProgramDataBaseFileName', _file_name) # /Fd
+_Same(_compile, 'RuntimeTypeInfo', _boolean) # /GR
+_Same(_compile, 'ShowIncludes', _boolean) # /showIncludes
+_Same(_compile, 'SmallerTypeCheck', _boolean) # /RTCc
+_Same(_compile, 'StringPooling', _boolean) # /GF
+_Same(_compile, 'SuppressStartupBanner', _boolean) # /nologo
+_Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean) # /Zc:wchar_t
+_Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean) # /u
+_Same(_compile, 'UndefinePreprocessorDefinitions', _string_list) # /U
+_Same(_compile, 'UseFullPaths', _boolean) # /FC
+_Same(_compile, 'WholeProgramOptimization', _boolean) # /GL
+_Same(_compile, 'XMLDocumentationFileName', _file_name)
+_Same(_compile, 'CompileAsWinRT', _boolean) # /ZW
+
+_Same(_compile, 'AssemblerOutput',
+ _Enumeration(['NoListing',
+ 'AssemblyCode', # /FA
+ 'All', # /FAcs
+ 'AssemblyAndMachineCode', # /FAc
+ 'AssemblyAndSourceCode'])) # /FAs
+_Same(_compile, 'BasicRuntimeChecks',
+ _Enumeration(['Default',
+ 'StackFrameRuntimeCheck', # /RTCs
+ 'UninitializedLocalUsageCheck', # /RTCu
+ 'EnableFastChecks'])) # /RTC1
+_Same(_compile, 'BrowseInformation',
+ _Enumeration(['false',
+ 'true', # /FR
+ 'true'])) # /Fr
+_Same(_compile, 'CallingConvention',
+ _Enumeration(['Cdecl', # /Gd
+ 'FastCall', # /Gr
+ 'StdCall', # /Gz
+ 'VectorCall'])) # /Gv
+_Same(_compile, 'CompileAs',
+ _Enumeration(['Default',
+ 'CompileAsC', # /TC
+ 'CompileAsCpp'])) # /TP
+_Same(_compile, 'DebugInformationFormat',
+ _Enumeration(['', # Disabled
+ 'OldStyle', # /Z7
+ None,
+ 'ProgramDatabase', # /Zi
+ 'EditAndContinue'])) # /ZI
+_Same(_compile, 'EnableEnhancedInstructionSet',
+ _Enumeration(['NotSet',
+ 'StreamingSIMDExtensions', # /arch:SSE
+ 'StreamingSIMDExtensions2', # /arch:SSE2
+ 'AdvancedVectorExtensions', # /arch:AVX (vs2012+)
+ 'NoExtensions', # /arch:IA32 (vs2012+)
+ # This one only exists in the new msbuild format.
+ 'AdvancedVectorExtensions2', # /arch:AVX2 (vs2013r2+)
+ ]))
+_Same(_compile, 'ErrorReporting',
+ _Enumeration(['None', # /errorReport:none
+ 'Prompt', # /errorReport:prompt
+ 'Queue'], # /errorReport:queue
+                       new=['Send'])) # /errorReport:send
+_Same(_compile, 'ExceptionHandling',
+ _Enumeration(['false',
+ 'Sync', # /EHsc
+ 'Async'], # /EHa
+ new=['SyncCThrow'])) # /EHs
+_Same(_compile, 'FavorSizeOrSpeed',
+ _Enumeration(['Neither',
+ 'Speed', # /Ot
+ 'Size'])) # /Os
+_Same(_compile, 'FloatingPointModel',
+ _Enumeration(['Precise', # /fp:precise
+ 'Strict', # /fp:strict
+ 'Fast'])) # /fp:fast
+_Same(_compile, 'InlineFunctionExpansion',
+ _Enumeration(['Default',
+ 'OnlyExplicitInline', # /Ob1
+ 'AnySuitable'], # /Ob2
+ new=['Disabled'])) # /Ob0
+_Same(_compile, 'Optimization',
+ _Enumeration(['Disabled', # /Od
+ 'MinSpace', # /O1
+ 'MaxSpeed', # /O2
+ 'Full'])) # /Ox
+_Same(_compile, 'RuntimeLibrary',
+ _Enumeration(['MultiThreaded', # /MT
+ 'MultiThreadedDebug', # /MTd
+ 'MultiThreadedDLL', # /MD
+ 'MultiThreadedDebugDLL'])) # /MDd
+_Same(_compile, 'StructMemberAlignment',
+ _Enumeration(['Default',
+ '1Byte', # /Zp1
+ '2Bytes', # /Zp2
+ '4Bytes', # /Zp4
+ '8Bytes', # /Zp8
+ '16Bytes'])) # /Zp16
+_Same(_compile, 'WarningLevel',
+ _Enumeration(['TurnOffAllWarnings', # /W0
+ 'Level1', # /W1
+ 'Level2', # /W2
+ 'Level3', # /W3
+ 'Level4'], # /W4
+ new=['EnableAllWarnings'])) # /Wall
+
+# Options found in MSVS that have been renamed in MSBuild.
+_Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking',
+ _boolean) # /Gy
+_Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions',
+ _boolean) # /Oi
+_Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean) # /C
+_Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name) # /Fo
+_Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean) # /openmp
+_Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile',
+ _file_name) # Used with /Yc and /Yu
+_Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile',
+ _file_name) # /Fp
+_Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader',
+ _Enumeration(['NotUsing', # VS recognized '' for this value too.
+ 'Create', # /Yc
+ 'Use'])) # /Yu
+_Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean) # /WX
+
+_ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J')
+
+# MSVS options not found in MSBuild.
+_MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean)
+_MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean)
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_compile, 'BuildingInIDE', _boolean)
+_MSBuildOnly(_compile, 'CompileAsManaged',
+ _Enumeration([], new=['false',
+ 'true'])) # /clr
+_MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch
+_MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP
+_MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi
+_MSBuildOnly(_compile, 'ProcessorNumber', _integer) # the number of processors
+_MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name)
+_MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list) # /we
+_MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean) # /FAu
+
+# Defines a setting that needs very customized processing
+_CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile')
+
+
+# Directives for converting MSVS VCLinkerTool to MSBuild Link.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for
+# the schema of the MSBuild Link settings.
+
+# Options that have the same name in MSVS and MSBuild
+_Same(_link, 'AdditionalDependencies', _file_list)
+_Same(_link, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH
+# /MANIFESTDEPENDENCY:
+_Same(_link, 'AdditionalManifestDependencies', _file_list)
+_Same(_link, 'AdditionalOptions', _string_list)
+_Same(_link, 'AddModuleNamesToAssembly', _file_list) # /ASSEMBLYMODULE
+_Same(_link, 'AllowIsolation', _boolean) # /ALLOWISOLATION
+_Same(_link, 'AssemblyLinkResource', _file_list) # /ASSEMBLYLINKRESOURCE
+_Same(_link, 'BaseAddress', _string) # /BASE
+_Same(_link, 'CLRUnmanagedCodeCheck', _boolean) # /CLRUNMANAGEDCODECHECK
+_Same(_link, 'DelayLoadDLLs', _file_list) # /DELAYLOAD
+_Same(_link, 'DelaySign', _boolean) # /DELAYSIGN
+_Same(_link, 'EmbedManagedResourceFile', _file_list) # /ASSEMBLYRESOURCE
+_Same(_link, 'EnableUAC', _boolean) # /MANIFESTUAC
+_Same(_link, 'EntryPointSymbol', _string) # /ENTRY
+_Same(_link, 'ForceSymbolReferences', _file_list) # /INCLUDE
+_Same(_link, 'FunctionOrder', _file_name) # /ORDER
+_Same(_link, 'GenerateDebugInformation', _boolean) # /DEBUG
+_Same(_link, 'GenerateMapFile', _boolean) # /MAP
+_Same(_link, 'HeapCommitSize', _string)
+_Same(_link, 'HeapReserveSize', _string) # /HEAP
+_Same(_link, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB
+_Same(_link, 'IgnoreEmbeddedIDL', _boolean) # /IGNOREIDL
+_Same(_link, 'ImportLibrary', _file_name) # /IMPLIB
+_Same(_link, 'KeyContainer', _file_name) # /KEYCONTAINER
+_Same(_link, 'KeyFile', _file_name) # /KEYFILE
+_Same(_link, 'ManifestFile', _file_name) # /ManifestFile
+_Same(_link, 'MapExports', _boolean) # /MAPINFO:EXPORTS
+_Same(_link, 'MapFileName', _file_name)
+_Same(_link, 'MergedIDLBaseFileName', _file_name) # /IDLOUT
+_Same(_link, 'MergeSections', _string) # /MERGE
+_Same(_link, 'MidlCommandFile', _file_name) # /MIDL
+_Same(_link, 'ModuleDefinitionFile', _file_name) # /DEF
+_Same(_link, 'OutputFile', _file_name) # /OUT
+_Same(_link, 'PerUserRedirection', _boolean)
+_Same(_link, 'Profile', _boolean) # /PROFILE
+_Same(_link, 'ProfileGuidedDatabase', _file_name) # /PGD
+_Same(_link, 'ProgramDatabaseFile', _file_name) # /PDB
+_Same(_link, 'RegisterOutput', _boolean)
+_Same(_link, 'SetChecksum', _boolean) # /RELEASE
+_Same(_link, 'StackCommitSize', _string)
+_Same(_link, 'StackReserveSize', _string) # /STACK
+_Same(_link, 'StripPrivateSymbols', _file_name) # /PDBSTRIPPED
+_Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean) # /DELAY:UNLOAD
+_Same(_link, 'SuppressStartupBanner', _boolean) # /NOLOGO
+_Same(_link, 'SwapRunFromCD', _boolean) # /SWAPRUN:CD
+_Same(_link, 'TurnOffAssemblyGeneration', _boolean) # /NOASSEMBLY
+_Same(_link, 'TypeLibraryFile', _file_name) # /TLBOUT
+_Same(_link, 'TypeLibraryResourceID', _integer) # /TLBID
+_Same(_link, 'UACUIAccess', _boolean) # /uiAccess='true'
+_Same(_link, 'Version', _string) # /VERSION
+
+_Same(_link, 'EnableCOMDATFolding', _newly_boolean) # /OPT:ICF
+_Same(_link, 'FixedBaseAddress', _newly_boolean) # /FIXED
+_Same(_link, 'LargeAddressAware', _newly_boolean) # /LARGEADDRESSAWARE
+_Same(_link, 'OptimizeReferences', _newly_boolean) # /OPT:REF
+_Same(_link, 'RandomizedBaseAddress', _newly_boolean) # /DYNAMICBASE
+_Same(_link, 'TerminalServerAware', _newly_boolean) # /TSAWARE
+
+_subsystem_enumeration = _Enumeration(
+ ['NotSet',
+ 'Console', # /SUBSYSTEM:CONSOLE
+ 'Windows', # /SUBSYSTEM:WINDOWS
+ 'Native', # /SUBSYSTEM:NATIVE
+ 'EFI Application', # /SUBSYSTEM:EFI_APPLICATION
+ 'EFI Boot Service Driver', # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER
+ 'EFI ROM', # /SUBSYSTEM:EFI_ROM
+ 'EFI Runtime', # /SUBSYSTEM:EFI_RUNTIME_DRIVER
+ 'WindowsCE'], # /SUBSYSTEM:WINDOWSCE
+ new=['POSIX']) # /SUBSYSTEM:POSIX
+
+_target_machine_enumeration = _Enumeration(
+ ['NotSet',
+ 'MachineX86', # /MACHINE:X86
+ None,
+ 'MachineARM', # /MACHINE:ARM
+ 'MachineEBC', # /MACHINE:EBC
+ 'MachineIA64', # /MACHINE:IA64
+ None,
+ 'MachineMIPS', # /MACHINE:MIPS
+ 'MachineMIPS16', # /MACHINE:MIPS16
+ 'MachineMIPSFPU', # /MACHINE:MIPSFPU
+ 'MachineMIPSFPU16', # /MACHINE:MIPSFPU16
+ None,
+ None,
+ None,
+ 'MachineSH4', # /MACHINE:SH4
+ None,
+ 'MachineTHUMB', # /MACHINE:THUMB
+ 'MachineX64']) # /MACHINE:X64
+
+_Same(_link, 'AssemblyDebug',
+ _Enumeration(['',
+ 'true', # /ASSEMBLYDEBUG
+ 'false'])) # /ASSEMBLYDEBUG:DISABLE
+_Same(_link, 'CLRImageType',
+ _Enumeration(['Default',
+ 'ForceIJWImage', # /CLRIMAGETYPE:IJW
+                'ForcePureILImage', # /CLRIMAGETYPE:PURE
+                'ForceSafeILImage'])) # /CLRIMAGETYPE:SAFE
+_Same(_link, 'CLRThreadAttribute',
+ _Enumeration(['DefaultThreadingAttribute', # /CLRTHREADATTRIBUTE:NONE
+ 'MTAThreadingAttribute', # /CLRTHREADATTRIBUTE:MTA
+ 'STAThreadingAttribute'])) # /CLRTHREADATTRIBUTE:STA
+_Same(_link, 'DataExecutionPrevention',
+ _Enumeration(['',
+ 'false', # /NXCOMPAT:NO
+ 'true'])) # /NXCOMPAT
+_Same(_link, 'Driver',
+ _Enumeration(['NotSet',
+ 'Driver', # /Driver
+ 'UpOnly', # /DRIVER:UPONLY
+ 'WDM'])) # /DRIVER:WDM
+_Same(_link, 'LinkTimeCodeGeneration',
+ _Enumeration(['Default',
+ 'UseLinkTimeCodeGeneration', # /LTCG
+ 'PGInstrument', # /LTCG:PGInstrument
+ 'PGOptimization', # /LTCG:PGOptimize
+ 'PGUpdate'])) # /LTCG:PGUpdate
+_Same(_link, 'ShowProgress',
+ _Enumeration(['NotSet',
+ 'LinkVerbose', # /VERBOSE
+ 'LinkVerboseLib'], # /VERBOSE:Lib
+ new=['LinkVerboseICF', # /VERBOSE:ICF
+ 'LinkVerboseREF', # /VERBOSE:REF
+ 'LinkVerboseSAFESEH', # /VERBOSE:SAFESEH
+ 'LinkVerboseCLR'])) # /VERBOSE:CLR
+_Same(_link, 'SubSystem', _subsystem_enumeration)
+_Same(_link, 'TargetMachine', _target_machine_enumeration)
+_Same(_link, 'UACExecutionLevel',
+ _Enumeration(['AsInvoker', # /level='asInvoker'
+ 'HighestAvailable', # /level='highestAvailable'
+ 'RequireAdministrator'])) # /level='requireAdministrator'
+_Same(_link, 'MinimumRequiredVersion', _string)
+_Same(_link, 'TreatLinkerWarningAsErrors', _boolean) # /WX
+
+
+# Options found in MSVS that have been renamed in MSBuild.
+_Renamed(_link, 'ErrorReporting', 'LinkErrorReporting',
+ _Enumeration(['NoErrorReport', # /ERRORREPORT:NONE
+ 'PromptImmediately', # /ERRORREPORT:PROMPT
+ 'QueueForNextLogin'], # /ERRORREPORT:QUEUE
+ new=['SendErrorReport'])) # /ERRORREPORT:SEND
+_Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries',
+ _file_list) # /NODEFAULTLIB
+_Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean) # /NOENTRY
+_Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean) # /SWAPRUN:NET
+
+_Moved(_link, 'GenerateManifest', '', _boolean)
+_Moved(_link, 'IgnoreImportLibrary', '', _boolean)
+_Moved(_link, 'LinkIncremental', '', _newly_boolean)
+_Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
+_Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)
+
+# MSVS options not found in MSBuild.
+_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
+_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
+_MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean) # /SAFESEH
+_MSBuildOnly(_link, 'LinkDLL', _boolean) # /DLL Visible='false'
+_MSBuildOnly(_link, 'LinkStatus', _boolean) # /LTCG:STATUS
+_MSBuildOnly(_link, 'PreventDllBinding', _boolean) # /ALLOWBIND
+_MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean) # /DELAY:NOBIND
+_MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
+_MSBuildOnly(_link, 'MSDOSStubFileName', _file_name) # /STUB Visible='false'
+_MSBuildOnly(_link, 'SectionAlignment', _integer) # /ALIGN
+_MSBuildOnly(_link, 'SpecifySectionAttributes', _string) # /SECTION
+_MSBuildOnly(_link, 'ForceFileOutput',
+ _Enumeration([], new=['Enabled', # /FORCE
+ # /FORCE:MULTIPLE
+ 'MultiplyDefinedSymbolOnly',
+ 'UndefinedSymbolOnly'])) # /FORCE:UNRESOLVED
+_MSBuildOnly(_link, 'CreateHotPatchableImage',
+ _Enumeration([], new=['Enabled', # /FUNCTIONPADMIN
+ 'X86Image', # /FUNCTIONPADMIN:5
+ 'X64Image', # /FUNCTIONPADMIN:6
+ 'ItaniumImage'])) # /FUNCTIONPADMIN:16
+_MSBuildOnly(_link, 'CLRSupportLastError',
+ _Enumeration([], new=['Enabled', # /CLRSupportLastError
+ 'Disabled', # /CLRSupportLastError:NO
+ # /CLRSupportLastError:SYSTEMDLL
+ 'SystemDlls']))
+
+
+# Directives for converting VCResourceCompilerTool to ResourceCompile.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for
+# the schema of the MSBuild ResourceCompile settings.
+
+_Same(_rc, 'AdditionalOptions', _string_list)
+_Same(_rc, 'AdditionalIncludeDirectories', _folder_list) # /I
+_Same(_rc, 'Culture', _Integer(msbuild_base=16))
+_Same(_rc, 'IgnoreStandardIncludePath', _boolean) # /X
+_Same(_rc, 'PreprocessorDefinitions', _string_list) # /D
+_Same(_rc, 'ResourceOutputFileName', _string) # /fo
+_Same(_rc, 'ShowProgress', _boolean) # /v
+# There is no UI in Visual Studio 2008 to set the following properties.
+# However, they are found in CL and other tools. Include them here for
+# completeness, as they are very likely to have the same usage pattern.
+_Same(_rc, 'SuppressStartupBanner', _boolean) # /nologo
+_Same(_rc, 'UndefinePreprocessorDefinitions', _string_list) # /u
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_rc, 'NullTerminateStrings', _boolean) # /n
+_MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name)
+
+
+# Directives for converting VCMIDLTool to Midl.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for
+# the schema of the MSBuild Midl settings.
+
+_Same(_midl, 'AdditionalIncludeDirectories', _folder_list) # /I
+_Same(_midl, 'AdditionalOptions', _string_list)
+_Same(_midl, 'CPreprocessOptions', _string) # /cpp_opt
+_Same(_midl, 'ErrorCheckAllocations', _boolean) # /error allocation
+_Same(_midl, 'ErrorCheckBounds', _boolean) # /error bounds_check
+_Same(_midl, 'ErrorCheckEnumRange', _boolean) # /error enum
+_Same(_midl, 'ErrorCheckRefPointers', _boolean) # /error ref
+_Same(_midl, 'ErrorCheckStubData', _boolean) # /error stub_data
+_Same(_midl, 'GenerateStublessProxies', _boolean) # /Oicf
+_Same(_midl, 'GenerateTypeLibrary', _boolean)
+_Same(_midl, 'HeaderFileName', _file_name) # /h
+_Same(_midl, 'IgnoreStandardIncludePath', _boolean) # /no_def_idir
+_Same(_midl, 'InterfaceIdentifierFileName', _file_name) # /iid
+_Same(_midl, 'MkTypLibCompatible', _boolean) # /mktyplib203
+_Same(_midl, 'OutputDirectory', _string) # /out
+_Same(_midl, 'PreprocessorDefinitions', _string_list) # /D
+_Same(_midl, 'ProxyFileName', _file_name) # /proxy
+_Same(_midl, 'RedirectOutputAndErrors', _file_name) # /o
+_Same(_midl, 'SuppressStartupBanner', _boolean) # /nologo
+_Same(_midl, 'TypeLibraryName', _file_name) # /tlb
+_Same(_midl, 'UndefinePreprocessorDefinitions', _string_list) # /U
+_Same(_midl, 'WarnAsError', _boolean) # /WX
+
+_Same(_midl, 'DefaultCharType',
+ _Enumeration(['Unsigned', # /char unsigned
+ 'Signed', # /char signed
+ 'Ascii'])) # /char ascii7
+_Same(_midl, 'TargetEnvironment',
+ _Enumeration(['NotSet',
+ 'Win32', # /env win32
+ 'Itanium', # /env ia64
+ 'X64'])) # /env x64
+_Same(_midl, 'EnableErrorChecks',
+ _Enumeration(['EnableCustom',
+ 'None', # /error none
+ 'All'])) # /error all
+_Same(_midl, 'StructMemberAlignment',
+ _Enumeration(['NotSet',
+ '1', # Zp1
+ '2', # Zp2
+ '4', # Zp4
+ '8'])) # Zp8
+_Same(_midl, 'WarningLevel',
+ _Enumeration(['0', # /W0
+ '1', # /W1
+ '2', # /W2
+ '3', # /W3
+ '4'])) # /W4
+
+_Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name) # /dlldata
+_Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters',
+ _boolean) # /robust
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean) # /app_config
+_MSBuildOnly(_midl, 'ClientStubFile', _file_name) # /cstub
+_MSBuildOnly(_midl, 'GenerateClientFiles',
+ _Enumeration([], new=['Stub', # /client stub
+ 'None'])) # /client none
+_MSBuildOnly(_midl, 'GenerateServerFiles',
+             _Enumeration([], new=['Stub', # /server stub
+                                   'None'])) # /server none
+_MSBuildOnly(_midl, 'LocaleID', _integer) # /lcid DECIMAL
+_MSBuildOnly(_midl, 'ServerStubFile', _file_name) # /sstub
+_MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean) # /no_warn
+_MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name)
+_MSBuildOnly(_midl, 'TypeLibFormat',
+ _Enumeration([], new=['NewFormat', # /newtlb
+ 'OldFormat'])) # /oldtlb
+
+
+# Directives for converting VCLibrarianTool to Lib.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for
+# the schema of the MSBuild Lib settings.
+
+_Same(_lib, 'AdditionalDependencies', _file_list)
+_Same(_lib, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH
+_Same(_lib, 'AdditionalOptions', _string_list)
+_Same(_lib, 'ExportNamedFunctions', _string_list) # /EXPORT
+_Same(_lib, 'ForceSymbolReferences', _string) # /INCLUDE
+_Same(_lib, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB
+_Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list) # /NODEFAULTLIB
+_Same(_lib, 'ModuleDefinitionFile', _file_name) # /DEF
+_Same(_lib, 'OutputFile', _file_name) # /OUT
+_Same(_lib, 'SuppressStartupBanner', _boolean) # /NOLOGO
+_Same(_lib, 'UseUnicodeResponseFiles', _boolean)
+_Same(_lib, 'LinkTimeCodeGeneration', _boolean) # /LTCG
+_Same(_lib, 'TargetMachine', _target_machine_enumeration)
+
+# TODO(jeanluc) _link defines the same value that gets moved to
+# ProjectReference. We may want to validate that they are consistent.
+_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
+
+_MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false'
+_MSBuildOnly(_lib, 'ErrorReporting',
+ _Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT
+ 'QueueForNextLogin', # /ERRORREPORT:QUEUE
+ 'SendErrorReport', # /ERRORREPORT:SEND
+ 'NoErrorReport'])) # /ERRORREPORT:NONE
+_MSBuildOnly(_lib, 'MinimumRequiredVersion', _string)
+_MSBuildOnly(_lib, 'Name', _file_name) # /NAME
+_MSBuildOnly(_lib, 'RemoveObjects', _file_list) # /REMOVE
+_MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
+_MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
+_MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean) # /WX
+_MSBuildOnly(_lib, 'Verbose', _boolean)
+
+
+# Directives for converting VCManifestTool to Mt.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for
+# the schema of the MSBuild Manifest settings.
+
+# Options that have the same name in MSVS and MSBuild
+_Same(_manifest, 'AdditionalManifestFiles', _file_list) # /manifest
+_Same(_manifest, 'AdditionalOptions', _string_list)
+_Same(_manifest, 'AssemblyIdentity', _string) # /identity:
+_Same(_manifest, 'ComponentFileName', _file_name) # /dll
+_Same(_manifest, 'GenerateCatalogFiles', _boolean) # /makecdfs
+_Same(_manifest, 'InputResourceManifests', _string) # /inputresource
+_Same(_manifest, 'OutputManifestFile', _file_name) # /out
+_Same(_manifest, 'RegistrarScriptFile', _file_name) # /rgs
+_Same(_manifest, 'ReplacementsFile', _file_name) # /replacements
+_Same(_manifest, 'SuppressStartupBanner', _boolean) # /nologo
+_Same(_manifest, 'TypeLibraryFile', _file_name) # /tlb:
+_Same(_manifest, 'UpdateFileHashes', _boolean) # /hashupdate
+_Same(_manifest, 'UpdateFileHashesSearchPath', _file_name)
+_Same(_manifest, 'VerboseOutput', _boolean) # /verbose
+
+# Options that have moved location.
+_MovedAndRenamed(_manifest, 'ManifestResourceFile',
+ 'ManifestResourceCompile',
+ 'ResourceOutputFileName',
+ _file_name)
+_Moved(_manifest, 'EmbedManifest', '', _boolean)
+
+# MSVS options not found in MSBuild.
+_MSVSOnly(_manifest, 'DependencyInformationFile', _file_name)
+_MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean)
+_MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean)
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean)
+_MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean) # /category
+_MSBuildOnly(_manifest, 'ManifestFromManagedAssembly',
+ _file_name) # /managedassemblyname
+_MSBuildOnly(_manifest, 'OutputResourceManifests', _string) # /outputresource
+_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean) # /nodependency
+_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)
+
+
+# Directives for MASM.
+# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the
+# MSBuild MASM settings.
+
+# Options that have the same name in MSVS and MSBuild.
+_Same(_masm, 'UseSafeExceptionHandlers', _boolean) # /safeseh
diff --git a/third_party/python/gyp/pylib/gyp/MSVSSettings_test.py b/third_party/python/gyp/pylib/gyp/MSVSSettings_test.py
new file mode 100755
index 0000000000..73ed25e27d
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/MSVSSettings_test.py
@@ -0,0 +1,1486 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the MSVSSettings.py file."""
+
+try:
+ from StringIO import StringIO
+except ImportError:
+ from io import StringIO
+import unittest
+import gyp.MSVSSettings as MSVSSettings
+
+
+class TestSequenceFunctions(unittest.TestCase):
+
+ def setUp(self):
+ self.stderr = StringIO()
+
+ def _ExpectedWarnings(self, expected):
+ """Compares recorded lines to expected warnings."""
+ self.stderr.seek(0)
+ actual = self.stderr.read().split('\n')
+ actual = [line for line in actual if line]
+ self.assertEqual(sorted(expected), sorted(actual))
+
+ def testValidateMSVSSettings_tool_names(self):
+ """Tests that only MSVS tool names are allowed."""
+ MSVSSettings.ValidateMSVSSettings(
+ {'VCCLCompilerTool': {},
+ 'VCLinkerTool': {},
+ 'VCMIDLTool': {},
+ 'foo': {},
+ 'VCResourceCompilerTool': {},
+ 'VCLibrarianTool': {},
+ 'VCManifestTool': {},
+ 'ClCompile': {}},
+ self.stderr)
+ self._ExpectedWarnings([
+ 'Warning: unrecognized tool foo',
+ 'Warning: unrecognized tool ClCompile'])
+
+ def testValidateMSVSSettings_settings(self):
+ """Tests that for invalid MSVS settings."""
+ MSVSSettings.ValidateMSVSSettings(
+ {'VCCLCompilerTool': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2',
+ 'AdditionalOptions': ['string1', 'string2'],
+ 'AdditionalUsingDirectories': 'folder1;folder2',
+ 'AssemblerListingLocation': 'a_file_name',
+ 'AssemblerOutput': '0',
+ 'BasicRuntimeChecks': '5',
+ 'BrowseInformation': 'fdkslj',
+ 'BrowseInformationFile': 'a_file_name',
+ 'BufferSecurityCheck': 'true',
+ 'CallingConvention': '-1',
+ 'CompileAs': '1',
+ 'DebugInformationFormat': '2',
+ 'DefaultCharIsUnsigned': 'true',
+ 'Detect64BitPortabilityProblems': 'true',
+ 'DisableLanguageExtensions': 'true',
+ 'DisableSpecificWarnings': 'string1;string2',
+ 'EnableEnhancedInstructionSet': '1',
+ 'EnableFiberSafeOptimizations': 'true',
+ 'EnableFunctionLevelLinking': 'true',
+ 'EnableIntrinsicFunctions': 'true',
+ 'EnablePREfast': 'true',
+ 'Enableprefast': 'bogus',
+ 'ErrorReporting': '1',
+ 'ExceptionHandling': '1',
+ 'ExpandAttributedSource': 'true',
+ 'FavorSizeOrSpeed': '1',
+ 'FloatingPointExceptions': 'true',
+ 'FloatingPointModel': '1',
+ 'ForceConformanceInForLoopScope': 'true',
+ 'ForcedIncludeFiles': 'file1;file2',
+ 'ForcedUsingFiles': 'file1;file2',
+ 'GeneratePreprocessedFile': '1',
+ 'GenerateXMLDocumentationFiles': 'true',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InlineFunctionExpansion': '1',
+ 'KeepComments': 'true',
+ 'MinimalRebuild': 'true',
+ 'ObjectFile': 'a_file_name',
+ 'OmitDefaultLibName': 'true',
+ 'OmitFramePointers': 'true',
+ 'OpenMP': 'true',
+ 'Optimization': '1',
+ 'PrecompiledHeaderFile': 'a_file_name',
+ 'PrecompiledHeaderThrough': 'a_file_name',
+ 'PreprocessorDefinitions': 'string1;string2',
+ 'ProgramDataBaseFileName': 'a_file_name',
+ 'RuntimeLibrary': '1',
+ 'RuntimeTypeInfo': 'true',
+ 'ShowIncludes': 'true',
+ 'SmallerTypeCheck': 'true',
+ 'StringPooling': 'true',
+ 'StructMemberAlignment': '1',
+ 'SuppressStartupBanner': 'true',
+ 'TreatWChar_tAsBuiltInType': 'true',
+ 'UndefineAllPreprocessorDefinitions': 'true',
+ 'UndefinePreprocessorDefinitions': 'string1;string2',
+ 'UseFullPaths': 'true',
+ 'UsePrecompiledHeader': '1',
+ 'UseUnicodeResponseFiles': 'true',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '1',
+ 'WholeProgramOptimization': 'true',
+ 'XMLDocumentationFileName': 'a_file_name',
+ 'ZZXYZ': 'bogus'},
+ 'VCLinkerTool': {
+ 'AdditionalDependencies': 'file1;file2',
+ 'AdditionalDependencies_excluded': 'file3',
+ 'AdditionalLibraryDirectories': 'folder1;folder2',
+ 'AdditionalManifestDependencies': 'file1;file2',
+ 'AdditionalOptions': 'a string1',
+ 'AddModuleNamesToAssembly': 'file1;file2',
+ 'AllowIsolation': 'true',
+ 'AssemblyDebug': '2',
+ 'AssemblyLinkResource': 'file1;file2',
+ 'BaseAddress': 'a string1',
+ 'CLRImageType': '2',
+ 'CLRThreadAttribute': '2',
+ 'CLRUnmanagedCodeCheck': 'true',
+ 'DataExecutionPrevention': '2',
+ 'DelayLoadDLLs': 'file1;file2',
+ 'DelaySign': 'true',
+ 'Driver': '2',
+ 'EmbedManagedResourceFile': 'file1;file2',
+ 'EnableCOMDATFolding': '2',
+ 'EnableUAC': 'true',
+ 'EntryPointSymbol': 'a string1',
+ 'ErrorReporting': '2',
+ 'FixedBaseAddress': '2',
+ 'ForceSymbolReferences': 'file1;file2',
+ 'FunctionOrder': 'a_file_name',
+ 'GenerateDebugInformation': 'true',
+ 'GenerateManifest': 'true',
+ 'GenerateMapFile': 'true',
+ 'HeapCommitSize': 'a string1',
+ 'HeapReserveSize': 'a string1',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreDefaultLibraryNames': 'file1;file2',
+ 'IgnoreEmbeddedIDL': 'true',
+ 'IgnoreImportLibrary': 'true',
+ 'ImportLibrary': 'a_file_name',
+ 'KeyContainer': 'a_file_name',
+ 'KeyFile': 'a_file_name',
+ 'LargeAddressAware': '2',
+ 'LinkIncremental': '2',
+ 'LinkLibraryDependencies': 'true',
+ 'LinkTimeCodeGeneration': '2',
+ 'ManifestFile': 'a_file_name',
+ 'MapExports': 'true',
+ 'MapFileName': 'a_file_name',
+ 'MergedIDLBaseFileName': 'a_file_name',
+ 'MergeSections': 'a string1',
+ 'MidlCommandFile': 'a_file_name',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'OptimizeForWindows98': '1',
+ 'OptimizeReferences': '2',
+ 'OutputFile': 'a_file_name',
+ 'PerUserRedirection': 'true',
+ 'Profile': 'true',
+ 'ProfileGuidedDatabase': 'a_file_name',
+ 'ProgramDatabaseFile': 'a_file_name',
+ 'RandomizedBaseAddress': '2',
+ 'RegisterOutput': 'true',
+ 'ResourceOnlyDLL': 'true',
+ 'SetChecksum': 'true',
+ 'ShowProgress': '2',
+ 'StackCommitSize': 'a string1',
+ 'StackReserveSize': 'a string1',
+ 'StripPrivateSymbols': 'a_file_name',
+ 'SubSystem': '2',
+ 'SupportUnloadOfDelayLoadedDLL': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'SwapRunFromCD': 'true',
+ 'SwapRunFromNet': 'true',
+ 'TargetMachine': '2',
+ 'TerminalServerAware': '2',
+ 'TurnOffAssemblyGeneration': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'TypeLibraryResourceID': '33',
+ 'UACExecutionLevel': '2',
+ 'UACUIAccess': 'true',
+ 'UseLibraryDependencyInputs': 'true',
+ 'UseUnicodeResponseFiles': 'true',
+ 'Version': 'a string1'},
+ 'VCMIDLTool': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2',
+ 'AdditionalOptions': 'a string1',
+ 'CPreprocessOptions': 'a string1',
+ 'DefaultCharType': '1',
+ 'DLLDataFileName': 'a_file_name',
+ 'EnableErrorChecks': '1',
+ 'ErrorCheckAllocations': 'true',
+ 'ErrorCheckBounds': 'true',
+ 'ErrorCheckEnumRange': 'true',
+ 'ErrorCheckRefPointers': 'true',
+ 'ErrorCheckStubData': 'true',
+ 'GenerateStublessProxies': 'true',
+ 'GenerateTypeLibrary': 'true',
+ 'HeaderFileName': 'a_file_name',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InterfaceIdentifierFileName': 'a_file_name',
+ 'MkTypLibCompatible': 'true',
+ 'notgood': 'bogus',
+ 'OutputDirectory': 'a string1',
+ 'PreprocessorDefinitions': 'string1;string2',
+ 'ProxyFileName': 'a_file_name',
+ 'RedirectOutputAndErrors': 'a_file_name',
+ 'StructMemberAlignment': '1',
+ 'SuppressStartupBanner': 'true',
+ 'TargetEnvironment': '1',
+ 'TypeLibraryName': 'a_file_name',
+ 'UndefinePreprocessorDefinitions': 'string1;string2',
+ 'ValidateParameters': 'true',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '1'},
+ 'VCResourceCompilerTool': {
+ 'AdditionalOptions': 'a string1',
+ 'AdditionalIncludeDirectories': 'folder1;folder2',
+ 'Culture': '1003',
+ 'IgnoreStandardIncludePath': 'true',
+ 'notgood2': 'bogus',
+ 'PreprocessorDefinitions': 'string1;string2',
+ 'ResourceOutputFileName': 'a string1',
+ 'ShowProgress': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'UndefinePreprocessorDefinitions': 'string1;string2'},
+ 'VCLibrarianTool': {
+ 'AdditionalDependencies': 'file1;file2',
+ 'AdditionalLibraryDirectories': 'folder1;folder2',
+ 'AdditionalOptions': 'a string1',
+ 'ExportNamedFunctions': 'string1;string2',
+ 'ForceSymbolReferences': 'a string1',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'file1;file2',
+ 'LinkLibraryDependencies': 'true',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'OutputFile': 'a_file_name',
+ 'SuppressStartupBanner': 'true',
+ 'UseUnicodeResponseFiles': 'true'},
+ 'VCManifestTool': {
+ 'AdditionalManifestFiles': 'file1;file2',
+ 'AdditionalOptions': 'a string1',
+ 'AssemblyIdentity': 'a string1',
+ 'ComponentFileName': 'a_file_name',
+ 'DependencyInformationFile': 'a_file_name',
+ 'GenerateCatalogFiles': 'true',
+ 'InputResourceManifests': 'a string1',
+ 'ManifestResourceFile': 'a_file_name',
+ 'OutputManifestFile': 'a_file_name',
+ 'RegistrarScriptFile': 'a_file_name',
+ 'ReplacementsFile': 'a_file_name',
+ 'SuppressStartupBanner': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'UpdateFileHashes': 'truel',
+ 'UpdateFileHashesSearchPath': 'a_file_name',
+ 'UseFAT32Workaround': 'true',
+ 'UseUnicodeResponseFiles': 'true',
+ 'VerboseOutput': 'true'}},
+ self.stderr)
+ self._ExpectedWarnings([
+ 'Warning: for VCCLCompilerTool/BasicRuntimeChecks, '
+ 'index value (5) not in expected range [0, 4)',
+ 'Warning: for VCCLCompilerTool/BrowseInformation, '
+ "invalid literal for int() with base 10: 'fdkslj'",
+ 'Warning: for VCCLCompilerTool/CallingConvention, '
+ 'index value (-1) not in expected range [0, 4)',
+ 'Warning: for VCCLCompilerTool/DebugInformationFormat, '
+ 'converted value for 2 not specified.',
+ 'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
+ 'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ',
+ 'Warning: for VCLinkerTool/TargetMachine, '
+ 'converted value for 2 not specified.',
+ 'Warning: unrecognized setting VCMIDLTool/notgood',
+ 'Warning: unrecognized setting VCResourceCompilerTool/notgood2',
+ 'Warning: for VCManifestTool/UpdateFileHashes, '
+ "expected bool; got 'truel'"
+ ''])
+
+ def testValidateMSBuildSettings_settings(self):
+ """Tests that for invalid MSBuild settings."""
+ MSVSSettings.ValidateMSBuildSettings(
+ {'ClCompile': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2',
+ 'AdditionalOptions': ['string1', 'string2'],
+ 'AdditionalUsingDirectories': 'folder1;folder2',
+ 'AssemblerListingLocation': 'a_file_name',
+ 'AssemblerOutput': 'NoListing',
+ 'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
+ 'BrowseInformation': 'false',
+ 'BrowseInformationFile': 'a_file_name',
+ 'BufferSecurityCheck': 'true',
+ 'BuildingInIDE': 'true',
+ 'CallingConvention': 'Cdecl',
+ 'CompileAs': 'CompileAsC',
+ 'CompileAsManaged': 'true',
+ 'CreateHotpatchableImage': 'true',
+ 'DebugInformationFormat': 'ProgramDatabase',
+ 'DisableLanguageExtensions': 'true',
+ 'DisableSpecificWarnings': 'string1;string2',
+ 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
+ 'EnableFiberSafeOptimizations': 'true',
+ 'EnablePREfast': 'true',
+ 'Enableprefast': 'bogus',
+ 'ErrorReporting': 'Prompt',
+ 'ExceptionHandling': 'SyncCThrow',
+ 'ExpandAttributedSource': 'true',
+ 'FavorSizeOrSpeed': 'Neither',
+ 'FloatingPointExceptions': 'true',
+ 'FloatingPointModel': 'Precise',
+ 'ForceConformanceInForLoopScope': 'true',
+ 'ForcedIncludeFiles': 'file1;file2',
+ 'ForcedUsingFiles': 'file1;file2',
+ 'FunctionLevelLinking': 'false',
+ 'GenerateXMLDocumentationFiles': 'true',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InlineFunctionExpansion': 'OnlyExplicitInline',
+ 'IntrinsicFunctions': 'false',
+ 'MinimalRebuild': 'true',
+ 'MultiProcessorCompilation': 'true',
+ 'ObjectFileName': 'a_file_name',
+ 'OmitDefaultLibName': 'true',
+ 'OmitFramePointers': 'true',
+ 'OpenMPSupport': 'true',
+ 'Optimization': 'Disabled',
+ 'PrecompiledHeader': 'NotUsing',
+ 'PrecompiledHeaderFile': 'a_file_name',
+ 'PrecompiledHeaderOutputFile': 'a_file_name',
+ 'PreprocessKeepComments': 'true',
+ 'PreprocessorDefinitions': 'string1;string2',
+ 'PreprocessOutputPath': 'a string1',
+ 'PreprocessSuppressLineNumbers': 'false',
+ 'PreprocessToFile': 'false',
+ 'ProcessorNumber': '33',
+ 'ProgramDataBaseFileName': 'a_file_name',
+ 'RuntimeLibrary': 'MultiThreaded',
+ 'RuntimeTypeInfo': 'true',
+ 'ShowIncludes': 'true',
+ 'SmallerTypeCheck': 'true',
+ 'StringPooling': 'true',
+ 'StructMemberAlignment': '1Byte',
+ 'SuppressStartupBanner': 'true',
+ 'TrackerLogDirectory': 'a_folder',
+ 'TreatSpecificWarningsAsErrors': 'string1;string2',
+ 'TreatWarningAsError': 'true',
+ 'TreatWChar_tAsBuiltInType': 'true',
+ 'UndefineAllPreprocessorDefinitions': 'true',
+ 'UndefinePreprocessorDefinitions': 'string1;string2',
+ 'UseFullPaths': 'true',
+ 'UseUnicodeForAssemblerListing': 'true',
+ 'WarningLevel': 'TurnOffAllWarnings',
+ 'WholeProgramOptimization': 'true',
+ 'XMLDocumentationFileName': 'a_file_name',
+ 'ZZXYZ': 'bogus'},
+ 'Link': {
+ 'AdditionalDependencies': 'file1;file2',
+ 'AdditionalLibraryDirectories': 'folder1;folder2',
+ 'AdditionalManifestDependencies': 'file1;file2',
+ 'AdditionalOptions': 'a string1',
+ 'AddModuleNamesToAssembly': 'file1;file2',
+ 'AllowIsolation': 'true',
+ 'AssemblyDebug': '',
+ 'AssemblyLinkResource': 'file1;file2',
+ 'BaseAddress': 'a string1',
+ 'BuildingInIDE': 'true',
+ 'CLRImageType': 'ForceIJWImage',
+ 'CLRSupportLastError': 'Enabled',
+ 'CLRThreadAttribute': 'MTAThreadingAttribute',
+ 'CLRUnmanagedCodeCheck': 'true',
+ 'CreateHotPatchableImage': 'X86Image',
+ 'DataExecutionPrevention': 'false',
+ 'DelayLoadDLLs': 'file1;file2',
+ 'DelaySign': 'true',
+ 'Driver': 'NotSet',
+ 'EmbedManagedResourceFile': 'file1;file2',
+ 'EnableCOMDATFolding': 'false',
+ 'EnableUAC': 'true',
+ 'EntryPointSymbol': 'a string1',
+ 'FixedBaseAddress': 'false',
+ 'ForceFileOutput': 'Enabled',
+ 'ForceSymbolReferences': 'file1;file2',
+ 'FunctionOrder': 'a_file_name',
+ 'GenerateDebugInformation': 'true',
+ 'GenerateMapFile': 'true',
+ 'HeapCommitSize': 'a string1',
+ 'HeapReserveSize': 'a string1',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreEmbeddedIDL': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'a_file_list',
+ 'ImageHasSafeExceptionHandlers': 'true',
+ 'ImportLibrary': 'a_file_name',
+ 'KeyContainer': 'a_file_name',
+ 'KeyFile': 'a_file_name',
+ 'LargeAddressAware': 'false',
+ 'LinkDLL': 'true',
+ 'LinkErrorReporting': 'SendErrorReport',
+ 'LinkStatus': 'true',
+ 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
+ 'ManifestFile': 'a_file_name',
+ 'MapExports': 'true',
+ 'MapFileName': 'a_file_name',
+ 'MergedIDLBaseFileName': 'a_file_name',
+ 'MergeSections': 'a string1',
+ 'MidlCommandFile': 'a_file_name',
+ 'MinimumRequiredVersion': 'a string1',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'MSDOSStubFileName': 'a_file_name',
+ 'NoEntryPoint': 'true',
+ 'OptimizeReferences': 'false',
+ 'OutputFile': 'a_file_name',
+ 'PerUserRedirection': 'true',
+ 'PreventDllBinding': 'true',
+ 'Profile': 'true',
+ 'ProfileGuidedDatabase': 'a_file_name',
+ 'ProgramDatabaseFile': 'a_file_name',
+ 'RandomizedBaseAddress': 'false',
+ 'RegisterOutput': 'true',
+ 'SectionAlignment': '33',
+ 'SetChecksum': 'true',
+ 'ShowProgress': 'LinkVerboseREF',
+ 'SpecifySectionAttributes': 'a string1',
+ 'StackCommitSize': 'a string1',
+ 'StackReserveSize': 'a string1',
+ 'StripPrivateSymbols': 'a_file_name',
+ 'SubSystem': 'Console',
+ 'SupportNobindOfDelayLoadedDLL': 'true',
+ 'SupportUnloadOfDelayLoadedDLL': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'SwapRunFromCD': 'true',
+ 'SwapRunFromNET': 'true',
+ 'TargetMachine': 'MachineX86',
+ 'TerminalServerAware': 'false',
+ 'TrackerLogDirectory': 'a_folder',
+ 'TreatLinkerWarningAsErrors': 'true',
+ 'TurnOffAssemblyGeneration': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'TypeLibraryResourceID': '33',
+ 'UACExecutionLevel': 'AsInvoker',
+ 'UACUIAccess': 'true',
+ 'Version': 'a string1'},
+ 'ResourceCompile': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2',
+ 'AdditionalOptions': 'a string1',
+ 'Culture': '0x236',
+ 'IgnoreStandardIncludePath': 'true',
+ 'NullTerminateStrings': 'true',
+ 'PreprocessorDefinitions': 'string1;string2',
+ 'ResourceOutputFileName': 'a string1',
+ 'ShowProgress': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'TrackerLogDirectory': 'a_folder',
+ 'UndefinePreprocessorDefinitions': 'string1;string2'},
+ 'Midl': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2',
+ 'AdditionalOptions': 'a string1',
+ 'ApplicationConfigurationMode': 'true',
+ 'ClientStubFile': 'a_file_name',
+ 'CPreprocessOptions': 'a string1',
+ 'DefaultCharType': 'Signed',
+ 'DllDataFileName': 'a_file_name',
+ 'EnableErrorChecks': 'EnableCustom',
+ 'ErrorCheckAllocations': 'true',
+ 'ErrorCheckBounds': 'true',
+ 'ErrorCheckEnumRange': 'true',
+ 'ErrorCheckRefPointers': 'true',
+ 'ErrorCheckStubData': 'true',
+ 'GenerateClientFiles': 'Stub',
+ 'GenerateServerFiles': 'None',
+ 'GenerateStublessProxies': 'true',
+ 'GenerateTypeLibrary': 'true',
+ 'HeaderFileName': 'a_file_name',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InterfaceIdentifierFileName': 'a_file_name',
+ 'LocaleID': '33',
+ 'MkTypLibCompatible': 'true',
+ 'OutputDirectory': 'a string1',
+ 'PreprocessorDefinitions': 'string1;string2',
+ 'ProxyFileName': 'a_file_name',
+ 'RedirectOutputAndErrors': 'a_file_name',
+ 'ServerStubFile': 'a_file_name',
+ 'StructMemberAlignment': 'NotSet',
+ 'SuppressCompilerWarnings': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'TargetEnvironment': 'Itanium',
+ 'TrackerLogDirectory': 'a_folder',
+ 'TypeLibFormat': 'NewFormat',
+ 'TypeLibraryName': 'a_file_name',
+ 'UndefinePreprocessorDefinitions': 'string1;string2',
+ 'ValidateAllParameters': 'true',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '1'},
+ 'Lib': {
+ 'AdditionalDependencies': 'file1;file2',
+ 'AdditionalLibraryDirectories': 'folder1;folder2',
+ 'AdditionalOptions': 'a string1',
+ 'DisplayLibrary': 'a string1',
+ 'ErrorReporting': 'PromptImmediately',
+ 'ExportNamedFunctions': 'string1;string2',
+ 'ForceSymbolReferences': 'a string1',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'file1;file2',
+ 'LinkTimeCodeGeneration': 'true',
+ 'MinimumRequiredVersion': 'a string1',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'Name': 'a_file_name',
+ 'OutputFile': 'a_file_name',
+ 'RemoveObjects': 'file1;file2',
+ 'SubSystem': 'Console',
+ 'SuppressStartupBanner': 'true',
+ 'TargetMachine': 'MachineX86i',
+ 'TrackerLogDirectory': 'a_folder',
+ 'TreatLibWarningAsErrors': 'true',
+ 'UseUnicodeResponseFiles': 'true',
+ 'Verbose': 'true'},
+ 'Manifest': {
+ 'AdditionalManifestFiles': 'file1;file2',
+ 'AdditionalOptions': 'a string1',
+ 'AssemblyIdentity': 'a string1',
+ 'ComponentFileName': 'a_file_name',
+ 'EnableDPIAwareness': 'fal',
+ 'GenerateCatalogFiles': 'truel',
+ 'GenerateCategoryTags': 'true',
+ 'InputResourceManifests': 'a string1',
+ 'ManifestFromManagedAssembly': 'a_file_name',
+ 'notgood3': 'bogus',
+ 'OutputManifestFile': 'a_file_name',
+ 'OutputResourceManifests': 'a string1',
+ 'RegistrarScriptFile': 'a_file_name',
+ 'ReplacementsFile': 'a_file_name',
+ 'SuppressDependencyElement': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'TrackerLogDirectory': 'a_folder',
+ 'TypeLibraryFile': 'a_file_name',
+ 'UpdateFileHashes': 'true',
+ 'UpdateFileHashesSearchPath': 'a_file_name',
+ 'VerboseOutput': 'true'},
+ 'ProjectReference': {
+ 'LinkLibraryDependencies': 'true',
+ 'UseLibraryDependencyInputs': 'true'},
+ 'ManifestResourceCompile': {
+ 'ResourceOutputFileName': 'a_file_name'},
+ '': {
+ 'EmbedManifest': 'true',
+ 'GenerateManifest': 'true',
+ 'IgnoreImportLibrary': 'true',
+ 'LinkIncremental': 'false'}},
+ self.stderr)
+ self._ExpectedWarnings([
+ 'Warning: unrecognized setting ClCompile/Enableprefast',
+ 'Warning: unrecognized setting ClCompile/ZZXYZ',
+ 'Warning: unrecognized setting Manifest/notgood3',
+ 'Warning: for Manifest/GenerateCatalogFiles, '
+ "expected bool; got 'truel'",
+ 'Warning: for Lib/TargetMachine, unrecognized enumerated value '
+ 'MachineX86i',
+ "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"])
+
+ def testConvertToMSBuildSettings_empty(self):
+ """Tests an empty conversion."""
+ msvs_settings = {}
+ expected_msbuild_settings = {}
+ actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
+ msvs_settings,
+ self.stderr)
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([])
+
+ def testConvertToMSBuildSettings_minimal(self):
+ """Tests a minimal conversion."""
+ msvs_settings = {
+ 'VCCLCompilerTool': {
+ 'AdditionalIncludeDirectories': 'dir1',
+ 'AdditionalOptions': '/foo',
+ 'BasicRuntimeChecks': '0',
+ },
+ 'VCLinkerTool': {
+ 'LinkTimeCodeGeneration': '1',
+ 'ErrorReporting': '1',
+ 'DataExecutionPrevention': '2',
+ },
+ }
+ expected_msbuild_settings = {
+ 'ClCompile': {
+ 'AdditionalIncludeDirectories': 'dir1',
+ 'AdditionalOptions': '/foo',
+ 'BasicRuntimeChecks': 'Default',
+ },
+ 'Link': {
+ 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
+ 'LinkErrorReporting': 'PromptImmediately',
+ 'DataExecutionPrevention': 'true',
+ },
+ }
+ actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
+ msvs_settings,
+ self.stderr)
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([])
+
+ def testConvertToMSBuildSettings_warnings(self):
+ """Tests conversion that generates warnings."""
+ msvs_settings = {
+ 'VCCLCompilerTool': {
+ 'AdditionalIncludeDirectories': '1',
+ 'AdditionalOptions': '2',
+ # These are incorrect values:
+ 'BasicRuntimeChecks': '12',
+ 'BrowseInformation': '21',
+ 'UsePrecompiledHeader': '13',
+ 'GeneratePreprocessedFile': '14'},
+ 'VCLinkerTool': {
+ # These are incorrect values:
+ 'Driver': '10',
+ 'LinkTimeCodeGeneration': '31',
+ 'ErrorReporting': '21',
+ 'FixedBaseAddress': '6'},
+ 'VCResourceCompilerTool': {
+ # Custom
+ 'Culture': '1003'}}
+ expected_msbuild_settings = {
+ 'ClCompile': {
+ 'AdditionalIncludeDirectories': '1',
+ 'AdditionalOptions': '2'},
+ 'Link': {},
+ 'ResourceCompile': {
+ # Custom
+ 'Culture': '0x03eb'}}
+ actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
+ msvs_settings,
+ self.stderr)
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([
+ 'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to '
+ 'MSBuild, index value (12) not in expected range [0, 4)',
+ 'Warning: while converting VCCLCompilerTool/BrowseInformation to '
+ 'MSBuild, index value (21) not in expected range [0, 3)',
+ 'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to '
+ 'MSBuild, index value (13) not in expected range [0, 3)',
+ 'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to '
+ 'MSBuild, value must be one of [0, 1, 2]; got 14',
+
+ 'Warning: while converting VCLinkerTool/Driver to '
+ 'MSBuild, index value (10) not in expected range [0, 4)',
+ 'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to '
+ 'MSBuild, index value (31) not in expected range [0, 5)',
+ 'Warning: while converting VCLinkerTool/ErrorReporting to '
+ 'MSBuild, index value (21) not in expected range [0, 3)',
+ 'Warning: while converting VCLinkerTool/FixedBaseAddress to '
+ 'MSBuild, index value (6) not in expected range [0, 3)',
+ ])
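+    # The 'Culture' value illustrates the conversion of a decimal locale id
+    # in the .vcproj to the zero-padded hex form MSBuild expects; a rough
+    # sketch of that mapping (the exact converter code is not shown here):
+    #
+    #   '0x%04x' % int('1003')   # -> '0x03eb'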
+
+ def testConvertToMSBuildSettings_full_synthetic(self):
+ """Tests conversion of all the MSBuild settings."""
+ msvs_settings = {
+ 'VCCLCompilerTool': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'AdditionalUsingDirectories': 'folder1;folder2;folder3',
+ 'AssemblerListingLocation': 'a_file_name',
+ 'AssemblerOutput': '0',
+ 'BasicRuntimeChecks': '1',
+ 'BrowseInformation': '2',
+ 'BrowseInformationFile': 'a_file_name',
+ 'BufferSecurityCheck': 'true',
+ 'CallingConvention': '0',
+ 'CompileAs': '1',
+ 'DebugInformationFormat': '4',
+ 'DefaultCharIsUnsigned': 'true',
+ 'Detect64BitPortabilityProblems': 'true',
+ 'DisableLanguageExtensions': 'true',
+ 'DisableSpecificWarnings': 'd1;d2;d3',
+ 'EnableEnhancedInstructionSet': '0',
+ 'EnableFiberSafeOptimizations': 'true',
+ 'EnableFunctionLevelLinking': 'true',
+ 'EnableIntrinsicFunctions': 'true',
+ 'EnablePREfast': 'true',
+ 'ErrorReporting': '1',
+ 'ExceptionHandling': '2',
+ 'ExpandAttributedSource': 'true',
+ 'FavorSizeOrSpeed': '0',
+ 'FloatingPointExceptions': 'true',
+ 'FloatingPointModel': '1',
+ 'ForceConformanceInForLoopScope': 'true',
+ 'ForcedIncludeFiles': 'file1;file2;file3',
+ 'ForcedUsingFiles': 'file1;file2;file3',
+ 'GeneratePreprocessedFile': '1',
+ 'GenerateXMLDocumentationFiles': 'true',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InlineFunctionExpansion': '2',
+ 'KeepComments': 'true',
+ 'MinimalRebuild': 'true',
+ 'ObjectFile': 'a_file_name',
+ 'OmitDefaultLibName': 'true',
+ 'OmitFramePointers': 'true',
+ 'OpenMP': 'true',
+ 'Optimization': '3',
+ 'PrecompiledHeaderFile': 'a_file_name',
+ 'PrecompiledHeaderThrough': 'a_file_name',
+ 'PreprocessorDefinitions': 'd1;d2;d3',
+ 'ProgramDataBaseFileName': 'a_file_name',
+ 'RuntimeLibrary': '0',
+ 'RuntimeTypeInfo': 'true',
+ 'ShowIncludes': 'true',
+ 'SmallerTypeCheck': 'true',
+ 'StringPooling': 'true',
+ 'StructMemberAlignment': '1',
+ 'SuppressStartupBanner': 'true',
+ 'TreatWChar_tAsBuiltInType': 'true',
+ 'UndefineAllPreprocessorDefinitions': 'true',
+ 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
+ 'UseFullPaths': 'true',
+ 'UsePrecompiledHeader': '1',
+ 'UseUnicodeResponseFiles': 'true',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '2',
+ 'WholeProgramOptimization': 'true',
+ 'XMLDocumentationFileName': 'a_file_name'},
+ 'VCLinkerTool': {
+ 'AdditionalDependencies': 'file1;file2;file3',
+ 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
+ 'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
+ 'AdditionalManifestDependencies': 'file1;file2;file3',
+ 'AdditionalOptions': 'a_string',
+ 'AddModuleNamesToAssembly': 'file1;file2;file3',
+ 'AllowIsolation': 'true',
+ 'AssemblyDebug': '0',
+ 'AssemblyLinkResource': 'file1;file2;file3',
+ 'BaseAddress': 'a_string',
+ 'CLRImageType': '1',
+ 'CLRThreadAttribute': '2',
+ 'CLRUnmanagedCodeCheck': 'true',
+ 'DataExecutionPrevention': '0',
+ 'DelayLoadDLLs': 'file1;file2;file3',
+ 'DelaySign': 'true',
+ 'Driver': '1',
+ 'EmbedManagedResourceFile': 'file1;file2;file3',
+ 'EnableCOMDATFolding': '0',
+ 'EnableUAC': 'true',
+ 'EntryPointSymbol': 'a_string',
+ 'ErrorReporting': '0',
+ 'FixedBaseAddress': '1',
+ 'ForceSymbolReferences': 'file1;file2;file3',
+ 'FunctionOrder': 'a_file_name',
+ 'GenerateDebugInformation': 'true',
+ 'GenerateManifest': 'true',
+ 'GenerateMapFile': 'true',
+ 'HeapCommitSize': 'a_string',
+ 'HeapReserveSize': 'a_string',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreDefaultLibraryNames': 'file1;file2;file3',
+ 'IgnoreEmbeddedIDL': 'true',
+ 'IgnoreImportLibrary': 'true',
+ 'ImportLibrary': 'a_file_name',
+ 'KeyContainer': 'a_file_name',
+ 'KeyFile': 'a_file_name',
+ 'LargeAddressAware': '2',
+ 'LinkIncremental': '1',
+ 'LinkLibraryDependencies': 'true',
+ 'LinkTimeCodeGeneration': '2',
+ 'ManifestFile': 'a_file_name',
+ 'MapExports': 'true',
+ 'MapFileName': 'a_file_name',
+ 'MergedIDLBaseFileName': 'a_file_name',
+ 'MergeSections': 'a_string',
+ 'MidlCommandFile': 'a_file_name',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'OptimizeForWindows98': '1',
+ 'OptimizeReferences': '0',
+ 'OutputFile': 'a_file_name',
+ 'PerUserRedirection': 'true',
+ 'Profile': 'true',
+ 'ProfileGuidedDatabase': 'a_file_name',
+ 'ProgramDatabaseFile': 'a_file_name',
+ 'RandomizedBaseAddress': '1',
+ 'RegisterOutput': 'true',
+ 'ResourceOnlyDLL': 'true',
+ 'SetChecksum': 'true',
+ 'ShowProgress': '0',
+ 'StackCommitSize': 'a_string',
+ 'StackReserveSize': 'a_string',
+ 'StripPrivateSymbols': 'a_file_name',
+ 'SubSystem': '2',
+ 'SupportUnloadOfDelayLoadedDLL': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'SwapRunFromCD': 'true',
+ 'SwapRunFromNet': 'true',
+ 'TargetMachine': '3',
+ 'TerminalServerAware': '2',
+ 'TurnOffAssemblyGeneration': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'TypeLibraryResourceID': '33',
+ 'UACExecutionLevel': '1',
+ 'UACUIAccess': 'true',
+ 'UseLibraryDependencyInputs': 'false',
+ 'UseUnicodeResponseFiles': 'true',
+ 'Version': 'a_string'},
+ 'VCResourceCompilerTool': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'Culture': '1003',
+ 'IgnoreStandardIncludePath': 'true',
+ 'PreprocessorDefinitions': 'd1;d2;d3',
+ 'ResourceOutputFileName': 'a_string',
+ 'ShowProgress': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
+ 'VCMIDLTool': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'CPreprocessOptions': 'a_string',
+ 'DefaultCharType': '0',
+ 'DLLDataFileName': 'a_file_name',
+ 'EnableErrorChecks': '2',
+ 'ErrorCheckAllocations': 'true',
+ 'ErrorCheckBounds': 'true',
+ 'ErrorCheckEnumRange': 'true',
+ 'ErrorCheckRefPointers': 'true',
+ 'ErrorCheckStubData': 'true',
+ 'GenerateStublessProxies': 'true',
+ 'GenerateTypeLibrary': 'true',
+ 'HeaderFileName': 'a_file_name',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InterfaceIdentifierFileName': 'a_file_name',
+ 'MkTypLibCompatible': 'true',
+ 'OutputDirectory': 'a_string',
+ 'PreprocessorDefinitions': 'd1;d2;d3',
+ 'ProxyFileName': 'a_file_name',
+ 'RedirectOutputAndErrors': 'a_file_name',
+ 'StructMemberAlignment': '3',
+ 'SuppressStartupBanner': 'true',
+ 'TargetEnvironment': '1',
+ 'TypeLibraryName': 'a_file_name',
+ 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
+ 'ValidateParameters': 'true',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '4'},
+ 'VCLibrarianTool': {
+ 'AdditionalDependencies': 'file1;file2;file3',
+ 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
+ 'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'ExportNamedFunctions': 'd1;d2;d3',
+ 'ForceSymbolReferences': 'a_string',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
+ 'LinkLibraryDependencies': 'true',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'OutputFile': 'a_file_name',
+ 'SuppressStartupBanner': 'true',
+ 'UseUnicodeResponseFiles': 'true'},
+ 'VCManifestTool': {
+ 'AdditionalManifestFiles': 'file1;file2;file3',
+ 'AdditionalOptions': 'a_string',
+ 'AssemblyIdentity': 'a_string',
+ 'ComponentFileName': 'a_file_name',
+ 'DependencyInformationFile': 'a_file_name',
+ 'EmbedManifest': 'true',
+ 'GenerateCatalogFiles': 'true',
+ 'InputResourceManifests': 'a_string',
+ 'ManifestResourceFile': 'my_name',
+ 'OutputManifestFile': 'a_file_name',
+ 'RegistrarScriptFile': 'a_file_name',
+ 'ReplacementsFile': 'a_file_name',
+ 'SuppressStartupBanner': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'UpdateFileHashes': 'true',
+ 'UpdateFileHashesSearchPath': 'a_file_name',
+ 'UseFAT32Workaround': 'true',
+ 'UseUnicodeResponseFiles': 'true',
+ 'VerboseOutput': 'true'}}
+ expected_msbuild_settings = {
+ 'ClCompile': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string /J',
+ 'AdditionalUsingDirectories': 'folder1;folder2;folder3',
+ 'AssemblerListingLocation': 'a_file_name',
+ 'AssemblerOutput': 'NoListing',
+ 'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
+ 'BrowseInformation': 'true',
+ 'BrowseInformationFile': 'a_file_name',
+ 'BufferSecurityCheck': 'true',
+ 'CallingConvention': 'Cdecl',
+ 'CompileAs': 'CompileAsC',
+ 'DebugInformationFormat': 'EditAndContinue',
+ 'DisableLanguageExtensions': 'true',
+ 'DisableSpecificWarnings': 'd1;d2;d3',
+ 'EnableEnhancedInstructionSet': 'NotSet',
+ 'EnableFiberSafeOptimizations': 'true',
+ 'EnablePREfast': 'true',
+ 'ErrorReporting': 'Prompt',
+ 'ExceptionHandling': 'Async',
+ 'ExpandAttributedSource': 'true',
+ 'FavorSizeOrSpeed': 'Neither',
+ 'FloatingPointExceptions': 'true',
+ 'FloatingPointModel': 'Strict',
+ 'ForceConformanceInForLoopScope': 'true',
+ 'ForcedIncludeFiles': 'file1;file2;file3',
+ 'ForcedUsingFiles': 'file1;file2;file3',
+ 'FunctionLevelLinking': 'true',
+ 'GenerateXMLDocumentationFiles': 'true',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InlineFunctionExpansion': 'AnySuitable',
+ 'IntrinsicFunctions': 'true',
+ 'MinimalRebuild': 'true',
+ 'ObjectFileName': 'a_file_name',
+ 'OmitDefaultLibName': 'true',
+ 'OmitFramePointers': 'true',
+ 'OpenMPSupport': 'true',
+ 'Optimization': 'Full',
+ 'PrecompiledHeader': 'Create',
+ 'PrecompiledHeaderFile': 'a_file_name',
+ 'PrecompiledHeaderOutputFile': 'a_file_name',
+ 'PreprocessKeepComments': 'true',
+ 'PreprocessorDefinitions': 'd1;d2;d3',
+ 'PreprocessSuppressLineNumbers': 'false',
+ 'PreprocessToFile': 'true',
+ 'ProgramDataBaseFileName': 'a_file_name',
+ 'RuntimeLibrary': 'MultiThreaded',
+ 'RuntimeTypeInfo': 'true',
+ 'ShowIncludes': 'true',
+ 'SmallerTypeCheck': 'true',
+ 'StringPooling': 'true',
+ 'StructMemberAlignment': '1Byte',
+ 'SuppressStartupBanner': 'true',
+ 'TreatWarningAsError': 'true',
+ 'TreatWChar_tAsBuiltInType': 'true',
+ 'UndefineAllPreprocessorDefinitions': 'true',
+ 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
+ 'UseFullPaths': 'true',
+ 'WarningLevel': 'Level2',
+ 'WholeProgramOptimization': 'true',
+ 'XMLDocumentationFileName': 'a_file_name'},
+ 'Link': {
+ 'AdditionalDependencies': 'file1;file2;file3',
+ 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
+ 'AdditionalManifestDependencies': 'file1;file2;file3',
+ 'AdditionalOptions': 'a_string',
+ 'AddModuleNamesToAssembly': 'file1;file2;file3',
+ 'AllowIsolation': 'true',
+ 'AssemblyDebug': '',
+ 'AssemblyLinkResource': 'file1;file2;file3',
+ 'BaseAddress': 'a_string',
+ 'CLRImageType': 'ForceIJWImage',
+ 'CLRThreadAttribute': 'STAThreadingAttribute',
+ 'CLRUnmanagedCodeCheck': 'true',
+ 'DataExecutionPrevention': '',
+ 'DelayLoadDLLs': 'file1;file2;file3',
+ 'DelaySign': 'true',
+ 'Driver': 'Driver',
+ 'EmbedManagedResourceFile': 'file1;file2;file3',
+ 'EnableCOMDATFolding': '',
+ 'EnableUAC': 'true',
+ 'EntryPointSymbol': 'a_string',
+ 'FixedBaseAddress': 'false',
+ 'ForceSymbolReferences': 'file1;file2;file3',
+ 'FunctionOrder': 'a_file_name',
+ 'GenerateDebugInformation': 'true',
+ 'GenerateMapFile': 'true',
+ 'HeapCommitSize': 'a_string',
+ 'HeapReserveSize': 'a_string',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreEmbeddedIDL': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
+ 'ImportLibrary': 'a_file_name',
+ 'KeyContainer': 'a_file_name',
+ 'KeyFile': 'a_file_name',
+ 'LargeAddressAware': 'true',
+ 'LinkErrorReporting': 'NoErrorReport',
+ 'LinkTimeCodeGeneration': 'PGInstrument',
+ 'ManifestFile': 'a_file_name',
+ 'MapExports': 'true',
+ 'MapFileName': 'a_file_name',
+ 'MergedIDLBaseFileName': 'a_file_name',
+ 'MergeSections': 'a_string',
+ 'MidlCommandFile': 'a_file_name',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'NoEntryPoint': 'true',
+ 'OptimizeReferences': '',
+ 'OutputFile': 'a_file_name',
+ 'PerUserRedirection': 'true',
+ 'Profile': 'true',
+ 'ProfileGuidedDatabase': 'a_file_name',
+ 'ProgramDatabaseFile': 'a_file_name',
+ 'RandomizedBaseAddress': 'false',
+ 'RegisterOutput': 'true',
+ 'SetChecksum': 'true',
+ 'ShowProgress': 'NotSet',
+ 'StackCommitSize': 'a_string',
+ 'StackReserveSize': 'a_string',
+ 'StripPrivateSymbols': 'a_file_name',
+ 'SubSystem': 'Windows',
+ 'SupportUnloadOfDelayLoadedDLL': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'SwapRunFromCD': 'true',
+ 'SwapRunFromNET': 'true',
+ 'TargetMachine': 'MachineARM',
+ 'TerminalServerAware': 'true',
+ 'TurnOffAssemblyGeneration': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'TypeLibraryResourceID': '33',
+ 'UACExecutionLevel': 'HighestAvailable',
+ 'UACUIAccess': 'true',
+ 'Version': 'a_string'},
+ 'ResourceCompile': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'Culture': '0x03eb',
+ 'IgnoreStandardIncludePath': 'true',
+ 'PreprocessorDefinitions': 'd1;d2;d3',
+ 'ResourceOutputFileName': 'a_string',
+ 'ShowProgress': 'true',
+ 'SuppressStartupBanner': 'true',
+ 'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
+ 'Midl': {
+ 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'CPreprocessOptions': 'a_string',
+ 'DefaultCharType': 'Unsigned',
+ 'DllDataFileName': 'a_file_name',
+ 'EnableErrorChecks': 'All',
+ 'ErrorCheckAllocations': 'true',
+ 'ErrorCheckBounds': 'true',
+ 'ErrorCheckEnumRange': 'true',
+ 'ErrorCheckRefPointers': 'true',
+ 'ErrorCheckStubData': 'true',
+ 'GenerateStublessProxies': 'true',
+ 'GenerateTypeLibrary': 'true',
+ 'HeaderFileName': 'a_file_name',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InterfaceIdentifierFileName': 'a_file_name',
+ 'MkTypLibCompatible': 'true',
+ 'OutputDirectory': 'a_string',
+ 'PreprocessorDefinitions': 'd1;d2;d3',
+ 'ProxyFileName': 'a_file_name',
+ 'RedirectOutputAndErrors': 'a_file_name',
+ 'StructMemberAlignment': '4',
+ 'SuppressStartupBanner': 'true',
+ 'TargetEnvironment': 'Win32',
+ 'TypeLibraryName': 'a_file_name',
+ 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
+ 'ValidateAllParameters': 'true',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '4'},
+ 'Lib': {
+ 'AdditionalDependencies': 'file1;file2;file3',
+ 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
+ 'AdditionalOptions': 'a_string',
+ 'ExportNamedFunctions': 'd1;d2;d3',
+ 'ForceSymbolReferences': 'a_string',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
+ 'ModuleDefinitionFile': 'a_file_name',
+ 'OutputFile': 'a_file_name',
+ 'SuppressStartupBanner': 'true',
+ 'UseUnicodeResponseFiles': 'true'},
+ 'Manifest': {
+ 'AdditionalManifestFiles': 'file1;file2;file3',
+ 'AdditionalOptions': 'a_string',
+ 'AssemblyIdentity': 'a_string',
+ 'ComponentFileName': 'a_file_name',
+ 'GenerateCatalogFiles': 'true',
+ 'InputResourceManifests': 'a_string',
+ 'OutputManifestFile': 'a_file_name',
+ 'RegistrarScriptFile': 'a_file_name',
+ 'ReplacementsFile': 'a_file_name',
+ 'SuppressStartupBanner': 'true',
+ 'TypeLibraryFile': 'a_file_name',
+ 'UpdateFileHashes': 'true',
+ 'UpdateFileHashesSearchPath': 'a_file_name',
+ 'VerboseOutput': 'true'},
+ 'ManifestResourceCompile': {
+ 'ResourceOutputFileName': 'my_name'},
+ 'ProjectReference': {
+ 'LinkLibraryDependencies': 'true',
+ 'UseLibraryDependencyInputs': 'false'},
+ '': {
+ 'EmbedManifest': 'true',
+ 'GenerateManifest': 'true',
+ 'IgnoreImportLibrary': 'true',
+ 'LinkIncremental': 'false'}}
+ actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
+ msvs_settings,
+ self.stderr)
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([])
+
+ def testConvertToMSBuildSettings_actual(self):
+ """Tests the conversion of an actual project.
+
+ A VS2008 project with most of the options defined was created through the
+ VS2008 IDE. It was then converted to VS2010. The tool settings found in
+ the .vcproj and .vcxproj files were converted to the two dictionaries
+ msvs_settings and expected_msbuild_settings.
+
+ Note that for many settings, the VS2010 converter adds macros like
+    %(AdditionalIncludeDirectories) to make sure that inherited values are
+ included. Since the Gyp projects we generate do not use inheritance,
+ we removed these macros. They were:
+ ClCompile:
+ AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
+ AdditionalOptions: ' %(AdditionalOptions)'
+ AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
+ DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
+ ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
+ ForcedUsingFiles: ';%(ForcedUsingFiles)',
+ PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
+ UndefinePreprocessorDefinitions:
+ ';%(UndefinePreprocessorDefinitions)',
+ Link:
+ AdditionalDependencies: ';%(AdditionalDependencies)',
+ AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
+ AdditionalManifestDependencies:
+ ';%(AdditionalManifestDependencies)',
+ AdditionalOptions: ' %(AdditionalOptions)',
+ AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
+ AssemblyLinkResource: ';%(AssemblyLinkResource)',
+ DelayLoadDLLs: ';%(DelayLoadDLLs)',
+ EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
+ ForceSymbolReferences: ';%(ForceSymbolReferences)',
+ IgnoreSpecificDefaultLibraries:
+ ';%(IgnoreSpecificDefaultLibraries)',
+ ResourceCompile:
+ AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
+ AdditionalOptions: ' %(AdditionalOptions)',
+ PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
+ Manifest:
+ AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
+ AdditionalOptions: ' %(AdditionalOptions)',
+ InputResourceManifests: ';%(InputResourceManifests)',
+ """
+ msvs_settings = {
+ 'VCCLCompilerTool': {
+ 'AdditionalIncludeDirectories': 'dir1',
+ 'AdditionalOptions': '/more',
+ 'AdditionalUsingDirectories': 'test',
+ 'AssemblerListingLocation': '$(IntDir)\\a',
+ 'AssemblerOutput': '1',
+ 'BasicRuntimeChecks': '3',
+ 'BrowseInformation': '1',
+ 'BrowseInformationFile': '$(IntDir)\\e',
+ 'BufferSecurityCheck': 'false',
+ 'CallingConvention': '1',
+ 'CompileAs': '1',
+ 'DebugInformationFormat': '4',
+ 'DefaultCharIsUnsigned': 'true',
+ 'Detect64BitPortabilityProblems': 'true',
+ 'DisableLanguageExtensions': 'true',
+ 'DisableSpecificWarnings': 'abc',
+ 'EnableEnhancedInstructionSet': '1',
+ 'EnableFiberSafeOptimizations': 'true',
+ 'EnableFunctionLevelLinking': 'true',
+ 'EnableIntrinsicFunctions': 'true',
+ 'EnablePREfast': 'true',
+ 'ErrorReporting': '2',
+ 'ExceptionHandling': '2',
+ 'ExpandAttributedSource': 'true',
+ 'FavorSizeOrSpeed': '2',
+ 'FloatingPointExceptions': 'true',
+ 'FloatingPointModel': '1',
+ 'ForceConformanceInForLoopScope': 'false',
+ 'ForcedIncludeFiles': 'def',
+ 'ForcedUsingFiles': 'ge',
+ 'GeneratePreprocessedFile': '2',
+ 'GenerateXMLDocumentationFiles': 'true',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InlineFunctionExpansion': '1',
+ 'KeepComments': 'true',
+ 'MinimalRebuild': 'true',
+ 'ObjectFile': '$(IntDir)\\b',
+ 'OmitDefaultLibName': 'true',
+ 'OmitFramePointers': 'true',
+ 'OpenMP': 'true',
+ 'Optimization': '3',
+ 'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche',
+ 'PrecompiledHeaderThrough': 'StdAfx.hd',
+ 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
+ 'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb',
+ 'RuntimeLibrary': '3',
+ 'RuntimeTypeInfo': 'false',
+ 'ShowIncludes': 'true',
+ 'SmallerTypeCheck': 'true',
+ 'StringPooling': 'true',
+ 'StructMemberAlignment': '3',
+ 'SuppressStartupBanner': 'false',
+ 'TreatWChar_tAsBuiltInType': 'false',
+ 'UndefineAllPreprocessorDefinitions': 'true',
+ 'UndefinePreprocessorDefinitions': 'wer',
+ 'UseFullPaths': 'true',
+ 'UsePrecompiledHeader': '0',
+ 'UseUnicodeResponseFiles': 'false',
+ 'WarnAsError': 'true',
+ 'WarningLevel': '3',
+ 'WholeProgramOptimization': 'true',
+ 'XMLDocumentationFileName': '$(IntDir)\\c'},
+ 'VCLinkerTool': {
+ 'AdditionalDependencies': 'zx',
+ 'AdditionalLibraryDirectories': 'asd',
+ 'AdditionalManifestDependencies': 's2',
+ 'AdditionalOptions': '/mor2',
+ 'AddModuleNamesToAssembly': 'd1',
+ 'AllowIsolation': 'false',
+ 'AssemblyDebug': '1',
+ 'AssemblyLinkResource': 'd5',
+ 'BaseAddress': '23423',
+ 'CLRImageType': '3',
+ 'CLRThreadAttribute': '1',
+ 'CLRUnmanagedCodeCheck': 'true',
+ 'DataExecutionPrevention': '0',
+ 'DelayLoadDLLs': 'd4',
+ 'DelaySign': 'true',
+ 'Driver': '2',
+ 'EmbedManagedResourceFile': 'd2',
+ 'EnableCOMDATFolding': '1',
+ 'EnableUAC': 'false',
+ 'EntryPointSymbol': 'f5',
+ 'ErrorReporting': '2',
+ 'FixedBaseAddress': '1',
+ 'ForceSymbolReferences': 'd3',
+ 'FunctionOrder': 'fssdfsd',
+ 'GenerateDebugInformation': 'true',
+ 'GenerateManifest': 'false',
+ 'GenerateMapFile': 'true',
+ 'HeapCommitSize': '13',
+ 'HeapReserveSize': '12',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreDefaultLibraryNames': 'flob;flok',
+ 'IgnoreEmbeddedIDL': 'true',
+ 'IgnoreImportLibrary': 'true',
+ 'ImportLibrary': 'f4',
+ 'KeyContainer': 'f7',
+ 'KeyFile': 'f6',
+ 'LargeAddressAware': '2',
+ 'LinkIncremental': '0',
+ 'LinkLibraryDependencies': 'false',
+ 'LinkTimeCodeGeneration': '1',
+ 'ManifestFile':
+ '$(IntDir)\\$(TargetFileName).2intermediate.manifest',
+ 'MapExports': 'true',
+ 'MapFileName': 'd5',
+ 'MergedIDLBaseFileName': 'f2',
+ 'MergeSections': 'f5',
+ 'MidlCommandFile': 'f1',
+ 'ModuleDefinitionFile': 'sdsd',
+ 'OptimizeForWindows98': '2',
+ 'OptimizeReferences': '2',
+ 'OutputFile': '$(OutDir)\\$(ProjectName)2.exe',
+ 'PerUserRedirection': 'true',
+ 'Profile': 'true',
+ 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
+ 'ProgramDatabaseFile': 'Flob.pdb',
+ 'RandomizedBaseAddress': '1',
+ 'RegisterOutput': 'true',
+ 'ResourceOnlyDLL': 'true',
+ 'SetChecksum': 'false',
+ 'ShowProgress': '1',
+ 'StackCommitSize': '15',
+ 'StackReserveSize': '14',
+ 'StripPrivateSymbols': 'd3',
+ 'SubSystem': '1',
+ 'SupportUnloadOfDelayLoadedDLL': 'true',
+ 'SuppressStartupBanner': 'false',
+ 'SwapRunFromCD': 'true',
+ 'SwapRunFromNet': 'true',
+ 'TargetMachine': '1',
+ 'TerminalServerAware': '1',
+ 'TurnOffAssemblyGeneration': 'true',
+ 'TypeLibraryFile': 'f3',
+ 'TypeLibraryResourceID': '12',
+ 'UACExecutionLevel': '2',
+ 'UACUIAccess': 'true',
+ 'UseLibraryDependencyInputs': 'true',
+ 'UseUnicodeResponseFiles': 'false',
+ 'Version': '333'},
+ 'VCResourceCompilerTool': {
+ 'AdditionalIncludeDirectories': 'f3',
+ 'AdditionalOptions': '/more3',
+ 'Culture': '3084',
+ 'IgnoreStandardIncludePath': 'true',
+ 'PreprocessorDefinitions': '_UNICODE;UNICODE2',
+ 'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res',
+ 'ShowProgress': 'true'},
+ 'VCManifestTool': {
+ 'AdditionalManifestFiles': 'sfsdfsd',
+ 'AdditionalOptions': 'afdsdafsd',
+ 'AssemblyIdentity': 'sddfdsadfsa',
+ 'ComponentFileName': 'fsdfds',
+ 'DependencyInformationFile': '$(IntDir)\\mt.depdfd',
+ 'EmbedManifest': 'false',
+ 'GenerateCatalogFiles': 'true',
+ 'InputResourceManifests': 'asfsfdafs',
+ 'ManifestResourceFile':
+ '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf',
+ 'OutputManifestFile': '$(TargetPath).manifestdfs',
+ 'RegistrarScriptFile': 'sdfsfd',
+ 'ReplacementsFile': 'sdffsd',
+ 'SuppressStartupBanner': 'false',
+ 'TypeLibraryFile': 'sfsd',
+ 'UpdateFileHashes': 'true',
+ 'UpdateFileHashesSearchPath': 'sfsd',
+ 'UseFAT32Workaround': 'true',
+ 'UseUnicodeResponseFiles': 'false',
+ 'VerboseOutput': 'true'}}
+ expected_msbuild_settings = {
+ 'ClCompile': {
+ 'AdditionalIncludeDirectories': 'dir1',
+ 'AdditionalOptions': '/more /J',
+ 'AdditionalUsingDirectories': 'test',
+ 'AssemblerListingLocation': '$(IntDir)a',
+ 'AssemblerOutput': 'AssemblyCode',
+ 'BasicRuntimeChecks': 'EnableFastChecks',
+ 'BrowseInformation': 'true',
+ 'BrowseInformationFile': '$(IntDir)e',
+ 'BufferSecurityCheck': 'false',
+ 'CallingConvention': 'FastCall',
+ 'CompileAs': 'CompileAsC',
+ 'DebugInformationFormat': 'EditAndContinue',
+ 'DisableLanguageExtensions': 'true',
+ 'DisableSpecificWarnings': 'abc',
+ 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
+ 'EnableFiberSafeOptimizations': 'true',
+ 'EnablePREfast': 'true',
+ 'ErrorReporting': 'Queue',
+ 'ExceptionHandling': 'Async',
+ 'ExpandAttributedSource': 'true',
+ 'FavorSizeOrSpeed': 'Size',
+ 'FloatingPointExceptions': 'true',
+ 'FloatingPointModel': 'Strict',
+ 'ForceConformanceInForLoopScope': 'false',
+ 'ForcedIncludeFiles': 'def',
+ 'ForcedUsingFiles': 'ge',
+ 'FunctionLevelLinking': 'true',
+ 'GenerateXMLDocumentationFiles': 'true',
+ 'IgnoreStandardIncludePath': 'true',
+ 'InlineFunctionExpansion': 'OnlyExplicitInline',
+ 'IntrinsicFunctions': 'true',
+ 'MinimalRebuild': 'true',
+ 'ObjectFileName': '$(IntDir)b',
+ 'OmitDefaultLibName': 'true',
+ 'OmitFramePointers': 'true',
+ 'OpenMPSupport': 'true',
+ 'Optimization': 'Full',
+ 'PrecompiledHeader': 'NotUsing', # Actual conversion gives ''
+ 'PrecompiledHeaderFile': 'StdAfx.hd',
+ 'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche',
+ 'PreprocessKeepComments': 'true',
+ 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
+ 'PreprocessSuppressLineNumbers': 'true',
+ 'PreprocessToFile': 'true',
+ 'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb',
+ 'RuntimeLibrary': 'MultiThreadedDebugDLL',
+ 'RuntimeTypeInfo': 'false',
+ 'ShowIncludes': 'true',
+ 'SmallerTypeCheck': 'true',
+ 'StringPooling': 'true',
+ 'StructMemberAlignment': '4Bytes',
+ 'SuppressStartupBanner': 'false',
+ 'TreatWarningAsError': 'true',
+ 'TreatWChar_tAsBuiltInType': 'false',
+ 'UndefineAllPreprocessorDefinitions': 'true',
+ 'UndefinePreprocessorDefinitions': 'wer',
+ 'UseFullPaths': 'true',
+ 'WarningLevel': 'Level3',
+ 'WholeProgramOptimization': 'true',
+ 'XMLDocumentationFileName': '$(IntDir)c'},
+ 'Link': {
+ 'AdditionalDependencies': 'zx',
+ 'AdditionalLibraryDirectories': 'asd',
+ 'AdditionalManifestDependencies': 's2',
+ 'AdditionalOptions': '/mor2',
+ 'AddModuleNamesToAssembly': 'd1',
+ 'AllowIsolation': 'false',
+ 'AssemblyDebug': 'true',
+ 'AssemblyLinkResource': 'd5',
+ 'BaseAddress': '23423',
+ 'CLRImageType': 'ForceSafeILImage',
+ 'CLRThreadAttribute': 'MTAThreadingAttribute',
+ 'CLRUnmanagedCodeCheck': 'true',
+ 'DataExecutionPrevention': '',
+ 'DelayLoadDLLs': 'd4',
+ 'DelaySign': 'true',
+ 'Driver': 'UpOnly',
+ 'EmbedManagedResourceFile': 'd2',
+ 'EnableCOMDATFolding': 'false',
+ 'EnableUAC': 'false',
+ 'EntryPointSymbol': 'f5',
+ 'FixedBaseAddress': 'false',
+ 'ForceSymbolReferences': 'd3',
+ 'FunctionOrder': 'fssdfsd',
+ 'GenerateDebugInformation': 'true',
+ 'GenerateMapFile': 'true',
+ 'HeapCommitSize': '13',
+ 'HeapReserveSize': '12',
+ 'IgnoreAllDefaultLibraries': 'true',
+ 'IgnoreEmbeddedIDL': 'true',
+ 'IgnoreSpecificDefaultLibraries': 'flob;flok',
+ 'ImportLibrary': 'f4',
+ 'KeyContainer': 'f7',
+ 'KeyFile': 'f6',
+ 'LargeAddressAware': 'true',
+ 'LinkErrorReporting': 'QueueForNextLogin',
+ 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
+ 'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest',
+ 'MapExports': 'true',
+ 'MapFileName': 'd5',
+ 'MergedIDLBaseFileName': 'f2',
+ 'MergeSections': 'f5',
+ 'MidlCommandFile': 'f1',
+ 'ModuleDefinitionFile': 'sdsd',
+ 'NoEntryPoint': 'true',
+ 'OptimizeReferences': 'true',
+ 'OutputFile': '$(OutDir)$(ProjectName)2.exe',
+ 'PerUserRedirection': 'true',
+ 'Profile': 'true',
+ 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
+ 'ProgramDatabaseFile': 'Flob.pdb',
+ 'RandomizedBaseAddress': 'false',
+ 'RegisterOutput': 'true',
+ 'SetChecksum': 'false',
+ 'ShowProgress': 'LinkVerbose',
+ 'StackCommitSize': '15',
+ 'StackReserveSize': '14',
+ 'StripPrivateSymbols': 'd3',
+ 'SubSystem': 'Console',
+ 'SupportUnloadOfDelayLoadedDLL': 'true',
+ 'SuppressStartupBanner': 'false',
+ 'SwapRunFromCD': 'true',
+ 'SwapRunFromNET': 'true',
+ 'TargetMachine': 'MachineX86',
+ 'TerminalServerAware': 'false',
+ 'TurnOffAssemblyGeneration': 'true',
+ 'TypeLibraryFile': 'f3',
+ 'TypeLibraryResourceID': '12',
+ 'UACExecutionLevel': 'RequireAdministrator',
+ 'UACUIAccess': 'true',
+ 'Version': '333'},
+ 'ResourceCompile': {
+ 'AdditionalIncludeDirectories': 'f3',
+ 'AdditionalOptions': '/more3',
+ 'Culture': '0x0c0c',
+ 'IgnoreStandardIncludePath': 'true',
+ 'PreprocessorDefinitions': '_UNICODE;UNICODE2',
+ 'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
+ 'ShowProgress': 'true'},
+ 'Manifest': {
+ 'AdditionalManifestFiles': 'sfsdfsd',
+ 'AdditionalOptions': 'afdsdafsd',
+ 'AssemblyIdentity': 'sddfdsadfsa',
+ 'ComponentFileName': 'fsdfds',
+ 'GenerateCatalogFiles': 'true',
+ 'InputResourceManifests': 'asfsfdafs',
+ 'OutputManifestFile': '$(TargetPath).manifestdfs',
+ 'RegistrarScriptFile': 'sdfsfd',
+ 'ReplacementsFile': 'sdffsd',
+ 'SuppressStartupBanner': 'false',
+ 'TypeLibraryFile': 'sfsd',
+ 'UpdateFileHashes': 'true',
+ 'UpdateFileHashesSearchPath': 'sfsd',
+ 'VerboseOutput': 'true'},
+ 'ProjectReference': {
+ 'LinkLibraryDependencies': 'false',
+ 'UseLibraryDependencyInputs': 'true'},
+ '': {
+ 'EmbedManifest': 'false',
+ 'GenerateManifest': 'false',
+ 'IgnoreImportLibrary': 'true',
+ 'LinkIncremental': ''
+ },
+ 'ManifestResourceCompile': {
+ 'ResourceOutputFileName':
+ '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'}
+ }
+ actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
+ msvs_settings,
+ self.stderr)
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/gyp/pylib/gyp/MSVSToolFile.py b/third_party/python/gyp/pylib/gyp/MSVSToolFile.py
new file mode 100644
index 0000000000..74e529a17f
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/MSVSToolFile.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Visual Studio project reader/writer."""
+
+import gyp.common
+import gyp.easy_xml as easy_xml
+
+
+class Writer(object):
+ """Visual Studio XML tool file writer."""
+
+ def __init__(self, tool_file_path, name):
+ """Initializes the tool file.
+
+ Args:
+ tool_file_path: Path to the tool file.
+ name: Name of the tool file.
+ """
+ self.tool_file_path = tool_file_path
+ self.name = name
+ self.rules_section = ['Rules']
+
+ def AddCustomBuildRule(self, name, cmd, description,
+ additional_dependencies,
+ outputs, extensions):
+ """Adds a rule to the tool file.
+
+ Args:
+ name: Name of the rule.
+ description: Description of the rule.
+ cmd: Command line of the rule.
+ additional_dependencies: other files which may trigger the rule.
+ outputs: outputs of the rule.
+ extensions: extensions handled by the rule.
+ """
+ rule = ['CustomBuildRule',
+ {'Name': name,
+ 'ExecutionDescription': description,
+ 'CommandLine': cmd,
+ 'Outputs': ';'.join(outputs),
+ 'FileExtensions': ';'.join(extensions),
+ 'AdditionalDependencies':
+ ';'.join(additional_dependencies)
+ }]
+ self.rules_section.append(rule)
+
+ def WriteIfChanged(self):
+ """Writes the tool file."""
+ content = ['VisualStudioToolFile',
+ {'Version': '8.00',
+ 'Name': self.name
+ },
+ self.rules_section
+ ]
+ easy_xml.WriteXmlIfChanged(content, self.tool_file_path,
+ encoding="Windows-1252")
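+
+# A minimal usage sketch (file name and rule values are hypothetical):
+#
+#   writer = Writer('my_rules.rules', 'my_rules')
+#   writer.AddCustomBuildRule(name='lex', cmd='flex "$(InputPath)"',
+#                             description='Running flex on $(InputName)',
+#                             additional_dependencies=[],
+#                             outputs=['$(InputName).c'],
+#                             extensions=['.l'])
+#   writer.WriteIfChanged()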
diff --git a/third_party/python/gyp/pylib/gyp/MSVSUserFile.py b/third_party/python/gyp/pylib/gyp/MSVSUserFile.py
new file mode 100644
index 0000000000..2264d64015
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/MSVSUserFile.py
@@ -0,0 +1,147 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Visual Studio user preferences file writer."""
+
+import os
+import re
+import socket # for gethostname
+
+import gyp.common
+import gyp.easy_xml as easy_xml
+
+
+#------------------------------------------------------------------------------
+
+def _FindCommandInPath(command):
+ """If there are no slashes in the command given, this function
+ searches the PATH env to find the given command, and converts it
+ to an absolute path. We have to do this because MSVS is looking
+ for an actual file to launch a debugger on, not just a command
+ line. Note that this happens at GYP time, so anything needing to
+ be built needs to have a full path."""
+ if '/' in command or '\\' in command:
+ # If the command already has path elements (either relative or
+ # absolute), then assume it is constructed properly.
+ return command
+ else:
+ # Search through the path list and find an existing file that
+ # we can access.
+ paths = os.environ.get('PATH','').split(os.pathsep)
+ for path in paths:
+ item = os.path.join(path, command)
+ if os.path.isfile(item) and os.access(item, os.X_OK):
+ return item
+ return command
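+
+# An illustrative sketch (paths are hypothetical): _FindCommandInPath('foo.exe')
+# returns something like 'C:\\tools\\foo.exe' when that file is found on PATH
+# and is executable, while _FindCommandInPath(r'bin\foo.exe') is returned
+# unchanged because it already contains a path separator.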
+
+def _QuoteWin32CommandLineArgs(args):
+ new_args = []
+ for arg in args:
+ # Replace all double-quotes with double-double-quotes to escape
+ # them for cmd shell, and then quote the whole thing if there
+ # are any.
+ if arg.find('"') != -1:
+ arg = '""'.join(arg.split('"'))
+ arg = '"%s"' % arg
+
+ # Otherwise, if there are any spaces, quote the whole arg.
+ elif re.search(r'[ \t\n]', arg):
+ arg = '"%s"' % arg
+ new_args.append(arg)
+ return new_args
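+
+# An illustrative sketch of the quoting above (arguments are hypothetical):
+#
+#   _QuoteWin32CommandLineArgs(['plain', 'a b', 'say "hi"'])
+#   # -> ['plain', '"a b"', '"say ""hi"""']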
+
+class Writer(object):
+ """Visual Studio XML user user file writer."""
+
+ def __init__(self, user_file_path, version, name):
+ """Initializes the user file.
+
+ Args:
+ user_file_path: Path to the user file.
+ version: Version info.
+ name: Name of the user file.
+ """
+ self.user_file_path = user_file_path
+ self.version = version
+ self.name = name
+ self.configurations = {}
+
+ def AddConfig(self, name):
+ """Adds a configuration to the project.
+
+ Args:
+ name: Configuration name.
+ """
+ self.configurations[name] = ['Configuration', {'Name': name}]
+
+ def AddDebugSettings(self, config_name, command, environment = {},
+ working_directory=""):
+ """Adds a DebugSettings node to the user file for a particular config.
+
+ Args:
+ command: command line to run. First element in the list is the
+ executable. All elements of the command will be quoted if
+ necessary.
+      environment: dictionary of environment variables to set. (optional)
+      working_directory: working directory for the command. (optional)
+ """
+ command = _QuoteWin32CommandLineArgs(command)
+
+ abs_command = _FindCommandInPath(command[0])
+
+ if environment and isinstance(environment, dict):
+ env_list = ['%s="%s"' % (key, val)
+ for (key,val) in environment.items()]
+ environment = ' '.join(env_list)
+ else:
+ environment = ''
+
+ n_cmd = ['DebugSettings',
+ {'Command': abs_command,
+ 'WorkingDirectory': working_directory,
+ 'CommandArguments': " ".join(command[1:]),
+ 'RemoteMachine': socket.gethostname(),
+ 'Environment': environment,
+ 'EnvironmentMerge': 'true',
+ # Currently these are all "dummy" values that we're just setting
+ # in the default manner that MSVS does it. We could use some of
+ # these to add additional capabilities, I suppose, but they might
+ # not have parity with other platforms then.
+ 'Attach': 'false',
+ 'DebuggerType': '3', # 'auto' debugger
+ 'Remote': '1',
+ 'RemoteCommand': '',
+ 'HttpUrl': '',
+ 'PDBPath': '',
+ 'SQLDebugging': '',
+ 'DebuggerFlavor': '0',
+ 'MPIRunCommand': '',
+ 'MPIRunArguments': '',
+ 'MPIRunWorkingDirectory': '',
+ 'ApplicationCommand': '',
+ 'ApplicationArguments': '',
+ 'ShimCommand': '',
+ 'MPIAcceptMode': '',
+ 'MPIAcceptFilter': ''
+ }]
+
+ # Find the config, and add it if it doesn't exist.
+ if config_name not in self.configurations:
+ self.AddConfig(config_name)
+
+ # Add the DebugSettings onto the appropriate config.
+ self.configurations[config_name].append(n_cmd)
+
+ def WriteIfChanged(self):
+ """Writes the user file."""
+ configs = ['Configurations']
+ for config, spec in sorted(self.configurations.items()):
+ configs.append(spec)
+
+ content = ['VisualStudioUserFile',
+ {'Version': self.version.ProjectVersion(),
+ 'Name': self.name
+ },
+ configs]
+ easy_xml.WriteXmlIfChanged(content, self.user_file_path,
+ encoding="Windows-1252")
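+
+# A minimal usage sketch (the version object and all values are hypothetical):
+#
+#   user_file = Writer('foo.vcproj.foo.user', msvs_version, 'foo')
+#   user_file.AddDebugSettings('Debug|Win32', ['python', 'run_tests.py'],
+#                              environment={'FOO': '1'})
+#   user_file.WriteIfChanged()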
diff --git a/third_party/python/gyp/pylib/gyp/MSVSUtil.py b/third_party/python/gyp/pylib/gyp/MSVSUtil.py
new file mode 100644
index 0000000000..f24530b275
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/MSVSUtil.py
@@ -0,0 +1,271 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions shared amongst the Windows generators."""
+
+import copy
+import os
+
+
+# A dictionary mapping supported target types to extensions.
+TARGET_TYPE_EXT = {
+ 'executable': 'exe',
+ 'loadable_module': 'dll',
+ 'shared_library': 'dll',
+ 'static_library': 'lib',
+ 'windows_driver': 'sys',
+}
+
+
+def _GetLargePdbShimCcPath():
+ """Returns the path of the large_pdb_shim.cc file."""
+ this_dir = os.path.abspath(os.path.dirname(__file__))
+ src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
+ win_data_dir = os.path.join(src_dir, 'data', 'win')
+ large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
+ return large_pdb_shim_cc
+
+
+def _DeepCopySomeKeys(in_dict, keys):
+ """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
+
+ Arguments:
+ in_dict: The dictionary to copy.
+ keys: The keys to be copied. If a key is in this list and doesn't exist in
+ |in_dict| this is not an error.
+ Returns:
+ The partially deep-copied dictionary.
+ """
+ d = {}
+ for key in keys:
+ if key not in in_dict:
+ continue
+ d[key] = copy.deepcopy(in_dict[key])
+ return d
+
+
+def _SuffixName(name, suffix):
+ """Add a suffix to the end of a target.
+
+ Arguments:
+ name: name of the target (foo#target)
+ suffix: the suffix to be added
+ Returns:
+ Target name with suffix added (foo_suffix#target)
+ """
+ parts = name.rsplit('#', 1)
+ parts[0] = '%s_%s' % (parts[0], suffix)
+ return '#'.join(parts)
+
+
+def _ShardName(name, number):
+ """Add a shard number to the end of a target.
+
+ Arguments:
+ name: name of the target (foo#target)
+ number: shard number
+ Returns:
+ Target name with shard added (foo_1#target)
+ """
+ return _SuffixName(name, str(number))
+
+
+def ShardTargets(target_list, target_dicts):
+ """Shard some targets apart to work around the linkers limits.
+
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ Returns:
+ Tuple of the new sharded versions of the inputs.
+ """
+ # Gather the targets to shard, and how many pieces.
+ targets_to_shard = {}
+ for t in target_dicts:
+ shards = int(target_dicts[t].get('msvs_shard', 0))
+ if shards:
+ targets_to_shard[t] = shards
+ # Shard target_list.
+ new_target_list = []
+ for t in target_list:
+ if t in targets_to_shard:
+ for i in range(targets_to_shard[t]):
+ new_target_list.append(_ShardName(t, i))
+ else:
+ new_target_list.append(t)
+ # Shard target_dict.
+ new_target_dicts = {}
+ for t in target_dicts:
+ if t in targets_to_shard:
+ for i in range(targets_to_shard[t]):
+ name = _ShardName(t, i)
+ new_target_dicts[name] = copy.copy(target_dicts[t])
+ new_target_dicts[name]['target_name'] = _ShardName(
+ new_target_dicts[name]['target_name'], i)
+ sources = new_target_dicts[name].get('sources', [])
+ new_sources = []
+ for pos in range(i, len(sources), targets_to_shard[t]):
+ new_sources.append(sources[pos])
+ new_target_dicts[name]['sources'] = new_sources
+ else:
+ new_target_dicts[t] = target_dicts[t]
+ # Shard dependencies.
+ for t in sorted(new_target_dicts):
+ for deptype in ('dependencies', 'dependencies_original'):
+ dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
+ new_dependencies = []
+ for d in dependencies:
+ if d in targets_to_shard:
+ for i in range(targets_to_shard[d]):
+ new_dependencies.append(_ShardName(d, i))
+ else:
+ new_dependencies.append(d)
+ new_target_dicts[t][deptype] = new_dependencies
+
+ return (new_target_list, new_target_dicts)
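+
+# An illustrative sketch of the sharding above (target names and sources are
+# hypothetical).  A target 'a.gyp:foo#target' with 'msvs_shard': 2 and
+# sources ['a.cc', 'b.cc', 'c.cc', 'd.cc'] becomes two targets:
+#
+#   'a.gyp:foo_0#target' with sources ['a.cc', 'c.cc']
+#   'a.gyp:foo_1#target' with sources ['b.cc', 'd.cc']
+#
+# and every dependency on the original target is rewritten to depend on all
+# of the shards.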
+
+
+def _GetPdbPath(target_dict, config_name, vars):
+ """Returns the path to the PDB file that will be generated by a given
+ configuration.
+
+ The lookup proceeds as follows:
+ - Look for an explicit path in the VCLinkerTool configuration block.
+ - Look for an 'msvs_large_pdb_path' variable.
+ - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
+ specified.
+ - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
+
+ Arguments:
+ target_dict: The target dictionary to be searched.
+ config_name: The name of the configuration of interest.
+ vars: A dictionary of common GYP variables with generator-specific values.
+ Returns:
+ The path of the corresponding PDB file.
+ """
+ config = target_dict['configurations'][config_name]
+ msvs = config.setdefault('msvs_settings', {})
+
+ linker = msvs.get('VCLinkerTool', {})
+
+ pdb_path = linker.get('ProgramDatabaseFile')
+ if pdb_path:
+ return pdb_path
+
+ variables = target_dict.get('variables', {})
+ pdb_path = variables.get('msvs_large_pdb_path', None)
+ if pdb_path:
+ return pdb_path
+
+
+ pdb_base = target_dict.get('product_name', target_dict['target_name'])
+ pdb_base = '%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']])
+ pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
+
+ return pdb_path
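+
+# An illustrative sketch of the fallback behaviour (names are hypothetical):
+#
+#   target_dict = {'target_name': 'foo', 'type': 'shared_library',
+#                  'configurations': {'Debug': {}}}
+#   _GetPdbPath(target_dict, 'Debug', {'PRODUCT_DIR': '$(OutDir)'})
+#   # -> '$(OutDir)/foo.dll.pdb'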
+
+
+def InsertLargePdbShims(target_list, target_dicts, vars):
+ """Insert a shim target that forces the linker to use 4KB pagesize PDBs.
+
+ This is a workaround for targets with PDBs greater than 1GB in size, the
+ limit for the 1KB pagesize PDBs created by the linker by default.
+
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ vars: A dictionary of common GYP variables with generator-specific values.
+ Returns:
+ Tuple of the shimmed version of the inputs.
+ """
+ # Determine which targets need shimming.
+ targets_to_shim = []
+ for t in target_dicts:
+ target_dict = target_dicts[t]
+
+ # We only want to shim targets that have msvs_large_pdb enabled.
+ if not int(target_dict.get('msvs_large_pdb', 0)):
+ continue
+ # This is intended for executable, shared_library and loadable_module
+ # targets where every configuration is set up to produce a PDB output.
+ # If any of these conditions is not true then the shim logic will fail
+ # below.
+ targets_to_shim.append(t)
+
+ large_pdb_shim_cc = _GetLargePdbShimCcPath()
+
+ for t in targets_to_shim:
+ target_dict = target_dicts[t]
+ target_name = target_dict.get('target_name')
+
+ base_dict = _DeepCopySomeKeys(target_dict,
+ ['configurations', 'default_configuration', 'toolset'])
+
+ # This is the dict for copying the source file (part of the GYP tree)
+ # to the intermediate directory of the project. This is necessary because
+ # we can't always build a relative path to the shim source file (on Windows
+ # GYP and the project may be on different drives), and Ninja hates absolute
+ # paths (it ends up generating the .obj and .obj.d alongside the source
+    # file, polluting GYP's tree).
+ copy_suffix = 'large_pdb_copy'
+ copy_target_name = target_name + '_' + copy_suffix
+ full_copy_target_name = _SuffixName(t, copy_suffix)
+ shim_cc_basename = os.path.basename(large_pdb_shim_cc)
+ shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
+ shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
+ copy_dict = copy.deepcopy(base_dict)
+ copy_dict['target_name'] = copy_target_name
+ copy_dict['type'] = 'none'
+ copy_dict['sources'] = [ large_pdb_shim_cc ]
+ copy_dict['copies'] = [{
+ 'destination': shim_cc_dir,
+ 'files': [ large_pdb_shim_cc ]
+ }]
+
+ # This is the dict for the PDB generating shim target. It depends on the
+ # copy target.
+ shim_suffix = 'large_pdb_shim'
+ shim_target_name = target_name + '_' + shim_suffix
+ full_shim_target_name = _SuffixName(t, shim_suffix)
+ shim_dict = copy.deepcopy(base_dict)
+ shim_dict['target_name'] = shim_target_name
+ shim_dict['type'] = 'static_library'
+ shim_dict['sources'] = [ shim_cc_path ]
+ shim_dict['dependencies'] = [ full_copy_target_name ]
+
+ # Set up the shim to output its PDB to the same location as the final linker
+ # target.
+ for config_name, config in shim_dict.get('configurations').items():
+ pdb_path = _GetPdbPath(target_dict, config_name, vars)
+
+ # A few keys that we don't want to propagate.
+ for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
+ config.pop(key, None)
+
+ msvs = config.setdefault('msvs_settings', {})
+
+ # Update the compiler directives in the shim target.
+ compiler = msvs.setdefault('VCCLCompilerTool', {})
+ compiler['DebugInformationFormat'] = '3'
+ compiler['ProgramDataBaseFileName'] = pdb_path
+
+ # Set the explicit PDB path in the appropriate configuration of the
+ # original target.
+ config = target_dict['configurations'][config_name]
+ msvs = config.setdefault('msvs_settings', {})
+ linker = msvs.setdefault('VCLinkerTool', {})
+ linker['GenerateDebugInformation'] = 'true'
+ linker['ProgramDatabaseFile'] = pdb_path
+
+ # Add the new targets. They must go to the beginning of the list so that
+ # the dependency generation works as expected in ninja.
+ target_list.insert(0, full_copy_target_name)
+ target_list.insert(0, full_shim_target_name)
+ target_dicts[full_copy_target_name] = copy_dict
+ target_dicts[full_shim_target_name] = shim_dict
+
+ # Update the original target to depend on the shim target.
+ target_dict.setdefault('dependencies', []).append(full_shim_target_name)
+
+ return (target_list, target_dicts)
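+
+# A target opts into this shim by setting 'msvs_large_pdb': 1 in its .gyp
+# file, for example (hypothetical target):
+#
+#   {
+#     'target_name': 'big_target',
+#     'type': 'shared_library',
+#     'msvs_large_pdb': 1,
+#     'sources': ['big.cc'],
+#   }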
diff --git a/third_party/python/gyp/pylib/gyp/MSVSVersion.py b/third_party/python/gyp/pylib/gyp/MSVSVersion.py
new file mode 100644
index 0000000000..71582ccdce
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/MSVSVersion.py
@@ -0,0 +1,504 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Handle version information related to Visual Stuio."""
+
+import errno
+import os
+import re
+import subprocess
+import sys
+import gyp
+import glob
+
+
+def JoinPath(*args):
+ return os.path.normpath(os.path.join(*args))
+
+
+class VisualStudioVersion(object):
+ """Information regarding a version of Visual Studio."""
+
+ def __init__(self, short_name, description,
+ solution_version, project_version, flat_sln, uses_vcxproj,
+ path, sdk_based, default_toolset=None, compatible_sdks=None):
+ self.short_name = short_name
+ self.description = description
+ self.solution_version = solution_version
+ self.project_version = project_version
+ self.flat_sln = flat_sln
+ self.uses_vcxproj = uses_vcxproj
+ self.path = path
+ self.sdk_based = sdk_based
+ self.default_toolset = default_toolset
+ compatible_sdks = compatible_sdks or []
+ compatible_sdks.sort(key=lambda v: float(v.replace('v', '')), reverse=True)
+ self.compatible_sdks = compatible_sdks
+
+ def ShortName(self):
+ return self.short_name
+
+ def Description(self):
+ """Get the full description of the version."""
+ return self.description
+
+ def SolutionVersion(self):
+ """Get the version number of the sln files."""
+ return self.solution_version
+
+ def ProjectVersion(self):
+ """Get the version number of the vcproj or vcxproj files."""
+ return self.project_version
+
+ def FlatSolution(self):
+ return self.flat_sln
+
+ def UsesVcxproj(self):
+ """Returns true if this version uses a vcxproj file."""
+ return self.uses_vcxproj
+
+ def ProjectExtension(self):
+ """Returns the file extension for the project."""
+ return self.uses_vcxproj and '.vcxproj' or '.vcproj'
+
+ def Path(self):
+ """Returns the path to Visual Studio installation."""
+ return self.path
+
+ def ToolPath(self, tool):
+ """Returns the path to a given compiler tool. """
+ return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
+
+ def DefaultToolset(self):
+ """Returns the msbuild toolset version that will be used in the absence
+ of a user override."""
+ return self.default_toolset
+
+
+ def _SetupScriptInternal(self, target_arch):
+ """Returns a command (with arguments) to be used to set up the
+ environment."""
+ assert target_arch in ('x86', 'x64'), "target_arch not supported"
+ # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
+ # depot_tools build tools and should run SetEnv.Cmd to set up the
+ # environment. The check for WindowsSDKDir alone is not sufficient because
+ # this is set by running vcvarsall.bat.
+ sdk_dir = os.environ.get('WindowsSDKDir', '')
+ setup_path = JoinPath(sdk_dir, 'Bin', 'SetEnv.Cmd')
+ if self.sdk_based and sdk_dir and os.path.exists(setup_path):
+ return [setup_path, '/' + target_arch]
+
+ is_host_arch_x64 = (
+ os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
+ os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'
+ )
+
+ # For VS2017 (and newer) it's fairly easy
+ if self.short_name >= '2017':
+ script_path = JoinPath(self.path,
+ 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat')
+
+ # Always use a native executable, cross-compiling if necessary.
+ host_arch = 'amd64' if is_host_arch_x64 else 'x86'
+ msvc_target_arch = 'amd64' if target_arch == 'x64' else 'x86'
+ arg = host_arch
+ if host_arch != msvc_target_arch:
+ arg += '_' + msvc_target_arch
+
+ return [script_path, arg]
+
+ # We try to find the best version of the env setup batch.
+ vcvarsall = JoinPath(self.path, 'VC', 'vcvarsall.bat')
+ if target_arch == 'x86':
+ if self.short_name >= '2013' and self.short_name[-1] != 'e' and \
+ is_host_arch_x64:
+ # VS2013 and later, non-Express have a x64-x86 cross that we want
+ # to prefer.
+ return [vcvarsall, 'amd64_x86']
+ else:
+ # Otherwise, the standard x86 compiler. We don't use VC/vcvarsall.bat
+ # for x86 because vcvarsall calls vcvars32, which it can only find if
+ # VS??COMNTOOLS is set, which isn't guaranteed.
+ return [JoinPath(self.path, 'Common7', 'Tools', 'vsvars32.bat')]
+ elif target_arch == 'x64':
+ arg = 'x86_amd64'
+ # Use the 64-on-64 compiler if we're not using an express edition and
+ # we're running on a 64bit OS.
+ if self.short_name[-1] != 'e' and is_host_arch_x64:
+ arg = 'amd64'
+ return [vcvarsall, arg]
+
+ def SetupScript(self, target_arch):
+ script_data = self._SetupScriptInternal(target_arch)
+ script_path = script_data[0]
+ if not os.path.exists(script_path):
+ raise Exception('%s is missing - make sure VC++ tools are installed.' %
+ script_path)
+ return script_data
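+
+  # An illustrative sketch (install path is hypothetical): for a VS2017
+  # install at C:\VS2017 on an x64 host, SetupScript('x64') returns
+  # roughly ['C:\\VS2017\\VC\\Auxiliary\\Build\\vcvarsall.bat', 'amd64'],
+  # a command that callers run to set up INCLUDE/LIB/PATH for the toolchain.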
+
+
+def _RegistryQueryBase(sysdir, key, value):
+ """Use reg.exe to read a particular key.
+
+  While ideally we might use the win32 module, we would like gyp to be
+  Python-neutral; for instance, Cygwin Python lacks this module.
+
+ Arguments:
+ sysdir: The system subdirectory to attempt to launch reg.exe from.
+ key: The registry key to read from.
+ value: The particular value to read.
+ Return:
+ stdout from reg.exe, or None for failure.
+ """
+ # Skip if not on Windows or Python Win32 setup issue
+ if sys.platform not in ('win32', 'cygwin'):
+ return None
+ # Setup params to pass to and attempt to launch reg.exe
+ cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
+ 'query', key]
+ if value:
+ cmd.extend(['/v', value])
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
+ # Note that the error text may be in [1] in some cases
+ text = p.communicate()[0]
+ # Check return code from reg.exe; officially 0==success and 1==error
+ if p.returncode:
+ return None
+ return text
+
+
+def _RegistryQuery(key, value=None):
+ r"""Use reg.exe to read a particular key through _RegistryQueryBase.
+
+ First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
+ that fails, it falls back to System32. Sysnative is available on Vista and
+  up, and on Windows Server 2003 and XP through KB patch 942589. Note
+  that Sysnative will always fail when using 64-bit Python, because it is a
+  virtual directory; System32 works correctly there in the first place.
+
+ KB 942589 - http://support.microsoft.com/kb/942589/en-us.
+
+ Arguments:
+ key: The registry key.
+ value: The particular registry value to read (optional).
+ Return:
+ stdout from reg.exe, or None for failure.
+ """
+ text = None
+ try:
+ text = _RegistryQueryBase('Sysnative', key, value)
+ except OSError as e:
+ if e.errno == errno.ENOENT:
+ text = _RegistryQueryBase('System32', key, value)
+ else:
+ raise
+ return text
+
+
+def _RegistryGetValueUsingWinReg(key, value):
+ """Use the _winreg module to obtain the value of a registry key.
+
+ Args:
+ key: The registry key.
+ value: The particular registry value to read.
+ Return:
+ contents of the registry key's value, or None on failure. Throws
+ ImportError if _winreg is unavailable.
+ """
+ try:
+ import _winreg as winreg
+ except ImportError:
+ import winreg
+ try:
+ root, subkey = key.split('\\', 1)
+ assert root == 'HKLM' # Only need HKLM for now.
+ with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
+ return winreg.QueryValueEx(hkey, value)[0]
+ except WindowsError:
+ return None
+
+
+def _RegistryGetValue(key, value):
+ """Use _winreg or reg.exe to obtain the value of a registry key.
+
+  Using _winreg is preferable because it solves an issue in some corporate
+  environments where access to reg.exe is locked down. However, we still need
+  to fall back to reg.exe for the case where the _winreg module is not available
+ (for example in cygwin python).
+
+ Args:
+ key: The registry key.
+ value: The particular registry value to read.
+ Return:
+ contents of the registry key's value, or None on failure.
+ """
+ try:
+ return _RegistryGetValueUsingWinReg(key, value)
+ except ImportError:
+ pass
+
+  # Fall back to reg.exe if we fail to import _winreg.
+ text = _RegistryQuery(key, value)
+ if not text:
+ return None
+ # Extract value.
+ match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
+ if not match:
+ return None
+ return match.group(1)
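+
+# An illustrative sketch: the Visual Studio detection code below uses this
+# helper roughly like
+#
+#   _RegistryGetValue(r'HKLM\Software\Microsoft\VisualStudio\SxS\VS7', '15.0')
+#
+# which returns the install directory string, or None if the key or value is
+# absent.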
+
+
+def _CreateVersion(name, path, sdk_based=False):
+ """Sets up MSVS project generation.
+
+  Setup is based on the GYP_MSVS_VERSION environment variable or whatever is
+  autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
+  passed in that doesn't match a value in versions, Python will throw an error.
+ """
+ if path:
+ path = os.path.normpath(path)
+ versions = {
+ '2019': VisualStudioVersion('2019',
+ 'Visual Studio 2019',
+ solution_version='12.00',
+ project_version='15.0',
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset='v141',
+ compatible_sdks=['v8.1', 'v10.0']),
+ '2017': VisualStudioVersion('2017',
+ 'Visual Studio 2017',
+ solution_version='12.00',
+ project_version='15.0',
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset='v141',
+ compatible_sdks=['v8.1', 'v10.0']),
+ '2015': VisualStudioVersion('2015',
+ 'Visual Studio 2015',
+ solution_version='12.00',
+ project_version='14.0',
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset='v140'),
+ '2013': VisualStudioVersion('2013',
+ 'Visual Studio 2013',
+ solution_version='13.00',
+ project_version='12.0',
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset='v120'),
+ '2013e': VisualStudioVersion('2013e',
+ 'Visual Studio 2013',
+ solution_version='13.00',
+ project_version='12.0',
+ flat_sln=True,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset='v120'),
+ '2012': VisualStudioVersion('2012',
+ 'Visual Studio 2012',
+ solution_version='12.00',
+ project_version='4.0',
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset='v110'),
+ '2012e': VisualStudioVersion('2012e',
+ 'Visual Studio 2012',
+ solution_version='12.00',
+ project_version='4.0',
+ flat_sln=True,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset='v110'),
+ '2010': VisualStudioVersion('2010',
+ 'Visual Studio 2010',
+ solution_version='11.00',
+ project_version='4.0',
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based),
+ '2010e': VisualStudioVersion('2010e',
+ 'Visual C++ Express 2010',
+ solution_version='11.00',
+ project_version='4.0',
+ flat_sln=True,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based),
+ '2008': VisualStudioVersion('2008',
+ 'Visual Studio 2008',
+ solution_version='10.00',
+ project_version='9.00',
+ flat_sln=False,
+ uses_vcxproj=False,
+ path=path,
+ sdk_based=sdk_based),
+ '2008e': VisualStudioVersion('2008e',
+ 'Visual Studio 2008',
+ solution_version='10.00',
+ project_version='9.00',
+ flat_sln=True,
+ uses_vcxproj=False,
+ path=path,
+ sdk_based=sdk_based),
+ '2005': VisualStudioVersion('2005',
+ 'Visual Studio 2005',
+ solution_version='9.00',
+ project_version='8.00',
+ flat_sln=False,
+ uses_vcxproj=False,
+ path=path,
+ sdk_based=sdk_based),
+ '2005e': VisualStudioVersion('2005e',
+ 'Visual Studio 2005',
+ solution_version='9.00',
+ project_version='8.00',
+ flat_sln=True,
+ uses_vcxproj=False,
+ path=path,
+ sdk_based=sdk_based),
+ }
+ return versions[str(name)]
+
+
+def _ConvertToCygpath(path):
+ """Convert to cygwin path if we are using cygwin."""
+ if sys.platform == 'cygwin':
+ p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
+ path = p.communicate()[0].strip()
+ return path
+
+
+def _DetectVisualStudioVersions(versions_to_check, force_express):
+ """Collect the list of installed visual studio versions.
+
+ Returns:
+ A list of Visual Studio versions installed, in descending order of
+ usage preference.
+ The result is based on the registry and a quick check for devenv.exe.
+ Possibilities are:
+ 2005(e) - Visual Studio 2005 (8)
+ 2008(e) - Visual Studio 2008 (9)
+ 2010(e) - Visual Studio 2010 (10)
+ 2012(e) - Visual Studio 2012 (11)
+ 2013(e) - Visual Studio 2013 (12)
+ 2015 - Visual Studio 2015 (14)
+ 2017 - Visual Studio 2017 (15)
+ where the (e) suffix denotes an Express edition of MSVS and is absent otherwise.
+ """
+ version_to_year = {
+ '8.0': '2005',
+ '9.0': '2008',
+ '10.0': '2010',
+ '11.0': '2012',
+ '12.0': '2013',
+ '14.0': '2015',
+ '15.0': '2017'
+ }
+ versions = []
+ for version in versions_to_check:
+ # Old method of searching for which VS version is installed.
+ # We don't use the 2010-encouraged way because we also want to get the
+ # path to the binaries, which it doesn't offer.
+ keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
+ r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
+ r'HKLM\Software\Microsoft\VCExpress\%s' % version,
+ r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
+ for index in range(len(keys)):
+ path = _RegistryGetValue(keys[index], 'InstallDir')
+ if not path:
+ continue
+ path = _ConvertToCygpath(path)
+ # Check for full.
+ full_path = os.path.join(path, 'devenv.exe')
+ express_path = os.path.join(path, '*express.exe')
+ if not force_express and os.path.exists(full_path):
+ # Add this one.
+ versions.append(_CreateVersion(version_to_year[version],
+ os.path.join(path, '..', '..')))
+ # Check for express.
+ elif glob.glob(express_path):
+ # Add this one.
+ versions.append(_CreateVersion(version_to_year[version] + 'e',
+ os.path.join(path, '..', '..')))
+
+ # The old method above does not work when only SDK is installed.
+ keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
+ r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7',
+ r'HKLM\Software\Microsoft\VisualStudio\SxS\VS7',
+ r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VS7']
+ for index in range(len(keys)):
+ path = _RegistryGetValue(keys[index], version)
+ if not path:
+ continue
+ path = _ConvertToCygpath(path)
+ if version == '15.0':
+ if os.path.exists(path):
+ versions.append(_CreateVersion('2017', path))
+ elif version != '14.0': # There is no Express edition for 2015.
+ versions.append(_CreateVersion(version_to_year[version] + 'e',
+ os.path.join(path, '..'), sdk_based=True))
+
+ return versions
+
+
+def SelectVisualStudioVersion(version='auto', allow_fallback=True):
+ """Select which version of Visual Studio projects to generate.
+
+ Arguments:
+ version: Hook to allow the caller to force a particular version (vs auto).
+ allow_fallback: Whether to fall back to a default version if no matching
+ installation is found.
+ Returns:
+ An object representing a visual studio project format version.
+ """
+ # In auto mode, check environment variable for override.
+ if version == 'auto':
+ version = os.environ.get('GYP_MSVS_VERSION', 'auto')
+ version_map = {
+ 'auto': ('15.0', '14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
+ '2005': ('8.0',),
+ '2005e': ('8.0',),
+ '2008': ('9.0',),
+ '2008e': ('9.0',),
+ '2010': ('10.0',),
+ '2010e': ('10.0',),
+ '2012': ('11.0',),
+ '2012e': ('11.0',),
+ '2013': ('12.0',),
+ '2013e': ('12.0',),
+ '2015': ('14.0',),
+ '2017': ('15.0',),
+ }
+ override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
+ if override_path:
+ msvs_version = os.environ.get('GYP_MSVS_VERSION')
+ if not msvs_version:
+ raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
+ 'set to a particular version (e.g. 2010e).')
+ return _CreateVersion(msvs_version, override_path, sdk_based=True)
+ version = str(version)
+ versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
+ if not versions:
+ if not allow_fallback:
+ raise ValueError('Could not locate Visual Studio installation.')
+ if version == 'auto':
+ # Default to 2005 if we couldn't find anything
+ return _CreateVersion('2005', None)
+ else:
+ return _CreateVersion(version, None)
+ return versions[0]
diff --git a/third_party/python/gyp/pylib/gyp/__init__.py b/third_party/python/gyp/pylib/gyp/__init__.py
new file mode 100755
index 0000000000..e038151ba7
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/__init__.py
@@ -0,0 +1,555 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import copy
+import gyp.input
+import optparse
+import os.path
+import re
+import shlex
+import sys
+import traceback
+from gyp.common import GypError
+
+try:
+ # basestring was removed in python3.
+ basestring
+except NameError:
+ basestring = str
+
+# Default debug modes for GYP
+debug = {}
+
+# List of "official" debug modes, but you can use anything you like.
+DEBUG_GENERAL = 'general'
+DEBUG_VARIABLES = 'variables'
+DEBUG_INCLUDES = 'includes'
+
+def DebugOutput(mode, message, *args):
+ if 'all' in gyp.debug or mode in gyp.debug:
+ ctx = ('unknown', 0, 'unknown')
+ try:
+ f = traceback.extract_stack(limit=2)
+ if f:
+ ctx = f[0][:3]
+ except:
+ pass
+ if args:
+ message %= args
+ print('%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
+ ctx[1], ctx[2], message))
+
+def FindBuildFiles():
+ extension = '.gyp'
+ files = os.listdir(os.getcwd())
+ build_files = []
+ for file in files:
+ if file.endswith(extension):
+ build_files.append(file)
+ return build_files
+
+
+def Load(build_files, format, default_variables={},
+ includes=[], depth='.', params=None, check=False,
+ circular_check=True, duplicate_basename_check=True):
+ """
+ Loads one or more specified build files.
+ default_variables and includes will be copied before use.
+ Returns the generator for the specified format and the
+ data returned by loading the specified build files.
+ """
+ if params is None:
+ params = {}
+
+ if '-' in format:
+ format, params['flavor'] = format.split('-', 1)
+
+ default_variables = copy.copy(default_variables)
+
+ # Default variables provided by this program and its modules should be
+ # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
+ # avoiding collisions with user and automatic variables.
+ default_variables['GENERATOR'] = format
+ default_variables['GENERATOR_FLAVOR'] = params.get('flavor', '')
+
+ # Format can be a custom python file, or by default the name of a module
+ # within gyp.generator.
+ if format.endswith('.py'):
+ generator_name = os.path.splitext(format)[0]
+ path, generator_name = os.path.split(generator_name)
+
+ # Make sure the path to the custom generator is in sys.path
+ # Don't worry about removing it once we are done. Keeping the path
+ # to each generator that is used in sys.path is likely harmless and
+ # arguably a good idea.
+ path = os.path.abspath(path)
+ if path not in sys.path:
+ sys.path.insert(0, path)
+ else:
+ generator_name = 'gyp.generator.' + format
+
+ # These parameters are passed in order (as opposed to by key)
+ # because ActivePython cannot handle key parameters to __import__.
+ generator = __import__(generator_name, globals(), locals(), generator_name)
+ for (key, val) in generator.generator_default_variables.items():
+ default_variables.setdefault(key, val)
+
+ # Give the generator the opportunity to set additional variables based on
+ # the params it will receive in the output phase.
+ if getattr(generator, 'CalculateVariables', None):
+ generator.CalculateVariables(default_variables, params)
+
+ # Give the generator the opportunity to set generator_input_info based on
+ # the params it will receive in the output phase.
+ if getattr(generator, 'CalculateGeneratorInputInfo', None):
+ generator.CalculateGeneratorInputInfo(params)
+
+ # Fetch the generator-specific info that gets fed to input. We use getattr
+ # so we can supply defaults and the generators only have to provide what
+ # they need.
+ generator_input_info = {
+ 'non_configuration_keys':
+ getattr(generator, 'generator_additional_non_configuration_keys', []),
+ 'path_sections':
+ getattr(generator, 'generator_additional_path_sections', []),
+ 'extra_sources_for_rules':
+ getattr(generator, 'generator_extra_sources_for_rules', []),
+ 'generator_supports_multiple_toolsets':
+ getattr(generator, 'generator_supports_multiple_toolsets', False),
+ 'generator_wants_static_library_dependencies_adjusted':
+ getattr(generator,
+ 'generator_wants_static_library_dependencies_adjusted', True),
+ 'generator_wants_sorted_dependencies':
+ getattr(generator, 'generator_wants_sorted_dependencies', False),
+ 'generator_filelist_paths':
+ getattr(generator, 'generator_filelist_paths', None),
+ }
+
+ # Process the input specific to this generator.
+ result = gyp.input.Load(build_files, default_variables, includes[:],
+ depth, generator_input_info, check, circular_check,
+ duplicate_basename_check,
+ params['parallel'], params['root_targets'])
+ return [generator] + result
+
+def NameValueListToDict(name_value_list):
+ """
+ Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
+ of the pairs. If a string is simply NAME, then the value in the dictionary
+ is set to True. If VALUE can be converted to an integer, it is.
+ """
+ result = { }
+ for item in name_value_list:
+ tokens = item.split('=', 1)
+ if len(tokens) == 2:
+ # If we can make it an int, use that, otherwise, use the string.
+ try:
+ token_value = int(tokens[1])
+ except ValueError:
+ token_value = tokens[1]
+ # Set the variable to the supplied value.
+ result[tokens[0]] = token_value
+ else:
+ # No value supplied, treat it as a boolean and set it.
+ result[tokens[0]] = True
+ return result
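+
+# Illustrative example: NameValueListToDict(['OS=win', 'chromium=1', 'component'])
+# returns {'OS': 'win', 'chromium': 1, 'component': True}.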
+
+def ShlexEnv(env_name):
+ flags = os.environ.get(env_name, [])
+ if flags:
+ flags = shlex.split(flags)
+ return flags
+
+def FormatOpt(opt, value):
+ if opt.startswith('--'):
+ return '%s=%s' % (opt, value)
+ return opt + value
+
+def RegenerateAppendFlag(flag, values, predicate, env_name, options):
+ """Regenerate a list of command line flags, for an option of action='append'.
+
+ The |env_name|, if given, is checked in the environment and used to generate
+ an initial list of options, then the options that were specified on the
+ command line (given in |values|) are appended. This matches the handling of
+ environment variables and command line flags where command line flags override
+ the environment, while not requiring the environment to be set when the flags
+ are used again.
+ """
+ flags = []
+ if options.use_environment and env_name:
+ for flag_value in ShlexEnv(env_name):
+ value = FormatOpt(flag, predicate(flag_value))
+ if value in flags:
+ flags.remove(value)
+ flags.append(value)
+ if values:
+ for flag_value in values:
+ flags.append(FormatOpt(flag, predicate(flag_value)))
+ return flags
+
+def RegenerateFlags(options):
+ """Given a parsed options object, and taking the environment variables into
+ account, returns a list of flags that should regenerate an equivalent options
+ object (even in the absence of the environment variables).
+
+ Any path options will be normalized relative to depth.
+
+ The format flag is not included, as it is assumed the calling generator will
+ set that as appropriate.
+ """
+ def FixPath(path):
+ path = gyp.common.FixIfRelativePath(path, options.depth)
+ if not path:
+ return os.path.curdir
+ return path
+
+ def Noop(value):
+ return value
+
+ # We always want to ignore the environment when regenerating, to avoid
+ # duplicate or changed flags in the environment at the time of regeneration.
+ flags = ['--ignore-environment']
+ for name, metadata in options._regeneration_metadata.items():
+ opt = metadata['opt']
+ value = getattr(options, name)
+ value_predicate = metadata['type'] == 'path' and FixPath or Noop
+ action = metadata['action']
+ env_name = metadata['env_name']
+ if action == 'append':
+ flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
+ env_name, options))
+ elif action in ('store', None): # None is a synonym for 'store'.
+ if value:
+ flags.append(FormatOpt(opt, value_predicate(value)))
+ elif options.use_environment and env_name and os.environ.get(env_name):
+ flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
+ elif action in ('store_true', 'store_false'):
+ if ((action == 'store_true' and value) or
+ (action == 'store_false' and not value)):
+ flags.append(opt)
+ elif options.use_environment and env_name:
+ print(('Warning: environment regeneration unimplemented '
+ 'for %s flag %r env_name %r' % (action, opt,
+ env_name)),
+ file=sys.stderr)
+ else:
+ print(('Warning: regeneration unimplemented for action %r '
+ 'flag %r' % (action, opt)), file=sys.stderr)
+
+ return flags
+
+class RegeneratableOptionParser(optparse.OptionParser):
+ def __init__(self):
+ self.__regeneratable_options = {}
+ optparse.OptionParser.__init__(self)
+
+ def add_option(self, *args, **kw):
+ """Add an option to the parser.
+
+ This accepts the same arguments as OptionParser.add_option, plus the
+ following:
+ regenerate: can be set to False to prevent this option from being included
+ in regeneration.
+ env_name: name of environment variable that additional values for this
+ option come from.
+ type: setting type='path' tells the regenerator that the values of
+ this option need to be made relative to options.depth.
+ """
+ env_name = kw.pop('env_name', None)
+ if 'dest' in kw and kw.pop('regenerate', True):
+ dest = kw['dest']
+
+ # The path type is needed for regenerating; for optparse we can just treat
+ # it as a string.
+ type = kw.get('type')
+ if type == 'path':
+ kw['type'] = 'string'
+
+ self.__regeneratable_options[dest] = {
+ 'action': kw.get('action'),
+ 'type': type,
+ 'env_name': env_name,
+ 'opt': args[0],
+ }
+
+ optparse.OptionParser.add_option(self, *args, **kw)
+
+ def parse_args(self, *args):
+ values, args = optparse.OptionParser.parse_args(self, *args)
+ values._regeneration_metadata = self.__regeneratable_options
+ return values, args
+
+def gyp_main(args):
+ my_name = os.path.basename(sys.argv[0])
+
+ parser = RegeneratableOptionParser()
+ usage = 'usage: %s [options ...] [build_file ...]'
+ parser.set_usage(usage.replace('%s', '%prog'))
+ parser.add_option('--build', dest='configs', action='append',
+ help='configuration for build after project generation')
+ parser.add_option('--check', dest='check', action='store_true',
+ help='check format of gyp files')
+ parser.add_option('--config-dir', dest='config_dir', action='store',
+ env_name='GYP_CONFIG_DIR', default=None,
+ help='The location for configuration files like '
+ 'include.gypi.')
+ parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
+ action='append', default=[], help='turn on a debugging '
+ 'mode for debugging GYP. Supported modes are "variables", '
+ '"includes" and "general" or "all" for all of them.')
+ parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
+ env_name='GYP_DEFINES',
+ help='sets variable VAR to value VAL')
+ parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
+ help='set DEPTH gyp variable to a relative path to PATH')
+ parser.add_option('-f', '--format', dest='formats', action='append',
+ env_name='GYP_GENERATORS', regenerate=False,
+ help='output formats to generate')
+ parser.add_option('-G', dest='generator_flags', action='append', default=[],
+ metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
+ help='sets generator flag FLAG to VAL')
+ parser.add_option('--generator-output', dest='generator_output',
+ action='store', default=None, metavar='DIR', type='path',
+ env_name='GYP_GENERATOR_OUTPUT',
+ help='puts generated build files under DIR')
+ parser.add_option('--ignore-environment', dest='use_environment',
+ action='store_false', default=True, regenerate=False,
+ help='do not read options from environment variables')
+ parser.add_option('-I', '--include', dest='includes', action='append',
+ metavar='INCLUDE', type='path',
+ help='files to include in all loaded .gyp files')
+ # --no-circular-check disables the check for circular relationships between
+ # .gyp files. These relationships should not exist, but they've only been
+ # observed to be harmful with the Xcode generator. Chromium's .gyp files
+ # currently have some circular relationships on non-Mac platforms, so this
+ # option allows the strict behavior to be used on Macs and the lenient
+ # behavior to be used elsewhere.
+ # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
+ parser.add_option('--no-circular-check', dest='circular_check',
+ action='store_false', default=True, regenerate=False,
+ help="don't check for circular relationships between files")
+ # --no-duplicate-basename-check disables the check for duplicate basenames
+ # in a static_library/shared_library project. Visual C++ 2008 generator
+ # doesn't support this configuration. Libtool on Mac also generates warnings
+ # when duplicate basenames are passed into Make generator on Mac.
+ # TODO(yukawa): Remove this option when these legacy generators are
+ # deprecated.
+ parser.add_option('--no-duplicate-basename-check',
+ dest='duplicate_basename_check', action='store_false',
+ default=True, regenerate=False,
+ help="don't check for duplicate basenames")
+ parser.add_option('--no-parallel', action='store_true', default=False,
+ help='Disable multiprocessing')
+ parser.add_option('-S', '--suffix', dest='suffix', default='',
+ help='suffix to add to generated files')
+ parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
+ default=None, metavar='DIR', type='path',
+ help='directory to use as the root of the source tree')
+ parser.add_option('-R', '--root-target', dest='root_targets',
+ action='append', metavar='TARGET',
+ help='include only TARGET and its deep dependencies')
+
+ options, build_files_arg = parser.parse_args(args)
+ build_files = build_files_arg
+
+ # Set up the configuration directory (defaults to ~/.gyp)
+ if not options.config_dir:
+ home = None
+ home_dot_gyp = None
+ if options.use_environment:
+ home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
+ if home_dot_gyp:
+ home_dot_gyp = os.path.expanduser(home_dot_gyp)
+
+ if not home_dot_gyp:
+ home_vars = ['HOME']
+ if sys.platform in ('cygwin', 'win32'):
+ home_vars.append('USERPROFILE')
+ for home_var in home_vars:
+ home = os.getenv(home_var)
+ if home != None:
+ home_dot_gyp = os.path.join(home, '.gyp')
+ if not os.path.exists(home_dot_gyp):
+ home_dot_gyp = None
+ else:
+ break
+ else:
+ home_dot_gyp = os.path.expanduser(options.config_dir)
+
+ if home_dot_gyp and not os.path.exists(home_dot_gyp):
+ home_dot_gyp = None
+
+ if not options.formats:
+ # If no format was given on the command line, then check the env variable.
+ generate_formats = []
+ if options.use_environment:
+ generate_formats = os.environ.get('GYP_GENERATORS', [])
+ if generate_formats:
+ generate_formats = re.split(r'[\s,]', generate_formats)
+ if generate_formats:
+ options.formats = generate_formats
+ else:
+ # Nothing in the variable, default based on platform.
+ if sys.platform == 'darwin':
+ options.formats = ['xcode']
+ elif sys.platform in ('win32', 'cygwin'):
+ options.formats = ['msvs']
+ else:
+ options.formats = ['make']
+
+ if not options.generator_output and options.use_environment:
+ g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
+ if g_o:
+ options.generator_output = g_o
+
+ options.parallel = not options.no_parallel
+
+ for mode in options.debug:
+ gyp.debug[mode] = 1
+
+ # Do an extra check to avoid work when we're not debugging.
+ if DEBUG_GENERAL in gyp.debug:
+ DebugOutput(DEBUG_GENERAL, 'running with these options:')
+ for option, value in sorted(options.__dict__.items()):
+ if option[0] == '_':
+ continue
+ if isinstance(value, basestring):
+ DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
+ else:
+ DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)
+
+ if not build_files:
+ build_files = FindBuildFiles()
+ if not build_files:
+ raise GypError((usage + '\n\n%s: error: no build_file') %
+ (my_name, my_name))
+
+ # TODO(mark): Chromium-specific hack!
+ # For Chromium, the gyp "depth" variable should always be a relative path
+ # to Chromium's top-level "src" directory. If no depth variable was set
+ # on the command line, try to find a "src" directory by looking at the
+ # absolute path to each build file's directory. The first "src" component
+ # found will be treated as though it were the path used for --depth.
+ if not options.depth:
+ for build_file in build_files:
+ build_file_dir = os.path.abspath(os.path.dirname(build_file))
+ build_file_dir_components = build_file_dir.split(os.path.sep)
+ for component in reversed(build_file_dir_components):
+ if component == 'src':
+ options.depth = os.path.sep.join(build_file_dir_components)
+ break
+ del build_file_dir_components[-1]
+
+ # If the inner loop found something, break without advancing to another
+ # build file.
+ if options.depth:
+ break
+
+ if not options.depth:
+ raise GypError('Could not automatically locate src directory. This is '
+ 'a temporary Chromium feature that will be removed. Use '
+ '--depth as a workaround.')
+
+ # If toplevel-dir is not set, we assume that depth is the root of our source
+ # tree.
+ if not options.toplevel_dir:
+ options.toplevel_dir = options.depth
+
+ # -D on the command line sets variable defaults - D isn't just for define,
+ # it's for default. Perhaps there should be a way to force (-F?) a
+ # variable's value so that it can't be overridden by anything else.
+ cmdline_default_variables = {}
+ defines = []
+ if options.use_environment:
+ defines += ShlexEnv('GYP_DEFINES')
+ if options.defines:
+ defines += options.defines
+ cmdline_default_variables = NameValueListToDict(defines)
+ if DEBUG_GENERAL in gyp.debug:
+ DebugOutput(DEBUG_GENERAL,
+ "cmdline_default_variables: %s", cmdline_default_variables)
+
+ # Set up includes.
+ includes = []
+
+ # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
+ # .gyp file that's loaded, before anything else is included.
+ if home_dot_gyp != None:
+ default_include = os.path.join(home_dot_gyp, 'include.gypi')
+ if os.path.exists(default_include):
+ print('Using overrides found in ' + default_include)
+ includes.append(default_include)
+
+ # Command-line --include files come after the default include.
+ if options.includes:
+ includes.extend(options.includes)
+
+ # Generator flags should be prefixed with the target generator since they
+ # are global across all generator runs.
+ gen_flags = []
+ if options.use_environment:
+ gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
+ if options.generator_flags:
+ gen_flags += options.generator_flags
+ generator_flags = NameValueListToDict(gen_flags)
+ if DEBUG_GENERAL in gyp.debug:
+ DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
+
+ # Generate all requested formats (use a set in case we got one format request
+ # twice)
+ for format in set(options.formats):
+ params = {'options': options,
+ 'build_files': build_files,
+ 'generator_flags': generator_flags,
+ 'cwd': os.getcwd(),
+ 'build_files_arg': build_files_arg,
+ 'gyp_binary': sys.argv[0],
+ 'home_dot_gyp': home_dot_gyp,
+ 'parallel': options.parallel,
+ 'root_targets': options.root_targets,
+ 'target_arch': cmdline_default_variables.get('target_arch', '')}
+
+ # Start with the default variables from the command line.
+ [generator, flat_list, targets, data] = Load(
+ build_files, format, cmdline_default_variables, includes, options.depth,
+ params, options.check, options.circular_check,
+ options.duplicate_basename_check)
+
+ # TODO(mark): Pass |data| for now because the generator needs a list of
+ # build files that came in. In the future, maybe it should just accept
+ # a list, and not the whole data dict.
+ # NOTE: flat_list is the flattened dependency graph specifying the order
+ # that targets may be built. Build systems that operate serially or that
+ # need to have dependencies defined before dependents reference them should
+ # generate targets in the order specified in flat_list.
+ generator.GenerateOutput(flat_list, targets, data, params)
+
+ if options.configs:
+ valid_configs = targets[flat_list[0]]['configurations']
+ for conf in options.configs:
+ if conf not in valid_configs:
+ raise GypError('Invalid config specified via --build: %s' % conf)
+ generator.PerformBuild(data, options.configs, params)
+
+ # Done
+ return 0
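+
+# Illustrative invocation (foo.gyp is a hypothetical build file at the source
+# root):
+#   gyp -f make --depth=. -D OS=linux foo.gyp
+# parses the options above, loads foo.gyp and writes Makefiles for it.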
+
+
+def main(args):
+ try:
+ return gyp_main(args)
+ except GypError as e:
+ sys.stderr.write("gyp: %s\n" % e)
+ return 1
+
+# NOTE: setuptools generated console_scripts calls function with no arguments
+def script_main():
+ return main(sys.argv[1:])
+
+if __name__ == '__main__':
+ sys.exit(script_main())
diff --git a/third_party/python/gyp/pylib/gyp/common.py b/third_party/python/gyp/pylib/gyp/common.py
new file mode 100644
index 0000000000..b268d229a4
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/common.py
@@ -0,0 +1,619 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import with_statement
+
+import collections
+import errno
+import filecmp
+import os.path
+import re
+import tempfile
+import sys
+
+
+# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
+# among other "problems".
+class memoize(object):
+ def __init__(self, func):
+ self.func = func
+ self.cache = {}
+ def __call__(self, *args):
+ try:
+ return self.cache[args]
+ except KeyError:
+ result = self.func(*args)
+ self.cache[args] = result
+ return result
+
+
+class GypError(Exception):
+ """Error class representing an error, which is to be presented
+ to the user. The main entry point will catch and display this.
+ """
+ pass
+
+
+def ExceptionAppend(e, msg):
+ """Append a message to the given exception's message."""
+ if not e.args:
+ e.args = (msg,)
+ elif len(e.args) == 1:
+ e.args = (str(e.args[0]) + ' ' + msg,)
+ else:
+ e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
+
+
+def FindQualifiedTargets(target, qualified_list):
+ """
+ Given a list of qualified targets, return the qualified targets for the
+ specified |target|.
+ """
+ return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
+
+
+def ParseQualifiedTarget(target):
+ # Splits a qualified target into a build file, target name and toolset.
+
+ # NOTE: rsplit is used to disambiguate the Windows drive letter separator.
+ target_split = target.rsplit(':', 1)
+ if len(target_split) == 2:
+ [build_file, target] = target_split
+ else:
+ build_file = None
+
+ target_split = target.rsplit('#', 1)
+ if len(target_split) == 2:
+ [target, toolset] = target_split
+ else:
+ toolset = None
+
+ return [build_file, target, toolset]
+
+
+def ResolveTarget(build_file, target, toolset):
+ # This function resolves a target into a canonical form:
+ # - a fully defined build file, either absolute or relative to the current
+ # directory
+ # - a target name
+ # - a toolset
+ #
+ # build_file is the file relative to which 'target' is defined.
+ # target is the qualified target.
+ # toolset is the default toolset for that target.
+ [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
+
+ if parsed_build_file:
+ if build_file:
+ # If a relative path, parsed_build_file is relative to the directory
+ # containing build_file. If build_file is not in the current directory,
+ # parsed_build_file is not a usable path as-is. Resolve it by
+ # interpreting it as relative to build_file. If parsed_build_file is
+ # absolute, it is usable as a path regardless of the current directory,
+ # and os.path.join will return it as-is.
+ build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
+ parsed_build_file))
+ # Further (to handle cases like ../cwd), make it relative to cwd.
+ if not os.path.isabs(build_file):
+ build_file = RelativePath(build_file, '.')
+ else:
+ build_file = parsed_build_file
+
+ if parsed_toolset:
+ toolset = parsed_toolset
+
+ return [build_file, target, toolset]
+
+
+def BuildFile(fully_qualified_target):
+ # Extracts the build file from the fully qualified target.
+ return ParseQualifiedTarget(fully_qualified_target)[0]
+
+
+def GetEnvironFallback(var_list, default):
+ """Look up a key in the environment, with fallback to secondary keys
+ and finally falling back to a default value."""
+ for var in var_list:
+ if var in os.environ:
+ return os.environ[var]
+ return default
+
+
+def QualifiedTarget(build_file, target, toolset):
+ # "Qualified" means the file that a target was defined in and the target
+ # name, separated by a colon, suffixed by a # and the toolset name:
+ # /path/to/file.gyp:target_name#toolset
+ fully_qualified = build_file + ':' + target
+ if toolset:
+ fully_qualified = fully_qualified + '#' + toolset
+ return fully_qualified
+
+
+@memoize
+def RelativePath(path, relative_to, follow_path_symlink=True):
+ # Assuming both |path| and |relative_to| are relative to the current
+ # directory, returns a relative path that identifies path relative to
+ # relative_to.
+ # If |follow_path_symlink| is true (default) and |path| is a symlink, then
+ # this method returns a path to the real file represented by |path|. If it is
+ # false, this method returns a path to the symlink. If |path| is not a
+ # symlink, this option has no effect.
+
+ # Convert to normalized (and therefore absolute paths).
+ if follow_path_symlink:
+ path = os.path.realpath(path)
+ else:
+ path = os.path.abspath(path)
+ relative_to = os.path.realpath(relative_to)
+
+ # On Windows, we can't create a relative path to a different drive, so just
+ # use the absolute path.
+ if sys.platform == 'win32':
+ if (os.path.splitdrive(path)[0].lower() !=
+ os.path.splitdrive(relative_to)[0].lower()):
+ return path
+
+ # Split the paths into components.
+ path_split = path.split(os.path.sep)
+ relative_to_split = relative_to.split(os.path.sep)
+
+ # Determine how much of the prefix the two paths share.
+ prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
+
+ # Put enough ".." components to back up out of relative_to to the common
+ # prefix, and then append the part of path_split after the common prefix.
+ relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \
+ path_split[prefix_len:]
+
+ if len(relative_split) == 0:
+ # The paths were the same.
+ return ''
+
+ # Turn it back into a string and we're done.
+ return os.path.join(*relative_split)
+
+
+@memoize
+def InvertRelativePath(path, toplevel_dir=None):
+ """Given a path like foo/bar that is relative to toplevel_dir, return
+ the inverse relative path back to the toplevel_dir.
+
+ E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
+ should always produce the empty string, unless the path contains symlinks.
+ """
+ if not path:
+ return path
+ toplevel_dir = '.' if toplevel_dir is None else toplevel_dir
+ return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))
+
+
+def FixIfRelativePath(path, relative_to):
+ # Like RelativePath but returns |path| unchanged if it is absolute.
+ if os.path.isabs(path):
+ return path
+ return RelativePath(path, relative_to)
+
+
+def UnrelativePath(path, relative_to):
+ # Assuming that |relative_to| is relative to the current directory, and |path|
+ # is a path relative to the dirname of |relative_to|, returns a path that
+ # identifies |path| relative to the current directory.
+ rel_dir = os.path.dirname(relative_to)
+ return os.path.normpath(os.path.join(rel_dir, path))
+
+
+# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
+# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
+# and the documentation for various shells.
+
+# _quote is a pattern that should match any argument that needs to be quoted
+# with double-quotes by EncodePOSIXShellArgument. It matches the following
+# characters appearing anywhere in an argument:
+# \t, \n, space parameter separators
+# # comments
+# $ expansions (quoted to always expand within one argument)
+# % called out by IEEE 1003.1 XCU.2.2
+# & job control
+# ' quoting
+# (, ) subshell execution
+# *, ?, [ pathname expansion
+# ; command delimiter
+# <, >, | redirection
+# = assignment
+# {, } brace expansion (bash)
+# ~ tilde expansion
+# It also matches the empty string, because "" (or '') is the only way to
+# represent an empty string literal argument to a POSIX shell.
+#
+# This does not match the characters in _escape, because those need to be
+# backslash-escaped regardless of whether they appear in a double-quoted
+# string.
+_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
+
+# _escape is a pattern that should match any character that needs to be
+# escaped with a backslash, whether or not the argument matched the _quote
+# pattern. _escape is used with re.sub to backslash anything in _escape's
+# first match group, hence the (parentheses) in the regular expression.
+#
+# _escape matches the following characters appearing anywhere in an argument:
+# " to prevent POSIX shells from interpreting this character for quoting
+# \ to prevent POSIX shells from interpreting this character for escaping
+# ` to prevent POSIX shells from interpreting this character for command
+# substitution
+# Missing from this list is $, because the desired behavior of
+# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
+#
+# Also missing from this list is !, which bash will interpret as the history
+# expansion character when history is enabled. bash does not enable history
+# by default in non-interactive shells, so this is not thought to be a problem.
+# ! was omitted from this list because bash interprets "\!" as a literal string
+# including the backslash character (avoiding history expansion but retaining
+# the backslash), which would not be correct for argument encoding. Handling
+# this case properly would also be problematic because bash allows the history
+# character to be changed with the histchars shell variable. Fortunately,
+# as history is not enabled in non-interactive shells and
+# EncodePOSIXShellArgument is only expected to encode for non-interactive
+# shells, there is no room for error here by ignoring !.
+_escape = re.compile(r'(["\\`])')
+
+def EncodePOSIXShellArgument(argument):
+ """Encodes |argument| suitably for consumption by POSIX shells.
+
+ argument may be quoted and escaped as necessary to ensure that POSIX shells
+ treat the returned value as a literal representing the argument passed to
+ this function. Parameter (variable) expansions beginning with $ are allowed
+ to remain intact without escaping the $, to allow the argument to contain
+ references to variables to be expanded by the shell.
+ """
+
+ if not isinstance(argument, str):
+ argument = str(argument)
+
+ if _quote.search(argument):
+ quote = '"'
+ else:
+ quote = ''
+
+ encoded = quote + re.sub(_escape, r'\\\1', argument) + quote
+
+ return encoded
+
+
+def EncodePOSIXShellList(list):
+ """Encodes |list| suitably for consumption by POSIX shells.
+
+ Returns EncodePOSIXShellArgument for each item in list, and joins them
+ together using the space character as an argument separator.
+ """
+
+ encoded_arguments = []
+ for argument in list:
+ encoded_arguments.append(EncodePOSIXShellArgument(argument))
+ return ' '.join(encoded_arguments)
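+
+# Illustrative example: EncodePOSIXShellList(['echo', 'hello world', '$HOME'])
+# returns 'echo "hello world" "$HOME"'; arguments needing quoting are wrapped
+# in double quotes while $-expansions are left intact.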
+
+
+def DeepDependencyTargets(target_dicts, roots):
+ """Returns the recursive list of target dependencies."""
+ dependencies = set()
+ pending = set(roots)
+ while pending:
+ # Pluck out one.
+ r = pending.pop()
+ # Skip if visited already.
+ if r in dependencies:
+ continue
+ # Add it.
+ dependencies.add(r)
+ # Add its children.
+ spec = target_dicts[r]
+ pending.update(set(spec.get('dependencies', [])))
+ pending.update(set(spec.get('dependencies_original', [])))
+ return list(dependencies - set(roots))
+
+
+def BuildFileTargets(target_list, build_file):
+ """From a target_list, returns the subset from the specified build_file.
+ """
+ return [p for p in target_list if BuildFile(p) == build_file]
+
+
+def AllTargets(target_list, target_dicts, build_file):
+ """Returns all targets (direct and dependencies) for the specified build_file.
+ """
+ bftargets = BuildFileTargets(target_list, build_file)
+ deptargets = DeepDependencyTargets(target_dicts, bftargets)
+ return bftargets + deptargets
+
+
+def WriteOnDiff(filename):
+ """Write to a file only if the new contents differ.
+
+ Arguments:
+ filename: name of the file to potentially write to.
+ Returns:
+ A file-like object which will write to a temporary file and only overwrite
+ the target if it differs (on close).
+ """
+
+ class Writer(object):
+ """Wrapper around file which only covers the target if it differs."""
+ def __init__(self):
+ # Pick temporary file.
+ tmp_fd, self.tmp_path = tempfile.mkstemp(
+ suffix='.tmp',
+ prefix=os.path.split(filename)[1] + '.gyp.',
+ dir=os.path.split(filename)[0])
+ try:
+ self.tmp_file = os.fdopen(tmp_fd, 'w')
+ except Exception:
+ # Don't leave turds behind.
+ os.unlink(self.tmp_path)
+ raise
+
+ def __getattr__(self, attrname):
+ # Delegate everything else to self.tmp_file
+ return getattr(self.tmp_file, attrname)
+
+ def close(self):
+ try:
+ # Close tmp file.
+ self.tmp_file.close()
+ # Determine if different.
+ same = False
+ try:
+ same = filecmp.cmp(self.tmp_path, filename, False)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ if same:
+ # The new file is identical to the old one, just get rid of the new
+ # one.
+ os.unlink(self.tmp_path)
+ else:
+ # The new file is different from the old one, or there is no old one.
+ # Rename the new file to the permanent name.
+ #
+ # tempfile.mkstemp uses an overly restrictive mode, resulting in a
+ # file that can only be read by the owner, regardless of the umask.
+ # There's no reason to not respect the umask here, which means that
+ # an extra hoop is required to fetch it and reset the new file's mode.
+ #
+ # No way to get the umask without setting a new one? Set a safe one
+ # and then set it back to the old value.
+ umask = os.umask(0o77)
+ os.umask(umask)
+ os.chmod(self.tmp_path, 0o666 & ~umask)
+ if sys.platform == 'win32' and os.path.exists(filename):
+ # NOTE: on windows (but not cygwin) rename will not replace an
+ # existing file, so it must be preceded with a remove. Sadly there
+ # is no way to make the switch atomic.
+ os.remove(filename)
+ os.rename(self.tmp_path, filename)
+ except Exception:
+ # Don't leave turds behind.
+ os.unlink(self.tmp_path)
+ raise
+
+ return Writer()
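+
+# Illustrative usage sketch ('foo.vcxproj' and contents are placeholders):
+#   out = WriteOnDiff('foo.vcxproj')
+#   out.write(contents)
+#   out.close()  # only replaces foo.vcxproj if the new contents differ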
+
+
+def EnsureDirExists(path):
+ """Make sure the directory for |path| exists."""
+ try:
+ os.makedirs(os.path.dirname(path))
+ except OSError:
+ pass
+
+
+def GetFlavor(params):
+ """Returns |params.flavor| if it's set, the system's default flavor else."""
+ flavors = {
+ 'cygwin': 'win',
+ 'win32': 'win',
+ 'darwin': 'mac',
+ }
+
+ if 'flavor' in params:
+ return params['flavor']
+ if sys.platform in flavors:
+ return flavors[sys.platform]
+ if sys.platform.startswith('sunos'):
+ return 'solaris'
+ if sys.platform.startswith('freebsd'):
+ return 'freebsd'
+ if sys.platform.startswith('openbsd'):
+ return 'openbsd'
+ if sys.platform.startswith('netbsd'):
+ return 'netbsd'
+ if sys.platform.startswith('aix'):
+ return 'aix'
+ if sys.platform.startswith('zos'):
+ return 'zos'
+ if sys.platform.startswith('os390'):
+ return 'zos'
+
+ return 'linux'
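+
+# Illustrative examples: GetFlavor({'flavor': 'win'}) returns 'win' regardless
+# of platform, while GetFlavor({}) maps sys.platform ('darwin' -> 'mac',
+# 'win32' -> 'win', unrecognised platforms -> 'linux').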
+
+
+def CopyTool(flavor, out_path, generator_flags={}):
+ """Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
+ to |out_path|."""
+ # aix and solaris just need flock emulation. mac and win use more complicated
+ # support scripts.
+ prefix = {
+ 'aix': 'flock',
+ 'solaris': 'flock',
+ 'mac': 'mac',
+ 'win': 'win'
+ }.get(flavor, None)
+ if not prefix:
+ return
+
+ # Slurp input file.
+ source_path = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), '%s_tool.py' % prefix)
+ with open(source_path) as source_file:
+ source = source_file.readlines()
+
+ # Set custom header flags.
+ header = '# Generated by gyp. Do not edit.\n'
+ mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
+ if flavor == 'mac' and mac_toolchain_dir:
+ header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" \
+ % mac_toolchain_dir
+
+ # Add header and write it out.
+ tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix)
+ with open(tool_path, 'w') as tool_file:
+ tool_file.write(
+ ''.join([source[0], header] + source[1:]))
+
+ # Make file executable.
+ os.chmod(tool_path, 0o755)
+
+
+# From Alex Martelli,
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
+# ASPN: Python Cookbook: Remove duplicates from a sequence
+# First comment, dated 2001/10/13.
+# (Also in the printed Python Cookbook.)
+
+def uniquer(seq, idfun=None):
+ if idfun is None:
+ idfun = lambda x: x
+ seen = {}
+ result = []
+ for item in seq:
+ marker = idfun(item)
+ if marker in seen: continue
+ seen[marker] = 1
+ result.append(item)
+ return result
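+
+# Illustrative example: uniquer([1, 2, 1, 3]) returns [1, 2, 3]; passing
+# idfun=str.lower would treat 'A' and 'a' as duplicates.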
+
+
+# Based on http://code.activestate.com/recipes/576694/.
+class OrderedSet(collections.MutableSet):
+ def __init__(self, iterable=None):
+ self.end = end = []
+ end += [None, end, end] # sentinel node for doubly linked list
+ self.map = {} # key --> [key, prev, next]
+ if iterable is not None:
+ self |= iterable
+
+ def __len__(self):
+ return len(self.map)
+
+ def __contains__(self, key):
+ return key in self.map
+
+ def add(self, key):
+ if key not in self.map:
+ end = self.end
+ curr = end[1]
+ curr[2] = end[1] = self.map[key] = [key, curr, end]
+
+ def discard(self, key):
+ if key in self.map:
+ key, prev_item, next_item = self.map.pop(key)
+ prev_item[2] = next_item
+ next_item[1] = prev_item
+
+ def __iter__(self):
+ end = self.end
+ curr = end[2]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[2]
+
+ def __reversed__(self):
+ end = self.end
+ curr = end[1]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[1]
+
+ # The second argument is an addition that causes a pylint warning.
+ def pop(self, last=True): # pylint: disable=W0221
+ if not self:
+ raise KeyError('set is empty')
+ key = self.end[1][0] if last else self.end[2][0]
+ self.discard(key)
+ return key
+
+ def __repr__(self):
+ if not self:
+ return '%s()' % (self.__class__.__name__,)
+ return '%s(%r)' % (self.__class__.__name__, list(self))
+
+ def __eq__(self, other):
+ if isinstance(other, OrderedSet):
+ return len(self) == len(other) and list(self) == list(other)
+ return set(self) == set(other)
+
+ # Extensions to the recipe.
+ def update(self, iterable):
+ for i in iterable:
+ if i not in self:
+ self.add(i)
+
+
+class CycleError(Exception):
+ """An exception raised when an unexpected cycle is detected."""
+ def __init__(self, nodes):
+ self.nodes = nodes
+ def __str__(self):
+ return 'CycleError: cycle involving: ' + str(self.nodes)
+
+
+def TopologicallySorted(graph, get_edges):
+ r"""Topologically sort based on a user provided edge definition.
+
+ Args:
+ graph: A list of node names.
+ get_edges: A function mapping from node name to a hashable collection
+ of node names which this node has outgoing edges to.
+ Returns:
+ A list containing all of the nodes in graph in topological order.
+ It is assumed that calling get_edges once for each node and caching is
+ cheaper than repeatedly calling get_edges.
+ Raises:
+ CycleError in the event of a cycle.
+ Example:
+ graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
+ def GetEdges(node):
+ return re.findall(r'\$\(([^)]*)\)', graph[node])
+ print(TopologicallySorted(graph.keys(), GetEdges))
+ ==>
+ ['a', 'c', 'b']
+ """
+ get_edges = memoize(get_edges)
+ visited = set()
+ visiting = set()
+ ordered_nodes = []
+ def Visit(node):
+ if node in visiting:
+ raise CycleError(visiting)
+ if node in visited:
+ return
+ visited.add(node)
+ visiting.add(node)
+ for neighbor in get_edges(node):
+ Visit(neighbor)
+ visiting.remove(node)
+ ordered_nodes.insert(0, node)
+ for node in sorted(graph):
+ Visit(node)
+ return ordered_nodes
+
+def CrossCompileRequested():
+ # TODO: figure out how to not build extra host objects in the
+ # non-cross-compile case when this is enabled, and enable unconditionally.
+ return (os.environ.get('GYP_CROSSCOMPILE') or
+ os.environ.get('AR_host') or
+ os.environ.get('CC_host') or
+ os.environ.get('CXX_host') or
+ os.environ.get('AR_target') or
+ os.environ.get('CC_target') or
+ os.environ.get('CXX_target'))
diff --git a/third_party/python/gyp/pylib/gyp/common_test.py b/third_party/python/gyp/pylib/gyp/common_test.py
new file mode 100755
index 0000000000..0b8ada3dc3
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/common_test.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the common.py file."""
+
+import gyp.common
+import unittest
+import sys
+
+
+class TestTopologicallySorted(unittest.TestCase):
+ def test_Valid(self):
+ """Test that sorting works on a valid graph with one possible order."""
+ graph = {
+ 'a': ['b', 'c'],
+ 'b': [],
+ 'c': ['d'],
+ 'd': ['b'],
+ }
+ def GetEdge(node):
+ return tuple(graph[node])
+ self.assertEqual(
+ gyp.common.TopologicallySorted(graph.keys(), GetEdge),
+ ['a', 'c', 'd', 'b'])
+
+ def test_Cycle(self):
+ """Test that an exception is thrown on a cyclic graph."""
+ graph = {
+ 'a': ['b'],
+ 'b': ['c'],
+ 'c': ['d'],
+ 'd': ['a'],
+ }
+ def GetEdge(node):
+ return tuple(graph[node])
+ self.assertRaises(
+ gyp.common.CycleError, gyp.common.TopologicallySorted,
+ graph.keys(), GetEdge)
+
+
+class TestGetFlavor(unittest.TestCase):
+ """Test that gyp.common.GetFlavor works as intended"""
+ original_platform = ''
+
+ def setUp(self):
+ self.original_platform = sys.platform
+
+ def tearDown(self):
+ sys.platform = self.original_platform
+
+ def assertFlavor(self, expected, argument, param):
+ sys.platform = argument
+ self.assertEqual(expected, gyp.common.GetFlavor(param))
+
+ def test_platform_default(self):
+ self.assertFlavor('freebsd', 'freebsd9' , {})
+ self.assertFlavor('freebsd', 'freebsd10', {})
+ self.assertFlavor('openbsd', 'openbsd5' , {})
+ self.assertFlavor('solaris', 'sunos5' , {})
+ self.assertFlavor('solaris', 'sunos' , {})
+ self.assertFlavor('linux' , 'linux2' , {})
+ self.assertFlavor('linux' , 'linux3' , {})
+ self.assertFlavor('linux' , 'linux' , {})
+
+ def test_param(self):
+ self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'})
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/gyp/pylib/gyp/easy_xml.py b/third_party/python/gyp/pylib/gyp/easy_xml.py
new file mode 100644
index 0000000000..2de51e25fb
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/easy_xml.py
@@ -0,0 +1,170 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+import os
+import locale
+import sys
+
+try:
+ # reduce moved to functools in python3.
+ reduce
+except NameError:
+ from functools import reduce
+
+def XmlToString(content, encoding='utf-8', pretty=False):
+ """ Writes the XML content to disk, touching the file only if it has changed.
+
+ Visual Studio files have a lot of pre-defined structures. This function makes
+ it easy to represent these structures as Python data structures, instead of
+ having to create a lot of function calls.
+
+ Each XML element of the content is represented as a list composed of:
+ 1. The name of the element, a string,
+ 2. The attributes of the element, a dictionary (optional), and
+ 3+. The content of the element, if any. Strings are simple text nodes and
+ lists are child elements.
+
+ Example 1:
+ <test/>
+ becomes
+ ['test']
+
+ Example 2:
+ <myelement a='value1' b='value2'>
+ <childtype>This is</childtype>
+ <childtype>it!</childtype>
+ </myelement>
+
+ becomes
+ ['myelement', {'a':'value1', 'b':'value2'},
+ ['childtype', 'This is'],
+ ['childtype', 'it!'],
+ ]
+
+ Args:
+ content: The structured content to be converted.
+ encoding: The encoding to report on the first XML line.
+ pretty: True if we want pretty printing with indents and new lines.
+
+ Returns:
+ The XML content as a string.
+ """
+ # We create a huge list of all the elements of the file.
+ xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
+ if pretty:
+ xml_parts.append('\n')
+ _ConstructContentList(xml_parts, content, pretty)
+
+ # Convert it to a string
+ return ''.join(xml_parts)
+
+
+def _ConstructContentList(xml_parts, specification, pretty, level=0):
+ """ Appends the XML parts corresponding to the specification.
+
+ Args:
+ xml_parts: A list of XML parts to be appended to.
+ specification: The specification of the element. See EasyXml docs.
+ pretty: True if we want pretty printing with indents and new lines.
+ level: Indentation level.
+ """
+ # The first item in a specification is the name of the element.
+ if pretty:
+ indentation = ' ' * level
+ new_line = '\n'
+ else:
+ indentation = ''
+ new_line = ''
+ name = specification[0]
+ if not isinstance(name, str):
+ raise Exception('The first item of an EasyXml specification should be '
+ 'a string. Specification was ' + str(specification))
+ xml_parts.append(indentation + '<' + name)
+
+ # Optionally in second position is a dictionary of the attributes.
+ rest = specification[1:]
+ if rest and isinstance(rest[0], dict):
+ for at, val in sorted(rest[0].items()):
+ xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
+ rest = rest[1:]
+ if rest:
+ xml_parts.append('>')
+ all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
+ multi_line = not all_strings
+ if multi_line and new_line:
+ xml_parts.append(new_line)
+ for child_spec in rest:
+ # If it's a string, append a text node.
+ # Otherwise recurse over that child definition
+ if isinstance(child_spec, str):
+ xml_parts.append(_XmlEscape(child_spec))
+ else:
+ _ConstructContentList(xml_parts, child_spec, pretty, level + 1)
+ if multi_line and indentation:
+ xml_parts.append(indentation)
+ xml_parts.append('</%s>%s' % (name, new_line))
+ else:
+ xml_parts.append('/>%s' % new_line)
+
+
+def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False,
+ win32=False):
+ """ Writes the XML content to disk, touching the file only if it has changed.
+
+ Args:
+ content: The structured content to be written.
+ path: Location of the file.
+ encoding: The encoding to report on the first line of the XML file.
+ pretty: True if we want pretty printing with indents and new lines.
+ win32: True to use Windows (CRLF) line endings.
+ """
+ xml_string = XmlToString(content, encoding, pretty)
+ if win32 and os.linesep != '\r\n':
+ xml_string = xml_string.replace('\n', '\r\n')
+ default_encoding = locale.getdefaultlocale()[1]
+ if default_encoding and default_encoding.upper() != encoding.upper():
+ try:
+ xml_string = xml_string.decode(default_encoding).encode(encoding)
+ except AttributeError:
+ pass
+
+ # Get the old content
+ try:
+ f = open(path, 'r')
+ existing = f.read()
+ f.close()
+ except:
+ existing = None
+
+ # It has changed, write it
+ if existing != xml_string:
+ f = open(path, 'w')
+ f.write(xml_string)
+ f.close()
+
+
+_xml_escape_map = {
+ '"': '&quot;',
+ "'": '&apos;',
+ '<': '&lt;',
+ '>': '&gt;',
+ '&': '&amp;',
+ '\n': '&#xA;',
+ '\r': '&#xD;',
+}
+
+
+_xml_escape_re = re.compile(
+ "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
+
+
+def _XmlEscape(value, attr=False):
+ """ Escape a string for inclusion in XML."""
+ def replace(match):
+ m = match.string[match.start() : match.end()]
+ # don't replace single quotes in attrs
+ if attr and m == "'":
+ return m
+ return _xml_escape_map[m]
+ return _xml_escape_re.sub(replace, value)
diff --git a/third_party/python/gyp/pylib/gyp/easy_xml_test.py b/third_party/python/gyp/pylib/gyp/easy_xml_test.py
new file mode 100755
index 0000000000..a1fdb1881d
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/easy_xml_test.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the easy_xml.py file. """
+
+import gyp.easy_xml as easy_xml
+import unittest
+try:
+ from StringIO import StringIO
+except ImportError:
+ from io import StringIO
+
+
+class TestSequenceFunctions(unittest.TestCase):
+
+ def setUp(self):
+ self.stderr = StringIO()
+
+ def test_EasyXml_simple(self):
+ self.assertEqual(
+ easy_xml.XmlToString(['test']),
+ '<?xml version="1.0" encoding="utf-8"?><test/>')
+
+ self.assertEqual(
+ easy_xml.XmlToString(['test'], encoding='Windows-1252'),
+ '<?xml version="1.0" encoding="Windows-1252"?><test/>')
+
+ def test_EasyXml_simple_with_attributes(self):
+ self.assertEqual(
+ easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
+ '<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')
+
+ def test_EasyXml_escaping(self):
+ original = '<test>\'"\r&\nfoo'
+ converted = '&lt;test&gt;\'&quot;&#xD;&amp;&#xA;foo'
+ converted_apos = converted.replace("'", '&apos;')
+ self.assertEqual(
+ easy_xml.XmlToString(['test3', {'a': original}, original]),
+ '<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
+ (converted, converted_apos))
+
+ def test_EasyXml_pretty(self):
+ self.assertEqual(
+ easy_xml.XmlToString(
+ ['test3',
+ ['GrandParent',
+ ['Parent1',
+ ['Child']
+ ],
+ ['Parent2']
+ ]
+ ],
+ pretty=True),
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<test3>\n'
+ ' <GrandParent>\n'
+ ' <Parent1>\n'
+ ' <Child/>\n'
+ ' </Parent1>\n'
+ ' <Parent2/>\n'
+ ' </GrandParent>\n'
+ '</test3>\n')
+
+
+ def test_EasyXml_complex(self):
+ # We want to create:
+ target = (
+ '<?xml version="1.0" encoding="utf-8"?>'
+ '<Project>'
+ '<PropertyGroup Label="Globals">'
+ '<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
+ '<Keyword>Win32Proj</Keyword>'
+ '<RootNamespace>automated_ui_tests</RootNamespace>'
+ '</PropertyGroup>'
+ '<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
+ '<PropertyGroup '
+ 'Condition="\'$(Configuration)|$(Platform)\'=='
+ '\'Debug|Win32\'" Label="Configuration">'
+ '<ConfigurationType>Application</ConfigurationType>'
+ '<CharacterSet>Unicode</CharacterSet>'
+ '</PropertyGroup>'
+ '</Project>')
+
+ xml = easy_xml.XmlToString(
+ ['Project',
+ ['PropertyGroup', {'Label': 'Globals'},
+ ['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
+ ['Keyword', 'Win32Proj'],
+ ['RootNamespace', 'automated_ui_tests']
+ ],
+ ['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
+ ['PropertyGroup',
+ {'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
+ 'Label': 'Configuration'},
+ ['ConfigurationType', 'Application'],
+ ['CharacterSet', 'Unicode']
+ ]
+ ])
+ self.assertEqual(xml, target)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/gyp/pylib/gyp/flock_tool.py b/third_party/python/gyp/pylib/gyp/flock_tool.py
new file mode 100755
index 0000000000..81fb79d136
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/flock_tool.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""These functions are executed via gyp-flock-tool when using the Makefile
+generator. Used on systems that don't have a built-in flock."""
+
+import fcntl
+import os
+import struct
+import subprocess
+import sys
+
+
+def main(args):
+ executor = FlockTool()
+ executor.Dispatch(args)
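+
+# Illustrative invocation (the copy installed by gyp.common.CopyTool is named
+# gyp-flock-tool):
+#   gyp-flock-tool flock /path/to/lockfile <command> [args...]
+# which takes an exclusive lock on lockfile and then runs the command.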
+
+
+class FlockTool(object):
+ """This class emulates the 'flock' command."""
+ def Dispatch(self, args):
+ """Dispatches a string command to a method."""
+ if len(args) < 1:
+ raise Exception("Not enough arguments")
+
+ method = "Exec%s" % self._CommandifyName(args[0])
+ getattr(self, method)(*args[1:])
+
+ def _CommandifyName(self, name_string):
+ """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
+ return name_string.title().replace('-', '')
+
+ def ExecFlock(self, lockfile, *cmd_list):
+ """Emulates the most basic behavior of Linux's flock(1)."""
+ # Rely on exception handling to report errors.
+ # Note that the stock python on SunOS has a bug
+ # where fcntl.flock(fd, LOCK_EX) always fails
+ # with EBADF; that's why we use this F_SETLK
+ # hack instead.
+ fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
+ if sys.platform.startswith('aix'):
+ # Python on AIX is compiled with LARGEFILE support, which changes the
+ # struct size.
+ op = struct.pack('hhIllqq', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
+ else:
+ op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
+ fcntl.fcntl(fd, fcntl.F_SETLK, op)
+ return subprocess.call(cmd_list)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/third_party/python/gyp/pylib/gyp/generator/__init__.py b/third_party/python/gyp/pylib/gyp/generator/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/__init__.py
diff --git a/third_party/python/gyp/pylib/gyp/generator/analyzer.py b/third_party/python/gyp/pylib/gyp/generator/analyzer.py
new file mode 100644
index 0000000000..b3484dcb1b
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/analyzer.py
@@ -0,0 +1,744 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
+the generator flag config_path) the path of a json file that dictates the files
+and targets to search for. The following keys are supported:
+files: list of paths (relative) of the files to search for.
+test_targets: unqualified target names to search for. Any target in this list
+that depends upon a file in |files| is output regardless of the type of target
+or chain of dependencies.
+additional_compile_targets: Unqualified targets to search for in addition to
+test_targets. Targets in the combined list that depend upon a file in |files|
+are not necessarily output. For example, if the target is of type none then the
+target is not output (but one of the descendants of the target will be).
+
+The following is output:
+error: only supplied if there is an error.
+compile_targets: minimal set of targets that directly or indirectly (for
+ targets of type none) depend on the files in |files| and is one of the
+ supplied targets or a target that one of the supplied targets depends on.
+ The expectation is this set of targets is passed into a build step. This list
+ always contains the output of test_targets as well.
+test_targets: set of targets from the supplied |test_targets| that either
+ directly or indirectly depend upon a file in |files|. This list is useful
+ if additional processing needs to be done for certain targets after the
+ build, such as running tests.
+status: outputs one of three values: none of the supplied files were found,
+ one of the include files changed so that it should be assumed everything
+ changed (in this case test_targets and compile_targets are not output) or at
+ least one file was found.
+invalid_targets: list of supplied targets that were not found.
+
+Example:
+Consider a graph like the following:
+  A   D
+ / \
+B   C
+A depends upon both B and C, A is of type none and B and C are executables.
+D is an executable, has no dependencies and nothing depends on it.
+If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
+files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
+the following is output:
+|compile_targets| = ["B"] B must built as it depends upon the changed file b.cc
+and the supplied target A depends upon it. A is not output as a build_target
+as it is of type none with no rules and actions.
+|test_targets| = ["B"] B directly depends upon the change file b.cc.
+
+Even though the file d.cc, which D depends upon, has changed D is not output
+as it was not supplied by way of |additional_compile_targets| or |test_targets|.
+
+If the generator flag analyzer_output_path is specified, output is written
+there. Otherwise output is written to stdout.
+
+In Gyp the "all" target is shorthand for the root targets in the files passed
+to gyp. For example, if file "a.gyp" contains targets "a1" and
+"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
+on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
+Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
+directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
+then the "all" target includes "b1" and "b2".
+"""
+
+from __future__ import print_function
+
+import gyp.common
+import gyp.ninja_syntax as ninja_syntax
+import json
+import os
+import posixpath
+import sys
+
+debug = False
+
+found_dependency_string = 'Found dependency'
+no_dependency_string = 'No dependencies'
+# Status when it should be assumed that everything has changed.
+all_changed_string = 'Found dependency (all)'
+
+# MatchStatus is used to indicate if and how a target depends upon the supplied
+# sources.
+# The target's sources contain one of the supplied paths.
+MATCH_STATUS_MATCHES = 1
+# The target has a dependency on another target that contains one of the
+# supplied paths.
+MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
+# The target's sources weren't in the supplied paths and none of the target's
+# dependencies depend upon a target that matched.
+MATCH_STATUS_DOESNT_MATCH = 3
+# The target doesn't contain the source, but the dependent targets have not
+# yet been visited to determine a more specific status.
+MATCH_STATUS_TBD = 4
+
+generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
+
+generator_wants_static_library_dependencies_adjusted = False
+
+generator_default_variables = {
+}
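+# The directory variables below expand to the '!!!' sentinel so that any
+# source path built from them is skipped by _AddSources(); the remaining
+# rule and configuration variables expand to the empty string.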
+for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
+ 'LIB_DIR', 'SHARED_LIB_DIR']:
+ generator_default_variables[dirname] = '!!!'
+
+for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
+ 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
+ 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
+ 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
+ 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
+ 'CONFIGURATION_NAME']:
+ generator_default_variables[unused] = ''
+
+
+def _ToGypPath(path):
+ """Converts a path to the format used by gyp."""
+ if os.sep == '\\' and os.altsep == '/':
+ return path.replace('\\', '/')
+ return path
+
+
+def _ResolveParent(path, base_path_components):
+ """Resolves |path|, which starts with at least one '../'. Returns an empty
+ string if the path shouldn't be considered. See _AddSources() for a
+ description of |base_path_components|."""
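+ # Illustrative example: _ResolveParent('../b.cc', ['foo', 'bar']) returns
+ # 'foo/b.cc'; when there are more leading '../' components than entries in
+ # |base_path_components|, the empty string is returned.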
+ depth = 0
+ while path.startswith('../'):
+ depth += 1
+ path = path[3:]
+ # Relative includes may go outside the source tree. For example, an action may
+ # have inputs in /usr/include, which are not in the source tree.
+ if depth > len(base_path_components):
+ return ''
+ if depth == len(base_path_components):
+ return path
+ return '/'.join(base_path_components[0:len(base_path_components) - depth]) + \
+ '/' + path
+
+
+def _AddSources(sources, base_path, base_path_components, result):
+ """Extracts valid sources from |sources| and adds them to |result|. Each
+ source file is relative to |base_path|, but may contain '..'. To make
+ resolving '..' easier |base_path_components| contains each of the
+ directories in |base_path|. Additionally each source may contain variables.
+ Such sources are ignored, as it is assumed dependencies on them are expressed
+ and tracked by some other means."""
+ # NOTE: gyp paths are always posix style.
+ for source in sources:
+ if not len(source) or source.startswith('!!!') or source.startswith('$'):
+ continue
+ # variable expansion may lead to //.
+ org_source = source
+ source = source[0] + source[1:].replace('//', '/')
+ if source.startswith('../'):
+ source = _ResolveParent(source, base_path_components)
+ if len(source):
+ result.append(source)
+ continue
+ result.append(base_path + source)
+ if debug:
+ print('AddSource', org_source, result[len(result) - 1])
+
+
+def _ExtractSourcesFromAction(action, base_path, base_path_components,
+ results):
+ if 'inputs' in action:
+ _AddSources(action['inputs'], base_path, base_path_components, results)
+
+
+def _ToLocalPath(toplevel_dir, path):
+ """Converts |path| to a path relative to |toplevel_dir|."""
+ if path == toplevel_dir:
+ return ''
+ if path.startswith(toplevel_dir + '/'):
+ return path[len(toplevel_dir) + len('/'):]
+ return path
+
+
+def _ExtractSources(target, target_dict, toplevel_dir):
+ # |target| is either absolute or relative and in the format of the OS. Gyp
+ # source paths are always posix. Convert |target| to a posix path relative to
+ # |toplevel_dir_|. This is done to make it easy to build source paths.
+ base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
+ base_path_components = base_path.split('/')
+
+ # Add a trailing '/' so that _AddSources() can easily build paths.
+ if len(base_path):
+ base_path += '/'
+
+ if debug:
+ print('ExtractSources', target, base_path)
+
+ results = []
+ if 'sources' in target_dict:
+ _AddSources(target_dict['sources'], base_path, base_path_components,
+ results)
+ # Include the inputs from any actions. Any changes to these affect the
+ # resulting output.
+ if 'actions' in target_dict:
+ for action in target_dict['actions']:
+ _ExtractSourcesFromAction(action, base_path, base_path_components,
+ results)
+ if 'rules' in target_dict:
+ for rule in target_dict['rules']:
+ _ExtractSourcesFromAction(rule, base_path, base_path_components, results)
+
+ return results
+
+
+class Target(object):
+ """Holds information about a particular target:
+ deps: set of Targets this Target depends upon. This is not recursive; only
+ the directly depended-upon Targets are included.
+ match_status: one of the MatchStatus values.
+ back_deps: set of Targets that have a dependency on this Target.
+ visited: used during iteration to indicate whether we've visited this target.
+ This is used for two iterations, once in building the set of Targets and
+ again in _GetBuildTargets().
+ name: fully qualified name of the target.
+ requires_build: True if the target type is such that it needs to be built.
+ See _DoesTargetTypeRequireBuild for details.
+ added_to_compile_targets: used when determining if the target was added to the
+ set of targets that needs to be built.
+ in_roots: true if this target is a descendant of one of the root nodes.
+ is_executable: true if the type of target is executable.
+ is_static_library: true if the type of target is static_library.
+ is_or_has_linked_ancestor: true if the target does a link (eg executable), or
+ if there is a target in back_deps that does a link."""
+ def __init__(self, name):
+ self.deps = set()
+ self.match_status = MATCH_STATUS_TBD
+ self.back_deps = set()
+ self.name = name
+ # TODO(sky): I don't like hanging this off Target. This state is specific
+ # to certain functions and should be isolated there.
+ self.visited = False
+ self.requires_build = False
+ self.added_to_compile_targets = False
+ self.in_roots = False
+ self.is_executable = False
+ self.is_static_library = False
+ self.is_or_has_linked_ancestor = False
+
+
+class Config(object):
+ """Details what we're looking for
+ files: set of files to search for
+ targets: see file description for details."""
+ def __init__(self):
+ self.files = []
+ self.targets = set()
+ self.additional_compile_target_names = set()
+ self.test_target_names = set()
+
+ def Init(self, params):
+ """Initializes Config. This is a separate method as it raises an exception
+ if there is a parse error."""
+ generator_flags = params.get('generator_flags', {})
+ config_path = generator_flags.get('config_path', None)
+ if not config_path:
+ return
+ try:
+ f = open(config_path, 'r')
+ config = json.load(f)
+ f.close()
+ except IOError:
+ raise Exception('Unable to open file ' + config_path)
+ except ValueError as e:
+ raise Exception('Unable to parse config file ' + config_path + ': ' + str(e))
+ if not isinstance(config, dict):
+ raise Exception('config_path must be a JSON file containing a dictionary')
+ self.files = config.get('files', [])
+ self.additional_compile_target_names = set(
+ config.get('additional_compile_targets', []))
+ self.test_target_names = set(config.get('test_targets', []))
+
+
+def _WasBuildFileModified(build_file, data, files, toplevel_dir):
+ """Returns true if the build file |build_file| is either in |files| or
+ one of the files included by |build_file| is in |files|. |toplevel_dir| is
+ the root of the source tree."""
+ if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
+ if debug:
+ print('gyp file modified', build_file)
+ return True
+
+ # First element of included_files is the file itself.
+ if len(data[build_file]['included_files']) <= 1:
+ return False
+
+ for include_file in data[build_file]['included_files'][1:]:
+ # |included_files| are relative to the directory of the |build_file|.
+ rel_include_file = \
+ _ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
+ if _ToLocalPath(toplevel_dir, rel_include_file) in files:
+ if debug:
+ print('included gyp file modified, gyp_file=', build_file, \
+ 'included file=', rel_include_file)
+ return True
+ return False
+
+
+def _GetOrCreateTargetByName(targets, target_name):
+ """Creates or returns the Target at targets[target_name]. If there is no
+ Target for |target_name| one is created. Returns a tuple of whether a new
+ Target was created and the Target."""
+ if target_name in targets:
+ return False, targets[target_name]
+ target = Target(target_name)
+ targets[target_name] = target
+ return True, target
+
+
+def _DoesTargetTypeRequireBuild(target_dict):
+ """Returns true if the target type is such that it needs to be built."""
+ # If a 'none' target has rules or actions we assume it requires a build.
+ return bool(target_dict['type'] != 'none' or
+ target_dict.get('actions') or target_dict.get('rules'))
+
+
+def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
+ build_files):
+ """Returns a tuple of the following:
+ . A dictionary mapping from fully qualified name to Target.
+ . A list of the targets that have a source file in |files|.
+ . Targets that constitute the 'all' target. See description at top of file
+ for details on the 'all' target.
+ This sets the |match_status| of the targets that contain any of the source
+ files in |files| to MATCH_STATUS_MATCHES.
+ |toplevel_dir| is the root of the source tree."""
+ # Maps from target name to Target.
+ name_to_target = {}
+
+ # Targets that matched.
+ matching_targets = []
+
+ # Queue of targets to visit.
+ targets_to_visit = target_list[:]
+
+ # Maps from build file to a boolean indicating whether the build file is in
+ # |files|.
+ build_file_in_files = {}
+
+ # Root targets across all files.
+ roots = set()
+
+ # Set of Targets in |build_files|.
+ build_file_targets = set()
+
+ while len(targets_to_visit) > 0:
+ target_name = targets_to_visit.pop()
+ created_target, target = _GetOrCreateTargetByName(name_to_target,
+ target_name)
+ if created_target:
+ roots.add(target)
+ elif target.visited:
+ continue
+
+ target.visited = True
+ target.requires_build = _DoesTargetTypeRequireBuild(
+ target_dicts[target_name])
+ target_type = target_dicts[target_name]['type']
+ target.is_executable = target_type == 'executable'
+ target.is_static_library = target_type == 'static_library'
+ target.is_or_has_linked_ancestor = (target_type == 'executable' or
+ target_type == 'shared_library')
+
+ build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
+ if not build_file in build_file_in_files:
+ build_file_in_files[build_file] = \
+ _WasBuildFileModified(build_file, data, files, toplevel_dir)
+
+ if build_file in build_files:
+ build_file_targets.add(target)
+
+ # If a build file (or any of its included files) is modified we assume all
+ # targets in the file are modified.
+ if build_file_in_files[build_file]:
+ print('matching target from modified build file', target_name)
+ target.match_status = MATCH_STATUS_MATCHES
+ matching_targets.append(target)
+ else:
+ sources = _ExtractSources(target_name, target_dicts[target_name],
+ toplevel_dir)
+ for source in sources:
+ if _ToGypPath(os.path.normpath(source)) in files:
+ print('target', target_name, 'matches', source)
+ target.match_status = MATCH_STATUS_MATCHES
+ matching_targets.append(target)
+ break
+
+ # Add dependencies to visit as well as updating back pointers for deps.
+ for dep in target_dicts[target_name].get('dependencies', []):
+ targets_to_visit.append(dep)
+
+ created_dep_target, dep_target = _GetOrCreateTargetByName(name_to_target,
+ dep)
+ if not created_dep_target:
+ roots.discard(dep_target)
+
+ target.deps.add(dep_target)
+ dep_target.back_deps.add(target)
+
+ return name_to_target, matching_targets, roots & build_file_targets
+
+
+def _GetUnqualifiedToTargetMapping(all_targets, to_find):
+ """Returns a tuple of the following:
+ . mapping (dictionary) from unqualified name to Target for all the
+ Targets in |to_find|.
+ . any target names not found. If this is empty all targets were found."""
+ result = {}
+ if not to_find:
+ return {}, []
+ to_find = set(to_find)
+ for target_name in all_targets.keys():
+ extracted = gyp.common.ParseQualifiedTarget(target_name)
+ if len(extracted) > 1 and extracted[1] in to_find:
+ to_find.remove(extracted[1])
+ result[extracted[1]] = all_targets[target_name]
+ if not to_find:
+ return result, []
+ return result, [x for x in to_find]
+
+
+def _DoesTargetDependOnMatchingTargets(target):
+ """Returns true if |target| or any of its dependencies is one of the
+ targets containing the files supplied as input to analyzer. This updates
+ |matches| of the Targets as it recurses.
+ target: the Target to look for."""
+ if target.match_status == MATCH_STATUS_DOESNT_MATCH:
+ return False
+ if target.match_status == MATCH_STATUS_MATCHES or \
+ target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY:
+ return True
+ for dep in target.deps:
+ if _DoesTargetDependOnMatchingTargets(dep):
+ target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
+ print('\t', target.name, 'matches by dep', dep.name)
+ return True
+ target.match_status = MATCH_STATUS_DOESNT_MATCH
+ return False
+
+
+def _GetTargetsDependingOnMatchingTargets(possible_targets):
+ """Returns the list of Targets in |possible_targets| that depend (either
+ directly or indirectly) on at least one of the targets containing the files
+ supplied as input to analyzer.
+ possible_targets: targets to search from."""
+ found = []
+ print('Targets that matched by dependency:')
+ for target in possible_targets:
+ if _DoesTargetDependOnMatchingTargets(target):
+ found.append(target)
+ return found
+
+
+def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
+ """Recurses through all targets that depend on |target|, adding all targets
+ that need to be built (and are in |roots|) to |result|.
+ roots: set of root targets.
+ add_if_no_ancestor: If true and there are no ancestors of |target| then add
+ |target| to |result|. |target| must still be in |roots|.
+ result: targets that need to be built are added here."""
+ if target.visited:
+ return
+
+ target.visited = True
+ target.in_roots = target in roots
+
+ for back_dep_target in target.back_deps:
+ _AddCompileTargets(back_dep_target, roots, False, result)
+ target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
+ target.in_roots |= back_dep_target.in_roots
+ target.is_or_has_linked_ancestor |= (
+ back_dep_target.is_or_has_linked_ancestor)
+
+ # Always add 'executable' targets. Even though they may be built by other
+ # targets that depend upon them it makes detection of what is going to be
+ # built easier.
+ # And always add static_libraries that have no dependencies on them from
+ # linkables. This is necessary as the other dependencies on them may be
+ # static libraries themselves, which are not compile time dependencies.
+ if target.in_roots and \
+ (target.is_executable or
+ (not target.added_to_compile_targets and
+ (add_if_no_ancestor or target.requires_build)) or
+ (target.is_static_library and add_if_no_ancestor and
+ not target.is_or_has_linked_ancestor)):
+ print('\t\tadding to compile targets', target.name, 'executable',
+ target.is_executable, 'added_to_compile_targets',
+ target.added_to_compile_targets, 'add_if_no_ancestor',
+ add_if_no_ancestor, 'requires_build', target.requires_build,
+ 'is_static_library', target.is_static_library,
+ 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor
+ )
+ result.add(target)
+ target.added_to_compile_targets = True
+
+
+def _GetCompileTargets(matching_targets, supplied_targets):
+ """Returns the set of Targets that require a build.
+ matching_targets: targets that changed and need to be built.
+ supplied_targets: set of targets supplied to analyzer to search from."""
+ result = set()
+ for target in matching_targets:
+ print('finding compile targets for match', target.name)
+ _AddCompileTargets(target, supplied_targets, True, result)
+ return result
+
+
+def _WriteOutput(params, **values):
+ """Writes the output, either to stdout or a file is specified."""
+ if 'error' in values:
+ print('Error:', values['error'])
+ if 'status' in values:
+ print(values['status'])
+ if 'targets' in values:
+ values['targets'].sort()
+ print('Supplied targets that depend on changed files:')
+ for target in values['targets']:
+ print('\t', target)
+ if 'invalid_targets' in values:
+ values['invalid_targets'].sort()
+ print('The following targets were not found:')
+ for target in values['invalid_targets']:
+ print('\t', target)
+ if 'build_targets' in values:
+ values['build_targets'].sort()
+ print('Targets that require a build:')
+ for target in values['build_targets']:
+ print('\t', target)
+ if 'compile_targets' in values:
+ values['compile_targets'].sort()
+ print('Targets that need to be built:')
+ for target in values['compile_targets']:
+ print('\t', target)
+ if 'test_targets' in values:
+ values['test_targets'].sort()
+ print('Test targets:')
+ for target in values['test_targets']:
+ print('\t', target)
+
+ output_path = params.get('generator_flags', {}).get(
+ 'analyzer_output_path', None)
+ if not output_path:
+ print(json.dumps(values))
+ return
+ try:
+ f = open(output_path, 'w')
+ f.write(json.dumps(values) + '\n')
+ f.close()
+ except IOError as e:
+ print('Error writing to output file', output_path, str(e))
+
+
+def _WasGypIncludeFileModified(params, files):
+ """Returns true if one of the files in |files| is in the set of included
+ files."""
+ if params['options'].includes:
+ for include in params['options'].includes:
+ if _ToGypPath(os.path.normpath(include)) in files:
+ print('Include file modified, assuming all changed', include)
+ return True
+ return False
+
+
+def _NamesNotIn(names, mapping):
+ """Returns a list of the values in |names| that are not in |mapping|."""
+ return [name for name in names if name not in mapping]
+
+
+def _LookupTargets(names, mapping):
+ """Returns a list of the mapping[name] for each value in |names| that is in
+ |mapping|."""
+ return [mapping[name] for name in names if name in mapping]
+
+
+def CalculateVariables(default_variables, params):
+ """Calculate additional variables for use in the build (called by gyp)."""
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == 'mac':
+ default_variables.setdefault('OS', 'mac')
+ elif flavor == 'win':
+ default_variables.setdefault('OS', 'win')
+ # Copy additional generator configuration data from VS, which is shared
+ # by the Windows Ninja generator.
+ import gyp.generator.msvs as msvs_generator
+ generator_additional_non_configuration_keys = getattr(msvs_generator,
+ 'generator_additional_non_configuration_keys', [])
+ generator_additional_path_sections = getattr(msvs_generator,
+ 'generator_additional_path_sections', [])
+
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
+ else:
+ operating_system = flavor
+ if flavor == 'android':
+ operating_system = 'linux' # Keep this legacy behavior for now.
+ default_variables.setdefault('OS', operating_system)
+
+
+class TargetCalculator(object):
+ """Calculates the matching test_targets and matching compile_targets."""
+ def __init__(self, files, additional_compile_target_names, test_target_names,
+ data, target_list, target_dicts, toplevel_dir, build_files):
+ self._additional_compile_target_names = set(additional_compile_target_names)
+ self._test_target_names = set(test_target_names)
+ self._name_to_target, self._changed_targets, self._root_targets = (
+ _GenerateTargets(data, target_list, target_dicts, toplevel_dir,
+ frozenset(files), build_files))
+ self._unqualified_mapping, self.invalid_targets = (
+ _GetUnqualifiedToTargetMapping(self._name_to_target,
+ self._supplied_target_names_no_all()))
+
+ def _supplied_target_names(self):
+ return self._additional_compile_target_names | self._test_target_names
+
+ def _supplied_target_names_no_all(self):
+ """Returns the supplied test targets without 'all'."""
+ result = self._supplied_target_names()
+ result.discard('all')
+ return result
+
+ def is_build_impacted(self):
+ """Returns true if the supplied files impact the build at all."""
+ return self._changed_targets
+
+ def find_matching_test_target_names(self):
+ """Returns the set of output test targets."""
+ assert self.is_build_impacted()
+ # Find the test targets first. 'all' is special cased to mean all the
+ # root targets. To deal with this, the supplied |test_targets| are expanded
+ # to include the root targets during lookup. If any of the root targets
+ # match, we remove them and replace them with 'all'.
+ test_target_names_no_all = set(self._test_target_names)
+ test_target_names_no_all.discard('all')
+ test_targets_no_all = _LookupTargets(test_target_names_no_all,
+ self._unqualified_mapping)
+ test_target_names_contains_all = 'all' in self._test_target_names
+ if test_target_names_contains_all:
+ test_targets = [x for x in (set(test_targets_no_all) |
+ set(self._root_targets))]
+ else:
+ test_targets = [x for x in test_targets_no_all]
+ print('supplied test_targets')
+ for target_name in self._test_target_names:
+ print('\t', target_name)
+ print('found test_targets')
+ for target in test_targets:
+ print('\t', target.name)
+ print('searching for matching test targets')
+ matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
+ matching_test_targets_contains_all = (test_target_names_contains_all and
+ set(matching_test_targets) &
+ set(self._root_targets))
+ if matching_test_targets_contains_all:
+ # Remove any of the targets for all that were not explicitly supplied,
+ # 'all' is subsequently added to the matching names below.
+ matching_test_targets = [x for x in (set(matching_test_targets) &
+ set(test_targets_no_all))]
+ print('matched test_targets')
+ for target in matching_test_targets:
+ print('\t', target.name)
+ matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
+ for target in matching_test_targets]
+ if matching_test_targets_contains_all:
+ matching_target_names.append('all')
+ print('\tall')
+ return matching_target_names
+
+ def find_matching_compile_target_names(self):
+ """Returns the set of output compile targets."""
+ assert self.is_build_impacted()
+ # Compile targets are found by searching up from changed targets.
+ # Reset the visited status for _GetBuildTargets.
+ for target in self._name_to_target.values():
+ target.visited = False
+
+ supplied_targets = _LookupTargets(self._supplied_target_names_no_all(),
+ self._unqualified_mapping)
+ if 'all' in self._supplied_target_names():
+ supplied_targets = [x for x in (set(supplied_targets) |
+ set(self._root_targets))]
+ print('Supplied test_targets & compile_targets')
+ for target in supplied_targets:
+ print('\t', target.name)
+ print('Finding compile targets')
+ compile_targets = _GetCompileTargets(self._changed_targets,
+ supplied_targets)
+ return [gyp.common.ParseQualifiedTarget(target.name)[1]
+ for target in compile_targets]
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ """Called by gyp as the final stage. Outputs results."""
+ config = Config()
+ try:
+ config.Init(params)
+
+ if not config.files:
+ raise Exception('Must specify files to analyze via config_path generator '
+ 'flag')
+
+ toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
+ if debug:
+ print('toplevel_dir', toplevel_dir)
+
+ if _WasGypIncludeFileModified(params, config.files):
+ result_dict = { 'status': all_changed_string,
+ 'test_targets': list(config.test_target_names),
+ 'compile_targets': list(
+ config.additional_compile_target_names |
+ config.test_target_names) }
+ _WriteOutput(params, **result_dict)
+ return
+
+ calculator = TargetCalculator(config.files,
+ config.additional_compile_target_names,
+ config.test_target_names, data,
+ target_list, target_dicts, toplevel_dir,
+ params['build_files'])
+ if not calculator.is_build_impacted():
+ result_dict = { 'status': no_dependency_string,
+ 'test_targets': [],
+ 'compile_targets': [] }
+ if calculator.invalid_targets:
+ result_dict['invalid_targets'] = calculator.invalid_targets
+ _WriteOutput(params, **result_dict)
+ return
+
+ test_target_names = calculator.find_matching_test_target_names()
+ compile_target_names = calculator.find_matching_compile_target_names()
+ found_at_least_one_target = compile_target_names or test_target_names
+ result_dict = { 'test_targets': test_target_names,
+ 'status': found_dependency_string if
+ found_at_least_one_target else no_dependency_string,
+ 'compile_targets': list(
+ set(compile_target_names) |
+ set(test_target_names)) }
+ if calculator.invalid_targets:
+ result_dict['invalid_targets'] = calculator.invalid_targets
+ _WriteOutput(params, **result_dict)
+
+ except Exception as e:
+ _WriteOutput(params, error=str(e))
diff --git a/third_party/python/gyp/pylib/gyp/generator/cmake.py b/third_party/python/gyp/pylib/gyp/generator/cmake.py
new file mode 100644
index 0000000000..4a2041cf26
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/cmake.py
@@ -0,0 +1,1256 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cmake output module
+
+This module is under development and should be considered experimental.
+
+This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is
+created for each configuration.
+
+This module's original purpose was to support editing in IDEs like KDevelop
+which use CMake for project management. It is also possible to use CMake to
+generate projects for other IDEs such as eclipse cdt and code::blocks. QtCreator
+will convert the CMakeLists.txt to a code::blocks cbp for the editor to read,
+but build using CMake. As a result, the QtCreator editor is unaware of compiler
+defines. The generated CMakeLists.txt can also be used to build on Linux. There
+is currently no support for building on platforms other than Linux.
+
+The generated CMakeLists.txt should properly compile all projects. However,
+there is a mismatch between gyp and cmake with regard to linking. All attempts
+are made to work around this, but CMake sometimes sees -Wl,--start-group as a
+library and incorrectly repeats it. As a result the output of this generator
+should not be relied on for building.
+
+When using with kdevelop, use version 4.4+. Previous versions of kdevelop will
+not be able to find the header file directories described in the generated
+CMakeLists.txt file.
+"""
+
+from __future__ import print_function
+
+import multiprocessing
+import os
+import signal
+import string
+import subprocess
+import gyp.common
+import gyp.xcode_emulation
+
+try:
+ # maketrans moved to str in python3.
+ _maketrans = string.maketrans
+except NameError:
+ _maketrans = str.maketrans
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'STATIC_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_SUFFIX': '.a',
+ 'SHARED_LIB_PREFIX': 'lib',
+ 'SHARED_LIB_SUFFIX': '.so',
+ 'SHARED_LIB_DIR': '${builddir}/lib.${TOOLSET}',
+ 'LIB_DIR': '${obj}.${TOOLSET}',
+ 'INTERMEDIATE_DIR': '${obj}.${TOOLSET}/${TARGET}/geni',
+ 'SHARED_INTERMEDIATE_DIR': '${obj}/gen',
+ 'PRODUCT_DIR': '${builddir}',
+ 'RULE_INPUT_PATH': '${RULE_INPUT_PATH}',
+ 'RULE_INPUT_DIRNAME': '${RULE_INPUT_DIRNAME}',
+ 'RULE_INPUT_NAME': '${RULE_INPUT_NAME}',
+ 'RULE_INPUT_ROOT': '${RULE_INPUT_ROOT}',
+ 'RULE_INPUT_EXT': '${RULE_INPUT_EXT}',
+ 'CONFIGURATION_NAME': '${configuration}',
+}
+
+FULL_PATH_VARS = ('${CMAKE_CURRENT_LIST_DIR}', '${builddir}', '${obj}')
+
+generator_supports_multiple_toolsets = True
+generator_wants_static_library_dependencies_adjusted = True
+
+COMPILABLE_EXTENSIONS = {
+ '.c': 'cc',
+ '.cc': 'cxx',
+ '.cpp': 'cxx',
+ '.cxx': 'cxx',
+ '.s': 's', # cc
+ '.S': 's', # cc
+}
+
+
+def RemovePrefix(a, prefix):
+ """Returns 'a' without 'prefix' if it starts with 'prefix'."""
+ return a[len(prefix):] if a.startswith(prefix) else a
+
+
+def CalculateVariables(default_variables, params):
+ """Calculate additional variables for use in the build (called by gyp)."""
+ default_variables.setdefault('OS', gyp.common.GetFlavor(params))
+
+
+def Compilable(filename):
+ """Return true if the file is compilable (should be in OBJS)."""
+ return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
+
+
+def Linkable(filename):
+ """Return true if the file is linkable (should be on the link line)."""
+ return filename.endswith('.o')
+
+
+def NormjoinPathForceCMakeSource(base_path, rel_path):
+ """Resolves rel_path against base_path and returns the result.
+
+ If rel_path is an absolute path it is returned unchanged.
+ Otherwise it is resolved against base_path and normalized.
+ If the result is a relative path, it is forced to be relative to the
+ CMakeLists.txt.
+ """
+ if os.path.isabs(rel_path):
+ return rel_path
+ if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
+ return rel_path
+ # TODO: do we need to check base_path for absolute variables as well?
+ return os.path.join('${CMAKE_CURRENT_LIST_DIR}',
+ os.path.normpath(os.path.join(base_path, rel_path)))
+
+
+def NormjoinPath(base_path, rel_path):
+ """Resolves rel_path against base_path and returns the result.
+ TODO: what is this really used for?
+ If rel_path begins with '$' it is returned unchanged.
+ Otherwise it is resolved against base_path if relative, then normalized.
+ """
+ if rel_path.startswith('$') and not rel_path.startswith('${configuration}'):
+ return rel_path
+ return os.path.normpath(os.path.join(base_path, rel_path))
+
+
+def CMakeStringEscape(a):
+ """Escapes the string 'a' for use inside a CMake string.
+
+ This means escaping
+ '\' otherwise it may be seen as modifying the next character
+ '"' otherwise it will end the string
+ ';' otherwise the string becomes a list
+
+ The following do not need to be escaped
+ '#' when the lexer is in string state, this does not start a comment
+
+ The following are yet unknown
+ '$' generator variables (like ${obj}) must not be escaped,
+ but text $ should be escaped
+ what is wanted is to know which $ come from generator variables
+ """
+ return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
+
+
+def SetFileProperty(output, source_name, property_name, values, sep):
+ """Given a set of source file, sets the given property on them."""
+ output.write('set_source_files_properties(')
+ output.write(source_name)
+ output.write(' PROPERTIES ')
+ output.write(property_name)
+ output.write(' "')
+ for value in values:
+ output.write(CMakeStringEscape(value))
+ output.write(sep)
+ output.write('")\n')
+
+
+def SetFilesProperty(output, variable, property_name, values, sep):
+ """Given a set of source files, sets the given property on them."""
+ output.write('set_source_files_properties(')
+ WriteVariable(output, variable)
+ output.write(' PROPERTIES ')
+ output.write(property_name)
+ output.write(' "')
+ for value in values:
+ output.write(CMakeStringEscape(value))
+ output.write(sep)
+ output.write('")\n')
+
+
+def SetTargetProperty(output, target_name, property_name, values, sep=''):
+ """Given a target, sets the given property."""
+ output.write('set_target_properties(')
+ output.write(target_name)
+ output.write(' PROPERTIES ')
+ output.write(property_name)
+ output.write(' "')
+ for value in values:
+ output.write(CMakeStringEscape(value))
+ output.write(sep)
+ output.write('")\n')
+
+
+def SetVariable(output, variable_name, value):
+ """Sets a CMake variable."""
+ output.write('set(')
+ output.write(variable_name)
+ output.write(' "')
+ output.write(CMakeStringEscape(value))
+ output.write('")\n')
+
+
+def SetVariableList(output, variable_name, values):
+ """Sets a CMake variable to a list."""
+ if not values:
+ return SetVariable(output, variable_name, "")
+ if len(values) == 1:
+ return SetVariable(output, variable_name, values[0])
+ output.write('list(APPEND ')
+ output.write(variable_name)
+ output.write('\n "')
+ output.write('"\n "'.join([CMakeStringEscape(value) for value in values]))
+ output.write('")\n')
+
+
+def UnsetVariable(output, variable_name):
+ """Unsets a CMake variable."""
+ output.write('unset(')
+ output.write(variable_name)
+ output.write(')\n')
+
+
+def WriteVariable(output, variable_name, prepend=None):
+ if prepend:
+ output.write(prepend)
+ output.write('${')
+ output.write(variable_name)
+ output.write('}')
+
+
+class CMakeTargetType(object):
+ def __init__(self, command, modifier, property_modifier):
+ self.command = command
+ self.modifier = modifier
+ self.property_modifier = property_modifier
+
+
+cmake_target_type_from_gyp_target_type = {
+ 'executable': CMakeTargetType('add_executable', None, 'RUNTIME'),
+ 'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE'),
+ 'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY'),
+ 'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY'),
+ 'none': CMakeTargetType('add_custom_target', 'SOURCES', None),
+}
+
+
+def StringToCMakeTargetName(a):
+ """Converts the given string 'a' to a valid CMake target name.
+
+ All invalid characters are replaced by '_'.
+ Invalid for cmake: ' ', '/', '(', ')', '"'
+ Invalid for make: ':'
+ Invalid for unknown reasons but cause failures: '.'
+ """
+ return a.translate(_maketrans(' /():."', '_______'))
+
+
+def WriteActions(target_name, actions, extra_sources, extra_deps,
+ path_to_gyp, output):
+ """Write CMake for the 'actions' in the target.
+
+ Args:
+ target_name: the name of the CMake target being generated.
+ actions: the Gyp 'actions' dict for this target.
+ extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+ extra_deps: [<cmake_taget>] to append with generated targets.
+ path_to_gyp: relative path from CMakeLists.txt being generated to
+ the Gyp file in which the target being generated is defined.
+ """
+ for action in actions:
+ action_name = StringToCMakeTargetName(action['action_name'])
+ action_target_name = '%s__%s' % (target_name, action_name)
+
+ inputs = action['inputs']
+ inputs_name = action_target_name + '__input'
+ SetVariableList(output, inputs_name,
+ [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
+
+ outputs = action['outputs']
+ cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out)
+ for out in outputs]
+ outputs_name = action_target_name + '__output'
+ SetVariableList(output, outputs_name, cmake_outputs)
+
+ # Build up a list of outputs.
+ # Collect the output dirs we'll need.
+ dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
+
+ if int(action.get('process_outputs_as_sources', False)):
+ extra_sources.extend(zip(cmake_outputs, outputs))
+
+ # add_custom_command
+ output.write('add_custom_command(OUTPUT ')
+ WriteVariable(output, outputs_name)
+ output.write('\n')
+
+ if len(dirs) > 0:
+ for directory in dirs:
+ output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
+ output.write(directory)
+ output.write('\n')
+
+ output.write(' COMMAND ')
+ output.write(gyp.common.EncodePOSIXShellList(action['action']))
+ output.write('\n')
+
+ output.write(' DEPENDS ')
+ WriteVariable(output, inputs_name)
+ output.write('\n')
+
+ output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
+ output.write(path_to_gyp)
+ output.write('\n')
+
+ output.write(' COMMENT ')
+ if 'message' in action:
+ output.write(action['message'])
+ else:
+ output.write(action_target_name)
+ output.write('\n')
+
+ output.write(' VERBATIM\n')
+ output.write(')\n')
+
+ # add_custom_target
+ output.write('add_custom_target(')
+ output.write(action_target_name)
+ output.write('\n DEPENDS ')
+ WriteVariable(output, outputs_name)
+ output.write('\n SOURCES ')
+ WriteVariable(output, inputs_name)
+ output.write('\n)\n')
+
+ extra_deps.append(action_target_name)
+
+
+def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
+ if rel_path.startswith(("${RULE_INPUT_PATH}","${RULE_INPUT_DIRNAME}")):
+ if any([rule_source.startswith(var) for var in FULL_PATH_VARS]):
+ return rel_path
+ return NormjoinPathForceCMakeSource(base_path, rel_path)
+
+
+def WriteRules(target_name, rules, extra_sources, extra_deps,
+ path_to_gyp, output):
+ """Write CMake for the 'rules' in the target.
+
+ Args:
+ target_name: the name of the CMake target being generated.
+ rules: the Gyp 'rules' dict for this target.
+ extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+ extra_deps: [<cmake_taget>] to append with generated targets.
+ path_to_gyp: relative path from CMakeLists.txt being generated to
+ the Gyp file in which the target being generated is defined.
+ """
+ for rule in rules:
+ rule_name = StringToCMakeTargetName(target_name + '__' + rule['rule_name'])
+
+ inputs = rule.get('inputs', [])
+ inputs_name = rule_name + '__input'
+ SetVariableList(output, inputs_name,
+ [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
+ outputs = rule['outputs']
+ var_outputs = []
+
+ for count, rule_source in enumerate(rule.get('rule_sources', [])):
+ action_name = rule_name + '_' + str(count)
+
+ rule_source_dirname, rule_source_basename = os.path.split(rule_source)
+ rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename)
+
+ SetVariable(output, 'RULE_INPUT_PATH', rule_source)
+ SetVariable(output, 'RULE_INPUT_DIRNAME', rule_source_dirname)
+ SetVariable(output, 'RULE_INPUT_NAME', rule_source_basename)
+ SetVariable(output, 'RULE_INPUT_ROOT', rule_source_root)
+ SetVariable(output, 'RULE_INPUT_EXT', rule_source_ext)
+
+ # Build up a list of outputs.
+ # Collect the output dirs we'll need.
+ dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
+
+ # Create variables for the output, as 'local' variable will be unset.
+ these_outputs = []
+ for output_index, out in enumerate(outputs):
+ output_name = action_name + '_' + str(output_index)
+ SetVariable(output, output_name,
+ NormjoinRulePathForceCMakeSource(path_to_gyp, out,
+ rule_source))
+ if int(rule.get('process_outputs_as_sources', False)):
+ extra_sources.append(('${' + output_name + '}', out))
+ these_outputs.append('${' + output_name + '}')
+ var_outputs.append('${' + output_name + '}')
+
+ # add_custom_command
+ output.write('add_custom_command(OUTPUT\n')
+ for out in these_outputs:
+ output.write(' ')
+ output.write(out)
+ output.write('\n')
+
+ for directory in dirs:
+ output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
+ output.write(directory)
+ output.write('\n')
+
+ output.write(' COMMAND ')
+ output.write(gyp.common.EncodePOSIXShellList(rule['action']))
+ output.write('\n')
+
+ output.write(' DEPENDS ')
+ WriteVariable(output, inputs_name)
+ output.write(' ')
+ output.write(NormjoinPath(path_to_gyp, rule_source))
+ output.write('\n')
+
+ # CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives.
+ # The cwd is the current build directory.
+ output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
+ output.write(path_to_gyp)
+ output.write('\n')
+
+ output.write(' COMMENT ')
+ if 'message' in rule:
+ output.write(rule['message'])
+ else:
+ output.write(action_name)
+ output.write('\n')
+
+ output.write(' VERBATIM\n')
+ output.write(')\n')
+
+ UnsetVariable(output, 'RULE_INPUT_PATH')
+ UnsetVariable(output, 'RULE_INPUT_DIRNAME')
+ UnsetVariable(output, 'RULE_INPUT_NAME')
+ UnsetVariable(output, 'RULE_INPUT_ROOT')
+ UnsetVariable(output, 'RULE_INPUT_EXT')
+
+ # add_custom_target
+ output.write('add_custom_target(')
+ output.write(rule_name)
+ output.write(' DEPENDS\n')
+ for out in var_outputs:
+ output.write(' ')
+ output.write(out)
+ output.write('\n')
+ output.write('SOURCES ')
+ WriteVariable(output, inputs_name)
+ output.write('\n')
+ for rule_source in rule.get('rule_sources', []):
+ output.write(' ')
+ output.write(NormjoinPath(path_to_gyp, rule_source))
+ output.write('\n')
+ output.write(')\n')
+
+ extra_deps.append(rule_name)
+
+
+def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
+ """Write CMake for the 'copies' in the target.
+
+ Args:
+ target_name: the name of the CMake target being generated.
+ copies: the Gyp 'copies' list for this target.
+ extra_deps: [<cmake_target>] to append with generated targets.
+ path_to_gyp: relative path from CMakeLists.txt being generated to
+ the Gyp file in which the target being generated is defined.
+ """
+ copy_name = target_name + '__copies'
+
+ # CMake gets upset with custom targets with OUTPUT which specify no output.
+ have_copies = any(copy['files'] for copy in copies)
+ if not have_copies:
+ output.write('add_custom_target(')
+ output.write(copy_name)
+ output.write(')\n')
+ extra_deps.append(copy_name)
+ return
+
+ class Copy(object):
+ def __init__(self, ext, command):
+ self.cmake_inputs = []
+ self.cmake_outputs = []
+ self.gyp_inputs = []
+ self.gyp_outputs = []
+ self.ext = ext
+ self.inputs_name = None
+ self.outputs_name = None
+ self.command = command
+
+ file_copy = Copy('', 'copy')
+ dir_copy = Copy('_dirs', 'copy_directory')
+
+ for copy in copies:
+ files = copy['files']
+ destination = copy['destination']
+ for src in files:
+ path = os.path.normpath(src)
+ basename = os.path.split(path)[1]
+ dst = os.path.join(destination, basename)
+
+ copy = file_copy if os.path.basename(src) else dir_copy
+
+ copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
+ copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
+ copy.gyp_inputs.append(src)
+ copy.gyp_outputs.append(dst)
+
+ for copy in (file_copy, dir_copy):
+ if copy.cmake_inputs:
+ copy.inputs_name = copy_name + '__input' + copy.ext
+ SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
+
+ copy.outputs_name = copy_name + '__output' + copy.ext
+ SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
+
+ # add_custom_command
+ output.write('add_custom_command(\n')
+
+ output.write('OUTPUT')
+ for copy in (file_copy, dir_copy):
+ if copy.outputs_name:
+ WriteVariable(output, copy.outputs_name, ' ')
+ output.write('\n')
+
+ for copy in (file_copy, dir_copy):
+ for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
+ # 'cmake -E copy src dst' will create the 'dst' directory if needed.
+ output.write('COMMAND ${CMAKE_COMMAND} -E %s ' % copy.command)
+ output.write(src)
+ output.write(' ')
+ output.write(dst)
+ output.write("\n")
+
+ output.write('DEPENDS')
+ for copy in (file_copy, dir_copy):
+ if copy.inputs_name:
+ WriteVariable(output, copy.inputs_name, ' ')
+ output.write('\n')
+
+ output.write('WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
+ output.write(path_to_gyp)
+ output.write('\n')
+
+ output.write('COMMENT Copying for ')
+ output.write(target_name)
+ output.write('\n')
+
+ output.write('VERBATIM\n')
+ output.write(')\n')
+
+ # add_custom_target
+ output.write('add_custom_target(')
+ output.write(copy_name)
+ output.write('\n DEPENDS')
+ for copy in (file_copy, dir_copy):
+ if copy.outputs_name:
+ WriteVariable(output, copy.outputs_name, ' ')
+ output.write('\n SOURCES')
+ if file_copy.inputs_name:
+ WriteVariable(output, file_copy.inputs_name, ' ')
+ output.write('\n)\n')
+
+ extra_deps.append(copy_name)
+
+
+def CreateCMakeTargetBaseName(qualified_target):
+ """This is the name we would like the target to have."""
+ _, gyp_target_name, gyp_target_toolset = (
+ gyp.common.ParseQualifiedTarget(qualified_target))
+ cmake_target_base_name = gyp_target_name
+ if gyp_target_toolset and gyp_target_toolset != 'target':
+ cmake_target_base_name += '_' + gyp_target_toolset
+ return StringToCMakeTargetName(cmake_target_base_name)
+
+
+def CreateCMakeTargetFullName(qualified_target):
+ """An unambiguous name for the target."""
+ gyp_file, gyp_target_name, gyp_target_toolset = (
+ gyp.common.ParseQualifiedTarget(qualified_target))
+ cmake_target_full_name = gyp_file + ':' + gyp_target_name
+ if gyp_target_toolset and gyp_target_toolset != 'target':
+ cmake_target_full_name += '_' + gyp_target_toolset
+ return StringToCMakeTargetName(cmake_target_full_name)
+
+
+class CMakeNamer(object):
+ """Converts Gyp target names into CMake target names.
+
+ CMake requires that target names be globally unique. One way to ensure
+ this is to fully qualify the names of the targets. Unfortunately, this
+ ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
+ of just "chrome". If this generator were only interested in building, it
+ would be possible to fully qualify all target names, then create
+ unqualified target names which depend on all qualified targets which
+ should have had that name. This is more or less what the 'make' generator
+ does with aliases. However, one goal of this generator is to create CMake
+ files for use with IDEs, and fully qualified names are not as user
+ friendly.
+
+ Since target name collision is rare, we do the above only when required.
+
+ Toolset variants are always qualified from the base, as this is required for
+ building. However, it also makes sense for an IDE, as it is possible for
+ defines to be different.
+ """
+ def __init__(self, target_list):
+ self.cmake_target_base_names_conflicting = set()
+
+ cmake_target_base_names_seen = set()
+ for qualified_target in target_list:
+ cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target)
+
+ if cmake_target_base_name not in cmake_target_base_names_seen:
+ cmake_target_base_names_seen.add(cmake_target_base_name)
+ else:
+ self.cmake_target_base_names_conflicting.add(cmake_target_base_name)
+
+ def CreateCMakeTargetName(self, qualified_target):
+ base_name = CreateCMakeTargetBaseName(qualified_target)
+ if base_name in self.cmake_target_base_names_conflicting:
+ return CreateCMakeTargetFullName(qualified_target)
+ return base_name
+
+
+def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
+ options, generator_flags, all_qualified_targets, flavor,
+ output):
+ # The make generator does this always.
+ # TODO: It would be nice to be able to tell CMake all dependencies.
+ circular_libs = generator_flags.get('circular', True)
+
+ if not generator_flags.get('standalone', False):
+ output.write('\n#')
+ output.write(qualified_target)
+ output.write('\n')
+
+ gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+ rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir)
+ rel_gyp_dir = os.path.dirname(rel_gyp_file)
+
+ # Relative path from build dir to top dir.
+ build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
+ # Relative path from build dir to gyp dir.
+ build_to_gyp = os.path.join(build_to_top, rel_gyp_dir)
+
+ path_from_cmakelists_to_gyp = build_to_gyp
+
+ spec = target_dicts.get(qualified_target, {})
+ config = spec.get('configurations', {}).get(config_to_use, {})
+
+ xcode_settings = None
+ if flavor == 'mac':
+ xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+
+ target_name = spec.get('target_name', '<missing target name>')
+ target_type = spec.get('type', '<missing target type>')
+ target_toolset = spec.get('toolset')
+
+ cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
+ if cmake_target_type is None:
+ print('Target %s has unknown target type %s, skipping.' %
+ ( target_name, target_type ))
+ return
+
+ SetVariable(output, 'TARGET', target_name)
+ SetVariable(output, 'TOOLSET', target_toolset)
+
+ cmake_target_name = namer.CreateCMakeTargetName(qualified_target)
+
+ extra_sources = []
+ extra_deps = []
+
+ # Actions must come first, since they can generate more OBJs for use below.
+ if 'actions' in spec:
+ WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps,
+ path_from_cmakelists_to_gyp, output)
+
+ # Rules must be early like actions.
+ if 'rules' in spec:
+ WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps,
+ path_from_cmakelists_to_gyp, output)
+
+ # Copies
+ if 'copies' in spec:
+ WriteCopies(cmake_target_name, spec['copies'], extra_deps,
+ path_from_cmakelists_to_gyp, output)
+
+ # Target and sources
+ srcs = spec.get('sources', [])
+
+ # Gyp separates the sheep from the goats based on file extensions.
+ # A full separation is done here because of flag handing (see below).
+ s_sources = []
+ c_sources = []
+ cxx_sources = []
+ linkable_sources = []
+ other_sources = []
+ for src in srcs:
+ _, ext = os.path.splitext(src)
+ src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+ src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src)
+
+ if src_type == 's':
+ s_sources.append(src_norm_path)
+ elif src_type == 'cc':
+ c_sources.append(src_norm_path)
+ elif src_type == 'cxx':
+ cxx_sources.append(src_norm_path)
+ elif Linkable(ext):
+ linkable_sources.append(src_norm_path)
+ else:
+ other_sources.append(src_norm_path)
+
+ for extra_source in extra_sources:
+ src, real_source = extra_source
+ _, ext = os.path.splitext(real_source)
+ src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+
+ if src_type == 's':
+ s_sources.append(src)
+ elif src_type == 'cc':
+ c_sources.append(src)
+ elif src_type == 'cxx':
+ cxx_sources.append(src)
+ elif Linkable(ext):
+ linkable_sources.append(src)
+ else:
+ other_sources.append(src)
+
+ s_sources_name = None
+ if s_sources:
+ s_sources_name = cmake_target_name + '__asm_srcs'
+ SetVariableList(output, s_sources_name, s_sources)
+
+ c_sources_name = None
+ if c_sources:
+ c_sources_name = cmake_target_name + '__c_srcs'
+ SetVariableList(output, c_sources_name, c_sources)
+
+ cxx_sources_name = None
+ if cxx_sources:
+ cxx_sources_name = cmake_target_name + '__cxx_srcs'
+ SetVariableList(output, cxx_sources_name, cxx_sources)
+
+ linkable_sources_name = None
+ if linkable_sources:
+ linkable_sources_name = cmake_target_name + '__linkable_srcs'
+ SetVariableList(output, linkable_sources_name, linkable_sources)
+
+ other_sources_name = None
+ if other_sources:
+ other_sources_name = cmake_target_name + '__other_srcs'
+ SetVariableList(output, other_sources_name, other_sources)
+
+ # CMake gets upset when executable targets provide no sources.
+ # http://www.cmake.org/pipermail/cmake/2010-July/038461.html
+ dummy_sources_name = None
+ has_sources = (s_sources_name or
+ c_sources_name or
+ cxx_sources_name or
+ linkable_sources_name or
+ other_sources_name)
+ if target_type == 'executable' and not has_sources:
+ dummy_sources_name = cmake_target_name + '__dummy_srcs'
+ SetVariable(output, dummy_sources_name,
+ "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c")
+ output.write('if(NOT EXISTS "')
+ WriteVariable(output, dummy_sources_name)
+ output.write('")\n')
+ output.write(' file(WRITE "')
+ WriteVariable(output, dummy_sources_name)
+ output.write('" "")\n')
+ output.write("endif()\n")
+
+
+ # CMake is opposed to setting linker directories and considers the practice
+ # of setting linker directories dangerous. Instead, it favors the use of
+ # find_library and passing absolute paths to target_link_libraries.
+ # However, CMake does provide the command link_directories, which adds
+ # link directories to targets defined after it is called.
+ # As a result, link_directories must come before the target definition.
+ # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
+ library_dirs = config.get('library_dirs')
+ if library_dirs is not None:
+ output.write('link_directories(')
+ for library_dir in library_dirs:
+ output.write(' ')
+ output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir))
+ output.write('\n')
+ output.write(')\n')
+
+ output.write(cmake_target_type.command)
+ output.write('(')
+ output.write(cmake_target_name)
+
+ if cmake_target_type.modifier is not None:
+ output.write(' ')
+ output.write(cmake_target_type.modifier)
+
+ if s_sources_name:
+ WriteVariable(output, s_sources_name, ' ')
+ if c_sources_name:
+ WriteVariable(output, c_sources_name, ' ')
+ if cxx_sources_name:
+ WriteVariable(output, cxx_sources_name, ' ')
+ if linkable_sources_name:
+ WriteVariable(output, linkable_sources_name, ' ')
+ if other_sources_name:
+ WriteVariable(output, other_sources_name, ' ')
+ if dummy_sources_name:
+ WriteVariable(output, dummy_sources_name, ' ')
+
+ output.write(')\n')
+
+ # Let CMake know if the 'all' target should depend on this target.
+ exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
+ else 'FALSE')
+ SetTargetProperty(output, cmake_target_name,
+ 'EXCLUDE_FROM_ALL', exclude_from_all)
+ for extra_target_name in extra_deps:
+ SetTargetProperty(output, extra_target_name,
+ 'EXCLUDE_FROM_ALL', exclude_from_all)
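+  # For example (with a hypothetical target name), this is expected to emit
+  # something along the lines of:
+  #   set_target_properties(my_target PROPERTIES EXCLUDE_FROM_ALL "TRUE")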
+
+ # Output name and location.
+ if target_type != 'none':
+ # Link as 'C' if there are no other files
+ if not c_sources and not cxx_sources:
+ SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
+
+ # Mark uncompiled sources as uncompiled.
+ if other_sources_name:
+ output.write('set_source_files_properties(')
+ WriteVariable(output, other_sources_name, '')
+ output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')
+
+ # Mark object sources as linkable.
+ if linkable_sources_name:
+ output.write('set_source_files_properties(')
+      WriteVariable(output, linkable_sources_name, '')
+ output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')
+
+ # Output directory
+ target_output_directory = spec.get('product_dir')
+ if target_output_directory is None:
+ if target_type in ('executable', 'loadable_module'):
+ target_output_directory = generator_default_variables['PRODUCT_DIR']
+ elif target_type == 'shared_library':
+ target_output_directory = '${builddir}/lib.${TOOLSET}'
+ elif spec.get('standalone_static_library', False):
+ target_output_directory = generator_default_variables['PRODUCT_DIR']
+ else:
+ base_path = gyp.common.RelativePath(os.path.dirname(gyp_file),
+ options.toplevel_dir)
+ target_output_directory = '${obj}.${TOOLSET}'
+ target_output_directory = (
+ os.path.join(target_output_directory, base_path))
+
+ cmake_target_output_directory = NormjoinPathForceCMakeSource(
+ path_from_cmakelists_to_gyp,
+ target_output_directory)
+ SetTargetProperty(output,
+ cmake_target_name,
+ cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY',
+ cmake_target_output_directory)
+
+ # Output name
+ default_product_prefix = ''
+ default_product_name = target_name
+ default_product_ext = ''
+ if target_type == 'static_library':
+ static_library_prefix = generator_default_variables['STATIC_LIB_PREFIX']
+ default_product_name = RemovePrefix(default_product_name,
+ static_library_prefix)
+ default_product_prefix = static_library_prefix
+ default_product_ext = generator_default_variables['STATIC_LIB_SUFFIX']
+
+ elif target_type in ('loadable_module', 'shared_library'):
+ shared_library_prefix = generator_default_variables['SHARED_LIB_PREFIX']
+ default_product_name = RemovePrefix(default_product_name,
+ shared_library_prefix)
+ default_product_prefix = shared_library_prefix
+ default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']
+
+ elif target_type != 'executable':
+    print('ERROR: What output file should be generated?',
+          'type', target_type, 'target', target_name)
+
+ product_prefix = spec.get('product_prefix', default_product_prefix)
+ product_name = spec.get('product_name', default_product_name)
+ product_ext = spec.get('product_extension')
+ if product_ext:
+ product_ext = '.' + product_ext
+ else:
+ product_ext = default_product_ext
+
+ SetTargetProperty(output, cmake_target_name, 'PREFIX', product_prefix)
+ SetTargetProperty(output, cmake_target_name,
+ cmake_target_type.property_modifier + '_OUTPUT_NAME',
+ product_name)
+ SetTargetProperty(output, cmake_target_name, 'SUFFIX', product_ext)
+
+ # Make the output of this target referenceable as a source.
+ cmake_target_output_basename = product_prefix + product_name + product_ext
+ cmake_target_output = os.path.join(cmake_target_output_directory,
+ cmake_target_output_basename)
+ SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')
+
+ # Includes
+ includes = config.get('include_dirs')
+ if includes:
+ # This (target include directories) is what requires CMake 2.8.8
+ includes_name = cmake_target_name + '__include_dirs'
+ SetVariableList(output, includes_name,
+ [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
+ for include in includes])
+ output.write('set_property(TARGET ')
+ output.write(cmake_target_name)
+ output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
+ WriteVariable(output, includes_name, '')
+ output.write(')\n')
+
+ # Defines
+ defines = config.get('defines')
+ if defines is not None:
+ SetTargetProperty(output,
+ cmake_target_name,
+ 'COMPILE_DEFINITIONS',
+ defines,
+ ';')
+
+ # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
+  # CMake currently does not have per-target C and CXX compile flags.
+ # So, instead of doing...
+
+ # cflags_c = config.get('cflags_c')
+ # if cflags_c is not None:
+ # SetTargetProperty(output, cmake_target_name,
+ # 'C_COMPILE_FLAGS', cflags_c, ' ')
+
+ # cflags_cc = config.get('cflags_cc')
+ # if cflags_cc is not None:
+ # SetTargetProperty(output, cmake_target_name,
+ # 'CXX_COMPILE_FLAGS', cflags_cc, ' ')
+
+ # Instead we must...
+ cflags = config.get('cflags', [])
+ cflags_c = config.get('cflags_c', [])
+ cflags_cxx = config.get('cflags_cc', [])
+ if xcode_settings:
+ cflags = xcode_settings.GetCflags(config_to_use)
+ cflags_c = xcode_settings.GetCflagsC(config_to_use)
+ cflags_cxx = xcode_settings.GetCflagsCC(config_to_use)
+ #cflags_objc = xcode_settings.GetCflagsObjC(config_to_use)
+ #cflags_objcc = xcode_settings.GetCflagsObjCC(config_to_use)
+
+ if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
+ SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')
+
+ elif c_sources and not (s_sources or cxx_sources):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_c)
+ SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
+
+ elif cxx_sources and not (s_sources or c_sources):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_cxx)
+ SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
+
+ else:
+ # TODO: This is broken, one cannot generally set properties on files,
+ # as other targets may require different properties on the same files.
+ if s_sources and cflags:
+ SetFilesProperty(output, s_sources_name, 'COMPILE_FLAGS', cflags, ' ')
+
+ if c_sources and (cflags or cflags_c):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_c)
+ SetFilesProperty(output, c_sources_name, 'COMPILE_FLAGS', flags, ' ')
+
+ if cxx_sources and (cflags or cflags_cxx):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_cxx)
+ SetFilesProperty(output, cxx_sources_name, 'COMPILE_FLAGS', flags, ' ')
+
+ # Linker flags
+ ldflags = config.get('ldflags')
+ if ldflags is not None:
+ SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
+
+ # XCode settings
+ xcode_settings = config.get('xcode_settings', {})
+  for xcode_setting, xcode_value in xcode_settings.items():
+ SetTargetProperty(output, cmake_target_name,
+ "XCODE_ATTRIBUTE_%s" % xcode_setting, xcode_value,
+ '' if isinstance(xcode_value, str) else ' ')
+
+ # Note on Dependencies and Libraries:
+ # CMake wants to handle link order, resolving the link line up front.
+ # Gyp does not retain or enforce specifying enough information to do so.
+ # So do as other gyp generators and use --start-group and --end-group.
+ # Give CMake as little information as possible so that it doesn't mess it up.
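+  # Sketch of the intended result (target and library names below are
+  # hypothetical): for a linkable target with static, shared and external
+  # dependencies, the emitted link line looks roughly like
+  #   target_link_libraries(my_target
+  #     -Wl,--start-group
+  #     foo_static
+  #     bar_static
+  #     -Wl,--end-group
+  #     baz_shared
+  #     "z"
+  #   )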
+
+ # Dependencies
+ rawDeps = spec.get('dependencies', [])
+
+ static_deps = []
+ shared_deps = []
+ other_deps = []
+ for rawDep in rawDeps:
+ dep_cmake_name = namer.CreateCMakeTargetName(rawDep)
+ dep_spec = target_dicts.get(rawDep, {})
+ dep_target_type = dep_spec.get('type', None)
+
+ if dep_target_type == 'static_library':
+ static_deps.append(dep_cmake_name)
+ elif dep_target_type == 'shared_library':
+ shared_deps.append(dep_cmake_name)
+ else:
+ other_deps.append(dep_cmake_name)
+
+  # Ensure all external dependencies are complete before internal dependencies;
+  # extra_deps currently depend only on their own deps, so they would otherwise
+  # run too early.
+ if static_deps or shared_deps or other_deps:
+ for extra_dep in extra_deps:
+ output.write('add_dependencies(')
+ output.write(extra_dep)
+ output.write('\n')
+ for deps in (static_deps, shared_deps, other_deps):
+ for dep in gyp.common.uniquer(deps):
+ output.write(' ')
+ output.write(dep)
+ output.write('\n')
+ output.write(')\n')
+
+ linkable = target_type in ('executable', 'loadable_module', 'shared_library')
+ other_deps.extend(extra_deps)
+ if other_deps or (not linkable and (static_deps or shared_deps)):
+ output.write('add_dependencies(')
+ output.write(cmake_target_name)
+ output.write('\n')
+ for dep in gyp.common.uniquer(other_deps):
+ output.write(' ')
+ output.write(dep)
+ output.write('\n')
+ if not linkable:
+ for deps in (static_deps, shared_deps):
+ for lib_dep in gyp.common.uniquer(deps):
+ output.write(' ')
+ output.write(lib_dep)
+ output.write('\n')
+ output.write(')\n')
+
+ # Libraries
+ if linkable:
+ external_libs = [lib for lib in spec.get('libraries', []) if len(lib) > 0]
+ if external_libs or static_deps or shared_deps:
+ output.write('target_link_libraries(')
+ output.write(cmake_target_name)
+ output.write('\n')
+ if static_deps:
+ write_group = circular_libs and len(static_deps) > 1 and flavor != 'mac'
+ if write_group:
+ output.write('-Wl,--start-group\n')
+ for dep in gyp.common.uniquer(static_deps):
+ output.write(' ')
+ output.write(dep)
+ output.write('\n')
+ if write_group:
+ output.write('-Wl,--end-group\n')
+ if shared_deps:
+ for dep in gyp.common.uniquer(shared_deps):
+ output.write(' ')
+ output.write(dep)
+ output.write('\n')
+ if external_libs:
+ for lib in gyp.common.uniquer(external_libs):
+ output.write(' "')
+ output.write(RemovePrefix(lib, "$(SDKROOT)"))
+ output.write('"\n')
+
+ output.write(')\n')
+
+ UnsetVariable(output, 'TOOLSET')
+ UnsetVariable(output, 'TARGET')
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data,
+ params, config_to_use):
+ options = params['options']
+ generator_flags = params['generator_flags']
+ flavor = gyp.common.GetFlavor(params)
+
+ # generator_dir: relative path from pwd to where make puts build files.
+  # Makes migrating from make to cmake easier; cmake doesn't put anything here.
+ # Each Gyp configuration creates a different CMakeLists.txt file
+ # to avoid incompatibilities between Gyp and CMake configurations.
+ generator_dir = os.path.relpath(options.generator_output or '.')
+
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = generator_flags.get('output_dir', 'out')
+
+ # build_dir: relative path from source root to our output files.
+ # e.g. "out/Debug"
+ build_dir = os.path.normpath(os.path.join(generator_dir,
+ output_dir,
+ config_to_use))
+
+ toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+
+ output_file = os.path.join(toplevel_build, 'CMakeLists.txt')
+ gyp.common.EnsureDirExists(output_file)
+
+ output = open(output_file, 'w')
+ output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
+ output.write('cmake_policy(VERSION 2.8.8)\n')
+
+ gyp_file, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
+ output.write('project(')
+ output.write(project_target)
+ output.write(')\n')
+
+ SetVariable(output, 'configuration', config_to_use)
+
+ ar = None
+ cc = None
+ cxx = None
+
+ make_global_settings = data[gyp_file].get('make_global_settings', [])
+ build_to_top = gyp.common.InvertRelativePath(build_dir,
+ options.toplevel_dir)
+ for key, value in make_global_settings:
+ if key == 'AR':
+ ar = os.path.join(build_to_top, value)
+ if key == 'CC':
+ cc = os.path.join(build_to_top, value)
+ if key == 'CXX':
+ cxx = os.path.join(build_to_top, value)
+
+ ar = gyp.common.GetEnvironFallback(['AR_target', 'AR'], ar)
+ cc = gyp.common.GetEnvironFallback(['CC_target', 'CC'], cc)
+ cxx = gyp.common.GetEnvironFallback(['CXX_target', 'CXX'], cxx)
+
+ if ar:
+ SetVariable(output, 'CMAKE_AR', ar)
+ if cc:
+ SetVariable(output, 'CMAKE_C_COMPILER', cc)
+ if cxx:
+ SetVariable(output, 'CMAKE_CXX_COMPILER', cxx)
+
+ # The following appears to be as-yet undocumented.
+ # http://public.kitware.com/Bug/view.php?id=8392
+ output.write('enable_language(ASM)\n')
+ # ASM-ATT does not support .S files.
+ # output.write('enable_language(ASM-ATT)\n')
+
+ if cc:
+ SetVariable(output, 'CMAKE_ASM_COMPILER', cc)
+
+ SetVariable(output, 'builddir', '${CMAKE_CURRENT_BINARY_DIR}')
+ SetVariable(output, 'obj', '${builddir}/obj')
+ output.write('\n')
+
+ # TODO: Undocumented/unsupported (the CMake Java generator depends on it).
+ # CMake by default names the object resulting from foo.c to be foo.c.o.
+ # Gyp traditionally names the object resulting from foo.c foo.o.
+ # This should be irrelevant, but some targets extract .o files from .a
+ # and depend on the name of the extracted .o files.
+ output.write('set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n')
+ output.write('set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n')
+ output.write('\n')
+
+ # Force ninja to use rsp files. Otherwise link and ar lines can get too long,
+ # resulting in 'Argument list too long' errors.
+ # However, rsp files don't work correctly on Mac.
+ if flavor != 'mac':
+ output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
+ output.write('\n')
+
+ namer = CMakeNamer(target_list)
+
+ # The list of targets upon which the 'all' target should depend.
+  # CMake has its own implicit 'all' target; one is not created explicitly.
+ all_qualified_targets = set()
+ for build_file in params['build_files']:
+ for qualified_target in gyp.common.AllTargets(target_list,
+ target_dicts,
+ os.path.normpath(build_file)):
+ all_qualified_targets.add(qualified_target)
+
+ for qualified_target in target_list:
+ if flavor == 'mac':
+ gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+ spec = target_dicts[qualified_target]
+ gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[gyp_file], spec)
+
+ WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
+ options, generator_flags, all_qualified_targets, flavor, output)
+
+ output.close()
+
+
+def PerformBuild(data, configurations, params):
+ options = params['options']
+ generator_flags = params['generator_flags']
+
+ # generator_dir: relative path from pwd to where make puts build files.
+  # Makes migrating from make to cmake easier; cmake doesn't put anything here.
+ generator_dir = os.path.relpath(options.generator_output or '.')
+
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = generator_flags.get('output_dir', 'out')
+
+ for config_name in configurations:
+ # build_dir: relative path from source root to our output files.
+ # e.g. "out/Debug"
+ build_dir = os.path.normpath(os.path.join(generator_dir,
+ output_dir,
+ config_name))
+ arguments = ['cmake', '-G', 'Ninja']
+ print('Generating [%s]: %s' % (config_name, arguments))
+ subprocess.check_call(arguments, cwd=build_dir)
+
+ arguments = ['ninja', '-C', build_dir]
+ print('Building [%s]: %s' % (config_name, arguments))
+ subprocess.check_call(arguments)
+
+
+def CallGenerateOutputForConfig(arglist):
+ # Ignore the interrupt signal so that the parent process catches it and
+ # kills all multiprocessing children.
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ target_list, target_dicts, data, params, config_name = arglist
+ GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ user_config = params.get('generator_flags', {}).get('config', None)
+ if user_config:
+ GenerateOutputForConfig(target_list, target_dicts, data,
+ params, user_config)
+ else:
+ config_names = target_dicts[target_list[0]]['configurations']
+ if params['parallel']:
+ try:
+ pool = multiprocessing.Pool(len(config_names))
+ arglists = []
+ for config_name in config_names:
+ arglists.append((target_list, target_dicts, data,
+ params, config_name))
+ pool.map(CallGenerateOutputForConfig, arglists)
+ except KeyboardInterrupt as e:
+ pool.terminate()
+ raise e
+ else:
+ for config_name in config_names:
+ GenerateOutputForConfig(target_list, target_dicts, data,
+ params, config_name)
diff --git a/third_party/python/gyp/pylib/gyp/generator/dump_dependency_json.py b/third_party/python/gyp/pylib/gyp/generator/dump_dependency_json.py
new file mode 100644
index 0000000000..2bf3f397d6
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/dump_dependency_json.py
@@ -0,0 +1,101 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import collections
+import os
+import gyp
+import gyp.common
+import gyp.msvs_emulation
+import json
+import sys
+
+generator_supports_multiple_toolsets = True
+
+generator_wants_static_library_dependencies_adjusted = False
+
+generator_filelist_paths = {
+}
+
+generator_default_variables = {
+}
+for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
+ 'LIB_DIR', 'SHARED_LIB_DIR']:
+ # Some gyp steps fail if these are empty(!).
+ generator_default_variables[dirname] = 'dir'
+for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
+ 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
+ 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
+ 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
+ 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
+ 'CONFIGURATION_NAME']:
+ generator_default_variables[unused] = ''
+
+
+def CalculateVariables(default_variables, params):
+ generator_flags = params.get('generator_flags', {})
+ for key, val in generator_flags.items():
+ default_variables.setdefault(key, val)
+ default_variables.setdefault('OS', gyp.common.GetFlavor(params))
+
+ flavor = gyp.common.GetFlavor(params)
+  if flavor == 'win':
+ # Copy additional generator configuration data from VS, which is shared
+ # by the Windows Ninja generator.
+ import gyp.generator.msvs as msvs_generator
+ generator_additional_non_configuration_keys = getattr(msvs_generator,
+ 'generator_additional_non_configuration_keys', [])
+ generator_additional_path_sections = getattr(msvs_generator,
+ 'generator_additional_path_sections', [])
+
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
+
+
+def CalculateGeneratorInputInfo(params):
+ """Calculate the generator specific info that gets fed to input (called by
+ gyp)."""
+ generator_flags = params.get('generator_flags', {})
+ if generator_flags.get('adjust_static_libraries', False):
+ global generator_wants_static_library_dependencies_adjusted
+ generator_wants_static_library_dependencies_adjusted = True
+
+ toplevel = params['options'].toplevel_dir
+ generator_dir = os.path.relpath(params['options'].generator_output or '.')
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = generator_flags.get('output_dir', 'out')
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, generator_dir, output_dir, 'gypfiles'))
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': toplevel,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ # Map of target -> list of targets it depends on.
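+  # The dumped JSON maps fully qualified targets to their direct dependencies,
+  # roughly (file and target names below are hypothetical):
+  #   {"a/a.gyp:foo#target": ["b/b.gyp:bar#target"], "b/b.gyp:bar#target": []}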
+ edges = {}
+
+ # Queue of targets to visit.
+ targets_to_visit = target_list[:]
+
+ while len(targets_to_visit) > 0:
+ target = targets_to_visit.pop()
+ if target in edges:
+ continue
+ edges[target] = []
+
+ for dep in target_dicts[target].get('dependencies', []):
+ edges[target].append(dep)
+ targets_to_visit.append(dep)
+
+ try:
+ filepath = params['generator_flags']['output_dir']
+ except KeyError:
+ filepath = '.'
+ filename = os.path.join(filepath, 'dump.json')
+ f = open(filename, 'w')
+ json.dump(edges, f)
+ f.close()
+ print('Wrote json to %s.' % filename)
diff --git a/third_party/python/gyp/pylib/gyp/generator/eclipse.py b/third_party/python/gyp/pylib/gyp/generator/eclipse.py
new file mode 100644
index 0000000000..d039f03a2c
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/eclipse.py
@@ -0,0 +1,425 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""GYP backend that generates Eclipse CDT settings files.
+
+This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
+files that can be imported into an Eclipse CDT project. The XML file contains a
+list of include paths and symbols (i.e. defines).
+
+Because a full .cproject definition is not created by this generator, it's not
+possible to properly define the include dirs and symbols for each file
+individually. Instead, one set of includes/symbols is generated for the entire
+project. This works fairly well (and is a vast improvement in general), but may
+still result in a few indexer issues here and there.
+
+This generator has no automated tests, so expect it to be broken.
+"""
+
+from xml.sax.saxutils import escape
+import os.path
+import subprocess
+import gyp
+import gyp.common
+import gyp.msvs_emulation
+import shlex
+import xml.etree.cElementTree as ET
+
+generator_wants_static_library_dependencies_adjusted = False
+
+generator_default_variables = {
+}
+
+for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']:
+ # Some gyp steps fail if these are empty(!), so we convert them to variables
+ generator_default_variables[dirname] = '$' + dirname
+
+for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
+ 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
+ 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
+ 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
+ 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
+ 'CONFIGURATION_NAME']:
+ generator_default_variables[unused] = ''
+
+# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
+# part of the path when dealing with generated headers. This value will be
+# replaced dynamically for each configuration.
+generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \
+ '$SHARED_INTERMEDIATE_DIR'
+
+
+def CalculateVariables(default_variables, params):
+ generator_flags = params.get('generator_flags', {})
+ for key, val in generator_flags.items():
+ default_variables.setdefault(key, val)
+ flavor = gyp.common.GetFlavor(params)
+ default_variables.setdefault('OS', flavor)
+ if flavor == 'win':
+ # Copy additional generator configuration data from VS, which is shared
+ # by the Eclipse generator.
+ import gyp.generator.msvs as msvs_generator
+ generator_additional_non_configuration_keys = getattr(msvs_generator,
+ 'generator_additional_non_configuration_keys', [])
+ generator_additional_path_sections = getattr(msvs_generator,
+ 'generator_additional_path_sections', [])
+
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
+
+
+def CalculateGeneratorInputInfo(params):
+ """Calculate the generator specific info that gets fed to input (called by
+ gyp)."""
+ generator_flags = params.get('generator_flags', {})
+ if generator_flags.get('adjust_static_libraries', False):
+ global generator_wants_static_library_dependencies_adjusted
+ generator_wants_static_library_dependencies_adjusted = True
+
+
+def GetAllIncludeDirectories(target_list, target_dicts,
+ shared_intermediate_dirs, config_name, params,
+ compiler_path):
+ """Calculate the set of include directories to be used.
+
+ Returns:
+    A list including all the include_dirs specified for every target, followed
+ by any include directories that were added as cflag compiler options.
+ """
+
+ gyp_includes_set = set()
+ compiler_includes_list = []
+
+ # Find compiler's default include dirs.
+ if compiler_path:
+ command = shlex.split(compiler_path)
+ command.extend(['-E', '-xc++', '-v', '-'])
+ proc = subprocess.Popen(args=command, stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ output = proc.communicate()[1]
+ # Extract the list of include dirs from the output, which has this format:
+ # ...
+ # #include "..." search starts here:
+ # #include <...> search starts here:
+ # /usr/include/c++/4.6
+ # /usr/local/include
+ # End of search list.
+ # ...
+ in_include_list = False
+ for line in output.splitlines():
+ if line.startswith('#include'):
+ in_include_list = True
+ continue
+ if line.startswith('End of search list.'):
+ break
+ if in_include_list:
+ include_dir = line.strip()
+ if include_dir not in compiler_includes_list:
+ compiler_includes_list.append(include_dir)
+
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == 'win':
+ generator_flags = params.get('generator_flags', {})
+ for target_name in target_list:
+ target = target_dicts[target_name]
+ if config_name in target['configurations']:
+ config = target['configurations'][config_name]
+
+ # Look for any include dirs that were explicitly added via cflags. This
+ # may be done in gyp files to force certain includes to come at the end.
+ # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
+ # remove this.
+ if flavor == 'win':
+ msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
+ cflags = msvs_settings.GetCflags(config_name)
+ else:
+ cflags = config['cflags']
+ for cflag in cflags:
+ if cflag.startswith('-I'):
+ include_dir = cflag[2:]
+ if include_dir not in compiler_includes_list:
+ compiler_includes_list.append(include_dir)
+
+ # Find standard gyp include dirs.
+ if 'include_dirs' in config:
+ include_dirs = config['include_dirs']
+ for shared_intermediate_dir in shared_intermediate_dirs:
+ for include_dir in include_dirs:
+ include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR',
+ shared_intermediate_dir)
+ if not os.path.isabs(include_dir):
+ base_dir = os.path.dirname(target_name)
+
+ include_dir = base_dir + '/' + include_dir
+ include_dir = os.path.abspath(include_dir)
+
+ gyp_includes_set.add(include_dir)
+
+ # Generate a list that has all the include dirs.
+ all_includes_list = list(gyp_includes_set)
+ all_includes_list.sort()
+ for compiler_include in compiler_includes_list:
+    if compiler_include not in gyp_includes_set:
+ all_includes_list.append(compiler_include)
+
+ # All done.
+ return all_includes_list
+
+
+def GetCompilerPath(target_list, data, options):
+ """Determine a command that can be used to invoke the compiler.
+
+ Returns:
+ If this is a gyp project that has explicit make settings, try to determine
+ the compiler from that. Otherwise, see if a compiler was specified via the
+ CC_target environment variable.
+ """
+ # First, see if the compiler is configured in make's settings.
+ build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
+ make_global_settings_dict = data[build_file].get('make_global_settings', {})
+ for key, value in make_global_settings_dict:
+ if key in ['CC', 'CXX']:
+ return os.path.join(options.toplevel_dir, value)
+
+ # Check to see if the compiler was specified as an environment variable.
+ for key in ['CC_target', 'CC', 'CXX']:
+ compiler = os.environ.get(key)
+ if compiler:
+ return compiler
+
+ return 'gcc'
+
+
+def GetAllDefines(target_list, target_dicts, data, config_name, params,
+ compiler_path):
+ """Calculate the defines for a project.
+
+ Returns:
+    A dict that includes explicit defines declared in gyp files along with all of
+ the default defines that the compiler uses.
+ """
+
+ # Get defines declared in the gyp files.
+ all_defines = {}
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == 'win':
+ generator_flags = params.get('generator_flags', {})
+ for target_name in target_list:
+ target = target_dicts[target_name]
+
+ if flavor == 'win':
+ msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
+ extra_defines = msvs_settings.GetComputedDefines(config_name)
+ else:
+ extra_defines = []
+ if config_name in target['configurations']:
+ config = target['configurations'][config_name]
+ target_defines = config['defines']
+ else:
+ target_defines = []
+ for define in target_defines + extra_defines:
+ split_define = define.split('=', 1)
+ if len(split_define) == 1:
+ split_define.append('1')
+ if split_define[0].strip() in all_defines:
+ # Already defined
+ continue
+ all_defines[split_define[0].strip()] = split_define[1].strip()
+ # Get default compiler defines (if possible).
+ if flavor == 'win':
+ return all_defines # Default defines already processed in the loop above.
+ if compiler_path:
+ command = shlex.split(compiler_path)
+ command.extend(['-E', '-dM', '-'])
+ cpp_proc = subprocess.Popen(args=command, cwd='.',
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ cpp_output = cpp_proc.communicate()[0]
+ cpp_lines = cpp_output.split('\n')
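+    # Each -dM line has the form "#define NAME VALUE"; splitting on the first
+    # two spaces yields the macro name and its (possibly multi-word) value,
+    # e.g. "#define __GNUC__ 4" becomes {'__GNUC__': '4'}.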
+ for cpp_line in cpp_lines:
+ if not cpp_line.strip():
+ continue
+ cpp_line_parts = cpp_line.split(' ', 2)
+ key = cpp_line_parts[1]
+ if len(cpp_line_parts) >= 3:
+ val = cpp_line_parts[2]
+ else:
+ val = '1'
+ all_defines[key] = val
+
+ return all_defines
+
+
+def WriteIncludePaths(out, eclipse_langs, include_dirs):
+ """Write the includes section of a CDT settings export file."""
+
+ out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' \
+ 'settingswizards.IncludePaths">\n')
+ out.write(' <language name="holder for library settings"></language>\n')
+ for lang in eclipse_langs:
+ out.write(' <language name="%s">\n' % lang)
+ for include_dir in include_dirs:
+ out.write(' <includepath workspace_path="false">%s</includepath>\n' %
+ include_dir)
+ out.write(' </language>\n')
+ out.write(' </section>\n')
+
+
+def WriteMacros(out, eclipse_langs, defines):
+ """Write the macros section of a CDT settings export file."""
+
+ out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' \
+ 'settingswizards.Macros">\n')
+ out.write(' <language name="holder for library settings"></language>\n')
+ for lang in eclipse_langs:
+ out.write(' <language name="%s">\n' % lang)
+ for key in sorted(defines.keys()):
+ out.write(' <macro><name>%s</name><value>%s</value></macro>\n' %
+ (escape(key), escape(defines[key])))
+ out.write(' </language>\n')
+ out.write(' </section>\n')
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data, params,
+ config_name):
+ options = params['options']
+ generator_flags = params.get('generator_flags', {})
+
+ # build_dir: relative path from source root to our output files.
+ # e.g. "out/Debug"
+ build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
+ config_name)
+
+ toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+ # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
+ # SHARED_INTERMEDIATE_DIR. Include both possible locations.
+ shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
+ os.path.join(toplevel_build, 'gen')]
+
+ GenerateCdtSettingsFile(target_list,
+ target_dicts,
+ data,
+ params,
+ config_name,
+ os.path.join(toplevel_build,
+ 'eclipse-cdt-settings.xml'),
+ options,
+ shared_intermediate_dirs)
+ GenerateClasspathFile(target_list,
+ target_dicts,
+ options.toplevel_dir,
+ toplevel_build,
+ os.path.join(toplevel_build,
+ 'eclipse-classpath.xml'))
+
+
+def GenerateCdtSettingsFile(target_list, target_dicts, data, params,
+ config_name, out_name, options,
+ shared_intermediate_dirs):
+ gyp.common.EnsureDirExists(out_name)
+ with open(out_name, 'w') as out:
+ out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
+ out.write('<cdtprojectproperties>\n')
+
+ eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
+ 'GNU C++', 'GNU C', 'Assembly']
+ compiler_path = GetCompilerPath(target_list, data, options)
+ include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
+ shared_intermediate_dirs,
+ config_name, params, compiler_path)
+ WriteIncludePaths(out, eclipse_langs, include_dirs)
+ defines = GetAllDefines(target_list, target_dicts, data, config_name,
+ params, compiler_path)
+ WriteMacros(out, eclipse_langs, defines)
+
+ out.write('</cdtprojectproperties>\n')
+
+
+def GenerateClasspathFile(target_list, target_dicts, toplevel_dir,
+ toplevel_build, out_name):
+ '''Generates a classpath file suitable for symbol navigation and code
+ completion of Java code (such as in Android projects) by finding all
+ .java and .jar files used as action inputs.'''
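+  # The generated document is a standard Eclipse .classpath file; entries come
+  # out roughly as (paths below are hypothetical):
+  #   <classpathentry kind="lib" path="third_party/foo.jar"/>
+  #   <classpathentry kind="src" path="java/src"/>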
+ gyp.common.EnsureDirExists(out_name)
+ result = ET.Element('classpath')
+
+ def AddElements(kind, paths):
+ # First, we need to normalize the paths so they are all relative to the
+ # toplevel dir.
+ rel_paths = set()
+ for path in paths:
+ if os.path.isabs(path):
+ rel_paths.add(os.path.relpath(path, toplevel_dir))
+ else:
+ rel_paths.add(path)
+
+ for path in sorted(rel_paths):
+ entry_element = ET.SubElement(result, 'classpathentry')
+ entry_element.set('kind', kind)
+ entry_element.set('path', path)
+
+ AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir))
+ AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
+ # Include the standard JRE container and a dummy out folder
+ AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER'])
+ # Include a dummy out folder so that Eclipse doesn't use the default /bin
+ # folder in the root of the project.
+ AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')])
+
+ ET.ElementTree(result).write(out_name)
+
+
+def GetJavaJars(target_list, target_dicts, toplevel_dir):
+ '''Generates a sequence of all .jars used as inputs.'''
+ for target_name in target_list:
+ target = target_dicts[target_name]
+ for action in target.get('actions', []):
+ for input_ in action['inputs']:
+ if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'):
+ if os.path.isabs(input_):
+ yield input_
+ else:
+ yield os.path.join(os.path.dirname(target_name), input_)
+
+
+def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
+ '''Generates a sequence of all likely java package root directories.'''
+ for target_name in target_list:
+ target = target_dicts[target_name]
+ for action in target.get('actions', []):
+ for input_ in action['inputs']:
+ if (os.path.splitext(input_)[1] == '.java' and
+ not input_.startswith('$')):
+ dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name),
+ input_))
+ # If there is a parent 'src' or 'java' folder, navigate up to it -
+ # these are canonical package root names in Chromium. This will
+ # break if 'src' or 'java' exists in the package structure. This
+ # could be further improved by inspecting the java file for the
+ # package name if this proves to be too fragile in practice.
+ parent_search = dir_
+ while os.path.basename(parent_search) not in ['src', 'java']:
+ parent_search, _ = os.path.split(parent_search)
+ if not parent_search or parent_search == toplevel_dir:
+ # Didn't find a known root, just return the original path
+ yield dir_
+ break
+ else:
+ yield parent_search
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ """Generate an XML settings file that can be imported into a CDT project."""
+
+ if params['options'].generator_output:
+ raise NotImplementedError("--generator_output not implemented for eclipse")
+
+ user_config = params.get('generator_flags', {}).get('config', None)
+ if user_config:
+ GenerateOutputForConfig(target_list, target_dicts, data, params,
+ user_config)
+ else:
+ config_names = target_dicts[target_list[0]]['configurations']
+ for config_name in config_names:
+ GenerateOutputForConfig(target_list, target_dicts, data, params,
+ config_name)
+
diff --git a/third_party/python/gyp/pylib/gyp/generator/gypd.py b/third_party/python/gyp/pylib/gyp/generator/gypd.py
new file mode 100644
index 0000000000..78eeaa61b2
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/gypd.py
@@ -0,0 +1,94 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""gypd output module
+
+This module produces gyp input as its output. Output files are given the
+.gypd extension to avoid overwriting the .gyp files that they are generated
+from. Internal references to .gyp files (such as those found in
+"dependencies" sections) are not adjusted to point to .gypd files instead;
+unlike other paths, which are relative to the .gyp or .gypd file, such paths
+are relative to the directory from which gyp was run to create the .gypd file.
+
+This generator module is intended to be a sample and a debugging aid, hence
+the "d" for "debug" in .gypd. It is useful to inspect the results of the
+various merges, expansions, and conditional evaluations performed by gyp
+and to see a representation of what would be fed to a generator module.
+
+It's not advisable to rename .gypd files produced by this module to .gyp,
+because they will have all merges, expansions, and evaluations already
+performed and the relevant constructs not present in the output; paths to
+dependencies may be wrong; and various sections that do not belong in .gyp
+files such as "included_files" and "*_excluded" will be present.
+Output will also be stripped of comments. This is not intended to be a
+general-purpose gyp pretty-printer; for that, you probably just want to
+run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
+comments but won't do all of the other things done to this module's output.
+
+The specific formatting of the output generated by this module is subject
+to change.
+"""
+
+
+import gyp.common
+import errno
+import os
+import pprint
+
+
+# These variables should just be spit back out as variable references.
+_generator_identity_variables = [
+ 'CONFIGURATION_NAME',
+ 'EXECUTABLE_PREFIX',
+ 'EXECUTABLE_SUFFIX',
+ 'INTERMEDIATE_DIR',
+ 'LIB_DIR',
+ 'PRODUCT_DIR',
+ 'RULE_INPUT_ROOT',
+ 'RULE_INPUT_DIRNAME',
+ 'RULE_INPUT_EXT',
+ 'RULE_INPUT_NAME',
+ 'RULE_INPUT_PATH',
+ 'SHARED_INTERMEDIATE_DIR',
+ 'SHARED_LIB_DIR',
+ 'SHARED_LIB_PREFIX',
+ 'SHARED_LIB_SUFFIX',
+ 'STATIC_LIB_PREFIX',
+ 'STATIC_LIB_SUFFIX',
+]
+
+# gypd doesn't define a default value for OS like many other generator
+# modules. Specify "-D OS=whatever" on the command line to provide a value.
+generator_default_variables = {
+}
+
+# gypd supports multiple toolsets
+generator_supports_multiple_toolsets = True
+
+# TODO(mark): This always uses <, which isn't right. The input module should
+# notify the generator to tell it which phase it is operating in, and this
+# module should use < for the early phase and then switch to > for the late
+# phase. Bonus points for carrying @ back into the output too.
+for v in _generator_identity_variables:
+ generator_default_variables[v] = '<(%s)' % v
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ output_files = {}
+ for qualified_target in target_list:
+ [input_file, target] = \
+ gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
+
+ if input_file[-4:] != '.gyp':
+ continue
+ input_file_stem = input_file[:-4]
+ output_file = input_file_stem + params['options'].suffix + '.gypd'
+
+    if output_file not in output_files:
+ output_files[output_file] = input_file
+
+ for output_file, input_file in output_files.items():
+ output = open(output_file, 'w')
+ pprint.pprint(data[input_file], output)
+ output.close()
diff --git a/third_party/python/gyp/pylib/gyp/generator/gypsh.py b/third_party/python/gyp/pylib/gyp/generator/gypsh.py
new file mode 100644
index 0000000000..bd405f43a9
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/gypsh.py
@@ -0,0 +1,56 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""gypsh output module
+
+gypsh is a GYP shell. It's not really a generator per se. All it does is
+fire up an interactive Python session with a few local variables set to the
+variables passed to the generator. Like gypd, it's intended as a debugging
+aid, to facilitate the exploration of .gyp structures after being processed
+by the input module.
+
+The expected usage is "gyp -f gypsh -D OS=desired_os".
+"""
+
+
+import code
+import sys
+
+
+# All of this stuff about generator variables was lovingly ripped from gypd.py.
+# That module has a much better description of what's going on and why.
+_generator_identity_variables = [
+ 'EXECUTABLE_PREFIX',
+ 'EXECUTABLE_SUFFIX',
+ 'INTERMEDIATE_DIR',
+ 'PRODUCT_DIR',
+ 'RULE_INPUT_ROOT',
+ 'RULE_INPUT_DIRNAME',
+ 'RULE_INPUT_EXT',
+ 'RULE_INPUT_NAME',
+ 'RULE_INPUT_PATH',
+ 'SHARED_INTERMEDIATE_DIR',
+]
+
+generator_default_variables = {
+}
+
+for v in _generator_identity_variables:
+ generator_default_variables[v] = '<(%s)' % v
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ locals = {
+ 'target_list': target_list,
+ 'target_dicts': target_dicts,
+ 'data': data,
+ }
+
+ # Use a banner that looks like the stock Python one and like what
+ # code.interact uses by default, but tack on something to indicate what
+ # locals are available, and identify gypsh.
+  banner = 'Python %s on %s\nlocals.keys() = %s\ngypsh' % \
+ (sys.version, sys.platform, repr(sorted(locals.keys())))
+
+ code.interact(banner, local=locals)
diff --git a/third_party/python/gyp/pylib/gyp/generator/make.py b/third_party/python/gyp/pylib/gyp/generator/make.py
new file mode 100644
index 0000000000..997eec0866
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/make.py
@@ -0,0 +1,2260 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Notes:
+#
+# This is all roughly based on the Makefile system used by the Linux
+# kernel, but is a non-recursive make -- we put the entire dependency
+# graph in front of make and let it figure it out.
+#
+# The code below generates a separate .mk file for each target, but
+# all are sourced by the top-level Makefile. This means that all
+# variables in .mk-files clobber one another. Be careful to use :=
+# where appropriate for immediate evaluation, and similarly to watch
+# that you're not relying on a variable value to last between different
+# .mk files.
+#
+# TODOs:
+#
+# Global settings and utility functions are currently stuffed in the
+# toplevel Makefile. It may make sense to generate some .mk files on
+# the side to keep the files readable.
+
+from __future__ import print_function
+
+import os
+import re
+import sys
+import subprocess
+import gyp
+import gyp.common
+import gyp.xcode_emulation
+from gyp.common import GetEnvironFallback
+from gyp.common import GypError
+
+import hashlib
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'STATIC_LIB_PREFIX': 'lib',
+ 'SHARED_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_SUFFIX': '.a',
+ 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni',
+ 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
+ 'PRODUCT_DIR': '$(builddir)',
+ 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
+ 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
+ 'RULE_INPUT_PATH': '$(abspath $<)',
+ 'RULE_INPUT_EXT': '$(suffix $<)',
+ 'RULE_INPUT_NAME': '$(notdir $<)',
+ 'CONFIGURATION_NAME': '$(BUILDTYPE)',
+}
+
+# Make supports multiple toolsets
+generator_supports_multiple_toolsets = True
+
+# Request sorted dependencies in the order from dependents to dependencies.
+generator_wants_sorted_dependencies = False
+
+# Placates pylint.
+generator_additional_non_configuration_keys = []
+generator_additional_path_sections = []
+generator_extra_sources_for_rules = []
+generator_filelist_paths = None
+
+
+def CalculateVariables(default_variables, params):
+ """Calculate additional variables for use in the build (called by gyp)."""
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == 'mac':
+ default_variables.setdefault('OS', 'mac')
+ default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
+ default_variables.setdefault('SHARED_LIB_DIR',
+ generator_default_variables['PRODUCT_DIR'])
+ default_variables.setdefault('LIB_DIR',
+ generator_default_variables['PRODUCT_DIR'])
+
+ # Copy additional generator configuration data from Xcode, which is shared
+ # by the Mac Make generator.
+ import gyp.generator.xcode as xcode_generator
+ global generator_additional_non_configuration_keys
+ generator_additional_non_configuration_keys = getattr(xcode_generator,
+ 'generator_additional_non_configuration_keys', [])
+ global generator_additional_path_sections
+ generator_additional_path_sections = getattr(xcode_generator,
+ 'generator_additional_path_sections', [])
+ global generator_extra_sources_for_rules
+ generator_extra_sources_for_rules = getattr(xcode_generator,
+ 'generator_extra_sources_for_rules', [])
+ COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm' : 'objcxx'})
+ else:
+ operating_system = flavor
+ if flavor == 'android':
+ operating_system = 'linux' # Keep this legacy behavior for now.
+ default_variables.setdefault('OS', operating_system)
+ if flavor == 'aix':
+ default_variables.setdefault('SHARED_LIB_SUFFIX', '.a')
+ else:
+ default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
+    default_variables.setdefault('SHARED_LIB_DIR', '$(builddir)/lib.$(TOOLSET)')
+ default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)')
+
+
+def CalculateGeneratorInputInfo(params):
+ """Calculate the generator specific info that gets fed to input (called by
+ gyp)."""
+ generator_flags = params.get('generator_flags', {})
+ android_ndk_version = generator_flags.get('android_ndk_version', None)
+ # Android NDK requires a strict link order.
+ if android_ndk_version:
+ global generator_wants_sorted_dependencies
+ generator_wants_sorted_dependencies = True
+
+ output_dir = params['options'].generator_output or \
+ params['options'].toplevel_dir
+ builddir_name = generator_flags.get('output_dir', 'out')
+ qualified_out_dir = os.path.normpath(os.path.join(
+ output_dir, builddir_name, 'gypfiles'))
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': params['options'].toplevel_dir,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
+
+# The .d checking code below uses these functions:
+# wildcard, sort, foreach, shell, wordlist
+# wildcard can handle spaces, the rest can't.
+# Since I could find no way to make foreach work with spaces in filenames
+# correctly, the .d files have spaces replaced with another character. The .d
+# file for
+# Chromium\ Framework.framework/foo
+# is for example
+# out/Release/.deps/out/Release/Chromium?Framework.framework/foo
+# This is the replacement character.
+SPACE_REPLACEMENT = '?'
+
+
+LINK_COMMANDS_LINUX = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
+
+# Due to circular dependencies between libraries :(, we wrap the
+# special "figure out circular dependencies" flags around the entire
+# input list during linking.
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
+
+# We support two kinds of shared objects (.so):
+# 1) shared_library, which is just bundling together many dependent libraries
+# into a link line.
+# 2) loadable_module, which is generating a module intended for dlopen().
+#
+# They differ only slightly:
+# In the former case, we want to package all dependent code into the .so.
+# In the latter case, we want to package just the API exposed by the
+# outermost module.
+# This means shared_library uses --whole-archive, while loadable_module doesn't.
+# (Note that --whole-archive is incompatible with the --start-group used in
+# normal linking.)
+
+# Other shared-object link notes:
+# - Set SONAME to the library filename so our binaries don't reference
+# the local, absolute paths used on the link command-line.
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
+"""
+
+LINK_COMMANDS_MAC = """\
+quiet_cmd_alink = LIBTOOL-STATIC $@
+cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
+
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+"""
+
+LINK_COMMANDS_ANDROID = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
+
+# Due to circular dependencies between libraries :(, we wrap the
+# special "figure out circular dependencies" flags around the entire
+# input list during linking.
+quiet_cmd_link = LINK($(TOOLSET)) $@
+quiet_cmd_link_host = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
+cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+# Other shared-object link notes:
+# - Set SONAME to the library filename so our binaries don't reference
+# the local, absolute paths used on the link command-line.
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
+quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+"""
+
+
+LINK_COMMANDS_AIX = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
+
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+"""
+
+
+LINK_COMMANDS_OS390 = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
+
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) -Wl,DLL
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -Wl,DLL
+"""
+
+
+# Header of toplevel Makefile.
+# This should go into the build tree, but it's easier to keep it here for now.
+SHARED_HEADER = ("""\
+# We borrow heavily from the kernel build setup, though we are simpler since
+# we don't have Kconfig tweaking settings on us.
+
+# The implicit make rules have it looking for RCS files, among other things.
+# We instead explicitly write all the rules we care about.
+# It's even quicker (saves ~200ms) to pass -r on the command line.
+MAKEFLAGS=-r
+
+# The source directory tree.
+srcdir := %(srcdir)s
+abs_srcdir := $(abspath $(srcdir))
+
+# The name of the builddir.
+builddir_name ?= %(builddir)s
+
+# The V=1 flag on command line makes us verbosely print command lines.
+ifdef V
+ quiet=
+else
+ quiet=quiet_
+endif
+
+# Specify BUILDTYPE=Release on the command line for a release build.
+BUILDTYPE ?= %(default_configuration)s
+
+# Directory all our build output goes into.
+# Note that this must be two directories beneath src/ for unit tests to pass,
+# as they reach into the src/ directory for data with relative paths.
+builddir ?= $(builddir_name)/$(BUILDTYPE)
+abs_builddir := $(abspath $(builddir))
+depsdir := $(builddir)/.deps
+
+# Object output directory.
+obj := $(builddir)/obj
+abs_obj := $(abspath $(obj))
+
+# We build up a list of every single one of the targets so we can slurp in the
+# generated dependency rule Makefiles in one pass.
+all_deps :=
+
+%(make_global_settings)s
+
+CC.target ?= %(CC.target)s
+CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
+CXX.target ?= %(CXX.target)s
+CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
+LINK.target ?= %(LINK.target)s
+LDFLAGS.target ?= $(LDFLAGS)
+AR.target ?= $(AR)
+
+# C++ apps need to be linked with g++.
+LINK ?= $(CXX.target)
+
+# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
+# to replicate this environment fallback in make as well.
+CC.host ?= %(CC.host)s
+CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
+CXX.host ?= %(CXX.host)s
+CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
+LINK.host ?= %(LINK.host)s
+LDFLAGS.host ?= $(LDFLAGS_host)
+AR.host ?= %(AR.host)s
+
+# Define a dir function that can handle spaces.
+# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
+# "leading spaces cannot appear in the text of the first argument as written.
+# These characters can be put into the argument value by variable substitution."
+empty :=
+space := $(empty) $(empty)
+
+# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
+replace_spaces = $(subst $(space),""" + SPACE_REPLACEMENT + """,$1)
+unreplace_spaces = $(subst """ + SPACE_REPLACEMENT + """,$(space),$1)
+dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
+
+# Flags to make gcc output dependency info. Note that you need to be
+# careful here to use the flags that ccache and distcc can understand.
+# We write to a dep file on the side first and then rename at the end
+# so we can't end up with a broken dep file.
+depfile = $(depsdir)/$(call replace_spaces,$@).d
+DEPFLAGS = %(makedep_args)s -MF $(depfile).raw
+
+# We have to fixup the deps output in a few ways.
+# (1) the file output should mention the proper .o file.
+# ccache or distcc lose the path to the target, so we convert a rule of
+# the form:
+# foobar.o: DEP1 DEP2
+# into
+# path/to/foobar.o: DEP1 DEP2
+# (2) we want missing files not to cause us to fail to build.
+# We want to rewrite
+# foobar.o: DEP1 DEP2 \\
+# DEP3
+# to
+# DEP1:
+# DEP2:
+# DEP3:
+# so if the files are missing, they're just considered phony rules.
+# We have to do some pretty insane escaping to get those backslashes
+# and dollar signs past make, the shell, and sed at the same time.
+# Doesn't work with spaces, but that's fine: .d files have spaces in
+# their names replaced with other characters."""
+r"""
+define fixup_dep
+# The depfile may not exist if the input file didn't have any #includes.
+touch $(depfile).raw
+# Fixup path as in (1).
+sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
+# Add extra rules as in (2).
+# We remove slashes and replace spaces with new lines;
+# remove blank lines;
+# delete the first line and append a colon to the remaining lines.
+sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
+ grep -v '^$$' |\
+ sed -e 1d -e 's|$$|:|' \
+ >> $(depfile)
+rm $(depfile).raw
+endef
+"""
+"""
+# Command definitions:
+# - cmd_foo is the actual command to run;
+# - quiet_cmd_foo is the brief-output summary of the command.
+
+quiet_cmd_cc = CC($(TOOLSET)) $@
+cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
+
+quiet_cmd_cxx = CXX($(TOOLSET)) $@
+cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+%(extra_commands)s
+quiet_cmd_touch = TOUCH $@
+cmd_touch = touch $@
+
+quiet_cmd_copy = COPY $@
+# send stderr to /dev/null to ignore messages when linking directories.
+cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@")
+
+%(link_commands)s
+"""
+
+r"""
+# Define an escape_quotes function to escape single quotes.
+# This allows us to handle quotes properly as long as we always use
+# single quotes and escape_quotes.
+escape_quotes = $(subst ','\'',$(1))
+# This comment is here just to include a ' to unconfuse syntax highlighting.
+# Define an escape_vars function to escape '$' variable syntax.
+# This allows us to read/write command lines with shell variables (e.g.
+# $LD_LIBRARY_PATH), without triggering make substitution.
+escape_vars = $(subst $$,$$$$,$(1))
+# Helper that expands to a shell command to echo a string exactly as it is in
+# make. This uses printf instead of echo because printf's behaviour with respect
+# to escape sequences is more portable than echo's across different shells
+# (e.g., dash, bash).
+exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
+"""
+"""
+# Helper to compare the command we're about to run against the command
+# we logged the last time we ran the command. Produces an empty
+# string (false) when the commands match.
+# Tricky point: Make has no string-equality test function.
+# The kernel uses the following, but it seems like it would produce false
+# positives when one command is merely a reordering of the other's arguments.
+# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
+# $(filter-out $(cmd_$@), $(cmd_$(1))))
+# We instead substitute each for the empty string into the other, and
+# say they're equal if both substitutions produce the empty string.
+# .d files contain """ + SPACE_REPLACEMENT + \
+ """ instead of spaces, take that into account.
+command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\
+ $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
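+# e.g. when the stored cmd_foo and the command about to run are identical,
+# both $(subst ...) calls collapse to the empty string, so $(or ...) is empty
+# and the rule is considered up to date.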
+
+# Helper that is non-empty when a prerequisite changes.
+# Normally make does this implicitly, but we force rules to always run
+# so we can check their command lines.
+# $? -- new prerequisites
+# $| -- order-only dependencies
+prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
+
+# Helper that executes all postbuilds until one fails.
+define do_postbuilds
+ @E=0;\\
+ for p in $(POSTBUILDS); do\\
+ eval $$p;\\
+ E=$$?;\\
+ if [ $$E -ne 0 ]; then\\
+ break;\\
+ fi;\\
+ done;\\
+ if [ $$E -ne 0 ]; then\\
+ rm -rf "$@";\\
+ exit $$E;\\
+ fi
+endef
+
+# do_cmd: run a command via the above cmd_foo names, if necessary.
+# Should always run for a given target to handle command-line changes.
+# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
+# Third argument, if non-zero, makes it do POSTBUILDS processing.
+# Note: We intentionally do NOT call dirx for depfile, since it contains """ + \
+ SPACE_REPLACEMENT + """ for
+# spaces already and dirx strips the """ + SPACE_REPLACEMENT + \
+ """ characters.
+define do_cmd
+$(if $(or $(command_changed),$(prereq_changed)),
+ @$(call exact_echo, $($(quiet)cmd_$(1)))
+ @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
+ $(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))),
+ @$(cmd_$(1))
+ @echo " $(quiet_cmd_$(1)): Finished",
+ @$(cmd_$(1))
+ )
+ @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
+ @$(if $(2),$(fixup_dep))
+ $(if $(and $(3), $(POSTBUILDS)),
+ $(call do_postbuilds)
+ )
+)
+endef
+
+# Declare the "%(default_target)s" target first so it is the default,
+# even though we don't have the deps yet.
+.PHONY: %(default_target)s
+%(default_target)s:
+
+# make looks for ways to re-generate included makefiles, but in our case, we
+# don't have a direct way. Explicitly telling make that it has nothing to do
+# for them makes it go faster.
+%%.d: ;
+
+# Use FORCE_DO_CMD to force a target to run. Should be coupled with
+# do_cmd.
+.PHONY: FORCE_DO_CMD
+FORCE_DO_CMD:
+
+""")
+
+SHARED_HEADER_MAC_COMMANDS = """
+quiet_cmd_objc = CXX($(TOOLSET)) $@
+cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+quiet_cmd_objcxx = CXX($(TOOLSET)) $@
+cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+# Commands for precompiled header files.
+quiet_cmd_pch_c = CXX($(TOOLSET)) $@
+cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
+cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+quiet_cmd_pch_m = CXX($(TOOLSET)) $@
+cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
+quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
+cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+# gyp-mac-tool is written next to the root Makefile by gyp.
+# Use $(4) for the command, since $(2) and $(3) are already used as flags by
+# do_cmd.
+quiet_cmd_mac_tool = MACTOOL $(4) $<
+cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
+
+quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
+cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
+
+quiet_cmd_infoplist = INFOPLIST $@
+cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
+"""
+
+
+def WriteRootHeaderSuffixRules(writer):
+ extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower)
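+  # For the '.cc' extension, for example, the first loop below emits:
+  #   $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
+  #       @$(call do_cmd,cxx,1)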
+
+ writer.write('# Suffix rules, putting all outputs into $(obj).\n')
+ for ext in extensions:
+ writer.write('$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n' % ext)
+ writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
+
+ writer.write('\n# Try building from generated source, too.\n')
+ for ext in extensions:
+ writer.write(
+ '$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n' % ext)
+ writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
+ writer.write('\n')
+ for ext in extensions:
+ writer.write('$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n' % ext)
+ writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
+ writer.write('\n')
+
+
+SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\
+# Suffix rules, putting all outputs into $(obj).
+""")
+
+
+SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\
+# Try building from generated source, too.
+""")
+
+
+SHARED_FOOTER = """\
+# "all" is a concatenation of the "all" targets from all the included
+# sub-makefiles. This is just here to clarify.
+all:
+
+# Add in dependency-tracking rules. $(all_deps) is the list of every single
+# target in our tree. Only consider the ones with .d (dependency) info:
+d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
+ifneq ($(d_files),)
+ include $(d_files)
+endif
+"""
+
+header = """\
+# This file is generated by gyp; do not edit.
+
+"""
+
+# Maps every compilable file extension to the do_cmd that compiles it.
+COMPILABLE_EXTENSIONS = {
+ '.c': 'cc',
+ '.cc': 'cxx',
+ '.cpp': 'cxx',
+ '.cxx': 'cxx',
+ '.s': 'cc',
+ '.S': 'cc',
+}
+
+def Compilable(filename):
+ """Return true if the file is compilable (should be in OBJS)."""
+ for res in (filename.endswith(e) for e in COMPILABLE_EXTENSIONS):
+ if res:
+ return True
+ return False
+
+
+def Linkable(filename):
+ """Return true if the file is linkable (should be on the link line)."""
+ return filename.endswith('.o')
+
+
+def Target(filename):
+ """Translate a compilable filename to its .o target."""
+ return os.path.splitext(filename)[0] + '.o'
+
+
+def EscapeShellArgument(s):
+ """Quotes an argument so that it will be interpreted literally by a POSIX
+ shell. Taken from
+ http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
+ """
+ return "'" + s.replace("'", "'\\''") + "'"
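+  # e.g. EscapeShellArgument("don't") returns the string 'don'\''t', which a
+  # POSIX shell reads back as don't.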
+
+
+def EscapeMakeVariableExpansion(s):
+  """Make has its own variable expansion syntax using $. We must escape it for
+  the string to be interpreted literally."""
+ return s.replace('$', '$$')
+
+
+def EscapeCppDefine(s):
+ """Escapes a CPP define so that it will reach the compiler unaltered."""
+ s = EscapeShellArgument(s)
+ s = EscapeMakeVariableExpansion(s)
+ # '#' characters must be escaped even embedded in a string, else Make will
+ # treat it as the start of a comment.
+ return s.replace('#', r'\#')
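+  # e.g. EscapeCppDefine('VERSION=1.2#beta') returns 'VERSION=1.2\#beta':
+  # single-quoted for the shell, with the '#' escaped so make does not treat
+  # it as the start of a comment.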
+
+
+def QuoteIfNecessary(string):
+ """TODO: Should this ideally be replaced with one or more of the above
+ functions?"""
+ if '"' in string:
+ string = '"' + string.replace('"', '\\"') + '"'
+ return string
+
+
+def StringToMakefileVariable(string):
+ """Convert a string to a value that is acceptable as a make variable name."""
+ return re.sub('[^a-zA-Z0-9_]', '_', string)
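+  # e.g. StringToMakefileVariable('out/foo.gyp:bar#host') returns
+  # 'out_foo_gyp_bar_host'.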
+
+
+srcdir_prefix = ''
+def Sourceify(path):
+ """Convert a path to its source directory form."""
+ if '$(' in path:
+ return path
+ if os.path.isabs(path):
+ return path
+ return srcdir_prefix + path
+
+
+def QuoteSpaces(s, quote=r'\ '):
+ return s.replace(' ', quote)
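+  # e.g. QuoteSpaces('My App.o') returns 'My\ App.o'.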
+
+
+# TODO: Avoid code duplication with _ValidateSourcesForMSVSProject in msvs.py.
+def _ValidateSourcesForOSX(spec, all_sources):
+  """Makes sure that duplicate basenames are not specified in the source list.
+
+  Arguments:
+    spec: The target dictionary containing the properties of the target.
+    all_sources: A list of all source files for the target.
+  """
+ if spec.get('type', None) != 'static_library':
+ return
+
+ basenames = {}
+ for source in all_sources:
+ name, ext = os.path.splitext(source)
+ is_compiled_file = ext in [
+ '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
+ if not is_compiled_file:
+ continue
+ basename = os.path.basename(name) # Don't include extension.
+ basenames.setdefault(basename, []).append(source)
+
+ error = ''
+ for basename, files in basenames.items():
+ if len(files) > 1:
+ error += ' %s: %s\n' % (basename, ' '.join(files))
+
+ if error:
+ print('static library %s has several files with the same basename:\n' %
+ spec['target_name'] + error + 'libtool on OS X will generate' +
+ ' warnings for them.')
+ raise GypError('Duplicate basenames in sources section, see list above')
+
+
+# Map from qualified target to path to output.
+target_outputs = {}
+# Map from qualified target to any linkable output. A subset
+# of target_outputs. E.g. when mybinary depends on liba, we want to
+# include liba in the linker line; when otherbinary depends on
+# mybinary, we just want to build mybinary first.
+target_link_deps = {}
+
+
+class MakefileWriter(object):
+ """MakefileWriter packages up the writing of one target-specific foobar.mk.
+
+  Its only real entry point is Write(); the class mostly exists for namespacing.
+ """
+
+ def __init__(self, generator_flags, flavor):
+ self.generator_flags = generator_flags
+ self.flavor = flavor
+
+ self.suffix_rules_srcdir = {}
+ self.suffix_rules_objdir1 = {}
+ self.suffix_rules_objdir2 = {}
+
+ # Generate suffix rules for all compilable extensions.
+ for ext in COMPILABLE_EXTENSIONS.keys():
+ # Suffix rules for source folder.
+ self.suffix_rules_srcdir.update({ext: ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD
+ @$(call do_cmd,%s,1)
+""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
+
+ # Suffix rules for generated source files.
+ self.suffix_rules_objdir1.update({ext: ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD
+ @$(call do_cmd,%s,1)
+""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
+ self.suffix_rules_objdir2.update({ext: ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
+ @$(call do_cmd,%s,1)
+""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
+
+
+ def Write(self, qualified_target, base_path, output_filename, spec, configs,
+ part_of_all):
+ """The main entry point: writes a .mk file for a single target.
+
+ Arguments:
+ qualified_target: target we're generating
+ base_path: path relative to source root we're building in, used to resolve
+ target-relative paths
+ output_filename: output .mk file name to write
+ spec, configs: gyp info
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ gyp.common.EnsureDirExists(output_filename)
+
+ self.fp = open(output_filename, 'w')
+
+ self.fp.write(header)
+
+ self.qualified_target = qualified_target
+ self.path = base_path
+ self.target = spec['target_name']
+ self.type = spec['type']
+ self.toolset = spec['toolset']
+
+ self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
+ if self.flavor == 'mac':
+ self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+ else:
+ self.xcode_settings = None
+
+ deps, link_deps = self.ComputeDeps(spec)
+
+ # Some of the generation below can add extra output, sources, or
+ # link dependencies. All of the out params of the functions that
+ # follow use names like extra_foo.
+ extra_outputs = []
+ extra_sources = []
+ extra_link_deps = []
+ extra_mac_bundle_resources = []
+ mac_bundle_deps = []
+
+ if self.is_mac_bundle:
+ self.output = self.ComputeMacBundleOutput(spec)
+ self.output_binary = self.ComputeMacBundleBinaryOutput(spec)
+ else:
+ self.output = self.output_binary = self.ComputeOutput(spec)
+
+ self.is_standalone_static_library = bool(
+ spec.get('standalone_static_library', 0))
+ self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
+ 'shared_library')
+ if (self.is_standalone_static_library or
+ self.type in self._INSTALLABLE_TARGETS):
+ self.alias = os.path.basename(self.output)
+ install_path = self._InstallableTargetInstallPath()
+ else:
+ self.alias = self.output
+ install_path = self.output
+
+ self.WriteLn("TOOLSET := " + self.toolset)
+ self.WriteLn("TARGET := " + self.target)
+
+ # Actions must come first, since they can generate more OBJs for use below.
+ if 'actions' in spec:
+ self.WriteActions(spec['actions'], extra_sources, extra_outputs,
+ extra_mac_bundle_resources, part_of_all)
+
+ # Rules must be early like actions.
+ if 'rules' in spec:
+ self.WriteRules(spec['rules'], extra_sources, extra_outputs,
+ extra_mac_bundle_resources, part_of_all)
+
+ if 'copies' in spec:
+ self.WriteCopies(spec['copies'], extra_outputs, part_of_all)
+
+ # Bundle resources.
+ if self.is_mac_bundle:
+ all_mac_bundle_resources = (
+ spec.get('mac_bundle_resources', []) + extra_mac_bundle_resources)
+ self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps)
+ self.WriteMacInfoPlist(mac_bundle_deps)
+
+ # Sources.
+ all_sources = spec.get('sources', []) + extra_sources
+ if all_sources:
+ if self.flavor == 'mac':
+ # libtool on OS X generates warnings for duplicate basenames in the same
+ # target.
+ _ValidateSourcesForOSX(spec, all_sources)
+ self.WriteSources(
+ configs, deps, all_sources, extra_outputs,
+ extra_link_deps, part_of_all,
+ gyp.xcode_emulation.MacPrefixHeader(
+ self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)),
+ self.Pchify))
+ sources = [x for x in all_sources if Compilable(x)]
+ if sources:
+ self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
+ extensions = set([os.path.splitext(s)[1] for s in sources])
+ for ext in extensions:
+ if ext in self.suffix_rules_srcdir:
+ self.WriteLn(self.suffix_rules_srcdir[ext])
+ self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
+ for ext in extensions:
+ if ext in self.suffix_rules_objdir1:
+ self.WriteLn(self.suffix_rules_objdir1[ext])
+ for ext in extensions:
+ if ext in self.suffix_rules_objdir2:
+ self.WriteLn(self.suffix_rules_objdir2[ext])
+ self.WriteLn('# End of this set of suffix rules')
+
+ # Add dependency from bundle to bundle binary.
+ if self.is_mac_bundle:
+ mac_bundle_deps.append(self.output_binary)
+
+ self.WriteTarget(spec, configs, deps, extra_link_deps + link_deps,
+ mac_bundle_deps, extra_outputs, part_of_all)
+
+ # Update global list of target outputs, used in dependency tracking.
+ target_outputs[qualified_target] = install_path
+
+ # Update global list of link dependencies.
+ if self.type in ('static_library', 'shared_library'):
+ target_link_deps[qualified_target] = self.output_binary
+
+    # Currently all versions have the same effect, but the behavior could
+    # differ in the future.
+ if self.generator_flags.get('android_ndk_version', None):
+ self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps)
+
+ self.fp.close()
+
+
+ def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
+ """Write a "sub-project" Makefile.
+
+ This is a small, wrapper Makefile that calls the top-level Makefile to build
+ the targets from a single gyp file (i.e. a sub-project).
+
+ Arguments:
+ output_filename: sub-project Makefile name to write
+ makefile_path: path to the top-level Makefile
+ targets: list of "all" targets for this sub-project
+ build_dir: build output directory, relative to the sub-project
+ """
+ gyp.common.EnsureDirExists(output_filename)
+ self.fp = open(output_filename, 'w')
+ self.fp.write(header)
+ # For consistency with other builders, put sub-project build output in the
+ # sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
+ self.WriteLn('export builddir_name ?= %s' %
+ os.path.join(os.path.dirname(output_filename), build_dir))
+ self.WriteLn('.PHONY: all')
+ self.WriteLn('all:')
+ if makefile_path:
+ makefile_path = ' -C ' + makefile_path
+ self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets)))
+ self.fp.close()
+
+
+ def WriteActions(self, actions, extra_sources, extra_outputs,
+ extra_mac_bundle_resources, part_of_all):
+ """Write Makefile code for any 'actions' from the gyp input.
+
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ actions (used to make other pieces dependent on these
+ actions)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ env = self.GetSortedXcodeEnv()
+ for action in actions:
+ name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
+ action['action_name']))
+ self.WriteLn('### Rules for action "%s":' % action['action_name'])
+ inputs = action['inputs']
+ outputs = action['outputs']
+
+ # Build up a list of outputs.
+ # Collect the output dirs we'll need.
+ dirs = set()
+ for out in outputs:
+ dir = os.path.split(out)[0]
+ if dir:
+ dirs.add(dir)
+ if int(action.get('process_outputs_as_sources', False)):
+ extra_sources += outputs
+ if int(action.get('process_outputs_as_mac_bundle_resources', False)):
+ extra_mac_bundle_resources += outputs
+
+ # Write the actual command.
+ action_commands = action['action']
+ if self.flavor == 'mac':
+ action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env)
+ for command in action_commands]
+ command = gyp.common.EncodePOSIXShellList(action_commands)
+ if 'message' in action:
+ self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message']))
+ else:
+ self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name))
+ if len(dirs) > 0:
+ command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
+
+ cd_action = 'cd %s; ' % Sourceify(self.path or '.')
+
+ # command and cd_action get written to a toplevel variable called
+ # cmd_foo. Toplevel variables can't handle things that change per
+ # makefile like $(TARGET), so hardcode the target.
+ command = command.replace('$(TARGET)', self.target)
+ cd_action = cd_action.replace('$(TARGET)', self.target)
+
+ # Set LD_LIBRARY_PATH in case the action runs an executable from this
+ # build which links to shared libs from this build.
+ # actions run on the host, so they should in theory only use host
+ # libraries, but until everything is made cross-compile safe, also use
+ # target libraries.
+ # TODO(piman): when everything is cross-compile safe, remove lib.target
+ self.WriteLn('cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:'
+ '$(builddir)/lib.target:$$LD_LIBRARY_PATH; '
+ 'export LD_LIBRARY_PATH; '
+ '%s%s'
+ % (name, cd_action, command))
+ self.WriteLn()
+ outputs = [self.Absolutify(o) for o in outputs]
+ # The makefile rules are all relative to the top dir, but the gyp actions
+ # are defined relative to their containing dir. This replaces the obj
+ # variable for the action rule with an absolute version so that the output
+ # goes in the right place.
+ # Only write the 'obj' and 'builddir' rules for the "primary" output (:1);
+ # it's superfluous for the "extra outputs", and this avoids accidentally
+ # writing duplicate dummy rules for those outputs.
+ # Same for environment.
+ self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0]))
+ self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0]))
+ self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv())
+
+ for input in inputs:
+ assert ' ' not in input, (
+ "Spaces in action input filenames not supported (%s)" % input)
+ for output in outputs:
+ assert ' ' not in output, (
+ "Spaces in action output filenames not supported (%s)" % output)
+
+ # See the comment in WriteCopies about expanding env vars.
+ outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
+ inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
+
+ self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)),
+ part_of_all=part_of_all, command=name)
+
+ # Stuff the outputs in a variable so we can refer to them later.
+ outputs_variable = 'action_%s_outputs' % name
+ self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs)))
+ extra_outputs.append('$(%s)' % outputs_variable)
+ self.WriteLn()
+
+ self.WriteLn()
+
+
+ def WriteRules(self, rules, extra_sources, extra_outputs,
+ extra_mac_bundle_resources, part_of_all):
+ """Write Makefile code for any 'rules' from the gyp input.
+
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ rules (used to make other pieces dependent on these rules)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ env = self.GetSortedXcodeEnv()
+ for rule in rules:
+ name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
+ rule['rule_name']))
+ count = 0
+ self.WriteLn('### Generated for rule %s:' % name)
+
+ all_outputs = []
+
+ for rule_source in rule.get('rule_sources', []):
+ dirs = set()
+ (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
+ (rule_source_root, rule_source_ext) = \
+ os.path.splitext(rule_source_basename)
+
+ outputs = [self.ExpandInputRoot(out, rule_source_root,
+ rule_source_dirname)
+ for out in rule['outputs']]
+
+ for out in outputs:
+ dir = os.path.dirname(out)
+ if dir:
+ dirs.add(dir)
+ if int(rule.get('process_outputs_as_sources', False)):
+ extra_sources += outputs
+ if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
+ extra_mac_bundle_resources += outputs
+ inputs = map(Sourceify, map(self.Absolutify, [rule_source] +
+ rule.get('inputs', [])))
+ actions = ['$(call do_cmd,%s_%d)' % (name, count)]
+
+ if name == 'resources_grit':
+ # HACK: This is ugly. Grit intentionally doesn't touch the
+ # timestamp of its output file when the file doesn't change,
+ # which is fine in hash-based dependency systems like scons
+ # and forge, but not kosher in the make world. After some
+ # discussion, hacking around it here seems like the least
+ # amount of pain.
+ actions += ['@touch --no-create $@']
+
+ # See the comment in WriteCopies about expanding env vars.
+ outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
+ inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
+
+ outputs = [self.Absolutify(o) for o in outputs]
+ all_outputs += outputs
+ # Only write the 'obj' and 'builddir' rules for the "primary" output
+ # (:1); it's superfluous for the "extra outputs", and this avoids
+ # accidentally writing duplicate dummy rules for those outputs.
+ self.WriteLn('%s: obj := $(abs_obj)' % outputs[0])
+ self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0])
+ self.WriteMakeRule(outputs, inputs, actions,
+ command="%s_%d" % (name, count))
+ # Spaces in rule filenames are not supported, but rule variables have
+ # spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)').
+ # The spaces within the variables are valid, so remove the variables
+ # before checking.
+ variables_with_spaces = re.compile(r'\$\([^ ]* \$<\)')
+ for output in outputs:
+ output = re.sub(variables_with_spaces, '', output)
+ assert ' ' not in output, (
+ "Spaces in rule filenames not yet supported (%s)" % output)
+ self.WriteLn('all_deps += %s' % ' '.join(outputs))
+
+ action = [self.ExpandInputRoot(ac, rule_source_root,
+ rule_source_dirname)
+ for ac in rule['action']]
+ mkdirs = ''
+ if len(dirs) > 0:
+ mkdirs = 'mkdir -p %s; ' % ' '.join(dirs)
+ cd_action = 'cd %s; ' % Sourceify(self.path or '.')
+
+ # action, cd_action, and mkdirs get written to a toplevel variable
+ # called cmd_foo. Toplevel variables can't handle things that change
+ # per makefile like $(TARGET), so hardcode the target.
+ if self.flavor == 'mac':
+ action = [gyp.xcode_emulation.ExpandEnvVars(command, env)
+ for command in action]
+ action = gyp.common.EncodePOSIXShellList(action)
+ action = action.replace('$(TARGET)', self.target)
+ cd_action = cd_action.replace('$(TARGET)', self.target)
+ mkdirs = mkdirs.replace('$(TARGET)', self.target)
+
+ # Set LD_LIBRARY_PATH in case the rule runs an executable from this
+ # build which links to shared libs from this build.
+ # rules run on the host, so they should in theory only use host
+ # libraries, but until everything is made cross-compile safe, also use
+ # target libraries.
+ # TODO(piman): when everything is cross-compile safe, remove lib.target
+ self.WriteLn(
+ "cmd_%(name)s_%(count)d = LD_LIBRARY_PATH="
+ "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
+ "export LD_LIBRARY_PATH; "
+ "%(cd_action)s%(mkdirs)s%(action)s" % {
+ 'action': action,
+ 'cd_action': cd_action,
+ 'count': count,
+ 'mkdirs': mkdirs,
+ 'name': name,
+ })
+ self.WriteLn(
+ 'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % {
+ 'count': count,
+ 'name': name,
+ })
+ self.WriteLn()
+ count += 1
+
+ outputs_variable = 'rule_%s_outputs' % name
+ self.WriteList(all_outputs, outputs_variable)
+ extra_outputs.append('$(%s)' % outputs_variable)
+
+ self.WriteLn('### Finished generating for rule: %s' % name)
+ self.WriteLn()
+ self.WriteLn('### Finished generating for all rules')
+ self.WriteLn('')
+
+
+ def WriteCopies(self, copies, extra_outputs, part_of_all):
+ """Write Makefile code for any 'copies' from the gyp input.
+
+ extra_outputs: a list that will be filled in with any outputs of this action
+ (used to make other pieces dependent on this action)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ self.WriteLn('### Generated for copy rule.')
+
+ variable = StringToMakefileVariable(self.qualified_target + '_copies')
+ outputs = []
+ for copy in copies:
+ for path in copy['files']:
+ # Absolutify() may call normpath, and will strip trailing slashes.
+ path = Sourceify(self.Absolutify(path))
+ filename = os.path.split(path)[1]
+ output = Sourceify(self.Absolutify(os.path.join(copy['destination'],
+ filename)))
+
+ # If the output path has variables in it, which happens in practice for
+ # 'copies', writing the environment as target-local doesn't work,
+ # because the variables are already needed for the target name.
+ # Copying the environment variables into global make variables doesn't
+ # work either, because then the .d files will potentially contain spaces
+ # after variable expansion, and .d file handling cannot handle spaces.
+ # As a workaround, manually expand variables at gyp time. Since 'copies'
+ # can't run scripts, there's no need to write the env then.
+ # WriteDoCmd() will escape spaces for .d files.
+ env = self.GetSortedXcodeEnv()
+ output = gyp.xcode_emulation.ExpandEnvVars(output, env)
+ path = gyp.xcode_emulation.ExpandEnvVars(path, env)
+ self.WriteDoCmd([output], [path], 'copy', part_of_all)
+ outputs.append(output)
+ self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs))))
+ extra_outputs.append('$(%s)' % variable)
+ self.WriteLn()
+
+
+ def WriteMacBundleResources(self, resources, bundle_deps):
+ """Writes Makefile code for 'mac_bundle_resources'."""
+ self.WriteLn('### Generated for mac_bundle_resources')
+
+ for output, res in gyp.xcode_emulation.GetMacBundleResources(
+ generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
+ map(Sourceify, map(self.Absolutify, resources))):
+ _, ext = os.path.splitext(output)
+ if ext != '.xcassets':
+        # Make does not support '.xcassets' emulation.
+ self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
+ part_of_all=True)
+ bundle_deps.append(output)
+
+
+ def WriteMacInfoPlist(self, bundle_deps):
+ """Write Makefile code for bundle Info.plist files."""
+ info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
+ generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
+ lambda p: Sourceify(self.Absolutify(p)))
+ if not info_plist:
+ return
+ if defines:
+ # Create an intermediate file to store preprocessed results.
+ intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' +
+ os.path.basename(info_plist))
+ self.WriteList(defines, intermediate_plist + ': INFOPLIST_DEFINES', '-D',
+ quoter=EscapeCppDefine)
+ self.WriteMakeRule([intermediate_plist], [info_plist],
+ ['$(call do_cmd,infoplist)',
+ # "Convert" the plist so that any weird whitespace changes from the
+ # preprocessor do not affect the XML parser in mac_tool.
+ '@plutil -convert xml1 $@ $@'])
+ info_plist = intermediate_plist
+    # Plists can contain envvars; substitute them into the file before copying.
+ self.WriteSortedXcodeEnv(
+ out, self.GetSortedXcodeEnv(additional_settings=extra_env))
+ self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist',
+ part_of_all=True)
+ bundle_deps.append(out)
+
+
+ def WriteSources(self, configs, deps, sources,
+ extra_outputs, extra_link_deps,
+ part_of_all, precompiled_header):
+ """Write Makefile code for any 'sources' from the gyp input.
+ These are source files necessary to build the current target.
+
+ configs, deps, sources: input from gyp.
+ extra_outputs: a list of extra outputs this action should be dependent on;
+ used to serialize action/rules before compilation
+ extra_link_deps: a list that will be filled in with any outputs of
+ compilation (to be used in link lines)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+
+ # Write configuration-specific variables for CFLAGS, etc.
+ for configname in sorted(configs.keys()):
+ config = configs[configname]
+ self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D',
+ quoter=EscapeCppDefine)
+
+ if self.flavor == 'mac':
+ cflags = self.xcode_settings.GetCflags(configname)
+ cflags_c = self.xcode_settings.GetCflagsC(configname)
+ cflags_cc = self.xcode_settings.GetCflagsCC(configname)
+ cflags_objc = self.xcode_settings.GetCflagsObjC(configname)
+ cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname)
+ else:
+ cflags = config.get('cflags')
+ cflags_c = config.get('cflags_c')
+ cflags_cc = config.get('cflags_cc')
+
+      self.WriteLn("# Flags passed to all source files.")
+      self.WriteList(cflags, 'CFLAGS_%s' % configname)
+      self.WriteLn("# Flags passed to only C files.")
+      self.WriteList(cflags_c, 'CFLAGS_C_%s' % configname)
+      self.WriteLn("# Flags passed to only C++ files.")
+      self.WriteList(cflags_cc, 'CFLAGS_CC_%s' % configname)
+      if self.flavor == 'mac':
+        self.WriteLn("# Flags passed to only ObjC files.")
+        self.WriteList(cflags_objc, 'CFLAGS_OBJC_%s' % configname)
+        self.WriteLn("# Flags passed to only ObjC++ files.")
+        self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname)
+ includes = config.get('include_dirs')
+ if includes:
+ includes = [Sourceify(self.Absolutify(include)) for include in includes]
+ self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')
+
+    # Materialize the filter object so it can be iterated more than once
+    # (it is reused below for the precompiled header dependencies).
+    compilable = list(filter(Compilable, sources))
+ objs = [self.Objectify(self.Absolutify(Target(x))) for x in compilable]
+ self.WriteList(objs, 'OBJS')
+
+ for obj in objs:
+ assert ' ' not in obj, (
+ "Spaces in object filenames not supported (%s)" % obj)
+ self.WriteLn('# Add to the list of files we specially track '
+ 'dependencies for.')
+ self.WriteLn('all_deps += $(OBJS)')
+ self.WriteLn()
+
+ # Make sure our dependencies are built first.
+ if deps:
+ self.WriteMakeRule(['$(OBJS)'], deps,
+ comment = 'Make sure our dependencies are built '
+ 'before any of us.',
+ order_only = True)
+
+ # Make sure the actions and rules run first.
+ # If they generate any extra headers etc., the per-.o file dep tracking
+ # will catch the proper rebuilds, so order only is still ok here.
+ if extra_outputs:
+ self.WriteMakeRule(['$(OBJS)'], extra_outputs,
+ comment = 'Make sure our actions/rules run '
+ 'before any of us.',
+ order_only = True)
+
+    pchdeps = precompiled_header.GetObjDependencies(compilable, objs)
+ if pchdeps:
+ self.WriteLn('# Dependencies from obj files to their precompiled headers')
+ for source, obj, gch in pchdeps:
+ self.WriteLn('%s: %s' % (obj, gch))
+ self.WriteLn('# End precompiled header dependencies')
+
+ if objs:
+ extra_link_deps.append('$(OBJS)')
+ self.WriteLn("""\
+# CFLAGS et al overrides must be target-local.
+# See "Target-specific Variable Values" in the GNU Make manual.""")
+ self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)")
+ self.WriteLn("$(OBJS): GYP_CFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "%s " % precompiled_header.GetInclude('c') +
+ "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_C_$(BUILDTYPE))")
+ self.WriteLn("$(OBJS): GYP_CXXFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "%s " % precompiled_header.GetInclude('cc') +
+ "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_CC_$(BUILDTYPE))")
+ if self.flavor == 'mac':
+ self.WriteLn("$(OBJS): GYP_OBJCFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "%s " % precompiled_header.GetInclude('m') +
+ "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_C_$(BUILDTYPE)) "
+ "$(CFLAGS_OBJC_$(BUILDTYPE))")
+ self.WriteLn("$(OBJS): GYP_OBJCXXFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "%s " % precompiled_header.GetInclude('mm') +
+ "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_CC_$(BUILDTYPE)) "
+ "$(CFLAGS_OBJCC_$(BUILDTYPE))")
+
+ self.WritePchTargets(precompiled_header.GetPchBuildCommands())
+
+ # If there are any object files in our input file list, link them into our
+ # output.
+ extra_link_deps += [source for source in sources if Linkable(source)]
+
+ self.WriteLn()
+
+ def WritePchTargets(self, pch_commands):
+ """Writes make rules to compile prefix headers."""
+ if not pch_commands:
+ return
+
+ for gch, lang_flag, lang, input in pch_commands:
+ extra_flags = {
+ 'c': '$(CFLAGS_C_$(BUILDTYPE))',
+ 'cc': '$(CFLAGS_CC_$(BUILDTYPE))',
+ 'm': '$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))',
+ 'mm': '$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))',
+ }[lang]
+ var_name = {
+ 'c': 'GYP_PCH_CFLAGS',
+ 'cc': 'GYP_PCH_CXXFLAGS',
+ 'm': 'GYP_PCH_OBJCFLAGS',
+ 'mm': 'GYP_PCH_OBJCXXFLAGS',
+ }[lang]
+ self.WriteLn("%s: %s := %s " % (gch, var_name, lang_flag) +
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "$(CFLAGS_$(BUILDTYPE)) " +
+ extra_flags)
+
+ self.WriteLn('%s: %s FORCE_DO_CMD' % (gch, input))
+ self.WriteLn('\t@$(call do_cmd,pch_%s,1)' % lang)
+ self.WriteLn('')
+ assert ' ' not in gch, (
+ "Spaces in gch filenames not supported (%s)" % gch)
+ self.WriteLn('all_deps += %s' % gch)
+ self.WriteLn('')
+
+
+ def ComputeOutputBasename(self, spec):
+ """Return the 'output basename' of a gyp spec.
+
+ E.g., the loadable module 'foobar' in directory 'baz' will produce
+ 'libfoobar.so'
+ """
+ assert not self.is_mac_bundle
+
+ if self.flavor == 'mac' and self.type in (
+ 'static_library', 'executable', 'shared_library', 'loadable_module'):
+ return self.xcode_settings.GetExecutablePath()
+
+ target = spec['target_name']
+ target_prefix = ''
+ target_ext = ''
+ if self.type == 'static_library':
+ if target[:3] == 'lib':
+ target = target[3:]
+ target_prefix = 'lib'
+ target_ext = '.a'
+ elif self.type in ('loadable_module', 'shared_library'):
+ if target[:3] == 'lib':
+ target = target[3:]
+ target_prefix = 'lib'
+ if self.flavor == 'aix':
+ target_ext = '.a'
+ else:
+ target_ext = '.so'
+ elif self.type == 'none':
+ target = '%s.stamp' % target
+ elif self.type != 'executable':
+      print("ERROR: What output file should be generated?",
+            "type", self.type, "target", target)
+
+ target_prefix = spec.get('product_prefix', target_prefix)
+ target = spec.get('product_name', target)
+ product_ext = spec.get('product_extension')
+ if product_ext:
+ target_ext = '.' + product_ext
+
+ return target_prefix + target + target_ext
+
+
+ def _InstallImmediately(self):
+ return self.toolset == 'target' and self.flavor == 'mac' and self.type in (
+ 'static_library', 'executable', 'shared_library', 'loadable_module')
+
+
+ def ComputeOutput(self, spec):
+ """Return the 'output' (full output path) of a gyp spec.
+
+ E.g., the loadable module 'foobar' in directory 'baz' will produce
+ '$(obj)/baz/libfoobar.so'
+ """
+ assert not self.is_mac_bundle
+
+ path = os.path.join('$(obj).' + self.toolset, self.path)
+ if self.type == 'executable' or self._InstallImmediately():
+ path = '$(builddir)'
+ path = spec.get('product_dir', path)
+ return os.path.join(path, self.ComputeOutputBasename(spec))
+
+
+ def ComputeMacBundleOutput(self, spec):
+ """Return the 'output' (full output path) to a bundle output directory."""
+ assert self.is_mac_bundle
+ path = generator_default_variables['PRODUCT_DIR']
+ return os.path.join(path, self.xcode_settings.GetWrapperName())
+
+
+ def ComputeMacBundleBinaryOutput(self, spec):
+ """Return the 'output' (full output path) to the binary in a bundle."""
+ path = generator_default_variables['PRODUCT_DIR']
+ return os.path.join(path, self.xcode_settings.GetExecutablePath())
+
+
+ def ComputeDeps(self, spec):
+ """Compute the dependencies of a gyp spec.
+
+    Returns a tuple (deps, link_deps), where each is a list of filenames
+    that must be given to make as prerequisites, for building (deps) or
+    for linking (link_deps).
+ """
+ deps = []
+ link_deps = []
+ if 'dependencies' in spec:
+ deps.extend([target_outputs[dep] for dep in spec['dependencies']
+ if target_outputs[dep]])
+ for dep in spec['dependencies']:
+ if dep in target_link_deps:
+ link_deps.append(target_link_deps[dep])
+ deps.extend(link_deps)
+ # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)?
+ # This hack makes it work:
+ # link_deps.extend(spec.get('libraries', []))
+ return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
+
+
+ def WriteDependencyOnExtraOutputs(self, target, extra_outputs):
+ self.WriteMakeRule([self.output_binary], extra_outputs,
+ comment = 'Build our special outputs first.',
+ order_only = True)
+
+
+ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps,
+ extra_outputs, part_of_all):
+ """Write Makefile code to produce the final target of the gyp spec.
+
+ spec, configs: input from gyp.
+ deps, link_deps: dependency lists; see ComputeDeps()
+ extra_outputs: any extra outputs that our target should depend on
+ part_of_all: flag indicating this target is part of 'all'
+ """
+
+ self.WriteLn('### Rules for final target.')
+
+ if extra_outputs:
+ self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs)
+ self.WriteMakeRule(extra_outputs, deps,
+ comment=('Preserve order dependency of '
+ 'special output on deps.'),
+ order_only = True)
+
+ target_postbuilds = {}
+ if self.type != 'none':
+ for configname in sorted(configs.keys()):
+ config = configs[configname]
+ if self.flavor == 'mac':
+ ldflags = self.xcode_settings.GetLdflags(configname,
+ generator_default_variables['PRODUCT_DIR'],
+ lambda p: Sourceify(self.Absolutify(p)))
+
+ # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
+ gyp_to_build = gyp.common.InvertRelativePath(self.path)
+ target_postbuild = self.xcode_settings.AddImplicitPostbuilds(
+ configname,
+ QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
+ self.output))),
+ QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
+ self.output_binary))))
+ if target_postbuild:
+ target_postbuilds[configname] = target_postbuild
+ else:
+ ldflags = config.get('ldflags', [])
+ # Compute an rpath for this output if needed.
+ if any(dep.endswith('.so') or '.so.' in dep for dep in deps):
+ # We want to get the literal string "$ORIGIN" into the link command,
+ # so we need lots of escaping.
+ ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset)
+ ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' %
+ self.toolset)
+ library_dirs = config.get('library_dirs', [])
+ ldflags += [('-L%s' % library_dir) for library_dir in library_dirs]
+ self.WriteList(ldflags, 'LDFLAGS_%s' % configname)
+ if self.flavor == 'mac':
+ self.WriteList(self.xcode_settings.GetLibtoolflags(configname),
+ 'LIBTOOLFLAGS_%s' % configname)
+ libraries = spec.get('libraries')
+ if libraries:
+ # Remove duplicate entries
+ libraries = gyp.common.uniquer(libraries)
+ if self.flavor == 'mac':
+ libraries = self.xcode_settings.AdjustLibraries(libraries)
+ self.WriteList(libraries, 'LIBS')
+ self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' %
+ QuoteSpaces(self.output_binary))
+ self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary))
+
+ if self.flavor == 'mac':
+ self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' %
+ QuoteSpaces(self.output_binary))
+
+ # Postbuild actions. Like actions, but implicitly depend on the target's
+ # output.
+ postbuilds = []
+ if self.flavor == 'mac':
+ if target_postbuilds:
+ postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))')
+ postbuilds.extend(
+ gyp.xcode_emulation.GetSpecPostbuildCommands(spec))
+
+ if postbuilds:
+ # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE),
+ # so we must output its definition first, since we declare variables
+ # using ":=".
+ self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv())
+
+ for configname in target_postbuilds:
+ self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' %
+ (QuoteSpaces(self.output),
+ configname,
+ gyp.common.EncodePOSIXShellList(target_postbuilds[configname])))
+
+ # Postbuilds expect to be run in the gyp file's directory, so insert an
+ # implicit postbuild to cd to there.
+ postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path]))
+ for i, postbuild in enumerate(postbuilds):
+ if not postbuild.startswith('$'):
+ postbuilds[i] = EscapeShellArgument(postbuild)
+ self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output))
+ self.WriteLn('%s: POSTBUILDS := %s' % (
+ QuoteSpaces(self.output), ' '.join(postbuilds)))
+
+ # A bundle directory depends on its dependencies such as bundle resources
+ # and bundle binary. When all dependencies have been built, the bundle
+ # needs to be packaged.
+ if self.is_mac_bundle:
+ # If the framework doesn't contain a binary, then nothing depends
+ # on the actions -- make the framework depend on them directly too.
+ self.WriteDependencyOnExtraOutputs(self.output, extra_outputs)
+
+ # Bundle dependencies. Note that the code below adds actions to this
+ # target, so if you move these two lines, move the lines below as well.
+ self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS')
+ self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output))
+
+ # After the framework is built, package it. Needs to happen before
+ # postbuilds, since postbuilds depend on this.
+ if self.type in ('shared_library', 'loadable_module'):
+ self.WriteLn('\t@$(call do_cmd,mac_package_framework,,,%s)' %
+ self.xcode_settings.GetFrameworkVersion())
+
+ # Bundle postbuilds can depend on the whole bundle, so run them after
+      # the bundle is packaged, not merely after the bundle binary is built.
+ if postbuilds:
+ self.WriteLn('\t@$(call do_postbuilds)')
+ postbuilds = [] # Don't write postbuilds for target's output.
+
+ # Needed by test/mac/gyptest-rebuild.py.
+ self.WriteLn('\t@true # No-op, used by tests')
+
+      # Since this target depends on binary and resources which are in
+      # nested subfolders, the framework directory will usually be older than
+      # its dependencies. To prevent this rule from executing on every build
+      # (expensive, especially with postbuilds), explicitly update the
+      # timestamp on the framework directory.
+      self.WriteLn('\t@touch -c %s' % QuoteSpaces(self.output))
+
+ if postbuilds:
+ assert not self.is_mac_bundle, ('Postbuilds for bundles should be done '
+ 'on the bundle, not the binary (target \'%s\')' % self.target)
+ assert 'product_dir' not in spec, ('Postbuilds do not work with '
+ 'custom product_dir')
+
+ if self.type == 'executable':
+ self.WriteLn('%s: LD_INPUTS := %s' % (
+ QuoteSpaces(self.output_binary),
+ ' '.join(map(QuoteSpaces, link_deps))))
+ if self.toolset == 'host' and self.flavor == 'android':
+ self.WriteDoCmd([self.output_binary], link_deps, 'link_host',
+ part_of_all, postbuilds=postbuilds)
+ else:
+ self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all,
+ postbuilds=postbuilds)
+
+ elif self.type == 'static_library':
+ for link_dep in link_deps:
+ assert ' ' not in link_dep, (
+ "Spaces in alink input filenames not supported (%s)" % link_dep)
+ if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
+ self.is_standalone_static_library):
+ self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin',
+ part_of_all, postbuilds=postbuilds)
+ else:
+ self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all,
+ postbuilds=postbuilds)
+ elif self.type == 'shared_library':
+ self.WriteLn('%s: LD_INPUTS := %s' % (
+ QuoteSpaces(self.output_binary),
+ ' '.join(map(QuoteSpaces, link_deps))))
+ self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all,
+ postbuilds=postbuilds)
+ elif self.type == 'loadable_module':
+ for link_dep in link_deps:
+ assert ' ' not in link_dep, (
+ "Spaces in module input filenames not supported (%s)" % link_dep)
+ if self.toolset == 'host' and self.flavor == 'android':
+ self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host',
+ part_of_all, postbuilds=postbuilds)
+ else:
+ self.WriteDoCmd(
+ [self.output_binary], link_deps, 'solink_module', part_of_all,
+ postbuilds=postbuilds)
+ elif self.type == 'none':
+ # Write a stamp line.
+ self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all,
+ postbuilds=postbuilds)
+ else:
+ print("WARNING: no output for", self.type, self.target)
+
+ # Add an alias for each target (if there are any outputs).
+ # Installable target aliases are created below.
+ if ((self.output and self.output != self.target) and
+ (self.type not in self._INSTALLABLE_TARGETS)):
+ self.WriteMakeRule([self.target], [self.output],
+ comment='Add target alias', phony = True)
+ if part_of_all:
+ self.WriteMakeRule(['all'], [self.target],
+ comment = 'Add target alias to "all" target.',
+ phony = True)
+
+ # Add special-case rules for our installable targets.
+ # 1) They need to install to the build dir or "product" dir.
+ # 2) They get shortcuts for building (e.g. "make chrome").
+ # 3) They are part of "make all".
+ if (self.type in self._INSTALLABLE_TARGETS or
+ self.is_standalone_static_library):
+ if self.type == 'shared_library':
+ file_desc = 'shared library'
+ elif self.type == 'static_library':
+ file_desc = 'static library'
+ else:
+ file_desc = 'executable'
+ install_path = self._InstallableTargetInstallPath()
+ installable_deps = [self.output]
+ if (self.flavor == 'mac' and not 'product_dir' in spec and
+ self.toolset == 'target'):
+ # On mac, products are created in install_path immediately.
+ assert install_path == self.output, '%s != %s' % (
+ install_path, self.output)
+
+ # Point the target alias to the final binary output.
+ self.WriteMakeRule([self.target], [install_path],
+ comment='Add target alias', phony = True)
+ if install_path != self.output:
+ assert not self.is_mac_bundle # See comment a few lines above.
+ self.WriteDoCmd([install_path], [self.output], 'copy',
+ comment = 'Copy this to the %s output path.' %
+ file_desc, part_of_all=part_of_all)
+ installable_deps.append(install_path)
+ if self.output != self.alias and self.alias != self.target:
+ self.WriteMakeRule([self.alias], installable_deps,
+ comment = 'Short alias for building this %s.' %
+ file_desc, phony = True)
+ if part_of_all:
+ self.WriteMakeRule(['all'], [install_path],
+ comment = 'Add %s to "all" target.' % file_desc,
+ phony = True)
+
+
+ def WriteList(self, value_list, variable=None, prefix='',
+ quoter=QuoteIfNecessary):
+ """Write a variable definition that is a list of values.
+
+ E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
+ foo = blaha blahb
+ but in a pretty-printed style.
+ """
+ values = ''
+ if value_list:
+ value_list = [quoter(prefix + l) for l in value_list]
+ values = ' \\\n\t' + ' \\\n\t'.join(value_list)
+ self.fp.write('%s :=%s\n\n' % (variable, values))
+
+
+ def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None,
+ postbuilds=False):
+ """Write a Makefile rule that uses do_cmd.
+
+    This makes the outputs dependent on the command line that was run,
+    and also supports the V= make command-line flag.
+ """
+ suffix = ''
+ if postbuilds:
+ assert ',' not in command
+ suffix = ',,1' # Tell do_cmd to honor $POSTBUILDS
+ self.WriteMakeRule(outputs, inputs,
+ actions = ['$(call do_cmd,%s%s)' % (command, suffix)],
+ comment = comment,
+ command = command,
+ force = True)
+ # Add our outputs to the list of targets we read depfiles from.
+ # all_deps is only used for deps file reading, and for deps files we replace
+ # spaces with ? because escaping doesn't work with make's $(sort) and
+ # other functions.
+ outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
+ self.WriteLn('all_deps += %s' % ' '.join(outputs))
+
+
+ def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
+ order_only=False, force=False, phony=False, command=None):
+ """Write a Makefile rule, with some extra tricks.
+
+ outputs: a list of outputs for the rule (note: this is not directly
+ supported by make; see comments below)
+ inputs: a list of inputs for the rule
+ actions: a list of shell commands to run for the rule
+ comment: a comment to put in the Makefile above the rule (also useful
+ for making this Python script's code self-documenting)
+ order_only: if true, makes the dependency order-only
+ force: if true, include FORCE_DO_CMD as an order-only dep
+ phony: if true, the rule does not actually generate the named output, the
+ output is just a name to run the rule
+ command: (optional) command name to generate unambiguous labels
+ """
+ outputs = [QuoteSpaces(o) for o in outputs]
+ inputs = map(QuoteSpaces, inputs)
+
+ if comment:
+ self.WriteLn('# ' + comment)
+ if phony:
+ self.WriteLn('.PHONY: ' + ' '.join(outputs))
+ if actions:
+ self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
+ force_append = ' FORCE_DO_CMD' if force else ''
+
+ if order_only:
+ # Order only rule: Just write a simple rule.
+ # TODO(evanm): just make order_only a list of deps instead of this hack.
+ self.WriteLn('%s: | %s%s' %
+ (' '.join(outputs), ' '.join(inputs), force_append))
+ elif len(outputs) == 1:
+ # Regular rule, one output: Just write a simple rule.
+ self.WriteLn('%s: %s%s' % (outputs[0], ' '.join(inputs), force_append))
+ else:
+ # Regular rule, more than one output: Multiple outputs are tricky in
+ # make. We will write three rules:
+ # - All outputs depend on an intermediate file.
+ # - Make .INTERMEDIATE depend on the intermediate.
+ # - The intermediate file depends on the inputs and executes the
+ # actual command.
+ # - The intermediate recipe will 'touch' the intermediate file.
+      # - The multi-output rule will have a do-nothing recipe.
+
+ # Hash the target name to avoid generating overlong filenames.
+ cmdstring = (command if command else self.target).encode('utf-8')
+ cmddigest = hashlib.sha1(cmdstring).hexdigest()
+ intermediate = "%s.intermediate" % (cmddigest)
+ self.WriteLn('%s: %s' % (' '.join(outputs), intermediate))
+      self.WriteLn('\t@:')
+ self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate))
+ self.WriteLn('%s: %s%s' %
+ (intermediate, ' '.join(inputs), force_append))
+ actions.insert(0, '$(call do_cmd,touch)')
+
+ if actions:
+ for action in actions:
+ self.WriteLn('\t%s' % action)
+ self.WriteLn()
+
+
+ def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
+ """Write a set of LOCAL_XXX definitions for Android NDK.
+
+ These variable definitions will be used by Android NDK but do nothing for
+ non-Android applications.
+
+ Arguments:
+ module_name: Android NDK module name, which must be unique among all
+ module names.
+ all_sources: A list of source files (will be filtered by Compilable).
+ link_deps: A list of link dependencies, which must be sorted in
+ the order from dependencies to dependents.
+ """
+ if self.type not in ('executable', 'shared_library', 'static_library'):
+ return
+
+ self.WriteLn('# Variable definitions for Android applications')
+ self.WriteLn('include $(CLEAR_VARS)')
+ self.WriteLn('LOCAL_MODULE := ' + module_name)
+ self.WriteLn('LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) '
+ '$(DEFS_$(BUILDTYPE)) '
+ # LOCAL_CFLAGS is applied to both of C and C++. There is
+ # no way to specify $(CFLAGS_C_$(BUILDTYPE)) only for C
+ # sources.
+ '$(CFLAGS_C_$(BUILDTYPE)) '
+ # $(INCS_$(BUILDTYPE)) includes the prefix '-I' while
+ # LOCAL_C_INCLUDES does not expect it. So put it in
+ # LOCAL_CFLAGS.
+ '$(INCS_$(BUILDTYPE))')
+ # LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred.
+ self.WriteLn('LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))')
+ self.WriteLn('LOCAL_C_INCLUDES :=')
+ self.WriteLn('LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)')
+
+ # Detect the C++ extension.
+ cpp_ext = {'.cc': 0, '.cpp': 0, '.cxx': 0}
+ default_cpp_ext = '.cpp'
+ for filename in all_sources:
+ ext = os.path.splitext(filename)[1]
+ if ext in cpp_ext:
+ cpp_ext[ext] += 1
+ if cpp_ext[ext] > cpp_ext[default_cpp_ext]:
+ default_cpp_ext = ext
+ self.WriteLn('LOCAL_CPP_EXTENSION := ' + default_cpp_ext)
+
+ self.WriteList(map(self.Absolutify, filter(Compilable, all_sources)),
+ 'LOCAL_SRC_FILES')
+
+    # Keep only the deps whose basenames match the given prefix and suffix,
+    # and strip the prefix and suffix from the returned module names.
+ def DepsToModules(deps, prefix, suffix):
+ modules = []
+ for filepath in deps:
+ filename = os.path.basename(filepath)
+ if filename.startswith(prefix) and filename.endswith(suffix):
+ modules.append(filename[len(prefix):-len(suffix)])
+ return modules
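+    # e.g. DepsToModules(['$(builddir)/lib.target/libfoo.so'], 'lib', '.so')
+    # returns ['foo'].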
+
+ # Retrieve the default value of 'SHARED_LIB_SUFFIX'
+ params = {'flavor': 'linux'}
+ default_variables = {}
+ CalculateVariables(default_variables, params)
+
+ self.WriteList(
+ DepsToModules(link_deps,
+ generator_default_variables['SHARED_LIB_PREFIX'],
+ default_variables['SHARED_LIB_SUFFIX']),
+ 'LOCAL_SHARED_LIBRARIES')
+ self.WriteList(
+ DepsToModules(link_deps,
+ generator_default_variables['STATIC_LIB_PREFIX'],
+ generator_default_variables['STATIC_LIB_SUFFIX']),
+ 'LOCAL_STATIC_LIBRARIES')
+
+ if self.type == 'executable':
+ self.WriteLn('include $(BUILD_EXECUTABLE)')
+ elif self.type == 'shared_library':
+ self.WriteLn('include $(BUILD_SHARED_LIBRARY)')
+ elif self.type == 'static_library':
+ self.WriteLn('include $(BUILD_STATIC_LIBRARY)')
+ self.WriteLn()
+
+
+ def WriteLn(self, text=''):
+ self.fp.write(text + '\n')
+
+
+ def GetSortedXcodeEnv(self, additional_settings=None):
+ return gyp.xcode_emulation.GetSortedXcodeEnv(
+ self.xcode_settings, "$(abs_builddir)",
+ os.path.join("$(abs_srcdir)", self.path), "$(BUILDTYPE)",
+ additional_settings)
+
+
+ def GetSortedXcodePostbuildEnv(self):
+ # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
+ # TODO(thakis): It would be nice to have some general mechanism instead.
+ strip_save_file = self.xcode_settings.GetPerTargetSetting(
+ 'CHROMIUM_STRIP_SAVE_FILE', '')
+ # Even if strip_save_file is empty, explicitly write it. Else a postbuild
+ # might pick up an export from an earlier target.
+ return self.GetSortedXcodeEnv(
+ additional_settings={'CHROMIUM_STRIP_SAVE_FILE': strip_save_file})
+
+
+ def WriteSortedXcodeEnv(self, target, env):
+ for k, v in env:
+ # For
+ # foo := a\ b
+ # the escaped space does the right thing. For
+ # export foo := a\ b
+      # it does not -- the backslash is written to the env as a literal character.
+ # So don't escape spaces in |env[k]|.
+ self.WriteLn('%s: export %s := %s' % (QuoteSpaces(target), k, v))
+
+
+ def Objectify(self, path):
+ """Convert a path to its output directory form."""
+ if '$(' in path:
+ path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset)
+ if not '$(obj)' in path:
+ path = '$(obj).%s/$(TARGET)/%s' % (self.toolset, path)
+ return path
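+    # e.g. with the 'target' toolset, Objectify('foo/bar.o') returns
+    # '$(obj).target/$(TARGET)/foo/bar.o'.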
+
+
+ def Pchify(self, path, lang):
+ """Convert a prefix header path to its output directory form."""
+ path = self.Absolutify(path)
+ if '$(' in path:
+ path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/pch-%s' %
+ (self.toolset, lang))
+ return path
+ return '$(obj).%s/$(TARGET)/pch-%s/%s' % (self.toolset, lang, path)
+
+
+ def Absolutify(self, path):
+ """Convert a subdirectory-relative path into a base-relative path.
+ Skips over paths that contain variables."""
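+    # Illustrative example: with self.path of 'foo/bar', 'baz.cc' becomes
+    # 'foo/bar/baz.cc', while '$(obj)/x/' merely loses its trailing slash.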
+ if '$(' in path:
+ # Don't call normpath in this case, as it might collapse the
+ # path too aggressively if it features '..'. However it's still
+ # important to strip trailing slashes.
+ return path.rstrip('/')
+ return os.path.normpath(os.path.join(self.path, path))
+
+
+ def ExpandInputRoot(self, template, expansion, dirname):
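+    # Illustrative example: ExpandInputRoot('%(INPUT_ROOT)s.h', 'foo', 'gen')
+    # returns 'foo.h'; templates without either placeholder are returned
+    # unchanged.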
+ if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
+ return template
+ path = template % {
+ 'INPUT_ROOT': expansion,
+ 'INPUT_DIRNAME': dirname,
+ }
+ return path
+
+
+ def _InstallableTargetInstallPath(self):
+ """Returns the location of the final output for an installable target."""
+ # Xcode puts shared_library results into PRODUCT_DIR, and some gyp files
+ # rely on this. Emulate this behavior for mac.
+ if (self.type == 'shared_library' and
+ (self.flavor != 'mac' or self.toolset != 'target')):
+ # Install all shared libs into a common directory (per toolset) for
+ # convenient access with LD_LIBRARY_PATH.
+ return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
+ return '$(builddir)/' + self.alias
+
+
+def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
+ build_files):
+ """Write the target to regenerate the Makefile."""
+ options = params['options']
+ build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir)
+ for filename in params['build_files_arg']]
+
+ gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
+ options.toplevel_dir)
+ if not gyp_binary.startswith(os.sep):
+ gyp_binary = os.path.join('.', gyp_binary)
+
+ root_makefile.write(
+ "quiet_cmd_regen_makefile = ACTION Regenerating $@\n"
+ "cmd_regen_makefile = cd $(srcdir); %(cmd)s\n"
+ "%(makefile_name)s: %(deps)s\n"
+ "\t$(call do_cmd,regen_makefile)\n\n" % {
+ 'makefile_name': makefile_name,
+ 'deps': ' '.join(map(Sourceify, build_files)),
+ 'cmd': gyp.common.EncodePOSIXShellList(
+ [gyp_binary, '-fmake'] +
+ gyp.RegenerateFlags(options) +
+ build_files_args)})
+
+
+def PerformBuild(data, configurations, params):
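+  # Illustrative example: with a toplevel_dir of 'src' and configurations
+  # ['Debug', 'Release'], this runs "make -C src BUILDTYPE=Debug" followed by
+  # "make -C src BUILDTYPE=Release".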
+ options = params['options']
+ for config in configurations:
+ arguments = ['make']
+ if options.toplevel_dir and options.toplevel_dir != '.':
+ arguments += '-C', options.toplevel_dir
+ arguments.append('BUILDTYPE=' + config)
+ print('Building [%s]: %s' % (config, arguments))
+ subprocess.check_call(arguments)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ options = params['options']
+ flavor = gyp.common.GetFlavor(params)
+ generator_flags = params.get('generator_flags', {})
+ builddir_name = generator_flags.get('output_dir', 'out')
+ android_ndk_version = generator_flags.get('android_ndk_version', None)
+ default_target = generator_flags.get('default_target', 'all')
+
+ def CalculateMakefilePath(build_file, base_name):
+ """Determine where to write a Makefile for a given gyp file."""
+ # Paths in gyp files are relative to the .gyp file, but we want
+ # paths relative to the source root for the master makefile. Grab
+ # the path of the .gyp file as the base to relativize against.
+ # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
+ base_path = gyp.common.RelativePath(os.path.dirname(build_file),
+ options.depth)
+ # We write the file in the base_path directory.
+ output_file = os.path.join(options.depth, base_path, base_name)
+ if options.generator_output:
+ output_file = os.path.join(
+ options.depth, options.generator_output, base_path, base_name)
+ base_path = gyp.common.RelativePath(os.path.dirname(build_file),
+ options.toplevel_dir)
+ return base_path, output_file
+
+ # TODO: search for the first non-'Default' target. This can go
+ # away when we add verification that all targets have the
+ # necessary configurations.
+ default_configuration = None
+ toolsets = set([target_dicts[target]['toolset'] for target in target_list])
+ for target in target_list:
+ spec = target_dicts[target]
+ if spec['default_configuration'] != 'Default':
+ default_configuration = spec['default_configuration']
+ break
+ if not default_configuration:
+ default_configuration = 'Default'
+
+ srcdir = '.'
+ makefile_name = 'Makefile' + options.suffix
+ makefile_path = os.path.join(options.toplevel_dir, makefile_name)
+ if options.generator_output:
+ global srcdir_prefix
+ makefile_path = os.path.join(
+ options.toplevel_dir, options.generator_output, makefile_name)
+ srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
+ srcdir_prefix = '$(srcdir)/'
+
+  flock_command = 'flock'
+ copy_archive_arguments = '-af'
+ makedep_arguments = '-MMD'
+ header_params = {
+ 'default_target': default_target,
+ 'builddir': builddir_name,
+ 'default_configuration': default_configuration,
+ 'flock': flock_command,
+ 'flock_index': 1,
+ 'link_commands': LINK_COMMANDS_LINUX,
+ 'extra_commands': '',
+ 'srcdir': srcdir,
+ 'copy_archive_args': copy_archive_arguments,
+ 'makedep_args': makedep_arguments,
+ }
+ if flavor == 'mac':
+ flock_command = './gyp-mac-tool flock'
+ header_params.update({
+ 'flock': flock_command,
+ 'flock_index': 2,
+ 'link_commands': LINK_COMMANDS_MAC,
+ 'extra_commands': SHARED_HEADER_MAC_COMMANDS,
+ })
+ elif flavor == 'android':
+ header_params.update({
+ 'link_commands': LINK_COMMANDS_ANDROID,
+ })
+ elif flavor == 'zos':
+ copy_archive_arguments = '-fPR'
+ makedep_arguments = '-qmakedep=gcc'
+ header_params.update({
+ 'copy_archive_args': copy_archive_arguments,
+ 'makedep_args': makedep_arguments,
+ 'link_commands': LINK_COMMANDS_OS390,
+ })
+ elif flavor == 'solaris':
+ header_params.update({
+ 'flock': './gyp-flock-tool flock',
+ 'flock_index': 2,
+ })
+ elif flavor == 'freebsd':
+ # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific.
+ header_params.update({
+ 'flock': 'lockf',
+ })
+ elif flavor == 'openbsd':
+ copy_archive_arguments = '-pPRf'
+ header_params.update({
+ 'copy_archive_args': copy_archive_arguments,
+ })
+ elif flavor == 'aix':
+ copy_archive_arguments = '-pPRf'
+ header_params.update({
+ 'copy_archive_args': copy_archive_arguments,
+ 'link_commands': LINK_COMMANDS_AIX,
+ 'flock': './gyp-flock-tool flock',
+ 'flock_index': 2,
+ })
+
+ header_params.update({
+ 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'),
+ 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),
+ 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'),
+ 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'),
+ 'CC.host': GetEnvironFallback(('CC_host',), 'gcc'),
+ 'AR.host': GetEnvironFallback(('AR_host',), 'ar'),
+ 'CXX.host': GetEnvironFallback(('CXX_host',), 'g++'),
+ 'LINK.host': GetEnvironFallback(('LINK_host',), '$(CXX.host)'),
+ })
+
+ build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
+ make_global_settings_array = data[build_file].get('make_global_settings', [])
+ wrappers = {}
+ for key, value in make_global_settings_array:
+ if key.endswith('_wrapper'):
+ wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value
+ make_global_settings = ''
+ for key, value in make_global_settings_array:
+ if re.match('.*_wrapper', key):
+ continue
+ if value[0] != '$':
+ value = '$(abspath %s)' % value
+ wrapper = wrappers.get(key)
+ if wrapper:
+ value = '%s %s' % (wrapper, value)
+ del wrappers[key]
+ if key in ('CC', 'CC.host', 'CXX', 'CXX.host'):
+ make_global_settings += (
+ 'ifneq (,$(filter $(origin %s), undefined default))\n' % key)
+ # Let gyp-time envvars win over global settings.
+ env_key = key.replace('.', '_') # CC.host -> CC_host
+ if env_key in os.environ:
+ value = os.environ[env_key]
+ make_global_settings += ' %s = %s\n' % (key, value)
+ make_global_settings += 'endif\n'
+ else:
+ make_global_settings += '%s ?= %s\n' % (key, value)
+ # TODO(ukai): define cmd when only wrapper is specified in
+ # make_global_settings.
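+  # Illustrative example (hypothetical path): when CC is not already set in
+  # the environment, the entry ['CC', 'tools/cc'] expands to roughly:
+  #   ifneq (,$(filter $(origin CC), undefined default))
+  #     CC = $(abspath tools/cc)
+  #   endif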
+
+ header_params['make_global_settings'] = make_global_settings
+
+ gyp.common.EnsureDirExists(makefile_path)
+ root_makefile = open(makefile_path, 'w')
+ root_makefile.write(SHARED_HEADER % header_params)
+  # Currently all versions have the same effect, but in the future the
+  # behavior could differ.
+ if android_ndk_version:
+ root_makefile.write(
+ '# Define LOCAL_PATH for build of Android applications.\n'
+ 'LOCAL_PATH := $(call my-dir)\n'
+ '\n')
+ for toolset in toolsets:
+ root_makefile.write('TOOLSET := %s\n' % toolset)
+ WriteRootHeaderSuffixRules(root_makefile)
+
+ # Put build-time support tools next to the root Makefile.
+ dest_path = os.path.dirname(makefile_path)
+ gyp.common.CopyTool(flavor, dest_path)
+
+ # Find the list of targets that derive from the gyp file(s) being built.
+ needed_targets = set()
+ for build_file in params['build_files']:
+ for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
+ needed_targets.add(target)
+
+ build_files = set()
+ include_list = set()
+ for qualified_target in target_list:
+ build_file, target, toolset = gyp.common.ParseQualifiedTarget(
+ qualified_target)
+
+ this_make_global_settings = data[build_file].get('make_global_settings', [])
+ assert make_global_settings_array == this_make_global_settings, (
+ "make_global_settings needs to be the same for all targets. %s vs. %s" %
+ (this_make_global_settings, make_global_settings))
+
+ build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
+ included_files = data[build_file]['included_files']
+ for included_file in included_files:
+ # The included_files entries are relative to the dir of the build file
+ # that included them, so we have to undo that and then make them relative
+ # to the root dir.
+ relative_include_file = gyp.common.RelativePath(
+ gyp.common.UnrelativePath(included_file, build_file),
+ options.toplevel_dir)
+ abs_include_file = os.path.abspath(relative_include_file)
+ # If the include file is from the ~/.gyp dir, we should use absolute path
+ # so that relocating the src dir doesn't break the path.
+ if (params['home_dot_gyp'] and
+ abs_include_file.startswith(params['home_dot_gyp'])):
+ build_files.add(abs_include_file)
+ else:
+ build_files.add(relative_include_file)
+
+ base_path, output_file = CalculateMakefilePath(build_file,
+ target + '.' + toolset + options.suffix + '.mk')
+
+ spec = target_dicts[qualified_target]
+ configs = spec['configurations']
+
+ if flavor == 'mac':
+ gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
+
+ writer = MakefileWriter(generator_flags, flavor)
+ writer.Write(qualified_target, base_path, output_file, spec, configs,
+ part_of_all=qualified_target in needed_targets)
+
+ # Our root_makefile lives at the source root. Compute the relative path
+ # from there to the output_file for including.
+ mkfile_rel_path = gyp.common.RelativePath(output_file,
+ os.path.dirname(makefile_path))
+ include_list.add(mkfile_rel_path)
+
+ # Write out per-gyp (sub-project) Makefiles.
+ depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd())
+ for build_file in build_files:
+ # The paths in build_files were relativized above, so undo that before
+ # testing against the non-relativized items in target_list and before
+ # calculating the Makefile path.
+ build_file = os.path.join(depth_rel_path, build_file)
+ gyp_targets = [target_dicts[target]['target_name'] for target in target_list
+ if target.startswith(build_file) and
+ target in needed_targets]
+ # Only generate Makefiles for gyp files with targets.
+ if not gyp_targets:
+ continue
+ base_path, output_file = CalculateMakefilePath(build_file,
+ os.path.splitext(os.path.basename(build_file))[0] + '.Makefile')
+ makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path),
+ os.path.dirname(output_file))
+ writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets,
+ builddir_name)
+
+
+ # Write out the sorted list of includes.
+ root_makefile.write('\n')
+ for include_file in sorted(include_list):
+ # We wrap each .mk include in an if statement so users can tell make to
+ # not load a file by setting NO_LOAD. The below make code says, only
+ # load the .mk file if the .mk filename doesn't start with a token in
+ # NO_LOAD.
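+    # Illustrative example: running make with NO_LOAD=foo skips any include
+    # whose path starts with 'foo', while other .mk files still load.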
+ root_makefile.write(
+ "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n"
+ " $(findstring $(join ^,$(prefix)),\\\n"
+ " $(join ^," + include_file + ")))),)\n")
+ root_makefile.write(" include " + include_file + "\n")
+ root_makefile.write("endif\n")
+ root_makefile.write('\n')
+
+ if (not generator_flags.get('standalone')
+ and generator_flags.get('auto_regeneration', True)):
+ WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
+
+ root_makefile.write(SHARED_FOOTER)
+
+ root_makefile.close()
diff --git a/third_party/python/gyp/pylib/gyp/generator/msvs.py b/third_party/python/gyp/pylib/gyp/generator/msvs.py
new file mode 100644
index 0000000000..843e706780
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/msvs.py
@@ -0,0 +1,3537 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import collections
+import copy
+import ntpath
+import os
+import posixpath
+import re
+import subprocess
+import sys
+
+import gyp.common
+import gyp.easy_xml as easy_xml
+import gyp.generator.ninja as ninja_generator
+import gyp.MSVSNew as MSVSNew
+import gyp.MSVSProject as MSVSProject
+import gyp.MSVSSettings as MSVSSettings
+import gyp.MSVSToolFile as MSVSToolFile
+import gyp.MSVSUserFile as MSVSUserFile
+import gyp.MSVSUtil as MSVSUtil
+import gyp.MSVSVersion as MSVSVersion
+from gyp.common import GypError
+from gyp.common import OrderedSet
+
+
+# Regular expression for validating Visual Studio GUIDs. If the GUID
+# contains lowercase hex letters, MSVS will be fine. However,
+# IncrediBuild BuildConsole will parse the solution file, but then
+# silently skip building the target causing hard to track down errors.
+# Note that this only happens with the BuildConsole, and does not occur
+# if IncrediBuild is executed from inside Visual Studio. This regex
+# validates that the string looks like a GUID with all uppercase hex
+# letters.
+VALID_MSVS_GUID_CHARS = re.compile(r'^[A-F0-9\-]+$')
+
+
+generator_default_variables = {
+ 'DRIVER_PREFIX': '',
+ 'DRIVER_SUFFIX': '.sys',
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '.exe',
+ 'STATIC_LIB_PREFIX': '',
+ 'SHARED_LIB_PREFIX': '',
+ 'STATIC_LIB_SUFFIX': '.lib',
+ 'SHARED_LIB_SUFFIX': '.dll',
+ 'INTERMEDIATE_DIR': '$(IntDir)',
+ 'SHARED_INTERMEDIATE_DIR': '$(OutDir)obj/global_intermediate',
+ 'OS': 'win',
+ 'PRODUCT_DIR': '$(OutDir)',
+ 'LIB_DIR': '$(OutDir)lib',
+ 'RULE_INPUT_ROOT': '$(InputName)',
+ 'RULE_INPUT_DIRNAME': '$(InputDir)',
+ 'RULE_INPUT_EXT': '$(InputExt)',
+ 'RULE_INPUT_NAME': '$(InputFileName)',
+ 'RULE_INPUT_PATH': '$(InputPath)',
+ 'CONFIGURATION_NAME': '$(ConfigurationName)',
+}
+
+
+# The msvs specific sections that hold paths
+generator_additional_path_sections = [
+ 'msvs_cygwin_dirs',
+ 'msvs_props',
+]
+
+
+generator_additional_non_configuration_keys = [
+ 'msvs_cygwin_dirs',
+ 'msvs_cygwin_shell',
+ 'msvs_large_pdb',
+ 'msvs_shard',
+ 'msvs_external_builder',
+ 'msvs_external_builder_out_dir',
+ 'msvs_external_builder_build_cmd',
+ 'msvs_external_builder_clean_cmd',
+ 'msvs_external_builder_clcompile_cmd',
+ 'msvs_enable_winrt',
+ 'msvs_requires_importlibrary',
+ 'msvs_enable_winphone',
+ 'msvs_application_type_revision',
+ 'msvs_target_platform_version',
+ 'msvs_target_platform_minversion',
+]
+
+generator_filelist_paths = None
+
+# List of precompiled header related keys.
+precomp_keys = [
+ 'msvs_precompiled_header',
+ 'msvs_precompiled_source',
+]
+
+
+cached_username = None
+
+
+cached_domain = None
+
+
+# TODO(gspencer): Switch the os.environ calls to be
+# win32api.GetDomainName() and win32api.GetUserName() once the
+# python version in depot_tools has been updated to work on Vista
+# 64-bit.
+def _GetDomainAndUserName():
+ if sys.platform not in ('win32', 'cygwin'):
+ return ('DOMAIN', 'USERNAME')
+ global cached_username
+ global cached_domain
+ if not cached_domain or not cached_username:
+ domain = os.environ.get('USERDOMAIN')
+ username = os.environ.get('USERNAME')
+ if not domain or not username:
+ call = subprocess.Popen(['net', 'config', 'Workstation'],
+ stdout=subprocess.PIPE)
+ config = call.communicate()[0]
+ username_re = re.compile(r'^User name\s+(\S+)', re.MULTILINE)
+ username_match = username_re.search(config)
+ if username_match:
+ username = username_match.group(1)
+ domain_re = re.compile(r'^Logon domain\s+(\S+)', re.MULTILINE)
+ domain_match = domain_re.search(config)
+ if domain_match:
+ domain = domain_match.group(1)
+ cached_domain = domain
+ cached_username = username
+ return (cached_domain, cached_username)
+
+fixpath_prefix = None
+
+
+def _NormalizedSource(source):
+ """Normalize the path.
+
+ But not if that gets rid of a variable, as this may expand to something
+ larger than one directory.
+
+ Arguments:
+    source: The path to be normalized.
+
+ Returns:
+ The normalized path.
+ """
+ normalized = os.path.normpath(source)
+ if source.count('$') == normalized.count('$'):
+ source = normalized
+ return source
+
+
+def _FixPath(path):
+ """Convert paths to a form that will make sense in a vcproj file.
+
+ Arguments:
+ path: The path to convert, may contain / etc.
+ Returns:
+ The path with all slashes made into backslashes.
+ """
+ if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$':
+ path = os.path.join(fixpath_prefix, path)
+ path = path.replace('/', '\\')
+ path = _NormalizedSource(path)
+ if path and path[-1] == '\\':
+ path = path[:-1]
+ return path
+
+
+def _FixPaths(paths):
+ """Fix each of the paths of the list."""
+ return [_FixPath(i) for i in paths]
+
+
+def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
+ list_excluded=True, msvs_version=None):
+ """Converts a list split source file paths into a vcproj folder hierarchy.
+
+ Arguments:
+    sources: A list of source file paths, each split into path components.
+ prefix: A list of source file path layers meant to apply to each of sources.
+ excluded: A set of excluded files.
+ msvs_version: A MSVSVersion object.
+
+ Returns:
+ A hierarchy of filenames and MSVSProject.Filter objects that matches the
+ layout of the source tree.
+ For example:
+ _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
+ prefix=['joe'])
+ -->
+ [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
+ MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
+ """
+ if not prefix: prefix = []
+ result = []
+ excluded_result = []
+ folders = collections.OrderedDict()
+ # Gather files into the final result, excluded, or folders.
+ for s in sources:
+ if len(s) == 1:
+ filename = _NormalizedSource('\\'.join(prefix + s))
+ if filename in excluded:
+ excluded_result.append(filename)
+ else:
+ result.append(filename)
+ elif msvs_version and not msvs_version.UsesVcxproj():
+ # For MSVS 2008 and earlier, we need to process all files before walking
+ # the sub folders.
+ if not folders.get(s[0]):
+ folders[s[0]] = []
+ folders[s[0]].append(s[1:])
+ else:
+ contents = _ConvertSourcesToFilterHierarchy([s[1:]], prefix + [s[0]],
+ excluded=excluded,
+ list_excluded=list_excluded,
+ msvs_version=msvs_version)
+ contents = MSVSProject.Filter(s[0], contents=contents)
+ result.append(contents)
+ # Add a folder for excluded files.
+ if excluded_result and list_excluded:
+ excluded_folder = MSVSProject.Filter('_excluded_files',
+ contents=excluded_result)
+ result.append(excluded_folder)
+
+ if msvs_version and msvs_version.UsesVcxproj():
+ return result
+
+ # Populate all the folders.
+ for f in folders:
+ contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
+ excluded=excluded,
+ list_excluded=list_excluded,
+ msvs_version=msvs_version)
+ contents = MSVSProject.Filter(f, contents=contents)
+ result.append(contents)
+ return result
+
+
+def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
+ if not value: return
+ _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset)
+
+
+def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
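+  # Illustrative example: appending ['/MP'] twice under the same tool and
+  # setting yields ['/MP', '/MP']; appending a list onto an existing non-list
+  # value raises TypeError.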
+ # TODO(bradnelson): ugly hack, fix this more generally!!!
+ if 'Directories' in setting or 'Dependencies' in setting:
+ if type(value) == str:
+ value = value.replace('/', '\\')
+ else:
+ value = [i.replace('/', '\\') for i in value]
+ if not tools.get(tool_name):
+ tools[tool_name] = dict()
+ tool = tools[tool_name]
+ if 'CompileAsWinRT' == setting:
+ return
+ if tool.get(setting):
+ if only_if_unset: return
+ if type(tool[setting]) == list and type(value) == list:
+ tool[setting] += value
+ else:
+ raise TypeError(
+ 'Appending "%s" to a non-list setting "%s" for tool "%s" is '
+ 'not allowed, previous value: %s' % (
+ value, setting, tool_name, str(tool[setting])))
+ else:
+ tool[setting] = value
+
+
+def _ConfigTargetVersion(config_data):
+ return config_data.get('msvs_target_version', 'Windows7')
+
+
+def _ConfigPlatform(config_data):
+ return config_data.get('msvs_configuration_platform', 'Win32')
+
+
+def _ConfigBaseName(config_name, platform_name):
+ if config_name.endswith('_' + platform_name):
+ return config_name[0:-len(platform_name) - 1]
+ else:
+ return config_name
+
+
+def _ConfigFullName(config_name, config_data):
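+  # Illustrative example: _ConfigFullName('Debug_x64',
+  # {'msvs_configuration_platform': 'x64'}) returns 'Debug|x64'.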
+ platform_name = _ConfigPlatform(config_data)
+ return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name)
+
+
+def _ConfigWindowsTargetPlatformVersion(config_data, version):
+ config_ver = config_data.get('msvs_windows_sdk_version')
+ vers = [config_ver] if config_ver else version.compatible_sdks
+ for ver in vers:
+ for key in [
+ r'HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s',
+ r'HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s']:
+ sdk_dir = MSVSVersion._RegistryGetValue(key % ver, 'InstallationFolder')
+ if not sdk_dir:
+ continue
+ version = MSVSVersion._RegistryGetValue(key % ver, 'ProductVersion') or ''
+ # Find a matching entry in sdk_dir\include.
+      expected_sdk_dir = r'%s\include' % sdk_dir
+ names = sorted([x for x in (os.listdir(expected_sdk_dir)
+ if os.path.isdir(expected_sdk_dir)
+ else []
+ )
+ if x.startswith(version)], reverse=True)
+ if names:
+ return names[0]
+ else:
+ print('Warning: No include files found for '
+ 'detected Windows SDK version %s' % (version))
+
+
+def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
+ quote_cmd, do_setup_env):
+
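+  # Illustrative example (non-cygwin, assuming no fixpath prefix is set): the
+  # action ['cat', 'in.txt'] with quote_cmd becomes 'call type "in.txt"'.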
+ if [x for x in cmd if '$(InputDir)' in x]:
+ input_dir_preamble = (
+ 'set INPUTDIR=$(InputDir)\n'
+ 'if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n'
+ 'set INPUTDIR=%INPUTDIR:~0,-1%\n'
+ )
+ else:
+ input_dir_preamble = ''
+
+ if cygwin_shell:
+ # Find path to cygwin.
+ cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
+ # Prepare command.
+ direct_cmd = cmd
+ direct_cmd = [i.replace('$(IntDir)',
+ '`cygpath -m "${INTDIR}"`') for i in direct_cmd]
+ direct_cmd = [i.replace('$(OutDir)',
+ '`cygpath -m "${OUTDIR}"`') for i in direct_cmd]
+ direct_cmd = [i.replace('$(InputDir)',
+ '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd]
+ if has_input_path:
+ direct_cmd = [i.replace('$(InputPath)',
+ '`cygpath -m "${INPUTPATH}"`')
+ for i in direct_cmd]
+ direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd]
+ # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
+ direct_cmd = ' '.join(direct_cmd)
+ # TODO(quote): regularize quoting path names throughout the module
+ cmd = ''
+ if do_setup_env:
+ cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
+ cmd += 'set CYGWIN=nontsec&& '
+ if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0:
+ cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& '
+ if direct_cmd.find('INTDIR') >= 0:
+ cmd += 'set INTDIR=$(IntDir)&& '
+ if direct_cmd.find('OUTDIR') >= 0:
+ cmd += 'set OUTDIR=$(OutDir)&& '
+ if has_input_path and direct_cmd.find('INPUTPATH') >= 0:
+ cmd += 'set INPUTPATH=$(InputPath) && '
+ cmd += 'bash -c "%(cmd)s"'
+ cmd = cmd % {'cygwin_dir': cygwin_dir,
+ 'cmd': direct_cmd}
+ return input_dir_preamble + cmd
+ else:
+ # Convert cat --> type to mimic unix.
+ if cmd[0] == 'cat':
+ command = ['type']
+ else:
+ command = [cmd[0].replace('/', '\\')]
+ # Add call before command to ensure that commands can be tied together one
+ # after the other without aborting in Incredibuild, since IB makes a bat
+ # file out of the raw command string, and some commands (like python) are
+ # actually batch files themselves.
+ command.insert(0, 'call')
+ # Fix the paths
+ # TODO(quote): This is a really ugly heuristic, and will miss path fixing
+ # for arguments like "--arg=path" or "/opt:path".
+ # If the argument starts with a slash or dash, it's probably a command line
+ # switch
+ arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]]
+ arguments = [i.replace('$(InputDir)', '%INPUTDIR%') for i in arguments]
+ arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments]
+ if quote_cmd:
+ # Support a mode for using cmd directly.
+ # Convert any paths to native form (first element is used directly).
+ # TODO(quote): regularize quoting path names throughout the module
+ arguments = ['"%s"' % i for i in arguments]
+ # Collapse into a single command.
+ return input_dir_preamble + ' '.join(command + arguments)
+
+
+def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env):
+ # Currently this weird argument munging is used to duplicate the way a
+ # python script would need to be run as part of the chrome tree.
+ # Eventually we should add some sort of rule_default option to set this
+ # per project. For now the behavior chrome needs is the default.
+ mcs = rule.get('msvs_cygwin_shell')
+ if mcs is None:
+ mcs = int(spec.get('msvs_cygwin_shell', 1))
+ elif isinstance(mcs, str):
+ mcs = int(mcs)
+ quote_cmd = int(rule.get('msvs_quote_cmd', 1))
+ return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path,
+ quote_cmd, do_setup_env=do_setup_env)
+
+
+def _AddActionStep(actions_dict, inputs, outputs, description, command):
+ """Merge action into an existing list of actions.
+
+ Care must be taken so that actions which have overlapping inputs either don't
+ get assigned to the same input, or get collapsed into one.
+
+ Arguments:
+ actions_dict: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
+ inputs: list of inputs
+ outputs: list of outputs
+ description: description of the action
+ command: command line to execute
+ """
+ # Require there to be at least one input (call sites will ensure this).
+ assert inputs
+
+ action = {
+ 'inputs': inputs,
+ 'outputs': outputs,
+ 'description': description,
+ 'command': command,
+ }
+
+ # Pick where to stick this action.
+ # While less than optimal in terms of build time, attach them to the first
+ # input for now.
+ chosen_input = inputs[0]
+
+ # Add it there.
+ if chosen_input not in actions_dict:
+ actions_dict[chosen_input] = []
+ actions_dict[chosen_input].append(action)
+
+
+def _AddCustomBuildToolForMSVS(p, spec, primary_input,
+ inputs, outputs, description, cmd):
+ """Add a custom build tool to execute something.
+
+ Arguments:
+ p: the target project
+ spec: the target project dict
+ primary_input: input file to attach the build tool to
+ inputs: list of inputs
+ outputs: list of outputs
+ description: description of the action
+ cmd: command line to execute
+ """
+ inputs = _FixPaths(inputs)
+ outputs = _FixPaths(outputs)
+ tool = MSVSProject.Tool(
+ 'VCCustomBuildTool',
+ {'Description': description,
+ 'AdditionalDependencies': ';'.join(inputs),
+ 'Outputs': ';'.join(outputs),
+ 'CommandLine': cmd,
+ })
+ # Add to the properties of primary input for each config.
+ for config_name, c_data in spec['configurations'].items():
+ p.AddFileConfig(_FixPath(primary_input),
+ _ConfigFullName(config_name, c_data), tools=[tool])
+
+
+def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
+ """Add actions accumulated into an actions_dict, merging as needed.
+
+ Arguments:
+ p: the target project
+ spec: the target project dict
+ actions_dict: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
+ """
+ for primary_input in actions_dict:
+ inputs = OrderedSet()
+ outputs = OrderedSet()
+ descriptions = []
+ commands = []
+ for action in actions_dict[primary_input]:
+ inputs.update(OrderedSet(action['inputs']))
+ outputs.update(OrderedSet(action['outputs']))
+ descriptions.append(action['description'])
+ commands.append(action['command'])
+ # Add the custom build step for one input file.
+ description = ', and also '.join(descriptions)
+ command = '\r\n'.join(commands)
+ _AddCustomBuildToolForMSVS(p, spec,
+ primary_input=primary_input,
+ inputs=inputs,
+ outputs=outputs,
+ description=description,
+ cmd=command)
+
+
+def _RuleExpandPath(path, input_file):
+ """Given the input file to which a rule applied, string substitute a path.
+
+ Arguments:
+ path: a path to string expand
+ input_file: the file to which the rule applied.
+ Returns:
+ The string substituted path.
+ """
+ path = path.replace('$(InputName)',
+ os.path.splitext(os.path.split(input_file)[1])[0])
+ path = path.replace('$(InputDir)', os.path.dirname(input_file))
+ path = path.replace('$(InputExt)',
+ os.path.splitext(os.path.split(input_file)[1])[1])
+ path = path.replace('$(InputFileName)', os.path.split(input_file)[1])
+ path = path.replace('$(InputPath)', input_file)
+ return path
+
+
+def _FindRuleTriggerFiles(rule, sources):
+ """Find the list of files which a particular rule applies to.
+
+ Arguments:
+ rule: the rule in question
+ sources: the set of all known source files for this project
+ Returns:
+ The list of sources that trigger a particular rule.
+ """
+ return rule.get('rule_sources', [])
+
+
+def _RuleInputsAndOutputs(rule, trigger_file):
+ """Find the inputs and outputs generated by a rule.
+
+ Arguments:
+ rule: the rule in question.
+ trigger_file: the main trigger for this rule.
+ Returns:
+ The pair of (inputs, outputs) involved in this rule.
+ """
+ raw_inputs = _FixPaths(rule.get('inputs', []))
+ raw_outputs = _FixPaths(rule.get('outputs', []))
+ inputs = OrderedSet()
+ outputs = OrderedSet()
+ inputs.add(trigger_file)
+ for i in raw_inputs:
+ inputs.add(_RuleExpandPath(i, trigger_file))
+ for o in raw_outputs:
+ outputs.add(_RuleExpandPath(o, trigger_file))
+ return (inputs, outputs)
+
+
+def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
+ """Generate a native rules file.
+
+ Arguments:
+ p: the target project
+ rules: the set of rules to include
+ output_dir: the directory in which the project/gyp resides
+ spec: the project dict
+ options: global generator options
+ """
+ rules_filename = '%s%s.rules' % (spec['target_name'],
+ options.suffix)
+ rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename),
+ spec['target_name'])
+ # Add each rule.
+ for r in rules:
+ rule_name = r['rule_name']
+ rule_ext = r['extension']
+ inputs = _FixPaths(r.get('inputs', []))
+ outputs = _FixPaths(r.get('outputs', []))
+ # Skip a rule with no action and no inputs.
+ if 'action' not in r and not r.get('rule_sources', []):
+ continue
+ cmd = _BuildCommandLineForRule(spec, r, has_input_path=True,
+ do_setup_env=True)
+ rules_file.AddCustomBuildRule(name=rule_name,
+ description=r.get('message', rule_name),
+ extensions=[rule_ext],
+ additional_dependencies=inputs,
+ outputs=outputs,
+ cmd=cmd)
+ # Write out rules file.
+ rules_file.WriteIfChanged()
+
+ # Add rules file to project.
+ p.AddToolFile(rules_filename)
+
+
+def _Cygwinify(path):
+ path = path.replace('$(OutDir)', '$(OutDirCygwin)')
+ path = path.replace('$(IntDir)', '$(IntDirCygwin)')
+ return path
+
+
+def _GenerateExternalRules(rules, output_dir, spec,
+ sources, options, actions_to_add):
+ """Generate an external makefile to do a set of rules.
+
+ Arguments:
+ rules: the list of rules to include
+ output_dir: path containing project and gyp files
+ spec: project specification data
+ sources: set of sources known
+ options: global generator options
+ actions_to_add: The list of actions we will add to.
+ """
+ filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix)
+ mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
+ # Find cygwin style versions of some paths.
+ mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
+ mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
+ # Gather stuff needed to emit all: target.
+ all_inputs = OrderedSet()
+ all_outputs = OrderedSet()
+ all_output_dirs = OrderedSet()
+ first_outputs = []
+ for rule in rules:
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for tf in trigger_files:
+ inputs, outputs = _RuleInputsAndOutputs(rule, tf)
+ all_inputs.update(OrderedSet(inputs))
+ all_outputs.update(OrderedSet(outputs))
+ # Only use one target from each rule as the dependency for
+ # 'all' so we don't try to build each rule multiple times.
+ first_outputs.append(list(outputs)[0])
+ # Get the unique output directories for this rule.
+ output_dirs = [os.path.split(i)[0] for i in outputs]
+ for od in output_dirs:
+ all_output_dirs.add(od)
+ first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
+ # Write out all: target, including mkdir for each output directory.
+ mk_file.write('all: %s\n' % ' '.join(first_outputs_cyg))
+ for od in all_output_dirs:
+ if od:
+ mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od)
+ mk_file.write('\n')
+ # Define how each output is generated.
+ for rule in rules:
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for tf in trigger_files:
+ # Get all the inputs and outputs for this rule for this trigger file.
+ inputs, outputs = _RuleInputsAndOutputs(rule, tf)
+ inputs = [_Cygwinify(i) for i in inputs]
+ outputs = [_Cygwinify(i) for i in outputs]
+ # Prepare the command line for this rule.
+ cmd = [_RuleExpandPath(c, tf) for c in rule['action']]
+ cmd = ['"%s"' % i for i in cmd]
+ cmd = ' '.join(cmd)
+ # Add it to the makefile.
+ mk_file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs)))
+ mk_file.write('\t%s\n\n' % cmd)
+ # Close up the file.
+ mk_file.close()
+
+ # Add makefile to list of sources.
+ sources.add(filename)
+ # Add a build action to call makefile.
+ cmd = ['make',
+ 'OutDir=$(OutDir)',
+ 'IntDir=$(IntDir)',
+ '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
+ '-f', filename]
+ cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True)
+ # Insert makefile as 0'th input, so it gets the action attached there,
+  # as this is easier to understand from within the IDE.
+ all_inputs = list(all_inputs)
+ all_inputs.insert(0, filename)
+ _AddActionStep(actions_to_add,
+ inputs=_FixPaths(all_inputs),
+ outputs=_FixPaths(all_outputs),
+ description='Running external rules for %s' %
+ spec['target_name'],
+ command=cmd)
+
+
+def _EscapeEnvironmentVariableExpansion(s):
+ """Escapes % characters.
+
+ Escapes any % characters so that Windows-style environment variable
+ expansions will leave them alone.
+ See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
+ to understand why we have to do this.
+
+ Args:
+ s: The string to be escaped.
+
+ Returns:
+ The escaped string.
+ """
+ s = s.replace('%', '%%')
+ return s
+
+
+quote_replacer_regex = re.compile(r'(\\*)"')
+
+
+def _EscapeCommandLineArgumentForMSVS(s):
+ """Escapes a Windows command-line argument.
+
+ So that the Win32 CommandLineToArgv function will turn the escaped result back
+ into the original string.
+ See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
+ ("Parsing C++ Command-Line Arguments") to understand why we have to do
+ this.
+
+ Args:
+ s: the string to be escaped.
+ Returns:
+ the escaped string.
+ """
+
+ def _Replace(match):
+ # For a literal quote, CommandLineToArgv requires an odd number of
+ # backslashes preceding it, and it produces half as many literal backslashes
+ # (rounded down). So we need to produce 2n+1 backslashes.
+ return 2 * match.group(1) + '\\"'
+
+ # Escape all quotes so that they are interpreted literally.
+ s = quote_replacer_regex.sub(_Replace, s)
+ # Now add unescaped quotes so that any whitespace is interpreted literally.
+ s = '"' + s + '"'
+ return s
+
+
+delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)')
+
+
+def _EscapeVCProjCommandLineArgListItem(s):
+ """Escapes command line arguments for MSVS.
+
+ The VCProj format stores string lists in a single string using commas and
+ semi-colons as separators, which must be quoted if they are to be
+ interpreted literally. However, command-line arguments may already have
+ quotes, and the VCProj parser is ignorant of the backslash escaping
+ convention used by CommandLineToArgv, so the command-line quotes and the
+ VCProj quotes may not be the same quotes. So to store a general
+ command-line argument in a VCProj list, we need to parse the existing
+ quoting according to VCProj's convention and quote any delimiters that are
+ not already quoted by that convention. The quotes that we add will also be
+ seen by CommandLineToArgv, so if backslashes precede them then we also have
+ to escape those backslashes according to the CommandLineToArgv
+ convention.
+
+ Args:
+ s: the string to be escaped.
+ Returns:
+ the escaped string.
+ """
+
+ def _Replace(match):
+ # For a non-literal quote, CommandLineToArgv requires an even number of
+ # backslashes preceding it, and it produces half as many literal
+ # backslashes. So we need to produce 2n backslashes.
+ return 2 * match.group(1) + '"' + match.group(2) + '"'
+
+ segments = s.split('"')
+ # The unquoted segments are at the even-numbered indices.
+ for i in range(0, len(segments), 2):
+ segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i])
+ # Concatenate back into a single string
+ s = '"'.join(segments)
+ if len(segments) % 2 == 0:
+ # String ends while still quoted according to VCProj's convention. This
+ # means the delimiter and the next list item that follow this one in the
+ # .vcproj file will be misinterpreted as part of this item. There is nothing
+ # we can do about this. Adding an extra quote would correct the problem in
+ # the VCProj but cause the same problem on the final command-line. Moving
+    # the item to the end of the list does work, but that's only possible if
+ # there's only one such item. Let's just warn the user.
+ print(('Warning: MSVS may misinterpret the odd number of ' +
+ 'quotes in ' + s), file=sys.stderr)
+ return s
+
+
+def _EscapeCppDefineForMSVS(s):
+ """Escapes a CPP define so that it will reach the compiler unaltered."""
+ s = _EscapeEnvironmentVariableExpansion(s)
+ s = _EscapeCommandLineArgumentForMSVS(s)
+ s = _EscapeVCProjCommandLineArgListItem(s)
+  # cl.exe replaces literal # characters with = in preprocessor definitions for
+ # some reason. Octal-encode to work around that.
+ s = s.replace('#', '\\%03o' % ord('#'))
+ return s
+
+
+quote_replacer_regex2 = re.compile(r'(\\+)"')
+
+
+def _EscapeCommandLineArgumentForMSBuild(s):
+ """Escapes a Windows command-line argument for use by MSBuild."""
+
+ def _Replace(match):
+    # Use integer division so the repeat count stays an int under Python 3.
+    return (len(match.group(1)) // 2 * 4) * '\\' + '\\"'
+
+ # Escape all quotes so that they are interpreted literally.
+ s = quote_replacer_regex2.sub(_Replace, s)
+ return s
+
+
+def _EscapeMSBuildSpecialCharacters(s):
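+  # Illustrative example: '50% of $(x)' becomes '50%25 of %24(x)'.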
+ escape_dictionary = {
+ '%': '%25',
+ '$': '%24',
+ '@': '%40',
+ "'": '%27',
+ ';': '%3B',
+ '?': '%3F',
+ '*': '%2A'
+ }
+ result = ''.join([escape_dictionary.get(c, c) for c in s])
+ return result
+
+
+def _EscapeCppDefineForMSBuild(s):
+ """Escapes a CPP define so that it will reach the compiler unaltered."""
+ s = _EscapeEnvironmentVariableExpansion(s)
+ s = _EscapeCommandLineArgumentForMSBuild(s)
+ s = _EscapeMSBuildSpecialCharacters(s)
+  # cl.exe replaces literal # characters with = in preprocessor definitions for
+ # some reason. Octal-encode to work around that.
+ s = s.replace('#', '\\%03o' % ord('#'))
+ return s
+
+
+def _GenerateRulesForMSVS(p, output_dir, options, spec,
+ sources, excluded_sources,
+ actions_to_add):
+ """Generate all the rules for a particular project.
+
+ Arguments:
+ p: the project
+ output_dir: directory to emit rules to
+ options: global options passed to the generator
+ spec: the specification for this project
+ sources: the set of all known source files in this project
+ excluded_sources: the set of sources excluded from normal processing
+ actions_to_add: deferred list of actions to add in
+ """
+ rules = spec.get('rules', [])
+ rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
+ rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
+
+ # Handle rules that use a native rules file.
+ if rules_native:
+ _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
+
+ # Handle external rules (non-native rules).
+ if rules_external:
+ _GenerateExternalRules(rules_external, output_dir, spec,
+ sources, options, actions_to_add)
+ _AdjustSourcesForRules(rules, sources, excluded_sources, False)
+
+
+def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
+ # Add outputs generated by each rule (if applicable).
+ for rule in rules:
+ # Add in the outputs from this rule.
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for trigger_file in trigger_files:
+ # Remove trigger_file from excluded_sources to let the rule be triggered
+ # (e.g. rule trigger ax_enums.idl is added to excluded_sources
+ # because it's also in an action's inputs in the same project)
+ excluded_sources.discard(_FixPath(trigger_file))
+ # Done if not processing outputs as sources.
+ if int(rule.get('process_outputs_as_sources', False)):
+ inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
+ inputs = OrderedSet(_FixPaths(inputs))
+ outputs = OrderedSet(_FixPaths(outputs))
+ inputs.remove(_FixPath(trigger_file))
+ sources.update(inputs)
+ if not is_msbuild:
+ excluded_sources.update(inputs)
+ sources.update(outputs)
+
+
+def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
+ """Take inputs with actions attached out of the list of exclusions.
+
+ Arguments:
+ excluded_sources: list of source files not to be built.
+ actions_to_add: dict of actions keyed on source file they're attached to.
+ Returns:
+ excluded_sources with files that have actions attached removed.
+ """
+ must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
+ return [s for s in excluded_sources if s not in must_keep]
+
+
+def _GetDefaultConfiguration(spec):
+ return spec['configurations'][spec['default_configuration']]
+
+
+def _GetGuidOfProject(proj_path, spec):
+ """Get the guid for the project.
+
+ Arguments:
+ proj_path: Path of the vcproj or vcxproj file to generate.
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ the guid.
+ Raises:
+ ValueError: if the specified GUID is invalid.
+ """
+ # Pluck out the default configuration.
+ default_config = _GetDefaultConfiguration(spec)
+ # Decide the guid of the project.
+ guid = default_config.get('msvs_guid')
+ if guid:
+ if VALID_MSVS_GUID_CHARS.match(guid) is None:
+ raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' %
+ (guid, VALID_MSVS_GUID_CHARS.pattern))
+ guid = '{%s}' % guid
+ guid = guid or MSVSNew.MakeGuid(proj_path)
+ return guid
+
+
+def _GetMsbuildToolsetOfProject(proj_path, spec, version):
+ """Get the platform toolset for the project.
+
+ Arguments:
+ proj_path: Path of the vcproj or vcxproj file to generate.
+ spec: The target dictionary containing the properties of the target.
+ version: The MSVSVersion object.
+ Returns:
+ the platform toolset string or None.
+ """
+ # Pluck out the default configuration.
+ default_config = _GetDefaultConfiguration(spec)
+ toolset = default_config.get('msbuild_toolset')
+ if not toolset and version.DefaultToolset():
+ toolset = version.DefaultToolset()
+ if spec['type'] == 'windows_driver':
+ toolset = 'WindowsKernelModeDriver10.0'
+ return toolset
+
+
+def _GenerateProject(project, options, version, generator_flags):
+ """Generates a vcproj file.
+
+ Arguments:
+ project: the MSVSProject object.
+ options: global generator options.
+ version: the MSVSVersion object.
+ generator_flags: dict of generator-specific flags.
+ Returns:
+ A list of source files that cannot be found on disk.
+ """
+ default_config = _GetDefaultConfiguration(project.spec)
+
+ # Skip emitting anything if told to with msvs_existing_vcproj option.
+ if default_config.get('msvs_existing_vcproj'):
+ return []
+
+ if version.UsesVcxproj():
+ return _GenerateMSBuildProject(project, options, version, generator_flags)
+ else:
+ return _GenerateMSVSProject(project, options, version, generator_flags)
+
+
+# TODO: Avoid code duplication with _ValidateSourcesForOSX in make.py.
+def _ValidateSourcesForMSVSProject(spec, version):
+ """Makes sure if duplicate basenames are not specified in the source list.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ version: The VisualStudioVersion object.
+ """
+ # This validation should not be applied to MSVC2010 and later.
+ assert not version.UsesVcxproj()
+
+ # TODO: Check if MSVC allows this for loadable_module targets.
+ if spec.get('type', None) not in ('static_library', 'shared_library'):
+ return
+ sources = spec.get('sources', [])
+ basenames = {}
+ for source in sources:
+ name, ext = os.path.splitext(source)
+ is_compiled_file = ext in [
+ '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
+ if not is_compiled_file:
+ continue
+ basename = os.path.basename(name) # Don't include extension.
+ basenames.setdefault(basename, []).append(source)
+
+ error = ''
+ for basename, files in basenames.items():
+ if len(files) > 1:
+ error += ' %s: %s\n' % (basename, ' '.join(files))
+
+ if error:
+ print('static library %s has several files with the same basename:\n' %
+ spec['target_name'] + error + 'MSVC08 cannot handle that.')
+ raise GypError('Duplicate basenames in sources section, see list above')
+
+
+def _GenerateMSVSProject(project, options, version, generator_flags):
+ """Generates a .vcproj file. It may create .rules and .user files too.
+
+ Arguments:
+ project: The project object we will generate the file for.
+ options: Global options passed to the generator.
+ version: The VisualStudioVersion object.
+ generator_flags: dict of generator-specific flags.
+ """
+ spec = project.spec
+ gyp.common.EnsureDirExists(project.path)
+
+ platforms = _GetUniquePlatforms(spec)
+ p = MSVSProject.Writer(project.path, version, spec['target_name'],
+ project.guid, platforms)
+
+ # Get directory project file is in.
+ project_dir = os.path.split(project.path)[0]
+ gyp_path = _NormalizedSource(project.build_file)
+ relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
+
+ config_type = _GetMSVSConfigurationType(spec, project.build_file)
+ for config_name, config in spec['configurations'].items():
+ _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
+
+  # MSVC08 and prior versions cannot handle duplicate basenames in the same
+ # target.
+ # TODO: Take excluded sources into consideration if possible.
+ _ValidateSourcesForMSVSProject(spec, version)
+
+ # Prepare list of sources and excluded sources.
+ gyp_file = os.path.split(project.build_file)[1]
+ sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
+ gyp_file)
+
+ # Add rules.
+ actions_to_add = {}
+ _GenerateRulesForMSVS(p, project_dir, options, spec,
+ sources, excluded_sources,
+ actions_to_add)
+ list_excluded = generator_flags.get('msvs_list_excluded_files', True)
+ sources, excluded_sources, excluded_idl = (
+ _AdjustSourcesAndConvertToFilterHierarchy(spec, options, project_dir,
+ sources, excluded_sources,
+ list_excluded, version))
+
+ # Add in files.
+ missing_sources = _VerifySourcesExist(sources, project_dir)
+ p.AddFiles(sources)
+
+ _AddToolFilesToMSVS(p, spec)
+ _HandlePreCompiledHeaders(p, sources, spec)
+ _AddActions(actions_to_add, spec, relative_path_of_gyp_file)
+ _AddCopies(actions_to_add, spec)
+ _WriteMSVSUserFile(project.path, version, spec)
+
+ # NOTE: this stanza must appear after all actions have been decided.
+  # Don't exclude sources with actions attached, or they won't run.
+ excluded_sources = _FilterActionsFromExcluded(
+ excluded_sources, actions_to_add)
+ _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
+ list_excluded)
+ _AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
+
+ # Write it out.
+ p.WriteIfChanged()
+
+ return missing_sources
+
+
+def _GetUniquePlatforms(spec):
+ """Returns the list of unique platforms for this spec, e.g ['win32', ...].
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+    The list of unique platform names.
+ """
+ # Gather list of unique platforms.
+ platforms = OrderedSet()
+ for configuration in spec['configurations']:
+ platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
+ platforms = list(platforms)
+ return platforms
+
+
+def _CreateMSVSUserFile(proj_path, version, spec):
+ """Generates a .user file for the user running this Gyp program.
+
+ Arguments:
+ proj_path: The path of the project file being created. The .user file
+ shares the same path (with an appropriate suffix).
+ version: The VisualStudioVersion object.
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ The MSVSUserFile object created.
+ """
+ (domain, username) = _GetDomainAndUserName()
+ vcuser_filename = '.'.join([proj_path, domain, username, 'user'])
+ user_file = MSVSUserFile.Writer(vcuser_filename, version,
+ spec['target_name'])
+ return user_file
+
+
+def _GetMSVSConfigurationType(spec, build_file):
+ """Returns the configuration type for this project.
+
+ It's a number defined by Microsoft. May raise an exception.
+
+ Args:
+ spec: The target dictionary containing the properties of the target.
+ build_file: The path of the gyp file.
+ Returns:
+ An integer, the configuration type.
+ """
+ try:
+ config_type = {
+ 'executable': '1', # .exe
+ 'shared_library': '2', # .dll
+ 'loadable_module': '2', # .dll
+ 'static_library': '4', # .lib
+ 'windows_driver': '5', # .sys
+ 'none': '10', # Utility type
+ }[spec['type']]
+ except KeyError:
+ if spec.get('type'):
+ raise GypError('Target type %s is not a valid target type for '
+ 'target %s in %s.' %
+ (spec['type'], spec['target_name'], build_file))
+ else:
+ raise GypError('Missing type field for target %s in %s.' %
+ (spec['target_name'], build_file))
+ return config_type
+
+
+def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
+ """Adds a configuration to the MSVS project.
+
+  Many settings in a vcproj file are specific to a configuration. This
+  function adds the configuration-specific part of the vcproj file.
+
+ Arguments:
+ p: The target project being generated.
+ spec: The target dictionary containing the properties of the target.
+ config_type: The configuration type, a number as defined by Microsoft.
+ config_name: The name of the configuration.
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ """
+ # Get the information for this configuration
+ include_dirs, midl_include_dirs, resource_include_dirs = \
+ _GetIncludeDirs(config)
+ libraries = _GetLibraries(spec)
+ library_dirs = _GetLibraryDirs(config)
+ out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
+ defines = _GetDefines(config)
+ defines = [_EscapeCppDefineForMSVS(d) for d in defines]
+ disabled_warnings = _GetDisabledWarnings(config)
+ prebuild = config.get('msvs_prebuild')
+ postbuild = config.get('msvs_postbuild')
+ def_file = _GetModuleDefinition(spec)
+ precompiled_header = config.get('msvs_precompiled_header')
+
+ # Prepare the list of tools as a dictionary.
+ tools = dict()
+ # Add in user specified msvs_settings.
+ msvs_settings = config.get('msvs_settings', {})
+ MSVSSettings.ValidateMSVSSettings(msvs_settings)
+
+ # Prevent default library inheritance from the environment.
+ _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', ['$(NOINHERIT)'])
+
+ for tool in msvs_settings:
+ settings = config['msvs_settings'][tool]
+ for setting in settings:
+ _ToolAppend(tools, tool, setting, settings[setting])
+ # Add the information to the appropriate tool
+ _ToolAppend(tools, 'VCCLCompilerTool',
+ 'AdditionalIncludeDirectories', include_dirs)
+ _ToolAppend(tools, 'VCMIDLTool',
+ 'AdditionalIncludeDirectories', midl_include_dirs)
+ _ToolAppend(tools, 'VCResourceCompilerTool',
+ 'AdditionalIncludeDirectories', resource_include_dirs)
+ # Add in libraries.
+ _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries)
+ _ToolAppend(tools, 'VCLinkerTool', 'AdditionalLibraryDirectories',
+ library_dirs)
+ if out_file:
+ _ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True)
+ # Add defines.
+ _ToolAppend(tools, 'VCCLCompilerTool', 'PreprocessorDefinitions', defines)
+ _ToolAppend(tools, 'VCResourceCompilerTool', 'PreprocessorDefinitions',
+ defines)
+ # Change program database directory to prevent collisions.
+ _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName',
+ '$(IntDir)$(ProjectName)\\vc80.pdb', only_if_unset=True)
+ # Add disabled warnings.
+ _ToolAppend(tools, 'VCCLCompilerTool',
+ 'DisableSpecificWarnings', disabled_warnings)
+ # Add Pre-build.
+ _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild)
+ # Add Post-build.
+ _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild)
+ # Turn on precompiled headers if appropriate.
+ if precompiled_header:
+ precompiled_header = os.path.split(precompiled_header)[1]
+ _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2')
+ _ToolAppend(tools, 'VCCLCompilerTool',
+ 'PrecompiledHeaderThrough', precompiled_header)
+ _ToolAppend(tools, 'VCCLCompilerTool',
+ 'ForcedIncludeFiles', precompiled_header)
+ # Loadable modules don't generate import libraries;
+ # tell dependent projects to not expect one.
+ if spec['type'] == 'loadable_module':
+ _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true')
+ # Set the module definition file if any.
+ if def_file:
+ _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', def_file)
+
+ _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name)
+
+
+def _GetIncludeDirs(config):
+ """Returns the list of directories to be used for #include directives.
+
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of directory paths.
+ """
+ # TODO(bradnelson): include_dirs should really be flexible enough not to
+ # require this sort of thing.
+ include_dirs = (
+ config.get('include_dirs', []) +
+ config.get('msvs_system_include_dirs', []))
+ midl_include_dirs = (
+ config.get('midl_include_dirs', []) +
+ config.get('msvs_system_include_dirs', []))
+ resource_include_dirs = config.get('resource_include_dirs', include_dirs)
+ include_dirs = _FixPaths(include_dirs)
+ midl_include_dirs = _FixPaths(midl_include_dirs)
+ resource_include_dirs = _FixPaths(resource_include_dirs)
+ return include_dirs, midl_include_dirs, resource_include_dirs
+
+
+def _GetLibraryDirs(config):
+ """Returns the list of directories to be used for library search paths.
+
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of directory paths.
+ """
+
+ library_dirs = config.get('library_dirs', [])
+ library_dirs = _FixPaths(library_dirs)
+ return library_dirs
+
+
+def _GetLibraries(spec):
+ """Returns the list of libraries for this configuration.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+    The list of library names to link against.
+ """
+ libraries = spec.get('libraries', [])
+ # Strip out -l, as it is not used on windows (but is needed so we can pass
+ # in libraries that are assumed to be in the default library path).
+ # Also remove duplicate entries, leaving only the last duplicate, while
+ # preserving order.
+ found = OrderedSet()
+ unique_libraries_list = []
+ for entry in reversed(libraries):
+ library = re.sub(r'^\-l', '', entry)
+ if not os.path.splitext(library)[1]:
+ library += '.lib'
+ if library not in found:
+ found.add(library)
+ unique_libraries_list.append(library)
+ unique_libraries_list.reverse()
+ return unique_libraries_list
+
+
+def _GetOutputFilePathAndTool(spec, msbuild):
+ """Returns the path and tool to use for this target.
+
+ Figures out the path of the file this spec will create and the name of
+ the VC tool that will create it.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ msbuild: True when generating for MSBuild (.vcxproj); False for the
+ older .vcproj format.
+ Returns:
+ A triple of (file path, name of the vc tool, name of the msbuild tool)
+ """
+ # Select a name for the output file.
+ out_file = ''
+ vc_tool = ''
+ msbuild_tool = ''
+ output_file_map = {
+ 'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'),
+ 'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
+ 'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
+ 'windows_driver': ('VCLinkerTool', 'Link', '$(OutDir)', '.sys'),
+ 'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'),
+ }
+ output_file_props = output_file_map.get(spec['type'])
+ if output_file_props and int(spec.get('msvs_auto_output_file', 1)):
+ vc_tool, msbuild_tool, out_dir, suffix = output_file_props
+ if spec.get('standalone_static_library', 0):
+ out_dir = '$(OutDir)'
+ out_dir = spec.get('product_dir', out_dir)
+ product_extension = spec.get('product_extension')
+ if product_extension:
+ suffix = '.' + product_extension
+ elif msbuild:
+ suffix = '$(TargetExt)'
+ prefix = spec.get('product_prefix', '')
+ product_name = spec.get('product_name', '$(ProjectName)')
+ out_file = ntpath.join(out_dir, prefix + product_name + suffix)
+ return out_file, vc_tool, msbuild_tool
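+
+# For illustration, a sketch with made-up example values: for a spec such as
+# {'type': 'shared_library', 'target_name': 'foo'} with no product_* keys and
+# msbuild=False, the function returns
+# ('$(OutDir)\\$(ProjectName).dll', 'VCLinkerTool', 'Link'); with
+# msbuild=True the suffix becomes '$(TargetExt)' instead of '.dll'.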
+
+
+def _GetOutputTargetExt(spec):
+ """Returns the extension for this target, including the dot
+
+ If product_extension is specified, set target_extension to this to avoid
+ MSB8012, returns None otherwise. Ignores any target_extension settings in
+ the input files.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ A string with the extension, or None
+ """
+ target_extension = spec.get('product_extension')
+ if target_extension:
+ return '.' + target_extension
+ return None
+
+
+def _GetDefines(config):
+ """Returns the list of preprocessor definitions for this configuation.
+
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of preprocessor definitions.
+ """
+ defines = []
+ for d in config.get('defines', []):
+ if type(d) == list:
+ fd = '='.join([str(dpart) for dpart in d])
+ else:
+ fd = str(d)
+ defines.append(fd)
+ return defines
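+
+# For illustration, a sketch with made-up example values: a configuration
+# such as {'defines': ['NDEBUG', ['VERSION', 3]]} produces
+# ['NDEBUG', 'VERSION=3'] -- list entries are joined with '=' to form
+# NAME=value preprocessor definitions.
+#
+#   config = {'defines': ['NDEBUG', ['VERSION', 3]]}
+#   assert _GetDefines(config) == ['NDEBUG', 'VERSION=3']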
+
+
+def _GetDisabledWarnings(config):
+ return [str(i) for i in config.get('msvs_disabled_warnings', [])]
+
+
+def _GetModuleDefinition(spec):
+ def_file = ''
+ if spec['type'] in ['shared_library', 'loadable_module', 'executable',
+ 'windows_driver']:
+ def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
+ if len(def_files) == 1:
+ def_file = _FixPath(def_files[0])
+ elif def_files:
+ raise ValueError(
+ 'Multiple module definition files in one target, target %s lists '
+ 'multiple .def files: %s' % (
+ spec['target_name'], ' '.join(def_files)))
+ return def_file
+
+
+def _ConvertToolsToExpectedForm(tools):
+ """Convert tools to a form expected by Visual Studio.
+
+ Arguments:
+ tools: A dictionary of settings; the tool name is the key.
+ Returns:
+ A list of Tool objects.
+ """
+ tool_list = []
+ for tool, settings in tools.items():
+ # Collapse settings with lists.
+ settings_fixed = {}
+ for setting, value in settings.items():
+ if type(value) == list:
+ if ((tool == 'VCLinkerTool' and
+ setting == 'AdditionalDependencies') or
+ setting == 'AdditionalOptions'):
+ settings_fixed[setting] = ' '.join(value)
+ else:
+ settings_fixed[setting] = ';'.join(value)
+ else:
+ settings_fixed[setting] = value
+ # Add in this tool.
+ tool_list.append(MSVSProject.Tool(tool, settings_fixed))
+ return tool_list
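+
+# For illustration, a sketch with made-up example values: list-valued
+# settings are collapsed before being handed to MSVSProject.Tool. Assuming
+#
+#   tools = {'VCCLCompilerTool': {'DisableSpecificWarnings': ['4018', '4244'],
+#                                 'AdditionalOptions': ['/MP', '/bigobj']}}
+#
+# the resulting Tool carries DisableSpecificWarnings='4018;4244' (joined with
+# ';') and AdditionalOptions='/MP /bigobj' (joined with a space, since the
+# options form a single command-line fragment).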
+
+
+def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
+ """Add to the project file the configuration specified by config.
+
+ Arguments:
+ p: The target project being generated.
+ spec: the target project dict.
+ tools: A dictionary of settings; the tool name is the key.
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ config_type: The configuration type, a number as defined by Microsoft.
+ config_name: The name of the configuration.
+ """
+ attributes = _GetMSVSAttributes(spec, config, config_type)
+ # Add in this configuration.
+ tool_list = _ConvertToolsToExpectedForm(tools)
+ p.AddConfig(_ConfigFullName(config_name, config),
+ attrs=attributes, tools=tool_list)
+
+
+def _GetMSVSAttributes(spec, config, config_type):
+ # Prepare configuration attributes.
+ prepared_attrs = {}
+ source_attrs = config.get('msvs_configuration_attributes', {})
+ for a in source_attrs:
+ prepared_attrs[a] = source_attrs[a]
+ # Add props files.
+ vsprops_dirs = config.get('msvs_props', [])
+ vsprops_dirs = _FixPaths(vsprops_dirs)
+ if vsprops_dirs:
+ prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs)
+ # Set configuration type.
+ prepared_attrs['ConfigurationType'] = config_type
+ output_dir = prepared_attrs.get('OutputDirectory',
+ '$(SolutionDir)$(ConfigurationName)')
+ prepared_attrs['OutputDirectory'] = _FixPath(output_dir) + '\\'
+ if 'IntermediateDirectory' not in prepared_attrs:
+ intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)'
+ prepared_attrs['IntermediateDirectory'] = _FixPath(intermediate) + '\\'
+ else:
+ intermediate = _FixPath(prepared_attrs['IntermediateDirectory']) + '\\'
+ intermediate = MSVSSettings.FixVCMacroSlashes(intermediate)
+ prepared_attrs['IntermediateDirectory'] = intermediate
+ return prepared_attrs
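+
+# For illustration, a sketch of the defaults: when the config sets no
+# msvs_configuration_attributes, the attributes produced here are the numeric
+# ConfigurationType plus
+# OutputDirectory='$(SolutionDir)$(ConfigurationName)\\' and
+# IntermediateDirectory='$(ConfigurationName)\\obj\\$(ProjectName)\\'.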
+
+
+def _AddNormalizedSources(sources_set, sources_array):
+ sources_set.update(_NormalizedSource(s) for s in sources_array)
+
+
+def _PrepareListOfSources(spec, generator_flags, gyp_file):
+ """Prepare list of sources and excluded sources.
+
+ Besides the sources specified directly in the spec, adds the gyp file so
+ that a change to it will cause a re-compile. Also adds appropriate sources
+ for actions and copies. Assumes later stage will un-exclude files which
+ have custom build steps attached.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ generator_flags: Dictionary of generator-specific flags.
+ gyp_file: The name of the gyp file.
+ Returns:
+ A pair of (list of sources, list of excluded sources).
+ The sources will be relative to the gyp file.
+ """
+ sources = OrderedSet()
+ _AddNormalizedSources(sources, spec.get('sources', []))
+ excluded_sources = OrderedSet()
+ # Add in the gyp file.
+ if not generator_flags.get('standalone'):
+ sources.add(gyp_file)
+
+ # Add in 'action' inputs and outputs.
+ for a in spec.get('actions', []):
+ inputs = a['inputs']
+ inputs = [_NormalizedSource(i) for i in inputs]
+ # Add all inputs to sources and excluded sources.
+ inputs = OrderedSet(inputs)
+ sources.update(inputs)
+ if not spec.get('msvs_external_builder'):
+ excluded_sources.update(inputs)
+ if int(a.get('process_outputs_as_sources', False)):
+ _AddNormalizedSources(sources, a.get('outputs', []))
+ # Add in 'copies' inputs and outputs.
+ for cpy in spec.get('copies', []):
+ _AddNormalizedSources(sources, cpy.get('files', []))
+ return (sources, excluded_sources)
+
+
+def _AdjustSourcesAndConvertToFilterHierarchy(
+ spec, options, gyp_dir, sources, excluded_sources, list_excluded, version):
+ """Adjusts the list of sources and excluded sources.
+
+ Also converts the sets to lists.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ options: Global generator options.
+ gyp_dir: The path to the gyp file being processed.
+ sources: A set of sources to be included for this project.
+ excluded_sources: A set of sources to be excluded for this project.
+ list_excluded: True if excluded files should be listed in the project.
+ version: A MSVSVersion object.
+ Returns:
+ A trio of (list of sources, list of excluded sources,
+ path of excluded IDL file)
+ """
+ # Exclude excluded sources coming into the generator.
+ excluded_sources.update(OrderedSet(spec.get('sources_excluded', [])))
+ # Add excluded sources into sources for good measure.
+ sources.update(excluded_sources)
+ # Convert to proper windows form.
+ # NOTE: sources goes from being a set to a list here.
+ # NOTE: excluded_sources goes from being a set to a list here.
+ sources = _FixPaths(sources)
+ # Convert to proper windows form.
+ excluded_sources = _FixPaths(excluded_sources)
+
+ excluded_idl = _IdlFilesHandledNonNatively(spec, sources)
+
+ precompiled_related = _GetPrecompileRelatedFiles(spec)
+ # Find the excluded ones, minus the precompiled header related ones.
+ fully_excluded = [i for i in excluded_sources if i not in precompiled_related]
+
+ # Convert to folders and the right slashes.
+ sources = [i.split('\\') for i in sources]
+ sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded,
+ list_excluded=list_excluded,
+ msvs_version=version)
+
+ # Prune filters with a single child to flatten ugly directory structures
+ # such as ../../src/modules/module1 etc.
+ if version.UsesVcxproj():
+ while all([isinstance(s, MSVSProject.Filter) for s in sources]) \
+ and len(set([s.name for s in sources])) == 1:
+ assert all([len(s.contents) == 1 for s in sources])
+ sources = [s.contents[0] for s in sources]
+ else:
+ while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
+ sources = sources[0].contents
+
+ return sources, excluded_sources, excluded_idl
+
+
+def _IdlFilesHandledNonNatively(spec, sources):
+ # If any non-native rules use 'idl' as an extension, exclude idl files.
+ # Gather a list here to use later.
+ using_idl = False
+ for rule in spec.get('rules', []):
+ if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)):
+ using_idl = True
+ break
+ if using_idl:
+ excluded_idl = [i for i in sources if i.endswith('.idl')]
+ else:
+ excluded_idl = []
+ return excluded_idl
+
+
+def _GetPrecompileRelatedFiles(spec):
+ # Gather a list of precompiled header related sources.
+ precompiled_related = []
+ for _, config in spec['configurations'].items():
+ for k in precomp_keys:
+ f = config.get(k)
+ if f:
+ precompiled_related.append(_FixPath(f))
+ return precompiled_related
+
+
+def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
+ list_excluded):
+ exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
+ for file_name, excluded_configs in exclusions.items():
+ if (not list_excluded and
+ len(excluded_configs) == len(spec['configurations'])):
+ # If we're not listing excluded files, then they won't appear in the
+ # project, so don't try to configure them to be excluded.
+ pass
+ else:
+ for config_name, config in excluded_configs:
+ p.AddFileConfig(file_name, _ConfigFullName(config_name, config),
+ {'ExcludedFromBuild': 'true'})
+
+
+def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
+ exclusions = {}
+ # Exclude excluded sources from being built.
+ for f in excluded_sources:
+ excluded_configs = []
+ for config_name, config in spec['configurations'].items():
+ precomped = [_FixPath(config.get(i, '')) for i in precomp_keys]
+ # Don't do this for ones that are precompiled header related.
+ if f not in precomped:
+ excluded_configs.append((config_name, config))
+ exclusions[f] = excluded_configs
+ # If any non-native rules use 'idl' as an extension, exclude idl files.
+ # Exclude them now.
+ for f in excluded_idl:
+ excluded_configs = []
+ for config_name, config in spec['configurations'].items():
+ excluded_configs.append((config_name, config))
+ exclusions[f] = excluded_configs
+ return exclusions
+
+
+def _AddToolFilesToMSVS(p, spec):
+ # Add in tool files (rules).
+ tool_files = OrderedSet()
+ for _, config in spec['configurations'].items():
+ for f in config.get('msvs_tool_files', []):
+ tool_files.add(f)
+ for f in tool_files:
+ p.AddToolFile(f)
+
+
+def _HandlePreCompiledHeaders(p, sources, spec):
+ # Pre-compiled header source stubs need a different compiler flag
+ # (generate precompiled header) and any source file not of the same
+ # kind (i.e. C vs. C++) as the precompiled header source stub needs
+ # to have use of precompiled headers disabled.
+ extensions_excluded_from_precompile = []
+ for config_name, config in spec['configurations'].items():
+ source = config.get('msvs_precompiled_source')
+ if source:
+ source = _FixPath(source)
+ # UsePrecompiledHeader=1 marks the source that creates the precompiled header.
+ tool = MSVSProject.Tool('VCCLCompilerTool',
+ {'UsePrecompiledHeader': '1'})
+ p.AddFileConfig(source, _ConfigFullName(config_name, config),
+ {}, tools=[tool])
+ basename, extension = os.path.splitext(source)
+ if extension == '.c':
+ extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
+ else:
+ extensions_excluded_from_precompile = ['.c']
+ def DisableForSourceTree(source_tree):
+ for source in source_tree:
+ if isinstance(source, MSVSProject.Filter):
+ DisableForSourceTree(source.contents)
+ else:
+ basename, extension = os.path.splitext(source)
+ if extension in extensions_excluded_from_precompile:
+ for config_name, config in spec['configurations'].items():
+ tool = MSVSProject.Tool('VCCLCompilerTool',
+ {'UsePrecompiledHeader': '0',
+ 'ForcedIncludeFiles': '$(NOINHERIT)'})
+ p.AddFileConfig(_FixPath(source),
+ _ConfigFullName(config_name, config),
+ {}, tools=[tool])
+ # Do nothing if there was no precompiled source.
+ if extensions_excluded_from_precompile:
+ DisableForSourceTree(sources)
+
+
+def _AddActions(actions_to_add, spec, relative_path_of_gyp_file):
+ # Add actions.
+ actions = spec.get('actions', [])
+ # Don't setup_env every time. When all the actions are run together in one
+ # batch file in VS, the PATH will grow too long.
+ # Membership in this set means that the cygwin environment has been set up,
+ # and does not need to be set up again.
+ have_setup_env = set()
+ for a in actions:
+ # Attach actions to the gyp file if nothing else is there.
+ inputs = a.get('inputs') or [relative_path_of_gyp_file]
+ attached_to = inputs[0]
+ need_setup_env = attached_to not in have_setup_env
+ cmd = _BuildCommandLineForRule(spec, a, has_input_path=False,
+ do_setup_env=need_setup_env)
+ have_setup_env.add(attached_to)
+ # Add the action.
+ _AddActionStep(actions_to_add,
+ inputs=inputs,
+ outputs=a.get('outputs', []),
+ description=a.get('message', a['action_name']),
+ command=cmd)
+
+
+def _WriteMSVSUserFile(project_path, version, spec):
+ # Add run_as and test targets.
+ if 'run_as' in spec:
+ run_as = spec['run_as']
+ action = run_as.get('action', [])
+ environment = run_as.get('environment', [])
+ working_directory = run_as.get('working_directory', '.')
+ elif int(spec.get('test', 0)):
+ action = ['$(TargetPath)', '--gtest_print_time']
+ environment = []
+ working_directory = '.'
+ else:
+ return # Nothing to add
+ # Write out the user file.
+ user_file = _CreateMSVSUserFile(project_path, version, spec)
+ for config_name, c_data in spec['configurations'].items():
+ user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
+ action, environment, working_directory)
+ user_file.WriteIfChanged()
+
+
+def _AddCopies(actions_to_add, spec):
+ copies = _GetCopies(spec)
+ for inputs, outputs, cmd, description in copies:
+ _AddActionStep(actions_to_add, inputs=inputs, outputs=outputs,
+ description=description, command=cmd)
+
+
+def _GetCopies(spec):
+ copies = []
+ # Add copies.
+ for cpy in spec.get('copies', []):
+ for src in cpy.get('files', []):
+ dst = os.path.join(cpy['destination'], os.path.basename(src))
+ # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and
+ # outputs, so do the same for our generated command line.
+ if src.endswith('/'):
+ src_bare = src[:-1]
+ base_dir = posixpath.split(src_bare)[0]
+ outer_dir = posixpath.split(src_bare)[1]
+ fixed_dst = _FixPath(dst)
+ full_dst = '"%s\\%s\\"' % (fixed_dst, outer_dir)
+ cmd = 'mkdir %s 2>nul & cd "%s" && xcopy /e /f /y "%s" %s' % (
+ full_dst, _FixPath(base_dir), outer_dir, full_dst)
+ copies.append(([src], ['dummy_copies', dst], cmd,
+ 'Copying %s to %s' % (src, fixed_dst)))
+ else:
+ fix_dst = _FixPath(cpy['destination'])
+ cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
+ fix_dst, _FixPath(src), _FixPath(dst))
+ copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, fix_dst)))
+ return copies
+
+
+def _GetPathDict(root, path):
+ # |path| will eventually be empty (in the recursive calls) if it was initially
+ # relative; otherwise it will eventually end up as '\', 'D:\', etc.
+ if not path or path.endswith(os.sep):
+ return root
+ parent, folder = os.path.split(path)
+ parent_dict = _GetPathDict(root, parent)
+ if folder not in parent_dict:
+ parent_dict[folder] = dict()
+ return parent_dict[folder]
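+
+# For illustration, a sketch with made-up example values: the helper builds
+# (and returns) the nested dict node for a path, creating parents as needed.
+#
+#   root = {}
+#   node = _GetPathDict(root, 'chrome/browser')
+#   # root is now {'chrome': {'browser': {}}} and node is the inner dict.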
+
+
+def _DictsToFolders(base_path, bucket, flat):
+ # Convert to folders recursively.
+ children = []
+ for folder, contents in bucket.items():
+ if type(contents) == dict:
+ folder_children = _DictsToFolders(os.path.join(base_path, folder),
+ contents, flat)
+ if flat:
+ children += folder_children
+ else:
+ folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder),
+ name='(' + folder + ')',
+ entries=folder_children)
+ children.append(folder_children)
+ else:
+ children.append(contents)
+ return children
+
+
+def _CollapseSingles(parent, node):
+ # Recursively explore the tree of dicts looking for projects which are
+ # the sole item in a folder which has the same name as the project. Bring
+ # such projects up one level.
+ if (type(node) == dict and
+ len(node) == 1 and
+ next(iter(node)) == parent + '.vcproj'):
+ return node[next(iter(node))]
+ if type(node) != dict:
+ return node
+ for child in node:
+ node[child] = _CollapseSingles(child, node[child])
+ return node
+
+
+def _GatherSolutionFolders(sln_projects, project_objects, flat):
+ root = {}
+ # Convert into a tree of dicts on path.
+ for p in sln_projects:
+ gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
+ gyp_dir = os.path.dirname(gyp_file)
+ path_dict = _GetPathDict(root, gyp_dir)
+ path_dict[target + '.vcproj'] = project_objects[p]
+ # Walk down from the top until we hit a folder that has more than one entry.
+ # In practice, this strips the top-level "src/" dir from the hierarchy in
+ # the solution.
+ while len(root) == 1 and type(root[next(iter(root))]) == dict:
+ root = root[next(iter(root))]
+ # Collapse singles.
+ root = _CollapseSingles('', root)
+ # Merge buckets until everything is a root entry.
+ return _DictsToFolders('', root, flat)
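+
+# For illustration, a sketch with made-up example values: for projects
+# generated from src/base/base.gyp and src/net/net.gyp the path tree is
+# {'src': {'base': {'base.vcproj': <proj>}, 'net': {'net.vcproj': <proj>}}}.
+# The loop above strips the sole top-level 'src' entry, _CollapseSingles
+# lifts each project out of its same-named single-entry folder, and the
+# solution ends up with the two project objects at the top level instead of
+# nested folders.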
+
+
+def _GetPathOfProject(qualified_target, spec, options, msvs_version):
+ default_config = _GetDefaultConfiguration(spec)
+ proj_filename = default_config.get('msvs_existing_vcproj')
+ if not proj_filename:
+ proj_filename = (spec['target_name'] + options.suffix +
+ msvs_version.ProjectExtension())
+
+ build_file = gyp.common.BuildFile(qualified_target)
+ proj_path = os.path.join(os.path.dirname(build_file), proj_filename)
+ fix_prefix = None
+ if options.generator_output:
+ project_dir_path = os.path.dirname(os.path.abspath(proj_path))
+ proj_path = os.path.join(options.generator_output, proj_path)
+ fix_prefix = gyp.common.RelativePath(project_dir_path,
+ os.path.dirname(proj_path))
+ return proj_path, fix_prefix
+
+
+def _GetPlatformOverridesOfProject(spec):
+ # Prepare a dict indicating which project configurations are used for which
+ # solution configurations for this target.
+ config_platform_overrides = {}
+ for config_name, c in spec['configurations'].items():
+ config_fullname = _ConfigFullName(config_name, c)
+ platform = c.get('msvs_target_platform', _ConfigPlatform(c))
+ fixed_config_fullname = '%s|%s' % (
+ _ConfigBaseName(config_name, _ConfigPlatform(c)), platform)
+ config_platform_overrides[config_fullname] = fixed_config_fullname
+ return config_platform_overrides
+
+
+def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
+ """Create a MSVSProject object for the targets found in target list.
+
+ Arguments:
+ target_list: the list of targets to generate project objects for.
+ target_dicts: the dictionary of specifications.
+ options: global generator options.
+ msvs_version: the MSVSVersion object.
+ Returns:
+ A dictionary of the created projects, keyed by qualified target.
+ """
+ global fixpath_prefix
+ # Generate each project.
+ projects = {}
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ if spec['toolset'] != 'target':
+ raise GypError(
+ 'Multiple toolsets not supported in msvs build (target %s)' %
+ qualified_target)
+ proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec,
+ options, msvs_version)
+ guid = _GetGuidOfProject(proj_path, spec)
+ overrides = _GetPlatformOverridesOfProject(spec)
+ build_file = gyp.common.BuildFile(qualified_target)
+ # Create object for this project.
+ obj = MSVSNew.MSVSProject(
+ proj_path,
+ name=spec['target_name'],
+ guid=guid,
+ spec=spec,
+ build_file=build_file,
+ config_platform_overrides=overrides,
+ fixpath_prefix=fixpath_prefix)
+ # Set project toolset if any (MS build only)
+ if msvs_version.UsesVcxproj():
+ obj.set_msbuild_toolset(
+ _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version))
+ projects[qualified_target] = obj
+ # Set all the dependencies, but not if we are using an external builder like
+ # ninja
+ for project in projects.values():
+ if not project.spec.get('msvs_external_builder'):
+ deps = project.spec.get('dependencies', [])
+ deps = [projects[d] for d in deps]
+ project.set_dependencies(deps)
+ return projects
+
+
+def _InitNinjaFlavor(params, target_list, target_dicts):
+ """Initialize targets for the ninja flavor.
+
+ This sets up the necessary variables in the targets to generate msvs projects
+ that use ninja as an external builder. The variables in the spec are only set
+ if they have not been set. This allows individual specs to override the
+ default values initialized here.
+ Arguments:
+ params: Params provided to the generator.
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ """
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ if spec.get('msvs_external_builder'):
+ # The spec explicitly defined an external builder, so don't change it.
+ continue
+
+ path_to_ninja = spec.get('msvs_path_to_ninja', 'ninja.exe')
+
+ spec['msvs_external_builder'] = 'ninja'
+ if not spec.get('msvs_external_builder_out_dir'):
+ gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+ gyp_dir = os.path.dirname(gyp_file)
+ configuration = '$(Configuration)'
+ if params.get('target_arch') == 'x64':
+ configuration += '_x64'
+ spec['msvs_external_builder_out_dir'] = os.path.join(
+ gyp.common.RelativePath(params['options'].toplevel_dir, gyp_dir),
+ ninja_generator.ComputeOutputDir(params),
+ configuration)
+ if not spec.get('msvs_external_builder_build_cmd'):
+ spec['msvs_external_builder_build_cmd'] = [
+ path_to_ninja,
+ '-C',
+ '$(OutDir)',
+ '$(ProjectName)',
+ ]
+ if not spec.get('msvs_external_builder_clean_cmd'):
+ spec['msvs_external_builder_clean_cmd'] = [
+ path_to_ninja,
+ '-C',
+ '$(OutDir)',
+ '-tclean',
+ '$(ProjectName)',
+ ]
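+
+# For illustration, a sketch of the defaults set above: unless a spec
+# overrides them, each target ends up with
+#   msvs_external_builder_build_cmd = ['ninja.exe', '-C', '$(OutDir)',
+#                                      '$(ProjectName)']
+# so the generated project shells out to ninja rather than invoking the VC
+# toolchain directly.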
+
+
+def CalculateVariables(default_variables, params):
+ """Generated variables that require params to be known."""
+
+ generator_flags = params.get('generator_flags', {})
+
+ # Select project file format version (if unset, default to auto detecting).
+ msvs_version = MSVSVersion.SelectVisualStudioVersion(
+ generator_flags.get('msvs_version', 'auto'))
+ # Stash msvs_version for later (so we don't have to probe the system twice).
+ params['msvs_version'] = msvs_version
+
+ # Set a variable so conditions can be based on msvs_version.
+ default_variables['MSVS_VERSION'] = msvs_version.ShortName()
+
+ # To determine processor word size on Windows, in addition to checking
+ # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
+ # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
+ # contains the actual word size of the system when running through WOW64).
+ if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or
+ os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0):
+ default_variables['MSVS_OS_BITS'] = 64
+ else:
+ default_variables['MSVS_OS_BITS'] = 32
+
+ if gyp.common.GetFlavor(params) == 'ninja':
+ default_variables['SHARED_INTERMEDIATE_DIR'] = '$(OutDir)gen'
+
+
+def PerformBuild(data, configurations, params):
+ options = params['options']
+ msvs_version = params['msvs_version']
+ devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com')
+
+ for build_file, build_file_dict in data.items():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != '.gyp':
+ continue
+ sln_path = build_file_root + options.suffix + '.sln'
+ if options.generator_output:
+ sln_path = os.path.join(options.generator_output, sln_path)
+
+ for config in configurations:
+ arguments = [devenv, sln_path, '/Build', config]
+ print('Building [%s]: %s' % (config, arguments))
+ rtn = subprocess.check_call(arguments)
+
+
+def CalculateGeneratorInputInfo(params):
+ if params.get('flavor') == 'ninja':
+ toplevel = params['options'].toplevel_dir
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, ninja_generator.ComputeOutputDir(params),
+ 'gypfiles-msvs-ninja'))
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': toplevel,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ """Generate .sln and .vcproj files.
+
+ This is the entry point for this generator.
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ data: Dictionary containing per .gyp data.
+ params: Generator parameters, including global options.
+ """
+ global fixpath_prefix
+
+ options = params['options']
+
+ # Get the project file format version back out of where we stashed it in
+ # GeneratorCalculatedVariables.
+ msvs_version = params['msvs_version']
+
+ generator_flags = params.get('generator_flags', {})
+
+ # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
+ (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts)
+
+ # Optionally use the large PDB workaround for targets marked with
+ # 'msvs_large_pdb': 1.
+ (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
+ target_list, target_dicts, generator_default_variables)
+
+ # Optionally configure each spec to use ninja as the external builder.
+ if params.get('flavor') == 'ninja':
+ _InitNinjaFlavor(params, target_list, target_dicts)
+
+ # Prepare the set of configurations.
+ configs = set()
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ for config_name, config in spec['configurations'].items():
+ configs.add(_ConfigFullName(config_name, config))
+ configs = list(configs)
+
+ # Figure out all the projects that will be generated and their guids
+ project_objects = _CreateProjectObjects(target_list, target_dicts, options,
+ msvs_version)
+
+ # Generate each project.
+ missing_sources = []
+ for project in project_objects.values():
+ fixpath_prefix = project.fixpath_prefix
+ missing_sources.extend(_GenerateProject(project, options, msvs_version,
+ generator_flags))
+ fixpath_prefix = None
+
+ for build_file in data:
+ # Validate build_file extension
+ if not build_file.endswith('.gyp'):
+ continue
+ sln_path = os.path.splitext(build_file)[0] + options.suffix + '.sln'
+ if options.generator_output:
+ sln_path = os.path.join(options.generator_output, sln_path)
+ # Get projects in the solution, and their dependents.
+ sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
+ sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects)
+ # Create folder hierarchy.
+ root_entries = _GatherSolutionFolders(
+ sln_projects, project_objects, flat=msvs_version.FlatSolution())
+ # Create solution.
+ sln = MSVSNew.MSVSSolution(sln_path,
+ entries=root_entries,
+ variants=configs,
+ websiteProperties=False,
+ version=msvs_version)
+ sln.Write()
+
+ if missing_sources:
+ error_message = "Missing input files:\n" + \
+ '\n'.join(set(missing_sources))
+ if generator_flags.get('msvs_error_on_missing_sources', False):
+ raise GypError(error_message)
+ else:
+ print("Warning: " + error_message)
+
+
+def _GenerateMSBuildFiltersFile(filters_path, source_files,
+ rule_dependencies, extension_to_rule_name):
+ """Generate the filters file.
+
+ This file is used by Visual Studio to organize the presentation of source
+ files into folders.
+
+ Arguments:
+ filters_path: The path of the file to be created.
+ source_files: The hierarchical structure of all the sources.
+ rule_dependencies: A set of additional rule input files, used to map
+ sources to the CustomBuild element.
+ extension_to_rule_name: A dictionary mapping file extensions to rules.
+ """
+ filter_group = []
+ source_group = []
+ _AppendFiltersForMSBuild('', source_files, rule_dependencies,
+ extension_to_rule_name, filter_group, source_group)
+ if filter_group:
+ content = ['Project',
+ {'ToolsVersion': '4.0',
+ 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
+ },
+ ['ItemGroup'] + filter_group,
+ ['ItemGroup'] + source_group
+ ]
+ easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True)
+ elif os.path.exists(filters_path):
+ # We don't need this filter anymore. Delete the old filter file.
+ os.unlink(filters_path)
+
+
+def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies,
+ extension_to_rule_name,
+ filter_group, source_group):
+ """Creates the list of filters and sources to be added in the filter file.
+
+ Args:
+ parent_filter_name: The name of the filter under which the sources are
+ found.
+ sources: The hierarchy of filters and sources to process.
+ rule_dependencies: A set of additional rule input files, used to map
+ sources to the CustomBuild element.
+ extension_to_rule_name: A dictionary mapping file extensions to rules.
+ filter_group: The list to which filter entries will be appended.
+ source_group: The list to which source entries will be appended.
+ """
+ for source in sources:
+ if isinstance(source, MSVSProject.Filter):
+ # We have a sub-filter. Create the name of that sub-filter.
+ if not parent_filter_name:
+ filter_name = source.name
+ else:
+ filter_name = '%s\\%s' % (parent_filter_name, source.name)
+ # Add the filter to the group.
+ filter_group.append(
+ ['Filter', {'Include': filter_name},
+ ['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]])
+ # Recurse and add its dependents.
+ _AppendFiltersForMSBuild(filter_name, source.contents,
+ rule_dependencies, extension_to_rule_name,
+ filter_group, source_group)
+ else:
+ # It's a source. Create a source entry.
+ _, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
+ extension_to_rule_name)
+ source_entry = [element, {'Include': source}]
+ # Specify the filter it is part of, if any.
+ if parent_filter_name:
+ source_entry.append(['Filter', parent_filter_name])
+ source_group.append(source_entry)
+
+
+def _MapFileToMsBuildSourceType(source, rule_dependencies,
+ extension_to_rule_name):
+ """Returns the group and element type of the source file.
+
+ Arguments:
+ source: The source file name.
+ rule_dependencies: A set of additional rule input files; sources found in
+ this set are mapped to the CustomBuild element.
+ extension_to_rule_name: A dictionary mapping file extensions to rules.
+
+ Returns:
+ A pair of (group this file should be part of, the label of element)
+ """
+ _, ext = os.path.splitext(source)
+ if ext in extension_to_rule_name:
+ group = 'rule'
+ element = extension_to_rule_name[ext]
+ elif ext in ['.cc', '.cpp', '.c', '.cxx']:
+ group = 'compile'
+ element = 'ClCompile'
+ elif ext in ['.h', '.hxx']:
+ group = 'include'
+ element = 'ClInclude'
+ elif ext == '.rc':
+ group = 'resource'
+ element = 'ResourceCompile'
+ elif ext == '.asm':
+ group = 'masm'
+ element = 'MASM'
+ elif ext == '.idl':
+ group = 'midl'
+ element = 'Midl'
+ elif source in rule_dependencies:
+ group = 'rule_dependency'
+ element = 'CustomBuild'
+ else:
+ group = 'none'
+ element = 'None'
+ return (group, element)
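+
+# For illustration, a sketch with made-up example values: 'foo.cc' maps to
+# ('compile', 'ClCompile'), 'foo.rc' to ('resource', 'ResourceCompile'), and
+# a file whose extension appears in extension_to_rule_name maps to
+# ('rule', <that rule's element name>).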
+
+
+def _GenerateRulesForMSBuild(output_dir, options, spec,
+ sources, excluded_sources,
+ props_files_of_rules, targets_files_of_rules,
+ actions_to_add, rule_dependencies,
+ extension_to_rule_name):
+ # MSBuild rules are implemented using three files: an XML file, a .targets
+ # file and a .props file.
+ # See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx
+ # for more details.
+ rules = spec.get('rules', [])
+ rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
+ rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
+
+ msbuild_rules = []
+ for rule in rules_native:
+ # Skip a rule with no action and no inputs.
+ if 'action' not in rule and not rule.get('rule_sources', []):
+ continue
+ msbuild_rule = MSBuildRule(rule, spec)
+ msbuild_rules.append(msbuild_rule)
+ rule_dependencies.update(msbuild_rule.additional_dependencies.split(';'))
+ extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
+ if msbuild_rules:
+ base = spec['target_name'] + options.suffix
+ props_name = base + '.props'
+ targets_name = base + '.targets'
+ xml_name = base + '.xml'
+
+ props_files_of_rules.add(props_name)
+ targets_files_of_rules.add(targets_name)
+
+ props_path = os.path.join(output_dir, props_name)
+ targets_path = os.path.join(output_dir, targets_name)
+ xml_path = os.path.join(output_dir, xml_name)
+
+ _GenerateMSBuildRulePropsFile(props_path, msbuild_rules)
+ _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules)
+ _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules)
+
+ if rules_external:
+ _GenerateExternalRules(rules_external, output_dir, spec,
+ sources, options, actions_to_add)
+ _AdjustSourcesForRules(rules, sources, excluded_sources, True)
+
+
+class MSBuildRule(object):
+ """Used to store information used to generate an MSBuild rule.
+
+ Attributes:
+ rule_name: The rule name, sanitized to use in XML.
+ target_name: The name of the target.
+ after_targets: The name of the AfterTargets element.
+ before_targets: The name of the BeforeTargets element.
+ depends_on: The name of the DependsOn element.
+ compute_output: The name of the ComputeOutput element.
+ dirs_to_make: The name of the DirsToMake element.
+ inputs: The name of the _inputs element.
+ tlog: The name of the _tlog element.
+ extension: The extension this rule applies to.
+ description: The message displayed when this rule is invoked.
+ additional_dependencies: A string listing additional dependencies.
+ outputs: The outputs of this rule.
+ command: The command used to run the rule.
+ """
+
+ def __init__(self, rule, spec):
+ self.display_name = rule['rule_name']
+ # Ensure that the rule name contains only letters, digits and underscores.
+ self.rule_name = re.sub(r'\W', '_', self.display_name)
+ # Create the various element names, following the example set by the
+ # Visual Studio 2008 to 2010 conversion. I don't know if VS2010
+ # is sensitive to the exact names.
+ self.target_name = '_' + self.rule_name
+ self.after_targets = self.rule_name + 'AfterTargets'
+ self.before_targets = self.rule_name + 'BeforeTargets'
+ self.depends_on = self.rule_name + 'DependsOn'
+ self.compute_output = 'Compute%sOutput' % self.rule_name
+ self.dirs_to_make = self.rule_name + 'DirsToMake'
+ self.inputs = self.rule_name + '_inputs'
+ self.tlog = self.rule_name + '_tlog'
+ self.extension = rule['extension']
+ if not self.extension.startswith('.'):
+ self.extension = '.' + self.extension
+
+ self.description = MSVSSettings.ConvertVCMacrosToMSBuild(
+ rule.get('message', self.rule_name))
+ old_additional_dependencies = _FixPaths(rule.get('inputs', []))
+ self.additional_dependencies = (
+ ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
+ for i in old_additional_dependencies]))
+ old_outputs = _FixPaths(rule.get('outputs', []))
+ self.outputs = ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
+ for i in old_outputs])
+ old_command = _BuildCommandLineForRule(spec, rule, has_input_path=True,
+ do_setup_env=True)
+ self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command)
+
+
+def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules):
+ """Generate the .props file."""
+ content = ['Project',
+ {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}]
+ for rule in msbuild_rules:
+ content.extend([
+ ['PropertyGroup',
+ {'Condition': "'$(%s)' == '' and '$(%s)' == '' and "
+ "'$(ConfigurationType)' != 'Makefile'" % (rule.before_targets,
+ rule.after_targets)
+ },
+ [rule.before_targets, 'Midl'],
+ [rule.after_targets, 'CustomBuild'],
+ ],
+ ['PropertyGroup',
+ [rule.depends_on,
+ {'Condition': "'$(ConfigurationType)' != 'Makefile'"},
+ '_SelectedFiles;$(%s)' % rule.depends_on
+ ],
+ ],
+ ['ItemDefinitionGroup',
+ [rule.rule_name,
+ ['CommandLineTemplate', rule.command],
+ ['Outputs', rule.outputs],
+ ['ExecutionDescription', rule.description],
+ ['AdditionalDependencies', rule.additional_dependencies],
+ ],
+ ]
+ ])
+ easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True)
+
+
+def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
+ """Generate the .targets file."""
+ content = ['Project',
+ {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
+ }
+ ]
+ item_group = [
+ 'ItemGroup',
+ ['PropertyPageSchema',
+ {'Include': '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'}
+ ]
+ ]
+ for rule in msbuild_rules:
+ item_group.append(
+ ['AvailableItemName',
+ {'Include': rule.rule_name},
+ ['Targets', rule.target_name],
+ ])
+ content.append(item_group)
+
+ for rule in msbuild_rules:
+ content.append(
+ ['UsingTask',
+ {'TaskName': rule.rule_name,
+ 'TaskFactory': 'XamlTaskFactory',
+ 'AssemblyName': 'Microsoft.Build.Tasks.v4.0'
+ },
+ ['Task', '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'],
+ ])
+ for rule in msbuild_rules:
+ rule_name = rule.rule_name
+ target_outputs = '%%(%s.Outputs)' % rule_name
+ target_inputs = ('%%(%s.Identity);%%(%s.AdditionalDependencies);'
+ '$(MSBuildProjectFile)') % (rule_name, rule_name)
+ rule_inputs = '%%(%s.Identity)' % rule_name
+ extension_condition = ("'%(Extension)'=='.obj' or "
+ "'%(Extension)'=='.res' or "
+ "'%(Extension)'=='.rsc' or "
+ "'%(Extension)'=='.lib'")
+ remove_section = [
+ 'ItemGroup',
+ {'Condition': "'@(SelectedFiles)' != ''"},
+ [rule_name,
+ {'Remove': '@(%s)' % rule_name,
+ 'Condition': "'%(Identity)' != '@(SelectedFiles)'"
+ }
+ ]
+ ]
+ inputs_section = [
+ 'ItemGroup',
+ [rule.inputs, {'Include': '%%(%s.AdditionalDependencies)' % rule_name}]
+ ]
+ logging_section = [
+ 'ItemGroup',
+ [rule.tlog,
+ {'Include': '%%(%s.Outputs)' % rule_name,
+ 'Condition': ("'%%(%s.Outputs)' != '' and "
+ "'%%(%s.ExcludedFromBuild)' != 'true'" %
+ (rule_name, rule_name))
+ },
+ ['Source', "@(%s, '|')" % rule_name],
+ ['Inputs', "@(%s -> '%%(Fullpath)', ';')" % rule.inputs],
+ ],
+ ]
+ message_section = [
+ 'Message',
+ {'Importance': 'High',
+ 'Text': '%%(%s.ExecutionDescription)' % rule_name
+ }
+ ]
+ write_tlog_section = [
+ 'WriteLinesToFile',
+ {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
+ "'true'" % (rule.tlog, rule.tlog),
+ 'File': '$(IntDir)$(ProjectName).write.1.tlog',
+ 'Lines': "^%%(%s.Source);@(%s->'%%(Fullpath)')" % (rule.tlog,
+ rule.tlog)
+ }
+ ]
+ read_tlog_section = [
+ 'WriteLinesToFile',
+ {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
+ "'true'" % (rule.tlog, rule.tlog),
+ 'File': '$(IntDir)$(ProjectName).read.1.tlog',
+ 'Lines': "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog)
+ }
+ ]
+ command_and_input_section = [
+ rule_name,
+ {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
+ "'true'" % (rule_name, rule_name),
+ 'EchoOff': 'true',
+ 'StandardOutputImportance': 'High',
+ 'StandardErrorImportance': 'High',
+ 'CommandLineTemplate': '%%(%s.CommandLineTemplate)' % rule_name,
+ 'AdditionalOptions': '%%(%s.AdditionalOptions)' % rule_name,
+ 'Inputs': rule_inputs
+ }
+ ]
+ content.extend([
+ ['Target',
+ {'Name': rule.target_name,
+ 'BeforeTargets': '$(%s)' % rule.before_targets,
+ 'AfterTargets': '$(%s)' % rule.after_targets,
+ 'Condition': "'@(%s)' != ''" % rule_name,
+ 'DependsOnTargets': '$(%s);%s' % (rule.depends_on,
+ rule.compute_output),
+ 'Outputs': target_outputs,
+ 'Inputs': target_inputs
+ },
+ remove_section,
+ inputs_section,
+ logging_section,
+ message_section,
+ write_tlog_section,
+ read_tlog_section,
+ command_and_input_section,
+ ],
+ ['PropertyGroup',
+ ['ComputeLinkInputsTargets',
+ '$(ComputeLinkInputsTargets);',
+ '%s;' % rule.compute_output
+ ],
+ ['ComputeLibInputsTargets',
+ '$(ComputeLibInputsTargets);',
+ '%s;' % rule.compute_output
+ ],
+ ],
+ ['Target',
+ {'Name': rule.compute_output,
+ 'Condition': "'@(%s)' != ''" % rule_name
+ },
+ ['ItemGroup',
+ [rule.dirs_to_make,
+ {'Condition': "'@(%s)' != '' and "
+ "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name),
+ 'Include': '%%(%s.Outputs)' % rule_name
+ }
+ ],
+ ['Link',
+ {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
+ 'Condition': extension_condition
+ }
+ ],
+ ['Lib',
+ {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
+ 'Condition': extension_condition
+ }
+ ],
+ ['ImpLib',
+ {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
+ 'Condition': extension_condition
+ }
+ ],
+ ],
+ ['MakeDir',
+ {'Directories': ("@(%s->'%%(RootDir)%%(Directory)')" %
+ rule.dirs_to_make)
+ }
+ ]
+ ],
+ ])
+ easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True)
+
+
+def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules):
+ # Generate the .xml file
+ content = [
+ 'ProjectSchemaDefinitions',
+ {'xmlns': ('clr-namespace:Microsoft.Build.Framework.XamlTypes;'
+ 'assembly=Microsoft.Build.Framework'),
+ 'xmlns:x': 'http://schemas.microsoft.com/winfx/2006/xaml',
+ 'xmlns:sys': 'clr-namespace:System;assembly=mscorlib',
+ 'xmlns:transformCallback':
+ 'Microsoft.Cpp.Dev10.ConvertPropertyCallback'
+ }
+ ]
+ for rule in msbuild_rules:
+ content.extend([
+ ['Rule',
+ {'Name': rule.rule_name,
+ 'PageTemplate': 'tool',
+ 'DisplayName': rule.display_name,
+ 'Order': '200'
+ },
+ ['Rule.DataSource',
+ ['DataSource',
+ {'Persistence': 'ProjectFile',
+ 'ItemType': rule.rule_name
+ }
+ ]
+ ],
+ ['Rule.Categories',
+ ['Category',
+ {'Name': 'General'},
+ ['Category.DisplayName',
+ ['sys:String', 'General'],
+ ],
+ ],
+ ['Category',
+ {'Name': 'Command Line',
+ 'Subtype': 'CommandLine'
+ },
+ ['Category.DisplayName',
+ ['sys:String', 'Command Line'],
+ ],
+ ],
+ ],
+ ['StringListProperty',
+ {'Name': 'Inputs',
+ 'Category': 'Command Line',
+ 'IsRequired': 'true',
+ 'Switch': ' '
+ },
+ ['StringListProperty.DataSource',
+ ['DataSource',
+ {'Persistence': 'ProjectFile',
+ 'ItemType': rule.rule_name,
+ 'SourceType': 'Item'
+ }
+ ]
+ ],
+ ],
+ ['StringProperty',
+ {'Name': 'CommandLineTemplate',
+ 'DisplayName': 'Command Line',
+ 'Visible': 'False',
+ 'IncludeInCommandLine': 'False'
+ }
+ ],
+ ['DynamicEnumProperty',
+ {'Name': rule.before_targets,
+ 'Category': 'General',
+ 'EnumProvider': 'Targets',
+ 'IncludeInCommandLine': 'False'
+ },
+ ['DynamicEnumProperty.DisplayName',
+ ['sys:String', 'Execute Before'],
+ ],
+ ['DynamicEnumProperty.Description',
+ ['sys:String', 'Specifies the targets for the build customization'
+ ' to run before.'
+ ],
+ ],
+ ['DynamicEnumProperty.ProviderSettings',
+ ['NameValuePair',
+ {'Name': 'Exclude',
+ 'Value': '^%s|^Compute' % rule.before_targets
+ }
+ ]
+ ],
+ ['DynamicEnumProperty.DataSource',
+ ['DataSource',
+ {'Persistence': 'ProjectFile',
+ 'HasConfigurationCondition': 'true'
+ }
+ ]
+ ],
+ ],
+ ['DynamicEnumProperty',
+ {'Name': rule.after_targets,
+ 'Category': 'General',
+ 'EnumProvider': 'Targets',
+ 'IncludeInCommandLine': 'False'
+ },
+ ['DynamicEnumProperty.DisplayName',
+ ['sys:String', 'Execute After'],
+ ],
+ ['DynamicEnumProperty.Description',
+ ['sys:String', ('Specifies the targets for the build customization'
+ ' to run after.')
+ ],
+ ],
+ ['DynamicEnumProperty.ProviderSettings',
+ ['NameValuePair',
+ {'Name': 'Exclude',
+ 'Value': '^%s|^Compute' % rule.after_targets
+ }
+ ]
+ ],
+ ['DynamicEnumProperty.DataSource',
+ ['DataSource',
+ {'Persistence': 'ProjectFile',
+ 'ItemType': '',
+ 'HasConfigurationCondition': 'true'
+ }
+ ]
+ ],
+ ],
+ ['StringListProperty',
+ {'Name': 'Outputs',
+ 'DisplayName': 'Outputs',
+ 'Visible': 'False',
+ 'IncludeInCommandLine': 'False'
+ }
+ ],
+ ['StringProperty',
+ {'Name': 'ExecutionDescription',
+ 'DisplayName': 'Execution Description',
+ 'Visible': 'False',
+ 'IncludeInCommandLine': 'False'
+ }
+ ],
+ ['StringListProperty',
+ {'Name': 'AdditionalDependencies',
+ 'DisplayName': 'Additional Dependencies',
+ 'IncludeInCommandLine': 'False',
+ 'Visible': 'false'
+ }
+ ],
+ ['StringProperty',
+ {'Subtype': 'AdditionalOptions',
+ 'Name': 'AdditionalOptions',
+ 'Category': 'Command Line'
+ },
+ ['StringProperty.DisplayName',
+ ['sys:String', 'Additional Options'],
+ ],
+ ['StringProperty.Description',
+ ['sys:String', 'Additional Options'],
+ ],
+ ],
+ ],
+ ['ItemType',
+ {'Name': rule.rule_name,
+ 'DisplayName': rule.display_name
+ }
+ ],
+ ['FileExtension',
+ {'Name': '*' + rule.extension,
+ 'ContentType': rule.rule_name
+ }
+ ],
+ ['ContentType',
+ {'Name': rule.rule_name,
+ 'DisplayName': '',
+ 'ItemType': rule.rule_name
+ }
+ ]
+ ])
+ easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True)
+
+
+def _GetConfigurationAndPlatform(name, settings):
+ configuration = name.rsplit('_', 1)[0]
+ platform = settings.get('msvs_configuration_platform', 'Win32')
+ return (configuration, platform)
+
+
+def _GetConfigurationCondition(name, settings):
+ return (r"'$(Configuration)|$(Platform)'=='%s|%s'" %
+ _GetConfigurationAndPlatform(name, settings))
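+
+# For illustration, a sketch with made-up example values: a configuration
+# named 'Release_x64' with settings {'msvs_configuration_platform': 'x64'}
+# yields ('Release', 'x64') and the MSBuild condition
+# "'$(Configuration)|$(Platform)'=='Release|x64'".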
+
+
+def _GetMSBuildProjectConfigurations(configurations):
+ group = ['ItemGroup', {'Label': 'ProjectConfigurations'}]
+ for (name, settings) in sorted(configurations.items()):
+ configuration, platform = _GetConfigurationAndPlatform(name, settings)
+ designation = '%s|%s' % (configuration, platform)
+ group.append(
+ ['ProjectConfiguration', {'Include': designation},
+ ['Configuration', configuration],
+ ['Platform', platform]])
+ return [group]
+
+
+def _GetMSBuildGlobalProperties(spec, version, guid, gyp_file_name):
+ namespace = os.path.splitext(gyp_file_name)[0]
+ properties = [
+ ['PropertyGroup', {'Label': 'Globals'},
+ ['ProjectGuid', guid],
+ ['Keyword', 'Win32Proj'],
+ ['RootNamespace', namespace],
+ ['IgnoreWarnCompileDuplicatedFilename', 'true'],
+ ]
+ ]
+
+ if os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or \
+ os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64':
+ properties[0].append(['PreferredToolArchitecture', 'x64'])
+
+ if spec.get('msvs_target_platform_version'):
+ target_platform_version = spec.get('msvs_target_platform_version')
+ properties[0].append(['WindowsTargetPlatformVersion',
+ target_platform_version])
+ if spec.get('msvs_target_platform_minversion'):
+ target_platform_minversion = spec.get('msvs_target_platform_minversion')
+ properties[0].append(['WindowsTargetPlatformMinVersion',
+ target_platform_minversion])
+ else:
+ properties[0].append(['WindowsTargetPlatformMinVersion',
+ target_platform_version])
+
+ if spec.get('msvs_enable_winrt'):
+ properties[0].append(['DefaultLanguage', 'en-US'])
+ properties[0].append(['AppContainerApplication', 'true'])
+ if spec.get('msvs_application_type_revision'):
+ app_type_revision = spec.get('msvs_application_type_revision')
+ properties[0].append(['ApplicationTypeRevision', app_type_revision])
+ else:
+ properties[0].append(['ApplicationTypeRevision', '8.1'])
+ if spec.get('msvs_enable_winphone'):
+ properties[0].append(['ApplicationType', 'Windows Phone'])
+ else:
+ properties[0].append(['ApplicationType', 'Windows Store'])
+
+ platform_name = None
+ msvs_windows_sdk_version = None
+ for configuration in spec['configurations'].values():
+ platform_name = platform_name or _ConfigPlatform(configuration)
+ msvs_windows_sdk_version = (msvs_windows_sdk_version or
+ _ConfigWindowsTargetPlatformVersion(configuration, version))
+ if platform_name and msvs_windows_sdk_version:
+ break
+ if msvs_windows_sdk_version:
+ properties[0].append(['WindowsTargetPlatformVersion',
+ str(msvs_windows_sdk_version)])
+ elif version.compatible_sdks:
+ raise GypError('%s requires any SDK of %s version, but none were found' %
+ (version.description, version.compatible_sdks))
+
+ if platform_name == 'ARM':
+ properties[0].append(['WindowsSDKDesktopARMSupport', 'true'])
+
+ return properties
+
+
+def _GetMSBuildConfigurationDetails(spec, build_file):
+ properties = {}
+ for name, settings in spec['configurations'].items():
+ msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file)
+ condition = _GetConfigurationCondition(name, settings)
+ character_set = msbuild_attributes.get('CharacterSet')
+ config_type = msbuild_attributes.get('ConfigurationType')
+ _AddConditionalProperty(properties, condition, 'ConfigurationType',
+ config_type)
+ if config_type == 'Driver':
+ _AddConditionalProperty(properties, condition, 'DriverType', 'WDM')
+ _AddConditionalProperty(properties, condition, 'TargetVersion',
+ _ConfigTargetVersion(settings))
+ if character_set:
+ if 'msvs_enable_winrt' not in spec:
+ _AddConditionalProperty(properties, condition, 'CharacterSet',
+ character_set)
+ return _GetMSBuildPropertyGroup(spec, 'Configuration', properties)
+
+
+def _GetMSBuildLocalProperties(msbuild_toolset):
+ # Currently the only local property we support is PlatformToolset
+ properties = {}
+ if msbuild_toolset:
+ properties = [
+ ['PropertyGroup', {'Label': 'Locals'},
+ ['PlatformToolset', msbuild_toolset],
+ ]
+ ]
+ return properties
+
+
+def _GetMSBuildPropertySheets(configurations):
+ user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props'
+ additional_props = {}
+ props_specified = False
+ for name, settings in sorted(configurations.items()):
+ configuration = _GetConfigurationCondition(name, settings)
+ if 'msbuild_props' in settings:
+ additional_props[configuration] = _FixPaths(settings['msbuild_props'])
+ props_specified = True
+ else:
+ additional_props[configuration] = ''
+
+ if not props_specified:
+ return [
+ ['ImportGroup',
+ {'Label': 'PropertySheets'},
+ ['Import',
+ {'Project': user_props,
+ 'Condition': "exists('%s')" % user_props,
+ 'Label': 'LocalAppDataPlatform'
+ }
+ ]
+ ]
+ ]
+ else:
+ sheets = []
+ for condition, props in additional_props.items():
+ import_group = [
+ 'ImportGroup',
+ {'Label': 'PropertySheets',
+ 'Condition': condition
+ },
+ ['Import',
+ {'Project': user_props,
+ 'Condition': "exists('%s')" % user_props,
+ 'Label': 'LocalAppDataPlatform'
+ }
+ ]
+ ]
+ for props_file in props:
+ import_group.append(['Import', {'Project': props_file}])
+ sheets.append(import_group)
+ return sheets
+
+
+def _ConvertMSVSBuildAttributes(spec, config, build_file):
+ config_type = _GetMSVSConfigurationType(spec, build_file)
+ msvs_attributes = _GetMSVSAttributes(spec, config, config_type)
+ msbuild_attributes = {}
+ for a in msvs_attributes:
+ if a in ['IntermediateDirectory', 'OutputDirectory']:
+ directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a])
+ if not directory.endswith('\\'):
+ directory += '\\'
+ msbuild_attributes[a] = directory
+ elif a == 'CharacterSet':
+ msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a])
+ elif a == 'ConfigurationType':
+ msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a])
+ else:
+ print('Warning: Do not know how to convert MSVS attribute ' + a)
+ return msbuild_attributes
+
+
+def _ConvertMSVSCharacterSet(char_set):
+ if char_set.isdigit():
+ char_set = {
+ '0': 'MultiByte',
+ '1': 'Unicode',
+ '2': 'MultiByte',
+ }[char_set]
+ return char_set
+
+
+def _ConvertMSVSConfigurationType(config_type):
+ if config_type.isdigit():
+ config_type = {
+ '1': 'Application',
+ '2': 'DynamicLibrary',
+ '4': 'StaticLibrary',
+ '5': 'Driver',
+ '10': 'Utility'
+ }[config_type]
+ return config_type
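+
+# For illustration, a sketch of the conversions: these helpers translate the
+# numeric MSVS attribute values into the names MSBuild expects, e.g.
+#
+#   assert _ConvertMSVSCharacterSet('1') == 'Unicode'
+#   assert _ConvertMSVSConfigurationType('2') == 'DynamicLibrary'
+#
+# Values that are already names (non-digit strings) pass through unchanged.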
+
+
+def _GetMSBuildAttributes(spec, config, build_file):
+ if 'msbuild_configuration_attributes' not in config:
+ msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file)
+
+ else:
+ config_type = _GetMSVSConfigurationType(spec, build_file)
+ config_type = _ConvertMSVSConfigurationType(config_type)
+ msbuild_attributes = config.get('msbuild_configuration_attributes', {})
+ msbuild_attributes.setdefault('ConfigurationType', config_type)
+ output_dir = msbuild_attributes.get('OutputDirectory',
+ '$(SolutionDir)$(Configuration)')
+ msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) + '\\'
+ if 'IntermediateDirectory' not in msbuild_attributes:
+ intermediate = _FixPath('$(Configuration)') + '\\'
+ msbuild_attributes['IntermediateDirectory'] = intermediate
+ if 'CharacterSet' in msbuild_attributes:
+ msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet(
+ msbuild_attributes['CharacterSet'])
+ if 'TargetName' not in msbuild_attributes:
+ prefix = spec.get('product_prefix', '')
+ product_name = spec.get('product_name', '$(ProjectName)')
+ target_name = prefix + product_name
+ msbuild_attributes['TargetName'] = target_name
+ if 'TargetExt' not in msbuild_attributes and 'product_extension' in spec:
+ ext = spec.get('product_extension')
+ msbuild_attributes['TargetExt'] = '.' + ext
+
+ if spec.get('msvs_external_builder'):
+ external_out_dir = spec.get('msvs_external_builder_out_dir', '.')
+ msbuild_attributes['OutputDirectory'] = _FixPath(external_out_dir) + '\\'
+
+ # Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile'
+ # (depending on the tool used) to avoid MSB8012 warning.
+ msbuild_tool_map = {
+ 'executable': 'Link',
+ 'shared_library': 'Link',
+ 'loadable_module': 'Link',
+ 'windows_driver': 'Link',
+ 'static_library': 'Lib',
+ }
+ msbuild_tool = msbuild_tool_map.get(spec['type'])
+ if msbuild_tool:
+ msbuild_settings = config['finalized_msbuild_settings']
+ out_file = msbuild_settings[msbuild_tool].get('OutputFile')
+ if out_file:
+ msbuild_attributes['TargetPath'] = _FixPath(out_file)
+ target_ext = msbuild_settings[msbuild_tool].get('TargetExt')
+ if target_ext:
+ msbuild_attributes['TargetExt'] = target_ext
+
+ return msbuild_attributes
+
+
+def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
+ # TODO(jeanluc) We could optimize out the following and do it only if
+ # there are actions.
+ # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'.
+ new_paths = []
+ cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])[0]
+ if cygwin_dirs:
+ cyg_path = '$(MSBuildProjectDirectory)\\%s\\bin\\' % _FixPath(cygwin_dirs)
+ new_paths.append(cyg_path)
+ # TODO(jeanluc) Change the convention to have both a cygwin_dir and a
+ # python_dir.
+ python_path = cyg_path.replace('cygwin\\bin', 'python_26')
+ new_paths.append(python_path)
+ if new_paths:
+ new_paths = '$(ExecutablePath);' + ';'.join(new_paths)
+
+ properties = {}
+ for (name, configuration) in sorted(configurations.items()):
+ condition = _GetConfigurationCondition(name, configuration)
+ attributes = _GetMSBuildAttributes(spec, configuration, build_file)
+ msbuild_settings = configuration['finalized_msbuild_settings']
+ _AddConditionalProperty(properties, condition, 'IntDir',
+ attributes['IntermediateDirectory'])
+ _AddConditionalProperty(properties, condition, 'OutDir',
+ attributes['OutputDirectory'])
+ _AddConditionalProperty(properties, condition, 'TargetName',
+ attributes['TargetName'])
+ if 'TargetExt' in attributes:
+ _AddConditionalProperty(properties, condition, 'TargetExt',
+ attributes['TargetExt'])
+
+ if attributes.get('TargetPath'):
+ _AddConditionalProperty(properties, condition, 'TargetPath',
+ attributes['TargetPath'])
+ if attributes.get('TargetExt'):
+ _AddConditionalProperty(properties, condition, 'TargetExt',
+ attributes['TargetExt'])
+
+ if new_paths:
+ _AddConditionalProperty(properties, condition, 'ExecutablePath',
+ new_paths)
+ tool_settings = msbuild_settings.get('', {})
+ for name, value in sorted(tool_settings.items()):
+ formatted_value = _GetValueFormattedForMSBuild('', name, value)
+ _AddConditionalProperty(properties, condition, name, formatted_value)
+ return _GetMSBuildPropertyGroup(spec, None, properties)
+
+
+def _AddConditionalProperty(properties, condition, name, value):
+ """Adds a property / conditional value pair to a dictionary.
+
+ Arguments:
+ properties: The dictionary to be modified. The key is the name of the
+ property. The value is itself a dictionary; its key is the value and
+ the value a list of condition for which this value is true.
+ condition: The condition under which the named property has the value.
+ name: The name of the property.
+ value: The value of the property.
+ """
+ if name not in properties:
+ properties[name] = {}
+ values = properties[name]
+ if value not in values:
+ values[value] = []
+ conditions = values[value]
+ conditions.append(condition)
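+
+# For illustration, a sketch with made-up example values: after
+#
+#   props = {}
+#   _AddConditionalProperty(props, cond_debug, 'OutDir', '$(Configuration)\\')
+#   _AddConditionalProperty(props, cond_release, 'OutDir', '$(Configuration)\\')
+#
+# props == {'OutDir': {'$(Configuration)\\': [cond_debug, cond_release]}},
+# i.e. values are grouped so that a value shared by every configuration can
+# later be emitted as a single unconditional property.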
+
+
+# Regex for msvs variable references ( i.e. $(FOO) ).
+MSVS_VARIABLE_REFERENCE = re.compile(r'\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
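+# For illustration: this matches each $(Name) reference, e.g.
+# MSVS_VARIABLE_REFERENCE.findall('$(OutDir)\\$(TargetName).exe') returns
+# ['OutDir', 'TargetName'].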
+
+
+def _GetMSBuildPropertyGroup(spec, label, properties):
+ """Returns a PropertyGroup definition for the specified properties.
+
+ Arguments:
+ spec: The target project dict.
+ label: An optional label for the PropertyGroup.
+ properties: The dictionary to be converted. The key is the name of the
+ property. The value is itself a dictionary; its key is the value and
+        the value a list of conditions for which this value is true.
+ """
+ group = ['PropertyGroup']
+ if label:
+ group.append({'Label': label})
+ num_configurations = len(spec['configurations'])
+ def GetEdges(node):
+    # Use a definition of edges such that user_of_variable -> used_variable.
+ # This happens to be easier in this case, since a variable's
+ # definition contains all variables it references in a single string.
+ edges = set()
+ for value in sorted(properties[node].keys()):
+ # Add to edges all $(...) references to variables.
+ #
+      # Variable references that refer to names not in properties are excluded.
+      # These can exist, for instance, to refer to built-in definitions like
+      # $(SolutionDir).
+      #
+      # Self-references are ignored; self-reference is used in a few places to
+      # append to the default value, e.g. PATH=$(PATH);other_path.
+ edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(value)
+ if v in properties and v != node]))
+ return edges
+ properties_ordered = gyp.common.TopologicallySorted(
+ properties.keys(), GetEdges)
+ # Walk properties in the reverse of a topological sort on
+ # user_of_variable -> used_variable as this ensures variables are
+ # defined before they are used.
+ # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
+ for name in reversed(properties_ordered):
+ values = properties[name]
+ for value, conditions in sorted(values.items()):
+ if len(conditions) == num_configurations:
+          # If the value is the same for all configurations,
+ # just add one unconditional entry.
+ group.append([name, value])
+ else:
+ for condition in conditions:
+ group.append([name, {'Condition': condition}, value])
+ return [group]
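+
+
+# For illustration (hypothetical values): with two configurations that share a
+# 'TargetName' but differ on 'OutDir', the returned structure is roughly
+#   [['PropertyGroup',
+#     ['TargetName', 'foo'],
+#     ['OutDir', {'Condition': "...=='Debug|Win32'"}, 'Debug\\'],
+#     ['OutDir', {'Condition': "...=='Release|Win32'"}, 'Release\\']]]
+# Values shared by every configuration are emitted once without a condition;
+# easy_xml later serializes this nested-list form to XML.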
+
+
+def _GetMSBuildToolSettingsSections(spec, configurations):
+ groups = []
+ for (name, configuration) in sorted(configurations.items()):
+ msbuild_settings = configuration['finalized_msbuild_settings']
+ group = ['ItemDefinitionGroup',
+ {'Condition': _GetConfigurationCondition(name, configuration)}
+ ]
+ for tool_name, tool_settings in sorted(msbuild_settings.items()):
+      # Skip the tool named '', which is a holder of global settings handled
+ # by _GetMSBuildConfigurationGlobalProperties.
+ if tool_name:
+ if tool_settings:
+ tool = [tool_name]
+ for name, value in sorted(tool_settings.items()):
+ formatted_value = _GetValueFormattedForMSBuild(tool_name, name,
+ value)
+ tool.append([name, formatted_value])
+ group.append(tool)
+ groups.append(group)
+ return groups
+
+
+def _FinalizeMSBuildSettings(spec, configuration):
+ if 'msbuild_settings' in configuration:
+ converted = False
+ msbuild_settings = configuration['msbuild_settings']
+ MSVSSettings.ValidateMSBuildSettings(msbuild_settings)
+ else:
+ converted = True
+ msvs_settings = configuration.get('msvs_settings', {})
+ msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
+ include_dirs, midl_include_dirs, resource_include_dirs = \
+ _GetIncludeDirs(configuration)
+ libraries = _GetLibraries(spec)
+ library_dirs = _GetLibraryDirs(configuration)
+ out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
+ target_ext = _GetOutputTargetExt(spec)
+ defines = _GetDefines(configuration)
+ if converted:
+ # Visual Studio 2010 has TR1
+ defines = [d for d in defines if d != '_HAS_TR1=0']
+ # Warn of ignored settings
+ ignored_settings = ['msvs_tool_files']
+ for ignored_setting in ignored_settings:
+ value = configuration.get(ignored_setting)
+ if value:
+ print('Warning: The automatic conversion to MSBuild does not handle '
+ '%s. Ignoring setting of %s' % (ignored_setting, str(value)))
+
+ defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
+ disabled_warnings = _GetDisabledWarnings(configuration)
+ prebuild = configuration.get('msvs_prebuild')
+ postbuild = configuration.get('msvs_postbuild')
+ def_file = _GetModuleDefinition(spec)
+ precompiled_header = configuration.get('msvs_precompiled_header')
+
+ # Add the information to the appropriate tool
+ # TODO(jeanluc) We could optimize and generate these settings only if
+ # the corresponding files are found, e.g. don't generate ResourceCompile
+ # if you don't have any resources.
+ _ToolAppend(msbuild_settings, 'ClCompile',
+ 'AdditionalIncludeDirectories', include_dirs)
+ _ToolAppend(msbuild_settings, 'Midl',
+ 'AdditionalIncludeDirectories', midl_include_dirs)
+ _ToolAppend(msbuild_settings, 'ResourceCompile',
+ 'AdditionalIncludeDirectories', resource_include_dirs)
+  # Add in libraries; note that even for empty libraries we want this set,
+  # to prevent inheriting default libraries from the environment.
+ _ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies',
+ libraries)
+ _ToolAppend(msbuild_settings, 'Link', 'AdditionalLibraryDirectories',
+ library_dirs)
+ if out_file:
+ _ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file,
+ only_if_unset=True)
+ if target_ext:
+ _ToolAppend(msbuild_settings, msbuild_tool, 'TargetExt', target_ext,
+ only_if_unset=True)
+ # Add defines.
+ _ToolAppend(msbuild_settings, 'ClCompile',
+ 'PreprocessorDefinitions', defines)
+ _ToolAppend(msbuild_settings, 'ResourceCompile',
+ 'PreprocessorDefinitions', defines)
+ # Add disabled warnings.
+ _ToolAppend(msbuild_settings, 'ClCompile',
+ 'DisableSpecificWarnings', disabled_warnings)
+ # Turn on precompiled headers if appropriate.
+ if precompiled_header:
+ precompiled_header = os.path.split(precompiled_header)[1]
+ _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'Use')
+ _ToolAppend(msbuild_settings, 'ClCompile',
+ 'PrecompiledHeaderFile', precompiled_header)
+ _ToolAppend(msbuild_settings, 'ClCompile',
+ 'ForcedIncludeFiles', [precompiled_header])
+ else:
+ _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'NotUsing')
+ # Turn off WinRT compilation
+ _ToolAppend(msbuild_settings, 'ClCompile', 'CompileAsWinRT', 'false')
+ # Turn on import libraries if appropriate
+ if spec.get('msvs_requires_importlibrary'):
+ _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'false')
+ # Loadable modules don't generate import libraries;
+ # tell dependent projects to not expect one.
+ if spec['type'] == 'loadable_module':
+ _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'true')
+ # Set the module definition file if any.
+ if def_file:
+ _ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file)
+ configuration['finalized_msbuild_settings'] = msbuild_settings
+ if prebuild:
+ _ToolAppend(msbuild_settings, 'PreBuildEvent', 'Command', prebuild)
+ if postbuild:
+ _ToolAppend(msbuild_settings, 'PostBuildEvent', 'Command', postbuild)
+
+
+def _GetValueFormattedForMSBuild(tool_name, name, value):
+ if type(value) == list:
+    # For some settings, VS2010 does not automatically extend the settings.
+ # TODO(jeanluc) Is this what we want?
+ if name in ['AdditionalIncludeDirectories',
+ 'AdditionalLibraryDirectories',
+ 'AdditionalOptions',
+ 'DelayLoadDLLs',
+ 'DisableSpecificWarnings',
+ 'PreprocessorDefinitions']:
+ value.append('%%(%s)' % name)
+ # For most tools, entries in a list should be separated with ';' but some
+ # settings use a space. Check for those first.
+ exceptions = {
+ 'ClCompile': ['AdditionalOptions'],
+ 'Link': ['AdditionalOptions'],
+ 'Lib': ['AdditionalOptions']}
+ if tool_name in exceptions and name in exceptions[tool_name]:
+ char = ' '
+ else:
+ char = ';'
+ formatted_value = char.join(
+ [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value])
+ else:
+ formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value)
+ return formatted_value
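+
+
+# For illustration (hypothetical input):
+#   _GetValueFormattedForMSBuild('ClCompile', 'PreprocessorDefinitions',
+#                                ['FOO', 'BAR=1'])
+# yields 'FOO;BAR=1;%(PreprocessorDefinitions)': the inheritance marker is
+# appended for the list settings named above and entries are joined with ';',
+# while 'AdditionalOptions' for ClCompile/Link/Lib is joined with spaces.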
+
+
+def _VerifySourcesExist(sources, root_dir):
+ """Verifies that all source files exist on disk.
+
+  Checks that all regular source files, i.e. those not created at run time,
+  exist on disk. Missing files cause needless recompilation but produce no
+  otherwise visible errors.
+
+ Arguments:
+ sources: A recursive list of Filter/file names.
+ root_dir: The root directory for the relative path names.
+ Returns:
+ A list of source files that cannot be found on disk.
+ """
+ missing_sources = []
+ for source in sources:
+ if isinstance(source, MSVSProject.Filter):
+ missing_sources.extend(_VerifySourcesExist(source.contents, root_dir))
+ else:
+ if '$' not in source:
+ full_path = os.path.join(root_dir, source)
+ if not os.path.exists(full_path):
+ missing_sources.append(full_path)
+ return missing_sources
+
+
+def _GetMSBuildSources(spec, sources, exclusions, rule_dependencies,
+ extension_to_rule_name, actions_spec,
+ sources_handled_by_action, list_excluded):
+ groups = ['none', 'masm', 'midl', 'include', 'compile', 'resource', 'rule',
+ 'rule_dependency']
+ grouped_sources = {}
+ for g in groups:
+ grouped_sources[g] = []
+
+ _AddSources2(spec, sources, exclusions, grouped_sources,
+ rule_dependencies, extension_to_rule_name,
+ sources_handled_by_action, list_excluded)
+ sources = []
+ for g in groups:
+ if grouped_sources[g]:
+ sources.append(['ItemGroup'] + grouped_sources[g])
+ if actions_spec:
+ sources.append(['ItemGroup'] + actions_spec)
+ return sources
+
+
+def _AddSources2(spec, sources, exclusions, grouped_sources,
+ rule_dependencies, extension_to_rule_name,
+ sources_handled_by_action,
+ list_excluded):
+ extensions_excluded_from_precompile = []
+ for source in sources:
+ if isinstance(source, MSVSProject.Filter):
+ _AddSources2(spec, source.contents, exclusions, grouped_sources,
+ rule_dependencies, extension_to_rule_name,
+ sources_handled_by_action,
+ list_excluded)
+ else:
+      if source not in sources_handled_by_action:
+ detail = []
+ excluded_configurations = exclusions.get(source, [])
+ if len(excluded_configurations) == len(spec['configurations']):
+ detail.append(['ExcludedFromBuild', 'true'])
+ else:
+ for config_name, configuration in sorted(excluded_configurations):
+ condition = _GetConfigurationCondition(config_name, configuration)
+ detail.append(['ExcludedFromBuild',
+ {'Condition': condition},
+ 'true'])
+ # Add precompile if needed
+ for config_name, configuration in spec['configurations'].items():
+ precompiled_source = configuration.get('msvs_precompiled_source', '')
+ if precompiled_source != '':
+ precompiled_source = _FixPath(precompiled_source)
+ if not extensions_excluded_from_precompile:
+ # If the precompiled header is generated by a C source, we must
+ # not try to use it for C++ sources, and vice versa.
+ basename, extension = os.path.splitext(precompiled_source)
+ if extension == '.c':
+ extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
+ else:
+ extensions_excluded_from_precompile = ['.c']
+
+ if precompiled_source == source:
+ condition = _GetConfigurationCondition(config_name, configuration)
+ detail.append(['PrecompiledHeader',
+ {'Condition': condition},
+ 'Create'
+ ])
+ else:
+ # Turn off precompiled header usage for source files of a
+ # different type than the file that generated the
+ # precompiled header.
+ for extension in extensions_excluded_from_precompile:
+ if source.endswith(extension):
+ detail.append(['PrecompiledHeader', ''])
+ detail.append(['ForcedIncludeFiles', ''])
+
+ group, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
+ extension_to_rule_name)
+ grouped_sources[group].append([element, {'Include': source}] + detail)
+
+
+def _GetMSBuildProjectReferences(project):
+ references = []
+ if project.dependencies:
+ group = ['ItemGroup']
+ for dependency in project.dependencies:
+ guid = dependency.guid
+ project_dir = os.path.split(project.path)[0]
+ relative_path = gyp.common.RelativePath(dependency.path, project_dir)
+ project_ref = ['ProjectReference',
+ {'Include': relative_path},
+ ['Project', guid],
+ ['ReferenceOutputAssembly', 'false']
+ ]
+ for config in dependency.spec.get('configurations', {}).values():
+ if config.get('msvs_use_library_dependency_inputs', 0):
+ project_ref.append(['UseLibraryDependencyInputs', 'true'])
+ break
+ # If it's disabled in any config, turn it off in the reference.
+ if config.get('msvs_2010_disable_uldi_when_referenced', 0):
+ project_ref.append(['UseLibraryDependencyInputs', 'false'])
+ break
+ group.append(project_ref)
+ references.append(group)
+ return references
+
+
+def _GenerateMSBuildProject(project, options, version, generator_flags):
+ spec = project.spec
+ configurations = spec['configurations']
+ project_dir, project_file_name = os.path.split(project.path)
+ gyp.common.EnsureDirExists(project.path)
+ # Prepare list of sources and excluded sources.
+ gyp_path = _NormalizedSource(project.build_file)
+ relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
+
+ gyp_file = os.path.split(project.build_file)[1]
+ sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
+ gyp_file)
+ # Add rules.
+ actions_to_add = {}
+ props_files_of_rules = set()
+ targets_files_of_rules = set()
+ rule_dependencies = set()
+ extension_to_rule_name = {}
+ list_excluded = generator_flags.get('msvs_list_excluded_files', True)
+
+ # Don't generate rules if we are using an external builder like ninja.
+ if not spec.get('msvs_external_builder'):
+ _GenerateRulesForMSBuild(project_dir, options, spec,
+ sources, excluded_sources,
+ props_files_of_rules, targets_files_of_rules,
+ actions_to_add, rule_dependencies,
+ extension_to_rule_name)
+ else:
+ rules = spec.get('rules', [])
+ _AdjustSourcesForRules(rules, sources, excluded_sources, True)
+
+ sources, excluded_sources, excluded_idl = (
+ _AdjustSourcesAndConvertToFilterHierarchy(spec, options,
+ project_dir, sources,
+ excluded_sources,
+ list_excluded, version))
+
+ # Don't add actions if we are using an external builder like ninja.
+ if not spec.get('msvs_external_builder'):
+ _AddActions(actions_to_add, spec, project.build_file)
+ _AddCopies(actions_to_add, spec)
+
+ # NOTE: this stanza must appear after all actions have been decided.
+  # Don't exclude sources with actions attached, or they won't run.
+ excluded_sources = _FilterActionsFromExcluded(
+ excluded_sources, actions_to_add)
+
+ exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
+ actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild(
+ spec, actions_to_add)
+
+ _GenerateMSBuildFiltersFile(project.path + '.filters', sources,
+ rule_dependencies,
+ extension_to_rule_name)
+ missing_sources = _VerifySourcesExist(sources, project_dir)
+
+ for configuration in configurations.values():
+ _FinalizeMSBuildSettings(spec, configuration)
+
+ # Add attributes to root element
+
+ import_default_section = [
+ ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]]
+ import_cpp_props_section = [
+ ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]]
+ import_cpp_targets_section = [
+ ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]]
+ import_masm_props_section = [
+ ['Import',
+ {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.props'}]]
+ import_masm_targets_section = [
+ ['Import',
+ {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.targets'}]]
+ macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]
+
+ content = [
+ 'Project',
+ {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003',
+ 'ToolsVersion': version.ProjectVersion(),
+ 'DefaultTargets': 'Build'
+ }]
+
+ content += _GetMSBuildProjectConfigurations(configurations)
+ content += _GetMSBuildGlobalProperties(spec, version, project.guid,
+ project_file_name)
+ content += import_default_section
+ content += _GetMSBuildConfigurationDetails(spec, project.build_file)
+ if spec.get('msvs_enable_winphone'):
+ content += _GetMSBuildLocalProperties('v120_wp81')
+ else:
+ content += _GetMSBuildLocalProperties(project.msbuild_toolset)
+ content += import_cpp_props_section
+ content += import_masm_props_section
+ content += _GetMSBuildExtensions(props_files_of_rules)
+ content += _GetMSBuildPropertySheets(configurations)
+ content += macro_section
+ content += _GetMSBuildConfigurationGlobalProperties(spec, configurations,
+ project.build_file)
+ content += _GetMSBuildToolSettingsSections(spec, configurations)
+ content += _GetMSBuildSources(
+ spec, sources, exclusions, rule_dependencies, extension_to_rule_name,
+ actions_spec, sources_handled_by_action, list_excluded)
+ content += _GetMSBuildProjectReferences(project)
+ content += import_cpp_targets_section
+ content += import_masm_targets_section
+ content += _GetMSBuildExtensionTargets(targets_files_of_rules)
+
+ if spec.get('msvs_external_builder'):
+ content += _GetMSBuildExternalBuilderTargets(spec)
+
+ # TODO(jeanluc) File a bug to get rid of runas. We had in MSVS:
+ # has_run_as = _WriteMSVSUserFile(project.path, version, spec)
+
+ easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True)
+
+ return missing_sources
+
+
+def _GetMSBuildExternalBuilderTargets(spec):
+ """Return a list of MSBuild targets for external builders.
+
+ The "Build" and "Clean" targets are always generated. If the spec contains
+ 'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
+ be generated, to support building selected C/C++ files.
+
+ Arguments:
+ spec: The gyp target spec.
+ Returns:
+ List of MSBuild 'Target' specs.
+ """
+ build_cmd = _BuildCommandLineForRuleRaw(
+ spec, spec['msvs_external_builder_build_cmd'],
+ False, False, False, False)
+ build_target = ['Target', {'Name': 'Build'}]
+ build_target.append(['Exec', {'Command': build_cmd}])
+
+ clean_cmd = _BuildCommandLineForRuleRaw(
+ spec, spec['msvs_external_builder_clean_cmd'],
+ False, False, False, False)
+ clean_target = ['Target', {'Name': 'Clean'}]
+ clean_target.append(['Exec', {'Command': clean_cmd}])
+
+ targets = [build_target, clean_target]
+
+ if spec.get('msvs_external_builder_clcompile_cmd'):
+ clcompile_cmd = _BuildCommandLineForRuleRaw(
+ spec, spec['msvs_external_builder_clcompile_cmd'],
+ False, False, False, False)
+ clcompile_target = ['Target', {'Name': 'ClCompile'}]
+ clcompile_target.append(['Exec', {'Command': clcompile_cmd}])
+ targets.append(clcompile_target)
+
+ return targets
+
+
+def _GetMSBuildExtensions(props_files_of_rules):
+ extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}]
+ for props_file in props_files_of_rules:
+ extensions.append(['Import', {'Project': props_file}])
+ return [extensions]
+
+
+def _GetMSBuildExtensionTargets(targets_files_of_rules):
+ targets_node = ['ImportGroup', {'Label': 'ExtensionTargets'}]
+ for targets_file in sorted(targets_files_of_rules):
+ targets_node.append(['Import', {'Project': targets_file}])
+ return [targets_node]
+
+
+def _GenerateActionsForMSBuild(spec, actions_to_add):
+ """Add actions accumulated into an actions_to_add, merging as needed.
+
+ Arguments:
+ spec: the target project dict
+ actions_to_add: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
+
+ Returns:
+ A pair of (action specification, the sources handled by this action).
+ """
+ sources_handled_by_action = OrderedSet()
+ actions_spec = []
+ for primary_input, actions in actions_to_add.items():
+ inputs = OrderedSet()
+ outputs = OrderedSet()
+ descriptions = []
+ commands = []
+ for action in actions:
+ inputs.update(OrderedSet(action['inputs']))
+ outputs.update(OrderedSet(action['outputs']))
+ descriptions.append(action['description'])
+ cmd = action['command']
+ # For most actions, add 'call' so that actions that invoke batch files
+ # return and continue executing. msbuild_use_call provides a way to
+ # disable this but I have not seen any adverse effect from doing that
+ # for everything.
+ if action.get('msbuild_use_call', True):
+ cmd = 'call ' + cmd
+ commands.append(cmd)
+ # Add the custom build action for one input file.
+ description = ', and also '.join(descriptions)
+
+ # We can't join the commands simply with && because the command line will
+ # get too long. See also _AddActions: cygwin's setup_env mustn't be called
+ # for every invocation or the command that sets the PATH will grow too
+ # long.
+ command = '\r\n'.join([c + '\r\nif %errorlevel% neq 0 exit /b %errorlevel%'
+ for c in commands])
+ _AddMSBuildAction(spec,
+ primary_input,
+ inputs,
+ outputs,
+ command,
+ description,
+ sources_handled_by_action,
+ actions_spec)
+ return actions_spec, sources_handled_by_action
+
+
+def _AddMSBuildAction(spec, primary_input, inputs, outputs, cmd, description,
+ sources_handled_by_action, actions_spec):
+ command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd)
+ primary_input = _FixPath(primary_input)
+ inputs_array = _FixPaths(inputs)
+ outputs_array = _FixPaths(outputs)
+ additional_inputs = ';'.join([i for i in inputs_array
+ if i != primary_input])
+ outputs = ';'.join(outputs_array)
+ sources_handled_by_action.add(primary_input)
+ action_spec = ['CustomBuild', {'Include': primary_input}]
+ action_spec.extend(
+ # TODO(jeanluc) 'Document' for all or just if as_sources?
+ [['FileType', 'Document'],
+ ['Command', command],
+ ['Message', description],
+ ['Outputs', outputs]
+ ])
+ if additional_inputs:
+ action_spec.append(['AdditionalInputs', additional_inputs])
+ actions_spec.append(action_spec)
diff --git a/third_party/python/gyp/pylib/gyp/generator/msvs_test.py b/third_party/python/gyp/pylib/gyp/generator/msvs_test.py
new file mode 100755
index 0000000000..838d236a2d
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/msvs_test.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the msvs.py file. """
+
+import gyp.generator.msvs as msvs
+import unittest
+try:
+ from StringIO import StringIO
+except ImportError:
+ from io import StringIO
+
+
+class TestSequenceFunctions(unittest.TestCase):
+
+ def setUp(self):
+ self.stderr = StringIO()
+
+ def test_GetLibraries(self):
+ self.assertEqual(
+ msvs._GetLibraries({}),
+ [])
+ self.assertEqual(
+ msvs._GetLibraries({'libraries': []}),
+ [])
+ self.assertEqual(
+ msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}),
+ ['a.lib'])
+ self.assertEqual(
+ msvs._GetLibraries({'libraries': ['-la']}),
+ ['a.lib'])
+ self.assertEqual(
+ msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib',
+ '-lb.lib', 'd.lib', 'a.lib']}),
+ ['c.lib', 'b.lib', 'd.lib', 'a.lib'])
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/gyp/pylib/gyp/generator/ninja.py b/third_party/python/gyp/pylib/gyp/generator/ninja.py
new file mode 100644
index 0000000000..bf04e567e7
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/ninja.py
@@ -0,0 +1,2500 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import collections
+import copy
+import hashlib
+import json
+import multiprocessing
+import os.path
+import re
+import signal
+import subprocess
+import sys
+import gyp
+import gyp.common
+from gyp.common import OrderedSet
+import gyp.msvs_emulation
+import gyp.MSVSUtil as MSVSUtil
+import gyp.xcode_emulation
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from io import StringIO
+
+from gyp.common import GetEnvironFallback
+import gyp.ninja_syntax as ninja_syntax
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'STATIC_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_SUFFIX': '.a',
+ 'SHARED_LIB_PREFIX': 'lib',
+
+ # Gyp expects the following variables to be expandable by the build
+ # system to the appropriate locations. Ninja prefers paths to be
+ # known at gyp time. To resolve this, introduce special
+  # variables starting with $! and $| (which begin with a $ so gyp knows they
+  # should be treated specially, but are otherwise invalid
+  # ninja/shell variables) that are passed to gyp here but expanded
+ # before writing out into the target .ninja files; see
+ # ExpandSpecial.
+ # $! is used for variables that represent a path and that can only appear at
+ # the start of a string, while $| is used for variables that can appear
+ # anywhere in a string.
+ 'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
+ 'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
+ 'PRODUCT_DIR': '$!PRODUCT_DIR',
+ 'CONFIGURATION_NAME': '$|CONFIGURATION_NAME',
+
+ # Special variables that may be used by gyp 'rule' targets.
+ # We generate definitions for these variables on the fly when processing a
+ # rule.
+ 'RULE_INPUT_ROOT': '${root}',
+ 'RULE_INPUT_DIRNAME': '${dirname}',
+ 'RULE_INPUT_PATH': '${source}',
+ 'RULE_INPUT_EXT': '${ext}',
+ 'RULE_INPUT_NAME': '${name}',
+}
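+
+# For illustration (hypothetical expansion): a gyp-time path such as
+# '$!PRODUCT_DIR/gen/foo.h' is rewritten by NinjaWriter.ExpandSpecial before it
+# is written to a .ninja file, becoming roughly 'gen/foo.h' when the cwd is
+# already the product dir, or '<product_dir>/gen/foo.h' when a relative product
+# dir is supplied, while '$|CONFIGURATION_NAME' is replaced with the name of the
+# configuration being generated (e.g. 'Debug').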
+
+# Placates pylint.
+generator_additional_non_configuration_keys = []
+generator_additional_path_sections = []
+generator_extra_sources_for_rules = []
+generator_filelist_paths = None
+
+generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
+
+def StripPrefix(arg, prefix):
+ if arg.startswith(prefix):
+ return arg[len(prefix):]
+ return arg
+
+
+def QuoteShellArgument(arg, flavor):
+ """Quote a string such that it will be interpreted as a single argument
+ by the shell."""
+ # Rather than attempting to enumerate the bad shell characters, just
+ # whitelist common OK ones and quote anything else.
+ if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
+ return arg # No quoting necessary.
+ if flavor == 'win':
+ return gyp.msvs_emulation.QuoteForRspFile(arg)
+ return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"
+
+
+def Define(d, flavor):
+ """Takes a preprocessor define and returns a -D parameter that's ninja- and
+ shell-escaped."""
+ if flavor == 'win':
+    # cl.exe replaces literal # characters with = in preprocessor definitions
+    # for some reason. Octal-encode to work around that.
+ d = d.replace('#', '\\%03o' % ord('#'))
+ return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)
+
+
+def AddArch(output, arch):
+ """Adds an arch string to an output path."""
+ output, extension = os.path.splitext(output)
+ return '%s.%s%s' % (output, arch, extension)
+
+
+class Target(object):
+ """Target represents the paths used within a single gyp target.
+
+ Conceptually, building a single target A is a series of steps:
+
+ 1) actions/rules/copies generates source/resources/etc.
+ 2) compiles generates .o files
+ 3) link generates a binary (library/executable)
+ 4) bundle merges the above in a mac bundle
+
+ (Any of these steps can be optional.)
+
+ From a build ordering perspective, a dependent target B could just
+ depend on the last output of this series of steps.
+
+ But some dependent commands sometimes need to reach inside the box.
+ For example, when linking B it needs to get the path to the static
+ library generated by A.
+
+ This object stores those paths. To keep things simple, member
+ variables only store concrete paths to single files, while methods
+ compute derived values like "the last output of the target".
+ """
+ def __init__(self, type):
+ # Gyp type ("static_library", etc.) of this target.
+ self.type = type
+ # File representing whether any input dependencies necessary for
+ # dependent actions have completed.
+ self.preaction_stamp = None
+ # File representing whether any input dependencies necessary for
+ # dependent compiles have completed.
+ self.precompile_stamp = None
+ # File representing the completion of actions/rules/copies, if any.
+ self.actions_stamp = None
+ # Path to the output of the link step, if any.
+ self.binary = None
+ # Path to the file representing the completion of building the bundle,
+ # if any.
+ self.bundle = None
+ # On Windows, incremental linking requires linking against all the .objs
+ # that compose a .lib (rather than the .lib itself). That list is stored
+ # here. In this case, we also need to save the compile_deps for the target,
+    # so that the target that directly depends on the .objs can also depend
+ # on those.
+ self.component_objs = None
+ self.compile_deps = None
+ # Windows only. The import .lib is the output of a build step, but
+ # because dependents only link against the lib (not both the lib and the
+ # dll) we keep track of the import library here.
+ self.import_lib = None
+ # Track if this target contains any C++ files, to decide if gcc or g++
+ # should be used for linking.
+ self.uses_cpp = False
+
+ def Linkable(self):
+ """Return true if this is a target that can be linked against."""
+ return self.type in ('static_library', 'shared_library')
+
+ def UsesToc(self, flavor):
+ """Return true if the target should produce a restat rule based on a TOC
+ file."""
+ # For bundles, the .TOC should be produced for the binary, not for
+ # FinalOutput(). But the naive approach would put the TOC file into the
+ # bundle, so don't do this for bundles for now.
+ if flavor == 'win' or self.bundle:
+ return False
+ return self.type in ('shared_library', 'loadable_module')
+
+ def PreActionInput(self, flavor):
+ """Return the path, if any, that should be used as a dependency of
+ any dependent action step."""
+ if self.UsesToc(flavor):
+ return self.FinalOutput() + '.TOC'
+ return self.FinalOutput() or self.preaction_stamp
+
+ def PreCompileInput(self):
+ """Return the path, if any, that should be used as a dependency of
+ any dependent compile step."""
+ return self.actions_stamp or self.precompile_stamp
+
+ def FinalOutput(self):
+ """Return the last output of the target, which depends on all prior
+ steps."""
+ return self.bundle or self.binary or self.actions_stamp
+
+
+# A small discourse on paths as used within the Ninja build:
+# All files we produce (both at gyp and at build time) appear in the
+# build directory (e.g. out/Debug).
+#
+# Paths within a given .gyp file are always relative to the directory
+# containing the .gyp file. Call these "gyp paths". This includes
+# sources as well as the starting directory a given gyp rule/action
+# expects to be run from. We call the path from the source root to
+# the gyp file the "base directory" within the per-.gyp-file
+# NinjaWriter code.
+#
+# All paths as written into the .ninja files are relative to the build
+# directory. Call these paths "ninja paths".
+#
+# We translate between these two notions of paths with two helper
+# functions:
+#
+# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
+# into the equivalent ninja path.
+#
+# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
+# an output file; the result can be namespaced such that it is unique
+# to the input file name as well as the output target name.
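+#
+# For illustration (hypothetical layout): with the build directory 'out/Debug'
+# and a gyp file at 'foo/foo.gyp', GypPathToNinja('bar.cc') yields roughly
+# '../../foo/bar.cc' (a path relative to out/Debug), while GypPathToUniqueOutput
+# follows the obj/<base_dir>/<target>.<name> scheme described in its docstring.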
+
+class NinjaWriter(object):
+ def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir,
+ output_file, toplevel_build, output_file_name, flavor,
+ toplevel_dir=None):
+ """
+    base_dir: path from source root to directory containing this gyp file;
+              by gyp semantics, all input paths are relative to this
+ build_dir: path from source root to build output
+ toplevel_dir: path to the toplevel directory
+ """
+
+ self.hash_for_rules = hash_for_rules
+ self.target_outputs = target_outputs
+ self.base_dir = base_dir
+ self.build_dir = build_dir
+ self.ninja = ninja_syntax.Writer(output_file)
+ self.toplevel_build = toplevel_build
+ self.output_file_name = output_file_name
+
+ self.flavor = flavor
+ self.abs_build_dir = None
+ if toplevel_dir is not None:
+ self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
+ build_dir))
+ self.obj_ext = '.obj' if flavor == 'win' else '.o'
+ if flavor == 'win':
+ # See docstring of msvs_emulation.GenerateEnvironmentFiles().
+ self.win_env = {}
+ for arch in ('x86', 'x64'):
+ self.win_env[arch] = 'environment.' + arch
+
+ # Relative path from build output dir to base dir.
+ build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
+ self.build_to_base = os.path.join(build_to_top, base_dir)
+ # Relative path from base dir to build dir.
+ base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
+ self.base_to_build = os.path.join(base_to_top, build_dir)
+
+ def ExpandSpecial(self, path, product_dir=None):
+ """Expand specials like $!PRODUCT_DIR in |path|.
+
+ If |product_dir| is None, assumes the cwd is already the product
+ dir. Otherwise, |product_dir| is the relative path to the product
+ dir.
+ """
+
+ PRODUCT_DIR = '$!PRODUCT_DIR'
+ if PRODUCT_DIR in path:
+ if product_dir:
+ path = path.replace(PRODUCT_DIR, product_dir)
+ else:
+ path = path.replace(PRODUCT_DIR + '/', '')
+ path = path.replace(PRODUCT_DIR + '\\', '')
+ path = path.replace(PRODUCT_DIR, '.')
+
+ INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR'
+ if INTERMEDIATE_DIR in path:
+ int_dir = self.GypPathToUniqueOutput('gen')
+ # GypPathToUniqueOutput generates a path relative to the product dir,
+ # so insert product_dir in front if it is provided.
+ path = path.replace(INTERMEDIATE_DIR,
+ os.path.join(product_dir or '', int_dir))
+
+ CONFIGURATION_NAME = '$|CONFIGURATION_NAME'
+ path = path.replace(CONFIGURATION_NAME, self.config_name)
+
+ return path
+
+ def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
+ if self.flavor == 'win':
+ path = self.msvs_settings.ConvertVSMacros(
+ path, config=self.config_name)
+ path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root)
+ path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'],
+ dirname)
+ path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source)
+ path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext)
+ path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name)
+ return path
+
+ def GypPathToNinja(self, path, env=None):
+ """Translate a gyp path to a ninja path, optionally expanding environment
+ variable references in |path| with |env|.
+
+ See the above discourse on path conversions."""
+ if env:
+ if self.flavor == 'mac':
+ path = gyp.xcode_emulation.ExpandEnvVars(path, env)
+ elif self.flavor == 'win':
+ path = gyp.msvs_emulation.ExpandMacros(path, env)
+ if path.startswith('$!'):
+ expanded = self.ExpandSpecial(path)
+ if self.flavor == 'win':
+ expanded = os.path.normpath(expanded)
+ return expanded
+ if '$|' in path:
+ path = self.ExpandSpecial(path)
+ assert '$' not in path, path
+ return os.path.normpath(os.path.join(self.build_to_base, path))
+
+ def GypPathToUniqueOutput(self, path, qualified=True):
+ """Translate a gyp path to a ninja path for writing output.
+
+ If qualified is True, qualify the resulting filename with the name
+ of the target. This is necessary when e.g. compiling the same
+ path twice for two separate output targets.
+
+ See the above discourse on path conversions."""
+
+ path = self.ExpandSpecial(path)
+ assert not path.startswith('$'), path
+
+ # Translate the path following this scheme:
+ # Input: foo/bar.gyp, target targ, references baz/out.o
+ # Output: obj/foo/baz/targ.out.o (if qualified)
+ # obj/foo/baz/out.o (otherwise)
+ # (and obj.host instead of obj for cross-compiles)
+ #
+ # Why this scheme and not some other one?
+ # 1) for a given input, you can compute all derived outputs by matching
+ # its path, even if the input is brought via a gyp file with '..'.
+ # 2) simple files like libraries and stamps have a simple filename.
+
+ obj = 'obj'
+ if self.toolset != 'target':
+ obj += '.' + self.toolset
+
+ path_dir, path_basename = os.path.split(path)
+ assert not os.path.isabs(path_dir), (
+ "'%s' can not be absolute path (see crbug.com/462153)." % path_dir)
+
+ if qualified:
+ path_basename = self.name + '.' + path_basename
+ return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
+ path_basename))
+
+ def WriteCollapsedDependencies(self, name, targets, order_only=None):
+ """Given a list of targets, return a path for a single file
+ representing the result of building all the targets or None.
+
+ Uses a stamp file if necessary."""
+
+ assert targets == [t for t in targets if t], targets
+ if len(targets) == 0:
+ assert not order_only
+ return None
+ if len(targets) > 1 or order_only:
+ stamp = self.GypPathToUniqueOutput(name + '.stamp')
+ targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only)
+ self.ninja.newline()
+ return targets[0]
+
+ def _SubninjaNameForArch(self, arch):
+ output_file_base = os.path.splitext(self.output_file_name)[0]
+ return '%s.%s.ninja' % (output_file_base, arch)
+
+ def WriteSpec(self, spec, config_name, generator_flags):
+ """The main entry point for NinjaWriter: write the build rules for a spec.
+
+ Returns a Target object, which represents the output paths for this spec.
+ Returns None if there are no outputs (e.g. a settings-only 'none' type
+ target)."""
+
+ self.config_name = config_name
+ self.name = spec['target_name']
+ self.toolset = spec['toolset']
+ config = spec['configurations'][config_name]
+ self.target = Target(spec['type'])
+ self.is_standalone_static_library = bool(
+ spec.get('standalone_static_library', 0))
+
+ self.target_rpath = generator_flags.get('target_rpath', r'\$$ORIGIN/lib/')
+
+ self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
+ self.xcode_settings = self.msvs_settings = None
+ if self.flavor == 'mac':
+ self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+ mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
+ if mac_toolchain_dir:
+ self.xcode_settings.mac_toolchain_dir = mac_toolchain_dir
+
+ if self.flavor == 'win':
+ self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
+ generator_flags)
+ arch = self.msvs_settings.GetArch(config_name)
+ self.ninja.variable('arch', self.win_env[arch])
+ self.ninja.variable('cc', '$cl_' + arch)
+ self.ninja.variable('cxx', '$cl_' + arch)
+ self.ninja.variable('cc_host', '$cl_' + arch)
+ self.ninja.variable('cxx_host', '$cl_' + arch)
+ self.ninja.variable('asm', '$ml_' + arch)
+
+ if self.flavor == 'mac':
+ self.archs = self.xcode_settings.GetActiveArchs(config_name)
+ if len(self.archs) > 1:
+ self.arch_subninjas = dict(
+ (arch, ninja_syntax.Writer(
+ OpenOutput(os.path.join(self.toplevel_build,
+ self._SubninjaNameForArch(arch)),
+ 'w')))
+ for arch in self.archs)
+
+ # Compute predepends for all rules.
+ # actions_depends is the dependencies this target depends on before running
+ # any of its action/rule/copy steps.
+ # compile_depends is the dependencies this target depends on before running
+ # any of its compile steps.
+ actions_depends = []
+ compile_depends = []
+ # TODO(evan): it is rather confusing which things are lists and which
+ # are strings. Fix these.
+ if 'dependencies' in spec:
+ for dep in spec['dependencies']:
+ if dep in self.target_outputs:
+ target = self.target_outputs[dep]
+ actions_depends.append(target.PreActionInput(self.flavor))
+ compile_depends.append(target.PreCompileInput())
+ if target.uses_cpp:
+ self.target.uses_cpp = True
+ actions_depends = [d for d in actions_depends if d]
+ compile_depends = [d for d in compile_depends if d]
+ actions_depends = self.WriteCollapsedDependencies('actions_depends',
+ actions_depends)
+ compile_depends = self.WriteCollapsedDependencies('compile_depends',
+ compile_depends)
+ self.target.preaction_stamp = actions_depends
+ self.target.precompile_stamp = compile_depends
+
+ # Write out actions, rules, and copies. These must happen before we
+ # compile any sources, so compute a list of predependencies for sources
+ # while we do it.
+ extra_sources = []
+ mac_bundle_depends = []
+ self.target.actions_stamp = self.WriteActionsRulesCopies(
+ spec, extra_sources, actions_depends, mac_bundle_depends)
+
+ # If we have actions/rules/copies, we depend directly on those, but
+ # otherwise we depend on dependent target's actions/rules/copies etc.
+ # We never need to explicitly depend on previous target's link steps,
+ # because no compile ever depends on them.
+ compile_depends_stamp = (self.target.actions_stamp or compile_depends)
+
+ # Write out the compilation steps, if any.
+ link_deps = []
+ try:
+ sources = extra_sources + spec.get('sources', [])
+ except TypeError:
+ print('extra_sources: ', str(extra_sources))
+ print('spec.get("sources"): ', str(spec.get('sources')))
+ raise
+ if sources:
+ if self.flavor == 'mac' and len(self.archs) > 1:
+ # Write subninja file containing compile and link commands scoped to
+ # a single arch if a fat binary is being built.
+ for arch in self.archs:
+ self.ninja.subninja(self._SubninjaNameForArch(arch))
+
+ pch = None
+ if self.flavor == 'win':
+ gyp.msvs_emulation.VerifyMissingSources(
+ sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
+ pch = gyp.msvs_emulation.PrecompiledHeader(
+ self.msvs_settings, config_name, self.GypPathToNinja,
+ self.GypPathToUniqueOutput, self.obj_ext)
+ else:
+ pch = gyp.xcode_emulation.MacPrefixHeader(
+ self.xcode_settings, self.GypPathToNinja,
+ lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
+ link_deps = self.WriteSources(
+ self.ninja, config_name, config, sources, compile_depends_stamp, pch,
+ spec)
+ # Some actions/rules output 'sources' that are already object files.
+ obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
+ if obj_outputs:
+ if self.flavor != 'mac' or len(self.archs) == 1:
+ link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
+ else:
+ print("Warning: Actions/rules writing object files don't work with " \
+ "multiarch targets, dropping. (target %s)" %
+ spec['target_name'])
+ elif self.flavor == 'mac' and len(self.archs) > 1:
+ link_deps = collections.defaultdict(list)
+
+ compile_deps = self.target.actions_stamp or actions_depends
+ if self.flavor == 'win' and self.target.type == 'static_library':
+ self.target.component_objs = link_deps
+ self.target.compile_deps = compile_deps
+
+ # Write out a link step, if needed.
+ output = None
+ is_empty_bundle = not link_deps and not mac_bundle_depends
+ if link_deps or self.target.actions_stamp or actions_depends:
+ output = self.WriteTarget(spec, config_name, config, link_deps,
+ compile_deps)
+ if self.is_mac_bundle:
+ mac_bundle_depends.append(output)
+
+ # Bundle all of the above together, if needed.
+ if self.is_mac_bundle:
+ output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)
+
+ if not output:
+ return None
+
+ assert self.target.FinalOutput(), output
+ return self.target
+
+ def _WinIdlRule(self, source, prebuild, outputs):
+ """Handle the implicit VS .idl rule for one source file. Fills |outputs|
+ with files that are generated."""
+ outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
+ source, self.config_name)
+ outdir = self.GypPathToNinja(outdir)
+ def fix_path(path, rel=None):
+ path = os.path.join(outdir, path)
+ dirname, basename = os.path.split(source)
+ root, ext = os.path.splitext(basename)
+ path = self.ExpandRuleVariables(
+ path, root, dirname, source, ext, basename)
+ if rel:
+ path = os.path.relpath(path, rel)
+ return path
+ vars = [(name, fix_path(value, outdir)) for name, value in vars]
+ output = [fix_path(p) for p in output]
+ vars.append(('outdir', outdir))
+ vars.append(('idlflags', flags))
+ input = self.GypPathToNinja(source)
+ self.ninja.build(output, 'idl', input,
+ variables=vars, order_only=prebuild)
+ outputs.extend(output)
+
+ def WriteWinIdlFiles(self, spec, prebuild):
+ """Writes rules to match MSVS's implicit idl handling."""
+ assert self.flavor == 'win'
+ if self.msvs_settings.HasExplicitIdlRulesOrActions(spec):
+ return []
+ outputs = []
+ for source in filter(lambda x: x.endswith('.idl'), spec['sources']):
+ self._WinIdlRule(source, prebuild, outputs)
+ return outputs
+
+ def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
+ mac_bundle_depends):
+ """Write out the Actions, Rules, and Copies steps. Return a path
+ representing the outputs of these steps."""
+ outputs = []
+ if self.is_mac_bundle:
+ mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
+ else:
+ mac_bundle_resources = []
+ extra_mac_bundle_resources = []
+
+ if 'actions' in spec:
+ outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
+ extra_mac_bundle_resources)
+ if 'rules' in spec:
+ outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
+ mac_bundle_resources,
+ extra_mac_bundle_resources)
+ if 'copies' in spec:
+ outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)
+
+ if 'sources' in spec and self.flavor == 'win':
+ outputs += self.WriteWinIdlFiles(spec, prebuild)
+
+ if self.xcode_settings and self.xcode_settings.IsIosFramework():
+ self.WriteiOSFrameworkHeaders(spec, outputs, prebuild)
+
+ stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
+
+ if self.is_mac_bundle:
+ xcassets = self.WriteMacBundleResources(
+ extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
+ partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
+ self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)
+
+ return stamp
+
+ def GenerateDescription(self, verb, message, fallback):
+ """Generate and return a description of a build step.
+
+ |verb| is the short summary, e.g. ACTION or RULE.
+ |message| is a hand-written description, or None if not available.
+ |fallback| is the gyp-level name of the step, usable as a fallback.
+ """
+ if self.toolset != 'target':
+ verb += '(%s)' % self.toolset
+ if message:
+ return '%s %s' % (verb, self.ExpandSpecial(message))
+ else:
+ return '%s %s: %s' % (verb, self.name, fallback)
+
+ def WriteActions(self, actions, extra_sources, prebuild,
+ extra_mac_bundle_resources):
+ # Actions cd into the base directory.
+ env = self.GetToolchainEnv()
+ all_outputs = []
+ for action in actions:
+ # First write out a rule for the action.
+ name = '%s_%s' % (action['action_name'], self.hash_for_rules)
+ description = self.GenerateDescription('ACTION',
+ action.get('message', None),
+ name)
+ is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
+ if self.flavor == 'win' else False)
+ args = action['action']
+ depfile = action.get('depfile', None)
+ if depfile:
+ depfile = self.ExpandSpecial(depfile)
+ pool = 'console' if int(action.get('ninja_use_console', 0)) else None
+ rule_name, _ = self.WriteNewNinjaRule(name, args, description,
+ is_cygwin, env, pool,
+ depfile=depfile)
+
+ inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
+ if int(action.get('process_outputs_as_sources', False)):
+ extra_sources += action['outputs']
+ if int(action.get('process_outputs_as_mac_bundle_resources', False)):
+ extra_mac_bundle_resources += action['outputs']
+ outputs = [self.GypPathToNinja(o, env) for o in action['outputs']]
+
+ # Then write out an edge using the rule.
+ self.ninja.build(outputs, rule_name, inputs,
+ order_only=prebuild)
+ all_outputs += outputs
+
+ self.ninja.newline()
+
+ return all_outputs
+
+ def WriteRules(self, rules, extra_sources, prebuild,
+ mac_bundle_resources, extra_mac_bundle_resources):
+ env = self.GetToolchainEnv()
+ all_outputs = []
+ for rule in rules:
+ # Skip a rule with no action and no inputs.
+ if 'action' not in rule and not rule.get('rule_sources', []):
+ continue
+
+ # First write out a rule for the rule action.
+ name = '%s_%s' % (rule['rule_name'], self.hash_for_rules)
+
+ args = rule['action']
+ description = self.GenerateDescription(
+ 'RULE',
+ rule.get('message', None),
+ ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
+ is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
+ if self.flavor == 'win' else False)
+ pool = 'console' if int(rule.get('ninja_use_console', 0)) else None
+ rule_name, args = self.WriteNewNinjaRule(
+ name, args, description, is_cygwin, env, pool)
+
+ # TODO: if the command references the outputs directly, we should
+ # simplify it to just use $out.
+
+ # Rules can potentially make use of some special variables which
+ # must vary per source file.
+ # Compute the list of variables we'll need to provide.
+ special_locals = ('source', 'root', 'dirname', 'ext', 'name')
+ needed_variables = set(['source'])
+ for argument in args:
+ for var in special_locals:
+ if '${%s}' % var in argument:
+ needed_variables.add(var)
+ needed_variables = sorted(needed_variables)
+
+ def cygwin_munge(path):
+ # pylint: disable=cell-var-from-loop
+ if is_cygwin:
+ return path.replace('\\', '/')
+ return path
+
+ inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])]
+
+ # If there are n source files matching the rule, and m additional rule
+ # inputs, then adding 'inputs' to each build edge written below will
+ # write m * n inputs. Collapsing reduces this to m + n.
+ sources = rule.get('rule_sources', [])
+ num_inputs = len(inputs)
+ if prebuild:
+ num_inputs += 1
+ if num_inputs > 2 and len(sources) > 2:
+ inputs = [self.WriteCollapsedDependencies(
+ rule['rule_name'], inputs, order_only=prebuild)]
+ prebuild = []
+
+ # For each source file, write an edge that generates all the outputs.
+ for source in sources:
+ source = os.path.normpath(source)
+ dirname, basename = os.path.split(source)
+ root, ext = os.path.splitext(basename)
+
+ # Gather the list of inputs and outputs, expanding $vars if possible.
+ outputs = [self.ExpandRuleVariables(o, root, dirname,
+ source, ext, basename)
+ for o in rule['outputs']]
+
+ if int(rule.get('process_outputs_as_sources', False)):
+ extra_sources += outputs
+
+ was_mac_bundle_resource = source in mac_bundle_resources
+ if was_mac_bundle_resource or \
+ int(rule.get('process_outputs_as_mac_bundle_resources', False)):
+ extra_mac_bundle_resources += outputs
+ # Note: This is n_resources * n_outputs_in_rule. Put to-be-removed
+ # items in a set and remove them all in a single pass if this becomes
+ # a performance issue.
+ if was_mac_bundle_resource:
+ mac_bundle_resources.remove(source)
+
+ extra_bindings = []
+ for var in needed_variables:
+ if var == 'root':
+ extra_bindings.append(('root', cygwin_munge(root)))
+ elif var == 'dirname':
+ # '$dirname' is a parameter to the rule action, which means
+ # it shouldn't be converted to a Ninja path. But we don't
+ # want $!PRODUCT_DIR in there either.
+ dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build)
+ extra_bindings.append(('dirname', cygwin_munge(dirname_expanded)))
+ elif var == 'source':
+ # '$source' is a parameter to the rule action, which means
+ # it shouldn't be converted to a Ninja path. But we don't
+ # want $!PRODUCT_DIR in there either.
+ source_expanded = self.ExpandSpecial(source, self.base_to_build)
+ extra_bindings.append(('source', cygwin_munge(source_expanded)))
+ elif var == 'ext':
+ extra_bindings.append(('ext', ext))
+ elif var == 'name':
+ extra_bindings.append(('name', cygwin_munge(basename)))
+ else:
+ assert var == None, repr(var)
+
+ outputs = [self.GypPathToNinja(o, env) for o in outputs]
+ if self.flavor == 'win':
+ # WriteNewNinjaRule uses unique_name for creating an rsp file on win.
+ extra_bindings.append(('unique_name',
+ hashlib.md5(outputs[0]).hexdigest()))
+
+ self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
+ implicit=inputs,
+ order_only=prebuild,
+ variables=extra_bindings)
+
+ all_outputs.extend(outputs)
+
+ return all_outputs
+
+ def WriteCopies(self, copies, prebuild, mac_bundle_depends):
+ outputs = []
+ if self.xcode_settings:
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetToolchainEnv(additional_settings=extra_env)
+ else:
+ env = self.GetToolchainEnv()
+ for copy in copies:
+ for path in copy['files']:
+ # Normalize the path so trailing slashes don't confuse us.
+ path = os.path.normpath(path)
+ basename = os.path.split(path)[1]
+ src = self.GypPathToNinja(path, env)
+ dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
+ env)
+ outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
+ if self.is_mac_bundle:
+ # gyp has mac_bundle_resources to copy things into a bundle's
+ # Resources folder, but there's no built-in way to copy files to other
+ # places in the bundle. Hence, some targets use copies for this. Check
+ # if this file is copied into the current bundle, and if so add it to
+ # the bundle depends so that dependent targets get rebuilt if the copy
+ # input changes.
+ if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
+ mac_bundle_depends.append(dst)
+
+ return outputs
+
+ def WriteiOSFrameworkHeaders(self, spec, outputs, prebuild):
+ """Prebuild steps to generate hmap files and copy headers to destination."""
+ framework = self.ComputeMacBundleOutput()
+ all_sources = spec['sources']
+ copy_headers = spec['mac_framework_headers']
+ output = self.GypPathToUniqueOutput('headers.hmap')
+ self.xcode_settings.header_map_path = output
+ all_headers = map(self.GypPathToNinja,
+ filter(lambda x:x.endswith(('.h')), all_sources))
+ variables = [('framework', framework),
+ ('copy_headers', map(self.GypPathToNinja, copy_headers))]
+ outputs.extend(self.ninja.build(
+ output, 'compile_ios_framework_headers', all_headers,
+ variables=variables, order_only=prebuild))
+
+ def WriteMacBundleResources(self, resources, bundle_depends):
+ """Writes ninja edges for 'mac_bundle_resources'."""
+ xcassets = []
+
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetSortedXcodeEnv(additional_settings=extra_env)
+ env = self.ComputeExportEnvString(env)
+ isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
+
+ for output, res in gyp.xcode_emulation.GetMacBundleResources(
+ generator_default_variables['PRODUCT_DIR'],
+ self.xcode_settings, map(self.GypPathToNinja, resources)):
+ output = self.ExpandSpecial(output)
+ if os.path.splitext(output)[-1] != '.xcassets':
+ self.ninja.build(output, 'mac_tool', res,
+ variables=[('mactool_cmd', 'copy-bundle-resource'), \
+ ('env', env), ('binary', isBinary)])
+ bundle_depends.append(output)
+ else:
+ xcassets.append(res)
+ return xcassets
+
+ def WriteMacXCassets(self, xcassets, bundle_depends):
+ """Writes ninja edges for 'mac_bundle_resources' .xcassets files.
+
+    This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
+    It assumes that the asset catalogs define at least one imageset and
+    thus an Assets.car file will be generated in the application resources
+    directory. If this is not the case, then the build will probably be redone
+    at each invocation of ninja."""
+ if not xcassets:
+ return
+
+ extra_arguments = {}
+ settings_to_arg = {
+ 'XCASSETS_APP_ICON': 'app-icon',
+ 'XCASSETS_LAUNCH_IMAGE': 'launch-image',
+ }
+ settings = self.xcode_settings.xcode_settings[self.config_name]
+ for settings_key, arg_name in settings_to_arg.items():
+ value = settings.get(settings_key)
+ if value:
+ extra_arguments[arg_name] = value
+
+ partial_info_plist = None
+ if extra_arguments:
+ partial_info_plist = self.GypPathToUniqueOutput(
+ 'assetcatalog_generated_info.plist')
+ extra_arguments['output-partial-info-plist'] = partial_info_plist
+
+ outputs = []
+ outputs.append(
+ os.path.join(
+ self.xcode_settings.GetBundleResourceFolder(),
+ 'Assets.car'))
+ if partial_info_plist:
+ outputs.append(partial_info_plist)
+
+ keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor)
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetSortedXcodeEnv(additional_settings=extra_env)
+ env = self.ComputeExportEnvString(env)
+
+ bundle_depends.extend(self.ninja.build(
+ outputs, 'compile_xcassets', xcassets,
+ variables=[('env', env), ('keys', keys)]))
+ return partial_info_plist
+
+ def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
+ """Write build rules for bundle Info.plist files."""
+ info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
+ generator_default_variables['PRODUCT_DIR'],
+ self.xcode_settings, self.GypPathToNinja)
+ if not info_plist:
+ return
+ out = self.ExpandSpecial(out)
+ if defines:
+ # Create an intermediate file to store preprocessed results.
+ intermediate_plist = self.GypPathToUniqueOutput(
+ os.path.basename(info_plist))
+ defines = ' '.join([Define(d, self.flavor) for d in defines])
+ info_plist = self.ninja.build(
+ intermediate_plist, 'preprocess_infoplist', info_plist,
+ variables=[('defines',defines)])
+
+ env = self.GetSortedXcodeEnv(additional_settings=extra_env)
+ env = self.ComputeExportEnvString(env)
+
+ if partial_info_plist:
+ intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist')
+ info_plist = self.ninja.build(
+ intermediate_plist, 'merge_infoplist',
+ [partial_info_plist, info_plist])
+
+ keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
+ keys = QuoteShellArgument(json.dumps(keys), self.flavor)
+ isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
+ self.ninja.build(out, 'copy_infoplist', info_plist,
+ variables=[('env', env), ('keys', keys),
+ ('binary', isBinary)])
+ bundle_depends.append(out)
+
+ def WriteSources(self, ninja_file, config_name, config, sources, predepends,
+ precompiled_header, spec):
+ """Write build rules to compile all of |sources|."""
+ if self.toolset == 'host':
+ self.ninja.variable('ar', '$ar_host')
+ self.ninja.variable('cc', '$cc_host')
+ self.ninja.variable('cxx', '$cxx_host')
+ self.ninja.variable('ld', '$ld_host')
+ self.ninja.variable('ldxx', '$ldxx_host')
+ self.ninja.variable('nm', '$nm_host')
+ self.ninja.variable('readelf', '$readelf_host')
+
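+    # On mac, multi-arch targets write their per-arch compile edges into
+    # separate arch subninjas and return a dict mapping arch -> object files;
+    # single-arch and non-mac targets return a flat list of object files.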
+ if self.flavor != 'mac' or len(self.archs) == 1:
+ return self.WriteSourcesForArch(
+ self.ninja, config_name, config, sources, predepends,
+ precompiled_header, spec)
+ else:
+ return dict((arch, self.WriteSourcesForArch(
+ self.arch_subninjas[arch], config_name, config, sources, predepends,
+ precompiled_header, spec, arch=arch))
+ for arch in self.archs)
+
+ def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
+ predepends, precompiled_header, spec, arch=None):
+ """Write build rules to compile all of |sources|."""
+
+ extra_defines = []
+ if self.flavor == 'mac':
+ cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
+ cflags_c = self.xcode_settings.GetCflagsC(config_name)
+ cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
+ cflags_objc = ['$cflags_c'] + \
+ self.xcode_settings.GetCflagsObjC(config_name)
+ cflags_objcc = ['$cflags_cc'] + \
+ self.xcode_settings.GetCflagsObjCC(config_name)
+ elif self.flavor == 'win':
+ asmflags = self.msvs_settings.GetAsmflags(config_name)
+ cflags = self.msvs_settings.GetCflags(config_name)
+ cflags_c = self.msvs_settings.GetCflagsC(config_name)
+ cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
+ extra_defines = self.msvs_settings.GetComputedDefines(config_name)
+      # See the comment at cc_command for why there are two .pdb files.
+ pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
+ config_name, self.ExpandSpecial)
+ if not pdbpath_c:
+ obj = 'obj'
+ if self.toolset != 'target':
+ obj += '.' + self.toolset
+ pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
+ pdbpath_c = pdbpath + '.c.pdb'
+ pdbpath_cc = pdbpath + '.cc.pdb'
+ self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
+ self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
+ self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
+ else:
+ cflags = config.get('cflags', [])
+ cflags_c = config.get('cflags_c', [])
+ cflags_cc = config.get('cflags_cc', [])
+
+ # Respect environment variables related to build, but target-specific
+ # flags can still override them.
+ if self.toolset == 'target':
+ cflags_c = (os.environ.get('CPPFLAGS', '').split() +
+ os.environ.get('CFLAGS', '').split() + cflags_c)
+ cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
+ os.environ.get('CXXFLAGS', '').split() + cflags_cc)
+ elif self.toolset == 'host':
+ cflags_c = (os.environ.get('CPPFLAGS_host', '').split() +
+ os.environ.get('CFLAGS_host', '').split() + cflags_c)
+ cflags_cc = (os.environ.get('CPPFLAGS_host', '').split() +
+ os.environ.get('CXXFLAGS_host', '').split() + cflags_cc)
+
+ defines = config.get('defines', []) + extra_defines
+ self.WriteVariableList(ninja_file, 'defines',
+ [Define(d, self.flavor) for d in defines])
+ if self.flavor == 'win':
+ self.WriteVariableList(ninja_file, 'asmflags',
+ map(self.ExpandSpecial, asmflags))
+ self.WriteVariableList(ninja_file, 'rcflags',
+ [QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
+ for f in self.msvs_settings.GetRcflags(config_name,
+ self.GypPathToNinja)])
+
+ include_dirs = config.get('include_dirs', [])
+
+ env = self.GetToolchainEnv()
+ if self.flavor == 'win':
+ include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
+ config_name)
+ self.WriteVariableList(ninja_file, 'includes',
+ [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
+ for i in include_dirs])
+
+ if self.flavor == 'win':
+ midl_include_dirs = config.get('midl_include_dirs', [])
+ midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
+ midl_include_dirs, config_name)
+ self.WriteVariableList(ninja_file, 'midl_includes',
+ [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
+ for i in midl_include_dirs])
+
+ pch_commands = precompiled_header.GetPchBuildCommands(arch)
+ if self.flavor == 'mac':
+ # Most targets use no precompiled headers, so only write these if needed.
+ for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'),
+ ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]:
+ include = precompiled_header.GetInclude(ext, arch)
+ if include: ninja_file.variable(var, include)
+
+ arflags = config.get('arflags', [])
+
+ self.WriteVariableList(ninja_file, 'cflags',
+ map(self.ExpandSpecial, cflags))
+ self.WriteVariableList(ninja_file, 'cflags_c',
+ map(self.ExpandSpecial, cflags_c))
+ self.WriteVariableList(ninja_file, 'cflags_cc',
+ map(self.ExpandSpecial, cflags_cc))
+ if self.flavor == 'mac':
+ self.WriteVariableList(ninja_file, 'cflags_objc',
+ map(self.ExpandSpecial, cflags_objc))
+ self.WriteVariableList(ninja_file, 'cflags_objcc',
+ map(self.ExpandSpecial, cflags_objcc))
+ self.WriteVariableList(ninja_file, 'arflags',
+ map(self.ExpandSpecial, arflags))
+ ninja_file.newline()
+ outputs = []
+ has_rc_source = False
+ for source in sources:
+ filename, ext = os.path.splitext(source)
+ ext = ext[1:]
+ obj_ext = self.obj_ext
+ if ext in ('cc', 'cpp', 'cxx'):
+ command = 'cxx'
+ self.target.uses_cpp = True
+ elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
+ command = 'cc'
+ elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
+ command = 'cc_s'
+ elif (self.flavor == 'win' and ext == 'asm' and
+ not self.msvs_settings.HasExplicitAsmRules(spec)):
+ command = 'asm'
+ # Add the _asm suffix as msvs is capable of handling .cc and
+ # .asm files of the same name without collision.
+ obj_ext = '_asm.obj'
+ elif self.flavor == 'mac' and ext == 'm':
+ command = 'objc'
+ elif self.flavor == 'mac' and ext == 'mm':
+ command = 'objcxx'
+ self.target.uses_cpp = True
+ elif self.flavor == 'win' and ext == 'rc':
+ command = 'rc'
+ obj_ext = '.res'
+ has_rc_source = True
+ else:
+ # Ignore unhandled extensions.
+ continue
+ input = self.GypPathToNinja(source)
+ output = self.GypPathToUniqueOutput(filename + obj_ext)
+ if arch is not None:
+ output = AddArch(output, arch)
+ implicit = precompiled_header.GetObjDependencies([input], [output], arch)
+ variables = []
+ if self.flavor == 'win':
+ variables, output, implicit = precompiled_header.GetFlagsModifications(
+ input, output, implicit, command, cflags_c, cflags_cc,
+ self.ExpandSpecial)
+ ninja_file.build(output, command, input,
+ implicit=[gch for _, _, gch in implicit],
+ order_only=predepends, variables=variables)
+ outputs.append(output)
+
+ if has_rc_source:
+ resource_include_dirs = config.get('resource_include_dirs', include_dirs)
+ self.WriteVariableList(ninja_file, 'resource_includes',
+ [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
+ for i in resource_include_dirs])
+
+ self.WritePchTargets(ninja_file, pch_commands)
+
+ ninja_file.newline()
+ return outputs
+
+ def WritePchTargets(self, ninja_file, pch_commands):
+ """Writes ninja rules to compile prefix headers."""
+ if not pch_commands:
+ return
+
+ for gch, lang_flag, lang, input in pch_commands:
+ var_name = {
+ 'c': 'cflags_pch_c',
+ 'cc': 'cflags_pch_cc',
+ 'm': 'cflags_pch_objc',
+ 'mm': 'cflags_pch_objcc',
+ }[lang]
+
+      lang_to_rule = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', }
+      cmd = lang_to_rule.get(lang)
+ ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
+
+ def WriteLink(self, spec, config_name, config, link_deps, compile_deps):
+ """Write out a link step. Fills out target.binary. """
+ if self.flavor != 'mac' or len(self.archs) == 1:
+ return self.WriteLinkForArch(
+ self.ninja, spec, config_name, config, link_deps, compile_deps)
+ else:
+ output = self.ComputeOutput(spec)
+ inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
+ config_name, config, link_deps[arch],
+ compile_deps, arch=arch)
+ for arch in self.archs]
+ extra_bindings = []
+ build_output = output
+ if not self.is_mac_bundle:
+ self.AppendPostbuildVariable(extra_bindings, spec, output, output)
+
+ # TODO(yyanagisawa): more work needed to fix:
+ # https://code.google.com/p/gyp/issues/detail?id=411
+ if (spec['type'] in ('shared_library', 'loadable_module') and
+ not self.is_mac_bundle):
+ extra_bindings.append(('lib', output))
+ self.ninja.build([output, output + '.TOC'], 'solipo', inputs,
+ variables=extra_bindings)
+ else:
+ self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings)
+ return output
+
+ def WriteLinkForArch(self, ninja_file, spec, config_name, config,
+ link_deps, compile_deps, arch=None):
+ """Write out a link step. Fills out target.binary. """
+ command = {
+ 'executable': 'link',
+ 'loadable_module': 'solink_module',
+ 'shared_library': 'solink',
+ }[spec['type']]
+ command_suffix = ''
+
+ implicit_deps = set()
+ solibs = set()
+ order_deps = set()
+
+ if compile_deps:
+ # Normally, the compiles of the target already depend on compile_deps,
+ # but a shared_library target might have no sources and only link together
+ # a few static_library deps, so the link step also needs to depend
+ # on compile_deps to make sure actions in the shared_library target
+ # get run before the link.
+ order_deps.add(compile_deps)
+
+ if 'dependencies' in spec:
+ # Two kinds of dependencies:
+ # - Linkable dependencies (like a .a or a .so): add them to the link line.
+ # - Non-linkable dependencies (like a rule that generates a file
+ # and writes a stamp file): add them to implicit_deps
+ extra_link_deps = set()
+ for dep in spec['dependencies']:
+ target = self.target_outputs.get(dep)
+ if not target:
+ continue
+ linkable = target.Linkable()
+ if linkable:
+ new_deps = []
+ if (self.flavor == 'win' and
+ target.component_objs and
+ self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
+ new_deps = target.component_objs
+ if target.compile_deps:
+ order_deps.add(target.compile_deps)
+ elif self.flavor == 'win' and target.import_lib:
+ new_deps = [target.import_lib]
+ elif target.UsesToc(self.flavor):
+ solibs.add(target.binary)
+ implicit_deps.add(target.binary + '.TOC')
+ else:
+ new_deps = [target.binary]
+ for new_dep in new_deps:
+ if new_dep not in extra_link_deps:
+ extra_link_deps.add(new_dep)
+ link_deps.append(new_dep)
+
+ final_output = target.FinalOutput()
+ if not linkable or final_output != target.binary:
+ implicit_deps.add(final_output)
+
+ extra_bindings = []
+ if self.target.uses_cpp and self.flavor != 'win':
+ extra_bindings.append(('ld', '$ldxx'))
+
+ output = self.ComputeOutput(spec, arch)
+ if arch is None and not self.is_mac_bundle:
+ self.AppendPostbuildVariable(extra_bindings, spec, output, output)
+
+ is_executable = spec['type'] == 'executable'
+ # The ldflags config key is not used on mac or win. On those platforms
+ # linker flags are set via xcode_settings and msvs_settings, respectively.
+ if self.toolset == 'target':
+ env_ldflags = os.environ.get('LDFLAGS', '').split()
+ elif self.toolset == 'host':
+ env_ldflags = os.environ.get('LDFLAGS_host', '').split()
+ if self.flavor == 'mac':
+ ldflags = self.xcode_settings.GetLdflags(config_name,
+ self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
+ self.GypPathToNinja, arch)
+ ldflags = env_ldflags + ldflags
+ elif self.flavor == 'win':
+ manifest_base_name = self.GypPathToUniqueOutput(
+ self.ComputeOutputFileName(spec))
+ ldflags, intermediate_manifest, manifest_files = \
+ self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
+ self.ExpandSpecial, manifest_base_name,
+ output, is_executable,
+ self.toplevel_build)
+ ldflags = env_ldflags + ldflags
+ self.WriteVariableList(ninja_file, 'manifests', manifest_files)
+ implicit_deps = implicit_deps.union(manifest_files)
+ if intermediate_manifest:
+ self.WriteVariableList(
+ ninja_file, 'intermediatemanifest', [intermediate_manifest])
+ command_suffix = _GetWinLinkRuleNameSuffix(
+ self.msvs_settings.IsEmbedManifest(config_name))
+ def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
+ if def_file:
+ implicit_deps.add(def_file)
+ else:
+ # Respect environment variables related to build, but target-specific
+ # flags can still override them.
+ ldflags = env_ldflags + config.get('ldflags', [])
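+      # Executables that link against in-tree shared libraries get an rpath
+      # (and rpath-link) so they can be run from the build output directory
+      # without setting LD_LIBRARY_PATH.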
+ if is_executable and len(solibs):
+ rpath = 'lib/'
+ if self.toolset != 'target':
+ rpath += self.toolset
+ ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
+ else:
+ ldflags.append('-Wl,-rpath=%s' % self.target_rpath)
+ ldflags.append('-Wl,-rpath-link=%s' % rpath)
+ self.WriteVariableList(ninja_file, 'ldflags',
+ map(self.ExpandSpecial, ldflags))
+
+ library_dirs = config.get('library_dirs', [])
+ if self.flavor == 'win':
+ library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name)
+ for l in library_dirs]
+ library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l),
+ self.flavor)
+ for l in library_dirs]
+ else:
+ library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l),
+ self.flavor)
+ for l in library_dirs]
+
+ libraries = gyp.common.uniquer(map(self.ExpandSpecial,
+ spec.get('libraries', [])))
+ if self.flavor == 'mac':
+ libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
+ elif self.flavor == 'win':
+ libraries = self.msvs_settings.AdjustLibraries(libraries)
+
+ self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries)
+
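+    # Remember the primary binary path; 'output' may be turned into a list
+    # below (adding the import library, .pdb or .TOC file), but the value
+    # returned to the caller stays the binary itself.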
+ linked_binary = output
+
+ if command in ('solink', 'solink_module'):
+ extra_bindings.append(('soname', os.path.split(output)[1]))
+ extra_bindings.append(('lib',
+ gyp.common.EncodePOSIXShellArgument(output)))
+ if self.flavor != 'win':
+ link_file_list = output
+ if self.is_mac_bundle:
+ # 'Dependency Framework.framework/Versions/A/Dependency Framework' ->
+ # 'Dependency Framework.framework.rsp'
+ link_file_list = self.xcode_settings.GetWrapperName()
+ if arch:
+ link_file_list += '.' + arch
+ link_file_list += '.rsp'
+ # If an rspfile contains spaces, ninja surrounds the filename with
+ # quotes around it and then passes it to open(), creating a file with
+ # quotes in its name (and when looking for the rsp file, the name
+ # makes it through bash which strips the quotes) :-/
+ link_file_list = link_file_list.replace(' ', '_')
+ extra_bindings.append(
+ ('link_file_list',
+ gyp.common.EncodePOSIXShellArgument(link_file_list)))
+ if self.flavor == 'win':
+ extra_bindings.append(('binary', output))
+ if ('/NOENTRY' not in ldflags and
+ not self.msvs_settings.GetNoImportLibrary(config_name)):
+ self.target.import_lib = output + '.lib'
+ extra_bindings.append(('implibflag',
+ '/IMPLIB:%s' % self.target.import_lib))
+ pdbname = self.msvs_settings.GetPDBName(
+ config_name, self.ExpandSpecial, output + '.pdb')
+ output = [output, self.target.import_lib]
+ if pdbname:
+ output.append(pdbname)
+ elif not self.is_mac_bundle:
+ output = [output, output + '.TOC']
+ else:
+ command = command + '_notoc'
+ elif self.flavor == 'win':
+ extra_bindings.append(('binary', output))
+ pdbname = self.msvs_settings.GetPDBName(
+ config_name, self.ExpandSpecial, output + '.pdb')
+ if pdbname:
+ output = [output, pdbname]
+
+
+ if len(solibs):
+ extra_bindings.append(('solibs',
+ gyp.common.EncodePOSIXShellList(sorted(solibs))))
+
+ ninja_file.build(output, command + command_suffix, link_deps,
+ implicit=sorted(implicit_deps),
+ order_only=list(order_deps),
+ variables=extra_bindings)
+ return linked_binary
+
+ def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
+ extra_link_deps = any(self.target_outputs.get(dep).Linkable()
+ for dep in spec.get('dependencies', [])
+ if dep in self.target_outputs)
+ if spec['type'] == 'none' or (not link_deps and not extra_link_deps):
+ # TODO(evan): don't call this function for 'none' target types, as
+ # it doesn't do anything, and we fake out a 'binary' with a stamp file.
+ self.target.binary = compile_deps
+ self.target.type = 'none'
+ elif spec['type'] == 'static_library':
+ self.target.binary = self.ComputeOutput(spec)
+ if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
+ self.is_standalone_static_library):
+ self.ninja.build(self.target.binary, 'alink_thin', link_deps,
+ order_only=compile_deps)
+ else:
+ variables = []
+ if self.xcode_settings:
+ libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
+ if libtool_flags:
+ variables.append(('libtool_flags', libtool_flags))
+ if self.msvs_settings:
+ libflags = self.msvs_settings.GetLibFlags(config_name,
+ self.GypPathToNinja)
+ variables.append(('libflags', libflags))
+
+ if self.flavor != 'mac' or len(self.archs) == 1:
+ self.AppendPostbuildVariable(variables, spec,
+ self.target.binary, self.target.binary)
+ self.ninja.build(self.target.binary, 'alink', link_deps,
+ order_only=compile_deps, variables=variables)
+ else:
+ inputs = []
+ for arch in self.archs:
+ output = self.ComputeOutput(spec, arch)
+ self.arch_subninjas[arch].build(output, 'alink', link_deps[arch],
+ order_only=compile_deps,
+ variables=variables)
+ inputs.append(output)
+ # TODO: It's not clear if libtool_flags should be passed to the alink
+ # call that combines single-arch .a files into a fat .a file.
+ self.AppendPostbuildVariable(variables, spec,
+ self.target.binary, self.target.binary)
+ self.ninja.build(self.target.binary, 'alink', inputs,
+ # FIXME: test proving order_only=compile_deps isn't
+ # needed.
+ variables=variables)
+ else:
+ self.target.binary = self.WriteLink(spec, config_name, config, link_deps,
+ compile_deps)
+ return self.target.binary
+
+ def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
+ assert self.is_mac_bundle
+ package_framework = spec['type'] in ('shared_library', 'loadable_module')
+ output = self.ComputeMacBundleOutput()
+ if is_empty:
+ output += '.stamp'
+ variables = []
+ self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
+ is_command_start=not package_framework)
+ if package_framework and not is_empty:
+ if spec['type'] == 'shared_library' and self.xcode_settings.isIOS:
+ self.ninja.build(output, 'package_ios_framework', mac_bundle_depends,
+ variables=variables)
+ else:
+ variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
+ self.ninja.build(output, 'package_framework', mac_bundle_depends,
+ variables=variables)
+ else:
+ self.ninja.build(output, 'stamp', mac_bundle_depends,
+ variables=variables)
+ self.target.bundle = output
+ return output
+
+ def GetToolchainEnv(self, additional_settings=None):
+ """Returns the variables toolchain would set for build steps."""
+ env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
+ if self.flavor == 'win':
+ env = self.GetMsvsToolchainEnv(
+ additional_settings=additional_settings)
+ return env
+
+ def GetMsvsToolchainEnv(self, additional_settings=None):
+ """Returns the variables Visual Studio would set for build steps."""
+ return self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
+ config=self.config_name)
+
+ def GetSortedXcodeEnv(self, additional_settings=None):
+ """Returns the variables Xcode would set for build steps."""
+ assert self.abs_build_dir
+ abs_build_dir = self.abs_build_dir
+ return gyp.xcode_emulation.GetSortedXcodeEnv(
+ self.xcode_settings, abs_build_dir,
+ os.path.join(abs_build_dir, self.build_to_base), self.config_name,
+ additional_settings)
+
+ def GetSortedXcodePostbuildEnv(self):
+ """Returns the variables Xcode would set for postbuild steps."""
+ postbuild_settings = {}
+ # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
+ # TODO(thakis): It would be nice to have some general mechanism instead.
+ strip_save_file = self.xcode_settings.GetPerTargetSetting(
+ 'CHROMIUM_STRIP_SAVE_FILE')
+ if strip_save_file:
+ postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file
+ return self.GetSortedXcodeEnv(additional_settings=postbuild_settings)
+
+ def AppendPostbuildVariable(self, variables, spec, output, binary,
+ is_command_start=False):
+ """Adds a 'postbuild' variable if there is a postbuild for |output|."""
+ postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start)
+ if postbuild:
+ variables.append(('postbuilds', postbuild))
+
+ def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
+ """Returns a shell command that runs all the postbuilds, and removes
+ |output| if any of them fails. If |is_command_start| is False, then the
+ returned string will start with ' && '."""
+ if not self.xcode_settings or spec['type'] == 'none' or not output:
+ return ''
+ output = QuoteShellArgument(output, self.flavor)
+ postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
+ if output_binary is not None:
+ postbuilds = self.xcode_settings.AddImplicitPostbuilds(
+ self.config_name,
+ os.path.normpath(os.path.join(self.base_to_build, output)),
+ QuoteShellArgument(
+ os.path.normpath(os.path.join(self.base_to_build, output_binary)),
+ self.flavor),
+ postbuilds, quiet=True)
+
+ if not postbuilds:
+ return ''
+ # Postbuilds expect to be run in the gyp file's directory, so insert an
+ # implicit postbuild to cd to there.
+ postbuilds.insert(0, gyp.common.EncodePOSIXShellList(
+ ['cd', self.build_to_base]))
+ env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
+    # G will be non-zero if any postbuild fails. Run all postbuilds in a
+ # subshell.
+ commands = env + ' (' + \
+ ' && '.join([ninja_syntax.escape(command) for command in postbuilds])
+ command_string = (commands + '); G=$$?; '
+ # Remove the final output if any postbuild failed.
+ '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
+ if is_command_start:
+ return '(' + command_string + ' && '
+ else:
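+      # The leading '$ ' is ninja's escape for a space; without it ninja would
+      # strip the leading whitespace from the variable's value.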
+ return '$ && (' + command_string
+
+ def ComputeExportEnvString(self, env):
+ """Given an environment, returns a string looking like
+    'export FOO=foo; export BAR="${FOO} bar";'
+ that exports |env| to the shell."""
+ export_str = []
+ for k, v in env:
+ export_str.append('export %s=%s;' %
+ (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))))
+ return ' '.join(export_str)
+
+ def ComputeMacBundleOutput(self):
+ """Return the 'output' (full output path) to a bundle output directory."""
+ assert self.is_mac_bundle
+ path = generator_default_variables['PRODUCT_DIR']
+ return self.ExpandSpecial(
+ os.path.join(path, self.xcode_settings.GetWrapperName()))
+
+ def ComputeOutputFileName(self, spec, type=None):
+ """Compute the filename of the final output for the current target."""
+ if not type:
+ type = spec['type']
+
+ default_variables = copy.copy(generator_default_variables)
+ CalculateVariables(default_variables, {'flavor': self.flavor})
+
+ # Compute filename prefix: the product prefix, or a default for
+ # the product type.
+ DEFAULT_PREFIX = {
+ 'loadable_module': default_variables['SHARED_LIB_PREFIX'],
+ 'shared_library': default_variables['SHARED_LIB_PREFIX'],
+ 'static_library': default_variables['STATIC_LIB_PREFIX'],
+ 'executable': default_variables['EXECUTABLE_PREFIX'],
+ }
+ prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))
+
+ # Compute filename extension: the product extension, or a default
+ # for the product type.
+ DEFAULT_EXTENSION = {
+ 'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
+ 'shared_library': default_variables['SHARED_LIB_SUFFIX'],
+ 'static_library': default_variables['STATIC_LIB_SUFFIX'],
+ 'executable': default_variables['EXECUTABLE_SUFFIX'],
+ }
+ extension = spec.get('product_extension')
+ if extension:
+ extension = '.' + extension
+ else:
+ extension = DEFAULT_EXTENSION.get(type, '')
+
+ if 'product_name' in spec:
+ # If we were given an explicit name, use that.
+ target = spec['product_name']
+ else:
+ # Otherwise, derive a name from the target name.
+ target = spec['target_name']
+ if prefix == 'lib':
+ # Snip out an extra 'lib' from libs if appropriate.
+ target = StripPrefix(target, 'lib')
+
+ if type in ('static_library', 'loadable_module', 'shared_library',
+ 'executable'):
+ return '%s%s%s' % (prefix, target, extension)
+ elif type == 'none':
+ return '%s.stamp' % target
+ else:
+ raise Exception('Unhandled output type %s' % type)
+
+ def ComputeOutput(self, spec, arch=None):
+ """Compute the path for the final output of the spec."""
+ type = spec['type']
+
+ if self.flavor == 'win':
+ override = self.msvs_settings.GetOutputName(self.config_name,
+ self.ExpandSpecial)
+ if override:
+ return override
+
+ if arch is None and self.flavor == 'mac' and type in (
+ 'static_library', 'executable', 'shared_library', 'loadable_module'):
+ filename = self.xcode_settings.GetExecutablePath()
+ else:
+ filename = self.ComputeOutputFileName(spec, type)
+
+ if arch is None and 'product_dir' in spec:
+ path = os.path.join(spec['product_dir'], filename)
+ return self.ExpandSpecial(path)
+
+ # Some products go into the output root, libraries go into shared library
+ # dir, and everything else goes into the normal place.
+ type_in_output_root = ['executable', 'loadable_module']
+ if self.flavor == 'mac' and self.toolset == 'target':
+ type_in_output_root += ['shared_library', 'static_library']
+ elif self.flavor == 'win' and self.toolset == 'target':
+ type_in_output_root += ['shared_library']
+
+ if arch is not None:
+ # Make sure partial executables don't end up in a bundle or the regular
+ # output directory.
+ archdir = 'arch'
+ if self.toolset != 'target':
+ archdir = os.path.join('arch', '%s' % self.toolset)
+ return os.path.join(archdir, AddArch(filename, arch))
+ elif type in type_in_output_root or self.is_standalone_static_library:
+ return filename
+ elif type == 'shared_library':
+ libdir = 'lib'
+ if self.toolset != 'target':
+ libdir = os.path.join('lib', '%s' % self.toolset)
+ return os.path.join(libdir, filename)
+ else:
+ return self.GypPathToUniqueOutput(filename, qualified=False)
+
+ def WriteVariableList(self, ninja_file, var, values):
+ assert not isinstance(values, str)
+ if values is None:
+ values = []
+ ninja_file.variable(var, ' '.join(values))
+
+ def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool,
+ depfile=None):
+ """Write out a new ninja "rule" statement for a given command.
+
+ Returns the name of the new rule, and a copy of |args| with variables
+ expanded."""
+
+ if self.flavor == 'win':
+ args = [self.msvs_settings.ConvertVSMacros(
+ arg, self.base_to_build, config=self.config_name)
+ for arg in args]
+ description = self.msvs_settings.ConvertVSMacros(
+ description, config=self.config_name)
+ elif self.flavor == 'mac':
+ # |env| is an empty list on non-mac.
+ args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
+ description = gyp.xcode_emulation.ExpandEnvVars(description, env)
+
+ # TODO: we shouldn't need to qualify names; we do it because
+ # currently the ninja rule namespace is global, but it really
+ # should be scoped to the subninja.
+ rule_name = self.name
+ if self.toolset == 'target':
+ rule_name += '.' + self.toolset
+ rule_name += '.' + name
+ rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)
+
+ # Remove variable references, but not if they refer to the magic rule
+ # variables. This is not quite right, as it also protects these for
+ # actions, not just for rules where they are valid. Good enough.
+ protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ]
+ protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'
+ description = re.sub(protect + r'\$', '_', description)
+
+ # gyp dictates that commands are run from the base directory.
+ # cd into the directory before running, and adjust paths in
+ # the arguments to point to the proper locations.
+ rspfile = None
+ rspfile_content = None
+ args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
+ if self.flavor == 'win':
+ rspfile = rule_name + '.$unique_name.rsp'
+ # The cygwin case handles this inside the bash sub-shell.
+ run_in = '' if is_cygwin else ' ' + self.build_to_base
+ if is_cygwin:
+ rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
+ args, self.build_to_base)
+ else:
+ rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
+ command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable +
+ rspfile + run_in)
+ else:
+ env = self.ComputeExportEnvString(env)
+ command = gyp.common.EncodePOSIXShellList(args)
+ command = 'cd %s; ' % self.build_to_base + env + command
+
+ # GYP rules/actions express being no-ops by not touching their outputs.
+ # Avoid executing downstream dependencies in this case by specifying
+ # restat=1 to ninja.
+ self.ninja.rule(rule_name, command, description, depfile=depfile,
+ restat=True, pool=pool,
+ rspfile=rspfile, rspfile_content=rspfile_content)
+ self.ninja.newline()
+
+ return rule_name, args
+
+
+def CalculateVariables(default_variables, params):
+ """Calculate additional variables for use in the build (called by gyp)."""
+ global generator_additional_non_configuration_keys
+ global generator_additional_path_sections
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == 'mac':
+ default_variables.setdefault('OS', 'mac')
+ default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
+ default_variables.setdefault('SHARED_LIB_DIR',
+ generator_default_variables['PRODUCT_DIR'])
+ default_variables.setdefault('LIB_DIR',
+ generator_default_variables['PRODUCT_DIR'])
+
+ # Copy additional generator configuration data from Xcode, which is shared
+ # by the Mac Ninja generator.
+ import gyp.generator.xcode as xcode_generator
+ generator_additional_non_configuration_keys = getattr(xcode_generator,
+ 'generator_additional_non_configuration_keys', [])
+ generator_additional_path_sections = getattr(xcode_generator,
+ 'generator_additional_path_sections', [])
+ global generator_extra_sources_for_rules
+ generator_extra_sources_for_rules = getattr(xcode_generator,
+ 'generator_extra_sources_for_rules', [])
+ elif flavor == 'win':
+ exts = gyp.MSVSUtil.TARGET_TYPE_EXT
+ default_variables.setdefault('OS', 'win')
+ default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable']
+ default_variables['STATIC_LIB_PREFIX'] = ''
+ default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library']
+ default_variables['SHARED_LIB_PREFIX'] = ''
+ default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library']
+
+ # Copy additional generator configuration data from VS, which is shared
+ # by the Windows Ninja generator.
+ import gyp.generator.msvs as msvs_generator
+ generator_additional_non_configuration_keys = getattr(msvs_generator,
+ 'generator_additional_non_configuration_keys', [])
+ generator_additional_path_sections = getattr(msvs_generator,
+ 'generator_additional_path_sections', [])
+
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
+ else:
+ operating_system = flavor
+ if flavor == 'android':
+ operating_system = 'linux' # Keep this legacy behavior for now.
+ default_variables.setdefault('OS', operating_system)
+ default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
+ default_variables.setdefault('SHARED_LIB_DIR',
+ os.path.join('$!PRODUCT_DIR', 'lib'))
+ default_variables.setdefault('LIB_DIR',
+ os.path.join('$!PRODUCT_DIR', 'obj'))
+
+def ComputeOutputDir(params):
+ """Returns the path from the toplevel_dir to the build output directory."""
+ # generator_dir: relative path from pwd to where make puts build files.
+  # Makes migrating from make to ninja easier; ninja doesn't put anything here.
+ generator_dir = os.path.relpath(params['options'].generator_output or '.')
+
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
+
+ # Relative path from source root to our output files. e.g. "out"
+ return os.path.normpath(os.path.join(generator_dir, output_dir))
+
+
+def CalculateGeneratorInputInfo(params):
+ """Called by __init__ to initialize generator values based on params."""
+ # E.g. "out/gypfiles"
+ toplevel = params['options'].toplevel_dir
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, ComputeOutputDir(params), 'gypfiles'))
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': toplevel,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
+
+def OpenOutput(path, mode='w'):
+ """Open |path| for writing, creating directories if necessary."""
+ gyp.common.EnsureDirExists(path)
+ return open(path, mode)
+
+
+def CommandWithWrapper(cmd, wrappers, prog):
+ wrapper = wrappers.get(cmd, '')
+ if wrapper:
+ return wrapper + ' ' + prog
+ return prog
+
+
+def GetDefaultConcurrentLinks():
+ """Returns a best-guess for a number of concurrent links."""
+ pool_size = int(os.environ.get('GYP_LINK_CONCURRENCY', 0))
+ if pool_size:
+ return pool_size
+
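+  # Otherwise derive a cap from physical memory: link steps (especially
+  # debug links) are memory hungry, so the link_pool depth is limited to
+  # roughly what the machine can run in parallel without swapping.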
+ if sys.platform in ('win32', 'cygwin'):
+ import ctypes
+
+ class MEMORYSTATUSEX(ctypes.Structure):
+ _fields_ = [
+ ("dwLength", ctypes.c_ulong),
+ ("dwMemoryLoad", ctypes.c_ulong),
+ ("ullTotalPhys", ctypes.c_ulonglong),
+ ("ullAvailPhys", ctypes.c_ulonglong),
+ ("ullTotalPageFile", ctypes.c_ulonglong),
+ ("ullAvailPageFile", ctypes.c_ulonglong),
+ ("ullTotalVirtual", ctypes.c_ulonglong),
+ ("ullAvailVirtual", ctypes.c_ulonglong),
+ ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
+ ]
+
+ stat = MEMORYSTATUSEX()
+ stat.dwLength = ctypes.sizeof(stat)
+ ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
+
+ # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
+ # on a 64 GB machine.
+ mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30))) # total / 5GB
+ hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32)))
+ return min(mem_limit, hard_cap)
+ elif sys.platform.startswith('linux'):
+ if os.path.exists("/proc/meminfo"):
+ with open("/proc/meminfo") as meminfo:
+ memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+ for line in meminfo:
+ match = memtotal_re.match(line)
+ if not match:
+ continue
+          # Allow 8 GB per link on Linux because gold is quite memory hungry.
+          # MemTotal is reported in kB, hence the divisor of 8 * 2**20 kB.
+ return max(1, int(match.group(1)) // (8 * (2 ** 20)))
+ return 1
+ elif sys.platform == 'darwin':
+ try:
+ avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+ # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
+ # 4GB per ld process allows for some more bloat.
+ return max(1, avail_bytes // (4 * (2 ** 30))) # total / 4GB
+ except:
+ return 1
+ else:
+ # TODO(scottmg): Implement this for other platforms.
+ return 1
+
+
+def _GetWinLinkRuleNameSuffix(embed_manifest):
+ """Returns the suffix used to select an appropriate linking rule depending on
+ whether the manifest embedding is enabled."""
+ return '_embed' if embed_manifest else ''
+
+
+def _AddWinLinkRules(master_ninja, embed_manifest):
+ """Adds link rules for Windows platform to |master_ninja|."""
+ def FullLinkCommand(ldcmd, out, binary_type):
+ resource_name = {
+ 'exe': '1',
+ 'dll': '2',
+ }[binary_type]
+ return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
+ '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
+ '$manifests' % {
+ 'python': sys.executable,
+ 'out': out,
+ 'ldcmd': ldcmd,
+ 'resname': resource_name,
+ 'embed': embed_manifest }
+ rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
+ use_separate_mspdbsrv = (
+ int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0)
+ dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper()
+ dllcmd = ('%s gyp-win-tool link-wrapper $arch %s '
+ '$ld /nologo $implibflag /DLL /OUT:$binary '
+ '@$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
+ dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll')
+ master_ninja.rule('solink' + rule_name_suffix,
+ description=dlldesc, command=dllcmd,
+ rspfile='$binary.rsp',
+ rspfile_content='$libs $in_newline $ldflags',
+ restat=True,
+ pool='link_pool')
+ master_ninja.rule('solink_module' + rule_name_suffix,
+ description=dlldesc, command=dllcmd,
+ rspfile='$binary.rsp',
+ rspfile_content='$libs $in_newline $ldflags',
+ restat=True,
+ pool='link_pool')
+ # Note that ldflags goes at the end so that it has the option of
+ # overriding default settings earlier in the command line.
+ exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s '
+ '$ld /nologo /OUT:$binary @$binary.rsp' %
+ (sys.executable, use_separate_mspdbsrv))
+ exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe')
+ master_ninja.rule('link' + rule_name_suffix,
+ description='LINK%s $binary' % rule_name_suffix.upper(),
+ command=exe_cmd,
+ rspfile='$binary.rsp',
+ rspfile_content='$in_newline $libs $ldflags',
+ pool='link_pool')
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data, params,
+ config_name):
+ options = params['options']
+ flavor = gyp.common.GetFlavor(params)
+ generator_flags = params.get('generator_flags', {})
+
+ # build_dir: relative path from source root to our output files.
+ # e.g. "out/Debug"
+ build_dir = os.path.normpath(
+ os.path.join(ComputeOutputDir(params), config_name))
+
+ toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+
+ master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
+ master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
+
+ # Put build-time support tools in out/{config_name}.
+ gyp.common.CopyTool(flavor, toplevel_build, generator_flags)
+
+ # Grab make settings for CC/CXX.
+ # The rules are
+  # - The priority, from lowest to highest, is: the default gcc/g++, the
+  #   'make_global_settings' in gyp, then the environment variables.
+  # - If there is no 'make_global_settings' for CC.host/CXX.host or
+  #   'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set
+ # to cc/cxx.
+ if flavor == 'win':
+ ar = 'lib.exe'
+ # cc and cxx must be set to the correct architecture by overriding with one
+ # of cl_x86 or cl_x64 below.
+ cc = 'UNSET'
+ cxx = 'UNSET'
+ ld = 'link.exe'
+ ld_host = '$ld'
+ else:
+ ar = 'ar'
+ cc = 'cc'
+ cxx = 'c++'
+ ld = '$cc'
+ ldxx = '$cxx'
+ ld_host = '$cc_host'
+ ldxx_host = '$cxx_host'
+
+ ar_host = ar
+ cc_host = None
+ cxx_host = None
+ cc_host_global_setting = None
+ cxx_host_global_setting = None
+ clang_cl = None
+ nm = 'nm'
+ nm_host = 'nm'
+ readelf = 'readelf'
+ readelf_host = 'readelf'
+
+ build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
+ make_global_settings = data[build_file].get('make_global_settings', [])
+ build_to_root = gyp.common.InvertRelativePath(build_dir,
+ options.toplevel_dir)
+ wrappers = {}
+ for key, value in make_global_settings:
+ if key == 'AR':
+ ar = os.path.join(build_to_root, value)
+ if key == 'AR.host':
+ ar_host = os.path.join(build_to_root, value)
+ if key == 'CC':
+ cc = os.path.join(build_to_root, value)
+ if cc.endswith('clang-cl'):
+ clang_cl = cc
+ if key == 'CXX':
+ cxx = os.path.join(build_to_root, value)
+ if key == 'CC.host':
+ cc_host = os.path.join(build_to_root, value)
+ cc_host_global_setting = value
+ if key == 'CXX.host':
+ cxx_host = os.path.join(build_to_root, value)
+ cxx_host_global_setting = value
+ if key == 'LD':
+ ld = os.path.join(build_to_root, value)
+ if key == 'LD.host':
+ ld_host = os.path.join(build_to_root, value)
+ if key == 'NM':
+ nm = os.path.join(build_to_root, value)
+ if key == 'NM.host':
+ nm_host = os.path.join(build_to_root, value)
+ if key == 'READELF':
+ readelf = os.path.join(build_to_root, value)
+ if key == 'READELF.host':
+ readelf_host = os.path.join(build_to_root, value)
+ if key.endswith('_wrapper'):
+ wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
+
+ # Support wrappers from environment variables too.
+ for key, value in os.environ.items():
+ if key.lower().endswith('_wrapper'):
+ key_prefix = key[:-len('_wrapper')]
+ key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
+ wrappers[key_prefix] = os.path.join(build_to_root, value)
+
+ mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
+ if mac_toolchain_dir:
+ wrappers['LINK'] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir
+
+ if flavor == 'win':
+ configs = [target_dicts[qualified_target]['configurations'][config_name]
+ for qualified_target in target_list]
+ shared_system_includes = None
+ if not generator_flags.get('ninja_use_custom_environment_files', 0):
+ shared_system_includes = \
+ gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
+ configs, generator_flags)
+ cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
+ toplevel_build, generator_flags, shared_system_includes, OpenOutput)
+ for arch, path in sorted(cl_paths.items()):
+ if clang_cl:
+ # If we have selected clang-cl, use that instead.
+ path = clang_cl
+ command = CommandWithWrapper('CC', wrappers,
+ QuoteShellArgument(path, 'win'))
+ if clang_cl:
+ # Use clang-cl to cross-compile for x86 or x86_64.
+ command += (' -m32' if arch == 'x86' else ' -m64')
+ master_ninja.variable('cl_' + arch, command)
+
+ cc = GetEnvironFallback(['CC_target', 'CC'], cc)
+ master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
+ cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
+ master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))
+
+ if flavor == 'win':
+ master_ninja.variable('ld', ld)
+ master_ninja.variable('idl', 'midl.exe')
+ master_ninja.variable('ar', ar)
+ master_ninja.variable('rc', 'rc.exe')
+ master_ninja.variable('ml_x86', 'ml.exe')
+ master_ninja.variable('ml_x64', 'ml64.exe')
+ master_ninja.variable('mt', 'mt.exe')
+ else:
+ master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
+ master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
+ master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar))
+ if flavor != 'mac':
+      # Mac does not use readelf/nm for .TOC generation, so avoid polluting
+      # the master ninja file with extra unused variables.
+ master_ninja.variable(
+ 'nm', GetEnvironFallback(['NM_target', 'NM'], nm))
+ master_ninja.variable(
+ 'readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf))
+
+ if generator_supports_multiple_toolsets:
+ if not cc_host:
+ cc_host = cc
+ if not cxx_host:
+ cxx_host = cxx
+
+ master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host))
+ master_ninja.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host))
+ master_ninja.variable('readelf_host',
+ GetEnvironFallback(['READELF_host'], readelf_host))
+ cc_host = GetEnvironFallback(['CC_host'], cc_host)
+ cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
+
+    # The environment variables could be used in 'make_global_settings', like
+    # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)']; transform them here.
+ if '$(CC)' in cc_host and cc_host_global_setting:
+ cc_host = cc_host_global_setting.replace('$(CC)', cc)
+ if '$(CXX)' in cxx_host and cxx_host_global_setting:
+ cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
+ master_ninja.variable('cc_host',
+ CommandWithWrapper('CC.host', wrappers, cc_host))
+ master_ninja.variable('cxx_host',
+ CommandWithWrapper('CXX.host', wrappers, cxx_host))
+ if flavor == 'win':
+ master_ninja.variable('ld_host', ld_host)
+ else:
+ master_ninja.variable('ld_host', CommandWithWrapper(
+ 'LINK', wrappers, ld_host))
+ master_ninja.variable('ldxx_host', CommandWithWrapper(
+ 'LINK', wrappers, ldxx_host))
+
+ master_ninja.newline()
+
+ master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks())
+ master_ninja.newline()
+
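+  # 'deps' selects ninja's header-dependency processing for the compile rules
+  # below: 'gcc' reads the -MMD/-MF depfiles they write, while 'msvc' parses
+  # cl.exe's /showIncludes output.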
+ deps = 'msvc' if flavor == 'win' else 'gcc'
+
+ if flavor != 'win':
+ master_ninja.rule(
+ 'cc',
+ description='CC $out',
+ command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
+ '$cflags_pch_c -c $in -o $out'),
+ depfile='$out.d',
+ deps=deps)
+ master_ninja.rule(
+ 'cc_s',
+ description='CC $out',
+ command=('$cc $defines $includes $cflags $cflags_c '
+ '$cflags_pch_c -c $in -o $out'))
+ master_ninja.rule(
+ 'cxx',
+ description='CXX $out',
+ command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
+ '$cflags_pch_cc -c $in -o $out'),
+ depfile='$out.d',
+ deps=deps)
+ else:
+ # TODO(scottmg) Separate pdb names is a test to see if it works around
+ # http://crbug.com/142362. It seems there's a race between the creation of
+ # the .pdb by the precompiled header step for .cc and the compilation of
+ # .c files. This should be handled by mspdbsrv, but rarely errors out with
+ # c1xx : fatal error C1033: cannot open program database
+ # By making the rules target separate pdb files this might be avoided.
+ cc_command = ('ninja -t msvc -e $arch ' +
+ '-- '
+ '$cc /nologo /showIncludes /FC '
+ '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
+ cxx_command = ('ninja -t msvc -e $arch ' +
+ '-- '
+ '$cxx /nologo /showIncludes /FC '
+ '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
+ master_ninja.rule(
+ 'cc',
+ description='CC $out',
+ command=cc_command,
+ rspfile='$out.rsp',
+ rspfile_content='$defines $includes $cflags $cflags_c',
+ deps=deps)
+ master_ninja.rule(
+ 'cxx',
+ description='CXX $out',
+ command=cxx_command,
+ rspfile='$out.rsp',
+ rspfile_content='$defines $includes $cflags $cflags_cc',
+ deps=deps)
+ master_ninja.rule(
+ 'idl',
+ description='IDL $in',
+ command=('%s gyp-win-tool midl-wrapper $arch $outdir '
+ '$tlb $h $dlldata $iid $proxy $in '
+ '$midl_includes $idlflags' % sys.executable))
+ master_ninja.rule(
+ 'rc',
+ description='RC $in',
+ # Note: $in must be last otherwise rc.exe complains.
+ command=('%s gyp-win-tool rc-wrapper '
+ '$arch $rc $defines $resource_includes $rcflags /fo$out $in' %
+ sys.executable))
+ master_ninja.rule(
+ 'asm',
+ description='ASM $out',
+ command=('%s gyp-win-tool asm-wrapper '
+ '$arch $asm $defines $includes $asmflags /c /Fo $out $in' %
+ sys.executable))
+
+ if flavor != 'mac' and flavor != 'win':
+ master_ninja.rule(
+ 'alink',
+ description='AR $out',
+ command='rm -f $out && $ar rcs $arflags $out $in')
+ master_ninja.rule(
+ 'alink_thin',
+ description='AR $out',
+ command='rm -f $out && $ar rcsT $arflags $out $in')
+
+ # This allows targets that only need to depend on $lib's API to declare an
+ # order-only dependency on $lib.TOC and avoid relinking such downstream
+ # dependencies when $lib changes only in non-public ways.
+    # The resulting string leaves an uninterpolated %(suffix)s which
+    # is used in the final substitution below.
+ mtime_preserving_solink_base = (
+ 'if [ ! -e $lib -o ! -e $lib.TOC ]; then '
+ '%(solink)s && %(extract_toc)s > $lib.TOC; else '
+ '%(solink)s && %(extract_toc)s > $lib.tmp && '
+ 'if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; '
+ 'fi; fi'
+ % { 'solink':
+ '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
+ 'extract_toc':
+ ('{ $readelf -d $lib | grep SONAME ; '
+ '$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})
+
+ master_ninja.rule(
+ 'solink',
+ description='SOLINK $lib',
+ restat=True,
+ command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
+ rspfile='$link_file_list',
+ rspfile_content=
+ '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs',
+ pool='link_pool')
+ master_ninja.rule(
+ 'solink_module',
+ description='SOLINK(module) $lib',
+ restat=True,
+ command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
+ rspfile='$link_file_list',
+ rspfile_content='-Wl,--start-group $in -Wl,--end-group $solibs $libs',
+ pool='link_pool')
+ master_ninja.rule(
+ 'link',
+ description='LINK $out',
+ command=('$ld $ldflags -o $out '
+ '-Wl,--start-group $in -Wl,--end-group $solibs $libs'),
+ pool='link_pool')
+ elif flavor == 'win':
+ master_ninja.rule(
+ 'alink',
+ description='LIB $out',
+ command=('%s gyp-win-tool link-wrapper $arch False '
+ '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' %
+ sys.executable),
+ rspfile='$out.rsp',
+ rspfile_content='$in_newline $libflags')
+ _AddWinLinkRules(master_ninja, embed_manifest=True)
+ _AddWinLinkRules(master_ninja, embed_manifest=False)
+ else:
+ master_ninja.rule(
+ 'objc',
+ description='OBJC $out',
+ command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
+ '$cflags_pch_objc -c $in -o $out'),
+ depfile='$out.d',
+ deps=deps)
+ master_ninja.rule(
+ 'objcxx',
+ description='OBJCXX $out',
+ command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
+ '$cflags_pch_objcc -c $in -o $out'),
+ depfile='$out.d',
+ deps=deps)
+ master_ninja.rule(
+ 'alink',
+ description='LIBTOOL-STATIC $out, POSTBUILDS',
+ command='rm -f $out && '
+ './gyp-mac-tool filter-libtool libtool $libtool_flags '
+ '-static -o $out $in'
+ '$postbuilds')
+ master_ninja.rule(
+ 'lipo',
+ description='LIPO $out, POSTBUILDS',
+ command='rm -f $out && lipo -create $in -output $out$postbuilds')
+ master_ninja.rule(
+ 'solipo',
+ description='SOLIPO $out, POSTBUILDS',
+ command=(
+ 'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&'
+ '%(extract_toc)s > $lib.TOC'
+ % { 'extract_toc':
+ '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
+ 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}))
+
+
+ # Record the public interface of $lib in $lib.TOC. See the corresponding
+ # comment in the posix section above for details.
+ solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
+ mtime_preserving_solink_base = (
+ 'if [ ! -e $lib -o ! -e $lib.TOC ] || '
+ # Always force dependent targets to relink if this library
+ # reexports something. Handling this correctly would require
+ # recursive TOC dumping but this is rare in practice, so punt.
+ 'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
+ '%(solink)s && %(extract_toc)s > $lib.TOC; '
+ 'else '
+ '%(solink)s && %(extract_toc)s > $lib.tmp && '
+ 'if ! cmp -s $lib.tmp $lib.TOC; then '
+ 'mv $lib.tmp $lib.TOC ; '
+ 'fi; '
+ 'fi'
+ % { 'solink': solink_base,
+ 'extract_toc':
+ '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
+ 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
+
+
+ solink_suffix = '@$link_file_list$postbuilds'
+ master_ninja.rule(
+ 'solink',
+ description='SOLINK $lib, POSTBUILDS',
+ restat=True,
+ command=mtime_preserving_solink_base % {'suffix': solink_suffix,
+ 'type': '-shared'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
+ pool='link_pool')
+ master_ninja.rule(
+ 'solink_notoc',
+ description='SOLINK $lib, POSTBUILDS',
+ restat=True,
+ command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
+ pool='link_pool')
+
+ master_ninja.rule(
+ 'solink_module',
+ description='SOLINK(module) $lib, POSTBUILDS',
+ restat=True,
+ command=mtime_preserving_solink_base % {'suffix': solink_suffix,
+ 'type': '-bundle'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
+ pool='link_pool')
+ master_ninja.rule(
+ 'solink_module_notoc',
+ description='SOLINK(module) $lib, POSTBUILDS',
+ restat=True,
+ command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
+ pool='link_pool')
+
+ master_ninja.rule(
+ 'link',
+ description='LINK $out, POSTBUILDS',
+ command=('$ld $ldflags -o $out '
+ '$in $solibs $libs$postbuilds'),
+ pool='link_pool')
+ master_ninja.rule(
+ 'preprocess_infoplist',
+ description='PREPROCESS INFOPLIST $out',
+ command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
+ 'plutil -convert xml1 $out $out'))
+ master_ninja.rule(
+ 'copy_infoplist',
+ description='COPY INFOPLIST $in',
+ command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys')
+ master_ninja.rule(
+ 'merge_infoplist',
+ description='MERGE INFOPLISTS $in',
+ command='$env ./gyp-mac-tool merge-info-plist $out $in')
+ master_ninja.rule(
+ 'compile_xcassets',
+ description='COMPILE XCASSETS $in',
+ command='$env ./gyp-mac-tool compile-xcassets $keys $in')
+ master_ninja.rule(
+ 'compile_ios_framework_headers',
+ description='COMPILE HEADER MAPS AND COPY FRAMEWORK HEADERS $in',
+ command='$env ./gyp-mac-tool compile-ios-framework-header-map $out '
+ '$framework $in && $env ./gyp-mac-tool '
+ 'copy-ios-framework-headers $framework $copy_headers')
+ master_ninja.rule(
+ 'mac_tool',
+ description='MACTOOL $mactool_cmd $in',
+ command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary')
+ master_ninja.rule(
+ 'package_framework',
+ description='PACKAGE FRAMEWORK $out, POSTBUILDS',
+ command='./gyp-mac-tool package-framework $out $version$postbuilds '
+ '&& touch $out')
+ master_ninja.rule(
+ 'package_ios_framework',
+ description='PACKAGE IOS FRAMEWORK $out, POSTBUILDS',
+ command='./gyp-mac-tool package-ios-framework $out $postbuilds '
+ '&& touch $out')
+ if flavor == 'win':
+ master_ninja.rule(
+ 'stamp',
+ description='STAMP $out',
+ command='%s gyp-win-tool stamp $out' % sys.executable)
+ else:
+ master_ninja.rule(
+ 'stamp',
+ description='STAMP $out',
+ command='${postbuilds}touch $out')
+ if flavor == 'win':
+ master_ninja.rule(
+ 'copy',
+ description='COPY $in $out',
+ command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable)
+ elif flavor == 'zos':
+ master_ninja.rule(
+ 'copy',
+ description='COPY $in $out',
+ command='rm -rf $out && cp -fRP $in $out')
+ else:
+ master_ninja.rule(
+ 'copy',
+ description='COPY $in $out',
+ command='ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)')
+ master_ninja.newline()
+
+ all_targets = set()
+ for build_file in params['build_files']:
+ for target in gyp.common.AllTargets(target_list,
+ target_dicts,
+ os.path.normpath(build_file)):
+ all_targets.add(target)
+ all_outputs = set()
+
+ # target_outputs is a map from qualified target name to a Target object.
+ target_outputs = {}
+ # target_short_names is a map from target short name to a list of Target
+ # objects.
+ target_short_names = {}
+
+  # Short names of targets that were skipped because they didn't contain
+  # anything interesting.
+  # NOTE: there may be overlap between this and non_empty_target_names.
+ empty_target_names = set()
+
+ # Set of non-empty short target names.
+  # NOTE: there may be overlap between this and empty_target_names.
+ non_empty_target_names = set()
+
+ for qualified_target in target_list:
+ # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
+ build_file, name, toolset = \
+ gyp.common.ParseQualifiedTarget(qualified_target)
+
+ this_make_global_settings = data[build_file].get('make_global_settings', [])
+ assert make_global_settings == this_make_global_settings, (
+ "make_global_settings needs to be the same for all targets. %s vs. %s" %
+ (this_make_global_settings, make_global_settings))
+
+ spec = target_dicts[qualified_target]
+ if flavor == 'mac':
+ gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
+
+ # If build_file is a symlink, we must not follow it because there's a chance
+ # it could point to a path above toplevel_dir, and we cannot correctly deal
+ # with that case at the moment.
+ build_file = gyp.common.RelativePath(build_file, options.toplevel_dir,
+ False)
+
+ qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name,
+ toolset)
+ qualified_target_for_hash = qualified_target_for_hash.encode('utf-8')
+ hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()
+
+ base_path = os.path.dirname(build_file)
+ obj = 'obj'
+ if toolset != 'target':
+ obj += '.' + toolset
+ output_file = os.path.join(obj, base_path, name + '.ninja')
+
+ ninja_output = StringIO()
+ writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir,
+ ninja_output,
+ toplevel_build, output_file,
+ flavor, toplevel_dir=options.toplevel_dir)
+
+ target = writer.WriteSpec(spec, config_name, generator_flags)
+
+ if ninja_output.tell() > 0:
+      # Only write out ninja files that actually have contents.
+ with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
+ ninja_file.write(ninja_output.getvalue())
+ ninja_output.close()
+ master_ninja.subninja(output_file)
+
+ if target:
+ if name != target.FinalOutput() and spec['toolset'] == 'target':
+ target_short_names.setdefault(name, []).append(target)
+ target_outputs[qualified_target] = target
+ if qualified_target in all_targets:
+ all_outputs.add(target.FinalOutput())
+ non_empty_target_names.add(name)
+ else:
+ empty_target_names.add(name)
+
+ if target_short_names:
+ # Write a short name to build this target. This benefits both the
+ # "build chrome" case as well as the gyp tests, which expect to be
+ # able to run actions and build libraries by their short name.
+ master_ninja.newline()
+ master_ninja.comment('Short names for targets.')
+ for short_name in sorted(target_short_names):
+ master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
+ target_short_names[short_name]])
+
+ # Write phony targets for any empty targets that weren't written yet. As
+  # short names are not necessarily unique, only do this for short names that
+ # haven't already been output for another target.
+ empty_target_names = empty_target_names - non_empty_target_names
+ if empty_target_names:
+ master_ninja.newline()
+ master_ninja.comment('Empty targets (output for completeness).')
+ for name in sorted(empty_target_names):
+ master_ninja.build(name, 'phony')
+
+ if all_outputs:
+ master_ninja.newline()
+ master_ninja.build('all', 'phony', sorted(all_outputs))
+ master_ninja.default(generator_flags.get('default_target', 'all'))
+
+ master_ninja_file.close()
+
+
+def PerformBuild(data, configurations, params):
+ options = params['options']
+ for config in configurations:
+ builddir = os.path.join(options.toplevel_dir, 'out', config)
+ arguments = ['ninja', '-C', builddir]
+ print('Building [%s]: %s' % (config, arguments))
+ subprocess.check_call(arguments)
+
+
+def CallGenerateOutputForConfig(arglist):
+ # Ignore the interrupt signal so that the parent process catches it and
+ # kills all multiprocessing children.
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ (target_list, target_dicts, data, params, config_name) = arglist
+ GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ # Update target_dicts for iOS device builds.
+ target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
+ target_dicts)
+
+ user_config = params.get('generator_flags', {}).get('config', None)
+ if gyp.common.GetFlavor(params) == 'win':
+ target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
+ target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
+ target_list, target_dicts, generator_default_variables)
+
+ if user_config:
+ GenerateOutputForConfig(target_list, target_dicts, data, params,
+ user_config)
+ else:
+ config_names = target_dicts[target_list[0]]['configurations']
+ if params['parallel']:
+ try:
+ pool = multiprocessing.Pool(len(config_names))
+ arglists = []
+ for config_name in config_names:
+ arglists.append(
+ (target_list, target_dicts, data, params, config_name))
+ pool.map(CallGenerateOutputForConfig, arglists)
+ except KeyboardInterrupt as e:
+ pool.terminate()
+ raise e
+ else:
+ for config_name in config_names:
+ GenerateOutputForConfig(target_list, target_dicts, data, params,
+ config_name)
diff --git a/third_party/python/gyp/pylib/gyp/generator/ninja_test.py b/third_party/python/gyp/pylib/gyp/generator/ninja_test.py
new file mode 100644
index 0000000000..1ad68e4fc9
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/ninja_test.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the ninja.py file. """
+
+import gyp.generator.ninja as ninja
+import unittest
+import sys
+import TestCommon
+
+
+class TestPrefixesAndSuffixes(unittest.TestCase):
+ def test_BinaryNamesWindows(self):
+ # These cannot run on non-Windows as they require a VS installation to
+ # correctly handle variable expansion.
+ if sys.platform.startswith('win'):
+ writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
+ 'build.ninja', 'win')
+ spec = { 'target_name': 'wee' }
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
+ endswith('.exe'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
+ endswith('.dll'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
+ endswith('.lib'))
+
+ def test_BinaryNamesLinux(self):
+ writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
+ 'build.ninja', 'linux')
+ spec = { 'target_name': 'wee' }
+ self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
+ 'executable'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
+ startswith('lib'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
+ startswith('lib'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
+ endswith('.so'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
+ endswith('.a'))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/gyp/pylib/gyp/generator/xcode.py b/third_party/python/gyp/pylib/gyp/generator/xcode.py
new file mode 100644
index 0000000000..8bc22bed10
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/xcode.py
@@ -0,0 +1,1302 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import filecmp
+import gyp.common
+import gyp.xcodeproj_file
+import gyp.xcode_ninja
+import errno
+import os
+import sys
+import posixpath
+import re
+import shutil
+import subprocess
+import tempfile
+
+
+# Project files generated by this module will use _intermediate_var as a
+# custom Xcode setting whose value is a DerivedSources-like directory that's
+# project-specific and configuration-specific. The normal choice,
+# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
+# as it is likely that multiple targets within a single project file will want
+# to access the same set of generated files. The other option,
+# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
+# it is not configuration-specific. INTERMEDIATE_DIR is defined as
+# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
+_intermediate_var = 'INTERMEDIATE_DIR'
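+# For example, a generated file placed in '$(INTERMEDIATE_DIR)/foo.cc' ends
+# up under $(PROJECT_DERIVED_FILE_DIR)/Debug/foo.cc when building the Debug
+# configuration (see the SetBuildSetting call in Finalize1 below).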
+
+# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
+# targets that share the same BUILT_PRODUCTS_DIR.
+_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'
+
+_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'STATIC_LIB_PREFIX': 'lib',
+ 'SHARED_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_SUFFIX': '.a',
+ 'SHARED_LIB_SUFFIX': '.dylib',
+ # INTERMEDIATE_DIR is a place for targets to build up intermediate products.
+ # It is specific to each build environment. It is only guaranteed to exist
+ # and be constant within the context of a project, corresponding to a single
+ # input file. Some build environments may allow their intermediate directory
+ # to be shared on a wider scale, but this is not guaranteed.
+ 'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
+ 'OS': 'mac',
+ 'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
+ 'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
+ 'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
+ 'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
+ 'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
+ 'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
+ 'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',
+ 'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
+ 'CONFIGURATION_NAME': '$(CONFIGURATION)',
+}
+
+# The Xcode-specific sections that hold paths.
+generator_additional_path_sections = [
+ 'mac_bundle_resources',
+ 'mac_framework_headers',
+ 'mac_framework_private_headers',
+ # 'mac_framework_dirs', input already handles _dirs endings.
+]
+
+# The Xcode-specific keys that exist on targets and aren't moved down to
+# configurations.
+generator_additional_non_configuration_keys = [
+ 'ios_app_extension',
+ 'ios_watch_app',
+ 'ios_watchkit_extension',
+ 'mac_bundle',
+ 'mac_bundle_resources',
+ 'mac_framework_headers',
+ 'mac_framework_private_headers',
+ 'mac_xctest_bundle',
+ 'mac_xcuitest_bundle',
+ 'xcode_create_dependents_test_runner',
+]
+
+# We want rules to also apply to files that are resources.
+generator_extra_sources_for_rules = [
+ 'mac_bundle_resources',
+ 'mac_framework_headers',
+ 'mac_framework_private_headers',
+]
+
+generator_filelist_paths = None
+
+# Xcode's standard set of library directories, which don't need to be duplicated
+# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
+xcode_standard_library_dirs = frozenset([
+ '$(SDKROOT)/usr/lib',
+ '$(SDKROOT)/usr/local/lib',
+])
+
+def CreateXCConfigurationList(configuration_names):
+ xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})
+ if len(configuration_names) == 0:
+ configuration_names = ['Default']
+ for configuration_name in configuration_names:
+ xcbc = gyp.xcodeproj_file.XCBuildConfiguration({
+ 'name': configuration_name})
+ xccl.AppendProperty('buildConfigurations', xcbc)
+ xccl.SetProperty('defaultConfigurationName', configuration_names[0])
+ return xccl
+
+
+class XcodeProject(object):
+ def __init__(self, gyp_path, path, build_file_dict):
+ self.gyp_path = gyp_path
+ self.path = path
+ self.project = gyp.xcodeproj_file.PBXProject(path=path)
+ projectDirPath = gyp.common.RelativePath(
+ os.path.dirname(os.path.abspath(self.gyp_path)),
+ os.path.dirname(path) or '.')
+ self.project.SetProperty('projectDirPath', projectDirPath)
+ self.project_file = \
+ gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
+ self.build_file_dict = build_file_dict
+
+ # TODO(mark): add destructor that cleans up self.path if created_dir is
+ # True and things didn't complete successfully. Or do something even
+ # better with "try"?
+ self.created_dir = False
+ try:
+ os.makedirs(self.path)
+ self.created_dir = True
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ def Finalize1(self, xcode_targets, serialize_all_tests):
+ # Collect a list of all of the build configuration names used by the
+ # various targets in the file. It is strongly advised that every target
+ # in a project (even across multiple project files) use the same set of
+ # configuration names.
+ configurations = []
+ for xct in self.project.GetProperty('targets'):
+ xccl = xct.GetProperty('buildConfigurationList')
+ xcbcs = xccl.GetProperty('buildConfigurations')
+ for xcbc in xcbcs:
+ name = xcbc.GetProperty('name')
+ if name not in configurations:
+ configurations.append(name)
+
+ # Replace the XCConfigurationList attached to the PBXProject object with
+ # a new one specifying all of the configuration names used by the various
+ # targets.
+ try:
+ xccl = CreateXCConfigurationList(configurations)
+ self.project.SetProperty('buildConfigurationList', xccl)
+ except:
+ sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
+ raise
+
+ # The need for this setting is explained above where _intermediate_var is
+ # defined. The comments below about wanting to avoid project-wide build
+ # settings apply here too, but this needs to be set on a project-wide basis
+ # so that files relative to the _intermediate_var setting can be displayed
+ # properly in the Xcode UI.
+ #
+ # Note that for configuration-relative files such as anything relative to
+ # _intermediate_var, for the purposes of UI tree view display, Xcode will
+ # only resolve the configuration name once, when the project file is
+ # opened. If the active build configuration is changed, the project file
+ # must be closed and reopened if it is desired for the tree view to update.
+ # This is filed as Apple radar 6588391.
+ xccl.SetBuildSetting(_intermediate_var,
+ '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
+ xccl.SetBuildSetting(_shared_intermediate_var,
+ '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')
+
+ # Set user-specified project-wide build settings and config files. This
+ # is intended to be used very sparingly. Really, almost everything should
+ # go into target-specific build settings sections. The project-wide
+ # settings are only intended to be used in cases where Xcode attempts to
+ # resolve variable references in a project context as opposed to a target
+ # context, such as when resolving sourceTree references while building up
+ # the tree view for UI display.
+ # Any values set globally are applied to all configurations, then any
+ # per-configuration values are applied.
+ for xck, xcv in self.build_file_dict.get('xcode_settings', {}).items():
+ xccl.SetBuildSetting(xck, xcv)
+ if 'xcode_config_file' in self.build_file_dict:
+ config_ref = self.project.AddOrGetFileInRootGroup(
+ self.build_file_dict['xcode_config_file'])
+ xccl.SetBaseConfiguration(config_ref)
+ build_file_configurations = self.build_file_dict.get('configurations', {})
+ if build_file_configurations:
+ for config_name in configurations:
+ build_file_configuration_named = \
+ build_file_configurations.get(config_name, {})
+ if build_file_configuration_named:
+ xcc = xccl.ConfigurationNamed(config_name)
+ for xck, xcv in build_file_configuration_named.get('xcode_settings',
+ {}).items():
+ xcc.SetBuildSetting(xck, xcv)
+ if 'xcode_config_file' in build_file_configuration_named:
+ config_ref = self.project.AddOrGetFileInRootGroup(
+ build_file_configurations[config_name]['xcode_config_file'])
+ xcc.SetBaseConfiguration(config_ref)
+
+ # Sort the targets based on how they appeared in the input.
+ # TODO(mark): Like a lot of other things here, this assumes internal
+ # knowledge of PBXProject - in this case, of its "targets" property.
+
+ # ordinary_targets are ordinary targets that are already in the project
+ # file. run_test_targets are the targets that run unittests and should be
+ # used for the Run All Tests target. support_targets are the action/rule
+ # targets used by GYP file targets, just kept for the assert check.
+ ordinary_targets = []
+ run_test_targets = []
+ support_targets = []
+
+ # targets is the full list of targets in the project.
+ targets = []
+
+ # Does the project define its own "all" target?
+ has_custom_all = False
+
+ # targets_for_all is the list of ordinary_targets that should be listed
+ # in this project's "All" target. It includes each ordinary (non-run-test)
+ # target that does not have suppress_wildcard set.
+ targets_for_all = []
+
+ for target in self.build_file_dict['targets']:
+ target_name = target['target_name']
+ toolset = target['toolset']
+ qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
+ toolset)
+ xcode_target = xcode_targets[qualified_target]
+ # Make sure that the target being added to the sorted list is already in
+ # the unsorted list.
+ assert xcode_target in self.project._properties['targets']
+ targets.append(xcode_target)
+ ordinary_targets.append(xcode_target)
+ if xcode_target.support_target:
+ support_targets.append(xcode_target.support_target)
+ targets.append(xcode_target.support_target)
+
+ if not int(target.get('suppress_wildcard', False)):
+ targets_for_all.append(xcode_target)
+
+ if target_name.lower() == 'all':
+ has_custom_all = True
+
+ # If this target has a 'run_as' attribute, add its target to the
+ # targets, and add it to the test targets.
+ if target.get('run_as'):
+ # Make a target to run something. It should have one
+ # dependency, the parent xcode target.
+ xccl = CreateXCConfigurationList(configurations)
+ run_target = gyp.xcodeproj_file.PBXAggregateTarget({
+ 'name': 'Run ' + target_name,
+ 'productName': xcode_target.GetProperty('productName'),
+ 'buildConfigurationList': xccl,
+ },
+ parent=self.project)
+ run_target.AddDependency(xcode_target)
+
+ command = target['run_as']
+ script = ''
+ if command.get('working_directory'):
+ script = script + 'cd "%s"\n' % \
+ gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ command.get('working_directory'))
+
+ if command.get('environment'):
+ script = script + "\n".join(
+ ['export %s="%s"' %
+ (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
+ for (key, val) in command.get('environment').items()]) + "\n"
+
+ # Some tests end up using sockets, files on disk, etc., and can get
+ # confused if more than one test runs at a time. The generator flag
+ # 'xcode_serialize_all_test_runs' controls whether all tests are forced
+ # to run serially; it defaults to True. To get serial runs, this little
+ # bit of Python does the same as the Linux flock utility to make sure
+ # only one runs at a time.
+ command_prefix = ''
+ if serialize_all_tests:
+ command_prefix = \
+"""python -c "import fcntl, subprocess, sys
+file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
+fcntl.flock(file.fileno(), fcntl.LOCK_EX)
+sys.exit(subprocess.call(sys.argv[1:]))" """
+
+ # If we were unable to exec for some reason, exit with an error. Also
+ # fix up variable references to use shell syntax instead of Xcode syntax.
+ script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
+ gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ gyp.common.EncodePOSIXShellList(command.get('action')))
+
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
+ 'shellScript': script,
+ 'showEnvVarsInLog': 0,
+ })
+ run_target.AppendProperty('buildPhases', ssbp)
+
+ # Add the run target to the project file.
+ targets.append(run_target)
+ run_test_targets.append(run_target)
+ xcode_target.test_runner = run_target
+
+
+ # Make sure that the list of targets being replaced is the same length as
+ # the one replacing it, but allow for the added test runner targets.
+ assert len(self.project._properties['targets']) == \
+ len(ordinary_targets) + len(support_targets)
+
+ self.project._properties['targets'] = targets
+
+ # Get rid of unnecessary levels of depth in groups like the Source group.
+ self.project.RootGroupsTakeOverOnlyChildren(True)
+
+ # Sort the groups nicely. Do this after sorting the targets, because the
+ # Products group is sorted based on the order of the targets.
+ self.project.SortGroups()
+
+ # Create an "All" target if there's more than one target in this project
+ # file and the project didn't define its own "All" target. Put a generated
+ # "All" target first so that people opening up the project for the first
+ # time will build everything by default.
+ if len(targets_for_all) > 1 and not has_custom_all:
+ xccl = CreateXCConfigurationList(configurations)
+ all_target = gyp.xcodeproj_file.PBXAggregateTarget(
+ {
+ 'buildConfigurationList': xccl,
+ 'name': 'All',
+ },
+ parent=self.project)
+
+ for target in targets_for_all:
+ all_target.AddDependency(target)
+
+ # TODO(mark): This is evil because it relies on internal knowledge of
+ # PBXProject._properties. It's important to get the "All" target first,
+ # though.
+ self.project._properties['targets'].insert(0, all_target)
+
+ # The same, but for run_test_targets.
+ if len(run_test_targets) > 1:
+ xccl = CreateXCConfigurationList(configurations)
+ run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
+ {
+ 'buildConfigurationList': xccl,
+ 'name': 'Run All Tests',
+ },
+ parent=self.project)
+ for run_test_target in run_test_targets:
+ run_all_tests_target.AddDependency(run_test_target)
+
+ # Insert after the "All" target, which must exist if there is more than
+ # one run_test_target.
+ self.project._properties['targets'].insert(1, run_all_tests_target)
+
+ def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
+ # Finalize2 needs to happen in a separate step because the process of
+ # updating references to other projects depends on the ordering of targets
+ # within remote project files. Finalize1 is responsible for sorting duty,
+ # and once all project files are sorted, Finalize2 can come in and update
+ # these references.
+
+ # To support making a "test runner" target that will run all the tests
+ # that are direct dependents of any given target, we look for
+ # xcode_create_dependents_test_runner being set on an Aggregate target,
+ # and generate a second target that will run the tests runners found under
+ # the marked target.
+ for bf_tgt in self.build_file_dict['targets']:
+ if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
+ tgt_name = bf_tgt['target_name']
+ toolset = bf_tgt['toolset']
+ qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
+ tgt_name, toolset)
+ xcode_target = xcode_targets[qualified_target]
+ if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
+ # Collect all the run test targets.
+ all_run_tests = []
+ pbxtds = xcode_target.GetProperty('dependencies')
+ for pbxtd in pbxtds:
+ pbxcip = pbxtd.GetProperty('targetProxy')
+ dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
+ if hasattr(dependency_xct, 'test_runner'):
+ all_run_tests.append(dependency_xct.test_runner)
+
+ # Directly depend on all the runners as they depend on the target
+ # that builds them.
+ if len(all_run_tests) > 0:
+ run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
+ 'name': 'Run %s Tests' % tgt_name,
+ 'productName': tgt_name,
+ },
+ parent=self.project)
+ for run_test_target in all_run_tests:
+ run_all_target.AddDependency(run_test_target)
+
+ # Insert the test runner after the related target.
+ idx = self.project._properties['targets'].index(xcode_target)
+ self.project._properties['targets'].insert(idx + 1, run_all_target)
+
+ # Update all references to other projects, to make sure that the lists of
+ # remote products are complete. Otherwise, Xcode will fill them in when
+ # it opens the project file, which will result in unnecessary diffs.
+ # TODO(mark): This is evil because it relies on internal knowledge of
+ # PBXProject._other_pbxprojects.
+ for other_pbxproject in self.project._other_pbxprojects.keys():
+ self.project.AddOrGetProjectReference(other_pbxproject)
+
+ self.project.SortRemoteProductReferences()
+
+ # Give everything an ID.
+ self.project_file.ComputeIDs()
+
+ # Make sure that no two objects in the project file have the same ID. If
+ # multiple objects wind up with the same ID, upon loading the file, Xcode
+ # will only recognize one object (the last one in the file?) and the
+ # results are unpredictable.
+ self.project_file.EnsureNoIDCollisions()
+
+ def Write(self):
+ # Write the project file to a temporary location first. Xcode watches for
+ # changes to the project file and presents a UI sheet offering to reload
+ # the project when it does change. However, in some cases, especially when
+ # multiple projects are open or when Xcode is busy, things don't work so
+ # seamlessly. Sometimes, Xcode is able to detect that a project file has
+ # changed but can't unload it because something else is referencing it.
+ # To mitigate this problem, and to avoid even having Xcode present the UI
+ # sheet when an open project is rewritten for inconsequential changes, the
+ # project file is written to a temporary file in the xcodeproj directory
+ # first. The new temporary file is then compared to the existing project
+ # file, if any. If they differ, the new file replaces the old; otherwise,
+ # the new project file is simply deleted. Xcode properly detects a file
+ # being renamed over an open project file as a change and so it remains
+ # able to present the "project file changed" sheet under this system.
+ # Writing to a temporary file first also avoids the possible problem of
+ # Xcode rereading an incomplete project file.
+ (output_fd, new_pbxproj_path) = \
+ tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
+ dir=self.path)
+
+ try:
+ output_file = os.fdopen(output_fd, 'w')
+
+ self.project_file.Print(output_file)
+ output_file.close()
+
+ pbxproj_path = os.path.join(self.path, 'project.pbxproj')
+
+ same = False
+ try:
+ same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ if same:
+ # The new file is identical to the old one, just get rid of the new
+ # one.
+ os.unlink(new_pbxproj_path)
+ else:
+ # The new file is different from the old one, or there is no old one.
+ # Rename the new file to the permanent name.
+ #
+ # tempfile.mkstemp uses an overly restrictive mode, resulting in a
+ # file that can only be read by the owner, regardless of the umask.
+ # There's no reason to not respect the umask here, which means that
+ # an extra hoop is required to fetch it and reset the new file's mode.
+ #
+ # No way to get the umask without setting a new one? Set a safe one
+ # and then set it back to the old value.
+ umask = os.umask(0o77)
+ os.umask(umask)
+
+ os.chmod(new_pbxproj_path, 0o666 & ~umask)
+ os.rename(new_pbxproj_path, pbxproj_path)
+
+ except Exception:
+ # Don't leave turds behind. In fact, if this code was responsible for
+ # creating the xcodeproj directory, get rid of that too.
+ os.unlink(new_pbxproj_path)
+ if self.created_dir:
+ shutil.rmtree(self.path, True)
+ raise
+
+
+def AddSourceToTarget(source, type, pbxp, xct):
+ # TODO(mark): Perhaps source_extensions and library_extensions can be made a
+ # little bit fancier.
+ source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's', 'swift']
+
+ # .o is conceptually more of a "source" than a "library," but Xcode thinks
+ # of "sources" as things to compile and "libraries" (or "frameworks") as
+ # things to link with. Adding an object file to an Xcode target's frameworks
+ # phase works properly.
+ library_extensions = ['a', 'dylib', 'framework', 'o']
+
+ basename = posixpath.basename(source)
+ (root, ext) = posixpath.splitext(basename)
+ if ext:
+ ext = ext[1:].lower()
+
+ if ext in source_extensions and type != 'none':
+ xct.SourcesPhase().AddFile(source)
+ elif ext in library_extensions and type != 'none':
+ xct.FrameworksPhase().AddFile(source)
+ else:
+ # Files that aren't added to a sources or frameworks build phase can still
+ # go into the project file, just not as part of a build phase.
+ pbxp.AddOrGetFileInRootGroup(source)
+
+
+def AddResourceToTarget(resource, pbxp, xct):
+ # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
+ # where it's used.
+ xct.ResourcesPhase().AddFile(resource)
+
+
+def AddHeaderToTarget(header, pbxp, xct, is_public):
+ # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
+ # where it's used.
+ settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public]
+ xct.HeadersPhase().AddFile(header, settings)
+
+
+_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
+def ExpandXcodeVariables(string, expansions):
+ """Expands Xcode-style $(VARIABLES) in string per the expansions dict.
+
+ In some rare cases, it is appropriate to expand Xcode variables when a
+ project file is generated. For any substring $(VAR) in string, if VAR is a
+ key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
+ Any $(VAR) substring in string for which VAR is not a key in the expansions
+ dict will remain in the returned string.
+ """
+
+ matches = _xcode_variable_re.findall(string)
+ if matches is None:
+ return string
+
+ matches.reverse()
+ for match in matches:
+ (to_replace, variable) = match
+ if variable not in expansions:
+ continue
+
+ replacement = expansions[variable]
+ string = re.sub(re.escape(to_replace), replacement, string)
+
+ return string
+
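+# For example, with the INPUT_FILE_* expansions built for rules later in this
+# file, ExpandXcodeVariables('$(INPUT_FILE_BASE).cc', {'INPUT_FILE_BASE':
+# 'two'}) returns 'two.cc', while a variable not present in the expansions
+# dict, such as '$(CONFIGURATION)' here, is left in the string unchanged.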
+
+_xcode_define_re = re.compile(r'([\\\"\' ])')
+def EscapeXcodeDefine(s):
+ """We must escape the defines that we give to Xcode so that it knows not to
+ split on spaces and to respect backslash and quote literals. However, we
+ must not quote the define, or Xcode will incorrectly interpret variables,
+ especially $(inherited)."""
+ return re.sub(_xcode_define_re, r'\\\1', s)
+
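+# For example, EscapeXcodeDefine('a b"c\\') yields 'a\\ b\\"c\\\\', while
+# EscapeXcodeDefine('$(inherited)') returns its input unchanged; both cases
+# are also exercised in xcode_test.py.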
+
+def PerformBuild(data, configurations, params):
+ options = params['options']
+
+ for build_file, build_file_dict in data.items():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != '.gyp':
+ continue
+ xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
+ if options.generator_output:
+ xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
+
+ for config in configurations:
+ arguments = ['xcodebuild', '-project', xcodeproj_path]
+ arguments += ['-configuration', config]
+ print("Building [%s]: %s" % (config, arguments))
+ subprocess.check_call(arguments)
+
+
+def CalculateGeneratorInputInfo(params):
+ toplevel = params['options'].toplevel_dir
+ if params.get('flavor') == 'ninja':
+ generator_dir = os.path.relpath(params['options'].generator_output or '.')
+ output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
+ output_dir = os.path.normpath(os.path.join(generator_dir, output_dir))
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, output_dir, 'gypfiles-xcode-ninja'))
+ else:
+ output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild'))
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, output_dir, 'gypfiles'))
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': toplevel,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ # Optionally configure each spec to use ninja as the external builder.
+ ninja_wrapper = params.get('flavor') == 'ninja'
+ if ninja_wrapper:
+ (target_list, target_dicts, data) = \
+ gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params)
+
+ options = params['options']
+ generator_flags = params.get('generator_flags', {})
+ parallel_builds = generator_flags.get('xcode_parallel_builds', True)
+ serialize_all_tests = \
+ generator_flags.get('xcode_serialize_all_test_runs', True)
+ upgrade_check_project_version = \
+ generator_flags.get('xcode_upgrade_check_project_version', None)
+
+ # Format upgrade_check_project_version with leading zeros as needed.
+ if upgrade_check_project_version:
+ upgrade_check_project_version = str(upgrade_check_project_version)
+ while len(upgrade_check_project_version) < 4:
+ upgrade_check_project_version = '0' + upgrade_check_project_version
+
+ skip_excluded_files = \
+ not generator_flags.get('xcode_list_excluded_files', True)
+ xcode_projects = {}
+ for build_file, build_file_dict in data.items():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != '.gyp':
+ continue
+ xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
+ if options.generator_output:
+ xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
+ xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
+ xcode_projects[build_file] = xcp
+ pbxp = xcp.project
+
+ # Set project-level attributes from multiple options
+ project_attributes = {}
+ if parallel_builds:
+ project_attributes['BuildIndependentTargetsInParallel'] = 'YES'
+ if upgrade_check_project_version:
+ project_attributes['LastUpgradeCheck'] = upgrade_check_project_version
+ project_attributes['LastTestingUpgradeCheck'] = \
+ upgrade_check_project_version
+ project_attributes['LastSwiftUpdateCheck'] = \
+ upgrade_check_project_version
+ pbxp.SetProperty('attributes', project_attributes)
+
+ # Add gyp/gypi files to project
+ if not generator_flags.get('standalone'):
+ main_group = pbxp.GetProperty('mainGroup')
+ build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
+ main_group.AppendChild(build_group)
+ for included_file in build_file_dict['included_files']:
+ build_group.AddOrGetFileByPath(included_file, False)
+
+ xcode_targets = {}
+ xcode_target_to_target_dict = {}
+ for qualified_target in target_list:
+ [build_file, target_name, toolset] = \
+ gyp.common.ParseQualifiedTarget(qualified_target)
+
+ spec = target_dicts[qualified_target]
+ if spec['toolset'] != 'target':
+ raise Exception(
+ 'Multiple toolsets not supported in xcode build (target %s)' %
+ qualified_target)
+ configuration_names = [spec['default_configuration']]
+ for configuration_name in sorted(spec['configurations'].keys()):
+ if configuration_name not in configuration_names:
+ configuration_names.append(configuration_name)
+ xcp = xcode_projects[build_file]
+ pbxp = xcp.project
+
+ # Set up the configurations for the target according to the list of names
+ # supplied.
+ xccl = CreateXCConfigurationList(configuration_names)
+
+ # Create an XCTarget subclass object for the target. The type with
+ # "+bundle" appended will be used if the target has "mac_bundle" set.
+ # loadable_modules not in a mac_bundle are mapped to
+ # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
+ # to create a single-file mh_bundle.
+ _types = {
+ 'executable': 'com.apple.product-type.tool',
+ 'loadable_module': 'com.googlecode.gyp.xcode.bundle',
+ 'shared_library': 'com.apple.product-type.library.dynamic',
+ 'static_library': 'com.apple.product-type.library.static',
+ 'mac_kernel_extension': 'com.apple.product-type.kernel-extension',
+ 'executable+bundle': 'com.apple.product-type.application',
+ 'loadable_module+bundle': 'com.apple.product-type.bundle',
+ 'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
+ 'loadable_module+xcuitest': 'com.apple.product-type.bundle.ui-testing',
+ 'shared_library+bundle': 'com.apple.product-type.framework',
+ 'executable+extension+bundle': 'com.apple.product-type.app-extension',
+ 'executable+watch+extension+bundle':
+ 'com.apple.product-type.watchkit-extension',
+ 'executable+watch+bundle':
+ 'com.apple.product-type.application.watchapp',
+ 'mac_kernel_extension+bundle': 'com.apple.product-type.kernel-extension',
+ }
+
+ target_properties = {
+ 'buildConfigurationList': xccl,
+ 'name': target_name,
+ }
+
+ type = spec['type']
+ is_xctest = int(spec.get('mac_xctest_bundle', 0))
+ is_xcuitest = int(spec.get('mac_xcuitest_bundle', 0))
+ is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest or is_xcuitest
+ is_app_extension = int(spec.get('ios_app_extension', 0))
+ is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
+ is_watch_app = int(spec.get('ios_watch_app', 0))
+ if type != 'none':
+ type_bundle_key = type
+ if is_xcuitest:
+ type_bundle_key += '+xcuitest'
+ assert type == 'loadable_module', (
+ 'mac_xcuitest_bundle targets must have type loadable_module '
+ '(target %s)' % target_name)
+ elif is_xctest:
+ type_bundle_key += '+xctest'
+ assert type == 'loadable_module', (
+ 'mac_xctest_bundle targets must have type loadable_module '
+ '(target %s)' % target_name)
+ elif is_app_extension:
+ assert is_bundle, ('ios_app_extension flag requires mac_bundle '
+ '(target %s)' % target_name)
+ type_bundle_key += '+extension+bundle'
+ elif is_watchkit_extension:
+ assert is_bundle, ('ios_watchkit_extension flag requires mac_bundle '
+ '(target %s)' % target_name)
+ type_bundle_key += '+watch+extension+bundle'
+ elif is_watch_app:
+ assert is_bundle, ('ios_watch_app flag requires mac_bundle '
+ '(target %s)' % target_name)
+ type_bundle_key += '+watch+bundle'
+ elif is_bundle:
+ type_bundle_key += '+bundle'
+
+ xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
+ try:
+ target_properties['productType'] = _types[type_bundle_key]
+ except KeyError as e:
+ gyp.common.ExceptionAppend(e, "-- unknown product type while "
+ "writing target %s" % target_name)
+ raise
+ else:
+ xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
+ assert not is_bundle, (
+ 'mac_bundle targets cannot have type none (target "%s")' %
+ target_name)
+ assert not is_xcuitest, (
+ 'mac_xcuitest_bundle targets cannot have type none (target "%s")' %
+ target_name)
+ assert not is_xctest, (
+ 'mac_xctest_bundle targets cannot have type none (target "%s")' %
+ target_name)
+
+ target_product_name = spec.get('product_name')
+ if target_product_name is not None:
+ target_properties['productName'] = target_product_name
+
+ xct = xctarget_type(target_properties, parent=pbxp,
+ force_outdir=spec.get('product_dir'),
+ force_prefix=spec.get('product_prefix'),
+ force_extension=spec.get('product_extension'))
+ pbxp.AppendProperty('targets', xct)
+ xcode_targets[qualified_target] = xct
+ xcode_target_to_target_dict[xct] = spec
+
+ spec_actions = spec.get('actions', [])
+ spec_rules = spec.get('rules', [])
+
+ # Xcode has some "issues" with checking dependencies for the "Compile
+ # sources" step with any source files/headers generated by actions/rules.
+ # To work around this, if a target is building anything directly (not
+ # type "none"), then a second target is used to run the GYP actions/rules
+ # and is made a dependency of this target. This way the work is done
+ # before the dependency checks for what should be recompiled.
+ support_xct = None
+ # The Xcode "issues" don't affect xcode-ninja builds, since the dependency
+ # logic all happens in ninja. Don't bother creating the extra targets in
+ # that case.
+ if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper:
+ support_xccl = CreateXCConfigurationList(configuration_names)
+ support_target_suffix = generator_flags.get(
+ 'support_target_suffix', ' Support')
+ support_target_properties = {
+ 'buildConfigurationList': support_xccl,
+ 'name': target_name + support_target_suffix,
+ }
+ if target_product_name:
+ support_target_properties['productName'] = \
+ target_product_name + ' Support'
+ support_xct = \
+ gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
+ parent=pbxp)
+ pbxp.AppendProperty('targets', support_xct)
+ xct.AddDependency(support_xct)
+ # Hang the support target off the main target so it can be tested/found
+ # by the generator during Finalize.
+ xct.support_target = support_xct
+
+ prebuild_index = 0
+
+ # Add custom shell script phases for "actions" sections.
+ for action in spec_actions:
+ # There's no need to write anything into the script to ensure that the
+ # output directories already exist, because Xcode will look at the
+ # declared outputs and automatically ensure that they exist for us.
+
+ # Do we have a message to print when this action runs?
+ message = action.get('message')
+ if message:
+ message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
+ else:
+ message = ''
+
+ # Turn the list into a string that can be passed to a shell.
+ action_string = gyp.common.EncodePOSIXShellList(action['action'])
+
+ # Convert Xcode-type variable references to sh-compatible environment
+ # variable references.
+ message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
+ action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ action_string)
+
+ script = ''
+ # Include the optional message
+ if message_sh:
+ script += message_sh + '\n'
+ # Run the action via exec; if exec fails, the script exits, signalling
+ # an error.
+ script += 'exec ' + action_string_sh + '\nexit 1\n'
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
+ 'inputPaths': action['inputs'],
+ 'name': 'Action "' + action['action_name'] + '"',
+ 'outputPaths': action['outputs'],
+ 'shellScript': script,
+ 'showEnvVarsInLog': 0,
+ })
+
+ if support_xct:
+ support_xct.AppendProperty('buildPhases', ssbp)
+ else:
+ # TODO(mark): this assumes too much knowledge of the internals of
+ # xcodeproj_file; some of these smarts should move into xcodeproj_file
+ # itself.
+ xct._properties['buildPhases'].insert(prebuild_index, ssbp)
+ prebuild_index = prebuild_index + 1
+
+ # TODO(mark): Should verify that at most one of these is specified.
+ if int(action.get('process_outputs_as_sources', False)):
+ for output in action['outputs']:
+ AddSourceToTarget(output, type, pbxp, xct)
+
+ if int(action.get('process_outputs_as_mac_bundle_resources', False)):
+ for output in action['outputs']:
+ AddResourceToTarget(output, pbxp, xct)
+
+ # tgt_mac_bundle_resources holds the list of bundle resources so
+ # the rule processing can check against it.
+ if is_bundle:
+ tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
+ else:
+ tgt_mac_bundle_resources = []
+
+ # Add custom shell script phases driving "make" for "rules" sections.
+ #
+ # Xcode's built-in rule support is almost powerful enough to use directly,
+ # but there are a few significant deficiencies that render them unusable.
+ # There are workarounds for some of its inadequacies, but in aggregate,
+ # the workarounds added complexity to the generator, and some workarounds
+ # actually require input files to be crafted more carefully than I'd like.
+ # Consequently, until Xcode rules are made more capable, "rules" input
+ # sections will be handled in Xcode output by shell script build phases
+ # performed prior to the compilation phase.
+ #
+ # The following problems with Xcode rules were found. The numbers are
+ # Apple radar IDs. I hope that these shortcomings are addressed; I really
+ # liked having the rules handled directly in Xcode during the period that
+ # I was prototyping this.
+ #
+ # 6588600 Xcode compiles custom script rule outputs too soon, compilation
+ # fails. This occurs when rule outputs from distinct inputs are
+ # interdependent. The only workaround is to put rules and their
+ # inputs in a separate target from the one that compiles the rule
+ # outputs. This requires input file cooperation and it means that
+ # process_outputs_as_sources is unusable.
+ # 6584932 Need to declare that custom rule outputs should be excluded from
+ # compilation. A possible workaround is to lie to Xcode about a
+ # rule's output, giving it a dummy file it doesn't know how to
+ # compile. The rule action script would need to touch the dummy.
+ # 6584839 I need a way to declare additional inputs to a custom rule.
+ # A possible workaround is a shell script phase prior to
+ # compilation that touches a rule's primary input files if any
+ # would-be additional inputs are newer than the output. Modifying
+ # the source tree - even just modification times - feels dirty.
+ # 6564240 Xcode "custom script" build rules always dump all environment
+ # variables. This is a low-priority problem and is not a
+ # show-stopper.
+ rules_by_ext = {}
+ for rule in spec_rules:
+ rules_by_ext[rule['extension']] = rule
+
+ # First, some definitions:
+ #
+ # A "rule source" is a file that was listed in a target's "sources"
+ # list and will have a rule applied to it on the basis of matching the
+ # rule's "extensions" attribute. Rule sources are direct inputs to
+ # rules.
+ #
+ # Rule definitions may specify additional inputs in their "inputs"
+ # attribute. These additional inputs are used for dependency tracking
+ # purposes.
+ #
+ # A "concrete output" is a rule output with input-dependent variables
+ # resolved. For example, given a rule with:
+ # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
+ # if the target's "sources" list contained "one.ext" and "two.ext",
+ # the "concrete output" for rule input "two.ext" would be "two.cc". If
+ # a rule specifies multiple outputs, each input file that the rule is
+ # applied to will have the same number of concrete outputs.
+ #
+ # If any concrete outputs are outdated or missing relative to their
+ # corresponding rule_source or to any specified additional input, the
+ # rule action must be performed to generate the concrete outputs.
+
+ # concrete_outputs_by_rule_source will have an item at the same index
+ # as the rule['rule_sources'] that it corresponds to. Each item is a
+ # list of all of the concrete outputs for the rule_source.
+ concrete_outputs_by_rule_source = []
+
+ # concrete_outputs_all is a flat list of all concrete outputs that this
+ # rule is able to produce, given the known set of input files
+ # (rule_sources) that apply to it.
+ concrete_outputs_all = []
+
+ # messages & actions are keyed by the same indices as rule['rule_sources']
+ # and concrete_outputs_by_rule_source. They contain the message and
+ # action to perform after resolving input-dependent variables. The
+ # message is optional, in which case None is stored for each rule source.
+ messages = []
+ actions = []
+
+ for rule_source in rule.get('rule_sources', []):
+ rule_source_dirname, rule_source_basename = \
+ posixpath.split(rule_source)
+ (rule_source_root, rule_source_ext) = \
+ posixpath.splitext(rule_source_basename)
+
+ # These are the same variable names that Xcode uses for its own native
+ # rule support. Because Xcode's rule engine is not being used, they
+ # need to be expanded as they are written to the makefile.
+ rule_input_dict = {
+ 'INPUT_FILE_BASE': rule_source_root,
+ 'INPUT_FILE_SUFFIX': rule_source_ext,
+ 'INPUT_FILE_NAME': rule_source_basename,
+ 'INPUT_FILE_PATH': rule_source,
+ 'INPUT_FILE_DIRNAME': rule_source_dirname,
+ }
+
+ concrete_outputs_for_this_rule_source = []
+ for output in rule.get('outputs', []):
+ # Fortunately, Xcode and make both use $(VAR) format for their
+ # variables, so the expansion is the only transformation necessary.
+ # Any remaining $(VAR)-type variables in the string can be given
+ # directly to make, which will pick up the correct settings from
+ # what Xcode puts into the environment.
+ concrete_output = ExpandXcodeVariables(output, rule_input_dict)
+ concrete_outputs_for_this_rule_source.append(concrete_output)
+
+ # Add all concrete outputs to the project.
+ pbxp.AddOrGetFileInRootGroup(concrete_output)
+
+ concrete_outputs_by_rule_source.append( \
+ concrete_outputs_for_this_rule_source)
+ concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
+
+ # TODO(mark): Should verify that at most one of these is specified.
+ if int(rule.get('process_outputs_as_sources', False)):
+ for output in concrete_outputs_for_this_rule_source:
+ AddSourceToTarget(output, type, pbxp, xct)
+
+ # If the file came from the mac_bundle_resources list or if the rule
+ # is marked to process outputs as bundle resource, do so.
+ was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
+ if was_mac_bundle_resource or \
+ int(rule.get('process_outputs_as_mac_bundle_resources', False)):
+ for output in concrete_outputs_for_this_rule_source:
+ AddResourceToTarget(output, pbxp, xct)
+
+ # Do we have a message to print when this rule runs?
+ message = rule.get('message')
+ if message:
+ message = gyp.common.EncodePOSIXShellArgument(message)
+ message = ExpandXcodeVariables(message, rule_input_dict)
+ messages.append(message)
+
+ # Turn the list into a string that can be passed to a shell.
+ action_string = gyp.common.EncodePOSIXShellList(rule['action'])
+
+ action = ExpandXcodeVariables(action_string, rule_input_dict)
+ actions.append(action)
+
+ if len(concrete_outputs_all) > 0:
+ # TODO(mark): There's a possibility of a collision here. Consider
+ # target "t" rule "A_r" and target "t_A" rule "r".
+ makefile_name = '%s.make' % re.sub(
+ '[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name']))
+ makefile_path = os.path.join(xcode_projects[build_file].path,
+ makefile_name)
+ # TODO(mark): try/close? Write to a temporary file and swap it only
+ # if it's got changes?
+ makefile = open(makefile_path, 'w')
+
+ # make will build the first target in the makefile by default. By
+ # convention, it's called "all". List all (or at least one)
+ # concrete output for each rule source as a prerequisite of the "all"
+ # target.
+ makefile.write('all: \\\n')
+ for concrete_output_index, concrete_output_by_rule_source in \
+ enumerate(concrete_outputs_by_rule_source):
+ # Only list the first (index [0]) concrete output of each input
+ # in the "all" target. Otherwise, a parallel make (-j > 1) would
+ # attempt to process each input multiple times simultaneously.
+ # Were it not for that, "all" could just contain the entire list of
+ # concrete_outputs_all.
+ concrete_output = concrete_output_by_rule_source[0]
+ if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
+ eol = ''
+ else:
+ eol = ' \\'
+ makefile.write(' %s%s\n' % (concrete_output, eol))
+
+ for (rule_source, concrete_outputs, message, action) in \
+ zip(rule['rule_sources'], concrete_outputs_by_rule_source,
+ messages, actions):
+ makefile.write('\n')
+
+ # Add a rule that declares it can build each concrete output of a
+ # rule source. Collect the names of the directories that are
+ # required.
+ concrete_output_dirs = []
+ for concrete_output_index, concrete_output in \
+ enumerate(concrete_outputs):
+ if concrete_output_index == 0:
+ bol = ''
+ else:
+ bol = ' '
+ makefile.write('%s%s \\\n' % (bol, concrete_output))
+
+ concrete_output_dir = posixpath.dirname(concrete_output)
+ if (concrete_output_dir and
+ concrete_output_dir not in concrete_output_dirs):
+ concrete_output_dirs.append(concrete_output_dir)
+
+ makefile.write(' : \\\n')
+
+ # The prerequisites for this rule are the rule source itself and
+ # the set of additional rule inputs, if any.
+ prerequisites = [rule_source]
+ prerequisites.extend(rule.get('inputs', []))
+ for prerequisite_index, prerequisite in enumerate(prerequisites):
+ if prerequisite_index == len(prerequisites) - 1:
+ eol = ''
+ else:
+ eol = ' \\'
+ makefile.write(' %s%s\n' % (prerequisite, eol))
+
+ # Make sure that output directories exist before executing the rule
+ # action.
+ if len(concrete_output_dirs) > 0:
+ makefile.write('\t@mkdir -p "%s"\n' %
+ '" "'.join(concrete_output_dirs))
+
+ # The rule message and action have already had the necessary variable
+ # substitutions performed.
+ if message:
+ # Mark it with note: so Xcode picks it up in build output.
+ makefile.write('\t@echo note: %s\n' % message)
+ makefile.write('\t%s\n' % action)
+
+ makefile.close()
+
+ # It might be nice to ensure that needed output directories exist
+ # here rather than in each target in the Makefile, but that wouldn't
+ # work if there ever was a concrete output that had an input-dependent
+ # variable anywhere other than in the leaf position.
+
+ # To help speed things up, pass -j COUNT to make so it does some work
+ # in parallel. Don't use ncpus because Xcode will build ncpus targets
+ # in parallel, and if each target happens to have a rules step, there
+ # would be ncpus^2 jobs running at once. With a machine that has 2 quad-core
+ # Xeons, a build can quickly run out of processes based on
+ # scheduling/other tasks, and randomly failing builds are no good.
+ script = \
+"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
+if [ "${JOB_COUNT}" -gt 4 ]; then
+ JOB_COUNT=4
+fi
+exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
+exit 1
+""" % makefile_name
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
+ 'inputPaths': rule['rule_sources'],
+ 'name': 'Rule "' + rule['rule_name'] + '"',
+ 'outputPaths': concrete_outputs_all,
+ 'shellScript': script,
+ 'showEnvVarsInLog': 0,
+ })
+
+ if support_xct:
+ support_xct.AppendProperty('buildPhases', ssbp)
+ else:
+ # TODO(mark): this assumes too much knowledge of the internals of
+ # xcodeproj_file; some of these smarts should move into xcodeproj_file
+ # itself.
+ xct._properties['buildPhases'].insert(prebuild_index, ssbp)
+ prebuild_index = prebuild_index + 1
+
+ # Extra rule inputs also go into the project file. Concrete outputs were
+ # already added when they were computed.
+ groups = ['inputs', 'inputs_excluded']
+ if skip_excluded_files:
+ groups = [x for x in groups if not x.endswith('_excluded')]
+ for group in groups:
+ for item in rule.get(group, []):
+ pbxp.AddOrGetFileInRootGroup(item)
+
+ # Add "sources".
+ for source in spec.get('sources', []):
+ (source_root, source_extension) = posixpath.splitext(source)
+ if source_extension[1:] not in rules_by_ext:
+ # AddSourceToTarget will add the file to a root group if it's not
+ # already there.
+ AddSourceToTarget(source, type, pbxp, xct)
+ else:
+ pbxp.AddOrGetFileInRootGroup(source)
+
+ # Add "mac_bundle_resources" and "mac_framework_private_headers" if
+ # it's a bundle of any type.
+ if is_bundle:
+ for resource in tgt_mac_bundle_resources:
+ (resource_root, resource_extension) = posixpath.splitext(resource)
+ if resource_extension[1:] not in rules_by_ext:
+ AddResourceToTarget(resource, pbxp, xct)
+ else:
+ pbxp.AddOrGetFileInRootGroup(resource)
+
+ for header in spec.get('mac_framework_private_headers', []):
+ AddHeaderToTarget(header, pbxp, xct, False)
+
+ # Add "mac_framework_headers". These can be valid for both frameworks
+ # and static libraries.
+ if is_bundle or type == 'static_library':
+ for header in spec.get('mac_framework_headers', []):
+ AddHeaderToTarget(header, pbxp, xct, True)
+
+ # Add "copies".
+ pbxcp_dict = {}
+ for copy_group in spec.get('copies', []):
+ dest = copy_group['destination']
+ if dest[0] not in ('/', '$'):
+ # Relative paths are relative to $(SRCROOT).
+ dest = '$(SRCROOT)/' + dest
+
+ code_sign = int(copy_group.get('xcode_code_sign', 0))
+ settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign]
+
+ # Coalesce multiple "copies" sections in the same target with the same
+ # "destination" property into the same PBXCopyFilesBuildPhase, otherwise
+ # they'll wind up with ID collisions.
+ pbxcp = pbxcp_dict.get(dest, None)
+ if pbxcp is None:
+ pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
+ 'name': 'Copy to ' + copy_group['destination']
+ },
+ parent=xct)
+ pbxcp.SetDestination(dest)
+
+ # TODO(mark): The usual comment about this knowing too much about
+ # gyp.xcodeproj_file internals applies.
+ xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
+
+ pbxcp_dict[dest] = pbxcp
+
+ for file in copy_group['files']:
+ pbxcp.AddFile(file, settings)
+
+ # Excluded files can also go into the project file.
+ if not skip_excluded_files:
+ for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers',
+ 'mac_framework_private_headers']:
+ excluded_key = key + '_excluded'
+ for item in spec.get(excluded_key, []):
+ pbxp.AddOrGetFileInRootGroup(item)
+
+ # So can "inputs" and "outputs" sections of "actions" groups.
+ groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
+ if skip_excluded_files:
+ groups = [x for x in groups if not x.endswith('_excluded')]
+ for action in spec.get('actions', []):
+ for group in groups:
+ for item in action.get(group, []):
+ # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
+ # sources.
+ if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
+ pbxp.AddOrGetFileInRootGroup(item)
+
+ for postbuild in spec.get('postbuilds', []):
+ action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
+ script = 'exec ' + action_string_sh + '\nexit 1\n'
+
+ # Make the postbuild step depend on the output of ld or ar from this
+ # target. Apparently putting the script step after the link step isn't
+ # sufficient to ensure proper ordering in all cases. With an input
+ # declared but no outputs, the script step should run every time, as
+ # desired.
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
+ 'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'],
+ 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
+ 'shellScript': script,
+ 'showEnvVarsInLog': 0,
+ })
+ xct.AppendProperty('buildPhases', ssbp)
+
+ # Add dependencies before libraries, because adding a dependency may imply
+ # adding a library. It's preferable to keep dependencies listed first
+ # during a link phase so that they can override symbols that would
+ # otherwise be provided by libraries, which will usually include system
+ # libraries. On some systems, ld is finicky and even requires the
+ # libraries to be ordered in such a way that unresolved symbols in
+ # earlier-listed libraries may only be resolved by later-listed libraries.
+ # The Mac linker doesn't work that way, but other platforms do, and so
+ # their linker invocations need to be constructed in this way. There's
+ # no compelling reason for Xcode's linker invocations to differ.
+
+ if 'dependencies' in spec:
+ for dependency in spec['dependencies']:
+ xct.AddDependency(xcode_targets[dependency])
+ # The support project also gets the dependencies (in case they are
+ # needed for the actions/rules to work).
+ if support_xct:
+ support_xct.AddDependency(xcode_targets[dependency])
+
+ if 'libraries' in spec:
+ for library in spec['libraries']:
+ xct.FrameworksPhase().AddFile(library)
+ # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
+ # I wish Xcode handled this automatically.
+ library_dir = posixpath.dirname(library)
+ if library_dir not in xcode_standard_library_dirs and (
+ not xct.HasBuildSetting(_library_search_paths_var) or
+ library_dir not in xct.GetBuildSetting(_library_search_paths_var)):
+ xct.AppendBuildSetting(_library_search_paths_var, library_dir)
+
+ for configuration_name in configuration_names:
+ configuration = spec['configurations'][configuration_name]
+ xcbc = xct.ConfigurationNamed(configuration_name)
+ for include_dir in configuration.get('mac_framework_dirs', []):
+ xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
+ for include_dir in configuration.get('include_dirs', []):
+ xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
+ for library_dir in configuration.get('library_dirs', []):
+ if library_dir not in xcode_standard_library_dirs and (
+ not xcbc.HasBuildSetting(_library_search_paths_var) or
+ library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)):
+ xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)
+
+ if 'defines' in configuration:
+ for define in configuration['defines']:
+ set_define = EscapeXcodeDefine(define)
+ xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
+ if 'xcode_settings' in configuration:
+ for xck, xcv in configuration['xcode_settings'].items():
+ xcbc.SetBuildSetting(xck, xcv)
+ if 'xcode_config_file' in configuration:
+ config_ref = pbxp.AddOrGetFileInRootGroup(
+ configuration['xcode_config_file'])
+ xcbc.SetBaseConfiguration(config_ref)
+
+ build_files = []
+ for build_file, build_file_dict in data.items():
+ if build_file.endswith('.gyp'):
+ build_files.append(build_file)
+
+ for build_file in build_files:
+ xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
+
+ for build_file in build_files:
+ xcode_projects[build_file].Finalize2(xcode_targets,
+ xcode_target_to_target_dict)
+
+ for build_file in build_files:
+ xcode_projects[build_file].Write()
diff --git a/third_party/python/gyp/pylib/gyp/generator/xcode_test.py b/third_party/python/gyp/pylib/gyp/generator/xcode_test.py
new file mode 100644
index 0000000000..260324a43f
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/generator/xcode_test.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the xcode.py file. """
+
+import gyp.generator.xcode as xcode
+import unittest
+import sys
+
+
+class TestEscapeXcodeDefine(unittest.TestCase):
+ if sys.platform == 'darwin':
+ def test_InheritedRemainsUnescaped(self):
+ self.assertEqual(xcode.EscapeXcodeDefine('$(inherited)'), '$(inherited)')
+
+ def test_Escaping(self):
+ self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/gyp/pylib/gyp/input.py b/third_party/python/gyp/pylib/gyp/input.py
new file mode 100644
index 0000000000..2bea3341ad
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/input.py
@@ -0,0 +1,2908 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import ast
+import gyp.common
+import gyp.simple_copy
+import multiprocessing
+import optparse
+import os.path
+import re
+import shlex
+import signal
+import subprocess
+import sys
+import threading
+import time
+import traceback
+from gyp.common import GypError
+from gyp.common import OrderedSet
+
+
+# A list of types that are treated as linkable.
+linkable_types = [
+ 'executable',
+ 'shared_library',
+ 'loadable_module',
+ 'mac_kernel_extension',
+ 'windows_driver',
+]
+
+# A list of sections that contain links to other targets.
+dependency_sections = ['dependencies', 'export_dependent_settings']
+
+# base_path_sections is a list of sections defined by GYP that contain
+# pathnames. The generators can provide more keys; the two lists are merged
+# into path_sections, but you should call IsPathSection instead of using either
+# list directly.
+base_path_sections = [
+ 'destination',
+ 'files',
+ 'include_dirs',
+ 'inputs',
+ 'libraries',
+ 'outputs',
+ 'sources',
+]
+path_sections = set()
+
+# These per-process dictionaries are used to cache build file data when loading
+# in parallel mode.
+per_process_data = {}
+per_process_aux_data = {}
+
+try:
+ _str_types = (basestring,)
+# There's no basestring in python3.
+except NameError:
+ _str_types = (str,)
+
+try:
+ _int_types = (int, long)
+# There's no long in python3.
+except NameError:
+ _int_types = (int,)
+
+# Shortcuts as we use these combos a lot.
+_str_int_types = _str_types + _int_types
+_str_int_list_types = _str_int_types + (list,)
+
+
+def IsPathSection(section):
+ # If section ends in one of the '=+?!' characters, it's applied to a section
+ # without the trailing characters. '/' is notably absent from this list,
+ # because there's no way for a regular expression to be treated as a path.
+ while section and section[-1:] in '=+?!':
+ section = section[:-1]
+
+ if section in path_sections:
+ return True
+
+ # Sections matching the regexp '_(dir|file|path)s?$' are also
+ # considered PathSections. Using manual string matching since that
+ # is much faster than the regexp and this can be called hundreds of
+ # thousands of times so micro performance matters.
+ if "_" in section:
+ tail = section[-6:]
+ if tail[-1] == 's':
+ tail = tail[:-1]
+ if tail[-5:] in ('_file', '_path'):
+ return True
+ return tail[-4:] == '_dir'
+
+ return False
+
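+# A few illustrative calls (hypothetical inputs; the results follow from the
+# rules above, assuming no generator has added these names to path_sections):
+#   IsPathSection('library_dirs')      -> True   (matches the '_dir(s)' suffix)
+#   IsPathSection('xcode_config_file') -> True   (matches the '_file' suffix)
+#   IsPathSection('include_dirs!')     -> True   (trailing '!' is stripped first)
+#   IsPathSection('defines')           -> False
+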
+# base_non_configuration_keys is a list of key names that belong in the target
+# itself and should not be propagated into its configurations. It is merged
+# with a list that can come from the generator to
+# create non_configuration_keys.
+base_non_configuration_keys = [
+ # Sections that must exist inside targets and not configurations.
+ 'actions',
+ 'all_dependent_settings',
+ 'configurations',
+ 'copies',
+ 'default_configuration',
+ 'dependencies',
+ 'dependencies_original',
+ 'direct_dependent_settings',
+ 'libraries',
+ 'postbuilds',
+ 'product_dir',
+ 'product_extension',
+ 'product_name',
+ 'product_prefix',
+ 'rules',
+ 'run_as',
+ 'sources',
+ 'standalone_static_library',
+ 'suppress_wildcard',
+ 'target_name',
+ 'toolset',
+ 'toolsets',
+ 'type',
+
+ # Sections that can be found inside targets or configurations, but that
+ # should not be propagated from targets into their configurations.
+ 'variables',
+]
+non_configuration_keys = []
+
+# Keys that do not belong inside a configuration dictionary.
+invalid_configuration_keys = [
+ 'actions',
+ 'all_dependent_settings',
+ 'configurations',
+ 'dependencies',
+ 'direct_dependent_settings',
+ 'libraries',
+ 'link_settings',
+ 'sources',
+ 'standalone_static_library',
+ 'target_name',
+ 'type',
+]
+
+# Controls whether or not the generator supports multiple toolsets.
+multiple_toolsets = False
+
+# Paths for converting filelist paths to output paths: {
+# toplevel,
+# qualified_output_dir,
+# }
+generator_filelist_paths = None
+
+def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
+ """Return a list of all build files included into build_file_path.
+
+ The returned list will contain build_file_path as well as all other files
+ that it included, either directly or indirectly. Note that the list may
+ contain files that were included into a conditional section that evaluated
+ to false and was not merged into build_file_path's dict.
+
+ aux_data is a dict containing a key for each build file or included build
+ file. Those keys provide access to dicts whose "included" keys contain
+ lists of all other files included by the build file.
+
+ included should be left at its default None value by external callers. It
+ is used for recursion.
+
+ The returned list will not contain any duplicate entries. Each build file
+ in the list will be relative to the current directory.
+ """
+
+ if included == None:
+ included = []
+
+ if build_file_path in included:
+ return included
+
+ included.append(build_file_path)
+
+ for included_build_file in aux_data[build_file_path].get('included', []):
+ GetIncludedBuildFiles(included_build_file, aux_data, included)
+
+ return included
+
+
+def CheckedEval(file_contents):
+ """Return the eval of a gyp file.
+
+ The gyp file is restricted to dictionaries and lists only, and
+ repeated keys are not allowed.
+
+ Note that this is slower than eval() is.
+ """
+
+ syntax_tree = ast.parse(file_contents)
+ assert isinstance(syntax_tree, ast.Module)
+ c1 = syntax_tree.body
+ assert len(c1) == 1
+ c2 = c1[0]
+ assert isinstance(c2, ast.Expr)
+ return CheckNode(c2.value, [])
+
+
+def CheckNode(node, keypath):
+ if isinstance(node, ast.Dict):
+ dict = {}
+ for key, value in zip(node.keys, node.values):
+ assert isinstance(key, ast.Str)
+ key = key.s
+ if key in dict:
+ raise GypError("Key '" + key + "' repeated at level " +
+ repr(len(keypath) + 1) + " with key path '" +
+ '.'.join(keypath) + "'")
+ kp = list(keypath) # Make a copy of the list for descending this node.
+ kp.append(key)
+ dict[key] = CheckNode(value, kp)
+ return dict
+ elif isinstance(node, ast.List):
+ children = []
+ for index, child in enumerate(node.elts):
+ kp = list(keypath) # Copy list.
+ kp.append(repr(index))
+ children.append(CheckNode(child, kp))
+ return children
+ elif isinstance(node, ast.Str):
+ return node.s
+ else:
+ raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
+ "': " + repr(node))
+
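+# Illustrative behaviour of the checked evaluator above (hypothetical inputs):
+#   CheckedEval("{'targets': [{'target_name': 'foo'}]}")
+#       -> {'targets': [{'target_name': 'foo'}]}
+#   CheckedEval("{'a': '1', 'a': '2'}")   raises GypError (repeated key 'a')
+#   CheckedEval("{'a': 1 + 1}")           raises TypeError (only dict, list
+#                                         and string literals are accepted)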
+
+def LoadOneBuildFile(build_file_path, data, aux_data, includes,
+ is_target, check):
+ if build_file_path in data:
+ return data[build_file_path]
+
+ if os.path.exists(build_file_path):
+ build_file_contents = open(build_file_path, 'rb').read().decode('utf-8')
+ else:
+ raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
+
+ build_file_data = None
+ try:
+ if check:
+ build_file_data = CheckedEval(build_file_contents)
+ else:
+ build_file_data = eval(build_file_contents, {'__builtins__': None},
+ None)
+ except SyntaxError as e:
+ e.filename = build_file_path
+ raise
+ except Exception as e:
+ gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
+ raise
+
+ if type(build_file_data) is not dict:
+ raise GypError("%s does not evaluate to a dictionary." % build_file_path)
+
+ data[build_file_path] = build_file_data
+ aux_data[build_file_path] = {}
+
+ # Scan for includes and merge them in.
+ if ('skip_includes' not in build_file_data or
+ not build_file_data['skip_includes']):
+ try:
+ if is_target:
+ LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
+ aux_data, includes, check)
+ else:
+ LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
+ aux_data, None, check)
+ except Exception as e:
+ gyp.common.ExceptionAppend(e,
+ 'while reading includes of ' + build_file_path)
+ raise
+
+ return build_file_data
+
+
+def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
+ includes, check):
+ includes_list = []
+ if includes != None:
+ includes_list.extend(includes)
+ if 'includes' in subdict:
+ for include in subdict['includes']:
+ # "include" is specified relative to subdict_path, so compute the real
+ # path to include by appending the provided "include" to the directory
+ # in which subdict_path resides.
+ relative_include = \
+ os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
+ includes_list.append(relative_include)
+ # Unhook the includes list, it's no longer needed.
+ del subdict['includes']
+
+ # Merge in the included files.
+ for include in includes_list:
+ if not 'included' in aux_data[subdict_path]:
+ aux_data[subdict_path]['included'] = []
+ aux_data[subdict_path]['included'].append(include)
+
+ gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
+
+ MergeDicts(subdict,
+ LoadOneBuildFile(include, data, aux_data, None, False, check),
+ subdict_path, include)
+
+ # Recurse into subdictionaries.
+ for k, v in subdict.items():
+ if type(v) is dict:
+ LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
+ None, check)
+ elif type(v) is list:
+ LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
+ check)
+
+
+# This recurses into lists so that it can look for dicts.
+def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
+ for item in sublist:
+ if type(item) is dict:
+ LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
+ None, check)
+ elif type(item) is list:
+ LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
+
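+# As an illustration, a (hypothetical) build file foo/foo.gyp containing
+# 'includes': ['../common.gypi'] has common.gypi resolved relative to the
+# foo/ directory and merged into foo.gyp's dict before further processing.
+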
+# Processes toolsets in all the targets. This recurses into condition entries
+# since they can contain toolsets as well.
+def ProcessToolsetsInDict(data):
+ if 'targets' in data:
+ target_list = data['targets']
+ new_target_list = []
+ for target in target_list:
+ # If this target already has an explicit 'toolset', and no 'toolsets'
+ # list, don't modify it further.
+ if 'toolset' in target and 'toolsets' not in target:
+ new_target_list.append(target)
+ continue
+ if multiple_toolsets:
+ toolsets = target.get('toolsets', ['target'])
+ else:
+ toolsets = ['target']
+ # Make sure this 'toolsets' definition is only processed once.
+ if 'toolsets' in target:
+ del target['toolsets']
+ if len(toolsets) > 0:
+ # Optimization: only do copies if more than one toolset is specified.
+ for build in toolsets[1:]:
+ new_target = gyp.simple_copy.deepcopy(target)
+ new_target['toolset'] = build
+ new_target_list.append(new_target)
+ target['toolset'] = toolsets[0]
+ new_target_list.append(target)
+ data['targets'] = new_target_list
+ if 'conditions' in data:
+ for condition in data['conditions']:
+ if type(condition) is list:
+ for condition_dict in condition[1:]:
+ if type(condition_dict) is dict:
+ ProcessToolsetsInDict(condition_dict)
+
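+# For example, when the generator enables multiple_toolsets, a target with
+# 'toolsets': ['target', 'host'] is expanded into two copies of that target,
+# one with 'toolset': 'target' and one with 'toolset': 'host'; when
+# multiple_toolsets is False only the 'target' copy is produced.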
+
+# TODO(mark): I don't love this name. It just means that it's going to load
+# a build file that contains targets and is expected to provide a targets dict
+# that contains the targets...
+def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
+ depth, check, load_dependencies):
+ # If depth is set, predefine the DEPTH variable to be a relative path from
+ # this build file's directory to the directory identified by depth.
+ if depth:
+ # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
+ # temporary measure. This should really be addressed by keeping all paths
+ # in POSIX until actual project generation.
+ d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
+ if d == '':
+ variables['DEPTH'] = '.'
+ else:
+ variables['DEPTH'] = d.replace('\\', '/')
+
+ # The 'target_build_files' key is only set when loading target build files in
+ # the non-parallel code path, where LoadTargetBuildFile is called
+ # recursively. In the parallel code path, we don't need to check whether the
+ # |build_file_path| has already been loaded, because the 'scheduled' set in
+ # ParallelState guarantees that we never load the same |build_file_path|
+ # twice.
+ if 'target_build_files' in data:
+ if build_file_path in data['target_build_files']:
+ # Already loaded.
+ return False
+ data['target_build_files'].add(build_file_path)
+
+ gyp.DebugOutput(gyp.DEBUG_INCLUDES,
+ "Loading Target Build File '%s'", build_file_path)
+
+ build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
+ includes, True, check)
+
+ # Store DEPTH for later use in generators.
+ build_file_data['_DEPTH'] = depth
+
+ # Set up the included_files key indicating which .gyp files contributed to
+ # this target dict.
+ if 'included_files' in build_file_data:
+ raise GypError(build_file_path + ' must not contain included_files key')
+
+ included = GetIncludedBuildFiles(build_file_path, aux_data)
+ build_file_data['included_files'] = []
+ for included_file in included:
+ # included_file is relative to the current directory, but it needs to
+ # be made relative to build_file_path's directory.
+ included_relative = \
+ gyp.common.RelativePath(included_file,
+ os.path.dirname(build_file_path))
+ build_file_data['included_files'].append(included_relative)
+
+ # Do a first round of toolsets expansion so that conditions can be defined
+ # per toolset.
+ ProcessToolsetsInDict(build_file_data)
+
+ # Apply "pre"/"early" variable expansions and condition evaluations.
+ ProcessVariablesAndConditionsInDict(
+ build_file_data, PHASE_EARLY, variables, build_file_path)
+
+ # Since some toolsets might have been defined conditionally, perform
+ # a second round of toolsets expansion now.
+ ProcessToolsetsInDict(build_file_data)
+
+ # Look at each project's target_defaults dict, and merge settings into
+ # targets.
+ if 'target_defaults' in build_file_data:
+ if 'targets' not in build_file_data:
+ raise GypError("Unable to find targets in build file %s" %
+ build_file_path)
+
+ index = 0
+ while index < len(build_file_data['targets']):
+ # This procedure needs to give the impression that target_defaults is
+ # used as defaults, and the individual targets inherit from that.
+ # The individual targets need to be merged into the defaults. Make
+ # a deep copy of the defaults for each target, merge the target dict
+ # as found in the input file into that copy, and then hook up the
+ # copy with the target-specific data merged into it as the replacement
+ # target dict.
+ old_target_dict = build_file_data['targets'][index]
+ new_target_dict = gyp.simple_copy.deepcopy(
+ build_file_data['target_defaults'])
+ MergeDicts(new_target_dict, old_target_dict,
+ build_file_path, build_file_path)
+ build_file_data['targets'][index] = new_target_dict
+ index += 1
+
+ # No longer needed.
+ del build_file_data['target_defaults']
+
+ # Look for dependencies. This means that dependency resolution occurs
+ # after "pre" conditionals and variable expansion, but before "post" -
+ # in other words, you can't put a "dependencies" section inside a "post"
+ # conditional within a target.
+
+ dependencies = []
+ if 'targets' in build_file_data:
+ for target_dict in build_file_data['targets']:
+ if 'dependencies' not in target_dict:
+ continue
+ for dependency in target_dict['dependencies']:
+ dependencies.append(
+ gyp.common.ResolveTarget(build_file_path, dependency, None)[0])
+
+ if load_dependencies:
+ for dependency in dependencies:
+ try:
+ LoadTargetBuildFile(dependency, data, aux_data, variables,
+ includes, depth, check, load_dependencies)
+ except Exception as e:
+ gyp.common.ExceptionAppend(
+ e, 'while loading dependencies of %s' % build_file_path)
+ raise
+ else:
+ return (build_file_path, dependencies)
+
+def CallLoadTargetBuildFile(global_flags,
+ build_file_path, variables,
+ includes, depth, check,
+ generator_input_info):
+ """Wrapper around LoadTargetBuildFile for parallel processing.
+
+ This wrapper is used when LoadTargetBuildFile is executed in
+ a worker process.
+ """
+
+ try:
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ # Apply globals so that the worker process behaves the same.
+ for key, value in global_flags.items():
+ globals()[key] = value
+
+ SetGeneratorGlobals(generator_input_info)
+ result = LoadTargetBuildFile(build_file_path, per_process_data,
+ per_process_aux_data, variables,
+ includes, depth, check, False)
+ if not result:
+ return result
+
+ (build_file_path, dependencies) = result
+
+ # We can safely pop the build_file_data from per_process_data because it
+ # will never be referenced by this process again, so we don't need to keep
+ # it in the cache.
+ build_file_data = per_process_data.pop(build_file_path)
+
+ # This gets serialized and sent back to the main process via a pipe.
+ # It's handled in LoadTargetBuildFileCallback.
+ return (build_file_path,
+ build_file_data,
+ dependencies)
+ except GypError as e:
+ sys.stderr.write("gyp: %s\n" % e)
+ return None
+ except Exception as e:
+ print('Exception:', e, file=sys.stderr)
+ print(traceback.format_exc(), file=sys.stderr)
+ return None
+
+
+class ParallelProcessingError(Exception):
+ pass
+
+
+class ParallelState(object):
+ """Class to keep track of state when processing input files in parallel.
+
+ If build files are loaded in parallel, use this to keep track of
+ state during farming out and processing parallel jobs. It's stored
+ in a global so that the callback function can have access to it.
+ """
+
+ def __init__(self):
+ # The multiprocessing pool.
+ self.pool = None
+ # The condition variable used to protect this object and notify
+ # the main loop when there might be more data to process.
+ self.condition = None
+ # The "data" dict that was passed to LoadTargetBuildFileParallel
+ self.data = None
+ # The number of parallel calls outstanding; decremented when a response
+ # was received.
+ self.pending = 0
+ # The set of all build files that have been scheduled, so we don't
+ # schedule the same one twice.
+ self.scheduled = set()
+ # A list of dependency build file paths that haven't been scheduled yet.
+ self.dependencies = []
+ # Flag to indicate if there was an error in a child process.
+ self.error = False
+
+ def LoadTargetBuildFileCallback(self, result):
+ """Handle the results of running LoadTargetBuildFile in another process.
+ """
+ self.condition.acquire()
+ if not result:
+ self.error = True
+ self.condition.notify()
+ self.condition.release()
+ return
+ (build_file_path0, build_file_data0, dependencies0) = result
+ self.data[build_file_path0] = build_file_data0
+ self.data['target_build_files'].add(build_file_path0)
+ for new_dependency in dependencies0:
+ if new_dependency not in self.scheduled:
+ self.scheduled.add(new_dependency)
+ self.dependencies.append(new_dependency)
+ self.pending -= 1
+ self.condition.notify()
+ self.condition.release()
+
+
+def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
+ check, generator_input_info):
+ parallel_state = ParallelState()
+ parallel_state.condition = threading.Condition()
+ # Make copies of the build_files argument that we can modify while working.
+ parallel_state.dependencies = list(build_files)
+ parallel_state.scheduled = set(build_files)
+ parallel_state.pending = 0
+ parallel_state.data = data
+
+ try:
+ parallel_state.condition.acquire()
+ while parallel_state.dependencies or parallel_state.pending:
+ if parallel_state.error:
+ break
+ if not parallel_state.dependencies:
+ parallel_state.condition.wait()
+ continue
+
+ dependency = parallel_state.dependencies.pop()
+
+ parallel_state.pending += 1
+ global_flags = {
+ 'path_sections': globals()['path_sections'],
+ 'non_configuration_keys': globals()['non_configuration_keys'],
+ 'multiple_toolsets': globals()['multiple_toolsets']}
+
+ if not parallel_state.pool:
+ parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
+ parallel_state.pool.apply_async(
+ CallLoadTargetBuildFile,
+ args = (global_flags, dependency,
+ variables, includes, depth, check, generator_input_info),
+ callback = parallel_state.LoadTargetBuildFileCallback)
+ except KeyboardInterrupt as e:
+ parallel_state.pool.terminate()
+ raise e
+
+ parallel_state.condition.release()
+
+ parallel_state.pool.close()
+ parallel_state.pool.join()
+ parallel_state.pool = None
+
+ if parallel_state.error:
+ sys.exit(1)
+
+# Look for the bracket that matches the first bracket seen in a
+# string, and return the start and end as a tuple. For example, if
+# the input is something like "<(foo <(bar)) blah", then it would
+# return (1, 13), indicating the entire string except for the leading
+# "<" and trailing " blah".
+LBRACKETS= set('{[(')
+BRACKETS = {'}': '{', ']': '[', ')': '('}
+def FindEnclosingBracketGroup(input_str):
+ stack = []
+ start = -1
+ for index, char in enumerate(input_str):
+ if char in LBRACKETS:
+ stack.append(char)
+ if start == -1:
+ start = index
+ elif char in BRACKETS:
+ if not stack:
+ return (-1, -1)
+ if stack.pop() != BRACKETS[char]:
+ return (-1, -1)
+ if not stack:
+ return (start, index + 1)
+ return (-1, -1)
+
+
+def IsStrCanonicalInt(string):
+ """Returns True if |string| is in its canonical integer form.
+
+ The canonical form is such that str(int(string)) == string.
+ """
+ if isinstance(string, _str_types):
+ # This function is called a lot so for maximum performance, avoid
+ # involving regexps which would otherwise make the code much
+ # shorter. Regexps would need twice the time of this function.
+ if string:
+ if string == "0":
+ return True
+ if string[0] == "-":
+ string = string[1:]
+ if not string:
+ return False
+ if '1' <= string[0] <= '9':
+ return string.isdigit()
+
+ return False
+
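+# For example, '10', '-5' and '0' are canonical integers, while '012', '+7',
+# '-0' and '1.5' are not.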
+
+# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
+# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
+# In the last case, the inner "<()" is captured in match['content'].
+early_variable_re = re.compile(
+ r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
+ r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
+ r'\((?P<is_array>\s*\[?)'
+ r'(?P<content>.*?)(\]?)\))')
+
+# This matches the same as early_variable_re, but with '>' instead of '<'.
+late_variable_re = re.compile(
+ r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
+ r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
+ r'\((?P<is_array>\s*\[?)'
+ r'(?P<content>.*?)(\]?)\))')
+
+# This matches the same as early_variable_re, but with '^' instead of '<'.
+latelate_variable_re = re.compile(
+ r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
+ r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
+ r'\((?P<is_array>\s*\[?)'
+ r'(?P<content>.*?)(\]?)\))')
+
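+# As an illustration of the named groups (hypothetical inputs):
+#   '<!pymod_do_main(foo bar)' -> type='<!', command_string='pymod_do_main',
+#                                 is_array='', content='foo bar'
+#   '<@(_sources)'             -> type='<@', command_string=None,
+#                                 is_array='', content='_sources'
+#   '<|(files.txt a b)'        -> type='<|', command_string=None,
+#                                 is_array='', content='files.txt a b'
+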
+# Global cache of results from running commands so they don't have to be run
+# more than once.
+cached_command_results = {}
+
+
+def FixupPlatformCommand(cmd):
+ if sys.platform == 'win32':
+ if type(cmd) is list:
+ cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
+ else:
+ cmd = re.sub('^cat ', 'type ', cmd)
+ return cmd
+
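+# For instance, a (hypothetical) command 'cat files.txt' is rewritten to
+# 'type files.txt' when gyp runs on win32, and left untouched elsewhere.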
+
+PHASE_EARLY = 0
+PHASE_LATE = 1
+PHASE_LATELATE = 2
+
+
+def ExpandVariables(input, phase, variables, build_file):
+ # Look for the pattern that gets expanded into variables
+ if phase == PHASE_EARLY:
+ variable_re = early_variable_re
+ expansion_symbol = '<'
+ elif phase == PHASE_LATE:
+ variable_re = late_variable_re
+ expansion_symbol = '>'
+ elif phase == PHASE_LATELATE:
+ variable_re = latelate_variable_re
+ expansion_symbol = '^'
+ else:
+ assert False
+
+ input_str = str(input)
+ if IsStrCanonicalInt(input_str):
+ return int(input_str)
+
+ # Do a quick scan to determine if an expensive regex search is warranted.
+ if expansion_symbol not in input_str:
+ return input_str
+
+ # Get the entire list of matches as a list of MatchObject instances.
+ # (using findall here would return strings instead of MatchObjects).
+ matches = list(variable_re.finditer(input_str))
+ if not matches:
+ return input_str
+
+ output = input_str
+ # Reverse the list of matches so that replacements are done right-to-left.
+ # That ensures that earlier replacements won't mess up the string in a
+ # way that causes later calls to find the earlier substituted text instead
+ # of what's intended for replacement.
+ matches.reverse()
+ for match_group in matches:
+ match = match_group.groupdict()
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
+ # match['replace'] is the substring to look for, match['type']
+ # is the character code for the replacement type (< > <! >! <| >| <@
+ # >@ <!@ >!@), match['is_array'] contains a '[' for command
+ # arrays, and match['content'] is the name of the variable (< >)
+ # or command to run (<! >!). match['command_string'] is an optional
+ # command string. Currently, only 'pymod_do_main' is supported.
+
+ # run_command is true if a ! variant is used.
+ run_command = '!' in match['type']
+ command_string = match['command_string']
+
+ # file_list is true if a | variant is used.
+ file_list = '|' in match['type']
+
+ # Capture these now so we can adjust them later.
+ replace_start = match_group.start('replace')
+ replace_end = match_group.end('replace')
+
+ # Find the ending paren, and re-evaluate the contained string.
+ (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
+
+ # Adjust the replacement range to match the entire command
+ # found by FindEnclosingBracketGroup (since the variable_re
+ # probably doesn't match the entire command if it contained
+ # nested variables).
+ replace_end = replace_start + c_end
+
+ # Find the "real" replacement, matching the appropriate closing
+ # paren, and adjust the replacement start and end.
+ replacement = input_str[replace_start:replace_end]
+
+ # Figure out what the contents of the variable parens are.
+ contents_start = replace_start + c_start + 1
+ contents_end = replace_end - 1
+ contents = input_str[contents_start:contents_end]
+
+ # Do filter substitution now for <|().
+ # Admittedly, this is different than the evaluation order in other
+ # contexts. However, since filtration has no chance to run on <|(),
+ # this seems like the only obvious way to give them access to filters.
+ if file_list:
+ processed_variables = gyp.simple_copy.deepcopy(variables)
+ ProcessListFiltersInDict(contents, processed_variables)
+ # Recurse to expand variables in the contents
+ contents = ExpandVariables(contents, phase,
+ processed_variables, build_file)
+ else:
+ # Recurse to expand variables in the contents
+ contents = ExpandVariables(contents, phase, variables, build_file)
+
+ # Strip off leading/trailing whitespace so that variable matches are
+ # simpler below (and because they are rarely needed).
+ contents = contents.strip()
+
+ # expand_to_list is true if an @ variant is used. In that case,
+ # the expansion should result in a list. Note that the caller
+ # is expected to handle a list in return, though not all callers do,
+ # because not all are working in list context. Also, for list
+ # expansions, there can be no other text besides the variable
+ # expansion in the input string.
+ expand_to_list = '@' in match['type'] and input_str == replacement
+
+ if run_command or file_list:
+ # Find the build file's directory, so commands can be run or file lists
+ # generated relative to it.
+ build_file_dir = os.path.dirname(build_file)
+ if build_file_dir == '' and not file_list:
+ # If build_file is just a leaf filename indicating a file in the
+ # current directory, build_file_dir might be an empty string. Set
+ # it to None to signal to subprocess.Popen that it should run the
+ # command in the current directory.
+ build_file_dir = None
+
+ # Support <|(listfile.txt ...) which generates a file
+ # containing items from a gyp list, generated at gyp time.
+ # This works around actions/rules which have more inputs than will
+ # fit on the command line.
+ if file_list:
+ if type(contents) is list:
+ contents_list = contents
+ else:
+ contents_list = contents.split(' ')
+ replacement = contents_list[0]
+ if os.path.isabs(replacement):
+ raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
+
+ if not generator_filelist_paths:
+ path = os.path.join(build_file_dir, replacement)
+ else:
+ if os.path.isabs(build_file_dir):
+ toplevel = generator_filelist_paths['toplevel']
+ rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
+ else:
+ rel_build_file_dir = build_file_dir
+ qualified_out_dir = generator_filelist_paths['qualified_out_dir']
+ path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
+ gyp.common.EnsureDirExists(path)
+
+ replacement = gyp.common.RelativePath(path, build_file_dir)
+ f = gyp.common.WriteOnDiff(path)
+ for i in contents_list[1:]:
+ f.write('%s\n' % i)
+ f.close()
+
+ elif run_command:
+ use_shell = True
+ if match['is_array']:
+ contents = eval(contents)
+ use_shell = False
+
+ # Check for a cached value to avoid executing commands, or generating
+ # file lists more than once. The cache key contains the command to be
+ # run as well as the directory to run it from, to account for commands
+ # that depend on their current directory.
+ # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
+ # someone could author a set of GYP files where each time the command
+ # is invoked it produces different output by design. When the need
+ # arises, the syntax should be extended to support disabling caching of
+ # a command's output so that it is run every time.
+ cache_key = (str(contents), build_file_dir)
+ cached_value = cached_command_results.get(cache_key, None)
+ if cached_value is None:
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES,
+ "Executing command '%s' in directory '%s'",
+ contents, build_file_dir)
+
+ replacement = ''
+
+ if command_string == 'pymod_do_main':
+ # <!pymod_do_main(modulename param eters) loads |modulename| as a
+ # python module and then calls that module's DoMain() function,
+ # passing ["param", "eters"] as a single list argument. For modules
+ # that don't load quickly, this can be faster than
+ # <!(python modulename param eters). Do this in |build_file_dir|.
+ oldwd = os.getcwd() # Python doesn't like os.open('.'): no fchdir.
+ if build_file_dir: # build_file_dir may be None (see above).
+ os.chdir(build_file_dir)
+ try:
+
+ parsed_contents = shlex.split(contents)
+ try:
+ py_module = __import__(parsed_contents[0])
+ except ImportError as e:
+ raise GypError("Error importing pymod_do_main"
+ "module (%s): %s" % (parsed_contents[0], e))
+ replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
+ finally:
+ os.chdir(oldwd)
+ assert replacement != None
+ elif command_string:
+ raise GypError("Unknown command string '%s' in '%s'." %
+ (command_string, contents))
+ else:
+ # Fix up command with platform specific workarounds.
+ contents = FixupPlatformCommand(contents)
+ try:
+ p = subprocess.Popen(contents, shell=use_shell,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ stdin=subprocess.PIPE,
+ cwd=build_file_dir)
+ except Exception as e:
+ raise GypError("%s while executing command '%s' in %s" %
+ (e, contents, build_file))
+
+ p_stdout, p_stderr = p.communicate('')
+
+ if p.wait() != 0 or p_stderr:
+ p_stderr_decoded = p_stderr.decode('utf-8')
+ sys.stderr.write(p_stderr_decoded)
+ # Simulate check_call behavior, since check_call only exists
+ # in python 2.5 and later.
+ raise GypError("Call to '%s' returned exit status %d while in %s." %
+ (contents, p.returncode, build_file))
+ replacement = p_stdout.decode('utf-8').rstrip()
+
+ cached_command_results[cache_key] = replacement
+ else:
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES,
+ "Had cache value for command '%s' in directory '%s'",
+ contents,build_file_dir)
+ replacement = cached_value
+
+ else:
+ if not contents in variables:
+ if contents[-1] in ['!', '/']:
+ # In order to allow cross-compiles (nacl) to happen more naturally,
+ # we will allow references to >(sources/) etc. to resolve to
+ # an empty list if undefined. This allows actions to:
+ # 'action!': [
+ # '>@(_sources!)',
+ # ],
+ # 'action/': [
+ # '>@(_sources/)',
+ # ],
+ replacement = []
+ else:
+ raise GypError('Undefined variable ' + contents +
+ ' in ' + build_file)
+ else:
+ replacement = variables[contents]
+
+ if type(replacement) is list:
+ for item in replacement:
+ if not contents[-1] == '/' and not isinstance(item, _str_int_types):
+ raise GypError('Variable ' + contents +
+ ' must expand to a string or list of strings; ' +
+ 'list contains a ' +
+ item.__class__.__name__)
+ # Run through the list and handle variable expansions in it. Since
+ # the list is guaranteed not to contain dicts, this won't do anything
+ # with conditions sections.
+ ProcessVariablesAndConditionsInList(replacement, phase, variables,
+ build_file)
+ elif not isinstance(replacement, _str_int_types):
+ raise GypError('Variable ' + str(contents) +
+ ' must expand to a string or list of strings; ' +
+ 'found a ' + replacement.__class__.__name__)
+
+ if expand_to_list:
+ # Expanding in list context. It's guaranteed that there's only one
+ # replacement to do in |input_str| and that it's this replacement. See
+ # above.
+ if type(replacement) is list:
+ # If it's already a list, make a copy.
+ output = replacement[:]
+ else:
+ # Split it the same way sh would split arguments.
+ output = shlex.split(str(replacement))
+ else:
+ # Expanding in string context.
+ encoded_replacement = ''
+ if type(replacement) is list:
+ # When expanding a list into string context, turn the list items
+ # into a string in a way that will work with a subprocess call.
+ #
+ # TODO(mark): This isn't completely correct. This should
+ # call a generator-provided function that observes the
+ # proper list-to-argument quoting rules on a specific
+ # platform instead of just calling the POSIX encoding
+ # routine.
+ encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
+ else:
+ encoded_replacement = replacement
+
+ output = output[:replace_start] + str(encoded_replacement) + \
+ output[replace_end:]
+ # Prepare for the next match iteration.
+ input_str = output
+
+ if output == input:
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES,
+ "Found only identity matches on %r, avoiding infinite "
+ "recursion.",
+ output)
+ else:
+ # Look for more matches now that we've replaced some, to deal with
+ # expanding local variables (variables defined in the same
+ # variables block as this one).
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
+ if type(output) is list:
+ if output and type(output[0]) is list:
+ # Leave output alone if it's a list of lists.
+ # We don't want such lists to be stringified.
+ pass
+ else:
+ new_output = []
+ for item in output:
+ new_output.append(
+ ExpandVariables(item, phase, variables, build_file))
+ output = new_output
+ else:
+ output = ExpandVariables(output, phase, variables, build_file)
+
+ # Convert all strings that are canonically-represented integers into integers.
+ if type(output) is list:
+ for index, outstr in enumerate(output):
+ if IsStrCanonicalInt(outstr):
+ output[index] = int(outstr)
+ elif IsStrCanonicalInt(output):
+ output = int(output)
+
+ return output
+
+# The same condition is often evaluated over and over again so it
+# makes sense to cache as much as possible between evaluations.
+cached_conditions_asts = {}
+
+def EvalCondition(condition, conditions_key, phase, variables, build_file):
+ """Returns the dict that should be used or None if the result was
+ that nothing should be used."""
+ if type(condition) is not list:
+ raise GypError(conditions_key + ' must be a list')
+ if len(condition) < 2:
+ # It's possible that condition[0] won't work in which case this
+ # attempt will raise its own IndexError. That's probably fine.
+ raise GypError(conditions_key + ' ' + condition[0] +
+ ' must be at least length 2, not ' + str(len(condition)))
+
+ i = 0
+ result = None
+ while i < len(condition):
+ cond_expr = condition[i]
+ true_dict = condition[i + 1]
+ if type(true_dict) is not dict:
+ raise GypError('{} {} must be followed by a dictionary, not {}'.format(
+ conditions_key, cond_expr, type(true_dict)))
+ if len(condition) > i + 2 and type(condition[i + 2]) is dict:
+ false_dict = condition[i + 2]
+ i = i + 3
+ if i != len(condition):
+ raise GypError('{} {} has {} unexpected trailing items'.format(
+ conditions_key, cond_expr, len(condition) - i))
+ else:
+ false_dict = None
+ i = i + 2
+ if result == None:
+ result = EvalSingleCondition(
+ cond_expr, true_dict, false_dict, phase, variables, build_file)
+
+ return result
+
+
+def EvalSingleCondition(
+ cond_expr, true_dict, false_dict, phase, variables, build_file):
+ """Returns true_dict if cond_expr evaluates to true, and false_dict
+ otherwise."""
+ # Do expansions on the condition itself. Since the condition can naturally
+ # contain variable references without needing to resort to GYP expansion
+ # syntax, this is of dubious value for variables, but someone might want to
+ # use a command expansion directly inside a condition.
+ cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
+ build_file)
+ if not isinstance(cond_expr_expanded, _str_int_types):
+ raise ValueError(
+ 'Variable expansion in this context permits str and int ' + \
+ 'only, found ' + cond_expr_expanded.__class__.__name__)
+
+ try:
+ if cond_expr_expanded in cached_conditions_asts:
+ ast_code = cached_conditions_asts[cond_expr_expanded]
+ else:
+ ast_code = compile(cond_expr_expanded, '<string>', 'eval')
+ cached_conditions_asts[cond_expr_expanded] = ast_code
+ if eval(ast_code, {'__builtins__': None}, variables):
+ return true_dict
+ return false_dict
+ except SyntaxError as e:
+ syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
+ 'at character %d.' %
+ (str(e.args[0]), e.text, build_file, e.offset),
+ e.filename, e.lineno, e.offset, e.text)
+ raise syntax_error
+ except NameError as e:
+ gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
+ (cond_expr_expanded, build_file))
+ raise GypError(e)
+
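+# A typical 'conditions' entry handled by the helpers above looks like
+# (hypothetical dicts):
+#   ['OS=="mac"', {'defines': ['IS_MAC']}, {'defines': ['NOT_MAC']}]
+# The first dict is merged in when the expression evaluates to true, the
+# optional trailing dict when it evaluates to false. Longer lists such as
+#   ['OS=="mac"', {...}, 'OS=="win"', {...}, {...}]
+# behave like an if/elif/else chain.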
+
+def ProcessConditionsInDict(the_dict, phase, variables, build_file):
+ # Process a 'conditions' or 'target_conditions' section in the_dict,
+ # depending on phase.
+ # early -> conditions
+ # late -> target_conditions
+ # latelate -> no conditions
+ #
+ # Each item in a conditions list consists of cond_expr, a string expression
+ # evaluated as the condition, and true_dict, a dict that will be merged into
+ # the_dict if cond_expr evaluates to true. Optionally, a third item,
+ # false_dict, may be present. false_dict is merged into the_dict if
+ # cond_expr evaluates to false.
+ #
+ # Any dict merged into the_dict will be recursively processed for nested
+ # conditionals and other expansions, also according to phase, immediately
+ # prior to being merged.
+
+ if phase == PHASE_EARLY:
+ conditions_key = 'conditions'
+ elif phase == PHASE_LATE:
+ conditions_key = 'target_conditions'
+ elif phase == PHASE_LATELATE:
+ return
+ else:
+ assert False
+
+ if not conditions_key in the_dict:
+ return
+
+ conditions_list = the_dict[conditions_key]
+ # Unhook the conditions list, it's no longer needed.
+ del the_dict[conditions_key]
+
+ for condition in conditions_list:
+ merge_dict = EvalCondition(condition, conditions_key, phase, variables,
+ build_file)
+
+ if merge_dict != None:
+ # Expand variables and nested conditionals in the merge_dict before
+ # merging it.
+ ProcessVariablesAndConditionsInDict(merge_dict, phase,
+ variables, build_file)
+
+ MergeDicts(the_dict, merge_dict, build_file, build_file)
+
+
+def LoadAutomaticVariablesFromDict(variables, the_dict):
+ # Any keys with plain string values in the_dict become automatic variables.
+ # The variable name is the key name with a "_" character prepended.
+ for key, value in the_dict.items():
+ if isinstance(value, _str_int_list_types):
+ variables['_' + key] = value
+
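+# For example, a target dict containing 'target_name': 'foo' and
+# 'sources': ['a.cc'] makes the automatic variables '_target_name' and
+# '_sources' available to expansions such as '<(_target_name)'.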
+
+def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
+ # Any keys in the_dict's "variables" dict, if it has one, becomes a
+ # variable. The variable name is the key name in the "variables" dict.
+ # Variables that end with the % character are set only if they are unset in
+ # the variables dict. the_dict_key is the name of the key that accesses
+ # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
+ # (it could be a list or it could be parentless because it is a root dict),
+ # the_dict_key will be None.
+ for key, value in the_dict.get('variables', {}).items():
+ if not isinstance(value, _str_int_list_types):
+ continue
+
+ if key.endswith('%'):
+ variable_name = key[:-1]
+ if variable_name in variables:
+ # If the variable is already set, don't set it.
+ continue
+ if the_dict_key == 'variables' and variable_name in the_dict:
+ # If the variable is set without a % in the_dict, and the_dict is a
+ # variables dict (making |variables| a variables sub-dict of a
+ # variables dict), use the_dict's definition.
+ value = the_dict[variable_name]
+ else:
+ variable_name = key
+
+ variables[variable_name] = value
+
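+# For example, a (hypothetical) block such as
+#   'variables': {'enable_feature%': 0}
+# only sets 'enable_feature' to 0 when it has not been defined already (for
+# instance by an enclosing scope); without the trailing '%' the value would
+# overwrite any existing definition.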
+
+def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
+ build_file, the_dict_key=None):
+ """Handle all variable and command expansion and conditional evaluation.
+
+ This function is the public entry point for all variable expansions and
+ conditional evaluations. The variables_in dictionary will not be modified
+ by this function.
+ """
+
+ # Make a copy of the variables_in dict that can be modified during the
+ # loading of automatics and the loading of the variables dict.
+ variables = variables_in.copy()
+ LoadAutomaticVariablesFromDict(variables, the_dict)
+
+ if 'variables' in the_dict:
+ # Make sure all the local variables are added to the variables
+ # list before we process them so that you can reference one
+ # variable from another. They will be fully expanded by recursion
+ # in ExpandVariables.
+ for key, value in the_dict['variables'].items():
+ variables[key] = value
+
+ # Handle the associated variables dict first, so that any variable
+ # references within can be resolved prior to using them as variables.
+ # Pass a copy of the variables dict to avoid having it be tainted.
+ # Otherwise, it would have extra automatics added for everything that
+ # should just be an ordinary variable in this scope.
+ ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
+ variables, build_file, 'variables')
+
+ LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+ for key, value in the_dict.items():
+ # Skip "variables", which was already processed if present.
+ if key != 'variables' and isinstance(value, _str_types):
+ expanded = ExpandVariables(value, phase, variables, build_file)
+ if not isinstance(expanded, _str_int_types):
+ raise ValueError(
+ 'Variable expansion in this context permits str and int ' + \
+ 'only, found ' + expanded.__class__.__name__ + ' for ' + key)
+ the_dict[key] = expanded
+
+ # Variable expansion may have resulted in changes to automatics. Reload.
+ # TODO(mark): Optimization: only reload if no changes were made.
+ variables = variables_in.copy()
+ LoadAutomaticVariablesFromDict(variables, the_dict)
+ LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+ # Process conditions in this dict. This is done after variable expansion
+ # so that conditions may take advantage of expanded variables. For example,
+ # if the_dict contains:
+ # {'type': '<(library_type)',
+ # 'conditions': [['_type=="static_library"', { ... }]]},
+ # _type, as used in the condition, will only be set to the value of
+ # library_type if variable expansion is performed before condition
+ # processing. However, condition processing should occur prior to recursion
+ # so that variables (both automatic and "variables" dict type) may be
+ # adjusted by conditions sections, merged into the_dict, and have the
+ # intended impact on contained dicts.
+ #
+ # This arrangement means that a "conditions" section containing a "variables"
+ # section will only have those variables effective in subdicts, not in
+ # the_dict. The workaround is to put a "conditions" section within a
+ # "variables" section. For example:
+ # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
+ # 'defines': ['<(define)'],
+ # 'my_subdict': {'defines': ['<(define)']}},
+ # will not result in "IS_MAC" being appended to the "defines" list in the
+ # current scope but would result in it being appended to the "defines" list
+ # within "my_subdict". By comparison:
+ # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
+ # 'defines': ['<(define)'],
+ # 'my_subdict': {'defines': ['<(define)']}},
+ # will append "IS_MAC" to both "defines" lists.
+
+ # Evaluate conditions sections, allowing variable expansions within them
+ # as well as nested conditionals. This will process a 'conditions' or
+ # 'target_conditions' section, perform appropriate merging and recursive
+ # conditional and variable processing, and then remove the conditions section
+ # from the_dict if it is present.
+ ProcessConditionsInDict(the_dict, phase, variables, build_file)
+
+ # Conditional processing may have resulted in changes to automatics or the
+ # variables dict. Reload.
+ variables = variables_in.copy()
+ LoadAutomaticVariablesFromDict(variables, the_dict)
+ LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+ # Recurse into child dicts, or process child lists which may result in
+ # further recursion into descendant dicts.
+ for key, value in the_dict.items():
+ # Skip "variables" and string values, which were already processed if
+ # present.
+ if key == 'variables' or isinstance(value, _str_types):
+ continue
+ if type(value) is dict:
+ # Pass a copy of the variables dict so that subdicts can't influence
+ # parents.
+ ProcessVariablesAndConditionsInDict(value, phase, variables,
+ build_file, key)
+ elif type(value) is list:
+ # The list itself can't influence the variables dict, and
+ # ProcessVariablesAndConditionsInList will make copies of the variables
+ # dict if it needs to pass it to something that can influence it. No
+ # copy is necessary here.
+ ProcessVariablesAndConditionsInList(value, phase, variables,
+ build_file)
+ elif not isinstance(value, _int_types):
+ raise TypeError('Unknown type ' + value.__class__.__name__ + \
+ ' for ' + key)
+
+
+def ProcessVariablesAndConditionsInList(the_list, phase, variables,
+ build_file):
+ # Iterate using an index so that new values can be assigned into the_list.
+ index = 0
+ while index < len(the_list):
+ item = the_list[index]
+ if type(item) is dict:
+ # Make a copy of the variables dict so that it won't influence anything
+ # outside of its own scope.
+ ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
+ elif type(item) is list:
+ ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
+ elif isinstance(item, _str_types):
+ expanded = ExpandVariables(item, phase, variables, build_file)
+ if isinstance(expanded, _str_int_types):
+ the_list[index] = expanded
+ elif type(expanded) is list:
+ the_list[index:index+1] = expanded
+ index += len(expanded)
+
+ # index now identifies the next item to examine. Continue right now
+ # without falling into the index increment below.
+ continue
+ else:
+ raise ValueError(
+ 'Variable expansion in this context permits strings and ' + \
+ 'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
+ str(index))
+ elif not isinstance(item, _int_types):
+ raise TypeError('Unknown type ' + item.__class__.__name__ + \
+ ' at index ' + str(index))
+ index = index + 1
+
+
+def BuildTargetsDict(data):
+ """Builds a dict mapping fully-qualified target names to their target dicts.
+
+ |data| is a dict mapping loaded build files by pathname relative to the
+ current directory. Values in |data| are build file contents. For each
+ |data| value with a "targets" key, the value of the "targets" key is taken
+ as a list containing target dicts. Each target's fully-qualified name is
+ constructed from the pathname of the build file (|data| key) and its
+ "target_name" property. These fully-qualified names are used as the keys
+ in the returned dict. These keys provide access to the target dicts,
+ the dicts in the "targets" lists.
+ """
+
+ targets = {}
+ for build_file in data['target_build_files']:
+ for target in data[build_file].get('targets', []):
+ target_name = gyp.common.QualifiedTarget(build_file,
+ target['target_name'],
+ target['toolset'])
+ if target_name in targets:
+ raise GypError('Duplicate target definitions for ' + target_name)
+ targets[target_name] = target
+
+ return targets
+
+
+def QualifyDependencies(targets):
+ """Make dependency links fully-qualified relative to the current directory.
+
+ |targets| is a dict mapping fully-qualified target names to their target
+ dicts. For each target in this dict, keys known to contain dependency
+ links are examined, and any dependencies referenced will be rewritten
+ so that they are fully-qualified and relative to the current directory.
+ All rewritten dependencies are suitable for use as keys to |targets| or a
+ similar dict.
+ """
+
+ all_dependency_sections = [dep + op
+ for dep in dependency_sections
+ for op in ('', '!', '/')]
+
+ for target, target_dict in targets.items():
+ target_build_file = gyp.common.BuildFile(target)
+ toolset = target_dict['toolset']
+ for dependency_key in all_dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ for index, dep in enumerate(dependencies):
+ dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
+ target_build_file, dep, toolset)
+ if not multiple_toolsets:
+ # Ignore toolset specification in the dependency if it is specified.
+ dep_toolset = toolset
+ dependency = gyp.common.QualifiedTarget(dep_file,
+ dep_target,
+ dep_toolset)
+ dependencies[index] = dependency
+
+ # Make sure anything appearing in a list other than "dependencies" also
+ # appears in the "dependencies" list.
+ if dependency_key != 'dependencies' and \
+ dependency not in target_dict['dependencies']:
+ raise GypError('Found ' + dependency + ' in ' + dependency_key +
+ ' of ' + target + ', but not in dependencies')
+
+
+def ExpandWildcardDependencies(targets, data):
+ """Expands dependencies specified as build_file:*.
+
+ For each target in |targets|, examines sections containing links to other
+ targets. If any such section contains a link of the form build_file:*, it
+ is taken as a wildcard link, and is expanded to list each target in
+ build_file. The |data| dict provides access to build file dicts.
+
+ Any target that does not wish to be included by wildcard can provide an
+ optional "suppress_wildcard" key in its target dict. When present and
+ true, a wildcard dependency link will not include such targets.
+
+ All dependency names, including the keys to |targets| and the values in each
+ dependency list, must be qualified when this function is called.
+ """
+
+ for target, target_dict in targets.items():
+ toolset = target_dict['toolset']
+ target_build_file = gyp.common.BuildFile(target)
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+
+ # Loop this way instead of "for dependency in" or "for index in xrange"
+ # because the dependencies list will be modified within the loop body.
+ index = 0
+ while index < len(dependencies):
+ (dependency_build_file, dependency_target, dependency_toolset) = \
+ gyp.common.ParseQualifiedTarget(dependencies[index])
+ if dependency_target != '*' and dependency_toolset != '*':
+ # Not a wildcard. Keep it moving.
+ index = index + 1
+ continue
+
+ if dependency_build_file == target_build_file:
+ # It's an error for a target to depend on all other targets in
+ # the same file, because a target cannot depend on itself.
+ raise GypError('Found wildcard in ' + dependency_key + ' of ' +
+ target + ' referring to same build file')
+
+ # Take the wildcard out and adjust the index so that the next
+ # dependency in the list will be processed the next time through the
+ # loop.
+ del dependencies[index]
+ index = index - 1
+
+ # Loop through the targets in the other build file, adding them to
+ # this target's list of dependencies in place of the removed
+ # wildcard.
+ dependency_target_dicts = data[dependency_build_file]['targets']
+ for dependency_target_dict in dependency_target_dicts:
+ if int(dependency_target_dict.get('suppress_wildcard', False)):
+ continue
+ dependency_target_name = dependency_target_dict['target_name']
+ if (dependency_target != '*' and
+ dependency_target != dependency_target_name):
+ continue
+ dependency_target_toolset = dependency_target_dict['toolset']
+ if (dependency_toolset != '*' and
+ dependency_toolset != dependency_target_toolset):
+ continue
+ dependency = gyp.common.QualifiedTarget(dependency_build_file,
+ dependency_target_name,
+ dependency_target_toolset)
+ index = index + 1
+ dependencies.insert(index, dependency)
+
+ index = index + 1
+
+
+def Unify(l):
+ """Removes duplicate elements from l, keeping the first element."""
+ seen = {}
+ return [seen.setdefault(e, e) for e in l if e not in seen]
+
+
+def RemoveDuplicateDependencies(targets):
+ """Makes sure every dependency appears only once in all targets's dependency
+ lists."""
+ for target_name, target_dict in targets.items():
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ if dependencies:
+ target_dict[dependency_key] = Unify(dependencies)
+
+
+def Filter(l, item):
+ """Removes item from l."""
+ res = {}
+ return [res.setdefault(e, e) for e in l if e != item]
+
+
+def RemoveSelfDependencies(targets):
+ """Remove self dependencies from targets that have the prune_self_dependency
+ variable set."""
+ for target_name, target_dict in targets.items():
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ if dependencies:
+ for t in dependencies:
+ if t == target_name:
+ if targets[t].get('variables', {}).get('prune_self_dependency', 0):
+ target_dict[dependency_key] = Filter(dependencies, target_name)
+
+
+def RemoveLinkDependenciesFromNoneTargets(targets):
+ """Remove dependencies having the 'link_dependency' attribute from the 'none'
+ targets."""
+ for target_name, target_dict in targets.items():
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ if dependencies:
+ for t in dependencies:
+ if target_dict.get('type', None) == 'none':
+ if targets[t].get('variables', {}).get('link_dependency', 0):
+ target_dict[dependency_key] = \
+ Filter(target_dict[dependency_key], t)
+
+
+class DependencyGraphNode(object):
+ """
+
+ Attributes:
+ ref: A reference to an object that this DependencyGraphNode represents.
+ dependencies: List of DependencyGraphNodes on which this one depends.
+ dependents: List of DependencyGraphNodes that depend on this one.
+ """
+
+ class CircularException(GypError):
+ pass
+
+ def __init__(self, ref):
+ self.ref = ref
+ self.dependencies = []
+ self.dependents = []
+
+ def __repr__(self):
+ return '<DependencyGraphNode: %r>' % self.ref
+
+ def FlattenToList(self):
+ # flat_list is the sorted list of dependencies - actually, the list items
+ # are the "ref" attributes of DependencyGraphNodes. Every target will
+ # appear in flat_list after all of its dependencies, and before all of its
+ # dependents.
+ flat_list = OrderedSet()
+
+ def ExtractNodeRef(node):
+ """Extracts the object that the node represents from the given node."""
+ return node.ref
+
+ # in_degree_zeros is the list of DependencyGraphNodes that have no
+ # dependencies not in flat_list. Initially, it is a copy of the children
+ # of this node, because when the graph was built, nodes with no
+ # dependencies were made implicit dependents of the root node.
+ in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef)
+
+ while in_degree_zeros:
+ # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
+ # can be appended to flat_list. Take these nodes out of in_degree_zeros
+ # as work progresses, so that the next node to process from the list can
+ # always be accessed at a consistent position.
+ node = in_degree_zeros.pop()
+ flat_list.add(node.ref)
+
+ # Look at dependents of the node just added to flat_list. Some of them
+ # may now belong in in_degree_zeros.
+ for node_dependent in sorted(node.dependents, key=ExtractNodeRef):
+ is_in_degree_zero = True
+ # TODO: We want to check through the
+ # node_dependent.dependencies list but if it's long and we
+ # always start at the beginning, then we get O(n^2) behaviour.
+ for node_dependent_dependency in (sorted(node_dependent.dependencies,
+ key=ExtractNodeRef)):
+ if not node_dependent_dependency.ref in flat_list:
+            # The dependent has one or more dependencies not in flat_list.
+            # There will be more chances to add it to flat_list when examining
+ # it again as a dependent of those other dependencies, provided
+ # that there are no cycles.
+ is_in_degree_zero = False
+ break
+
+ if is_in_degree_zero:
+ # All of the dependent's dependencies are already in flat_list. Add
+ # it to in_degree_zeros where it will be processed in a future
+ # iteration of the outer loop.
+ in_degree_zeros += [node_dependent]
+
+ return list(flat_list)
+
+ def FindCycles(self):
+ """
+ Returns a list of cycles in the graph, where each cycle is its own list.
+ """
+ results = []
+ visited = set()
+
+ def Visit(node, path):
+ for child in node.dependents:
+ if child in path:
+ results.append([child] + path[:path.index(child) + 1])
+ elif not child in visited:
+ visited.add(child)
+ Visit(child, [child] + path)
+
+ visited.add(self)
+ Visit(self, [self])
+
+ return results
+
+ def DirectDependencies(self, dependencies=None):
+ """Returns a list of just direct dependencies."""
+    if dependencies is None:
+ dependencies = []
+
+ for dependency in self.dependencies:
+ # Check for None, corresponding to the root node.
+      if dependency.ref is not None and dependency.ref not in dependencies:
+ dependencies.append(dependency.ref)
+
+ return dependencies
+
+ def _AddImportedDependencies(self, targets, dependencies=None):
+ """Given a list of direct dependencies, adds indirect dependencies that
+ other dependencies have declared to export their settings.
+
+ This method does not operate on self. Rather, it operates on the list
+ of dependencies in the |dependencies| argument. For each dependency in
+ that list, if any declares that it exports the settings of one of its
+ own dependencies, those dependencies whose settings are "passed through"
+ are added to the list. As new items are added to the list, they too will
+ be processed, so it is possible to import settings through multiple levels
+ of dependencies.
+
+    This method is not terribly useful on its own; it depends on being
+ "primed" with a list of direct dependencies such as one provided by
+ DirectDependencies. DirectAndImportedDependencies is intended to be the
+ public entry point.
+ """
+
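+    # For example, if |dependencies| contains B and B's target dict lists C
+    # in 'export_dependent_settings', C is inserted into the list right
+    # after B (unless already present) and is then examined for its own
+    # exports in turn.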
+    if dependencies is None:
+ dependencies = []
+
+ index = 0
+ while index < len(dependencies):
+ dependency = dependencies[index]
+ dependency_dict = targets[dependency]
+ # Add any dependencies whose settings should be imported to the list
+ # if not already present. Newly-added items will be checked for
+ # their own imports when the list iteration reaches them.
+ # Rather than simply appending new items, insert them after the
+ # dependency that exported them. This is done to more closely match
+ # the depth-first method used by DeepDependencies.
+ add_index = 1
+ for imported_dependency in \
+ dependency_dict.get('export_dependent_settings', []):
+ if imported_dependency not in dependencies:
+ dependencies.insert(index + add_index, imported_dependency)
+ add_index = add_index + 1
+ index = index + 1
+
+ return dependencies
+
+ def DirectAndImportedDependencies(self, targets, dependencies=None):
+ """Returns a list of a target's direct dependencies and all indirect
+ dependencies that a dependency has advertised settings should be exported
+ through the dependency for.
+ """
+
+ dependencies = self.DirectDependencies(dependencies)
+ return self._AddImportedDependencies(targets, dependencies)
+
+ def DeepDependencies(self, dependencies=None):
+ """Returns an OrderedSet of all of a target's dependencies, recursively."""
+ if dependencies is None:
+ # Using a list to get ordered output and a set to do fast "is it
+ # already added" checks.
+ dependencies = OrderedSet()
+
+ for dependency in self.dependencies:
+ # Check for None, corresponding to the root node.
+ if dependency.ref is None:
+ continue
+ if dependency.ref not in dependencies:
+ dependency.DeepDependencies(dependencies)
+ dependencies.add(dependency.ref)
+
+ return dependencies
+
+ def _LinkDependenciesInternal(self, targets, include_shared_libraries,
+ dependencies=None, initial=True):
+ """Returns an OrderedSet of dependency targets that are linked
+ into this target.
+
+ This function has a split personality, depending on the setting of
+ |initial|. Outside callers should always leave |initial| at its default
+ setting.
+
+ When adding a target to the list of dependencies, this function will
+ recurse into itself with |initial| set to False, to collect dependencies
+ that are linked into the linkable target for which the list is being built.
+
+ If |include_shared_libraries| is False, the resulting dependencies will not
+ include shared_library targets that are linked into this target.
+ """
+ if dependencies is None:
+ # Using a list to get ordered output and a set to do fast "is it
+ # already added" checks.
+ dependencies = OrderedSet()
+
+ # Check for None, corresponding to the root node.
+ if self.ref is None:
+ return dependencies
+
+ # It's kind of sucky that |targets| has to be passed into this function,
+ # but that's presently the easiest way to access the target dicts so that
+ # this function can find target types.
+
+ if 'target_name' not in targets[self.ref]:
+ raise GypError("Missing 'target_name' field in target.")
+
+ if 'type' not in targets[self.ref]:
+ raise GypError("Missing 'type' field in target %s" %
+ targets[self.ref]['target_name'])
+
+ target_type = targets[self.ref]['type']
+
+ is_linkable = target_type in linkable_types
+
+ if initial and not is_linkable:
+ # If this is the first target being examined and it's not linkable,
+ # return an empty list of link dependencies, because the link
+ # dependencies are intended to apply to the target itself (initial is
+ # True) and this target won't be linked.
+ return dependencies
+
+ # Don't traverse 'none' targets if explicitly excluded.
+ if (target_type == 'none' and
+ not targets[self.ref].get('dependencies_traverse', True)):
+ dependencies.add(self.ref)
+ return dependencies
+
+ # Executables, mac kernel extensions, windows drivers and loadable modules
+ # are already fully and finally linked. Nothing else can be a link
+ # dependency of them, there can only be dependencies in the sense that a
+ # dependent target might run an executable or load the loadable_module.
+ if not initial and target_type in ('executable', 'loadable_module',
+ 'mac_kernel_extension',
+ 'windows_driver'):
+ return dependencies
+
+ # Shared libraries are already fully linked. They should only be included
+ # in |dependencies| when adjusting static library dependencies (in order to
+ # link against the shared_library's import lib), but should not be included
+ # in |dependencies| when propagating link_settings.
+ # The |include_shared_libraries| flag controls which of these two cases we
+ # are handling.
+ if (not initial and target_type == 'shared_library' and
+ not include_shared_libraries):
+ return dependencies
+
+ # The target is linkable, add it to the list of link dependencies.
+ if self.ref not in dependencies:
+ dependencies.add(self.ref)
+ if initial or not is_linkable:
+ # If this is a subsequent target and it's linkable, don't look any
+ # further for linkable dependencies, as they'll already be linked into
+      # this linkable target. Always look at dependencies of the initial
+ # target, and always look at dependencies of non-linkables.
+ for dependency in self.dependencies:
+ dependency._LinkDependenciesInternal(targets,
+ include_shared_libraries,
+ dependencies, False)
+
+ return dependencies
+
+ def DependenciesForLinkSettings(self, targets):
+ """
+ Returns a list of dependency targets whose link_settings should be merged
+ into this target.
+ """
+
+ # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
+ # link_settings are propagated. So for now, we will allow it, unless the
+ # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
+ # False. Once chrome is fixed, we can remove this flag.
+ include_shared_libraries = \
+ targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
+ return self._LinkDependenciesInternal(targets, include_shared_libraries)
+
+ def DependenciesToLinkAgainst(self, targets):
+ """
+ Returns a list of dependency targets that are linked into this target.
+ """
+ return self._LinkDependenciesInternal(targets, True)
+
+
+def BuildDependencyList(targets):
+ # Create a DependencyGraphNode for each target. Put it into a dict for easy
+ # access.
+ dependency_nodes = {}
+ for target, spec in targets.items():
+ if target not in dependency_nodes:
+ dependency_nodes[target] = DependencyGraphNode(target)
+
+ # Set up the dependency links. Targets that have no dependencies are treated
+ # as dependent on root_node.
+ root_node = DependencyGraphNode(None)
+ for target, spec in targets.items():
+ target_node = dependency_nodes[target]
+ target_build_file = gyp.common.BuildFile(target)
+ dependencies = spec.get('dependencies')
+ if not dependencies:
+ target_node.dependencies = [root_node]
+ root_node.dependents.append(target_node)
+ else:
+ for dependency in dependencies:
+ dependency_node = dependency_nodes.get(dependency)
+ if not dependency_node:
+ raise GypError("Dependency '%s' not found while "
+ "trying to load target %s" % (dependency, target))
+ target_node.dependencies.append(dependency_node)
+ dependency_node.dependents.append(target_node)
+
+ flat_list = root_node.FlattenToList()
+
+ # If there's anything left unvisited, there must be a circular dependency
+ # (cycle).
+ if len(flat_list) != len(targets):
+ if not root_node.dependents:
+ # If all targets have dependencies, add the first target as a dependent
+ # of root_node so that the cycle can be discovered from root_node.
+ target = next(iter(targets))
+ target_node = dependency_nodes[target]
+ target_node.dependencies.append(root_node)
+ root_node.dependents.append(target_node)
+
+ cycles = []
+ for cycle in root_node.FindCycles():
+ paths = [node.ref for node in cycle]
+ cycles.append('Cycle: %s' % ' -> '.join(paths))
+ raise DependencyGraphNode.CircularException(
+ 'Cycles in dependency graph detected:\n' + '\n'.join(cycles))
+
+ return [dependency_nodes, flat_list]
+
+
+def VerifyNoGYPFileCircularDependencies(targets):
+ # Create a DependencyGraphNode for each gyp file containing a target. Put
+ # it into a dict for easy access.
+ dependency_nodes = {}
+ for target in targets.keys():
+ build_file = gyp.common.BuildFile(target)
+ if not build_file in dependency_nodes:
+ dependency_nodes[build_file] = DependencyGraphNode(build_file)
+
+ # Set up the dependency links.
+ for target, spec in targets.items():
+ build_file = gyp.common.BuildFile(target)
+ build_file_node = dependency_nodes[build_file]
+ target_dependencies = spec.get('dependencies', [])
+ for dependency in target_dependencies:
+ try:
+ dependency_build_file = gyp.common.BuildFile(dependency)
+ except GypError as e:
+ gyp.common.ExceptionAppend(
+ e, 'while computing dependencies of .gyp file %s' % build_file)
+ raise
+
+ if dependency_build_file == build_file:
+ # A .gyp file is allowed to refer back to itself.
+ continue
+ dependency_node = dependency_nodes.get(dependency_build_file)
+ if not dependency_node:
+ raise GypError("Dependancy '%s' not found" % dependency_build_file)
+ if dependency_node not in build_file_node.dependencies:
+ build_file_node.dependencies.append(dependency_node)
+ dependency_node.dependents.append(build_file_node)
+
+
+ # Files that have no dependencies are treated as dependent on root_node.
+ root_node = DependencyGraphNode(None)
+ for build_file_node in dependency_nodes.values():
+ if len(build_file_node.dependencies) == 0:
+ build_file_node.dependencies.append(root_node)
+ root_node.dependents.append(build_file_node)
+
+ flat_list = root_node.FlattenToList()
+
+ # If there's anything left unvisited, there must be a circular dependency
+ # (cycle).
+ if len(flat_list) != len(dependency_nodes):
+ if not root_node.dependents:
+ # If all files have dependencies, add the first file as a dependent
+ # of root_node so that the cycle can be discovered from root_node.
+ file_node = next(iter(dependency_nodes.values()))
+ file_node.dependencies.append(root_node)
+ root_node.dependents.append(file_node)
+ cycles = []
+ for cycle in root_node.FindCycles():
+ paths = [node.ref for node in cycle]
+ cycles.append('Cycle: %s' % ' -> '.join(paths))
+ raise DependencyGraphNode.CircularException(
+ 'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles))
+
+
+def DoDependentSettings(key, flat_list, targets, dependency_nodes):
+ # key should be one of all_dependent_settings, direct_dependent_settings,
+ # or link_settings.
+
+ for target in flat_list:
+ target_dict = targets[target]
+ build_file = gyp.common.BuildFile(target)
+
+ if key == 'all_dependent_settings':
+ dependencies = dependency_nodes[target].DeepDependencies()
+ elif key == 'direct_dependent_settings':
+ dependencies = \
+ dependency_nodes[target].DirectAndImportedDependencies(targets)
+ elif key == 'link_settings':
+ dependencies = \
+ dependency_nodes[target].DependenciesForLinkSettings(targets)
+ else:
+ raise GypError("DoDependentSettings doesn't know how to determine "
+ 'dependencies for ' + key)
+
+ for dependency in dependencies:
+ dependency_dict = targets[dependency]
+ if not key in dependency_dict:
+ continue
+ dependency_build_file = gyp.common.BuildFile(dependency)
+ MergeDicts(target_dict, dependency_dict[key],
+ build_file, dependency_build_file)
+
+
+def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
+ sort_dependencies):
+ # Recompute target "dependencies" properties. For each static library
+ # target, remove "dependencies" entries referring to other static libraries,
+ # unless the dependency has the "hard_dependency" attribute set. For each
+ # linkable target, add a "dependencies" entry referring to all of the
+  # target's computed list of link dependencies (including static libraries)
+ # if no such entry is already present.
+ for target in flat_list:
+ target_dict = targets[target]
+ target_type = target_dict['type']
+
+ if target_type == 'static_library':
+ if not 'dependencies' in target_dict:
+ continue
+
+ target_dict['dependencies_original'] = target_dict.get(
+ 'dependencies', [])[:]
+
+ # A static library should not depend on another static library unless
+ # the dependency relationship is "hard," which should only be done when
+ # a dependent relies on some side effect other than just the build
+ # product, like a rule or action output. Further, if a target has a
+ # non-hard dependency, but that dependency exports a hard dependency,
+ # the non-hard dependency can safely be removed, but the exported hard
+ # dependency must be added to the target to keep the same dependency
+ # ordering.
+ dependencies = \
+ dependency_nodes[target].DirectAndImportedDependencies(targets)
+ index = 0
+ while index < len(dependencies):
+ dependency = dependencies[index]
+ dependency_dict = targets[dependency]
+
+ # Remove every non-hard static library dependency and remove every
+ # non-static library dependency that isn't a direct dependency.
+ if (dependency_dict['type'] == 'static_library' and \
+ not dependency_dict.get('hard_dependency', False)) or \
+ (dependency_dict['type'] != 'static_library' and \
+ not dependency in target_dict['dependencies']):
+ # Take the dependency out of the list, and don't increment index
+ # because the next dependency to analyze will shift into the index
+ # formerly occupied by the one being removed.
+ del dependencies[index]
+ else:
+ index = index + 1
+
+ # Update the dependencies. If the dependencies list is empty, it's not
+ # needed, so unhook it.
+ if len(dependencies) > 0:
+ target_dict['dependencies'] = dependencies
+ else:
+ del target_dict['dependencies']
+
+ elif target_type in linkable_types:
+ # Get a list of dependency targets that should be linked into this
+ # target. Add them to the dependencies list if they're not already
+ # present.
+
+ link_dependencies = \
+ dependency_nodes[target].DependenciesToLinkAgainst(targets)
+ for dependency in link_dependencies:
+ if dependency == target:
+ continue
+ if not 'dependencies' in target_dict:
+ target_dict['dependencies'] = []
+ if not dependency in target_dict['dependencies']:
+ target_dict['dependencies'].append(dependency)
+ # Sort the dependencies list in the order from dependents to dependencies.
+ # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
+ # Note: flat_list is already sorted in the order from dependencies to
+ # dependents.
+ if sort_dependencies and 'dependencies' in target_dict:
+ target_dict['dependencies'] = [dep for dep in reversed(flat_list)
+ if dep in target_dict['dependencies']]
+
+
+# Initialize this here to speed up MakePathRelative.
+exception_re = re.compile(r'''["']?[-/$<>^]''')
+
+
+def MakePathRelative(to_file, fro_file, item):
+ # If item is a relative path, it's relative to the build file dict that it's
+ # coming from. Fix it up to make it relative to the build file dict that
+ # it's going into.
+ # Exception: any |item| that begins with these special characters is
+ # returned without modification.
+ # / Used when a path is already absolute (shortcut optimization;
+ # such paths would be returned as absolute anyway)
+ # $ Used for build environment variables
+ # - Used for some build environment flags (such as -lapr-1 in a
+ # "libraries" section)
+ # < Used for our own variable and command expansions (see ExpandVariables)
+ # > Used for our own variable and command expansions (see ExpandVariables)
+ # ^ Used for our own variable and command expansions (see ExpandVariables)
+ #
+ # "/' Used when a value is quoted. If these are present, then we
+ # check the second character instead.
+ #
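+  # For example, an item 'foo/bar.cc' written in 'a/b.gyp' and merged into
+  # 'c/d.gyp' comes out as '../a/foo/bar.cc'.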
+ if to_file == fro_file or exception_re.match(item):
+ return item
+ else:
+ # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
+ # temporary measure. This should really be addressed by keeping all paths
+ # in POSIX until actual project generation.
+ ret = os.path.normpath(os.path.join(
+ gyp.common.RelativePath(os.path.dirname(fro_file),
+ os.path.dirname(to_file)),
+ item)).replace('\\', '/')
+ if item[-1] == '/':
+ ret += '/'
+ return ret
+
+def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
+  # The Python documentation recommends that objects which do not support
+  # hashing set __hash__ to None. Python library objects follow this rule.
+ is_hashable = lambda val: val.__hash__
+
+ # If x is hashable, returns whether x is in s. Else returns whether x is in l.
+ def is_in_set_or_list(x, s, l):
+ if is_hashable(x):
+ return x in s
+ return x in l
+
+ prepend_index = 0
+
+ # Make membership testing of hashables in |to| (in particular, strings)
+ # faster.
+ hashable_to_set = set(x for x in to if is_hashable(x))
+ for item in fro:
+ singleton = False
+ if isinstance(item, _str_int_types):
+ # The cheap and easy case.
+ if is_paths:
+ to_item = MakePathRelative(to_file, fro_file, item)
+ else:
+ to_item = item
+
+ if not (isinstance(item, _str_types) and item.startswith('-')):
+ # Any string that doesn't begin with a "-" is a singleton - it can
+ # only appear once in a list, to be enforced by the list merge append
+ # or prepend.
+ singleton = True
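+        # For example, appending 'bar' when 'bar' is already in |to| is a
+        # no-op (the earliest occurrence stays put), while '-lfoo' is not a
+        # singleton and may legitimately appear more than once.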
+ elif type(item) is dict:
+ # Make a copy of the dictionary, continuing to look for paths to fix.
+ # The other intelligent aspects of merge processing won't apply because
+ # item is being merged into an empty dict.
+ to_item = {}
+ MergeDicts(to_item, item, to_file, fro_file)
+ elif type(item) is list:
+ # Recurse, making a copy of the list. If the list contains any
+ # descendant dicts, path fixing will occur. Note that here, custom
+ # values for is_paths and append are dropped; those are only to be
+ # applied to |to| and |fro|, not sublists of |fro|. append shouldn't
+ # matter anyway because the new |to_item| list is empty.
+ to_item = []
+ MergeLists(to_item, item, to_file, fro_file)
+ else:
+ raise TypeError(
+ 'Attempt to merge list item of unsupported type ' + \
+ item.__class__.__name__)
+
+ if append:
+ # If appending a singleton that's already in the list, don't append.
+ # This ensures that the earliest occurrence of the item will stay put.
+ if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
+ to.append(to_item)
+ if is_hashable(to_item):
+ hashable_to_set.add(to_item)
+ else:
+ # If prepending a singleton that's already in the list, remove the
+ # existing instance and proceed with the prepend. This ensures that the
+ # item appears at the earliest possible position in the list.
+ while singleton and to_item in to:
+ to.remove(to_item)
+
+ # Don't just insert everything at index 0. That would prepend the new
+ # items to the list in reverse order, which would be an unwelcome
+ # surprise.
+ to.insert(prepend_index, to_item)
+ if is_hashable(to_item):
+ hashable_to_set.add(to_item)
+ prepend_index = prepend_index + 1
+
+
+def MergeDicts(to, fro, to_file, fro_file):
+ # I wanted to name the parameter "from" but it's a Python keyword...
+ for k, v in fro.items():
+ # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
+ # copy semantics. Something else may want to merge from the |fro| dict
+ # later, and having the same dict ref pointed to twice in the tree isn't
+ # what anyone wants considering that the dicts may subsequently be
+ # modified.
+ if k in to:
+ bad_merge = False
+ if isinstance(v, _str_int_types):
+ if not isinstance(to[k], _str_int_types):
+ bad_merge = True
+ elif type(v) is not type(to[k]):
+ bad_merge = True
+
+ if bad_merge:
+ raise TypeError(
+ 'Attempt to merge dict value of type ' + v.__class__.__name__ + \
+ ' into incompatible type ' + to[k].__class__.__name__ + \
+ ' for key ' + k)
+ if isinstance(v, _str_int_types):
+ # Overwrite the existing value, if any. Cheap and easy.
+ is_path = IsPathSection(k)
+ if is_path:
+ to[k] = MakePathRelative(to_file, fro_file, v)
+ else:
+ to[k] = v
+ elif type(v) is dict:
+ # Recurse, guaranteeing copies will be made of objects that require it.
+ if not k in to:
+ to[k] = {}
+ MergeDicts(to[k], v, to_file, fro_file)
+ elif type(v) is list:
+ # Lists in dicts can be merged with different policies, depending on
+ # how the key in the "from" dict (k, the from-key) is written.
+ #
+ # If the from-key has ...the to-list will have this action
+ # this character appended:... applied when receiving the from-list:
+ # = replace
+ # + prepend
+ # ? set, only if to-list does not yet exist
+ # (none) append
+ #
+ # This logic is list-specific, but since it relies on the associated
+ # dict key, it's checked in this dict-oriented function.
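+      # For example, merging {'defines+': ['FOO']} into a dict whose
+      # 'defines' list is ['BAR'] prepends, giving ['FOO', 'BAR'];
+      # 'defines=' would replace the list, and a plain 'defines' key would
+      # append, giving ['BAR', 'FOO'].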
+ ext = k[-1]
+ append = True
+ if ext == '=':
+ list_base = k[:-1]
+ lists_incompatible = [list_base, list_base + '?']
+ to[list_base] = []
+ elif ext == '+':
+ list_base = k[:-1]
+ lists_incompatible = [list_base + '=', list_base + '?']
+ append = False
+ elif ext == '?':
+ list_base = k[:-1]
+ lists_incompatible = [list_base, list_base + '=', list_base + '+']
+ else:
+ list_base = k
+ lists_incompatible = [list_base + '=', list_base + '?']
+
+ # Some combinations of merge policies appearing together are meaningless.
+ # It's stupid to replace and append simultaneously, for example. Append
+ # and prepend are the only policies that can coexist.
+ for list_incompatible in lists_incompatible:
+ if list_incompatible in fro:
+ raise GypError('Incompatible list policies ' + k + ' and ' +
+ list_incompatible)
+
+ if list_base in to:
+ if ext == '?':
+ # If the key ends in "?", the list will only be merged if it doesn't
+ # already exist.
+ continue
+ elif type(to[list_base]) is not list:
+ # This may not have been checked above if merging in a list with an
+ # extension character.
+ raise TypeError(
+ 'Attempt to merge dict value of type ' + v.__class__.__name__ + \
+ ' into incompatible type ' + to[list_base].__class__.__name__ + \
+ ' for key ' + list_base + '(' + k + ')')
+ else:
+ to[list_base] = []
+
+ # Call MergeLists, which will make copies of objects that require it.
+      # MergeLists can recurse back into MergeDicts, although only to make
+      # copies of dicts (with paths fixed); there will be no subsequent dict
+      # "merging" once entering a list because lists are
+ # always replaced, appended to, or prepended to.
+ is_paths = IsPathSection(list_base)
+ MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
+ else:
+ raise TypeError(
+ 'Attempt to merge dict value of unsupported type ' + \
+ v.__class__.__name__ + ' for key ' + k)
+
+
+def MergeConfigWithInheritance(new_configuration_dict, build_file,
+ target_dict, configuration, visited):
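+  # For example, with configurations
+  #   {'Common': {'abstract': 1, 'defines': ['FOO']},
+  #    'Debug': {'inherit_from': ['Common'], 'defines': ['DEBUG']}},
+  # merging 'Debug' merges 'Common' first and then 'Debug' itself, so the
+  # merged configuration carries defines ['FOO', 'DEBUG'] (assuming the
+  # target itself defines none; lists append by default in MergeDicts).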
+  # Skip if previously visited.
+ if configuration in visited:
+ return
+
+ # Look at this configuration.
+ configuration_dict = target_dict['configurations'][configuration]
+
+ # Merge in parents.
+ for parent in configuration_dict.get('inherit_from', []):
+ MergeConfigWithInheritance(new_configuration_dict, build_file,
+ target_dict, parent, visited + [configuration])
+
+ # Merge it into the new config.
+ MergeDicts(new_configuration_dict, configuration_dict,
+ build_file, build_file)
+
+ # Drop abstract.
+ if 'abstract' in new_configuration_dict:
+ del new_configuration_dict['abstract']
+
+
+def SetUpConfigurations(target, target_dict):
+ # key_suffixes is a list of key suffixes that might appear on key names.
+ # These suffixes are handled in conditional evaluations (for =, +, and ?)
+ # and rules/exclude processing (for ! and /). Keys with these suffixes
+ # should be treated the same as keys without.
+ key_suffixes = ['=', '+', '?', '!', '/']
+
+ build_file = gyp.common.BuildFile(target)
+
+ # Provide a single configuration by default if none exists.
+ # TODO(mark): Signal an error if default_configurations exists but
+ # configurations does not.
+ if not 'configurations' in target_dict:
+ target_dict['configurations'] = {'Default': {}}
+ if not 'default_configuration' in target_dict:
+ concrete = [i for (i, config) in target_dict['configurations'].items()
+ if not config.get('abstract')]
+ target_dict['default_configuration'] = sorted(concrete)[0]
+
+ merged_configurations = {}
+ configs = target_dict['configurations']
+ for (configuration, old_configuration_dict) in configs.items():
+ # Skip abstract configurations (saves work only).
+ if old_configuration_dict.get('abstract'):
+ continue
+ # Configurations inherit (most) settings from the enclosing target scope.
+ # Get the inheritance relationship right by making a copy of the target
+ # dict.
+ new_configuration_dict = {}
+ for (key, target_val) in target_dict.items():
+ key_ext = key[-1:]
+ if key_ext in key_suffixes:
+ key_base = key[:-1]
+ else:
+ key_base = key
+ if not key_base in non_configuration_keys:
+ new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
+
+ # Merge in configuration (with all its parents first).
+ MergeConfigWithInheritance(new_configuration_dict, build_file,
+ target_dict, configuration, [])
+
+ merged_configurations[configuration] = new_configuration_dict
+
+ # Put the new configurations back into the target dict as a configuration.
+ for configuration in merged_configurations.keys():
+ target_dict['configurations'][configuration] = (
+ merged_configurations[configuration])
+
+ # Now drop all the abstract ones.
+ configs = target_dict['configurations']
+ target_dict['configurations'] = \
+ {k: v for k, v in configs.items() if not v.get('abstract')}
+
+ # Now that all of the target's configurations have been built, go through
+ # the target dict's keys and remove everything that's been moved into a
+ # "configurations" section.
+ delete_keys = []
+ for key in target_dict:
+ key_ext = key[-1:]
+ if key_ext in key_suffixes:
+ key_base = key[:-1]
+ else:
+ key_base = key
+ if not key_base in non_configuration_keys:
+ delete_keys.append(key)
+ for key in delete_keys:
+ del target_dict[key]
+
+ # Check the configurations to see if they contain invalid keys.
+ for configuration in target_dict['configurations'].keys():
+ configuration_dict = target_dict['configurations'][configuration]
+ for key in configuration_dict.keys():
+ if key in invalid_configuration_keys:
+ raise GypError('%s not allowed in the %s configuration, found in '
+ 'target %s' % (key, configuration, target))
+
+
+
+def ProcessListFiltersInDict(name, the_dict):
+ """Process regular expression and exclusion-based filters on lists.
+
+ An exclusion list is in a dict key named with a trailing "!", like
+ "sources!". Every item in such a list is removed from the associated
+ main list, which in this example, would be "sources". Removed items are
+ placed into a "sources_excluded" list in the dict.
+
+ Regular expression (regex) filters are contained in dict keys named with a
+ trailing "/", such as "sources/" to operate on the "sources" list. Regex
+ filters in a dict take the form:
+ 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
+ ['include', '_mac\\.cc$'] ],
+ The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
+ _win.cc. The second filter then includes all files ending in _mac.cc that
+ are now or were once in the "sources" list. Items matching an "exclude"
+ filter are subject to the same processing as would occur if they were listed
+ by name in an exclusion list (ending in "!"). Items matching an "include"
+ filter are brought back into the main list if previously excluded by an
+ exclusion list or exclusion regex filter. Subsequent matching "exclude"
+ patterns can still cause items to be excluded after matching an "include".
+ """
+
+ # Look through the dictionary for any lists whose keys end in "!" or "/".
+ # These are lists that will be treated as exclude lists and regular
+ # expression-based exclude/include lists. Collect the lists that are
+ # needed first, looking for the lists that they operate on, and assemble
+  # them into |lists|. This is done in a separate loop up front, because
+ # the _included and _excluded keys need to be added to the_dict, and that
+ # can't be done while iterating through it.
+
+ lists = []
+ del_lists = []
+ for key, value in the_dict.items():
+ operation = key[-1]
+ if operation != '!' and operation != '/':
+ continue
+
+ if type(value) is not list:
+ raise ValueError(name + ' key ' + key + ' must be list, not ' + \
+ value.__class__.__name__)
+
+ list_key = key[:-1]
+ if list_key not in the_dict:
+ # This happens when there's a list like "sources!" but no corresponding
+ # "sources" list. Since there's nothing for it to operate on, queue up
+ # the "sources!" list for deletion now.
+ del_lists.append(key)
+ continue
+
+ if type(the_dict[list_key]) is not list:
+ value = the_dict[list_key]
+ raise ValueError(name + ' key ' + list_key + \
+ ' must be list, not ' + \
+ value.__class__.__name__ + ' when applying ' + \
+ {'!': 'exclusion', '/': 'regex'}[operation])
+
+ if not list_key in lists:
+ lists.append(list_key)
+
+ # Delete the lists that are known to be unneeded at this point.
+ for del_list in del_lists:
+ del the_dict[del_list]
+
+ for list_key in lists:
+ the_list = the_dict[list_key]
+
+ # Initialize the list_actions list, which is parallel to the_list. Each
+ # item in list_actions identifies whether the corresponding item in
+ # the_list should be excluded, unconditionally preserved (included), or
+ # whether no exclusion or inclusion has been applied. Items for which
+ # no exclusion or inclusion has been applied (yet) have value -1, items
+ # excluded have value 0, and items included have value 1. Includes and
+ # excludes override previous actions. All items in list_actions are
+ # initialized to -1 because no excludes or includes have been processed
+ # yet.
+ list_actions = list((-1,) * len(the_list))
+
+ exclude_key = list_key + '!'
+ if exclude_key in the_dict:
+ for exclude_item in the_dict[exclude_key]:
+ for index, list_item in enumerate(the_list):
+ if exclude_item == list_item:
+ # This item matches the exclude_item, so set its action to 0
+ # (exclude).
+ list_actions[index] = 0
+
+ # The "whatever!" list is no longer needed, dump it.
+ del the_dict[exclude_key]
+
+ regex_key = list_key + '/'
+ if regex_key in the_dict:
+ for regex_item in the_dict[regex_key]:
+ [action, pattern] = regex_item
+ pattern_re = re.compile(pattern)
+
+ if action == 'exclude':
+ # This item matches an exclude regex, so set its value to 0 (exclude).
+ action_value = 0
+ elif action == 'include':
+ # This item matches an include regex, so set its value to 1 (include).
+ action_value = 1
+ else:
+ # This is an action that doesn't make any sense.
+ raise ValueError('Unrecognized action ' + action + ' in ' + name + \
+ ' key ' + regex_key)
+
+ for index, list_item in enumerate(the_list):
+ if list_actions[index] == action_value:
+ # Even if the regex matches, nothing will change so continue (regex
+ # searches are expensive).
+ continue
+ if pattern_re.search(list_item):
+ # Regular expression match.
+ list_actions[index] = action_value
+
+ # The "whatever/" list is no longer needed, dump it.
+ del the_dict[regex_key]
+
+ # Add excluded items to the excluded list.
+ #
+ # Note that exclude_key ("sources!") is different from excluded_key
+ # ("sources_excluded"). The exclude_key list is input and it was already
+ # processed and deleted; the excluded_key list is output and it's about
+ # to be created.
+ excluded_key = list_key + '_excluded'
+ if excluded_key in the_dict:
+ raise GypError(name + ' key ' + excluded_key +
+ ' must not be present prior '
+                     'to applying exclusion/regex filters for ' + list_key)
+
+ excluded_list = []
+
+ # Go backwards through the list_actions list so that as items are deleted,
+ # the indices of items that haven't been seen yet don't shift. That means
+ # that things need to be prepended to excluded_list to maintain them in the
+ # same order that they existed in the_list.
+ for index in range(len(list_actions) - 1, -1, -1):
+ if list_actions[index] == 0:
+ # Dump anything with action 0 (exclude). Keep anything with action 1
+ # (include) or -1 (no include or exclude seen for the item).
+ excluded_list.insert(0, the_list[index])
+ del the_list[index]
+
+ # If anything was excluded, put the excluded list into the_dict at
+ # excluded_key.
+ if len(excluded_list) > 0:
+ the_dict[excluded_key] = excluded_list
+
+ # Now recurse into subdicts and lists that may contain dicts.
+ for key, value in the_dict.items():
+ if type(value) is dict:
+ ProcessListFiltersInDict(key, value)
+ elif type(value) is list:
+ ProcessListFiltersInList(key, value)
+
+
+def ProcessListFiltersInList(name, the_list):
+ for item in the_list:
+ if type(item) is dict:
+ ProcessListFiltersInDict(name, item)
+ elif type(item) is list:
+ ProcessListFiltersInList(name, item)
+
+
+def ValidateTargetType(target, target_dict):
+ """Ensures the 'type' field on the target is one of the known types.
+
+ Arguments:
+ target: string, name of target.
+ target_dict: dict, target spec.
+
+ Raises an exception on error.
+ """
+ VALID_TARGET_TYPES = ('executable', 'loadable_module',
+ 'static_library', 'shared_library',
+ 'mac_kernel_extension', 'none', 'windows_driver')
+ target_type = target_dict.get('type', None)
+ if target_type not in VALID_TARGET_TYPES:
+ raise GypError("Target %s has an invalid target type '%s'. "
+ "Must be one of %s." %
+ (target, target_type, '/'.join(VALID_TARGET_TYPES)))
+ if (target_dict.get('standalone_static_library', 0) and
+ not target_type == 'static_library'):
+ raise GypError('Target %s has type %s but standalone_static_library flag is'
+ ' only valid for static_library type.' % (target,
+ target_type))
+
+
+def ValidateSourcesInTarget(target, target_dict, build_file,
+ duplicate_basename_check):
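+  # Verifies that a static_library does not list two compiled sources with
+  # the same basename, e.g. 'a/foo.cc' and 'b/foo.cc'; libtool on Mac cannot
+  # handle that (see the error raised below).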
+ if not duplicate_basename_check:
+ return
+ if target_dict.get('type', None) != 'static_library':
+ return
+ sources = target_dict.get('sources', [])
+ basenames = {}
+ for source in sources:
+ name, ext = os.path.splitext(source)
+ is_compiled_file = ext in [
+ '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
+ if not is_compiled_file:
+ continue
+ basename = os.path.basename(name) # Don't include extension.
+ basenames.setdefault(basename, []).append(source)
+
+ error = ''
+ for basename, files in basenames.items():
+ if len(files) > 1:
+ error += ' %s: %s\n' % (basename, ' '.join(files))
+
+ if error:
+ print('static library %s has several files with the same basename:\n' %
+ target + error + 'libtool on Mac cannot handle that. Use '
+ '--no-duplicate-basename-check to disable this validation.')
+ raise GypError('Duplicate basenames in sources section, see list above')
+
+
+def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
+ """Ensures that the rules sections in target_dict are valid and consistent,
+ and determines which sources they apply to.
+
+ Arguments:
+ target: string, name of target.
+ target_dict: dict, target spec containing "rules" and "sources" lists.
+ extra_sources_for_rules: a list of keys to scan for rule matches in
+ addition to 'sources'.
+ """
+
+ # Dicts to map between values found in rules' 'rule_name' and 'extension'
+ # keys and the rule dicts themselves.
+ rule_names = {}
+ rule_extensions = {}
+
+ rules = target_dict.get('rules', [])
+ for rule in rules:
+ # Make sure that there's no conflict among rule names and extensions.
+ rule_name = rule['rule_name']
+ if rule_name in rule_names:
+ raise GypError('rule %s exists in duplicate, target %s' %
+ (rule_name, target))
+ rule_names[rule_name] = rule
+
+ rule_extension = rule['extension']
+ if rule_extension.startswith('.'):
+ rule_extension = rule_extension[1:]
+ if rule_extension in rule_extensions:
+ raise GypError(('extension %s associated with multiple rules, ' +
+ 'target %s rules %s and %s') %
+ (rule_extension, target,
+ rule_extensions[rule_extension]['rule_name'],
+ rule_name))
+ rule_extensions[rule_extension] = rule
+
+ # Make sure rule_sources isn't already there. It's going to be
+ # created below if needed.
+ if 'rule_sources' in rule:
+ raise GypError(
+ 'rule_sources must not exist in input, target %s rule %s' %
+ (target, rule_name))
+
+ rule_sources = []
+ source_keys = ['sources']
+ source_keys.extend(extra_sources_for_rules)
+ for source_key in source_keys:
+ for source in target_dict.get(source_key, []):
+ (source_root, source_extension) = os.path.splitext(source)
+ if source_extension.startswith('.'):
+ source_extension = source_extension[1:]
+ if source_extension == rule_extension:
+ rule_sources.append(source)
+
+ if len(rule_sources) > 0:
+ rule['rule_sources'] = rule_sources
+
+
+def ValidateRunAsInTarget(target, target_dict, build_file):
+ target_name = target_dict.get('target_name')
+ run_as = target_dict.get('run_as')
+ if not run_as:
+ return
+ if type(run_as) is not dict:
+ raise GypError("The 'run_as' in target %s from file %s should be a "
+ "dictionary." %
+ (target_name, build_file))
+ action = run_as.get('action')
+ if not action:
+ raise GypError("The 'run_as' in target %s from file %s must have an "
+ "'action' section." %
+ (target_name, build_file))
+ if type(action) is not list:
+ raise GypError("The 'action' for 'run_as' in target %s from file %s "
+ "must be a list." %
+ (target_name, build_file))
+ working_directory = run_as.get('working_directory')
+ if working_directory and not isinstance(working_directory, _str_types):
+ raise GypError("The 'working_directory' for 'run_as' in target %s "
+ "in file %s should be a string." %
+ (target_name, build_file))
+ environment = run_as.get('environment')
+ if environment and type(environment) is not dict:
+ raise GypError("The 'environment' for 'run_as' in target %s "
+ "in file %s should be a dictionary." %
+ (target_name, build_file))
+
+
+def ValidateActionsInTarget(target, target_dict, build_file):
+ '''Validates the inputs to the actions in a target.'''
+ target_name = target_dict.get('target_name')
+ actions = target_dict.get('actions', [])
+ for action in actions:
+ action_name = action.get('action_name')
+ if not action_name:
+ raise GypError("Anonymous action in target %s. "
+ "An action must have an 'action_name' field." %
+ target_name)
+ inputs = action.get('inputs', None)
+ if inputs is None:
+ raise GypError('Action in target %s has no inputs.' % target_name)
+ action_command = action.get('action')
+ if action_command and not action_command[0]:
+ raise GypError("Empty action as command in target %s." % target_name)
+
+
+def TurnIntIntoStrInDict(the_dict):
+ """Given dict the_dict, recursively converts all integers into strings.
+ """
+  # Iterate over a snapshot of the items so that keys can be deleted and
+  # reinserted as strings while looping; there's no need to revisit the
+  # reinserted keys and their associated values.
+  for k, v in list(the_dict.items()):
+ if isinstance(v, _int_types):
+ v = str(v)
+ the_dict[k] = v
+ elif type(v) is dict:
+ TurnIntIntoStrInDict(v)
+ elif type(v) is list:
+ TurnIntIntoStrInList(v)
+
+ if isinstance(k, _int_types):
+ del the_dict[k]
+ the_dict[str(k)] = v
+
+
+def TurnIntIntoStrInList(the_list):
+ """Given list the_list, recursively converts all integers into strings.
+ """
+ for index, item in enumerate(the_list):
+ if isinstance(item, _int_types):
+ the_list[index] = str(item)
+ elif type(item) is dict:
+ TurnIntIntoStrInDict(item)
+ elif type(item) is list:
+ TurnIntIntoStrInList(item)
+
+
+def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
+ data):
+ """Return only the targets that are deep dependencies of |root_targets|."""
+ qualified_root_targets = []
+ for target in root_targets:
+ target = target.strip()
+ qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
+ if not qualified_targets:
+ raise GypError("Could not find target %s" % target)
+ qualified_root_targets.extend(qualified_targets)
+
+ wanted_targets = {}
+ for target in qualified_root_targets:
+ wanted_targets[target] = targets[target]
+ for dependency in dependency_nodes[target].DeepDependencies():
+ wanted_targets[dependency] = targets[dependency]
+
+ wanted_flat_list = [t for t in flat_list if t in wanted_targets]
+
+ # Prune unwanted targets from each build_file's data dict.
+ for build_file in data['target_build_files']:
+ if not 'targets' in data[build_file]:
+ continue
+ new_targets = []
+ for target in data[build_file]['targets']:
+ qualified_name = gyp.common.QualifiedTarget(build_file,
+ target['target_name'],
+ target['toolset'])
+ if qualified_name in wanted_targets:
+ new_targets.append(target)
+ data[build_file]['targets'] = new_targets
+
+ return wanted_targets, wanted_flat_list
+
+
+def VerifyNoCollidingTargets(targets):
+ """Verify that no two targets in the same directory share the same name.
+
+ Arguments:
+ targets: A list of targets in the form 'path/to/file.gyp:target_name'.
+ """
+ # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
+ used = {}
+ for target in targets:
+ # Separate out 'path/to/file.gyp, 'target_name' from
+ # 'path/to/file.gyp:target_name'.
+ path, name = target.rsplit(':', 1)
+ # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
+ subdir, gyp = os.path.split(path)
+ # Use '.' for the current directory '', so that the error messages make
+ # more sense.
+ if not subdir:
+ subdir = '.'
+ # Prepare a key like 'path/to:target_name'.
+ key = subdir + ':' + name
+ if key in used:
+ # Complain if this target is already used.
+ raise GypError('Duplicate target name "%s" in directory "%s" used both '
+ 'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
+ used[key] = gyp
+
+
+def SetGeneratorGlobals(generator_input_info):
+ # Set up path_sections and non_configuration_keys with the default data plus
+ # the generator-specific data.
+ global path_sections
+ path_sections = set(base_path_sections)
+ path_sections.update(generator_input_info['path_sections'])
+
+ global non_configuration_keys
+ non_configuration_keys = base_non_configuration_keys[:]
+ non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
+
+ global multiple_toolsets
+ multiple_toolsets = generator_input_info[
+ 'generator_supports_multiple_toolsets']
+
+ global generator_filelist_paths
+ generator_filelist_paths = generator_input_info['generator_filelist_paths']
+
+
+def Load(build_files, variables, includes, depth, generator_input_info, check,
+ circular_check, duplicate_basename_check, parallel, root_targets):
+ SetGeneratorGlobals(generator_input_info)
+ # A generator can have other lists (in addition to sources) be processed
+ # for rules.
+ extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
+
+ # Load build files. This loads every target-containing build file into
+ # the |data| dictionary such that the keys to |data| are build file names,
+ # and the values are the entire build file contents after "early" or "pre"
+ # processing has been done and includes have been resolved.
+ # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
+ # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
+ # track of the keys corresponding to "target" files.
+ data = {'target_build_files': set()}
+ # Normalize paths everywhere. This is important because paths will be
+ # used as keys to the data dict and for references between input files.
+ build_files = set(map(os.path.normpath, build_files))
+ if parallel:
+ LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
+ check, generator_input_info)
+ else:
+ aux_data = {}
+ for build_file in build_files:
+ try:
+ LoadTargetBuildFile(build_file, data, aux_data,
+ variables, includes, depth, check, True)
+ except Exception as e:
+ gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
+ raise
+
+ # Build a dict to access each target's subdict by qualified name.
+ targets = BuildTargetsDict(data)
+
+ # Fully qualify all dependency links.
+ QualifyDependencies(targets)
+
+  # Remove self-dependencies from targets that have 'prune_self_dependency'
+ # set to 1.
+ RemoveSelfDependencies(targets)
+
+ # Expand dependencies specified as build_file:*.
+ ExpandWildcardDependencies(targets, data)
+
+ # Remove all dependencies marked as 'link_dependency' from the targets of
+ # type 'none'.
+ RemoveLinkDependenciesFromNoneTargets(targets)
+
+ # Apply exclude (!) and regex (/) list filters only for dependency_sections.
+ for target_name, target_dict in targets.items():
+ tmp_dict = {}
+ for key_base in dependency_sections:
+ for op in ('', '!', '/'):
+ key = key_base + op
+ if key in target_dict:
+ tmp_dict[key] = target_dict[key]
+ del target_dict[key]
+ ProcessListFiltersInDict(target_name, tmp_dict)
+ # Write the results back to |target_dict|.
+ for key in tmp_dict:
+ target_dict[key] = tmp_dict[key]
+
+ # Make sure every dependency appears at most once.
+ RemoveDuplicateDependencies(targets)
+
+ if circular_check:
+ # Make sure that any targets in a.gyp don't contain dependencies in other
+ # .gyp files that further depend on a.gyp.
+ VerifyNoGYPFileCircularDependencies(targets)
+
+ [dependency_nodes, flat_list] = BuildDependencyList(targets)
+
+ if root_targets:
+ # Remove, from |targets| and |flat_list|, the targets that are not deep
+ # dependencies of the targets specified in |root_targets|.
+ targets, flat_list = PruneUnwantedTargets(
+ targets, flat_list, dependency_nodes, root_targets, data)
+
+ # Check that no two targets in the same directory have the same name.
+ VerifyNoCollidingTargets(flat_list)
+
+ # Handle dependent settings of various types.
+ for settings_type in ['all_dependent_settings',
+ 'direct_dependent_settings',
+ 'link_settings']:
+ DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
+
+ # Take out the dependent settings now that they've been published to all
+ # of the targets that require them.
+ for target in flat_list:
+ if settings_type in targets[target]:
+ del targets[target][settings_type]
+
+ # Make sure static libraries don't declare dependencies on other static
+ # libraries, but that linkables depend on all unlinked static libraries
+ # that they need so that their link steps will be correct.
+ gii = generator_input_info
+ if gii['generator_wants_static_library_dependencies_adjusted']:
+ AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
+ gii['generator_wants_sorted_dependencies'])
+
+ # Apply "post"/"late"/"target" variable expansions and condition evaluations.
+ for target in flat_list:
+ target_dict = targets[target]
+ build_file = gyp.common.BuildFile(target)
+ ProcessVariablesAndConditionsInDict(
+ target_dict, PHASE_LATE, variables, build_file)
+
+ # Move everything that can go into a "configurations" section into one.
+ for target in flat_list:
+ target_dict = targets[target]
+ SetUpConfigurations(target, target_dict)
+
+ # Apply exclude (!) and regex (/) list filters.
+ for target in flat_list:
+ target_dict = targets[target]
+ ProcessListFiltersInDict(target, target_dict)
+
+ # Apply "latelate" variable expansions and condition evaluations.
+ for target in flat_list:
+ target_dict = targets[target]
+ build_file = gyp.common.BuildFile(target)
+ ProcessVariablesAndConditionsInDict(
+ target_dict, PHASE_LATELATE, variables, build_file)
+
+ # Make sure that the rules make sense, and build up rule_sources lists as
+ # needed. Not all generators will need to use the rule_sources lists, but
+ # some may, and it seems best to build the list in a common spot.
+ # Also validate actions and run_as elements in targets.
+ for target in flat_list:
+ target_dict = targets[target]
+ build_file = gyp.common.BuildFile(target)
+ ValidateTargetType(target, target_dict)
+ ValidateSourcesInTarget(target, target_dict, build_file,
+ duplicate_basename_check)
+ ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
+ ValidateRunAsInTarget(target, target_dict, build_file)
+ ValidateActionsInTarget(target, target_dict, build_file)
+
+ # Generators might not expect ints. Turn them into strs.
+ TurnIntIntoStrInDict(data)
+
+ # TODO(mark): Return |data| for now because the generator needs a list of
+ # build files that came in. In the future, maybe it should just accept
+ # a list, and not the whole data dict.
+ return [flat_list, targets, data]
diff --git a/third_party/python/gyp/pylib/gyp/input_test.py b/third_party/python/gyp/pylib/gyp/input_test.py
new file mode 100755
index 0000000000..6c4b1cc526
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/input_test.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the input.py file."""
+
+import gyp.input
+import unittest
+import sys
+
+
+class TestFindCycles(unittest.TestCase):
+ def setUp(self):
+ self.nodes = {}
+ for x in ('a', 'b', 'c', 'd', 'e'):
+ self.nodes[x] = gyp.input.DependencyGraphNode(x)
+
+ def _create_dependency(self, dependent, dependency):
+ dependent.dependencies.append(dependency)
+ dependency.dependents.append(dependent)
+
+ def test_no_cycle_empty_graph(self):
+ for label, node in self.nodes.items():
+ self.assertEquals([], node.FindCycles())
+
+ def test_no_cycle_line(self):
+ self._create_dependency(self.nodes['a'], self.nodes['b'])
+ self._create_dependency(self.nodes['b'], self.nodes['c'])
+ self._create_dependency(self.nodes['c'], self.nodes['d'])
+
+ for label, node in self.nodes.items():
+ self.assertEquals([], node.FindCycles())
+
+ def test_no_cycle_dag(self):
+ self._create_dependency(self.nodes['a'], self.nodes['b'])
+ self._create_dependency(self.nodes['a'], self.nodes['c'])
+ self._create_dependency(self.nodes['b'], self.nodes['c'])
+
+ for label, node in self.nodes.items():
+ self.assertEquals([], node.FindCycles())
+
+ def test_cycle_self_reference(self):
+ self._create_dependency(self.nodes['a'], self.nodes['a'])
+
+ self.assertEquals([[self.nodes['a'], self.nodes['a']]],
+ self.nodes['a'].FindCycles())
+
+ def test_cycle_two_nodes(self):
+ self._create_dependency(self.nodes['a'], self.nodes['b'])
+ self._create_dependency(self.nodes['b'], self.nodes['a'])
+
+ self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
+ self.nodes['a'].FindCycles())
+ self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
+ self.nodes['b'].FindCycles())
+
+ def test_two_cycles(self):
+ self._create_dependency(self.nodes['a'], self.nodes['b'])
+ self._create_dependency(self.nodes['b'], self.nodes['a'])
+
+ self._create_dependency(self.nodes['b'], self.nodes['c'])
+ self._create_dependency(self.nodes['c'], self.nodes['b'])
+
+ cycles = self.nodes['a'].FindCycles()
+ self.assertTrue(
+ [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
+ self.assertTrue(
+ [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
+ self.assertEquals(2, len(cycles))
+
+ def test_big_cycle(self):
+ self._create_dependency(self.nodes['a'], self.nodes['b'])
+ self._create_dependency(self.nodes['b'], self.nodes['c'])
+ self._create_dependency(self.nodes['c'], self.nodes['d'])
+ self._create_dependency(self.nodes['d'], self.nodes['e'])
+ self._create_dependency(self.nodes['e'], self.nodes['a'])
+
+ self.assertEquals([[self.nodes['a'],
+ self.nodes['b'],
+ self.nodes['c'],
+ self.nodes['d'],
+ self.nodes['e'],
+ self.nodes['a']]],
+ self.nodes['a'].FindCycles())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/gyp/pylib/gyp/mac_tool.py b/third_party/python/gyp/pylib/gyp/mac_tool.py
new file mode 100755
index 0000000000..64d21063ff
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/mac_tool.py
@@ -0,0 +1,721 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions to perform Xcode-style build steps.
+
+These functions are executed via gyp-mac-tool when using the Makefile generator.
+"""
+
+from __future__ import print_function
+
+import fcntl
+import fnmatch
+import glob
+import json
+import os
+import plistlib
+import re
+import shutil
+import struct
+import subprocess
+import sys
+import tempfile
+
+
+def main(args):
+ executor = MacTool()
+ exit_code = executor.Dispatch(args)
+ if exit_code is not None:
+ sys.exit(exit_code)
+
+
+class MacTool(object):
+ """This class performs all the Mac tooling steps. The methods can either be
+ executed directly, or dispatched from an argument list."""
+
+ def Dispatch(self, args):
+ """Dispatches a string command to a method."""
+ if len(args) < 1:
+ raise Exception("Not enough arguments")
+
+ method = "Exec%s" % self._CommandifyName(args[0])
+ return getattr(self, method)(*args[1:])
+
+ def _CommandifyName(self, name_string):
+ """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
+ return name_string.title().replace('-', '')
+
+ def ExecCopyBundleResource(self, source, dest, convert_to_binary):
+ """Copies a resource file to the bundle/Resources directory, performing any
+ necessary compilation on each resource."""
+ convert_to_binary = convert_to_binary == 'True'
+ extension = os.path.splitext(source)[1].lower()
+ if os.path.isdir(source):
+ # Copy tree.
+ # TODO(thakis): This copies file attributes like mtime, while the
+ # single-file branch below doesn't. This should probably be changed to
+ # be consistent with the single-file branch.
+ if os.path.exists(dest):
+ shutil.rmtree(dest)
+ shutil.copytree(source, dest)
+ elif extension == '.xib':
+ return self._CopyXIBFile(source, dest)
+ elif extension == '.storyboard':
+ return self._CopyXIBFile(source, dest)
+ elif extension == '.strings' and not convert_to_binary:
+ self._CopyStringsFile(source, dest)
+ else:
+ if os.path.exists(dest):
+ os.unlink(dest)
+ shutil.copy(source, dest)
+
+ if convert_to_binary and extension in ('.plist', '.strings'):
+ self._ConvertToBinary(dest)
+
+ def _CopyXIBFile(self, source, dest):
+ """Compiles a XIB file with ibtool into a binary plist in the bundle."""
+
+ # ibtool sometimes crashes with relative paths. See crbug.com/314728.
+ base = os.path.dirname(os.path.realpath(__file__))
+ if os.path.relpath(source):
+ source = os.path.join(base, source)
+ if os.path.relpath(dest):
+ dest = os.path.join(base, dest)
+
+ args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices']
+
+ if os.environ['XCODE_VERSION_ACTUAL'] > '0700':
+ args.extend(['--auto-activate-custom-fonts'])
+ if 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ:
+ args.extend([
+ '--target-device', 'iphone', '--target-device', 'ipad',
+ '--minimum-deployment-target',
+ os.environ['IPHONEOS_DEPLOYMENT_TARGET'],
+ ])
+ else:
+ args.extend([
+ '--target-device', 'mac',
+ '--minimum-deployment-target',
+ os.environ['MACOSX_DEPLOYMENT_TARGET'],
+ ])
+
+ args.extend(['--output-format', 'human-readable-text', '--compile', dest,
+ source])
+
+ ibtool_section_re = re.compile(r'/\*.*\*/')
+ ibtool_re = re.compile(r'.*note:.*is clipping its content')
+ try:
+ stdout = subprocess.check_output(args)
+ except subprocess.CalledProcessError as e:
+ print(e.output)
+ raise
+ current_section_header = None
+ for line in stdout.splitlines():
+ line_decoded = line.decode('utf-8')
+ if ibtool_section_re.match(line_decoded):
+ current_section_header = line_decoded
+ elif not ibtool_re.match(line_decoded):
+ if current_section_header:
+ print(current_section_header)
+ current_section_header = None
+ print(line_decoded)
+ return 0
+
+ def _ConvertToBinary(self, dest):
+ subprocess.check_call([
+ 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
+
+ def _CopyStringsFile(self, source, dest):
+ """Copies a .strings file using iconv to reconvert the input into UTF-16."""
+ input_code = self._DetectInputEncoding(source) or "UTF-8"
+
+ # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
+ # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
+ # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
+ # semicolon in dictionary.
+ # on invalid files. Do the same kind of validation.
+ import CoreFoundation
+ s = open(source, 'rb').read()
+ d = CoreFoundation.CFDataCreate(None, s, len(s))
+ _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
+ if error:
+ return
+
+ fp = open(dest, 'wb')
+ fp.write(s.decode(input_code).encode('UTF-16'))
+ fp.close()
+
+ def _DetectInputEncoding(self, file_name):
+ """Reads the first few bytes from file_name and tries to guess the text
+ encoding. Returns None as a guess if it can't detect it."""
+ fp = open(file_name, 'rb')
+ try:
+ header = fp.read(3)
+ except:
+ fp.close()
+ return None
+ fp.close()
+ if header.startswith(b"\xFE\xFF"):
+ return "UTF-16"
+ elif header.startswith(b"\xFF\xFE"):
+ return "UTF-16"
+ elif header.startswith(b"\xEF\xBB\xBF"):
+ return "UTF-8"
+ else:
+ return None
+
+ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
+ """Copies the |source| Info.plist to the destination directory |dest|."""
+ # Read the source Info.plist into memory.
+ fd = open(source, 'r')
+ lines = fd.read()
+ fd.close()
+
+ # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
+ plist = plistlib.readPlistFromString(lines)
+ if keys:
+ plist.update(json.loads(keys[0]))
+ lines = plistlib.writePlistToString(plist)
+
+ # Go through all the environment variables and replace them as variables in
+ # the file.
+ IDENT_RE = re.compile(r'[_/\s]')
+ for key in os.environ:
+ if key.startswith('_'):
+ continue
+ evar = '${%s}' % key
+ evalue = os.environ[key]
+ lines = lines.replace(evar, evalue)
+
+      # Xcode supports various suffixes on environment variables, which are
+ # all undocumented. :rfc1034identifier is used in the standard project
+ # template these days, and :identifier was used earlier. They are used to
+ # convert non-url characters into things that look like valid urls --
+ # except that the replacement character for :identifier, '_' isn't valid
+ # in a URL either -- oops, hence :rfc1034identifier was born.
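+      # For example, with PRODUCT_NAME='My App' the ${PRODUCT_NAME:identifier}
+      # form expands to 'My_App' and ${PRODUCT_NAME:rfc1034identifier} to
+      # 'My-App' (illustrative value; every environment variable is handled
+      # the same way below).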
+ evar = '${%s:identifier}' % key
+ evalue = IDENT_RE.sub('_', os.environ[key])
+ lines = lines.replace(evar, evalue)
+
+ evar = '${%s:rfc1034identifier}' % key
+ evalue = IDENT_RE.sub('-', os.environ[key])
+ lines = lines.replace(evar, evalue)
+
+ # Remove any keys with values that haven't been replaced.
+ lines = lines.split('\n')
+ for i in range(len(lines)):
+ if lines[i].strip().startswith("<string>${"):
+ lines[i] = None
+ lines[i - 1] = None
+ lines = '\n'.join(filter(lambda x: x is not None, lines))
+
+ # Write out the file with variables replaced.
+ fd = open(dest, 'w')
+ fd.write(lines)
+ fd.close()
+
+    # Now that the Info.plist file has been "compiled", write out the PkgInfo
+    # file.
+ self._WritePkgInfo(dest)
+
+ if convert_to_binary == 'True':
+ self._ConvertToBinary(dest)
+
+ def _WritePkgInfo(self, info_plist):
+ """This writes the PkgInfo file from the data stored in Info.plist."""
+ plist = plistlib.readPlist(info_plist)
+ if not plist:
+ return
+
+ # Only create PkgInfo for executable types.
+ package_type = plist['CFBundlePackageType']
+ if package_type != 'APPL':
+ return
+
+ # The format of PkgInfo is eight characters, representing the bundle type
+ # and bundle signature, each four characters. If that is missing, four
+ # '?' characters are used instead.
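+    # For example, an application bundle declaring CFBundlePackageType 'APPL'
+    # and CFBundleSignature 'hume' gets a PkgInfo containing exactly
+    # 'APPLhume' ('hume' is an illustrative signature value).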
+ signature_code = plist.get('CFBundleSignature', '????')
+ if len(signature_code) != 4: # Wrong length resets everything, too.
+ signature_code = '?' * 4
+
+ dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
+ fp = open(dest, 'w')
+ fp.write('%s%s' % (package_type, signature_code))
+ fp.close()
+
+ def ExecFlock(self, lockfile, *cmd_list):
+ """Emulates the most basic behavior of Linux's flock(1)."""
+ # Rely on exception handling to report errors.
+ fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
+ fcntl.flock(fd, fcntl.LOCK_EX)
+ return subprocess.call(cmd_list)
+
+ def ExecFilterLibtool(self, *cmd_list):
+ """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
+ symbols'."""
+ libtool_re = re.compile(r'^.*libtool: (?:for architecture: \S* )?'
+ r'file: .* has no symbols$')
+ libtool_re5 = re.compile(
+ r'^.*libtool: warning for library: ' +
+ r'.* the table of contents is empty ' +
+ r'\(no object file members in the library define global symbols\)$')
+ env = os.environ.copy()
+ # Ref:
+ # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
+ # The problem with this flag is that it resets the file mtime on the file to
+ # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
+ env['ZERO_AR_DATE'] = '1'
+ libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
+ _, err = libtoolout.communicate()
+ for line in err.splitlines():
+ line_decoded = line.decode('utf-8')
+ if not libtool_re.match(line_decoded) and not libtool_re5.match(line_decoded):
+ print(line_decoded, file=sys.stderr)
+ # Unconditionally touch the output .a file on the command line if present
+ # and the command succeeded. A bit hacky.
+ if not libtoolout.returncode:
+ for i in range(len(cmd_list) - 1):
+ if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
+ os.utime(cmd_list[i+1], None)
+ break
+ return libtoolout.returncode
+
+ def ExecPackageIosFramework(self, framework):
+ # Find the name of the binary based on the part before the ".framework".
+ binary = os.path.basename(framework).split('.')[0]
+ module_path = os.path.join(framework, 'Modules');
+ if not os.path.exists(module_path):
+ os.mkdir(module_path)
+ module_template = 'framework module %s {\n' \
+ ' umbrella header "%s.h"\n' \
+ '\n' \
+ ' export *\n' \
+ ' module * { export * }\n' \
+ '}\n' % (binary, binary)
+
+ module_file = open(os.path.join(module_path, 'module.modulemap'), "w")
+ module_file.write(module_template)
+ module_file.close()
+
+ def ExecPackageFramework(self, framework, version):
+ """Takes a path to Something.framework and the Current version of that and
+ sets up all the symlinks."""
+ # Find the name of the binary based on the part before the ".framework".
+ binary = os.path.basename(framework).split('.')[0]
+
+ CURRENT = 'Current'
+ RESOURCES = 'Resources'
+ VERSIONS = 'Versions'
+
+ if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
+ # Binary-less frameworks don't seem to contain symlinks (see e.g.
+ # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
+ return
+
+ # Move into the framework directory to set the symlinks correctly.
+ pwd = os.getcwd()
+ os.chdir(framework)
+
+ # Set up the Current version.
+ self._Relink(version, os.path.join(VERSIONS, CURRENT))
+
+ # Set up the root symlinks.
+ self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
+ self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
+
+ # Back to where we were before!
+ os.chdir(pwd)
+
+ def _Relink(self, dest, link):
+ """Creates a symlink to |dest| named |link|. If |link| already exists,
+ it is overwritten."""
+ if os.path.lexists(link):
+ os.remove(link)
+ os.symlink(dest, link)
+
+ def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
+ framework_name = os.path.basename(framework).split('.')[0]
+ all_headers = map(os.path.abspath, all_headers)
+ filelist = {}
+ for header in all_headers:
+ filename = os.path.basename(header)
+ filelist[filename] = header
+ filelist[os.path.join(framework_name, filename)] = header
+ WriteHmap(out, filelist)
+
+ def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
+ header_path = os.path.join(framework, 'Headers');
+ if not os.path.exists(header_path):
+ os.makedirs(header_path)
+ for header in copy_headers:
+ shutil.copy(header, os.path.join(header_path, os.path.basename(header)))
+
+ def ExecCompileXcassets(self, keys, *inputs):
+ """Compiles multiple .xcassets files into a single .car file.
+
+    This invokes 'actool' to compile all the input .xcassets files. The
+    |keys| argument is a JSON-encoded dictionary of extra arguments to
+    pass to 'actool' when the asset catalogs contain an application icon
+    or a launch image.
+
+    Note that 'actool' does not create the Assets.car file if the asset
+    catalogs do not contain any imageset.
+ """
+ command_line = [
+ 'xcrun', 'actool', '--output-format', 'human-readable-text',
+ '--compress-pngs', '--notices', '--warnings', '--errors',
+ ]
+ is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
+ if is_iphone_target:
+ platform = os.environ['CONFIGURATION'].split('-')[-1]
+ if platform not in ('iphoneos', 'iphonesimulator'):
+ platform = 'iphonesimulator'
+ command_line.extend([
+ '--platform', platform, '--target-device', 'iphone',
+ '--target-device', 'ipad', '--minimum-deployment-target',
+ os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
+ os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
+ ])
+ else:
+ command_line.extend([
+ '--platform', 'macosx', '--target-device', 'mac',
+ '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
+ '--compile',
+ os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
+ ])
+ if keys:
+ keys = json.loads(keys)
+ for key, value in keys.items():
+ arg_name = '--' + key
+ if isinstance(value, bool):
+ if value:
+ command_line.append(arg_name)
+ elif isinstance(value, list):
+ for v in value:
+ command_line.append(arg_name)
+ command_line.append(str(v))
+ else:
+ command_line.append(arg_name)
+ command_line.append(str(value))
+ # Note: actool crashes if inputs path are relative, so use os.path.abspath
+ # to get absolute path name for inputs.
+ command_line.extend(map(os.path.abspath, inputs))
+ subprocess.check_call(command_line)
+
+ def ExecMergeInfoPlist(self, output, *inputs):
+ """Merge multiple .plist files into a single .plist file."""
+ merged_plist = {}
+ for path in inputs:
+ plist = self._LoadPlistMaybeBinary(path)
+ self._MergePlist(merged_plist, plist)
+ plistlib.writePlist(merged_plist, output)
+
+ def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
+ """Code sign a bundle.
+
+ This function tries to code sign an iOS bundle, following the same
+ algorithm as Xcode:
+    1. pick the provisioning profile that best matches the bundle identifier,
+ and copy it into the bundle as embedded.mobileprovision,
+ 2. copy Entitlements.plist from user or SDK next to the bundle,
+ 3. code sign the bundle.
+ """
+ substitutions, overrides = self._InstallProvisioningProfile(
+ provisioning, self._GetCFBundleIdentifier())
+ entitlements_path = self._InstallEntitlements(
+ entitlements, substitutions, overrides)
+
+ args = ['codesign', '--force', '--sign', key]
+ if preserve == 'True':
+ args.extend(['--deep', '--preserve-metadata=identifier,entitlements'])
+ else:
+ args.extend(['--entitlements', entitlements_path])
+ args.extend(['--timestamp=none', path])
+ subprocess.check_call(args)
+
+ def _InstallProvisioningProfile(self, profile, bundle_identifier):
+ """Installs embedded.mobileprovision into the bundle.
+
+ Args:
+      profile: string, optional, short name of the .mobileprovision file
+        to use; if empty or the file is missing, the best installed file
+        will be used
+ bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+
+ Returns:
+      A tuple containing two dictionaries: the variable substitutions and the
+      values to override when generating the entitlements file.
+ """
+ source_path, provisioning_data, team_id = self._FindProvisioningProfile(
+ profile, bundle_identifier)
+ target_path = os.path.join(
+ os.environ['BUILT_PRODUCTS_DIR'],
+ os.environ['CONTENTS_FOLDER_PATH'],
+ 'embedded.mobileprovision')
+ shutil.copy2(source_path, target_path)
+ substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
+ return substitutions, provisioning_data['Entitlements']
+
+ def _FindProvisioningProfile(self, profile, bundle_identifier):
+ """Finds the .mobileprovision file to use for signing the bundle.
+
+    Checks all the installed provisioning profiles (or, if the user specified
+    the PROVISIONING_PROFILE variable, only that one) and selects the most
+    specific one that corresponds to the bundle identifier.
+
+ Args:
+      profile: string, optional, short name of the .mobileprovision file
+        to use; if empty or the file is missing, the best installed file
+        will be used
+ bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+
+ Returns:
+ A tuple of the path to the selected provisioning profile, the data of
+ the embedded plist in the provisioning profile and the team identifier
+ to use for code signing.
+
+ Raises:
+ SystemExit: if no .mobileprovision can be used to sign the bundle.
+ """
+ profiles_dir = os.path.join(
+ os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
+ if not os.path.isdir(profiles_dir):
+ print((
+ 'cannot find mobile provisioning for %s' % bundle_identifier),
+ file=sys.stderr)
+ sys.exit(1)
+ provisioning_profiles = None
+ if profile:
+ profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
+ if os.path.exists(profile_path):
+ provisioning_profiles = [profile_path]
+ if not provisioning_profiles:
+ provisioning_profiles = glob.glob(
+ os.path.join(profiles_dir, '*.mobileprovision'))
+ valid_provisioning_profiles = {}
+ for profile_path in provisioning_profiles:
+ profile_data = self._LoadProvisioningProfile(profile_path)
+ app_id_pattern = profile_data.get(
+ 'Entitlements', {}).get('application-identifier', '')
+ for team_identifier in profile_data.get('TeamIdentifier', []):
+ app_id = '%s.%s' % (team_identifier, bundle_identifier)
+ if fnmatch.fnmatch(app_id, app_id_pattern):
+ valid_provisioning_profiles[app_id_pattern] = (
+ profile_path, profile_data, team_identifier)
+ if not valid_provisioning_profiles:
+ print((
+ 'cannot find mobile provisioning for %s' % bundle_identifier),
+ file=sys.stderr)
+ sys.exit(1)
+ # If the user has multiple provisioning profiles installed that can be
+ # used for ${bundle_identifier}, pick the most specific one (ie. the
+ # provisioning profile whose pattern is the longest).
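+    # For example, if both 'ABCDEFGHIJ.*' and 'ABCDEFGHIJ.com.example.app'
+    # match, the longer (more specific) pattern wins; the team prefix and
+    # bundle id are illustrative.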
+ selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
+ return valid_provisioning_profiles[selected_key]
+
+ def _LoadProvisioningProfile(self, profile_path):
+ """Extracts the plist embedded in a provisioning profile.
+
+ Args:
+ profile_path: string, path to the .mobileprovision file
+
+ Returns:
+ Content of the plist embedded in the provisioning profile as a dictionary.
+ """
+ with tempfile.NamedTemporaryFile() as temp:
+ subprocess.check_call([
+ 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
+ return self._LoadPlistMaybeBinary(temp.name)
+
+ def _MergePlist(self, merged_plist, plist):
+ """Merge |plist| into |merged_plist|."""
+ for key, value in plist.items():
+ if isinstance(value, dict):
+ merged_value = merged_plist.get(key, {})
+ if isinstance(merged_value, dict):
+ self._MergePlist(merged_value, value)
+ merged_plist[key] = merged_value
+ else:
+ merged_plist[key] = value
+ else:
+ merged_plist[key] = value
+
+ def _LoadPlistMaybeBinary(self, plist_path):
+ """Loads into a memory a plist possibly encoded in binary format.
+
+ This is a wrapper around plistlib.readPlist that tries to convert the
+ plist to the XML format if it can't be parsed (assuming that it is in
+ the binary format).
+
+ Args:
+ plist_path: string, path to a plist file, in XML or binary format
+
+ Returns:
+ Content of the plist as a dictionary.
+ """
+ try:
+ # First, try to read the file using plistlib that only supports XML,
+ # and if an exception is raised, convert a temporary copy to XML and
+ # load that copy.
+ return plistlib.readPlist(plist_path)
+ except:
+ pass
+ with tempfile.NamedTemporaryFile() as temp:
+ shutil.copy2(plist_path, temp.name)
+ subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
+ return plistlib.readPlist(temp.name)
+
+ def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
+ """Constructs a dictionary of variable substitutions for Entitlements.plist.
+
+ Args:
+ bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+ app_identifier_prefix: string, value for AppIdentifierPrefix
+
+ Returns:
+ Dictionary of substitutions to apply when generating Entitlements.plist.
+ """
+ return {
+ 'CFBundleIdentifier': bundle_identifier,
+ 'AppIdentifierPrefix': app_identifier_prefix,
+ }
+
+ def _GetCFBundleIdentifier(self):
+ """Extracts CFBundleIdentifier value from Info.plist in the bundle.
+
+ Returns:
+ Value of CFBundleIdentifier in the Info.plist located in the bundle.
+ """
+ info_plist_path = os.path.join(
+ os.environ['TARGET_BUILD_DIR'],
+ os.environ['INFOPLIST_PATH'])
+ info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
+ return info_plist_data['CFBundleIdentifier']
+
+ def _InstallEntitlements(self, entitlements, substitutions, overrides):
+ """Generates and install the ${BundleName}.xcent entitlements file.
+
+    Expands "$(variable)" patterns in the source entitlements file, adds the
+    extra entitlements defined in the .mobileprovision file and copies the
+    generated plist to "${BundlePath}.xcent".
+
+ Args:
+ entitlements: string, optional, path to the Entitlements.plist template
+ to use, defaults to "${SDKROOT}/Entitlements.plist"
+ substitutions: dictionary, variable substitutions
+ overrides: dictionary, values to add to the entitlements
+
+ Returns:
+ Path to the generated entitlements file.
+ """
+ source_path = entitlements
+ target_path = os.path.join(
+ os.environ['BUILT_PRODUCTS_DIR'],
+ os.environ['PRODUCT_NAME'] + '.xcent')
+ if not source_path:
+ source_path = os.path.join(
+ os.environ['SDKROOT'],
+ 'Entitlements.plist')
+ shutil.copy2(source_path, target_path)
+ data = self._LoadPlistMaybeBinary(target_path)
+ data = self._ExpandVariables(data, substitutions)
+ if overrides:
+ for key in overrides:
+ if key not in data:
+ data[key] = overrides[key]
+ plistlib.writePlist(data, target_path)
+ return target_path
+
+ def _ExpandVariables(self, data, substitutions):
+ """Expands variables "$(variable)" in data.
+
+ Args:
+ data: object, can be either string, list or dictionary
+ substitutions: dictionary, variable substitutions to perform
+
+ Returns:
+      Copy of data where each reference to "$(variable)" has been replaced
+      by the corresponding value found in substitutions, or left intact if
+      the key was not found.
+ """
+ if isinstance(data, str):
+ for key, value in substitutions.items():
+ data = data.replace('$(%s)' % key, value)
+ return data
+ if isinstance(data, list):
+ return [self._ExpandVariables(v, substitutions) for v in data]
+ if isinstance(data, dict):
+ return {k: self._ExpandVariables(data[k], substitutions) for k in data}
+ return data
+
+def NextGreaterPowerOf2(x):
+ return 2**(x).bit_length()
+
+def WriteHmap(output_name, filelist):
+ """Generates a header map based on |filelist|.
+
+ Per Mark Mentovai:
+ A header map is structured essentially as a hash table, keyed by names used
+ in #includes, and providing pathnames to the actual files.
+
+ The implementation below and the comment above comes from inspecting:
+ http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
+ while also looking at the implementation in clang in:
+ https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
+ """
+ magic = 1751998832
+ version = 1
+ _reserved = 0
+ count = len(filelist)
+ capacity = NextGreaterPowerOf2(count)
+ strings_offset = 24 + (12 * capacity)
+ max_value_length = len(max(filelist.items(), key=lambda t: len(t[1]))[1])
+
+ out = open(output_name, "wb")
+ out.write(struct.pack('<LHHLLLL', magic, version, _reserved, strings_offset,
+ count, capacity, max_value_length))
+
+ # Create empty hashmap buckets.
+ buckets = [None] * capacity
+ for file, path in filelist.items():
+ key = 0
+ for c in file:
+ key += ord(c.lower()) * 13
+
+ # Fill next empty bucket.
+ while buckets[key & capacity - 1] is not None:
+ key = key + 1
+ buckets[key & capacity - 1] = (file, path)
+
+ next_offset = 1
+ for bucket in buckets:
+ if bucket is None:
+ out.write(struct.pack('<LLL', 0, 0, 0))
+ else:
+ (file, path) = bucket
+ key_offset = next_offset
+ prefix_offset = key_offset + len(file) + 1
+ suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
+ next_offset = suffix_offset + len(os.path.basename(path)) + 1
+ out.write(struct.pack('<LLL', key_offset, prefix_offset, suffix_offset))
+
+ # Pad byte since next offset starts at 1.
+ out.write(struct.pack('<x'))
+
+ for bucket in buckets:
+ if bucket is not None:
+ (file, path) = bucket
+ out.write(struct.pack('<%ds' % len(file), file))
+ out.write(struct.pack('<s', '\0'))
+ base = os.path.dirname(path) + os.sep
+ out.write(struct.pack('<%ds' % len(base), base))
+ out.write(struct.pack('<s', '\0'))
+ path = os.path.basename(path)
+ out.write(struct.pack('<%ds' % len(path), path))
+ out.write(struct.pack('<s', '\0'))
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/third_party/python/gyp/pylib/gyp/msvs_emulation.py b/third_party/python/gyp/pylib/gyp/msvs_emulation.py
new file mode 100644
index 0000000000..63d40e63ec
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/msvs_emulation.py
@@ -0,0 +1,1112 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This module helps emulate Visual Studio 2008 behavior on top of other
+build systems, primarily ninja.
+"""
+
+import collections
+import os
+import re
+import subprocess
+import sys
+
+from gyp.common import OrderedSet
+import gyp.MSVSUtil
+import gyp.MSVSVersion
+
+try:
+ # basestring was removed in python3.
+ basestring
+except NameError:
+ basestring = str
+
+
+windows_quoter_regex = re.compile(r'(\\*)"')
+
+
+def QuoteForRspFile(arg):
+ """Quote a command line argument so that it appears as one argument when
+ processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
+ Windows programs)."""
+ # See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
+  # threads. These are actually the quoting rules for CommandLineToArgvW, not
+  # for the shell, because the shell doesn't do any parsing on Windows. This
+ # works more or less because most programs (including the compiler, etc.)
+ # use that function to handle command line arguments.
+
+ # Use a heuristic to try to find args that are paths, and normalize them
+ if arg.find('/') > 0 or arg.count('/') > 1:
+ arg = os.path.normpath(arg)
+
+ # For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
+ # preceding it, and results in n backslashes + the quote. So we substitute
+ # in 2* what we match, +1 more, plus the quote.
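+  # For example, an argument containing   foo\"bar   becomes   foo\\\"bar
+  # after this substitution, before the surrounding quotes are added below
+  # (illustrative input, not taken from a real command line).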
+ arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
+
+  # %'s also need to be doubled, otherwise they're interpreted as batch
+  # positional arguments. Escaping them this way ensures they pass through
+  # literally and end up as the single original % again. Otherwise, trying to
+  # pass a literal representation that looks like an environment variable to
+  # the shell (e.g. %PATH%) would fail.
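+  # For example, %PATH% is written out as %%PATH%% so that the consumer sees
+  # the literal %PATH% rather than a positional substitution.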
+ arg = arg.replace('%', '%%')
+
+ # These commands are used in rsp files, so no escaping for the shell (via ^)
+ # is necessary.
+
+ # Finally, wrap the whole thing in quotes so that the above quote rule
+ # applies and whitespace isn't a word break.
+ return '"' + arg + '"'
+
+
+def EncodeRspFileList(args):
+ """Process a list of arguments using QuoteCmdExeArgument."""
+ # Note that the first argument is assumed to be the command. Don't add
+ # quotes around it because then built-ins like 'echo', etc. won't work.
+ # Take care to normpath only the path in the case of 'call ../x.bat' because
+ # otherwise the whole thing is incorrectly interpreted as a path and not
+ # normalized correctly.
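+  # For example, on Windows ['call ../foo.bat', 'a b'] is encoded as
+  # 'call ..\foo.bat "a b"' (illustrative arguments).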
+ if not args: return ''
+ if args[0].startswith('call '):
+ call, program = args[0].split(' ', 1)
+ program = call + ' ' + os.path.normpath(program)
+ else:
+ program = os.path.normpath(args[0])
+ return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:])
+
+
+def _GenericRetrieve(root, default, path):
+ """Given a list of dictionary keys |path| and a tree of dicts |root|, find
+ value at path, or return |default| if any of the path doesn't exist."""
+ if not root:
+ return default
+ if not path:
+ return root
+ return _GenericRetrieve(root.get(path[0]), default, path[1:])
+
+
+def _AddPrefix(element, prefix):
+ """Add |prefix| to |element| or each subelement if element is iterable."""
+ if element is None:
+ return element
+ if (isinstance(element, collections.Iterable) and
+ not isinstance(element, basestring)):
+ return [prefix + e for e in element]
+ else:
+ return prefix + element
+
+
+def _DoRemapping(element, map):
+ """If |element| then remap it through |map|. If |element| is iterable then
+ each item will be remapped. Any elements not found will be removed."""
+ if map is not None and element is not None:
+ if not callable(map):
+ map = map.get # Assume it's a dict, otherwise a callable to do the remap.
+ if (isinstance(element, collections.Iterable) and
+ not isinstance(element, basestring)):
+ element = filter(None, [map(elem) for elem in element])
+ else:
+ element = map(element)
+ return element
+
+
+def _AppendOrReturn(append, element):
+ """If |append| is None, simply return |element|. If |append| is not None,
+ then add |element| to it, adding each item in |element| if it's a list or
+ tuple."""
+ if append is not None and element is not None:
+ if (isinstance(element, collections.Iterable) and
+ not isinstance(element, basestring)):
+ append.extend(element)
+ else:
+ append.append(element)
+ else:
+ return element
+
+
+def _FindDirectXInstallation():
+ """Try to find an installation location for the DirectX SDK. Check for the
+ standard environment variable, and if that doesn't exist, try to find
+ via the registry. May return None if not found in either location."""
+ # Return previously calculated value, if there is one
+ if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
+ return _FindDirectXInstallation.dxsdk_dir
+
+ dxsdk_dir = os.environ.get('DXSDK_DIR')
+ if not dxsdk_dir:
+ # Setup params to pass to and attempt to launch reg.exe.
+ cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ for line in p.communicate()[0].splitlines():
+ if 'InstallPath' in line:
+ dxsdk_dir = line.split(' ')[3] + "\\"
+
+ # Cache return value
+ _FindDirectXInstallation.dxsdk_dir = dxsdk_dir
+ return dxsdk_dir
+
+
+def GetGlobalVSMacroEnv(vs_version):
+ """Get a dict of variables mapping internal VS macro names to their gyp
+ equivalents. Returns all variables that are independent of the target."""
+ env = {}
+ # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
+ # Visual Studio is actually installed.
+ if vs_version.Path():
+ env['$(VSInstallDir)'] = vs_version.Path()
+ env['$(VCInstallDir)'] = os.path.join(vs_version.Path(), 'VC') + '\\'
+ # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
+ # set. This happens when the SDK is sync'd via src-internal, rather than
+ # by typical end-user installation of the SDK. If it's not set, we don't
+ # want to leave the unexpanded variable in the path, so simply strip it.
+ dxsdk_dir = _FindDirectXInstallation()
+ env['$(DXSDK_DIR)'] = dxsdk_dir if dxsdk_dir else ''
+ # Try to find an installation location for the Windows DDK by checking
+ # the WDK_DIR environment variable, may be None.
+ env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
+ return env
+
+def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
+ """Finds msvs_system_include_dirs that are common to all targets, removes
+ them from all targets, and returns an OrderedSet containing them."""
+ all_system_includes = OrderedSet(
+ configs[0].get('msvs_system_include_dirs', []))
+ for config in configs[1:]:
+ system_includes = config.get('msvs_system_include_dirs', [])
+ all_system_includes = all_system_includes & OrderedSet(system_includes)
+ if not all_system_includes:
+ return None
+ # Expand macros in all_system_includes.
+ env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
+ expanded_system_includes = OrderedSet([ExpandMacros(include, env)
+ for include in all_system_includes])
+ if any(['$' in include for include in expanded_system_includes]):
+ # Some path relies on target-specific variables, bail.
+ return None
+
+ # Remove system includes shared by all targets from the targets.
+ for config in configs:
+ includes = config.get('msvs_system_include_dirs', [])
+ if includes: # Don't insert a msvs_system_include_dirs key if not needed.
+ # This must check the unexpanded includes list:
+ new_includes = [i for i in includes if i not in all_system_includes]
+ config['msvs_system_include_dirs'] = new_includes
+ return expanded_system_includes
+
+
+class MsvsSettings(object):
+ """A class that understands the gyp 'msvs_...' values (especially the
+  msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
+ class helps map those settings to command line options."""
+
+ def __init__(self, spec, generator_flags):
+ self.spec = spec
+ self.vs_version = GetVSVersion(generator_flags)
+
+ supported_fields = [
+ ('msvs_configuration_attributes', dict),
+ ('msvs_settings', dict),
+ ('msvs_system_include_dirs', list),
+ ('msvs_disabled_warnings', list),
+ ('msvs_precompiled_header', str),
+ ('msvs_precompiled_source', str),
+ ('msvs_configuration_platform', str),
+ ('msvs_target_platform', str),
+ ]
+ configs = spec['configurations']
+ for field, default in supported_fields:
+ setattr(self, field, {})
+ for configname, config in configs.items():
+ getattr(self, field)[configname] = config.get(field, default())
+
+ self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
+
+ unsupported_fields = [
+ 'msvs_prebuild',
+ 'msvs_postbuild',
+ ]
+ unsupported = []
+ for field in unsupported_fields:
+ for config in configs.values():
+ if field in config:
+ unsupported += ["%s not supported (target %s)." %
+ (field, spec['target_name'])]
+ if unsupported:
+ raise Exception('\n'.join(unsupported))
+
+ def GetExtension(self):
+ """Returns the extension for the target, with no leading dot.
+
+ Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
+ the target type.
+ """
+ ext = self.spec.get('product_extension', None)
+ if ext:
+ return ext
+ return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')
+
+ def GetVSMacroEnv(self, base_to_build=None, config=None):
+ """Get a dict of variables mapping internal VS macro names to their gyp
+ equivalents."""
+ target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
+ target_name = self.spec.get('product_prefix', '') + \
+ self.spec.get('product_name', self.spec['target_name'])
+ target_dir = base_to_build + '\\' if base_to_build else ''
+ target_ext = '.' + self.GetExtension()
+ target_file_name = target_name + target_ext
+
+ replacements = {
+ '$(InputName)': '${root}',
+ '$(InputPath)': '${source}',
+ '$(IntDir)': '$!INTERMEDIATE_DIR',
+ '$(OutDir)\\': target_dir,
+ '$(PlatformName)': target_platform,
+ '$(ProjectDir)\\': '',
+ '$(ProjectName)': self.spec['target_name'],
+ '$(TargetDir)\\': target_dir,
+ '$(TargetExt)': target_ext,
+ '$(TargetFileName)': target_file_name,
+ '$(TargetName)': target_name,
+ '$(TargetPath)': os.path.join(target_dir, target_file_name),
+ }
+ replacements.update(GetGlobalVSMacroEnv(self.vs_version))
+ return replacements
+
+ def ConvertVSMacros(self, s, base_to_build=None, config=None):
+ """Convert from VS macro names to something equivalent."""
+ env = self.GetVSMacroEnv(base_to_build, config=config)
+ return ExpandMacros(s, env)
+
+ def AdjustLibraries(self, libraries):
+ """Strip -l from library if it's specified with that."""
+ libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
+ return [lib + '.lib' if not lib.lower().endswith('.lib') else lib
+ for lib in libs]
+
+ def _GetAndMunge(self, field, path, default, prefix, append, map):
+ """Retrieve a value from |field| at |path| or return |default|. If
+ |append| is specified, and the item is found, it will be appended to that
+ object instead of returned. If |map| is specified, results will be
+ remapped through |map| before being returned or appended."""
+ result = _GenericRetrieve(field, default, path)
+ result = _DoRemapping(result, map)
+ result = _AddPrefix(result, prefix)
+ return _AppendOrReturn(append, result)
+
+ class _GetWrapper(object):
+ def __init__(self, parent, field, base_path, append=None):
+ self.parent = parent
+ self.field = field
+ self.base_path = [base_path]
+ self.append = append
+ def __call__(self, name, map=None, prefix='', default=None):
+ return self.parent._GetAndMunge(self.field, self.base_path + [name],
+ default=default, prefix=prefix, append=self.append, map=map)
+
+ def GetArch(self, config):
+ """Get architecture based on msvs_configuration_platform and
+ msvs_target_platform. Returns either 'x86' or 'x64'."""
+ configuration_platform = self.msvs_configuration_platform.get(config, '')
+ platform = self.msvs_target_platform.get(config, '')
+ if not platform: # If no specific override, use the configuration's.
+ platform = configuration_platform
+ # Map from platform to architecture.
+ return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
+
+ def _TargetConfig(self, config):
+ """Returns the target-specific configuration."""
+    # There are two levels of architecture/platform specification in VS. The
+    # first level is global to the configuration (this is what we consider
+    # "the" config at the gyp level, which will be something like 'Debug' or
+    # 'Release'); VS2015 and later only use this level.
+ if self.vs_version.short_name >= 2015:
+ return config
+    # The second level is a target-specific configuration, which overrides the
+    # global one. |config| is remapped here to take into account the local
+    # target-specific overrides to the global configuration.
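+    # For example, a gyp config named 'Debug' that targets x64 is looked up
+    # here as 'Debug_x64' on pre-2015 toolchains (illustrative config name).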
+ arch = self.GetArch(config)
+ if arch == 'x64' and not config.endswith('_x64'):
+ config += '_x64'
+ if arch == 'x86' and config.endswith('_x64'):
+ config = config.rsplit('_', 1)[0]
+ return config
+
+ def _Setting(self, path, config,
+ default=None, prefix='', append=None, map=None):
+ """_GetAndMunge for msvs_settings."""
+ return self._GetAndMunge(
+ self.msvs_settings[config], path, default, prefix, append, map)
+
+ def _ConfigAttrib(self, path, config,
+ default=None, prefix='', append=None, map=None):
+ """_GetAndMunge for msvs_configuration_attributes."""
+ return self._GetAndMunge(
+ self.msvs_configuration_attributes[config],
+ path, default, prefix, append, map)
+
+ def AdjustIncludeDirs(self, include_dirs, config):
+ """Updates include_dirs to expand VS specific paths, and adds the system
+ include dirs used for platform SDK and similar."""
+ config = self._TargetConfig(config)
+ includes = include_dirs + self.msvs_system_include_dirs[config]
+ includes.extend(self._Setting(
+ ('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
+ return [self.ConvertVSMacros(p, config=config) for p in includes]
+
+ def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
+ """Updates midl_include_dirs to expand VS specific paths, and adds the
+ system include dirs used for platform SDK and similar."""
+ config = self._TargetConfig(config)
+ includes = midl_include_dirs + self.msvs_system_include_dirs[config]
+ includes.extend(self._Setting(
+ ('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
+ return [self.ConvertVSMacros(p, config=config) for p in includes]
+
+ def GetComputedDefines(self, config):
+ """Returns the set of defines that are injected to the defines list based
+ on other VS settings."""
+ config = self._TargetConfig(config)
+ defines = []
+ if self._ConfigAttrib(['CharacterSet'], config) == '1':
+ defines.extend(('_UNICODE', 'UNICODE'))
+ if self._ConfigAttrib(['CharacterSet'], config) == '2':
+ defines.append('_MBCS')
+ defines.extend(self._Setting(
+ ('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
+ return defines
+
+ def GetCompilerPdbName(self, config, expand_special):
+ """Get the pdb file name that should be used for compiler invocations, or
+ None if there's no explicit name specified."""
+ config = self._TargetConfig(config)
+ pdbname = self._Setting(
+ ('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
+ if pdbname:
+ pdbname = expand_special(self.ConvertVSMacros(pdbname))
+ return pdbname
+
+ def GetMapFileName(self, config, expand_special):
+ """Gets the explicitly overriden map file name for a target or returns None
+ if it's not set."""
+ config = self._TargetConfig(config)
+ map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
+ if map_file:
+ map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
+ return map_file
+
+ def GetOutputName(self, config, expand_special):
+ """Gets the explicitly overridden output name for a target or returns None
+ if it's not overridden."""
+ config = self._TargetConfig(config)
+ type = self.spec['type']
+ root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
+ # TODO(scottmg): Handle OutputDirectory without OutputFile.
+ output_file = self._Setting((root, 'OutputFile'), config)
+ if output_file:
+ output_file = expand_special(self.ConvertVSMacros(
+ output_file, config=config))
+ return output_file
+
+ def GetPDBName(self, config, expand_special, default):
+ """Gets the explicitly overridden pdb name for a target or returns
+ default if it's not overridden, or if no pdb will be generated."""
+ config = self._TargetConfig(config)
+ output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
+ generate_debug_info = self._Setting(
+ ('VCLinkerTool', 'GenerateDebugInformation'), config)
+ if generate_debug_info == 'true':
+ if output_file:
+ return expand_special(self.ConvertVSMacros(output_file, config=config))
+ else:
+ return default
+ else:
+ return None
+
+ def GetNoImportLibrary(self, config):
+ """If NoImportLibrary: true, ninja will not expect the output to include
+ an import library."""
+ config = self._TargetConfig(config)
+ noimplib = self._Setting(('NoImportLibrary',), config)
+ return noimplib == 'true'
+
+ def GetAsmflags(self, config):
+ """Returns the flags that need to be added to ml invocations."""
+ config = self._TargetConfig(config)
+ asmflags = []
+ safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
+ if safeseh == 'true':
+ asmflags.append('/safeseh')
+ return asmflags
+
+ def GetCflags(self, config):
+ """Returns the flags that need to be added to .c and .cc compilations."""
+ config = self._TargetConfig(config)
+ cflags = []
+ cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
+ cl = self._GetWrapper(self, self.msvs_settings[config],
+ 'VCCLCompilerTool', append=cflags)
+ cl('Optimization',
+ map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
+ cl('InlineFunctionExpansion', prefix='/Ob')
+ cl('DisableSpecificWarnings', prefix='/wd')
+ cl('StringPooling', map={'true': '/GF'})
+ cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
+ cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
+ cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
+ cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
+ cl('FloatingPointModel',
+ map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:',
+ default='0')
+ cl('CompileAsManaged', map={'false': '', 'true': '/clr'})
+ cl('WholeProgramOptimization', map={'true': '/GL'})
+ cl('WarningLevel', prefix='/W')
+ cl('WarnAsError', map={'true': '/WX'})
+ cl('CallingConvention',
+ map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G')
+ cl('DebugInformationFormat',
+ map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
+ cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
+ cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
+ cl('MinimalRebuild', map={'true': '/Gm'})
+ cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
+ cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
+ cl('RuntimeLibrary',
+ map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
+ cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
+ cl('DefaultCharIsUnsigned', map={'true': '/J'})
+ cl('TreatWChar_tAsBuiltInType',
+ map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
+ cl('EnablePREfast', map={'true': '/analyze'})
+ cl('AdditionalOptions', prefix='')
+ cl('EnableEnhancedInstructionSet',
+ map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'},
+ prefix='/arch:')
+ cflags.extend(['/FI' + f for f in self._Setting(
+ ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
+ if self.vs_version.project_version >= 12.0:
+ # New flag introduced in VS2013 (project version 12.0) Forces writes to
+ # the program database (PDB) to be serialized through MSPDBSRV.EXE.
+ # https://msdn.microsoft.com/en-us/library/dn502518.aspx
+ cflags.append('/FS')
+ # ninja handles parallelism by itself, don't have the compiler do it too.
+ cflags = [x for x in cflags if not x.startswith('/MP')]
+ return cflags
+
+ def _GetPchFlags(self, config, extension):
+ """Get the flags to be added to the cflags for precompiled header support.
+ """
+ config = self._TargetConfig(config)
+ # The PCH is only built once by a particular source file. Usage of PCH must
+ # only be for the same language (i.e. C vs. C++), so only include the pch
+ # flags when the language matches.
+ if self.msvs_precompiled_header[config]:
+ source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
+ if _LanguageMatchesForPch(source_ext, extension):
+ pch = self.msvs_precompiled_header[config]
+ pchbase = os.path.split(pch)[1]
+ return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pchbase + '.pch']
+ return []
+
+ def GetCflagsC(self, config):
+ """Returns the flags that need to be added to .c compilations."""
+ config = self._TargetConfig(config)
+ return self._GetPchFlags(config, '.c')
+
+ def GetCflagsCC(self, config):
+ """Returns the flags that need to be added to .cc compilations."""
+ config = self._TargetConfig(config)
+ return ['/TP'] + self._GetPchFlags(config, '.cc')
+
+ def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
+ """Get and normalize the list of paths in AdditionalLibraryDirectories
+ setting."""
+ config = self._TargetConfig(config)
+ libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
+ config, default=[])
+ libpaths = [os.path.normpath(
+ gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
+ for p in libpaths]
+ return ['/LIBPATH:"' + p + '"' for p in libpaths]
+
+ def GetLibFlags(self, config, gyp_to_build_path):
+ """Returns the flags that need to be added to lib commands."""
+ config = self._TargetConfig(config)
+ libflags = []
+ lib = self._GetWrapper(self, self.msvs_settings[config],
+ 'VCLibrarianTool', append=libflags)
+ libflags.extend(self._GetAdditionalLibraryDirectories(
+ 'VCLibrarianTool', config, gyp_to_build_path))
+ lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
+ lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
+ prefix='/MACHINE:')
+ lib('AdditionalOptions')
+ return libflags
+
+ def GetDefFile(self, gyp_to_build_path):
+ """Returns the .def file from sources, if any. Otherwise returns None."""
+ spec = self.spec
+ if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
+ def_files = [s for s in spec.get('sources', [])
+ if s.lower().endswith('.def')]
+ if len(def_files) == 1:
+ return gyp_to_build_path(def_files[0])
+ elif len(def_files) > 1:
+ raise Exception("Multiple .def files")
+ return None
+
+ def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
+ """.def files get implicitly converted to a ModuleDefinitionFile for the
+ linker in the VS generator. Emulate that behaviour here."""
+ def_file = self.GetDefFile(gyp_to_build_path)
+ if def_file:
+ ldflags.append('/DEF:"%s"' % def_file)
+
+ def GetPGDName(self, config, expand_special):
+ """Gets the explicitly overridden pgd name for a target or returns None
+ if it's not overridden."""
+ config = self._TargetConfig(config)
+ output_file = self._Setting(
+ ('VCLinkerTool', 'ProfileGuidedDatabase'), config)
+ if output_file:
+ output_file = expand_special(self.ConvertVSMacros(
+ output_file, config=config))
+ return output_file
+
+ def GetLdflags(self, config, gyp_to_build_path, expand_special,
+ manifest_base_name, output_name, is_executable, build_dir):
+ """Returns the flags that need to be added to link commands, and the
+ manifest files."""
+ config = self._TargetConfig(config)
+ ldflags = []
+ ld = self._GetWrapper(self, self.msvs_settings[config],
+ 'VCLinkerTool', append=ldflags)
+ self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
+ ld('GenerateDebugInformation', map={'true': '/DEBUG'})
+ ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
+ prefix='/MACHINE:')
+ ldflags.extend(self._GetAdditionalLibraryDirectories(
+ 'VCLinkerTool', config, gyp_to_build_path))
+ ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
+ ld('TreatLinkerWarningAsErrors', prefix='/WX',
+ map={'true': '', 'false': ':NO'})
+ out = self.GetOutputName(config, expand_special)
+ if out:
+ ldflags.append('/OUT:' + out)
+ pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
+ if pdb:
+ ldflags.append('/PDB:' + pdb)
+ pgd = self.GetPGDName(config, expand_special)
+ if pgd:
+ ldflags.append('/PGD:' + pgd)
+ map_file = self.GetMapFileName(config, expand_special)
+ ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
+ else '/MAP'})
+ ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
+ ld('AdditionalOptions', prefix='')
+
+ minimum_required_version = self._Setting(
+ ('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
+ if minimum_required_version:
+ minimum_required_version = ',' + minimum_required_version
+ ld('SubSystem',
+ map={'1': 'CONSOLE%s' % minimum_required_version,
+ '2': 'WINDOWS%s' % minimum_required_version},
+ prefix='/SUBSYSTEM:')
+
+ stack_reserve_size = self._Setting(
+ ('VCLinkerTool', 'StackReserveSize'), config, default='')
+ if stack_reserve_size:
+ stack_commit_size = self._Setting(
+ ('VCLinkerTool', 'StackCommitSize'), config, default='')
+ if stack_commit_size:
+ stack_commit_size = ',' + stack_commit_size
+ ldflags.append('/STACK:%s%s' % (stack_reserve_size, stack_commit_size))
+
+ ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
+ ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
+ ld('BaseAddress', prefix='/BASE:')
+ ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
+ ld('RandomizedBaseAddress',
+ map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
+ ld('DataExecutionPrevention',
+ map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
+ ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
+ ld('ForceSymbolReferences', prefix='/INCLUDE:')
+ ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
+ ld('LinkTimeCodeGeneration',
+ map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
+ '4': ':PGUPDATE'},
+ prefix='/LTCG')
+ ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
+ ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
+ ld('EntryPointSymbol', prefix='/ENTRY:')
+ ld('Profile', map={'true': '/PROFILE'})
+ ld('LargeAddressAware',
+ map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
+ # TODO(scottmg): This should sort of be somewhere else (not really a flag).
+ ld('AdditionalDependencies', prefix='')
+
+ if self.GetArch(config) == 'x86':
+ safeseh_default = 'true'
+ else:
+ safeseh_default = None
+ ld('ImageHasSafeExceptionHandlers',
+ map={'false': ':NO', 'true': ''}, prefix='/SAFESEH',
+ default=safeseh_default)
+
+ # If the base address is not specifically controlled, DYNAMICBASE should
+ # be on by default.
+ if not any('DYNAMICBASE' in flag or flag == '/FIXED' for flag in ldflags):
+ ldflags.append('/DYNAMICBASE')
+
+ # If the NXCOMPAT flag has not been specified, default to on. Despite the
+ # documentation that says this only defaults to on when the subsystem is
+ # Vista or greater (which applies to the linker), the IDE defaults it on
+ # unless it's explicitly off.
+ if not any('NXCOMPAT' in flag for flag in ldflags):
+ ldflags.append('/NXCOMPAT')
+
+ have_def_file = any(flag.startswith('/DEF:') for flag in ldflags)
+ manifest_flags, intermediate_manifest, manifest_files = \
+ self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
+ is_executable and not have_def_file, build_dir)
+ ldflags.extend(manifest_flags)
+ return ldflags, intermediate_manifest, manifest_files
+
+ def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
+ allow_isolation, build_dir):
+ """Returns a 3-tuple:
+ - the set of flags that need to be added to the link to generate
+ a default manifest
+ - the intermediate manifest that the linker will generate that should be
+ used to assert it doesn't add anything to the merged one.
+ - the list of all the manifest files to be merged by the manifest tool and
+ included into the link."""
+ generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
+ config,
+ default='true')
+ if generate_manifest != 'true':
+ # This means not only that the linker should not generate the intermediate
+ # manifest but also that the manifest tool should do nothing even when
+ # additional manifests are specified.
+ return ['/MANIFEST:NO'], [], []
+
+ output_name = name + '.intermediate.manifest'
+ flags = [
+ '/MANIFEST',
+ '/ManifestFile:' + output_name,
+ ]
+
+ # Instead of using the MANIFESTUAC flags, we generate a .manifest to
+ # include into the list of manifests. This allows us to avoid the need to
+ # do two passes during linking. The /MANIFEST flag and /ManifestFile are
+ # still used, and the intermediate manifest is used to assert that the
+ # final manifest we get from merging all the additional manifest files
+ # (plus the one we generate here) isn't modified by merging the
+ # intermediate into it.
+
+ # Always NO, because we generate a manifest file that has what we want.
+ flags.append('/MANIFESTUAC:NO')
+
+ config = self._TargetConfig(config)
+ enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
+ default='true')
+ manifest_files = []
+ generated_manifest_outer = \
+"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
+"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
+"</assembly>"
+ if enable_uac == 'true':
+ execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
+ config, default='0')
+ execution_level_map = {
+ '0': 'asInvoker',
+ '1': 'highestAvailable',
+ '2': 'requireAdministrator'
+ }
+
+ ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
+ default='false')
+
+ inner = '''
+<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level='%s' uiAccess='%s' />
+ </requestedPrivileges>
+ </security>
+</trustInfo>''' % (execution_level_map[execution_level], ui_access)
+ else:
+ inner = ''
+
+ generated_manifest_contents = generated_manifest_outer % inner
+ generated_name = name + '.generated.manifest'
+ # Need to join with the build_dir here as we're writing it during
+ # generation time, but we return the un-joined version because the build
+ # will occur in that directory. We only write the file if the contents
+ # have changed so that simply regenerating the project files doesn't
+ # cause a relink.
+ build_dir_generated_name = os.path.join(build_dir, generated_name)
+ gyp.common.EnsureDirExists(build_dir_generated_name)
+ f = gyp.common.WriteOnDiff(build_dir_generated_name)
+ f.write(generated_manifest_contents)
+ f.close()
+ manifest_files = [generated_name]
+
+ if allow_isolation:
+ flags.append('/ALLOWISOLATION')
+
+ manifest_files += self._GetAdditionalManifestFiles(config,
+ gyp_to_build_path)
+ return flags, output_name, manifest_files
+
+ def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
+ """Gets additional manifest files that are added to the default one
+ generated by the linker."""
+ files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
+ default=[])
+ if isinstance(files, str):
+ files = files.split(';')
+ return [os.path.normpath(
+ gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
+ for f in files]
+
+ def IsUseLibraryDependencyInputs(self, config):
+ """Returns whether the target should be linked via Use Library Dependency
+ Inputs (using component .objs of a given .lib)."""
+ config = self._TargetConfig(config)
+ uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
+ return uldi == 'true'
+
+ def IsEmbedManifest(self, config):
+ """Returns whether manifest should be linked into binary."""
+ config = self._TargetConfig(config)
+ embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
+ default='true')
+ return embed == 'true'
+
+ def IsLinkIncremental(self, config):
+ """Returns whether the target should be linked incrementally."""
+ config = self._TargetConfig(config)
+ link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
+ return link_inc != '1'
+
+ def GetRcflags(self, config, gyp_to_ninja_path):
+ """Returns the flags that need to be added to invocations of the resource
+ compiler."""
+ config = self._TargetConfig(config)
+ rcflags = []
+ rc = self._GetWrapper(self, self.msvs_settings[config],
+ 'VCResourceCompilerTool', append=rcflags)
+ rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
+ rcflags.append('/I' + gyp_to_ninja_path('.'))
+ rc('PreprocessorDefinitions', prefix='/d')
+ # /l arg must be in hex without leading '0x'
+ rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
+ return rcflags
+
+ def BuildCygwinBashCommandLine(self, args, path_to_base):
+ """Build a command line that runs args via cygwin bash. We assume that all
+ incoming paths are in Windows normpath'd form, so they need to be
+ converted to posix style for the part of the command line that's passed to
+ bash. We also have to do some Visual Studio macro emulation here because
+ various rules use magic VS names for things. Also note that rules that
+ contain ninja variables cannot be fixed here (for example ${source}), so
+ the outer generator needs to make sure that the paths that are written out
+ are in posix style, if the command line will be used here."""
+ cygwin_dir = os.path.normpath(
+ os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
+ cd = ('cd %s' % path_to_base).replace('\\', '/')
+ args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
+ args = ["'%s'" % a.replace("'", "'\\''") for a in args]
+ bash_cmd = ' '.join(args)
+ cmd = (
+ 'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
+ 'bash -c "%s ; %s"' % (cd, bash_cmd))
+ return cmd
+
+ def IsRuleRunUnderCygwin(self, rule):
+ """Determine if an action should be run under cygwin. If the variable is
+ unset, or set to 1 we use cygwin."""
+ return int(rule.get('msvs_cygwin_shell',
+ self.spec.get('msvs_cygwin_shell', 1))) != 0
+
+ def _HasExplicitRuleForExtension(self, spec, extension):
+ """Determine if there's an explicit rule for a particular extension."""
+ for rule in spec.get('rules', []):
+ if rule['extension'] == extension:
+ return True
+ return False
+
+ def _HasExplicitIdlActions(self, spec):
+ """Determine if an action should not run midl for .idl files."""
+ return any([action.get('explicit_idl_action', 0)
+ for action in spec.get('actions', [])])
+
+ def HasExplicitIdlRulesOrActions(self, spec):
+ """Determine if there's an explicit rule or action for idl files. When
+ there isn't we need to generate implicit rules to build MIDL .idl files."""
+ return (self._HasExplicitRuleForExtension(spec, 'idl') or
+ self._HasExplicitIdlActions(spec))
+
+ def HasExplicitAsmRules(self, spec):
+ """Determine if there's an explicit rule for asm files. When there isn't we
+ need to generate implicit rules to assemble .asm files."""
+ return self._HasExplicitRuleForExtension(spec, 'asm')
+
+ def GetIdlBuildData(self, source, config):
+ """Determine the implicit outputs for an idl file. Returns output
+ directory, outputs, and variables and flags that are required."""
+ config = self._TargetConfig(config)
+ midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
+ def midl(name, default=None):
+ return self.ConvertVSMacros(midl_get(name, default=default),
+ config=config)
+ tlb = midl('TypeLibraryName', default='${root}.tlb')
+ header = midl('HeaderFileName', default='${root}.h')
+ dlldata = midl('DLLDataFileName', default='dlldata.c')
+ iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
+ proxy = midl('ProxyFileName', default='${root}_p.c')
+ # Note that the .tlb is not included in the outputs because, depending on
+ # the content of the input idl file, it is not always generated.
+ outdir = midl('OutputDirectory', default='')
+ output = [header, dlldata, iid, proxy]
+ variables = [('tlb', tlb),
+ ('h', header),
+ ('dlldata', dlldata),
+ ('iid', iid),
+ ('proxy', proxy)]
+ # TODO(scottmg): Are there configuration settings to set these flags?
+ target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
+ flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
+ return outdir, output, variables, flags
+
+
+def _LanguageMatchesForPch(source_ext, pch_source_ext):
+ c_exts = ('.c',)
+ cc_exts = ('.cc', '.cxx', '.cpp')
+ return ((source_ext in c_exts and pch_source_ext in c_exts) or
+ (source_ext in cc_exts and pch_source_ext in cc_exts))
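+# Illustrative: _LanguageMatchesForPch('.cc', '.cpp') is True, while
+# _LanguageMatchesForPch('.c', '.cpp') is False.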
+
+
+class PrecompiledHeader(object):
+ """Helper to generate dependencies and build rules to handle generation of
+ precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
+ """
+ def __init__(
+ self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
+ self.settings = settings
+ self.config = config
+ pch_source = self.settings.msvs_precompiled_source[self.config]
+ self.pch_source = gyp_to_build_path(pch_source)
+ filename, _ = os.path.splitext(pch_source)
+ self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
+
+ def _PchHeader(self):
+ """Get the header that will appear in an #include line for all source
+ files."""
+ return self.settings.msvs_precompiled_header[self.config]
+
+ def GetObjDependencies(self, sources, objs, arch):
+ """Given a list of sources files and the corresponding object files,
+ returns a list of the pch files that should be depended upon. The
+ additional wrapping in the return value is for interface compatibility
+ with make.py on Mac, and xcode_emulation.py."""
+ assert arch is None
+ if not self._PchHeader():
+ return []
+ pch_ext = os.path.splitext(self.pch_source)[1]
+ for source in sources:
+ if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
+ return [(None, None, self.output_obj)]
+ return []
+
+ def GetPchBuildCommands(self, arch):
+ """Not used on Windows as there are no additional build steps required
+ (instead, existing steps are modified in GetFlagsModifications below)."""
+ return []
+
+ def GetFlagsModifications(self, input, output, implicit, command,
+ cflags_c, cflags_cc, expand_special):
+ """Get the modified cflags and implicit dependencies that should be used
+ for the pch compilation step."""
+ if input == self.pch_source:
+ pch_output = ['/Yc' + self._PchHeader()]
+ if command == 'cxx':
+ return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
+ self.output_obj, [])
+ elif command == 'cc':
+ return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
+ self.output_obj, [])
+ return [], output, implicit
+
+
+vs_version = None
+def GetVSVersion(generator_flags):
+ global vs_version
+ if not vs_version:
+ vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
+ generator_flags.get('msvs_version', 'auto'),
+ allow_fallback=False)
+ return vs_version
+
+def _GetVsvarsSetupArgs(generator_flags, arch):
+ vs = GetVSVersion(generator_flags)
+ return vs.SetupScript()
+
+def ExpandMacros(string, expansions):
+ """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
+ for the canonical way to retrieve a suitable dict."""
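+ # Illustrative (hypothetical values): ExpandMacros('$(OutDir)/foo.dll',
+ #   {'$(OutDir)': 'out/Release'}) returns 'out/Release/foo.dll'.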
+ if '$' in string:
+ for old, new in expansions.items():
+ assert '$(' not in new, new
+ string = string.replace(old, new)
+ return string
+
+def _ExtractImportantEnvironment(output_of_set):
+ """Extracts environment variables required for the toolchain to run from
+ a textual dump output by the cmd.exe 'set' command."""
+ envvars_to_save = (
+ 'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
+ 'include',
+ 'lib',
+ 'libpath',
+ 'path',
+ 'pathext',
+ 'systemroot',
+ 'temp',
+ 'tmp',
+ )
+ env = {}
+ # An empty 'set' output occasionally happens and leads to misleading
+ # SYSTEMROOT error messages if not caught here.
+ if output_of_set.count('=') == 0:
+ raise Exception('Invalid output_of_set. Value is:\n%s' % output_of_set)
+ for line in output_of_set.splitlines():
+ for envvar in envvars_to_save:
+ if re.match(envvar + '=', line.lower()):
+ var, setting = line.split('=', 1)
+ if envvar == 'path':
+ # Our own rules (for running gyp-win-tool) and other actions in
+ # Chromium rely on python being in the path. Add the path to this
+ # python here so that if it's not in the path when ninja is run
+ # later, python will still be found.
+ setting = os.path.dirname(sys.executable) + os.pathsep + setting
+ env[var.upper()] = setting
+ break
+ for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
+ if required not in env:
+ raise Exception('Environment variable "%s" '
+ 'required to be set to valid path' % required)
+ return env
+
+def _FormatAsEnvironmentBlock(envvar_dict):
+ """Format as an 'environment block' directly suitable for CreateProcess.
+ Briefly this is a list of key=value\0, terminated by an additional \0. See
+ CreateProcess documentation for more details."""
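+ # Illustrative (hypothetical value): {'TMP': 'C:\\t'} becomes 'TMP=C:\\t\0\0'.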
+ block = ''
+ nul = '\0'
+ for key, value in envvar_dict.items():
+ block += key + '=' + value + nul
+ block += nul
+ return block
+
+def _ExtractCLPath(output_of_where):
+ """Gets the path to cl.exe based on the output of calling the environment
+ setup batch file, followed by the equivalent of `where`."""
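+ # Expected input lines look like 'LOC:C:\...\cl.exe' (path is illustrative).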
+ # Take the first line, as that's the first found in the PATH.
+ for line in output_of_where.strip().splitlines():
+ if line.startswith('LOC:'):
+ return line[len('LOC:'):].strip()
+
+def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
+ system_includes, open_out):
+ """It's not sufficient to have the absolute path to the compiler, linker,
+ etc. on Windows, as those tools rely on .dlls being in the PATH. We also
+ need to support both x86 and x64 compilers within the same build (to support
+ msvs_target_platform hackery). Different architectures require a different
+ compiler binary, and different supporting environment variables (INCLUDE,
+ LIB, LIBPATH). So, we extract the environment here, wrap all invocations
+ of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
+ sets up the environment, and then we do not prefix the compiler with
+ an absolute path, instead preferring something like "cl.exe" in the rule
+ which will then run whichever the environment setup has put in the path.
+ If this procedure for generating environment files does not meet your
+ requirements (e.g. for custom toolchains), you can pass
+ "-G ninja_use_custom_environment_files" to gyp to suppress file
+ generation and use environment files you have prepared yourself."""
+ archs = ('x86', 'x64')
+ if generator_flags.get('ninja_use_custom_environment_files', 0):
+ cl_paths = {}
+ for arch in archs:
+ cl_paths[arch] = 'cl.exe'
+ return cl_paths
+ vs = GetVSVersion(generator_flags)
+ cl_paths = {}
+ for arch in archs:
+ # Extract environment variables for subprocesses.
+ args = vs.SetupScript(arch)
+ args.extend(('&&', 'set'))
+ popen = subprocess.Popen(
+ args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ variables, _ = popen.communicate()
+ if popen.returncode != 0:
+ raise Exception('"%s" failed with error %d' % (args, popen.returncode))
+ env = _ExtractImportantEnvironment(variables)
+
+ # Inject system includes from gyp files into INCLUDE.
+ if system_includes:
+ system_includes = system_includes | OrderedSet(
+ env.get('INCLUDE', '').split(';'))
+ env['INCLUDE'] = ';'.join(system_includes)
+
+ env_block = _FormatAsEnvironmentBlock(env)
+ f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'w')
+ f.write(env_block)
+ f.close()
+
+ # Find cl.exe location for this architecture.
+ args = vs.SetupScript(arch)
+ args.extend(('&&',
+ 'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
+ popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
+ output, _ = popen.communicate()
+ cl_paths[arch] = _ExtractCLPath(output)
+ return cl_paths
+
+def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
+ """Emulate behavior of msvs_error_on_missing_sources present in the msvs
+ generator: Check that all regular source files, i.e. not created at run time,
+ exist on disk. Missing files cause needless recompilation when building via
+ VS, and we want this check to match for people/bots that build using ninja,
+ so they're not surprised when the VS build fails."""
+ if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
+ no_specials = filter(lambda x: '$' not in x, sources)
+ relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
+ missing = [x for x in relative if not os.path.exists(x)]
+ if missing:
+ # They'll look like out\Release\..\..\stuff\things.cc, so normalize the
+ # path for a slightly less crazy looking output.
+ cleaned_up = [os.path.normpath(x) for x in missing]
+ raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
+
+# Sets some values in default_variables, which are required for many
+# generators, run on Windows.
+def CalculateCommonVariables(default_variables, params):
+ generator_flags = params.get('generator_flags', {})
+
+ # Set a variable so conditions can be based on msvs_version.
+ msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
+ default_variables['MSVS_VERSION'] = msvs_version.ShortName()
+
+ # To determine processor word size on Windows, in addition to checking
+ # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
+ # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
+ # contains the actual word size of the system when running thru WOW64).
+ if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
+ '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
+ default_variables['MSVS_OS_BITS'] = 64
+ else:
+ default_variables['MSVS_OS_BITS'] = 32
diff --git a/third_party/python/gyp/pylib/gyp/ninja_syntax.py b/third_party/python/gyp/pylib/gyp/ninja_syntax.py
new file mode 100644
index 0000000000..95e894276e
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/ninja_syntax.py
@@ -0,0 +1,168 @@
+# This file comes from
+# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
+# Do not edit! Edit the upstream one instead.
+
+"""Python module for generating .ninja files.
+
+Note that this is emphatically not a required piece of Ninja; it's
+just a helpful utility for build-file-generation systems that already
+use Python.
+"""
+
+import textwrap
+import re
+
+def escape_path(word):
+ return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:')
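+# Illustrative: escape_path('out/my file.o') returns 'out/my$ file.o'.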
+
+class Writer(object):
+ def __init__(self, output, width=78):
+ self.output = output
+ self.width = width
+
+ def newline(self):
+ self.output.write('\n')
+
+ def comment(self, text):
+ for line in textwrap.wrap(text, self.width - 2):
+ self.output.write('# ' + line + '\n')
+
+ def variable(self, key, value, indent=0):
+ if value is None:
+ return
+ if isinstance(value, list):
+ value = ' '.join(filter(None, value)) # Filter out empty strings.
+ self._line('%s = %s' % (key, value), indent)
+
+ def pool(self, name, depth):
+ self._line('pool %s' % name)
+ self.variable('depth', depth, indent=1)
+
+ def rule(self, name, command, description=None, depfile=None,
+ generator=False, pool=None, restat=False, rspfile=None,
+ rspfile_content=None, deps=None):
+ self._line('rule %s' % name)
+ self.variable('command', command, indent=1)
+ if description:
+ self.variable('description', description, indent=1)
+ if depfile:
+ self.variable('depfile', depfile, indent=1)
+ if generator:
+ self.variable('generator', '1', indent=1)
+ if pool:
+ self.variable('pool', pool, indent=1)
+ if restat:
+ self.variable('restat', '1', indent=1)
+ if rspfile:
+ self.variable('rspfile', rspfile, indent=1)
+ if rspfile_content:
+ self.variable('rspfile_content', rspfile_content, indent=1)
+ if deps:
+ self.variable('deps', deps, indent=1)
+
+ def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
+ variables=None):
+ outputs = self._as_list(outputs)
+ all_inputs = self._as_list(inputs)[:]
+ out_outputs = list(map(escape_path, outputs))
+ all_inputs = list(map(escape_path, all_inputs))
+
+ if implicit:
+ implicit = map(escape_path, self._as_list(implicit))
+ all_inputs.append('|')
+ all_inputs.extend(implicit)
+ if order_only:
+ order_only = map(escape_path, self._as_list(order_only))
+ all_inputs.append('||')
+ all_inputs.extend(order_only)
+
+ self._line('build %s: %s' % (' '.join(out_outputs),
+ ' '.join([rule] + all_inputs)))
+
+ if variables:
+ if isinstance(variables, dict):
+ iterator = iter(variables.items())
+ else:
+ iterator = iter(variables)
+
+ for key, val in iterator:
+ self.variable(key, val, indent=1)
+
+ return outputs
+
+ def include(self, path):
+ self._line('include %s' % path)
+
+ def subninja(self, path):
+ self._line('subninja %s' % path)
+
+ def default(self, paths):
+ self._line('default %s' % ' '.join(self._as_list(paths)))
+
+ def _count_dollars_before_index(self, s, i):
+ """Returns the number of '$' characters right in front of s[i]."""
+ dollar_count = 0
+ dollar_index = i - 1
+ while dollar_index > 0 and s[dollar_index] == '$':
+ dollar_count += 1
+ dollar_index -= 1
+ return dollar_count
+
+ def _line(self, text, indent=0):
+ """Write 'text' word-wrapped at self.width characters."""
+ leading_space = ' ' * indent
+ while len(leading_space) + len(text) > self.width:
+ # The text is too wide; wrap if possible.
+
+ # Find the rightmost space that would obey our width constraint and
+ # that's not an escaped space.
+ available_space = self.width - len(leading_space) - len(' $')
+ space = available_space
+ while True:
+ space = text.rfind(' ', 0, space)
+ if space < 0 or \
+ self._count_dollars_before_index(text, space) % 2 == 0:
+ break
+
+ if space < 0:
+ # No such space; just use the first unescaped space we can find.
+ space = available_space - 1
+ while True:
+ space = text.find(' ', space + 1)
+ if space < 0 or \
+ self._count_dollars_before_index(text, space) % 2 == 0:
+ break
+ if space < 0:
+ # Give up on breaking.
+ break
+
+ self.output.write(leading_space + text[0:space] + ' $\n')
+ text = text[space+1:]
+
+ # Subsequent lines are continuations, so indent them.
+ leading_space = ' ' * (indent+2)
+
+ self.output.write(leading_space + text + '\n')
+
+ def _as_list(self, input):
+ if input is None:
+ return []
+ if isinstance(input, list):
+ return input
+
+ # map is not a class in Python 2
+ try:
+ if isinstance(input, map):
+ return list(input)
+ except TypeError:
+ pass
+
+ return [input]
+
+
+def escape(string):
+ """Escape a string such that it can be embedded into a Ninja file without
+ further interpretation."""
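+ # e.g. escape('price: $5') returns 'price: $$5' (illustrative).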
+ assert '\n' not in string, 'Ninja syntax does not allow newlines'
+ # We only have one special metacharacter: '$'.
+ return string.replace('$', '$$')
diff --git a/third_party/python/gyp/pylib/gyp/simple_copy.py b/third_party/python/gyp/pylib/gyp/simple_copy.py
new file mode 100644
index 0000000000..58a61c3423
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/simple_copy.py
@@ -0,0 +1,57 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A clone of the default copy.deepcopy that doesn't handle cyclic
+structures or complex types except for dicts and lists. This is
+because gyp copies such large structures that even a small per-copy overhead
+ends up taking seconds in a project the size of Chromium."""
+
+class Error(Exception):
+ pass
+
+__all__ = ["Error", "deepcopy"]
+
+def deepcopy(x):
+ """Deep copy operation on gyp objects such as strings, ints, dicts
+ and lists. More than twice as fast as copy.deepcopy but much less
+ generic."""
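+ # Illustrative (hypothetical data): deepcopy({'defines': ['FOO=1']}) returns
+ # a new dict whose list value is also a new list object.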
+
+ try:
+ return _deepcopy_dispatch[type(x)](x)
+ except KeyError:
+ raise Error('Unsupported type %s for deepcopy. Use copy.deepcopy '
+ 'or expand simple_copy support.' % type(x))
+
+_deepcopy_dispatch = d = {}
+
+def _deepcopy_atomic(x):
+ return x
+
+try:
+ _string_types = (str, unicode)
+# There's no unicode in python3
+except NameError:
+ _string_types = (str, )
+
+try:
+ _integer_types = (int, long)
+# There's no long in python3
+except NameError:
+ _integer_types = (int, )
+
+for x in (type(None), float, bool, type) + _integer_types + _string_types:
+ d[x] = _deepcopy_atomic
+
+def _deepcopy_list(x):
+ return [deepcopy(a) for a in x]
+d[list] = _deepcopy_list
+
+def _deepcopy_dict(x):
+ y = {}
+ for key, value in x.items():
+ y[deepcopy(key)] = deepcopy(value)
+ return y
+d[dict] = _deepcopy_dict
+
+del d
diff --git a/third_party/python/gyp/pylib/gyp/win_tool.py b/third_party/python/gyp/pylib/gyp/win_tool.py
new file mode 100755
index 0000000000..897348468a
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/win_tool.py
@@ -0,0 +1,326 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for Windows builds.
+
+These functions are executed via gyp-win-tool when using the ninja generator.
+"""
+
+from __future__ import print_function
+
+import os
+import re
+import shutil
+import subprocess
+import stat
+import string
+import sys
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+
+# A regex matching an argument corresponding to the output filename passed to
+# link.exe.
+_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
+
+def main(args):
+ executor = WinTool()
+ exit_code = executor.Dispatch(args)
+ if exit_code is not None:
+ sys.exit(exit_code)
+
+
+class WinTool(object):
+ """This class performs all the Windows tooling steps. The methods can either
+ be executed directly, or dispatched from an argument list."""
+
+ def _UseSeparateMspdbsrv(self, env, args):
+ """Allows to use a unique instance of mspdbsrv.exe per linker instead of a
+ shared one."""
+ if len(args) < 1:
+ raise Exception("Not enough arguments")
+
+ if args[0] != 'link.exe':
+ return
+
+ # Use the output filename passed to the linker to generate an endpoint name
+ # for mspdbsrv.exe.
+ endpoint_name = None
+ for arg in args:
+ m = _LINK_EXE_OUT_ARG.match(arg)
+ if m:
+ endpoint_name = re.sub(r'\W+', '',
+ '%s_%d' % (m.group('out'), os.getpid()))
+ break
+
+ if endpoint_name is None:
+ return
+
+ # Adds the appropriate environment variable. This will be read by link.exe
+ # to know which instance of mspdbsrv.exe it should connect to (if it's
+ # not set then the default endpoint is used).
+ env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
+
+ def Dispatch(self, args):
+ """Dispatches a string command to a method."""
+ if len(args) < 1:
+ raise Exception("Not enough arguments")
+
+ method = "Exec%s" % self._CommandifyName(args[0])
+ return getattr(self, method)(*args[1:])
+
+ def _CommandifyName(self, name_string):
+ """Transforms a tool name like recursive-mirror to RecursiveMirror."""
+ return name_string.title().replace('-', '')
+
+ def _GetEnv(self, arch):
+ """Gets the saved environment from a file for a given architecture."""
+ # The environment is saved as an "environment block" (see CreateProcess
+ # and msvs_emulation for details). We convert to a dict here.
+ # Drop the last 2 NULs: one terminates the final key=value pair, the other
+ # terminates the block.
+ pairs = open(arch).read()[:-2].split('\0')
+ kvs = [item.split('=', 1) for item in pairs]
+ return dict(kvs)
+
+ def ExecStamp(self, path):
+ """Simple stamp command."""
+ open(path, 'w').close()
+
+ def ExecRecursiveMirror(self, source, dest):
+ """Emulation of rm -rf out && cp -af in out."""
+ if os.path.exists(dest):
+ if os.path.isdir(dest):
+ def _on_error(fn, path, excinfo):
+ # The operation failed, possibly because the file is set to
+ # read-only. If that's why, make it writable and try the op again.
+ if not os.access(path, os.W_OK):
+ os.chmod(path, stat.S_IWRITE)
+ fn(path)
+ shutil.rmtree(dest, onerror=_on_error)
+ else:
+ if not os.access(dest, os.W_OK):
+ # Attempt to make the file writable before deleting it.
+ os.chmod(dest, stat.S_IWRITE)
+ os.unlink(dest)
+
+ if os.path.isdir(source):
+ shutil.copytree(source, dest)
+ else:
+ shutil.copy2(source, dest)
+
+ def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
+ """Filter diagnostic output from link that looks like:
+ ' Creating library ui.dll.lib and object ui.dll.exp'
+ This happens when there are exports from the dll or exe.
+ """
+ env = self._GetEnv(arch)
+ if use_separate_mspdbsrv == 'True':
+ self._UseSeparateMspdbsrv(env, args)
+ if sys.platform == 'win32':
+ args = list(args) # *args is a tuple by default, which is read-only.
+ args[0] = args[0].replace('/', '\\')
+ # https://docs.python.org/2/library/subprocess.html:
+ # "On Unix with shell=True [...] if args is a sequence, the first item
+ # specifies the command string, and any additional items will be treated as
+ # additional arguments to the shell itself. That is to say, Popen does the
+ # equivalent of:
+ # Popen(['/bin/sh', '-c', args[0], args[1], ...])"
+ # For that reason, since going through the shell doesn't seem necessary on
+ # non-Windows, don't do that there.
+ link = subprocess.Popen(args, shell=sys.platform == 'win32', env=env,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ out, _ = link.communicate()
+ for line in out.splitlines():
+ if (not line.startswith(' Creating library ') and
+ not line.startswith('Generating code') and
+ not line.startswith('Finished generating code')):
+ print(line)
+ return link.returncode
+
+ def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
+ mt, rc, intermediate_manifest, *manifests):
+ """A wrapper for handling creating a manifest resource and then executing
+ a link command."""
+ # The 'normal' way to do manifests is to have link generate a manifest
+ # based on gathering dependencies from the object files, then merge that
+ # manifest with other manifests supplied as sources, convert the merged
+ # manifest to a resource, and then *relink*, including the compiled
+ # version of the manifest resource. This breaks incremental linking, and
+ # is generally overly complicated. Instead, we merge all the manifests
+ # provided (along with one that includes what would normally be in the
+ # linker-generated one, see msvs_emulation.py), and include that into the
+ # first and only link. We still tell link to generate a manifest, but we
+ # only use that to assert that our simpler process did not miss anything.
+ variables = {
+ 'python': sys.executable,
+ 'arch': arch,
+ 'out': out,
+ 'ldcmd': ldcmd,
+ 'resname': resname,
+ 'mt': mt,
+ 'rc': rc,
+ 'intermediate_manifest': intermediate_manifest,
+ 'manifests': ' '.join(manifests),
+ }
+ add_to_ld = ''
+ if manifests:
+ subprocess.check_call(
+ '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
+ '-manifest %(manifests)s -out:%(out)s.manifest' % variables)
+ if embed_manifest == 'True':
+ subprocess.check_call(
+ '%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest'
+ ' %(out)s.manifest.rc %(resname)s' % variables)
+ subprocess.check_call(
+ '%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s '
+ '%(out)s.manifest.rc' % variables)
+ add_to_ld = ' %(out)s.manifest.res' % variables
+ subprocess.check_call(ldcmd + add_to_ld)
+
+ # Run mt.exe on the theoretically complete manifest we generated, merging
+ # it with the one the linker generated to confirm that the linker
+ # generated one does not add anything. This is strictly unnecessary for
+ # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
+ # used in a #pragma comment.
+ if manifests:
+ # Merge the intermediate one with ours to .assert.manifest, then check
+ # that .assert.manifest is identical to ours.
+ subprocess.check_call(
+ '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
+ '-manifest %(out)s.manifest %(intermediate_manifest)s '
+ '-out:%(out)s.assert.manifest' % variables)
+ assert_manifest = '%(out)s.assert.manifest' % variables
+ our_manifest = '%(out)s.manifest' % variables
+ # Load and normalize the manifests. mt.exe sometimes removes whitespace,
+ # and sometimes doesn't unfortunately.
+ with open(our_manifest, 'r') as our_f:
+ with open(assert_manifest, 'r') as assert_f:
+ our_data = our_f.read().translate(None, string.whitespace)
+ assert_data = assert_f.read().translate(None, string.whitespace)
+ if our_data != assert_data:
+ os.unlink(out)
+ def dump(filename):
+ print(filename, file=sys.stderr)
+ print('-----', file=sys.stderr)
+ with open(filename, 'r') as f:
+ print(f.read(), file=sys.stderr)
+ print('-----', file=sys.stderr)
+ dump(intermediate_manifest)
+ dump(our_manifest)
+ dump(assert_manifest)
+ sys.stderr.write(
+ 'Linker generated manifest "%s" added to final manifest "%s" '
+ '(result in "%s"). '
+ 'Were /MANIFEST switches used in #pragma statements? ' % (
+ intermediate_manifest, our_manifest, assert_manifest))
+ return 1
+
+ def ExecManifestWrapper(self, arch, *args):
+ """Run manifest tool with environment set. Strip out undesirable warning
+ (some XML blocks are recognized by the OS loader, but not the manifest
+ tool)."""
+ env = self._GetEnv(arch)
+ popen = subprocess.Popen(args, shell=True, env=env,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ out, _ = popen.communicate()
+ for line in out.splitlines():
+ if line and 'manifest authoring warning 81010002' not in line:
+ print(line)
+ return popen.returncode
+
+ def ExecManifestToRc(self, arch, *args):
+ """Creates a resource file pointing a SxS assembly manifest.
+ |args| is tuple containing path to resource file, path to manifest file
+ and resource name which can be "1" (for executables) or "2" (for DLLs)."""
+ manifest_path, resource_path, resource_name = args
+ with open(resource_path, 'w') as output:
+ output.write('#include <windows.h>\n%s RT_MANIFEST "%s"' % (
+ resource_name,
+ os.path.abspath(manifest_path).replace('\\', '/')))
+
+ def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
+ *flags):
+ """Filter noisy filenames output from MIDL compile step that isn't
+ quietable via command line flags.
+ """
+ args = ['midl', '/nologo'] + list(flags) + [
+ '/out', outdir,
+ '/tlb', tlb,
+ '/h', h,
+ '/dlldata', dlldata,
+ '/iid', iid,
+ '/proxy', proxy,
+ idl]
+ env = self._GetEnv(arch)
+ popen = subprocess.Popen(args, shell=True, env=env,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ out, _ = popen.communicate()
+ # Filter junk out of stdout, and write filtered versions. Output we want
+ # to filter is pairs of lines that look like this:
+ # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
+ # objidl.idl
+ lines = out.splitlines()
+ prefixes = ('Processing ', '64 bit Processing ')
+ processing = set(os.path.basename(x)
+ for x in lines if x.startswith(prefixes))
+ for line in lines:
+ if not line.startswith(prefixes) and line not in processing:
+ print(line)
+ return popen.returncode
+
+ def ExecAsmWrapper(self, arch, *args):
+ """Filter logo banner from invocations of asm.exe."""
+ env = self._GetEnv(arch)
+ popen = subprocess.Popen(args, shell=True, env=env,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ out, _ = popen.communicate()
+ for line in out.splitlines():
+ if (not line.startswith('Copyright (C) Microsoft Corporation') and
+ not line.startswith('Microsoft (R) Macro Assembler') and
+ not line.startswith(' Assembling: ') and
+ line):
+ print(line)
+ return popen.returncode
+
+ def ExecRcWrapper(self, arch, *args):
+ """Filter logo banner from invocations of rc.exe. Older versions of RC
+ don't support the /nologo flag."""
+ env = self._GetEnv(arch)
+ popen = subprocess.Popen(args, shell=True, env=env,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ out, _ = popen.communicate()
+ for line in out.splitlines():
+ if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
+ not line.startswith('Copyright (C) Microsoft Corporation') and
+ line):
+ print(line)
+ return popen.returncode
+
+ def ExecActionWrapper(self, arch, rspfile, *dir):
+ """Runs an action command line from a response file using the environment
+ for |arch|. If |dir| is supplied, use that as the working directory."""
+ env = self._GetEnv(arch)
+ # TODO(scottmg): This is a temporary hack to get some specific variables
+ # through to actions that are set after gyp-time. http://crbug.com/333738.
+ for k, v in os.environ.items():
+ if k not in env:
+ env[k] = v
+ args = open(rspfile).read()
+ dir = dir[0] if dir else None
+ return subprocess.call(args, shell=True, env=env, cwd=dir)
+
+ def ExecClCompile(self, project_dir, selected_files):
+ """Executed by msvs-ninja projects when the 'ClCompile' target is used to
+ build selected C/C++ files."""
+ project_dir = os.path.relpath(project_dir, BASE_DIR)
+ selected_files = selected_files.split(';')
+ ninja_targets = [os.path.join(project_dir, filename) + '^^'
+ for filename in selected_files]
+ cmd = ['ninja.exe']
+ cmd.extend(ninja_targets)
+ return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/third_party/python/gyp/pylib/gyp/xcode_emulation.py b/third_party/python/gyp/pylib/gyp/xcode_emulation.py
new file mode 100644
index 0000000000..0bdf88dbef
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/xcode_emulation.py
@@ -0,0 +1,1798 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This module contains classes that help to emulate xcodebuild behavior on top of
+other build systems, such as make and ninja.
+"""
+
+from __future__ import print_function
+
+import copy
+import gyp.common
+import os
+import os.path
+import re
+import shlex
+import subprocess
+import sys
+import tempfile
+from gyp.common import GypError
+
+# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
+# "xcodebuild" is called too quickly (it has been found to return incorrect
+# version number).
+XCODE_VERSION_CACHE = None
+
+# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance
+# corresponding to the installed version of Xcode.
+XCODE_ARCHS_DEFAULT_CACHE = None
+
+
+def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
+ """Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable,
+ and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
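+ # Illustrative: XcodeArchsVariableMapping(['i386', 'x86_64']) returns
+ # {'$(ARCHS_STANDARD)': ['i386', 'x86_64']}.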
+ mapping = {'$(ARCHS_STANDARD)': archs}
+ if archs_including_64_bit:
+ mapping['$(ARCHS_STANDARD_INCLUDING_64_BIT)'] = archs_including_64_bit
+ return mapping
+
+class XcodeArchsDefault(object):
+ """A class to resolve ARCHS variable from xcode_settings, resolving Xcode
+ macros and implementing filtering by VALID_ARCHS. The expansion of macros
+ depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
+ on the version of Xcode.
+ """
+
+ # Match variable like $(ARCHS_STANDARD).
+ variable_pattern = re.compile(r'\$\([a-zA-Z_][a-zA-Z0-9_]*\)$')
+
+ def __init__(self, default, mac, iphonesimulator, iphoneos):
+ self._default = (default,)
+ self._archs = {'mac': mac, 'ios': iphoneos, 'iossim': iphonesimulator}
+
+ def _VariableMapping(self, sdkroot):
+ """Returns the dictionary of variable mapping depending on the SDKROOT."""
+ sdkroot = sdkroot.lower()
+ if 'iphoneos' in sdkroot:
+ return self._archs['ios']
+ elif 'iphonesimulator' in sdkroot:
+ return self._archs['iossim']
+ else:
+ return self._archs['mac']
+
+ def _ExpandArchs(self, archs, sdkroot):
+ """Expands variables references in ARCHS, and remove duplicates."""
+ variable_mapping = self._VariableMapping(sdkroot)
+ expanded_archs = []
+ for arch in archs:
+ if self.variable_pattern.match(arch):
+ variable = arch
+ try:
+ variable_expansion = variable_mapping[variable]
+ for arch in variable_expansion:
+ if arch not in expanded_archs:
+ expanded_archs.append(arch)
+ except KeyError as e:
+ print('Warning: Ignoring unsupported variable "%s".' % variable)
+ elif arch not in expanded_archs:
+ expanded_archs.append(arch)
+ return expanded_archs
+
+ def ActiveArchs(self, archs, valid_archs, sdkroot):
+ """Expands variables references in ARCHS, and filter by VALID_ARCHS if it
+ is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
+ values present in VALID_ARCHS are kept)."""
+ expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or '')
+ if valid_archs:
+ filtered_archs = []
+ for arch in expanded_archs:
+ if arch in valid_archs:
+ filtered_archs.append(arch)
+ expanded_archs = filtered_archs
+ return expanded_archs
+
+
+def GetXcodeArchsDefault():
+ """Returns the |XcodeArchsDefault| object to use to expand ARCHS for the
+ installed version of Xcode. The default values used by Xcode for ARCHS
+ and the expansion of the variables depends on the version of Xcode used.
+
+ All versions before Xcode 5.0 and from Xcode 5.1 onward use
+ $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses
+ $(ARCHS_STANDARD_INCLUDING_64_BIT). This variable was added in Xcode 5.0
+ and deprecated with Xcode 5.1.
+
+ For the "macosx" SDKROOT, all versions starting with Xcode 5.0 include the
+ 64-bit architecture as part of $(ARCHS_STANDARD) and default to only
+ building it.
+
+ For the "iphoneos" and "iphonesimulator" SDKROOTs, 64-bit architectures are
+ part of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1,
+ they are also part of $(ARCHS_STANDARD).
+
+ All those rules are encoded in the construction of the |XcodeArchsDefault|
+ object to use depending on the version of Xcode detected. The object is
+ cached for performance reasons."""
+ global XCODE_ARCHS_DEFAULT_CACHE
+ if XCODE_ARCHS_DEFAULT_CACHE:
+ return XCODE_ARCHS_DEFAULT_CACHE
+ xcode_version, _ = XcodeVersion()
+ if xcode_version < '0500':
+ XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
+ '$(ARCHS_STANDARD)',
+ XcodeArchsVariableMapping(['i386']),
+ XcodeArchsVariableMapping(['i386']),
+ XcodeArchsVariableMapping(['armv7']))
+ elif xcode_version < '0510':
+ XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
+ '$(ARCHS_STANDARD_INCLUDING_64_BIT)',
+ XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
+ XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64']),
+ XcodeArchsVariableMapping(
+ ['armv7', 'armv7s'],
+ ['armv7', 'armv7s', 'arm64']))
+ else:
+ XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
+ '$(ARCHS_STANDARD)',
+ XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
+ XcodeArchsVariableMapping(['i386', 'x86_64'], ['i386', 'x86_64']),
+ XcodeArchsVariableMapping(
+ ['armv7', 'armv7s', 'arm64'],
+ ['armv7', 'armv7s', 'arm64']))
+ return XCODE_ARCHS_DEFAULT_CACHE
+
+
+class XcodeSettings(object):
+ """A class that understands the gyp 'xcode_settings' object."""
+
+ # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
+ # at class-level for efficiency.
+ _sdk_path_cache = {}
+ _platform_path_cache = {}
+ _sdk_root_cache = {}
+
+ # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
+ # cached at class-level for efficiency.
+ _plist_cache = {}
+
+ # Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so
+ # cached at class-level for efficiency.
+ _codesigning_key_cache = {}
+
+ def __init__(self, spec):
+ self.spec = spec
+
+ self.isIOS = False
+ self.mac_toolchain_dir = None
+ self.header_map_path = None
+
+ # Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
+ # This means self.xcode_settings[config] always contains all settings
+ # for that config -- the per-target settings as well. Settings that are
+ # the same for all configs are implicitly per-target settings.
+ self.xcode_settings = {}
+ configs = spec['configurations']
+ for configname, config in configs.items():
+ self.xcode_settings[configname] = config.get('xcode_settings', {})
+ self._ConvertConditionalKeys(configname)
+ if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
+ None):
+ self.isIOS = True
+
+ # This is only non-None temporarily during the execution of some methods.
+ self.configname = None
+
+ # Used by _AdjustLibrary to match .a and .dylib entries in libraries.
+ self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
+
+ def _ConvertConditionalKeys(self, configname):
+ """Converts or warns on conditional keys. Xcode supports conditional keys,
+ such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
+ with some keys converted while the rest force a warning."""
+ settings = self.xcode_settings[configname]
+ conditional_keys = [key for key in settings if key.endswith(']')]
+ for key in conditional_keys:
+ # If you need more, speak up at http://crbug.com/122592
+ if key.endswith("[sdk=iphoneos*]"):
+ if configname.endswith("iphoneos"):
+ new_key = key.split("[")[0]
+ settings[new_key] = settings[key]
+ else:
+ print('Warning: Conditional keys not implemented, ignoring:', \
+ ' '.join(conditional_keys))
+ del settings[key]
+
+ def _Settings(self):
+ assert self.configname
+ return self.xcode_settings[self.configname]
+
+ def _Test(self, test_key, cond_key, default):
+ return self._Settings().get(test_key, default) == cond_key
+
+ def _Appendf(self, lst, test_key, format_str, default=None):
+ if test_key in self._Settings():
+ lst.append(format_str % str(self._Settings()[test_key]))
+ elif default:
+ lst.append(format_str % str(default))
+
+ def _WarnUnimplemented(self, test_key):
+ if test_key in self._Settings():
+ print('Warning: Ignoring not yet implemented key "%s".' % test_key)
+
+ def IsBinaryOutputFormat(self, configname):
+ default = "binary" if self.isIOS else "xml"
+ format = self.xcode_settings[configname].get('INFOPLIST_OUTPUT_FORMAT',
+ default)
+ return format == "binary"
+
+ def IsIosFramework(self):
+ return self.spec['type'] == 'shared_library' and self._IsBundle() and \
+ self.isIOS
+
+ def _IsBundle(self):
+ return int(self.spec.get('mac_bundle', 0)) != 0 or self._IsXCTest() or \
+ self._IsXCUiTest()
+
+ def _IsXCTest(self):
+ return int(self.spec.get('mac_xctest_bundle', 0)) != 0
+
+ def _IsXCUiTest(self):
+ return int(self.spec.get('mac_xcuitest_bundle', 0)) != 0
+
+ def _IsIosAppExtension(self):
+ return int(self.spec.get('ios_app_extension', 0)) != 0
+
+ def _IsIosWatchKitExtension(self):
+ return int(self.spec.get('ios_watchkit_extension', 0)) != 0
+
+ def _IsIosWatchApp(self):
+ return int(self.spec.get('ios_watch_app', 0)) != 0
+
+ def GetFrameworkVersion(self):
+ """Returns the framework version of the current target. Only valid for
+ bundles."""
+ assert self._IsBundle()
+ return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A')
+
+ def GetWrapperExtension(self):
+ """Returns the bundle extension (.app, .framework, .plugin, etc). Only
+ valid for bundles."""
+ assert self._IsBundle()
+ if self.spec['type'] in ('loadable_module', 'shared_library'):
+ default_wrapper_extension = {
+ 'loadable_module': 'bundle',
+ 'shared_library': 'framework',
+ }[self.spec['type']]
+ wrapper_extension = self.GetPerTargetSetting(
+ 'WRAPPER_EXTENSION', default=default_wrapper_extension)
+ return '.' + self.spec.get('product_extension', wrapper_extension)
+ elif self.spec['type'] == 'executable':
+ if self._IsIosAppExtension() or self._IsIosWatchKitExtension():
+ return '.' + self.spec.get('product_extension', 'appex')
+ else:
+ return '.' + self.spec.get('product_extension', 'app')
+ else:
+ assert False, "Don't know extension for '%s', target '%s'" % (
+ self.spec['type'], self.spec['target_name'])
+
+ def GetProductName(self):
+ """Returns PRODUCT_NAME."""
+ return self.spec.get('product_name', self.spec['target_name'])
+
+ def GetFullProductName(self):
+ """Returns FULL_PRODUCT_NAME."""
+ if self._IsBundle():
+ return self.GetWrapperName()
+ else:
+ return self._GetStandaloneBinaryPath()
+
+ def GetWrapperName(self):
+ """Returns the directory name of the bundle represented by this target.
+ Only valid for bundles."""
+ assert self._IsBundle()
+ return self.GetProductName() + self.GetWrapperExtension()
+
+ def GetBundleContentsFolderPath(self):
+ """Returns the qualified path to the bundle's contents folder. E.g.
+ Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
+ if self.isIOS:
+ return self.GetWrapperName()
+ assert self._IsBundle()
+ if self.spec['type'] == 'shared_library':
+ return os.path.join(
+ self.GetWrapperName(), 'Versions', self.GetFrameworkVersion())
+ else:
+ # loadable_modules have a 'Contents' folder like executables.
+ return os.path.join(self.GetWrapperName(), 'Contents')
+
+ def GetBundleResourceFolder(self):
+ """Returns the qualified path to the bundle's resource folder. E.g.
+ Chromium.app/Contents/Resources. Only valid for bundles."""
+ assert self._IsBundle()
+ if self.isIOS:
+ return self.GetBundleContentsFolderPath()
+ return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
+
+ def GetBundleExecutableFolderPath(self):
+ """Returns the qualified path to the bundle's executables folder. E.g.
+ Chromium.app/Contents/MacOS. Only valid for bundles."""
+ assert self._IsBundle()
+ if self.spec['type'] in ('shared_library',) or self.isIOS:
+ return self.GetBundleContentsFolderPath()
+ elif self.spec['type'] in ('executable', 'loadable_module'):
+ return os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
+
+ def GetBundleJavaFolderPath(self):
+ """Returns the qualified path to the bundle's Java resource folder.
+ E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleResourceFolder(), 'Java')
+
+ def GetBundleFrameworksFolderPath(self):
+ """Returns the qualified path to the bundle's frameworks folder. E.g,
+ Chromium.app/Contents/Frameworks. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(), 'Frameworks')
+
+ def GetBundleSharedFrameworksFolderPath(self):
+ """Returns the qualified path to the bundle's frameworks folder. E.g,
+ Chromium.app/Contents/SharedFrameworks. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(),
+ 'SharedFrameworks')
+
+ def GetBundleSharedSupportFolderPath(self):
+ """Returns the qualified path to the bundle's shared support folder. E.g,
+ Chromium.app/Contents/SharedSupport. Only valid for bundles."""
+ assert self._IsBundle()
+ if self.spec['type'] == 'shared_library':
+ return self.GetBundleResourceFolder()
+ else:
+ return os.path.join(self.GetBundleContentsFolderPath(),
+ 'SharedSupport')
+
+ def GetBundlePlugInsFolderPath(self):
+ """Returns the qualified path to the bundle's plugins folder. E.g,
+ Chromium.app/Contents/PlugIns. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(), 'PlugIns')
+
+ def GetBundleXPCServicesFolderPath(self):
+ """Returns the qualified path to the bundle's XPC services folder. E.g,
+ Chromium.app/Contents/XPCServices. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(), 'XPCServices')
+
+ def GetBundlePlistPath(self):
+ """Returns the qualified path to the bundle's plist file. E.g.
+ Chromium.app/Contents/Info.plist. Only valid for bundles."""
+ assert self._IsBundle()
+ if self.spec['type'] in ('executable', 'loadable_module') or \
+ self.IsIosFramework():
+ return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
+ else:
+ return os.path.join(self.GetBundleContentsFolderPath(),
+ 'Resources', 'Info.plist')
+
+ def GetProductType(self):
+ """Returns the PRODUCT_TYPE of this target."""
+ if self._IsIosAppExtension():
+ assert self._IsBundle(), ('ios_app_extension flag requires mac_bundle '
+ '(target %s)' % self.spec['target_name'])
+ return 'com.apple.product-type.app-extension'
+ if self._IsIosWatchKitExtension():
+ assert self._IsBundle(), ('ios_watchkit_extension flag requires '
+ 'mac_bundle (target %s)' % self.spec['target_name'])
+ return 'com.apple.product-type.watchkit-extension'
+ if self._IsIosWatchApp():
+ assert self._IsBundle(), ('ios_watch_app flag requires mac_bundle '
+ '(target %s)' % self.spec['target_name'])
+ return 'com.apple.product-type.application.watchapp'
+ if self._IsXCUiTest():
+ assert self._IsBundle(), ('mac_xcuitest_bundle flag requires mac_bundle '
+ '(target %s)' % self.spec['target_name'])
+ return 'com.apple.product-type.bundle.ui-testing'
+ if self._IsBundle():
+ return {
+ 'executable': 'com.apple.product-type.application',
+ 'loadable_module': 'com.apple.product-type.bundle',
+ 'shared_library': 'com.apple.product-type.framework',
+ }[self.spec['type']]
+ else:
+ return {
+ 'executable': 'com.apple.product-type.tool',
+ 'loadable_module': 'com.apple.product-type.library.dynamic',
+ 'shared_library': 'com.apple.product-type.library.dynamic',
+ 'static_library': 'com.apple.product-type.library.static',
+ }[self.spec['type']]
+
+ def GetMachOType(self):
+ """Returns the MACH_O_TYPE of this target."""
+ # Weird, but matches Xcode.
+ if not self._IsBundle() and self.spec['type'] == 'executable':
+ return ''
+ return {
+ 'executable': 'mh_execute',
+ 'static_library': 'staticlib',
+ 'shared_library': 'mh_dylib',
+ 'loadable_module': 'mh_bundle',
+ }[self.spec['type']]
+
+ def _GetBundleBinaryPath(self):
+ """Returns the name of the bundle binary of by this target.
+ E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleExecutableFolderPath(), \
+ self.GetExecutableName())
+
+ def _GetStandaloneExecutableSuffix(self):
+ if 'product_extension' in self.spec:
+ return '.' + self.spec['product_extension']
+ return {
+ 'executable': '',
+ 'static_library': '.a',
+ 'shared_library': '.dylib',
+ 'loadable_module': '.so',
+ }[self.spec['type']]
+
+ def _GetStandaloneExecutablePrefix(self):
+ return self.spec.get('product_prefix', {
+ 'executable': '',
+ 'static_library': 'lib',
+ 'shared_library': 'lib',
+ # Non-bundled loadable_modules are called foo.so for some reason
+ # (that is, .so and no prefix) with the xcode build -- match that.
+ 'loadable_module': '',
+ }[self.spec['type']])
+
+ def _GetStandaloneBinaryPath(self):
+ """Returns the name of the non-bundle binary represented by this target.
+ E.g. hello_world. Only valid for non-bundles."""
+ assert not self._IsBundle()
+ assert self.spec['type'] in (
+ 'executable', 'shared_library', 'static_library', 'loadable_module'), (
+ 'Unexpected type %s' % self.spec['type'])
+ target = self.spec['target_name']
+ if self.spec['type'] == 'static_library':
+ if target[:3] == 'lib':
+ target = target[3:]
+ elif self.spec['type'] in ('loadable_module', 'shared_library'):
+ if target[:3] == 'lib':
+ target = target[3:]
+
+ target_prefix = self._GetStandaloneExecutablePrefix()
+ target = self.spec.get('product_name', target)
+ target_ext = self._GetStandaloneExecutableSuffix()
+ return target_prefix + target + target_ext
+
+ def GetExecutableName(self):
+ """Returns the executable name of the bundle represented by this target.
+ E.g. Chromium."""
+ if self._IsBundle():
+ return self.spec.get('product_name', self.spec['target_name'])
+ else:
+ return self._GetStandaloneBinaryPath()
+
+ def GetExecutablePath(self):
+ """Returns the qualified path to the primary executable of the bundle
+ represented by this target. E.g. Chromium.app/Contents/MacOS/Chromium."""
+ if self._IsBundle():
+ return self._GetBundleBinaryPath()
+ else:
+ return self._GetStandaloneBinaryPath()
+
+ def GetActiveArchs(self, configname):
+ """Returns the architectures this target should be built for."""
+ config_settings = self.xcode_settings[configname]
+ xcode_archs_default = GetXcodeArchsDefault()
+ return xcode_archs_default.ActiveArchs(
+ config_settings.get('ARCHS'),
+ config_settings.get('VALID_ARCHS'),
+ config_settings.get('SDKROOT'))
+
+ def _GetSdkVersionInfoItem(self, sdk, infoitem):
+ # xcodebuild requires Xcode and can't run on Command Line Tools-only
+ # systems from 10.7 onward.
+ # Since the CLT has no SDK paths anyway, returning None is the
+ # most sensible route and should still do the right thing.
+ try:
+ return GetStdout(['xcrun', '--sdk', sdk, infoitem])
+ except:
+ pass
+
+ def _SdkRoot(self, configname):
+ if configname is None:
+ configname = self.configname
+ return self.GetPerConfigSetting('SDKROOT', configname, default='')
+
+ def _XcodePlatformPath(self, configname=None):
+ sdk_root = self._SdkRoot(configname)
+ if sdk_root not in XcodeSettings._platform_path_cache:
+ platform_path = self._GetSdkVersionInfoItem(sdk_root,
+ '--show-sdk-platform-path')
+ XcodeSettings._platform_path_cache[sdk_root] = platform_path
+ return XcodeSettings._platform_path_cache[sdk_root]
+
+ def _SdkPath(self, configname=None):
+ sdk_root = self._SdkRoot(configname)
+ if sdk_root.startswith('/'):
+ return sdk_root
+ return self._XcodeSdkPath(sdk_root)
+
+ def _XcodeSdkPath(self, sdk_root):
+ if sdk_root not in XcodeSettings._sdk_path_cache:
+ sdk_path = self._GetSdkVersionInfoItem(sdk_root, '--show-sdk-path')
+ XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
+ if sdk_root:
+ XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
+ return XcodeSettings._sdk_path_cache[sdk_root]
+
+ def _AppendPlatformVersionMinFlags(self, lst):
+ self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
+ if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings():
+ # TODO: Implement this better?
+ sdk_path_basename = os.path.basename(self._SdkPath())
+ if sdk_path_basename.lower().startswith('iphonesimulator'):
+ self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
+ '-mios-simulator-version-min=%s')
+ else:
+ self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
+ '-miphoneos-version-min=%s')
+
+ def GetCflags(self, configname, arch=None):
+ """Returns flags that need to be added to .c, .cc, .m, and .mm
+ compilations."""
+ # These functions (and the similar ones below) do not offer complete
+ # emulation of all xcode_settings keys. They're implemented on demand.
+
+ self.configname = configname
+ cflags = []
+
+ sdk_root = self._SdkPath()
+ if 'SDKROOT' in self._Settings() and sdk_root:
+ cflags.append('-isysroot %s' % sdk_root)
+
+ if self.header_map_path:
+ cflags.append('-I%s' % self.header_map_path)
+
+ if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
+ cflags.append('-Wconstant-conversion')
+
+ if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
+ cflags.append('-funsigned-char')
+
+ if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'):
+ cflags.append('-fasm-blocks')
+
+ if 'GCC_DYNAMIC_NO_PIC' in self._Settings():
+ if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES':
+ cflags.append('-mdynamic-no-pic')
+ else:
+ pass
+ # TODO: In this case, it depends on the target. xcode passes
+ # mdynamic-no-pic by default for executable and possibly static lib
+ # according to mento
+
+ if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'):
+ cflags.append('-mpascal-strings')
+
+ self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s')
+
+ if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'):
+ dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf')
+ if dbg_format == 'dwarf':
+ cflags.append('-gdwarf-2')
+ elif dbg_format == 'stabs':
+ raise NotImplementedError('stabs debug format is not supported yet.')
+ elif dbg_format == 'dwarf-with-dsym':
+ cflags.append('-gdwarf-2')
+ else:
+ raise NotImplementedError('Unknown debug format %s' % dbg_format)
+
+ if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
+ cflags.append('-fstrict-aliasing')
+ elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
+ cflags.append('-fno-strict-aliasing')
+
+ if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
+ cflags.append('-fvisibility=hidden')
+
+ if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'):
+ cflags.append('-Werror')
+
+ if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
+ cflags.append('-Wnewline-eof')
+
+ # In Xcode, this is only activated when GCC_COMPILER_VERSION is clang or
+ # llvm-gcc. It also requires a fairly recent libtool, and
+ # if the system clang isn't used, DYLD_LIBRARY_PATH needs to contain the
+ # path to the libLTO.dylib that matches the used clang.
+ if self._Test('LLVM_LTO', 'YES', default='NO'):
+ cflags.append('-flto')
+
+ self._AppendPlatformVersionMinFlags(cflags)
+
+ # TODO:
+ if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
+ self._WarnUnimplemented('COPY_PHASE_STRIP')
+ self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS')
+ self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS')
+
+ # TODO: This is exported correctly, but assigning to it is not supported.
+ self._WarnUnimplemented('MACH_O_TYPE')
+ self._WarnUnimplemented('PRODUCT_TYPE')
+
+ if arch is not None:
+ archs = [arch]
+ else:
+ assert self.configname
+ archs = self.GetActiveArchs(self.configname)
+ if len(archs) != 1:
+ # TODO: Supporting fat binaries will be annoying.
+ self._WarnUnimplemented('ARCHS')
+ archs = ['i386']
+ cflags.append('-arch ' + archs[0])
+
+ if archs[0] in ('i386', 'x86_64'):
+ if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'):
+ cflags.append('-msse3')
+ if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES',
+ default='NO'):
+ cflags.append('-mssse3') # Note 3rd 's'.
+ if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'):
+ cflags.append('-msse4.1')
+ if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
+ cflags.append('-msse4.2')
+
+ cflags += self._Settings().get('WARNING_CFLAGS', [])
+
+ platform_root = self._XcodePlatformPath(configname)
+ if platform_root and self._IsXCTest():
+ cflags.append('-F' + platform_root + '/Developer/Library/Frameworks/')
+
+ if sdk_root:
+ framework_root = sdk_root
+ else:
+ framework_root = ''
+ config = self.spec['configurations'][self.configname]
+ framework_dirs = config.get('mac_framework_dirs', [])
+ for directory in framework_dirs:
+ cflags.append('-F' + directory.replace('$(SDKROOT)', framework_root))
+
+ self.configname = None
+ return cflags
+
+ def GetCflagsC(self, configname):
+ """Returns flags that need to be added to .c, and .m compilations."""
+ self.configname = configname
+ cflags_c = []
+ if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
+ cflags_c.append('-ansi')
+ else:
+ self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
+ cflags_c += self._Settings().get('OTHER_CFLAGS', [])
+ self.configname = None
+ return cflags_c
+
+ def GetCflagsCC(self, configname):
+ """Returns flags that need to be added to .cc, and .mm compilations."""
+ self.configname = configname
+ cflags_cc = []
+
+ clang_cxx_language_standard = self._Settings().get(
+ 'CLANG_CXX_LANGUAGE_STANDARD')
+    # Note: Don't map c++0x to c++11, so that c++0x can still be used with
+    # older clangs that don't understand c++11 yet (like Xcode 4.2's).
+ if clang_cxx_language_standard:
+ cflags_cc.append('-std=%s' % clang_cxx_language_standard)
+
+ self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
+
+ if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
+ cflags_cc.append('-fno-rtti')
+ if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
+ cflags_cc.append('-fno-exceptions')
+ if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
+ cflags_cc.append('-fvisibility-inlines-hidden')
+ if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
+ cflags_cc.append('-fno-threadsafe-statics')
+ # Note: This flag is a no-op for clang, it only has an effect for gcc.
+ if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
+ cflags_cc.append('-Wno-invalid-offsetof')
+
+ other_ccflags = []
+
+ for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
+ # TODO: More general variable expansion. Missing in many other places too.
+ if flag in ('$inherited', '$(inherited)', '${inherited}'):
+ flag = '$OTHER_CFLAGS'
+ if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
+ other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
+ else:
+ other_ccflags.append(flag)
+ cflags_cc += other_ccflags
+
+ self.configname = None
+ return cflags_cc
+
+ def _AddObjectiveCGarbageCollectionFlags(self, flags):
+ gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported')
+ if gc_policy == 'supported':
+ flags.append('-fobjc-gc')
+ elif gc_policy == 'required':
+ flags.append('-fobjc-gc-only')
+
+ def _AddObjectiveCARCFlags(self, flags):
+ if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
+ flags.append('-fobjc-arc')
+
+ def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
+ if self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS',
+ 'YES', default='NO'):
+ flags.append('-Wobjc-missing-property-synthesis')
+
+ def GetCflagsObjC(self, configname):
+ """Returns flags that need to be added to .m compilations."""
+ self.configname = configname
+ cflags_objc = []
+ self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
+ self._AddObjectiveCARCFlags(cflags_objc)
+ self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc)
+ self.configname = None
+ return cflags_objc
+
+ def GetCflagsObjCC(self, configname):
+ """Returns flags that need to be added to .mm compilations."""
+ self.configname = configname
+ cflags_objcc = []
+ self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
+ self._AddObjectiveCARCFlags(cflags_objcc)
+ self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc)
+ if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
+ cflags_objcc.append('-fobjc-call-cxx-cdtors')
+ self.configname = None
+ return cflags_objcc
+
+ def GetInstallNameBase(self):
+ """Return DYLIB_INSTALL_NAME_BASE for this target."""
+ # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
+ if (self.spec['type'] != 'shared_library' and
+ (self.spec['type'] != 'loadable_module' or self._IsBundle())):
+ return None
+ install_base = self.GetPerTargetSetting(
+ 'DYLIB_INSTALL_NAME_BASE',
+ default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib')
+ return install_base
+
+ def _StandardizePath(self, path):
+ """Do :standardizepath processing for path."""
+ # I'm not quite sure what :standardizepath does. Just call normpath(),
+ # but don't let @executable_path/../foo collapse to foo.
+ if '/' in path:
+ prefix, rest = '', path
+ if path.startswith('@'):
+ prefix, rest = path.split('/', 1)
+ rest = os.path.normpath(rest) # :standardizepath
+ path = os.path.join(prefix, rest)
+ return path
+
+ def GetInstallName(self):
+ """Return LD_DYLIB_INSTALL_NAME for this target."""
+ # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
+ if (self.spec['type'] != 'shared_library' and
+ (self.spec['type'] != 'loadable_module' or self._IsBundle())):
+ return None
+
+ default_install_name = \
+ '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
+ install_name = self.GetPerTargetSetting(
+ 'LD_DYLIB_INSTALL_NAME', default=default_install_name)
+
+ # Hardcode support for the variables used in chromium for now, to
+ # unblock people using the make build.
+ if '$' in install_name:
+ assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
+ '$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
+ 'Variables in LD_DYLIB_INSTALL_NAME are not generally supported '
+ 'yet in target \'%s\' (got \'%s\')' %
+ (self.spec['target_name'], install_name))
+
+ install_name = install_name.replace(
+ '$(DYLIB_INSTALL_NAME_BASE:standardizepath)',
+ self._StandardizePath(self.GetInstallNameBase()))
+ if self._IsBundle():
+ # These are only valid for bundles, hence the |if|.
+ install_name = install_name.replace(
+ '$(WRAPPER_NAME)', self.GetWrapperName())
+ install_name = install_name.replace(
+ '$(PRODUCT_NAME)', self.GetProductName())
+ else:
+ assert '$(WRAPPER_NAME)' not in install_name
+ assert '$(PRODUCT_NAME)' not in install_name
+
+ install_name = install_name.replace(
+ '$(EXECUTABLE_PATH)', self.GetExecutablePath())
+ return install_name
+
+ def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
+ """Checks if ldflag contains a filename and if so remaps it from
+ gyp-directory-relative to build-directory-relative."""
+ # This list is expanded on demand.
+ # They get matched as:
+ # -exported_symbols_list file
+ # -Wl,exported_symbols_list file
+ # -Wl,exported_symbols_list,file
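+    # Illustrative example (path hypothetical): an ldflag of
+    # '-Wl,-exported_symbols_list,sym/foo.list' matches the first pattern, and
+    # 'sym/foo.list' is rewritten to gyp_to_build_path('sym/foo.list').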
+ LINKER_FILE = r'(\S+)'
+ WORD = r'\S+'
+ linker_flags = [
+ ['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
+ ['-unexported_symbols_list', LINKER_FILE],
+ ['-reexported_symbols_list', LINKER_FILE],
+ ['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting.
+ ]
+ for flag_pattern in linker_flags:
+ regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern))
+ m = regex.match(ldflag)
+ if m:
+ ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
+ ldflag[m.end(1):]
+ # Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
+ # TODO(thakis): Update ffmpeg.gyp):
+ if ldflag.startswith('-L'):
+ ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
+ return ldflag
+
+ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
+ """Returns flags that need to be passed to the linker.
+
+ Args:
+ configname: The name of the configuration to get ld flags for.
+      product_dir: The directory where products such as static and dynamic
+          libraries are placed. This is added to the library search path.
+      gyp_to_build_path: A function that converts paths relative to the
+          current gyp file to paths relative to the build directory.
+ """
+ self.configname = configname
+ ldflags = []
+
+ # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
+ # can contain entries that depend on this. Explicitly absolutify these.
+ for ldflag in self._Settings().get('OTHER_LDFLAGS', []):
+ ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))
+
+ if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'):
+ ldflags.append('-Wl,-dead_strip')
+
+ if self._Test('PREBINDING', 'YES', default='NO'):
+ ldflags.append('-Wl,-prebind')
+
+ self._Appendf(
+ ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
+ self._Appendf(
+ ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')
+
+ self._AppendPlatformVersionMinFlags(ldflags)
+
+ if 'SDKROOT' in self._Settings() and self._SdkPath():
+ ldflags.append('-isysroot ' + self._SdkPath())
+
+ for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
+ ldflags.append('-L' + gyp_to_build_path(library_path))
+
+ if 'ORDER_FILE' in self._Settings():
+ ldflags.append('-Wl,-order_file ' +
+ '-Wl,' + gyp_to_build_path(
+ self._Settings()['ORDER_FILE']))
+
+ if arch is not None:
+ archs = [arch]
+ else:
+ assert self.configname
+ archs = self.GetActiveArchs(self.configname)
+ if len(archs) != 1:
+ # TODO: Supporting fat binaries will be annoying.
+ self._WarnUnimplemented('ARCHS')
+ archs = ['i386']
+ ldflags.append('-arch ' + archs[0])
+
+ # Xcode adds the product directory by default.
+ # Rewrite -L. to -L./ to work around http://www.openradar.me/25313838
+ ldflags.append('-L' + (product_dir if product_dir != '.' else './'))
+
+ install_name = self.GetInstallName()
+ if install_name and self.spec['type'] != 'loadable_module':
+ ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
+
+ for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
+ ldflags.append('-Wl,-rpath,' + rpath)
+
+ sdk_root = self._SdkPath()
+ if not sdk_root:
+ sdk_root = ''
+ config = self.spec['configurations'][self.configname]
+ framework_dirs = config.get('mac_framework_dirs', [])
+ for directory in framework_dirs:
+ ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
+
+ platform_root = self._XcodePlatformPath(configname)
+ if sdk_root and platform_root and self._IsXCTest():
+ ldflags.append('-F' + platform_root + '/Developer/Library/Frameworks/')
+ ldflags.append('-framework XCTest')
+
+ is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension()
+ if sdk_root and is_extension:
+ # Adds the link flags for extensions. These flags are common for all
+ # extensions and provide loader and main function.
+ # These flags reflect the compilation options used by xcode to compile
+ # extensions.
+ if XcodeVersion()[0] < '0900':
+ ldflags.append('-lpkstart')
+ ldflags.append(sdk_root +
+ '/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit')
+ else:
+ ldflags.append('-e _NSExtensionMain')
+ ldflags.append('-fapplication-extension')
+
+ self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
+
+ self.configname = None
+ return ldflags
+
+ def GetLibtoolflags(self, configname):
+ """Returns flags that need to be passed to the static linker.
+
+ Args:
+ configname: The name of the configuration to get ld flags for.
+ """
+ self.configname = configname
+ libtoolflags = []
+
+ for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []):
+ libtoolflags.append(libtoolflag)
+ # TODO(thakis): ARCHS?
+
+ self.configname = None
+ return libtoolflags
+
+ def GetPerTargetSettings(self):
+ """Gets a list of all the per-target settings. This will only fetch keys
+ whose values are the same across all configurations."""
+ first_pass = True
+ result = {}
+ for configname in sorted(self.xcode_settings.keys()):
+ if first_pass:
+ result = dict(self.xcode_settings[configname])
+ first_pass = False
+ else:
+ for key, value in self.xcode_settings[configname].items():
+ if key not in result:
+ continue
+ elif result[key] != value:
+ del result[key]
+ return result
+
+ def GetPerConfigSetting(self, setting, configname, default=None):
+ if configname in self.xcode_settings:
+ return self.xcode_settings[configname].get(setting, default)
+ else:
+ return self.GetPerTargetSetting(setting, default)
+
+ def GetPerTargetSetting(self, setting, default=None):
+ """Tries to get xcode_settings.setting from spec. Assumes that the setting
+ has the same value in all configurations and throws otherwise."""
+ is_first_pass = True
+ result = None
+ for configname in sorted(self.xcode_settings.keys()):
+ if is_first_pass:
+ result = self.xcode_settings[configname].get(setting, None)
+ is_first_pass = False
+ else:
+ assert result == self.xcode_settings[configname].get(setting, None), (
+ "Expected per-target setting for '%s', got per-config setting "
+ "(target %s)" % (setting, self.spec['target_name']))
+ if result is None:
+ return default
+ return result
+
+ def _GetStripPostbuilds(self, configname, output_binary, quiet):
+ """Returns a list of shell commands that contain the shell commands
+ neccessary to strip this target's binary. These should be run as postbuilds
+ before the actual postbuilds run."""
+ self.configname = configname
+
+ result = []
+ if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and
+ self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
+
+ default_strip_style = 'debugging'
+ if ((self.spec['type'] == 'loadable_module' or self._IsIosAppExtension())
+ and self._IsBundle()):
+ default_strip_style = 'non-global'
+ elif self.spec['type'] == 'executable':
+ default_strip_style = 'all'
+
+ strip_style = self._Settings().get('STRIP_STYLE', default_strip_style)
+ strip_flags = {
+ 'all': '',
+ 'non-global': '-x',
+ 'debugging': '-S',
+ }[strip_style]
+
+ explicit_strip_flags = self._Settings().get('STRIPFLAGS', '')
+ if explicit_strip_flags:
+ strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags)
+
+ if not quiet:
+ result.append('echo STRIP\\(%s\\)' % self.spec['target_name'])
+ result.append('strip %s %s' % (strip_flags, output_binary))
+
+ self.configname = None
+ return result
+
+ def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
+ """Returns a list of shell commands that contain the shell commands
+ neccessary to massage this target's debug information. These should be run
+ as postbuilds before the actual postbuilds run."""
+ self.configname = configname
+
+ # For static libraries, no dSYMs are created.
+ result = []
+ if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
+ self._Test(
+ 'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
+ self.spec['type'] != 'static_library'):
+ if not quiet:
+ result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
+ result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))
+
+ self.configname = None
+ return result
+
+ def _GetTargetPostbuilds(self, configname, output, output_binary,
+ quiet=False):
+ """Returns a list of shell commands that contain the shell commands
+ to run as postbuilds for this target, before the actual postbuilds."""
+ # dSYMs need to build before stripping happens.
+ return (
+ self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
+ self._GetStripPostbuilds(configname, output_binary, quiet))
+
+ def _GetIOSPostbuilds(self, configname, output_binary):
+ """Return a shell command to codesign the iOS output binary so it can
+ be deployed to a device. This should be run as the very last step of the
+ build."""
+ if not (self.isIOS and
+ (self.spec['type'] == 'executable' or self._IsXCTest()) or
+ self.IsIosFramework()):
+ return []
+
+ postbuilds = []
+ product_name = self.GetFullProductName()
+ settings = self.xcode_settings[configname]
+
+ # Xcode expects XCTests to be copied into the TEST_HOST dir.
+ if self._IsXCTest():
+ source = os.path.join("${BUILT_PRODUCTS_DIR}", product_name)
+      test_host = os.path.dirname(settings.get('TEST_HOST'))
+ xctest_destination = os.path.join(test_host, 'PlugIns', product_name)
+ postbuilds.extend(['ditto %s %s' % (source, xctest_destination)])
+
+ key = self._GetIOSCodeSignIdentityKey(settings)
+ if not key:
+ return postbuilds
+
+ # Warn for any unimplemented signing xcode keys.
+ unimpl = ['OTHER_CODE_SIGN_FLAGS']
+ unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
+ if unimpl:
+ print('Warning: Some codesign keys not implemented, ignoring: %s' % (
+ ', '.join(sorted(unimpl))))
+
+ if self._IsXCTest():
+ # For device xctests, Xcode copies two extra frameworks into $TEST_HOST.
+      test_host = os.path.dirname(settings.get('TEST_HOST'))
+ frameworks_dir = os.path.join(test_host, 'Frameworks')
+ platform_root = self._XcodePlatformPath(configname)
+ frameworks = \
+ ['Developer/Library/PrivateFrameworks/IDEBundleInjection.framework',
+ 'Developer/Library/Frameworks/XCTest.framework']
+ for framework in frameworks:
+ source = os.path.join(platform_root, framework)
+ destination = os.path.join(frameworks_dir, os.path.basename(framework))
+ postbuilds.extend(['ditto %s %s' % (source, destination)])
+
+ # Then re-sign everything with 'preserve=True'
+ postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % (
+ os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
+ settings.get('CODE_SIGN_ENTITLEMENTS', ''),
+ settings.get('PROVISIONING_PROFILE', ''), destination, True)
+ ])
+ plugin_dir = os.path.join(test_host, 'PlugIns')
+ targets = [os.path.join(plugin_dir, product_name), test_host]
+ for target in targets:
+ postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % (
+ os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
+ settings.get('CODE_SIGN_ENTITLEMENTS', ''),
+ settings.get('PROVISIONING_PROFILE', ''), target, True)
+ ])
+
+ postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % (
+ os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
+ settings.get('CODE_SIGN_ENTITLEMENTS', ''),
+ settings.get('PROVISIONING_PROFILE', ''),
+ os.path.join("${BUILT_PRODUCTS_DIR}", product_name), False)
+ ])
+ return postbuilds
+
+ def _GetIOSCodeSignIdentityKey(self, settings):
+ identity = settings.get('CODE_SIGN_IDENTITY')
+ if not identity:
+ return None
+ if identity not in XcodeSettings._codesigning_key_cache:
+ output = subprocess.check_output(
+ ['security', 'find-identity', '-p', 'codesigning', '-v'])
+ for line in output.splitlines():
+ line_decoded = line.decode('utf-8')
+ if identity in line_decoded:
+ fingerprint = line_decoded.split()[1]
+ cache = XcodeSettings._codesigning_key_cache
+ assert identity not in cache or fingerprint == cache[identity], (
+ "Multiple codesigning fingerprints for identity: %s" % identity)
+ XcodeSettings._codesigning_key_cache[identity] = fingerprint
+ return XcodeSettings._codesigning_key_cache.get(identity, '')
+
+ def AddImplicitPostbuilds(self, configname, output, output_binary,
+ postbuilds=[], quiet=False):
+ """Returns a list of shell commands that should run before and after
+ |postbuilds|."""
+ assert output_binary is not None
+ pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
+ post = self._GetIOSPostbuilds(configname, output_binary)
+ return pre + postbuilds + post
+
+ def _AdjustLibrary(self, library, config_name=None):
+ if library.endswith('.framework'):
+ l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
+ else:
+ m = self.library_re.match(library)
+ if m:
+ l = '-l' + m.group(1)
+ else:
+ l = library
+
+ sdk_root = self._SdkPath(config_name)
+ if not sdk_root:
+ sdk_root = ''
+    # Xcode 7 started shipping with ".tbd" (text based stubs) files instead of
+    # ".dylib" without providing real support for them. What it does, for
+    # "/usr/lib" libraries, is emit "-L/usr/lib -lname", which depends on the
+    # library order and causes collisions when building Chrome.
+    #
+    # Instead, substitute ".tbd" for ".dylib" in the generated project when the
+    # following conditions are both true:
+    # - the library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib",
+    # - the ".dylib" file does not exist but a ".tbd" file does.
+ library = l.replace('$(SDKROOT)', sdk_root)
+ if l.startswith('$(SDKROOT)'):
+ basename, ext = os.path.splitext(library)
+ if ext == '.dylib' and not os.path.exists(library):
+ tbd_library = basename + '.tbd'
+ if os.path.exists(tbd_library):
+ library = tbd_library
+ return library
+
+ def AdjustLibraries(self, libraries, config_name=None):
+ """Transforms entries like 'Cocoa.framework' in libraries into entries like
+ '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
+ """
+ libraries = [self._AdjustLibrary(library, config_name)
+ for library in libraries]
+ return libraries
+
+ def _BuildMachineOSBuild(self):
+ return GetStdout(['sw_vers', '-buildVersion'])
+
+ def _XcodeIOSDeviceFamily(self, configname):
+ family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
+ return [int(x) for x in family.split(',')]
+
+ def GetExtraPlistItems(self, configname=None):
+ """Returns a dictionary with extra items to insert into Info.plist."""
+ if configname not in XcodeSettings._plist_cache:
+ cache = {}
+ cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
+
+ xcode, xcode_build = XcodeVersion()
+ cache['DTXcode'] = xcode
+ cache['DTXcodeBuild'] = xcode_build
+ compiler = self.xcode_settings[configname].get('GCC_VERSION')
+ if compiler is not None:
+ cache['DTCompiler'] = compiler
+
+ sdk_root = self._SdkRoot(configname)
+ if not sdk_root:
+ sdk_root = self._DefaultSdkRoot()
+ sdk_version = self._GetSdkVersionInfoItem(sdk_root, '--show-sdk-version')
+ cache['DTSDKName'] = sdk_root + (sdk_version or '')
+ if xcode >= '0720':
+ cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
+ sdk_root, '--show-sdk-build-version')
+ elif xcode >= '0430':
+ cache['DTSDKBuild'] = sdk_version
+ else:
+ cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
+
+ if self.isIOS:
+ cache['MinimumOSVersion'] = self.xcode_settings[configname].get(
+ 'IPHONEOS_DEPLOYMENT_TARGET')
+ cache['DTPlatformName'] = sdk_root
+ cache['DTPlatformVersion'] = sdk_version
+
+ if configname.endswith("iphoneos"):
+ cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
+ cache['DTPlatformBuild'] = cache['DTSDKBuild']
+ else:
+ cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
+ # This is weird, but Xcode sets DTPlatformBuild to an empty field
+ # for simulator builds.
+ cache['DTPlatformBuild'] = ""
+ XcodeSettings._plist_cache[configname] = cache
+
+ # Include extra plist items that are per-target, not per global
+ # XcodeSettings.
+ items = dict(XcodeSettings._plist_cache[configname])
+ if self.isIOS:
+ items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
+ return items
+
+ def _DefaultSdkRoot(self):
+ """Returns the default SDKROOT to use.
+
+ Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
+ project, then the environment variable was empty. Starting with this
+ version, Xcode uses the name of the newest SDK installed.
+ """
+ xcode_version, xcode_build = XcodeVersion()
+ if xcode_version < '0500':
+ return ''
+ default_sdk_path = self._XcodeSdkPath('')
+ default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
+ if default_sdk_root:
+ return default_sdk_root
+ try:
+ all_sdks = GetStdout(['xcodebuild', '-showsdks'])
+ except:
+ # If xcodebuild fails, there will be no valid SDKs
+ return ''
+ for line in all_sdks.splitlines():
+ items = line.split()
+ if len(items) >= 3 and items[-2] == '-sdk':
+ sdk_root = items[-1]
+ sdk_path = self._XcodeSdkPath(sdk_root)
+ if sdk_path == default_sdk_path:
+ return sdk_root
+ return ''
+
+
+class MacPrefixHeader(object):
+ """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
+
+ This feature consists of several pieces:
+ * If GCC_PREFIX_HEADER is present, all compilations in that project get an
+ additional |-include path_to_prefix_header| cflag.
+ * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
+ instead compiled, and all other compilations in the project get an
+ additional |-include path_to_compiled_header| instead.
+ + Compiled prefix headers have the extension gch. There is one gch file for
+ every language used in the project (c, cc, m, mm), since gch files for
+ different languages aren't compatible.
+ + gch files themselves are built with the target's normal cflags, but they
+ obviously don't get the |-include| flag. Instead, they need a -x flag that
+ describes their language.
+ + All o files in the target need to depend on the gch file, to make sure
+ it's built before any o file is built.
+
+ This class helps with some of these tasks, but it needs help from the build
+ system for writing dependencies to the gch files, for writing build commands
+ for the gch files, and for figuring out the location of the gch files.
+ """
+ def __init__(self, xcode_settings,
+ gyp_path_to_build_path, gyp_path_to_build_output):
+ """If xcode_settings is None, all methods on this class are no-ops.
+
+ Args:
+ gyp_path_to_build_path: A function that takes a gyp-relative path,
+ and returns a path relative to the build directory.
+ gyp_path_to_build_output: A function that takes a gyp-relative path and
+ a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
+ to where the output of precompiling that path for that language
+ should be placed (without the trailing '.gch').
+ """
+ # This doesn't support per-configuration prefix headers. Good enough
+ # for now.
+ self.header = None
+ self.compile_headers = False
+ if xcode_settings:
+ self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER')
+ self.compile_headers = xcode_settings.GetPerTargetSetting(
+ 'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO'
+ self.compiled_headers = {}
+ if self.header:
+ if self.compile_headers:
+ for lang in ['c', 'cc', 'm', 'mm']:
+ self.compiled_headers[lang] = gyp_path_to_build_output(
+ self.header, lang)
+ self.header = gyp_path_to_build_path(self.header)
+
+ def _CompiledHeader(self, lang, arch):
+ assert self.compile_headers
+ h = self.compiled_headers[lang]
+ if arch:
+ h += '.' + arch
+ return h
+
+ def GetInclude(self, lang, arch=None):
+ """Gets the cflags to include the prefix header for language |lang|."""
+ if self.compile_headers and lang in self.compiled_headers:
+ return '-include %s' % self._CompiledHeader(lang, arch)
+ elif self.header:
+ return '-include %s' % self.header
+ else:
+ return ''
+
+ def _Gch(self, lang, arch):
+ """Returns the actual file name of the prefix header for language |lang|."""
+ assert self.compile_headers
+ return self._CompiledHeader(lang, arch) + '.gch'
+
+ def GetObjDependencies(self, sources, objs, arch=None):
+ """Given a list of source files and the corresponding object files, returns
+ a list of (source, object, gch) tuples, where |gch| is the build-directory
+    relative path to the gch file each object file depends on. |sources[i]|
+ has to be the source file belonging to |objs[i]|."""
+ if not self.header or not self.compile_headers:
+ return []
+
+ result = []
+ for source, obj in zip(sources, objs):
+ ext = os.path.splitext(source)[1]
+ lang = {
+ '.c': 'c',
+ '.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc',
+ '.m': 'm',
+ '.mm': 'mm',
+ }.get(ext, None)
+ if lang:
+ result.append((source, obj, self._Gch(lang, arch)))
+ return result
+
+ def GetPchBuildCommands(self, arch=None):
+ """Returns [(path_to_gch, language_flag, language, header)].
+ |path_to_gch| and |header| are relative to the build directory.
+ """
+ if not self.header or not self.compile_headers:
+ return []
+ return [
+ (self._Gch('c', arch), '-x c-header', 'c', self.header),
+ (self._Gch('cc', arch), '-x c++-header', 'cc', self.header),
+ (self._Gch('m', arch), '-x objective-c-header', 'm', self.header),
+ (self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header),
+ ]
+
+
+def XcodeVersion():
+ """Returns a tuple of version and build version of installed Xcode."""
+ # `xcodebuild -version` output looks like
+ # Xcode 4.6.3
+ # Build version 4H1503
+ # or like
+ # Xcode 3.2.6
+ # Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
+ # BuildVersion: 10M2518
+ # Convert that to '0463', '4H1503'.
+ global XCODE_VERSION_CACHE
+ if XCODE_VERSION_CACHE:
+ return XCODE_VERSION_CACHE
+ try:
+ version_list = GetStdout(['xcodebuild', '-version']).splitlines()
+    # In some circumstances xcodebuild exits 0 but doesn't return
+    # the right results; for example, a user on 10.7 or 10.8 with
+    # a bogus path set via xcode-select.
+    # In that case this may be a CLT-only install, so fall back to
+    # checking that version.
+ if len(version_list) < 2:
+ raise GypError("xcodebuild returned unexpected results")
+ except:
+ version = CLTVersion()
+ if version:
+ version = re.match(r'(\d\.\d\.?\d*)', version).groups()[0]
+ else:
+ raise GypError("No Xcode or CLT version detected!")
+ # The CLT has no build information, so we return an empty string.
+ version_list = [version, '']
+ version = version_list[0]
+ build = version_list[-1]
+ # Be careful to convert "4.2" to "0420":
+ version = version.split()[-1].replace('.', '')
+ version = (version + '0' * (3 - len(version))).zfill(4)
+ if build:
+ build = build.split()[-1]
+ XCODE_VERSION_CACHE = (version, build)
+ return XCODE_VERSION_CACHE
+
+
+# This function ported from the logic in Homebrew's CLT version check
+def CLTVersion():
+ """Returns the version of command-line tools from pkgutil."""
+ # pkgutil output looks like
+ # package-id: com.apple.pkg.CLTools_Executables
+ # version: 5.0.1.0.1.1382131676
+ # volume: /
+ # location: /
+ # install-time: 1382544035
+ # groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
+ STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
+ FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
+ MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
+
+ regex = re.compile('version: (?P<version>.+)')
+ for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
+ try:
+ output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
+ return re.search(regex, output).groupdict()['version']
+ except:
+ continue
+
+
+def GetStdout(cmdlist):
+ """Returns the content of standard output returned by invoking |cmdlist|.
+  Raises |GypError| if the command exits with a non-zero return code."""
+ job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
+ out = job.communicate()[0]
+ if job.returncode != 0:
+    sys.stderr.write(out.decode('utf-8') + '\n')
+ raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
+ return out.rstrip(b'\n').decode('utf-8')
+
+
+def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
+ """Merges the global xcode_settings dictionary into each configuration of the
+ target represented by spec. For keys that are both in the global and the local
+xcode_settings dict, the local key gets precedence.
+ """
+ # The xcode generator special-cases global xcode_settings and does something
+ # that amounts to merging in the global xcode_settings into each local
+ # xcode_settings dict.
+ global_xcode_settings = global_dict.get('xcode_settings', {})
+ for config in spec['configurations'].values():
+ if 'xcode_settings' in config:
+ new_settings = global_xcode_settings.copy()
+ new_settings.update(config['xcode_settings'])
+ config['xcode_settings'] = new_settings
+
+
+def IsMacBundle(flavor, spec):
+ """Returns if |spec| should be treated as a bundle.
+
+ Bundles are directories with a certain subdirectory structure, instead of
+  just a single file. Bundle rules not only produce a binary but also package
+  resources into that directory."""
+ is_mac_bundle = int(spec.get('mac_xctest_bundle', 0)) != 0 or \
+ int(spec.get('mac_xcuitest_bundle', 0)) != 0 or \
+ (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac')
+
+ if is_mac_bundle:
+ assert spec['type'] != 'none', (
+ 'mac_bundle targets cannot have type none (target "%s")' %
+ spec['target_name'])
+ return is_mac_bundle
+
+
+def GetMacBundleResources(product_dir, xcode_settings, resources):
+ """Yields (output, resource) pairs for every resource in |resources|.
+ Only call this for mac bundle targets.
+
+ Args:
+ product_dir: Path to the directory containing the output bundle,
+ relative to the build directory.
+ xcode_settings: The XcodeSettings of the current target.
+ resources: A list of bundle resources, relative to the build directory.
+ """
+ dest = os.path.join(product_dir,
+ xcode_settings.GetBundleResourceFolder())
+ for res in resources:
+ output = dest
+
+ # The make generator doesn't support it, so forbid it everywhere
+    # to keep the generators more interchangeable.
+ assert ' ' not in res, (
+ "Spaces in resource filenames not supported (%s)" % res)
+
+ # Split into (path,file).
+ res_parts = os.path.split(res)
+
+ # Now split the path into (prefix,maybe.lproj).
+ lproj_parts = os.path.split(res_parts[0])
+ # If the resource lives in a .lproj bundle, add that to the destination.
+ if lproj_parts[1].endswith('.lproj'):
+ output = os.path.join(output, lproj_parts[1])
+
+ output = os.path.join(output, res_parts[1])
+ # Compiled XIB files are referred to by .nib.
+ if output.endswith('.xib'):
+ output = os.path.splitext(output)[0] + '.nib'
+ # Compiled storyboard files are referred to by .storyboardc.
+ if output.endswith('.storyboard'):
+ output = os.path.splitext(output)[0] + '.storyboardc'
+
+ yield output, res
+
+
+def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
+ """Returns (info_plist, dest_plist, defines, extra_env), where:
+ * |info_plist| is the source plist path, relative to the
+ build directory,
+ * |dest_plist| is the destination plist path, relative to the
+ build directory,
+ * |defines| is a list of preprocessor defines (empty if the plist
+    shouldn't be preprocessed),
+ * |extra_env| is a dict of env variables that should be exported when
+ invoking |mac_tool copy-info-plist|.
+
+ Only call this for mac bundle targets.
+
+ Args:
+ product_dir: Path to the directory containing the output bundle,
+ relative to the build directory.
+ xcode_settings: The XcodeSettings of the current target.
+    gyp_path_to_build_path: A function that converts paths relative to the
+        current gyp file to paths relative to the build directory.
+ """
+ info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE')
+ if not info_plist:
+ return None, None, [], {}
+
+ # The make generator doesn't support it, so forbid it everywhere
+  # to keep the generators more interchangeable.
+ assert ' ' not in info_plist, (
+ "Spaces in Info.plist filenames not supported (%s)" % info_plist)
+
+ info_plist = gyp_path_to_build_path(info_plist)
+
+ # If explicitly set to preprocess the plist, invoke the C preprocessor and
+ # specify any defines as -D flags.
+ if xcode_settings.GetPerTargetSetting(
+ 'INFOPLIST_PREPROCESS', default='NO') == 'YES':
+ # Create an intermediate file based on the path.
+ defines = shlex.split(xcode_settings.GetPerTargetSetting(
+ 'INFOPLIST_PREPROCESSOR_DEFINITIONS', default=''))
+ else:
+ defines = []
+
+ dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath())
+ extra_env = xcode_settings.GetPerTargetSettings()
+
+ return info_plist, dest_plist, defines, extra_env
+
+
+def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
+ additional_settings=None):
+ """Return the environment variables that Xcode would set. See
+ http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
+ for a full list.
+
+ Args:
+ xcode_settings: An XcodeSettings object. If this is None, this function
+ returns an empty dict.
+ built_products_dir: Absolute path to the built products dir.
+ srcroot: Absolute path to the source root.
+ configuration: The build configuration name.
+ additional_settings: An optional dict with more values to add to the
+ result.
+ """
+
+ if not xcode_settings: return {}
+
+ # This function is considered a friend of XcodeSettings, so let it reach into
+ # its implementation details.
+ spec = xcode_settings.spec
+
+ # These are filled in on an as-needed basis.
+ env = {
+ 'BUILT_FRAMEWORKS_DIR' : built_products_dir,
+ 'BUILT_PRODUCTS_DIR' : built_products_dir,
+ 'CONFIGURATION' : configuration,
+ 'PRODUCT_NAME' : xcode_settings.GetProductName(),
+ # See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME
+ 'SRCROOT' : srcroot,
+ 'SOURCE_ROOT': '${SRCROOT}',
+ # This is not true for static libraries, but currently the env is only
+ # written for bundles:
+ 'TARGET_BUILD_DIR' : built_products_dir,
+ 'TEMP_DIR' : '${TMPDIR}',
+ 'XCODE_VERSION_ACTUAL' : XcodeVersion()[0],
+ }
+ if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
+ env['SDKROOT'] = xcode_settings._SdkPath(configuration)
+ else:
+ env['SDKROOT'] = ''
+
+ if xcode_settings.mac_toolchain_dir:
+ env['DEVELOPER_DIR'] = xcode_settings.mac_toolchain_dir
+
+ if spec['type'] in (
+ 'executable', 'static_library', 'shared_library', 'loadable_module'):
+ env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
+ env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath()
+ env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName()
+ mach_o_type = xcode_settings.GetMachOType()
+ if mach_o_type:
+ env['MACH_O_TYPE'] = mach_o_type
+ env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
+ if xcode_settings._IsBundle():
+ # xcodeproj_file.py sets the same Xcode subfolder value for this as for
+ # FRAMEWORKS_FOLDER_PATH so Xcode builds will actually use FFP's value.
+      env['BUILT_FRAMEWORKS_DIR'] = \
+          os.path.join(built_products_dir,
+                       xcode_settings.GetBundleFrameworksFolderPath())
+ env['CONTENTS_FOLDER_PATH'] = \
+ xcode_settings.GetBundleContentsFolderPath()
+ env['EXECUTABLE_FOLDER_PATH'] = \
+ xcode_settings.GetBundleExecutableFolderPath()
+ env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
+ xcode_settings.GetBundleResourceFolder()
+ env['JAVA_FOLDER_PATH'] = xcode_settings.GetBundleJavaFolderPath()
+ env['FRAMEWORKS_FOLDER_PATH'] = \
+ xcode_settings.GetBundleFrameworksFolderPath()
+ env['SHARED_FRAMEWORKS_FOLDER_PATH'] = \
+ xcode_settings.GetBundleSharedFrameworksFolderPath()
+ env['SHARED_SUPPORT_FOLDER_PATH'] = \
+ xcode_settings.GetBundleSharedSupportFolderPath()
+ env['PLUGINS_FOLDER_PATH'] = xcode_settings.GetBundlePlugInsFolderPath()
+ env['XPCSERVICES_FOLDER_PATH'] = \
+ xcode_settings.GetBundleXPCServicesFolderPath()
+ env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
+ env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()
+
+ install_name = xcode_settings.GetInstallName()
+ if install_name:
+ env['LD_DYLIB_INSTALL_NAME'] = install_name
+ install_name_base = xcode_settings.GetInstallNameBase()
+ if install_name_base:
+ env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
+ if XcodeVersion()[0] >= '0500' and not env.get('SDKROOT'):
+ sdk_root = xcode_settings._SdkRoot(configuration)
+ if not sdk_root:
+ sdk_root = xcode_settings._XcodeSdkPath('')
+ env['SDKROOT'] = sdk_root
+
+ if not additional_settings:
+ additional_settings = {}
+ else:
+ # Flatten lists to strings.
+ for k in additional_settings:
+ if not isinstance(additional_settings[k], str):
+ additional_settings[k] = ' '.join(additional_settings[k])
+ additional_settings.update(env)
+
+ for k in additional_settings:
+ additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k])
+
+ return additional_settings
+
+
+def _NormalizeEnvVarReferences(str):
+ """Takes a string containing variable references in the form ${FOO}, $(FOO),
+ or $FOO, and returns a string with all variable references in the form ${FOO}.
+ """
+ # $FOO -> ${FOO}
+ str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str)
+
+ # $(FOO) -> ${FOO}
+ matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str)
+ for match in matches:
+ to_replace, variable = match
+    assert '$(' not in variable, \
+        '$($(FOO)) variables not supported: ' + to_replace
+ str = str.replace(to_replace, '${' + variable + '}')
+
+ return str
+
+
+def ExpandEnvVars(string, expansions):
+ """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
+ expansions list. If the variable expands to something that references
+ another variable, this variable is expanded as well if it's in env --
+ until no variables present in env are left."""
+ for k, v in reversed(expansions):
+ string = string.replace('${' + k + '}', v)
+ string = string.replace('$(' + k + ')', v)
+ string = string.replace('$' + k, v)
+ return string
+
+
+def _TopologicallySortedEnvVarKeys(env):
+ """Takes a dict |env| whose values are strings that can refer to other keys,
+ for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
+ env such that key2 is after key1 in L if env[key2] refers to env[key1].
+
+ Throws an Exception in case of dependency cycles.
+ """
+ # Since environment variables can refer to other variables, the evaluation
+ # order is important. Below is the logic to compute the dependency graph
+ # and sort it.
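+  # Worked example (hypothetical values): for env = {'A': 'x', 'B': '${A}/y'},
+  # GetEdges('B') below is {'A'} and GetEdges('A') is empty, so the order
+  # returned is ['A', 'B']: 'A' comes before the variable that refers to it.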
+ regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')
+ def GetEdges(node):
+    # Use a definition of edges such that user_of_variable -> used_variable.
+ # This happens to be easier in this case, since a variable's
+ # definition contains all variables it references in a single string.
+ # We can then reverse the result of the topological sort at the end.
+ # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
+ matches = set([v for v in regex.findall(env[node]) if v in env])
+ for dependee in matches:
+ assert '${' not in dependee, 'Nested variables not supported: ' + dependee
+ return matches
+
+ try:
+ # Topologically sort, and then reverse, because we used an edge definition
+ # that's inverted from the expected result of this function (see comment
+ # above).
+ order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
+ order.reverse()
+ return order
+ except gyp.common.CycleError as e:
+ raise GypError(
+ 'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
+
+
+def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot,
+ configuration, additional_settings=None):
+ env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
+ additional_settings)
+ return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
+
+
+def GetSpecPostbuildCommands(spec, quiet=False):
+ """Returns the list of postbuilds explicitly defined on |spec|, in a form
+ executable by a shell."""
+ postbuilds = []
+ for postbuild in spec.get('postbuilds', []):
+ if not quiet:
+ postbuilds.append('echo POSTBUILD\\(%s\\) %s' % (
+ spec['target_name'], postbuild['postbuild_name']))
+ postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
+ return postbuilds
+
+
+def _HasIOSTarget(targets):
+ """Returns true if any target contains the iOS specific key
+ IPHONEOS_DEPLOYMENT_TARGET."""
+ for target_dict in targets.values():
+ for config in target_dict['configurations'].values():
+ if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'):
+ return True
+ return False
+
+
+def _AddIOSDeviceConfigurations(targets):
+ """Clone all targets and append -iphoneos to the name. Configure these targets
+ to build for iOS devices and use correct architectures for those builds."""
+ for target_dict in targets.values():
+ toolset = target_dict['toolset']
+ configs = target_dict['configurations']
+
+ for config_name, simulator_config_dict in dict(configs).items():
+ iphoneos_config_dict = copy.deepcopy(simulator_config_dict)
+ configs[config_name + '-iphoneos'] = iphoneos_config_dict
+ configs[config_name + '-iphonesimulator'] = simulator_config_dict
+ if toolset == 'target':
+ simulator_config_dict['xcode_settings']['SDKROOT'] = 'iphonesimulator'
+ iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
+ return targets
+
+def CloneConfigurationForDeviceAndEmulator(target_dicts):
+ """If |target_dicts| contains any iOS targets, automatically create -iphoneos
+ targets for iOS device builds."""
+ if _HasIOSTarget(target_dicts):
+ return _AddIOSDeviceConfigurations(target_dicts)
+ return target_dicts
diff --git a/third_party/python/gyp/pylib/gyp/xcode_ninja.py b/third_party/python/gyp/pylib/gyp/xcode_ninja.py
new file mode 100644
index 0000000000..1d71b8c5f8
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/xcode_ninja.py
@@ -0,0 +1,289 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Xcode-ninja wrapper project file generator.
+
+This updates the data structures passed to the Xcode gyp generator to build
+with ninja instead. The Xcode project itself is transformed into a list of
+executable targets, each with a build step to build with ninja, and a target
+with every source and resource file. This appears to sidestep some of the
+major performance headaches experienced when using complex projects and large
+numbers of targets within Xcode.
+"""
+
+import errno
+import gyp.generator.ninja
+import os
+import re
+import xml.sax.saxutils
+
+
+def _WriteWorkspace(main_gyp, sources_gyp, params):
+ """ Create a workspace to wrap main and sources gyp paths. """
+ (build_file_root, build_file_ext) = os.path.splitext(main_gyp)
+ workspace_path = build_file_root + '.xcworkspace'
+ options = params['options']
+ if options.generator_output:
+ workspace_path = os.path.join(options.generator_output, workspace_path)
+ try:
+ os.makedirs(workspace_path)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ output_string = '<?xml version="1.0" encoding="UTF-8"?>\n' + \
+ '<Workspace version = "1.0">\n'
+ for gyp_name in [main_gyp, sources_gyp]:
+ name = os.path.splitext(os.path.basename(gyp_name))[0] + '.xcodeproj'
+ name = xml.sax.saxutils.quoteattr("group:" + name)
+ output_string += ' <FileRef location = %s></FileRef>\n' % name
+ output_string += '</Workspace>\n'
+
+ workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
+
+ try:
+ with open(workspace_file, 'r') as input_file:
+ input_string = input_file.read()
+ if input_string == output_string:
+ return
+ except IOError:
+ # Ignore errors if the file doesn't exist.
+ pass
+
+ with open(workspace_file, 'w') as output_file:
+ output_file.write(output_string)
+
+def _TargetFromSpec(old_spec, params):
+ """ Create fake target for xcode-ninja wrapper. """
+ # Determine ninja top level build dir (e.g. /path/to/out).
+ ninja_toplevel = None
+ jobs = 0
+ if params:
+ options = params['options']
+ ninja_toplevel = \
+ os.path.join(options.toplevel_dir,
+ gyp.generator.ninja.ComputeOutputDir(params))
+ jobs = params.get('generator_flags', {}).get('xcode_ninja_jobs', 0)
+
+ target_name = old_spec.get('target_name')
+ product_name = old_spec.get('product_name', target_name)
+ product_extension = old_spec.get('product_extension')
+
+ ninja_target = {}
+ ninja_target['target_name'] = target_name
+ ninja_target['product_name'] = product_name
+ if product_extension:
+ ninja_target['product_extension'] = product_extension
+ ninja_target['toolset'] = old_spec.get('toolset')
+ ninja_target['default_configuration'] = old_spec.get('default_configuration')
+ ninja_target['configurations'] = {}
+
+ # Tell Xcode to look in |ninja_toplevel| for build products.
+ new_xcode_settings = {}
+ if ninja_toplevel:
+ new_xcode_settings['CONFIGURATION_BUILD_DIR'] = \
+ "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
+
+ if 'configurations' in old_spec:
+ for config in old_spec['configurations'].keys():
+ old_xcode_settings = \
+ old_spec['configurations'][config].get('xcode_settings', {})
+ if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings:
+ new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO"
+ new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \
+ old_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET']
+ for key in ['BUNDLE_LOADER', 'TEST_HOST']:
+ if key in old_xcode_settings:
+ new_xcode_settings[key] = old_xcode_settings[key]
+
+ ninja_target['configurations'][config] = {}
+ ninja_target['configurations'][config]['xcode_settings'] = \
+ new_xcode_settings
+
+ ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0)
+ ninja_target['mac_xctest_bundle'] = old_spec.get('mac_xctest_bundle', 0)
+ ninja_target['ios_app_extension'] = old_spec.get('ios_app_extension', 0)
+ ninja_target['ios_watchkit_extension'] = \
+ old_spec.get('ios_watchkit_extension', 0)
+ ninja_target['ios_watchkit_app'] = old_spec.get('ios_watchkit_app', 0)
+ ninja_target['type'] = old_spec['type']
+ if ninja_toplevel:
+ ninja_target['actions'] = [
+ {
+ 'action_name': 'Compile and copy %s via ninja' % target_name,
+ 'inputs': [],
+ 'outputs': [],
+ 'action': [
+ 'env',
+ 'PATH=%s' % os.environ['PATH'],
+ 'ninja',
+ '-C',
+ new_xcode_settings['CONFIGURATION_BUILD_DIR'],
+ target_name,
+ ],
+ 'message': 'Compile and copy %s via ninja' % target_name,
+ },
+ ]
+ if jobs > 0:
+ ninja_target['actions'][0]['action'].extend(('-j', jobs))
+ return ninja_target
+
+def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
+ """Limit targets for Xcode wrapper.
+
+ Xcode sometimes performs poorly with too many targets, so only include
+ proper executable targets, with filters to customize.
+ Arguments:
+ target_extras: Regular expression to always add, matching any target.
+ executable_target_pattern: Regular expression limiting executable targets.
+ spec: Specifications for target.
+ """
+ target_name = spec.get('target_name')
+ # Always include targets matching target_extras.
+ if target_extras is not None and re.search(target_extras, target_name):
+ return True
+
+ # Otherwise just show executable targets and xc_tests.
+ if (int(spec.get('mac_xctest_bundle', 0)) != 0 or
+ (spec.get('type', '') == 'executable' and
+ spec.get('product_extension', '') != 'bundle')):
+
+ # If there is a filter and the target does not match, exclude the target.
+ if executable_target_pattern is not None:
+ if not re.search(executable_target_pattern, target_name):
+ return False
+ return True
+ return False
+
+def CreateWrapper(target_list, target_dicts, data, params):
+ """Initialize targets for the ninja wrapper.
+
+ This sets up the necessary variables in the targets to generate Xcode projects
+ that use ninja as an external builder.
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ data: Dict of flattened build files keyed on gyp path.
+ params: Dict of global options for gyp.
+ """
+ orig_gyp = params['build_files'][0]
+ for gyp_name, gyp_dict in data.items():
+ if gyp_name == orig_gyp:
+ depth = gyp_dict['_DEPTH']
+
+ # Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE
+ # and prepend .ninja before the .gyp extension.
+ generator_flags = params.get('generator_flags', {})
+ main_gyp = generator_flags.get('xcode_ninja_main_gyp', None)
+ if main_gyp is None:
+ (build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
+ main_gyp = build_file_root + ".ninja" + build_file_ext
+
+ # Create new |target_list|, |target_dicts| and |data| data structures.
+ new_target_list = []
+ new_target_dicts = {}
+ new_data = {}
+
+ # Set base keys needed for |data|.
+ new_data[main_gyp] = {}
+ new_data[main_gyp]['included_files'] = []
+ new_data[main_gyp]['targets'] = []
+ new_data[main_gyp]['xcode_settings'] = \
+ data[orig_gyp].get('xcode_settings', {})
+
+ # Normally the xcode-ninja generator includes only valid executable targets.
+ # If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
+ # executable targets that match the pattern. (Default all)
+ executable_target_pattern = \
+ generator_flags.get('xcode_ninja_executable_target_pattern', None)
+
+ # For including other non-executable targets, add the matching target name
+ # to the |xcode_ninja_target_pattern| regular expression. (Default none)
+ target_extras = generator_flags.get('xcode_ninja_target_pattern', None)
+
+ for old_qualified_target in target_list:
+ spec = target_dicts[old_qualified_target]
+ if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
+ # Add to new_target_list.
+ target_name = spec.get('target_name')
+ new_target_name = '%s:%s#target' % (main_gyp, target_name)
+ new_target_list.append(new_target_name)
+
+ # Add to new_target_dicts.
+ new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)
+
+ # Add to new_data.
+ for old_target in data[old_qualified_target.split(':')[0]]['targets']:
+ if old_target['target_name'] == target_name:
+ new_data_target = {}
+ new_data_target['target_name'] = old_target['target_name']
+ new_data_target['toolset'] = old_target['toolset']
+ new_data[main_gyp]['targets'].append(new_data_target)
+
+ # Create sources target.
+ sources_target_name = 'sources_for_indexing'
+ sources_target = _TargetFromSpec(
+ { 'target_name' : sources_target_name,
+ 'toolset': 'target',
+ 'default_configuration': 'Default',
+ 'mac_bundle': '0',
+ 'type': 'executable'
+ }, None)
+
+ # Tell Xcode to look everywhere for headers.
+ sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } }
+
+ # Put excluded files into the sources target so they can be opened in Xcode.
+ skip_excluded_files = \
+ not generator_flags.get('xcode_ninja_list_excluded_files', True)
+
+ sources = []
+ for target, target_dict in target_dicts.items():
+ base = os.path.dirname(target)
+ files = target_dict.get('sources', []) + \
+ target_dict.get('mac_bundle_resources', [])
+
+ if not skip_excluded_files:
+ files.extend(target_dict.get('sources_excluded', []) +
+ target_dict.get('mac_bundle_resources_excluded', []))
+
+ for action in target_dict.get('actions', []):
+ files.extend(action.get('inputs', []))
+
+ if not skip_excluded_files:
+ files.extend(action.get('inputs_excluded', []))
+
+ # Remove files starting with $. These are mostly intermediate files for the
+ # build system.
+ files = [ file for file in files if not file.startswith('$')]
+
+ # Make sources relative to root build file.
+ relative_path = os.path.dirname(main_gyp)
+ sources += [ os.path.relpath(os.path.join(base, file), relative_path)
+ for file in files ]
+
+ sources_target['sources'] = sorted(set(sources))
+
+  # Put sources_to_index in its own gyp.
+ sources_gyp = \
+ os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
+ fully_qualified_target_name = \
+ '%s:%s#target' % (sources_gyp, sources_target_name)
+
+ # Add to new_target_list, new_target_dicts and new_data.
+ new_target_list.append(fully_qualified_target_name)
+ new_target_dicts[fully_qualified_target_name] = sources_target
+ new_data_target = {}
+ new_data_target['target_name'] = sources_target['target_name']
+ new_data_target['_DEPTH'] = depth
+ new_data_target['toolset'] = "target"
+ new_data[sources_gyp] = {}
+ new_data[sources_gyp]['targets'] = []
+ new_data[sources_gyp]['included_files'] = []
+ new_data[sources_gyp]['xcode_settings'] = \
+ data[orig_gyp].get('xcode_settings', {})
+ new_data[sources_gyp]['targets'].append(new_data_target)
+
+ # Write workspace to file.
+ _WriteWorkspace(main_gyp, sources_gyp, params)
+ return (new_target_list, new_target_dicts, new_data)
diff --git a/third_party/python/gyp/pylib/gyp/xcodeproj_file.py b/third_party/python/gyp/pylib/gyp/xcodeproj_file.py
new file mode 100644
index 0000000000..19edcb07fb
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/xcodeproj_file.py
@@ -0,0 +1,2995 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Xcode project file generator.
+
+This module is both an Xcode project file generator and a documentation of the
+Xcode project file format. Knowledge of the project file format was gained
+based on extensive experience with Xcode, and by making changes to projects in
+Xcode.app and observing the resultant changes in the associated project files.
+
+XCODE PROJECT FILES
+
+The generator targets the file format as written by Xcode 3.2 (specifically,
+3.2.6), but past experience has taught that the format has not changed
+significantly in the past several years, and future versions of Xcode are able
+to read older project files.
+
+Xcode project files are "bundled": the project "file" from an end-user's
+perspective is actually a directory with an ".xcodeproj" extension. The
+project file from this module's perspective is actually a file inside this
+directory, always named "project.pbxproj". This file contains a complete
+description of the project and is all that is needed to use the xcodeproj.
+Other files contained in the xcodeproj directory are simply used to store
+per-user settings, such as the state of various UI elements in the Xcode
+application.
+
+The project.pbxproj file is a property list, stored in a format almost
+identical to the NeXTstep property list format. The file is able to carry
+Unicode data, and is encoded in UTF-8. The root element in the property list
+is a dictionary that contains several properties of minimal interest, and two
+properties of immense interest. The most important property is a dictionary
+named "objects". The entire structure of the project is represented by the
+children of this property. The objects dictionary is keyed by unique 96-bit
+values represented by 24 uppercase hexadecimal characters. Each value in the
+objects dictionary is itself a dictionary, describing an individual object.
+
+Each object in the dictionary is a member of a class, which is identified by
+the "isa" property of each object. A variety of classes are represented in a
+project file. Objects can refer to other objects by ID, using the 24-character
+hexadecimal object key. A project's objects form a tree, with a root object
+of class PBXProject at the root. As an example, the PBXProject object serves
+as parent to an XCConfigurationList object defining the build configurations
+used in the project, a PBXGroup object serving as a container for all files
+referenced in the project, and a list of target objects, each of which defines
+a target in the project. There are several different types of target object,
+such as PBXNativeTarget and PBXAggregateTarget. In this module, this
+relationship is expressed by having each target type derive from an abstract
+base named XCTarget.
+
+The project.pbxproj file's root dictionary also contains a property, sibling to
+the "objects" dictionary, named "rootObject". The value of rootObject is a
+24-character object key referring to the root PBXProject object in the
+objects dictionary.
+
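+Sketched with a made-up identifier and almost everything else elided, the root
+dictionary of a project.pbxproj file therefore has roughly this shape:
+
+  {
+    objects = {
+      29B97313FDCFA39411CA2CEA /* Project object */ = {
+        isa = PBXProject;
+        ...
+      };
+      ...
+    };
+    rootObject = 29B97313FDCFA39411CA2CEA /* Project object */;
+  }
+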
+In Xcode, every file used as input to a target or produced as a final product
+of a target must appear somewhere in the hierarchy rooted at the PBXGroup
+object referenced by the PBXProject's mainGroup property. A PBXGroup is
+generally represented as a folder in the Xcode application. PBXGroups can
+contain other PBXGroups as well as PBXFileReferences, which are pointers to
+actual files.
+
+Each XCTarget contains a list of build phases, represented in this module by
+the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations
+are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the
+"Compile Sources" and "Link Binary With Libraries" phases displayed in the
+Xcode application. Files used as input to these phases (for example, source
+files in the former case and libraries and frameworks in the latter) are
+represented by PBXBuildFile objects, referenced by elements of "files" lists
+in XCBuildPhase objects. Each PBXBuildFile object refers to a PBXFileReference
+object as a "weak" reference: it does not "own" the PBXFileReference, which is
+owned by the root object's mainGroup or a descendant group. In most cases, the
+layer of indirection between an XCBuildPhase and a PBXFileReference via a
+PBXBuildFile appears extraneous, but there's actually one reason for this:
+file-specific compiler flags are added to the PBXBuildFile object so as to
+allow a single file to be a member of multiple targets while having distinct
+compiler flags for each. These flags can be modified in the Xcode application
+in the "Build" tab of a File Info window.
+
+When a project is open in the Xcode application, Xcode will rewrite it. As
+such, this module is careful to adhere to the formatting used by Xcode, to
+avoid insignificant changes appearing in the file when it is used in the
+Xcode application. This will keep version control repositories happy, and
+makes it possible to compare a project file used in Xcode to one generated by
+this module to determine if any significant changes were made in the
+application.
+
+Xcode has its own way of assigning 24-character identifiers to each object,
+which is not duplicated here. Because the identifier is only generated
+once, when an object is created, and is then left unchanged, there is no need
+to attempt to duplicate Xcode's behavior in this area. The generator is free
+to select any identifier, even at random, to refer to the objects it creates,
+and Xcode will retain those identifiers and use them when subsequently
+rewriting the project file. However, a generator that simply picked random
+identifiers would choose new ones each time the project files are generated,
+leading to difficulties
+comparing "used" project files to "pristine" ones produced by this module,
+and causing the appearance of changes as every object identifier is changed
+when updated projects are checked in to a version control repository. To
+mitigate this problem, this module chooses identifiers in a more deterministic
+way, by hashing a description of each object as well as its parent and ancestor
+objects. This strategy should result in minimal "shift" in IDs as successive
+generations of project files are produced.
+
+THIS MODULE
+
+This module introduces several classes, all derived from the XCObject class.
+Nearly all of the "brains" are built into the XCObject class, which understands
+how to create and modify objects, maintain the proper tree structure, compute
+identifiers, and print objects. For the most part, classes derived from
+XCObject need only provide a _schema class object, a dictionary that
+expresses what properties objects of the class may contain.
+
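+As an illustration only (XCExample is not a real project file class), a
+subclass describing objects with a required "name" string and an owned list
+of child objects could be declared as:
+
+  class XCExample(XCObject):
+    _schema = XCObject._schema.copy()
+    _schema.update({
+      'name':     [0, str,      0, 1],
+      'children': [1, XCObject, 1, 0],
+    })
+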
+Given this structure, it's possible to build a minimal project file by creating
+objects of the appropriate types and making the proper connections:
+
+ config_list = XCConfigurationList()
+ group = PBXGroup()
+ project = PBXProject({'buildConfigurationList': config_list,
+ 'mainGroup': group})
+
+With the project object set up, it can be added to an XCProjectFile object.
+XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject
+subclass that does not actually correspond to a class type found in a project
+file. Rather, it is used to represent the project file's root dictionary.
+Printing an XCProjectFile will print the entire project file, including the
+full "objects" dictionary.
+
+ project_file = XCProjectFile({'rootObject': project})
+ project_file.ComputeIDs()
+ project_file.Print()
+
+Xcode project files are always encoded in UTF-8. This module will accept
+strings of either the str class or the unicode class. Strings of class str
+are assumed to already be encoded in UTF-8. Obviously, if you're just using
+ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset.
+Strings of class unicode are handled properly and encoded in UTF-8 when
+a project file is output.
+"""
+
+import functools
+import gyp.common
+import posixpath
+import re
+import struct
+import sys
+
+# hashlib is supplied as of Python 2.5 as the replacement interface for sha
+# and other secure hashes. In 2.6, sha is deprecated. Import hashlib if
+# available, avoiding a deprecation warning under 2.6. Import sha otherwise,
+# preserving 2.4 compatibility.
+try:
+ import hashlib
+ _new_sha1 = hashlib.sha1
+except ImportError:
+ import sha
+ _new_sha1 = sha.new
+
+try:
+ # basestring was removed in python3.
+ basestring
+except NameError:
+ basestring = str
+
+try:
+ # cmp was removed in python3.
+ cmp
+except NameError:
+ def cmp(a, b):
+ return (a > b) - (a < b)
+
+# See XCObject._EncodeString. This pattern is used to determine when a string
+# can be printed unquoted. Strings that match this pattern may be printed
+# unquoted. Strings that do not match must be quoted and may be further
+# transformed to be properly encoded. Note that this expression matches the
+# characters listed with "+", for 1 or more occurrences: if a string is empty,
+# it must not match this pattern, because it needs to be encoded as "".
+_unquoted = re.compile('^[A-Za-z0-9$./_]+$')
+
+# Strings that match this pattern are quoted regardless of what _unquoted says.
+# Oddly, Xcode will quote any string with a run of three or more underscores.
+_quoted = re.compile('___')
+
+# This pattern should match any character that needs to be escaped by
+# XCObject._EncodeString. See that function.
+_escaped = re.compile('[\\\\"]|[\x00-\x1f]')
+
+
+# Used by SourceTreeAndPathFromPath
+_path_leading_variable = re.compile(r'^\$\((.*?)\)(/(.*))?$')
+
+def SourceTreeAndPathFromPath(input_path):
+ """Given input_path, returns a tuple with sourceTree and path values.
+
+ Examples:
+ input_path (source_tree, output_path)
+ '$(VAR)/path' ('VAR', 'path')
+ '$(VAR)' ('VAR', None)
+ 'path' (None, 'path')
+ """
+
+ source_group_match = _path_leading_variable.match(input_path)
+ if source_group_match:
+ source_tree = source_group_match.group(1)
+ output_path = source_group_match.group(3) # This may be None.
+ else:
+ source_tree = None
+ output_path = input_path
+
+ return (source_tree, output_path)
+
+def ConvertVariablesToShellSyntax(input_string):
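+ """Converts Xcode-style $(VAR) references to shell-style ${VAR}.
+
+ For example (illustrative): '$(SRCROOT)/foo' becomes '${SRCROOT}/foo'.
+ """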
+ return re.sub(r'\$\((.*?)\)', '${\\1}', input_string)
+
+class XCObject(object):
+ """The abstract base of all class types used in Xcode project files.
+
+ Class variables:
+ _schema: A dictionary defining the properties of this class. The keys to
+ _schema are string property keys as used in project files. Values
+ are a list of four or five elements:
+ [ is_list, property_type, is_strong, is_required, default ]
+ is_list: True if the property described is a list, as opposed
+ to a single element.
+ property_type: The type to use as the value of the property,
+ or if is_list is True, the type to use for each
+ element of the value's list. property_type must
+ be an XCObject subclass, or one of the built-in
+ types str, int, or dict.
+ is_strong: If property_type is an XCObject subclass, is_strong
+ is True to assert that this class "owns," or serves
+ as parent, to the property value (or, if is_list is
+ True, values). is_strong must be False if
+ property_type is not an XCObject subclass.
+ is_required: True if the property is required for the class.
+ Note that is_required being True does not preclude
+ an empty string ("", in the case of property_type
+ str) or list ([], in the case of is_list True) from
+ being set for the property.
+ default: Optional. If is_required is True, default may be set
+ to provide a default value for objects that do not supply
+ their own value. If is_required is True and default
+ is not provided, users of the class must supply their own
+ value for the property.
+ Note that although the values of the array are expressed in
+ boolean terms, subclasses provide values as integers to conserve
+ horizontal space.
+ _should_print_single_line: False in XCObject. Subclasses whose objects
+ should be written to the project file in the
+ alternate single-line format, such as
+ PBXFileReference and PBXBuildFile, should
+ set this to True.
+ _encode_transforms: Used by _EncodeString to encode unprintable characters.
+ The index into this list is the ordinal of the
+ character to transform; each value is a string
+ used to represent the character in the output. XCObject
+ provides an _encode_transforms list suitable for most
+ XCObject subclasses.
+ _alternate_encode_transforms: Provided for subclasses that wish to use
+ the alternate encoding rules. Xcode seems
+ to use these rules when printing objects in
+ single-line format. Subclasses that desire
+ this behavior should set _encode_transforms
+ to _alternate_encode_transforms.
+ _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs
+ to construct this object's ID. Most classes that need custom
+ hashing behavior should do it by overriding Hashables,
+ but in some cases an object's parent may wish to push a
+ hashable value into its child, and it can do so by appending
+ to _hashables.
+ Attributes:
+ id: The object's identifier, a 24-character uppercase hexadecimal string.
+ Usually, objects being created should not set id until the entire
+ project file structure is built. At that point, ComputeIDs() should
+ be called on the root object to assign deterministic values for id to
+ each object in the tree.
+ parent: The object's parent. This is set by a parent XCObject when a child
+ object is added to it.
+ _properties: The object's property dictionary. An object's properties are
+ described by its class' _schema variable.
+ """
+
+ _schema = {}
+ _should_print_single_line = False
+
+ # See _EncodeString.
+ _encode_transforms = []
+ i = 0
+ while i < ord(' '):
+ _encode_transforms.append('\\U%04x' % i)
+ i = i + 1
+ _encode_transforms[7] = '\\a'
+ _encode_transforms[8] = '\\b'
+ _encode_transforms[9] = '\\t'
+ _encode_transforms[10] = '\\n'
+ _encode_transforms[11] = '\\v'
+ _encode_transforms[12] = '\\f'
+ _encode_transforms[13] = '\\n'
+
+ _alternate_encode_transforms = list(_encode_transforms)
+ _alternate_encode_transforms[9] = chr(9)
+ _alternate_encode_transforms[10] = chr(10)
+ _alternate_encode_transforms[11] = chr(11)
+
+ def __init__(self, properties=None, id=None, parent=None):
+ self.id = id
+ self.parent = parent
+ self._properties = {}
+ self._hashables = []
+ self._SetDefaultsFromSchema()
+ self.UpdateProperties(properties)
+
+ def __repr__(self):
+ try:
+ name = self.Name()
+ except NotImplementedError:
+ return '<%s at 0x%x>' % (self.__class__.__name__, id(self))
+ return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
+
+ def Copy(self):
+ """Make a copy of this object.
+
+ The new object will have its own copy of lists and dicts. Any XCObject
+ objects owned by this object (marked "strong") will be copied in the
+ new object, even those found in lists. If this object has any weak
+ references to other XCObjects, the same references are added to the new
+ object without making a copy.
+ """
+
+ that = self.__class__(id=self.id, parent=self.parent)
+ for key, value in self._properties.items():
+ is_strong = self._schema[key][2]
+
+ if isinstance(value, XCObject):
+ if is_strong:
+ new_value = value.Copy()
+ new_value.parent = that
+ that._properties[key] = new_value
+ else:
+ that._properties[key] = value
+ elif isinstance(value, basestring) or isinstance(value, int):
+ that._properties[key] = value
+ elif isinstance(value, list):
+ if is_strong:
+ # If is_strong is True, each element is an XCObject, so it's safe to
+ # call Copy.
+ that._properties[key] = []
+ for item in value:
+ new_item = item.Copy()
+ new_item.parent = that
+ that._properties[key].append(new_item)
+ else:
+ that._properties[key] = value[:]
+ elif isinstance(value, dict):
+ # dicts are never strong.
+ if is_strong:
+ raise TypeError('Strong dict for key ' + key + ' in ' + \
+ self.__class__.__name__)
+ else:
+ that._properties[key] = value.copy()
+ else:
+ raise TypeError('Unexpected type ' + value.__class__.__name__ + \
+ ' for key ' + key + ' in ' + self.__class__.__name__)
+
+ return that
+
+ def Name(self):
+ """Return the name corresponding to an object.
+
+ Not all objects necessarily need to be nameable, and not all that do have
+ a "name" property. Override as needed.
+ """
+
+ # If the schema indicates that "name" is required, try to access the
+ # property even if it doesn't exist. This will result in a KeyError
+ # being raised for the property that should be present, which seems more
+ # appropriate than NotImplementedError in this case.
+ if 'name' in self._properties or \
+ ('name' in self._schema and self._schema['name'][3]):
+ return self._properties['name']
+
+ raise NotImplementedError(self.__class__.__name__ + ' must implement Name')
+
+ def Comment(self):
+ """Return a comment string for the object.
+
+ Most objects just use their name as the comment, but PBXProject uses
+ different values.
+
+ The returned comment is not escaped and does not have any comment marker
+ strings applied to it.
+ """
+
+ return self.Name()
+
+ def Hashables(self):
+ hashables = [self.__class__.__name__]
+
+ name = self.Name()
+ if name != None:
+ hashables.append(name)
+
+ hashables.extend(self._hashables)
+
+ return hashables
+
+ def HashablesForChild(self):
+ return None
+
+ def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None):
+ """Set "id" properties deterministically.
+
+ An object's "id" property is set based on a hash of its class type and
+ name, as well as the class type and name of all ancestor objects. As
+ such, it is only advisable to call ComputeIDs once an entire project file
+ tree is built.
+
+ If recursive is True, recurse into all descendant objects and update their
+ hashes.
+
+ If overwrite is True, any existing value set in the "id" property will be
+ replaced.
+ """
+
+ def _HashUpdate(hash, data):
+ """Update hash with data's length and contents.
+
+ If the hash were updated only with the value of data, it would be
+ possible for clowns to induce collisions by manipulating the names of
+ their objects. By adding the length, it's exceedingly less likely that
+ ID collisions will be encountered, intentionally or not.
+ """
+
+ hash.update(struct.pack('>i', len(data)))
+ hash.update(data.encode('utf-8'))
+
+ if seed_hash is None:
+ seed_hash = _new_sha1()
+
+ hash = seed_hash.copy()
+
+ hashables = self.Hashables()
+ assert len(hashables) > 0
+ for hashable in hashables:
+ _HashUpdate(hash, hashable)
+
+ if recursive:
+ hashables_for_child = self.HashablesForChild()
+ if hashables_for_child is None:
+ child_hash = hash
+ else:
+ assert len(hashables_for_child) > 0
+ child_hash = seed_hash.copy()
+ for hashable in hashables_for_child:
+ _HashUpdate(child_hash, hashable)
+
+ for child in self.Children():
+ child.ComputeIDs(recursive, overwrite, child_hash)
+
+ if overwrite or self.id is None:
+ # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is
+ # 160 bits. Instead of throwing out 64 bits of the digest, xor them
+ # into the portion that gets used.
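+ # For a 160-bit digest split into five 32-bit words [d0, d1, d2, d3, d4],
+ # the loop below therefore produces [d0^d3, d1^d4, d2] (illustrative).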
+ assert hash.digest_size % 4 == 0
+ digest_int_count = hash.digest_size // 4
+ digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest())
+ id_ints = [0, 0, 0]
+ for index in range(0, digest_int_count):
+ id_ints[index % 3] ^= digest_ints[index]
+ self.id = '%08X%08X%08X' % tuple(id_ints)
+
+ def EnsureNoIDCollisions(self):
+ """Verifies that no two objects have the same ID. Checks all descendants.
+ """
+
+ ids = {}
+ descendants = self.Descendants()
+ for descendant in descendants:
+ if descendant.id in ids:
+ other = ids[descendant.id]
+ raise KeyError(
+ 'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \
+ (descendant.id, str(descendant._properties),
+ str(other._properties), self._properties['rootObject'].Name()))
+ ids[descendant.id] = descendant
+
+ def Children(self):
+ """Returns a list of all of this object's owned (strong) children."""
+
+ children = []
+ for property, attributes in self._schema.items():
+ (is_list, property_type, is_strong) = attributes[0:3]
+ if is_strong and property in self._properties:
+ if not is_list:
+ children.append(self._properties[property])
+ else:
+ children.extend(self._properties[property])
+ return children
+
+ def Descendants(self):
+ """Returns a list of all of this object's descendants, including this
+ object.
+ """
+
+ children = self.Children()
+ descendants = [self]
+ for child in children:
+ descendants.extend(child.Descendants())
+ return descendants
+
+ def PBXProjectAncestor(self):
+ # The base case for recursion is defined at PBXProject.PBXProjectAncestor.
+ if self.parent:
+ return self.parent.PBXProjectAncestor()
+ return None
+
+ def _EncodeComment(self, comment):
+ """Encodes a comment to be placed in the project file output, mimicing
+ Xcode behavior.
+ """
+
+ # This mimics Xcode behavior by wrapping the comment in "/*" and "*/". If
+ # the string already contains a "*/", it is turned into "(*)/". This keeps
+ # the file writer from outputting something that would be treated as the
+ # end of a comment in the middle of something intended to be entirely a
+ # comment.
+
+ return '/* ' + comment.replace('*/', '(*)/') + ' */'
+
+ def _EncodeTransform(self, match):
+ # This function works closely with _EncodeString. It will only be called
+ # by re.sub with match.group(0) containing a character matched by the
+ # _escaped expression.
+ char = match.group(0)
+
+ # Backslashes (\) and quotation marks (") are always replaced with a
+ # backslash-escaped version of the same. Everything else gets its
+ # replacement from the class' _encode_transforms array.
+ if char == '\\':
+ return '\\\\'
+ if char == '"':
+ return '\\"'
+ return self._encode_transforms[ord(char)]
+
+ def _EncodeString(self, value):
+ """Encodes a string to be placed in the project file output, mimicing
+ Xcode behavior.
+ """
+
+ # Use quotation marks when any character outside of the range A-Z, a-z, 0-9,
+ # $ (dollar sign), . (period), and _ (underscore) is present. Also use
+ # quotation marks to represent empty strings.
+ #
+ # Escape " (double-quote) and \ (backslash) by preceding them with a
+ # backslash.
+ #
+ # Some characters below the printable ASCII range are encoded specially:
+ # 7 ^G BEL is encoded as "\a"
+ # 8 ^H BS is encoded as "\b"
+ # 11 ^K VT is encoded as "\v"
+ # 12 ^L NP is encoded as "\f"
+ # 127 ^? DEL is passed through as-is without escaping
+ # - In PBXFileReference and PBXBuildFile objects:
+ # 9 ^I HT is passed through as-is without escaping
+ # 10 ^J NL is passed through as-is without escaping
+ # 13 ^M CR is passed through as-is without escaping
+ # - In other objects:
+ # 9 ^I HT is encoded as "\t"
+ # 10 ^J NL is encoded as "\n"
+ # 13 ^M CR is encoded as "\n" rendering it indistinguishable from
+ # 10 ^J NL
+ # All other characters within the ASCII control character range (0 through
+ # 31 inclusive) are encoded as "\U001f" referring to the Unicode code point
+ # in hexadecimal. For example, character 14 (^N SO) is encoded as "\U000e".
+ # Characters above the ASCII range are passed through to the output encoded
+ # as UTF-8 without any escaping. These mappings are contained in the
+ # class' _encode_transforms list.
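+ #
+ # A few illustrative inputs and outputs (hypothetical values):
+ #   Foo.c      -> Foo.c           (matches _unquoted, printed bare)
+ #   Foo Bar.c  -> "Foo Bar.c"     (the space forces quoting)
+ #   say "hi"   -> "say \"hi\""    (quote and backslash get escaped)
+ #   foo___bar  -> "foo___bar"     (a ___ run forces quoting; see _quoted)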
+
+ if _unquoted.search(value) and not _quoted.search(value):
+ return value
+
+ return '"' + _escaped.sub(self._EncodeTransform, value) + '"'
+
+ def _XCPrint(self, file, tabs, line):
+ file.write('\t' * tabs + line)
+
+ def _XCPrintableValue(self, tabs, value, flatten_list=False):
+ """Returns a representation of value that may be printed in a project file,
+ mimicking Xcode's behavior.
+
+ _XCPrintableValue can handle str and int values, XCObjects (which are
+ made printable by returning their id property), and list and dict objects
+ composed of any of the above types. When printing a list or dict, and
+ _should_print_single_line is False, the tabs parameter is used to determine
+ how much to indent the lines corresponding to the items in the list or
+ dict.
+
+ If flatten_list is True, single-element lists will be transformed into
+ strings.
+ """
+
+ printable = ''
+ comment = None
+
+ if self._should_print_single_line:
+ sep = ' '
+ element_tabs = ''
+ end_tabs = ''
+ else:
+ sep = '\n'
+ element_tabs = '\t' * (tabs + 1)
+ end_tabs = '\t' * tabs
+
+ if isinstance(value, XCObject):
+ printable += value.id
+ comment = value.Comment()
+ elif isinstance(value, str):
+ printable += self._EncodeString(value)
+ # A python3 compatible way of saying isinstance(value, unicode).
+ # basestring is str in python3 so this is equivalent to the above
+ # isinstance. Thus if it failed above it will fail here.
+ # In python2 we test against str and unicode at this point. str has already
+ # failed in the above isinstance so we test against unicode.
+ elif isinstance(value, basestring):
+ printable += self._EncodeString(value.encode('utf-8'))
+ elif isinstance(value, int):
+ printable += str(value)
+ elif isinstance(value, list):
+ if flatten_list and len(value) <= 1:
+ if len(value) == 0:
+ printable += self._EncodeString('')
+ else:
+ printable += self._EncodeString(value[0])
+ else:
+ printable = '(' + sep
+ for item in value:
+ printable += element_tabs + \
+ self._XCPrintableValue(tabs + 1, item, flatten_list) + \
+ ',' + sep
+ printable += end_tabs + ')'
+ elif isinstance(value, dict):
+ printable = '{' + sep
+ for item_key, item_value in sorted(value.items()):
+ printable += element_tabs + \
+ self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \
+ self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \
+ sep
+ printable += end_tabs + '}'
+ else:
+ raise TypeError("Can't make " + value.__class__.__name__ + ' printable')
+
+ if comment != None:
+ printable += ' ' + self._EncodeComment(comment)
+
+ return printable
+
+ def _XCKVPrint(self, file, tabs, key, value):
+ """Prints a key and value, members of an XCObject's _properties dictionary,
+ to file.
+
+ tabs is an int identifying the indentation level. If the class'
+ _should_print_single_line variable is True, tabs is ignored and the
+ key-value pair will be followed by a space instead of a newline.
+ """
+
+ if self._should_print_single_line:
+ printable = ''
+ after_kv = ' '
+ else:
+ printable = '\t' * tabs
+ after_kv = '\n'
+
+ # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy
+ # objects without comments. Sometimes it prints them with comments, but
+ # the majority of the time, it doesn't. To avoid unnecessary changes to
+ # the project file after Xcode opens it, don't write comments for
+ # remoteGlobalIDString. This is a sucky hack and it would certainly be
+ # cleaner to extend the schema to indicate whether or not a comment should
+ # be printed, but since this is the only case where the problem occurs and
+ # Xcode itself can't seem to make up its mind, the hack will suffice.
+ #
+ # Also see PBXContainerItemProxy._schema['remoteGlobalIDString'].
+ if key == 'remoteGlobalIDString' and isinstance(self,
+ PBXContainerItemProxy):
+ value_to_print = value.id
+ else:
+ value_to_print = value
+
+ # PBXBuildFile's settings property is represented in the output as a dict,
+ # but a hack here has it represented as a string. Arrange to strip off the
+ # quotes so that it shows up in the output as expected.
+ if key == 'settings' and isinstance(self, PBXBuildFile):
+ strip_value_quotes = True
+ else:
+ strip_value_quotes = False
+
+ # In another one-off, let's set flatten_list on buildSettings properties
+ # of XCBuildConfiguration objects, because that's how Xcode treats them.
+ if key == 'buildSettings' and isinstance(self, XCBuildConfiguration):
+ flatten_list = True
+ else:
+ flatten_list = False
+
+ try:
+ printable_key = self._XCPrintableValue(tabs, key, flatten_list)
+ printable_value = self._XCPrintableValue(tabs, value_to_print,
+ flatten_list)
+ if strip_value_quotes and len(printable_value) > 1 and \
+ printable_value[0] == '"' and printable_value[-1] == '"':
+ printable_value = printable_value[1:-1]
+ printable += printable_key + ' = ' + printable_value + ';' + after_kv
+ except TypeError as e:
+ gyp.common.ExceptionAppend(e,
+ 'while printing key "%s"' % key)
+ raise
+
+ self._XCPrint(file, 0, printable)
+
+ def Print(self, file=sys.stdout):
+ """Prints a reprentation of this object to file, adhering to Xcode output
+ formatting.
+ """
+
+ self.VerifyHasRequiredProperties()
+
+ if self._should_print_single_line:
+ # When printing an object in a single line, Xcode doesn't put any space
+ # between the beginning of a dictionary (or presumably a list) and the
+ # first contained item, so you wind up with snippets like
+ # ...CDEF = {isa = PBXFileReference; fileRef = 0123...
+ # If it were me, I would have put a space in there after the opening
+ # curly, but I guess this is just another one of those inconsistencies
+ # between how Xcode prints PBXFileReference and PBXBuildFile objects as
+ # compared to other objects. Mimic Xcode's behavior here by using an
+ # empty string for sep.
+ sep = ''
+ end_tabs = 0
+ else:
+ sep = '\n'
+ end_tabs = 2
+
+ # Start the object. For example, '\t\tPBXProject = {\n'.
+ self._XCPrint(file, 2, self._XCPrintableValue(2, self) + ' = {' + sep)
+
+ # "isa" isn't in the _properties dictionary, it's an intrinsic property
+ # of the class which the object belongs to. Xcode always outputs "isa"
+ # as the first element of an object dictionary.
+ self._XCKVPrint(file, 3, 'isa', self.__class__.__name__)
+
+ # The remaining elements of an object dictionary are sorted alphabetically.
+ for property, value in sorted(self._properties.items()):
+ self._XCKVPrint(file, 3, property, value)
+
+ # End the object.
+ self._XCPrint(file, end_tabs, '};\n')
+
+ def UpdateProperties(self, properties, do_copy=False):
+ """Merge the supplied properties into the _properties dictionary.
+
+ The input properties must adhere to the class schema or a KeyError or
+ TypeError exception will be raised. If adding an object of an XCObject
+ subclass and the schema indicates a strong relationship, the object's
+ parent will be set to this object.
+
+ If do_copy is True, then lists, dicts, strong-owned XCObjects, and
+ strong-owned XCObjects in lists will be copied instead of having their
+ references added.
+ """
+
+ if properties is None:
+ return
+
+ for property, value in properties.items():
+ # Make sure the property is in the schema.
+ if not property in self._schema:
+ raise KeyError(property + ' not in ' + self.__class__.__name__)
+
+ # Make sure the property conforms to the schema.
+ (is_list, property_type, is_strong) = self._schema[property][0:3]
+ if is_list:
+ if value.__class__ != list:
+ raise TypeError(
+ property + ' of ' + self.__class__.__name__ + \
+ ' must be list, not ' + value.__class__.__name__)
+ for item in value:
+ if not isinstance(item, property_type) and \
+ not (isinstance(item, basestring) and property_type == str):
+ # Accept unicode where str is specified. str is treated as
+ # UTF-8-encoded.
+ raise TypeError(
+ 'item of ' + property + ' of ' + self.__class__.__name__ + \
+ ' must be ' + property_type.__name__ + ', not ' + \
+ item.__class__.__name__)
+ elif not isinstance(value, property_type) and \
+ not (isinstance(value, basestring) and property_type == str):
+ # Accept unicode where str is specified. str is treated as
+ # UTF-8-encoded.
+ raise TypeError(
+ property + ' of ' + self.__class__.__name__ + ' must be ' + \
+ property_type.__name__ + ', not ' + value.__class__.__name__)
+
+ # Checks passed, perform the assignment.
+ if do_copy:
+ if isinstance(value, XCObject):
+ if is_strong:
+ self._properties[property] = value.Copy()
+ else:
+ self._properties[property] = value
+ elif isinstance(value, basestring) or isinstance(value, int):
+ self._properties[property] = value
+ elif isinstance(value, list):
+ if is_strong:
+ # If is_strong is True, each element is an XCObject, so it's safe
+ # to call Copy.
+ self._properties[property] = []
+ for item in value:
+ self._properties[property].append(item.Copy())
+ else:
+ self._properties[property] = value[:]
+ elif isinstance(value, dict):
+ self._properties[property] = value.copy()
+ else:
+ raise TypeError("Don't know how to copy a " + \
+ value.__class__.__name__ + ' object for ' + \
+ property + ' in ' + self.__class__.__name__)
+ else:
+ self._properties[property] = value
+
+ # Set up the child's back-reference to this object. Don't use |value|
+ # any more because it may not be right if do_copy is true.
+ if is_strong:
+ if not is_list:
+ self._properties[property].parent = self
+ else:
+ for item in self._properties[property]:
+ item.parent = self
+
+ def HasProperty(self, key):
+ return key in self._properties
+
+ def GetProperty(self, key):
+ return self._properties[key]
+
+ def SetProperty(self, key, value):
+ self.UpdateProperties({key: value})
+
+ def DelProperty(self, key):
+ if key in self._properties:
+ del self._properties[key]
+
+ def AppendProperty(self, key, value):
+ # TODO(mark): Support ExtendProperty too (and make this call that)?
+
+ # Schema validation.
+ if not key in self._schema:
+ raise KeyError(key + ' not in ' + self.__class__.__name__)
+
+ (is_list, property_type, is_strong) = self._schema[key][0:3]
+ if not is_list:
+ raise TypeError(key + ' of ' + self.__class__.__name__ + ' must be list')
+ if not isinstance(value, property_type):
+ raise TypeError('item of ' + key + ' of ' + self.__class__.__name__ + \
+ ' must be ' + property_type.__name__ + ', not ' + \
+ value.__class__.__name__)
+
+ # If the property doesn't exist yet, create a new empty list to receive the
+ # item.
+ if not key in self._properties:
+ self._properties[key] = []
+
+ # Set up the ownership link.
+ if is_strong:
+ value.parent = self
+
+ # Store the item.
+ self._properties[key].append(value)
+
+ def VerifyHasRequiredProperties(self):
+ """Ensure that all properties identified as required by the schema are
+ set.
+ """
+
+ # TODO(mark): A stronger verification mechanism is needed. Some
+ # subclasses need to perform validation beyond what the schema can enforce.
+ for property, attributes in self._schema.items():
+ (is_list, property_type, is_strong, is_required) = attributes[0:4]
+ if is_required and not property in self._properties:
+ raise KeyError(self.__class__.__name__ + ' requires ' + property)
+
+ def _SetDefaultsFromSchema(self):
+ """Assign object default values according to the schema. This will not
+ overwrite properties that have already been set."""
+
+ defaults = {}
+ for property, attributes in self._schema.items():
+ (is_list, property_type, is_strong, is_required) = attributes[0:4]
+ if is_required and len(attributes) >= 5 and \
+ not property in self._properties:
+ default = attributes[4]
+
+ defaults[property] = default
+
+ if len(defaults) > 0:
+ # Use do_copy=True so that each new object gets its own copy of strong
+ # objects, lists, and dicts.
+ self.UpdateProperties(defaults, do_copy=True)
+
+
+class XCHierarchicalElement(XCObject):
+ """Abstract base for PBXGroup and PBXFileReference. Not represented in a
+ project file."""
+
+ # TODO(mark): Do name and path belong here? Probably so.
+ # If path is set and name is not, name may have a default value. Name will
+ # be set to the basename of path, if the basename of path is different from
+ # the full value of path. If path is already just a leaf name, name will
+ # not be set.
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'comments': [0, str, 0, 0],
+ 'fileEncoding': [0, str, 0, 0],
+ 'includeInIndex': [0, int, 0, 0],
+ 'indentWidth': [0, int, 0, 0],
+ 'lineEnding': [0, int, 0, 0],
+ 'sourceTree': [0, str, 0, 1, '<group>'],
+ 'tabWidth': [0, int, 0, 0],
+ 'usesTabs': [0, int, 0, 0],
+ 'wrapsLines': [0, int, 0, 0],
+ })
+
+ def __init__(self, properties=None, id=None, parent=None):
+ # super
+ XCObject.__init__(self, properties, id, parent)
+ if 'path' in self._properties and not 'name' in self._properties:
+ path = self._properties['path']
+ name = posixpath.basename(path)
+ if name != '' and path != name:
+ self.SetProperty('name', name)
+
+ if 'path' in self._properties and \
+ (not 'sourceTree' in self._properties or \
+ self._properties['sourceTree'] == '<group>'):
+ # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take
+ # the variable out and make the path be relative to that variable by
+ # assigning the variable name as the sourceTree.
+ (source_tree, path) = SourceTreeAndPathFromPath(self._properties['path'])
+ if source_tree != None:
+ self._properties['sourceTree'] = source_tree
+ if path != None:
+ self._properties['path'] = path
+ if source_tree != None and path is None and \
+ not 'name' in self._properties:
+ # The path was of the form "$(SDKROOT)" with no path following it.
+ # This object is now relative to that variable, so it has no path
+ # attribute of its own. It does, however, keep a name.
+ del self._properties['path']
+ self._properties['name'] = source_tree
+
+ def Name(self):
+ if 'name' in self._properties:
+ return self._properties['name']
+ elif 'path' in self._properties:
+ return self._properties['path']
+ else:
+ # This happens in the case of the root PBXGroup.
+ return None
+
+ def Hashables(self):
+ """Custom hashables for XCHierarchicalElements.
+
+ XCHierarchicalElements are special. Generally, their hashes shouldn't
+ change if the paths don't change. The normal XCObject implementation of
+ Hashables adds a hashable for each object, which means that if
+ the hierarchical structure changes (possibly due to changes caused when
+ TakeOverOnlyChild runs and encounters slight changes in the hierarchy),
+ the hashes will change. For example, if a project file initially contains
+ a/b/f1, and the a and b groups are collapsed into a single a/b group, f1 will
+ have a single parent a/b. If someone later adds a/f2 to the project file,
+ a/b can no longer be
+ collapsed, and f1 winds up with parent b and grandparent a. That would
+ be sufficient to change f1's hash.
+
+ To counteract this problem, hashables for all XCHierarchicalElements except
+ for the main group (which has neither a name nor a path) are taken to be
+ just the set of path components. Because hashables are inherited from
+ parents, this provides assurance that a/b/f1 has the same set of hashables
+ whether its parent is b or a/b.
+
+ The main group is a special case. As it is permitted to have no name or
+ path, it is permitted to use the standard XCObject hash mechanism. This
+ is not considered a problem because there can be only one main group.
+ """
+
+ if self == self.PBXProjectAncestor()._properties['mainGroup']:
+ # super
+ return XCObject.Hashables(self)
+
+ hashables = []
+
+ # Put the name in first, ensuring that if TakeOverOnlyChild collapses
+ # children into a top-level group like "Source", the name always goes
+ # into the list of hashables without interfering with path components.
+ if 'name' in self._properties:
+ # Make it less likely for people to manipulate hashes by following the
+ # pattern of always pushing an object type value onto the list first.
+ hashables.append(self.__class__.__name__ + '.name')
+ hashables.append(self._properties['name'])
+
+ # NOTE: This still has the problem that if an absolute path is encountered,
+ # including paths with a sourceTree, they'll still inherit their parents'
+ # hashables, even though the paths aren't relative to their parents. This
+ # is not expected to be much of a problem in practice.
+ path = self.PathFromSourceTreeAndPath()
+ if path != None:
+ components = path.split(posixpath.sep)
+ for component in components:
+ hashables.append(self.__class__.__name__ + '.path')
+ hashables.append(component)
+
+ hashables.extend(self._hashables)
+
+ return hashables
+
+ def Compare(self, other):
+ # Allow comparison of these types. PBXGroup has the highest sort rank;
+ # PBXVariantGroup is treated as equal to PBXFileReference.
+ valid_class_types = {
+ PBXFileReference: 'file',
+ PBXGroup: 'group',
+ PBXVariantGroup: 'file',
+ }
+ self_type = valid_class_types[self.__class__]
+ other_type = valid_class_types[other.__class__]
+
+ if self_type == other_type:
+ # If the two objects are of the same sort rank, compare their names.
+ return cmp(self.Name(), other.Name())
+
+ # Otherwise, sort groups before everything else.
+ if self_type == 'group':
+ return -1
+ return 1
+
+ def CompareRootGroup(self, other):
+ # This function should be used only to compare direct children of the
+ # containing PBXProject's mainGroup. These groups should appear in the
+ # listed order.
+ # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the
+ # generator should have a way of influencing this list rather than having
+ # to hardcode for the generator here.
+ order = ['Source', 'Intermediates', 'Projects', 'Frameworks', 'Products',
+ 'Build']
+
+ # If the groups aren't in the listed order, do a name comparison.
+ # Otherwise, groups in the listed order should come before those that
+ # aren't.
+ self_name = self.Name()
+ other_name = other.Name()
+ self_in = isinstance(self, PBXGroup) and self_name in order
+ other_in = isinstance(other, PBXGroup) and other_name in order
+ if not self_in and not other_in:
+ return self.Compare(other)
+ if self_name in order and not other_name in order:
+ return -1
+ if other_name in order and not self_name in order:
+ return 1
+
+ # If both groups are in the listed order, go by the defined order.
+ self_index = order.index(self_name)
+ other_index = order.index(other_name)
+ if self_index < other_index:
+ return -1
+ if self_index > other_index:
+ return 1
+ return 0
+
+ def PathFromSourceTreeAndPath(self):
+ # Turn the object's sourceTree and path properties into a single flat
+ # string of a form comparable to the path parameter. If there's a
+ # sourceTree property other than "<group>", wrap it in $(...) for the
+ # comparison.
+ components = []
+ if self._properties['sourceTree'] != '<group>':
+ components.append('$(' + self._properties['sourceTree'] + ')')
+ if 'path' in self._properties:
+ components.append(self._properties['path'])
+
+ if len(components) > 0:
+ return posixpath.join(*components)
+
+ return None
+
+ def FullPath(self):
+ # Returns a full path to self relative to the project file, or relative
+ # to some other source tree. Start with self, and walk up the chain of
+ # parents prepending their paths, if any, until no more parents are
+ # available (project-relative path) or until a path relative to some
+ # source tree is found.
+ xche = self
+ path = None
+ while isinstance(xche, XCHierarchicalElement) and \
+ (path is None or \
+ (not path.startswith('/') and not path.startswith('$'))):
+ this_path = xche.PathFromSourceTreeAndPath()
+ if this_path != None and path != None:
+ path = posixpath.join(this_path, path)
+ elif this_path != None:
+ path = this_path
+ xche = xche.parent
+
+ return path
+
+
+class PBXGroup(XCHierarchicalElement):
+ """
+ Attributes:
+ _children_by_path: Maps pathnames of children of this PBXGroup to the
+ actual child XCHierarchicalElement objects.
+ _variant_children_by_name_and_path: Maps (name, path) tuples of
+ PBXVariantGroup children to the actual child PBXVariantGroup objects.
+ """
+
+ _schema = XCHierarchicalElement._schema.copy()
+ _schema.update({
+ 'children': [1, XCHierarchicalElement, 1, 1, []],
+ 'name': [0, str, 0, 0],
+ 'path': [0, str, 0, 0],
+ })
+
+ def __init__(self, properties=None, id=None, parent=None):
+ # super
+ XCHierarchicalElement.__init__(self, properties, id, parent)
+ self._children_by_path = {}
+ self._variant_children_by_name_and_path = {}
+ for child in self._properties.get('children', []):
+ self._AddChildToDicts(child)
+
+ def Hashables(self):
+ # super
+ hashables = XCHierarchicalElement.Hashables(self)
+
+ # It is not sufficient to just rely on name and parent to build a unique
+ # hashable: a node could have two child PBXGroups sharing a common name.
+ # To add entropy, the hashable is enhanced with the names of all its
+ # children.
+ for child in self._properties.get('children', []):
+ child_name = child.Name()
+ if child_name != None:
+ hashables.append(child_name)
+
+ return hashables
+
+ def HashablesForChild(self):
+ # To avoid a circular reference the hashables used to compute a child id do
+ # not include the child names.
+ return XCHierarchicalElement.Hashables(self)
+
+ def _AddChildToDicts(self, child):
+ # Sets up this PBXGroup object's dicts to reference the child properly.
+ child_path = child.PathFromSourceTreeAndPath()
+ if child_path:
+ if child_path in self._children_by_path:
+ raise ValueError('Found multiple children with path ' + child_path)
+ self._children_by_path[child_path] = child
+
+ if isinstance(child, PBXVariantGroup):
+ child_name = child._properties.get('name', None)
+ key = (child_name, child_path)
+ if key in self._variant_children_by_name_and_path:
+ raise ValueError('Found multiple PBXVariantGroup children with ' + \
+ 'name ' + str(child_name) + ' and path ' + \
+ str(child_path))
+ self._variant_children_by_name_and_path[key] = child
+
+ def AppendChild(self, child):
+ # Callers should use this instead of calling
+ # AppendProperty('children', child) directly because this function
+ # maintains the group's dicts.
+ self.AppendProperty('children', child)
+ self._AddChildToDicts(child)
+
+ def GetChildByName(self, name):
+ # This is not currently optimized with a dict as GetChildByPath is because
+ # it has few callers. Most callers probably want GetChildByPath. This
+ # function is only useful to get children that have names but no paths,
+ # which is rare. The children of the main group ("Source", "Products",
+ etc.) are pretty much the only case where this is likely to come up.
+ #
+ # TODO(mark): Maybe this should raise an error if more than one child is
+ # present with the same name.
+ if not 'children' in self._properties:
+ return None
+
+ for child in self._properties['children']:
+ if child.Name() == name:
+ return child
+
+ return None
+
+ def GetChildByPath(self, path):
+ if not path:
+ return None
+
+ if path in self._children_by_path:
+ return self._children_by_path[path]
+
+ return None
+
+ def GetChildByRemoteObject(self, remote_object):
+ # This method is a little bit esoteric. Given a remote_object, which
+ # should be a PBXFileReference in another project file, this method will
+ # return this group's PBXReferenceProxy object serving as a local proxy
+ # for the remote PBXFileReference.
+ #
+ # This function might benefit from a dict optimization as GetChildByPath
+ # for some workloads, but profiling shows that it's not currently a
+ # problem.
+ if not 'children' in self._properties:
+ return None
+
+ for child in self._properties['children']:
+ if not isinstance(child, PBXReferenceProxy):
+ continue
+
+ container_proxy = child._properties['remoteRef']
+ if container_proxy._properties['remoteGlobalIDString'] == remote_object:
+ return child
+
+ return None
+
+ def AddOrGetFileByPath(self, path, hierarchical):
+ """Returns an existing or new file reference corresponding to path.
+
+ If hierarchical is True, this method will create or use the necessary
+ hierarchical group structure corresponding to path. Otherwise, it will
+ look in and create an item in the current group only.
+
+ If an existing matching reference is found, it is returned, otherwise, a
+ new one will be created, added to the correct group, and returned.
+
+ If path identifies a directory by virtue of carrying a trailing slash,
+ this method returns a PBXFileReference of "folder" type. If path
+ identifies a variant, by virtue of it identifying a file inside a directory
+ with an ".lproj" extension, this method returns a PBXVariantGroup
+ containing the variant named by path, and possibly other variants. For
+ all other paths, a "normal" PBXFileReference will be returned.
+ """
+
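+ # Illustrative usage (hypothetical paths):
+ #   AddOrGetFileByPath('src/foo.c', True) returns a PBXFileReference for
+ #   foo.c, placed inside a "src" PBXGroup under this group.
+ #   AddOrGetFileByPath('res/en.lproj/Main.nib', True) returns a
+ #   PBXVariantGroup named Main.nib (inside a "res" PBXGroup) whose "en"
+ #   variant refers to en.lproj/Main.nib.
+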
+ # Adding or getting a directory? Directories end with a trailing slash.
+ is_dir = False
+ if path.endswith('/'):
+ is_dir = True
+ path = posixpath.normpath(path)
+ if is_dir:
+ path = path + '/'
+
+ # Adding or getting a variant? Variants are files inside directories
+ # with an ".lproj" extension. Xcode uses variants for localization. For
+ # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named
+ # MainMenu.nib inside path/to, and give it a variant named Language. In
+ # this example, grandparent would be set to path/to and parent_root would
+ # be set to Language.
+ variant_name = None
+ parent = posixpath.dirname(path)
+ grandparent = posixpath.dirname(parent)
+ parent_basename = posixpath.basename(parent)
+ (parent_root, parent_ext) = posixpath.splitext(parent_basename)
+ if parent_ext == '.lproj':
+ variant_name = parent_root
+ if grandparent == '':
+ grandparent = None
+
+ # Putting a directory inside a variant group is not currently supported.
+ assert not is_dir or variant_name is None
+
+ path_split = path.split(posixpath.sep)
+ if len(path_split) == 1 or \
+ ((is_dir or variant_name != None) and len(path_split) == 2) or \
+ not hierarchical:
+ # The PBXFileReference or PBXVariantGroup will be added to or gotten from
+ # this PBXGroup, no recursion necessary.
+ if variant_name is None:
+ # Add or get a PBXFileReference.
+ file_ref = self.GetChildByPath(path)
+ if file_ref != None:
+ assert file_ref.__class__ == PBXFileReference
+ else:
+ file_ref = PBXFileReference({'path': path})
+ self.AppendChild(file_ref)
+ else:
+ # Add or get a PBXVariantGroup. The variant group name is the same
+ # as the basename (MainMenu.nib in the example above). grandparent
+ # specifies the path to the variant group itself, and path_split[-2:]
+ # is the path of the specific variant relative to its group.
+ variant_group_name = posixpath.basename(path)
+ variant_group_ref = self.AddOrGetVariantGroupByNameAndPath(
+ variant_group_name, grandparent)
+ variant_path = posixpath.sep.join(path_split[-2:])
+ variant_ref = variant_group_ref.GetChildByPath(variant_path)
+ if variant_ref != None:
+ assert variant_ref.__class__ == PBXFileReference
+ else:
+ variant_ref = PBXFileReference({'name': variant_name,
+ 'path': variant_path})
+ variant_group_ref.AppendChild(variant_ref)
+ # The caller is interested in the variant group, not the specific
+ # variant file.
+ file_ref = variant_group_ref
+ return file_ref
+ else:
+ # Hierarchical recursion. Add or get a PBXGroup corresponding to the
+ # outermost path component, and then recurse into it, chopping off that
+ # path component.
+ next_dir = path_split[0]
+ group_ref = self.GetChildByPath(next_dir)
+ if group_ref != None:
+ assert group_ref.__class__ == PBXGroup
+ else:
+ group_ref = PBXGroup({'path': next_dir})
+ self.AppendChild(group_ref)
+ return group_ref.AddOrGetFileByPath(posixpath.sep.join(path_split[1:]),
+ hierarchical)
+
+ def AddOrGetVariantGroupByNameAndPath(self, name, path):
+ """Returns an existing or new PBXVariantGroup for name and path.
+
+ If a PBXVariantGroup identified by the name and path arguments is already
+ present as a child of this object, it is returned. Otherwise, a new
+ PBXVariantGroup with the correct properties is created, added as a child,
+ and returned.
+
+ This method will generally be called by AddOrGetFileByPath, which knows
+ when to create a variant group based on the structure of the pathnames
+ passed to it.
+ """
+
+ key = (name, path)
+ if key in self._variant_children_by_name_and_path:
+ variant_group_ref = self._variant_children_by_name_and_path[key]
+ assert variant_group_ref.__class__ == PBXVariantGroup
+ return variant_group_ref
+
+ variant_group_properties = {'name': name}
+ if path != None:
+ variant_group_properties['path'] = path
+ variant_group_ref = PBXVariantGroup(variant_group_properties)
+ self.AppendChild(variant_group_ref)
+
+ return variant_group_ref
+
+ def TakeOverOnlyChild(self, recurse=False):
+ """If this PBXGroup has only one child and it's also a PBXGroup, take
+ it over by making all of its children this object's children.
+
+ This function will continue to take over only children when those children
+ are groups. If there are three PBXGroups representing a, b, and c, with
+ c inside b and b inside a, and a and b have no other children, this will
+ result in a taking over both b and c, forming a PBXGroup for a/b/c.
+
+ If recurse is True, this function will recurse into children and ask them
+ to collapse themselves by taking over only children as well. Assuming
+ an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f
+ (d1, d2, and f are files, the rest are groups), recursion will result in
+ a group for a/b/c containing a group for d3/e.
+ """
+
+ # At this stage, check that child class types are PBXGroup exactly,
+ # instead of using isinstance. The only subclass of PBXGroup,
+ # PBXVariantGroup, should not participate in reparenting in the same way:
+ # reparenting by merging different object types would be wrong.
+ while len(self._properties['children']) == 1 and \
+ self._properties['children'][0].__class__ == PBXGroup:
+ # Loop to take over the innermost only-child group possible.
+
+ child = self._properties['children'][0]
+
+ # Assume the child's properties, including its children. Save a copy
+ # of this object's old properties, because they'll still be needed.
+ # This object retains its existing id and parent attributes.
+ old_properties = self._properties
+ self._properties = child._properties
+ self._children_by_path = child._children_by_path
+
+ if not 'sourceTree' in self._properties or \
+ self._properties['sourceTree'] == '<group>':
+ # The child was relative to its parent. Fix up the path. Note that
+ # children with a sourceTree other than "<group>" are not relative to
+ # their parents, so no path fix-up is needed in that case.
+ if 'path' in old_properties:
+ if 'path' in self._properties:
+ # Both the original parent and child have paths set.
+ self._properties['path'] = posixpath.join(old_properties['path'],
+ self._properties['path'])
+ else:
+ # Only the original parent has a path, use it.
+ self._properties['path'] = old_properties['path']
+ if 'sourceTree' in old_properties:
+ # The original parent had a sourceTree set, use it.
+ self._properties['sourceTree'] = old_properties['sourceTree']
+
+ # If the original parent had a name set, keep using it. If the original
+ # parent didn't have a name but the child did, let the child's name
+ # live on. If the name attribute seems unnecessary now, get rid of it.
+ if 'name' in old_properties and old_properties['name'] != None and \
+ old_properties['name'] != self.Name():
+ self._properties['name'] = old_properties['name']
+ if 'name' in self._properties and 'path' in self._properties and \
+ self._properties['name'] == self._properties['path']:
+ del self._properties['name']
+
+ # Notify all children of their new parent.
+ for child in self._properties['children']:
+ child.parent = self
+
+ # If asked to recurse, recurse.
+ if recurse:
+ for child in self._properties['children']:
+ if child.__class__ == PBXGroup:
+ child.TakeOverOnlyChild(recurse)
+
+ def SortGroup(self):
+ self._properties['children'] = \
+ sorted(self._properties['children'],
+ key=functools.cmp_to_key(XCHierarchicalElement.Compare))
+
+ # Recurse.
+ for child in self._properties['children']:
+ if isinstance(child, PBXGroup):
+ child.SortGroup()
+
+
+class XCFileLikeElement(XCHierarchicalElement):
+ # Abstract base for objects that can be used as the fileRef property of
+ # PBXBuildFile.
+
+ def PathHashables(self):
+ # A PBXBuildFile that refers to this object will call this method to
+ # obtain additional hashables specific to this XCFileLikeElement. Don't
+ # just use this object's hashables, they're not specific and unique enough
+ # on their own (without access to the parent hashables.) Instead, provide
+ # hashables that identify this object by path by getting its hashables as
+ # well as the hashables of ancestor XCHierarchicalElement objects.
+
+ hashables = []
+ xche = self
+ while xche != None and isinstance(xche, XCHierarchicalElement):
+ xche_hashables = xche.Hashables()
+ for index, xche_hashable in enumerate(xche_hashables):
+ hashables.insert(index, xche_hashable)
+ xche = xche.parent
+ return hashables
+
+
+class XCContainerPortal(XCObject):
+ # Abstract base for objects that can be used as the containerPortal property
+ # of PBXContainerItemProxy.
+ pass
+
+
+class XCRemoteObject(XCObject):
+ # Abstract base for objects that can be used as the remoteGlobalIDString
+ # property of PBXContainerItemProxy.
+ pass
+
+
+class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
+ _schema = XCFileLikeElement._schema.copy()
+ _schema.update({
+ 'explicitFileType': [0, str, 0, 0],
+ 'lastKnownFileType': [0, str, 0, 0],
+ 'name': [0, str, 0, 0],
+ 'path': [0, str, 0, 1],
+ })
+
+ # Weird output rules for PBXFileReference.
+ _should_print_single_line = True
+ # super
+ _encode_transforms = XCFileLikeElement._alternate_encode_transforms
+
+ def __init__(self, properties=None, id=None, parent=None):
+ # super
+ XCFileLikeElement.__init__(self, properties, id, parent)
+ if 'path' in self._properties and self._properties['path'].endswith('/'):
+ self._properties['path'] = self._properties['path'][:-1]
+ is_dir = True
+ else:
+ is_dir = False
+
+ if 'path' in self._properties and \
+ not 'lastKnownFileType' in self._properties and \
+ not 'explicitFileType' in self._properties:
+ # TODO(mark): This is the replacement for a replacement for a quick hack.
+ # It is no longer incredibly sucky, but this list needs to be extended.
+ extension_map = {
+ 'a': 'archive.ar',
+ 'app': 'wrapper.application',
+ 'bdic': 'file',
+ 'bundle': 'wrapper.cfbundle',
+ 'c': 'sourcecode.c.c',
+ 'cc': 'sourcecode.cpp.cpp',
+ 'cpp': 'sourcecode.cpp.cpp',
+ 'css': 'text.css',
+ 'cxx': 'sourcecode.cpp.cpp',
+ 'dart': 'sourcecode',
+ 'dylib': 'compiled.mach-o.dylib',
+ 'framework': 'wrapper.framework',
+ 'gyp': 'sourcecode',
+ 'gypi': 'sourcecode',
+ 'h': 'sourcecode.c.h',
+ 'hxx': 'sourcecode.cpp.h',
+ 'icns': 'image.icns',
+ 'java': 'sourcecode.java',
+ 'js': 'sourcecode.javascript',
+ 'kext': 'wrapper.kext',
+ 'm': 'sourcecode.c.objc',
+ 'mm': 'sourcecode.cpp.objcpp',
+ 'nib': 'wrapper.nib',
+ 'o': 'compiled.mach-o.objfile',
+ 'pdf': 'image.pdf',
+ 'pl': 'text.script.perl',
+ 'plist': 'text.plist.xml',
+ 'pm': 'text.script.perl',
+ 'png': 'image.png',
+ 'py': 'text.script.python',
+ 'r': 'sourcecode.rez',
+ 'rez': 'sourcecode.rez',
+ 's': 'sourcecode.asm',
+ 'storyboard': 'file.storyboard',
+ 'strings': 'text.plist.strings',
+ 'swift': 'sourcecode.swift',
+ 'ttf': 'file',
+ 'xcassets': 'folder.assetcatalog',
+ 'xcconfig': 'text.xcconfig',
+ 'xcdatamodel': 'wrapper.xcdatamodel',
+ 'xcdatamodeld': 'wrapper.xcdatamodeld',
+ 'xib': 'file.xib',
+ 'y': 'sourcecode.yacc',
+ 'tbd': 'sourcecode.text-based-dylib-definition',
+ }
+
+ prop_map = {
+ 'dart': 'explicitFileType',
+ 'gyp': 'explicitFileType',
+ 'gypi': 'explicitFileType',
+ }
+
+ if is_dir:
+ file_type = 'folder'
+ prop_name = 'lastKnownFileType'
+ else:
+ basename = posixpath.basename(self._properties['path'])
+ (root, ext) = posixpath.splitext(basename)
+ # Check the map using a lowercase extension.
+ # TODO(mark): Maybe it should try with the original case first and fall
+ # back to lowercase, in case there are any instances where case
+ # matters. There currently aren't.
+ if ext != '':
+ ext = ext[1:].lower()
+
+ # TODO(mark): "text" is the default value, but "file" is appropriate
+ # for unrecognized files not containing text. Xcode seems to choose
+ # based on content.
+ file_type = extension_map.get(ext, 'text')
+ prop_name = prop_map.get(ext, 'lastKnownFileType')
+
+ self._properties[prop_name] = file_type
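+
+ # Illustrative examples (assumed usage, not part of the original code):
+ # PBXFileReference({'path': 'foo/bar.cc'}) infers
+ # lastKnownFileType 'sourcecode.cpp.cpp';
+ # PBXFileReference({'path': 'foo/baz.gyp'}) sets explicitFileType per
+ # prop_map; and PBXFileReference({'path': 'Resources/'}) is treated as a
+ # folder.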
+
+
+class PBXVariantGroup(PBXGroup, XCFileLikeElement):
+ """PBXVariantGroup is used by Xcode to represent localizations."""
+ # No additions to the schema relative to PBXGroup.
+ pass
+
+
+# PBXReferenceProxy is also an XCFileLikeElement subclass. It is defined later
+# in this file because it uses PBXContainerItemProxy, which is also defined
+# later.
+
+
+class XCBuildConfiguration(XCObject):
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'baseConfigurationReference': [0, PBXFileReference, 0, 0],
+ 'buildSettings': [0, dict, 0, 1, {}],
+ 'name': [0, str, 0, 1],
+ })
+
+ def HasBuildSetting(self, key):
+ return key in self._properties['buildSettings']
+
+ def GetBuildSetting(self, key):
+ return self._properties['buildSettings'][key]
+
+ def SetBuildSetting(self, key, value):
+ # TODO(mark): If a list, copy?
+ self._properties['buildSettings'][key] = value
+
+ def AppendBuildSetting(self, key, value):
+ if not key in self._properties['buildSettings']:
+ self._properties['buildSettings'][key] = []
+ self._properties['buildSettings'][key].append(value)
+
+ def DelBuildSetting(self, key):
+ if key in self._properties['buildSettings']:
+ del self._properties['buildSettings'][key]
+
+ def SetBaseConfiguration(self, value):
+ self._properties['baseConfigurationReference'] = value
+
+
+class XCConfigurationList(XCObject):
+ # _configs is the default list of configurations.
+ _configs = [ XCBuildConfiguration({'name': 'Debug'}),
+ XCBuildConfiguration({'name': 'Release'}) ]
+
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'buildConfigurations': [1, XCBuildConfiguration, 1, 1, _configs],
+ 'defaultConfigurationIsVisible': [0, int, 0, 1, 1],
+ 'defaultConfigurationName': [0, str, 0, 1, 'Release'],
+ })
+
+ def Name(self):
+ return 'Build configuration list for ' + \
+ self.parent.__class__.__name__ + ' "' + self.parent.Name() + '"'
+
+ def ConfigurationNamed(self, name):
+ """Convenience accessor to obtain an XCBuildConfiguration by name."""
+ for configuration in self._properties['buildConfigurations']:
+ if configuration._properties['name'] == name:
+ return configuration
+
+ raise KeyError(name)
+
+ def DefaultConfiguration(self):
+ """Convenience accessor to obtain the default XCBuildConfiguration."""
+ return self.ConfigurationNamed(self._properties['defaultConfigurationName'])
+
+ def HasBuildSetting(self, key):
+ """Determines the state of a build setting in all XCBuildConfiguration
+ child objects.
+
+ If all child objects have key in their build settings, and the value is the
+ same in all child objects, returns 1.
+
+ If no child objects have the key in their build settings, returns 0.
+
+ If some, but not all, child objects have the key in their build settings,
+ or if any children have different values for the key, returns -1.
+ """
+
+ has = None
+ value = None
+ for configuration in self._properties['buildConfigurations']:
+ configuration_has = configuration.HasBuildSetting(key)
+ if has is None:
+ has = configuration_has
+ elif has != configuration_has:
+ return -1
+
+ if configuration_has:
+ configuration_value = configuration.GetBuildSetting(key)
+ if value is None:
+ value = configuration_value
+ elif value != configuration_value:
+ return -1
+
+ if not has:
+ return 0
+
+ return 1
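+
+ # Worked example (illustrative, not part of the original code): with child
+ # configurations Debug and Release, HasBuildSetting('FOO') returns 1 if both
+ # define 'FOO' with equal values, 0 if neither defines it, and -1 if only
+ # one defines it or if their values differ.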
+
+ def GetBuildSetting(self, key):
+ """Gets the build setting for key.
+
+ All child XCConfiguration objects must have the same value set for the
+ setting, or a ValueError will be raised.
+ """
+
+ # TODO(mark): This is wrong for build settings that are lists. The list
+ # contents should be compared (and a list copy returned?)
+
+ value = None
+ for configuration in self._properties['buildConfigurations']:
+ configuration_value = configuration.GetBuildSetting(key)
+ if value is None:
+ value = configuration_value
+ else:
+ if value != configuration_value:
+ raise ValueError('Variant values for ' + key)
+
+ return value
+
+ def SetBuildSetting(self, key, value):
+ """Sets the build setting for key to value in all child
+ XCBuildConfiguration objects.
+ """
+
+ for configuration in self._properties['buildConfigurations']:
+ configuration.SetBuildSetting(key, value)
+
+ def AppendBuildSetting(self, key, value):
+ """Appends value to the build setting for key, which is treated as a list,
+ in all child XCBuildConfiguration objects.
+ """
+
+ for configuration in self._properties['buildConfigurations']:
+ configuration.AppendBuildSetting(key, value)
+
+ def DelBuildSetting(self, key):
+ """Deletes the build setting key from all child XCBuildConfiguration
+ objects.
+ """
+
+ for configuration in self._properties['buildConfigurations']:
+ configuration.DelBuildSetting(key)
+
+ def SetBaseConfiguration(self, value):
+ """Sets the build configuration in all child XCBuildConfiguration objects.
+ """
+
+ for configuration in self._properties['buildConfigurations']:
+ configuration.SetBaseConfiguration(value)
+
+
+class PBXBuildFile(XCObject):
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'fileRef': [0, XCFileLikeElement, 0, 1],
+ 'settings': [0, str, 0, 0], # hack, it's a dict
+ })
+
+ # Weird output rules for PBXBuildFile.
+ _should_print_single_line = True
+ _encode_transforms = XCObject._alternate_encode_transforms
+
+ def Name(self):
+ # Example: "main.cc in Sources"
+ return self._properties['fileRef'].Name() + ' in ' + self.parent.Name()
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+ # It is not sufficient to just rely on Name() to get the
+ # XCFileLikeElement's name, because that is not a complete pathname.
+ # PathHashables returns hashables unique enough that no two
+ # PBXBuildFiles should wind up with the same set of hashables, unless
+ # someone adds the same file multiple times to the same target. That
+ # would be considered invalid anyway.
+ hashables.extend(self._properties['fileRef'].PathHashables())
+
+ return hashables
+
+
+class XCBuildPhase(XCObject):
+ """Abstract base for build phase classes. Not represented in a project
+ file.
+
+ Attributes:
+ _files_by_path: A dict mapping each path of a child in the files list
+ (keys) to the corresponding PBXBuildFile children (values).
+ _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
+ to the corresponding PBXBuildFile children (values).
+ """
+
+ # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't
+ # actually have a "files" list. XCBuildPhase should not have "files" but
+ # another abstract subclass of it should provide this, and concrete build
+ # phase types that do have "files" lists should be derived from that new
+ # abstract subclass. XCBuildPhase should only provide buildActionMask and
+ # runOnlyForDeploymentPostprocessing, and not files or the various
+ # file-related methods and attributes.
+
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'buildActionMask': [0, int, 0, 1, 0x7fffffff],
+ 'files': [1, PBXBuildFile, 1, 1, []],
+ 'runOnlyForDeploymentPostprocessing': [0, int, 0, 1, 0],
+ })
+
+ def __init__(self, properties=None, id=None, parent=None):
+ # super
+ XCObject.__init__(self, properties, id, parent)
+
+ self._files_by_path = {}
+ self._files_by_xcfilelikeelement = {}
+ for pbxbuildfile in self._properties.get('files', []):
+ self._AddBuildFileToDicts(pbxbuildfile)
+
+ def FileGroup(self, path):
+ # Subclasses must override this by returning a two-element tuple. The
+ # first item in the tuple should be the PBXGroup to which "path" should be
+ # added, either as a child or deeper descendant. The second item should
+ # be a boolean indicating whether files should be added into hierarchical
+ # groups or one single flat group.
+ raise NotImplementedError(
+ self.__class__.__name__ + ' must implement FileGroup')
+
+ def _AddPathToDict(self, pbxbuildfile, path):
+ """Adds path to the dict tracking paths belonging to this build phase.
+
+ If the path is already a member of this build phase, raises an exception.
+ """
+
+ if path in self._files_by_path:
+ raise ValueError('Found multiple build files with path ' + path)
+ self._files_by_path[path] = pbxbuildfile
+
+ def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
+ """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts.
+
+ If path is specified, then it is the path that is being added to the
+ phase, and pbxbuildfile must contain either a PBXFileReference directly
+ referencing that path, or it must contain a PBXVariantGroup that itself
+ contains a PBXFileReference referencing the path.
+
+ If path is not specified, either the PBXFileReference's path or the paths
+ of all children of the PBXVariantGroup are taken as being added to the
+ phase.
+
+ If the path is already present in the phase, raises an exception.
+
+ If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
+ are already present in the phase, referenced by a different PBXBuildFile
+ object, raises an exception. This does not raise an exception when
+ a PBXFileReference or PBXVariantGroup reappear and are referenced by the
+ same PBXBuildFile that has already introduced them, because in the case
+ of PBXVariantGroup objects, they may correspond to multiple paths that are
+ not all added simultaneously. When this situation occurs, the path needs
+ to be added to _files_by_path, but nothing needs to change in
+ _files_by_xcfilelikeelement, and the caller should have avoided adding
+ the PBXBuildFile if it is already present in the list of children.
+ """
+
+ xcfilelikeelement = pbxbuildfile._properties['fileRef']
+
+ paths = []
+ if path != None:
+ # It's best when the caller provides the path.
+ if isinstance(xcfilelikeelement, PBXVariantGroup):
+ paths.append(path)
+ else:
+ # If the caller didn't provide a path, there can be either multiple
+ # paths (PBXVariantGroup) or one.
+ if isinstance(xcfilelikeelement, PBXVariantGroup):
+ for variant in xcfilelikeelement._properties['children']:
+ paths.append(variant.FullPath())
+ else:
+ paths.append(xcfilelikeelement.FullPath())
+
+ # Add the paths first, because if something's going to raise, the
+ # messages provided by _AddPathToDict are more useful owing to its
+ # having access to a real pathname and not just an object's Name().
+ for a_path in paths:
+ self._AddPathToDict(pbxbuildfile, a_path)
+
+ # If another PBXBuildFile references this XCFileLikeElement, there's a
+ # problem.
+ if xcfilelikeelement in self._files_by_xcfilelikeelement and \
+ self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile:
+ raise ValueError('Found multiple build files for ' + \
+ xcfilelikeelement.Name())
+ self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile
+
+ def AppendBuildFile(self, pbxbuildfile, path=None):
+ # Callers should use this instead of calling
+ # AppendProperty('files', pbxbuildfile) directly because this function
+ # maintains the object's dicts. Better yet, callers can just call AddFile
+ # with a pathname and not worry about building their own PBXBuildFile
+ # objects.
+ self.AppendProperty('files', pbxbuildfile)
+ self._AddBuildFileToDicts(pbxbuildfile, path)
+
+ def AddFile(self, path, settings=None):
+ (file_group, hierarchical) = self.FileGroup(path)
+ file_ref = file_group.AddOrGetFileByPath(path, hierarchical)
+
+ if file_ref in self._files_by_xcfilelikeelement and \
+ isinstance(file_ref, PBXVariantGroup):
+ # There's already a PBXBuildFile in this phase corresponding to the
+ # PBXVariantGroup. path just provides a new variant that belongs to
+ # the group. Add the path to the dict.
+ pbxbuildfile = self._files_by_xcfilelikeelement[file_ref]
+ self._AddBuildFileToDicts(pbxbuildfile, path)
+ else:
+ # Add a new PBXBuildFile to get file_ref into the phase.
+ if settings is None:
+ pbxbuildfile = PBXBuildFile({'fileRef': file_ref})
+ else:
+ pbxbuildfile = PBXBuildFile({'fileRef': file_ref, 'settings': settings})
+ self.AppendBuildFile(pbxbuildfile, path)
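+
+ # Illustrative calling pattern (assumed, not part of the original code):
+ # phase = native_target.SourcesPhase()
+ # phase.AddFile('dir/file.mm')
+ # AddFile() resolves the PBXFileReference via FileGroup() and
+ # AddOrGetFileByPath(), then wraps it in a PBXBuildFile, so callers do not
+ # need to construct PBXBuildFile objects themselves.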
+
+
+class PBXHeadersBuildPhase(XCBuildPhase):
+ # No additions to the schema relative to XCBuildPhase.
+
+ def Name(self):
+ return 'Headers'
+
+ def FileGroup(self, path):
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+
+
+class PBXResourcesBuildPhase(XCBuildPhase):
+ # No additions to the schema relative to XCBuildPhase.
+
+ def Name(self):
+ return 'Resources'
+
+ def FileGroup(self, path):
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+
+
+class PBXSourcesBuildPhase(XCBuildPhase):
+ # No additions to the schema relative to XCBuildPhase.
+
+ def Name(self):
+ return 'Sources'
+
+ def FileGroup(self, path):
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+
+
+class PBXFrameworksBuildPhase(XCBuildPhase):
+ # No additions to the schema relative to XCBuildPhase.
+
+ def Name(self):
+ return 'Frameworks'
+
+ def FileGroup(self, path):
+ (root, ext) = posixpath.splitext(path)
+ if ext != '':
+ ext = ext[1:].lower()
+ if ext == 'o':
+ # .o files are added to Xcode Frameworks phases, but conceptually they
+ # aren't frameworks; they're more like sources or intermediates. Redirect
+ # them to show up in one of those other groups.
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+ else:
+ return (self.PBXProjectAncestor().FrameworksGroup(), False)
+
+
+class PBXShellScriptBuildPhase(XCBuildPhase):
+ _schema = XCBuildPhase._schema.copy()
+ _schema.update({
+ 'inputPaths': [1, str, 0, 1, []],
+ 'name': [0, str, 0, 0],
+ 'outputPaths': [1, str, 0, 1, []],
+ 'shellPath': [0, str, 0, 1, '/bin/sh'],
+ 'shellScript': [0, str, 0, 1],
+ 'showEnvVarsInLog': [0, int, 0, 0],
+ })
+
+ def Name(self):
+ if 'name' in self._properties:
+ return self._properties['name']
+
+ return 'ShellScript'
+
+
+class PBXCopyFilesBuildPhase(XCBuildPhase):
+ _schema = XCBuildPhase._schema.copy()
+ _schema.update({
+ 'dstPath': [0, str, 0, 1],
+ 'dstSubfolderSpec': [0, int, 0, 1],
+ 'name': [0, str, 0, 0],
+ })
+
+ # path_tree_re matches "$(DIR)/path", "$(DIR)/$(DIR2)/path" or just "$(DIR)".
+ # Match group 1 is "DIR", group 3 is "path" or "$(DIR2)" or "$(DIR2)/path"
+ # or None. If group 3 is "path", group 4 will be None otherwise group 4 is
+ # "DIR2" and group 6 is "path".
+ path_tree_re = re.compile(r'^\$\((.*?)\)(/(\$\((.*?)\)(/(.*)|)|(.*)|)|)$')
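+
+ # Worked example (illustrative, not part of the original comments): for
+ # "$(BUILT_PRODUCTS_DIR)/$(WRAPPER_NAME)/foo", group 1 is
+ # "BUILT_PRODUCTS_DIR", group 3 is "$(WRAPPER_NAME)/foo", group 4 is
+ # "WRAPPER_NAME" and group 6 is "foo". For "$(BUILT_PRODUCTS_DIR)/foo",
+ # group 1 is "BUILT_PRODUCTS_DIR", group 3 is "foo" and group 4 is None.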
+
+ # path_tree_{first,second}_to_subfolder map names of Xcode variables to the
+ # associated dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase
+ # object.
+ path_tree_first_to_subfolder = {
+ # Types that can be chosen via the Xcode UI.
+ 'BUILT_PRODUCTS_DIR': 16, # Products Directory
+ 'BUILT_FRAMEWORKS_DIR': 10, # Not an official Xcode macro.
+ # Existed before support for the
+ # names below was added. Maps to
+ # "Frameworks".
+ }
+
+ path_tree_second_to_subfolder = {
+ 'WRAPPER_NAME': 1, # Wrapper
+ # Although Xcode's friendly name is "Executables", the destination
+ # is demonstrably the value of the build setting
+ # EXECUTABLE_FOLDER_PATH not EXECUTABLES_FOLDER_PATH.
+ 'EXECUTABLE_FOLDER_PATH': 6, # Executables.
+ 'UNLOCALIZED_RESOURCES_FOLDER_PATH': 7, # Resources
+ 'JAVA_FOLDER_PATH': 15, # Java Resources
+ 'FRAMEWORKS_FOLDER_PATH': 10, # Frameworks
+ 'SHARED_FRAMEWORKS_FOLDER_PATH': 11, # Shared Frameworks
+ 'SHARED_SUPPORT_FOLDER_PATH': 12, # Shared Support
+ 'PLUGINS_FOLDER_PATH': 13, # PlugIns
+ # For XPC Services, Xcode sets both dstPath and dstSubfolderSpec.
+ # Note that it re-uses the BUILT_PRODUCTS_DIR value for
+ # dstSubfolderSpec. dstPath is set below.
+ 'XPCSERVICES_FOLDER_PATH': 16, # XPC Services.
+ }
+
+ def Name(self):
+ if 'name' in self._properties:
+ return self._properties['name']
+
+ return 'CopyFiles'
+
+ def FileGroup(self, path):
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+
+ def SetDestination(self, path):
+ """Set the dstSubfolderSpec and dstPath properties from path.
+
+ path may be specified in the same notation used for XCHierarchicalElements,
+ specifically, "$(DIR)/path".
+ """
+
+ path_tree_match = self.path_tree_re.search(path)
+ if path_tree_match:
+ path_tree = path_tree_match.group(1)
+ if path_tree in self.path_tree_first_to_subfolder:
+ subfolder = self.path_tree_first_to_subfolder[path_tree]
+ relative_path = path_tree_match.group(3)
+ if relative_path is None:
+ relative_path = ''
+
+ if subfolder == 16 and path_tree_match.group(4) is not None:
+ # BUILT_PRODUCTS_DIR (16) is the first element in a path whose
+ # second element is possibly one of the variable names in
+ # path_tree_second_to_subfolder. Xcode sets the values of all these
+ # variables to relative paths so .gyp files must prefix them with
+ # BUILT_PRODUCTS_DIR, e.g.
+ # $(BUILT_PRODUCTS_DIR)/$(PLUGINS_FOLDER_PATH). Then
+ # xcode_emulation.py can export these variables with the same values
+ # as Xcode yet make & ninja files can determine the absolute path
+ # to the target. Xcode uses the dstSubfolderSpec value set here
+ # to determine the full path.
+ #
+ # An alternative of xcode_emulation.py setting the values to absolute
+ # paths when exporting these variables has been ruled out because
+ # then the values would be different depending on the build tool.
+ #
+ # Another alternative is to invent new names for the variables used
+ # to match to the subfolder indices in the second table. .gyp files
+ # then will not need to prepend $(BUILT_PRODUCTS_DIR) because
+ # xcode_emulation.py can set the values of those variables to
+ # the absolute paths when exporting. This is possibly the thinking
+ # behind BUILT_FRAMEWORKS_DIR which is used in exactly this manner.
+ #
+ # Requiring prepending BUILT_PRODUCTS_DIR has been chosen because
+ # this same way could be used to specify destinations in .gyp files
+ # that pre-date this addition to GYP. However they would only work
+ # with the Xcode generator. The previous version of xcode_emulation.py
+ # does not export these variables. Such files will get the benefit
+ # of the Xcode UI showing the proper destination name simply by
+ # regenerating the projects with this version of GYP.
+ path_tree = path_tree_match.group(4)
+ relative_path = path_tree_match.group(6)
+ separator = '/'
+
+ if path_tree in self.path_tree_second_to_subfolder:
+ subfolder = self.path_tree_second_to_subfolder[path_tree]
+ if relative_path is None:
+ relative_path = ''
+ separator = ''
+ if path_tree == 'XPCSERVICES_FOLDER_PATH':
+ relative_path = '$(CONTENTS_FOLDER_PATH)/XPCServices' \
+ + separator + relative_path
+ else:
+ # subfolder = 16 from above
+ # The second element of the path is an unrecognized variable.
+ # Include it and any remaining elements in relative_path.
+ relative_path = path_tree_match.group(3)
+
+ else:
+ # The path starts with an unrecognized Xcode variable
+ # name like $(SRCROOT). Xcode will still handle this
+ # as an "absolute path" that starts with the variable.
+ subfolder = 0
+ relative_path = path
+ elif path.startswith('/'):
+ # Special case. Absolute paths are in dstSubfolderSpec 0.
+ subfolder = 0
+ relative_path = path[1:]
+ else:
+ raise ValueError('Can\'t use path %s in a %s' % \
+ (path, self.__class__.__name__))
+
+ self._properties['dstPath'] = relative_path
+ self._properties['dstSubfolderSpec'] = subfolder
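+
+ # Illustrative examples (assumed, not part of the original code):
+ # SetDestination('$(BUILT_PRODUCTS_DIR)/$(FRAMEWORKS_FOLDER_PATH)/x')
+ # yields dstSubfolderSpec 10 and dstPath 'x';
+ # SetDestination('$(BUILT_PRODUCTS_DIR)/foo') yields dstSubfolderSpec 16
+ # and dstPath 'foo'; SetDestination('/usr/local/lib') yields
+ # dstSubfolderSpec 0 and dstPath 'usr/local/lib'.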
+
+
+class PBXBuildRule(XCObject):
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'compilerSpec': [0, str, 0, 1],
+ 'filePatterns': [0, str, 0, 0],
+ 'fileType': [0, str, 0, 1],
+ 'isEditable': [0, int, 0, 1, 1],
+ 'outputFiles': [1, str, 0, 1, []],
+ 'script': [0, str, 0, 0],
+ })
+
+ def Name(self):
+ # Not very inspired, but it's what Xcode uses.
+ return self.__class__.__name__
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+ # Use the hashables of the weak objects that this object refers to.
+ hashables.append(self._properties['fileType'])
+ if 'filePatterns' in self._properties:
+ hashables.append(self._properties['filePatterns'])
+ return hashables
+
+
+class PBXContainerItemProxy(XCObject):
+ # When referencing an item in this project file, containerPortal is the
+ # PBXProject root object of this project file. When referencing an item in
+ # another project file, containerPortal is a PBXFileReference identifying
+ # the other project file.
+ #
+ # When serving as a proxy to an XCTarget (in this project file or another),
+ # proxyType is 1. When serving as a proxy to a PBXFileReference (in another
+ # project file), proxyType is 2. Type 2 is used for references to the
+ # products of the other project file's targets.
+ #
+ # Xcode is weird about remoteGlobalIDString. Usually, it's printed without
+ # a comment, indicating that it's tracked internally simply as a string, but
+ # sometimes it's printed with a comment (usually when the object is initially
+ # created), indicating that it's tracked as a project file object at least
+ # sometimes. This module always tracks it as an object, but contains a hack
+ # to prevent it from printing the comment in the project file output. See
+ # _XCKVPrint.
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'containerPortal': [0, XCContainerPortal, 0, 1],
+ 'proxyType': [0, int, 0, 1],
+ 'remoteGlobalIDString': [0, XCRemoteObject, 0, 1],
+ 'remoteInfo': [0, str, 0, 1],
+ })
+
+ def __repr__(self):
+ props = self._properties
+ name = '%s.gyp:%s' % (props['containerPortal'].Name(), props['remoteInfo'])
+ return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
+
+ def Name(self):
+ # Admittedly not the best name, but it's what Xcode uses.
+ return self.__class__.__name__
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+ # Use the hashables of the weak objects that this object refers to.
+ hashables.extend(self._properties['containerPortal'].Hashables())
+ hashables.extend(self._properties['remoteGlobalIDString'].Hashables())
+ return hashables
+
+
+class PBXTargetDependency(XCObject):
+ # The "target" property accepts an XCTarget object, and obviously not
+ # NoneType. But XCTarget is defined below, so it can't be put into the
+ # schema yet. The definition of PBXTargetDependency can't be moved below
+ # XCTarget because XCTarget's own schema references PBXTargetDependency.
+ # Python doesn't deal well with this circular relationship, and doesn't have
+ # a real way to do forward declarations. To work around, the type of
+ # the "target" property is reset below, after XCTarget is defined.
+ #
+ # At least one of "name" and "target" is required.
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'name': [0, str, 0, 0],
+ 'target': [0, None.__class__, 0, 0],
+ 'targetProxy': [0, PBXContainerItemProxy, 1, 1],
+ })
+
+ def __repr__(self):
+ name = self._properties.get('name') or self._properties['target'].Name()
+ return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
+
+ def Name(self):
+ # Admittedly not the best name, but it's what Xcode uses.
+ return self.__class__.__name__
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+ # Use the hashables of the weak objects that this object refers to.
+ hashables.extend(self._properties['targetProxy'].Hashables())
+ return hashables
+
+
+class PBXReferenceProxy(XCFileLikeElement):
+ _schema = XCFileLikeElement._schema.copy()
+ _schema.update({
+ 'fileType': [0, str, 0, 1],
+ 'path': [0, str, 0, 1],
+ 'remoteRef': [0, PBXContainerItemProxy, 1, 1],
+ })
+
+
+class XCTarget(XCRemoteObject):
+ # An XCTarget is really just an XCObject; the XCRemoteObject base is just
+ # there to allow an XCTarget to be used in the remoteGlobalIDString property
+ # of PBXContainerItemProxy.
+ #
+ # Setting a "name" property at instantiation may also affect "productName",
+ # which may in turn affect the "PRODUCT_NAME" build setting in children of
+ # "buildConfigurationList". See __init__ below.
+ _schema = XCRemoteObject._schema.copy()
+ _schema.update({
+ 'buildConfigurationList': [0, XCConfigurationList, 1, 1,
+ XCConfigurationList()],
+ 'buildPhases': [1, XCBuildPhase, 1, 1, []],
+ 'dependencies': [1, PBXTargetDependency, 1, 1, []],
+ 'name': [0, str, 0, 1],
+ 'productName': [0, str, 0, 1],
+ })
+
+ def __init__(self, properties=None, id=None, parent=None,
+ force_outdir=None, force_prefix=None, force_extension=None):
+ # super
+ XCRemoteObject.__init__(self, properties, id, parent)
+
+ # Set up additional defaults not expressed in the schema. If a "name"
+ # property was supplied, set "productName" if it is not present. Also set
+ # the "PRODUCT_NAME" build setting in each configuration, but only if
+ # the setting is not present in any build configuration.
+ if 'name' in self._properties:
+ if not 'productName' in self._properties:
+ self.SetProperty('productName', self._properties['name'])
+
+ if 'productName' in self._properties:
+ if 'buildConfigurationList' in self._properties:
+ configs = self._properties['buildConfigurationList']
+ if configs.HasBuildSetting('PRODUCT_NAME') == 0:
+ configs.SetBuildSetting('PRODUCT_NAME',
+ self._properties['productName'])
+
+ def AddDependency(self, other):
+ pbxproject = self.PBXProjectAncestor()
+ other_pbxproject = other.PBXProjectAncestor()
+ if pbxproject == other_pbxproject:
+ # Add a dependency to another target in the same project file.
+ container = PBXContainerItemProxy({'containerPortal': pbxproject,
+ 'proxyType': 1,
+ 'remoteGlobalIDString': other,
+ 'remoteInfo': other.Name()})
+ dependency = PBXTargetDependency({'target': other,
+ 'targetProxy': container})
+ self.AppendProperty('dependencies', dependency)
+ else:
+ # Add a dependency to a target in a different project file.
+ other_project_ref = \
+ pbxproject.AddOrGetProjectReference(other_pbxproject)[1]
+ container = PBXContainerItemProxy({
+ 'containerPortal': other_project_ref,
+ 'proxyType': 1,
+ 'remoteGlobalIDString': other,
+ 'remoteInfo': other.Name(),
+ })
+ dependency = PBXTargetDependency({'name': other.Name(),
+ 'targetProxy': container})
+ self.AppendProperty('dependencies', dependency)
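+
+ # Illustrative usage (assumed, not part of the original code):
+ # app_target.AddDependency(lib_target)
+ # creates a PBXContainerItemProxy/PBXTargetDependency pair; when the two
+ # targets live in different project files, AddOrGetProjectReference() is
+ # called on this project to reference the other project file first.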
+
+ # Proxy all of these through to the build configuration list.
+
+ def ConfigurationNamed(self, name):
+ return self._properties['buildConfigurationList'].ConfigurationNamed(name)
+
+ def DefaultConfiguration(self):
+ return self._properties['buildConfigurationList'].DefaultConfiguration()
+
+ def HasBuildSetting(self, key):
+ return self._properties['buildConfigurationList'].HasBuildSetting(key)
+
+ def GetBuildSetting(self, key):
+ return self._properties['buildConfigurationList'].GetBuildSetting(key)
+
+ def SetBuildSetting(self, key, value):
+ return self._properties['buildConfigurationList'].SetBuildSetting(key, \
+ value)
+
+ def AppendBuildSetting(self, key, value):
+ return self._properties['buildConfigurationList'].AppendBuildSetting(key, \
+ value)
+
+ def DelBuildSetting(self, key):
+ return self._properties['buildConfigurationList'].DelBuildSetting(key)
+
+
+# Redefine the type of the "target" property. See PBXTargetDependency._schema
+# above.
+PBXTargetDependency._schema['target'][1] = XCTarget
+
+
+class PBXNativeTarget(XCTarget):
+ # buildPhases is overridden in the schema to be able to set defaults.
+ #
+ # NOTE: Contrary to most objects, it is advisable to set parent when
+ # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject
+ # object. A parent reference is required for a PBXNativeTarget during
+ # construction to be able to set up the target defaults for productReference,
+ # because a PBXFileReference object must be created for the target's product
+ # and added to the PBXProject's mainGroup hierarchy.
+ _schema = XCTarget._schema.copy()
+ _schema.update({
+ 'buildPhases': [1, XCBuildPhase, 1, 1,
+ [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()]],
+ 'buildRules': [1, PBXBuildRule, 1, 1, []],
+ 'productReference': [0, PBXFileReference, 0, 1],
+ 'productType': [0, str, 0, 1],
+ })
+
+ # Mapping from Xcode product-types to settings. The settings are:
+ # filetype : used for explicitFileType in the project file
+ # prefix : the prefix for the file name
+ # suffix : the suffix for the file name
+ _product_filetypes = {
+ 'com.apple.product-type.application': ['wrapper.application',
+ '', '.app'],
+ 'com.apple.product-type.application.watchapp': ['wrapper.application',
+ '', '.app'],
+ 'com.apple.product-type.watchkit-extension': ['wrapper.app-extension',
+ '', '.appex'],
+ 'com.apple.product-type.app-extension': ['wrapper.app-extension',
+ '', '.appex'],
+ 'com.apple.product-type.bundle': ['wrapper.cfbundle',
+ '', '.bundle'],
+ 'com.apple.product-type.framework': ['wrapper.framework',
+ '', '.framework'],
+ 'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib',
+ 'lib', '.dylib'],
+ 'com.apple.product-type.library.static': ['archive.ar',
+ 'lib', '.a'],
+ 'com.apple.product-type.tool': ['compiled.mach-o.executable',
+ '', ''],
+ 'com.apple.product-type.bundle.unit-test': ['wrapper.cfbundle',
+ '', '.xctest'],
+ 'com.apple.product-type.bundle.ui-testing': ['wrapper.cfbundle',
+ '', '.xctest'],
+ 'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib',
+ '', '.so'],
+ 'com.apple.product-type.kernel-extension': ['wrapper.kext',
+ '', '.kext'],
+ }
+
+ def __init__(self, properties=None, id=None, parent=None,
+ force_outdir=None, force_prefix=None, force_extension=None):
+ # super
+ XCTarget.__init__(self, properties, id, parent)
+
+ if 'productName' in self._properties and \
+ 'productType' in self._properties and \
+ not 'productReference' in self._properties and \
+ self._properties['productType'] in self._product_filetypes:
+ products_group = None
+ pbxproject = self.PBXProjectAncestor()
+ if pbxproject != None:
+ products_group = pbxproject.ProductsGroup()
+
+ if products_group != None:
+ (filetype, prefix, suffix) = \
+ self._product_filetypes[self._properties['productType']]
+ # Xcode does not have a distinct type for loadable modules that are
+ # pure BSD targets (not in a bundle wrapper). GYP allows such modules
+ # to be specified by setting a target type to loadable_module without
+ # having mac_bundle set. These are mapped to the pseudo-product type
+ # com.googlecode.gyp.xcode.bundle.
+ #
+ # By picking up this special type and converting it to a dynamic
+ # library (com.apple.product-type.library.dynamic) with fix-ups,
+ # single-file loadable modules can be produced.
+ #
+ # MACH_O_TYPE is changed to mh_bundle to produce the proper file type
+ # (as opposed to mh_dylib). In order for linking to succeed,
+ # DYLIB_CURRENT_VERSION and DYLIB_COMPATIBILITY_VERSION must be
+ # cleared. They are meaningless for type mh_bundle.
+ #
+ # Finally, the .so extension is forcibly applied over the default
+ # (.dylib), unless another forced extension is already selected.
+ # .dylib is plainly wrong, and .bundle is used by loadable_modules in
+ # bundle wrappers (com.apple.product-type.bundle). .so seems an odd
+ # choice because it's used as the extension on many other systems that
+ # don't distinguish between linkable shared libraries and non-linkable
+ # loadable modules, but there's precedent: Python loadable modules on
+ # Mac OS X use an .so extension.
+ if self._properties['productType'] == 'com.googlecode.gyp.xcode.bundle':
+ self._properties['productType'] = \
+ 'com.apple.product-type.library.dynamic'
+ self.SetBuildSetting('MACH_O_TYPE', 'mh_bundle')
+ self.SetBuildSetting('DYLIB_CURRENT_VERSION', '')
+ self.SetBuildSetting('DYLIB_COMPATIBILITY_VERSION', '')
+ if force_extension is None:
+ force_extension = suffix[1:]
+
+ if self._properties['productType'] == \
+ 'com.apple.product-type.bundle.unit-test' or \
+ self._properties['productType'] == \
+ 'com.apple.product-type.bundle.ui-testing':
+ if force_extension is None:
+ force_extension = suffix[1:]
+
+ if force_extension is not None:
+ # If it's a wrapper (bundle), set WRAPPER_EXTENSION.
+ # Extension override.
+ suffix = '.' + force_extension
+ if filetype.startswith('wrapper.'):
+ self.SetBuildSetting('WRAPPER_EXTENSION', force_extension)
+ else:
+ self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension)
+
+ if filetype.startswith('compiled.mach-o.executable'):
+ product_name = self._properties['productName']
+ product_name += suffix
+ suffix = ''
+ self.SetProperty('productName', product_name)
+ self.SetBuildSetting('PRODUCT_NAME', product_name)
+
+ # Xcode handles most prefixes based on the target type, however there
+ # are exceptions. If a "BSD Dynamic Library" target is added in the
+ # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that
+ # behavior.
+ if force_prefix is not None:
+ prefix = force_prefix
+ if filetype.startswith('wrapper.'):
+ self.SetBuildSetting('WRAPPER_PREFIX', prefix)
+ else:
+ self.SetBuildSetting('EXECUTABLE_PREFIX', prefix)
+
+ if force_outdir is not None:
+ self.SetBuildSetting('TARGET_BUILD_DIR', force_outdir)
+
+ # TODO(tvl): Remove the below hack.
+ # http://code.google.com/p/gyp/issues/detail?id=122
+
+ # Some targets include the prefix in the target_name. These targets
+ # really should just add a product_name setting that doesn't include
+ # the prefix. For example:
+ # target_name = 'libevent', product_name = 'event'
+ # This check cleans up for them.
+ product_name = self._properties['productName']
+ prefix_len = len(prefix)
+ if prefix_len and (product_name[:prefix_len] == prefix):
+ product_name = product_name[prefix_len:]
+ self.SetProperty('productName', product_name)
+ self.SetBuildSetting('PRODUCT_NAME', product_name)
+
+ ref_props = {
+ 'explicitFileType': filetype,
+ 'includeInIndex': 0,
+ 'path': prefix + product_name + suffix,
+ 'sourceTree': 'BUILT_PRODUCTS_DIR',
+ }
+ file_ref = PBXFileReference(ref_props)
+ products_group.AppendChild(file_ref)
+ self.SetProperty('productReference', file_ref)
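+
+ # Illustrative examples (assumed, not part of the original code): a target
+ # with productName 'foo' and productType
+ # 'com.apple.product-type.library.static' gets a productReference whose
+ # path is 'libfoo.a' in BUILT_PRODUCTS_DIR, while productType
+ # 'com.apple.product-type.tool' yields an executable simply named 'foo'.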
+
+ def GetBuildPhaseByType(self, type):
+ if not 'buildPhases' in self._properties:
+ return None
+
+ the_phase = None
+ for phase in self._properties['buildPhases']:
+ if isinstance(phase, type):
+ # Some phases may be present in multiples in a well-formed project file,
+ # but phases like PBXSourcesBuildPhase may only be present singly, and
+ # this function is intended for those singly-present phase types. Loop
+ # over the entire list of phases and assert if more than one of the
+ # desired type is found.
+ assert the_phase is None
+ the_phase = phase
+
+ return the_phase
+
+ def HeadersPhase(self):
+ headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase)
+ if headers_phase is None:
+ headers_phase = PBXHeadersBuildPhase()
+
+ # The headers phase should come before the resources, sources, and
+ # frameworks phases, if any.
+ insert_at = len(self._properties['buildPhases'])
+ for index, phase in enumerate(self._properties['buildPhases']):
+ if isinstance(phase, PBXResourcesBuildPhase) or \
+ isinstance(phase, PBXSourcesBuildPhase) or \
+ isinstance(phase, PBXFrameworksBuildPhase):
+ insert_at = index
+ break
+
+ self._properties['buildPhases'].insert(insert_at, headers_phase)
+ headers_phase.parent = self
+
+ return headers_phase
+
+ def ResourcesPhase(self):
+ resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase)
+ if resources_phase is None:
+ resources_phase = PBXResourcesBuildPhase()
+
+ # The resources phase should come before the sources and frameworks
+ # phases, if any.
+ insert_at = len(self._properties['buildPhases'])
+ for index, phase in enumerate(self._properties['buildPhases']):
+ if isinstance(phase, PBXSourcesBuildPhase) or \
+ isinstance(phase, PBXFrameworksBuildPhase):
+ insert_at = index
+ break
+
+ self._properties['buildPhases'].insert(insert_at, resources_phase)
+ resources_phase.parent = self
+
+ return resources_phase
+
+ def SourcesPhase(self):
+ sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase)
+ if sources_phase is None:
+ sources_phase = PBXSourcesBuildPhase()
+ self.AppendProperty('buildPhases', sources_phase)
+
+ return sources_phase
+
+ def FrameworksPhase(self):
+ frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase)
+ if frameworks_phase is None:
+ frameworks_phase = PBXFrameworksBuildPhase()
+ self.AppendProperty('buildPhases', frameworks_phase)
+
+ return frameworks_phase
+
+ def AddDependency(self, other):
+ # super
+ XCTarget.AddDependency(self, other)
+
+ static_library_type = 'com.apple.product-type.library.static'
+ shared_library_type = 'com.apple.product-type.library.dynamic'
+ framework_type = 'com.apple.product-type.framework'
+ if isinstance(other, PBXNativeTarget) and \
+ 'productType' in self._properties and \
+ self._properties['productType'] != static_library_type and \
+ 'productType' in other._properties and \
+ (other._properties['productType'] == static_library_type or \
+ ((other._properties['productType'] == shared_library_type or \
+ other._properties['productType'] == framework_type) and \
+ ((not other.HasBuildSetting('MACH_O_TYPE')) or
+ other.GetBuildSetting('MACH_O_TYPE') != 'mh_bundle'))):
+
+ file_ref = other.GetProperty('productReference')
+
+ pbxproject = self.PBXProjectAncestor()
+ other_pbxproject = other.PBXProjectAncestor()
+ if pbxproject != other_pbxproject:
+ other_project_product_group = \
+ pbxproject.AddOrGetProjectReference(other_pbxproject)[0]
+ file_ref = other_project_product_group.GetChildByRemoteObject(file_ref)
+
+ self.FrameworksPhase().AppendProperty('files',
+ PBXBuildFile({'fileRef': file_ref}))
+
+
+class PBXAggregateTarget(XCTarget):
+ pass
+
+
+class PBXProject(XCContainerPortal):
+ # A PBXProject is really just an XCObject, the XCContainerPortal thing is
+ # just to allow PBXProject to be used in the containerPortal property of
+ # PBXContainerItemProxy.
+ """
+
+ Attributes:
+ path: "sample.xcodeproj". TODO(mark) Document me!
+ _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each
+ value is a reference to the dict in the
+ projectReferences list associated with the keyed
+ PBXProject.
+ """
+
+ _schema = XCContainerPortal._schema.copy()
+ _schema.update({
+ 'attributes': [0, dict, 0, 0],
+ 'buildConfigurationList': [0, XCConfigurationList, 1, 1,
+ XCConfigurationList()],
+ 'compatibilityVersion': [0, str, 0, 1, 'Xcode 3.2'],
+ 'hasScannedForEncodings': [0, int, 0, 1, 1],
+ 'mainGroup': [0, PBXGroup, 1, 1, PBXGroup()],
+ 'projectDirPath': [0, str, 0, 1, ''],
+ 'projectReferences': [1, dict, 0, 0],
+ 'projectRoot': [0, str, 0, 1, ''],
+ 'targets': [1, XCTarget, 1, 1, []],
+ })
+
+ def __init__(self, properties=None, id=None, parent=None, path=None):
+ self.path = path
+ self._other_pbxprojects = {}
+ # super
+ return XCContainerPortal.__init__(self, properties, id, parent)
+
+ def Name(self):
+ name = self.path
+ if name[-10:] == '.xcodeproj':
+ name = name[:-10]
+ return posixpath.basename(name)
+
+ def Path(self):
+ return self.path
+
+ def Comment(self):
+ return 'Project object'
+
+ def Children(self):
+ # super
+ children = XCContainerPortal.Children(self)
+
+ # Add children that the schema doesn't know about. Maybe there's a more
+ # elegant way around this, but this is the only case where we need to own
+ # objects in a dictionary (that is itself in a list), and three lines for
+ # a one-off isn't that big a deal.
+ if 'projectReferences' in self._properties:
+ for reference in self._properties['projectReferences']:
+ children.append(reference['ProductGroup'])
+
+ return children
+
+ def PBXProjectAncestor(self):
+ return self
+
+ def _GroupByName(self, name):
+ if not 'mainGroup' in self._properties:
+ self.SetProperty('mainGroup', PBXGroup())
+
+ main_group = self._properties['mainGroup']
+ group = main_group.GetChildByName(name)
+ if group is None:
+ group = PBXGroup({'name': name})
+ main_group.AppendChild(group)
+
+ return group
+
+ # SourceGroup and ProductsGroup are created by default in Xcode's own
+ # templates.
+ def SourceGroup(self):
+ return self._GroupByName('Source')
+
+ def ProductsGroup(self):
+ return self._GroupByName('Products')
+
+ # IntermediatesGroup is used to collect source-like files that are generated
+ # by rules or script phases and are placed in intermediate directories such
+ # as DerivedSources.
+ def IntermediatesGroup(self):
+ return self._GroupByName('Intermediates')
+
+ # FrameworksGroup and ProjectsGroup are top-level groups used to collect
+ # frameworks and projects.
+ def FrameworksGroup(self):
+ return self._GroupByName('Frameworks')
+
+ def ProjectsGroup(self):
+ return self._GroupByName('Projects')
+
+ def RootGroupForPath(self, path):
+ """Returns a PBXGroup child of this object to which path should be added.
+
+ This method is intended to choose between SourceGroup and
+ IntermediatesGroup on the basis of whether path is present in a source
+ directory or an intermediates directory. For the purposes of this
+ determination, any path located within a derived file directory such as
+ PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates
+ directory.
+
+ The returned value is a two-element tuple. The first element is the
+ PBXGroup, and the second element specifies whether that group should be
+ organized hierarchically (True) or as a single flat list (False).
+ """
+
+ # TODO(mark): make this a class variable and bind to self on call?
+ # Also, this list is nowhere near exhaustive.
+ # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by
+ # gyp.generator.xcode. There should probably be some way for that module
+ # to push the names in, rather than having to hard-code them here.
+ source_tree_groups = {
+ 'DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
+ 'INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
+ 'PROJECT_DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
+ 'SHARED_INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
+ }
+
+ (source_tree, path) = SourceTreeAndPathFromPath(path)
+ if source_tree != None and source_tree in source_tree_groups:
+ (group_func, hierarchical) = source_tree_groups[source_tree]
+ group = group_func()
+ return (group, hierarchical)
+
+ # TODO(mark): make additional choices based on file extension.
+
+ return (self.SourceGroup(), True)
+
+ def AddOrGetFileInRootGroup(self, path):
+ """Returns a PBXFileReference corresponding to path in the correct group
+ according to RootGroupForPath's heuristics.
+
+ If an existing PBXFileReference for path exists, it will be returned.
+ Otherwise, one will be created and returned.
+ """
+
+ (group, hierarchical) = self.RootGroupForPath(path)
+ return group.AddOrGetFileByPath(path, hierarchical)
+
+ def RootGroupsTakeOverOnlyChildren(self, recurse=False):
+ """Calls TakeOverOnlyChild for all groups in the main group."""
+
+ for group in self._properties['mainGroup']._properties['children']:
+ if isinstance(group, PBXGroup):
+ group.TakeOverOnlyChild(recurse)
+
+ def SortGroups(self):
+ # Sort the children of the mainGroup (like "Source" and "Products")
+ # according to their defined order.
+ self._properties['mainGroup']._properties['children'] = \
+ sorted(self._properties['mainGroup']._properties['children'],
+ key=functools.cmp_to_key(XCHierarchicalElement.CompareRootGroup))
+
+ # Sort everything else by putting group before files, and going
+ # alphabetically by name within sections of groups and files. SortGroup
+ # is recursive.
+ for group in self._properties['mainGroup']._properties['children']:
+ if not isinstance(group, PBXGroup):
+ continue
+
+ if group.Name() == 'Products':
+ # The Products group is a special case. Instead of sorting
+ # alphabetically, sort things in the order of the targets that
+ # produce the products. To do this, just build up a new list of
+ # products based on the targets.
+ products = []
+ for target in self._properties['targets']:
+ if not isinstance(target, PBXNativeTarget):
+ continue
+ product = target._properties['productReference']
+ # Make sure that the product is already in the products group.
+ assert product in group._properties['children']
+ products.append(product)
+
+ # Make sure that this process doesn't miss anything that was already
+ # in the products group.
+ assert len(products) == len(group._properties['children'])
+ group._properties['children'] = products
+ else:
+ group.SortGroup()
+
+ def AddOrGetProjectReference(self, other_pbxproject):
+ """Add a reference to another project file (via PBXProject object) to this
+ one.
+
+ Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in
+ this project file that contains a PBXReferenceProxy object for each
+ product of each PBXNativeTarget in the other project file. ProjectRef is
+ a PBXFileReference to the other project file.
+
+ If this project file already references the other project file, the
+ existing ProductGroup and ProjectRef are returned. The ProductGroup will
+ still be updated if necessary.
+ """
+
+ if not 'projectReferences' in self._properties:
+ self._properties['projectReferences'] = []
+
+ product_group = None
+ project_ref = None
+
+ if not other_pbxproject in self._other_pbxprojects:
+ # This project file isn't yet linked to the other one. Establish the
+ # link.
+ product_group = PBXGroup({'name': 'Products'})
+
+ # ProductGroup is strong.
+ product_group.parent = self
+
+ # There's nothing unique about this PBXGroup, and if left alone, it will
+ # wind up with the same set of hashables as all other PBXGroup objects
+ # owned by the projectReferences list. Add the hashables of the
+ # remote PBXProject that it's related to.
+ product_group._hashables.extend(other_pbxproject.Hashables())
+
+ # The other project reports its path as relative to the same directory
+ # that this project's path is relative to. The other project's path
+ # is not necessarily already relative to this project. Figure out the
+ # pathname that this project needs to use to refer to the other one.
+ this_path = posixpath.dirname(self.Path())
+ projectDirPath = self.GetProperty('projectDirPath')
+ if projectDirPath:
+ if posixpath.isabs(projectDirPath[0]):
+ this_path = projectDirPath
+ else:
+ this_path = posixpath.join(this_path, projectDirPath)
+ other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path)
+
+ # ProjectRef is weak (it's owned by the mainGroup hierarchy).
+ project_ref = PBXFileReference({
+ 'lastKnownFileType': 'wrapper.pb-project',
+ 'path': other_path,
+ 'sourceTree': 'SOURCE_ROOT',
+ })
+ self.ProjectsGroup().AppendChild(project_ref)
+
+ ref_dict = {'ProductGroup': product_group, 'ProjectRef': project_ref}
+ self._other_pbxprojects[other_pbxproject] = ref_dict
+ self.AppendProperty('projectReferences', ref_dict)
+
+ # Xcode seems to sort this list case-insensitively
+ self._properties['projectReferences'] = \
+ sorted(self._properties['projectReferences'],
+ key=lambda x: x['ProjectRef'].Name().lower())
+ else:
+ # The link already exists. Pull out the relevant data.
+ project_ref_dict = self._other_pbxprojects[other_pbxproject]
+ product_group = project_ref_dict['ProductGroup']
+ project_ref = project_ref_dict['ProjectRef']
+
+ self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
+
+ inherit_unique_symroot = self._AllSymrootsUnique(other_pbxproject, False)
+ targets = other_pbxproject.GetProperty('targets')
+ if all(self._AllSymrootsUnique(t, inherit_unique_symroot) for t in targets):
+ dir_path = project_ref._properties['path']
+ product_group._hashables.extend(dir_path)
+
+ return [product_group, project_ref]
+
+ def _AllSymrootsUnique(self, target, inherit_unique_symroot):
+ # Returns True if all configurations have a unique 'SYMROOT' attribute.
+ # The value of inherit_unique_symroot decides whether a configuration is
+ # assumed to inherit a unique 'SYMROOT' attribute from its parent when it
+ # doesn't define an explicit value for 'SYMROOT'.
+ symroots = self._DefinedSymroots(target)
+ for s in symroots:
+ if (s is not None and not self._IsUniqueSymrootForTarget(s) or
+ s is None and not inherit_unique_symroot):
+ return False
+ return True if symroots else inherit_unique_symroot
+
+ def _DefinedSymroots(self, target):
+ # Returns all values for the 'SYMROOT' attribute defined in all
+ # configurations for this target. If any configuration doesn't define the
+ # 'SYMROOT' attribute, None is added to the returned set. If no
+ # configuration defines the 'SYMROOT' attribute, an empty set is
+ # returned.
+ config_list = target.GetProperty('buildConfigurationList')
+ symroots = set()
+ for config in config_list.GetProperty('buildConfigurations'):
+ setting = config.GetProperty('buildSettings')
+ if 'SYMROOT' in setting:
+ symroots.add(setting['SYMROOT'])
+ else:
+ symroots.add(None)
+ if len(symroots) == 1 and None in symroots:
+ return set()
+ return symroots
+
+ def _IsUniqueSymrootForTarget(self, symroot):
+ # This method returns True if all configurations in target contain a
+ # 'SYMROOT' attribute that is unique for the given target. A value is
+ # considered unique if the Xcode macro '$SRCROOT' appears in it in any form.
+ uniquifier = ['$SRCROOT', '$(SRCROOT)']
+ if any(x in symroot for x in uniquifier):
+ return True
+ return False
+
+ def _SetUpProductReferences(self, other_pbxproject, product_group,
+ project_ref):
+ # TODO(mark): This only adds references to products in other_pbxproject
+ # when they don't exist in this pbxproject. Perhaps it should also
+ # remove references from this pbxproject that are no longer present in
+ # other_pbxproject. Perhaps it should update various properties if they
+ # change.
+ for target in other_pbxproject._properties['targets']:
+ if not isinstance(target, PBXNativeTarget):
+ continue
+
+ other_fileref = target._properties['productReference']
+ if product_group.GetChildByRemoteObject(other_fileref) is None:
+ # Xcode sets remoteInfo to the name of the target and not the name
+ # of its product, despite this proxy being a reference to the product.
+ container_item = PBXContainerItemProxy({
+ 'containerPortal': project_ref,
+ 'proxyType': 2,
+ 'remoteGlobalIDString': other_fileref,
+ 'remoteInfo': target.Name()
+ })
+ # TODO(mark): Does sourceTree get copied straight over from the other
+ # project? Can the other project ever have lastKnownFileType here
+ # instead of explicitFileType? (Use it if so?) Can path ever be
+ # unset? (I don't think so.) Can other_fileref have name set, and
+ # does it impact the PBXReferenceProxy if so? These are the questions
+ # that perhaps will be answered one day.
+ reference_proxy = PBXReferenceProxy({
+ 'fileType': other_fileref._properties['explicitFileType'],
+ 'path': other_fileref._properties['path'],
+ 'sourceTree': other_fileref._properties['sourceTree'],
+ 'remoteRef': container_item,
+ })
+
+ product_group.AppendChild(reference_proxy)
+
+ def SortRemoteProductReferences(self):
+ # For each remote project file, sort the associated ProductGroup in the
+ # same order that the targets are sorted in the remote project file. This
+ # is the sort order used by Xcode.
+
+ for other_pbxproject, ref_dict in self._other_pbxprojects.items():
+ # Build up a list of products in the remote project file, ordered the
+ # same as the targets that produce them.
+ remote_products = []
+ for target in other_pbxproject._properties['targets']:
+ if not isinstance(target, PBXNativeTarget):
+ continue
+ remote_products.append(target._properties['productReference'])
+
+ # Sort the PBXReferenceProxy children according to the list of remote
+ # products.
+ product_group = ref_dict['ProductGroup']
+ product_group._properties['children'] = sorted(
+ product_group._properties['children'],
+ key=lambda x: remote_products.index(x._properties['remoteRef']._properties['remoteGlobalIDString']))
+
+
+class XCProjectFile(XCObject):
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'archiveVersion': [0, int, 0, 1, 1],
+ 'classes': [0, dict, 0, 1, {}],
+ 'objectVersion': [0, int, 0, 1, 46],
+ 'rootObject': [0, PBXProject, 1, 1],
+ })
+
+ def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
+ # Although XCProjectFile is implemented here as an XCObject, it's not a
+ # proper object in the Xcode sense, and it certainly doesn't have its own
+ # ID. Pass through an attempt to update IDs to the real root object.
+ if recursive:
+ self._properties['rootObject'].ComputeIDs(recursive, overwrite, hash)
+
+ def Print(self, file=sys.stdout):
+ self.VerifyHasRequiredProperties()
+
+ # Add the special "objects" property, which will be caught and handled
+ # separately during printing. This structure allows a fairly standard
+ # loop to do the normal printing.
+ self._properties['objects'] = {}
+ self._XCPrint(file, 0, '// !$*UTF8*$!\n')
+ if self._should_print_single_line:
+ self._XCPrint(file, 0, '{ ')
+ else:
+ self._XCPrint(file, 0, '{\n')
+ for property, value in sorted(self._properties.items()):
+ if property == 'objects':
+ self._PrintObjects(file)
+ else:
+ self._XCKVPrint(file, 1, property, value)
+ self._XCPrint(file, 0, '}\n')
+ del self._properties['objects']
+
+ def _PrintObjects(self, file):
+ if self._should_print_single_line:
+ self._XCPrint(file, 0, 'objects = {')
+ else:
+ self._XCPrint(file, 1, 'objects = {\n')
+
+ objects_by_class = {}
+ for object in self.Descendants():
+ if object == self:
+ continue
+ class_name = object.__class__.__name__
+ if not class_name in objects_by_class:
+ objects_by_class[class_name] = []
+ objects_by_class[class_name].append(object)
+
+ for class_name in sorted(objects_by_class):
+ self._XCPrint(file, 0, '\n')
+ self._XCPrint(file, 0, '/* Begin ' + class_name + ' section */\n')
+ for object in sorted(objects_by_class[class_name],
+ key=lambda x: x.id):
+ object.Print(file)
+ self._XCPrint(file, 0, '/* End ' + class_name + ' section */\n')
+
+ if self._should_print_single_line:
+ self._XCPrint(file, 0, '}; ')
+ else:
+ self._XCPrint(file, 1, '};\n')
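+
+
+# Minimal end-to-end sketch (illustrative only; file names and the driving
+# code are hypothetical, not part of this module):
+#   project = PBXProject(path='sample.xcodeproj')
+#   project_file = XCProjectFile({'rootObject': project})
+#   target = PBXNativeTarget({'name': 'app',
+#                             'productType': 'com.apple.product-type.tool'},
+#                            parent=project)
+#   project.AppendProperty('targets', target)
+#   target.SourcesPhase().AddFile('main.cc')
+#   project_file.ComputeIDs()
+#   with open('sample.xcodeproj/project.pbxproj', 'w') as out:
+#     project_file.Print(out)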
diff --git a/third_party/python/gyp/pylib/gyp/xml_fix.py b/third_party/python/gyp/pylib/gyp/xml_fix.py
new file mode 100644
index 0000000000..4308d99b47
--- /dev/null
+++ b/third_party/python/gyp/pylib/gyp/xml_fix.py
@@ -0,0 +1,68 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Applies a fix to CR LF TAB handling in xml.dom.
+
+Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
+Working around this: http://bugs.python.org/issue5752
+TODO(bradnelson): Consider dropping this when we drop XP support.
+"""
+
+
+import xml.dom.minidom
+
+
+def _Replacement_write_data(writer, data, is_attrib=False):
+ """Writes datachars to writer."""
+ data = data.replace("&", "&amp;").replace("<", "&lt;")
+ data = data.replace("\"", "&quot;").replace(">", "&gt;")
+ if is_attrib:
+ data = data.replace(
+ "\r", "&#xD;").replace(
+ "\n", "&#xA;").replace(
+ "\t", "&#x9;")
+ writer.write(data)
+
+
+def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
+ # indent = current indentation
+ # addindent = indentation to add to higher levels
+ # newl = newline string
+ writer.write(indent+"<" + self.tagName)
+
+ attrs = self._get_attributes()
+ a_names = sorted(attrs.keys())
+
+ for a_name in a_names:
+ writer.write(" %s=\"" % a_name)
+ _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
+ writer.write("\"")
+ if self.childNodes:
+ writer.write(">%s" % newl)
+ for node in self.childNodes:
+ node.writexml(writer, indent + addindent, addindent, newl)
+ writer.write("%s</%s>%s" % (indent, self.tagName, newl))
+ else:
+ writer.write("/>%s" % newl)
+
+
+class XmlFix(object):
+ """Object to manage temporary patching of xml.dom.minidom."""
+
+ def __init__(self):
+ # Preserve current xml.dom.minidom functions.
+ self.write_data = xml.dom.minidom._write_data
+ self.writexml = xml.dom.minidom.Element.writexml
+ # Inject replacement versions of a function and a method.
+ xml.dom.minidom._write_data = _Replacement_write_data
+ xml.dom.minidom.Element.writexml = _Replacement_writexml
+
+ def Cleanup(self):
+ if self.write_data:
+ xml.dom.minidom._write_data = self.write_data
+ xml.dom.minidom.Element.writexml = self.writexml
+ self.write_data = None
+
+ def __del__(self):
+ self.Cleanup()
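+
+
+# Illustrative usage (assumed, not part of the original module):
+#   fix = XmlFix()
+#   try:
+#     doc.writexml(writer, indent='  ', addindent='  ', newl='\n')
+#   finally:
+#     fix.Cleanup()
+# While the fix is installed, CR, LF and TAB characters inside attribute
+# values are written as &#xD;, &#xA; and &#x9; instead of being mangled.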
diff --git a/third_party/python/gyp/samples/samples b/third_party/python/gyp/samples/samples
new file mode 100755
index 0000000000..ff26de3825
--- /dev/null
+++ b/third_party/python/gyp/samples/samples
@@ -0,0 +1,83 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import os.path
+import shutil
+import sys
+
+
+gyps = [
+ 'app/app.gyp',
+ 'base/base.gyp',
+ 'build/temp_gyp/googleurl.gyp',
+ 'build/all.gyp',
+ 'build/common.gypi',
+ 'build/external_code.gypi',
+ 'chrome/test/security_tests/security_tests.gyp',
+ 'chrome/third_party/hunspell/hunspell.gyp',
+ 'chrome/chrome.gyp',
+ 'media/media.gyp',
+ 'net/net.gyp',
+ 'printing/printing.gyp',
+ 'sdch/sdch.gyp',
+ 'skia/skia.gyp',
+ 'testing/gmock.gyp',
+ 'testing/gtest.gyp',
+ 'third_party/bzip2/bzip2.gyp',
+ 'third_party/icu38/icu38.gyp',
+ 'third_party/libevent/libevent.gyp',
+ 'third_party/libjpeg/libjpeg.gyp',
+ 'third_party/libpng/libpng.gyp',
+ 'third_party/libxml/libxml.gyp',
+ 'third_party/libxslt/libxslt.gyp',
+ 'third_party/lzma_sdk/lzma_sdk.gyp',
+ 'third_party/modp_b64/modp_b64.gyp',
+ 'third_party/npapi/npapi.gyp',
+ 'third_party/sqlite/sqlite.gyp',
+ 'third_party/zlib/zlib.gyp',
+ 'v8/tools/gyp/v8.gyp',
+ 'webkit/activex_shim/activex_shim.gyp',
+ 'webkit/activex_shim_dll/activex_shim_dll.gyp',
+ 'webkit/build/action_csspropertynames.py',
+ 'webkit/build/action_cssvaluekeywords.py',
+ 'webkit/build/action_jsconfig.py',
+ 'webkit/build/action_makenames.py',
+ 'webkit/build/action_maketokenizer.py',
+ 'webkit/build/action_useragentstylesheets.py',
+ 'webkit/build/rule_binding.py',
+ 'webkit/build/rule_bison.py',
+ 'webkit/build/rule_gperf.py',
+ 'webkit/tools/test_shell/test_shell.gyp',
+ 'webkit/webkit.gyp',
+]
+
+
+def Main(argv):
+ if len(argv) != 3 or argv[1] not in ['push', 'pull']:
+ print('Usage: %s push/pull PATH_TO_CHROME' % argv[0])
+ return 1
+
+ path_to_chrome = argv[2]
+
+ for g in gyps:
+ chrome_file = os.path.join(path_to_chrome, g)
+ local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1])
+ if argv[1] == 'push':
+ print('Copying %s to %s' % (local_file, chrome_file))
+ shutil.copyfile(local_file, chrome_file)
+ elif argv[1] == 'pull':
+ print('Copying %s to %s' % (chrome_file, local_file))
+ shutil.copyfile(chrome_file, local_file)
+ else:
+ assert False
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv))
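
The usage message above documents the expected invocation. A quick sketch (the Chromium checkout path is hypothetical):

  ./samples pull ~/chromium/src   # copy each listed gyp file from the checkout into this script's directory
  ./samples push ~/chromium/src   # copy the local copies back into the checkout
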
diff --git a/third_party/python/gyp/samples/samples.bat b/third_party/python/gyp/samples/samples.bat
new file mode 100644
index 0000000000..778d9c90f0
--- /dev/null
+++ b/third_party/python/gyp/samples/samples.bat
@@ -0,0 +1,5 @@
+@rem Copyright (c) 2009 Google Inc. All rights reserved.
+@rem Use of this source code is governed by a BSD-style license that can be
+@rem found in the LICENSE file.
+
+@python %~dp0/samples %*
diff --git a/third_party/python/gyp/setup.py b/third_party/python/gyp/setup.py
new file mode 100755
index 0000000000..75a42558d8
--- /dev/null
+++ b/third_party/python/gyp/setup.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from setuptools import setup
+
+setup(
+ name='gyp',
+ version='0.1',
+ description='Generate Your Projects',
+ author='Chromium Authors',
+ author_email='chromium-dev@googlegroups.com',
+ url='http://code.google.com/p/gyp',
+ package_dir={'': 'pylib'},
+ packages=['gyp', 'gyp.generator'],
+ entry_points={'console_scripts': ['gyp=gyp:script_main']}
+)
diff --git a/third_party/python/gyp/test/actions-bare/gyptest-bare.py b/third_party/python/gyp/test/actions-bare/gyptest-bare.py
new file mode 100755
index 0000000000..e3d6db1029
--- /dev/null
+++ b/third_party/python/gyp/test/actions-bare/gyptest-bare.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies actions which are not depended on by other targets get executed.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('bare.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+test.build('bare.gyp', chdir='relocate/src')
+
+file_content = 'Hello from bare.py\n'
+
+test.built_file_must_match('out.txt', file_content, chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/actions-bare/src/bare.gyp b/third_party/python/gyp/test/actions-bare/src/bare.gyp
new file mode 100644
index 0000000000..3d28f099d4
--- /dev/null
+++ b/third_party/python/gyp/test/actions-bare/src/bare.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'bare',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'action1',
+ 'inputs': [
+ 'bare.py',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/out.txt',
+ ],
+ 'action': ['python', 'bare.py', '<(PRODUCT_DIR)/out.txt'],
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/actions-bare/src/bare.py b/third_party/python/gyp/test/actions-bare/src/bare.py
new file mode 100755
index 0000000000..e153b774f9
--- /dev/null
+++ b/third_party/python/gyp/test/actions-bare/src/bare.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+f = open(sys.argv[1], 'w')
+f.write('Hello from bare.py\n')
+f.close()
diff --git a/third_party/python/gyp/test/actions-depfile/depfile.gyp b/third_party/python/gyp/test/actions-depfile/depfile.gyp
new file mode 100644
index 0000000000..617fe705b5
--- /dev/null
+++ b/third_party/python/gyp/test/actions-depfile/depfile.gyp
@@ -0,0 +1,42 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'depfile_target',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'depfile_action',
+ 'inputs': [
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output.txt',
+ ],
+ 'depfile': 'depfile_action.d',
+ 'action': [
+ 'python', 'touch.py', '<(PRODUCT_DIR)/<(_depfile)',
+ ],
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'depfile_action_intermediate_dir',
+ 'inputs': [
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output-intermediate.txt',
+ ],
+ 'depfile': '<(INTERMEDIATE_DIR)/depfile_action_intermediate_dir.d',
+ 'action': [
+ 'python', 'touch.py', '<(_depfile)',
+ ],
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
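
For context, ninja interprets the file named by 'depfile' using Makefile dependency syntax (the same format gcc -MD emits), so a real action would typically write a line like the one below into depfile_action.d. The touch.py used by this test only creates the file, so the contents shown here are an illustrative assumption:

  output.txt: input.txt some/discovered/header.h
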
diff --git a/third_party/python/gyp/test/actions-depfile/gyptest-all.py b/third_party/python/gyp/test/actions-depfile/gyptest-all.py
new file mode 100644
index 0000000000..68b32d611c
--- /dev/null
+++ b/third_party/python/gyp/test/actions-depfile/gyptest-all.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Verifies that depfile fields are output in ninja rules."""
+
+import TestGyp
+import os
+
+test = TestGyp.TestGyp()
+
+if test.format == 'ninja':
+ test.run_gyp('depfile.gyp')
+ contents = open(test.built_file_path('obj/depfile_target.ninja')).read()
+
+ expected = [
+ 'depfile = depfile_action.d',
+ 'depfile = ' + os.path.join(
+ 'obj', 'depfile_target.gen/depfile_action_intermediate_dir.d'),
+ ]
+ test.must_contain_all_lines(contents, expected)
+
+ test.build('depfile.gyp')
+ test.built_file_must_exist('depfile_action.d')
+ test.built_file_must_exist(
+ 'obj/depfile_target.gen/depfile_action_intermediate_dir.d')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/actions-depfile/input.txt b/third_party/python/gyp/test/actions-depfile/input.txt
new file mode 100644
index 0000000000..3f9177e45e
--- /dev/null
+++ b/third_party/python/gyp/test/actions-depfile/input.txt
@@ -0,0 +1 @@
+input
diff --git a/third_party/python/gyp/test/actions-depfile/touch.py b/third_party/python/gyp/test/actions-depfile/touch.py
new file mode 100644
index 0000000000..57f8316093
--- /dev/null
+++ b/third_party/python/gyp/test/actions-depfile/touch.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+"""Cross-platform touch."""
+
+for fname in sys.argv[1:]:
+ if os.path.exists(fname):
+ os.utime(fname, None)
+ else:
+ if not os.path.exists(os.path.join('.', os.path.dirname(fname))):
+ os.makedirs(os.path.dirname(fname))
+ open(fname, 'w').close()
diff --git a/third_party/python/gyp/test/actions-multiple-outputs-with-dependencies/gyptest-action.py b/third_party/python/gyp/test/actions-multiple-outputs-with-dependencies/gyptest-action.py
new file mode 100755
index 0000000000..a9d218282c
--- /dev/null
+++ b/third_party/python/gyp/test/actions-multiple-outputs-with-dependencies/gyptest-action.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies actions with multiple outputs and dependencies will correctly rebuild.
+
+This is a regression test for crrev.com/1177163002.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+import os
+import sys
+import time
+
+if sys.platform in ('darwin', 'win32'):
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+test = TestGyp.TestGyp()
+
+TESTDIR = 'relocate/src'
+test.run_gyp('action.gyp', chdir='src')
+test.relocate('src', TESTDIR)
+
+def build_and_check(content):
+ test.write(TESTDIR + '/input.txt', content)
+ test.build('action.gyp', 'upper', chdir=TESTDIR)
+ test.built_file_must_match('result.txt', content, chdir=TESTDIR)
+
+build_and_check('Content for first build.')
+
+# Ninja works with timestamps and the test above is fast enough that the
+# 'updated' file may end up with the same timestamp as the original, meaning
+# that ninja may not always recognize the input file has changed.
+if test.format == 'ninja':
+ time.sleep(1)
+
+build_and_check('An updated input file.')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/actions-multiple-outputs-with-dependencies/src/action.gyp b/third_party/python/gyp/test/actions-multiple-outputs-with-dependencies/src/action.gyp
new file mode 100644
index 0000000000..a305d65ea9
--- /dev/null
+++ b/third_party/python/gyp/test/actions-multiple-outputs-with-dependencies/src/action.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'upper',
+ 'type': 'none',
+ 'actions': [{
+ 'action_name': 'upper_action',
+ 'inputs': ['<(PRODUCT_DIR)/out2.txt'],
+ 'outputs': ['<(PRODUCT_DIR)/result.txt'],
+ 'action': ['python', 'rcopy.py', '<@(_inputs)', '<@(_outputs)'],
+ }],
+ },
+ {
+ 'target_name': 'lower',
+ 'type': 'none',
+ 'actions': [{
+ 'action_name': 'lower_action',
+ 'inputs': ['input.txt'],
+ 'outputs': ['<(PRODUCT_DIR)/out1.txt', '<(PRODUCT_DIR)/out2.txt'],
+ 'action': ['python', 'rcopy.py', '<@(_inputs)', '<@(_outputs)'],
+ }],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/actions-multiple-outputs-with-dependencies/src/rcopy.py b/third_party/python/gyp/test/actions-multiple-outputs-with-dependencies/src/rcopy.py
new file mode 100644
index 0000000000..fb029598c4
--- /dev/null
+++ b/third_party/python/gyp/test/actions-multiple-outputs-with-dependencies/src/rcopy.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+"""A slightly odd 'cp' implementation for this test.
+
+This 'cp' can have many targets, but only one source. 'cp src dest1 dest2'
+will copy the file 'src' to both 'dest1' and 'dest2'."""
+
+with open(sys.argv[1], 'r') as f:
+ src = f.read()
+for dest in sys.argv[2:]:
+ with open(dest, 'w') as f:
+ f.write(src)
+
diff --git a/third_party/python/gyp/test/actions-multiple-outputs/gyptest-multiple-outputs.py b/third_party/python/gyp/test/actions-multiple-outputs/gyptest-multiple-outputs.py
new file mode 100755
index 0000000000..5e2682d00f
--- /dev/null
+++ b/third_party/python/gyp/test/actions-multiple-outputs/gyptest-multiple-outputs.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies actions with multiple outputs will correctly rebuild.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+import os
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('multiple-outputs.gyp', chdir='src')
+
+chdir = 'relocate/src'
+test.relocate('src', chdir)
+
+def build_and_check():
+ # Build + check that both outputs exist.
+ test.build('multiple-outputs.gyp', chdir=chdir)
+ test.built_file_must_exist('out1.txt', chdir=chdir)
+ test.built_file_must_exist('out2.txt', chdir=chdir)
+
+# Plain build.
+build_and_check()
+
+# Remove one output and rebuild. Both should exist (again).
+os.remove(test.built_file_path('out1.txt', chdir=chdir))
+build_and_check()
+
+# Remove the other output and rebuild. Both should exist (again).
+os.remove(test.built_file_path('out2.txt', chdir=chdir))
+build_and_check()
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/actions-multiple-outputs/src/multiple-outputs.gyp b/third_party/python/gyp/test/actions-multiple-outputs/src/multiple-outputs.gyp
new file mode 100644
index 0000000000..7a3d74b11a
--- /dev/null
+++ b/third_party/python/gyp/test/actions-multiple-outputs/src/multiple-outputs.gyp
@@ -0,0 +1,23 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'multiple-outputs',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'action1',
+ 'inputs': [],
+ 'outputs': [
+ '<(PRODUCT_DIR)/out1.txt',
+ '<(PRODUCT_DIR)/out2.txt',
+ ],
+ 'action': ['python', 'touch.py', '<@(_outputs)'],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/actions-multiple-outputs/src/touch.py b/third_party/python/gyp/test/actions-multiple-outputs/src/touch.py
new file mode 100644
index 0000000000..bc61267f39
--- /dev/null
+++ b/third_party/python/gyp/test/actions-multiple-outputs/src/touch.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+"""Cross-platform touch."""
+
+for fname in sys.argv[1:]:
+ if os.path.exists(fname):
+ os.utime(fname, None)
+ else:
+ open(fname, 'w').close()
diff --git a/third_party/python/gyp/test/actions-multiple/gyptest-all.py b/third_party/python/gyp/test/actions-multiple/gyptest-all.py
new file mode 100755
index 0000000000..2a083de9b0
--- /dev/null
+++ b/third_party/python/gyp/test/actions-multiple/gyptest-all.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies two actions can be attached to the same input files.
+"""
+
+import sys
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('actions.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# Test of fine-grained dependencies for generators that can build individual
+# files on demand.
+# In particular:
+# - TargetA depends on TargetB.
+# - TargetA and TargetB are 'none' type with actions attached.
+# - TargetA has multiple actions.
+# - An output from one of the actions in TargetA (not the first listed),
+# is requested as the build target.
+# Ensure that TargetB gets built.
+#
+# This sub-test can only be done with generators/build tools that can
+# be asked to build individual files rather than whole targets (make, ninja).
+if test.format in ['make', 'ninja']:
+ # Select location of target based on generator.
+ if test.format == 'make':
+ target = 'multi2.txt'
+ elif test.format == 'ninja':
+ if sys.platform in ['win32', 'cygwin']:
+ target = '..\\..\\multi2.txt'
+ else:
+ target = '../../multi2.txt'
+ else:
+ assert False
+ test.build('actions.gyp', chdir='relocate/src', target=target)
+ test.must_contain('relocate/src/multi2.txt', 'hello there')
+ test.must_contain('relocate/src/multi_dep.txt', 'hello there')
+
+
+# Test that two actions can be attached to the same inputs.
+test.build('actions.gyp', test.ALL, chdir='relocate/src')
+test.must_contain('relocate/src/output1.txt', 'hello there')
+test.must_contain('relocate/src/output2.txt', 'hello there')
+test.must_contain('relocate/src/output3.txt', 'hello there')
+test.must_contain('relocate/src/output4.txt', 'hello there')
+
+# Test that process_outputs_as_sources works in conjunction with merged
+# actions.
+test.run_built_executable(
+ 'multiple_action_source_filter',
+ chdir='relocate/src',
+ stdout=(
+ '{\n'
+ 'bar\n'
+ 'car\n'
+ 'dar\n'
+ 'ear\n'
+ '}\n'
+ ),
+)
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/actions-multiple/src/actions.gyp b/third_party/python/gyp/test/actions-multiple/src/actions.gyp
new file mode 100644
index 0000000000..d7423b589f
--- /dev/null
+++ b/third_party/python/gyp/test/actions-multiple/src/actions.gyp
@@ -0,0 +1,226 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ # Have a long string so that actions will exceed the 512-character
+ # command limit on XP.
+ 'long_string':
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ 'abcdefghijklmnopqrstuvwxyz0123456789'
+ },
+ 'targets': [
+ {
+ 'target_name': 'multiple_action_target',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'action1',
+ 'inputs': [
+ 'copyfile.py',
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output1.txt',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'action2',
+ 'inputs': [
+ 'copyfile.py',
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output2.txt',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'action3',
+ 'inputs': [
+ 'copyfile.py',
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output3.txt',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'action4',
+ 'inputs': [
+ 'copyfile.py',
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output4.txt',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'multiple_action_source_filter',
+ 'type': 'executable',
+ 'sources': [
+ 'main.c',
+ # TODO(bradnelson): add foo.c here once this issue is fixed:
+ # http://code.google.com/p/gyp/issues/detail?id=175
+ ],
+ 'actions': [
+ {
+ 'action_name': 'action1',
+ 'inputs': [
+ 'foo.c',
+ 'filter.py',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/output1.c',
+ ],
+ 'process_outputs_as_sources': 1,
+ 'action': [
+ 'python', 'filter.py', 'foo', 'bar', 'foo.c', '<@(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'action2',
+ 'inputs': [
+ 'foo.c',
+ 'filter.py',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/output2.c',
+ ],
+ 'process_outputs_as_sources': 1,
+ 'action': [
+ 'python', 'filter.py', 'foo', 'car', 'foo.c', '<@(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'action3',
+ 'inputs': [
+ 'foo.c',
+ 'filter.py',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/output3.c',
+ ],
+ 'process_outputs_as_sources': 1,
+ 'action': [
+ 'python', 'filter.py', 'foo', 'dar', 'foo.c', '<@(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'action4',
+ 'inputs': [
+ 'foo.c',
+ 'filter.py',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/output4.c',
+ ],
+ 'process_outputs_as_sources': 1,
+ 'action': [
+ 'python', 'filter.py', 'foo', 'ear', 'foo.c', '<@(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'multiple_dependent_target',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'action1',
+ 'inputs': [
+ 'copyfile.py',
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'multi1.txt',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'action2',
+ 'inputs': [
+ 'copyfile.py',
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'multi2.txt',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ 'dependencies': [
+ 'multiple_required_target',
+ ],
+ },
+ {
+ 'target_name': 'multiple_required_target',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'multi_dep',
+ 'inputs': [
+ 'copyfile.py',
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'multi_dep.txt',
+ ],
+ 'process_outputs_as_sources': 1,
+ 'action': [
+ 'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/actions-multiple/src/copyfile.py b/third_party/python/gyp/test/actions-multiple/src/copyfile.py
new file mode 100755
index 0000000000..0774679380
--- /dev/null
+++ b/third_party/python/gyp/test/actions-multiple/src/copyfile.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import shutil
+import sys
+
+shutil.copyfile(sys.argv[1], sys.argv[2])
diff --git a/third_party/python/gyp/test/actions-multiple/src/filter.py b/third_party/python/gyp/test/actions-multiple/src/filter.py
new file mode 100755
index 0000000000..f61a5fa59a
--- /dev/null
+++ b/third_party/python/gyp/test/actions-multiple/src/filter.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import sys
+
+data = open(sys.argv[3], 'r').read()
+fh = open(sys.argv[4], 'w')
+fh.write(data.replace(sys.argv[1], sys.argv[2]))
+fh.close()
diff --git a/third_party/python/gyp/test/actions-multiple/src/foo.c b/third_party/python/gyp/test/actions-multiple/src/foo.c
new file mode 100644
index 0000000000..23c4ef7f26
--- /dev/null
+++ b/third_party/python/gyp/test/actions-multiple/src/foo.c
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+void foo(void) {
+ printf("foo\n");
+}
diff --git a/third_party/python/gyp/test/actions-multiple/src/input.txt b/third_party/python/gyp/test/actions-multiple/src/input.txt
new file mode 100644
index 0000000000..c7c7da3c64
--- /dev/null
+++ b/third_party/python/gyp/test/actions-multiple/src/input.txt
@@ -0,0 +1 @@
+hello there
diff --git a/third_party/python/gyp/test/actions-multiple/src/main.c b/third_party/python/gyp/test/actions-multiple/src/main.c
new file mode 100644
index 0000000000..0a420b9034
--- /dev/null
+++ b/third_party/python/gyp/test/actions-multiple/src/main.c
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+void bar(void);
+void car(void);
+void dar(void);
+void ear(void);
+
+int main() {
+ printf("{\n");
+ bar();
+ car();
+ dar();
+ ear();
+ printf("}\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/actions-none/gyptest-none.py b/third_party/python/gyp/test/actions-none/gyptest-none.py
new file mode 100755
index 0000000000..933cfad30c
--- /dev/null
+++ b/third_party/python/gyp/test/actions-none/gyptest-none.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies actions can be in 'none' type targets with source files.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('none_with_source_files.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+test.build('none_with_source_files.gyp', chdir='relocate/src')
+
+file_content = 'foo.cc\n'
+
+test.built_file_must_match('fake.out', file_content, chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/actions-none/src/fake_cross.py b/third_party/python/gyp/test/actions-none/src/fake_cross.py
new file mode 100644
index 0000000000..a03ea87fc9
--- /dev/null
+++ b/third_party/python/gyp/test/actions-none/src/fake_cross.py
@@ -0,0 +1,12 @@
+#!/usr/bin/python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import sys
+
+fh = open(sys.argv[-1], 'w')
+for filename in sys.argv[1:-1]:
+ fh.write(open(filename).read())
+fh.close()
diff --git a/third_party/python/gyp/test/actions-none/src/foo.cc b/third_party/python/gyp/test/actions-none/src/foo.cc
new file mode 100644
index 0000000000..c6c61745ba
--- /dev/null
+++ b/third_party/python/gyp/test/actions-none/src/foo.cc
@@ -0,0 +1 @@
+foo.cc
diff --git a/third_party/python/gyp/test/actions-none/src/none_with_source_files.gyp b/third_party/python/gyp/test/actions-none/src/none_with_source_files.gyp
new file mode 100644
index 0000000000..e2aaebc10a
--- /dev/null
+++ b/third_party/python/gyp/test/actions-none/src/none_with_source_files.gyp
@@ -0,0 +1,35 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Test that 'none' type targets can have .cc files in them.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'none_with_sources',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'foo.cc',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'fake_cross',
+ 'inputs': [
+ 'fake_cross.py',
+ '<@(_sources)',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/fake.out',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<@(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/actions-subdir/gyptest-action.py b/third_party/python/gyp/test/actions-subdir/gyptest-action.py
new file mode 100755
index 0000000000..09cfef1893
--- /dev/null
+++ b/third_party/python/gyp/test/actions-subdir/gyptest-action.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test actions that output to PRODUCT_DIR.
+"""
+
+import TestGyp
+
+# TODO fix this for xcode: http://code.google.com/p/gyp/issues/detail?id=88
+test = TestGyp.TestGyp(formats=['!xcode'])
+
+test.run_gyp('none.gyp', chdir='src')
+
+test.build('none.gyp', test.ALL, chdir='src')
+
+file_content = 'Hello from make-file.py\n'
+subdir_file_content = 'Hello from make-subdir-file.py\n'
+
+test.built_file_must_match('file.out', file_content, chdir='src')
+test.built_file_must_match('subdir_file.out', subdir_file_content, chdir='src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/actions-subdir/src/make-file.py b/third_party/python/gyp/test/actions-subdir/src/make-file.py
new file mode 100755
index 0000000000..6055ab9bb3
--- /dev/null
+++ b/third_party/python/gyp/test/actions-subdir/src/make-file.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = 'Hello from make-file.py\n'
+
+open(sys.argv[1], 'w').write(contents)
diff --git a/third_party/python/gyp/test/actions-subdir/src/none.gyp b/third_party/python/gyp/test/actions-subdir/src/none.gyp
new file mode 100644
index 0000000000..23f8d25a53
--- /dev/null
+++ b/third_party/python/gyp/test/actions-subdir/src/none.gyp
@@ -0,0 +1,31 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'file',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'actions': [
+ {
+ 'action_name': 'make-file',
+ 'inputs': [
+ 'make-file.py',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/file.out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ }
+ ],
+ 'dependencies': [
+ 'subdir/subdir.gyp:subdir_file',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/actions-subdir/src/subdir/make-subdir-file.py b/third_party/python/gyp/test/actions-subdir/src/subdir/make-subdir-file.py
new file mode 100755
index 0000000000..02c090a021
--- /dev/null
+++ b/third_party/python/gyp/test/actions-subdir/src/subdir/make-subdir-file.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = 'Hello from make-subdir-file.py\n'
+
+open(sys.argv[1], 'w').write(contents)
diff --git a/third_party/python/gyp/test/actions-subdir/src/subdir/subdir.gyp b/third_party/python/gyp/test/actions-subdir/src/subdir/subdir.gyp
new file mode 100644
index 0000000000..0315d4eb83
--- /dev/null
+++ b/third_party/python/gyp/test/actions-subdir/src/subdir/subdir.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'subdir_file',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'actions': [
+ {
+ 'action_name': 'make-subdir-file',
+ 'inputs': [
+ 'make-subdir-file.py',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/subdir_file.out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ }
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/actions/generated-header/action.py b/third_party/python/gyp/test/actions/generated-header/action.py
new file mode 100644
index 0000000000..9be98798d6
--- /dev/null
+++ b/third_party/python/gyp/test/actions/generated-header/action.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+outfile = sys.argv[1]
+open(outfile, 'w').write('const char kFoo[] = "%s";' % sys.argv[2])
diff --git a/third_party/python/gyp/test/actions/generated-header/main.cc b/third_party/python/gyp/test/actions/generated-header/main.cc
new file mode 100644
index 0000000000..7973781bc6
--- /dev/null
+++ b/third_party/python/gyp/test/actions/generated-header/main.cc
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+#include "MyHeader.h"
+
+int main() {
+ printf("%s\n", kFoo);
+}
diff --git a/third_party/python/gyp/test/actions/generated-header/test.gyp b/third_party/python/gyp/test/actions/generated-header/test.gyp
new file mode 100644
index 0000000000..209b951ef6
--- /dev/null
+++ b/third_party/python/gyp/test/actions/generated-header/test.gyp
@@ -0,0 +1,34 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'generate_header',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'inputs': [ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/MyHeader.h',
+ ],
+ 'action_name': 'generate header',
+ 'action': ['python', './action.py',
+ '<(SHARED_INTERMEDIATE_DIR)/MyHeader.h', 'foobar output' ],
+ },
+ ],
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'dependencies': [
+ 'generate_header',
+ ],
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ],
+ 'sources': [ 'main.cc' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/actions/gyptest-all.py b/third_party/python/gyp/test/actions/gyptest-all.py
new file mode 100755
index 0000000000..c8833a5d1e
--- /dev/null
+++ b/third_party/python/gyp/test/actions/gyptest-all.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple actions when using an explicit build target of 'all'.
+"""
+
+import glob
+import os
+import TestGyp
+
+test = TestGyp.TestGyp(workdir='workarea_all')
+
+test.run_gyp('actions.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# Some gyp files use an action that mentions an output but never
+# writes it as a means of making the action run on every build. That
+# doesn't mesh well with ninja's semantics. TODO(evan): figure out
+# how to work always-run actions into ninja.
+if test.format in ['ninja', 'xcode-ninja']:
+ test.build('actions.gyp', test.ALL, chdir='relocate/src')
+else:
+ # Test that an "always run" action increases a counter on multiple
+ # invocations, and that a dependent action updates in step.
+ test.build('actions.gyp', test.ALL, chdir='relocate/src')
+ test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
+ test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
+ test.build('actions.gyp', test.ALL, chdir='relocate/src')
+ test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
+ test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
+
+ # The "always run" action only counts to 2, but the dependent target
+ # will count forever if it's allowed to run. This verifies that the
+ # dependent target only runs when the "always run" action generates
+ # new output, not just because the "always run" ran.
+ test.build('actions.gyp', test.ALL, chdir='relocate/src')
+ test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
+ test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
+
+expect = """\
+Hello from program.c
+Hello from make-prog1.py
+Hello from make-prog2.py
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir1'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('program', chdir=chdir, stdout=expect)
+
+
+test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")
+
+
+expect = "Hello from generate_main.py\n"
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir3'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('null_input', chdir=chdir, stdout=expect)
+
+
+# Clean out files which may have been created if test.ALL was run.
+def clean_dep_files():
+ for file in (glob.glob('relocate/src/dep_*.txt') +
+ glob.glob('relocate/src/deps_all_done_*.txt')):
+ if os.path.exists(file):
+ os.remove(file)
+
+# Confirm our clean.
+clean_dep_files()
+test.must_not_exist('relocate/src/dep_1.txt')
+test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
+
+# Make sure all deps finish before an action is run on a 'None' target.
+# If using the Make builder, add -j to make things more difficult.
+arguments = []
+if test.format == 'make':
+ arguments = ['-j']
+test.build('actions.gyp', 'action_with_dependencies_123', chdir='relocate/src',
+ arguments=arguments)
+test.must_exist('relocate/src/deps_all_done_first_123.txt')
+
+# Try again with a target that has its deps in reverse order. Output files
+# from previous tests are deleted first. Confirm this execution did NOT run
+# the ALL target, which would mess up our dep tests.
+clean_dep_files()
+test.build('actions.gyp', 'action_with_dependencies_321', chdir='relocate/src',
+ arguments=arguments)
+test.must_exist('relocate/src/deps_all_done_first_321.txt')
+test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/actions/gyptest-default.py b/third_party/python/gyp/test/actions/gyptest-default.py
new file mode 100755
index 0000000000..70c99ec9ce
--- /dev/null
+++ b/third_party/python/gyp/test/actions/gyptest-default.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple actions when using the default build target.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(workdir='workarea_default')
+
+test.run_gyp('actions.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# Some gyp files use an action that mentions an output but never
+# writes it as a means of making the action run on every build. That
+# doesn't mesh well with ninja's semantics. TODO(evan): figure out
+# how to work always-run actions into ninja.
+if test.format in ['ninja', 'xcode-ninja']:
+ test.build('actions.gyp', test.ALL, chdir='relocate/src')
+else:
+ # Test that an "always run" action increases a counter on multiple
+ # invocations, and that a dependent action updates in step.
+ test.build('actions.gyp', chdir='relocate/src')
+ test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
+ test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
+ test.build('actions.gyp', chdir='relocate/src')
+ test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
+ test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
+
+ # The "always run" action only counts to 2, but the dependent target
+ # will count forever if it's allowed to run. This verifies that the
+ # dependent target only runs when the "always run" action generates
+ # new output, not just because the "always run" ran.
+ test.build('actions.gyp', test.ALL, chdir='relocate/src')
+ test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
+ test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
+
+expect = """\
+Hello from program.c
+Hello from make-prog1.py
+Hello from make-prog2.py
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir1'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('program', chdir=chdir, stdout=expect)
+
+
+test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")
+
+
+expect = "Hello from generate_main.py\n"
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir3'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('null_input', chdir=chdir, stdout=expect)
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/actions/gyptest-errors.py b/third_party/python/gyp/test/actions/gyptest-errors.py
new file mode 100755
index 0000000000..e1ef883e1e
--- /dev/null
+++ b/third_party/python/gyp/test/actions/gyptest-errors.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies behavior for different action configuration errors:
+exit status of 1, and the expected error message must be in stderr.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(workdir='workarea_errors')
+
+
+test.run_gyp('action_missing_name.gyp', chdir='src', status=1, stderr=None)
+expect = [
+ "Anonymous action in target broken_actions2. An action must have an 'action_name' field.",
+]
+test.must_contain_all_lines(test.stderr(), expect)
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/actions/gyptest-generated-header.py b/third_party/python/gyp/test/actions/gyptest-generated-header.py
new file mode 100644
index 0000000000..cd5bd691a6
--- /dev/null
+++ b/third_party/python/gyp/test/actions/gyptest-generated-header.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that dependencies on generated headers work, even if the header has
+a mixed-case file name.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+CHDIR = 'generated-header'
+
+test.run_gyp('test.gyp', chdir=CHDIR)
+test.build('test.gyp', 'program', chdir=CHDIR)
+test.up_to_date('test.gyp', 'program', chdir=CHDIR)
+
+expect = 'foobar output\n'
+test.run_built_executable('program', chdir=CHDIR, stdout=expect)
+
+# Change what's written to the generated header, regyp and rebuild, and check
+# that the change makes it to the executable and that the build is clean.
+test.sleep()
+test.write('generated-header/test.gyp',
+ test.read('generated-header/test.gyp').replace('foobar', 'barbaz'))
+
+test.run_gyp('test.gyp', chdir=CHDIR)
+test.build('test.gyp', 'program', chdir=CHDIR)
+test.up_to_date('test.gyp', 'program', chdir=CHDIR)
+
+expect = 'barbaz output\n'
+test.run_built_executable('program', chdir=CHDIR, stdout=expect)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/actions/src/action_missing_name.gyp b/third_party/python/gyp/test/actions/src/action_missing_name.gyp
new file mode 100644
index 0000000000..6647aac3b5
--- /dev/null
+++ b/third_party/python/gyp/test/actions/src/action_missing_name.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'broken_actions2',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'inputs': [
+ 'no_name.input',
+ ],
+ 'action': [
+ 'python',
+ '-c',
+ 'from __future__ import print_function; print(\'missing name\')',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/actions/src/actions.gyp b/third_party/python/gyp/test/actions/src/actions.gyp
new file mode 100644
index 0000000000..5d2db1955e
--- /dev/null
+++ b/third_party/python/gyp/test/actions/src/actions.gyp
@@ -0,0 +1,114 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'pull_in_all_actions',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir1/executable.gyp:*',
+ 'subdir2/none.gyp:*',
+ 'subdir3/null_input.gyp:*',
+ ],
+ },
+ {
+ 'target_name': 'depend_on_always_run_action',
+ 'type': 'none',
+ 'dependencies': [ 'subdir1/executable.gyp:counter' ],
+ 'actions': [
+ {
+ 'action_name': 'use_always_run_output',
+ 'inputs': [
+ 'subdir1/actions-out/action-counter.txt',
+ 'subdir1/counter.py',
+ ],
+ 'outputs': [
+ 'subdir1/actions-out/action-counter_2.txt',
+ ],
+ 'action': [
+ 'python', 'subdir1/counter.py', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+
+ # Three deps which don't finish immediately.
+ # Each one has a small delay then creates a file.
+ # Delays are 1.0, 1.1, and 2.0 seconds.
+ {
+ 'target_name': 'dep_1',
+ 'type': 'none',
+ 'actions': [{
+ 'inputs': [ 'actions.gyp' ],
+ 'outputs': [ 'dep_1.txt' ],
+ 'action_name': 'dep_1',
+ 'action': [ 'python', '-c',
+ 'import time; time.sleep(1); open(\'dep_1.txt\', \'w\')' ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }],
+ },
+ {
+ 'target_name': 'dep_2',
+ 'type': 'none',
+ 'actions': [{
+ 'inputs': [ 'actions.gyp' ],
+ 'outputs': [ 'dep_2.txt' ],
+ 'action_name': 'dep_2',
+ 'action': [ 'python', '-c',
+ 'import time; time.sleep(1.1); open(\'dep_2.txt\', \'w\')' ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }],
+ },
+ {
+ 'target_name': 'dep_3',
+ 'type': 'none',
+ 'actions': [{
+ 'inputs': [ 'actions.gyp' ],
+ 'outputs': [ 'dep_3.txt' ],
+ 'action_name': 'dep_3',
+ 'action': [ 'python', '-c',
+ 'import time; time.sleep(2.0); open(\'dep_3.txt\', \'w\')' ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }],
+ },
+
+ # An action which assumes the deps have completed.
+ # Does NOT list the output files of its deps as inputs.
+ # On success, creates the file deps_all_done_first.txt.
+ {
+ 'target_name': 'action_with_dependencies_123',
+ 'type': 'none',
+ 'dependencies': [ 'dep_1', 'dep_2', 'dep_3' ],
+ 'actions': [{
+ 'inputs': [ 'actions.gyp' ],
+ 'outputs': [ 'deps_all_done_first_123.txt' ],
+ 'action_name': 'action_with_dependencies_123',
+ 'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }],
+ },
+ # Same as above but with deps in reverse.
+ {
+ 'target_name': 'action_with_dependencies_321',
+ 'type': 'none',
+ 'dependencies': [ 'dep_3', 'dep_2', 'dep_1' ],
+ 'actions': [{
+ 'inputs': [ 'actions.gyp' ],
+ 'outputs': [ 'deps_all_done_first_321.txt' ],
+ 'action_name': 'action_with_dependencies_321',
+ 'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }],
+ },
+
+ ],
+}
diff --git a/third_party/python/gyp/test/actions/src/confirm-dep-files.py b/third_party/python/gyp/test/actions/src/confirm-dep-files.py
new file mode 100755
index 0000000000..3b8463057d
--- /dev/null
+++ b/third_party/python/gyp/test/actions/src/confirm-dep-files.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Confirms presence of files generated by our targets we depend on.
+If they exist, create a new file.
+
+Note target's input files are explicitly NOT defined in the gyp file
+so they can't easily be passed to this script as args.
+"""
+
+import os
+import sys
+
+outfile = sys.argv[1] # Example value we expect: deps_all_done_first_123.txt
+if (os.path.exists("dep_1.txt") and
+ os.path.exists("dep_2.txt") and
+ os.path.exists("dep_3.txt")):
+ open(outfile, "w")
diff --git a/third_party/python/gyp/test/actions/src/subdir1/counter.py b/third_party/python/gyp/test/actions/src/subdir1/counter.py
new file mode 100755
index 0000000000..d888f2e803
--- /dev/null
+++ b/third_party/python/gyp/test/actions/src/subdir1/counter.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+import time
+
+output = sys.argv[1]
+persistoutput = "%s.persist" % sys.argv[1]
+
+count = 0
+try:
+ count = open(persistoutput, 'r').read()
+except:
+ pass
+count = int(count) + 1
+
+if len(sys.argv) > 2:
+ max_count = int(sys.argv[2])
+ if count > max_count:
+ count = max_count
+
+oldcount = 0
+try:
+ oldcount = open(output, 'r').read()
+except:
+ pass
+
+# Save the count in a file that is undeclared, and thus hidden, to gyp. We need
+# to do this because, prior to running commands, some build systems delete
+# any declared outputs, so we would lose our count if we just wrote to the
+# given output file.
+open(persistoutput, 'w').write('%d' % (count))
+
+# Only write the given output file if the count has changed.
+if int(oldcount) != count:
+ open(output, 'w').write('%d' % (count))
+ # Sleep so the next run changes the file time sufficiently to make the build
+ # detect the file as changed.
+ time.sleep(1)
+
+sys.exit(0)
diff --git a/third_party/python/gyp/test/actions/src/subdir1/executable.gyp b/third_party/python/gyp/test/actions/src/subdir1/executable.gyp
new file mode 100644
index 0000000000..6a1ce4f91e
--- /dev/null
+++ b/third_party/python/gyp/test/actions/src/subdir1/executable.gyp
@@ -0,0 +1,74 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'program.c',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'make-prog1',
+ 'inputs': [
+ 'make-prog1.py',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/prog1.c',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ {
+ 'action_name': 'make-prog2',
+ 'inputs': [
+ 'make-prog2.py',
+ ],
+ 'outputs': [
+ 'actions-out/prog2.c',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'counter',
+ 'type': 'none',
+ 'actions': [
+ {
+ # This action should always run, regardless of whether or not its
+ # inputs or the command-line change. We do this by creating a dummy
+ # first output, which is always missing, thus causing the build to
+ # always try to recreate it. Actual output files should be listed
+ # after the dummy one, and dependent targets should list the real
+ # output(s) in their inputs
+ # (see '../actions.gyp:depend_on_always_run_action').
+ 'action_name': 'action_counter',
+ 'inputs': [
+ 'counter.py',
+ ],
+ 'outputs': [
+ 'actions-out/action-counter.txt.always',
+ 'actions-out/action-counter.txt',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', 'actions-out/action-counter.txt', '2',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/actions/src/subdir1/make-prog1.py b/third_party/python/gyp/test/actions/src/subdir1/make-prog1.py
new file mode 100755
index 0000000000..7ea1d8a2d4
--- /dev/null
+++ b/third_party/python/gyp/test/actions/src/subdir1/make-prog1.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = r"""
+#include <stdio.h>
+
+void prog1(void)
+{
+ printf("Hello from make-prog1.py\n");
+}
+"""
+
+open(sys.argv[1], 'w').write(contents)
+
+sys.exit(0)
diff --git a/third_party/python/gyp/test/actions/src/subdir1/make-prog2.py b/third_party/python/gyp/test/actions/src/subdir1/make-prog2.py
new file mode 100755
index 0000000000..0bfe4973c2
--- /dev/null
+++ b/third_party/python/gyp/test/actions/src/subdir1/make-prog2.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = r"""
+#include <stdio.h>
+
+void prog2(void)
+{
+ printf("Hello from make-prog2.py\n");
+}
+"""
+
+open(sys.argv[1], 'w').write(contents)
+
+sys.exit(0)
diff --git a/third_party/python/gyp/test/actions/src/subdir1/program.c b/third_party/python/gyp/test/actions/src/subdir1/program.c
new file mode 100644
index 0000000000..c0931534eb
--- /dev/null
+++ b/third_party/python/gyp/test/actions/src/subdir1/program.c
@@ -0,0 +1,12 @@
+#include <stdio.h>
+
+extern void prog1(void);
+extern void prog2(void);
+
+int main(void)
+{
+ printf("Hello from program.c\n");
+ prog1();
+ prog2();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/actions/src/subdir2/make-file.py b/third_party/python/gyp/test/actions/src/subdir2/make-file.py
new file mode 100755
index 0000000000..088a05e0b0
--- /dev/null
+++ b/third_party/python/gyp/test/actions/src/subdir2/make-file.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = "Hello from make-file.py\n"
+
+open(sys.argv[1], 'w').write(contents)
diff --git a/third_party/python/gyp/test/actions/src/subdir2/none.gyp b/third_party/python/gyp/test/actions/src/subdir2/none.gyp
new file mode 100644
index 0000000000..2caa97d55c
--- /dev/null
+++ b/third_party/python/gyp/test/actions/src/subdir2/none.gyp
@@ -0,0 +1,33 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'file',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'actions': [
+ {
+ 'action_name': 'make-file',
+ 'inputs': [
+ 'make-file.py',
+ ],
+ 'outputs': [
+ 'file.out',
+ # TODO: enhance testing infrastructure to test this
+ # without having to hard-code the intermediate dir paths.
+ #'<(INTERMEDIATE_DIR)/file.out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/actions/src/subdir3/generate_main.py b/third_party/python/gyp/test/actions/src/subdir3/generate_main.py
new file mode 100755
index 0000000000..804d38df31
--- /dev/null
+++ b/third_party/python/gyp/test/actions/src/subdir3/generate_main.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = """
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello from generate_main.py\\n");
+ return 0;
+}
+"""
+
+open(sys.argv[1], 'w').write(contents)
+
+sys.exit(0)
diff --git a/third_party/python/gyp/test/actions/src/subdir3/null_input.gyp b/third_party/python/gyp/test/actions/src/subdir3/null_input.gyp
new file mode 100644
index 0000000000..9b0bea5fdb
--- /dev/null
+++ b/third_party/python/gyp/test/actions/src/subdir3/null_input.gyp
@@ -0,0 +1,29 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'null_input',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'actions': [
+ {
+ 'action_name': 'generate_main',
+ 'process_outputs_as_sources': 1,
+ 'inputs': [],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/main.c',
+ ],
+ 'action': [
+ # TODO: we can't just use <(_outputs) here?!
+ 'python', 'generate_main.py', '<(INTERMEDIATE_DIR)/main.c',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/additional-targets/gyptest-additional.py b/third_party/python/gyp/test/additional-targets/gyptest-additional.py
new file mode 100755
index 0000000000..466283e55c
--- /dev/null
+++ b/third_party/python/gyp/test/additional-targets/gyptest-additional.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple actions when using an explicit build target of 'all'.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('all.gyp',
+ '-G', 'xcode_ninja_target_pattern=^all_targets$',
+ chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# Build all.
+test.build('all.gyp', chdir='relocate/src')
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/dir1'
+else:
+ chdir = 'relocate/src'
+
+# Output is as expected.
+file_content = 'Hello from emit.py\n'
+test.built_file_must_match('out2.txt', file_content, chdir=chdir)
+
+test.built_file_must_not_exist('out.txt', chdir='relocate/src')
+test.built_file_must_not_exist('foolib1',
+ type=test.SHARED_LIB,
+ chdir=chdir)
+
+# xcode-ninja doesn't generate separate workspaces for sub-gyps by design
+if test.format == 'xcode-ninja':
+ test.pass_test()
+
+# TODO(mmoss) Make consistent with msvs, with 'dir1' before 'out/Default'?
+if test.format in ('make', 'ninja', 'cmake'):
+ chdir='relocate/src'
+else:
+ chdir='relocate/src/dir1'
+
+# Build the action explicitly.
+test.build('actions.gyp', 'action1_target', chdir=chdir)
+
+# Check that things got run.
+file_content = 'Hello from emit.py\n'
+test.built_file_must_exist('out.txt', chdir=chdir)
+
+# Build the shared library explicitly.
+test.build('actions.gyp', 'foolib1', chdir=chdir)
+
+test.built_file_must_exist('foolib1',
+ type=test.SHARED_LIB,
+ chdir=chdir,
+ subdir='dir1')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/additional-targets/src/all.gyp b/third_party/python/gyp/test/additional-targets/src/all.gyp
new file mode 100644
index 0000000000..21c83080aa
--- /dev/null
+++ b/third_party/python/gyp/test/additional-targets/src/all.gyp
@@ -0,0 +1,13 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'all_targets',
+ 'type': 'none',
+ 'dependencies': ['dir1/actions.gyp:*'],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/additional-targets/src/dir1/actions.gyp b/third_party/python/gyp/test/additional-targets/src/dir1/actions.gyp
new file mode 100644
index 0000000000..5089c80913
--- /dev/null
+++ b/third_party/python/gyp/test/additional-targets/src/dir1/actions.gyp
@@ -0,0 +1,56 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'action1_target',
+ 'type': 'none',
+ 'suppress_wildcard': 1,
+ 'actions': [
+ {
+ 'action_name': 'action1',
+ 'inputs': [
+ 'emit.py',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/out.txt',
+ ],
+ 'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out.txt'],
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'action2_target',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'action2',
+ 'inputs': [
+ 'emit.py',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/out2.txt',
+ ],
+ 'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out2.txt'],
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'foolib1',
+ 'type': 'shared_library',
+ 'suppress_wildcard': 1,
+ 'sources': ['lib1.c'],
+ },
+ ],
+ 'conditions': [
+ ['OS=="linux"', {
+ 'target_defaults': {
+ 'cflags': ['-fPIC'],
+ },
+ }],
+ ],
+}
diff --git a/third_party/python/gyp/test/additional-targets/src/dir1/emit.py b/third_party/python/gyp/test/additional-targets/src/dir1/emit.py
new file mode 100755
index 0000000000..96db7a57df
--- /dev/null
+++ b/third_party/python/gyp/test/additional-targets/src/dir1/emit.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+f = open(sys.argv[1], 'w')
+f.write('Hello from emit.py\n')
+f.close()
diff --git a/third_party/python/gyp/test/additional-targets/src/dir1/lib1.c b/third_party/python/gyp/test/additional-targets/src/dir1/lib1.c
new file mode 100644
index 0000000000..df4cb10f79
--- /dev/null
+++ b/third_party/python/gyp/test/additional-targets/src/dir1/lib1.c
@@ -0,0 +1,6 @@
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+int func1(void) {
+ return 42;
+}
diff --git a/third_party/python/gyp/test/analyzer/common.gypi b/third_party/python/gyp/test/analyzer/common.gypi
new file mode 100644
index 0000000000..7c664e40da
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/common.gypi
@@ -0,0 +1,6 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+}
diff --git a/third_party/python/gyp/test/analyzer/gyptest-analyzer.py b/third_party/python/gyp/test/analyzer/gyptest-analyzer.py
new file mode 100644
index 0000000000..58a1ce6f07
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/gyptest-analyzer.py
@@ -0,0 +1,427 @@
+#!/usr/bin/env python
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for analyzer
+"""
+
+from __future__ import print_function
+
+import json
+import TestGyp
+
+found = 'Found dependency'
+found_all = 'Found dependency (all)'
+not_found = 'No dependencies'
+
+
+def _CreateConfigFile(files, additional_compile_targets, test_targets=[]):
+ """Creates the analyzer config file, which is used as the input to analyzer.
+ See description of analyzer.py for description of the arguments."""
+ f = open('test_file', 'w')
+ to_write = {'files': files,
+ 'test_targets': test_targets,
+ 'additional_compile_targets': additional_compile_targets }
+ json.dump(to_write, f)
+ f.close()
+
+
+def _CreateBogusConfigFile():
+ f = open('test_file','w')
+ f.write('bogus')
+ f.close()
+
+
+def _ReadOutputFileContents():
+ f = open('analyzer_output', 'r')
+ result = json.load(f)
+ f.close()
+ return result
+
+
+# NOTE: this would be clearer if it subclassed TestGypCustom, but that trips
+# over a bug in pylint (E1002).
+test = TestGyp.TestGypCustom(format='analyzer')
+
+def CommonArgs():
+ return ('-Gconfig_path=test_file',
+ '-Ganalyzer_output_path=analyzer_output')
+
+
+def run_analyzer(*args, **kw):
+ """Runs the test specifying a particular config and output path."""
+ args += CommonArgs()
+ test.run_gyp('test.gyp', *args, **kw)
+
+
+def run_analyzer2(*args, **kw):
+ """Same as run_analyzer(), but passes in test2.gyp instead of test.gyp."""
+ args += CommonArgs()
+ test.run_gyp('test2.gyp', *args, **kw)
+
+
+def run_analyzer3(*args, **kw):
+ """Same as run_analyzer(), but passes in test3.gyp instead of test.gyp."""
+ args += CommonArgs()
+ test.run_gyp('test3.gyp', *args, **kw)
+
+
+def run_analyzer4(*args, **kw):
+ """Same as run_analyzer(), but passes in test3.gyp instead of test.gyp."""
+ args += CommonArgs()
+ test.run_gyp('test4.gyp', *args, **kw)
+
+
+def EnsureContains(matched=False, compile_targets=set(), test_targets=set()):
+ """Verifies output contains |compile_targets|."""
+ result = _ReadOutputFileContents()
+ if 'error' in result:
+ print('unexpected error', result.get('error'))
+ test.fail_test()
+
+ if 'invalid_targets' in result:
+ print('unexpected invalid_targets', result.get('invalid_targets'))
+ test.fail_test()
+
+ actual_compile_targets = set(result['compile_targets'])
+ if actual_compile_targets != compile_targets:
+ print('actual compile_targets:', actual_compile_targets,
+ '\nexpected compile_targets:', compile_targets)
+ test.fail_test()
+
+ actual_test_targets = set(result['test_targets'])
+ if actual_test_targets != test_targets:
+ print('actual test_targets:', actual_test_targets,
+ '\nexpected test_targets:', test_targets)
+ test.fail_test()
+
+ if matched and result['status'] != found:
+ print('expected', found, 'got', result['status'])
+ test.fail_test()
+ elif not matched and result['status'] != not_found:
+ print('expected', not_found, 'got', result['status'])
+ test.fail_test()
+
+
+def EnsureMatchedAll(compile_targets, test_targets=set()):
+ result = _ReadOutputFileContents()
+ if 'error' in result:
+ print('unexpected error', result.get('error'))
+ test.fail_test()
+
+ if 'invalid_targets' in result:
+ print('unexpected invalid_targets', result.get('invalid_targets'))
+ test.fail_test()
+
+ if result['status'] != found_all:
+ print('expected', found_all, 'got', result['status'])
+ test.fail_test()
+
+ actual_compile_targets = set(result['compile_targets'])
+ if actual_compile_targets != compile_targets:
+ print('actual compile_targets:', actual_compile_targets,
+ '\nexpected compile_targets:', compile_targets)
+ test.fail_test()
+
+ actual_test_targets = set(result['test_targets'])
+ if actual_test_targets != test_targets:
+ print('actual test_targets:', actual_test_targets,
+ '\nexpected test_targets:', test_targets)
+ test.fail_test()
+
+
+def EnsureError(expected_error_string):
+ """Verifies output contains the error string."""
+ result = _ReadOutputFileContents()
+ if result.get('error', '').find(expected_error_string) == -1:
+ print('actual error:', result.get('error', ''), '\nexpected error:',
+ expected_error_string)
+ test.fail_test()
+
+
+def EnsureStdoutContains(expected_error_string):
+ if test.stdout().find(expected_error_string) == -1:
+ print('actual stdout:', test.stdout(), '\nexpected stdout:',
+ expected_error_string)
+ test.fail_test()
+
+
+def EnsureInvalidTargets(expected_invalid_targets):
+ """Verifies output contains invalid_targets."""
+ result = _ReadOutputFileContents()
+ actual_invalid_targets = set(result['invalid_targets'])
+ if actual_invalid_targets != expected_invalid_targets:
+ print('actual invalid_targets:', actual_invalid_targets,
+ '\nexpected :', expected_invalid_targets)
+ test.fail_test()
+
+
+# Two targets, A and B (both static_libraries) and A depends upon B. If a file
+# in B changes, then both A and B are output. It is not strictly necessary that
+# A is compiled in this case, only B.
+_CreateConfigFile(['b.c'], ['all'])
+test.run_gyp('static_library_test.gyp', *CommonArgs())
+EnsureContains(matched=True, compile_targets={'a', 'b'})
+
+# Verifies config_path must be specified.
+test.run_gyp('test.gyp')
+EnsureStdoutContains('Must specify files to analyze via config_path')
+
+# Verifies config_path must point to a valid file.
+test.run_gyp('test.gyp', '-Gconfig_path=bogus_file',
+ '-Ganalyzer_output_path=analyzer_output')
+EnsureError('Unable to open file bogus_file')
+
+# Verify 'invalid_targets' is present when bad target is specified.
+_CreateConfigFile(['exe2.c'], ['bad_target'])
+run_analyzer()
+EnsureInvalidTargets({'bad_target'})
+
+# Verifies config_path must point to a valid json file.
+_CreateBogusConfigFile()
+run_analyzer()
+EnsureError('Unable to parse config file test_file')
+
+# Trivial test of a source.
+_CreateConfigFile(['foo.c'], ['all'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe'})
+
+# Conditional source that is excluded.
+_CreateConfigFile(['conditional_source.c'], ['all'])
+run_analyzer()
+EnsureContains(matched=False)
+
+# Conditional source that is included by way of argument.
+_CreateConfigFile(['conditional_source.c'], ['all'])
+run_analyzer('-Dtest_variable=1')
+EnsureContains(matched=True, compile_targets={'exe'})
+
+# Two unknown files.
+_CreateConfigFile(['unknown1.c', 'unoknow2.cc'], ['all'])
+run_analyzer()
+EnsureContains()
+
+# Two unknown files.
+_CreateConfigFile(['unknown1.c', 'subdir/subdir_sourcex.c'], ['all'])
+run_analyzer()
+EnsureContains()
+
+# Included dependency
+_CreateConfigFile(['unknown1.c', 'subdir/subdir_source.c'], ['all'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe', 'exe3'})
+
+# Included inputs to actions.
+_CreateConfigFile(['action_input.c'], ['all'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe'})
+
+# Don't consider outputs.
+_CreateConfigFile(['action_output.c'], ['all'])
+run_analyzer()
+EnsureContains(matched=False)
+
+# Rule inputs.
+_CreateConfigFile(['rule_input.c'], ['all'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe'})
+
+# Ignore path specified with PRODUCT_DIR.
+_CreateConfigFile(['product_dir_input.c'], ['all'])
+run_analyzer()
+EnsureContains(matched=False)
+
+# Path specified via a variable.
+_CreateConfigFile(['subdir/subdir_source2.c'], ['all'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe'})
+
+# Verifies paths with // are fixed up correctly.
+_CreateConfigFile(['parent_source.c'], ['all'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe', 'exe3'})
+
+# Verifies relative paths are resolved correctly.
+_CreateConfigFile(['subdir/subdir_source.h'], ['all'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe'})
+
+# Verifies relative paths in inputs are resolved correctly.
+_CreateConfigFile(['rel_path1.h'], ['all'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe'})
+
+# Various permutations when passing in targets.
+_CreateConfigFile(['exe2.c', 'subdir/subdir2b_source.c'],
+ ['all'], ['exe', 'exe3'])
+run_analyzer()
+EnsureContains(matched=True, test_targets={'exe3'},
+ compile_targets={'exe2', 'exe3'})
+
+_CreateConfigFile(['exe2.c', 'subdir/subdir2b_source.c'], ['all'], ['exe'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe2', 'exe3'})
+
+# Verifies duplicates are ignored.
+_CreateConfigFile(['exe2.c', 'subdir/subdir2b_source.c'], ['all'],
+ ['exe', 'exe'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe2', 'exe3'})
+
+_CreateConfigFile(['exe2.c'], ['all'], ['exe'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe2'})
+
+_CreateConfigFile(['exe2.c'], ['all'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe2'})
+
+_CreateConfigFile(['subdir/subdir2b_source.c', 'exe2.c'], ['all'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe2', 'exe3'})
+
+_CreateConfigFile(['subdir/subdir2b_source.c'], ['all'], ['exe3'])
+run_analyzer()
+EnsureContains(matched=True, test_targets={'exe3'}, compile_targets={'exe3'})
+
+_CreateConfigFile(['exe2.c'], ['all'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe2'})
+
+_CreateConfigFile(['foo.c'], ['all'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe'})
+
+# Assertions when modifying build (gyp/gypi) files, especially when said files
+# are included.
+_CreateConfigFile(['subdir2/d.cc'], ['all'], ['exe', 'exe2', 'foo', 'exe3'])
+run_analyzer2()
+EnsureContains(matched=True, test_targets={'exe', 'foo'},
+ compile_targets={'exe', 'foo'})
+
+_CreateConfigFile(['subdir2/subdir.includes.gypi'], ['all'],
+ ['exe', 'exe2', 'foo', 'exe3'])
+run_analyzer2()
+EnsureContains(matched=True, test_targets={'exe', 'foo'},
+ compile_targets={'exe', 'foo'})
+
+_CreateConfigFile(['subdir2/subdir.gyp'], ['all'],
+ ['exe', 'exe2', 'foo', 'exe3'])
+run_analyzer2()
+EnsureContains(matched=True, test_targets={'exe', 'foo'},
+ compile_targets={'exe', 'foo'})
+
+_CreateConfigFile(['test2.includes.gypi'], ['all'],
+ ['exe', 'exe2', 'foo', 'exe3'])
+run_analyzer2()
+EnsureContains(matched=True, test_targets={'exe', 'exe2', 'exe3'},
+ compile_targets={'exe', 'exe2', 'exe3'})
+
+# Verify that modifying an included file makes all targets dirty.
+_CreateConfigFile(['common.gypi'], ['all'], ['exe', 'exe2', 'foo', 'exe3'])
+run_analyzer2('-Icommon.gypi')
+EnsureMatchedAll({'all', 'exe', 'exe2', 'foo', 'exe3'},
+ {'exe', 'exe2', 'foo', 'exe3'})
+
+# Assertions from test3.gyp.
+_CreateConfigFile(['d.c', 'f.c'], ['all'], ['a'])
+run_analyzer3()
+EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a', 'b'})
+
+_CreateConfigFile(['f.c'], ['all'], ['a'])
+run_analyzer3()
+EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a', 'b'})
+
+_CreateConfigFile(['f.c'], ['all'])
+run_analyzer3()
+EnsureContains(matched=True, compile_targets={'a', 'b'})
+
+_CreateConfigFile(['c.c', 'e.c'], ['all'])
+run_analyzer3()
+EnsureContains(matched=True, compile_targets={'a', 'b', 'c', 'e'})
+
+_CreateConfigFile(['d.c'], ['all'], ['a'])
+run_analyzer3()
+EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a', 'b'})
+
+_CreateConfigFile(['a.c'], ['all'], ['a', 'b'])
+run_analyzer3()
+EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a'})
+
+_CreateConfigFile(['a.c'], ['all'], ['a', 'b'])
+run_analyzer3()
+EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a'})
+
+_CreateConfigFile(['d.c'], ['all'], ['a', 'b'])
+run_analyzer3()
+EnsureContains(matched=True, test_targets={'a', 'b'},
+ compile_targets={'a', 'b'})
+
+_CreateConfigFile(['f.c'], ['all'], ['a'])
+run_analyzer3()
+EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a', 'b'})
+
+_CreateConfigFile(['a.c'], ['all'], ['a'])
+run_analyzer3()
+EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a'})
+
+_CreateConfigFile(['a.c'], ['all'])
+run_analyzer3()
+EnsureContains(matched=True, compile_targets={'a'})
+
+_CreateConfigFile(['d.c'], ['all'])
+run_analyzer3()
+EnsureContains(matched=True, compile_targets={'a', 'b'})
+
+# Assertions around test4.gyp.
+_CreateConfigFile(['f.c'], ['all'])
+run_analyzer4()
+EnsureContains(matched=True, compile_targets={'e', 'f'})
+
+_CreateConfigFile(['d.c'], ['all'])
+run_analyzer4()
+EnsureContains(matched=True, compile_targets={'a', 'b', 'c', 'd'})
+
+_CreateConfigFile(['i.c'], ['all'])
+run_analyzer4()
+EnsureContains(matched=True, compile_targets={'h', 'i'})
+
+# Assertions where 'all' is not supplied in compile_targets.
+
+_CreateConfigFile(['exe2.c'], [], ['exe2'])
+run_analyzer()
+EnsureContains(matched=True, test_targets={'exe2'}, compile_targets={'exe2'})
+
+_CreateConfigFile(['exe20.c'], [], ['exe2'])
+run_analyzer()
+EnsureContains(matched=False)
+
+
+_CreateConfigFile(['exe2.c', 'exe3.c'], [], ['exe2', 'exe3'])
+run_analyzer()
+EnsureContains(matched=True, test_targets={'exe2', 'exe3'},
+ compile_targets={'exe2', 'exe3'})
+
+_CreateConfigFile(['exe2.c', 'exe3.c'], ['exe3'], ['exe2'])
+run_analyzer()
+EnsureContains(matched=True, test_targets={'exe2'},
+ compile_targets={'exe2', 'exe3'})
+
+_CreateConfigFile(['exe3.c'], ['exe2'], ['exe2'])
+run_analyzer()
+EnsureContains(matched=False)
+
+# Assertions with 'all' listed as a test_target.
+_CreateConfigFile(['exe3.c'], [], ['all'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe3', 'all'},
+ test_targets={'all'})
+
+_CreateConfigFile(['exe2.c'], [], ['all', 'exe2'])
+run_analyzer()
+EnsureContains(matched=True, compile_targets={'exe2', 'all'},
+ test_targets={'all', 'exe2'})
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/analyzer/static_library_test.gyp b/third_party/python/gyp/test/analyzer/static_library_test.gyp
new file mode 100644
index 0000000000..2c8e4bd826
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/static_library_test.gyp
@@ -0,0 +1,34 @@
+# Copyright 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# These gyp files create the following dependencies:
+#
+# static_library_test.gyp:
+# #a -> b
+# a.c
+# #b
+# b.c
+# a and b are static libraries.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'static_library',
+ 'sources': [
+ 'a.c',
+ ],
+ 'dependencies': [
+ 'b',
+ ],
+ },
+ {
+ 'target_name': 'b',
+ 'type': 'static_library',
+ 'sources': [
+ 'b.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/analyzer/subdir/subdir.gyp b/third_party/python/gyp/test/analyzer/subdir/subdir.gyp
new file mode 100644
index 0000000000..bfa2df48e1
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/subdir/subdir.gyp
@@ -0,0 +1,36 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'trailing_dir_path': '../',
+ },
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'static_library',
+ 'sources': [
+ 'subdir_source.c',
+ '<(trailing_dir_path)/parent_source.c',
+ ],
+ },
+ {
+ 'target_name': 'subdir2a',
+ 'type': 'static_library',
+ 'sources': [
+ 'subdir2_source.c',
+ ],
+ 'dependencies': [
+ 'subdir2b',
+ ],
+ },
+ {
+ 'target_name': 'subdir2b',
+ 'type': 'static_library',
+ 'sources': [
+ 'subdir2b_source.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/analyzer/subdir/subdir2/subdir2.gyp b/third_party/python/gyp/test/analyzer/subdir/subdir2/subdir2.gyp
new file mode 100644
index 0000000000..e5aaa92b18
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/subdir/subdir2/subdir2.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'subdir2',
+ 'type': 'static_library',
+ 'sources': [
+ '../subdir_source.h',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/analyzer/subdir2/subdir.gyp b/third_party/python/gyp/test/analyzer/subdir2/subdir.gyp
new file mode 100644
index 0000000000..d6c709c9ef
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/subdir2/subdir.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'static_library',
+ 'sources': [
+ 'subdir_source.c',
+ ],
+ 'includes': [
+ 'subdir.includes.gypi',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/analyzer/subdir2/subdir.includes.gypi b/third_party/python/gyp/test/analyzer/subdir2/subdir.includes.gypi
new file mode 100644
index 0000000000..324e92bcd4
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/subdir2/subdir.includes.gypi
@@ -0,0 +1,9 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'sources': [
+ 'd.cc'
+ ],
+}
diff --git a/third_party/python/gyp/test/analyzer/test.gyp b/third_party/python/gyp/test/analyzer/test.gyp
new file mode 100644
index 0000000000..c25ca73bff
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/test.gyp
@@ -0,0 +1,114 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# These gyp files create the following dependencies:
+#
+# test.gyp:
+# #exe -> subdir/subdir.gyp#foo, subdir/subdir2/subdir2.gyp#subdir2
+# foo.c
+# subdir/subdir_source2.c
+# conditional_source.c (if test_variable==1)
+# action_input.c
+# action_output.c
+# rule_input.c
+# rule_output.pdf
+# #exe2
+# exe2.c
+# #exe3 -> subdir/subdir.gyp#foo, subdir/subdir.gyp#subdir2a
+# exe3.c
+# #allx (type none) -> exe, exe3
+#
+# subdir/subdir.gyp
+# #foo
+# subdir/subdir_source.c
+# parent_source.c
+# #subdir2a -> subdir2b
+# subdir/subdir2_source.c
+# #subdir2b
+# subdir/subdir2b_source.c
+#
+# subdir/subdir2/subdir2.gyp
+# #subdir2
+# subdir/subdir_source.h
+
+{
+ 'variables': {
+ 'test_variable%': 0,
+ 'variable_path': 'subdir',
+ },
+ 'targets': [
+ {
+ 'target_name': 'exe',
+ 'type': 'executable',
+ 'dependencies': [
+ 'subdir/subdir.gyp:foo',
+ 'subdir/subdir2/subdir2.gyp:subdir2',
+ ],
+ 'sources': [
+ 'foo.c',
+ '<(variable_path)/subdir_source2.c',
+ ],
+ 'conditions': [
+ ['test_variable==1', {
+ 'sources': [
+ 'conditional_source.c',
+ ],
+ }],
+ ],
+ 'actions': [
+ {
+ 'action_name': 'action',
+ 'inputs': [
+ '<(PRODUCT_DIR)/product_dir_input.c',
+ 'action_input.c',
+ '../bad_path1.h',
+ '../../bad_path2.h',
+ './rel_path1.h',
+ ],
+ 'outputs': [
+ 'action_output.c',
+ ],
+ },
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'rule',
+ 'extension': 'pdf',
+ 'inputs': [
+ 'rule_input.c',
+ ],
+ 'outputs': [
+ 'rule_output.pdf',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'exe2',
+ 'type': 'executable',
+ 'sources': [
+ 'exe2.c',
+ ],
+ },
+ {
+ 'target_name': 'exe3',
+ 'type': 'executable',
+ 'dependencies': [
+ 'subdir/subdir.gyp:foo',
+ 'subdir/subdir.gyp:subdir2a',
+ ],
+ 'sources': [
+ 'exe3.c',
+ ],
+ },
+ {
+ 'target_name': 'allx',
+ 'type': 'none',
+ 'dependencies': [
+ 'exe',
+ 'exe3',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/analyzer/test2.gyp b/third_party/python/gyp/test/analyzer/test2.gyp
new file mode 100644
index 0000000000..782b6e6428
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/test2.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'exe',
+ 'type': 'executable',
+ 'dependencies': [
+ 'subdir2/subdir.gyp:foo',
+ ],
+ },
+ {
+ 'target_name': 'exe2',
+ 'type': 'executable',
+ 'includes': [
+ 'test2.includes.gypi',
+ ],
+ },
+ ],
+ 'includes': [
+ 'test2.toplevel_includes.gypi',
+ ],
+}
diff --git a/third_party/python/gyp/test/analyzer/test2.includes.gypi b/third_party/python/gyp/test/analyzer/test2.includes.gypi
new file mode 100644
index 0000000000..3e21de23cb
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/test2.includes.gypi
@@ -0,0 +1,13 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'sources': [
+ 'a.cc',
+ 'b.cc'
+ ],
+ 'includes': [
+ 'test2.includes.includes.gypi',
+ ],
+}
diff --git a/third_party/python/gyp/test/analyzer/test2.includes.includes.gypi b/third_party/python/gyp/test/analyzer/test2.includes.includes.gypi
new file mode 100644
index 0000000000..de3a025dbb
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/test2.includes.includes.gypi
@@ -0,0 +1,9 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'sources': [
+ 'c.cc'
+ ],
+}
diff --git a/third_party/python/gyp/test/analyzer/test2.toplevel_includes.gypi b/third_party/python/gyp/test/analyzer/test2.toplevel_includes.gypi
new file mode 100644
index 0000000000..54fa453b08
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/test2.toplevel_includes.gypi
@@ -0,0 +1,15 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'exe3',
+ 'type': 'executable',
+ 'sources': [
+ 'e.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/analyzer/test3.gyp b/third_party/python/gyp/test/analyzer/test3.gyp
new file mode 100644
index 0000000000..e52f6bc7d3
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/test3.gyp
@@ -0,0 +1,77 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'allx',
+ 'type': 'none',
+ 'dependencies': [
+ 'a',
+ 'b',
+ ],
+ },
+ {
+ 'target_name': 'a',
+ 'type': 'executable',
+ 'sources': [
+ 'a.c',
+ ],
+ 'dependencies': [
+ 'c',
+ 'd',
+ ],
+ },
+ {
+ 'target_name': 'b',
+ 'type': 'executable',
+ 'sources': [
+ 'b.c',
+ ],
+ 'dependencies': [
+ 'd',
+ 'e',
+ ],
+ },
+ {
+ 'target_name': 'c',
+ 'type': 'executable',
+ 'sources': [
+ 'c.c',
+ ],
+ },
+ {
+ 'target_name': 'd',
+ 'type': 'none',
+ 'sources': [
+ 'd.c',
+ ],
+ 'dependencies': [
+ 'f',
+ 'g',
+ ],
+ },
+ {
+ 'target_name': 'e',
+ 'type': 'executable',
+ 'sources': [
+ 'e.c',
+ ],
+ },
+ {
+ 'target_name': 'f',
+ 'type': 'static_library',
+ 'sources': [
+ 'f.c',
+ ],
+ },
+ {
+ 'target_name': 'g',
+ 'type': 'executable',
+ 'sources': [
+ 'g.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/analyzer/test4.gyp b/third_party/python/gyp/test/analyzer/test4.gyp
new file mode 100644
index 0000000000..91cea56c1f
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/test4.gyp
@@ -0,0 +1,80 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'executable',
+ 'sources': [
+ 'a.c',
+ ],
+ 'dependencies': [
+ 'b',
+ 'c',
+ ],
+ },
+ {
+ 'target_name': 'b',
+ 'type': 'executable',
+ 'sources': [
+ 'b.c',
+ ],
+ 'dependencies': [
+ 'd',
+ ],
+ },
+ {
+ 'target_name': 'c',
+ 'type': 'executable',
+ 'sources': [
+ 'c.c',
+ ],
+ 'dependencies': [
+ 'b',
+ 'd',
+ ],
+ },
+ {
+ 'target_name': 'd',
+ 'type': 'executable',
+ 'sources': [
+ 'd.c',
+ ],
+ },
+ {
+ 'target_name': 'e',
+ 'type': 'executable',
+ 'dependencies': [
+ 'test5.gyp:f',
+ ],
+ },
+ {
+ 'target_name': 'h',
+ 'type': 'none',
+ 'dependencies': [
+ 'i',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'rule',
+ 'extension': 'pdf',
+ 'inputs': [
+ 'rule_input.c',
+ ],
+ 'outputs': [
+ 'rule_output.pdf',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'i',
+ 'type': 'static_library',
+ 'sources': [
+ 'i.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/analyzer/test5.gyp b/third_party/python/gyp/test/analyzer/test5.gyp
new file mode 100644
index 0000000000..f3ea5b0061
--- /dev/null
+++ b/third_party/python/gyp/test/analyzer/test5.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'f',
+ 'type': 'executable',
+ 'sources': [
+ 'f.c',
+ ],
+ },
+ {
+ 'target_name': 'g',
+ 'type': 'executable',
+ 'sources': [
+ 'g.c',
+ ],
+ 'dependencies': [
+ 'f',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/arflags/gyptest-arflags.py b/third_party/python/gyp/test/arflags/gyptest-arflags.py
new file mode 100644
index 0000000000..870a2d8946
--- /dev/null
+++ b/third_party/python/gyp/test/arflags/gyptest-arflags.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that building a target with invalid arflags fails.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+import TestGyp
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
+test = TestGyp.TestGyp(formats=['ninja'])
+test.run_gyp('test.gyp')
+expected_status = 0 if sys.platform in ['darwin', 'win32'] else 1
+test.build('test.gyp', target='lib', status=expected_status)
+test.pass_test()
diff --git a/third_party/python/gyp/test/arflags/lib.cc b/third_party/python/gyp/test/arflags/lib.cc
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/arflags/lib.cc
diff --git a/third_party/python/gyp/test/arflags/test.gyp b/third_party/python/gyp/test/arflags/test.gyp
new file mode 100644
index 0000000000..f7430fae2d
--- /dev/null
+++ b/third_party/python/gyp/test/arflags/test.gyp
@@ -0,0 +1,10 @@
+{
+ 'targets': [
+ {
+ 'target_name': 'lib',
+ 'type': 'static_library',
+ 'sources': ['lib.cc'],
+ 'arflags': ['--nonexistent'],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/assembly/gyptest-assembly.py b/third_party/python/gyp/test/assembly/gyptest-assembly.py
new file mode 100755
index 0000000000..8a84310544
--- /dev/null
+++ b/third_party/python/gyp/test/assembly/gyptest-assembly.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A basic test of compiling assembler files.
+"""
+
+import sys
+import TestGyp
+
+if sys.platform != 'win32':
+ # TODO(bradnelson): get this working for windows.
+ test = TestGyp.TestGyp(formats=['!msvs'])
+
+ test.run_gyp('assembly.gyp', chdir='src')
+
+ test.relocate('src', 'relocate/src')
+
+ test.build('assembly.gyp', test.ALL, chdir='relocate/src')
+
+ expect = """\
+Hello from program.c
+Got 42.
+"""
+ test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/assembly/gyptest-override.py b/third_party/python/gyp/test/assembly/gyptest-override.py
new file mode 100644
index 0000000000..e84a23e855
--- /dev/null
+++ b/third_party/python/gyp/test/assembly/gyptest-override.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure that manual rules on Windows override the built in ones.
+"""
+
+import sys
+import TestGyp
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+ CHDIR = 'src'
+ test.run_gyp('override.gyp', chdir=CHDIR)
+ test.build('override.gyp', test.ALL, chdir=CHDIR)
+ expect = """\
+Hello from program.c
+Got 42.
+"""
+ test.run_built_executable('program', chdir=CHDIR, stdout=expect)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/assembly/src/as.bat b/third_party/python/gyp/test/assembly/src/as.bat
new file mode 100644
index 0000000000..b796db97ca
--- /dev/null
+++ b/third_party/python/gyp/test/assembly/src/as.bat
@@ -0,0 +1,4 @@
+@echo off
+:: Mock windows assembler.
+cl /MD /c %1 /Fo"%2"
+
diff --git a/third_party/python/gyp/test/assembly/src/assembly.gyp b/third_party/python/gyp/test/assembly/src/assembly.gyp
new file mode 100644
index 0000000000..565cb0fa0e
--- /dev/null
+++ b/third_party/python/gyp/test/assembly/src/assembly.gyp
@@ -0,0 +1,62 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'conditions': [
+ ['OS=="win"', {
+ 'defines': ['PLATFORM_WIN'],
+ }],
+ ['OS=="mac" or OS=="ios"', {
+ 'defines': ['PLATFORM_MAC'],
+ }],
+ ['OS=="linux"', {
+ 'defines': ['PLATFORM_LINUX'],
+ }],
+ ['OS=="android"', {
+ 'defines': ['PLATFORM_ANDROID'],
+ }],
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'dependencies': ['lib1'],
+ 'sources': [
+ 'program.c',
+ ],
+ },
+ {
+ 'target_name': 'lib1',
+ 'type': 'static_library',
+ 'sources': [
+ 'lib1.S',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="win"', {
+ 'target_defaults': {
+ 'rules': [
+ {
+ 'rule_name': 'assembler',
+ 'msvs_cygwin_shell': 0,
+ 'extension': 'S',
+ 'inputs': [
+ 'as.bat',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).obj',
+ ],
+ 'action':
+ ['as.bat', 'lib1.c', '<(_outputs)'],
+ 'message': 'Building assembly file <(RULE_INPUT_PATH)',
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ },],
+ ],
+}
diff --git a/third_party/python/gyp/test/assembly/src/lib1.S b/third_party/python/gyp/test/assembly/src/lib1.S
new file mode 100644
index 0000000000..7de9f19cf9
--- /dev/null
+++ b/third_party/python/gyp/test/assembly/src/lib1.S
@@ -0,0 +1,15 @@
+#if PLATFORM_WINDOWS || PLATFORM_MAC
+# define IDENTIFIER(n) _##n
+#else /* Linux */
+# define IDENTIFIER(n) n
+#endif
+
+.globl IDENTIFIER(lib1_function)
+IDENTIFIER(lib1_function):
+#if !defined(PLATFORM_ANDROID)
+ movl $42, %eax
+ ret
+#else /* Android (assuming ARM) */
+ mov r0, #42
+ bx lr
+#endif
diff --git a/third_party/python/gyp/test/assembly/src/lib1.c b/third_party/python/gyp/test/assembly/src/lib1.c
new file mode 100644
index 0000000000..be21ecd5f6
--- /dev/null
+++ b/third_party/python/gyp/test/assembly/src/lib1.c
@@ -0,0 +1,3 @@
+int lib1_function(void) {
+ return 42;
+}
diff --git a/third_party/python/gyp/test/assembly/src/override.gyp b/third_party/python/gyp/test/assembly/src/override.gyp
new file mode 100644
index 0000000000..39a4072eff
--- /dev/null
+++ b/third_party/python/gyp/test/assembly/src/override.gyp
@@ -0,0 +1,34 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'sources': [
+ 'program.c',
+ 'override_asm.asm',
+ ],
+ 'rules': [
+ {
+ # Test that if there's a specific .asm rule, it overrides the
+ # built in one on Windows.
+ 'rule_name': 'assembler',
+ 'msvs_cygwin_shell': 0,
+ 'extension': 'asm',
+ 'inputs': [
+ 'as.bat',
+ ],
+ 'outputs': [
+ 'output.obj',
+ ],
+ 'action': ['as.bat', 'lib1.c', '<(_outputs)'],
+ 'message': 'Building assembly file <(RULE_INPUT_PATH)',
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/assembly/src/override_asm.asm b/third_party/python/gyp/test/assembly/src/override_asm.asm
new file mode 100644
index 0000000000..be93b23baa
--- /dev/null
+++ b/third_party/python/gyp/test/assembly/src/override_asm.asm
@@ -0,0 +1,8 @@
+; Copyright (c) 2012 Google Inc. All rights reserved.
+; Use of this source code is governed by a BSD-style license that can be
+; found in the LICENSE file.
+
+; This is a placeholder. It should not be referenced if overrides work
+; correctly.
+
+Bad stuff that shouldn't assemble.
diff --git a/third_party/python/gyp/test/assembly/src/program.c b/third_party/python/gyp/test/assembly/src/program.c
new file mode 100644
index 0000000000..eee862712e
--- /dev/null
+++ b/third_party/python/gyp/test/assembly/src/program.c
@@ -0,0 +1,12 @@
+#include <stdio.h>
+
+extern int lib1_function(void);
+
+int main(void)
+{
+ fprintf(stdout, "Hello from program.c\n");
+ fflush(stdout);
+ fprintf(stdout, "Got %d.\n", lib1_function());
+ fflush(stdout);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/build-option/gyptest-build.py b/third_party/python/gyp/test/build-option/gyptest-build.py
new file mode 100755
index 0000000000..34a9e11d35
--- /dev/null
+++ b/third_party/python/gyp/test/build-option/gyptest-build.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simplest-possible build of a "Hello, world!" program
+using the default build target.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(workdir='workarea_default')
+
+if test.format == 'xcode-ninja':
+ # The xcode-ninja generator doesn't support --build
+ # cf. https://code.google.com/p/gyp/issues/detail?id=453
+ test.skip_test()
+
+test.run_gyp('hello.gyp', '--build=Default')
+
+test.run_built_executable('hello', stdout="Hello, world!\n")
+
+test.up_to_date('hello.gyp', test.DEFAULT)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/build-option/hello.c b/third_party/python/gyp/test/build-option/hello.c
new file mode 100644
index 0000000000..f6ad129fd7
--- /dev/null
+++ b/third_party/python/gyp/test/build-option/hello.c
@@ -0,0 +1,13 @@
+/*
+ * Copyright (c) 2012 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello, world!\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/build-option/hello.gyp b/third_party/python/gyp/test/build-option/hello.gyp
new file mode 100644
index 0000000000..1974d51ccd
--- /dev/null
+++ b/third_party/python/gyp/test/build-option/hello.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/builddir/gyptest-all.py b/third_party/python/gyp/test/builddir/gyptest-all.py
new file mode 100755
index 0000000000..a26543f49e
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/gyptest-all.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify the settings that cause a set of programs to be created in
+a specific build directory, and that no intermediate built files
+get created outside of that build directory hierarchy even when
+referred to with deeply-nested ../../.. paths.
+"""
+
+import TestGyp
+
+# TODO(mmoss): Make only supports (theoretically) a single, global build
+# directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than
+# gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other
+# generators support, so this doesn't work yet for make.
+# TODO(mmoss) Make also has the issue that the top-level Makefile is written to
+# the "--depth" location, which is one level above 'src', but then this test
+# moves 'src' somewhere else, leaving the Makefile behind, so make can't find
+# its sources. I'm not sure if make is wrong for writing outside the current
+# directory, or if the test is wrong for assuming everything generated is under
+# the current directory.
+# Ninja and CMake do not support setting the build directory.
+test = TestGyp.TestGyp(formats=['!make', '!ninja', '!cmake'])
+
+test.run_gyp('prog1.gyp', '--depth=..', chdir='src')
+if test.format == 'msvs':
+ if test.uses_msbuild:
+ test.must_contain('src/prog1.vcxproj',
+ '<OutDir>..\\builddir\\Default\\</OutDir>')
+ else:
+ test.must_contain('src/prog1.vcproj',
+ 'OutputDirectory="..\\builddir\\Default\\"')
+
+test.relocate('src', 'relocate/src')
+
+test.subdir('relocate/builddir')
+
+# Make sure that all the built ../../etc. files only get put under builddir,
+# by making all of relocate read-only and then making only builddir writable.
+test.writable('relocate', False)
+test.writable('relocate/builddir', True)
+
+# Suppress the test infrastructure's setting SYMROOT on the command line.
+test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src')
+
+expect1 = """\
+Hello from prog1.c
+Hello from func1.c
+"""
+
+expect2 = """\
+Hello from subdir2/prog2.c
+Hello from func2.c
+"""
+
+expect3 = """\
+Hello from subdir2/subdir3/prog3.c
+Hello from func3.c
+"""
+
+expect4 = """\
+Hello from subdir2/subdir3/subdir4/prog4.c
+Hello from func4.c
+"""
+
+expect5 = """\
+Hello from subdir2/subdir3/subdir4/subdir5/prog5.c
+Hello from func5.c
+"""
+
+def run_builddir(prog, expect):
+ dir = 'relocate/builddir/Default/'
+ test.run(program=test.workpath(dir + prog), stdout=expect)
+
+run_builddir('prog1', expect1)
+run_builddir('prog2', expect2)
+run_builddir('prog3', expect3)
+run_builddir('prog4', expect4)
+run_builddir('prog5', expect5)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/builddir/gyptest-default.py b/third_party/python/gyp/test/builddir/gyptest-default.py
new file mode 100755
index 0000000000..4904cdab42
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/gyptest-default.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify the settings that cause a set of programs to be created in
+a specific build directory, and that no intermediate built files
+get created outside of that build directory hierarchy even when
+referred to with deeply-nested ../../.. paths.
+"""
+
+import TestGyp
+
+# TODO(mmoss): Make only supports (theoretically) a single, global build
+# directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than
+# gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other
+# generators support, so this doesn't work yet for make.
+# TODO(mmoss) Make also has the issue that the top-level Makefile is written to
+# the "--depth" location, which is one level above 'src', but then this test
+# moves 'src' somewhere else, leaving the Makefile behind, so make can't find
+# its sources. I'm not sure if make is wrong for writing outside the current
+# directory, or if the test is wrong for assuming everything generated is under
+# the current directory.
+# Ninja and CMake do not support setting the build directory.
+test = TestGyp.TestGyp(formats=['!make', '!ninja', '!cmake'])
+
+test.run_gyp('prog1.gyp', '--depth=..', chdir='src')
+if test.format == 'msvs':
+ if test.uses_msbuild:
+ test.must_contain('src/prog1.vcxproj',
+ '<OutDir>..\\builddir\\Default\\</OutDir>')
+ else:
+ test.must_contain('src/prog1.vcproj',
+ 'OutputDirectory="..\\builddir\\Default\\"')
+
+test.relocate('src', 'relocate/src')
+
+test.subdir('relocate/builddir')
+
+# Make sure that all the built ../../etc. files only get put under builddir,
+# by making all of relocate read-only and then making only builddir writable.
+test.writable('relocate', False)
+test.writable('relocate/builddir', True)
+
+# Suppress the test infrastructure's setting SYMROOT on the command line.
+test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src')
+
+expect1 = """\
+Hello from prog1.c
+Hello from func1.c
+"""
+
+expect2 = """\
+Hello from subdir2/prog2.c
+Hello from func2.c
+"""
+
+expect3 = """\
+Hello from subdir2/subdir3/prog3.c
+Hello from func3.c
+"""
+
+expect4 = """\
+Hello from subdir2/subdir3/subdir4/prog4.c
+Hello from func4.c
+"""
+
+expect5 = """\
+Hello from subdir2/subdir3/subdir4/subdir5/prog5.c
+Hello from func5.c
+"""
+
+def run_builddir(prog, expect):
+ dir = 'relocate/builddir/Default/'
+ test.run(program=test.workpath(dir + prog), stdout=expect)
+
+run_builddir('prog1', expect1)
+run_builddir('prog2', expect2)
+run_builddir('prog3', expect3)
+run_builddir('prog4', expect4)
+run_builddir('prog5', expect5)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/builddir/src/builddir.gypi b/third_party/python/gyp/test/builddir/src/builddir.gypi
new file mode 100644
index 0000000000..ce175db8f8
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/builddir.gypi
@@ -0,0 +1,18 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'configurations': {
+ 'Default': {
+ 'msvs_configuration_attributes': {
+ 'OutputDirectory': '<(DEPTH)\\builddir/Default',
+ },
+ },
+ },
+ },
+ 'xcode_settings': {
+ 'SYMROOT': '<(DEPTH)/builddir',
+ },
+}
diff --git a/third_party/python/gyp/test/builddir/src/func1.c b/third_party/python/gyp/test/builddir/src/func1.c
new file mode 100644
index 0000000000..b8e6a06951
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/func1.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void func1(void)
+{
+ printf("Hello from func1.c\n");
+}
diff --git a/third_party/python/gyp/test/builddir/src/func2.c b/third_party/python/gyp/test/builddir/src/func2.c
new file mode 100644
index 0000000000..14aabac475
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/func2.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void func2(void)
+{
+ printf("Hello from func2.c\n");
+}
diff --git a/third_party/python/gyp/test/builddir/src/func3.c b/third_party/python/gyp/test/builddir/src/func3.c
new file mode 100644
index 0000000000..3b4edeae6d
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/func3.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void func3(void)
+{
+ printf("Hello from func3.c\n");
+}
diff --git a/third_party/python/gyp/test/builddir/src/func4.c b/third_party/python/gyp/test/builddir/src/func4.c
new file mode 100644
index 0000000000..732891b79a
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/func4.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void func4(void)
+{
+ printf("Hello from func4.c\n");
+}
diff --git a/third_party/python/gyp/test/builddir/src/func5.c b/third_party/python/gyp/test/builddir/src/func5.c
new file mode 100644
index 0000000000..18fdfabbbe
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/func5.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void func5(void)
+{
+ printf("Hello from func5.c\n");
+}
diff --git a/third_party/python/gyp/test/builddir/src/prog1.c b/third_party/python/gyp/test/builddir/src/prog1.c
new file mode 100644
index 0000000000..a32aaf04f9
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/prog1.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void func1(void);
+
+int main(void)
+{
+ printf("Hello from prog1.c\n");
+ func1();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/builddir/src/prog1.gyp b/third_party/python/gyp/test/builddir/src/prog1.gyp
new file mode 100644
index 0000000000..5b96f035ec
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/prog1.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ 'builddir.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'pull_in_all',
+ 'type': 'none',
+ 'dependencies': [
+ 'prog1',
+ 'subdir2/prog2.gyp:prog2',
+ 'subdir2/subdir3/prog3.gyp:prog3',
+ 'subdir2/subdir3/subdir4/prog4.gyp:prog4',
+ 'subdir2/subdir3/subdir4/subdir5/prog5.gyp:prog5',
+ ],
+ },
+ {
+ 'target_name': 'prog1',
+ 'type': 'executable',
+ 'sources': [
+ 'prog1.c',
+ 'func1.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/builddir/src/subdir2/prog2.c b/third_party/python/gyp/test/builddir/src/subdir2/prog2.c
new file mode 100644
index 0000000000..9d682cd783
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/subdir2/prog2.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void func2(void);
+
+int main(void)
+{
+ printf("Hello from subdir2/prog2.c\n");
+ func2();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/builddir/src/subdir2/prog2.gyp b/third_party/python/gyp/test/builddir/src/subdir2/prog2.gyp
new file mode 100644
index 0000000000..96299b646d
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/subdir2/prog2.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../builddir.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog2',
+ 'type': 'executable',
+ 'sources': [
+ 'prog2.c',
+ '../func2.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/builddir/src/subdir2/subdir3/prog3.c b/third_party/python/gyp/test/builddir/src/subdir2/subdir3/prog3.c
new file mode 100644
index 0000000000..da74965985
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/subdir2/subdir3/prog3.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void func3(void);
+
+int main(void)
+{
+ printf("Hello from subdir2/subdir3/prog3.c\n");
+ func3();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp b/third_party/python/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp
new file mode 100644
index 0000000000..d7df43c7bd
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../../builddir.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog3',
+ 'type': 'executable',
+ 'sources': [
+ 'prog3.c',
+ '../../func3.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c b/third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c
new file mode 100644
index 0000000000..5787d5fa43
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void func4(void);
+
+int main(void)
+{
+ printf("Hello from subdir2/subdir3/subdir4/prog4.c\n");
+ func4();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp b/third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp
new file mode 100644
index 0000000000..862a8a18cd
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../../../builddir.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog4',
+ 'type': 'executable',
+ 'sources': [
+ 'prog4.c',
+ '../../../func4.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c b/third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c
new file mode 100644
index 0000000000..c6e2ab521f
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void func5(void);
+
+int main(void)
+{
+ printf("Hello from subdir2/subdir3/subdir4/subdir5/prog5.c\n");
+ func5();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp b/third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp
new file mode 100644
index 0000000000..fe1c9cbf50
--- /dev/null
+++ b/third_party/python/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../../../../builddir.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog5',
+ 'type': 'executable',
+ 'sources': [
+ 'prog5.c',
+ '../../../../func5.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/cflags/cflags.c b/third_party/python/gyp/test/cflags/cflags.c
new file mode 100644
index 0000000000..0a02ba9074
--- /dev/null
+++ b/third_party/python/gyp/test/cflags/cflags.c
@@ -0,0 +1,15 @@
+/* Copyright (c) 2010 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(void)
+{
+#ifdef FOO
+ printf("FOO defined\n");
+#else
+ printf("FOO not defined\n");
+#endif
+ return 0;
+}
diff --git a/third_party/python/gyp/test/cflags/cflags.gyp b/third_party/python/gyp/test/cflags/cflags.gyp
new file mode 100644
index 0000000000..2840dc6318
--- /dev/null
+++ b/third_party/python/gyp/test/cflags/cflags.gyp
@@ -0,0 +1,23 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'cflags',
+ 'type': 'executable',
+ 'sources': [
+ 'cflags.c',
+ ],
+ },
+ {
+ 'target_name': 'cflags_host',
+ 'toolsets': ['host'],
+ 'type': 'executable',
+ 'sources': [
+ 'cflags.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/cflags/gyptest-cflags.py b/third_party/python/gyp/test/cflags/gyptest-cflags.py
new file mode 100755
index 0000000000..f4efccba9b
--- /dev/null
+++ b/third_party/python/gyp/test/cflags/gyptest-cflags.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies the use of the environment during regeneration when the gyp file
+changes, specifically via build of an executable with C preprocessor
+definition specified by CFLAGS.
+
+In this test, gyp and build both run in same local environment.
+"""
+
+import TestGyp
+
+# CPPFLAGS works in ninja but not make; CFLAGS works in both
+FORMATS = ('make', 'ninja')
+
+test = TestGyp.TestGyp(formats=FORMATS)
+
+# First set CFLAGS to blank in case the platform doesn't support unsetenv.
+with TestGyp.LocalEnv({'CFLAGS': '',
+ 'GYP_CROSSCOMPILE': '1'}):
+ test.run_gyp('cflags.gyp')
+ test.build('cflags.gyp')
+
+expect = """FOO not defined\n"""
+test.run_built_executable('cflags', stdout=expect)
+test.run_built_executable('cflags_host', stdout=expect)
+
+test.sleep()
+
+with TestGyp.LocalEnv({'CFLAGS': '-DFOO=1',
+ 'GYP_CROSSCOMPILE': '1'}):
+ test.run_gyp('cflags.gyp')
+ test.build('cflags.gyp')
+
+expect = """FOO defined\n"""
+test.run_built_executable('cflags', stdout=expect)
+
+# Environment variable CFLAGS shouldn't influence the flags for the host.
+expect = """FOO not defined\n"""
+test.run_built_executable('cflags_host', stdout=expect)
+
+test.sleep()
+
+with TestGyp.LocalEnv({'CFLAGS_host': '-DFOO=1',
+ 'GYP_CROSSCOMPILE': '1'}):
+ test.run_gyp('cflags.gyp')
+ test.build('cflags.gyp')
+
+# Environment variable CFLAGS_host should influence the flags for the host.
+expect = """FOO defined\n"""
+test.run_built_executable('cflags_host', stdout=expect)
+
+test.sleep()
+
+with TestGyp.LocalEnv({'CFLAGS': ''}):
+ test.run_gyp('cflags.gyp')
+ test.build('cflags.gyp')
+
+expect = """FOO not defined\n"""
+test.run_built_executable('cflags', stdout=expect)
+
+test.sleep()
+
+with TestGyp.LocalEnv({'CFLAGS': '-DFOO=1'}):
+ test.run_gyp('cflags.gyp')
+ test.build('cflags.gyp')
+
+expect = """FOO defined\n"""
+test.run_built_executable('cflags', stdout=expect)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/compilable/gyptest-headers.py b/third_party/python/gyp/test/compilable/gyptest-headers.py
new file mode 100755
index 0000000000..91760216fb
--- /dev/null
+++ b/third_party/python/gyp/test/compilable/gyptest-headers.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that .hpp files are ignored when included in the source list on all
+platforms.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('headers.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('headers.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from program.c
+Hello from lib1.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/compilable/src/headers.gyp b/third_party/python/gyp/test/compilable/src/headers.gyp
new file mode 100644
index 0000000000..b6c2a8857b
--- /dev/null
+++ b/third_party/python/gyp/test/compilable/src/headers.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'dependencies': [
+ 'lib1'
+ ],
+ 'sources': [
+ 'program.cpp',
+ ],
+ },
+ {
+ 'target_name': 'lib1',
+ 'type': 'static_library',
+ 'sources': [
+ 'lib1.hpp',
+ 'lib1.cpp',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/compilable/src/lib1.cpp b/third_party/python/gyp/test/compilable/src/lib1.cpp
new file mode 100644
index 0000000000..51bc31a40b
--- /dev/null
+++ b/third_party/python/gyp/test/compilable/src/lib1.cpp
@@ -0,0 +1,7 @@
+#include <stdio.h>
+#include "lib1.hpp"
+
+void lib1_function(void) {
+ fprintf(stdout, "Hello from lib1.c\n");
+ fflush(stdout);
+}
diff --git a/third_party/python/gyp/test/compilable/src/lib1.hpp b/third_party/python/gyp/test/compilable/src/lib1.hpp
new file mode 100644
index 0000000000..72e63e8acd
--- /dev/null
+++ b/third_party/python/gyp/test/compilable/src/lib1.hpp
@@ -0,0 +1,6 @@
+#ifndef _lib1_hpp
+#define _lib1_hpp
+
+extern void lib1_function(void);
+
+#endif
diff --git a/third_party/python/gyp/test/compilable/src/program.cpp b/third_party/python/gyp/test/compilable/src/program.cpp
new file mode 100644
index 0000000000..8af2c9b6ff
--- /dev/null
+++ b/third_party/python/gyp/test/compilable/src/program.cpp
@@ -0,0 +1,9 @@
+#include <stdio.h>
+#include "lib1.hpp"
+
+int main(void) {
+ fprintf(stdout, "Hello from program.c\n");
+ fflush(stdout);
+ lib1_function();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/compiler-override/compiler-exe.gyp b/third_party/python/gyp/test/compiler-override/compiler-exe.gyp
new file mode 100644
index 0000000000..c2f3002f20
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/compiler-exe.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'test.c',
+ 'cxxtest.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/compiler-override/compiler-global-settings.gyp.in b/third_party/python/gyp/test/compiler-override/compiler-global-settings.gyp.in
new file mode 100644
index 0000000000..ca13a53e8d
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/compiler-global-settings.gyp.in
@@ -0,0 +1,34 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ # PYTHON and PWD are replaced by the test code before this
+ # gyp file runs
+ 'make_global_settings': [
+ ['CC', r'$PYTHON $PWD/my_cc.py FOO'],
+ ['CXX', r'$PYTHON $PWD/my_cxx.py FOO'],
+ ['CC.host', r'$PYTHON $PWD/my_cc.py BAR'],
+ ['CXX.host', r'$PYTHON $PWD/my_cxx.py BAR'],
+
+ ['LD', r'$PYTHON $PWD/my_ld.py FOO_LINK'],
+ ['LD.host', r'$PYTHON $PWD/my_ld.py BAR_LINK'],
+ ['LINK', r'$PYTHON $PWD/my_ld.py FOO_LINK'],
+ ['LINK.host', r'$PYTHON $PWD/my_ld.py BAR_LINK'],
+ ],
+
+  # The above global settings should mean that
+  # these targets are built using the fake
+  # toolchain above.
+ 'targets': [
+ {
+ 'toolset': '$TOOLSET',
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'test.c',
+ 'cxxtest.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/compiler-override/compiler-host.gyp b/third_party/python/gyp/test/compiler-override/compiler-host.gyp
new file mode 100644
index 0000000000..ab3d247e0b
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/compiler-host.gyp
@@ -0,0 +1,17 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'toolset': 'host',
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'test.c',
+ 'cxxtest.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/compiler-override/compiler-shared-lib.gyp b/third_party/python/gyp/test/compiler-override/compiler-shared-lib.gyp
new file mode 100644
index 0000000000..d3e4316135
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/compiler-shared-lib.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello-lib',
+ 'type': 'shared_library',
+ 'sources': [
+ 'test.c',
+ 'cxxtest.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/compiler-override/cxxtest.cc b/third_party/python/gyp/test/compiler-override/cxxtest.cc
new file mode 100644
index 0000000000..517a353619
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/cxxtest.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file deliberately fails to compile, as it should never be passed to
+// the actual compiler.
+#error Should not be passed to a real compiler
diff --git a/third_party/python/gyp/test/compiler-override/gyptest-compiler-env-toolchain.py b/third_party/python/gyp/test/compiler-override/gyptest-compiler-env-toolchain.py
new file mode 100644
index 0000000000..2361d0c7c2
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/gyptest-compiler-env-toolchain.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+Verifies that the user can override the compiler and related tools using the
+CC/CXX/NM/READELF environment variables.
+"""
+
+import TestGyp
+import os
+import copy
+import sys
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+if sys.platform == 'win32':
+  # Cross-compiling is not supported by ninja on Windows,
+  # and make is not supported on Windows at all.
+ sys.exit(0)
+
+# Clear any existing compiler related env vars.
+for key in ['CC', 'CXX', 'LINK', 'CC_host', 'CXX_host', 'LINK_host',
+ 'NM_target', 'READELF_target']:
+ if key in os.environ:
+ del os.environ[key]
+
+
+def CheckCompiler(test, gypfile, check_for, run_gyp):
+ if run_gyp:
+ test.run_gyp(gypfile)
+ test.build(gypfile)
+
+ test.must_contain_all_lines(test.stdout(), check_for)
+
+
+test = TestGyp.TestGyp(formats=['ninja'])
+# Must set the test format to something with a flavor (the part after the '-')
+# in order to test the desired behavior. Since we want to run a non-host
+# toolchain, we have to set the flavor to something that the ninja generator
+# doesn't know about, so it doesn't default to the host-specific tools (e.g.,
+# 'otool' on mac to generate the .TOC).
+#
+# Note that we can't just pass formats=['ninja-some_toolchain'] to the
+# constructor above, because then this test wouldn't be recognized as a ninja
+# format test.
+test.formats = ['ninja-my_flavor' if f == 'ninja' else f for f in test.formats]
+
+
+def TestTargetOverrideSharedLib():
+  # The standard output from nm and readelf is redirected to files, so we
+  # can't expect it to appear on stdout. Instead, check for the files they
+  # create to see if they actually ran.
+ expected = ['my_cc.py', 'my_cxx.py', 'FOO']
+
+  # Check that CC, CXX, NM and READELF set the target compiler and tools.
+ env = {'CC': 'python %s/my_cc.py FOO' % here,
+ 'CXX': 'python %s/my_cxx.py FOO' % here,
+ 'NM': 'python %s/my_nm.py' % here,
+ 'READELF': 'python %s/my_readelf.py' % here}
+
+ with TestGyp.LocalEnv(env):
+ CheckCompiler(test, 'compiler-shared-lib.gyp', expected, True)
+ test.must_contain(test.built_file_path('RAN_MY_NM'), 'RAN_MY_NM')
+ test.must_contain(test.built_file_path('RAN_MY_READELF'), 'RAN_MY_READELF')
+ test.unlink(test.built_file_path('RAN_MY_NM'))
+ test.unlink(test.built_file_path('RAN_MY_READELF'))
+
+  # Run the same tests once the environment has been restored. The generated
+  # projects should have embedded all the settings in the project files, so
+  # the results should be the same.
+ CheckCompiler(test, 'compiler-shared-lib.gyp', expected, False)
+ test.must_contain(test.built_file_path('RAN_MY_NM'), 'RAN_MY_NM')
+ test.must_contain(test.built_file_path('RAN_MY_READELF'), 'RAN_MY_READELF')
+
+
+TestTargetOverrideSharedLib()
+test.pass_test()
diff --git a/third_party/python/gyp/test/compiler-override/gyptest-compiler-env.py b/third_party/python/gyp/test/compiler-override/gyptest-compiler-env.py
new file mode 100755
index 0000000000..bb38b6e55b
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/gyptest-compiler-env.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+Verifies that the user can override the compiler and linker using CC/CXX/LD
+environment variables.
+"""
+
+import TestGyp
+import os
+import copy
+import sys
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+if sys.platform == 'win32':
+  # Cross-compiling is not supported by ninja on Windows,
+  # and make is not supported on Windows at all.
+ sys.exit(0)
+
+# Clear any existing compiler related env vars.
+for key in ['CC', 'CXX', 'LINK', 'CC_host', 'CXX_host', 'LINK_host']:
+ if key in os.environ:
+ del os.environ[key]
+
+
+def CheckCompiler(test, gypfile, check_for, run_gyp):
+ if run_gyp:
+ test.run_gyp(gypfile)
+ test.build(gypfile)
+
+ test.must_contain_all_lines(test.stdout(), check_for)
+
+
+test = TestGyp.TestGyp(formats=['ninja', 'make'])
+
+def TestTargetOverride():
+ expected = ['my_cc.py', 'my_cxx.py', 'FOO' ]
+
+ # ninja just uses $CC / $CXX as linker.
+ if test.format not in ['ninja', 'xcode-ninja']:
+ expected.append('FOO_LINK')
+
+ # Check that CC, CXX and LD set target compiler
+ oldenv = os.environ.copy()
+ try:
+ os.environ['CC'] = 'python %s/my_cc.py FOO' % here
+ os.environ['CXX'] = 'python %s/my_cxx.py FOO' % here
+ os.environ['LINK'] = 'python %s/my_ld.py FOO_LINK' % here
+
+ CheckCompiler(test, 'compiler-exe.gyp', expected, True)
+ finally:
+ os.environ.clear()
+ os.environ.update(oldenv)
+
+  # Run the same tests once the environment has been restored. The
+  # generated projects should have embedded all the settings in the
+  # project files, so the results should be the same.
+ CheckCompiler(test, 'compiler-exe.gyp', expected, False)
+
+
+def TestTargetOverrideCompilerOnly():
+  # Same test again, but with only CC and CXX set and no linker override.
+ oldenv = os.environ.copy()
+ try:
+ os.environ['CC'] = 'python %s/my_cc.py FOO' % here
+ os.environ['CXX'] = 'python %s/my_cxx.py FOO' % here
+
+ CheckCompiler(test, 'compiler-exe.gyp',
+ ['my_cc.py', 'my_cxx.py', 'FOO'],
+ True)
+ finally:
+ os.environ.clear()
+ os.environ.update(oldenv)
+
+  # Run the same tests once the environment has been restored. The
+  # generated projects should have embedded all the settings in the
+  # project files, so the results should be the same.
+ CheckCompiler(test, 'compiler-exe.gyp',
+ ['my_cc.py', 'my_cxx.py', 'FOO'],
+ False)
+
+
+def TestHostOverride():
+ expected = ['my_cc.py', 'my_cxx.py', 'HOST' ]
+ if test.format != 'ninja': # ninja just uses $CC / $CXX as linker.
+ expected.append('HOST_LINK')
+
+  # Check that CC_host, CXX_host and LINK_host set the host compiler and linker.
+ oldenv = os.environ.copy()
+ try:
+ os.environ['CC_host'] = 'python %s/my_cc.py HOST' % here
+ os.environ['CXX_host'] = 'python %s/my_cxx.py HOST' % here
+ os.environ['LINK_host'] = 'python %s/my_ld.py HOST_LINK' % here
+ CheckCompiler(test, 'compiler-host.gyp', expected, True)
+ finally:
+ os.environ.clear()
+ os.environ.update(oldenv)
+
+  # Run the same tests once the environment has been restored. The
+  # generated projects should have embedded all the settings in the
+  # project files, so the results should be the same.
+ CheckCompiler(test, 'compiler-host.gyp', expected, False)
+
+
+TestTargetOverride()
+TestTargetOverrideCompilerOnly()
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/compiler-override/gyptest-compiler-global-settings.py b/third_party/python/gyp/test/compiler-override/gyptest-compiler-global-settings.py
new file mode 100755
index 0000000000..9f062a4fef
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/gyptest-compiler-global-settings.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+Verifies that make_global_settings can be used to override the
+compiler settings.
+"""
+from __future__ import print_function
+
+import TestGyp
+import os
+import copy
+import sys
+from string import Template
+
+
+if sys.platform == 'win32':
+  # Cross-compiling is not supported by ninja on Windows,
+  # and make is not supported on Windows at all.
+ sys.exit(0)
+
+print("This test is currently disabled: https://crbug.com/483696.")
+sys.exit(0)
+
+test = TestGyp.TestGyp(formats=['ninja', 'make'])
+
+gypfile = 'compiler-global-settings.gyp'
+
+replacements = { 'PYTHON': '/usr/bin/python', 'PWD': os.getcwd()}
+
+# Process the .in gyp file to produce the final gyp file
+# since we need to include absolute paths in the make_global_settings
+# section.
+replacements['TOOLSET'] = 'target'
+s = Template(open(gypfile + '.in').read())
+output = open(gypfile, 'w')
+output.write(s.substitute(replacements))
+output.close()
+
+old_env = dict(os.environ)
+os.environ['GYP_CROSSCOMPILE'] = '1'
+test.run_gyp(gypfile)
+os.environ.clear()
+os.environ.update(old_env)
+
+test.build(gypfile)
+test.must_contain_all_lines(test.stdout(), ['my_cc.py', 'my_cxx.py', 'FOO'])
+
+# The xcode generator chokes on the 'host' toolset. Skip the rest of
+# this test (cf. https://code.google.com/p/gyp/issues/detail?id=454).
+if test.format == 'xcode-ninja':
+ test.pass_test()
+
+# Same again but with the host toolset.
+replacements['TOOLSET'] = 'host'
+s = Template(open(gypfile + '.in').read())
+output = open(gypfile, 'w')
+output.write(s.substitute(replacements))
+output.close()
+
+old_env = dict(os.environ)
+os.environ['GYP_CROSSCOMPILE'] = '1'
+test.run_gyp(gypfile)
+os.environ.clear()
+os.environ.update(old_env)
+
+test.build(gypfile)
+test.must_contain_all_lines(test.stdout(), ['my_cc.py', 'my_cxx.py', 'BAR'])
+
+# Check that CC_host overrides make_global_settings
+old_env = dict(os.environ)
+os.environ['CC_host'] = '%s %s/my_cc.py SECRET' % (replacements['PYTHON'],
+ replacements['PWD'])
+test.run_gyp(gypfile)
+os.environ.clear()
+os.environ.update(old_env)
+
+test.build(gypfile)
+test.must_contain_all_lines(test.stdout(), ['SECRET', 'my_cxx.py', 'BAR'])
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/compiler-override/my_cc.py b/third_party/python/gyp/test/compiler-override/my_cc.py
new file mode 100755
index 0000000000..09e1d3c58d
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/my_cc.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import print_function
+import sys
+print(sys.argv)
diff --git a/third_party/python/gyp/test/compiler-override/my_cxx.py b/third_party/python/gyp/test/compiler-override/my_cxx.py
new file mode 100755
index 0000000000..09e1d3c58d
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/my_cxx.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import print_function
+import sys
+print(sys.argv)
diff --git a/third_party/python/gyp/test/compiler-override/my_ld.py b/third_party/python/gyp/test/compiler-override/my_ld.py
new file mode 100755
index 0000000000..09e1d3c58d
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/my_ld.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import print_function
+import sys
+print(sys.argv)
diff --git a/third_party/python/gyp/test/compiler-override/my_nm.py b/third_party/python/gyp/test/compiler-override/my_nm.py
new file mode 100755
index 0000000000..2c4e678110
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/my_nm.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import print_function
+import sys
+print(sys.argv)
+with open('RAN_MY_NM', 'w') as f:
+ f.write('RAN_MY_NM')
diff --git a/third_party/python/gyp/test/compiler-override/my_readelf.py b/third_party/python/gyp/test/compiler-override/my_readelf.py
new file mode 100755
index 0000000000..626665435e
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/my_readelf.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import print_function
+import sys
+print(sys.argv)
+with open('RAN_MY_READELF', 'w') as f:
+ f.write('RAN_MY_READELF')
diff --git a/third_party/python/gyp/test/compiler-override/test.c b/third_party/python/gyp/test/compiler-override/test.c
new file mode 100644
index 0000000000..517a353619
--- /dev/null
+++ b/third_party/python/gyp/test/compiler-override/test.c
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file deliberately fails to compile, as it should never be passed to
+// the actual compiler.
+#error Should not be passed to a real compiler
diff --git a/third_party/python/gyp/test/conditions/elseif/elseif.gyp b/third_party/python/gyp/test/conditions/elseif/elseif.gyp
new file mode 100644
index 0000000000..6367ff7d7a
--- /dev/null
+++ b/third_party/python/gyp/test/conditions/elseif/elseif.gyp
@@ -0,0 +1,43 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'variables': { 'test_var': 0 },
+ 'target_name': 'program0',
+ 'type': 'executable',
+ 'sources': [ 'program.cc' ],
+ 'includes': [ 'elseif_conditions.gypi' ],
+ },
+ {
+ 'variables': { 'test_var': 1 },
+ 'target_name': 'program1',
+ 'type': 'executable',
+ 'sources': [ 'program.cc' ],
+ 'includes': [ 'elseif_conditions.gypi' ],
+ },
+ {
+ 'variables': { 'test_var': 2 },
+ 'target_name': 'program2',
+ 'type': 'executable',
+ 'sources': [ 'program.cc' ],
+ 'includes': [ 'elseif_conditions.gypi' ],
+ },
+ {
+ 'variables': { 'test_var': 3 },
+ 'target_name': 'program3',
+ 'type': 'executable',
+ 'sources': [ 'program.cc' ],
+ 'includes': [ 'elseif_conditions.gypi' ],
+ },
+ {
+ 'variables': { 'test_var': 4 },
+ 'target_name': 'program4',
+ 'type': 'executable',
+ 'sources': [ 'program.cc' ],
+ 'includes': [ 'elseif_conditions.gypi' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/conditions/elseif/elseif_bad1.gyp b/third_party/python/gyp/test/conditions/elseif/elseif_bad1.gyp
new file mode 100644
index 0000000000..35c8455cca
--- /dev/null
+++ b/third_party/python/gyp/test/conditions/elseif/elseif_bad1.gyp
@@ -0,0 +1,20 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Trigger an error because of two consecutive string conditions.
+
+{
+ 'targets': [
+ {
+ 'variables': { 'test_var': 0 },
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'sources': [ 'program.cc' ],
+ 'conditions': [
+ ['test_var==0', 'test_var==1', {
+ }],
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/conditions/elseif/elseif_bad2.gyp b/third_party/python/gyp/test/conditions/elseif/elseif_bad2.gyp
new file mode 100644
index 0000000000..b529f292c0
--- /dev/null
+++ b/third_party/python/gyp/test/conditions/elseif/elseif_bad2.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Trigger an error because of two consecutive string conditions, even if the
+# conditions are not actually evaluated.
+
+{
+ 'targets': [
+ {
+ 'variables': { 'test_var': 0 },
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'sources': [ 'program.cc' ],
+ 'conditions': [
+ ['test_var==0', {
+ }, 'test_var==1', 'test_var==2', {
+ }],
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/conditions/elseif/elseif_bad3.gyp b/third_party/python/gyp/test/conditions/elseif/elseif_bad3.gyp
new file mode 100644
index 0000000000..126e186053
--- /dev/null
+++ b/third_party/python/gyp/test/conditions/elseif/elseif_bad3.gyp
@@ -0,0 +1,23 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Trigger an error because there are unexpected trailing items in a condition.
+
+{
+ 'targets': [
+ {
+ 'variables': { 'test_var': 0 },
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'sources': [ 'program.cc' ],
+ 'conditions': [
+        ['test_var==0', {
+ }, 'test_var==1', {
+ }, {
+ }, 'test_var==2', {
+ }],
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/conditions/elseif/elseif_conditions.gypi b/third_party/python/gyp/test/conditions/elseif/elseif_conditions.gypi
new file mode 100644
index 0000000000..4310ccc031
--- /dev/null
+++ b/third_party/python/gyp/test/conditions/elseif/elseif_conditions.gypi
@@ -0,0 +1,15 @@
+{
+ 'conditions': [
+ ['test_var==0', {
+ 'defines': ['FOO="first_if"'],
+ }, 'test_var==1', {
+ 'defines': ['FOO="first_else_if"'],
+ }, 'test_var==2', {
+ 'defines': ['FOO="second_else_if"'],
+ }, 'test_var==3', {
+ 'defines': ['FOO="third_else_if"'],
+ }, {
+ 'defines': ['FOO="last_else"'],
+ }],
+ ],
+}
diff --git a/third_party/python/gyp/test/conditions/elseif/gyptest_elseif.py b/third_party/python/gyp/test/conditions/elseif/gyptest_elseif.py
new file mode 100644
index 0000000000..9d030cf3fe
--- /dev/null
+++ b/third_party/python/gyp/test/conditions/elseif/gyptest_elseif.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that "else-if" conditions work.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('elseif.gyp')
+test.build('elseif.gyp', test.ALL)
+test.run_built_executable(
+ 'program0', stdout='first_if\n')
+test.run_built_executable(
+ 'program1', stdout='first_else_if\n')
+test.run_built_executable(
+ 'program2', stdout='second_else_if\n')
+test.run_built_executable(
+ 'program3', stdout='third_else_if\n')
+test.run_built_executable(
+ 'program4', stdout='last_else\n')
+
+# Verify that bad condition blocks fail at gyp time.
+test.run_gyp('elseif_bad1.gyp', status=1, stderr=None)
+test.run_gyp('elseif_bad2.gyp', status=1, stderr=None)
+test.run_gyp('elseif_bad3.gyp', status=1, stderr=None)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/conditions/elseif/program.cc b/third_party/python/gyp/test/conditions/elseif/program.cc
new file mode 100644
index 0000000000..147fe2f75e
--- /dev/null
+++ b/third_party/python/gyp/test/conditions/elseif/program.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+int main() {
+ printf("%s\n", FOO);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/configurations/basics/configurations.c b/third_party/python/gyp/test/configurations/basics/configurations.c
new file mode 100644
index 0000000000..39e13c9c83
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/basics/configurations.c
@@ -0,0 +1,15 @@
+#include <stdio.h>
+
+int main(void)
+{
+#ifdef FOO
+ printf("Foo configuration\n");
+#endif
+#ifdef DEBUG
+ printf("Debug configuration\n");
+#endif
+#ifdef RELEASE
+ printf("Release configuration\n");
+#endif
+ return 0;
+}
diff --git a/third_party/python/gyp/test/configurations/basics/configurations.gyp b/third_party/python/gyp/test/configurations/basics/configurations.gyp
new file mode 100644
index 0000000000..93f1d8d5c7
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/basics/configurations.gyp
@@ -0,0 +1,32 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'executable',
+ 'sources': [
+ 'configurations.c',
+ ],
+ 'configurations': {
+ 'Debug': {
+ 'defines': [
+ 'DEBUG',
+ ],
+ },
+ 'Release': {
+ 'defines': [
+ 'RELEASE',
+ ],
+ },
+ 'Foo': {
+ 'defines': [
+ 'FOO',
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/basics/gyptest-configurations.py b/third_party/python/gyp/test/configurations/basics/gyptest-configurations.py
new file mode 100755
index 0000000000..27cd2e87d2
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/basics/gyptest-configurations.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of an executable in three different configurations.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('configurations.gyp')
+
+test.set_configuration('Release')
+test.build('configurations.gyp')
+test.run_built_executable('configurations', stdout="Release configuration\n")
+
+test.set_configuration('Debug')
+test.build('configurations.gyp')
+test.run_built_executable('configurations', stdout="Debug configuration\n")
+
+test.set_configuration('Foo')
+test.build('configurations.gyp')
+test.run_built_executable('configurations', stdout="Foo configuration\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/configurations/inheritance/configurations.c b/third_party/python/gyp/test/configurations/inheritance/configurations.c
new file mode 100644
index 0000000000..ebb9f8450e
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/inheritance/configurations.c
@@ -0,0 +1,21 @@
+#include <stdio.h>
+
+int main(void)
+{
+#ifdef BASE
+ printf("Base configuration\n");
+#endif
+#ifdef COMMON
+ printf("Common configuration\n");
+#endif
+#ifdef COMMON2
+ printf("Common2 configuration\n");
+#endif
+#ifdef DEBUG
+ printf("Debug configuration\n");
+#endif
+#ifdef RELEASE
+ printf("Release configuration\n");
+#endif
+ return 0;
+}
diff --git a/third_party/python/gyp/test/configurations/inheritance/configurations.gyp b/third_party/python/gyp/test/configurations/inheritance/configurations.gyp
new file mode 100644
index 0000000000..9441376b4d
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/inheritance/configurations.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'configurations': {
+ 'Base': {
+ 'abstract': 1,
+ 'defines': ['BASE'],
+ },
+ 'Common': {
+ 'abstract': 1,
+ 'inherit_from': ['Base'],
+ 'defines': ['COMMON'],
+ },
+ 'Common2': {
+ 'abstract': 1,
+ 'defines': ['COMMON2'],
+ },
+ 'Debug': {
+ 'inherit_from': ['Common', 'Common2'],
+ 'defines': ['DEBUG'],
+ },
+ 'Release': {
+ 'inherit_from': ['Common', 'Common2'],
+ 'defines': ['RELEASE'],
+ },
+ },
+ },
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'executable',
+ 'sources': [
+ 'configurations.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/inheritance/duplicates.gyp b/third_party/python/gyp/test/configurations/inheritance/duplicates.gyp
new file mode 100644
index 0000000000..6930ce3b39
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/inheritance/duplicates.gyp
@@ -0,0 +1,27 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'default_configuration': 'A',
+ 'configurations': {
+ 'A': {
+ 'defines': ['SOMETHING'],
+ },
+ 'B': {
+ 'inherit_from': ['A'],
+ },
+ },
+ 'cflags': ['-g'],
+ },
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'executable',
+ 'sources': [
+ 'configurations.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/inheritance/duplicates.gypd.golden b/third_party/python/gyp/test/configurations/inheritance/duplicates.gypd.golden
new file mode 100644
index 0000000000..719b70861e
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/inheritance/duplicates.gypd.golden
@@ -0,0 +1,12 @@
+{'_DEPTH': '.',
+ 'included_files': ['duplicates.gyp'],
+ 'targets': [{'configurations': {'A': {'cflags': ['-g'],
+ 'defines': ['SOMETHING']},
+ 'B': {'cflags': ['-g'],
+ 'defines': ['SOMETHING'],
+ 'inherit_from': ['A']}},
+ 'default_configuration': 'A',
+ 'sources': ['configurations.c'],
+ 'target_name': 'configurations',
+ 'toolset': 'target',
+ 'type': 'executable'}]}
diff --git a/third_party/python/gyp/test/configurations/inheritance/gyptest-duplicates.py b/third_party/python/gyp/test/configurations/inheritance/gyptest-duplicates.py
new file mode 100755
index 0000000000..f015638b6d
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/inheritance/gyptest-duplicates.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that configurations do not duplicate other settings.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+test = TestGyp.TestGyp(format='gypd')
+
+test.run_gyp('duplicates.gyp')
+
+# Verify the duplicates.gypd against the checked-in expected contents.
+#
+# Normally, we should canonicalize line endings in the expected
+# contents file by setting the Subversion svn:eol-style to native,
+# but that would still fail if multiple systems are sharing a single
+# workspace on a network-mounted file system. Consequently, we
+# massage the Windows line endings ('\r\n') in the output to the
+# checked-in UNIX endings ('\n').
+
+contents = test.read('duplicates.gypd').replace(
+ '\r', '').replace('\\\\', '/')
+expect = test.read('duplicates.gypd.golden').replace('\r', '')
+if not test.match(contents, expect):
+ print("Unexpected contents of `duplicates.gypd'")
+ test.diff(expect, contents, 'duplicates.gypd ')
+ test.fail_test()
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/configurations/inheritance/gyptest-inheritance.py b/third_party/python/gyp/test/configurations/inheritance/gyptest-inheritance.py
new file mode 100755
index 0000000000..22c73a3754
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/inheritance/gyptest-inheritance.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies inheritance of configuration settings when building an executable.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('configurations.gyp')
+
+test.set_configuration('Release')
+test.build('configurations.gyp')
+test.run_built_executable('configurations',
+ stdout=('Base configuration\n'
+ 'Common configuration\n'
+ 'Common2 configuration\n'
+ 'Release configuration\n'))
+
+test.set_configuration('Debug')
+test.build('configurations.gyp')
+test.run_built_executable('configurations',
+ stdout=('Base configuration\n'
+ 'Common configuration\n'
+ 'Common2 configuration\n'
+ 'Debug configuration\n'))
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/configurations/invalid/actions.gyp b/third_party/python/gyp/test/configurations/invalid/actions.gyp
new file mode 100644
index 0000000000..a6e42089eb
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/invalid/actions.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'actions': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/invalid/all_dependent_settings.gyp b/third_party/python/gyp/test/configurations/invalid/all_dependent_settings.gyp
new file mode 100644
index 0000000000..b16a245df5
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/invalid/all_dependent_settings.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'all_dependent_settings': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/invalid/configurations.gyp b/third_party/python/gyp/test/configurations/invalid/configurations.gyp
new file mode 100644
index 0000000000..2cfc960049
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/invalid/configurations.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'configurations': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/invalid/dependencies.gyp b/third_party/python/gyp/test/configurations/invalid/dependencies.gyp
new file mode 100644
index 0000000000..74633f3f11
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/invalid/dependencies.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'dependencies': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/invalid/direct_dependent_settings.gyp b/third_party/python/gyp/test/configurations/invalid/direct_dependent_settings.gyp
new file mode 100644
index 0000000000..8a0f2e95ea
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/invalid/direct_dependent_settings.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'direct_dependent_settings': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/invalid/gyptest-configurations.py b/third_party/python/gyp/test/configurations/invalid/gyptest-configurations.py
new file mode 100755
index 0000000000..bd844b95dd
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/invalid/gyptest-configurations.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that invalid keys in a 'configurations' dictionary cause gyp to fail.
+"""
+
+import TestGyp
+
+# Keys that do not belong inside a configuration dictionary.
+invalid_configuration_keys = [
+ 'actions',
+ 'all_dependent_settings',
+ 'configurations',
+ 'dependencies',
+ 'direct_dependent_settings',
+ 'libraries',
+ 'link_settings',
+ 'sources',
+ 'standalone_static_library',
+ 'target_name',
+ 'type',
+]
+
+test = TestGyp.TestGyp()
+
+for test_key in invalid_configuration_keys:
+ test.run_gyp('%s.gyp' % test_key, status=1, stderr=None)
+ expect = ['%s not allowed in the Debug configuration, found in target '
+ '%s.gyp:configurations#target' % (test_key, test_key)]
+ test.must_contain_all_lines(test.stderr(), expect)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/configurations/invalid/libraries.gyp b/third_party/python/gyp/test/configurations/invalid/libraries.gyp
new file mode 100644
index 0000000000..c4014ed406
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/invalid/libraries.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'libraries': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/invalid/link_settings.gyp b/third_party/python/gyp/test/configurations/invalid/link_settings.gyp
new file mode 100644
index 0000000000..2f0e1c46f5
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/invalid/link_settings.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'link_settings': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/invalid/sources.gyp b/third_party/python/gyp/test/configurations/invalid/sources.gyp
new file mode 100644
index 0000000000..b38cca0381
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/invalid/sources.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'sources': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/invalid/standalone_static_library.gyp b/third_party/python/gyp/test/configurations/invalid/standalone_static_library.gyp
new file mode 100644
index 0000000000..2edb9febd6
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/invalid/standalone_static_library.gyp
@@ -0,0 +1,17 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'standalone_static_library': 1,
+ },
+ }
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/invalid/target_name.gyp b/third_party/python/gyp/test/configurations/invalid/target_name.gyp
new file mode 100644
index 0000000000..83baad95d6
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/invalid/target_name.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'target_name': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/invalid/type.gyp b/third_party/python/gyp/test/configurations/invalid/type.gyp
new file mode 100644
index 0000000000..bc55898b89
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/invalid/type.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'none',
+ 'configurations': {
+ 'Debug': {
+ 'type': [
+ ],
+ },
+ }
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/target_platform/configurations.gyp b/third_party/python/gyp/test/configurations/target_platform/configurations.gyp
new file mode 100644
index 0000000000..d15429f4e5
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/target_platform/configurations.gyp
@@ -0,0 +1,58 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'configurations': {
+ 'Debug_Win32': {
+ 'msvs_configuration_platform': 'Win32',
+ },
+ 'Debug_x64': {
+ 'msvs_configuration_platform': 'x64',
+ },
+ },
+ },
+ 'targets': [
+ {
+ 'target_name': 'left',
+ 'type': 'static_library',
+ 'sources': [
+ 'left.c',
+ ],
+ 'configurations': {
+ 'Debug_Win32': {
+ 'msvs_target_platform': 'x64',
+ },
+ },
+ },
+ {
+ 'target_name': 'right',
+ 'type': 'static_library',
+ 'sources': [
+ 'right.c',
+ ],
+ },
+ {
+ 'target_name': 'front_left',
+ 'type': 'executable',
+ 'dependencies': ['left'],
+ 'sources': [
+ 'front.c',
+ ],
+ 'configurations': {
+ 'Debug_Win32': {
+ 'msvs_target_platform': 'x64',
+ },
+ },
+ },
+ {
+ 'target_name': 'front_right',
+ 'type': 'executable',
+ 'dependencies': ['right'],
+ 'sources': [
+ 'front.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/target_platform/front.c b/third_party/python/gyp/test/configurations/target_platform/front.c
new file mode 100644
index 0000000000..7a91689ff5
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/target_platform/front.c
@@ -0,0 +1,8 @@
+#include <stdio.h>
+
+const char *message(void);
+
+int main(void) {
+ printf("%s\n", message());
+ return 0;
+}
diff --git a/third_party/python/gyp/test/configurations/target_platform/gyptest-target_platform.py b/third_party/python/gyp/test/configurations/target_platform/gyptest-target_platform.py
new file mode 100755
index 0000000000..1645d6ec08
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/target_platform/gyptest-target_platform.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Tests the msvs-specific msvs_target_platform option.
+"""
+
+import TestGyp
+import TestCommon
+
+
+def RunX64(exe, stdout):
+ try:
+ test.run_built_executable(exe, stdout=stdout)
+ except WindowsError as e:
+ # Assume the exe is 64-bit if it can't load on 32-bit systems.
+ # Both versions of the error are required because different versions
+ # of python seem to return different errors for invalid exe type.
+ if e.errno != 193 and '[Error 193]' not in str(e):
+ raise
+
+
+test = TestGyp.TestGyp(formats=['msvs'])
+
+test.run_gyp('configurations.gyp')
+
+test.set_configuration('Debug|x64')
+test.build('configurations.gyp', rebuild=True)
+RunX64('front_left', stdout=('left\n'))
+RunX64('front_right', stdout=('right\n'))
+
+test.set_configuration('Debug|Win32')
+test.build('configurations.gyp', rebuild=True)
+RunX64('front_left', stdout=('left\n'))
+test.run_built_executable('front_right', stdout=('right\n'))
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/configurations/target_platform/left.c b/third_party/python/gyp/test/configurations/target_platform/left.c
new file mode 100644
index 0000000000..1ce2ea1227
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/target_platform/left.c
@@ -0,0 +1,3 @@
+const char *message(void) {
+ return "left";
+}
diff --git a/third_party/python/gyp/test/configurations/target_platform/right.c b/third_party/python/gyp/test/configurations/target_platform/right.c
new file mode 100644
index 0000000000..b1578492fe
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/target_platform/right.c
@@ -0,0 +1,3 @@
+const char *message(void) {
+ return "right";
+}
diff --git a/third_party/python/gyp/test/configurations/x64/configurations.c b/third_party/python/gyp/test/configurations/x64/configurations.c
new file mode 100644
index 0000000000..37018438fc
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/x64/configurations.c
@@ -0,0 +1,12 @@
+#include <stdio.h>
+
+int main(void) {
+ if (sizeof(void*) == 4) {
+ printf("Running Win32\n");
+ } else if (sizeof(void*) == 8) {
+ printf("Running x64\n");
+ } else {
+ printf("Unexpected platform\n");
+ }
+ return 0;
+}
diff --git a/third_party/python/gyp/test/configurations/x64/configurations.gyp b/third_party/python/gyp/test/configurations/x64/configurations.gyp
new file mode 100644
index 0000000000..8b0139f141
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/x64/configurations.gyp
@@ -0,0 +1,38 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'configurations': {
+ 'Debug': {
+ 'msvs_configuration_platform': 'Win32',
+ },
+ 'Debug_x64': {
+ 'inherit_from': ['Debug'],
+ 'msvs_configuration_platform': 'x64',
+ },
+ },
+ },
+ 'targets': [
+ {
+ 'target_name': 'configurations',
+ 'type': 'executable',
+ 'sources': [
+ 'configurations.c',
+ ],
+ },
+ {
+ 'target_name': 'configurations64',
+ 'type': 'executable',
+ 'sources': [
+ 'configurations.c',
+ ],
+ 'configurations': {
+ 'Debug': {
+ 'msvs_target_platform': 'x64',
+ },
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/configurations/x64/gyptest-x86.py b/third_party/python/gyp/test/configurations/x64/gyptest-x86.py
new file mode 100755
index 0000000000..8675d8f7e7
--- /dev/null
+++ b/third_party/python/gyp/test/configurations/x64/gyptest-x86.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building x86 and x64 executables and checks their machine types.
+"""
+
+import TestGyp
+
+import sys
+
+formats = ['msvs']
+if sys.platform == 'win32':
+ formats += ['ninja']
+test = TestGyp.TestGyp(formats=formats)
+
+test.run_gyp('configurations.gyp')
+test.set_configuration('Debug|Win32')
+test.build('configurations.gyp', test.ALL)
+
+for machine, suffix in [('14C machine (x86)', ''),
+ ('8664 machine (x64)', '64')]:
+ output = test.run_dumpbin(
+ '/headers', test.built_file_path('configurations%s.exe' % suffix))
+ if machine not in output:
+ test.fail_test()
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/copies/gyptest-all.py b/third_party/python/gyp/test/copies/gyptest-all.py
new file mode 100755
index 0000000000..aeccf3324c
--- /dev/null
+++ b/third_party/python/gyp/test/copies/gyptest-all.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies file copies using an explicit build target of 'all'.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('copies.gyp',
+ '-G', 'xcode_ninja_target_pattern=^(?!copies_null)',
+ chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('copies.gyp', test.ALL, chdir='relocate/src')
+
+test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
+
+test.built_file_must_match('copies-out/file2',
+ 'file2 contents\n',
+ chdir='relocate/src')
+
+test.built_file_must_match('copies-out/directory/file3',
+ 'file3 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out/directory/file4',
+ 'file4 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out/directory/subdir/file5',
+ 'file5 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out/subdir/file6',
+ 'file6 contents\n',
+ chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/copies/gyptest-attribs.py b/third_party/python/gyp/test/copies/gyptest-attribs.py
new file mode 100644
index 0000000000..70d717a45e
--- /dev/null
+++ b/third_party/python/gyp/test/copies/gyptest-attribs.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that copying files preserves file attributes.
+"""
+
+import TestGyp
+
+import os
+import stat
+import sys
+
+
+def check_attribs(path, expected_exec_bit):
+ out_path = test.built_file_path(path, chdir='src')
+
+ in_stat = os.stat(os.path.join('src', path))
+ out_stat = os.stat(out_path)
+ if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
+ test.fail_test()
+
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('copies-attribs.gyp', chdir='src')
+
+test.build('copies-attribs.gyp', chdir='src')
+
+if sys.platform != 'win32':
+ out_path = test.built_file_path('executable-file.sh', chdir='src')
+ test.must_contain(out_path,
+ '#!/bin/bash\n'
+ '\n'
+ 'echo echo echo echo cho ho o o\n')
+ check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/copies/gyptest-default.py b/third_party/python/gyp/test/copies/gyptest-default.py
new file mode 100755
index 0000000000..a916869f0d
--- /dev/null
+++ b/third_party/python/gyp/test/copies/gyptest-default.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies file copies using the build tool default.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('copies.gyp',
+ '-G', 'xcode_ninja_target_pattern=^(?!copies_null)',
+ chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('copies.gyp', chdir='relocate/src')
+
+test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
+
+test.built_file_must_match('copies-out/file2',
+ 'file2 contents\n',
+ chdir='relocate/src')
+
+test.built_file_must_match('copies-out/directory/file3',
+ 'file3 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out/directory/file4',
+ 'file4 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out/directory/subdir/file5',
+ 'file5 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out/subdir/file6',
+ 'file6 contents\n',
+ chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/copies/gyptest-samedir.py b/third_party/python/gyp/test/copies/gyptest-samedir.py
new file mode 100755
index 0000000000..923ca61557
--- /dev/null
+++ b/third_party/python/gyp/test/copies/gyptest-samedir.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies file copies where two copies sections in the same target have the
+same destination directory.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+test.run_gyp('copies-samedir.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+test.build('copies-samedir.gyp', 'copies_samedir', chdir='relocate/src')
+
+test.built_file_must_match('copies-out-samedir/file1',
+ 'file1 contents\n',
+ chdir='relocate/src')
+
+test.built_file_must_match('copies-out-samedir/file2',
+ 'file2 contents\n',
+ chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/copies/gyptest-slash.py b/third_party/python/gyp/test/copies/gyptest-slash.py
new file mode 100755
index 0000000000..f7a2e549eb
--- /dev/null
+++ b/third_party/python/gyp/test/copies/gyptest-slash.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies file copies with a trailing slash in the destination directory.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+test.run_gyp('copies-slash.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+test.build('copies-slash.gyp', chdir='relocate/src')
+
+test.built_file_must_match('copies-out-slash/directory/file3',
+ 'file3 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out-slash/directory/file4',
+ 'file4 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out-slash/directory/subdir/file5',
+ 'file5 contents\n',
+ chdir='relocate/src')
+
+test.built_file_must_match('copies-out-slash-2/directory/file3',
+ 'file3 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out-slash-2/directory/file4',
+ 'file4 contents\n',
+ chdir='relocate/src')
+test.built_file_must_match('copies-out-slash-2/directory/subdir/file5',
+ 'file5 contents\n',
+ chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/copies/gyptest-sourceless-shared-lib.py b/third_party/python/gyp/test/copies/gyptest-sourceless-shared-lib.py
new file mode 100644
index 0000000000..6ec2e512ad
--- /dev/null
+++ b/third_party/python/gyp/test/copies/gyptest-sourceless-shared-lib.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies copies in sourceless shared_library targets are executed.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+test.run_gyp('copies-sourceless-shared-lib.gyp', chdir='src')
+test.relocate('src', 'relocate/src')
+test.build('copies-sourceless-shared-lib.gyp', chdir='relocate/src')
+test.built_file_must_match('copies-out/file1',
+ 'file1 contents\n',
+ chdir='relocate/src')
+test.pass_test()
diff --git a/third_party/python/gyp/test/copies/gyptest-updir.py b/third_party/python/gyp/test/copies/gyptest-updir.py
new file mode 100755
index 0000000000..47a2ca2e1d
--- /dev/null
+++ b/third_party/python/gyp/test/copies/gyptest-updir.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies file copies where the destination is one level above an expansion that
+yields a make variable.
+"""
+
+from __future__ import print_function
+
+import sys
+
+import TestGyp
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+test = TestGyp.TestGyp()
+test.run_gyp('copies-updir.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+test.build('copies-updir.gyp', 'copies_up', chdir='relocate/src')
+
+test.built_file_must_match('../copies-out-updir/file1',
+ 'file1 contents\n',
+ chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/copies/src/copies-attribs.gyp b/third_party/python/gyp/test/copies/src/copies-attribs.gyp
new file mode 100644
index 0000000000..073e0d0cf6
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/copies-attribs.gyp
@@ -0,0 +1,20 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'copies1',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)',
+ 'files': [
+ 'executable-file.sh',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/copies/src/copies-samedir.gyp b/third_party/python/gyp/test/copies/src/copies-samedir.gyp
new file mode 100644
index 0000000000..2919ce503e
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/copies-samedir.gyp
@@ -0,0 +1,37 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'copies_samedir',
+ 'type': 'none',
+ 'dependencies': [
+ 'copies_samedir_dependency',
+ ],
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out-samedir',
+ 'files': [
+ 'file1',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'copies_samedir_dependency',
+ 'type': 'none',
+ 'direct_dependent_settings': {
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out-samedir',
+ 'files': [
+ 'file2',
+ ],
+ },
+ ],
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/copies/src/copies-slash.gyp b/third_party/python/gyp/test/copies/src/copies-slash.gyp
new file mode 100644
index 0000000000..9bf54bd181
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/copies-slash.gyp
@@ -0,0 +1,36 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ # A trailing slash on the destination directory should be ignored.
+ {
+ 'target_name': 'copies_recursive_trailing_slash',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out-slash/',
+ 'files': [
+ 'directory/',
+ ],
+ },
+ ],
+ },
+ # Even if the source directory is below <(PRODUCT_DIR).
+ {
+ 'target_name': 'copies_recursive_trailing_slash_in_product_dir',
+ 'type': 'none',
+ 'dependencies': [ ':copies_recursive_trailing_slash' ],
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out-slash-2/',
+ 'files': [
+ '<(PRODUCT_DIR)/copies-out-slash/directory/',
+ ],
+ },
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/copies/src/copies-sourceless-shared-lib.gyp b/third_party/python/gyp/test/copies/src/copies-sourceless-shared-lib.gyp
new file mode 100644
index 0000000000..7908f716a9
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/copies-sourceless-shared-lib.gyp
@@ -0,0 +1,27 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'mylib',
+ 'type': 'static_library',
+ 'sources': [ 'foo.c' ],
+ },
+ {
+ 'target_name': 'mysolib',
+ 'type': 'shared_library',
+ 'dependencies': [ 'mylib' ],
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out',
+ 'files': [ 'file1' ],
+ },
+ ],
+ # link.exe gets confused by sourceless shared libraries and needs this
+ # to become unconfused.
+ 'msvs_settings': { 'VCLinkerTool': { 'TargetMachine': '1', }, },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/copies/src/copies-updir.gyp b/third_party/python/gyp/test/copies/src/copies-updir.gyp
new file mode 100644
index 0000000000..bd3bfdd1d2
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/copies-updir.gyp
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'copies_up',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/../copies-out-updir',
+ 'files': [
+ 'file1',
+ ],
+ },
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/copies/src/copies.gyp b/third_party/python/gyp/test/copies/src/copies.gyp
new file mode 100644
index 0000000000..ce2e0cabca
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/copies.gyp
@@ -0,0 +1,70 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'copies1',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': 'copies-out',
+ 'files': [
+ 'file1',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'copies2',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out',
+ 'files': [
+ 'file2',
+ ],
+ },
+ ],
+ },
+ # Copy a directory tree.
+ {
+ 'target_name': 'copies_recursive',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out',
+ 'files': [
+ 'directory/',
+ ],
+ },
+ ],
+ },
+ # Copy a directory from deeper in the tree (this should not reproduce the
+ # entire directory path in the destination, only the final directory).
+ {
+ 'target_name': 'copies_recursive_depth',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out',
+ 'files': [
+ 'parentdir/subdir/',
+ ],
+ },
+ ],
+ },
+ # Verify that a null 'files' list doesn't gag the generators.
+ {
+ 'target_name': 'copies_null',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-null',
+ 'files': [],
+ },
+ ],
+ },
+ ],
+}
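
Side note on the 'copies_recursive_depth' target above: only the final 'subdir' component should be reproduced under the destination, not the full 'parentdir/subdir' path. The gyptest that drives copies.gyp is not part of this hunk, so the following is only an illustrative sketch of the assertion such a test could make, reusing the TestGyp helpers the other gyptest-copies-*.py scripts in this patch already use:

    import TestGyp

    test = TestGyp.TestGyp()
    test.run_gyp('copies.gyp', chdir='src')
    test.build('copies.gyp', test.ALL, chdir='src')
    # 'parentdir/subdir/' lands as 'subdir', not as 'parentdir/subdir'.
    test.built_file_must_match('copies-out/subdir/file6',
                               'file6 contents\n',
                               chdir='src')
    test.pass_test()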
diff --git a/third_party/python/gyp/test/copies/src/directory/file3 b/third_party/python/gyp/test/copies/src/directory/file3
new file mode 100644
index 0000000000..43f16f3522
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/directory/file3
@@ -0,0 +1 @@
+file3 contents
diff --git a/third_party/python/gyp/test/copies/src/directory/file4 b/third_party/python/gyp/test/copies/src/directory/file4
new file mode 100644
index 0000000000..5f7270a084
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/directory/file4
@@ -0,0 +1 @@
+file4 contents
diff --git a/third_party/python/gyp/test/copies/src/directory/subdir/file5 b/third_party/python/gyp/test/copies/src/directory/subdir/file5
new file mode 100644
index 0000000000..41f47186bd
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/directory/subdir/file5
@@ -0,0 +1 @@
+file5 contents
diff --git a/third_party/python/gyp/test/copies/src/executable-file.sh b/third_party/python/gyp/test/copies/src/executable-file.sh
new file mode 100755
index 0000000000..796953a1a2
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/executable-file.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+echo echo echo echo cho ho o o
diff --git a/third_party/python/gyp/test/copies/src/file1 b/third_party/python/gyp/test/copies/src/file1
new file mode 100644
index 0000000000..84d55c5759
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/file1
@@ -0,0 +1 @@
+file1 contents
diff --git a/third_party/python/gyp/test/copies/src/file2 b/third_party/python/gyp/test/copies/src/file2
new file mode 100644
index 0000000000..af1b8ae35d
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/file2
@@ -0,0 +1 @@
+file2 contents
diff --git a/third_party/python/gyp/test/copies/src/foo.c b/third_party/python/gyp/test/copies/src/foo.c
new file mode 100644
index 0000000000..99a4c103ba
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/foo.c
@@ -0,0 +1,13 @@
+// Copyright (c) 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+int f() { return 42; }
+
+#ifdef _MSC_VER
+// link.exe gets confused by sourceless shared libraries and needs this
+// to become unconfused.
+int __stdcall _DllMainCRTStartup(
+ unsigned hInst, unsigned reason, void* reserved) {
+ return 1;
+}
+#endif
diff --git a/third_party/python/gyp/test/copies/src/parentdir/subdir/file6 b/third_party/python/gyp/test/copies/src/parentdir/subdir/file6
new file mode 100644
index 0000000000..f5d5757348
--- /dev/null
+++ b/third_party/python/gyp/test/copies/src/parentdir/subdir/file6
@@ -0,0 +1 @@
+file6 contents
diff --git a/third_party/python/gyp/test/custom-generator/gyptest-custom-generator.py b/third_party/python/gyp/test/custom-generator/gyptest-custom-generator.py
new file mode 100755
index 0000000000..85fd0724a1
--- /dev/null
+++ b/third_party/python/gyp/test/custom-generator/gyptest-custom-generator.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test that custom generators can be passed to --format
+"""
+
+import TestGyp
+
+test = TestGyp.TestGypCustom(format='mygenerator.py')
+test.run_gyp('test.gyp')
+
+# mygenerator.py should generate a file called MyBuildFile containing
+# "Testing..." alongside the gyp file.
+test.must_match('MyBuildFile', 'Testing...\n')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/custom-generator/mygenerator.py b/third_party/python/gyp/test/custom-generator/mygenerator.py
new file mode 100644
index 0000000000..5fcac3d779
--- /dev/null
+++ b/third_party/python/gyp/test/custom-generator/mygenerator.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Custom gyp generator that doesn't do much."""
+
+import gyp.common
+
+generator_default_variables = {}
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ f = open("MyBuildFile", "w")
+ f.write("Testing...\n")
+ f.close()
diff --git a/third_party/python/gyp/test/custom-generator/test.gyp b/third_party/python/gyp/test/custom-generator/test.gyp
new file mode 100644
index 0000000000..aa5f864a3b
--- /dev/null
+++ b/third_party/python/gyp/test/custom-generator/test.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'exe',
+ 'type': 'executable',
+ 'sources': [
+ 'main.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/cxxflags/cxxflags.cc b/third_party/python/gyp/test/cxxflags/cxxflags.cc
new file mode 100644
index 0000000000..e70e39dfd3
--- /dev/null
+++ b/third_party/python/gyp/test/cxxflags/cxxflags.cc
@@ -0,0 +1,15 @@
+/* Copyright (c) 2010 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(void)
+{
+#ifdef ABC
+ printf("With define\n");
+#else
+ printf("No define\n");
+#endif
+ return 0;
+}
diff --git a/third_party/python/gyp/test/cxxflags/cxxflags.gyp b/third_party/python/gyp/test/cxxflags/cxxflags.gyp
new file mode 100644
index 0000000000..a082d49492
--- /dev/null
+++ b/third_party/python/gyp/test/cxxflags/cxxflags.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'cxxflags',
+ 'type': 'executable',
+ 'sources': [
+ 'cxxflags.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/cxxflags/gyptest-cxxflags.py b/third_party/python/gyp/test/cxxflags/gyptest-cxxflags.py
new file mode 100755
index 0000000000..117a1800de
--- /dev/null
+++ b/third_party/python/gyp/test/cxxflags/gyptest-cxxflags.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies the use of the environment during regeneration when the gyp file
+changes, specifically via build of an executable with C++ flags specified by
+CXXFLAGS.
+
+In this test, gyp runs within a local environment, but the build happens outside of it.
+"""
+
+import TestGyp
+
+FORMATS = ('ninja',)
+
+test = TestGyp.TestGyp(formats=FORMATS)
+
+# We reset the environ after calling gyp. When the auto-regeneration happens,
+# the same define should be reused anyway.
+with TestGyp.LocalEnv({'CXXFLAGS': ''}):
+ test.run_gyp('cxxflags.gyp')
+
+test.build('cxxflags.gyp')
+
+expect = """\
+No define
+"""
+test.run_built_executable('cxxflags', stdout=expect)
+
+test.sleep()
+
+with TestGyp.LocalEnv({'CXXFLAGS': '-DABC'}):
+ test.run_gyp('cxxflags.gyp')
+
+test.build('cxxflags.gyp')
+
+expect = """\
+With define
+"""
+test.run_built_executable('cxxflags', stdout=expect)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/defines-escaping/defines-escaping.c b/third_party/python/gyp/test/defines-escaping/defines-escaping.c
new file mode 100644
index 0000000000..a0aa4c286d
--- /dev/null
+++ b/third_party/python/gyp/test/defines-escaping/defines-escaping.c
@@ -0,0 +1,11 @@
+/* Copyright (c) 2010 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(void)
+{
+ printf(TEST_FORMAT, TEST_ARGS);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/defines-escaping/defines-escaping.gyp b/third_party/python/gyp/test/defines-escaping/defines-escaping.gyp
new file mode 100644
index 0000000000..6f0f3fde41
--- /dev/null
+++ b/third_party/python/gyp/test/defines-escaping/defines-escaping.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'defines_escaping',
+ 'type': 'executable',
+ 'sources': [
+ 'defines-escaping.c',
+ ],
+ 'defines': [
+ 'TEST_FORMAT="<(test_format)"',
+ 'TEST_ARGS=<(test_args)',
+ ],
+ },
+ ],
+}
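
A rough sketch, in plain Python rather than gyp itself, of the textual substitution the first case in gyptest-defines-escaping.py (below) relies on: the test_format/test_args gyp variables are pasted into the 'defines' entries above, and defines-escaping.c then does printf(TEST_FORMAT, TEST_ARGS). This only illustrates the substitution step, not the shell and Windows quoting layers the rest of that test exercises:

    # Values as they would arrive via GYP_DEFINES in the first test case.
    test_format = r'\n%s\n'
    test_args = '"Simple test of %s with a literal"'
    defines = ['TEST_FORMAT="%s"' % test_format,
               'TEST_ARGS=%s' % test_args]
    print(defines)
    # ['TEST_FORMAT="\\n%s\\n"', 'TEST_ARGS="Simple test of %s with a literal"']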
diff --git a/third_party/python/gyp/test/defines-escaping/gyptest-defines-escaping.py b/third_party/python/gyp/test/defines-escaping/gyptest-defines-escaping.py
new file mode 100755
index 0000000000..eb18a3d369
--- /dev/null
+++ b/third_party/python/gyp/test/defines-escaping/gyptest-defines-escaping.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of an executable with C++ define specified by a gyp define using
+various special characters such as quotes, commas, etc.
+"""
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# Tests string literals, percents, and backslash escapes.
+try:
+ os.environ['GYP_DEFINES'] = (
+ r"""test_format='\n%s\n' """
+ r"""test_args='"Simple test of %s with a literal"'""")
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.build('defines-escaping.gyp')
+
+expect = """
+Simple test of %s with a literal
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Test multiple comma-and-space-separated string literals.
+try:
+ os.environ['GYP_DEFINES'] = \
+ r"""test_format='\n%s and %s\n' test_args='"foo", "bar"'"""
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines-escaping.c')
+test.build('defines-escaping.gyp')
+
+expect = """
+foo and bar
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Test string literals containing quotes.
+try:
+ os.environ['GYP_DEFINES'] = (
+ r"""test_format='\n%s %s %s %s %s\n' """
+ r"""test_args='"\"These,\"","""
+ r""" "\"words,\"","""
+ r""" "\"are,\"","""
+ r""" "\"in,\"","""
+ r""" "\"quotes.\""'""")
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines-escaping.c')
+test.build('defines-escaping.gyp')
+
+expect = """
+"These," "words," "are," "in," "quotes."
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Test string literals containing single quotes.
+try:
+ os.environ['GYP_DEFINES'] = (
+ r"""test_format='\n%s %s %s %s %s\n' """
+ r"""test_args="\"'These,'\","""
+ r""" \"'words,'\","""
+ r""" \"'are,'\","""
+ r""" \"'in,'\","""
+ r""" \"'quotes.'\"" """)
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines-escaping.c')
+test.build('defines-escaping.gyp')
+
+expect = """
+'These,' 'words,' 'are,' 'in,' 'quotes.'
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Test string literals containing different numbers of backslashes before quotes
+# (to exercise Windows' quoting behaviour).
+try:
+ os.environ['GYP_DEFINES'] = (
+ r"""test_format='\n%s\n%s\n%s\n' """
+ r"""test_args='"\\\"1 visible slash\\\"","""
+ r""" "\\\\\"2 visible slashes\\\\\"","""
+ r""" "\\\\\\\"3 visible slashes\\\\\\\""'""")
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines-escaping.c')
+test.build('defines-escaping.gyp')
+
+expect = r"""
+\"1 visible slash\"
+\\"2 visible slashes\\"
+\\\"3 visible slashes\\\"
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Test that various scary sequences are passed unfettered.
+try:
+ os.environ['GYP_DEFINES'] = (
+ r"""test_format='\n%s\n' """
+ r"""test_args='"$foo, &quot; `foo`;"'""")
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines-escaping.c')
+test.build('defines-escaping.gyp')
+
+expect = """
+$foo, &quot; `foo`;
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Visual Studio 2010 can't handle passing %PATH%
+if not (test.format == 'msvs' and test.uses_msbuild):
+ try:
+ os.environ['GYP_DEFINES'] = (
+ """test_format='%s' """
+ """test_args='"%PATH%"'""")
+ test.run_gyp('defines-escaping.gyp')
+ finally:
+ del os.environ['GYP_DEFINES']
+
+ test.sleep()
+ test.touch('defines-escaping.c')
+ test.build('defines-escaping.gyp')
+
+ expect = "%PATH%"
+ test.run_built_executable('defines_escaping', stdout=expect)
+
+
+# Test commas and semi-colons preceded by backslashes (to exercise Windows'
+# quoting behaviour).
+try:
+ os.environ['GYP_DEFINES'] = (
+ r"""test_format='\n%s\n%s\n' """
+ r"""test_args='"\\, \\\\;","""
+ # Same thing again, but enclosed in visible quotes.
+ r""" "\"\\, \\\\;\""'""")
+ test.run_gyp('defines-escaping.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines-escaping.c')
+test.build('defines-escaping.gyp')
+
+expect = r"""
+\, \\;
+"\, \\;"
+"""
+test.run_built_executable('defines_escaping', stdout=expect)
+
+# We deliberately do not test having an odd number of quotes in a string
+# literal because that isn't feasible in MSVS.
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/defines/defines-env.gyp b/third_party/python/gyp/test/defines/defines-env.gyp
new file mode 100644
index 0000000000..1781546ae0
--- /dev/null
+++ b/third_party/python/gyp/test/defines/defines-env.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'value%': '5',
+ },
+ 'targets': [
+ {
+ 'target_name': 'defines',
+ 'type': 'executable',
+ 'sources': [
+ 'defines.c',
+ ],
+ 'defines': [
+ 'VALUE=<(value)',
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/defines/defines.c b/third_party/python/gyp/test/defines/defines.c
new file mode 100644
index 0000000000..dda139275d
--- /dev/null
+++ b/third_party/python/gyp/test/defines/defines.c
@@ -0,0 +1,23 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(void)
+{
+#ifdef FOO
+ printf("FOO is defined\n");
+#endif
+ printf("VALUE is %d\n", VALUE);
+
+#ifdef PAREN_VALUE
+ printf("2*PAREN_VALUE is %d\n", 2*PAREN_VALUE);
+#endif
+
+#ifdef HASH_VALUE
+ printf("HASH_VALUE is %s\n", HASH_VALUE);
+#endif
+
+ return 0;
+}
diff --git a/third_party/python/gyp/test/defines/defines.gyp b/third_party/python/gyp/test/defines/defines.gyp
new file mode 100644
index 0000000000..90a755eb84
--- /dev/null
+++ b/third_party/python/gyp/test/defines/defines.gyp
@@ -0,0 +1,38 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'defines',
+ 'type': 'executable',
+ 'sources': [
+ 'defines.c',
+ ],
+ 'defines': [
+ 'FOO',
+ 'VALUE=1',
+ 'PAREN_VALUE=(1+2+3)',
+ 'HASH_VALUE="a#1"',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="fakeos"', {
+ 'targets': [
+ {
+ 'target_name': 'fakeosprogram',
+ 'type': 'executable',
+ 'sources': [
+ 'defines.c',
+ ],
+ 'defines': [
+ 'FOO',
+ 'VALUE=1',
+ ],
+ },
+ ],
+ }],
+ ],
+}
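
A small aside, in plain Python, on why PAREN_VALUE above keeps its parentheses: the define is substituted textually into defines.c, so 2*PAREN_VALUE must read 2*(1+2+3), not 2*1+2+3. This mirrors the "2*PAREN_VALUE is 12" line that gyptest-defines.py expects:

    assert 2 * (1 + 2 + 3) == 12   # with parentheses, as defined above
    assert 2 * 1 + 2 + 3 == 7      # what an unparenthesized define would yield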
diff --git a/third_party/python/gyp/test/defines/gyptest-define-override.py b/third_party/python/gyp/test/defines/gyptest-define-override.py
new file mode 100755
index 0000000000..9730455b67
--- /dev/null
+++ b/third_party/python/gyp/test/defines/gyptest-define-override.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a default gyp define can be overridden.
+"""
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# CMake loudly warns about passing '#' to the compiler and drops the define.
+expect_stderr = ''
+if test.format == 'cmake':
+ expect_stderr = (
+"""WARNING: Preprocessor definitions containing '#' may not be passed on the"""
+""" compiler command line because many compilers do not support it.\n"""
+"""CMake is dropping a preprocessor definition: HASH_VALUE="a#1"\n"""
+"""Consider defining the macro in a (configured) header file.\n\n""")
+
+# Command-line define
+test.run_gyp('defines.gyp', '-D', 'OS=fakeos')
+test.build('defines.gyp', stderr=expect_stderr)
+test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE)
+# Clean up the exe so subsequent tests don't find an old exe.
+os.remove(test.built_file_path('fakeosprogram', type=test.EXECUTABLE))
+
+# Without "OS" override, fokeosprogram shouldn't be built.
+test.run_gyp('defines.gyp')
+test.build('defines.gyp', stderr=expect_stderr)
+test.built_file_must_not_exist('fakeosprogram', type=test.EXECUTABLE)
+
+# Environment define
+os.environ['GYP_DEFINES'] = 'OS=fakeos'
+test.run_gyp('defines.gyp')
+test.build('defines.gyp', stderr=expect_stderr)
+test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/defines/gyptest-defines-env-regyp.py b/third_party/python/gyp/test/defines/gyptest-defines-env-regyp.py
new file mode 100755
index 0000000000..f2d931c2f7
--- /dev/null
+++ b/third_party/python/gyp/test/defines/gyptest-defines-env-regyp.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of an executable with C++ define specified by a gyp define, and
+the use of the environment during regeneration when the gyp file changes.
+"""
+
+import os
+import TestGyp
+
+# Regenerating build files when a gyp file changes is currently only supported
+# by the make generator.
+test = TestGyp.TestGyp(formats=['make'])
+
+try:
+ os.environ['GYP_DEFINES'] = 'value=50'
+ test.run_gyp('defines.gyp')
+finally:
+ # We clear the environ after calling gyp. When the auto-regeneration happens,
+ # the same define should be reused anyway. Reset to empty string first in
+ # case the platform doesn't support unsetenv.
+ os.environ['GYP_DEFINES'] = ''
+ del os.environ['GYP_DEFINES']
+
+test.build('defines.gyp')
+
+expect = """\
+FOO is defined
+VALUE is 1
+2*PAREN_VALUE is 12
+HASH_VALUE is a#1
+"""
+test.run_built_executable('defines', stdout=expect)
+
+# Sleep so that the changed gyp file will have a newer timestamp than the
+# previously generated build files.
+test.sleep()
+test.write('defines.gyp', test.read('defines-env.gyp'))
+
+test.build('defines.gyp', test.ALL)
+
+expect = """\
+VALUE is 50
+"""
+test.run_built_executable('defines', stdout=expect)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/defines/gyptest-defines-env.py b/third_party/python/gyp/test/defines/gyptest-defines-env.py
new file mode 100755
index 0000000000..6b4e7175a6
--- /dev/null
+++ b/third_party/python/gyp/test/defines/gyptest-defines-env.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of an executable with C++ define specified by a gyp define.
+"""
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# With the value only given in environment, it should be used.
+try:
+ os.environ['GYP_DEFINES'] = 'value=10'
+ test.run_gyp('defines-env.gyp')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.build('defines-env.gyp')
+
+expect = """\
+VALUE is 10
+"""
+test.run_built_executable('defines', stdout=expect)
+
+
+# With the value given in both command line and environment,
+# command line should take precedence.
+try:
+ os.environ['GYP_DEFINES'] = 'value=20'
+ test.run_gyp('defines-env.gyp', '-Dvalue=25')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines.c')
+test.build('defines-env.gyp')
+
+expect = """\
+VALUE is 25
+"""
+test.run_built_executable('defines', stdout=expect)
+
+
+# With the value only given in environment, it should be ignored if
+# --ignore-environment is specified.
+try:
+ os.environ['GYP_DEFINES'] = 'value=30'
+ test.run_gyp('defines-env.gyp', '--ignore-environment')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines.c')
+test.build('defines-env.gyp')
+
+expect = """\
+VALUE is 5
+"""
+test.run_built_executable('defines', stdout=expect)
+
+
+# With the value given in both command line and environment, and
+# --ignore-environment also specified, command line should still be used.
+try:
+ os.environ['GYP_DEFINES'] = 'value=40'
+ test.run_gyp('defines-env.gyp', '--ignore-environment', '-Dvalue=45')
+finally:
+ del os.environ['GYP_DEFINES']
+
+test.sleep()
+test.touch('defines.c')
+test.build('defines-env.gyp')
+
+expect = """\
+VALUE is 45
+"""
+test.run_built_executable('defines', stdout=expect)
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/defines/gyptest-defines.py b/third_party/python/gyp/test/defines/gyptest-defines.py
new file mode 100755
index 0000000000..77a3af53b9
--- /dev/null
+++ b/third_party/python/gyp/test/defines/gyptest-defines.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of an executable with C++ defines.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('defines.gyp')
+
+expect = """\
+FOO is defined
+VALUE is 1
+2*PAREN_VALUE is 12
+"""
+
+# CMake loudly warns about passing '#' to the compiler and drops the define.
+expect_stderr = ''
+if test.format == 'cmake':
+ expect_stderr = (
+"""WARNING: Preprocessor definitions containing '#' may not be passed on the"""
+""" compiler command line because many compilers do not support it.\n"""
+"""CMake is dropping a preprocessor definition: HASH_VALUE="a#1"\n"""
+"""Consider defining the macro in a (configured) header file.\n\n""")
+else:
+ expect += """HASH_VALUE is a#1
+"""
+
+test.build('defines.gyp', stderr=expect_stderr)
+
+test.run_built_executable('defines', stdout=expect)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/dependencies/a.c b/third_party/python/gyp/test/dependencies/a.c
new file mode 100755
index 0000000000..3bba111d24
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/a.c
@@ -0,0 +1,9 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+extern int funcB();
+
+int funcA() {
+ return funcB();
+}
diff --git a/third_party/python/gyp/test/dependencies/adso/all_dependent_settings_order.gyp b/third_party/python/gyp/test/dependencies/adso/all_dependent_settings_order.gyp
new file mode 100644
index 0000000000..89817d6bbb
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/adso/all_dependent_settings_order.gyp
@@ -0,0 +1,45 @@
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'none',
+ 'sources': ['a.cc'],
+ 'all_dependent_settings': {'sources': ['a.cc']},
+ },
+ {
+ 'target_name': 'b',
+ 'type': 'none',
+ 'sources': ['b.cc'],
+ 'all_dependent_settings': {'sources': ['b.cc']},
+ 'dependencies': ['a'],
+ },
+
+ {
+ 'target_name': 'c',
+ 'type': 'none',
+ 'sources': ['c.cc'],
+ 'all_dependent_settings': {'sources': ['c.cc']},
+ 'dependencies': ['b', 'a'],
+ },
+ {
+ 'target_name': 'd',
+ 'type': 'none',
+ 'sources': ['d.cc'],
+ 'dependencies': ['c', 'a', 'b'],
+ 'actions': [
+ {
+ 'action_name': 'write_sources',
+ 'inputs': ['write_args.py'],
+ 'outputs': ['<(PRODUCT_DIR)/out.txt'],
+ 'action': [
+ 'python',
+ 'write_args.py',
+ '<(PRODUCT_DIR)/out.txt',
+ '>@(_sources)'
+ ],
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
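
A minimal sketch (not gyp's actual implementation) of the ordering that gyptest-all-dependent-settings-order.py later in this patch expects: target d lists its own source first, then the all_dependent_settings sources of its dependencies follow in topological order, regardless of the order in d's 'dependencies' list:

    deps = {'a': [], 'b': ['a'], 'c': ['b', 'a'], 'd': ['c', 'a', 'b']}

    def topo_sources(target, seen=None, order=None):
        # Post-order DFS: a dependency is emitted before anything that needs it.
        seen = set() if seen is None else seen
        order = [] if order is None else order
        for dep in deps[target]:
            if dep not in seen:
                seen.add(dep)
                topo_sources(dep, seen, order)
                order.append(dep)
        return order

    print(['d.cc'] + ['%s.cc' % t for t in topo_sources('d')])
    # -> ['d.cc', 'a.cc', 'b.cc', 'c.cc'], matching the expected 'd.cc a.cc b.cc c.cc'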
diff --git a/third_party/python/gyp/test/dependencies/adso/write_args.py b/third_party/python/gyp/test/dependencies/adso/write_args.py
new file mode 100755
index 0000000000..5e388b8f70
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/adso/write_args.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+f = open(sys.argv[1], 'w')
+f.write(' '.join(sys.argv[2:]))
+f.close()
diff --git a/third_party/python/gyp/test/dependencies/b/b.c b/third_party/python/gyp/test/dependencies/b/b.c
new file mode 100755
index 0000000000..b5e771bcc7
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/b/b.c
@@ -0,0 +1,3 @@
+int funcB() {
+ return 2;
+}
diff --git a/third_party/python/gyp/test/dependencies/b/b.gyp b/third_party/python/gyp/test/dependencies/b/b.gyp
new file mode 100755
index 0000000000..893dc64d65
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/b/b.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'b',
+ 'type': 'static_library',
+ 'sources': [
+ 'b.c',
+ ],
+ },
+ {
+ 'target_name': 'b3',
+ 'type': 'static_library',
+ 'sources': [
+ 'b3.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/dependencies/b/b3.c b/third_party/python/gyp/test/dependencies/b/b3.c
new file mode 100755
index 0000000000..287f67ff31
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/b/b3.c
@@ -0,0 +1,9 @@
+/*
+ * Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+int funcB() {
+ return 3;
+}
diff --git a/third_party/python/gyp/test/dependencies/c/c.c b/third_party/python/gyp/test/dependencies/c/c.c
new file mode 100644
index 0000000000..4949daf3ee
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/c/c.c
@@ -0,0 +1,4 @@
+int funcC() {
+ return 3
+ // Intentional syntax error. This file should never be compiled, so this
+ // shouldn't be a problem.
diff --git a/third_party/python/gyp/test/dependencies/c/c.gyp b/third_party/python/gyp/test/dependencies/c/c.gyp
new file mode 100644
index 0000000000..eabebea9ef
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/c/c.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'c_unused',
+ 'type': 'static_library',
+ 'sources': [
+ 'c.c',
+ ],
+ },
+ {
+ 'target_name': 'd',
+ 'type': 'static_library',
+ 'sources': [
+ 'd.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/dependencies/c/d.c b/third_party/python/gyp/test/dependencies/c/d.c
new file mode 100644
index 0000000000..05465fc1af
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/c/d.c
@@ -0,0 +1,3 @@
+int funcD() {
+ return 4;
+}
diff --git a/third_party/python/gyp/test/dependencies/double_dependency.gyp b/third_party/python/gyp/test/dependencies/double_dependency.gyp
new file mode 100644
index 0000000000..c4a2d00139
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/double_dependency.gyp
@@ -0,0 +1,23 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'double_dependency',
+ 'type': 'shared_library',
+ 'dependencies': [
+ 'double_dependent.gyp:double_dependent',
+ ],
+ 'conditions': [
+ ['1==1', {
+ 'dependencies': [
+ 'double_dependent.gyp:*',
+ ],
+ }],
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/dependencies/double_dependent.gyp b/third_party/python/gyp/test/dependencies/double_dependent.gyp
new file mode 100644
index 0000000000..334caff723
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/double_dependent.gyp
@@ -0,0 +1,12 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'double_dependent',
+ 'type': 'none',
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/dependencies/extra_targets.gyp b/third_party/python/gyp/test/dependencies/extra_targets.gyp
new file mode 100644
index 0000000000..c1a26de422
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/extra_targets.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'static_library',
+ 'sources': [
+ 'a.c',
+ ],
+ # This only depends on the "d" target; other targets in c.gyp
+ # should not become part of the build (unlike with 'c/c.gyp:*').
+ 'dependencies': ['c/c.gyp:d'],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/dependencies/gyptest-all-dependent-settings-order.py b/third_party/python/gyp/test/dependencies/gyptest-all-dependent-settings-order.py
new file mode 100644
index 0000000000..715f322f41
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/gyptest-all-dependent-settings-order.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# Copyright 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Tests that all_dependent_settings are processed in topological order.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('all_dependent_settings_order.gyp', chdir='adso')
+test.build('all_dependent_settings_order.gyp', chdir='adso')
+test.built_file_must_match('out.txt', 'd.cc a.cc b.cc c.cc',
+ chdir='adso')
+test.pass_test()
diff --git a/third_party/python/gyp/test/dependencies/gyptest-double-dependency.py b/third_party/python/gyp/test/dependencies/gyptest-double-dependency.py
new file mode 100644
index 0000000000..7692740c54
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/gyptest-double-dependency.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that pulling in a dependency a second time in a conditional works for
+shared_library targets. Regression test for http://crbug.com/122588
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('double_dependency.gyp')
+
+# If running gyp worked, all is well.
+test.pass_test()
diff --git a/third_party/python/gyp/test/dependencies/gyptest-extra-targets.py b/third_party/python/gyp/test/dependencies/gyptest-extra-targets.py
new file mode 100755
index 0000000000..09b00d958b
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/gyptest-extra-targets.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that dependencies don't pull unused targets into the build.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('extra_targets.gyp',
+ '-G', 'xcode_ninja_target_pattern=^a$')
+
+# This should fail if it tries to build 'c_unused' since 'c/c.c' has a syntax
+# error and won't compile.
+test.build('extra_targets.gyp', test.ALL)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/dependencies/gyptest-indirect-module-dependency.py b/third_party/python/gyp/test/dependencies/gyptest-indirect-module-dependency.py
new file mode 100644
index 0000000000..d001b57e7d
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/gyptest-indirect-module-dependency.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure that downstream loadable modules get built when we depend only on
+their parent targets.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+CHDIR = 'module-dep'
+test.run_gyp('indirect-module-dependency.gyp', chdir=CHDIR)
+test.build('indirect-module-dependency.gyp', 'an_exe', chdir=CHDIR)
+test.built_file_must_exist(
+ test.built_file_basename('a_module', test.LOADABLE_MODULE), chdir=CHDIR)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/dependencies/gyptest-lib-only.py b/third_party/python/gyp/test/dependencies/gyptest-lib-only.py
new file mode 100755
index 0000000000..3a99a7f4aa
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/gyptest-lib-only.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that a link time only dependency will get pulled into the set of built
+targets, even if no executable uses it.
+"""
+
+import TestGyp
+
+import sys
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('lib_only.gyp')
+
+test.build('lib_only.gyp', test.ALL)
+
+test.built_file_must_exist('a', type=test.STATIC_LIB)
+
+# TODO(bradnelson/mark):
+# On linux and windows a library target will at least pull its link dependencies
+# into the generated project, since not doing so confuses users.
+# This is not currently implemented on mac, which has the opposite behavior.
+if sys.platform == 'darwin':
+ if test.format == 'xcode':
+ test.built_file_must_not_exist('b', type=test.STATIC_LIB)
+ else:
+ assert test.format in ('make', 'ninja', 'xcode-ninja')
+ test.built_file_must_exist('b', type=test.STATIC_LIB)
+else:
+ # Make puts the resulting library in a directory matching the input gyp file;
+ # for the 'b' library, that is in the 'b' subdirectory.
+ test.built_file_must_exist('b', type=test.STATIC_LIB, subdir='b')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/dependencies/gyptest-none-traversal.py b/third_party/python/gyp/test/dependencies/gyptest-none-traversal.py
new file mode 100755
index 0000000000..c09063dad3
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/gyptest-none-traversal.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that static library dependencies don't traverse none targets, unless
+explicitly specified.
+"""
+
+import TestGyp
+
+import sys
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('none_traversal.gyp')
+
+test.build('none_traversal.gyp', test.ALL)
+
+test.run_built_executable('needs_chain', stdout="2\n")
+test.run_built_executable('doesnt_need_chain', stdout="3\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/dependencies/gyptest-sharedlib-linksettings.py b/third_party/python/gyp/test/dependencies/gyptest-sharedlib-linksettings.py
new file mode 100644
index 0000000000..87428af459
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/gyptest-sharedlib-linksettings.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that link_settings in a shared_library are not propagated to targets
+that depend on the shared_library, but are used in the shared_library itself.
+"""
+
+import TestGyp
+import sys
+
+CHDIR='sharedlib-linksettings'
+
+test = TestGyp.TestGyp()
+test.run_gyp('test.gyp', chdir=CHDIR)
+test.build('test.gyp', test.ALL, chdir=CHDIR)
+test.run_built_executable('program', stdout="1\n2\n", chdir=CHDIR)
+test.pass_test()
diff --git a/third_party/python/gyp/test/dependencies/lib_only.gyp b/third_party/python/gyp/test/dependencies/lib_only.gyp
new file mode 100755
index 0000000000..f6c84dea64
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/lib_only.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'static_library',
+ 'sources': [
+ 'a.c',
+ ],
+ 'dependencies': ['b/b.gyp:b'],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/dependencies/main.c b/third_party/python/gyp/test/dependencies/main.c
new file mode 100644
index 0000000000..185bd482f2
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/main.c
@@ -0,0 +1,14 @@
+/*
+ * Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+extern int funcA();
+
+int main() {
+ printf("%d\n", funcA());
+ return 0;
+}
diff --git a/third_party/python/gyp/test/dependencies/module-dep/a.cc b/third_party/python/gyp/test/dependencies/module-dep/a.cc
new file mode 100644
index 0000000000..231fc7a9df
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/module-dep/a.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int some_function() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/dependencies/module-dep/dll.cc b/third_party/python/gyp/test/dependencies/module-dep/dll.cc
new file mode 100644
index 0000000000..e1eea0205b
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/module-dep/dll.cc
@@ -0,0 +1,9 @@
+// Copyright (c) 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#if defined(_MSC_VER)
+__declspec(dllexport)
+#endif
+ void SomeFunction() {
+}
diff --git a/third_party/python/gyp/test/dependencies/module-dep/exe.cc b/third_party/python/gyp/test/dependencies/module-dep/exe.cc
new file mode 100644
index 0000000000..b3039ace96
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/module-dep/exe.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/dependencies/module-dep/indirect-module-dependency.gyp b/third_party/python/gyp/test/dependencies/module-dep/indirect-module-dependency.gyp
new file mode 100644
index 0000000000..f3fb5320fe
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/module-dep/indirect-module-dependency.gyp
@@ -0,0 +1,37 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'an_exe',
+ 'type': 'executable',
+ 'sources': ['exe.cc'],
+ 'dependencies': [
+ 'a_dll',
+ ],
+ },
+ {
+ 'target_name': 'a_dll',
+ 'type': 'shared_library',
+ 'sources': ['dll.cc'],
+ 'dependencies': [
+ 'a_lib',
+ ],
+ },
+ {
+ 'target_name': 'a_lib',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'a_module',
+ ],
+ 'sources': ['a.cc'],
+ },
+ {
+ 'target_name': 'a_module',
+ 'type': 'loadable_module',
+ 'sources': ['a.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/dependencies/none_traversal.gyp b/third_party/python/gyp/test/dependencies/none_traversal.gyp
new file mode 100755
index 0000000000..3d8ab30aff
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/none_traversal.gyp
@@ -0,0 +1,46 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'needs_chain',
+ 'type': 'executable',
+ 'sources': [
+ 'a.c',
+ 'main.c',
+ ],
+ 'dependencies': ['chain'],
+ },
+ {
+ 'target_name': 'chain',
+ 'type': 'none',
+ 'dependencies': ['b/b.gyp:b'],
+ },
+ {
+ 'target_name': 'doesnt_need_chain',
+ 'type': 'executable',
+ 'sources': [
+ 'main.c',
+ ],
+ 'dependencies': ['no_chain', 'other_chain'],
+ },
+ {
+ 'target_name': 'no_chain',
+ 'type': 'none',
+ 'sources': [
+ ],
+ 'dependencies': ['b/b.gyp:b'],
+ 'dependencies_traverse': 0,
+ },
+ {
+ 'target_name': 'other_chain',
+ 'type': 'static_library',
+ 'sources': [
+ 'a.c',
+ ],
+ 'dependencies': ['b/b.gyp:b3'],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/dependencies/sharedlib-linksettings/program.c b/third_party/python/gyp/test/dependencies/sharedlib-linksettings/program.c
new file mode 100644
index 0000000000..b7c15edcd6
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/sharedlib-linksettings/program.c
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2013 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+/*
+ * This will fail to compile if TEST_DEFINE was propagated from sharedlib to
+ * program.
+ */
+#ifdef TEST_DEFINE
+#error TEST_DEFINE is already defined!
+#endif
+
+#define TEST_DEFINE 2
+
+extern int staticLibFunc();
+
+int main() {
+ printf("%d\n", staticLibFunc());
+ printf("%d\n", TEST_DEFINE);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/dependencies/sharedlib-linksettings/sharedlib.c b/third_party/python/gyp/test/dependencies/sharedlib-linksettings/sharedlib.c
new file mode 100644
index 0000000000..3199bccd66
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/sharedlib-linksettings/sharedlib.c
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2013 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+int sharedLibFunc() {
+ /*
+ * This will fail to compile if TEST_DEFINE was not obtained from sharedlib's
+ * link_settings.
+ */
+ return TEST_DEFINE;
+}
diff --git a/third_party/python/gyp/test/dependencies/sharedlib-linksettings/staticlib.c b/third_party/python/gyp/test/dependencies/sharedlib-linksettings/staticlib.c
new file mode 100644
index 0000000000..e889b419fd
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/sharedlib-linksettings/staticlib.c
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2013 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+/*
+ * This will fail to compile if TEST_DEFINE was propagated from sharedlib to
+ * staticlib.
+ */
+#ifdef TEST_DEFINE
+#error TEST_DEFINE is defined!
+#endif
+
+#ifdef _WIN32
+__declspec(dllimport)
+#else
+extern
+#endif
+int sharedLibFunc();
+
+int staticLibFunc() {
+ return sharedLibFunc();
+}
diff --git a/third_party/python/gyp/test/dependencies/sharedlib-linksettings/test.gyp b/third_party/python/gyp/test/dependencies/sharedlib-linksettings/test.gyp
new file mode 100644
index 0000000000..830ce3236d
--- /dev/null
+++ b/third_party/python/gyp/test/dependencies/sharedlib-linksettings/test.gyp
@@ -0,0 +1,37 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'allow_sharedlib_linksettings_propagation': 0,
+ },
+ 'targets': [
+ {
+ 'target_name': 'sharedlib',
+ 'type': 'shared_library',
+ 'sources': [ 'sharedlib.c' ],
+ 'link_settings': {
+ 'defines': [ 'TEST_DEFINE=1' ],
+ },
+ 'conditions': [
+ ['OS=="linux"', {
+ # Support 64-bit shared libs (also works fine for 32-bit).
+ 'cflags': ['-fPIC'],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'staticlib',
+ 'type': 'static_library',
+ 'sources': [ 'staticlib.c' ],
+ 'dependencies': [ 'sharedlib' ],
+ },
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'sources': [ 'program.c' ],
+ 'dependencies': [ 'staticlib' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/dependency-copy/gyptest-copy.py b/third_party/python/gyp/test/dependency-copy/gyptest-copy.py
new file mode 100755
index 0000000000..5ba7c73d41
--- /dev/null
+++ b/third_party/python/gyp/test/dependency-copy/gyptest-copy.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies dependencies do the copy step.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('copies.gyp', chdir='src')
+
+test.build('copies.gyp', 'proj2', chdir='src')
+
+test.run_built_executable('proj1',
+ chdir='src',
+ stdout="Hello from file1.c\n")
+test.run_built_executable('proj2',
+ chdir='src',
+ stdout="Hello from file2.c\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/dependency-copy/src/copies.gyp b/third_party/python/gyp/test/dependency-copy/src/copies.gyp
new file mode 100644
index 0000000000..4176b18787
--- /dev/null
+++ b/third_party/python/gyp/test/dependency-copy/src/copies.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'proj1',
+ 'type': 'executable',
+ 'sources': [
+ 'file1.c',
+ ],
+ },
+ {
+ 'target_name': 'proj2',
+ 'type': 'executable',
+ 'sources': [
+ 'file2.c',
+ ],
+ 'dependencies': [
+ 'proj1',
+ ]
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/dependency-copy/src/file1.c b/third_party/python/gyp/test/dependency-copy/src/file1.c
new file mode 100644
index 0000000000..d7c3159186
--- /dev/null
+++ b/third_party/python/gyp/test/dependency-copy/src/file1.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello from file1.c\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/dependency-copy/src/file2.c b/third_party/python/gyp/test/dependency-copy/src/file2.c
new file mode 100644
index 0000000000..cf40f57f94
--- /dev/null
+++ b/third_party/python/gyp/test/dependency-copy/src/file2.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello from file2.c\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/dependent-settings/nested-dependent-settings/all-dependent-settings.gyp b/third_party/python/gyp/test/dependent-settings/nested-dependent-settings/all-dependent-settings.gyp
new file mode 100644
index 0000000000..b67ccaeb69
--- /dev/null
+++ b/third_party/python/gyp/test/dependent-settings/nested-dependent-settings/all-dependent-settings.gyp
@@ -0,0 +1,19 @@
+{
+ "targets": [
+ {
+ "target_name": "settings",
+ "type": "none",
+ "all_dependent_settings": {
+ "target_conditions": [
+ ["'library' in _type", {"all_dependent_settings": {}}]
+ ]
+ },
+ },
+ {
+ "target_name": "library",
+ "type": "static_library",
+ "dependencies": ["settings"],
+ },
+ ]
+}
+
diff --git a/third_party/python/gyp/test/dependent-settings/nested-dependent-settings/direct-dependent-settings.gyp b/third_party/python/gyp/test/dependent-settings/nested-dependent-settings/direct-dependent-settings.gyp
new file mode 100644
index 0000000000..6e8a6165e4
--- /dev/null
+++ b/third_party/python/gyp/test/dependent-settings/nested-dependent-settings/direct-dependent-settings.gyp
@@ -0,0 +1,19 @@
+{
+ "targets": [
+ {
+ "target_name": "settings",
+ "type": "none",
+ "all_dependent_settings": {
+ "target_conditions": [
+ ["'library' in _type", {"direct_dependent_settings": {}}]
+ ]
+ },
+ },
+ {
+ "target_name": "library",
+ "type": "static_library",
+ "dependencies": ["settings"],
+ },
+ ]
+}
+
diff --git a/third_party/python/gyp/test/dependent-settings/nested-dependent-settings/gyptest-nested-dependent-settings.py b/third_party/python/gyp/test/dependent-settings/nested-dependent-settings/gyptest-nested-dependent-settings.py
new file mode 100644
index 0000000000..a45de898a4
--- /dev/null
+++ b/third_party/python/gyp/test/dependent-settings/nested-dependent-settings/gyptest-nested-dependent-settings.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies project generation for nested dependent_settings directives.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp("all-dependent-settings.gyp")
+test.run_gyp("direct-dependent-settings.gyp")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/determinism/determinism.gyp b/third_party/python/gyp/test/determinism/determinism.gyp
new file mode 100644
index 0000000000..81346748a1
--- /dev/null
+++ b/third_party/python/gyp/test/determinism/determinism.gyp
@@ -0,0 +1,59 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'determinism',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'depfile_action',
+ 'inputs': [
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output.txt',
+ ],
+ 'depfile': 'depfile.d',
+ 'action': [ ]
+ },
+ ],
+ },
+ {
+ 'target_name': 'determinism2',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'depfile_action',
+ 'inputs': [
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output.txt',
+ ],
+ 'depfile': 'depfile.d',
+ 'action': [ ]
+ },
+ ],
+ },
+ {
+ 'target_name': 'determinism3',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'depfile_action',
+ 'inputs': [
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output.txt',
+ ],
+ 'depfile': 'depfile.d',
+ 'action': [ ]
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/determinism/empty-targets.gyp b/third_party/python/gyp/test/determinism/empty-targets.gyp
new file mode 100644
index 0000000000..a4ccdd703c
--- /dev/null
+++ b/third_party/python/gyp/test/determinism/empty-targets.gyp
@@ -0,0 +1,32 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'empty_target1',
+ 'type': 'none',
+ },
+ {
+ 'target_name': 'empty_target2',
+ 'type': 'none',
+ },
+ {
+ 'target_name': 'empty_target3',
+ 'type': 'none',
+ },
+ {
+ 'target_name': 'empty_target4',
+ 'type': 'none',
+ },
+ {
+ 'target_name': 'empty_target5',
+ 'type': 'none',
+ },
+ {
+ 'target_name': 'empty_target6',
+ 'type': 'none',
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/determinism/gyptest-determinism.py b/third_party/python/gyp/test/determinism/gyptest-determinism.py
new file mode 100644
index 0000000000..670cb4bc9f
--- /dev/null
+++ b/third_party/python/gyp/test/determinism/gyptest-determinism.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies builds are the same even with different PYTHONHASHSEEDs.
+Tests target_short_names and FlattenToList.
+"""
+
+import os
+import sys
+import TestGyp
+
+test = TestGyp.TestGyp()
+if test.format == 'ninja':
+ os.environ["PYTHONHASHSEED"] = "1"
+ test.run_gyp('determinism.gyp')
+ base = open(test.built_file_path('build.ninja')).read()
+
+ for i in range(1,5):
+ os.environ["PYTHONHASHSEED"] = str(i)
+ test.run_gyp('determinism.gyp')
+ contents = open(test.built_file_path('build.ninja')).read()
+ if base != contents:
+ test.fail_test()
+
+ del os.environ["PYTHONHASHSEED"]
+ test.pass_test()
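The determinism tests in this directory share one pattern: generate once with PYTHONHASHSEED=1, regenerate under several other seeds, and require the emitted build.ninja to be byte-identical. The hazard they guard against is that Python's string hashing is seed-dependent, so writing out a dict or set in raw iteration order is not reproducible, while sorting first is. A minimal standalone sketch of that hazard, independent of gyp (all names illustrative):

    #!/usr/bin/env python
    # Run this under different PYTHONHASHSEED values to see the difference:
    # joining a set in iteration order can change with the seed, joining the
    # sorted set cannot.
    import os

    targets = {'determinism', 'determinism2', 'determinism3'}

    print('PYTHONHASHSEED=%s' % os.environ.get('PYTHONHASHSEED', '(random)'))
    print('unstable: ' + ' '.join(targets))
    print('stable:   ' + ' '.join(sorted(targets)))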
diff --git a/third_party/python/gyp/test/determinism/gyptest-empty-target-names.py b/third_party/python/gyp/test/determinism/gyptest-empty-target-names.py
new file mode 100644
index 0000000000..cf49f50084
--- /dev/null
+++ b/third_party/python/gyp/test/determinism/gyptest-empty-target-names.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies builds are the same even with different PYTHONHASHSEEDs.
+Tests empty targets.
+"""
+
+import os
+import sys
+import TestGyp
+
+test = TestGyp.TestGyp()
+if test.format == 'ninja':
+ os.environ["PYTHONHASHSEED"] = "1"
+ test.run_gyp('empty-targets.gyp')
+ base = open(test.built_file_path('build.ninja')).read()
+
+ for i in range(1,5):
+ os.environ["PYTHONHASHSEED"] = str(i)
+ test.run_gyp('empty-targets.gyp')
+ contents = open(test.built_file_path('build.ninja')).read()
+ if base != contents:
+ test.fail_test()
+
+ del os.environ["PYTHONHASHSEED"]
+ test.pass_test()
diff --git a/third_party/python/gyp/test/determinism/gyptest-needed-variables.py b/third_party/python/gyp/test/determinism/gyptest-needed-variables.py
new file mode 100644
index 0000000000..7b97cca0d2
--- /dev/null
+++ b/third_party/python/gyp/test/determinism/gyptest-needed-variables.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies builds are the same even with different PYTHONHASHSEEDs.
+Tests needed_variables.
+"""
+
+import os
+import sys
+import TestGyp
+
+test = TestGyp.TestGyp()
+if test.format == 'ninja':
+ os.environ["PYTHONHASHSEED"] = "1"
+ test.run_gyp('needed-variables.gyp')
+ base = open(test.built_file_path('test.ninja', subdir='obj')).read()
+
+ for i in range(1,5):
+ os.environ["PYTHONHASHSEED"] = str(i)
+ test.run_gyp('needed-variables.gyp')
+ contents = open(test.built_file_path('test.ninja', subdir='obj')).read()
+ if base != contents:
+ test.fail_test()
+
+ del os.environ["PYTHONHASHSEED"]
+ test.pass_test()
diff --git a/third_party/python/gyp/test/determinism/gyptest-solibs.py b/third_party/python/gyp/test/determinism/gyptest-solibs.py
new file mode 100644
index 0000000000..a9c312573b
--- /dev/null
+++ b/third_party/python/gyp/test/determinism/gyptest-solibs.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies builds are the same even with different PYTHONHASHSEEDs.
+Tests all_targets, implicit_deps and solibs.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+import TestGyp
+
+test = TestGyp.TestGyp()
+if test.format == 'ninja':
+ os.environ["PYTHONHASHSEED"] = "1"
+ test.run_gyp('solibs.gyp')
+ base1 = open(test.built_file_path('c.ninja', subdir='obj')).read()
+ base2 = open(test.built_file_path('build.ninja')).read()
+
+ for i in range(1,5):
+ os.environ["PYTHONHASHSEED"] = str(i)
+ test.run_gyp('solibs.gyp')
+ contents1 = open(test.built_file_path('c.ninja', subdir='obj')).read()
+ contents2 = open(test.built_file_path('build.ninja')).read()
+ if base1 != contents1:
+ test.fail_test()
+ if base2 != contents2:
+ print(base2)
+ test.fail_test()
+
+ del os.environ["PYTHONHASHSEED"]
+ test.pass_test()
diff --git a/third_party/python/gyp/test/determinism/main.cc b/third_party/python/gyp/test/determinism/main.cc
new file mode 100644
index 0000000000..2cd74d3c77
--- /dev/null
+++ b/third_party/python/gyp/test/determinism/main.cc
@@ -0,0 +1,5 @@
+extern int foo();
+
+int main() {
+ return foo();
+}
diff --git a/third_party/python/gyp/test/determinism/needed-variables.gyp b/third_party/python/gyp/test/determinism/needed-variables.gyp
new file mode 100644
index 0000000000..022165bebd
--- /dev/null
+++ b/third_party/python/gyp/test/determinism/needed-variables.gyp
@@ -0,0 +1,33 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test',
+ 'type': 'executable',
+ 'sources': ['rule.ext'],
+ 'rules': [{
+ 'rule_name': 'rule',
+ 'extension': 'ext',
+ 'inputs': [ 'rule.py', ],
+ 'action': [
+ 'python',
+ 'rule.py',
+ '<(RULE_INPUT_ROOT)',
+ '<(RULE_INPUT_EXT)',
+ '<(RULE_INPUT_DIRNAME)',
+ '<(RULE_INPUT_NAME)',
+ '<(RULE_INPUT_PATH)',
+ ],
+ 'outputs': [ 'hello_world.txt' ],
+ 'sources': ['rule.ext'],
+ 'message': 'Processing <(RULE_INPUT_PATH)',
+ 'process_outputs_as_sources': 1,
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }],
+ },
+ ],
+}
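The rule above passes every RULE_INPUT_* placeholder on the command line so the generator is forced to compute them all. Roughly, those placeholders decompose the matched source path as sketched below (a best-effort restatement of the assumed gyp semantics, not gyp's own code):

    import os

    # Assumed decomposition of a matched source for the rule above.
    path = 'rule.ext'                    # <(RULE_INPUT_PATH): the input as given
    dirname = os.path.dirname(path)      # <(RULE_INPUT_DIRNAME)
    name = os.path.basename(path)        # <(RULE_INPUT_NAME)
    root, ext = os.path.splitext(name)   # <(RULE_INPUT_ROOT), <(RULE_INPUT_EXT)

    print(path, dirname, name, root, ext)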
diff --git a/third_party/python/gyp/test/determinism/rule.py b/third_party/python/gyp/test/determinism/rule.py
new file mode 100644
index 0000000000..e18c314557
--- /dev/null
+++ b/third_party/python/gyp/test/determinism/rule.py
@@ -0,0 +1,8 @@
+#!/usr/bin/env python
+# Copyright (c) 2017 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+print('Hello World')
diff --git a/third_party/python/gyp/test/determinism/solib.cc b/third_party/python/gyp/test/determinism/solib.cc
new file mode 100644
index 0000000000..0856cd4e00
--- /dev/null
+++ b/third_party/python/gyp/test/determinism/solib.cc
@@ -0,0 +1,8 @@
+#ifdef _MSC_VER
+__declspec(dllexport)
+#else
+__attribute__((visibility("default")))
+#endif
+int foo() {
+ return 42;
+}
diff --git a/third_party/python/gyp/test/determinism/solibs.gyp b/third_party/python/gyp/test/determinism/solibs.gyp
new file mode 100644
index 0000000000..9ae3246d63
--- /dev/null
+++ b/third_party/python/gyp/test/determinism/solibs.gyp
@@ -0,0 +1,32 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This test covers both solibs and implicit_deps.
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'shared_library',
+ 'sources': [ 'solib.cc' ],
+ },
+ {
+ 'target_name': 'b',
+ 'type': 'shared_library',
+ 'sources': [ 'solib.cc' ],
+ },
+ {
+ 'target_name': 'c',
+ 'type': 'executable',
+ 'sources': [ 'main.cc' ],
+ 'dependencies': [ 'a', 'b' ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="linux"', {
+ 'target_defaults': {
+ 'cflags': ['-fPIC'],
+ },
+ }],
+ ],
+}
diff --git a/third_party/python/gyp/test/empty-target/empty-target.gyp b/third_party/python/gyp/test/empty-target/empty-target.gyp
new file mode 100644
index 0000000000..feefa28058
--- /dev/null
+++ b/third_party/python/gyp/test/empty-target/empty-target.gyp
@@ -0,0 +1,12 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'empty_target',
+ 'type': 'none',
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/empty-target/gyptest-empty-target.py b/third_party/python/gyp/test/empty-target/gyptest-empty-target.py
new file mode 100644
index 0000000000..ecadd4a87f
--- /dev/null
+++ b/third_party/python/gyp/test/empty-target/gyptest-empty-target.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a target with nothing succeeds.
+"""
+
+import os
+import sys
+import TestGyp
+
+test = TestGyp.TestGyp()
+test.run_gyp('empty-target.gyp')
+test.build('empty-target.gyp', target='empty_target')
+test.pass_test()
diff --git a/third_party/python/gyp/test/errors/dependency_cycle.gyp b/third_party/python/gyp/test/errors/dependency_cycle.gyp
new file mode 100644
index 0000000000..eef44bc9eb
--- /dev/null
+++ b/third_party/python/gyp/test/errors/dependency_cycle.gyp
@@ -0,0 +1,23 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'target0',
+ 'type': 'none',
+ 'dependencies': [ 'target1' ],
+ },
+ {
+ 'target_name': 'target1',
+ 'type': 'none',
+ 'dependencies': [ 'target2' ],
+ },
+ {
+ 'target_name': 'target2',
+ 'type': 'none',
+ 'dependencies': [ 'target0' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/errors/duplicate_basenames.gyp b/third_party/python/gyp/test/errors/duplicate_basenames.gyp
new file mode 100644
index 0000000000..b3dceb3949
--- /dev/null
+++ b/third_party/python/gyp/test/errors/duplicate_basenames.gyp
@@ -0,0 +1,13 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'static_library',
+ 'sources': ['foo.c', 'foo.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/errors/duplicate_node.gyp b/third_party/python/gyp/test/errors/duplicate_node.gyp
new file mode 100644
index 0000000000..d6096096bd
--- /dev/null
+++ b/third_party/python/gyp/test/errors/duplicate_node.gyp
@@ -0,0 +1,12 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ { 'target_name' : 'foo', 'type': 'executable' },
+ ],
+ 'targets': [
+ { 'target_name' : 'bar', 'type': 'executable' },
+ ]
+}
diff --git a/third_party/python/gyp/test/errors/duplicate_rule.gyp b/third_party/python/gyp/test/errors/duplicate_rule.gyp
new file mode 100644
index 0000000000..dab98e96c2
--- /dev/null
+++ b/third_party/python/gyp/test/errors/duplicate_rule.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'executable',
+ 'rules': [
+ {
+ 'rule_name': 'bar',
+ 'extension': '',
+ },
+ {
+ 'rule_name': 'bar',
+ 'extension': '',
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/errors/duplicate_targets.gyp b/third_party/python/gyp/test/errors/duplicate_targets.gyp
new file mode 100644
index 0000000000..aec470eefa
--- /dev/null
+++ b/third_party/python/gyp/test/errors/duplicate_targets.gyp
@@ -0,0 +1,14 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'foo'
+ },
+ {
+ 'target_name': 'foo'
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/errors/error_command.gyp b/third_party/python/gyp/test/errors/error_command.gyp
new file mode 100644
index 0000000000..1736fc9882
--- /dev/null
+++ b/third_party/python/gyp/test/errors/error_command.gyp
@@ -0,0 +1,12 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': '<!(["python", "-c", "import sys; sys.exit(3)"])',
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/errors/file_cycle0.gyp b/third_party/python/gyp/test/errors/file_cycle0.gyp
new file mode 100644
index 0000000000..3bfafb6cb3
--- /dev/null
+++ b/third_party/python/gyp/test/errors/file_cycle0.gyp
@@ -0,0 +1,17 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'top',
+ 'type': 'none',
+ 'dependencies': [ 'file_cycle1.gyp:middle' ],
+ },
+ {
+ 'target_name': 'bottom',
+ 'type': 'none',
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/errors/file_cycle1.gyp b/third_party/python/gyp/test/errors/file_cycle1.gyp
new file mode 100644
index 0000000000..fbd7a0d167
--- /dev/null
+++ b/third_party/python/gyp/test/errors/file_cycle1.gyp
@@ -0,0 +1,13 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'middle',
+ 'type': 'none',
+ 'dependencies': [ 'file_cycle0.gyp:bottom' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/errors/gyptest-errors.py b/third_party/python/gyp/test/errors/gyptest-errors.py
new file mode 100755
index 0000000000..0296f800f5
--- /dev/null
+++ b/third_party/python/gyp/test/errors/gyptest-errors.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test that two targets with the same name generate an error.
+"""
+
+import os
+import sys
+
+import TestGyp
+import TestCmd
+
+# TODO(sbc): Remove the use of match_re below, done because scons
+# error messages were not consistent with other generators.
+# Also remove input.py:generator_wants_absolute_build_file_paths.
+
+test = TestGyp.TestGyp()
+
+stderr = ('gyp: Duplicate target definitions for '
+ '.*duplicate_targets.gyp:foo#target\n')
+test.run_gyp('duplicate_targets.gyp', status=1, stderr=stderr,
+ match=TestCmd.match_re)
+
+stderr = ('.*: Unable to find targets in build file .*missing_targets.gyp.*')
+test.run_gyp('missing_targets.gyp', status=1, stderr=stderr,
+ match=TestCmd.match_re_dotall)
+
+stderr = ('gyp: rule bar exists in duplicate, target '
+ '.*duplicate_rule.gyp:foo#target\n')
+test.run_gyp('duplicate_rule.gyp', status=1, stderr=stderr,
+ match=TestCmd.match_re)
+
+stderr = ("gyp: Key 'targets' repeated at level 1 with key path '' while "
+ "reading .*duplicate_node.gyp.*")
+test.run_gyp('duplicate_node.gyp', '--check', status=1, stderr=stderr,
+ match=TestCmd.match_re_dotall)
+
+stderr = (".*target0.*target1.*target2.*target0.*")
+test.run_gyp('dependency_cycle.gyp', status=1, stderr=stderr,
+ match=TestCmd.match_re_dotall)
+
+stderr = (".*file_cycle0.*file_cycle1.*file_cycle0.*")
+test.run_gyp('file_cycle0.gyp', status=1, stderr=stderr,
+ match=TestCmd.match_re_dotall)
+
+stderr = 'gyp: Duplicate basenames in sources section, see list above\n'
+test.run_gyp('duplicate_basenames.gyp', status=1, stderr=stderr)
+
+# Check if '--no-duplicate-basename-check' works.
+if ((test.format == 'make' and sys.platform == 'darwin') or
+ (test.format == 'msvs' and
+ int(os.environ.get('GYP_MSVS_VERSION', 2010)) < 2010)):
+ stderr = 'gyp: Duplicate basenames in sources section, see list above\n'
+ test.run_gyp('duplicate_basenames.gyp', '--no-duplicate-basename-check',
+ status=1, stderr=stderr)
+else:
+ test.run_gyp('duplicate_basenames.gyp', '--no-duplicate-basename-check')
+
+stderr = ("gyp: Dependency '.*missing_dep.gyp:missing.gyp#target' not found "
+ "while trying to load target .*missing_dep.gyp:foo#target\n")
+test.run_gyp('missing_dep.gyp', status=1, stderr=stderr,
+ match=TestCmd.match_re)
+
+# Make sure invalid <!() command invocations report which command was run and
+# mention the gyp file name. Use a "random" command name to trigger an ENOENT.
+stderr = (".*invalid-command-name-egtyevNif3.*netDurj9.*missing_command.gyp.*")
+test.run_gyp('missing_command.gyp', status=1, stderr=stderr,
+ match=TestCmd.match_re_dotall)
+
+# Make sure <!() commands that error out result in a message that mentions
+# the command and the gyp file name.
+stderr = (".*python.*-c.*import sys.*sys.exit.*3.*error_command.gyp.*")
+test.run_gyp('error_command.gyp', status=1, stderr=stderr,
+ match=TestCmd.match_re_dotall)
+
+test.pass_test()
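The last two checks above rely on how gyp expands <!(...): the listed argv is run at generation time and its stdout is substituted into the .gyp file, so a missing command (ENOENT) or a non-zero exit has to be reported together with the argv and the offending file. A rough approximation of that expansion step, written here only for illustration (this is not gyp's internal API):

    import subprocess

    def expand_command(argv, gyp_file):
        # Run the command and substitute its stdout; on failure, surface both
        # the argv and the .gyp file, as the regexes above expect.
        try:
            out = subprocess.check_output(argv)
        except (OSError, subprocess.CalledProcessError) as e:
            raise RuntimeError('Call to %r failed while loading %s: %s'
                               % (argv, gyp_file, e))
        return out.decode().strip()

    # expand_command(['python', '-c', 'import sys; sys.exit(3)'], 'error_command.gyp')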
diff --git a/third_party/python/gyp/test/errors/missing_command.gyp b/third_party/python/gyp/test/errors/missing_command.gyp
new file mode 100644
index 0000000000..c93d9542c6
--- /dev/null
+++ b/third_party/python/gyp/test/errors/missing_command.gyp
@@ -0,0 +1,12 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': '<!(["invalid-command-name-egtyevNif3", "netDurj9"])',
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/errors/missing_dep.gyp b/third_party/python/gyp/test/errors/missing_dep.gyp
new file mode 100644
index 0000000000..08746be3d7
--- /dev/null
+++ b/third_party/python/gyp/test/errors/missing_dep.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'missing.gyp'
+ ]
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/errors/missing_targets.gyp b/third_party/python/gyp/test/errors/missing_targets.gyp
new file mode 100644
index 0000000000..13d4f924c1
--- /dev/null
+++ b/third_party/python/gyp/test/errors/missing_targets.gyp
@@ -0,0 +1,8 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ },
+}
diff --git a/third_party/python/gyp/test/escaping/colon/test.gyp b/third_party/python/gyp/test/escaping/colon/test.gyp
new file mode 100644
index 0000000000..715f95490e
--- /dev/null
+++ b/third_party/python/gyp/test/escaping/colon/test.gyp
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'colon',
+ 'type': 'executable',
+ 'sources': [
+ 'a:b.c',
+ ],
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/',
+ # MSVS2008 gets confused if the same file is in 'sources' and 'copies'
+ 'files': [ 'a:b.c-d', ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/escaping/gyptest-colon.py b/third_party/python/gyp/test/escaping/gyptest-colon.py
new file mode 100644
index 0000000000..f62f8dc65e
--- /dev/null
+++ b/third_party/python/gyp/test/escaping/gyptest-colon.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Tests that filenames that contain colons are handled correctly.
+(This is important for absolute paths on Windows.)
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
+import TestGyp
+
+# TODO: Make colons in filenames work with make, if required.
+test = TestGyp.TestGyp(formats=['!make'])
+CHDIR = 'colon'
+
+source_name = 'colon/a:b.c'
+copies_name = 'colon/a:b.c-d'
+if sys.platform == 'win32':
+ # Windows uses ':' as a drive separator and doesn't allow it in regular filenames.
+ # Use abspath() to create a path that contains a colon instead.
+ abs_source = os.path.abspath('colon/file.c')
+ test.write('colon/test.gyp',
+ test.read('colon/test.gyp').replace("'a:b.c'", repr(abs_source)))
+ source_name = abs_source
+
+ abs_copies = os.path.abspath('colon/file.txt')
+ test.write('colon/test.gyp',
+ test.read('colon/test.gyp').replace("'a:b.c-d'", repr(abs_copies)))
+ copies_name = abs_copies
+
+# Create the file dynamically; Windows is unhappy if a file with a colon in
+# its name is checked in.
+test.write(source_name, 'int main() {}')
+test.write(copies_name, 'foo')
+
+test.run_gyp('test.gyp', chdir=CHDIR)
+test.build('test.gyp', test.ALL, chdir=CHDIR)
+test.built_file_must_exist(os.path.basename(copies_name), chdir=CHDIR)
+test.pass_test()
diff --git a/third_party/python/gyp/test/exclusion/exclusion.gyp b/third_party/python/gyp/test/exclusion/exclusion.gyp
new file mode 100644
index 0000000000..1232dabaef
--- /dev/null
+++ b/third_party/python/gyp/test/exclusion/exclusion.gyp
@@ -0,0 +1,23 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ 'bogus.c',
+ 'also/not/real.c',
+ 'also/not/real2.c',
+ ],
+ 'sources!': [
+ 'bogus.c',
+ 'also/not/real.c',
+ 'also/not/real2.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/exclusion/gyptest-exclusion.py b/third_party/python/gyp/test/exclusion/gyptest-exclusion.py
new file mode 100755
index 0000000000..1fc32bf871
--- /dev/null
+++ b/third_party/python/gyp/test/exclusion/gyptest-exclusion.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that exclusions (e.g. sources!) are respected. Excluded sources
+that do not exist should not prevent the build from succeeding.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('exclusion.gyp')
+test.build('exclusion.gyp')
+
+# executables
+test.built_file_must_exist('hello' + test._exe, test.EXECUTABLE, bare=True)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/exclusion/hello.c b/third_party/python/gyp/test/exclusion/hello.c
new file mode 100644
index 0000000000..6e7dc8e419
--- /dev/null
+++ b/third_party/python/gyp/test/exclusion/hello.c
@@ -0,0 +1,15 @@
+/* Copyright (c) 2010 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int func1(void) {
+ return 42;
+}
+
+int main(void) {
+ printf("Hello, world!\n");
+ printf("%d\n", func1());
+ return 0;
+}
diff --git a/third_party/python/gyp/test/external-cross-compile/gyptest-cross.py b/third_party/python/gyp/test/external-cross-compile/gyptest-cross.py
new file mode 100755
index 0000000000..a837ec57dc
--- /dev/null
+++ b/third_party/python/gyp/test/external-cross-compile/gyptest-cross.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that actions plus a source scanner can be used to implement
+cross-compiles (for Native Client at this point).
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('cross.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('cross.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+From test1.cc
+From test2.c
+From test3.cc
+From test4.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/external-cross-compile/src/bogus1.cc b/third_party/python/gyp/test/external-cross-compile/src/bogus1.cc
new file mode 100644
index 0000000000..1b8d01199b
--- /dev/null
+++ b/third_party/python/gyp/test/external-cross-compile/src/bogus1.cc
@@ -0,0 +1 @@
+From bogus1.cc
diff --git a/third_party/python/gyp/test/external-cross-compile/src/bogus2.c b/third_party/python/gyp/test/external-cross-compile/src/bogus2.c
new file mode 100644
index 0000000000..cbf4a123c4
--- /dev/null
+++ b/third_party/python/gyp/test/external-cross-compile/src/bogus2.c
@@ -0,0 +1 @@
+From bogus2.c
diff --git a/third_party/python/gyp/test/external-cross-compile/src/cross.gyp b/third_party/python/gyp/test/external-cross-compile/src/cross.gyp
new file mode 100644
index 0000000000..aeda76b5bd
--- /dev/null
+++ b/third_party/python/gyp/test/external-cross-compile/src/cross.gyp
@@ -0,0 +1,83 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': ['cross_compile.gypi'],
+ 'target_defaults': {
+ 'variables': {
+ 'nix_lame%': 0,
+ },
+ 'target_conditions': [
+ ['nix_lame==1', {
+ 'sources/': [
+ ['exclude', 'lame'],
+ ],
+ }],
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'dependencies': [
+ 'program_inc',
+ ],
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ],
+ 'sources': [
+ 'program.cc',
+ ],
+ },
+ {
+ 'target_name': 'program_inc',
+ 'type': 'none',
+ 'dependencies': ['cross_program'],
+ 'actions': [
+ {
+ 'action_name': 'program_inc',
+ 'inputs': ['<(SHARED_INTERMEDIATE_DIR)/cross_program.fake'],
+ 'outputs': ['<(SHARED_INTERMEDIATE_DIR)/cross_program.h'],
+ 'action': ['python', 'tochar.py', '<@(_inputs)', '<@(_outputs)'],
+ },
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'target_name': 'cross_program',
+ 'type': 'none',
+ 'variables': {
+ 'cross': 1,
+ 'nix_lame': 1,
+ },
+ 'dependencies': ['cross_lib'],
+ 'sources': [
+ 'test1.cc',
+ 'test2.c',
+ 'very_lame.cc',
+ '<(SHARED_INTERMEDIATE_DIR)/cross_lib.fake',
+ ],
+ },
+ {
+ 'target_name': 'cross_lib',
+ 'type': 'none',
+ 'variables': {
+ 'cross': 1,
+ 'nix_lame': 1,
+ },
+ 'sources': [
+ 'test3.cc',
+ 'test4.c',
+ 'bogus1.cc',
+ 'bogus2.c',
+ 'sort_of_lame.cc',
+ ],
+ 'sources!': [
+ 'bogus1.cc',
+ 'bogus2.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/external-cross-compile/src/cross_compile.gypi b/third_party/python/gyp/test/external-cross-compile/src/cross_compile.gypi
new file mode 100644
index 0000000000..36e651903f
--- /dev/null
+++ b/third_party/python/gyp/test/external-cross-compile/src/cross_compile.gypi
@@ -0,0 +1,23 @@
+{
+ 'target_defaults': {
+ 'variables': {
+ 'cross%': 0,
+ },
+ 'target_conditions': [
+ ['cross==1', {
+ 'actions': [
+ {
+ 'action_name': 'cross compile >(_target_name)',
+ 'inputs': ['^@(_sources)'],
+ 'outputs': ['<(SHARED_INTERMEDIATE_DIR)/>(_target_name).fake'],
+ 'action': [
+ 'python', 'fake_cross.py', '>@(_outputs)', '^@(_sources)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ }],
+ ],
+ },
+}
diff --git a/third_party/python/gyp/test/external-cross-compile/src/fake_cross.py b/third_party/python/gyp/test/external-cross-compile/src/fake_cross.py
new file mode 100644
index 0000000000..05eacc6a63
--- /dev/null
+++ b/third_party/python/gyp/test/external-cross-compile/src/fake_cross.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+fh = open(sys.argv[1], 'w')
+
+filenames = sys.argv[2:]
+
+for filename in filenames:
+ subfile = open(filename)
+ data = subfile.read()
+ subfile.close()
+ fh.write(data)
+
+fh.close()
diff --git a/third_party/python/gyp/test/external-cross-compile/src/program.cc b/third_party/python/gyp/test/external-cross-compile/src/program.cc
new file mode 100644
index 0000000000..5172ae90fe
--- /dev/null
+++ b/third_party/python/gyp/test/external-cross-compile/src/program.cc
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2012 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+static char data[] = {
+#include "cross_program.h"
+};
+
+int main(void) {
+ fwrite(data, 1, sizeof(data), stdout);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/external-cross-compile/src/test1.cc b/third_party/python/gyp/test/external-cross-compile/src/test1.cc
new file mode 100644
index 0000000000..b584c31d15
--- /dev/null
+++ b/third_party/python/gyp/test/external-cross-compile/src/test1.cc
@@ -0,0 +1 @@
+From test1.cc
diff --git a/third_party/python/gyp/test/external-cross-compile/src/test2.c b/third_party/python/gyp/test/external-cross-compile/src/test2.c
new file mode 100644
index 0000000000..367ae19ea0
--- /dev/null
+++ b/third_party/python/gyp/test/external-cross-compile/src/test2.c
@@ -0,0 +1 @@
+From test2.c
diff --git a/third_party/python/gyp/test/external-cross-compile/src/test3.cc b/third_party/python/gyp/test/external-cross-compile/src/test3.cc
new file mode 100644
index 0000000000..9eb64735b8
--- /dev/null
+++ b/third_party/python/gyp/test/external-cross-compile/src/test3.cc
@@ -0,0 +1 @@
+From test3.cc
diff --git a/third_party/python/gyp/test/external-cross-compile/src/test4.c b/third_party/python/gyp/test/external-cross-compile/src/test4.c
new file mode 100644
index 0000000000..8ecc33ec16
--- /dev/null
+++ b/third_party/python/gyp/test/external-cross-compile/src/test4.c
@@ -0,0 +1 @@
+From test4.c
diff --git a/third_party/python/gyp/test/external-cross-compile/src/tochar.py b/third_party/python/gyp/test/external-cross-compile/src/tochar.py
new file mode 100644
index 0000000000..c0780d984f
--- /dev/null
+++ b/third_party/python/gyp/test/external-cross-compile/src/tochar.py
@@ -0,0 +1,13 @@
+#!/usr/bin/python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+src = open(sys.argv[1])
+dst = open(sys.argv[2], 'w')
+for ch in src.read():
+ dst.write('%d,\n' % ord(ch))
+src.close()
+dst.close()
diff --git a/third_party/python/gyp/test/generator-output/actions/actions.gyp b/third_party/python/gyp/test/generator-output/actions/actions.gyp
new file mode 100644
index 0000000000..dded59aff3
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/actions/actions.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'pull_in_all_actions',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir1/executable.gyp:*',
+ 'subdir2/none.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/generator-output/actions/build/README.txt b/third_party/python/gyp/test/generator-output/actions/build/README.txt
new file mode 100644
index 0000000000..1b052c9a24
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/actions/build/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/actions/subdir1/actions-out/README.txt b/third_party/python/gyp/test/generator-output/actions/subdir1/actions-out/README.txt
new file mode 100644
index 0000000000..1b052c9a24
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/actions/subdir1/actions-out/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/actions/subdir1/build/README.txt b/third_party/python/gyp/test/generator-output/actions/subdir1/build/README.txt
new file mode 100644
index 0000000000..1b052c9a24
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/actions/subdir1/build/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/actions/subdir1/executable.gyp b/third_party/python/gyp/test/generator-output/actions/subdir1/executable.gyp
new file mode 100644
index 0000000000..6bdd60a1fb
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/actions/subdir1/executable.gyp
@@ -0,0 +1,44 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'program.c',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'make-prog1',
+ 'inputs': [
+ 'make-prog1.py',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/prog1.c',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ {
+ 'action_name': 'make-prog2',
+ 'inputs': [
+ 'make-prog2.py',
+ ],
+ 'outputs': [
+ 'actions-out/prog2.c',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/generator-output/actions/subdir1/make-prog1.py b/third_party/python/gyp/test/generator-output/actions/subdir1/make-prog1.py
new file mode 100755
index 0000000000..7ea1d8a2d4
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/actions/subdir1/make-prog1.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = r"""
+#include <stdio.h>
+
+void prog1(void)
+{
+ printf("Hello from make-prog1.py\n");
+}
+"""
+
+open(sys.argv[1], 'w').write(contents)
+
+sys.exit(0)
diff --git a/third_party/python/gyp/test/generator-output/actions/subdir1/make-prog2.py b/third_party/python/gyp/test/generator-output/actions/subdir1/make-prog2.py
new file mode 100755
index 0000000000..0bfe4973c2
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/actions/subdir1/make-prog2.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = r"""
+#include <stdio.h>
+
+void prog2(void)
+{
+ printf("Hello from make-prog2.py\n");
+}
+"""
+
+open(sys.argv[1], 'w').write(contents)
+
+sys.exit(0)
diff --git a/third_party/python/gyp/test/generator-output/actions/subdir1/program.c b/third_party/python/gyp/test/generator-output/actions/subdir1/program.c
new file mode 100644
index 0000000000..c0931534eb
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/actions/subdir1/program.c
@@ -0,0 +1,12 @@
+#include <stdio.h>
+
+extern void prog1(void);
+extern void prog2(void);
+
+int main(void)
+{
+ printf("Hello from program.c\n");
+ prog1();
+ prog2();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/generator-output/actions/subdir2/actions-out/README.txt b/third_party/python/gyp/test/generator-output/actions/subdir2/actions-out/README.txt
new file mode 100644
index 0000000000..1b052c9a24
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/actions/subdir2/actions-out/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/actions/subdir2/build/README.txt b/third_party/python/gyp/test/generator-output/actions/subdir2/build/README.txt
new file mode 100644
index 0000000000..1b052c9a24
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/actions/subdir2/build/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/actions/subdir2/make-file.py b/third_party/python/gyp/test/generator-output/actions/subdir2/make-file.py
new file mode 100755
index 0000000000..088a05e0b0
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/actions/subdir2/make-file.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = "Hello from make-file.py\n"
+
+open(sys.argv[1], 'w').write(contents)
diff --git a/third_party/python/gyp/test/generator-output/actions/subdir2/none.gyp b/third_party/python/gyp/test/generator-output/actions/subdir2/none.gyp
new file mode 100644
index 0000000000..f98f52753d
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/actions/subdir2/none.gyp
@@ -0,0 +1,31 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'file',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'actions': [
+ {
+ 'action_name': 'make-file',
+ 'inputs': [
+ 'make-file.py',
+ ],
+ 'outputs': [
+ 'actions-out/file.out',
+ # TODO: enhance testing infrastructure to test this
+ # without having to hard-code the intermediate dir paths.
+ #'<(INTERMEDIATE_DIR)/file.out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ }
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/generator-output/copies/build/README.txt b/third_party/python/gyp/test/generator-output/copies/build/README.txt
new file mode 100644
index 0000000000..90ef886193
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/copies/build/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/copies/copies-out/README.txt b/third_party/python/gyp/test/generator-output/copies/copies-out/README.txt
new file mode 100644
index 0000000000..90ef886193
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/copies/copies-out/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/copies/copies.gyp b/third_party/python/gyp/test/generator-output/copies/copies.gyp
new file mode 100644
index 0000000000..479a3d9b6e
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/copies/copies.gyp
@@ -0,0 +1,50 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'pull_in_subdir',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir/subdir.gyp:*',
+ ],
+ },
+ {
+ 'target_name': 'copies1',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': 'copies-out',
+ 'files': [
+ 'file1',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'copies2',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out',
+ 'files': [
+ 'file2',
+ ],
+ },
+ ],
+ },
+ # Verify that a null 'files' list doesn't gag the generators.
+ {
+ 'target_name': 'copies_null',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-null',
+ 'files': [],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/generator-output/copies/file1 b/third_party/python/gyp/test/generator-output/copies/file1
new file mode 100644
index 0000000000..84d55c5759
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/copies/file1
@@ -0,0 +1 @@
+file1 contents
diff --git a/third_party/python/gyp/test/generator-output/copies/file2 b/third_party/python/gyp/test/generator-output/copies/file2
new file mode 100644
index 0000000000..af1b8ae35d
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/copies/file2
@@ -0,0 +1 @@
+file2 contents
diff --git a/third_party/python/gyp/test/generator-output/copies/subdir/build/README.txt b/third_party/python/gyp/test/generator-output/copies/subdir/build/README.txt
new file mode 100644
index 0000000000..90ef886193
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/copies/subdir/build/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/copies/subdir/copies-out/README.txt b/third_party/python/gyp/test/generator-output/copies/subdir/copies-out/README.txt
new file mode 100644
index 0000000000..90ef886193
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/copies/subdir/copies-out/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/copies/subdir/file3 b/third_party/python/gyp/test/generator-output/copies/subdir/file3
new file mode 100644
index 0000000000..43f16f3522
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/copies/subdir/file3
@@ -0,0 +1 @@
+file3 contents
diff --git a/third_party/python/gyp/test/generator-output/copies/subdir/file4 b/third_party/python/gyp/test/generator-output/copies/subdir/file4
new file mode 100644
index 0000000000..5f7270a084
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/copies/subdir/file4
@@ -0,0 +1 @@
+file4 contents
diff --git a/third_party/python/gyp/test/generator-output/copies/subdir/subdir.gyp b/third_party/python/gyp/test/generator-output/copies/subdir/subdir.gyp
new file mode 100644
index 0000000000..af031d283a
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/copies/subdir/subdir.gyp
@@ -0,0 +1,32 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'copies3',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': 'copies-out',
+ 'files': [
+ 'file3',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'copies4',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/copies-out',
+ 'files': [
+ 'file4',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/generator-output/gyptest-actions.py b/third_party/python/gyp/test/generator-output/gyptest-actions.py
new file mode 100755
index 0000000000..47121d0770
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/gyptest-actions.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies --generator-output= behavior when using actions.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# All the generated files should go under 'gypfiles'. The source directory
+# ('actions') should be untouched.
+test.writable(test.workpath('actions'), False)
+test.run_gyp('actions.gyp',
+ '--generator-output=' + test.workpath('gypfiles'),
+ chdir='actions')
+
+test.writable(test.workpath('actions'), True)
+
+test.relocate('actions', 'relocate/actions')
+test.relocate('gypfiles', 'relocate/gypfiles')
+
+test.writable(test.workpath('relocate/actions'), False)
+
+# Some of the action outputs use "pure" relative paths (i.e. without prefixes
+# like <(INTERMEDIATE_DIR) or <(PRODUCT_DIR)). Even though we are building under
+# 'gypfiles', such outputs will still be created relative to the original .gyp
+# sources. Projects probably wouldn't normally do this, since it kind of defeats
+# the purpose of '--generator-output', but it is supported behaviour.
+test.writable(test.workpath('relocate/actions/build'), True)
+test.writable(test.workpath('relocate/actions/subdir1/build'), True)
+test.writable(test.workpath('relocate/actions/subdir1/actions-out'), True)
+test.writable(test.workpath('relocate/actions/subdir2/build'), True)
+test.writable(test.workpath('relocate/actions/subdir2/actions-out'), True)
+
+test.build('actions.gyp', test.ALL, chdir='relocate/gypfiles')
+
+expect = """\
+Hello from program.c
+Hello from make-prog1.py
+Hello from make-prog2.py
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/actions/subdir1'
+else:
+ chdir = 'relocate/gypfiles'
+test.run_built_executable('program', chdir=chdir, stdout=expect)
+
+test.must_match('relocate/actions/subdir2/actions-out/file.out',
+ "Hello from make-file.py\n")
+
+test.pass_test()
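The writable() calls above spell out the property under test: with --generator-output the generated project files live under 'gypfiles', but an action output written as a bare relative path (such as 'actions-out/prog2.c') is still created relative to the .gyp file that declared it, which is why those source-tree subdirectories must be made writable again. A rough sketch of that assumed resolution rule (paths illustrative):

    import os

    # Assumed resolution: variable-prefixed outputs land in the build tree,
    # bare relative outputs stay next to the declaring .gyp file.
    gyp_dir = 'relocate/actions/subdir1'
    intermediate_dir = 'relocate/gypfiles/out/gen'

    def resolve(output):
        if output.startswith('<(INTERMEDIATE_DIR)/'):
            return output.replace('<(INTERMEDIATE_DIR)', intermediate_dir)
        return os.path.join(gyp_dir, output)

    print(resolve('<(INTERMEDIATE_DIR)/prog1.c'))   # lands under gypfiles
    print(resolve('actions-out/prog2.c'))           # lands under the source tree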
diff --git a/third_party/python/gyp/test/generator-output/gyptest-copies.py b/third_party/python/gyp/test/generator-output/gyptest-copies.py
new file mode 100755
index 0000000000..262dfc30fa
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/gyptest-copies.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies file copies with --generator-output using an explicit build
+target of 'all'.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.writable(test.workpath('copies'), False)
+
+test.run_gyp('copies.gyp',
+ '--generator-output=' + test.workpath('gypfiles'),
+ '-G', 'xcode_ninja_target_pattern=^(?!copies_null)',
+ chdir='copies')
+
+test.writable(test.workpath('copies'), True)
+
+test.relocate('copies', 'relocate/copies')
+test.relocate('gypfiles', 'relocate/gypfiles')
+
+test.writable(test.workpath('relocate/copies'), False)
+
+test.writable(test.workpath('relocate/copies/build'), True)
+test.writable(test.workpath('relocate/copies/copies-out'), True)
+test.writable(test.workpath('relocate/copies/subdir/build'), True)
+test.writable(test.workpath('relocate/copies/subdir/copies-out'), True)
+
+test.build('copies.gyp', test.ALL, chdir='relocate/gypfiles')
+
+test.must_match(['relocate', 'copies', 'copies-out', 'file1'],
+ "file1 contents\n")
+
+if test.format == 'xcode':
+ chdir = 'relocate/copies/build'
+elif test.format in ['make', 'ninja', 'xcode-ninja', 'cmake']:
+ chdir = 'relocate/gypfiles/out'
+else:
+ chdir = 'relocate/gypfiles'
+test.must_match([chdir, 'Default', 'copies-out', 'file2'], "file2 contents\n")
+
+test.must_match(['relocate', 'copies', 'subdir', 'copies-out', 'file3'],
+ "file3 contents\n")
+
+if test.format == 'xcode':
+ chdir = 'relocate/copies/subdir/build'
+elif test.format in ['make', 'ninja', 'xcode-ninja', 'cmake']:
+ chdir = 'relocate/gypfiles/out'
+else:
+ chdir = 'relocate/gypfiles'
+test.must_match([chdir, 'Default', 'copies-out', 'file4'], "file4 contents\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/generator-output/gyptest-depth.py b/third_party/python/gyp/test/generator-output/gyptest-depth.py
new file mode 100755
index 0000000000..ee59a11f04
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/gyptest-depth.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a project hierarchy created when the --generator-output=
+and --depth= options are used to put the build configuration files in a separate
+directory tree.
+"""
+
+import TestGyp
+import os
+
+# This is a regression test for the make generator only.
+test = TestGyp.TestGyp(formats=['make'])
+
+test.writable(test.workpath('src'), False)
+
+toplevel_dir = os.path.basename(test.workpath())
+
+test.run_gyp(os.path.join(toplevel_dir, 'src', 'prog1.gyp'),
+ '-Dset_symroot=1',
+ '--generator-output=gypfiles',
+ depth=toplevel_dir,
+ chdir='..')
+
+test.writable(test.workpath('src/build'), True)
+test.writable(test.workpath('src/subdir2/build'), True)
+test.writable(test.workpath('src/subdir3/build'), True)
+
+test.build('prog1.gyp', test.ALL, chdir='gypfiles')
+
+chdir = 'gypfiles'
+
+expect = """\
+Hello from %s
+Hello from inc.h
+Hello from inc1/include1.h
+Hello from inc2/include2.h
+Hello from inc3/include3.h
+Hello from subdir2/deeper/deeper.h
+"""
+
+if test.format == 'xcode':
+ chdir = 'src'
+test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
+
+if test.format == 'xcode':
+ chdir = 'src/subdir2'
+test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
+
+if test.format == 'xcode':
+ chdir = 'src/subdir3'
+test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/generator-output/gyptest-mac-bundle.py b/third_party/python/gyp/test/generator-output/gyptest-mac-bundle.py
new file mode 100644
index 0000000000..14597d8de2
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/gyptest-mac-bundle.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies mac bundles work with --generator-output.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=[])
+
+ MAC_BUNDLE_DIR = 'mac-bundle'
+ GYPFILES_DIR = 'gypfiles'
+ test.writable(test.workpath(MAC_BUNDLE_DIR), False)
+ test.run_gyp('test.gyp',
+ '--generator-output=' + test.workpath(GYPFILES_DIR),
+ chdir=MAC_BUNDLE_DIR)
+ test.writable(test.workpath(MAC_BUNDLE_DIR), True)
+
+ test.build('test.gyp', test.ALL, chdir=GYPFILES_DIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/generator-output/gyptest-relocate.py b/third_party/python/gyp/test/generator-output/gyptest-relocate.py
new file mode 100755
index 0000000000..b867a6cffb
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/gyptest-relocate.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a project hierarchy created with the --generator-output=
+option can be built even when it's relocated to a different path.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.writable(test.workpath('src'), False)
+
+test.run_gyp('prog1.gyp',
+ '-Dset_symroot=1',
+ '--generator-output=' + test.workpath('gypfiles'),
+ chdir='src')
+
+test.writable(test.workpath('src'), True)
+
+test.relocate('src', 'relocate/src')
+test.relocate('gypfiles', 'relocate/gypfiles')
+
+test.writable(test.workpath('relocate/src'), False)
+
+test.writable(test.workpath('relocate/src/build'), True)
+test.writable(test.workpath('relocate/src/subdir2/build'), True)
+test.writable(test.workpath('relocate/src/subdir3/build'), True)
+
+test.build('prog1.gyp', test.ALL, chdir='relocate/gypfiles')
+
+chdir = 'relocate/gypfiles'
+
+expect = """\
+Hello from %s
+Hello from inc.h
+Hello from inc1/include1.h
+Hello from inc2/include2.h
+Hello from inc3/include3.h
+Hello from subdir2/deeper/deeper.h
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src'
+test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir2'
+test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir3'
+test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/generator-output/gyptest-rules.py b/third_party/python/gyp/test/generator-output/gyptest-rules.py
new file mode 100755
index 0000000000..a3ff8bd858
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/gyptest-rules.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies --generator-output= behavior when using rules.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.writable(test.workpath('rules'), False)
+
+test.run_gyp('rules.gyp',
+ '--generator-output=' + test.workpath('gypfiles'),
+ chdir='rules')
+
+test.writable(test.workpath('rules'), True)
+
+test.relocate('rules', 'relocate/rules')
+test.relocate('gypfiles', 'relocate/gypfiles')
+
+test.writable(test.workpath('relocate/rules'), False)
+
+test.writable(test.workpath('relocate/rules/build'), True)
+test.writable(test.workpath('relocate/rules/subdir1/build'), True)
+test.writable(test.workpath('relocate/rules/subdir2/build'), True)
+test.writable(test.workpath('relocate/rules/subdir2/rules-out'), True)
+
+test.build('rules.gyp', test.ALL, chdir='relocate/gypfiles')
+
+expect = """\
+Hello from program.c
+Hello from function1.in1
+Hello from function2.in1
+Hello from define3.in0
+Hello from define4.in0
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/rules/subdir1'
+else:
+ chdir = 'relocate/gypfiles'
+test.run_built_executable('program', chdir=chdir, stdout=expect)
+
+test.must_match('relocate/rules/subdir2/rules-out/file1.out',
+ "Hello from file1.in0\n")
+test.must_match('relocate/rules/subdir2/rules-out/file2.out',
+ "Hello from file2.in0\n")
+test.must_match('relocate/rules/subdir2/rules-out/file3.out',
+ "Hello from file3.in1\n")
+test.must_match('relocate/rules/subdir2/rules-out/file4.out',
+ "Hello from file4.in1\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/generator-output/gyptest-subdir2-deep.py b/third_party/python/gyp/test/generator-output/gyptest-subdir2-deep.py
new file mode 100755
index 0000000000..ec7862ddd9
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/gyptest-subdir2-deep.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a target from a .gyp file a few subdirectories
+deep when the --generator-output= option is used to put the build
+configuration files in a separate directory tree.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.writable(test.workpath('src'), False)
+
+test.writable(test.workpath('src/subdir2/deeper/build'), True)
+
+test.run_gyp('deeper.gyp',
+ '-Dset_symroot=1',
+ '--generator-output=' + test.workpath('gypfiles'),
+ chdir='src/subdir2/deeper')
+
+test.build('deeper.gyp', test.ALL, chdir='gypfiles')
+
+chdir = 'gypfiles'
+
+if test.format == 'xcode':
+ chdir = 'src/subdir2/deeper'
+test.run_built_executable('deeper',
+ chdir=chdir,
+ stdout="Hello from deeper.c\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/generator-output/gyptest-symlink.py b/third_party/python/gyp/test/generator-output/gyptest-symlink.py
new file mode 100755
index 0000000000..d7fe05830f
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/gyptest-symlink.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a target when the --generator-output= option is used to put
+the build configuration files in a separate directory tree referenced by a
+symlink.
+"""
+
+import TestGyp
+import os
+import sys
+
+test = TestGyp.TestGyp()
+if not hasattr(os, 'symlink') or sys.platform == 'win32':
+  # Python 3 on Windows has os.symlink, but it doesn't work reliably.
+ test.skip_test('Missing or bad os.symlink -- skipping test.\n')
+
+test.writable(test.workpath('src'), False)
+
+test.writable(test.workpath('src/subdir2/deeper/build'), True)
+
+test.subdir(test.workpath('build'))
+test.subdir(test.workpath('build/deeper'))
+test.symlink('build/deeper', test.workpath('symlink'))
+
+test.writable(test.workpath('build/deeper'), True)
+test.run_gyp('deeper.gyp',
+ '-Dset_symroot=2',
+ '--generator-output=' + test.workpath('symlink'),
+ chdir='src/subdir2/deeper')
+
+chdir = 'symlink'
+test.build('deeper.gyp', test.ALL, chdir=chdir)
+
+if test.format == 'xcode':
+ chdir = 'src/subdir2/deeper'
+test.run_built_executable('deeper',
+ chdir=chdir,
+ stdout="Hello from deeper.c\n")
+test.pass_test()
diff --git a/third_party/python/gyp/test/generator-output/gyptest-top-all.py b/third_party/python/gyp/test/generator-output/gyptest-top-all.py
new file mode 100755
index 0000000000..b1776776ea
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/gyptest-top-all.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a project hierarchy created when the --generator-output=
+option is used to put the build configuration files in a separate
+directory tree.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.writable(test.workpath('src'), False)
+
+test.run_gyp('prog1.gyp',
+ '-Dset_symroot=1',
+ '--generator-output=' + test.workpath('gypfiles'),
+ chdir='src')
+
+test.writable(test.workpath('src/build'), True)
+test.writable(test.workpath('src/subdir2/build'), True)
+test.writable(test.workpath('src/subdir3/build'), True)
+
+test.build('prog1.gyp', test.ALL, chdir='gypfiles')
+
+chdir = 'gypfiles'
+
+expect = """\
+Hello from %s
+Hello from inc.h
+Hello from inc1/include1.h
+Hello from inc2/include2.h
+Hello from inc3/include3.h
+Hello from subdir2/deeper/deeper.h
+"""
+
+if test.format == 'xcode':
+ chdir = 'src'
+test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
+
+if test.format == 'xcode':
+ chdir = 'src/subdir2'
+test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
+
+if test.format == 'xcode':
+ chdir = 'src/subdir3'
+test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/generator-output/mac-bundle/Info.plist b/third_party/python/gyp/test/generator-output/mac-bundle/Info.plist
new file mode 100644
index 0000000000..8cb142e9f5
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/mac-bundle/Info.plist
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIconFile</key>
+ <string></string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.${PRODUCT_NAME}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>ause</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSMinimumSystemVersion</key>
+ <string>${MACOSX_DEPLOYMENT_TARGET}</string>
+ <key>NSMainNibFile</key>
+ <string>MainMenu</string>
+ <key>NSPrincipalClass</key>
+ <string>NSApplication</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/generator-output/mac-bundle/app.order b/third_party/python/gyp/test/generator-output/mac-bundle/app.order
new file mode 100644
index 0000000000..4eb9e89d39
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/mac-bundle/app.order
@@ -0,0 +1 @@
+_main
diff --git a/third_party/python/gyp/test/generator-output/mac-bundle/header.h b/third_party/python/gyp/test/generator-output/mac-bundle/header.h
new file mode 100644
index 0000000000..7ed7775122
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/mac-bundle/header.h
@@ -0,0 +1 @@
+int f();
diff --git a/third_party/python/gyp/test/generator-output/mac-bundle/main.c b/third_party/python/gyp/test/generator-output/mac-bundle/main.c
new file mode 100644
index 0000000000..237c8ce181
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/mac-bundle/main.c
@@ -0,0 +1 @@
+int main() {}
diff --git a/third_party/python/gyp/test/generator-output/mac-bundle/resource.sb b/third_party/python/gyp/test/generator-output/mac-bundle/resource.sb
new file mode 100644
index 0000000000..731befc457
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/mac-bundle/resource.sb
@@ -0,0 +1 @@
+A text file.
diff --git a/third_party/python/gyp/test/generator-output/mac-bundle/test.gyp b/third_party/python/gyp/test/generator-output/mac-bundle/test.gyp
new file mode 100644
index 0000000000..35ac674f6d
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/mac-bundle/test.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test App Gyp',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'main.c',
+ ],
+ 'mac_bundle_resources': [
+ 'resource.sb',
+ ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Info.plist',
+ 'ORDER_FILE': 'app.order',
+ 'GCC_PREFIX_HEADER': 'header.h',
+ 'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/generator-output/rules/build/README.txt b/third_party/python/gyp/test/generator-output/rules/build/README.txt
new file mode 100644
index 0000000000..1b052c9a24
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/build/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/rules/copy-file.py b/third_party/python/gyp/test/generator-output/rules/copy-file.py
new file mode 100755
index 0000000000..80c6749f93
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/copy-file.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+contents = open(sys.argv[1], 'r').read()
+open(sys.argv[2], 'w').write(contents)
+
+sys.exit(0)
diff --git a/third_party/python/gyp/test/generator-output/rules/rules.gyp b/third_party/python/gyp/test/generator-output/rules/rules.gyp
new file mode 100644
index 0000000000..dded59aff3
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/rules.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'pull_in_all_actions',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir1/executable.gyp:*',
+ 'subdir2/none.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir1/build/README.txt b/third_party/python/gyp/test/generator-output/rules/subdir1/build/README.txt
new file mode 100644
index 0000000000..1b052c9a24
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir1/build/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir1/define3.in0 b/third_party/python/gyp/test/generator-output/rules/subdir1/define3.in0
new file mode 100644
index 0000000000..cc29c643f3
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir1/define3.in0
@@ -0,0 +1 @@
+#define STRING3 "Hello from define3.in0\n"
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir1/define4.in0 b/third_party/python/gyp/test/generator-output/rules/subdir1/define4.in0
new file mode 100644
index 0000000000..c9b0467b32
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir1/define4.in0
@@ -0,0 +1 @@
+#define STRING4 "Hello from define4.in0\n"
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir1/executable.gyp b/third_party/python/gyp/test/generator-output/rules/subdir1/executable.gyp
new file mode 100644
index 0000000000..42bee4d746
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir1/executable.gyp
@@ -0,0 +1,59 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'program.c',
+ 'function1.in1',
+ 'function2.in1',
+ 'define3.in0',
+ 'define4.in0',
+ ],
+ 'include_dirs': [
+ '<(INTERMEDIATE_DIR)',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file_0',
+ 'extension': 'in0',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ # TODO: fix Make to support generated files not
+ # in a variable-named path like <(INTERMEDIATE_DIR)
+ #'<(RULE_INPUT_ROOT).c',
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 0,
+ },
+ {
+ 'rule_name': 'copy_file_1',
+ 'extension': 'in1',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ # TODO: fix Make to support generated files not
+ # in a variable-named path like <(INTERMEDIATE_DIR)
+ #'<(RULE_INPUT_ROOT).c',
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir1/function1.in1 b/third_party/python/gyp/test/generator-output/rules/subdir1/function1.in1
new file mode 100644
index 0000000000..545e7ca16b
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir1/function1.in1
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void function1(void)
+{
+ printf("Hello from function1.in1\n");
+}
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir1/function2.in1 b/third_party/python/gyp/test/generator-output/rules/subdir1/function2.in1
new file mode 100644
index 0000000000..6bad43f9cf
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir1/function2.in1
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void function2(void)
+{
+ printf("Hello from function2.in1\n");
+}
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir1/program.c b/third_party/python/gyp/test/generator-output/rules/subdir1/program.c
new file mode 100644
index 0000000000..56b320632a
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir1/program.c
@@ -0,0 +1,18 @@
+#include <stdio.h>
+#include "define3.h"
+#include "define4.h"
+
+extern void function1(void);
+extern void function2(void);
+extern void function3(void);
+extern void function4(void);
+
+int main(void)
+{
+ printf("Hello from program.c\n");
+ function1();
+ function2();
+ printf("%s", STRING3);
+ printf("%s", STRING4);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir2/build/README.txt b/third_party/python/gyp/test/generator-output/rules/subdir2/build/README.txt
new file mode 100644
index 0000000000..1b052c9a24
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir2/build/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir2/file1.in0 b/third_party/python/gyp/test/generator-output/rules/subdir2/file1.in0
new file mode 100644
index 0000000000..7aca64f4ce
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir2/file1.in0
@@ -0,0 +1 @@
+Hello from file1.in0
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir2/file2.in0 b/third_party/python/gyp/test/generator-output/rules/subdir2/file2.in0
new file mode 100644
index 0000000000..80a281a2a9
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir2/file2.in0
@@ -0,0 +1 @@
+Hello from file2.in0
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir2/file3.in1 b/third_party/python/gyp/test/generator-output/rules/subdir2/file3.in1
new file mode 100644
index 0000000000..60ae2e7931
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir2/file3.in1
@@ -0,0 +1 @@
+Hello from file3.in1
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir2/file4.in1 b/third_party/python/gyp/test/generator-output/rules/subdir2/file4.in1
new file mode 100644
index 0000000000..5a3c30720e
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir2/file4.in1
@@ -0,0 +1 @@
+Hello from file4.in1
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir2/none.gyp b/third_party/python/gyp/test/generator-output/rules/subdir2/none.gyp
new file mode 100644
index 0000000000..664cbd9cb7
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir2/none.gyp
@@ -0,0 +1,49 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'files',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'file1.in0',
+ 'file2.in0',
+ 'file3.in1',
+ 'file4.in1',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file_0',
+ 'extension': 'in0',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ 'rules-out/<(RULE_INPUT_ROOT).out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 0,
+ },
+ {
+ 'rule_name': 'copy_file_1',
+ 'extension': 'in1',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ 'rules-out/<(RULE_INPUT_ROOT).out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/generator-output/rules/subdir2/rules-out/README.txt b/third_party/python/gyp/test/generator-output/rules/subdir2/rules-out/README.txt
new file mode 100644
index 0000000000..1b052c9a24
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/rules/subdir2/rules-out/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/src/build/README.txt b/third_party/python/gyp/test/generator-output/src/build/README.txt
new file mode 100644
index 0000000000..90ef886193
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/build/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/src/inc.h b/third_party/python/gyp/test/generator-output/src/inc.h
new file mode 100644
index 0000000000..57aa1a5a74
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/inc.h
@@ -0,0 +1 @@
+#define INC_STRING "inc.h"
diff --git a/third_party/python/gyp/test/generator-output/src/inc1/include1.h b/third_party/python/gyp/test/generator-output/src/inc1/include1.h
new file mode 100644
index 0000000000..1d59065fc9
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/inc1/include1.h
@@ -0,0 +1 @@
+#define INCLUDE1_STRING "inc1/include1.h"
diff --git a/third_party/python/gyp/test/generator-output/src/prog1.c b/third_party/python/gyp/test/generator-output/src/prog1.c
new file mode 100644
index 0000000000..bf7c2a17bd
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/prog1.c
@@ -0,0 +1,18 @@
+#include <stdio.h>
+
+#include "inc.h"
+#include "include1.h"
+#include "include2.h"
+#include "include3.h"
+#include "deeper.h"
+
+int main(void)
+{
+ printf("Hello from prog1.c\n");
+ printf("Hello from %s\n", INC_STRING);
+ printf("Hello from %s\n", INCLUDE1_STRING);
+ printf("Hello from %s\n", INCLUDE2_STRING);
+ printf("Hello from %s\n", INCLUDE3_STRING);
+ printf("Hello from %s\n", DEEPER_STRING);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/generator-output/src/prog1.gyp b/third_party/python/gyp/test/generator-output/src/prog1.gyp
new file mode 100644
index 0000000000..d50e6fb0a7
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/prog1.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ 'symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog1',
+ 'type': 'executable',
+ 'dependencies': [
+ 'subdir2/prog2.gyp:prog2',
+ ],
+ 'include_dirs': [
+ '.',
+ 'inc1',
+ 'subdir2/inc2',
+ 'subdir3/inc3',
+ 'subdir2/deeper',
+ ],
+ 'sources': [
+ 'prog1.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/generator-output/src/subdir2/build/README.txt b/third_party/python/gyp/test/generator-output/src/subdir2/build/README.txt
new file mode 100644
index 0000000000..90ef886193
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/subdir2/build/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/src/subdir2/deeper/build/README.txt b/third_party/python/gyp/test/generator-output/src/subdir2/deeper/build/README.txt
new file mode 100644
index 0000000000..90ef886193
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/subdir2/deeper/build/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/src/subdir2/deeper/deeper.c b/third_party/python/gyp/test/generator-output/src/subdir2/deeper/deeper.c
new file mode 100644
index 0000000000..843505cd11
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/subdir2/deeper/deeper.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello from deeper.c\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp b/third_party/python/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp
new file mode 100644
index 0000000000..8648770872
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../../symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'deeper',
+ 'type': 'executable',
+ 'sources': [
+ 'deeper.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/generator-output/src/subdir2/deeper/deeper.h b/third_party/python/gyp/test/generator-output/src/subdir2/deeper/deeper.h
new file mode 100644
index 0000000000..f6484a0fe5
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/subdir2/deeper/deeper.h
@@ -0,0 +1 @@
+#define DEEPER_STRING "subdir2/deeper/deeper.h"
diff --git a/third_party/python/gyp/test/generator-output/src/subdir2/inc2/include2.h b/third_party/python/gyp/test/generator-output/src/subdir2/inc2/include2.h
new file mode 100644
index 0000000000..1ccfa5dea7
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/subdir2/inc2/include2.h
@@ -0,0 +1 @@
+#define INCLUDE2_STRING "inc2/include2.h"
diff --git a/third_party/python/gyp/test/generator-output/src/subdir2/prog2.c b/third_party/python/gyp/test/generator-output/src/subdir2/prog2.c
new file mode 100644
index 0000000000..d80d871984
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/subdir2/prog2.c
@@ -0,0 +1,18 @@
+#include <stdio.h>
+
+#include "inc.h"
+#include "include1.h"
+#include "include2.h"
+#include "include3.h"
+#include "deeper.h"
+
+int main(void)
+{
+ printf("Hello from prog2.c\n");
+ printf("Hello from %s\n", INC_STRING);
+ printf("Hello from %s\n", INCLUDE1_STRING);
+ printf("Hello from %s\n", INCLUDE2_STRING);
+ printf("Hello from %s\n", INCLUDE3_STRING);
+ printf("Hello from %s\n", DEEPER_STRING);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/generator-output/src/subdir2/prog2.gyp b/third_party/python/gyp/test/generator-output/src/subdir2/prog2.gyp
new file mode 100644
index 0000000000..7176ed8be7
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/subdir2/prog2.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog2',
+ 'type': 'executable',
+ 'include_dirs': [
+ '..',
+ '../inc1',
+ 'inc2',
+ '../subdir3/inc3',
+ 'deeper',
+ ],
+ 'dependencies': [
+ '../subdir3/prog3.gyp:prog3',
+ ],
+ 'sources': [
+ 'prog2.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/generator-output/src/subdir3/build/README.txt b/third_party/python/gyp/test/generator-output/src/subdir3/build/README.txt
new file mode 100644
index 0000000000..90ef886193
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/subdir3/build/README.txt
@@ -0,0 +1,4 @@
+A place-holder for this Xcode build output directory, so that the
+test script can verify that .xcodeproj files are not created in
+their normal location by making the src/ read-only, and then
+selectively making this build directory writable.
diff --git a/third_party/python/gyp/test/generator-output/src/subdir3/inc3/include3.h b/third_party/python/gyp/test/generator-output/src/subdir3/inc3/include3.h
new file mode 100644
index 0000000000..bf53bf1f00
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/subdir3/inc3/include3.h
@@ -0,0 +1 @@
+#define INCLUDE3_STRING "inc3/include3.h"
diff --git a/third_party/python/gyp/test/generator-output/src/subdir3/prog3.c b/third_party/python/gyp/test/generator-output/src/subdir3/prog3.c
new file mode 100644
index 0000000000..c72233da19
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/subdir3/prog3.c
@@ -0,0 +1,18 @@
+#include <stdio.h>
+
+#include "inc.h"
+#include "include1.h"
+#include "include2.h"
+#include "include3.h"
+#include "deeper.h"
+
+int main(void)
+{
+ printf("Hello from prog3.c\n");
+ printf("Hello from %s\n", INC_STRING);
+ printf("Hello from %s\n", INCLUDE1_STRING);
+ printf("Hello from %s\n", INCLUDE2_STRING);
+ printf("Hello from %s\n", INCLUDE3_STRING);
+ printf("Hello from %s\n", DEEPER_STRING);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/generator-output/src/subdir3/prog3.gyp b/third_party/python/gyp/test/generator-output/src/subdir3/prog3.gyp
new file mode 100644
index 0000000000..46c5e000a2
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/subdir3/prog3.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog3',
+ 'type': 'executable',
+ 'include_dirs': [
+ '..',
+ '../inc1',
+ '../subdir2/inc2',
+ 'inc3',
+ '../subdir2/deeper',
+ ],
+ 'sources': [
+ 'prog3.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/generator-output/src/symroot.gypi b/third_party/python/gyp/test/generator-output/src/symroot.gypi
new file mode 100644
index 0000000000..519916427c
--- /dev/null
+++ b/third_party/python/gyp/test/generator-output/src/symroot.gypi
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'set_symroot%': 0,
+ },
+ 'conditions': [
+ ['set_symroot == 1', {
+ 'xcode_settings': {
+ 'SYMROOT': '<(DEPTH)/build',
+ },
+ }],
+ ],
+}
diff --git a/third_party/python/gyp/test/gyp-defines/defines.gyp b/third_party/python/gyp/test/gyp-defines/defines.gyp
new file mode 100644
index 0000000000..f59bbd20d2
--- /dev/null
+++ b/third_party/python/gyp/test/gyp-defines/defines.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_target',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'test_action',
+ 'inputs': [],
+ 'outputs': [ 'action.txt' ],
+ 'action': [
+ 'python',
+ 'echo.py',
+ '<(key)',
+ '<(_outputs)',
+ ],
+ 'msvs_cygwin_shell': 0,
+ }
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/gyp-defines/echo.py b/third_party/python/gyp/test/gyp-defines/echo.py
new file mode 100644
index 0000000000..b85add12f6
--- /dev/null
+++ b/third_party/python/gyp/test/gyp-defines/echo.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+f = open(sys.argv[2], 'w+')
+f.write(sys.argv[1])
+f.close()
diff --git a/third_party/python/gyp/test/gyp-defines/gyptest-multiple-values.py b/third_party/python/gyp/test/gyp-defines/gyptest-multiple-values.py
new file mode 100644
index 0000000000..67735cce6a
--- /dev/null
+++ b/third_party/python/gyp/test/gyp-defines/gyptest-multiple-values.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that when multiple values are supplied for a gyp define, the last one
+is used.
+"""
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+os.environ['GYP_DEFINES'] = 'key=value1 key=value2 key=value3'
+test.run_gyp('defines.gyp')
+
+test.build('defines.gyp')
+test.must_contain('action.txt', 'value3')
+
+# The last occurrence of a repeated define should take precedence over the
+# other values.
+os.environ['GYP_DEFINES'] = 'key=repeated_value key=value1 key=repeated_value'
+test.run_gyp('defines.gyp')
+
+if test.format == 'msvs' and not test.uses_msbuild:
+ # msvs versions before 2010 don't detect build rule changes not reflected
+ # in file system timestamps. Rebuild to see differences.
+ test.build('defines.gyp', rebuild=True)
+else:
+ test.build('defines.gyp')
+test.must_contain('action.txt', 'repeated_value')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/gyp-defines/gyptest-regyp.py b/third_party/python/gyp/test/gyp-defines/gyptest-regyp.py
new file mode 100644
index 0000000000..0895d81d4f
--- /dev/null
+++ b/third_party/python/gyp/test/gyp-defines/gyptest-regyp.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that when the same value is repeated for a gyp define, duplicates are
+stripped from the regeneration rule.
+"""
+
+import os
+import TestGyp
+
+# Regenerating build files when a gyp file changes is currently only supported
+# by the make generator.
+test = TestGyp.TestGyp(formats=['make'])
+
+os.environ['GYP_DEFINES'] = 'key=repeated_value key=value1 key=repeated_value'
+test.run_gyp('defines.gyp')
+test.build('defines.gyp')
+
+# The last occurrence of a repeated define should take precedence over the
+# other values. See gyptest-multiple-values.py.
+test.must_contain('action.txt', 'repeated_value')
+
+# So the regeneration rule needs to use the correct order.
+test.must_not_contain(
+ 'Makefile', '"-Dkey=repeated_value" "-Dkey=value1" "-Dkey=repeated_value"')
+test.must_contain('Makefile', '"-Dkey=value1" "-Dkey=repeated_value"')
+
+# Sleep so that the changed gyp file will have a newer timestamp than the
+# previously generated build files.
+test.sleep()
+os.utime("defines.gyp", None)
+
+test.build('defines.gyp')
+test.must_contain('action.txt', 'repeated_value')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/hard_dependency/gyptest-exported-hard-dependency.py b/third_party/python/gyp/test/hard_dependency/gyptest-exported-hard-dependency.py
new file mode 100755
index 0000000000..ba51528800
--- /dev/null
+++ b/third_party/python/gyp/test/hard_dependency/gyptest-exported-hard-dependency.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that a hard_dependency that is exported is pulled in as a dependency
+for a target if the target is a static library and if the generator will
+remove dependencies between static libraries.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+if test.format == 'dump_dependency_json':
+ test.skip_test('Skipping test; dependency JSON does not adjust ' \
+ 'static libraries.\n')
+
+test.run_gyp('hard_dependency.gyp', chdir='src')
+
+chdir = 'relocate/src'
+test.relocate('src', chdir)
+
+test.build('hard_dependency.gyp', 'c', chdir=chdir)
+
+# The 'a' static library should be built, as it has actions with side-effects
+# that are necessary to compile 'c'. Even though 'c' does not directly depend
+# on 'a', because 'a' is a hard_dependency that 'b' exports, 'c' should import
+# it as a hard_dependency and ensure it is built before building 'c'.
+test.built_file_must_exist('a', type=test.STATIC_LIB, chdir=chdir)
+test.built_file_must_not_exist('b', type=test.STATIC_LIB, chdir=chdir)
+test.built_file_must_exist('c', type=test.STATIC_LIB, chdir=chdir)
+test.built_file_must_not_exist('d', type=test.STATIC_LIB, chdir=chdir)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/hard_dependency/gyptest-no-exported-hard-dependency.py b/third_party/python/gyp/test/hard_dependency/gyptest-no-exported-hard-dependency.py
new file mode 100755
index 0000000000..10774ca2a0
--- /dev/null
+++ b/third_party/python/gyp/test/hard_dependency/gyptest-no-exported-hard-dependency.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that a hard_dependency that is not exported is not pulled in as a
+dependency for a target if the target does not explicitly specify a dependency
+and none of its dependencies export the hard_dependency.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+if test.format == 'dump_dependency_json':
+ test.skip_test('Skipping test; dependency JSON does not adjust ' \
+ 'static libraries.\n')
+
+test.run_gyp('hard_dependency.gyp', chdir='src')
+
+chdir = 'relocate/src'
+test.relocate('src', chdir)
+
+test.build('hard_dependency.gyp', 'd', chdir=chdir)
+
+# Because 'c' does not export a hard_dependency, only the target 'd' should
+# be built. This is because the 'd' target does not need the generated headers
+# in order to be compiled.
+test.built_file_must_not_exist('a', type=test.STATIC_LIB, chdir=chdir)
+test.built_file_must_not_exist('b', type=test.STATIC_LIB, chdir=chdir)
+test.built_file_must_not_exist('c', type=test.STATIC_LIB, chdir=chdir)
+test.built_file_must_exist('d', type=test.STATIC_LIB, chdir=chdir)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/hard_dependency/src/a.c b/third_party/python/gyp/test/hard_dependency/src/a.c
new file mode 100644
index 0000000000..0fa0223c97
--- /dev/null
+++ b/third_party/python/gyp/test/hard_dependency/src/a.c
@@ -0,0 +1,9 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include "a.h"
+
+int funcA() {
+ return 42;
+}
diff --git a/third_party/python/gyp/test/hard_dependency/src/a.h b/third_party/python/gyp/test/hard_dependency/src/a.h
new file mode 100644
index 0000000000..854a06504a
--- /dev/null
+++ b/third_party/python/gyp/test/hard_dependency/src/a.h
@@ -0,0 +1,12 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#ifndef A_H_
+#define A_H_
+
+#include "generated.h"
+
+int funcA();
+
+#endif // A_H_
diff --git a/third_party/python/gyp/test/hard_dependency/src/b.c b/third_party/python/gyp/test/hard_dependency/src/b.c
new file mode 100644
index 0000000000..0baace929e
--- /dev/null
+++ b/third_party/python/gyp/test/hard_dependency/src/b.c
@@ -0,0 +1,9 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include "a.h"
+
+int funcB() {
+ return funcA();
+}
diff --git a/third_party/python/gyp/test/hard_dependency/src/b.h b/third_party/python/gyp/test/hard_dependency/src/b.h
new file mode 100644
index 0000000000..22b48cefe2
--- /dev/null
+++ b/third_party/python/gyp/test/hard_dependency/src/b.h
@@ -0,0 +1,12 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#ifndef B_H_
+#define B_H_
+
+#include "a.h"
+
+int funcB();
+
+#endif // B_H_
diff --git a/third_party/python/gyp/test/hard_dependency/src/c.c b/third_party/python/gyp/test/hard_dependency/src/c.c
new file mode 100644
index 0000000000..7d0068208e
--- /dev/null
+++ b/third_party/python/gyp/test/hard_dependency/src/c.c
@@ -0,0 +1,10 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include "b.h"
+#include "c.h"
+
+int funcC() {
+ return funcB();
+}
diff --git a/third_party/python/gyp/test/hard_dependency/src/c.h b/third_party/python/gyp/test/hard_dependency/src/c.h
new file mode 100644
index 0000000000..f4ea7fefa2
--- /dev/null
+++ b/third_party/python/gyp/test/hard_dependency/src/c.h
@@ -0,0 +1,10 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#ifndef C_H_
+#define C_H_
+
+int funcC();
+
+#endif // C_H_
diff --git a/third_party/python/gyp/test/hard_dependency/src/d.c b/third_party/python/gyp/test/hard_dependency/src/d.c
new file mode 100644
index 0000000000..d016c3ce71
--- /dev/null
+++ b/third_party/python/gyp/test/hard_dependency/src/d.c
@@ -0,0 +1,9 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include "c.h"
+
+int funcD() {
+ return funcC();
+}
diff --git a/third_party/python/gyp/test/hard_dependency/src/emit.py b/third_party/python/gyp/test/hard_dependency/src/emit.py
new file mode 100755
index 0000000000..8ed12f7393
--- /dev/null
+++ b/third_party/python/gyp/test/hard_dependency/src/emit.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+f = open(sys.argv[1], 'w')
+f.write('/* Hello World */\n')
+f.close()
diff --git a/third_party/python/gyp/test/hard_dependency/src/hard_dependency.gyp b/third_party/python/gyp/test/hard_dependency/src/hard_dependency.gyp
new file mode 100644
index 0000000000..4479c5f045
--- /dev/null
+++ b/third_party/python/gyp/test/hard_dependency/src/hard_dependency.gyp
@@ -0,0 +1,78 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'static_library',
+ 'sources': [
+ 'a.c',
+ 'a.h',
+ ],
+ 'hard_dependency': 1,
+ 'actions': [
+ {
+ 'action_name': 'generate_headers',
+ 'inputs': [
+ 'emit.py'
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/generated.h'
+ ],
+ 'action': [
+ 'python',
+ 'emit.py',
+ '<(SHARED_INTERMEDIATE_DIR)/generated.h',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ],
+ },
+ },
+ {
+ 'target_name': 'b',
+ 'type': 'static_library',
+ 'sources': [
+ 'b.c',
+ 'b.h',
+ ],
+ 'dependencies': [
+ 'a',
+ ],
+ 'export_dependent_settings': [
+ 'a',
+ ],
+ },
+ {
+ 'target_name': 'c',
+ 'type': 'static_library',
+ 'sources': [
+ 'c.c',
+ 'c.h',
+ ],
+ 'dependencies': [
+ 'b',
+ ],
+ },
+ {
+ 'target_name': 'd',
+ 'type': 'static_library',
+ 'sources': [
+ 'd.c',
+ ],
+ 'dependencies': [
+ 'c',
+ ],
+ }
+ ],
+}
diff --git a/third_party/python/gyp/test/hello/gyptest-all.py b/third_party/python/gyp/test/hello/gyptest-all.py
new file mode 100755
index 0000000000..1739b6886e
--- /dev/null
+++ b/third_party/python/gyp/test/hello/gyptest-all.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simplest-possible build of a "Hello, world!" program
+using an explicit build target of 'all'.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(workdir='workarea_all')
+
+test.run_gyp('hello.gyp')
+
+test.build('hello.gyp', test.ALL)
+
+test.run_built_executable('hello', stdout="Hello, world!\n")
+
+test.up_to_date('hello.gyp', test.ALL)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/hello/gyptest-default.py b/third_party/python/gyp/test/hello/gyptest-default.py
new file mode 100755
index 0000000000..22377e7ac5
--- /dev/null
+++ b/third_party/python/gyp/test/hello/gyptest-default.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simplest-possible build of a "Hello, world!" program
+using the default build target.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(workdir='workarea_default')
+
+test.run_gyp('hello.gyp')
+
+test.build('hello.gyp')
+
+test.run_built_executable('hello', stdout="Hello, world!\n")
+
+test.up_to_date('hello.gyp', test.DEFAULT)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/hello/gyptest-disable-regyp.py b/third_party/python/gyp/test/hello/gyptest-disable-regyp.py
new file mode 100755
index 0000000000..1e4b306674
--- /dev/null
+++ b/third_party/python/gyp/test/hello/gyptest-disable-regyp.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that Makefiles don't get rebuilt when a source gyp file changes and
+regeneration is disabled via the auto_regeneration generator flag.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('hello.gyp', '-Gauto_regeneration=0')
+
+test.build('hello.gyp', test.ALL)
+
+test.run_built_executable('hello', stdout="Hello, world!\n")
+
+# Sleep so that the changed gyp file will have a newer timestamp than the
+# previously generated build files.
+test.sleep()
+test.write('hello.gyp', test.read('hello2.gyp'))
+
+test.build('hello.gyp', test.ALL)
+
+# Should still be the old executable, as regeneration was disabled.
+test.run_built_executable('hello', stdout="Hello, world!\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/hello/gyptest-regyp-output.py b/third_party/python/gyp/test/hello/gyptest-regyp-output.py
new file mode 100644
index 0000000000..fd88a85503
--- /dev/null
+++ b/third_party/python/gyp/test/hello/gyptest-regyp-output.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that Makefiles get rebuilt when a source gyp file changes and
+--generator-output is used.
+"""
+
+import TestGyp
+
+# Regenerating build files when a gyp file changes is currently only supported
+# by the make generator, and --generator-output is not supported by ninja, so we
+# can only test for make.
+test = TestGyp.TestGyp(formats=['make'])
+
+CHDIR='generator-output'
+
+test.run_gyp('hello.gyp', '--generator-output=%s' % CHDIR)
+
+test.build('hello.gyp', test.ALL, chdir=CHDIR)
+
+test.run_built_executable('hello', stdout="Hello, world!\n", chdir=CHDIR)
+
+# Sleep so that the changed gyp file will have a newer timestamp than the
+# previously generated build files.
+test.sleep()
+test.write('hello.gyp', test.read('hello2.gyp'))
+
+test.build('hello.gyp', test.ALL, chdir=CHDIR)
+
+test.run_built_executable('hello', stdout="Hello, two!\n", chdir=CHDIR)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/hello/gyptest-regyp.py b/third_party/python/gyp/test/hello/gyptest-regyp.py
new file mode 100755
index 0000000000..b513edcd07
--- /dev/null
+++ b/third_party/python/gyp/test/hello/gyptest-regyp.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that Makefiles get rebuilt when a source gyp file changes.
+"""
+
+import TestGyp
+
+# Regenerating build files when a gyp file changes is currently only supported
+# by the make generator.
+test = TestGyp.TestGyp(formats=['make'])
+
+test.run_gyp('hello.gyp')
+
+test.build('hello.gyp', test.ALL)
+
+test.run_built_executable('hello', stdout="Hello, world!\n")
+
+# Sleep so that the changed gyp file will have a newer timestamp than the
+# previously generated build files.
+test.sleep()
+test.write('hello.gyp', test.read('hello2.gyp'))
+
+test.build('hello.gyp', test.ALL)
+
+test.run_built_executable('hello', stdout="Hello, two!\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/hello/gyptest-target.py b/third_party/python/gyp/test/hello/gyptest-target.py
new file mode 100755
index 0000000000..1abaf7057b
--- /dev/null
+++ b/third_party/python/gyp/test/hello/gyptest-target.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simplest-possible build of a "Hello, world!" program
+using an explicit build target of 'hello'.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(workdir='workarea_target')
+
+test.run_gyp('hello.gyp')
+
+test.build('hello.gyp', 'hello')
+
+test.run_built_executable('hello', stdout="Hello, world!\n")
+
+test.up_to_date('hello.gyp', 'hello')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/hello/hello.c b/third_party/python/gyp/test/hello/hello.c
new file mode 100644
index 0000000000..0a4c806019
--- /dev/null
+++ b/third_party/python/gyp/test/hello/hello.c
@@ -0,0 +1,11 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello, world!\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/hello/hello.gyp b/third_party/python/gyp/test/hello/hello.gyp
new file mode 100644
index 0000000000..1974d51ccd
--- /dev/null
+++ b/third_party/python/gyp/test/hello/hello.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/hello/hello2.c b/third_party/python/gyp/test/hello/hello2.c
new file mode 100644
index 0000000000..b14299cae0
--- /dev/null
+++ b/third_party/python/gyp/test/hello/hello2.c
@@ -0,0 +1,11 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello, two!\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/hello/hello2.gyp b/third_party/python/gyp/test/hello/hello2.gyp
new file mode 100644
index 0000000000..25b08caf3c
--- /dev/null
+++ b/third_party/python/gyp/test/hello/hello2.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello2.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/home_dot_gyp/gyptest-home-includes-config-arg.py b/third_party/python/gyp/test/home_dot_gyp/gyptest-home-includes-config-arg.py
new file mode 100755
index 0000000000..82e39f9d07
--- /dev/null
+++ b/third_party/python/gyp/test/home_dot_gyp/gyptest-home-includes-config-arg.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies inclusion of $HOME/.gyp_new/include.gypi works when --config-dir is
+specified.
+"""
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+os.environ['HOME'] = os.path.abspath('home2')
+
+test.run_gyp('all.gyp', '--config-dir=~/.gyp_new', chdir='src')
+
+# After relocating, we should still be able to build (build file shouldn't
+# contain relative reference to ~/.gyp_new/include.gypi)
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', test.ALL, chdir='relocate/src')
+
+test.run_built_executable('printfoo',
+ chdir='relocate/src',
+ stdout='FOO is fromhome3\n')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/home_dot_gyp/gyptest-home-includes-config-env.py b/third_party/python/gyp/test/home_dot_gyp/gyptest-home-includes-config-env.py
new file mode 100755
index 0000000000..6f4b299ede
--- /dev/null
+++ b/third_party/python/gyp/test/home_dot_gyp/gyptest-home-includes-config-env.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies inclusion of $HOME/.gyp_new/include.gypi works when GYP_CONFIG_DIR
+is set.
+"""
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+os.environ['HOME'] = os.path.abspath('home')
+os.environ['GYP_CONFIG_DIR'] = os.path.join(os.path.abspath('home2'),
+ '.gyp_new')
+
+test.run_gyp('all.gyp', chdir='src')
+
+# After relocating, we should still be able to build (build file shouldn't
+# contain relative reference to ~/.gyp_new/include.gypi)
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', test.ALL, chdir='relocate/src')
+
+test.run_built_executable('printfoo',
+ chdir='relocate/src',
+ stdout='FOO is fromhome3\n')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py b/third_party/python/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py
new file mode 100755
index 0000000000..fdf8b14464
--- /dev/null
+++ b/third_party/python/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies inclusion of $HOME/.gyp/include.gypi works properly with relocation
+and with regeneration.
+"""
+
+import os
+import TestGyp
+
+# Regenerating build files when a gyp file changes is currently only supported
+# by the make generator.
+test = TestGyp.TestGyp(formats=['make'])
+
+os.environ['HOME'] = os.path.abspath('home')
+
+test.run_gyp('all.gyp', chdir='src')
+
+# After relocating, we should still be able to build (build file shouldn't
+# contain relative reference to ~/.gyp/include.gypi)
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', test.ALL, chdir='relocate/src')
+
+test.run_built_executable('printfoo',
+ chdir='relocate/src',
+ stdout='FOO is fromhome\n')
+
+# Building should notice any changes to ~/.gyp/include.gypi and regyp.
+test.sleep()
+
+test.write('home/.gyp/include.gypi', test.read('home2/.gyp/include.gypi'))
+
+test.build('all.gyp', test.ALL, chdir='relocate/src')
+
+test.run_built_executable('printfoo',
+ chdir='relocate/src',
+ stdout='FOO is fromhome2\n')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/home_dot_gyp/gyptest-home-includes.py b/third_party/python/gyp/test/home_dot_gyp/gyptest-home-includes.py
new file mode 100755
index 0000000000..8ad52556be
--- /dev/null
+++ b/third_party/python/gyp/test/home_dot_gyp/gyptest-home-includes.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies inclusion of $HOME/.gyp/include.gypi works.
+"""
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+os.environ['HOME'] = os.path.abspath('home')
+
+test.run_gyp('all.gyp', chdir='src')
+
+# After relocating, we should still be able to build (build file shouldn't
+# contain relative reference to ~/.gyp/include.gypi)
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', test.ALL, chdir='relocate/src')
+
+test.run_built_executable('printfoo',
+ chdir='relocate/src',
+ stdout='FOO is fromhome\n')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/home_dot_gyp/home/.gyp/include.gypi b/third_party/python/gyp/test/home_dot_gyp/home/.gyp/include.gypi
new file mode 100644
index 0000000000..fcfb39befd
--- /dev/null
+++ b/third_party/python/gyp/test/home_dot_gyp/home/.gyp/include.gypi
@@ -0,0 +1,5 @@
+{
+ 'variables': {
+ 'foo': '"fromhome"',
+ },
+}
diff --git a/third_party/python/gyp/test/home_dot_gyp/home2/.gyp/include.gypi b/third_party/python/gyp/test/home_dot_gyp/home2/.gyp/include.gypi
new file mode 100644
index 0000000000..f0d84b31ad
--- /dev/null
+++ b/third_party/python/gyp/test/home_dot_gyp/home2/.gyp/include.gypi
@@ -0,0 +1,5 @@
+{
+ 'variables': {
+ 'foo': '"fromhome2"',
+ },
+}
diff --git a/third_party/python/gyp/test/home_dot_gyp/home2/.gyp_new/include.gypi b/third_party/python/gyp/test/home_dot_gyp/home2/.gyp_new/include.gypi
new file mode 100644
index 0000000000..4094dfd2f8
--- /dev/null
+++ b/third_party/python/gyp/test/home_dot_gyp/home2/.gyp_new/include.gypi
@@ -0,0 +1,5 @@
+{
+ 'variables': {
+ 'foo': '"fromhome3"',
+ },
+}
diff --git a/third_party/python/gyp/test/home_dot_gyp/src/all.gyp b/third_party/python/gyp/test/home_dot_gyp/src/all.gyp
new file mode 100644
index 0000000000..14b6aea285
--- /dev/null
+++ b/third_party/python/gyp/test/home_dot_gyp/src/all.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'foo%': '"fromdefault"',
+ },
+ 'targets': [
+ {
+ 'target_name': 'printfoo',
+ 'type': 'executable',
+ 'sources': [
+ 'printfoo.c',
+ ],
+ 'defines': [
+ 'FOO=<(foo)',
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/home_dot_gyp/src/printfoo.c b/third_party/python/gyp/test/home_dot_gyp/src/printfoo.c
new file mode 100644
index 0000000000..9bb67181b9
--- /dev/null
+++ b/third_party/python/gyp/test/home_dot_gyp/src/printfoo.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(void)
+{
+ printf("FOO is %s\n", FOO);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/include_dirs/gyptest-all.py b/third_party/python/gyp/test/include_dirs/gyptest-all.py
new file mode 100755
index 0000000000..d64bc6a9ca
--- /dev/null
+++ b/third_party/python/gyp/test/include_dirs/gyptest-all.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies use of include_dirs when using an explicit build target of 'all'.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('includes.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('includes.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from includes.c
+Hello from inc.h
+Hello from include1.h
+Hello from subdir/inc2/include2.h
+Hello from shadow2/shadow.h
+"""
+test.run_built_executable('includes', stdout=expect, chdir='relocate/src')
+
+if test.format == 'xcode':
+ chdir='relocate/src/subdir'
+else:
+ chdir='relocate/src'
+
+expect = """\
+Hello from subdir/subdir_includes.c
+Hello from subdir/inc.h
+Hello from include1.h
+Hello from subdir/inc2/include2.h
+"""
+test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/include_dirs/gyptest-default.py b/third_party/python/gyp/test/include_dirs/gyptest-default.py
new file mode 100755
index 0000000000..fc6141587e
--- /dev/null
+++ b/third_party/python/gyp/test/include_dirs/gyptest-default.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies use of include_dirs when using the default build target.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('includes.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('includes.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from includes.c
+Hello from inc.h
+Hello from include1.h
+Hello from subdir/inc2/include2.h
+Hello from shadow2/shadow.h
+"""
+test.run_built_executable('includes', stdout=expect, chdir='relocate/src')
+
+if test.format == 'xcode':
+ chdir='relocate/src/subdir'
+else:
+ chdir='relocate/src'
+
+expect = """\
+Hello from subdir/subdir_includes.c
+Hello from subdir/inc.h
+Hello from include1.h
+Hello from subdir/inc2/include2.h
+"""
+test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/include_dirs/src/inc.h b/third_party/python/gyp/test/include_dirs/src/inc.h
new file mode 100644
index 0000000000..0398d6915f
--- /dev/null
+++ b/third_party/python/gyp/test/include_dirs/src/inc.h
@@ -0,0 +1 @@
+#define INC_STRING "inc.h"
diff --git a/third_party/python/gyp/test/include_dirs/src/inc1/include1.h b/third_party/python/gyp/test/include_dirs/src/inc1/include1.h
new file mode 100644
index 0000000000..43356b5f47
--- /dev/null
+++ b/third_party/python/gyp/test/include_dirs/src/inc1/include1.h
@@ -0,0 +1 @@
+#define INCLUDE1_STRING "include1.h"
diff --git a/third_party/python/gyp/test/include_dirs/src/includes.c b/third_party/python/gyp/test/include_dirs/src/includes.c
new file mode 100644
index 0000000000..6e2a23cdff
--- /dev/null
+++ b/third_party/python/gyp/test/include_dirs/src/includes.c
@@ -0,0 +1,19 @@
+#include <stdio.h>
+
+#include "inc.h"
+#include "include1.h"
+#include "include2.h"
+#include "shadow.h"
+
+int main(void)
+{
+ printf("Hello from includes.c\n");
+ printf("Hello from %s\n", INC_STRING);
+ printf("Hello from %s\n", INCLUDE1_STRING);
+ printf("Hello from %s\n", INCLUDE2_STRING);
+ /* Test that include_dirs happen first: The gyp file has a -Ishadow1
+ cflag and an include_dir of shadow2. Including shadow.h should get
+ the shadow.h from the include_dir. */
+ printf("Hello from %s\n", SHADOW_STRING);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/include_dirs/src/includes.gyp b/third_party/python/gyp/test/include_dirs/src/includes.gyp
new file mode 100644
index 0000000000..3592690208
--- /dev/null
+++ b/third_party/python/gyp/test/include_dirs/src/includes.gyp
@@ -0,0 +1,27 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'includes',
+ 'type': 'executable',
+ 'dependencies': [
+ 'subdir/subdir_includes.gyp:subdir_includes',
+ ],
+ 'cflags': [
+ '-Ishadow1',
+ ],
+ 'include_dirs': [
+ '.',
+ 'inc1',
+ 'shadow2',
+ 'subdir/inc2',
+ ],
+ 'sources': [
+ 'includes.c',
+ ],
+ },
+ ],
+}
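
The shadow1/shadow2 pair above is what the expected "Hello from shadow2/shadow.h" output earlier exercises: include_dirs are expected to be emitted ahead of the raw -Ishadow1 cflag, so shadow2/shadow.h wins. A minimal sketch of that assumed ordering (illustrative only, not gyp generator code):

# Sketch: assumed ordering of include flags for the 'includes' target above.
# include_dirs (relative to the .gyp file) come before user cflags, so
# shadow2/shadow.h shadows shadow1/shadow.h.
include_dirs = ['.', 'inc1', 'shadow2', 'subdir/inc2']
cflags = ['-Ishadow1']
expected_flags = ['-I' + d for d in include_dirs] + cflags
assert expected_flags.index('-Ishadow2') < expected_flags.index('-Ishadow1')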
diff --git a/third_party/python/gyp/test/include_dirs/src/shadow1/shadow.h b/third_party/python/gyp/test/include_dirs/src/shadow1/shadow.h
new file mode 100644
index 0000000000..80f6de20b8
--- /dev/null
+++ b/third_party/python/gyp/test/include_dirs/src/shadow1/shadow.h
@@ -0,0 +1 @@
+#define SHADOW_STRING "shadow1/shadow.h"
diff --git a/third_party/python/gyp/test/include_dirs/src/shadow2/shadow.h b/third_party/python/gyp/test/include_dirs/src/shadow2/shadow.h
new file mode 100644
index 0000000000..fad5ccd085
--- /dev/null
+++ b/third_party/python/gyp/test/include_dirs/src/shadow2/shadow.h
@@ -0,0 +1 @@
+#define SHADOW_STRING "shadow2/shadow.h"
diff --git a/third_party/python/gyp/test/include_dirs/src/subdir/inc.h b/third_party/python/gyp/test/include_dirs/src/subdir/inc.h
new file mode 100644
index 0000000000..0a68d7b36a
--- /dev/null
+++ b/third_party/python/gyp/test/include_dirs/src/subdir/inc.h
@@ -0,0 +1 @@
+#define INC_STRING "subdir/inc.h"
diff --git a/third_party/python/gyp/test/include_dirs/src/subdir/inc2/include2.h b/third_party/python/gyp/test/include_dirs/src/subdir/inc2/include2.h
new file mode 100644
index 0000000000..721577effb
--- /dev/null
+++ b/third_party/python/gyp/test/include_dirs/src/subdir/inc2/include2.h
@@ -0,0 +1 @@
+#define INCLUDE2_STRING "subdir/inc2/include2.h"
diff --git a/third_party/python/gyp/test/include_dirs/src/subdir/subdir_includes.c b/third_party/python/gyp/test/include_dirs/src/subdir/subdir_includes.c
new file mode 100644
index 0000000000..4623543c43
--- /dev/null
+++ b/third_party/python/gyp/test/include_dirs/src/subdir/subdir_includes.c
@@ -0,0 +1,14 @@
+#include <stdio.h>
+
+#include "inc.h"
+#include "include1.h"
+#include "include2.h"
+
+int main(void)
+{
+ printf("Hello from subdir/subdir_includes.c\n");
+ printf("Hello from %s\n", INC_STRING);
+ printf("Hello from %s\n", INCLUDE1_STRING);
+ printf("Hello from %s\n", INCLUDE2_STRING);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/include_dirs/src/subdir/subdir_includes.gyp b/third_party/python/gyp/test/include_dirs/src/subdir/subdir_includes.gyp
new file mode 100644
index 0000000000..257d052c3c
--- /dev/null
+++ b/third_party/python/gyp/test/include_dirs/src/subdir/subdir_includes.gyp
@@ -0,0 +1,20 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'subdir_includes',
+ 'type': 'executable',
+ 'include_dirs': [
+ '.',
+ '../inc1',
+ 'inc2',
+ ],
+ 'sources': [
+ 'subdir_includes.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/intermediate_dir/gyptest-intermediate-dir.py b/third_party/python/gyp/test/intermediate_dir/gyptest-intermediate-dir.py
new file mode 100755
index 0000000000..bf4b91a2fc
--- /dev/null
+++ b/third_party/python/gyp/test/intermediate_dir/gyptest-intermediate-dir.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that targets have independent INTERMEDIATE_DIRs.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('test.gyp', chdir='src')
+
+test.build('test.gyp', 'target1', chdir='src')
+# Check that the intermediate output files exist and contain the expected text.
+intermediate_file1 = test.read('src/outfile.txt')
+test.must_contain(intermediate_file1, 'target1')
+
+shared_intermediate_file1 = test.read('src/shared_outfile.txt')
+test.must_contain(shared_intermediate_file1, 'shared_target1')
+
+test.run_gyp('test2.gyp', chdir='src')
+
+# Force the shared intermediate to be rebuilt.
+test.sleep()
+test.touch('src/shared_infile.txt')
+test.build('test2.gyp', 'target2', chdir='src')
+# Check INTERMEDIATE_DIR file didn't get overwritten but SHARED_INTERMEDIATE_DIR
+# file did.
+intermediate_file2 = test.read('src/outfile.txt')
+test.must_contain(intermediate_file1, 'target1')
+test.must_contain(intermediate_file2, 'target2')
+
+shared_intermediate_file2 = test.read('src/shared_outfile.txt')
+if shared_intermediate_file1 != shared_intermediate_file2:
+ test.fail_test(shared_intermediate_file1 + ' != ' + shared_intermediate_file2)
+
+test.must_contain(shared_intermediate_file1, 'shared_target2')
+test.must_contain(shared_intermediate_file2, 'shared_target2')
+
+test.pass_test()
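
Put differently, the invariant checked above is that each target writes into its own INTERMEDIATE_DIR while both targets rewrite one shared SHARED_INTERMEDIATE_DIR file. A compact restatement as a sketch (the helper name and read callable are placeholders, not TestGyp API):

# Sketch of the invariant verified by the test above.
def check_invariant(intermediate1, intermediate2, shared1, shared2, read):
    # Per-target intermediates are separate files; target2's build must not
    # disturb the file target1 wrote.
    assert read(intermediate1) == 'target1'
    assert read(intermediate2) == 'target2'
    # The shared intermediate is a single file, rewritten by the second build.
    assert shared1 == shared2
    assert read(shared2) == 'shared_target2'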
diff --git a/third_party/python/gyp/test/intermediate_dir/src/script.py b/third_party/python/gyp/test/intermediate_dir/src/script.py
new file mode 100755
index 0000000000..2eb73ac206
--- /dev/null
+++ b/third_party/python/gyp/test/intermediate_dir/src/script.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Takes 3 arguments. Writes the 1st argument to the file in the 2nd argument,
+# and writes the absolute path to the file in the 2nd argument to the file in
+# the 3rd argument.
+
+import os
+import shlex
+import sys
+
+if len(sys.argv) == 3 and ' ' in sys.argv[2]:
+ sys.argv[2], fourth = shlex.split(sys.argv[2].replace('\\', '\\\\'))
+ sys.argv.append(fourth)
+
+with open(sys.argv[2], 'w') as f:
+ f.write(sys.argv[1])
+
+with open(sys.argv[3], 'w') as f:
+ f.write(os.path.abspath(sys.argv[2]))
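
The actions in test.gyp and test2.gyp below invoke this helper with the expanded <(_outputs) list. A hypothetical stand-alone invocation, with placeholder paths for illustration:

# Hypothetical manual run of script.py; the paths are made up.
import subprocess

subprocess.check_call([
    'python', 'script.py',
    'target1',                   # written into the first output file
    'out/intermediate_out.txt',  # 2nd arg: receives the string 'target1'
    'outfile.txt',               # 3rd arg: receives the absolute path of the 2nd
])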
diff --git a/third_party/python/gyp/test/intermediate_dir/src/shared_infile.txt b/third_party/python/gyp/test/intermediate_dir/src/shared_infile.txt
new file mode 100644
index 0000000000..e2aba15d04
--- /dev/null
+++ b/third_party/python/gyp/test/intermediate_dir/src/shared_infile.txt
@@ -0,0 +1 @@
+dummy input
diff --git a/third_party/python/gyp/test/intermediate_dir/src/test.gyp b/third_party/python/gyp/test/intermediate_dir/src/test.gyp
new file mode 100644
index 0000000000..b61e7e8ea5
--- /dev/null
+++ b/third_party/python/gyp/test/intermediate_dir/src/test.gyp
@@ -0,0 +1,42 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'target1',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'intermediate',
+ 'inputs': [],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/intermediate_out.txt',
+ 'outfile.txt',
+ ],
+ 'action': [
+ 'python', 'script.py', 'target1', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'shared_intermediate',
+ 'inputs': [
+ 'shared_infile.txt',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/intermediate_out.txt',
+ 'shared_outfile.txt',
+ ],
+ 'action': [
+ 'python', 'script.py', 'shared_target1', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/intermediate_dir/src/test2.gyp b/third_party/python/gyp/test/intermediate_dir/src/test2.gyp
new file mode 100644
index 0000000000..41f5564663
--- /dev/null
+++ b/third_party/python/gyp/test/intermediate_dir/src/test2.gyp
@@ -0,0 +1,42 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'target2',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'intermediate',
+ 'inputs': [],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/intermediate_out.txt',
+ 'outfile.txt',
+ ],
+ 'action': [
+ 'python', 'script.py', 'target2', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'shared_intermediate',
+ 'inputs': [
+ 'shared_infile.txt',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/intermediate_out.txt',
+ 'shared_outfile.txt',
+ ],
+ 'action': [
+ 'python', 'script.py', 'shared_target2', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist-error.strings b/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist-error.strings
new file mode 100644
index 0000000000..452e7fabf9
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist-error.strings
@@ -0,0 +1,3 @@
+/* Localized versions of Info.plist keys */
+
+NSHumanReadableCopyright = "Copyright ©2011 Google Inc."
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist.strings b/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist.strings
new file mode 100644
index 0000000000..35bd33a96e
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist.strings
@@ -0,0 +1,3 @@
+/* Localized versions of Info.plist keys */
+
+NSHumanReadableCopyright = "Copyright ©2011 Google Inc.";
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/LanguageMap.plist b/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/LanguageMap.plist
new file mode 100644
index 0000000000..6b94882328
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/LanguageMap.plist
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>en</key>
+ <string>en</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/MainMenu.xib b/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/MainMenu.xib
new file mode 100644
index 0000000000..21b60448ad
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/MainMenu.xib
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="9060" systemVersion="15B42" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES">
+ <dependencies>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="9051"/>
+ <capability name="Aspect ratio constraints" minToolsVersion="5.1"/>
+ <capability name="Constraints with non-1.0 multipliers" minToolsVersion="5.1"/>
+ </dependencies>
+ <objects>
+ <placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner"/>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
+ <viewController id="Ssz-5V-cv2">
+ <view key="view" contentMode="scaleToFill" id="tRS-Cx-RH3">
+ </view>
+ <point key="canvasLocation" x="548" y="1086"/>
+ </viewController>
+ </objects>
+</document>
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/Main_iPhone.storyboard b/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/Main_iPhone.storyboard
new file mode 100644
index 0000000000..723bc85122
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/English.lproj/Main_iPhone.storyboard
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="1.0" toolsVersion="1906" systemVersion="11A511" targetRuntime="iOS.CocoaTouch" nextObjectID="6" propertyAccessControl="none" initialViewController="2">
+ <dependencies>
+ <development defaultVersion="4200" identifier="xcode"/>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="902"/>
+ </dependencies>
+ <scenes>
+ <scene sceneID="5">
+ <objects>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="4" sceneMemberID="firstResponder"/>
+ <viewController id="2" customClass="ViewController" sceneMemberID="viewController">
+ <view key="view" contentMode="scaleToFill" id="3">
+ <rect key="frame" x="0.0" y="20" width="320" height="460"/>
+ <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
+ <subviews/>
+ <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+ </view>
+ </viewController>
+ </objects>
+ </scene>
+ </scenes>
+ <simulatedMetricsContainer key="defaultSimulatedMetrics">
+ <simulatedStatusBarMetrics key="statusBar"/>
+ <simulatedOrientationMetrics key="orientation"/>
+ <simulatedScreenMetrics key="destination"/>
+ </simulatedMetricsContainer>
+</document>
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json b/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json
new file mode 100644
index 0000000000..2db2b1c7c6
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json
@@ -0,0 +1,58 @@
+{
+ "images" : [
+ {
+ "idiom" : "mac",
+ "size" : "16x16",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "16x16",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "32x32",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "32x32",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "128x128",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "128x128",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "256x256",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "256x256",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "512x512",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "512x512",
+ "scale" : "2x"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
\ No newline at end of file
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json b/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json
new file mode 100644
index 0000000000..0a87b6edc6
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json
@@ -0,0 +1,23 @@
+{
+ "images" : [
+ {
+ "idiom" : "universal",
+ "scale" : "1x",
+ "filename" : "super_sylvain.png"
+ },
+ {
+ "idiom" : "universal",
+ "scale" : "2x",
+ "filename" : "super_sylvain@2x.png"
+ },
+ {
+ "idiom" : "universal",
+ "scale" : "3x",
+ "filename" : "super_sylvain@3x.png"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
\ No newline at end of file
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.png b/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.png
new file mode 100644
index 0000000000..0ba769182f
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.png
Binary files differ
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.png b/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.png
new file mode 100644
index 0000000000..edfa6a5682
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.png
Binary files differ
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.png b/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.png
new file mode 100644
index 0000000000..e0652efc72
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.png
Binary files differ
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/TestApp-Info.plist b/third_party/python/gyp/test/ios/app-bundle/TestApp/TestApp-Info.plist
new file mode 100644
index 0000000000..bb90043682
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/TestApp-Info.plist
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.${PRODUCT_NAME}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>ause</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>NSMainNibFile</key>
+ <string>MainMenu</string>
+ <key>NSPrincipalClass</key>
+ <string>NSApplication</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/check_no_signature.py b/third_party/python/gyp/test/ios/app-bundle/TestApp/check_no_signature.py
new file mode 100644
index 0000000000..4f6e340072
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/check_no_signature.py
@@ -0,0 +1,13 @@
+#!/usr/bin/python
+
+import os
+import subprocess
+import sys
+
+p = os.path.join(os.environ['BUILT_PRODUCTS_DIR'],os.environ['EXECUTABLE_PATH'])
+proc = subprocess.Popen(['codesign', '-v', p],
+ stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
+o = proc.communicate()[0].strip()
+if "code object is not signed at all" not in o:
+ sys.stderr.write('File should not already be signed.')
+ sys.exit(1)
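
The sig_test target in test-device.gyp (later in this patch) runs this script as a postbuild. An equivalent manual check, with the binary path as a placeholder, would be:

# Hypothetical manual equivalent of check_no_signature.py (Python 3).
import subprocess

out = subprocess.run(
    ['codesign', '-v', 'buildsig/Default/sigtest.app/sigtest'],  # placeholder path
    stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout
if b'code object is not signed at all' not in out:
    raise SystemExit('File should not already be signed.')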
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/main.m b/third_party/python/gyp/test/ios/app-bundle/TestApp/main.m
new file mode 100644
index 0000000000..ec93e0e237
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/main.m
@@ -0,0 +1,13 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+
+int main(int argc, char *argv[])
+{
+ NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
+ int retVal = UIApplicationMain(argc, argv, nil, nil);
+ [pool release];
+ return retVal;
+}
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/only-compile-in-32-bits.m b/third_party/python/gyp/test/ios/app-bundle/TestApp/only-compile-in-32-bits.m
new file mode 100644
index 0000000000..28bb117788
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/only-compile-in-32-bits.m
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#if defined(__LP64__)
+# error 64-bit build
+#endif
diff --git a/third_party/python/gyp/test/ios/app-bundle/TestApp/only-compile-in-64-bits.m b/third_party/python/gyp/test/ios/app-bundle/TestApp/only-compile-in-64-bits.m
new file mode 100644
index 0000000000..e6d2558418
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/TestApp/only-compile-in-64-bits.m
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#if !defined(__LP64__)
+# error 32-bit build
+#endif
diff --git a/third_party/python/gyp/test/ios/app-bundle/test-archs.gyp b/third_party/python/gyp/test/ios/app-bundle/test-archs.gyp
new file mode 100644
index 0000000000..fa935c4fb4
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/test-archs.gyp
@@ -0,0 +1,109 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'make_global_settings': [
+ ['CC', '/usr/bin/clang'],
+ ],
+ 'target_defaults': {
+ 'mac_bundle_resources': [
+ 'TestApp/English.lproj/InfoPlist.strings',
+ 'TestApp/English.lproj/MainMenu.xib',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fobjc-abi-version=2',
+ ],
+ 'CODE_SIGNING_REQUIRED': 'NO',
+ 'SDKROOT': 'iphoneos', # -isysroot
+ 'TARGETED_DEVICE_FAMILY': '1,2',
+ 'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
+ },
+ },
+ 'targets': [
+ {
+ 'target_name': 'TestNoArchs',
+ 'product_name': 'TestNoArchs',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'TestApp/main.m',
+ 'TestApp/only-compile-in-32-bits.m',
+ ],
+ 'xcode_settings': {
+ 'VALID_ARCHS': [
+ 'i386',
+ 'x86_64',
+ 'arm64',
+ 'armv7',
+ ],
+ }
+ },
+ {
+ 'target_name': 'TestArch32Bits',
+ 'product_name': 'TestArch32Bits',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'TestApp/main.m',
+ 'TestApp/only-compile-in-32-bits.m',
+ ],
+ 'xcode_settings': {
+ 'ARCHS': [
+ '$(ARCHS_STANDARD)',
+ ],
+ 'VALID_ARCHS': [
+ 'i386',
+ 'armv7',
+ ],
+ },
+ },
+ {
+ 'target_name': 'TestArch64Bits',
+ 'product_name': 'TestArch64Bits',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'TestApp/main.m',
+ 'TestApp/only-compile-in-64-bits.m',
+ ],
+ 'xcode_settings': {
+ 'ARCHS': [
+ '$(ARCHS_STANDARD_INCLUDING_64_BIT)',
+ ],
+ 'VALID_ARCHS': [
+ 'x86_64',
+ 'arm64',
+ ],
+ },
+ },
+ {
+ 'target_name': 'TestMultiArchs',
+ 'product_name': 'TestMultiArchs',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'TestApp/main.m',
+ ],
+ 'xcode_settings': {
+ 'ARCHS': [
+ '$(ARCHS_STANDARD_INCLUDING_64_BIT)',
+ ],
+ 'VALID_ARCHS': [
+ 'x86_64',
+ 'i386',
+ 'arm64',
+ 'armv7',
+ ],
+ }
+ },
+ ],
+}
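
The four targets above differ only in how ARCHS and VALID_ARCHS intersect. As a rough model (an assumption about Xcode's arch filtering, not part of the test), the architectures actually built are the declared ARCHS that survive the VALID_ARCHS filter:

# Rough model of the ARCHS / VALID_ARCHS filtering relied on above.
def effective_archs(archs, valid_archs):
    # Keep only declared build archs that are also listed as valid.
    return [a for a in archs if a in valid_archs]

# e.g. TestArch32Bits with a typical device arch list:
print(effective_archs(['armv7', 'arm64'], ['i386', 'armv7']))  # ['armv7']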
diff --git a/third_party/python/gyp/test/ios/app-bundle/test-assets-catalog.gyp b/third_party/python/gyp/test/ios/app-bundle/test-assets-catalog.gyp
new file mode 100644
index 0000000000..9a12d07af7
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/test-assets-catalog.gyp
@@ -0,0 +1,45 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'conditions': [
+ ['"<(GENERATOR)"=="ninja"', {
+ 'make_global_settings': [
+ ['CC', '/usr/bin/clang'],
+ ['CXX', '/usr/bin/clang++'],
+ ],
+ }],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test App Assets Catalog Gyp',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'TestApp/main.m',
+ ],
+ 'mac_bundle_resources': [
+ 'TestApp/English.lproj/InfoPlist.strings',
+ 'TestApp/English.lproj/MainMenu.xib',
+ 'TestApp/English.lproj/Main_iPhone.storyboard',
+ 'TestApp/Images.xcassets',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fobjc-abi-version=2',
+ ],
+ 'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
+ 'SDKROOT': 'iphonesimulator', # -isysroot
+ 'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/ios/app-bundle/test-crosscompile.gyp b/third_party/python/gyp/test/ios/app-bundle/test-crosscompile.gyp
new file mode 100644
index 0000000000..d9049588ba
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/test-crosscompile.gyp
@@ -0,0 +1,47 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'make_global_settings': [
+ ['CC', '/usr/bin/clang'],
+ ],
+ 'targets': [
+    # This target will not be built, but is here so that ninja's Xcode emulation
+    # understands this is a multi-platform (ios + mac) build.
+ {
+ 'target_name': 'TestDummy',
+ 'product_name': 'TestDummy',
+ 'toolsets': ['target'],
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'tool_main.cc',
+ ],
+ 'xcode_settings': {
+ 'SDKROOT': 'iphonesimulator', # -isysroot
+ 'TARGETED_DEVICE_FAMILY': '1,2',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
+ },
+ },
+ {
+ 'target_name': 'TestHost',
+ 'product_name': 'TestHost',
+ 'toolsets': ['host'],
+ 'type': 'executable',
+ 'mac_bundle': 0,
+ 'sources': [
+ 'tool_main.cc',
+ ],
+ 'xcode_settings': {
+ 'SDKROOT': 'macosx',
+ 'ARCHS': [
+ '$(ARCHS_STANDARD)',
+ 'x86_64',
+ ],
+ 'VALID_ARCHS': [
+ 'x86_64',
+ ],
+ }
+ }
+ ],
+}
diff --git a/third_party/python/gyp/test/ios/app-bundle/test-device.gyp b/third_party/python/gyp/test/ios/app-bundle/test-device.gyp
new file mode 100644
index 0000000000..a0cfff7cdb
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/test-device.gyp
@@ -0,0 +1,109 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'conditions': [
+ ['"<(GENERATOR)"=="xcode"', {
+ 'target_defaults': {
+ 'configurations': {
+ 'Default': {
+ 'xcode_settings': {
+ 'SDKROOT': 'iphonesimulator',
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
+ }
+ },
+ 'Default-iphoneos': {
+ 'xcode_settings': {
+ 'SDKROOT': 'iphoneos',
+ 'CONFIGURATION_BUILD_DIR':'build/Default-iphoneos',
+ }
+ },
+ },
+ },
+ }, {
+ 'target_defaults': {
+ 'configurations': {
+ 'Default': {
+ 'xcode_settings': {
+ 'SDKROOT': 'iphonesimulator',
+ }
+ },
+ },
+ },
+ }],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test App Gyp',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'TestApp/main.m',
+ ],
+ 'mac_bundle_resources': [
+ 'TestApp/English.lproj/InfoPlist.strings',
+ 'TestApp/English.lproj/MainMenu.xib',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fobjc-abi-version=2',
+ ],
+ 'SDKROOT': 'iphonesimulator', # -isysroot
+ 'TARGETED_DEVICE_FAMILY': '1,2',
+ 'INFOPLIST_OUTPUT_FORMAT':'xml',
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '8.0',
+ 'CODE_SIGNING_REQUIRED': 'NO',
+ 'CODE_SIGN_IDENTITY[sdk=iphoneos*]': '',
+
+ },
+ },
+ {
+ 'target_name': 'sig_test',
+ 'product_name': 'sigtest',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'TestApp/main.m',
+ ],
+ 'mac_bundle_resources': [
+ 'TestApp/English.lproj/InfoPlist.strings',
+ 'TestApp/English.lproj/MainMenu.xib',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+ ],
+ },
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Verify no signature',
+ 'action': [
+ 'python',
+ 'TestApp/check_no_signature.py'
+ ],
+ },
+ ],
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fobjc-abi-version=2',
+ ],
+ 'SDKROOT': 'iphonesimulator', # -isysroot
+ 'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+ 'INFOPLIST_OUTPUT_FORMAT':'xml',
+ 'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '8.0',
+ 'CONFIGURATION_BUILD_DIR':'buildsig/Default',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/ios/app-bundle/test.gyp b/third_party/python/gyp/test/ios/app-bundle/test.gyp
new file mode 100644
index 0000000000..544c589f60
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/test.gyp
@@ -0,0 +1,75 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'conditions': [
+ ['"<(GENERATOR)"=="ninja"', {
+ 'make_global_settings': [
+ ['CC', '/usr/bin/clang'],
+ ['CXX', '/usr/bin/clang++'],
+ ],
+ }],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test App Gyp',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'TestApp/main.m',
+ ],
+ 'mac_bundle_resources': [
+ 'TestApp/English.lproj/InfoPlist.strings',
+ 'TestApp/English.lproj/LanguageMap.plist',
+ 'TestApp/English.lproj/MainMenu.xib',
+ 'TestApp/English.lproj/Main_iPhone.storyboard',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fobjc-abi-version=2',
+ ],
+ 'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
+ 'SDKROOT': 'iphonesimulator', # -isysroot
+ 'IPHONEOS_DEPLOYMENT_TARGET': '5.0',
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
+ },
+ },
+ {
+ 'target_name': 'test_app_xml',
+ 'product_name': 'Test App Gyp XML',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'TestApp/main.m',
+ ],
+ 'mac_bundle_resources': [
+ 'TestApp/English.lproj/InfoPlist.strings',
+ 'TestApp/English.lproj/MainMenu.xib',
+ 'TestApp/English.lproj/Main_iPhone.storyboard',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fobjc-abi-version=2',
+ ],
+ 'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
+ 'INFOPLIST_OUTPUT_FORMAT':'xml',
+ 'SDKROOT': 'iphonesimulator', # -isysroot
+ 'IPHONEOS_DEPLOYMENT_TARGET': '5.0',
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/ios/app-bundle/tool_main.cc b/third_party/python/gyp/test/ios/app-bundle/tool_main.cc
new file mode 100644
index 0000000000..9dc3c94f34
--- /dev/null
+++ b/third_party/python/gyp/test/ios/app-bundle/tool_main.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/Info.plist b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/Info.plist
new file mode 100644
index 0000000000..a0985c3e4d
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/Info.plist
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/copies-with-xcode-envvars.gyp b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/copies-with-xcode-envvars.gyp
new file mode 100644
index 0000000000..217dbb0479
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/copies-with-xcode-envvars.gyp
@@ -0,0 +1,97 @@
+# Copyright (c) 2016 Mark Callow. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# For testing use of the UI settings & environment variables
+# available in Xcode's PBXCopyFilesBuildPhase.
+{
+ 'targets': [
+ {
+ 'target_name': 'copies-with-xcode-envvars',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'xcode_settings': {
+ 'SDKROOT': 'iphoneos',
+ 'TARGETED_DEVICE_FAMILY': '1,2',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
+ 'CODE_SIGNING_REQUIRED': 'NO',
+ 'INFOPLIST_FILE': 'Info.plist',
+ # This is where the test framework looks for results. Without
+ # this line the result will be in build/Default-iphoneos.
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
+ },
+ 'sources': [ 'empty.c' ],
+ 'conditions': [
+ ['OS == "ios" or OS == "mac"', {
+ 'copies': [{
+ 'destination': '$(BUILT_PRODUCTS_DIR)',
+ 'files': [
+ 'file0',
+ ],
+ }, {
+ 'destination': '$(BUILT_PRODUCTS_DIR)/$(WRAPPER_NAME)',
+ 'files': [
+ 'file1',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(EXECUTABLE_FOLDER_PATH)',
+ 'files': [
+ 'file2',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(UNLOCALIZED_RESOURCES_FOLDER_PATH)',
+ 'files': [
+ 'file3',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(UNLOCALIZED_RESOURCES_FOLDER_PATH)/testimages',
+ 'files': [
+ 'file4',
+ ],
+ }, {
+ 'destination': '$(BUILT_PRODUCTS_DIR)/$(JAVA_FOLDER_PATH)',
+ 'files': [
+ 'file5',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(FRAMEWORKS_FOLDER_PATH)',
+ 'files': [
+ 'file6',
+ ],
+ }, {
+ # NOTE: This is not an Xcode macro name but
+ # xcodeproj_file.py recognizes it and sends
+ # the output to the same place as
+ # $(FRAMEWORKS_FOLDER_PATH). xcode_emulation.py
+ # sets its value to an absolute path.
+ 'destination': '$(BUILT_FRAMEWORKS_DIR)',
+ 'files': [
+ 'file7',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(SHARED_FRAMEWORKS_FOLDER_PATH)',
+ 'files': [
+ 'file8',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(SHARED_SUPPORT_FOLDER_PATH)',
+ 'files': [
+ 'file9',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(PLUGINS_FOLDER_PATH)',
+ 'files': [
+ 'file10',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(XPCSERVICES_FOLDER_PATH)',
+ 'files': [
+ 'file11',
+ ],
+ }], # copies
+ }], # OS == "ios" or OS == "mac"
+ ], # conditions
+ }], # targets
+}
+
+# vim:ai:ts=4:sts=4:sw=2:expandtab:textwidth=70
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/empty.c b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/empty.c
new file mode 100644
index 0000000000..237c8ce181
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/empty.c
@@ -0,0 +1 @@
+int main() {}
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file0 b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file0
new file mode 100644
index 0000000000..117889361f
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file0
@@ -0,0 +1 @@
+file0 contents
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file1 b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file1
new file mode 100644
index 0000000000..84d55c5759
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file1
@@ -0,0 +1 @@
+file1 contents
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file10 b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file10
new file mode 100644
index 0000000000..372e992ef9
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file10
@@ -0,0 +1 @@
+file10 contents
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file11 b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file11
new file mode 100644
index 0000000000..923e760e1f
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file11
@@ -0,0 +1 @@
+file11 contents
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file2 b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file2
new file mode 100644
index 0000000000..af1b8ae35d
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file2
@@ -0,0 +1 @@
+file2 contents
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file3 b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file3
new file mode 100644
index 0000000000..43f16f3522
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file3
@@ -0,0 +1 @@
+file3 contents
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file4 b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file4
new file mode 100644
index 0000000000..5f7270a084
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file4
@@ -0,0 +1 @@
+file4 contents
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file5 b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file5
new file mode 100644
index 0000000000..41f47186bd
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file5
@@ -0,0 +1 @@
+file5 contents
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file6 b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file6
new file mode 100644
index 0000000000..f5d5757348
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file6
@@ -0,0 +1 @@
+file6 contents
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file7 b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file7
new file mode 100644
index 0000000000..90dbe6e9e1
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file7
@@ -0,0 +1 @@
+file7 contents
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file8 b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file8
new file mode 100644
index 0000000000..9eb613fabb
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file8
@@ -0,0 +1 @@
+file8 contents
diff --git a/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file9 b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file9
new file mode 100644
index 0000000000..e37ac72ada
--- /dev/null
+++ b/third_party/python/gyp/test/ios/copies-with-xcode-envvars/file9
@@ -0,0 +1 @@
+file9 contents
diff --git a/third_party/python/gyp/test/ios/deployment-target/check-version-min.c b/third_party/python/gyp/test/ios/deployment-target/check-version-min.c
new file mode 100644
index 0000000000..761c529085
--- /dev/null
+++ b/third_party/python/gyp/test/ios/deployment-target/check-version-min.c
@@ -0,0 +1,33 @@
+/* Copyright (c) 2013 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <Availability.h>
+
+/* GYPTEST_MAC_VERSION_MIN: should be set to the corresponding value of
+ * xcode setting 'MACOSX_DEPLOYMENT_TARGET', otherwise both should be
+ * left undefined.
+ *
+ * GYPTEST_IOS_VERSION_MIN: should be set to the corresponding value of
+ * xcode setting 'IPHONEOS_DEPLOYMENT_TARGET', otherwise both should be
+ * left undefined.
+ */
+
+#if defined(GYPTEST_MAC_VERSION_MIN)
+# if GYPTEST_MAC_VERSION_MIN != __MAC_OS_X_VERSION_MIN_REQUIRED
+# error __MAC_OS_X_VERSION_MIN_REQUIRED has wrong value
+# endif
+#elif defined(__MAC_OS_X_VERSION_MIN_REQUIRED)
+# error __MAC_OS_X_VERSION_MIN_REQUIRED should be undefined
+#endif
+
+#if defined(GYPTEST_IOS_VERSION_MIN)
+# if GYPTEST_IOS_VERSION_MIN != __IPHONE_OS_VERSION_MIN_REQUIRED
+# error __IPHONE_OS_VERSION_MIN_REQUIRED has wrong value
+# endif
+#elif defined(__IPHONE_OS_VERSION_MIN_REQUIRED)
+# error __IPHONE_OS_VERSION_MIN_REQUIRED should be undefined
+#endif
+
+int main() { return 0; }
+
diff --git a/third_party/python/gyp/test/ios/deployment-target/deployment-target.gyp b/third_party/python/gyp/test/ios/deployment-target/deployment-target.gyp
new file mode 100644
index 0000000000..bdc1439b5e
--- /dev/null
+++ b/third_party/python/gyp/test/ios/deployment-target/deployment-target.gyp
@@ -0,0 +1,34 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'make_global_settings': [
+ ['CC', '/usr/bin/clang'],
+ ['CXX', '/usr/bin/clang++'],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'version-min-4.3',
+ 'type': 'static_library',
+ 'sources': [ 'check-version-min.c', ],
+ 'defines': [ 'GYPTEST_IOS_VERSION_MIN=40300', ],
+ 'xcode_settings': {
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'SDKROOT': 'iphoneos',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '4.3',
+ },
+ },
+ {
+ 'target_name': 'version-min-5.0',
+ 'type': 'static_library',
+ 'sources': [ 'check-version-min.c', ],
+ 'defines': [ 'GYPTEST_IOS_VERSION_MIN=50000', ],
+ 'xcode_settings': {
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'SDKROOT': 'iphoneos',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '5.0',
+ },
+ }
+ ],
+}
+
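
Each GYPTEST_IOS_VERSION_MIN value above is the numeric form of the matching IPHONEOS_DEPLOYMENT_TARGET, following the Availability.h convention of MAJOR*10000 + MINOR*100 (+ PATCH). A small sketch of that encoding (the helper name is my own, not part of the test):

# Sketch of the deployment-target encoding used by the defines above:
# '4.3' -> 40300, '5.0' -> 50000.
def version_min_macro(version):
    parts = [int(p) for p in version.split('.')] + [0, 0]
    return parts[0] * 10000 + parts[1] * 100 + parts[2]

assert version_min_macro('4.3') == 40300
assert version_min_macro('5.0') == 50000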
diff --git a/third_party/python/gyp/test/ios/extension/ActionExtension/ActionViewController.h b/third_party/python/gyp/test/ios/extension/ActionExtension/ActionViewController.h
new file mode 100644
index 0000000000..1c92509029
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/ActionExtension/ActionViewController.h
@@ -0,0 +1,9 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+
+@interface ActionViewController : UIViewController
+
+@end
diff --git a/third_party/python/gyp/test/ios/extension/ActionExtension/ActionViewController.m b/third_party/python/gyp/test/ios/extension/ActionExtension/ActionViewController.m
new file mode 100644
index 0000000000..d37bacdae1
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/ActionExtension/ActionViewController.m
@@ -0,0 +1,31 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "ActionViewController.h"
+#import <MobileCoreServices/MobileCoreServices.h>
+
+@interface ActionViewController ()
+
+@end
+
+@implementation ActionViewController
+
+- (void)viewDidLoad {
+ [super viewDidLoad];
+}
+
+- (void)didReceiveMemoryWarning {
+ [super didReceiveMemoryWarning];
+ // Dispose of any resources that can be recreated.
+}
+
+- (IBAction)done {
+ // Return any edited content to the host app.
+ // This template doesn't do anything, so we just echo the passed in items.
+ [self.extensionContext
+ completeRequestReturningItems:self.extensionContext.inputItems
+ completionHandler:nil];
+}
+
+@end
diff --git a/third_party/python/gyp/test/ios/extension/ActionExtension/Info.plist b/third_party/python/gyp/test/ios/extension/ActionExtension/Info.plist
new file mode 100644
index 0000000000..f89cd790bc
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/ActionExtension/Info.plist
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>ActionExtension</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.gyptest.extension.ActionExtension</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>XPC!</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>NSExtension</key>
+ <dict>
+ <key>NSExtensionAttributes</key>
+ <dict>
+ <key>NSExtensionActivationRule</key>
+ <string>TRUEPREDICATE</string>
+ <key>NSExtensionPointName</key>
+ <string>com.apple.ui-services</string>
+ <key>NSExtensionPointVersion</key>
+ <string>1.0</string>
+ </dict>
+ <key>NSExtensionMainStoryboard</key>
+ <string>MainInterface</string>
+ <key>NSExtensionPointIdentifier</key>
+ <string>com.apple.ui-services</string>
+ </dict>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/ios/extension/ActionExtension/MainInterface.storyboard b/third_party/python/gyp/test/ios/extension/ActionExtension/MainInterface.storyboard
new file mode 100644
index 0000000000..5aa58184e8
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/ActionExtension/MainInterface.storyboard
@@ -0,0 +1,63 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6148" systemVersion="14A229a" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" initialViewController="ObA-dk-sSI">
+ <dependencies>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6147"/>
+ </dependencies>
+ <scenes>
+ <!--Action View Controller - Image-->
+ <scene sceneID="7MM-of-jgj">
+ <objects>
+ <viewController title="Image" id="ObA-dk-sSI" customClass="ActionViewController" customModuleProvider="" sceneMemberID="viewController">
+ <layoutGuides>
+ <viewControllerLayoutGuide type="top" id="qkL-Od-lgU"/>
+ <viewControllerLayoutGuide type="bottom" id="n38-gi-rB5"/>
+ </layoutGuides>
+ <view key="view" contentMode="scaleToFill" id="zMn-AG-sqS">
+ <rect key="frame" x="0.0" y="0.0" width="320" height="528"/>
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <subviews>
+ <imageView userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" translatesAutoresizingMaskIntoConstraints="NO" id="9ga-4F-77Z">
+ <rect key="frame" x="0.0" y="64" width="320" height="464"/>
+ </imageView>
+ <navigationBar contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="NOA-Dm-cuz">
+ <rect key="frame" x="0.0" y="20" width="320" height="44"/>
+ <items>
+ <navigationItem id="3HJ-uW-3hn">
+ <barButtonItem key="leftBarButtonItem" title="Done" style="done" id="WYi-yp-eM6">
+ <connections>
+ <action selector="done" destination="ObA-dk-sSI" id="Qdu-qn-U6V"/>
+ </connections>
+ </barButtonItem>
+ </navigationItem>
+ </items>
+ </navigationBar>
+ </subviews>
+ <color key="backgroundColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
+ <constraints>
+ <constraint firstAttribute="trailing" secondItem="NOA-Dm-cuz" secondAttribute="trailing" id="A05-Pj-hrr"/>
+ <constraint firstItem="9ga-4F-77Z" firstAttribute="top" secondItem="NOA-Dm-cuz" secondAttribute="bottom" id="Fps-3D-QQW"/>
+ <constraint firstItem="NOA-Dm-cuz" firstAttribute="leading" secondItem="zMn-AG-sqS" secondAttribute="leading" id="HxO-8t-aoh"/>
+ <constraint firstAttribute="trailing" secondItem="9ga-4F-77Z" secondAttribute="trailing" id="Ozw-Hg-0yh"/>
+ <constraint firstItem="9ga-4F-77Z" firstAttribute="leading" secondItem="zMn-AG-sqS" secondAttribute="leading" id="XH5-ld-ONA"/>
+ <constraint firstItem="n38-gi-rB5" firstAttribute="top" secondItem="9ga-4F-77Z" secondAttribute="bottom" id="eQg-nn-Zy4"/>
+ <constraint firstItem="NOA-Dm-cuz" firstAttribute="top" secondItem="qkL-Od-lgU" secondAttribute="bottom" id="we0-1t-bgp"/>
+ </constraints>
+ </view>
+ <freeformSimulatedSizeMetrics key="simulatedDestinationMetrics"/>
+ <size key="freeformSize" width="320" height="528"/>
+ <connections>
+ <outlet property="imageView" destination="9ga-4F-77Z" id="5y6-5w-9QO"/>
+ <outlet property="view" destination="zMn-AG-sqS" id="Qma-de-2ek"/>
+ </connections>
+ </viewController>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="X47-rx-isc" userLabel="First Responder" sceneMemberID="firstResponder"/>
+ </objects>
+ <point key="canvasLocation" x="252" y="-124"/>
+ </scene>
+ </scenes>
+ <simulatedMetricsContainer key="defaultSimulatedMetrics">
+ <simulatedStatusBarMetrics key="statusBar"/>
+ <simulatedOrientationMetrics key="orientation"/>
+ <simulatedScreenMetrics key="destination" type="retina4"/>
+ </simulatedMetricsContainer>
+</document>
diff --git a/third_party/python/gyp/test/ios/extension/ExtensionContainer/AppDelegate.h b/third_party/python/gyp/test/ios/extension/ExtensionContainer/AppDelegate.h
new file mode 100644
index 0000000000..510e2300b1
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/ExtensionContainer/AppDelegate.h
@@ -0,0 +1,12 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+
+@interface AppDelegate : UIResponder <UIApplicationDelegate>
+
+@property (strong, nonatomic) UIWindow *window;
+
+@end
+
diff --git a/third_party/python/gyp/test/ios/extension/ExtensionContainer/AppDelegate.m b/third_party/python/gyp/test/ios/extension/ExtensionContainer/AppDelegate.m
new file mode 100644
index 0000000000..1197bc1bbc
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/ExtensionContainer/AppDelegate.m
@@ -0,0 +1,19 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "AppDelegate.h"
+
+@interface AppDelegate ()
+
+@end
+
+@implementation AppDelegate
+
+- (BOOL)application:(UIApplication*)application
+ didFinishLaunchingWithOptions:(NSDictionary*)launchOptions {
+ // Override point for customization after application launch.
+ return YES;
+}
+
+@end
diff --git a/third_party/python/gyp/test/ios/extension/ExtensionContainer/Base.lproj/Main.storyboard b/third_party/python/gyp/test/ios/extension/ExtensionContainer/Base.lproj/Main.storyboard
new file mode 100644
index 0000000000..e8f3cfb40c
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/ExtensionContainer/Base.lproj/Main.storyboard
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6162" systemVersion="14A238h" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
+ <dependencies>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6160"/>
+ </dependencies>
+ <scenes>
+ <!--View Controller-->
+ <scene sceneID="tne-QT-ifu">
+ <objects>
+ <viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="" sceneMemberID="viewController">
+ <layoutGuides>
+ <viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
+ <viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
+ </layoutGuides>
+ <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
+ <rect key="frame" x="0.0" y="0.0" width="480" height="480"/>
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+ </view>
+ </viewController>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
+ </objects>
+ </scene>
+ </scenes>
+</document>
diff --git a/third_party/python/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/AppIcon.appiconset/Contents.json b/third_party/python/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/AppIcon.appiconset/Contents.json
new file mode 100644
index 0000000000..f697f61f4a
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/AppIcon.appiconset/Contents.json
@@ -0,0 +1,53 @@
+{
+ "images" : [
+ {
+ "idiom" : "iphone",
+ "size" : "29x29",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "iphone",
+ "size" : "40x40",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "iphone",
+ "size" : "60x60",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "29x29",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "29x29",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "40x40",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "40x40",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "76x76",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "76x76",
+ "scale" : "2x"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
diff --git a/third_party/python/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/LaunchImage.launchimage/Contents.json b/third_party/python/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/LaunchImage.launchimage/Contents.json
new file mode 100644
index 0000000000..4458b40c05
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/LaunchImage.launchimage/Contents.json
@@ -0,0 +1,51 @@
+{
+ "images" : [
+ {
+ "orientation" : "portrait",
+ "idiom" : "iphone",
+ "extent" : "full-screen",
+ "minimum-system-version" : "7.0",
+ "scale" : "2x"
+ },
+ {
+ "orientation" : "portrait",
+ "idiom" : "iphone",
+ "subtype" : "retina4",
+ "extent" : "full-screen",
+ "minimum-system-version" : "7.0",
+ "scale" : "2x"
+ },
+ {
+ "orientation" : "portrait",
+ "idiom" : "ipad",
+ "extent" : "full-screen",
+ "minimum-system-version" : "7.0",
+ "scale" : "1x"
+ },
+ {
+ "orientation" : "landscape",
+ "idiom" : "ipad",
+ "extent" : "full-screen",
+ "minimum-system-version" : "7.0",
+ "scale" : "1x"
+ },
+ {
+ "orientation" : "portrait",
+ "idiom" : "ipad",
+ "extent" : "full-screen",
+ "minimum-system-version" : "7.0",
+ "scale" : "2x"
+ },
+ {
+ "orientation" : "landscape",
+ "idiom" : "ipad",
+ "extent" : "full-screen",
+ "minimum-system-version" : "7.0",
+ "scale" : "2x"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
diff --git a/third_party/python/gyp/test/ios/extension/ExtensionContainer/Info.plist b/third_party/python/gyp/test/ios/extension/ExtensionContainer/Info.plist
new file mode 100644
index 0000000000..31ccf4cc82
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/ExtensionContainer/Info.plist
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>ExtensionContainer</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.gyptest.extension</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSRequiresIPhoneOS</key>
+ <true/>
+ <key>UIMainStoryboardFile</key>
+ <string>Main</string>
+ <key>UIRequiredDeviceCapabilities</key>
+ <array>
+ <string>armv7</string>
+ </array>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/ios/extension/ExtensionContainer/ViewController.h b/third_party/python/gyp/test/ios/extension/ExtensionContainer/ViewController.h
new file mode 100644
index 0000000000..fad7754714
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/ExtensionContainer/ViewController.h
@@ -0,0 +1,11 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+
+@interface ViewController : UIViewController
+
+
+@end
+
diff --git a/third_party/python/gyp/test/ios/extension/ExtensionContainer/ViewController.m b/third_party/python/gyp/test/ios/extension/ExtensionContainer/ViewController.m
new file mode 100644
index 0000000000..3810fa9cba
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/ExtensionContainer/ViewController.m
@@ -0,0 +1,24 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "ViewController.h"
+
+@interface ViewController ()
+
+
+@end
+
+@implementation ViewController
+
+- (void)viewDidLoad {
+ [super viewDidLoad];
+ // Do any additional setup after loading the view, typically from a nib.
+}
+
+- (void)didReceiveMemoryWarning {
+ [super didReceiveMemoryWarning];
+ // Dispose of any resources that can be recreated.
+}
+
+@end
diff --git a/third_party/python/gyp/test/ios/extension/ExtensionContainer/main.m b/third_party/python/gyp/test/ios/extension/ExtensionContainer/main.m
new file mode 100644
index 0000000000..47aecb5148
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/ExtensionContainer/main.m
@@ -0,0 +1,13 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+#import <UIKit/UIKit.h>
+#import "AppDelegate.h"
+
+int main(int argc, char* argv[]) {
+ @autoreleasepool {
+ return UIApplicationMain(argc, argv, nil,
+ NSStringFromClass([AppDelegate class]));
+ }
+}
diff --git a/third_party/python/gyp/test/ios/extension/extension.gyp b/third_party/python/gyp/test/ios/extension/extension.gyp
new file mode 100644
index 0000000000..91c068413d
--- /dev/null
+++ b/third_party/python/gyp/test/ios/extension/extension.gyp
@@ -0,0 +1,91 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'make_global_settings': [
+ ['CC', '/usr/bin/clang'],
+ ['CXX', '/usr/bin/clang++'],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'ExtensionContainer',
+ 'product_name': 'ExtensionContainer',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'mac_bundle_resources': [
+ 'ExtensionContainer/Base.lproj/Main.storyboard',
+ ],
+ 'sources': [
+ 'ExtensionContainer/AppDelegate.h',
+ 'ExtensionContainer/AppDelegate.m',
+ 'ExtensionContainer/ViewController.h',
+ 'ExtensionContainer/ViewController.m',
+ 'ExtensionContainer/main.m',
+ ],
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/ExtensionContainer.app/PlugIns',
+ 'files': [
+ '<(PRODUCT_DIR)/ActionExtension.appex',
+ ]}],
+ 'dependencies': [
+ 'ActionExtension'
+ ],
+
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fobjc-abi-version=2',
+ ],
+ 'INFOPLIST_FILE': 'ExtensionContainer/Info.plist',
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'ARCHS': [ 'armv7' ],
+ 'SDKROOT': 'iphoneos',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
+ 'CODE_SIGNING_REQUIRED': 'NO',
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
+ },
+ },
+ {
+ 'target_name': 'ActionExtension',
+ 'product_name': 'ActionExtension',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'ios_app_extension': 1,
+ 'sources': [
+ 'ActionExtension/ActionViewController.h',
+ 'ActionExtension/ActionViewController.m',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+ '$(SDKROOT)/System/Library/Frameworks/MobileCoreServices.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fobjc-abi-version=2',
+ ],
+ 'INFOPLIST_FILE': 'ActionExtension/Info.plist',
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'ARCHS': [ 'armv7' ],
+ 'SDKROOT': 'iphoneos',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
+ 'CODE_SIGNING_REQUIRED': 'NO',
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
+ },
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/ios/framework/framework.gyp b/third_party/python/gyp/test/ios/framework/framework.gyp
new file mode 100644
index 0000000000..2c6fdd5b27
--- /dev/null
+++ b/third_party/python/gyp/test/ios/framework/framework.gyp
@@ -0,0 +1,43 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'iOSFramework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'iOSFramework/iOSFramework.h',
+ 'iOSFramework/Thing.h',
+ 'iOSFramework/Thing.m',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+ ],
+ },
+ 'mac_framework_headers': [
+ # Using two headers here tests mac_tool.py NextGreaterPowerOf2.
+ 'iOSFramework/iOSFramework.h',
+ 'iOSFramework/Thing.h',
+ ],
+ 'mac_framework_dirs': [
+ '$(SDKROOT)/../../Library/Frameworks',
+ ],
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fobjc-abi-version=2',
+ ],
+ 'INFOPLIST_FILE': 'iOSFramework/Info.plist',
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'SDKROOT': 'iphoneos',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '8.0',
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
+ 'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/ios/framework/iOSFramework/Info.plist b/third_party/python/gyp/test/ios/framework/iOSFramework/Info.plist
new file mode 100644
index 0000000000..d3de8eefb6
--- /dev/null
+++ b/third_party/python/gyp/test/ios/framework/iOSFramework/Info.plist
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>$(EXECUTABLE_NAME)</string>
+ <key>CFBundleIdentifier</key>
+ <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>$(PRODUCT_NAME)</string>
+ <key>CFBundlePackageType</key>
+ <string>FMWK</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>$(CURRENT_PROJECT_VERSION)</string>
+ <key>NSPrincipalClass</key>
+ <string></string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/ios/framework/iOSFramework/Thing.h b/third_party/python/gyp/test/ios/framework/iOSFramework/Thing.h
new file mode 100644
index 0000000000..a34e908fc9
--- /dev/null
+++ b/third_party/python/gyp/test/ios/framework/iOSFramework/Thing.h
@@ -0,0 +1,10 @@
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+@interface Thing : NSObject
+
++ (instancetype)thing;
+
+- (void)sayHello;
+
+@end
diff --git a/third_party/python/gyp/test/ios/framework/iOSFramework/Thing.m b/third_party/python/gyp/test/ios/framework/iOSFramework/Thing.m
new file mode 100644
index 0000000000..5b2b54925e
--- /dev/null
+++ b/third_party/python/gyp/test/ios/framework/iOSFramework/Thing.m
@@ -0,0 +1,22 @@
+#import "Thing.h"
+
+@interface Thing ()
+
+@end
+
+@implementation Thing
+
++ (instancetype)thing {
+ static Thing* thing = nil;
+ static dispatch_once_t onceToken;
+ dispatch_once(&onceToken, ^{
+ thing = [[[self class] alloc] init];
+ });
+ return thing;
+}
+
+- (void)sayHello {
+ NSLog(@"Hello World");
+}
+
+@end
diff --git a/third_party/python/gyp/test/ios/framework/iOSFramework/iOSFramework.h b/third_party/python/gyp/test/ios/framework/iOSFramework/iOSFramework.h
new file mode 100644
index 0000000000..e86b524d17
--- /dev/null
+++ b/third_party/python/gyp/test/ios/framework/iOSFramework/iOSFramework.h
@@ -0,0 +1,9 @@
+#import <UIKit/UIKit.h>
+
+//! Project version number for iOSFramework.
+FOUNDATION_EXPORT double iOSFrameworkVersionNumber;
+
+//! Project version string for iOSFramework.
+FOUNDATION_EXPORT const unsigned char iOSFrameworkVersionString[];
+
+#import <iOSFramework/Thing.h>
diff --git a/third_party/python/gyp/test/ios/gyptest-app-ios-assets-catalog.py b/third_party/python/gyp/test/ios/gyptest-app-ios-assets-catalog.py
new file mode 100755
index 0000000000..efd96ac752
--- /dev/null
+++ b/third_party/python/gyp/test/ios/gyptest-app-ios-assets-catalog.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that iOS app bundles with asset catalogs are built correctly.
+"""
+
+import TestGyp
+import TestMac
+
+import os.path
+import sys
+
+# Xcode support for asset catalogs was introduced in Xcode 6.0
+if sys.platform == 'darwin' and TestMac.Xcode.Version() >= '0600':
+ test_gyp_path = 'test-assets-catalog.gyp'
+ test_app_path = 'Test App Assets Catalog Gyp.app'
+
+ test = TestGyp.TestGyp(formats=['xcode', 'ninja'])
+ test.run_gyp(test_gyp_path, chdir='app-bundle')
+ test.build(test_gyp_path, test.ALL, chdir='app-bundle')
+
+ # Test that the extension is .app
+ test.built_file_must_exist(
+ os.path.join(test_app_path, 'Test App Assets Catalog Gyp'),
+ chdir='app-bundle')
+
+ # Info.plist
+ info_plist = test.built_file_path(
+ os.path.join(test_app_path, 'Info.plist'),
+ chdir='app-bundle')
+ # Resources
+ test.built_file_must_exist(
+ os.path.join(test_app_path, 'English.lproj/InfoPlist.strings'),
+ chdir='app-bundle')
+ test.built_file_must_exist(
+ os.path.join(test_app_path, 'English.lproj/MainMenu.nib'),
+ chdir='app-bundle')
+ test.built_file_must_exist(
+ os.path.join(test_app_path, 'English.lproj/Main_iPhone.storyboardc'),
+ chdir='app-bundle')
+ test.built_file_must_exist(
+ os.path.join(test_app_path, 'Assets.car'),
+ chdir='app-bundle')
+
+ # Packaging
+ test.built_file_must_exist(
+ os.path.join(test_app_path, 'PkgInfo'),
+ chdir='app-bundle')
+ test.built_file_must_match(
+ os.path.join(test_app_path, 'PkgInfo'), 'APPLause',
+ chdir='app-bundle')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/ios/gyptest-app-ios.py b/third_party/python/gyp/test/ios/gyptest-app-ios.py
new file mode 100755
index 0000000000..99f9e865dc
--- /dev/null
+++ b/third_party/python/gyp/test/ios/gyptest-app-ios.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that ios app bundles are built correctly.
+"""
+
+import TestGyp
+
+import subprocess
+import sys
+
+def CheckFileXMLPropertyList(file):
+ output = subprocess.check_output(['file', file])
+ if not 'XML 1.0 document text' in output:
+ print('File: Expected XML 1.0 document text, got %s' % output)
+ test.fail_test()
+
+def CheckFileBinaryPropertyList(file):
+ output = subprocess.check_output(['file', file])
+ if not 'Apple binary property list' in output:
+ print('File: Expected Apple binary property list, got %s' % output)
+ test.fail_test()
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['xcode', 'ninja'])
+
+ test.run_gyp('test.gyp', chdir='app-bundle')
+
+ test.build('test.gyp', test.ALL, chdir='app-bundle')
+
+ # Test that the extension is .app
+ test.built_file_must_exist('Test App Gyp.app/Test App Gyp',
+ chdir='app-bundle')
+
+ # Info.plist
+ info_plist = test.built_file_path('Test App Gyp.app/Info.plist',
+ chdir='app-bundle')
+ test.built_file_must_exist(info_plist)
+ CheckFileBinaryPropertyList(info_plist)
+
+ # XML Info.plist
+ info_plist = test.built_file_path('Test App Gyp XML.app/Info.plist',
+ chdir='app-bundle')
+ CheckFileXMLPropertyList(info_plist)
+
+ # Resources
+ strings_file = test.built_file_path(
+ 'Test App Gyp.app/English.lproj/InfoPlist.strings',
+ chdir='app-bundle')
+ test.built_file_must_exist(strings_file)
+ CheckFileBinaryPropertyList(strings_file)
+
+ extra_plist_file = test.built_file_path(
+ 'Test App Gyp.app/English.lproj/LanguageMap.plist',
+ chdir='app-bundle')
+ test.built_file_must_exist(extra_plist_file)
+ CheckFileBinaryPropertyList(extra_plist_file)
+
+ test.built_file_must_exist(
+ 'Test App Gyp.app/English.lproj/MainMenu.nib',
+ chdir='app-bundle')
+ test.built_file_must_exist(
+ 'Test App Gyp.app/English.lproj/Main_iPhone.storyboardc',
+ chdir='app-bundle')
+
+ # Packaging
+ test.built_file_must_exist('Test App Gyp.app/PkgInfo',
+ chdir='app-bundle')
+ test.built_file_must_match('Test App Gyp.app/PkgInfo', 'APPLause',
+ chdir='app-bundle')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/ios/gyptest-archs.py b/third_party/python/gyp/test/ios/gyptest-archs.py
new file mode 100644
index 0000000000..c3340431bd
--- /dev/null
+++ b/third_party/python/gyp/test/ios/gyptest-archs.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that device and simulator bundles are built correctly.
+"""
+
+import TestGyp
+import TestMac
+
+import collections
+import sys
+
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
+
+ if test.format == 'xcode':
+ # This test appears to hang flakily.
+ test.skip_test() # bug=532
+
+ test_cases = [
+ ('Default', 'TestArch32Bits', ['i386']),
+ ('Default-iphoneos', 'TestArch32Bits', ['armv7']),
+ ]
+
+ if TestMac.Xcode.Version() < '0510':
+ test_cases.extend([
+ ('Default', 'TestNoArchs', ['i386']),
+ ('Default-iphoneos', 'TestNoArchs', ['armv7'])])
+
+ if TestMac.Xcode.Version() >= '0500':
+ test_cases.extend([
+ ('Default', 'TestArch64Bits', ['x86_64']),
+ ('Default', 'TestMultiArchs', ['i386', 'x86_64']),
+ ('Default-iphoneos', 'TestArch64Bits', ['arm64']),
+ ('Default-iphoneos', 'TestMultiArchs', ['armv7', 'arm64'])])
+
+ test.run_gyp('test-archs.gyp', chdir='app-bundle')
+ for configuration, target, archs in test_cases:
+ is_device_build = configuration.endswith('-iphoneos')
+
+ kwds = collections.defaultdict(list)
+ if test.format == 'xcode':
+ if is_device_build:
+ configuration, sdk = configuration.split('-')
+ kwds['arguments'].extend(['-sdk', sdk])
+ if TestMac.Xcode.Version() < '0500':
+ kwds['arguments'].extend(['-arch', archs[0]])
+
+ test.set_configuration(configuration)
+ filename = '%s.app/%s' % (target, target)
+ test.build('test-archs.gyp', target, chdir='app-bundle', **kwds)
+ result_file = test.built_file_path(filename, chdir='app-bundle')
+
+ test.must_exist(result_file)
+ TestMac.CheckFileType(test, result_file, archs)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/ios/gyptest-copies-with-xcode-envvars.py b/third_party/python/gyp/test/ios/gyptest-copies-with-xcode-envvars.py
new file mode 100644
index 0000000000..88d9e028b8
--- /dev/null
+++ b/third_party/python/gyp/test/ios/gyptest-copies-with-xcode-envvars.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2016 Mark Callow. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that files are copied to the correct destinations when those
+destinations are specified using environment variables available in
+Xcode's PBXCopyFilesBuildPhase.
+"""
+
+import TestGyp
+
+import os
+import stat
+import sys
+
+
+test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
+
+if sys.platform == 'darwin':
+ test.run_gyp('copies-with-xcode-envvars.gyp',
+ chdir='copies-with-xcode-envvars')
+
+ test.build('copies-with-xcode-envvars.gyp', chdir='copies-with-xcode-envvars')
+
+ wrapper_name = 'copies-with-xcode-envvars.app/'
+ contents_path = wrapper_name
+ out_path = test.built_file_path('file0', chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file0 contents\n')
+ out_path = test.built_file_path(wrapper_name + 'file1',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file1 contents\n')
+ out_path = test.built_file_path(contents_path + 'file2',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file2 contents\n')
+ out_path = test.built_file_path(contents_path + 'file3',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file3 contents\n')
+ out_path = test.built_file_path(contents_path + 'testimages/file4',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file4 contents\n')
+ out_path = test.built_file_path(contents_path + 'Java/file5',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file5 contents\n')
+ out_path = test.built_file_path(contents_path + 'Frameworks/file6',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file6 contents\n')
+ out_path = test.built_file_path(contents_path + 'Frameworks/file7',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file7 contents\n')
+ out_path = test.built_file_path(contents_path + 'SharedFrameworks/file8',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file8 contents\n')
+ out_path = test.built_file_path(contents_path + 'SharedSupport/file9',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file9 contents\n')
+ out_path = test.built_file_path(contents_path + 'PlugIns/file10',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file10 contents\n')
+ out_path = test.built_file_path(contents_path + 'XPCServices/file11',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file11 contents\n')
+ test.pass_test()
diff --git a/third_party/python/gyp/test/ios/gyptest-crosscompile.py b/third_party/python/gyp/test/ios/gyptest-crosscompile.py
new file mode 100644
index 0000000000..a0816836e5
--- /dev/null
+++ b/third_party/python/gyp/test/ios/gyptest-crosscompile.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that tools are built correctly.
+"""
+
+import TestGyp
+import TestMac
+
+import sys
+import os
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
+
+ oldenv = os.environ.copy()
+ try:
+ os.environ['GYP_CROSSCOMPILE'] = '1'
+ test.run_gyp('test-crosscompile.gyp', chdir='app-bundle')
+ finally:
+ os.environ.clear()
+ os.environ.update(oldenv)
+
+ test.set_configuration('Default')
+ test.build('test-crosscompile.gyp', 'TestHost', chdir='app-bundle')
+ result_file = test.built_file_path('TestHost', chdir='app-bundle')
+ test.must_exist(result_file)
+ TestMac.CheckFileType(test, result_file, ['x86_64'])
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/ios/gyptest-deployment-target.py b/third_party/python/gyp/test/ios/gyptest-deployment-target.py
new file mode 100644
index 0000000000..6c09d9dc04
--- /dev/null
+++ b/third_party/python/gyp/test/ios/gyptest-deployment-target.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that IPHONEOS_DEPLOYMENT_TARGET works.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['make', 'ninja', 'xcode'])
+
+ test.run_gyp('deployment-target.gyp', chdir='deployment-target')
+
+ test.build('deployment-target.gyp', test.ALL, chdir='deployment-target')
+
+ test.pass_test()
+
diff --git a/third_party/python/gyp/test/ios/gyptest-extension.py b/third_party/python/gyp/test/ios/gyptest-extension.py
new file mode 100755
index 0000000000..bb239ae5b8
--- /dev/null
+++ b/third_party/python/gyp/test/ios/gyptest-extension.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that ios app extensions are built correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+import TestMac
+import subprocess
+import sys
+
+def CheckStrip(p, expected):
+ if expected not in subprocess.check_output(['nm','-gU', p]):
+ print(expected + " shouldn't get stripped out.")
+ test.fail_test()
+
+def CheckEntrypoint(p, expected):
+ if expected not in subprocess.check_output(['nm', p]):
+ print(expected + " not found.")
+ test.fail_test()
+
+if sys.platform == 'darwin' and TestMac.Xcode.Version() >= "0600":
+
+ test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
+
+ if test.format in ('ninja', 'xcode-ninja'):
+ test.skip_test() # bug=534
+
+ test.run_gyp('extension.gyp', chdir='extension')
+
+ test.build('extension.gyp', 'ExtensionContainer', chdir='extension')
+
+ # Test that the extension is .appex
+ test.built_file_must_exist(
+ 'ExtensionContainer.app/PlugIns/ActionExtension.appex',
+ chdir='extension')
+
+ path = test.built_file_path(
+ 'ExtensionContainer.app/PlugIns/ActionExtension.appex/ActionExtension',
+ chdir='extension')
+ CheckStrip(path, "ActionViewController")
+ CheckEntrypoint(path, "_NSExtensionMain")
+
+ test.pass_test()
+
diff --git a/third_party/python/gyp/test/ios/gyptest-framework.py b/third_party/python/gyp/test/ios/gyptest-framework.py
new file mode 100755
index 0000000000..a6dd857b20
--- /dev/null
+++ b/third_party/python/gyp/test/ios/gyptest-framework.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that ios app frameworks are built correctly.
+"""
+
+import TestGyp
+import TestMac
+import subprocess
+import sys
+
+if sys.platform == 'darwin' and TestMac.Xcode.Version() >= "0700":
+
+ test = TestGyp.TestGyp(formats=['ninja'])
+ if test.format == 'xcode-ninja':
+ test.skip_test()
+
+ test.run_gyp('framework.gyp', chdir='framework')
+
+ test.build('framework.gyp', 'iOSFramework', chdir='framework')
+
+ test.built_file_must_exist(
+ 'iOSFramework.framework/Headers/iOSFramework.h',
+ chdir='framework')
+ test.built_file_must_exist(
+ 'iOSFramework.framework/Headers/Thing.h',
+ chdir='framework')
+ test.built_file_must_exist(
+ 'iOSFramework.framework/iOSFramework',
+ chdir='framework')
+
+ test.pass_test()
+
diff --git a/third_party/python/gyp/test/ios/gyptest-per-config-settings.py b/third_party/python/gyp/test/ios/gyptest-per-config-settings.py
new file mode 100644
index 0000000000..c3a22e0562
--- /dev/null
+++ b/third_party/python/gyp/test/ios/gyptest-per-config-settings.py
@@ -0,0 +1,190 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that device and simulator bundles are built correctly.
+"""
+
+from __future__ import print_function
+
+import plistlib
+import TestGyp
+import os
+import struct
+import subprocess
+import sys
+import tempfile
+import TestMac
+
+print("This test is currently disabled: https://crbug.com/483696.")
+sys.exit(0)
+
+def CheckFileType(file, expected):
+ proc = subprocess.Popen(['lipo', '-info', file], stdout=subprocess.PIPE)
+ o = proc.communicate()[0].strip()
+ assert not proc.returncode
+ if not expected in o:
+ print('File: Expected %s, got %s' % (expected, o))
+ test.fail_test()
+
+def HasCerts():
+ # The bots do not have signing certs, so only check signatures when certs
+ # are available.
+ proc = subprocess.Popen(['security','find-identity','-p', 'codesigning',
+ '-v'], stdout=subprocess.PIPE)
+ return "0 valid identities found" not in proc.communicate()[0].strip()
+
+def CheckSignature(file):
+ proc = subprocess.Popen(['codesign', '-v', file], stdout=subprocess.PIPE)
+ o = proc.communicate()[0].strip()
+ assert not proc.returncode
+ if "code object is not signed at all" in o:
+ print('File %s not properly signed.' % (file))
+ test.fail_test()
+
+def CheckEntitlements(file, expected_entitlements):
+ with tempfile.NamedTemporaryFile() as temp:
+ proc = subprocess.Popen(['codesign', '--display', '--entitlements',
+ temp.name, file], stdout=subprocess.PIPE)
+ o = proc.communicate()[0].strip()
+ assert not proc.returncode
+ data = temp.read()
+ entitlements = ParseEntitlements(data)
+ if not entitlements:
+ print('No valid entitlements found in %s.' % (file))
+ test.fail_test()
+ if entitlements != expected_entitlements:
+ print('Unexpected entitlements found in %s.' % (file))
+ test.fail_test()
+
+def ParseEntitlements(data):
+ if len(data) < 8:
+ return None
+ magic, length = struct.unpack('>II', data[:8])
+ if magic != 0xfade7171 or length != len(data):
+ return None
+ return data[8:]
+
+def GetXcodeVersionValue(type):
+ args = ['xcodebuild', '-version', '-sdk', 'iphoneos', type]
+ job = subprocess.Popen(args, stdout=subprocess.PIPE)
+ return job.communicate()[0].strip()
+
+def GetMachineBuild():
+ args = ['sw_vers', '-buildVersion']
+ job = subprocess.Popen(args, stdout=subprocess.PIPE)
+ return job.communicate()[0].strip()
+
+def CheckPlistvalue(plist, key, expected):
+ if key not in plist:
+ print('%s not set in plist' % key)
+ test.fail_test()
+ return
+ actual = plist[key]
+ if actual != expected:
+ print('File: Expected %s, got %s for %s' % (expected, actual, key))
+ test.fail_test()
+
+def CheckPlistNotSet(plist, key):
+ if key in plist:
+ print('%s should not be set in plist' % key)
+ test.fail_test()
+ return
+
+def ConvertBinaryPlistToXML(path):
+ proc = subprocess.call(['plutil', '-convert', 'xml1', path],
+ stdout=subprocess.PIPE)
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
+
+ test.run_gyp('test-device.gyp', chdir='app-bundle')
+
+ test_configs = ['Default-iphoneos', 'Default']
+ for configuration in test_configs:
+ test.set_configuration(configuration)
+ test.build('test-device.gyp', 'test_app', chdir='app-bundle')
+ result_file = test.built_file_path('Test App Gyp.app/Test App Gyp',
+ chdir='app-bundle')
+ test.must_exist(result_file)
+ info_plist = test.built_file_path('Test App Gyp.app/Info.plist',
+ chdir='app-bundle')
+ plist = plistlib.readPlist(info_plist)
+ xcode_version = TestMac.Xcode.Version()
+ if xcode_version >= '0720':
+ if len(plist) != 23:
+ print('plist should have 23 entries, but it has %s' % len(plist))
+ test.fail_test()
+
+ # Values that will hopefully never change.
+ CheckPlistvalue(plist, 'CFBundleDevelopmentRegion', 'English')
+ CheckPlistvalue(plist, 'CFBundleExecutable', 'Test App Gyp')
+ CheckPlistvalue(plist, 'CFBundleIdentifier', 'com.google.Test App Gyp')
+ CheckPlistvalue(plist, 'CFBundleInfoDictionaryVersion', '6.0')
+ CheckPlistvalue(plist, 'CFBundleName', 'Test App Gyp')
+ CheckPlistvalue(plist, 'CFBundlePackageType', 'APPL')
+ CheckPlistvalue(plist, 'CFBundleShortVersionString', '1.0')
+ CheckPlistvalue(plist, 'CFBundleSignature', 'ause')
+ CheckPlistvalue(plist, 'CFBundleVersion', '1')
+ CheckPlistvalue(plist, 'NSMainNibFile', 'MainMenu')
+ CheckPlistvalue(plist, 'NSPrincipalClass', 'NSApplication')
+ CheckPlistvalue(plist, 'UIDeviceFamily', [1, 2])
+
+ # Values that get pulled from xcodebuild.
+ machine_build = GetMachineBuild()
+ platform_version = GetXcodeVersionValue('ProductVersion')
+ sdk_build = GetXcodeVersionValue('ProductBuildVersion')
+ xcode_build = TestMac.Xcode.Build()
+
+ # Xcode keeps changing what gets included in executable plists, and it
+ # changes between device and simulator builds. Allow the strictest tests for
+ # Xcode 7.2 and above.
+ if xcode_version >= '0720':
+ CheckPlistvalue(plist, 'BuildMachineOSBuild', machine_build)
+ CheckPlistvalue(plist, 'DTCompiler', 'com.apple.compilers.llvm.clang.1_0')
+ CheckPlistvalue(plist, 'DTPlatformVersion', platform_version)
+ CheckPlistvalue(plist, 'DTSDKBuild', sdk_build)
+ CheckPlistvalue(plist, 'DTXcode', xcode_version)
+ CheckPlistvalue(plist, 'DTXcodeBuild', xcode_build)
+ CheckPlistvalue(plist, 'MinimumOSVersion', '8.0')
+
+
+ if configuration == 'Default-iphoneos':
+ platform_name = 'iphoneos'
+ CheckFileType(result_file, 'armv7')
+ CheckPlistvalue(plist, 'CFBundleSupportedPlatforms', ['iPhoneOS'])
+ # Apple keeps changing their mind.
+ if xcode_version >= '0720':
+ CheckPlistvalue(plist, 'DTPlatformBuild', sdk_build)
+ else:
+ platform_name = 'iphonesimulator'
+ CheckFileType(result_file, 'i386')
+ CheckPlistvalue(plist, 'CFBundleSupportedPlatforms', ['iPhoneSimulator'])
+ if xcode_version >= '0720':
+ CheckPlistvalue(plist, 'DTPlatformBuild', '')
+
+ CheckPlistvalue(plist, 'DTPlatformName', platform_name)
+ CheckPlistvalue(plist, 'DTSDKName', platform_name + platform_version)
+
+
+ if HasCerts() and configuration == 'Default-iphoneos':
+ test.build('test-device.gyp', 'sig_test', chdir='app-bundle')
+ result_file = test.built_file_path('sigtest.app/sigtest',
+ chdir='app-bundle')
+ CheckSignature(result_file)
+ info_plist = test.built_file_path('sigtest.app/Info.plist',
+ chdir='app-bundle')
+
+ plist = plistlib.readPlist(info_plist)
+ CheckPlistvalue(plist, 'UIDeviceFamily', [1])
+
+ entitlements_file = test.built_file_path('sig_test.xcent',
+ chdir='app-bundle')
+ if os.path.isfile(entitlements_file):
+ expected_entitlements = open(entitlements_file).read()
+ CheckEntitlements(result_file, expected_entitlements)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/ios/gyptest-watch.py b/third_party/python/gyp/test/ios/gyptest-watch.py
new file mode 100755
index 0000000000..39bab49bb0
--- /dev/null
+++ b/third_party/python/gyp/test/ios/gyptest-watch.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that ios watch extensions and apps are built correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+import TestMac
+
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
+if sys.platform == 'darwin' and TestMac.Xcode.Version() >= "0620":
+ test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
+
+ test.run_gyp('watch.gyp', chdir='watch')
+
+ test.build(
+ 'watch.gyp',
+ 'WatchContainer',
+ chdir='watch')
+
+ # Test that the extension exists
+ test.built_file_must_exist(
+ 'WatchContainer.app/PlugIns/WatchKitExtension.appex',
+ chdir='watch')
+
+ # Test that the watch app exists
+ test.built_file_must_exist(
+ 'WatchContainer.app/PlugIns/WatchKitExtension.appex/WatchApp.app',
+ chdir='watch')
+
+ test.pass_test()
+
diff --git a/third_party/python/gyp/test/ios/gyptest-xcode-ninja.py b/third_party/python/gyp/test/ios/gyptest-xcode-ninja.py
new file mode 100644
index 0000000000..609db8c98f
--- /dev/null
+++ b/third_party/python/gyp/test/ios/gyptest-xcode-ninja.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that the xcode-ninja GYP_GENERATOR runs and builds correctly.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['xcode'])
+
+ # Run ninja and xcode-ninja
+ test.formats = ['ninja', 'xcode-ninja']
+ test.run_gyp('test.gyp', chdir='app-bundle')
+
+ # If it builds the target, it works.
+ test.build('test.ninja.gyp', chdir='app-bundle')
+ test.pass_test()
diff --git a/third_party/python/gyp/test/ios/watch/WatchApp/Images.xcassets/AppIcon.appiconset/Contents.json b/third_party/python/gyp/test/ios/watch/WatchApp/Images.xcassets/AppIcon.appiconset/Contents.json
new file mode 100644
index 0000000000..562c5eff97
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchApp/Images.xcassets/AppIcon.appiconset/Contents.json
@@ -0,0 +1,62 @@
+{
+ "images" : [
+ {
+ "size" : "14.5x14.5",
+ "idiom" : "watch",
+ "scale" : "2x",
+ "role" : "notificationCenter",
+ "subtype" : "38mm"
+ },
+ {
+ "size" : "18x18",
+ "idiom" : "watch",
+ "scale" : "2x",
+ "role" : "notificationCenter",
+ "subtype" : "42mm"
+ },
+ {
+ "size" : "29x29",
+ "idiom" : "watch",
+ "role" : "companionSettings",
+ "scale" : "2x"
+ },
+ {
+ "size" : "29.3x29.3",
+ "idiom" : "watch",
+ "role" : "companionSettings",
+ "scale" : "3x"
+ },
+ {
+ "size" : "40x40",
+ "idiom" : "watch",
+ "scale" : "2x",
+ "role" : "appLauncher",
+ "subtype" : "38mm"
+ },
+ {
+ "size" : "44x44",
+ "idiom" : "watch",
+ "scale" : "2x",
+ "role" : "appLauncher",
+ "subtype" : "42mm"
+ },
+ {
+ "size" : "86x86",
+ "idiom" : "watch",
+ "scale" : "2x",
+ "role" : "quickLook",
+ "subtype" : "38mm"
+ },
+ {
+ "size" : "98x98",
+ "idiom" : "watch",
+ "scale" : "2x",
+ "role" : "quickLook",
+ "subtype" : "42mm"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
diff --git a/third_party/python/gyp/test/ios/watch/WatchApp/Images.xcassets/LaunchImage.launchimage/Contents.json b/third_party/python/gyp/test/ios/watch/WatchApp/Images.xcassets/LaunchImage.launchimage/Contents.json
new file mode 100644
index 0000000000..ed123feff6
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchApp/Images.xcassets/LaunchImage.launchimage/Contents.json
@@ -0,0 +1,24 @@
+{
+ "images" : [
+ {
+ "orientation" : "portrait",
+ "idiom" : "watch",
+ "extent" : "full-screen",
+ "minimum-system-version" : "8.0",
+ "subtype" : "38mm",
+ "scale" : "2x"
+ },
+ {
+ "orientation" : "portrait",
+ "idiom" : "watch",
+ "extent" : "full-screen",
+ "minimum-system-version" : "8.0",
+ "subtype" : "42mm",
+ "scale" : "2x"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
diff --git a/third_party/python/gyp/test/ios/watch/WatchApp/Info.plist b/third_party/python/gyp/test/ios/watch/WatchApp/Info.plist
new file mode 100644
index 0000000000..3cf65b8285
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchApp/Info.plist
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>WatchApp</string>
+ <key>CFBundleExecutable</key>
+ <string>$(EXECUTABLE_NAME)</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.gyptest.watch.watchapp</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>$(PRODUCT_NAME)</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationPortraitUpsideDown</string>
+ </array>
+ <key>WKCompanionAppBundleIdentifier</key>
+ <string>com.google.gyptest.watch</string>
+ <key>WKWatchKitApp</key>
+ <true/>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/ios/watch/WatchApp/Interface.storyboard b/third_party/python/gyp/test/ios/watch/WatchApp/Interface.storyboard
new file mode 100644
index 0000000000..5f52cb6c90
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchApp/Interface.storyboard
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder.WatchKit.Storyboard" version="3.0" toolsVersion="6221" systemVersion="13E28" targetRuntime="watchKit" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="AgC-eL-Hgc">
+ <dependencies>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6213"/>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBWatchKitPlugin" version="3733"/>
+ </dependencies>
+ <scenes>
+ <!--Interface Controller-->
+ <scene sceneID="aou-V4-d1y">
+ <objects>
+ <controller id="AgC-eL-Hgc" customClass="InterfaceController" customModuleProvider=""/>
+ </objects>
+ </scene>
+ </scenes>
+</document>
diff --git a/third_party/python/gyp/test/ios/watch/WatchContainer/AppDelegate.h b/third_party/python/gyp/test/ios/watch/WatchContainer/AppDelegate.h
new file mode 100644
index 0000000000..510e2300b1
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchContainer/AppDelegate.h
@@ -0,0 +1,12 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+
+@interface AppDelegate : UIResponder <UIApplicationDelegate>
+
+@property (strong, nonatomic) UIWindow *window;
+
+@end
+
diff --git a/third_party/python/gyp/test/ios/watch/WatchContainer/AppDelegate.m b/third_party/python/gyp/test/ios/watch/WatchContainer/AppDelegate.m
new file mode 100644
index 0000000000..1197bc1bbc
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchContainer/AppDelegate.m
@@ -0,0 +1,19 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "AppDelegate.h"
+
+@interface AppDelegate ()
+
+@end
+
+@implementation AppDelegate
+
+- (BOOL)application:(UIApplication*)application
+ didFinishLaunchingWithOptions:(NSDictionary*)launchOptions {
+ // Override point for customization after application launch.
+ return YES;
+}
+
+@end
diff --git a/third_party/python/gyp/test/ios/watch/WatchContainer/Base.lproj/Main.storyboard b/third_party/python/gyp/test/ios/watch/WatchContainer/Base.lproj/Main.storyboard
new file mode 100644
index 0000000000..e8f3cfb40c
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchContainer/Base.lproj/Main.storyboard
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6162" systemVersion="14A238h" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
+ <dependencies>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6160"/>
+ </dependencies>
+ <scenes>
+ <!--View Controller-->
+ <scene sceneID="tne-QT-ifu">
+ <objects>
+ <viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="" sceneMemberID="viewController">
+ <layoutGuides>
+ <viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
+ <viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
+ </layoutGuides>
+ <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
+ <rect key="frame" x="0.0" y="0.0" width="480" height="480"/>
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+ </view>
+ </viewController>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
+ </objects>
+ </scene>
+ </scenes>
+</document>
diff --git a/third_party/python/gyp/test/ios/watch/WatchContainer/Images.xcassets/AppIcon.appiconset/Contents.json b/third_party/python/gyp/test/ios/watch/WatchContainer/Images.xcassets/AppIcon.appiconset/Contents.json
new file mode 100644
index 0000000000..f697f61f4a
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchContainer/Images.xcassets/AppIcon.appiconset/Contents.json
@@ -0,0 +1,53 @@
+{
+ "images" : [
+ {
+ "idiom" : "iphone",
+ "size" : "29x29",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "iphone",
+ "size" : "40x40",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "iphone",
+ "size" : "60x60",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "29x29",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "29x29",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "40x40",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "40x40",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "76x76",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "76x76",
+ "scale" : "2x"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
diff --git a/third_party/python/gyp/test/ios/watch/WatchContainer/Images.xcassets/LaunchImage.launchimage/Contents.json b/third_party/python/gyp/test/ios/watch/WatchContainer/Images.xcassets/LaunchImage.launchimage/Contents.json
new file mode 100644
index 0000000000..4458b40c05
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchContainer/Images.xcassets/LaunchImage.launchimage/Contents.json
@@ -0,0 +1,51 @@
+{
+ "images" : [
+ {
+ "orientation" : "portrait",
+ "idiom" : "iphone",
+ "extent" : "full-screen",
+ "minimum-system-version" : "7.0",
+ "scale" : "2x"
+ },
+ {
+ "orientation" : "portrait",
+ "idiom" : "iphone",
+ "subtype" : "retina4",
+ "extent" : "full-screen",
+ "minimum-system-version" : "7.0",
+ "scale" : "2x"
+ },
+ {
+ "orientation" : "portrait",
+ "idiom" : "ipad",
+ "extent" : "full-screen",
+ "minimum-system-version" : "7.0",
+ "scale" : "1x"
+ },
+ {
+ "orientation" : "landscape",
+ "idiom" : "ipad",
+ "extent" : "full-screen",
+ "minimum-system-version" : "7.0",
+ "scale" : "1x"
+ },
+ {
+ "orientation" : "portrait",
+ "idiom" : "ipad",
+ "extent" : "full-screen",
+ "minimum-system-version" : "7.0",
+ "scale" : "2x"
+ },
+ {
+ "orientation" : "landscape",
+ "idiom" : "ipad",
+ "extent" : "full-screen",
+ "minimum-system-version" : "7.0",
+ "scale" : "2x"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
diff --git a/third_party/python/gyp/test/ios/watch/WatchContainer/Info.plist b/third_party/python/gyp/test/ios/watch/WatchContainer/Info.plist
new file mode 100644
index 0000000000..a40319c78e
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchContainer/Info.plist
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>WatchContainer</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.gyptest.watch</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSRequiresIPhoneOS</key>
+ <true/>
+ <key>UIMainStoryboardFile</key>
+ <string>Main</string>
+ <key>UIRequiredDeviceCapabilities</key>
+ <array>
+ <string>armv7</string>
+ </array>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/ios/watch/WatchContainer/ViewController.h b/third_party/python/gyp/test/ios/watch/WatchContainer/ViewController.h
new file mode 100644
index 0000000000..fad7754714
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchContainer/ViewController.h
@@ -0,0 +1,11 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+
+@interface ViewController : UIViewController
+
+
+@end
+
diff --git a/third_party/python/gyp/test/ios/watch/WatchContainer/ViewController.m b/third_party/python/gyp/test/ios/watch/WatchContainer/ViewController.m
new file mode 100644
index 0000000000..3810fa9cba
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchContainer/ViewController.m
@@ -0,0 +1,24 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "ViewController.h"
+
+@interface ViewController ()
+
+
+@end
+
+@implementation ViewController
+
+- (void)viewDidLoad {
+ [super viewDidLoad];
+ // Do any additional setup after loading the view, typically from a nib.
+}
+
+- (void)didReceiveMemoryWarning {
+ [super didReceiveMemoryWarning];
+ // Dispose of any resources that can be recreated.
+}
+
+@end
diff --git a/third_party/python/gyp/test/ios/watch/WatchContainer/main.m b/third_party/python/gyp/test/ios/watch/WatchContainer/main.m
new file mode 100644
index 0000000000..47aecb5148
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchContainer/main.m
@@ -0,0 +1,13 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+#import <UIKit/UIKit.h>
+#import "AppDelegate.h"
+
+int main(int argc, char* argv[]) {
+ @autoreleasepool {
+ return UIApplicationMain(argc, argv, nil,
+ NSStringFromClass([AppDelegate class]));
+ }
+}
diff --git a/third_party/python/gyp/test/ios/watch/WatchKitExtension/Images.xcassets/MyImage.imageset/Contents.json b/third_party/python/gyp/test/ios/watch/WatchKitExtension/Images.xcassets/MyImage.imageset/Contents.json
new file mode 100644
index 0000000000..f80d950868
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchKitExtension/Images.xcassets/MyImage.imageset/Contents.json
@@ -0,0 +1,20 @@
+{
+ "images" : [
+ {
+ "idiom" : "universal",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "universal",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "universal",
+ "scale" : "3x"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
diff --git a/third_party/python/gyp/test/ios/watch/WatchKitExtension/Info.plist b/third_party/python/gyp/test/ios/watch/WatchKitExtension/Info.plist
new file mode 100644
index 0000000000..7a354643ef
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchKitExtension/Info.plist
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>WatchContainer WatchKit Extension</string>
+ <key>CFBundleExecutable</key>
+ <string>$(EXECUTABLE_NAME)</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.gyptest.watch.watchkitextension</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>$(PRODUCT_NAME)</string>
+ <key>CFBundlePackageType</key>
+ <string>XPC!</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>NSExtension</key>
+ <dict>
+ <key>NSExtensionAttributes</key>
+ <dict>
+ <key>WKAppBundleIdentifier</key>
+ <string>com.google.gyptest.watch.watchapp</string>
+ </dict>
+ <key>NSExtensionPointIdentifier</key>
+ <string>com.apple.watchkit</string>
+ </dict>
+ <key>RemoteInterfacePrincipalClass</key>
+ <string>InterfaceController</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/ios/watch/WatchKitExtension/InterfaceController.h b/third_party/python/gyp/test/ios/watch/WatchKitExtension/InterfaceController.h
new file mode 100644
index 0000000000..c3395eb484
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchKitExtension/InterfaceController.h
@@ -0,0 +1,10 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Foundation/Foundation.h>
+#import <WatchKit/WatchKit.h>
+
+@interface InterfaceController : WKInterfaceController
+@end
+
diff --git a/third_party/python/gyp/test/ios/watch/WatchKitExtension/InterfaceController.m b/third_party/python/gyp/test/ios/watch/WatchKitExtension/InterfaceController.m
new file mode 100644
index 0000000000..564b7d1da5
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchKitExtension/InterfaceController.m
@@ -0,0 +1,25 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "InterfaceController.h"
+
+@implementation InterfaceController
+
+- (instancetype)initWithContext:(id)context {
+ if ((self = [super initWithContext:context])) {
+ // -initWithContext:
+ }
+ return self;
+}
+
+- (void)willActivate {
+ // -willActivate
+}
+
+- (void)didDeactivate {
+ // -didDeactivate
+}
+
+@end
+
diff --git a/third_party/python/gyp/test/ios/watch/WatchKitExtension/MainInterface.storyboard b/third_party/python/gyp/test/ios/watch/WatchKitExtension/MainInterface.storyboard
new file mode 100644
index 0000000000..5aa58184e8
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/WatchKitExtension/MainInterface.storyboard
@@ -0,0 +1,63 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6148" systemVersion="14A229a" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" initialViewController="ObA-dk-sSI">
+ <dependencies>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6147"/>
+ </dependencies>
+ <scenes>
+ <!--Action View Controller - Image-->
+ <scene sceneID="7MM-of-jgj">
+ <objects>
+ <viewController title="Image" id="ObA-dk-sSI" customClass="ActionViewController" customModuleProvider="" sceneMemberID="viewController">
+ <layoutGuides>
+ <viewControllerLayoutGuide type="top" id="qkL-Od-lgU"/>
+ <viewControllerLayoutGuide type="bottom" id="n38-gi-rB5"/>
+ </layoutGuides>
+ <view key="view" contentMode="scaleToFill" id="zMn-AG-sqS">
+ <rect key="frame" x="0.0" y="0.0" width="320" height="528"/>
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <subviews>
+ <imageView userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" translatesAutoresizingMaskIntoConstraints="NO" id="9ga-4F-77Z">
+ <rect key="frame" x="0.0" y="64" width="320" height="464"/>
+ </imageView>
+ <navigationBar contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="NOA-Dm-cuz">
+ <rect key="frame" x="0.0" y="20" width="320" height="44"/>
+ <items>
+ <navigationItem id="3HJ-uW-3hn">
+ <barButtonItem key="leftBarButtonItem" title="Done" style="done" id="WYi-yp-eM6">
+ <connections>
+ <action selector="done" destination="ObA-dk-sSI" id="Qdu-qn-U6V"/>
+ </connections>
+ </barButtonItem>
+ </navigationItem>
+ </items>
+ </navigationBar>
+ </subviews>
+ <color key="backgroundColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
+ <constraints>
+ <constraint firstAttribute="trailing" secondItem="NOA-Dm-cuz" secondAttribute="trailing" id="A05-Pj-hrr"/>
+ <constraint firstItem="9ga-4F-77Z" firstAttribute="top" secondItem="NOA-Dm-cuz" secondAttribute="bottom" id="Fps-3D-QQW"/>
+ <constraint firstItem="NOA-Dm-cuz" firstAttribute="leading" secondItem="zMn-AG-sqS" secondAttribute="leading" id="HxO-8t-aoh"/>
+ <constraint firstAttribute="trailing" secondItem="9ga-4F-77Z" secondAttribute="trailing" id="Ozw-Hg-0yh"/>
+ <constraint firstItem="9ga-4F-77Z" firstAttribute="leading" secondItem="zMn-AG-sqS" secondAttribute="leading" id="XH5-ld-ONA"/>
+ <constraint firstItem="n38-gi-rB5" firstAttribute="top" secondItem="9ga-4F-77Z" secondAttribute="bottom" id="eQg-nn-Zy4"/>
+ <constraint firstItem="NOA-Dm-cuz" firstAttribute="top" secondItem="qkL-Od-lgU" secondAttribute="bottom" id="we0-1t-bgp"/>
+ </constraints>
+ </view>
+ <freeformSimulatedSizeMetrics key="simulatedDestinationMetrics"/>
+ <size key="freeformSize" width="320" height="528"/>
+ <connections>
+ <outlet property="imageView" destination="9ga-4F-77Z" id="5y6-5w-9QO"/>
+ <outlet property="view" destination="zMn-AG-sqS" id="Qma-de-2ek"/>
+ </connections>
+ </viewController>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="X47-rx-isc" userLabel="First Responder" sceneMemberID="firstResponder"/>
+ </objects>
+ <point key="canvasLocation" x="252" y="-124"/>
+ </scene>
+ </scenes>
+ <simulatedMetricsContainer key="defaultSimulatedMetrics">
+ <simulatedStatusBarMetrics key="statusBar"/>
+ <simulatedOrientationMetrics key="orientation"/>
+ <simulatedScreenMetrics key="destination" type="retina4"/>
+ </simulatedMetricsContainer>
+</document>
diff --git a/third_party/python/gyp/test/ios/watch/watch.gyp b/third_party/python/gyp/test/ios/watch/watch.gyp
new file mode 100644
index 0000000000..49be5554ee
--- /dev/null
+++ b/third_party/python/gyp/test/ios/watch/watch.gyp
@@ -0,0 +1,105 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'make_global_settings': [
+ ['CC', '/usr/bin/clang'],
+ ['CXX', '/usr/bin/clang++'],
+ ],
+ 'target_defaults': {
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fobjc-abi-version=2',
+ ],
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'SDKROOT': 'iphoneos',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '8.2',
+ 'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+ }
+ },
+ 'targets': [
+ {
+ 'target_name': 'WatchContainer',
+ 'product_name': 'WatchContainer',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'mac_bundle_resources': [
+ 'WatchContainer/Base.lproj/Main.storyboard',
+ ],
+ 'sources': [
+ 'WatchContainer/AppDelegate.h',
+ 'WatchContainer/AppDelegate.m',
+ 'WatchContainer/ViewController.h',
+ 'WatchContainer/ViewController.m',
+ 'WatchContainer/main.m',
+ ],
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/WatchContainer.app/PlugIns',
+ 'files': [
+ '<(PRODUCT_DIR)/WatchKitExtension.appex',
+ ]}],
+ 'dependencies': [
+ 'WatchKitExtension'
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'WatchContainer/Info.plist',
+ },
+ },
+ {
+ 'target_name': 'WatchKitExtension',
+ 'product_name': 'WatchKitExtension',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'ios_watchkit_extension': 1,
+ 'sources': [
+ 'WatchKitExtension/InterfaceController.h',
+ 'WatchKitExtension/InterfaceController.m',
+ ],
+ 'mac_bundle_resources': [
+ 'WatchKitExtension/Images.xcassets',
+ '<(PRODUCT_DIR)/WatchApp.app',
+ ],
+ 'dependencies': [
+ 'WatchApp'
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/WatchKit.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'WatchKitExtension/Info.plist',
+ 'SKIP_INSTALL': 'YES',
+ 'COPY_PHASE_STRIP': 'NO',
+ },
+ },
+ {
+ 'target_name': 'WatchApp',
+ 'product_name': 'WatchApp',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'ios_watch_app': 1,
+ 'mac_bundle_resources': [
+ 'WatchApp/Images.xcassets',
+ 'WatchApp/Interface.storyboard',
+ ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'WatchApp/Info.plist',
+ 'SKIP_INSTALL': 'YES',
+ 'COPY_PHASE_STRIP': 'NO',
+ 'TARGETED_DEVICE_FAMILY': '4',
+ 'TARGETED_DEVICE_FAMILY[sdk=iphonesimulator*]': '1,4',
+ },
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/ios/xctests/App/AppDelegate.h b/third_party/python/gyp/test/ios/xctests/App/AppDelegate.h
new file mode 100644
index 0000000000..f8efce97ed
--- /dev/null
+++ b/third_party/python/gyp/test/ios/xctests/App/AppDelegate.h
@@ -0,0 +1,11 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+
+@interface AppDelegate : UIResponder<UIApplicationDelegate>
+
+@property(strong, nonatomic) UIWindow* window;
+
+@end
diff --git a/third_party/python/gyp/test/ios/xctests/App/AppDelegate.m b/third_party/python/gyp/test/ios/xctests/App/AppDelegate.m
new file mode 100644
index 0000000000..825dda75b7
--- /dev/null
+++ b/third_party/python/gyp/test/ios/xctests/App/AppDelegate.m
@@ -0,0 +1,18 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "AppDelegate.h"
+
+@interface AppDelegate ()
+
+@end
+
+@implementation AppDelegate
+
+- (BOOL)application:(UIApplication*)application
+ didFinishLaunchingWithOptions:(NSDictionary*)launchOptions {
+ return YES;
+}
+
+@end
diff --git a/third_party/python/gyp/test/ios/xctests/App/Base.lproj/LaunchScreen.xib b/third_party/python/gyp/test/ios/xctests/App/Base.lproj/LaunchScreen.xib
new file mode 100644
index 0000000000..063dc5ea79
--- /dev/null
+++ b/third_party/python/gyp/test/ios/xctests/App/Base.lproj/LaunchScreen.xib
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="6214" systemVersion="14A314h" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES">
+ <dependencies>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6207"/>
+ <capability name="Constraints with non-1.0 multipliers" minToolsVersion="5.1"/>
+ </dependencies>
+ <objects>
+ <placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner"/>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
+ <view contentMode="scaleToFill" id="iN0-l3-epB">
+ <rect key="frame" x="0.0" y="0.0" width="480" height="480"/>
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <subviews>
+ <label opaque="NO" clipsSubviews="YES" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" Copyright (c) 2014 Google. All rights reserved." textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" minimumFontSize="9" translatesAutoresizingMaskIntoConstraints="NO" id="8ie-xW-0ye">
+ <rect key="frame" x="20" y="439" width="441" height="21"/>
+ <fontDescription key="fontDescription" type="system" pointSize="17"/>
+ <color key="textColor" cocoaTouchSystemColor="darkTextColor"/>
+ <nil key="highlightedColor"/>
+ </label>
+ <label opaque="NO" clipsSubviews="YES" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="App" textAlignment="center" lineBreakMode="middleTruncation" baselineAdjustment="alignBaselines" minimumFontSize="18" translatesAutoresizingMaskIntoConstraints="NO" id="kId-c2-rCX">
+ <rect key="frame" x="20" y="140" width="441" height="43"/>
+ <fontDescription key="fontDescription" type="boldSystem" pointSize="36"/>
+ <color key="textColor" cocoaTouchSystemColor="darkTextColor"/>
+ <nil key="highlightedColor"/>
+ </label>
+ </subviews>
+ <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+ <constraints>
+ <constraint firstItem="kId-c2-rCX" firstAttribute="centerY" secondItem="iN0-l3-epB" secondAttribute="bottom" multiplier="1/3" constant="1" id="5cJ-9S-tgC"/>
+ <constraint firstAttribute="centerX" secondItem="kId-c2-rCX" secondAttribute="centerX" id="Koa-jz-hwk"/>
+ <constraint firstAttribute="bottom" secondItem="8ie-xW-0ye" secondAttribute="bottom" constant="20" id="Kzo-t9-V3l"/>
+ <constraint firstItem="8ie-xW-0ye" firstAttribute="leading" secondItem="iN0-l3-epB" secondAttribute="leading" constant="20" symbolic="YES" id="MfP-vx-nX0"/>
+ <constraint firstAttribute="centerX" secondItem="8ie-xW-0ye" secondAttribute="centerX" id="ZEH-qu-HZ9"/>
+ <constraint firstItem="kId-c2-rCX" firstAttribute="leading" secondItem="iN0-l3-epB" secondAttribute="leading" constant="20" symbolic="YES" id="fvb-Df-36g"/>
+ </constraints>
+ <nil key="simulatedStatusBarMetrics"/>
+ <freeformSimulatedSizeMetrics key="simulatedDestinationMetrics"/>
+ <point key="canvasLocation" x="548" y="455"/>
+ </view>
+ </objects>
+</document>
diff --git a/third_party/python/gyp/test/ios/xctests/App/Base.lproj/Main.storyboard b/third_party/python/gyp/test/ios/xctests/App/Base.lproj/Main.storyboard
new file mode 100644
index 0000000000..f56d2f3bb5
--- /dev/null
+++ b/third_party/python/gyp/test/ios/xctests/App/Base.lproj/Main.storyboard
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6211" systemVersion="14A298i" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
+ <dependencies>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6204"/>
+ </dependencies>
+ <scenes>
+ <!--View Controller-->
+ <scene sceneID="tne-QT-ifu">
+ <objects>
+ <viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="" sceneMemberID="viewController">
+ <layoutGuides>
+ <viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
+ <viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
+ </layoutGuides>
+ <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
+ <rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+ </view>
+ </viewController>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
+ </objects>
+ </scene>
+ </scenes>
+</document>
diff --git a/third_party/python/gyp/test/ios/xctests/App/Images.xcassets/AppIcon.appiconset/Contents.json b/third_party/python/gyp/test/ios/xctests/App/Images.xcassets/AppIcon.appiconset/Contents.json
new file mode 100644
index 0000000000..36d2c80d88
--- /dev/null
+++ b/third_party/python/gyp/test/ios/xctests/App/Images.xcassets/AppIcon.appiconset/Contents.json
@@ -0,0 +1,68 @@
+{
+ "images" : [
+ {
+ "idiom" : "iphone",
+ "size" : "29x29",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "iphone",
+ "size" : "29x29",
+ "scale" : "3x"
+ },
+ {
+ "idiom" : "iphone",
+ "size" : "40x40",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "iphone",
+ "size" : "40x40",
+ "scale" : "3x"
+ },
+ {
+ "idiom" : "iphone",
+ "size" : "60x60",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "iphone",
+ "size" : "60x60",
+ "scale" : "3x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "29x29",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "29x29",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "40x40",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "40x40",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "76x76",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "ipad",
+ "size" : "76x76",
+ "scale" : "2x"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+} \ No newline at end of file
diff --git a/third_party/python/gyp/test/ios/xctests/App/Info.plist b/third_party/python/gyp/test/ios/xctests/App/Info.plist
new file mode 100644
index 0000000000..3f938f60f4
--- /dev/null
+++ b/third_party/python/gyp/test/ios/xctests/App/Info.plist
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.gyptest.App</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSRequiresIPhoneOS</key>
+ <true/>
+ <key>UILaunchStoryboardName</key>
+ <string>LaunchScreen</string>
+ <key>UIMainStoryboardFile</key>
+ <string>Main</string>
+ <key>UIRequiredDeviceCapabilities</key>
+ <array>
+ <string>armv7</string>
+ </array>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+ <key>UISupportedInterfaceOrientations~ipad</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationPortraitUpsideDown</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/ios/xctests/App/ViewController.h b/third_party/python/gyp/test/ios/xctests/App/ViewController.h
new file mode 100644
index 0000000000..95a281e8d8
--- /dev/null
+++ b/third_party/python/gyp/test/ios/xctests/App/ViewController.h
@@ -0,0 +1,9 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+
+@interface ViewController : UIViewController
+
+@end
diff --git a/third_party/python/gyp/test/ios/xctests/App/ViewController.m b/third_party/python/gyp/test/ios/xctests/App/ViewController.m
new file mode 100644
index 0000000000..d38e3c5bb7
--- /dev/null
+++ b/third_party/python/gyp/test/ios/xctests/App/ViewController.m
@@ -0,0 +1,21 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "ViewController.h"
+
+@interface ViewController ()
+
+@end
+
+@implementation ViewController
+
+- (void)viewDidLoad {
+ [super viewDidLoad];
+}
+
+- (void)didReceiveMemoryWarning {
+ [super didReceiveMemoryWarning];
+}
+
+@end
diff --git a/third_party/python/gyp/test/ios/xctests/App/main.m b/third_party/python/gyp/test/ios/xctests/App/main.m
new file mode 100644
index 0000000000..83368075cd
--- /dev/null
+++ b/third_party/python/gyp/test/ios/xctests/App/main.m
@@ -0,0 +1,13 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+#import "AppDelegate.h"
+
+int main(int argc, char* argv[]) {
+ @autoreleasepool {
+ return UIApplicationMain(
+ argc, argv, nil, NSStringFromClass([AppDelegate class]));
+ }
+}
diff --git a/third_party/python/gyp/test/ios/xctests/AppTests/AppTests.m b/third_party/python/gyp/test/ios/xctests/AppTests/AppTests.m
new file mode 100644
index 0000000000..22121b089d
--- /dev/null
+++ b/third_party/python/gyp/test/ios/xctests/AppTests/AppTests.m
@@ -0,0 +1,31 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+#import <XCTest/XCTest.h>
+
+@interface AppTests : XCTestCase
+
+@end
+
+@implementation AppTests
+
+- (void)setUp {
+ [super setUp];
+}
+
+- (void)tearDown {
+ [super tearDown];
+}
+
+- (void)testExample {
+ XCTAssert(YES, @"Pass");
+}
+
+- (void)testPerformanceExample {
+ [self measureBlock:^{
+ }];
+}
+
+@end
diff --git a/third_party/python/gyp/test/ios/xctests/AppTests/Info.plist b/third_party/python/gyp/test/ios/xctests/AppTests/Info.plist
new file mode 100644
index 0000000000..d43ff4ba8d
--- /dev/null
+++ b/third_party/python/gyp/test/ios/xctests/AppTests/Info.plist
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.gyptest.AppTests</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>BNDL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/ios/xctests/gyptest-xctests.py b/third_party/python/gyp/test/ios/xctests/gyptest-xctests.py
new file mode 100644
index 0000000000..6642cfb938
--- /dev/null
+++ b/third_party/python/gyp/test/ios/xctests/gyptest-xctests.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+
+# Copyright 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that iOS XCTests can be built correctly.
+"""
+
+import TestGyp
+
+import os
+import subprocess
+import sys
+
+def HasCerts():
+  # The bots do not have codesigning certs, so skip configurations that
+  # require signing when no valid identities are available.
+ proc = subprocess.Popen(['security','find-identity','-p', 'codesigning',
+ '-v'], stdout=subprocess.PIPE)
+ return "0 valid identities found" not in proc.communicate()[0].strip()
+
+if sys.platform == "darwin":
+ # This test appears to be flaky and hangs some of the time.
+ sys.exit(2) # bug=531
+
+ test = TestGyp.TestGyp(formats=['xcode', 'ninja'])
+ test.run_gyp('xctests.gyp')
+ test_configs = ['Default']
+ # TODO(crbug.com/557418): Enable this once xcodebuild works for iOS devices.
+ #if HasCerts() and test.format == 'xcode':
+ # test_configs.append('Default-iphoneos')
+ for config in test_configs:
+ test.set_configuration(config)
+ test.build('xctests.gyp', test.ALL)
+ test.built_file_must_exist('app_under_test.app/app_under_test')
+ test.built_file_must_exist('app_tests.xctest/app_tests')
+ if 'ninja' in test.format:
+ test.built_file_must_exist('obj/AppTests/app_tests.AppTests.i386.o')
+ test.built_file_must_exist('obj/AppTests/app_tests.AppTests.x86_64.o')
+ elif test.format == 'xcode':
+ xcode_object_path = os.path.join('..', 'xctests.build',
+ 'Default-iphonesimulator',
+ 'app_tests.build', 'Objects-normal',
+ '%s', 'AppTests.o')
+ test.built_file_must_exist(xcode_object_path % 'i386')
+ test.built_file_must_exist(xcode_object_path % 'x86_64')
+ test.pass_test()
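A portability note on HasCerts() above: subprocess.Popen.communicate() returns bytes under Python 3, so the substring check against a str would raise a TypeError there. A Python 3 friendly variant would compare against bytes (or decode first); the helper below is only a sketch of that idea, not part of the upstream test.

    import subprocess

    def has_certs():
        # 'security find-identity -p codesigning -v' prints
        # '0 valid identities found' when no signing identities exist.
        proc = subprocess.Popen(
            ['security', 'find-identity', '-p', 'codesigning', '-v'],
            stdout=subprocess.PIPE)
        out = proc.communicate()[0]
        return b'0 valid identities found' not in out
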
diff --git a/third_party/python/gyp/test/ios/xctests/xctests.gyp b/third_party/python/gyp/test/ios/xctests/xctests.gyp
new file mode 100644
index 0000000000..8d4d6393b7
--- /dev/null
+++ b/third_party/python/gyp/test/ios/xctests/xctests.gyp
@@ -0,0 +1,74 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'conditions': [
+ ['"<(GENERATOR)"=="ninja"', {
+ 'make_global_settings': [
+ ['CC', '/usr/bin/clang'],
+ ['CXX', '/usr/bin/clang++'],
+ ],
+ }]
+ ],
+ 'target_defaults': {
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fobjc-abi-version=2',
+ ],
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'SDKROOT': 'iphonesimulator', # -isysroot
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '9.0',
+ 'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+ }
+ },
+ 'targets': [
+ {
+ 'target_name': 'app_under_test',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'mac_bundle_resources': [
+ 'App/Base.lproj/LaunchScreen.xib',
+ 'App/Base.lproj/Main.storyboard',
+ ],
+ 'sources': [
+ 'App/AppDelegate.h',
+ 'App/AppDelegate.m',
+ 'App/ViewController.h',
+ 'App/ViewController.m',
+ 'App/main.m',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'App/Info.plist',
+ },
+ },
+ {
+ 'target_name': 'app_tests',
+ 'type': 'loadable_module',
+ 'mac_xctest_bundle': 1,
+ 'sources': [
+ 'AppTests/AppTests.m',
+ ],
+ 'dependencies': [
+ 'app_under_test'
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'WRAPPER_EXTENSION': 'xctest',
+ 'INFOPLIST_FILE': 'AppTests/Info.plist',
+ 'BUNDLE_LOADER': '$(BUILT_PRODUCTS_DIR)/app_under_test.app/app_under_test',
+ 'TEST_HOST': '$(BUNDLE_LOADER)',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/lib/README.txt b/third_party/python/gyp/test/lib/README.txt
new file mode 100644
index 0000000000..b3d724574e
--- /dev/null
+++ b/third_party/python/gyp/test/lib/README.txt
@@ -0,0 +1,17 @@
+Supporting modules for GYP testing.
+
+ TestCmd.py
+ TestCommon.py
+
+ Modules for generic testing of command-line utilities,
+ specifically including the ability to copy a test configuration
+ to temporary directories (with default cleanup on exit) as part
+ of running test scripts that invoke commands, compare actual
+ against expected output, etc.
+
+ Our copies of these come from the SCons project,
+ http://www.scons.org/.
+
+ TestGyp.py
+
+ Modules for GYP-specific tests, of course.
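A test built on these modules generally follows the same pattern as the gyptest-*.py scripts elsewhere in this change: instantiate the harness, generate, build, and assert on the produced files. A minimal sketch, with placeholder file and target names:

    import TestGyp

    test = TestGyp.TestGyp()             # generator format is chosen by the harness
    test.run_gyp('hello.gyp')            # placeholder .gyp file in the test directory
    test.build('hello.gyp', test.ALL)    # build every target it defines
    test.built_file_must_exist('hello')  # placeholder output name
    test.pass_test()
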
diff --git a/third_party/python/gyp/test/lib/TestCmd.py b/third_party/python/gyp/test/lib/TestCmd.py
new file mode 100644
index 0000000000..1ec50933a4
--- /dev/null
+++ b/third_party/python/gyp/test/lib/TestCmd.py
@@ -0,0 +1,1597 @@
+# Copyright (c) 2018 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+TestCmd.py: a testing framework for commands and scripts.
+
+The TestCmd module provides a framework for portable automated testing
+of executable commands and scripts (in any language, not just Python),
+especially commands and scripts that require file system interaction.
+
+In addition to running tests and evaluating conditions, the TestCmd
+module manages and cleans up one or more temporary workspace
+directories, and provides methods for creating files and directories in
+those workspace directories from in-line data (here-documents), allowing
+tests to be completely self-contained.
+
+A TestCmd environment object is created via the usual invocation:
+
+ import TestCmd
+ test = TestCmd.TestCmd()
+
+There are a bunch of keyword arguments available at instantiation:
+
+ test = TestCmd.TestCmd(description = 'string',
+ program = 'program_or_script_to_test',
+ interpreter = 'script_interpreter',
+ workdir = 'prefix',
+ subdir = 'subdir',
+ verbose = Boolean,
+ match = default_match_function,
+ diff = default_diff_function,
+ combine = Boolean)
+
+There are a bunch of methods that let you do different things:
+
+ test.verbose_set(1)
+
+ test.description_set('string')
+
+ test.program_set('program_or_script_to_test')
+
+ test.interpreter_set('script_interpreter')
+ test.interpreter_set(['script_interpreter', 'arg'])
+
+ test.workdir_set('prefix')
+ test.workdir_set('')
+
+ test.workpath('file')
+ test.workpath('subdir', 'file')
+
+ test.subdir('subdir', ...)
+
+ test.rmdir('subdir', ...)
+
+ test.write('file', "contents\n")
+ test.write(['subdir', 'file'], "contents\n")
+
+ test.read('file')
+ test.read(['subdir', 'file'])
+ test.read('file', mode)
+ test.read(['subdir', 'file'], mode)
+
+ test.writable('dir', 1)
+ test.writable('dir', None)
+
+ test.preserve(condition, ...)
+
+ test.cleanup(condition)
+
+ test.command_args(program = 'program_or_script_to_run',
+ interpreter = 'script_interpreter',
+ arguments = 'arguments to pass to program')
+
+ test.run(program = 'program_or_script_to_run',
+ interpreter = 'script_interpreter',
+ arguments = 'arguments to pass to program',
+ chdir = 'directory_to_chdir_to',
+             stdin = 'input to feed to the program\n',
+             universal_newlines = True)
+
+ p = test.start(program = 'program_or_script_to_run',
+ interpreter = 'script_interpreter',
+ arguments = 'arguments to pass to program',
+ universal_newlines = None)
+
+ test.finish(self, p)
+
+ test.pass_test()
+ test.pass_test(condition)
+ test.pass_test(condition, function)
+
+ test.fail_test()
+ test.fail_test(condition)
+ test.fail_test(condition, function)
+ test.fail_test(condition, function, skip)
+
+ test.no_result()
+ test.no_result(condition)
+ test.no_result(condition, function)
+ test.no_result(condition, function, skip)
+
+ test.stdout()
+ test.stdout(run)
+
+ test.stderr()
+ test.stderr(run)
+
+ test.symlink(target, link)
+
+ test.banner(string)
+ test.banner(string, width)
+
+ test.diff(actual, expected)
+
+ test.match(actual, expected)
+
+ test.match_exact("actual 1\nactual 2\n", "expected 1\nexpected 2\n")
+ test.match_exact(["actual 1\n", "actual 2\n"],
+ ["expected 1\n", "expected 2\n"])
+
+ test.match_re("actual 1\nactual 2\n", regex_string)
+ test.match_re(["actual 1\n", "actual 2\n"], list_of_regexes)
+
+ test.match_re_dotall("actual 1\nactual 2\n", regex_string)
+ test.match_re_dotall(["actual 1\n", "actual 2\n"], list_of_regexes)
+
+ test.tempdir()
+ test.tempdir('temporary-directory')
+
+ test.sleep()
+ test.sleep(seconds)
+
+ test.where_is('foo')
+ test.where_is('foo', 'PATH1:PATH2')
+ test.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4')
+
+ test.unlink('file')
+ test.unlink('subdir', 'file')
+
+The TestCmd module provides pass_test(), fail_test(), and no_result()
+unbound functions that report test results for use with the Aegis change
+management system. These methods terminate the test immediately,
+reporting PASSED, FAILED, or NO RESULT, and exiting with
+status 0 (success), 1, or 2, respectively. This allows for a distinction
+between an actual failed test and a test that could not be properly
+evaluated because of an external condition (such as a full file system
+or incorrect permissions).
+
+ import TestCmd
+
+ TestCmd.pass_test()
+ TestCmd.pass_test(condition)
+ TestCmd.pass_test(condition, function)
+
+ TestCmd.fail_test()
+ TestCmd.fail_test(condition)
+ TestCmd.fail_test(condition, function)
+ TestCmd.fail_test(condition, function, skip)
+
+ TestCmd.no_result()
+ TestCmd.no_result(condition)
+ TestCmd.no_result(condition, function)
+ TestCmd.no_result(condition, function, skip)
+
+The TestCmd module also provides unbound functions that handle matching
+in the same way as the match_*() methods described above.
+
+ import TestCmd
+
+ test = TestCmd.TestCmd(match = TestCmd.match_exact)
+
+ test = TestCmd.TestCmd(match = TestCmd.match_re)
+
+ test = TestCmd.TestCmd(match = TestCmd.match_re_dotall)
+
+The TestCmd module provides unbound functions that can be used for the
+"diff" argument to TestCmd.TestCmd instantiation:
+
+ import TestCmd
+
+ test = TestCmd.TestCmd(match = TestCmd.match_re,
+ diff = TestCmd.diff_re)
+
+ test = TestCmd.TestCmd(diff = TestCmd.simple_diff)
+
+The "diff" argument can also be used with standard difflib functions:
+
+ import difflib
+
+ test = TestCmd.TestCmd(diff = difflib.context_diff)
+
+ test = TestCmd.TestCmd(diff = difflib.unified_diff)
+
+Lastly, the where_is() method also exists in an unbound function
+version.
+
+ import TestCmd
+
+ TestCmd.where_is('foo')
+ TestCmd.where_is('foo', 'PATH1:PATH2')
+ TestCmd.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4')
+"""
+
+# Copyright 2000-2010 Steven Knight
+# This module is free software, and you may redistribute it and/or modify
+# it under the same terms as Python itself, so long as this copyright message
+# and disclaimer are retained in their original form.
+#
+# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
+# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
+# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+# DAMAGE.
+#
+# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+# PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
+# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
+# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+from __future__ import print_function
+
+__author__ = "Steven Knight <knight at baldmt dot com>"
+__revision__ = "TestCmd.py 0.37.D001 2010/01/11 16:55:50 knight"
+__version__ = "0.37"
+
+import errno
+import os
+import os.path
+import re
+import shutil
+import stat
+import sys
+import tempfile
+import time
+import traceback
+try:
+ from UserList import UserList
+except ImportError:
+ from collections import UserList
+
+__all__ = [
+ 'diff_re',
+ 'fail_test',
+ 'no_result',
+ 'pass_test',
+ 'match_exact',
+ 'match_re',
+ 'match_re_dotall',
+ 'python_executable',
+ 'TestCmd'
+]
+
+try:
+ import difflib
+except ImportError:
+ __all__.append('simple_diff')
+
+def is_List(e):
+ return (type(e) is list) or isinstance(e, UserList)
+
+try:
+ from UserString import UserString
+except ImportError:
+ try:
+ from collections import UserString
+ except ImportError:
+ class UserString:
+ pass
+
+try:
+ # basestring was removed in python3.
+ basestring
+except NameError:
+ basestring = str
+
+def is_String(e):
+ return isinstance(e, basestring) or isinstance(e, UserString)
+
+tempfile.template = 'testcmd.'
+if os.name in ('posix', 'nt'):
+ tempfile.template = 'testcmd.' + str(os.getpid()) + '.'
+else:
+ tempfile.template = 'testcmd.'
+
+re_space = re.compile(r'\s')
+
+_Cleanup = []
+
+_chain_to_exitfunc = None
+
+def _clean():
+ global _Cleanup
+ for test in reversed(_Cleanup):
+ if test:
+ test.cleanup()
+ del _Cleanup[:]
+ if _chain_to_exitfunc:
+ _chain_to_exitfunc()
+
+try:
+ import atexit
+except ImportError:
+ # TODO(1.5): atexit requires python 2.0, so chain sys.exitfunc
+ try:
+ _chain_to_exitfunc = sys.exitfunc
+ except AttributeError:
+ pass
+ sys.exitfunc = _clean
+else:
+ atexit.register(_clean)
+
+try:
+ zip
+except NameError:
+ def zip(*lists):
+ result = []
+ for i in range(min(map(len, lists))):
+ result.append(tuple(map(lambda l, i=i: l[i], lists)))
+ return result
+
+class Collector:
+ def __init__(self, top):
+ self.entries = [top]
+ def __call__(self, arg, dirname, names):
+ pathjoin = lambda n, d=dirname: os.path.join(d, n)
+ self.entries.extend(map(pathjoin, names))
+
+def _caller(tblist, skip):
+ string = ""
+ arr = []
+ for file, line, name, text in tblist:
+ if file[-10:] == "TestCmd.py":
+ break
+ arr = [(file, line, name, text)] + arr
+ atfrom = "at"
+ for file, line, name, text in arr[skip:]:
+ if name in ("?", "<module>"):
+ name = ""
+ else:
+ name = " (" + name + ")"
+ string = string + ("%s line %d of %s%s\n" % (atfrom, line, file, name))
+ atfrom = "\tfrom"
+ return string
+
+def fail_test(self = None, condition = 1, function = None, skip = 0):
+ """Cause the test to fail.
+
+ By default, the fail_test() method reports that the test FAILED
+ and exits with a status of 1. If a condition argument is supplied,
+ the test fails only if the condition is true.
+ """
+ if not condition:
+ return
+ if not function is None:
+ function()
+ of = ""
+ desc = ""
+ sep = " "
+ if not self is None:
+ if self.program:
+ of = " of " + self.program
+ sep = "\n\t"
+ if self.description:
+ desc = " [" + self.description + "]"
+ sep = "\n\t"
+
+ at = _caller(traceback.extract_stack(), skip)
+ sys.stderr.write("FAILED test" + of + desc + sep + at)
+
+ sys.exit(1)
+
+def no_result(self = None, condition = 1, function = None, skip = 0):
+ """Causes a test to exit with no valid result.
+
+ By default, the no_result() method reports NO RESULT for the test
+ and exits with a status of 2. If a condition argument is supplied,
+ the test fails only if the condition is true.
+ """
+ if not condition:
+ return
+ if not function is None:
+ function()
+ of = ""
+ desc = ""
+ sep = " "
+ if not self is None:
+ if self.program:
+ of = " of " + self.program
+ sep = "\n\t"
+ if self.description:
+ desc = " [" + self.description + "]"
+ sep = "\n\t"
+
+ if os.environ.get('TESTCMD_DEBUG_SKIPS'):
+ at = _caller(traceback.extract_stack(), skip)
+ sys.stderr.write("NO RESULT for test" + of + desc + sep + at)
+ else:
+ sys.stderr.write("NO RESULT\n")
+
+ sys.exit(2)
+
+def pass_test(self = None, condition = 1, function = None):
+ """Causes a test to pass.
+
+ By default, the pass_test() method reports PASSED for the test
+ and exits with a status of 0. If a condition argument is supplied,
+ the test passes only if the condition is true.
+ """
+ if not condition:
+ return
+ if not function is None:
+ function()
+ sys.stderr.write("PASSED\n")
+ sys.exit(0)
+
+def match_exact(lines = None, matches = None):
+ """
+ """
+ if not is_List(lines):
+ lines = lines.split("\n")
+ if not is_List(matches):
+ matches = matches.split("\n")
+ if len(lines) != len(matches):
+ return
+ for i in range(len(lines)):
+ if lines[i] != matches[i]:
+ return
+ return 1
+
+def match_re(lines = None, res = None):
+ """
+ """
+ if not is_List(lines):
+ lines = lines.split("\n")
+ if not is_List(res):
+ res = res.split("\n")
+ if len(lines) != len(res):
+ return
+ for i in range(len(lines)):
+ s = "^" + res[i] + "$"
+ try:
+ expr = re.compile(s)
+ except re.error as e:
+ msg = "Regular expression error in %s: %s"
+ raise re.error(msg % (repr(s), e[0]))
+ if not expr.search(lines[i]):
+ return
+ return 1
+
+def match_re_dotall(lines = None, res = None):
+ """
+ """
+ if not type(lines) is type(""):
+ lines = "\n".join(lines)
+ if not type(res) is type(""):
+ res = "\n".join(res)
+ s = "^" + res + "$"
+ try:
+ expr = re.compile(s, re.DOTALL)
+ except re.error as e:
+ msg = "Regular expression error in %s: %s"
+ raise re.error(msg % (repr(s), e[0]))
+ if expr.match(lines):
+ return 1
+
+try:
+ import difflib
+except ImportError:
+ pass
+else:
+ def simple_diff(a, b, fromfile='', tofile='',
+ fromfiledate='', tofiledate='', n=3, lineterm='\n'):
+ """
+ A function with the same calling signature as difflib.context_diff
+ (diff -c) and difflib.unified_diff (diff -u) but which prints
+        output like the simple, unadorned 'diff' command.
+ """
+ sm = difflib.SequenceMatcher(None, a, b)
+ def comma(x1, x2):
+ return x1+1 == x2 and str(x2) or '%s,%s' % (x1+1, x2)
+ result = []
+ for op, a1, a2, b1, b2 in sm.get_opcodes():
+ if op == 'delete':
+ result.append("%sd%d" % (comma(a1, a2), b1))
+ result.extend(map(lambda l: '< ' + l, a[a1:a2]))
+ elif op == 'insert':
+ result.append("%da%s" % (a1, comma(b1, b2)))
+ result.extend(map(lambda l: '> ' + l, b[b1:b2]))
+ elif op == 'replace':
+ result.append("%sc%s" % (comma(a1, a2), comma(b1, b2)))
+ result.extend(map(lambda l: '< ' + l, a[a1:a2]))
+ result.append('---')
+ result.extend(map(lambda l: '> ' + l, b[b1:b2]))
+ return result
+
+def diff_re(a, b, fromfile='', tofile='',
+ fromfiledate='', tofiledate='', n=3, lineterm='\n'):
+ """
+ A simple "diff" of two sets of lines when the expected lines
+ are regular expressions. This is a really dumb thing that
+ just compares each line in turn, so it doesn't look for
+ chunks of matching lines and the like--but at least it lets
+    you know exactly which line first didn't compare correctly.
+ """
+ result = []
+ diff = len(a) - len(b)
+ if diff < 0:
+ a = a + ['']*(-diff)
+ elif diff > 0:
+ b = b + ['']*diff
+ i = 0
+ for aline, bline in zip(a, b):
+ s = "^" + aline + "$"
+ try:
+ expr = re.compile(s)
+ except re.error as e:
+ msg = "Regular expression error in %s: %s"
+ raise re.error(msg % (repr(s), e[0]))
+ if not expr.search(bline):
+ result.append("%sc%s" % (i+1, i+1))
+ result.append('< ' + repr(a[i]))
+ result.append('---')
+ result.append('> ' + repr(b[i]))
+ i = i+1
+ return result
+
+if os.name == 'java':
+
+ python_executable = os.path.join(sys.prefix, 'jython')
+
+else:
+
+ python_executable = sys.executable
+
+if sys.platform == 'win32':
+
+ default_sleep_seconds = 2
+
+ def where_is(file, path=None, pathext=None):
+ if path is None:
+ path = os.environ['PATH']
+ if is_String(path):
+ path = path.split(os.pathsep)
+ if pathext is None:
+ pathext = os.environ['PATHEXT']
+ if is_String(pathext):
+ pathext = pathext.split(os.pathsep)
+ for ext in pathext:
+ if ext.lower() == file[-len(ext):].lower():
+ pathext = ['']
+ break
+ for dir in path:
+ f = os.path.join(dir, file)
+ for ext in pathext:
+ fext = f + ext
+ if os.path.isfile(fext):
+ return fext
+ return None
+
+else:
+
+ def where_is(file, path=None, pathext=None):
+ if path is None:
+ path = os.environ['PATH']
+ if is_String(path):
+ path = path.split(os.pathsep)
+ for dir in path:
+ f = os.path.join(dir, file)
+ if os.path.isfile(f):
+ try:
+ st = os.stat(f)
+ except OSError:
+ continue
+ if stat.S_IMODE(st[stat.ST_MODE]) & 0o111:
+ return f
+ return None
+
+ default_sleep_seconds = 1
+
+
+
+try:
+ import subprocess
+except ImportError:
+ # The subprocess module doesn't exist in this version of Python,
+ # so we're going to cobble up something that looks just enough
+ # like its API for our purposes below.
+ import new
+
+ subprocess = new.module('subprocess')
+
+ subprocess.PIPE = 'PIPE'
+ subprocess.STDOUT = 'STDOUT'
+ subprocess.mswindows = (sys.platform == 'win32')
+
+ try:
+ import popen2
+ popen2.Popen3
+ except AttributeError:
+ class Popen3:
+ universal_newlines = 1
+ def __init__(self, command, **kw):
+ if sys.platform == 'win32' and command[0] == '"':
+ command = '"' + command + '"'
+ (stdin, stdout, stderr) = os.popen3(' ' + command)
+ self.stdin = stdin
+ self.stdout = stdout
+ self.stderr = stderr
+ def close_output(self):
+ self.stdout.close()
+ self.resultcode = self.stderr.close()
+ def wait(self):
+ resultcode = self.resultcode
+ if os.WIFEXITED(resultcode):
+ return os.WEXITSTATUS(resultcode)
+ elif os.WIFSIGNALED(resultcode):
+ return os.WTERMSIG(resultcode)
+ else:
+ return None
+
+ else:
+ try:
+ popen2.Popen4
+ except AttributeError:
+ # A cribbed Popen4 class, with some retrofitted code from
+ # the Python 1.5 Popen3 class methods to do certain things
+ # by hand.
+ class Popen4(popen2.Popen3):
+ childerr = None
+
+ def __init__(self, cmd, bufsize=-1):
+ p2cread, p2cwrite = os.pipe()
+ c2pread, c2pwrite = os.pipe()
+ self.pid = os.fork()
+ if self.pid == 0:
+ # Child
+ os.dup2(p2cread, 0)
+ os.dup2(c2pwrite, 1)
+ os.dup2(c2pwrite, 2)
+ for i in range(3, popen2.MAXFD):
+ try:
+ os.close(i)
+ except: pass
+ try:
+ os.execvp(cmd[0], cmd)
+ finally:
+ os._exit(1)
+ # Shouldn't come here, I guess
+ os._exit(1)
+ os.close(p2cread)
+ self.tochild = os.fdopen(p2cwrite, 'w', bufsize)
+ os.close(c2pwrite)
+ self.fromchild = os.fdopen(c2pread, 'r', bufsize)
+ popen2._active.append(self)
+
+ popen2.Popen4 = Popen4
+
+ class Popen3(popen2.Popen3, popen2.Popen4):
+ universal_newlines = 1
+ def __init__(self, command, **kw):
+ if kw.get('stderr') == 'STDOUT':
+ popen2.Popen4.__init__(self, command, 1)
+ else:
+ popen2.Popen3.__init__(self, command, 1)
+ self.stdin = self.tochild
+ self.stdout = self.fromchild
+ self.stderr = self.childerr
+ def wait(self, *args, **kw):
+ resultcode = popen2.Popen3.wait(self, *args, **kw)
+ if os.WIFEXITED(resultcode):
+ return os.WEXITSTATUS(resultcode)
+ elif os.WIFSIGNALED(resultcode):
+ return os.WTERMSIG(resultcode)
+ else:
+ return None
+
+ subprocess.Popen = Popen3
+
+
+
+# From Josiah Carlson,
+# ASPN : Python Cookbook : Module to allow Asynchronous subprocess use on Windows and Posix platforms
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/440554
+
+PIPE = subprocess.PIPE
+
+if sys.platform == 'win32':
+ from win32file import ReadFile, WriteFile
+ from win32pipe import PeekNamedPipe
+ import msvcrt
+else:
+ import select
+ import fcntl
+
+ try: fcntl.F_GETFL
+ except AttributeError: fcntl.F_GETFL = 3
+
+ try: fcntl.F_SETFL
+ except AttributeError: fcntl.F_SETFL = 4
+
+class Popen(subprocess.Popen):
+ def recv(self, maxsize=None):
+ return self._recv('stdout', maxsize)
+
+ def recv_err(self, maxsize=None):
+ return self._recv('stderr', maxsize)
+
+ def send_recv(self, input='', maxsize=None):
+ return self.send(input), self.recv(maxsize), self.recv_err(maxsize)
+
+ def get_conn_maxsize(self, which, maxsize):
+ if maxsize is None:
+ maxsize = 1024
+ elif maxsize < 1:
+ maxsize = 1
+ return getattr(self, which), maxsize
+
+ def _close(self, which):
+ getattr(self, which).close()
+ setattr(self, which, None)
+
+ if sys.platform == 'win32':
+ def send(self, input):
+ if not self.stdin:
+ return None
+
+ try:
+ x = msvcrt.get_osfhandle(self.stdin.fileno())
+ (errCode, written) = WriteFile(x, input)
+ except ValueError:
+ return self._close('stdin')
+ except (subprocess.pywintypes.error, Exception) as why:
+ if why[0] in (109, errno.ESHUTDOWN):
+ return self._close('stdin')
+ raise
+
+ return written
+
+ def _recv(self, which, maxsize):
+ conn, maxsize = self.get_conn_maxsize(which, maxsize)
+ if conn is None:
+ return None
+
+ try:
+ x = msvcrt.get_osfhandle(conn.fileno())
+ (read, nAvail, nMessage) = PeekNamedPipe(x, 0)
+ if maxsize < nAvail:
+ nAvail = maxsize
+ if nAvail > 0:
+ (errCode, read) = ReadFile(x, nAvail, None)
+ except ValueError:
+ return self._close(which)
+ except (subprocess.pywintypes.error, Exception) as why:
+ if why[0] in (109, errno.ESHUTDOWN):
+ return self._close(which)
+ raise
+
+ #if self.universal_newlines:
+ # read = self._translate_newlines(read)
+ return read
+
+ else:
+ def send(self, input):
+ if not self.stdin:
+ return None
+
+ if not select.select([], [self.stdin], [], 0)[1]:
+ return 0
+
+ try:
+ written = os.write(self.stdin.fileno(), input)
+ except OSError as why:
+ if why[0] == errno.EPIPE: #broken pipe
+ return self._close('stdin')
+ raise
+
+ return written
+
+ def _recv(self, which, maxsize):
+ conn, maxsize = self.get_conn_maxsize(which, maxsize)
+ if conn is None:
+ return None
+
+ try:
+ flags = fcntl.fcntl(conn, fcntl.F_GETFL)
+ except TypeError:
+ flags = None
+ else:
+ if not conn.closed:
+ fcntl.fcntl(conn, fcntl.F_SETFL, flags| os.O_NONBLOCK)
+
+ try:
+ if not select.select([conn], [], [], 0)[0]:
+ return ''
+
+ r = conn.read(maxsize)
+ if not r:
+ return self._close(which)
+
+ #if self.universal_newlines:
+ # r = self._translate_newlines(r)
+ return r
+ finally:
+ if not conn.closed and not flags is None:
+ fcntl.fcntl(conn, fcntl.F_SETFL, flags)
+
+disconnect_message = "Other end disconnected!"
+
+def recv_some(p, t=.1, e=1, tr=5, stderr=0):
+ if tr < 1:
+ tr = 1
+ x = time.time()+t
+ y = []
+ r = ''
+ pr = p.recv
+ if stderr:
+ pr = p.recv_err
+ while time.time() < x or r:
+ r = pr()
+ if r is None:
+ if e:
+ raise Exception(disconnect_message)
+ else:
+ break
+ elif r:
+ y.append(r)
+ else:
+ time.sleep(max((x-time.time())/tr, 0))
+ return ''.join(y)
+
+def send_all(p, data):
+ data = memoryview(data)
+ while len(data):
+ sent = p.send(data)
+ if sent is None:
+ raise Exception(disconnect_message)
+ data = data[sent:]
+
+
+
+class TestCmd(object):
+ """Class TestCmd
+ """
+
+ def __init__(self, description = None,
+ program = None,
+ interpreter = None,
+ workdir = None,
+ subdir = None,
+ verbose = None,
+ match = None,
+ diff = None,
+ combine = 0,
+ universal_newlines = 1):
+ self._cwd = os.getcwd()
+ self.description_set(description)
+ self.program_set(program)
+ self.interpreter_set(interpreter)
+ if verbose is None:
+ try:
+ verbose = max( 0, int(os.environ.get('TESTCMD_VERBOSE', 0)) )
+ except ValueError:
+ verbose = 0
+ self.verbose_set(verbose)
+ self.combine = combine
+ self.universal_newlines = universal_newlines
+ if match is not None:
+ self.match_function = match
+ else:
+ self.match_function = match_re
+ if diff is not None:
+ self.diff_function = diff
+ else:
+ try:
+ difflib
+ except NameError:
+ pass
+ else:
+ self.diff_function = simple_diff
+ #self.diff_function = difflib.context_diff
+ #self.diff_function = difflib.unified_diff
+ self._dirlist = []
+ self._preserve = {'pass_test': 0, 'fail_test': 0, 'no_result': 0}
+        if 'PRESERVE' in os.environ and os.environ['PRESERVE'] != '':
+ self._preserve['pass_test'] = os.environ['PRESERVE']
+ self._preserve['fail_test'] = os.environ['PRESERVE']
+ self._preserve['no_result'] = os.environ['PRESERVE']
+ else:
+ try:
+ self._preserve['pass_test'] = os.environ['PRESERVE_PASS']
+ except KeyError:
+ pass
+ try:
+ self._preserve['fail_test'] = os.environ['PRESERVE_FAIL']
+ except KeyError:
+ pass
+ try:
+ self._preserve['no_result'] = os.environ['PRESERVE_NO_RESULT']
+ except KeyError:
+ pass
+ self._stdout = []
+ self._stderr = []
+ self.status = None
+ self.condition = 'no_result'
+ self.workdir_set(workdir)
+ self.subdir(subdir)
+
+ def __del__(self):
+ self.cleanup()
+
+ def __repr__(self):
+ return "%x" % id(self)
+
+ banner_char = '='
+ banner_width = 80
+
+ def banner(self, s, width=None):
+ if width is None:
+ width = self.banner_width
+ return s + self.banner_char * (width - len(s))
+
+ if os.name == 'posix':
+
+ def escape(self, arg):
+ "escape shell special characters"
+ slash = '\\'
+ special = '"$'
+
+ arg = arg.replace(slash, slash+slash)
+ for c in special:
+ arg = arg.replace(c, slash+c)
+
+ if re_space.search(arg):
+ arg = '"' + arg + '"'
+ return arg
+
+ else:
+
+ # Windows does not allow special characters in file names
+ # anyway, so no need for an escape function, we will just quote
+ # the arg.
+ def escape(self, arg):
+ if re_space.search(arg):
+ arg = '"' + arg + '"'
+ return arg
+
+ def canonicalize(self, path):
+ if is_List(path):
+ path = os.path.join(*path)
+ if not os.path.isabs(path):
+ path = os.path.join(self.workdir, path)
+ return path
+
+ def chmod(self, path, mode):
+ """Changes permissions on the specified file or directory
+ path name."""
+ path = self.canonicalize(path)
+ os.chmod(path, mode)
+
+ def cleanup(self, condition = None):
+ """Removes any temporary working directories for the specified
+ TestCmd environment. If the environment variable PRESERVE was
+ set when the TestCmd environment was created, temporary working
+ directories are not removed. If any of the environment variables
+ PRESERVE_PASS, PRESERVE_FAIL, or PRESERVE_NO_RESULT were set
+ when the TestCmd environment was created, then temporary working
+ directories are not removed if the test passed, failed, or had
+ no result, respectively. Temporary working directories are also
+ preserved for conditions specified via the preserve method.
+
+ Typically, this method is not called directly, but is used when
+ the script exits to clean up temporary working directories as
+ appropriate for the exit status.
+ """
+ if not self._dirlist:
+ return
+ os.chdir(self._cwd)
+ self.workdir = None
+ if condition is None:
+ condition = self.condition
+ if self._preserve[condition]:
+ for dir in self._dirlist:
+ print("Preserved directory", dir)
+ else:
+ list = self._dirlist[:]
+ list.reverse()
+ for dir in list:
+ self.writable(dir, 1)
+ shutil.rmtree(dir, ignore_errors = 1)
+ self._dirlist = []
+
+ try:
+ global _Cleanup
+ _Cleanup.remove(self)
+ except (AttributeError, ValueError):
+ pass
+
+ def command_args(self, program = None,
+ interpreter = None,
+ arguments = None):
+ if program:
+ if type(program) == type('') and not os.path.isabs(program):
+ program = os.path.join(self._cwd, program)
+ else:
+ program = self.program
+ if not interpreter:
+ interpreter = self.interpreter
+ if not type(program) in [type([]), type(())]:
+ program = [program]
+ cmd = list(program)
+ if interpreter:
+ if not type(interpreter) in [type([]), type(())]:
+ interpreter = [interpreter]
+ cmd = list(interpreter) + cmd
+ if arguments:
+ if type(arguments) == type(''):
+ arguments = arguments.split()
+ cmd.extend(arguments)
+ return cmd
+
+ def description_set(self, description):
+ """Set the description of the functionality being tested.
+ """
+ self.description = description
+
+ try:
+ difflib
+ except NameError:
+ def diff(self, a, b, name, *args, **kw):
+ print(self.banner('Expected %s' % name))
+ print(a)
+ print(self.banner('Actual %s' % name))
+ print(b)
+ else:
+ def diff(self, a, b, name, *args, **kw):
+ print(self.banner(name))
+ args = (a.splitlines(), b.splitlines()) + args
+ lines = self.diff_function(*args, **kw)
+ for l in lines:
+ print(l)
+
+ def fail_test(self, condition = 1, function = None, skip = 0):
+ """Cause the test to fail.
+ """
+ if not condition:
+ return
+ self.condition = 'fail_test'
+ fail_test(self = self,
+ condition = condition,
+ function = function,
+ skip = skip)
+
+ def interpreter_set(self, interpreter):
+ """Set the program to be used to interpret the program
+ under test as a script.
+ """
+ self.interpreter = interpreter
+
+ def match(self, lines, matches):
+ """Compare actual and expected file contents.
+ """
+ return self.match_function(lines, matches)
+
+ def match_exact(self, lines, matches):
+ """Compare actual and expected file contents.
+ """
+ return match_exact(lines, matches)
+
+ def match_re(self, lines, res):
+ """Compare actual and expected file contents.
+ """
+ return match_re(lines, res)
+
+ def match_re_dotall(self, lines, res):
+ """Compare actual and expected file contents.
+ """
+ return match_re_dotall(lines, res)
+
+ def no_result(self, condition = 1, function = None, skip = 0):
+ """Report that the test could not be run.
+ """
+ if not condition:
+ return
+ self.condition = 'no_result'
+ no_result(self = self,
+ condition = condition,
+ function = function,
+ skip = skip)
+
+ def pass_test(self, condition = 1, function = None):
+ """Cause the test to pass.
+ """
+ if not condition:
+ return
+ self.condition = 'pass_test'
+ pass_test(self = self, condition = condition, function = function)
+
+ def preserve(self, *conditions):
+ """Arrange for the temporary working directories for the
+ specified TestCmd environment to be preserved for one or more
+ conditions. If no conditions are specified, arranges for
+ the temporary working directories to be preserved for all
+ conditions.
+ """
+ if conditions is ():
+ conditions = ('pass_test', 'fail_test', 'no_result')
+ for cond in conditions:
+ self._preserve[cond] = 1
+
+ def program_set(self, program):
+ """Set the executable program or script to be tested.
+ """
+ if program and not os.path.isabs(program):
+ program = os.path.join(self._cwd, program)
+ self.program = program
+
+ def read(self, file, mode = 'r'):
+ """Reads and returns the contents of the specified file name.
+ The file name may be a list, in which case the elements are
+ concatenated with the os.path.join() method. The file is
+ assumed to be under the temporary working directory unless it
+ is an absolute path name. The I/O mode for the file may
+ be specified; it must begin with an 'r'. The default is
+ 'r' (string read).
+ """
+ file = self.canonicalize(file)
+ if mode[0] != 'r':
+ raise ValueError("mode must begin with 'r'")
+ with open(file, mode) as f:
+ result = f.read()
+ return result
+
+ def rmdir(self, dir):
+ """Removes the specified dir name.
+ The dir name may be a list, in which case the elements are
+ concatenated with the os.path.join() method. The dir is
+ assumed to be under the temporary working directory unless it
+ is an absolute path name.
+ The dir must be empty.
+ """
+ dir = self.canonicalize(dir)
+ os.rmdir(dir)
+
+ def start(self, program = None,
+ interpreter = None,
+ arguments = None,
+ universal_newlines = None,
+ **kw):
+ """
+ Starts a program or script for the test environment.
+
+ The specified program will have the original directory
+ prepended unless it is enclosed in a [list].
+ """
+ cmd = self.command_args(program, interpreter, arguments)
+ cmd_string = ' '.join(map(self.escape, cmd))
+ if self.verbose:
+ sys.stderr.write(cmd_string + "\n")
+ if universal_newlines is None:
+ universal_newlines = self.universal_newlines
+
+ # On Windows, if we make stdin a pipe when we plan to send
+ # no input, and the test program exits before
+ # Popen calls msvcrt.open_osfhandle, that call will fail.
+ # So don't use a pipe for stdin if we don't need one.
+ stdin = kw.get('stdin', None)
+ if stdin is not None:
+ stdin = subprocess.PIPE
+
+ combine = kw.get('combine', self.combine)
+ if combine:
+ stderr_value = subprocess.STDOUT
+ else:
+ stderr_value = subprocess.PIPE
+
+ return Popen(cmd,
+ stdin=stdin,
+ stdout=subprocess.PIPE,
+ stderr=stderr_value,
+ universal_newlines=universal_newlines)
+
+ def finish(self, popen, **kw):
+ """
+ Finishes and waits for the process being run under control of
+ the specified popen argument, recording the exit status,
+ standard output and error output.
+ """
+ popen.stdin.close()
+ self.status = popen.wait()
+ if not self.status:
+ self.status = 0
+ self._stdout.append(popen.stdout.read())
+ if popen.stderr:
+ stderr = popen.stderr.read()
+ else:
+ stderr = ''
+ self._stderr.append(stderr)
+
+ def run(self, program = None,
+ interpreter = None,
+ arguments = None,
+ chdir = None,
+ stdin = None,
+ universal_newlines = None):
+ """Runs a test of the program or script for the test
+ environment. Standard output and error output are saved for
+ future retrieval via the stdout() and stderr() methods.
+
+ The specified program will have the original directory
+ prepended unless it is enclosed in a [list].
+ """
+ if chdir:
+ oldcwd = os.getcwd()
+ if not os.path.isabs(chdir):
+ chdir = os.path.join(self.workpath(chdir))
+ if self.verbose:
+ sys.stderr.write("chdir(" + chdir + ")\n")
+ os.chdir(chdir)
+ p = self.start(program,
+ interpreter,
+ arguments,
+ universal_newlines,
+ stdin=stdin)
+ if stdin:
+ if is_List(stdin):
+ for line in stdin:
+ p.stdin.write(line)
+ else:
+ p.stdin.write(stdin)
+ p.stdin.close()
+
+ out = p.stdout.read()
+ if p.stderr is None:
+ err = ''
+ else:
+ err = p.stderr.read()
+ try:
+ close_output = p.close_output
+ except AttributeError:
+ p.stdout.close()
+ if not p.stderr is None:
+ p.stderr.close()
+ else:
+ close_output()
+
+ self._stdout.append(out)
+ self._stderr.append(err)
+
+ self.status = p.wait()
+ if not self.status:
+ self.status = 0
+
+ if chdir:
+ os.chdir(oldcwd)
+ if self.verbose >= 2:
+ write = sys.stdout.write
+ write('============ STATUS: %d\n' % self.status)
+ out = self.stdout()
+ if out or self.verbose >= 3:
+ write('============ BEGIN STDOUT (len=%d):\n' % len(out))
+ write(out)
+ write('============ END STDOUT\n')
+ err = self.stderr()
+ if err or self.verbose >= 3:
+ write('============ BEGIN STDERR (len=%d)\n' % len(err))
+ write(err)
+ write('============ END STDERR\n')
+
+ def sleep(self, seconds = default_sleep_seconds):
+ """Sleeps at least the specified number of seconds. If no
+ number is specified, sleeps at least the minimum number of
+ seconds necessary to advance file time stamps on the current
+ system. Sleeping more seconds is all right.
+ """
+ time.sleep(seconds)
+
+ def stderr(self, run = None):
+ """Returns the error output from the specified run number.
+ If there is no specified run number, then returns the error
+ output of the last run. If the run number is less than zero,
+ then returns the error output from that many runs back from the
+ current run.
+ """
+ if not run:
+ run = len(self._stderr)
+ elif run < 0:
+ run = len(self._stderr) + run
+ run = run - 1
+ return self._stderr[run]
+
+ def stdout(self, run = None):
+ """Returns the standard output from the specified run number.
+ If there is no specified run number, then returns the standard
+ output of the last run. If the run number is less than zero,
+ then returns the standard output from that many runs back from
+ the current run.
+ """
+ if not run:
+ run = len(self._stdout)
+ elif run < 0:
+ run = len(self._stdout) + run
+ run = run - 1
+ return self._stdout[run]
+
+ def subdir(self, *subdirs):
+ """Create new subdirectories under the temporary working
+ directory, one for each argument. An argument may be a list,
+ in which case the list elements are concatenated using the
+ os.path.join() method. Subdirectories multiple levels deep
+ must be created using a separate argument for each level:
+
+ test.subdir('sub', ['sub', 'dir'], ['sub', 'dir', 'ectory'])
+
+ Returns the number of subdirectories actually created.
+ """
+ count = 0
+ for sub in subdirs:
+ if sub is None:
+ continue
+ if is_List(sub):
+ sub = os.path.join(*sub)
+ new = os.path.join(self.workdir, sub)
+ try:
+ os.mkdir(new)
+ except OSError:
+ pass
+ else:
+ count = count + 1
+ return count
+
+ def symlink(self, target, link):
+ """Creates a symlink to the specified target.
+ The link name may be a list, in which case the elements are
+ concatenated with the os.path.join() method. The link is
+ assumed to be under the temporary working directory unless it
+ is an absolute path name. The target is *not* assumed to be
+ under the temporary working directory.
+ """
+ link = self.canonicalize(link)
+ os.symlink(target, link)
+
+ def tempdir(self, path=None):
+ """Creates a temporary directory.
+ A unique directory name is generated if no path name is specified.
+ The directory is created, and will be removed when the TestCmd
+ object is destroyed.
+ """
+ if path is None:
+ try:
+ path = tempfile.mktemp(prefix=tempfile.template)
+ except TypeError:
+ path = tempfile.mktemp()
+ os.mkdir(path)
+
+ # Symlinks in the path will report things
+ # differently from os.getcwd(), so chdir there
+ # and back to fetch the canonical path.
+ cwd = os.getcwd()
+ try:
+ os.chdir(path)
+ path = os.getcwd()
+ finally:
+ os.chdir(cwd)
+
+ # Uppercase the drive letter since the case of drive
+ # letters is pretty much random on win32:
+ drive,rest = os.path.splitdrive(path)
+ if drive:
+ path = drive.upper() + rest
+
+ #
+ self._dirlist.append(path)
+ global _Cleanup
+ try:
+ _Cleanup.index(self)
+ except ValueError:
+ _Cleanup.append(self)
+
+ return path
+
+ def touch(self, path, mtime=None):
+ """Updates the modification time on the specified file or
+ directory path name. The default is to update to the
+ current time if no explicit modification time is specified.
+ """
+ path = self.canonicalize(path)
+ atime = os.path.getatime(path)
+ if mtime is None:
+ mtime = time.time()
+ os.utime(path, (atime, mtime))
+
+ def unlink(self, file):
+ """Unlinks the specified file name.
+ The file name may be a list, in which case the elements are
+ concatenated with the os.path.join() method. The file is
+ assumed to be under the temporary working directory unless it
+ is an absolute path name.
+ """
+ file = self.canonicalize(file)
+ os.unlink(file)
+
+ def verbose_set(self, verbose):
+ """Set the verbose level.
+ """
+ self.verbose = verbose
+
+ def where_is(self, file, path=None, pathext=None):
+ """Find an executable file.
+ """
+ if is_List(file):
+ file = os.path.join(*file)
+ if not os.path.isabs(file):
+ file = where_is(file, path, pathext)
+ return file
+
+ def workdir_set(self, path):
+ """Creates a temporary working directory with the specified
+ path name. If the path is a null string (''), a unique
+ directory name is created.
+ """
+ if (path != None):
+ if path == '':
+ path = None
+ path = self.tempdir(path)
+ self.workdir = path
+
+ def workpath(self, *args):
+ """Returns the absolute path name to a subdirectory or file
+ within the current temporary working directory. Concatenates
+ the temporary working directory name with the specified
+ arguments using the os.path.join() method.
+ """
+ return os.path.join(self.workdir, *args)
+
+ def readable(self, top, read=1):
+ """Make the specified directory tree readable (read == 1)
+ or not (read == None).
+
+ This method has no effect on Windows systems, which use a
+ completely different mechanism to control file readability.
+ """
+
+ if sys.platform == 'win32':
+ return
+
+ if read:
+ def do_chmod(fname):
+ try: st = os.stat(fname)
+ except OSError: pass
+ else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IREAD))
+ else:
+ def do_chmod(fname):
+ try: st = os.stat(fname)
+ except OSError: pass
+ else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IREAD))
+
+ if os.path.isfile(top):
+ # If it's a file, that's easy, just chmod it.
+ do_chmod(top)
+ elif read:
+ # It's a directory and we're trying to turn on read
+ # permission, so it's also pretty easy, just chmod the
+ # directory and then chmod every entry on our walk down the
+ # tree. Because os.walk() is top-down, we'll enable
+ # read permission on any directories that have it disabled
+ # before os.walk() tries to list their contents.
+ do_chmod(top)
+
+ def chmod_entries(arg, dirname, names, do_chmod=do_chmod):
+ for n in names:
+ do_chmod(os.path.join(dirname, n))
+
+ os.walk(top, chmod_entries, None)
+ else:
+ # It's a directory and we're trying to turn off read
+            # permission, which means we have to chmod the directories
+ # in the tree bottom-up, lest disabling read permission from
+ # the top down get in the way of being able to get at lower
+ # parts of the tree. But os.walk() visits things top
+ # down, so we just use an object to collect a list of all
+ # of the entries in the tree, reverse the list, and then
+ # chmod the reversed (bottom-up) list.
+ col = Collector(top)
+ os.walk(top, col, None)
+ col.entries.reverse()
+ for d in col.entries: do_chmod(d)
+
+ def writable(self, top, write=1):
+ """Make the specified directory tree writable (write == 1)
+ or not (write == None).
+ """
+
+ if sys.platform == 'win32':
+
+ if write:
+ def do_chmod(fname):
+ try: os.chmod(fname, stat.S_IWRITE)
+ except OSError: pass
+ else:
+ def do_chmod(fname):
+ try: os.chmod(fname, stat.S_IREAD)
+ except OSError: pass
+
+ else:
+
+ if write:
+ def do_chmod(fname):
+ try: st = os.stat(fname)
+ except OSError: pass
+ else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|0o200))
+ else:
+ def do_chmod(fname):
+ try: st = os.stat(fname)
+ except OSError: pass
+ else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~0o200))
+
+ if os.path.isfile(top):
+ do_chmod(top)
+ else:
+ col = Collector(top)
+ os.walk(top, col, None)
+ for d in col.entries: do_chmod(d)
+
+ def executable(self, top, execute=1):
+ """Make the specified directory tree executable (execute == 1)
+ or not (execute == None).
+
+ This method has no effect on Windows systems, which use a
+ completely different mechanism to control file executability.
+ """
+
+ if sys.platform == 'win32':
+ return
+
+ if execute:
+ def do_chmod(fname):
+ try: st = os.stat(fname)
+ except OSError: pass
+ else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IEXEC))
+ else:
+ def do_chmod(fname):
+ try: st = os.stat(fname)
+ except OSError: pass
+ else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IEXEC))
+
+ if os.path.isfile(top):
+ # If it's a file, that's easy, just chmod it.
+ do_chmod(top)
+ elif execute:
+ # It's a directory and we're trying to turn on execute
+ # permission, so it's also pretty easy, just chmod the
+ # directory and then chmod every entry on our walk down the
+ # tree. Because os.walk() is top-down, we'll enable
+ # execute permission on any directories that have it disabled
+ # before os.walk() tries to list their contents.
+ do_chmod(top)
+
+            for dirpath, dirnames, filenames in os.walk(top):
+                for n in dirnames + filenames:
+                    do_chmod(os.path.join(dirpath, n))
+ else:
+ # It's a directory and we're trying to turn off execute
+ # permission, which means we have to chmod the directories
+ # in the tree bottom-up, lest disabling execute permission from
+ # the top down get in the way of being able to get at lower
+ # parts of the tree. But os.walk() visits things top
+            # down, so we first collect a list of all of the entries in
+            # the tree, reverse the list, and then chmod the reversed
+            # (bottom-up) list.
+            entries = [top]
+            for dirpath, dirnames, filenames in os.walk(top):
+                for n in dirnames + filenames:
+                    entries.append(os.path.join(dirpath, n))
+            entries.reverse()
+            for d in entries: do_chmod(d)
+
+ def write(self, file, content, mode = 'w'):
+ """Writes the specified content text (second argument) to the
+ specified file name (first argument). The file name may be
+ a list, in which case the elements are concatenated with the
+ os.path.join() method. The file is created under the temporary
+ working directory. Any subdirectories in the path must already
+ exist. The I/O mode for the file may be specified; it must
+        begin with a 'w'. The default is 'w' (text write).
+ """
+ file = self.canonicalize(file)
+ if mode[0] != 'w':
+ raise ValueError("mode must begin with 'w'")
+ with open(file, mode) as f:
+ f.write(content)
+
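+# Illustrative sketch (not part of the upstream module): how the helper
+# methods above are typically combined in a test script. The TestCmd
+# keyword arguments and the file names used here are assumptions made up
+# for the example.
+def _example_workdir_usage():
+    test = TestCmd(workdir='')
+    test.subdir('sub')                           # create a subdirectory
+    test.write(['sub', 'hello.txt'], "hello\n")  # path pieces are joined
+    test.writable(test.workpath('sub'), None)    # make the tree read-only
+    test.writable(test.workpath('sub'), 1)       # and writable again
+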
+# Local Variables:
+# tab-width:4
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=4 shiftwidth=4:
diff --git a/third_party/python/gyp/test/lib/TestCommon.py b/third_party/python/gyp/test/lib/TestCommon.py
new file mode 100644
index 0000000000..6850ce9ada
--- /dev/null
+++ b/third_party/python/gyp/test/lib/TestCommon.py
@@ -0,0 +1,591 @@
+# Copyright (c) 2017 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+TestCommon.py: a testing framework for commands and scripts
+ with commonly useful error handling
+
+The TestCommon module provides a simple, high-level interface for writing
+tests of executable commands and scripts, especially commands and scripts
+that interact with the file system. All methods throw exceptions and
+exit on failure, with useful error messages. This makes a number of
+explicit checks unnecessary, making the test scripts themselves simpler
+to write and easier to read.
+
+The TestCommon class is a subclass of the TestCmd class. In essence,
+TestCommon is a wrapper that handles common TestCmd error conditions in
+useful ways. You can use TestCommon directly, or subclass it for your
+program and add additional (or override) methods to tailor it to your
+program's specific needs. Alternatively, the TestCommon class serves
+as a useful example of how to define your own TestCmd subclass.
+
+As a subclass of TestCmd, TestCommon provides access to all of the
+variables and methods from the TestCmd module. Consequently, you can
+use any variable or method documented in the TestCmd module without
+having to explicitly import TestCmd.
+
+A TestCommon environment object is created via the usual invocation:
+
+ import TestCommon
+ test = TestCommon.TestCommon()
+
+You can use all of the TestCmd keyword arguments when instantiating a
+TestCommon object; see the TestCmd documentation for details.
+
+Here is an overview of the methods and keyword arguments that are
+provided by the TestCommon class:
+
+ test.must_be_writable('file1', ['file2', ...])
+
+ test.must_contain('file', 'required text\n')
+
+ test.must_contain_all_lines(output, lines, ['title', find])
+
+ test.must_contain_any_line(output, lines, ['title', find])
+
+ test.must_exist('file1', ['file2', ...])
+
+ test.must_match('file', "expected contents\n")
+
+ test.must_not_be_writable('file1', ['file2', ...])
+
+ test.must_not_contain('file', 'banned text\n')
+
+ test.must_not_contain_any_line(output, lines, ['title', find])
+
+ test.must_not_exist('file1', ['file2', ...])
+
+ test.run(options = "options to be prepended to arguments",
+ stdout = "expected standard output from the program",
+ stderr = "expected error output from the program",
+ status = expected_status,
+ match = match_function)
+
+The TestCommon module also provides the following variables
+
+ TestCommon.python_executable
+ TestCommon.exe_suffix
+ TestCommon.obj_suffix
+ TestCommon.shobj_prefix
+ TestCommon.shobj_suffix
+ TestCommon.lib_prefix
+ TestCommon.lib_suffix
+ TestCommon.dll_prefix
+ TestCommon.dll_suffix
+
+"""
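+
+# Illustrative sketch (not part of the upstream module): the shape of a
+# minimal test script built on TestCommon. The program name and the
+# expected output are assumptions made up for the example.
+#
+#     import TestCommon
+#     test = TestCommon.TestCommon(program='hello', workdir='')
+#     test.run(arguments='world', stdout="hello world\n", status=0)
+#     test.must_not_exist('core')
+#     test.pass_test()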
+
+# Copyright 2000-2010 Steven Knight
+# This module is free software, and you may redistribute it and/or modify
+# it under the same terms as Python itself, so long as this copyright message
+# and disclaimer are retained in their original form.
+#
+# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
+# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
+# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+# DAMAGE.
+#
+# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+# PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
+# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
+# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+from __future__ import print_function
+
+__author__ = "Steven Knight <knight at baldmt dot com>"
+__revision__ = "TestCommon.py 0.37.D001 2010/01/11 16:55:50 knight"
+__version__ = "0.37"
+
+import copy
+import os
+import os.path
+import stat
+import sys
+try:
+ from UserList import UserList
+except ImportError:
+ from collections import UserList
+
+from TestCmd import *
+from TestCmd import __all__
+
+__all__.extend([ 'TestCommon',
+ 'exe_suffix',
+ 'obj_suffix',
+ 'shobj_prefix',
+ 'shobj_suffix',
+ 'lib_prefix',
+ 'lib_suffix',
+ 'dll_prefix',
+ 'dll_suffix',
+ ])
+
+# Variables that describe the prefixes and suffixes on this system.
+if sys.platform == 'win32':
+ exe_suffix = '.exe'
+ obj_suffix = '.obj'
+ shobj_suffix = '.obj'
+ shobj_prefix = ''
+ lib_prefix = ''
+ lib_suffix = '.lib'
+ dll_prefix = ''
+ dll_suffix = '.dll'
+ module_prefix = ''
+ module_suffix = '.dll'
+elif sys.platform == 'cygwin':
+ exe_suffix = '.exe'
+ obj_suffix = '.o'
+ shobj_suffix = '.os'
+ shobj_prefix = ''
+ lib_prefix = 'lib'
+ lib_suffix = '.a'
+ dll_prefix = ''
+ dll_suffix = '.dll'
+ module_prefix = ''
+ module_suffix = '.dll'
+elif sys.platform.find('irix') != -1:
+ exe_suffix = ''
+ obj_suffix = '.o'
+ shobj_suffix = '.o'
+ shobj_prefix = ''
+ lib_prefix = 'lib'
+ lib_suffix = '.a'
+ dll_prefix = 'lib'
+ dll_suffix = '.so'
+ module_prefix = 'lib'
+    module_suffix = '.so'
+elif sys.platform.find('darwin') != -1:
+ exe_suffix = ''
+ obj_suffix = '.o'
+ shobj_suffix = '.os'
+ shobj_prefix = ''
+ lib_prefix = 'lib'
+ lib_suffix = '.a'
+ dll_prefix = 'lib'
+ dll_suffix = '.dylib'
+ module_prefix = ''
+ module_suffix = '.so'
+elif sys.platform.find('sunos') != -1:
+ exe_suffix = ''
+ obj_suffix = '.o'
+ shobj_suffix = '.os'
+ shobj_prefix = 'so_'
+ lib_prefix = 'lib'
+ lib_suffix = '.a'
+ dll_prefix = 'lib'
+    dll_suffix = '.so'
+ module_prefix = ''
+ module_suffix = '.so'
+else:
+ exe_suffix = ''
+ obj_suffix = '.o'
+ shobj_suffix = '.os'
+ shobj_prefix = ''
+ lib_prefix = 'lib'
+ lib_suffix = '.a'
+ dll_prefix = 'lib'
+ dll_suffix = '.so'
+ module_prefix = 'lib'
+ module_suffix = '.so'
+
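+# Illustrative sketch (not part of the upstream module): composing
+# platform-specific file names from the variables above. On a typical
+# Linux host this returns 'foo.o', 'libfoo.a' and 'libfoo.so'; the base
+# name 'foo' is an assumption made up for the example.
+def _example_platform_names(base='foo'):
+    return {
+        'object':     base + obj_suffix,
+        'static_lib': lib_prefix + base + lib_suffix,
+        'shared_lib': dll_prefix + base + dll_suffix,
+    }
+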
+def is_List(e):
+ return type(e) is list \
+ or isinstance(e, UserList)
+
+def is_writable(f):
+ mode = os.stat(f)[stat.ST_MODE]
+ return mode & stat.S_IWUSR
+
+def separate_files(flist):
+ existing = []
+ missing = []
+ for f in flist:
+ if os.path.exists(f):
+ existing.append(f)
+ else:
+ missing.append(f)
+ return existing, missing
+
+def _failed(self, status = 0):
+ if self.status is None or status is None:
+ return None
+ try:
+ return _status(self) not in status
+ except TypeError:
+ # status wasn't an iterable
+ return _status(self) != status
+
+def _status(self):
+ return self.status
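+
+# Illustrative sketch (not part of the upstream module): the status value
+# checked through _failed() may be a single exit status, a sequence of
+# acceptable statuses, or None to skip the check. The 'test' argument is
+# a hypothetical TestCommon instance.
+def _example_status_checks(test):
+    test.run(status=0)        # must exit 0
+    test.run(status=(0, 2))   # either 0 or 2 is acceptable
+    test.run(status=None)     # do not check the exit status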
+
+class TestCommon(TestCmd):
+
+ # Additional methods from the Perl Test::Cmd::Common module
+ # that we may wish to add in the future:
+ #
+ # $test->subdir('subdir', ...);
+ #
+ # $test->copy('src_file', 'dst_file');
+
+ def __init__(self, **kw):
+ """Initialize a new TestCommon instance. This involves just
+ calling the base class initialization, and then changing directory
+ to the workdir.
+ """
+ TestCmd.__init__(self, **kw)
+ os.chdir(self.workdir)
+
+ def must_be_writable(self, *files):
+ """Ensures that the specified file(s) exist and are writable.
+ An individual file can be specified as a list of directory names,
+ in which case the pathname will be constructed by concatenating
+ them. Exits FAILED if any of the files does not exist or is
+ not writable.
+ """
+ files = map((lambda x: os.path.join(*x) if is_List(x) else x), files)
+ existing, missing = separate_files(files)
+ unwritable = [x for x in existing if not is_writable(x)]
+ if missing:
+ print("Missing files: `%s'" % "', `".join(missing))
+ if unwritable:
+ print("Unwritable files: `%s'" % "', `".join(unwritable))
+ self.fail_test(missing + unwritable)
+
+ def must_contain(self, file, required, mode = 'r'):
+ """Ensures that the specified file contains the required text.
+ """
+ file_contents = self.read(file, mode)
+ contains = (file_contents.find(required) != -1)
+ if not contains:
+ print("File `%s' does not contain required string." % file)
+ print(self.banner('Required string '))
+ print(required)
+ print(self.banner('%s contents ' % file))
+ print(file_contents)
+ self.fail_test(not contains)
+
+ def must_contain_all_lines(self, output, lines, title=None, find=None):
+ """Ensures that the specified output string (first argument)
+ contains all of the specified lines (second argument).
+
+ An optional third argument can be used to describe the type
+ of output being searched, and only shows up in failure output.
+
+ An optional fourth argument can be used to supply a different
+        function, of the form "find(output, line)", to use when searching
+ for lines in the output.
+ """
+ if find is None:
+ find = lambda o, l: o.find(l) != -1
+ missing = []
+ for line in lines:
+ if not find(output, line):
+ missing.append(line)
+
+ if missing:
+ if title is None:
+ title = 'output'
+ sys.stdout.write("Missing expected lines from %s:\n" % title)
+ for line in missing:
+ sys.stdout.write(' ' + repr(line) + '\n')
+ sys.stdout.write(self.banner(title + ' '))
+ sys.stdout.write(output)
+ self.fail_test()
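+
+    # Illustrative usage sketch (not part of the upstream module): a custom
+    # 'find' callable that treats each expected line as a regular expression;
+    # the pattern shown is made up for the example.
+    #
+    #     find_re = lambda output, line: re.search(line, output) is not None
+    #     test.must_contain_all_lines(test.stdout(), [r'warning: .*'],
+    #                                 find=find_re)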
+
+ def must_contain_any_line(self, output, lines, title=None, find=None):
+ """Ensures that the specified output string (first argument)
+ contains at least one of the specified lines (second argument).
+
+ An optional third argument can be used to describe the type
+ of output being searched, and only shows up in failure output.
+
+ An optional fourth argument can be used to supply a different
+        function, of the form "find(output, line)", to use when searching
+ for lines in the output.
+ """
+ if find is None:
+ find = lambda o, l: o.find(l) != -1
+ for line in lines:
+ if find(output, line):
+ return
+
+ if title is None:
+ title = 'output'
+ sys.stdout.write("Missing any expected line from %s:\n" % title)
+ for line in lines:
+ sys.stdout.write(' ' + repr(line) + '\n')
+ sys.stdout.write(self.banner(title + ' '))
+ sys.stdout.write(output)
+ self.fail_test()
+
+ def must_contain_lines(self, lines, output, title=None):
+ # Deprecated; retain for backwards compatibility.
+ return self.must_contain_all_lines(output, lines, title)
+
+ def must_exist(self, *files):
+ """Ensures that the specified file(s) must exist. An individual
+        file may be specified as a list of directory names, in which case the
+ pathname will be constructed by concatenating them. Exits FAILED
+ if any of the files does not exist.
+ """
+ files = map((lambda x: os.path.join(*x) if is_List(x) else x), files)
+ missing = [f for f in files if not os.path.exists(f)]
+ if missing:
+ print("Missing files: `%s'" % "', `".join(missing))
+ self.fail_test(missing)
+
+ def must_match(self, file, expect, mode = 'r'):
+ """Matches the contents of the specified file (first argument)
+ against the expected contents (second argument). The expected
+ contents are a list of lines or a string which will be split
+ on newlines.
+ """
+ file_contents = self.read(file, mode)
+ try:
+ self.fail_test(not self.match(file_contents, expect))
+ except KeyboardInterrupt:
+ raise
+ except:
+ print("Unexpected contents of `%s'" % file)
+ self.diff(expect, file_contents, 'contents ')
+ raise
+
+ def must_not_contain(self, file, banned, mode = 'r'):
+ """Ensures that the specified file doesn't contain the banned text.
+ """
+ file_contents = self.read(file, mode)
+ contains = (file_contents.find(banned) != -1)
+ if contains:
+ print("File `%s' contains banned string." % file)
+ print(self.banner('Banned string '))
+ print(banned)
+ print(self.banner('%s contents ' % file))
+ print(file_contents)
+ self.fail_test(contains)
+
+ def must_not_contain_any_line(self, output, lines, title=None, find=None):
+ """Ensures that the specified output string (first argument)
+ does not contain any of the specified lines (second argument).
+
+ An optional third argument can be used to describe the type
+ of output being searched, and only shows up in failure output.
+
+ An optional fourth argument can be used to supply a different
+        function, of the form "find(output, line)", to use when searching
+ for lines in the output.
+ """
+ if find is None:
+ find = lambda o, l: o.find(l) != -1
+ unexpected = []
+ for line in lines:
+ if find(output, line):
+ unexpected.append(line)
+
+ if unexpected:
+ if title is None:
+ title = 'output'
+ sys.stdout.write("Unexpected lines in %s:\n" % title)
+ for line in unexpected:
+ sys.stdout.write(' ' + repr(line) + '\n')
+ sys.stdout.write(self.banner(title + ' '))
+ sys.stdout.write(output)
+ self.fail_test()
+
+ def must_not_contain_lines(self, lines, output, title=None):
+ return self.must_not_contain_any_line(output, lines, title)
+
+ def must_not_exist(self, *files):
+ """Ensures that the specified file(s) must not exist.
+        An individual file may be specified as a list of directory names, in
+ which case the pathname will be constructed by concatenating them.
+ Exits FAILED if any of the files exists.
+ """
+ files = map((lambda x: os.path.join(*x) if is_List(x) else x), files)
+ existing = [f for f in files if os.path.exists(f)]
+ if existing:
+ print("Unexpected files exist: `%s'" % "', `".join(existing))
+ self.fail_test(existing)
+
+ def must_not_be_writable(self, *files):
+ """Ensures that the specified file(s) exist and are not writable.
+ An individual file can be specified as a list of directory names,
+ in which case the pathname will be constructed by concatenating
+ them. Exits FAILED if any of the files does not exist or is
+ writable.
+ """
+ files = map((lambda x: os.path.join(*x) if is_List(x) else x), files)
+ existing, missing = separate_files(files)
+ writable = [x for x in existing if is_writable(x)]
+ if missing:
+ print("Missing files: `%s'" % "', `".join(missing))
+ if writable:
+ print("Writable files: `%s'" % "', `".join(writable))
+ self.fail_test(missing + writable)
+
+ def _complete(self, actual_stdout, expected_stdout,
+ actual_stderr, expected_stderr, status, match):
+ """
+ Post-processes running a subcommand, checking for failure
+ status and displaying output appropriately.
+ """
+ if _failed(self, status):
+ expect = ''
+ if status != 0:
+ expect = " (expected %s)" % str(status)
+ print("%s returned %s%s" % (self.program, str(_status(self)),
+ expect))
+ print(self.banner('STDOUT '))
+ print(actual_stdout)
+ print(self.banner('STDERR '))
+ print(actual_stderr)
+ self.fail_test()
+        if expected_stdout is not None and not match(actual_stdout,
+                                                     expected_stdout):
+ self.diff(expected_stdout, actual_stdout, 'STDOUT ')
+ if actual_stderr:
+ print(self.banner('STDERR '))
+ print(actual_stderr)
+ self.fail_test()
+        if expected_stderr is not None and not match(actual_stderr,
+                                                     expected_stderr):
+ print(self.banner('STDOUT '))
+ print(actual_stdout)
+ self.diff(expected_stderr, actual_stderr, 'STDERR ')
+ self.fail_test()
+
+ def start(self, program = None,
+ interpreter = None,
+ arguments = None,
+ universal_newlines = None,
+ **kw):
+ """
+ Starts a program or script for the test environment.
+
+ This handles the "options" keyword argument and exceptions.
+ """
+ options = kw.pop('options', None)
+ if options:
+ if arguments is None:
+ arguments = options
+ else:
+ arguments = options + " " + arguments
+
+ try:
+ return TestCmd.start(self, program, interpreter, arguments,
+ universal_newlines, **kw)
+ except KeyboardInterrupt:
+ raise
+ except Exception as e:
+ print(self.banner('STDOUT '))
+ try:
+ print(self.stdout())
+ except IndexError:
+ pass
+ print(self.banner('STDERR '))
+ try:
+ print(self.stderr())
+ except IndexError:
+ pass
+ cmd_args = self.command_args(program, interpreter, arguments)
+ sys.stderr.write('Exception trying to execute: %s\n' % cmd_args)
+ raise e
+
+ def finish(self, popen, stdout = None, stderr = '', status = 0, **kw):
+ """
+ Finishes and waits for the process being run under control of
+ the specified popen argument. Additional arguments are similar
+ to those of the run() method:
+
+ stdout The expected standard output from
+ the command. A value of None means
+ don't test standard output.
+
+ stderr The expected error output from
+ the command. A value of None means
+ don't test error output.
+
+ status The expected exit status from the
+ command. A value of None means don't
+ test exit status.
+ """
+ TestCmd.finish(self, popen, **kw)
+ match = kw.get('match', self.match)
+ self._complete(self.stdout(), stdout,
+ self.stderr(), stderr, status, match)
+
+ def run(self, options = None, arguments = None,
+ stdout = None, stderr = '', status = 0, **kw):
+ """Runs the program under test, checking that the test succeeded.
+
+ The arguments are the same as the base TestCmd.run() method,
+ with the addition of:
+
+ options Extra options that get appended to the beginning
+ of the arguments.
+
+ stdout The expected standard output from
+ the command. A value of None means
+ don't test standard output.
+
+ stderr The expected error output from
+ the command. A value of None means
+ don't test error output.
+
+ status The expected exit status from the
+ command. A value of None means don't
+ test exit status.
+
+ By default, this expects a successful exit (status = 0), does
+ not test standard output (stdout = None), and expects that error
+ output is empty (stderr = "").
+ """
+ if options:
+ if arguments is None:
+ arguments = options
+ else:
+ arguments = options + " " + arguments
+ kw['arguments'] = arguments
+ match = kw.pop('match', self.match)
+ TestCmd.run(self, **kw)
+ self._complete(self.stdout(), stdout,
+ self.stderr(), stderr, status, match)
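+
+    # Illustrative usage sketch (not part of the upstream module): the
+    # option string, argument and expected output are made up for the
+    # example.
+    #
+    #     test.run(options='--verbose', arguments='input.txt',
+    #              stdout='processed input.txt\n', stderr='', status=0)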
+
+ def skip_test(self, message="Skipping test.\n"):
+ """Skips a test.
+
+ Proper test-skipping behavior is dependent on the external
+ TESTCOMMON_PASS_SKIPS environment variable. If set, we treat
+ the skip as a PASS (exit 0), and otherwise treat it as NO RESULT.
+ In either case, we print the specified message as an indication
+ that the substance of the test was skipped.
+
+ (This was originally added to support development under Aegis.
+ Technically, skipping a test is a NO RESULT, but Aegis would
+ treat that as a test failure and prevent the change from going to
+        the next step. Since we didn't want to force anyone using Aegis
+ to have to install absolutely every tool used by the tests, we
+ would actually report to Aegis that a skipped test has PASSED
+ so that the workflow isn't held up.)
+ """
+ if message:
+ sys.stdout.write(message)
+ sys.stdout.flush()
+ pass_skips = os.environ.get('TESTCOMMON_PASS_SKIPS')
+ if pass_skips in [None, 0, '0']:
+ # skip=1 means skip this function when showing where this
+ # result came from. They only care about the line where the
+ # script called test.skip_test(), not the line number where
+ # we call test.no_result().
+ self.no_result(skip=1)
+ else:
+ # We're under the development directory for this change,
+ # so this is an Aegis invocation; pass the test (exit 0).
+ self.pass_test()
+
+# Local Variables:
+# tab-width:4
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=4 shiftwidth=4:
diff --git a/third_party/python/gyp/test/lib/TestGyp.py b/third_party/python/gyp/test/lib/TestGyp.py
new file mode 100644
index 0000000000..cba2d3ccbc
--- /dev/null
+++ b/third_party/python/gyp/test/lib/TestGyp.py
@@ -0,0 +1,1259 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+TestGyp.py: a testing framework for GYP integration tests.
+"""
+from __future__ import print_function
+
+import collections
+import errno
+import itertools
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from contextlib import contextmanager
+
+import TestCmd
+import TestCommon
+from TestCommon import __all__
+
+__all__.extend([
+ 'TestGyp',
+])
+
+
+def remove_debug_line_numbers(contents):
+ """Function to remove the line numbers from the debug output
+ of gyp and thus reduce the extreme fragility of the stdout
+ comparison tests.
+ """
+ lines = contents.splitlines()
+ # split each line on ":"
+ lines = [l.split(":", 3) for l in lines]
+  # join each line back together, keeping only the text that follows
+  # the third colon (which drops the line number)
+  lines = [":".join(l[3:]) if len(l) > 3 else ":".join(l) for l in lines]
+ return "\n".join(lines)
+
+
+def match_modulo_line_numbers(contents_a, contents_b):
+ """File contents matcher that ignores line numbers."""
+ contents_a = remove_debug_line_numbers(contents_a)
+ contents_b = remove_debug_line_numbers(contents_b)
+ return TestCommon.match_exact(contents_a, contents_b)
+
+
+@contextmanager
+def LocalEnv(local_env):
+ """Context manager to provide a local OS environment."""
+ old_env = os.environ.copy()
+ os.environ.update(local_env)
+ try:
+ yield
+ finally:
+ os.environ.clear()
+ os.environ.update(old_env)
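+
+
+# Illustrative sketch (not part of the upstream module): temporarily
+# overriding environment variables around a gyp invocation. The 'test'
+# argument is a hypothetical TestGyp instance and the GYP_DEFINES value
+# is made up for the example.
+def _example_local_env(test):
+  with LocalEnv({'GYP_DEFINES': 'component=shared_library'}):
+    test.run_gyp('example.gyp')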
+
+
+class TestGypBase(TestCommon.TestCommon):
+ """
+ Class for controlling end-to-end tests of gyp generators.
+
+ Instantiating this class will create a temporary directory and
+ arrange for its destruction (via the TestCmd superclass) and
+ copy all of the non-gyptest files in the directory hierarchy of the
+ executing script.
+
+ The default behavior is to test the 'gyp' or 'gyp.bat' file in the
+ current directory. An alternative may be specified explicitly on
+ instantiation, or by setting the TESTGYP_GYP environment variable.
+
+ This class should be subclassed for each supported gyp generator
+ (format). Various abstract methods below define calling signatures
+ used by the test scripts to invoke builds on the generated build
+ configuration and to run executables generated by those builds.
+ """
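+
+  # Illustrative usage sketch (not part of the upstream module): the shape
+  # of a typical gyptest-*.py script driving this class. The file and
+  # target names are made up for the example.
+  #
+  #     import TestGyp
+  #     test = TestGyp.TestGyp()
+  #     test.run_gyp('hello.gyp')
+  #     test.build('hello.gyp', test.ALL)
+  #     test.run_built_executable('hello', stdout='Hello, world!\n')
+  #     test.pass_test()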
+
+ formats = []
+ build_tool = None
+ build_tool_list = []
+
+ _exe = TestCommon.exe_suffix
+ _obj = TestCommon.obj_suffix
+ shobj_ = TestCommon.shobj_prefix
+ _shobj = TestCommon.shobj_suffix
+ lib_ = TestCommon.lib_prefix
+ _lib = TestCommon.lib_suffix
+ dll_ = TestCommon.dll_prefix
+ _dll = TestCommon.dll_suffix
+ module_ = TestCommon.module_prefix
+ _module = TestCommon.module_suffix
+
+ # Constants to represent different targets.
+ ALL = '__all__'
+ DEFAULT = '__default__'
+
+ # Constants for different target types.
+ EXECUTABLE = '__executable__'
+ STATIC_LIB = '__static_lib__'
+ SHARED_LIB = '__shared_lib__'
+ LOADABLE_MODULE = '__loadable_module__'
+
+ def __init__(self, gyp=None, *args, **kw):
+ self.origin_cwd = os.path.abspath(os.path.dirname(sys.argv[0]))
+ self.extra_args = sys.argv[1:]
+
+ if not gyp:
+ gyp = os.environ.get('TESTGYP_GYP')
+ if not gyp:
+ if sys.platform == 'win32':
+ gyp = 'gyp.bat'
+ else:
+ gyp = 'gyp'
+ self.gyp = os.path.abspath(gyp)
+ self.no_parallel = False
+
+ self.formats = [self.format]
+
+ self.initialize_build_tool()
+
+ kw.setdefault('match', TestCommon.match_exact)
+
+ # Put test output in out/testworkarea by default.
+ # Use temporary names so there are no collisions.
+ workdir = os.path.join('out', kw.get('workdir', 'testworkarea'))
+ # Create work area if it doesn't already exist.
+ if not os.path.isdir(workdir):
+ os.makedirs(workdir)
+
+ kw['workdir'] = tempfile.mktemp(prefix='testgyp.', dir=workdir)
+
+ formats = kw.pop('formats', [])
+
+ super(TestGypBase, self).__init__(*args, **kw)
+
+ real_format = self.format.split('-')[-1]
+ excluded_formats = set([f for f in formats if f[0] == '!'])
+ included_formats = set(formats) - excluded_formats
+ if ('!'+real_format in excluded_formats or
+ included_formats and real_format not in included_formats):
+ msg = 'Invalid test for %r format; skipping test.\n'
+ self.skip_test(msg % self.format)
+
+ self.copy_test_configuration(self.origin_cwd, self.workdir)
+ self.set_configuration(None)
+
+ # Set $HOME so that gyp doesn't read the user's actual
+ # ~/.gyp/include.gypi file, which may contain variables
+ # and other settings that would change the output.
+ os.environ['HOME'] = self.workpath()
+ # Clear $GYP_DEFINES for the same reason.
+ if 'GYP_DEFINES' in os.environ:
+ del os.environ['GYP_DEFINES']
+ # Override the user's language settings, which could
+ # otherwise make the output vary from what is expected.
+ os.environ['LC_ALL'] = 'C'
+
+ def built_file_must_exist(self, name, type=None, **kw):
+ """
+ Fails the test if the specified built file name does not exist.
+ """
+ return self.must_exist(self.built_file_path(name, type, **kw))
+
+ def built_file_must_not_exist(self, name, type=None, **kw):
+ """
+ Fails the test if the specified built file name exists.
+ """
+ return self.must_not_exist(self.built_file_path(name, type, **kw))
+
+ def built_file_must_match(self, name, contents, **kw):
+ """
+ Fails the test if the contents of the specified built file name
+ do not match the specified contents.
+ """
+ return self.must_match(self.built_file_path(name, **kw), contents)
+
+ def built_file_must_not_match(self, name, contents, **kw):
+ """
+ Fails the test if the contents of the specified built file name
+ match the specified contents.
+ """
+ return self.must_not_match(self.built_file_path(name, **kw), contents)
+
+ def built_file_must_not_contain(self, name, contents, **kw):
+ """
+ Fails the test if the specified built file name contains the specified
+ contents.
+ """
+ return self.must_not_contain(self.built_file_path(name, **kw), contents)
+
+ def copy_test_configuration(self, source_dir, dest_dir):
+ """
+ Copies the test configuration from the specified source_dir
+ (the directory in which the test script lives) to the
+ specified dest_dir (a temporary working directory).
+
+ This ignores all files and directories that begin with
+ the string 'gyptest', and all '.svn' subdirectories.
+ """
+ for root, dirs, files in os.walk(source_dir):
+ if '.svn' in dirs:
+ dirs.remove('.svn')
+ dirs = [ d for d in dirs if not d.startswith('gyptest') ]
+ files = [ f for f in files if not f.startswith('gyptest') ]
+ for dirname in dirs:
+ source = os.path.join(root, dirname)
+ destination = source.replace(source_dir, dest_dir)
+ os.mkdir(destination)
+ if sys.platform != 'win32':
+ shutil.copystat(source, destination)
+ for filename in files:
+ source = os.path.join(root, filename)
+ destination = source.replace(source_dir, dest_dir)
+ shutil.copy2(source, destination)
+
+    # The gyp tests are run with HOME pointing to |dest_dir| to provide a
+    # hermetic environment. Symlink login.keychain and the 'Provisioning
+    # Profiles' folder to allow codesign to access the data required for
+ # signing binaries.
+ if sys.platform == 'darwin':
+ old_keychain = GetDefaultKeychainPath()
+ old_provisioning_profiles = os.path.join(
+ os.environ['HOME'], 'Library', 'MobileDevice',
+ 'Provisioning Profiles')
+
+ new_keychain = os.path.join(dest_dir, 'Library', 'Keychains')
+ MakeDirs(new_keychain)
+ os.symlink(old_keychain, os.path.join(new_keychain, 'login.keychain'))
+
+ if os.path.exists(old_provisioning_profiles):
+ new_provisioning_profiles = os.path.join(
+ dest_dir, 'Library', 'MobileDevice')
+ MakeDirs(new_provisioning_profiles)
+ os.symlink(old_provisioning_profiles,
+ os.path.join(new_provisioning_profiles, 'Provisioning Profiles'))
+
+ def initialize_build_tool(self):
+ """
+ Initializes the .build_tool attribute.
+
+ Searches the .build_tool_list for an executable name on the user's
+ $PATH. The first tool on the list is used as-is if nothing is found
+ on the current $PATH.
+ """
+ for build_tool in self.build_tool_list:
+ if not build_tool:
+ continue
+ if os.path.isabs(build_tool):
+ self.build_tool = build_tool
+ return
+ build_tool = self.where_is(build_tool)
+ if build_tool:
+ self.build_tool = build_tool
+ return
+
+ if self.build_tool_list:
+ self.build_tool = self.build_tool_list[0]
+
+ def relocate(self, source, destination):
+ """
+ Renames (relocates) the specified source (usually a directory)
+ to the specified destination, creating the destination directory
+ first if necessary.
+
+ Note: Don't use this as a generic "rename" operation. In the
+ future, "relocating" parts of a GYP tree may affect the state of
+ the test to modify the behavior of later method calls.
+ """
+ destination_dir = os.path.dirname(destination)
+ if not os.path.exists(destination_dir):
+ self.subdir(destination_dir)
+ os.rename(source, destination)
+
+ def report_not_up_to_date(self):
+ """
+ Reports that a build is not up-to-date.
+
+ This provides common reporting for formats that have complicated
+ conditions for checking whether a build is up-to-date. Formats
+ that expect exact output from the command (make) can
+ just set stdout= when they call the run_build() method.
+    just set stdout= when they call the build() method.
+ print("Build is not up-to-date:")
+ print(self.banner('STDOUT '))
+ print(self.stdout())
+ stderr = self.stderr()
+ if stderr:
+ print(self.banner('STDERR '))
+ print(stderr)
+
+ def run_gyp(self, gyp_file, *args, **kw):
+ """
+ Runs gyp against the specified gyp_file with the specified args.
+ """
+
+    # When running gyp and comparing its output, we use a comparator
+ # that ignores the line numbers that gyp logs in its debug output.
+ if kw.pop('ignore_line_numbers', False):
+ kw.setdefault('match', match_modulo_line_numbers)
+
+ # TODO: --depth=. works around Chromium-specific tree climbing.
+ depth = kw.pop('depth', '.')
+ run_args = ['--depth='+depth]
+ run_args.extend(['--format='+f for f in self.formats])
+ run_args.append(gyp_file)
+ if self.no_parallel:
+ run_args += ['--no-parallel']
+ # TODO: if extra_args contains a '--build' flag
+ # we really want that to only apply to the last format (self.format).
+ run_args.extend(self.extra_args)
+ # Default xcode_ninja_target_pattern to ^.*$ to fix xcode-ninja tests
+ xcode_ninja_target_pattern = kw.pop('xcode_ninja_target_pattern', '.*')
+    if isinstance(self, TestGypXcodeNinja):
+ run_args.extend(
+ ['-G', 'xcode_ninja_target_pattern=%s' % xcode_ninja_target_pattern])
+ run_args.extend(args)
+ return self.run(program=self.gyp, arguments=run_args, **kw)
+
+ def run(self, *args, **kw):
+ """
+ Executes a program by calling the superclass .run() method.
+
+ This exists to provide a common place to filter out keyword
+ arguments implemented in this layer, without having to update
+ the tool-specific subclasses or clutter the tests themselves
+ with platform-specific code.
+ """
+ if 'SYMROOT' in kw:
+ del kw['SYMROOT']
+ super(TestGypBase, self).run(*args, **kw)
+
+ def set_configuration(self, configuration):
+ """
+ Sets the configuration, to be used for invoking the build
+ tool and testing potential built output.
+ """
+ self.configuration = configuration
+
+ def configuration_dirname(self):
+ if self.configuration:
+ return self.configuration.split('|')[0]
+ else:
+ return 'Default'
+
+ def configuration_buildname(self):
+ if self.configuration:
+ return self.configuration
+ else:
+ return 'Default'
+
+ #
+ # Abstract methods to be defined by format-specific subclasses.
+ #
+
+ def build(self, gyp_file, target=None, **kw):
+ """
+ Runs a build of the specified target against the configuration
+ generated from the specified gyp_file.
+
+ A 'target' argument of None or the special value TestGyp.DEFAULT
+ specifies the default argument for the underlying build tool.
+ A 'target' argument of TestGyp.ALL specifies the 'all' target
+ (if any) of the underlying build tool.
+ """
+ raise NotImplementedError
+
+ def built_file_path(self, name, type=None, **kw):
+ """
+ Returns a path to the specified file name, of the specified type.
+ """
+ raise NotImplementedError
+
+ def built_file_basename(self, name, type=None, **kw):
+ """
+ Returns the base name of the specified file name, of the specified type.
+
+ A bare=True keyword argument specifies that prefixes and suffixes shouldn't
+ be applied.
+ """
+ if not kw.get('bare'):
+ if type == self.EXECUTABLE:
+ name = name + self._exe
+ elif type == self.STATIC_LIB:
+ name = self.lib_ + name + self._lib
+ elif type == self.SHARED_LIB:
+ name = self.dll_ + name + self._dll
+ elif type == self.LOADABLE_MODULE:
+ name = self.module_ + name + self._module
+ return name
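+
+  # Illustrative usage sketch (not part of the upstream module); the base
+  # name 'foo' is made up for the example:
+  #
+  #     test.built_file_basename('foo', test.SHARED_LIB)
+  #     # -> 'libfoo.so' on Linux, 'foo.dll' on Windows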
+
+ def run_built_executable(self, name, *args, **kw):
+ """
+ Runs an executable program built from a gyp-generated configuration.
+
+ The specified name should be independent of any particular generator.
+ Subclasses should find the output executable in the appropriate
+ output build directory, tack on any necessary executable suffix, etc.
+ """
+ raise NotImplementedError
+
+ def up_to_date(self, gyp_file, target=None, **kw):
+ """
+ Verifies that a build of the specified target is up to date.
+
+ The subclass should implement this by calling build()
+ (or a reasonable equivalent), checking whatever conditions
+ will tell it the build was an "up to date" null build, and
+ failing if it isn't.
+ """
+ raise NotImplementedError
+
+
+class TestGypGypd(TestGypBase):
+ """
+ Subclass for testing the GYP 'gypd' generator (spit out the
+ internal data structure as pretty-printed Python).
+ """
+ format = 'gypd'
+ def __init__(self, gyp=None, *args, **kw):
+ super(TestGypGypd, self).__init__(*args, **kw)
+ # gypd implies the use of 'golden' files, so parallelizing conflicts as it
+ # causes ordering changes.
+ self.no_parallel = True
+
+
+class TestGypCustom(TestGypBase):
+ """
+ Subclass for testing the GYP with custom generator
+ """
+
+ def __init__(self, gyp=None, *args, **kw):
+ self.format = kw.pop("format")
+ super(TestGypCustom, self).__init__(*args, **kw)
+
+
+class TestGypCMake(TestGypBase):
+ """
+ Subclass for testing the GYP CMake generator, using cmake's ninja backend.
+ """
+ format = 'cmake'
+ build_tool_list = ['cmake']
+ ALL = 'all'
+
+ def cmake_build(self, gyp_file, target=None, **kw):
+ arguments = kw.get('arguments', [])[:]
+
+ self.build_tool_list = ['cmake']
+ self.initialize_build_tool()
+
+ chdir = os.path.join(kw.get('chdir', '.'),
+ 'out',
+ self.configuration_dirname())
+ kw['chdir'] = chdir
+
+ arguments.append('-G')
+ arguments.append('Ninja')
+
+ kw['arguments'] = arguments
+
+ stderr = kw.get('stderr', None)
+ if stderr:
+ kw['stderr'] = stderr.split('$$$')[0]
+
+ self.run(program=self.build_tool, **kw)
+
+ def ninja_build(self, gyp_file, target=None, **kw):
+ arguments = kw.get('arguments', [])[:]
+
+ self.build_tool_list = ['ninja']
+ self.initialize_build_tool()
+
+ # Add a -C output/path to the command line.
+ arguments.append('-C')
+ arguments.append(os.path.join('out', self.configuration_dirname()))
+
+ if target not in (None, self.DEFAULT):
+ arguments.append(target)
+
+ kw['arguments'] = arguments
+
+ stderr = kw.get('stderr', None)
+ if stderr:
+ stderrs = stderr.split('$$$')
+ kw['stderr'] = stderrs[1] if len(stderrs) > 1 else ''
+
+ return self.run(program=self.build_tool, **kw)
+
+ def build(self, gyp_file, target=None, status=0, **kw):
+    # Two tools must be run to build, cmake and then ninja.
+ # Allow cmake to succeed when the overall expectation is to fail.
+ if status is None:
+ kw['status'] = None
+ else:
+ if not isinstance(status, collections.Iterable): status = (status,)
+ kw['status'] = list(itertools.chain((0,), status))
+ self.cmake_build(gyp_file, target, **kw)
+ kw['status'] = status
+ self.ninja_build(gyp_file, target, **kw)
+
+ def run_built_executable(self, name, *args, **kw):
+ # Enclosing the name in a list avoids prepending the original dir.
+ program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
+ if sys.platform == 'darwin':
+ configuration = self.configuration_dirname()
+ os.environ['DYLD_LIBRARY_PATH'] = os.path.join('out', configuration)
+ return self.run(program=program, *args, **kw)
+
+ def built_file_path(self, name, type=None, **kw):
+ result = []
+ chdir = kw.get('chdir')
+ if chdir:
+ result.append(chdir)
+ result.append('out')
+ result.append(self.configuration_dirname())
+ if type == self.STATIC_LIB:
+ if sys.platform != 'darwin':
+ result.append('obj.target')
+ elif type == self.SHARED_LIB:
+ if sys.platform != 'darwin' and sys.platform != 'win32':
+ result.append('lib.target')
+ subdir = kw.get('subdir')
+ if subdir and type != self.SHARED_LIB:
+ result.append(subdir)
+ result.append(self.built_file_basename(name, type, **kw))
+ return self.workpath(*result)
+
+ def up_to_date(self, gyp_file, target=None, **kw):
+ result = self.ninja_build(gyp_file, target, **kw)
+ if not result:
+ stdout = self.stdout()
+ if 'ninja: no work to do' not in stdout:
+ self.report_not_up_to_date()
+ self.fail_test()
+ return result
+
+
+class TestGypMake(TestGypBase):
+ """
+ Subclass for testing the GYP Make generator.
+ """
+ format = 'make'
+ build_tool_list = ['make']
+ ALL = 'all'
+ def build(self, gyp_file, target=None, **kw):
+ """
+ Runs a Make build using the Makefiles generated from the specified
+ gyp_file.
+ """
+ arguments = kw.get('arguments', [])[:]
+ if self.configuration:
+ arguments.append('BUILDTYPE=' + self.configuration)
+ if target not in (None, self.DEFAULT):
+ arguments.append(target)
+ # Sub-directory builds provide per-gyp Makefiles (i.e.
+ # Makefile.gyp_filename), so use that if there is no Makefile.
+ chdir = kw.get('chdir', '')
+ if not os.path.exists(os.path.join(chdir, 'Makefile')):
+ print("NO Makefile in " + os.path.join(chdir, 'Makefile'))
+ arguments.insert(0, '-f')
+ arguments.insert(1, os.path.splitext(gyp_file)[0] + '.Makefile')
+ kw['arguments'] = arguments
+ return self.run(program=self.build_tool, **kw)
+ def up_to_date(self, gyp_file, target=None, **kw):
+ """
+ Verifies that a build of the specified Make target is up to date.
+ """
+ if target in (None, self.DEFAULT):
+ message_target = 'all'
+ else:
+ message_target = target
+ kw['stdout'] = "make: Nothing to be done for '%s'.\n" % message_target
+ return self.build(gyp_file, target, **kw)
+ def run_built_executable(self, name, *args, **kw):
+ """
+ Runs an executable built by Make.
+ """
+ configuration = self.configuration_dirname()
+ libdir = os.path.join('out', configuration, 'lib')
+ # TODO(piman): when everything is cross-compile safe, remove lib.target
+ if sys.platform == 'darwin':
+ # Mac puts target shared libraries right in the product directory.
+ configuration = self.configuration_dirname()
+ os.environ['DYLD_LIBRARY_PATH'] = (
+ libdir + '.host:' + os.path.join('out', configuration))
+ else:
+ os.environ['LD_LIBRARY_PATH'] = libdir + '.host:' + libdir + '.target'
+ # Enclosing the name in a list avoids prepending the original dir.
+ program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
+ return self.run(program=program, *args, **kw)
+ def built_file_path(self, name, type=None, **kw):
+ """
+ Returns a path to the specified file name, of the specified type,
+ as built by Make.
+
+ Built files are in the subdirectory 'out/{configuration}'.
+ The default is 'out/Default'.
+
+ A chdir= keyword argument specifies the source directory
+ relative to which the output subdirectory can be found.
+
+ "type" values of STATIC_LIB or SHARED_LIB append the necessary
+ prefixes and suffixes to a platform-independent library base name.
+
+ A subdir= keyword argument specifies a library subdirectory within
+ the default 'obj.target'.
+ """
+ result = []
+ chdir = kw.get('chdir')
+ if chdir:
+ result.append(chdir)
+ configuration = self.configuration_dirname()
+ result.extend(['out', configuration])
+ if type == self.STATIC_LIB and sys.platform != 'darwin':
+ result.append('obj.target')
+ elif type == self.SHARED_LIB and sys.platform != 'darwin':
+ result.append('lib.target')
+ subdir = kw.get('subdir')
+ if subdir and type != self.SHARED_LIB:
+ result.append(subdir)
+ result.append(self.built_file_basename(name, type, **kw))
+ return self.workpath(*result)
+
+
+def ConvertToCygpath(path):
+ """Convert to cygwin path if we are using cygwin."""
+ if sys.platform == 'cygwin':
+ p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
+ path = p.communicate()[0].strip()
+ return path
+
+
+def MakeDirs(new_dir):
+ """A wrapper around os.makedirs() that emulates "mkdir -p"."""
+ try:
+ os.makedirs(new_dir)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+def GetDefaultKeychainPath():
+  """Get the keychain path, for use before updating HOME."""
+ assert sys.platform == 'darwin'
+ # Format is:
+ # $ security default-keychain
+ # "/Some/Path/To/default.keychain"
+ path = subprocess.check_output(['security', 'default-keychain']).decode(
+ 'utf-8', 'ignore').strip()
+ return path[1:-1]
+
+def FindMSBuildInstallation(msvs_version = 'auto'):
+ """Returns path to MSBuild for msvs_version or latest available.
+
+ Looks in the registry to find install location of MSBuild.
+ MSBuild before v4.0 will not build c++ projects, so only use newer versions.
+ """
+ import TestWin
+ registry = TestWin.Registry()
+
+ msvs_to_msbuild = {
+ '2013': r'12.0',
+ '2012': r'4.0', # Really v4.0.30319 which comes with .NET 4.5.
+ '2010': r'4.0'}
+
+ msbuild_basekey = r'HKLM\SOFTWARE\Microsoft\MSBuild\ToolsVersions'
+ if not registry.KeyExists(msbuild_basekey):
+ print('Error: could not find MSBuild base registry entry')
+ return None
+
+ msbuild_version = None
+ if msvs_version in msvs_to_msbuild:
+ msbuild_test_version = msvs_to_msbuild[msvs_version]
+ if registry.KeyExists(msbuild_basekey + '\\' + msbuild_test_version):
+ msbuild_version = msbuild_test_version
+ else:
+ print('Warning: Environment variable GYP_MSVS_VERSION specifies "%s" '
+ 'but corresponding MSBuild "%s" was not found.' %
+            (msvs_version, msbuild_test_version))
+ if not msbuild_version:
+ for msvs_version in sorted(msvs_to_msbuild, reverse=True):
+ msbuild_test_version = msvs_to_msbuild[msvs_version]
+ if registry.KeyExists(msbuild_basekey + '\\' + msbuild_test_version):
+ msbuild_version = msbuild_test_version
+ break
+ if not msbuild_version:
+ print('Error: could not find MSBuild registry entry')
+ return None
+
+ msbuild_path = registry.GetValue(msbuild_basekey + '\\' + msbuild_version,
+ 'MSBuildToolsPath')
+ if not msbuild_path:
+ print('Error: could not get MSBuild registry entry value')
+ return None
+
+ return os.path.join(msbuild_path, 'MSBuild.exe')
+
+
+def FindVisualStudioInstallation():
+ """Returns appropriate values for .build_tool and .uses_msbuild fields
+ of TestGypBase for Visual Studio.
+
+ We use the value specified by GYP_MSVS_VERSION. If not specified, we
+ search %PATH% and %PATHEXT% for a devenv.{exe,bat,...} executable.
+ Failing that, we search for likely deployment paths.
+ """
+ override_build_tool = os.environ.get('GYP_BUILD_TOOL')
+ if override_build_tool:
+ return override_build_tool, True, override_build_tool
+
+ possible_roots = ['%s:\\Program Files%s' % (chr(drive), suffix)
+ for drive in range(ord('C'), ord('Z') + 1)
+ for suffix in ['', ' (x86)']]
+ possible_paths = {
+ '2017': r'Microsoft Visual Studio\2017',
+ '2015': r'Microsoft Visual Studio 14.0\Common7\IDE\devenv.com',
+ '2013': r'Microsoft Visual Studio 12.0\Common7\IDE\devenv.com',
+ '2012': r'Microsoft Visual Studio 11.0\Common7\IDE\devenv.com',
+ '2010': r'Microsoft Visual Studio 10.0\Common7\IDE\devenv.com',
+ '2008': r'Microsoft Visual Studio 9.0\Common7\IDE\devenv.com',
+ '2005': r'Microsoft Visual Studio 8\Common7\IDE\devenv.com'}
+
+ possible_roots = [ConvertToCygpath(r) for r in possible_roots]
+
+ msvs_version = 'auto'
+ for flag in (f for f in sys.argv if f.startswith('msvs_version=')):
+ msvs_version = flag.split('=')[-1]
+ msvs_version = os.environ.get('GYP_MSVS_VERSION', msvs_version)
+
+ if msvs_version in ['2017', 'auto']:
+ msbuild_exes = []
+ try:
+ path = possible_paths['2017']
+ for r in possible_roots:
+ build_tool = os.path.join(r, path)
+ if os.path.exists(build_tool):
+          break
+ else:
+ build_tool = None
+ if not build_tool:
+ args1 = ['reg', 'query',
+                 r'HKLM\Software\Microsoft\VisualStudio\SxS\VS7',
+ '/v', '15.0', '/reg:32']
+ build_tool = subprocess.check_output(args1).decode(
+ 'utf-8', 'ignore').strip().split(b'\r\n').pop().split(b' ').pop()
+ build_tool = build_tool.decode('utf-8')
+ if build_tool:
+ args2 = ['cmd.exe', '/d', '/c',
+ 'cd', '/d', build_tool,
+ '&', 'dir', '/b', '/s', 'msbuild.exe']
+ msbuild_exes = subprocess.check_output(args2).strip().split(b'\r\n')
+ msbuild_exes = [m.decode('utf-8') for m in msbuild_exes]
+ if len(msbuild_exes):
+ msbuild_Path = os.path.join(build_tool, msbuild_exes[0])
+ if os.path.exists(msbuild_Path):
+ os.environ['GYP_MSVS_VERSION'] = '2017'
+ os.environ['GYP_BUILD_TOOL'] = msbuild_Path
+ return msbuild_Path, True, msbuild_Path
+ except Exception as e:
+ pass
+
+ if msvs_version in possible_paths:
+ # Check that the path to the specified GYP_MSVS_VERSION exists.
+ path = possible_paths[msvs_version]
+ for r in possible_roots:
+ build_tool = os.path.join(r, path)
+ if os.path.exists(build_tool):
+ uses_msbuild = msvs_version >= '2010'
+ msbuild_path = FindMSBuildInstallation(msvs_version)
+ return build_tool, uses_msbuild, msbuild_path
+ else:
+ print('Warning: Environment variable GYP_MSVS_VERSION specifies "%s" '
+ 'but corresponding "%s" was not found.' % (msvs_version, path))
+ # Neither GYP_MSVS_VERSION nor the path help us out. Iterate through
+ # the choices looking for a match.
+ for version in sorted(possible_paths, reverse=True):
+ path = possible_paths[version]
+ for r in possible_roots:
+ build_tool = os.path.join(r, path)
+ if os.path.exists(build_tool):
+        uses_msbuild = version >= '2010'
+        msbuild_path = FindMSBuildInstallation(version)
+ return build_tool, uses_msbuild, msbuild_path
+ print('Error: could not find devenv')
+ sys.exit(1)
+
+class TestGypOnMSToolchain(TestGypBase):
+ """
+ Common subclass for testing generators that target the Microsoft Visual
+ Studio toolchain (cl, link, dumpbin, etc.)
+ """
+ @staticmethod
+ def _ComputeVsvarsPath(devenv_path):
+ devenv_dir = os.path.split(devenv_path)[0]
+
+ # Check for location of Community install (in VS2017, at least).
+ vcvars_path = os.path.join(devenv_path, '..', '..', '..', '..', 'VC',
+ 'Auxiliary', 'Build', 'vcvars32.bat')
+ if os.path.exists(vcvars_path):
+ return os.path.abspath(vcvars_path)
+
+ vsvars_path = os.path.join(devenv_path, '..', '..', 'Tools',
+ 'vsvars32.bat')
+ return os.path.abspath(vsvars_path)
+
+ def initialize_build_tool(self):
+ super(TestGypOnMSToolchain, self).initialize_build_tool()
+ if sys.platform in ('win32', 'cygwin'):
+ build_tools = FindVisualStudioInstallation()
+ self.devenv_path, self.uses_msbuild, self.msbuild_path = build_tools
+ self.vsvars_path = TestGypOnMSToolchain._ComputeVsvarsPath(
+ self.devenv_path)
+
+ def run_dumpbin(self, *dumpbin_args):
+ """Run the dumpbin tool with the specified arguments, and capturing and
+ returning stdout."""
+ assert sys.platform in ('win32', 'cygwin')
+ cmd = os.environ.get('COMSPEC', 'cmd.exe')
+ arguments = [cmd, '/c', self.vsvars_path, '&&', 'dumpbin']
+ arguments.extend(dumpbin_args)
+ proc = subprocess.Popen(arguments, stdout=subprocess.PIPE)
+ output = proc.communicate()[0].decode('utf-8', 'ignore')
+ assert not proc.returncode
+ return output
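+
+  # Illustrative usage sketch (not part of the upstream module); the binary
+  # name and the expected header text are made up for the example:
+  #
+  #     headers = test.run_dumpbin('/headers',
+  #                                test.built_file_path('foo.exe'))
+  #     test.fail_test('machine (x64)' not in headers)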
+
+class TestGypNinja(TestGypOnMSToolchain):
+ """
+ Subclass for testing the GYP Ninja generator.
+ """
+ format = 'ninja'
+ build_tool_list = ['ninja']
+ ALL = 'all'
+ DEFAULT = 'all'
+
+ def run_gyp(self, gyp_file, *args, **kw):
+ TestGypBase.run_gyp(self, gyp_file, *args, **kw)
+
+ def build(self, gyp_file, target=None, **kw):
+ arguments = kw.get('arguments', [])[:]
+
+ # Add a -C output/path to the command line.
+ arguments.append('-C')
+ arguments.append(os.path.join('out', self.configuration_dirname()))
+
+ if target is None:
+ target = 'all'
+ arguments.append(target)
+
+ kw['arguments'] = arguments
+ return self.run(program=self.build_tool, **kw)
+
+ def run_built_executable(self, name, *args, **kw):
+ # Enclosing the name in a list avoids prepending the original dir.
+ program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
+ if sys.platform == 'darwin':
+ configuration = self.configuration_dirname()
+ os.environ['DYLD_LIBRARY_PATH'] = os.path.join('out', configuration)
+ return self.run(program=program, *args, **kw)
+
+ def built_file_path(self, name, type=None, **kw):
+ result = []
+ chdir = kw.get('chdir')
+ if chdir:
+ result.append(chdir)
+ result.append('out')
+ result.append(self.configuration_dirname())
+ if type == self.STATIC_LIB:
+ if sys.platform != 'darwin':
+ result.append('obj')
+ elif type == self.SHARED_LIB:
+ if sys.platform != 'darwin' and sys.platform != 'win32':
+ result.append('lib')
+ subdir = kw.get('subdir')
+ if subdir and type != self.SHARED_LIB:
+ result.append(subdir)
+ result.append(self.built_file_basename(name, type, **kw))
+ return self.workpath(*result)
+
+ def up_to_date(self, gyp_file, target=None, **kw):
+ result = self.build(gyp_file, target, **kw)
+ if not result:
+ stdout = self.stdout()
+ if 'ninja: no work to do' not in stdout:
+ self.report_not_up_to_date()
+ self.fail_test()
+ return result
+
+
+class TestGypMSVS(TestGypOnMSToolchain):
+ """
+ Subclass for testing the GYP Visual Studio generator.
+ """
+ format = 'msvs'
+
+ u = r'=== Build: 0 succeeded, 0 failed, (\d+) up-to-date, 0 skipped ==='
+ up_to_date_re = re.compile(u, re.M)
+
+ # Initial None element will indicate to our .initialize_build_tool()
+ # method below that 'devenv' was not found on %PATH%.
+ #
+ # Note: we must use devenv.com to be able to capture build output.
+ # Directly executing devenv.exe only sends output to BuildLog.htm.
+ build_tool_list = [None, 'devenv.com']
+
+ def initialize_build_tool(self):
+ super(TestGypMSVS, self).initialize_build_tool()
+ self.build_tool = self.devenv_path
+
+ def build(self, gyp_file, target=None, rebuild=False, clean=False, **kw):
+ """
+ Runs a Visual Studio build using the configuration generated
+ from the specified gyp_file.
+ """
+ if '15.0' in self.build_tool:
+ configuration = '/p:Configuration=' + (
+ self.configuration or self.configuration_buildname())
+ build = '/t'
+ if target not in (None, self.ALL, self.DEFAULT):
+ build += ':' + target
+ if clean:
+ build += ':Clean'
+ elif rebuild:
+ build += ':Rebuild'
+ elif ':' not in build:
+ build += ':Build'
+ arguments = kw.get('arguments', [])[:]
+ arguments.extend([gyp_file.replace('.gyp', '.sln'),
+ build, configuration])
+ else:
+ configuration = self.configuration_buildname()
+ if clean:
+ build = '/Clean'
+ elif rebuild:
+ build = '/Rebuild'
+ else:
+ build = '/Build'
+ arguments = kw.get('arguments', [])[:]
+ arguments.extend([gyp_file.replace('.gyp', '.sln'),
+ build, configuration])
+ # Note: the Visual Studio generator doesn't add an explicit 'all'
+ # target, so we just treat it the same as the default.
+ if target not in (None, self.ALL, self.DEFAULT):
+ arguments.extend(['/Project', target])
+ if self.configuration:
+ arguments.extend(['/ProjectConfig', self.configuration])
+ kw['arguments'] = arguments
+ return self.run(program=self.build_tool, **kw)
+ def up_to_date(self, gyp_file, target=None, **kw):
+ r"""
+ Verifies that a build of the specified Visual Studio target is up to date.
+
+ Beware that VS2010 will behave strangely if you build under
+    C:\USERS\yourname\AppData\Local. It will cause needless work. The output
+ will be "1 succeeded and 0 up to date". MSBuild tracing reveals that:
+ "Project 'C:\Users\...\AppData\Local\...vcxproj' not up to date because
+ 'C:\PROGRAM FILES (X86)\MICROSOFT VISUAL STUDIO 10.0\VC\BIN\1033\CLUI.DLL'
+ was modified at 02/21/2011 17:03:30, which is newer than '' which was
+ modified at 01/01/0001 00:00:00.
+
+ The workaround is to specify a workdir when instantiating the test, e.g.
+ test = TestGyp.TestGyp(workdir='workarea')
+ """
+ result = self.build(gyp_file, target, **kw)
+ if not result:
+ stdout = self.stdout()
+
+ m = self.up_to_date_re.search(stdout)
+ up_to_date = m and int(m.group(1)) > 0
+ if not up_to_date:
+ self.report_not_up_to_date()
+ self.fail_test()
+ return result
+ def run_built_executable(self, name, *args, **kw):
+ """
+ Runs an executable built by Visual Studio.
+ """
+ configuration = self.configuration_dirname()
+ # Enclosing the name in a list avoids prepending the original dir.
+ program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
+ return self.run(program=program, *args, **kw)
+ def built_file_path(self, name, type=None, **kw):
+ """
+ Returns a path to the specified file name, of the specified type,
+ as built by Visual Studio.
+
+ Built files are in a subdirectory that matches the configuration
+ name. The default is 'Default'.
+
+ A chdir= keyword argument specifies the source directory
+ relative to which the output subdirectory can be found.
+
+ "type" values of STATIC_LIB or SHARED_LIB append the necessary
+ prefixes and suffixes to a platform-independent library base name.
+ """
+ result = []
+ chdir = kw.get('chdir')
+ if chdir:
+ result.append(chdir)
+ result.append(self.configuration_dirname())
+ if type == self.STATIC_LIB:
+ result.append('lib')
+ result.append(self.built_file_basename(name, type, **kw))
+ return self.workpath(*result)
+
+
+class TestGypMSVSNinja(TestGypNinja):
+ """
+ Subclass for testing the GYP Visual Studio Ninja generator.
+ """
+ format = 'msvs-ninja'
+
+ def initialize_build_tool(self):
+ super(TestGypMSVSNinja, self).initialize_build_tool()
+ # When using '--build', make sure ninja is first in the format list.
+ self.formats.insert(0, 'ninja')
+
+ def build(self, gyp_file, target=None, rebuild=False, clean=False, **kw):
+ """
+ Runs a Visual Studio build using the configuration generated
+ from the specified gyp_file.
+ """
+ arguments = kw.get('arguments', [])[:]
+ if target in (None, self.ALL, self.DEFAULT):
+ # Note: the Visual Studio generator doesn't add an explicit 'all' target.
+ # This will build each project. This will work if projects are hermetic,
+ # but may fail if they are not (a project may run more than once).
+ # It would be nice to supply an all.metaproj for MSBuild.
+ arguments.extend([gyp_file.replace('.gyp', '.sln')])
+ else:
+ # MSBuild documentation claims that one can specify a sln but then build a
+ # project target like 'msbuild a.sln /t:proj:target' but this format only
+ # supports 'Clean', 'Rebuild', and 'Publish' (with none meaning Default).
+ # This limitation is due to the .sln -> .sln.metaproj conversion.
+ # The ':' is not special, 'proj:target' is a target in the metaproj.
+ arguments.extend([target+'.vcxproj'])
+
+ if clean:
+ build = 'Clean'
+ elif rebuild:
+ build = 'Rebuild'
+ else:
+ build = 'Build'
+ arguments.extend(['/target:'+build])
+ configuration = self.configuration_buildname()
+ config = configuration.split('|')
+ arguments.extend(['/property:Configuration='+config[0]])
+ if len(config) > 1:
+ arguments.extend(['/property:Platform='+config[1]])
+ arguments.extend(['/property:BuildInParallel=false'])
+ arguments.extend(['/verbosity:minimal'])
+
+ kw['arguments'] = arguments
+ return self.run(program=self.msbuild_path, **kw)
+
+
+class TestGypXcode(TestGypBase):
+ """
+ Subclass for testing the GYP Xcode generator.
+ """
+ format = 'xcode'
+ build_tool_list = ['xcodebuild']
+
+ phase_script_execution = ("\n"
+ "PhaseScriptExecution /\\S+/Script-[0-9A-F]+\\.sh\n"
+ " cd /\\S+\n"
+ " /bin/sh -c /\\S+/Script-[0-9A-F]+\\.sh\n"
+ "(make: Nothing to be done for .all.\\.\n)?")
+
+ strip_up_to_date_expressions = [
+ # Various actions or rules can run even when the overall build target
+ # is up to date. Strip those phases' GYP-generated output.
+ re.compile(phase_script_execution, re.S),
+
+ # The message from distcc_pump can trail the "BUILD SUCCEEDED"
+ # message, so strip that, too.
+ re.compile('__________Shutting down distcc-pump include server\n', re.S),
+ ]
+
+ up_to_date_endings = (
+ 'Checking Dependencies...\n** BUILD SUCCEEDED **\n', # Xcode 3.0/3.1
+ 'Check dependencies\n** BUILD SUCCEEDED **\n\n', # Xcode 3.2
+ 'Check dependencies\n\n\n** BUILD SUCCEEDED **\n\n', # Xcode 4.2
+ 'Check dependencies\n\n** BUILD SUCCEEDED **\n\n', # Xcode 5.0
+ )
+
+ def build(self, gyp_file, target=None, **kw):
+ """
+ Runs an xcodebuild using the .xcodeproj generated from the specified
+ gyp_file.
+ """
+ # Be sure we're working with a copy of 'arguments' since we modify it.
+ # The caller may not be expecting it to be modified.
+ arguments = kw.get('arguments', [])[:]
+ arguments.extend(['-project', gyp_file.replace('.gyp', '.xcodeproj')])
+ if target == self.ALL:
+ arguments.append('-alltargets',)
+ elif target not in (None, self.DEFAULT):
+ arguments.extend(['-target', target])
+ if self.configuration:
+ arguments.extend(['-configuration', self.configuration])
+ symroot = kw.get('SYMROOT', '$SRCROOT/build')
+ if symroot:
+ arguments.append('SYMROOT='+symroot)
+ kw['arguments'] = arguments
+
+ # Work around spurious stderr output from Xcode 4, http://crbug.com/181012
+ match = kw.pop('match', self.match)
+ def match_filter_xcode(actual, expected):
+ if actual:
+ if not TestCmd.is_List(actual):
+ actual = actual.split('\n')
+ if not TestCmd.is_List(expected):
+ expected = expected.split('\n')
+ actual = [a for a in actual
+ if 'No recorder, buildTask: <Xcode3BuildTask:' not in a and
+ 'Beginning test session' not in a and
+ 'Writing diagnostic log' not in a and
+ 'Logs/Test/' not in a]
+ return match(actual, expected)
+ kw['match'] = match_filter_xcode
+
+ return self.run(program=self.build_tool, **kw)
+
+  def up_to_date(self, gyp_file, target=None, **kw):
+ """
+ Verifies that a build of the specified Xcode target is up to date.
+ """
+ result = self.build(gyp_file, target, **kw)
+ if not result:
+ output = self.stdout()
+ for expression in self.strip_up_to_date_expressions:
+ output = expression.sub('', output)
+ if not output.endswith(self.up_to_date_endings):
+ self.report_not_up_to_date()
+ self.fail_test()
+ return result
+
+  def run_built_executable(self, name, *args, **kw):
+ """
+ Runs an executable built by xcodebuild.
+ """
+ configuration = self.configuration_dirname()
+ os.environ['DYLD_LIBRARY_PATH'] = os.path.join('build', configuration)
+ # Enclosing the name in a list avoids prepending the original dir.
+ program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
+ return self.run(program=program, *args, **kw)
+
+  def built_file_path(self, name, type=None, **kw):
+ """
+ Returns a path to the specified file name, of the specified type,
+ as built by Xcode.
+
+ Built files are in the subdirectory 'build/{configuration}'.
+ The default is 'build/Default'.
+
+ A chdir= keyword argument specifies the source directory
+ relative to which the output subdirectory can be found.
+
+ "type" values of STATIC_LIB or SHARED_LIB append the necessary
+ prefixes and suffixes to a platform-independent library base name.
+ """
+ result = []
+ chdir = kw.get('chdir')
+ if chdir:
+ result.append(chdir)
+ configuration = self.configuration_dirname()
+ result.extend(['build', configuration])
+ result.append(self.built_file_basename(name, type, **kw))
+ return self.workpath(*result)
+
+
+class TestGypXcodeNinja(TestGypXcode):
+ """
+ Subclass for testing the GYP Xcode Ninja generator.
+ """
+ format = 'xcode-ninja'
+
+ def initialize_build_tool(self):
+ super(TestGypXcodeNinja, self).initialize_build_tool()
+ # When using '--build', make sure ninja is first in the format list.
+ self.formats.insert(0, 'ninja')
+
+ def build(self, gyp_file, target=None, **kw):
+ """
+ Runs an xcodebuild using the .xcodeproj generated from the specified
+ gyp_file.
+ """
+ build_config = self.configuration
+ if build_config and build_config.endswith(('-iphoneos',
+ '-iphonesimulator')):
+ build_config, sdk = self.configuration.split('-')
+ kw['arguments'] = kw.get('arguments', []) + ['-sdk', sdk]
+
+ with self._build_configuration(build_config):
+ return super(TestGypXcodeNinja, self).build(
+ gyp_file.replace('.gyp', '.ninja.gyp'), target, **kw)
+
+ @contextmanager
+ def _build_configuration(self, build_config):
+ config = self.configuration
+ self.configuration = build_config
+ try:
+ yield
+ finally:
+ self.configuration = config
+
+ def built_file_path(self, name, type=None, **kw):
+ result = []
+ chdir = kw.get('chdir')
+ if chdir:
+ result.append(chdir)
+ result.append('out')
+ result.append(self.configuration_dirname())
+ subdir = kw.get('subdir')
+ if subdir and type != self.SHARED_LIB:
+ result.append(subdir)
+ result.append(self.built_file_basename(name, type, **kw))
+ return self.workpath(*result)
+
+ def up_to_date(self, gyp_file, target=None, **kw):
+ result = self.build(gyp_file, target, **kw)
+ if not result:
+ stdout = self.stdout()
+ if 'ninja: no work to do' not in stdout:
+ self.report_not_up_to_date()
+ self.fail_test()
+ return result
+
+ def run_built_executable(self, name, *args, **kw):
+ """
+ Runs an executable built by xcodebuild + ninja.
+ """
+ configuration = self.configuration_dirname()
+ os.environ['DYLD_LIBRARY_PATH'] = os.path.join('out', configuration)
+ # Enclosing the name in a list avoids prepending the original dir.
+ program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
+ return self.run(program=program, *args, **kw)
+
+
+format_class_list = [
+ TestGypGypd,
+ TestGypCMake,
+ TestGypMake,
+ TestGypMSVS,
+ TestGypMSVSNinja,
+ TestGypNinja,
+ TestGypXcode,
+ TestGypXcodeNinja,
+]
+
+def TestGyp(*args, **kw):
+ """
+ Returns an appropriate TestGyp* instance for a specified GYP format.
+ """
+ format = kw.pop('format', os.environ.get('TESTGYP_FORMAT'))
+ for format_class in format_class_list:
+ if format == format_class.format:
+ return format_class(*args, **kw)
+ raise Exception("unknown format %r" % format)
diff --git a/third_party/python/gyp/test/lib/TestMac.py b/third_party/python/gyp/test/lib/TestMac.py
new file mode 100644
index 0000000000..d13afd5781
--- /dev/null
+++ b/third_party/python/gyp/test/lib/TestMac.py
@@ -0,0 +1,76 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+TestMac.py: a collection of helper functions shared between tests on Mac OS X.
+"""
+
+from __future__ import print_function
+
+import re
+import subprocess
+
+__all__ = ['Xcode', 'CheckFileType']
+
+
+def CheckFileType(test, file, archs):
+ """Check that |file| contains exactly |archs| or fails |test|."""
+ proc = subprocess.Popen(['lipo', '-info', file], stdout=subprocess.PIPE)
+ o = proc.communicate()[0].decode('utf-8').strip()
+ assert not proc.returncode
+ if len(archs) == 1:
+ pattern = re.compile('^Non-fat file: (.*) is architecture: (.*)$')
+ else:
+ pattern = re.compile('^Architectures in the fat file: (.*) are: (.*)$')
+ match = pattern.match(o)
+ if match is None:
+    print('Output does not match expected pattern: %s' % (pattern.pattern))
+ test.fail_test()
+ else:
+ found_file, found_archs = match.groups()
+ if found_file != file or set(found_archs.split()) != set(archs):
+ print('Expected file %s with arch %s, got %s with arch %s' % (
+ file, ' '.join(archs), found_file, found_archs))
+ test.fail_test()
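+
+# A minimal usage sketch (the binary name, chdir and architecture list are
+# assumptions for illustration): a Mac gyptest can assert which architectures
+# ended up in a built binary.
+#
+#   import TestGyp
+#   import TestMac
+#
+#   test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
+#   test.run_gyp('test.gyp', chdir='app')
+#   test.build('test.gyp', test.ALL, chdir='app')
+#   binary = test.built_file_path('my_program', chdir='app')
+#   TestMac.CheckFileType(test, binary, ['x86_64'])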
+
+
+class XcodeInfo(object):
+ """Simplify access to Xcode informations."""
+
+ def __init__(self):
+ self._cache = {}
+
+ def _XcodeVersion(self):
+ lines = subprocess.check_output(['xcodebuild', '-version']).splitlines()
+ version = ''.join(lines[0].decode('utf-8').split()[-1].split('.'))
+ version = (version + '0' * (3 - len(version))).zfill(4)
+    return version, lines[-1].decode('utf-8').split()[-1]
+
+ def Version(self):
+ if 'Version' not in self._cache:
+ self._cache['Version'], self._cache['Build'] = self._XcodeVersion()
+ return self._cache['Version']
+
+ def Build(self):
+ if 'Build' not in self._cache:
+ self._cache['Version'], self._cache['Build'] = self._XcodeVersion()
+ return self._cache['Build']
+
+ def SDKBuild(self):
+ if 'SDKBuild' not in self._cache:
+ self._cache['SDKBuild'] = subprocess.check_output(
+ ['xcodebuild', '-version', '-sdk', '', 'ProductBuildVersion'])
+ self._cache['SDKBuild'] = self._cache['SDKBuild'].decode('utf-8')
+ self._cache['SDKBuild'] = self._cache['SDKBuild'].rstrip('\n')
+ return self._cache['SDKBuild']
+
+ def SDKVersion(self):
+ if 'SDKVersion' not in self._cache:
+      self._cache['SDKVersion'] = subprocess.check_output(
+          ['xcodebuild', '-version', '-sdk', '', 'SDKVersion'])
+      self._cache['SDKVersion'] = self._cache['SDKVersion'].decode('utf-8')
+      self._cache['SDKVersion'] = self._cache['SDKVersion'].rstrip('\n')
+ return self._cache['SDKVersion']
+
+
+Xcode = XcodeInfo()
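+
+# A usage sketch: Version() returns a zero-padded string ('0463' for Xcode
+# 4.6.3, '0500' for 5.0), which makes simple string comparisons against a
+# threshold possible. The threshold below is an arbitrary example.
+#
+#   import TestMac
+#
+#   if TestMac.Xcode.Version() >= '0500':
+#     print('Building with Xcode 5.0 or newer, build %s'
+#           % TestMac.Xcode.Build())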
diff --git a/third_party/python/gyp/test/lib/TestWin.py b/third_party/python/gyp/test/lib/TestWin.py
new file mode 100644
index 0000000000..ef676db121
--- /dev/null
+++ b/third_party/python/gyp/test/lib/TestWin.py
@@ -0,0 +1,101 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+TestWin.py: a collection of helpers for testing on Windows.
+"""
+
+import errno
+import os
+import re
+import sys
+import subprocess
+
+class Registry(object):
+ def _QueryBase(self, sysdir, key, value):
+ """Use reg.exe to read a particular key.
+
+    Ideally we would use the win32 module, but gyp should stay usable from
+    any Python (cygwin's Python, for instance, lacks that module), so we
+    shell out to reg.exe instead.
+
+ Arguments:
+ sysdir: The system subdirectory to attempt to launch reg.exe from.
+ key: The registry key to read from.
+ value: The particular value to read.
+ Return:
+ stdout from reg.exe, or None for failure.
+ """
+    # Skip if we're not running on Windows (native or Cygwin).
+ if sys.platform not in ('win32', 'cygwin'):
+ return None
+    # Build the reg.exe command line and attempt to launch it.
+ cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
+ 'query', key]
+ if value:
+ cmd.extend(['/v', value])
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ # Get the stdout from reg.exe, reading to the end so p.returncode is valid
+ # Note that the error text may be in [1] in some cases
+ text = p.communicate()[0].decode('utf-8', 'ignore')
+ # Check return code from reg.exe; officially 0==success and 1==error
+ if p.returncode:
+ return None
+ return text
+
+ def Query(self, key, value=None):
+ r"""Use reg.exe to read a particular key through _QueryBase.
+
+    First tries to launch reg.exe from %WinDir%\Sysnative to avoid WoW64
+    redirection, and falls back to System32 if that fails. Sysnative is
+    available on Vista and later, and on Windows Server 2003 and XP via KB
+    patch 942589. Note that Sysnative is a virtual directory, so the first
+    attempt always fails under 64-bit Python; in that case System32 already
+    works correctly, so the fallback is sufficient.
+
+ KB 942589 - http://support.microsoft.com/kb/942589/en-us.
+
+ Arguments:
+ key: The registry key.
+ value: The particular registry value to read (optional).
+ Return:
+ stdout from reg.exe, or None for failure.
+ """
+ text = None
+ try:
+ text = self._QueryBase('Sysnative', key, value)
+ except OSError as e:
+ if e.errno == errno.ENOENT:
+ text = self._QueryBase('System32', key, value)
+ else:
+ raise
+ return text
+
+ def GetValue(self, key, value):
+ """Use reg.exe to obtain the value of a registry key.
+
+ Args:
+ key: The registry key.
+ value: The particular registry value to read.
+ Return:
+ contents of the registry key's value, or None on failure.
+ """
+ text = self.Query(key, value)
+ if not text:
+ return None
+ # Extract value.
+ match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
+ if not match:
+ return None
+ return match.group(1)
+
+ def KeyExists(self, key):
+ """Use reg.exe to see if a key exists.
+
+ Args:
+ key: The registry key to check.
+ Return:
+      True if the key exists, False otherwise.
+ """
+ if not self.Query(key):
+ return False
+ return True
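+
+# A usage sketch (the registry key and value name are examples and may not
+# exist on a given machine): tests can probe for an installed toolchain
+# through the helper above.
+#
+#   import TestWin
+#
+#   registry = TestWin.Registry()
+#   key = r'HKLM\Software\Microsoft\VisualStudio\12.0'
+#   if registry.KeyExists(key):
+#     product_dir = registry.GetValue(key + r'\Setup\VS', 'ProductDir')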
diff --git a/third_party/python/gyp/test/library/gyptest-shared-obj-install-path.py b/third_party/python/gyp/test/library/gyptest-shared-obj-install-path.py
new file mode 100755
index 0000000000..af335365f9
--- /dev/null
+++ b/third_party/python/gyp/test/library/gyptest-shared-obj-install-path.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that .so files that are order-only dependencies are specified by
+their install location rather than by their alias.
+"""
+
+# Python 2.5 needs this for the with statement.
+from __future__ import with_statement
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['make'])
+
+test.run_gyp('shared_dependency.gyp',
+ chdir='src')
+test.relocate('src', 'relocate/src')
+
+test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
+
+makefile_path = 'relocate/src/Makefile'
+
+with open(makefile_path) as makefile:
+ make_contents = makefile.read()
+
+# If we remove the code to generate lib1, Make should still be able
+# to build lib2 since lib1.so already exists.
+make_contents = make_contents.replace('include lib1.target.mk', '')
+with open(makefile_path, 'w') as makefile:
+ makefile.write(make_contents)
+
+test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/library/gyptest-shared.py b/third_party/python/gyp/test/library/gyptest-shared.py
new file mode 100755
index 0000000000..a1d2985d91
--- /dev/null
+++ b/third_party/python/gyp/test/library/gyptest-shared.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple build of a "Hello, world!" program with shared libraries,
+including verifying that libraries are rebuilt correctly when functions
+move between libraries.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('library.gyp',
+ '-Dlibrary=shared_library',
+ '-Dmoveable_function=lib1',
+ chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('library.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from program.c
+Hello from lib1.c
+Hello from lib2.c
+Hello from lib1_moveable.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.run_gyp('library.gyp',
+ '-Dlibrary=shared_library',
+ '-Dmoveable_function=lib2',
+ chdir='relocate/src')
+
+# Update program.c to force a rebuild.
+test.sleep()
+contents = test.read('relocate/src/program.c')
+contents = contents.replace('Hello', 'Hello again')
+test.write('relocate/src/program.c', contents)
+
+test.build('library.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello again from program.c
+Hello from lib1.c
+Hello from lib2.c
+Hello from lib2_moveable.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.run_gyp('library.gyp',
+ '-Dlibrary=shared_library',
+ '-Dmoveable_function=lib1',
+ chdir='relocate/src')
+
+# Update program.c to force a rebuild.
+test.sleep()
+contents = test.read('relocate/src/program.c')
+contents = contents.replace('again', 'again again')
+test.write('relocate/src/program.c', contents)
+
+# TODO(sgk): we have to force a rebuild of lib2 so that it weeds out
+# the "moved" module. This should be done in gyp by adding a dependency
+# on the generated .vcproj file itself.
+test.touch('relocate/src/lib2.c')
+
+test.build('library.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello again again from program.c
+Hello from lib1.c
+Hello from lib2.c
+Hello from lib1_moveable.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/library/gyptest-static.py b/third_party/python/gyp/test/library/gyptest-static.py
new file mode 100755
index 0000000000..4bc71c4962
--- /dev/null
+++ b/third_party/python/gyp/test/library/gyptest-static.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple build of a "Hello, world!" program with static libraries,
+including verifying that libraries are rebuilt correctly when functions
+move between libraries.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('library.gyp',
+ '-Dlibrary=static_library',
+ '-Dmoveable_function=lib1',
+ chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('library.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from program.c
+Hello from lib1.c
+Hello from lib2.c
+Hello from lib1_moveable.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.run_gyp('library.gyp',
+ '-Dlibrary=static_library',
+ '-Dmoveable_function=lib2',
+ chdir='relocate/src')
+
+# Update program.c to force a rebuild.
+test.sleep()
+contents = test.read('relocate/src/program.c')
+contents = contents.replace('Hello', 'Hello again')
+test.write('relocate/src/program.c', contents)
+
+test.build('library.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello again from program.c
+Hello from lib1.c
+Hello from lib2.c
+Hello from lib2_moveable.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.run_gyp('library.gyp',
+ '-Dlibrary=static_library',
+ '-Dmoveable_function=lib1',
+ chdir='relocate/src')
+
+# Update program.c and lib2.c to force a rebuild.
+test.sleep()
+contents = test.read('relocate/src/program.c')
+contents = contents.replace('again', 'again again')
+test.write('relocate/src/program.c', contents)
+
+# TODO(sgk): we have to force a rebuild of lib2 so that it weeds out
+# the "moved" module. This should be done in gyp by adding a dependency
+# on the generated .vcproj file itself.
+test.touch('relocate/src/lib2.c')
+
+test.build('library.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello again again from program.c
+Hello from lib1.c
+Hello from lib2.c
+Hello from lib1_moveable.c
+"""
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/library/src/lib1.c b/third_party/python/gyp/test/library/src/lib1.c
new file mode 100644
index 0000000000..3866b1b845
--- /dev/null
+++ b/third_party/python/gyp/test/library/src/lib1.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void lib1_function(void)
+{
+ fprintf(stdout, "Hello from lib1.c\n");
+ fflush(stdout);
+}
diff --git a/third_party/python/gyp/test/library/src/lib1_moveable.c b/third_party/python/gyp/test/library/src/lib1_moveable.c
new file mode 100644
index 0000000000..5d3cc1d9aa
--- /dev/null
+++ b/third_party/python/gyp/test/library/src/lib1_moveable.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void moveable_function(void)
+{
+ fprintf(stdout, "Hello from lib1_moveable.c\n");
+ fflush(stdout);
+}
diff --git a/third_party/python/gyp/test/library/src/lib2.c b/third_party/python/gyp/test/library/src/lib2.c
new file mode 100644
index 0000000000..21dda72653
--- /dev/null
+++ b/third_party/python/gyp/test/library/src/lib2.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void lib2_function(void)
+{
+ fprintf(stdout, "Hello from lib2.c\n");
+ fflush(stdout);
+}
diff --git a/third_party/python/gyp/test/library/src/lib2_moveable.c b/third_party/python/gyp/test/library/src/lib2_moveable.c
new file mode 100644
index 0000000000..f645071d1e
--- /dev/null
+++ b/third_party/python/gyp/test/library/src/lib2_moveable.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void moveable_function(void)
+{
+ fprintf(stdout, "Hello from lib2_moveable.c\n");
+ fflush(stdout);
+}
diff --git a/third_party/python/gyp/test/library/src/library.gyp b/third_party/python/gyp/test/library/src/library.gyp
new file mode 100644
index 0000000000..bc35516426
--- /dev/null
+++ b/third_party/python/gyp/test/library/src/library.gyp
@@ -0,0 +1,58 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'moveable_function%': 0,
+ },
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'dependencies': [
+ 'lib1',
+ 'lib2',
+ ],
+ 'sources': [
+ 'program.c',
+ ],
+ },
+ {
+ 'target_name': 'lib1',
+ 'type': '<(library)',
+ 'sources': [
+ 'lib1.c',
+ ],
+ 'conditions': [
+ ['moveable_function=="lib1"', {
+ 'sources': [
+ 'lib1_moveable.c',
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'lib2',
+ 'type': '<(library)',
+ 'sources': [
+ 'lib2.c',
+ ],
+ 'conditions': [
+ ['moveable_function=="lib2"', {
+ 'sources': [
+ 'lib2_moveable.c',
+ ],
+ }],
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="linux"', {
+ 'target_defaults': {
+ # Support 64-bit shared libs (also works fine for 32-bit).
+ 'cflags': ['-fPIC'],
+ },
+ }],
+ ],
+}
diff --git a/third_party/python/gyp/test/library/src/program.c b/third_party/python/gyp/test/library/src/program.c
new file mode 100644
index 0000000000..d460f60e40
--- /dev/null
+++ b/third_party/python/gyp/test/library/src/program.c
@@ -0,0 +1,15 @@
+#include <stdio.h>
+
+extern void lib1_function(void);
+extern void lib2_function(void);
+extern void moveable_function(void);
+
+int main(void)
+{
+ fprintf(stdout, "Hello from program.c\n");
+ fflush(stdout);
+ lib1_function();
+ lib2_function();
+ moveable_function();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/library/src/shared_dependency.gyp b/third_party/python/gyp/test/library/src/shared_dependency.gyp
new file mode 100644
index 0000000000..7d29f5de59
--- /dev/null
+++ b/third_party/python/gyp/test/library/src/shared_dependency.gyp
@@ -0,0 +1,33 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'lib1',
+ 'type': 'shared_library',
+ 'sources': [
+ 'lib1.c',
+ ],
+ },
+ {
+ 'target_name': 'lib2',
+ 'type': 'shared_library',
+ 'sources': [
+ 'lib2.c',
+ ],
+ 'dependencies': [
+ 'lib1',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="linux"', {
+ 'target_defaults': {
+ # Support 64-bit shared libs (also works fine for 32-bit).
+ 'cflags': ['-fPIC'],
+ },
+ }],
+ ],
+}
diff --git a/third_party/python/gyp/test/library_dirs/gyptest-library-dirs.py b/third_party/python/gyp/test/library_dirs/gyptest-library-dirs.py
new file mode 100644
index 0000000000..e725dd1176
--- /dev/null
+++ b/third_party/python/gyp/test/library_dirs/gyptest-library-dirs.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies library_dirs (in link_settings) are properly found.
+"""
+
+import sys
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+lib_dir = test.tempdir('secret_location')
+
+test.run_gyp('test.gyp',
+ '-D', 'abs_path_to_secret_library_location={0}'.format(lib_dir),
+ chdir='subdir')
+
+# Must build each target independently, since they are not in each other's
+# 'dependencies' (test.ALL does NOT work here for some builders, and in any case
+# would not ensure the correct ordering).
+test.build('test.gyp', 'mylib', chdir='subdir')
+test.build('test.gyp', 'libraries-search-path-test', chdir='subdir')
+
+expect = """Hello world
+"""
+test.run_built_executable(
+ 'libraries-search-path-test', chdir='subdir', stdout=expect)
+
+if sys.platform in ('win32', 'cygwin'):
+ test.run_gyp('test-win.gyp',
+ '-D',
+ 'abs_path_to_secret_library_location={0}'.format(lib_dir),
+ chdir='subdir')
+
+ test.build('test.gyp', 'mylib', chdir='subdir')
+ test.build('test-win.gyp',
+ 'libraries-search-path-test-lib-suffix',
+ chdir='subdir')
+
+ test.run_built_executable(
+ 'libraries-search-path-test-lib-suffix', chdir='subdir', stdout=expect)
+
+
+test.pass_test()
+test.cleanup()
diff --git a/third_party/python/gyp/test/library_dirs/subdir/README.txt b/third_party/python/gyp/test/library_dirs/subdir/README.txt
new file mode 100644
index 0000000000..4031ded85f
--- /dev/null
+++ b/third_party/python/gyp/test/library_dirs/subdir/README.txt
@@ -0,0 +1 @@
+Make things live in a subdirectory, to make sure that DEPTH works correctly.
diff --git a/third_party/python/gyp/test/library_dirs/subdir/hello.cc b/third_party/python/gyp/test/library_dirs/subdir/hello.cc
new file mode 100644
index 0000000000..5dbbd48d34
--- /dev/null
+++ b/third_party/python/gyp/test/library_dirs/subdir/hello.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <iostream>
+#include "mylib.h"
+
+int main() {
+ std::cout << "Hello " << my_foo(99) << std::endl;
+ return 0;
+}
diff --git a/third_party/python/gyp/test/library_dirs/subdir/mylib.cc b/third_party/python/gyp/test/library_dirs/subdir/mylib.cc
new file mode 100644
index 0000000000..654f3d0e6c
--- /dev/null
+++ b/third_party/python/gyp/test/library_dirs/subdir/mylib.cc
@@ -0,0 +1,9 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "mylib.h"
+
+std::string my_foo(int x) {
+ return std::string("world");
+}
diff --git a/third_party/python/gyp/test/library_dirs/subdir/mylib.h b/third_party/python/gyp/test/library_dirs/subdir/mylib.h
new file mode 100644
index 0000000000..84b4022e7b
--- /dev/null
+++ b/third_party/python/gyp/test/library_dirs/subdir/mylib.h
@@ -0,0 +1,12 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TEST_LIBRARY_DIRS_SUBDIR_MYLIB_H
+#define TEST_LIBRARY_DIRS_SUBDIR_MYLIB_H
+
+#include <string>
+
+std::string my_foo(int);
+
+#endif // TEST_LIBRARY_DIRS_SUBDIR_MYLIB_H
diff --git a/third_party/python/gyp/test/library_dirs/subdir/test-win.gyp b/third_party/python/gyp/test/library_dirs/subdir/test-win.gyp
new file mode 100644
index 0000000000..033b6f7fdd
--- /dev/null
+++ b/third_party/python/gyp/test/library_dirs/subdir/test-win.gyp
@@ -0,0 +1,60 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ # This creates a static library and puts it in a nonstandard location for
+ # libraries-search-path-test.
+ 'target_name': 'mylib',
+ 'type': 'static_library',
+ 'standalone_static_library': 1,
+ # This directory is NOT in the default library search locations. It also
+ # MUST be passed in on the gyp command line:
+ #
+ # -D abs_path_to_secret_library_location=/some_absolute_path
+ #
+ # The gyptest itself (../gyptest-library-dirs.py) provides this.
+ 'product_dir': '<(abs_path_to_secret_library_location)',
+ 'sources': [
+ 'mylib.cc',
+ ],
+ },
+ {
+ 'target_name': 'libraries-search-path-test-lib-suffix',
+ 'type': 'executable',
+ 'dependencies': [
+        # It is important to NOT list mylib as a dependency here, because
+ # some build systems will track it down based on its product_dir,
+ # such that the link succeeds even without the library_dirs below.
+ #
+ # The point of this weird structuring is to ensure that 'library_dirs'
+ # works as advertised, such that just '-lmylib' (or its equivalent)
+ # works based on the directories that library_dirs puts in the library
+ # link path.
+ #
+ # If 'mylib' was listed as a proper dependency here, the build system
+ # would find it and link with its path on disk.
+ #
+ # Note that this implies 'mylib' must already be built when building
+ # 'libraries-search-path-test' (see ../gyptest-library-dirs.py).
+ #
+ #'mylib',
+ ],
+ 'sources': [
+ 'hello.cc',
+ ],
+ # Note that without this, the mylib library would not be found and
+ # successfully linked.
+ 'library_dirs': [
+ '<(abs_path_to_secret_library_location)',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-lmylib.lib',
+ ],
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/library_dirs/subdir/test.gyp b/third_party/python/gyp/test/library_dirs/subdir/test.gyp
new file mode 100644
index 0000000000..f83d7f2bf1
--- /dev/null
+++ b/third_party/python/gyp/test/library_dirs/subdir/test.gyp
@@ -0,0 +1,68 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ # This creates a static library and puts it in a nonstandard location for
+ # libraries-search-path-test.
+ 'target_name': 'mylib',
+ 'type': 'static_library',
+ 'standalone_static_library': 1,
+ # This directory is NOT in the default library search locations. It also
+ # MUST be passed in on the gyp command line:
+ #
+ # -D abs_path_to_secret_library_location=/some_absolute_path
+ #
+ # The gyptest itself (../gyptest-library-dirs.py) provides this.
+ 'product_dir': '<(abs_path_to_secret_library_location)',
+ 'sources': [
+ 'mylib.cc',
+ ],
+ },
+ {
+ 'target_name': 'libraries-search-path-test',
+ 'type': 'executable',
+ 'dependencies': [
+        # It is important to NOT list mylib as a dependency here, because
+ # some build systems will track it down based on its product_dir,
+ # such that the link succeeds even without the library_dirs below.
+ #
+ # The point of this weird structuring is to ensure that 'library_dirs'
+ # works as advertised, such that just '-lmylib' (or its equivalent)
+ # works based on the directories that library_dirs puts in the library
+ # link path.
+ #
+ # If 'mylib' was listed as a proper dependency here, the build system
+ # would find it and link with its path on disk.
+ #
+ # Note that this implies 'mylib' must already be built when building
+ # 'libraries-search-path-test' (see ../gyptest-library-dirs.py).
+ #
+ #'mylib',
+ ],
+ 'sources': [
+ 'hello.cc',
+ ],
+ # Note that without this, the mylib library would not be found and
+ # successfully linked.
+ 'library_dirs': [
+ '<(abs_path_to_secret_library_location)',
+ ],
+ 'link_settings': {
+ 'conditions': [
+ ['OS=="linux"', {
+ 'libraries': [
+ '-lmylib',
+ ],
+ }, { # else
+ 'libraries': [
+ '<(STATIC_LIB_PREFIX)mylib<(STATIC_LIB_SUFFIX)',
+ ],
+ }],
+ ], # conditions
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/link-dependency/gyptest-link-dependency.py b/third_party/python/gyp/test/link-dependency/gyptest-link-dependency.py
new file mode 100755
index 0000000000..3a8300d44e
--- /dev/null
+++ b/third_party/python/gyp/test/link-dependency/gyptest-link-dependency.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that a target marked with 'link_dependency==1' isn't pulled into the
+'none' target's dependencies (which would otherwise lead to a dependency
+cycle in ninja).
+"""
+
+import TestGyp
+
+# See https://codereview.chromium.org/177043010/#msg15 for why this doesn't
+# work with cmake.
+test = TestGyp.TestGyp(formats=['!cmake'])
+
+test.run_gyp('test.gyp')
+test.build('test.gyp', 'main')
+
+# If running gyp worked, all is well.
+test.pass_test()
diff --git a/third_party/python/gyp/test/link-dependency/main.c b/third_party/python/gyp/test/link-dependency/main.c
new file mode 100644
index 0000000000..543d8b6951
--- /dev/null
+++ b/third_party/python/gyp/test/link-dependency/main.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+#include <stdlib.h>
+int main() {
+ void *p = malloc(1);
+ printf("p: %p\n", p);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/link-dependency/mymalloc.c b/third_party/python/gyp/test/link-dependency/mymalloc.c
new file mode 100644
index 0000000000..f80bc02a62
--- /dev/null
+++ b/third_party/python/gyp/test/link-dependency/mymalloc.c
@@ -0,0 +1,12 @@
+#include <stdlib.h>
+
+// The Windows ninja generator expects an import library to be generated,
+// but none is produced if there are no exports.
+#ifdef _MSC_VER
+__declspec(dllexport) void foo() {}
+#endif
+
+void *malloc(size_t size) {
+ (void)size;
+ return (void*)0xdeadbeef;
+}
diff --git a/third_party/python/gyp/test/link-dependency/test.gyp b/third_party/python/gyp/test/link-dependency/test.gyp
new file mode 100644
index 0000000000..47cec15005
--- /dev/null
+++ b/third_party/python/gyp/test/link-dependency/test.gyp
@@ -0,0 +1,37 @@
+{
+ 'variables': {
+ 'custom_malloc%' : 1,
+ },
+ 'target_defaults': {
+ 'conditions': [
+ ['custom_malloc==1', {
+ 'dependencies': [
+ 'malloc',
+ ],
+ }],
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'main',
+ 'type': 'none',
+ 'dependencies': [ 'main_initial',],
+ },
+ {
+ 'target_name': 'main_initial',
+ 'type': 'executable',
+ 'product_name': 'main',
+ 'sources': [ 'main.c' ],
+ },
+ {
+ 'target_name': 'malloc',
+ 'type': 'shared_library',
+ 'variables': {
+ 'prune_self_dependency': 1,
+ # Targets with type 'none' won't depend on this target.
+ 'link_dependency': 1,
+ },
+ 'sources': [ 'mymalloc.c' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/link-objects/base.c b/third_party/python/gyp/test/link-objects/base.c
new file mode 100644
index 0000000000..3327459205
--- /dev/null
+++ b/third_party/python/gyp/test/link-objects/base.c
@@ -0,0 +1,6 @@
+void extra();
+
+int main(void) {
+ extra();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/link-objects/extra.c b/third_party/python/gyp/test/link-objects/extra.c
new file mode 100644
index 0000000000..1d7ee09b10
--- /dev/null
+++ b/third_party/python/gyp/test/link-objects/extra.c
@@ -0,0 +1,5 @@
+#include <stdio.h>
+
+void extra() {
+ printf("PASS\n");
+}
diff --git a/third_party/python/gyp/test/link-objects/gyptest-all.py b/third_party/python/gyp/test/link-objects/gyptest-all.py
new file mode 100755
index 0000000000..45bd6e1891
--- /dev/null
+++ b/third_party/python/gyp/test/link-objects/gyptest-all.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Put an object file on the sources list.
+Expect the result to link ok.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform != 'darwin':
+ # Currently only works under the linux make build.
+ test = TestGyp.TestGyp(formats=['make'])
+
+ test.run_gyp('link-objects.gyp')
+
+ test.build('link-objects.gyp', test.ALL)
+
+ test.run_built_executable('link-objects', stdout="PASS\n")
+
+ test.up_to_date('link-objects.gyp', test.ALL)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/link-objects/link-objects.gyp b/third_party/python/gyp/test/link-objects/link-objects.gyp
new file mode 100644
index 0000000000..ab72855531
--- /dev/null
+++ b/third_party/python/gyp/test/link-objects/link-objects.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'link-objects',
+ 'type': 'executable',
+ 'actions': [
+ {
+ 'action_name': 'build extra object',
+ 'inputs': ['extra.c'],
+ 'outputs': ['extra.o'],
+ 'action': ['gcc', '-o', 'extra.o', '-c', 'extra.c'],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ 'sources': [
+ 'base.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/linux/gyptest-implicit-rpath.py b/third_party/python/gyp/test/linux/gyptest-implicit-rpath.py
new file mode 100644
index 0000000000..8e17a3f16d
--- /dev/null
+++ b/third_party/python/gyp/test/linux/gyptest-implicit-rpath.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that the implicit rpath is added only when needed.
+"""
+
+import TestGyp
+
+import re
+import subprocess
+import sys
+
+if sys.platform.startswith('linux'):
+ test = TestGyp.TestGyp(formats=['ninja', 'make'])
+
+ CHDIR = 'implicit-rpath'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+
+ def GetRpaths(p):
+ p = test.built_file_path(p, chdir=CHDIR)
+ r = re.compile(r'Library rpath: \[([^\]]+)\]')
+ proc = subprocess.Popen(['readelf', '-d', p], stdout=subprocess.PIPE)
+ o = proc.communicate()[0].decode('utf-8')
+ assert not proc.returncode
+ return r.findall(o)
+
+ if test.format == 'ninja':
+ expect = '$ORIGIN/lib/'
+ elif test.format == 'make':
+ expect = '$ORIGIN/lib.target/'
+ else:
+ test.fail_test()
+
+ if GetRpaths('shared_executable') != [expect]:
+ test.fail_test()
+
+ if GetRpaths('shared_executable_no_so_suffix') != [expect]:
+ test.fail_test()
+
+ if GetRpaths('static_executable'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/linux/gyptest-ldflags-duplicates.py b/third_party/python/gyp/test/linux/gyptest-ldflags-duplicates.py
new file mode 100644
index 0000000000..43a4607adf
--- /dev/null
+++ b/third_party/python/gyp/test/linux/gyptest-ldflags-duplicates.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies duplicate ldflags are not removed.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform.startswith('linux'):
+ test = TestGyp.TestGyp()
+
+ CHDIR = 'ldflags-duplicates'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+ test.build('test.gyp', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/linux/gyptest-ldflags-from-environment.py b/third_party/python/gyp/test/linux/gyptest-ldflags-from-environment.py
new file mode 100644
index 0000000000..4aea193e4b
--- /dev/null
+++ b/third_party/python/gyp/test/linux/gyptest-ldflags-from-environment.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2017 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies the use of linker flags in environment variables.
+
+In this test, gyp and the build both run in the same local environment.
+"""
+
+import TestGyp
+
+import re
+import subprocess
+import sys
+
+FORMATS = ('make', 'ninja')
+
+if sys.platform.startswith('linux'):
+ test = TestGyp.TestGyp(formats=FORMATS)
+
+ CHDIR = 'ldflags-from-environment'
+ with TestGyp.LocalEnv({'LDFLAGS': '-Wl,--dynamic-linker=/target',
+ 'LDFLAGS_host': '-Wl,--dynamic-linker=/host',
+ 'GYP_CROSSCOMPILE': '1'}):
+ test.run_gyp('test.gyp', chdir=CHDIR)
+ test.build('test.gyp', chdir=CHDIR)
+
+ def GetDynamicLinker(p):
+ p = test.built_file_path(p, chdir=CHDIR)
+ r = re.compile(r'\[Requesting program interpreter: ([^\]]+)\]')
+ proc = subprocess.Popen(['readelf', '-l', p], stdout=subprocess.PIPE)
+ o = proc.communicate()[0].decode('utf-8')
+ assert not proc.returncode
+ return r.search(o).group(1)
+
+ if GetDynamicLinker('ldflags') != '/target':
+ test.fail_test()
+
+ if GetDynamicLinker('ldflags_host') != '/host':
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/linux/gyptest-target-rpath.py b/third_party/python/gyp/test/linux/gyptest-target-rpath.py
new file mode 100644
index 0000000000..f275caaece
--- /dev/null
+++ b/third_party/python/gyp/test/linux/gyptest-target-rpath.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Check target_rpath generator flag for ninja.
+"""
+
+import TestGyp
+
+import re
+import subprocess
+import sys
+
+if sys.platform.startswith('linux'):
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ CHDIR = 'target-rpath'
+ test.run_gyp('test.gyp', '-G', 'target_rpath=/usr/lib/gyptest/', chdir=CHDIR)
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+
+ def GetRpaths(p):
+ p = test.built_file_path(p, chdir=CHDIR)
+ r = re.compile(r'Library rpath: \[([^\]]+)\]')
+ proc = subprocess.Popen(['readelf', '-d', p], stdout=subprocess.PIPE)
+ o = proc.communicate()[0].decode('utf-8')
+ assert not proc.returncode
+ return r.findall(o)
+
+ expect = '/usr/lib/gyptest/'
+
+ if GetRpaths('shared_executable') != [expect]:
+ test.fail_test()
+
+ if GetRpaths('shared_executable_no_so_suffix') != [expect]:
+ test.fail_test()
+
+ if GetRpaths('static_executable'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/linux/implicit-rpath/file.c b/third_party/python/gyp/test/linux/implicit-rpath/file.c
new file mode 100644
index 0000000000..56757a701b
--- /dev/null
+++ b/third_party/python/gyp/test/linux/implicit-rpath/file.c
@@ -0,0 +1 @@
+void f() {}
diff --git a/third_party/python/gyp/test/linux/implicit-rpath/main.c b/third_party/python/gyp/test/linux/implicit-rpath/main.c
new file mode 100644
index 0000000000..237c8ce181
--- /dev/null
+++ b/third_party/python/gyp/test/linux/implicit-rpath/main.c
@@ -0,0 +1 @@
+int main() {}
diff --git a/third_party/python/gyp/test/linux/implicit-rpath/test.gyp b/third_party/python/gyp/test/linux/implicit-rpath/test.gyp
new file mode 100644
index 0000000000..b546106986
--- /dev/null
+++ b/third_party/python/gyp/test/linux/implicit-rpath/test.gyp
@@ -0,0 +1,47 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'shared',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ },
+ {
+ 'target_name': 'shared_no_so_suffix',
+ 'product_extension': 'so.0.1',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ },
+ {
+ 'target_name': 'static',
+ 'type': 'static_library',
+ 'sources': [ 'file.c' ],
+ },
+ {
+ 'target_name': 'shared_executable',
+ 'type': 'executable',
+ 'sources': [ 'main.c' ],
+ 'dependencies': [
+ 'shared',
+ ]
+ },
+ {
+ 'target_name': 'shared_executable_no_so_suffix',
+ 'type': 'executable',
+ 'sources': [ 'main.c' ],
+ 'dependencies': [
+ 'shared_no_so_suffix',
+ ]
+ },
+ {
+ 'target_name': 'static_executable',
+ 'type': 'executable',
+ 'sources': [ 'main.c' ],
+ 'dependencies': [
+ 'static',
+ ]
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/linux/ldflags-duplicates/check-ldflags.py b/third_party/python/gyp/test/linux/ldflags-duplicates/check-ldflags.py
new file mode 100755
index 0000000000..ef1029529d
--- /dev/null
+++ b/third_party/python/gyp/test/linux/ldflags-duplicates/check-ldflags.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies duplicate ldflags are not removed.
+"""
+
+from __future__ import print_function
+
+import sys
+
+def CheckContainsFlags(args, substring):
+  if args.find(substring) == -1:
+    print('ERROR: Linker arguments "%s" are missing in "%s"' % (substring,
+        args))
+    return False
+  return True
+
+if __name__ == '__main__':
+  args = " ".join(sys.argv)
+  print("args = " + args)
+  if not CheckContainsFlags(args, 'lib1.a -Wl,--no-whole-archive') \
+    or not CheckContainsFlags(args, 'lib2.a -Wl,--no-whole-archive'):
+    sys.exit(1)
+  sys.exit(0)
diff --git a/third_party/python/gyp/test/linux/ldflags-duplicates/lib1.c b/third_party/python/gyp/test/linux/ldflags-duplicates/lib1.c
new file mode 100644
index 0000000000..a1322e7395
--- /dev/null
+++ b/third_party/python/gyp/test/linux/ldflags-duplicates/lib1.c
@@ -0,0 +1,6 @@
+// Copyright (c) 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void foo() {
+}
diff --git a/third_party/python/gyp/test/linux/ldflags-duplicates/lib2.c b/third_party/python/gyp/test/linux/ldflags-duplicates/lib2.c
new file mode 100644
index 0000000000..8e7a082820
--- /dev/null
+++ b/third_party/python/gyp/test/linux/ldflags-duplicates/lib2.c
@@ -0,0 +1,6 @@
+// Copyright (c) 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void bar() {
+}
diff --git a/third_party/python/gyp/test/linux/ldflags-duplicates/main.c b/third_party/python/gyp/test/linux/ldflags-duplicates/main.c
new file mode 100644
index 0000000000..b3039ace96
--- /dev/null
+++ b/third_party/python/gyp/test/linux/ldflags-duplicates/main.c
@@ -0,0 +1,7 @@
+// Copyright (c) 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/linux/ldflags-duplicates/test.gyp b/third_party/python/gyp/test/linux/ldflags-duplicates/test.gyp
new file mode 100644
index 0000000000..c36835b18f
--- /dev/null
+++ b/third_party/python/gyp/test/linux/ldflags-duplicates/test.gyp
@@ -0,0 +1,45 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'make_global_settings': [
+ ['LINK_wrapper', './check-ldflags.py'],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'test',
+ 'type': 'executable',
+ 'ldflags': [
+ '-Wl,--whole-archive <(PRODUCT_DIR)/lib1.a',
+ '-Wl,--no-whole-archive',
+
+ '-Wl,--whole-archive <(PRODUCT_DIR)/lib2.a',
+ '-Wl,--no-whole-archive',
+ ],
+ 'dependencies': [
+ 'lib1',
+ 'lib2',
+ ],
+ 'sources': [
+ 'main.c',
+ ],
+ },
+ {
+ 'target_name': 'lib1',
+ 'type': 'static_library',
+ 'standalone_static_library': 1,
+ 'sources': [
+ 'lib1.c',
+ ],
+ },
+ {
+ 'target_name': 'lib2',
+ 'type': 'static_library',
+ 'standalone_static_library': 1,
+ 'sources': [
+ 'lib2.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/linux/ldflags-from-environment/main.c b/third_party/python/gyp/test/linux/ldflags-from-environment/main.c
new file mode 100644
index 0000000000..1b8742a107
--- /dev/null
+++ b/third_party/python/gyp/test/linux/ldflags-from-environment/main.c
@@ -0,0 +1,7 @@
+// Copyright (c) 2017 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/linux/ldflags-from-environment/test.gyp b/third_party/python/gyp/test/linux/ldflags-from-environment/test.gyp
new file mode 100644
index 0000000000..7ed1d07ed5
--- /dev/null
+++ b/third_party/python/gyp/test/linux/ldflags-from-environment/test.gyp
@@ -0,0 +1,23 @@
+# Copyright (c) 2017 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'ldflags',
+ 'type': 'executable',
+ 'sources': [
+ 'main.c',
+ ],
+ },
+ {
+ 'target_name': 'ldflags_host',
+ 'toolsets': ['host'],
+ 'type': 'executable',
+ 'sources': [
+ 'main.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/linux/target-rpath/file.c b/third_party/python/gyp/test/linux/target-rpath/file.c
new file mode 100644
index 0000000000..56757a701b
--- /dev/null
+++ b/third_party/python/gyp/test/linux/target-rpath/file.c
@@ -0,0 +1 @@
+void f() {}
diff --git a/third_party/python/gyp/test/linux/target-rpath/main.c b/third_party/python/gyp/test/linux/target-rpath/main.c
new file mode 100644
index 0000000000..237c8ce181
--- /dev/null
+++ b/third_party/python/gyp/test/linux/target-rpath/main.c
@@ -0,0 +1 @@
+int main() {}
diff --git a/third_party/python/gyp/test/linux/target-rpath/test.gyp b/third_party/python/gyp/test/linux/target-rpath/test.gyp
new file mode 100644
index 0000000000..b546106986
--- /dev/null
+++ b/third_party/python/gyp/test/linux/target-rpath/test.gyp
@@ -0,0 +1,47 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'shared',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ },
+ {
+ 'target_name': 'shared_no_so_suffix',
+ 'product_extension': 'so.0.1',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ },
+ {
+ 'target_name': 'static',
+ 'type': 'static_library',
+ 'sources': [ 'file.c' ],
+ },
+ {
+ 'target_name': 'shared_executable',
+ 'type': 'executable',
+ 'sources': [ 'main.c' ],
+ 'dependencies': [
+ 'shared',
+ ]
+ },
+ {
+ 'target_name': 'shared_executable_no_so_suffix',
+ 'type': 'executable',
+ 'sources': [ 'main.c' ],
+ 'dependencies': [
+ 'shared_no_so_suffix',
+ ]
+ },
+ {
+ 'target_name': 'static_executable',
+ 'type': 'executable',
+ 'sources': [ 'main.c' ],
+ 'dependencies': [
+ 'static',
+ ]
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/action-envvars/action/action.gyp b/third_party/python/gyp/test/mac/action-envvars/action/action.gyp
new file mode 100644
index 0000000000..d9d65745ca
--- /dev/null
+++ b/third_party/python/gyp/test/mac/action-envvars/action/action.gyp
@@ -0,0 +1,34 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'action',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'inputs': [ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/result',
+ '<(SHARED_INTERMEDIATE_DIR)/tempfile',
+ ],
+ 'action_name': 'Test action',
+ 'action': ['./action.sh', '<(SHARED_INTERMEDIATE_DIR)/tempfile' ],
+ },
+ {
+ 'inputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/tempfile',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/other_result',
+ ],
+ 'action_name': 'Other test action',
+ 'action': ['cp', '<(SHARED_INTERMEDIATE_DIR)/tempfile',
+ '<(PRODUCT_DIR)/other_result' ],
+ },
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/mac/action-envvars/action/action.sh b/third_party/python/gyp/test/mac/action-envvars/action/action.sh
new file mode 100755
index 0000000000..48d5f6bf86
--- /dev/null
+++ b/third_party/python/gyp/test/mac/action-envvars/action/action.sh
@@ -0,0 +1,8 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+echo 'Test output' > "${BUILT_PRODUCTS_DIR}/result"
+echo 'Other output' > "$1"
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist-error.strings b/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist-error.strings
new file mode 100644
index 0000000000..452e7fabf9
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist-error.strings
@@ -0,0 +1,3 @@
+/* Localized versions of Info.plist keys */
+
+NSHumanReadableCopyright = "Copyright ©2011 Google Inc."
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist.strings b/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist.strings
new file mode 100644
index 0000000000..35bd33a96e
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist.strings
@@ -0,0 +1,3 @@
+/* Localized versions of Info.plist keys */
+
+NSHumanReadableCopyright = "Copyright ©2011 Google Inc.";
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/MainMenu.xib b/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/MainMenu.xib
new file mode 100644
index 0000000000..4524596787
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/MainMenu.xib
@@ -0,0 +1,4119 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<archive type="com.apple.InterfaceBuilder3.Cocoa.XIB" version="7.10">
+ <data>
+ <int key="IBDocument.SystemTarget">1060</int>
+ <string key="IBDocument.SystemVersion">10A324</string>
+ <string key="IBDocument.InterfaceBuilderVersion">719</string>
+ <string key="IBDocument.AppKitVersion">1015</string>
+ <string key="IBDocument.HIToolboxVersion">418.00</string>
+ <object class="NSMutableDictionary" key="IBDocument.PluginVersions">
+ <string key="NS.key.0">com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string key="NS.object.0">719</string>
+ </object>
+ <object class="NSMutableArray" key="IBDocument.EditedObjectIDs">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <integer value="371"/>
+ <integer value="29"/>
+ </object>
+ <object class="NSArray" key="IBDocument.PluginDependencies">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ </object>
+ <object class="NSMutableDictionary" key="IBDocument.Metadata">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSArray" key="dict.sortedKeys" id="0">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ </object>
+ <object class="NSMutableArray" key="dict.values">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ </object>
+ </object>
+ <object class="NSMutableArray" key="IBDocument.RootObjects" id="1048">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSCustomObject" id="1021">
+ <string key="NSClassName">NSApplication</string>
+ </object>
+ <object class="NSCustomObject" id="1014">
+ <string key="NSClassName">FirstResponder</string>
+ </object>
+ <object class="NSCustomObject" id="1050">
+ <string key="NSClassName">NSApplication</string>
+ </object>
+ <object class="NSMenu" id="649796088">
+ <string key="NSTitle">AMainMenu</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="694149608">
+ <reference key="NSMenu" ref="649796088"/>
+ <string key="NSTitle">TestApp</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <object class="NSCustomResource" key="NSOnImage" id="35465992">
+ <string key="NSClassName">NSImage</string>
+ <string key="NSResourceName">NSMenuCheckmark</string>
+ </object>
+ <object class="NSCustomResource" key="NSMixedImage" id="502551668">
+ <string key="NSClassName">NSImage</string>
+ <string key="NSResourceName">NSMenuMixedState</string>
+ </object>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="110575045">
+ <string key="NSTitle">TestApp</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="238522557">
+ <reference key="NSMenu" ref="110575045"/>
+ <string key="NSTitle">About TestApp</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="304266470">
+ <reference key="NSMenu" ref="110575045"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="609285721">
+ <reference key="NSMenu" ref="110575045"/>
+ <string key="NSTitle">Preferences…</string>
+ <string key="NSKeyEquiv">,</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="481834944">
+ <reference key="NSMenu" ref="110575045"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="1046388886">
+ <reference key="NSMenu" ref="110575045"/>
+ <string key="NSTitle">Services</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="752062318">
+ <string key="NSTitle">Services</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ </object>
+ <string key="NSName">_NSServicesMenu</string>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="646227648">
+ <reference key="NSMenu" ref="110575045"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="755159360">
+ <reference key="NSMenu" ref="110575045"/>
+ <string key="NSTitle">Hide TestApp</string>
+ <string key="NSKeyEquiv">h</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="342932134">
+ <reference key="NSMenu" ref="110575045"/>
+ <string key="NSTitle">Hide Others</string>
+ <string key="NSKeyEquiv">h</string>
+ <int key="NSKeyEquivModMask">1572864</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="908899353">
+ <reference key="NSMenu" ref="110575045"/>
+ <string key="NSTitle">Show All</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="1056857174">
+ <reference key="NSMenu" ref="110575045"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="632727374">
+ <reference key="NSMenu" ref="110575045"/>
+ <string key="NSTitle">Quit TestApp</string>
+ <string key="NSKeyEquiv">q</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ <string key="NSName">_NSAppleMenu</string>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="379814623">
+ <reference key="NSMenu" ref="649796088"/>
+ <string key="NSTitle">File</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="720053764">
+ <string key="NSTitle">File</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="705341025">
+ <reference key="NSMenu" ref="720053764"/>
+ <string key="NSTitle">New</string>
+ <string key="NSKeyEquiv">n</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="722745758">
+ <reference key="NSMenu" ref="720053764"/>
+ <string key="NSTitle">Open…</string>
+ <string key="NSKeyEquiv">o</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="1025936716">
+ <reference key="NSMenu" ref="720053764"/>
+ <string key="NSTitle">Open Recent</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="1065607017">
+ <string key="NSTitle">Open Recent</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="759406840">
+ <reference key="NSMenu" ref="1065607017"/>
+ <string key="NSTitle">Clear Menu</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ <string key="NSName">_NSRecentDocumentsMenu</string>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="425164168">
+ <reference key="NSMenu" ref="720053764"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="776162233">
+ <reference key="NSMenu" ref="720053764"/>
+ <string key="NSTitle">Close</string>
+ <string key="NSKeyEquiv">w</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="1023925487">
+ <reference key="NSMenu" ref="720053764"/>
+ <string key="NSTitle">Save</string>
+ <string key="NSKeyEquiv">s</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="117038363">
+ <reference key="NSMenu" ref="720053764"/>
+ <string key="NSTitle">Save As…</string>
+ <string key="NSKeyEquiv">S</string>
+ <int key="NSKeyEquivModMask">1179648</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="579971712">
+ <reference key="NSMenu" ref="720053764"/>
+ <string key="NSTitle">Revert to Saved</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="1010469920">
+ <reference key="NSMenu" ref="720053764"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="294629803">
+ <reference key="NSMenu" ref="720053764"/>
+ <string key="NSTitle">Page Setup...</string>
+ <string key="NSKeyEquiv">P</string>
+ <int key="NSKeyEquivModMask">1179648</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSToolTip"/>
+ </object>
+ <object class="NSMenuItem" id="49223823">
+ <reference key="NSMenu" ref="720053764"/>
+ <string key="NSTitle">Print…</string>
+ <string key="NSKeyEquiv">p</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="952259628">
+ <reference key="NSMenu" ref="649796088"/>
+ <string key="NSTitle">Edit</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="789758025">
+ <string key="NSTitle">Edit</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="1058277027">
+ <reference key="NSMenu" ref="789758025"/>
+ <string key="NSTitle">Undo</string>
+ <string key="NSKeyEquiv">z</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="790794224">
+ <reference key="NSMenu" ref="789758025"/>
+ <string key="NSTitle">Redo</string>
+ <string key="NSKeyEquiv">Z</string>
+ <int key="NSKeyEquivModMask">1179648</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="1040322652">
+ <reference key="NSMenu" ref="789758025"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="296257095">
+ <reference key="NSMenu" ref="789758025"/>
+ <string key="NSTitle">Cut</string>
+ <string key="NSKeyEquiv">x</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="860595796">
+ <reference key="NSMenu" ref="789758025"/>
+ <string key="NSTitle">Copy</string>
+ <string key="NSKeyEquiv">c</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="29853731">
+ <reference key="NSMenu" ref="789758025"/>
+ <string key="NSTitle">Paste</string>
+ <string key="NSKeyEquiv">v</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="82994268">
+ <reference key="NSMenu" ref="789758025"/>
+ <string key="NSTitle">Paste and Match Style</string>
+ <string key="NSKeyEquiv">V</string>
+ <int key="NSKeyEquivModMask">1572864</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="437104165">
+ <reference key="NSMenu" ref="789758025"/>
+ <string key="NSTitle">Delete</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="583158037">
+ <reference key="NSMenu" ref="789758025"/>
+ <string key="NSTitle">Select All</string>
+ <string key="NSKeyEquiv">a</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="212016141">
+ <reference key="NSMenu" ref="789758025"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="892235320">
+ <reference key="NSMenu" ref="789758025"/>
+ <string key="NSTitle">Find</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="963351320">
+ <string key="NSTitle">Find</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="447796847">
+ <reference key="NSMenu" ref="963351320"/>
+ <string key="NSTitle">Find…</string>
+ <string key="NSKeyEquiv">f</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <int key="NSTag">1</int>
+ </object>
+ <object class="NSMenuItem" id="326711663">
+ <reference key="NSMenu" ref="963351320"/>
+ <string key="NSTitle">Find Next</string>
+ <string key="NSKeyEquiv">g</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <int key="NSTag">2</int>
+ </object>
+ <object class="NSMenuItem" id="270902937">
+ <reference key="NSMenu" ref="963351320"/>
+ <string key="NSTitle">Find Previous</string>
+ <string key="NSKeyEquiv">G</string>
+ <int key="NSKeyEquivModMask">1179648</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <int key="NSTag">3</int>
+ </object>
+ <object class="NSMenuItem" id="159080638">
+ <reference key="NSMenu" ref="963351320"/>
+ <string key="NSTitle">Use Selection for Find</string>
+ <string key="NSKeyEquiv">e</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <int key="NSTag">7</int>
+ </object>
+ <object class="NSMenuItem" id="88285865">
+ <reference key="NSMenu" ref="963351320"/>
+ <string key="NSTitle">Jump to Selection</string>
+ <string key="NSKeyEquiv">j</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="972420730">
+ <reference key="NSMenu" ref="789758025"/>
+ <string key="NSTitle">Spelling and Grammar</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="769623530">
+ <string key="NSTitle">Spelling and Grammar</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="679648819">
+ <reference key="NSMenu" ref="769623530"/>
+ <string key="NSTitle">Show Spelling and Grammar</string>
+ <string key="NSKeyEquiv">:</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="96193923">
+ <reference key="NSMenu" ref="769623530"/>
+ <string key="NSTitle">Check Document Now</string>
+ <string key="NSKeyEquiv">;</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="859480356">
+ <reference key="NSMenu" ref="769623530"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="948374510">
+ <reference key="NSMenu" ref="769623530"/>
+ <string key="NSTitle">Check Spelling While Typing</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="967646866">
+ <reference key="NSMenu" ref="769623530"/>
+ <string key="NSTitle">Check Grammar With Spelling</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="795346622">
+ <reference key="NSMenu" ref="769623530"/>
+ <string key="NSTitle">Correct Spelling Automatically</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="507821607">
+ <reference key="NSMenu" ref="789758025"/>
+ <string key="NSTitle">Substitutions</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="698887838">
+ <string key="NSTitle">Substitutions</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="65139061">
+ <reference key="NSMenu" ref="698887838"/>
+ <string key="NSTitle">Show Substitutions</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="19036812">
+ <reference key="NSMenu" ref="698887838"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="605118523">
+ <reference key="NSMenu" ref="698887838"/>
+ <string key="NSTitle">Smart Copy/Paste</string>
+ <string key="NSKeyEquiv">f</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <int key="NSTag">1</int>
+ </object>
+ <object class="NSMenuItem" id="197661976">
+ <reference key="NSMenu" ref="698887838"/>
+ <string key="NSTitle">Smart Quotes</string>
+ <string key="NSKeyEquiv">g</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <int key="NSTag">2</int>
+ </object>
+ <object class="NSMenuItem" id="672708820">
+ <reference key="NSMenu" ref="698887838"/>
+ <string key="NSTitle">Smart Dashes</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="708854459">
+ <reference key="NSMenu" ref="698887838"/>
+ <string key="NSTitle">Smart Links</string>
+ <string key="NSKeyEquiv">G</string>
+ <int key="NSKeyEquivModMask">1179648</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <int key="NSTag">3</int>
+ </object>
+ <object class="NSMenuItem" id="537092702">
+ <reference key="NSMenu" ref="698887838"/>
+ <string key="NSTitle">Text Replacement</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="288088188">
+ <reference key="NSMenu" ref="789758025"/>
+ <string key="NSTitle">Transformations</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="579392910">
+ <string key="NSTitle">Transformations</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="1060694897">
+ <reference key="NSMenu" ref="579392910"/>
+ <string key="NSTitle">Make Upper Case</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="879586729">
+ <reference key="NSMenu" ref="579392910"/>
+ <string key="NSTitle">Make Lower Case</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="56570060">
+ <reference key="NSMenu" ref="579392910"/>
+ <string key="NSTitle">Capitalize</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="676164635">
+ <reference key="NSMenu" ref="789758025"/>
+ <string key="NSTitle">Speech</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="785027613">
+ <string key="NSTitle">Speech</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="731782645">
+ <reference key="NSMenu" ref="785027613"/>
+ <string key="NSTitle">Start Speaking</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="680220178">
+ <reference key="NSMenu" ref="785027613"/>
+ <string key="NSTitle">Stop Speaking</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ </object>
+ </object>
+ </object>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="302598603">
+ <reference key="NSMenu" ref="649796088"/>
+ <string key="NSTitle">Format</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="941447902">
+ <string key="NSTitle">Format</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="792887677">
+ <reference key="NSMenu" ref="941447902"/>
+ <string key="NSTitle">Font</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="786677654">
+ <string key="NSTitle">Font</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="159677712">
+ <reference key="NSMenu" ref="786677654"/>
+ <string key="NSTitle">Show Fonts</string>
+ <string key="NSKeyEquiv">t</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="305399458">
+ <reference key="NSMenu" ref="786677654"/>
+ <string key="NSTitle">Bold</string>
+ <string key="NSKeyEquiv">b</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <int key="NSTag">2</int>
+ </object>
+ <object class="NSMenuItem" id="814362025">
+ <reference key="NSMenu" ref="786677654"/>
+ <string key="NSTitle">Italic</string>
+ <string key="NSKeyEquiv">i</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <int key="NSTag">1</int>
+ </object>
+ <object class="NSMenuItem" id="330926929">
+ <reference key="NSMenu" ref="786677654"/>
+ <string key="NSTitle">Underline</string>
+ <string key="NSKeyEquiv">u</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="533507878">
+ <reference key="NSMenu" ref="786677654"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="158063935">
+ <reference key="NSMenu" ref="786677654"/>
+ <string key="NSTitle">Bigger</string>
+ <string key="NSKeyEquiv">+</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <int key="NSTag">3</int>
+ </object>
+ <object class="NSMenuItem" id="885547335">
+ <reference key="NSMenu" ref="786677654"/>
+ <string key="NSTitle">Smaller</string>
+ <string key="NSKeyEquiv">-</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <int key="NSTag">4</int>
+ </object>
+ <object class="NSMenuItem" id="901062459">
+ <reference key="NSMenu" ref="786677654"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="767671776">
+ <reference key="NSMenu" ref="786677654"/>
+ <string key="NSTitle">Kern</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="175441468">
+ <string key="NSTitle">Kern</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="252969304">
+ <reference key="NSMenu" ref="175441468"/>
+ <string key="NSTitle">Use Default</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="766922938">
+ <reference key="NSMenu" ref="175441468"/>
+ <string key="NSTitle">Use None</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="677519740">
+ <reference key="NSMenu" ref="175441468"/>
+ <string key="NSTitle">Tighten</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="238351151">
+ <reference key="NSMenu" ref="175441468"/>
+ <string key="NSTitle">Loosen</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="691570813">
+ <reference key="NSMenu" ref="786677654"/>
+ <string key="NSTitle">Ligature</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="1058217995">
+ <string key="NSTitle">Ligature</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="706297211">
+ <reference key="NSMenu" ref="1058217995"/>
+ <string key="NSTitle">Use Default</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="568384683">
+ <reference key="NSMenu" ref="1058217995"/>
+ <string key="NSTitle">Use None</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="663508465">
+ <reference key="NSMenu" ref="1058217995"/>
+ <string key="NSTitle">Use All</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="769124883">
+ <reference key="NSMenu" ref="786677654"/>
+ <string key="NSTitle">Baseline</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="18263474">
+ <string key="NSTitle">Baseline</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="257962622">
+ <reference key="NSMenu" ref="18263474"/>
+ <string key="NSTitle">Use Default</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="644725453">
+ <reference key="NSMenu" ref="18263474"/>
+ <string key="NSTitle">Superscript</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="1037576581">
+ <reference key="NSMenu" ref="18263474"/>
+ <string key="NSTitle">Subscript</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="941806246">
+ <reference key="NSMenu" ref="18263474"/>
+ <string key="NSTitle">Raise</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="1045724900">
+ <reference key="NSMenu" ref="18263474"/>
+ <string key="NSTitle">Lower</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="739652853">
+ <reference key="NSMenu" ref="786677654"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="1012600125">
+ <reference key="NSMenu" ref="786677654"/>
+ <string key="NSTitle">Show Colors</string>
+ <string key="NSKeyEquiv">C</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="214559597">
+ <reference key="NSMenu" ref="786677654"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="596732606">
+ <reference key="NSMenu" ref="786677654"/>
+ <string key="NSTitle">Copy Style</string>
+ <string key="NSKeyEquiv">c</string>
+ <int key="NSKeyEquivModMask">1572864</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="393423671">
+ <reference key="NSMenu" ref="786677654"/>
+ <string key="NSTitle">Paste Style</string>
+ <string key="NSKeyEquiv">v</string>
+ <int key="NSKeyEquivModMask">1572864</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ <string key="NSName">_NSFontMenu</string>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="215659978">
+ <reference key="NSMenu" ref="941447902"/>
+ <string key="NSTitle">Text</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="446991534">
+ <string key="NSTitle">Text</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="875092757">
+ <reference key="NSMenu" ref="446991534"/>
+ <string key="NSTitle">Align Left</string>
+ <string key="NSKeyEquiv">{</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="630155264">
+ <reference key="NSMenu" ref="446991534"/>
+ <string key="NSTitle">Center</string>
+ <string key="NSKeyEquiv">|</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="945678886">
+ <reference key="NSMenu" ref="446991534"/>
+ <string key="NSTitle">Justify</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="512868991">
+ <reference key="NSMenu" ref="446991534"/>
+ <string key="NSTitle">Align Right</string>
+ <string key="NSKeyEquiv">}</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="163117631">
+ <reference key="NSMenu" ref="446991534"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="31516759">
+ <reference key="NSMenu" ref="446991534"/>
+ <string key="NSTitle">Writing Direction</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="956096989">
+ <string key="NSTitle">Writing Direction</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="257099033">
+ <reference key="NSMenu" ref="956096989"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <string key="NSTitle">Paragraph</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="551969625">
+ <reference key="NSMenu" ref="956096989"/>
+ <string type="base64-UTF8" key="NSTitle">CURlZmF1bHQ</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="249532473">
+ <reference key="NSMenu" ref="956096989"/>
+ <string type="base64-UTF8" key="NSTitle">CUxlZnQgdG8gUmlnaHQ</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="607364498">
+ <reference key="NSMenu" ref="956096989"/>
+ <string type="base64-UTF8" key="NSTitle">CVJpZ2h0IHRvIExlZnQ</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="508151438">
+ <reference key="NSMenu" ref="956096989"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="981751889">
+ <reference key="NSMenu" ref="956096989"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <string key="NSTitle">Selection</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="380031999">
+ <reference key="NSMenu" ref="956096989"/>
+ <string type="base64-UTF8" key="NSTitle">CURlZmF1bHQ</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="825984362">
+ <reference key="NSMenu" ref="956096989"/>
+ <string type="base64-UTF8" key="NSTitle">CUxlZnQgdG8gUmlnaHQ</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="560145579">
+ <reference key="NSMenu" ref="956096989"/>
+ <string type="base64-UTF8" key="NSTitle">CVJpZ2h0IHRvIExlZnQ</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="908105787">
+ <reference key="NSMenu" ref="446991534"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="644046920">
+ <reference key="NSMenu" ref="446991534"/>
+ <string key="NSTitle">Show Ruler</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="231811626">
+ <reference key="NSMenu" ref="446991534"/>
+ <string key="NSTitle">Copy Ruler</string>
+ <string key="NSKeyEquiv">c</string>
+ <int key="NSKeyEquivModMask">1310720</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="883618387">
+ <reference key="NSMenu" ref="446991534"/>
+ <string key="NSTitle">Paste Ruler</string>
+ <string key="NSKeyEquiv">v</string>
+ <int key="NSKeyEquivModMask">1310720</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ </object>
+ </object>
+ </object>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="586577488">
+ <reference key="NSMenu" ref="649796088"/>
+ <string key="NSTitle">View</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="466310130">
+ <string key="NSTitle">View</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="102151532">
+ <reference key="NSMenu" ref="466310130"/>
+ <string key="NSTitle">Show Toolbar</string>
+ <string key="NSKeyEquiv">t</string>
+ <int key="NSKeyEquivModMask">1572864</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="237841660">
+ <reference key="NSMenu" ref="466310130"/>
+ <string key="NSTitle">Customize Toolbar…</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="713487014">
+ <reference key="NSMenu" ref="649796088"/>
+ <string key="NSTitle">Window</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="835318025">
+ <string key="NSTitle">Window</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="1011231497">
+ <reference key="NSMenu" ref="835318025"/>
+ <string key="NSTitle">Minimize</string>
+ <string key="NSKeyEquiv">m</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="575023229">
+ <reference key="NSMenu" ref="835318025"/>
+ <string key="NSTitle">Zoom</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="299356726">
+ <reference key="NSMenu" ref="835318025"/>
+ <bool key="NSIsDisabled">YES</bool>
+ <bool key="NSIsSeparator">YES</bool>
+ <string key="NSTitle"/>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ <object class="NSMenuItem" id="625202149">
+ <reference key="NSMenu" ref="835318025"/>
+ <string key="NSTitle">Bring All to Front</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ <string key="NSName">_NSWindowsMenu</string>
+ </object>
+ </object>
+ <object class="NSMenuItem" id="448692316">
+ <reference key="NSMenu" ref="649796088"/>
+ <string key="NSTitle">Help</string>
+ <string key="NSKeyEquiv"/>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ <string key="NSAction">submenuAction:</string>
+ <object class="NSMenu" key="NSSubmenu" id="992780483">
+ <string key="NSTitle">Help</string>
+ <object class="NSMutableArray" key="NSMenuItems">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSMenuItem" id="105068016">
+ <reference key="NSMenu" ref="992780483"/>
+ <string key="NSTitle">TestApp Help</string>
+ <string key="NSKeyEquiv">?</string>
+ <int key="NSKeyEquivModMask">1048576</int>
+ <int key="NSMnemonicLoc">2147483647</int>
+ <reference key="NSOnImage" ref="35465992"/>
+ <reference key="NSMixedImage" ref="502551668"/>
+ </object>
+ </object>
+ <string key="NSName">_NSHelpMenu</string>
+ </object>
+ </object>
+ </object>
+ <string key="NSName">_NSMainMenu</string>
+ </object>
+ <object class="NSWindowTemplate" id="972006081">
+ <int key="NSWindowStyleMask">15</int>
+ <int key="NSWindowBacking">2</int>
+ <string key="NSWindowRect">{{335, 390}, {480, 360}}</string>
+ <int key="NSWTFlags">1954021376</int>
+ <string key="NSWindowTitle">TestApp</string>
+ <string key="NSWindowClass">NSWindow</string>
+ <nil key="NSViewClass"/>
+ <string key="NSWindowContentMaxSize">{1.79769e+308, 1.79769e+308}</string>
+ <object class="NSView" key="NSWindowView" id="439893737">
+ <reference key="NSNextResponder"/>
+ <int key="NSvFlags">256</int>
+ <string key="NSFrameSize">{480, 360}</string>
+ <reference key="NSSuperview"/>
+ </object>
+ <string key="NSScreenRect">{{0, 0}, {1920, 1178}}</string>
+ <string key="NSMaxSize">{1.79769e+308, 1.79769e+308}</string>
+ </object>
+ <object class="NSCustomObject" id="976324537">
+ <string key="NSClassName">TestAppAppDelegate</string>
+ </object>
+ <object class="NSCustomObject" id="755631768">
+ <string key="NSClassName">NSFontManager</string>
+ </object>
+ </object>
+ <object class="IBObjectContainer" key="IBDocument.Objects">
+ <object class="NSMutableArray" key="connectionRecords">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">performMiniaturize:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="1011231497"/>
+ </object>
+ <int key="connectionID">37</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">arrangeInFront:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="625202149"/>
+ </object>
+ <int key="connectionID">39</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">print:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="49223823"/>
+ </object>
+ <int key="connectionID">86</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">runPageLayout:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="294629803"/>
+ </object>
+ <int key="connectionID">87</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">clearRecentDocuments:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="759406840"/>
+ </object>
+ <int key="connectionID">127</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">orderFrontStandardAboutPanel:</string>
+ <reference key="source" ref="1021"/>
+ <reference key="destination" ref="238522557"/>
+ </object>
+ <int key="connectionID">142</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">performClose:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="776162233"/>
+ </object>
+ <int key="connectionID">193</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">toggleContinuousSpellChecking:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="948374510"/>
+ </object>
+ <int key="connectionID">222</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">undo:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="1058277027"/>
+ </object>
+ <int key="connectionID">223</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">copy:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="860595796"/>
+ </object>
+ <int key="connectionID">224</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">checkSpelling:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="96193923"/>
+ </object>
+ <int key="connectionID">225</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">paste:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="29853731"/>
+ </object>
+ <int key="connectionID">226</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">stopSpeaking:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="680220178"/>
+ </object>
+ <int key="connectionID">227</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">cut:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="296257095"/>
+ </object>
+ <int key="connectionID">228</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">showGuessPanel:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="679648819"/>
+ </object>
+ <int key="connectionID">230</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">redo:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="790794224"/>
+ </object>
+ <int key="connectionID">231</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">selectAll:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="583158037"/>
+ </object>
+ <int key="connectionID">232</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">startSpeaking:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="731782645"/>
+ </object>
+ <int key="connectionID">233</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">delete:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="437104165"/>
+ </object>
+ <int key="connectionID">235</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">performZoom:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="575023229"/>
+ </object>
+ <int key="connectionID">240</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">performFindPanelAction:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="447796847"/>
+ </object>
+ <int key="connectionID">241</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">centerSelectionInVisibleArea:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="88285865"/>
+ </object>
+ <int key="connectionID">245</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">toggleGrammarChecking:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="967646866"/>
+ </object>
+ <int key="connectionID">347</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">toggleSmartInsertDelete:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="605118523"/>
+ </object>
+ <int key="connectionID">355</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">toggleAutomaticQuoteSubstitution:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="197661976"/>
+ </object>
+ <int key="connectionID">356</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">toggleAutomaticLinkDetection:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="708854459"/>
+ </object>
+ <int key="connectionID">357</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">saveDocument:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="1023925487"/>
+ </object>
+ <int key="connectionID">362</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">saveDocumentAs:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="117038363"/>
+ </object>
+ <int key="connectionID">363</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">revertDocumentToSaved:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="579971712"/>
+ </object>
+ <int key="connectionID">364</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">runToolbarCustomizationPalette:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="237841660"/>
+ </object>
+ <int key="connectionID">365</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">toggleToolbarShown:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="102151532"/>
+ </object>
+ <int key="connectionID">366</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">hide:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="755159360"/>
+ </object>
+ <int key="connectionID">367</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">hideOtherApplications:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="342932134"/>
+ </object>
+ <int key="connectionID">368</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">unhideAllApplications:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="908899353"/>
+ </object>
+ <int key="connectionID">370</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">newDocument:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="705341025"/>
+ </object>
+ <int key="connectionID">373</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">openDocument:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="722745758"/>
+ </object>
+ <int key="connectionID">374</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">addFontTrait:</string>
+ <reference key="source" ref="755631768"/>
+ <reference key="destination" ref="305399458"/>
+ </object>
+ <int key="connectionID">421</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">addFontTrait:</string>
+ <reference key="source" ref="755631768"/>
+ <reference key="destination" ref="814362025"/>
+ </object>
+ <int key="connectionID">422</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">modifyFont:</string>
+ <reference key="source" ref="755631768"/>
+ <reference key="destination" ref="885547335"/>
+ </object>
+ <int key="connectionID">423</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">orderFrontFontPanel:</string>
+ <reference key="source" ref="755631768"/>
+ <reference key="destination" ref="159677712"/>
+ </object>
+ <int key="connectionID">424</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">modifyFont:</string>
+ <reference key="source" ref="755631768"/>
+ <reference key="destination" ref="158063935"/>
+ </object>
+ <int key="connectionID">425</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">raiseBaseline:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="941806246"/>
+ </object>
+ <int key="connectionID">426</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">lowerBaseline:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="1045724900"/>
+ </object>
+ <int key="connectionID">427</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">copyFont:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="596732606"/>
+ </object>
+ <int key="connectionID">428</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">subscript:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="1037576581"/>
+ </object>
+ <int key="connectionID">429</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">superscript:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="644725453"/>
+ </object>
+ <int key="connectionID">430</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">tightenKerning:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="677519740"/>
+ </object>
+ <int key="connectionID">431</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">underline:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="330926929"/>
+ </object>
+ <int key="connectionID">432</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">orderFrontColorPanel:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="1012600125"/>
+ </object>
+ <int key="connectionID">433</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">useAllLigatures:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="663508465"/>
+ </object>
+ <int key="connectionID">434</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">loosenKerning:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="238351151"/>
+ </object>
+ <int key="connectionID">435</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">pasteFont:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="393423671"/>
+ </object>
+ <int key="connectionID">436</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">unscript:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="257962622"/>
+ </object>
+ <int key="connectionID">437</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">useStandardKerning:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="252969304"/>
+ </object>
+ <int key="connectionID">438</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">useStandardLigatures:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="706297211"/>
+ </object>
+ <int key="connectionID">439</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">turnOffLigatures:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="568384683"/>
+ </object>
+ <int key="connectionID">440</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">turnOffKerning:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="766922938"/>
+ </object>
+ <int key="connectionID">441</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">terminate:</string>
+ <reference key="source" ref="1050"/>
+ <reference key="destination" ref="632727374"/>
+ </object>
+ <int key="connectionID">449</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">toggleAutomaticSpellingCorrection:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="795346622"/>
+ </object>
+ <int key="connectionID">456</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">orderFrontSubstitutionsPanel:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="65139061"/>
+ </object>
+ <int key="connectionID">458</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">toggleAutomaticDashSubstitution:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="672708820"/>
+ </object>
+ <int key="connectionID">461</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">toggleAutomaticTextReplacement:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="537092702"/>
+ </object>
+ <int key="connectionID">463</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">uppercaseWord:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="1060694897"/>
+ </object>
+ <int key="connectionID">464</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">capitalizeWord:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="56570060"/>
+ </object>
+ <int key="connectionID">467</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">lowercaseWord:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="879586729"/>
+ </object>
+ <int key="connectionID">468</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">pasteAsPlainText:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="82994268"/>
+ </object>
+ <int key="connectionID">486</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">performFindPanelAction:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="326711663"/>
+ </object>
+ <int key="connectionID">487</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">performFindPanelAction:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="270902937"/>
+ </object>
+ <int key="connectionID">488</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">performFindPanelAction:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="159080638"/>
+ </object>
+ <int key="connectionID">489</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">showHelp:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="105068016"/>
+ </object>
+ <int key="connectionID">493</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBOutletConnection" key="connection">
+ <string key="label">delegate</string>
+ <reference key="source" ref="1021"/>
+ <reference key="destination" ref="976324537"/>
+ </object>
+ <int key="connectionID">495</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">alignCenter:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="630155264"/>
+ </object>
+ <int key="connectionID">518</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">pasteRuler:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="883618387"/>
+ </object>
+ <int key="connectionID">519</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">toggleRuler:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="644046920"/>
+ </object>
+ <int key="connectionID">520</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">alignRight:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="512868991"/>
+ </object>
+ <int key="connectionID">521</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">copyRuler:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="231811626"/>
+ </object>
+ <int key="connectionID">522</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">alignJustified:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="945678886"/>
+ </object>
+ <int key="connectionID">523</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">alignLeft:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="875092757"/>
+ </object>
+ <int key="connectionID">524</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">makeBaseWritingDirectionNatural:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="551969625"/>
+ </object>
+ <int key="connectionID">525</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">makeBaseWritingDirectionLeftToRight:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="249532473"/>
+ </object>
+ <int key="connectionID">526</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">makeBaseWritingDirectionRightToLeft:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="607364498"/>
+ </object>
+ <int key="connectionID">527</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">makeTextWritingDirectionNatural:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="380031999"/>
+ </object>
+ <int key="connectionID">528</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">makeTextWritingDirectionLeftToRight:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="825984362"/>
+ </object>
+ <int key="connectionID">529</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBActionConnection" key="connection">
+ <string key="label">makeTextWritingDirectionRightToLeft:</string>
+ <reference key="source" ref="1014"/>
+ <reference key="destination" ref="560145579"/>
+ </object>
+ <int key="connectionID">530</int>
+ </object>
+ <object class="IBConnectionRecord">
+ <object class="IBOutletConnection" key="connection">
+ <string key="label">window</string>
+ <reference key="source" ref="976324537"/>
+ <reference key="destination" ref="972006081"/>
+ </object>
+ <int key="connectionID">532</int>
+ </object>
+ </object>
+ <object class="IBMutableOrderedSet" key="objectRecords">
+ <object class="NSArray" key="orderedObjects">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="IBObjectRecord">
+ <int key="objectID">0</int>
+ <reference key="object" ref="0"/>
+ <reference key="children" ref="1048"/>
+ <nil key="parent"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">-2</int>
+ <reference key="object" ref="1021"/>
+ <reference key="parent" ref="0"/>
+ <string key="objectName">File's Owner</string>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">-1</int>
+ <reference key="object" ref="1014"/>
+ <reference key="parent" ref="0"/>
+ <string key="objectName">First Responder</string>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">-3</int>
+ <reference key="object" ref="1050"/>
+ <reference key="parent" ref="0"/>
+ <string key="objectName">Application</string>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">29</int>
+ <reference key="object" ref="649796088"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="713487014"/>
+ <reference ref="694149608"/>
+ <reference ref="952259628"/>
+ <reference ref="379814623"/>
+ <reference ref="586577488"/>
+ <reference ref="302598603"/>
+ <reference ref="448692316"/>
+ </object>
+ <reference key="parent" ref="0"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">19</int>
+ <reference key="object" ref="713487014"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="835318025"/>
+ </object>
+ <reference key="parent" ref="649796088"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">56</int>
+ <reference key="object" ref="694149608"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="110575045"/>
+ </object>
+ <reference key="parent" ref="649796088"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">217</int>
+ <reference key="object" ref="952259628"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="789758025"/>
+ </object>
+ <reference key="parent" ref="649796088"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">83</int>
+ <reference key="object" ref="379814623"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="720053764"/>
+ </object>
+ <reference key="parent" ref="649796088"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">81</int>
+ <reference key="object" ref="720053764"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="1023925487"/>
+ <reference ref="117038363"/>
+ <reference ref="49223823"/>
+ <reference ref="722745758"/>
+ <reference ref="705341025"/>
+ <reference ref="1025936716"/>
+ <reference ref="294629803"/>
+ <reference ref="776162233"/>
+ <reference ref="425164168"/>
+ <reference ref="579971712"/>
+ <reference ref="1010469920"/>
+ </object>
+ <reference key="parent" ref="379814623"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">75</int>
+ <reference key="object" ref="1023925487"/>
+ <reference key="parent" ref="720053764"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">80</int>
+ <reference key="object" ref="117038363"/>
+ <reference key="parent" ref="720053764"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">78</int>
+ <reference key="object" ref="49223823"/>
+ <reference key="parent" ref="720053764"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">72</int>
+ <reference key="object" ref="722745758"/>
+ <reference key="parent" ref="720053764"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">82</int>
+ <reference key="object" ref="705341025"/>
+ <reference key="parent" ref="720053764"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">124</int>
+ <reference key="object" ref="1025936716"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="1065607017"/>
+ </object>
+ <reference key="parent" ref="720053764"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">77</int>
+ <reference key="object" ref="294629803"/>
+ <reference key="parent" ref="720053764"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">73</int>
+ <reference key="object" ref="776162233"/>
+ <reference key="parent" ref="720053764"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">79</int>
+ <reference key="object" ref="425164168"/>
+ <reference key="parent" ref="720053764"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">112</int>
+ <reference key="object" ref="579971712"/>
+ <reference key="parent" ref="720053764"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">74</int>
+ <reference key="object" ref="1010469920"/>
+ <reference key="parent" ref="720053764"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">125</int>
+ <reference key="object" ref="1065607017"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="759406840"/>
+ </object>
+ <reference key="parent" ref="1025936716"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">126</int>
+ <reference key="object" ref="759406840"/>
+ <reference key="parent" ref="1065607017"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">205</int>
+ <reference key="object" ref="789758025"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="437104165"/>
+ <reference ref="583158037"/>
+ <reference ref="1058277027"/>
+ <reference ref="212016141"/>
+ <reference ref="296257095"/>
+ <reference ref="29853731"/>
+ <reference ref="860595796"/>
+ <reference ref="1040322652"/>
+ <reference ref="790794224"/>
+ <reference ref="892235320"/>
+ <reference ref="972420730"/>
+ <reference ref="676164635"/>
+ <reference ref="507821607"/>
+ <reference ref="288088188"/>
+ <reference ref="82994268"/>
+ </object>
+ <reference key="parent" ref="952259628"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">202</int>
+ <reference key="object" ref="437104165"/>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">198</int>
+ <reference key="object" ref="583158037"/>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">207</int>
+ <reference key="object" ref="1058277027"/>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">214</int>
+ <reference key="object" ref="212016141"/>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">199</int>
+ <reference key="object" ref="296257095"/>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">203</int>
+ <reference key="object" ref="29853731"/>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">197</int>
+ <reference key="object" ref="860595796"/>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">206</int>
+ <reference key="object" ref="1040322652"/>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">215</int>
+ <reference key="object" ref="790794224"/>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">218</int>
+ <reference key="object" ref="892235320"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="963351320"/>
+ </object>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">216</int>
+ <reference key="object" ref="972420730"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="769623530"/>
+ </object>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">200</int>
+ <reference key="object" ref="769623530"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="948374510"/>
+ <reference ref="96193923"/>
+ <reference ref="679648819"/>
+ <reference ref="967646866"/>
+ <reference ref="859480356"/>
+ <reference ref="795346622"/>
+ </object>
+ <reference key="parent" ref="972420730"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">219</int>
+ <reference key="object" ref="948374510"/>
+ <reference key="parent" ref="769623530"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">201</int>
+ <reference key="object" ref="96193923"/>
+ <reference key="parent" ref="769623530"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">204</int>
+ <reference key="object" ref="679648819"/>
+ <reference key="parent" ref="769623530"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">220</int>
+ <reference key="object" ref="963351320"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="270902937"/>
+ <reference ref="88285865"/>
+ <reference ref="159080638"/>
+ <reference ref="326711663"/>
+ <reference ref="447796847"/>
+ </object>
+ <reference key="parent" ref="892235320"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">213</int>
+ <reference key="object" ref="270902937"/>
+ <reference key="parent" ref="963351320"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">210</int>
+ <reference key="object" ref="88285865"/>
+ <reference key="parent" ref="963351320"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">221</int>
+ <reference key="object" ref="159080638"/>
+ <reference key="parent" ref="963351320"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">208</int>
+ <reference key="object" ref="326711663"/>
+ <reference key="parent" ref="963351320"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">209</int>
+ <reference key="object" ref="447796847"/>
+ <reference key="parent" ref="963351320"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">57</int>
+ <reference key="object" ref="110575045"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="238522557"/>
+ <reference ref="755159360"/>
+ <reference ref="908899353"/>
+ <reference ref="632727374"/>
+ <reference ref="646227648"/>
+ <reference ref="609285721"/>
+ <reference ref="481834944"/>
+ <reference ref="304266470"/>
+ <reference ref="1046388886"/>
+ <reference ref="1056857174"/>
+ <reference ref="342932134"/>
+ </object>
+ <reference key="parent" ref="694149608"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">58</int>
+ <reference key="object" ref="238522557"/>
+ <reference key="parent" ref="110575045"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">134</int>
+ <reference key="object" ref="755159360"/>
+ <reference key="parent" ref="110575045"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">150</int>
+ <reference key="object" ref="908899353"/>
+ <reference key="parent" ref="110575045"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">136</int>
+ <reference key="object" ref="632727374"/>
+ <reference key="parent" ref="110575045"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">144</int>
+ <reference key="object" ref="646227648"/>
+ <reference key="parent" ref="110575045"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">129</int>
+ <reference key="object" ref="609285721"/>
+ <reference key="parent" ref="110575045"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">143</int>
+ <reference key="object" ref="481834944"/>
+ <reference key="parent" ref="110575045"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">236</int>
+ <reference key="object" ref="304266470"/>
+ <reference key="parent" ref="110575045"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">131</int>
+ <reference key="object" ref="1046388886"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="752062318"/>
+ </object>
+ <reference key="parent" ref="110575045"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">149</int>
+ <reference key="object" ref="1056857174"/>
+ <reference key="parent" ref="110575045"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">145</int>
+ <reference key="object" ref="342932134"/>
+ <reference key="parent" ref="110575045"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">130</int>
+ <reference key="object" ref="752062318"/>
+ <reference key="parent" ref="1046388886"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">24</int>
+ <reference key="object" ref="835318025"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="299356726"/>
+ <reference ref="625202149"/>
+ <reference ref="575023229"/>
+ <reference ref="1011231497"/>
+ </object>
+ <reference key="parent" ref="713487014"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">92</int>
+ <reference key="object" ref="299356726"/>
+ <reference key="parent" ref="835318025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">5</int>
+ <reference key="object" ref="625202149"/>
+ <reference key="parent" ref="835318025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">239</int>
+ <reference key="object" ref="575023229"/>
+ <reference key="parent" ref="835318025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">23</int>
+ <reference key="object" ref="1011231497"/>
+ <reference key="parent" ref="835318025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">295</int>
+ <reference key="object" ref="586577488"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="466310130"/>
+ </object>
+ <reference key="parent" ref="649796088"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">296</int>
+ <reference key="object" ref="466310130"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="102151532"/>
+ <reference ref="237841660"/>
+ </object>
+ <reference key="parent" ref="586577488"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">297</int>
+ <reference key="object" ref="102151532"/>
+ <reference key="parent" ref="466310130"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">298</int>
+ <reference key="object" ref="237841660"/>
+ <reference key="parent" ref="466310130"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">211</int>
+ <reference key="object" ref="676164635"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="785027613"/>
+ </object>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">212</int>
+ <reference key="object" ref="785027613"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="680220178"/>
+ <reference ref="731782645"/>
+ </object>
+ <reference key="parent" ref="676164635"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">195</int>
+ <reference key="object" ref="680220178"/>
+ <reference key="parent" ref="785027613"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">196</int>
+ <reference key="object" ref="731782645"/>
+ <reference key="parent" ref="785027613"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">346</int>
+ <reference key="object" ref="967646866"/>
+ <reference key="parent" ref="769623530"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">348</int>
+ <reference key="object" ref="507821607"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="698887838"/>
+ </object>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">349</int>
+ <reference key="object" ref="698887838"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="605118523"/>
+ <reference ref="197661976"/>
+ <reference ref="708854459"/>
+ <reference ref="65139061"/>
+ <reference ref="19036812"/>
+ <reference ref="672708820"/>
+ <reference ref="537092702"/>
+ </object>
+ <reference key="parent" ref="507821607"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">350</int>
+ <reference key="object" ref="605118523"/>
+ <reference key="parent" ref="698887838"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">351</int>
+ <reference key="object" ref="197661976"/>
+ <reference key="parent" ref="698887838"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">354</int>
+ <reference key="object" ref="708854459"/>
+ <reference key="parent" ref="698887838"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">371</int>
+ <reference key="object" ref="972006081"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="439893737"/>
+ </object>
+ <reference key="parent" ref="0"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">372</int>
+ <reference key="object" ref="439893737"/>
+ <reference key="parent" ref="972006081"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">375</int>
+ <reference key="object" ref="302598603"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="941447902"/>
+ </object>
+ <reference key="parent" ref="649796088"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">376</int>
+ <reference key="object" ref="941447902"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="792887677"/>
+ <reference ref="215659978"/>
+ </object>
+ <reference key="parent" ref="302598603"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">377</int>
+ <reference key="object" ref="792887677"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="786677654"/>
+ </object>
+ <reference key="parent" ref="941447902"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">388</int>
+ <reference key="object" ref="786677654"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="159677712"/>
+ <reference ref="305399458"/>
+ <reference ref="814362025"/>
+ <reference ref="330926929"/>
+ <reference ref="533507878"/>
+ <reference ref="158063935"/>
+ <reference ref="885547335"/>
+ <reference ref="901062459"/>
+ <reference ref="767671776"/>
+ <reference ref="691570813"/>
+ <reference ref="769124883"/>
+ <reference ref="739652853"/>
+ <reference ref="1012600125"/>
+ <reference ref="214559597"/>
+ <reference ref="596732606"/>
+ <reference ref="393423671"/>
+ </object>
+ <reference key="parent" ref="792887677"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">389</int>
+ <reference key="object" ref="159677712"/>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">390</int>
+ <reference key="object" ref="305399458"/>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">391</int>
+ <reference key="object" ref="814362025"/>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">392</int>
+ <reference key="object" ref="330926929"/>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">393</int>
+ <reference key="object" ref="533507878"/>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">394</int>
+ <reference key="object" ref="158063935"/>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">395</int>
+ <reference key="object" ref="885547335"/>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">396</int>
+ <reference key="object" ref="901062459"/>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">397</int>
+ <reference key="object" ref="767671776"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="175441468"/>
+ </object>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">398</int>
+ <reference key="object" ref="691570813"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="1058217995"/>
+ </object>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">399</int>
+ <reference key="object" ref="769124883"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="18263474"/>
+ </object>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">400</int>
+ <reference key="object" ref="739652853"/>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">401</int>
+ <reference key="object" ref="1012600125"/>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">402</int>
+ <reference key="object" ref="214559597"/>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">403</int>
+ <reference key="object" ref="596732606"/>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">404</int>
+ <reference key="object" ref="393423671"/>
+ <reference key="parent" ref="786677654"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">405</int>
+ <reference key="object" ref="18263474"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="257962622"/>
+ <reference ref="644725453"/>
+ <reference ref="1037576581"/>
+ <reference ref="941806246"/>
+ <reference ref="1045724900"/>
+ </object>
+ <reference key="parent" ref="769124883"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">406</int>
+ <reference key="object" ref="257962622"/>
+ <reference key="parent" ref="18263474"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">407</int>
+ <reference key="object" ref="644725453"/>
+ <reference key="parent" ref="18263474"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">408</int>
+ <reference key="object" ref="1037576581"/>
+ <reference key="parent" ref="18263474"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">409</int>
+ <reference key="object" ref="941806246"/>
+ <reference key="parent" ref="18263474"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">410</int>
+ <reference key="object" ref="1045724900"/>
+ <reference key="parent" ref="18263474"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">411</int>
+ <reference key="object" ref="1058217995"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="706297211"/>
+ <reference ref="568384683"/>
+ <reference ref="663508465"/>
+ </object>
+ <reference key="parent" ref="691570813"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">412</int>
+ <reference key="object" ref="706297211"/>
+ <reference key="parent" ref="1058217995"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">413</int>
+ <reference key="object" ref="568384683"/>
+ <reference key="parent" ref="1058217995"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">414</int>
+ <reference key="object" ref="663508465"/>
+ <reference key="parent" ref="1058217995"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">415</int>
+ <reference key="object" ref="175441468"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="252969304"/>
+ <reference ref="766922938"/>
+ <reference ref="677519740"/>
+ <reference ref="238351151"/>
+ </object>
+ <reference key="parent" ref="767671776"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">416</int>
+ <reference key="object" ref="252969304"/>
+ <reference key="parent" ref="175441468"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">417</int>
+ <reference key="object" ref="766922938"/>
+ <reference key="parent" ref="175441468"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">418</int>
+ <reference key="object" ref="677519740"/>
+ <reference key="parent" ref="175441468"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">419</int>
+ <reference key="object" ref="238351151"/>
+ <reference key="parent" ref="175441468"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">420</int>
+ <reference key="object" ref="755631768"/>
+ <reference key="parent" ref="0"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">450</int>
+ <reference key="object" ref="288088188"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="579392910"/>
+ </object>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">451</int>
+ <reference key="object" ref="579392910"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="1060694897"/>
+ <reference ref="879586729"/>
+ <reference ref="56570060"/>
+ </object>
+ <reference key="parent" ref="288088188"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">452</int>
+ <reference key="object" ref="1060694897"/>
+ <reference key="parent" ref="579392910"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">453</int>
+ <reference key="object" ref="859480356"/>
+ <reference key="parent" ref="769623530"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">454</int>
+ <reference key="object" ref="795346622"/>
+ <reference key="parent" ref="769623530"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">457</int>
+ <reference key="object" ref="65139061"/>
+ <reference key="parent" ref="698887838"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">459</int>
+ <reference key="object" ref="19036812"/>
+ <reference key="parent" ref="698887838"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">460</int>
+ <reference key="object" ref="672708820"/>
+ <reference key="parent" ref="698887838"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">462</int>
+ <reference key="object" ref="537092702"/>
+ <reference key="parent" ref="698887838"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">465</int>
+ <reference key="object" ref="879586729"/>
+ <reference key="parent" ref="579392910"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">466</int>
+ <reference key="object" ref="56570060"/>
+ <reference key="parent" ref="579392910"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">485</int>
+ <reference key="object" ref="82994268"/>
+ <reference key="parent" ref="789758025"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">490</int>
+ <reference key="object" ref="448692316"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="992780483"/>
+ </object>
+ <reference key="parent" ref="649796088"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">491</int>
+ <reference key="object" ref="992780483"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="105068016"/>
+ </object>
+ <reference key="parent" ref="448692316"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">492</int>
+ <reference key="object" ref="105068016"/>
+ <reference key="parent" ref="992780483"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">494</int>
+ <reference key="object" ref="976324537"/>
+ <reference key="parent" ref="0"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">496</int>
+ <reference key="object" ref="215659978"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="446991534"/>
+ </object>
+ <reference key="parent" ref="941447902"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">497</int>
+ <reference key="object" ref="446991534"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="875092757"/>
+ <reference ref="630155264"/>
+ <reference ref="945678886"/>
+ <reference ref="512868991"/>
+ <reference ref="163117631"/>
+ <reference ref="31516759"/>
+ <reference ref="908105787"/>
+ <reference ref="644046920"/>
+ <reference ref="231811626"/>
+ <reference ref="883618387"/>
+ </object>
+ <reference key="parent" ref="215659978"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">498</int>
+ <reference key="object" ref="875092757"/>
+ <reference key="parent" ref="446991534"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">499</int>
+ <reference key="object" ref="630155264"/>
+ <reference key="parent" ref="446991534"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">500</int>
+ <reference key="object" ref="945678886"/>
+ <reference key="parent" ref="446991534"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">501</int>
+ <reference key="object" ref="512868991"/>
+ <reference key="parent" ref="446991534"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">502</int>
+ <reference key="object" ref="163117631"/>
+ <reference key="parent" ref="446991534"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">503</int>
+ <reference key="object" ref="31516759"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="956096989"/>
+ </object>
+ <reference key="parent" ref="446991534"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">504</int>
+ <reference key="object" ref="908105787"/>
+ <reference key="parent" ref="446991534"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">505</int>
+ <reference key="object" ref="644046920"/>
+ <reference key="parent" ref="446991534"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">506</int>
+ <reference key="object" ref="231811626"/>
+ <reference key="parent" ref="446991534"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">507</int>
+ <reference key="object" ref="883618387"/>
+ <reference key="parent" ref="446991534"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">508</int>
+ <reference key="object" ref="956096989"/>
+ <object class="NSMutableArray" key="children">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference ref="257099033"/>
+ <reference ref="551969625"/>
+ <reference ref="249532473"/>
+ <reference ref="607364498"/>
+ <reference ref="508151438"/>
+ <reference ref="981751889"/>
+ <reference ref="380031999"/>
+ <reference ref="825984362"/>
+ <reference ref="560145579"/>
+ </object>
+ <reference key="parent" ref="31516759"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">509</int>
+ <reference key="object" ref="257099033"/>
+ <reference key="parent" ref="956096989"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">510</int>
+ <reference key="object" ref="551969625"/>
+ <reference key="parent" ref="956096989"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">511</int>
+ <reference key="object" ref="249532473"/>
+ <reference key="parent" ref="956096989"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">512</int>
+ <reference key="object" ref="607364498"/>
+ <reference key="parent" ref="956096989"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">513</int>
+ <reference key="object" ref="508151438"/>
+ <reference key="parent" ref="956096989"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">514</int>
+ <reference key="object" ref="981751889"/>
+ <reference key="parent" ref="956096989"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">515</int>
+ <reference key="object" ref="380031999"/>
+ <reference key="parent" ref="956096989"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">516</int>
+ <reference key="object" ref="825984362"/>
+ <reference key="parent" ref="956096989"/>
+ </object>
+ <object class="IBObjectRecord">
+ <int key="objectID">517</int>
+ <reference key="object" ref="560145579"/>
+ <reference key="parent" ref="956096989"/>
+ </object>
+ </object>
+ </object>
+ <object class="NSMutableDictionary" key="flattenedProperties">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSArray" key="dict.sortedKeys">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <string>-3.IBPluginDependency</string>
+ <string>112.IBPluginDependency</string>
+ <string>112.ImportedFromIB2</string>
+ <string>124.IBPluginDependency</string>
+ <string>124.ImportedFromIB2</string>
+ <string>125.IBPluginDependency</string>
+ <string>125.ImportedFromIB2</string>
+ <string>125.editorWindowContentRectSynchronizationRect</string>
+ <string>126.IBPluginDependency</string>
+ <string>126.ImportedFromIB2</string>
+ <string>129.IBPluginDependency</string>
+ <string>129.ImportedFromIB2</string>
+ <string>130.IBPluginDependency</string>
+ <string>130.ImportedFromIB2</string>
+ <string>130.editorWindowContentRectSynchronizationRect</string>
+ <string>131.IBPluginDependency</string>
+ <string>131.ImportedFromIB2</string>
+ <string>134.IBPluginDependency</string>
+ <string>134.ImportedFromIB2</string>
+ <string>136.IBPluginDependency</string>
+ <string>136.ImportedFromIB2</string>
+ <string>143.IBPluginDependency</string>
+ <string>143.ImportedFromIB2</string>
+ <string>144.IBPluginDependency</string>
+ <string>144.ImportedFromIB2</string>
+ <string>145.IBPluginDependency</string>
+ <string>145.ImportedFromIB2</string>
+ <string>149.IBPluginDependency</string>
+ <string>149.ImportedFromIB2</string>
+ <string>150.IBPluginDependency</string>
+ <string>150.ImportedFromIB2</string>
+ <string>19.IBPluginDependency</string>
+ <string>19.ImportedFromIB2</string>
+ <string>195.IBPluginDependency</string>
+ <string>195.ImportedFromIB2</string>
+ <string>196.IBPluginDependency</string>
+ <string>196.ImportedFromIB2</string>
+ <string>197.IBPluginDependency</string>
+ <string>197.ImportedFromIB2</string>
+ <string>198.IBPluginDependency</string>
+ <string>198.ImportedFromIB2</string>
+ <string>199.IBPluginDependency</string>
+ <string>199.ImportedFromIB2</string>
+ <string>200.IBEditorWindowLastContentRect</string>
+ <string>200.IBPluginDependency</string>
+ <string>200.ImportedFromIB2</string>
+ <string>200.editorWindowContentRectSynchronizationRect</string>
+ <string>201.IBPluginDependency</string>
+ <string>201.ImportedFromIB2</string>
+ <string>202.IBPluginDependency</string>
+ <string>202.ImportedFromIB2</string>
+ <string>203.IBPluginDependency</string>
+ <string>203.ImportedFromIB2</string>
+ <string>204.IBPluginDependency</string>
+ <string>204.ImportedFromIB2</string>
+ <string>205.IBEditorWindowLastContentRect</string>
+ <string>205.IBPluginDependency</string>
+ <string>205.ImportedFromIB2</string>
+ <string>205.editorWindowContentRectSynchronizationRect</string>
+ <string>206.IBPluginDependency</string>
+ <string>206.ImportedFromIB2</string>
+ <string>207.IBPluginDependency</string>
+ <string>207.ImportedFromIB2</string>
+ <string>208.IBPluginDependency</string>
+ <string>208.ImportedFromIB2</string>
+ <string>209.IBPluginDependency</string>
+ <string>209.ImportedFromIB2</string>
+ <string>210.IBPluginDependency</string>
+ <string>210.ImportedFromIB2</string>
+ <string>211.IBPluginDependency</string>
+ <string>211.ImportedFromIB2</string>
+ <string>212.IBPluginDependency</string>
+ <string>212.ImportedFromIB2</string>
+ <string>212.editorWindowContentRectSynchronizationRect</string>
+ <string>213.IBPluginDependency</string>
+ <string>213.ImportedFromIB2</string>
+ <string>214.IBPluginDependency</string>
+ <string>214.ImportedFromIB2</string>
+ <string>215.IBPluginDependency</string>
+ <string>215.ImportedFromIB2</string>
+ <string>216.IBPluginDependency</string>
+ <string>216.ImportedFromIB2</string>
+ <string>217.IBPluginDependency</string>
+ <string>217.ImportedFromIB2</string>
+ <string>218.IBPluginDependency</string>
+ <string>218.ImportedFromIB2</string>
+ <string>219.IBPluginDependency</string>
+ <string>219.ImportedFromIB2</string>
+ <string>220.IBEditorWindowLastContentRect</string>
+ <string>220.IBPluginDependency</string>
+ <string>220.ImportedFromIB2</string>
+ <string>220.editorWindowContentRectSynchronizationRect</string>
+ <string>221.IBPluginDependency</string>
+ <string>221.ImportedFromIB2</string>
+ <string>23.IBPluginDependency</string>
+ <string>23.ImportedFromIB2</string>
+ <string>236.IBPluginDependency</string>
+ <string>236.ImportedFromIB2</string>
+ <string>239.IBPluginDependency</string>
+ <string>239.ImportedFromIB2</string>
+ <string>24.IBEditorWindowLastContentRect</string>
+ <string>24.IBPluginDependency</string>
+ <string>24.ImportedFromIB2</string>
+ <string>24.editorWindowContentRectSynchronizationRect</string>
+ <string>29.IBEditorWindowLastContentRect</string>
+ <string>29.IBPluginDependency</string>
+ <string>29.ImportedFromIB2</string>
+ <string>29.WindowOrigin</string>
+ <string>29.editorWindowContentRectSynchronizationRect</string>
+ <string>295.IBPluginDependency</string>
+ <string>296.IBEditorWindowLastContentRect</string>
+ <string>296.IBPluginDependency</string>
+ <string>296.editorWindowContentRectSynchronizationRect</string>
+ <string>297.IBPluginDependency</string>
+ <string>298.IBPluginDependency</string>
+ <string>346.IBPluginDependency</string>
+ <string>346.ImportedFromIB2</string>
+ <string>348.IBPluginDependency</string>
+ <string>348.ImportedFromIB2</string>
+ <string>349.IBEditorWindowLastContentRect</string>
+ <string>349.IBPluginDependency</string>
+ <string>349.ImportedFromIB2</string>
+ <string>349.editorWindowContentRectSynchronizationRect</string>
+ <string>350.IBPluginDependency</string>
+ <string>350.ImportedFromIB2</string>
+ <string>351.IBPluginDependency</string>
+ <string>351.ImportedFromIB2</string>
+ <string>354.IBPluginDependency</string>
+ <string>354.ImportedFromIB2</string>
+ <string>371.IBEditorWindowLastContentRect</string>
+ <string>371.IBPluginDependency</string>
+ <string>371.IBWindowTemplateEditedContentRect</string>
+ <string>371.NSWindowTemplate.visibleAtLaunch</string>
+ <string>371.editorWindowContentRectSynchronizationRect</string>
+ <string>371.windowTemplate.maxSize</string>
+ <string>372.IBPluginDependency</string>
+ <string>375.IBPluginDependency</string>
+ <string>376.IBEditorWindowLastContentRect</string>
+ <string>376.IBPluginDependency</string>
+ <string>377.IBPluginDependency</string>
+ <string>388.IBEditorWindowLastContentRect</string>
+ <string>388.IBPluginDependency</string>
+ <string>389.IBPluginDependency</string>
+ <string>390.IBPluginDependency</string>
+ <string>391.IBPluginDependency</string>
+ <string>392.IBPluginDependency</string>
+ <string>393.IBPluginDependency</string>
+ <string>394.IBPluginDependency</string>
+ <string>395.IBPluginDependency</string>
+ <string>396.IBPluginDependency</string>
+ <string>397.IBPluginDependency</string>
+ <string>398.IBPluginDependency</string>
+ <string>399.IBPluginDependency</string>
+ <string>400.IBPluginDependency</string>
+ <string>401.IBPluginDependency</string>
+ <string>402.IBPluginDependency</string>
+ <string>403.IBPluginDependency</string>
+ <string>404.IBPluginDependency</string>
+ <string>405.IBPluginDependency</string>
+ <string>406.IBPluginDependency</string>
+ <string>407.IBPluginDependency</string>
+ <string>408.IBPluginDependency</string>
+ <string>409.IBPluginDependency</string>
+ <string>410.IBPluginDependency</string>
+ <string>411.IBPluginDependency</string>
+ <string>412.IBPluginDependency</string>
+ <string>413.IBPluginDependency</string>
+ <string>414.IBPluginDependency</string>
+ <string>415.IBPluginDependency</string>
+ <string>416.IBPluginDependency</string>
+ <string>417.IBPluginDependency</string>
+ <string>418.IBPluginDependency</string>
+ <string>419.IBPluginDependency</string>
+ <string>450.IBPluginDependency</string>
+ <string>451.IBEditorWindowLastContentRect</string>
+ <string>451.IBPluginDependency</string>
+ <string>452.IBPluginDependency</string>
+ <string>453.IBPluginDependency</string>
+ <string>454.IBPluginDependency</string>
+ <string>457.IBPluginDependency</string>
+ <string>459.IBPluginDependency</string>
+ <string>460.IBPluginDependency</string>
+ <string>462.IBPluginDependency</string>
+ <string>465.IBPluginDependency</string>
+ <string>466.IBPluginDependency</string>
+ <string>485.IBPluginDependency</string>
+ <string>490.IBPluginDependency</string>
+ <string>491.IBEditorWindowLastContentRect</string>
+ <string>491.IBPluginDependency</string>
+ <string>492.IBPluginDependency</string>
+ <string>496.IBPluginDependency</string>
+ <string>497.IBEditorWindowLastContentRect</string>
+ <string>497.IBPluginDependency</string>
+ <string>498.IBPluginDependency</string>
+ <string>499.IBPluginDependency</string>
+ <string>5.IBPluginDependency</string>
+ <string>5.ImportedFromIB2</string>
+ <string>500.IBPluginDependency</string>
+ <string>501.IBPluginDependency</string>
+ <string>502.IBPluginDependency</string>
+ <string>503.IBPluginDependency</string>
+ <string>504.IBPluginDependency</string>
+ <string>505.IBPluginDependency</string>
+ <string>506.IBPluginDependency</string>
+ <string>507.IBPluginDependency</string>
+ <string>508.IBEditorWindowLastContentRect</string>
+ <string>508.IBPluginDependency</string>
+ <string>509.IBPluginDependency</string>
+ <string>510.IBPluginDependency</string>
+ <string>511.IBPluginDependency</string>
+ <string>512.IBPluginDependency</string>
+ <string>513.IBPluginDependency</string>
+ <string>514.IBPluginDependency</string>
+ <string>515.IBPluginDependency</string>
+ <string>516.IBPluginDependency</string>
+ <string>517.IBPluginDependency</string>
+ <string>56.IBPluginDependency</string>
+ <string>56.ImportedFromIB2</string>
+ <string>57.IBEditorWindowLastContentRect</string>
+ <string>57.IBPluginDependency</string>
+ <string>57.ImportedFromIB2</string>
+ <string>57.editorWindowContentRectSynchronizationRect</string>
+ <string>58.IBPluginDependency</string>
+ <string>58.ImportedFromIB2</string>
+ <string>72.IBPluginDependency</string>
+ <string>72.ImportedFromIB2</string>
+ <string>73.IBPluginDependency</string>
+ <string>73.ImportedFromIB2</string>
+ <string>74.IBPluginDependency</string>
+ <string>74.ImportedFromIB2</string>
+ <string>75.IBPluginDependency</string>
+ <string>75.ImportedFromIB2</string>
+ <string>77.IBPluginDependency</string>
+ <string>77.ImportedFromIB2</string>
+ <string>78.IBPluginDependency</string>
+ <string>78.ImportedFromIB2</string>
+ <string>79.IBPluginDependency</string>
+ <string>79.ImportedFromIB2</string>
+ <string>80.IBPluginDependency</string>
+ <string>80.ImportedFromIB2</string>
+ <string>81.IBEditorWindowLastContentRect</string>
+ <string>81.IBPluginDependency</string>
+ <string>81.ImportedFromIB2</string>
+ <string>81.editorWindowContentRectSynchronizationRect</string>
+ <string>82.IBPluginDependency</string>
+ <string>82.ImportedFromIB2</string>
+ <string>83.IBPluginDependency</string>
+ <string>83.ImportedFromIB2</string>
+ <string>92.IBPluginDependency</string>
+ <string>92.ImportedFromIB2</string>
+ </object>
+ <object class="NSMutableArray" key="dict.values">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{522, 812}, {146, 23}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{436, 809}, {64, 6}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{753, 187}, {275, 113}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{608, 612}, {275, 83}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{547, 180}, {254, 283}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{187, 434}, {243, 243}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{608, 612}, {167, 43}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{753, 217}, {238, 103}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{608, 612}, {241, 103}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{654, 239}, {194, 73}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{525, 802}, {197, 73}}</string>
+ <string>{{380, 836}, {512, 20}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{74, 862}</string>
+ <string>{{6, 978}, {478, 20}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>{{604, 269}, {231, 43}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>{{475, 832}, {234, 43}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{746, 287}, {220, 133}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{608, 612}, {215, 63}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{380, 496}, {480, 360}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>{{380, 496}, {480, 360}}</string>
+ <integer value="1"/>
+ <string>{{33, 99}, {480, 360}}</string>
+ <string>{3.40282e+38, 3.40282e+38}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>{{591, 420}, {83, 43}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>{{523, 2}, {178, 283}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>{{753, 197}, {170, 63}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>{{725, 289}, {246, 23}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>{{674, 260}, {204, 183}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>{{878, 180}, {164, 173}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{286, 129}, {275, 183}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{23, 794}, {245, 183}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{452, 109}, {196, 203}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>{{145, 474}, {199, 203}}</string>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ <string>com.apple.InterfaceBuilder.CocoaPlugin</string>
+ <integer value="1"/>
+ </object>
+ </object>
+ <object class="NSMutableDictionary" key="unlocalizedProperties">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference key="dict.sortedKeys" ref="0"/>
+ <object class="NSMutableArray" key="dict.values">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ </object>
+ </object>
+ <nil key="activeLocalization"/>
+ <object class="NSMutableDictionary" key="localizations">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <reference key="dict.sortedKeys" ref="0"/>
+ <object class="NSMutableArray" key="dict.values">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ </object>
+ </object>
+ <nil key="sourceID"/>
+ <int key="maxID">532</int>
+ </object>
+ <object class="IBClassDescriber" key="IBDocument.Classes">
+ <object class="NSMutableArray" key="referencedPartialClassDescriptions">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="IBPartialClassDescription">
+ <string key="className">TestAppAppDelegate</string>
+ <string key="superclassName">NSObject</string>
+ <object class="NSMutableDictionary" key="outlets">
+ <string key="NS.key.0">window</string>
+ <string key="NS.object.0">NSWindow</string>
+ </object>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBProjectSource</string>
+ <string key="minorKey">TestAppAppDelegate.h</string>
+ </object>
+ </object>
+ </object>
+ <object class="NSMutableArray" key="referencedPartialClassDescriptionsV3.2+">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSApplication</string>
+ <string key="superclassName">NSResponder</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier" id="822405504">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSApplication.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSApplication</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier" id="850738725">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSApplicationScripting.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSApplication</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier" id="624831158">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSColorPanel.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSApplication</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSHelpManager.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSApplication</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSPageLayout.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSApplication</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSUserInterfaceItemSearching.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSBrowser</string>
+ <string key="superclassName">NSControl</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSBrowser.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSControl</string>
+ <string key="superclassName">NSView</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier" id="310914472">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSControl.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSDocument</string>
+ <string key="superclassName">NSObject</string>
+ <object class="NSMutableDictionary" key="actions">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSArray" key="dict.sortedKeys">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <string>printDocument:</string>
+ <string>revertDocumentToSaved:</string>
+ <string>runPageLayout:</string>
+ <string>saveDocument:</string>
+ <string>saveDocumentAs:</string>
+ <string>saveDocumentTo:</string>
+ </object>
+ <object class="NSMutableArray" key="dict.values">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <string>id</string>
+ <string>id</string>
+ <string>id</string>
+ <string>id</string>
+ <string>id</string>
+ <string>id</string>
+ </object>
+ </object>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSDocument.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSDocument</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSDocumentScripting.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSDocumentController</string>
+ <string key="superclassName">NSObject</string>
+ <object class="NSMutableDictionary" key="actions">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <object class="NSArray" key="dict.sortedKeys">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <string>clearRecentDocuments:</string>
+ <string>newDocument:</string>
+ <string>openDocument:</string>
+ <string>saveAllDocuments:</string>
+ </object>
+ <object class="NSMutableArray" key="dict.values">
+ <bool key="EncodedWithXMLCoder">YES</bool>
+ <string>id</string>
+ <string>id</string>
+ <string>id</string>
+ <string>id</string>
+ </object>
+ </object>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSDocumentController.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSFontManager</string>
+ <string key="superclassName">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier" id="946436764">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSFontManager.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSFormatter</string>
+ <string key="superclassName">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSFormatter.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSMatrix</string>
+ <string key="superclassName">NSControl</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSMatrix.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSMenu</string>
+ <string key="superclassName">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier" id="1056362899">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSMenu.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSMenuItem</string>
+ <string key="superclassName">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier" id="472958451">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSMenuItem.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSMovieView</string>
+ <string key="superclassName">NSView</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSMovieView.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSAccessibility.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <reference key="sourceIdentifier" ref="822405504"/>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <reference key="sourceIdentifier" ref="850738725"/>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <reference key="sourceIdentifier" ref="624831158"/>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <reference key="sourceIdentifier" ref="310914472"/>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSDictionaryController.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSDragging.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <reference key="sourceIdentifier" ref="946436764"/>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSFontPanel.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSKeyValueBinding.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <reference key="sourceIdentifier" ref="1056362899"/>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSNibLoading.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSOutlineView.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSPasteboard.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSSavePanel.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier" id="809545482">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSTableView.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSToolbarItem.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier" id="260078765">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSView.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSArchiver.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSClassDescription.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSError.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSFileManager.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSKeyValueCoding.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSKeyValueObserving.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSKeyedArchiver.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSObject.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSObjectScripting.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSPortCoder.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSRunLoop.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSScriptClassDescription.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSScriptKeyValueCoding.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSScriptObjectSpecifiers.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSScriptWhoseTests.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSThread.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSURL.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSURLConnection.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">Foundation.framework/Headers/NSURLDownload.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSResponder</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSInterfaceStyle.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSResponder</string>
+ <string key="superclassName">NSObject</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSResponder.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSTableView</string>
+ <string key="superclassName">NSControl</string>
+ <reference key="sourceIdentifier" ref="809545482"/>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSText</string>
+ <string key="superclassName">NSView</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSText.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSTextView</string>
+ <string key="superclassName">NSText</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSTextView.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSView</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSClipView.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSView</string>
+ <reference key="sourceIdentifier" ref="472958451"/>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSView</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSRulerView.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSView</string>
+ <string key="superclassName">NSResponder</string>
+ <reference key="sourceIdentifier" ref="260078765"/>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSWindow</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSDrawer.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSWindow</string>
+ <string key="superclassName">NSResponder</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSWindow.h</string>
+ </object>
+ </object>
+ <object class="IBPartialClassDescription">
+ <string key="className">NSWindow</string>
+ <object class="IBClassDescriptionSource" key="sourceIdentifier">
+ <string key="majorKey">IBFrameworkSource</string>
+ <string key="minorKey">AppKit.framework/Headers/NSWindowScripting.h</string>
+ </object>
+ </object>
+ </object>
+ </object>
+ <int key="IBDocument.localizationMode">0</int>
+ <object class="NSMutableDictionary" key="IBDocument.PluginDeclaredDependencyDefaults">
+ <string key="NS.key.0">com.apple.InterfaceBuilder.CocoaPlugin.macosx</string>
+ <integer value="1060" key="NS.object.0"/>
+ </object>
+ <object class="NSMutableDictionary" key="IBDocument.PluginDeclaredDevelopmentDependencies">
+ <string key="NS.key.0">com.apple.InterfaceBuilder.CocoaPlugin.InterfaceBuilder3</string>
+ <integer value="3000" key="NS.object.0"/>
+ </object>
+ <bool key="IBDocument.PluginDeclaredDependenciesTrackSystemTargetVersion">YES</bool>
+ <string key="IBDocument.LastKnownRelativeProjectPath">../TestApp.xcodeproj</string>
+ <int key="IBDocument.defaultPropertyAccessControl">3</int>
+ </data>
+</archive>
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16be.strings b/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16be.strings
new file mode 100644
index 0000000000..580783735f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16be.strings
Binary files differ
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16le.strings b/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16le.strings
new file mode 100644
index 0000000000..eeb383784c
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16le.strings
Binary files differ
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json b/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json
new file mode 100644
index 0000000000..2db2b1c7c6
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json
@@ -0,0 +1,58 @@
+{
+ "images" : [
+ {
+ "idiom" : "mac",
+ "size" : "16x16",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "16x16",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "32x32",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "32x32",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "128x128",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "128x128",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "256x256",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "256x256",
+ "scale" : "2x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "512x512",
+ "scale" : "1x"
+ },
+ {
+ "idiom" : "mac",
+ "size" : "512x512",
+ "scale" : "2x"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
\ No newline at end of file
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json b/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json
new file mode 100644
index 0000000000..0a87b6edc6
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json
@@ -0,0 +1,23 @@
+{
+ "images" : [
+ {
+ "idiom" : "universal",
+ "scale" : "1x",
+ "filename" : "super_sylvain.png"
+ },
+ {
+ "idiom" : "universal",
+ "scale" : "2x",
+ "filename" : "super_sylvain@2x.png"
+ },
+ {
+ "idiom" : "universal",
+ "scale" : "3x",
+ "filename" : "super_sylvain@3x.png"
+ }
+ ],
+ "info" : {
+ "version" : 1,
+ "author" : "xcode"
+ }
+}
\ No newline at end of file
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.png b/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.png
new file mode 100644
index 0000000000..0ba769182f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.png
Binary files differ
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.png b/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.png
new file mode 100644
index 0000000000..edfa6a5682
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.png
Binary files differ
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.png b/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.png
new file mode 100644
index 0000000000..e0652efc72
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.png
Binary files differ
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/TestApp-Info.plist b/third_party/python/gyp/test/mac/app-bundle/TestApp/TestApp-Info.plist
new file mode 100644
index 0000000000..e005852f9f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/TestApp-Info.plist
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>BuildMachineOSBuild</key>
+ <string>Doesn't matter, will be overwritten</string>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIconFile</key>
+ <string></string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.${PRODUCT_NAME:rfc1034identifier}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>ause</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSMinimumSystemVersion</key>
+ <string>${MACOSX_DEPLOYMENT_TARGET}</string>
+ <key>NSMainNibFile</key>
+ <string>MainMenu</string>
+ <key>NSPrincipalClass</key>
+ <string>NSApplication</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.h b/third_party/python/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.h
new file mode 100644
index 0000000000..518645eae9
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.h
@@ -0,0 +1,13 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Cocoa/Cocoa.h>
+
+@interface TestAppAppDelegate : NSObject <NSApplicationDelegate> {
+ NSWindow *window;
+}
+
+@property (assign) IBOutlet NSWindow *window;
+
+@end
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.m b/third_party/python/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.m
new file mode 100644
index 0000000000..9aafa42000
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.m
@@ -0,0 +1,15 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "TestAppAppDelegate.h"
+
+@implementation TestAppAppDelegate
+
+@synthesize window;
+
+- (void)applicationDidFinishLaunching:(NSNotification *)aNotification {
+ // Insert code here to initialize your application
+}
+
+@end
diff --git a/third_party/python/gyp/test/mac/app-bundle/TestApp/main.m b/third_party/python/gyp/test/mac/app-bundle/TestApp/main.m
new file mode 100644
index 0000000000..df6a12d065
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/TestApp/main.m
@@ -0,0 +1,10 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Cocoa/Cocoa.h>
+
+int main(int argc, char *argv[])
+{
+ return NSApplicationMain(argc, (const char **) argv);
+}
diff --git a/third_party/python/gyp/test/mac/app-bundle/empty.c b/third_party/python/gyp/test/mac/app-bundle/empty.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/empty.c
diff --git a/third_party/python/gyp/test/mac/app-bundle/test-assets-catalog.gyp b/third_party/python/gyp/test/mac/app-bundle/test-assets-catalog.gyp
new file mode 100644
index 0000000000..25f94a12d0
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/test-assets-catalog.gyp
@@ -0,0 +1,43 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'dep_framework',
+ 'product_name': 'Dependency Framework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'empty.c', ],
+ },
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test App Assets Catalog Gyp',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'dependencies': [ 'dep_framework', ],
+ 'sources': [
+ 'TestApp/main.m',
+ 'TestApp/TestApp_Prefix.pch',
+ 'TestApp/TestAppAppDelegate.h',
+ 'TestApp/TestAppAppDelegate.m',
+ ],
+ 'mac_bundle_resources': [
+ 'TestApp/English.lproj/InfoPlist.strings', # UTF-8
+ 'TestApp/English.lproj/utf-16be.strings',
+ 'TestApp/English.lproj/utf-16le.strings',
+ 'TestApp/English.lproj/MainMenu.xib',
+ 'TestApp/Images.xcassets',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Cocoa.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
+ 'MACOSX_DEPLOYMENT_TARGET': '10.9',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/app-bundle/test-error.gyp b/third_party/python/gyp/test/mac/app-bundle/test-error.gyp
new file mode 100644
index 0000000000..370772cc31
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/test-error.gyp
@@ -0,0 +1,31 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test App Gyp',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'TestApp/main.m',
+ 'TestApp/TestApp_Prefix.pch',
+ 'TestApp/TestAppAppDelegate.h',
+ 'TestApp/TestAppAppDelegate.m',
+ ],
+ 'mac_bundle_resources': [
+ 'TestApp/English.lproj/InfoPlist-error.strings',
+ 'TestApp/English.lproj/MainMenu.xib',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Cocoa.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/app-bundle/test.gyp b/third_party/python/gyp/test/mac/app-bundle/test.gyp
new file mode 100644
index 0000000000..21973c3623
--- /dev/null
+++ b/third_party/python/gyp/test/mac/app-bundle/test.gyp
@@ -0,0 +1,41 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'dep_framework',
+ 'product_name': 'Dependency Framework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'empty.c', ],
+ },
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test App Gyp',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'dependencies': [ 'dep_framework', ],
+ 'sources': [
+ 'TestApp/main.m',
+ 'TestApp/TestApp_Prefix.pch',
+ 'TestApp/TestAppAppDelegate.h',
+ 'TestApp/TestAppAppDelegate.m',
+ ],
+ 'mac_bundle_resources': [
+ 'TestApp/English.lproj/InfoPlist.strings', # UTF-8
+ 'TestApp/English.lproj/utf-16be.strings',
+ 'TestApp/English.lproj/utf-16le.strings',
+ 'TestApp/English.lproj/MainMenu.xib',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Cocoa.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/archs/empty_main.cc b/third_party/python/gyp/test/mac/archs/empty_main.cc
new file mode 100644
index 0000000000..237c8ce181
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/empty_main.cc
@@ -0,0 +1 @@
+int main() {}
diff --git a/third_party/python/gyp/test/mac/archs/file.mm b/third_party/python/gyp/test/mac/archs/file.mm
new file mode 100644
index 0000000000..d0b39d1f6d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/file.mm
@@ -0,0 +1 @@
+MyInt f() { return 0; }
diff --git a/third_party/python/gyp/test/mac/archs/file_a.cc b/third_party/python/gyp/test/mac/archs/file_a.cc
new file mode 100644
index 0000000000..7307873c83
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/file_a.cc
@@ -0,0 +1,8 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "file_a.h"
+
+void DependentFunctionA() {
+}
diff --git a/third_party/python/gyp/test/mac/archs/file_a.h b/third_party/python/gyp/test/mac/archs/file_a.h
new file mode 100644
index 0000000000..7439d13182
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/file_a.h
@@ -0,0 +1,10 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef _INCLUDED_TEST_MAC_DEPENDENCIES_FILE_A_H_
+#define _INCLUDED_TEST_MAC_DEPENDENCIES_FILE_A_H_
+
+void DependentFunctionA();
+
+#endif // _INCLUDED_TEST_MAC_DEPENDENCIES_FILE_A_H_
diff --git a/third_party/python/gyp/test/mac/archs/file_b.cc b/third_party/python/gyp/test/mac/archs/file_b.cc
new file mode 100644
index 0000000000..72d59cbfb4
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/file_b.cc
@@ -0,0 +1,8 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "file_b.h"
+
+void DependentFunctionB() {
+}
diff --git a/third_party/python/gyp/test/mac/archs/file_b.h b/third_party/python/gyp/test/mac/archs/file_b.h
new file mode 100644
index 0000000000..eb272ece55
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/file_b.h
@@ -0,0 +1,10 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef _INCLUDED_TEST_MAC_DEPENDENCIES_FILE_B_H_
+#define _INCLUDED_TEST_MAC_DEPENDENCIES_FILE_B_H_
+
+void DependentFunctionB();
+
+#endif // _INCLUDED_TEST_MAC_DEPENDENCIES_FILE_B_H_
diff --git a/third_party/python/gyp/test/mac/archs/file_c.cc b/third_party/python/gyp/test/mac/archs/file_c.cc
new file mode 100644
index 0000000000..ca39f7a671
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/file_c.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "file_a.h"
+#include "file_b.h"
+
+void PublicFunctionC() {
+ DependentFunctionA();
+ DependentFunctionB();
+}
diff --git a/third_party/python/gyp/test/mac/archs/file_d.cc b/third_party/python/gyp/test/mac/archs/file_d.cc
new file mode 100644
index 0000000000..c40911cdca
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/file_d.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "file_a.h"
+#include "file_b.h"
+
+void PublicFunctionD() {
+ DependentFunctionA();
+ DependentFunctionB();
+}
diff --git a/third_party/python/gyp/test/mac/archs/header.h b/third_party/python/gyp/test/mac/archs/header.h
new file mode 100644
index 0000000000..0716e500c5
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/header.h
@@ -0,0 +1 @@
+typedef int MyInt;
diff --git a/third_party/python/gyp/test/mac/archs/my_file.cc b/third_party/python/gyp/test/mac/archs/my_file.cc
new file mode 100644
index 0000000000..94216a74df
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/my_file.cc
@@ -0,0 +1,4 @@
+/* Copyright (c) 2012 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+int x = 1;
diff --git a/third_party/python/gyp/test/mac/archs/my_main_file.cc b/third_party/python/gyp/test/mac/archs/my_main_file.cc
new file mode 100644
index 0000000000..f1fa06f276
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/my_main_file.cc
@@ -0,0 +1,9 @@
+/* Copyright (c) 2012 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+#include <stdio.h>
+extern int x;
+int main() {
+ printf("hello, world %d\n", x);
+}
+
diff --git a/third_party/python/gyp/test/mac/archs/test-archs-multiarch.gyp b/third_party/python/gyp/test/mac/archs/test-archs-multiarch.gyp
new file mode 100644
index 0000000000..567e8a6653
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/test-archs-multiarch.gyp
@@ -0,0 +1,92 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'static_32_64',
+ 'type': 'static_library',
+ 'sources': [ 'my_file.cc' ],
+ 'xcode_settings': {
+ 'ARCHS': [ 'i386', 'x86_64' ],
+ },
+ },
+ {
+ 'target_name': 'shared_32_64',
+ 'type': 'shared_library',
+ 'sources': [ 'my_file.cc' ],
+ 'xcode_settings': {
+ 'ARCHS': [ 'i386', 'x86_64' ],
+ },
+ },
+ {
+ 'target_name': 'shared_32_64_bundle',
+ 'product_name': 'My Framework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'my_file.cc' ],
+ 'xcode_settings': {
+ 'ARCHS': [ 'i386', 'x86_64' ],
+ },
+ },
+ {
+ 'target_name': 'module_32_64',
+ 'type': 'loadable_module',
+ 'sources': [ 'my_file.cc' ],
+ 'xcode_settings': {
+ 'ARCHS': [ 'i386', 'x86_64' ],
+ },
+ },
+ {
+ 'target_name': 'module_32_64_bundle',
+ 'product_name': 'My Bundle',
+ 'type': 'loadable_module',
+ 'mac_bundle': 1,
+ 'sources': [ 'my_file.cc' ],
+ 'xcode_settings': {
+ 'ARCHS': [ 'i386', 'x86_64' ],
+ },
+ },
+ {
+ 'target_name': 'exe_32_64',
+ 'type': 'executable',
+ 'sources': [ 'empty_main.cc' ],
+ 'xcode_settings': {
+ 'ARCHS': [ 'i386', 'x86_64' ],
+ },
+ },
+ {
+ 'target_name': 'exe_32_64_bundle',
+ 'product_name': 'Test App',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [ 'empty_main.cc' ],
+ 'xcode_settings': {
+ 'ARCHS': [ 'i386', 'x86_64' ],
+ },
+ },
+ # This only needs to compile.
+ {
+ 'target_name': 'precompiled_prefix_header_mm_32_64',
+ 'type': 'shared_library',
+ 'sources': [ 'file.mm', ],
+ 'xcode_settings': {
+ 'GCC_PREFIX_HEADER': 'header.h',
+ 'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
+ },
+ },
+ # This does not compile but should not cause generation errors.
+ {
+ 'target_name': 'exe_32_64_no_sources',
+ 'type': 'executable',
+ 'dependencies': [
+ 'static_32_64',
+ ],
+ 'sources': [],
+ 'xcode_settings': {
+ 'ARCHS': ['i386', 'x86_64'],
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/mac/archs/test-archs-x86_64.gyp b/third_party/python/gyp/test/mac/archs/test-archs-x86_64.gyp
new file mode 100644
index 0000000000..d11a896273
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/test-archs-x86_64.gyp
@@ -0,0 +1,27 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'lib',
+ 'product_name': 'Test64',
+ 'type': 'static_library',
+ 'sources': [ 'my_file.cc' ],
+ 'xcode_settings': {
+ 'ARCHS': [ 'x86_64' ],
+ },
+ },
+ {
+ 'target_name': 'exe',
+ 'product_name': 'Test64',
+ 'type': 'executable',
+ 'dependencies': [ 'lib' ],
+ 'sources': [ 'my_main_file.cc' ],
+ 'xcode_settings': {
+ 'ARCHS': [ 'x86_64' ],
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/mac/archs/test-dependencies.gyp b/third_party/python/gyp/test/mac/archs/test-dependencies.gyp
new file mode 100644
index 0000000000..0431f5f2f4
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/test-dependencies.gyp
@@ -0,0 +1,92 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'xcode_settings': {
+ 'ARCHS': ['i386', 'x86_64'],
+ },
+ },
+ 'targets': [
+ {
+ 'target_name': 'target_a',
+ 'type': 'static_library',
+ 'sources': [
+ 'file_a.cc',
+ 'file_a.h',
+ ],
+ },
+ {
+ 'target_name': 'target_b',
+ 'type': 'static_library',
+ 'sources': [
+ 'file_b.cc',
+ 'file_b.h',
+ ],
+ },
+ {
+ 'target_name': 'target_c_standalone_helper',
+ 'type': 'loadable_module',
+ 'hard_dependency': 1,
+ 'dependencies': [
+ 'target_a',
+ 'target_b',
+ ],
+ 'sources': [
+ 'file_c.cc',
+ ],
+ },
+ {
+ 'target_name': 'target_c_standalone',
+ 'type': 'none',
+ 'dependencies': [
+ 'target_c_standalone_helper',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'Package C',
+ 'inputs': [],
+ 'outputs': [
+ '<(PRODUCT_DIR)/libc_standalone.a',
+ ],
+ 'action': [
+ 'touch',
+ '<@(_outputs)',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'target_d_standalone_helper',
+ 'type': 'shared_library',
+ 'dependencies': [
+ 'target_a',
+ 'target_b',
+ ],
+ 'sources': [
+ 'file_d.cc',
+ ],
+ },
+ {
+ 'target_name': 'target_d_standalone',
+ 'type': 'none',
+ 'dependencies': [
+ 'target_d_standalone_helper',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'Package D',
+ 'inputs': [],
+ 'outputs': [
+ '<(PRODUCT_DIR)/libd_standalone.a',
+ ],
+ 'action': [
+ 'touch',
+ '<@(_outputs)',
+ ],
+ },
+ ],
+ }
+ ],
+}
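The archs fixtures above (including the hard_dependency packaging trick in test-dependencies.gyp) are exercised by driver scripts such as gyptest-archs.py at the end of this section. A minimal sketch of such a driver, reusing the TestGyp helpers the other gyptest-*.py files here use and checking the produced slices with the standard lipo tool; the built library filename is an assumption for the make/ninja generators:

    import subprocess
    import sys

    import TestGyp

    if sys.platform == 'darwin':
      test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
      test.run_gyp('test-archs-multiarch.gyp', chdir='archs')
      test.build('test-archs-multiarch.gyp', test.ALL, chdir='archs')

      # 'lipo -info' lists the slices in a (possibly fat) binary; the
      # static_32_64 target above asks for both i386 and x86_64.
      # 'libstatic_32_64.a' is an assumed output name.
      binary = test.built_file_path('libstatic_32_64.a', chdir='archs')
      info = subprocess.check_output(['lipo', '-info', binary]).decode()
      if 'i386' not in info or 'x86_64' not in info:
        test.fail_test()
      test.pass_test()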
diff --git a/third_party/python/gyp/test/mac/archs/test-no-archs.gyp b/third_party/python/gyp/test/mac/archs/test-no-archs.gyp
new file mode 100644
index 0000000000..8f3b6b47cc
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/test-no-archs.gyp
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'lib',
+ 'product_name': 'Test',
+ 'type': 'static_library',
+ 'sources': [ 'my_file.cc' ],
+ },
+ {
+ 'target_name': 'exe',
+ 'product_name': 'Test',
+ 'type': 'executable',
+ 'dependencies': [ 'lib' ],
+ 'sources': [ 'my_main_file.cc' ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/mac/archs/test-valid-archs.gyp b/third_party/python/gyp/test/mac/archs/test-valid-archs.gyp
new file mode 100644
index 0000000000..c90ec1fe9b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/archs/test-valid-archs.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'lib',
+ 'product_name': 'Test',
+ 'type': 'static_library',
+ 'sources': [ 'my_file.cc' ],
+ 'xcode_settings': {
+ 'ARCHS': ['i386', 'x86_64', 'unknown-arch'],
+ 'VALID_ARCHS': ['x86_64'],
+ },
+ },
+ {
+ 'target_name': 'exe',
+ 'product_name': 'Test',
+ 'type': 'executable',
+ 'dependencies': [ 'lib' ],
+ 'sources': [ 'my_main_file.cc' ],
+ 'xcode_settings': {
+ 'ARCHS': ['i386', 'x86_64', 'unknown-arch'],
+ 'VALID_ARCHS': ['x86_64'],
+ },
+ }]
+}
diff --git a/third_party/python/gyp/test/mac/bundle-resources/change.sh b/third_party/python/gyp/test/mac/bundle-resources/change.sh
new file mode 100755
index 0000000000..6d0fe6c7c2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/bundle-resources/change.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+tr a-z A-Z < "${1}" > "${2}"
diff --git a/third_party/python/gyp/test/mac/bundle-resources/executable-file.sh b/third_party/python/gyp/test/mac/bundle-resources/executable-file.sh
new file mode 100755
index 0000000000..796953a1a2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/bundle-resources/executable-file.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+echo echo echo echo cho ho o o
diff --git a/third_party/python/gyp/test/mac/bundle-resources/secret.txt b/third_party/python/gyp/test/mac/bundle-resources/secret.txt
new file mode 100644
index 0000000000..8baef1b4ab
--- /dev/null
+++ b/third_party/python/gyp/test/mac/bundle-resources/secret.txt
@@ -0,0 +1 @@
+abc
diff --git a/third_party/python/gyp/test/mac/bundle-resources/test.gyp b/third_party/python/gyp/test/mac/bundle-resources/test.gyp
new file mode 100644
index 0000000000..af034ce3f4
--- /dev/null
+++ b/third_party/python/gyp/test/mac/bundle-resources/test.gyp
@@ -0,0 +1,59 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'resource',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'mac_bundle_resources': [
+ 'secret.txt',
+ 'executable-file.sh',
+ ],
+ },
+ # A rule with process_outputs_as_mac_bundle_resources should copy files
+ # into the Resources folder.
+ {
+ 'target_name': 'source_rule',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'secret.txt',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'bundlerule',
+ 'extension': 'txt',
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).txt',
+ ],
+ 'action': ['./change.sh', '<(RULE_INPUT_PATH)', '<@(_outputs)'],
+ 'message': 'Running rule on <(RULE_INPUT_PATH)',
+ 'process_outputs_as_mac_bundle_resources': 1,
+ },
+ ],
+ },
+ # So should an ordinary rule acting on mac_bundle_resources.
+ {
+ 'target_name': 'resource_rule',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'mac_bundle_resources': [
+ 'secret.txt',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'bundlerule',
+ 'extension': 'txt',
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).txt',
+ ],
+ 'action': ['./change.sh', '<(RULE_INPUT_PATH)', '<@(_outputs)'],
+ 'message': 'Running rule on <(RULE_INPUT_PATH)',
+ },
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/mac/cflags/ccfile.cc b/third_party/python/gyp/test/mac/cflags/ccfile.cc
new file mode 100644
index 0000000000..1a54d18eec
--- /dev/null
+++ b/third_party/python/gyp/test/mac/cflags/ccfile.cc
@@ -0,0 +1,7 @@
+#ifdef CFLAG
+#error CFLAG should not be set
+#endif
+
+#ifndef CCFLAG
+#error CCFLAG should be set
+#endif
diff --git a/third_party/python/gyp/test/mac/cflags/ccfile_withcflags.cc b/third_party/python/gyp/test/mac/cflags/ccfile_withcflags.cc
new file mode 100644
index 0000000000..de078a0641
--- /dev/null
+++ b/third_party/python/gyp/test/mac/cflags/ccfile_withcflags.cc
@@ -0,0 +1,7 @@
+#ifndef CFLAG
+#error CFLAG should be set
+#endif
+
+#ifndef CCFLAG
+#error CCFLAG should be set
+#endif
diff --git a/third_party/python/gyp/test/mac/cflags/cfile.c b/third_party/python/gyp/test/mac/cflags/cfile.c
new file mode 100644
index 0000000000..0af9d0af5d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/cflags/cfile.c
@@ -0,0 +1,7 @@
+#ifndef CFLAG
+#error CFLAG should be set
+#endif
+
+#ifdef CCFLAG
+#error CCFLAG should not be set
+#endif
diff --git a/third_party/python/gyp/test/mac/cflags/cppfile.cpp b/third_party/python/gyp/test/mac/cflags/cppfile.cpp
new file mode 100644
index 0000000000..1a54d18eec
--- /dev/null
+++ b/third_party/python/gyp/test/mac/cflags/cppfile.cpp
@@ -0,0 +1,7 @@
+#ifdef CFLAG
+#error CFLAG should not be set
+#endif
+
+#ifndef CCFLAG
+#error CCFLAG should be set
+#endif
diff --git a/third_party/python/gyp/test/mac/cflags/cppfile_withcflags.cpp b/third_party/python/gyp/test/mac/cflags/cppfile_withcflags.cpp
new file mode 100644
index 0000000000..de078a0641
--- /dev/null
+++ b/third_party/python/gyp/test/mac/cflags/cppfile_withcflags.cpp
@@ -0,0 +1,7 @@
+#ifndef CFLAG
+#error CFLAG should be set
+#endif
+
+#ifndef CCFLAG
+#error CCFLAG should be set
+#endif
diff --git a/third_party/python/gyp/test/mac/cflags/cxxfile.cxx b/third_party/python/gyp/test/mac/cflags/cxxfile.cxx
new file mode 100644
index 0000000000..1a54d18eec
--- /dev/null
+++ b/third_party/python/gyp/test/mac/cflags/cxxfile.cxx
@@ -0,0 +1,7 @@
+#ifdef CFLAG
+#error CFLAG should not be set
+#endif
+
+#ifndef CCFLAG
+#error CCFLAG should be set
+#endif
diff --git a/third_party/python/gyp/test/mac/cflags/cxxfile_withcflags.cxx b/third_party/python/gyp/test/mac/cflags/cxxfile_withcflags.cxx
new file mode 100644
index 0000000000..de078a0641
--- /dev/null
+++ b/third_party/python/gyp/test/mac/cflags/cxxfile_withcflags.cxx
@@ -0,0 +1,7 @@
+#ifndef CFLAG
+#error CFLAG should be set
+#endif
+
+#ifndef CCFLAG
+#error CCFLAG should be set
+#endif
diff --git a/third_party/python/gyp/test/mac/cflags/mfile.m b/third_party/python/gyp/test/mac/cflags/mfile.m
new file mode 100644
index 0000000000..0af9d0af5d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/cflags/mfile.m
@@ -0,0 +1,7 @@
+#ifndef CFLAG
+#error CFLAG should be set
+#endif
+
+#ifdef CCFLAG
+#error CCFLAG should not be set
+#endif
diff --git a/third_party/python/gyp/test/mac/cflags/mmfile.mm b/third_party/python/gyp/test/mac/cflags/mmfile.mm
new file mode 100644
index 0000000000..1a54d18eec
--- /dev/null
+++ b/third_party/python/gyp/test/mac/cflags/mmfile.mm
@@ -0,0 +1,7 @@
+#ifdef CFLAG
+#error CFLAG should not be set
+#endif
+
+#ifndef CCFLAG
+#error CCFLAG should be set
+#endif
diff --git a/third_party/python/gyp/test/mac/cflags/mmfile_withcflags.mm b/third_party/python/gyp/test/mac/cflags/mmfile_withcflags.mm
new file mode 100644
index 0000000000..de078a0641
--- /dev/null
+++ b/third_party/python/gyp/test/mac/cflags/mmfile_withcflags.mm
@@ -0,0 +1,7 @@
+#ifndef CFLAG
+#error CFLAG should be set
+#endif
+
+#ifndef CCFLAG
+#error CCFLAG should be set
+#endif
diff --git a/third_party/python/gyp/test/mac/cflags/test.gyp b/third_party/python/gyp/test/mac/cflags/test.gyp
new file mode 100644
index 0000000000..d330a548f6
--- /dev/null
+++ b/third_party/python/gyp/test/mac/cflags/test.gyp
@@ -0,0 +1,132 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'mytarget',
+ 'type': 'shared_library',
+ 'sources': [
+ 'cfile.c',
+ 'mfile.m',
+ 'ccfile.cc',
+ 'cppfile.cpp',
+ 'cxxfile.cxx',
+ 'mmfile.mm',
+ ],
+ 'xcode_settings': {
+ # Normally, defines would go in 'defines' instead. This is just for
+ # testing.
+ 'OTHER_CFLAGS': [
+ '-DCFLAG',
+ ],
+ 'OTHER_CPLUSPLUSFLAGS': [
+ '-DCCFLAG',
+ ],
+ 'GCC_C_LANGUAGE_STANDARD': 'c99',
+ },
+ },
+ {
+ 'target_name': 'mytarget_reuse_cflags',
+ 'type': 'shared_library',
+ 'sources': [
+ 'cfile.c',
+ 'mfile.m',
+ 'ccfile_withcflags.cc',
+ 'cppfile_withcflags.cpp',
+ 'cxxfile_withcflags.cxx',
+ 'mmfile_withcflags.mm',
+ ],
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-DCFLAG',
+ ],
+ 'OTHER_CPLUSPLUSFLAGS': [
+ '$OTHER_CFLAGS',
+ '-DCCFLAG',
+ ],
+ # This is a C-only flag, to check these don't get added to C++ files.
+ 'GCC_C_LANGUAGE_STANDARD': 'c99',
+ },
+ },
+ {
+ 'target_name': 'mytarget_inherit_cflags',
+ 'type': 'shared_library',
+ 'sources': [
+ 'cfile.c',
+ 'mfile.m',
+ 'ccfile_withcflags.cc',
+ 'cppfile_withcflags.cpp',
+ 'cxxfile_withcflags.cxx',
+ 'mmfile_withcflags.mm',
+ ],
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-DCFLAG',
+ ],
+ 'OTHER_CPLUSPLUSFLAGS': [
+ '$inherited',
+ '-DCCFLAG',
+ ],
+ 'GCC_C_LANGUAGE_STANDARD': 'c99',
+ },
+ },
+ {
+ 'target_name': 'mytarget_inherit_cflags_parens',
+ 'type': 'shared_library',
+ 'sources': [
+ 'cfile.c',
+ 'mfile.m',
+ 'ccfile_withcflags.cc',
+ 'cppfile_withcflags.cpp',
+ 'cxxfile_withcflags.cxx',
+ 'mmfile_withcflags.mm',
+ ],
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-DCFLAG',
+ ],
+ 'OTHER_CPLUSPLUSFLAGS': [
+ '$(inherited)',
+ '-DCCFLAG',
+ ],
+ 'GCC_C_LANGUAGE_STANDARD': 'c99',
+ },
+ },
+ {
+ 'target_name': 'mytarget_inherit_cflags_braces',
+ 'type': 'shared_library',
+ 'sources': [
+ 'cfile.c',
+ 'mfile.m',
+ 'ccfile_withcflags.cc',
+ 'cppfile_withcflags.cpp',
+ 'cxxfile_withcflags.cxx',
+ 'mmfile_withcflags.mm',
+ ],
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-DCFLAG',
+ ],
+ 'OTHER_CPLUSPLUSFLAGS': [
+ '${inherited}',
+ '-DCCFLAG',
+ ],
+ 'GCC_C_LANGUAGE_STANDARD': 'c99',
+ },
+ },
+ {
+ 'target_name': 'ansi_standard',
+ 'type': 'shared_library',
+ 'sources': [
+ 'cfile.c',
+ ],
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-DCFLAG',
+ ],
+ 'GCC_C_LANGUAGE_STANDARD': 'ansi',
+ },
+ },
+ ],
+}
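The #error checks in the cflags source files above make successful compilation the entire assertion: OTHER_CFLAGS must reach only C and Objective-C sources, and OTHER_CPLUSPLUSFLAGS only C++ and Objective-C++ sources. A driver for this fixture therefore only has to build every target; a minimal sketch in the style of the gyptest scripts later in this section:

    import sys

    import TestGyp

    if sys.platform == 'darwin':
      test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
      test.run_gyp('test.gyp', chdir='cflags')
      # Building is the whole test: if a per-language flag leaks into the
      # wrong source files, an #error fires and the build fails.
      test.build('test.gyp', test.ALL, chdir='cflags')
      test.pass_test()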
diff --git a/third_party/python/gyp/test/mac/clang-cxx-language-standard/c++11.cc b/third_party/python/gyp/test/mac/clang-cxx-language-standard/c++11.cc
new file mode 100644
index 0000000000..756dc1c7e6
--- /dev/null
+++ b/third_party/python/gyp/test/mac/clang-cxx-language-standard/c++11.cc
@@ -0,0 +1,8 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+static_assert(__cplusplus == 201103L, "wrong c++ standard version");
+
+int main() { return 0; }
+
diff --git a/third_party/python/gyp/test/mac/clang-cxx-language-standard/c++98.cc b/third_party/python/gyp/test/mac/clang-cxx-language-standard/c++98.cc
new file mode 100644
index 0000000000..a6a00c70f4
--- /dev/null
+++ b/third_party/python/gyp/test/mac/clang-cxx-language-standard/c++98.cc
@@ -0,0 +1,24 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#if __cplusplus != 199711L
+#error wrong c++ standard version
+#endif
+
+enum cxx11_keywords {
+ alignas,
+ alignof,
+ char16_t,
+ char32_t,
+ constexpr,
+ decltype,
+ noexcept,
+ nullptr,
+ override,
+ static_assert,
+ thread_local,
+};
+
+int main() { return 0; }
+
diff --git a/third_party/python/gyp/test/mac/clang-cxx-language-standard/clang-cxx-language-standard.gyp b/third_party/python/gyp/test/mac/clang-cxx-language-standard/clang-cxx-language-standard.gyp
new file mode 100644
index 0000000000..eb60bbd0e8
--- /dev/null
+++ b/third_party/python/gyp/test/mac/clang-cxx-language-standard/clang-cxx-language-standard.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'make_global_settings': [
+ ['CC', '/usr/bin/clang'],
+ ['CXX', '/usr/bin/clang++'],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'c++98',
+ 'type': 'executable',
+ 'sources': [ 'c++98.cc', ],
+ 'xcode_settings': {
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'CLANG_CXX_LANGUAGE_STANDARD': 'c++98',
+ },
+ },
+ {
+ 'target_name': 'c++11',
+ 'type': 'executable',
+ 'sources': [ 'c++11.cc', ],
+ 'xcode_settings': {
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'CLANG_CXX_LANGUAGE_STANDARD': 'c++0x',
+ },
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/mac/clang-cxx-library/clang-cxx-library.gyp b/third_party/python/gyp/test/mac/clang-cxx-library/clang-cxx-library.gyp
new file mode 100644
index 0000000000..67006e50a0
--- /dev/null
+++ b/third_party/python/gyp/test/mac/clang-cxx-library/clang-cxx-library.gyp
@@ -0,0 +1,32 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'make_global_settings': [
+ ['CC', '/usr/bin/clang'],
+ ['CXX', '/usr/bin/clang++'],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'libc++',
+ 'type': 'executable',
+ 'sources': [ 'libc++.cc', ],
+ 'xcode_settings': {
+ 'CC': 'clang',
+ # libc++ requires OS X 10.7+.
+ 'MACOSX_DEPLOYMENT_TARGET': '10.7',
+ 'CLANG_CXX_LIBRARY': 'libc++',
+ },
+ },
+ {
+ 'target_name': 'libstdc++',
+ 'type': 'executable',
+ 'sources': [ 'libstdc++.cc', ],
+ 'xcode_settings': {
+ 'CC': 'clang',
+ 'CLANG_CXX_LIBRARY': 'libstdc++',
+ },
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/mac/clang-cxx-library/libc++.cc b/third_party/python/gyp/test/mac/clang-cxx-library/libc++.cc
new file mode 100644
index 0000000000..b8d6e6b3e2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/clang-cxx-library/libc++.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <string>
+#ifndef _LIBCPP_VERSION
+#error expected std library: libc++
+#endif
+
+int main() { std::string x; return x.size(); }
+
diff --git a/third_party/python/gyp/test/mac/clang-cxx-library/libstdc++.cc b/third_party/python/gyp/test/mac/clang-cxx-library/libstdc++.cc
new file mode 100644
index 0000000000..474dbf350d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/clang-cxx-library/libstdc++.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <string>
+#ifndef __GLIBCXX__
+#error expected std library: libstdc++
+#endif
+
+int main() { std::string x; return x.size(); }
+
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/copies-with-xcode-envvars.gyp b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/copies-with-xcode-envvars.gyp
new file mode 100644
index 0000000000..c1b1241fb7
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/copies-with-xcode-envvars.gyp
@@ -0,0 +1,87 @@
+# Copyright (c) 2016 Mark Callow. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# For testing use of the UI settings & environment variables
+# available in Xcode's PBXCopyFilesBuildPhase.
+{
+'targets': [
+ {
+ 'target_name': 'copies-with-xcode-envvars',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [ 'empty.c' ],
+ 'conditions': [
+ ['OS == "ios" or OS == "mac"', {
+ 'copies': [{
+ 'destination': '$(BUILT_PRODUCTS_DIR)',
+ 'files': [
+ 'file0',
+ ],
+ }, {
+ 'destination': '$(BUILT_PRODUCTS_DIR)/$(WRAPPER_NAME)',
+ 'files': [
+ 'file1',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(EXECUTABLE_FOLDER_PATH)',
+ 'files': [
+ 'file2',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(UNLOCALIZED_RESOURCES_FOLDER_PATH)',
+ 'files': [
+ 'file3',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(UNLOCALIZED_RESOURCES_FOLDER_PATH)/testimages',
+ 'files': [
+ 'file4',
+ ],
+ }, {
+ 'destination': '$(BUILT_PRODUCTS_DIR)/$(JAVA_FOLDER_PATH)',
+ 'files': [
+ 'file5',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(FRAMEWORKS_FOLDER_PATH)',
+ 'files': [
+ 'file6',
+ ],
+ }, {
+ # NOTE: This is not an Xcode macro name but
+ # xcodeproj_file.py recognizes it and sends
+ # the output to the same place as
+ # $(FRAMEWORKS_FOLDER_PATH). xcode_emulation.py
+ # sets its value to an absolute path.
+ 'destination': '$(BUILT_FRAMEWORKS_DIR)',
+ 'files': [
+ 'file7',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(SHARED_FRAMEWORKS_FOLDER_PATH)',
+ 'files': [
+ 'file8',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(SHARED_SUPPORT_FOLDER_PATH)',
+ 'files': [
+ 'file9',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(PLUGINS_FOLDER_PATH)',
+ 'files': [
+ 'file10',
+ ],
+ }, {
+ 'destination': '<(PRODUCT_DIR)/$(XPCSERVICES_FOLDER_PATH)',
+ 'files': [
+ 'file11',
+ ],
+ }], # copies
+ }], # OS == "ios" or OS == "mac"
+ ], # conditions
+ }], # targets
+}
+
+# vim:ai:ts=4:sts=4:sw=2:expandtab:textwidth=70
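The NOTE above explains how $(BUILT_FRAMEWORKS_DIR) is special-cased; a driver for this fixture would check that each fileN landed where the corresponding Xcode variable points. A sketch, where the expanded bundle paths ('copies-with-xcode-envvars.app' for $(WRAPPER_NAME), 'Contents/Resources' for the unlocalized resources folder) are assumptions about how Xcode resolves these variables on macOS:

    import os
    import sys

    import TestGyp

    if sys.platform == 'darwin':
      test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
      test.run_gyp('copies-with-xcode-envvars.gyp',
                   chdir='copies-with-xcode-envvars')
      test.build('copies-with-xcode-envvars.gyp', test.ALL,
                 chdir='copies-with-xcode-envvars')
      app = 'copies-with-xcode-envvars.app'  # assumed $(WRAPPER_NAME) expansion
      # file1 was copied to $(BUILT_PRODUCTS_DIR)/$(WRAPPER_NAME), file3 to
      # the bundle's unlocalized resources folder.
      test.built_file_must_exist(os.path.join(app, 'file1'),
                                 chdir='copies-with-xcode-envvars')
      test.built_file_must_exist(
          os.path.join(app, 'Contents/Resources', 'file3'),
          chdir='copies-with-xcode-envvars')
      test.pass_test()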
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/empty.c b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/empty.c
new file mode 100644
index 0000000000..237c8ce181
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/empty.c
@@ -0,0 +1 @@
+int main() {}
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file0 b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file0
new file mode 100644
index 0000000000..117889361f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file0
@@ -0,0 +1 @@
+file0 contents
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file1 b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file1
new file mode 100644
index 0000000000..84d55c5759
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file1
@@ -0,0 +1 @@
+file1 contents
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file10 b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file10
new file mode 100644
index 0000000000..372e992ef9
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file10
@@ -0,0 +1 @@
+file10 contents
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file11 b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file11
new file mode 100644
index 0000000000..923e760e1f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file11
@@ -0,0 +1 @@
+file11 contents
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file2 b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file2
new file mode 100644
index 0000000000..af1b8ae35d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file2
@@ -0,0 +1 @@
+file2 contents
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file3 b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file3
new file mode 100644
index 0000000000..43f16f3522
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file3
@@ -0,0 +1 @@
+file3 contents
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file4 b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file4
new file mode 100644
index 0000000000..5f7270a084
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file4
@@ -0,0 +1 @@
+file4 contents
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file5 b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file5
new file mode 100644
index 0000000000..41f47186bd
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file5
@@ -0,0 +1 @@
+file5 contents
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file6 b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file6
new file mode 100644
index 0000000000..f5d5757348
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file6
@@ -0,0 +1 @@
+file6 contents
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file7 b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file7
new file mode 100644
index 0000000000..90dbe6e9e1
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file7
@@ -0,0 +1 @@
+file7 contents
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file8 b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file8
new file mode 100644
index 0000000000..9eb613fabb
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file8
@@ -0,0 +1 @@
+file8 contents
diff --git a/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file9 b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file9
new file mode 100644
index 0000000000..e37ac72ada
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copies-with-xcode-envvars/file9
@@ -0,0 +1 @@
+file9 contents
diff --git a/third_party/python/gyp/test/mac/copy-dylib/empty.c b/third_party/python/gyp/test/mac/copy-dylib/empty.c
new file mode 100644
index 0000000000..237c8ce181
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copy-dylib/empty.c
@@ -0,0 +1 @@
+int main() {}
diff --git a/third_party/python/gyp/test/mac/copy-dylib/test.gyp b/third_party/python/gyp/test/mac/copy-dylib/test.gyp
new file mode 100644
index 0000000000..4210c51463
--- /dev/null
+++ b/third_party/python/gyp/test/mac/copy-dylib/test.gyp
@@ -0,0 +1,31 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'my_dylib',
+ 'type': 'shared_library',
+ 'sources': [ 'empty.c', ],
+ },
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test App',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'dependencies': [ 'my_dylib', ],
+ 'sources': [
+ 'empty.c',
+ ],
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/Test App.app/Contents/Resources',
+ 'files': [
+ '<(PRODUCT_DIR)/libmy_dylib.dylib',
+ ],
+ },
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/mac/debuginfo/file.c b/third_party/python/gyp/test/mac/debuginfo/file.c
new file mode 100644
index 0000000000..9cddaf1b0b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/debuginfo/file.c
@@ -0,0 +1,6 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void f() {}
+int main() {}
diff --git a/third_party/python/gyp/test/mac/debuginfo/test.gyp b/third_party/python/gyp/test/mac/debuginfo/test.gyp
new file mode 100644
index 0000000000..3faf6b5c76
--- /dev/null
+++ b/third_party/python/gyp/test/mac/debuginfo/test.gyp
@@ -0,0 +1,82 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'nonbundle_static_library',
+ 'type': 'static_library',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ },
+ },
+ {
+ 'target_name': 'nonbundle_shared_library',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ },
+ },
+ {
+ 'target_name': 'nonbundle_loadable_module',
+ 'type': 'loadable_module',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ },
+ },
+ {
+ 'target_name': 'nonbundle_executable',
+ 'type': 'executable',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ },
+ },
+
+ {
+ 'target_name': 'bundle_shared_library',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ },
+ },
+ {
+ 'target_name': 'bundle_loadable_module',
+ 'type': 'loadable_module',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ },
+ },
+ {
+ 'target_name': 'my_app',
+ 'product_name': 'My App',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/depend-on-bundle/English.lproj/InfoPlist.strings b/third_party/python/gyp/test/mac/depend-on-bundle/English.lproj/InfoPlist.strings
new file mode 100644
index 0000000000..b92732c79e
--- /dev/null
+++ b/third_party/python/gyp/test/mac/depend-on-bundle/English.lproj/InfoPlist.strings
@@ -0,0 +1 @@
+/* Localized versions of Info.plist keys */
diff --git a/third_party/python/gyp/test/mac/depend-on-bundle/Info.plist b/third_party/python/gyp/test/mac/depend-on-bundle/Info.plist
new file mode 100644
index 0000000000..5e05a5190c
--- /dev/null
+++ b/third_party/python/gyp/test/mac/depend-on-bundle/Info.plist
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIconFile</key>
+ <string></string>
+ <key>CFBundleIdentifier</key>
+ <string>com.yourcompany.${PRODUCT_NAME}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>FMWK</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>NSPrincipalClass</key>
+ <string></string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/depend-on-bundle/bundle.c b/third_party/python/gyp/test/mac/depend-on-bundle/bundle.c
new file mode 100644
index 0000000000..d64ff8ca23
--- /dev/null
+++ b/third_party/python/gyp/test/mac/depend-on-bundle/bundle.c
@@ -0,0 +1 @@
+int f() { return 42; }
diff --git a/third_party/python/gyp/test/mac/depend-on-bundle/executable.c b/third_party/python/gyp/test/mac/depend-on-bundle/executable.c
new file mode 100644
index 0000000000..931bce637e
--- /dev/null
+++ b/third_party/python/gyp/test/mac/depend-on-bundle/executable.c
@@ -0,0 +1,4 @@
+int f();
+int main() {
+ return f();
+}
diff --git a/third_party/python/gyp/test/mac/depend-on-bundle/test.gyp b/third_party/python/gyp/test/mac/depend-on-bundle/test.gyp
new file mode 100644
index 0000000000..e00b105415
--- /dev/null
+++ b/third_party/python/gyp/test/mac/depend-on-bundle/test.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'my_bundle',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'bundle.c' ],
+ 'mac_bundle_resources': [
+ 'English.lproj/InfoPlist.strings',
+ ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Info.plist',
+ }
+ },
+ {
+ 'target_name': 'dependent_on_bundle',
+ 'type': 'executable',
+ 'sources': [ 'executable.c' ],
+ 'dependencies': [
+ 'my_bundle',
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/mac/deployment-target/check-version-min.c b/third_party/python/gyp/test/mac/deployment-target/check-version-min.c
new file mode 100644
index 0000000000..761c529085
--- /dev/null
+++ b/third_party/python/gyp/test/mac/deployment-target/check-version-min.c
@@ -0,0 +1,33 @@
+/* Copyright (c) 2013 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <Availability.h>
+
+/* GYPTEST_MAC_VERSION_MIN: should be set to the corresponding value of
+ * xcode setting 'MACOSX_DEPLOYMENT_TARGET', otherwise both should be
+ * left undefined.
+ *
+ * GYPTEST_IOS_VERSION_MIN: should be set to the corresponding value of
+ * xcode setting 'IPHONEOS_DEPLOYMENT_TARGET', otherwise both should be
+ * left undefined.
+ */
+
+#if defined(GYPTEST_MAC_VERSION_MIN)
+# if GYPTEST_MAC_VERSION_MIN != __MAC_OS_X_VERSION_MIN_REQUIRED
+# error __MAC_OS_X_VERSION_MIN_REQUIRED has wrong value
+# endif
+#elif defined(__MAC_OS_X_VERSION_MIN_REQUIRED)
+# error __MAC_OS_X_VERSION_MIN_REQUIRED should be undefined
+#endif
+
+#if defined(GYPTEST_IOS_VERSION_MIN)
+# if GYPTEST_IOS_VERSION_MIN != __IPHONE_OS_VERSION_MIN_REQUIRED
+# error __IPHONE_OS_VERSION_MIN_REQUIRED has wrong value
+# endif
+#elif defined(__IPHONE_OS_VERSION_MIN_REQUIRED)
+# error __IPHONE_OS_VERSION_MIN_REQUIRED should be undefined
+#endif
+
+int main() { return 0; }
+
diff --git a/third_party/python/gyp/test/mac/deployment-target/deployment-target.gyp b/third_party/python/gyp/test/mac/deployment-target/deployment-target.gyp
new file mode 100644
index 0000000000..47e0565c0c
--- /dev/null
+++ b/third_party/python/gyp/test/mac/deployment-target/deployment-target.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'macosx-version-min-10.5',
+ 'type': 'executable',
+ 'sources': [ 'check-version-min.c', ],
+ 'defines': [ 'GYPTEST_MAC_VERSION_MIN=1050', ],
+ 'xcode_settings': {
+ 'SDKROOT': 'macosx',
+ 'MACOSX_DEPLOYMENT_TARGET': '10.5',
+ },
+ },
+ {
+ 'target_name': 'macosx-version-min-10.6',
+ 'type': 'executable',
+ 'sources': [ 'check-version-min.c', ],
+ 'defines': [ 'GYPTEST_MAC_VERSION_MIN=1060', ],
+ 'xcode_settings': {
+ 'SDKROOT': 'macosx',
+ 'MACOSX_DEPLOYMENT_TARGET': '10.6',
+ },
+ },
+ ],
+}
+
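The comment block in check-version-min.c above requires GYPTEST_MAC_VERSION_MIN to mirror MACOSX_DEPLOYMENT_TARGET using the <Availability.h> encoding (10.5 -> 1050, 10.6 -> 1060). For illustration only, a hypothetical 10.7 variant of the targets above would pair the two values like this (not part of the fixture):

    {
      'target_name': 'macosx-version-min-10.7',
      'type': 'executable',
      'sources': [ 'check-version-min.c', ],
      # 10.7 is encoded as 1070, matching __MAC_OS_X_VERSION_MIN_REQUIRED.
      'defines': [ 'GYPTEST_MAC_VERSION_MIN=1070', ],
      'xcode_settings': {
        'SDKROOT': 'macosx',
        'MACOSX_DEPLOYMENT_TARGET': '10.7',
      },
    },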
diff --git a/third_party/python/gyp/test/mac/framework-dirs/calculate.c b/third_party/python/gyp/test/mac/framework-dirs/calculate.c
new file mode 100644
index 0000000000..7dc9d2d8b4
--- /dev/null
+++ b/third_party/python/gyp/test/mac/framework-dirs/calculate.c
@@ -0,0 +1,15 @@
+/* Copyright (c) 2012 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+int CalculatePerformExpression(char* expr,
+ int significantDigits,
+ int flags,
+ char* answer);
+
+int main() {
+ char buffer[1024];
+ return CalculatePerformExpression("42", 1, 0, buffer);
+}
+
diff --git a/third_party/python/gyp/test/mac/framework-dirs/framework-dirs.gyp b/third_party/python/gyp/test/mac/framework-dirs/framework-dirs.gyp
new file mode 100644
index 0000000000..bf1cbde4de
--- /dev/null
+++ b/third_party/python/gyp/test/mac/framework-dirs/framework-dirs.gyp
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'calculate',
+ 'type': 'executable',
+ 'sources': [
+ 'calculate.c',
+ ],
+ 'libraries': [
+ '/System/Library/PrivateFrameworks/Calculate.framework',
+ ],
+ 'mac_framework_dirs': [
+ '/System/Library/PrivateFrameworks',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/framework-headers/myframework.h b/third_party/python/gyp/test/mac/framework-headers/myframework.h
new file mode 100644
index 0000000000..961fc701bc
--- /dev/null
+++ b/third_party/python/gyp/test/mac/framework-headers/myframework.h
@@ -0,0 +1,8 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Foundation/Foundation.h>
+
+@interface TestObject : NSObject
+@end
diff --git a/third_party/python/gyp/test/mac/framework-headers/myframework.m b/third_party/python/gyp/test/mac/framework-headers/myframework.m
new file mode 100644
index 0000000000..13d53a37ab
--- /dev/null
+++ b/third_party/python/gyp/test/mac/framework-headers/myframework.m
@@ -0,0 +1,8 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "myframework.h"
+
+@implementation TestObject
+@end
diff --git a/third_party/python/gyp/test/mac/framework-headers/test.gyp b/third_party/python/gyp/test/mac/framework-headers/test.gyp
new file mode 100644
index 0000000000..70ed00715c
--- /dev/null
+++ b/third_party/python/gyp/test/mac/framework-headers/test.gyp
@@ -0,0 +1,44 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'test_framework_headers_framework',
+ 'product_name': 'TestFramework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'myframework.h',
+ 'myframework.m',
+ ],
+ 'mac_framework_headers': [
+ 'myframework.h',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ ],
+ },
+ },{
+ 'target_name': 'test_framework_headers_static',
+ 'product_name': 'TestLibrary',
+ 'type': 'static_library',
+ 'xcode_settings': {
+ 'PUBLIC_HEADERS_FOLDER_PATH': 'include',
+ },
+ 'sources': [
+ 'myframework.h',
+ 'myframework.m',
+ ],
+ 'mac_framework_headers': [
+ 'myframework.h',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ ],
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/framework/TestFramework/English.lproj/InfoPlist.strings b/third_party/python/gyp/test/mac/framework/TestFramework/English.lproj/InfoPlist.strings
new file mode 100644
index 0000000000..88f65cf6ea
--- /dev/null
+++ b/third_party/python/gyp/test/mac/framework/TestFramework/English.lproj/InfoPlist.strings
@@ -0,0 +1,2 @@
+/* Localized versions of Info.plist keys */
+
diff --git a/third_party/python/gyp/test/mac/framework/TestFramework/Info.plist b/third_party/python/gyp/test/mac/framework/TestFramework/Info.plist
new file mode 100644
index 0000000000..a791b3ee48
--- /dev/null
+++ b/third_party/python/gyp/test/mac/framework/TestFramework/Info.plist
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIconFile</key>
+ <string></string>
+ <key>CFBundleIdentifier</key>
+ <string>com.yourcompany.${PRODUCT_NAME:identifier}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>FMWK</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>NSPrincipalClass</key>
+ <string></string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/framework/TestFramework/ObjCVector.h b/third_party/python/gyp/test/mac/framework/TestFramework/ObjCVector.h
new file mode 100644
index 0000000000..c2450960cd
--- /dev/null
+++ b/third_party/python/gyp/test/mac/framework/TestFramework/ObjCVector.h
@@ -0,0 +1,28 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Cocoa/Cocoa.h>
+
+#ifdef __cplusplus
+struct ObjCVectorImp;
+#else
+typedef struct _ObjCVectorImpT ObjCVectorImp;
+#endif
+
+@interface ObjCVector : NSObject {
+ @private
+ ObjCVectorImp* imp_;
+}
+
+- (id)init;
+
+- (void)addObject:(id)obj;
+- (void)addObject:(id)obj atIndex:(NSUInteger)index;
+
+- (void)removeObject:(id)obj;
+- (void)removeObjectAtIndex:(NSUInteger)index;
+
+- (id)objectAtIndex:(NSUInteger)index;
+
+@end
diff --git a/third_party/python/gyp/test/mac/framework/TestFramework/ObjCVector.mm b/third_party/python/gyp/test/mac/framework/TestFramework/ObjCVector.mm
new file mode 100644
index 0000000000..cbf431f28d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/framework/TestFramework/ObjCVector.mm
@@ -0,0 +1,63 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "ObjCVectorInternal.h"
+#import "ObjCVector.h"
+
+#include <vector>
+
+@interface ObjCVector (Private)
+- (std::vector<id>::iterator)makeIterator:(NSUInteger)index;
+@end
+
+@implementation ObjCVector
+
+- (id)init {
+ if ((self = [super init])) {
+ imp_ = new ObjCVectorImp();
+ }
+ return self;
+}
+
+- (void)dealloc {
+ delete imp_;
+ [super dealloc];
+}
+
+- (void)addObject:(id)obj {
+ imp_->v.push_back([obj retain]);
+}
+
+- (void)addObject:(id)obj atIndex:(NSUInteger)index {
+ imp_->v.insert([self makeIterator:index], [obj retain]);
+}
+
+- (void)removeObject:(id)obj {
+ for (std::vector<id>::iterator it = imp_->v.begin();
+ it != imp_->v.end();
+ ++it) {
+ if ([*it isEqual:obj]) {
+ [*it autorelease];
+ imp_->v.erase(it);
+ return;
+ }
+ }
+}
+
+- (void)removeObjectAtIndex:(NSUInteger)index {
+ [imp_->v[index] autorelease];
+ imp_->v.erase([self makeIterator:index]);
+}
+
+- (id)objectAtIndex:(NSUInteger)index {
+ return imp_->v[index];
+}
+
+- (std::vector<id>::iterator)makeIterator:(NSUInteger)index {
+ std::vector<id>::iterator it = imp_->v.begin();
+ it += index;
+ return it;
+}
+
+@end
diff --git a/third_party/python/gyp/test/mac/framework/TestFramework/ObjCVectorInternal.h b/third_party/python/gyp/test/mac/framework/TestFramework/ObjCVectorInternal.h
new file mode 100644
index 0000000000..fb6c98258b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/framework/TestFramework/ObjCVectorInternal.h
@@ -0,0 +1,9 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <vector>
+
+struct ObjCVectorImp {
+ std::vector<id> v;
+};
diff --git a/third_party/python/gyp/test/mac/framework/TestFramework/TestFramework_Prefix.pch b/third_party/python/gyp/test/mac/framework/TestFramework/TestFramework_Prefix.pch
new file mode 100644
index 0000000000..394f41d957
--- /dev/null
+++ b/third_party/python/gyp/test/mac/framework/TestFramework/TestFramework_Prefix.pch
@@ -0,0 +1,7 @@
+//
+// Prefix header for all source files of the 'TestFramework' target in the 'TestFramework' project.
+//
+
+#ifdef __OBJC__
+ #import <Cocoa/Cocoa.h>
+#endif
diff --git a/third_party/python/gyp/test/mac/framework/empty.c b/third_party/python/gyp/test/mac/framework/empty.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/framework/empty.c
diff --git a/third_party/python/gyp/test/mac/framework/framework.gyp b/third_party/python/gyp/test/mac/framework/framework.gyp
new file mode 100644
index 0000000000..52b4f37be9
--- /dev/null
+++ b/third_party/python/gyp/test/mac/framework/framework.gyp
@@ -0,0 +1,108 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'dep_framework',
+ 'product_name': 'Dependency Bundle',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'empty.c', ],
+ },
+ {
+ 'target_name': 'test_framework',
+ 'product_name': 'Test Framework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'dependencies': [ 'dep_framework', ],
+ 'sources': [
+ 'TestFramework/ObjCVector.h',
+ 'TestFramework/ObjCVectorInternal.h',
+ 'TestFramework/ObjCVector.mm',
+ ],
+ 'mac_bundle_resources': [
+ 'TestFramework/English.lproj/InfoPlist.strings',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Cocoa.framework',
+ ],
+ },
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'TestFramework/Info.plist',
+ 'GCC_DYNAMIC_NO_PIC': 'NO',
+ },
+ 'copies': [
+ # Test copying to a file that has envvars in its dest path.
+ # Needs to be in a mac_bundle target, else CONTENTS_FOLDER_PATH isn't
+ # set.
+ {
+ 'destination': '<(PRODUCT_DIR)/$(CONTENTS_FOLDER_PATH)/Libraries',
+ 'files': [
+ 'empty.c',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'copy_target',
+ 'type': 'none',
+ 'dependencies': [ 'test_framework', 'dep_framework', ],
+ 'copies': [
+ # Test copying directories with spaces in src and dest paths.
+ {
+ 'destination': '<(PRODUCT_DIR)/Test Framework.framework/foo',
+ 'files': [
+ '<(PRODUCT_DIR)/Dependency Bundle.framework',
+ ],
+ },
+ ],
+ 'actions': [
+ {
+ 'action_name': 'aektschn',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/touched_file'],
+ 'action': ['touch', '${BUILT_PRODUCTS_DIR}/action_file'],
+ },
+ ],
+ },
+ {
+ 'target_name': 'copy_embedded',
+ 'type': 'none',
+ 'dependencies': [ 'test_framework' ],
+ 'copies': [
+ # Test copying framework to FRAMEWORK directory.
+ {
+ 'destination': '$(BUILT_FRAMEWORKS_DIR)/Embedded',
+ 'files': [
+ '<(PRODUCT_DIR)/Test Framework.framework',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'copy_target_code_sign',
+ 'type': 'none',
+ 'dependencies': [ 'test_framework', 'dep_framework', ],
+ 'copies': [
+ # Test copying directories with spaces in src and dest paths.
+ {
+ 'destination': '<(PRODUCT_DIR)/Test Framework.framework/foo',
+ 'files': [
+ '<(PRODUCT_DIR)/Dependency Bundle.framework',
+ ],
+ 'xcode_code_sign': 1,
+ },
+ ],
+ 'actions': [
+ {
+ 'action_name': 'aektschn',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/touched_file'],
+ 'action': ['touch', '${BUILT_PRODUCTS_DIR}/action_file'],
+ },
+ ],
+ },
+ ],
+}
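The 'copies' comment in test_framework above notes that $(CONTENTS_FOLDER_PATH) is only set for mac_bundle targets. A sketch of how a driver could verify that copy, assuming the usual 'Versions/A' layout that $(CONTENTS_FOLDER_PATH) expands to for a framework bundle:

    import sys

    import TestGyp

    if sys.platform == 'darwin':
      test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
      test.run_gyp('framework.gyp', chdir='framework')
      test.build('framework.gyp', 'test_framework', chdir='framework')
      # The copies step above targets $(CONTENTS_FOLDER_PATH)/Libraries,
      # which for 'Test Framework.framework' is assumed to expand to
      # 'Test Framework.framework/Versions/A'.
      test.built_file_must_exist(
          'Test Framework.framework/Versions/A/Libraries/empty.c',
          chdir='framework')
      test.pass_test()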
diff --git a/third_party/python/gyp/test/mac/global-settings/src/dir1/dir1.gyp b/third_party/python/gyp/test/mac/global-settings/src/dir1/dir1.gyp
new file mode 100644
index 0000000000..153e34ddd6
--- /dev/null
+++ b/third_party/python/gyp/test/mac/global-settings/src/dir1/dir1.gyp
@@ -0,0 +1,11 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'dir1_target',
+ 'type': 'none',
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/global-settings/src/dir2/dir2.gyp b/third_party/python/gyp/test/mac/global-settings/src/dir2/dir2.gyp
new file mode 100644
index 0000000000..cda46c839b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/global-settings/src/dir2/dir2.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'dir2_target',
+ 'type': 'none',
+ 'dependencies': [
+ '../dir1/dir1.gyp:dir1_target',
+ ],
+ 'actions': [
+ {
+ 'inputs': [ ],
+ 'outputs': [ '<(PRODUCT_DIR)/file.txt' ],
+ 'action_name': 'Test action',
+ 'action': ['cp', 'file.txt', '${BUILT_PRODUCTS_DIR}/file.txt' ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/global-settings/src/dir2/file.txt b/third_party/python/gyp/test/mac/global-settings/src/dir2/file.txt
new file mode 100644
index 0000000000..58da2d8e9a
--- /dev/null
+++ b/third_party/python/gyp/test/mac/global-settings/src/dir2/file.txt
@@ -0,0 +1 @@
+File.
diff --git a/third_party/python/gyp/test/mac/gyptest-action-envvars.py b/third_party/python/gyp/test/mac/gyptest-action-envvars.py
new file mode 100644
index 0000000000..c84eeaa465
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-action-envvars.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that env vars work with actions, with relative directory paths.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ # The xcode-ninja generator handles gypfiles which are not at the
+ # project root incorrectly.
+ # cf. https://code.google.com/p/gyp/issues/detail?id=460
+ if test.format == 'xcode-ninja':
+ test.skip_test()
+
+ CHDIR = 'action-envvars'
+ test.run_gyp('action/action.gyp', chdir=CHDIR)
+ test.build('action/action.gyp', 'action', chdir=CHDIR, SYMROOT='../build')
+
+ result_file = test.built_file_path('result', chdir=CHDIR)
+ test.must_exist(result_file)
+ test.must_contain(result_file, 'Test output')
+
+ other_result_file = test.built_file_path('other_result', chdir=CHDIR)
+ test.must_exist(other_result_file)
+ test.must_contain(other_result_file, 'Other output')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-app-assets-catalog.py b/third_party/python/gyp/test/mac/gyptest-app-assets-catalog.py
new file mode 100755
index 0000000000..7b1c0f67de
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-app-assets-catalog.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that app bundles are built correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+import TestMac
+
+import os
+import plistlib
+import subprocess
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+def ExpectEq(expected, actual):
+ if expected != actual:
+ print('Expected "%s", got "%s"' % (expected, actual), file=sys.stderr)
+ test.fail_test()
+
+def ls(path):
+ '''Returns a list of all files in a directory, relative to the directory.'''
+ result = []
+ for dirpath, _, files in os.walk(path):
+ for f in files:
+ result.append(os.path.join(dirpath, f)[len(path) + 1:])
+ return result
+
+# Xcode support for asset catalogs was introduced in Xcode 6.0
+if sys.platform == 'darwin' and TestMac.Xcode.Version() >= '0600':
+ test_gyp_path = 'test-assets-catalog.gyp'
+ test_app_path = 'Test App Assets Catalog Gyp.app'
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+ test.run_gyp(test_gyp_path, chdir='app-bundle')
+ test.build(test_gyp_path, test.ALL, chdir='app-bundle')
+
+ # Binary
+ test.built_file_must_exist(
+ os.path.join(test_app_path, 'Contents/MacOS/Test App Assets Catalog Gyp'),
+ chdir='app-bundle')
+
+ # Info.plist
+ info_plist = test.built_file_path(
+ os.path.join(test_app_path, 'Contents/Info.plist'),
+ chdir='app-bundle')
+ test.must_exist(info_plist)
+ test.must_contain(
+ info_plist,
+ 'com.google.Test-App-Assets-Catalog-Gyp') # Variable expansion
+  test.must_not_contain(info_plist, '${MACOSX_DEPLOYMENT_TARGET}')
+
+ if test.format != 'make':
+ # TODO: Synthesized plist entries aren't hooked up in the make generator.
+ machine = subprocess.check_output(['sw_vers', '-buildVersion']).rstrip('\n')
+ plist = plistlib.readPlist(info_plist)
+ ExpectEq(machine, plist['BuildMachineOSBuild'])
+
+ expected = ''
+ version = TestMac.Xcode.SDKVersion()
+ expected = 'macosx' + version
+ ExpectEq(expected, plist['DTSDKName'])
+ sdkbuild = TestMac.Xcode.SDKBuild()
+ if not sdkbuild:
+ # Above command doesn't work in Xcode 4.2.
+ sdkbuild = plist['BuildMachineOSBuild']
+ ExpectEq(sdkbuild, plist['DTSDKBuild'])
+ ExpectEq(TestMac.Xcode.Version(), plist['DTXcode'])
+ ExpectEq(TestMac.Xcode.Build(), plist['DTXcodeBuild'])
+
+ # Resources
+ strings_files = ['InfoPlist.strings', 'utf-16be.strings', 'utf-16le.strings']
+ for f in strings_files:
+ strings = test.built_file_path(
+ os.path.join(test_app_path, 'Contents/Resources/English.lproj', f),
+ chdir='app-bundle')
+ test.must_exist(strings)
+    # Xcode writes UTF-16LE with BOM.
+ contents = open(strings, 'rb').read()
+ if not contents.startswith('\xff\xfe' + '/* Localized'.encode('utf-16le')):
+ test.fail_test()
+
+ test.built_file_must_exist(
+ os.path.join(
+ test_app_path, 'Contents/Resources/English.lproj/MainMenu.nib'),
+ chdir='app-bundle')
+
+  # make does not support .xcassets files
+ extra_content_files = []
+ if test.format != 'make':
+ extra_content_files = ['Contents/Resources/Assets.car']
+ for f in extra_content_files:
+ test.built_file_must_exist(
+ os.path.join(test_app_path, f),
+ chdir='app-bundle')
+
+ # Packaging
+ test.built_file_must_exist(
+ os.path.join(test_app_path, 'Contents/PkgInfo'),
+ chdir='app-bundle')
+ test.built_file_must_match(
+ os.path.join(test_app_path, 'Contents/PkgInfo'), 'APPLause',
+ chdir='app-bundle')
+
+ # Check that no other files get added to the bundle.
+ if set(ls(test.built_file_path(test_app_path, chdir='app-bundle'))) != \
+ set(['Contents/MacOS/Test App Assets Catalog Gyp',
+ 'Contents/Info.plist',
+ 'Contents/Resources/English.lproj/MainMenu.nib',
+ 'Contents/PkgInfo',
+ ] + extra_content_files +
+ [os.path.join('Contents/Resources/English.lproj', f)
+ for f in strings_files]):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-app-error.py b/third_party/python/gyp/test/mac/gyptest-app-error.py
new file mode 100755
index 0000000000..df0781d455
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-app-error.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that invalid strings files cause the build to fail.
+"""
+
+from __future__ import print_function
+
+import TestCmd
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ expected_error = 'Old-style plist parser: missing semicolon in dictionary'
+ saw_expected_error = [False] # Python2 has no "nonlocal" keyword.
+ def match(a, b):
+ if a == b:
+ return True
+ if not TestCmd.is_List(a):
+ a = a.split('\n')
+ if not TestCmd.is_List(b):
+ b = b.split('\n')
+ if expected_error in '\n'.join(a) + '\n'.join(b):
+ saw_expected_error[0] = True
+ return True
+ return False
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'], match=match)
+
+ test.run_gyp('test-error.gyp', chdir='app-bundle')
+
+ test.build('test-error.gyp', test.ALL, chdir='app-bundle')
+
+ # Ninja pipes stderr of subprocesses to stdout.
+ if test.format in ['ninja', 'xcode-ninja'] \
+ and expected_error in test.stdout():
+ saw_expected_error[0] = True
+
+ if saw_expected_error[0]:
+ test.pass_test()
+ else:
+ test.fail_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-app.py b/third_party/python/gyp/test/mac/gyptest-app.py
new file mode 100755
index 0000000000..16c9640373
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-app.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that app bundles are built correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+import TestMac
+
+import os
+import plistlib
+import subprocess
+import sys
+
+
+if sys.platform in ('darwin', 'win32'):
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
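+# Uses file(1) to check that a property list was written as XML text rather
+# than as a binary plist.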
+def CheckFileXMLPropertyList(file):
+ output = subprocess.check_output(['file', file])
+ # The double space after XML is intentional.
+ if not 'XML  document text' in output:
+ print('File: Expected XML  document text, got %s' % output)
+ test.fail_test()
+
+def ExpectEq(expected, actual):
+ if expected != actual:
+ print('Expected "%s", got "%s"' % (expected, actual), file=sys.stderr)
+ test.fail_test()
+
+def ls(path):
+ '''Returns a list of all files in a directory, relative to the directory.'''
+ result = []
+ for dirpath, _, files in os.walk(path):
+ for f in files:
+ result.append(os.path.join(dirpath, f)[len(path) + 1:])
+ return result
+
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ test.run_gyp('test.gyp', chdir='app-bundle')
+
+ test.build('test.gyp', test.ALL, chdir='app-bundle')
+
+ # Binary
+ test.built_file_must_exist('Test App Gyp.app/Contents/MacOS/Test App Gyp',
+ chdir='app-bundle')
+
+ # Info.plist
+ info_plist = test.built_file_path('Test App Gyp.app/Contents/Info.plist',
+ chdir='app-bundle')
+ test.must_exist(info_plist)
+ test.must_contain(info_plist, 'com.google.Test-App-Gyp') # Variable expansion
+ test.must_not_contain(info_plist, '${MACOSX_DEPLOYMENT_TARGET}')
+ CheckFileXMLPropertyList(info_plist)
+
+ if test.format != 'make':
+ # TODO: Synthesized plist entries aren't hooked up in the make generator.
+ machine = subprocess.check_output(['sw_vers', '-buildVersion']).rstrip('\n')
+ plist = plistlib.readPlist(info_plist)
+ ExpectEq(machine, plist['BuildMachineOSBuild'])
+
+ # Prior to Xcode 5.0.0, SDKROOT (and thus DTSDKName) was only defined if
+ # set in the Xcode project file. Starting with that version, it is always
+ # defined.
+ expected = ''
+ if TestMac.Xcode.Version() >= '0500':
+ version = TestMac.Xcode.SDKVersion()
+ expected = 'macosx' + version
+ ExpectEq(expected, plist['DTSDKName'])
+ sdkbuild = TestMac.Xcode.SDKBuild()
+ if not sdkbuild:
+ # Above command doesn't work in Xcode 4.2.
+ sdkbuild = plist['BuildMachineOSBuild']
+ ExpectEq(sdkbuild, plist['DTSDKBuild'])
+ ExpectEq(TestMac.Xcode.Version(), plist['DTXcode'])
+ ExpectEq(TestMac.Xcode.Build(), plist['DTXcodeBuild'])
+
+ # Resources
+ strings_files = ['InfoPlist.strings', 'utf-16be.strings', 'utf-16le.strings']
+ for f in strings_files:
+ strings = test.built_file_path(
+ os.path.join('Test App Gyp.app/Contents/Resources/English.lproj', f),
+ chdir='app-bundle')
+ test.must_exist(strings)
+ # Xcode writes UTF-16LE with a BOM.
+ contents = open(strings, 'rb').read()
+ if not contents.startswith('\xff\xfe' + '/* Localized'.encode('utf-16le')):
+ test.fail_test()
+
+ test.built_file_must_exist(
+ 'Test App Gyp.app/Contents/Resources/English.lproj/MainMenu.nib',
+ chdir='app-bundle')
+
+ # Packaging
+ test.built_file_must_exist('Test App Gyp.app/Contents/PkgInfo',
+ chdir='app-bundle')
+ test.built_file_must_match('Test App Gyp.app/Contents/PkgInfo', 'APPLause',
+ chdir='app-bundle')
+
+ # Check that no other files get added to the bundle.
+ if set(ls(test.built_file_path('Test App Gyp.app', chdir='app-bundle'))) != \
+ set(['Contents/MacOS/Test App Gyp',
+ 'Contents/Info.plist',
+ 'Contents/Resources/English.lproj/MainMenu.nib',
+ 'Contents/PkgInfo',
+ ] +
+ [os.path.join('Contents/Resources/English.lproj', f)
+ for f in strings_files]):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-archs.py b/third_party/python/gyp/test/mac/gyptest-archs.py
new file mode 100644
index 0000000000..c56f20c4d6
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-archs.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Tests things related to ARCHS.
+"""
+
+import TestGyp
+import TestMac
+
+import re
+import subprocess
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ test.run_gyp('test-no-archs.gyp', chdir='archs')
+ test.build('test-no-archs.gyp', test.ALL, chdir='archs')
+ result_file = test.built_file_path('Test', chdir='archs')
+ test.must_exist(result_file)
+
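+ # With no ARCHS set, the expected default architecture is x86_64 on
+ # Xcode >= 5.0 and i386 on older versions.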
+ if TestMac.Xcode.Version() >= '0500':
+ expected_type = ['x86_64']
+ else:
+ expected_type = ['i386']
+ TestMac.CheckFileType(test, result_file, expected_type)
+
+ test.run_gyp('test-valid-archs.gyp', chdir='archs')
+ test.build('test-valid-archs.gyp', test.ALL, chdir='archs')
+ result_file = test.built_file_path('Test', chdir='archs')
+ test.must_exist(result_file)
+ TestMac.CheckFileType(test, result_file, ['x86_64'])
+
+ test.run_gyp('test-archs-x86_64.gyp', chdir='archs')
+ test.build('test-archs-x86_64.gyp', test.ALL, chdir='archs')
+ result_file = test.built_file_path('Test64', chdir='archs')
+ test.must_exist(result_file)
+ TestMac.CheckFileType(test, result_file, ['x86_64'])
+
+ test.run_gyp('test-dependencies.gyp', chdir='archs')
+ test.build('test-dependencies.gyp', target=test.ALL, chdir='archs')
+ products = ['c_standalone', 'd_standalone']
+ for product in products:
+ result_file = test.built_file_path(
+ product, chdir='archs', type=test.STATIC_LIB)
+ test.must_exist(result_file)
+
+ if test.format != 'make':
+ # Build all targets except 'exe_32_64_no_sources'. That target is not built
+ # here, but it should not cause an error when the ninja files are generated.
+ targets = [
+ 'static_32_64', 'shared_32_64', 'shared_32_64_bundle',
+ 'module_32_64', 'module_32_64_bundle',
+ 'exe_32_64', 'exe_32_64_bundle', 'precompiled_prefix_header_mm_32_64',
+ ]
+
+ test.run_gyp('test-archs-multiarch.gyp', chdir='archs')
+
+ for target in targets:
+ test.build('test-archs-multiarch.gyp', target=target, chdir='archs')
+
+ result_file = test.built_file_path(
+ 'static_32_64', chdir='archs', type=test.STATIC_LIB)
+ test.must_exist(result_file)
+ TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
+
+ result_file = test.built_file_path(
+ 'shared_32_64', chdir='archs', type=test.SHARED_LIB)
+ test.must_exist(result_file)
+ TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
+
+ result_file = test.built_file_path('My Framework.framework/My Framework',
+ chdir='archs')
+ test.must_exist(result_file)
+ TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
+ # Check that symbol "_x" made it into both versions of the binary:
+ if not all(['D _x' in subprocess.check_output(
+ ['nm', '-arch', arch, result_file]).decode('utf-8')
+ for arch in ['i386', 'x86_64']]):
+ # This can fail only flakily, due to process ordering issues. If it does
+ # fail, something is broken; it is not the test that is at fault.
+ test.fail_test()
+
+ result_file = test.built_file_path(
+ 'exe_32_64', chdir='archs', type=test.EXECUTABLE)
+ test.must_exist(result_file)
+ TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
+
+ result_file = test.built_file_path('Test App.app/Contents/MacOS/Test App',
+ chdir='archs')
+ test.must_exist(result_file)
+ TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
diff --git a/third_party/python/gyp/test/mac/gyptest-bundle-resources.py b/third_party/python/gyp/test/mac/gyptest-bundle-resources.py
new file mode 100644
index 0000000000..275cdbaa54
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-bundle-resources.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies things related to bundle resources.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import os
+import stat
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
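+# Checks that a copied resource's mtime differs from the source file's and
+# that it carries the expected exec bit.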
+def check_attribs(path, expected_exec_bit):
+ out_path = test.built_file_path(
+ os.path.join('resource.app/Contents/Resources', path), chdir=CHDIR)
+
+ in_stat = os.stat(os.path.join(CHDIR, path))
+ out_stat = os.stat(out_path)
+ if in_stat.st_mtime == out_stat.st_mtime:
+ test.fail_test()
+ if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
+ test.fail_test()
+
+
+if sys.platform == 'darwin':
+ # set |match| to ignore build stderr output.
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'bundle-resources'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+
+ test.built_file_must_match('resource.app/Contents/Resources/secret.txt',
+ 'abc\n', chdir=CHDIR)
+ test.built_file_must_match('source_rule.app/Contents/Resources/secret.txt',
+ 'ABC\n', chdir=CHDIR)
+
+ test.built_file_must_match(
+ 'resource.app/Contents/Resources/executable-file.sh',
+ '#!/bin/bash\n'
+ '\n'
+ 'echo echo echo echo cho ho o o\n', chdir=CHDIR)
+
+ check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
+ check_attribs('secret.txt', expected_exec_bit=0)
+
+ # TODO(thakis): This currently fails with make.
+ if test.format != 'make':
+ test.built_file_must_match(
+ 'resource_rule.app/Contents/Resources/secret.txt', 'ABC\n', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-cflags.py b/third_party/python/gyp/test/mac/gyptest-cflags.py
new file mode 100644
index 0000000000..17afd15665
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-cflags.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that compile-time flags work.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+ CHDIR = 'cflags'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-clang-cxx-language-standard.py b/third_party/python/gyp/test/mac/gyptest-clang-cxx-language-standard.py
new file mode 100644
index 0000000000..75c6c74c97
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-clang-cxx-language-standard.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that CLANG_CXX_LANGUAGE_STANDARD works.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['make', 'ninja', 'xcode'])
+
+ test.run_gyp('clang-cxx-language-standard.gyp',
+ chdir='clang-cxx-language-standard')
+
+ test.build('clang-cxx-language-standard.gyp', test.ALL,
+ chdir='clang-cxx-language-standard')
+
+ test.pass_test()
+
diff --git a/third_party/python/gyp/test/mac/gyptest-clang-cxx-library.py b/third_party/python/gyp/test/mac/gyptest-clang-cxx-library.py
new file mode 100644
index 0000000000..177d6376ff
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-clang-cxx-library.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that CLANG_CXX_LIBRARY works.
+"""
+
+import TestGyp
+import TestMac
+
+import sys
+
+if sys.platform == 'darwin':
+ # Xcode 4.2 on OS X 10.6 doesn't install the libc++ headers, so don't run
+ # this test there.
+ if TestMac.Xcode.Version() <= '0420':
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['make', 'ninja', 'xcode'])
+
+ if test.format == 'make':
+ # This is failing because of a deprecation warning for libstdc++.
+ test.skip_test() # bug=533
+
+ test.run_gyp('clang-cxx-library.gyp', chdir='clang-cxx-library')
+ test.build('clang-cxx-library.gyp', test.ALL, chdir='clang-cxx-library')
+
+ test.pass_test()
+
diff --git a/third_party/python/gyp/test/mac/gyptest-copies-with-xcode-envvars.py b/third_party/python/gyp/test/mac/gyptest-copies-with-xcode-envvars.py
new file mode 100644
index 0000000000..80b0ecfa28
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-copies-with-xcode-envvars.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2016 Mark Callow. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that files are copied to the correct destinations when those
+destinations are specified using environment variables available in
+Xcode's PBXCopyFilesBuildPhase.
+"""
+
+import TestGyp
+
+import os
+import stat
+import sys
+
+
+test = TestGyp.TestGyp(formats=['make', 'ninja', 'xcode'])
+
+if sys.platform == 'darwin':
+ test.run_gyp('copies-with-xcode-envvars.gyp',
+ chdir='copies-with-xcode-envvars')
+
+ test.build('copies-with-xcode-envvars.gyp', chdir='copies-with-xcode-envvars')
+
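+ # Each fileN below is expected at the bundle location that corresponds to one
+ # of the copy destinations exposed by Xcode (wrapper, MacOS, Resources,
+ # Resources/Java, Frameworks, SharedFrameworks, SharedSupport, PlugIns and
+ # XPCServices).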
+ wrapper_name = 'copies-with-xcode-envvars.app/'
+ contents_path = wrapper_name + 'Contents/'
+ out_path = test.built_file_path('file0', chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file0 contents\n')
+ out_path = test.built_file_path(wrapper_name + 'file1',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file1 contents\n')
+ out_path = test.built_file_path(contents_path + 'MacOS/file2',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file2 contents\n')
+ out_path = test.built_file_path(contents_path + 'Resources/file3',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file3 contents\n')
+ out_path = test.built_file_path(contents_path + 'Resources/testimages/file4',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file4 contents\n')
+ out_path = test.built_file_path(contents_path + 'Resources/Java/file5',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file5 contents\n')
+ out_path = test.built_file_path(contents_path + 'Frameworks/file6',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file6 contents\n')
+ out_path = test.built_file_path(contents_path + 'Frameworks/file7',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file7 contents\n')
+ out_path = test.built_file_path(contents_path + 'SharedFrameworks/file8',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file8 contents\n')
+ out_path = test.built_file_path(contents_path + 'SharedSupport/file9',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file9 contents\n')
+ out_path = test.built_file_path(contents_path + 'PlugIns/file10',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file10 contents\n')
+ out_path = test.built_file_path(contents_path + 'XPCServices/file11',
+ chdir='copies-with-xcode-envvars')
+ test.must_contain(out_path, 'file11 contents\n')
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-copies.py b/third_party/python/gyp/test/mac/gyptest-copies.py
new file mode 100755
index 0000000000..838c62dc37
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-copies.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that 'copies' with app bundles are handled correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import os
+import sys
+import time
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ test.run_gyp('framework.gyp', chdir='framework')
+
+ test.build('framework.gyp', 'copy_target', chdir='framework')
+
+ # Check that the copy succeeded.
+ test.built_file_must_exist(
+ 'Test Framework.framework/foo/Dependency Bundle.framework',
+ chdir='framework')
+ test.built_file_must_exist(
+ 'Test Framework.framework/foo/Dependency Bundle.framework/Versions/A',
+ chdir='framework')
+ test.built_file_must_exist(
+ 'Test Framework.framework/Versions/A/Libraries/empty.c',
+ chdir='framework')
+
+ # Verify BUILT_FRAMEWORKS_DIR is set and working.
+ test.build('framework.gyp', 'copy_embedded', chdir='framework')
+
+ test.built_file_must_exist(
+ 'Embedded/Test Framework.framework', chdir='framework')
+
+ # Check that rebuilding the target a few times works.
+ dep_bundle = test.built_file_path('Dependency Bundle.framework',
+ chdir='framework')
+ mtime = os.path.getmtime(dep_bundle)
+ atime = os.path.getatime(dep_bundle)
+ for i in range(3):
+ os.utime(dep_bundle, (atime + i * 1000, mtime + i * 1000))
+ test.build('framework.gyp', 'copy_target', chdir='framework')
+
+
+ # Check that actions ran.
+ test.built_file_must_exist('action_file', chdir='framework')
+
+ # Test that a copy with the "Code Sign on Copy" flag enabled succeeds.
+ test.build('framework.gyp', 'copy_target_code_sign', chdir='framework')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-copy-dylib.py b/third_party/python/gyp/test/mac/gyptest-copy-dylib.py
new file mode 100644
index 0000000000..253623d1c6
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-copy-dylib.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that dylibs can be copied into app bundles.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ test.run_gyp('test.gyp', chdir='copy-dylib')
+
+ test.build('test.gyp', 'test_app', chdir='copy-dylib')
+
+ test.built_file_must_exist(
+ 'Test App.app/Contents/Resources/libmy_dylib.dylib', chdir='copy-dylib')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-debuginfo.py b/third_party/python/gyp/test/mac/gyptest-debuginfo.py
new file mode 100755
index 0000000000..a0e9438e2a
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-debuginfo.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Tests things related to debug information generation.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ test.run_gyp('test.gyp', chdir='debuginfo')
+
+ test.build('test.gyp', test.ALL, chdir='debuginfo')
+
+ test.built_file_must_exist('libnonbundle_shared_library.dylib.dSYM',
+ chdir='debuginfo')
+ test.built_file_must_exist('nonbundle_loadable_module.so.dSYM',
+ chdir='debuginfo')
+ test.built_file_must_exist('nonbundle_executable.dSYM',
+ chdir='debuginfo')
+
+ test.built_file_must_exist('bundle_shared_library.framework.dSYM',
+ chdir='debuginfo')
+ test.built_file_must_exist('bundle_loadable_module.bundle.dSYM',
+ chdir='debuginfo')
+ test.built_file_must_exist('My App.app.dSYM',
+ chdir='debuginfo')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-depend-on-bundle.py b/third_party/python/gyp/test/mac/gyptest-depend-on-bundle.py
new file mode 100644
index 0000000000..b8b06d4cc0
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-depend-on-bundle.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a dependency on a bundle causes the whole bundle to be built.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ test.run_gyp('test.gyp', chdir='depend-on-bundle')
+
+ test.build('test.gyp', 'dependent_on_bundle', chdir='depend-on-bundle')
+
+ # Binary itself.
+ test.built_file_must_exist('dependent_on_bundle', chdir='depend-on-bundle')
+
+ # Bundle dependency.
+ test.built_file_must_exist(
+ 'my_bundle.framework/Versions/A/my_bundle',
+ chdir='depend-on-bundle')
+ test.built_file_must_exist( # package_framework
+ 'my_bundle.framework/my_bundle',
+ chdir='depend-on-bundle')
+ test.built_file_must_exist( # plist
+ 'my_bundle.framework/Versions/A/Resources/Info.plist',
+ chdir='depend-on-bundle')
+ test.built_file_must_exist(
+ 'my_bundle.framework/Versions/A/Resources/English.lproj/' # Resources
+ 'InfoPlist.strings',
+ chdir='depend-on-bundle')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-deployment-target.py b/third_party/python/gyp/test/mac/gyptest-deployment-target.py
new file mode 100644
index 0000000000..c7eabde6fe
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-deployment-target.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that MACOSX_DEPLOYMENT_TARGET works.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['make', 'ninja', 'xcode'])
+
+ if test.format == 'make':
+ # This is failing because of a deprecation warning for libstdc++.
+ test.skip_test() # bug=533
+
+ test.run_gyp('deployment-target.gyp', chdir='deployment-target')
+
+ test.build('deployment-target.gyp', test.ALL, chdir='deployment-target')
+
+ test.pass_test()
+
diff --git a/third_party/python/gyp/test/mac/gyptest-framework-dirs.py b/third_party/python/gyp/test/mac/gyptest-framework-dirs.py
new file mode 100644
index 0000000000..a1ae54c57f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-framework-dirs.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that it is possible to build an object that depends on a
+PrivateFramework.
+"""
+
+import os
+import sys
+import TestGyp
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'framework-dirs'
+ test.run_gyp('framework-dirs.gyp', chdir=CHDIR)
+ test.build('framework-dirs.gyp', 'calculate', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-framework-headers.py b/third_party/python/gyp/test/mac/gyptest-framework-headers.py
new file mode 100644
index 0000000000..aa13a742cd
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-framework-headers.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that mac_framework_headers works properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ # TODO(thakis): Make this work with ninja, make. http://crbug.com/129013
+ test = TestGyp.TestGyp(formats=['xcode'])
+
+ CHDIR = 'framework-headers'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ # Test that headers are installed for frameworks
+ test.build('test.gyp', 'test_framework_headers_framework', chdir=CHDIR)
+
+ test.built_file_must_exist(
+ 'TestFramework.framework/Versions/A/TestFramework', chdir=CHDIR)
+
+ test.built_file_must_exist(
+ 'TestFramework.framework/Versions/A/Headers/myframework.h', chdir=CHDIR)
+
+ # Test that headers are installed for static libraries.
+ test.build('test.gyp', 'test_framework_headers_static', chdir=CHDIR)
+
+ test.built_file_must_exist('libTestLibrary.a', chdir=CHDIR)
+
+ test.built_file_must_exist('include/myframework.h', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-framework.py b/third_party/python/gyp/test/mac/gyptest-framework.py
new file mode 100755
index 0000000000..faf05cf313
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-framework.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that framework bundles are built correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
+def ls(path):
+ '''Returns a list of all files in a directory, relative to the directory.'''
+ result = []
+ for dirpath, _, files in os.walk(path):
+ for f in files:
+ result.append(os.path.join(dirpath, f)[len(path) + 1:])
+ return result
+
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ test.run_gyp('framework.gyp', chdir='framework')
+
+ test.build('framework.gyp', 'test_framework', chdir='framework')
+
+ # Binary
+ test.built_file_must_exist(
+ 'Test Framework.framework/Versions/A/Test Framework',
+ chdir='framework')
+
+ # Info.plist
+ info_plist = test.built_file_path(
+ 'Test Framework.framework/Versions/A/Resources/Info.plist',
+ chdir='framework')
+ test.must_exist(info_plist)
+ test.must_contain(info_plist, 'com.yourcompany.Test_Framework')
+
+ # Resources
+ test.built_file_must_exist(
+ 'Test Framework.framework/Versions/A/Resources/English.lproj/'
+ 'InfoPlist.strings',
+ chdir='framework')
+
+ # Symlinks created by packaging process
+ test.built_file_must_exist('Test Framework.framework/Versions/Current',
+ chdir='framework')
+ test.built_file_must_exist('Test Framework.framework/Resources',
+ chdir='framework')
+ test.built_file_must_exist('Test Framework.framework/Test Framework',
+ chdir='framework')
+ # PkgInfo.
+ test.built_file_must_not_exist(
+ 'Test Framework.framework/Versions/A/Resources/PkgInfo',
+ chdir='framework')
+
+ # Check that no other files get added to the bundle.
+ if set(ls(test.built_file_path('Test Framework.framework',
+ chdir='framework'))) != \
+ set(['Versions/A/Test Framework',
+ 'Versions/A/Resources/Info.plist',
+ 'Versions/A/Resources/English.lproj/InfoPlist.strings',
+ 'Test Framework',
+ 'Versions/A/Libraries/empty.c', # Written by a gyp action.
+ ]):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-global-settings.py b/third_party/python/gyp/test/mac/gyptest-global-settings.py
new file mode 100644
index 0000000000..f4ed16630e
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-global-settings.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that the global xcode_settings processing doesn't throw.
+Regression test for http://crbug.com/109163
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ # The xcode-ninja generator mishandles gypfiles that are not at the
+ # project root.
+ # cf. https://code.google.com/p/gyp/issues/detail?id=460
+ if test.format == 'xcode-ninja':
+ test.skip_test()
+
+ test.run_gyp('src/dir2/dir2.gyp', chdir='global-settings', depth='src')
+ # run_gyp shouldn't throw.
+
+ # Check that BUILT_PRODUCTS_DIR was set correctly, too.
+ test.build('dir2/dir2.gyp', 'dir2_target', chdir='global-settings/src',
+ SYMROOT='../build')
+ test.built_file_must_exist('file.txt', chdir='global-settings/src')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-identical-name.py b/third_party/python/gyp/test/mac/gyptest-identical-name.py
new file mode 100644
index 0000000000..0d358df921
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-identical-name.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that libraries (in identical-name) are handled properly by Xcode.
+
+The names for all libraries participating in this build are:
+libtestlib.a - identical-name/testlib
+libtestlib.a - identical-name/proxy/testlib
+libproxy.a - identical-name/proxy
+The first two libs produce a hash collision in Xcode when Gyp is executed,
+because they have the same name and would be copied to the same directory with
+Xcode default settings.
+For this scenario to work, one needs to change the Xcode variables SYMROOT and
+CONFIGURATION_BUILD_DIR. Setting these to per-lib-unique directories avoids
+copying the libs into the same directory.
+
+The test consists of two steps. The first verifies that, with both vars set,
+there is no hash collision during Gyp execution and the libs can actually be
+built. The second verifies that there is still a hash collision if the vars
+are not set, and thus that the current behavior is preserved.
+"""
+
+import TestGyp
+
+import sys
+
+def IgnoreOutput(string, expected_string):
+ return True
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['xcode'])
+
+
+ test.run_gyp('test.gyp', chdir='identical-name')
+ test.build('test.gyp', test.ALL, chdir='identical-name')
+
+ test.run_gyp('test-should-fail.gyp', chdir='identical-name')
+ test.built_file_must_not_exist('test-should-fail.xcodeproj')
+
+ test.pass_test()
+
diff --git a/third_party/python/gyp/test/mac/gyptest-infoplist-process.py b/third_party/python/gyp/test/mac/gyptest-infoplist-process.py
new file mode 100755
index 0000000000..24260e1c34
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-infoplist-process.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies the Info.plist preprocessor functionality.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'infoplist-process'
+ INFO_PLIST_PATH = 'Test.app/Contents/Info.plist'
+
+ # First process both keys.
+ test.set_configuration('One')
+ test.run_gyp('test1.gyp', chdir=CHDIR)
+ test.build('test1.gyp', test.ALL, chdir=CHDIR)
+ info_plist = test.built_file_path(INFO_PLIST_PATH, chdir=CHDIR)
+ test.must_exist(info_plist)
+ test.must_contain(info_plist, 'Foo')
+ test.must_contain(info_plist, 'Bar')
+
+ # Then process a single key.
+ test.set_configuration('Two')
+ test.run_gyp('test2.gyp', chdir=CHDIR)
+ test.build('test2.gyp', chdir=CHDIR)
+ info_plist = test.built_file_path(INFO_PLIST_PATH, chdir=CHDIR)
+ test.must_exist(info_plist)
+ test.must_contain(info_plist, 'com.google.Test') # Normal expansion works.
+ test.must_contain(info_plist, 'Foo (Bar)')
+ test.must_contain(info_plist, 'PROCESSED_KEY2')
+
+ # Then turn off the processor.
+ test.set_configuration('Three')
+ test.run_gyp('test3.gyp', chdir=CHDIR)
+ test.build('test3.gyp', chdir=CHDIR)
+ info_plist = test.built_file_path('Test App.app/Contents/Info.plist',
+ chdir=CHDIR)
+ test.must_exist(info_plist)
+ test.must_contain(info_plist, 'com.google.Test') # Normal expansion works.
+ test.must_contain(info_plist, 'PROCESSED_KEY1')
+ test.must_contain(info_plist, 'PROCESSED_KEY2')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-installname.py b/third_party/python/gyp/test/mac/gyptest-installname.py
new file mode 100644
index 0000000000..17831aeaf4
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-installname.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that LD_DYLIB_INSTALL_NAME and DYLIB_INSTALL_NAME_BASE are handled
+correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import re
+import subprocess
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'installname'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+
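+ # Extract the LC_ID_DYLIB install name from the `otool -l` output.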
+ def GetInstallname(p):
+ p = test.built_file_path(p, chdir=CHDIR)
+ r = re.compile(r'cmd LC_ID_DYLIB.*?name (.*?) \(offset \d+\)', re.DOTALL)
+ proc = subprocess.Popen(['otool', '-l', p], stdout=subprocess.PIPE)
+ o = proc.communicate()[0]
+ assert not proc.returncode
+ m = r.search(o)
+ assert m
+ return m.group(1)
+
+ if (GetInstallname('libdefault_installname.dylib') !=
+ '/usr/local/lib/libdefault_installname.dylib'):
+ test.fail_test()
+
+ if (GetInstallname('My Framework.framework/My Framework') !=
+ '/Library/Frameworks/My Framework.framework/'
+ 'Versions/A/My Framework'):
+ test.fail_test()
+
+ if (GetInstallname('libexplicit_installname.dylib') !=
+ 'Trapped in a dynamiclib factory'):
+ test.fail_test()
+
+ if (GetInstallname('libexplicit_installname_base.dylib') !=
+ '@executable_path/../../../libexplicit_installname_base.dylib'):
+ test.fail_test()
+
+ if (GetInstallname('My Other Framework.framework/My Other Framework') !=
+ '@executable_path/../../../My Other Framework.framework/'
+ 'Versions/A/My Other Framework'):
+ test.fail_test()
+
+ if (GetInstallname('libexplicit_installname_with_base.dylib') !=
+ '/usr/local/lib/libexplicit_installname_with_base.dylib'):
+ test.fail_test()
+
+ if (GetInstallname('libexplicit_installname_with_explicit_base.dylib') !=
+ '@executable_path/../libexplicit_installname_with_explicit_base.dylib'):
+ test.fail_test()
+
+ if (GetInstallname('libboth_base_and_installname.dylib') !=
+ 'Still trapped in a dynamiclib factory'):
+ test.fail_test()
+
+ if (GetInstallname('install_name_with_info_plist.framework/'
+ 'install_name_with_info_plist') !=
+ '/Library/Frameworks/install_name_with_info_plist.framework/'
+ 'Versions/A/install_name_with_info_plist'):
+ test.fail_test()
+
+ if ('DYLIB_INSTALL_NAME_BASE:standardizepath: command not found' in
+ test.stdout()):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-kext.py b/third_party/python/gyp/test/mac/gyptest-kext.py
new file mode 100755
index 0000000000..56790bdb65
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-kext.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that kext bundles are built correctly.
+"""
+
+import TestGyp
+import TestMac
+
+import os
+import plistlib
+import subprocess
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['xcode'])
+ test.run_gyp('kext.gyp', chdir='kext')
+ test.build('kext.gyp', test.ALL, chdir='kext')
+ test.built_file_must_exist('GypKext.kext/Contents/MacOS/GypKext',
+ chdir='kext')
+ test.built_file_must_exist('GypKext.kext/Contents/Info.plist',
+ chdir='kext')
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-ldflags-passed-to-libtool.py b/third_party/python/gyp/test/mac/gyptest-ldflags-passed-to-libtool.py
new file mode 100644
index 0000000000..e24e305d9f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-ldflags-passed-to-libtool.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that OTHER_LDFLAGS is passed to libtool.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'],
+ match = lambda a, b: True)
+
+ build_error_code = {
+ 'xcode': [1, 65], # 1 for xcode 3, 65 for xcode 4 (see `man sysexits`)
+ 'make': 2,
+ 'ninja': 1,
+ 'xcode-ninja': [1, 65],
+ }[test.format]
+
+ CHDIR = 'ldflags-libtool'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ test.build('test.gyp', 'ldflags_passed_to_libtool', chdir=CHDIR,
+ status=build_error_code)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-ldflags.py b/third_party/python/gyp/test/mac/gyptest-ldflags.py
new file mode 100644
index 0000000000..af44b8c5a0
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-ldflags.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that filenames passed to various linker flags are converted into
+build-directory relative paths correctly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ # The xcode-ninja generator mishandles gypfiles that are not at the
+ # project root.
+ # cf. https://code.google.com/p/gyp/issues/detail?id=460
+ if test.format == 'xcode-ninja':
+ test.skip_test()
+
+ CHDIR = 'ldflags'
+ test.run_gyp('subdirectory/test.gyp', chdir=CHDIR)
+
+ test.build('subdirectory/test.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
+
+
+# These flags from `man ld` could show up in OTHER_LDFLAGS and need path
+# translation.
+#
+# Done:
+# -exported_symbols_list filename
+# -unexported_symbols_list file
+# -reexported_symbols_list file
+# -sectcreate segname sectname file
+#
+# Will be done on demand:
+# -weak_library path_to_library
+# -reexport_library path_to_library
+# -lazy_library path_to_library
+# -upward_library path_to_library
+# -syslibroot rootdir
+# -framework name[,suffix]
+# -weak_framework name[,suffix]
+# -reexport_framework name[,suffix]
+# -lazy_framework name[,suffix]
+# -upward_framework name[,suffix]
+# -force_load path_to_archive
+# -filelist file[,dirname]
+# -dtrace file
+# -order_file file # should use ORDER_FILE
+# -exported_symbols_order file
+# -bundle_loader executable # should use BUNDLE_LOADER
+# -alias_list filename
+# -seg_addr_table filename
+# -dylib_file install_name:file_name
+# -interposable_list filename
+# -object_path_lto filename
+#
+#
+# obsolete:
+# -sectorder segname sectname orderfile
+# -seg_addr_table_filename path
+#
+#
+# ??:
+# -map map_file_path
+# -sub_library library_name
+# -sub_umbrella framework_name
diff --git a/third_party/python/gyp/test/mac/gyptest-libraries.py b/third_party/python/gyp/test/mac/gyptest-libraries.py
new file mode 100755
index 0000000000..5ea4faf6ea
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-libraries.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies libraries (in link_settings) are properly found.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ # The xcode-ninja generator mishandles gypfiles that are not at the
+ # project root.
+ # cf. https://code.google.com/p/gyp/issues/detail?id=460
+ if test.format == 'xcode-ninja':
+ test.skip_test()
+
+ test.skip_test() # bug=535
+
+ test.run_gyp('subdir/test.gyp', chdir='libraries')
+
+ test.build('subdir/test.gyp', test.ALL, chdir='libraries')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-libtool-zero.py b/third_party/python/gyp/test/mac/gyptest-libtool-zero.py
new file mode 100644
index 0000000000..ae5b7e635b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-libtool-zero.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that libraries get a proper mtime, so a rebuild is considered up to date.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'libtool-zero'
+
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ test.build('test.gyp', 'mylib', chdir=CHDIR)
+
+ test.up_to_date('test.gyp', 'mylib', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-loadable-module-bundle-product-extension.py b/third_party/python/gyp/test/mac/gyptest-loadable-module-bundle-product-extension.py
new file mode 100644
index 0000000000..7a60ca2d17
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-loadable-module-bundle-product-extension.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Tests that loadable_modules don't collide when using the same name with
+different file extensions.
+"""
+
+import TestGyp
+
+import os
+import struct
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'loadable-module-bundle-product-extension'
+ test.run_gyp('test.gyp',
+ '-G', 'xcode_ninja_target_pattern=^.*$',
+ chdir=CHDIR)
+
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+
+ test.must_exist(test.built_file_path('Collide.foo', chdir=CHDIR))
+ test.must_exist(test.built_file_path('Collide.bar', chdir=CHDIR))
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-loadable-module.py b/third_party/python/gyp/test/mac/gyptest-loadable-module.py
new file mode 100755
index 0000000000..77dde1d6cd
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-loadable-module.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Tests that a loadable_module target is built correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import os
+import struct
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'loadable-module'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+
+ # Binary.
+ binary = test.built_file_path(
+ 'test_loadable_module.plugin/Contents/MacOS/test_loadable_module',
+ chdir=CHDIR)
+ test.must_exist(binary)
+ MH_BUNDLE = 8
+ if struct.unpack('4I', open(binary, 'rb').read(16))[3] != MH_BUNDLE:
+ test.fail_test()
+
+ # Info.plist.
+ info_plist = test.built_file_path(
+ 'test_loadable_module.plugin/Contents/Info.plist', chdir=CHDIR)
+ test.must_exist(info_plist)
+ test.must_contain(info_plist, """
+ <key>CFBundleExecutable</key>
+ <string>test_loadable_module</string>
+""")
+
+ # PkgInfo.
+ test.built_file_must_not_exist(
+ 'test_loadable_module.plugin/Contents/PkgInfo', chdir=CHDIR)
+ test.built_file_must_not_exist(
+ 'test_loadable_module.plugin/Contents/Resources', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-lto.py b/third_party/python/gyp/test/mac/gyptest-lto.py
new file mode 100644
index 0000000000..d37068f336
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-lto.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that LTO flags work.
+"""
+
+import TestGyp
+
+import os
+import re
+import subprocess
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'lto'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+
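+ # Map a source file to the object-file path used by the current generator.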
+ def ObjPath(srcpath, target):
+ # TODO: Move this into TestGyp if it's needed elsewhere.
+ if test.format == 'xcode':
+ return os.path.join(CHDIR, 'build', 'test.build', 'Default',
+ target + '.build', 'Objects-normal', 'x86_64',
+ srcpath + '.o')
+ elif 'ninja' in test.format: # ninja, xcode-ninja
+ return os.path.join(CHDIR, 'out', 'Default', 'obj',
+ target + '.' + srcpath + '.o')
+ elif test.format == 'make':
+ return os.path.join(CHDIR, 'out', 'Default', 'obj.target',
+ target, srcpath + '.o')
+
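+ # Classify an object file via file(1): LTO-compiled C/C++/ObjC/ObjC++ sources
+ # should be LLVM bitcode, while the assembly file stays Mach-O.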
+ def ObjType(p, t_expected):
+ r = re.compile(r'nsyms\s+(\d+)')
+ o = subprocess.check_output(['file', p]).decode('utf-8')
+ objtype = 'unknown'
+ if ': Mach-O ' in o:
+ objtype = 'mach-o'
+ elif ': LLVM bitcode' in o:
+ objtype = 'llvm'
+ if objtype != t_expected:
+ print('Expected %s, got %s' % (t_expected, objtype))
+ test.fail_test()
+
+ ObjType(ObjPath('cfile', 'lto'), 'llvm')
+ ObjType(ObjPath('ccfile', 'lto'), 'llvm')
+ ObjType(ObjPath('mfile', 'lto'), 'llvm')
+ ObjType(ObjPath('mmfile', 'lto'), 'llvm')
+ ObjType(ObjPath('asmfile', 'lto'), 'mach-o')
+
+ ObjType(ObjPath('cfile', 'lto_static'), 'llvm')
+ ObjType(ObjPath('ccfile', 'lto_static'), 'llvm')
+ ObjType(ObjPath('mfile', 'lto_static'), 'llvm')
+ ObjType(ObjPath('mmfile', 'lto_static'), 'llvm')
+ ObjType(ObjPath('asmfile', 'lto_static'), 'mach-o')
+
+ test.pass_test()
+
+ # TODO: Probably test for -object_path_lto too, else dsymutil won't be
+ # useful maybe?
diff --git a/third_party/python/gyp/test/mac/gyptest-missing-cfbundlesignature.py b/third_party/python/gyp/test/mac/gyptest-missing-cfbundlesignature.py
new file mode 100644
index 0000000000..be66492467
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-missing-cfbundlesignature.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that an Info.plist without a CFBundleSignature works.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ test.run_gyp('test.gyp', chdir='missing-cfbundlesignature')
+ test.build('test.gyp', test.ALL, chdir='missing-cfbundlesignature')
+
+ test.built_file_must_match('mytarget.app/Contents/PkgInfo', 'APPL????',
+ chdir='missing-cfbundlesignature')
+
+ test.built_file_must_match('myothertarget.app/Contents/PkgInfo', 'APPL????',
+ chdir='missing-cfbundlesignature')
+
+ test.built_file_must_match('thirdtarget.app/Contents/PkgInfo', 'APPL????',
+ chdir='missing-cfbundlesignature')
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-non-strs-flattened-to-env.py b/third_party/python/gyp/test/mac/gyptest-non-strs-flattened-to-env.py
new file mode 100644
index 0000000000..cb42a939d4
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-non-strs-flattened-to-env.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that list xcode_settings are flattened before being exported to the
+environment.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'non-strs-flattened-to-env'
+ INFO_PLIST_PATH = 'Test.app/Contents/Info.plist'
+
+ test.run_gyp('test.gyp', chdir=CHDIR)
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+ info_plist = test.built_file_path(INFO_PLIST_PATH, chdir=CHDIR)
+ test.must_exist(info_plist)
+ test.must_contain(info_plist, '''\
+\t<key>My Variable</key>
+\t<string>some expansion</string>''')
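+ # A list-valued setting such as ['-fstack-protector-all',
+ # '-fno-strict-aliasing', '-DS="A Space"'] should reach the Info.plist
+ # preprocessor as the single space-joined string checked here.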
+ test.must_contain(info_plist, '''\
+\t<key>CFlags</key>
+\t<string>-fstack-protector-all -fno-strict-aliasing -DS="A Space"</string>''')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-objc-arc.py b/third_party/python/gyp/test/mac/gyptest-objc-arc.py
new file mode 100755
index 0000000000..b3192a19dd
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-objc-arc.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that ARC objc settings are handled correctly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ # set |match| to ignore build stderr output.
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'],
+ match = lambda a, b: True)
+
+ CHDIR = 'objc-arc'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ test.build('test.gyp', 'arc_enabled', chdir=CHDIR)
+ test.build('test.gyp', 'arc_disabled', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-objc-gc.py b/third_party/python/gyp/test/mac/gyptest-objc-gc.py
new file mode 100644
index 0000000000..0cec458983
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-objc-gc.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that GC objc settings are handled correctly.
+"""
+
+import TestGyp
+import TestMac
+
+import sys
+
+if sys.platform == 'darwin':
+ # set |match| to ignore build stderr output.
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'],
+ match = lambda a, b: True)
+
+ # Xcode 5.1 removed support for garbage-collection:
+ # error: garbage collection is no longer supported
+ if TestMac.Xcode.Version() < '0510':
+
+ CHDIR = 'objc-gc'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ build_error_code = {
+ 'xcode': [1, 65], # 1 for xcode 3, 65 for xcode 4 (see `man sysexits`)
+ 'make': 2,
+ 'ninja': 1,
+ }[test.format]
+
+ test.build('test.gyp', 'gc_exe_fails', chdir=CHDIR, status=build_error_code)
+ test.build(
+ 'test.gyp', 'gc_off_exe_req_lib', chdir=CHDIR, status=build_error_code)
+
+ test.build('test.gyp', 'gc_req_exe', chdir=CHDIR)
+ test.run_built_executable('gc_req_exe', chdir=CHDIR, stdout="gc on: 1\n")
+
+ test.build('test.gyp', 'gc_exe_req_lib', chdir=CHDIR)
+ test.run_built_executable(
+ 'gc_exe_req_lib', chdir=CHDIR, stdout="gc on: 1\n")
+
+ test.build('test.gyp', 'gc_exe', chdir=CHDIR)
+ test.run_built_executable('gc_exe', chdir=CHDIR, stdout="gc on: 1\n")
+
+ test.build('test.gyp', 'gc_off_exe', chdir=CHDIR)
+ test.run_built_executable('gc_off_exe', chdir=CHDIR, stdout="gc on: 0\n")
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-postbuild-copy-bundle.py b/third_party/python/gyp/test/mac/gyptest-postbuild-copy-bundle.py
new file mode 100644
index 0000000000..1f04d1cb36
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-postbuild-copy-bundle.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a postbuild copying a dependent framework into an app bundle is
+rerun if the resources in the framework change.
+"""
+
+import TestGyp
+
+import os.path
+import sys
+
+if sys.platform == 'darwin':
+ # TODO(thakis): Make this pass with the make generator, http://crbug.com/95529
+ test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
+
+ CHDIR = 'postbuild-copy-bundle'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ app_bundle_dir = test.built_file_path('Test App.app', chdir=CHDIR)
+ bundled_framework_dir = os.path.join(
+ app_bundle_dir, 'Contents', 'My Framework.framework', 'Resources')
+ final_plist_path = os.path.join(bundled_framework_dir, 'Info.plist')
+ final_resource_path = os.path.join(bundled_framework_dir, 'resource_file.sb')
+ final_copies_path = os.path.join(
+ app_bundle_dir, 'Contents', 'My Framework.framework', 'Versions', 'A',
+ 'Libraries', 'copied.txt')
+
+ # Check that the dependency was built and copied into the app bundle:
+ test.build('test.gyp', 'test_app', chdir=CHDIR)
+ test.must_exist(final_resource_path)
+ test.must_match(final_resource_path,
+ 'This is included in the framework bundle.\n')
+
+ test.must_exist(final_plist_path)
+ test.must_contain(final_plist_path, '''\
+\t<key>RandomKey</key>
+\t<string>RandomValue</string>''')
+
+ # Touch the dependency's bundle resource, and check that the modification
+ # makes it all the way into the app bundle:
+ test.sleep()
+ test.write('postbuild-copy-bundle/resource_file.sb', 'New text\n')
+ test.build('test.gyp', 'test_app', chdir=CHDIR)
+
+ test.must_exist(final_resource_path)
+ test.must_match(final_resource_path, 'New text\n')
+
+ # Check the same for the plist file.
+ test.sleep()
+ contents = test.read('postbuild-copy-bundle/Framework-Info.plist')
+ contents = contents.replace('RandomValue', 'NewRandomValue')
+ test.write('postbuild-copy-bundle/Framework-Info.plist', contents)
+ test.build('test.gyp', 'test_app', chdir=CHDIR)
+
+ test.must_exist(final_plist_path)
+ test.must_contain(final_plist_path, '''\
+\t<key>RandomKey</key>
+\t<string>NewRandomValue</string>''')
+
+ # Check the same for the copies section, test for http://crbug.com/157077
+ test.sleep()
+ contents = test.read('postbuild-copy-bundle/copied.txt')
+ contents = contents.replace('old', 'new')
+ test.write('postbuild-copy-bundle/copied.txt', contents)
+ test.build('test.gyp', 'test_app', chdir=CHDIR)
+
+ test.must_exist(final_copies_path)
+ test.must_contain(final_copies_path, 'new copied file')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-postbuild-defaults.py b/third_party/python/gyp/test/mac/gyptest-postbuild-defaults.py
new file mode 100644
index 0000000000..0f7d25bd89
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-postbuild-defaults.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a postbuild invoking |defaults| works.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'postbuild-defaults'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+
+ result_file = test.built_file_path('result', chdir=CHDIR)
+ test.must_exist(result_file)
+ test.must_contain(result_file, '''\
+Test
+${PRODUCT_NAME}
+''')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-postbuild-fail.py b/third_party/python/gyp/test/mac/gyptest-postbuild-fail.py
new file mode 100755
index 0000000000..1a229df695
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-postbuild-fail.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a failing postbuild step makes the build fail.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ # set |match| to ignore build stderr output.
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'],
+ match = lambda a, b: True)
+
+ test.run_gyp('test.gyp', chdir='postbuild-fail')
+
+ build_error_code = {
+ 'xcode': [1, 65], # 1 for xcode 3, 65 for xcode 4 (see `man sysexits`)
+ 'make': 2,
+ 'ninja': 1,
+ 'xcode-ninja': [1, 65],
+ }[test.format]
+
+
+ # If a postbuild fails, all postbuilds should be re-run on the next build.
+ # In Xcode 3, even if the first postbuild failed, the other postbuilds were
+ # still executed. In Xcode 4, postbuilds are stopped after the first
+ # failing postbuild. This test checks for the Xcode 4 behavior.
+
+ # Ignore this test on Xcode 3.
+ import subprocess
+ job = subprocess.Popen(['xcodebuild', '-version'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ out, _ = job.communicate()
+ out = out.decode('utf-8')
+ if job.returncode != 0:
+ print(out)
+ raise Exception('Error %d running xcodebuild' % job.returncode)
+ if out.startswith('Xcode 3.'):
+ test.pass_test()
+
+ # Non-bundles
+ test.build('test.gyp', 'nonbundle', chdir='postbuild-fail',
+ status=build_error_code)
+ test.built_file_must_not_exist('static_touch',
+ chdir='postbuild-fail')
+  # Verify the target is not considered up to date by checking that building
+  # again still fails.
+ test.build('test.gyp', 'nonbundle', chdir='postbuild-fail',
+ status=build_error_code)
+
+
+ # Bundles
+ test.build('test.gyp', 'bundle', chdir='postbuild-fail',
+ status=build_error_code)
+ test.built_file_must_not_exist('dynamic_touch',
+ chdir='postbuild-fail')
+  # Verify the target is not considered up to date by checking that building
+  # again still fails.
+ test.build('test.gyp', 'bundle', chdir='postbuild-fail',
+ status=build_error_code)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-postbuild-multiple-configurations.py b/third_party/python/gyp/test/mac/gyptest-postbuild-multiple-configurations.py
new file mode 100644
index 0000000000..84694f36cc
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-postbuild-multiple-configurations.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that postbuilds work in projects with multiple configurations.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'postbuild-multiple-configurations'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ for configuration in ['Debug', 'Release']:
+ test.set_configuration(configuration)
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+ test.built_file_must_exist('postbuild-file', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-postbuild-static-library.py b/third_party/python/gyp/test/mac/gyptest-postbuild-static-library.py
new file mode 100644
index 0000000000..8f9a6ebcb0
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-postbuild-static-library.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that postbuilds on static libraries work, and that sourceless
+libraries don't cause failures at gyp time.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['make', 'xcode'])
+
+ CHDIR = 'postbuild-static-library'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+ test.build('test.gyp', 'my_lib', chdir=CHDIR)
+ # Building my_sourceless_lib doesn't work with make. gyp should probably
+ # forbid sourceless static libraries, since they're pretty pointless.
+ # But they shouldn't cause gyp time exceptions.
+
+ test.built_file_must_exist('postbuild-file', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-postbuild.py b/third_party/python/gyp/test/mac/gyptest-postbuild.py
new file mode 100755
index 0000000000..684e7b8426
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-postbuild.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that postbuild steps work.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ test.run_gyp('test.gyp', chdir='postbuilds')
+
+ test.build('test.gyp', test.ALL, chdir='postbuilds')
+
+ # See comment in test/subdirectory/gyptest-subdir-default.py
+ if test.format == 'xcode':
+ chdir = 'postbuilds/subdirectory'
+ else:
+ chdir = 'postbuilds'
+
+ # Created by the postbuild scripts
+ test.built_file_must_exist('el.a_touch',
+ type=test.STATIC_LIB,
+ chdir='postbuilds')
+ test.built_file_must_exist('el.a_gyp_touch',
+ type=test.STATIC_LIB,
+ chdir='postbuilds')
+ test.built_file_must_exist('nest_el.a_touch',
+ type=test.STATIC_LIB,
+ chdir=chdir)
+ test.built_file_must_exist(
+ 'dyna.framework/Versions/A/dyna_touch',
+ chdir='postbuilds')
+ test.built_file_must_exist(
+ 'dyna.framework/Versions/A/dyna_gyp_touch',
+ chdir='postbuilds')
+ test.built_file_must_exist(
+ 'nest_dyna.framework/Versions/A/nest_dyna_touch',
+ chdir=chdir)
+ test.built_file_must_exist('dyna_standalone.dylib_gyp_touch',
+ type=test.SHARED_LIB,
+ chdir='postbuilds')
+ test.built_file_must_exist('copied_file.txt', chdir='postbuilds')
+ test.built_file_must_exist('copied_file_2.txt', chdir=chdir)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-prefixheader.py b/third_party/python/gyp/test/mac/gyptest-prefixheader.py
new file mode 100755
index 0000000000..0cf85f9422
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-prefixheader.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that GCC_PREFIX_HEADER works.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+ test.run_gyp('test.gyp', chdir='prefixheader')
+
+ test.build('test.gyp', test.ALL, chdir='prefixheader')
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-rebuild.py b/third_party/python/gyp/test/mac/gyptest-rebuild.py
new file mode 100755
index 0000000000..c7d8cad02d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-rebuild.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that app bundles are rebuilt correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'rebuild'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ test.build('test.gyp', 'test_app', chdir=CHDIR)
+
+ # Touch a source file, rebuild, and check that the app target is up-to-date.
+ test.touch('rebuild/main.c')
+ test.build('test.gyp', 'test_app', chdir=CHDIR)
+
+ test.up_to_date('test.gyp', 'test_app', chdir=CHDIR)
+
+ # Xcode runs postbuilds on every build, so targets with postbuilds are
+ # never marked as up_to_date.
+ if test.format != 'xcode':
+ # Same for a framework bundle.
+ test.build('test.gyp', 'test_framework_postbuilds', chdir=CHDIR)
+ test.up_to_date('test.gyp', 'test_framework_postbuilds', chdir=CHDIR)
+
+ # Test that an app bundle with a postbuild that touches the app binary needs
+ # to be built only once.
+ test.build('test.gyp', 'test_app_postbuilds', chdir=CHDIR)
+ test.up_to_date('test.gyp', 'test_app_postbuilds', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-rpath.py b/third_party/python/gyp/test/mac/gyptest-rpath.py
new file mode 100644
index 0000000000..a82e2fd562
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-rpath.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that LD_DYLIB_INSTALL_NAME and DYLIB_INSTALL_NAME_BASE are handled
+correctly.
+"""
+
+import TestGyp
+
+import re
+import subprocess
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'rpath'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+
+ def GetRpaths(p):
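+    # Run `otool -l` on the built file and collect the path from every
+    # LC_RPATH load command; re.DOTALL lets the pattern span the multi-line
+    # load-command output.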
+ p = test.built_file_path(p, chdir=CHDIR)
+ r = re.compile(r'cmd LC_RPATH.*?path (.*?) \(offset \d+\)', re.DOTALL)
+ proc = subprocess.Popen(['otool', '-l', p], stdout=subprocess.PIPE)
+ o = proc.communicate()[0].decode('utf-8')
+ assert not proc.returncode
+ return r.findall(o)
+
+ if GetRpaths('libdefault_rpath.dylib') != []:
+ test.fail_test()
+
+ if GetRpaths('libexplicit_rpath.dylib') != ['@executable_path/.']:
+ test.fail_test()
+
+ if (GetRpaths('libexplicit_rpaths_escaped.dylib') !=
+ ['First rpath', 'Second rpath']):
+ test.fail_test()
+
+ if GetRpaths('My Framework.framework/My Framework') != ['@loader_path/.']:
+ test.fail_test()
+
+ if GetRpaths('executable') != ['@executable_path/.']:
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-sdkroot.py b/third_party/python/gyp/test/mac/gyptest-sdkroot.py
new file mode 100644
index 0000000000..f8edbaa583
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-sdkroot.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that setting SDKROOT works.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import os
+import subprocess
+import sys
+
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ def GetSDKPath(sdk):
+ """Return SDKROOT if the SDK version |sdk| is installed or empty string."""
+ DEVNULL = open(os.devnull, 'wb')
+ try:
+ proc = subprocess.Popen(
+ ['xcodebuild', '-version', '-sdk', 'macosx' + sdk, 'Path'],
+ stdout=subprocess.PIPE, stderr=DEVNULL)
+      return proc.communicate()[0].decode('utf-8').rstrip('\n')
+ finally:
+ DEVNULL.close()
+
+ def SelectSDK():
+ """Select the oldest SDK installed (greater than 10.6)."""
+ for sdk in ['10.6', '10.7', '10.8', '10.9']:
+ path = GetSDKPath(sdk)
+ if path:
+ return True, sdk, path
+ return False, '', ''
+
+ # Make sure this works on the bots, which only have the 10.6 sdk, and on
+ # dev machines which usually don't have the 10.6 sdk.
+ sdk_found, sdk, sdk_path = SelectSDK()
+ if not sdk_found:
+ test.fail_test()
+
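+  # Substitute the selected SDK version into the format placeholder in
+  # sdkroot/test.gyp before generating the project.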
+ test.write('sdkroot/test.gyp', test.read('sdkroot/test.gyp') % sdk)
+
+ test.run_gyp('test.gyp', '-D', 'sdk_path=%s' % sdk_path,
+ chdir='sdkroot')
+ test.build('test.gyp', test.ALL, chdir='sdkroot')
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-sourceless-module.py b/third_party/python/gyp/test/mac/gyptest-sourceless-module.py
new file mode 100644
index 0000000000..f2801c20aa
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-sourceless-module.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that bundles that have no 'sources' (pure resource containers) work.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ test.run_gyp('test.gyp', chdir='sourceless-module')
+
+ # Just needs to build without errors.
+ test.build('test.gyp', 'empty_bundle', chdir='sourceless-module')
+ test.built_file_must_not_exist(
+ 'empty_bundle.bundle', chdir='sourceless-module')
+
+ # Needs to build, and contain a resource.
+ test.build('test.gyp', 'resource_bundle', chdir='sourceless-module')
+
+ test.built_file_must_exist(
+ 'resource_bundle.bundle/Contents/Resources/foo.manifest',
+ chdir='sourceless-module')
+ test.built_file_must_not_exist(
+ 'resource_bundle.bundle/Contents/MacOS/resource_bundle',
+ chdir='sourceless-module')
+
+ # Build an app containing an actionless bundle.
+ test.build(
+ 'test.gyp',
+ 'bundle_dependent_on_resource_bundle_no_actions',
+ chdir='sourceless-module')
+
+ test.built_file_must_exist(
+ 'bundle_dependent_on_resource_bundle_no_actions.app/Contents/Resources/'
+ 'mac_resource_bundle_no_actions.bundle/Contents/Resources/empty.txt',
+ chdir='sourceless-module')
+
+ # Needs to build and cause the bundle to be built.
+ test.build(
+ 'test.gyp', 'dependent_on_resource_bundle', chdir='sourceless-module')
+
+ test.built_file_must_exist(
+ 'resource_bundle.bundle/Contents/Resources/foo.manifest',
+ chdir='sourceless-module')
+ test.built_file_must_not_exist(
+ 'resource_bundle.bundle/Contents/MacOS/resource_bundle',
+ chdir='sourceless-module')
+
+ # TODO(thakis): shared_libraries that have no sources but depend on static
+ # libraries currently only work with the ninja generator. This is used by
+ # chrome/mac's components build.
+ if test.format == 'ninja':
+ # Check that an executable depending on a resource framework links fine too.
+ test.build(
+ 'test.gyp', 'dependent_on_resource_framework', chdir='sourceless-module')
+
+ test.built_file_must_exist(
+ 'resource_framework.framework/Resources/foo.manifest',
+ chdir='sourceless-module')
+ test.built_file_must_exist(
+ 'resource_framework.framework/resource_framework',
+ chdir='sourceless-module')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-strip-default.py b/third_party/python/gyp/test/mac/gyptest-strip-default.py
new file mode 100644
index 0000000000..b851782fd5
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-strip-default.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that the default STRIP_STYLEs match between different generators.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import re
+import subprocess
+import sys
+import time
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR='strip'
+ test.run_gyp('test-defaults.gyp', chdir=CHDIR)
+
+ test.build('test-defaults.gyp', test.ALL, chdir=CHDIR)
+
+ # Lightweight check if stripping was done.
+ def OutPath(s):
+ return test.built_file_path(s, chdir=CHDIR)
+
+ def CheckNsyms(p, o_expected):
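+    # Dump the symbol table with `nm -aU` (-a: all entries, -U: skip
+    # undefined symbols) and compare the normalized output against the
+    # expected listing.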
+ proc = subprocess.Popen(['nm', '-aU', p], stdout=subprocess.PIPE)
+ o = proc.communicate()[0].decode('utf-8')
+
+ # Filter out mysterious "00 0000 OPT radr://5614542" symbol which
+ # is apparently only printed on the bots (older toolchain?).
+ # Yes, "radr", not "rdar".
+ o = ''.join(filter(lambda s: 'radr://5614542' not in s, o.splitlines(True)))
+
+ o = o.replace('A', 'T')
+ o = re.sub(r'^[a-fA-F0-9]+', 'XXXXXXXX', o, flags=re.MULTILINE)
+ assert not proc.returncode
+ if o != o_expected:
+ print('Stripping: Expected symbols """\n%s""", got """\n%s"""' % (
+ o_expected, o))
+ test.fail_test()
+
+ CheckNsyms(OutPath('libsingle_dylib.dylib'),
+"""\
+XXXXXXXX S _ci
+XXXXXXXX S _i
+XXXXXXXX T _the_function
+XXXXXXXX t _the_hidden_function
+XXXXXXXX T _the_used_function
+XXXXXXXX T _the_visible_function
+""")
+ CheckNsyms(OutPath('single_so.so'),
+"""\
+XXXXXXXX S _ci
+XXXXXXXX S _i
+XXXXXXXX T _the_function
+XXXXXXXX t _the_hidden_function
+XXXXXXXX T _the_used_function
+XXXXXXXX T _the_visible_function
+""")
+ CheckNsyms(OutPath('single_exe'),
+"""\
+XXXXXXXX T __mh_execute_header
+""")
+
+ CheckNsyms(test.built_file_path(
+ 'bundle_dylib.framework/Versions/A/bundle_dylib', chdir=CHDIR),
+"""\
+XXXXXXXX S _ci
+XXXXXXXX S _i
+XXXXXXXX T _the_function
+XXXXXXXX t _the_hidden_function
+XXXXXXXX T _the_used_function
+XXXXXXXX T _the_visible_function
+""")
+ CheckNsyms(test.built_file_path(
+ 'bundle_so.bundle/Contents/MacOS/bundle_so', chdir=CHDIR),
+"""\
+XXXXXXXX S _ci
+XXXXXXXX S _i
+XXXXXXXX T _the_function
+XXXXXXXX T _the_used_function
+XXXXXXXX T _the_visible_function
+""")
+ CheckNsyms(test.built_file_path(
+ 'bundle_exe.app/Contents/MacOS/bundle_exe', chdir=CHDIR),
+"""\
+XXXXXXXX T __mh_execute_header
+""")
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-strip.py b/third_party/python/gyp/test/mac/gyptest-strip.py
new file mode 100755
index 0000000000..d4694834ac
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-strip.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that stripping works.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+import TestMac
+
+import re
+import subprocess
+import sys
+import time
+
+print("This test is currently disabled: https://crbug.com/483696.")
+sys.exit(0)
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ test.run_gyp('test.gyp', chdir='strip')
+
+ test.build('test.gyp', test.ALL, chdir='strip')
+
+ # Lightweight check if stripping was done.
+ def OutPath(s):
+ return test.built_file_path(s, type=test.SHARED_LIB, chdir='strip')
+
+ def CheckNsyms(p, n_expected):
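+    # `otool -l` prints the LC_SYMTAB load command; its 'nsyms' field is the
+    # number of symbols remaining in the binary.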
+ r = re.compile(r'nsyms\s+(\d+)')
+    o = subprocess.check_output(['otool', '-l', p]).decode('utf-8')
+ m = r.search(o)
+ n = int(m.group(1))
+ if n != n_expected:
+ print('Stripping: Expected %d symbols, got %d' % (n_expected, n))
+ test.fail_test()
+
+  # Starting with Xcode 5.0, clang adds an additional symbol to the compiled
+ # file when using a relative path to the input file. So when using ninja
+ # with Xcode 5.0 or higher, take this additional symbol into consideration
+ # for unstripped builds (it is stripped by all strip commands).
+ expected_extra_symbol_count = 0
+ if test.format in ['ninja', 'xcode-ninja'] \
+ and TestMac.Xcode.Version() >= '0500':
+ expected_extra_symbol_count = 1
+
+  # The actual numbers here are not interesting; they just need to be the same
+ # in both the xcode and the make build.
+ CheckNsyms(OutPath('no_postprocess'), 29 + expected_extra_symbol_count)
+ CheckNsyms(OutPath('no_strip'), 29 + expected_extra_symbol_count)
+ CheckNsyms(OutPath('strip_all'), 0)
+ CheckNsyms(OutPath('strip_nonglobal'), 6)
+ CheckNsyms(OutPath('strip_debugging'), 7)
+ CheckNsyms(OutPath('strip_all_custom_flags'), 0)
+ CheckNsyms(test.built_file_path(
+ 'strip_all_bundle.framework/Versions/A/strip_all_bundle', chdir='strip'),
+ 0)
+ CheckNsyms(OutPath('strip_save'), 7)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-swift-library.py b/third_party/python/gyp/test/mac/gyptest-swift-library.py
new file mode 100644
index 0000000000..d3433753fd
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-swift-library.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a swift framework builds correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+import TestMac
+
+import collections
+import sys
+import subprocess
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['xcode'])
+
+ # Ensures that the given symbol is present in the given file, by running nm.
+ def CheckHasSymbolName(path, symbol):
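+    # `nm -j` prints only the symbol names, one per line.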
+    output = subprocess.check_output(['nm', '-j', path]).decode('utf-8')
+ idx = output.find(symbol)
+ if idx == -1:
+      print('Swift: Could not find symbol: %s' % symbol)
+ test.fail_test()
+
+ test_cases = []
+
+  # Run this for iOS on Xcode 6.0 or greater.
+ if TestMac.Xcode.Version() >= '0600':
+ test_cases.append(('Default', 'iphoneos'))
+ test_cases.append(('Default', 'iphonesimulator'))
+
+  # Run it for Mac on Xcode 6.1 or greater.
+ if TestMac.Xcode.Version() >= '0610':
+ test_cases.append(('Default', None))
+
+ # Generate the project.
+ test.run_gyp('test.gyp', chdir='swift-library')
+
+ # Build and verify for each configuration.
+ for configuration, sdk in test_cases:
+ kwds = collections.defaultdict(list)
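+    # defaultdict(list) lets the code below extend kwds['arguments'] without
+    # first checking whether the key exists.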
+ if test.format == 'xcode':
+ if sdk is not None:
+ kwds['arguments'].extend(['-sdk', sdk])
+
+ test.set_configuration(configuration)
+ test.build('test.gyp', 'SwiftFramework', chdir='swift-library', **kwds)
+
+ filename = 'SwiftFramework.framework/SwiftFramework'
+ result_file = test.built_file_path(filename, chdir='swift-library')
+
+ test.must_exist(result_file)
+
+      # Check that our Swift class (GypSwiftTest) is present in the built
+      # binary.
+ CheckHasSymbolName(result_file, "C14SwiftFramework12GypSwiftTest")
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-type-envvars.py b/third_party/python/gyp/test/mac/gyptest-type-envvars.py
new file mode 100755
index 0000000000..a5203c5f1e
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-type-envvars.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test that MACH_O_TYPE etc are set correctly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ test.run_gyp('test.gyp',
+ '-G', 'xcode_ninja_target_pattern=^(?!nonbundle_none).*$',
+ chdir='type_envvars')
+
+ test.build('test.gyp', test.ALL, chdir='type_envvars')
+
+ # The actual test is done by postbuild scripts during |test.build()|.
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-unicode-settings.py b/third_party/python/gyp/test/mac/gyptest-unicode-settings.py
new file mode 100644
index 0000000000..a71b3bd9a3
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-unicode-settings.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that unicode strings in 'xcode_settings' work.
+Also checks that ASCII control characters are escaped properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['xcode'])
+ test.run_gyp('test.gyp', chdir='unicode-settings')
+ test.build('test.gyp', test.ALL, chdir='unicode-settings')
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-xcode-env-order.py b/third_party/python/gyp/test/mac/gyptest-xcode-env-order.py
new file mode 100755
index 0000000000..bda19988b2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-xcode-env-order.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that dependent Xcode settings are processed correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+import TestMac
+
+import subprocess
+import sys
+
+if sys.platform == 'darwin':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'xcode-env-order'
+ INFO_PLIST_PATH = 'Test.app/Contents/Info.plist'
+
+ test.run_gyp('test.gyp', chdir=CHDIR)
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+
+ # Env vars in 'copies' filenames.
+ test.built_file_must_exist('Test-copy-brace/main.c', chdir=CHDIR)
+ test.built_file_must_exist('Test-copy-paren/main.c', chdir=CHDIR)
+ test.built_file_must_exist('Test-copy-bare/main.c', chdir=CHDIR)
+
+ # Env vars in 'actions' filenames and inline actions
+ test.built_file_must_exist('action-copy-brace.txt', chdir=CHDIR)
+ test.built_file_must_exist('action-copy-paren.txt', chdir=CHDIR)
+ test.built_file_must_exist('action-copy-bare.txt', chdir=CHDIR)
+
+ # Env vars in 'rules' filenames and inline actions
+ test.built_file_must_exist('rule-copy-brace.txt', chdir=CHDIR)
+ test.built_file_must_exist('rule-copy-paren.txt', chdir=CHDIR)
+ # TODO: see comment in test.gyp for this file.
+ #test.built_file_must_exist('rule-copy-bare.txt', chdir=CHDIR)
+
+ # Env vars in Info.plist.
+ info_plist = test.built_file_path(INFO_PLIST_PATH, chdir=CHDIR)
+ test.must_exist(info_plist)
+
+ test.must_contain(info_plist, '''\
+\t<key>BraceProcessedKey1</key>
+\t<string>D:/Source/Project/Test</string>''')
+ test.must_contain(info_plist, '''\
+\t<key>BraceProcessedKey2</key>
+\t<string>/Source/Project/Test</string>''')
+ test.must_contain(info_plist, '''\
+\t<key>BraceProcessedKey3</key>
+\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
+
+ test.must_contain(info_plist, '''\
+\t<key>ParenProcessedKey1</key>
+\t<string>D:/Source/Project/Test</string>''')
+ test.must_contain(info_plist, '''\
+\t<key>ParenProcessedKey2</key>
+\t<string>/Source/Project/Test</string>''')
+ test.must_contain(info_plist, '''\
+\t<key>ParenProcessedKey3</key>
+\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
+
+ test.must_contain(info_plist, '''\
+\t<key>BareProcessedKey1</key>
+\t<string>D:/Source/Project/Test</string>''')
+ test.must_contain(info_plist, '''\
+\t<key>BareProcessedKey2</key>
+\t<string>/Source/Project/Test</string>''')
+ # NOTE: For bare variables, $PRODUCT_TYPE is not replaced! It _is_ replaced
+ # if it's not right at the start of the string (e.g. ':$PRODUCT_TYPE'), so
+ # this looks like an Xcode bug. This bug isn't emulated (yet?), so check this
+ # only for Xcode.
+ if test.format == 'xcode' and TestMac.Xcode.Version() < '0500':
+ test.must_contain(info_plist, '''\
+\t<key>BareProcessedKey3</key>
+\t<string>$PRODUCT_TYPE:D:/Source/Project/Test</string>''')
+ else:
+ # The bug has been fixed by Xcode version 5.0.0.
+ test.must_contain(info_plist, '''\
+\t<key>BareProcessedKey3</key>
+\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
+
+ test.must_contain(info_plist, '''\
+\t<key>MixedProcessedKey</key>
+\t<string>/Source/Project:Test:mh_execute</string>''')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-xcode-gcc-clang.py b/third_party/python/gyp/test/mac/gyptest-xcode-gcc-clang.py
new file mode 100644
index 0000000000..981c3fc564
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-xcode-gcc-clang.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that xcode-style GCC_... settings that require clang are handled
+properly.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ CHDIR = 'xcode-gcc'
+ test.run_gyp('test-clang.gyp', chdir=CHDIR)
+
+ test.build('test-clang.gyp', 'aliasing_yes', chdir=CHDIR)
+ test.run_built_executable('aliasing_yes', chdir=CHDIR, stdout="1\n")
+ test.build('test-clang.gyp', 'aliasing_no', chdir=CHDIR)
+ test.run_built_executable('aliasing_no', chdir=CHDIR, stdout="0\n")
+
+ # The default behavior changed: strict aliasing used to be off, now it's on
+ # by default. The important part is that this is identical for all generators
+ # (which it is). TODO(thakis): Enable this once the bots have a newer Xcode.
+ #test.build('test-clang.gyp', 'aliasing_default', chdir=CHDIR)
+ #test.run_built_executable('aliasing_default', chdir=CHDIR, stdout="1\n")
+ # For now, just check the generated ninja file:
+ if test.format == 'ninja':
+ contents = open(test.built_file_path('obj/aliasing_default.ninja',
+ chdir=CHDIR)).read()
+ if 'strict-aliasing' in contents:
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-xcode-gcc.py b/third_party/python/gyp/test/mac/gyptest-xcode-gcc.py
new file mode 100644
index 0000000000..a1d201ae03
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-xcode-gcc.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that xcode-style GCC_... settings are handled properly.
+"""
+
+import TestGyp
+
+import os
+import subprocess
+import sys
+
+def IgnoreOutput(string, expected_string):
+ return True
+
+def CompilerVersion(compiler):
+ stdout = subprocess.check_output([compiler, '-v'], stderr=subprocess.STDOUT)
+ stdout = stdout.decode('utf-8')
+ return stdout.rstrip('\n')
+
+def CompilerSupportsWarnAboutInvalidOffsetOfMacro(test):
+ # "clang" does not support the "-Winvalid-offsetof" flag, and silently
+ # ignore it. Starting with Xcode 5.0.0, "gcc" is just a "clang" binary with
+ # some hard-coded include path hack, so use the output of "-v" to detect if
+ # the compiler supports the flag or not.
+ return 'clang' not in CompilerVersion('/usr/bin/cc')
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+ if test.format == 'xcode-ninja':
+ test.skip_test()
+
+ CHDIR = 'xcode-gcc'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+
+  # List of targets that'll pass. Each is expected to have a counterpart of
+  # the same name with '-fail' appended that fails to build.
+ targets = [
+ 'warn_about_missing_newline',
+ ]
+
+  # clang doesn't warn on invalid offsetofs; it silently ignores
+  # -Wno-invalid-offsetof.
+ if CompilerSupportsWarnAboutInvalidOffsetOfMacro(test):
+ targets.append('warn_about_invalid_offsetof_macro')
+
+ for target in targets:
+ test.build('test.gyp', target, chdir=CHDIR)
+ test.built_file_must_exist(target, chdir=CHDIR)
+ fail_target = target + '-fail'
+ test.build('test.gyp', fail_target, chdir=CHDIR, status=None,
+ stderr=None, match=IgnoreOutput)
+ test.built_file_must_not_exist(fail_target, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-xcode-support-actions.py b/third_party/python/gyp/test/mac/gyptest-xcode-support-actions.py
new file mode 100755
index 0000000000..ecc1402972
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-xcode-support-actions.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that support actions are properly created.
+"""
+
+import TestGyp
+
+import os
+import subprocess
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['xcode'])
+
+ CHDIR = 'xcode-support-actions'
+
+ test.run_gyp('test.gyp', '-Gsupport_target_suffix=_customsuffix', chdir=CHDIR)
+ test.build('test.gyp', target='target_customsuffix', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-xctest.py b/third_party/python/gyp/test/mac/gyptest-xctest.py
new file mode 100644
index 0000000000..fb478bb31c
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-xctest.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that xctest targets are correctly configured.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['xcode'])
+
+ # This test appears to be flaky.
+ test.skip_test() # bug=531
+
+ # Ignore this test if Xcode 5 is not installed
+ import subprocess
+ job = subprocess.Popen(['xcodebuild', '-version'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ out, err = job.communicate()
+ if job.returncode != 0:
+ raise Exception('Error %d running xcodebuild' % job.returncode)
+  xcode_version, build_number = out.decode('utf-8').splitlines()
+ # Convert the version string from 'Xcode 5.0' to ['5','0'].
+ xcode_version = xcode_version.split()[-1].split('.')
+ if xcode_version < ['5']:
+ test.pass_test()
+
+ CHDIR = 'xctest'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+ test.build('test.gyp', chdir=CHDIR, arguments=['-scheme', 'classes', 'test'])
+
+ test.built_file_must_match('tests.xctest/Contents/Resources/resource.txt',
+ 'foo\n', chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/gyptest-xcuitest.py b/third_party/python/gyp/test/mac/gyptest-xcuitest.py
new file mode 100755
index 0000000000..410de297d0
--- /dev/null
+++ b/third_party/python/gyp/test/mac/gyptest-xcuitest.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that xcuitest targets are correctly configured.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+ test = TestGyp.TestGyp(formats=['xcode'])
+
+ # Ignore this test if Xcode 5 is not installed
+ import subprocess
+ job = subprocess.Popen(['xcodebuild', '-version'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ out, err = job.communicate()
+ if job.returncode != 0:
+ raise Exception('Error %d running xcodebuild' % job.returncode)
+ xcode_version, build_number = out.decode('utf-8').splitlines()
+ # Convert the version string from 'Xcode 5.0' to ['5','0'].
+ xcode_version = xcode_version.split()[-1].split('.')
+ if xcode_version < ['7']:
+ test.pass_test()
+
+ CHDIR = 'xcuitest'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+ test.build('test.gyp', chdir=CHDIR, arguments=[
+ '-target', 'tests',
+ '-sdk', 'iphonesimulator',
+ ])
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/mac/identical-name/proxy/proxy.cc b/third_party/python/gyp/test/mac/identical-name/proxy/proxy.cc
new file mode 100644
index 0000000000..8e1782da63
--- /dev/null
+++ b/third_party/python/gyp/test/mac/identical-name/proxy/proxy.cc
@@ -0,0 +1,2 @@
+// Empty file
+
diff --git a/third_party/python/gyp/test/mac/identical-name/proxy/proxy.gyp b/third_party/python/gyp/test/mac/identical-name/proxy/proxy.gyp
new file mode 100644
index 0000000000..38f44af1b5
--- /dev/null
+++ b/third_party/python/gyp/test/mac/identical-name/proxy/proxy.gyp
@@ -0,0 +1,9 @@
+{
+ 'includes': ['../test.gypi'],
+ 'targets': [{
+ 'target_name': 'testlib',
+ 'type': 'none',
+ 'dependencies': ['testlib/testlib.gyp:testlib'],
+ 'sources': ['proxy.cc'],
+ }],
+}
diff --git a/third_party/python/gyp/test/mac/identical-name/proxy/testlib/testlib.cc b/third_party/python/gyp/test/mac/identical-name/proxy/testlib/testlib.cc
new file mode 100644
index 0000000000..8e1782da63
--- /dev/null
+++ b/third_party/python/gyp/test/mac/identical-name/proxy/testlib/testlib.cc
@@ -0,0 +1,2 @@
+// Empty file
+
diff --git a/third_party/python/gyp/test/mac/identical-name/proxy/testlib/testlib.gyp b/third_party/python/gyp/test/mac/identical-name/proxy/testlib/testlib.gyp
new file mode 100644
index 0000000000..ed1c62e982
--- /dev/null
+++ b/third_party/python/gyp/test/mac/identical-name/proxy/testlib/testlib.gyp
@@ -0,0 +1,8 @@
+{
+ 'includes': ['../../test.gypi'],
+ 'targets': [{
+ 'target_name': 'testlib',
+ 'type': 'static_library',
+ 'sources': ['testlib.cc'],
+ }],
+}
diff --git a/third_party/python/gyp/test/mac/identical-name/test-should-fail.gyp b/third_party/python/gyp/test/mac/identical-name/test-should-fail.gyp
new file mode 100644
index 0000000000..72bfc7af0f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/identical-name/test-should-fail.gyp
@@ -0,0 +1,10 @@
+{
+ 'targets': [{
+ 'target_name': 'test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'testlib/testlib.gyp:proxy',
+ 'proxy/proxy.gyp:testlib',
+ ],
+ }],
+}
diff --git a/third_party/python/gyp/test/mac/identical-name/test.gyp b/third_party/python/gyp/test/mac/identical-name/test.gyp
new file mode 100644
index 0000000000..717220e866
--- /dev/null
+++ b/third_party/python/gyp/test/mac/identical-name/test.gyp
@@ -0,0 +1,11 @@
+{
+ 'includes': ['test.gypi'],
+ 'targets': [{
+ 'target_name': 'test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'testlib/testlib.gyp:proxy',
+ 'proxy/proxy.gyp:testlib',
+ ],
+ }],
+}
\ No newline at end of file
diff --git a/third_party/python/gyp/test/mac/identical-name/test.gypi b/third_party/python/gyp/test/mac/identical-name/test.gypi
new file mode 100644
index 0000000000..61b7c2badf
--- /dev/null
+++ b/third_party/python/gyp/test/mac/identical-name/test.gypi
@@ -0,0 +1,7 @@
+{
+ 'target_defaults': {
+ 'xcode_settings': {
+ 'SYMROOT': '<(DEPTH)/$SRCROOT/',
+ },
+ },
+}
diff --git a/third_party/python/gyp/test/mac/identical-name/testlib/main.cc b/third_party/python/gyp/test/mac/identical-name/testlib/main.cc
new file mode 100644
index 0000000000..5c2fa9bb6a
--- /dev/null
+++ b/third_party/python/gyp/test/mac/identical-name/testlib/main.cc
@@ -0,0 +1,3 @@
+int main(int argc, char **argv) {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/mac/identical-name/testlib/testlib.gyp b/third_party/python/gyp/test/mac/identical-name/testlib/testlib.gyp
new file mode 100644
index 0000000000..aa8b851004
--- /dev/null
+++ b/third_party/python/gyp/test/mac/identical-name/testlib/testlib.gyp
@@ -0,0 +1,14 @@
+{
+ 'includes': ['../test.gypi'],
+ 'targets': [{
+ 'target_name': 'proxy',
+ 'type': 'static_library',
+ 'sources': ['void.cc'],
+ 'dependencies': ['testlib'],
+ 'export_dependent_settings': ['testlib'],
+ }, {
+ 'target_name': 'testlib',
+ 'type': 'static_library',
+ 'sources': ['main.cc'],
+ }],
+}
diff --git a/third_party/python/gyp/test/mac/identical-name/testlib/void.cc b/third_party/python/gyp/test/mac/identical-name/testlib/void.cc
new file mode 100644
index 0000000000..8e1782da63
--- /dev/null
+++ b/third_party/python/gyp/test/mac/identical-name/testlib/void.cc
@@ -0,0 +1,2 @@
+// Empty file
+
diff --git a/third_party/python/gyp/test/mac/infoplist-process/Info.plist b/third_party/python/gyp/test/mac/infoplist-process/Info.plist
new file mode 100644
index 0000000000..cb65721f43
--- /dev/null
+++ b/third_party/python/gyp/test/mac/infoplist-process/Info.plist
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIconFile</key>
+ <string></string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.${PRODUCT_NAME}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSMinimumSystemVersion</key>
+ <string>${MACOSX_DEPLOYMENT_TARGET}</string>
+ <key>NSMainNibFile</key>
+ <string>MainMenu</string>
+ <key>NSPrincipalClass</key>
+ <string>NSApplication</string>
+ <key>ProcessedKey1</key>
+ <string>PROCESSED_KEY1</string>
+ <key>ProcessedKey2</key>
+ <string>PROCESSED_KEY2</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/infoplist-process/main.c b/third_party/python/gyp/test/mac/infoplist-process/main.c
new file mode 100644
index 0000000000..1bf4b2a11a
--- /dev/null
+++ b/third_party/python/gyp/test/mac/infoplist-process/main.c
@@ -0,0 +1,7 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/mac/infoplist-process/test1.gyp b/third_party/python/gyp/test/mac/infoplist-process/test1.gyp
new file mode 100644
index 0000000000..bc625a968b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/infoplist-process/test1.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'main.c',
+ ],
+ 'configurations': {
+ 'One': {
+ },
+ },
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Info.plist',
+ 'INFOPLIST_PREPROCESS': 'YES',
+ 'INFOPLIST_PREPROCESSOR_DEFINITIONS': 'PROCESSED_KEY1=Foo PROCESSED_KEY2=Bar',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/infoplist-process/test2.gyp b/third_party/python/gyp/test/mac/infoplist-process/test2.gyp
new file mode 100644
index 0000000000..ecfbc9f64c
--- /dev/null
+++ b/third_party/python/gyp/test/mac/infoplist-process/test2.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'main.c',
+ ],
+ 'configurations': {
+ 'Two': {
+ },
+ },
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Info.plist',
+ 'INFOPLIST_PREPROCESS': 'YES',
+ 'INFOPLIST_PREPROCESSOR_DEFINITIONS': 'PROCESSED_KEY1="Foo (Bar)"',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/infoplist-process/test3.gyp b/third_party/python/gyp/test/mac/infoplist-process/test3.gyp
new file mode 100644
index 0000000000..be8fe75a53
--- /dev/null
+++ b/third_party/python/gyp/test/mac/infoplist-process/test3.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test App',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'main.c',
+ ],
+ 'configurations': {
+ 'Three': {
+ },
+ },
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Info.plist',
+ 'INFOPLIST_PREPROCESS': 'NO',
+ 'INFOPLIST_PREPROCESSOR_DEFINITIONS': 'PROCESSED_KEY1=Foo',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/installname/Info.plist b/third_party/python/gyp/test/mac/installname/Info.plist
new file mode 100644
index 0000000000..5e05a5190c
--- /dev/null
+++ b/third_party/python/gyp/test/mac/installname/Info.plist
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIconFile</key>
+ <string></string>
+ <key>CFBundleIdentifier</key>
+ <string>com.yourcompany.${PRODUCT_NAME}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>FMWK</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>NSPrincipalClass</key>
+ <string></string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/installname/file.c b/third_party/python/gyp/test/mac/installname/file.c
new file mode 100644
index 0000000000..a39fce095f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/installname/file.c
@@ -0,0 +1 @@
+int f() { return 0; }
diff --git a/third_party/python/gyp/test/mac/installname/main.c b/third_party/python/gyp/test/mac/installname/main.c
new file mode 100644
index 0000000000..237c8ce181
--- /dev/null
+++ b/third_party/python/gyp/test/mac/installname/main.c
@@ -0,0 +1 @@
+int main() {}
diff --git a/third_party/python/gyp/test/mac/installname/test.gyp b/third_party/python/gyp/test/mac/installname/test.gyp
new file mode 100644
index 0000000000..60c867ff12
--- /dev/null
+++ b/third_party/python/gyp/test/mac/installname/test.gyp
@@ -0,0 +1,93 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'default_installname',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ },
+ {
+ 'target_name': 'default_bundle_installname',
+ 'product_name': 'My Framework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c' ],
+ },
+ {
+ 'target_name': 'explicit_installname',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ 'xcode_settings': {
+ 'LD_DYLIB_INSTALL_NAME': 'Trapped in a dynamiclib factory',
+ },
+ },
+ {
+ 'target_name': 'explicit_installname_base',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ 'xcode_settings': {
+ 'DYLIB_INSTALL_NAME_BASE': '@executable_path/../../..',
+
+ },
+ },
+ {
+ 'target_name': 'explicit_installname_base_bundle',
+ 'product_name': 'My Other Framework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c' ],
+ 'xcode_settings': {
+ 'DYLIB_INSTALL_NAME_BASE': '@executable_path/../../..',
+
+ },
+ },
+ {
+ 'target_name': 'both_base_and_installname',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ 'xcode_settings': {
+ # LD_DYLIB_INSTALL_NAME wins.
+ 'LD_DYLIB_INSTALL_NAME': 'Still trapped in a dynamiclib factory',
+ 'DYLIB_INSTALL_NAME_BASE': '@executable_path/../../..',
+ },
+ },
+ {
+ 'target_name': 'explicit_installname_with_base',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ 'xcode_settings': {
+ 'LD_DYLIB_INSTALL_NAME': '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)',
+ },
+ },
+ {
+ 'target_name': 'explicit_installname_with_explicit_base',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ 'xcode_settings': {
+ 'DYLIB_INSTALL_NAME_BASE': '@executable_path/..',
+ 'LD_DYLIB_INSTALL_NAME': '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)',
+ },
+ },
+ {
+ 'target_name': 'executable',
+ 'type': 'executable',
+ 'sources': [ 'main.c' ],
+ 'xcode_settings': {
+ 'LD_DYLIB_INSTALL_NAME': 'Should be ignored for not shared_lib',
+ },
+ },
+ # Regression test for http://crbug.com/113918
+ {
+ 'target_name': 'install_name_with_info_plist',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c' ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Info.plist',
+ 'LD_DYLIB_INSTALL_NAME': '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/kext/GypKext/GypKext-Info.plist b/third_party/python/gyp/test/mac/kext/GypKext/GypKext-Info.plist
new file mode 100644
index 0000000000..84226099c1
--- /dev/null
+++ b/third_party/python/gyp/test/mac/kext/GypKext/GypKext-Info.plist
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>BuildMachineOSBuild</key>
+ <string>Doesn't matter, will be overwritten</string>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIconFile</key>
+ <string></string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.${PRODUCT_NAME:rfc1034identifier}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>KEXT</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>ause</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSMinimumSystemVersion</key>
+ <string>${MACOSX_DEPLOYMENT_TARGET}</string>
+ <key>OSBundleLibraries</key>
+ <dict>
+ <key>com.apple.kpi.libkern</key>
+ <string>10.0</string>
+ </dict>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/kext/GypKext/GypKext.c b/third_party/python/gyp/test/mac/kext/GypKext/GypKext.c
new file mode 100644
index 0000000000..9b611b0dc5
--- /dev/null
+++ b/third_party/python/gyp/test/mac/kext/GypKext/GypKext.c
@@ -0,0 +1,16 @@
+// Copyright (c) 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <sys/systm.h>
+#include <mach/mach_types.h>
+
+kern_return_t GypKext_start(kmod_info_t* ki, void* d) {
+ printf("GypKext has started.\n");
+ return KERN_SUCCESS;
+}
+
+kern_return_t GypKext_stop(kmod_info_t* ki, void* d) {
+ printf("GypKext has stopped.\n");
+ return KERN_SUCCESS;
+}
diff --git a/third_party/python/gyp/test/mac/kext/kext.gyp b/third_party/python/gyp/test/mac/kext/kext.gyp
new file mode 100644
index 0000000000..5b93087543
--- /dev/null
+++ b/third_party/python/gyp/test/mac/kext/kext.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'gypkext',
+ 'product_name': 'GypKext',
+ 'type': 'mac_kernel_extension',
+ 'sources': [
+ 'GypKext/GypKext.c',
+ ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'GypKext/GypKext-Info.plist',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/ldflags-libtool/file.c b/third_party/python/gyp/test/mac/ldflags-libtool/file.c
new file mode 100644
index 0000000000..56757a701b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/ldflags-libtool/file.c
@@ -0,0 +1 @@
+void f() {}
diff --git a/third_party/python/gyp/test/mac/ldflags-libtool/test.gyp b/third_party/python/gyp/test/mac/ldflags-libtool/test.gyp
new file mode 100644
index 0000000000..4e7aa07106
--- /dev/null
+++ b/third_party/python/gyp/test/mac/ldflags-libtool/test.gyp
@@ -0,0 +1,17 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'ldflags_passed_to_libtool',
+ 'type': 'static_library',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-fblorfen-horf-does-not-exist',
+ ],
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/ldflags/subdirectory/Info.plist b/third_party/python/gyp/test/mac/ldflags/subdirectory/Info.plist
new file mode 100644
index 0000000000..5f5e9abfbb
--- /dev/null
+++ b/third_party/python/gyp/test/mac/ldflags/subdirectory/Info.plist
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/ldflags/subdirectory/file.c b/third_party/python/gyp/test/mac/ldflags/subdirectory/file.c
new file mode 100644
index 0000000000..90c45543bf
--- /dev/null
+++ b/third_party/python/gyp/test/mac/ldflags/subdirectory/file.c
@@ -0,0 +1,2 @@
+void f() {}
+void g() {}
diff --git a/third_party/python/gyp/test/mac/ldflags/subdirectory/symbol_list.def b/third_party/python/gyp/test/mac/ldflags/subdirectory/symbol_list.def
new file mode 100644
index 0000000000..0ab7543b1f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/ldflags/subdirectory/symbol_list.def
@@ -0,0 +1 @@
+_f
diff --git a/third_party/python/gyp/test/mac/ldflags/subdirectory/test.gyp b/third_party/python/gyp/test/mac/ldflags/subdirectory/test.gyp
new file mode 100644
index 0000000000..db00c7465c
--- /dev/null
+++ b/third_party/python/gyp/test/mac/ldflags/subdirectory/test.gyp
@@ -0,0 +1,66 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'raw',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-exported_symbols_list symbol_list.def',
+ '-sectcreate __TEXT __info_plist Info.plist',
+ ],
+ },
+ },
+ # TODO(thakis): This form should ideally be supported, too. (But
+  # -Wl,foo,bar,baz is cleaner so people should use that anyway.)
+ #{
+ # 'target_name': 'raw_sep',
+ # 'type': 'shared_library',
+ # 'sources': [ 'file.c', ],
+ # 'xcode_settings': {
+ # 'OTHER_LDFLAGS': [
+ # '-exported_symbols_list', 'symbol_list.def',
+ # '-sectcreate', '__TEXT', '__info_plist', 'Info.plist',
+ # ],
+ # },
+ #},
+ {
+ 'target_name': 'wl_space',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ # Works because clang passes unknown files on to the linker.
+ '-Wl,-exported_symbols_list symbol_list.def',
+ ],
+ },
+ },
+ # TODO(thakis): This form should ideally be supported, too. (But
+  # -Wl,foo,bar,baz is cleaner so people should use that anyway.)
+ #{
+ # 'target_name': 'wl_space_sep',
+ # 'type': 'shared_library',
+ # 'sources': [ 'file.c', ],
+ # 'xcode_settings': {
+ # 'OTHER_LDFLAGS': [
+ # # Works because clang passes unknown files on to the linker.
+ # '-Wl,-exported_symbols_list', 'symbol_list.def',
+ # ],
+ # },
+ #},
+ {
+ 'target_name': 'wl_comma',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-Wl,-exported_symbols_list,symbol_list.def',
+ '-Wl,-sectcreate,__TEXT,__info_plist,Info.plist',
+ ],
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/libraries/subdir/README.txt b/third_party/python/gyp/test/mac/libraries/subdir/README.txt
new file mode 100644
index 0000000000..4031ded85f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/libraries/subdir/README.txt
@@ -0,0 +1 @@
+Make things live in a subdirectory, to make sure that DEPTH works correctly.
diff --git a/third_party/python/gyp/test/mac/libraries/subdir/hello.cc b/third_party/python/gyp/test/mac/libraries/subdir/hello.cc
new file mode 100644
index 0000000000..a43554c8ca
--- /dev/null
+++ b/third_party/python/gyp/test/mac/libraries/subdir/hello.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <iostream>
+
+int main() {
+ std::cout << "Hello, world!" << std::endl;
+ return 0;
+}
diff --git a/third_party/python/gyp/test/mac/libraries/subdir/mylib.c b/third_party/python/gyp/test/mac/libraries/subdir/mylib.c
new file mode 100644
index 0000000000..e771991e83
--- /dev/null
+++ b/third_party/python/gyp/test/mac/libraries/subdir/mylib.c
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int my_foo(int x) {
+ return x + 1;
+}
diff --git a/third_party/python/gyp/test/mac/libraries/subdir/test.gyp b/third_party/python/gyp/test/mac/libraries/subdir/test.gyp
new file mode 100644
index 0000000000..59fef51017
--- /dev/null
+++ b/third_party/python/gyp/test/mac/libraries/subdir/test.gyp
@@ -0,0 +1,65 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'libraries-test',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.cc',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ 'libcrypto.dylib',
+ ],
+ },
+ },
+ {
+ # This creates a static library and puts it in a nonstandard location for
+ # libraries-search-path-test.
+ 'target_name': 'mylib',
+ 'type': 'static_library',
+ 'sources': [
+ 'mylib.c',
+ ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Make a secret location',
+ 'action': [
+ 'mkdir',
+ '-p',
+ '${SRCROOT}/../secret_location',
+ ],
+ },
+ {
+ 'postbuild_name': 'Copy to secret location, with secret name',
+ 'action': [
+ 'cp',
+ '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
+ '${SRCROOT}/../secret_location/libmysecretlib.a',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'libraries-search-path-test',
+ 'type': 'executable',
+ 'dependencies': [ 'mylib' ],
+ 'sources': [
+ 'hello.cc',
+ ],
+ 'xcode_settings': {
+ 'LIBRARY_SEARCH_PATHS': [
+ '<(DEPTH)/secret_location',
+ ],
+ },
+ 'link_settings': {
+ 'libraries': [
+ 'libmysecretlib.a',
+ ],
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/libtool-zero/mylib.c b/third_party/python/gyp/test/mac/libtool-zero/mylib.c
new file mode 100644
index 0000000000..b26d61bd6b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/libtool-zero/mylib.c
@@ -0,0 +1,7 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int my_foo(int x) {
+ return x + 1;
+}
diff --git a/third_party/python/gyp/test/mac/libtool-zero/test.gyp b/third_party/python/gyp/test/mac/libtool-zero/test.gyp
new file mode 100644
index 0000000000..0d6ee5535e
--- /dev/null
+++ b/third_party/python/gyp/test/mac/libtool-zero/test.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'mylib',
+ 'type': 'static_library',
+ 'sources': [
+ 'mylib.c',
+ ],
+ 'xcode_settings': {
+ 'ARCHS': [ 'i386', 'x86_64' ],
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/loadable-module-bundle-product-extension/src.cc b/third_party/python/gyp/test/mac/loadable-module-bundle-product-extension/src.cc
new file mode 100644
index 0000000000..3d878e9697
--- /dev/null
+++ b/third_party/python/gyp/test/mac/loadable-module-bundle-product-extension/src.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int test() {
+ return 1337;
+}
diff --git a/third_party/python/gyp/test/mac/loadable-module-bundle-product-extension/test.gyp b/third_party/python/gyp/test/mac/loadable-module-bundle-product-extension/test.gyp
new file mode 100644
index 0000000000..684a2c02aa
--- /dev/null
+++ b/third_party/python/gyp/test/mac/loadable-module-bundle-product-extension/test.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [{
+ 'target_name': 'test',
+ 'type': 'none',
+ 'dependencies': ['child_one', 'child_two'],
+ }, {
+ 'target_name': 'child_one',
+ 'product_name': 'Collide',
+ 'product_extension': 'bar',
+ 'sources': ['src.cc'],
+ 'type': 'loadable_module',
+ 'mac_bundle': 1,
+ }, {
+ 'target_name': 'child_two',
+ 'product_name': 'Collide',
+ 'product_extension': 'foo',
+ 'sources': ['src.cc'],
+ 'type': 'loadable_module',
+ 'mac_bundle': 1,
+ }],
+}
diff --git a/third_party/python/gyp/test/mac/loadable-module/Info.plist b/third_party/python/gyp/test/mac/loadable-module/Info.plist
new file mode 100644
index 0000000000..f6607aebd9
--- /dev/null
+++ b/third_party/python/gyp/test/mac/loadable-module/Info.plist
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.test_loadable_module</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>BRPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>CFPlugInDynamicRegisterFunction</key>
+ <string></string>
+ <key>CFPlugInDynamicRegistration</key>
+ <string>NO</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/loadable-module/module.c b/third_party/python/gyp/test/mac/loadable-module/module.c
new file mode 100644
index 0000000000..9584538347
--- /dev/null
+++ b/third_party/python/gyp/test/mac/loadable-module/module.c
@@ -0,0 +1,11 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int SuperFly() {
+ return 42;
+}
+
+const char* SuperFoo() {
+ return "Hello World";
+}
diff --git a/third_party/python/gyp/test/mac/loadable-module/test.gyp b/third_party/python/gyp/test/mac/loadable-module/test.gyp
new file mode 100644
index 0000000000..3c8a5309d2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/loadable-module/test.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_loadable_module',
+ 'type': 'loadable_module',
+ 'mac_bundle': 1,
+ 'sources': [ 'module.c' ],
+ 'product_extension': 'plugin',
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Info.plist',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/lto/asmfile.S b/third_party/python/gyp/test/mac/lto/asmfile.S
new file mode 100644
index 0000000000..ea23759a39
--- /dev/null
+++ b/third_party/python/gyp/test/mac/lto/asmfile.S
@@ -0,0 +1,2 @@
+.globl _asfun
+ret
diff --git a/third_party/python/gyp/test/mac/lto/ccfile.cc b/third_party/python/gyp/test/mac/lto/ccfile.cc
new file mode 100644
index 0000000000..2503afd7b1
--- /dev/null
+++ b/third_party/python/gyp/test/mac/lto/ccfile.cc
@@ -0,0 +1 @@
+void ccfun() {}
diff --git a/third_party/python/gyp/test/mac/lto/cfile.c b/third_party/python/gyp/test/mac/lto/cfile.c
new file mode 100644
index 0000000000..d02ef4b8d6
--- /dev/null
+++ b/third_party/python/gyp/test/mac/lto/cfile.c
@@ -0,0 +1 @@
+void cfun() {}
diff --git a/third_party/python/gyp/test/mac/lto/mfile.m b/third_party/python/gyp/test/mac/lto/mfile.m
new file mode 100644
index 0000000000..85b7d93afe
--- /dev/null
+++ b/third_party/python/gyp/test/mac/lto/mfile.m
@@ -0,0 +1 @@
+void mfun() {}
diff --git a/third_party/python/gyp/test/mac/lto/mmfile.mm b/third_party/python/gyp/test/mac/lto/mmfile.mm
new file mode 100644
index 0000000000..beaa3595f8
--- /dev/null
+++ b/third_party/python/gyp/test/mac/lto/mmfile.mm
@@ -0,0 +1 @@
+void mmfun() {}
diff --git a/third_party/python/gyp/test/mac/lto/test.gyp b/third_party/python/gyp/test/mac/lto/test.gyp
new file mode 100644
index 0000000000..0a8e85183d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/lto/test.gyp
@@ -0,0 +1,35 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'lto',
+ 'type': 'shared_library',
+ 'sources': [
+ 'cfile.c',
+ 'mfile.m',
+ 'ccfile.cc',
+ 'mmfile.mm',
+ 'asmfile.S',
+ ],
+ 'xcode_settings': {
+ 'LLVM_LTO': 'YES',
+ },
+ },
+ {
+ 'target_name': 'lto_static',
+ 'type': 'static_library',
+ 'sources': [
+ 'cfile.c',
+ 'mfile.m',
+ 'ccfile.cc',
+ 'mmfile.mm',
+ 'asmfile.S',
+ ],
+ 'xcode_settings': {
+ 'LLVM_LTO': 'YES',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/missing-cfbundlesignature/Info.plist b/third_party/python/gyp/test/mac/missing-cfbundlesignature/Info.plist
new file mode 100644
index 0000000000..0c31674884
--- /dev/null
+++ b/third_party/python/gyp/test/mac/missing-cfbundlesignature/Info.plist
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/missing-cfbundlesignature/Other-Info.plist b/third_party/python/gyp/test/mac/missing-cfbundlesignature/Other-Info.plist
new file mode 100644
index 0000000000..47095281c8
--- /dev/null
+++ b/third_party/python/gyp/test/mac/missing-cfbundlesignature/Other-Info.plist
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleSignature</key>
+ <string>F</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/missing-cfbundlesignature/Third-Info.plist b/third_party/python/gyp/test/mac/missing-cfbundlesignature/Third-Info.plist
new file mode 100644
index 0000000000..5b61fe2664
--- /dev/null
+++ b/third_party/python/gyp/test/mac/missing-cfbundlesignature/Third-Info.plist
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleSignature</key>
+ <string>some really long string</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/missing-cfbundlesignature/file.c b/third_party/python/gyp/test/mac/missing-cfbundlesignature/file.c
new file mode 100644
index 0000000000..237c8ce181
--- /dev/null
+++ b/third_party/python/gyp/test/mac/missing-cfbundlesignature/file.c
@@ -0,0 +1 @@
+int main() {}
diff --git a/third_party/python/gyp/test/mac/missing-cfbundlesignature/test.gyp b/third_party/python/gyp/test/mac/missing-cfbundlesignature/test.gyp
new file mode 100644
index 0000000000..b50cc2791a
--- /dev/null
+++ b/third_party/python/gyp/test/mac/missing-cfbundlesignature/test.gyp
@@ -0,0 +1,34 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'mytarget',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Info.plist',
+ },
+ },
+ {
+ 'target_name': 'myothertarget',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Other-Info.plist',
+ },
+ },
+ {
+ 'target_name': 'thirdtarget',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Third-Info.plist',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/non-strs-flattened-to-env/Info.plist b/third_party/python/gyp/test/mac/non-strs-flattened-to-env/Info.plist
new file mode 100644
index 0000000000..11fc4b660d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/non-strs-flattened-to-env/Info.plist
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <!-- Not a valid plist file since it's missing so much. That's fine. -->
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>My Variable</key>
+ <string>${MY_VAR}</string>
+ <key>CFlags</key>
+ <string>${OTHER_CFLAGS}</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/non-strs-flattened-to-env/main.c b/third_party/python/gyp/test/mac/non-strs-flattened-to-env/main.c
new file mode 100644
index 0000000000..1711567ef5
--- /dev/null
+++ b/third_party/python/gyp/test/mac/non-strs-flattened-to-env/main.c
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/mac/non-strs-flattened-to-env/test.gyp b/third_party/python/gyp/test/mac/non-strs-flattened-to-env/test.gyp
new file mode 100644
index 0000000000..aaf821c925
--- /dev/null
+++ b/third_party/python/gyp/test/mac/non-strs-flattened-to-env/test.gyp
@@ -0,0 +1,27 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [ 'main.c', ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Info.plist',
+ 'MY_VAR': 'some expansion',
+ 'OTHER_CFLAGS': [
+ # Just some (more than one) random flags.
+ '-fstack-protector-all',
+ '-fno-strict-aliasing',
+ '-DS="A Space"', # Would normally be in 'defines'
+ ],
+ },
+ 'include_dirs': [
+ '$(SDKROOT)/usr/include/libxml2',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/objc-arc/c-file.c b/third_party/python/gyp/test/mac/objc-arc/c-file.c
new file mode 100644
index 0000000000..653613206e
--- /dev/null
+++ b/third_party/python/gyp/test/mac/objc-arc/c-file.c
@@ -0,0 +1,6 @@
+#if __has_feature(objc_arc)
+#error "C files shouldn't be ARC'd!"
+#endif
+
+void c_fun() {}
+
diff --git a/third_party/python/gyp/test/mac/objc-arc/cc-file.cc b/third_party/python/gyp/test/mac/objc-arc/cc-file.cc
new file mode 100644
index 0000000000..95e14ea3e0
--- /dev/null
+++ b/third_party/python/gyp/test/mac/objc-arc/cc-file.cc
@@ -0,0 +1,5 @@
+#if __has_feature(objc_arc)
+#error "C++ files shouldn't be ARC'd!"
+#endif
+
+void cc_fun() {}
diff --git a/third_party/python/gyp/test/mac/objc-arc/m-file-no-arc.m b/third_party/python/gyp/test/mac/objc-arc/m-file-no-arc.m
new file mode 100644
index 0000000000..8ffaabfaae
--- /dev/null
+++ b/third_party/python/gyp/test/mac/objc-arc/m-file-no-arc.m
@@ -0,0 +1,5 @@
+#if __has_feature(objc_arc)
+#error "ObjC files without CLANG_ENABLE_OBJC_ARC should not be ARC'd!"
+#endif
+
+void m_fun() {}
diff --git a/third_party/python/gyp/test/mac/objc-arc/m-file.m b/third_party/python/gyp/test/mac/objc-arc/m-file.m
new file mode 100644
index 0000000000..9689b1f8e5
--- /dev/null
+++ b/third_party/python/gyp/test/mac/objc-arc/m-file.m
@@ -0,0 +1,5 @@
+#if !__has_feature(objc_arc)
+#error "ObjC files with CLANG_ENABLE_OBJC_ARC should be ARC'd!"
+#endif
+
+void m_fun() {}
diff --git a/third_party/python/gyp/test/mac/objc-arc/mm-file-no-arc.mm b/third_party/python/gyp/test/mac/objc-arc/mm-file-no-arc.mm
new file mode 100644
index 0000000000..0dac539494
--- /dev/null
+++ b/third_party/python/gyp/test/mac/objc-arc/mm-file-no-arc.mm
@@ -0,0 +1,5 @@
+#if __has_feature(objc_arc)
+#error "ObjC++ files without CLANG_ENABLE_OBJC_ARC should not be ARC'd!"
+#endif
+
+void mm_fun() {}
diff --git a/third_party/python/gyp/test/mac/objc-arc/mm-file.mm b/third_party/python/gyp/test/mac/objc-arc/mm-file.mm
new file mode 100644
index 0000000000..9467e96563
--- /dev/null
+++ b/third_party/python/gyp/test/mac/objc-arc/mm-file.mm
@@ -0,0 +1,5 @@
+#if !__has_feature(objc_arc)
+#error "ObjC++ files with CLANG_ENABLE_OBJC_ARC should be ARC'd!"
+#endif
+
+void mm_fun() {}
diff --git a/third_party/python/gyp/test/mac/objc-arc/test.gyp b/third_party/python/gyp/test/mac/objc-arc/test.gyp
new file mode 100644
index 0000000000..59cf0e29ce
--- /dev/null
+++ b/third_party/python/gyp/test/mac/objc-arc/test.gyp
@@ -0,0 +1,45 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'make_global_settings': [
+ ['CC', '/usr/bin/clang'],
+ ['CXX', '/usr/bin/clang++'],
+ ],
+
+ 'targets': [
+ {
+ 'target_name': 'arc_enabled',
+ 'type': 'static_library',
+ 'sources': [
+ 'c-file.c',
+ 'cc-file.cc',
+ 'm-file.m',
+ 'mm-file.mm',
+ ],
+ 'xcode_settings': {
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'MACOSX_DEPLOYMENT_TARGET': '10.6',
+ 'ARCHS': [ 'x86_64' ], # For the non-fragile objc ABI.
+ 'CLANG_ENABLE_OBJC_ARC': 'YES',
+ },
+ },
+
+ {
+ 'target_name': 'arc_disabled',
+ 'type': 'static_library',
+ 'sources': [
+ 'c-file.c',
+ 'cc-file.cc',
+ 'm-file-no-arc.m',
+ 'mm-file-no-arc.mm',
+ ],
+ 'xcode_settings': {
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'MACOSX_DEPLOYMENT_TARGET': '10.6',
+ 'ARCHS': [ 'x86_64' ], # For the non-fragile objc ABI.
+ },
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/mac/objc-gc/c-file.c b/third_party/python/gyp/test/mac/objc-gc/c-file.c
new file mode 100644
index 0000000000..2855a00eaa
--- /dev/null
+++ b/third_party/python/gyp/test/mac/objc-gc/c-file.c
@@ -0,0 +1 @@
+void c_fun() {}
diff --git a/third_party/python/gyp/test/mac/objc-gc/cc-file.cc b/third_party/python/gyp/test/mac/objc-gc/cc-file.cc
new file mode 100644
index 0000000000..71e47a0126
--- /dev/null
+++ b/third_party/python/gyp/test/mac/objc-gc/cc-file.cc
@@ -0,0 +1 @@
+void cc_fun() {}
diff --git a/third_party/python/gyp/test/mac/objc-gc/main.m b/third_party/python/gyp/test/mac/objc-gc/main.m
new file mode 100644
index 0000000000..1a87f8e70f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/objc-gc/main.m
@@ -0,0 +1,6 @@
+#import <Foundation/Foundation.h>
+
+int main() {
+ printf("gc on: %d\n", [NSGarbageCollector defaultCollector] != NULL);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/mac/objc-gc/needs-gc-mm.mm b/third_party/python/gyp/test/mac/objc-gc/needs-gc-mm.mm
new file mode 100644
index 0000000000..fc3fee9f34
--- /dev/null
+++ b/third_party/python/gyp/test/mac/objc-gc/needs-gc-mm.mm
@@ -0,0 +1 @@
+void objcpp_fun() { }
diff --git a/third_party/python/gyp/test/mac/objc-gc/needs-gc.m b/third_party/python/gyp/test/mac/objc-gc/needs-gc.m
new file mode 100644
index 0000000000..ca77976b1d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/objc-gc/needs-gc.m
@@ -0,0 +1 @@
+void objc_fun() { }
diff --git a/third_party/python/gyp/test/mac/objc-gc/test.gyp b/third_party/python/gyp/test/mac/objc-gc/test.gyp
new file mode 100644
index 0000000000..4d827c1b39
--- /dev/null
+++ b/third_party/python/gyp/test/mac/objc-gc/test.gyp
@@ -0,0 +1,102 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+    # For some reason, when a static_library target is built with gc=required
+    # and then linked into an executable that doesn't use gc, the linker
+    # doesn't complain. For shared_library targets it does, so use that.
+ {
+ 'target_name': 'no_gc_lib',
+ 'type': 'shared_library',
+ 'sources': [
+ 'c-file.c',
+ 'cc-file.cc',
+ 'needs-gc-mm.mm',
+ 'needs-gc.m',
+ ],
+ },
+ {
+ 'target_name': 'gc_lib',
+ 'type': 'shared_library',
+ 'sources': [
+ 'c-file.c',
+ 'cc-file.cc',
+ 'needs-gc-mm.mm',
+ 'needs-gc.m',
+ ],
+ 'xcode_settings': {
+ 'GCC_ENABLE_OBJC_GC': 'supported',
+ },
+ },
+ {
+ 'target_name': 'gc_req_lib',
+ 'type': 'shared_library',
+ 'sources': [
+ 'c-file.c',
+ 'cc-file.cc',
+ 'needs-gc-mm.mm',
+ 'needs-gc.m',
+ ],
+ 'xcode_settings': {
+ 'GCC_ENABLE_OBJC_GC': 'required',
+ },
+ },
+
+ {
+ 'target_name': 'gc_exe_fails',
+ 'type': 'executable',
+ 'sources': [ 'main.m' ],
+ 'dependencies': [ 'no_gc_lib' ],
+ 'xcode_settings': {
+ 'GCC_ENABLE_OBJC_GC': 'required',
+ },
+ 'libraries': [ 'Foundation.framework' ],
+ },
+ {
+ 'target_name': 'gc_req_exe',
+ 'type': 'executable',
+ 'sources': [ 'main.m' ],
+ 'dependencies': [ 'gc_lib' ],
+ 'xcode_settings': {
+ 'GCC_ENABLE_OBJC_GC': 'required',
+ },
+ 'libraries': [ 'Foundation.framework' ],
+ },
+ {
+ 'target_name': 'gc_exe_req_lib',
+ 'type': 'executable',
+ 'sources': [ 'main.m' ],
+ 'dependencies': [ 'gc_req_lib' ],
+ 'xcode_settings': {
+ 'GCC_ENABLE_OBJC_GC': 'supported',
+ },
+ 'libraries': [ 'Foundation.framework' ],
+ },
+ {
+ 'target_name': 'gc_exe',
+ 'type': 'executable',
+ 'sources': [ 'main.m' ],
+ 'dependencies': [ 'gc_lib' ],
+ 'xcode_settings': {
+ 'GCC_ENABLE_OBJC_GC': 'supported',
+ },
+ 'libraries': [ 'Foundation.framework' ],
+ },
+ {
+ 'target_name': 'gc_off_exe_req_lib',
+ 'type': 'executable',
+ 'sources': [ 'main.m' ],
+ 'dependencies': [ 'gc_req_lib' ],
+ 'libraries': [ 'Foundation.framework' ],
+ },
+ {
+ 'target_name': 'gc_off_exe',
+ 'type': 'executable',
+ 'sources': [ 'main.m' ],
+ 'dependencies': [ 'gc_lib' ],
+ 'libraries': [ 'Foundation.framework' ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/mac/postbuild-copy-bundle/Framework-Info.plist b/third_party/python/gyp/test/mac/postbuild-copy-bundle/Framework-Info.plist
new file mode 100644
index 0000000000..ec36829c08
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-copy-bundle/Framework-Info.plist
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIconFile</key>
+ <string></string>
+ <key>CFBundleIdentifier</key>
+ <string>com.yourcompany.${PRODUCT_NAME}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>FMWK</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>NSPrincipalClass</key>
+ <string></string>
+ <key>RandomKey</key>
+ <string>RandomValue</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/postbuild-copy-bundle/TestApp-Info.plist b/third_party/python/gyp/test/mac/postbuild-copy-bundle/TestApp-Info.plist
new file mode 100644
index 0000000000..98fd515200
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-copy-bundle/TestApp-Info.plist
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIconFile</key>
+ <string></string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.${PRODUCT_NAME}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSMinimumSystemVersion</key>
+ <string>${MACOSX_DEPLOYMENT_TARGET}</string>
+ <key>NSMainNibFile</key>
+ <string>MainMenu</string>
+ <key>NSPrincipalClass</key>
+ <string>NSApplication</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/postbuild-copy-bundle/copied.txt b/third_party/python/gyp/test/mac/postbuild-copy-bundle/copied.txt
new file mode 100644
index 0000000000..178413886a
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-copy-bundle/copied.txt
@@ -0,0 +1 @@
+old copied file
diff --git a/third_party/python/gyp/test/mac/postbuild-copy-bundle/empty.c b/third_party/python/gyp/test/mac/postbuild-copy-bundle/empty.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-copy-bundle/empty.c
diff --git a/third_party/python/gyp/test/mac/postbuild-copy-bundle/main.c b/third_party/python/gyp/test/mac/postbuild-copy-bundle/main.c
new file mode 100644
index 0000000000..21c1963526
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-copy-bundle/main.c
@@ -0,0 +1,4 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+int main() {}
diff --git a/third_party/python/gyp/test/mac/postbuild-copy-bundle/postbuild-copy-framework.sh b/third_party/python/gyp/test/mac/postbuild-copy-bundle/postbuild-copy-framework.sh
new file mode 100755
index 0000000000..930fec6612
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-copy-bundle/postbuild-copy-framework.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+rsync -acC --delete "$1" "$2"
diff --git a/third_party/python/gyp/test/mac/postbuild-copy-bundle/resource_file.sb b/third_party/python/gyp/test/mac/postbuild-copy-bundle/resource_file.sb
new file mode 100644
index 0000000000..42057fa235
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-copy-bundle/resource_file.sb
@@ -0,0 +1 @@
+This is included in the framework bundle.
diff --git a/third_party/python/gyp/test/mac/postbuild-copy-bundle/test.gyp b/third_party/python/gyp/test/mac/postbuild-copy-bundle/test.gyp
new file mode 100644
index 0000000000..a03e6432f2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-copy-bundle/test.gyp
@@ -0,0 +1,49 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'test_bundle',
+ 'product_name': 'My Framework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'empty.c', ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Framework-Info.plist',
+ },
+ 'mac_bundle_resources': [
+ 'resource_file.sb',
+ ],
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/$(CONTENTS_FOLDER_PATH)/Libraries',
+ 'files': [ 'copied.txt' ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test App',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'dependencies': [
+ 'test_bundle',
+ ],
+ 'sources': [ 'main.c', ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'TestApp-Info.plist',
+ },
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Copy dependent framework into app',
+ 'action': [
+ './postbuild-copy-framework.sh',
+ '${BUILT_PRODUCTS_DIR}/My Framework.framework',
+ '${BUILT_PRODUCTS_DIR}/${CONTENTS_FOLDER_PATH}/',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/postbuild-defaults/Info.plist b/third_party/python/gyp/test/mac/postbuild-defaults/Info.plist
new file mode 100644
index 0000000000..d3f54d76cd
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-defaults/Info.plist
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <!-- Not a valid plist file since it's missing so much. That's fine. -->
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/postbuild-defaults/main.c b/third_party/python/gyp/test/mac/postbuild-defaults/main.c
new file mode 100644
index 0000000000..1711567ef5
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-defaults/main.c
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/mac/postbuild-defaults/postbuild-defaults.sh b/third_party/python/gyp/test/mac/postbuild-defaults/postbuild-defaults.sh
new file mode 100755
index 0000000000..56af2a8329
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-defaults/postbuild-defaults.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+# This is the built Info.plist in the output directory.
+PLIST="${BUILT_PRODUCTS_DIR}"/Test.app/Contents/Info # No trailing .plist
+echo $(defaults read "${PLIST}" "CFBundleName") > "${BUILT_PRODUCTS_DIR}/result"
+
+# This is the source Info.plist next to this script file.
+PLIST="${SRCROOT}"/Info # No trailing .plist
+echo $(defaults read "${PLIST}" "CFBundleName") \
+ >> "${BUILT_PRODUCTS_DIR}/result"
diff --git a/third_party/python/gyp/test/mac/postbuild-defaults/test.gyp b/third_party/python/gyp/test/mac/postbuild-defaults/test.gyp
new file mode 100644
index 0000000000..be0a075efc
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-defaults/test.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [ 'main.c', ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Info.plist',
+ },
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Postbuild that calls defaults',
+ 'action': [
+ './postbuild-defaults.sh',
+ '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/postbuild-fail/file.c b/third_party/python/gyp/test/mac/postbuild-fail/file.c
new file mode 100644
index 0000000000..91695b10c6
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-fail/file.c
@@ -0,0 +1,6 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// That's right, this is copyrighted.
+void f() {}
diff --git a/third_party/python/gyp/test/mac/postbuild-fail/postbuild-fail.sh b/third_party/python/gyp/test/mac/postbuild-fail/postbuild-fail.sh
new file mode 100755
index 0000000000..dc1a60d987
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-fail/postbuild-fail.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/bash
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+exit 1
diff --git a/third_party/python/gyp/test/mac/postbuild-fail/test.gyp b/third_party/python/gyp/test/mac/postbuild-fail/test.gyp
new file mode 100644
index 0000000000..e63283db03
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-fail/test.gyp
@@ -0,0 +1,38 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'nonbundle',
+ 'type': 'static_library',
+ 'sources': [ 'file.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Postbuild Fail',
+ 'action': [ './postbuild-fail.sh', ],
+ },
+ {
+ 'postbuild_name': 'Runs after failing postbuild',
+ 'action': [ './touch-static.sh', ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'bundle',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Postbuild Fail',
+ 'action': [ './postbuild-fail.sh', ],
+ },
+ {
+ 'postbuild_name': 'Runs after failing postbuild',
+ 'action': [ './touch-dynamic.sh', ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/postbuild-fail/touch-dynamic.sh b/third_party/python/gyp/test/mac/postbuild-fail/touch-dynamic.sh
new file mode 100755
index 0000000000..a388a64102
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-fail/touch-dynamic.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+touch "${BUILT_PRODUCTS_DIR}/dynamic_touch"
diff --git a/third_party/python/gyp/test/mac/postbuild-fail/touch-static.sh b/third_party/python/gyp/test/mac/postbuild-fail/touch-static.sh
new file mode 100755
index 0000000000..97ecaa6868
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-fail/touch-static.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+touch "${BUILT_PRODUCTS_DIR}/static_touch"
diff --git a/third_party/python/gyp/test/mac/postbuild-multiple-configurations/main.c b/third_party/python/gyp/test/mac/postbuild-multiple-configurations/main.c
new file mode 100644
index 0000000000..21c1963526
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-multiple-configurations/main.c
@@ -0,0 +1,4 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+int main() {}
diff --git a/third_party/python/gyp/test/mac/postbuild-multiple-configurations/postbuild-touch-file.sh b/third_party/python/gyp/test/mac/postbuild-multiple-configurations/postbuild-touch-file.sh
new file mode 100755
index 0000000000..b6170cf7a7
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-multiple-configurations/postbuild-touch-file.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+touch "${BUILT_PRODUCTS_DIR}/postbuild-file"
diff --git a/third_party/python/gyp/test/mac/postbuild-multiple-configurations/test.gyp b/third_party/python/gyp/test/mac/postbuild-multiple-configurations/test.gyp
new file mode 100644
index 0000000000..c350b20d68
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-multiple-configurations/test.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'target_defaults': {
+ 'configurations': {
+ 'Debug': {},
+ 'Release': {},
+ },
+ },
+ 'targets': [
+ {
+ 'target_name': 'random_target',
+ 'type': 'executable',
+ 'sources': [ 'main.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Touch a file.',
+ 'action': [
+ './postbuild-touch-file.sh',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/postbuild-static-library/empty.c b/third_party/python/gyp/test/mac/postbuild-static-library/empty.c
new file mode 100644
index 0000000000..9554336c0c
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-static-library/empty.c
@@ -0,0 +1,4 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+void f() {}
diff --git a/third_party/python/gyp/test/mac/postbuild-static-library/postbuild-touch-file.sh b/third_party/python/gyp/test/mac/postbuild-static-library/postbuild-touch-file.sh
new file mode 100755
index 0000000000..37de4de4f6
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-static-library/postbuild-touch-file.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+touch "${BUILT_PRODUCTS_DIR}/$1"
diff --git a/third_party/python/gyp/test/mac/postbuild-static-library/test.gyp b/third_party/python/gyp/test/mac/postbuild-static-library/test.gyp
new file mode 100644
index 0000000000..9ef55a0afa
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuild-static-library/test.gyp
@@ -0,0 +1,34 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'my_lib',
+ 'type': 'static_library',
+ 'sources': [ 'empty.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Postbuild that touches a file',
+ 'action': [
+ './postbuild-touch-file.sh', 'postbuild-file'
+ ],
+ },
+ ],
+ },
+
+ {
+ 'target_name': 'my_sourceless_lib',
+ 'type': 'static_library',
+ 'dependencies': [ 'my_lib' ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Postbuild that touches a file',
+ 'action': [
+ './postbuild-touch-file.sh', 'postbuild-file-sourceless'
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/postbuilds/copy.sh b/third_party/python/gyp/test/mac/postbuilds/copy.sh
new file mode 100755
index 0000000000..ecad0381db
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuilds/copy.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+cp "$@"
diff --git a/third_party/python/gyp/test/mac/postbuilds/file.c b/third_party/python/gyp/test/mac/postbuilds/file.c
new file mode 100644
index 0000000000..653e71ff7e
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuilds/file.c
@@ -0,0 +1,4 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+void f() {}
diff --git a/third_party/python/gyp/test/mac/postbuilds/file_g.c b/third_party/python/gyp/test/mac/postbuilds/file_g.c
new file mode 100644
index 0000000000..0f7849d208
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuilds/file_g.c
@@ -0,0 +1,4 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+void g() {}
diff --git a/third_party/python/gyp/test/mac/postbuilds/file_h.c b/third_party/python/gyp/test/mac/postbuilds/file_h.c
new file mode 100644
index 0000000000..521d1f4d56
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuilds/file_h.c
@@ -0,0 +1,4 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+void h() {}
diff --git a/third_party/python/gyp/test/mac/postbuilds/script/shared_library_postbuild.sh b/third_party/python/gyp/test/mac/postbuilds/script/shared_library_postbuild.sh
new file mode 100755
index 0000000000..c623c8bf21
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuilds/script/shared_library_postbuild.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+lib="${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
+nm ${lib} > /dev/null # Just make sure this works.
+
+pattern="${1}"
+
+if [ $pattern != "a|b" ]; then
+ echo "Parameter quoting is broken"
+ exit 1
+fi
+
+if [ "${2}" != "arg with spaces" ]; then
+ echo "Parameter space escaping is broken"
+ exit 1
+fi
+
+touch "${lib}"_touch
diff --git a/third_party/python/gyp/test/mac/postbuilds/script/static_library_postbuild.sh b/third_party/python/gyp/test/mac/postbuilds/script/static_library_postbuild.sh
new file mode 100755
index 0000000000..2bf09b34e1
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuilds/script/static_library_postbuild.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+lib="${BUILT_PRODUCTS_DIR}/${FULL_PRODUCT_NAME}"
+nm ${lib} > /dev/null # Just make sure this works.
+
+pattern="${1}"
+
+if [ $pattern != "a|b" ]; then
+ echo "Parameter quote escaping is broken"
+ exit 1
+fi
+
+if [ "${2}" != "arg with spaces" ]; then
+ echo "Parameter space escaping is broken"
+ exit 1
+fi
+
+touch "${lib}"_touch.a
diff --git a/third_party/python/gyp/test/mac/postbuilds/subdirectory/copied_file.txt b/third_party/python/gyp/test/mac/postbuilds/subdirectory/copied_file.txt
new file mode 100644
index 0000000000..a634f85b6c
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuilds/subdirectory/copied_file.txt
@@ -0,0 +1 @@
+This file should be copied to the products dir.
diff --git a/third_party/python/gyp/test/mac/postbuilds/subdirectory/nested_target.gyp b/third_party/python/gyp/test/mac/postbuilds/subdirectory/nested_target.gyp
new file mode 100644
index 0000000000..6d4f2395e3
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuilds/subdirectory/nested_target.gyp
@@ -0,0 +1,53 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'nest_el',
+ 'type': 'static_library',
+ 'sources': [ '../file_g.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Static library postbuild',
+ 'variables': {
+ 'some_regex': 'a|b',
+ },
+ 'action': [
+ '../script/static_library_postbuild.sh',
+ '<(some_regex)',
+ 'arg with spaces',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'nest_dyna',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ '../file_h.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Dynamic library postbuild',
+ 'variables': {
+ 'some_regex': 'a|b',
+ },
+ 'action': [
+ '../script/shared_library_postbuild.sh',
+ '<(some_regex)',
+ 'arg with spaces',
+ ],
+ },
+ {
+ 'postbuild_name': 'Test paths relative to gyp file',
+ 'action': [
+ '../copy.sh',
+ './copied_file.txt',
+ '${BUILT_PRODUCTS_DIR}/copied_file_2.txt',
+ ],
+ },
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/mac/postbuilds/test.gyp b/third_party/python/gyp/test/mac/postbuilds/test.gyp
new file mode 100644
index 0000000000..7c0b523f86
--- /dev/null
+++ b/third_party/python/gyp/test/mac/postbuilds/test.gyp
@@ -0,0 +1,93 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'el',
+ 'type': 'static_library',
+ 'sources': [ 'file.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Static library postbuild',
+ 'variables': {
+ 'some_regex': 'a|b',
+ },
+ 'action': [
+ 'script/static_library_postbuild.sh',
+ '<(some_regex)',
+ 'arg with spaces',
+ ],
+ },
+ {
+ 'postbuild_name': 'Test variable in gyp file',
+ 'action': [
+ 'cp',
+ '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
+ '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}_gyp_touch.a',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'dyna',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c', ],
+ 'dependencies': [
+ 'subdirectory/nested_target.gyp:nest_dyna',
+ 'subdirectory/nested_target.gyp:nest_el',
+ ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Dynamic library postbuild',
+ 'variables': {
+ 'some_regex': 'a|b',
+ },
+ 'action': [
+ 'script/shared_library_postbuild.sh',
+ '<(some_regex)',
+ 'arg with spaces',
+ ],
+ },
+ {
+ 'postbuild_name': 'Test variable in gyp file',
+ 'action': [
+ 'cp',
+ '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
+ '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}_gyp_touch',
+ ],
+ },
+ {
+ 'postbuild_name': 'Test paths relative to gyp file',
+ 'action': [
+ './copy.sh',
+ 'subdirectory/copied_file.txt',
+ '${BUILT_PRODUCTS_DIR}',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'dyna_standalone',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Test variable in gyp file',
+ 'action': [
+ 'cp',
+ '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
+ '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}_gyp_touch.dylib',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'EmptyBundle',
+ 'product_extension': 'bundle',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/prefixheader/file.c b/third_party/python/gyp/test/mac/prefixheader/file.c
new file mode 100644
index 0000000000..d0b39d1f6d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/prefixheader/file.c
@@ -0,0 +1 @@
+MyInt f() { return 0; }
diff --git a/third_party/python/gyp/test/mac/prefixheader/file.cc b/third_party/python/gyp/test/mac/prefixheader/file.cc
new file mode 100644
index 0000000000..d0b39d1f6d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/prefixheader/file.cc
@@ -0,0 +1 @@
+MyInt f() { return 0; }
diff --git a/third_party/python/gyp/test/mac/prefixheader/file.m b/third_party/python/gyp/test/mac/prefixheader/file.m
new file mode 100644
index 0000000000..d0b39d1f6d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/prefixheader/file.m
@@ -0,0 +1 @@
+MyInt f() { return 0; }
diff --git a/third_party/python/gyp/test/mac/prefixheader/file.mm b/third_party/python/gyp/test/mac/prefixheader/file.mm
new file mode 100644
index 0000000000..d0b39d1f6d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/prefixheader/file.mm
@@ -0,0 +1 @@
+MyInt f() { return 0; }
diff --git a/third_party/python/gyp/test/mac/prefixheader/header.h b/third_party/python/gyp/test/mac/prefixheader/header.h
new file mode 100644
index 0000000000..0716e500c5
--- /dev/null
+++ b/third_party/python/gyp/test/mac/prefixheader/header.h
@@ -0,0 +1 @@
+typedef int MyInt;
diff --git a/third_party/python/gyp/test/mac/prefixheader/test.gyp b/third_party/python/gyp/test/mac/prefixheader/test.gyp
new file mode 100644
index 0000000000..7e6b1af807
--- /dev/null
+++ b/third_party/python/gyp/test/mac/prefixheader/test.gyp
@@ -0,0 +1,82 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'prefix_header_c',
+ 'type': 'static_library',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'GCC_PREFIX_HEADER': 'header.h',
+ },
+ },
+ {
+ 'target_name': 'precompiled_prefix_header_c',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'GCC_PREFIX_HEADER': 'header.h',
+ 'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
+ },
+ },
+
+ {
+ 'target_name': 'prefix_header_cc',
+ 'type': 'static_library',
+ 'sources': [ 'file.cc', ],
+ 'xcode_settings': {
+ 'GCC_PREFIX_HEADER': 'header.h',
+ },
+ },
+ {
+ 'target_name': 'precompiled_prefix_header_cc',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.cc', ],
+ 'xcode_settings': {
+ 'GCC_PREFIX_HEADER': 'header.h',
+ 'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
+ },
+ },
+
+ {
+ 'target_name': 'prefix_header_m',
+ 'type': 'static_library',
+ 'sources': [ 'file.m', ],
+ 'xcode_settings': {
+ 'GCC_PREFIX_HEADER': 'header.h',
+ },
+ },
+ {
+ 'target_name': 'precompiled_prefix_header_m',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.m', ],
+ 'xcode_settings': {
+ 'GCC_PREFIX_HEADER': 'header.h',
+ 'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
+ },
+ },
+
+ {
+ 'target_name': 'prefix_header_mm',
+ 'type': 'static_library',
+ 'sources': [ 'file.mm', ],
+ 'xcode_settings': {
+ 'GCC_PREFIX_HEADER': 'header.h',
+ },
+ },
+ {
+ 'target_name': 'precompiled_prefix_header_mm',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.mm', ],
+ 'xcode_settings': {
+ 'GCC_PREFIX_HEADER': 'header.h',
+ 'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/rebuild/TestApp-Info.plist b/third_party/python/gyp/test/mac/rebuild/TestApp-Info.plist
new file mode 100644
index 0000000000..98fd515200
--- /dev/null
+++ b/third_party/python/gyp/test/mac/rebuild/TestApp-Info.plist
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIconFile</key>
+ <string></string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.${PRODUCT_NAME}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSMinimumSystemVersion</key>
+ <string>${MACOSX_DEPLOYMENT_TARGET}</string>
+ <key>NSMainNibFile</key>
+ <string>MainMenu</string>
+ <key>NSPrincipalClass</key>
+ <string>NSApplication</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/rebuild/delay-touch.sh b/third_party/python/gyp/test/mac/rebuild/delay-touch.sh
new file mode 100755
index 0000000000..7caf105b6e
--- /dev/null
+++ b/third_party/python/gyp/test/mac/rebuild/delay-touch.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+set -e
+
+sleep 1 # mtime resolution is 1 sec on unix.
+touch "$1"
diff --git a/third_party/python/gyp/test/mac/rebuild/empty.c b/third_party/python/gyp/test/mac/rebuild/empty.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/rebuild/empty.c
diff --git a/third_party/python/gyp/test/mac/rebuild/main.c b/third_party/python/gyp/test/mac/rebuild/main.c
new file mode 100644
index 0000000000..237c8ce181
--- /dev/null
+++ b/third_party/python/gyp/test/mac/rebuild/main.c
@@ -0,0 +1 @@
+int main() {}
diff --git a/third_party/python/gyp/test/mac/rebuild/test.gyp b/third_party/python/gyp/test/mac/rebuild/test.gyp
new file mode 100644
index 0000000000..15b4e4ef2f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/rebuild/test.gyp
@@ -0,0 +1,56 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test App',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'main.c',
+ ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'TestApp-Info.plist',
+ },
+ },
+ {
+ 'target_name': 'test_app_postbuilds',
+ 'product_name': 'Test App 2',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'main.c',
+ ],
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'TestApp-Info.plist',
+ },
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Postbuild that touches the app binary',
+ 'action': [
+ './delay-touch.sh', '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'test_framework_postbuilds',
+ 'product_name': 'Test Framework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'empty.c',
+ ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Postbuild that touches the framework binary',
+ 'action': [
+ './delay-touch.sh', '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/rpath/file.c b/third_party/python/gyp/test/mac/rpath/file.c
new file mode 100644
index 0000000000..56757a701b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/rpath/file.c
@@ -0,0 +1 @@
+void f() {}
diff --git a/third_party/python/gyp/test/mac/rpath/main.c b/third_party/python/gyp/test/mac/rpath/main.c
new file mode 100644
index 0000000000..237c8ce181
--- /dev/null
+++ b/third_party/python/gyp/test/mac/rpath/main.c
@@ -0,0 +1 @@
+int main() {}
diff --git a/third_party/python/gyp/test/mac/rpath/test.gyp b/third_party/python/gyp/test/mac/rpath/test.gyp
new file mode 100644
index 0000000000..7255cb7cd2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/rpath/test.gyp
@@ -0,0 +1,48 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'default_rpath',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ },
+ {
+ 'target_name': 'explicit_rpath',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ 'xcode_settings': {
+ 'LD_RUNPATH_SEARCH_PATHS': ['@executable_path/.'],
+ },
+ },
+ {
+ 'target_name': 'explicit_rpaths_escaped',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ 'xcode_settings': {
+ # Xcode requires spaces to be escaped, else it ends up adding two
+ # independent rpaths.
+ 'LD_RUNPATH_SEARCH_PATHS': ['First\\ rpath', 'Second\\ rpath'],
+ },
+ },
+ {
+ 'target_name': 'explicit_rpaths_bundle',
+ 'product_name': 'My Framework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c' ],
+ 'xcode_settings': {
+ 'LD_RUNPATH_SEARCH_PATHS': ['@loader_path/.'],
+ },
+ },
+ {
+ 'target_name': 'executable',
+ 'type': 'executable',
+ 'sources': [ 'main.c' ],
+ 'xcode_settings': {
+ 'LD_RUNPATH_SEARCH_PATHS': ['@executable_path/.'],
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/sdkroot/file.cc b/third_party/python/gyp/test/mac/sdkroot/file.cc
new file mode 100644
index 0000000000..13ae971040
--- /dev/null
+++ b/third_party/python/gyp/test/mac/sdkroot/file.cc
@@ -0,0 +1,5 @@
+#include <map>
+using std::map;
+
+int main() {
+}
diff --git a/third_party/python/gyp/test/mac/sdkroot/test.gyp b/third_party/python/gyp/test/mac/sdkroot/test.gyp
new file mode 100644
index 0000000000..2fc11a0280
--- /dev/null
+++ b/third_party/python/gyp/test/mac/sdkroot/test.gyp
@@ -0,0 +1,35 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'mytarget',
+ 'type': 'executable',
+ 'sources': [ 'file.cc', ],
+ 'xcode_settings': {
+ 'SDKROOT': 'macosx%s',
+ },
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'envtest',
+ 'action': [ './test_shorthand.sh', ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'absolute',
+ 'type': 'executable',
+ 'sources': [ 'file.cc', ],
+ 'xcode_settings': {
+ 'SDKROOT': '<(sdk_path)',
+ },
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'envtest',
+ 'action': [ './test_shorthand.sh', ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/sdkroot/test_shorthand.sh b/third_party/python/gyp/test/mac/sdkroot/test_shorthand.sh
new file mode 100755
index 0000000000..ac4ac229ae
--- /dev/null
+++ b/third_party/python/gyp/test/mac/sdkroot/test_shorthand.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+found=false
+for sdk in 10.6 10.7 10.8 10.9 ; do
+ if expected=$(xcodebuild -version -sdk macosx$sdk Path 2>/dev/null) ; then
+ found=true
+ break
+ fi
+done
+if ! $found ; then
+ echo >&2 "cannot find installed SDK"
+ exit 1
+fi
+
+test $SDKROOT = $expected
diff --git a/third_party/python/gyp/test/mac/sourceless-module/empty.c b/third_party/python/gyp/test/mac/sourceless-module/empty.c
new file mode 100644
index 0000000000..237c8ce181
--- /dev/null
+++ b/third_party/python/gyp/test/mac/sourceless-module/empty.c
@@ -0,0 +1 @@
+int main() {}
diff --git a/third_party/python/gyp/test/mac/sourceless-module/empty.txt b/third_party/python/gyp/test/mac/sourceless-module/empty.txt
new file mode 100644
index 0000000000..139597f9cb
--- /dev/null
+++ b/third_party/python/gyp/test/mac/sourceless-module/empty.txt
@@ -0,0 +1,2 @@
+
+
diff --git a/third_party/python/gyp/test/mac/sourceless-module/fun.c b/third_party/python/gyp/test/mac/sourceless-module/fun.c
new file mode 100644
index 0000000000..d64ff8ca23
--- /dev/null
+++ b/third_party/python/gyp/test/mac/sourceless-module/fun.c
@@ -0,0 +1 @@
+int f() { return 42; }
diff --git a/third_party/python/gyp/test/mac/sourceless-module/test.gyp b/third_party/python/gyp/test/mac/sourceless-module/test.gyp
new file mode 100644
index 0000000000..cbbe63df02
--- /dev/null
+++ b/third_party/python/gyp/test/mac/sourceless-module/test.gyp
@@ -0,0 +1,96 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'empty_bundle',
+ 'type': 'loadable_module',
+ 'mac_bundle': 1,
+ },
+ {
+ 'target_name': 'resource_bundle',
+ 'type': 'loadable_module',
+ 'mac_bundle': 1,
+ 'actions': [
+ {
+ 'action_name': 'Add Resource',
+ 'inputs': [],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/app_manifest/foo.manifest',
+ ],
+ 'action': [
+ 'touch', '<(INTERMEDIATE_DIR)/app_manifest/foo.manifest',
+ ],
+ 'process_outputs_as_mac_bundle_resources': 1,
+ },
+ ],
+ },
+ {
+ 'target_name': 'dependent_on_resource_bundle',
+ 'type': 'executable',
+ 'sources': [ 'empty.c' ],
+ 'dependencies': [
+ 'resource_bundle',
+ ],
+ },
+
+ {
+ 'target_name': 'alib',
+ 'type': 'static_library',
+ 'sources': [ 'fun.c' ]
+ },
+ { # No sources, but depends on a static_library so must be linked.
+ 'target_name': 'resource_framework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'dependencies': [
+ 'alib',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'Add Resource',
+ 'inputs': [],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/app_manifest/foo.manifest',
+ ],
+ 'action': [
+ 'touch', '<(INTERMEDIATE_DIR)/app_manifest/foo.manifest',
+ ],
+ 'process_outputs_as_mac_bundle_resources': 1,
+ },
+ ],
+ },
+ {
+ 'target_name': 'dependent_on_resource_framework',
+ 'type': 'executable',
+ 'sources': [ 'empty.c' ],
+ 'dependencies': [
+ 'resource_framework',
+ ],
+ },
+
+    { # No actions, but still has resources.
+ 'target_name': 'mac_resource_bundle_no_actions',
+ 'product_extension': 'bundle',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'mac_bundle_resources': [
+ 'empty.txt',
+ ],
+ },
+ {
+ 'target_name': 'bundle_dependent_on_resource_bundle_no_actions',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [ 'empty.c' ],
+ 'dependencies': [
+ 'mac_resource_bundle_no_actions',
+ ],
+ 'mac_bundle_resources': [
+ '<(PRODUCT_DIR)/mac_resource_bundle_no_actions.bundle',
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/mac/strip/file.c b/third_party/python/gyp/test/mac/strip/file.c
new file mode 100644
index 0000000000..a4c504de71
--- /dev/null
+++ b/third_party/python/gyp/test/mac/strip/file.c
@@ -0,0 +1,22 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+static void the_static_function() {}
+__attribute__((used)) void the_used_function() {}
+
+__attribute__((visibility("hidden"))) __attribute__((used))
+ void the_hidden_function() {}
+__attribute__((visibility("default"))) __attribute__((used))
+ void the_visible_function() {}
+
+extern const int eci;
+__attribute__((used)) int i;
+__attribute__((used)) const int ci = 34623;
+
+void the_function() {
+ the_static_function();
+ the_used_function();
+ the_hidden_function();
+ the_visible_function();
+}
diff --git a/third_party/python/gyp/test/mac/strip/main.c b/third_party/python/gyp/test/mac/strip/main.c
new file mode 100644
index 0000000000..b2291a6b09
--- /dev/null
+++ b/third_party/python/gyp/test/mac/strip/main.c
@@ -0,0 +1,25 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+static void the_static_function() {}
+__attribute__((used)) void the_used_function() {}
+
+__attribute__((visibility("hidden"))) __attribute__((used))
+void the_hidden_function() {}
+__attribute__((visibility("default"))) __attribute__((used))
+void the_visible_function() {}
+
+void the_function() {}
+
+extern const int eci;
+__attribute__((used)) int i;
+__attribute__((used)) const int ci = 34623;
+
+int main() {
+ the_function();
+ the_static_function();
+ the_used_function();
+ the_hidden_function();
+ the_visible_function();
+}
diff --git a/third_party/python/gyp/test/mac/strip/strip.saves b/third_party/python/gyp/test/mac/strip/strip.saves
new file mode 100644
index 0000000000..b60ca62857
--- /dev/null
+++ b/third_party/python/gyp/test/mac/strip/strip.saves
@@ -0,0 +1,5 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file would list symbols that should not be stripped.
diff --git a/third_party/python/gyp/test/mac/strip/subdirectory/nested_file.c b/third_party/python/gyp/test/mac/strip/subdirectory/nested_file.c
new file mode 100644
index 0000000000..50daa6c13b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/strip/subdirectory/nested_file.c
@@ -0,0 +1 @@
+void nested_f() {}
diff --git a/third_party/python/gyp/test/mac/strip/subdirectory/nested_strip.saves b/third_party/python/gyp/test/mac/strip/subdirectory/nested_strip.saves
new file mode 100644
index 0000000000..d434c0ef45
--- /dev/null
+++ b/third_party/python/gyp/test/mac/strip/subdirectory/nested_strip.saves
@@ -0,0 +1,5 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file would list symbols that should not be stripped.
diff --git a/third_party/python/gyp/test/mac/strip/subdirectory/subdirectory.gyp b/third_party/python/gyp/test/mac/strip/subdirectory/subdirectory.gyp
new file mode 100644
index 0000000000..5d0d190914
--- /dev/null
+++ b/third_party/python/gyp/test/mac/strip/subdirectory/subdirectory.gyp
@@ -0,0 +1,38 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'nested_strip_save',
+ 'type': 'shared_library',
+ 'sources': [ 'nested_file.c', ],
+ 'xcode_settings': {
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ 'STRIPFLAGS': '-s $(CHROMIUM_STRIP_SAVE_FILE)',
+ 'CHROMIUM_STRIP_SAVE_FILE': 'nested_strip.saves',
+ },
+ },
+ {
+ 'target_name': 'nested_strip_save_postbuild',
+ 'type': 'shared_library',
+ 'sources': [ 'nested_file.c', ],
+ 'xcode_settings': {
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ 'STRIPFLAGS': '-s $(CHROMIUM_STRIP_SAVE_FILE)',
+ 'CHROMIUM_STRIP_SAVE_FILE': 'nested_strip.saves',
+ },
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'Action that reads CHROMIUM_STRIP_SAVE_FILE',
+ 'action': [
+ './test_reading_save_file_from_postbuild.sh',
+ ],
+ },
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/mac/strip/subdirectory/test_reading_save_file_from_postbuild.sh b/third_party/python/gyp/test/mac/strip/subdirectory/test_reading_save_file_from_postbuild.sh
new file mode 100755
index 0000000000..976943680e
--- /dev/null
+++ b/third_party/python/gyp/test/mac/strip/subdirectory/test_reading_save_file_from_postbuild.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+set -e
+
+test -f ${CHROMIUM_STRIP_SAVE_FILE}
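The postbuild above works because gyp exports the target's xcode_settings, including CHROMIUM_STRIP_SAVE_FILE, into the environment of postbuild actions, so the one-line script only has to check that the named file exists. For clarity, the same check written in Python (an illustration of what the shell line does, not code the test uses):

import os
import sys

# Mirror of test_reading_save_file_from_postbuild.sh: fail unless the
# variable is set and names an existing file relative to the working dir.
save_file = os.environ.get('CHROMIUM_STRIP_SAVE_FILE')
if not save_file or not os.path.isfile(save_file):
    sys.exit(1)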
diff --git a/third_party/python/gyp/test/mac/strip/test-defaults.gyp b/third_party/python/gyp/test/mac/strip/test-defaults.gyp
new file mode 100644
index 0000000000..e688b955a7
--- /dev/null
+++ b/third_party/python/gyp/test/mac/strip/test-defaults.gyp
@@ -0,0 +1,51 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'make_global_settings': [
+ ['CC', '/usr/bin/clang'],
+ ],
+ 'target_defaults': {
+ 'xcode_settings': {
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ },
+ },
+ 'targets': [
+ {
+ 'target_name': 'single_dylib',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ },
+ {
+ 'target_name': 'single_so',
+ 'type': 'loadable_module',
+ 'sources': [ 'file.c', ],
+ },
+ {
+ 'target_name': 'single_exe',
+ 'type': 'executable',
+ 'sources': [ 'main.c', ],
+ },
+
+ {
+ 'target_name': 'bundle_dylib',
+ 'type': 'shared_library',
+ 'mac_bundle': '1',
+ 'sources': [ 'file.c', ],
+ },
+ {
+ 'target_name': 'bundle_so',
+ 'type': 'loadable_module',
+ 'mac_bundle': '1',
+ 'sources': [ 'file.c', ],
+ },
+ {
+ 'target_name': 'bundle_exe',
+ 'type': 'executable',
+ 'mac_bundle': '1',
+ 'sources': [ 'main.c', ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/strip/test.gyp b/third_party/python/gyp/test/mac/strip/test.gyp
new file mode 100644
index 0000000000..2558aa91bb
--- /dev/null
+++ b/third_party/python/gyp/test/mac/strip/test.gyp
@@ -0,0 +1,119 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# These xcode_settings affect stripping:
+# "Deployment postprocessing involves stripping the binary, and setting
+# its file mode, owner, and group."
+#'DEPLOYMENT_POSTPROCESSING': 'YES',
+
+# "Specifies whether to strip symbol information from the binary.
+# Prerequisite: $DEPLOYMENT_POSTPROCESSING = YES" "Default Value: 'NO'"
+#'STRIP_INSTALLED_PRODUCT': 'YES',
+
+# "Values:
+# * all: Strips the binary completely, removing the symbol table and
+# relocation information
+# * non-global: Strips nonglobal symbols but saves external symbols.
+# * debugging: Strips debugging symbols but saves local and global
+# symbols."
+# (maps to no flag, -x, -S in that order)
+#'STRIP_STYLE': 'non-global',
+
+# "Additional strip flags"
+#'STRIPFLAGS': '-c',
+
+# "YES: Copied binaries are stripped of debugging symbols. This does
+# not cause the binary produced by the linker to be stripped. Use
+# 'STRIP_INSTALLED_PRODUCT (Strip Linked Product)' to have the linker
+# strip the binary."
+#'COPY_PHASE_STRIP': 'NO',
+{
+ 'targets': [
+ {
+ 'target_name': 'no_postprocess',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEPLOYMENT_POSTPROCESSING': 'NO',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ },
+ },
+ {
+ 'target_name': 'no_strip',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'NO',
+ },
+ },
+ {
+ 'target_name': 'strip_all',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ 'STRIP_STYLE': 'all',
+ },
+ },
+ {
+ 'target_name': 'strip_nonglobal',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ 'STRIP_STYLE': 'non-global',
+ },
+ },
+ {
+ 'target_name': 'strip_debugging',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ 'STRIP_STYLE': 'debugging',
+ },
+ },
+ {
+ 'target_name': 'strip_all_custom_flags',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ 'STRIP_STYLE': 'all',
+ 'STRIPFLAGS': '-c',
+ },
+ },
+ {
+ 'target_name': 'strip_all_bundle',
+ 'type': 'shared_library',
+ 'mac_bundle': '1',
+ 'sources': [ 'file.c', ],
+ 'xcode_settings': {
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ 'STRIP_STYLE': 'all',
+ },
+ },
+ {
+ 'target_name': 'strip_save',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ 'dependencies': [
+ 'subdirectory/subdirectory.gyp:nested_strip_save',
+ 'subdirectory/subdirectory.gyp:nested_strip_save_postbuild',
+ ],
+ 'xcode_settings': {
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ 'STRIPFLAGS': '-s $(CHROMIUM_STRIP_SAVE_FILE)',
+ 'CHROMIUM_STRIP_SAVE_FILE': 'strip.saves',
+ },
+ },
+ ],
+}
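The comment block at the top of this test.gyp describes how the stripping settings combine: stripping only happens when DEPLOYMENT_POSTPROCESSING and STRIP_INSTALLED_PRODUCT are both YES, the STRIP_STYLE values 'all', 'non-global' and 'debugging' map to no extra flag, -x and -S respectively, and STRIPFLAGS are passed through verbatim (the strip_save target uses them to pass -s with a save file). A small sketch of that mapping, for illustration only; the real command lines are assembled by the gyp generators, not by this code:

# STRIP_STYLE values and the strip(1) flag they correspond to, per the
# comments in test.gyp.
STRIP_STYLE_FLAGS = {
    'all': [],             # remove symbol table and relocation info
    'non-global': ['-x'],  # keep external symbols
    'debugging': ['-S'],   # strip only debugging symbols
}

def strip_command(binary, style, stripflags=()):
    """Assemble an illustrative strip command line for a built binary."""
    return ['strip'] + STRIP_STYLE_FLAGS[style] + list(stripflags) + [binary]

# The strip_all_custom_flags target (STRIP_STYLE 'all', STRIPFLAGS '-c')
# would roughly correspond to: strip -c <built dylib>
print(strip_command('libstrip_all_custom_flags.dylib', 'all', ['-c']))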
diff --git a/third_party/python/gyp/test/mac/swift-library/Info.plist b/third_party/python/gyp/test/mac/swift-library/Info.plist
new file mode 100644
index 0000000000..804990ca5e
--- /dev/null
+++ b/third_party/python/gyp/test/mac/swift-library/Info.plist
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIconFile</key>
+ <string></string>
+ <key>CFBundleIdentifier</key>
+ <string>com.yourcompany.${PRODUCT_NAME:identifier}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>FMWK</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>NSPrincipalClass</key>
+ <string></string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/swift-library/file.swift b/third_party/python/gyp/test/mac/swift-library/file.swift
new file mode 100644
index 0000000000..88db7da5c6
--- /dev/null
+++ b/third_party/python/gyp/test/mac/swift-library/file.swift
@@ -0,0 +1,9 @@
+import Foundation
+
+public class GypSwiftTest {
+ var myProperty = false
+
+ init() {
+ self.myProperty = true
+ }
+}
\ No newline at end of file
diff --git a/third_party/python/gyp/test/mac/swift-library/test.gyp b/third_party/python/gyp/test/mac/swift-library/test.gyp
new file mode 100644
index 0000000000..373a677cbd
--- /dev/null
+++ b/third_party/python/gyp/test/mac/swift-library/test.gyp
@@ -0,0 +1,21 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'SwiftFramework',
+ 'product_name': 'SwiftFramework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Info.plist',
+ 'CODE_SIGNING_REQUIRED': 'NO',
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
+ },
+ 'sources': [
+ 'file.swift',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/type_envvars/file.c b/third_party/python/gyp/test/mac/type_envvars/file.c
new file mode 100644
index 0000000000..9cddaf1b0b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/type_envvars/file.c
@@ -0,0 +1,6 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void f() {}
+int main() {}
diff --git a/third_party/python/gyp/test/mac/type_envvars/test.gyp b/third_party/python/gyp/test/mac/type_envvars/test.gyp
new file mode 100644
index 0000000000..465670056b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/type_envvars/test.gyp
@@ -0,0 +1,100 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'my_app',
+ 'product_name': 'My App',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'envtest',
+ 'action': [ './test_bundle_executable.sh', ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'bundle_loadable_module',
+ 'type': 'loadable_module',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'envtest',
+ 'action': [ './test_bundle_loadable_module.sh', ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'bundle_shared_library',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'envtest',
+ 'action': [ './test_bundle_shared_library.sh', ],
+ },
+ ],
+ },
+ # Types 'static_library' and 'none' can't exist as bundles.
+
+ {
+ 'target_name': 'nonbundle_executable',
+ 'type': 'executable',
+ 'sources': [ 'file.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'envtest',
+ 'action': [ './test_nonbundle_executable.sh', ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'nonbundle_loadable_module',
+ 'type': 'loadable_module',
+ 'sources': [ 'file.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'envtest',
+ 'action': [ './test_nonbundle_loadable_module.sh', ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'nonbundle_shared_library',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'envtest',
+ 'action': [ './test_nonbundle_shared_library.sh', ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'nonbundle_static_library',
+ 'type': 'static_library',
+ 'sources': [ 'file.c', ],
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'envtest',
+ 'action': [ './test_nonbundle_static_library.sh', ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'nonbundle_none',
+ 'type': 'none',
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'envtest',
+ 'action': [ './test_nonbundle_none.sh', ],
+ },
+ ],
+ },
+ ],
+}
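Each target above attaches a postbuild that runs one of the test_*.sh scripts that follow; those scripts assert which Xcode-style environment variables gyp defines for that target type. Collected from the assertions in those scripts, the MACH_O_TYPE and PRODUCT_TYPE expectations look like this (None stands for "must not be set"):

# Expected (MACH_O_TYPE, PRODUCT_TYPE) per target type, as asserted by the
# shell scripts in this directory.
EXPECTED_ENV = {
    'bundle executable':         ('mh_execute', 'com.apple.product-type.application'),
    'bundle loadable_module':    ('mh_bundle',  'com.apple.product-type.bundle'),
    'bundle shared_library':     ('mh_dylib',   'com.apple.product-type.framework'),
    'nonbundle executable':      (None,         'com.apple.product-type.tool'),
    'nonbundle loadable_module': ('mh_bundle',  'com.apple.product-type.library.dynamic'),
    'nonbundle shared_library':  ('mh_dylib',   'com.apple.product-type.library.dynamic'),
    'nonbundle static_library':  ('staticlib',  'com.apple.product-type.library.static'),
    'nonbundle none':            (None,         None),
}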
diff --git a/third_party/python/gyp/test/mac/type_envvars/test_bundle_executable.sh b/third_party/python/gyp/test/mac/type_envvars/test_bundle_executable.sh
new file mode 100755
index 0000000000..9a08c8f0cc
--- /dev/null
+++ b/third_party/python/gyp/test/mac/type_envvars/test_bundle_executable.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+test $MACH_O_TYPE = mh_execute
+test $PRODUCT_TYPE = com.apple.product-type.application
+test "${PRODUCT_NAME}" = "My App"
+test "${FULL_PRODUCT_NAME}" = "My App.app"
+
+test "${EXECUTABLE_NAME}" = "My App"
+test "${EXECUTABLE_PATH}" = "My App.app/Contents/MacOS/My App"
+test "${WRAPPER_NAME}" = "My App.app"
+
+test "${CONTENTS_FOLDER_PATH}" = "My App.app/Contents"
+test "${EXECUTABLE_FOLDER_PATH}" = "My App.app/Contents/MacOS"
+test "${UNLOCALIZED_RESOURCES_FOLDER_PATH}" = "My App.app/Contents/Resources"
+test "${JAVA_FOLDER_PATH}" = "My App.app/Contents/Resources/Java"
+test "${FRAMEWORKS_FOLDER_PATH}" = "My App.app/Contents/Frameworks"
+test "${SHARED_FRAMEWORKS_FOLDER_PATH}" = "My App.app/Contents/SharedFrameworks"
+test "${SHARED_SUPPORT_FOLDER_PATH}" = "My App.app/Contents/SharedSupport"
+test "${PLUGINS_FOLDER_PATH}" = "My App.app/Contents/PlugIns"
+test "${XPCSERVICES_FOLDER_PATH}" = "My App.app/Contents/XPCServices"
+
+[[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
+[[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
+
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/third_party/python/gyp/test/mac/type_envvars/test_bundle_loadable_module.sh b/third_party/python/gyp/test/mac/type_envvars/test_bundle_loadable_module.sh
new file mode 100755
index 0000000000..b5c7638293
--- /dev/null
+++ b/third_party/python/gyp/test/mac/type_envvars/test_bundle_loadable_module.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+test $MACH_O_TYPE = mh_bundle
+test $PRODUCT_TYPE = com.apple.product-type.bundle
+test $PRODUCT_NAME = bundle_loadable_module
+test $FULL_PRODUCT_NAME = bundle_loadable_module.bundle
+
+test $EXECUTABLE_NAME = bundle_loadable_module
+test $EXECUTABLE_PATH = \
+ "bundle_loadable_module.bundle/Contents/MacOS/bundle_loadable_module"
+test $WRAPPER_NAME = bundle_loadable_module.bundle
+
+test $CONTENTS_FOLDER_PATH = bundle_loadable_module.bundle/Contents
+test $EXECUTABLE_FOLDER_PATH = bundle_loadable_module.bundle/Contents/MacOS
+test $UNLOCALIZED_RESOURCES_FOLDER_PATH = \
+ bundle_loadable_module.bundle/Contents/Resources
+test $JAVA_FOLDER_PATH = bundle_loadable_module.bundle/Contents/Resources/Java
+test $FRAMEWORKS_FOLDER_PATH = bundle_loadable_module.bundle/Contents/Frameworks
+test $SHARED_FRAMEWORKS_FOLDER_PATH = \
+ bundle_loadable_module.bundle/Contents/SharedFrameworks
+test $SHARED_SUPPORT_FOLDER_PATH = \
+ bundle_loadable_module.bundle/Contents/SharedSupport
+test $PLUGINS_FOLDER_PATH = bundle_loadable_module.bundle/Contents/PlugIns
+test $XPCSERVICES_FOLDER_PATH = \
+ bundle_loadable_module.bundle/Contents/XPCServices
+
+[[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
+[[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
+
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/third_party/python/gyp/test/mac/type_envvars/test_bundle_shared_library.sh b/third_party/python/gyp/test/mac/type_envvars/test_bundle_shared_library.sh
new file mode 100755
index 0000000000..9c2dc0626f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/type_envvars/test_bundle_shared_library.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+test $MACH_O_TYPE = mh_dylib
+test $PRODUCT_TYPE = com.apple.product-type.framework
+test $PRODUCT_NAME = bundle_shared_library
+test $FULL_PRODUCT_NAME = bundle_shared_library.framework
+
+test $EXECUTABLE_NAME = bundle_shared_library
+test $EXECUTABLE_PATH = \
+ "bundle_shared_library.framework/Versions/A/bundle_shared_library"
+test $WRAPPER_NAME = bundle_shared_library.framework
+
+test $CONTENTS_FOLDER_PATH = bundle_shared_library.framework/Versions/A
+test $EXECUTABLE_FOLDER_PATH = bundle_shared_library.framework/Versions/A
+test $UNLOCALIZED_RESOURCES_FOLDER_PATH = \
+ bundle_shared_library.framework/Versions/A/Resources
+test $JAVA_FOLDER_PATH = \
+ bundle_shared_library.framework/Versions/A/Resources/Java
+test $FRAMEWORKS_FOLDER_PATH = \
+ bundle_shared_library.framework/Versions/A/Frameworks
+test $SHARED_FRAMEWORKS_FOLDER_PATH = \
+ bundle_shared_library.framework/Versions/A/SharedFrameworks
+test $SHARED_SUPPORT_FOLDER_PATH = \
+ bundle_shared_library.framework/Versions/A/Resources
+test $PLUGINS_FOLDER_PATH = bundle_shared_library.framework/Versions/A/PlugIns
+test $XPCSERVICES_FOLDER_PATH = \
+ bundle_shared_library.framework/Versions/A/XPCServices
+
+test $DYLIB_INSTALL_NAME_BASE = "/Library/Frameworks"
+test $LD_DYLIB_INSTALL_NAME = \
+ "/Library/Frameworks/bundle_shared_library.framework/Versions/A/bundle_shared_library"
+
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/third_party/python/gyp/test/mac/type_envvars/test_check_sdkroot.sh b/third_party/python/gyp/test/mac/type_envvars/test_check_sdkroot.sh
new file mode 100755
index 0000000000..1297dbeff1
--- /dev/null
+++ b/third_party/python/gyp/test/mac/type_envvars/test_check_sdkroot.sh
@@ -0,0 +1,47 @@
+#!/bin/bash
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+# `xcodebuild -version` output looks like
+# Xcode 4.6.3
+# Build version 4H1503
+# or like
+# Xcode 4.2
+# Build version 4C199
+# or like
+# Xcode 3.2.6
+# Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
+# BuildVersion: 10M2518
+# Convert that to '0463', '0420' and '0326' respectively.
+function xcodeversion() {
+ xcodebuild -version | awk '/Xcode ([0-9]+\.[0-9]+(\.[0-9]+)?)/ {
+ version = $2
+ gsub(/\./, "", version)
+ if (length(version) < 3) {
+ version = version "0"
+ }
+ if (length(version) < 4) {
+ version = "0" version
+ }
+ }
+ END { print version }'
+}
+
+# Returns true if |string1| is smaller than |string2|.
+# This function assumes that both strings represent Xcode version numbers
+# as returned by |xcodeversion|.
+function smaller() {
+ local min="$(echo -ne "${1}\n${2}\n" | sort -n | head -n1)"
+ test "${min}" != "${2}"
+}
+
+if [[ "$(xcodeversion)" < "0500" ]]; then
+ # Xcode version is older than 5.0, check that SDKROOT is set but empty.
+ [[ -z "${SDKROOT}" && -z "${SDKROOT-_}" ]]
+else
+ # Xcode version is 5.0 or newer, check that SDKROOT is set.
+ [[ "${SDKROOT}" == "$(xcodebuild -version -sdk '' Path)" ]]
+fi
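The awk in xcodeversion() turns the 'xcodebuild -version' output into a four-character string so that versions can be compared as plain strings, exactly as the comment's examples describe: '4.6.3' becomes '0463', '4.2' becomes '0420' and '3.2.6' becomes '0326'. The same normalization in Python, purely to spell out the padding logic (not code the test uses):

def normalize_xcode_version(version):
    """Normalize a dotted Xcode version ('4.6.3') to a 4-char string ('0463')."""
    digits = version.replace('.', '')
    if len(digits) < 3:   # pad a missing patch digit: '42' -> '420'
        digits += '0'
    if len(digits) < 4:   # pad a single-digit major: '463' -> '0463'
        digits = '0' + digits
    return digits

assert normalize_xcode_version('4.6.3') == '0463'
assert normalize_xcode_version('4.2') == '0420'
assert normalize_xcode_version('3.2.6') == '0326'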
diff --git a/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_executable.sh b/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_executable.sh
new file mode 100755
index 0000000000..9fbbd95b8d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_executable.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+# For some reason, Xcode doesn't set MACH_O_TYPE for non-bundle executables.
+# Check for "not set", not just "empty":
+[[ ! $MACH_O_TYPE && ${MACH_O_TYPE-_} ]]
+test $PRODUCT_TYPE = com.apple.product-type.tool
+test $PRODUCT_NAME = nonbundle_executable
+test $FULL_PRODUCT_NAME = nonbundle_executable
+
+test $EXECUTABLE_NAME = nonbundle_executable
+test $EXECUTABLE_PATH = nonbundle_executable
+[[ ! $WRAPPER_NAME && ${WRAPPER_NAME-_} ]]
+
+[[ ! $CONTENTS_FOLDER_PATH && ${CONTENTS_FOLDER_PATH-_} ]]
+[[ ! $EXECUTABLE_FOLDER_PATH && ${EXECUTABLE_FOLDER_PATH-_} ]]
+[[ ! $UNLOCALIZED_RESOURCES_FOLDER_PATH \
+ && ${UNLOCALIZED_RESOURCES_FOLDER_PATH-_} ]]
+[[ ! $JAVA_FOLDER_PATH && ${JAVA_FOLDER_PATH-_} ]]
+[[ ! $FRAMEWORKS_FOLDER_PATH && ${FRAMEWORKS_FOLDER_PATH-_} ]]
+[[ ! $SHARED_FRAMEWORKS_FOLDER_PATH && ${SHARED_FRAMEWORKS_FOLDER_PATH-_} ]]
+[[ ! $SHARED_SUPPORT_FOLDER_PATH && ${SHARED_SUPPORT_FOLDER_PATH-_} ]]
+[[ ! $PLUGINS_FOLDER_PATH && ${PLUGINS_FOLDER_PATH-_} ]]
+[[ ! $XPCSERVICES_FOLDER_PATH && ${XPCSERVICES_FOLDER_PATH-_} ]]
+
+[[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
+[[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
+
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_loadable_module.sh b/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_loadable_module.sh
new file mode 100755
index 0000000000..b4c3ba976e
--- /dev/null
+++ b/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_loadable_module.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+test $MACH_O_TYPE = mh_bundle
+test $PRODUCT_TYPE = com.apple.product-type.library.dynamic
+test $PRODUCT_NAME = nonbundle_loadable_module
+test $FULL_PRODUCT_NAME = nonbundle_loadable_module.so
+
+test $EXECUTABLE_NAME = nonbundle_loadable_module.so
+test $EXECUTABLE_PATH = nonbundle_loadable_module.so
+[[ ! $WRAPPER_NAME && ${WRAPPER_NAME-_} ]]
+
+[[ ! $CONTENTS_FOLDER_PATH && ${CONTENTS_FOLDER_PATH-_} ]]
+[[ ! $EXECUTABLE_FOLDER_PATH && ${EXECUTABLE_FOLDER_PATH-_} ]]
+[[ ! $UNLOCALIZED_RESOURCES_FOLDER_PATH \
+ && ${UNLOCALIZED_RESOURCES_FOLDER_PATH-_} ]]
+[[ ! $JAVA_FOLDER_PATH && ${JAVA_FOLDER_PATH-_} ]]
+[[ ! $FRAMEWORKS_FOLDER_PATH && ${FRAMEWORKS_FOLDER_PATH-_} ]]
+[[ ! $SHARED_FRAMEWORKS_FOLDER_PATH && ${SHARED_FRAMEWORKS_FOLDER_PATH-_} ]]
+[[ ! $SHARED_SUPPORT_FOLDER_PATH && ${SHARED_SUPPORT_FOLDER_PATH-_} ]]
+[[ ! $PLUGINS_FOLDER_PATH && ${PLUGINS_FOLDER_PATH-_} ]]
+[[ ! $XPCSERVICES_FOLDER_PATH && ${XPCSERVICES_FOLDER_PATH-_} ]]
+
+test $DYLIB_INSTALL_NAME_BASE = "/usr/local/lib"
+test $LD_DYLIB_INSTALL_NAME = "/usr/local/lib/nonbundle_loadable_module.so"
+
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_none.sh b/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_none.sh
new file mode 100755
index 0000000000..e2dc7fd9cd
--- /dev/null
+++ b/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_none.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+# Check for "not set", not just "empty":
+[[ ! $MACH_O_TYPE && ${MACH_O_TYPE-_} ]]
+[[ ! $PRODUCT_TYPE && ${PRODUCT_TYPE-_} ]]
+test $PRODUCT_NAME = nonbundle_none
+[[ ! $FULL_PRODUCT_NAME && ${FULL_PRODUCT_NAME-_} ]]
+
+[[ ! $EXECUTABLE_NAME && ${EXECUTABLE_NAME-_} ]]
+[[ ! $EXECUTABLE_PATH && ${EXECUTABLE_PATH-_} ]]
+[[ ! $WRAPPER_NAME && ${WRAPPER_NAME-_} ]]
+
+[[ ! $CONTENTS_FOLDER_PATH && ${CONTENTS_FOLDER_PATH-_} ]]
+[[ ! $EXECUTABLE_FOLDER_PATH && ${EXECUTABLE_FOLDER_PATH-_} ]]
+[[ ! $UNLOCALIZED_RESOURCES_FOLDER_PATH \
+ && ${UNLOCALIZED_RESOURCES_FOLDER_PATH-_} ]]
+[[ ! $JAVA_FOLDER_PATH && ${JAVA_FOLDER_PATH-_} ]]
+[[ ! $FRAMEWORKS_FOLDER_PATH && ${FRAMEWORKS_FOLDER_PATH-_} ]]
+[[ ! $SHARED_FRAMEWORKS_FOLDER_PATH && ${SHARED_FRAMEWORKS_FOLDER_PATH-_} ]]
+[[ ! $SHARED_SUPPORT_FOLDER_PATH && ${SHARED_SUPPORT_FOLDER_PATH-_} ]]
+[[ ! $PLUGINS_FOLDER_PATH && ${PLUGINS_FOLDER_PATH-_} ]]
+[[ ! $XPCSERVICES_FOLDER_PATH && ${XPCSERVICES_FOLDER_PATH-_} ]]
+
+[[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
+[[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
+
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_shared_library.sh b/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_shared_library.sh
new file mode 100755
index 0000000000..ba63ec1a48
--- /dev/null
+++ b/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_shared_library.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+test $MACH_O_TYPE = mh_dylib
+test $PRODUCT_TYPE = com.apple.product-type.library.dynamic
+test $PRODUCT_NAME = nonbundle_shared_library
+test $FULL_PRODUCT_NAME = libnonbundle_shared_library.dylib
+
+test $EXECUTABLE_NAME = libnonbundle_shared_library.dylib
+test $EXECUTABLE_PATH = libnonbundle_shared_library.dylib
+[[ ! $WRAPPER_NAME && ${WRAPPER_NAME-_} ]]
+
+[[ ! $CONTENTS_FOLDER_PATH && ${CONTENTS_FOLDER_PATH-_} ]]
+[[ ! $EXECUTABLE_FOLDER_PATH && ${EXECUTABLE_FOLDER_PATH-_} ]]
+[[ ! $UNLOCALIZED_RESOURCES_FOLDER_PATH && \
+ ${UNLOCALIZED_RESOURCES_FOLDER_PATH-_} ]]
+[[ ! $JAVA_FOLDER_PATH && ${JAVA_FOLDER_PATH-_} ]]
+[[ ! $FRAMEWORKS_FOLDER_PATH && ${FRAMEWORKS_FOLDER_PATH-_} ]]
+[[ ! $SHARED_FRAMEWORKS_FOLDER_PATH && ${SHARED_FRAMEWORKS_FOLDER_PATH-_} ]]
+[[ ! $SHARED_SUPPORT_FOLDER_PATH && ${SHARED_SUPPORT_FOLDER_PATH-_} ]]
+[[ ! $PLUGINS_FOLDER_PATH && ${PLUGINS_FOLDER_PATH-_} ]]
+[[ ! $XPCSERVICES_FOLDER_PATH && ${XPCSERVICES_FOLDER_PATH-_} ]]
+
+test $DYLIB_INSTALL_NAME_BASE = "/usr/local/lib"
+test $LD_DYLIB_INSTALL_NAME = "/usr/local/lib/libnonbundle_shared_library.dylib"
+
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_static_library.sh b/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_static_library.sh
new file mode 100755
index 0000000000..63aac57b0b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/type_envvars/test_nonbundle_static_library.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+test $MACH_O_TYPE = staticlib
+test $PRODUCT_TYPE = com.apple.product-type.library.static
+test $PRODUCT_NAME = nonbundle_static_library
+test $FULL_PRODUCT_NAME = libnonbundle_static_library.a
+
+test $EXECUTABLE_NAME = libnonbundle_static_library.a
+test $EXECUTABLE_PATH = libnonbundle_static_library.a
+[[ ! $WRAPPER_NAME && ${WRAPPER_NAME-_} ]]
+
+[[ ! $CONTENTS_FOLDER_PATH && ${CONTENTS_FOLDER_PATH-_} ]]
+[[ ! $EXECUTABLE_FOLDER_PATH && ${EXECUTABLE_FOLDER_PATH-_} ]]
+[[ ! $UNLOCALIZED_RESOURCES_FOLDER_PATH && \
+ ${UNLOCALIZED_RESOURCES_FOLDER_PATH-_} ]]
+[[ ! $JAVA_FOLDER_PATH && ${JAVA_FOLDER_PATH-_} ]]
+[[ ! $FRAMEWORKS_FOLDER_PATH && ${FRAMEWORKS_FOLDER_PATH-_} ]]
+[[ ! $SHARED_FRAMEWORKS_FOLDER_PATH && ${SHARED_FRAMEWORKS_FOLDER_PATH-_} ]]
+[[ ! $SHARED_SUPPORT_FOLDER_PATH && ${SHARED_SUPPORT_FOLDER_PATH-_} ]]
+[[ ! $PLUGINS_FOLDER_PATH && ${PLUGINS_FOLDER_PATH-_} ]]
+[[ ! $XPCSERVICES_FOLDER_PATH && ${XPCSERVICES_FOLDER_PATH-_} ]]
+
+[[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
+[[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
+
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/third_party/python/gyp/test/mac/unicode-settings/file.cc b/third_party/python/gyp/test/mac/unicode-settings/file.cc
new file mode 100644
index 0000000000..b2f997621b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/unicode-settings/file.cc
@@ -0,0 +1,2 @@
+int main() {
+}
diff --git a/third_party/python/gyp/test/mac/unicode-settings/test.gyp b/third_party/python/gyp/test/mac/unicode-settings/test.gyp
new file mode 100644
index 0000000000..b331ae453f
--- /dev/null
+++ b/third_party/python/gyp/test/mac/unicode-settings/test.gyp
@@ -0,0 +1,23 @@
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'myapp',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [ 'file.cc', ],
+ 'xcode_settings': {
+ 'BUNDLE_DISPLAY_NAME': 'α\011',
+ },
+ 'postbuilds': [
+ {
+ 'postbuild_name': 'envtest',
+ 'action': [ './test_bundle_display_name.sh', ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/unicode-settings/test_bundle_display_name.sh b/third_party/python/gyp/test/mac/unicode-settings/test_bundle_display_name.sh
new file mode 100755
index 0000000000..95dd6267a3
--- /dev/null
+++ b/third_party/python/gyp/test/mac/unicode-settings/test_bundle_display_name.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+test "${BUNDLE_DISPLAY_NAME}" = 'α '
diff --git a/third_party/python/gyp/test/mac/xcode-env-order/Info.plist b/third_party/python/gyp/test/mac/xcode-env-order/Info.plist
new file mode 100644
index 0000000000..e11f21e52d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-env-order/Info.plist
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIconFile</key>
+ <string></string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.${PRODUCT_NAME}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>LSMinimumSystemVersion</key>
+ <string>${MACOSX_DEPLOYMENT_TARGET}</string>
+ <key>NSMainNibFile</key>
+ <string>MainMenu</string>
+ <key>NSPrincipalClass</key>
+ <string>NSApplication</string>
+
+ <key>BraceProcessedKey1</key>
+ <string>${BRACE_DEPENDENT_KEY1}</string>
+ <key>BraceProcessedKey2</key>
+ <string>${BRACE_DEPENDENT_KEY2}</string>
+ <key>BraceProcessedKey3</key>
+ <string>${BRACE_DEPENDENT_KEY3}</string>
+
+ <key>ParenProcessedKey1</key>
+ <string>${PAREN_DEPENDENT_KEY1}</string>
+ <key>ParenProcessedKey2</key>
+ <string>${PAREN_DEPENDENT_KEY2}</string>
+ <key>ParenProcessedKey3</key>
+ <string>${PAREN_DEPENDENT_KEY3}</string>
+
+ <key>BareProcessedKey1</key>
+ <string>${BARE_DEPENDENT_KEY1}</string>
+ <key>BareProcessedKey2</key>
+ <string>${BARE_DEPENDENT_KEY2}</string>
+ <key>BareProcessedKey3</key>
+ <string>${BARE_DEPENDENT_KEY3}</string>
+
+ <key>MixedProcessedKey</key>
+ <string>${MIXED_DEPENDENT_KEY}</string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/xcode-env-order/file.ext1 b/third_party/python/gyp/test/mac/xcode-env-order/file.ext1
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-env-order/file.ext1
diff --git a/third_party/python/gyp/test/mac/xcode-env-order/file.ext2 b/third_party/python/gyp/test/mac/xcode-env-order/file.ext2
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-env-order/file.ext2
diff --git a/third_party/python/gyp/test/mac/xcode-env-order/file.ext3 b/third_party/python/gyp/test/mac/xcode-env-order/file.ext3
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-env-order/file.ext3
diff --git a/third_party/python/gyp/test/mac/xcode-env-order/main.c b/third_party/python/gyp/test/mac/xcode-env-order/main.c
new file mode 100644
index 0000000000..1bf4b2a11a
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-env-order/main.c
@@ -0,0 +1,7 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/mac/xcode-env-order/test.gyp b/third_party/python/gyp/test/mac/xcode-env-order/test.gyp
new file mode 100644
index 0000000000..8f975f7d6b
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-env-order/test.gyp
@@ -0,0 +1,121 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'test_app',
+ 'product_name': 'Test',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'main.c',
+ 'file.ext1',
+ 'file.ext2',
+ 'file.ext3',
+ ],
+ # Env vars in copies.
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/${PRODUCT_NAME}-copy-brace',
+ 'files': [ 'main.c', ], # ${SOURCE_ROOT} doesn't work with xcode
+ },
+ {
+ 'destination': '<(PRODUCT_DIR)/$(PRODUCT_NAME)-copy-paren',
+ 'files': [ '$(SOURCE_ROOT)/main.c', ],
+ },
+ {
+ 'destination': '<(PRODUCT_DIR)/$PRODUCT_NAME-copy-bare',
+ 'files': [ 'main.c', ], # $SOURCE_ROOT doesn't work with xcode
+ },
+ ],
+ # Env vars in actions. The $FOO's are here to test that env vars that
+ # aren't defined are handled in some way that doesn't break the build.
+ 'actions': [
+ {
+ 'action_name': 'Action copy braces ${PRODUCT_NAME} ${FOO}',
+ 'description': 'Action copy braces ${PRODUCT_NAME} ${FOO}',
+ 'inputs': [ '${SOURCE_ROOT}/main.c' ],
+ # Referencing ${PRODUCT_NAME} in action outputs doesn't work with
+ # the Xcode generator (PRODUCT_NAME expands to "Test Support").
+ 'outputs': [ '<(PRODUCT_DIR)/action-copy-brace.txt' ],
+ 'action': [ 'cp', '${SOURCE_ROOT}/main.c',
+ '<(PRODUCT_DIR)/action-copy-brace.txt' ],
+ },
+ {
+ 'action_name': 'Action copy parens $(PRODUCT_NAME) $(FOO)',
+ 'description': 'Action copy parens $(PRODUCT_NAME) $(FOO)',
+ 'inputs': [ '$(SOURCE_ROOT)/main.c' ],
+ # Referencing $(PRODUCT_NAME) in action outputs doesn't work with
+ # the Xcode generator (PRODUCT_NAME expands to "Test Support").
+ 'outputs': [ '<(PRODUCT_DIR)/action-copy-paren.txt' ],
+ 'action': [ 'cp', '$(SOURCE_ROOT)/main.c',
+ '<(PRODUCT_DIR)/action-copy-paren.txt' ],
+ },
+ {
+ 'action_name': 'Action copy bare $PRODUCT_NAME $FOO',
+ 'description': 'Action copy bare $PRODUCT_NAME $FOO',
+ 'inputs': [ '$SOURCE_ROOT/main.c' ],
+ # Referencing $PRODUCT_NAME in action outputs doesn't work with
+ # the Xcode generator (PRODUCT_NAME expands to "Test Support").
+ 'outputs': [ '<(PRODUCT_DIR)/action-copy-bare.txt' ],
+ 'action': [ 'cp', '$SOURCE_ROOT/main.c',
+ '<(PRODUCT_DIR)/action-copy-bare.txt' ],
+ },
+ ],
+ # Env vars in xcode_settings.
+ 'xcode_settings': {
+ 'INFOPLIST_FILE': 'Info.plist',
+ 'STRING_KEY': '/Source/Project',
+
+ 'BRACE_DEPENDENT_KEY2': '${STRING_KEY}/${PRODUCT_NAME}',
+ 'BRACE_DEPENDENT_KEY1': 'D:${BRACE_DEPENDENT_KEY2}',
+ 'BRACE_DEPENDENT_KEY3': '${PRODUCT_TYPE}:${BRACE_DEPENDENT_KEY1}',
+
+ 'PAREN_DEPENDENT_KEY2': '$(STRING_KEY)/$(PRODUCT_NAME)',
+ 'PAREN_DEPENDENT_KEY1': 'D:$(PAREN_DEPENDENT_KEY2)',
+ 'PAREN_DEPENDENT_KEY3': '$(PRODUCT_TYPE):$(PAREN_DEPENDENT_KEY1)',
+
+ 'BARE_DEPENDENT_KEY2': '$STRING_KEY/$PRODUCT_NAME',
+ 'BARE_DEPENDENT_KEY1': 'D:$BARE_DEPENDENT_KEY2',
+ 'BARE_DEPENDENT_KEY3': '$PRODUCT_TYPE:$BARE_DEPENDENT_KEY1',
+
+ 'MIXED_DEPENDENT_KEY': '${STRING_KEY}:$(PRODUCT_NAME):$MACH_O_TYPE',
+ },
+ # Env vars in rules. The $FOO's are here to test that env vars that
+ # aren't defined are handled in some way that doesn't break the build.
+ 'rules': [
+ {
+ 'rule_name': 'brace_rule',
+ 'message': 'Rule braces ${PRODUCT_NAME} ${FOO} <(RULE_INPUT_NAME)',
+ 'extension': 'ext1',
+ 'inputs': [ '${SOURCE_ROOT}/main.c' ],
+ 'outputs': [ '<(PRODUCT_DIR)/rule-copy-brace.txt' ],
+ 'action': [ 'cp', '${SOURCE_ROOT}/main.c',
+ '<(PRODUCT_DIR)/rule-copy-brace.txt' ],
+ },
+ {
+ 'rule_name': 'paren_rule',
+ 'message': 'Rule parens $(PRODUCT_NAME) $(FOO) <(RULE_INPUT_NAME)',
+ 'extension': 'ext2',
+ 'inputs': [ '$(SOURCE_ROOT)/main.c' ],
+ 'outputs': [ '<(PRODUCT_DIR)/rule-copy-paren.txt' ],
+ 'action': [ 'cp', '$(SOURCE_ROOT)/main.c',
+ '<(PRODUCT_DIR)/rule-copy-paren.txt' ],
+ },
+ # TODO: Fails in xcode. Looks like a bug in the xcode generator though
+ # (which uses makefiles for rules, and thinks $PRODUCT_NAME is
+ # $(P)RODUCT_NAME).
+ #{
+ # 'rule_name': 'bare_rule',
+ # 'message': 'Rule copy bare $PRODUCT_NAME $FOO',
+ # 'extension': 'ext3',
+ # 'inputs': [ '$SOURCE_ROOT/main.c' ],
+ # 'outputs': [ '<(PRODUCT_DIR)/rule-copy-bare.txt' ],
+ # 'action': [ 'cp', '$SOURCE_ROOT/main.c',
+ # '<(PRODUCT_DIR)/rule-copy-bare.txt' ],
+ #},
+ ],
+ },
+ ],
+}
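The xcode_settings above define the BRACE_, PAREN_ and BARE_DEPENDENT_KEY settings in terms of each other, of STRING_KEY, and of built-ins such as PRODUCT_NAME and PRODUCT_TYPE, so the test can check that gyp expands ${FOO}, $(FOO) and $FOO references in dependency order (each key after the keys it refers to) before they are substituted into Info.plist, copies, actions and rules. A tiny sketch of that kind of ordered expansion, handling only the ${...} spelling and meant purely as an illustration of the idea, not of gyp's implementation:

import re

def expand(settings, value, _depth=0):
    """Recursively expand ${NAME} references against a settings dict."""
    assert _depth < 10, 'cycle in settings'
    def repl(match):
        name = match.group(1)
        return expand(settings, settings.get(name, ''), _depth + 1)
    return re.sub(r'\$\{(\w+)\}', repl, value)

settings = {
    'PRODUCT_NAME': 'Test',
    'STRING_KEY': '/Source/Project',
    'BRACE_DEPENDENT_KEY2': '${STRING_KEY}/${PRODUCT_NAME}',
    'BRACE_DEPENDENT_KEY1': 'D:${BRACE_DEPENDENT_KEY2}',
}
# KEY1 resolves through KEY2, STRING_KEY and PRODUCT_NAME:
# prints 'D:/Source/Project/Test'.
print(expand(settings, settings['BRACE_DEPENDENT_KEY1']))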
diff --git a/third_party/python/gyp/test/mac/xcode-gcc/aliasing.cc b/third_party/python/gyp/test/mac/xcode-gcc/aliasing.cc
new file mode 100644
index 0000000000..16a41efb15
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-gcc/aliasing.cc
@@ -0,0 +1,13 @@
+#include <stdio.h>
+
+void check(int* h, long* k) {
+ *h = 1;
+ *k = 0;
+ printf("%d\n", *h);
+}
+
+int main(void) {
+ long k;
+ check((int*)&k, &k);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/mac/xcode-gcc/test-clang.gyp b/third_party/python/gyp/test/mac/xcode-gcc/test-clang.gyp
new file mode 100644
index 0000000000..9f4a98ae73
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-gcc/test-clang.gyp
@@ -0,0 +1,42 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'make_global_settings': [
+ ['CC', '/usr/bin/clang'],
+ ['CXX', '/usr/bin/clang++'],
+ ],
+
+ 'targets': [
+ {
+ 'target_name': 'aliasing_yes',
+ 'type': 'executable',
+ 'sources': [ 'aliasing.cc', ],
+ 'xcode_settings': {
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'GCC_STRICT_ALIASING': 'YES',
+ 'GCC_OPTIMIZATION_LEVEL': 2,
+ },
+ },
+ {
+ 'target_name': 'aliasing_no',
+ 'type': 'executable',
+ 'sources': [ 'aliasing.cc', ],
+ 'xcode_settings': {
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'GCC_STRICT_ALIASING': 'NO',
+ 'GCC_OPTIMIZATION_LEVEL': 2,
+ },
+ },
+ {
+ 'target_name': 'aliasing_default',
+ 'type': 'executable',
+ 'sources': [ 'aliasing.cc', ],
+ 'xcode_settings': {
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'GCC_OPTIMIZATION_LEVEL': 2,
+ },
+ },
+ ],
+}
+
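aliasing.cc makes the effect of GCC_STRICT_ALIASING observable: check() stores through an int* and a long* that in fact refer to the same long, so at -O2 a compiler allowed to assume strict aliasing may keep 1 cached for *h and print "1", while with strict aliasing disabled the store through the long* must be visible and "0" is printed. A driver could compare the targets' output along these lines; this is a hypothetical sketch (the driver script is not part of this hunk, and what aliasing_default prints depends on the compiler's default, so it is not asserted here):

#!/usr/bin/env python
# Hypothetical driver sketch for test-clang.gyp: build the aliasing targets
# and compare what the built executables print at -O2.
import TestGyp

test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])

test.run_gyp('test-clang.gyp', chdir='xcode-gcc')
test.build('test-clang.gyp', test.ALL, chdir='xcode-gcc')

# With GCC_STRICT_ALIASING=NO the store through the long* must be visible.
test.run_built_executable('aliasing_no', chdir='xcode-gcc', stdout='0\n')

# With GCC_STRICT_ALIASING=YES clang may assume the pointers do not alias;
# at -O2 it typically keeps the cached value, so '1' is expected here.
test.run_built_executable('aliasing_yes', chdir='xcode-gcc', stdout='1\n')

test.pass_test()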
diff --git a/third_party/python/gyp/test/mac/xcode-gcc/test.gyp b/third_party/python/gyp/test/mac/xcode-gcc/test.gyp
new file mode 100644
index 0000000000..1ca8b215d8
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-gcc/test.gyp
@@ -0,0 +1,60 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'target_defaults': {
+ 'xcode_settings': {
+ 'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES',
+ },
+ },
+
+ 'variables': {
+ # Non-failing tests should check that these trivial files in every language
+ # still compile correctly.
+ 'valid_sources': [
+ 'valid_c.c',
+ 'valid_cc.cc',
+ 'valid_m.m',
+ 'valid_mm.mm',
+ ],
+ },
+
+ # Targets come in pairs: 'foo' and 'foo-fail', with the former building with
+ # no warnings and the latter not.
+ 'targets': [
+ # GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO (default: YES):
+ {
+ 'target_name': 'warn_about_invalid_offsetof_macro',
+ 'type': 'executable',
+ 'sources': [
+ 'warn_about_invalid_offsetof_macro.cc',
+ '<@(valid_sources)',
+ ],
+ 'xcode_settings': {
+ 'GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO': 'NO',
+ },
+ },
+ {
+ 'target_name': 'warn_about_invalid_offsetof_macro-fail',
+ 'type': 'executable',
+ 'sources': [ 'warn_about_invalid_offsetof_macro.cc', ],
+ },
+ # GCC_WARN_ABOUT_MISSING_NEWLINE (default: NO):
+ {
+ 'target_name': 'warn_about_missing_newline',
+ 'type': 'executable',
+ 'sources': [
+ 'warn_about_missing_newline.c',
+ '<@(valid_sources)',
+ ],
+ },
+ {
+ 'target_name': 'warn_about_missing_newline-fail',
+ 'type': 'executable',
+ 'sources': [ 'warn_about_missing_newline.c', ],
+ 'xcode_settings': {
+ 'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES',
+ },
+ },
+ ],
+}
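The targets in this test.gyp come in pass/fail pairs: with GCC_TREAT_WARNINGS_AS_ERRORS set to YES in target_defaults, the plain targets are configured so the warning in question does not fire, while the '-fail' targets are configured so it does, which should break their build. A driver would therefore build the '-fail' targets expecting a non-zero status, roughly as sketched below; this assumes TestGyp.build() forwards a status keyword to its runner, and the real driver script is not part of this hunk.

#!/usr/bin/env python
# Hypothetical sketch: pass/fail warning pairs in xcode-gcc/test.gyp.
import TestGyp

test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
test.run_gyp('test.gyp', chdir='xcode-gcc')

# These compile without the warning, so warnings-as-errors still lets them build.
for target in ['warn_about_invalid_offsetof_macro', 'warn_about_missing_newline']:
  test.build('test.gyp', target, chdir='xcode-gcc')

# The '-fail' variants trigger the warning, which is promoted to an error.
for target in ['warn_about_invalid_offsetof_macro-fail',
               'warn_about_missing_newline-fail']:
  test.build('test.gyp', target, chdir='xcode-gcc', status=1)

test.pass_test()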
diff --git a/third_party/python/gyp/test/mac/xcode-gcc/valid_c.c b/third_party/python/gyp/test/mac/xcode-gcc/valid_c.c
new file mode 100644
index 0000000000..2b10ac3ed7
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-gcc/valid_c.c
@@ -0,0 +1,8 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file exists to test that valid C files compile correctly.
+
+void FunctionInCFile(void) {
+}
diff --git a/third_party/python/gyp/test/mac/xcode-gcc/valid_cc.cc b/third_party/python/gyp/test/mac/xcode-gcc/valid_cc.cc
new file mode 100644
index 0000000000..31cddc3c9c
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-gcc/valid_cc.cc
@@ -0,0 +1,8 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file exists to test that valid C++ files compile correctly.
+
+void FunctionInCCFile() {
+}
diff --git a/third_party/python/gyp/test/mac/xcode-gcc/valid_m.m b/third_party/python/gyp/test/mac/xcode-gcc/valid_m.m
new file mode 100644
index 0000000000..95bddb2723
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-gcc/valid_m.m
@@ -0,0 +1,8 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file exists to test that valid Objective-C files compile correctly.
+
+void FunctionInMFile(void) {
+}
diff --git a/third_party/python/gyp/test/mac/xcode-gcc/valid_mm.mm b/third_party/python/gyp/test/mac/xcode-gcc/valid_mm.mm
new file mode 100644
index 0000000000..a7db7e3ad6
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-gcc/valid_mm.mm
@@ -0,0 +1,8 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file exists to test that valid Objective-C++ files compile correctly.
+
+void FunctionInMMFile() {
+}
diff --git a/third_party/python/gyp/test/mac/xcode-gcc/warn_about_invalid_offsetof_macro.cc b/third_party/python/gyp/test/mac/xcode-gcc/warn_about_invalid_offsetof_macro.cc
new file mode 100644
index 0000000000..4a4612be0d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-gcc/warn_about_invalid_offsetof_macro.cc
@@ -0,0 +1,15 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#define offsetof(st, m) ((unsigned)((char*)&((st*)0)->m - (char*)0))
+
+struct MyStruct {
+ virtual void MyFunc() = 0;
+ int my_member;
+};
+
+int main() {
+ unsigned x = offsetof(MyStruct, my_member);
+ return x ? 0 : 1;
+}
diff --git a/third_party/python/gyp/test/mac/xcode-gcc/warn_about_missing_newline.c b/third_party/python/gyp/test/mac/xcode-gcc/warn_about_missing_newline.c
new file mode 100644
index 0000000000..6faf0895db
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-gcc/warn_about_missing_newline.c
@@ -0,0 +1,8 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Important: Don't terminate this file with a newline.
+int main() {
+ return 0;
+}
\ No newline at end of file
diff --git a/third_party/python/gyp/test/mac/xcode-support-actions/source.c b/third_party/python/gyp/test/mac/xcode-support-actions/source.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-support-actions/source.c
diff --git a/third_party/python/gyp/test/mac/xcode-support-actions/test.gyp b/third_party/python/gyp/test/mac/xcode-support-actions/test.gyp
new file mode 100644
index 0000000000..ad81b8c456
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcode-support-actions/test.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'target',
+ 'product_name': 'Product',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [
+ '<(PRODUCT_DIR)/copy.c',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'Helper',
+ 'description': 'Helps',
+ 'inputs': [ 'source.c' ],
+ 'outputs': [ '<(PRODUCT_DIR)/copy.c' ],
+ 'action': [ 'cp', '${SOURCE_ROOT}/source.c',
+ '<(PRODUCT_DIR)/copy.c' ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/mac/xctest/MyClass.h b/third_party/python/gyp/test/mac/xctest/MyClass.h
new file mode 100644
index 0000000000..dde13aa33d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xctest/MyClass.h
@@ -0,0 +1,8 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Foundation/Foundation.h>
+
+@interface MyClass : NSObject
+@end
diff --git a/third_party/python/gyp/test/mac/xctest/MyClass.m b/third_party/python/gyp/test/mac/xctest/MyClass.m
new file mode 100644
index 0000000000..df11471b07
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xctest/MyClass.m
@@ -0,0 +1,8 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "MyClass.h"
+
+@implementation MyClass
+@end
diff --git a/third_party/python/gyp/test/mac/xctest/TestCase.m b/third_party/python/gyp/test/mac/xctest/TestCase.m
new file mode 100644
index 0000000000..36846a1fda
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xctest/TestCase.m
@@ -0,0 +1,16 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <XCTest/XCTest.h>
+#import "MyClass.h"
+
+@interface TestCase : XCTestCase
+@end
+
+@implementation TestCase
+- (void)testFoo {
+ MyClass *foo = [[MyClass alloc] init];
+ XCTAssertNotNil(foo, @"expected non-nil object");
+}
+@end
diff --git a/third_party/python/gyp/test/mac/xctest/resource.txt b/third_party/python/gyp/test/mac/xctest/resource.txt
new file mode 100644
index 0000000000..257cc5642c
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xctest/resource.txt
@@ -0,0 +1 @@
+foo
diff --git a/third_party/python/gyp/test/mac/xctest/test.gyp b/third_party/python/gyp/test/mac/xctest/test.gyp
new file mode 100644
index 0000000000..ac25656b35
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xctest/test.gyp
@@ -0,0 +1,47 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'classes',
+ 'type': 'static_library',
+ 'sources': [
+ 'MyClass.h',
+ 'MyClass.m',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ ],
+ },
+ },
+ {
+ 'target_name': 'tests',
+ 'type': 'loadable_module',
+ 'mac_xctest_bundle': 1,
+ 'sources': [
+ 'TestCase.m',
+ ],
+ 'dependencies': [
+ 'classes',
+ ],
+ 'mac_bundle_resources': [
+ 'resource.txt',
+ ],
+ 'xcode_settings': {
+ 'WRAPPER_EXTENSION': 'xctest',
+ 'FRAMEWORK_SEARCH_PATHS': [
+ '$(inherited)',
+ '$(DEVELOPER_FRAMEWORKS_DIR)',
+ ],
+ 'OTHER_LDFLAGS': [
+ '$(inherited)',
+ '-ObjC',
+ ],
+ },
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/mac/xctest/test.xcodeproj/xcshareddata/xcschemes/classes.xcscheme b/third_party/python/gyp/test/mac/xctest/test.xcodeproj/xcshareddata/xcschemes/classes.xcscheme
new file mode 100644
index 0000000000..6bd1bb9696
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xctest/test.xcodeproj/xcshareddata/xcschemes/classes.xcscheme
@@ -0,0 +1,69 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Scheme
+ LastUpgradeVersion = "0500"
+ version = "1.3">
+ <BuildAction
+ parallelizeBuildables = "YES"
+ buildImplicitDependencies = "YES">
+ <BuildActionEntries>
+ <BuildActionEntry
+ buildForTesting = "YES"
+ buildForRunning = "YES"
+ buildForProfiling = "YES"
+ buildForArchiving = "YES"
+ buildForAnalyzing = "YES">
+ <BuildableReference
+ BuildableIdentifier = "primary"
+ BlueprintIdentifier = "D3B79173B4570A3C70A902FF"
+ BuildableName = "libclasses.a"
+ BlueprintName = "classes"
+ ReferencedContainer = "container:test.xcodeproj">
+ </BuildableReference>
+ </BuildActionEntry>
+ </BuildActionEntries>
+ </BuildAction>
+ <TestAction
+ selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
+ selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
+ shouldUseLaunchSchemeArgsEnv = "YES"
+ buildConfiguration = "Default">
+ <Testables>
+ <TestableReference
+ skipped = "NO">
+ <BuildableReference
+ BuildableIdentifier = "primary"
+ BlueprintIdentifier = "2ACDAB234B9E5D65CACBCF9C"
+ BuildableName = "tests.xctest"
+ BlueprintName = "tests"
+ ReferencedContainer = "container:test.xcodeproj">
+ </BuildableReference>
+ </TestableReference>
+ </Testables>
+ </TestAction>
+ <LaunchAction
+ selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
+ selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
+ launchStyle = "0"
+ useCustomWorkingDirectory = "NO"
+ buildConfiguration = "Default"
+ ignoresPersistentStateOnLaunch = "NO"
+ debugDocumentVersioning = "YES"
+ allowLocationSimulation = "YES">
+ <AdditionalOptions>
+ </AdditionalOptions>
+ </LaunchAction>
+ <ProfileAction
+ shouldUseLaunchSchemeArgsEnv = "YES"
+ savedToolIdentifier = ""
+ useCustomWorkingDirectory = "NO"
+ buildConfiguration = "Default"
+ debugDocumentVersioning = "YES">
+ </ProfileAction>
+ <AnalyzeAction
+ buildConfiguration = "Default">
+ </AnalyzeAction>
+ <ArchiveAction
+ buildConfiguration = "Default"
+ revealArchiveInOrganizer = "YES">
+ </ArchiveAction>
+</Scheme>
diff --git a/third_party/python/gyp/test/mac/xcuitest/Info.plist b/third_party/python/gyp/test/mac/xcuitest/Info.plist
new file mode 100644
index 0000000000..ae8852b836
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcuitest/Info.plist
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIconFile</key>
+ <string></string>
+ <key>CFBundleIdentifier</key>
+ <string>com.yourcompany.${PRODUCT_NAME}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>BNDL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>NSPrincipalClass</key>
+ <string></string>
+</dict>
+</plist>
diff --git a/third_party/python/gyp/test/mac/xcuitest/MyAppDelegate.h b/third_party/python/gyp/test/mac/xcuitest/MyAppDelegate.h
new file mode 100644
index 0000000000..445be2cb42
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcuitest/MyAppDelegate.h
@@ -0,0 +1,8 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+
+@interface MyAppDelegate : NSObject<UIApplicationDelegate>
+@end
diff --git a/third_party/python/gyp/test/mac/xcuitest/MyAppDelegate.m b/third_party/python/gyp/test/mac/xcuitest/MyAppDelegate.m
new file mode 100644
index 0000000000..6ad60fa9d4
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcuitest/MyAppDelegate.m
@@ -0,0 +1,19 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "MyAppDelegate.h"
+
+
+@implementation MyAppDelegate
+@synthesize window;
+
+- (BOOL)application:(UIApplication *)application
+ didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+ self.window = [[UIWindow alloc] init];
+ self.window.rootViewController = [[UIViewController alloc] init];
+ [self.window makeKeyAndVisible];
+ return YES;
+}
+
+@end
diff --git a/third_party/python/gyp/test/mac/xcuitest/TestCase.m b/third_party/python/gyp/test/mac/xcuitest/TestCase.m
new file mode 100644
index 0000000000..1f32b7af74
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcuitest/TestCase.m
@@ -0,0 +1,15 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <XCTest/XCTest.h>
+
+@interface TestCase : XCTestCase
+@end
+
+@implementation TestCase
+- (void)testFoo {
+ XCUIApplication *foo = [[XCUIApplication alloc] init];
+ XCTAssertNotNil(foo, @"expected non-nil object");
+}
+@end
diff --git a/third_party/python/gyp/test/mac/xcuitest/main.m b/third_party/python/gyp/test/mac/xcuitest/main.m
new file mode 100644
index 0000000000..e7cb62e639
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcuitest/main.m
@@ -0,0 +1,15 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+
+#import "MyAppDelegate.h"
+
+int main(int argc, char * argv[]) {
+ @autoreleasepool {
+ UIApplicationMain(argc, argv,
+ nil, NSStringFromClass([MyAppDelegate class]));
+ }
+ return 1;
+}
diff --git a/third_party/python/gyp/test/mac/xcuitest/resource.txt b/third_party/python/gyp/test/mac/xcuitest/resource.txt
new file mode 100644
index 0000000000..257cc5642c
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcuitest/resource.txt
@@ -0,0 +1 @@
+foo
diff --git a/third_party/python/gyp/test/mac/xcuitest/test.gyp b/third_party/python/gyp/test/mac/xcuitest/test.gyp
new file mode 100644
index 0000000000..80cdf9032d
--- /dev/null
+++ b/third_party/python/gyp/test/mac/xcuitest/test.gyp
@@ -0,0 +1,69 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'xcode_settings': {
+ 'SDKROOT': 'iphoneos',
+ 'FRAMEWORK_SEARCH_PATHS': [
+ '$(inherited)',
+ '$(DEVELOPER_FRAMEWORKS_DIR)',
+ ],
+ 'OTHER_LDFLAGS': [
+ '$(inherited)',
+ '-ObjC',
+ ],
+ 'GCC_PREFIX_HEADER': '',
+ 'CLANG_ENABLE_OBJC_ARC': 'YES',
+ 'INFOPLIST_FILE': 'Info.plist',
+ },
+ },
+ 'targets': [
+ {
+ 'target_name': 'testApp',
+ 'type': 'executable',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'MyAppDelegate.h',
+ 'MyAppDelegate.m',
+ 'main.m',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+ ],
+ },
+ },
+ {
+ 'target_name': 'tests',
+ 'type': 'loadable_module',
+ 'mac_bundle': 1,
+ 'mac_xcuitest_bundle': 1,
+ 'sources': [
+ 'TestCase.m',
+ ],
+ 'dependencies': [
+ 'testApp',
+ ],
+ 'mac_bundle_resources': [
+ 'resource.txt',
+ ],
+ 'variables': {
+ # This must *not* be set for xctest ui tests.
+ 'xctest_host': '',
+ },
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/XCTest.framework',
+ ]
+ },
+ 'xcode_settings': {
+ 'WRAPPER_EXTENSION': 'xctest',
+ 'TEST_TARGET_NAME': 'testApp',
+ },
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/make/dependencies.gyp b/third_party/python/gyp/test/make/dependencies.gyp
new file mode 100644
index 0000000000..e2bee24fce
--- /dev/null
+++ b/third_party/python/gyp/test/make/dependencies.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'main',
+ 'type': 'executable',
+ 'sources': [
+ 'main.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/make/gyptest-dependencies.py b/third_party/python/gyp/test/make/gyptest-dependencies.py
new file mode 100755
index 0000000000..d215f76782
--- /dev/null
+++ b/third_party/python/gyp/test/make/gyptest-dependencies.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that .d files and all.deps are properly generated.
+"""
+
+import TestGyp
+
+# .d files are only used by the make build.
+test = TestGyp.TestGyp(formats=['make'])
+
+test.run_gyp('dependencies.gyp')
+
+test.build('dependencies.gyp', test.ALL)
+
+deps_file = test.built_file_path(".deps/out/Default/obj.target/main/main.o.d")
+test.must_contain(deps_file, "main.h")
+
+# Build a second time to make sure we generate all.deps.
+test.build('dependencies.gyp', test.ALL)
+
+test.pass_test()
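The .d files checked above are ordinary compiler-emitted dependency fragments of the form "main.o: main.cc main.h", so the test only has to confirm that main.h appears in the generated file. A minimal standalone sketch of that check, outside the TestGyp harness and with the output path assumed rather than resolved via built_file_path():

    # Illustrative sketch only, not part of the test suite.
    # The .d location below is an assumption; the real test resolves it via
    # test.built_file_path() so it follows the generator's output layout.
    deps_path = 'out/Default/.deps/out/Default/obj.target/main/main.o.d'
    with open(deps_path) as f:
        assert 'main.h' in f.read(), 'main.o.d should list main.h as a prerequisite'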
diff --git a/third_party/python/gyp/test/make/gyptest-noload.py b/third_party/python/gyp/test/make/gyptest-noload.py
new file mode 100755
index 0000000000..1f5103315c
--- /dev/null
+++ b/third_party/python/gyp/test/make/gyptest-noload.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Tests the use of the NO_LOAD flag which makes loading sub .mk files
+optional.
+"""
+
+# Python 2.5 needs this for the with statement.
+from __future__ import with_statement
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['make'])
+
+test.run_gyp('all.gyp', chdir='noload')
+
+test.relocate('noload', 'relocate/noload')
+
+test.build('build/all.gyp', test.ALL, chdir='relocate/noload')
+test.run_built_executable('exe', chdir='relocate/noload',
+ stdout='Hello from shared.c.\n')
+
+# Just a sanity check that NO_LOAD=lib doesn't break anything.
+test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
+ arguments=['NO_LOAD=lib'])
+test.run_built_executable('exe', chdir='relocate/noload',
+ stdout='Hello from shared.c.\n')
+test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
+ arguments=['NO_LOAD=z'])
+test.run_built_executable('exe', chdir='relocate/noload',
+ stdout='Hello from shared.c.\n')
+
+# Make sure we can rebuild without reloading the sub .mk file.
+with open('relocate/noload/main.c', 'a') as src_file:
+ src_file.write("\n")
+test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
+ arguments=['NO_LOAD=lib'])
+test.run_built_executable('exe', chdir='relocate/noload',
+ stdout='Hello from shared.c.\n')
+
+# Change shared.c, but verify that it doesn't get rebuilt if we don't load it.
+with open('relocate/noload/lib/shared.c', 'w') as shared_file:
+ shared_file.write(
+ '#include "shared.h"\n'
+ 'const char kSharedStr[] = "modified";\n'
+ )
+test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
+ arguments=['NO_LOAD=lib'])
+test.run_built_executable('exe', chdir='relocate/noload',
+ stdout='Hello from shared.c.\n')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/make/main.cc b/third_party/python/gyp/test/make/main.cc
new file mode 100644
index 0000000000..3b9a705c24
--- /dev/null
+++ b/third_party/python/gyp/test/make/main.cc
@@ -0,0 +1,12 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+#include "main.h"
+
+int main(void) {
+ printf("hello world\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/make/main.h b/third_party/python/gyp/test/make/main.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/make/main.h
diff --git a/third_party/python/gyp/test/make/noload/all.gyp b/third_party/python/gyp/test/make/noload/all.gyp
new file mode 100644
index 0000000000..1617a9e97c
--- /dev/null
+++ b/third_party/python/gyp/test/make/noload/all.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'exe',
+ 'type': 'executable',
+ 'sources': [
+ 'main.c',
+ ],
+ 'dependencies': [
+ 'lib/shared.gyp:shared',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/make/noload/lib/shared.c b/third_party/python/gyp/test/make/noload/lib/shared.c
new file mode 100644
index 0000000000..51776c5acf
--- /dev/null
+++ b/third_party/python/gyp/test/make/noload/lib/shared.c
@@ -0,0 +1,3 @@
+#include "shared.h"
+
+const char kSharedStr[] = "shared.c";
diff --git a/third_party/python/gyp/test/make/noload/lib/shared.gyp b/third_party/python/gyp/test/make/noload/lib/shared.gyp
new file mode 100644
index 0000000000..8a8841b3a0
--- /dev/null
+++ b/third_party/python/gyp/test/make/noload/lib/shared.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'shared',
+ 'type': 'shared_library',
+ 'sources': [
+ 'shared.c',
+ 'shared.h',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/make/noload/lib/shared.h b/third_party/python/gyp/test/make/noload/lib/shared.h
new file mode 100644
index 0000000000..a21da7538b
--- /dev/null
+++ b/third_party/python/gyp/test/make/noload/lib/shared.h
@@ -0,0 +1 @@
+extern const char kSharedStr[];
diff --git a/third_party/python/gyp/test/make/noload/main.c b/third_party/python/gyp/test/make/noload/main.c
new file mode 100644
index 0000000000..26ec1889ad
--- /dev/null
+++ b/third_party/python/gyp/test/make/noload/main.c
@@ -0,0 +1,9 @@
+#include <stdio.h>
+
+#include "lib/shared.h"
+
+int main(void)
+{
+ printf("Hello from %s.\n", kSharedStr);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/make_global_settings/ar/gyptest-make_global_settings_ar.py b/third_party/python/gyp/test/make_global_settings/ar/gyptest-make_global_settings_ar.py
new file mode 100644
index 0000000000..aabc5618d5
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/ar/gyptest-make_global_settings_ar.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies 'AR' in make_global_settings.
+"""
+
+import os
+import sys
+import TestGyp
+
+def resolve_path(test, path):
+ if path is None:
+ return None
+ elif test.format == 'make':
+ return '$(abspath %s)' % path
+ elif test.format in ['ninja', 'xcode-ninja']:
+ return os.path.join('..', '..', path)
+ else:
+ test.fail_test()
+
+
+def verify_ar_target(test, ar=None, rel_path=False):
+ if rel_path:
+ ar_expected = resolve_path(test, ar)
+ else:
+ ar_expected = ar
+ # Resolve default values
+ if ar_expected is None:
+ if test.format == 'make':
+ # Make generator hasn't set the default value for AR.
+ # You can remove the following assertion as long as it doesn't
+ # break existing projects.
+ test.must_not_contain('Makefile', 'AR ?= ')
+ return
+ elif test.format in ['ninja', 'xcode-ninja']:
+ if sys.platform == 'win32':
+ ar_expected = 'lib.exe'
+ else:
+ ar_expected = 'ar'
+ if test.format == 'make':
+ test.must_contain('Makefile', 'AR ?= %s' % ar_expected)
+ elif test.format in ['ninja', 'xcode-ninja']:
+ test.must_contain('out/Default/build.ninja', 'ar = %s' % ar_expected)
+ else:
+ test.fail_test()
+
+
+def verify_ar_host(test, ar=None, rel_path=False):
+ if rel_path:
+ ar_expected = resolve_path(test, ar)
+ else:
+ ar_expected = ar
+ # Resolve default values
+ if ar_expected is None:
+ if sys.platform == 'win32':
+ ar_expected = 'lib.exe'
+ else:
+ ar_expected = 'ar'
+ if test.format == 'make':
+ test.must_contain('Makefile', 'AR.host ?= %s' % ar_expected)
+ elif test.format in ['ninja', 'xcode-ninja']:
+ test.must_contain('out/Default/build.ninja', 'ar_host = %s' % ar_expected)
+ else:
+ test.fail_test()
+
+
+test_format = ['ninja']
+if sys.platform.startswith('linux') or sys.platform == 'darwin':
+ test_format += ['make']
+
+test = TestGyp.TestGyp(formats=test_format)
+
+# Check default values
+test.run_gyp('make_global_settings_ar.gyp')
+verify_ar_target(test)
+
+
+# Check default values with GYP_CROSSCOMPILE enabled.
+with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
+ test.run_gyp('make_global_settings_ar.gyp')
+verify_ar_target(test)
+verify_ar_host(test)
+
+
+# Test 'AR' in 'make_global_settings'.
+with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
+ test.run_gyp('make_global_settings_ar.gyp', '-Dcustom_ar_target=my_ar')
+verify_ar_target(test, ar='my_ar', rel_path=True)
+
+
+# Test 'AR'/'AR.host' in 'make_global_settings'.
+with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
+ test.run_gyp('make_global_settings_ar.gyp',
+ '-Dcustom_ar_target=my_ar_target1',
+ '-Dcustom_ar_host=my_ar_host1')
+verify_ar_target(test, ar='my_ar_target1', rel_path=True)
+verify_ar_host(test, ar='my_ar_host1', rel_path=True)
+
+
+# Test $AR and $AR_host environment variables.
+with TestGyp.LocalEnv({'AR': 'my_ar_target2',
+ 'AR_host': 'my_ar_host2'}):
+ test.run_gyp('make_global_settings_ar.gyp')
+# The ninja generator resolves $AR at gyp time; the make generator doesn't.
+if test.format == 'ninja':
+ if sys.platform == 'win32':
+ # TODO(yukawa): Make sure if this is an expected result or not.
+ verify_ar_target(test, ar='lib.exe', rel_path=False)
+ else:
+ verify_ar_target(test, ar='my_ar_target2', rel_path=False)
+verify_ar_host(test, ar='my_ar_host2', rel_path=False)
+
+
+# Test 'AR' in 'make_global_settings' with $AR_host environment variable.
+with TestGyp.LocalEnv({'AR_host': 'my_ar_host3'}):
+ test.run_gyp('make_global_settings_ar.gyp',
+ '-Dcustom_ar_target=my_ar_target3')
+verify_ar_target(test, ar='my_ar_target3', rel_path=True)
+verify_ar_host(test, ar='my_ar_host3', rel_path=False)
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/make_global_settings/ar/make_global_settings_ar.gyp b/third_party/python/gyp/test/make_global_settings/ar/make_global_settings_ar.gyp
new file mode 100644
index 0000000000..3430d82a51
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/ar/make_global_settings_ar.gyp
@@ -0,0 +1,29 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'custom_ar_target%': '',
+ 'custom_ar_host%': '',
+ },
+ 'conditions': [
+ ['"<(custom_ar_target)"!=""', {
+ 'make_global_settings': [
+ ['AR', '<(custom_ar_target)'],
+ ],
+ }],
+ ['"<(custom_ar_host)"!=""', {
+ 'make_global_settings': [
+ ['AR.host', '<(custom_ar_host)'],
+ ],
+ }],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'make_global_settings_ar_test',
+ 'type': 'static_library',
+ 'sources': [ 'foo.c' ],
+ },
+ ],
+}
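The verify_ar_target()/verify_ar_host() helpers above encode how a make_global_settings ['AR', ...] entry is expected to surface in each generator: the make generator writes an "AR ?= $(abspath <tool>)" line into the Makefile, while the ninja generator writes "ar = <tool>" with the tool path rewritten relative to the build directory. A small self-contained restatement of that expectation, for illustration only:

    # Standalone restatement of resolve_path()/verify_ar_target() above.
    # Not part of the test; generator names and path layout taken from the test.
    import os

    def expected_ar_line(fmt, ar):
        if fmt == 'make':
            # The make generator wraps relative tool paths in $(abspath ...).
            return 'AR ?= $(abspath %s)' % ar
        if fmt in ('ninja', 'xcode-ninja'):
            # build.ninja lives two directories below the source root.
            return 'ar = %s' % os.path.join('..', '..', ar)
        raise ValueError('unsupported format: %s' % fmt)

    print(expected_ar_line('make', 'my_ar'))   # AR ?= $(abspath my_ar)
    print(expected_ar_line('ninja', 'my_ar'))  # ar = ../../my_ar on POSIX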
diff --git a/third_party/python/gyp/test/make_global_settings/basics/gyptest-make_global_settings.py b/third_party/python/gyp/test/make_global_settings/basics/gyptest-make_global_settings.py
new file mode 100644
index 0000000000..8f48875967
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/basics/gyptest-make_global_settings.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies make_global_settings.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+import TestGyp
+
+print("This test is currently disabled: https://crbug.com/483696.")
+sys.exit(0)
+
+test_format = ['ninja']
+if sys.platform.startswith('linux') or sys.platform == 'darwin':
+ test_format += ['make']
+
+test = TestGyp.TestGyp(formats=test_format)
+
+test.run_gyp('make_global_settings.gyp')
+
+if test.format == 'make':
+ cc_expected = """ifneq (,$(filter $(origin CC), undefined default))
+ CC = $(abspath clang)
+endif
+"""
+ if sys.platform.startswith('linux'):
+ link_expected = """
+LINK ?= $(abspath clang)
+"""
+ elif sys.platform == 'darwin':
+ link_expected = """
+LINK ?= $(abspath clang)
+"""
+ test.must_contain('Makefile', cc_expected)
+ test.must_contain('Makefile', link_expected)
+if test.format == 'ninja':
+ cc_expected = 'cc = ' + os.path.join('..', '..', 'clang')
+ ld_expected = 'ld = $cc'
+ if sys.platform == 'win32':
+ ld_expected = 'link.exe'
+ test.must_contain('out/Default/build.ninja', cc_expected)
+ test.must_contain('out/Default/build.ninja', ld_expected)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/make_global_settings/basics/make_global_settings.gyp b/third_party/python/gyp/test/make_global_settings/basics/make_global_settings.gyp
new file mode 100644
index 0000000000..47dbc8570f
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/basics/make_global_settings.gyp
@@ -0,0 +1,17 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'make_global_settings': [
+ ['CC', 'clang'],
+ ['LINK', 'clang'],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'test',
+ 'type': 'static_library',
+ 'sources': [ 'foo.c' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/make_global_settings/env-wrapper/gyptest-wrapper.py b/third_party/python/gyp/test/make_global_settings/env-wrapper/gyptest-wrapper.py
new file mode 100644
index 0000000000..409799e315
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/env-wrapper/gyptest-wrapper.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies *_wrapper in environment.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+import TestGyp
+
+print("This test is currently disabled: https://crbug.com/483696.")
+sys.exit(0)
+
+test_format = ['ninja']
+
+os.environ['CC_wrapper'] = 'distcc'
+os.environ['LINK_wrapper'] = 'distlink'
+os.environ['CC.host_wrapper'] = 'ccache'
+
+test = TestGyp.TestGyp(formats=test_format)
+
+old_env = dict(os.environ)
+os.environ['GYP_CROSSCOMPILE'] = '1'
+test.run_gyp('wrapper.gyp')
+os.environ.clear()
+os.environ.update(old_env)
+
+if test.format == 'ninja':
+ cc_expected = ('cc = ' + os.path.join('..', '..', 'distcc') + ' ' +
+ os.path.join('..', '..', 'clang'))
+ cc_host_expected = ('cc_host = ' + os.path.join('..', '..', 'ccache') + ' ' +
+ os.path.join('..', '..', 'clang'))
+ ld_expected = 'ld = ../../distlink $cc'
+ if sys.platform != 'win32':
+ ldxx_expected = 'ldxx = ../../distlink $cxx'
+
+ if sys.platform == 'win32':
+ ld_expected = 'link.exe'
+ test.must_contain('out/Default/build.ninja', cc_expected)
+ test.must_contain('out/Default/build.ninja', cc_host_expected)
+ test.must_contain('out/Default/build.ninja', ld_expected)
+ if sys.platform != 'win32':
+ test.must_contain('out/Default/build.ninja', ldxx_expected)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/make_global_settings/env-wrapper/wrapper.gyp b/third_party/python/gyp/test/make_global_settings/env-wrapper/wrapper.gyp
new file mode 100644
index 0000000000..1698d71dd4
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/env-wrapper/wrapper.gyp
@@ -0,0 +1,17 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'make_global_settings': [
+ ['CC', 'clang'],
+ ['CC.host', 'clang'],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'test',
+ 'type': 'static_library',
+ 'sources': [ 'foo.c' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/make_global_settings/full-toolchain/bar.cc b/third_party/python/gyp/test/make_global_settings/full-toolchain/bar.cc
new file mode 100644
index 0000000000..afb422ba1a
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/full-toolchain/bar.cc
@@ -0,0 +1 @@
+#error Not a real source file
diff --git a/third_party/python/gyp/test/make_global_settings/full-toolchain/foo.c b/third_party/python/gyp/test/make_global_settings/full-toolchain/foo.c
new file mode 100644
index 0000000000..afb422ba1a
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/full-toolchain/foo.c
@@ -0,0 +1 @@
+#error Not a real source file
diff --git a/third_party/python/gyp/test/make_global_settings/full-toolchain/gyptest-make_global_settings.py b/third_party/python/gyp/test/make_global_settings/full-toolchain/gyptest-make_global_settings.py
new file mode 100644
index 0000000000..542fd631c2
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/full-toolchain/gyptest-make_global_settings.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies make_global_settings works with the full toolchain.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+import TestGyp
+
+if sys.platform == 'win32':
+  # Cross-compiling is not supported by ninja on Windows,
+  # and make is not supported on Windows at all.
+ sys.exit(0)
+
+print("This test is currently disabled: https://crbug.com/483696.")
+sys.exit(0)
+
+test = TestGyp.TestGyp(formats=['ninja'])
+# Must set the test format to something with a flavor (the part after the '-')
+# in order to test the desired behavior. Since we want to run a non-host
+# toolchain, we have to set the flavor to something that the ninja generator
+# doesn't know about, so it doesn't default to the host-specific tools (e.g.,
+# 'otool' on mac to generate the .TOC).
+#
+# Note that we can't just pass format=['ninja-some_toolchain'] to the
+# constructor above, because then this test wouldn't be recognized as a ninja
+# format test.
+test.formats = ['ninja-my_flavor' if f == 'ninja' else f for f in test.formats]
+
+gyp_file = 'make_global_settings.gyp'
+
+test.run_gyp(gyp_file,
+ # Teach the .gyp file about the location of my_nm.py and
+ # my_readelf.py, and the python executable.
+ '-Dworkdir=%s' % test.workdir,
+ '-Dpython=%s' % sys.executable)
+test.build(gyp_file,
+ arguments=['-v'] if test.format == 'ninja-my_flavor' else [])
+
+expected = ['MY_CC', 'MY_CXX']
+test.must_contain_all_lines(test.stdout(), expected)
+
+test.must_contain(test.built_file_path('RAN_MY_NM'), 'RAN_MY_NM')
+test.must_contain(test.built_file_path('RAN_MY_READELF'), 'RAN_MY_READELF')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/make_global_settings/full-toolchain/make_global_settings.gyp b/third_party/python/gyp/test/make_global_settings/full-toolchain/make_global_settings.gyp
new file mode 100644
index 0000000000..2c3266322d
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/full-toolchain/make_global_settings.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'make_global_settings': [
+ ['CC', '/bin/echo MY_CC'],
+ ['CXX', '/bin/echo MY_CXX'],
+ ['NM', '<(python) <(workdir)/my_nm.py'],
+ ['READELF', '<(python) <(workdir)/my_readelf.py'],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'test',
+ 'type': 'shared_library',
+ 'sources': [
+ 'foo.c',
+ 'bar.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/make_global_settings/full-toolchain/my_nm.py b/third_party/python/gyp/test/make_global_settings/full-toolchain/my_nm.py
new file mode 100755
index 0000000000..2c4e678110
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/full-toolchain/my_nm.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import print_function
+import sys
+print(sys.argv)
+with open('RAN_MY_NM', 'w') as f:
+ f.write('RAN_MY_NM')
diff --git a/third_party/python/gyp/test/make_global_settings/full-toolchain/my_readelf.py b/third_party/python/gyp/test/make_global_settings/full-toolchain/my_readelf.py
new file mode 100755
index 0000000000..626665435e
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/full-toolchain/my_readelf.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import print_function
+import sys
+print(sys.argv)
+with open('RAN_MY_READELF', 'w') as f:
+ f.write('RAN_MY_READELF')
diff --git a/third_party/python/gyp/test/make_global_settings/ld/gyptest-make_global_settings_ld.py b/third_party/python/gyp/test/make_global_settings/ld/gyptest-make_global_settings_ld.py
new file mode 100644
index 0000000000..e5f50fbb5b
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/ld/gyptest-make_global_settings_ld.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies 'LD' in make_global_settings.
+"""
+
+import os
+import sys
+import TestGyp
+
+def resolve_path(test, path):
+ if path is None:
+ return None
+ elif test.format == 'make':
+ return '$(abspath %s)' % path
+ elif test.format in ['ninja', 'xcode-ninja']:
+ return os.path.join('..', '..', path)
+ else:
+ test.fail_test()
+
+
+def verify_ld_target(test, ld=None, rel_path=False):
+ if rel_path:
+ ld_expected = resolve_path(test, ld)
+ else:
+ ld_expected = ld
+ # Resolve default values
+ if ld_expected is None:
+ if test.format == 'make':
+ # Make generator hasn't set the default value for LD.
+ # You can remove the following assertion as long as it doesn't
+ # break existing projects.
+ test.must_not_contain('Makefile', 'LD ?= ')
+ return
+ elif test.format in ['ninja', 'xcode-ninja']:
+ if sys.platform == 'win32':
+ ld_expected = 'link.exe'
+ else:
+ ld_expected = '$cc'
+ if test.format == 'make':
+ test.must_contain('Makefile', 'LD ?= %s' % ld_expected)
+ elif test.format in ['ninja', 'xcode-ninja']:
+ test.must_contain('out/Default/build.ninja', 'ld = %s' % ld_expected)
+ else:
+ test.fail_test()
+
+
+def verify_ld_host(test, ld=None, rel_path=False):
+ if rel_path:
+ ld_expected = resolve_path(test, ld)
+ else:
+ ld_expected = ld
+ # Resolve default values
+ if ld_expected is None:
+ if test.format == 'make':
+ # Make generator hasn't set the default value for LD.host.
+ # You can remove the following assertion as long as it doesn't
+ # break existing projects.
+ test.must_not_contain('Makefile', 'LD.host ?= ')
+ return
+ elif test.format in ['ninja', 'xcode-ninja']:
+ if sys.platform == 'win32':
+ ld_expected = '$ld'
+ else:
+ ld_expected = '$cc_host'
+ if test.format == 'make':
+ test.must_contain('Makefile', 'LD.host ?= %s' % ld_expected)
+ elif test.format in ['ninja', 'xcode-ninja']:
+ test.must_contain('out/Default/build.ninja', 'ld_host = %s' % ld_expected)
+ else:
+ test.fail_test()
+
+
+test_format = ['ninja']
+if sys.platform.startswith('linux') or sys.platform == 'darwin':
+ test_format += ['make']
+
+test = TestGyp.TestGyp(formats=test_format)
+
+# Check default values
+test.run_gyp('make_global_settings_ld.gyp')
+verify_ld_target(test)
+
+
+# Check default values with GYP_CROSSCOMPILE enabled.
+with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
+ test.run_gyp('make_global_settings_ld.gyp')
+verify_ld_target(test)
+verify_ld_host(test)
+
+
+# Test 'LD' in 'make_global_settings'.
+with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
+ test.run_gyp('make_global_settings_ld.gyp', '-Dcustom_ld_target=my_ld')
+verify_ld_target(test, ld='my_ld', rel_path=True)
+
+
+# Test 'LD'/'LD.host' in 'make_global_settings'.
+with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
+ test.run_gyp('make_global_settings_ld.gyp',
+ '-Dcustom_ld_target=my_ld_target1',
+ '-Dcustom_ld_host=my_ld_host1')
+verify_ld_target(test, ld='my_ld_target1', rel_path=True)
+verify_ld_host(test, ld='my_ld_host1', rel_path=True)
+
+
+# Unlike other environment variables such as $AR/$AR_host, $CC/$CC_host,
+# and $CXX/$CXX_host, neither Make generator nor Ninja generator recognizes
+# $LD/$LD_host environment variables as of r1935. This may or may not be
+# intentional, but here we leave a test case to verify this behavior just for
+# the record.
+# If you want to support $LD/$LD_host, please revise the following test case as
+# well as the generator.
+with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1',
+ 'LD': 'my_ld_target2',
+ 'LD_host': 'my_ld_host2'}):
+ test.run_gyp('make_global_settings_ld.gyp')
+if test.format == 'make':
+ test.must_not_contain('Makefile', 'my_ld_target2')
+ test.must_not_contain('Makefile', 'my_ld_host2')
+elif test.format == 'ninja':
+ test.must_not_contain('out/Default/build.ninja', 'my_ld_target2')
+ test.must_not_contain('out/Default/build.ninja', 'my_ld_host2')
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/make_global_settings/ld/make_global_settings_ld.gyp b/third_party/python/gyp/test/make_global_settings/ld/make_global_settings_ld.gyp
new file mode 100644
index 0000000000..6837c77326
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/ld/make_global_settings_ld.gyp
@@ -0,0 +1,29 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'custom_ld_target%': '',
+ 'custom_ld_host%': '',
+ },
+ 'conditions': [
+ ['"<(custom_ld_target)"!=""', {
+ 'make_global_settings': [
+ ['LD', '<(custom_ld_target)'],
+ ],
+ }],
+ ['"<(custom_ld_host)"!=""', {
+ 'make_global_settings': [
+ ['LD.host', '<(custom_ld_host)'],
+ ],
+ }],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'make_global_settings_ld_test',
+ 'type': 'static_library',
+ 'sources': [ 'foo.c' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/make_global_settings/wrapper/gyptest-wrapper.py b/third_party/python/gyp/test/make_global_settings/wrapper/gyptest-wrapper.py
new file mode 100644
index 0000000000..7ef4314b3e
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/wrapper/gyptest-wrapper.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies *_wrapper in make_global_settings.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+import TestGyp
+
+print("This test is currently disabled: https://crbug.com/483696.")
+sys.exit(0)
+
+test_format = ['ninja']
+if sys.platform.startswith('linux') or sys.platform == 'darwin':
+ test_format += ['make']
+
+test = TestGyp.TestGyp(formats=test_format)
+
+old_env = dict(os.environ)
+os.environ['GYP_CROSSCOMPILE'] = '1'
+test.run_gyp('wrapper.gyp')
+os.environ.clear()
+os.environ.update(old_env)
+
+if test.format == 'make':
+ cc_expected = """ifneq (,$(filter $(origin CC), undefined default))
+ CC = $(abspath distcc) $(abspath clang)
+endif
+"""
+ link_expected = 'LINK ?= $(abspath distlink) $(abspath clang++)'
+ test.must_contain('Makefile', cc_expected)
+ test.must_contain('Makefile', link_expected)
+if test.format == 'ninja':
+ cc_expected = ('cc = ' + os.path.join('..', '..', 'distcc') + ' ' +
+ os.path.join('..', '..', 'clang'))
+ cc_host_expected = ('cc_host = ' + os.path.join('..', '..', 'ccache') + ' ' +
+ os.path.join('..', '..', 'clang'))
+ ld_expected = 'ld = ../../distlink $cc'
+ if sys.platform == 'win32':
+ ld_expected = 'link.exe'
+ test.must_contain('out/Default/build.ninja', cc_expected)
+ test.must_contain('out/Default/build.ninja', cc_host_expected)
+ test.must_contain('out/Default/build.ninja', ld_expected)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/make_global_settings/wrapper/wrapper.gyp b/third_party/python/gyp/test/make_global_settings/wrapper/wrapper.gyp
new file mode 100644
index 0000000000..3d4cd04b16
--- /dev/null
+++ b/third_party/python/gyp/test/make_global_settings/wrapper/wrapper.gyp
@@ -0,0 +1,21 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'make_global_settings': [
+ ['CC', 'clang'],
+ ['CC_wrapper', 'distcc'],
+ ['LINK', 'clang++'],
+ ['LINK_wrapper', 'distlink'],
+ ['CC.host', 'clang'],
+ ['CC.host_wrapper', 'ccache'],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'test',
+ 'type': 'static_library',
+ 'sources': [ 'foo.c' ],
+ },
+ ],
+}
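Combined with gyptest-wrapper.py above, this file shows the composition rule the test asserts: a *_wrapper entry is simply prepended to the tool it wraps, so CC 'clang' plus CC_wrapper 'distcc' is expected to appear in build.ninja as 'cc = ../../distcc ../../clang'. A tiny illustrative helper (not part of gyp; the function name is made up for this sketch):

    # Illustration of how a wrapper setting composes with its tool in the
    # generated ninja variable, mirroring the cc_expected strings above.
    import os

    def ninja_tool_line(var, tool, wrapper=None):
        parts = [wrapper, tool] if wrapper else [tool]
        rel = [os.path.join('..', '..', p) for p in parts]
        return '%s = %s' % (var, ' '.join(rel))

    print(ninja_tool_line('cc', 'clang', 'distcc'))       # cc = ../../distcc ../../clang on POSIX
    print(ninja_tool_line('cc_host', 'clang', 'ccache'))  # cc_host = ../../ccache ../../clang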
diff --git a/third_party/python/gyp/test/many-actions/file0 b/third_party/python/gyp/test/many-actions/file0
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/many-actions/file0
diff --git a/third_party/python/gyp/test/many-actions/file1 b/third_party/python/gyp/test/many-actions/file1
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/many-actions/file1
diff --git a/third_party/python/gyp/test/many-actions/file2 b/third_party/python/gyp/test/many-actions/file2
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/many-actions/file2
diff --git a/third_party/python/gyp/test/many-actions/file3 b/third_party/python/gyp/test/many-actions/file3
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/many-actions/file3
diff --git a/third_party/python/gyp/test/many-actions/file4 b/third_party/python/gyp/test/many-actions/file4
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/many-actions/file4
diff --git a/third_party/python/gyp/test/many-actions/gyptest-many-actions-unsorted.py b/third_party/python/gyp/test/many-actions/gyptest-many-actions-unsorted.py
new file mode 100644
index 0000000000..6927d1c7a7
--- /dev/null
+++ b/third_party/python/gyp/test/many-actions/gyptest-many-actions-unsorted.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure lots of actions in the same target don't cause the command line
+length limit to be exceeded.
+"""
+
+from __future__ import print_function
+
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('many-actions-unsorted.gyp')
+
+test.build('many-actions-unsorted.gyp', test.ALL)
+for i in range(15):
+ test.built_file_must_exist('generated_%d.h' % i)
+
+# Make sure the optimized cygwin setup doesn't cause problems for incremental
+# builds.
+test.touch('file1')
+test.build('many-actions-unsorted.gyp', test.ALL)
+
+test.touch('file0')
+test.build('many-actions-unsorted.gyp', test.ALL)
+
+test.touch('file2')
+test.touch('file3')
+test.touch('file4')
+test.build('many-actions-unsorted.gyp', test.ALL)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/many-actions/gyptest-many-actions.py b/third_party/python/gyp/test/many-actions/gyptest-many-actions.py
new file mode 100644
index 0000000000..4a525d32d6
--- /dev/null
+++ b/third_party/python/gyp/test/many-actions/gyptest-many-actions.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure lots of actions in the same target don't cause the command line
+length limit to be exceeded.
+"""
+
+from __future__ import print_function
+
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('many-actions.gyp')
+test.build('many-actions.gyp', test.ALL)
+for i in range(200):
+ test.built_file_must_exist('generated_%d.h' % i)
+test.pass_test()
diff --git a/third_party/python/gyp/test/many-actions/many-actions-unsorted.gyp b/third_party/python/gyp/test/many-actions/many-actions-unsorted.gyp
new file mode 100644
index 0000000000..eec79fe8d8
--- /dev/null
+++ b/third_party/python/gyp/test/many-actions/many-actions-unsorted.gyp
@@ -0,0 +1,154 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'msvs_cygwin_dirs': ['../../../../<(DEPTH)/third_party/cygwin'],
+ },
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'none',
+ 'actions': [
+ # Notice that the inputs go 0, 1, ..., 0, 1, .... This is to test
+ # a regression in the msvs generator in _AddActions.
+ {
+ 'action_name': 'do_0',
+ 'inputs': ['file0'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_0.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_0.h',
+ ],
+ },
+ {
+ 'action_name': 'do_1',
+ 'inputs': ['file1'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_1.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_1.h',
+ ],
+ },
+ {
+ 'action_name': 'do_2',
+ 'inputs': ['file2'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_2.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_2.h',
+ ],
+ },
+ {
+ 'action_name': 'do_3',
+ 'inputs': ['file3'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_3.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_3.h',
+ ],
+ },
+ {
+ 'action_name': 'do_4',
+ 'inputs': ['file4'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_4.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_4.h',
+ ],
+ },
+ {
+ 'action_name': 'do_5',
+ 'inputs': ['file0'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_5.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_5.h',
+ ],
+ },
+ {
+ 'action_name': 'do_6',
+ 'inputs': ['file1'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_6.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_6.h',
+ ],
+ },
+ {
+ 'action_name': 'do_7',
+ 'inputs': ['file2'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_7.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_7.h',
+ ],
+ },
+ {
+ 'action_name': 'do_8',
+ 'inputs': ['file3'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_8.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_8.h',
+ ],
+ },
+ {
+ 'action_name': 'do_9',
+ 'inputs': ['file4'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_9.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_9.h',
+ ],
+ },
+ {
+ 'action_name': 'do_10',
+ 'inputs': ['file0'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_10.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_10.h',
+ ],
+ },
+ {
+ 'action_name': 'do_11',
+ 'inputs': ['file1'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_11.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_11.h',
+ ],
+ },
+ {
+ 'action_name': 'do_12',
+ 'inputs': ['file2'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_12.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_12.h',
+ ],
+ },
+ {
+ 'action_name': 'do_13',
+ 'inputs': ['file3'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_13.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_13.h',
+ ],
+ },
+ {
+ 'action_name': 'do_14',
+ 'inputs': ['file4'],
+ 'outputs': ['<(PRODUCT_DIR)/generated_14.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_14.h',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/many-actions/many-actions.gyp b/third_party/python/gyp/test/many-actions/many-actions.gyp
new file mode 100644
index 0000000000..38545d2d88
--- /dev/null
+++ b/third_party/python/gyp/test/many-actions/many-actions.gyp
@@ -0,0 +1,1817 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'msvs_cygwin_dirs': ['../../../../<(DEPTH)/third_party/cygwin'],
+ },
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'do_0',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_0.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_0.h',
+ ],
+ },
+ {
+ 'action_name': 'do_1',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_1.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_1.h',
+ ],
+ },
+ {
+ 'action_name': 'do_2',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_2.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_2.h',
+ ],
+ },
+ {
+ 'action_name': 'do_3',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_3.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_3.h',
+ ],
+ },
+ {
+ 'action_name': 'do_4',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_4.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_4.h',
+ ],
+ },
+ {
+ 'action_name': 'do_5',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_5.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_5.h',
+ ],
+ },
+ {
+ 'action_name': 'do_6',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_6.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_6.h',
+ ],
+ },
+ {
+ 'action_name': 'do_7',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_7.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_7.h',
+ ],
+ },
+ {
+ 'action_name': 'do_8',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_8.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_8.h',
+ ],
+ },
+ {
+ 'action_name': 'do_9',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_9.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_9.h',
+ ],
+ },
+ {
+ 'action_name': 'do_10',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_10.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_10.h',
+ ],
+ },
+ {
+ 'action_name': 'do_11',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_11.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_11.h',
+ ],
+ },
+ {
+ 'action_name': 'do_12',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_12.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_12.h',
+ ],
+ },
+ {
+ 'action_name': 'do_13',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_13.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_13.h',
+ ],
+ },
+ {
+ 'action_name': 'do_14',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_14.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_14.h',
+ ],
+ },
+ {
+ 'action_name': 'do_15',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_15.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_15.h',
+ ],
+ },
+ {
+ 'action_name': 'do_16',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_16.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_16.h',
+ ],
+ },
+ {
+ 'action_name': 'do_17',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_17.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_17.h',
+ ],
+ },
+ {
+ 'action_name': 'do_18',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_18.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_18.h',
+ ],
+ },
+ {
+ 'action_name': 'do_19',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_19.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_19.h',
+ ],
+ },
+ {
+ 'action_name': 'do_20',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_20.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_20.h',
+ ],
+ },
+ {
+ 'action_name': 'do_21',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_21.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_21.h',
+ ],
+ },
+ {
+ 'action_name': 'do_22',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_22.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_22.h',
+ ],
+ },
+ {
+ 'action_name': 'do_23',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_23.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_23.h',
+ ],
+ },
+ {
+ 'action_name': 'do_24',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_24.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_24.h',
+ ],
+ },
+ {
+ 'action_name': 'do_25',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_25.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_25.h',
+ ],
+ },
+ {
+ 'action_name': 'do_26',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_26.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_26.h',
+ ],
+ },
+ {
+ 'action_name': 'do_27',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_27.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_27.h',
+ ],
+ },
+ {
+ 'action_name': 'do_28',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_28.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_28.h',
+ ],
+ },
+ {
+ 'action_name': 'do_29',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_29.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_29.h',
+ ],
+ },
+ {
+ 'action_name': 'do_30',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_30.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_30.h',
+ ],
+ },
+ {
+ 'action_name': 'do_31',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_31.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_31.h',
+ ],
+ },
+ {
+ 'action_name': 'do_32',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_32.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_32.h',
+ ],
+ },
+ {
+ 'action_name': 'do_33',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_33.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_33.h',
+ ],
+ },
+ {
+ 'action_name': 'do_34',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_34.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_34.h',
+ ],
+ },
+ {
+ 'action_name': 'do_35',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_35.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_35.h',
+ ],
+ },
+ {
+ 'action_name': 'do_36',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_36.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_36.h',
+ ],
+ },
+ {
+ 'action_name': 'do_37',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_37.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_37.h',
+ ],
+ },
+ {
+ 'action_name': 'do_38',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_38.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_38.h',
+ ],
+ },
+ {
+ 'action_name': 'do_39',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_39.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_39.h',
+ ],
+ },
+ {
+ 'action_name': 'do_40',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_40.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_40.h',
+ ],
+ },
+ {
+ 'action_name': 'do_41',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_41.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_41.h',
+ ],
+ },
+ {
+ 'action_name': 'do_42',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_42.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_42.h',
+ ],
+ },
+ {
+ 'action_name': 'do_43',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_43.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_43.h',
+ ],
+ },
+ {
+ 'action_name': 'do_44',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_44.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_44.h',
+ ],
+ },
+ {
+ 'action_name': 'do_45',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_45.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_45.h',
+ ],
+ },
+ {
+ 'action_name': 'do_46',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_46.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_46.h',
+ ],
+ },
+ {
+ 'action_name': 'do_47',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_47.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_47.h',
+ ],
+ },
+ {
+ 'action_name': 'do_48',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_48.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_48.h',
+ ],
+ },
+ {
+ 'action_name': 'do_49',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_49.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_49.h',
+ ],
+ },
+ {
+ 'action_name': 'do_50',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_50.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_50.h',
+ ],
+ },
+ {
+ 'action_name': 'do_51',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_51.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_51.h',
+ ],
+ },
+ {
+ 'action_name': 'do_52',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_52.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_52.h',
+ ],
+ },
+ {
+ 'action_name': 'do_53',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_53.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_53.h',
+ ],
+ },
+ {
+ 'action_name': 'do_54',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_54.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_54.h',
+ ],
+ },
+ {
+ 'action_name': 'do_55',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_55.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_55.h',
+ ],
+ },
+ {
+ 'action_name': 'do_56',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_56.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_56.h',
+ ],
+ },
+ {
+ 'action_name': 'do_57',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_57.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_57.h',
+ ],
+ },
+ {
+ 'action_name': 'do_58',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_58.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_58.h',
+ ],
+ },
+ {
+ 'action_name': 'do_59',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_59.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_59.h',
+ ],
+ },
+ {
+ 'action_name': 'do_60',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_60.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_60.h',
+ ],
+ },
+ {
+ 'action_name': 'do_61',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_61.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_61.h',
+ ],
+ },
+ {
+ 'action_name': 'do_62',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_62.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_62.h',
+ ],
+ },
+ {
+ 'action_name': 'do_63',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_63.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_63.h',
+ ],
+ },
+ {
+ 'action_name': 'do_64',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_64.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_64.h',
+ ],
+ },
+ {
+ 'action_name': 'do_65',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_65.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_65.h',
+ ],
+ },
+ {
+ 'action_name': 'do_66',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_66.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_66.h',
+ ],
+ },
+ {
+ 'action_name': 'do_67',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_67.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_67.h',
+ ],
+ },
+ {
+ 'action_name': 'do_68',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_68.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_68.h',
+ ],
+ },
+ {
+ 'action_name': 'do_69',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_69.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_69.h',
+ ],
+ },
+ {
+ 'action_name': 'do_70',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_70.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_70.h',
+ ],
+ },
+ {
+ 'action_name': 'do_71',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_71.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_71.h',
+ ],
+ },
+ {
+ 'action_name': 'do_72',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_72.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_72.h',
+ ],
+ },
+ {
+ 'action_name': 'do_73',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_73.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_73.h',
+ ],
+ },
+ {
+ 'action_name': 'do_74',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_74.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_74.h',
+ ],
+ },
+ {
+ 'action_name': 'do_75',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_75.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_75.h',
+ ],
+ },
+ {
+ 'action_name': 'do_76',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_76.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_76.h',
+ ],
+ },
+ {
+ 'action_name': 'do_77',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_77.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_77.h',
+ ],
+ },
+ {
+ 'action_name': 'do_78',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_78.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_78.h',
+ ],
+ },
+ {
+ 'action_name': 'do_79',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_79.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_79.h',
+ ],
+ },
+ {
+ 'action_name': 'do_80',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_80.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_80.h',
+ ],
+ },
+ {
+ 'action_name': 'do_81',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_81.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_81.h',
+ ],
+ },
+ {
+ 'action_name': 'do_82',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_82.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_82.h',
+ ],
+ },
+ {
+ 'action_name': 'do_83',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_83.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_83.h',
+ ],
+ },
+ {
+ 'action_name': 'do_84',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_84.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_84.h',
+ ],
+ },
+ {
+ 'action_name': 'do_85',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_85.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_85.h',
+ ],
+ },
+ {
+ 'action_name': 'do_86',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_86.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_86.h',
+ ],
+ },
+ {
+ 'action_name': 'do_87',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_87.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_87.h',
+ ],
+ },
+ {
+ 'action_name': 'do_88',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_88.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_88.h',
+ ],
+ },
+ {
+ 'action_name': 'do_89',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_89.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_89.h',
+ ],
+ },
+ {
+ 'action_name': 'do_90',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_90.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_90.h',
+ ],
+ },
+ {
+ 'action_name': 'do_91',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_91.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_91.h',
+ ],
+ },
+ {
+ 'action_name': 'do_92',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_92.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_92.h',
+ ],
+ },
+ {
+ 'action_name': 'do_93',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_93.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_93.h',
+ ],
+ },
+ {
+ 'action_name': 'do_94',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_94.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_94.h',
+ ],
+ },
+ {
+ 'action_name': 'do_95',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_95.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_95.h',
+ ],
+ },
+ {
+ 'action_name': 'do_96',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_96.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_96.h',
+ ],
+ },
+ {
+ 'action_name': 'do_97',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_97.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_97.h',
+ ],
+ },
+ {
+ 'action_name': 'do_98',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_98.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_98.h',
+ ],
+ },
+ {
+ 'action_name': 'do_99',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_99.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_99.h',
+ ],
+ },
+ {
+ 'action_name': 'do_100',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_100.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_100.h',
+ ],
+ },
+ {
+ 'action_name': 'do_101',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_101.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_101.h',
+ ],
+ },
+ {
+ 'action_name': 'do_102',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_102.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_102.h',
+ ],
+ },
+ {
+ 'action_name': 'do_103',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_103.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_103.h',
+ ],
+ },
+ {
+ 'action_name': 'do_104',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_104.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_104.h',
+ ],
+ },
+ {
+ 'action_name': 'do_105',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_105.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_105.h',
+ ],
+ },
+ {
+ 'action_name': 'do_106',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_106.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_106.h',
+ ],
+ },
+ {
+ 'action_name': 'do_107',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_107.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_107.h',
+ ],
+ },
+ {
+ 'action_name': 'do_108',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_108.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_108.h',
+ ],
+ },
+ {
+ 'action_name': 'do_109',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_109.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_109.h',
+ ],
+ },
+ {
+ 'action_name': 'do_110',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_110.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_110.h',
+ ],
+ },
+ {
+ 'action_name': 'do_111',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_111.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_111.h',
+ ],
+ },
+ {
+ 'action_name': 'do_112',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_112.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_112.h',
+ ],
+ },
+ {
+ 'action_name': 'do_113',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_113.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_113.h',
+ ],
+ },
+ {
+ 'action_name': 'do_114',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_114.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_114.h',
+ ],
+ },
+ {
+ 'action_name': 'do_115',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_115.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_115.h',
+ ],
+ },
+ {
+ 'action_name': 'do_116',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_116.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_116.h',
+ ],
+ },
+ {
+ 'action_name': 'do_117',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_117.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_117.h',
+ ],
+ },
+ {
+ 'action_name': 'do_118',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_118.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_118.h',
+ ],
+ },
+ {
+ 'action_name': 'do_119',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_119.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_119.h',
+ ],
+ },
+ {
+ 'action_name': 'do_120',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_120.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_120.h',
+ ],
+ },
+ {
+ 'action_name': 'do_121',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_121.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_121.h',
+ ],
+ },
+ {
+ 'action_name': 'do_122',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_122.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_122.h',
+ ],
+ },
+ {
+ 'action_name': 'do_123',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_123.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_123.h',
+ ],
+ },
+ {
+ 'action_name': 'do_124',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_124.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_124.h',
+ ],
+ },
+ {
+ 'action_name': 'do_125',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_125.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_125.h',
+ ],
+ },
+ {
+ 'action_name': 'do_126',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_126.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_126.h',
+ ],
+ },
+ {
+ 'action_name': 'do_127',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_127.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_127.h',
+ ],
+ },
+ {
+ 'action_name': 'do_128',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_128.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_128.h',
+ ],
+ },
+ {
+ 'action_name': 'do_129',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_129.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_129.h',
+ ],
+ },
+ {
+ 'action_name': 'do_130',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_130.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_130.h',
+ ],
+ },
+ {
+ 'action_name': 'do_131',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_131.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_131.h',
+ ],
+ },
+ {
+ 'action_name': 'do_132',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_132.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_132.h',
+ ],
+ },
+ {
+ 'action_name': 'do_133',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_133.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_133.h',
+ ],
+ },
+ {
+ 'action_name': 'do_134',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_134.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_134.h',
+ ],
+ },
+ {
+ 'action_name': 'do_135',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_135.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_135.h',
+ ],
+ },
+ {
+ 'action_name': 'do_136',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_136.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_136.h',
+ ],
+ },
+ {
+ 'action_name': 'do_137',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_137.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_137.h',
+ ],
+ },
+ {
+ 'action_name': 'do_138',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_138.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_138.h',
+ ],
+ },
+ {
+ 'action_name': 'do_139',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_139.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_139.h',
+ ],
+ },
+ {
+ 'action_name': 'do_140',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_140.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_140.h',
+ ],
+ },
+ {
+ 'action_name': 'do_141',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_141.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_141.h',
+ ],
+ },
+ {
+ 'action_name': 'do_142',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_142.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_142.h',
+ ],
+ },
+ {
+ 'action_name': 'do_143',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_143.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_143.h',
+ ],
+ },
+ {
+ 'action_name': 'do_144',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_144.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_144.h',
+ ],
+ },
+ {
+ 'action_name': 'do_145',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_145.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_145.h',
+ ],
+ },
+ {
+ 'action_name': 'do_146',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_146.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_146.h',
+ ],
+ },
+ {
+ 'action_name': 'do_147',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_147.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_147.h',
+ ],
+ },
+ {
+ 'action_name': 'do_148',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_148.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_148.h',
+ ],
+ },
+ {
+ 'action_name': 'do_149',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_149.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_149.h',
+ ],
+ },
+ {
+ 'action_name': 'do_150',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_150.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_150.h',
+ ],
+ },
+ {
+ 'action_name': 'do_151',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_151.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_151.h',
+ ],
+ },
+ {
+ 'action_name': 'do_152',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_152.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_152.h',
+ ],
+ },
+ {
+ 'action_name': 'do_153',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_153.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_153.h',
+ ],
+ },
+ {
+ 'action_name': 'do_154',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_154.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_154.h',
+ ],
+ },
+ {
+ 'action_name': 'do_155',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_155.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_155.h',
+ ],
+ },
+ {
+ 'action_name': 'do_156',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_156.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_156.h',
+ ],
+ },
+ {
+ 'action_name': 'do_157',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_157.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_157.h',
+ ],
+ },
+ {
+ 'action_name': 'do_158',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_158.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_158.h',
+ ],
+ },
+ {
+ 'action_name': 'do_159',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_159.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_159.h',
+ ],
+ },
+ {
+ 'action_name': 'do_160',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_160.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_160.h',
+ ],
+ },
+ {
+ 'action_name': 'do_161',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_161.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_161.h',
+ ],
+ },
+ {
+ 'action_name': 'do_162',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_162.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_162.h',
+ ],
+ },
+ {
+ 'action_name': 'do_163',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_163.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_163.h',
+ ],
+ },
+ {
+ 'action_name': 'do_164',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_164.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_164.h',
+ ],
+ },
+ {
+ 'action_name': 'do_165',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_165.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_165.h',
+ ],
+ },
+ {
+ 'action_name': 'do_166',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_166.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_166.h',
+ ],
+ },
+ {
+ 'action_name': 'do_167',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_167.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_167.h',
+ ],
+ },
+ {
+ 'action_name': 'do_168',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_168.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_168.h',
+ ],
+ },
+ {
+ 'action_name': 'do_169',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_169.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_169.h',
+ ],
+ },
+ {
+ 'action_name': 'do_170',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_170.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_170.h',
+ ],
+ },
+ {
+ 'action_name': 'do_171',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_171.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_171.h',
+ ],
+ },
+ {
+ 'action_name': 'do_172',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_172.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_172.h',
+ ],
+ },
+ {
+ 'action_name': 'do_173',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_173.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_173.h',
+ ],
+ },
+ {
+ 'action_name': 'do_174',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_174.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_174.h',
+ ],
+ },
+ {
+ 'action_name': 'do_175',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_175.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_175.h',
+ ],
+ },
+ {
+ 'action_name': 'do_176',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_176.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_176.h',
+ ],
+ },
+ {
+ 'action_name': 'do_177',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_177.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_177.h',
+ ],
+ },
+ {
+ 'action_name': 'do_178',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_178.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_178.h',
+ ],
+ },
+ {
+ 'action_name': 'do_179',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_179.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_179.h',
+ ],
+ },
+ {
+ 'action_name': 'do_180',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_180.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_180.h',
+ ],
+ },
+ {
+ 'action_name': 'do_181',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_181.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_181.h',
+ ],
+ },
+ {
+ 'action_name': 'do_182',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_182.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_182.h',
+ ],
+ },
+ {
+ 'action_name': 'do_183',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_183.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_183.h',
+ ],
+ },
+ {
+ 'action_name': 'do_184',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_184.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_184.h',
+ ],
+ },
+ {
+ 'action_name': 'do_185',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_185.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_185.h',
+ ],
+ },
+ {
+ 'action_name': 'do_186',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_186.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_186.h',
+ ],
+ },
+ {
+ 'action_name': 'do_187',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_187.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_187.h',
+ ],
+ },
+ {
+ 'action_name': 'do_188',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_188.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_188.h',
+ ],
+ },
+ {
+ 'action_name': 'do_189',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_189.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_189.h',
+ ],
+ },
+ {
+ 'action_name': 'do_190',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_190.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_190.h',
+ ],
+ },
+ {
+ 'action_name': 'do_191',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_191.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_191.h',
+ ],
+ },
+ {
+ 'action_name': 'do_192',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_192.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_192.h',
+ ],
+ },
+ {
+ 'action_name': 'do_193',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_193.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_193.h',
+ ],
+ },
+ {
+ 'action_name': 'do_194',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_194.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_194.h',
+ ],
+ },
+ {
+ 'action_name': 'do_195',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_195.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_195.h',
+ ],
+ },
+ {
+ 'action_name': 'do_196',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_196.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_196.h',
+ ],
+ },
+ {
+ 'action_name': 'do_197',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_197.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_197.h',
+ ],
+ },
+ {
+ 'action_name': 'do_198',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_198.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_198.h',
+ ],
+ },
+ {
+ 'action_name': 'do_199',
+ 'inputs': [],
+ 'outputs': ['<(PRODUCT_DIR)/generated_199.h'],
+ 'action': [
+ 'touch',
+ '<(PRODUCT_DIR)/generated_199.h',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/module/gyptest-default.py b/third_party/python/gyp/test/module/gyptest-default.py
new file mode 100755
index 0000000000..7fecf3ca4d
--- /dev/null
+++ b/third_party/python/gyp/test/module/gyptest-default.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple build of a "Hello, world!" program with loadable modules. The
+default for all platforms should be to output the loadable modules to the same
+path as the executable.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('module.gyp', chdir='src')
+
+test.build('module.gyp', test.ALL, chdir='src')
+
+expect = """\
+Hello from program.c
+Hello from lib1.c
+Hello from lib2.c
+"""
+test.run_built_executable('program', chdir='src', stdout=expect)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/module/src/lib1.c b/third_party/python/gyp/test/module/src/lib1.c
new file mode 100644
index 0000000000..8de0e94bee
--- /dev/null
+++ b/third_party/python/gyp/test/module/src/lib1.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void module_main(void)
+{
+ fprintf(stdout, "Hello from lib1.c\n");
+ fflush(stdout);
+}
diff --git a/third_party/python/gyp/test/module/src/lib2.c b/third_party/python/gyp/test/module/src/lib2.c
new file mode 100644
index 0000000000..266396dc91
--- /dev/null
+++ b/third_party/python/gyp/test/module/src/lib2.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void module_main(void)
+{
+ fprintf(stdout, "Hello from lib2.c\n");
+ fflush(stdout);
+}
diff --git a/third_party/python/gyp/test/module/src/module.gyp b/third_party/python/gyp/test/module/src/module.gyp
new file mode 100644
index 0000000000..2bc398bb3b
--- /dev/null
+++ b/third_party/python/gyp/test/module/src/module.gyp
@@ -0,0 +1,53 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'conditions': [
+ ['OS=="win"', {
+ 'defines': ['PLATFORM_WIN'],
+ }],
+ ['OS=="mac" or OS=="ios"', {
+ 'defines': ['PLATFORM_MAC'],
+ }],
+ ['OS=="linux"', {
+ 'defines': ['PLATFORM_LINUX'],
+ # Support 64-bit shared libs (also works fine for 32-bit).
+ 'cflags': ['-fPIC'],
+ 'libraries': ['-ldl'],
+ }],
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'dependencies': [
+ 'lib1',
+ 'lib2',
+ ],
+ 'sources': [
+ 'program.c',
+ ],
+ },
+ {
+ 'target_name': 'lib1',
+ 'type': 'loadable_module',
+ 'product_name': 'lib1',
+ 'product_prefix': '',
+ 'sources': [
+ 'lib1.c',
+ ],
+ },
+ {
+ 'target_name': 'lib2',
+ 'product_name': 'lib2',
+ 'product_prefix': '',
+ 'type': 'loadable_module',
+ 'sources': [
+ 'lib2.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/module/src/program.c b/third_party/python/gyp/test/module/src/program.c
new file mode 100644
index 0000000000..7cc3dd3466
--- /dev/null
+++ b/third_party/python/gyp/test/module/src/program.c
@@ -0,0 +1,111 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+#if defined(PLATFORM_WIN)
+#include <windows.h>
+#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
+#include <dlfcn.h>
+#include <libgen.h>
+#include <string.h>
+#include <sys/param.h>
+#define MAX_PATH PATH_MAX
+#endif
+
+#if defined(PLATFORM_WIN)
+#define MODULE_SUFFIX ".dll"
+#elif defined(PLATFORM_MAC)
+#define MODULE_SUFFIX ".so"
+#elif defined(PLATFORM_LINUX)
+#define MODULE_SUFFIX ".so"
+#endif
+
+typedef void (*module_symbol)(void);
+char bin_path[MAX_PATH + 1];
+
+
+void CallModule(const char* module) {
+ char module_path[MAX_PATH + 1];
+ const char* module_function = "module_main";
+ module_symbol funcptr;
+#if defined(PLATFORM_WIN)
+ HMODULE dl;
+ char drive[_MAX_DRIVE];
+ char dir[_MAX_DIR];
+
+ if (_splitpath_s(bin_path, drive, _MAX_DRIVE, dir, _MAX_DIR,
+ NULL, 0, NULL, 0)) {
+ fprintf(stderr, "Failed to split executable path.\n");
+ return;
+ }
+ if (_makepath_s(module_path, MAX_PATH, drive, dir, module, MODULE_SUFFIX)) {
+ fprintf(stderr, "Failed to calculate module path.\n");
+ return;
+ }
+
+ dl = LoadLibrary(module_path);
+ if (!dl) {
+ fprintf(stderr, "Failed to open module: %s\n", module_path);
+ return;
+ }
+
+ funcptr = (module_symbol) GetProcAddress(dl, module_function);
+ if (!funcptr) {
+ fprintf(stderr, "Failed to find symbol: %s\n", module_function);
+ return;
+ }
+ funcptr();
+
+ FreeLibrary(dl);
+#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
+ void* dl;
+ char* path_copy = strdup(bin_path);
+ char* bin_dir = dirname(path_copy);
+ int path_size = snprintf(module_path, MAX_PATH, "%s/%s%s", bin_dir, module,
+ MODULE_SUFFIX);
+ free(path_copy);
+ if (path_size < 0 || path_size > MAX_PATH) {
+ fprintf(stderr, "Failed to calculate module path.\n");
+ return;
+ }
+ module_path[path_size] = 0;
+
+ dl = dlopen(module_path, RTLD_LAZY);
+ if (!dl) {
+ fprintf(stderr, "Failed to open module: %s\n", module_path);
+ return;
+ }
+
+ funcptr = dlsym(dl, module_function);
+ if (!funcptr) {
+ fprintf(stderr, "Failed to find symbol: %s\n", module_function);
+ return;
+ }
+ funcptr();
+
+ dlclose(dl);
+#endif
+}
+
+int main(int argc, char *argv[])
+{
+ fprintf(stdout, "Hello from program.c\n");
+ fflush(stdout);
+
+#if defined(PLATFORM_WIN)
+ if (!GetModuleFileName(NULL, bin_path, MAX_PATH)) {
+ fprintf(stderr, "Failed to determine executable path.\n");
+ return 1;
+ }
+#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
+  // Using argv[0] should be OK here since we control how the tests run, and
+  // can avoid exec()-related issues that would make it unreliable.
+ if (!realpath(argv[0], bin_path)) {
+ fprintf(stderr, "Failed to determine executable path (%s).\n", argv[0]);
+ return 1;
+ }
+#endif
+
+ CallModule("lib1");
+ CallModule("lib2");
+ return 0;
+}
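For orientation, a minimal sketch of what program.c does, written in Python with ctypes (an illustrative assumption only, not part of the patch): resolve each loadable module next to the executable, load it, and call its exported module_main symbol. The '.so' suffix assumes a Linux-style layout.

import ctypes
import os
import sys

def call_module(name):
    # Mirror CallModule(): the module is expected next to the executable,
    # which is the default output layout this test verifies.
    bin_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
    dl = ctypes.CDLL(os.path.join(bin_dir, name + '.so'))  # dlopen()
    dl.module_main()                                        # dlsym() + call

print('Hello from program.c')  # gyptest-default.py matches this exact output
call_module('lib1')
call_module('lib2')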
diff --git a/third_party/python/gyp/test/msvs/buildevents/buildevents.gyp b/third_party/python/gyp/test/msvs/buildevents/buildevents.gyp
new file mode 100644
index 0000000000..e0304dd5c6
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/buildevents/buildevents.gyp
@@ -0,0 +1,14 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'main',
+ 'type': 'executable',
+ 'sources': [ 'main.cc', ],
+ 'msvs_prebuild': r'echo starting',
+ 'msvs_postbuild': r'echo finished',
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/msvs/buildevents/gyptest-msbuild-supports-prepostbuild.py b/third_party/python/gyp/test/msvs/buildevents/gyptest-msbuild-supports-prepostbuild.py
new file mode 100755
index 0000000000..208f434560
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/buildevents/gyptest-msbuild-supports-prepostbuild.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that msvs_prebuild and msvs_postbuild can be specified in both
+VS 2008 and 2010.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['msvs'], workdir='workarea_all')
+
+test.run_gyp('buildevents.gyp', '-G', 'msvs_version=2008')
+test.must_contain('main.vcproj', 'Name="VCPreBuildEventTool"')
+test.must_contain('main.vcproj', 'Name="VCPostBuildEventTool"')
+
+test.run_gyp('buildevents.gyp', '-G', 'msvs_version=2010')
+test.must_contain('main.vcxproj', '<PreBuildEvent>')
+test.must_contain('main.vcxproj', '<PostBuildEvent>')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/msvs/buildevents/gyptest-ninja-warnings.py b/third_party/python/gyp/test/msvs/buildevents/gyptest-ninja-warnings.py
new file mode 100755
index 0000000000..be4ec9921a
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/buildevents/gyptest-ninja-warnings.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that ninja errors out when encountering msvs_prebuild/msvs_postbuild.
+"""
+
+import sys
+import TestCmd
+import TestGyp
+
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ test.run_gyp('buildevents.gyp',
+ status=1,
+ stderr=r'.*msvs_prebuild not supported \(target main\).*',
+ match=TestCmd.match_re_dotall)
+
+ test.run_gyp('buildevents.gyp',
+ status=1,
+ stderr=r'.*msvs_postbuild not supported \(target main\).*',
+ match=TestCmd.match_re_dotall)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/msvs/buildevents/main.cc b/third_party/python/gyp/test/msvs/buildevents/main.cc
new file mode 100644
index 0000000000..03c0285a6c
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/buildevents/main.cc
@@ -0,0 +1,5 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {}
diff --git a/third_party/python/gyp/test/msvs/config_attrs/gyptest-config_attrs.py b/third_party/python/gyp/test/msvs/config_attrs/gyptest-config_attrs.py
new file mode 100644
index 0000000000..29a8022bd4
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/config_attrs/gyptest-config_attrs.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that msvs_configuration_attributes and
+msbuild_configuration_attributes are applied by using
+them to set the OutputDirectory.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+import os
+
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
+
+test = TestGyp.TestGyp(workdir='workarea_all',formats=['msvs'])
+
+vc_version = 'VC90'
+
+if os.getenv('GYP_MSVS_VERSION'):
+ vc_version = ['VC90','VC100'][int(os.getenv('GYP_MSVS_VERSION')) >= 2010]
+
+expected_exe_file = os.path.join(test.workdir, vc_version, 'hello.exe')
+
+test.run_gyp('hello.gyp')
+
+test.build('hello.gyp')
+
+test.must_exist(expected_exe_file)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/msvs/config_attrs/hello.c b/third_party/python/gyp/test/msvs/config_attrs/hello.c
new file mode 100644
index 0000000000..faadc75e2c
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/config_attrs/hello.c
@@ -0,0 +1,11 @@
+/* Copyright (c) 2012 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello, world!\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/msvs/config_attrs/hello.gyp b/third_party/python/gyp/test/msvs/config_attrs/hello.gyp
new file mode 100644
index 0000000000..810a80edd8
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/config_attrs/hello.gyp
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ 'msvs_configuration_attributes': {
+ 'OutputDirectory':'$(SolutionDir)VC90/'
+ },
+ 'msbuild_configuration_attributes': {
+ 'OutputDirectory':'$(SolutionDir)VC100/',
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/msvs/express/base/base.gyp b/third_party/python/gyp/test/msvs/express/base/base.gyp
new file mode 100644
index 0000000000..b7c9fc6d81
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/express/base/base.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'static_library',
+ 'sources': [
+ 'a.c',
+ ],
+ },
+ {
+ 'target_name': 'b',
+ 'type': 'static_library',
+ 'sources': [
+ 'b.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/msvs/express/express.gyp b/third_party/python/gyp/test/msvs/express/express.gyp
new file mode 100644
index 0000000000..917abe2cc0
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/express/express.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'express',
+ 'type': 'executable',
+ 'dependencies': [
+ 'base/base.gyp:a',
+ 'base/base.gyp:b',
+ ],
+ 'sources': [
+ 'main.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/msvs/express/gyptest-express.py b/third_party/python/gyp/test/msvs/express/gyptest-express.py
new file mode 100755
index 0000000000..54c06f664a
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/express/gyptest-express.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that flat solutions get generated for Express versions of
+Visual Studio.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['msvs'])
+
+test.run_gyp('express.gyp', '-G', 'msvs_version=2005')
+test.must_contain('express.sln', '(base)')
+
+test.run_gyp('express.gyp', '-G', 'msvs_version=2008')
+test.must_contain('express.sln', '(base)')
+
+test.run_gyp('express.gyp', '-G', 'msvs_version=2005e')
+test.must_not_contain('express.sln', '(base)')
+
+test.run_gyp('express.gyp', '-G', 'msvs_version=2008e')
+test.must_not_contain('express.sln', '(base)')
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/msvs/external_builder/external.gyp b/third_party/python/gyp/test/msvs/external_builder/external.gyp
new file mode 100644
index 0000000000..abe5b5889c
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/external_builder/external.gyp
@@ -0,0 +1,68 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ # the test driver switches this flag when testing external builder
+ 'use_external_builder%': 0,
+ },
+ 'targets': [
+ {
+ 'target_name': 'external',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.cpp',
+ 'hello.z',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'test_rule',
+ 'extension': 'z',
+ 'outputs': [
+ 'msbuild_rule.out',
+ ],
+ 'action': [
+ 'python',
+ 'msbuild_rule.py',
+ '<(RULE_INPUT_PATH)',
+ 'a', 'b', 'c',
+ ],
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ 'actions': [
+ {
+ 'action_name': 'test action',
+ 'inputs': [
+ 'msbuild_action.py',
+ ],
+ 'outputs': [
+ 'msbuild_action.out',
+ ],
+ 'action': [
+ 'python',
+ '<@(_inputs)',
+ 'x', 'y', 'z',
+ ],
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ 'conditions': [
+ ['use_external_builder==1', {
+ 'msvs_external_builder': 'test',
+ 'msvs_external_builder_build_cmd': [
+ 'python',
+ 'external_builder.py',
+ 'build', '1', '2', '3',
+ ],
+ 'msvs_external_builder_clean_cmd': [
+ 'python',
+ 'external_builder.py',
+ 'clean', '4', '5',
+ ],
+ }],
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/msvs/external_builder/external_builder.py b/third_party/python/gyp/test/msvs/external_builder/external_builder.py
new file mode 100644
index 0000000000..ddfc1e5e33
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/external_builder/external_builder.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+with open('external_builder.out', 'w') as f:
+ f.write(' '.join(sys.argv))
+
diff --git a/third_party/python/gyp/test/msvs/external_builder/gyptest-all.py b/third_party/python/gyp/test/msvs/external_builder/gyptest-all.py
new file mode 100644
index 0000000000..72faa7ab7f
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/external_builder/gyptest-all.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that msvs_external_builder being set will invoke the provided
+msvs_external_builder_build_cmd and msvs_external_builder_clean_cmd, and will
+not invoke MSBuild actions and rules.
+"""
+
+import os
+import sys
+import TestGyp
+
+if int(os.environ.get('GYP_MSVS_VERSION', 0)) < 2010:
+ sys.exit(0)
+
+test = TestGyp.TestGyp(formats=['msvs'], workdir='workarea_all')
+
+# without the flag set
+test.run_gyp('external.gyp')
+test.build('external.gyp', target='external')
+test.must_not_exist('external_builder.out')
+test.must_exist('msbuild_rule.out')
+test.must_exist('msbuild_action.out')
+test.must_match('msbuild_rule.out', 'msbuild_rule.py hello.z a b c')
+test.must_match('msbuild_action.out', 'msbuild_action.py x y z')
+os.remove('msbuild_rule.out')
+os.remove('msbuild_action.out')
+
+# with the flag set, using Build
+try:
+ os.environ['GYP_DEFINES'] = 'use_external_builder=1'
+ test.run_gyp('external.gyp')
+ test.build('external.gyp', target='external')
+finally:
+ del os.environ['GYP_DEFINES']
+test.must_not_exist('msbuild_rule.out')
+test.must_not_exist('msbuild_action.out')
+test.must_exist('external_builder.out')
+test.must_match('external_builder.out', 'external_builder.py build 1 2 3')
+os.remove('external_builder.out')
+
+# with the flag set, using Clean
+try:
+ os.environ['GYP_DEFINES'] = 'use_external_builder=1'
+ test.run_gyp('external.gyp')
+ test.build('external.gyp', target='external', clean=True)
+finally:
+ del os.environ['GYP_DEFINES']
+test.must_not_exist('msbuild_rule.out')
+test.must_not_exist('msbuild_action.out')
+test.must_exist('external_builder.out')
+test.must_match('external_builder.out', 'external_builder.py clean 4 5')
+os.remove('external_builder.out')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/msvs/external_builder/hello.cpp b/third_party/python/gyp/test/msvs/external_builder/hello.cpp
new file mode 100644
index 0000000000..bc0c0265b5
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/external_builder/hello.cpp
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+int main(void) {
+ printf("Hello, world!\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/msvs/external_builder/hello.z b/third_party/python/gyp/test/msvs/external_builder/hello.z
new file mode 100644
index 0000000000..aa478827b5
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/external_builder/hello.z
@@ -0,0 +1,6 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+This file will be passed to the test rule.
+
diff --git a/third_party/python/gyp/test/msvs/external_builder/msbuild_action.py b/third_party/python/gyp/test/msvs/external_builder/msbuild_action.py
new file mode 100644
index 0000000000..632d786922
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/external_builder/msbuild_action.py
@@ -0,0 +1,9 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+with open('msbuild_action.out', 'w') as f:
+ f.write(' '.join(sys.argv))
+
diff --git a/third_party/python/gyp/test/msvs/external_builder/msbuild_rule.py b/third_party/python/gyp/test/msvs/external_builder/msbuild_rule.py
new file mode 100644
index 0000000000..0d6e315775
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/external_builder/msbuild_rule.py
@@ -0,0 +1,11 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys, os.path
+
+sys.argv[1] = os.path.basename(sys.argv[1])
+
+with open('msbuild_rule.out', 'w') as f:
+ f.write(' '.join(sys.argv))
+
diff --git a/third_party/python/gyp/test/msvs/filters/filters.gyp b/third_party/python/gyp/test/msvs/filters/filters.gyp
new file mode 100644
index 0000000000..a4106dc8eb
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/filters/filters.gyp
@@ -0,0 +1,47 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'no_source_files',
+ 'type': 'none',
+ 'sources': [ ],
+ },
+ {
+ 'target_name': 'one_source_file',
+ 'type': 'executable',
+ 'sources': [
+ '../folder/a.c',
+ ],
+ },
+ {
+ 'target_name': 'two_source_files',
+ 'type': 'executable',
+ 'sources': [
+ '../folder/a.c',
+ '../folder/b.c',
+ ],
+ },
+ {
+ 'target_name': 'three_files_in_two_folders',
+ 'type': 'executable',
+ 'sources': [
+ '../folder1/a.c',
+ '../folder1/b.c',
+ '../folder2/c.c',
+ ],
+ },
+ {
+ 'target_name': 'nested_folders',
+ 'type': 'executable',
+ 'sources': [
+ '../folder1/nested/a.c',
+ '../folder2/d.c',
+ '../folder1/nested/b.c',
+ '../folder1/other/c.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/msvs/filters/gyptest-filters-2008.py b/third_party/python/gyp/test/msvs/filters/gyptest-filters-2008.py
new file mode 100644
index 0000000000..41ca085823
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/filters/gyptest-filters-2008.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that extra filters are pruned correctly for Visual Studio 2008.
+"""
+
+import re
+import TestGyp
+
+
+def strip_ws(str):
+ return re.sub('^ +', '', str, flags=re.M).replace('\n', '')
+
+
+test = TestGyp.TestGyp(formats=['msvs'])
+
+test.run_gyp('filters.gyp', '-G', 'standalone', '-G', 'msvs_version=2008')
+
+test.must_contain('no_source_files.vcproj', '<Files/>')
+
+test.must_contain('one_source_file.vcproj', strip_ws('''\
+<Files>
+ <File RelativePath="..\\folder\\a.c"/>
+</Files>
+'''))
+
+test.must_contain('two_source_files.vcproj', strip_ws('''\
+<Files>
+ <File RelativePath="..\\folder\\a.c"/>
+ <File RelativePath="..\\folder\\b.c"/>
+</Files>
+'''))
+
+test.must_contain('three_files_in_two_folders.vcproj', strip_ws('''\
+<Files>
+ <Filter Name="folder1">
+ <File RelativePath="..\\folder1\\a.c"/>
+ <File RelativePath="..\\folder1\\b.c"/>
+ </Filter>
+ <Filter Name="folder2">
+ <File RelativePath="..\\folder2\\c.c"/>
+ </Filter>
+</Files>
+'''))
+
+test.must_contain('nested_folders.vcproj', strip_ws('''\
+<Files>
+ <Filter Name="folder1">
+ <Filter Name="nested">
+ <File RelativePath="..\\folder1\\nested\\a.c"/>
+ <File RelativePath="..\\folder1\\nested\\b.c"/>
+ </Filter>
+ <Filter Name="other">
+ <File RelativePath="..\\folder1\\other\\c.c"/>
+ </Filter>
+ </Filter>
+ <Filter Name="folder2">
+ <File RelativePath="..\\folder2\\d.c"/>
+ </Filter>
+</Files>
+'''))
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/msvs/filters/gyptest-filters-2010.py b/third_party/python/gyp/test/msvs/filters/gyptest-filters-2010.py
new file mode 100644
index 0000000000..d8131d5d7b
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/filters/gyptest-filters-2010.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that extra filters are pruned correctly for Visual Studio 2010
+and later.
+"""
+
+import TestGyp
+
+
+test = TestGyp.TestGyp(formats=['msvs'])
+
+test.run_gyp('filters.gyp', '-G', 'standalone', '-G', 'msvs_version=2010')
+
+test.must_not_exist('no_source_files.vcxproj.filters')
+
+test.must_not_exist('one_source_file.vcxproj.filters')
+
+test.must_not_exist('two_source_files.vcxproj.filters')
+
+test.must_contain('three_files_in_two_folders.vcxproj.filters', '''\
+ <ItemGroup>
+ <ClCompile Include="..\\folder1\\a.c">
+ <Filter>folder1</Filter>
+ </ClCompile>
+ <ClCompile Include="..\\folder1\\b.c">
+ <Filter>folder1</Filter>
+ </ClCompile>
+ <ClCompile Include="..\\folder2\\c.c">
+ <Filter>folder2</Filter>
+ </ClCompile>
+ </ItemGroup>
+'''.replace('\n', '\r\n'))
+
+test.must_contain('nested_folders.vcxproj.filters', '''\
+ <ItemGroup>
+ <ClCompile Include="..\\folder1\\nested\\a.c">
+ <Filter>folder1\\nested</Filter>
+ </ClCompile>
+ <ClCompile Include="..\\folder2\\d.c">
+ <Filter>folder2</Filter>
+ </ClCompile>
+ <ClCompile Include="..\\folder1\\nested\\b.c">
+ <Filter>folder1\\nested</Filter>
+ </ClCompile>
+ <ClCompile Include="..\\folder1\\other\\c.c">
+ <Filter>folder1\\other</Filter>
+ </ClCompile>
+ </ItemGroup>
+'''.replace('\n', '\r\n'))
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/msvs/list_excluded/gyptest-all.py b/third_party/python/gyp/test/msvs/list_excluded/gyptest-all.py
new file mode 100644
index 0000000000..5a370f6b47
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/list_excluded/gyptest-all.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that msvs_list_excluded_files=0 doesn't list files that would
+normally be in _excluded_files, and that if that flag is not set, then they
+are still listed.
+"""
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['msvs'], workdir='workarea_all')
+
+
+# with the flag set to 0
+try:
+ os.environ['GYP_GENERATOR_FLAGS'] = 'msvs_list_excluded_files=0'
+ test.run_gyp('hello_exclude.gyp')
+finally:
+ del os.environ['GYP_GENERATOR_FLAGS']
+if test.uses_msbuild:
+ test.must_not_contain('hello.vcxproj', 'hello_mac')
+else:
+ test.must_not_contain('hello.vcproj', 'hello_mac')
+
+
+# with the flag not set
+test.run_gyp('hello_exclude.gyp')
+if test.uses_msbuild:
+ test.must_contain('hello.vcxproj', 'hello_mac')
+else:
+ test.must_contain('hello.vcproj', 'hello_mac')
+
+
+# with the flag explicitly set to 1
+try:
+ os.environ['GYP_GENERATOR_FLAGS'] = 'msvs_list_excluded_files=1'
+ test.run_gyp('hello_exclude.gyp')
+finally:
+ del os.environ['GYP_GENERATOR_FLAGS']
+if test.uses_msbuild:
+ test.must_contain('hello.vcxproj', 'hello_mac')
+else:
+ test.must_contain('hello.vcproj', 'hello_mac')
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/msvs/list_excluded/hello.cpp b/third_party/python/gyp/test/msvs/list_excluded/hello.cpp
new file mode 100644
index 0000000000..bc0c0265b5
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/list_excluded/hello.cpp
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+int main(void) {
+ printf("Hello, world!\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/msvs/list_excluded/hello_exclude.gyp b/third_party/python/gyp/test/msvs/list_excluded/hello_exclude.gyp
new file mode 100644
index 0000000000..aa160f2367
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/list_excluded/hello_exclude.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.cpp',
+ 'hello_mac.cpp',
+ ],
+ 'conditions': [
+ ['OS!="mac"', {'sources!': ['hello_mac.cpp']}],
+ ]
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/msvs/list_excluded/hello_mac.cpp b/third_party/python/gyp/test/msvs/list_excluded/hello_mac.cpp
new file mode 100644
index 0000000000..b9f6242c4b
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/list_excluded/hello_mac.cpp
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+int hello2() {
+ printf("Hello, two!\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/msvs/missing_sources/gyptest-missing.py b/third_party/python/gyp/test/msvs/missing_sources/gyptest-missing.py
new file mode 100644
index 0000000000..62a99ef0f1
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/missing_sources/gyptest-missing.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that missing 'sources' files are treated as fatal errors when the
+generator flag 'msvs_error_on_missing_sources' is set.
+"""
+
+import TestGyp
+import os
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'], workdir='workarea_all')
+
+ # With the flag not set
+ test.run_gyp('hello_missing.gyp')
+
+ # With the flag explicitly set to 0
+ try:
+ os.environ['GYP_GENERATOR_FLAGS'] = 'msvs_error_on_missing_sources=0'
+ test.run_gyp('hello_missing.gyp')
+ finally:
+ del os.environ['GYP_GENERATOR_FLAGS']
+
+ # With the flag explicitly set to 1
+ try:
+ os.environ['GYP_GENERATOR_FLAGS'] = 'msvs_error_on_missing_sources=1'
+ # Test to make sure GYP raises an exception (exit status 1). Since this will
+ # also print a backtrace, ensure that TestGyp is not checking that stderr is
+ # empty by specifying None, which means do not perform any checking.
+ # Instead, stderr is checked below to ensure it contains the expected
+ # output.
+ test.run_gyp('hello_missing.gyp', status=1, stderr=None)
+ finally:
+ del os.environ['GYP_GENERATOR_FLAGS']
+ test.must_contain_any_line(test.stderr(),
+ ["Missing input files:"])
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/msvs/missing_sources/hello_missing.gyp b/third_party/python/gyp/test/msvs/missing_sources/hello_missing.gyp
new file mode 100644
index 0000000000..c08926bbff
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/missing_sources/hello_missing.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello_missing.cpp',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/msvs/multiple_actions_error_handling/action_fail.py b/third_party/python/gyp/test/msvs/multiple_actions_error_handling/action_fail.py
new file mode 100644
index 0000000000..286fc4e132
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/multiple_actions_error_handling/action_fail.py
@@ -0,0 +1,7 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+sys.exit(1)
diff --git a/third_party/python/gyp/test/msvs/multiple_actions_error_handling/action_succeed.py b/third_party/python/gyp/test/msvs/multiple_actions_error_handling/action_succeed.py
new file mode 100644
index 0000000000..3554373197
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/multiple_actions_error_handling/action_succeed.py
@@ -0,0 +1,7 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+sys.exit(0)
diff --git a/third_party/python/gyp/test/msvs/multiple_actions_error_handling/actions.gyp b/third_party/python/gyp/test/msvs/multiple_actions_error_handling/actions.gyp
new file mode 100644
index 0000000000..ab99e929e2
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/multiple_actions_error_handling/actions.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'actions-test',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'first action (fails)',
+ 'inputs': [
+ 'action_fail.py',
+ ],
+ 'outputs': [
+ 'ALWAYS_OUT_OF_DATE',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)'
+ ],
+ 'msvs_cygwin_shell': 0,
+ },
+ {
+ 'action_name': 'second action (succeeds)',
+ 'inputs': [
+ 'action_succeed.py',
+ ],
+ 'outputs': [
+ 'ALWAYS_OUT_OF_DATE',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)'
+ ],
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/msvs/multiple_actions_error_handling/gyptest.py b/third_party/python/gyp/test/msvs/multiple_actions_error_handling/gyptest.py
new file mode 100644
index 0000000000..3aa6b8fdb2
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/multiple_actions_error_handling/gyptest.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that failing actions make the build fail reliably, even when there
+are multiple actions in one project.
+"""
+
+import os
+import sys
+import TestGyp
+import TestCmd
+
+test = TestGyp.TestGyp(formats=['msvs'], workdir='workarea_all')
+
+test.run_gyp('actions.gyp')
+test.build('actions.gyp',
+ target='actions-test',
+ status=1,
+ stdout=r'.*"cmd\.exe" exited with code 1\..*',
+ match=TestCmd.match_re_dotall)
+
+test.pass_test()
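As a rough sketch of the semantics under test (an assumption about the expected behaviour, not code taken from the generator): a driver that runs the two actions in sequence must still fail overall when the first action exits non-zero, even though the second one succeeds.

import subprocess
import sys

actions = [
    ['python', 'action_fail.py'],     # exits 1
    ['python', 'action_succeed.py'],  # exits 0
]
status = 0
for cmd in actions:
    rc = subprocess.call(cmd)
    if rc and not status:
        status = rc  # remember the first failing action
sys.exit(status)     # non-zero overall, matching what the test expects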
diff --git a/third_party/python/gyp/test/msvs/props/AppName.props b/third_party/python/gyp/test/msvs/props/AppName.props
new file mode 100644
index 0000000000..b688f663d5
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/props/AppName.props
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <PropertyGroup Label="UserMacros">
+ <AppName>Greet</AppName>
+ </PropertyGroup>
+ <PropertyGroup>
+ <_ProjectFileVersion>10.0.40219.1</_ProjectFileVersion>
+ </PropertyGroup>
+ <ItemGroup>
+ <BuildMacro Include="AppName">
+ <Value>$(AppName)</Value>
+ </BuildMacro>
+ </ItemGroup>
+</Project>
diff --git a/third_party/python/gyp/test/msvs/props/AppName.vsprops b/third_party/python/gyp/test/msvs/props/AppName.vsprops
new file mode 100644
index 0000000000..84b9af3800
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/props/AppName.vsprops
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioPropertySheet
+ ProjectType="Visual C++"
+ Version="8.00"
+ Name="Common"
+ >
+ <UserMacro
+ Name="AppName"
+ Value="Greet"
+ />
+</VisualStudioPropertySheet>
diff --git a/third_party/python/gyp/test/msvs/props/gyptest-props.py b/third_party/python/gyp/test/msvs/props/gyptest-props.py
new file mode 100644
index 0000000000..abd4df2241
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/props/gyptest-props.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies props files are added by using a
+props file to set the name of the built executable.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(workdir='workarea_all', formats=['msvs'])
+
+test.run_gyp('hello.gyp')
+
+test.build('hello.gyp')
+
+test.built_file_must_exist('Greet.exe')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/msvs/props/hello.c b/third_party/python/gyp/test/msvs/props/hello.c
new file mode 100644
index 0000000000..faadc75e2c
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/props/hello.c
@@ -0,0 +1,11 @@
+/* Copyright (c) 2012 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello, world!\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/msvs/props/hello.gyp b/third_party/python/gyp/test/msvs/props/hello.gyp
new file mode 100644
index 0000000000..5a58317fa7
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/props/hello.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'product_name': '$(AppName)',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ 'msvs_props': [
+ '$(SolutionDir)AppName.vsprops'
+ ],
+ 'msbuild_props': [
+ '$(SolutionDir)AppName.props'
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/msvs/rules_stdout_stderr/dummy.bar b/third_party/python/gyp/test/msvs/rules_stdout_stderr/dummy.bar
new file mode 100644
index 0000000000..25178696d2
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/rules_stdout_stderr/dummy.bar
@@ -0,0 +1,5 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+A dummy file with the .bar extension (used for stderr rule).
diff --git a/third_party/python/gyp/test/msvs/rules_stdout_stderr/dummy.foo b/third_party/python/gyp/test/msvs/rules_stdout_stderr/dummy.foo
new file mode 100644
index 0000000000..6a7990bb98
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/rules_stdout_stderr/dummy.foo
@@ -0,0 +1,5 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+A dummy file with the .foo extension (used for stdout rule).
diff --git a/third_party/python/gyp/test/msvs/rules_stdout_stderr/gyptest-rules-stdout-stderr.py b/third_party/python/gyp/test/msvs/rules_stdout_stderr/gyptest-rules-stdout-stderr.py
new file mode 100644
index 0000000000..804505a23d
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/rules_stdout_stderr/gyptest-rules-stdout-stderr.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Verifies that stdout and stderr from rules get logged in the build's
+stdout."""
+
+import sys
+import TestGyp
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs'])
+
+ test.run_gyp('rules-stdout-stderr.gyp')
+ test.build('rules-stdout-stderr.gyp', test.ALL)
+
+ expected_stdout_lines = [
+ 'testing stdout',
+ 'This will go to stdout',
+
+ # Note: stderr output from rules will go to the build's stdout.
+ 'testing stderr',
+ 'This will go to stderr',
+ ]
+ test.must_contain_all_lines(test.stdout(), expected_stdout_lines)
+
+ test.pass_test()
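A minimal sketch of the stream handling the test relies on (assumed for illustration; the generator's actual plumbing is not shown here): each rule command is run with its stderr folded into the build's stdout, roughly like this.

import subprocess

result = subprocess.run(
    ['python', 'rule_stderr.py', 'dummy.bar'],
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT,  # merge stderr into stdout
    text=True)
print(result.stdout)  # contains "This will go to stderr"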
diff --git a/third_party/python/gyp/test/msvs/rules_stdout_stderr/rule_stderr.py b/third_party/python/gyp/test/msvs/rules_stdout_stderr/rule_stderr.py
new file mode 100644
index 0000000000..2081513ec8
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/rules_stdout_stderr/rule_stderr.py
@@ -0,0 +1,8 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+import sys
+print("This will go to stderr", file=sys.stderr)
diff --git a/third_party/python/gyp/test/msvs/rules_stdout_stderr/rule_stdout.py b/third_party/python/gyp/test/msvs/rules_stdout_stderr/rule_stdout.py
new file mode 100644
index 0000000000..4c073ebc45
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/rules_stdout_stderr/rule_stdout.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+print("This will go to stdout")
diff --git a/third_party/python/gyp/test/msvs/rules_stdout_stderr/rules-stdout-stderr.gyp b/third_party/python/gyp/test/msvs/rules_stdout_stderr/rules-stdout-stderr.gyp
new file mode 100644
index 0000000000..ce93643f8e
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/rules_stdout_stderr/rules-stdout-stderr.gyp
@@ -0,0 +1,52 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test',
+ 'type': 'none',
+ 'sources': [
+ 'dummy.foo',
+ 'dummy.bar',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'test_stdout',
+ 'extension': 'foo',
+ 'message': 'testing stdout',
+ 'msvs_cygwin_shell': 0,
+ 'inputs': [
+ 'rule_stdout.py',
+ ],
+ 'outputs': [
+ 'dummy.foo_output',
+ ],
+ 'action': [
+ 'python',
+ 'rule_stdout.py',
+ '<(RULE_INPUT_PATH)',
+ ],
+ },
+ {
+ 'rule_name': 'test_stderr',
+ 'extension': 'bar',
+ 'message': 'testing stderr',
+ 'msvs_cygwin_shell': 0,
+ 'inputs': [
+ 'rule_stderr.py',
+ ],
+ 'outputs': [
+ 'dummy.bar_output',
+ ],
+ 'action': [
+ 'python',
+ 'rule_stderr.py',
+ '<(RULE_INPUT_PATH)',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/msvs/shared_output/common.gypi b/third_party/python/gyp/test/msvs/shared_output/common.gypi
new file mode 100644
index 0000000000..c6fa341d68
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/shared_output/common.gypi
@@ -0,0 +1,17 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'default_configuration': 'Baz',
+ 'configurations': {
+ 'Baz': {
+ 'msvs_configuration_attributes': {
+ 'OutputDirectory': '<(DEPTH)/foo',
+ 'IntermediateDirectory': '$(OutDir)/bar',
+ },
+ },
+ },
+ },
+}
diff --git a/third_party/python/gyp/test/msvs/shared_output/gyptest-shared_output.py b/third_party/python/gyp/test/msvs/shared_output/gyptest-shared_output.py
new file mode 100644
index 0000000000..270b280e6b
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/shared_output/gyptest-shared_output.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test checking that IntermediateDirectory can be defined in terms of
+OutputDirectory. We previously emitted the definition of
+IntermediateDirectory before the definition of OutputDirectory; emitting
+OutputDirectory first is required so that $(IntDir) can be based on $(OutDir).
+"""
+
+import TestGyp
+import os
+
+# NOTE: This test really is vcbuild/msbuild specific (not applicable to windows
+# ninja), as it is testing the msvs output location when opening an .sln
+# other than all.sln.
+test = TestGyp.TestGyp(workdir='workarea_shared_output', formats=['msvs'])
+
+test.run_gyp('hello.gyp')
+test.set_configuration('Baz')
+
+test.build('there/there.gyp', test.ALL)
+test.must_exist(os.path.join(test.workdir, 'foo', 'there.exe'))
+test.must_exist(os.path.join(test.workdir, 'foo', 'bar', 'there.obj'))
+
+test.build('hello.gyp', test.ALL)
+test.must_exist(os.path.join(test.workdir, 'foo', 'hello.exe'))
+test.must_exist(os.path.join(test.workdir, 'foo', 'bar', 'hello.obj'))
+
+if test.format == 'msvs':
+ if test.uses_msbuild:
+ test.must_contain('pull_in_there.vcxproj',
+ '<IntDir>$(OutDir)bar\\</IntDir>')
+ else:
+ test.must_contain('pull_in_there.vcproj',
+ 'IntermediateDirectory="$(OutDir)bar\\"')
+
+test.pass_test()
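For reference, the layering this test asserts can be read directly off common.gypi: $(IntDir) is defined in terms of $(OutDir), so OutputDirectory must be emitted first for the value to resolve. A purely illustrative sketch of that substitution (gyp/MSBuild perform the real expansion, not this snippet):

    # Values taken from common.gypi above; the expansion itself is only illustrative.
    output_directory = 'foo'  # from 'OutputDirectory': '<(DEPTH)/foo'
    intermediate_directory = '$(OutDir)/bar'.replace('$(OutDir)', output_directory)
    print(intermediate_directory)  # foo/bar -- where there.obj and hello.obj are checked for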
diff --git a/third_party/python/gyp/test/msvs/shared_output/hello.c b/third_party/python/gyp/test/msvs/shared_output/hello.c
new file mode 100644
index 0000000000..698e4fd36c
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/shared_output/hello.c
@@ -0,0 +1,12 @@
+/*
+ * Copyright (c) 2012 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+int main(void) {
+ printf("Hello, world!\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/msvs/shared_output/hello.gyp b/third_party/python/gyp/test/msvs/shared_output/hello.gyp
new file mode 100644
index 0000000000..f80e5cfca1
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/shared_output/hello.gyp
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': ['common.gypi'],
+ 'targets': [
+ {
+ 'target_name': 'pull_in_there',
+ 'type': 'none',
+ 'dependencies': ['there/there.gyp:*'],
+ },
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/msvs/shared_output/there/there.c b/third_party/python/gyp/test/msvs/shared_output/there/there.c
new file mode 100644
index 0000000000..698e4fd36c
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/shared_output/there/there.c
@@ -0,0 +1,12 @@
+/*
+ * Copyright (c) 2012 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+int main(void) {
+ printf("Hello, world!\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/msvs/shared_output/there/there.gyp b/third_party/python/gyp/test/msvs/shared_output/there/there.gyp
new file mode 100644
index 0000000000..56feff326c
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/shared_output/there/there.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': ['../common.gypi'],
+ 'targets': [
+ {
+ 'target_name': 'there',
+ 'type': 'executable',
+ 'sources': [
+ 'there.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/msvs/uldi2010/gyptest-all.py b/third_party/python/gyp/test/msvs/uldi2010/gyptest-all.py
new file mode 100644
index 0000000000..cc248fbd63
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/uldi2010/gyptest-all.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that uldi can be disabled on a per-project-reference basis in vs2010.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['msvs'], workdir='workarea_all')
+
+test.run_gyp('hello.gyp')
+
+if test.uses_msbuild:
+ test.must_contain('hello.vcxproj', '<UseLibraryDependencyInputs>false')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/msvs/uldi2010/hello.c b/third_party/python/gyp/test/msvs/uldi2010/hello.c
new file mode 100644
index 0000000000..06e6a02905
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/uldi2010/hello.c
@@ -0,0 +1,13 @@
+/* Copyright (c) 2012 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+extern int hello2();
+
+int main(void) {
+ printf("Hello, world!\n");
+ hello2();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/msvs/uldi2010/hello.gyp b/third_party/python/gyp/test/msvs/uldi2010/hello.gyp
new file mode 100644
index 0000000000..a2bf2badb1
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/uldi2010/hello.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ 'dependencies': [
+ 'hellolib',
+ ]
+ },
+ {
+ 'target_name': 'hellolib',
+ 'type': 'static_library',
+ 'sources': [
+ 'hello2.c',
+ ],
+ 'msvs_2010_disable_uldi_when_referenced': 1,
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/msvs/uldi2010/hello2.c b/third_party/python/gyp/test/msvs/uldi2010/hello2.c
new file mode 100644
index 0000000000..e2f23238d1
--- /dev/null
+++ b/third_party/python/gyp/test/msvs/uldi2010/hello2.c
@@ -0,0 +1,10 @@
+/* Copyright (c) 2012 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int hello2() {
+ printf("Hello, two!\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/multiple-targets/gyptest-all.py b/third_party/python/gyp/test/multiple-targets/gyptest-all.py
new file mode 100755
index 0000000000..3ef50090d1
--- /dev/null
+++ b/third_party/python/gyp/test/multiple-targets/gyptest-all.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('multiple.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('multiple.gyp', test.ALL, chdir='relocate/src', stderr=None)
+
+expect1 = """\
+hello from prog1.c
+hello from common.c
+"""
+
+expect2 = """\
+hello from prog2.c
+hello from common.c
+"""
+
+test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src')
+test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/multiple-targets/gyptest-default.py b/third_party/python/gyp/test/multiple-targets/gyptest-default.py
new file mode 100755
index 0000000000..db15d794c3
--- /dev/null
+++ b/third_party/python/gyp/test/multiple-targets/gyptest-default.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('multiple.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('multiple.gyp', chdir='relocate/src')
+
+expect1 = """\
+hello from prog1.c
+hello from common.c
+"""
+
+expect2 = """\
+hello from prog2.c
+hello from common.c
+"""
+
+test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src')
+test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/multiple-targets/src/common.c b/third_party/python/gyp/test/multiple-targets/src/common.c
new file mode 100644
index 0000000000..f1df7c1431
--- /dev/null
+++ b/third_party/python/gyp/test/multiple-targets/src/common.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+void common(void)
+{
+ printf("hello from common.c\n");
+ return;
+}
diff --git a/third_party/python/gyp/test/multiple-targets/src/multiple.gyp b/third_party/python/gyp/test/multiple-targets/src/multiple.gyp
new file mode 100644
index 0000000000..3db4ea30cd
--- /dev/null
+++ b/third_party/python/gyp/test/multiple-targets/src/multiple.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'prog1',
+ 'type': 'executable',
+ 'sources': [
+ 'prog1.c',
+ 'common.c',
+ ],
+ },
+ {
+ 'target_name': 'prog2',
+ 'type': 'executable',
+ 'sources': [
+ 'prog2.c',
+ 'common.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/multiple-targets/src/prog1.c b/third_party/python/gyp/test/multiple-targets/src/prog1.c
new file mode 100644
index 0000000000..fbf8d4cd7c
--- /dev/null
+++ b/third_party/python/gyp/test/multiple-targets/src/prog1.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void common(void);
+
+int main(void)
+{
+ printf("hello from prog1.c\n");
+ common();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/multiple-targets/src/prog2.c b/third_party/python/gyp/test/multiple-targets/src/prog2.c
new file mode 100644
index 0000000000..a94b5c155e
--- /dev/null
+++ b/third_party/python/gyp/test/multiple-targets/src/prog2.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void common(void);
+
+int main(void)
+{
+ printf("hello from prog2.c\n");
+ common();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/ninja/action-rule-hash/gyptest-action-rule-hash.py b/third_party/python/gyp/test/ninja/action-rule-hash/gyptest-action-rule-hash.py
new file mode 100644
index 0000000000..7147fd2fc3
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/action-rule-hash/gyptest-action-rule-hash.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that running gyp in a different directory does not cause actions and
+rules to rerun.
+"""
+
+import os
+import sys
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['ninja'])
+# The xcode-ninja generator mishandles gypfiles that are not at the
+# project root.
+# cf. https://code.google.com/p/gyp/issues/detail?id=460
+if test.format == 'xcode-ninja':
+ test.skip_test()
+
+test.run_gyp('subdir/action-rule-hash.gyp')
+test.build('subdir/action-rule-hash.gyp', test.ALL)
+test.up_to_date('subdir/action-rule-hash.gyp')
+
+# Verify that everything is still up-to-date when we re-invoke gyp from a
+# different directory.
+test.run_gyp('action-rule-hash.gyp', '--depth=../', chdir='subdir')
+test.up_to_date('subdir/action-rule-hash.gyp')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/ninja/action-rule-hash/subdir/action-rule-hash.gyp b/third_party/python/gyp/test/ninja/action-rule-hash/subdir/action-rule-hash.gyp
new file mode 100644
index 0000000000..0e88a3019f
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/action-rule-hash/subdir/action-rule-hash.gyp
@@ -0,0 +1,29 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'sources': [
+ '<(INTERMEDIATE_DIR)/main.cc',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'emit_main_cc',
+ 'inputs': ['emit.py'],
+ 'outputs': ['<(INTERMEDIATE_DIR)/main.cc'],
+ 'action': [
+ 'python',
+ 'emit.py',
+ '<(INTERMEDIATE_DIR)/main.cc',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/ninja/action-rule-hash/subdir/emit.py b/third_party/python/gyp/test/ninja/action-rule-hash/subdir/emit.py
new file mode 100644
index 0000000000..6b17125574
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/action-rule-hash/subdir/emit.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+f = open(sys.argv[1], 'w')
+f.write('int main() {\n')
+f.write(' return 0;\n')
+f.write('}\n')
+f.close()
diff --git a/third_party/python/gyp/test/ninja/action_dependencies/gyptest-action-dependencies.py b/third_party/python/gyp/test/ninja/action_dependencies/gyptest-action-dependencies.py
new file mode 100755
index 0000000000..89813bab17
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/action_dependencies/gyptest-action-dependencies.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that building an object file correctly depends on running actions in
+the targets it depends on, without building those targets themselves.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
+import TestGyp
+
+# NOTE(piman): This test will not work with other generators because:
+# - it explicitly tests the optimization, which is not implemented (yet?) on
+# other generators
+# - it relies on the exact path to output object files, which is
+#   generator-dependent, and it also relies on the ability to build only that
+#   object file, which may not be available with all generators.
+# TODO(piman): Extend to other generators when possible.
+test = TestGyp.TestGyp(formats=['ninja'])
+# xcode-ninja doesn't support building single object files by design.
+if test.format == 'xcode-ninja':
+ test.skip_test()
+
+test.run_gyp('action_dependencies.gyp', chdir='src')
+
+chdir = 'relocate/src'
+test.relocate('src', chdir)
+
+objext = '.obj' if sys.platform == 'win32' else '.o'
+
+test.build('action_dependencies.gyp',
+ os.path.join('obj', 'b.b' + objext),
+ chdir=chdir)
+
+# The 'a' actions should be run (letting b.c compile), but the a static library
+# should not be built.
+test.built_file_must_not_exist('a', type=test.STATIC_LIB, chdir=chdir)
+test.built_file_must_not_exist('b', type=test.STATIC_LIB, chdir=chdir)
+test.built_file_must_exist(os.path.join('obj', 'b.b' + objext), chdir=chdir)
+
+test.build('action_dependencies.gyp',
+ os.path.join('obj', 'c.c' + objext),
+ chdir=chdir)
+
+# 'a' and 'b' should be built, so that the 'c' action succeeds, letting c.c
+# compile
+test.built_file_must_exist('a', type=test.STATIC_LIB, chdir=chdir)
+test.built_file_must_exist('b', type=test.EXECUTABLE, chdir=chdir)
+test.built_file_must_exist(os.path.join('obj', 'c.c' + objext), chdir=chdir)
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/ninja/action_dependencies/src/a.c b/third_party/python/gyp/test/ninja/action_dependencies/src/a.c
new file mode 100644
index 0000000000..4d7af9b26c
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/action_dependencies/src/a.c
@@ -0,0 +1,10 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include "a.h"
+
+int funcA() {
+ return 42;
+}
diff --git a/third_party/python/gyp/test/ninja/action_dependencies/src/a.h b/third_party/python/gyp/test/ninja/action_dependencies/src/a.h
new file mode 100644
index 0000000000..335db56739
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/action_dependencies/src/a.h
@@ -0,0 +1,13 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#ifndef A_H_
+#define A_H_
+
+#include "a/generated.h"
+
+int funcA();
+
+#endif // A_H_
diff --git a/third_party/python/gyp/test/ninja/action_dependencies/src/action_dependencies.gyp b/third_party/python/gyp/test/ninja/action_dependencies/src/action_dependencies.gyp
new file mode 100644
index 0000000000..5baa7a7d47
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/action_dependencies/src/action_dependencies.gyp
@@ -0,0 +1,88 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'static_library',
+ 'sources': [
+ 'a.c',
+ 'a.h',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'generate_headers',
+ 'inputs': [
+ 'emit.py'
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/a/generated.h'
+ ],
+ 'action': [
+ 'python',
+ 'emit.py',
+ '<(SHARED_INTERMEDIATE_DIR)/a/generated.h',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ],
+ },
+ },
+ {
+ 'target_name': 'b',
+ 'type': 'executable',
+ 'sources': [
+ 'b.c',
+ 'b.h',
+ ],
+ 'dependencies': [
+ 'a',
+ ],
+ },
+ {
+ 'target_name': 'c',
+ 'type': 'static_library',
+ 'sources': [
+ 'c.c',
+ 'c.h',
+ ],
+ 'dependencies': [
+ 'b',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'generate_headers',
+ 'inputs': [
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/c/generated.h'
+ ],
+ 'action': [
+ '<(PRODUCT_DIR)/b',
+ '<(SHARED_INTERMEDIATE_DIR)/c/generated.h',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ],
+ },
+ },
+ ],
+}
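Target 'c' above is the interesting case: its generate_headers action invokes the freshly built 'b' executable, so compiling even a single c.c object requires 'a' and 'b' to be produced first. A hand-run equivalent of that action follows; the out/Default and out/Default/gen paths are assumptions of this sketch, not guaranteed generator output locations:

    import subprocess
    # Run the built 'b' binary to generate the header c.c includes; b writes
    # "#define VALUE 42" (funcA's return value) into the given path.
    subprocess.check_call(['out/Default/b', 'out/Default/gen/c/generated.h'])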
diff --git a/third_party/python/gyp/test/ninja/action_dependencies/src/b.c b/third_party/python/gyp/test/ninja/action_dependencies/src/b.c
new file mode 100644
index 0000000000..824464695a
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/action_dependencies/src/b.c
@@ -0,0 +1,18 @@
+/* Copyright (c) 2012 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+#include "b.h"
+
+int main(int argc, char** argv) {
+ FILE* f;
+ if (argc < 2)
+ return 1;
+ f = fopen(argv[1], "wt");
+ fprintf(f, "#define VALUE %d\n", funcA());
+ fclose(f);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/ninja/action_dependencies/src/b.h b/third_party/python/gyp/test/ninja/action_dependencies/src/b.h
new file mode 100644
index 0000000000..91362cd899
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/action_dependencies/src/b.h
@@ -0,0 +1,13 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#ifndef B_H_
+#define B_H_
+
+#include "a.h"
+
+int funcB();
+
+#endif // B_H_
diff --git a/third_party/python/gyp/test/ninja/action_dependencies/src/c.c b/third_party/python/gyp/test/ninja/action_dependencies/src/c.c
new file mode 100644
index 0000000000..b412087ec8
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/action_dependencies/src/c.c
@@ -0,0 +1,10 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include "c.h"
+
+int funcC() {
+ return VALUE;
+}
diff --git a/third_party/python/gyp/test/ninja/action_dependencies/src/c.h b/third_party/python/gyp/test/ninja/action_dependencies/src/c.h
new file mode 100644
index 0000000000..c81a45bbe7
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/action_dependencies/src/c.h
@@ -0,0 +1,13 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#ifndef C_H_
+#define C_H_
+
+#include "c/generated.h"
+
+int funcC();
+
+#endif // C_H_
diff --git a/third_party/python/gyp/test/ninja/action_dependencies/src/emit.py b/third_party/python/gyp/test/ninja/action_dependencies/src/emit.py
new file mode 100755
index 0000000000..8ed12f7393
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/action_dependencies/src/emit.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+f = open(sys.argv[1], 'w')
+f.write('/* Hello World */\n')
+f.close()
diff --git a/third_party/python/gyp/test/ninja/chained-dependency/chained-dependency.gyp b/third_party/python/gyp/test/ninja/chained-dependency/chained-dependency.gyp
new file mode 100644
index 0000000000..3fe68ae85a
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/chained-dependency/chained-dependency.gyp
@@ -0,0 +1,53 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ # This first target generates a header.
+ {
+ 'target_name': 'generate_header',
+ 'type': 'none',
+ 'msvs_cygwin_shell': '0',
+ 'actions': [
+ {
+ 'action_name': 'generate header',
+ 'inputs': [],
+ 'outputs': ['<(SHARED_INTERMEDIATE_DIR)/generated/header.h'],
+ 'action': [
+ 'python', '-c', 'open(<(_outputs), "w")'
+ ]
+ },
+ ],
+ 'all_dependent_settings': {
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ],
+ },
+ },
+
+ # This intermediate target does nothing other than pull in a
+ # dependency on the above generated target.
+ {
+ 'target_name': 'chain',
+ 'type': 'none',
+ 'dependencies': [
+ 'generate_header',
+ ],
+ },
+
+ # This final target is:
+ # - a static library (so gyp doesn't transitively pull in dependencies);
+ # - that relies on the generated file two dependencies away.
+ {
+ 'target_name': 'chained',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'chain',
+ ],
+ 'sources': [
+ 'chained.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/ninja/chained-dependency/chained.c b/third_party/python/gyp/test/ninja/chained-dependency/chained.c
new file mode 100644
index 0000000000..c1ff1a7b12
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/chained-dependency/chained.c
@@ -0,0 +1,5 @@
+#include "generated/header.h"
+
+int main(void) {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/ninja/chained-dependency/gyptest-chained-dependency.py b/third_party/python/gyp/test/ninja/chained-dependency/gyptest-chained-dependency.py
new file mode 100755
index 0000000000..d8763f1d3d
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/chained-dependency/gyptest-chained-dependency.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that files generated by two-steps-removed actions are built before
+dependent compile steps.
+"""
+
+import os
+import sys
+import TestGyp
+
+# This test is Ninja-specific in that:
+# - the bug only showed nondeterministically in parallel builds;
+# - it relies on a ninja-specific output file path.
+
+test = TestGyp.TestGyp(formats=['ninja'])
+# xcode-ninja doesn't support building single object files by design.
+if test.format == 'xcode-ninja':
+ test.skip_test()
+
+test.run_gyp('chained-dependency.gyp')
+objext = '.obj' if sys.platform == 'win32' else '.o'
+test.build('chained-dependency.gyp',
+ os.path.join('obj', 'chained.chained' + objext))
+# The test passes if the .o file builds successfully.
+test.pass_test()
diff --git a/third_party/python/gyp/test/ninja/empty-and-non-empty-duplicate-name/gyptest-empty-and-non-empty-duplicate-name.py b/third_party/python/gyp/test/ninja/empty-and-non-empty-duplicate-name/gyptest-empty-and-non-empty-duplicate-name.py
new file mode 100644
index 0000000000..0bdca66cdc
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/empty-and-non-empty-duplicate-name/gyptest-empty-and-non-empty-duplicate-name.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a phony rule isn't emitted for a target name when a real target
+with the same name has already been emitted.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['ninja'])
+
+# Reset xcode_ninja_target_pattern to its default for this test.
+test.run_gyp('test.gyp', '-G', 'xcode_ninja_target_pattern=^$')
+
+# Check for both \r and \n to cover both windows and linux.
+test.must_not_contain('out/Default/build.ninja', 'build empty_target: phony\r')
+test.must_not_contain('out/Default/build.ninja', 'build empty_target: phony\n')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/ninja/empty-and-non-empty-duplicate-name/subdir/included.gyp b/third_party/python/gyp/test/ninja/empty-and-non-empty-duplicate-name/subdir/included.gyp
new file mode 100644
index 0000000000..1b9fc42f3f
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/empty-and-non-empty-duplicate-name/subdir/included.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'empty_target',
+ 'type': 'executable',
+ 'sources': [
+ 'test.cc',
+ ],
+ },
+ {
+ 'target_name': 'included_empty_target',
+ 'type': 'none',
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/ninja/empty-and-non-empty-duplicate-name/test.gyp b/third_party/python/gyp/test/ninja/empty-and-non-empty-duplicate-name/test.gyp
new file mode 100644
index 0000000000..9aa6287c7c
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/empty-and-non-empty-duplicate-name/test.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'All',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir/included.gyp:included_empty_target'
+ ]
+ },
+ {
+ 'target_name': 'empty_target',
+ 'type': 'none',
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/ninja/normalize-paths-win/gyptest-normalize-paths.py b/third_party/python/gyp/test/ninja/normalize-paths-win/gyptest-normalize-paths.py
new file mode 100644
index 0000000000..f56dbe5921
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/normalize-paths-win/gyptest-normalize-paths.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure paths are normalized with VS macros properly expanded on Windows.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ test.run_gyp('normalize-paths.gyp')
+
+ # We can't use existence tests because any case will pass, so we check the
+ # contents of ninja files directly since that's what we're most concerned
+ # with anyway.
+ subninja = open(test.built_file_path('obj/some_target.ninja')).read()
+ if '$!product_dir' in subninja:
+ test.fail_test()
+ if 'out\\Default' in subninja:
+ test.fail_test()
+
+ second = open(test.built_file_path('obj/second.ninja')).read()
+ if ('..\\..\\things\\AnotherName.exe' in second or
+ 'AnotherName.exe' not in second):
+ test.fail_test()
+
+ copytarget = open(test.built_file_path('obj/copy_target.ninja')).read()
+ if '$(VSInstallDir)' in copytarget:
+ test.fail_test()
+
+ action = open(test.built_file_path('obj/action.ninja')).read()
+ if '..\\..\\out\\Default' in action:
+ test.fail_test()
+ if '..\\..\\SomethingElse' in action or 'SomethingElse' not in action:
+ test.fail_test()
+ if '..\\..\\SomeOtherInput' in action or 'SomeOtherInput' not in action:
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/ninja/normalize-paths-win/hello.cc b/third_party/python/gyp/test/ninja/normalize-paths-win/hello.cc
new file mode 100644
index 0000000000..1711567ef5
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/normalize-paths-win/hello.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/ninja/normalize-paths-win/normalize-paths.gyp b/third_party/python/gyp/test/ninja/normalize-paths-win/normalize-paths.gyp
new file mode 100644
index 0000000000..544d06456d
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/normalize-paths-win/normalize-paths.gyp
@@ -0,0 +1,68 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'Some_Target',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '<(PRODUCT_DIR)/stuff/AnotherName.exe',
+ },
+ },
+ 'sources': [
+ 'HeLLo.cc',
+ 'blOrP.idl',
+ ],
+ },
+ {
+ 'target_name': 'second',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(OutDir)\\things\\AnotherName.exe',
+ },
+ },
+ 'sources': [
+ 'HeLLo.cc',
+ ],
+ },
+ {
+ 'target_name': 'Copy_Target',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)',
+ 'files': [
+ '$(VSInstallDir)\\bin\\cl.exe',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'action',
+ 'type': 'none',
+ 'msvs_cygwin_shell': '0',
+ 'actions': [
+ {
+ 'inputs': [
+ '$(IntDir)\\SomeInput',
+ '$(OutDir)\\SomeOtherInput',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/ReSuLt',
+ '<(SHARED_INTERMEDIATE_DIR)/TempFile',
+ '$(OutDir)\SomethingElse',
+ ],
+ 'action_name': 'Test action',
+ # Unfortunately, we can't normalize this field because it's
+ # free-form. Fortunately, ninja doesn't inspect it at all (only the
+ # inputs and outputs) so it's not mandatory.
+ 'action': [],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/ninja/s-needs-no-depfiles/empty.s b/third_party/python/gyp/test/ninja/s-needs-no-depfiles/empty.s
new file mode 100644
index 0000000000..218d8921e5
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/s-needs-no-depfiles/empty.s
@@ -0,0 +1 @@
+# This file intentionally left blank.
diff --git a/third_party/python/gyp/test/ninja/s-needs-no-depfiles/gyptest-s-needs-no-depfiles.py b/third_party/python/gyp/test/ninja/s-needs-no-depfiles/gyptest-s-needs-no-depfiles.py
new file mode 100755
index 0000000000..77a3245d46
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/s-needs-no-depfiles/gyptest-s-needs-no-depfiles.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that .s files don't always trigger a rebuild, as would happen if depfiles
+were used for them (since clang & gcc ignore -MMD when building .s->.o on
+linux).
+"""
+
+import os
+import sys
+import TestCommon
+import TestGyp
+
+# NOTE(fischman): Each generator uses depfiles (or not) differently, so this is
+# a ninja-specific test.
+test = TestGyp.TestGyp(formats=['ninja'])
+
+if sys.platform == 'win32' or sys.platform == 'win64':
+ # This test is about clang/gcc vs. depfiles; VS gets a pass.
+ test.pass_test()
+ sys.exit(0)
+
+test.run_gyp('s-needs-no-depfiles.gyp')
+
+# Build the library, grab its timestamp, rebuild the library, ensure timestamp
+# hasn't changed.
+test.build('s-needs-no-depfiles.gyp', 'empty')
+empty_dll = test.built_file_path('empty', test.SHARED_LIB)
+test.built_file_must_exist(empty_dll)
+pre_stat = os.stat(test.built_file_path(empty_dll))
+test.sleep()
+test.build('s-needs-no-depfiles.gyp', 'empty')
+post_stat = os.stat(test.built_file_path(empty_dll))
+
+if pre_stat.st_mtime != post_stat.st_mtime:
+ test.fail_test()
+else:
+ test.pass_test()
diff --git a/third_party/python/gyp/test/ninja/s-needs-no-depfiles/s-needs-no-depfiles.gyp b/third_party/python/gyp/test/ninja/s-needs-no-depfiles/s-needs-no-depfiles.gyp
new file mode 100644
index 0000000000..bd66b1a70a
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/s-needs-no-depfiles/s-needs-no-depfiles.gyp
@@ -0,0 +1,13 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'empty',
+ 'type': 'shared_library',
+ 'sources': [ 'empty.s' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py b/third_party/python/gyp/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py
new file mode 100755
index 0000000000..fd4470ac23
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that relinking a solib doesn't relink a dependent executable if the
+solib's public API hasn't changed.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+import TestCommon
+import TestGyp
+
+# NOTE(fischman): This test will not work with other generators because the
+# API-hash-based-mtime-preservation optimization is only implemented in
+# ninja.py. It could be extended to the make.py generator as well pretty
+# easily, probably.
+# (also, it tests ninja-specific out paths, which would have to be generalized
+# if this was extended to other generators).
+test = TestGyp.TestGyp(formats=['ninja'])
+
+if not os.environ.get('ProgramFiles(x86)'):
+ # TODO(scottmg)
+ print('Skipping test on x86, http://crbug.com/365833')
+ test.pass_test()
+
+test.run_gyp('solibs_avoid_relinking.gyp')
+
+# Build the executable, grab its timestamp, touch the solib's source, rebuild
+# executable, ensure timestamp hasn't changed.
+test.build('solibs_avoid_relinking.gyp', 'b')
+test.built_file_must_exist('b' + TestCommon.exe_suffix)
+pre_stat = os.stat(test.built_file_path('b' + TestCommon.exe_suffix))
+os.utime(os.path.join(test.workdir, 'solib.cc'),
+ (pre_stat.st_atime, pre_stat.st_mtime + 100))
+test.sleep()
+test.build('solibs_avoid_relinking.gyp', 'b')
+post_stat = os.stat(test.built_file_path('b' + TestCommon.exe_suffix))
+
+if pre_stat.st_mtime != post_stat.st_mtime:
+ test.fail_test()
+else:
+ test.pass_test()
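The optimization being exercised keys relinking off the solib's exported interface rather than its timestamp. A rough illustration of the idea only, not gyp's actual mechanism; the nm invocation and the library path are assumptions of this sketch:

    import hashlib
    import subprocess

    def interface_signature(solib_path):
        # Hash the dynamic symbol table; if this value is unchanged after a
        # rebuild, dependent executables do not need to relink.
        syms = subprocess.run(['nm', '-gD', solib_path],
                              capture_output=True, text=True).stdout
        return hashlib.sha1(syms.encode()).hexdigest()

    print(interface_signature('out/Default/liba.so'))  # hypothetical path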
diff --git a/third_party/python/gyp/test/ninja/solibs_avoid_relinking/main.cc b/third_party/python/gyp/test/ninja/solibs_avoid_relinking/main.cc
new file mode 100644
index 0000000000..2cd74d3c77
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/solibs_avoid_relinking/main.cc
@@ -0,0 +1,5 @@
+extern int foo();
+
+int main() {
+ return foo();
+}
diff --git a/third_party/python/gyp/test/ninja/solibs_avoid_relinking/solib.cc b/third_party/python/gyp/test/ninja/solibs_avoid_relinking/solib.cc
new file mode 100644
index 0000000000..0856cd4e00
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/solibs_avoid_relinking/solib.cc
@@ -0,0 +1,8 @@
+#ifdef _MSC_VER
+__declspec(dllexport)
+#else
+__attribute__((visibility("default")))
+#endif
+int foo() {
+ return 42;
+}
diff --git a/third_party/python/gyp/test/ninja/solibs_avoid_relinking/solibs_avoid_relinking.gyp b/third_party/python/gyp/test/ninja/solibs_avoid_relinking/solibs_avoid_relinking.gyp
new file mode 100644
index 0000000000..e816351d68
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/solibs_avoid_relinking/solibs_avoid_relinking.gyp
@@ -0,0 +1,38 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'shared_library',
+ 'sources': [ 'solib.cc' ],
+ # Incremental linking enabled so that .lib timestamp is maintained when
+ # exports are unchanged.
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LinkIncremental': '2',
+ }
+ },
+ },
+ {
+ 'target_name': 'b',
+ 'type': 'executable',
+ 'sources': [ 'main.cc' ],
+ 'dependencies': [ 'a' ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LinkIncremental': '2',
+ }
+ },
+ },
+ ],
+ 'conditions': [
+ ['OS=="linux"', {
+ 'target_defaults': {
+ 'cflags': ['-fPIC'],
+ },
+ }],
+ ],
+}
diff --git a/third_party/python/gyp/test/ninja/use-console/foo.bar b/third_party/python/gyp/test/ninja/use-console/foo.bar
new file mode 100644
index 0000000000..07c476a866
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/use-console/foo.bar
@@ -0,0 +1,5 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+This is a dummy file for rule/action input.
diff --git a/third_party/python/gyp/test/ninja/use-console/gyptest-use-console.py b/third_party/python/gyp/test/ninja/use-console/gyptest-use-console.py
new file mode 100644
index 0000000000..f76fcd9829
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/use-console/gyptest-use-console.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure 'ninja_use_console' is supported in actions and rules.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['ninja'])
+
+test.run_gyp('use-console.gyp')
+
+no_pool = open(test.built_file_path('obj/no_pool.ninja')).read()
+if 'pool =' in no_pool:
+ test.fail_test()
+
+action_pool = open(test.built_file_path('obj/action_pool.ninja')).read()
+if 'pool = console' not in action_pool:
+ test.fail_test()
+
+rule_pool = open(test.built_file_path('obj/rule_pool.ninja')).read()
+if 'pool = console' not in rule_pool:
+ test.fail_test()
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/ninja/use-console/use-console.gyp b/third_party/python/gyp/test/ninja/use-console/use-console.gyp
new file mode 100644
index 0000000000..84e63184c6
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/use-console/use-console.gyp
@@ -0,0 +1,60 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'no_pool',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'some_action',
+ 'action': ['echo', 'hello'],
+ 'inputs': ['foo.bar'],
+ 'outputs': ['dummy'],
+ },
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'some_rule',
+ 'extension': 'bar',
+ 'action': ['echo', 'hello'],
+ 'outputs': ['dummy'],
+ },
+ ],
+ 'sources': [
+ 'foo.bar',
+ ],
+ },
+ {
+ 'target_name': 'action_pool',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'some_action',
+ 'action': ['echo', 'hello'],
+ 'inputs': ['foo.bar'],
+ 'outputs': ['dummy'],
+ 'ninja_use_console': 1,
+ },
+ ],
+ },
+ {
+ 'target_name': 'rule_pool',
+ 'type': 'none',
+ 'rules': [
+ {
+ 'rule_name': 'some_rule',
+ 'extension': 'bar',
+ 'action': ['echo', 'hello'],
+ 'outputs': ['dummy'],
+ 'ninja_use_console': 1,
+ },
+ ],
+ 'sources': [
+ 'foo.bar',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/ninja/use-custom-environment-files/gyptest-use-custom-environment-files.py b/third_party/python/gyp/test/ninja/use-custom-environment-files/gyptest-use-custom-environment-files.py
new file mode 100644
index 0000000000..0c44b1d1c9
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/use-custom-environment-files/gyptest-use-custom-environment-files.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure environment files can be suppressed.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ test.run_gyp('use-custom-environment-files.gyp',
+ '-G', 'ninja_use_custom_environment_files')
+
+ # Make sure environment files do not exist.
+ if os.path.exists(test.built_file_path('environment.x86')):
+ test.fail_test()
+ if os.path.exists(test.built_file_path('environment.x64')):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.cc b/third_party/python/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.cc
new file mode 100644
index 0000000000..1711567ef5
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.gyp b/third_party/python/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.gyp
new file mode 100644
index 0000000000..dbc95a9439
--- /dev/null
+++ b/third_party/python/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_use_custom_environment_files',
+ 'type': 'executable',
+ 'sources': [
+ 'use-custom-environment-files.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/no-cpp/gyptest-no-cpp.py b/third_party/python/gyp/test/no-cpp/gyptest-no-cpp.py
new file mode 100644
index 0000000000..a5d64512af
--- /dev/null
+++ b/third_party/python/gyp/test/no-cpp/gyptest-no-cpp.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Checks that C-only targets aren't linked against libstdc++.
+"""
+
+import TestGyp
+
+import re
+import subprocess
+import sys
+
+# set |match| to ignore build stderr output.
+test = TestGyp.TestGyp(match = lambda a, b: True)
+if (sys.platform != 'win32' and
+ not (sys.platform == 'darwin' and test.format == 'make')):
+ # TODO: Does a test like this make sense with Windows?
+
+ CHDIR = 'src'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+ test.build('test.gyp', 'no_cpp', chdir=CHDIR)
+
+ def LinksLibStdCpp(path):
+ path = test.built_file_path(path, chdir=CHDIR)
+ if sys.platform == 'darwin':
+ proc = subprocess.Popen(['otool', '-L', path], stdout=subprocess.PIPE)
+ else:
+ proc = subprocess.Popen(['ldd', path], stdout=subprocess.PIPE)
+ output = proc.communicate()[0].decode('utf-8')
+ assert not proc.returncode
+ return 'libstdc++' in output or 'libc++' in output
+
+ if LinksLibStdCpp('no_cpp'):
+ test.fail_test()
+
+ # Make, ninja, and CMake pick the compiler driver based on transitive
+ # checks. Xcode doesn't.
+ build_error_code = {
+ 'xcode': 65, # EX_DATAERR, see `man sysexits`
+ 'make': 0,
+ 'ninja': 0,
+ 'cmake': 0,
+ 'xcode-ninja': 0,
+ }[test.format]
+
+ test.build('test.gyp', 'no_cpp_dep_on_cc_lib', chdir=CHDIR,
+ status=build_error_code)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/no-cpp/src/call-f-main.c b/third_party/python/gyp/test/no-cpp/src/call-f-main.c
new file mode 100644
index 0000000000..8b95c5910e
--- /dev/null
+++ b/third_party/python/gyp/test/no-cpp/src/call-f-main.c
@@ -0,0 +1,2 @@
+void* f();
+int main() { f(); }
diff --git a/third_party/python/gyp/test/no-cpp/src/empty-main.c b/third_party/python/gyp/test/no-cpp/src/empty-main.c
new file mode 100644
index 0000000000..237c8ce181
--- /dev/null
+++ b/third_party/python/gyp/test/no-cpp/src/empty-main.c
@@ -0,0 +1 @@
+int main() {}
diff --git a/third_party/python/gyp/test/no-cpp/src/f.cc b/third_party/python/gyp/test/no-cpp/src/f.cc
new file mode 100644
index 0000000000..02f50f21a0
--- /dev/null
+++ b/third_party/python/gyp/test/no-cpp/src/f.cc
@@ -0,0 +1,3 @@
+extern "C" { void* f(); }
+
+void* f() { return new int; }
diff --git a/third_party/python/gyp/test/no-cpp/src/test.gyp b/third_party/python/gyp/test/no-cpp/src/test.gyp
new file mode 100644
index 0000000000..417015ec80
--- /dev/null
+++ b/third_party/python/gyp/test/no-cpp/src/test.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'no_cpp',
+ 'type': 'executable',
+ 'sources': [ 'empty-main.c' ],
+ },
+ # A static_library with a cpp file and a linkable with only .c files
+ # depending on it causes a linker error:
+ {
+ 'target_name': 'cpp_lib',
+ 'type': 'static_library',
+ 'sources': [ 'f.cc' ],
+ },
+ {
+ 'target_name': 'no_cpp_dep_on_cc_lib',
+ 'type': 'executable',
+ 'dependencies': [ 'cpp_lib' ],
+ 'sources': [ 'call-f-main.c' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/no-output/gyptest-no-output.py b/third_party/python/gyp/test/no-output/gyptest-no-output.py
new file mode 100755
index 0000000000..bf9a0b5aaa
--- /dev/null
+++ b/third_party/python/gyp/test/no-output/gyptest-no-output.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that things don't explode when there are targets without outputs.
+"""
+
+import TestGyp
+
+# TODO(evan): in ninja when there are no targets, there is no 'all'
+# target either. Disabling this test for now.
+test = TestGyp.TestGyp(formats=['!ninja'])
+
+test.run_gyp('nooutput.gyp', chdir='src')
+test.relocate('src', 'relocate/src')
+test.build('nooutput.gyp', chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/no-output/src/nooutput.gyp b/third_party/python/gyp/test/no-output/src/nooutput.gyp
new file mode 100644
index 0000000000..c40124efc1
--- /dev/null
+++ b/third_party/python/gyp/test/no-output/src/nooutput.gyp
@@ -0,0 +1,17 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'no_output',
+ 'type': 'none',
+ 'direct_dependent_settings': {
+ 'defines': [
+ 'NADA',
+ ],
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/product/gyptest-product.py b/third_party/python/gyp/test/product/gyptest-product.py
new file mode 100755
index 0000000000..53eb5c376b
--- /dev/null
+++ b/third_party/python/gyp/test/product/gyptest-product.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that product_prefix, product_name, product_extension and product_dir
+control the names and locations of the built outputs.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('product.gyp')
+test.build('product.gyp')
+
+# executables
+test.built_file_must_exist('alt1' + test._exe, test.EXECUTABLE, bare=True)
+test.built_file_must_exist('hello2.stuff', test.EXECUTABLE, bare=True)
+test.built_file_must_exist('yoalt3.stuff', test.EXECUTABLE, bare=True)
+
+# shared libraries
+test.built_file_must_exist(test.dll_ + 'alt4' + test._dll,
+ test.SHARED_LIB, bare=True)
+test.built_file_must_exist(test.dll_ + 'hello5.stuff',
+ test.SHARED_LIB, bare=True)
+test.built_file_must_exist('yoalt6.stuff', test.SHARED_LIB, bare=True)
+
+# static libraries
+test.built_file_must_exist(test.lib_ + 'alt7' + test._lib,
+ test.STATIC_LIB, bare=True)
+test.built_file_must_exist(test.lib_ + 'hello8.stuff',
+ test.STATIC_LIB, bare=True)
+test.built_file_must_exist('yoalt9.stuff', test.STATIC_LIB, bare=True)
+
+# alternate product_dir
+test.built_file_must_exist('bob/yoalt10.stuff', test.EXECUTABLE, bare=True)
+test.built_file_must_exist('bob/yoalt11.stuff', test.EXECUTABLE, bare=True)
+test.built_file_must_exist('bob/yoalt12.stuff', test.EXECUTABLE, bare=True)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/product/hello.c b/third_party/python/gyp/test/product/hello.c
new file mode 100644
index 0000000000..41fdff0e38
--- /dev/null
+++ b/third_party/python/gyp/test/product/hello.c
@@ -0,0 +1,15 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int func1(void) {
+ return 42;
+}
+
+int main(void) {
+ printf("Hello, world!\n");
+ printf("%d\n", func1());
+ return 0;
+}
diff --git a/third_party/python/gyp/test/product/product.gyp b/third_party/python/gyp/test/product/product.gyp
new file mode 100644
index 0000000000..c25eaaacb5
--- /dev/null
+++ b/third_party/python/gyp/test/product/product.gyp
@@ -0,0 +1,128 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello1',
+ 'product_name': 'alt1',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello2',
+ 'product_extension': 'stuff',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello3',
+ 'product_name': 'alt3',
+ 'product_extension': 'stuff',
+ 'product_prefix': 'yo',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+
+ {
+ 'target_name': 'hello4',
+ 'product_name': 'alt4',
+ 'type': 'shared_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello5',
+ 'product_extension': 'stuff',
+ 'type': 'shared_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello6',
+ 'product_name': 'alt6',
+ 'product_extension': 'stuff',
+ 'product_prefix': 'yo',
+ 'type': 'shared_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+
+ {
+ 'target_name': 'hello7',
+ 'product_name': 'alt7',
+ 'type': 'static_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello8',
+ 'product_extension': 'stuff',
+ 'type': 'static_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello9',
+ 'product_name': 'alt9',
+ 'product_extension': 'stuff',
+ 'product_prefix': 'yo',
+ 'type': 'static_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello10',
+ 'product_name': 'alt10',
+ 'product_extension': 'stuff',
+ 'product_prefix': 'yo',
+ 'product_dir': '<(PRODUCT_DIR)/bob',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello11',
+ 'product_name': 'alt11',
+ 'product_extension': 'stuff',
+ 'product_prefix': 'yo',
+ 'product_dir': '<(PRODUCT_DIR)/bob',
+ 'type': 'shared_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello12',
+ 'product_name': 'alt12',
+ 'product_extension': 'stuff',
+ 'product_prefix': 'yo',
+ 'product_dir': '<(PRODUCT_DIR)/bob',
+ 'type': 'static_library',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="linux"', {
+ 'target_defaults': {
+ 'cflags': ['-fPIC'],
+ },
+ }],
+ ],
+}
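The file names that gyptest-product.py checks follow directly from these fields. A simplified sketch of the composition (the prefix/extension defaulting here is an assumption for illustration, not gyp's exact logic):

    def product_filename(prefix, name, extension):
        # Simplified: gyp also applies per-type default prefixes and extensions.
        return '%s%s.%s' % (prefix, name, extension)

    print(product_filename('yo', 'alt3', 'stuff'))  # yoalt3.stuff
    print(product_filename('', 'hello2', 'stuff'))  # hello2.stuff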
diff --git a/third_party/python/gyp/test/prune_targets/gyptest-prune-targets.py b/third_party/python/gyp/test/prune_targets/gyptest-prune-targets.py
new file mode 100644
index 0000000000..b2c90f717e
--- /dev/null
+++ b/third_party/python/gyp/test/prune_targets/gyptest-prune-targets.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that --root-target removes targets not needed by the specified roots.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+# The xcode-ninja generator has its own logic for which targets to include
+if test.format == 'xcode-ninja':
+ test.skip_test()
+
+build_error_code = {
+ 'cmake': 1,
+ 'make': 2,
+ 'msvs': 1,
+ 'ninja': 1,
+ 'xcode': 65,
+}[test.format]
+
+# By default, everything will be included.
+test.run_gyp('test1.gyp')
+test.build('test2.gyp', 'lib1')
+test.build('test2.gyp', 'lib2')
+test.build('test2.gyp', 'lib3')
+test.build('test2.gyp', 'lib_indirect')
+test.build('test1.gyp', 'program1')
+test.build('test1.gyp', 'program2')
+test.build('test1.gyp', 'program3')
+
+# With deep dependencies of program1 only.
+test.run_gyp('test1.gyp', '--root-target=program1')
+test.build('test2.gyp', 'lib1')
+test.build('test2.gyp', 'lib2', status=build_error_code, stderr=None)
+test.build('test2.gyp', 'lib3', status=build_error_code, stderr=None)
+test.build('test2.gyp', 'lib_indirect')
+test.build('test1.gyp', 'program1')
+test.build('test1.gyp', 'program2', status=build_error_code, stderr=None)
+test.build('test1.gyp', 'program3', status=build_error_code, stderr=None)
+
+# With deep dependencies of program2 only.
+test.run_gyp('test1.gyp', '--root-target=program2')
+test.build('test2.gyp', 'lib1', status=build_error_code, stderr=None)
+test.build('test2.gyp', 'lib2')
+test.build('test2.gyp', 'lib3', status=build_error_code, stderr=None)
+test.build('test2.gyp', 'lib_indirect')
+test.build('test1.gyp', 'program1', status=build_error_code, stderr=None)
+test.build('test1.gyp', 'program2')
+test.build('test1.gyp', 'program3', status=build_error_code, stderr=None)
+
+# With deep dependencies of program1 and program2.
+test.run_gyp('test1.gyp', '--root-target=program1', '--root-target=program2')
+test.build('test2.gyp', 'lib1')
+test.build('test2.gyp', 'lib2')
+test.build('test2.gyp', 'lib3', status=build_error_code, stderr=None)
+test.build('test2.gyp', 'lib_indirect')
+test.build('test1.gyp', 'program1')
+test.build('test1.gyp', 'program2')
+test.build('test1.gyp', 'program3', status=build_error_code, stderr=None)
+
+test.pass_test()
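The pass/fail pattern above is plain reachability over the dependency edges declared in test1.gyp and test2.gyp in this directory. A short sketch, with the dependency table transcribed from those files:

    # Targets reachable from a set of --root-target values.
    deps = {
        'program1': ['lib1'], 'program2': ['lib2'], 'program3': ['lib3'],
        'lib1': ['lib_indirect'], 'lib2': ['lib_indirect'],
        'lib3': [], 'lib_indirect': [],
    }

    def reachable(roots):
        seen, stack = set(), list(roots)
        while stack:
            target = stack.pop()
            if target not in seen:
                seen.add(target)
                stack.extend(deps[target])
        return seen

    print(sorted(reachable(['program1'])))  # ['lib1', 'lib_indirect', 'program1']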
diff --git a/third_party/python/gyp/test/prune_targets/lib1.cc b/third_party/python/gyp/test/prune_targets/lib1.cc
new file mode 100644
index 0000000000..692b7de6d8
--- /dev/null
+++ b/third_party/python/gyp/test/prune_targets/lib1.cc
@@ -0,0 +1,6 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void libfunc1() {
+}
diff --git a/third_party/python/gyp/test/prune_targets/lib2.cc b/third_party/python/gyp/test/prune_targets/lib2.cc
new file mode 100644
index 0000000000..aed394afcf
--- /dev/null
+++ b/third_party/python/gyp/test/prune_targets/lib2.cc
@@ -0,0 +1,6 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void libfunc2() {
+}
diff --git a/third_party/python/gyp/test/prune_targets/lib3.cc b/third_party/python/gyp/test/prune_targets/lib3.cc
new file mode 100644
index 0000000000..af0f717b02
--- /dev/null
+++ b/third_party/python/gyp/test/prune_targets/lib3.cc
@@ -0,0 +1,6 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void libfunc3() {
+}
diff --git a/third_party/python/gyp/test/prune_targets/lib_indirect.cc b/third_party/python/gyp/test/prune_targets/lib_indirect.cc
new file mode 100644
index 0000000000..92d9ea40db
--- /dev/null
+++ b/third_party/python/gyp/test/prune_targets/lib_indirect.cc
@@ -0,0 +1,6 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void libfunc_indirect() {
+}
diff --git a/third_party/python/gyp/test/prune_targets/program.cc b/third_party/python/gyp/test/prune_targets/program.cc
new file mode 100644
index 0000000000..c9ac070ecd
--- /dev/null
+++ b/third_party/python/gyp/test/prune_targets/program.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/prune_targets/test1.gyp b/third_party/python/gyp/test/prune_targets/test1.gyp
new file mode 100644
index 0000000000..b65ec19fa4
--- /dev/null
+++ b/third_party/python/gyp/test/prune_targets/test1.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program1',
+ 'type': 'executable',
+ 'sources': [ 'program.cc' ],
+ 'dependencies': [ 'test2.gyp:lib1' ],
+ },
+ {
+ 'target_name': 'program2',
+ 'type': 'executable',
+ 'sources': [ 'program.cc' ],
+ 'dependencies': [ 'test2.gyp:lib2' ],
+ },
+ {
+ 'target_name': 'program3',
+ 'type': 'executable',
+ 'sources': [ 'program.cc' ],
+ 'dependencies': [ 'test2.gyp:lib3' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/prune_targets/test2.gyp b/third_party/python/gyp/test/prune_targets/test2.gyp
new file mode 100644
index 0000000000..16f0fd3290
--- /dev/null
+++ b/third_party/python/gyp/test/prune_targets/test2.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'lib1',
+ 'type': 'static_library',
+ 'sources': [ 'lib1.cc' ],
+ 'dependencies': [ 'lib_indirect' ],
+ },
+ {
+ 'target_name': 'lib2',
+ 'type': 'static_library',
+ 'sources': [ 'lib2.cc' ],
+ 'dependencies': [ 'lib_indirect' ],
+ },
+ {
+ 'target_name': 'lib3',
+ 'type': 'static_library',
+ 'sources': [ 'lib3.cc' ],
+ },
+ {
+ 'target_name': 'lib_indirect',
+ 'type': 'static_library',
+ 'sources': [ 'lib_indirect.cc' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/relative/foo/a/a.cc b/third_party/python/gyp/test/relative/foo/a/a.cc
new file mode 100644
index 0000000000..7d1c953448
--- /dev/null
+++ b/third_party/python/gyp/test/relative/foo/a/a.cc
@@ -0,0 +1,9 @@
+/*
+ * Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/relative/foo/a/a.gyp b/third_party/python/gyp/test/relative/foo/a/a.gyp
new file mode 100644
index 0000000000..66316ac681
--- /dev/null
+++ b/third_party/python/gyp/test/relative/foo/a/a.gyp
@@ -0,0 +1,13 @@
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'executable',
+ 'sources': ['a.cc'],
+ 'dependencies': [
+ '../../foo/b/b.gyp:b',
+ 'c/c.gyp:c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/relative/foo/a/c/c.cc b/third_party/python/gyp/test/relative/foo/a/c/c.cc
new file mode 100644
index 0000000000..9d22471684
--- /dev/null
+++ b/third_party/python/gyp/test/relative/foo/a/c/c.cc
@@ -0,0 +1,9 @@
+/*
+ * Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+int func() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/relative/foo/a/c/c.gyp b/third_party/python/gyp/test/relative/foo/a/c/c.gyp
new file mode 100644
index 0000000000..c1f087db99
--- /dev/null
+++ b/third_party/python/gyp/test/relative/foo/a/c/c.gyp
@@ -0,0 +1,12 @@
+{
+ 'targets': [
+ {
+ 'target_name': 'c',
+ 'type': 'static_library',
+ 'sources': ['c.cc'],
+ 'dependencies': [
+ '../../b/b.gyp:b',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/relative/foo/b/b.cc b/third_party/python/gyp/test/relative/foo/b/b.cc
new file mode 100644
index 0000000000..011d59cebb
--- /dev/null
+++ b/third_party/python/gyp/test/relative/foo/b/b.cc
@@ -0,0 +1,9 @@
+/*
+ * Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+int func2() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/relative/foo/b/b.gyp b/third_party/python/gyp/test/relative/foo/b/b.gyp
new file mode 100644
index 0000000000..0ebe4533d3
--- /dev/null
+++ b/third_party/python/gyp/test/relative/foo/b/b.gyp
@@ -0,0 +1,9 @@
+{
+ 'targets': [
+ {
+ 'target_name': 'b',
+ 'type': 'static_library',
+ 'sources': ['b.cc'],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/relative/gyptest-default.py b/third_party/python/gyp/test/relative/gyptest-default.py
new file mode 100755
index 0000000000..685cdfd75e
--- /dev/null
+++ b/third_party/python/gyp/test/relative/gyptest-default.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a dependency referenced through different relative paths
+resolves to a single project in the generated Visual Studio solution.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(workdir='workarea_default', formats=['msvs'])
+
+# Run from down in foo.
+test.run_gyp('a.gyp', chdir='foo/a')
+sln = test.workpath('foo/a/a.sln')
+sln_data = open(sln, 'rb').read().decode('utf-8', 'ignore')
+vcproj = sln_data.count('b.vcproj')
+vcxproj = sln_data.count('b.vcxproj')
+if (vcproj, vcxproj) not in [(1, 0), (0, 1)]:
+ test.fail_test()
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/rename/filecase/file.c b/third_party/python/gyp/test/rename/filecase/file.c
new file mode 100644
index 0000000000..76e8197013
--- /dev/null
+++ b/third_party/python/gyp/test/rename/filecase/file.c
@@ -0,0 +1 @@
+int main() { return 0; }
diff --git a/third_party/python/gyp/test/rename/filecase/test-casesensitive.gyp b/third_party/python/gyp/test/rename/filecase/test-casesensitive.gyp
new file mode 100644
index 0000000000..48eaa6eb67
--- /dev/null
+++ b/third_party/python/gyp/test/rename/filecase/test-casesensitive.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'filecaserename_sensitive',
+ 'type': 'executable',
+ 'sources': [
+ 'FiLe.c',
+ 'fIlE.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rename/filecase/test.gyp b/third_party/python/gyp/test/rename/filecase/test.gyp
new file mode 100644
index 0000000000..eaee9337b6
--- /dev/null
+++ b/third_party/python/gyp/test/rename/filecase/test.gyp
@@ -0,0 +1,14 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'filecaserename',
+ 'type': 'executable',
+ 'sources': [
+ 'file.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rename/gyptest-filecase.py b/third_party/python/gyp/test/rename/gyptest-filecase.py
new file mode 100644
index 0000000000..daed5180d3
--- /dev/null
+++ b/third_party/python/gyp/test/rename/gyptest-filecase.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Checks that files whose case changes get rebuilt correctly.
+"""
+
+import os
+import TestGyp
+
+test = TestGyp.TestGyp()
+CHDIR = 'filecase'
+test.run_gyp('test.gyp', chdir=CHDIR)
+test.build('test.gyp', test.ALL, chdir=CHDIR)
+
+os.rename('filecase/file.c', 'filecase/fIlE.c')
+test.write('filecase/test.gyp',
+ test.read('filecase/test.gyp').replace('file.c', 'fIlE.c'))
+test.run_gyp('test.gyp', chdir=CHDIR)
+test.build('test.gyp', test.ALL, chdir=CHDIR)
+
+
+# Check that having files that differ only in case still works on
+# case-sensitive file systems.
+test.write('filecase/FiLe.c', 'int f(); int main() { return f(); }')
+test.write('filecase/fIlE.c', 'int f() { return 42; }')
+is_case_sensitive = test.read('filecase/FiLe.c') != test.read('filecase/fIlE.c')
+if is_case_sensitive:
+ test.run_gyp('test-casesensitive.gyp', chdir=CHDIR)
+ test.build('test-casesensitive.gyp', test.ALL, chdir=CHDIR)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/restat/gyptest-restat.py b/third_party/python/gyp/test/restat/gyptest-restat.py
new file mode 100644
index 0000000000..87379044dd
--- /dev/null
+++ b/third_party/python/gyp/test/restat/gyptest-restat.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that dependent rules are executed iff a dependency action modifies its
+outputs.
+"""
+
+import TestGyp
+import os
+
+test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+test.run_gyp('restat.gyp', chdir='src')
+
+chdir = 'relocate/src'
+test.relocate('src', chdir)
+
+# Building 'dependent' the first time generates 'side_effect', but building it
+# the second time doesn't, because 'create_intermediate' doesn't update its
+# output.
+test.build('restat.gyp', 'dependent', chdir=chdir)
+test.built_file_must_exist('side_effect', chdir=chdir)
+os.remove(test.built_file_path('side_effect', chdir=chdir))
+test.build('restat.gyp', 'dependent', chdir=chdir)
+test.built_file_must_not_exist('side_effect', chdir=chdir)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/restat/src/create_intermediate.py b/third_party/python/gyp/test/restat/src/create_intermediate.py
new file mode 100644
index 0000000000..a4d7450371
--- /dev/null
+++ b/third_party/python/gyp/test/restat/src/create_intermediate.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Create argv[1] iff it doesn't already exist.
+"""
+
+import os
+import sys
+
+outfile = sys.argv[1]
+if os.path.exists(outfile):
+ sys.exit()
+open(outfile, "wb").close()
diff --git a/third_party/python/gyp/test/restat/src/restat.gyp b/third_party/python/gyp/test/restat/src/restat.gyp
new file mode 100644
index 0000000000..ff020e0ce6
--- /dev/null
+++ b/third_party/python/gyp/test/restat/src/restat.gyp
@@ -0,0 +1,50 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'create_intermediate',
+ 'type': 'none',
+ 'msvs_cygwin_shell': '0',
+ 'actions': [
+ {
+ 'action_name': 'create_intermediate',
+ 'inputs': [
+ 'create_intermediate.py',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/intermediate',
+ 'ALWAYS.run.ALWAYS',
+ ],
+ 'action': [
+ 'python', 'create_intermediate.py', '<(PRODUCT_DIR)/intermediate',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'dependent',
+ 'type': 'none',
+ 'msvs_cygwin_shell': '0',
+ 'dependencies': [
+ 'create_intermediate',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'dependent',
+ 'inputs': [
+ '<(PRODUCT_DIR)/intermediate',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/dependent'
+ ],
+ 'action': [
+ 'python', 'touch.py', '<(PRODUCT_DIR)/dependent', '<(PRODUCT_DIR)/side_effect',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/restat/src/touch.py b/third_party/python/gyp/test/restat/src/touch.py
new file mode 100644
index 0000000000..7cd781a90c
--- /dev/null
+++ b/third_party/python/gyp/test/restat/src/touch.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Cross-platform touch."""
+
+import os
+import sys
+
+for fname in sys.argv[1:]:
+ if os.path.exists(fname):
+ os.utime(fname, None)
+ else:
+ open(fname, 'w').close()
diff --git a/third_party/python/gyp/test/rules-dirname/gyptest-dirname.py b/third_party/python/gyp/test/rules-dirname/gyptest-dirname.py
new file mode 100755
index 0000000000..da5429cbad
--- /dev/null
+++ b/third_party/python/gyp/test/rules-dirname/gyptest-dirname.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies rules that use RULE_INPUT_DIRNAME, with an explicit build target of 'all'.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+import os
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
+test = TestGyp.TestGyp(formats=['make', 'ninja', 'xcode', 'msvs'])
+
+test.run_gyp('actions.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('actions.gyp', chdir='relocate/src')
+
+expect = """\
+no dir here
+hi c
+hello baz
+"""
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('gencc_int_output', chdir=chdir, stdout=expect)
+if test.format == 'msvs':
+ test.run_built_executable('gencc_int_output_external', chdir=chdir,
+ stdout=expect)
+
+test.must_match('relocate/src/subdir/foo/bar/baz.dirname',
+ os.path.join('foo', 'bar'))
+test.must_match('relocate/src/subdir/a/b/c.dirname',
+ os.path.join('a', 'b'))
+
+# FIXME the xcode and make generators incorrectly convert RULE_INPUT_PATH
+# to an absolute path, making the tests below fail!
+if test.format != 'xcode' and test.format != 'make':
+ test.must_match('relocate/src/subdir/foo/bar/baz.path',
+ os.path.join('foo', 'bar', 'baz.printvars'))
+ test.must_match('relocate/src/subdir/a/b/c.path',
+ os.path.join('a', 'b', 'c.printvars'))
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/rules-dirname/src/actions.gyp b/third_party/python/gyp/test/rules-dirname/src/actions.gyp
new file mode 100644
index 0000000000..c5693c6c9e
--- /dev/null
+++ b/third_party/python/gyp/test/rules-dirname/src/actions.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'pull_in_all_actions',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir/input-rule-dirname.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules-dirname/src/copy-file.py b/third_party/python/gyp/test/rules-dirname/src/copy-file.py
new file mode 100755
index 0000000000..271a72b6b1
--- /dev/null
+++ b/third_party/python/gyp/test/rules-dirname/src/copy-file.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import sys
+
+contents = open(sys.argv[1], 'r').read()
+open(sys.argv[2], 'w').write(contents)
+
+sys.exit(0)
diff --git a/third_party/python/gyp/test/rules-dirname/src/subdir/a/b/c.gencc b/third_party/python/gyp/test/rules-dirname/src/subdir/a/b/c.gencc
new file mode 100644
index 0000000000..29cb5f79ad
--- /dev/null
+++ b/third_party/python/gyp/test/rules-dirname/src/subdir/a/b/c.gencc
@@ -0,0 +1,8 @@
+// -*- mode: c++ -*-
+#include <stdio.h>
+
+namespace gen {
+ void c() {
+ printf("hi c\n");
+ }
+}
diff --git a/third_party/python/gyp/test/rules-dirname/src/subdir/a/b/c.printvars b/third_party/python/gyp/test/rules-dirname/src/subdir/a/b/c.printvars
new file mode 100644
index 0000000000..cc4561dc41
--- /dev/null
+++ b/third_party/python/gyp/test/rules-dirname/src/subdir/a/b/c.printvars
@@ -0,0 +1 @@
+# Empty file for testing build rules
diff --git a/third_party/python/gyp/test/rules-dirname/src/subdir/foo/bar/baz.gencc b/third_party/python/gyp/test/rules-dirname/src/subdir/foo/bar/baz.gencc
new file mode 100644
index 0000000000..90b4ce9243
--- /dev/null
+++ b/third_party/python/gyp/test/rules-dirname/src/subdir/foo/bar/baz.gencc
@@ -0,0 +1,8 @@
+// -*- mode: c++ -*-
+#include <stdio.h>
+
+namespace gen {
+ void baz() {
+ printf("hello baz\n");
+ }
+}
diff --git a/third_party/python/gyp/test/rules-dirname/src/subdir/foo/bar/baz.printvars b/third_party/python/gyp/test/rules-dirname/src/subdir/foo/bar/baz.printvars
new file mode 100644
index 0000000000..cc4561dc41
--- /dev/null
+++ b/third_party/python/gyp/test/rules-dirname/src/subdir/foo/bar/baz.printvars
@@ -0,0 +1 @@
+# Empty file for testing build rules
diff --git a/third_party/python/gyp/test/rules-dirname/src/subdir/input-rule-dirname.gyp b/third_party/python/gyp/test/rules-dirname/src/subdir/input-rule-dirname.gyp
new file mode 100644
index 0000000000..da749a2231
--- /dev/null
+++ b/third_party/python/gyp/test/rules-dirname/src/subdir/input-rule-dirname.gyp
@@ -0,0 +1,140 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'print_rule_input_dirname',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'foo/bar/baz.printvars',
+ 'a/b/c.printvars',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'printvars',
+ 'extension': 'printvars',
+ 'inputs': [
+ 'printvars.py',
+ ],
+ 'outputs': [
+ '<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).dirname',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(RULE_INPUT_DIRNAME)', '<@(_outputs)',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'print_rule_input_path',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'foo/bar/baz.printvars',
+ 'a/b/c.printvars',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'printvars',
+ 'extension': 'printvars',
+ 'inputs': [
+ 'printvars.py',
+ ],
+ 'outputs': [
+ '<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).path',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'gencc_int_output',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'nodir.gencc',
+ 'foo/bar/baz.gencc',
+ 'a/b/c.gencc',
+ 'main.cc',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'gencc',
+ 'extension': 'gencc',
+ 'inputs': [
+ '<(DEPTH)/copy-file.py',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).cc',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="win"', {
+ 'targets': [
+ {
+ 'target_name': 'gencc_int_output_external',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'msvs_cygwin_dirs': ['../../../../../../<(DEPTH)/third_party/cygwin'],
+ 'sources': [
+ 'nodir.gencc',
+ 'foo/bar/baz.gencc',
+ 'a/b/c.gencc',
+ 'main.cc',
+ ],
+ 'dependencies': [
+ 'cygwin',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'gencc',
+ 'extension': 'gencc',
+ 'msvs_external_rule': 1,
+ 'inputs': [
+ '<(DEPTH)/copy-file.py',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).cc',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ {
+ 'target_name': 'cygwin',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'setup_mount',
+ 'msvs_cygwin_shell': 0,
+ 'inputs': [
+ '../../../../../../<(DEPTH)/third_party/cygwin/setup_mount.bat',
+ ],
+ # Visual Studio requires an output file, or else the
+ # custom build step won't run.
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/_always_run_setup_mount.marker',
+ ],
+ 'action': ['<@(_inputs)'],
+ },
+ ],
+ },
+ ],
+ }],
+ ],
+}
diff --git a/third_party/python/gyp/test/rules-dirname/src/subdir/main.cc b/third_party/python/gyp/test/rules-dirname/src/subdir/main.cc
new file mode 100644
index 0000000000..3bb8e01395
--- /dev/null
+++ b/third_party/python/gyp/test/rules-dirname/src/subdir/main.cc
@@ -0,0 +1,14 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+namespace gen {
+ extern void nodir();
+ extern void c();
+ extern void baz();
+}
+
+int main() {
+ gen::nodir();
+ gen::c();
+ gen::baz();
+}
diff --git a/third_party/python/gyp/test/rules-dirname/src/subdir/nodir.gencc b/third_party/python/gyp/test/rules-dirname/src/subdir/nodir.gencc
new file mode 100644
index 0000000000..720f589bc2
--- /dev/null
+++ b/third_party/python/gyp/test/rules-dirname/src/subdir/nodir.gencc
@@ -0,0 +1,8 @@
+// -*- mode: c++ -*-
+#include <stdio.h>
+
+namespace gen {
+ void nodir() {
+ printf("no dir here\n");
+ }
+}
diff --git a/third_party/python/gyp/test/rules-dirname/src/subdir/printvars.py b/third_party/python/gyp/test/rules-dirname/src/subdir/printvars.py
new file mode 100755
index 0000000000..ef3d92e8cf
--- /dev/null
+++ b/third_party/python/gyp/test/rules-dirname/src/subdir/printvars.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Writes the rule variable passed as argv[1] to the file named by argv[2].
+"""
+
+import sys
+
+out = open(sys.argv[2], 'w')
+out.write(sys.argv[1])
diff --git a/third_party/python/gyp/test/rules-rebuild/gyptest-all.py b/third_party/python/gyp/test/rules-rebuild/gyptest-all.py
new file mode 100755
index 0000000000..aaaa2a6e6f
--- /dev/null
+++ b/third_party/python/gyp/test/rules-rebuild/gyptest-all.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a rule that generates multiple outputs rebuilds
+correctly when the inputs change.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(workdir='workarea_all')
+
+test.run_gyp('same_target.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+
+test.build('same_target.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from main.c
+Hello from prog1.in!
+Hello from prog2.in!
+"""
+
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
+
+
+test.sleep()
+contents = test.read(['relocate', 'src', 'prog1.in'])
+contents = contents.replace('!', ' AGAIN!')
+test.write(['relocate', 'src', 'prog1.in'], contents)
+
+test.build('same_target.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from main.c
+Hello from prog1.in AGAIN!
+Hello from prog2.in!
+"""
+
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
+
+
+test.sleep()
+contents = test.read(['relocate', 'src', 'prog2.in'])
+contents = contents.replace('!', ' AGAIN!')
+test.write(['relocate', 'src', 'prog2.in'], contents)
+
+test.build('same_target.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from main.c
+Hello from prog1.in AGAIN!
+Hello from prog2.in AGAIN!
+"""
+
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/rules-rebuild/gyptest-default.py b/third_party/python/gyp/test/rules-rebuild/gyptest-default.py
new file mode 100755
index 0000000000..ac3f0209aa
--- /dev/null
+++ b/third_party/python/gyp/test/rules-rebuild/gyptest-default.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a rule that generates multiple outputs rebuilds
+correctly when the inputs change.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(workdir='workarea_default')
+
+test.run_gyp('same_target.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+
+test.build('same_target.gyp', chdir='relocate/src')
+
+expect = """\
+Hello from main.c
+Hello from prog1.in!
+Hello from prog2.in!
+"""
+
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
+
+
+test.sleep()
+contents = test.read(['relocate', 'src', 'prog1.in'])
+contents = contents.replace('!', ' AGAIN!')
+test.write(['relocate', 'src', 'prog1.in'], contents)
+
+test.build('same_target.gyp', chdir='relocate/src')
+
+expect = """\
+Hello from main.c
+Hello from prog1.in AGAIN!
+Hello from prog2.in!
+"""
+
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
+
+
+test.sleep()
+contents = test.read(['relocate', 'src', 'prog2.in'])
+contents = contents.replace('!', ' AGAIN!')
+test.write(['relocate', 'src', 'prog2.in'], contents)
+
+test.build('same_target.gyp', chdir='relocate/src')
+
+expect = """\
+Hello from main.c
+Hello from prog1.in AGAIN!
+Hello from prog2.in AGAIN!
+"""
+
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
+
+
+# Test that modifying a rule's inputs (specifically, make-sources.py) causes
+# the targets to be rebuilt.
+
+test.sleep()
+contents = test.read(['relocate', 'src', 'make-sources.py'])
+contents = contents.replace('%s', 'the amazing %s')
+test.write(['relocate', 'src', 'make-sources.py'], contents)
+
+test.build('same_target.gyp', chdir='relocate/src')
+
+expect = """\
+Hello from main.c
+Hello from the amazing prog1.in AGAIN!
+Hello from the amazing prog2.in AGAIN!
+"""
+
+test.run_built_executable('program', chdir='relocate/src', stdout=expect)
+
+test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/rules-rebuild/src/main.c b/third_party/python/gyp/test/rules-rebuild/src/main.c
new file mode 100644
index 0000000000..bd8fbb20ea
--- /dev/null
+++ b/third_party/python/gyp/test/rules-rebuild/src/main.c
@@ -0,0 +1,12 @@
+#include <stdio.h>
+
+extern void prog1(void);
+extern void prog2(void);
+
+int main(void)
+{
+ printf("Hello from main.c\n");
+ prog1();
+ prog2();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/rules-rebuild/src/make-sources.py b/third_party/python/gyp/test/rules-rebuild/src/make-sources.py
new file mode 100755
index 0000000000..dd9e52856e
--- /dev/null
+++ b/third_party/python/gyp/test/rules-rebuild/src/make-sources.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+assert len(sys.argv) == 4, sys.argv
+
+(in_file, c_file, h_file) = sys.argv[1:]
+
+def write_file(filename, contents):
+ open(filename, 'w').write(contents)
+
+write_file(c_file, open(in_file, 'r').read())
+
+write_file(h_file, '#define NAME "%s"\n' % in_file)
+
+sys.exit(0)
diff --git a/third_party/python/gyp/test/rules-rebuild/src/prog1.in b/third_party/python/gyp/test/rules-rebuild/src/prog1.in
new file mode 100644
index 0000000000..191b00ef1e
--- /dev/null
+++ b/third_party/python/gyp/test/rules-rebuild/src/prog1.in
@@ -0,0 +1,7 @@
+#include <stdio.h>
+#include "prog1.h"
+
+void prog1(void)
+{
+ printf("Hello from %s!\n", NAME);
+}
diff --git a/third_party/python/gyp/test/rules-rebuild/src/prog2.in b/third_party/python/gyp/test/rules-rebuild/src/prog2.in
new file mode 100644
index 0000000000..7bfac5104c
--- /dev/null
+++ b/third_party/python/gyp/test/rules-rebuild/src/prog2.in
@@ -0,0 +1,7 @@
+#include <stdio.h>
+#include "prog2.h"
+
+void prog2(void)
+{
+ printf("Hello from %s!\n", NAME);
+}
diff --git a/third_party/python/gyp/test/rules-rebuild/src/same_target.gyp b/third_party/python/gyp/test/rules-rebuild/src/same_target.gyp
new file mode 100644
index 0000000000..22ba56056d
--- /dev/null
+++ b/third_party/python/gyp/test/rules-rebuild/src/same_target.gyp
@@ -0,0 +1,31 @@
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'main.c',
+ 'prog1.in',
+ 'prog2.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'make_sources',
+ 'extension': 'in',
+ 'inputs': [
+ 'make-sources.py',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_NAME)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules-use-built-dependencies/gyptest-use-built-dependencies.py b/third_party/python/gyp/test/rules-use-built-dependencies/gyptest-use-built-dependencies.py
new file mode 100755
index 0000000000..a57c36d5b0
--- /dev/null
+++ b/third_party/python/gyp/test/rules-use-built-dependencies/gyptest-use-built-dependencies.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that rules which use built dependencies work correctly.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('use-built-dependencies-rule.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+test.build('use-built-dependencies-rule.gyp', chdir='relocate/src')
+
+test.built_file_must_exist('main_output', chdir='relocate/src')
+test.built_file_must_match('main_output', 'output', chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/rules-use-built-dependencies/src/main.cc b/third_party/python/gyp/test/rules-use-built-dependencies/src/main.cc
new file mode 100644
index 0000000000..937d284599
--- /dev/null
+++ b/third_party/python/gyp/test/rules-use-built-dependencies/src/main.cc
@@ -0,0 +1,17 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+#include <stdio.h>
+
+int main(int argc, char *argv[]) {
+ if (argc < 2) {
+ return 2;
+ }
+ FILE* file;
+ file = fopen(argv[1], "wb");
+ const char output[] = "output";
+ fwrite(output, 1, sizeof(output) - 1, file);
+ fclose(file);
+ return 0;
+}
+
diff --git a/third_party/python/gyp/test/rules-use-built-dependencies/src/use-built-dependencies-rule.gyp b/third_party/python/gyp/test/rules-use-built-dependencies/src/use-built-dependencies-rule.gyp
new file mode 100644
index 0000000000..92bfeda392
--- /dev/null
+++ b/third_party/python/gyp/test/rules-use-built-dependencies/src/use-built-dependencies-rule.gyp
@@ -0,0 +1,42 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'main',
+ 'toolsets': ['host'],
+ 'type': 'executable',
+ 'sources': [
+ 'main.cc',
+ ],
+ },
+ {
+ 'target_name': 'post',
+ 'toolsets': ['host'],
+ 'type': 'none',
+ 'dependencies': [
+ 'main',
+ ],
+ 'sources': [
+        # As this test is written, it could easily be made into an action.
+        # An actual use case would have a number of these 'sources'.
+ '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)main<(EXECUTABLE_SUFFIX)',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'generate_output',
+ 'extension': '<(EXECUTABLE_SUFFIX)',
+ 'outputs': [ '<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT)_output', ],
+ 'msvs_cygwin_shell': 0,
+ 'action': [
+ '<(RULE_INPUT_PATH)',
+ '<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT)_output',
+ ],
+ 'message': 'Generating output for <(RULE_INPUT_ROOT)'
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules-variables/gyptest-rules-variables.py b/third_party/python/gyp/test/rules-variables/gyptest-rules-variables.py
new file mode 100755
index 0000000000..16afc22ef9
--- /dev/null
+++ b/third_party/python/gyp/test/rules-variables/gyptest-rules-variables.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that rule-related variables are expanded.
+"""
+
+from __future__ import print_function
+
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['ninja'])
+
+test.relocate('src', 'relocate/src')
+
+test.run_gyp('variables.gyp', chdir='relocate/src')
+
+test.build('variables.gyp', chdir='relocate/src')
+
+test.run_built_executable('all_rule_variables',
+ chdir='relocate/src',
+ stdout="input_root\ninput_dirname\ninput_path\n" +
+ "input_ext\ninput_name\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/rules-variables/src/input_ext.c b/third_party/python/gyp/test/rules-variables/src/input_ext.c
new file mode 100644
index 0000000000..f41e73ef8a
--- /dev/null
+++ b/third_party/python/gyp/test/rules-variables/src/input_ext.c
@@ -0,0 +1,9 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+void input_ext() {
+ printf("input_ext\n");
+}
diff --git a/third_party/python/gyp/test/rules-variables/src/input_name/test.c b/third_party/python/gyp/test/rules-variables/src/input_name/test.c
new file mode 100644
index 0000000000..e28b74d115
--- /dev/null
+++ b/third_party/python/gyp/test/rules-variables/src/input_name/test.c
@@ -0,0 +1,9 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+void input_name() {
+ printf("input_name\n");
+}
diff --git a/third_party/python/gyp/test/rules-variables/src/input_path/subdir/test.c b/third_party/python/gyp/test/rules-variables/src/input_path/subdir/test.c
new file mode 100644
index 0000000000..403dbbda4c
--- /dev/null
+++ b/third_party/python/gyp/test/rules-variables/src/input_path/subdir/test.c
@@ -0,0 +1,9 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+void input_path() {
+ printf("input_path\n");
+}
diff --git a/third_party/python/gyp/test/rules-variables/src/subdir/input_dirname.c b/third_party/python/gyp/test/rules-variables/src/subdir/input_dirname.c
new file mode 100644
index 0000000000..40cecd87d9
--- /dev/null
+++ b/third_party/python/gyp/test/rules-variables/src/subdir/input_dirname.c
@@ -0,0 +1,9 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+void input_dirname() {
+ printf("input_dirname\n");
+}
diff --git a/third_party/python/gyp/test/rules-variables/src/subdir/test.c b/third_party/python/gyp/test/rules-variables/src/subdir/test.c
new file mode 100644
index 0000000000..6c0280b8ad
--- /dev/null
+++ b/third_party/python/gyp/test/rules-variables/src/subdir/test.c
@@ -0,0 +1,18 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+extern void input_root();
+extern void input_dirname();
+extern void input_path();
+extern void input_ext();
+extern void input_name();
+
+int main() {
+ input_root();
+ input_dirname();
+ input_path();
+ input_ext();
+ input_name();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/rules-variables/src/test.input_root.c b/third_party/python/gyp/test/rules-variables/src/test.input_root.c
new file mode 100644
index 0000000000..33a7740a5c
--- /dev/null
+++ b/third_party/python/gyp/test/rules-variables/src/test.input_root.c
@@ -0,0 +1,9 @@
+// Copyright (c) 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+void input_root() {
+ printf("input_root\n");
+}
diff --git a/third_party/python/gyp/test/rules-variables/src/variables.gyp b/third_party/python/gyp/test/rules-variables/src/variables.gyp
new file mode 100644
index 0000000000..6debba12e3
--- /dev/null
+++ b/third_party/python/gyp/test/rules-variables/src/variables.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ # This test shouldn't ever actually need to execute its rules: there's no
+ # command line that generates any output anyway. However, there's something
+    # slightly broken in either ninja or (maybe more likely?) the win32 VM
+ # gypbots that breaks dependency checking and causes this rule to want to
+ # run. When it does run, the cygwin path is wrong, so the do-nothing step
+ # fails.
+ # TODO: Investigate and fix whatever's actually failing and remove this.
+ 'msvs_cygwin_dirs': ['../../../../../../<(DEPTH)/third_party/cygwin'],
+ },
+ 'targets': [
+ {
+ 'target_name': 'all_rule_variables',
+ 'type': 'executable',
+ 'sources': [
+ 'subdir/test.c',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'rule_variable',
+ 'extension': 'c',
+ 'outputs': [
+ '<(RULE_INPUT_ROOT).input_root.c',
+ '<(RULE_INPUT_DIRNAME)/input_dirname.c',
+ 'input_path/<(RULE_INPUT_PATH)',
+ 'input_ext<(RULE_INPUT_EXT)',
+ 'input_name/<(RULE_INPUT_NAME)',
+ ],
+ 'action': [],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/gyptest-all.py b/third_party/python/gyp/test/rules/gyptest-all.py
new file mode 100755
index 0000000000..0520c2f6a0
--- /dev/null
+++ b/third_party/python/gyp/test/rules/gyptest-all.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple rules when using an explicit build target of 'all'.
+"""
+
+from __future__ import print_function
+
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('no_action_with_rules_fails.gyp', chdir='src/noaction', status=1,
+ stderr=None)
+
+test.run_gyp('actions.gyp',
+ '-G', 'xcode_ninja_target_pattern=^pull_in_all_actions$',
+ chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('actions.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from program.c
+Hello from function1.in
+Hello from function2.in
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir1'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('program', chdir=chdir, stdout=expect)
+
+expect = """\
+Hello from program.c
+Hello from function3.in
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir3'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('program2', chdir=chdir, stdout=expect)
+
+test.must_match('relocate/src/subdir2/file1.out', 'Hello from file1.in\n')
+test.must_match('relocate/src/subdir2/file2.out', 'Hello from file2.in\n')
+
+test.must_match('relocate/src/subdir2/file1.out2', 'Hello from file1.in\n')
+test.must_match('relocate/src/subdir2/file2.out2', 'Hello from file2.in\n')
+
+test.must_match('relocate/src/subdir2/file1.out4', 'Hello from file1.in\n')
+test.must_match('relocate/src/subdir2/file2.out4', 'Hello from file2.in\n')
+test.must_match('relocate/src/subdir2/file1.copy', 'Hello from file1.in\n')
+
+test.must_match('relocate/src/external/file1.external_rules.out',
+ 'Hello from file1.in\n')
+test.must_match('relocate/src/external/file2.external_rules.out',
+ 'Hello from file2.in\n')
+
+expect = """\
+Hello from program.c
+Got 41.
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir4'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('program4', chdir=chdir, stdout=expect)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/rules/gyptest-default.py b/third_party/python/gyp/test/rules/gyptest-default.py
new file mode 100755
index 0000000000..5d01094197
--- /dev/null
+++ b/third_party/python/gyp/test/rules/gyptest-default.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simple rules when using the default build target.
+"""
+
+from __future__ import print_function
+
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('actions.gyp',
+ '-G', 'xcode_ninja_target_pattern=^pull_in_all_actions$',
+ chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('actions.gyp', chdir='relocate/src')
+
+expect = """\
+Hello from program.c
+Hello from function1.in
+Hello from function2.in
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir1'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('program', chdir=chdir, stdout=expect)
+
+expect = """\
+Hello from program.c
+Hello from function3.in
+"""
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir3'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('program2', chdir=chdir, stdout=expect)
+
+test.must_match('relocate/src/subdir2/file1.out', 'Hello from file1.in\n')
+test.must_match('relocate/src/subdir2/file2.out', 'Hello from file2.in\n')
+
+test.must_match('relocate/src/subdir2/file1.out2', 'Hello from file1.in\n')
+test.must_match('relocate/src/subdir2/file2.out2', 'Hello from file2.in\n')
+
+test.must_match('relocate/src/subdir2/file1.out4', 'Hello from file1.in\n')
+test.must_match('relocate/src/subdir2/file2.out4', 'Hello from file2.in\n')
+test.must_match('relocate/src/subdir2/file1.copy', 'Hello from file1.in\n')
+
+test.must_match('relocate/src/external/file1.external_rules.out',
+ 'Hello from file1.in\n')
+test.must_match('relocate/src/external/file2.external_rules.out',
+ 'Hello from file2.in\n')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/rules/gyptest-input-root.py b/third_party/python/gyp/test/rules/gyptest-input-root.py
new file mode 100755
index 0000000000..92bade6d48
--- /dev/null
+++ b/third_party/python/gyp/test/rules/gyptest-input-root.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that RULE_INPUT_ROOT isn't turned into a path in rule actions
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('input-root.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('input-root.gyp', target='test', chdir='relocate/src')
+
+expect = """\
+Hello somefile
+"""
+
+test.run_built_executable('test', chdir='relocate/src', stdout=expect)
+test.pass_test()
diff --git a/third_party/python/gyp/test/rules/gyptest-special-variables.py b/third_party/python/gyp/test/rules/gyptest-special-variables.py
new file mode 100644
index 0000000000..05ea7cee16
--- /dev/null
+++ b/third_party/python/gyp/test/rules/gyptest-special-variables.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Verifies that VS variables that require special variables are expanded
+correctly. """
+
+import sys
+import TestGyp
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp()
+
+ test.run_gyp('special-variables.gyp', chdir='src')
+ test.build('special-variables.gyp', test.ALL, chdir='src')
+ test.pass_test()
diff --git a/third_party/python/gyp/test/rules/src/actions.gyp b/third_party/python/gyp/test/rules/src/actions.gyp
new file mode 100644
index 0000000000..84376a7193
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/actions.gyp
@@ -0,0 +1,23 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'pull_in_all_actions',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir1/executable.gyp:*',
+ 'subdir2/both_rule_and_action_input.gyp:*',
+ 'subdir2/never_used.gyp:*',
+ 'subdir2/no_inputs.gyp:*',
+ 'subdir2/no_action.gyp:*',
+ 'subdir2/none.gyp:*',
+ 'subdir3/executable2.gyp:*',
+ 'subdir4/build-asm.gyp:*',
+ 'external/external.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/src/an_asm.S b/third_party/python/gyp/test/rules/src/an_asm.S
new file mode 100644
index 0000000000..eeb1345550
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/an_asm.S
@@ -0,0 +1,6 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Fake asm file.
+int main() {}
diff --git a/third_party/python/gyp/test/rules/src/as.bat b/third_party/python/gyp/test/rules/src/as.bat
new file mode 100644
index 0000000000..903c31a726
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/as.bat
@@ -0,0 +1,7 @@
+@echo off
+:: Copyright (c) 2011 Google Inc. All rights reserved.
+:: Use of this source code is governed by a BSD-style license that can be
+:: found in the LICENSE file.
+
+:: Fake assembler for Windows
+cl /TP /c %1 /Fo%2
diff --git a/third_party/python/gyp/test/rules/src/copy-file.py b/third_party/python/gyp/test/rules/src/copy-file.py
new file mode 100755
index 0000000000..7bdfbfd4bd
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/copy-file.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import sys
+
+contents = open(sys.argv[1], 'r').read()
+open(sys.argv[2], 'w').write(contents)
+
+sys.exit(0)
diff --git a/third_party/python/gyp/test/rules/src/external/external.gyp b/third_party/python/gyp/test/rules/src/external/external.gyp
new file mode 100644
index 0000000000..b28174f57c
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/external/external.gyp
@@ -0,0 +1,66 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Test the case where there are no inputs (other than the
+# file the rule applies to).
+{
+ 'target_defaults': {
+ 'msvs_cygwin_dirs': ['../../../../../../<(DEPTH)/third_party/cygwin'],
+ },
+ 'targets': [
+ {
+ 'target_name': 'external_rules',
+ 'type': 'none',
+ 'sources': [
+ 'file1.in',
+ 'file2.in',
+ ],
+ 'conditions': [
+ ['OS=="win"', {
+ 'dependencies': [
+ 'cygwin',
+ ],
+ }],
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file',
+ 'extension': 'in',
+ 'msvs_external_rule': 1,
+ 'outputs': [
+ '<(RULE_INPUT_ROOT).external_rules.out',
+ ],
+ 'action': [
+ 'python', '../copy-file.py', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ },
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="win"', {
+ 'targets': [
+ {
+ 'target_name': 'cygwin',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'setup_mount',
+ 'msvs_cygwin_shell': 0,
+ 'inputs': [
+ '../../../../../../<(DEPTH)/third_party/cygwin/setup_mount.bat',
+ ],
+ # Visual Studio requires an output file, or else the
+ # custom build step won't run.
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/_always_run_setup_mount.marker',
+ ],
+ 'action': ['<@(_inputs)'],
+ },
+ ],
+ },
+ ],
+ }],
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/src/external/file1.in b/third_party/python/gyp/test/rules/src/external/file1.in
new file mode 100644
index 0000000000..86ac3ad389
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/external/file1.in
@@ -0,0 +1 @@
+Hello from file1.in
diff --git a/third_party/python/gyp/test/rules/src/external/file2.in b/third_party/python/gyp/test/rules/src/external/file2.in
new file mode 100644
index 0000000000..bf83d8ecec
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/external/file2.in
@@ -0,0 +1 @@
+Hello from file2.in
diff --git a/third_party/python/gyp/test/rules/src/input-root.gyp b/third_party/python/gyp/test/rules/src/input-root.gyp
new file mode 100644
index 0000000000..b6600e767c
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/input-root.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test',
+ 'type': 'executable',
+ 'sources': [ 'somefile.ext', ],
+ 'rules': [{
+ 'rule_name': 'rule',
+ 'extension': 'ext',
+ 'inputs': [ 'rule.py', ],
+ 'outputs': [ '<(RULE_INPUT_ROOT).cc', ],
+ 'action': [ 'python', 'rule.py', '<(RULE_INPUT_ROOT)', ],
+ 'message': 'Processing <(RULE_INPUT_PATH)',
+ 'process_outputs_as_sources': 1,
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/src/noaction/file1.in b/third_party/python/gyp/test/rules/src/noaction/file1.in
new file mode 100644
index 0000000000..86ac3ad389
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/noaction/file1.in
@@ -0,0 +1 @@
+Hello from file1.in
diff --git a/third_party/python/gyp/test/rules/src/noaction/no_action_with_rules_fails.gyp b/third_party/python/gyp/test/rules/src/noaction/no_action_with_rules_fails.gyp
new file mode 100644
index 0000000000..9b6a65629f
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/noaction/no_action_with_rules_fails.gyp
@@ -0,0 +1,37 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Test that a rule whose inputs should be processed, but which specifies no
+# action, results in a gyp failure.
+{
+ 'targets': [
+ {
+ 'target_name': 'extension_does_match_sources_but_no_action',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'file1.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'assembled',
+ 'extension': 'in',
+ 'outputs': [
+ '<(RULE_INPUT_ROOT).in',
+ ],
+ 'conditions': [
+ # Always fails.
+ [ '"true"=="false"', {
+ 'action': [
+ 'python', '../copy-file.py', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ 'message': 'test_rule',
+ }],
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/src/rule.py b/third_party/python/gyp/test/rules/src/rule.py
new file mode 100755
index 0000000000..8a1f36dedb
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/rule.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+f = open(sys.argv[1] + ".cc", "w")
+f.write("""\
+#include <stdio.h>
+
+int main() {
+ puts("Hello %s");
+ return 0;
+}
+""" % sys.argv[1])
+f.close()
diff --git a/third_party/python/gyp/test/rules/src/somefile.ext b/third_party/python/gyp/test/rules/src/somefile.ext
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/somefile.ext
diff --git a/third_party/python/gyp/test/rules/src/special-variables.gyp b/third_party/python/gyp/test/rules/src/special-variables.gyp
new file mode 100644
index 0000000000..d1443af5ba
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/special-variables.gyp
@@ -0,0 +1,34 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'rules': [
+ {
+ 'rule_name': 'assembler (gnu-compatible)',
+ 'msvs_cygwin_shell': 0,
+ 'msvs_quote_cmd': 0,
+ 'extension': 'S',
+ 'inputs': [
+ 'as.bat',
+ ],
+ 'outputs': [
+ '$(IntDir)/$(InputName).obj',
+ ],
+ 'action': [
+ 'as.bat',
+ '$(InputPath)',
+ '$(IntDir)/$(InputName).obj',
+ ],
+ 'message': 'Building assembly language file $(InputPath)',
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ 'target_name': 'test',
+ 'type': 'static_library',
+ 'sources': [ 'an_asm.S' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir1/executable.gyp b/third_party/python/gyp/test/rules/src/subdir1/executable.gyp
new file mode 100644
index 0000000000..c34cce5a92
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir1/executable.gyp
@@ -0,0 +1,37 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'program.c',
+ 'function1.in',
+ 'function2.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file',
+ 'extension': 'in',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ # TODO: fix Make to support generated files not
+ # in a variable-named path like <(INTERMEDIATE_DIR)
+ #'<(RULE_INPUT_ROOT).c',
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir1/function1.in b/third_party/python/gyp/test/rules/src/subdir1/function1.in
new file mode 100644
index 0000000000..60ff28949b
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir1/function1.in
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void function1(void)
+{
+ printf("Hello from function1.in\n");
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir1/function2.in b/third_party/python/gyp/test/rules/src/subdir1/function2.in
new file mode 100644
index 0000000000..0fcfc03fdb
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir1/function2.in
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void function2(void)
+{
+ printf("Hello from function2.in\n");
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir1/program.c b/third_party/python/gyp/test/rules/src/subdir1/program.c
new file mode 100644
index 0000000000..6b11ff9f67
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir1/program.c
@@ -0,0 +1,12 @@
+#include <stdio.h>
+
+extern void function1(void);
+extern void function2(void);
+
+int main(void)
+{
+ printf("Hello from program.c\n");
+ function1();
+ function2();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir2/both_rule_and_action_input.gyp b/third_party/python/gyp/test/rules/src/subdir2/both_rule_and_action_input.gyp
new file mode 100644
index 0000000000..e5e6f3ec2b
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir2/both_rule_and_action_input.gyp
@@ -0,0 +1,50 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Tests that if a rule input is also an action input, both the rule and action
+# are executed
+{
+ 'targets': [
+ {
+ 'target_name': 'files_both_rule_and_action_input',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'program.c',
+ 'file1.in',
+ 'file2.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file',
+ 'extension': 'in',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ '<(RULE_INPUT_ROOT).out4',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ },
+ ],
+ 'actions': [
+ {
+ 'action_name': 'copy_file1_in',
+ 'inputs': [
+ '../copy-file.py',
+ 'file1.in',
+ ],
+ 'outputs': [
+ 'file1.copy',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(_outputs)'
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir2/file1.in b/third_party/python/gyp/test/rules/src/subdir2/file1.in
new file mode 100644
index 0000000000..86ac3ad389
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir2/file1.in
@@ -0,0 +1 @@
+Hello from file1.in
diff --git a/third_party/python/gyp/test/rules/src/subdir2/file2.in b/third_party/python/gyp/test/rules/src/subdir2/file2.in
new file mode 100644
index 0000000000..bf83d8ecec
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir2/file2.in
@@ -0,0 +1 @@
+Hello from file2.in
diff --git a/third_party/python/gyp/test/rules/src/subdir2/never_used.gyp b/third_party/python/gyp/test/rules/src/subdir2/never_used.gyp
new file mode 100644
index 0000000000..17f6f55371
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir2/never_used.gyp
@@ -0,0 +1,31 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Test the case where there is a rule that doesn't apply to anything.
+{
+ 'targets': [
+ {
+ 'target_name': 'files_no_input2',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'file1.in',
+ 'file2.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file3',
+ 'extension': 'in2',
+ 'outputs': [
+ '<(RULE_INPUT_ROOT).out3',
+ ],
+ 'action': [
+ 'python', '../copy-file.py', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir2/no_action.gyp b/third_party/python/gyp/test/rules/src/subdir2/no_action.gyp
new file mode 100644
index 0000000000..ffa1cefe18
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir2/no_action.gyp
@@ -0,0 +1,38 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Test that the case where an action is only specified under a conditional is
+# evaluated appropriately.
+{
+ 'targets': [
+ {
+ 'target_name': 'extension_does_not_match_sources_and_no_action',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'file1.in',
+ 'file2.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'assemble',
+ 'extension': 'asm',
+ 'outputs': [
+ '<(RULE_INPUT_ROOT).fail',
+ ],
+ 'conditions': [
+ # Always fails.
+ [ '"true"=="false"', {
+ 'action': [
+ 'python', '../copy-file.py', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ 'message': 'test_rule',
+ }],
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir2/no_inputs.gyp b/third_party/python/gyp/test/rules/src/subdir2/no_inputs.gyp
new file mode 100644
index 0000000000..e61a1a3ff6
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir2/no_inputs.gyp
@@ -0,0 +1,32 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Test the case where there are no inputs (other than the
+# file the rule applies to).
+{
+ 'targets': [
+ {
+ 'target_name': 'files_no_input',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'file1.in',
+ 'file2.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file2',
+ 'extension': 'in',
+ 'outputs': [
+ '<(RULE_INPUT_ROOT).out2',
+ ],
+ 'action': [
+ 'python', '../copy-file.py', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir2/none.gyp b/third_party/python/gyp/test/rules/src/subdir2/none.gyp
new file mode 100644
index 0000000000..38bcdabdf6
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir2/none.gyp
@@ -0,0 +1,33 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'files',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'file1.in',
+ 'file2.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file',
+ 'extension': 'in',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ '<(RULE_INPUT_ROOT).out',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir2/program.c b/third_party/python/gyp/test/rules/src/subdir2/program.c
new file mode 100644
index 0000000000..e5db175148
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir2/program.c
@@ -0,0 +1,12 @@
+/* Copyright (c) 2014 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello from program.c\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir3/executable2.gyp b/third_party/python/gyp/test/rules/src/subdir3/executable2.gyp
new file mode 100644
index 0000000000..a2a528fc7b
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir3/executable2.gyp
@@ -0,0 +1,37 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This one tests that rules are properly written if extensions are different
+# between the target's sources (program.c) and the generated files
+# (function3.cc)
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program2',
+ 'type': 'executable',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'program.c',
+ 'function3.in',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'copy_file',
+ 'extension': 'in',
+ 'inputs': [
+ '../copy-file.py',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).cc',
+ ],
+ 'action': [
+ 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir3/function3.in b/third_party/python/gyp/test/rules/src/subdir3/function3.in
new file mode 100644
index 0000000000..99f46ab05e
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir3/function3.in
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+extern "C" void function3(void)
+{
+ printf("Hello from function3.in\n");
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir3/program.c b/third_party/python/gyp/test/rules/src/subdir3/program.c
new file mode 100644
index 0000000000..c38eead50e
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir3/program.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+extern void function3(void);
+
+int main(void)
+{
+ printf("Hello from program.c\n");
+ function3();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir4/asm-function.assem b/third_party/python/gyp/test/rules/src/subdir4/asm-function.assem
new file mode 100644
index 0000000000..ed47cade95
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir4/asm-function.assem
@@ -0,0 +1,10 @@
+#if PLATFORM_WINDOWS || PLATFORM_MAC
+# define IDENTIFIER(n) _##n
+#else /* Linux */
+# define IDENTIFIER(n) n
+#endif
+
+.globl IDENTIFIER(asm_function)
+IDENTIFIER(asm_function):
+ movl $41, %eax
+ ret
diff --git a/third_party/python/gyp/test/rules/src/subdir4/build-asm.gyp b/third_party/python/gyp/test/rules/src/subdir4/build-asm.gyp
new file mode 100644
index 0000000000..fe0fe93787
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir4/build-asm.gyp
@@ -0,0 +1,49 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This one tests that assembly files ending in .s and .S are compiled.
+
+{
+ 'target_defaults': {
+ 'conditions': [
+ ['OS=="win"', {
+ 'defines': ['PLATFORM_WIN'],
+ }],
+ ['OS=="mac"', {
+ 'defines': ['PLATFORM_MAC'],
+ }],
+ ['OS=="linux"', {
+ 'defines': ['PLATFORM_LINUX'],
+ }],
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'program4',
+ 'type': 'executable',
+ 'sources': [
+ 'asm-function.assem',
+ 'program.c',
+ ],
+ 'conditions': [
+ ['OS=="linux" or OS=="mac"', {
+ 'rules': [
+ {
+ 'rule_name': 'convert_assem',
+ 'extension': 'assem',
+ 'inputs': [],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).S',
+ ],
+ 'action': [
+ 'bash', '-c', 'cp <(RULE_INPUT_PATH) <@(_outputs)',
+ ],
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ }],
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/rules/src/subdir4/program.c b/third_party/python/gyp/test/rules/src/subdir4/program.c
new file mode 100644
index 0000000000..ad647f4eb9
--- /dev/null
+++ b/third_party/python/gyp/test/rules/src/subdir4/program.c
@@ -0,0 +1,19 @@
+#include <stdio.h>
+
+// Use the assembly function on Linux and Mac, where it is built.
+#if PLATFORM_LINUX || PLATFORM_MAC
+extern int asm_function(void);
+#else
+int asm_function() {
+ return 41;
+}
+#endif
+
+int main(void)
+{
+ fprintf(stdout, "Hello from program.c\n");
+ fflush(stdout);
+ fprintf(stdout, "Got %d.\n", asm_function());
+ fflush(stdout);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/same-gyp-name/gyptest-all.py b/third_party/python/gyp/test/same-gyp-name/gyptest-all.py
new file mode 100755
index 0000000000..cda1a72d4d
--- /dev/null
+++ b/third_party/python/gyp/test/same-gyp-name/gyptest-all.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Build a .gyp that depends on 2 gyp files with the same name.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('all.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', test.ALL, chdir='relocate/src')
+
+expect1 = """\
+Hello from main1.cc
+"""
+
+expect2 = """\
+Hello from main2.cc
+"""
+
+if test.format == 'xcode':
+ chdir1 = 'relocate/src/subdir1'
+ chdir2 = 'relocate/src/subdir2'
+else:
+ chdir1 = chdir2 = 'relocate/src'
+
+test.run_built_executable('program1', chdir=chdir1, stdout=expect1)
+test.run_built_executable('program2', chdir=chdir2, stdout=expect2)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/same-gyp-name/gyptest-default.py b/third_party/python/gyp/test/same-gyp-name/gyptest-default.py
new file mode 100755
index 0000000000..5e4bba0012
--- /dev/null
+++ b/third_party/python/gyp/test/same-gyp-name/gyptest-default.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Build a .gyp that depends on 2 gyp files with the same name.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('all.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', chdir='relocate/src')
+
+expect1 = """\
+Hello from main1.cc
+"""
+
+expect2 = """\
+Hello from main2.cc
+"""
+
+if test.format == 'xcode':
+ chdir1 = 'relocate/src/subdir1'
+ chdir2 = 'relocate/src/subdir2'
+else:
+ chdir1 = chdir2 = 'relocate/src'
+
+test.run_built_executable('program1', chdir=chdir1, stdout=expect1)
+test.run_built_executable('program2', chdir=chdir2, stdout=expect2)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/same-gyp-name/gyptest-library.py b/third_party/python/gyp/test/same-gyp-name/gyptest-library.py
new file mode 100644
index 0000000000..957a4a52d6
--- /dev/null
+++ b/third_party/python/gyp/test/same-gyp-name/gyptest-library.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a dependency on two gyp files with the same name does not create
+a uid collision in the generated Xcode project file.
+"""
+
+import TestGyp
+
+import sys
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('test.gyp', chdir='library')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/same-gyp-name/library/one/sub.gyp b/third_party/python/gyp/test/same-gyp-name/library/one/sub.gyp
new file mode 100644
index 0000000000..1bed941e54
--- /dev/null
+++ b/third_party/python/gyp/test/same-gyp-name/library/one/sub.gyp
@@ -0,0 +1,11 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'one',
+ 'type': 'static_library',
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-gyp-name/library/test.gyp b/third_party/python/gyp/test/same-gyp-name/library/test.gyp
new file mode 100644
index 0000000000..552a77ed7e
--- /dev/null
+++ b/third_party/python/gyp/test/same-gyp-name/library/test.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'duplicate_names',
+ 'type': 'shared_library',
+ 'dependencies': [
+ 'one/sub.gyp:one',
+ 'two/sub.gyp:two',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-gyp-name/library/two/sub.gyp b/third_party/python/gyp/test/same-gyp-name/library/two/sub.gyp
new file mode 100644
index 0000000000..934c98a496
--- /dev/null
+++ b/third_party/python/gyp/test/same-gyp-name/library/two/sub.gyp
@@ -0,0 +1,11 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'two',
+ 'type': 'static_library',
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-gyp-name/src/all.gyp b/third_party/python/gyp/test/same-gyp-name/src/all.gyp
new file mode 100644
index 0000000000..229f02ea84
--- /dev/null
+++ b/third_party/python/gyp/test/same-gyp-name/src/all.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'all_exes',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir1/executable.gyp:*',
+ 'subdir2/executable.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-gyp-name/src/subdir1/executable.gyp b/third_party/python/gyp/test/same-gyp-name/src/subdir1/executable.gyp
new file mode 100644
index 0000000000..82483b4c69
--- /dev/null
+++ b/third_party/python/gyp/test/same-gyp-name/src/subdir1/executable.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program1',
+ 'type': 'executable',
+ 'sources': [
+ 'main1.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-gyp-name/src/subdir1/main1.cc b/third_party/python/gyp/test/same-gyp-name/src/subdir1/main1.cc
new file mode 100644
index 0000000000..3645558324
--- /dev/null
+++ b/third_party/python/gyp/test/same-gyp-name/src/subdir1/main1.cc
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+int main() {
+ printf("Hello from main1.cc\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/same-gyp-name/src/subdir2/executable.gyp b/third_party/python/gyp/test/same-gyp-name/src/subdir2/executable.gyp
new file mode 100644
index 0000000000..e3537013eb
--- /dev/null
+++ b/third_party/python/gyp/test/same-gyp-name/src/subdir2/executable.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program2',
+ 'type': 'executable',
+ 'sources': [
+ 'main2.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-gyp-name/src/subdir2/main2.cc b/third_party/python/gyp/test/same-gyp-name/src/subdir2/main2.cc
new file mode 100644
index 0000000000..0c724dee35
--- /dev/null
+++ b/third_party/python/gyp/test/same-gyp-name/src/subdir2/main2.cc
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+int main() {
+ printf("Hello from main2.cc\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/same-rule-output-file-name/gyptest-all.py b/third_party/python/gyp/test/same-rule-output-file-name/gyptest-all.py
new file mode 100644
index 0000000000..964e6b7721
--- /dev/null
+++ b/third_party/python/gyp/test/same-rule-output-file-name/gyptest-all.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Tests the use of rules with the same output file name.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('subdirs.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('subdirs.gyp', test.ALL, chdir='relocate/src')
+test.must_exist('relocate/src/subdir1/rule.txt')
+test.must_exist('relocate/src/subdir2/rule.txt')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/same-rule-output-file-name/src/subdir1/subdir1.gyp b/third_party/python/gyp/test/same-rule-output-file-name/src/subdir1/subdir1.gyp
new file mode 100644
index 0000000000..bff381a5a5
--- /dev/null
+++ b/third_party/python/gyp/test/same-rule-output-file-name/src/subdir1/subdir1.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'target1',
+ 'type': 'none',
+ 'sources': [
+ '../touch.py'
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'rule1',
+ 'extension': 'py',
+ 'inputs': [],
+ 'outputs': [
+ 'rule.txt',
+ ],
+ 'action': [
+ 'python', '../touch.py', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-rule-output-file-name/src/subdir2/subdir2.gyp b/third_party/python/gyp/test/same-rule-output-file-name/src/subdir2/subdir2.gyp
new file mode 100644
index 0000000000..12a35600a3
--- /dev/null
+++ b/third_party/python/gyp/test/same-rule-output-file-name/src/subdir2/subdir2.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'target2',
+ 'type': 'none',
+ 'sources': [
+ '../touch.py'
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'rule2',
+ 'extension': 'py',
+ 'inputs': [],
+ 'outputs': [
+ 'rule.txt',
+ ],
+ 'action': [
+ 'python', '../touch.py', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-rule-output-file-name/src/subdirs.gyp b/third_party/python/gyp/test/same-rule-output-file-name/src/subdirs.gyp
new file mode 100644
index 0000000000..25259a38f4
--- /dev/null
+++ b/third_party/python/gyp/test/same-rule-output-file-name/src/subdirs.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'subdirs',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir1/subdir1.gyp:*',
+ 'subdir2/subdir2.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-rule-output-file-name/src/touch.py b/third_party/python/gyp/test/same-rule-output-file-name/src/touch.py
new file mode 100644
index 0000000000..2291e9cc56
--- /dev/null
+++ b/third_party/python/gyp/test/same-rule-output-file-name/src/touch.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+f = open(sys.argv[1], 'w+')
+f.write('Hello from touch.py\n')
+f.close()
diff --git a/third_party/python/gyp/test/same-source-file-name/gyptest-all.py b/third_party/python/gyp/test/same-source-file-name/gyptest-all.py
new file mode 100755
index 0000000000..4c215027c2
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/gyptest-all.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Build a .gyp with two targets that share a common .c source file.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('all.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', test.ALL, chdir='relocate/src')
+
+expect1 = """\
+Hello from prog1.c
+Hello prog1 from func.c
+"""
+
+expect2 = """\
+Hello from prog2.c
+Hello prog2 from func.c
+"""
+
+test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1)
+test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/same-source-file-name/gyptest-default.py b/third_party/python/gyp/test/same-source-file-name/gyptest-default.py
new file mode 100755
index 0000000000..98757c2697
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/gyptest-default.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Build a .gyp with two targets that share a common .c source file.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('all.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('all.gyp', chdir='relocate/src')
+
+expect1 = """\
+Hello from prog1.c
+Hello prog1 from func.c
+"""
+
+expect2 = """\
+Hello from prog2.c
+Hello prog2 from func.c
+"""
+
+test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1)
+test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/same-source-file-name/gyptest-pass-executable.py b/third_party/python/gyp/test/same-source-file-name/gyptest-pass-executable.py
new file mode 100755
index 0000000000..1a3dcda23d
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/gyptest-pass-executable.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Checks that gyp does not fail on executable targets which have several files
+with the same basename.
+"""
+
+import TestGyp
+
+# While MSVS supports building executables that contain several files with the
+# same name, the msvs gyp generator does not.
+test = TestGyp.TestGyp(formats=['!msvs'])
+
+test.run_gyp('double-executable.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('double-executable.gyp', test.ALL, chdir='relocate/src')
+
+expect = """\
+Hello from prog3.c
+Hello prog3 from func.c
+Hello prog3 from subdir1/func.c
+Hello prog3 from subdir2/func.c
+"""
+
+test.run_built_executable('prog3', chdir='relocate/src', stdout=expect)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/same-source-file-name/gyptest-pass-shared.py b/third_party/python/gyp/test/same-source-file-name/gyptest-pass-shared.py
new file mode 100755
index 0000000000..a498f1a846
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/gyptest-pass-shared.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Checks that gyp does not fail on shared_library targets which have several files
+with the same basename.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('double-shared.gyp', chdir='src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/same-source-file-name/gyptest-static.py b/third_party/python/gyp/test/same-source-file-name/gyptest-static.py
new file mode 100755
index 0000000000..7fa2772040
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/gyptest-static.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Checks that gyp fails on static_library targets which have several files with
+the same basename.
+"""
+
+import os
+import sys
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# Fails by default for compatibility with legacy generators such as the
+# VCProj generator for Visual C++ 2008 and the Makefile generator on Mac.
+# TODO: Update expected behavior when these legacy generators are deprecated.
+test.run_gyp('double-static.gyp', chdir='src', status=1, stderr=None)
+
+if ((test.format == 'make' and sys.platform == 'darwin') or
+ (test.format == 'msvs' and
+ int(os.environ.get('GYP_MSVS_VERSION', 2010)) < 2010)):
+ test.run_gyp('double-static.gyp', '--no-duplicate-basename-check',
+ chdir='src', status=1, stderr=None)
+else:
+ test.run_gyp('double-static.gyp', '--no-duplicate-basename-check',
+ chdir='src')
+ test.build('double-static.gyp', test.ALL, chdir='src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/same-source-file-name/src/all.gyp b/third_party/python/gyp/test/same-source-file-name/src/all.gyp
new file mode 100644
index 0000000000..4fe052c668
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/src/all.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'prog1',
+ 'type': 'executable',
+ 'defines': [
+ 'PROG="prog1"',
+ ],
+ 'sources': [
+ 'prog1.c',
+ 'func.c',
+ ],
+ },
+ {
+ 'target_name': 'prog2',
+ 'type': 'executable',
+ 'defines': [
+ 'PROG="prog2"',
+ ],
+ 'sources': [
+ 'prog2.c',
+ 'func.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-source-file-name/src/double-executable.gyp b/third_party/python/gyp/test/same-source-file-name/src/double-executable.gyp
new file mode 100644
index 0000000000..477bd87e0d
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/src/double-executable.gyp
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'prog3',
+ 'type': 'executable',
+ 'sources': [
+ 'prog3.c',
+ 'func.c',
+ 'subdir1/func.c',
+ 'subdir2/func.c',
+ ],
+ 'defines': [
+ 'PROG="prog3"',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-source-file-name/src/double-shared.gyp b/third_party/python/gyp/test/same-source-file-name/src/double-shared.gyp
new file mode 100644
index 0000000000..438b50f3f1
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/src/double-shared.gyp
@@ -0,0 +1,27 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'lib',
+ 'product_name': 'test_shared_lib',
+ 'type': 'shared_library',
+ 'sources': [
+ 'prog2.c',
+ 'func.c',
+ 'subdir1/func.c',
+ 'subdir2/func.c',
+ ],
+ 'defines': [
+ 'PROG="prog2"',
+ ],
+ 'conditions': [
+ ['OS=="linux"', {
+ 'cflags': ['-fPIC'],
+ }],
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-source-file-name/src/double-static.gyp b/third_party/python/gyp/test/same-source-file-name/src/double-static.gyp
new file mode 100644
index 0000000000..e49c0e1251
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/src/double-static.gyp
@@ -0,0 +1,22 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'lib',
+ 'product_name': 'test_static_lib',
+ 'type': 'static_library',
+ 'sources': [
+ 'prog1.c',
+ 'func.c',
+ 'subdir1/func.c',
+ 'subdir2/func.c',
+ ],
+ 'defines': [
+ 'PROG="prog1"',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-source-file-name/src/func.c b/third_party/python/gyp/test/same-source-file-name/src/func.c
new file mode 100644
index 0000000000..e069c692a6
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/src/func.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void func(void)
+{
+ printf("Hello %s from func.c\n", PROG);
+}
diff --git a/third_party/python/gyp/test/same-source-file-name/src/prog1.c b/third_party/python/gyp/test/same-source-file-name/src/prog1.c
new file mode 100644
index 0000000000..604e2b9c98
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/src/prog1.c
@@ -0,0 +1,16 @@
+#include <stdio.h>
+
+extern void func(void);
+
+int main(void)
+{
+ printf("Hello from prog1.c\n");
+ func();
+ /*
+ * Uncomment to test same-named files in different directories,
+ * which Visual Studio doesn't support.
+ subdir1_func();
+ subdir2_func();
+ */
+ return 0;
+}
diff --git a/third_party/python/gyp/test/same-source-file-name/src/prog2.c b/third_party/python/gyp/test/same-source-file-name/src/prog2.c
new file mode 100644
index 0000000000..466ee35003
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/src/prog2.c
@@ -0,0 +1,16 @@
+#include <stdio.h>
+
+extern void func(void);
+
+int main(void)
+{
+ printf("Hello from prog2.c\n");
+ func();
+ /*
+ * Uncomment to test same-named files in different directories,
+ * which Visual Studio doesn't support.
+ subdir1_func();
+ subdir2_func();
+ */
+ return 0;
+}
diff --git a/third_party/python/gyp/test/same-source-file-name/src/prog3.c b/third_party/python/gyp/test/same-source-file-name/src/prog3.c
new file mode 100644
index 0000000000..34d495ce08
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/src/prog3.c
@@ -0,0 +1,18 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+extern void func(void);
+extern void subdir1_func(void);
+extern void subdir2_func(void);
+
+int main(void)
+{
+ printf("Hello from prog3.c\n");
+ func();
+ subdir1_func();
+ subdir2_func();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/same-source-file-name/src/subdir1/func.c b/third_party/python/gyp/test/same-source-file-name/src/subdir1/func.c
new file mode 100644
index 0000000000..b73450d105
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/src/subdir1/func.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void subdir1_func(void)
+{
+ printf("Hello %s from subdir1/func.c\n", PROG);
+}
diff --git a/third_party/python/gyp/test/same-source-file-name/src/subdir2/func.c b/third_party/python/gyp/test/same-source-file-name/src/subdir2/func.c
new file mode 100644
index 0000000000..0248b5720e
--- /dev/null
+++ b/third_party/python/gyp/test/same-source-file-name/src/subdir2/func.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+void subdir2_func(void)
+{
+ printf("Hello %s from subdir2/func.c\n", PROG);
+}
diff --git a/third_party/python/gyp/test/same-target-name-different-directory/gyptest-all.py b/third_party/python/gyp/test/same-target-name-different-directory/gyptest-all.py
new file mode 100644
index 0000000000..755691b576
--- /dev/null
+++ b/third_party/python/gyp/test/same-target-name-different-directory/gyptest-all.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test cases where multiple targets in different directories have the same name.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['ninja', 'make'])
+
+# xcode-ninja fails to generate a project due to id collisions
+# cf. https://code.google.com/p/gyp/issues/detail?id=461
+if test.format == 'xcode-ninja':
+ test.skip_test()
+
+test.run_gyp('subdirs.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# Test that we build all targets.
+test.build('subdirs.gyp', 'target', chdir='relocate/src')
+test.must_exist('relocate/src/subdir1/action1.txt')
+test.must_exist('relocate/src/subdir2/action2.txt')
+
+# Test that we build all targets using the correct actions, even if they have
+# the same names.
+test.build('subdirs.gyp', 'target_same_action_name', chdir='relocate/src')
+test.must_exist('relocate/src/subdir1/action.txt')
+test.must_exist('relocate/src/subdir2/action.txt')
+
+# Test that we build all targets using the correct rules, even if they have
+# the same names.
+test.build('subdirs.gyp', 'target_same_rule_name', chdir='relocate/src')
+test.must_exist('relocate/src/subdir1/rule.txt')
+test.must_exist('relocate/src/subdir2/rule.txt')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/same-target-name-different-directory/src/subdir1/subdir1.gyp b/third_party/python/gyp/test/same-target-name-different-directory/src/subdir1/subdir1.gyp
new file mode 100644
index 0000000000..d4ec2e679a
--- /dev/null
+++ b/third_party/python/gyp/test/same-target-name-different-directory/src/subdir1/subdir1.gyp
@@ -0,0 +1,66 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'target',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'action1',
+ 'inputs': [],
+ 'outputs': [
+ 'action1.txt',
+ ],
+ 'action': [
+ 'python', '../touch.py', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'target_same_action_name',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'action',
+ 'inputs': [],
+ 'outputs': [
+ 'action.txt',
+ ],
+ 'action': [
+ 'python', '../touch.py', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'target_same_rule_name',
+ 'type': 'none',
+ 'sources': [
+ '../touch.py'
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'rule',
+ 'extension': 'py',
+ 'inputs': [],
+ 'outputs': [
+ 'rule.txt',
+ ],
+ 'action': [
+ 'python', '../touch.py', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-target-name-different-directory/src/subdir2/subdir2.gyp b/third_party/python/gyp/test/same-target-name-different-directory/src/subdir2/subdir2.gyp
new file mode 100644
index 0000000000..9006d450b2
--- /dev/null
+++ b/third_party/python/gyp/test/same-target-name-different-directory/src/subdir2/subdir2.gyp
@@ -0,0 +1,66 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'target',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'action2',
+ 'inputs': [],
+ 'outputs': [
+ 'action2.txt',
+ ],
+ 'action': [
+ 'python', '../touch.py', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'target_same_action_name',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'action',
+ 'inputs': [],
+ 'outputs': [
+ 'action.txt',
+ ],
+ 'action': [
+ 'python', '../touch.py', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ {
+ 'target_name': 'target_same_rule_name',
+ 'type': 'none',
+ 'sources': [
+ '../touch.py'
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'rule',
+ 'extension': 'py',
+ 'inputs': [],
+ 'outputs': [
+ 'rule.txt',
+ ],
+ 'action': [
+ 'python', '../touch.py', '<(_outputs)',
+ ],
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-target-name-different-directory/src/subdirs.gyp b/third_party/python/gyp/test/same-target-name-different-directory/src/subdirs.gyp
new file mode 100644
index 0000000000..65413e73b2
--- /dev/null
+++ b/third_party/python/gyp/test/same-target-name-different-directory/src/subdirs.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'subdirs',
+ 'type': 'none',
+ 'dependencies': [
+ 'subdir1/subdir1.gyp:*',
+ 'subdir2/subdir2.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-target-name-different-directory/src/touch.py b/third_party/python/gyp/test/same-target-name-different-directory/src/touch.py
new file mode 100644
index 0000000000..2291e9cc56
--- /dev/null
+++ b/third_party/python/gyp/test/same-target-name-different-directory/src/touch.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+f = open(sys.argv[1], 'w+')
+f.write('Hello from touch.py\n')
+f.close()
diff --git a/third_party/python/gyp/test/same-target-name/gyptest-same-target-name.py b/third_party/python/gyp/test/same-target-name/gyptest-same-target-name.py
new file mode 100755
index 0000000000..bfe5540f31
--- /dev/null
+++ b/third_party/python/gyp/test/same-target-name/gyptest-same-target-name.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Check that duplicate targets in a directory give an error.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# Require that gyp files with duplicate targets spit out an error.
+test.run_gyp('all.gyp', chdir='src', status=1, stderr=None)
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/same-target-name/src/all.gyp b/third_party/python/gyp/test/same-target-name/src/all.gyp
new file mode 100644
index 0000000000..ac16976da6
--- /dev/null
+++ b/third_party/python/gyp/test/same-target-name/src/all.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'all_exes',
+ 'type': 'none',
+ 'dependencies': [
+ 'executable1.gyp:*',
+ 'executable2.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-target-name/src/executable1.gyp b/third_party/python/gyp/test/same-target-name/src/executable1.gyp
new file mode 100644
index 0000000000..3c492c1b37
--- /dev/null
+++ b/third_party/python/gyp/test/same-target-name/src/executable1.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'sources': [
+ 'main1.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/same-target-name/src/executable2.gyp b/third_party/python/gyp/test/same-target-name/src/executable2.gyp
new file mode 100644
index 0000000000..41e84a61c6
--- /dev/null
+++ b/third_party/python/gyp/test/same-target-name/src/executable2.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'sources': [
+ 'main2.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/sanitize-rule-names/blah.S b/third_party/python/gyp/test/sanitize-rule-names/blah.S
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/sanitize-rule-names/blah.S
diff --git a/third_party/python/gyp/test/sanitize-rule-names/gyptest-sanitize-rule-names.py b/third_party/python/gyp/test/sanitize-rule-names/gyptest-sanitize-rule-names.py
new file mode 100644
index 0000000000..968a0ce5ce
--- /dev/null
+++ b/third_party/python/gyp/test/sanitize-rule-names/gyptest-sanitize-rule-names.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure rule names with non-"normal" characters in them don't cause
+broken build files. This test was originally causing broken .ninja files.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+test.run_gyp('sanitize-rule-names.gyp')
+test.build('sanitize-rule-names.gyp', test.ALL)
+test.pass_test()
diff --git a/third_party/python/gyp/test/sanitize-rule-names/hello.cc b/third_party/python/gyp/test/sanitize-rule-names/hello.cc
new file mode 100644
index 0000000000..1711567ef5
--- /dev/null
+++ b/third_party/python/gyp/test/sanitize-rule-names/hello.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/sanitize-rule-names/sanitize-rule-names.gyp b/third_party/python/gyp/test/sanitize-rule-names/sanitize-rule-names.gyp
new file mode 100644
index 0000000000..184253e966
--- /dev/null
+++ b/third_party/python/gyp/test/sanitize-rule-names/sanitize-rule-names.gyp
@@ -0,0 +1,27 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 's_test',
+ 'type': 'executable',
+ 'rules': [
+ {
+ # Make sure this rule name doesn't cause an invalid ninja file.
+ 'rule_name': 'rule name with odd characters ()/',
+ 'extension': 'S',
+ 'outputs': ['outfile'],
+ 'msvs_cygwin_shell': 0,
+ 'msvs_quote_cmd': 0,
+ 'action': ['python', 'script.py', '<(RULE_INPUT_PATH)', 'outfile'],
+ },
+ ],
+ 'sources': [
+ 'blah.S',
+ 'hello.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/sanitize-rule-names/script.py b/third_party/python/gyp/test/sanitize-rule-names/script.py
new file mode 100644
index 0000000000..ae2efa1df4
--- /dev/null
+++ b/third_party/python/gyp/test/sanitize-rule-names/script.py
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import shutil
+import sys
+
+shutil.copyfile(*sys.argv[1:])
diff --git a/third_party/python/gyp/test/self-dependency/common.gypi b/third_party/python/gyp/test/self-dependency/common.gypi
new file mode 100644
index 0000000000..aae221a5dd
--- /dev/null
+++ b/third_party/python/gyp/test/self-dependency/common.gypi
@@ -0,0 +1,13 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# A common file that other .gyp files include.
+# Makes every target in the project depend on dep.gyp:dep.
+{
+ 'target_defaults': {
+ 'dependencies': [
+ 'dep.gyp:dep',
+ ],
+ },
+}
diff --git a/third_party/python/gyp/test/self-dependency/dep.gyp b/third_party/python/gyp/test/self-dependency/dep.gyp
new file mode 100644
index 0000000000..2b6c9dda85
--- /dev/null
+++ b/third_party/python/gyp/test/self-dependency/dep.gyp
@@ -0,0 +1,23 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# dep.gyp contains a target dep, on which all the targets in the project
+# depend. This means there's a self-dependency of dep on itself, which is
+# pruned by setting prune_self_dependency to 1.
+
+{
+ 'includes': [
+ 'common.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'dep',
+ 'type': 'none',
+ 'variables': {
+ # Without this GYP will report a cycle in dependency graph.
+ 'prune_self_dependency': 1,
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/self-dependency/gyptest-self-dependency.py b/third_party/python/gyp/test/self-dependency/gyptest-self-dependency.py
new file mode 100755
index 0000000000..82fab271c5
--- /dev/null
+++ b/third_party/python/gyp/test/self-dependency/gyptest-self-dependency.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that pulling in a dependency a second time in a conditional works for
+shared_library targets. Regression test for http://crbug.com/122588
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('self_dependency.gyp')
+
+# If running gyp worked, all is well.
+test.pass_test()
diff --git a/third_party/python/gyp/test/self-dependency/self_dependency.gyp b/third_party/python/gyp/test/self-dependency/self_dependency.gyp
new file mode 100644
index 0000000000..0ca76c669b
--- /dev/null
+++ b/third_party/python/gyp/test/self-dependency/self_dependency.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ 'common.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'none',
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/sibling/gyptest-all.py b/third_party/python/gyp/test/sibling/gyptest-all.py
new file mode 100755
index 0000000000..318e1a3d84
--- /dev/null
+++ b/third_party/python/gyp/test/sibling/gyptest-all.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# The xcode-ninja generator handles gypfiles which are not at the
+# project root incorrectly.
+# cf. https://code.google.com/p/gyp/issues/detail?id=460
+if test.format == 'xcode-ninja':
+ test.skip_test()
+
+test.run_gyp('build/all.gyp', chdir='src')
+
+test.build('build/all.gyp', test.ALL, chdir='src')
+
+chdir = 'src/build'
+
+# The top-level Makefile is in the directory where gyp was run.
+# TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
+# file? What about when passing in multiple .gyp files? Would sub-project
+# Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
+if test.format in ('make', 'ninja', 'cmake'):
+ chdir = 'src'
+
+if test.format == 'xcode':
+ chdir = 'src/prog1'
+test.run_built_executable('program1',
+ chdir=chdir,
+ stdout="Hello from prog1.c\n")
+
+if test.format == 'xcode':
+ chdir = 'src/prog2'
+test.run_built_executable('program2',
+ chdir=chdir,
+ stdout="Hello from prog2.c\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/sibling/gyptest-relocate.py b/third_party/python/gyp/test/sibling/gyptest-relocate.py
new file mode 100755
index 0000000000..05fa9d96fe
--- /dev/null
+++ b/third_party/python/gyp/test/sibling/gyptest-relocate.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# The xcode-ninja generator handles gypfiles which are not at the
+# project root incorrectly.
+# cf. https://code.google.com/p/gyp/issues/detail?id=460
+if test.format == 'xcode-ninja':
+ test.skip_test()
+
+test.run_gyp('build/all.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('build/all.gyp', test.ALL, chdir='relocate/src')
+
+chdir = 'relocate/src/build'
+
+# The top-level Makefile is in the directory where gyp was run.
+# TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
+# file? What about when passing in multiple .gyp files? Would sub-project
+# Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
+if test.format in ('make', 'ninja', 'cmake'):
+ chdir = 'relocate/src'
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/prog1'
+test.run_built_executable('program1',
+ chdir=chdir,
+ stdout="Hello from prog1.c\n")
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/prog2'
+test.run_built_executable('program2',
+ chdir=chdir,
+ stdout="Hello from prog2.c\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/sibling/src/build/all.gyp b/third_party/python/gyp/test/sibling/src/build/all.gyp
new file mode 100644
index 0000000000..79c80c9363
--- /dev/null
+++ b/third_party/python/gyp/test/sibling/src/build/all.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'All',
+ 'type': 'none',
+ 'dependencies': [
+ '../prog1/prog1.gyp:*',
+ '../prog2/prog2.gyp:*',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/sibling/src/prog1/prog1.c b/third_party/python/gyp/test/sibling/src/prog1/prog1.c
new file mode 100644
index 0000000000..218e99401c
--- /dev/null
+++ b/third_party/python/gyp/test/sibling/src/prog1/prog1.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello from prog1.c\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/sibling/src/prog1/prog1.gyp b/third_party/python/gyp/test/sibling/src/prog1/prog1.gyp
new file mode 100644
index 0000000000..4532e4be10
--- /dev/null
+++ b/third_party/python/gyp/test/sibling/src/prog1/prog1.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program1',
+ 'type': 'executable',
+ 'sources': [
+ 'prog1.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/sibling/src/prog2/prog2.c b/third_party/python/gyp/test/sibling/src/prog2/prog2.c
new file mode 100644
index 0000000000..12a31883b9
--- /dev/null
+++ b/third_party/python/gyp/test/sibling/src/prog2/prog2.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello from prog2.c\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/sibling/src/prog2/prog2.gyp b/third_party/python/gyp/test/sibling/src/prog2/prog2.gyp
new file mode 100644
index 0000000000..4cf7f6eb2f
--- /dev/null
+++ b/third_party/python/gyp/test/sibling/src/prog2/prog2.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program2',
+ 'type': 'executable',
+ 'sources': [
+ 'prog2.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/small/gyptest-small.py b/third_party/python/gyp/test/small/gyptest-small.py
new file mode 100755
index 0000000000..e6cb25f3a7
--- /dev/null
+++ b/third_party/python/gyp/test/small/gyptest-small.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Runs small tests.
+"""
+
+import imp
+import os
+import platform
+import sys
+import unittest
+
+import TestGyp
+
+
+test = TestGyp.TestGyp()
+
+# Add pylib to the import path (so tests can import their dependencies).
+# This is consistent with the path.append done in the top-level "gyp" script.
+sys.path.insert(0, os.path.join(test._cwd, 'pylib'))
+
+# Add new test suites here.
+files_to_test = [
+ 'pylib/gyp/MSVSSettings_test.py',
+ 'pylib/gyp/easy_xml_test.py',
+ 'pylib/gyp/generator/msvs_test.py',
+ 'pylib/gyp/generator/ninja_test.py',
+ 'pylib/gyp/generator/xcode_test.py',
+ 'pylib/gyp/common_test.py',
+ 'pylib/gyp/input_test.py',
+]
+
+# Collect all the suites from the above files.
+suites = []
+for filename in files_to_test:
+ # Carve the module name out of the path.
+ name = os.path.splitext(os.path.split(filename)[1])[0]
+ # Find the complete module path.
+ full_filename = os.path.join(test._cwd, filename)
+ # Load the module.
+ module = imp.load_source(name, full_filename)
+ # Add it to the list of test suites.
+ suites.append(unittest.defaultTestLoader.loadTestsFromModule(module))
+# Create combined suite.
+all_tests = unittest.TestSuite(suites)
+
+# Run all the tests.
+result = unittest.TextTestRunner(verbosity=2).run(all_tests)
+if result.failures or result.errors:
+ test.fail_test()
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/standalone-static-library/gyptest-standalone-static-library.py b/third_party/python/gyp/test/standalone-static-library/gyptest-standalone-static-library.py
new file mode 100644
index 0000000000..50535abfc7
--- /dev/null
+++ b/third_party/python/gyp/test/standalone-static-library/gyptest-standalone-static-library.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of a static_library with the standalone_static_library flag set.
+"""
+
+import os
+import subprocess
+import sys
+import TestGyp
+
+# standalone_static_library currently means two things: a specific output
+# location for the built target and non-thin archive files.
+test = TestGyp.TestGyp()
+
+# Verify that types other than static_library cause a failure.
+test.run_gyp('invalid.gyp', status=1, stderr=None)
+target_str = 'invalid.gyp:bad#target'
+err = ['gyp: Target %s has type executable but standalone_static_library flag '
+ 'is only valid for static_library type.' % target_str]
+test.must_contain_all_lines(test.stderr(), err)
+
+# Build a valid standalone_static_library.
+test.run_gyp('mylib.gyp')
+test.build('mylib.gyp', target='prog')
+
+# Verify that the static library is copied to the correct location.
+# We expect the library to be copied to $PRODUCT_DIR.
+standalone_static_library_dir = test.EXECUTABLE
+path_to_lib = os.path.split(
+ test.built_file_path('mylib', type=standalone_static_library_dir))[0]
+lib_name = test.built_file_basename('mylib', type=test.STATIC_LIB)
+path = os.path.join(path_to_lib, lib_name)
+test.must_exist(path)
+
+# Verify that the program runs properly.
+expect = 'hello from mylib.c\n'
+test.run_built_executable('prog', stdout=expect)
+
+# Verify that libmylib.a contains symbols. "ar -x" fails on a 'thin' archive.
+supports_thick = ('make', 'ninja', 'cmake')
+if test.format in supports_thick and sys.platform.startswith('linux'):
+ retcode = subprocess.call(['ar', '-x', path])
+ assert retcode == 0
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/standalone-static-library/invalid.gyp b/third_party/python/gyp/test/standalone-static-library/invalid.gyp
new file mode 100644
index 0000000000..54b32117e0
--- /dev/null
+++ b/third_party/python/gyp/test/standalone-static-library/invalid.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'bad',
+ 'type': 'executable',
+ 'standalone_static_library': 1,
+ 'sources': [
+ 'prog.c',
+ ],
+ },
+ ],
+}
\ No newline at end of file
diff --git a/third_party/python/gyp/test/standalone-static-library/mylib.c b/third_party/python/gyp/test/standalone-static-library/mylib.c
new file mode 100644
index 0000000000..108be618c2
--- /dev/null
+++ b/third_party/python/gyp/test/standalone-static-library/mylib.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+void print(void)
+{
+ printf("hello from mylib.c\n");
+ return;
+}
diff --git a/third_party/python/gyp/test/standalone-static-library/mylib.gyp b/third_party/python/gyp/test/standalone-static-library/mylib.gyp
new file mode 100644
index 0000000000..2d191de319
--- /dev/null
+++ b/third_party/python/gyp/test/standalone-static-library/mylib.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'mylib',
+ 'type': 'static_library',
+ 'standalone_static_library': 1,
+ 'sources': [
+ 'mylib.c',
+ ],
+ },
+ {
+ 'target_name': 'prog',
+ 'type': 'executable',
+ 'sources': [
+ 'prog.c',
+ ],
+ 'dependencies': [
+ 'mylib',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/standalone-static-library/prog.c b/third_party/python/gyp/test/standalone-static-library/prog.c
new file mode 100644
index 0000000000..8af5c90844
--- /dev/null
+++ b/third_party/python/gyp/test/standalone-static-library/prog.c
@@ -0,0 +1,7 @@
+extern void print(void);
+
+int main(void)
+{
+ print();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/standalone/gyptest-standalone.py b/third_party/python/gyp/test/standalone/gyptest-standalone.py
new file mode 100644
index 0000000000..0581d53879
--- /dev/null
+++ b/third_party/python/gyp/test/standalone/gyptest-standalone.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that a project generated with the -Gstandalone generator flag does
+not reference the source .gyp file in any of its generated build files.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+import os
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('standalone.gyp', '-Gstandalone')
+
+# Look at all the files in the tree to make sure none
+# of them reference the gyp file.
+for root, dirs, files in os.walk("."):
+ for file in files:
+ # ignore ourself
+ if os.path.splitext(__file__)[0] in file:
+ continue
+ file = os.path.join(root, file)
+ contents = open(file, 'rb').read().decode('utf-8', 'ignore')
+ if 'standalone.gyp' in contents:
+ print('gyp file referenced in generated output: %s' % file)
+ test.fail_test()
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/standalone/standalone.gyp b/third_party/python/gyp/test/standalone/standalone.gyp
new file mode 100644
index 0000000000..b2a6785430
--- /dev/null
+++ b/third_party/python/gyp/test/standalone/standalone.gyp
@@ -0,0 +1,12 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name' : 'foo',
+ 'type' : 'executable'
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/subdirectory/gyptest-SYMROOT-all.py b/third_party/python/gyp/test/subdirectory/gyptest-SYMROOT-all.py
new file mode 100755
index 0000000000..9dfb8b05d4
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/gyptest-SYMROOT-all.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a target and a subsidiary dependent target from a
+.gyp file in a subdirectory, without specifying an explicit output build
+directory, and using the generated solution or project file at the top
+of the tree as the entry point.
+
+The configuration sets the Xcode SYMROOT variable and uses --depth=
+to make Xcode behave like the other build tools--that is, put all
+built targets in a single output build directory at the top of the tree.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# Suppress the test infrastructure's setting SYMROOT on the command line.
+test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src')
+
+test.run_built_executable('prog1',
+ stdout="Hello from prog1.c\n",
+ chdir='relocate/src')
+test.run_built_executable('prog2',
+ stdout="Hello from prog2.c\n",
+ chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/subdirectory/gyptest-SYMROOT-default.py b/third_party/python/gyp/test/subdirectory/gyptest-SYMROOT-default.py
new file mode 100755
index 0000000000..8796650905
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/gyptest-SYMROOT-default.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a target and a subsidiary dependent target from a
+.gyp file in a subdirectory, without specifying an explicit output build
+directory, and using the generated solution or project file at the top
+of the tree as the entry point.
+
+The configuration sets the Xcode SYMROOT variable and uses --depth=
+to make Xcode behave like the other build tools--that is, put all
+built targets in a single output build directory at the top of the tree.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+# Suppress the test infrastructure's setting SYMROOT on the command line.
+test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src')
+
+test.run_built_executable('prog1',
+ stdout="Hello from prog1.c\n",
+ chdir='relocate/src')
+
+test.run_built_executable('prog2',
+ stdout="Hello from prog2.c\n",
+ chdir='relocate/src')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/subdirectory/gyptest-subdir-all.py b/third_party/python/gyp/test/subdirectory/gyptest-subdir-all.py
new file mode 100755
index 0000000000..d5c458454e
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/gyptest-subdir-all.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a subsidiary dependent target from a .gyp file in a
+subdirectory, without specifying an explicit output build directory,
+and using the subdirectory's solution or project file as the entry point.
+"""
+
+import TestGyp
+
+# Ninja doesn't support relocation.
+# CMake produces a single CMakeLists.txt in the output directory.
+test = TestGyp.TestGyp(formats=['!ninja', '!cmake'])
+
+test.run_gyp('prog1.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+chdir = 'relocate/src/subdir'
+target = test.ALL
+
+test.build('prog2.gyp', target, chdir=chdir)
+
+test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir)
+
+test.run_built_executable('prog2',
+ chdir=chdir,
+ stdout="Hello from prog2.c\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/subdirectory/gyptest-subdir-default.py b/third_party/python/gyp/test/subdirectory/gyptest-subdir-default.py
new file mode 100755
index 0000000000..2cb6659beb
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/gyptest-subdir-default.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a subsidiary dependent target from a .gyp file in a
+subdirectory, without specifying an explicit output build directory,
+and using the subdirectory's solution or project file as the entry point.
+"""
+
+import TestGyp
+import errno
+
+# Ninja doesn't support relocation.
+# CMake produces a single CMakeLists.txt in the output directory.
+test = TestGyp.TestGyp(formats=['!ninja', '!cmake'])
+
+test.run_gyp('prog1.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+chdir = 'relocate/src/subdir'
+
+test.build('prog2.gyp', chdir=chdir)
+
+test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir)
+
+test.run_built_executable('prog2',
+ chdir=chdir,
+ stdout="Hello from prog2.c\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/subdirectory/gyptest-subdir2-deep.py b/third_party/python/gyp/test/subdirectory/gyptest-subdir2-deep.py
new file mode 100755
index 0000000000..48548982f8
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/gyptest-subdir2-deep.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a project rooted several layers under src_dir works.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('prog3.gyp', chdir='src/subdir/subdir2')
+
+test.relocate('src', 'relocate/src')
+
+test.build('prog3.gyp', test.ALL, chdir='relocate/src/subdir/subdir2')
+
+test.run_built_executable('prog3',
+ chdir='relocate/src/subdir/subdir2',
+ stdout="Hello from prog3.c\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/subdirectory/gyptest-top-all.py b/third_party/python/gyp/test/subdirectory/gyptest-top-all.py
new file mode 100755
index 0000000000..b3c25b1f8d
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/gyptest-top-all.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a target and a subsidiary dependent target from a
+.gyp file in a subdirectory, without specifying an explicit output build
+directory, and using the generated solution or project file at the top
+of the tree as the entry point.
+
+There is a difference here in the default behavior of the underlying
+build tools. Specifically, when building the entire "solution", Xcode
+puts the output of each project relative to the .xcodeproj directory,
+while Visual Studio (and our implementation of Make) put it
+in a build directory relative to the "solution"--that is, the entry-point
+from which you built the entire tree.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('prog1.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('prog1.gyp', test.ALL, chdir='relocate/src')
+
+test.run_built_executable('prog1',
+ stdout="Hello from prog1.c\n",
+ chdir='relocate/src')
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('prog2',
+ chdir=chdir,
+ stdout="Hello from prog2.c\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/subdirectory/gyptest-top-default.py b/third_party/python/gyp/test/subdirectory/gyptest-top-default.py
new file mode 100755
index 0000000000..2448dd98ea
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/gyptest-top-default.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a target and a subsidiary dependent target from a
+.gyp file in a subdirectory, without specifying an explicit output build
+directory, and using the generated solution or project file at the top
+of the tree as the entry point.
+
+There is a difference here in the default behavior of the underlying
+build tools. Specifically, when building the entire "solution", Xcode
+puts the output of each project relative to the .xcodeproj directory,
+while Visual Studio (and our implementation of Make) put it
+in a build directory relative to the "solution"--that is, the entry-point
+from which you built the entire tree.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('prog1.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('prog1.gyp', chdir='relocate/src')
+
+test.run_built_executable('prog1',
+ stdout="Hello from prog1.c\n",
+ chdir='relocate/src')
+
+if test.format == 'xcode':
+ chdir = 'relocate/src/subdir'
+else:
+ chdir = 'relocate/src'
+test.run_built_executable('prog2',
+ chdir=chdir,
+ stdout="Hello from prog2.c\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/subdirectory/src/prog1.c b/third_party/python/gyp/test/subdirectory/src/prog1.c
new file mode 100644
index 0000000000..218e99401c
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/src/prog1.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello from prog1.c\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/subdirectory/src/prog1.gyp b/third_party/python/gyp/test/subdirectory/src/prog1.gyp
new file mode 100644
index 0000000000..2aa66ce7d7
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/src/prog1.gyp
@@ -0,0 +1,21 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ 'symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog1',
+ 'type': 'executable',
+ 'dependencies': [
+ 'subdir/prog2.gyp:prog2',
+ ],
+ 'sources': [
+ 'prog1.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/subdirectory/src/subdir/prog2.c b/third_party/python/gyp/test/subdirectory/src/subdir/prog2.c
new file mode 100644
index 0000000000..12a31883b9
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/src/subdir/prog2.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello from prog2.c\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/subdirectory/src/subdir/prog2.gyp b/third_party/python/gyp/test/subdirectory/src/subdir/prog2.gyp
new file mode 100644
index 0000000000..c6cd35f7f8
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/src/subdir/prog2.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog2',
+ 'type': 'executable',
+ 'sources': [
+ 'prog2.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/subdirectory/src/subdir/subdir2/prog3.c b/third_party/python/gyp/test/subdirectory/src/subdir/subdir2/prog3.c
new file mode 100644
index 0000000000..a326dc61b6
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/src/subdir/subdir2/prog3.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello from prog3.c\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp b/third_party/python/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp
new file mode 100644
index 0000000000..b49fb59113
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../../symroot.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'prog3',
+ 'type': 'executable',
+ 'sources': [
+ 'prog3.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/subdirectory/src/symroot.gypi b/third_party/python/gyp/test/subdirectory/src/symroot.gypi
new file mode 100644
index 0000000000..519916427c
--- /dev/null
+++ b/third_party/python/gyp/test/subdirectory/src/symroot.gypi
@@ -0,0 +1,16 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
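+    # A trailing '%' gives the variable a default value that a command-line
+    # definition such as -Dset_symroot=1 can override.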
+ 'set_symroot%': 0,
+ },
+ 'conditions': [
+ ['set_symroot == 1', {
+ 'xcode_settings': {
+ 'SYMROOT': '<(DEPTH)/build',
+ },
+ }],
+ ],
+}
diff --git a/third_party/python/gyp/test/symlinks/gyptest-symlinks.py b/third_party/python/gyp/test/symlinks/gyptest-symlinks.py
new file mode 100755
index 0000000000..278818a992
--- /dev/null
+++ b/third_party/python/gyp/test/symlinks/gyptest-symlinks.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test that RelativePath(s, d) doesn't return a path starting with '..' when
+s is textually below d, but is also a symlink to a file that is not below d.
+
+Returning .. in this case would break the Ninja generator in such a case,
+because it computes output directories by concatenating paths, and concat'ing
+a path starting with .. can unexpectedly erase other parts of the path. It's
+difficult to test this directly since the test harness assumes toplevel_dir is
+the root of the repository, but this test should at least verify that the
+required behavior doesn't change.
+"""
+
+import TestGyp
+import os
+import sys
+import tempfile
+
+if sys.platform != 'win32':
+ test = TestGyp.TestGyp()
+
+ # Copy hello.gyp and hello.c to temporary named files, which will then be
+ # symlinked back and processed. Note that we don't ask gyp to touch the
+ # original files at all; they are only there as source material for the copy.
+ # That's why hello.gyp references symlink_hello.c instead of hello.c.
+ with tempfile.NamedTemporaryFile(mode='w+') as gyp_file:
+ with tempfile.NamedTemporaryFile(mode='w+') as c_file:
+ with open('hello.gyp') as orig_gyp_file:
+ gyp_file.write(orig_gyp_file.read())
+ gyp_file.flush()
+ with open('hello.c') as orig_c_file:
+ c_file.write(orig_c_file.read())
+ c_file.flush()
+ # We need to flush the files because we want to read them before closing
+ # them, since when they are closed they will be deleted.
+
+ # Don't proceed with the test on a system that doesn't let you read from
+ # a still-open temporary file.
+ if os.path.getsize(gyp_file.name) == 0:
+ raise OSError("Copy to temporary file didn't work.")
+
+ symlink_gyp = test.built_file_path('symlink_hello.gyp')
+ symlink_c = test.built_file_path('symlink_hello.c')
+ outdir = os.path.dirname(symlink_gyp)
+
+ # Make sure the outdir exists.
+ try:
+ os.makedirs(outdir)
+ except OSError:
+ if not os.path.isdir(outdir):
+ raise
+ os.symlink(gyp_file.name, symlink_gyp)
+ os.symlink(c_file.name, symlink_c)
+
+ # Run gyp on the symlinked files.
+ test.run_gyp(symlink_gyp, chdir=outdir)
+ test.build(symlink_gyp, chdir=outdir)
+ test.run_built_executable('symlink_hello', stdout="Hello, world!\n",
+ chdir=outdir)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/symlinks/hello.c b/third_party/python/gyp/test/symlinks/hello.c
new file mode 100644
index 0000000000..c63204b948
--- /dev/null
+++ b/third_party/python/gyp/test/symlinks/hello.c
@@ -0,0 +1,12 @@
+/* Copyright (c) 2015 Google Inc. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+*/
+
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello, world!\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/symlinks/hello.gyp b/third_party/python/gyp/test/symlinks/hello.gyp
new file mode 100644
index 0000000000..81d9f18e09
--- /dev/null
+++ b/third_party/python/gyp/test/symlinks/hello.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'symlink_hello',
+ 'type': 'executable',
+ 'sources': [
+ 'symlink_hello.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/target/gyptest-target.py b/third_party/python/gyp/test/target/gyptest-target.py
new file mode 100644
index 0000000000..4338db739c
--- /dev/null
+++ b/third_party/python/gyp/test/target/gyptest-target.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simplest-possible build of a "Hello, world!" program
+using non-default extension. In particular, verifies how
+target_extension is used to avoid MSB8012 for msvs.
+"""
+
+import sys
+import TestGyp
+
+if sys.platform in ('win32', 'cygwin'):
+ test = TestGyp.TestGyp()
+
+ test.run_gyp('target.gyp')
+ test.build('target.gyp')
+
+ # executables
+ test.built_file_must_exist('hello1.stuff', test.EXECUTABLE, bare=True)
+ test.built_file_must_exist('hello2.exe', test.EXECUTABLE, bare=True)
+ test.built_file_must_not_exist('hello2.stuff', test.EXECUTABLE, bare=True)
+
+ # check msvs log for errors
+ if test.format == "msvs":
+ log_file = "obj\\hello1\\hello1.log"
+ test.built_file_must_exist(log_file)
+ test.built_file_must_not_contain(log_file, "MSB8012")
+
+ log_file = "obj\\hello2\\hello2.log"
+ test.built_file_must_exist(log_file)
+ test.built_file_must_not_contain(log_file, "MSB8012")
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/target/hello.c b/third_party/python/gyp/test/target/hello.c
new file mode 100644
index 0000000000..3d535d3ec6
--- /dev/null
+++ b/third_party/python/gyp/test/target/hello.c
@@ -0,0 +1,10 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+int main(void) {
+  printf("Hello, world!\n");
+  return 0;
+}
diff --git a/third_party/python/gyp/test/target/target.gyp b/third_party/python/gyp/test/target/target.gyp
new file mode 100644
index 0000000000..c87e30f533
--- /dev/null
+++ b/third_party/python/gyp/test/target/target.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
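+# hello1 uses 'product_extension', which changes the extension of the built
+# file itself (hello1.stuff).  hello2 uses 'target_extension', which is only
+# expected to adjust the MSVS target-extension setting so that MSB8012 is not
+# triggered, while the built file keeps its default .exe name (see
+# gyptest-target.py).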
+{
+ 'targets': [
+ {
+ 'target_name': 'hello1',
+ 'product_extension': 'stuff',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ },
+ {
+ 'target_name': 'hello2',
+ 'target_extension': 'stuff',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ ],
+ }
+ ]
+}
diff --git a/third_party/python/gyp/test/toolsets/gyptest-toolsets.py b/third_party/python/gyp/test/toolsets/gyptest-toolsets.py
new file mode 100755
index 0000000000..f80fce70a2
--- /dev/null
+++ b/third_party/python/gyp/test/toolsets/gyptest-toolsets.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that toolsets are correctly applied
+"""
+import os
+import sys
+import TestGyp
+
+if sys.platform.startswith('linux'):
+
+ test = TestGyp.TestGyp(formats=['make', 'ninja'])
+
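+  # GYP_CROSSCOMPILE=1 makes gyp generate both the 'host' and 'target'
+  # toolsets; the environment is saved and restored so the setting does not
+  # leak into other tests.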
+ oldenv = os.environ.copy()
+ try:
+ os.environ['GYP_CROSSCOMPILE'] = '1'
+ test.run_gyp('toolsets.gyp')
+ finally:
+ os.environ.clear()
+ os.environ.update(oldenv)
+
+ test.build('toolsets.gyp', test.ALL)
+
+ test.run_built_executable('host-main', stdout="Host\nShared: Host\n")
+ test.run_built_executable('target-main', stdout="Target\nShared: Target\n")
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/toolsets/main.cc b/third_party/python/gyp/test/toolsets/main.cc
new file mode 100644
index 0000000000..bc47da9978
--- /dev/null
+++ b/third_party/python/gyp/test/toolsets/main.cc
@@ -0,0 +1,13 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+#include <stdio.h>
+
+const char *GetToolset();
+const char *GetToolsetShared();
+
+int main(void) {
+ printf("%s\n", GetToolset());
+ printf("Shared: %s\n", GetToolsetShared());
+}
diff --git a/third_party/python/gyp/test/toolsets/toolsets.cc b/third_party/python/gyp/test/toolsets/toolsets.cc
new file mode 100644
index 0000000000..a45fa029cb
--- /dev/null
+++ b/third_party/python/gyp/test/toolsets/toolsets.cc
@@ -0,0 +1,11 @@
+/* Copyright (c) 2009 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+const char *GetToolset() {
+#ifdef TARGET
+ return "Target";
+#else
+ return "Host";
+#endif
+}
diff --git a/third_party/python/gyp/test/toolsets/toolsets.gyp b/third_party/python/gyp/test/toolsets/toolsets.gyp
new file mode 100644
index 0000000000..3bc3a784ea
--- /dev/null
+++ b/third_party/python/gyp/test/toolsets/toolsets.gyp
@@ -0,0 +1,62 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'target_conditions': [
+ ['_toolset=="target"', {'defines': ['TARGET']}]
+ ]
+ },
+ 'targets': [
+ {
+ 'target_name': 'toolsets',
+ 'type': 'static_library',
+ 'toolsets': ['target', 'host'],
+ 'sources': [
+ 'toolsets.cc',
+ ],
+ },
+ {
+ 'target_name': 'host-main',
+ 'type': 'executable',
+ 'toolsets': ['host'],
+ 'dependencies': ['toolsets', 'toolsets_shared'],
+ 'sources': [
+ 'main.cc',
+ ],
+ },
+ {
+ 'target_name': 'target-main',
+ 'type': 'executable',
+ 'dependencies': ['toolsets', 'toolsets_shared'],
+ 'sources': [
+ 'main.cc',
+ ],
+ },
+    # This tests that build systems can handle a shared library being built for
+ # both host and target.
+ {
+ 'target_name': 'janus',
+ 'type': 'shared_library',
+ 'toolsets': ['target', 'host'],
+ 'sources': [
+ 'toolsets.cc',
+ ],
+ 'cflags': [ '-fPIC' ],
+ },
+ {
+ 'target_name': 'toolsets_shared',
+ 'type': 'shared_library',
+ 'toolsets': ['target', 'host'],
+ 'target_conditions': [
+ # Ensure target and host have different shared_library names
+ ['_toolset=="host"', {'product_extension': 'host'}],
+ ],
+ 'sources': [
+ 'toolsets_shared.cc',
+ ],
+ 'cflags': [ '-fPIC' ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/toolsets/toolsets_shared.cc b/third_party/python/gyp/test/toolsets/toolsets_shared.cc
new file mode 100644
index 0000000000..794af2c0bd
--- /dev/null
+++ b/third_party/python/gyp/test/toolsets/toolsets_shared.cc
@@ -0,0 +1,11 @@
+/* Copyright (c) 2013 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+const char *GetToolsetShared() {
+#ifdef TARGET
+ return "Target";
+#else
+ return "Host";
+#endif
+}
diff --git a/third_party/python/gyp/test/toplevel-dir/gyptest-toplevel-dir.py b/third_party/python/gyp/test/toplevel-dir/gyptest-toplevel-dir.py
new file mode 100755
index 0000000000..9e69512dd3
--- /dev/null
+++ b/third_party/python/gyp/test/toplevel-dir/gyptest-toplevel-dir.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a target and its dependent target from a .gyp file in a
+subdirectory, using --toplevel-dir to place the generated build files one
+directory above that .gyp file and building from that top-level directory.
+"""
+
+import TestGyp
+import errno
+
+test = TestGyp.TestGyp(formats=['ninja', 'make'])
+
+# We want our Makefile to be one dir up from main.gyp.
+test.run_gyp('main.gyp', '--toplevel-dir=..', chdir='src/sub1')
+
+toplevel_dir = 'src'
+
+test.build('sub1/main.gyp', test.ALL, chdir=toplevel_dir)
+
+test.built_file_must_exist('prog1', type=test.EXECUTABLE, chdir=toplevel_dir)
+
+test.run_built_executable('prog1',
+ chdir=toplevel_dir,
+ stdout="Hello from prog1.c\n")
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/toplevel-dir/src/sub1/main.gyp b/third_party/python/gyp/test/toplevel-dir/src/sub1/main.gyp
new file mode 100644
index 0000000000..33219010e4
--- /dev/null
+++ b/third_party/python/gyp/test/toplevel-dir/src/sub1/main.gyp
@@ -0,0 +1,18 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'prog1',
+ 'type': 'executable',
+ 'dependencies': [
+ '<(DEPTH)/../sub2/prog2.gyp:prog2',
+ ],
+ 'sources': [
+ 'prog1.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/toplevel-dir/src/sub1/prog1.c b/third_party/python/gyp/test/toplevel-dir/src/sub1/prog1.c
new file mode 100644
index 0000000000..218e99401c
--- /dev/null
+++ b/third_party/python/gyp/test/toplevel-dir/src/sub1/prog1.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello from prog1.c\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/toplevel-dir/src/sub2/prog2.c b/third_party/python/gyp/test/toplevel-dir/src/sub2/prog2.c
new file mode 100644
index 0000000000..12a31883b9
--- /dev/null
+++ b/third_party/python/gyp/test/toplevel-dir/src/sub2/prog2.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(void)
+{
+ printf("Hello from prog2.c\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/toplevel-dir/src/sub2/prog2.gyp b/third_party/python/gyp/test/toplevel-dir/src/sub2/prog2.gyp
new file mode 100644
index 0000000000..5934548369
--- /dev/null
+++ b/third_party/python/gyp/test/toplevel-dir/src/sub2/prog2.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'prog2',
+ 'type': 'executable',
+ 'sources': [
+ 'prog2.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/variables/commands/commands-repeated.gyp b/third_party/python/gyp/test/variables/commands/commands-repeated.gyp
new file mode 100644
index 0000000000..1f52e75936
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/commands-repeated.gyp
@@ -0,0 +1,128 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a simple test file to make sure that variable substitution
+# happens correctly. Run "run_tests.py" using python to generate the
+# output from this gyp file.
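+#
+# Expansion syntax exercised in this file: '<(name)' substitutes a variable,
+# '<!(cmd)' runs a command and substitutes its output, '<@(name)' expands a
+# list variable into separate list items, and nested forms are evaluated
+# inside-out.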
+
+{
+ 'variables': {
+ 'pi': 'import math; print(math.pi)',
+ 'third_letters': "<(other_letters)HIJK",
+ 'letters_list': 'ABCD',
+ 'other_letters': '<(letters_list)EFG',
+ 'check_included': '<(included_variable)',
+ 'check_lists': [
+ '<(included_variable)',
+ '<(third_letters)',
+ ],
+ 'check_int': 5,
+ 'check_str_int': '6',
+ 'check_list_int': [
+ 7,
+ '8',
+ 9,
+ ],
+ 'not_int_1': ' 10',
+ 'not_int_2': '11 ',
+ 'not_int_3': '012',
+ 'not_int_4': '13.0',
+ 'not_int_5': '+14',
+ 'negative_int': '-15',
+ 'zero_int': '0',
+ },
+ 'includes': [
+ 'commands.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'none',
+ 'variables': {
+ 'var1': '<!(["python", "-c", "<(pi)"])',
+ 'var2': '<!(python -c "print(\'<!(python -c "<(pi)") <(letters_list)\')")',
+ 'var3': '<!(python -c "print(\'<(letters_list)\')")',
+ 'var4': '<(<!(python -c "print(\'letters_list\')"))',
+ 'var5': 'letters_',
+ 'var6': 'list',
+ 'var7': '<(check_int)',
+ 'var8': '<(check_int)blah',
+ 'var9': '<(check_str_int)',
+ 'var10': '<(check_list_int)',
+ 'var11': ['<@(check_list_int)'],
+ 'var12': '<(not_int_1)',
+ 'var13': '<(not_int_2)',
+ 'var14': '<(not_int_3)',
+ 'var15': '<(not_int_4)',
+ 'var16': '<(not_int_5)',
+ 'var17': '<(negative_int)',
+ 'var18': '<(zero_int)',
+ # A second set with different names to make sure they only execute the
+ # commands once.
+ 'var1prime': '<!(["python", "-c", "<(pi)"])',
+ 'var2prime': '<!(python -c "print(\'<!(python -c "<(pi)") <(letters_list)\')")',
+ 'var3prime': '<!(python -c "print(\'<(letters_list)\')")',
+ 'var4prime': '<(<!(python -c "print(\'letters_list\')"))',
+ },
+ 'actions': [
+ {
+ 'action_name': 'test_action',
+ 'variables': {
+ 'var7': '<!(echo <(var5)<(var6))',
+ },
+ 'inputs' : [
+ '<(var2)',
+ ],
+ 'outputs': [
+ '<(var4)',
+ '<(var7)',
+ ],
+ 'action': [
+ 'echo',
+ '<(_inputs)',
+ '<(_outputs)',
+ ],
+ },
+ # Again with the same vars to make sure the right things happened.
+ {
+ 'action_name': 'test_action_prime',
+ 'variables': {
+ 'var7': '<!(echo <(var5)<(var6))',
+ },
+ 'inputs' : [
+ '<(var2)',
+ ],
+ 'outputs': [
+ '<(var4)',
+ '<(var7)',
+ ],
+ 'action': [
+ 'echo',
+ '<(_inputs)',
+ '<(_outputs)',
+ ],
+ },
+ # And one more time with the other vars...
+ {
+ 'action_name': 'test_action_prime_prime',
+ 'variables': {
+ 'var7': '<!(echo <(var5)<(var6))',
+ },
+ 'inputs' : [
+ '<(var2prime)',
+ ],
+ 'outputs': [
+ '<(var4prime)',
+ '<(var7)',
+ ],
+ 'action': [
+ 'echo',
+ '<(_inputs)',
+ '<(_outputs)',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/variables/commands/commands-repeated.gyp.stdout b/third_party/python/gyp/test/variables/commands/commands-repeated.gyp.stdout
new file mode 100644
index 0000000000..00bba88e4f
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/commands-repeated.gyp.stdout
@@ -0,0 +1,136 @@
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'other_letters', 'is_array': '', 'replace': '<(other_letters)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '<(letters_list)EFGHIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFGHIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFG', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'included_variable', 'is_array': '', 'replace': '<(included_variable)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'XYZ', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'included_variable', 'is_array': '', 'replace': '<(included_variable)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'XYZ', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'third_letters', 'is_array': '', 'replace': '<(third_letters)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '<(other_letters)HIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'other_letters', 'is_array': '', 'replace': '<(other_letters)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '<(letters_list)EFGHIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFGHIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print(\'<!(python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "print(\'<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'python -c "import math; print(math.pi)"', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "import math; print(math.pi)"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print(\'3.14159265359 ABCD\')"', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print('3.14159265359 ABCD')"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'replace': '<!(["python", "-c", "<(pi)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '["python", "-c", "import math; print(math.pi)"]', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command '['python', '-c', 'import math; print(math.pi)']' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '<!(python -c "print(\'letters_list\'', 'is_array': '', 'replace': '<(<!(python -c "print(\'letters_list\')', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print(\'letters_list\'', 'is_array': '', 'replace': '<!(python -c "print(\'letters_list\')', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print('letters_list')"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_int', 'is_array': '', 'replace': '<(check_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '5', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'replace': '<!(["python", "-c", "<(pi)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '["python", "-c", "import math; print(math.pi)"]', recursing.
+VARIABLES:input.py:889:ExpandVariables Had cache value for command '['python', '-c', 'import math; print(math.pi)']' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print(\'<(letters_list', 'is_array': '', 'replace': '<!(python -c "print(\'<(letters_list)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print(\'ABCD\')"', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print('ABCD')"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print(\'<!(python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "print(\'<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'python -c "import math; print(math.pi)"', recursing.
+VARIABLES:input.py:889:ExpandVariables Had cache value for command 'python -c "import math; print(math.pi)"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print(\'3.14159265359 ABCD\')"', recursing.
+VARIABLES:input.py:889:ExpandVariables Had cache value for command 'python -c "print('3.14159265359 ABCD')"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_str_int', 'is_array': '', 'replace': '<(check_str_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '6', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_int', 'is_array': '', 'replace': '<(check_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '5blah', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '<!(python -c "print(\'letters_list\'', 'is_array': '', 'replace': '<(<!(python -c "print(\'letters_list\')', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print(\'letters_list\'', 'is_array': '', 'replace': '<!(python -c "print(\'letters_list\')', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:889:ExpandVariables Had cache value for command 'python -c "print('letters_list')"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print(\'<(letters_list', 'is_array': '', 'replace': '<!(python -c "print(\'<(letters_list)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print(\'ABCD\')"', recursing.
+VARIABLES:input.py:889:ExpandVariables Had cache value for command 'python -c "print('ABCD')"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_4', 'is_array': '', 'replace': '<(not_int_4)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '13.0', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_3', 'is_array': '', 'replace': '<(not_int_3)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '012', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'negative_int', 'is_array': '', 'replace': '<(negative_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '-15', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_5', 'is_array': '', 'replace': '<(not_int_5)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '+14', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_list_int', 'is_array': '', 'replace': '<(check_list_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '7 8 9', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_2', 'is_array': '', 'replace': '<(not_int_2)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '11 ', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_1', 'is_array': '', 'replace': '<(not_int_1)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output ' 10', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'zero_int', 'is_array': '', 'replace': '<(zero_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_list_int', 'is_array': '', 'replace': '<@(check_list_int)', 'type': '<@', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output [7, 8, 9], recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var6', 'is_array': '', 'replace': '<(var6)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'echo <(var5', 'is_array': '', 'replace': '<!(echo <(var5)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var5', 'is_array': '', 'replace': '<(var5)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'echo letters_list', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command 'echo letters_list' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_inputs', 'is_array': '', 'replace': '<(_inputs)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var2', 'is_array': '', 'replace': '<(var2)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output '"3.14159265359 ABCD"', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_outputs', 'is_array': '', 'replace': '<(_outputs)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var4', 'is_array': '', 'replace': '<(var4)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var7', 'is_array': '', 'replace': '<(var7)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD letters_list', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var6', 'is_array': '', 'replace': '<(var6)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'echo <(var5', 'is_array': '', 'replace': '<!(echo <(var5)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var5', 'is_array': '', 'replace': '<(var5)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'echo letters_list', recursing.
+VARIABLES:input.py:889:ExpandVariables Had cache value for command 'echo letters_list' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_inputs', 'is_array': '', 'replace': '<(_inputs)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var2', 'is_array': '', 'replace': '<(var2)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output '"3.14159265359 ABCD"', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_outputs', 'is_array': '', 'replace': '<(_outputs)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var4', 'is_array': '', 'replace': '<(var4)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var7', 'is_array': '', 'replace': '<(var7)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD letters_list', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var6', 'is_array': '', 'replace': '<(var6)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'echo <(var5', 'is_array': '', 'replace': '<!(echo <(var5)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var5', 'is_array': '', 'replace': '<(var5)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'echo letters_list', recursing.
+VARIABLES:input.py:889:ExpandVariables Had cache value for command 'echo letters_list' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_inputs', 'is_array': '', 'replace': '<(_inputs)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var2prime', 'is_array': '', 'replace': '<(var2prime)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output '"3.14159265359 ABCD"', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_outputs', 'is_array': '', 'replace': '<(_outputs)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var4prime', 'is_array': '', 'replace': '<(var4prime)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var7', 'is_array': '', 'replace': '<(var7)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD letters_list', recursing.
diff --git a/third_party/python/gyp/test/variables/commands/commands-repeated.gypd.golden b/third_party/python/gyp/test/variables/commands/commands-repeated.gypd.golden
new file mode 100644
index 0000000000..fa4c53716f
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/commands-repeated.gypd.golden
@@ -0,0 +1,77 @@
+{'_DEPTH': '.',
+ 'included_files': ['commands-repeated.gyp', 'commands.gypi'],
+ 'targets': [{'actions': [{'action': ['echo',
+ '"3.14159265359 ABCD"',
+ 'ABCD letters_list'],
+ 'action_name': 'test_action',
+ 'inputs': ['3.14159265359 ABCD'],
+ 'outputs': ['ABCD', 'letters_list'],
+ 'variables': {'var7': 'letters_list'}},
+ {'action': ['echo',
+ '"3.14159265359 ABCD"',
+ 'ABCD letters_list'],
+ 'action_name': 'test_action_prime',
+ 'inputs': ['3.14159265359 ABCD'],
+ 'outputs': ['ABCD', 'letters_list'],
+ 'variables': {'var7': 'letters_list'}},
+ {'action': ['echo',
+ '"3.14159265359 ABCD"',
+ 'ABCD letters_list'],
+ 'action_name': 'test_action_prime_prime',
+ 'inputs': ['3.14159265359 ABCD'],
+ 'outputs': ['ABCD', 'letters_list'],
+ 'variables': {'var7': 'letters_list'}}],
+ 'configurations': {'Default': {}},
+ 'default_configuration': 'Default',
+ 'target_name': 'foo',
+ 'toolset': 'target',
+ 'type': 'none',
+ 'variables': {'var1': '3.14159265359',
+ 'var10': '7 8 9',
+ 'var11': ['7', '8', '9'],
+ 'var12': ' 10',
+ 'var13': '11 ',
+ 'var14': '012',
+ 'var15': '13.0',
+ 'var16': '+14',
+ 'var17': '-15',
+ 'var18': '0',
+ 'var1prime': '3.14159265359',
+ 'var2': '3.14159265359 ABCD',
+ 'var2prime': '3.14159265359 ABCD',
+ 'var3': 'ABCD',
+ 'var3prime': 'ABCD',
+ 'var4': 'ABCD',
+ 'var4prime': 'ABCD',
+ 'var5': 'letters_',
+ 'var6': 'list',
+ 'var7': '5',
+ 'var8': '5blah',
+ 'var9': '6'}},
+ {'configurations': {'Default': {}},
+ 'default_configuration': 'Default',
+ 'target_name': 'dummy',
+ 'toolset': 'target',
+ 'type': 'none'}],
+ 'variables': {'check_included': 'XYZ',
+ 'check_int': '5',
+ 'check_list_int': ['7', '8', '9'],
+ 'check_lists': ['XYZ', 'ABCDEFGHIJK'],
+ 'check_str_int': '6',
+ 'default_empty_files%': '',
+ 'default_empty_str%': '',
+ 'default_int%': '0',
+ 'default_int_files%': '0',
+ 'default_str%': 'my_str',
+ 'included_variable': 'XYZ',
+ 'letters_list': 'ABCD',
+ 'negative_int': '-15',
+ 'not_int_1': ' 10',
+ 'not_int_2': '11 ',
+ 'not_int_3': '012',
+ 'not_int_4': '13.0',
+ 'not_int_5': '+14',
+ 'other_letters': 'ABCDEFG',
+ 'pi': 'import math; print(math.pi)',
+ 'third_letters': 'ABCDEFGHIJK',
+ 'zero_int': '0'}}
diff --git a/third_party/python/gyp/test/variables/commands/commands.gyp b/third_party/python/gyp/test/variables/commands/commands.gyp
new file mode 100644
index 0000000000..7d460b1810
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/commands.gyp
@@ -0,0 +1,91 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a simple test file to make sure that variable substitution
+# happens correctly. Run "run_tests.py" using python to generate the
+# output from this gyp file.
+
+{
+ 'variables': {
+ 'pi': 'import math; print(math.pi)',
+ 'third_letters': "<(other_letters)HIJK",
+ 'letters_list': 'ABCD',
+ 'other_letters': '<(letters_list)EFG',
+ 'check_included': '<(included_variable)',
+ 'check_lists': [
+ '<(included_variable)',
+ '<(third_letters)',
+ ],
+ 'check_int': 5,
+ 'check_str_int': '6',
+ 'check_list_int': [
+ 7,
+ '8',
+ 9,
+ ],
+ 'not_int_1': ' 10',
+ 'not_int_2': '11 ',
+ 'not_int_3': '012',
+ 'not_int_4': '13.0',
+ 'not_int_5': '+14',
+ 'negative_int': '-15',
+ 'zero_int': '0',
+ },
+ 'includes': [
+ 'commands.gypi',
+ ],
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'none',
+ 'variables': {
+ 'var1': '<!(["python", "-c", "<(pi)"])',
+ 'var2': '<!(python -c "print(\'<!(python -c "<(pi)") <(letters_list)\')")',
+ 'var3': '<!(python -c "print(\'<(letters_list)\')")',
+ 'var4': '<(<!(python -c "print(\'letters_list\')"))',
+ 'var5': 'letters_',
+ 'var6': 'list',
+ 'var7': '<(check_int)',
+ 'var8': '<(check_int)blah',
+ 'var9': '<(check_str_int)',
+ 'var10': '<(check_list_int)',
+ 'var11': ['<@(check_list_int)'],
+ 'var12': '<(not_int_1)',
+ 'var13': '<(not_int_2)',
+ 'var14': '<(not_int_3)',
+ 'var15': '<(not_int_4)',
+ 'var16': '<(not_int_5)',
+ 'var17': '<(negative_int)',
+ 'var18': '<(zero_int)',
+ 'var19': ['<!@(python test.py)'],
+ 'var20': '<!(python test.py)',
+ 'var21': '<(default_str)',
+ 'var22': '<(default_empty_str)',
+ 'var23': '<(default_int)',
+ 'var24': '<(default_empty_files)',
+ 'var25': '<(default_int_files)',
+ },
+ 'actions': [
+ {
+ 'action_name': 'test_action',
+ 'variables': {
+ 'var7': '<!(echo <(var5)<(var6))',
+ },
+ 'inputs' : [
+ '<(var2)',
+ ],
+ 'outputs': [
+ '<(var4)',
+ '<(var7)',
+ ],
+ 'action': [
+ 'echo',
+ '<(_inputs)',
+ '<(_outputs)',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/variables/commands/commands.gyp.ignore-env.stdout b/third_party/python/gyp/test/variables/commands/commands.gyp.ignore-env.stdout
new file mode 100644
index 0000000000..1b721d805b
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/commands.gyp.ignore-env.stdout
@@ -0,0 +1,96 @@
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'other_letters', 'is_array': '', 'replace': '<(other_letters)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '<(letters_list)EFGHIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFGHIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFG', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'included_variable', 'is_array': '', 'replace': '<(included_variable)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'XYZ', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'included_variable', 'is_array': '', 'replace': '<(included_variable)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'XYZ', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'third_letters', 'is_array': '', 'replace': '<(third_letters)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '<(other_letters)HIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'other_letters', 'is_array': '', 'replace': '<(other_letters)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '<(letters_list)EFGHIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFGHIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_empty_files', 'is_array': '', 'replace': '<(default_empty_files)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_int_files', 'is_array': '', 'replace': '<(default_int_files)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python test.py', 'is_array': '', 'replace': '<!(python test.py)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:838:ExpandVariables Executing command 'python test.py' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'sample\\path\\foo.cpp', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_str', 'is_array': '', 'replace': '<(default_str)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'my_str', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_empty_str', 'is_array': '', 'replace': '<(default_empty_str)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_int', 'is_array': '', 'replace': '<(default_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '<!(python -c "print(\'letters_list\'', 'is_array': '', 'replace': '<(<!(python -c "print(\'letters_list\')', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print(\'letters_list\'', 'is_array': '', 'replace': '<!(python -c "print(\'letters_list\')', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print('letters_list')"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_int', 'is_array': '', 'replace': '<(check_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '5', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'replace': '<!(["python", "-c", "<(pi)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '["python", "-c", "import math; print(math.pi)"]', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command '['python', '-c', 'import math; print(math.pi)']' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print(\'<(letters_list', 'is_array': '', 'replace': '<!(python -c "print(\'<(letters_list)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print(\'ABCD\')"', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print('ABCD')"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print(\'<!(python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "print(\'<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'python -c "import math; print(math.pi)"', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "import math; print(math.pi)"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print(\'3.14159265359 ABCD\')"', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print('3.14159265359 ABCD')"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_str_int', 'is_array': '', 'replace': '<(check_str_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '6', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_int', 'is_array': '', 'replace': '<(check_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '5blah', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_4', 'is_array': '', 'replace': '<(not_int_4)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '13.0', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_3', 'is_array': '', 'replace': '<(not_int_3)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '012', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'negative_int', 'is_array': '', 'replace': '<(negative_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '-15', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_5', 'is_array': '', 'replace': '<(not_int_5)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '+14', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_list_int', 'is_array': '', 'replace': '<(check_list_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '7 8 9', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_2', 'is_array': '', 'replace': '<(not_int_2)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '11 ', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_1', 'is_array': '', 'replace': '<(not_int_1)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output ' 10', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'zero_int', 'is_array': '', 'replace': '<(zero_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_list_int', 'is_array': '', 'replace': '<@(check_list_int)', 'type': '<@', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output [7, 8, 9], recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python test.py', 'is_array': '', 'replace': '<!@(python test.py)', 'type': '<!@', 'command_string': None}
+VARIABLES:input.py:889:ExpandVariables Had cache value for command 'python test.py' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output ['samplepathfoo.cpp'], recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var6', 'is_array': '', 'replace': '<(var6)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'echo <(var5', 'is_array': '', 'replace': '<!(echo <(var5)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var5', 'is_array': '', 'replace': '<(var5)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'echo letters_list', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command 'echo letters_list' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_inputs', 'is_array': '', 'replace': '<(_inputs)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var2', 'is_array': '', 'replace': '<(var2)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output '"3.14159265359 ABCD"', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_outputs', 'is_array': '', 'replace': '<(_outputs)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var4', 'is_array': '', 'replace': '<(var4)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var7', 'is_array': '', 'replace': '<(var7)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD letters_list', recursing.
diff --git a/third_party/python/gyp/test/variables/commands/commands.gyp.stdout b/third_party/python/gyp/test/variables/commands/commands.gyp.stdout
new file mode 100644
index 0000000000..1b721d805b
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/commands.gyp.stdout
@@ -0,0 +1,96 @@
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'other_letters', 'is_array': '', 'replace': '<(other_letters)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '<(letters_list)EFGHIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFGHIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFG', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'included_variable', 'is_array': '', 'replace': '<(included_variable)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'XYZ', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'included_variable', 'is_array': '', 'replace': '<(included_variable)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'XYZ', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'third_letters', 'is_array': '', 'replace': '<(third_letters)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '<(other_letters)HIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'other_letters', 'is_array': '', 'replace': '<(other_letters)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '<(letters_list)EFGHIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFGHIJK', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_empty_files', 'is_array': '', 'replace': '<(default_empty_files)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_int_files', 'is_array': '', 'replace': '<(default_int_files)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python test.py', 'is_array': '', 'replace': '<!(python test.py)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:838:ExpandVariables Executing command 'python test.py' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'sample\\path\\foo.cpp', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_str', 'is_array': '', 'replace': '<(default_str)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'my_str', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_empty_str', 'is_array': '', 'replace': '<(default_empty_str)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_int', 'is_array': '', 'replace': '<(default_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '<!(python -c "print(\'letters_list\'', 'is_array': '', 'replace': '<(<!(python -c "print(\'letters_list\')', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print(\'letters_list\'', 'is_array': '', 'replace': '<!(python -c "print(\'letters_list\')', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print('letters_list')"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_int', 'is_array': '', 'replace': '<(check_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '5', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'replace': '<!(["python", "-c", "<(pi)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '["python", "-c", "import math; print(math.pi)"]', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command '['python', '-c', 'import math; print(math.pi)']' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print(\'<(letters_list', 'is_array': '', 'replace': '<!(python -c "print(\'<(letters_list)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print(\'ABCD\')"', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print('ABCD')"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print(\'<!(python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "print(\'<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'python -c "import math; print(math.pi)"', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "import math; print(math.pi)"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print(\'3.14159265359 ABCD\')"', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print('3.14159265359 ABCD')"' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_str_int', 'is_array': '', 'replace': '<(check_str_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '6', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_int', 'is_array': '', 'replace': '<(check_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '5blah', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_4', 'is_array': '', 'replace': '<(not_int_4)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '13.0', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_3', 'is_array': '', 'replace': '<(not_int_3)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '012', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'negative_int', 'is_array': '', 'replace': '<(negative_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '-15', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_5', 'is_array': '', 'replace': '<(not_int_5)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '+14', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_list_int', 'is_array': '', 'replace': '<(check_list_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '7 8 9', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_2', 'is_array': '', 'replace': '<(not_int_2)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '11 ', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_1', 'is_array': '', 'replace': '<(not_int_1)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output ' 10', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'zero_int', 'is_array': '', 'replace': '<(zero_int)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_list_int', 'is_array': '', 'replace': '<@(check_list_int)', 'type': '<@', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output [7, 8, 9], recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python test.py', 'is_array': '', 'replace': '<!@(python test.py)', 'type': '<!@', 'command_string': None}
+VARIABLES:input.py:889:ExpandVariables Had cache value for command 'python test.py' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output ['samplepathfoo.cpp'], recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var6', 'is_array': '', 'replace': '<(var6)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'echo <(var5', 'is_array': '', 'replace': '<!(echo <(var5)', 'type': '<!', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var5', 'is_array': '', 'replace': '<(var5)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'echo letters_list', recursing.
+VARIABLES:input.py:838:ExpandVariables Executing command 'echo letters_list' in directory 'None'
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_inputs', 'is_array': '', 'replace': '<(_inputs)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var2', 'is_array': '', 'replace': '<(var2)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output '"3.14159265359 ABCD"', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_outputs', 'is_array': '', 'replace': '<(_outputs)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var4', 'is_array': '', 'replace': '<(var4)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
+VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var7', 'is_array': '', 'replace': '<(var7)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
+VARIABLES:input.py:964:ExpandVariables Found output 'ABCD letters_list', recursing.
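The debug transcripts above exercise gyp's expansion forms as recorded by ExpandVariables: '<(name)' substitutes a variable, '<!(command)' runs a shell command and splices in its stdout, and the '@' variants ('<@', '<!@') yield a list rather than a string; each "Found output ..., recursing." line shows the innermost match being replaced and the scan restarting. A minimal, hypothetical Python sketch of that innermost-first substitution loop (illustrative only, not the real input.py code; the list-producing '@' forms are omitted):

    import re
    import subprocess

    # Hypothetical pattern: '<', optional '!' (command) and '@' (list) markers,
    # then a parenthesised body with no nested parentheses, so the innermost
    # expansion is always matched first.
    EXPANSION_RE = re.compile(r'<(?P<bang>!?)@?\((?P<content>[^()]*)\)')

    def expand(value, variables):
        while True:
            match = EXPANSION_RE.search(value)
            if match is None:
                return value
            if match.group('bang'):
                # '<!(...)': execute the command and use its trimmed stdout.
                output = subprocess.check_output(
                    match.group('content'), shell=True).decode().strip()
            else:
                # '<(...)': plain variable lookup.
                output = str(variables[match.group('content')])
            # Corresponds to the "Found output ..., recursing." log lines above.
            value = value[:match.start()] + output + value[match.end():]

    variables = {'letters_list': 'ABCD', 'other_letters': '<(letters_list)EFG'}
    print(expand('<(other_letters)HIJK', variables))  # ABCDEFGHIJK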
diff --git a/third_party/python/gyp/test/variables/commands/commands.gypd.golden b/third_party/python/gyp/test/variables/commands/commands.gypd.golden
new file mode 100644
index 0000000000..13abba26c4
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/commands.gypd.golden
@@ -0,0 +1,66 @@
+{'_DEPTH': '.',
+ 'included_files': ['commands.gyp', 'commands.gypi'],
+ 'targets': [{'actions': [{'action': ['echo',
+ '"3.14159265359 ABCD"',
+ 'ABCD letters_list'],
+ 'action_name': 'test_action',
+ 'inputs': ['3.14159265359 ABCD'],
+ 'outputs': ['ABCD', 'letters_list'],
+ 'variables': {'var7': 'letters_list'}}],
+ 'configurations': {'Default': {}},
+ 'default_configuration': 'Default',
+ 'target_name': 'foo',
+ 'toolset': 'target',
+ 'type': 'none',
+ 'variables': {'var1': '3.14159265359',
+ 'var10': '7 8 9',
+ 'var11': ['7', '8', '9'],
+ 'var12': ' 10',
+ 'var13': '11 ',
+ 'var14': '012',
+ 'var15': '13.0',
+ 'var16': '+14',
+ 'var17': '-15',
+ 'var18': '0',
+ 'var19': ['samplepathfoo.cpp'],
+ 'var2': '3.14159265359 ABCD',
+ 'var20': 'sample\\path\\foo.cpp',
+ 'var21': 'my_str',
+ 'var22': '',
+ 'var23': '0',
+ 'var24': '',
+ 'var25': '0',
+ 'var3': 'ABCD',
+ 'var4': 'ABCD',
+ 'var5': 'letters_',
+ 'var6': 'list',
+ 'var7': '5',
+ 'var8': '5blah',
+ 'var9': '6'}},
+ {'configurations': {'Default': {}},
+ 'default_configuration': 'Default',
+ 'target_name': 'dummy',
+ 'toolset': 'target',
+ 'type': 'none'}],
+ 'variables': {'check_included': 'XYZ',
+ 'check_int': '5',
+ 'check_list_int': ['7', '8', '9'],
+ 'check_lists': ['XYZ', 'ABCDEFGHIJK'],
+ 'check_str_int': '6',
+ 'default_empty_files%': '',
+ 'default_empty_str%': '',
+ 'default_int%': '0',
+ 'default_int_files%': '0',
+ 'default_str%': 'my_str',
+ 'included_variable': 'XYZ',
+ 'letters_list': 'ABCD',
+ 'negative_int': '-15',
+ 'not_int_1': ' 10',
+ 'not_int_2': '11 ',
+ 'not_int_3': '012',
+ 'not_int_4': '13.0',
+ 'not_int_5': '+14',
+ 'other_letters': 'ABCDEFG',
+ 'pi': 'import math; print(math.pi)',
+ 'third_letters': 'ABCDEFGHIJK',
+ 'zero_int': '0'}}
diff --git a/third_party/python/gyp/test/variables/commands/commands.gypi b/third_party/python/gyp/test/variables/commands/commands.gypi
new file mode 100644
index 0000000000..839cb30b7e
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/commands.gypi
@@ -0,0 +1,23 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is included from commands.gyp to test evaluation order of includes.
+{
+ 'variables': {
+ 'included_variable': 'XYZ',
+
+ 'default_str%': 'my_str',
+ 'default_empty_str%': '',
+ 'default_int%': 0,
+
+ 'default_empty_files%': '',
+ 'default_int_files%': 0,
+ },
+ 'targets': [
+ {
+ 'target_name': 'dummy',
+ 'type': 'none',
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/variables/commands/gyptest-commands-ignore-env.py b/third_party/python/gyp/test/variables/commands/gyptest-commands-ignore-env.py
new file mode 100755
index 0000000000..5470d5ce36
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/gyptest-commands-ignore-env.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test that environment variables are ignored when --ignore-environment is
+specified.
+"""
+
+from __future__ import print_function
+import os
+
+import TestGyp
+
+test = TestGyp.TestGyp(format='gypd')
+
+os.environ['GYP_DEFINES'] = 'FOO=BAR'
+os.environ['GYP_GENERATORS'] = 'foo'
+os.environ['GYP_GENERATOR_FLAGS'] = 'genflag=foo'
+os.environ['GYP_GENERATOR_OUTPUT'] = 'somedir'
+
+expect = test.read('commands.gyp.ignore-env.stdout').replace('\r\n', '\n')
+
+test.run_gyp('commands.gyp',
+ '--debug', 'variables',
+ '--ignore-environment',
+ stdout=expect, ignore_line_numbers=True)
+
+# Verify the commands.gypd against the checked-in expected contents.
+#
+# Normally, we should canonicalize line endings in the expected
+# contents file setting the Subversion svn:eol-style to native,
+# but that would still fail if multiple systems are sharing a single
+# workspace on a network-mounted file system. Consequently, we
+# massage the Windows line endings ('\r\n') in the output to the
+# checked-in UNIX endings ('\n').
+
+contents = test.read('commands.gypd').replace('\r', '')
+expect = test.read('commands.gypd.golden').replace('\r', '')
+if not test.match(contents, expect):
+ print("Unexpected contents of `commands.gypd'")
+ test.diff(expect, contents, 'commands.gypd ')
+ test.fail_test()
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/variables/commands/gyptest-commands-repeated-multidir.py b/third_party/python/gyp/test/variables/commands/gyptest-commands-repeated-multidir.py
new file mode 100755
index 0000000000..21e0487565
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/gyptest-commands-repeated-multidir.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test variable expansion of '<!()' syntax commands where they are evaluated
+more than once from different directories.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+# This tests GYP's cache of commands, ensuring that the directory a command is
+# run from is part of its cache key. Parallelism may lead to multiple cache
+# lookups failing, resulting in the command being run multiple times by
+# chance, not by GYP's logic. Turn off parallelism to ensure that the logic is
+# being tested.
+test.run_gyp('repeated_multidir/main.gyp', '--no-parallel')
+
+test.pass_test()
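As the comment above explains, the point of this test is that gyp caches '<!()' command results with the originating directory as part of the cache key, and that parallelism must be disabled so a cache hit (rather than luck) is what prevents re-running. A hypothetical sketch of such a cache, keyed on (command, directory) as the "Had cache value for command ... in directory ..." log lines suggest (names are illustrative, not gyp's internals):

    import subprocess

    _command_cache = {}

    def run_cached(command, directory):
        # Keying on the directory as well as the command keeps identical
        # commands run from dir_1 and dir_2 from sharing one cached result.
        key = (command, directory)
        if key not in _command_cache:
            _command_cache[key] = subprocess.check_output(
                command, shell=True, cwd=directory).decode().strip()
        return _command_cache[key]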
diff --git a/third_party/python/gyp/test/variables/commands/gyptest-commands-repeated.py b/third_party/python/gyp/test/variables/commands/gyptest-commands-repeated.py
new file mode 100755
index 0000000000..fcf98ee02a
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/gyptest-commands-repeated.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test variable expansion of '<!()' syntax commands where they are evaluated
+more than once.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+test = TestGyp.TestGyp(format='gypd')
+
+expect = test.read('commands-repeated.gyp.stdout').replace('\r\n', '\n')
+
+test.run_gyp('commands-repeated.gyp',
+ '--debug', 'variables',
+ stdout=expect, ignore_line_numbers=True)
+
+# Verify the commands-repeated.gypd against the checked-in expected contents.
+#
+# Normally, we should canonicalize line endings in the expected
+# contents file setting the Subversion svn:eol-style to native,
+# but that would still fail if multiple systems are sharing a single
+# workspace on a network-mounted file system. Consequently, we
+# massage the Windows line endings ('\r\n') in the output to the
+# checked-in UNIX endings ('\n').
+
+contents = test.read('commands-repeated.gypd').replace('\r\n', '\n')
+expect = test.read('commands-repeated.gypd.golden').replace('\r\n', '\n')
+if not test.match(contents, expect):
+ print("Unexpected contents of `commands-repeated.gypd'")
+ test.diff(expect, contents, 'commands-repeated.gypd ')
+ test.fail_test()
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/variables/commands/gyptest-commands.py b/third_party/python/gyp/test/variables/commands/gyptest-commands.py
new file mode 100755
index 0000000000..3251120ff4
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/gyptest-commands.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test variable expansion of '<!()' syntax commands.
+"""
+
+from __future__ import print_function
+import os
+
+import TestGyp
+
+test = TestGyp.TestGyp(format='gypd')
+
+expect = test.read('commands.gyp.stdout').replace('\r', '')
+
+test.run_gyp('commands.gyp',
+ '--debug', 'variables',
+ stdout=expect, ignore_line_numbers=True)
+
+# Verify the commands.gypd against the checked-in expected contents.
+#
+# Normally, we should canonicalize line endings in the expected
+# contents file setting the Subversion svn:eol-style to native,
+# but that would still fail if multiple systems are sharing a single
+# workspace on a network-mounted file system. Consequently, we
+# massage the Windows line endings ('\r\n') in the output to the
+# checked-in UNIX endings ('\n').
+
+contents = test.read('commands.gypd').replace('\r', '')
+expect = test.read('commands.gypd.golden').replace('\r', '')
+if not test.match(contents, expect):
+ print("Unexpected contents of `commands.gypd'")
+ test.diff(expect, contents, 'commands.gypd ')
+ test.fail_test()
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/variables/commands/repeated_multidir/dir_1/test_1.gyp b/third_party/python/gyp/test/variables/commands/repeated_multidir/dir_1/test_1.gyp
new file mode 100644
index 0000000000..328fc306cd
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/repeated_multidir/dir_1/test_1.gyp
@@ -0,0 +1,13 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'expected_value': 'dir_1',
+ 'target_name': 'target_1',
+ },
+ 'includes': [
+ '../repeated_command_common.gypi',
+ ],
+}
diff --git a/third_party/python/gyp/test/variables/commands/repeated_multidir/dir_2/test_2.gyp b/third_party/python/gyp/test/variables/commands/repeated_multidir/dir_2/test_2.gyp
new file mode 100644
index 0000000000..18e0c62c93
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/repeated_multidir/dir_2/test_2.gyp
@@ -0,0 +1,13 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'expected_value': 'dir_2',
+ 'target_name': 'target_2',
+ },
+ 'includes': [
+ '../repeated_command_common.gypi',
+ ],
+}
diff --git a/third_party/python/gyp/test/variables/commands/repeated_multidir/main.gyp b/third_party/python/gyp/test/variables/commands/repeated_multidir/main.gyp
new file mode 100644
index 0000000000..5beeeb7244
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/repeated_multidir/main.gyp
@@ -0,0 +1,16 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'main',
+ 'type': 'none',
+ 'dependencies': [
+ 'dir_1/test_1.gyp:target_1',
+ 'dir_2/test_2.gyp:target_2',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/variables/commands/repeated_multidir/print_cwd_basename.py b/third_party/python/gyp/test/variables/commands/repeated_multidir/print_cwd_basename.py
new file mode 100755
index 0000000000..9b206bb3d4
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/repeated_multidir/print_cwd_basename.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+import os
+import os.path
+
+print(os.path.basename(os.getcwd()))
diff --git a/third_party/python/gyp/test/variables/commands/repeated_multidir/repeated_command_common.gypi b/third_party/python/gyp/test/variables/commands/repeated_multidir/repeated_command_common.gypi
new file mode 100644
index 0000000000..74366771aa
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/repeated_multidir/repeated_command_common.gypi
@@ -0,0 +1,25 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ # This command will be run from the directories of the .gyp files that
+ # include this .gypi, the subdirectories dir_1 and dir_2, so use a
+ # relative path from those directories to the script.
+ 'observed_value': '<!(python ../print_cwd_basename.py)',
+ },
+ 'targets': [
+ {
+ 'target_name': '<(target_name)',
+ 'type': 'none',
+ 'conditions': [
+ ['observed_value != expected_value', {
+ # Attempt to expand an undefined variable. This triggers a GYP
+ # error.
+ 'assertion': '<(observed_value_must_equal_expected_value)',
+ }],
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/variables/commands/test.py b/third_party/python/gyp/test/variables/commands/test.py
new file mode 100644
index 0000000000..eb64f95383
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/test.py
@@ -0,0 +1,7 @@
+# Copyright (c) 2017 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+print("sample\\path\\foo.cpp")
diff --git a/third_party/python/gyp/test/variables/commands/update_golden b/third_party/python/gyp/test/variables/commands/update_golden
new file mode 100755
index 0000000000..4fcf1eb961
--- /dev/null
+++ b/third_party/python/gyp/test/variables/commands/update_golden
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+python ../../../gyp --debug variables --format gypd --depth . commands.gyp > commands.gyp.stdout
+python ../../../gyp --ignore-environment --debug variables --format gypd --depth . commands.gyp > commands.gyp.ignore-env.stdout
+cp -f commands.gypd commands.gypd.golden
+python ../../../gyp --debug variables --format gypd --depth . commands-repeated.gyp > commands-repeated.gyp.stdout
+cp -f commands-repeated.gypd commands-repeated.gypd.golden
diff --git a/third_party/python/gyp/test/variables/empty/empty.gyp b/third_party/python/gyp/test/variables/empty/empty.gyp
new file mode 100644
index 0000000000..207be06fe7
--- /dev/null
+++ b/third_party/python/gyp/test/variables/empty/empty.gyp
@@ -0,0 +1,13 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': ['empty.gypi'],
+ 'targets': [
+ {
+ 'target_name': 'empty',
+ 'type': 'none',
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/variables/empty/empty.gypi b/third_party/python/gyp/test/variables/empty/empty.gypi
new file mode 100644
index 0000000000..e95031fca5
--- /dev/null
+++ b/third_party/python/gyp/test/variables/empty/empty.gypi
@@ -0,0 +1,9 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ '': '',
+ },
+}
diff --git a/third_party/python/gyp/test/variables/empty/gyptest-empty.py b/third_party/python/gyp/test/variables/empty/gyptest-empty.py
new file mode 100755
index 0000000000..4cbe166fdc
--- /dev/null
+++ b/third_party/python/gyp/test/variables/empty/gyptest-empty.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test that empty variable names don't cause infinite loops.
+"""
+
+import os
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('empty.gyp')
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/variables/filelist/filelist.gyp.stdout b/third_party/python/gyp/test/variables/filelist/filelist.gyp.stdout
new file mode 100644
index 0000000000..595a19c684
--- /dev/null
+++ b/third_party/python/gyp/test/variables/filelist/filelist.gyp.stdout
@@ -0,0 +1,26 @@
+VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'names.txt <@(names', 'is_array': '', 'replace': '<|(names.txt <@(names)', 'type': '<|', 'command_string': None}
+VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'names', 'is_array': '', 'replace': '<@(names)', 'type': '<@', 'command_string': None}
+VARIABLES:input.py:797:ExpandVariables Found output 'names.txt John Jacob Jingleheimer Schmidt', recursing.
+VARIABLES:input.py:797:ExpandVariables Found output 'names.txt', recursing.
+VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'names_listfile', 'is_array': '', 'replace': '<(names_listfile)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:797:ExpandVariables Found output 'names.txt', recursing.
+VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'names_listfile', 'is_array': '', 'replace': '<(names_listfile)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:797:ExpandVariables Found output 'names.txt', recursing.
+VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'cat <(names_listfile', 'is_array': '', 'replace': '<!@(cat <(names_listfile)', 'type': '<!@', 'command_string': None}
+VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'names_listfile', 'is_array': '', 'replace': '<(names_listfile)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:797:ExpandVariables Found output 'cat names.txt', recursing.
+VARIABLES:input.py:676:ExpandVariables Executing command 'cat names.txt' in directory 'src'
+VARIABLES:input.py:797:ExpandVariables Found output ['John', 'Jacob', 'Jingleheimer', 'Schmidt'], recursing.
+VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'sources.txt <@(_sources', 'is_array': '', 'replace': '<|(sources.txt <@(_sources)', 'type': '<|', 'command_string': None}
+VARIABLES:input.py:562:ExpandVariables Matches: {'content': '_sources', 'is_array': '', 'replace': '<@(_sources)', 'type': '<@', 'command_string': None}
+VARIABLES:input.py:797:ExpandVariables Found output 'sources.txt John Jacob Jingleheimer Schmidt', recursing.
+VARIABLES:input.py:797:ExpandVariables Found output 'sources.txt', recursing.
+VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'sources_listfile', 'is_array': '', 'replace': '<(sources_listfile)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:797:ExpandVariables Found output 'sources.txt', recursing.
+VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'sources_listfile', 'is_array': '', 'replace': '<(sources_listfile)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:797:ExpandVariables Found output 'sources.txt', recursing.
+VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'cat <(sources_listfile', 'is_array': '', 'replace': '<!@(cat <(sources_listfile)', 'type': '<!@', 'command_string': None}
+VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'sources_listfile', 'is_array': '', 'replace': '<(sources_listfile)', 'type': '<', 'command_string': None}
+VARIABLES:input.py:797:ExpandVariables Found output 'cat sources.txt', recursing.
+VARIABLES:input.py:676:ExpandVariables Executing command 'cat sources.txt' in directory 'src'
+VARIABLES:input.py:797:ExpandVariables Found output ['John', 'Jacob', 'Jingleheimer', 'Schmidt'], recursing.
diff --git a/third_party/python/gyp/test/variables/filelist/filelist.gypd.golden b/third_party/python/gyp/test/variables/filelist/filelist.gypd.golden
new file mode 100644
index 0000000000..09d9116047
--- /dev/null
+++ b/third_party/python/gyp/test/variables/filelist/filelist.gypd.golden
@@ -0,0 +1,43 @@
+{'_DEPTH': '.',
+ 'included_files': ['filelist.gyp'],
+ 'targets': [{'actions': [{'action': ['python', 'dummy.py', 'names.txt'],
+ 'action_name': 'test_action',
+ 'inputs': ['names.txt',
+ 'John',
+ 'Jacob',
+ 'Jingleheimer',
+ 'Schmidt'],
+ 'outputs': ['dummy_foo']}],
+ 'configurations': {'Default': {}},
+ 'default_configuration': 'Default',
+ 'target_name': 'foo',
+ 'toolset': 'target',
+ 'type': 'none',
+ 'variables': {'names_listfile': 'names.txt'}},
+ {'actions': [{'action': ['python', 'dummy.py', 'sources.txt'],
+ 'action_name': 'test_action',
+ 'inputs': ['sources.txt',
+ 'John',
+ 'Jacob',
+ 'Jingleheimer',
+ 'Schmidt'],
+ 'outputs': ['dummy_foo']}],
+ 'configurations': {'Default': {}},
+ 'default_configuration': 'Default',
+ 'sources': ['John', 'Jacob', 'Jingleheimer', 'Schmidt'],
+ 'sources_excluded': ['Astor', 'Jerome', 'Schultz'],
+ 'target_name': 'bar',
+ 'toolset': 'target',
+ 'type': 'none',
+ 'variables': {'sources_listfile': 'sources.txt'}}],
+ 'variables': {'names': ['John',
+ 'Jacob',
+ 'Astor',
+ 'Jingleheimer',
+ 'Jerome',
+ 'Schmidt',
+ 'Schultz'],
+ 'names!': ['Astor'],
+ 'names/': [['exclude', 'Sch.*'],
+ ['include', '.*dt'],
+ ['exclude', 'Jer.*']]}}
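The golden file above also documents the list-filtering syntax: 'names!' removes entries outright, and 'names/' applies regex rules in order, where a later 'include' can rescue an item a previous 'exclude' dropped; that is how the seven-name list becomes the four inputs John, Jacob, Jingleheimer, Schmidt. A hypothetical sketch reproducing that result (illustrative helper, not gyp's implementation):

    import re

    def apply_list_filters(items, removals, rules):
        # '!' entries are excluded unconditionally; '/' rules then run in
        # order, toggling items between excluded and included.
        excluded = set(removals)
        for action, pattern in rules:
            regex = re.compile(pattern)
            for item in items:
                if regex.match(item):
                    if action == 'exclude':
                        excluded.add(item)
                    else:
                        excluded.discard(item)
        kept = [i for i in items if i not in excluded]
        dropped = [i for i in items if i in excluded]
        return kept, dropped

    names = ['John', 'Jacob', 'Astor', 'Jingleheimer', 'Jerome', 'Schmidt', 'Schultz']
    kept, dropped = apply_list_filters(
        names, ['Astor'],
        [['exclude', 'Sch.*'], ['include', '.*dt'], ['exclude', 'Jer.*']])
    print(kept)     # ['John', 'Jacob', 'Jingleheimer', 'Schmidt']
    print(dropped)  # ['Astor', 'Jerome', 'Schultz']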
diff --git a/third_party/python/gyp/test/variables/filelist/gyptest-filelist-golden.py b/third_party/python/gyp/test/variables/filelist/gyptest-filelist-golden.py
new file mode 100644
index 0000000000..3ddc6698f8
--- /dev/null
+++ b/third_party/python/gyp/test/variables/filelist/gyptest-filelist-golden.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test variable expansion of '<|(list.txt ...)' syntax commands.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+
+import TestGyp
+
+test = TestGyp.TestGyp(format='gypd')
+
+expect = test.read('filelist.gyp.stdout')
+if sys.platform == 'win32':
+ expect = expect.replace('/', r'\\').replace('\r\n', '\n')
+
+test.run_gyp('src/filelist.gyp',
+ '--debug', 'variables',
+ stdout=expect, ignore_line_numbers=True)
+
+# Verify the filelist.gypd against the checked-in expected contents.
+#
+# Normally, we should canonicalize line endings in the expected
+# contents file setting the Subversion svn:eol-style to native,
+# but that would still fail if multiple systems are sharing a single
+# workspace on a network-mounted file system. Consequently, we
+# massage the Windows line endings ('\r\n') in the output to the
+# checked-in UNIX endings ('\n').
+
+contents = test.read('src/filelist.gypd').replace(
+ '\r', '').replace('\\\\', '/')
+expect = test.read('filelist.gypd.golden').replace('\r', '')
+if not test.match(contents, expect):
+ print("Unexpected contents of `src/filelist.gypd'")
+ test.diff(expect, contents, 'src/filelist.gypd ')
+ test.fail_test()
+
+contents = test.read('src/names.txt')
+expect = 'John\nJacob\nJingleheimer\nSchmidt\n'
+if not test.match(contents, expect):
+ print("Unexpected contents of `src/names.txt'")
+ test.diff(expect, contents, 'src/names.txt ')
+ test.fail_test()
+
+test.pass_test()
+
diff --git a/third_party/python/gyp/test/variables/filelist/gyptest-filelist.py b/third_party/python/gyp/test/variables/filelist/gyptest-filelist.py
new file mode 100755
index 0000000000..b12084c21c
--- /dev/null
+++ b/third_party/python/gyp/test/variables/filelist/gyptest-filelist.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test variable expansion of '<|(list.txt ...)' syntax commands.
+"""
+
+import os
+import sys
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+CHDIR = 'src'
+test.run_gyp('filelist2.gyp', chdir=CHDIR)
+
+test.build('filelist2.gyp', 'foo', chdir=CHDIR)
+contents = test.read('src/dummy_foo').replace('\r', '')
+expect = 'John\nJacob\nJingleheimer\nSchmidt\n'
+if not test.match(contents, expect):
+ print("Unexpected contents of `src/dummy_foo'")
+ test.diff(expect, contents, 'src/dummy_foo')
+ test.fail_test()
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/variables/filelist/src/dummy.py b/third_party/python/gyp/test/variables/filelist/src/dummy.py
new file mode 100644
index 0000000000..e41fc9f8e4
--- /dev/null
+++ b/third_party/python/gyp/test/variables/filelist/src/dummy.py
@@ -0,0 +1,5 @@
+#!/usr/bin/env python
+
+import sys
+
+open(sys.argv[1], 'w').write(open(sys.argv[2]).read())
diff --git a/third_party/python/gyp/test/variables/filelist/src/filelist.gyp b/third_party/python/gyp/test/variables/filelist/src/filelist.gyp
new file mode 100644
index 0000000000..df48eb3e4a
--- /dev/null
+++ b/third_party/python/gyp/test/variables/filelist/src/filelist.gyp
@@ -0,0 +1,93 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a test to make sure that <|(foo.txt a b c) generates
+# a pre-calculated file list at gyp time and returns foo.txt.
+# This feature is useful to work around limits in the number of arguments that
+# can be passed to rule/action.
+
+{
+ 'variables': {
+ 'names': [
+ 'John',
+ 'Jacob',
+ 'Astor',
+ 'Jingleheimer',
+ 'Jerome',
+ 'Schmidt',
+ 'Schultz',
+ ],
+ 'names!': [
+ 'Astor',
+ ],
+ 'names/': [
+ ['exclude', 'Sch.*'],
+ ['include', '.*dt'],
+ ['exclude', 'Jer.*'],
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'none',
+ 'variables': {
+ 'names_listfile': '<|(names.txt <@(names))',
+ },
+ 'actions': [
+ {
+ 'action_name': 'test_action',
+ 'inputs' : [
+ '<(names_listfile)',
+ '<!@(cat <(names_listfile))',
+ ],
+ 'outputs': [
+ 'dummy_foo',
+ ],
+ 'action': [
+ 'python', 'dummy.py', '<(names_listfile)',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'bar',
+ 'type': 'none',
+ 'sources': [
+ 'John',
+ 'Jacob',
+ 'Astor',
+ 'Jingleheimer',
+ 'Jerome',
+ 'Schmidt',
+ 'Schultz',
+ ],
+ 'sources!': [
+ 'Astor',
+ ],
+ 'sources/': [
+ ['exclude', 'Sch.*'],
+ ['include', '.*dt'],
+ ['exclude', 'Jer.*'],
+ ],
+ 'variables': {
+ 'sources_listfile': '<|(sources.txt <@(_sources))',
+ },
+ 'actions': [
+ {
+ 'action_name': 'test_action',
+ 'inputs' : [
+ '<(sources_listfile)',
+ '<!@(cat <(sources_listfile))',
+ ],
+ 'outputs': [
+ 'dummy_foo',
+ ],
+ 'action': [
+ 'python', 'dummy.py', '<(sources_listfile)',
+ ],
+ },
+ ],
+ },
+ ],
+}
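Per the header comment, '<|(names.txt <@(names))' writes the already-expanded word list to names.txt at gyp time and then evaluates to just the file name, which is why the golden .gypd records only 'names.txt' in the variable while src/names.txt holds one name per line. A minimal, hypothetical sketch of that behaviour (helper name and file layout assumed for illustration):

    import os

    def make_list_file(contents, build_dir='.'):
        # 'contents' arrives already expanded, e.g.
        # 'names.txt John Jacob Jingleheimer Schmidt'; the first word names the
        # file to create, the remaining words are written one per line, and
        # only the file name is returned for use in inputs/actions.
        filename, _, words = contents.partition(' ')
        with open(os.path.join(build_dir, filename), 'w') as f:
            f.write('\n'.join(words.split()) + '\n')
        return filename

    print(make_list_file('names.txt John Jacob Jingleheimer Schmidt'))  # names.txt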
diff --git a/third_party/python/gyp/test/variables/filelist/src/filelist2.gyp b/third_party/python/gyp/test/variables/filelist/src/filelist2.gyp
new file mode 100644
index 0000000000..ec215dbb76
--- /dev/null
+++ b/third_party/python/gyp/test/variables/filelist/src/filelist2.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a test to make sure that <|(foo.txt a b c) generates
+# a pre-calculated file list at gyp time and returns foo.txt.
+# This feature is useful to work around limits in the number of arguments that
+# can be passed to rule/action.
+
+{
+ 'variables': {
+ 'names': [
+ 'John',
+ 'Jacob',
+ 'Jingleheimer',
+ 'Schmidt',
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'none',
+ 'variables': {
+ 'names_listfile': '<|(names.txt <@(names))',
+ },
+ 'actions': [
+ {
+ 'action_name': 'test_action',
+ 'msvs_cygwin_shell': 0,
+ 'inputs' : [ '<(names_listfile)' ],
+ 'outputs': [ 'dummy_foo' ],
+ 'action': [
+ 'python', 'dummy.py', '<@(_outputs)', '<(names_listfile)',
+ ],
+ },
+ ],
+ },
+ ],
+}
+
diff --git a/third_party/python/gyp/test/variables/filelist/update_golden b/third_party/python/gyp/test/variables/filelist/update_golden
new file mode 100755
index 0000000000..b4d489a342
--- /dev/null
+++ b/third_party/python/gyp/test/variables/filelist/update_golden
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+python ../../../gyp --debug variables --debug general --format gypd --depth . src/filelist.gyp > filelist.gyp.stdout
+cp -f src/filelist.gypd filelist.gypd.golden
diff --git a/third_party/python/gyp/test/variables/latelate/gyptest-latelate.py b/third_party/python/gyp/test/variables/latelate/gyptest-latelate.py
new file mode 100755
index 0000000000..2d77dfec5e
--- /dev/null
+++ b/third_party/python/gyp/test/variables/latelate/gyptest-latelate.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that ^(latelate) style variables work.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('latelate.gyp', chdir='src')
+
+test.relocate('src', 'relocate/src')
+
+test.build('latelate.gyp', test.ALL, chdir='relocate/src')
+
+test.run_built_executable(
+ 'program', chdir='relocate/src', stdout='program.cc\n')
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/variables/latelate/src/latelate.gyp b/third_party/python/gyp/test/variables/latelate/src/latelate.gyp
new file mode 100644
index 0000000000..312f3765b6
--- /dev/null
+++ b/third_party/python/gyp/test/variables/latelate/src/latelate.gyp
@@ -0,0 +1,34 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'target_conditions': [
+ ['has_lame==1', {
+ 'sources/': [
+ ['exclude', 'lame'],
+ ],
+ }],
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'variables': {
+ 'has_lame': 1,
+ },
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ],
+ 'defines': [
+ 'FOO="^(_sources)"',
+ ],
+ 'sources': [
+ 'program.cc',
+ 'this_is_lame.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/variables/latelate/src/program.cc b/third_party/python/gyp/test/variables/latelate/src/program.cc
new file mode 100644
index 0000000000..97c98ae5b9
--- /dev/null
+++ b/third_party/python/gyp/test/variables/latelate/src/program.cc
@@ -0,0 +1,13 @@
+/*
+ * Copyright (c) 2012 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+
+int main(void) {
+ printf(FOO "\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/variables/variable-in-path/C1/hello.cc b/third_party/python/gyp/test/variables/variable-in-path/C1/hello.cc
new file mode 100644
index 0000000000..1711567ef5
--- /dev/null
+++ b/third_party/python/gyp/test/variables/variable-in-path/C1/hello.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/variables/variable-in-path/gyptest-variable-in-path.py b/third_party/python/gyp/test/variables/variable-in-path/gyptest-variable-in-path.py
new file mode 100644
index 0000000000..b73a279da7
--- /dev/null
+++ b/third_party/python/gyp/test/variables/variable-in-path/gyptest-variable-in-path.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure the <(CONFIGURATION_NAME) variable is correctly expanded.
+"""
+
+import TestGyp
+
+import sys
+
+test = TestGyp.TestGyp()
+test.set_configuration('C1')
+
+test.run_gyp('variable-in-path.gyp')
+test.build('variable-in-path.gyp', 'hello1')
+test.build('variable-in-path.gyp', 'hello2')
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/variables/variable-in-path/variable-in-path.gyp b/third_party/python/gyp/test/variables/variable-in-path/variable-in-path.gyp
new file mode 100644
index 0000000000..908d21eb66
--- /dev/null
+++ b/third_party/python/gyp/test/variables/variable-in-path/variable-in-path.gyp
@@ -0,0 +1,31 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello1',
+ 'type': 'executable',
+ 'sources': [
+ '<(CONFIGURATION_NAME)/hello.cc',
+ ],
+ },
+ {
+ 'target_name': 'hello2',
+ 'type': 'executable',
+ 'sources': [
+ './<(CONFIGURATION_NAME)/hello.cc',
+ ],
+ },
+ ],
+ 'target_defaults': {
+ 'default_configuration': 'C1',
+ 'configurations': {
+ 'C1': {
+ },
+ 'C2': {
+ },
+ },
+ },
+}
diff --git a/third_party/python/gyp/test/win/asm-files/asm-files.gyp b/third_party/python/gyp/test/win/asm-files/asm-files.gyp
new file mode 100644
index 0000000000..b1f132ceea
--- /dev/null
+++ b/third_party/python/gyp/test/win/asm-files/asm-files.gyp
@@ -0,0 +1,17 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'sources_with_asm',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.cc',
+ 'b.s',
+ 'c.S',
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/asm-files/b.s b/third_party/python/gyp/test/win/asm-files/b.s
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/win/asm-files/b.s
diff --git a/third_party/python/gyp/test/win/asm-files/c.S b/third_party/python/gyp/test/win/asm-files/c.S
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/win/asm-files/c.S
diff --git a/third_party/python/gyp/test/win/asm-files/hello.cc b/third_party/python/gyp/test/win/asm-files/hello.cc
new file mode 100644
index 0000000000..1711567ef5
--- /dev/null
+++ b/third_party/python/gyp/test/win/asm-files/hello.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/batch-file-action/batch-file-action.gyp b/third_party/python/gyp/test/win/batch-file-action/batch-file-action.gyp
new file mode 100644
index 0000000000..e4db9af9d3
--- /dev/null
+++ b/third_party/python/gyp/test/win/batch-file-action/batch-file-action.gyp
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_batch',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'copy_to_output',
+ 'inputs': ['infile'],
+ 'outputs': ['outfile'],
+ 'action': ['somecmd.bat', 'infile', 'outfile'],
+ 'msvs_cygwin_shell': 0,
+ }
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/batch-file-action/infile b/third_party/python/gyp/test/win/batch-file-action/infile
new file mode 100644
index 0000000000..3f9177e45e
--- /dev/null
+++ b/third_party/python/gyp/test/win/batch-file-action/infile
@@ -0,0 +1 @@
+input
diff --git a/third_party/python/gyp/test/win/batch-file-action/somecmd.bat b/third_party/python/gyp/test/win/batch-file-action/somecmd.bat
new file mode 100644
index 0000000000..d487753743
--- /dev/null
+++ b/third_party/python/gyp/test/win/batch-file-action/somecmd.bat
@@ -0,0 +1,5 @@
+@echo off
+:: The redirs to nul are important. %2 can end up being an unterminated "'d
+:: string, so the remainder of the command line becomes the target file name,
+:: which in turn fails because it's a filename containing >, nul, etc.
+copy /y %1 %2 >nul 2>nul
diff --git a/third_party/python/gyp/test/win/command-quote/a.S b/third_party/python/gyp/test/win/command-quote/a.S
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/win/command-quote/a.S
diff --git a/third_party/python/gyp/test/win/command-quote/bat with spaces.bat b/third_party/python/gyp/test/win/command-quote/bat with spaces.bat
new file mode 100644
index 0000000000..dc3508f9a9
--- /dev/null
+++ b/third_party/python/gyp/test/win/command-quote/bat with spaces.bat
@@ -0,0 +1,7 @@
+@echo off
+
+:: Copyright (c) 2012 Google Inc. All rights reserved.
+:: Use of this source code is governed by a BSD-style license that can be
+:: found in the LICENSE file.
+
+copy %1 %2
diff --git a/third_party/python/gyp/test/win/command-quote/command-quote.gyp b/third_party/python/gyp/test/win/command-quote/command-quote.gyp
new file mode 100644
index 0000000000..faf724674f
--- /dev/null
+++ b/third_party/python/gyp/test/win/command-quote/command-quote.gyp
@@ -0,0 +1,79 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'msvs_cygwin_dirs': ['../../../../../<(DEPTH)/third_party/cygwin'],
+ },
+ 'targets': [
+ {
+ 'target_name': 'test_batch',
+ 'type': 'none',
+ 'rules': [
+ {
+ 'rule_name': 'build_with_batch',
+ 'msvs_cygwin_shell': 0,
+ 'extension': 'S',
+ 'outputs': ['output.obj'],
+ 'action': ['call go.bat', '<(RULE_INPUT_PATH)', 'output.obj'],
+ },],
+ 'sources': ['a.S'],
+ },
+ {
+ 'target_name': 'test_call_separate',
+ 'type': 'none',
+ 'rules': [
+ {
+ 'rule_name': 'build_with_batch2',
+ 'msvs_cygwin_shell': 0,
+ 'extension': 'S',
+ 'outputs': ['output2.obj'],
+ 'action': ['call', 'go.bat', '<(RULE_INPUT_PATH)', 'output2.obj'],
+ },],
+ 'sources': ['a.S'],
+ },
+ {
+ 'target_name': 'test_with_spaces',
+ 'type': 'none',
+ 'rules': [
+ {
+ 'rule_name': 'build_with_batch3',
+ 'msvs_cygwin_shell': 0,
+ 'extension': 'S',
+ 'outputs': ['output3.obj'],
+ 'action': ['bat with spaces.bat', '<(RULE_INPUT_PATH)', 'output3.obj'],
+ },],
+ 'sources': ['a.S'],
+ },
+ {
+ 'target_name': 'test_with_double_quotes',
+ 'type': 'none',
+ 'rules': [
+ {
+ 'rule_name': 'build_with_batch3',
+ 'msvs_cygwin_shell': 1,
+ 'extension': 'S',
+ 'outputs': ['output4.obj'],
+ 'arguments': ['-v'],
+ 'action': ['python', '-c', 'import shutil; '
+ 'shutil.copy("<(RULE_INPUT_PATH)", "output4.obj")'],
+ },],
+ 'sources': ['a.S'],
+ },
+ {
+ 'target_name': 'test_with_single_quotes',
+ 'type': 'none',
+ 'rules': [
+ {
+ 'rule_name': 'build_with_batch3',
+ 'msvs_cygwin_shell': 1,
+ 'extension': 'S',
+ 'outputs': ['output5.obj'],
+ 'action': ['python', '-c', "import shutil; "
+ "shutil.copy('<(RULE_INPUT_PATH)', 'output5.obj')"],
+ },],
+ 'sources': ['a.S'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/command-quote/go.bat b/third_party/python/gyp/test/win/command-quote/go.bat
new file mode 100644
index 0000000000..dc3508f9a9
--- /dev/null
+++ b/third_party/python/gyp/test/win/command-quote/go.bat
@@ -0,0 +1,7 @@
+@echo off
+
+:: Copyright (c) 2012 Google Inc. All rights reserved.
+:: Use of this source code is governed by a BSD-style license that can be
+:: found in the LICENSE file.
+
+copy %1 %2
diff --git a/third_party/python/gyp/test/win/command-quote/subdir/and/another/in-subdir.gyp b/third_party/python/gyp/test/win/command-quote/subdir/and/another/in-subdir.gyp
new file mode 100644
index 0000000000..3dff4c40b9
--- /dev/null
+++ b/third_party/python/gyp/test/win/command-quote/subdir/and/another/in-subdir.gyp
@@ -0,0 +1,27 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_batch_depth',
+ 'type': 'none',
+ 'variables': {
+        # Taken from native_client/build/common.gypi. The value looks
+        # unintentional (a string wrapped in a one-element list), but since it
+        # works on the other generators it should work here too.
+ 'filepath': [ 'call <(DEPTH)/../../../go.bat' ],
+ },
+ 'rules': [
+ {
+ 'rule_name': 'build_with_batch4',
+ 'msvs_cygwin_shell': 0,
+ 'extension': 'S',
+ 'outputs': ['output4.obj'],
+ 'action': ['<@(filepath)', '<(RULE_INPUT_PATH)', 'output4.obj'],
+ },],
+ 'sources': ['<(DEPTH)\\..\\..\\..\\a.S'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/additional-include-dirs.cc b/third_party/python/gyp/test/win/compiler-flags/additional-include-dirs.cc
new file mode 100644
index 0000000000..f1e11dd12d
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/additional-include-dirs.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// No path qualification to test compiler include dir specification.
+#include "header.h"
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/additional-include-dirs.gyp b/third_party/python/gyp/test/win/compiler-flags/additional-include-dirs.gyp
new file mode 100644
index 0000000000..42c7e849f6
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/additional-include-dirs.gyp
@@ -0,0 +1,20 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_incs',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalIncludeDirectories': [
+ 'subdir',
+ ],
+ }
+ },
+ 'sources': ['additional-include-dirs.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/additional-options.cc b/third_party/python/gyp/test/win/compiler-flags/additional-options.cc
new file mode 100644
index 0000000000..c79572bafa
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/additional-options.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ // Generate a warning that will appear at level 4, but not level 1
+ // (truncation and unused local).
+ char c = 123456;
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/additional-options.gyp b/third_party/python/gyp/test/win/compiler-flags/additional-options.gyp
new file mode 100644
index 0000000000..6a365a2062
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/additional-options.gyp
@@ -0,0 +1,31 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_additional_none',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarningLevel': '4',
+ 'WarnAsError': 'true',
+ }
+ },
+ 'sources': ['additional-options.cc'],
+ },
+ {
+ 'target_name': 'test_additional_one',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarningLevel': '4',
+ 'WarnAsError': 'true',
+ 'AdditionalOptions': [ '/W1' ],
+ }
+ },
+ 'sources': ['additional-options.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/analysis.gyp b/third_party/python/gyp/test/win/compiler-flags/analysis.gyp
new file mode 100644
index 0000000000..97e942258f
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/analysis.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_analysis_on',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnablePREfast': 'true',
+ 'WarnAsError': 'true',
+ },
+ },
+ 'sources': ['uninit.cc'],
+ },
+ {
+ 'target_name': 'test_analysis_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnablePREfast': 'false',
+ 'WarnAsError': 'true',
+ },
+ },
+ 'sources': ['uninit.cc'],
+ },
+ {
+ 'target_name': 'test_analysis_unspec',
+ 'type': 'executable',
+ 'sources': ['uninit.cc'],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarnAsError': 'true',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/buffer-security-check.gyp b/third_party/python/gyp/test/win/compiler-flags/buffer-security-check.gyp
new file mode 100644
index 0000000000..cc5a12b953
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/buffer-security-check.gyp
@@ -0,0 +1,51 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ # Turn debug information on so that we can see the name of the buffer
+ # security check cookie in the disassembly.
+ {
+ 'target_name': 'test_bsc_unset',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ },
+ },
+ 'sources': ['buffer-security.cc'],
+ },
+ {
+ 'target_name': 'test_bsc_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'BufferSecurityCheck': 'false',
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ },
+ },
+ 'sources': ['buffer-security.cc'],
+ },
+ {
+ 'target_name': 'test_bsc_on',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'BufferSecurityCheck': 'true',
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ },
+ },
+ 'sources': ['buffer-security.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/buffer-security.cc b/third_party/python/gyp/test/win/compiler-flags/buffer-security.cc
new file mode 100644
index 0000000000..e8a48a2a67
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/buffer-security.cc
@@ -0,0 +1,12 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <malloc.h>
+#include <string.h>
+
+int main() {
+ char* stuff = reinterpret_cast<char*>(_alloca(256));
+ strcpy(stuff, "blah");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/calling-convention-cdecl.def b/third_party/python/gyp/test/win/compiler-flags/calling-convention-cdecl.def
new file mode 100644
index 0000000000..dc1dba055a
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/calling-convention-cdecl.def
@@ -0,0 +1,6 @@
+; Copyright (c) 2014 Google Inc. All rights reserved.
+; Use of this source code is governed by a BSD-style license that can be
+; found in the LICENSE file.
+
+EXPORTS
+ foo
diff --git a/third_party/python/gyp/test/win/compiler-flags/calling-convention-fastcall.def b/third_party/python/gyp/test/win/compiler-flags/calling-convention-fastcall.def
new file mode 100644
index 0000000000..2c61afe208
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/calling-convention-fastcall.def
@@ -0,0 +1,6 @@
+; Copyright (c) 2014 Google Inc. All rights reserved.
+; Use of this source code is governed by a BSD-style license that can be
+; found in the LICENSE file.
+
+EXPORTS
+ @foo@0
diff --git a/third_party/python/gyp/test/win/compiler-flags/calling-convention-stdcall.def b/third_party/python/gyp/test/win/compiler-flags/calling-convention-stdcall.def
new file mode 100644
index 0000000000..6c7e05e9ea
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/calling-convention-stdcall.def
@@ -0,0 +1,6 @@
+; Copyright (c) 2014 Google Inc. All rights reserved.
+; Use of this source code is governed by a BSD-style license that can be
+; found in the LICENSE file.
+
+EXPORTS
+ _foo@0
diff --git a/third_party/python/gyp/test/win/compiler-flags/calling-convention-vectorcall.def b/third_party/python/gyp/test/win/compiler-flags/calling-convention-vectorcall.def
new file mode 100644
index 0000000000..4ef119c3e3
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/calling-convention-vectorcall.def
@@ -0,0 +1,6 @@
+; Copyright (c) 2014 Google Inc. All rights reserved.
+; Use of this source code is governed by a BSD-style license that can be
+; found in the LICENSE file.
+
+EXPORTS
+ foo@@0
diff --git a/third_party/python/gyp/test/win/compiler-flags/calling-convention.cc b/third_party/python/gyp/test/win/compiler-flags/calling-convention.cc
new file mode 100644
index 0000000000..0d78a0cc05
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/calling-convention.cc
@@ -0,0 +1,6 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+extern "C" void foo() {
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/calling-convention.gyp b/third_party/python/gyp/test/win/compiler-flags/calling-convention.gyp
new file mode 100644
index 0000000000..5069c552bc
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/calling-convention.gyp
@@ -0,0 +1,66 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_cdecl',
+ 'type': 'loadable_module',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'CallingConvention': 0,
+ },
+ },
+ 'sources': [
+ 'calling-convention.cc',
+ 'calling-convention-cdecl.def',
+ ],
+ },
+ {
+ 'target_name': 'test_fastcall',
+ 'type': 'loadable_module',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'CallingConvention': 1,
+ },
+ },
+ 'sources': [
+ 'calling-convention.cc',
+ 'calling-convention-fastcall.def',
+ ],
+ },
+ {
+ 'target_name': 'test_stdcall',
+ 'type': 'loadable_module',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'CallingConvention': 2,
+ },
+ },
+ 'sources': [
+ 'calling-convention.cc',
+ 'calling-convention-stdcall.def',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['MSVS_VERSION[0:4]>="2013"', {
+ 'targets': [
+ {
+ 'target_name': 'test_vectorcall',
+ 'type': 'loadable_module',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'CallingConvention': 3,
+ },
+ },
+ 'sources': [
+ 'calling-convention.cc',
+ 'calling-convention-vectorcall.def',
+ ],
+ },
+ ],
+ }],
+ ],
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/character-set-mbcs.cc b/third_party/python/gyp/test/win/compiler-flags/character-set-mbcs.cc
new file mode 100644
index 0000000000..3286304730
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/character-set-mbcs.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef _MBCS
+#error
+#endif
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/character-set-unicode.cc b/third_party/python/gyp/test/win/compiler-flags/character-set-unicode.cc
new file mode 100644
index 0000000000..32e69724a5
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/character-set-unicode.cc
@@ -0,0 +1,15 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef _UNICODE
+#error
+#endif
+
+#ifndef UNICODE
+#error
+#endif
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/character-set.gyp b/third_party/python/gyp/test/win/compiler-flags/character-set.gyp
new file mode 100644
index 0000000000..3dc45557d9
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/character-set.gyp
@@ -0,0 +1,35 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_cs_notset',
+ 'product_name': 'test_cs_notset',
+ 'type': 'executable',
+ 'msvs_configuration_attributes': {
+ 'CharacterSet': '0'
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_cs_unicode',
+ 'product_name': 'test_cs_unicode',
+ 'type': 'executable',
+ 'msvs_configuration_attributes': {
+ 'CharacterSet': '1'
+ },
+ 'sources': ['character-set-unicode.cc'],
+ },
+ {
+ 'target_name': 'test_cs_mbcs',
+ 'product_name': 'test_cs_mbcs',
+ 'type': 'executable',
+ 'msvs_configuration_attributes': {
+ 'CharacterSet': '2'
+ },
+ 'sources': ['character-set-mbcs.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/compile-as-managed.cc b/third_party/python/gyp/test/win/compiler-flags/compile-as-managed.cc
new file mode 100644
index 0000000000..a29c71ee1d
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/compile-as-managed.cc
@@ -0,0 +1,9 @@
+// Copyright (c) 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <vcclr.h>
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/compile-as-managed.gyp b/third_party/python/gyp/test/win/compiler-flags/compile-as-managed.gyp
new file mode 100644
index 0000000000..3bacbbc135
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/compile-as-managed.gyp
@@ -0,0 +1,29 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test-compile-as-managed',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'CompileAsManaged': 'true',
+ 'ExceptionHandling': '0' # /clr is incompatible with /EHs
+ }
+ },
+ 'sources': ['compile-as-managed.cc'],
+ },
+ {
+ 'target_name': 'test-compile-as-unmanaged',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'CompileAsManaged': 'false',
+ }
+ },
+ 'sources': ['compile-as-managed.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/compile-as-winrt.cc b/third_party/python/gyp/test/win/compiler-flags/compile-as-winrt.cc
new file mode 100644
index 0000000000..da9954f8fb
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/compile-as-winrt.cc
@@ -0,0 +1,12 @@
+// Copyright (c) 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using namespace Platform;
+
+int main() {
+ wchar_t msg[] = L"Test";
+ String^ str1 = ref new String(msg);
+ auto str2 = String::Concat(str1, " Concat");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/compile-as-winrt.gyp b/third_party/python/gyp/test/win/compiler-flags/compile-as-winrt.gyp
new file mode 100644
index 0000000000..8978e5059d
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/compile-as-winrt.gyp
@@ -0,0 +1,20 @@
+# Copyright (c) 2016 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test-compile-as-winrt',
+ 'type': 'executable',
+ 'msvs_windows_sdk_version': 'v10.0',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalUsingDirectories': ['$(VCInstallDir)vcpackages;$(WindowsSdkDir)UnionMetadata;%(AdditionalUsingDirectories)'],
+ 'CompileAsWinRT': 'true'
+ }
+ },
+ 'sources': ['compile-as-winrt.cc']
+ }
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/debug-format.gyp b/third_party/python/gyp/test/win/compiler-flags/debug-format.gyp
new file mode 100644
index 0000000000..daaed23ff1
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/debug-format.gyp
@@ -0,0 +1,48 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test-debug-format-off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '0'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test-debug-format-oldstyle',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '1'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test-debug-format-pdb',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test-debug-format-editcontinue',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '4'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/default-char-is-unsigned.cc b/third_party/python/gyp/test/win/compiler-flags/default-char-is-unsigned.cc
new file mode 100644
index 0000000000..beeca2aa15
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/default-char-is-unsigned.cc
@@ -0,0 +1,15 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+template <bool>
+struct CompileAssert {
+};
+
+#define COMPILE_ASSERT(expr, msg) \
+ typedef CompileAssert<(bool(expr))> msg[bool(expr) ? 1 : -1]
+
+int main() {
+ COMPILE_ASSERT(char(-1) > 0, default_char_is_unsigned);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/default-char-is-unsigned.gyp b/third_party/python/gyp/test/win/compiler-flags/default-char-is-unsigned.gyp
new file mode 100644
index 0000000000..941e5810dd
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/default-char-is-unsigned.gyp
@@ -0,0 +1,20 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_default_char_is_unsigned',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DefaultCharIsUnsigned': 'true',
+ },
+ },
+ 'sources': [
+ 'default-char-is-unsigned.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/disable-specific-warnings.cc b/third_party/python/gyp/test/win/compiler-flags/disable-specific-warnings.cc
new file mode 100644
index 0000000000..d312f5f481
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/disable-specific-warnings.cc
@@ -0,0 +1,9 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ // Causes level 1 warning (C4700)
+ int i;
+ return i;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/disable-specific-warnings.gyp b/third_party/python/gyp/test/win/compiler-flags/disable-specific-warnings.gyp
new file mode 100644
index 0000000000..d81d694c62
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/disable-specific-warnings.gyp
@@ -0,0 +1,29 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_disable_specific_warnings_set',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarnAsError': 'true',
+ 'DisableSpecificWarnings': ['4700']
+ }
+ },
+ 'sources': ['disable-specific-warnings.cc']
+ },
+ {
+ 'target_name': 'test_disable_specific_warnings_unset',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarnAsError': 'true'
+ }
+ },
+ 'sources': ['disable-specific-warnings.cc']
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.cc b/third_party/python/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.cc
new file mode 100644
index 0000000000..432ef54eda
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.cc
@@ -0,0 +1,28 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+static const char* GetArchOption() {
+#if _M_IX86_FP == 0
+ return "IA32";
+#elif _M_IX86_FP == 1
+ return "SSE";
+#elif _M_IX86_FP == 2
+# if defined(__AVX2__)
+ return "AVX2";
+# elif defined(__AVX__)
+ return "AVX";
+# else
+ return "SSE2";
+# endif
+#else
+ return "UNSUPPORTED OPTION";
+#endif
+}
+
+int main() {
+ printf("/arch:%s\n", GetArchOption());
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.gyp b/third_party/python/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.gyp
new file mode 100644
index 0000000000..9c49edc7ff
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.gyp
@@ -0,0 +1,68 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'sse_extensions',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableEnhancedInstructionSet': '1', # StreamingSIMDExtensions
+ }
+ },
+ 'sources': ['enable-enhanced-instruction-set.cc'],
+ },
+ {
+ 'target_name': 'sse2_extensions',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableEnhancedInstructionSet': '2', # StreamingSIMDExtensions2
+ }
+ },
+ 'sources': ['enable-enhanced-instruction-set.cc'],
+ },
+ ],
+ 'conditions': [
+ ['MSVS_VERSION[0:4]>"2010"', {
+ 'targets': [
+ {
+ 'target_name': 'avx_extensions',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableEnhancedInstructionSet': '3', # AdvancedVectorExtensions
+ }
+ },
+ 'sources': ['enable-enhanced-instruction-set.cc'],
+ },
+ {
+ 'target_name': 'no_extensions',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableEnhancedInstructionSet': '4', # NoExtensions
+ }
+ },
+ 'sources': ['enable-enhanced-instruction-set.cc'],
+ },
+ ],
+ }],
+ ['MSVS_VERSION[0:4]>="2013"', {
+ 'targets': [
+ {
+ 'target_name': 'avx2_extensions',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableEnhancedInstructionSet': '5', # AdvancedVectorExtensions2
+ }
+ },
+ 'sources': ['enable-enhanced-instruction-set.cc'],
+ },
+ ],
+ }],
+ ],
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/exception-handling-on.cc b/third_party/python/gyp/test/win/compiler-flags/exception-handling-on.cc
new file mode 100644
index 0000000000..5d9a3af77d
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/exception-handling-on.cc
@@ -0,0 +1,24 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <excpt.h>
+#include <stdlib.h>
+
+void fail() {
+ try {
+ int i = 0, j = 1;
+ j /= i;
+ } catch(...) {
+ exit(1);
+ }
+}
+
+int main() {
+ __try {
+ fail();
+ } __except(EXCEPTION_EXECUTE_HANDLER) {
+ return 2;
+ }
+ return 3;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/exception-handling.gyp b/third_party/python/gyp/test/win/compiler-flags/exception-handling.gyp
new file mode 100644
index 0000000000..c266768dda
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/exception-handling.gyp
@@ -0,0 +1,46 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ # Optimization disabled so that the exception-causing code is not removed
+ # (divide by zero was getting optimized away in VS2010).
+ {
+ 'target_name': 'test_eh_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'ExceptionHandling': '0',
+ 'WarnAsError': 'true',
+ 'Optimization': '0',
+ }
+ },
+ 'sources': ['exception-handling-on.cc'],
+ },
+ {
+ 'target_name': 'test_eh_s',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'ExceptionHandling': '1',
+ 'WarnAsError': 'true',
+ 'Optimization': '0',
+ }
+ },
+ 'sources': ['exception-handling-on.cc'],
+ },
+ {
+ 'target_name': 'test_eh_a',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'ExceptionHandling': '2',
+ 'WarnAsError': 'true',
+ 'Optimization': '0',
+ }
+ },
+ 'sources': ['exception-handling-on.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/floating-point-model-fast.cc b/third_party/python/gyp/test/win/compiler-flags/floating-point-model-fast.cc
new file mode 100644
index 0000000000..9d22152f5e
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/floating-point-model-fast.cc
@@ -0,0 +1,19 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifdef _M_FP_PRECISE
+#error
+#endif
+
+#ifdef _M_FP_STRICT
+#error
+#endif
+
+#ifndef _M_FP_FAST
+#error
+#endif
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/floating-point-model-precise.cc b/third_party/python/gyp/test/win/compiler-flags/floating-point-model-precise.cc
new file mode 100644
index 0000000000..1191a74ed1
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/floating-point-model-precise.cc
@@ -0,0 +1,19 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef _M_FP_PRECISE
+#error
+#endif
+
+#ifdef _M_FP_STRICT
+#error
+#endif
+
+#ifdef _M_FP_FAST
+#error
+#endif
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/floating-point-model-strict.cc b/third_party/python/gyp/test/win/compiler-flags/floating-point-model-strict.cc
new file mode 100644
index 0000000000..1ffde36e0c
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/floating-point-model-strict.cc
@@ -0,0 +1,19 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifdef _M_FP_PRECISE
+#error
+#endif
+
+#ifndef _M_FP_STRICT
+#error
+#endif
+
+#ifdef _M_FP_FAST
+#error
+#endif
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/floating-point-model.gyp b/third_party/python/gyp/test/win/compiler-flags/floating-point-model.gyp
new file mode 100644
index 0000000000..857b275a8f
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/floating-point-model.gyp
@@ -0,0 +1,43 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test-floating-point-model-default',
+ 'type': 'executable',
+ 'sources': ['floating-point-model-precise.cc'],
+ },
+ {
+ 'target_name': 'test-floating-point-model-precise',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'FloatingPointModel': '0'
+ }
+ },
+ 'sources': ['floating-point-model-precise.cc'],
+ },
+ {
+ 'target_name': 'test-floating-point-model-strict',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'FloatingPointModel': '1'
+ }
+ },
+ 'sources': ['floating-point-model-strict.cc'],
+ },
+ {
+ 'target_name': 'test-floating-point-model-fast',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'FloatingPointModel': '2'
+ }
+ },
+ 'sources': ['floating-point-model-fast.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/force-include-files-with-precompiled.cc b/third_party/python/gyp/test/win/compiler-flags/force-include-files-with-precompiled.cc
new file mode 100644
index 0000000000..85cb0f32a6
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/force-include-files-with-precompiled.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+int main() {
+ std::string s;
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/force-include-files.cc b/third_party/python/gyp/test/win/compiler-flags/force-include-files.cc
new file mode 100644
index 0000000000..4a93de55d4
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/force-include-files.cc
@@ -0,0 +1,8 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ std::list<std::vector<std::string> > l;
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/force-include-files.gyp b/third_party/python/gyp/test/win/compiler-flags/force-include-files.gyp
new file mode 100644
index 0000000000..2031546cc5
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/force-include-files.gyp
@@ -0,0 +1,36 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_force_include_files',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'ForcedIncludeFiles': ['string', 'vector', 'list'],
+ },
+ },
+ 'sources': [
+ 'force-include-files.cc',
+ ],
+ },
+ {
+ 'target_name': 'test_force_include_with_precompiled',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'ForcedIncludeFiles': ['string'],
+ },
+ },
+ 'msvs_precompiled_header': 'stdio.h',
+ 'msvs_precompiled_source': 'precomp.cc',
+ 'msvs_disabled_warnings': [ 4530, ],
+ 'sources': [
+ 'force-include-files-with-precompiled.cc',
+ 'precomp.cc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/function-level-linking.cc b/third_party/python/gyp/test/win/compiler-flags/function-level-linking.cc
new file mode 100644
index 0000000000..4952272817
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/function-level-linking.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int comdat_function() {
+ return 1;
+}
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/function-level-linking.gyp b/third_party/python/gyp/test/win/compiler-flags/function-level-linking.gyp
new file mode 100644
index 0000000000..5858586a24
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/function-level-linking.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_fll_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableFunctionLevelLinking': 'false'
+ }
+ },
+ 'sources': ['function-level-linking.cc'],
+ },
+ {
+ 'target_name': 'test_fll_on',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableFunctionLevelLinking': 'true',
+ }
+ },
+ 'sources': ['function-level-linking.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/hello.cc b/third_party/python/gyp/test/win/compiler-flags/hello.cc
new file mode 100644
index 0000000000..1711567ef5
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/hello.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/optimizations.gyp b/third_party/python/gyp/test/win/compiler-flags/optimizations.gyp
new file mode 100644
index 0000000000..e63096f0f7
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/optimizations.gyp
@@ -0,0 +1,207 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_opt_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'Optimization': '0'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_lev_size',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'Optimization': '1'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_lev_speed',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'Optimization': '2'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_lev_max',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'Optimization': '3'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_unset',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_fpo',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'OmitFramePointers': 'true'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_fpo_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'OmitFramePointers': 'false'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_intrinsic',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableIntrinsicFunctions': 'true'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_intrinsic_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableIntrinsicFunctions': 'false'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_inline_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'InlineFunctionExpansion': '0'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_inline_manual',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'InlineFunctionExpansion': '1'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_inline_auto',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'InlineFunctionExpansion': '2'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_neither',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'FavorSizeOrSpeed': '0'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_speed',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'FavorSizeOrSpeed': '1'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_size',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'FavorSizeOrSpeed': '2'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_wpo',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WholeProgramOptimization': 'true'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_sp',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'StringPooling': 'true'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_sp_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'StringPooling': 'false'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_fso',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableFiberSafeOptimizations': 'true'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_opt_fso_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableFiberSafeOptimizations': 'false'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/pdbname-override.gyp b/third_party/python/gyp/test/win/compiler-flags/pdbname-override.gyp
new file mode 100644
index 0000000000..dad20e01fd
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/pdbname-override.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_pdbname',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.cc',
+ 'pdbname.cc',
+ ],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ 'ProgramDataBaseFileName': '<(PRODUCT_DIR)/compiler_generated.pdb',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'ProgramDatabaseFile': '<(PRODUCT_DIR)/linker_generated.pdb',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/pdbname.cc b/third_party/python/gyp/test/win/compiler-flags/pdbname.cc
new file mode 100644
index 0000000000..0fe05d5afb
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/pdbname.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int some_function() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/pdbname.gyp b/third_party/python/gyp/test/win/compiler-flags/pdbname.gyp
new file mode 100644
index 0000000000..8fcf754727
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/pdbname.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_pdbname',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.cc',
+ 'pdbname.cc',
+ ],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/precomp.cc b/third_party/python/gyp/test/win/compiler-flags/precomp.cc
new file mode 100644
index 0000000000..d16bac890f
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/precomp.cc
@@ -0,0 +1,6 @@
+// Copyright 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <windows.h>
+#include <stdio.h>
diff --git a/third_party/python/gyp/test/win/compiler-flags/rtti-on.cc b/third_party/python/gyp/test/win/compiler-flags/rtti-on.cc
new file mode 100644
index 0000000000..2d3ad03ae4
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/rtti-on.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef _CPPRTTI
+#error
+#endif
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/rtti.gyp b/third_party/python/gyp/test/win/compiler-flags/rtti.gyp
new file mode 100644
index 0000000000..704cd58f5a
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/rtti.gyp
@@ -0,0 +1,37 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_rtti_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'RuntimeTypeInfo': 'false',
+ 'WarnAsError': 'true'
+ }
+ },
+ 'sources': ['rtti-on.cc'],
+ },
+ {
+ 'target_name': 'test_rtti_on',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'RuntimeTypeInfo': 'true',
+ 'WarnAsError': 'true'
+ }
+ },
+ 'sources': ['rtti-on.cc'],
+ },
+ {
+ 'target_name': 'test_rtti_unset',
+ 'type': 'executable',
+ 'msvs_settings': {
+ },
+ 'sources': ['rtti-on.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/runtime-checks.cc b/third_party/python/gyp/test/win/compiler-flags/runtime-checks.cc
new file mode 100644
index 0000000000..fdb811da87
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/runtime-checks.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef __MSVC_RUNTIME_CHECKS
+#error
+#endif
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/runtime-checks.gyp b/third_party/python/gyp/test/win/compiler-flags/runtime-checks.gyp
new file mode 100644
index 0000000000..8ea3092057
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/runtime-checks.gyp
@@ -0,0 +1,29 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_brc_none',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'Optimization': '0',
+ }
+ },
+ 'sources': ['runtime-checks.cc'],
+ },
+ {
+ 'target_name': 'test_brc_1',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'Optimization': '0',
+ 'BasicRuntimeChecks': '3'
+ }
+ },
+ 'sources': ['runtime-checks.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/runtime-library-md.cc b/third_party/python/gyp/test/win/compiler-flags/runtime-library-md.cc
new file mode 100644
index 0000000000..87c83021d4
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/runtime-library-md.cc
@@ -0,0 +1,19 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef _MT
+#error
+#endif
+
+#ifdef _DEBUG
+#error
+#endif
+
+#ifndef _DLL
+#error
+#endif
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/runtime-library-mdd.cc b/third_party/python/gyp/test/win/compiler-flags/runtime-library-mdd.cc
new file mode 100644
index 0000000000..9f175e493e
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/runtime-library-mdd.cc
@@ -0,0 +1,19 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef _MT
+#error
+#endif
+
+#ifndef _DEBUG
+#error
+#endif
+
+#ifndef _DLL
+#error
+#endif
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/runtime-library-mt.cc b/third_party/python/gyp/test/win/compiler-flags/runtime-library-mt.cc
new file mode 100644
index 0000000000..27e62b63db
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/runtime-library-mt.cc
@@ -0,0 +1,19 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef _MT
+#error
+#endif
+
+#ifdef _DEBUG
+#error
+#endif
+
+#ifdef _DLL
+#error
+#endif
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/runtime-library-mtd.cc b/third_party/python/gyp/test/win/compiler-flags/runtime-library-mtd.cc
new file mode 100644
index 0000000000..a9921db9e2
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/runtime-library-mtd.cc
@@ -0,0 +1,19 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef _MT
+#error
+#endif
+
+#ifndef _DEBUG
+#error
+#endif
+
+#ifdef _DLL
+#error
+#endif
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/runtime-library.gyp b/third_party/python/gyp/test/win/compiler-flags/runtime-library.gyp
new file mode 100644
index 0000000000..04afc391c7
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/runtime-library.gyp
@@ -0,0 +1,48 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_rl_md',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'RuntimeLibrary': '2'
+ }
+ },
+ 'sources': ['runtime-library-md.cc'],
+ },
+ {
+ 'target_name': 'test_rl_mdd',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'RuntimeLibrary': '3'
+ }
+ },
+ 'sources': ['runtime-library-mdd.cc'],
+ },
+ {
+ 'target_name': 'test_rl_mt',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'RuntimeLibrary': '0'
+ }
+ },
+ 'sources': ['runtime-library-mt.cc'],
+ },
+ {
+ 'target_name': 'test_rl_mtd',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'RuntimeLibrary': '1'
+ }
+ },
+ 'sources': ['runtime-library-mtd.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/subdir/header.h b/third_party/python/gyp/test/win/compiler-flags/subdir/header.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/subdir/header.h
diff --git a/third_party/python/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type.gyp b/third_party/python/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type.gyp
new file mode 100644
index 0000000000..456fe047d0
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type.gyp
@@ -0,0 +1,33 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_treat_wchar_t_as_built_in_type_negative',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'TreatWChar_tAsBuiltInType': 'false',
+ },
+ },
+ 'sources': [
+ 'treat-wchar-t-as-built-in-type1.cc',
+ ],
+ },
+ {
+ 'target_name': 'test_treat_wchar_t_as_built_in_type_positive',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'TreatWChar_tAsBuiltInType': 'true',
+ },
+ },
+ 'sources': [
+ 'treat-wchar-t-as-built-in-type2.cc',
+ ],
+ },
+
+ ],
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type1.cc b/third_party/python/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type1.cc
new file mode 100644
index 0000000000..fc1ed0b7ea
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type1.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifdef _NATIVE_WCHAR_T_DEFINED
+#error
+#endif
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type2.cc b/third_party/python/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type2.cc
new file mode 100644
index 0000000000..28ab94f742
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type2.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef _NATIVE_WCHAR_T_DEFINED
+#error
+#endif
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/uninit.cc b/third_party/python/gyp/test/win/compiler-flags/uninit.cc
new file mode 100644
index 0000000000..a9d5f5d483
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/uninit.cc
@@ -0,0 +1,13 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Should trigger C6001: using uninitialized memory <variable> for |i|.
+int f(bool b) {
+ int i;
+ if (b)
+ i = 0;
+ return i;
+}
+
+int main() {}
diff --git a/third_party/python/gyp/test/win/compiler-flags/warning-as-error.cc b/third_party/python/gyp/test/win/compiler-flags/warning-as-error.cc
new file mode 100644
index 0000000000..fd2130aca5
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/warning-as-error.cc
@@ -0,0 +1,9 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ // Cause a warning, even at /W1
+ int export;
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/warning-as-error.gyp b/third_party/python/gyp/test/win/compiler-flags/warning-as-error.gyp
new file mode 100644
index 0000000000..d71f261141
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/warning-as-error.gyp
@@ -0,0 +1,37 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_warn_as_error_false',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarnAsError': 'false'
+ }
+ },
+ 'sources': ['warning-as-error.cc']
+ },
+ {
+ 'target_name': 'test_warn_as_error_true',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarnAsError': 'true'
+ }
+ },
+ 'sources': ['warning-as-error.cc']
+ },
+ {
+ 'target_name': 'test_warn_as_error_unset',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ }
+ },
+ 'sources': ['warning-as-error.cc']
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/warning-level.gyp b/third_party/python/gyp/test/win/compiler-flags/warning-level.gyp
new file mode 100644
index 0000000000..2297aa7cac
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/warning-level.gyp
@@ -0,0 +1,115 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ # Level 1
+ {
+ 'target_name': 'test_wl1_fail',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarningLevel': '1',
+ 'WarnAsError': 'true',
+ }
+ },
+ 'sources': ['warning-level1.cc'],
+ },
+ {
+ 'target_name': 'test_wl1_pass',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarningLevel': '1',
+ 'WarnAsError': 'true',
+ }
+ },
+ 'sources': ['warning-level2.cc'],
+ },
+
+ # Level 2
+ {
+ 'target_name': 'test_wl2_fail',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarningLevel': '2',
+ 'WarnAsError': 'true',
+ }
+ },
+ 'sources': ['warning-level2.cc'],
+ },
+ {
+ 'target_name': 'test_wl2_pass',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarningLevel': '2',
+ 'WarnAsError': 'true',
+ }
+ },
+ 'sources': ['warning-level3.cc'],
+ },
+
+ # Level 3
+ {
+ 'target_name': 'test_wl3_fail',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarningLevel': '3',
+ 'WarnAsError': 'true',
+ }
+ },
+ 'sources': ['warning-level3.cc'],
+ },
+ {
+ 'target_name': 'test_wl3_pass',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarningLevel': '3',
+ 'WarnAsError': 'true',
+ }
+ },
+ 'sources': ['warning-level4.cc'],
+ },
+
+
+ # Level 4
+ {
+ 'target_name': 'test_wl4_fail',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarningLevel': '4',
+ 'WarnAsError': 'true',
+ }
+ },
+ 'sources': ['warning-level4.cc'],
+ },
+
+ # Default level
+ {
+ 'target_name': 'test_def_fail',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarnAsError': 'true',
+ }
+ },
+ 'sources': ['warning-level1.cc'],
+ },
+ {
+ 'target_name': 'test_def_pass',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ }
+ },
+ 'sources': ['warning-level2.cc'],
+ },
+
+ ]
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/warning-level1.cc b/third_party/python/gyp/test/win/compiler-flags/warning-level1.cc
new file mode 100644
index 0000000000..119578d694
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/warning-level1.cc
@@ -0,0 +1,8 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ int export; // Cause a level 1 warning (C4237).
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/warning-level2.cc b/third_party/python/gyp/test/win/compiler-flags/warning-level2.cc
new file mode 100644
index 0000000000..9a26703180
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/warning-level2.cc
@@ -0,0 +1,14 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int f(int x) {
+ return 0;
+}
+
+int main() {
+ double x = 10.1;
+ // Cause a level 2 warning (C4243).
+ return f(x);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/warning-level3.cc b/third_party/python/gyp/test/win/compiler-flags/warning-level3.cc
new file mode 100644
index 0000000000..e0a9f3cdd9
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/warning-level3.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Cause a level 3 warning (C4359).
+struct __declspec(align(8)) C8 { __int64 i; };
+struct __declspec(align(4)) C4 { C8 m8; };
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/compiler-flags/warning-level4.cc b/third_party/python/gyp/test/win/compiler-flags/warning-level4.cc
new file mode 100644
index 0000000000..48a4fb7018
--- /dev/null
+++ b/third_party/python/gyp/test/win/compiler-flags/warning-level4.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ const int i = -1;
+ // Cause a level 4 warning (C4245).
+ unsigned int j = i;
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/enable-winrt/dllmain.cc b/third_party/python/gyp/test/win/enable-winrt/dllmain.cc
new file mode 100644
index 0000000000..dedd83c3f6
--- /dev/null
+++ b/third_party/python/gyp/test/win/enable-winrt/dllmain.cc
@@ -0,0 +1,30 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <windows.h>
+#include <wrl.h>
+#include <wrl/wrappers/corewrappers.h>
+#include <windows.graphics.display.h>
+
+using namespace Microsoft::WRL;
+using namespace Microsoft::WRL::Wrappers;
+using namespace ABI::Windows::Foundation;
+using namespace ABI::Windows::Graphics::Display;
+
+bool TryToUseSomeWinRT() {
+ ComPtr<IDisplayPropertiesStatics> dp;
+ HStringReference s(RuntimeClass_Windows_Graphics_Display_DisplayProperties);
+ HRESULT hr = GetActivationFactory(s.Get(), dp.GetAddressOf());
+ if (SUCCEEDED(hr)) {
+ float dpi = 96.0f;
+ if (SUCCEEDED(dp->get_LogicalDpi(&dpi))) {
+ return true;
+ }
+ }
+ return false;
+}
+
+BOOL WINAPI DllMain(HINSTANCE hinstance, DWORD reason, LPVOID reserved) {
+ return TRUE;
+}
diff --git a/third_party/python/gyp/test/win/enable-winrt/enable-winrt.gyp b/third_party/python/gyp/test/win/enable-winrt/enable-winrt.gyp
new file mode 100644
index 0000000000..69f70189db
--- /dev/null
+++ b/third_party/python/gyp/test/win/enable-winrt/enable-winrt.gyp
@@ -0,0 +1,39 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'enable_winrt_dll',
+ 'type': 'shared_library',
+ 'msvs_enable_winrt': 1,
+ 'sources': [
+ 'dllmain.cc',
+ ],
+ },
+ {
+ 'target_name': 'enable_winrt_missing_dll',
+ 'type': 'shared_library',
+ 'sources': [
+ 'dllmain.cc',
+ ],
+ },
+ {
+ 'target_name': 'enable_winrt_winphone_dll',
+ 'type': 'shared_library',
+ 'msvs_enable_winrt': 1,
+ 'msvs_enable_winphone': 1,
+ 'sources': [
+ 'dllmain.cc',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'AdditionalDependencies': [
+ '%(AdditionalDependencies)',
+ ],
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/generator-output-different-drive/gyptest-generator-output-different-drive.py b/third_party/python/gyp/test/win/generator-output-different-drive/gyptest-generator-output-different-drive.py
new file mode 100644
index 0000000000..96a30ec5b9
--- /dev/null
+++ b/third_party/python/gyp/test/win/generator-output-different-drive/gyptest-generator-output-different-drive.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test that the generator output can be written to a different drive on Windows.
+"""
+
+import os
+import TestGyp
+import string
+import subprocess
+import sys
+
+
+if sys.platform == 'win32':
+ import win32api
+
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ def GetFirstFreeDriveLetter():
+ """ Returns the first unused Windows drive letter in [A, Z] """
+ all_letters = [c for c in string.ascii_uppercase]
+ in_use = win32api.GetLogicalDriveStrings()
+ free = list(set(all_letters) - set(in_use))
+ return free[0]
+
+ output_dir = os.path.join('different-drive', 'output')
+ if not os.path.isdir(os.path.abspath(output_dir)):
+ os.makedirs(os.path.abspath(output_dir))
+ output_drive = GetFirstFreeDriveLetter()
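+  # 'subst <drive>: <path>' maps the output directory onto the free drive
+  # letter so the generator output can land on a different drive; the mapping
+  # is removed with 'subst <drive>: /D' in the finally block below.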
+ subprocess.call(['subst', '%c:' % output_drive, os.path.abspath(output_dir)])
+ try:
+ test.run_gyp('prog.gyp', '--generator-output=%s' % (
+ os.path.join(output_drive, 'output')))
+ test.build('prog.gyp', test.ALL, chdir=os.path.join(output_drive, 'output'))
+ test.built_file_must_exist('program', chdir=os.path.join(output_drive,
+ 'output'),
+ type=test.EXECUTABLE)
+ test.pass_test()
+ finally:
+ subprocess.call(['subst', '%c:' % output_drive, '/D'])
diff --git a/third_party/python/gyp/test/win/generator-output-different-drive/prog.c b/third_party/python/gyp/test/win/generator-output-different-drive/prog.c
new file mode 100644
index 0000000000..7937f5d07a
--- /dev/null
+++ b/third_party/python/gyp/test/win/generator-output-different-drive/prog.c
@@ -0,0 +1,10 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+int main(void) {
+ printf("Hello from prog.c\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/generator-output-different-drive/prog.gyp b/third_party/python/gyp/test/win/generator-output-different-drive/prog.gyp
new file mode 100644
index 0000000000..92f53e5da5
--- /dev/null
+++ b/third_party/python/gyp/test/win/generator-output-different-drive/prog.gyp
@@ -0,0 +1,15 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'sources': [
+ 'prog.c',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/win/gyptest-asm-files.py b/third_party/python/gyp/test/win/gyptest-asm-files.py
new file mode 100644
index 0000000000..007b52eb26
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-asm-files.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure .s files aren't passed to cl.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'asm-files'
+ test.run_gyp('asm-files.gyp', chdir=CHDIR)
+ # The compiler will error out if it's passed the .s files, so just make sure
+ # the build succeeds. The compiler doesn't directly support building
+  # assembler files on Windows; they have to be built explicitly with a
+ # third-party tool.
+ test.build('asm-files.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-additional-include-dirs.py b/third_party/python/gyp/test/win/gyptest-cl-additional-include-dirs.py
new file mode 100644
index 0000000000..1fabfa9752
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-additional-include-dirs.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure additional include dirs are extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('additional-include-dirs.gyp', chdir=CHDIR)
+ test.build('additional-include-dirs.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-additional-options.py b/third_party/python/gyp/test/win/gyptest-cl-additional-options.py
new file mode 100644
index 0000000000..e9aea10dc9
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-additional-options.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure additional manual compiler flags are extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('additional-options.gyp', chdir=CHDIR)
+
+  # Warning level not overridden, must fail.
+ test.build('additional-options.gyp', 'test_additional_none', chdir=CHDIR,
+ status=1)
+
+ # Warning level is overridden, must succeed.
+ test.build('additional-options.gyp', 'test_additional_one', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-analysis.py b/third_party/python/gyp/test/win/gyptest-cl-analysis.py
new file mode 100644
index 0000000000..7b3b9897f5
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-analysis.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure PREfast (code analysis) setting is extracted properly.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if (sys.platform == 'win32' and
+ int(os.environ.get('GYP_MSVS_VERSION', 0)) >= 2012):
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('analysis.gyp', chdir=CHDIR)
+
+ # Analysis enabled, should fail.
+ test.build('analysis.gyp', 'test_analysis_on', chdir=CHDIR, status=1)
+
+ # Analysis not enabled, or unspecified, should pass.
+ test.build('analysis.gyp', 'test_analysis_off', chdir=CHDIR)
+ test.build('analysis.gyp', 'test_analysis_unspec', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-buffer-security-check.py b/third_party/python/gyp/test/win/gyptest-cl-buffer-security-check.py
new file mode 100644
index 0000000000..e22869c3d3
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-buffer-security-check.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure buffer security check setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('buffer-security-check.gyp', chdir=CHDIR)
+ test.build('buffer-security-check.gyp', chdir=CHDIR)
+
+ def GetDisassemblyOfMain(exe):
+ # The standard library uses buffer security checks independent of our
+ # buffer security settings, so we extract just our code (i.e. main()) to
+ # check against.
+ full_path = test.built_file_path(exe, chdir=CHDIR)
+ output = test.run_dumpbin('/disasm', full_path)
+ result = []
+ in_main = False
+ for line in output.splitlines():
+ if line == '_main:':
+ in_main = True
+ elif in_main:
+ # Disassembly of next function starts.
+ if line.startswith('_'):
+ break
+ result.append(line)
+ return '\n'.join(result)
+
+ # Buffer security checks are on by default, make sure security_cookie
+ # appears in the disassembly of our code.
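+  # (BufferSecurityCheck maps to /GS, which makes the compiler reference the
+  # security cookie in functions with vulnerable stack buffers, so its
+  # presence in main()'s disassembly shows the check is enabled.)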
+ if 'security_cookie' not in GetDisassemblyOfMain('test_bsc_unset.exe'):
+ test.fail_test()
+
+ # Explicitly on.
+ if 'security_cookie' not in GetDisassemblyOfMain('test_bsc_on.exe'):
+ test.fail_test()
+
+ # Explicitly off, shouldn't be a reference to the security cookie.
+ if 'security_cookie' in GetDisassemblyOfMain('test_bsc_off.exe'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-calling-convention.py b/third_party/python/gyp/test/win/gyptest-cl-calling-convention.py
new file mode 100644
index 0000000000..b5fdc47744
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-calling-convention.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure calling convention setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('calling-convention.gyp', chdir=CHDIR)
+ test.build('calling-convention.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-character-set.py b/third_party/python/gyp/test/win/gyptest-cl-character-set.py
new file mode 100644
index 0000000000..7fabb6722a
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-character-set.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure character set setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('character-set.gyp', chdir=CHDIR)
+ test.build('character-set.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-compile-as-managed.py b/third_party/python/gyp/test/win/gyptest-cl-compile-as-managed.py
new file mode 100644
index 0000000000..0d7b420485
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-compile-as-managed.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure compile as managed (clr) settings are extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp()
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('compile-as-managed.gyp', chdir=CHDIR)
+ test.build('compile-as-managed.gyp', "test-compile-as-managed", chdir=CHDIR)
+ # Must fail.
+ test.build('compile-as-managed.gyp', "test-compile-as-unmanaged",
+ chdir=CHDIR, status=1)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-compile-as-winrt.py b/third_party/python/gyp/test/win/gyptest-cl-compile-as-winrt.py
new file mode 100644
index 0000000000..3e0168b678
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-compile-as-winrt.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2016 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import TestGyp
+
+import os
+import sys
+
+if (sys.platform == 'win32' and
+ int(os.environ.get('GYP_MSVS_VERSION', 0)) >= 2015):
+ test = TestGyp.TestGyp(formats=['msvs'])
+
+ CHDIR = 'compiler-flags'
+
+ test.run_gyp('compile-as-winrt.gyp', chdir=CHDIR)
+
+ test.build('compile-as-winrt.gyp', 'test-compile-as-winrt', chdir=CHDIR)
+
+  test.pass_test()
\ No newline at end of file
diff --git a/third_party/python/gyp/test/win/gyptest-cl-debug-format.py b/third_party/python/gyp/test/win/gyptest-cl-debug-format.py
new file mode 100644
index 0000000000..6c68a619be
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-debug-format.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure debug format settings are extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('debug-format.gyp', chdir=CHDIR)
+
+  # While there are ways to check this via the .pdb contents, the .pdb doesn't
+  # record which style the debug information was created from, so we resort to
+  # just verifying that the flags are correct on the command line.
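+  # (DebugInformationFormat values correspond to /Z7 for OldStyle, /Zi for
+  # ProgramDatabase, and /ZI for EditAndContinue, which is what the checks
+  # below look for.)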
+
+ ninja_file = test.built_file_path('obj/test-debug-format-off.ninja',
+ chdir=CHDIR)
+ test.must_not_contain(ninja_file, '/Z7')
+ test.must_not_contain(ninja_file, '/Zi')
+ test.must_not_contain(ninja_file, '/ZI')
+
+ ninja_file = test.built_file_path('obj/test-debug-format-oldstyle.ninja',
+ chdir=CHDIR)
+ test.must_contain(ninja_file, '/Z7')
+
+ ninja_file = test.built_file_path('obj/test-debug-format-pdb.ninja',
+ chdir=CHDIR)
+ test.must_contain(ninja_file, '/Zi')
+
+ ninja_file = test.built_file_path('obj/test-debug-format-editcontinue.ninja',
+ chdir=CHDIR)
+ test.must_contain(ninja_file, '/ZI')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-default-char-is-unsigned.py b/third_party/python/gyp/test/win/gyptest-cl-default-char-is-unsigned.py
new file mode 100644
index 0000000000..d20f6742f5
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-default-char-is-unsigned.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure DefaultCharIsUnsigned option is functional.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('default-char-is-unsigned.gyp', chdir=CHDIR)
+ test.build('default-char-is-unsigned.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-disable-specific-warnings.py b/third_party/python/gyp/test/win/gyptest-cl-disable-specific-warnings.py
new file mode 100644
index 0000000000..cb253aff62
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-disable-specific-warnings.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure disable specific warnings is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('disable-specific-warnings.gyp', chdir=CHDIR)
+
+ # The source file contains a warning, so if WarnAsError is true and
+ # DisableSpecificWarnings for the warning in question is set, then the build
+ # should succeed, otherwise it must fail.
+
+ test.build('disable-specific-warnings.gyp',
+ 'test_disable_specific_warnings_set',
+ chdir=CHDIR)
+ test.build('disable-specific-warnings.gyp',
+ 'test_disable_specific_warnings_unset',
+ chdir=CHDIR, status=1)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-enable-enhanced-instruction-set.py b/third_party/python/gyp/test/win/gyptest-cl-enable-enhanced-instruction-set.py
new file mode 100644
index 0000000000..f34e671125
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-enable-enhanced-instruction-set.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test VCCLCompilerTool EnableEnhancedInstructionSet setting.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp()
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('enable-enhanced-instruction-set.gyp', chdir=CHDIR)
+
+ test.build('enable-enhanced-instruction-set.gyp', test.ALL, chdir=CHDIR)
+
+ test.run_built_executable('sse_extensions', chdir=CHDIR,
+ stdout='/arch:SSE\n')
+ test.run_built_executable('sse2_extensions', chdir=CHDIR,
+ stdout='/arch:SSE2\n')
+
+ # /arch:AVX introduced in VS2010, but MSBuild support lagged until 2012.
+ if os.path.exists(test.built_file_path('avx_extensions')):
+ test.run_built_executable('avx_extensions', chdir=CHDIR,
+ stdout='/arch:AVX\n')
+
+ # /arch:IA32 introduced in VS2012.
+ if os.path.exists(test.built_file_path('no_extensions')):
+ test.run_built_executable('no_extensions', chdir=CHDIR,
+ stdout='/arch:IA32\n')
+
+ # /arch:AVX2 introduced in VS2013r2.
+ if os.path.exists(test.built_file_path('avx2_extensions')):
+ test.run_built_executable('avx2_extensions', chdir=CHDIR,
+ stdout='/arch:AVX2\n')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-exception-handling.py b/third_party/python/gyp/test/win/gyptest-cl-exception-handling.py
new file mode 100644
index 0000000000..5738a54071
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-exception-handling.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure exception handling settings are extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('exception-handling.gyp', chdir=CHDIR)
+
+ # Must fail.
+ test.build('exception-handling.gyp', 'test_eh_off', chdir=CHDIR,
+ status=1)
+
+ # Must succeed.
+ test.build('exception-handling.gyp', 'test_eh_s', chdir=CHDIR)
+ test.build('exception-handling.gyp', 'test_eh_a', chdir=CHDIR)
+
+ # Error code must be 1 if EHa, and 2 if EHsc.
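+  # (Presumably /EHa lets catch(...) handle the structured exception raised in
+  # the test while /EHsc does not, producing the differing exit codes.)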
+ test.run_built_executable('test_eh_a', chdir=CHDIR, status=1)
+ test.run_built_executable('test_eh_s', chdir=CHDIR, status=2)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-floating-point-model.py b/third_party/python/gyp/test/win/gyptest-cl-floating-point-model.py
new file mode 100644
index 0000000000..86ff4785a0
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-floating-point-model.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure floating point model settings are extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp()
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('floating-point-model.gyp', chdir=CHDIR)
+ test.build('floating-point-model.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-force-include-files.py b/third_party/python/gyp/test/win/gyptest-cl-force-include-files.py
new file mode 100644
index 0000000000..b73b8bd503
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-force-include-files.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure ForcedIncludeFiles option is functional.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('force-include-files.gyp', chdir=CHDIR)
+ test.build('force-include-files.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-function-level-linking.py b/third_party/python/gyp/test/win/gyptest-cl-function-level-linking.py
new file mode 100644
index 0000000000..6ad7b8c484
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-function-level-linking.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure function-level linking setting is extracted properly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('function-level-linking.gyp', chdir=CHDIR)
+ test.build('function-level-linking.gyp', test.ALL, chdir=CHDIR)
+
+ def CheckForSectionString(binary, search_for, should_exist):
+ output = test.run_dumpbin('/headers', binary)
+ if should_exist and search_for not in output:
+ print('Did not find "%s" in %s' % (search_for, binary))
+ test.fail_test()
+ elif not should_exist and search_for in output:
+ print('Found "%s" in %s (and shouldn\'t have)' % (search_for, binary))
+ test.fail_test()
+
+ def Object(proj, obj):
+ sep = '.' if test.format == 'ninja' else '\\'
+ return 'obj\\%s%s%s' % (proj, sep, obj)
+
+ look_for = '''COMDAT; sym= "int __cdecl comdat_function'''
+
+ # When function level linking is on, the functions should be listed as
+ # separate comdat entries.
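+  # (EnableFunctionLevelLinking corresponds to /Gy, which places each function
+  # in its own COMDAT section; dumpbin /headers then reports those sections.)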
+
+ CheckForSectionString(
+ test.built_file_path(Object('test_fll_on', 'function-level-linking.obj'),
+ chdir=CHDIR),
+ look_for,
+ should_exist=True)
+
+ CheckForSectionString(
+ test.built_file_path(Object('test_fll_off', 'function-level-linking.obj'),
+ chdir=CHDIR),
+ look_for,
+ should_exist=False)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-optimizations.py b/third_party/python/gyp/test/win/gyptest-cl-optimizations.py
new file mode 100644
index 0000000000..31341f7dd7
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-optimizations.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure optimization settings are extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('optimizations.gyp', chdir=CHDIR)
+
+ # It's hard to map flags to output contents in a non-fragile way (especially
+ # handling both 2008/2010), so just verify the correct ninja command line
+ # contents.
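+  # For reference, the Optimization values map to: 0 -> /Od, 1 -> /O1 (size),
+  # 2 -> /O2 (speed), 3 -> /Ox (full); the remaining checks cover individual
+  # /O*, /G* and related toggles.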
+
+ ninja_file = test.built_file_path('obj/test_opt_off.ninja', chdir=CHDIR)
+ test.must_contain(ninja_file, 'cflags = /Od')
+
+ ninja_file = test.built_file_path('obj/test_opt_lev_size.ninja', chdir=CHDIR)
+ test.must_contain(ninja_file, 'cflags = /O1')
+
+ ninja_file = test.built_file_path('obj/test_opt_lev_speed.ninja', chdir=CHDIR)
+ test.must_contain(ninja_file, 'cflags = /O2')
+
+ ninja_file = test.built_file_path('obj/test_opt_lev_max.ninja', chdir=CHDIR)
+ test.must_contain(ninja_file, 'cflags = /Ox')
+
+ ninja_file = test.built_file_path('obj/test_opt_unset.ninja', chdir=CHDIR)
+ test.must_not_contain(ninja_file, '/Od')
+ test.must_not_contain(ninja_file, '/O1')
+ test.must_not_contain(ninja_file, '/Ox')
+ # Set by default if none specified.
+ test.must_contain(ninja_file, '/O2')
+
+ ninja_file = test.built_file_path('obj/test_opt_fpo.ninja', chdir=CHDIR)
+ test.must_contain(ninja_file, '/Oy')
+ test.must_not_contain(ninja_file, '/Oy-')
+
+ ninja_file = test.built_file_path('obj/test_opt_fpo_off.ninja', chdir=CHDIR)
+ test.must_contain(ninja_file, '/Oy-')
+
+ ninja_file = test.built_file_path('obj/test_opt_intrinsic.ninja',
+ chdir=CHDIR)
+ test.must_contain(ninja_file, '/Oi')
+ test.must_not_contain(ninja_file, '/Oi-')
+
+ ninja_file = test.built_file_path('obj/test_opt_intrinsic_off.ninja',
+ chdir=CHDIR)
+ test.must_contain(ninja_file, '/Oi-')
+
+ ninja_file = test.built_file_path('obj/test_opt_inline_off.ninja',
+ chdir=CHDIR)
+ test.must_contain(ninja_file, '/Ob0')
+
+ ninja_file = test.built_file_path('obj/test_opt_inline_manual.ninja',
+ chdir=CHDIR)
+ test.must_contain(ninja_file, '/Ob1')
+
+ ninja_file = test.built_file_path('obj/test_opt_inline_auto.ninja',
+ chdir=CHDIR)
+ test.must_contain(ninja_file, '/Ob2')
+
+ ninja_file = test.built_file_path('obj/test_opt_neither.ninja',
+ chdir=CHDIR)
+ test.must_not_contain(ninja_file, '/Os')
+ test.must_not_contain(ninja_file, '/Ot')
+
+ ninja_file = test.built_file_path('obj/test_opt_size.ninja',
+ chdir=CHDIR)
+ test.must_contain(ninja_file, '/Os')
+
+ ninja_file = test.built_file_path('obj/test_opt_speed.ninja',
+ chdir=CHDIR)
+ test.must_contain(ninja_file, '/Ot')
+
+ ninja_file = test.built_file_path('obj/test_opt_wpo.ninja',
+ chdir=CHDIR)
+ test.must_contain(ninja_file, '/GL')
+
+ ninja_file = test.built_file_path('obj/test_opt_sp.ninja',
+ chdir=CHDIR)
+ test.must_contain(ninja_file, '/GF')
+
+ ninja_file = test.built_file_path('obj/test_opt_sp_off.ninja',
+ chdir=CHDIR)
+ test.must_not_contain(ninja_file, '/GF')
+
+ ninja_file = test.built_file_path('obj/test_opt_fso.ninja',
+ chdir=CHDIR)
+ test.must_contain(ninja_file, '/GT')
+
+ ninja_file = test.built_file_path('obj/test_opt_fso_off.ninja',
+ chdir=CHDIR)
+ test.must_not_contain(ninja_file, '/GT')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-pdbname-override.py b/third_party/python/gyp/test/win/gyptest-cl-pdbname-override.py
new file mode 100644
index 0000000000..da9b49af16
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-pdbname-override.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure pdb is named as expected (shared between .cc files).
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp()
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('pdbname-override.gyp', chdir=CHDIR)
+ test.build('pdbname-override.gyp', test.ALL, chdir=CHDIR)
+
+ # Confirm that the pdb generated by the compiler was renamed (and we also
+ # have the linker generated one).
+ test.built_file_must_exist('compiler_generated.pdb', chdir=CHDIR)
+ test.built_file_must_exist('linker_generated.pdb', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-pdbname.py b/third_party/python/gyp/test/win/gyptest-cl-pdbname.py
new file mode 100644
index 0000000000..f09ac233cd
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-pdbname.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure pdb is named as expected (shared between .cc files).
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('pdbname.gyp', chdir=CHDIR)
+ test.build('pdbname.gyp', test.ALL, chdir=CHDIR)
+
+ # Confirm that the default behaviour is to name the .pdb per-target (rather
+ # than per .cc file).
+ test.built_file_must_exist('obj/test_pdbname.cc.pdb', chdir=CHDIR)
+
+  # Confirm that there is a .pdb alongside the executable.
+ test.built_file_must_exist('test_pdbname.exe', chdir=CHDIR)
+ test.built_file_must_exist('test_pdbname.exe.pdb', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-rtti.py b/third_party/python/gyp/test/win/gyptest-cl-rtti.py
new file mode 100644
index 0000000000..d49a094379
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-rtti.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure RTTI setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('rtti.gyp', chdir=CHDIR)
+
+ # Must fail.
+ test.build('rtti.gyp', 'test_rtti_off', chdir=CHDIR, status=1)
+
+ # Must succeed.
+ test.build('rtti.gyp', 'test_rtti_on', chdir=CHDIR)
+
+ # Must succeed.
+ test.build('rtti.gyp', 'test_rtti_unset', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-runtime-checks.py b/third_party/python/gyp/test/win/gyptest-cl-runtime-checks.py
new file mode 100644
index 0000000000..4fd529f892
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-runtime-checks.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure RTC setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('runtime-checks.gyp', chdir=CHDIR)
+
+ # Runtime checks disabled, should fail.
+ test.build('runtime-checks.gyp', 'test_brc_none', chdir=CHDIR, status=1)
+
+ # Runtime checks enabled, should pass.
+ test.build('runtime-checks.gyp', 'test_brc_1', chdir=CHDIR)
+
+ # TODO(scottmg): There are other less frequently used/partial options, but
+ # it's not clear how to verify them, so ignore for now.
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-runtime-library.py b/third_party/python/gyp/test/win/gyptest-cl-runtime-library.py
new file mode 100644
index 0000000000..53c149297b
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-runtime-library.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure runtime C library setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('runtime-library.gyp', chdir=CHDIR)
+ test.build('runtime-library.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-treat-wchar-t-as-built-in-type.py b/third_party/python/gyp/test/win/gyptest-cl-treat-wchar-t-as-built-in-type.py
new file mode 100644
index 0000000000..ca35fb55a0
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-treat-wchar-t-as-built-in-type.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure TreatWChar_tAsBuiltInType option is functional.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('treat-wchar-t-as-built-in-type.gyp', chdir=CHDIR)
+ test.build('treat-wchar-t-as-built-in-type.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-warning-as-error.py b/third_party/python/gyp/test/win/gyptest-cl-warning-as-error.py
new file mode 100644
index 0000000000..d4ef1b362b
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-warning-as-error.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure warning-as-error is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('warning-as-error.gyp', chdir=CHDIR)
+
+ # The source file contains a warning, so if WarnAsError is false (or
+ # default, which is also false), then the build should succeed, otherwise it
+ # must fail.
+
+ test.build('warning-as-error.gyp', 'test_warn_as_error_false', chdir=CHDIR)
+ test.build('warning-as-error.gyp', 'test_warn_as_error_unset', chdir=CHDIR)
+ test.build('warning-as-error.gyp', 'test_warn_as_error_true', chdir=CHDIR,
+ status=1)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-cl-warning-level.py b/third_party/python/gyp/test/win/gyptest-cl-warning-level.py
new file mode 100644
index 0000000000..62a5b39b6a
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-cl-warning-level.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure warning level is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'compiler-flags'
+ test.run_gyp('warning-level.gyp', chdir=CHDIR)
+
+ # A separate target for each warning level: one pass (compiling a file
+ # containing a warning that's above the specified level); and one fail
+ # (compiling a file at the specified level). No pass for 4 of course,
+ # because it would have to have no warnings. The default warning level is
+ # equivalent to level 1.
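+  # For example, test_wl2_fail compiles warning-level2.cc (which triggers a
+  # level 2 warning) at WarningLevel 2 with WarnAsError, while test_wl2_pass
+  # compiles warning-level3.cc, whose warning is above level 2.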
+
+ test.build('warning-level.gyp', 'test_wl1_fail', chdir=CHDIR, status=1)
+ test.build('warning-level.gyp', 'test_wl1_pass', chdir=CHDIR)
+
+ test.build('warning-level.gyp', 'test_wl2_fail', chdir=CHDIR, status=1)
+ test.build('warning-level.gyp', 'test_wl2_pass', chdir=CHDIR)
+
+ test.build('warning-level.gyp', 'test_wl3_fail', chdir=CHDIR, status=1)
+ test.build('warning-level.gyp', 'test_wl3_pass', chdir=CHDIR)
+
+ test.build('warning-level.gyp', 'test_wl4_fail', chdir=CHDIR, status=1)
+
+ test.build('warning-level.gyp', 'test_def_fail', chdir=CHDIR, status=1)
+ test.build('warning-level.gyp', 'test_def_pass', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-command-quote.py b/third_party/python/gyp/test/win/gyptest-command-quote.py
new file mode 100644
index 0000000000..656a69da53
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-command-quote.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+
+Make sure the program in a command can be a batch file invoked via "call", or
+an application in the path. Specifically, this means not quoting something
+like "call x.bat", lest the shell look for a program named "call x.bat"
+rather than calling "x.bat".
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+ CHDIR = 'command-quote'
+ test.run_gyp('command-quote.gyp', chdir=CHDIR)
+
+ test.build('command-quote.gyp', 'test_batch', chdir=CHDIR)
+ test.build('command-quote.gyp', 'test_call_separate', chdir=CHDIR)
+ test.build('command-quote.gyp', 'test_with_double_quotes', chdir=CHDIR)
+ test.build('command-quote.gyp', 'test_with_single_quotes', chdir=CHDIR)
+
+ # We confirm that this fails because other generators don't handle spaces in
+  # inputs, so it's preferable not to have it work here.
+ test.build('command-quote.gyp', 'test_with_spaces', chdir=CHDIR, status=1)
+
+ CHDIR = 'command-quote/subdir/and/another'
+ test.run_gyp('in-subdir.gyp', chdir=CHDIR)
+ test.build('in-subdir.gyp', 'test_batch_depth', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-crosscompile-ar.py b/third_party/python/gyp/test/win/gyptest-crosscompile-ar.py
new file mode 100644
index 0000000000..dc75d96a84
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-crosscompile-ar.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that ar_host is set correctly when enabling cross-compile on Windows.
+"""
+
+import TestGyp
+
+import sys
+import os
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ CHDIR = 'lib-crosscompile'
+ oldenv = os.environ.copy()
+ try:
+ os.environ['GYP_CROSSCOMPILE'] = '1'
+ test.run_gyp('use_host_ar.gyp', chdir=CHDIR)
+ finally:
+ os.environ.clear()
+ os.environ.update(oldenv)
+
+ test.build('use_host_ar.gyp', test.ALL, chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-lib-ltcg.py b/third_party/python/gyp/test/win/gyptest-lib-ltcg.py
new file mode 100644
index 0000000000..d1d7bad840
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-lib-ltcg.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure LTCG setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'lib-flags'
+ test.run_gyp('ltcg.gyp', chdir=CHDIR)
+ test.build('ltcg.gyp', test.ALL, chdir=CHDIR)
+ test.must_not_contain_any_line(test.stdout(), ['restarting link with /LTCG'])
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-additional-deps.py b/third_party/python/gyp/test/win/gyptest-link-additional-deps.py
new file mode 100644
index 0000000000..62c57366f9
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-additional-deps.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure additional library dependencies are handled.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('additional-deps.gyp', chdir=CHDIR)
+ test.build('additional-deps.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-additional-options.py b/third_party/python/gyp/test/win/gyptest-link-additional-options.py
new file mode 100644
index 0000000000..7e57ae4764
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-additional-options.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure additional options are handled.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('additional-options.gyp', chdir=CHDIR)
+ test.build('additional-options.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-aslr.py b/third_party/python/gyp/test/win/gyptest-link-aslr.py
new file mode 100644
index 0000000000..e765017d3b
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-aslr.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure aslr setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('aslr.gyp', chdir=CHDIR)
+ test.build('aslr.gyp', test.ALL, chdir=CHDIR)
+
+ def HasDynamicBase(exe):
+ full_path = test.built_file_path(exe, chdir=CHDIR)
+ output = test.run_dumpbin('/headers', full_path)
+ return ' Dynamic base' in output
+
+ # Default is to be on.
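+  # (RandomizedBaseAddress maps to /DYNAMICBASE[:NO]; dumpbin /headers lists
+  # 'Dynamic base' among the DLL characteristics when ASLR is enabled.)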
+ if not HasDynamicBase('test_aslr_default.exe'):
+ test.fail_test()
+ if HasDynamicBase('test_aslr_no.exe'):
+ test.fail_test()
+ if not HasDynamicBase('test_aslr_yes.exe'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-base-address.py b/third_party/python/gyp/test/win/gyptest-link-base-address.py
new file mode 100644
index 0000000000..d58527ad7e
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-base-address.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure the base address setting is extracted properly.
+"""
+
+import TestGyp
+
+import re
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('base-address.gyp', chdir=CHDIR)
+ test.build('base-address.gyp', test.ALL, chdir=CHDIR)
+
+ def GetHeaders(exe):
+ full_path = test.built_file_path(exe, chdir=CHDIR)
+ return test.run_dumpbin('/headers', full_path)
+
+ # Extract the image base address from the headers output.
+ image_base_reg_ex = re.compile(r'.*\s+([0-9]+) image base.*', re.DOTALL)
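+  # dumpbin /headers prints a line such as '  10000000 image base
+  # (10000000 to ...)'; the regex captures the leading hex value for the
+  # comparisons below.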
+
+ exe_headers = GetHeaders('test_base_specified_exe.exe')
+ exe_match = image_base_reg_ex.match(exe_headers)
+
+ if not exe_match or not exe_match.group(1):
+ test.fail_test()
+ if exe_match.group(1) != '420000':
+ test.fail_test()
+
+ dll_headers = GetHeaders('test_base_specified_dll.dll')
+ dll_match = image_base_reg_ex.match(dll_headers)
+
+ if not dll_match or not dll_match.group(1):
+ test.fail_test()
+ if dll_match.group(1) != '10420000':
+ test.fail_test()
+
+ default_exe_headers = GetHeaders('test_base_default_exe.exe')
+ default_exe_match = image_base_reg_ex.match(default_exe_headers)
+
+ if not default_exe_match or not default_exe_match.group(1):
+ test.fail_test()
+ if default_exe_match.group(1) != '400000':
+ test.fail_test()
+
+ default_dll_headers = GetHeaders('test_base_default_dll.dll')
+ default_dll_match = image_base_reg_ex.match(default_dll_headers)
+
+ if not default_dll_match or not default_dll_match.group(1):
+ test.fail_test()
+ if default_dll_match.group(1) != '10000000':
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-debug-info.py b/third_party/python/gyp/test/win/gyptest-link-debug-info.py
new file mode 100644
index 0000000000..33e8ac48bf
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-debug-info.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure debug info setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('debug-info.gyp', chdir=CHDIR)
+ test.build('debug-info.gyp', test.ALL, chdir=CHDIR)
+
+ suffix = '.exe.pdb' if test.format == 'ninja' else '.pdb'
+ test.built_file_must_not_exist('test_debug_off%s' % suffix, chdir=CHDIR)
+ test.built_file_must_exist('test_debug_on%s' % suffix, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-default-libs.py b/third_party/python/gyp/test/win/gyptest-link-default-libs.py
new file mode 100644
index 0000000000..5edf467913
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-default-libs.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure we include the default libs.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('no-default-libs.gyp', chdir=CHDIR)
+ test.build('no-default-libs.gyp', test.ALL, chdir=CHDIR, status=1)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-deffile.py b/third_party/python/gyp/test/win/gyptest-link-deffile.py
new file mode 100644
index 0000000000..94df874f85
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-deffile.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure a .def file is handled in the link.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+
+  # Multiple .def files don't make any sense; this should fail at generate time.
+ test.run_gyp('deffile-multiple.gyp', chdir=CHDIR, stderr=None, status=1)
+
+ test.run_gyp('deffile.gyp', chdir=CHDIR)
+ test.build('deffile.gyp', test.ALL, chdir=CHDIR)
+
+ def HasExport(binary, export):
+ full_path = test.built_file_path(binary, chdir=CHDIR)
+ output = test.run_dumpbin('/exports', full_path)
+ return export in output
+
+ # Make sure we only have the export when the .def file is in use.
+
+ if HasExport('test_deffile_dll_notexported.dll', 'AnExportedFunction'):
+ test.fail_test()
+ if not HasExport('test_deffile_dll_ok.dll', 'AnExportedFunction'):
+ test.fail_test()
+
+ if HasExport('test_deffile_exe_notexported.exe', 'AnExportedFunction'):
+ test.fail_test()
+ if not HasExport('test_deffile_exe_ok.exe', 'AnExportedFunction'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-defrelink.py b/third_party/python/gyp/test/win/gyptest-link-defrelink.py
new file mode 100644
index 0000000000..cb3f86bb0e
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-defrelink.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure a relink is performed when a .def file is touched.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ target = 'test_deffile_dll_ok'
+ def_contents = test.read('linker-flags/deffile.def')
+
+ # This first build makes sure everything is up to date.
+ test.run_gyp('deffile.gyp', chdir=CHDIR)
+ test.build('deffile.gyp', target, chdir=CHDIR)
+ test.up_to_date('deffile.gyp', target, chdir=CHDIR)
+
+ def HasExport(binary, export):
+ full_path = test.built_file_path(binary, chdir=CHDIR)
+ output = test.run_dumpbin('/exports', full_path)
+ return export in output
+
+ # Verify that only one function is exported.
+ if not HasExport('test_deffile_dll_ok.dll', 'AnExportedFunction'):
+ test.fail_test()
+ if HasExport('test_deffile_dll_ok.dll', 'AnotherExportedFunction'):
+ test.fail_test()
+
+ # Add AnotherExportedFunction to the def file, then rebuild. If it doesn't
+ # relink the DLL, then the subsequent check for AnotherExportedFunction will
+ # fail.
+ new_def_contents = def_contents + "\n AnotherExportedFunction"
+ test.write('linker-flags/deffile.def', new_def_contents)
+ test.build('deffile.gyp', target, chdir=CHDIR)
+ test.up_to_date('deffile.gyp', target, chdir=CHDIR)
+
+ if not HasExport('test_deffile_dll_ok.dll', 'AnExportedFunction'):
+ test.fail_test()
+ if not HasExport('test_deffile_dll_ok.dll', 'AnotherExportedFunction'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-delay-load-dlls.py b/third_party/python/gyp/test/win/gyptest-link-delay-load-dlls.py
new file mode 100644
index 0000000000..3880247b4a
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-delay-load-dlls.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure delay load setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('delay-load-dlls.gyp', chdir=CHDIR)
+ test.build('delay-load-dlls.gyp', test.ALL, chdir=CHDIR)
+
+ prefix = 'contains the following delay load imports:'
+ shell32_look_for = prefix + '\r\n\r\n SHELL32.dll'
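+  # (DelayLoadDLLs adds /DELAYLOAD:shell32.dll, so dumpbin /all should report
+  # SHELL32.dll in the delay load import table only for test_dld_shell32.)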
+
+ output = test.run_dumpbin(
+ '/all', test.built_file_path('test_dld_none.exe', chdir=CHDIR))
+ if prefix in output:
+ test.fail_test()
+
+ output = test.run_dumpbin(
+ '/all', test.built_file_path('test_dld_shell32.exe', chdir=CHDIR))
+ if shell32_look_for not in output:
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-embed-manifest.py b/third_party/python/gyp/test/win/gyptest-link-embed-manifest.py
new file mode 100644
index 0000000000..0e2b628be1
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-embed-manifest.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Yandex LLC. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure manifests are embedded in binaries properly. Handling of
+AdditionalManifestFiles is tested too.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ import pywintypes
+ import win32api
+ import winerror
+
+ RT_MANIFEST = 24
+
+ class LoadLibrary(object):
+ """Context manager for loading and releasing binaries in Windows.
+ Yields the handle of the binary loaded."""
+ def __init__(self, path):
+ self._path = path
+ self._handle = None
+
+ def __enter__(self):
+ self._handle = win32api.LoadLibrary(self._path)
+ return self._handle
+
+ def __exit__(self, type, value, traceback):
+ win32api.FreeLibrary(self._handle)
+
+
+ def extract_manifest(path, resource_name):
+ """Reads manifest from |path| and returns it as a string.
+    Returns None if there is no such manifest."""
+ with LoadLibrary(path) as handle:
+ try:
+ return win32api.LoadResource(
+ handle, RT_MANIFEST, resource_name).decode('utf-8', 'ignore')
+ except pywintypes.error as error:
+ if error.args[0] == winerror.ERROR_RESOURCE_DATA_NOT_FOUND:
+ return None
+ else:
+ raise
+
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+ CHDIR = 'linker-flags'
+ test.run_gyp('embed-manifest.gyp', chdir=CHDIR)
+ test.build('embed-manifest.gyp', test.ALL, chdir=CHDIR)
+
+ # The following binaries must contain a manifest embedded.
+ test.fail_test(not extract_manifest(test.built_file_path(
+ 'test_manifest_exe.exe', chdir=CHDIR), 1))
+ test.fail_test(not extract_manifest(test.built_file_path(
+ 'test_manifest_exe_inc.exe', chdir=CHDIR), 1))
+ test.fail_test(not extract_manifest(test.built_file_path(
+ 'test_manifest_dll.dll', chdir=CHDIR), 2))
+ test.fail_test(not extract_manifest(test.built_file_path(
+ 'test_manifest_dll_inc.dll', chdir=CHDIR), 2))
+
+ # Must contain the Win7 support GUID, but not the Vista one (from
+ # extra2.manifest).
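+  # (35138b9a-... is the Windows 7 compatibility GUID and e2011457-... the
+  # Vista one; extra1.manifest is expected to declare only the former.)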
+ test.fail_test(
+ '35138b9a-5d96-4fbd-8e2d-a2440225f93a' not in
+ extract_manifest(test.built_file_path('test_manifest_extra1.exe',
+ chdir=CHDIR), 1))
+ test.fail_test(
+ 'e2011457-1546-43c5-a5fe-008deee3d3f0' in
+ extract_manifest(test.built_file_path('test_manifest_extra1.exe',
+ chdir=CHDIR), 1))
+ # Must contain both.
+ test.fail_test(
+ '35138b9a-5d96-4fbd-8e2d-a2440225f93a' not in
+ extract_manifest(test.built_file_path('test_manifest_extra2.exe',
+ chdir=CHDIR), 1))
+ test.fail_test(
+ 'e2011457-1546-43c5-a5fe-008deee3d3f0' not in
+ extract_manifest(test.built_file_path('test_manifest_extra2.exe',
+ chdir=CHDIR), 1))
+
+ # Same as extra2, but using list syntax instead.
+ test.fail_test(
+ '35138b9a-5d96-4fbd-8e2d-a2440225f93a' not in
+ extract_manifest(test.built_file_path('test_manifest_extra_list.exe',
+ chdir=CHDIR), 1))
+ test.fail_test(
+ 'e2011457-1546-43c5-a5fe-008deee3d3f0' not in
+ extract_manifest(test.built_file_path('test_manifest_extra_list.exe',
+ chdir=CHDIR), 1))
+
+ # Test that incremental linking doesn't force manifest embedding.
+ test.fail_test(extract_manifest(test.built_file_path(
+ 'test_manifest_exe_inc_no_embed.exe', chdir=CHDIR), 1))
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-enable-uac.py b/third_party/python/gyp/test/win/gyptest-link-enable-uac.py
new file mode 100644
index 0000000000..0ddbde5fa5
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-enable-uac.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that embedding UAC information into the manifest works.
+"""
+
+import TestGyp
+
+import sys
+from xml.dom.minidom import parseString
+
+if sys.platform == 'win32':
+ import pywintypes
+ import win32api
+ import winerror
+
+ RT_MANIFEST = 24
+
+ class LoadLibrary(object):
+ """Context manager for loading and releasing binaries in Windows.
+ Yields the handle of the binary loaded."""
+ def __init__(self, path):
+ self._path = path
+ self._handle = None
+
+ def __enter__(self):
+ self._handle = win32api.LoadLibrary(self._path)
+ return self._handle
+
+ def __exit__(self, type, value, traceback):
+ win32api.FreeLibrary(self._handle)
+
+
+ def extract_manifest(path, resource_name):
+ """Reads manifest from |path| and returns it as a string.
+    Returns None if there is no such manifest."""
+ with LoadLibrary(path) as handle:
+ try:
+ return win32api.LoadResource(handle, RT_MANIFEST, resource_name)
+ except pywintypes.error as error:
+ if error.args[0] == winerror.ERROR_RESOURCE_DATA_NOT_FOUND:
+ return None
+ else:
+ raise
+
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+ CHDIR = 'linker-flags'
+ test.run_gyp('enable-uac.gyp', chdir=CHDIR)
+ test.build('enable-uac.gyp', test.ALL, chdir=CHDIR)
+
+  # The following binaries must contain an embedded manifest.
+ test.fail_test(not extract_manifest(test.built_file_path(
+ 'enable_uac.exe', chdir=CHDIR), 1))
+ test.fail_test(not extract_manifest(test.built_file_path(
+ 'enable_uac_no.exe', chdir=CHDIR), 1))
+ test.fail_test(not extract_manifest(test.built_file_path(
+ 'enable_uac_admin.exe', chdir=CHDIR), 1))
+
+ # Verify that <requestedExecutionLevel level="asInvoker" uiAccess="false" />
+ # is present.
+ manifest = parseString(extract_manifest(
+ test.built_file_path('enable_uac.exe', chdir=CHDIR), 1))
+ execution_level = manifest.getElementsByTagName('requestedExecutionLevel')
+ test.fail_test(len(execution_level) != 1)
+ execution_level = execution_level[0].attributes
+
+ def _has_key(node, key):
+ # 'in' doesn't work with the NamedNodeMap interface in Python2,
+ # but 'has_key' was removed from it in Python3, so we need to
+ # shim things :(.
+ if hasattr(node, 'has_key'):
+ return node.has_key(key)
+ return key in node
+
+ test.fail_test(not (
+ _has_key(execution_level, 'level') and
+ _has_key(execution_level, 'uiAccess') and
+ execution_level['level'].nodeValue == 'asInvoker' and
+ execution_level['uiAccess'].nodeValue == 'false'))
+
+  # Verify that <requestedExecutionLevel> is not in the manifest.
+ manifest = parseString(extract_manifest(
+ test.built_file_path('enable_uac_no.exe', chdir=CHDIR), 1))
+ execution_level = manifest.getElementsByTagName('requestedExecutionLevel')
+ test.fail_test(len(execution_level) != 0)
+
+ # Verify that <requestedExecutionLevel level="requireAdministrator"
+ # uiAccess="true" /> is present.
+ manifest = parseString(extract_manifest(
+ test.built_file_path('enable_uac_admin.exe', chdir=CHDIR), 1))
+ execution_level = manifest.getElementsByTagName('requestedExecutionLevel')
+ test.fail_test(len(execution_level) != 1)
+ execution_level = execution_level[0].attributes
+ test.fail_test(not (
+ _has_key(execution_level, 'level') and
+ _has_key(execution_level, 'uiAccess') and
+ execution_level['level'].nodeValue == 'requireAdministrator' and
+ execution_level['uiAccess'].nodeValue == 'true'))
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-enable-winrt-app-revision.py b/third_party/python/gyp/test/win/gyptest-link-enable-winrt-app-revision.py
new file mode 100644
index 0000000000..45d86e1c69
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-enable-winrt-app-revision.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure msvs_application_type_revision works correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import os
+import sys
+import struct
+
+CHDIR = 'winrt-app-type-revision'
+
+print('This test is not currently working on the bots: https://code.google.com/p/gyp/issues/detail?id=466')
+sys.exit(0)
+
+if (sys.platform == 'win32' and
+ int(os.environ.get('GYP_MSVS_VERSION', 0)) == 2013):
+ test = TestGyp.TestGyp(formats=['msvs'])
+
+ test.run_gyp('winrt-app-type-revision.gyp', chdir=CHDIR)
+
+ test.build('winrt-app-type-revision.gyp', 'enable_winrt_81_revision_dll',
+ chdir=CHDIR)
+
+ # Revision is set to 8.2 which is invalid for 2013 projects so compilation
+ # must fail.
+ test.build('winrt-app-type-revision.gyp', 'enable_winrt_82_revision_dll',
+ chdir=CHDIR, status=1)
+
+ # Revision is set to an invalid value for 2013 projects so compilation
+ # must fail.
+ test.build('winrt-app-type-revision.gyp', 'enable_winrt_invalid_revision_dll',
+ chdir=CHDIR, status=1)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-enable-winrt-target-platform-version.py b/third_party/python/gyp/test/win/gyptest-link-enable-winrt-target-platform-version.py
new file mode 100644
index 0000000000..877cb68f8c
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-enable-winrt-target-platform-version.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure msvs_target_platform_version works correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import os
+import sys
+import struct
+
+CHDIR = 'winrt-target-platform-version'
+
+print('This test is not currently working on the bots: https://code.google.com/p/gyp/issues/detail?id=466')
+sys.exit(0)
+
+if (sys.platform == 'win32' and
+ int(os.environ.get('GYP_MSVS_VERSION', 0)) == 2015):
+ test = TestGyp.TestGyp(formats=['msvs'])
+
+ test.run_gyp('winrt-target-platform-version.gyp', chdir=CHDIR)
+
+ test.build('winrt-target-platform-version.gyp',
+ 'enable_winrt_10_platversion_dll', chdir=CHDIR)
+
+ # Target Platform without Minimum Target Platform version defaults to a valid
+ # Target Platform and compiles.
+ test.build('winrt-target-platform-version.gyp',
+ 'enable_winrt_10_platversion_nominver_dll', chdir=CHDIR)
+
+ # Target Platform is set to 9.0 which is invalid for 2015 projects so
+ # compilation must fail.
+ test.build('winrt-target-platform-version.gyp',
+ 'enable_winrt_9_platversion_dll', chdir=CHDIR, status=1)
+
+ # Missing Target Platform for 2015 projects must fail.
+ test.build('winrt-target-platform-version.gyp',
+ 'enable_winrt_missing_platversion_dll', chdir=CHDIR, status=1)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-enable-winrt.py b/third_party/python/gyp/test/win/gyptest-link-enable-winrt.py
new file mode 100644
index 0000000000..5e0493aade
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-enable-winrt.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure msvs_enable_winrt works correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import os
+import sys
+import struct
+
+CHDIR = 'enable-winrt'
+
+print('This test is not currently working on the bots: https://code.google.com/p/gyp/issues/detail?id=466')
+sys.exit(0)
+
+if (sys.platform == 'win32' and
+ int(os.environ.get('GYP_MSVS_VERSION', 0)) >= 2013):
+ test = TestGyp.TestGyp(formats=['msvs'])
+
+ test.run_gyp('enable-winrt.gyp', chdir=CHDIR)
+
+ test.build('enable-winrt.gyp', 'enable_winrt_dll', chdir=CHDIR)
+
+ test.build('enable-winrt.gyp', 'enable_winrt_missing_dll', chdir=CHDIR,
+ status=1)
+
+ test.build('enable-winrt.gyp', 'enable_winrt_winphone_dll', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-entrypointsymbol.py b/third_party/python/gyp/test/win/gyptest-link-entrypointsymbol.py
new file mode 100644
index 0000000000..e88174a085
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-entrypointsymbol.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure entrypointsymbol setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('entrypointsymbol.gyp', chdir=CHDIR)
+
+ test.build('entrypointsymbol.gyp', 'test_ok', chdir=CHDIR)
+ test.build('entrypointsymbol.gyp', 'test_fail', chdir=CHDIR, status=1)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-fixed-base.py b/third_party/python/gyp/test/win/gyptest-link-fixed-base.py
new file mode 100644
index 0000000000..725a87028a
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-fixed-base.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure fixed base setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('fixed-base.gyp', chdir=CHDIR)
+ test.build('fixed-base.gyp', test.ALL, chdir=CHDIR)
+
+ def GetHeaders(exe):
+ full_path = test.built_file_path(exe, chdir=CHDIR)
+ return test.run_dumpbin('/headers', full_path)
+
+ # For exe, default is fixed, for dll, it's not fixed.
+ if 'Relocations stripped' not in GetHeaders('test_fixed_default_exe.exe'):
+ test.fail_test()
+ if 'Relocations stripped' in GetHeaders('test_fixed_default_dll.dll'):
+ test.fail_test()
+
+ # Explicitly not fixed.
+ if 'Relocations stripped' in GetHeaders('test_fixed_no.exe'):
+ test.fail_test()
+
+ # Explicitly fixed.
+ if 'Relocations stripped' not in GetHeaders('test_fixed_yes.exe'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-force-symbol-reference.py b/third_party/python/gyp/test/win/gyptest-link-force-symbol-reference.py
new file mode 100644
index 0000000000..235e94f71b
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-force-symbol-reference.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure ForceSymbolReference is translated properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('force-symbol-reference.gyp', chdir=CHDIR)
+ test.build('force-symbol-reference.gyp', test.ALL, chdir=CHDIR)
+
+ output = test.run_dumpbin(
+ '/disasm', test.built_file_path('test_force_reference.exe', chdir=CHDIR))
+ if '?x@@YAHXZ:' not in output or '?y@@YAHXZ:' not in output:
+ test.fail_test()
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-generate-manifest.py b/third_party/python/gyp/test/win/gyptest-link-generate-manifest.py
new file mode 100644
index 0000000000..77c9228ee5
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-generate-manifest.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure we generate a manifest file when linking binaries, including
+handling AdditionalManifestFiles.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ import pywintypes
+ import win32api
+ import winerror
+
+ RT_MANIFEST = 24
+
+ class LoadLibrary(object):
+ """Context manager for loading and releasing binaries in Windows.
+ Yields the handle of the binary loaded."""
+ def __init__(self, path):
+ self._path = path
+ self._handle = None
+
+ def __enter__(self):
+ self._handle = win32api.LoadLibrary(self._path)
+ return self._handle
+
+ def __exit__(self, type, value, traceback):
+ win32api.FreeLibrary(self._handle)
+
+ def extract_manifest(path, resource_name):
+ """Reads manifest from |path| and returns it as a string.
+    Returns None if there is no such manifest."""
+ with LoadLibrary(path) as handle:
+ try:
+ return win32api.LoadResource(handle, RT_MANIFEST, resource_name)
+ except pywintypes.error as error:
+ if error.args[0] == winerror.ERROR_RESOURCE_DATA_NOT_FOUND:
+ return None
+ else:
+ raise
+
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('generate-manifest.gyp', chdir=CHDIR)
+ test.build('generate-manifest.gyp', test.ALL, chdir=CHDIR)
+
+ # Make sure that generation of .generated.manifest does not cause a relink.
+ test.run_gyp('generate-manifest.gyp', chdir=CHDIR)
+ test.up_to_date('generate-manifest.gyp', test.ALL, chdir=CHDIR)
+
+ def test_manifest(filename, generate_manifest, embedded_manifest,
+ extra_manifest):
+ exe_file = test.built_file_path(filename, chdir=CHDIR)
+ if not generate_manifest:
+ test.must_not_exist(exe_file + '.manifest')
+ manifest = extract_manifest(exe_file, 1)
+ test.fail_test(manifest)
+ return
+ if embedded_manifest:
+ manifest = extract_manifest(exe_file, 1)
+ test.fail_test(not manifest)
+ else:
+ test.must_exist(exe_file + '.manifest')
+ manifest = test.read(exe_file + '.manifest')
+ test.fail_test(not manifest)
+ test.fail_test(extract_manifest(exe_file, 1))
+ if generate_manifest:
+ test.must_contain_any_line(manifest, 'requestedExecutionLevel')
+ if extra_manifest:
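+      # The GUIDs below come from the extra manifest files: the first is the
+      # Windows 7 compatibility GUID, the second the Windows Vista one.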
+ test.must_contain_any_line(manifest,
+ '35138b9a-5d96-4fbd-8e2d-a2440225f93a')
+ test.must_contain_any_line(manifest,
+ 'e2011457-1546-43c5-a5fe-008deee3d3f0')
+
+ test_manifest('test_generate_manifest_true.exe',
+ generate_manifest=True,
+ embedded_manifest=False,
+ extra_manifest=False)
+ test_manifest('test_generate_manifest_false.exe',
+ generate_manifest=False,
+ embedded_manifest=False,
+ extra_manifest=False)
+ test_manifest('test_generate_manifest_default.exe',
+ generate_manifest=True,
+ embedded_manifest=False,
+ extra_manifest=False)
+ test_manifest('test_generate_manifest_true_as_embedded.exe',
+ generate_manifest=True,
+ embedded_manifest=True,
+ extra_manifest=False)
+ test_manifest('test_generate_manifest_false_as_embedded.exe',
+ generate_manifest=False,
+ embedded_manifest=True,
+ extra_manifest=False)
+ test_manifest('test_generate_manifest_default_as_embedded.exe',
+ generate_manifest=True,
+ embedded_manifest=True,
+ extra_manifest=False)
+ test_manifest('test_generate_manifest_true_with_extra_manifest.exe',
+ generate_manifest=True,
+ embedded_manifest=False,
+ extra_manifest=True)
+ test_manifest('test_generate_manifest_false_with_extra_manifest.exe',
+ generate_manifest=False,
+ embedded_manifest=False,
+ extra_manifest=True)
+ test_manifest('test_generate_manifest_true_with_extra_manifest_list.exe',
+ generate_manifest=True,
+ embedded_manifest=False,
+ extra_manifest=True)
+ test_manifest('test_generate_manifest_false_with_extra_manifest_list.exe',
+ generate_manifest=False,
+ embedded_manifest=False,
+ extra_manifest=True)
+ test_manifest('test_generate_manifest_default_embed_default.exe',
+ generate_manifest=True,
+ embedded_manifest=True,
+ extra_manifest=False)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-incremental.py b/third_party/python/gyp/test/win/gyptest-link-incremental.py
new file mode 100644
index 0000000000..e7184e162c
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-incremental.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure incremental linking setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('incremental.gyp', chdir=CHDIR)
+ test.build('incremental.gyp', test.ALL, chdir=CHDIR)
+
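+  # Incrementally-linked binaries contain @ILT+ jump thunks (the Incremental
+  # Link Table), which show up in the disassembly; their presence is used
+  # here as the signal that incremental linking was in effect.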
+ def HasILTTables(exe):
+ full_path = test.built_file_path(exe, chdir=CHDIR)
+ output = test.run_dumpbin('/disasm', full_path)
+ return '@ILT+' in output
+
+  # Default and unset both mean incremental linking is on.
+ if not HasILTTables('test_incremental_unset.exe'):
+ test.fail_test()
+ if not HasILTTables('test_incremental_default.exe'):
+ test.fail_test()
+ if HasILTTables('test_incremental_no.exe'):
+ test.fail_test()
+ if not HasILTTables('test_incremental_yes.exe'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-large-address-aware.py b/third_party/python/gyp/test/win/gyptest-link-large-address-aware.py
new file mode 100644
index 0000000000..ea433f2099
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-large-address-aware.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure largeaddressaware setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('large-address-aware.gyp', chdir=CHDIR)
+ test.build('large-address-aware.gyp', test.ALL, chdir=CHDIR)
+
+ def GetHeaders(exe):
+ return test.run_dumpbin('/headers', test.built_file_path(exe, chdir=CHDIR))
+
+ MARKER = 'Application can handle large (>2GB) addresses'
+
+ # Explicitly off.
+ if MARKER in GetHeaders('test_large_address_aware_no.exe'):
+ test.fail_test()
+
+ # Explicitly on.
+ if MARKER not in GetHeaders('test_large_address_aware_yes.exe'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-large-pdb.py b/third_party/python/gyp/test/win/gyptest-link-large-pdb.py
new file mode 100644
index 0000000000..1fb27b0ab7
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-large-pdb.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure msvs_large_pdb works correctly.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import struct
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+
+CHDIR = 'large-pdb'
+
+
+def CheckImageAndPdb(test, image_basename, expected_page_size,
+ pdb_basename=None):
+ if not pdb_basename:
+ pdb_basename = image_basename + '.pdb'
+ test.built_file_must_exist(image_basename, chdir=CHDIR)
+ test.built_file_must_exist(pdb_basename, chdir=CHDIR)
+
+ # We expect the PDB to have the given page size. For full details of the
+ # header look here: https://code.google.com/p/pdbparser/wiki/MSF_Format
+ # We read the little-endian 4-byte unsigned integer at position 32 of the
+ # file.
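+  # (Per the MSF 7.0 format docs, the superblock starts with a 32-byte magic
+  # string, followed immediately by the page/block size as a uint32.)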
+ pdb_path = test.built_file_path(pdb_basename, chdir=CHDIR)
+  with open(pdb_path, 'rb') as pdb_file:
+    pdb_file.seek(32, 0)
+    page_size = struct.unpack('<I', pdb_file.read(4))[0]
+  if page_size != expected_page_size:
+    print("Expected page size of %d, got %d for PDB file `%s'." % (
+        expected_page_size, page_size, pdb_path))
+    test.fail_test()
+
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ test.run_gyp('large-pdb.gyp', chdir=CHDIR)
+
+ test.build('large-pdb.gyp', 'large_pdb_exe', chdir=CHDIR)
+ CheckImageAndPdb(test, 'large_pdb_exe.exe', 4096)
+
+ test.build('large-pdb.gyp', 'small_pdb_exe', chdir=CHDIR)
+ CheckImageAndPdb(test, 'small_pdb_exe.exe', 1024)
+
+ test.build('large-pdb.gyp', 'large_pdb_dll', chdir=CHDIR)
+ CheckImageAndPdb(test, 'large_pdb_dll.dll', 4096)
+
+ test.build('large-pdb.gyp', 'small_pdb_dll', chdir=CHDIR)
+ CheckImageAndPdb(test, 'small_pdb_dll.dll', 1024)
+
+ test.build('large-pdb.gyp', 'large_pdb_implicit_exe', chdir=CHDIR)
+ CheckImageAndPdb(test, 'large_pdb_implicit_exe.exe', 4096)
+
+ # This target has a different PDB name because it uses an
+ # 'msvs_large_pdb_path' variable.
+ test.build('large-pdb.gyp', 'large_pdb_variable_exe', chdir=CHDIR)
+ CheckImageAndPdb(test, 'large_pdb_variable_exe.exe', 4096,
+ pdb_basename='foo.pdb')
+
+ # This target has a different output name because it uses 'product_name'.
+ test.build('large-pdb.gyp', 'large_pdb_product_exe', chdir=CHDIR)
+ CheckImageAndPdb(test, 'bar.exe', 4096)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-library-adjust.py b/third_party/python/gyp/test/win/gyptest-link-library-adjust.py
new file mode 100644
index 0000000000..71d1c09360
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-library-adjust.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure link_settings containing -lblah.lib is remapped to just blah.lib.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('library-adjust.gyp', chdir=CHDIR)
+ test.build('library-adjust.gyp', test.ALL, chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-library-directories.py b/third_party/python/gyp/test/win/gyptest-link-library-directories.py
new file mode 100644
index 0000000000..8308e14fcb
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-library-directories.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure libpath is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+
+ # Build subdirectory library.
+ test.run_gyp('subdir/library.gyp', chdir=CHDIR)
+ test.build('subdir/library.gyp', test.ALL, chdir=CHDIR)
+
+ # And then try to link the main project against the library using only
+ # LIBPATH to find it.
+ test.run_gyp('library-directories.gyp', chdir=CHDIR)
+
+ # Without additional paths specified, should fail.
+ test.build('library-directories.gyp', 'test_libdirs_none', chdir=CHDIR,
+ status=1)
+
+ # With the additional library directory, should pass.
+ test.build('library-directories.gyp', 'test_libdirs_with', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-ltcg.py b/third_party/python/gyp/test/win/gyptest-link-ltcg.py
new file mode 100644
index 0000000000..5271e099d7
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-ltcg.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure LTCG is working properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('ltcg.gyp', chdir=CHDIR)
+
+  # Here we expect LTCG to inline functions across compilation units.
+ # Note: This marker is embedded in 'inline_test_main.cc'
+ INLINE_MARKER = '==== inlined ===='
+
+  # link.exe generates the following lines when LTCG is enabled.
+ # Note: Future link.exe may or may not generate them. Update as needed.
+ LTCG_LINKER_MESSAGES = ['Generating code', 'Finished generating code']
+
+ # test 'LinkTimeCodeGenerationOptionDefault'
+ test.build('ltcg.gyp', 'test_ltcg_off', chdir=CHDIR)
+ test.run_built_executable('test_ltcg_off', chdir=CHDIR)
+ test.must_not_contain_any_line(test.stdout(), [INLINE_MARKER])
+
+ # test 'LinkTimeCodeGenerationOptionUse'
+ test.build('ltcg.gyp', 'test_ltcg_on', chdir=CHDIR)
+ if test.format == 'ninja':
+ # Make sure ninja win_tool.py filters out noisy lines.
+ test.must_not_contain_any_line(test.stdout(), LTCG_LINKER_MESSAGES)
+ elif test.format == 'msvs':
+ test.must_contain_any_line(test.stdout(), LTCG_LINKER_MESSAGES)
+ test.run_built_executable('test_ltcg_on', chdir=CHDIR)
+ test.must_contain_any_line(test.stdout(), [INLINE_MARKER])
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-mapfile.py b/third_party/python/gyp/test/win/gyptest-link-mapfile.py
new file mode 100644
index 0000000000..00c1dea9e9
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-mapfile.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure mapfile settings are extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('mapfile.gyp', chdir=CHDIR)
+ test.build('mapfile.gyp', test.ALL, chdir=CHDIR)
+
+ map_file = test.built_file_path('test_mapfile_unset.map', chdir=CHDIR)
+ test.must_not_exist(map_file)
+
+ map_file = test.built_file_path('test_mapfile_generate.map', chdir=CHDIR)
+ test.must_exist(map_file)
+ test.must_contain(map_file, '?AnExportedFunction@@YAXXZ')
+ test.must_not_contain(map_file, 'void __cdecl AnExportedFunction(void)')
+
+ map_file = test.built_file_path('test_mapfile_generate_exports.map',
+ chdir=CHDIR)
+ test.must_exist(map_file)
+ test.must_contain(map_file, 'void __cdecl AnExportedFunction(void)')
+
+ map_file = test.built_file_path('test_mapfile_generate_filename.map',
+ chdir=CHDIR)
+ test.must_not_exist(map_file)
+
+ map_file = test.built_file_path('custom_file_name.map', chdir=CHDIR)
+ test.must_exist(map_file)
+ test.must_contain(map_file, '?AnExportedFunction@@YAXXZ')
+ test.must_not_contain(map_file, 'void __cdecl AnExportedFunction(void)')
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-nodefaultlib.py b/third_party/python/gyp/test/win/gyptest-link-nodefaultlib.py
new file mode 100644
index 0000000000..f00760b882
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-nodefaultlib.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure nodefaultlib setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('nodefaultlib.gyp', chdir=CHDIR)
+
+ test.build('nodefaultlib.gyp', 'test_ok', chdir=CHDIR)
+ test.build('nodefaultlib.gyp', 'test_fail', chdir=CHDIR, status=1)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-noimportlib.py b/third_party/python/gyp/test/win/gyptest-link-noimportlib.py
new file mode 100644
index 0000000000..d12e0ad3ed
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-noimportlib.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure that the (custom) NoImportLibrary flag is handled correctly.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ CHDIR = 'importlib'
+ test.run_gyp('noimplib.gyp', chdir=CHDIR)
+ test.build('noimplib.gyp', test.ALL, chdir=CHDIR)
+
+ # The target has an entry point, but no exports. Ordinarily, ninja expects
+ # all DLLs to export some symbols (with the exception of /NOENTRY resource-
+  # only DLLs). When the NoImportLibrary flag is set, this expectation is
+  # suppressed. If the flag is not handled correctly, the .lib will still be
+  # expected but never generated, so the build will never be up to date.
+ test.up_to_date('noimplib.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-nxcompat.py b/third_party/python/gyp/test/win/gyptest-link-nxcompat.py
new file mode 100644
index 0000000000..660074397c
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-nxcompat.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure nxcompat setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('nxcompat.gyp', chdir=CHDIR)
+ test.build('nxcompat.gyp', test.ALL, chdir=CHDIR)
+
+ def GetHeaders(exe):
+ return test.run_dumpbin('/headers', test.built_file_path(exe, chdir=CHDIR))
+
+ # NXCOMPAT is on by default.
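+  # ('NX compatible' in the dumpbin /headers output means the image has opted
+  # in to DEP, Data Execution Prevention.)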
+ if 'NX compatible' not in GetHeaders('test_nxcompat_default.exe'):
+ test.fail_test()
+
+  # Explicitly off, should not be marked NX compatible.
+ if 'NX compatible' in GetHeaders('test_nxcompat_no.exe'):
+ test.fail_test()
+
+ # Explicitly on.
+ if 'NX compatible' not in GetHeaders('test_nxcompat_yes.exe'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-opt-icf.py b/third_party/python/gyp/test/win/gyptest-link-opt-icf.py
new file mode 100644
index 0000000000..3c48ef6eb9
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-opt-icf.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure comdat folding optimization setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('opt-icf.gyp', chdir=CHDIR)
+ test.build('opt-icf.gyp', chdir=CHDIR)
+
+  # We're specifying /DEBUG, so the default is to not merge identical
+  # functions, and all of the similar_functions should be preserved.
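+  # (With /DEBUG the linker defaults to /OPT:NOICF; the explicit settings
+  # below map to /OPT:NOICF and /OPT:ICF respectively.)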
+ output = test.run_dumpbin(
+ '/disasm', test.built_file_path('test_opticf_default.exe', chdir=CHDIR))
+ if output.count('similar_function') != 6: # 3 definitions, 3 calls.
+ test.fail_test()
+
+  # Explicitly off, all functions preserved separately.
+ output = test.run_dumpbin(
+ '/disasm', test.built_file_path('test_opticf_no.exe', chdir=CHDIR))
+ if output.count('similar_function') != 6: # 3 definitions, 3 calls.
+ test.fail_test()
+
+ # Explicitly on, all but one removed.
+ output = test.run_dumpbin(
+ '/disasm', test.built_file_path('test_opticf_yes.exe', chdir=CHDIR))
+ if output.count('similar_function') != 4: # 1 definition, 3 calls.
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-opt-ref.py b/third_party/python/gyp/test/win/gyptest-link-opt-ref.py
new file mode 100644
index 0000000000..586b7afcd4
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-opt-ref.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure reference optimization setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('opt-ref.gyp', chdir=CHDIR)
+ test.build('opt-ref.gyp', chdir=CHDIR)
+
+ # We're specifying /DEBUG so the default is to not remove unused functions.
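+  # (/DEBUG implies /OPT:NOREF unless reference optimization is explicitly
+  # enabled.)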
+ output = test.run_dumpbin(
+ '/disasm', test.built_file_path('test_optref_default.exe', chdir=CHDIR))
+ if 'unused_function' not in output:
+ test.fail_test()
+
+ # Explicitly off, unused_function preserved.
+ output = test.run_dumpbin(
+ '/disasm', test.built_file_path('test_optref_no.exe', chdir=CHDIR))
+ if 'unused_function' not in output:
+ test.fail_test()
+
+ # Explicitly on, should be removed.
+ output = test.run_dumpbin(
+ '/disasm', test.built_file_path('test_optref_yes.exe', chdir=CHDIR))
+ if 'unused_function' in output:
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-ordering.py b/third_party/python/gyp/test/win/gyptest-link-ordering.py
new file mode 100644
index 0000000000..ed8ee98c9e
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-ordering.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure the link order of object files is the same between msvs and ninja.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('link-ordering.gyp', chdir=CHDIR)
+ test.build('link-ordering.gyp', test.ALL, chdir=CHDIR)
+
+ def GetDisasm(exe):
+ full_path = test.built_file_path(exe, chdir=CHDIR)
+ # Get disassembly and drop int3 padding between functions.
+ return '\n'.join(
+ x for x in test.run_dumpbin('/disasm', full_path).splitlines()
+ if 'CC' not in x)
+
+ # This is the full dump that we expect. The source files in the .gyp match
+  # this order, which is what determines the ordering in the binary.
+
+ expected_disasm_basic = '''
+_mainCRTStartup:
+ 00401000: B8 05 00 00 00 mov eax,5
+ 00401005: C3 ret
+?z@@YAHXZ:
+ 00401010: B8 03 00 00 00 mov eax,3
+ 00401015: C3 ret
+?x@@YAHXZ:
+ 00401020: B8 01 00 00 00 mov eax,1
+ 00401025: C3 ret
+?y@@YAHXZ:
+ 00401030: B8 02 00 00 00 mov eax,2
+ 00401035: C3 ret
+_main:
+ 00401040: 33 C0 xor eax,eax
+ 00401042: C3 ret
+'''
+
+ if expected_disasm_basic not in GetDisasm('test_ordering_exe.exe'):
+ print(GetDisasm('test_ordering_exe.exe'))
+ test.fail_test()
+
+ # Similar to above. The VS generator handles subdirectories differently.
+
+ expected_disasm_subdirs = '''
+_mainCRTStartup:
+ 00401000: B8 05 00 00 00 mov eax,5
+ 00401005: C3 ret
+_main:
+ 00401010: 33 C0 xor eax,eax
+ 00401012: C3 ret
+?y@@YAHXZ:
+ 00401020: B8 02 00 00 00 mov eax,2
+ 00401025: C3 ret
+?z@@YAHXZ:
+ 00401030: B8 03 00 00 00 mov eax,3
+ 00401035: C3 ret
+'''
+
+ if expected_disasm_subdirs not in GetDisasm('test_ordering_subdirs.exe'):
+ print(GetDisasm('test_ordering_subdirs.exe'))
+ test.fail_test()
+
+ # Similar, but with directories mixed into folders (crt and main at the same
+ # level, but with a subdir in the middle).
+
+ expected_disasm_subdirs_mixed = '''
+_mainCRTStartup:
+ 00401000: B8 05 00 00 00 mov eax,5
+ 00401005: C3 ret
+?x@@YAHXZ:
+ 00401010: B8 01 00 00 00 mov eax,1
+ 00401015: C3 ret
+_main:
+ 00401020: 33 C0 xor eax,eax
+ 00401022: C3 ret
+?z@@YAHXZ:
+ 00401030: B8 03 00 00 00 mov eax,3
+ 00401035: C3 ret
+?y@@YAHXZ:
+ 00401040: B8 02 00 00 00 mov eax,2
+ 00401045: C3 ret
+'''
+
+ if (expected_disasm_subdirs_mixed not in
+ GetDisasm('test_ordering_subdirs_mixed.exe')):
+ print(GetDisasm('test_ordering_subdirs_mixed.exe'))
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-outputfile.py b/third_party/python/gyp/test/win/gyptest-link-outputfile.py
new file mode 100644
index 0000000000..b98cdff0f0
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-outputfile.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure linker OutputFile setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('outputfile.gyp', chdir=CHDIR)
+ test.build('outputfile.gyp', test.ALL, chdir=CHDIR)
+
+ test.built_file_must_exist('blorp.exe', chdir=CHDIR)
+ test.built_file_must_exist('blorp.dll', chdir=CHDIR)
+ test.built_file_must_exist('subdir/blorp.exe', chdir=CHDIR)
+ test.built_file_must_exist('blorp.lib', chdir=CHDIR)
+ test.built_file_must_exist('subdir/blorp.lib', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-pdb-no-output.py b/third_party/python/gyp/test/win/gyptest-link-pdb-no-output.py
new file mode 100644
index 0000000000..6da0aeae98
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-pdb-no-output.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Ensure that when debug information is not output, a pdb is not expected.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp()
+ CHDIR = 'linker-flags'
+ test.run_gyp('pdb-output.gyp', chdir=CHDIR)
+ test.build('pdb-output.gyp', 'test_pdb_output_disabled', chdir=CHDIR)
+ # Make sure that the build doesn't expect a PDB to be generated when there
+ # will be none.
+ test.up_to_date('pdb-output.gyp', 'test_pdb_output_disabled', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-pdb-output.py b/third_party/python/gyp/test/win/gyptest-link-pdb-output.py
new file mode 100644
index 0000000000..27245f7ec8
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-pdb-output.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Ensure that ninja includes the .pdb as an output file from linking.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['ninja'])
+ CHDIR = 'linker-flags'
+ test.run_gyp('pdb-output.gyp', chdir=CHDIR)
+  # Note: we build the .pdb files directly rather than ALL or the gyp target.
+ test.build('pdb-output.gyp', 'output_exe.pdb', chdir=CHDIR)
+ test.build('pdb-output.gyp', 'output_dll.pdb', chdir=CHDIR)
+
+ def FindFile(pdb):
+ full_path = test.built_file_path(pdb, chdir=CHDIR)
+ return os.path.isfile(full_path)
+
+ if not FindFile('output_exe.pdb'):
+ test.fail_test()
+ if not FindFile('output_dll.pdb'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-pdb.py b/third_party/python/gyp/test/win/gyptest-link-pdb.py
new file mode 100644
index 0000000000..26d744d0b7
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-pdb.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that the 'ProgramDatabaseFile' attribute in VCLinker is extracted
+properly.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+ CHDIR = 'linker-flags'
+ test.run_gyp('program-database.gyp', chdir=CHDIR)
+ test.build('program-database.gyp', test.ALL, chdir=CHDIR)
+
+ def FindFile(pdb):
+ full_path = test.built_file_path(pdb, chdir=CHDIR)
+ return os.path.isfile(full_path)
+
+ # Verify the specified PDB is created when ProgramDatabaseFile
+ # is provided.
+ if not FindFile('name_outdir.pdb'):
+ test.fail_test()
+ if not FindFile('name_proddir.pdb'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-pgo.py b/third_party/python/gyp/test/win/gyptest-link-pgo.py
new file mode 100644
index 0000000000..d742047ac3
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-pgo.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure PGO is working properly.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('pgo.gyp', chdir=CHDIR)
+
+ def IsPGOAvailable():
+ """Returns true if the Visual Studio available here supports PGO."""
+ test.build('pgo.gyp', 'gen_linker_option', chdir=CHDIR)
+ tmpfile = test.read(test.built_file_path('linker_options.txt', chdir=CHDIR))
+    return 'PGOPTIMIZE' in tmpfile
+
+ # Test generated build files look fine.
+ if test.format == 'ninja':
+ ninja = test.built_file_path('obj/test_pgo_instrument.ninja', chdir=CHDIR)
+ test.must_contain(ninja, '/LTCG:PGINSTRUMENT')
+ test.must_contain(ninja, 'test_pgo.pgd')
+ ninja = test.built_file_path('obj/test_pgo_optimize.ninja', chdir=CHDIR)
+ test.must_contain(ninja, '/LTCG:PGOPTIMIZE')
+ test.must_contain(ninja, 'test_pgo.pgd')
+ ninja = test.built_file_path('obj/test_pgo_update.ninja', chdir=CHDIR)
+ test.must_contain(ninja, '/LTCG:PGUPDATE')
+ test.must_contain(ninja, 'test_pgo.pgd')
+ elif test.format == 'msvs':
+ LTCG_FORMAT = '<LinkTimeCodeGeneration>%s</LinkTimeCodeGeneration>'
+ vcproj = test.workpath('linker-flags/test_pgo_instrument.vcxproj')
+ test.must_contain(vcproj, LTCG_FORMAT % 'PGInstrument')
+ test.must_contain(vcproj, 'test_pgo.pgd')
+ vcproj = test.workpath('linker-flags/test_pgo_optimize.vcxproj')
+ test.must_contain(vcproj, LTCG_FORMAT % 'PGOptimization')
+ test.must_contain(vcproj, 'test_pgo.pgd')
+ vcproj = test.workpath('linker-flags/test_pgo_update.vcxproj')
+ test.must_contain(vcproj, LTCG_FORMAT % 'PGUpdate')
+ test.must_contain(vcproj, 'test_pgo.pgd')
+
+ # When PGO is available, try building binaries with PGO.
+ if IsPGOAvailable():
+ pgd_path = test.built_file_path('test_pgo.pgd', chdir=CHDIR)
+
+ # Test if 'PGInstrument' generates PGD (Profile-Guided Database) file.
+ if os.path.exists(pgd_path):
+ test.unlink(pgd_path)
+ test.must_not_exist(pgd_path)
+ test.build('pgo.gyp', 'test_pgo_instrument', chdir=CHDIR)
+ test.must_exist(pgd_path)
+
+ # Test if 'PGOptimize' works well
+ test.build('pgo.gyp', 'test_pgo_optimize', chdir=CHDIR)
+ test.must_contain_any_line(test.stdout(), ['profiled functions'])
+
+ # Test if 'PGUpdate' works well
+ test.build('pgo.gyp', 'test_pgo_update', chdir=CHDIR)
+ # With 'PGUpdate', linker should not complain that sources are changed after
+ # the previous training run.
+ test.touch(test.workpath('linker-flags/inline_test_main.cc'))
+ test.unlink(test.built_file_path('test_pgo_update.exe', chdir=CHDIR))
+ test.build('pgo.gyp', 'test_pgo_update', chdir=CHDIR)
+ test.must_contain_any_line(test.stdout(), ['profiled functions'])
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-profile.py b/third_party/python/gyp/test/win/gyptest-link-profile.py
new file mode 100644
index 0000000000..4dbc9ae4ce
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-profile.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that the 'Profile' attribute in VCLinker is extracted properly.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+ CHDIR = 'linker-flags'
+ test.run_gyp('profile.gyp', chdir=CHDIR)
+ test.build('profile.gyp', test.ALL, chdir=CHDIR)
+
+ def GetSummary(exe):
+ full_path = test.built_file_path(exe, chdir=CHDIR)
+ return test.run_dumpbin(full_path)
+
+ # '.idata' section will be missing when /PROFILE is enabled.
+ if '.idata' in GetSummary('test_profile_true.exe'):
+ test.fail_test()
+
+  if '.idata' not in GetSummary('test_profile_false.exe'):
+ test.fail_test()
+
+  if '.idata' not in GetSummary('test_profile_default.exe'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-restat-importlib.py b/third_party/python/gyp/test/win/gyptest-link-restat-importlib.py
new file mode 100644
index 0000000000..d249e0a6bd
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-restat-importlib.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure we don't cause unnecessary builds due to import libs appearing
+to be out of date.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import os
+import sys
+import time
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ if not os.environ.get('ProgramFiles(x86)'):
+ # TODO(scottmg)
+ print('Skipping test on x86, http://crbug.com/365833')
+ test.pass_test()
+
+ CHDIR = 'importlib'
+ test.run_gyp('importlib.gyp', chdir=CHDIR)
+ test.build('importlib.gyp', test.ALL, chdir=CHDIR)
+
+  # Delay briefly so that this touch doesn't end up with the same timestamp
+  # as the previous run.
+ test.sleep()
+
+ # Touch the .cc file; the .dll will rebuild, but the import libs timestamp
+ # won't be updated.
+ test.touch('importlib/has-exports.cc')
+ test.build('importlib.gyp', 'test_importlib', chdir=CHDIR)
+
+ # This is the important part. The .dll above will relink and have an updated
+ # timestamp, however the import .libs timestamp won't be updated. So, we
+ # have to handle restating inputs in ninja so the final binary doesn't
+ # continually relink (due to thinking the .lib isn't up to date).
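+  # (ninja's 'restat' feature re-checks output timestamps after a rule runs
+  # and prunes downstream edges whose outputs did not actually change.)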
+ test.up_to_date('importlib.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-safeseh.py b/third_party/python/gyp/test/win/gyptest-link-safeseh.py
new file mode 100644
index 0000000000..31a25673f4
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-safeseh.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure safeseh setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp()
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('safeseh.gyp', chdir=CHDIR)
+ test.build('safeseh.gyp', test.ALL, chdir=CHDIR)
+
+ def HasSafeExceptionHandlers(exe):
+ full_path = test.built_file_path(exe, chdir=CHDIR)
+ output = test.run_dumpbin('/LOADCONFIG', full_path)
+ return ' Safe Exception Handler Table' in output
+
+ # From MSDN: http://msdn.microsoft.com/en-us/library/9a89h429.aspx
+ # If /SAFESEH is not specified, the linker will produce an image with a
+  # table of safe exception handlers if all modules are compatible with
+ # the safe exception handling feature. If any modules were not
+  # compatible with the safe exception handling feature, the resulting image
+ # will not contain a table of safe exception handlers.
+ # However, the msvs IDE passes /SAFESEH to the linker by default, if
+ # ImageHasSafeExceptionHandlers is not set to false in the vcxproj file.
+ # We emulate this behavior in msvs_emulation.py, so 'test_safeseh_default'
+ # and 'test_safeseh_yes' are built identically.
+ if not HasSafeExceptionHandlers('test_safeseh_default.exe'):
+ test.fail_test()
+ if HasSafeExceptionHandlers('test_safeseh_no.exe'):
+ test.fail_test()
+ if not HasSafeExceptionHandlers('test_safeseh_yes.exe'):
+ test.fail_test()
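+  # /SAFESEH applies only to x86 images; x64 binaries use table-based
+  # exception handling and never carry a Safe Exception Handler Table.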
+ if HasSafeExceptionHandlers('test_safeseh_x64.exe'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-shard.py b/third_party/python/gyp/test/win/gyptest-link-shard.py
new file mode 100644
index 0000000000..9af93284a7
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-shard.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure msvs_shard works correctly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'shard'
+ test.run_gyp('shard.gyp', chdir=CHDIR)
+ test.build('shard.gyp', test.ALL, chdir=CHDIR)
+
+ test.built_file_must_exist('shard_0.lib', chdir=CHDIR)
+ test.built_file_must_exist('shard_1.lib', chdir=CHDIR)
+ test.built_file_must_exist('shard_2.lib', chdir=CHDIR)
+ test.built_file_must_exist('shard_3.lib', chdir=CHDIR)
+
+ test.run_gyp('shard_ref.gyp', chdir=CHDIR)
+ test.build('shard_ref.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-stacksize.py b/third_party/python/gyp/test/win/gyptest-link-stacksize.py
new file mode 100644
index 0000000000..2e952d2c73
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-stacksize.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure StackReserveSize and StackCommitSize settings are extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('stacksize.gyp', chdir=CHDIR)
+ test.build('stacksize.gyp', test.ALL, chdir=CHDIR)
+
+ def GetHeaders(exe):
+ return test.run_dumpbin('/headers', test.built_file_path(exe, chdir=CHDIR))
+
+ # Verify default sizes as reported by dumpbin:
+ # 100000h = 1MB
+ # 1000h = 4KB
+ default_headers = GetHeaders('test_default.exe')
+ if '100000 size of stack reserve' not in default_headers:
+ test.fail_test()
+ if '1000 size of stack commit' not in default_headers:
+ test.fail_test()
+
+ # Verify that reserved size is changed, but commit size is unchanged:
+ # 200000h = 2MB
+ # 1000h = 4KB
+ set_reserved_size_headers = GetHeaders('test_set_reserved_size.exe')
+ if '200000 size of stack reserve' not in set_reserved_size_headers:
+ test.fail_test()
+ if '1000 size of stack commit' not in set_reserved_size_headers:
+ test.fail_test()
+
+ # Verify that setting the commit size, without the reserve size, has no
+ # effect:
+ # 100000h = 1MB
+ # 1000h = 4KB
+ set_commit_size_headers = GetHeaders('test_set_commit_size.exe')
+ if '100000 size of stack reserve' not in set_commit_size_headers:
+ test.fail_test()
+ if '1000 size of stack commit' not in set_commit_size_headers:
+ test.fail_test()
+
+ # Verify that setting both works:
+ # 200000h = 2MB
+ # 2000h = 8KB
+ set_both_headers = GetHeaders('test_set_both.exe')
+ if '200000 size of stack reserve' not in set_both_headers:
+ test.fail_test()
+ if '2000 size of stack commit' not in set_both_headers:
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-subsystem.py b/third_party/python/gyp/test/win/gyptest-link-subsystem.py
new file mode 100644
index 0000000000..a94ba36856
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-subsystem.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure subsystem setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('subsystem.gyp', chdir=CHDIR)
+
+ test.build('subsystem.gyp', 'test_console_ok', chdir=CHDIR)
+ test.build('subsystem.gyp', 'test_console_fail', chdir=CHDIR, status=1)
+ test.build('subsystem.gyp', 'test_windows_ok', chdir=CHDIR)
+ test.build('subsystem.gyp', 'test_windows_fail', chdir=CHDIR, status=1)
+
+ test.build('subsystem.gyp', 'test_console_xp', chdir=CHDIR)
+ test.build('subsystem.gyp', 'test_windows_xp', chdir=CHDIR)
+ # Make sure we are targeting XP.
+ def GetHeaders(exe):
+ return test.run_dumpbin('/headers', test.built_file_path(exe, chdir=CHDIR))
+ if '5.01 subsystem version' not in GetHeaders('test_console_xp.exe'):
+ test.fail_test()
+ if '5.01 subsystem version' not in GetHeaders('test_windows_xp.exe'):
+ test.fail_test()
+
+ # TODO(scottmg): There are other subsystems (WinCE, etc.) that we don't use.
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-target-machine.py b/third_party/python/gyp/test/win/gyptest-link-target-machine.py
new file mode 100644
index 0000000000..5a15f3f4dc
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-target-machine.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure TargetMachine setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('target-machine.gyp', chdir=CHDIR)
+ # The .cc file is compiled as x86 (the default), so the link/libs that are
+ # x64 need to fail.
+ test.build('target-machine.gyp', 'test_target_link_x86', chdir=CHDIR)
+ test.build(
+ 'target-machine.gyp', 'test_target_link_x64', chdir=CHDIR, status=1)
+ test.build('target-machine.gyp', 'test_target_lib_x86', chdir=CHDIR)
+ test.build('target-machine.gyp', 'test_target_lib_x64', chdir=CHDIR, status=1)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-tsaware.py b/third_party/python/gyp/test/win/gyptest-link-tsaware.py
new file mode 100644
index 0000000000..d34b3c24a5
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-tsaware.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure tsaware setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('tsaware.gyp', chdir=CHDIR)
+ test.build('tsaware.gyp', test.ALL, chdir=CHDIR)
+
+ def GetHeaders(exe):
+ return test.run_dumpbin('/headers', test.built_file_path(exe, chdir=CHDIR))
+
+  # Explicitly off, should not be marked Terminal Server Aware.
+ if 'Terminal Server Aware' in GetHeaders('test_tsaware_no.exe'):
+ test.fail_test()
+
+ # Explicitly on.
+ if 'Terminal Server Aware' not in GetHeaders('test_tsaware_yes.exe'):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-uldi-depending-on-module.py b/third_party/python/gyp/test/win/gyptest-link-uldi-depending-on-module.py
new file mode 100644
index 0000000000..75c9503dc4
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-uldi-depending-on-module.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure that when ULDI is on, we cause downstream modules to get built
+when we depend on the component objs.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'uldi'
+ test.run_gyp('uldi-depending-on-module.gyp', chdir=CHDIR)
+ test.build('uldi-depending-on-module.gyp', 'an_exe', chdir=CHDIR)
+ test.built_file_must_exist('a_module.dll', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-uldi.py b/third_party/python/gyp/test/win/gyptest-link-uldi.py
new file mode 100644
index 0000000000..62c5892c50
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-uldi.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure that when ULDI is on, we link .objs that make up .libs rather than
+the .libs themselves.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'uldi'
+ test.run_gyp('uldi.gyp', chdir=CHDIR)
+ # When linking with ULDI, the duplicated function from the lib will be an
+ # error.
+ test.build('uldi.gyp', 'final_uldi', chdir=CHDIR, status=1)
+ # And when in libs, the duplicated function will be silently dropped, so the
+ # build succeeds.
+ test.build('uldi.gyp', 'final_no_uldi', chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-unsupported-manifest.py b/third_party/python/gyp/test/win/gyptest-link-unsupported-manifest.py
new file mode 100644
index 0000000000..8f7e12bc8c
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-unsupported-manifest.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure we error out if #pragma comments are used to modify manifests.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ # This assertion only applies to the ninja build.
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('unsupported-manifest.gyp', chdir=CHDIR)
+
+ # Just needs to fail to build.
+ test.build('unsupported-manifest.gyp',
+ 'test_unsupported', chdir=CHDIR, status=1)
+ test.must_not_exist(test.built_file_path('test_unsupported.exe', chdir=CHDIR))
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-link-update-manifest.py b/third_party/python/gyp/test/win/gyptest-link-update-manifest.py
new file mode 100644
index 0000000000..7bad1eca77
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-update-manifest.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure binary is relinked when manifest settings are changed.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'win32':
+ import pywintypes
+ import win32api
+ import winerror
+
+ RT_MANIFEST = 24
+
+ class LoadLibrary(object):
+ """Context manager for loading and releasing binaries in Windows.
+ Yields the handle of the binary loaded."""
+ def __init__(self, path):
+ self._path = path
+ self._handle = None
+
+ def __enter__(self):
+ self._handle = win32api.LoadLibrary(self._path)
+ return self._handle
+
+ def __exit__(self, type, value, traceback):
+ win32api.FreeLibrary(self._handle)
+
+ def extract_manifest(path, resource_name):
+ """Reads manifest from |path| and returns it as a string.
+  Returns None if there is no such manifest."""
+ with LoadLibrary(path) as handle:
+ try:
+ return win32api.LoadResource(
+ handle, RT_MANIFEST, resource_name).decode('utf-8', 'ignore')
+ except pywintypes.error as error:
+ if error.args[0] == winerror.ERROR_RESOURCE_DATA_NOT_FOUND:
+ return None
+ else:
+ raise
+
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+
+ gyp_template = '''
+{
+ 'targets': [
+ {
+ 'target_name': 'test_update_manifest',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ 'UACExecutionLevel': '%(uac_execution_level)d',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ 'AdditionalManifestFiles': '%(additional_manifest_files)s',
+ },
+ },
+ },
+ ],
+}
+'''
+
+ gypfile = 'update-manifest.gyp'
+
+ def WriteAndUpdate(uac_execution_level, additional_manifest_files, do_build):
+ with open(os.path.join(CHDIR, gypfile), 'w') as f:
+ f.write(gyp_template % {
+ 'uac_execution_level': uac_execution_level,
+ 'additional_manifest_files': additional_manifest_files,
+ })
+ test.run_gyp(gypfile, chdir=CHDIR)
+ if do_build:
+ test.build(gypfile, chdir=CHDIR)
+ exe_file = test.built_file_path('test_update_manifest.exe', chdir=CHDIR)
+ return extract_manifest(exe_file, 1)
+
+ manifest = WriteAndUpdate(0, '', True)
+ test.fail_test('asInvoker' not in manifest)
+ test.fail_test('35138b9a-5d96-4fbd-8e2d-a2440225f93a' in manifest)
+
+ # Make sure that updating .gyp and regenerating doesn't cause a rebuild.
+ WriteAndUpdate(0, '', False)
+ test.up_to_date(gypfile, test.ALL, chdir=CHDIR)
+
+ # But make sure that changing a manifest property does cause a relink.
+ manifest = WriteAndUpdate(2, '', True)
+ test.fail_test('requireAdministrator' not in manifest)
+
+ # Adding a manifest causes a rebuild.
+ manifest = WriteAndUpdate(2, 'extra.manifest', True)
+ test.fail_test('35138b9a-5d96-4fbd-8e2d-a2440225f93a' not in manifest)
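The LoadLibrary class above is a small hand-rolled context manager. As an illustrative aside (not part of the patch), the same load/free pattern can be written with contextlib; this assumes pywin32 (win32api) is installed, as the test already requires.

    import contextlib
    import win32api

    @contextlib.contextmanager
    def loaded_library(path):
        # Load the binary, hand the module handle to the caller, and always
        # free it again, even if the body raises.
        handle = win32api.LoadLibrary(path)
        try:
            yield handle
        finally:
            win32api.FreeLibrary(handle)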
diff --git a/third_party/python/gyp/test/win/gyptest-link-warnings-as-errors.py b/third_party/python/gyp/test/win/gyptest-link-warnings-as-errors.py
new file mode 100644
index 0000000000..d6a64736fb
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-link-warnings-as-errors.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure linker warnings-as-errors setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('warn-as-error.gyp', chdir=CHDIR)
+
+ test.build('warn-as-error.gyp', 'test_on', chdir=CHDIR, status=1)
+ test.build('warn-as-error.gyp', 'test_off', chdir=CHDIR)
+ test.build('warn-as-error.gyp', 'test_default', chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-long-command-line.py b/third_party/python/gyp/test/win/gyptest-long-command-line.py
new file mode 100644
index 0000000000..8f8b7a3bbd
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-long-command-line.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure long command lines work.
+"""
+
+import TestGyp
+
+import subprocess
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['ninja', 'msvs'])
+
+ CHDIR = 'long-command-line'
+ test.run_gyp('long-command-line.gyp', chdir=CHDIR)
+ test.build('long-command-line.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-macro-projectname.py b/third_party/python/gyp/test/win/gyptest-macro-projectname.py
new file mode 100644
index 0000000000..e411cc04a4
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-macro-projectname.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure macro expansion of $(ProjectName) is handled.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'vs-macros'
+ test.run_gyp('projectname.gyp', chdir=CHDIR)
+ test.build('projectname.gyp', test.ALL, chdir=CHDIR)
+ test.built_file_must_exist('test_expansions_plus_something.exe', chdir=CHDIR)
+ test.built_file_must_exist(
+ 'test_with_product_name_plus_something.exe', chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-macro-targetext.py b/third_party/python/gyp/test/win/gyptest-macro-targetext.py
new file mode 100644
index 0000000000..450710d631
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-macro-targetext.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure macro expansion of $(TargetExt) is handled.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'vs-macros'
+ test.run_gyp('targetext.gyp', chdir=CHDIR)
+ test.build('targetext.gyp', test.ALL, chdir=CHDIR)
+ test.built_file_must_exist('executable.exe', chdir=CHDIR)
+ test.built_file_must_exist('loadable_module.dll', chdir=CHDIR)
+ test.built_file_must_exist('shared_library.dll', chdir=CHDIR)
+ test.built_file_must_exist('static_library.lib', chdir=CHDIR)
+ test.built_file_must_exist('product_extension.library', chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-macro-targetfilename.py b/third_party/python/gyp/test/win/gyptest-macro-targetfilename.py
new file mode 100644
index 0000000000..759e26c566
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-macro-targetfilename.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure macro expansion of $(TargetFileName) is handled.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+ if not (test.format == 'msvs' and
+ int(os.environ.get('GYP_MSVS_VERSION', 0)) == 2013):
+ CHDIR = 'vs-macros'
+ test.run_gyp('targetfilename.gyp', chdir=CHDIR)
+ test.build('targetfilename.gyp', test.ALL, chdir=CHDIR)
+ test.built_file_must_exist('test_targetfilename_executable.exe', chdir=CHDIR)
+ test.built_file_must_exist('test_targetfilename_loadable_module.dll',
+ chdir=CHDIR)
+ test.built_file_must_exist('test_targetfilename_shared_library.dll',
+ chdir=CHDIR)
+ test.built_file_must_exist('test_targetfilename_static_library.lib',
+ chdir=CHDIR)
+ test.built_file_must_exist('test_targetfilename_product_extension.foo',
+ chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-macro-targetname.py b/third_party/python/gyp/test/win/gyptest-macro-targetname.py
new file mode 100644
index 0000000000..b1118019a3
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-macro-targetname.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure macro expansion of $(TargetName) and $(TargetDir) are handled.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'vs-macros'
+ test.run_gyp('targetname.gyp', chdir=CHDIR)
+ test.build('targetname.gyp', test.ALL, chdir=CHDIR)
+ test.built_file_must_exist('test_targetname_plus_something1.exe',
+ chdir=CHDIR)
+ test.built_file_must_exist(
+ 'prod_prefixtest_targetname_with_prefix_plus_something2.exe',
+ chdir=CHDIR)
+ test.built_file_must_exist('prod_name_plus_something3.exe', chdir=CHDIR)
+ test.built_file_must_exist('prod_prefixprod_name_plus_something4.exe',
+ chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-macro-targetpath.py b/third_party/python/gyp/test/win/gyptest-macro-targetpath.py
new file mode 100644
index 0000000000..fe7eac1834
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-macro-targetpath.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure macro expansion of $(TargetPath) is handled.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'vs-macros'
+ test.run_gyp('targetpath.gyp', chdir=CHDIR)
+ test.build('targetpath.gyp', test.ALL, chdir=CHDIR)
+ test.built_file_must_exist('test_targetpath_executable.exe', chdir=CHDIR)
+ test.built_file_must_exist('test_targetpath_loadable_module.dll',
+ chdir=CHDIR)
+ test.built_file_must_exist('test_targetpath_shared_library.dll',
+ chdir=CHDIR)
+ test.built_file_must_exist('test_targetpath_static_library.lib',
+ chdir=CHDIR)
+ test.built_file_must_exist('test_targetpath_product_extension.foo',
+ chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-macro-vcinstalldir.py b/third_party/python/gyp/test/win/gyptest-macro-vcinstalldir.py
new file mode 100644
index 0000000000..37396e161a
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-macro-vcinstalldir.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure macro expansion of $(VCInstallDir) is handled, and specifically that
+it is always /-terminated for compatibility.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'vs-macros'
+ test.run_gyp('vcinstalldir.gyp', chdir=CHDIR)
+ # This fails on VS because the trailing slash escapes the trailing quote.
+ test.build('vcinstalldir.gyp', 'test_slash_trailing', chdir=CHDIR, status=1)
+ test.build('vcinstalldir.gyp', 'test_slash_dir', chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-macros-containing-gyp.py b/third_party/python/gyp/test/win/gyptest-macros-containing-gyp.py
new file mode 100644
index 0000000000..f6eaf63dbb
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-macros-containing-gyp.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Handle VS macro expansion containing gyp variables.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'vs-macros'
+ test.run_gyp('containing-gyp.gyp', chdir=CHDIR)
+ test.build('containing-gyp.gyp', test.ALL, chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-macros-in-inputs-and-outputs.py b/third_party/python/gyp/test/win/gyptest-macros-in-inputs-and-outputs.py
new file mode 100644
index 0000000000..3d6fa74e43
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-macros-in-inputs-and-outputs.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Handle macro expansion in inputs and outputs of rules.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'vs-macros'
+ test.run_gyp('input-output-macros.gyp', chdir=CHDIR)
+
+ test.build('input-output-macros.gyp', 'test_expansions', chdir=CHDIR)
+
+ test.built_file_must_exist('stuff.blah.something',
+ content='Random data file.\nModified.',
+ chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-midl-excluded.py b/third_party/python/gyp/test/win/gyptest-midl-excluded.py
new file mode 100644
index 0000000000..70059ab64b
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-midl-excluded.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test that .idl files in actions and non-native rules are excluded.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'idl-excluded'
+ test.run_gyp('idl-excluded.gyp', chdir=CHDIR)
+ test.build('idl-excluded.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-midl-includedirs.py b/third_party/python/gyp/test/win/gyptest-midl-includedirs.py
new file mode 100644
index 0000000000..05f6370409
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-midl-includedirs.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that 'midl_include_dirs' is handled.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'idl-includedirs'
+ test.run_gyp('idl-includedirs.gyp', chdir=CHDIR)
+ test.build('idl-includedirs.gyp', test.ALL, chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-midl-rules.py b/third_party/python/gyp/test/win/gyptest-midl-rules.py
new file mode 100644
index 0000000000..591a507320
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-midl-rules.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Handle default .idl build rules.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'idl-rules'
+ test.run_gyp('basic-idl.gyp', chdir=CHDIR)
+ for platform in ['Win32', 'x64']:
+ test.set_configuration('Debug|%s' % platform)
+ test.build('basic-idl.gyp', test.ALL, chdir=CHDIR)
+
+ # Make sure ninja win_tool.py filters out noisy lines.
+ if test.format == 'ninja' and 'Processing' in test.stdout():
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-ml-safeseh.py b/third_party/python/gyp/test/win/gyptest-ml-safeseh.py
new file mode 100644
index 0000000000..ec702b9df2
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-ml-safeseh.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure the /safeseh option can be passed to ml.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ CHDIR = 'ml-safeseh'
+ test.run_gyp('ml-safeseh.gyp', chdir=CHDIR)
+ test.build('ml-safeseh.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-quoting-commands.py b/third_party/python/gyp/test/win/gyptest-quoting-commands.py
new file mode 100644
index 0000000000..b40f99f088
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-quoting-commands.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure batch files run correctly as actions. Regression test for a
+previously missing trailing quote on the command line. cmd will typically
+insert a missing quote implicitly, but if the command already ends in a quote
+it will not insert another, so the command can sometimes end up unterminated.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'batch-file-action'
+ test.run_gyp('batch-file-action.gyp', chdir=CHDIR)
+ test.build('batch-file-action.gyp', test.ALL, chdir=CHDIR)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-rc-build.py b/third_party/python/gyp/test/win/gyptest-rc-build.py
new file mode 100644
index 0000000000..4df33ab5d9
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-rc-build.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure we build and include .rc files.
+"""
+
+from __future__ import print_function
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ print("This test is currently disabled: https://crbug.com/483696.")
+ sys.exit(0)
+
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'rc-build'
+ test.run_gyp('hello.gyp', chdir=CHDIR)
+ test.build('hello.gyp', test.ALL, chdir=CHDIR)
+ test.up_to_date('hello.gyp', 'resource_only_dll', chdir=CHDIR)
+ test.run_built_executable('with_resources', chdir=CHDIR, status=4)
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-sys.py b/third_party/python/gyp/test/win/gyptest-sys.py
new file mode 100644
index 0000000000..aceb25428e
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-sys.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2016 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that Windows drivers are built correctly.
+"""
+
+import TestGyp
+import TestCmd
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs'])
+
+ CHDIR = 'win-driver-target-type'
+ test.run_gyp('win-driver-target-type.gyp', chdir=CHDIR)
+ maybe_missing = r'[\s\S]+?(WindowsKernelModeDriver|Build succeeded.)[\s\S]+?'
+ test.build('win-driver-target-type.gyp', 'win_driver_target_type',
+ chdir=CHDIR, stdout=maybe_missing,
+ status=[0, 1], match=TestCmd.match_re_dotall)
+
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/gyptest-system-include.py b/third_party/python/gyp/test/win/gyptest-system-include.py
new file mode 100644
index 0000000000..9a47d98538
--- /dev/null
+++ b/third_party/python/gyp/test/win/gyptest-system-include.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Checks that msvs_system_include_dirs works.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'system-include'
+ test.run_gyp('test.gyp', chdir=CHDIR)
+ test.build('test.gyp', test.ALL, chdir=CHDIR)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/idl-excluded/bad.idl b/third_party/python/gyp/test/win/idl-excluded/bad.idl
new file mode 100644
index 0000000000..38554e9635
--- /dev/null
+++ b/third_party/python/gyp/test/win/idl-excluded/bad.idl
@@ -0,0 +1,6 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+This is a dummy .idl file that will trigger an error if it is not excluded from
+the build.
diff --git a/third_party/python/gyp/test/win/idl-excluded/copy-file.py b/third_party/python/gyp/test/win/idl-excluded/copy-file.py
new file mode 100644
index 0000000000..7bdfbfd4bd
--- /dev/null
+++ b/third_party/python/gyp/test/win/idl-excluded/copy-file.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import sys
+
+contents = open(sys.argv[1], 'r').read()
+open(sys.argv[2], 'w').write(contents)
+
+sys.exit(0)
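As an aside (not part of the patch), the same two-argument copy step could be written with the standard library; the test keeps its own explicit read/write version above.

    import shutil
    import sys

    # Copy the file named by the first argument to the path named by the second.
    shutil.copyfile(sys.argv[1], sys.argv[2])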
diff --git a/third_party/python/gyp/test/win/idl-excluded/idl-excluded.gyp b/third_party/python/gyp/test/win/idl-excluded/idl-excluded.gyp
new file mode 100644
index 0000000000..972b7dedac
--- /dev/null
+++ b/third_party/python/gyp/test/win/idl-excluded/idl-excluded.gyp
@@ -0,0 +1,58 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'exclude_with_action',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'actions': [{
+ 'action_name': 'copy_action',
+ 'inputs': [
+ 'copy-file.py',
+ 'bad.idl',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/bad.idl',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<@(_outputs)',
+ ],
+ }],
+ },
+ {
+ 'target_name': 'exclude_with_rule',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'sources': [
+ 'bad.idl',
+ ],
+ 'rules': [{
+ 'rule_name': 'copy_rule',
+ 'extension': 'idl',
+ 'inputs': [
+ 'copy-file.py',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).idl',
+ ],
+ 'action': [
+ 'python', '<@(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
+ ],
+ }],
+ },
+ {
+ 'target_name': 'program',
+ 'type': 'executable',
+ 'sources': [
+ 'program.cc',
+ ],
+ 'dependencies': [
+ 'exclude_with_action',
+ 'exclude_with_rule',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/win/idl-excluded/program.cc b/third_party/python/gyp/test/win/idl-excluded/program.cc
new file mode 100644
index 0000000000..9dc3c94f34
--- /dev/null
+++ b/third_party/python/gyp/test/win/idl-excluded/program.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/idl-includedirs/hello.cc b/third_party/python/gyp/test/win/idl-includedirs/hello.cc
new file mode 100644
index 0000000000..9dc3c94f34
--- /dev/null
+++ b/third_party/python/gyp/test/win/idl-includedirs/hello.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/idl-includedirs/idl-includedirs.gyp b/third_party/python/gyp/test/win/idl-includedirs/idl-includedirs.gyp
new file mode 100644
index 0000000000..fcec063a98
--- /dev/null
+++ b/third_party/python/gyp/test/win/idl-includedirs/idl-includedirs.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_midl_include_dirs',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.cc',
+ 'subdir/foo.idl',
+ 'subdir/bar.idl',
+ ],
+ 'midl_include_dirs': [
+ 'subdir',
+ ],
+ 'msvs_settings': {
+ 'VCMIDLTool': {
+ 'OutputDirectory': '<(INTERMEDIATE_DIR)',
+ 'DLLDataFileName': '$(InputName)_dlldata.h',
+ },
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/win/idl-includedirs/subdir/bar.idl b/third_party/python/gyp/test/win/idl-includedirs/subdir/bar.idl
new file mode 100644
index 0000000000..d4e6cbb3eb
--- /dev/null
+++ b/third_party/python/gyp/test/win/idl-includedirs/subdir/bar.idl
@@ -0,0 +1,13 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import "oaidl.idl";
+
+[
+ object,
+ uuid(A03D1421-B1EC-11D0-8C3A-00C04FC31D3F),
+]
+interface Bar : IUnknown {
+ HRESULT BarFunction();
+};
diff --git a/third_party/python/gyp/test/win/idl-includedirs/subdir/foo.idl b/third_party/python/gyp/test/win/idl-includedirs/subdir/foo.idl
new file mode 100644
index 0000000000..c8c65b9be6
--- /dev/null
+++ b/third_party/python/gyp/test/win/idl-includedirs/subdir/foo.idl
@@ -0,0 +1,14 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import "oaidl.idl";
+import "bar.idl";
+
+[
+ object,
+ uuid(9C1100DD-51D4-4827-AE9F-3B8FAC4AED72),
+]
+interface Foo : IUnknown {
+ HRESULT FooFunction(Bar* bar);
+};
diff --git a/third_party/python/gyp/test/win/idl-rules/Window.idl b/third_party/python/gyp/test/win/idl-rules/Window.idl
new file mode 100644
index 0000000000..d8ea01bee3
--- /dev/null
+++ b/third_party/python/gyp/test/win/idl-rules/Window.idl
@@ -0,0 +1,9 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+[
+ WillBeGarbageCollected,
+] interface Window {
+ void alert();
+};
diff --git a/third_party/python/gyp/test/win/idl-rules/basic-idl.gyp b/third_party/python/gyp/test/win/idl-rules/basic-idl.gyp
new file mode 100644
index 0000000000..b74622adea
--- /dev/null
+++ b/third_party/python/gyp/test/win/idl-rules/basic-idl.gyp
@@ -0,0 +1,67 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'midl_out_dir': '<(SHARED_INTERMEDIATE_DIR)',
+ },
+ 'target_defaults': {
+ 'configurations': {
+ 'Debug': {
+ 'msvs_configuration_platform': 'Win32',
+ },
+ 'Debug_x64': {
+ 'inherit_from': ['Debug'],
+ 'msvs_configuration_platform': 'x64',
+ },
+ },
+ },
+ 'targets': [
+ {
+ 'target_name': 'idl_test',
+ 'type': 'executable',
+ 'sources': [
+ 'history_indexer.idl',
+ '<(midl_out_dir)/history_indexer.h',
+ '<(midl_out_dir)/history_indexer_i.c',
+ 'history_indexer_user.cc',
+ ],
+ 'libraries': ['ole32.lib'],
+ 'include_dirs': [
+ '<(midl_out_dir)',
+ ],
+ 'msvs_settings': {
+ 'VCMIDLTool': {
+ 'OutputDirectory': '<(midl_out_dir)',
+ 'HeaderFileName': '<(RULE_INPUT_ROOT).h',
+ },
+ },
+ },
+ {
+ 'target_name': 'idl_explicit_action',
+ 'type': 'none',
+ 'sources': [
+ 'Window.idl',
+ ],
+ 'actions': [{
+ 'action_name': 'blink_idl',
+ 'explicit_idl_action': 1,
+ 'msvs_cygwin_shell': 0,
+ 'inputs': [
+ 'Window.idl',
+ 'idl_compiler.py',
+ ],
+ 'outputs': [
+ 'Window.cpp',
+ 'Window.h',
+ ],
+ 'action': [
+ 'python',
+ 'idl_compiler.py',
+ 'Window.idl',
+ ],
+ }],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/win/idl-rules/history_indexer.idl b/third_party/python/gyp/test/win/idl-rules/history_indexer.idl
new file mode 100644
index 0000000000..e866ce6d90
--- /dev/null
+++ b/third_party/python/gyp/test/win/idl-rules/history_indexer.idl
@@ -0,0 +1,17 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import "oaidl.idl";
+import "ocidl.idl";
+
+[
+ object,
+ uuid(9C1100DD-51D4-4827-AE9F-3B8FAC4AED72),
+ oleautomation,
+ nonextensible,
+ pointer_default(unique)
+]
+interface IChromeHistoryIndexer : IUnknown {
+ HRESULT SomeFunction([in] VARIANT begin_time, [in] VARIANT end_time);
+};
diff --git a/third_party/python/gyp/test/win/idl-rules/history_indexer_user.cc b/third_party/python/gyp/test/win/idl-rules/history_indexer_user.cc
new file mode 100644
index 0000000000..071a9ffef5
--- /dev/null
+++ b/third_party/python/gyp/test/win/idl-rules/history_indexer_user.cc
@@ -0,0 +1,15 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "history_indexer.h"
+
+// Use the thing in the IDL.
+int main() {
+ IChromeHistoryIndexer** indexer = 0;
+ IID fake_iid;
+ CoCreateInstance(fake_iid, NULL, CLSCTX_INPROC,
+ __uuidof(IChromeHistoryIndexer),
+ reinterpret_cast<void**>(indexer));
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/idl-rules/idl_compiler.py b/third_party/python/gyp/test/win/idl-rules/idl_compiler.py
new file mode 100644
index 0000000000..a12b274d67
--- /dev/null
+++ b/third_party/python/gyp/test/win/idl-rules/idl_compiler.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Mock IDL compiler: just outputs empty .h/.cpp files.
+
+import os
+import sys
+
+if len(sys.argv) == 2:
+ basename, ext = os.path.splitext(sys.argv[1])
+ with open('%s.h' % basename, 'w') as f:
+ f.write('// %s.h\n' % basename)
+ with open('%s.cpp' % basename, 'w') as f:
+ f.write('// %s.cpp\n' % basename)
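For reference, the blink_idl action in basic-idl.gyp above drives this mock exactly as sketched below (illustrative only, not part of the patch); it writes Window.h and Window.cpp stubs next to the input.

    import subprocess

    # Equivalent to the 'action' list in basic-idl.gyp: one positional argument,
    # the .idl file whose basename determines the stub output names.
    subprocess.check_call(['python', 'idl_compiler.py', 'Window.idl'])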
diff --git a/third_party/python/gyp/test/win/importlib/dll_no_exports.cc b/third_party/python/gyp/test/win/importlib/dll_no_exports.cc
new file mode 100644
index 0000000000..96dd7970b7
--- /dev/null
+++ b/third_party/python/gyp/test/win/importlib/dll_no_exports.cc
@@ -0,0 +1,9 @@
+// Copyright (c) 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <windows.h>
+
+BOOL APIENTRY DllMain(HMODULE module, DWORD reason, LPVOID reserved) {
+ return TRUE;
+}
diff --git a/third_party/python/gyp/test/win/importlib/has-exports.cc b/third_party/python/gyp/test/win/importlib/has-exports.cc
new file mode 100644
index 0000000000..3f62d6c60d
--- /dev/null
+++ b/third_party/python/gyp/test/win/importlib/has-exports.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+__declspec(dllexport) void some_function() {
+}
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/importlib/hello.cc b/third_party/python/gyp/test/win/importlib/hello.cc
new file mode 100644
index 0000000000..66ff68c113
--- /dev/null
+++ b/third_party/python/gyp/test/win/importlib/hello.cc
@@ -0,0 +1,9 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+__declspec(dllimport) void some_function();
+
+int main() {
+ some_function();
+}
diff --git a/third_party/python/gyp/test/win/importlib/importlib.gyp b/third_party/python/gyp/test/win/importlib/importlib.gyp
new file mode 100644
index 0000000000..ab15b1893d
--- /dev/null
+++ b/third_party/python/gyp/test/win/importlib/importlib.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_importlib',
+ 'type': 'shared_library',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LinkIncremental': '2',
+ }
+ },
+ 'sources': ['has-exports.cc'],
+ },
+
+ {
+ 'target_name': 'test_linkagainst',
+ 'type': 'executable',
+ 'dependencies': ['test_importlib'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LinkIncremental': '2',
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/importlib/noimplib.gyp b/third_party/python/gyp/test/win/importlib/noimplib.gyp
new file mode 100644
index 0000000000..0245058a99
--- /dev/null
+++ b/third_party/python/gyp/test/win/importlib/noimplib.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'no_import_library',
+ 'type': 'loadable_module',
+ 'msvs_settings': {
+ 'NoImportLibrary': 'true',
+ },
+ 'sources': ['dll_no_exports.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/large-pdb/dllmain.cc b/third_party/python/gyp/test/win/large-pdb/dllmain.cc
new file mode 100644
index 0000000000..14875623e8
--- /dev/null
+++ b/third_party/python/gyp/test/win/large-pdb/dllmain.cc
@@ -0,0 +1,9 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <windows.h>
+
+BOOL WINAPI DllMain(HINSTANCE hinstance, DWORD reason, LPVOID reserved) {
+ return TRUE;
+}
diff --git a/third_party/python/gyp/test/win/large-pdb/large-pdb.gyp b/third_party/python/gyp/test/win/large-pdb/large-pdb.gyp
new file mode 100644
index 0000000000..2a241a5623
--- /dev/null
+++ b/third_party/python/gyp/test/win/large-pdb/large-pdb.gyp
@@ -0,0 +1,98 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'large_pdb_exe',
+ 'type': 'executable',
+ 'msvs_large_pdb': 1,
+ 'sources': [
+ 'main.cc',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'ProgramDatabaseFile': '<(PRODUCT_DIR)/large_pdb_exe.exe.pdb',
+ },
+ },
+ },
+ {
+ 'target_name': 'small_pdb_exe',
+ 'type': 'executable',
+ 'msvs_large_pdb': 0,
+ 'sources': [
+ 'main.cc',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'ProgramDatabaseFile': '<(PRODUCT_DIR)/small_pdb_exe.exe.pdb',
+ },
+ },
+ },
+ {
+ 'target_name': 'large_pdb_dll',
+ 'type': 'shared_library',
+ 'msvs_large_pdb': 1,
+ 'sources': [
+ 'dllmain.cc',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'ProgramDatabaseFile': '<(PRODUCT_DIR)/large_pdb_dll.dll.pdb',
+ },
+ },
+ },
+ {
+ 'target_name': 'small_pdb_dll',
+ 'type': 'shared_library',
+ 'msvs_large_pdb': 0,
+ 'sources': [
+ 'dllmain.cc',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'ProgramDatabaseFile': '<(PRODUCT_DIR)/small_pdb_dll.dll.pdb',
+ },
+ },
+ },
+ {
+ 'target_name': 'large_pdb_implicit_exe',
+ 'type': 'executable',
+ 'msvs_large_pdb': 1,
+ 'sources': [
+ 'main.cc',
+ ],
+ # No PDB file is specified. However, the msvs_large_pdb mechanism should
+ # default to the appropriate <(PRODUCT_DIR)/<(TARGET_NAME).exe.pdb.
+ },
+ {
+ 'target_name': 'large_pdb_variable_exe',
+ 'type': 'executable',
+ 'msvs_large_pdb': 1,
+ 'sources': [
+ 'main.cc',
+ ],
+ # No PDB file is specified. However, the msvs_large_pdb_path variable
+ # explicitly sets one.
+ 'variables': {
+ 'msvs_large_pdb_path': '<(PRODUCT_DIR)/foo.pdb',
+ },
+ },
+ {
+ 'target_name': 'large_pdb_product_exe',
+ 'product_name': 'bar',
+ 'type': 'executable',
+ 'msvs_large_pdb': 1,
+ 'sources': [
+ 'main.cc',
+ ],
+ # No PDB file is specified. However, we've specified a product name so
+ # it should use <(PRODUCT_DIR)/bar.exe.pdb.
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/large-pdb/main.cc b/third_party/python/gyp/test/win/large-pdb/main.cc
new file mode 100644
index 0000000000..c3da8e9219
--- /dev/null
+++ b/third_party/python/gyp/test/win/large-pdb/main.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main(void) {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/lib-crosscompile/answer.cc b/third_party/python/gyp/test/win/lib-crosscompile/answer.cc
new file mode 100644
index 0000000000..a6ffa16862
--- /dev/null
+++ b/third_party/python/gyp/test/win/lib-crosscompile/answer.cc
@@ -0,0 +1,9 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "answer.h"
+
+int answer() {
+ return 42;
+}
diff --git a/third_party/python/gyp/test/win/lib-crosscompile/answer.h b/third_party/python/gyp/test/win/lib-crosscompile/answer.h
new file mode 100644
index 0000000000..82312d54b8
--- /dev/null
+++ b/third_party/python/gyp/test/win/lib-crosscompile/answer.h
@@ -0,0 +1,5 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int answer(); \ No newline at end of file
diff --git a/third_party/python/gyp/test/win/lib-crosscompile/use_host_ar.gyp b/third_party/python/gyp/test/win/lib-crosscompile/use_host_ar.gyp
new file mode 100644
index 0000000000..4747bc6445
--- /dev/null
+++ b/third_party/python/gyp/test/win/lib-crosscompile/use_host_ar.gyp
@@ -0,0 +1,17 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'lib_answer',
+ 'type': 'static_library',
+ 'toolsets': ['host'],
+ 'msvs_settings': {
+ 'msvs_cygwin_shell': 0,
+ },
+ 'sources': ['answer.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/lib-flags/answer.cc b/third_party/python/gyp/test/win/lib-flags/answer.cc
new file mode 100644
index 0000000000..a6ffa16862
--- /dev/null
+++ b/third_party/python/gyp/test/win/lib-flags/answer.cc
@@ -0,0 +1,9 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "answer.h"
+
+int answer() {
+ return 42;
+}
diff --git a/third_party/python/gyp/test/win/lib-flags/answer.h b/third_party/python/gyp/test/win/lib-flags/answer.h
new file mode 100644
index 0000000000..82312d54b8
--- /dev/null
+++ b/third_party/python/gyp/test/win/lib-flags/answer.h
@@ -0,0 +1,5 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int answer(); \ No newline at end of file
diff --git a/third_party/python/gyp/test/win/lib-flags/ltcg.gyp b/third_party/python/gyp/test/win/lib-flags/ltcg.gyp
new file mode 100644
index 0000000000..c183107730
--- /dev/null
+++ b/third_party/python/gyp/test/win/lib-flags/ltcg.gyp
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'lib_answer',
+ 'type': 'static_library',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WholeProgramOptimization': 'true', # /GL
+ },
+ 'VCLibrarianTool': {
+ 'LinkTimeCodeGeneration': 'true', # /LTCG
+ },
+ },
+ 'sources': ['answer.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/a/x.cc b/third_party/python/gyp/test/win/linker-flags/a/x.cc
new file mode 100644
index 0000000000..f5f763b095
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/a/x.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int x() {
+ return 1;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/a/z.cc b/third_party/python/gyp/test/win/linker-flags/a/z.cc
new file mode 100644
index 0000000000..8a43501270
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/a/z.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int z() {
+ return 3;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/additional-deps.cc b/third_party/python/gyp/test/win/linker-flags/additional-deps.cc
new file mode 100644
index 0000000000..7dfb589d26
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/additional-deps.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <winsock2.h>
+
+int main() {
+ WSAStartup(0, 0);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/additional-deps.gyp b/third_party/python/gyp/test/win/linker-flags/additional-deps.gyp
new file mode 100644
index 0000000000..55afe64fb0
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/additional-deps.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_deps_none',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_deps_few',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'AdditionalDependencies': [
+ 'wininet.lib',
+ 'ws2_32.lib',
+ ]
+ }
+ },
+ 'sources': ['additional-deps.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/additional-options.gyp b/third_party/python/gyp/test/win/linker-flags/additional-options.gyp
new file mode 100644
index 0000000000..cab3994cd1
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/additional-options.gyp
@@ -0,0 +1,29 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_additional_none',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_additional_few',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'AdditionalOptions': [
+ '/dynamicbase:no',
+ ]
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/aslr.gyp b/third_party/python/gyp/test/win/linker-flags/aslr.gyp
new file mode 100644
index 0000000000..b3aefd50b7
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/aslr.gyp
@@ -0,0 +1,35 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_aslr_default',
+ 'type': 'executable',
+ 'msvs_settings': {
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_aslr_no',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'RandomizedBaseAddress': '1',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_aslr_yes',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'RandomizedBaseAddress': '2',
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/b/y.cc b/third_party/python/gyp/test/win/linker-flags/b/y.cc
new file mode 100644
index 0000000000..bd884119fc
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/b/y.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int y() {
+ return 2;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/base-address.gyp b/third_party/python/gyp/test/win/linker-flags/base-address.gyp
new file mode 100644
index 0000000000..873ebfea3f
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/base-address.gyp
@@ -0,0 +1,38 @@
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_base_specified_exe',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'BaseAddress': '0x00420000',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_base_specified_dll',
+ 'type': 'shared_library',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'BaseAddress': '0x10420000',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_base_default_exe',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_base_default_dll',
+ 'type': 'shared_library',
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/debug-info.gyp b/third_party/python/gyp/test/win/linker-flags/debug-info.gyp
new file mode 100644
index 0000000000..d47d0ecced
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/debug-info.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_debug_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'false'
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_debug_on',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true'
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/deffile-multiple.gyp b/third_party/python/gyp/test/win/linker-flags/deffile-multiple.gyp
new file mode 100644
index 0000000000..c74a9af20a
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/deffile-multiple.gyp
@@ -0,0 +1,17 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_deffile_multiple_fail',
+ 'type': 'shared_library',
+ 'sources': [
+ 'deffile.cc',
+ 'deffile.def',
+ 'deffile2.def',
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/deffile.cc b/third_party/python/gyp/test/win/linker-flags/deffile.cc
new file mode 100644
index 0000000000..fa203b34c8
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/deffile.cc
@@ -0,0 +1,13 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void AnExportedFunction() {
+}
+
+void AnotherExportedFunction() {
+}
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/deffile.def b/third_party/python/gyp/test/win/linker-flags/deffile.def
new file mode 100644
index 0000000000..ba9d399bd6
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/deffile.def
@@ -0,0 +1,8 @@
+; Copyright (c) 2012 Google Inc. All rights reserved.
+; Use of this source code is governed by a BSD-style license that can be
+; found in the LICENSE file.
+
+LIBRARY test_deffile_ok
+
+EXPORTS
+ AnExportedFunction
diff --git a/third_party/python/gyp/test/win/linker-flags/deffile.gyp b/third_party/python/gyp/test/win/linker-flags/deffile.gyp
new file mode 100644
index 0000000000..7b241d5e3a
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/deffile.gyp
@@ -0,0 +1,38 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_deffile_dll_ok',
+ 'type': 'shared_library',
+ 'sources': [
+ 'deffile.cc',
+ 'deffile.def',
+ ],
+ },
+ {
+ 'target_name': 'test_deffile_dll_notexported',
+ 'type': 'shared_library',
+ 'sources': [
+ 'deffile.cc',
+ ],
+ },
+ {
+ 'target_name': 'test_deffile_exe_ok',
+ 'type': 'executable',
+ 'sources': [
+ 'deffile.cc',
+ 'deffile.def',
+ ],
+ },
+ {
+ 'target_name': 'test_deffile_exe_notexported',
+ 'type': 'executable',
+ 'sources': [
+ 'deffile.cc',
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/delay-load-dlls.gyp b/third_party/python/gyp/test/win/linker-flags/delay-load-dlls.gyp
new file mode 100644
index 0000000000..671cbaa802
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/delay-load-dlls.gyp
@@ -0,0 +1,35 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_dld_none',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ }
+ },
+ 'sources': ['delay-load.cc'],
+ 'libraries': [
+ 'delayimp.lib',
+ 'shell32.lib',
+ ],
+ },
+ {
+ 'target_name': 'test_dld_shell32',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'DelayLoadDLLs': ['shell32.dll']
+ }
+ },
+ 'sources': ['delay-load.cc'],
+ 'libraries': [
+ 'delayimp.lib',
+ 'shell32.lib',
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/delay-load.cc b/third_party/python/gyp/test/win/linker-flags/delay-load.cc
new file mode 100644
index 0000000000..2be34aa876
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/delay-load.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <shlobj.h>
+
+int main() {
+ SHCreateDirectory(0, 0);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/embed-manifest.gyp b/third_party/python/gyp/test/win/linker-flags/embed-manifest.gyp
new file mode 100644
index 0000000000..fefb2f56d8
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/embed-manifest.gyp
@@ -0,0 +1,109 @@
+# Copyright (c) 2013 Yandex LLC. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_manifest_exe',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LinkIncremental': '1',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ }
+ },
+ },
+ {
+ 'target_name': 'test_manifest_dll',
+ 'type': 'loadable_module',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LinkIncremental': '1',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ }
+ },
+ },
+ {
+ 'target_name': 'test_manifest_extra1',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ 'AdditionalManifestFiles': 'extra.manifest',
+ }
+ },
+ },
+ {
+ 'target_name': 'test_manifest_extra2',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ 'AdditionalManifestFiles': 'extra.manifest;extra2.manifest',
+ }
+ },
+ },
+ {
+ 'target_name': 'test_manifest_extra_list',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ 'AdditionalManifestFiles': [
+ 'extra.manifest',
+ 'extra2.manifest'
+ ],
+ }
+ },
+ },
+ {
+ 'target_name': 'test_manifest_dll_inc',
+ 'type': 'loadable_module',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LinkIncremental': '2',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ }
+ },
+ },
+ {
+ 'target_name': 'test_manifest_exe_inc',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LinkIncremental': '2',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ }
+ },
+ },
+ {
+ 'target_name': 'test_manifest_exe_inc_no_embed',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LinkIncremental': '2',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'false',
+ }
+ },
+ },
+ ]
+}
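A quick way to inspect the result of EmbedManifest by hand, as an illustrative sketch (not part of the patch), is to pull resource #1 back out with mt.exe from a Visual Studio command prompt; target_exe below is a placeholder for any of the executables defined above.

    import subprocess

    target_exe = 'test_manifest_exe.exe'  # placeholder; any target above works
    # Resource id #1 is the conventional slot for an application manifest.
    subprocess.check_call(['mt.exe', '-nologo',
                           '-inputresource:%s;#1' % target_exe,
                           '-out:extracted.manifest'])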
diff --git a/third_party/python/gyp/test/win/linker-flags/enable-uac.gyp b/third_party/python/gyp/test/win/linker-flags/enable-uac.gyp
new file mode 100644
index 0000000000..4e58c86ec8
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/enable-uac.gyp
@@ -0,0 +1,45 @@
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'enable_uac',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ }
+ },
+ },
+ {
+ 'target_name': 'enable_uac_no',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'false',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ }
+ },
+ },
+ {
+ 'target_name': 'enable_uac_admin',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'UACExecutionLevel': 2,
+ 'UACUIAccess': 'true',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ }
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/entrypointsymbol.cc b/third_party/python/gyp/test/win/linker-flags/entrypointsymbol.cc
new file mode 100644
index 0000000000..b567bc87b3
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/entrypointsymbol.cc
@@ -0,0 +1,13 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// The entry point specified by link.exe /ENTRY option.
+extern "C" void MainEntryPoint() {
+}
+
+// Still needed because the linker checks for existence of one of main, wmain,
+// WinMain, or wMain to offer informative diagnostics.
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/entrypointsymbol.gyp b/third_party/python/gyp/test/win/linker-flags/entrypointsymbol.gyp
new file mode 100644
index 0000000000..7f2c14252d
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/entrypointsymbol.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_ok',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EntryPointSymbol': 'MainEntryPoint',
+ }
+ },
+ 'sources': ['entrypointsymbol.cc'],
+ },
+ {
+ 'target_name': 'test_fail',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EntryPointSymbol': 'MainEntryPoint',
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/extra.manifest b/third_party/python/gyp/test/win/linker-flags/extra.manifest
new file mode 100644
index 0000000000..2e436dc251
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/extra.manifest
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+
+ <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
+ <application>
+ <!--This Id value indicates the application supports Windows 7 functionality-->
+ <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
+ </application>
+ </compatibility>
+
+</assembly>
diff --git a/third_party/python/gyp/test/win/linker-flags/extra2.manifest b/third_party/python/gyp/test/win/linker-flags/extra2.manifest
new file mode 100644
index 0000000000..bfb570ca59
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/extra2.manifest
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+
+ <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
+ <application>
+ <!--This Id value indicates the application supports Windows Vista functionality -->
+ <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
+ </application>
+ </compatibility>
+
+</assembly>
diff --git a/third_party/python/gyp/test/win/linker-flags/fixed-base.gyp b/third_party/python/gyp/test/win/linker-flags/fixed-base.gyp
new file mode 100644
index 0000000000..cc2982eb27
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/fixed-base.gyp
@@ -0,0 +1,52 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+    # Disable DYNAMICBASE for these tests because, depending on the case, it
+    # does or does not imply FIXED, which complicates testing FIXED itself.
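+    # FixedBaseAddress '1' maps to /FIXED:NO and '2' to /FIXED;
+    # RandomizedBaseAddress '1' maps to /DYNAMICBASE:NO.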
+ {
+ 'target_name': 'test_fixed_default_exe',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'RandomizedBaseAddress': '1',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_fixed_default_dll',
+ 'type': 'shared_library',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'RandomizedBaseAddress': '1',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_fixed_no',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'FixedBaseAddress': '1',
+ 'RandomizedBaseAddress': '1',
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_fixed_yes',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'FixedBaseAddress': '2',
+ 'RandomizedBaseAddress': '1',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/force-symbol-reference.gyp b/third_party/python/gyp/test/win/linker-flags/force-symbol-reference.gyp
new file mode 100644
index 0000000000..d6d02a6848
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/force-symbol-reference.gyp
@@ -0,0 +1,39 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_force_reference_lib',
+ 'type': 'static_library',
+ 'sources': ['x.cc', 'y.cc'],
+ },
+ {
+ 'target_name': 'test_force_reference',
+ 'type': 'executable',
+ # Turn on debug info to get symbols in disasm for the test code, and
+ # turn on opt:ref to drop unused symbols to make sure we wouldn't
+ # otherwise have the symbols.
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'AdditionalOptions': [
+ '/OPT:REF',
+ ],
+ 'ForceSymbolReferences': [
+ '?x@@YAHXZ',
+ '?y@@YAHXZ',
+ ],
+ },
+ },
+ 'sources': ['hello.cc'],
+ 'dependencies': [
+ 'test_force_reference_lib',
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/generate-manifest.gyp b/third_party/python/gyp/test/win/linker-flags/generate-manifest.gyp
new file mode 100644
index 0000000000..34a68d1a48
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/generate-manifest.gyp
@@ -0,0 +1,166 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_generate_manifest_true',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ 'GenerateManifest': 'true',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'false',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_generate_manifest_false',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ 'GenerateManifest': 'false',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'false',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_generate_manifest_default',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'false',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_generate_manifest_true_as_embedded',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ 'GenerateManifest': 'true',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_generate_manifest_false_as_embedded',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ 'GenerateManifest': 'false',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_generate_manifest_default_as_embedded',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_generate_manifest_true_with_extra_manifest',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ 'GenerateManifest': 'true',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'false',
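+          # AdditionalManifestFiles accepts either a semicolon-separated string
+          # (as here) or a list of files (see the *_list targets below).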
+ 'AdditionalManifestFiles': 'extra.manifest;extra2.manifest',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_generate_manifest_false_with_extra_manifest',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ 'GenerateManifest': 'false',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'false',
+ 'AdditionalManifestFiles': 'extra.manifest;extra2.manifest',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_generate_manifest_true_with_extra_manifest_list',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ 'GenerateManifest': 'true',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'false',
+ 'AdditionalManifestFiles': [
+ 'extra.manifest',
+ 'extra2.manifest',
+ ],
+ },
+ },
+ },
+ {
+ 'target_name': 'test_generate_manifest_false_with_extra_manifest_list',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ 'GenerateManifest': 'false',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'false',
+ 'AdditionalManifestFiles': [
+ 'extra.manifest',
+ 'extra2.manifest',
+ ],
+ },
+ },
+ },
+ {
+ 'target_name': 'test_generate_manifest_default_embed_default',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/hello.cc b/third_party/python/gyp/test/win/linker-flags/hello.cc
new file mode 100644
index 0000000000..1711567ef5
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/hello.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/incremental.gyp b/third_party/python/gyp/test/win/linker-flags/incremental.gyp
new file mode 100644
index 0000000000..59f3103253
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/incremental.gyp
@@ -0,0 +1,65 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ # Turn on debug information so the incremental linking tables have a
+ # visible symbolic name in the disassembly.
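+    # LinkIncremental '0' leaves the flag unset, '1' maps to /INCREMENTAL:NO,
+    # and '2' maps to /INCREMENTAL.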
+ {
+ 'target_name': 'test_incremental_unset',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_incremental_default',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'LinkIncremental': '0',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_incremental_no',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'LinkIncremental': '1',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_incremental_yes',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'LinkIncremental': '2',
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/inline_test.cc b/third_party/python/gyp/test/win/linker-flags/inline_test.cc
new file mode 100644
index 0000000000..a9f177e476
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/inline_test.cc
@@ -0,0 +1,12 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "inline_test.h"
+
+#include <intrin.h>
+#pragma intrinsic(_ReturnAddress)
+
+bool IsFunctionInlined(void* caller_return_address) {
+ return _ReturnAddress() == caller_return_address;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/inline_test.h b/third_party/python/gyp/test/win/linker-flags/inline_test.h
new file mode 100644
index 0000000000..117913c4f5
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/inline_test.h
@@ -0,0 +1,5 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+bool IsFunctionInlined(void* current_return_address);
diff --git a/third_party/python/gyp/test/win/linker-flags/inline_test_main.cc b/third_party/python/gyp/test/win/linker-flags/inline_test_main.cc
new file mode 100644
index 0000000000..23cafe8f94
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/inline_test_main.cc
@@ -0,0 +1,15 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "inline_test.h"
+
+#include <intrin.h>
+#include <stdio.h>
+
+#pragma intrinsic(_ReturnAddress)
+
+int main() {
+ if (IsFunctionInlined(_ReturnAddress()))
+ puts("==== inlined ====\n");
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/large-address-aware.gyp b/third_party/python/gyp/test/win/linker-flags/large-address-aware.gyp
new file mode 100644
index 0000000000..fa56d3789c
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/large-address-aware.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
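+    # LargeAddressAware '1' maps to /LARGEADDRESSAWARE:NO and '2' to
+    # /LARGEADDRESSAWARE.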
+ {
+ 'target_name': 'test_large_address_aware_no',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LargeAddressAware': '1',
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_large_address_aware_yes',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LargeAddressAware': '2',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/library-adjust.cc b/third_party/python/gyp/test/win/linker-flags/library-adjust.cc
new file mode 100644
index 0000000000..7dfb589d26
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/library-adjust.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <winsock2.h>
+
+int main() {
+ WSAStartup(0, 0);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/library-adjust.gyp b/third_party/python/gyp/test/win/linker-flags/library-adjust.gyp
new file mode 100644
index 0000000000..10e9996f5c
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/library-adjust.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_adjust',
+ 'type': 'executable',
+ 'libraries': [
+ '-lws2_32.lib'
+ ],
+ 'sources': ['library-adjust.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/library-directories-define.cc b/third_party/python/gyp/test/win/linker-flags/library-directories-define.cc
new file mode 100644
index 0000000000..211ef062c1
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/library-directories-define.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int library_function() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/library-directories-reference.cc b/third_party/python/gyp/test/win/linker-flags/library-directories-reference.cc
new file mode 100644
index 0000000000..335097839a
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/library-directories-reference.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+extern int library_function();
+
+int main() {
+ library_function();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/library-directories.gyp b/third_party/python/gyp/test/win/linker-flags/library-directories.gyp
new file mode 100644
index 0000000000..25395d6c87
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/library-directories.gyp
@@ -0,0 +1,42 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_libdirs_none',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'AdditionalDependencies': [
+ 'test_lib.lib',
+ ],
+ },
+ },
+ 'sources': ['library-directories-reference.cc'],
+ },
+ {
+ 'target_name': 'test_libdirs_with',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ # NOTE: Don't use this for general dependencies between gyp
+ # libraries (use 'dependencies' instead). This is done here only for
+ # testing.
+ #
+ # This setting should only be used to depend on third party prebuilt
+ # libraries that are stored as binaries at a known location.
+ 'AdditionalLibraryDirectories': [
+ '<(DEPTH)/out/Default/obj/subdir', # ninja style
+ '<(DEPTH)/subdir/Default/lib', # msvs style
+ ],
+ 'AdditionalDependencies': [
+ 'test_lib.lib',
+ ],
+ },
+ },
+ 'sources': ['library-directories-reference.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/link-ordering.gyp b/third_party/python/gyp/test/win/linker-flags/link-ordering.gyp
new file mode 100644
index 0000000000..66f44309d1
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/link-ordering.gyp
@@ -0,0 +1,95 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_ordering_exe',
+ 'type': 'executable',
+ # These are so the names of the functions appear in the disassembly.
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ 'Optimization': '2',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'LinkIncremental': '1',
+ 'GenerateManifest': 'false',
+ # Minimize the disassembly to just our code.
+ 'AdditionalOptions': [
+ '/NODEFAULTLIB',
+ ],
+ },
+ },
+ 'sources': [
+ # Explicitly sorted the same way as the disassembly in the test .py.
+ 'main-crt.c',
+ 'z.cc',
+ 'x.cc',
+ 'y.cc',
+ 'hello.cc',
+ ],
+ },
+
+ {
+ 'target_name': 'test_ordering_subdirs',
+ 'type': 'executable',
+ # These are so the names of the functions appear in the disassembly.
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ 'Optimization': '2',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'LinkIncremental': '1',
+ 'GenerateManifest': 'false',
+ # Minimize the disassembly to just our code.
+ 'AdditionalOptions': [
+ '/NODEFAULTLIB',
+ ],
+ },
+ },
+ 'sources': [
+ # Explicitly sorted the same way as the disassembly in the test .py.
+ 'main-crt.c',
+ 'hello.cc',
+ 'b/y.cc',
+ 'a/z.cc',
+ ],
+ },
+
+
+ {
+ 'target_name': 'test_ordering_subdirs_mixed',
+ 'type': 'executable',
+ # These are so the names of the functions appear in the disassembly.
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ 'Optimization': '2',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'LinkIncremental': '1',
+ 'GenerateManifest': 'false',
+ # Minimize the disassembly to just our code.
+ 'AdditionalOptions': [
+ '/NODEFAULTLIB',
+ ],
+ },
+ },
+ 'sources': [
+ # Explicitly sorted the same way as the disassembly in the test .py.
+ 'main-crt.c',
+ 'a/x.cc',
+ 'hello.cc',
+ 'a/z.cc',
+ 'y.cc',
+ ],
+ },
+
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/link-warning.cc b/third_party/python/gyp/test/win/linker-flags/link-warning.cc
new file mode 100644
index 0000000000..4b34277ba3
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/link-warning.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This will cause LNK4254.
+#pragma comment(linker, "/merge:.data=.text")
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/ltcg.gyp b/third_party/python/gyp/test/win/linker-flags/ltcg.gyp
new file mode 100644
index 0000000000..ddb0d9b4e2
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/ltcg.gyp
@@ -0,0 +1,42 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_ltcg_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WholeProgramOptimization': 'false',
+ },
+ 'VCLinkerTool': {
+ 'LinkTimeCodeGeneration': '0',
+ },
+ },
+ 'sources': [
+ 'inline_test.h',
+ 'inline_test.cc',
+ 'inline_test_main.cc',
+ ],
+ },
+ {
+ 'target_name': 'test_ltcg_on',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WholeProgramOptimization': 'true', # /GL
+ },
+ 'VCLinkerTool': {
+ 'LinkTimeCodeGeneration': '1', # /LTCG
+ },
+ },
+ 'sources': [
+ 'inline_test.h',
+ 'inline_test.cc',
+ 'inline_test_main.cc',
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/main-crt.c b/third_party/python/gyp/test/win/linker-flags/main-crt.c
new file mode 100644
index 0000000000..bdc80c54fd
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/main-crt.c
@@ -0,0 +1,8 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Stub so we can link with /NODEFAULTLIB when checking disasm.
+int mainCRTStartup() {
+ return 5;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/manifest-in-comment.cc b/third_party/python/gyp/test/win/linker-flags/manifest-in-comment.cc
new file mode 100644
index 0000000000..ae54ae5462
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/manifest-in-comment.cc
@@ -0,0 +1,13 @@
+// Copyright 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#pragma comment(linker, \
+ "\"/manifestdependency:type='Win32' " \
+ "name='Test.Research.SampleAssembly' version='6.0.0.0' " \
+ "processorArchitecture='X86' " \
+ "publicKeyToken='0000000000000000' language='*'\"")
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/mapfile.cc b/third_party/python/gyp/test/win/linker-flags/mapfile.cc
new file mode 100644
index 0000000000..cebccb264a
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/mapfile.cc
@@ -0,0 +1,12 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+__declspec(dllexport)
+void AnExportedFunction() {
+ // We need an exported function to verify that /MAPINFO:EXPORTS works.
+}
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/mapfile.gyp b/third_party/python/gyp/test/win/linker-flags/mapfile.gyp
new file mode 100644
index 0000000000..14206fe28d
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/mapfile.gyp
@@ -0,0 +1,45 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_mapfile_unset',
+ 'type': 'executable',
+ 'sources': ['mapfile.cc'],
+ },
+ {
+ 'target_name': 'test_mapfile_generate',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'GenerateMapFile': 'true',
+ },
+ },
+ 'sources': ['mapfile.cc'],
+ },
+ {
+ 'target_name': 'test_mapfile_generate_exports',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'GenerateMapFile': 'true',
+ 'MapExports': 'true',
+ },
+ },
+ 'sources': ['mapfile.cc'],
+ },
+ {
+ 'target_name': 'test_mapfile_generate_filename',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'GenerateMapFile': 'true',
+ 'MapFileName': '<(PRODUCT_DIR)/custom_file_name.map',
+ },
+ },
+ 'sources': ['mapfile.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/no-default-libs.cc b/third_party/python/gyp/test/win/linker-flags/no-default-libs.cc
new file mode 100644
index 0000000000..e306846987
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/no-default-libs.cc
@@ -0,0 +1,18 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Reference something in kernel32.dll. This will fail to link, verifying that
+// GYP provides no default import library configuration.
+// Note that we don't include Windows.h, as that will result in generating
+// linker directives in the object file through #pragma comment(lib, ...).
+typedef short BOOL;
+
+extern "C" __declspec(dllimport)
+BOOL CopyFileW(const wchar_t*, const wchar_t*, BOOL);
+
+
+int main() {
+ CopyFileW(0, 0, 0); // kernel32
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/no-default-libs.gyp b/third_party/python/gyp/test/win/linker-flags/no-default-libs.gyp
new file mode 100644
index 0000000000..77838ce8c4
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/no-default-libs.gyp
@@ -0,0 +1,13 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_default',
+ 'type': 'executable',
+ 'sources': ['no-default-libs.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/nodefaultlib.cc b/third_party/python/gyp/test/win/linker-flags/nodefaultlib.cc
new file mode 100644
index 0000000000..24b6eca438
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/nodefaultlib.cc
@@ -0,0 +1,13 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Provide the entry point normally supplied by the C runtime libraries, which
+// are excluded here.
+extern "C" void mainCRTStartup() {
+}
+
+// Still needed because the linker checks for existence of one of main, wmain,
+// WinMain, or wMain to offer informative diagnostics.
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/nodefaultlib.gyp b/third_party/python/gyp/test/win/linker-flags/nodefaultlib.gyp
new file mode 100644
index 0000000000..4fb452a18b
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/nodefaultlib.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
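+    # IgnoreDefaultLibraryNames emits /NODEFAULTLIB:<name> for each listed
+    # library.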
+ {
+ 'target_name': 'test_ok',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'IgnoreDefaultLibraryNames':
+ ['libcmtd.lib', 'libcmt.lib', 'msvcrt.lib', 'msvcrtd.lib'],
+ }
+ },
+ 'sources': ['nodefaultlib.cc'],
+ },
+ {
+ 'target_name': 'test_fail',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'IgnoreDefaultLibraryNames':
+ ['libcmtd.lib', 'libcmt.lib', 'msvcrt.lib', 'msvcrtd.lib'],
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/nxcompat.gyp b/third_party/python/gyp/test/win/linker-flags/nxcompat.gyp
new file mode 100644
index 0000000000..fa4118cbd7
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/nxcompat.gyp
@@ -0,0 +1,35 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
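+    # DataExecutionPrevention '1' maps to /NXCOMPAT:NO and '2' to /NXCOMPAT.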
+ {
+ 'target_name': 'test_nxcompat_default',
+ 'type': 'executable',
+ 'msvs_settings': {
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_nxcompat_no',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'DataExecutionPrevention': '1',
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_nxcompat_yes',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'DataExecutionPrevention': '2',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/opt-icf.cc b/third_party/python/gyp/test/win/linker-flags/opt-icf.cc
new file mode 100644
index 0000000000..1f12156b7f
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/opt-icf.cc
@@ -0,0 +1,29 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void similar_function0(char* x) {
+ while (*x) {
+ ++x;
+ }
+}
+
+void similar_function1(char* p) {
+ while (*p) {
+ ++p;
+ }
+}
+
+void similar_function2(char* q) {
+ while (*q) {
+ ++q;
+ }
+}
+
+int main() {
+ char* x = "hello";
+ similar_function0(x);
+ similar_function1(x);
+ similar_function2(x);
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/opt-icf.gyp b/third_party/python/gyp/test/win/linker-flags/opt-icf.gyp
new file mode 100644
index 0000000000..effe8021c3
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/opt-icf.gyp
@@ -0,0 +1,63 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ # Have to turn on function level linking here to get the function packaged
+ # as a COMDAT so that it's eligible for merging. Also turn on debug
+ # information so that the symbol names for the code appear in the dump.
+ # Finally, specify non-incremental linking so that there's not a bunch of
+ # extra "similar_function"s in the output (the ILT jump table).
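+    # EnableCOMDATFolding '1' maps to /OPT:NOICF and '2' to /OPT:ICF.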
+ {
+ 'target_name': 'test_opticf_default',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableFunctionLevelLinking': 'true',
+ 'DebugInformationFormat': '3',
+ 'Optimization': '0',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'LinkIncremental': '1',
+ },
+ },
+ 'sources': ['opt-icf.cc'],
+ },
+ {
+ 'target_name': 'test_opticf_no',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableFunctionLevelLinking': 'true',
+ 'DebugInformationFormat': '3',
+ 'Optimization': '0',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'EnableCOMDATFolding': '1',
+ 'LinkIncremental': '1',
+ },
+ },
+ 'sources': ['opt-icf.cc'],
+ },
+ {
+ 'target_name': 'test_opticf_yes',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableFunctionLevelLinking': 'true',
+ 'DebugInformationFormat': '3',
+ 'Optimization': '0',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'EnableCOMDATFolding': '2',
+ 'LinkIncremental': '1',
+ },
+ },
+ 'sources': ['opt-icf.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/opt-ref.cc b/third_party/python/gyp/test/win/linker-flags/opt-ref.cc
new file mode 100644
index 0000000000..afaa328a5d
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/opt-ref.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int unused_function() {
+ return 0;
+}
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/opt-ref.gyp b/third_party/python/gyp/test/win/linker-flags/opt-ref.gyp
new file mode 100644
index 0000000000..69d0281a08
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/opt-ref.gyp
@@ -0,0 +1,56 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ # Have to turn on function level linking here to get the function packaged
+ # as a COMDAT so that it's eligible for optimizing away. Also turn on
+ # debug information so that the symbol names for the code appear in the
+ # dump (so we can verify if they are included in the final exe).
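+    # OptimizeReferences '1' maps to /OPT:NOREF and '2' to /OPT:REF.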
+ {
+ 'target_name': 'test_optref_default',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableFunctionLevelLinking': 'true',
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ },
+ },
+ 'sources': ['opt-ref.cc'],
+ },
+ {
+ 'target_name': 'test_optref_no',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableFunctionLevelLinking': 'true',
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'OptimizeReferences': '1',
+ },
+ },
+ 'sources': ['opt-ref.cc'],
+ },
+ {
+ 'target_name': 'test_optref_yes',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'EnableFunctionLevelLinking': 'true',
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'OptimizeReferences': '2',
+ },
+ },
+ 'sources': ['opt-ref.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/outputfile.gyp b/third_party/python/gyp/test/win/linker-flags/outputfile.gyp
new file mode 100644
index 0000000000..1022ec2e20
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/outputfile.gyp
@@ -0,0 +1,58 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_output_exe',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(OutDir)\\blorp.exe'
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_output_exe2',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(OutDir)\\subdir\\blorp.exe'
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_output_dll',
+ 'type': 'shared_library',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(OutDir)\\blorp.dll'
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_output_lib',
+ 'type': 'static_library',
+ 'msvs_settings': {
+ 'VCLibrarianTool': {
+ 'OutputFile': '$(OutDir)\\blorp.lib'
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_output_lib2',
+ 'type': 'static_library',
+ 'msvs_settings': {
+ 'VCLibrarianTool': {
+ 'OutputFile': '$(OutDir)\\subdir\\blorp.lib'
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/pdb-output.gyp b/third_party/python/gyp/test/win/linker-flags/pdb-output.gyp
new file mode 100644
index 0000000000..1a03c67cc0
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/pdb-output.gyp
@@ -0,0 +1,49 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_pdb_output_exe',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3'
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'ProgramDatabaseFile': 'output_exe.pdb',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_pdb_output_dll',
+ 'type': 'shared_library',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3'
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'ProgramDatabaseFile': 'output_dll.pdb',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_pdb_output_disabled',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '0'
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'false',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/pgo.gyp b/third_party/python/gyp/test/win/linker-flags/pgo.gyp
new file mode 100644
index 0000000000..da32639973
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/pgo.gyp
@@ -0,0 +1,143 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'pgd_basename': 'test_pgo',
+ },
+ 'targets': [
+ # In the PGO (Profile-Guided Optimization) build flow, we need to build the
+ # target binary multiple times. To implement this flow with gyp, here we
+    # define multiple 'executable' targets, each of which represents one
+    # particular build/profile stage. One tricky part of doing this is that these
+ # 'executable' targets should share the code itself so that profile data
+ # can be reused among these 'executable' files. In other words, the only
+ # differences among below 'executable' targets are:
+ # 1) PGO (Profile-Guided Optimization) database, and
+ # 2) linker options.
+    # The following static library contains all the logic, including the entry
+    # point. Basically we don't need to rebuild this target once we enter the
+    # profiling phase of PGO.
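+    # In the targets below, LinkTimeCodeGeneration '2' maps to
+    # /LTCG:PGINSTRUMENT, '3' to /LTCG:PGOPTIMIZE, and '4' to /LTCG:PGUPDATE.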
+ {
+ 'target_name': 'test_pgo_main',
+ 'type': 'static_library',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WholeProgramOptimization': 'true', # /GL
+ },
+ 'VCLibrarianTool': {
+ 'LinkTimeCodeGeneration': 'true',
+ },
+ },
+ 'link_settings': {
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'ProfileGuidedDatabase': '$(OutDir)\\<(pgd_basename).pgd',
+ 'TargetMachine': '1', # x86 - 32
+ 'SubSystem': '1', # /SUBSYSTEM:CONSOLE
+            # Tell the ninja generator not to pass the /ManifestFile:<filename>
+            # option to the linker, because it causes an LNK1268 error in PGO
+            # builds.
+ 'GenerateManifest': 'false',
+ # We need to specify 'libcmt.lib' here so that the linker can pick
+ # up a valid entry point.
+ 'AdditionalDependencies': [
+ 'libcmt.lib',
+ ],
+ },
+ },
+ },
+ 'sources': [
+ 'inline_test.h',
+ 'inline_test.cc',
+ 'inline_test_main.cc',
+ ],
+ },
+ {
+ 'target_name': 'test_pgo_instrument',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LinkTimeCodeGeneration': '2',
+ },
+ },
+ 'dependencies': [
+ 'test_pgo_main',
+ ],
+ },
+ {
+ 'target_name': 'gen_profile_guided_database',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'actions': [
+ {
+ 'action_name': 'action_main',
+ 'inputs': [],
+ 'outputs': [
+ '$(OutDir)\\<(pgd_basename).pgd',
+ ],
+ 'action': [
+ 'python', 'update_pgd.py',
+ '--vcbindir', '$(VCInstallDir)bin',
+ '--exe', '$(OutDir)\\test_pgo_instrument.exe',
+ '--pgd', '$(OutDir)\\<(pgd_basename).pgd',
+ ],
+ },
+ ],
+ 'dependencies': [
+ 'test_pgo_instrument',
+ ],
+ },
+ {
+ 'target_name': 'test_pgo_optimize',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LinkTimeCodeGeneration': '3',
+ },
+ },
+ 'sources': [
+ '$(OutDir)\\<(pgd_basename).pgd',
+ ],
+ 'dependencies': [
+ 'test_pgo_main',
+ 'gen_profile_guided_database',
+ ],
+ },
+ {
+ 'target_name': 'test_pgo_update',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'LinkTimeCodeGeneration': '4',
+ },
+ },
+ 'sources': [
+ '$(OutDir)\\<(pgd_basename).pgd',
+ ],
+ 'dependencies': [
+ 'test_pgo_main',
+ ],
+ },
+ # A helper target to dump link.exe's command line options. We can use the
+    # output to determine if PGO (Profile-Guided Optimization) is available in
+ # the test environment.
+ {
+ 'target_name': 'gen_linker_option',
+ 'type': 'none',
+ 'msvs_cygwin_shell': 0,
+ 'actions': [
+ {
+ 'action_name': 'action_main',
+ 'inputs': [],
+ 'outputs': [
+ '$(OutDir)\\linker_options.txt',
+ ],
+ 'action': [
+ 'cmd.exe', '/c link.exe > $(OutDir)\\linker_options.txt & exit 0',
+ ],
+ },
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/profile.gyp b/third_party/python/gyp/test/win/linker-flags/profile.gyp
new file mode 100644
index 0000000000..d60a700fbb
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/profile.gyp
@@ -0,0 +1,50 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+    # Verify that the 'Profile' option correctly makes it to the LINK step in
+    # Ninja.
+ {
+ 'target_name': 'test_profile_true',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3'
+ },
+ 'VCLinkerTool': {
+ 'Profile': 'true',
+ 'GenerateDebugInformation': 'true',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_profile_false',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3'
+ },
+ 'VCLinkerTool': {
+ 'Profile': 'false',
+ 'GenerateDebugInformation': 'true',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_profile_default',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3'
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/program-database.gyp b/third_party/python/gyp/test/win/linker-flags/program-database.gyp
new file mode 100644
index 0000000000..6e60ac0dc9
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/program-database.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+    # Verify that the 'ProgramDatabaseFile' option correctly makes it to the
+    # LINK step in Ninja.
+ {
+      # Verify that VC macros and Windows paths work correctly.
+ 'target_name': 'test_pdb_outdir',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3'
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'ProgramDatabaseFile': '$(OutDir)\\name_outdir.pdb',
+ },
+ },
+ },
+ {
+ # Verify that GYP macros and POSIX paths work correctly.
+ 'target_name': 'test_pdb_proddir',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3'
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'ProgramDatabaseFile': '<(PRODUCT_DIR)/name_proddir.pdb',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/safeseh.gyp b/third_party/python/gyp/test/win/linker-flags/safeseh.gyp
new file mode 100644
index 0000000000..d4a62074b8
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/safeseh.gyp
@@ -0,0 +1,79 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'configurations': {
+ 'Default': {
+ 'msvs_configuration_platform': 'Win32',
+ },
+ 'Default_x64': {
+ 'inherit_from': ['Default'],
+ 'msvs_configuration_platform': 'x64',
+ },
+ },
+ },
+ 'targets': [
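+    # ImageHasSafeExceptionHandlers 'true' maps to /SAFESEH and 'false' to
+    # /SAFESEH:NO.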
+ {
+ 'target_name': 'test_safeseh_default',
+ 'type': 'executable',
+ 'msvs_settings': {
+ # By default, msvs passes /SAFESEH for Link, but not for MASM. In
+ # order for test_safeseh_default to link successfully, we need to
+ # explicitly specify /SAFESEH for MASM.
+ 'MASM': {
+ 'UseSafeExceptionHandlers': 'true',
+ },
+ },
+ 'sources': [
+ 'safeseh_hello.cc',
+ 'safeseh_zero.asm',
+ ],
+ },
+ {
+ 'target_name': 'test_safeseh_no',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'ImageHasSafeExceptionHandlers': 'false',
+ },
+ },
+ 'sources': [
+ 'safeseh_hello.cc',
+ 'safeseh_zero.asm',
+ ],
+ },
+ {
+ 'target_name': 'test_safeseh_yes',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'ImageHasSafeExceptionHandlers': 'true',
+ },
+ 'MASM': {
+ 'UseSafeExceptionHandlers': 'true',
+ },
+ },
+ 'sources': [
+ 'safeseh_hello.cc',
+ 'safeseh_zero.asm',
+ ],
+ },
+ {
+ # x64 targets cannot have ImageHasSafeExceptionHandlers or
+ # UseSafeExceptionHandlers set.
+ 'target_name': 'test_safeseh_x64',
+ 'type': 'executable',
+ 'configurations': {
+ 'Default': {
+ 'msvs_target_platform': 'x64',
+ },
+ },
+ 'sources': [
+ 'safeseh_hello.cc',
+ 'safeseh_zero64.asm',
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/safeseh_hello.cc b/third_party/python/gyp/test/win/linker-flags/safeseh_hello.cc
new file mode 100644
index 0000000000..6141300d2c
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/safeseh_hello.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+extern "C" {
+int zero(void);
+}
+
+int main() {
+ return zero();
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/safeseh_zero.asm b/third_party/python/gyp/test/win/linker-flags/safeseh_zero.asm
new file mode 100644
index 0000000000..62da0df4f3
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/safeseh_zero.asm
@@ -0,0 +1,10 @@
+.MODEL FLAT, C
+.CODE
+
+PUBLIC zero
+zero PROC
+ xor eax, eax
+ ret 0
+zero ENDP
+
+END
diff --git a/third_party/python/gyp/test/win/linker-flags/safeseh_zero64.asm b/third_party/python/gyp/test/win/linker-flags/safeseh_zero64.asm
new file mode 100644
index 0000000000..a4740c0dfb
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/safeseh_zero64.asm
@@ -0,0 +1,9 @@
+.CODE
+
+PUBLIC zero
+zero PROC
+ xor eax, eax
+ ret 0
+zero ENDP
+
+END
diff --git a/third_party/python/gyp/test/win/linker-flags/stacksize.gyp b/third_party/python/gyp/test/win/linker-flags/stacksize.gyp
new file mode 100644
index 0000000000..bba44ca4a7
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/stacksize.gyp
@@ -0,0 +1,44 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_default',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_set_reserved_size',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'StackReserveSize': 2097152, # 2MB
+ }
+ },
+ },
+ {
+ 'target_name': 'test_set_commit_size',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'StackCommitSize': 8192, # 8KB
+ }
+ },
+ },
+ {
+ 'target_name': 'test_set_both',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'StackReserveSize': 2097152, # 2MB
+ 'StackCommitSize': 8192, # 8KB
+ }
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/subdir/library.gyp b/third_party/python/gyp/test/win/linker-flags/subdir/library.gyp
new file mode 100644
index 0000000000..519577f0d7
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/subdir/library.gyp
@@ -0,0 +1,13 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_lib',
+ 'type': 'static_library',
+ 'sources': ['../library-directories-define.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/subsystem-windows.cc b/third_party/python/gyp/test/win/linker-flags/subsystem-windows.cc
new file mode 100644
index 0000000000..ac99da808e
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/subsystem-windows.cc
@@ -0,0 +1,9 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <windows.h>
+
+int CALLBACK WinMain(HINSTANCE, HINSTANCE, LPSTR, int) {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/subsystem.gyp b/third_party/python/gyp/test/win/linker-flags/subsystem.gyp
new file mode 100644
index 0000000000..63f072a206
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/subsystem.gyp
@@ -0,0 +1,70 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
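+    # SubSystem '1' maps to /SUBSYSTEM:CONSOLE and '2' to /SUBSYSTEM:WINDOWS.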
+ {
+ 'target_name': 'test_console_ok',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '1'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_console_fail',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '1'
+ }
+ },
+ 'sources': ['subsystem-windows.cc'],
+ },
+ {
+ 'target_name': 'test_windows_ok',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '2'
+ }
+ },
+ 'sources': ['subsystem-windows.cc'],
+ },
+ {
+ 'target_name': 'test_windows_fail',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '2'
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_console_xp',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '1',
+ 'MinimumRequiredVersion': '5.01', # XP.
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_windows_xp',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '2',
+ 'MinimumRequiredVersion': '5.01', # XP.
+ }
+ },
+ 'sources': ['subsystem-windows.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/target-machine.gyp b/third_party/python/gyp/test/win/linker-flags/target-machine.gyp
new file mode 100644
index 0000000000..30271926c9
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/target-machine.gyp
@@ -0,0 +1,48 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
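+    # TargetMachine '1' maps to /MACHINE:X86 and '17' to /MACHINE:X64.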
+ {
+ 'target_name': 'test_target_link_x86',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'TargetMachine': '1',
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_target_link_x64',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'TargetMachine': '17',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_target_lib_x86',
+ 'type': 'static_library',
+ 'msvs_settings': {
+ 'VCLibrarianTool': {
+ 'TargetMachine': '1',
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_target_lib_x64',
+ 'type': 'static_library',
+ 'msvs_settings': {
+ 'VCLibrarianTool': {
+ 'TargetMachine': '17',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/tsaware.gyp b/third_party/python/gyp/test/win/linker-flags/tsaware.gyp
new file mode 100644
index 0000000000..7ffc7426bb
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/tsaware.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
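+    # TerminalServerAware '1' maps to /TSAWARE:NO and '2' to /TSAWARE.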
+ {
+ 'target_name': 'test_tsaware_no',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'TerminalServerAware': '1',
+ }
+ },
+ 'sources': ['hello.cc'],
+ },
+ {
+ 'target_name': 'test_tsaware_yes',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'TerminalServerAware': '2',
+ },
+ },
+ 'sources': ['hello.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/unsupported-manifest.gyp b/third_party/python/gyp/test/win/linker-flags/unsupported-manifest.gyp
new file mode 100644
index 0000000000..5549e7cb9b
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/unsupported-manifest.gyp
@@ -0,0 +1,13 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_unsupported',
+ 'type': 'executable',
+ 'sources': ['manifest-in-comment.cc'],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/update_pgd.py b/third_party/python/gyp/test/win/linker-flags/update_pgd.py
new file mode 100644
index 0000000000..176e9e5472
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/update_pgd.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from optparse import OptionParser
+import glob
+import os
+import subprocess
+
+parser = OptionParser()
+parser.add_option('--exe', dest='exe')
+parser.add_option('--vcbindir', dest='vcbindir')
+parser.add_option('--pgd', dest='pgd')
+(options, args) = parser.parse_args()
+
+# Instrumented binaries fail to run unless Visual C++'s bin dir is included
+# in the PATH environment variable.
+os.environ['PATH'] = os.environ['PATH'] + os.pathsep + options.vcbindir
+
+# Run the instrumented binary. The profile will be recorded into a *.pgc file.
+subprocess.call([options.exe])
+
+# Merge *.pgc files into a *.pgd (Profile-Guided Database) file.
+subprocess.call(['pgomgr', '/merge', options.pgd])
+
+# *.pgc files are no longer necessary. Clear all of them.
+pgd_file = os.path.abspath(options.pgd)
+pgd_dir = os.path.dirname(pgd_file)
+(pgd_basename, _) = os.path.splitext(os.path.basename(pgd_file))
+pgc_filepattern = os.path.join(pgd_dir, '%s!*.pgc' % pgd_basename)
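+# Instrumented runs produce files named like '<pgd_basename>!1.pgc'.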
+pgc_files = glob.glob(pgc_filepattern)
+for pgc_file in pgc_files:
+ os.unlink(pgc_file)
diff --git a/third_party/python/gyp/test/win/linker-flags/warn-as-error.gyp b/third_party/python/gyp/test/win/linker-flags/warn-as-error.gyp
new file mode 100644
index 0000000000..83c67e9df1
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/warn-as-error.gyp
@@ -0,0 +1,33 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_on',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'TreatLinkerWarningAsErrors': 'true',
+ }
+ },
+ 'sources': ['link-warning.cc'],
+ },
+ {
+ 'target_name': 'test_off',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'TreatLinkerWarningAsErrors': 'false',
+ }
+ },
+ 'sources': ['link-warning.cc'],
+ },
+ {
+ 'target_name': 'test_default',
+ 'type': 'executable',
+ 'sources': ['link-warning.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/x.cc b/third_party/python/gyp/test/win/linker-flags/x.cc
new file mode 100644
index 0000000000..f5f763b095
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/x.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int x() {
+ return 1;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/y.cc b/third_party/python/gyp/test/win/linker-flags/y.cc
new file mode 100644
index 0000000000..bd884119fc
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/y.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int y() {
+ return 2;
+}
diff --git a/third_party/python/gyp/test/win/linker-flags/z.cc b/third_party/python/gyp/test/win/linker-flags/z.cc
new file mode 100644
index 0000000000..8a43501270
--- /dev/null
+++ b/third_party/python/gyp/test/win/linker-flags/z.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int z() {
+ return 3;
+}
diff --git a/third_party/python/gyp/test/win/long-command-line/function.cc b/third_party/python/gyp/test/win/long-command-line/function.cc
new file mode 100644
index 0000000000..af44b2cabd
--- /dev/null
+++ b/third_party/python/gyp/test/win/long-command-line/function.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int func() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/long-command-line/hello.cc b/third_party/python/gyp/test/win/long-command-line/hello.cc
new file mode 100644
index 0000000000..1711567ef5
--- /dev/null
+++ b/third_party/python/gyp/test/win/long-command-line/hello.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/long-command-line/long-command-line.gyp b/third_party/python/gyp/test/win/long-command-line/long-command-line.gyp
new file mode 100644
index 0000000000..964c94fa94
--- /dev/null
+++ b/third_party/python/gyp/test/win/long-command-line/long-command-line.gyp
@@ -0,0 +1,54 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'longexe',
+ 'type': 'executable',
+ 'msvs_settings': {
+ # Use this as a simple way to get a long command.
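+      # ('/nologo ' * 8000 is roughly 64,000 characters, well past Windows'
+      # command-line length limits, so the generators must use response files.)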
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': '/nologo ' * 8000,
+ },
+ 'VCLinkerTool': {
+ 'AdditionalOptions': '/nologo ' * 8000,
+ },
+ },
+ 'sources': [
+ 'hello.cc',
+ ],
+ },
+ {
+ 'target_name': 'longlib',
+ 'type': 'static_library',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': '/nologo ' * 8000,
+ },
+ 'VCLibrarianTool': {
+ 'AdditionalOptions': '/nologo ' * 8000,
+ },
+ },
+ 'sources': [
+ 'function.cc',
+ ],
+ },
+ {
+ 'target_name': 'longdll',
+ 'type': 'shared_library',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': '/nologo ' * 8000,
+ },
+ 'VCLinkerTool': {
+ 'AdditionalOptions': '/nologo ' * 8000,
+ },
+ },
+ 'sources': [
+ 'hello.cc',
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/ml-safeseh/a.asm b/third_party/python/gyp/test/win/ml-safeseh/a.asm
new file mode 100644
index 0000000000..62da0df4f3
--- /dev/null
+++ b/third_party/python/gyp/test/win/ml-safeseh/a.asm
@@ -0,0 +1,10 @@
+.MODEL FLAT, C
+.CODE
+
+PUBLIC zero
+zero PROC
+ xor eax, eax
+ ret 0
+zero ENDP
+
+END
diff --git a/third_party/python/gyp/test/win/ml-safeseh/hello.cc b/third_party/python/gyp/test/win/ml-safeseh/hello.cc
new file mode 100644
index 0000000000..6141300d2c
--- /dev/null
+++ b/third_party/python/gyp/test/win/ml-safeseh/hello.cc
@@ -0,0 +1,11 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+extern "C" {
+int zero(void);
+}
+
+int main() {
+ return zero();
+}
diff --git a/third_party/python/gyp/test/win/ml-safeseh/ml-safeseh.gyp b/third_party/python/gyp/test/win/ml-safeseh/ml-safeseh.gyp
new file mode 100644
index 0000000000..bf8618f865
--- /dev/null
+++ b/third_party/python/gyp/test/win/ml-safeseh/ml-safeseh.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'ml_safeseh',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.cc',
+ 'a.asm',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'ImageHasSafeExceptionHandlers': 'true',
+ },
+ 'MASM': {
+ 'UseSafeExceptionHandlers': 'true',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/precompiled/gyptest-all.py b/third_party/python/gyp/test/win/precompiled/gyptest-all.py
new file mode 100644
index 0000000000..9fb5e62edf
--- /dev/null
+++ b/third_party/python/gyp/test/win/precompiled/gyptest-all.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that precompiled headers can be specified.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'], workdir='workarea_all')
+ test.run_gyp('hello.gyp')
+ test.build('hello.gyp', 'hello')
+ test.run_built_executable('hello', stdout="Hello, world!\nHello, two!\n")
+ test.up_to_date('hello.gyp', test.ALL)
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/precompiled/hello.c b/third_party/python/gyp/test/win/precompiled/hello.c
new file mode 100644
index 0000000000..ffb47bf822
--- /dev/null
+++ b/third_party/python/gyp/test/win/precompiled/hello.c
@@ -0,0 +1,14 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+// Note the absence of a stdio.h include. This will be inserted because of the
+// precompiled header.
+
+extern int hello2();
+
+int main(void) {
+ printf("Hello, world!\n");
+ hello2();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/precompiled/hello.gyp b/third_party/python/gyp/test/win/precompiled/hello.gyp
new file mode 100644
index 0000000000..5f82c53593
--- /dev/null
+++ b/third_party/python/gyp/test/win/precompiled/hello.gyp
@@ -0,0 +1,28 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.c',
+ 'hello2.c',
+ 'precomp.c',
+ ],
+ 'msvs_precompiled_header': 'stdio.h',
+ 'msvs_precompiled_source': 'precomp.c',
+
+ # Required so that the printf actually causes a build failure
+ # if the pch isn't included.
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarningLevel': '3',
+ 'WarnAsError': 'true',
+ },
+ },
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/win/precompiled/hello2.c b/third_party/python/gyp/test/win/precompiled/hello2.c
new file mode 100644
index 0000000000..d6d53111fb
--- /dev/null
+++ b/third_party/python/gyp/test/win/precompiled/hello2.c
@@ -0,0 +1,13 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+// Unlike hello.c, this file specifies the headers.
+
+#include <windows.h>
+#include <stdio.h>
+
+int hello2() {
+ printf("Hello, two!\n");
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/precompiled/precomp.c b/third_party/python/gyp/test/win/precompiled/precomp.c
new file mode 100644
index 0000000000..517c61a36b
--- /dev/null
+++ b/third_party/python/gyp/test/win/precompiled/precomp.c
@@ -0,0 +1,8 @@
+/* Copyright (c) 2011 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file. */
+
+// The precompiled header does not have to be the first one in the file.
+
+#include <windows.h>
+#include <stdio.h>
diff --git a/third_party/python/gyp/test/win/rc-build/Resource.h b/third_party/python/gyp/test/win/rc-build/Resource.h
new file mode 100644
index 0000000000..137acf39b5
--- /dev/null
+++ b/third_party/python/gyp/test/win/rc-build/Resource.h
@@ -0,0 +1,26 @@
+//{{NO_DEPENDENCIES}}
+// Microsoft Visual C++ generated include file.
+// Used by hello.rc
+//
+
+#define IDS_APP_TITLE 103
+
+#define IDR_MAINFRAME 128
+#define IDI_HELLO 107
+#define IDI_SMALL 108
+#define IDC_HELLO 109
+#ifndef IDC_STATIC
+#define IDC_STATIC -1
+#endif
+// Next default values for new objects
+//
+#ifdef APSTUDIO_INVOKED
+#ifndef APSTUDIO_READONLY_SYMBOLS
+
+#define _APS_NO_MFC 130
+#define _APS_NEXT_RESOURCE_VALUE 129
+#define _APS_NEXT_COMMAND_VALUE 32771
+#define _APS_NEXT_CONTROL_VALUE 1000
+#define _APS_NEXT_SYMED_VALUE 110
+#endif
+#endif
diff --git a/third_party/python/gyp/test/win/rc-build/hello.cpp b/third_party/python/gyp/test/win/rc-build/hello.cpp
new file mode 100644
index 0000000000..f552ca1591
--- /dev/null
+++ b/third_party/python/gyp/test/win/rc-build/hello.cpp
@@ -0,0 +1,30 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#include <tchar.h>
+
+#include "resource.h"
+
+#define MAX_LOADSTRING 100
+
+TCHAR szTitle[MAX_LOADSTRING];
+TCHAR szWindowClass[MAX_LOADSTRING];
+
+int APIENTRY _tWinMain(
+ HINSTANCE hInstance,
+ HINSTANCE hPrevInstance,
+ LPTSTR lpCmdLine,
+ int nCmdShow) {
+ // Make sure we can load some resources.
+ int count = 0;
+ LoadString(hInstance, IDS_APP_TITLE, szTitle, MAX_LOADSTRING);
+ if (szTitle[0] != 0) ++count;
+ LoadString(hInstance, IDC_HELLO, szWindowClass, MAX_LOADSTRING);
+ if (szWindowClass[0] != 0) ++count;
+ if (LoadIcon(hInstance, MAKEINTRESOURCE(IDI_SMALL)) != NULL) ++count;
+ if (LoadIcon(hInstance, MAKEINTRESOURCE(IDI_HELLO)) != NULL) ++count;
+ return count;
+}
diff --git a/third_party/python/gyp/test/win/rc-build/hello.gyp b/third_party/python/gyp/test/win/rc-build/hello.gyp
new file mode 100644
index 0000000000..3a66357dd4
--- /dev/null
+++ b/third_party/python/gyp/test/win/rc-build/hello.gyp
@@ -0,0 +1,92 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'with_resources',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ },
+ 'VCResourceCompilerTool': {
+ 'Culture' : '1033',
+ },
+ },
+ 'sources': [
+ 'hello.cpp',
+ 'hello.rc',
+ ],
+ 'libraries': [
+ 'kernel32.lib',
+ 'user32.lib',
+ ],
+ },
+ {
+ 'target_name': 'with_resources_subdir',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ },
+ 'VCResourceCompilerTool': {
+ 'Culture' : '1033',
+ },
+ },
+ 'sources': [
+ 'hello.cpp',
+ 'subdir/hello2.rc',
+ ],
+ 'libraries': [
+ 'kernel32.lib',
+ 'user32.lib',
+ ],
+ },
+ {
+ 'target_name': 'with_include_subdir',
+ 'type': 'executable',
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ },
+ 'VCResourceCompilerTool': {
+ 'Culture' : '1033',
+ },
+ },
+ 'resource_include_dirs': [
+ '$(ProjectDir)\\subdir',
+ ],
+ 'sources': [
+ 'hello.cpp',
+ 'hello3.rc',
+ ],
+ 'libraries': [
+ 'kernel32.lib',
+ 'user32.lib',
+ ],
+ },
+ {
+ 'target_name': 'resource_only_dll',
+ 'type': 'shared_library',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'ResourceOnlyDLL': 'true',
+ },
+ },
+ 'sources': [
+ 'hello.rc',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/win/rc-build/hello.h b/third_party/python/gyp/test/win/rc-build/hello.h
new file mode 100644
index 0000000000..e60f2eb7ed
--- /dev/null
+++ b/third_party/python/gyp/test/win/rc-build/hello.h
@@ -0,0 +1,3 @@
+#pragma once
+
+#include "resource.h"
diff --git a/third_party/python/gyp/test/win/rc-build/hello.ico b/third_party/python/gyp/test/win/rc-build/hello.ico
new file mode 100644
index 0000000000..d551aa3aaf
--- /dev/null
+++ b/third_party/python/gyp/test/win/rc-build/hello.ico
Binary files differ
diff --git a/third_party/python/gyp/test/win/rc-build/hello.rc b/third_party/python/gyp/test/win/rc-build/hello.rc
new file mode 100644
index 0000000000..c9a7af6a07
--- /dev/null
+++ b/third_party/python/gyp/test/win/rc-build/hello.rc
@@ -0,0 +1,86 @@
+//Microsoft Visual C++ generated resource script.
+//
+#include "resource.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 2 resource.
+//
+#ifndef APSTUDIO_INVOKED
+#include "targetver.h"
+#endif
+#define APSTUDIO_HIDDEN_SYMBOLS
+#include "windows.h"
+#undef APSTUDIO_HIDDEN_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+LANGUAGE 9, 1
+#pragma code_page(932)
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Icon
+//
+
+// Icon with lowest ID value placed first to ensure application icon
+// remains consistent on all systems.
+
+IDI_HELLO ICON "hello.ico"
+IDI_SMALL ICON "small.ico"
+
+#ifdef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// TEXTINCLUDE
+//
+1 TEXTINCLUDE
+BEGIN
+ "resource.h\0"
+END
+
+2 TEXTINCLUDE
+BEGIN
+ "#ifndef APSTUDIO_INVOKED\r\n"
+ "#include ""targetver.h""\r\n"
+ "#endif\r\n"
+ "#define APSTUDIO_HIDDEN_SYMBOLS\r\n"
+ "#include ""windows.h""\r\n"
+ "#undef APSTUDIO_HIDDEN_SYMBOLS\r\n"
+ "\0"
+END
+
+3 TEXTINCLUDE
+BEGIN
+ "\r\n"
+ "\0"
+END
+
+#endif // APSTUDIO_INVOKED
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// String Table
+//
+
+STRINGTABLE
+BEGIN
+ IDC_HELLO "HELLO"
+ IDS_APP_TITLE "hello"
+END
+
+#endif
+/////////////////////////////////////////////////////////////////////////////
+
+
+
+#ifndef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 3 resource.
+//
+
+/////////////////////////////////////////////////////////////////////////////
+#endif // not APSTUDIO_INVOKED
diff --git a/third_party/python/gyp/test/win/rc-build/hello3.rc b/third_party/python/gyp/test/win/rc-build/hello3.rc
new file mode 100644
index 0000000000..c74dede576
--- /dev/null
+++ b/third_party/python/gyp/test/win/rc-build/hello3.rc
@@ -0,0 +1,87 @@
+//Microsoft Visual C++ generated resource script.
+//
+#include "include.h"
+#include "resource.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 2 resource.
+//
+#ifndef APSTUDIO_INVOKED
+#include "targetver.h"
+#endif
+#define APSTUDIO_HIDDEN_SYMBOLS
+#include "windows.h"
+#undef APSTUDIO_HIDDEN_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+LANGUAGE 9, 1
+#pragma code_page(932)
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Icon
+//
+
+// Icon with lowest ID value placed first to ensure application icon
+// remains consistent on all systems.
+
+IDI_HELLO ICON "hello.ico"
+IDI_SMALL ICON "small.ico"
+
+#ifdef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// TEXTINCLUDE
+//
+1 TEXTINCLUDE
+BEGIN
+ "resource.h\0"
+END
+
+2 TEXTINCLUDE
+BEGIN
+ "#ifndef APSTUDIO_INVOKED\r\n"
+ "#include ""targetver.h""\r\n"
+ "#endif\r\n"
+ "#define APSTUDIO_HIDDEN_SYMBOLS\r\n"
+ "#include ""windows.h""\r\n"
+ "#undef APSTUDIO_HIDDEN_SYMBOLS\r\n"
+ "\0"
+END
+
+3 TEXTINCLUDE
+BEGIN
+ "\r\n"
+ "\0"
+END
+
+#endif // APSTUDIO_INVOKED
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// String Table
+//
+
+STRINGTABLE
+BEGIN
+ IDC_HELLO "HELLO"
+ IDS_APP_TITLE "hello"
+END
+
+#endif
+/////////////////////////////////////////////////////////////////////////////
+
+
+
+#ifndef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 3 resource.
+//
+
+/////////////////////////////////////////////////////////////////////////////
+#endif // not APSTUDIO_INVOKED
diff --git a/third_party/python/gyp/test/win/rc-build/small.ico b/third_party/python/gyp/test/win/rc-build/small.ico
new file mode 100644
index 0000000000..d551aa3aaf
--- /dev/null
+++ b/third_party/python/gyp/test/win/rc-build/small.ico
Binary files differ
diff --git a/third_party/python/gyp/test/win/rc-build/subdir/hello2.rc b/third_party/python/gyp/test/win/rc-build/subdir/hello2.rc
new file mode 100644
index 0000000000..4c8eab109e
--- /dev/null
+++ b/third_party/python/gyp/test/win/rc-build/subdir/hello2.rc
@@ -0,0 +1,87 @@
+//Microsoft Visual C++ generated resource script.
+//
+#include "subdir/include.h"
+#include "resource.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 2 resource.
+//
+#ifndef APSTUDIO_INVOKED
+#include "targetver.h"
+#endif
+#define APSTUDIO_HIDDEN_SYMBOLS
+#include "windows.h"
+#undef APSTUDIO_HIDDEN_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+LANGUAGE 9, 1
+#pragma code_page(932)
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Icon
+//
+
+// Icon with lowest ID value placed first to ensure application icon
+// remains consistent on all systems.
+
+IDI_HELLO ICON "hello.ico"
+IDI_SMALL ICON "small.ico"
+
+#ifdef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// TEXTINCLUDE
+//
+1 TEXTINCLUDE
+BEGIN
+ "resource.h\0"
+END
+
+2 TEXTINCLUDE
+BEGIN
+ "#ifndef APSTUDIO_INVOKED\r\n"
+ "#include ""targetver.h""\r\n"
+ "#endif\r\n"
+ "#define APSTUDIO_HIDDEN_SYMBOLS\r\n"
+ "#include ""windows.h""\r\n"
+ "#undef APSTUDIO_HIDDEN_SYMBOLS\r\n"
+ "\0"
+END
+
+3 TEXTINCLUDE
+BEGIN
+ "\r\n"
+ "\0"
+END
+
+#endif // APSTUDIO_INVOKED
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// String Table
+//
+
+STRINGTABLE
+BEGIN
+ IDC_HELLO "HELLO"
+ IDS_APP_TITLE "hello"
+END
+
+#endif
+/////////////////////////////////////////////////////////////////////////////
+
+
+
+#ifndef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 3 resource.
+//
+
+/////////////////////////////////////////////////////////////////////////////
+#endif // not APSTUDIO_INVOKED
diff --git a/third_party/python/gyp/test/win/rc-build/subdir/include.h b/third_party/python/gyp/test/win/rc-build/subdir/include.h
new file mode 100644
index 0000000000..f15c48b422
--- /dev/null
+++ b/third_party/python/gyp/test/win/rc-build/subdir/include.h
@@ -0,0 +1 @@
+// Just exists to make sure it can be included.
diff --git a/third_party/python/gyp/test/win/rc-build/targetver.h b/third_party/python/gyp/test/win/rc-build/targetver.h
new file mode 100644
index 0000000000..f583181dfd
--- /dev/null
+++ b/third_party/python/gyp/test/win/rc-build/targetver.h
@@ -0,0 +1,24 @@
+#pragma once
+
+// The following macros define the minimum required platform. The minimum required platform
+// is the earliest version of Windows, Internet Explorer etc. that has the necessary features to run
+// your application. The macros work by enabling all features available on platform versions up to and
+// including the version specified.
+
+// Modify the following defines if you have to target a platform prior to the ones specified below.
+// Refer to MSDN for the latest info on corresponding values for different platforms.
+#ifndef WINVER // Specifies that the minimum required platform is Windows Vista.
+#define WINVER 0x0600 // Change this to the appropriate value to target other versions of Windows.
+#endif
+
+#ifndef _WIN32_WINNT // Specifies that the minimum required platform is Windows Vista.
+#define _WIN32_WINNT 0x0600 // Change this to the appropriate value to target other versions of Windows.
+#endif
+
+#ifndef _WIN32_WINDOWS // Specifies that the minimum required platform is Windows 98.
+#define _WIN32_WINDOWS 0x0410 // Change this to the appropriate value to target Windows Me or later.
+#endif
+
+#ifndef _WIN32_IE // Specifies that the minimum required platform is Internet Explorer 7.0.
+#define _WIN32_IE 0x0700 // Change this to the appropriate value to target other versions of IE.
+#endif
diff --git a/third_party/python/gyp/test/win/shard/hello.cc b/third_party/python/gyp/test/win/shard/hello.cc
new file mode 100644
index 0000000000..a9dce62453
--- /dev/null
+++ b/third_party/python/gyp/test/win/shard/hello.cc
@@ -0,0 +1,7 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/shard/hello1.cc b/third_party/python/gyp/test/win/shard/hello1.cc
new file mode 100644
index 0000000000..0eccf2861d
--- /dev/null
+++ b/third_party/python/gyp/test/win/shard/hello1.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int f1() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/shard/hello2.cc b/third_party/python/gyp/test/win/shard/hello2.cc
new file mode 100644
index 0000000000..23fcb546cb
--- /dev/null
+++ b/third_party/python/gyp/test/win/shard/hello2.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int f2() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/shard/hello3.cc b/third_party/python/gyp/test/win/shard/hello3.cc
new file mode 100644
index 0000000000..a72e2efb5a
--- /dev/null
+++ b/third_party/python/gyp/test/win/shard/hello3.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int f3() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/shard/hello4.cc b/third_party/python/gyp/test/win/shard/hello4.cc
new file mode 100644
index 0000000000..a94df19499
--- /dev/null
+++ b/third_party/python/gyp/test/win/shard/hello4.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int f4() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/shard/shard.gyp b/third_party/python/gyp/test/win/shard/shard.gyp
new file mode 100644
index 0000000000..eac45fcff7
--- /dev/null
+++ b/third_party/python/gyp/test/win/shard/shard.gyp
@@ -0,0 +1,31 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'shard',
+ 'type': 'static_library',
+ 'msvs_shard': 4,
+ 'sources': [
+ 'hello1.cc',
+ 'hello2.cc',
+ 'hello3.cc',
+ 'hello4.cc',
+ ],
+ 'product_dir': '<(PRODUCT_DIR)',
+ },
+ {
+ 'target_name': 'refs_to_shard',
+ 'type': 'executable',
+ 'dependencies': [
+ # Make sure references are correctly updated.
+ 'shard',
+ ],
+ 'sources': [
+ 'hello.cc',
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/shard/shard_ref.gyp b/third_party/python/gyp/test/win/shard/shard_ref.gyp
new file mode 100644
index 0000000000..3ec8d76f99
--- /dev/null
+++ b/third_party/python/gyp/test/win/shard/shard_ref.gyp
@@ -0,0 +1,41 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'refs_to_shard_external_lib',
+ 'type': 'static_library',
+ 'dependencies': [
+ # Make sure references in other files are updated correctly.
+ 'shard.gyp:shard',
+ ],
+ 'sources': [
+ 'hello.cc',
+ ],
+ },
+ {
+ 'target_name': 'refs_to_shard_external_exe',
+ 'type': 'executable',
+ 'dependencies': [
+ # Make sure references in other files are updated correctly.
+ 'shard.gyp:shard',
+ ],
+ 'sources': [
+ 'hello.cc',
+ ],
+ },
+ {
+ 'target_name': 'refs_to_shard_external_dll',
+ 'type': 'shared_library',
+ 'dependencies': [
+ # Make sure references in other files are updated correctly.
+ 'shard.gyp:shard',
+ ],
+ 'sources': [
+ 'hello.cc',
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/system-include/bar/header.h b/third_party/python/gyp/test/win/system-include/bar/header.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/win/system-include/bar/header.h
diff --git a/third_party/python/gyp/test/win/system-include/common/commonheader.h b/third_party/python/gyp/test/win/system-include/common/commonheader.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/win/system-include/common/commonheader.h
diff --git a/third_party/python/gyp/test/win/system-include/foo/header.h b/third_party/python/gyp/test/win/system-include/foo/header.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/win/system-include/foo/header.h
diff --git a/third_party/python/gyp/test/win/system-include/main.cc b/third_party/python/gyp/test/win/system-include/main.cc
new file mode 100644
index 0000000000..b04ea8a530
--- /dev/null
+++ b/third_party/python/gyp/test/win/system-include/main.cc
@@ -0,0 +1,4 @@
+#include <commonheader.h>
+#include <header.h>
+
+int main() {}
diff --git a/third_party/python/gyp/test/win/system-include/test.gyp b/third_party/python/gyp/test/win/system-include/test.gyp
new file mode 100644
index 0000000000..07f2636543
--- /dev/null
+++ b/third_party/python/gyp/test/win/system-include/test.gyp
@@ -0,0 +1,26 @@
+{
+ 'target_defaults': {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarningLevel': '4',
+ 'WarnAsError': 'true',
+ },
+ },
+ 'msvs_system_include_dirs': [
+ '$(ProjectName)', # Different for each target
+ 'common', # Same for all targets
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'executable',
+ 'sources': [ 'main.cc', ],
+ },
+ {
+ 'target_name': 'bar',
+ 'type': 'executable',
+ 'sources': [ 'main.cc', ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/win/uldi/a.cc b/third_party/python/gyp/test/win/uldi/a.cc
new file mode 100644
index 0000000000..0fe05d5afb
--- /dev/null
+++ b/third_party/python/gyp/test/win/uldi/a.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int some_function() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/uldi/b.cc b/third_party/python/gyp/test/win/uldi/b.cc
new file mode 100644
index 0000000000..0fe05d5afb
--- /dev/null
+++ b/third_party/python/gyp/test/win/uldi/b.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int some_function() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/uldi/dll.cc b/third_party/python/gyp/test/win/uldi/dll.cc
new file mode 100644
index 0000000000..93a6c19003
--- /dev/null
+++ b/third_party/python/gyp/test/win/uldi/dll.cc
@@ -0,0 +1,6 @@
+// Copyright (c) 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+__declspec(dllexport) void SomeFunction() {
+}
diff --git a/third_party/python/gyp/test/win/uldi/exe.cc b/third_party/python/gyp/test/win/uldi/exe.cc
new file mode 100644
index 0000000000..b3039ace96
--- /dev/null
+++ b/third_party/python/gyp/test/win/uldi/exe.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/uldi/main.cc b/third_party/python/gyp/test/win/uldi/main.cc
new file mode 100644
index 0000000000..81b46d863a
--- /dev/null
+++ b/third_party/python/gyp/test/win/uldi/main.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+extern int some_function();
+
+int main() {
+ some_function();
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/uldi/uldi-depending-on-module.gyp b/third_party/python/gyp/test/win/uldi/uldi-depending-on-module.gyp
new file mode 100644
index 0000000000..3e34de8418
--- /dev/null
+++ b/third_party/python/gyp/test/win/uldi/uldi-depending-on-module.gyp
@@ -0,0 +1,42 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'an_exe',
+ 'type': 'executable',
+ 'sources': ['exe.cc'],
+ 'dependencies': [
+ 'a_dll',
+ ],
+ },
+ {
+ 'target_name': 'a_dll',
+ 'type': 'shared_library',
+ 'sources': ['dll.cc'],
+ 'dependencies': [
+ 'a_lib',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'UseLibraryDependencyInputs': 'true'
+ },
+ },
+ },
+ {
+ 'target_name': 'a_lib',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'a_module',
+ ],
+ 'sources': ['a.cc'],
+ },
+ {
+ 'target_name': 'a_module',
+ 'type': 'loadable_module',
+ 'sources': ['a.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/uldi/uldi.gyp b/third_party/python/gyp/test/win/uldi/uldi.gyp
new file mode 100644
index 0000000000..c32f5e0956
--- /dev/null
+++ b/third_party/python/gyp/test/win/uldi/uldi.gyp
@@ -0,0 +1,45 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'lib1',
+ 'type': 'static_library',
+ 'sources': ['a.cc'],
+ },
+ {
+ 'target_name': 'final_uldi',
+ 'type': 'executable',
+ 'dependencies': [
+ 'lib1',
+ 'lib2',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'UseLibraryDependencyInputs': 'true'
+ },
+ },
+ 'sources': ['main.cc'],
+ },
+ {
+ 'target_name': 'final_no_uldi',
+ 'type': 'executable',
+ 'dependencies': [
+ 'lib1',
+ 'lib2',
+ ],
+ 'sources': ['main.cc'],
+ },
+ {
+ 'target_name': 'lib2',
+ 'type': 'static_library',
+      # b.cc defines the same function as a.cc but uses a different file name,
+      # so that the .obj will have a different name. If the obj file had the
+      # same name, the linker would discard the obj file, invalidating the
+      # test.
+ 'sources': ['b.cc'],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/vs-macros/as.py b/third_party/python/gyp/test/win/vs-macros/as.py
new file mode 100644
index 0000000000..806c91d926
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/as.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+from optparse import OptionParser
+
+parser = OptionParser()
+parser.add_option('-a', dest='platform')
+parser.add_option('-o', dest='output')
+parser.add_option('-p', dest='path')
+(options, args) = parser.parse_args()
+
+f = open(options.output, 'w')
+print('options', options, file=f)
+print('args', args, file=f)
+f.close()
diff --git a/third_party/python/gyp/test/win/vs-macros/containing-gyp.gyp b/third_party/python/gyp/test/win/vs-macros/containing-gyp.gyp
new file mode 100644
index 0000000000..c07b639ff1
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/containing-gyp.gyp
@@ -0,0 +1,39 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_expansions',
+ 'msvs_cygwin_shell': 0,
+ 'type': 'none',
+ 'rules': [
+ {
+ 'rule_name': 'assembler (gnu-compatible)',
+ 'msvs_cygwin_shell': 0,
+ 'msvs_quote_cmd': 0,
+ 'extension': 'S',
+ 'inputs': [
+ 'as.py',
+ ],
+ 'outputs': [
+ '$(IntDir)/$(InputName).obj',
+ ],
+ 'action':
+ ['python',
+ 'as.py',
+ '-a', '$(PlatformName)',
+ '-o', '$(IntDir)/$(InputName).obj',
+ '-p', '<(DEPTH)',
+ '$(InputPath)'],
+ 'message': 'Building assembly language file $(InputPath)',
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ 'sources': [
+ 'input.S',
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/vs-macros/do_stuff.py b/third_party/python/gyp/test/win/vs-macros/do_stuff.py
new file mode 100644
index 0000000000..4669d3139b
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/do_stuff.py
@@ -0,0 +1,8 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+input = open(sys.argv[1], "r").read()
+open(sys.argv[2], "w").write(input + "Modified.")
diff --git a/third_party/python/gyp/test/win/vs-macros/hello.cc b/third_party/python/gyp/test/win/vs-macros/hello.cc
new file mode 100644
index 0000000000..1711567ef5
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/hello.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/win/vs-macros/input-output-macros.gyp b/third_party/python/gyp/test/win/vs-macros/input-output-macros.gyp
new file mode 100644
index 0000000000..b4520f8cb8
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/input-output-macros.gyp
@@ -0,0 +1,32 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_expansions',
+ 'msvs_cygwin_shell': 0,
+ 'type': 'none',
+ 'rules': [
+ {
+ 'rule_name': 'generate_file',
+ 'extension': 'blah',
+ 'inputs': [
+ 'do_stuff.py',
+ ],
+ 'outputs': [
+ '$(OutDir)\\<(RULE_INPUT_NAME).something',
+ ],
+ 'action': ['python',
+ 'do_stuff.py',
+ '<(RULE_INPUT_PATH)',
+ '$(OutDir)\\<(RULE_INPUT_NAME).something',],
+ },
+ ],
+ 'sources': [
+ 'stuff.blah',
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/vs-macros/input.S b/third_party/python/gyp/test/win/vs-macros/input.S
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/input.S
diff --git a/third_party/python/gyp/test/win/vs-macros/projectname.gyp b/third_party/python/gyp/test/win/vs-macros/projectname.gyp
new file mode 100644
index 0000000000..625a177643
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/projectname.gyp
@@ -0,0 +1,29 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_expansions',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(OutDir)\\$(ProjectName)_plus_something.exe',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_with_product_name',
+ 'product_name': 'prod_name',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(OutDir)\\$(ProjectName)_plus_something.exe',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/vs-macros/stuff.blah b/third_party/python/gyp/test/win/vs-macros/stuff.blah
new file mode 100644
index 0000000000..d438b4a787
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/stuff.blah
@@ -0,0 +1 @@
+Random data file.
diff --git a/third_party/python/gyp/test/win/vs-macros/targetext.gyp b/third_party/python/gyp/test/win/vs-macros/targetext.gyp
new file mode 100644
index 0000000000..11f580e4a6
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/targetext.gyp
@@ -0,0 +1,59 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_targetext_executable',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetDir)\\executable$(TargetExt)',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetext_loadable_module',
+ 'type': 'loadable_module',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetDir)\\loadable_module$(TargetExt)',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetext_shared_library',
+ 'type': 'shared_library',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetDir)\\shared_library$(TargetExt)',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetext_static_library',
+ 'type': 'static_library',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLibrarianTool': {
+ 'OutputFile': '$(TargetDir)\\static_library$(TargetExt)',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetext_product_extension',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'product_extension': 'library',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetDir)\\product_extension$(TargetExt)',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/vs-macros/targetfilename.gyp b/third_party/python/gyp/test/win/vs-macros/targetfilename.gyp
new file mode 100644
index 0000000000..8287320278
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/targetfilename.gyp
@@ -0,0 +1,59 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_targetfilename_executable',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetDir)\\$(TargetFileName)',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetfilename_loadable_module',
+ 'type': 'loadable_module',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetDir)\\$(TargetFileName)',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetfilename_shared_library',
+ 'type': 'loadable_module',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetDir)\\$(TargetFileName)',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetfilename_static_library',
+ 'type': 'static_library',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLibrarianTool': {
+ 'OutputFile': '$(TargetDir)\\$(TargetFileName)',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetfilename_product_extension',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'product_extension': 'foo',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetDir)\\$(TargetFileName)',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/vs-macros/targetname.gyp b/third_party/python/gyp/test/win/vs-macros/targetname.gyp
new file mode 100644
index 0000000000..a53d3c0aa3
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/targetname.gyp
@@ -0,0 +1,52 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_targetname',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetDir)\\$(TargetName)_plus_something1.exe',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetname_with_prefix',
+ 'product_prefix': 'prod_prefix',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetDir)\\$(TargetName)_plus_something2.exe',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetname_with_prodname',
+ 'product_name': 'prod_name',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetDir)\\$(TargetName)_plus_something3.exe',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetname_with_prodname_with_prefix',
+ 'product_name': 'prod_name',
+ 'product_prefix': 'prod_prefix',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetDir)\\$(TargetName)_plus_something4.exe',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/vs-macros/targetpath.gyp b/third_party/python/gyp/test/win/vs-macros/targetpath.gyp
new file mode 100644
index 0000000000..a8699ffb25
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/targetpath.gyp
@@ -0,0 +1,59 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_targetpath_executable',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetPath)',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetpath_loadable_module',
+ 'type': 'loadable_module',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetPath)',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetpath_shared_library',
+ 'type': 'loadable_module',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetPath)',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetpath_static_library',
+ 'type': 'static_library',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLibrarianTool': {
+ 'OutputFile': '$(TargetPath)',
+ },
+ },
+ },
+ {
+ 'target_name': 'test_targetpath_product_extension',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'product_extension': 'foo',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'OutputFile': '$(TargetPath)',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/vs-macros/test_exists.py b/third_party/python/gyp/test/win/vs-macros/test_exists.py
new file mode 100644
index 0000000000..297b1b7d9f
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/test_exists.py
@@ -0,0 +1,10 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+if not os.path.exists(sys.argv[1]):
+ raise Exception()
+open(sys.argv[2], 'w').close()
diff --git a/third_party/python/gyp/test/win/vs-macros/vcinstalldir.gyp b/third_party/python/gyp/test/win/vs-macros/vcinstalldir.gyp
new file mode 100644
index 0000000000..3763a4eb18
--- /dev/null
+++ b/third_party/python/gyp/test/win/vs-macros/vcinstalldir.gyp
@@ -0,0 +1,41 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_slash_trailing',
+ 'type': 'none',
+ 'msvs_cygwin_shell': '0',
+ 'actions': [
+ {
+ 'action_name': 'root',
+ 'inputs': [],
+ 'outputs': ['out1'],
+ 'action': ['python', 'test_exists.py', '$(VCInstallDir)', 'out1']
+ },
+ ],
+ },
+ {
+ 'target_name': 'test_slash_dir',
+ 'type': 'none',
+ 'msvs_cygwin_shell': '0',
+ 'actions': [
+ {
+ 'action_name': 'bin',
+ 'inputs': [],
+ 'outputs': ['out2'],
+ 'action': ['python', 'test_exists.py', '$(VCInstallDir)bin', 'out2'],
+ },
+ {
+ 'action_name': 'compiler',
+ 'inputs': [],
+ 'outputs': ['out3'],
+ 'action': [
+ 'python', 'test_exists.py', '$(VCInstallDir)bin\\cl.exe', 'out3'],
+ },
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.c b/third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.c
new file mode 100644
index 0000000000..a6bee029ab
--- /dev/null
+++ b/third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.c
@@ -0,0 +1,10 @@
+// Copyright (c) 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "win-driver-target-type.h"
+
+NTSTATUS DriverEntry(_In_ struct _DRIVER_OBJECT *DriverObject,
+ _In_ PUNICODE_STRING RegistryPath) {
+ return STATUS_SUCCESS;
+}
diff --git a/third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.gyp b/third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.gyp
new file mode 100644
index 0000000000..5da9cc9bd8
--- /dev/null
+++ b/third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.gyp
@@ -0,0 +1,32 @@
+# Copyright (c) 2016 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'win_driver_target_type',
+ 'type': 'windows_driver',
+ 'msvs_target_version': 'Windows7',
+ 'sources': [
+ 'win-driver-target-type.c',
+ 'win-driver-target-type.h',
+ 'win-driver-target-type.rc',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'AdditionalDependencies': [
+ 'wdmsec.lib',
+ 'ntoskrnl.lib',
+ 'hal.lib',
+ 'wmilib.lib',
+ 'bufferoverflowfastfailk.lib',
+ ],
+ },
+ 'VCCLCompilerTool': {
+ 'WarnAsError': 'false',
+ },
+ },
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.h b/third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.h
new file mode 100644
index 0000000000..5bbffd2373
--- /dev/null
+++ b/third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.h
@@ -0,0 +1,13 @@
+// Copyright (c) 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef _WIN_DRIVER_TARGET_TYPE_H_
+#define _WIN_DRIVER_TARGET_TYPE_H_
+
+#include <ntifs.h>
+#include <ntdddisk.h>
+
+DRIVER_INITIALIZE DriverEntry;
+
+#endif
diff --git a/third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.rc b/third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.rc
new file mode 100644
index 0000000000..7a037ef736
--- /dev/null
+++ b/third_party/python/gyp/test/win/win-driver-target-type/win-driver-target-type.rc
@@ -0,0 +1,14 @@
+// Copyright (c) 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <windows.h>
+#include <ntverp.h>
+
+#define VER_FILETYPE VFT_DRV
+#define VER_FILESUBTYPE VFT2_DRV_SYSTEM
+#define VER_FILEDESCRIPTION_STR "Windows Driver GYP target type"
+#define VER_INTERNALNAME_STR "win-driver-target-type.sys"
+#define VER_ORIGINALFILENAME_STR "win-driver-target-type.sys"
+
+#include "common.ver"
diff --git a/third_party/python/gyp/test/win/win-tool/copies_readonly_files.gyp b/third_party/python/gyp/test/win/win-tool/copies_readonly_files.gyp
new file mode 100644
index 0000000000..3cd7e69f1a
--- /dev/null
+++ b/third_party/python/gyp/test/win/win-tool/copies_readonly_files.gyp
@@ -0,0 +1,29 @@
+{
+ 'targets': [
+ {
+ 'target_name': 'foo',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/dest',
+ 'files': [
+ 'read-only-file',
+ ],
+ },
+ ],
+ }, # target: foo
+
+ {
+ 'target_name': 'bar',
+ 'type': 'none',
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/dest',
+ 'files': [
+ 'subdir/',
+ ],
+ },
+ ],
+ }, # target: bar
+ ],
+}
diff --git a/third_party/python/gyp/test/win/win-tool/gyptest-win-tool-handles-readonly-files.py b/third_party/python/gyp/test/win/win-tool/gyptest-win-tool-handles-readonly-files.py
new file mode 100644
index 0000000000..951b952775
--- /dev/null
+++ b/third_party/python/gyp/test/win/win-tool/gyptest-win-tool-handles-readonly-files.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure overwriting read-only files works as expected (via win-tool).
+"""
+
+import TestGyp
+
+import filecmp
+import os
+import stat
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ # First, create the source files.
+ os.makedirs('subdir')
+ read_only_files = ['read-only-file', 'subdir/A', 'subdir/B', 'subdir/C']
+ for f in read_only_files:
+ test.write(f, 'source_contents')
+ test.chmod(f, stat.S_IREAD)
+ if os.access(f, os.W_OK):
+ test.fail_test()
+
+ # Second, create the read-only destination files. Note that we are creating
+ # them where the ninja and win-tool will try to copy them to, in order to test
+ # that copies overwrite the files.
+ os.makedirs(test.built_file_path('dest/subdir'))
+ for f in read_only_files:
+ f = os.path.join('dest', f)
+ test.write(test.built_file_path(f), 'SHOULD BE OVERWRITTEN')
+ test.chmod(test.built_file_path(f), stat.S_IREAD)
+ # Ensure not writable.
+ if os.access(test.built_file_path(f), os.W_OK):
+ test.fail_test()
+
+ test.run_gyp('copies_readonly_files.gyp')
+ test.build('copies_readonly_files.gyp')
+
+ # Check the destination files were overwritten by ninja.
+ for f in read_only_files:
+ f = os.path.join('dest', f)
+ test.must_contain(test.built_file_path(f), 'source_contents')
+
+ # This will fail if the files are not the same mode or contents.
+ for f in read_only_files:
+ if not filecmp.cmp(f, test.built_file_path(os.path.join('dest', f))):
+ test.fail_test()
+
+ test.pass_test()
diff --git a/third_party/python/gyp/test/win/winrt-app-type-revision/dllmain.cc b/third_party/python/gyp/test/win/winrt-app-type-revision/dllmain.cc
new file mode 100644
index 0000000000..dedd83c3f6
--- /dev/null
+++ b/third_party/python/gyp/test/win/winrt-app-type-revision/dllmain.cc
@@ -0,0 +1,30 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <windows.h>
+#include <wrl.h>
+#include <wrl/wrappers/corewrappers.h>
+#include <windows.graphics.display.h>
+
+using namespace Microsoft::WRL;
+using namespace Microsoft::WRL::Wrappers;
+using namespace ABI::Windows::Foundation;
+using namespace ABI::Windows::Graphics::Display;
+
+bool TryToUseSomeWinRT() {
+ ComPtr<IDisplayPropertiesStatics> dp;
+ HStringReference s(RuntimeClass_Windows_Graphics_Display_DisplayProperties);
+ HRESULT hr = GetActivationFactory(s.Get(), dp.GetAddressOf());
+ if (SUCCEEDED(hr)) {
+ float dpi = 96.0f;
+ if (SUCCEEDED(dp->get_LogicalDpi(&dpi))) {
+ return true;
+ }
+ }
+ return false;
+}
+
+BOOL WINAPI DllMain(HINSTANCE hinstance, DWORD reason, LPVOID reserved) {
+ return TRUE;
+}
diff --git a/third_party/python/gyp/test/win/winrt-app-type-revision/winrt-app-type-revison.gyp b/third_party/python/gyp/test/win/winrt-app-type-revision/winrt-app-type-revison.gyp
new file mode 100644
index 0000000000..5f37b5a2ab
--- /dev/null
+++ b/third_party/python/gyp/test/win/winrt-app-type-revision/winrt-app-type-revison.gyp
@@ -0,0 +1,43 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'enable_winrt_81_revision_dll',
+ 'type': 'shared_library',
+ 'msvs_enable_winrt': 1,
+      'msvs_application_type_revision': '8.1',
+ 'sources': [
+ 'dllmain.cc',
+ ],
+ },
+ {
+ 'target_name': 'enable_winrt_82_revision_dll',
+ 'type': 'shared_library',
+ 'msvs_enable_winrt': 1,
+      'msvs_application_type_revision': '8.2',
+ 'sources': [
+ 'dllmain.cc',
+ ],
+ },
+ {
+ 'target_name': 'enable_winrt_invalid_revision_dll',
+ 'type': 'shared_library',
+ 'msvs_enable_winrt': 1,
+      'msvs_application_type_revision': '999',
+      'sources': [
+        'dllmain.cc',
+      ],
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'AdditionalDependencies': [
+            '%(AdditionalDependencies)',
+          ],
+        },
+      },
+    },
+ ]
+}
diff --git a/third_party/python/gyp/test/win/winrt-target-platform-version/dllmain.cc b/third_party/python/gyp/test/win/winrt-target-platform-version/dllmain.cc
new file mode 100644
index 0000000000..d71460c924
--- /dev/null
+++ b/third_party/python/gyp/test/win/winrt-target-platform-version/dllmain.cc
@@ -0,0 +1,30 @@
+// Copyright (c) 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <windows.h>
+#include <wrl.h>
+#include <wrl/wrappers/corewrappers.h>
+#include <windows.graphics.display.h>
+
+using namespace Microsoft::WRL;
+using namespace Microsoft::WRL::Wrappers;
+using namespace ABI::Windows::Foundation;
+using namespace ABI::Windows::Graphics::Display;
+
+bool TryToUseSomeWinRT() {
+ ComPtr<IDisplayPropertiesStatics> dp;
+ HStringReference s(RuntimeClass_Windows_Graphics_Display_DisplayProperties);
+ HRESULT hr = GetActivationFactory(s.Get(), dp.GetAddressOf());
+ if (SUCCEEDED(hr)) {
+ float dpi = 96.0f;
+ if (SUCCEEDED(dp->get_LogicalDpi(&dpi))) {
+ return true;
+ }
+ }
+ return false;
+}
+
+BOOL WINAPI DllMain(HINSTANCE hinstance, DWORD reason, LPVOID reserved) {
+ return TRUE;
+}
diff --git a/third_party/python/gyp/test/win/winrt-target-platform-version/winrt-target-platform-version.gyp b/third_party/python/gyp/test/win/winrt-target-platform-version/winrt-target-platform-version.gyp
new file mode 100644
index 0000000000..dbcfac6962
--- /dev/null
+++ b/third_party/python/gyp/test/win/winrt-target-platform-version/winrt-target-platform-version.gyp
@@ -0,0 +1,49 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'enable_winrt_10_platversion_dll',
+ 'type': 'shared_library',
+ 'msvs_enable_winrt': 1,
+ 'msvs_application_type_revision': '10.0',
+ 'msvs_target_platform_version':'10.0.10240.0',
+      'msvs_target_platform_minversion':'10.0.10240.0',
+ 'sources': [
+ 'dllmain.cc',
+ ],
+ },
+ {
+ 'target_name': 'enable_winrt_10_platversion_nominver_dll',
+ 'type': 'shared_library',
+ 'msvs_enable_winrt': 1,
+ 'msvs_application_type_revision': '10.0',
+ 'msvs_target_platform_version':'10.0.10240.0',
+ 'sources': [
+ 'dllmain.cc',
+ ],
+ },
+ {
+ 'target_name': 'enable_winrt_9_platversion_dll',
+ 'type': 'shared_library',
+ 'msvs_enable_winrt': 1,
+ 'msvs_application_type_revision': '10.0',
+ 'msvs_target_platform_version':'9.0.0.0',
+      'msvs_target_platform_minversion':'9.0.0.0',
+ 'sources': [
+ 'dllmain.cc',
+ ],
+ },
+ {
+ 'target_name': 'enable_winrt_missing_platversion_dll',
+ 'type': 'shared_library',
+ 'msvs_enable_winrt': 1,
+ 'msvs_application_type_revision': '10.0',
+ 'sources': [
+ 'dllmain.cc',
+ ],
+ },
+ ]
+}
diff --git a/third_party/python/gyp/test/xcode-ninja/list_excluded/gyptest-all.py b/third_party/python/gyp/test/xcode-ninja/list_excluded/gyptest-all.py
new file mode 100644
index 0000000000..2d6378a7a2
--- /dev/null
+++ b/third_party/python/gyp/test/xcode-ninja/list_excluded/gyptest-all.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that excluded files are listed in sources_for_indexing.xcodeproj by
+default, and that the generator flag xcode_ninja_list_excluded_files can be
+used to override the default behavior.
+"""
+
+import os
+import TestGyp
+
+
+test = TestGyp.TestGyp()
+
+if test.format != 'xcode-ninja':
+ test.skip_test()
+
+
+# With the generator flag not set.
+test.run_gyp('hello_exclude.gyp')
+test.must_contain(
+ 'sources_for_indexing.xcodeproj/project.pbxproj', 'hello_excluded.cpp')
+
+
+# With the generator flag set to 0.
+try:
+ os.environ['GYP_GENERATOR_FLAGS'] = 'xcode_ninja_list_excluded_files=0'
+ test.run_gyp('hello_exclude.gyp')
+finally:
+ del os.environ['GYP_GENERATOR_FLAGS']
+test.must_not_contain(
+ 'sources_for_indexing.xcodeproj/project.pbxproj', 'hello_excluded.cpp')
+
+
+# With the generator flag explicitly set to 1.
+try:
+ os.environ['GYP_GENERATOR_FLAGS'] = 'xcode_ninja_list_excluded_files=1'
+ test.run_gyp('hello_exclude.gyp')
+finally:
+ del os.environ['GYP_GENERATOR_FLAGS']
+test.must_contain(
+ 'sources_for_indexing.xcodeproj/project.pbxproj', 'hello_excluded.cpp')
+
+
+test.pass_test()
diff --git a/third_party/python/gyp/test/xcode-ninja/list_excluded/hello.cpp b/third_party/python/gyp/test/xcode-ninja/list_excluded/hello.cpp
new file mode 100644
index 0000000000..cd409dabf9
--- /dev/null
+++ b/third_party/python/gyp/test/xcode-ninja/list_excluded/hello.cpp
@@ -0,0 +1,7 @@
+// Copyright (c) 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 0;
+}
diff --git a/third_party/python/gyp/test/xcode-ninja/list_excluded/hello_exclude.gyp b/third_party/python/gyp/test/xcode-ninja/list_excluded/hello_exclude.gyp
new file mode 100644
index 0000000000..f5f0e8eafd
--- /dev/null
+++ b/third_party/python/gyp/test/xcode-ninja/list_excluded/hello_exclude.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.cpp',
+ 'hello_excluded.cpp',
+ ],
+ 'sources!': [
+ 'hello_excluded.cpp',
+ ],
+ },
+ ],
+}
diff --git a/third_party/python/gyp/test/xcode-ninja/list_excluded/hello_excluded.cpp b/third_party/python/gyp/test/xcode-ninja/list_excluded/hello_excluded.cpp
new file mode 100644
index 0000000000..2115529542
--- /dev/null
+++ b/third_party/python/gyp/test/xcode-ninja/list_excluded/hello_excluded.cpp
@@ -0,0 +1,7 @@
+// Copyright (c) 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+ return 42;
+}
diff --git a/third_party/python/gyp/tools/README b/third_party/python/gyp/tools/README
new file mode 100644
index 0000000000..712e4efbb7
--- /dev/null
+++ b/third_party/python/gyp/tools/README
@@ -0,0 +1,15 @@
+pretty_vcproj:
+ Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2]
+
+  The key/value pairs are used to resolve vsprops names.
+
+ For example, if I want to diff the base.vcproj project:
+
+  pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > original.txt
+ pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt
+
+ And you can use your favorite diff tool to see the changes.
+
+  Note: In the case of base.vcproj, the original vcproj is one level up from the generated one.
+  I suggest you search for '"..\' and replace it with '"' in original.txt
+ before you perform the diff. \ No newline at end of file
diff --git a/third_party/python/gyp/tools/Xcode/README b/third_party/python/gyp/tools/Xcode/README
new file mode 100644
index 0000000000..2492a2c2f8
--- /dev/null
+++ b/third_party/python/gyp/tools/Xcode/README
@@ -0,0 +1,5 @@
+The Specifications directory contains syntax formatters for Xcode 3. These do not appear to be supported yet in Xcode 4. To use them with Xcode 3, please install both the gyp.pbfilespec and gyp.xclangspec files in
+
+~/Library/Application Support/Developer/Shared/Xcode/Specifications/
+
+and restart Xcode.
\ No newline at end of file
diff --git a/third_party/python/gyp/tools/Xcode/Specifications/gyp.pbfilespec b/third_party/python/gyp/tools/Xcode/Specifications/gyp.pbfilespec
new file mode 100644
index 0000000000..85e2e268a5
--- /dev/null
+++ b/third_party/python/gyp/tools/Xcode/Specifications/gyp.pbfilespec
@@ -0,0 +1,27 @@
+/*
+ gyp.pbfilespec
+ GYP source file spec for Xcode 3
+
+ There is not much documentation available regarding the format
+ of .pbfilespec files. As a starting point, see for instance the
+ outdated documentation at:
+ http://maxao.free.fr/xcode-plugin-interface/specifications.html
+ and the files in:
+ /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
+
+ Place this file in directory:
+ ~/Library/Application Support/Developer/Shared/Xcode/Specifications/
+*/
+
+(
+ {
+ Identifier = sourcecode.gyp;
+ BasedOn = sourcecode;
+ Name = "GYP Files";
+ Extensions = ("gyp", "gypi");
+ MIMETypes = ("text/gyp");
+ Language = "xcode.lang.gyp";
+ IsTextFile = YES;
+ IsSourceFile = YES;
+ }
+)
diff --git a/third_party/python/gyp/tools/Xcode/Specifications/gyp.xclangspec b/third_party/python/gyp/tools/Xcode/Specifications/gyp.xclangspec
new file mode 100644
index 0000000000..3b3506d319
--- /dev/null
+++ b/third_party/python/gyp/tools/Xcode/Specifications/gyp.xclangspec
@@ -0,0 +1,226 @@
+/*
+ Copyright (c) 2011 Google Inc. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+
+ gyp.xclangspec
+ GYP language specification for Xcode 3
+
+ There is not much documentation available regarding the format
+ of .xclangspec files. As a starting point, see for instance the
+ outdated documentation at:
+ http://maxao.free.fr/xcode-plugin-interface/specifications.html
+ and the files in:
+ /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
+
+ Place this file in directory:
+ ~/Library/Application Support/Developer/Shared/Xcode/Specifications/
+*/
+
+(
+
+ {
+ Identifier = "xcode.lang.gyp.keyword";
+ Syntax = {
+ Words = (
+ "and",
+ "or",
+ "<!",
+ "<",
+ );
+ Type = "xcode.syntax.keyword";
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.target.declarator";
+ Syntax = {
+ Words = (
+ "'target_name'",
+ );
+ Type = "xcode.syntax.identifier.type";
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.string.singlequote";
+ Syntax = {
+ IncludeRules = (
+ "xcode.lang.string",
+ "xcode.lang.gyp.keyword",
+ "xcode.lang.number",
+ );
+ Start = "'";
+ End = "'";
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.comma";
+ Syntax = {
+ Words = ( ",", );
+
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp";
+ Description = "GYP Coloring";
+ BasedOn = "xcode.lang.simpleColoring";
+ IncludeInMenu = YES;
+ Name = "GYP";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer.toplevel";
+ IncludeRules = (
+ "xcode.lang.gyp.dictionary",
+ );
+ Type = "xcode.syntax.plain";
+ };
+ },
+
+ // The following rule returns tokens to the other rules
+ {
+ Identifier = "xcode.lang.gyp.lexer";
+ Syntax = {
+ IncludeRules = (
+ "xcode.lang.gyp.comment",
+ "xcode.lang.string",
+        "xcode.lang.gyp.target.declarator",
+ "xcode.lang.gyp.string.singlequote",
+ "xcode.lang.number",
+ "xcode.lang.gyp.comma",
+ );
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.lexer.toplevel";
+ Syntax = {
+ IncludeRules = (
+ "xcode.lang.gyp.comment",
+ );
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.assignment";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Rules = (
+ "xcode.lang.gyp.assignment.lhs",
+ ":",
+ "xcode.lang.gyp.assignment.rhs",
+ );
+ };
+
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.target.declaration";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Rules = (
+ "xcode.lang.gyp.target.declarator",
+ ":",
+ "xcode.lang.gyp.target.name",
+ );
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.target.name";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Rules = (
+ "xcode.lang.gyp.string.singlequote",
+ );
+ Type = "xcode.syntax.definition.function";
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.assignment.lhs";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Rules = (
+ "xcode.lang.gyp.string.singlequote",
+ );
+ Type = "xcode.syntax.identifier.type";
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.assignment.rhs";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Rules = (
+ "xcode.lang.gyp.string.singlequote?",
+ "xcode.lang.gyp.array?",
+ "xcode.lang.gyp.dictionary?",
+ "xcode.lang.number?",
+ );
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.dictionary";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Start = "{";
+ End = "}";
+ Foldable = YES;
+ Recursive = YES;
+ IncludeRules = (
+ "xcode.lang.gyp.target.declaration",
+ "xcode.lang.gyp.assignment",
+ );
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.array";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Start = "[";
+ End = "]";
+ Foldable = YES;
+ Recursive = YES;
+ IncludeRules = (
+ "xcode.lang.gyp.array",
+ "xcode.lang.gyp.dictionary",
+ "xcode.lang.gyp.string.singlequote",
+ );
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.todo.mark";
+ Syntax = {
+ StartChars = "T";
+ Match = (
+ "^\(TODO\(.*\):[ \t]+.*\)$", // include "TODO: " in the markers list
+ );
+ // This is the order of captures. All of the match strings above need the same order.
+ CaptureTypes = (
+ "xcode.syntax.mark"
+ );
+ Type = "xcode.syntax.comment";
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.comment";
+ BasedOn = "xcode.lang.comment"; // for text macros
+ Syntax = {
+ Start = "#";
+ End = "\n";
+ IncludeRules = (
+ "xcode.lang.url",
+ "xcode.lang.url.mail",
+ "xcode.lang.comment.mark",
+ "xcode.lang.gyp.todo.mark",
+ );
+ Type = "xcode.syntax.comment";
+ };
+ },
+)
diff --git a/third_party/python/gyp/tools/emacs/README b/third_party/python/gyp/tools/emacs/README
new file mode 100644
index 0000000000..eeef39f41b
--- /dev/null
+++ b/third_party/python/gyp/tools/emacs/README
@@ -0,0 +1,12 @@
+How to install gyp-mode for emacs:
+
+Add the following to your ~/.emacs (replace ... with the path to your gyp
+checkout).
+
+(setq load-path (cons ".../tools/emacs" load-path))
+(require 'gyp)
+
+Restart emacs (or eval-region the added lines) and you should be all set.
+
+Please note that ert is required for running the tests; it is included in
+Emacs 24 and also available separately from https://github.com/ohler/ert
diff --git a/third_party/python/gyp/tools/emacs/gyp-tests.el b/third_party/python/gyp/tools/emacs/gyp-tests.el
new file mode 100644
index 0000000000..11b8497886
--- /dev/null
+++ b/third_party/python/gyp/tools/emacs/gyp-tests.el
@@ -0,0 +1,63 @@
+;;; gyp-tests.el - unit tests for gyp-mode.
+
+;; Copyright (c) 2012 Google Inc. All rights reserved.
+;; Use of this source code is governed by a BSD-style license that can be
+;; found in the LICENSE file.
+
+;; The recommended way to run these tests is to run them from the command-line,
+;; with the run-unit-tests.sh script.
+
+(require 'cl)
+(require 'ert)
+(require 'gyp)
+
+(defconst samples (directory-files "testdata" t ".gyp$")
+ "List of golden samples to check")
+
+(defun fontify (filename)
+ (with-temp-buffer
+ (insert-file-contents-literally filename)
+ (gyp-mode)
+ (font-lock-fontify-buffer)
+ (buffer-string)))
+
+(defun read-golden-sample (filename)
+ (with-temp-buffer
+ (insert-file-contents-literally (concat filename ".fontified"))
+ (read (current-buffer))))
+
+(defun equivalent-face (face)
+ "For the purposes of face comparison, we're not interested in the
+ differences between certain faces. For example, the difference between
+   font-lock-comment-delimiter-face and font-lock-comment-face."
+ (case face
+ ((font-lock-comment-delimiter-face) font-lock-comment-face)
+ (t face)))
+
+(defun text-face-properties (s)
+ "Extract the text properties from s"
+ (let ((result (list t)))
+ (dotimes (i (length s))
+ (setq result (cons (equivalent-face (get-text-property i 'face s))
+ result)))
+ (nreverse result)))
+
+(ert-deftest test-golden-samples ()
+ "Check that fontification produces the same results as the golden samples"
+ (dolist (sample samples)
+ (let ((golden (read-golden-sample sample))
+ (fontified (fontify sample)))
+ (should (equal golden fontified))
+ (should (equal (text-face-properties golden)
+ (text-face-properties fontified))))))
+
+(defun create-golden-sample (filename)
+  "Create a golden sample by fontifying FILENAME and writing out the printable
+   representation of the fontified buffer (with text properties) to
+   FILENAME.fontified"
+ (with-temp-file (concat filename ".fontified")
+ (print (fontify filename) (current-buffer))))
+
+(defun create-golden-samples ()
+ "Recreate the golden samples"
+ (dolist (sample samples) (create-golden-sample sample)))
diff --git a/third_party/python/gyp/tools/emacs/gyp.el b/third_party/python/gyp/tools/emacs/gyp.el
new file mode 100644
index 0000000000..b98b155ced
--- /dev/null
+++ b/third_party/python/gyp/tools/emacs/gyp.el
@@ -0,0 +1,275 @@
+;;; gyp.el - font-lock-mode support for gyp files.
+
+;; Copyright (c) 2012 Google Inc. All rights reserved.
+;; Use of this source code is governed by a BSD-style license that can be
+;; found in the LICENSE file.
+
+;; Put this somewhere in your load-path and
+;; (require 'gyp)
+
+(require 'python)
+(require 'cl)
+
+(when (string-match "python-mode.el" (symbol-file 'python-mode 'defun))
+ (error (concat "python-mode must be loaded from python.el (bundled with "
+ "recent emacsen), not from the older and less maintained "
+ "python-mode.el")))
+
+(defadvice python-indent-calculate-levels (after gyp-outdent-closing-parens
+ activate)
+ "De-indent closing parens, braces, and brackets in gyp-mode."
+ (when (and (eq major-mode 'gyp-mode)
+ (string-match "^ *[])}][],)}]* *$"
+ (buffer-substring-no-properties
+ (line-beginning-position) (line-end-position))))
+ (setf (first python-indent-levels)
+ (- (first python-indent-levels) python-continuation-offset))))
+
+(defadvice python-indent-guess-indent-offset (around
+ gyp-indent-guess-indent-offset
+ activate)
+ "Guess correct indent offset in gyp-mode."
+ (or (and (not (eq major-mode 'gyp-mode))
+ ad-do-it)
+ (save-excursion
+ (save-restriction
+ (widen)
+ (goto-char (point-min))
+ ;; Find first line ending with an opening brace that is not a comment.
+ (or (and (re-search-forward "\\(^[[{]$\\|^.*[^#].*[[{]$\\)")
+ (forward-line)
+ (/= (current-indentation) 0)
+ (set (make-local-variable 'python-indent-offset)
+ (current-indentation))
+ (set (make-local-variable 'python-continuation-offset)
+ (current-indentation)))
+ (message "Can't guess gyp indent offset, using default: %s"
+ python-continuation-offset))))))
+
+(define-derived-mode gyp-mode python-mode "Gyp"
+ "Major mode for editing .gyp files. See http://code.google.com/p/gyp/"
+ ;; gyp-parse-history is a stack of (POSITION . PARSE-STATE) tuples,
+ ;; with greater positions at the top of the stack. PARSE-STATE
+ ;; is a list of section symbols (see gyp-section-name and gyp-parse-to)
+ ;; with most nested section symbol at the front of the list.
+ (set (make-local-variable 'gyp-parse-history) '((1 . (list))))
+ (gyp-add-font-lock-keywords))
+
+(defun gyp-set-indentation ()
+ "Hook function to configure python indentation to suit gyp mode."
+ (set (make-local-variable 'python-indent-offset) 2)
+ (set (make-local-variable 'python-continuation-offset) 2)
+ (set (make-local-variable 'python-indent-guess-indent-offset) t)
+ (python-indent-guess-indent-offset))
+
+(add-hook 'gyp-mode-hook 'gyp-set-indentation)
+
+(add-to-list 'auto-mode-alist '("\\.gyp\\'" . gyp-mode))
+(add-to-list 'auto-mode-alist '("\\.gypi\\'" . gyp-mode))
+(add-to-list 'auto-mode-alist '("/\\.gclient\\'" . gyp-mode))
+
+;;; Font-lock support
+
+(defconst gyp-dependencies-regexp
+ (regexp-opt (list "dependencies" "export_dependent_settings"))
+ "Regular expression to introduce 'dependencies' section")
+
+(defconst gyp-sources-regexp
+ (regexp-opt (list "action" "files" "include_dirs" "includes" "inputs"
+ "libraries" "outputs" "sources"))
+ "Regular expression to introduce 'sources' sections")
+
+(defconst gyp-conditions-regexp
+ (regexp-opt (list "conditions" "target_conditions"))
+ "Regular expression to introduce conditions sections")
+
+(defconst gyp-variables-regexp
+ "^variables"
+ "Regular expression to introduce variables sections")
+
+(defconst gyp-defines-regexp
+ "^defines"
+ "Regular expression to introduce 'defines' sections")
+
+(defconst gyp-targets-regexp
+ "^targets"
+ "Regular expression to introduce 'targets' sections")
+
+(defun gyp-section-name (section)
+ "Map the sections we are interested in from SECTION to symbol.
+
+ SECTION is a string from the buffer that introduces a section. The result is
+ a symbol representing the kind of section.
+
+ This allows us to treat (for the purposes of font-lock) several different
+ section names as the same kind of section. For example, a 'sources section
+   can be introduced by the 'sources', 'inputs', or 'outputs' keywords.
+
+ 'other is the default section kind when a more specific match is not made."
+ (cond ((string-match-p gyp-dependencies-regexp section) 'dependencies)
+ ((string-match-p gyp-sources-regexp section) 'sources)
+ ((string-match-p gyp-variables-regexp section) 'variables)
+ ((string-match-p gyp-conditions-regexp section) 'conditions)
+ ((string-match-p gyp-targets-regexp section) 'targets)
+ ((string-match-p gyp-defines-regexp section) 'defines)
+ (t 'other)))
+
+(defun gyp-invalidate-parse-states-after (target-point)
+ "Erase any parse information after target-point."
+ (while (> (caar gyp-parse-history) target-point)
+ (setq gyp-parse-history (cdr gyp-parse-history))))
+
+(defun gyp-parse-point ()
+ "The point of the last parse state added by gyp-parse-to."
+ (caar gyp-parse-history))
+
+(defun gyp-parse-sections ()
+ "A list of section symbols holding at the last parse state point."
+ (cdar gyp-parse-history))
+
+(defun gyp-inside-dictionary-p ()
+ "Predicate returning true if the parser is inside a dictionary."
+ (not (eq (cadar gyp-parse-history) 'list)))
+
+(defun gyp-add-parse-history (point sections)
+ "Add parse state SECTIONS to the parse history at POINT so that parsing can be
+ resumed instantly."
+ (while (>= (caar gyp-parse-history) point)
+ (setq gyp-parse-history (cdr gyp-parse-history)))
+ (setq gyp-parse-history (cons (cons point sections) gyp-parse-history)))
+
+(defun gyp-parse-to (target-point)
+  "Parse from (point) to TARGET-POINT, adding the parse state information to
+   gyp-parse-history. Parsing stops if TARGET-POINT is reached or if a
+ string literal has been parsed. Returns nil if no further parsing can be
+ done, otherwise returns the position of the start of a parsed string, leaving
+ the point at the end of the string."
+ (let ((parsing t)
+ string-start)
+ (while parsing
+ (setq string-start nil)
+ ;; Parse up to a character that starts a sexp, or if the nesting
+ ;; level decreases.
+ (let ((state (parse-partial-sexp (gyp-parse-point)
+ target-point
+ -1
+ t))
+ (sections (gyp-parse-sections)))
+ (if (= (nth 0 state) -1)
+ (setq sections (cdr sections)) ; pop out a level
+ (cond ((looking-at-p "['\"]") ; a string
+ (setq string-start (point))
+ (goto-char (scan-sexps (point) 1))
+ (if (gyp-inside-dictionary-p)
+ ;; Look for sections inside a dictionary
+ (let ((section (gyp-section-name
+ (buffer-substring-no-properties
+ (+ 1 string-start)
+ (- (point) 1)))))
+ (setq sections (cons section (cdr sections)))))
+ ;; Stop after the string so it can be fontified.
+ (setq target-point (point)))
+ ((looking-at-p "{")
+ ;; Inside a dictionary. Increase nesting.
+ (forward-char 1)
+ (setq sections (cons 'unknown sections)))
+ ((looking-at-p "\\[")
+ ;; Inside a list. Increase nesting
+ (forward-char 1)
+ (setq sections (cons 'list sections)))
+ ((not (eobp))
+ ;; other
+ (forward-char 1))))
+ (gyp-add-parse-history (point) sections)
+ (setq parsing (< (point) target-point))))
+ string-start))
+
+(defun gyp-section-at-point ()
+ "Transform the last parse state, which is a list of nested sections and return
+ the section symbol that should be used to determine font-lock information for
+ the string. Can return nil indicating the string should not have any attached
+ section."
+ (let ((sections (gyp-parse-sections)))
+ (cond
+ ((eq (car sections) 'conditions)
+ ;; conditions can occur in a variables section, but we still want to
+ ;; highlight it as a keyword.
+ nil)
+ ((and (eq (car sections) 'list)
+ (eq (cadr sections) 'list))
+ ;; conditions and sources can have items in [[ ]]
+ (caddr sections))
+ (t (cadr sections)))))
+
+(defun gyp-section-match (limit)
+ "Parse from (point) to LIMIT returning by means of match data what was
+ matched. The group of the match indicates what style font-lock should apply.
+ See also `gyp-add-font-lock-keywords'."
+ (gyp-invalidate-parse-states-after (point))
+ (let ((group nil)
+ (string-start t))
+ (while (and (< (point) limit)
+ (not group)
+ string-start)
+ (setq string-start (gyp-parse-to limit))
+ (if string-start
+ (setq group (case (gyp-section-at-point)
+ ('dependencies 1)
+ ('variables 2)
+ ('conditions 2)
+ ('sources 3)
+ ('defines 4)
+ (nil nil)))))
+ (if group
+ (progn
+ ;; Set the match data to indicate to the font-lock mechanism the
+ ;; highlighting to be performed.
+ (set-match-data (append (list string-start (point))
+ (make-list (* (1- group) 2) nil)
+ (list (1+ string-start) (1- (point)))))
+ t))))
+
+;;; Please see http://code.google.com/p/gyp/wiki/GypLanguageSpecification for
+;;; canonical list of keywords.
+(defun gyp-add-font-lock-keywords ()
+ "Add gyp-mode keywords to font-lock mechanism."
+ ;; TODO(jknotten): Move all the keyword highlighting into gyp-section-match
+ ;; so that we can do the font-locking in a single font-lock pass.
+ (font-lock-add-keywords
+ nil
+ (list
+ ;; Top-level keywords
+ (list (concat "['\"]\\("
+ (regexp-opt (list "action" "action_name" "actions" "cflags"
+ "cflags_cc" "conditions" "configurations"
+ "copies" "defines" "dependencies" "destination"
+ "direct_dependent_settings"
+ "export_dependent_settings" "extension" "files"
+ "include_dirs" "includes" "inputs" "ldflags" "libraries"
+ "link_settings" "mac_bundle" "message"
+ "msvs_external_rule" "outputs" "product_name"
+ "process_outputs_as_sources" "rules" "rule_name"
+ "sources" "suppress_wildcard"
+ "target_conditions" "target_defaults"
+ "target_defines" "target_name" "toolsets"
+ "targets" "type" "variables" "xcode_settings"))
+ "[!/+=]?\\)") 1 'font-lock-keyword-face t)
+ ;; Type of target
+ (list (concat "['\"]\\("
+ (regexp-opt (list "loadable_module" "static_library"
+ "shared_library" "executable" "none"))
+ "\\)") 1 'font-lock-type-face t)
+ (list "\\(?:target\\|action\\)_name['\"]\\s-*:\\s-*['\"]\\([^ '\"]*\\)" 1
+ 'font-lock-function-name-face t)
+ (list 'gyp-section-match
+ (list 1 'font-lock-function-name-face t t) ; dependencies
+ (list 2 'font-lock-variable-name-face t t) ; variables, conditions
+ (list 3 'font-lock-constant-face t t) ; sources
+ (list 4 'font-lock-preprocessor-face t t)) ; preprocessor
+ ;; Variable expansion
+ (list "<@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
+ ;; Command expansion
+ (list "<!@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
+ )))
+
+(provide 'gyp)
diff --git a/third_party/python/gyp/tools/emacs/run-unit-tests.sh b/third_party/python/gyp/tools/emacs/run-unit-tests.sh
new file mode 100755
index 0000000000..6e62b9b28c
--- /dev/null
+++ b/third_party/python/gyp/tools/emacs/run-unit-tests.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+emacs --no-site-file --no-init-file --batch \
+ --load ert.el --load gyp.el --load gyp-tests.el \
+ -f ert-run-tests-batch-and-exit
diff --git a/third_party/python/gyp/tools/emacs/testdata/media.gyp b/third_party/python/gyp/tools/emacs/testdata/media.gyp
new file mode 100644
index 0000000000..29300fe1b8
--- /dev/null
+++ b/third_party/python/gyp/tools/emacs/testdata/media.gyp
@@ -0,0 +1,1105 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'chromium_code': 1,
+ # Override to dynamically link the PulseAudio library.
+ 'use_pulseaudio%': 0,
+ # Override to dynamically link the cras (ChromeOS audio) library.
+ 'use_cras%': 0,
+ },
+ 'targets': [
+ {
+ 'target_name': 'media',
+ 'type': '<(component)',
+ 'dependencies': [
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
+ '../build/temp_gyp/googleurl.gyp:googleurl',
+ '../crypto/crypto.gyp:crypto',
+ '../third_party/openmax/openmax.gyp:il',
+ '../ui/ui.gyp:ui',
+ ],
+ 'defines': [
+ 'MEDIA_IMPLEMENTATION',
+ ],
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'audio/android/audio_manager_android.cc',
+ 'audio/android/audio_manager_android.h',
+ 'audio/android/audio_track_output_android.cc',
+ 'audio/android/audio_track_output_android.h',
+ 'audio/android/opensles_input.cc',
+ 'audio/android/opensles_input.h',
+ 'audio/android/opensles_output.cc',
+ 'audio/android/opensles_output.h',
+ 'audio/async_socket_io_handler.h',
+ 'audio/async_socket_io_handler_posix.cc',
+ 'audio/async_socket_io_handler_win.cc',
+ 'audio/audio_buffers_state.cc',
+ 'audio/audio_buffers_state.h',
+ 'audio/audio_io.h',
+ 'audio/audio_input_controller.cc',
+ 'audio/audio_input_controller.h',
+ 'audio/audio_input_stream_impl.cc',
+ 'audio/audio_input_stream_impl.h',
+ 'audio/audio_device_name.cc',
+ 'audio/audio_device_name.h',
+ 'audio/audio_manager.cc',
+ 'audio/audio_manager.h',
+ 'audio/audio_manager_base.cc',
+ 'audio/audio_manager_base.h',
+ 'audio/audio_output_controller.cc',
+ 'audio/audio_output_controller.h',
+ 'audio/audio_output_dispatcher.cc',
+ 'audio/audio_output_dispatcher.h',
+ 'audio/audio_output_dispatcher_impl.cc',
+ 'audio/audio_output_dispatcher_impl.h',
+ 'audio/audio_output_mixer.cc',
+ 'audio/audio_output_mixer.h',
+ 'audio/audio_output_proxy.cc',
+ 'audio/audio_output_proxy.h',
+ 'audio/audio_parameters.cc',
+ 'audio/audio_parameters.h',
+ 'audio/audio_util.cc',
+ 'audio/audio_util.h',
+ 'audio/cross_process_notification.cc',
+ 'audio/cross_process_notification.h',
+ 'audio/cross_process_notification_win.cc',
+ 'audio/cross_process_notification_posix.cc',
+ 'audio/fake_audio_input_stream.cc',
+ 'audio/fake_audio_input_stream.h',
+ 'audio/fake_audio_output_stream.cc',
+ 'audio/fake_audio_output_stream.h',
+ 'audio/linux/audio_manager_linux.cc',
+ 'audio/linux/audio_manager_linux.h',
+ 'audio/linux/alsa_input.cc',
+ 'audio/linux/alsa_input.h',
+ 'audio/linux/alsa_output.cc',
+ 'audio/linux/alsa_output.h',
+ 'audio/linux/alsa_util.cc',
+ 'audio/linux/alsa_util.h',
+ 'audio/linux/alsa_wrapper.cc',
+ 'audio/linux/alsa_wrapper.h',
+ 'audio/linux/cras_output.cc',
+ 'audio/linux/cras_output.h',
+ 'audio/openbsd/audio_manager_openbsd.cc',
+ 'audio/openbsd/audio_manager_openbsd.h',
+ 'audio/mac/audio_input_mac.cc',
+ 'audio/mac/audio_input_mac.h',
+ 'audio/mac/audio_low_latency_input_mac.cc',
+ 'audio/mac/audio_low_latency_input_mac.h',
+ 'audio/mac/audio_low_latency_output_mac.cc',
+ 'audio/mac/audio_low_latency_output_mac.h',
+ 'audio/mac/audio_manager_mac.cc',
+ 'audio/mac/audio_manager_mac.h',
+ 'audio/mac/audio_output_mac.cc',
+ 'audio/mac/audio_output_mac.h',
+ 'audio/null_audio_sink.cc',
+ 'audio/null_audio_sink.h',
+ 'audio/pulse/pulse_output.cc',
+ 'audio/pulse/pulse_output.h',
+ 'audio/sample_rates.cc',
+ 'audio/sample_rates.h',
+ 'audio/simple_sources.cc',
+ 'audio/simple_sources.h',
+ 'audio/win/audio_low_latency_input_win.cc',
+ 'audio/win/audio_low_latency_input_win.h',
+ 'audio/win/audio_low_latency_output_win.cc',
+ 'audio/win/audio_low_latency_output_win.h',
+ 'audio/win/audio_manager_win.cc',
+ 'audio/win/audio_manager_win.h',
+ 'audio/win/avrt_wrapper_win.cc',
+ 'audio/win/avrt_wrapper_win.h',
+ 'audio/win/device_enumeration_win.cc',
+ 'audio/win/device_enumeration_win.h',
+ 'audio/win/wavein_input_win.cc',
+ 'audio/win/wavein_input_win.h',
+ 'audio/win/waveout_output_win.cc',
+ 'audio/win/waveout_output_win.h',
+ 'base/android/media_jni_registrar.cc',
+ 'base/android/media_jni_registrar.h',
+ 'base/audio_decoder.cc',
+ 'base/audio_decoder.h',
+ 'base/audio_decoder_config.cc',
+ 'base/audio_decoder_config.h',
+ 'base/audio_renderer.h',
+ 'base/audio_renderer_mixer.cc',
+ 'base/audio_renderer_mixer.h',
+ 'base/audio_renderer_mixer_input.cc',
+ 'base/audio_renderer_mixer_input.h',
+ 'base/bitstream_buffer.h',
+ 'base/buffers.cc',
+ 'base/buffers.h',
+ 'base/byte_queue.cc',
+ 'base/byte_queue.h',
+ 'base/channel_layout.cc',
+ 'base/channel_layout.h',
+ 'base/clock.cc',
+ 'base/clock.h',
+ 'base/composite_filter.cc',
+ 'base/composite_filter.h',
+ 'base/data_buffer.cc',
+ 'base/data_buffer.h',
+ 'base/data_source.cc',
+ 'base/data_source.h',
+ 'base/decoder_buffer.cc',
+ 'base/decoder_buffer.h',
+ 'base/decrypt_config.cc',
+ 'base/decrypt_config.h',
+ 'base/decryptor.h',
+ 'base/decryptor_client.h',
+ 'base/demuxer.cc',
+ 'base/demuxer.h',
+ 'base/demuxer_stream.cc',
+ 'base/demuxer_stream.h',
+ 'base/djb2.cc',
+ 'base/djb2.h',
+ 'base/filter_collection.cc',
+ 'base/filter_collection.h',
+ 'base/filter_host.h',
+ 'base/filters.cc',
+ 'base/filters.h',
+ 'base/h264_bitstream_converter.cc',
+ 'base/h264_bitstream_converter.h',
+ 'base/media.h',
+ 'base/media_android.cc',
+ 'base/media_export.h',
+ 'base/media_log.cc',
+ 'base/media_log.h',
+ 'base/media_log_event.h',
+ 'base/media_posix.cc',
+ 'base/media_switches.cc',
+ 'base/media_switches.h',
+ 'base/media_win.cc',
+ 'base/message_loop_factory.cc',
+ 'base/message_loop_factory.h',
+ 'base/pipeline.cc',
+ 'base/pipeline.h',
+ 'base/pipeline_status.cc',
+ 'base/pipeline_status.h',
+ 'base/ranges.cc',
+ 'base/ranges.h',
+ 'base/seekable_buffer.cc',
+ 'base/seekable_buffer.h',
+ 'base/state_matrix.cc',
+ 'base/state_matrix.h',
+ 'base/stream_parser.cc',
+ 'base/stream_parser.h',
+ 'base/stream_parser_buffer.cc',
+ 'base/stream_parser_buffer.h',
+ 'base/video_decoder.cc',
+ 'base/video_decoder.h',
+ 'base/video_decoder_config.cc',
+ 'base/video_decoder_config.h',
+ 'base/video_frame.cc',
+ 'base/video_frame.h',
+ 'base/video_renderer.h',
+ 'base/video_util.cc',
+ 'base/video_util.h',
+ 'crypto/aes_decryptor.cc',
+ 'crypto/aes_decryptor.h',
+ 'ffmpeg/ffmpeg_common.cc',
+ 'ffmpeg/ffmpeg_common.h',
+ 'ffmpeg/file_protocol.cc',
+ 'ffmpeg/file_protocol.h',
+ 'filters/audio_file_reader.cc',
+ 'filters/audio_file_reader.h',
+ 'filters/audio_renderer_algorithm.cc',
+ 'filters/audio_renderer_algorithm.h',
+ 'filters/audio_renderer_impl.cc',
+ 'filters/audio_renderer_impl.h',
+ 'filters/bitstream_converter.cc',
+ 'filters/bitstream_converter.h',
+ 'filters/chunk_demuxer.cc',
+ 'filters/chunk_demuxer.h',
+ 'filters/chunk_demuxer_client.h',
+ 'filters/dummy_demuxer.cc',
+ 'filters/dummy_demuxer.h',
+ 'filters/ffmpeg_audio_decoder.cc',
+ 'filters/ffmpeg_audio_decoder.h',
+ 'filters/ffmpeg_demuxer.cc',
+ 'filters/ffmpeg_demuxer.h',
+ 'filters/ffmpeg_h264_bitstream_converter.cc',
+ 'filters/ffmpeg_h264_bitstream_converter.h',
+ 'filters/ffmpeg_glue.cc',
+ 'filters/ffmpeg_glue.h',
+ 'filters/ffmpeg_video_decoder.cc',
+ 'filters/ffmpeg_video_decoder.h',
+ 'filters/file_data_source.cc',
+ 'filters/file_data_source.h',
+ 'filters/gpu_video_decoder.cc',
+ 'filters/gpu_video_decoder.h',
+ 'filters/in_memory_url_protocol.cc',
+ 'filters/in_memory_url_protocol.h',
+ 'filters/source_buffer_stream.cc',
+ 'filters/source_buffer_stream.h',
+ 'filters/video_frame_generator.cc',
+ 'filters/video_frame_generator.h',
+ 'filters/video_renderer_base.cc',
+ 'filters/video_renderer_base.h',
+ 'video/capture/fake_video_capture_device.cc',
+ 'video/capture/fake_video_capture_device.h',
+ 'video/capture/linux/video_capture_device_linux.cc',
+ 'video/capture/linux/video_capture_device_linux.h',
+ 'video/capture/mac/video_capture_device_mac.h',
+ 'video/capture/mac/video_capture_device_mac.mm',
+ 'video/capture/mac/video_capture_device_qtkit_mac.h',
+ 'video/capture/mac/video_capture_device_qtkit_mac.mm',
+ 'video/capture/video_capture.h',
+ 'video/capture/video_capture_device.h',
+ 'video/capture/video_capture_device_dummy.cc',
+ 'video/capture/video_capture_device_dummy.h',
+ 'video/capture/video_capture_proxy.cc',
+ 'video/capture/video_capture_proxy.h',
+ 'video/capture/video_capture_types.h',
+ 'video/capture/win/filter_base_win.cc',
+ 'video/capture/win/filter_base_win.h',
+ 'video/capture/win/pin_base_win.cc',
+ 'video/capture/win/pin_base_win.h',
+ 'video/capture/win/sink_filter_observer_win.h',
+ 'video/capture/win/sink_filter_win.cc',
+ 'video/capture/win/sink_filter_win.h',
+ 'video/capture/win/sink_input_pin_win.cc',
+ 'video/capture/win/sink_input_pin_win.h',
+ 'video/capture/win/video_capture_device_win.cc',
+ 'video/capture/win/video_capture_device_win.h',
+ 'video/picture.cc',
+ 'video/picture.h',
+ 'video/video_decode_accelerator.cc',
+ 'video/video_decode_accelerator.h',
+ 'webm/webm_constants.h',
+ 'webm/webm_cluster_parser.cc',
+ 'webm/webm_cluster_parser.h',
+ 'webm/webm_content_encodings.cc',
+ 'webm/webm_content_encodings.h',
+ 'webm/webm_content_encodings_client.cc',
+ 'webm/webm_content_encodings_client.h',
+ 'webm/webm_info_parser.cc',
+ 'webm/webm_info_parser.h',
+ 'webm/webm_parser.cc',
+ 'webm/webm_parser.h',
+ 'webm/webm_stream_parser.cc',
+ 'webm/webm_stream_parser.h',
+ 'webm/webm_tracks_parser.cc',
+ 'webm/webm_tracks_parser.h',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '..',
+ ],
+ },
+ 'conditions': [
+ # Android doesn't use ffmpeg, so make the dependency conditional
+ # and exclude the sources which depend on ffmpeg.
+ ['OS != "android"', {
+ 'dependencies': [
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ }],
+ ['OS == "android"', {
+ 'sources!': [
+ 'base/media_posix.cc',
+ 'ffmpeg/ffmpeg_common.cc',
+ 'ffmpeg/ffmpeg_common.h',
+ 'ffmpeg/file_protocol.cc',
+ 'ffmpeg/file_protocol.h',
+ 'filters/audio_file_reader.cc',
+ 'filters/audio_file_reader.h',
+ 'filters/bitstream_converter.cc',
+ 'filters/bitstream_converter.h',
+ 'filters/chunk_demuxer.cc',
+ 'filters/chunk_demuxer.h',
+ 'filters/chunk_demuxer_client.h',
+ 'filters/ffmpeg_audio_decoder.cc',
+ 'filters/ffmpeg_audio_decoder.h',
+ 'filters/ffmpeg_demuxer.cc',
+ 'filters/ffmpeg_demuxer.h',
+ 'filters/ffmpeg_h264_bitstream_converter.cc',
+ 'filters/ffmpeg_h264_bitstream_converter.h',
+ 'filters/ffmpeg_glue.cc',
+ 'filters/ffmpeg_glue.h',
+ 'filters/ffmpeg_video_decoder.cc',
+ 'filters/ffmpeg_video_decoder.h',
+ 'filters/gpu_video_decoder.cc',
+ 'filters/gpu_video_decoder.h',
+ 'webm/webm_cluster_parser.cc',
+ 'webm/webm_cluster_parser.h',
+ 'webm/webm_stream_parser.cc',
+ 'webm/webm_stream_parser.h',
+ ],
+ }],
+ # The below 'android' condition were added temporarily and should be
+ # removed in downstream, because there is no Java environment setup in
+ # upstream yet.
+ ['OS == "android"', {
+ 'sources!':[
+ 'audio/android/audio_track_output_android.cc',
+ ],
+ 'sources':[
+ 'audio/android/audio_track_output_stub_android.cc',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-lOpenSLES',
+ ],
+ },
+ }],
+ ['OS=="linux" or OS=="freebsd" or OS=="solaris"', {
+ 'link_settings': {
+ 'libraries': [
+ '-lasound',
+ ],
+ },
+ }],
+ ['OS=="openbsd"', {
+ 'sources/': [ ['exclude', '/alsa_' ],
+ ['exclude', '/audio_manager_linux' ] ],
+ 'link_settings': {
+ 'libraries': [
+ ],
+ },
+ }],
+ ['OS!="openbsd"', {
+ 'sources!': [
+ 'audio/openbsd/audio_manager_openbsd.cc',
+ 'audio/openbsd/audio_manager_openbsd.h',
+ ],
+ }],
+ ['OS=="linux"', {
+ 'variables': {
+ 'conditions': [
+ ['sysroot!=""', {
+ 'pkg-config': '../build/linux/pkg-config-wrapper "<(sysroot)" "<(target_arch)"',
+ }, {
+ 'pkg-config': 'pkg-config'
+ }],
+ ],
+ },
+ 'conditions': [
+ ['use_cras == 1', {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags libcras)',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '<!@(<(pkg-config) --libs libcras)',
+ ],
+ },
+ 'defines': [
+ 'USE_CRAS',
+ ],
+ }, { # else: use_cras == 0
+ 'sources!': [
+ 'audio/linux/cras_output.cc',
+ 'audio/linux/cras_output.h',
+ ],
+ }],
+ ],
+ }],
+ ['os_posix == 1', {
+ 'conditions': [
+ ['use_pulseaudio == 1', {
+ 'cflags': [
+ '<!@(pkg-config --cflags libpulse)',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '<!@(pkg-config --libs-only-l libpulse)',
+ ],
+ },
+ 'defines': [
+ 'USE_PULSEAUDIO',
+ ],
+ }, { # else: use_pulseaudio == 0
+ 'sources!': [
+ 'audio/pulse/pulse_output.cc',
+ 'audio/pulse/pulse_output.h',
+ ],
+ }],
+ ],
+ }],
+ ['os_posix == 1 and OS != "android"', {
+ # Video capture isn't supported in Android yet.
+ 'sources!': [
+ 'video/capture/video_capture_device_dummy.cc',
+ 'video/capture/video_capture_device_dummy.h',
+ ],
+ }],
+ ['OS=="mac"', {
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/AudioUnit.framework',
+ '$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework',
+ '$(SDKROOT)/System/Library/Frameworks/CoreAudio.framework',
+ '$(SDKROOT)/System/Library/Frameworks/CoreVideo.framework',
+ '$(SDKROOT)/System/Library/Frameworks/QTKit.framework',
+ ],
+ },
+ }],
+ ['OS=="win"', {
+ 'sources!': [
+ 'audio/pulse/pulse_output.cc',
+ 'audio/pulse/pulse_output.h',
+ 'video/capture/video_capture_device_dummy.cc',
+ 'video/capture/video_capture_device_dummy.h',
+ ],
+ }],
+ ['proprietary_codecs==1 or branding=="Chrome"', {
+ 'sources': [
+ 'mp4/avc.cc',
+ 'mp4/avc.h',
+ 'mp4/box_definitions.cc',
+ 'mp4/box_definitions.h',
+ 'mp4/box_reader.cc',
+ 'mp4/box_reader.h',
+ 'mp4/cenc.cc',
+ 'mp4/cenc.h',
+ 'mp4/mp4_stream_parser.cc',
+ 'mp4/mp4_stream_parser.h',
+ 'mp4/offset_byte_queue.cc',
+ 'mp4/offset_byte_queue.h',
+ 'mp4/track_run_iterator.cc',
+ 'mp4/track_run_iterator.h',
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'yuv_convert',
+ 'type': 'static_library',
+ 'include_dirs': [
+ '..',
+ ],
+ 'conditions': [
+ ['order_profiling != 0', {
+ 'target_conditions' : [
+ ['_toolset=="target"', {
+ 'cflags!': [ '-finstrument-functions' ],
+ }],
+ ],
+ }],
+ [ 'target_arch == "ia32" or target_arch == "x64"', {
+ 'dependencies': [
+ 'yuv_convert_simd_x86',
+ ],
+ }],
+ [ 'target_arch == "arm"', {
+ 'dependencies': [
+ 'yuv_convert_simd_arm',
+ ],
+ }],
+ ],
+ 'sources': [
+ 'base/yuv_convert.cc',
+ 'base/yuv_convert.h',
+ ],
+ },
+ {
+ 'target_name': 'yuv_convert_simd_x86',
+ 'type': 'static_library',
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'base/simd/convert_rgb_to_yuv_c.cc',
+ 'base/simd/convert_rgb_to_yuv_sse2.cc',
+ 'base/simd/convert_rgb_to_yuv_ssse3.asm',
+ 'base/simd/convert_rgb_to_yuv_ssse3.cc',
+ 'base/simd/convert_rgb_to_yuv_ssse3.inc',
+ 'base/simd/convert_yuv_to_rgb_c.cc',
+ 'base/simd/convert_yuv_to_rgb_x86.cc',
+ 'base/simd/convert_yuv_to_rgb_mmx.asm',
+ 'base/simd/convert_yuv_to_rgb_mmx.inc',
+ 'base/simd/convert_yuv_to_rgb_sse.asm',
+ 'base/simd/filter_yuv.h',
+ 'base/simd/filter_yuv_c.cc',
+ 'base/simd/filter_yuv_mmx.cc',
+ 'base/simd/filter_yuv_sse2.cc',
+ 'base/simd/linear_scale_yuv_to_rgb_mmx.asm',
+ 'base/simd/linear_scale_yuv_to_rgb_mmx.inc',
+ 'base/simd/linear_scale_yuv_to_rgb_sse.asm',
+ 'base/simd/scale_yuv_to_rgb_mmx.asm',
+ 'base/simd/scale_yuv_to_rgb_mmx.inc',
+ 'base/simd/scale_yuv_to_rgb_sse.asm',
+ 'base/simd/yuv_to_rgb_table.cc',
+ 'base/simd/yuv_to_rgb_table.h',
+ ],
+ 'conditions': [
+ ['order_profiling != 0', {
+ 'target_conditions' : [
+ ['_toolset=="target"', {
+ 'cflags!': [ '-finstrument-functions' ],
+ }],
+ ],
+ }],
+ [ 'target_arch == "x64"', {
+ # Source files optimized for X64 systems.
+ 'sources': [
+ 'base/simd/linear_scale_yuv_to_rgb_mmx_x64.asm',
+ 'base/simd/scale_yuv_to_rgb_sse2_x64.asm',
+ ],
+ }],
+ [ 'os_posix == 1 and OS != "mac" and OS != "android"', {
+ 'cflags': [
+ '-msse2',
+ ],
+ }],
+ [ 'OS == "mac"', {
+ 'configurations': {
+ 'Debug': {
+ 'xcode_settings': {
+ # gcc on the mac builds horribly unoptimized sse code in debug
+ # mode. Since this is rarely going to be debugged, run with full
+ # optimizations in Debug as well as Release.
+ 'GCC_OPTIMIZATION_LEVEL': '3', # -O3
+ },
+ },
+ },
+ }],
+ [ 'OS=="win"', {
+ 'variables': {
+ 'yasm_flags': [
+ '-DWIN32',
+ '-DMSVC',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ },
+ }],
+ [ 'OS=="mac"', {
+ 'variables': {
+ 'yasm_flags': [
+ '-DPREFIX',
+ '-DMACHO',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ },
+ }],
+ [ 'os_posix==1 and OS!="mac"', {
+ 'variables': {
+ 'conditions': [
+ [ 'target_arch=="ia32"', {
+ 'yasm_flags': [
+ '-DX86_32',
+ '-DELF',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ }, {
+ 'yasm_flags': [
+ '-DARCH_X86_64',
+ '-DELF',
+ '-DPIC',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ }],
+ ],
+ },
+ }],
+ ],
+ 'variables': {
+ 'yasm_output_path': '<(SHARED_INTERMEDIATE_DIR)/media',
+ },
+ 'msvs_2010_disable_uldi_when_referenced': 1,
+ 'includes': [
+ '../third_party/yasm/yasm_compile.gypi',
+ ],
+ },
+ {
+ 'target_name': 'yuv_convert_simd_arm',
+ 'type': 'static_library',
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'base/simd/convert_rgb_to_yuv_c.cc',
+ 'base/simd/convert_rgb_to_yuv.h',
+ 'base/simd/convert_yuv_to_rgb_c.cc',
+ 'base/simd/convert_yuv_to_rgb.h',
+ 'base/simd/filter_yuv.h',
+ 'base/simd/filter_yuv_c.cc',
+ 'base/simd/yuv_to_rgb_table.cc',
+ 'base/simd/yuv_to_rgb_table.h',
+ ],
+ },
+ {
+ 'target_name': 'media_unittests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'media_test_support',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../base/base.gyp:base_i18n',
+ '../base/base.gyp:test_support_base',
+ '../testing/gmock.gyp:gmock',
+ '../testing/gtest.gyp:gtest',
+ '../ui/ui.gyp:ui',
+ ],
+ 'sources': [
+ 'audio/async_socket_io_handler_unittest.cc',
+ 'audio/audio_input_controller_unittest.cc',
+ 'audio/audio_input_device_unittest.cc',
+ 'audio/audio_input_unittest.cc',
+ 'audio/audio_input_volume_unittest.cc',
+ 'audio/audio_low_latency_input_output_unittest.cc',
+ 'audio/audio_output_controller_unittest.cc',
+ 'audio/audio_output_proxy_unittest.cc',
+ 'audio/audio_parameters_unittest.cc',
+ 'audio/audio_util_unittest.cc',
+ 'audio/cross_process_notification_unittest.cc',
+ 'audio/linux/alsa_output_unittest.cc',
+ 'audio/mac/audio_low_latency_input_mac_unittest.cc',
+ 'audio/mac/audio_output_mac_unittest.cc',
+ 'audio/simple_sources_unittest.cc',
+ 'audio/win/audio_low_latency_input_win_unittest.cc',
+ 'audio/win/audio_low_latency_output_win_unittest.cc',
+ 'audio/win/audio_output_win_unittest.cc',
+ 'base/audio_renderer_mixer_unittest.cc',
+ 'base/audio_renderer_mixer_input_unittest.cc',
+ 'base/buffers_unittest.cc',
+ 'base/clock_unittest.cc',
+ 'base/composite_filter_unittest.cc',
+ 'base/data_buffer_unittest.cc',
+ 'base/decoder_buffer_unittest.cc',
+ 'base/djb2_unittest.cc',
+ 'base/fake_audio_render_callback.cc',
+ 'base/fake_audio_render_callback.h',
+ 'base/filter_collection_unittest.cc',
+ 'base/h264_bitstream_converter_unittest.cc',
+ 'base/pipeline_unittest.cc',
+ 'base/ranges_unittest.cc',
+ 'base/run_all_unittests.cc',
+ 'base/seekable_buffer_unittest.cc',
+ 'base/state_matrix_unittest.cc',
+ 'base/test_data_util.cc',
+ 'base/test_data_util.h',
+ 'base/video_frame_unittest.cc',
+ 'base/video_util_unittest.cc',
+ 'base/yuv_convert_unittest.cc',
+ 'crypto/aes_decryptor_unittest.cc',
+ 'ffmpeg/ffmpeg_common_unittest.cc',
+ 'filters/audio_renderer_algorithm_unittest.cc',
+ 'filters/audio_renderer_impl_unittest.cc',
+ 'filters/bitstream_converter_unittest.cc',
+ 'filters/chunk_demuxer_unittest.cc',
+ 'filters/ffmpeg_audio_decoder_unittest.cc',
+ 'filters/ffmpeg_decoder_unittest.h',
+ 'filters/ffmpeg_demuxer_unittest.cc',
+ 'filters/ffmpeg_glue_unittest.cc',
+ 'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
+ 'filters/ffmpeg_video_decoder_unittest.cc',
+ 'filters/file_data_source_unittest.cc',
+ 'filters/pipeline_integration_test.cc',
+ 'filters/pipeline_integration_test_base.cc',
+ 'filters/source_buffer_stream_unittest.cc',
+ 'filters/video_renderer_base_unittest.cc',
+ 'video/capture/video_capture_device_unittest.cc',
+ 'webm/cluster_builder.cc',
+ 'webm/cluster_builder.h',
+ 'webm/webm_cluster_parser_unittest.cc',
+ 'webm/webm_content_encodings_client_unittest.cc',
+ 'webm/webm_parser_unittest.cc',
+ ],
+ 'conditions': [
+ ['os_posix==1 and OS!="mac"', {
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ['OS != "android"', {
+ 'dependencies': [
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ }],
+ ['OS == "android"', {
+ 'sources!': [
+ 'audio/audio_input_volume_unittest.cc',
+ 'base/test_data_util.cc',
+ 'base/test_data_util.h',
+ 'ffmpeg/ffmpeg_common_unittest.cc',
+ 'filters/ffmpeg_audio_decoder_unittest.cc',
+ 'filters/bitstream_converter_unittest.cc',
+ 'filters/chunk_demuxer_unittest.cc',
+ 'filters/ffmpeg_demuxer_unittest.cc',
+ 'filters/ffmpeg_glue_unittest.cc',
+ 'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
+ 'filters/ffmpeg_video_decoder_unittest.cc',
+ 'filters/pipeline_integration_test.cc',
+ 'filters/pipeline_integration_test_base.cc',
+ 'mp4/mp4_stream_parser_unittest.cc',
+ 'webm/webm_cluster_parser_unittest.cc',
+ ],
+ }],
+ ['OS == "linux"', {
+ 'conditions': [
+ ['use_cras == 1', {
+ 'sources': [
+ 'audio/linux/cras_output_unittest.cc',
+ ],
+ 'defines': [
+ 'USE_CRAS',
+ ],
+ }],
+ ],
+ }],
+ [ 'target_arch=="ia32" or target_arch=="x64"', {
+ 'sources': [
+ 'base/simd/convert_rgb_to_yuv_unittest.cc',
+ ],
+ }],
+ ['proprietary_codecs==1 or branding=="Chrome"', {
+ 'sources': [
+ 'mp4/avc_unittest.cc',
+ 'mp4/box_reader_unittest.cc',
+ 'mp4/mp4_stream_parser_unittest.cc',
+ 'mp4/offset_byte_queue_unittest.cc',
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'media_test_support',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ '../testing/gmock.gyp:gmock',
+ '../testing/gtest.gyp:gtest',
+ ],
+ 'sources': [
+ 'audio/test_audio_input_controller_factory.cc',
+ 'audio/test_audio_input_controller_factory.h',
+ 'base/mock_callback.cc',
+ 'base/mock_callback.h',
+ 'base/mock_data_source_host.cc',
+ 'base/mock_data_source_host.h',
+ 'base/mock_demuxer_host.cc',
+ 'base/mock_demuxer_host.h',
+ 'base/mock_filter_host.cc',
+ 'base/mock_filter_host.h',
+ 'base/mock_filters.cc',
+ 'base/mock_filters.h',
+ ],
+ },
+ {
+ 'target_name': 'scaler_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../skia/skia.gyp:skia',
+ ],
+ 'sources': [
+ 'tools/scaler_bench/scaler_bench.cc',
+ ],
+ },
+ {
+ 'target_name': 'qt_faststart',
+ 'type': 'executable',
+ 'sources': [
+ 'tools/qt_faststart/qt_faststart.c'
+ ],
+ },
+ {
+ 'target_name': 'seek_tester',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ ],
+ 'sources': [
+ 'tools/seek_tester/seek_tester.cc',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="win"', {
+ 'targets': [
+ {
+ 'target_name': 'player_wtl',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
+ '../ui/ui.gyp:ui',
+ ],
+ 'include_dirs': [
+ '<(DEPTH)/third_party/wtl/include',
+ ],
+ 'sources': [
+ 'tools/player_wtl/list.h',
+ 'tools/player_wtl/mainfrm.h',
+ 'tools/player_wtl/movie.cc',
+ 'tools/player_wtl/movie.h',
+ 'tools/player_wtl/player_wtl.cc',
+ 'tools/player_wtl/player_wtl.rc',
+ 'tools/player_wtl/props.h',
+ 'tools/player_wtl/seek.h',
+ 'tools/player_wtl/resource.h',
+ 'tools/player_wtl/view.h',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '2', # Set /SUBSYSTEM:WINDOWS
+ },
+ },
+ 'defines': [
+ '_CRT_SECURE_NO_WARNINGS=1',
+ ],
+ },
+ ],
+ }],
+ ['OS == "win" or toolkit_uses_gtk == 1', {
+ 'targets': [
+ {
+ 'target_name': 'shader_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../ui/gl/gl.gyp:gl',
+ ],
+ 'sources': [
+ 'tools/shader_bench/shader_bench.cc',
+ 'tools/shader_bench/cpu_color_painter.cc',
+ 'tools/shader_bench/cpu_color_painter.h',
+ 'tools/shader_bench/gpu_color_painter.cc',
+ 'tools/shader_bench/gpu_color_painter.h',
+ 'tools/shader_bench/gpu_painter.cc',
+ 'tools/shader_bench/gpu_painter.h',
+ 'tools/shader_bench/painter.cc',
+ 'tools/shader_bench/painter.h',
+ 'tools/shader_bench/window.cc',
+ 'tools/shader_bench/window.h',
+ ],
+ 'conditions': [
+ ['toolkit_uses_gtk == 1', {
+ 'dependencies': [
+ '../build/linux/system.gyp:gtk',
+ ],
+ 'sources': [
+ 'tools/shader_bench/window_linux.cc',
+ ],
+ }],
+ ['OS=="win"', {
+ 'dependencies': [
+ '../third_party/angle/src/build_angle.gyp:libEGL',
+ '../third_party/angle/src/build_angle.gyp:libGLESv2',
+ ],
+ 'sources': [
+ 'tools/shader_bench/window_win.cc',
+ ],
+ }],
+ ],
+ },
+ ],
+ }],
+ ['OS == "linux" and target_arch != "arm"', {
+ 'targets': [
+ {
+ 'target_name': 'tile_render_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ '../base/base.gyp:base',
+ '../ui/gl/gl.gyp:gl',
+ ],
+ 'libraries': [
+ '-lGL',
+ '-ldl',
+ ],
+ 'sources': [
+ 'tools/tile_render_bench/tile_render_bench.cc',
+ ],
+ },
+ ],
+ }],
+ ['os_posix == 1 and OS != "mac" and OS != "android"', {
+ 'targets': [
+ {
+ 'target_name': 'player_x11',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../ui/gl/gl.gyp:gl',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-ldl',
+ '-lX11',
+ '-lXrender',
+ '-lXext',
+ ],
+ },
+ 'sources': [
+ 'tools/player_x11/data_source_logger.cc',
+ 'tools/player_x11/data_source_logger.h',
+ 'tools/player_x11/gl_video_renderer.cc',
+ 'tools/player_x11/gl_video_renderer.h',
+ 'tools/player_x11/player_x11.cc',
+ 'tools/player_x11/x11_video_renderer.cc',
+ 'tools/player_x11/x11_video_renderer.h',
+ ],
+ },
+ ],
+ }],
+ ['OS == "android"', {
+ 'targets': [
+ {
+ 'target_name': 'player_android',
+ 'type': 'static_library',
+ 'sources': [
+ 'base/android/media_player_bridge.cc',
+ 'base/android/media_player_bridge.h',
+ ],
+ 'dependencies': [
+ '../base/base.gyp:base',
+ ],
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)/media',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'generate-jni-headers',
+ 'inputs': [
+ '../base/android/jni_generator/jni_generator.py',
+ 'base/android/java/src/org/chromium/media/MediaPlayerListener.java',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/media/jni/media_player_listener_jni.h',
+ ],
+ 'action': [
+ 'python',
+ '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+ '-o',
+ '<@(_inputs)',
+ '<@(_outputs)',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'media_java',
+ 'type': 'none',
+ 'dependencies': [ '../base/base.gyp:base_java' ],
+ 'variables': {
+ 'package_name': 'media',
+ 'java_in_dir': 'base/android/java',
+ },
+ 'includes': [ '../build/java.gypi' ],
+ },
+
+ ],
+ }, { # OS != "android"'
+ # Android does not use ffmpeg, so disable the targets which require it.
+ 'targets': [
+ {
+ 'target_name': 'ffmpeg_unittests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'media_test_support',
+ '../base/base.gyp:base',
+ '../base/base.gyp:base_i18n',
+ '../base/base.gyp:test_support_base',
+ '../base/base.gyp:test_support_perf',
+ '../testing/gtest.gyp:gtest',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'ffmpeg/ffmpeg_unittest.cc',
+ ],
+ 'conditions': [
+ ['toolkit_uses_gtk == 1', {
+ 'dependencies': [
+ # Needed for the following #include chain:
+ # base/run_all_unittests.cc
+ # ../base/test_suite.h
+ # gtk/gtk.h
+ '../build/linux/system.gyp:gtk',
+ ],
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'ffmpeg_regression_tests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'media_test_support',
+ '../base/base.gyp:test_support_base',
+ '../testing/gmock.gyp:gmock',
+ '../testing/gtest.gyp:gtest',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'base/test_data_util.cc',
+ 'base/run_all_unittests.cc',
+ 'ffmpeg/ffmpeg_regression_tests.cc',
+ 'filters/pipeline_integration_test_base.cc',
+ ],
+ 'conditions': [
+ ['os_posix==1 and OS!="mac"', {
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'ffmpeg_tests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'test/ffmpeg_tests/ffmpeg_tests.cc',
+ ],
+ },
+ {
+ 'target_name': 'media_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'tools/media_bench/media_bench.cc',
+ ],
+ },
+ ],
+ }]
+ ],
+}
diff --git a/third_party/python/gyp/tools/emacs/testdata/media.gyp.fontified b/third_party/python/gyp/tools/emacs/testdata/media.gyp.fontified
new file mode 100644
index 0000000000..962b7b2c43
--- /dev/null
+++ b/third_party/python/gyp/tools/emacs/testdata/media.gyp.fontified
@@ -0,0 +1,1107 @@
+
+#("# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'chromium_code': 1,
+ # Override to dynamically link the PulseAudio library.
+ 'use_pulseaudio%': 0,
+ # Override to dynamically link the cras (ChromeOS audio) library.
+ 'use_cras%': 0,
+ },
+ 'targets': [
+ {
+ 'target_name': 'media',
+ 'type': '<(component)',
+ 'dependencies': [
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
+ '../build/temp_gyp/googleurl.gyp:googleurl',
+ '../crypto/crypto.gyp:crypto',
+ '../third_party/openmax/openmax.gyp:il',
+ '../ui/ui.gyp:ui',
+ ],
+ 'defines': [
+ 'MEDIA_IMPLEMENTATION',
+ ],
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'audio/android/audio_manager_android.cc',
+ 'audio/android/audio_manager_android.h',
+ 'audio/android/audio_track_output_android.cc',
+ 'audio/android/audio_track_output_android.h',
+ 'audio/android/opensles_input.cc',
+ 'audio/android/opensles_input.h',
+ 'audio/android/opensles_output.cc',
+ 'audio/android/opensles_output.h',
+ 'audio/async_socket_io_handler.h',
+ 'audio/async_socket_io_handler_posix.cc',
+ 'audio/async_socket_io_handler_win.cc',
+ 'audio/audio_buffers_state.cc',
+ 'audio/audio_buffers_state.h',
+ 'audio/audio_io.h',
+ 'audio/audio_input_controller.cc',
+ 'audio/audio_input_controller.h',
+ 'audio/audio_input_stream_impl.cc',
+ 'audio/audio_input_stream_impl.h',
+ 'audio/audio_device_name.cc',
+ 'audio/audio_device_name.h',
+ 'audio/audio_manager.cc',
+ 'audio/audio_manager.h',
+ 'audio/audio_manager_base.cc',
+ 'audio/audio_manager_base.h',
+ 'audio/audio_output_controller.cc',
+ 'audio/audio_output_controller.h',
+ 'audio/audio_output_dispatcher.cc',
+ 'audio/audio_output_dispatcher.h',
+ 'audio/audio_output_dispatcher_impl.cc',
+ 'audio/audio_output_dispatcher_impl.h',
+ 'audio/audio_output_mixer.cc',
+ 'audio/audio_output_mixer.h',
+ 'audio/audio_output_proxy.cc',
+ 'audio/audio_output_proxy.h',
+ 'audio/audio_parameters.cc',
+ 'audio/audio_parameters.h',
+ 'audio/audio_util.cc',
+ 'audio/audio_util.h',
+ 'audio/cross_process_notification.cc',
+ 'audio/cross_process_notification.h',
+ 'audio/cross_process_notification_win.cc',
+ 'audio/cross_process_notification_posix.cc',
+ 'audio/fake_audio_input_stream.cc',
+ 'audio/fake_audio_input_stream.h',
+ 'audio/fake_audio_output_stream.cc',
+ 'audio/fake_audio_output_stream.h',
+ 'audio/linux/audio_manager_linux.cc',
+ 'audio/linux/audio_manager_linux.h',
+ 'audio/linux/alsa_input.cc',
+ 'audio/linux/alsa_input.h',
+ 'audio/linux/alsa_output.cc',
+ 'audio/linux/alsa_output.h',
+ 'audio/linux/alsa_util.cc',
+ 'audio/linux/alsa_util.h',
+ 'audio/linux/alsa_wrapper.cc',
+ 'audio/linux/alsa_wrapper.h',
+ 'audio/linux/cras_output.cc',
+ 'audio/linux/cras_output.h',
+ 'audio/openbsd/audio_manager_openbsd.cc',
+ 'audio/openbsd/audio_manager_openbsd.h',
+ 'audio/mac/audio_input_mac.cc',
+ 'audio/mac/audio_input_mac.h',
+ 'audio/mac/audio_low_latency_input_mac.cc',
+ 'audio/mac/audio_low_latency_input_mac.h',
+ 'audio/mac/audio_low_latency_output_mac.cc',
+ 'audio/mac/audio_low_latency_output_mac.h',
+ 'audio/mac/audio_manager_mac.cc',
+ 'audio/mac/audio_manager_mac.h',
+ 'audio/mac/audio_output_mac.cc',
+ 'audio/mac/audio_output_mac.h',
+ 'audio/null_audio_sink.cc',
+ 'audio/null_audio_sink.h',
+ 'audio/pulse/pulse_output.cc',
+ 'audio/pulse/pulse_output.h',
+ 'audio/sample_rates.cc',
+ 'audio/sample_rates.h',
+ 'audio/simple_sources.cc',
+ 'audio/simple_sources.h',
+ 'audio/win/audio_low_latency_input_win.cc',
+ 'audio/win/audio_low_latency_input_win.h',
+ 'audio/win/audio_low_latency_output_win.cc',
+ 'audio/win/audio_low_latency_output_win.h',
+ 'audio/win/audio_manager_win.cc',
+ 'audio/win/audio_manager_win.h',
+ 'audio/win/avrt_wrapper_win.cc',
+ 'audio/win/avrt_wrapper_win.h',
+ 'audio/win/device_enumeration_win.cc',
+ 'audio/win/device_enumeration_win.h',
+ 'audio/win/wavein_input_win.cc',
+ 'audio/win/wavein_input_win.h',
+ 'audio/win/waveout_output_win.cc',
+ 'audio/win/waveout_output_win.h',
+ 'base/android/media_jni_registrar.cc',
+ 'base/android/media_jni_registrar.h',
+ 'base/audio_decoder.cc',
+ 'base/audio_decoder.h',
+ 'base/audio_decoder_config.cc',
+ 'base/audio_decoder_config.h',
+ 'base/audio_renderer.h',
+ 'base/audio_renderer_mixer.cc',
+ 'base/audio_renderer_mixer.h',
+ 'base/audio_renderer_mixer_input.cc',
+ 'base/audio_renderer_mixer_input.h',
+ 'base/bitstream_buffer.h',
+ 'base/buffers.cc',
+ 'base/buffers.h',
+ 'base/byte_queue.cc',
+ 'base/byte_queue.h',
+ 'base/channel_layout.cc',
+ 'base/channel_layout.h',
+ 'base/clock.cc',
+ 'base/clock.h',
+ 'base/composite_filter.cc',
+ 'base/composite_filter.h',
+ 'base/data_buffer.cc',
+ 'base/data_buffer.h',
+ 'base/data_source.cc',
+ 'base/data_source.h',
+ 'base/decoder_buffer.cc',
+ 'base/decoder_buffer.h',
+ 'base/decrypt_config.cc',
+ 'base/decrypt_config.h',
+ 'base/decryptor.h',
+ 'base/decryptor_client.h',
+ 'base/demuxer.cc',
+ 'base/demuxer.h',
+ 'base/demuxer_stream.cc',
+ 'base/demuxer_stream.h',
+ 'base/djb2.cc',
+ 'base/djb2.h',
+ 'base/filter_collection.cc',
+ 'base/filter_collection.h',
+ 'base/filter_host.h',
+ 'base/filters.cc',
+ 'base/filters.h',
+ 'base/h264_bitstream_converter.cc',
+ 'base/h264_bitstream_converter.h',
+ 'base/media.h',
+ 'base/media_android.cc',
+ 'base/media_export.h',
+ 'base/media_log.cc',
+ 'base/media_log.h',
+ 'base/media_log_event.h',
+ 'base/media_posix.cc',
+ 'base/media_switches.cc',
+ 'base/media_switches.h',
+ 'base/media_win.cc',
+ 'base/message_loop_factory.cc',
+ 'base/message_loop_factory.h',
+ 'base/pipeline.cc',
+ 'base/pipeline.h',
+ 'base/pipeline_status.cc',
+ 'base/pipeline_status.h',
+ 'base/ranges.cc',
+ 'base/ranges.h',
+ 'base/seekable_buffer.cc',
+ 'base/seekable_buffer.h',
+ 'base/state_matrix.cc',
+ 'base/state_matrix.h',
+ 'base/stream_parser.cc',
+ 'base/stream_parser.h',
+ 'base/stream_parser_buffer.cc',
+ 'base/stream_parser_buffer.h',
+ 'base/video_decoder.cc',
+ 'base/video_decoder.h',
+ 'base/video_decoder_config.cc',
+ 'base/video_decoder_config.h',
+ 'base/video_frame.cc',
+ 'base/video_frame.h',
+ 'base/video_renderer.h',
+ 'base/video_util.cc',
+ 'base/video_util.h',
+ 'crypto/aes_decryptor.cc',
+ 'crypto/aes_decryptor.h',
+ 'ffmpeg/ffmpeg_common.cc',
+ 'ffmpeg/ffmpeg_common.h',
+ 'ffmpeg/file_protocol.cc',
+ 'ffmpeg/file_protocol.h',
+ 'filters/audio_file_reader.cc',
+ 'filters/audio_file_reader.h',
+ 'filters/audio_renderer_algorithm.cc',
+ 'filters/audio_renderer_algorithm.h',
+ 'filters/audio_renderer_impl.cc',
+ 'filters/audio_renderer_impl.h',
+ 'filters/bitstream_converter.cc',
+ 'filters/bitstream_converter.h',
+ 'filters/chunk_demuxer.cc',
+ 'filters/chunk_demuxer.h',
+ 'filters/chunk_demuxer_client.h',
+ 'filters/dummy_demuxer.cc',
+ 'filters/dummy_demuxer.h',
+ 'filters/ffmpeg_audio_decoder.cc',
+ 'filters/ffmpeg_audio_decoder.h',
+ 'filters/ffmpeg_demuxer.cc',
+ 'filters/ffmpeg_demuxer.h',
+ 'filters/ffmpeg_h264_bitstream_converter.cc',
+ 'filters/ffmpeg_h264_bitstream_converter.h',
+ 'filters/ffmpeg_glue.cc',
+ 'filters/ffmpeg_glue.h',
+ 'filters/ffmpeg_video_decoder.cc',
+ 'filters/ffmpeg_video_decoder.h',
+ 'filters/file_data_source.cc',
+ 'filters/file_data_source.h',
+ 'filters/gpu_video_decoder.cc',
+ 'filters/gpu_video_decoder.h',
+ 'filters/in_memory_url_protocol.cc',
+ 'filters/in_memory_url_protocol.h',
+ 'filters/source_buffer_stream.cc',
+ 'filters/source_buffer_stream.h',
+ 'filters/video_frame_generator.cc',
+ 'filters/video_frame_generator.h',
+ 'filters/video_renderer_base.cc',
+ 'filters/video_renderer_base.h',
+ 'video/capture/fake_video_capture_device.cc',
+ 'video/capture/fake_video_capture_device.h',
+ 'video/capture/linux/video_capture_device_linux.cc',
+ 'video/capture/linux/video_capture_device_linux.h',
+ 'video/capture/mac/video_capture_device_mac.h',
+ 'video/capture/mac/video_capture_device_mac.mm',
+ 'video/capture/mac/video_capture_device_qtkit_mac.h',
+ 'video/capture/mac/video_capture_device_qtkit_mac.mm',
+ 'video/capture/video_capture.h',
+ 'video/capture/video_capture_device.h',
+ 'video/capture/video_capture_device_dummy.cc',
+ 'video/capture/video_capture_device_dummy.h',
+ 'video/capture/video_capture_proxy.cc',
+ 'video/capture/video_capture_proxy.h',
+ 'video/capture/video_capture_types.h',
+ 'video/capture/win/filter_base_win.cc',
+ 'video/capture/win/filter_base_win.h',
+ 'video/capture/win/pin_base_win.cc',
+ 'video/capture/win/pin_base_win.h',
+ 'video/capture/win/sink_filter_observer_win.h',
+ 'video/capture/win/sink_filter_win.cc',
+ 'video/capture/win/sink_filter_win.h',
+ 'video/capture/win/sink_input_pin_win.cc',
+ 'video/capture/win/sink_input_pin_win.h',
+ 'video/capture/win/video_capture_device_win.cc',
+ 'video/capture/win/video_capture_device_win.h',
+ 'video/picture.cc',
+ 'video/picture.h',
+ 'video/video_decode_accelerator.cc',
+ 'video/video_decode_accelerator.h',
+ 'webm/webm_constants.h',
+ 'webm/webm_cluster_parser.cc',
+ 'webm/webm_cluster_parser.h',
+ 'webm/webm_content_encodings.cc',
+ 'webm/webm_content_encodings.h',
+ 'webm/webm_content_encodings_client.cc',
+ 'webm/webm_content_encodings_client.h',
+ 'webm/webm_info_parser.cc',
+ 'webm/webm_info_parser.h',
+ 'webm/webm_parser.cc',
+ 'webm/webm_parser.h',
+ 'webm/webm_stream_parser.cc',
+ 'webm/webm_stream_parser.h',
+ 'webm/webm_tracks_parser.cc',
+ 'webm/webm_tracks_parser.h',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '..',
+ ],
+ },
+ 'conditions': [
+ # Android doesn't use ffmpeg, so make the dependency conditional
+ # and exclude the sources which depend on ffmpeg.
+ ['OS != \"android\"', {
+ 'dependencies': [
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ }],
+ ['OS == \"android\"', {
+ 'sources!': [
+ 'base/media_posix.cc',
+ 'ffmpeg/ffmpeg_common.cc',
+ 'ffmpeg/ffmpeg_common.h',
+ 'ffmpeg/file_protocol.cc',
+ 'ffmpeg/file_protocol.h',
+ 'filters/audio_file_reader.cc',
+ 'filters/audio_file_reader.h',
+ 'filters/bitstream_converter.cc',
+ 'filters/bitstream_converter.h',
+ 'filters/chunk_demuxer.cc',
+ 'filters/chunk_demuxer.h',
+ 'filters/chunk_demuxer_client.h',
+ 'filters/ffmpeg_audio_decoder.cc',
+ 'filters/ffmpeg_audio_decoder.h',
+ 'filters/ffmpeg_demuxer.cc',
+ 'filters/ffmpeg_demuxer.h',
+ 'filters/ffmpeg_h264_bitstream_converter.cc',
+ 'filters/ffmpeg_h264_bitstream_converter.h',
+ 'filters/ffmpeg_glue.cc',
+ 'filters/ffmpeg_glue.h',
+ 'filters/ffmpeg_video_decoder.cc',
+ 'filters/ffmpeg_video_decoder.h',
+ 'filters/gpu_video_decoder.cc',
+ 'filters/gpu_video_decoder.h',
+ 'webm/webm_cluster_parser.cc',
+ 'webm/webm_cluster_parser.h',
+ 'webm/webm_stream_parser.cc',
+ 'webm/webm_stream_parser.h',
+ ],
+ }],
+ # The below 'android' condition was added temporarily and should be
+ # removed downstream, because there is no Java environment set up
+ # upstream yet.
+ ['OS == \"android\"', {
+ 'sources!':[
+ 'audio/android/audio_track_output_android.cc',
+ ],
+ 'sources':[
+ 'audio/android/audio_track_output_stub_android.cc',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-lOpenSLES',
+ ],
+ },
+ }],
+ ['OS==\"linux\" or OS==\"freebsd\" or OS==\"solaris\"', {
+ 'link_settings': {
+ 'libraries': [
+ '-lasound',
+ ],
+ },
+ }],
+ ['OS==\"openbsd\"', {
+ 'sources/': [ ['exclude', '/alsa_' ],
+ ['exclude', '/audio_manager_linux' ] ],
+ 'link_settings': {
+ 'libraries': [
+ ],
+ },
+ }],
+ ['OS!=\"openbsd\"', {
+ 'sources!': [
+ 'audio/openbsd/audio_manager_openbsd.cc',
+ 'audio/openbsd/audio_manager_openbsd.h',
+ ],
+ }],
+ ['OS==\"linux\"', {
+ 'variables': {
+ 'conditions': [
+ ['sysroot!=\"\"', {
+ 'pkg-config': '../build/linux/pkg-config-wrapper \"<(sysroot)\" \"<(target_arch)\"',
+ }, {
+ 'pkg-config': 'pkg-config'
+ }],
+ ],
+ },
+ 'conditions': [
+ ['use_cras == 1', {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags libcras)',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '<!@(<(pkg-config) --libs libcras)',
+ ],
+ },
+ 'defines': [
+ 'USE_CRAS',
+ ],
+ }, { # else: use_cras == 0
+ 'sources!': [
+ 'audio/linux/cras_output.cc',
+ 'audio/linux/cras_output.h',
+ ],
+ }],
+ ],
+ }],
+ ['os_posix == 1', {
+ 'conditions': [
+ ['use_pulseaudio == 1', {
+ 'cflags': [
+ '<!@(pkg-config --cflags libpulse)',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '<!@(pkg-config --libs-only-l libpulse)',
+ ],
+ },
+ 'defines': [
+ 'USE_PULSEAUDIO',
+ ],
+ }, { # else: use_pulseaudio == 0
+ 'sources!': [
+ 'audio/pulse/pulse_output.cc',
+ 'audio/pulse/pulse_output.h',
+ ],
+ }],
+ ],
+ }],
+ ['os_posix == 1 and OS != \"android\"', {
+ # Video capture isn't supported in Android yet.
+ 'sources!': [
+ 'video/capture/video_capture_device_dummy.cc',
+ 'video/capture/video_capture_device_dummy.h',
+ ],
+ }],
+ ['OS==\"mac\"', {
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/AudioUnit.framework',
+ '$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework',
+ '$(SDKROOT)/System/Library/Frameworks/CoreAudio.framework',
+ '$(SDKROOT)/System/Library/Frameworks/CoreVideo.framework',
+ '$(SDKROOT)/System/Library/Frameworks/QTKit.framework',
+ ],
+ },
+ }],
+ ['OS==\"win\"', {
+ 'sources!': [
+ 'audio/pulse/pulse_output.cc',
+ 'audio/pulse/pulse_output.h',
+ 'video/capture/video_capture_device_dummy.cc',
+ 'video/capture/video_capture_device_dummy.h',
+ ],
+ }],
+ ['proprietary_codecs==1 or branding==\"Chrome\"', {
+ 'sources': [
+ 'mp4/avc.cc',
+ 'mp4/avc.h',
+ 'mp4/box_definitions.cc',
+ 'mp4/box_definitions.h',
+ 'mp4/box_reader.cc',
+ 'mp4/box_reader.h',
+ 'mp4/cenc.cc',
+ 'mp4/cenc.h',
+ 'mp4/mp4_stream_parser.cc',
+ 'mp4/mp4_stream_parser.h',
+ 'mp4/offset_byte_queue.cc',
+ 'mp4/offset_byte_queue.h',
+ 'mp4/track_run_iterator.cc',
+ 'mp4/track_run_iterator.h',
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'yuv_convert',
+ 'type': 'static_library',
+ 'include_dirs': [
+ '..',
+ ],
+ 'conditions': [
+ ['order_profiling != 0', {
+ 'target_conditions' : [
+ ['_toolset==\"target\"', {
+ 'cflags!': [ '-finstrument-functions' ],
+ }],
+ ],
+ }],
+ [ 'target_arch == \"ia32\" or target_arch == \"x64\"', {
+ 'dependencies': [
+ 'yuv_convert_simd_x86',
+ ],
+ }],
+ [ 'target_arch == \"arm\"', {
+ 'dependencies': [
+ 'yuv_convert_simd_arm',
+ ],
+ }],
+ ],
+ 'sources': [
+ 'base/yuv_convert.cc',
+ 'base/yuv_convert.h',
+ ],
+ },
+ {
+ 'target_name': 'yuv_convert_simd_x86',
+ 'type': 'static_library',
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'base/simd/convert_rgb_to_yuv_c.cc',
+ 'base/simd/convert_rgb_to_yuv_sse2.cc',
+ 'base/simd/convert_rgb_to_yuv_ssse3.asm',
+ 'base/simd/convert_rgb_to_yuv_ssse3.cc',
+ 'base/simd/convert_rgb_to_yuv_ssse3.inc',
+ 'base/simd/convert_yuv_to_rgb_c.cc',
+ 'base/simd/convert_yuv_to_rgb_x86.cc',
+ 'base/simd/convert_yuv_to_rgb_mmx.asm',
+ 'base/simd/convert_yuv_to_rgb_mmx.inc',
+ 'base/simd/convert_yuv_to_rgb_sse.asm',
+ 'base/simd/filter_yuv.h',
+ 'base/simd/filter_yuv_c.cc',
+ 'base/simd/filter_yuv_mmx.cc',
+ 'base/simd/filter_yuv_sse2.cc',
+ 'base/simd/linear_scale_yuv_to_rgb_mmx.asm',
+ 'base/simd/linear_scale_yuv_to_rgb_mmx.inc',
+ 'base/simd/linear_scale_yuv_to_rgb_sse.asm',
+ 'base/simd/scale_yuv_to_rgb_mmx.asm',
+ 'base/simd/scale_yuv_to_rgb_mmx.inc',
+ 'base/simd/scale_yuv_to_rgb_sse.asm',
+ 'base/simd/yuv_to_rgb_table.cc',
+ 'base/simd/yuv_to_rgb_table.h',
+ ],
+ 'conditions': [
+ ['order_profiling != 0', {
+ 'target_conditions' : [
+ ['_toolset==\"target\"', {
+ 'cflags!': [ '-finstrument-functions' ],
+ }],
+ ],
+ }],
+ [ 'target_arch == \"x64\"', {
+ # Source files optimized for X64 systems.
+ 'sources': [
+ 'base/simd/linear_scale_yuv_to_rgb_mmx_x64.asm',
+ 'base/simd/scale_yuv_to_rgb_sse2_x64.asm',
+ ],
+ }],
+ [ 'os_posix == 1 and OS != \"mac\" and OS != \"android\"', {
+ 'cflags': [
+ '-msse2',
+ ],
+ }],
+ [ 'OS == \"mac\"', {
+ 'configurations': {
+ 'Debug': {
+ 'xcode_settings': {
+ # gcc on the Mac builds horribly unoptimized SSE code in debug
+ # mode. Since this is rarely going to be debugged, run with full
+ # optimizations in Debug as well as Release.
+ 'GCC_OPTIMIZATION_LEVEL': '3', # -O3
+ },
+ },
+ },
+ }],
+ [ 'OS==\"win\"', {
+ 'variables': {
+ 'yasm_flags': [
+ '-DWIN32',
+ '-DMSVC',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ },
+ }],
+ [ 'OS==\"mac\"', {
+ 'variables': {
+ 'yasm_flags': [
+ '-DPREFIX',
+ '-DMACHO',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ },
+ }],
+ [ 'os_posix==1 and OS!=\"mac\"', {
+ 'variables': {
+ 'conditions': [
+ [ 'target_arch==\"ia32\"', {
+ 'yasm_flags': [
+ '-DX86_32',
+ '-DELF',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ }, {
+ 'yasm_flags': [
+ '-DARCH_X86_64',
+ '-DELF',
+ '-DPIC',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ }],
+ ],
+ },
+ }],
+ ],
+ 'variables': {
+ 'yasm_output_path': '<(SHARED_INTERMEDIATE_DIR)/media',
+ },
+ 'msvs_2010_disable_uldi_when_referenced': 1,
+ 'includes': [
+ '../third_party/yasm/yasm_compile.gypi',
+ ],
+ },
+ {
+ 'target_name': 'yuv_convert_simd_arm',
+ 'type': 'static_library',
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'base/simd/convert_rgb_to_yuv_c.cc',
+ 'base/simd/convert_rgb_to_yuv.h',
+ 'base/simd/convert_yuv_to_rgb_c.cc',
+ 'base/simd/convert_yuv_to_rgb.h',
+ 'base/simd/filter_yuv.h',
+ 'base/simd/filter_yuv_c.cc',
+ 'base/simd/yuv_to_rgb_table.cc',
+ 'base/simd/yuv_to_rgb_table.h',
+ ],
+ },
+ {
+ 'target_name': 'media_unittests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'media_test_support',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../base/base.gyp:base_i18n',
+ '../base/base.gyp:test_support_base',
+ '../testing/gmock.gyp:gmock',
+ '../testing/gtest.gyp:gtest',
+ '../ui/ui.gyp:ui',
+ ],
+ 'sources': [
+ 'audio/async_socket_io_handler_unittest.cc',
+ 'audio/audio_input_controller_unittest.cc',
+ 'audio/audio_input_device_unittest.cc',
+ 'audio/audio_input_unittest.cc',
+ 'audio/audio_input_volume_unittest.cc',
+ 'audio/audio_low_latency_input_output_unittest.cc',
+ 'audio/audio_output_controller_unittest.cc',
+ 'audio/audio_output_proxy_unittest.cc',
+ 'audio/audio_parameters_unittest.cc',
+ 'audio/audio_util_unittest.cc',
+ 'audio/cross_process_notification_unittest.cc',
+ 'audio/linux/alsa_output_unittest.cc',
+ 'audio/mac/audio_low_latency_input_mac_unittest.cc',
+ 'audio/mac/audio_output_mac_unittest.cc',
+ 'audio/simple_sources_unittest.cc',
+ 'audio/win/audio_low_latency_input_win_unittest.cc',
+ 'audio/win/audio_low_latency_output_win_unittest.cc',
+ 'audio/win/audio_output_win_unittest.cc',
+ 'base/audio_renderer_mixer_unittest.cc',
+ 'base/audio_renderer_mixer_input_unittest.cc',
+ 'base/buffers_unittest.cc',
+ 'base/clock_unittest.cc',
+ 'base/composite_filter_unittest.cc',
+ 'base/data_buffer_unittest.cc',
+ 'base/decoder_buffer_unittest.cc',
+ 'base/djb2_unittest.cc',
+ 'base/fake_audio_render_callback.cc',
+ 'base/fake_audio_render_callback.h',
+ 'base/filter_collection_unittest.cc',
+ 'base/h264_bitstream_converter_unittest.cc',
+ 'base/pipeline_unittest.cc',
+ 'base/ranges_unittest.cc',
+ 'base/run_all_unittests.cc',
+ 'base/seekable_buffer_unittest.cc',
+ 'base/state_matrix_unittest.cc',
+ 'base/test_data_util.cc',
+ 'base/test_data_util.h',
+ 'base/video_frame_unittest.cc',
+ 'base/video_util_unittest.cc',
+ 'base/yuv_convert_unittest.cc',
+ 'crypto/aes_decryptor_unittest.cc',
+ 'ffmpeg/ffmpeg_common_unittest.cc',
+ 'filters/audio_renderer_algorithm_unittest.cc',
+ 'filters/audio_renderer_impl_unittest.cc',
+ 'filters/bitstream_converter_unittest.cc',
+ 'filters/chunk_demuxer_unittest.cc',
+ 'filters/ffmpeg_audio_decoder_unittest.cc',
+ 'filters/ffmpeg_decoder_unittest.h',
+ 'filters/ffmpeg_demuxer_unittest.cc',
+ 'filters/ffmpeg_glue_unittest.cc',
+ 'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
+ 'filters/ffmpeg_video_decoder_unittest.cc',
+ 'filters/file_data_source_unittest.cc',
+ 'filters/pipeline_integration_test.cc',
+ 'filters/pipeline_integration_test_base.cc',
+ 'filters/source_buffer_stream_unittest.cc',
+ 'filters/video_renderer_base_unittest.cc',
+ 'video/capture/video_capture_device_unittest.cc',
+ 'webm/cluster_builder.cc',
+ 'webm/cluster_builder.h',
+ 'webm/webm_cluster_parser_unittest.cc',
+ 'webm/webm_content_encodings_client_unittest.cc',
+ 'webm/webm_parser_unittest.cc',
+ ],
+ 'conditions': [
+ ['os_posix==1 and OS!=\"mac\"', {
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ['OS != \"android\"', {
+ 'dependencies': [
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ }],
+ ['OS == \"android\"', {
+ 'sources!': [
+ 'audio/audio_input_volume_unittest.cc',
+ 'base/test_data_util.cc',
+ 'base/test_data_util.h',
+ 'ffmpeg/ffmpeg_common_unittest.cc',
+ 'filters/ffmpeg_audio_decoder_unittest.cc',
+ 'filters/bitstream_converter_unittest.cc',
+ 'filters/chunk_demuxer_unittest.cc',
+ 'filters/ffmpeg_demuxer_unittest.cc',
+ 'filters/ffmpeg_glue_unittest.cc',
+ 'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
+ 'filters/ffmpeg_video_decoder_unittest.cc',
+ 'filters/pipeline_integration_test.cc',
+ 'filters/pipeline_integration_test_base.cc',
+ 'mp4/mp4_stream_parser_unittest.cc',
+ 'webm/webm_cluster_parser_unittest.cc',
+ ],
+ }],
+ ['OS == \"linux\"', {
+ 'conditions': [
+ ['use_cras == 1', {
+ 'sources': [
+ 'audio/linux/cras_output_unittest.cc',
+ ],
+ 'defines': [
+ 'USE_CRAS',
+ ],
+ }],
+ ],
+ }],
+ [ 'target_arch==\"ia32\" or target_arch==\"x64\"', {
+ 'sources': [
+ 'base/simd/convert_rgb_to_yuv_unittest.cc',
+ ],
+ }],
+ ['proprietary_codecs==1 or branding==\"Chrome\"', {
+ 'sources': [
+ 'mp4/avc_unittest.cc',
+ 'mp4/box_reader_unittest.cc',
+ 'mp4/mp4_stream_parser_unittest.cc',
+ 'mp4/offset_byte_queue_unittest.cc',
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'media_test_support',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ '../testing/gmock.gyp:gmock',
+ '../testing/gtest.gyp:gtest',
+ ],
+ 'sources': [
+ 'audio/test_audio_input_controller_factory.cc',
+ 'audio/test_audio_input_controller_factory.h',
+ 'base/mock_callback.cc',
+ 'base/mock_callback.h',
+ 'base/mock_data_source_host.cc',
+ 'base/mock_data_source_host.h',
+ 'base/mock_demuxer_host.cc',
+ 'base/mock_demuxer_host.h',
+ 'base/mock_filter_host.cc',
+ 'base/mock_filter_host.h',
+ 'base/mock_filters.cc',
+ 'base/mock_filters.h',
+ ],
+ },
+ {
+ 'target_name': 'scaler_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../skia/skia.gyp:skia',
+ ],
+ 'sources': [
+ 'tools/scaler_bench/scaler_bench.cc',
+ ],
+ },
+ {
+ 'target_name': 'qt_faststart',
+ 'type': 'executable',
+ 'sources': [
+ 'tools/qt_faststart/qt_faststart.c'
+ ],
+ },
+ {
+ 'target_name': 'seek_tester',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ ],
+ 'sources': [
+ 'tools/seek_tester/seek_tester.cc',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS==\"win\"', {
+ 'targets': [
+ {
+ 'target_name': 'player_wtl',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
+ '../ui/ui.gyp:ui',
+ ],
+ 'include_dirs': [
+ '<(DEPTH)/third_party/wtl/include',
+ ],
+ 'sources': [
+ 'tools/player_wtl/list.h',
+ 'tools/player_wtl/mainfrm.h',
+ 'tools/player_wtl/movie.cc',
+ 'tools/player_wtl/movie.h',
+ 'tools/player_wtl/player_wtl.cc',
+ 'tools/player_wtl/player_wtl.rc',
+ 'tools/player_wtl/props.h',
+ 'tools/player_wtl/seek.h',
+ 'tools/player_wtl/resource.h',
+ 'tools/player_wtl/view.h',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '2', # Set /SUBSYSTEM:WINDOWS
+ },
+ },
+ 'defines': [
+ '_CRT_SECURE_NO_WARNINGS=1',
+ ],
+ },
+ ],
+ }],
+ ['OS == \"win\" or toolkit_uses_gtk == 1', {
+ 'targets': [
+ {
+ 'target_name': 'shader_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../ui/gl/gl.gyp:gl',
+ ],
+ 'sources': [
+ 'tools/shader_bench/shader_bench.cc',
+ 'tools/shader_bench/cpu_color_painter.cc',
+ 'tools/shader_bench/cpu_color_painter.h',
+ 'tools/shader_bench/gpu_color_painter.cc',
+ 'tools/shader_bench/gpu_color_painter.h',
+ 'tools/shader_bench/gpu_painter.cc',
+ 'tools/shader_bench/gpu_painter.h',
+ 'tools/shader_bench/painter.cc',
+ 'tools/shader_bench/painter.h',
+ 'tools/shader_bench/window.cc',
+ 'tools/shader_bench/window.h',
+ ],
+ 'conditions': [
+ ['toolkit_uses_gtk == 1', {
+ 'dependencies': [
+ '../build/linux/system.gyp:gtk',
+ ],
+ 'sources': [
+ 'tools/shader_bench/window_linux.cc',
+ ],
+ }],
+ ['OS==\"win\"', {
+ 'dependencies': [
+ '../third_party/angle/src/build_angle.gyp:libEGL',
+ '../third_party/angle/src/build_angle.gyp:libGLESv2',
+ ],
+ 'sources': [
+ 'tools/shader_bench/window_win.cc',
+ ],
+ }],
+ ],
+ },
+ ],
+ }],
+ ['OS == \"linux\" and target_arch != \"arm\"', {
+ 'targets': [
+ {
+ 'target_name': 'tile_render_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ '../base/base.gyp:base',
+ '../ui/gl/gl.gyp:gl',
+ ],
+ 'libraries': [
+ '-lGL',
+ '-ldl',
+ ],
+ 'sources': [
+ 'tools/tile_render_bench/tile_render_bench.cc',
+ ],
+ },
+ ],
+ }],
+ ['os_posix == 1 and OS != \"mac\" and OS != \"android\"', {
+ 'targets': [
+ {
+ 'target_name': 'player_x11',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../ui/gl/gl.gyp:gl',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-ldl',
+ '-lX11',
+ '-lXrender',
+ '-lXext',
+ ],
+ },
+ 'sources': [
+ 'tools/player_x11/data_source_logger.cc',
+ 'tools/player_x11/data_source_logger.h',
+ 'tools/player_x11/gl_video_renderer.cc',
+ 'tools/player_x11/gl_video_renderer.h',
+ 'tools/player_x11/player_x11.cc',
+ 'tools/player_x11/x11_video_renderer.cc',
+ 'tools/player_x11/x11_video_renderer.h',
+ ],
+ },
+ ],
+ }],
+ ['OS == \"android\"', {
+ 'targets': [
+ {
+ 'target_name': 'player_android',
+ 'type': 'static_library',
+ 'sources': [
+ 'base/android/media_player_bridge.cc',
+ 'base/android/media_player_bridge.h',
+ ],
+ 'dependencies': [
+ '../base/base.gyp:base',
+ ],
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)/media',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'generate-jni-headers',
+ 'inputs': [
+ '../base/android/jni_generator/jni_generator.py',
+ 'base/android/java/src/org/chromium/media/MediaPlayerListener.java',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/media/jni/media_player_listener_jni.h',
+ ],
+ 'action': [
+ 'python',
+ '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+ '-o',
+ '<@(_inputs)',
+ '<@(_outputs)',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'media_java',
+ 'type': 'none',
+ 'dependencies': [ '../base/base.gyp:base_java' ],
+ 'variables': {
+ 'package_name': 'media',
+ 'java_in_dir': 'base/android/java',
+ },
+ 'includes': [ '../build/java.gypi' ],
+ },
+
+ ],
+ }, { # OS != \"android\"'
+ # Android does not use ffmpeg, so disable the targets which require it.
+ 'targets': [
+ {
+ 'target_name': 'ffmpeg_unittests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'media_test_support',
+ '../base/base.gyp:base',
+ '../base/base.gyp:base_i18n',
+ '../base/base.gyp:test_support_base',
+ '../base/base.gyp:test_support_perf',
+ '../testing/gtest.gyp:gtest',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'ffmpeg/ffmpeg_unittest.cc',
+ ],
+ 'conditions': [
+ ['toolkit_uses_gtk == 1', {
+ 'dependencies': [
+ # Needed for the following #include chain:
+ # base/run_all_unittests.cc
+ # ../base/test_suite.h
+ # gtk/gtk.h
+ '../build/linux/system.gyp:gtk',
+ ],
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'ffmpeg_regression_tests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'media_test_support',
+ '../base/base.gyp:test_support_base',
+ '../testing/gmock.gyp:gmock',
+ '../testing/gtest.gyp:gtest',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'base/test_data_util.cc',
+ 'base/run_all_unittests.cc',
+ 'ffmpeg/ffmpeg_regression_tests.cc',
+ 'filters/pipeline_integration_test_base.cc',
+ ],
+ 'conditions': [
+ ['os_posix==1 and OS!=\"mac\"', {
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'ffmpeg_tests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'test/ffmpeg_tests/ffmpeg_tests.cc',
+ ],
+ },
+ {
+ 'target_name': 'media_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'tools/media_bench/media_bench.cc',
+ ],
+ },
+ ],
+ }]
+ ],
+}
+" 0 64 (face font-lock-comment-face) 64 137 (face font-lock-comment-face) 137 166 (face font-lock-comment-face) 166 171 nil 171 172 (face font-lock-string-face) 172 181 (face font-lock-keyword-face) 181 182 (face font-lock-string-face) 182 190 nil 190 191 (face font-lock-string-face) 191 204 (face font-lock-variable-name-face) 204 205 (face font-lock-string-face) 205 214 nil 214 269 (face font-lock-comment-face) 269 273 nil 273 274 (face font-lock-string-face) 274 289 (face font-lock-variable-name-face) 289 290 (face font-lock-string-face) 290 299 nil 299 365 (face font-lock-comment-face) 365 369 nil 369 370 (face font-lock-string-face) 370 379 (face font-lock-variable-name-face) 379 380 (face font-lock-string-face) 380 392 nil 392 393 (face font-lock-string-face) 393 400 (face font-lock-keyword-face) 400 401 (face font-lock-string-face) 401 417 nil 417 418 (face font-lock-string-face) 418 429 (face font-lock-keyword-face) 429 430 (face font-lock-string-face) 430 432 nil 432 433 (face font-lock-string-face) 433 438 (face font-lock-function-name-face) 438 439 (face font-lock-string-face) 439 447 nil 447 448 (face font-lock-string-face) 448 452 (face font-lock-keyword-face) 452 453 (face font-lock-string-face) 453 455 nil 455 458 (face font-lock-string-face) 458 467 (face font-lock-variable-name-face) 467 469 (face font-lock-string-face) 469 477 nil 477 478 (face font-lock-string-face) 478 490 (face font-lock-keyword-face) 490 491 (face font-lock-string-face) 491 503 nil 503 504 (face font-lock-string-face) 504 515 (face font-lock-function-name-face) 515 516 (face font-lock-string-face) 516 526 nil 526 527 (face font-lock-string-face) 527 548 (face font-lock-function-name-face) 548 549 (face font-lock-string-face) 549 559 nil 559 560 (face font-lock-string-face) 560 643 (face font-lock-function-name-face) 643 644 (face font-lock-string-face) 644 654 nil 654 655 (face font-lock-string-face) 655 696 (face font-lock-function-name-face) 696 697 (face font-lock-string-face) 697 707 nil 707 708 (face font-lock-string-face) 708 735 (face font-lock-function-name-face) 735 736 (face font-lock-string-face) 736 746 nil 746 747 (face font-lock-string-face) 747 784 (face font-lock-function-name-face) 784 785 (face font-lock-string-face) 785 795 nil 795 796 (face font-lock-string-face) 796 811 (face font-lock-function-name-face) 811 812 (face font-lock-string-face) 812 829 nil 829 830 (face font-lock-string-face) 830 837 (face font-lock-keyword-face) 837 838 (face font-lock-string-face) 838 850 nil 850 851 (face font-lock-string-face) 851 871 (face font-lock-preprocessor-face) 871 872 (face font-lock-string-face) 872 889 nil 889 890 (face font-lock-string-face) 890 902 (face font-lock-keyword-face) 902 903 (face font-lock-string-face) 903 915 nil 915 916 (face font-lock-string-face) 916 918 (face font-lock-constant-face) 918 919 (face font-lock-string-face) 919 936 nil 936 937 (face font-lock-string-face) 937 944 (face font-lock-keyword-face) 944 945 (face font-lock-string-face) 945 957 nil 957 958 (face font-lock-string-face) 958 996 (face font-lock-constant-face) 996 997 (face font-lock-string-face) 997 1007 nil 1007 1008 (face font-lock-string-face) 1008 1045 (face font-lock-constant-face) 1045 1046 (face font-lock-string-face) 1046 1056 nil 1056 1057 (face font-lock-string-face) 1057 1100 (face font-lock-constant-face) 1100 1101 (face font-lock-string-face) 1101 1111 nil 1111 1112 (face font-lock-string-face) 1112 1154 (face font-lock-constant-face) 1154 1155 (face font-lock-string-face) 1155 1165 
nil 1165 1166 (face font-lock-string-face) 1166 1197 (face font-lock-constant-face) 1197 1198 (face font-lock-string-face) 1198 1208 nil 1208 1209 (face font-lock-string-face) 1209 1239 (face font-lock-constant-face) 1239 1240 (face font-lock-string-face) 1240 1250 nil 1250 1251 (face font-lock-string-face) 1251 1283 (face font-lock-constant-face) 1283 1284 (face font-lock-string-face) 1284 1294 nil 1294 1295 (face font-lock-string-face) 1295 1326 (face font-lock-constant-face) 1326 1327 (face font-lock-string-face) 1327 1337 nil 1337 1338 (face font-lock-string-face) 1338 1369 (face font-lock-constant-face) 1369 1370 (face font-lock-string-face) 1370 1380 nil 1380 1381 (face font-lock-string-face) 1381 1419 (face font-lock-constant-face) 1419 1420 (face font-lock-string-face) 1420 1430 nil 1430 1431 (face font-lock-string-face) 1431 1467 (face font-lock-constant-face) 1467 1468 (face font-lock-string-face) 1468 1478 nil 1478 1479 (face font-lock-string-face) 1479 1507 (face font-lock-constant-face) 1507 1508 (face font-lock-string-face) 1508 1518 nil 1518 1519 (face font-lock-string-face) 1519 1546 (face font-lock-constant-face) 1546 1547 (face font-lock-string-face) 1547 1557 nil 1557 1558 (face font-lock-string-face) 1558 1574 (face font-lock-constant-face) 1574 1575 (face font-lock-string-face) 1575 1585 nil 1585 1586 (face font-lock-string-face) 1586 1617 (face font-lock-constant-face) 1617 1618 (face font-lock-string-face) 1618 1628 nil 1628 1629 (face font-lock-string-face) 1629 1659 (face font-lock-constant-face) 1659 1660 (face font-lock-string-face) 1660 1670 nil 1670 1671 (face font-lock-string-face) 1671 1703 (face font-lock-constant-face) 1703 1704 (face font-lock-string-face) 1704 1714 nil 1714 1715 (face font-lock-string-face) 1715 1746 (face font-lock-constant-face) 1746 1747 (face font-lock-string-face) 1747 1757 nil 1757 1758 (face font-lock-string-face) 1758 1784 (face font-lock-constant-face) 1784 1785 (face font-lock-string-face) 1785 1795 nil 1795 1796 (face font-lock-string-face) 1796 1821 (face font-lock-constant-face) 1821 1822 (face font-lock-string-face) 1822 1832 nil 1832 1833 (face font-lock-string-face) 1833 1855 (face font-lock-constant-face) 1855 1856 (face font-lock-string-face) 1856 1866 nil 1866 1867 (face font-lock-string-face) 1867 1888 (face font-lock-constant-face) 1888 1889 (face font-lock-string-face) 1889 1899 nil 1899 1900 (face font-lock-string-face) 1900 1927 (face font-lock-constant-face) 1927 1928 (face font-lock-string-face) 1928 1938 nil 1938 1939 (face font-lock-string-face) 1939 1965 (face font-lock-constant-face) 1965 1966 (face font-lock-string-face) 1966 1976 nil 1976 1977 (face font-lock-string-face) 1977 2009 (face font-lock-constant-face) 2009 2010 (face font-lock-string-face) 2010 2020 nil 2020 2021 (face font-lock-string-face) 2021 2052 (face font-lock-constant-face) 2052 2053 (face font-lock-string-face) 2053 2063 nil 2063 2064 (face font-lock-string-face) 2064 2096 (face font-lock-constant-face) 2096 2097 (face font-lock-string-face) 2097 2107 nil 2107 2108 (face font-lock-string-face) 2108 2139 (face font-lock-constant-face) 2139 2140 (face font-lock-string-face) 2140 2150 nil 2150 2151 (face font-lock-string-face) 2151 2188 (face font-lock-constant-face) 2188 2189 (face font-lock-string-face) 2189 2199 nil 2199 2200 (face font-lock-string-face) 2200 2236 (face font-lock-constant-face) 2236 2237 (face font-lock-string-face) 2237 2247 nil 2247 2248 (face font-lock-string-face) 2248 2275 (face font-lock-constant-face) 2275 2276 
(face font-lock-string-face) 2276 2286 nil 2286 2287 (face font-lock-string-face) 2287 2313 (face font-lock-constant-face) 2313 2314 (face font-lock-string-face) 2314 2324 nil 2324 2325 (face font-lock-string-face) 2325 2352 (face font-lock-constant-face) 2352 2353 (face font-lock-string-face) 2353 2363 nil 2363 2364 (face font-lock-string-face) 2364 2390 (face font-lock-constant-face) 2390 2391 (face font-lock-string-face) 2391 2401 nil 2401 2402 (face font-lock-string-face) 2402 2427 (face font-lock-constant-face) 2427 2428 (face font-lock-string-face) 2428 2438 nil 2438 2439 (face font-lock-string-face) 2439 2463 (face font-lock-constant-face) 2463 2464 (face font-lock-string-face) 2464 2474 nil 2474 2475 (face font-lock-string-face) 2475 2494 (face font-lock-constant-face) 2494 2495 (face font-lock-string-face) 2495 2505 nil 2505 2506 (face font-lock-string-face) 2506 2524 (face font-lock-constant-face) 2524 2525 (face font-lock-string-face) 2525 2535 nil 2535 2536 (face font-lock-string-face) 2536 2571 (face font-lock-constant-face) 2571 2572 (face font-lock-string-face) 2572 2582 nil 2582 2583 (face font-lock-string-face) 2583 2617 (face font-lock-constant-face) 2617 2618 (face font-lock-string-face) 2618 2628 nil 2628 2629 (face font-lock-string-face) 2629 2668 (face font-lock-constant-face) 2668 2669 (face font-lock-string-face) 2669 2679 nil 2679 2680 (face font-lock-string-face) 2680 2721 (face font-lock-constant-face) 2721 2722 (face font-lock-string-face) 2722 2732 nil 2732 2733 (face font-lock-string-face) 2733 2765 (face font-lock-constant-face) 2765 2766 (face font-lock-string-face) 2766 2776 nil 2776 2777 (face font-lock-string-face) 2777 2808 (face font-lock-constant-face) 2808 2809 (face font-lock-string-face) 2809 2819 nil 2819 2820 (face font-lock-string-face) 2820 2853 (face font-lock-constant-face) 2853 2854 (face font-lock-string-face) 2854 2864 nil 2864 2865 (face font-lock-string-face) 2865 2897 (face font-lock-constant-face) 2897 2898 (face font-lock-string-face) 2898 2908 nil 2908 2909 (face font-lock-string-face) 2909 2943 (face font-lock-constant-face) 2943 2944 (face font-lock-string-face) 2944 2954 nil 2954 2955 (face font-lock-string-face) 2955 2988 (face font-lock-constant-face) 2988 2989 (face font-lock-string-face) 2989 2999 nil 2999 3000 (face font-lock-string-face) 3000 3025 (face font-lock-constant-face) 3025 3026 (face font-lock-string-face) 3026 3036 nil 3036 3037 (face font-lock-string-face) 3037 3061 (face font-lock-constant-face) 3061 3062 (face font-lock-string-face) 3062 3072 nil 3072 3073 (face font-lock-string-face) 3073 3099 (face font-lock-constant-face) 3099 3100 (face font-lock-string-face) 3100 3110 nil 3110 3111 (face font-lock-string-face) 3111 3136 (face font-lock-constant-face) 3136 3137 (face font-lock-string-face) 3137 3147 nil 3147 3148 (face font-lock-string-face) 3148 3172 (face font-lock-constant-face) 3172 3173 (face font-lock-string-face) 3173 3183 nil 3183 3184 (face font-lock-string-face) 3184 3207 (face font-lock-constant-face) 3207 3208 (face font-lock-string-face) 3208 3218 nil 3218 3219 (face font-lock-string-face) 3219 3246 (face font-lock-constant-face) 3246 3247 (face font-lock-string-face) 3247 3257 nil 3257 3258 (face font-lock-string-face) 3258 3284 (face font-lock-constant-face) 3284 3285 (face font-lock-string-face) 3285 3295 nil 3295 3296 (face font-lock-string-face) 3296 3322 (face font-lock-constant-face) 3322 3323 (face font-lock-string-face) 3323 3333 nil 3333 3334 (face font-lock-string-face) 3334 3359 
(face font-lock-constant-face) 3359 3360 (face font-lock-string-face) 3360 3370 nil 3370 3371 (face font-lock-string-face) 3371 3409 (face font-lock-constant-face) 3409 3410 (face font-lock-string-face) 3410 3420 nil 3420 3421 (face font-lock-string-face) 3421 3458 (face font-lock-constant-face) 3458 3459 (face font-lock-string-face) 3459 3469 nil 3469 3470 (face font-lock-string-face) 3470 3498 (face font-lock-constant-face) 3498 3499 (face font-lock-string-face) 3499 3509 nil 3509 3510 (face font-lock-string-face) 3510 3537 (face font-lock-constant-face) 3537 3538 (face font-lock-string-face) 3538 3548 nil 3548 3549 (face font-lock-string-face) 3549 3589 (face font-lock-constant-face) 3589 3590 (face font-lock-string-face) 3590 3600 nil 3600 3601 (face font-lock-string-face) 3601 3640 (face font-lock-constant-face) 3640 3641 (face font-lock-string-face) 3641 3651 nil 3651 3652 (face font-lock-string-face) 3652 3693 (face font-lock-constant-face) 3693 3694 (face font-lock-string-face) 3694 3704 nil 3704 3705 (face font-lock-string-face) 3705 3745 (face font-lock-constant-face) 3745 3746 (face font-lock-string-face) 3746 3756 nil 3756 3757 (face font-lock-string-face) 3757 3787 (face font-lock-constant-face) 3787 3788 (face font-lock-string-face) 3788 3798 nil 3798 3799 (face font-lock-string-face) 3799 3828 (face font-lock-constant-face) 3828 3829 (face font-lock-string-face) 3829 3839 nil 3839 3840 (face font-lock-string-face) 3840 3869 (face font-lock-constant-face) 3869 3870 (face font-lock-string-face) 3870 3880 nil 3880 3881 (face font-lock-string-face) 3881 3909 (face font-lock-constant-face) 3909 3910 (face font-lock-string-face) 3910 3920 nil 3920 3921 (face font-lock-string-face) 3921 3945 (face font-lock-constant-face) 3945 3946 (face font-lock-string-face) 3946 3956 nil 3956 3957 (face font-lock-string-face) 3957 3980 (face font-lock-constant-face) 3980 3981 (face font-lock-string-face) 3981 3991 nil 3991 3992 (face font-lock-string-face) 3992 4019 (face font-lock-constant-face) 4019 4020 (face font-lock-string-face) 4020 4030 nil 4030 4031 (face font-lock-string-face) 4031 4057 (face font-lock-constant-face) 4057 4058 (face font-lock-string-face) 4058 4068 nil 4068 4069 (face font-lock-string-face) 4069 4090 (face font-lock-constant-face) 4090 4091 (face font-lock-string-face) 4091 4101 nil 4101 4102 (face font-lock-string-face) 4102 4122 (face font-lock-constant-face) 4122 4123 (face font-lock-string-face) 4123 4133 nil 4133 4134 (face font-lock-string-face) 4134 4157 (face font-lock-constant-face) 4157 4158 (face font-lock-string-face) 4158 4168 nil 4168 4169 (face font-lock-string-face) 4169 4191 (face font-lock-constant-face) 4191 4192 (face font-lock-string-face) 4192 4202 nil 4202 4203 (face font-lock-string-face) 4203 4243 (face font-lock-constant-face) 4243 4244 (face font-lock-string-face) 4244 4254 nil 4254 4255 (face font-lock-string-face) 4255 4294 (face font-lock-constant-face) 4294 4295 (face font-lock-string-face) 4295 4305 nil 4305 4306 (face font-lock-string-face) 4306 4347 (face font-lock-constant-face) 4347 4348 (face font-lock-string-face) 4348 4358 nil 4358 4359 (face font-lock-string-face) 4359 4399 (face font-lock-constant-face) 4399 4400 (face font-lock-string-face) 4400 4410 nil 4410 4411 (face font-lock-string-face) 4411 4441 (face font-lock-constant-face) 4441 4442 (face font-lock-string-face) 4442 4452 nil 4452 4453 (face font-lock-string-face) 4453 4482 (face font-lock-constant-face) 4482 4483 (face font-lock-string-face) 4483 4493 nil 4493 4494 
(face font-lock-string-face) 4494 4523 (face font-lock-constant-face) 4523 4524 (face font-lock-string-face) 4524 4534 nil 4534 4535 (face font-lock-string-face) 4535 4563 (face font-lock-constant-face) 4563 4564 (face font-lock-string-face) 4564 4574 nil 4574 4575 (face font-lock-string-face) 4575 4610 (face font-lock-constant-face) 4610 4611 (face font-lock-string-face) 4611 4621 nil 4621 4622 (face font-lock-string-face) 4622 4656 (face font-lock-constant-face) 4656 4657 (face font-lock-string-face) 4657 4667 nil 4667 4668 (face font-lock-string-face) 4668 4697 (face font-lock-constant-face) 4697 4698 (face font-lock-string-face) 4698 4708 nil 4708 4709 (face font-lock-string-face) 4709 4737 (face font-lock-constant-face) 4737 4738 (face font-lock-string-face) 4738 4748 nil 4748 4749 (face font-lock-string-face) 4749 4780 (face font-lock-constant-face) 4780 4781 (face font-lock-string-face) 4781 4791 nil 4791 4792 (face font-lock-string-face) 4792 4822 (face font-lock-constant-face) 4822 4823 (face font-lock-string-face) 4823 4833 nil 4833 4834 (face font-lock-string-face) 4834 4869 (face font-lock-constant-face) 4869 4870 (face font-lock-string-face) 4870 4880 nil 4880 4881 (face font-lock-string-face) 4881 4915 (face font-lock-constant-face) 4915 4916 (face font-lock-string-face) 4916 4926 nil 4926 4927 (face font-lock-string-face) 4927 4948 (face font-lock-constant-face) 4948 4949 (face font-lock-string-face) 4949 4959 nil 4959 4960 (face font-lock-string-face) 4960 4980 (face font-lock-constant-face) 4980 4981 (face font-lock-string-face) 4981 4991 nil 4991 4992 (face font-lock-string-face) 4992 5020 (face font-lock-constant-face) 5020 5021 (face font-lock-string-face) 5021 5031 nil 5031 5032 (face font-lock-string-face) 5032 5059 (face font-lock-constant-face) 5059 5060 (face font-lock-string-face) 5060 5070 nil 5070 5071 (face font-lock-string-face) 5071 5092 (face font-lock-constant-face) 5092 5093 (face font-lock-string-face) 5093 5103 nil 5103 5104 (face font-lock-string-face) 5104 5132 (face font-lock-constant-face) 5132 5133 (face font-lock-string-face) 5133 5143 nil 5143 5144 (face font-lock-string-face) 5144 5171 (face font-lock-constant-face) 5171 5172 (face font-lock-string-face) 5172 5182 nil 5182 5183 (face font-lock-string-face) 5183 5217 (face font-lock-constant-face) 5217 5218 (face font-lock-string-face) 5218 5228 nil 5228 5229 (face font-lock-string-face) 5229 5262 (face font-lock-constant-face) 5262 5263 (face font-lock-string-face) 5263 5273 nil 5273 5274 (face font-lock-string-face) 5274 5297 (face font-lock-constant-face) 5297 5298 (face font-lock-string-face) 5298 5308 nil 5308 5309 (face font-lock-string-face) 5309 5324 (face font-lock-constant-face) 5324 5325 (face font-lock-string-face) 5325 5335 nil 5335 5336 (face font-lock-string-face) 5336 5350 (face font-lock-constant-face) 5350 5351 (face font-lock-string-face) 5351 5361 nil 5361 5362 (face font-lock-string-face) 5362 5380 (face font-lock-constant-face) 5380 5381 (face font-lock-string-face) 5381 5391 nil 5391 5392 (face font-lock-string-face) 5392 5409 (face font-lock-constant-face) 5409 5410 (face font-lock-string-face) 5410 5420 nil 5420 5421 (face font-lock-string-face) 5421 5443 (face font-lock-constant-face) 5443 5444 (face font-lock-string-face) 5444 5454 nil 5454 5455 (face font-lock-string-face) 5455 5476 (face font-lock-constant-face) 5476 5477 (face font-lock-string-face) 5477 5487 nil 5487 5488 (face font-lock-string-face) 5488 5501 (face font-lock-constant-face) 5501 5502 (face 
font-lock-string-face) 5502 5512 nil 5512 5513 (face font-lock-string-face) 5513 5525 (face font-lock-constant-face) 5525 5526 (face font-lock-string-face) 5526 5536 nil 5536 5537 (face font-lock-string-face) 5537 5561 (face font-lock-constant-face) 5561 5562 (face font-lock-string-face) 5562 5572 nil 5572 5573 (face font-lock-string-face) 5573 5596 (face font-lock-constant-face) 5596 5597 (face font-lock-string-face) 5597 5607 nil 5607 5608 (face font-lock-string-face) 5608 5627 (face font-lock-constant-face) 5627 5628 (face font-lock-string-face) 5628 5638 nil 5638 5639 (face font-lock-string-face) 5639 5657 (face font-lock-constant-face) 5657 5658 (face font-lock-string-face) 5658 5668 nil 5668 5669 (face font-lock-string-face) 5669 5688 (face font-lock-constant-face) 5688 5689 (face font-lock-string-face) 5689 5699 nil 5699 5700 (face font-lock-string-face) 5700 5718 (face font-lock-constant-face) 5718 5719 (face font-lock-string-face) 5719 5729 nil 5729 5730 (face font-lock-string-face) 5730 5752 (face font-lock-constant-face) 5752 5753 (face font-lock-string-face) 5753 5763 nil 5763 5764 (face font-lock-string-face) 5764 5785 (face font-lock-constant-face) 5785 5786 (face font-lock-string-face) 5786 5796 nil 5796 5797 (face font-lock-string-face) 5797 5819 (face font-lock-constant-face) 5819 5820 (face font-lock-string-face) 5820 5830 nil 5830 5831 (face font-lock-string-face) 5831 5852 (face font-lock-constant-face) 5852 5853 (face font-lock-string-face) 5853 5863 nil 5863 5864 (face font-lock-string-face) 5864 5880 (face font-lock-constant-face) 5880 5881 (face font-lock-string-face) 5881 5891 nil 5891 5892 (face font-lock-string-face) 5892 5915 (face font-lock-constant-face) 5915 5916 (face font-lock-string-face) 5916 5926 nil 5926 5927 (face font-lock-string-face) 5927 5942 (face font-lock-constant-face) 5942 5943 (face font-lock-string-face) 5943 5953 nil 5953 5954 (face font-lock-string-face) 5954 5968 (face font-lock-constant-face) 5968 5969 (face font-lock-string-face) 5969 5979 nil 5979 5980 (face font-lock-string-face) 5980 6002 (face font-lock-constant-face) 6002 6003 (face font-lock-string-face) 6003 6013 nil 6013 6014 (face font-lock-string-face) 6014 6035 (face font-lock-constant-face) 6035 6036 (face font-lock-string-face) 6036 6046 nil 6046 6047 (face font-lock-string-face) 6047 6059 (face font-lock-constant-face) 6059 6060 (face font-lock-string-face) 6060 6070 nil 6070 6071 (face font-lock-string-face) 6071 6082 (face font-lock-constant-face) 6082 6083 (face font-lock-string-face) 6083 6093 nil 6093 6094 (face font-lock-string-face) 6094 6119 (face font-lock-constant-face) 6119 6120 (face font-lock-string-face) 6120 6130 nil 6130 6131 (face font-lock-string-face) 6131 6155 (face font-lock-constant-face) 6155 6156 (face font-lock-string-face) 6156 6166 nil 6166 6167 (face font-lock-string-face) 6167 6185 (face font-lock-constant-face) 6185 6186 (face font-lock-string-face) 6186 6196 nil 6196 6197 (face font-lock-string-face) 6197 6212 (face font-lock-constant-face) 6212 6213 (face font-lock-string-face) 6213 6223 nil 6223 6224 (face font-lock-string-face) 6224 6238 (face font-lock-constant-face) 6238 6239 (face font-lock-string-face) 6239 6249 nil 6249 6250 (face font-lock-string-face) 6250 6282 (face font-lock-constant-face) 6282 6283 (face font-lock-string-face) 6283 6293 nil 6293 6294 (face font-lock-string-face) 6294 6325 (face font-lock-constant-face) 6325 6326 (face font-lock-string-face) 6326 6336 nil 6336 6337 (face font-lock-string-face) 6337 6349 (face 
font-lock-constant-face) 6349 6350 (face font-lock-string-face) 6350 6360 nil 6360 6361 (face font-lock-string-face) 6361 6382 (face font-lock-constant-face) 6382 6383 (face font-lock-string-face) 6383 6393 nil 6393 6394 (face font-lock-string-face) 6394 6413 (face font-lock-constant-face) 6413 6414 (face font-lock-string-face) 6414 6424 nil 6424 6425 (face font-lock-string-face) 6425 6442 (face font-lock-constant-face) 6442 6443 (face font-lock-string-face) 6443 6453 nil 6453 6454 (face font-lock-string-face) 6454 6470 (face font-lock-constant-face) 6470 6471 (face font-lock-string-face) 6471 6481 nil 6481 6482 (face font-lock-string-face) 6482 6504 (face font-lock-constant-face) 6504 6505 (face font-lock-string-face) 6505 6515 nil 6515 6516 (face font-lock-string-face) 6516 6535 (face font-lock-constant-face) 6535 6536 (face font-lock-string-face) 6536 6546 nil 6546 6547 (face font-lock-string-face) 6547 6569 (face font-lock-constant-face) 6569 6570 (face font-lock-string-face) 6570 6580 nil 6580 6581 (face font-lock-string-face) 6581 6602 (face font-lock-constant-face) 6602 6603 (face font-lock-string-face) 6603 6613 nil 6613 6614 (face font-lock-string-face) 6614 6631 (face font-lock-constant-face) 6631 6632 (face font-lock-string-face) 6632 6642 nil 6642 6643 (face font-lock-string-face) 6643 6671 (face font-lock-constant-face) 6671 6672 (face font-lock-string-face) 6672 6682 nil 6682 6683 (face font-lock-string-face) 6683 6710 (face font-lock-constant-face) 6710 6711 (face font-lock-string-face) 6711 6721 nil 6721 6722 (face font-lock-string-face) 6722 6738 (face font-lock-constant-face) 6738 6739 (face font-lock-string-face) 6739 6749 nil 6749 6750 (face font-lock-string-face) 6750 6765 (face font-lock-constant-face) 6765 6766 (face font-lock-string-face) 6766 6776 nil 6776 6777 (face font-lock-string-face) 6777 6800 (face font-lock-constant-face) 6800 6801 (face font-lock-string-face) 6801 6811 nil 6811 6812 (face font-lock-string-face) 6812 6834 (face font-lock-constant-face) 6834 6835 (face font-lock-string-face) 6835 6845 nil 6845 6846 (face font-lock-string-face) 6846 6860 (face font-lock-constant-face) 6860 6861 (face font-lock-string-face) 6861 6871 nil 6871 6872 (face font-lock-string-face) 6872 6885 (face font-lock-constant-face) 6885 6886 (face font-lock-string-face) 6886 6896 nil 6896 6897 (face font-lock-string-face) 6897 6920 (face font-lock-constant-face) 6920 6921 (face font-lock-string-face) 6921 6931 nil 6931 6932 (face font-lock-string-face) 6932 6954 (face font-lock-constant-face) 6954 6955 (face font-lock-string-face) 6955 6965 nil 6965 6966 (face font-lock-string-face) 6966 6986 (face font-lock-constant-face) 6986 6987 (face font-lock-string-face) 6987 6997 nil 6997 6998 (face font-lock-string-face) 6998 7017 (face font-lock-constant-face) 7017 7018 (face font-lock-string-face) 7018 7028 nil 7028 7029 (face font-lock-string-face) 7029 7050 (face font-lock-constant-face) 7050 7051 (face font-lock-string-face) 7051 7061 nil 7061 7062 (face font-lock-string-face) 7062 7082 (face font-lock-constant-face) 7082 7083 (face font-lock-string-face) 7083 7093 nil 7093 7094 (face font-lock-string-face) 7094 7122 (face font-lock-constant-face) 7122 7123 (face font-lock-string-face) 7123 7133 nil 7133 7134 (face font-lock-string-face) 7134 7161 (face font-lock-constant-face) 7161 7162 (face font-lock-string-face) 7162 7172 nil 7172 7173 (face font-lock-string-face) 7173 7194 (face font-lock-constant-face) 7194 7195 (face font-lock-string-face) 7195 7205 nil 7205 7206 (face 
font-lock-string-face) 7206 7226 (face font-lock-constant-face) 7226 7227 (face font-lock-string-face) 7227 7237 nil 7237 7238 (face font-lock-string-face) 7238 7266 (face font-lock-constant-face) 7266 7267 (face font-lock-string-face) 7267 7277 nil 7277 7278 (face font-lock-string-face) 7278 7305 (face font-lock-constant-face) 7305 7306 (face font-lock-string-face) 7306 7316 nil 7316 7317 (face font-lock-string-face) 7317 7336 (face font-lock-constant-face) 7336 7337 (face font-lock-string-face) 7337 7347 nil 7347 7348 (face font-lock-string-face) 7348 7366 (face font-lock-constant-face) 7366 7367 (face font-lock-string-face) 7367 7377 nil 7377 7378 (face font-lock-string-face) 7378 7399 (face font-lock-constant-face) 7399 7400 (face font-lock-string-face) 7400 7410 nil 7410 7411 (face font-lock-string-face) 7411 7429 (face font-lock-constant-face) 7429 7430 (face font-lock-string-face) 7430 7440 nil 7440 7441 (face font-lock-string-face) 7441 7458 (face font-lock-constant-face) 7458 7459 (face font-lock-string-face) 7459 7469 nil 7469 7470 (face font-lock-string-face) 7470 7493 (face font-lock-constant-face) 7493 7494 (face font-lock-string-face) 7494 7504 nil 7504 7505 (face font-lock-string-face) 7505 7527 (face font-lock-constant-face) 7527 7528 (face font-lock-string-face) 7528 7538 nil 7538 7539 (face font-lock-string-face) 7539 7562 (face font-lock-constant-face) 7562 7563 (face font-lock-string-face) 7563 7573 nil 7573 7574 (face font-lock-string-face) 7574 7596 (face font-lock-constant-face) 7596 7597 (face font-lock-string-face) 7597 7607 nil 7607 7608 (face font-lock-string-face) 7608 7631 (face font-lock-constant-face) 7631 7632 (face font-lock-string-face) 7632 7642 nil 7642 7643 (face font-lock-string-face) 7643 7665 (face font-lock-constant-face) 7665 7666 (face font-lock-string-face) 7666 7676 nil 7676 7677 (face font-lock-string-face) 7677 7705 (face font-lock-constant-face) 7705 7706 (face font-lock-string-face) 7706 7716 nil 7716 7717 (face font-lock-string-face) 7717 7744 (face font-lock-constant-face) 7744 7745 (face font-lock-string-face) 7745 7755 nil 7755 7756 (face font-lock-string-face) 7756 7791 (face font-lock-constant-face) 7791 7792 (face font-lock-string-face) 7792 7802 nil 7802 7803 (face font-lock-string-face) 7803 7837 (face font-lock-constant-face) 7837 7838 (face font-lock-string-face) 7838 7848 nil 7848 7849 (face font-lock-string-face) 7849 7879 (face font-lock-constant-face) 7879 7880 (face font-lock-string-face) 7880 7890 nil 7890 7891 (face font-lock-string-face) 7891 7920 (face font-lock-constant-face) 7920 7921 (face font-lock-string-face) 7921 7931 nil 7931 7932 (face font-lock-string-face) 7932 7962 (face font-lock-constant-face) 7962 7963 (face font-lock-string-face) 7963 7973 nil 7973 7974 (face font-lock-string-face) 7974 8003 (face font-lock-constant-face) 8003 8004 (face font-lock-string-face) 8004 8014 nil 8014 8015 (face font-lock-string-face) 8015 8039 (face font-lock-constant-face) 8039 8040 (face font-lock-string-face) 8040 8050 nil 8050 8051 (face font-lock-string-face) 8051 8074 (face font-lock-constant-face) 8074 8075 (face font-lock-string-face) 8075 8085 nil 8085 8086 (face font-lock-string-face) 8086 8116 (face font-lock-constant-face) 8116 8117 (face font-lock-string-face) 8117 8127 nil 8127 8128 (face font-lock-string-face) 8128 8152 (face font-lock-constant-face) 8152 8153 (face font-lock-string-face) 8153 8163 nil 8163 8164 (face font-lock-string-face) 8164 8187 (face font-lock-constant-face) 8187 8188 (face 
[Omitted: a long run of serialized Emacs font-lock text-property data — character position ranges paired with face names such as font-lock-string-face, font-lock-constant-face, font-lock-keyword-face, font-lock-comment-face — machine-generated and not human-readable.]
font-lock-function-name-face) 30917 30918 (face font-lock-string-face) 30918 30951 nil 30951 30952 (face font-lock-string-face) 30952 30959 (face font-lock-keyword-face) 30959 30960 (face font-lock-string-face) 30960 30980 nil 30980 30981 (face font-lock-string-face) 30981 31015 (face font-lock-constant-face) 31015 31016 (face font-lock-string-face) 31016 31064 nil 31064 31065 (face font-lock-string-face) 31065 31074 (face font-lock-variable-name-face) 31074 31075 (face font-lock-string-face) 31075 31093 nil 31093 31094 (face font-lock-string-face) 31094 31106 (face font-lock-keyword-face) 31106 31107 (face font-lock-string-face) 31107 31127 nil 31127 31128 (face font-lock-string-face) 31128 31175 (face font-lock-function-name-face) 31175 31176 (face font-lock-string-face) 31176 31194 nil 31194 31195 (face font-lock-string-face) 31195 31245 (face font-lock-function-name-face) 31245 31246 (face font-lock-string-face) 31246 31279 nil 31279 31280 (face font-lock-string-face) 31280 31287 (face font-lock-keyword-face) 31287 31288 (face font-lock-string-face) 31288 31308 nil 31308 31309 (face font-lock-string-face) 31309 31341 (face font-lock-constant-face) 31341 31342 (face font-lock-string-face) 31342 31423 nil 31423 31424 (face font-lock-string-face) 31424 31462 (face font-lock-variable-name-face) 31462 31463 (face font-lock-string-face) 31463 31473 nil 31473 31474 (face font-lock-string-face) 31474 31481 (face font-lock-keyword-face) 31481 31482 (face font-lock-string-face) 31482 31506 nil 31506 31507 (face font-lock-string-face) 31507 31518 (face font-lock-keyword-face) 31518 31519 (face font-lock-string-face) 31519 31521 nil 31521 31522 (face font-lock-string-face) 31522 31539 (face font-lock-function-name-face) 31539 31540 (face font-lock-string-face) 31540 31552 nil 31552 31553 (face font-lock-string-face) 31553 31557 (face font-lock-keyword-face) 31557 31558 (face font-lock-string-face) 31558 31560 nil 31560 31561 (face font-lock-string-face) 31561 31571 (face font-lock-type-face) 31571 31572 (face font-lock-string-face) 31572 31584 nil 31584 31585 (face font-lock-string-face) 31585 31597 (face font-lock-keyword-face) 31597 31598 (face font-lock-string-face) 31598 31614 nil 31614 31615 (face font-lock-string-face) 31615 31636 (face font-lock-function-name-face) 31636 31637 (face font-lock-string-face) 31637 31651 nil 31651 31652 (face font-lock-string-face) 31652 31670 (face font-lock-function-name-face) 31670 31671 (face font-lock-string-face) 31671 31696 nil 31696 31697 (face font-lock-string-face) 31697 31706 (face font-lock-keyword-face) 31706 31707 (face font-lock-string-face) 31707 31723 nil 31723 31724 (face font-lock-string-face) 31724 31728 (face font-lock-constant-face) 31728 31729 (face font-lock-string-face) 31729 31743 nil 31743 31744 (face font-lock-string-face) 31744 31748 (face font-lock-constant-face) 31748 31749 (face font-lock-string-face) 31749 31774 nil 31774 31775 (face font-lock-string-face) 31775 31782 (face font-lock-keyword-face) 31782 31783 (face font-lock-string-face) 31783 31799 nil 31799 31800 (face font-lock-string-face) 31800 31844 (face font-lock-constant-face) 31844 31845 (face font-lock-string-face) 31845 31893 nil 31893 31894 (face font-lock-string-face) 31894 31943 (face font-lock-variable-name-face) 31943 31944 (face font-lock-string-face) 31944 31954 nil 31954 31955 (face font-lock-string-face) 31955 31962 (face font-lock-keyword-face) 31962 31963 (face font-lock-string-face) 31963 31987 nil 31987 31988 (face font-lock-string-face) 31988 31999 
(face font-lock-keyword-face) 31999 32000 (face font-lock-string-face) 32000 32002 nil 32002 32003 (face font-lock-string-face) 32003 32013 (face font-lock-function-name-face) 32013 32014 (face font-lock-string-face) 32014 32026 nil 32026 32027 (face font-lock-string-face) 32027 32031 (face font-lock-keyword-face) 32031 32032 (face font-lock-string-face) 32032 32034 nil 32034 32035 (face font-lock-string-face) 32035 32045 (face font-lock-type-face) 32045 32046 (face font-lock-string-face) 32046 32058 nil 32058 32059 (face font-lock-string-face) 32059 32071 (face font-lock-keyword-face) 32071 32072 (face font-lock-string-face) 32072 32088 nil 32088 32089 (face font-lock-string-face) 32089 32094 (face font-lock-function-name-face) 32094 32095 (face font-lock-string-face) 32095 32109 nil 32109 32110 (face font-lock-string-face) 32110 32121 (face font-lock-function-name-face) 32121 32122 (face font-lock-string-face) 32122 32136 nil 32136 32137 (face font-lock-string-face) 32137 32158 (face font-lock-function-name-face) 32158 32159 (face font-lock-string-face) 32159 32173 nil 32173 32174 (face font-lock-string-face) 32174 32192 (face font-lock-function-name-face) 32192 32193 (face font-lock-string-face) 32193 32218 nil 32218 32219 (face font-lock-string-face) 32219 32232 (face font-lock-keyword-face) 32232 32233 (face font-lock-string-face) 32233 32249 nil 32249 32250 (face font-lock-string-face) 32250 32259 (face font-lock-keyword-face) 32259 32260 (face font-lock-string-face) 32260 32278 nil 32278 32279 (face font-lock-string-face) 32279 32283 (face font-lock-constant-face) 32283 32284 (face font-lock-string-face) 32284 32300 nil 32300 32301 (face font-lock-string-face) 32301 32306 (face font-lock-constant-face) 32306 32307 (face font-lock-string-face) 32307 32323 nil 32323 32324 (face font-lock-string-face) 32324 32333 (face font-lock-constant-face) 32333 32334 (face font-lock-string-face) 32334 32350 nil 32350 32351 (face font-lock-string-face) 32351 32357 (face font-lock-constant-face) 32357 32358 (face font-lock-string-face) 32358 32398 nil 32398 32399 (face font-lock-string-face) 32399 32406 (face font-lock-keyword-face) 32406 32407 (face font-lock-string-face) 32407 32423 nil 32423 32424 (face font-lock-string-face) 32424 32462 (face font-lock-constant-face) 32462 32463 (face font-lock-string-face) 32463 32477 nil 32477 32478 (face font-lock-string-face) 32478 32515 (face font-lock-constant-face) 32515 32516 (face font-lock-string-face) 32516 32530 nil 32530 32531 (face font-lock-string-face) 32531 32568 (face font-lock-constant-face) 32568 32569 (face font-lock-string-face) 32569 32583 nil 32583 32584 (face font-lock-string-face) 32584 32620 (face font-lock-constant-face) 32620 32621 (face font-lock-string-face) 32621 32635 nil 32635 32636 (face font-lock-string-face) 32636 32666 (face font-lock-constant-face) 32666 32667 (face font-lock-string-face) 32667 32681 nil 32681 32682 (face font-lock-string-face) 32682 32720 (face font-lock-constant-face) 32720 32721 (face font-lock-string-face) 32721 32735 nil 32735 32736 (face font-lock-string-face) 32736 32773 (face font-lock-constant-face) 32773 32774 (face font-lock-string-face) 32774 32822 nil 32822 32823 (face font-lock-string-face) 32823 32838 (face font-lock-variable-name-face) 32838 32839 (face font-lock-string-face) 32839 32849 nil 32849 32850 (face font-lock-string-face) 32850 32857 (face font-lock-keyword-face) 32857 32858 (face font-lock-string-face) 32858 32882 nil 32882 32883 (face font-lock-string-face) 32883 32894 (face 
font-lock-keyword-face) 32894 32895 (face font-lock-string-face) 32895 32897 nil 32897 32898 (face font-lock-string-face) 32898 32912 (face font-lock-function-name-face) 32912 32913 (face font-lock-string-face) 32913 32925 nil 32925 32926 (face font-lock-string-face) 32926 32930 (face font-lock-keyword-face) 32930 32931 (face font-lock-string-face) 32931 32933 nil 32933 32934 (face font-lock-string-face) 32934 32948 (face font-lock-type-face) 32948 32949 (face font-lock-string-face) 32949 32961 nil 32961 32962 (face font-lock-string-face) 32962 32969 (face font-lock-keyword-face) 32969 32970 (face font-lock-string-face) 32970 32986 nil 32986 32987 (face font-lock-string-face) 32987 33022 (face font-lock-constant-face) 33022 33023 (face font-lock-string-face) 33023 33037 nil 33037 33038 (face font-lock-string-face) 33038 33072 (face font-lock-constant-face) 33072 33073 (face font-lock-string-face) 33073 33098 nil 33098 33099 (face font-lock-string-face) 33099 33111 (face font-lock-keyword-face) 33111 33112 (face font-lock-string-face) 33112 33128 nil 33128 33129 (face font-lock-string-face) 33129 33150 (face font-lock-function-name-face) 33150 33151 (face font-lock-string-face) 33151 33176 nil 33176 33177 (face font-lock-string-face) 33177 33189 (face font-lock-keyword-face) 33189 33190 (face font-lock-string-face) 33190 33206 nil 33206 33207 (face font-lock-string-face) 33207 33209 (face font-lock-constant-face) 33209 33232 (face font-lock-variable-name-face) 33232 33239 (face font-lock-constant-face) 33239 33240 (face font-lock-string-face) 33240 33265 nil 33265 33266 (face font-lock-string-face) 33266 33273 (face font-lock-keyword-face) 33273 33274 (face font-lock-string-face) 33274 33306 nil 33306 33307 (face font-lock-string-face) 33307 33318 (face font-lock-keyword-face) 33318 33319 (face font-lock-string-face) 33319 33321 nil 33321 33322 (face font-lock-string-face) 33322 33342 (face font-lock-function-name-face) 33342 33343 (face font-lock-string-face) 33343 33359 nil 33359 33360 (face font-lock-string-face) 33360 33366 (face font-lock-keyword-face) 33366 33367 (face font-lock-string-face) 33367 33387 nil 33387 33388 (face font-lock-string-face) 33388 33434 (face font-lock-constant-face) 33434 33435 (face font-lock-string-face) 33435 33453 nil 33453 33454 (face font-lock-string-face) 33454 33519 (face font-lock-constant-face) 33519 33520 (face font-lock-string-face) 33520 33553 nil 33553 33554 (face font-lock-string-face) 33554 33561 (face font-lock-keyword-face) 33561 33562 (face font-lock-string-face) 33562 33582 nil 33582 33583 (face font-lock-string-face) 33583 33585 (face font-lock-constant-face) 33585 33608 (face font-lock-variable-name-face) 33608 33647 (face font-lock-constant-face) 33647 33648 (face font-lock-string-face) 33648 33681 nil 33681 33682 (face font-lock-string-face) 33682 33688 (face font-lock-keyword-face) 33688 33689 (face font-lock-string-face) 33689 33709 nil 33709 33710 (face font-lock-string-face) 33710 33716 (face font-lock-constant-face) 33716 33717 (face font-lock-string-face) 33717 33735 nil 33735 33736 (face font-lock-string-face) 33736 33738 (face font-lock-constant-face) 33738 33743 (face font-lock-variable-name-face) 33743 33788 (face font-lock-constant-face) 33788 33789 (face font-lock-string-face) 33789 33807 nil 33807 33808 (face font-lock-string-face) 33808 33810 (face font-lock-constant-face) 33810 33811 (face font-lock-string-face) 33811 33829 nil 33829 33830 (face font-lock-string-face) 33830 33833 (face font-lock-constant-face) 33833 33840 
(face font-lock-variable-name-face) 33840 33841 (face font-lock-constant-face) 33841 33842 (face font-lock-string-face) 33842 33860 nil 33860 33861 (face font-lock-string-face) 33861 33864 (face font-lock-constant-face) 33864 33872 (face font-lock-variable-name-face) 33872 33873 (face font-lock-constant-face) 33873 33874 (face font-lock-string-face) 33874 33952 nil 33952 33953 (face font-lock-string-face) 33953 33964 (face font-lock-keyword-face) 33964 33965 (face font-lock-string-face) 33965 33967 nil 33967 33968 (face font-lock-string-face) 33968 33978 (face font-lock-function-name-face) 33978 33979 (face font-lock-string-face) 33979 33991 nil 33991 33992 (face font-lock-string-face) 33992 33996 (face font-lock-keyword-face) 33996 33997 (face font-lock-string-face) 33997 33999 nil 33999 34000 (face font-lock-string-face) 34000 34004 (face font-lock-type-face) 34004 34005 (face font-lock-string-face) 34005 34017 nil 34017 34018 (face font-lock-string-face) 34018 34030 (face font-lock-keyword-face) 34030 34031 (face font-lock-string-face) 34031 34035 nil 34035 34036 (face font-lock-string-face) 34036 34062 (face font-lock-function-name-face) 34062 34063 (face font-lock-string-face) 34063 34077 nil 34077 34078 (face font-lock-string-face) 34078 34087 (face font-lock-keyword-face) 34087 34088 (face font-lock-string-face) 34088 34104 nil 34104 34105 (face font-lock-string-face) 34105 34117 (face font-lock-variable-name-face) 34117 34118 (face font-lock-string-face) 34118 34120 nil 34120 34121 (face font-lock-string-face) 34121 34126 (face font-lock-variable-name-face) 34126 34127 (face font-lock-string-face) 34127 34141 nil 34141 34142 (face font-lock-string-face) 34142 34153 (face font-lock-variable-name-face) 34153 34154 (face font-lock-string-face) 34154 34156 nil 34156 34157 (face font-lock-string-face) 34157 34174 (face font-lock-variable-name-face) 34174 34175 (face font-lock-string-face) 34175 34200 nil 34200 34201 (face font-lock-string-face) 34201 34209 (face font-lock-keyword-face) 34209 34210 (face font-lock-string-face) 34210 34214 nil 34214 34215 (face font-lock-string-face) 34215 34233 (face font-lock-constant-face) 34233 34234 (face font-lock-string-face) 34234 34268 nil 34268 34287 (face font-lock-comment-face) 34287 34293 nil 34293 34365 (face font-lock-comment-face) 34365 34371 nil 34371 34372 (face font-lock-string-face) 34372 34379 (face font-lock-keyword-face) 34379 34380 (face font-lock-string-face) 34380 34404 nil 34404 34405 (face font-lock-string-face) 34405 34416 (face font-lock-keyword-face) 34416 34417 (face font-lock-string-face) 34417 34419 nil 34419 34420 (face font-lock-string-face) 34420 34436 (face font-lock-function-name-face) 34436 34437 (face font-lock-string-face) 34437 34449 nil 34449 34450 (face font-lock-string-face) 34450 34454 (face font-lock-keyword-face) 34454 34455 (face font-lock-string-face) 34455 34457 nil 34457 34458 (face font-lock-string-face) 34458 34468 (face font-lock-type-face) 34468 34469 (face font-lock-string-face) 34469 34481 nil 34481 34482 (face font-lock-string-face) 34482 34494 (face font-lock-keyword-face) 34494 34495 (face font-lock-string-face) 34495 34511 nil 34511 34512 (face font-lock-string-face) 34512 34517 (face font-lock-function-name-face) 34517 34518 (face font-lock-string-face) 34518 34532 nil 34532 34533 (face font-lock-string-face) 34533 34551 (face font-lock-function-name-face) 34551 34552 (face font-lock-string-face) 34552 34566 nil 34566 34567 (face font-lock-string-face) 34567 34588 (face 
font-lock-function-name-face) 34588 34589 (face font-lock-string-face) 34589 34603 nil 34603 34604 (face font-lock-string-face) 34604 34630 (face font-lock-function-name-face) 34630 34631 (face font-lock-string-face) 34631 34645 nil 34645 34646 (face font-lock-string-face) 34646 34680 (face font-lock-function-name-face) 34680 34681 (face font-lock-string-face) 34681 34695 nil 34695 34696 (face font-lock-string-face) 34696 34730 (face font-lock-function-name-face) 34730 34731 (face font-lock-string-face) 34731 34745 nil 34745 34746 (face font-lock-string-face) 34746 34772 (face font-lock-function-name-face) 34772 34773 (face font-lock-string-face) 34773 34787 nil 34787 34788 (face font-lock-string-face) 34788 34827 (face font-lock-function-name-face) 34827 34828 (face font-lock-string-face) 34828 34853 nil 34853 34854 (face font-lock-string-face) 34854 34861 (face font-lock-keyword-face) 34861 34862 (face font-lock-string-face) 34862 34878 nil 34878 34879 (face font-lock-string-face) 34879 34904 (face font-lock-constant-face) 34904 34905 (face font-lock-string-face) 34905 34930 nil 34930 34931 (face font-lock-string-face) 34931 34941 (face font-lock-keyword-face) 34941 34942 (face font-lock-string-face) 34942 34959 nil 34959 34960 (face font-lock-string-face) 34960 34981 (face font-lock-variable-name-face) 34981 34982 (face font-lock-string-face) 34982 35000 nil 35000 35001 (face font-lock-string-face) 35001 35013 (face font-lock-keyword-face) 35013 35014 (face font-lock-string-face) 35014 35034 nil 35034 35077 (face font-lock-comment-face) 35077 35093 nil 35093 35123 (face font-lock-comment-face) 35123 35139 nil 35139 35164 (face font-lock-comment-face) 35164 35180 nil 35180 35194 (face font-lock-comment-face) 35194 35210 nil 35210 35211 (face font-lock-string-face) 35211 35240 (face font-lock-function-name-face) 35240 35241 (face font-lock-string-face) 35241 35274 nil 35274 35275 (face font-lock-string-face) 35275 35285 (face font-lock-keyword-face) 35285 35286 (face font-lock-string-face) 35286 35307 nil 35307 35308 (face font-lock-string-face) 35308 35329 (face font-lock-variable-name-face) 35329 35330 (face font-lock-string-face) 35330 35352 nil 35352 35353 (face font-lock-string-face) 35353 35365 (face font-lock-keyword-face) 35365 35366 (face font-lock-string-face) 35366 35390 nil 35390 35391 (face font-lock-string-face) 35391 35432 (face font-lock-function-name-face) 35432 35433 (face font-lock-string-face) 35433 35553 nil 35553 35554 (face font-lock-string-face) 35554 35565 (face font-lock-keyword-face) 35565 35566 (face font-lock-string-face) 35566 35568 nil 35568 35569 (face font-lock-string-face) 35569 35592 (face font-lock-function-name-face) 35592 35593 (face font-lock-string-face) 35593 35605 nil 35605 35606 (face font-lock-string-face) 35606 35610 (face font-lock-keyword-face) 35610 35611 (face font-lock-string-face) 35611 35613 nil 35613 35614 (face font-lock-string-face) 35614 35624 (face font-lock-type-face) 35624 35625 (face font-lock-string-face) 35625 35637 nil 35637 35638 (face font-lock-string-face) 35638 35650 (face font-lock-keyword-face) 35650 35651 (face font-lock-string-face) 35651 35667 nil 35667 35668 (face font-lock-string-face) 35668 35673 (face font-lock-function-name-face) 35673 35674 (face font-lock-string-face) 35674 35688 nil 35688 35689 (face font-lock-string-face) 35689 35707 (face font-lock-function-name-face) 35707 35708 (face font-lock-string-face) 35708 35722 nil 35722 35723 (face font-lock-string-face) 35723 35757 (face 
font-lock-function-name-face) 35757 35758 (face font-lock-string-face) 35758 35772 nil 35772 35773 (face font-lock-string-face) 35773 35799 (face font-lock-function-name-face) 35799 35800 (face font-lock-string-face) 35800 35814 nil 35814 35815 (face font-lock-string-face) 35815 35841 (face font-lock-function-name-face) 35841 35842 (face font-lock-string-face) 35842 35856 nil 35856 35857 (face font-lock-string-face) 35857 35896 (face font-lock-function-name-face) 35896 35897 (face font-lock-string-face) 35897 35922 nil 35922 35923 (face font-lock-string-face) 35923 35930 (face font-lock-keyword-face) 35930 35931 (face font-lock-string-face) 35931 35947 nil 35947 35948 (face font-lock-string-face) 35948 35970 (face font-lock-constant-face) 35970 35971 (face font-lock-string-face) 35971 35985 nil 35985 35986 (face font-lock-string-face) 35986 36011 (face font-lock-constant-face) 36011 36012 (face font-lock-string-face) 36012 36026 nil 36026 36027 (face font-lock-string-face) 36027 36060 (face font-lock-constant-face) 36060 36061 (face font-lock-string-face) 36061 36075 nil 36075 36076 (face font-lock-string-face) 36076 36117 (face font-lock-constant-face) 36117 36118 (face font-lock-string-face) 36118 36143 nil 36143 36144 (face font-lock-string-face) 36144 36154 (face font-lock-keyword-face) 36154 36155 (face font-lock-string-face) 36155 36172 nil 36172 36173 (face font-lock-string-face) 36173 36198 (face font-lock-variable-name-face) 36198 36199 (face font-lock-string-face) 36199 36217 nil 36217 36218 (face font-lock-string-face) 36218 36228 (face font-lock-keyword-face) 36228 36229 (face font-lock-string-face) 36229 36250 nil 36250 36251 (face font-lock-string-face) 36251 36272 (face font-lock-variable-name-face) 36272 36273 (face font-lock-string-face) 36273 36295 nil 36295 36296 (face font-lock-string-face) 36296 36308 (face font-lock-keyword-face) 36308 36309 (face font-lock-string-face) 36309 36333 nil 36333 36334 (face font-lock-string-face) 36334 36375 (face font-lock-function-name-face) 36375 36376 (face font-lock-string-face) 36376 36496 nil 36496 36497 (face font-lock-string-face) 36497 36508 (face font-lock-keyword-face) 36508 36509 (face font-lock-string-face) 36509 36511 nil 36511 36512 (face font-lock-string-face) 36512 36524 (face font-lock-function-name-face) 36524 36525 (face font-lock-string-face) 36525 36537 nil 36537 36538 (face font-lock-string-face) 36538 36542 (face font-lock-keyword-face) 36542 36543 (face font-lock-string-face) 36543 36545 nil 36545 36546 (face font-lock-string-face) 36546 36556 (face font-lock-type-face) 36556 36557 (face font-lock-string-face) 36557 36569 nil 36569 36570 (face font-lock-string-face) 36570 36582 (face font-lock-keyword-face) 36582 36583 (face font-lock-string-face) 36583 36599 nil 36599 36600 (face font-lock-string-face) 36600 36605 (face font-lock-function-name-face) 36605 36606 (face font-lock-string-face) 36606 36620 nil 36620 36621 (face font-lock-string-face) 36621 36642 (face font-lock-function-name-face) 36642 36643 (face font-lock-string-face) 36643 36657 nil 36657 36658 (face font-lock-string-face) 36658 36697 (face font-lock-function-name-face) 36697 36698 (face font-lock-string-face) 36698 36723 nil 36723 36724 (face font-lock-string-face) 36724 36731 (face font-lock-keyword-face) 36731 36732 (face font-lock-string-face) 36732 36748 nil 36748 36749 (face font-lock-string-face) 36749 36782 (face font-lock-constant-face) 36782 36783 (face font-lock-string-face) 36783 36829 nil 36829 36830 (face font-lock-string-face) 
36830 36841 (face font-lock-keyword-face) 36841 36842 (face font-lock-string-face) 36842 36844 nil 36844 36845 (face font-lock-string-face) 36845 36856 (face font-lock-function-name-face) 36856 36857 (face font-lock-string-face) 36857 36869 nil 36869 36870 (face font-lock-string-face) 36870 36874 (face font-lock-keyword-face) 36874 36875 (face font-lock-string-face) 36875 36877 nil 36877 36878 (face font-lock-string-face) 36878 36888 (face font-lock-type-face) 36888 36889 (face font-lock-string-face) 36889 36901 nil 36901 36902 (face font-lock-string-face) 36902 36914 (face font-lock-keyword-face) 36914 36915 (face font-lock-string-face) 36915 36931 nil 36931 36932 (face font-lock-string-face) 36932 36937 (face font-lock-function-name-face) 36937 36938 (face font-lock-string-face) 36938 36952 nil 36952 36953 (face font-lock-string-face) 36953 36974 (face font-lock-function-name-face) 36974 36975 (face font-lock-string-face) 36975 36989 nil 36989 36990 (face font-lock-string-face) 36990 37029 (face font-lock-function-name-face) 37029 37030 (face font-lock-string-face) 37030 37055 nil 37055 37056 (face font-lock-string-face) 37056 37063 (face font-lock-keyword-face) 37063 37064 (face font-lock-string-face) 37064 37080 nil 37080 37081 (face font-lock-string-face) 37081 37113 (face font-lock-constant-face) 37113 37114 (face font-lock-string-face) 37114 37163 nil)
diff --git a/third_party/python/gyp/tools/graphviz.py b/third_party/python/gyp/tools/graphviz.py
new file mode 100755
index 0000000000..538b059da4
--- /dev/null
+++ b/third_party/python/gyp/tools/graphviz.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Using the JSON dumped by the dump-dependency-json generator,
+generate input suitable for graphviz to render a dependency graph of
+targets."""
+
+from __future__ import print_function
+
+import collections
+import json
+import sys
+
+
+def ParseTarget(target):
+ target, _, suffix = target.partition('#')
+ filename, _, target = target.partition(':')
+ return filename, target, suffix
+
+
+def LoadEdges(filename, targets):
+ """Load the edges map from the dump file, and filter it to only
+ show targets in |targets| and their dependents."""
+
+ file = open(filename)
+ edges = json.load(file)
+ file.close()
+
+ # Copy out only the edges we're interested in from the full edge list.
+ target_edges = {}
+ to_visit = targets[:]
+ while to_visit:
+ src = to_visit.pop()
+ if src in target_edges:
+ continue
+ target_edges[src] = edges[src]
+ to_visit.extend(edges[src])
+
+ return target_edges
+
+
+def WriteGraph(edges):
+ """Print a graphviz graph to stdout.
+ |edges| is a map of target to a list of other targets it depends on."""
+
+ # Bucket targets by file.
+ files = collections.defaultdict(list)
+ for src, dst in edges.items():
+ build_file, target_name, toolset = ParseTarget(src)
+ files[build_file].append(src)
+
+ print('digraph D {')
+ print(' fontsize=8') # Used by subgraphs.
+ print(' node [fontsize=8]')
+
+ # Output nodes by file. We must first write out each node within
+ # its file grouping before writing out any edges that may refer
+ # to those nodes.
+ for filename, targets in files.items():
+ if len(targets) == 1:
+ # If there's only one node for this file, simplify
+ # the display by making it a box without an internal node.
+ target = targets[0]
+ build_file, target_name, toolset = ParseTarget(target)
+ print(' "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
+ target_name))
+ else:
+ # Group multiple nodes together in a subgraph.
+ print(' subgraph "cluster_%s" {' % filename)
+ print(' label = "%s"' % filename)
+ for target in targets:
+ build_file, target_name, toolset = ParseTarget(target)
+ print(' "%s" [label="%s"]' % (target, target_name))
+ print(' }')
+
+ # Now that we've placed all the nodes within subgraphs, output all
+ # the edges between nodes.
+ for src, dsts in edges.items():
+ for dst in dsts:
+ print(' "%s" -> "%s"' % (src, dst))
+
+ print('}')
+
+
+def main():
+ if len(sys.argv) < 2:
+ print(__doc__, file=sys.stderr)
+ print(file=sys.stderr)
+ print('usage: %s target1 target2...' % (sys.argv[0]), file=sys.stderr)
+ return 1
+
+ edges = LoadEdges('dump.json', sys.argv[1:])
+
+ WriteGraph(edges)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
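For orientation, a minimal sketch of the input this script consumes: the dump-dependency-json generator writes a dump.json that maps each target to the list of targets it depends on, and graphviz.py filters that map down to the targets named on the command line. The target names below are invented and the snippet is illustrative only:

    import json

    # A toy dump.json in the assumed format: target -> list of dependency targets.
    edges = {
        'base/base.gyp:base#target': ['base/base.gyp:base_static#target'],
        'base/base.gyp:base_static#target': [],
    }
    with open('dump.json', 'w') as f:
        json.dump(edges, f)

    # Then, e.g.:
    #   python graphviz.py "base/base.gyp:base#target" | dot -Tpng -o deps.png
    # (the dot invocation assumes graphviz is installed)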
diff --git a/third_party/python/gyp/tools/pretty_gyp.py b/third_party/python/gyp/tools/pretty_gyp.py
new file mode 100755
index 0000000000..5060d1d9e2
--- /dev/null
+++ b/third_party/python/gyp/tools/pretty_gyp.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Pretty-prints the contents of a GYP file."""
+
+from __future__ import print_function
+
+import sys
+import re
+
+
+# Regex to remove comments when we're counting braces.
+COMMENT_RE = re.compile(r'\s*#.*')
+
+# Regex to remove quoted strings when we're counting braces.
+# It takes into account quoted quotes, and makes sure that the quotes match.
+# NOTE: It does not handle quotes that span more than one line, or
+# cases where an escaped quote is preceded by an escaped backslash.
+QUOTE_RE_STR = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
+QUOTE_RE = re.compile(QUOTE_RE_STR)
+
+
+def comment_replace(matchobj):
+ return matchobj.group(1) + matchobj.group(2) + '#' * len(matchobj.group(3))
+
+
+def mask_comments(input):
+ """Mask the quoted strings so we skip braces inside quoted strings."""
+ search_re = re.compile(r'(.*?)(#)(.*)')
+ return [search_re.sub(comment_replace, line) for line in input]
+
+
+def quote_replace(matchobj):
+ return "%s%s%s%s" % (matchobj.group(1),
+ matchobj.group(2),
+ 'x'*len(matchobj.group(3)),
+ matchobj.group(2))
+
+
+def mask_quotes(input):
+ """Mask the quoted strings so we skip braces inside quoted strings."""
+ search_re = re.compile(r'(.*?)' + QUOTE_RE_STR)
+ return [search_re.sub(quote_replace, line) for line in input]
+
+
+def do_split(input, masked_input, search_re):
+ output = []
+ mask_output = []
+ for (line, masked_line) in zip(input, masked_input):
+ m = search_re.match(masked_line)
+ while m:
+ split = len(m.group(1))
+ line = line[:split] + r'\n' + line[split:]
+ masked_line = masked_line[:split] + r'\n' + masked_line[split:]
+ m = search_re.match(masked_line)
+ output.extend(line.split(r'\n'))
+ mask_output.extend(masked_line.split(r'\n'))
+ return (output, mask_output)
+
+
+def split_double_braces(input):
+ """Masks out the quotes and comments, and then splits appropriate
+ lines (lines that match the double_*_brace re's above) before
+ indenting them below.
+
+ These are used to split lines which have multiple braces on them, so
+ that the indentation looks prettier when all laid out (e.g. closing
+ braces make a nice diagonal line).
+ """
+ double_open_brace_re = re.compile(r'(.*?[\[\{\(,])(\s*)([\[\{\(])')
+ double_close_brace_re = re.compile(r'(.*?[\]\}\)],?)(\s*)([\]\}\)])')
+
+ masked_input = mask_quotes(input)
+ masked_input = mask_comments(masked_input)
+
+ (output, mask_output) = do_split(input, masked_input, double_open_brace_re)
+ (output, mask_output) = do_split(output, mask_output, double_close_brace_re)
+
+ return output
+
+
+def count_braces(line):
+ """keeps track of the number of braces on a given line and returns the result.
+
+ It starts at zero and subtracts for closed braces, and adds for open braces.
+ """
+ open_braces = ['[', '(', '{']
+ close_braces = [']', ')', '}']
+ closing_prefix_re = re.compile(r'(.*?[^\s\]\}\)]+.*?)([\]\}\)],?)\s*$')
+ cnt = 0
+ stripline = COMMENT_RE.sub(r'', line)
+ stripline = QUOTE_RE.sub(r"''", stripline)
+ for char in stripline:
+ for brace in open_braces:
+ if char == brace:
+ cnt += 1
+ for brace in close_braces:
+ if char == brace:
+ cnt -= 1
+
+ after = False
+ if cnt > 0:
+ after = True
+
+ # This catches the special case of a closing brace having something
+ # other than just whitespace ahead of it -- we don't want to
+ # unindent that until after this line is printed so it stays with
+ # the previous indentation level.
+ if cnt < 0 and closing_prefix_re.match(stripline):
+ after = True
+ return (cnt, after)
+
+
+def prettyprint_input(lines):
+ """Does the main work of indenting the input based on the brace counts."""
+ indent = 0
+ basic_offset = 2
+ last_line = ""
+ for line in lines:
+ line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
+ if len(line) > 0:
+ brace_diff = 0
+ if not COMMENT_RE.match(line):
+ (brace_diff, after) = count_braces(line)
+ if brace_diff != 0:
+ if after:
+ print(" " * (basic_offset * indent) + line)
+ indent += brace_diff
+ else:
+ indent += brace_diff
+ print(" " * (basic_offset * indent) + line)
+ else:
+ print(" " * (basic_offset * indent) + line)
+ else:
+ print("")
+ last_line = line
+
+
+def main():
+ if len(sys.argv) > 1:
+ data = open(sys.argv[1]).read().splitlines()
+ else:
+ data = sys.stdin.read().splitlines()
+ # Split up the double braces.
+ lines = split_double_braces(data)
+
+ # Indent and print the output.
+ prettyprint_input(lines)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
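To make the masking step concrete, a small illustrative sketch of how QUOTE_RE (defined above) hides braces that live inside quoted strings so that count_braces only sees structural braces; the sample line is invented:

    import re

    QUOTE_RE = re.compile(r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)')

    line = "  'sources': ['a.cc', 'b{odd}.cc'],"
    masked = QUOTE_RE.sub(r"''", line)
    print(masked)          # ->   '': ['', ''],
    print('{' in masked)   # -> False: the brace inside the quoted name is hidden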
diff --git a/third_party/python/gyp/tools/pretty_sln.py b/third_party/python/gyp/tools/pretty_sln.py
new file mode 100755
index 0000000000..12a6dadd17
--- /dev/null
+++ b/third_party/python/gyp/tools/pretty_sln.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints the information in a sln file in a diffable way.
+
+ It first outputs each project in alphabetical order with their
+ dependencies.
+
+ Then it outputs a possible build order.
+"""
+
+from __future__ import print_function
+
+__author__ = 'nsylvain (Nicolas Sylvain)'
+
+import os
+import re
+import sys
+import pretty_vcproj
+
+def BuildProject(project, built, projects, deps):
+ # If all dependencies are already built, we can build this project; otherwise
+ # we build each missing dependency first.
+ # This is not infinite-recursion proof.
+ for dep in deps[project]:
+ if dep not in built:
+ BuildProject(dep, built, projects, deps)
+ print(project)
+ built.append(project)
+
+def ParseSolution(solution_file):
+ # All projects, their clsid and paths.
+ projects = dict()
+
+ # A list of dependencies associated with a project.
+ dependencies = dict()
+
+ # Regular expressions that match the SLN format.
+ # The first line of a project definition.
+ begin_project = re.compile(r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
+ r'}"\) = "(.*)", "(.*)", "(.*)"$')
+ # The last line of a project definition.
+ end_project = re.compile('^EndProject$')
+ # The first line of a dependency list.
+ begin_dep = re.compile(
+ r'ProjectSection\(ProjectDependencies\) = postProject$')
+ # The last line of a dependency list.
+ end_dep = re.compile('EndProjectSection$')
+ # A line describing a dependency.
+ dep_line = re.compile(' *({.*}) = ({.*})$')
+
+ in_deps = False
+ solution = open(solution_file)
+ for line in solution:
+ results = begin_project.search(line)
+ if results:
+ # Hack to remove icu because the diff is too different.
+ if results.group(1).find('icu') != -1:
+ continue
+ # We remove "_gyp" from the names because it helps to diff them.
+ current_project = results.group(1).replace('_gyp', '')
+ projects[current_project] = [results.group(2).replace('_gyp', ''),
+ results.group(3),
+ results.group(2)]
+ dependencies[current_project] = []
+ continue
+
+ results = end_project.search(line)
+ if results:
+ current_project = None
+ continue
+
+ results = begin_dep.search(line)
+ if results:
+ in_deps = True
+ continue
+
+ results = end_dep.search(line)
+ if results:
+ in_deps = False
+ continue
+
+ results = dep_line.search(line)
+ if results and in_deps and current_project:
+ dependencies[current_project].append(results.group(1))
+ continue
+
+ # Change all dependency clsids to project names instead.
+ for project in dependencies:
+ # For each dependency in this project
+ new_dep_array = []
+ for dep in dependencies[project]:
+ # Look for the project name matching this clsid
+ for project_info in projects:
+ if projects[project_info][1] == dep:
+ new_dep_array.append(project_info)
+ dependencies[project] = sorted(new_dep_array)
+
+ return (projects, dependencies)
+
+def PrintDependencies(projects, deps):
+ print("---------------------------------------")
+ print("Dependencies for all projects")
+ print("---------------------------------------")
+ print("-- --")
+
+ for (project, dep_list) in sorted(deps.items()):
+ print("Project : %s" % project)
+ print("Path : %s" % projects[project][0])
+ if dep_list:
+ for dep in dep_list:
+ print(" - %s" % dep)
+ print("")
+
+ print("-- --")
+
+def PrintBuildOrder(projects, deps):
+ print("---------------------------------------")
+ print("Build order ")
+ print("---------------------------------------")
+ print("-- --")
+
+ built = []
+ for (project, _) in sorted(deps.items()):
+ if project not in built:
+ BuildProject(project, built, projects, deps)
+
+ print("-- --")
+
+def PrintVCProj(projects):
+
+ for project in projects:
+ print("-------------------------------------")
+ print("-------------------------------------")
+ print(project)
+ print(project)
+ print(project)
+ print("-------------------------------------")
+ print("-------------------------------------")
+
+ project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
+ projects[project][2]))
+
+ pretty = pretty_vcproj
+ argv = [ '',
+ project_path,
+ '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
+ ]
+ argv.extend(sys.argv[3:])
+ pretty.main(argv)
+
+def main():
+ # Check that we were given at least one parameter: the sln file path.
+ if len(sys.argv) < 2:
+ print('Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0])
+ return 1
+
+ (projects, deps) = ParseSolution(sys.argv[1])
+ PrintDependencies(projects, deps)
+ PrintBuildOrder(projects, deps)
+
+ if '--recursive' in sys.argv:
+ PrintVCProj(projects)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
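As a quick illustration of what the begin_project regular expression above pulls out of a solution file, here is an invented Project line run through the same pattern (the project name, path and GUIDs are made up):

    import re

    begin_project = re.compile(r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
                               r'}"\) = "(.*)", "(.*)", "(.*)"$')

    line = ('Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = '
            '"base_gyp", "base\\base.vcproj", "{11111111-2222-3333-4444-555555555555}"')
    m = begin_project.search(line)
    # group(1): project name, group(2): vcproj path, group(3): project GUID.
    print(m.group(1), m.group(2), m.group(3))
    # -> base_gyp base\base.vcproj {11111111-2222-3333-4444-555555555555}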
diff --git a/third_party/python/gyp/tools/pretty_vcproj.py b/third_party/python/gyp/tools/pretty_vcproj.py
new file mode 100755
index 0000000000..4454d9b2b9
--- /dev/null
+++ b/third_party/python/gyp/tools/pretty_vcproj.py
@@ -0,0 +1,337 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Make the format of a vcproj really pretty.
+
+ This script normalizes and sorts an xml. It also fetches all the properties
+ inside linked vsprops and includes them explicitly in the vcproj.
+
+ It outputs the resulting xml to stdout.
+"""
+
+from __future__ import print_function
+
+__author__ = 'nsylvain (Nicolas Sylvain)'
+
+import os
+import sys
+
+from xml.dom.minidom import parse
+from xml.dom.minidom import Node
+
+try:
+ # cmp was removed in python3.
+ cmp
+except NameError:
+ def cmp(a, b):
+ return (a > b) - (a < b)
+
+REPLACEMENTS = dict()
+ARGUMENTS = None
+
+
+class CmpTuple(object):
+ """Compare function between 2 tuple."""
+ def __call__(self, x, y):
+ return cmp(x[0], y[0])
+
+
+class CmpNode(object):
+ """Compare function between 2 xml nodes."""
+
+ def __call__(self, x, y):
+ def get_string(node):
+ node_string = "node"
+ node_string += node.nodeName
+ if node.nodeValue:
+ node_string += node.nodeValue
+
+ if node.attributes:
+ # We first sort by name, if present.
+ node_string += node.getAttribute("Name")
+
+ all_nodes = []
+ for (name, value) in node.attributes.items():
+ all_nodes.append((name, value))
+
+ all_nodes.sort(CmpTuple())
+ for (name, value) in all_nodes:
+ node_string += name
+ node_string += value
+
+ return node_string
+
+ return cmp(get_string(x), get_string(y))
+
+
+def PrettyPrintNode(node, indent=0):
+ if node.nodeType == Node.TEXT_NODE:
+ if node.data.strip():
+ print('%s%s' % (' '*indent, node.data.strip()))
+ return
+
+ if node.childNodes:
+ node.normalize()
+ # Get the number of attributes
+ attr_count = 0
+ if node.attributes:
+ attr_count = node.attributes.length
+
+ # Print the main tag
+ if attr_count == 0:
+ print('%s<%s>' % (' '*indent, node.nodeName))
+ else:
+ print('%s<%s' % (' '*indent, node.nodeName))
+
+ all_attributes = []
+ for (name, value) in node.attributes.items():
+ all_attributes.append((name, value))
+ all_attributes.sort(key=(lambda attr: attr[0]))
+ for (name, value) in all_attributes:
+ print('%s %s="%s"' % (' '*indent, name, value))
+ print('%s>' % (' '*indent))
+ if node.nodeValue:
+ print('%s %s' % (' '*indent, node.nodeValue))
+
+ for sub_node in node.childNodes:
+ PrettyPrintNode(sub_node, indent=indent+2)
+ print('%s</%s>' % (' '*indent, node.nodeName))
+
+
+def FlattenFilter(node):
+ """Returns a list of all the node and sub nodes."""
+ node_list = []
+
+ if (node.attributes and
+ node.getAttribute('Name') == '_excluded_files'):
+ # We don't add the "_excluded_files" filter.
+ return []
+
+ for current in node.childNodes:
+ if current.nodeName == 'Filter':
+ node_list.extend(FlattenFilter(current))
+ else:
+ node_list.append(current)
+
+ return node_list
+
+
+def FixFilenames(filenames, current_directory):
+ new_list = []
+ for filename in filenames:
+ if filename:
+ for key in REPLACEMENTS:
+ filename = filename.replace(key, REPLACEMENTS[key])
+ os.chdir(current_directory)
+ filename = filename.strip('"\' ')
+ if filename.startswith('$'):
+ new_list.append(filename)
+ else:
+ new_list.append(os.path.abspath(filename))
+ return new_list
+
+
+def AbsoluteNode(node):
+ """Makes all the properties we know about in this node absolute."""
+ if node.attributes:
+ for (name, value) in node.attributes.items():
+ if name in ['InheritedPropertySheets', 'RelativePath',
+ 'AdditionalIncludeDirectories',
+ 'IntermediateDirectory', 'OutputDirectory',
+ 'AdditionalLibraryDirectories']:
+ # We want to fix up these paths
+ path_list = value.split(';')
+ new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
+ node.setAttribute(name, ';'.join(new_list))
+ if not value:
+ node.removeAttribute(name)
+
+
+def CleanupVcproj(node):
+ """For each sub node, we call recursively this function."""
+ for sub_node in node.childNodes:
+ AbsoluteNode(sub_node)
+ CleanupVcproj(sub_node)
+
+ # Normalize the node, and remove all extraneous whitespace.
+ for sub_node in node.childNodes:
+ if sub_node.nodeType == Node.TEXT_NODE:
+ sub_node.data = sub_node.data.replace("\r", "")
+ sub_node.data = sub_node.data.replace("\n", "")
+ sub_node.data = sub_node.data.rstrip()
+
+ # Sort each semicolon-separated attribute value and remove duplicate entries.
+ if node.attributes:
+ for (name, value) in node.attributes.items():
+ sorted_list = sorted(value.split(';'))
+ unique_list = []
+ for i in sorted_list:
+ if not unique_list.count(i):
+ unique_list.append(i)
+ node.setAttribute(name, ';'.join(unique_list))
+ if not value:
+ node.removeAttribute(name)
+
+ if node.childNodes:
+ node.normalize()
+
+ # For each node, take a copy, and remove it from the list.
+ node_array = []
+ while node.childNodes and node.childNodes[0]:
+ # Take a copy of the node and remove it from the list.
+ current = node.childNodes[0]
+ node.removeChild(current)
+
+ # If the child is a filter, we want to append all its children
+ # to this same list.
+ if current.nodeName == 'Filter':
+ node_array.extend(FlattenFilter(current))
+ else:
+ node_array.append(current)
+
+
+ # Sort the list.
+ node_array.sort(CmpNode())
+
+ # Insert the nodes in the correct order.
+ for new_node in node_array:
+ # But don't append empty tool node.
+ if new_node.nodeName == 'Tool':
+ if new_node.attributes and new_node.attributes.length == 1:
+ # This one was empty.
+ continue
+ if new_node.nodeName == 'UserMacro':
+ continue
+ node.appendChild(new_node)
+
+
+def GetConfiguationNodes(vcproj):
+ #TODO(nsylvain): Find a better way to navigate the xml.
+ nodes = []
+ for node in vcproj.childNodes:
+ if node.nodeName == "Configurations":
+ for sub_node in node.childNodes:
+ if sub_node.nodeName == "Configuration":
+ nodes.append(sub_node)
+
+ return nodes
+
+
+def GetChildrenVsprops(filename):
+ dom = parse(filename)
+ if dom.documentElement.attributes:
+ vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
+ return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
+ return []
+
+def SeekToNode(node1, child2):
+ # A text node does not have properties.
+ if child2.nodeType == Node.TEXT_NODE:
+ return None
+
+ # Get the name of the current node.
+ current_name = child2.getAttribute("Name")
+ if not current_name:
+ # There is no name. We don't know how to merge.
+ return None
+
+ # Look through all the nodes to find a match.
+ for sub_node in node1.childNodes:
+ if sub_node.nodeName == child2.nodeName:
+ name = sub_node.getAttribute("Name")
+ if name == current_name:
+ return sub_node
+
+ # No match. We give up.
+ return None
+
+
+def MergeAttributes(node1, node2):
+ # No attributes to merge?
+ if not node2.attributes:
+ return
+
+ for (name, value2) in node2.attributes.items():
+ # Don't merge the 'Name' attribute.
+ if name == 'Name':
+ continue
+ value1 = node1.getAttribute(name)
+ if value1:
+ # The attribute exists in the main node. If it's equal, we leave it
+ # untouched, otherwise we concatenate the two values.
+ if value1 != value2:
+ node1.setAttribute(name, ';'.join([value1, value2]))
+ else:
+ # The attribute does not exist in the main node. We append this one.
+ node1.setAttribute(name, value2)
+
+ # If the attribute was a property sheet attribute, we remove it, since
+ # it is useless.
+ if name == 'InheritedPropertySheets':
+ node1.removeAttribute(name)
+
+
+def MergeProperties(node1, node2):
+ MergeAttributes(node1, node2)
+ for child2 in node2.childNodes:
+ child1 = SeekToNode(node1, child2)
+ if child1:
+ MergeProperties(child1, child2)
+ else:
+ node1.appendChild(child2.cloneNode(True))
+
+
+def main(argv):
+ """Main function of this vcproj prettifier."""
+ global ARGUMENTS
+ ARGUMENTS = argv
+
+ # Check that we were given at least one parameter: the vcproj file path.
+ if len(argv) < 2:
+ print('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
+ '[key2=value2]' % argv[0])
+ return 1
+
+ # Parse the keys
+ for i in range(2, len(argv)):
+ (key, value) = argv[i].split('=')
+ REPLACEMENTS[key] = value
+
+ # Open the vcproj and parse the xml.
+ dom = parse(argv[1])
+
+ # First thing we need to do is find the Configuration nodes and merge each
+ # of them with the vsprops it includes.
+ for configuration_node in GetConfiguationNodes(dom.documentElement):
+ # Get the property sheets associated with this configuration.
+ vsprops = configuration_node.getAttribute('InheritedPropertySheets')
+
+ # Fix the filenames to be absolute.
+ vsprops_list = FixFilenames(vsprops.strip().split(';'),
+ os.path.dirname(argv[1]))
+
+ # Extend the list of vsprops with all vsprops contained in the current
+ # vsprops.
+ for current_vsprops in vsprops_list:
+ vsprops_list.extend(GetChildrenVsprops(current_vsprops))
+
+ # Now that we have all the vsprops, we need to merge them.
+ for current_vsprops in vsprops_list:
+ MergeProperties(configuration_node,
+ parse(current_vsprops).documentElement)
+
+ # Now that everything is merged, we need to cleanup the xml.
+ CleanupVcproj(dom.documentElement)
+
+ # Finally, we use the pretty xml function to print the vcproj back to the
+ # user.
+ PrettyPrintNode(dom.documentElement)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
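For orientation, a small sketch of the attribute-merging rule implemented by MergeAttributes above: matching values are left alone, differing values are joined with ';', attributes missing from the first node are copied over, and InheritedPropertySheets is dropped. The Tool attributes are invented, and the sketch assumes pretty_vcproj.py is importable from the current directory:

    from xml.dom.minidom import parseString
    import pretty_vcproj

    node1 = parseString(
        '<Tool Name="VCCLCompilerTool" PreprocessorDefinitions="FOO"/>').documentElement
    node2 = parseString(
        '<Tool Name="VCCLCompilerTool" PreprocessorDefinitions="BAR" WarningLevel="4"/>').documentElement

    pretty_vcproj.MergeAttributes(node1, node2)
    print(node1.toxml())
    # e.g. <Tool Name="VCCLCompilerTool" PreprocessorDefinitions="FOO;BAR" WarningLevel="4"/>
    # (attribute order in the output may vary by Python version)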
diff --git a/third_party/python/importlib_metadata/.gitlab-ci.yml b/third_party/python/importlib_metadata/.gitlab-ci.yml
new file mode 100644
index 0000000000..a8d1bd28f5
--- /dev/null
+++ b/third_party/python/importlib_metadata/.gitlab-ci.yml
@@ -0,0 +1,50 @@
+image: quay.io/python-devs/ci-image
+
+stages:
+ - test
+ - qa
+ - docs
+ - codecov
+ - deploy
+
+qa:
+ script:
+ - tox -e qa
+
+tests:
+ script:
+ - tox -e py27,py35,py36,py37,py38
+
+coverage:
+ script:
+ - tox -e py27-cov,py35-cov,py36-cov,py37-cov,py38-cov
+ artifacts:
+ paths:
+ - coverage.xml
+
+benchmark:
+ script:
+ - tox -e perf
+
+diffcov:
+ script:
+ - tox -e py27-diffcov,py35-diffcov,py36-diffcov,py37-diffcov,py38-diffcov
+
+docs:
+ script:
+ - tox -e docs
+
+codecov:
+ stage: codecov
+ dependencies:
+ - coverage
+ script:
+ - codecov
+ when: on_success
+
+release:
+ stage: deploy
+ only:
+ - /^v\d+\.\d+(\.\d+)?([abc]\d*)?$/
+ script:
+ - tox -e release
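The release job above only fires for tags matching the version pattern in its only clause. A small sketch, using the equivalent Python regex and invented tag names, of which tags that pattern accepts:

    import re

    TAG_RE = re.compile(r'^v\d+\.\d+(\.\d+)?([abc]\d*)?$')

    for tag in ('v1.7.0', 'v0.23', 'v1.0.0b1', 'release-1.7', 'v1'):
        print(tag, bool(TAG_RE.match(tag)))
    # v1.7.0 True, v0.23 True, v1.0.0b1 True, release-1.7 False, v1 False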
diff --git a/third_party/python/importlib_metadata/.readthedocs.yml b/third_party/python/importlib_metadata/.readthedocs.yml
new file mode 100644
index 0000000000..8ae4468428
--- /dev/null
+++ b/third_party/python/importlib_metadata/.readthedocs.yml
@@ -0,0 +1,5 @@
+python:
+ version: 3
+ extra_requirements:
+ - docs
+ pip_install: true
diff --git a/third_party/python/importlib_metadata/LICENSE b/third_party/python/importlib_metadata/LICENSE
new file mode 100644
index 0000000000..be7e092b0b
--- /dev/null
+++ b/third_party/python/importlib_metadata/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2017-2019 Jason R. Coombs, Barry Warsaw
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/python/importlib_metadata/MANIFEST.in b/third_party/python/importlib_metadata/MANIFEST.in
new file mode 100644
index 0000000000..3fcf6d633a
--- /dev/null
+++ b/third_party/python/importlib_metadata/MANIFEST.in
@@ -0,0 +1,5 @@
+include *.py MANIFEST.in LICENSE README.rst
+global-include *.txt *.rst *.ini *.cfg *.toml *.whl *.egg
+exclude .gitignore
+prune build
+prune .tox
diff --git a/third_party/python/importlib_metadata/PKG-INFO b/third_party/python/importlib_metadata/PKG-INFO
new file mode 100644
index 0000000000..c5048ce87d
--- /dev/null
+++ b/third_party/python/importlib_metadata/PKG-INFO
@@ -0,0 +1,55 @@
+Metadata-Version: 2.1
+Name: importlib_metadata
+Version: 1.7.0
+Summary: Read metadata from Python packages
+Home-page: http://importlib-metadata.readthedocs.io/
+Author: Barry Warsaw
+Author-email: barry@python.org
+License: Apache Software License
+Description: =========================
+ ``importlib_metadata``
+ =========================
+
+ ``importlib_metadata`` is a library to access the metadata for a Python
+ package. It is intended to be ported to Python 3.8.
+
+
+ Usage
+ =====
+
+ See the `online documentation <https://importlib_metadata.readthedocs.io/>`_
+ for usage details.
+
+ `Finder authors
+ <https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can
+ also add support for custom package installers. See the above documentation
+ for details.
+
+
+ Caveats
+ =======
+
+ This project primarily supports third-party packages installed by PyPA
+ tools (or other conforming packages). It does not support:
+
+ - Packages in the stdlib.
+ - Packages installed without metadata.
+
+ Project details
+ ===============
+
+ * Project home: https://gitlab.com/python-devs/importlib_metadata
+ * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues
+ * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git
+ * Documentation: http://importlib_metadata.readthedocs.io/
+
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 2
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7
+Provides-Extra: testing
+Provides-Extra: docs
diff --git a/third_party/python/importlib_metadata/README.rst b/third_party/python/importlib_metadata/README.rst
new file mode 100644
index 0000000000..2bdd4b8a61
--- /dev/null
+++ b/third_party/python/importlib_metadata/README.rst
@@ -0,0 +1,36 @@
+=========================
+ ``importlib_metadata``
+=========================
+
+``importlib_metadata`` is a library to access the metadata for a Python
+package. It is intended to be ported to Python 3.8.
+
+
+Usage
+=====
+
+See the `online documentation <https://importlib_metadata.readthedocs.io/>`_
+for usage details.
+
+`Finder authors
+<https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can
+also add support for custom package installers. See the above documentation
+for details.
+
+
+Caveats
+=======
+
+This project primarily supports third-party packages installed by PyPA
+tools (or other conforming packages). It does not support:
+
+- Packages in the stdlib.
+- Packages installed without metadata.
+
+Project details
+===============
+
+ * Project home: https://gitlab.com/python-devs/importlib_metadata
+ * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues
+ * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git
+ * Documentation: http://importlib_metadata.readthedocs.io/
diff --git a/third_party/python/importlib_metadata/codecov.yml b/third_party/python/importlib_metadata/codecov.yml
new file mode 100644
index 0000000000..66c7f4bd19
--- /dev/null
+++ b/third_party/python/importlib_metadata/codecov.yml
@@ -0,0 +1,2 @@
+codecov:
+ token: 5eb1bc45-1b7f-43e6-8bc1-f2b02833dba9
diff --git a/third_party/python/importlib_metadata/coverage.ini b/third_party/python/importlib_metadata/coverage.ini
new file mode 100644
index 0000000000..b4d3102f42
--- /dev/null
+++ b/third_party/python/importlib_metadata/coverage.ini
@@ -0,0 +1,24 @@
+[run]
+branch = true
+parallel = true
+omit =
+ setup*
+ .tox/*/lib/python*
+ */tests/*.py
+ */testing/*.py
+ /usr/local/*
+ */mod.py
+plugins =
+ coverplug
+
+[report]
+exclude_lines =
+ pragma: nocover
+ raise NotImplementedError
+ raise AssertionError
+ assert\s
+ nocoverpy${PYV}
+
+[paths]
+source =
+ importlib_metadata
diff --git a/third_party/python/importlib_metadata/coverplug.py b/third_party/python/importlib_metadata/coverplug.py
new file mode 100644
index 0000000000..0b0c7cb549
--- /dev/null
+++ b/third_party/python/importlib_metadata/coverplug.py
@@ -0,0 +1,21 @@
+"""Coverage plugin to add exclude lines based on the Python version."""
+
+import sys
+
+from coverage import CoveragePlugin
+
+
+class MyConfigPlugin(CoveragePlugin):
+ def configure(self, config):
+ opt_name = 'report:exclude_lines'
+ exclude_lines = config.get_option(opt_name)
+ # Python >= 3.6 has os.PathLike.
+ if sys.version_info >= (3, 6):
+ exclude_lines.append('pragma: >=36')
+ else:
+ exclude_lines.append('pragma: <=35')
+ config.set_option(opt_name, exclude_lines)
+
+
+def coverage_init(reg, options):
+ reg.add_configurer(MyConfigPlugin())
diff --git a/third_party/python/importlib_metadata/importlib_metadata.egg-info/PKG-INFO b/third_party/python/importlib_metadata/importlib_metadata.egg-info/PKG-INFO
new file mode 100644
index 0000000000..c7f923c23a
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata.egg-info/PKG-INFO
@@ -0,0 +1,55 @@
+Metadata-Version: 2.1
+Name: importlib-metadata
+Version: 1.7.0
+Summary: Read metadata from Python packages
+Home-page: http://importlib-metadata.readthedocs.io/
+Author: Barry Warsaw
+Author-email: barry@python.org
+License: Apache Software License
+Description: =========================
+ ``importlib_metadata``
+ =========================
+
+ ``importlib_metadata`` is a library to access the metadata for a Python
+ package. It is intended to be ported to Python 3.8.
+
+
+ Usage
+ =====
+
+ See the `online documentation <https://importlib_metadata.readthedocs.io/>`_
+ for usage details.
+
+ `Finder authors
+ <https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can
+ also add support for custom package installers. See the above documentation
+ for details.
+
+
+ Caveats
+ =======
+
+ This project primarily supports third-party packages installed by PyPA
+ tools (or other conforming packages). It does not support:
+
+ - Packages in the stdlib.
+ - Packages installed without metadata.
+
+ Project details
+ ===============
+
+ * Project home: https://gitlab.com/python-devs/importlib_metadata
+ * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues
+ * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git
+ * Documentation: http://importlib_metadata.readthedocs.io/
+
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 2
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7
+Provides-Extra: testing
+Provides-Extra: docs
diff --git a/third_party/python/importlib_metadata/importlib_metadata.egg-info/SOURCES.txt b/third_party/python/importlib_metadata/importlib_metadata.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..ab3ccdda35
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata.egg-info/SOURCES.txt
@@ -0,0 +1,35 @@
+.gitlab-ci.yml
+.readthedocs.yml
+LICENSE
+MANIFEST.in
+README.rst
+codecov.yml
+coverage.ini
+coverplug.py
+pyproject.toml
+setup.cfg
+setup.py
+tox.ini
+importlib_metadata/__init__.py
+importlib_metadata/_compat.py
+importlib_metadata.egg-info/PKG-INFO
+importlib_metadata.egg-info/SOURCES.txt
+importlib_metadata.egg-info/dependency_links.txt
+importlib_metadata.egg-info/requires.txt
+importlib_metadata.egg-info/top_level.txt
+importlib_metadata/docs/__init__.py
+importlib_metadata/docs/changelog.rst
+importlib_metadata/docs/conf.py
+importlib_metadata/docs/index.rst
+importlib_metadata/docs/using.rst
+importlib_metadata/tests/__init__.py
+importlib_metadata/tests/fixtures.py
+importlib_metadata/tests/test_api.py
+importlib_metadata/tests/test_integration.py
+importlib_metadata/tests/test_main.py
+importlib_metadata/tests/test_zip.py
+importlib_metadata/tests/data/__init__.py
+importlib_metadata/tests/data/example-21.12-py3-none-any.whl
+importlib_metadata/tests/data/example-21.12-py3.6.egg
+prepare/example/setup.py
+prepare/example/example/__init__.py \ No newline at end of file
diff --git a/third_party/python/importlib_metadata/importlib_metadata.egg-info/dependency_links.txt b/third_party/python/importlib_metadata/importlib_metadata.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/third_party/python/importlib_metadata/importlib_metadata.egg-info/requires.txt b/third_party/python/importlib_metadata/importlib_metadata.egg-info/requires.txt
new file mode 100644
index 0000000000..e0129a6686
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata.egg-info/requires.txt
@@ -0,0 +1,17 @@
+zipp>=0.5
+
+[:python_version < "3"]
+pathlib2
+contextlib2
+configparser>=3.5
+
+[docs]
+sphinx
+rst.linker
+
+[testing]
+packaging
+pep517
+
+[testing:python_version < "3.9"]
+importlib_resources>=1.3
diff --git a/third_party/python/importlib_metadata/importlib_metadata.egg-info/top_level.txt b/third_party/python/importlib_metadata/importlib_metadata.egg-info/top_level.txt
new file mode 100644
index 0000000000..bbb07547a1
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata.egg-info/top_level.txt
@@ -0,0 +1 @@
+importlib_metadata
diff --git a/third_party/python/importlib_metadata/importlib_metadata/__init__.py b/third_party/python/importlib_metadata/importlib_metadata/__init__.py
new file mode 100644
index 0000000000..b01e7e36da
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/__init__.py
@@ -0,0 +1,623 @@
+from __future__ import unicode_literals, absolute_import
+
+import io
+import os
+import re
+import abc
+import csv
+import sys
+import zipp
+import operator
+import functools
+import itertools
+import posixpath
+import collections
+
+from ._compat import (
+ install,
+ NullFinder,
+ ConfigParser,
+ suppress,
+ map,
+ FileNotFoundError,
+ IsADirectoryError,
+ NotADirectoryError,
+ PermissionError,
+ pathlib,
+ ModuleNotFoundError,
+ MetaPathFinder,
+ email_message_from_string,
+ PyPy_repr,
+ unique_ordered,
+ str,
+ )
+from importlib import import_module
+from itertools import starmap
+
+
+__metaclass__ = type
+
+
+__all__ = [
+ 'Distribution',
+ 'DistributionFinder',
+ 'PackageNotFoundError',
+ 'distribution',
+ 'distributions',
+ 'entry_points',
+ 'files',
+ 'metadata',
+ 'requires',
+ 'version',
+ ]
+
+
+class PackageNotFoundError(ModuleNotFoundError):
+ """The package was not found."""
+
+ def __str__(self):
+ tmpl = "No package metadata was found for {self.name}"
+ return tmpl.format(**locals())
+
+ @property
+ def name(self):
+ name, = self.args
+ return name
+
+
+class EntryPoint(
+ PyPy_repr,
+ collections.namedtuple('EntryPointBase', 'name value group')):
+ """An entry point as defined by Python packaging conventions.
+
+ See `the packaging docs on entry points
+ <https://packaging.python.org/specifications/entry-points/>`_
+ for more information.
+ """
+
+ pattern = re.compile(
+ r'(?P<module>[\w.]+)\s*'
+ r'(:\s*(?P<attr>[\w.]+))?\s*'
+ r'(?P<extras>\[.*\])?\s*$'
+ )
+ """
+ A regular expression describing the syntax for an entry point,
+ which might look like:
+
+ - module
+ - package.module
+ - package.module:attribute
+ - package.module:object.attribute
+ - package.module:attr [extra1, extra2]
+
+ Other combinations are possible as well.
+
+ The expression is lenient about whitespace around the ':',
+ following the attr, and following any extras.
+ """
+
+ def load(self):
+ """Load the entry point from its definition. If only a module
+ is indicated by the value, return that module. Otherwise,
+ return the named object.
+ """
+ match = self.pattern.match(self.value)
+ module = import_module(match.group('module'))
+ attrs = filter(None, (match.group('attr') or '').split('.'))
+ return functools.reduce(getattr, attrs, module)
+
+ @property
+ def module(self):
+ match = self.pattern.match(self.value)
+ return match.group('module')
+
+ @property
+ def attr(self):
+ match = self.pattern.match(self.value)
+ return match.group('attr')
+
+ @property
+ def extras(self):
+ match = self.pattern.match(self.value)
+ return list(re.finditer(r'\w+', match.group('extras') or ''))
+
+ @classmethod
+ def _from_config(cls, config):
+ return [
+ cls(name, value, group)
+ for group in config.sections()
+ for name, value in config.items(group)
+ ]
+
+ @classmethod
+ def _from_text(cls, text):
+ config = ConfigParser(delimiters='=')
+ # case sensitive: https://stackoverflow.com/q/1611799/812183
+ config.optionxform = str
+ try:
+ config.read_string(text)
+ except AttributeError: # pragma: nocover
+ # Python 2 has no read_string
+ config.readfp(io.StringIO(text))
+ return EntryPoint._from_config(config)
+
+ def __iter__(self):
+ """
+ Supply iter so one may construct dicts of EntryPoints easily.
+ """
+ return iter((self.name, self))
+
+ def __reduce__(self):
+ return (
+ self.__class__,
+ (self.name, self.value, self.group),
+ )
+
+
+class PackagePath(pathlib.PurePosixPath):
+ """A reference to a path in a package"""
+
+ def read_text(self, encoding='utf-8'):
+ with self.locate().open(encoding=encoding) as stream:
+ return stream.read()
+
+ def read_binary(self):
+ with self.locate().open('rb') as stream:
+ return stream.read()
+
+ def locate(self):
+ """Return a path-like object for this path"""
+ return self.dist.locate_file(self)
+
+
+class FileHash:
+ def __init__(self, spec):
+ self.mode, _, self.value = spec.partition('=')
+
+ def __repr__(self):
+ return '<FileHash mode: {} value: {}>'.format(self.mode, self.value)
+
+
+class Distribution:
+ """A Python distribution package."""
+
+ @abc.abstractmethod
+ def read_text(self, filename):
+ """Attempt to load metadata file given by the name.
+
+ :param filename: The name of the file in the distribution info.
+ :return: The text if found, otherwise None.
+ """
+
+ @abc.abstractmethod
+ def locate_file(self, path):
+ """
+ Given a path to a file in this distribution, return a path
+ to it.
+ """
+
+ @classmethod
+ def from_name(cls, name):
+ """Return the Distribution for the given package name.
+
+ :param name: The name of the distribution package to search for.
+ :return: The Distribution instance (or subclass thereof) for the named
+ package, if found.
+ :raises PackageNotFoundError: When the named package's distribution
+ metadata cannot be found.
+ """
+ for resolver in cls._discover_resolvers():
+ dists = resolver(DistributionFinder.Context(name=name))
+ dist = next(iter(dists), None)
+ if dist is not None:
+ return dist
+ else:
+ raise PackageNotFoundError(name)
+
+ @classmethod
+ def discover(cls, **kwargs):
+ """Return an iterable of Distribution objects for all packages.
+
+ Pass a ``context`` or pass keyword arguments for constructing
+ a context.
+
+ :context: A ``DistributionFinder.Context`` object.
+ :return: Iterable of Distribution objects for all packages.
+ """
+ context = kwargs.pop('context', None)
+ if context and kwargs:
+ raise ValueError("cannot accept context and kwargs")
+ context = context or DistributionFinder.Context(**kwargs)
+ return itertools.chain.from_iterable(
+ resolver(context)
+ for resolver in cls._discover_resolvers()
+ )
+
+ @staticmethod
+ def at(path):
+ """Return a Distribution for the indicated metadata path
+
+ :param path: a string or path-like object
+ :return: a concrete Distribution instance for the path
+ """
+ return PathDistribution(pathlib.Path(path))
+
+ @staticmethod
+ def _discover_resolvers():
+ """Search the meta_path for resolvers."""
+ declared = (
+ getattr(finder, 'find_distributions', None)
+ for finder in sys.meta_path
+ )
+ return filter(None, declared)
+
+ @classmethod
+ def _local(cls, root='.'):
+ from pep517 import build, meta
+ system = build.compat_system(root)
+ builder = functools.partial(
+ meta.build,
+ source_dir=root,
+ system=system,
+ )
+ return PathDistribution(zipp.Path(meta.build_as_zip(builder)))
+
+ @property
+ def metadata(self):
+ """Return the parsed metadata for this Distribution.
+
+ The returned object will have keys that name the various bits of
+ metadata. See PEP 566 for details.
+ """
+ text = (
+ self.read_text('METADATA')
+ or self.read_text('PKG-INFO')
+ # This last clause is here to support old egg-info files. Its
+ # effect is to just end up using the PathDistribution's self._path
+ # (which points to the egg-info file) attribute unchanged.
+ or self.read_text('')
+ )
+ return email_message_from_string(text)
+
+ @property
+ def version(self):
+ """Return the 'Version' metadata for the distribution package."""
+ return self.metadata['Version']
+
+ @property
+ def entry_points(self):
+ return EntryPoint._from_text(self.read_text('entry_points.txt'))
+
+ @property
+ def files(self):
+ """Files in this distribution.
+
+ :return: List of PackagePath for this distribution or None
+
+ Result is `None` if the metadata file that enumerates files
+ (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
+ missing.
+ Result may be empty if the metadata exists but is empty.
+ """
+ file_lines = self._read_files_distinfo() or self._read_files_egginfo()
+
+ def make_file(name, hash=None, size_str=None):
+ result = PackagePath(name)
+ result.hash = FileHash(hash) if hash else None
+ result.size = int(size_str) if size_str else None
+ result.dist = self
+ return result
+
+ return file_lines and list(starmap(make_file, csv.reader(file_lines)))
+
+ def _read_files_distinfo(self):
+ """
+ Read the lines of RECORD
+ """
+ text = self.read_text('RECORD')
+ return text and text.splitlines()
+
+ def _read_files_egginfo(self):
+ """
+ SOURCES.txt might contain literal commas, so wrap each line
+ in quotes.
+ """
+ text = self.read_text('SOURCES.txt')
+ return text and map('"{}"'.format, text.splitlines())
+
+ @property
+ def requires(self):
+ """Generated requirements specified for this Distribution"""
+ reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
+ return reqs and list(reqs)
+
+ def _read_dist_info_reqs(self):
+ return self.metadata.get_all('Requires-Dist')
+
+ def _read_egg_info_reqs(self):
+ source = self.read_text('requires.txt')
+ return source and self._deps_from_requires_text(source)
+
+ @classmethod
+ def _deps_from_requires_text(cls, source):
+ section_pairs = cls._read_sections(source.splitlines())
+ sections = {
+ section: list(map(operator.itemgetter('line'), results))
+ for section, results in
+ itertools.groupby(section_pairs, operator.itemgetter('section'))
+ }
+ return cls._convert_egg_info_reqs_to_simple_reqs(sections)
+
+ @staticmethod
+ def _read_sections(lines):
+ section = None
+ for line in filter(None, lines):
+ section_match = re.match(r'\[(.*)\]$', line)
+ if section_match:
+ section = section_match.group(1)
+ continue
+ yield locals()
+
+ @staticmethod
+ def _convert_egg_info_reqs_to_simple_reqs(sections):
+ """
+ Historically, setuptools would solicit and store 'extra'
+ requirements, including those with environment markers,
+ in separate sections. More modern tools expect each
+ dependency to be defined separately, with any relevant
+ extras and environment markers attached directly to that
+ requirement. This method converts the former to the
+ latter. See _test_deps_from_requires_text for an example.
+ """
+ def make_condition(name):
+ return name and 'extra == "{name}"'.format(name=name)
+
+ def parse_condition(section):
+ section = section or ''
+ extra, sep, markers = section.partition(':')
+ if extra and markers:
+ markers = '({markers})'.format(markers=markers)
+ conditions = list(filter(None, [markers, make_condition(extra)]))
+ return '; ' + ' and '.join(conditions) if conditions else ''
+
+ for section, deps in sections.items():
+ for dep in deps:
+ yield dep + parse_condition(section)
+
+
+class DistributionFinder(MetaPathFinder):
+ """
+ A MetaPathFinder capable of discovering installed distributions.
+ """
+
+ class Context:
+ """
+ Keyword arguments presented by the caller to
+ ``distributions()`` or ``Distribution.discover()``
+ to narrow the scope of a search for distributions
+ in all DistributionFinders.
+
+ Each DistributionFinder may expect any parameters
+ and should attempt to honor the canonical
+ parameters defined below when appropriate.
+ """
+
+ name = None
+ """
+ Specific name for which a distribution finder should match.
+ A name of ``None`` matches all distributions.
+ """
+
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+ @property
+ def path(self):
+ """
+ The path that a distribution finder should search.
+
+ Typically refers to Python package paths and defaults
+ to ``sys.path``.
+ """
+ return vars(self).get('path', sys.path)
+
+ @abc.abstractmethod
+ def find_distributions(self, context=Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching the ``context``,
+ a DistributionFinder.Context instance.
+ """
+
+
+class FastPath:
+ """
+ Micro-optimized class for searching a path for
+ children.
+ """
+
+ def __init__(self, root):
+ self.root = str(root)
+ self.base = os.path.basename(self.root).lower()
+
+ def joinpath(self, child):
+ return pathlib.Path(self.root, child)
+
+ def children(self):
+ with suppress(Exception):
+ return os.listdir(self.root or '')
+ with suppress(Exception):
+ return self.zip_children()
+ return []
+
+ def zip_children(self):
+ zip_path = zipp.Path(self.root)
+ names = zip_path.root.namelist()
+ self.joinpath = zip_path.joinpath
+
+ return unique_ordered(
+ child.split(posixpath.sep, 1)[0]
+ for child in names
+ )
+
+ def is_egg(self, search):
+ base = self.base
+ return (
+ base == search.versionless_egg_name
+ or base.startswith(search.prefix)
+ and base.endswith('.egg'))
+
+ def search(self, name):
+ for child in self.children():
+ n_low = child.lower()
+ if (n_low in name.exact_matches
+ or n_low.startswith(name.prefix)
+ and n_low.endswith(name.suffixes)
+ # legacy case:
+ or self.is_egg(name) and n_low == 'egg-info'):
+ yield self.joinpath(child)
+
+
+class Prepared:
+ """
+ A prepared search for metadata on a possibly-named package.
+ """
+ normalized = ''
+ prefix = ''
+ suffixes = '.dist-info', '.egg-info'
+ exact_matches = [''][:0]
+ versionless_egg_name = ''
+
+ def __init__(self, name):
+ self.name = name
+ if name is None:
+ return
+ self.normalized = name.lower().replace('-', '_')
+ self.prefix = self.normalized + '-'
+ self.exact_matches = [
+ self.normalized + suffix for suffix in self.suffixes]
+ self.versionless_egg_name = self.normalized + '.egg'
+
+
+@install
+class MetadataPathFinder(NullFinder, DistributionFinder):
+ """A degenerate finder for distribution packages on the file system.
+
+ This finder supplies only a find_distributions() method for versions
+ of Python that do not have a PathFinder find_distributions().
+ """
+
+ def find_distributions(self, context=DistributionFinder.Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching ``context.name``
+ (or all names if ``None`` indicated) along the paths in the list
+ of directories ``context.path``.
+ """
+ found = self._search_paths(context.name, context.path)
+ return map(PathDistribution, found)
+
+ @classmethod
+ def _search_paths(cls, name, paths):
+ """Find metadata directories in paths heuristically."""
+ return itertools.chain.from_iterable(
+ path.search(Prepared(name))
+ for path in map(FastPath, paths)
+ )
+
+
+class PathDistribution(Distribution):
+ def __init__(self, path):
+ """Construct a distribution from a path to the metadata directory.
+
+ :param path: A pathlib.Path or similar object supporting
+ .joinpath(), __div__, .parent, and .read_text().
+ """
+ self._path = path
+
+ def read_text(self, filename):
+ with suppress(FileNotFoundError, IsADirectoryError, KeyError,
+ NotADirectoryError, PermissionError):
+ return self._path.joinpath(filename).read_text(encoding='utf-8')
+ read_text.__doc__ = Distribution.read_text.__doc__
+
+ def locate_file(self, path):
+ return self._path.parent / path
+
+
+def distribution(distribution_name):
+ """Get the ``Distribution`` instance for the named package.
+
+ :param distribution_name: The name of the distribution package as a string.
+ :return: A ``Distribution`` instance (or subclass thereof).
+ """
+ return Distribution.from_name(distribution_name)
+
+
+def distributions(**kwargs):
+ """Get all ``Distribution`` instances in the current environment.
+
+ :return: An iterable of ``Distribution`` instances.
+ """
+ return Distribution.discover(**kwargs)
+
+
+def metadata(distribution_name):
+ """Get the metadata for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: An email.Message containing the parsed metadata.
+ """
+ return Distribution.from_name(distribution_name).metadata
+
+
+def version(distribution_name):
+ """Get the version string for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: The version string for the package as defined in the package's
+ "Version" metadata key.
+ """
+ return distribution(distribution_name).version
+
+
+def entry_points():
+ """Return EntryPoint objects for all installed packages.
+
+ :return: EntryPoint objects for all installed packages.
+ """
+ eps = itertools.chain.from_iterable(
+ dist.entry_points for dist in distributions())
+ by_group = operator.attrgetter('group')
+ ordered = sorted(eps, key=by_group)
+ grouped = itertools.groupby(ordered, by_group)
+ return {
+ group: tuple(eps)
+ for group, eps in grouped
+ }
+
+
+def files(distribution_name):
+ """Return a list of files for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: List of files composing the distribution.
+ """
+ return distribution(distribution_name).files
+
+
+def requires(distribution_name):
+ """
+ Return a list of requirements for the named package.
+
+ :return: An iterator of requirements, suitable for
+ packaging.requirement.Requirement.
+ """
+ return distribution(distribution_name).requires
+
+
+__version__ = version(__name__)
diff --git a/third_party/python/importlib_metadata/importlib_metadata/_compat.py b/third_party/python/importlib_metadata/importlib_metadata/_compat.py
new file mode 100644
index 0000000000..303d4a22e8
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/_compat.py
@@ -0,0 +1,152 @@
+from __future__ import absolute_import, unicode_literals
+
+import io
+import abc
+import sys
+import email
+
+
+if sys.version_info > (3,): # pragma: nocover
+ import builtins
+ from configparser import ConfigParser
+ import contextlib
+ FileNotFoundError = builtins.FileNotFoundError
+ IsADirectoryError = builtins.IsADirectoryError
+ NotADirectoryError = builtins.NotADirectoryError
+ PermissionError = builtins.PermissionError
+ map = builtins.map
+ from itertools import filterfalse
+else: # pragma: nocover
+ from backports.configparser import ConfigParser
+ from itertools import imap as map # type: ignore
+ from itertools import ifilterfalse as filterfalse
+ import contextlib2 as contextlib
+ FileNotFoundError = IOError, OSError
+ IsADirectoryError = IOError, OSError
+ NotADirectoryError = IOError, OSError
+ PermissionError = IOError, OSError
+
+str = type('')
+
+suppress = contextlib.suppress
+
+if sys.version_info > (3, 5): # pragma: nocover
+ import pathlib
+else: # pragma: nocover
+ import pathlib2 as pathlib
+
+try:
+    ModuleNotFoundError = builtins.ModuleNotFoundError
+except (NameError, AttributeError): # pragma: nocover
+ ModuleNotFoundError = ImportError # type: ignore
+
+
+if sys.version_info >= (3,): # pragma: nocover
+ from importlib.abc import MetaPathFinder
+else: # pragma: nocover
+ class MetaPathFinder(object):
+ __metaclass__ = abc.ABCMeta
+
+
+__metaclass__ = type
+__all__ = [
+ 'install', 'NullFinder', 'MetaPathFinder', 'ModuleNotFoundError',
+ 'pathlib', 'ConfigParser', 'map', 'suppress', 'FileNotFoundError',
+ 'NotADirectoryError', 'email_message_from_string',
+ ]
+
+
+def install(cls):
+ """
+ Class decorator for installation on sys.meta_path.
+
+ Adds the backport DistributionFinder to sys.meta_path and
+ attempts to disable the finder functionality of the stdlib
+ DistributionFinder.
+ """
+ sys.meta_path.append(cls())
+ disable_stdlib_finder()
+ return cls
+
+
+def disable_stdlib_finder():
+ """
+ Give the backport primacy for discovering path-based distributions
+ by monkey-patching the stdlib O_O.
+
+ See #91 for more background for rationale on this sketchy
+ behavior.
+ """
+ def matches(finder):
+ return (
+ getattr(finder, '__module__', None) == '_frozen_importlib_external'
+ and hasattr(finder, 'find_distributions')
+ )
+ for finder in filter(matches, sys.meta_path): # pragma: nocover
+ del finder.find_distributions
+
+
+class NullFinder:
+ """
+ A "Finder" (aka "MetaClassFinder") that never finds any modules,
+ but may find distributions.
+ """
+ @staticmethod
+ def find_spec(*args, **kwargs):
+ return None
+
+ # In Python 2, the import system requires finders
+ # to have a find_module() method, but this usage
+ # is deprecated in Python 3 in favor of find_spec().
+ # For the purposes of this finder (i.e. being present
+ # on sys.meta_path but having no other import
+ # system functionality), the two methods are identical.
+ find_module = find_spec
+
+
+def py2_message_from_string(text): # nocoverpy3
+ # Work around https://bugs.python.org/issue25545 where
+ # email.message_from_string cannot handle Unicode on Python 2.
+ io_buffer = io.StringIO(text)
+ return email.message_from_file(io_buffer)
+
+
+email_message_from_string = (
+ py2_message_from_string
+ if sys.version_info < (3,) else
+ email.message_from_string
+ )
+
+
+class PyPy_repr:
+ """
+ Override repr for EntryPoint objects on PyPy to avoid __iter__ access.
+ Ref #97, #102.
+ """
+ affected = hasattr(sys, 'pypy_version_info')
+
+ def __compat_repr__(self): # pragma: nocover
+ def make_param(name):
+ value = getattr(self, name)
+ return '{name}={value!r}'.format(**locals())
+ params = ', '.join(map(make_param, self._fields))
+ return 'EntryPoint({params})'.format(**locals())
+
+ if affected: # pragma: nocover
+ __repr__ = __compat_repr__
+ del affected
+
+
+# from itertools recipes
+def unique_everseen(iterable): # pragma: nocover
+ "List unique elements, preserving order. Remember all elements ever seen."
+ seen = set()
+ seen_add = seen.add
+
+ for element in filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+
+
+unique_ordered = (
+ unique_everseen if sys.version_info < (3, 7) else dict.fromkeys)
diff --git a/third_party/python/importlib_metadata/importlib_metadata/docs/__init__.py b/third_party/python/importlib_metadata/importlib_metadata/docs/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/docs/__init__.py
diff --git a/third_party/python/importlib_metadata/importlib_metadata/docs/changelog.rst b/third_party/python/importlib_metadata/importlib_metadata/docs/changelog.rst
new file mode 100644
index 0000000000..0455e66743
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/docs/changelog.rst
@@ -0,0 +1,297 @@
+=========================
+ importlib_metadata NEWS
+=========================
+
+v1.7.0
+======
+
+* ``PackageNotFoundError`` now has a custom ``__str__``
+ mentioning "package metadata" being missing to help
+ guide users to the cause when the package is installed
+ but no metadata is present. Closes #124.
+
+v1.6.1
+======
+
+* Added ``Distribution._local()`` as a provisional
+ demonstration of how to load metadata for a local
+ package. Implicitly requires that
+ `pep517 <https://pypi.org/project/pep517>`_ is
+ installed. Ref #42.
+* Ensure inputs to FastPath are Unicode. Closes #121.
+* Tests now rely on ``importlib.resources.files`` (and
+ backport) instead of the older ``path`` function.
+* Support any iterable from ``find_distributions``.
+ Closes #122.
+
+v1.6.0
+======
+
+* Added ``module`` and ``attr`` attributes to ``EntryPoint``
+
+v1.5.2
+======
+
+* Fix redundant entries from ``FastPath.zip_children``.
+ Closes #117.
+
+v1.5.1
+======
+
+* Improve reliability and consistency of compatibility
+ imports for contextlib and pathlib when running tests.
+ Closes #116.
+
+v1.5.0
+======
+
+* Additional performance optimizations in FastPath now
+ saves an additional 20% on a typical call.
+* Correct for issue where PyOxidizer finder has no
+ ``__module__`` attribute. Closes #110.
+
+v1.4.0
+======
+
+* Through careful optimization, ``distribution()`` is
+ 3-4x faster. Thanks to Antony Lee for the
+ contribution. Closes #95.
+
+* When searching through ``sys.path``, if any error
+ occurs attempting to list a path entry, that entry
+ is skipped, making the system much more lenient
+ to errors. Closes #94.
+
+v1.3.0
+======
+
+* Improve custom finders documentation. Closes #105.
+
+v1.2.0
+======
+
+* Once again, drop support for Python 3.4. Ref #104.
+
+v1.1.3
+======
+
+* Restored support for Python 3.4 due to improper version
+ compatibility declarations in the v1.1.0 and v1.1.1
+ releases. Closes #104.
+
+v1.1.2
+======
+
+* Repaired project metadata to correctly declare the
+ ``python_requires`` directive. Closes #103.
+
+v1.1.1
+======
+
+* Fixed ``repr(EntryPoint)`` on PyPy 3 also. Closes #102.
+
+v1.1.0
+======
+
+* Dropped support for Python 3.4.
+* EntryPoints are now pickleable. Closes #96.
+* Fixed ``repr(EntryPoint)`` on PyPy 2. Closes #97.
+
+v1.0.0
+======
+
+* Project adopts semver for versioning.
+
+* Removed compatibility shim introduced in 0.23.
+
+* For better compatibility with the stdlib implementation and to
+ avoid the same distributions being discovered by the stdlib and
+ backport implementations, the backport now disables the
+ stdlib DistributionFinder during initialization (import time).
+ Closes #91 and closes #100.
+
+0.23
+====
+* Added a compatibility shim to prevent failures on beta releases
+ of Python before the signature changed to accept the
+ "context" parameter on find_distributions. This workaround
+ will have a limited lifespan, not to extend beyond release of
+ Python 3.8 final.
+
+0.22
+====
+* Renamed ``package`` parameter to ``distribution_name``
+ as `recommended <https://bugs.python.org/issue34632#msg349423>`_
+ in the following functions: ``distribution``, ``metadata``,
+ ``version``, ``files``, and ``requires``. This
+ backward-incompatible change is expected to have little impact
+ as these functions are assumed to be primarily used with
+ positional parameters.
+
+0.21
+====
+* ``importlib.metadata`` now exposes the ``DistributionFinder``
+ metaclass and references it in the docs for extending the
+ search algorithm.
+* Add ``Distribution.at`` for constructing a Distribution object
+ from a known metadata directory on the file system. Closes #80.
+* Distribution finders now receive a context object that
+ supplies ``.path`` and ``.name`` properties. This change
+ introduces a fundamental backward incompatibility for
+ any projects implementing a ``find_distributions`` method
+ on a ``MetaPathFinder``. This new layer of abstraction
+ allows this context to be supplied directly or constructed
+ on demand and opens the opportunity for a
+ ``find_distributions`` method to solicit additional
+ context from the caller. Closes #85.
+
+0.20
+====
+* Clarify in the docs that calls to ``.files`` could return
+ ``None`` when the metadata is not present. Closes #69.
+* Return all requirements and not just the first for dist-info
+ packages. Closes #67.
+
+0.19
+====
+* Restrain over-eager egg metadata resolution.
+* Add support for entry points with colons in the name. Closes #75.
+
+0.18
+====
+* Parse entry points case sensitively. Closes #68
+* Add a version constraint on the backport configparser package. Closes #66
+
+0.17
+====
+* Fix a permission problem in the tests on Windows.
+
+0.16
+====
+* Don't crash if there exists an EGG-INFO directory on sys.path.
+
+0.15
+====
+* Fix documentation.
+
+0.14
+====
+* Removed ``local_distribution`` function from the API.
+ **This backward-incompatible change removes this
+ behavior summarily**. Projects should remove their
+ reliance on this behavior. A replacement behavior is
+ under review in the `pep517 project
+ <https://github.com/pypa/pep517>`_. Closes #42.
+
+0.13
+====
+* Update docstrings to match PEP 8. Closes #63.
+* Merged modules into one module. Closes #62.
+
+0.12
+====
+* Add support for eggs. !65; Closes #19.
+
+0.11
+====
+* Support generic zip files (not just wheels). Closes #59
+* Support zip files with multiple distributions in them. Closes #60
+* Fully expose the public API in ``importlib_metadata.__all__``.
+
+0.10
+====
+* The ``Distribution`` ABC is now officially part of the public API.
+ Closes #37.
+* Fixed support for older single file egg-info formats. Closes #43.
+* Fixed a testing bug when ``$CWD`` has spaces in the path. Closes #50.
+* Add Python 3.8 to the ``tox`` testing matrix.
+
+0.9
+===
+* Fixed issue where entry points without an attribute would raise an
+ Exception. Closes #40.
+* Removed unused ``name`` parameter from ``entry_points()``. Closes #44.
+* ``DistributionFinder`` classes must now be instantiated before
+ being placed on ``sys.meta_path``.
+
+0.8
+===
+* This library can now discover/enumerate all installed packages. **This
+ backward-incompatible change alters the protocol finders must
+ implement to support distribution package discovery.** Closes #24.
+* The signature of ``find_distributions()`` on custom installer finders
+ should now accept two parameters, ``name`` and ``path`` and
+ these parameters must supply defaults.
+* The ``entry_points()`` method no longer accepts a package name
+ but instead returns all entry points in a dictionary keyed by the
+ ``EntryPoint.group``. The ``resolve`` method has been removed. Instead,
+ call ``EntryPoint.load()``, which has the same semantics as
+ ``pkg_resources`` and ``entrypoints``. **This is a backward incompatible
+ change.**
+* Metadata is now always returned as Unicode text regardless of
+ Python version. Closes #29.
+* This library can now discover metadata for a 'local' package (found
+ in the current-working directory). Closes #27.
+* Added ``files()`` function for resolving files from a distribution.
+* Added a new ``requires()`` function, which returns the requirements
+ for a package suitable for parsing by
+ ``packaging.requirements.Requirement``. Closes #18.
+* The top-level ``read_text()`` function has been removed. Use
+ ``PackagePath.read_text()`` on instances returned by the ``files()``
+ function. **This is a backward incompatible change.**
+* Release dates are now automatically injected into the changelog
+ based on SCM tags.
+
+0.7
+===
+* Fixed issue where packages with dashes in their names would
+ not be discovered. Closes #21.
+* Distribution lookup is now case-insensitive. Closes #20.
+* Wheel distributions can no longer be discovered by their module
+ name. Like Path distributions, they must be indicated by their
+ distribution package name.
+
+0.6
+===
+* Removed ``importlib_metadata.distribution`` function. Now
+ the public interface is primarily the utility functions exposed
+ in ``importlib_metadata.__all__``. Closes #14.
+* Added two new utility functions ``read_text`` and
+ ``metadata``.
+
+0.5
+===
+* Updated README and removed details about Distribution
+ class, now considered private. Closes #15.
+* Added test suite support for Python 3.4+.
+* Fixed SyntaxErrors on Python 3.4 and 3.5. !12
+* Fixed errors on Windows joining Path elements. !15
+
+0.4
+===
+* Housekeeping.
+
+0.3
+===
+* Added usage documentation. Closes #8
+* Add support for getting metadata from wheels on ``sys.path``. Closes #9
+
+0.2
+===
+* Added ``importlib_metadata.entry_points()``. Closes #1
+* Added ``importlib_metadata.resolve()``. Closes #12
+* Add support for Python 2.7. Closes #4
+
+0.1
+===
+* Initial release.
+
+
+..
+ Local Variables:
+ mode: change-log-mode
+ indent-tabs-mode: nil
+ sentence-end-double-space: t
+ fill-column: 78
+ coding: utf-8
+ End:
diff --git a/third_party/python/importlib_metadata/importlib_metadata/docs/conf.py b/third_party/python/importlib_metadata/importlib_metadata/docs/conf.py
new file mode 100644
index 0000000000..129a7a4eae
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/docs/conf.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# importlib_metadata documentation build configuration file, created by
+# sphinx-quickstart on Thu Nov 30 10:21:00 2017.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+# import os
+# import sys
+# sys.path.insert(0, os.path.abspath('.'))
+
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'rst.linker',
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.coverage',
+ 'sphinx.ext.doctest',
+ 'sphinx.ext.intersphinx',
+ 'sphinx.ext.viewcode',
+ ]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'importlib_metadata'
+copyright = '2017-2019, Jason R. Coombs, Barry Warsaw'
+author = 'Jason R. Coombs, Barry Warsaw'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '0.1'
+# The full version, including alpha/beta/rc tags.
+release = '0.1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This patterns also effect to html_static_path and html_extra_path
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = 'default'
+
+# Custom sidebar templates, must be a dictionary that maps document names
+# to template names.
+#
+# This is required for the alabaster theme
+# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
+html_sidebars = {
+ '**': [
+ 'relations.html', # needs 'show_related': True theme option to display
+ 'searchbox.html',
+ ]
+ }
+
+
+# -- Options for HTMLHelp output ------------------------------------------
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'importlib_metadatadoc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+ # The paper size ('letterpaper' or 'a4paper').
+ #
+ # 'papersize': 'letterpaper',
+
+ # The font size ('10pt', '11pt' or '12pt').
+ #
+ # 'pointsize': '10pt',
+
+ # Additional stuff for the LaTeX preamble.
+ #
+ # 'preamble': '',
+
+ # Latex figure (float) alignment
+ #
+ # 'figure_align': 'htbp',
+ }
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (master_doc, 'importlib_metadata.tex',
+ 'importlib\\_metadata Documentation',
+ 'Brett Cannon, Barry Warsaw', 'manual'),
+ ]
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (master_doc, 'importlib_metadata', 'importlib_metadata Documentation',
+ [author], 1)
+ ]
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (master_doc, 'importlib_metadata', 'importlib_metadata Documentation',
+ author, 'importlib_metadata', 'One line description of project.',
+ 'Miscellaneous'),
+ ]
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+ 'python': ('https://docs.python.org/3', None),
+ 'importlib_resources': (
+ 'https://importlib-resources.readthedocs.io/en/latest/', None
+ ),
+ }
+
+
+# For rst.linker, inject release dates into changelog.rst
+link_files = {
+ 'changelog.rst': dict(
+ replace=[
+ dict(
+ pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
+ with_scm='{text}\n{rev[timestamp]:%Y-%m-%d}\n\n',
+ ),
+ ],
+ ),
+ }
diff --git a/third_party/python/importlib_metadata/importlib_metadata/docs/index.rst b/third_party/python/importlib_metadata/importlib_metadata/docs/index.rst
new file mode 100644
index 0000000000..530197cf18
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/docs/index.rst
@@ -0,0 +1,50 @@
+===============================
+ Welcome to importlib_metadata
+===============================
+
+``importlib_metadata`` is a library which provides an API for accessing an
+installed package's metadata (see :pep:`566`), such as its entry points or its top-level
+name. This functionality intends to replace most uses of ``pkg_resources``
+`entry point API`_ and `metadata API`_. Along with :mod:`importlib.resources` in
+Python 3.7 and newer (backported as :doc:`importlib_resources <importlib_resources:index>` for older
+versions of Python), this can eliminate the need to use the older and less
+efficient ``pkg_resources`` package.
+
+``importlib_metadata`` is a backport of Python 3.8's standard library
+:doc:`importlib.metadata <library/importlib.metadata>` module for Python 2.7 and 3.5 through 3.7. Users of
+Python 3.8 and beyond are encouraged to use the standard library module.
+When imported on Python 3.8 and later, ``importlib_metadata`` replaces the
+DistributionFinder behavior from the stdlib, but leaves the API intact.
+Developers looking for detailed API descriptions should refer to the Python
+3.8 standard library documentation.
+
+The documentation here includes a general :ref:`usage <using>` guide.
+
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Contents:
+
+ using.rst
+ changelog (links).rst
+
+
+Project details
+===============
+
+ * Project home: https://gitlab.com/python-devs/importlib_metadata
+ * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues
+ * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git
+ * Documentation: http://importlib_metadata.readthedocs.io/
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
+
+.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points
+.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api
diff --git a/third_party/python/importlib_metadata/importlib_metadata/docs/using.rst b/third_party/python/importlib_metadata/importlib_metadata/docs/using.rst
new file mode 100644
index 0000000000..11965147f4
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/docs/using.rst
@@ -0,0 +1,260 @@
+.. _using:
+
+=================================
+ Using :mod:`!importlib_metadata`
+=================================
+
+``importlib_metadata`` is a library that provides for access to installed
+package metadata. Built in part on Python's import system, this library
+intends to replace similar functionality in the `entry point
+API`_ and `metadata API`_ of ``pkg_resources``. Along with
+:mod:`importlib.resources` in Python 3.7
+and newer (backported as :doc:`importlib_resources <importlib_resources:index>` for older versions of
+Python), this can eliminate the need to use the older and less efficient
+``pkg_resources`` package.
+
+By "installed package" we generally mean a third-party package installed into
+Python's ``site-packages`` directory via tools such as `pip
+<https://pypi.org/project/pip/>`_. Specifically,
+it means a package with either a discoverable ``dist-info`` or ``egg-info``
+directory, and metadata defined by :pep:`566` or its older specifications.
+By default, package metadata can live on the file system or in zip archives on
+:data:`sys.path`. Through an extension mechanism, the metadata can live almost
+anywhere.
+
+
+Overview
+========
+
+Let's say you wanted to get the version string for a package you've installed
+using ``pip``. We start by creating a virtual environment and installing
+something into it::
+
+ $ python3 -m venv example
+ $ source example/bin/activate
+ (example) $ pip install importlib_metadata
+ (example) $ pip install wheel
+
+You can get the version string for ``wheel`` by running the following::
+
+ (example) $ python
+ >>> from importlib_metadata import version
+ >>> version('wheel')
+ '0.32.3'
+
+You can also get the set of entry points keyed by group, such as
+``console_scripts``, ``distutils.commands`` and others. Each group contains a
+sequence of :ref:`EntryPoint <entry-points>` objects.
+
+You can get the :ref:`metadata for a distribution <metadata>`::
+
+ >>> list(metadata('wheel'))
+ ['Metadata-Version', 'Name', 'Version', 'Summary', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', 'Project-URL', 'Project-URL', 'Project-URL', 'Keywords', 'Platform', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Requires-Python', 'Provides-Extra', 'Requires-Dist', 'Requires-Dist']
+
+You can also get a :ref:`distribution's version number <version>`, list its
+:ref:`constituent files <files>`, and get a list of the distribution's
+:ref:`requirements`.
+
+
+Functional API
+==============
+
+This package provides the following functionality via its public API.
+
+
+.. _entry-points:
+
+Entry points
+------------
+
+The ``entry_points()`` function returns a dictionary of all entry points,
+keyed by group. Entry points are represented by ``EntryPoint`` instances;
+each ``EntryPoint`` has ``.name``, ``.group``, and ``.value`` attributes and
+a ``.load()`` method to resolve the value. There are also ``.module``,
+``.attr``, and ``.extras`` attributes for getting the components of the
+``.value`` attribute::
+
+ >>> eps = entry_points()
+ >>> list(eps)
+ ['console_scripts', 'distutils.commands', 'distutils.setup_keywords', 'egg_info.writers', 'setuptools.installation']
+ >>> scripts = eps['console_scripts']
+ >>> wheel = [ep for ep in scripts if ep.name == 'wheel'][0]
+ >>> wheel
+ EntryPoint(name='wheel', value='wheel.cli:main', group='console_scripts')
+ >>> wheel.module
+ 'wheel.cli'
+ >>> wheel.attr
+ 'main'
+ >>> wheel.extras
+ []
+ >>> main = wheel.load()
+ >>> main
+ <function main at 0x103528488>
+
+The ``group`` and ``name`` are arbitrary values defined by the package author,
+and usually a client will wish to resolve all entry points for a particular
+group. Read `the setuptools docs
+<https://setuptools.readthedocs.io/en/latest/setuptools.html#dynamic-discovery-of-services-and-plugins>`_
+for more information on entry points, their definition, and usage.
+
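+A minimal sketch of resolving and loading every entry point in one group,
+assuming a hypothetical ``myapp.plugins`` group::
+
+    >>> from importlib_metadata import entry_points
+    >>> plugins = entry_points().get('myapp.plugins', ())
+    >>> loaded = {ep.name: ep.load() for ep in plugins}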
+
+.. _metadata:
+
+Distribution metadata
+---------------------
+
+Every distribution includes some metadata, which you can extract using the
+``metadata()`` function::
+
+ >>> wheel_metadata = metadata('wheel')
+
+The keys of the returned data structure [#f1]_ name the metadata keywords, and
+their values are returned unparsed from the distribution metadata::
+
+ >>> wheel_metadata['Requires-Python']
+ '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*'
+
+
+.. _version:
+
+Distribution versions
+---------------------
+
+The ``version()`` function is the quickest way to get a distribution's version
+number, as a string::
+
+ >>> version('wheel')
+ '0.32.3'
+
+
+.. _files:
+
+Distribution files
+------------------
+
+You can also get the full set of files contained within a distribution. The
+``files()`` function takes a distribution package name and returns all of the
+files installed by this distribution. Each file object returned is a
+``PackagePath``, a :class:`pathlib.Path` derived object with additional ``dist``,
+``size``, and ``hash`` properties as indicated by the metadata. For example::
+
+ >>> util = [p for p in files('wheel') if 'util.py' in str(p)][0]
+ >>> util
+ PackagePath('wheel/util.py')
+ >>> util.size
+ 859
+ >>> util.dist
+ <importlib_metadata._hooks.PathDistribution object at 0x101e0cef0>
+ >>> util.hash
+ <FileHash mode: sha256 value: bYkw5oMccfazVCoYQwKkkemoVyMAFoR34mmKBx8R1NI>
+
+Once you have the file, you can also read its contents::
+
+ >>> print(util.read_text())
+ import base64
+ import sys
+ ...
+ def as_bytes(s):
+ if isinstance(s, text_type):
+ return s.encode('utf-8')
+ return s
+
+In the case where the metadata file listing files
+(RECORD or SOURCES.txt) is missing, ``files()`` will
+return ``None``. The caller may wish to wrap calls to
+``files()`` in `always_iterable
+<https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_iterable>`_
+or otherwise guard against this condition if the target
+distribution is not known to have the metadata present.
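+
+A minimal guard for that case, reusing the ``wheel`` distribution from the
+examples above::
+
+    >>> from importlib_metadata import files
+    >>> listing = files('wheel') or []
+    >>> names = [str(path) for path in listing]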
+
+.. _requirements:
+
+Distribution requirements
+-------------------------
+
+To get the full set of requirements for a distribution, use the ``requires()``
+function::
+
+ >>> requires('wheel')
+ ["pytest (>=3.0.0) ; extra == 'test'", "pytest-cov ; extra == 'test'"]
+
+
+Distributions
+=============
+
+While the above API is the most common and convenient usage, you can get all
+of that information from the ``Distribution`` class. A ``Distribution`` is an
+abstract object that represents the metadata for a Python package. You can
+get the ``Distribution`` instance::
+
+ >>> from importlib_metadata import distribution
+ >>> dist = distribution('wheel')
+
+Thus, an alternative way to get the version number is through the
+``Distribution`` instance::
+
+ >>> dist.version
+ '0.32.3'
+
+There are all kinds of additional metadata available on the ``Distribution``
+instance::
+
+    >>> dist.metadata['Requires-Python']
+    '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*'
+    >>> dist.metadata['License']
+ 'MIT'
+
+The full set of available metadata is not described here. See :pep:`566`
+for additional details.
+
+
+Extending the search algorithm
+==============================
+
+Because package metadata is not available through :data:`sys.path` searches, or
+package loaders directly, the metadata for a package is found through import
+system `finders`_. To find a distribution package's metadata,
+``importlib.metadata`` queries the list of :term:`meta path finders <meta path finder>` on
+:data:`sys.meta_path`.
+
+By default ``importlib_metadata`` installs a finder for distribution packages
+found on the file system. This finder doesn't actually find any *packages*,
+but it can find the packages' metadata.
+
+The abstract class :py:class:`importlib.abc.MetaPathFinder` defines the
+interface expected of finders by Python's import system.
+``importlib_metadata`` extends this protocol by looking for an optional
+``find_distributions`` callable on the finders from
+:data:`sys.meta_path` and presents this extended interface as the
+``DistributionFinder`` abstract base class, which defines this abstract
+method::
+
+ @abc.abstractmethod
+    def find_distributions(self, context=DistributionFinder.Context()):
+ """Return an iterable of all Distribution instances capable of
+ loading the metadata for packages for the indicated ``context``.
+ """
+
+The ``DistributionFinder.Context`` object provides ``.path`` and ``.name``
+properties indicating the path to search and name to match and may
+supply other relevant context.
+
+What this means in practice is that to support finding distribution package
+metadata in locations other than the file system, subclass
+``Distribution`` and implement the abstract methods. Then from
+a custom finder, return instances of this derived ``Distribution`` in the
+``find_distributions()`` method.
+
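+A minimal sketch of such an extension, serving metadata from an in-memory
+mapping (the ``InMemoryDistribution`` and ``InMemoryFinder`` names and the
+``example-pkg`` metadata are hypothetical)::
+
+    import sys
+
+    from importlib_metadata import Distribution, DistributionFinder
+
+    class InMemoryDistribution(Distribution):
+        """Serve metadata text from a plain dict."""
+
+        def __init__(self, metadata_files):
+            self._files = metadata_files
+
+        def read_text(self, filename):
+            # Return None for missing files, as the Distribution API expects.
+            return self._files.get(filename)
+
+        def locate_file(self, path):
+            # Nothing on disk backs this distribution; echo the path back.
+            return path
+
+    class InMemoryFinder(DistributionFinder):
+        def find_spec(self, *args, **kwargs):
+            # Never find modules; only supply distribution metadata.
+            return None
+
+        def find_distributions(self, context=DistributionFinder.Context()):
+            # A real finder should honor ``context.name`` and ``context.path``;
+            # this sketch always yields its single distribution.
+            yield InMemoryDistribution({
+                'METADATA': 'Name: example-pkg\nVersion: 1.0\n',
+            })
+
+    sys.meta_path.append(InMemoryFinder())
+
+With the finder installed, ``version('example-pkg')`` should resolve to
+``'1.0'`` from the in-memory metadata.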
+
+.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points
+.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api
+.. _`finders`: https://docs.python.org/3/reference/import.html#finders-and-loaders
+
+
+.. rubric:: Footnotes
+
+.. [#f1] Technically, the returned distribution metadata object is an
+ :class:`email.message.EmailMessage`
+ instance, but this is an implementation detail, and not part of the
+ stable API. You should only use dictionary-like methods and syntax
+ to access the metadata contents.
diff --git a/third_party/python/importlib_metadata/importlib_metadata/tests/__init__.py b/third_party/python/importlib_metadata/importlib_metadata/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/tests/__init__.py
diff --git a/third_party/python/importlib_metadata/importlib_metadata/tests/data/__init__.py b/third_party/python/importlib_metadata/importlib_metadata/tests/data/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/tests/data/__init__.py
diff --git a/third_party/python/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3-none-any.whl b/third_party/python/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3-none-any.whl
new file mode 100644
index 0000000000..641ab07f7a
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3-none-any.whl
Binary files differ
diff --git a/third_party/python/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3.6.egg b/third_party/python/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3.6.egg
new file mode 100644
index 0000000000..cdb298a19b
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3.6.egg
Binary files differ
diff --git a/third_party/python/importlib_metadata/importlib_metadata/tests/fixtures.py b/third_party/python/importlib_metadata/importlib_metadata/tests/fixtures.py
new file mode 100644
index 0000000000..20982fa1c4
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/tests/fixtures.py
@@ -0,0 +1,232 @@
+from __future__ import unicode_literals
+
+import os
+import sys
+import shutil
+import tempfile
+import textwrap
+import test.support
+
+from .._compat import pathlib, contextlib
+
+
+__metaclass__ = type
+
+
+@contextlib.contextmanager
+def tempdir():
+ tmpdir = tempfile.mkdtemp()
+ try:
+ yield pathlib.Path(tmpdir)
+ finally:
+ shutil.rmtree(tmpdir)
+
+
+@contextlib.contextmanager
+def save_cwd():
+ orig = os.getcwd()
+ try:
+ yield
+ finally:
+ os.chdir(orig)
+
+
+@contextlib.contextmanager
+def tempdir_as_cwd():
+ with tempdir() as tmp:
+ with save_cwd():
+ os.chdir(str(tmp))
+ yield tmp
+
+
+@contextlib.contextmanager
+def install_finder(finder):
+ sys.meta_path.append(finder)
+ try:
+ yield
+ finally:
+ sys.meta_path.remove(finder)
+
+
+class Fixtures:
+ def setUp(self):
+ self.fixtures = contextlib.ExitStack()
+ self.addCleanup(self.fixtures.close)
+
+
+class SiteDir(Fixtures):
+ def setUp(self):
+ super(SiteDir, self).setUp()
+ self.site_dir = self.fixtures.enter_context(tempdir())
+
+
+class OnSysPath(Fixtures):
+ @staticmethod
+ @contextlib.contextmanager
+ def add_sys_path(dir):
+ sys.path[:0] = [str(dir)]
+ try:
+ yield
+ finally:
+ sys.path.remove(str(dir))
+
+ def setUp(self):
+ super(OnSysPath, self).setUp()
+ self.fixtures.enter_context(self.add_sys_path(self.site_dir))
+
+
+class DistInfoPkg(OnSysPath, SiteDir):
+ files = {
+ "distinfo_pkg-1.0.0.dist-info": {
+ "METADATA": """
+ Name: distinfo-pkg
+ Author: Steven Ma
+ Version: 1.0.0
+ Requires-Dist: wheel >= 1.0
+ Requires-Dist: pytest; extra == 'test'
+ """,
+ "RECORD": "mod.py,sha256=abc,20\n",
+ "entry_points.txt": """
+ [entries]
+ main = mod:main
+ ns:sub = mod:main
+ """
+ },
+ "mod.py": """
+ def main():
+ print("hello world")
+ """,
+ }
+
+ def setUp(self):
+ super(DistInfoPkg, self).setUp()
+ build_files(DistInfoPkg.files, self.site_dir)
+
+
+class DistInfoPkgOffPath(SiteDir):
+ def setUp(self):
+ super(DistInfoPkgOffPath, self).setUp()
+ build_files(DistInfoPkg.files, self.site_dir)
+
+
+class EggInfoPkg(OnSysPath, SiteDir):
+ files = {
+ "egginfo_pkg.egg-info": {
+ "PKG-INFO": """
+ Name: egginfo-pkg
+ Author: Steven Ma
+ License: Unknown
+ Version: 1.0.0
+ Classifier: Intended Audience :: Developers
+ Classifier: Topic :: Software Development :: Libraries
+ """,
+ "SOURCES.txt": """
+ mod.py
+ egginfo_pkg.egg-info/top_level.txt
+ """,
+ "entry_points.txt": """
+ [entries]
+ main = mod:main
+ """,
+ "requires.txt": """
+ wheel >= 1.0; python_version >= "2.7"
+ [test]
+ pytest
+ """,
+ "top_level.txt": "mod\n"
+ },
+ "mod.py": """
+ def main():
+ print("hello world")
+ """,
+ }
+
+ def setUp(self):
+ super(EggInfoPkg, self).setUp()
+ build_files(EggInfoPkg.files, prefix=self.site_dir)
+
+
+class EggInfoFile(OnSysPath, SiteDir):
+ files = {
+ "egginfo_file.egg-info": """
+ Metadata-Version: 1.0
+ Name: egginfo_file
+ Version: 0.1
+ Summary: An example package
+ Home-page: www.example.com
+ Author: Eric Haffa-Vee
+ Author-email: eric@example.coms
+ License: UNKNOWN
+ Description: UNKNOWN
+ Platform: UNKNOWN
+ """,
+ }
+
+ def setUp(self):
+ super(EggInfoFile, self).setUp()
+ build_files(EggInfoFile.files, prefix=self.site_dir)
+
+
+class LocalPackage:
+ files = {
+ "setup.py": """
+ import setuptools
+ setuptools.setup(name="local-pkg", version="2.0.1")
+ """,
+ }
+
+ def setUp(self):
+ self.fixtures = contextlib.ExitStack()
+ self.addCleanup(self.fixtures.close)
+ self.fixtures.enter_context(tempdir_as_cwd())
+ build_files(self.files)
+
+
+def build_files(file_defs, prefix=pathlib.Path()):
+    """Build a set of files/directories, as described by the
+    file_defs dictionary.
+
+    Each key/value pair in the dictionary is
+ interpreted as a filename/contents pair. If the contents value is a
+ dictionary, a directory is created, and the dictionary interpreted
+ as the files within it, recursively.
+
+ For example:
+
+ {"README.txt": "A README file",
+ "foo": {
+ "__init__.py": "",
+ "bar": {
+ "__init__.py": "",
+ },
+ "baz.py": "# Some code",
+ }
+ }
+ """
+ for name, contents in file_defs.items():
+ full_name = prefix / name
+ if isinstance(contents, dict):
+ full_name.mkdir()
+ build_files(contents, prefix=full_name)
+ else:
+ if isinstance(contents, bytes):
+ with full_name.open('wb') as f:
+ f.write(contents)
+ else:
+ with full_name.open('w') as f:
+ f.write(DALS(contents))
+
+
+class FileBuilder:
+ def unicode_filename(self):
+ return test.support.FS_NONASCII or \
+ self.skip("File system does not support non-ascii.")
+
+
+def DALS(str):
+ "Dedent and left-strip"
+ return textwrap.dedent(str).lstrip()
+
+
+class NullFinder:
+ def find_module(self, name):
+ pass
diff --git a/third_party/python/importlib_metadata/importlib_metadata/tests/test_api.py b/third_party/python/importlib_metadata/importlib_metadata/tests/test_api.py
new file mode 100644
index 0000000000..aa346ddb20
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/tests/test_api.py
@@ -0,0 +1,176 @@
+import re
+import textwrap
+import unittest
+
+from . import fixtures
+from .. import (
+ Distribution, PackageNotFoundError, __version__, distribution,
+ entry_points, files, metadata, requires, version,
+ )
+
+try:
+ from collections.abc import Iterator
+except ImportError:
+ from collections import Iterator # noqa: F401
+
+try:
+ from builtins import str as text
+except ImportError:
+ from __builtin__ import unicode as text
+
+
+class APITests(
+ fixtures.EggInfoPkg,
+ fixtures.DistInfoPkg,
+ fixtures.EggInfoFile,
+ unittest.TestCase):
+
+ version_pattern = r'\d+\.\d+(\.\d)?'
+
+ def test_retrieves_version_of_self(self):
+ pkg_version = version('egginfo-pkg')
+ assert isinstance(pkg_version, text)
+ assert re.match(self.version_pattern, pkg_version)
+
+ def test_retrieves_version_of_distinfo_pkg(self):
+ pkg_version = version('distinfo-pkg')
+ assert isinstance(pkg_version, text)
+ assert re.match(self.version_pattern, pkg_version)
+
+ def test_for_name_does_not_exist(self):
+ with self.assertRaises(PackageNotFoundError):
+ distribution('does-not-exist')
+
+ def test_for_top_level(self):
+ self.assertEqual(
+ distribution('egginfo-pkg').read_text('top_level.txt').strip(),
+ 'mod')
+
+ def test_read_text(self):
+ top_level = [
+ path for path in files('egginfo-pkg')
+ if path.name == 'top_level.txt'
+ ][0]
+ self.assertEqual(top_level.read_text(), 'mod\n')
+
+ def test_entry_points(self):
+ entries = dict(entry_points()['entries'])
+ ep = entries['main']
+ self.assertEqual(ep.value, 'mod:main')
+ self.assertEqual(ep.extras, [])
+
+ def test_metadata_for_this_package(self):
+ md = metadata('egginfo-pkg')
+ assert md['author'] == 'Steven Ma'
+ assert md['LICENSE'] == 'Unknown'
+ assert md['Name'] == 'egginfo-pkg'
+ classifiers = md.get_all('Classifier')
+ assert 'Topic :: Software Development :: Libraries' in classifiers
+
+ def test_importlib_metadata_version(self):
+ assert re.match(self.version_pattern, __version__)
+
+ @staticmethod
+ def _test_files(files):
+ root = files[0].root
+ for file in files:
+ assert file.root == root
+ assert not file.hash or file.hash.value
+ assert not file.hash or file.hash.mode == 'sha256'
+ assert not file.size or file.size >= 0
+ assert file.locate().exists()
+ assert isinstance(file.read_binary(), bytes)
+ if file.name.endswith('.py'):
+ file.read_text()
+
+ def test_file_hash_repr(self):
+ try:
+ assertRegex = self.assertRegex
+ except AttributeError:
+ # Python 2
+ assertRegex = self.assertRegexpMatches
+
+ util = [
+ p for p in files('distinfo-pkg')
+ if p.name == 'mod.py'
+ ][0]
+ assertRegex(
+ repr(util.hash),
+ '<FileHash mode: sha256 value: .*>')
+
+ def test_files_dist_info(self):
+ self._test_files(files('distinfo-pkg'))
+
+ def test_files_egg_info(self):
+ self._test_files(files('egginfo-pkg'))
+
+ def test_version_egg_info_file(self):
+ self.assertEqual(version('egginfo-file'), '0.1')
+
+ def test_requires_egg_info_file(self):
+ requirements = requires('egginfo-file')
+ self.assertIsNone(requirements)
+
+ def test_requires_egg_info(self):
+ deps = requires('egginfo-pkg')
+ assert len(deps) == 2
+ assert any(
+ dep == 'wheel >= 1.0; python_version >= "2.7"'
+ for dep in deps
+ )
+
+ def test_requires_dist_info(self):
+ deps = requires('distinfo-pkg')
+ assert len(deps) == 2
+ assert all(deps)
+ assert 'wheel >= 1.0' in deps
+ assert "pytest; extra == 'test'" in deps
+
+ def test_more_complex_deps_requires_text(self):
+ requires = textwrap.dedent("""
+ dep1
+ dep2
+
+ [:python_version < "3"]
+ dep3
+
+ [extra1]
+ dep4
+
+ [extra2:python_version < "3"]
+ dep5
+ """)
+ deps = sorted(Distribution._deps_from_requires_text(requires))
+ expected = [
+ 'dep1',
+ 'dep2',
+ 'dep3; python_version < "3"',
+ 'dep4; extra == "extra1"',
+ 'dep5; (python_version < "3") and extra == "extra2"',
+ ]
+ # It's important that the environment marker expression be
+ # wrapped in parentheses to avoid the following 'and' binding more
+ # tightly than some other part of the environment expression.
+
+ assert deps == expected
+
+
+class OffSysPathTests(fixtures.DistInfoPkgOffPath, unittest.TestCase):
+ def test_find_distributions_specified_path(self):
+ dists = Distribution.discover(path=[str(self.site_dir)])
+ assert any(
+ dist.metadata['Name'] == 'distinfo-pkg'
+ for dist in dists
+ )
+
+ def test_distribution_at_pathlib(self):
+ """Demonstrate how to load metadata direct from a directory.
+ """
+ dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info'
+ dist = Distribution.at(dist_info_path)
+ assert dist.version == '1.0.0'
+
+ def test_distribution_at_str(self):
+ dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info'
+ dist = Distribution.at(str(dist_info_path))
+ assert dist.version == '1.0.0'
diff --git a/third_party/python/importlib_metadata/importlib_metadata/tests/test_integration.py b/third_party/python/importlib_metadata/importlib_metadata/tests/test_integration.py
new file mode 100644
index 0000000000..cbb940bd46
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/tests/test_integration.py
@@ -0,0 +1,54 @@
+# coding: utf-8
+
+from __future__ import unicode_literals
+
+import unittest
+import packaging.requirements
+import packaging.version
+
+from . import fixtures
+from .. import (
+ Distribution,
+ _compat,
+ version,
+ )
+
+
+class IntegrationTests(fixtures.DistInfoPkg, unittest.TestCase):
+
+ def test_package_spec_installed(self):
+ """
+ Illustrate the recommended procedure to determine if
+ a specified version of a package is installed.
+ """
+ def is_installed(package_spec):
+ req = packaging.requirements.Requirement(package_spec)
+ return version(req.name) in req.specifier
+
+ assert is_installed('distinfo-pkg==1.0')
+ assert is_installed('distinfo-pkg>=1.0,<2.0')
+ assert not is_installed('distinfo-pkg<1.0')
+
+
+class FinderTests(fixtures.Fixtures, unittest.TestCase):
+
+ def test_finder_without_module(self):
+ class ModuleFreeFinder(fixtures.NullFinder):
+ """
+ A finder without an __module__ attribute
+ """
+ def __getattribute__(self, name):
+ if name == '__module__':
+ raise AttributeError(name)
+ return super().__getattribute__(name)
+
+ self.fixtures.enter_context(
+ fixtures.install_finder(ModuleFreeFinder()))
+ _compat.disable_stdlib_finder()
+
+
+class LocalProjectTests(fixtures.LocalPackage, unittest.TestCase):
+ def test_find_local(self):
+ dist = Distribution._local()
+ assert dist.metadata['Name'] == 'local-pkg'
+ assert dist.version == '2.0.1'
diff --git a/third_party/python/importlib_metadata/importlib_metadata/tests/test_main.py b/third_party/python/importlib_metadata/importlib_metadata/tests/test_main.py
new file mode 100644
index 0000000000..4ffdd5d666
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/tests/test_main.py
@@ -0,0 +1,285 @@
+# coding: utf-8
+from __future__ import unicode_literals
+
+import re
+import json
+import pickle
+import textwrap
+import unittest
+import importlib
+import importlib_metadata
+import pyfakefs.fake_filesystem_unittest as ffs
+
+from . import fixtures
+from .. import (
+ Distribution, EntryPoint, MetadataPathFinder,
+ PackageNotFoundError, distributions,
+ entry_points, metadata, version,
+ )
+
+try:
+ from builtins import str as text
+except ImportError:
+ from __builtin__ import unicode as text
+
+
+class BasicTests(fixtures.DistInfoPkg, unittest.TestCase):
+ version_pattern = r'\d+\.\d+(\.\d)?'
+
+ def test_retrieves_version_of_self(self):
+ dist = Distribution.from_name('distinfo-pkg')
+ assert isinstance(dist.version, text)
+ assert re.match(self.version_pattern, dist.version)
+
+ def test_for_name_does_not_exist(self):
+ with self.assertRaises(PackageNotFoundError):
+ Distribution.from_name('does-not-exist')
+
+ def test_package_not_found_mentions_metadata(self):
+ """
+ When a package is not found, that could indicate that the
+        package is not installed or that it is installed without
+ metadata. Ensure the exception mentions metadata to help
+ guide users toward the cause. See #124.
+ """
+ with self.assertRaises(PackageNotFoundError) as ctx:
+ Distribution.from_name('does-not-exist')
+
+ assert "metadata" in str(ctx.exception)
+
+ def test_new_style_classes(self):
+ self.assertIsInstance(Distribution, type)
+ self.assertIsInstance(MetadataPathFinder, type)
+
+
+class ImportTests(fixtures.DistInfoPkg, unittest.TestCase):
+ def test_import_nonexistent_module(self):
+ # Ensure that the MetadataPathFinder does not crash an import of a
+ # non-existent module.
+ with self.assertRaises(ImportError):
+ importlib.import_module('does_not_exist')
+
+ def test_resolve(self):
+ entries = dict(entry_points()['entries'])
+ ep = entries['main']
+ self.assertEqual(ep.load().__name__, "main")
+
+ def test_entrypoint_with_colon_in_name(self):
+ entries = dict(entry_points()['entries'])
+ ep = entries['ns:sub']
+ self.assertEqual(ep.value, 'mod:main')
+
+ def test_resolve_without_attr(self):
+ ep = EntryPoint(
+ name='ep',
+ value='importlib_metadata',
+ group='grp',
+ )
+ assert ep.load() is importlib_metadata
+
+
+class NameNormalizationTests(
+ fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase):
+ @staticmethod
+ def pkg_with_dashes(site_dir):
+ """
+ Create minimal metadata for a package with dashes
+ in the name (and thus underscores in the filename).
+ """
+ metadata_dir = site_dir / 'my_pkg.dist-info'
+ metadata_dir.mkdir()
+ metadata = metadata_dir / 'METADATA'
+ with metadata.open('w') as strm:
+ strm.write('Version: 1.0\n')
+ return 'my-pkg'
+
+ def test_dashes_in_dist_name_found_as_underscores(self):
+ """
+ For a package with a dash in the name, the dist-info metadata
+ uses underscores in the name. Ensure the metadata loads.
+ """
+ pkg_name = self.pkg_with_dashes(self.site_dir)
+ assert version(pkg_name) == '1.0'
+
+ @staticmethod
+ def pkg_with_mixed_case(site_dir):
+ """
+ Create minimal metadata for a package with mixed case
+ in the name.
+ """
+ metadata_dir = site_dir / 'CherryPy.dist-info'
+ metadata_dir.mkdir()
+ metadata = metadata_dir / 'METADATA'
+ with metadata.open('w') as strm:
+ strm.write('Version: 1.0\n')
+ return 'CherryPy'
+
+ def test_dist_name_found_as_any_case(self):
+ """
+ Ensure the metadata loads when queried with any case.
+ """
+ pkg_name = self.pkg_with_mixed_case(self.site_dir)
+ assert version(pkg_name) == '1.0'
+ assert version(pkg_name.lower()) == '1.0'
+ assert version(pkg_name.upper()) == '1.0'
+
+
+class NonASCIITests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase):
+ @staticmethod
+ def pkg_with_non_ascii_description(site_dir):
+ """
+ Create minimal metadata for a package with non-ASCII in
+ the description.
+ """
+ metadata_dir = site_dir / 'portend.dist-info'
+ metadata_dir.mkdir()
+ metadata = metadata_dir / 'METADATA'
+ with metadata.open('w', encoding='utf-8') as fp:
+ fp.write('Description: pôrˈtend\n')
+ return 'portend'
+
+ @staticmethod
+ def pkg_with_non_ascii_description_egg_info(site_dir):
+ """
+ Create minimal metadata for an egg-info package with
+ non-ASCII in the description.
+ """
+ metadata_dir = site_dir / 'portend.dist-info'
+ metadata_dir.mkdir()
+ metadata = metadata_dir / 'METADATA'
+ with metadata.open('w', encoding='utf-8') as fp:
+ fp.write(textwrap.dedent("""
+ Name: portend
+
+ pôrˈtend
+ """).lstrip())
+ return 'portend'
+
+ def test_metadata_loads(self):
+ pkg_name = self.pkg_with_non_ascii_description(self.site_dir)
+ meta = metadata(pkg_name)
+ assert meta['Description'] == 'pôrˈtend'
+
+ def test_metadata_loads_egg_info(self):
+ pkg_name = self.pkg_with_non_ascii_description_egg_info(self.site_dir)
+ meta = metadata(pkg_name)
+ assert meta.get_payload() == 'pôrˈtend\n'
+
+
+class DiscoveryTests(fixtures.EggInfoPkg,
+ fixtures.DistInfoPkg,
+ unittest.TestCase):
+
+ def test_package_discovery(self):
+ dists = list(distributions())
+ assert all(
+ isinstance(dist, Distribution)
+ for dist in dists
+ )
+ assert any(
+ dist.metadata['Name'] == 'egginfo-pkg'
+ for dist in dists
+ )
+ assert any(
+ dist.metadata['Name'] == 'distinfo-pkg'
+ for dist in dists
+ )
+
+ def test_invalid_usage(self):
+ with self.assertRaises(ValueError):
+ list(distributions(context='something', name='else'))
+
+
+class DirectoryTest(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase):
+ def test_egg_info(self):
+ # make an `EGG-INFO` directory that's unrelated
+ self.site_dir.joinpath('EGG-INFO').mkdir()
+ # used to crash with `IsADirectoryError`
+ with self.assertRaises(PackageNotFoundError):
+ version('unknown-package')
+
+ def test_egg(self):
+ egg = self.site_dir.joinpath('foo-3.6.egg')
+ egg.mkdir()
+ with self.add_sys_path(egg):
+ with self.assertRaises(PackageNotFoundError):
+ version('foo')
+
+
+class MissingSysPath(fixtures.OnSysPath, unittest.TestCase):
+ site_dir = '/does-not-exist'
+
+ def test_discovery(self):
+ """
+ Discovering distributions should succeed even if
+ there is an invalid path on sys.path.
+ """
+ importlib_metadata.distributions()
+
+
+class InaccessibleSysPath(fixtures.OnSysPath, ffs.TestCase):
+ site_dir = '/access-denied'
+
+ def setUp(self):
+ super(InaccessibleSysPath, self).setUp()
+ self.setUpPyfakefs()
+ self.fs.create_dir(self.site_dir, perm_bits=000)
+
+ def test_discovery(self):
+ """
+ Discovering distributions should succeed even if
+ there is an invalid path on sys.path.
+ """
+ list(importlib_metadata.distributions())
+
+
+class TestEntryPoints(unittest.TestCase):
+ def __init__(self, *args):
+ super(TestEntryPoints, self).__init__(*args)
+ self.ep = importlib_metadata.EntryPoint('name', 'value', 'group')
+
+ def test_entry_point_pickleable(self):
+ revived = pickle.loads(pickle.dumps(self.ep))
+ assert revived == self.ep
+
+ def test_immutable(self):
+ """EntryPoints should be immutable"""
+ with self.assertRaises(AttributeError):
+ self.ep.name = 'badactor'
+
+ def test_repr(self):
+ assert 'EntryPoint' in repr(self.ep)
+ assert 'name=' in repr(self.ep)
+ assert "'name'" in repr(self.ep)
+
+ def test_hashable(self):
+ """EntryPoints should be hashable"""
+ hash(self.ep)
+
+ def test_json_dump(self):
+ """
+ json should not expect to be able to dump an EntryPoint
+ """
+ with self.assertRaises(Exception):
+ json.dumps(self.ep)
+
+ def test_module(self):
+ assert self.ep.module == 'value'
+
+ def test_attr(self):
+ assert self.ep.attr is None
+
+
+class FileSystem(
+ fixtures.OnSysPath, fixtures.SiteDir, fixtures.FileBuilder,
+ unittest.TestCase):
+ def test_unicode_dir_on_sys_path(self):
+ """
+ Ensure a Unicode subdirectory of a directory on sys.path
+ does not crash.
+ """
+ fixtures.build_files(
+ {self.unicode_filename(): {}},
+ prefix=self.site_dir,
+ )
+ list(distributions())
diff --git a/third_party/python/importlib_metadata/importlib_metadata/tests/test_zip.py b/third_party/python/importlib_metadata/importlib_metadata/tests/test_zip.py
new file mode 100644
index 0000000000..4aae933d9d
--- /dev/null
+++ b/third_party/python/importlib_metadata/importlib_metadata/tests/test_zip.py
@@ -0,0 +1,80 @@
+import sys
+import unittest
+
+from .. import (
+ distribution, entry_points, files, PackageNotFoundError,
+ version, distributions,
+ )
+
+try:
+ from importlib import resources
+ getattr(resources, 'files')
+ getattr(resources, 'as_file')
+except (ImportError, AttributeError):
+ import importlib_resources as resources
+
+try:
+ from contextlib import ExitStack
+except ImportError:
+ from contextlib2 import ExitStack
+
+
+class TestZip(unittest.TestCase):
+ root = 'importlib_metadata.tests.data'
+
+ def _fixture_on_path(self, filename):
+ pkg_file = resources.files(self.root).joinpath(filename)
+ file = self.resources.enter_context(resources.as_file(pkg_file))
+ assert file.name.startswith('example-'), file.name
+ sys.path.insert(0, str(file))
+ self.resources.callback(sys.path.pop, 0)
+
+ def setUp(self):
+ # Find the path to the example-*.whl so we can add it to the front of
+ # sys.path, where we'll then try to find the metadata thereof.
+ self.resources = ExitStack()
+ self.addCleanup(self.resources.close)
+ self._fixture_on_path('example-21.12-py3-none-any.whl')
+
+ def test_zip_version(self):
+ self.assertEqual(version('example'), '21.12')
+
+ def test_zip_version_does_not_match(self):
+ with self.assertRaises(PackageNotFoundError):
+ version('definitely-not-installed')
+
+ def test_zip_entry_points(self):
+ scripts = dict(entry_points()['console_scripts'])
+ entry_point = scripts['example']
+ self.assertEqual(entry_point.value, 'example:main')
+ entry_point = scripts['Example']
+ self.assertEqual(entry_point.value, 'example:main')
+
+ def test_missing_metadata(self):
+ self.assertIsNone(distribution('example').read_text('does not exist'))
+
+ def test_case_insensitive(self):
+ self.assertEqual(version('Example'), '21.12')
+
+ def test_files(self):
+ for file in files('example'):
+ path = str(file.dist.locate_file(file))
+ assert '.whl/' in path, path
+
+ def test_one_distribution(self):
+ dists = list(distributions(path=sys.path[:1]))
+ assert len(dists) == 1
+
+
+class TestEgg(TestZip):
+ def setUp(self):
+ # Find the path to the example-*.egg so we can add it to the front of
+ # sys.path, where we'll then try to find the metadata thereof.
+ self.resources = ExitStack()
+ self.addCleanup(self.resources.close)
+ self._fixture_on_path('example-21.12-py3.6.egg')
+
+ def test_files(self):
+ for file in files('example'):
+ path = str(file.dist.locate_file(file))
+ assert '.egg/' in path, path
diff --git a/third_party/python/importlib_metadata/prepare/example/example/__init__.py b/third_party/python/importlib_metadata/prepare/example/example/__init__.py
new file mode 100644
index 0000000000..ba73b74339
--- /dev/null
+++ b/third_party/python/importlib_metadata/prepare/example/example/__init__.py
@@ -0,0 +1,2 @@
+def main():
+ return 'example'
diff --git a/third_party/python/importlib_metadata/prepare/example/setup.py b/third_party/python/importlib_metadata/prepare/example/setup.py
new file mode 100644
index 0000000000..8663ad389a
--- /dev/null
+++ b/third_party/python/importlib_metadata/prepare/example/setup.py
@@ -0,0 +1,10 @@
+from setuptools import setup
+setup(
+ name='example',
+ version='21.12',
+ license='Apache Software License',
+ packages=['example'],
+ entry_points={
+ 'console_scripts': ['example = example:main', 'Example=example:main'],
+ },
+ )
diff --git a/third_party/python/importlib_metadata/pyproject.toml b/third_party/python/importlib_metadata/pyproject.toml
new file mode 100644
index 0000000000..e5c3a6a455
--- /dev/null
+++ b/third_party/python/importlib_metadata/pyproject.toml
@@ -0,0 +1,2 @@
+[build-system]
+requires = ["setuptools>=30.3", "wheel", "setuptools_scm"]
diff --git a/third_party/python/importlib_metadata/setup.cfg b/third_party/python/importlib_metadata/setup.cfg
new file mode 100644
index 0000000000..2c178ce21a
--- /dev/null
+++ b/third_party/python/importlib_metadata/setup.cfg
@@ -0,0 +1,61 @@
+[metadata]
+name = importlib_metadata
+author = Barry Warsaw
+author_email = barry@python.org
+url = http://importlib-metadata.readthedocs.io/
+description = Read metadata from Python packages
+long_description = file: README.rst
+license = Apache Software License
+classifiers =
+ Development Status :: 3 - Alpha
+ Intended Audience :: Developers
+ License :: OSI Approved :: Apache Software License
+ Topic :: Software Development :: Libraries
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 2
+
+[options]
+python_requires = >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*
+setup_requires = setuptools-scm
+install_requires =
+ zipp>=0.5
+ pathlib2; python_version < '3'
+ contextlib2; python_version < '3'
+ configparser>=3.5; python_version < '3'
+packages = find:
+
+[options.package_data]
+* = *.zip, *.file, *.txt, *.toml
+importlib_metadata =
+ docs/*
+ docs/_static/*
+importlib_metadata.tests.data =
+ *.egg
+ *.whl
+
+[mypy]
+ignore_missing_imports = True
+strict_optional = False
+
+[mypy-importlib_metadata.docs.*]
+ignore_errors = True
+
+[mypy-importlib_metadata.tests.*]
+ignore_errors = True
+
+[wheel]
+universal = 1
+
+[options.extras_require]
+testing =
+ importlib_resources>=1.3; python_version < "3.9"
+ packaging
+ pep517
+docs =
+ sphinx
+ rst.linker
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/importlib_metadata/setup.py b/third_party/python/importlib_metadata/setup.py
new file mode 100644
index 0000000000..d5d43d7c93
--- /dev/null
+++ b/third_party/python/importlib_metadata/setup.py
@@ -0,0 +1,3 @@
+from setuptools import setup
+
+setup(use_scm_version=True)
diff --git a/third_party/python/importlib_metadata/tox.ini b/third_party/python/importlib_metadata/tox.ini
new file mode 100644
index 0000000000..b2775cd1e2
--- /dev/null
+++ b/third_party/python/importlib_metadata/tox.ini
@@ -0,0 +1,97 @@
+[tox]
+envlist = {py27,py35,py36,py37,py38}{,-cov,-diffcov},qa,docs,perf
+skip_missing_interpreters = True
+minversion = 3.2
+# Ensure that a late version of pip is used even on tox-venv.
+requires =
+ tox-pip-version>=0.0.6
+
+[testenv]
+pip_version = pip
+commands =
+ !cov,!diffcov: python -m unittest discover {posargs}
+ cov,diffcov: python -m coverage run {[coverage]rc} -m unittest discover {posargs}
+ cov,diffcov: python -m coverage combine {[coverage]rc}
+ cov: python -m coverage html {[coverage]rc}
+ cov: python -m coverage xml {[coverage]rc}
+ cov: python -m coverage report -m {[coverage]rc} --fail-under=100
+ diffcov: python -m coverage xml {[coverage]rc}
+ diffcov: diff-cover coverage.xml --html-report diffcov.html
+ diffcov: diff-cover coverage.xml --fail-under=100
+usedevelop = True
+passenv =
+ PYTHON*
+ LANG*
+ LC_*
+ PYV
+deps =
+ cov,diffcov: coverage>=4.5
+ diffcov: diff_cover
+ pyfakefs
+setenv =
+ cov: COVERAGE_PROCESS_START={[coverage]rcfile}
+ cov: COVERAGE_OPTIONS="-p"
+ cov: COVERAGE_FILE={toxinidir}/.coverage
+ py27: PYV=2
+ py35,py36,py37,py38: PYV=3
+ # workaround deprecation warnings in pip's vendored packages
+ PYTHONWARNINGS=ignore:Using or importing the ABCs:DeprecationWarning:pip._vendor
+extras =
+ testing
+
+
+[testenv:qa]
+basepython = python3.7
+commands =
+ python -m flake8 importlib_metadata
+ mypy importlib_metadata
+deps =
+ mypy
+ flake8
+ flufl.flake8
+extras =
+
+
+[testenv:docs]
+basepython = python3
+commands =
+ sphinx-build importlib_metadata/docs build/sphinx/html
+extras =
+ docs
+
+
+[testenv:perf]
+use_develop = False
+deps =
+ ipython
+commands =
+ python -m timeit -s 'import importlib_metadata' -- 'importlib_metadata.distribution("ipython")'
+
+
+[testenv:release]
+basepython = python3
+deps =
+ twine
+ wheel
+ setuptools
+ keyring
+ setuptools_scm
+passenv =
+ TWINE_PASSWORD
+setenv =
+ TWINE_USERNAME = {env:TWINE_USERNAME:__token__}
+commands =
+ python setup.py sdist bdist_wheel
+ python -m twine {posargs} upload dist/*
+
+
+[coverage]
+rcfile = {toxinidir}/coverage.ini
+rc = --rcfile="{[coverage]rcfile}"
+
+
+[flake8]
+hang-closing = True
+jobs = 1
+max-line-length = 79
+enable-extensions = U4
diff --git a/third_party/python/iso8601/LICENSE b/third_party/python/iso8601/LICENSE
new file mode 100644
index 0000000000..4eb54eaff9
--- /dev/null
+++ b/third_party/python/iso8601/LICENSE
@@ -0,0 +1,20 @@
+Copyright (c) 2007 - 2015 Michael Twomey
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/third_party/python/iso8601/MANIFEST.in b/third_party/python/iso8601/MANIFEST.in
new file mode 100644
index 0000000000..ea47448e5b
--- /dev/null
+++ b/third_party/python/iso8601/MANIFEST.in
@@ -0,0 +1,2 @@
+recursive-include iso8601 *.py
+include README.rst LICENSE tox.ini setup.py *requirements.txt \ No newline at end of file
diff --git a/third_party/python/iso8601/PKG-INFO b/third_party/python/iso8601/PKG-INFO
new file mode 100644
index 0000000000..17a7fdc4f0
--- /dev/null
+++ b/third_party/python/iso8601/PKG-INFO
@@ -0,0 +1,203 @@
+Metadata-Version: 1.1
+Name: iso8601
+Version: 0.1.12
+Summary: Simple module to parse ISO 8601 dates
+Home-page: https://bitbucket.org/micktwomey/pyiso8601
+Author: Michael Twomey
+Author-email: micktwomey+iso8601@gmail.com
+License: MIT
+Description: Simple module to parse ISO 8601 dates
+
+ This module parses the most common forms of ISO 8601 date strings (e.g.
+ 2007-01-14T20:34:22+00:00) into datetime objects.
+
+ >>> import iso8601
+ >>> iso8601.parse_date("2007-01-25T12:00:00Z")
+ datetime.datetime(2007, 1, 25, 12, 0, tzinfo=<iso8601.Utc>)
+ >>>
+
+ See the LICENSE file for the license this package is released under.
+
+ If you want more full featured parsing look at:
+
+ - http://labix.org/python-dateutil - python-dateutil
+
+ Parsed Formats
+ ==============
+
+ You can parse full date + times, or just the date. In both cases a datetime instance is returned but with missing times defaulting to 0, and missing days / months defaulting to 1.
+
+ Dates
+ -----
+
+ - YYYY-MM-DD
+ - YYYYMMDD
+ - YYYY-MM (defaults to 1 for the day)
+ - YYYY (defaults to 1 for month and day)
+
+ Times
+ -----
+
+ - hh:mm:ss.nn
+ - hhmmss.nn
+ - hh:mm (defaults to 0 for seconds)
+ - hhmm (defaults to 0 for seconds)
+ - hh (defaults to 0 for minutes and seconds)
+
+ Time Zones
+ ----------
+
+ - Nothing, will use the default timezone given (which in turn defaults to UTC).
+ - Z (UTC)
+ - +/-hh:mm
+ - +/-hhmm
+ - +/-hh
+
+ Where it Differs From ISO 8601
+ ==============================
+
+ Known differences from the ISO 8601 spec:
+
+ - You can use a " " (space) instead of T for separating date from time.
+ - Days and months without a leading 0 (2 vs 02) will be parsed.
+ - If time zone information is omitted the default time zone given is used (which in turn defaults to UTC). Use a default of None to yield naive datetime instances.
+
+ Homepage
+ ========
+
+ - Documentation: http://pyiso8601.readthedocs.org/
+ - Source: https://bitbucket.org/micktwomey/pyiso8601/
+
+ This was originally hosted at https://code.google.com/p/pyiso8601/
+
+ References
+ ==========
+
+ - http://en.wikipedia.org/wiki/ISO_8601
+
+ - http://www.cl.cam.ac.uk/~mgk25/iso-time.html - simple overview
+
+ - http://hydracen.com/dx/iso8601.htm - more detailed enumeration of valid formats.
+
+ Testing
+ =======
+
+ 1. pip install -r dev-requirements.txt
+ 2. tox
+
+ Note that you need all the pythons installed to perform a tox run (see below). Homebrew helps a lot on the mac, however you wind up having to add cellars to your PATH or symlinking the pythonX.Y executables.
+
+ Alternatively, to test only with your current python:
+
+ 1. pip install -r dev-requirements.txt
+ 2. py.test --verbose iso8601
+
+ Supported Python Versions
+ =========================
+
+ Tested against:
+
+ - Python 2.6
+ - Python 2.7
+ - Python 3.2
+ - Python 3.3
+ - Python 3.4
+ - Python 3.5
+ - Python 3.6
+ - PyPy
+ - PyPy 3
+
+ Python 3.0 and 3.1 are untested but should work (tests didn't run under them when last tried).
+
+ Jython is untested but should work (tests failed to run).
+
+ Python 2.5 is not supported (too old for the tests for the most part). It could work with some small changes but I'm not supporting it.
+
+ Changes
+ =======
+
+ 0.1.12
+ ------
+
+ * Fix class reference for iso8601.Utc in module docstring (thanks to felixschwarz in https://bitbucket.org/micktwomey/pyiso8601/pull-requests/7/fix-class-reference-for-iso8601utc-in/diff)
+
+ 0.1.11
+ ------
+
+ * Remove logging (thanks to Quentin Pradet in https://bitbucket.org/micktwomey/pyiso8601/pull-requests/6/remove-debug-logging/diff)
+ * Add support for , as separator for fractional part (thanks to ecksun in https://bitbucket.org/micktwomey/pyiso8601/pull-requests/5/add-support-for-as-separator-for/diff)
+ * Add Python 3.4 and 3.5 to tox test config.
+ * Add PyPy 3 to tox test config.
+ * Link to documentation at http://pyiso8601.readthedocs.org/
+
+
+ 0.1.10
+ ------
+
+ * Fixes https://bitbucket.org/micktwomey/pyiso8601/issue/14/regression-yyyy-mm-no-longer-parses (thanks to Kevin Gill for reporting)
+ * Adds YYYY as a valid date (uses 1 for both month and day)
+ * Woo, semantic versioning, .10 at last.
+
+ 0.1.9
+ -----
+
+ * Lots of fixes tightening up parsing from jdanjou. In particular more invalid cases are treated as errors. Also includes fixes for tests (which is how these invalid cases got in in the first place).
+ * Release addresses https://bitbucket.org/micktwomey/pyiso8601/issue/13/new-release-based-on-critical-bug-fix
+
+ 0.1.8
+ -----
+
+ * Remove +/- chars from README.rst and ensure tox tests run using LC_ALL=C. The setup.py egg_info command was failing in python 3.* on some setups (basically any where the system encoding wasn't UTF-8). (https://bitbucket.org/micktwomey/pyiso8601/issue/10/setuppy-broken-for-python-33) (thanks to klmitch)
+
+ 0.1.7
+ -----
+
+ * Fix parsing of microseconds (https://bitbucket.org/micktwomey/pyiso8601/issue/9/regression-parsing-microseconds) (Thanks to dims and bnemec)
+
+ 0.1.6
+ -----
+
+ * Correct negative timezone offsets (https://bitbucket.org/micktwomey/pyiso8601/issue/8/015-parses-negative-timezones-incorrectly) (thanks to Jonathan Lange)
+
+ 0.1.5
+ -----
+
+ * Wow, it's alive! First update since 2007
+ * Moved over to https://bitbucket.org/micktwomey/pyiso8601
+ * Add support for python 3. https://code.google.com/p/pyiso8601/issues/detail?id=23 (thanks to zefciu)
+ * Switched to py.test and tox for testing
+ * Make seconds optional in date format ("1997-07-16T19:20+01:00" now valid). https://bitbucket.org/micktwomey/pyiso8601/pull-request/1/make-the-inclusion-of-seconds-optional-in/diff (thanks to Chris Down)
+ * Correctly raise ParseError for more invalid inputs (https://bitbucket.org/micktwomey/pyiso8601/issue/1/raise-parseerror-for-invalid-input) (thanks to manish.tomar)
+ * Support more variations of ISO 8601 dates, times and time zone specs.
+ * Fix microsecond rounding issues (https://bitbucket.org/micktwomey/pyiso8601/issue/2/roundoff-issues-when-parsing-decimal) (thanks to nielsenb@jetfuse.net)
+ * Fix pickling and deepcopy of returned datetime objects (https://bitbucket.org/micktwomey/pyiso8601/issue/3/dates-returned-by-parse_date-do-not) (thanks to fogathmann and john@openlearning.com)
+ * Fix timezone offsets without a separator (https://bitbucket.org/micktwomey/pyiso8601/issue/4/support-offsets-without-a-separator) (thanks to joe.walton.gglcd)
+ * "Z" produces default timezone if one is specified (https://bitbucket.org/micktwomey/pyiso8601/issue/5/z-produces-default-timezone-if-one-is) (thanks to vfaronov). This one may cause problems if you've been relying on default_timezone to use that timezone instead of UTC. Strictly speaking that was wrong but this is potentially backwards incompatible.
+ * Handle compact date format (https://bitbucket.org/micktwomey/pyiso8601/issue/6/handle-compact-date-format) (thanks to rvandolson@esri.com)
+
+ 0.1.4
+ -----
+
+ * The default_timezone argument wasn't being passed through correctly, UTC was being used in every case. Fixes issue 10.
+
+ 0.1.3
+ -----
+
+ * Fixed the microsecond handling, the generated microsecond values were way too small. Fixes issue 9.
+
+ 0.1.2
+ -----
+
+ * Adding ParseError to __all__ in iso8601 module, allows people to import it. Addresses issue 7.
+ * Be a little more flexible when dealing with dates without leading zeroes. This violates the spec a little, but handles more dates as seen in the field. Addresses issue 6.
+ * Allow date/time separators other than T.
+
+ 0.1.1
+ -----
+
+ * When parsing dates without a timezone the specified default is used. If no default is specified then UTC is used. Addresses issue 4.
+
+Platform: UNKNOWN
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
diff --git a/third_party/python/iso8601/README.rst b/third_party/python/iso8601/README.rst
new file mode 100644
index 0000000000..a9f1761e09
--- /dev/null
+++ b/third_party/python/iso8601/README.rst
@@ -0,0 +1,190 @@
+Simple module to parse ISO 8601 dates
+
+This module parses the most common forms of ISO 8601 date strings (e.g.
+2007-01-14T20:34:22+00:00) into datetime objects.
+
+>>> import iso8601
+>>> iso8601.parse_date("2007-01-25T12:00:00Z")
+datetime.datetime(2007, 1, 25, 12, 0, tzinfo=<iso8601.Utc>)
+>>>
+
+See the LICENSE file for the license this package is released under.
+
+If you want more full-featured parsing, look at:
+
+- http://labix.org/python-dateutil - python-dateutil
+
+Parsed Formats
+==============
+
+You can parse full date + times, or just the date. In both cases a datetime instance is returned, but with missing times defaulting to 0 and missing days / months defaulting to 1 (see the examples after the format lists below).
+
+Dates
+-----
+
+- YYYY-MM-DD
+- YYYYMMDD
+- YYYY-MM (defaults to 1 for the day)
+- YYYY (defaults to 1 for month and day)
+
+Times
+-----
+
+- hh:mm:ss.nn
+- hhmmss.nn
+- hh:mm (defaults to 0 for seconds)
+- hhmm (defaults to 0 for seconds)
+- hh (defaults to 0 for minutes and seconds)
+
+Time Zones
+----------
+
+- Nothing, will use the default timezone given (which in turn defaults to UTC).
+- Z (UTC)
+- +/-hh:mm
+- +/-hhmm
+- +/-hh
+
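+For example (assuming ``iso8601`` has been imported as above; the tzinfo repr
+shown matches the example at the top and can differ slightly between Python
+versions):
+
+>>> iso8601.parse_date("2007-01")
+datetime.datetime(2007, 1, 1, 0, 0, tzinfo=<iso8601.Utc>)
+>>> iso8601.parse_date("2007-01-25T12:00")
+datetime.datetime(2007, 1, 25, 12, 0, tzinfo=<iso8601.Utc>)
+>>>
+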
+Where it Differs From ISO 8601
+==============================
+
+Known differences from the ISO 8601 spec:
+
+- You can use a " " (space) instead of T for separating date from time.
+- Days and months without a leading 0 (2 vs 02) will be parsed.
+- If time zone information is omitted, the default time zone given is used (which in turn defaults to UTC). Use a default of None to yield naive datetime instances, as in the example below.
+
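+For example (again assuming ``iso8601`` is imported; note that the second
+result is a naive datetime):
+
+>>> iso8601.parse_date("2007-01-25 12:00:00")
+datetime.datetime(2007, 1, 25, 12, 0, tzinfo=<iso8601.Utc>)
+>>> iso8601.parse_date("2007-01-25T12:00:00", default_timezone=None)
+datetime.datetime(2007, 1, 25, 12, 0)
+>>>
+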
+Homepage
+========
+
+- Documentation: http://pyiso8601.readthedocs.org/
+- Source: https://bitbucket.org/micktwomey/pyiso8601/
+
+This was originally hosted at https://code.google.com/p/pyiso8601/
+
+References
+==========
+
+- http://en.wikipedia.org/wiki/ISO_8601
+
+- http://www.cl.cam.ac.uk/~mgk25/iso-time.html - simple overview
+
+- http://hydracen.com/dx/iso8601.htm - more detailed enumeration of valid formats.
+
+Testing
+=======
+
+1. pip install -r dev-requirements.txt
+2. tox
+
+Note that you need all the pythons installed to perform a tox run (see below). Homebrew helps a lot on the Mac; however, you wind up having to add cellars to your PATH or symlink the pythonX.Y executables.
+
+Alternatively, to test only with your current python:
+
+1. pip install -r dev-requirements.txt
+2. py.test --verbose iso8601
+
+Supported Python Versions
+=========================
+
+Tested against:
+
+- Python 2.6
+- Python 2.7
+- Python 3.2
+- Python 3.3
+- Python 3.4
+- Python 3.5
+- Python 3.6
+- PyPy
+- PyPy 3
+
+Python 3.0 and 3.1 are untested but should work (tests didn't run under them when last tried).
+
+Jython is untested but should work (tests failed to run).
+
+Python 2.5 is not supported (too old for the tests for the most part). It could work with some small changes but I'm not supporting it.
+
+Changes
+=======
+
+0.1.12
+------
+
+* Fix class reference for iso8601.Utc in module docstring (thanks to felixschwarz in https://bitbucket.org/micktwomey/pyiso8601/pull-requests/7/fix-class-reference-for-iso8601utc-in/diff)
+
+0.1.11
+------
+
+* Remove logging (thanks to Quentin Pradet in https://bitbucket.org/micktwomey/pyiso8601/pull-requests/6/remove-debug-logging/diff)
+* Add support for , as separator for fractional part (thanks to ecksun in https://bitbucket.org/micktwomey/pyiso8601/pull-requests/5/add-support-for-as-separator-for/diff)
+* Add Python 3.4 and 3.5 to tox test config.
+* Add PyPy 3 to tox test config.
+* Link to documentation at http://pyiso8601.readthedocs.org/
+
+
+0.1.10
+------
+
+* Fixes https://bitbucket.org/micktwomey/pyiso8601/issue/14/regression-yyyy-mm-no-longer-parses (thanks to Kevin Gill for reporting)
+* Adds YYYY as a valid date (uses 1 for both month and day)
+* Woo, semantic versioning, .10 at last.
+
+0.1.9
+-----
+
+* Lots of fixes tightening up parsing from jdanjou. In particular more invalid cases are treated as errors. Also includes fixes for tests (which is how these invalid cases got in in the first place).
+* Release addresses https://bitbucket.org/micktwomey/pyiso8601/issue/13/new-release-based-on-critical-bug-fix
+
+0.1.8
+-----
+
+* Remove +/- chars from README.rst and ensure tox tests run using LC_ALL=C. The setup.py egg_info command was failing in python 3.* on some setups (basically any where the system encoding wasn't UTF-8). (https://bitbucket.org/micktwomey/pyiso8601/issue/10/setuppy-broken-for-python-33) (thanks to klmitch)
+
+0.1.7
+-----
+
+* Fix parsing of microseconds (https://bitbucket.org/micktwomey/pyiso8601/issue/9/regression-parsing-microseconds) (Thanks to dims and bnemec)
+
+0.1.6
+-----
+
+* Correct negative timezone offsets (https://bitbucket.org/micktwomey/pyiso8601/issue/8/015-parses-negative-timezones-incorrectly) (thanks to Jonathan Lange)
+
+0.1.5
+-----
+
+* Wow, it's alive! First update since 2007
+* Moved over to https://bitbucket.org/micktwomey/pyiso8601
+* Add support for python 3. https://code.google.com/p/pyiso8601/issues/detail?id=23 (thanks to zefciu)
+* Switched to py.test and tox for testing
+* Make seconds optional in date format ("1997-07-16T19:20+01:00" now valid). https://bitbucket.org/micktwomey/pyiso8601/pull-request/1/make-the-inclusion-of-seconds-optional-in/diff (thanks to Chris Down)
+* Correctly raise ParseError for more invalid inputs (https://bitbucket.org/micktwomey/pyiso8601/issue/1/raise-parseerror-for-invalid-input) (thanks to manish.tomar)
+* Support more variations of ISO 8601 dates, times and time zone specs.
+* Fix microsecond rounding issues (https://bitbucket.org/micktwomey/pyiso8601/issue/2/roundoff-issues-when-parsing-decimal) (thanks to nielsenb@jetfuse.net)
+* Fix pickling and deepcopy of returned datetime objects (https://bitbucket.org/micktwomey/pyiso8601/issue/3/dates-returned-by-parse_date-do-not) (thanks to fogathmann and john@openlearning.com)
+* Fix timezone offsets without a separator (https://bitbucket.org/micktwomey/pyiso8601/issue/4/support-offsets-without-a-separator) (thanks to joe.walton.gglcd)
+* "Z" produces default timezone if one is specified (https://bitbucket.org/micktwomey/pyiso8601/issue/5/z-produces-default-timezone-if-one-is) (thanks to vfaronov). This one may cause problems if you've been relying on default_timezone to use that timezone instead of UTC. Strictly speaking that was wrong but this is potentially backwards incompatible.
+* Handle compact date format (https://bitbucket.org/micktwomey/pyiso8601/issue/6/handle-compact-date-format) (thanks to rvandolson@esri.com)
+
+0.1.4
+-----
+
+* The default_timezone argument wasn't being passed through correctly, UTC was being used in every case. Fixes issue 10.
+
+0.1.3
+-----
+
+* Fixed the microsecond handling, the generated microsecond values were way too small. Fixes issue 9.
+
+0.1.2
+-----
+
+* Adding ParseError to __all__ in iso8601 module, allows people to import it. Addresses issue 7.
+* Be a little more flexible when dealing with dates without leading zeroes. This violates the spec a little, but handles more dates as seen in the field. Addresses issue 6.
+* Allow date/time separators other than T.
+
+0.1.1
+-----
+
+* When parsing dates without a timezone the specified default is used. If no default is specified then UTC is used. Addresses issue 4.
diff --git a/third_party/python/iso8601/dev-requirements.txt b/third_party/python/iso8601/dev-requirements.txt
new file mode 100644
index 0000000000..29a220a917
--- /dev/null
+++ b/third_party/python/iso8601/dev-requirements.txt
@@ -0,0 +1,6 @@
+devpi>=1.2.1
+pytest>=2.5.2
+Sphinx>=1.2.1
+tox-pyenv>=1.0.3
+tox>=1.7.0
+wheel>=0.22.0 \ No newline at end of file
diff --git a/third_party/python/iso8601/iso8601/__init__.py b/third_party/python/iso8601/iso8601/__init__.py
new file mode 100644
index 0000000000..11b1adcbc9
--- /dev/null
+++ b/third_party/python/iso8601/iso8601/__init__.py
@@ -0,0 +1 @@
+from .iso8601 import *
diff --git a/third_party/python/iso8601/iso8601/iso8601.py b/third_party/python/iso8601/iso8601/iso8601.py
new file mode 100644
index 0000000000..0c149f679b
--- /dev/null
+++ b/third_party/python/iso8601/iso8601/iso8601.py
@@ -0,0 +1,214 @@
+"""ISO 8601 date time string parsing
+
+Basic usage:
+>>> import iso8601
+>>> iso8601.parse_date("2007-01-25T12:00:00Z")
+datetime.datetime(2007, 1, 25, 12, 0, tzinfo=<iso8601.Utc ...>)
+>>>
+
+"""
+
+import datetime
+from decimal import Decimal
+import sys
+import re
+
+__all__ = ["parse_date", "ParseError", "UTC",
+ "FixedOffset"]
+
+if sys.version_info >= (3, 0, 0):
+ _basestring = str
+else:
+ _basestring = basestring
+
+
+# Adapted from http://delete.me.uk/2005/03/iso8601.html
+ISO8601_REGEX = re.compile(
+ r"""
+ (?P<year>[0-9]{4})
+ (
+ (
+ (-(?P<monthdash>[0-9]{1,2}))
+ |
+ (?P<month>[0-9]{2})
+ (?!$) # Don't allow YYYYMM
+ )
+ (
+ (
+ (-(?P<daydash>[0-9]{1,2}))
+ |
+ (?P<day>[0-9]{2})
+ )
+ (
+ (
+ (?P<separator>[ T])
+ (?P<hour>[0-9]{2})
+ (:{0,1}(?P<minute>[0-9]{2})){0,1}
+ (
+ :{0,1}(?P<second>[0-9]{1,2})
+ ([.,](?P<second_fraction>[0-9]+)){0,1}
+ ){0,1}
+ (?P<timezone>
+ Z
+ |
+ (
+ (?P<tz_sign>[-+])
+ (?P<tz_hour>[0-9]{2})
+ :{0,1}
+ (?P<tz_minute>[0-9]{2}){0,1}
+ )
+ ){0,1}
+ ){0,1}
+ )
+ ){0,1} # YYYY-MM
+ ){0,1} # YYYY only
+ $
+ """,
+ re.VERBOSE
+)
+
+class ParseError(Exception):
+ """Raised when there is a problem parsing a date string"""
+
+if sys.version_info >= (3, 2, 0):
+ UTC = datetime.timezone.utc
+ def FixedOffset(offset_hours, offset_minutes, name):
+ return datetime.timezone(
+ datetime.timedelta(
+ hours=offset_hours, minutes=offset_minutes),
+ name)
+else:
+ # Yoinked from python docs
+ ZERO = datetime.timedelta(0)
+ class Utc(datetime.tzinfo):
+ """UTC Timezone
+
+ """
+ def utcoffset(self, dt):
+ return ZERO
+
+ def tzname(self, dt):
+ return "UTC"
+
+ def dst(self, dt):
+ return ZERO
+
+ def __repr__(self):
+ return "<iso8601.Utc>"
+
+ UTC = Utc()
+
+ class FixedOffset(datetime.tzinfo):
+ """Fixed offset in hours and minutes from UTC
+
+ """
+ def __init__(self, offset_hours, offset_minutes, name):
+ self.__offset_hours = offset_hours # Keep for later __getinitargs__
+ self.__offset_minutes = offset_minutes # Keep for later __getinitargs__
+ self.__offset = datetime.timedelta(
+ hours=offset_hours, minutes=offset_minutes)
+ self.__name = name
+
+ def __eq__(self, other):
+ if isinstance(other, FixedOffset):
+ return (
+ (other.__offset == self.__offset)
+ and
+ (other.__name == self.__name)
+ )
+ return NotImplemented
+
+ def __getinitargs__(self):
+ return (self.__offset_hours, self.__offset_minutes, self.__name)
+
+ def utcoffset(self, dt):
+ return self.__offset
+
+ def tzname(self, dt):
+ return self.__name
+
+ def dst(self, dt):
+ return ZERO
+
+ def __repr__(self):
+ return "<FixedOffset %r %r>" % (self.__name, self.__offset)
+
+
+def to_int(d, key, default_to_zero=False, default=None, required=True):
+ """Pull a value from the dict and convert to int
+
+ :param default_to_zero: If the value is None or empty, treat it as zero
+ :param default: If the value is missing in the dict use this default
+
+ """
+ value = d.get(key) or default
+ if (value in ["", None]) and default_to_zero:
+ return 0
+ if value is None:
+ if required:
+ raise ParseError("Unable to read %s from %s" % (key, d))
+ else:
+ return int(value)
+
+def parse_timezone(matches, default_timezone=UTC):
+ """Parses ISO 8601 time zone specs into tzinfo offsets
+
+ """
+
+ if matches["timezone"] == "Z":
+ return UTC
+ # This isn't strictly correct, but it's common to encounter dates without
+ # timezones so I'll assume the default (which defaults to UTC).
+ # Addresses issue 4.
+ if matches["timezone"] is None:
+ return default_timezone
+ sign = matches["tz_sign"]
+ hours = to_int(matches, "tz_hour")
+ minutes = to_int(matches, "tz_minute", default_to_zero=True)
+ description = "%s%02d:%02d" % (sign, hours, minutes)
+ if sign == "-":
+ hours = -hours
+ minutes = -minutes
+ return FixedOffset(hours, minutes, description)
+
+def parse_date(datestring, default_timezone=UTC):
+ """Parses ISO 8601 dates into datetime objects
+
+ The timezone is parsed from the date string. However it is quite common to
+ have dates without a timezone (not strictly correct). In this case the
+ default timezone specified in default_timezone is used. This is UTC by
+ default.
+
+ :param datestring: The date to parse as a string
+ :param default_timezone: A datetime tzinfo instance to use when no timezone
+ is specified in the datestring. If this is set to
+ None then a naive datetime object is returned.
+ :returns: A datetime.datetime instance
+ :raises: ParseError when there is a problem parsing the date or
+ constructing the datetime instance.
+
+ """
+ if not isinstance(datestring, _basestring):
+ raise ParseError("Expecting a string %r" % datestring)
+ m = ISO8601_REGEX.match(datestring)
+ if not m:
+ raise ParseError("Unable to parse date string %r" % datestring)
+ groups = m.groupdict()
+
+ tz = parse_timezone(groups, default_timezone=default_timezone)
+
+ groups["second_fraction"] = int(Decimal("0.%s" % (groups["second_fraction"] or 0)) * Decimal("1000000.0"))
+
+ try:
+ return datetime.datetime(
+ year=to_int(groups, "year"),
+ month=to_int(groups, "month", default=to_int(groups, "monthdash", required=False, default=1)),
+ day=to_int(groups, "day", default=to_int(groups, "daydash", required=False, default=1)),
+ hour=to_int(groups, "hour", default_to_zero=True),
+ minute=to_int(groups, "minute", default_to_zero=True),
+ second=to_int(groups, "second", default_to_zero=True),
+ microsecond=groups["second_fraction"],
+ tzinfo=tz,
+ )
+ except Exception as e:
+ raise ParseError(e)
diff --git a/third_party/python/iso8601/iso8601/test_iso8601.py b/third_party/python/iso8601/iso8601/test_iso8601.py
new file mode 100644
index 0000000000..0d01ffbb85
--- /dev/null
+++ b/third_party/python/iso8601/iso8601/test_iso8601.py
@@ -0,0 +1,102 @@
+# coding=UTF-8
+from __future__ import absolute_import
+
+import copy
+import datetime
+import pickle
+
+import pytest
+
+from iso8601 import iso8601
+
+def test_iso8601_regex():
+ assert iso8601.ISO8601_REGEX.match("2006-10-11T00:14:33Z")
+
+def test_fixedoffset_eq():
+ # See https://bitbucket.org/micktwomey/pyiso8601/issues/19
+ datetime.tzinfo() == iso8601.FixedOffset(2, 0, '+2:00')
+
+def test_parse_no_timezone_different_default():
+ tz = iso8601.FixedOffset(2, 0, "test offset")
+ d = iso8601.parse_date("2007-01-01T08:00:00", default_timezone=tz)
+ assert d == datetime.datetime(2007, 1, 1, 8, 0, 0, 0, tz)
+ assert d.tzinfo == tz
+
+def test_parse_utc_different_default():
+ """Z should mean 'UTC', not 'default'.
+
+ """
+ tz = iso8601.FixedOffset(2, 0, "test offset")
+ d = iso8601.parse_date("2007-01-01T08:00:00Z", default_timezone=tz)
+ assert d == datetime.datetime(2007, 1, 1, 8, 0, 0, 0, iso8601.UTC)
+
+@pytest.mark.parametrize("invalid_date, error_string", [
+ ("2013-10-", "Unable to parse date string"),
+ ("2013-", "Unable to parse date string"),
+ ("", "Unable to parse date string"),
+ (None, "Expecting a string"),
+ ("wibble", "Unable to parse date string"),
+ ("23", "Unable to parse date string"),
+ ("131015T142533Z", "Unable to parse date string"),
+ ("131015", "Unable to parse date string"),
+ ("20141", "Unable to parse date string"),
+ ("201402", "Unable to parse date string"),
+ ("2007-06-23X06:40:34.00Z", "Unable to parse date string"), # https://code.google.com/p/pyiso8601/issues/detail?id=14
+ ("2007-06-23 06:40:34.00Zrubbish", "Unable to parse date string"), # https://code.google.com/p/pyiso8601/issues/detail?id=14
+ ("20114-01-03T01:45:49", "Unable to parse date string"),
+])
+def test_parse_invalid_date(invalid_date, error_string):
+ assert isinstance(invalid_date, str) or invalid_date is None # Why? 'cos I've screwed up the parametrize before :)
+ with pytest.raises(iso8601.ParseError) as exc:
+ iso8601.parse_date(invalid_date)
+ assert exc.errisinstance(iso8601.ParseError)
+ assert str(exc.value).startswith(error_string)
+
+@pytest.mark.parametrize("valid_date,expected_datetime,isoformat", [
+ ("2007-06-23 06:40:34.00Z", datetime.datetime(2007, 6, 23, 6, 40, 34, 0, iso8601.UTC), "2007-06-23T06:40:34+00:00"), # Handle a separator other than T
+ ("1997-07-16T19:20+01:00", datetime.datetime(1997, 7, 16, 19, 20, 0, 0, iso8601.FixedOffset(1, 0, "+01:00")), "1997-07-16T19:20:00+01:00"), # Parse with no seconds
+ ("2007-01-01T08:00:00", datetime.datetime(2007, 1, 1, 8, 0, 0, 0, iso8601.UTC), "2007-01-01T08:00:00+00:00"), # Handle timezone-less dates. Assumes UTC. http://code.google.com/p/pyiso8601/issues/detail?id=4
+ ("2006-10-20T15:34:56.123+02:30", datetime.datetime(2006, 10, 20, 15, 34, 56, 123000, iso8601.FixedOffset(2, 30, "+02:30")), None),
+ ("2006-10-20T15:34:56Z", datetime.datetime(2006, 10, 20, 15, 34, 56, 0, iso8601.UTC), "2006-10-20T15:34:56+00:00"),
+ ("2007-5-7T11:43:55.328Z", datetime.datetime(2007, 5, 7, 11, 43, 55, 328000, iso8601.UTC), "2007-05-07T11:43:55.328000+00:00"), # http://code.google.com/p/pyiso8601/issues/detail?id=6
+ ("2006-10-20T15:34:56.123Z", datetime.datetime(2006, 10, 20, 15, 34, 56, 123000, iso8601.UTC), "2006-10-20T15:34:56.123000+00:00"),
+ ("2013-10-15T18:30Z", datetime.datetime(2013, 10, 15, 18, 30, 0, 0, iso8601.UTC), "2013-10-15T18:30:00+00:00"),
+ ("2013-10-15T22:30+04", datetime.datetime(2013, 10, 15, 22, 30, 0, 0, iso8601.FixedOffset(4, 0, "+04:00")), "2013-10-15T22:30:00+04:00"), # <time>±hh:mm
+ ("2013-10-15T1130-0700", datetime.datetime(2013, 10, 15, 11, 30, 0, 0, iso8601.FixedOffset(-7, 0, "-07:00")), "2013-10-15T11:30:00-07:00"), # <time>±hhmm
+ ("2013-10-15T1130+0700", datetime.datetime(2013, 10, 15, 11, 30, 0, 0, iso8601.FixedOffset(+7, 0, "+07:00")), "2013-10-15T11:30:00+07:00"), # <time>±hhmm
+ ("2013-10-15T1130+07", datetime.datetime(2013, 10, 15, 11, 30, 0, 0, iso8601.FixedOffset(+7, 0, "+07:00")), "2013-10-15T11:30:00+07:00"), # <time>±hh
+ ("2013-10-15T1130-07", datetime.datetime(2013, 10, 15, 11, 30, 0, 0, iso8601.FixedOffset(-7, 0, "-07:00")), "2013-10-15T11:30:00-07:00"), # <time>±hh
+ ("2013-10-15T15:00-03:30", datetime.datetime(2013, 10, 15, 15, 0, 0, 0, iso8601.FixedOffset(-3, -30, "-03:30")), "2013-10-15T15:00:00-03:30"),
+ ("2013-10-15T183123Z", datetime.datetime(2013, 10, 15, 18, 31, 23, 0, iso8601.UTC), "2013-10-15T18:31:23+00:00"), # hhmmss
+ ("2013-10-15T1831Z", datetime.datetime(2013, 10, 15, 18, 31, 0, 0, iso8601.UTC), "2013-10-15T18:31:00+00:00"), # hhmm
+ ("2013-10-15T18Z", datetime.datetime(2013, 10, 15, 18, 0, 0, 0, iso8601.UTC), "2013-10-15T18:00:00+00:00"), # hh
+ ("2013-10-15", datetime.datetime(2013, 10, 15, 0, 0, 0, 0, iso8601.UTC), "2013-10-15T00:00:00+00:00"), # YYYY-MM-DD
+ ("20131015T18:30Z", datetime.datetime(2013, 10, 15, 18, 30, 0, 0, iso8601.UTC), "2013-10-15T18:30:00+00:00"), # YYYYMMDD
+ ("2012-12-19T23:21:28.512400+00:00", datetime.datetime(2012, 12, 19, 23, 21, 28, 512400, iso8601.FixedOffset(0, 0, "+00:00")), "2012-12-19T23:21:28.512400+00:00"), # https://code.google.com/p/pyiso8601/issues/detail?id=21
+ ("2006-10-20T15:34:56.123+0230", datetime.datetime(2006, 10, 20, 15, 34, 56, 123000, iso8601.FixedOffset(2, 30, "+02:30")), "2006-10-20T15:34:56.123000+02:30"), # https://code.google.com/p/pyiso8601/issues/detail?id=18
+ ("19950204", datetime.datetime(1995, 2, 4, tzinfo=iso8601.UTC), "1995-02-04T00:00:00+00:00"), # https://code.google.com/p/pyiso8601/issues/detail?id=1
+ ("2010-07-20 15:25:52.520701+00:00", datetime.datetime(2010, 7, 20, 15, 25, 52, 520701, iso8601.FixedOffset(0, 0, "+00:00")), "2010-07-20T15:25:52.520701+00:00"), # https://code.google.com/p/pyiso8601/issues/detail?id=17
+ ("2010-06-12", datetime.datetime(2010, 6, 12, tzinfo=iso8601.UTC), "2010-06-12T00:00:00+00:00"), # https://code.google.com/p/pyiso8601/issues/detail?id=16
+ ("1985-04-12T23:20:50.52-05:30", datetime.datetime(1985, 4, 12, 23, 20, 50, 520000, iso8601.FixedOffset(-5, -30, "-05:30")), "1985-04-12T23:20:50.520000-05:30"), # https://bitbucket.org/micktwomey/pyiso8601/issue/8/015-parses-negative-timezones-incorrectly
+ ("1997-08-29T06:14:00.000123Z", datetime.datetime(1997, 8, 29, 6, 14, 0, 123, iso8601.UTC), "1997-08-29T06:14:00.000123+00:00"), # https://bitbucket.org/micktwomey/pyiso8601/issue/9/regression-parsing-microseconds
+ ("2014-02", datetime.datetime(2014, 2, 1, 0, 0, 0, 0, iso8601.UTC), "2014-02-01T00:00:00+00:00"), # https://bitbucket.org/micktwomey/pyiso8601/issue/14/regression-yyyy-mm-no-longer-parses
+ ("2014", datetime.datetime(2014, 1, 1, 0, 0, 0, 0, iso8601.UTC), "2014-01-01T00:00:00+00:00"), # YYYY
+ ("1997-08-29T06:14:00,000123Z", datetime.datetime(1997, 8, 29, 6, 14, 0, 123, iso8601.UTC), "1997-08-29T06:14:00.000123+00:00"), # Use , as decimal separator
+])
+def test_parse_valid_date(valid_date, expected_datetime, isoformat):
+ parsed = iso8601.parse_date(valid_date)
+ assert parsed.year == expected_datetime.year
+ assert parsed.month == expected_datetime.month
+ assert parsed.day == expected_datetime.day
+ assert parsed.hour == expected_datetime.hour
+ assert parsed.minute == expected_datetime.minute
+ assert parsed.second == expected_datetime.second
+ assert parsed.microsecond == expected_datetime.microsecond
+ assert parsed.tzinfo == expected_datetime.tzinfo
+ assert parsed == expected_datetime
+ assert parsed.isoformat() == expected_datetime.isoformat()
+ copy.deepcopy(parsed) # ensure it's deep copy-able
+ pickle.dumps(parsed) # ensure it pickles
+ if isoformat:
+ assert parsed.isoformat() == isoformat
+ assert iso8601.parse_date(parsed.isoformat()) == parsed # Test round trip
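A minimal usage sketch of the behaviour the tests above exercise (assuming the vendored `iso8601` package is importable):

```python
import iso8601

# A naive timestamp picks up default_timezone (UTC unless overridden)...
tz = iso8601.FixedOffset(2, 0, "+02:00")
d = iso8601.parse_date("2007-01-01T08:00:00", default_timezone=tz)
assert d.tzinfo == tz

# ...but an explicit trailing Z always means UTC, regardless of the default.
z = iso8601.parse_date("2007-01-01T08:00:00Z", default_timezone=tz)
assert z.tzinfo == iso8601.UTC
```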
diff --git a/third_party/python/iso8601/setup.cfg b/third_party/python/iso8601/setup.cfg
new file mode 100644
index 0000000000..8bfd5a12f8
--- /dev/null
+++ b/third_party/python/iso8601/setup.cfg
@@ -0,0 +1,4 @@
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/iso8601/setup.py b/third_party/python/iso8601/setup.py
new file mode 100644
index 0000000000..d5c8782f07
--- /dev/null
+++ b/third_party/python/iso8601/setup.py
@@ -0,0 +1,25 @@
+import os
+
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+long_description = open(os.path.join(os.path.dirname(__file__), "README.rst")).read()
+
+setup(
+ name="iso8601",
+ version="0.1.12",
+ description=long_description.split("\n")[0],
+ long_description=long_description,
+ author="Michael Twomey",
+ author_email="micktwomey+iso8601@gmail.com",
+ url="https://bitbucket.org/micktwomey/pyiso8601",
+ packages=["iso8601"],
+ license="MIT",
+ classifiers=[
+ "License :: OSI Approved :: MIT License",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 3",
+ ],
+)
diff --git a/third_party/python/iso8601/tox.ini b/third_party/python/iso8601/tox.ini
new file mode 100644
index 0000000000..90985f78cf
--- /dev/null
+++ b/third_party/python/iso8601/tox.ini
@@ -0,0 +1,8 @@
+[tox]
+envlist = py26,py27,py32,py33,py34,py35,py36,pypy,pypy3
+
+[testenv]
+deps=pytest>=2.4.2
+commands=py.test --verbose iso8601
+setenv =
+ LC_ALL=C
diff --git a/third_party/python/jsmin/CHANGELOG.txt b/third_party/python/jsmin/CHANGELOG.txt
new file mode 100644
index 0000000000..d9da338a7f
--- /dev/null
+++ b/third_party/python/jsmin/CHANGELOG.txt
@@ -0,0 +1,11 @@
+Changelog
+=========
+
+v2.1.0 (2014-12-24) Tikitu de Jager
+-----------------------------------
+
+ * First changelog entries; see README.rst for prior contributors.
+
+ * Expose quote_chars parameter to provide just enough unofficial Harmony
+ support to be useful.
+
diff --git a/third_party/python/jsmin/LICENSE.txt b/third_party/python/jsmin/LICENSE.txt
new file mode 100644
index 0000000000..193a85326d
--- /dev/null
+++ b/third_party/python/jsmin/LICENSE.txt
@@ -0,0 +1,23 @@
+The MIT License (MIT)
+
+Copyright (c) 2013 Dave St.Germain
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
diff --git a/third_party/python/jsmin/MANIFEST.in b/third_party/python/jsmin/MANIFEST.in
new file mode 100644
index 0000000000..ab30e9acee
--- /dev/null
+++ b/third_party/python/jsmin/MANIFEST.in
@@ -0,0 +1 @@
+include *.txt
diff --git a/third_party/python/jsmin/PKG-INFO b/third_party/python/jsmin/PKG-INFO
new file mode 100644
index 0000000000..27f88212e5
--- /dev/null
+++ b/third_party/python/jsmin/PKG-INFO
@@ -0,0 +1,117 @@
+Metadata-Version: 1.1
+Name: jsmin
+Version: 2.1.0
+Summary: JavaScript minifier.
+PLEASE UPDATE TO VERSION >= 2.0.6. Older versions have a serious bug related to comments.
+Home-page: https://bitbucket.org/dcs/jsmin/
+Author: Tikitu de Jager
+Author-email: tikitu+jsmin@logophile.org
+License: MIT License
+Description: =====
+ jsmin
+ =====
+
+ JavaScript minifier.
+
+ Usage
+ =====
+
+ .. code:: python
+
+ from jsmin import jsmin
+ with open('myfile.js') as js_file:
+ minified = jsmin(js_file.read())
+
+ You can run it as a commandline tool also::
+
+ python -m jsmin myfile.js
+
+ As yet, ``jsmin`` makes no attempt to be compatible with
+ `ECMAScript 6 / ES.next / Harmony <http://wiki.ecmascript.org/doku.php?id=harmony:specification_drafts>`_.
+ If you're using it on Harmony code, though, you might find the ``quote_chars``
+ parameter useful:
+
+ .. code:: python
+
+ from jsmin import jsmin
+ with open('myfile.js') as js_file:
+ minified = jsmin(js_file.read(), quote_chars="'\"`")
+
+
+ Where to get it
+ ===============
+
+ * install the package `from pypi <https://pypi.python.org/pypi/jsmin/>`_
+ * get the latest release `from the stable branch on bitbucket <https://bitbucket.org/dcs/jsmin/branch/stable>`_
+ * get the development version `from the default branch on bitbucket <https://bitbucket.org/dcs/jsmin/branch/default>`_
+
+ Contributing
+ ============
+
+ `Issues <https://bitbucket.org/dcs/jsmin/issues>`_ and `Pull requests <https://bitbucket.org/dcs/jsmin/pull-requests>`_
+ will be gratefully received on Bitbucket. Pull requests on github are great too, but the issue tracker lives on
+ bitbucket.
+
+ If possible, please make separate pull requests for tests and for code: tests will be committed on the stable branch
+ (which tracks the latest released version) while code will go to default by, erm, default.
+
+ Unless you request otherwise, your Bitbucket identity will be added to the contributors' list below; if you prefer a
+ different name feel free to add it in your pull request instead. (If you prefer not to be mentioned you'll have to let
+ the maintainer know somehow.)
+
+ Build/test status
+ =================
+
+ Both default and stable branches are tested with Travis: https://travis-ci.org/tikitu/jsmin
+
+ Stable (latest released version plus any new tests) is tested against CPython 2.6, 2.7, 3.2, and 3.3.
+ Currently:
+
+ .. image:: https://travis-ci.org/tikitu/jsmin.png?branch=ghstable
+
+ If stable is failing that means there's a new test that fails on *the latest released version on pypi*, with no fix yet
+ released.
+
+ Default (development version, might be ahead of latest released version) is tested against CPython 2.6, 2.7, 3.2, and
+ 3.3. Currently:
+
+ .. image:: https://travis-ci.org/tikitu/jsmin.png?branch=master
+
+ If default is failing don't use it, but as long as stable is passing the pypi release should be ok.
+
+ Contributors (chronological commit order)
+ =========================================
+
+ * `Dave St.Germain <https://bitbucket.org/dcs>`_ (original author)
+ * `Hans weltar <https://bitbucket.org/hansweltar>`_
+ * `Tikitu de Jager <mailto:tikitu+jsmin@logophile.org>`_ (current maintainer)
+ * https://bitbucket.org/rennat
+ * `Nick Alexander <https://bitbucket.org/ncalexan>`_
+
+ Changelog
+ =========
+
+ v2.1.0 (2014-12-24) Tikitu de Jager
+ -----------------------------------
+
+ * First changelog entries; see README.rst for prior contributors.
+
+ * Expose quote_chars parameter to provide just enough unofficial Harmony
+ support to be useful.
+
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Software Development :: Pre-processors
+Classifier: Topic :: Text Processing :: Filters
diff --git a/third_party/python/jsmin/README.rst b/third_party/python/jsmin/README.rst
new file mode 100644
index 0000000000..6e7f35cde9
--- /dev/null
+++ b/third_party/python/jsmin/README.rst
@@ -0,0 +1,80 @@
+=====
+jsmin
+=====
+
+JavaScript minifier.
+
+Usage
+=====
+
+.. code:: python
+
+ from jsmin import jsmin
+ with open('myfile.js') as js_file:
+ minified = jsmin(js_file.read())
+
+You can run it as a commandline tool also::
+
+ python -m jsmin myfile.js
+
+As yet, ``jsmin`` makes no attempt to be compatible with
+`ECMAScript 6 / ES.next / Harmony <http://wiki.ecmascript.org/doku.php?id=harmony:specification_drafts>`_.
+If you're using it on Harmony code, though, you might find the ``quote_chars``
+parameter useful:
+
+.. code:: python
+
+ from jsmin import jsmin
+ with open('myfile.js') as js_file:
+ minified = jsmin(js_file.read(), quote_chars="'\"`")
+
+
+Where to get it
+===============
+
+* install the package `from pypi <https://pypi.python.org/pypi/jsmin/>`_
+* get the latest release `from the stable branch on bitbucket <https://bitbucket.org/dcs/jsmin/branch/stable>`_
+* get the development version `from the default branch on bitbucket <https://bitbucket.org/dcs/jsmin/branch/default>`_
+
+Contributing
+============
+
+`Issues <https://bitbucket.org/dcs/jsmin/issues>`_ and `Pull requests <https://bitbucket.org/dcs/jsmin/pull-requests>`_
+will be gratefully received on Bitbucket. Pull requests on github are great too, but the issue tracker lives on
+bitbucket.
+
+If possible, please make separate pull requests for tests and for code: tests will be committed on the stable branch
+(which tracks the latest released version) while code will go to default by, erm, default.
+
+Unless you request otherwise, your Bitbucket identity will be added to the contributors' list below; if you prefer a
+different name feel free to add it in your pull request instead. (If you prefer not to be mentioned you'll have to let
+the maintainer know somehow.)
+
+Build/test status
+=================
+
+Both default and stable branches are tested with Travis: https://travis-ci.org/tikitu/jsmin
+
+Stable (latest released version plus any new tests) is tested against CPython 2.6, 2.7, 3.2, and 3.3.
+Currently:
+
+.. image:: https://travis-ci.org/tikitu/jsmin.png?branch=ghstable
+
+If stable is failing that means there's a new test that fails on *the latest released version on pypi*, with no fix yet
+released.
+
+Default (development version, might be ahead of latest released version) is tested against CPython 2.6, 2.7, 3.2, and
+3.3. Currently:
+
+.. image:: https://travis-ci.org/tikitu/jsmin.png?branch=master
+
+If default is failing don't use it, but as long as stable is passing the pypi release should be ok.
+
+Contributors (chronological commit order)
+=========================================
+
+* `Dave St.Germain <https://bitbucket.org/dcs>`_ (original author)
+* `Hans weltar <https://bitbucket.org/hansweltar>`_
+* `Tikitu de Jager <mailto:tikitu+jsmin@logophile.org>`_ (current maintainer)
+* https://bitbucket.org/rennat
+* `Nick Alexander <https://bitbucket.org/ncalexan>`_
diff --git a/third_party/python/jsmin/jsmin/__init__.py b/third_party/python/jsmin/jsmin/__init__.py
new file mode 100644
index 0000000000..44bb6b8cb3
--- /dev/null
+++ b/third_party/python/jsmin/jsmin/__init__.py
@@ -0,0 +1,238 @@
+# This code is original from jsmin by Douglas Crockford, it was translated to
+# Python by Baruch Even. It was rewritten by Dave St.Germain for speed.
+#
+# The MIT License (MIT)
+#
+# Copyright (c) 2013 Dave St.Germain
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+
+
+import sys
+is_3 = sys.version_info >= (3, 0)
+if is_3:
+ import io
+else:
+ import StringIO
+ try:
+ import cStringIO
+ except ImportError:
+ cStringIO = None
+
+
+__all__ = ['jsmin', 'JavascriptMinify']
+__version__ = '2.1.0'
+
+
+def jsmin(js, **kwargs):
+ """
+ returns a minified version of the javascript string
+ """
+ if not is_3:
+ if cStringIO and not isinstance(js, unicode):
+ # strings can use cStringIO for a 3x performance
+ # improvement, but unicode (in python2) cannot
+ klass = cStringIO.StringIO
+ else:
+ klass = StringIO.StringIO
+ else:
+ klass = io.StringIO
+ ins = klass(js)
+ outs = klass()
+ JavascriptMinify(ins, outs, **kwargs).minify()
+ return outs.getvalue()
+
+
+class JavascriptMinify(object):
+ """
+ Minify an input stream of javascript, writing
+ to an output stream
+ """
+
+ def __init__(self, instream=None, outstream=None, quote_chars="'\""):
+ self.ins = instream
+ self.outs = outstream
+ self.quote_chars = quote_chars
+
+ def minify(self, instream=None, outstream=None):
+ if instream and outstream:
+ self.ins, self.outs = instream, outstream
+
+ self.is_return = False
+ self.return_buf = ''
+
+ def write(char):
+ # all of this is to support literal regular expressions.
+ # sigh
+ if char in 'return':
+ self.return_buf += char
+ self.is_return = self.return_buf == 'return'
+ self.outs.write(char)
+ if self.is_return:
+ self.return_buf = ''
+
+ read = self.ins.read
+
+ space_strings = "abcdefghijklmnopqrstuvwxyz"\
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_$\\"
+ starters, enders = '{[(+-', '}])+-' + self.quote_chars
+ newlinestart_strings = starters + space_strings
+ newlineend_strings = enders + space_strings
+ do_newline = False
+ do_space = False
+ escape_slash_count = 0
+ doing_single_comment = False
+ previous_before_comment = ''
+ doing_multi_comment = False
+ in_re = False
+ in_quote = ''
+ quote_buf = []
+
+ previous = read(1)
+ if previous == '\\':
+ escape_slash_count += 1
+ next1 = read(1)
+ if previous == '/':
+ if next1 == '/':
+ doing_single_comment = True
+ elif next1 == '*':
+ doing_multi_comment = True
+ previous = next1
+ next1 = read(1)
+ else:
+ in_re = True # literal regex at start of script
+ write(previous)
+ elif not previous:
+ return
+ elif previous >= '!':
+ if previous in self.quote_chars:
+ in_quote = previous
+ write(previous)
+ previous_non_space = previous
+ else:
+ previous_non_space = ' '
+ if not next1:
+ return
+
+ while 1:
+ next2 = read(1)
+ if not next2:
+ last = next1.strip()
+ if not (doing_single_comment or doing_multi_comment)\
+ and last not in ('', '/'):
+ if in_quote:
+ write(''.join(quote_buf))
+ write(last)
+ break
+ if doing_multi_comment:
+ if next1 == '*' and next2 == '/':
+ doing_multi_comment = False
+ if previous_before_comment and previous_before_comment in space_strings:
+ do_space = True
+ next2 = read(1)
+ elif doing_single_comment:
+ if next1 in '\r\n':
+ doing_single_comment = False
+ while next2 in '\r\n':
+ next2 = read(1)
+ if not next2:
+ break
+ if previous_before_comment in ')}]':
+ do_newline = True
+ elif previous_before_comment in space_strings:
+ write('\n')
+ elif in_quote:
+ quote_buf.append(next1)
+
+ if next1 == in_quote:
+ numslashes = 0
+ for c in reversed(quote_buf[:-1]):
+ if c != '\\':
+ break
+ else:
+ numslashes += 1
+ if numslashes % 2 == 0:
+ in_quote = ''
+ write(''.join(quote_buf))
+ elif next1 in '\r\n':
+ if previous_non_space in newlineend_strings \
+ or previous_non_space > '~':
+ while 1:
+ if next2 < '!':
+ next2 = read(1)
+ if not next2:
+ break
+ else:
+ if next2 in newlinestart_strings \
+ or next2 > '~' or next2 == '/':
+ do_newline = True
+ break
+ elif next1 < '!' and not in_re:
+ if (previous_non_space in space_strings \
+ or previous_non_space > '~') \
+ and (next2 in space_strings or next2 > '~'):
+ do_space = True
+ elif previous_non_space in '-+' and next2 == previous_non_space:
+ # protect against + ++ or - -- sequences
+ do_space = True
+ elif self.is_return and next2 == '/':
+ # returning a regex...
+ write(' ')
+ elif next1 == '/':
+ if do_space:
+ write(' ')
+ if in_re:
+ if previous != '\\' or (not escape_slash_count % 2) or next2 in 'gimy':
+ in_re = False
+ write('/')
+ elif next2 == '/':
+ doing_single_comment = True
+ previous_before_comment = previous_non_space
+ elif next2 == '*':
+ doing_multi_comment = True
+ previous_before_comment = previous_non_space
+ previous = next1
+ next1 = next2
+ next2 = read(1)
+ else:
+ in_re = previous_non_space in '(,=:[?!&|;' or self.is_return # literal regular expression
+ write('/')
+ else:
+ if do_space:
+ do_space = False
+ write(' ')
+ if do_newline:
+ write('\n')
+ do_newline = False
+
+ write(next1)
+ if not in_re and next1 in self.quote_chars:
+ in_quote = next1
+ quote_buf = []
+
+ previous = next1
+ next1 = next2
+
+ if previous >= '!':
+ previous_non_space = previous
+
+ if previous == '\\':
+ escape_slash_count += 1
+ else:
+ escape_slash_count = 0
diff --git a/third_party/python/jsmin/jsmin/__main__.py b/third_party/python/jsmin/jsmin/__main__.py
new file mode 100644
index 0000000000..58da2e2221
--- /dev/null
+++ b/third_party/python/jsmin/jsmin/__main__.py
@@ -0,0 +1,10 @@
+import sys
+from jsmin import JavascriptMinify
+
+for f in sys.argv[1:]:
+ with open(f, 'r') as js:
+ minifier = JavascriptMinify(js, sys.stdout)
+ minifier.minify()
+ sys.stdout.write('\n')
+
+
diff --git a/third_party/python/jsmin/jsmin/test.py b/third_party/python/jsmin/jsmin/test.py
new file mode 100644
index 0000000000..6f7f627fde
--- /dev/null
+++ b/third_party/python/jsmin/jsmin/test.py
@@ -0,0 +1,394 @@
+import unittest
+import jsmin
+import sys
+
+class JsTests(unittest.TestCase):
+ def _minify(self, js):
+ return jsmin.jsmin(js)
+
+ def assertEqual(self, thing1, thing2):
+ if thing1 != thing2:
+ print(repr(thing1), repr(thing2))
+ raise AssertionError
+ return True
+
+ def assertMinified(self, js_input, expected, **kwargs):
+ minified = jsmin.jsmin(js_input, **kwargs)
+ assert minified == expected, "%r != %r" % (minified, expected)
+
+ def testQuoted(self):
+ js = r'''
+ Object.extend(String, {
+ interpret: function(value) {
+ return value == null ? '' : String(value);
+ },
+ specialChar: {
+ '\b': '\\b',
+ '\t': '\\t',
+ '\n': '\\n',
+ '\f': '\\f',
+ '\r': '\\r',
+ '\\': '\\\\'
+ }
+ });
+
+ '''
+ expected = r"""Object.extend(String,{interpret:function(value){return value==null?'':String(value);},specialChar:{'\b':'\\b','\t':'\\t','\n':'\\n','\f':'\\f','\r':'\\r','\\':'\\\\'}});"""
+ self.assertMinified(js, expected)
+
+ def testSingleComment(self):
+ js = r'''// use native browser JS 1.6 implementation if available
+ if (Object.isFunction(Array.prototype.forEach))
+ Array.prototype._each = Array.prototype.forEach;
+
+ if (!Array.prototype.indexOf) Array.prototype.indexOf = function(item, i) {
+
+ // hey there
+ function() {// testing comment
+ foo;
+ //something something
+
+ location = 'http://foo.com;'; // goodbye
+ }
+ //bye
+ '''
+ expected = r"""
+if(Object.isFunction(Array.prototype.forEach))
+Array.prototype._each=Array.prototype.forEach;if(!Array.prototype.indexOf)Array.prototype.indexOf=function(item,i){ function(){ foo; location='http://foo.com;';}"""
+ # print expected
+ self.assertMinified(js, expected)
+
+ def testEmpty(self):
+ self.assertMinified('', '')
+ self.assertMinified(' ', '')
+ self.assertMinified('\n', '')
+ self.assertMinified('\r\n', '')
+ self.assertMinified('\t', '')
+
+
+ def testMultiComment(self):
+ js = r"""
+ function foo() {
+ print('hey');
+ }
+ /*
+ if(this.options.zindex) {
+ this.originalZ = parseInt(Element.getStyle(this.element,'z-index') || 0);
+ this.element.style.zIndex = this.options.zindex;
+ }
+ */
+ another thing;
+ """
+ expected = r"""function foo(){print('hey');}
+another thing;"""
+ self.assertMinified(js, expected)
+
+ def testLeadingComment(self):
+ js = r"""/* here is a comment at the top
+
+ it ends here */
+ function foo() {
+ alert('crud');
+ }
+
+ """
+ expected = r"""function foo(){alert('crud');}"""
+ self.assertMinified(js, expected)
+
+ def testBlockCommentStartingWithSlash(self):
+ self.assertMinified('A; /*/ comment */ B', 'A;B')
+
+ def testBlockCommentEndingWithSlash(self):
+ self.assertMinified('A; /* comment /*/ B', 'A;B')
+
+ def testLeadingBlockCommentStartingWithSlash(self):
+ self.assertMinified('/*/ comment */ A', 'A')
+
+ def testLeadingBlockCommentEndingWithSlash(self):
+ self.assertMinified('/* comment /*/ A', 'A')
+
+ def testEmptyBlockComment(self):
+ self.assertMinified('/**/ A', 'A')
+
+ def testBlockCommentMultipleOpen(self):
+ self.assertMinified('/* A /* B */ C', 'C')
+
+ def testJustAComment(self):
+ self.assertMinified(' // a comment', '')
+
+ def test_issue_10(self):
+ js = '''
+ files = [{name: value.replace(/^.*\\\\/, '')}];
+ // comment
+ A
+ '''
+ expected = '''files=[{name:value.replace(/^.*\\\\/,'')}]; A'''
+ self.assertMinified(js, expected)
+
+ def testRe(self):
+ js = r'''
+ var str = this.replace(/\\./g, '@').replace(/"[^"\\\n\r]*"/g, '');
+ return (/^[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]*$/).test(str);
+ });'''
+ expected = r"""var str=this.replace(/\\./g,'@').replace(/"[^"\\\n\r]*"/g,'');return(/^[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]*$/).test(str);});"""
+ self.assertMinified(js, expected)
+
+ def testIgnoreComment(self):
+ js = r"""
+ var options_for_droppable = {
+ overlap: options.overlap,
+ containment: options.containment,
+ tree: options.tree,
+ hoverclass: options.hoverclass,
+ onHover: Sortable.onHover
+ }
+
+ var options_for_tree = {
+ onHover: Sortable.onEmptyHover,
+ overlap: options.overlap,
+ containment: options.containment,
+ hoverclass: options.hoverclass
+ }
+
+ // fix for gecko engine
+ Element.cleanWhitespace(element);
+ """
+ expected = r"""var options_for_droppable={overlap:options.overlap,containment:options.containment,tree:options.tree,hoverclass:options.hoverclass,onHover:Sortable.onHover}
+var options_for_tree={onHover:Sortable.onEmptyHover,overlap:options.overlap,containment:options.containment,hoverclass:options.hoverclass}
+Element.cleanWhitespace(element);"""
+ self.assertMinified(js, expected)
+
+ def testHairyRe(self):
+ js = r"""
+ inspect: function(useDoubleQuotes) {
+ var escapedString = this.gsub(/[\x00-\x1f\\]/, function(match) {
+ var character = String.specialChar[match[0]];
+ return character ? character : '\\u00' + match[0].charCodeAt().toPaddedString(2, 16);
+ });
+ if (useDoubleQuotes) return '"' + escapedString.replace(/"/g, '\\"') + '"';
+ return "'" + escapedString.replace(/'/g, '\\\'') + "'";
+ },
+
+ toJSON: function() {
+ return this.inspect(true);
+ },
+
+ unfilterJSON: function(filter) {
+ return this.sub(filter || Prototype.JSONFilter, '#{1}');
+ },
+ """
+ expected = r"""inspect:function(useDoubleQuotes){var escapedString=this.gsub(/[\x00-\x1f\\]/,function(match){var character=String.specialChar[match[0]];return character?character:'\\u00'+match[0].charCodeAt().toPaddedString(2,16);});if(useDoubleQuotes)return'"'+escapedString.replace(/"/g,'\\"')+'"';return"'"+escapedString.replace(/'/g,'\\\'')+"'";},toJSON:function(){return this.inspect(true);},unfilterJSON:function(filter){return this.sub(filter||Prototype.JSONFilter,'#{1}');},"""
+ self.assertMinified(js, expected)
+
+ def testLiteralRe(self):
+ js = r"""
+ myString.replace(/\\/g, '/');
+ console.log("hi");
+ """
+ expected = r"""myString.replace(/\\/g,'/');console.log("hi");"""
+ self.assertMinified(js, expected)
+
+ js = r''' return /^data:image\//i.test(url) ||
+ /^(https?|ftp|file|about|chrome|resource):/.test(url);
+ '''
+ expected = r'''return /^data:image\//i.test(url)||/^(https?|ftp|file|about|chrome|resource):/.test(url);'''
+ self.assertMinified(js, expected)
+
+ def testNoBracesWithComment(self):
+ js = r"""
+ onSuccess: function(transport) {
+ var js = transport.responseText.strip();
+ if (!/^\[.*\]$/.test(js)) // TODO: improve sanity check
+ throw 'Server returned an invalid collection representation.';
+ this._collection = eval(js);
+ this.checkForExternalText();
+ }.bind(this),
+ onFailure: this.onFailure
+ });
+ """
+ expected = r"""onSuccess:function(transport){var js=transport.responseText.strip();if(!/^\[.*\]$/.test(js))
+throw'Server returned an invalid collection representation.';this._collection=eval(js);this.checkForExternalText();}.bind(this),onFailure:this.onFailure});"""
+ self.assertMinified(js, expected)
+
+ def testSpaceInRe(self):
+ js = r"""
+ num = num.replace(/ /g,'');
+ """
+ self.assertMinified(js, "num=num.replace(/ /g,'');")
+
+ def testEmptyString(self):
+ js = r'''
+ function foo('') {
+
+ }
+ '''
+ self.assertMinified(js, "function foo(''){}")
+
+ def testDoubleSpace(self):
+ js = r'''
+var foo = "hey";
+ '''
+ self.assertMinified(js, 'var foo="hey";')
+
+ def testLeadingRegex(self):
+ js = r'/[d]+/g '
+ self.assertMinified(js, js.strip())
+
+ def testLeadingString(self):
+ js = r"'a string in the middle of nowhere'; // and a comment"
+ self.assertMinified(js, "'a string in the middle of nowhere';")
+
+ def testSingleCommentEnd(self):
+ js = r'// a comment\n'
+ self.assertMinified(js, '')
+
+ def testInputStream(self):
+ try:
+ from StringIO import StringIO
+ except ImportError:
+ from io import StringIO
+
+ ins = StringIO(r'''
+ function foo('') {
+
+ }
+ ''')
+ outs = StringIO()
+ m = jsmin.JavascriptMinify()
+ m.minify(ins, outs)
+ output = outs.getvalue()
+ assert output == "function foo(''){}"
+
+ def testUnicode(self):
+ instr = u'\u4000 //foo'
+ expected = u'\u4000'
+ output = jsmin.jsmin(instr)
+ self.assertEqual(output, expected)
+
+ def testCommentBeforeEOF(self):
+ self.assertMinified("//test\r\n", "")
+
+ def testCommentInObj(self):
+ self.assertMinified("""{
+ a: 1,//comment
+ }""", "{a:1,}")
+
+ def testCommentInObj2(self):
+ self.assertMinified("{a: 1//comment\r\n}", "{a:1\n}")
+
+ def testImplicitSemicolon(self):
+ # return \n 1 is equivalent with return; 1
+ # so best make sure jsmin retains the newline
+ self.assertMinified("return;//comment\r\na", "return;a")
+
+ def testImplicitSemicolon2(self):
+ self.assertMinified("return//comment...\r\na", "return\na")
+
+ def testSingleComment2(self):
+ self.assertMinified('x.replace(/\//, "_")// slash to underscore',
+ 'x.replace(/\//,"_")')
+
+ def testSlashesNearComments(self):
+ original = '''
+ { a: n / 2, }
+ // comment
+ '''
+ expected = '''{a:n/2,}'''
+ self.assertMinified(original, expected)
+
+ def testReturn(self):
+ original = '''
+ return foo;//comment
+ return bar;'''
+ expected = 'return foo; return bar;'
+ self.assertMinified(original, expected)
+
+ def test_space_plus(self):
+ original = '"s" + ++e + "s"'
+ expected = '"s"+ ++e+"s"'
+ self.assertMinified(original, expected)
+
+ def test_no_final_newline(self):
+ original = '"s"'
+ expected = '"s"'
+ self.assertMinified(original, expected)
+
+ def test_space_with_regex_repeats(self):
+ original = '/(NaN| {2}|^$)/.test(a)&&(a="M 0 0");'
+ self.assertMinified(original, original) # there should be nothing jsmin can do here
+
+ def test_space_with_regex_repeats_not_at_start(self):
+ original = 'aaa;/(NaN| {2}|^$)/.test(a)&&(a="M 0 0");'
+ self.assertMinified(original, original) # there should be nothing jsmin can do here
+
+ def test_space_in_regex(self):
+ original = '/a (a)/.test("a")'
+ self.assertMinified(original, original)
+
+ def test_angular_1(self):
+ original = '''var /** holds major version number for IE or NaN for real browsers */
+ msie,
+ jqLite, // delay binding since jQuery could be loaded after us.'''
+ minified = jsmin.jsmin(original)
+ self.assertTrue('var msie' in minified)
+
+ def test_angular_2(self):
+ original = 'var/* comment */msie;'
+ expected = 'var msie;'
+ self.assertMinified(original, expected)
+
+ def test_angular_3(self):
+ original = 'var /* comment */msie;'
+ expected = 'var msie;'
+ self.assertMinified(original, expected)
+
+ def test_angular_4(self):
+ original = 'var /* comment */ msie;'
+ expected = 'var msie;'
+ self.assertMinified(original, expected)
+
+ def test_angular_5(self):
+ original = 'a/b'
+ self.assertMinified(original, original)
+
+ def testBackticks(self):
+ original = '`test`'
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+ original = '` test with leading whitespace`'
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+ original = '`test with trailing whitespace `'
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+ original = '''`test
+with a new line`'''
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+ original = '''dumpAvStats: function(stats) {
+ var statsString = "";
+ if (stats.mozAvSyncDelay) {
+ statsString += `A/V sync: ${stats.mozAvSyncDelay} ms `;
+ }
+ if (stats.mozJitterBufferDelay) {
+ statsString += `Jitter-buffer delay: ${stats.mozJitterBufferDelay} ms`;
+ }
+
+ return React.DOM.div(null, statsString);'''
+ expected = 'dumpAvStats:function(stats){var statsString="";if(stats.mozAvSyncDelay){statsString+=`A/V sync: ${stats.mozAvSyncDelay} ms `;}\nif(stats.mozJitterBufferDelay){statsString+=`Jitter-buffer delay: ${stats.mozJitterBufferDelay} ms`;}\nreturn React.DOM.div(null,statsString);'
+ self.assertMinified(original, expected, quote_chars="'\"`")
+
+ def testBackticksExpressions(self):
+ original = '`Fifteen is ${a + b} and not ${2 * a + b}.`'
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+ original = '''`Fifteen is ${a +
+b} and not ${2 * a + "b"}.`'''
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+ def testBackticksTagged(self):
+ original = 'tag`Hello ${ a + b } world ${ a * b}`;'
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/jsmin/setup.cfg b/third_party/python/jsmin/setup.cfg
new file mode 100644
index 0000000000..861a9f5542
--- /dev/null
+++ b/third_party/python/jsmin/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/third_party/python/jsmin/setup.py b/third_party/python/jsmin/setup.py
new file mode 100644
index 0000000000..22639d1e35
--- /dev/null
+++ b/third_party/python/jsmin/setup.py
@@ -0,0 +1,47 @@
+from setuptools import setup
+
+import os, sys, re
+
+os.environ['COPYFILE_DISABLE'] = 'true' # this disables including resource forks in tar files on os x
+
+
+extra = {}
+if sys.version_info >= (3,0):
+ extra['use_2to3'] = True
+
+
+def long_description():
+ return open('README.rst').read() + '\n' + open('CHANGELOG.txt').read()
+
+
+setup(
+ name="jsmin",
+ version=re.search(r'__version__ = ["\']([^"\']+)', open('jsmin/__init__.py').read()).group(1),
+ packages=['jsmin'],
+ description='JavaScript minifier.\nPLEASE UPDATE TO VERSION >= 2.0.6. Older versions have a serious bug related to comments.',
+ long_description=long_description(),
+ author='Dave St.Germain',
+ author_email='dave@st.germa.in',
+ maintainer='Tikitu de Jager',
+ maintainer_email='tikitu+jsmin@logophile.org',
+ test_suite='jsmin.test.JsTests',
+ license='MIT License',
+ url='https://bitbucket.org/dcs/jsmin/',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Environment :: Web Environment',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ 'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
+ 'Topic :: Software Development :: Pre-processors',
+ 'Topic :: Text Processing :: Filters',
+ ],
+ **extra
+)
diff --git a/third_party/python/json-e/MANIFEST.in b/third_party/python/json-e/MANIFEST.in
new file mode 100644
index 0000000000..a6995977cb
--- /dev/null
+++ b/third_party/python/json-e/MANIFEST.in
@@ -0,0 +1,3 @@
+include jsone *.py
+include package.json
+recursive-exclude test *
diff --git a/third_party/python/json-e/PKG-INFO b/third_party/python/json-e/PKG-INFO
new file mode 100644
index 0000000000..bf41f82167
--- /dev/null
+++ b/third_party/python/json-e/PKG-INFO
@@ -0,0 +1,11 @@
+Metadata-Version: 2.1
+Name: json-e
+Version: 2.7.0
+Summary: A data-structure parameterization system written for embedding context in JSON objects
+Home-page: https://taskcluster.github.io/json-e/
+Author: Dustin J. Mitchell
+Author-email: dustin@mozilla.com
+License: MPL2
+Description: UNKNOWN
+Platform: UNKNOWN
+Provides-Extra: release
diff --git a/third_party/python/json-e/README.md b/third_party/python/json-e/README.md
new file mode 100644
index 0000000000..155b2e6ded
--- /dev/null
+++ b/third_party/python/json-e/README.md
@@ -0,0 +1,730 @@
+* [Full documentation](https://taskcluster.github.io/json-e)
+
+# JSON-e
+
+JSON-e is a data-structure parameterization system for embedding context in
+JSON objects.
+
+The central idea is to treat a data structure as a "template" and transform it,
+using another data structure as context, to produce an output data structure.
+
+There are countless libraries to do this with strings, such as
+[mustache](https://mustache.github.io/). What makes JSON-e unique is that it
+operates on data structures, not on their textual representation. This allows
+input to be written in a number of formats (JSON, YAML, etc.) or even generated
+dynamically. It also means that the output cannot be "invalid", even when
+including large chunks of contextual data.
+
+JSON-e is also designed to be safe for use on untrusted data. It never uses
+`eval` or any other function that might result in arbitrary code execution. It
+also disallows unbounded iteration, so any JSON-e rendering operation will
+finish in finite time.
+
+## Changes
+
+See
+[CHANGELOG.rst](https://github.com/taskcluster/json-e/blob/master/CHANGELOG.rst)
+for the changes in each version of this library.
+
+# Interface
+
+## JavaScript
+
+The JS module is installed with either of
+
+```shell
+npm install --save json-e
+yarn add json-e
+```
+
+The module exposes the following interface:
+
+```javascript
+import jsone from 'json-e';
+
+var template = {a: {$eval: "foo.bar"}};
+var context = {foo: {bar: "zoo"}};
+console.log(jsone(template, context));
+// -> { a: 'zoo' }
+```
+
+Note that the context can contain functions, and those functions can be called
+from the template:
+
+```javascript
+var template = {$eval: "foo(1)"};
+var context = {"foo": function(x) { return x + 2; }};
+console.log(jsone(template, context)); // -> 3
+```
+
+*NOTE*: Context functions are called synchronously. Any complex asynchronous
+operations should be handled before rendering the template.
+
+*NOTE*: If the template is untrusted, it can pass arbitrary data to functions
+in the context, which must guard against such behavior.
+
+### Browser
+
+JSON-e is distributed as a CommonJS package and is not designed to be included
+directly in a browser with `<script>`. Instead, it must be incorporated using a
+tool that understands CommonJS such as Webpack. See
+[Neutrino](https://neutrino.js.org/) for an easy, configuration-free way to
+build such applications.
+
+## Python
+
+The Python distribution exposes a `render` function:
+
+```python
+import jsone
+
+template = {"a": {"$eval": "foo.bar"}}
+context = {"foo": {"bar": "zoo"}}
+print(jsone.render(template, context)) # -> {"a": "zoo"}
+```
+
+and also allows custom functions in the context:
+
+```python
+template = {"$eval": "foo(1)"}
+context = {"foo": lambda x: x + 2}
+print(jsone.render(template, context)) # -> 3
+```
+
+## Go (golang)
+
+The [golang package for json-e](https://godoc.org/github.com/taskcluster/json-e) exposes a `Render` function:
+
+```golang
+import (
+ "fmt"
+ "github.com/taskcluster/json-e"
+)
+
+// Template must be given using types:
+// map[string]interface{}, []interface{}, float64, string, bool, nil
+// The same types that json.Unmarshal() will create when targeting an interface{}
+var template = map[string]interface{}{
+ "result": map[string]interface{}{
+ "$eval": "f() + 5",
+ },
+}
+// Context can be JSON types just like template, but may also contain functions
+// these can take JSON types as arguments, and return a value and optionally an error.
+var context = map[string]interface{}{
+ "f": func() int { return 37 },
+}
+
+func main() {
+ value, err := jsone.Render(template, context)
+ if err != nil {
+ panic(err)
+ }
+ fmt.Printf("%#v\n", value)
+}
+```
+
+## CLI
+
+You can use the 3rd party package [rjsone](https://wryun.github.io/rjsone/) to template
+JSON-e from the command line, passing templates/contexts as files or arguments and using
+stdout for the result.
+
+
+# Language Reference
+
+The examples here are given in YAML for ease of reading. Of course, the
+rendering operation takes place on the parsed data, so the input format is
+irrelevant to its operation.
+
+## Simple Operations
+
+All JSON-e directives involve the `$` character, so a template without any directives is
+rendered unchanged:
+
+```yaml
+template: {key: [1,2,{key2: 'val', key3: 1}, true], f: false}
+context: {}
+result: {key: [1,2,{key2: 'val', key3: 1}, true], f: false}
+```
+
+## String Interpolation
+
+The simplest form of substitution occurs within strings, using `${..}`:
+
+```yaml
+template: {message: 'hello ${key}', 'k=${num}': true}
+context: {key: 'world', num: 1}
+result: {message: 'hello world', 'k=1': true}
+```
+
+The bit inside the `${..}` is an expression, and must evaluate to something
+that interpolates obviously into a string (so, a string, number, or boolean).
+If it is null, then the expression interpolates into an empty string.
+The expression syntax is described in more detail below.
+
+Values interpolate as their JSON literal values:
+
+```yaml
+template: ["number: ${num}", "booleans: ${t} ${f}", "null: ${nil}"]
+context: {num: 3, t: true, f: false, nil: null}
+result: ["number: 3", "booleans: true false", "null: "]
+```
+
+Note that object keys can be interpolated, too:
+
+```yaml
+template: {"tc_${name}": "${value}"}
+context: {name: 'foo', value: 'bar'}
+result: {"tc_foo": "bar"}
+```
+
+The string `${` can be escaped as `$${`.
+
+## Operators
+
+JSON-e defines a bunch of operators. Each is represented as an object with a
+property beginning with `$`. This object can be buried deeply within the
+template. Some operators take additional arguments as properties of the same
+object.
+
+### `$eval`
+
+The `$eval` operator evaluates the given expression and is replaced with the
+result of that evaluation. Unlike with string interpolation, the result need
+not be a string, but can be an arbitrary data structure.
+
+```yaml
+template: {config: {$eval: 'settings.staging'}}
+context:
+ settings:
+ staging:
+ transactionBackend: mock
+ production:
+ transactionBackend: customerdb
+result: {config: {transactionBackend: 'mock'}}
+```
+
+The expression syntax is described in more detail below.
+
+Note that `$eval`'s value must be a string. "Metaprogramming" by providing a
+calculated value to eval is not allowed. For example, `{$eval: {$eval:
+"${var1} + ${var2}"}}` is not valid JSON-e.
+
+### `$json`
+
+The `$json` operator formats the given value as JSON with sorted keys. It does
+not evaluate the value (use `$eval` for that). While this can be useful in some
+cases, it is an unusual case to include a JSON string in a larger data
+structure.
+
+```yaml
+template: {$json: [a, b, {$eval: 'a+b'}, 4]}
+context: {a: 1, b: 2}
+result: '["a", "b", 3, 4]'
+```
+
+### `$if` - `then` - `else`
+
+The `$if` operator supports conditionals. It evaluates the given value, and
+replaces itself with the `then` or `else` properties. If either property is
+omitted, then the expression is omitted from the parent object.
+
+```yaml
+template: {key: {$if: 'cond', then: 1}, k2: 3}
+context: {cond: true}
+result: {key: 1, k2: 3}
+```
+
+```yaml
+template: {$if: 'x > 5', then: 1, else: -1}
+context: {x: 10}
+result: 1
+```
+
+```yaml
+template: [1, {$if: 'cond', else: 2}, 3]
+context: {cond: false}
+result: [1,2,3]
+```
+
+```yaml
+template: {key: {$if: 'cond', then: 2}, other: 3}
+context: {cond: false}
+result: {other: 3}
+```
+
+### `$flatten`
+
+The `$flatten` operator flattens an array of arrays into one array.
+
+```yaml
+template: {$flatten: [[1, 2], [3, 4], [5]]}
+context: {}
+result: [1, 2, 3, 4, 5]
+```
+
+### `$flattenDeep`
+
+The `$flattenDeep` operator deeply flattens an array of arrays into one array.
+
+```yaml
+template: {$flattenDeep: [[1, [2, [3]]]]}
+context: {}
+result: [1, 2, 3]
+```
+
+### `$fromNow`
+
+The `$fromNow` operator is a shorthand for the built-in function `fromNow`. It
+creates a JSON (ISO 8601) datestamp for a time relative to the current time
+(see the `now` builtin, below) or, if `from` is given, relative to that time.
+The offset is specified by a sequence of number/unit pairs in a string. For
+example:
+
+```yaml
+template: {$fromNow: '2 days 1 hour'}
+context: {}
+result: '2017-01-19T16:27:20.974Z'
+```
+
+```yaml
+template: {$fromNow: '1 hour', from: '2017-01-19T16:27:20.974Z'}
+context: {}
+result: '2017-01-19T17:27:20.974Z'
+```
+
+The available units are `day`, `hour`, and `minute`, for all of which a plural
+is also accepted.
+
+### `$let`
+
+The `$let` operator evaluates an expression using a context amended with the
+given values. It is analogous to the Haskell `where` clause.
+
+```yaml
+template: {$let: {ts: 100, foo: 200},
+ in: [{$eval: "ts+foo"}, {$eval: "ts-foo"}, {$eval: "ts*foo"}]}
+context: {}
+result: [300, -100, 20000]
+```
+
+The `$let` operator here added the `ts` and `foo` variables to the scope of
+the context and accordingly evaluated the `in` clause using those variables
+to return the correct result.
+
+The variable names in the `$let` value must be valid context variable names and
+must be written literally. That is, an expression like `{$let: {$eval:
+"extraVariables"}, in : ..}` is not allowed.
+
+### `$map`
+
+The `$map` operator evaluates an expression for each value of the given array or object,
+constructing the result as an array or object of the evaluated values.
+
+When given an array, map always returns an array.
+
+```yaml
+template:
+ $map: [2, 4, 6]
+ each(x): {$eval: 'x + a'}
+context: {a: 1}
+result: [3, 5, 7]
+```
+The array or object is the value of the `$map` property, and the expression to evaluate
+is given by `each(var)` where `var` is the name of the variable containing each
+element. In the case of iterating over an object, `var` will be an object with two keys:
+`key` and `val`. These keys correspond to a key in the object and its corresponding value.
+
+When `$map` is given an object, the expression defined by `each(var)` must evaluate to an
+object for each key/value pair (`key` and `val`). The objects constructed by each `each(var)`
+are then merged internally to give the resulting object, with later keys overwriting
+the previous ones. Otherwise the expression is invalid for the `$map` operator.
+
+```yaml
+template:
+ $map: {a: 1, b: 2, c: 3}
+ each(y): {'${y.key}x': {$eval: 'y.val + 1'}}
+context: {}
+result: {ax: 2, bx: 3, cx: 4}
+```
+
+### `$match`
+
+The `$match` operator is not dissimilar to pattern matching operators. It takes an object in which every key is a string expression to be evaluated to `true` or `false` against the context. The result is an array of the values (all types are supported) whose keys evaluated to `true`. The order of the values in the array is arbitrary. If there are no matches, the result is an empty array.
+
+```yaml
+template: {$match: {"x == 10": "ten", "x == 20": "twenty"}}
+context: {x: 10}
+result: ["ten"]
+```
+
+```yaml
+template: {$match: {"x == 10 || x == 20": "tens", "x == 10": "ten"}}
+context: {x: 10}
+one possible result: ["tens", "ten"]
+another possible result: ["ten", "tens"]
+```
+```yaml
+template: {$match: {"x < 10": "tens"}}
+context: {x: 10}
+result: []
+```
+
+### `$merge`
+
+The `$merge` operator merges an array of objects, returning a single object
+that combines all of the objects in the array, where the right-side objects
+overwrite the values of the left-side ones.
+
+```yaml
+template: {$merge: [{a: 1, b: 1}, {b: 2, c: 3}, {d: 4}]}
+context: {}
+result: {a: 1, b: 2, c: 3, d: 4}
+```
+
+### `$mergeDeep`
+
+The `$mergeDeep` operator is like `$merge`, but it recurses into objects to
+combine their contents property by property. Arrays are concatenated.
+
+```yaml
+template:
+ $mergeDeep:
+ - task:
+ payload:
+ command: [a, b]
+ - task:
+ extra:
+ foo: bar
+ - task:
+ payload:
+ command: [c]
+context: {}
+result:
+ task:
+ extra:
+ foo: bar
+ payload:
+ command: [a, b, c]
+```
+
+### `$sort`
+
+The `$sort` operator sorts the given array. It takes a `by(var)` property which
+should evaluate to a comparable value for each element. The `by(var)` property
+defaults to the identity function.
+
+```yaml
+template:
+ $sort: [{a: 2}, {a: 1, b: []}, {a: 3}]
+ by(x): 'x.a'
+context: {}
+result: [{a: 1, b: []}, {a: 2}, {a: 3}]
+```
+
+### `$reverse`
+
+The `$reverse` operator simply reverses the given array.
+
+```yaml
+template: {$reverse: [3, 4, 1, 2]}
+context: {}
+result: [2, 1, 4, 3]
+```
+
+### Escaping operators
+
+All property names starting with `$` are reserved for JSON-e.
+You can use `$$` to escape such properties:
+
+```yaml
+template: {$$reverse: [3, 2, {$$eval: '2 - 1'}, 0]}
+context: {}
+result: {$reverse: [3, 2, {$eval: '2 - 1'}, 0]}
+```
+
+## Truthiness
+
+Many values can be evaluated in contexts where booleans are required,
+not just booleans themselves. JSON-e defines the following values as false.
+Anything else will be true.
+
+```yaml
+template: {$if: 'a || b || c || d || e || f', then: "uh oh", else: "falsy" }
+context: {a: null, b: [], c: {}, d: "", e: 0, f: false}
+result: "falsy"
+```
+
+## Expression Syntax
+
+Expressions are given in a simple Python- or JavaScript-like expression
+language. Its data types are limited to JSON types plus function objects.
+
+### Literals
+
+Literals are similar to those for JSON. Numeric literals only accept integer
+and decimal notation. Strings do not support any kind of escaping. The use of
+`\n` and `\t` in the example below depends on the YAML parser to expand the
+escapes.
+
+```yaml
+template:
+ - {$eval: "1.3"}
+ - {$eval: "'abc'"}
+ - {$eval: '"abc"'}
+ - {$eval: "'\n\t'"}
+context: {}
+result:
+ - 1.3
+ - "abc"
+ - "abc"
+ - "\n\t"
+```
+
+Array and object literals also look much like JSON, with bare identifiers
+allowed as keys like in Javascript:
+
+```yaml
+template:
+ - {$eval: '[1, 2, "three"]'}
+ - {$eval: '{foo: 1, "bar": 2}'}
+context: {}
+result:
+ - [1, 2, "three"]
+ - {"foo": 1, "bar": 2}
+```
+
+### Context References
+
+Bare identifiers refer to items from the context or to built-ins (described below).
+
+```yaml
+template: {$eval: '[x, z, x+z]'}
+context: {x: 'quick', z: 'sort'}
+result: ['quick', 'sort', 'quicksort']
+```
+
+### Arithmetic Operations
+
+The usual arithmetic operators are all defined, with typical associativity and
+precedence:
+
+```yaml
+template:
+ - {$eval: 'x + z'}
+ - {$eval: 's + t'}
+ - {$eval: 'z - x'}
+ - {$eval: 'x * z'}
+ - {$eval: 'z / x'}
+ - {$eval: 'z ** 2'}
+ - {$eval: '(z / x) ** 2'}
+context: {x: 10, z: 20, s: "face", t: "plant"}
+result:
+ - 30
+ - "faceplant"
+ - 10
+ - 200
+ - 2
+ - 400
+ - 4
+```
+
+Note that strings can be concatenated with `+`, but none of the other operators
+apply.
+
+### Comparison Operations
+
+Comparisons work as expected. Equality is "deep" in the sense of doing
+comparisons of the contents of data structures.
+
+```yaml
+template:
+ - {$eval: 'x < z'}
+ - {$eval: 'x <= z'}
+ - {$eval: 'x > z'}
+ - {$eval: 'x >= z'}
+ - {$eval: 'deep == [1, [3, {a: 5}]]'}
+ - {$eval: 'deep != [1, [3, {a: 5}]]'}
+context: {x: -10, z: 10, deep: [1, [3, {a: 5}]]}
+result: [true, true, false, false, true, false]
+```
+
+### Boolean Operations
+
+Boolean operations use C- and Javascript-style symbols `||`, `&&`, and `!`:
+
+```yaml
+template: {$eval: '!(false || false) && true'}
+context: {}
+result: true
+```
+
+### Object Property Access
+
+Like Javascript, object properties can be accessed either with array-index
+syntax or with dot syntax. Unlike Javascript, `obj.prop` is an error if `obj`
+does not have `prop`, while `obj['prop']` will evaluate to `null`.
+
+```yaml
+template: {$eval: 'v.a + v["b"]'}
+context: {v: {a: 'apple', b: 'banana', c: 'carrot'}}
+result: 'applebanana'
+```
+
+### Indexing and Slicing
+
+Strings and arrays can be indexed and sliced using a Python-like indexing
+scheme. Negative indexes are counted from the end of the value. Slices are
+treated as "half-open", meaning that the result contains the first index and
+does not contain the second index. A "backward" slice with the start index
+greater than the end index is treated as empty.
+
+```yaml
+template:
+ - {$eval: '[array[1], string[1]]'}
+ - {$eval: '[array[1:4], string[1:4]]'}
+ - {$eval: '[array[2:], string[2:]]'}
+ - {$eval: '[array[:2], string[:2]]'}
+ - {$eval: '[array[4:2], string[4:2]]'}
+ - {$eval: '[array[-2], string[-2]]'}
+ - {$eval: '[array[-2:], string[-2:]]'}
+ - {$eval: '[array[:-3], string[:-3]]'}
+context: {array: ['a', 'b', 'c', 'd', 'e'], string: 'abcde'}
+result:
+ - ['b', 'b']
+ - [['b', 'c', 'd'], 'bcd']
+ - [['c', 'd', 'e'], 'cde']
+ - [['a', 'b'], 'ab']
+ - [[], '']
+ - ['d', 'd']
+ - [['d', 'e'], 'de']
+ - [['a', 'b'], 'ab']
+```
+
+### Containment Operation
+
+The `in` keyword can be used to check for containment: a property in an object,
+an element in an array, or a substring in a string.
+
+```yaml
+template:
+ - {$eval: '"foo" in {foo: 1, bar: 2}'}
+ - {$eval: '"foo" in ["foo", "bar"]'}
+ - {$eval: '"foo" in "foobar"'}
+context: {}
+result: [true, true, true]
+```
+
+### Function Invocation
+
+Function calls are made with the usual `fn(arg1, arg2)` syntax. Functions are
+not JSON data, so they cannot be created in JSON-e, but they can be provided as
+built-ins or supplied in the context and called from JSON-e.
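+
+As a concrete illustration, here is a small sketch using the Python binding
+described earlier (illustrative, not part of the upstream reference):
+
+```python
+import jsone
+
+# `add` is supplied in the context and invoked from the expression.
+template = {"$eval": "add(2, 3)"}
+context = {"add": lambda a, b: a + b}
+print(jsone.render(template, context))  # -> 5
+```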
+
+### Built-In Functions and Variables
+
+The expression language provides a laundry-list of built-in functions/variables. Library
+users can easily add additional functions/variables, or override the built-ins, as part
+of the context.
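+
+For instance, a context entry with the same name shadows a built-in for that
+render (an illustrative Python sketch, not part of the upstream reference):
+
+```python
+import jsone
+
+# The context is applied on top of the built-ins, so `min` below replaces the
+# built-in `min` for this render only.
+template = {"$eval": "min(1, 2, 3)"}
+print(jsone.render(template, {}))            # -> 1, built-in min
+print(jsone.render(template, {"min": max}))  # -> 3, overridden by the context
+```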
+
+#### Time
+
+The built-in context value `now` is set to the current time at the start of
+evaluation of the template, and used as the default "from" value for `$fromNow`
+and the built-in `fromNow()`.
+
+```yaml
+template:
+ - {$eval: 'now'}
+ - {$eval: 'fromNow("1 minute")'}
+ - {$eval: 'fromNow("1 minute", "2017-01-19T16:27:20.974Z")'}
+context: {}
+result:
+ - '2017-01-19T16:27:20.974Z'
+ - '2017-01-19T16:28:20.974Z'
+ - '2017-01-19T16:28:20.974Z'
+```
+
+#### Math
+
+```yaml
+template:
+ # the smallest of the arguments
+ - {$eval: 'min(1, 3, 5)'}
+ # the largest of the arguments
+ - {$eval: 'max(2, 4, 6)'}
+ # mathematical functions
+ - {$eval: 'sqrt(16)'}
+ - {$eval: 'ceil(0.3)'}
+ - {$eval: 'floor(0.3)'}
+ - {$eval: 'abs(-0.3)'}
+context: {}
+result:
+ - 1
+ - 6
+ - 4
+ - 1
+ - 0
+ - 0.3
+```
+
+#### Strings
+
+```yaml
+template:
+ # convert string case
+ - {$eval: 'lowercase("Fools!")'}
+ - {$eval: 'uppercase("Fools!")'}
+ # convert string, number, boolean, or array to string
+ - {$eval: 'str(130)'}
+ # strip whitespace from left, right, or both ends of a string
+ - {$eval: 'lstrip(" room ")'}
+ - {$eval: 'rstrip(" room ")'}
+ - {$eval: 'strip(" room ")'}
+context: {}
+result:
+ - "fools!"
+ - "FOOLS!"
+ - "130"
+ - "room "
+ - " room"
+ - room
+```
+
+#### Type
+
+The `typeof()` built-in returns the type of an object. Its behavior around
+`null` is reminiscent of JavaScript.
+
+```yaml
+template:
+ - "${typeof('abc')}"
+ - "${typeof(42)}"
+ - "${typeof(42.0)}"
+ - "${typeof(true)}"
+ - "${typeof([])}"
+ - "${typeof({})}"
+ - "${typeof(typeof)}"
+ - {$eval: "typeof(null)"}
+ - "${typeof(null)}"
+context: {}
+result:
+ - string
+ - number
+ - number
+ - boolean
+ - array
+ - object
+ - function
+ - null # note: the value null, not the string "null"
+ - '' # .. which interpolates to an empty string
+```
+
+#### Length
+
+The `len()` built-in returns the length of a string or array.
+
+```yaml
+template: {$eval: 'len([1, 2, 3])'}
+context: {}
+result: 3
+```
+
diff --git a/third_party/python/json-e/jsone/__init__.py b/third_party/python/json-e/jsone/__init__.py
new file mode 100644
index 0000000000..943674e672
--- /dev/null
+++ b/third_party/python/json-e/jsone/__init__.py
@@ -0,0 +1,21 @@
+from __future__ import absolute_import, print_function, unicode_literals
+
+import re
+from .render import renderValue
+from .shared import JSONTemplateError, DeleteMarker, TemplateError, fromNow
+from . import builtins
+
+_context_re = re.compile(r'[a-zA-Z_][a-zA-Z0-9_]*$')
+
+
+def render(template, context):
+ if not all(_context_re.match(c) for c in context):
+ raise TemplateError('top level keys of context must follow '
+ '/[a-zA-Z_][a-zA-Z0-9_]*/')
+ full_context = {'now': fromNow('0 seconds', None)}
+ full_context.update(builtins.build(full_context))
+ full_context.update(context)
+ rv = renderValue(template, full_context)
+ if rv is DeleteMarker:
+ return None
+ return rv
diff --git a/third_party/python/json-e/jsone/builtins.py b/third_party/python/json-e/jsone/builtins.py
new file mode 100644
index 0000000000..751ee2dc04
--- /dev/null
+++ b/third_party/python/json-e/jsone/builtins.py
@@ -0,0 +1,121 @@
+from __future__ import absolute_import, print_function, unicode_literals
+
+import math
+from .shared import string, to_str, fromNow, JSONTemplateError
+
+
+class BuiltinError(JSONTemplateError):
+ pass
+
+
+def build(context):
+ builtins = {}
+
+ def builtin(name, variadic=None, argument_tests=None, minArgs=None):
+ def wrap(fn):
+ def bad(reason=None):
+ raise BuiltinError(
+ (reason or 'invalid arguments to builtin: {}').format(name))
+ if variadic:
+ def invoke(*args):
+ if minArgs:
+ if len(args) < minArgs:
+ bad("too few arguments to {}")
+ for arg in args:
+ if not variadic(arg):
+ bad()
+ return fn(*args)
+
+ elif argument_tests:
+ def invoke(*args):
+ if len(args) != len(argument_tests):
+ bad()
+ for t, arg in zip(argument_tests, args):
+ if not t(arg):
+ bad()
+ return fn(*args)
+
+ else:
+ def invoke(*args):
+ return fn(*args)
+
+ builtins[name] = invoke
+ return fn
+ return wrap
+
+ def is_number(v):
+ return isinstance(v, (int, float)) and not isinstance(v, bool)
+
+ def is_string(v):
+ return isinstance(v, string)
+
+ def is_string_or_array(v):
+ return isinstance(v, (string, list))
+
+ def anything_except_array(v):
+ return isinstance(v, (string, int, float, bool)) or v is None
+
+ def anything(v):
+ return isinstance(v, (string, int, float, list, dict)) or v is None or callable(v)
+
+ # ---
+
+ builtin('min', variadic=is_number, minArgs=1)(min)
+ builtin('max', variadic=is_number, minArgs=1)(max)
+ builtin('sqrt', argument_tests=[is_number])(math.sqrt)
+ builtin('abs', argument_tests=[is_number])(abs)
+
+ @builtin('ceil', argument_tests=[is_number])
+ def ceil(v):
+ return int(math.ceil(v))
+
+ @builtin('floor', argument_tests=[is_number])
+ def floor(v):
+ return int(math.floor(v))
+
+ @builtin('lowercase', argument_tests=[is_string])
+ def lowercase(v):
+ return v.lower()
+
+ @builtin('uppercase', argument_tests=[is_string])
+ def uppercase(v):
+ return v.upper()
+
+ builtin('len', argument_tests=[is_string_or_array])(len)
+ builtin('str', argument_tests=[anything_except_array])(to_str)
+ builtin('number', variadic=is_string, minArgs=1)(float)
+
+ @builtin('strip', argument_tests=[is_string])
+ def strip(s):
+ return s.strip()
+
+ @builtin('rstrip', argument_tests=[is_string])
+ def rstrip(s):
+ return s.rstrip()
+
+ @builtin('lstrip', argument_tests=[is_string])
+ def lstrip(s):
+ return s.lstrip()
+
+ @builtin('fromNow', variadic=is_string, minArgs=1)
+ def fromNow_builtin(offset, reference=None):
+ return fromNow(offset, reference or context.get('now'))
+
+ @builtin('typeof', argument_tests=[anything])
+ def typeof(v):
+ if isinstance(v, bool):
+ return 'boolean'
+ elif isinstance(v, string):
+ return 'string'
+ elif isinstance(v, (int, float)):
+ return 'number'
+ elif isinstance(v, list):
+ return 'array'
+ elif isinstance(v, dict):
+ return 'object'
+ elif v is None:
+ return None
+ elif callable(v):
+ return 'function'
+
+ return builtins
diff --git a/third_party/python/json-e/jsone/interpreter.py b/third_party/python/json-e/jsone/interpreter.py
new file mode 100644
index 0000000000..eb38a9c85b
--- /dev/null
+++ b/third_party/python/json-e/jsone/interpreter.py
@@ -0,0 +1,289 @@
+from __future__ import absolute_import, print_function, unicode_literals
+
+from .prattparser import PrattParser, infix, prefix
+from .shared import TemplateError, InterpreterError, string
+import operator
+import json
+
+OPERATORS = {
+ '-': operator.sub,
+ '*': operator.mul,
+ '/': operator.truediv,
+ '**': operator.pow,
+ '==': operator.eq,
+ '!=': operator.ne,
+ '<=': operator.le,
+ '<': operator.lt,
+ '>': operator.gt,
+ '>=': operator.ge,
+ '&&': lambda a, b: bool(a and b),
+ '||': lambda a, b: bool(a or b),
+}
+
+
+def infixExpectationError(operator, expected):
+ return InterpreterError('infix: {} expects {} {} {}'.
+ format(operator, expected, operator, expected))
+
+
+class ExpressionEvaluator(PrattParser):
+
+ ignore = '\\s+'
+ patterns = {
+ 'number': '[0-9]+(?:\\.[0-9]+)?',
+ 'identifier': '[a-zA-Z_][a-zA-Z_0-9]*',
+ 'string': '\'[^\']*\'|"[^"]*"',
+ # avoid matching these as prefixes of identifiers, e.g. `insinuations`
+ 'true': 'true(?![a-zA-Z_0-9])',
+ 'false': 'false(?![a-zA-Z_0-9])',
+ 'in': 'in(?![a-zA-Z_0-9])',
+ 'null': 'null(?![a-zA-Z_0-9])',
+ }
+ tokens = [
+ '**', '+', '-', '*', '/', '[', ']', '.', '(', ')', '{', '}', ':', ',',
+ '>=', '<=', '<', '>', '==', '!=', '!', '&&', '||', 'true', 'false', 'in',
+ 'null', 'number', 'identifier', 'string',
+ ]
+ precedence = [
+ ['||'],
+ ['&&'],
+ ['in'],
+ ['==', '!='],
+ ['>=', '<=', '<', '>'],
+ ['+', '-'],
+ ['*', '/'],
+ ['**-right-associative'],
+ ['**'],
+ ['[', '.'],
+ ['('],
+ ['unary'],
+ ]
+
+ def __init__(self, context):
+ super(ExpressionEvaluator, self).__init__()
+ self.context = context
+
+ def parse(self, expression):
+ if not isinstance(expression, string):
+ raise TemplateError('expression to be evaluated must be a string')
+ return super(ExpressionEvaluator, self).parse(expression)
+
+ @prefix('number')
+ def number(self, token, pc):
+ v = token.value
+ return float(v) if '.' in v else int(v)
+
+ @prefix("!")
+ def bang(self, token, pc):
+ return not pc.parse('unary')
+
+ @prefix("-")
+ def uminus(self, token, pc):
+ v = pc.parse('unary')
+ if not isNumber(v):
+ raise InterpreterError('{} expects {}'.format('unary -', 'number'))
+ return -v
+
+ @prefix("+")
+ def uplus(self, token, pc):
+ v = pc.parse('unary')
+ if not isNumber(v):
+ raise InterpreterError('{} expects {}'.format('unary +', 'number'))
+ return v
+
+ @prefix("identifier")
+ def identifier(self, token, pc):
+ try:
+ return self.context[token.value]
+ except KeyError:
+ raise InterpreterError(
+ 'unknown context value {}'.format(token.value))
+
+ @prefix("null")
+ def null(self, token, pc):
+ return None
+
+ @prefix("[")
+ def array_bracket(self, token, pc):
+ return parseList(pc, ',', ']')
+
+ @prefix("(")
+ def grouping_paren(self, token, pc):
+ rv = pc.parse()
+ pc.require(')')
+ return rv
+
+ @prefix("{")
+ def object_brace(self, token, pc):
+ return parseObject(pc)
+
+ @prefix("string")
+ def string(self, token, pc):
+ return parseString(token.value)
+
+ @prefix("true")
+ def true(self, token, pc):
+ return True
+
+ @prefix("false")
+ def false(self, token, pc):
+ return False
+
+ @infix("+")
+ def plus(self, left, token, pc):
+ if not isinstance(left, (string, int, float)) or isinstance(left, bool):
+ raise infixExpectationError('+', 'number/string')
+ right = pc.parse(token.kind)
+ if not isinstance(right, (string, int, float)) or isinstance(right, bool):
+ raise infixExpectationError('+', 'number/string')
+ if type(right) != type(left) and \
+ (isinstance(left, string) or isinstance(right, string)):
+ raise infixExpectationError('+', 'numbers/strings')
+ return left + right
+
+ @infix('-', '*', '/', '**')
+ def arith(self, left, token, pc):
+ op = token.kind
+ if not isNumber(left):
+ raise infixExpectationError(op, 'number')
+ right = pc.parse({'**': '**-right-associative'}.get(op))
+ if not isNumber(right):
+ raise infixExpectationError(op, 'number')
+ return OPERATORS[op](left, right)
+
+ @infix("[")
+ def index_slice(self, left, token, pc):
+ a = None
+ b = None
+ is_interval = False
+ if pc.attempt(':'):
+ a = 0
+ is_interval = True
+ else:
+ a = pc.parse()
+ if pc.attempt(':'):
+ is_interval = True
+
+ if is_interval and not pc.attempt(']'):
+ b = pc.parse()
+ pc.require(']')
+
+ if not is_interval:
+ pc.require(']')
+
+ return accessProperty(left, a, b, is_interval)
+
+ @infix(".")
+ def property_dot(self, left, token, pc):
+ if not isinstance(left, dict):
+ raise infixExpectationError('.', 'object')
+ k = pc.require('identifier').value
+ try:
+ return left[k]
+ except KeyError:
+ raise TemplateError(
+ '{} not found in {}'.format(k, json.dumps(left)))
+
+ @infix("(")
+ def function_call(self, left, token, pc):
+ if not callable(left):
+ raise TemplateError('function call requires a callable value')
+ args = parseList(pc, ',', ')')
+ return left(*args)
+
+ @infix('==', '!=', '||', '&&')
+ def equality_and_logic(self, left, token, pc):
+ op = token.kind
+ right = pc.parse(op)
+ return OPERATORS[op](left, right)
+
+ @infix('<=', '<', '>', '>=')
+ def inequality(self, left, token, pc):
+ op = token.kind
+ right = pc.parse(op)
+ if type(left) != type(right) or \
+ not (isinstance(left, (int, float, string)) and not isinstance(left, bool)):
+ raise infixExpectationError(op, 'numbers/strings')
+ return OPERATORS[op](left, right)
+
+ @infix("in")
+ def contains(self, left, token, pc):
+ right = pc.parse(token.kind)
+ if isinstance(right, dict):
+ if not isinstance(left, string):
+ raise infixExpectationError('in-object', 'string on left side')
+ elif isinstance(right, string):
+ if not isinstance(left, string):
+ raise infixExpectationError('in-string', 'string on left side')
+ elif not isinstance(right, list):
+ raise infixExpectationError(
+ 'in', 'Array, string, or object on right side')
+ try:
+ return left in right
+ except TypeError:
+ raise infixExpectationError('in', 'scalar value, collection')
+
+
+def isNumber(v):
+ return isinstance(v, (int, float)) and not isinstance(v, bool)
+
+
+def parseString(v):
+ return v[1:-1]
+
+
+def parseList(pc, separator, terminator):
+ rv = []
+ if not pc.attempt(terminator):
+ while True:
+ rv.append(pc.parse())
+ if not pc.attempt(separator):
+ break
+ pc.require(terminator)
+ return rv
+
+
+def parseObject(pc):
+ rv = {}
+ if not pc.attempt('}'):
+ while True:
+ k = pc.require('identifier', 'string')
+ if k.kind == 'string':
+ k = parseString(k.value)
+ else:
+ k = k.value
+ pc.require(':')
+ v = pc.parse()
+ rv[k] = v
+ if not pc.attempt(','):
+ break
+ pc.require('}')
+ return rv
+
+
+def accessProperty(value, a, b, is_interval):
+ if isinstance(value, (list, string)):
+ if is_interval:
+ if b is None:
+ b = len(value)
+ try:
+ return value[a:b]
+ except TypeError:
+ raise infixExpectationError('[..]', 'integer')
+ else:
+ try:
+ return value[a]
+ except IndexError:
+ raise TemplateError('index out of bounds')
+ except TypeError:
+ raise infixExpectationError('[..]', 'integer')
+
+ if not isinstance(value, dict):
+ raise infixExpectationError('[..]', 'object, array, or string')
+ if not isinstance(a, string):
+ raise infixExpectationError('[..]', 'string index')
+
+ try:
+ return value[a]
+ except KeyError:
+ return None
diff --git a/third_party/python/json-e/jsone/prattparser.py b/third_party/python/json-e/jsone/prattparser.py
new file mode 100644
index 0000000000..5bf250a816
--- /dev/null
+++ b/third_party/python/json-e/jsone/prattparser.py
@@ -0,0 +1,191 @@
+from __future__ import absolute_import, print_function, unicode_literals
+
+import re
+from collections import namedtuple
+from .shared import TemplateError
+from .six import with_metaclass, viewitems
+
+
+class SyntaxError(TemplateError):
+
+ @classmethod
+ def unexpected(cls, got, exp):
+ exp = ', '.join(sorted(exp))
+ return cls('Found {}, expected {}'.format(got.value, exp))
+
+
+Token = namedtuple('Token', ['kind', 'value', 'start', 'end'])
+
+
+def prefix(*kinds):
+ """Decorate a method as handling prefix tokens of the given kinds"""
+ def wrap(fn):
+ try:
+ fn.prefix_kinds.extend(kinds)
+ except AttributeError:
+ fn.prefix_kinds = list(kinds)
+ return fn
+ return wrap
+
+
+def infix(*kinds):
+ """Decorate a method as handling infix tokens of the given kinds"""
+ def wrap(fn):
+ try:
+ fn.infix_kinds.extend(kinds)
+ except AttributeError:
+ fn.infix_kinds = list(kinds)
+ return fn
+ return wrap
+
+
+class PrattParserMeta(type):
+
+ def __init__(cls, name, bases, body):
+ # set up rules based on decorated methods
+ infix_rules = cls.infix_rules = {}
+ prefix_rules = cls.prefix_rules = {}
+ for prop, value in viewitems(body):
+ if hasattr(value, 'prefix_kinds'):
+ for kind in value.prefix_kinds:
+ prefix_rules[kind] = value
+ delattr(cls, prop)
+ if hasattr(value, 'infix_kinds'):
+ for kind in value.infix_kinds:
+ infix_rules[kind] = value
+ delattr(cls, prop)
+
+ # build a regular expression to generate a sequence of tokens
+ token_patterns = [
+ '({})'.format(cls.patterns.get(t, re.escape(t)))
+ for t in cls.tokens]
+ if cls.ignore:
+ token_patterns.append('(?:{})'.format(cls.ignore))
+ cls.token_re = re.compile('^(?:' + '|'.join(token_patterns) + ')')
+
+ # build a map from token kind to precedence level
+ cls.precedence_map = {
+ kind: prec + 1
+ for (prec, row) in enumerate(cls.precedence)
+ for kind in row
+ }
+
+
+class PrattParser(with_metaclass(PrattParserMeta, object)):
+
+ # regular expression for ignored input (e.g., whitespace)
+ ignore = None
+
+ # regular expressions for tokens that do not match themselves
+ patterns = {}
+
+ # all token kinds (note that order matters - the first matching token
+ # will be returned)
+ tokens = []
+
+ # precedence of tokens, as a list of lists, from lowest to highest
+ precedence = []
+
+ def parse(self, source):
+ pc = ParseContext(self, source, self._generate_tokens(source))
+ result = pc.parse()
+ # if there are any tokens remaining, that's an error..
+ token = pc.attempt()
+ if token:
+ raise SyntaxError.unexpected(token, self.infix_rules)
+ return result
+
+ def parseUntilTerminator(self, source, terminator):
+ pc = ParseContext(self, source, self._generate_tokens(source))
+ result = pc.parse()
+ token = pc.attempt()
+ if token.kind != terminator:
+ raise SyntaxError.unexpected(token, [terminator])
+ return (result, token.start)
+
+ def _generate_tokens(self, source):
+ offset = 0
+ while True:
+ start = offset
+ remainder = source[offset:]
+ mo = self.token_re.match(remainder)
+ if not mo:
+ if remainder:
+ raise SyntaxError(
+ "Unexpected input: '{}'".format(remainder))
+ break
+ offset += mo.end()
+
+ # figure out which token matched (note that idx is 0-based)
+ indexes = list(
+ filter(lambda x: x[1] is not None, enumerate(mo.groups())))
+ if indexes:
+ idx = indexes[0][0]
+ yield Token(
+ kind=self.tokens[idx],
+ value=mo.group(idx + 1), # (mo.group is 1-based)
+ start=start,
+ end=offset)
+
+
+class ParseContext(object):
+
+ def __init__(self, parser, source, token_generator):
+ self.parser = parser
+ self.source = source
+
+ self._tokens = token_generator
+ self._error = None
+
+ self._advance()
+
+ def _advance(self):
+ try:
+ self.next_token = next(self._tokens)
+ except StopIteration:
+ self.next_token = None
+ except SyntaxError as exc:
+ self._error = exc
+
+ def attempt(self, *kinds):
+ """Try to get the next token if it matches one of the kinds given,
+ otherwise returning None. If no kinds are given, any kind is
+ accepted."""
+ if self._error:
+ raise self._error
+ token = self.next_token
+ if not token:
+ return None
+ if kinds and token.kind not in kinds:
+ return None
+ self._advance()
+ return token
+
+ def require(self, *kinds):
+ """Get the next token, raising an exception if it doesn't match one of
+ the given kinds, or the input ends. If no kinds are given, returns the
+ next token of any kind."""
+ token = self.attempt()
+ if not token:
+ raise SyntaxError('Unexpected end of input')
+ if kinds and token.kind not in kinds:
+ raise SyntaxError.unexpected(token, kinds)
+ return token
+
+ def parse(self, precedence=None):
+ parser = self.parser
+ precedence = parser.precedence_map[precedence] if precedence else 0
+ token = self.require()
+ prefix_rule = parser.prefix_rules.get(token.kind)
+ if not prefix_rule:
+ raise SyntaxError.unexpected(token, parser.prefix_rules)
+ left = prefix_rule(parser, token, self)
+ while self.next_token:
+ kind = self.next_token.kind
+ if kind not in parser.infix_rules:
+ break
+ if precedence >= parser.precedence_map[kind]:
+ break
+ token = self.require()
+ left = parser.infix_rules[kind](parser, left, token, self)
+ return left
diff --git a/third_party/python/json-e/jsone/render.py b/third_party/python/json-e/jsone/render.py
new file mode 100644
index 0000000000..e820da1ec2
--- /dev/null
+++ b/third_party/python/json-e/jsone/render.py
@@ -0,0 +1,354 @@
+from __future__ import absolute_import, print_function, unicode_literals
+
+import re
+import json as json
+from .shared import JSONTemplateError, TemplateError, DeleteMarker, string, to_str
+from . import shared
+from .interpreter import ExpressionEvaluator
+from .six import viewitems
+import functools
+
+operators = {}
+IDENTIFIER_RE = re.compile(r'[a-zA-Z_][a-zA-Z0-9_]*$')
+
+
+def operator(name):
+ def wrap(fn):
+ operators[name] = fn
+ return fn
+ return wrap
+
+
+def evaluateExpression(expr, context):
+ evaluator = ExpressionEvaluator(context)
+ return evaluator.parse(expr)
+
+
+_interpolation_start_re = re.compile(r'\$?\${')
+
+
+def interpolate(string, context):
+ mo = _interpolation_start_re.search(string)
+ if not mo:
+ return string
+
+ result = []
+ evaluator = ExpressionEvaluator(context)
+
+ while True:
+ result.append(string[:mo.start()])
+ if mo.group() != '$${':
+ string = string[mo.end():]
+ parsed, offset = evaluator.parseUntilTerminator(string, '}')
+ if isinstance(parsed, (list, dict)):
+ raise TemplateError(
+ "interpolation of '{}' produced an array or object".format(string[:offset]))
+ if to_str(parsed) == "null":
+ result.append("")
+ else:
+ result.append(to_str(parsed))
+ string = string[offset + 1:]
+ else: # found `$${`
+ result.append('${')
+ string = string[mo.end():]
+
+ mo = _interpolation_start_re.search(string)
+ if not mo:
+ result.append(string)
+ break
+
+ return ''.join(result)
+
+
+def checkUndefinedProperties(template, allowed):
+ unknownKeys = []
+ combined = "|".join(allowed) + "$"
+ unknownKeys = [key for key in sorted(template)
+ if not re.match(combined, key)]
+ if unknownKeys:
+ raise TemplateError(allowed[0].replace('\\', '') +
+ " has undefined properties: " + " ".join(unknownKeys))
+
+
+@operator('$eval')
+def eval(template, context):
+ checkUndefinedProperties(template, ['\$eval'])
+ if not isinstance(template['$eval'], string):
+ raise TemplateError("$eval must be given a string expression")
+ return evaluateExpression(template['$eval'], context)
+
+
+@operator('$flatten')
+def flatten(template, context):
+ checkUndefinedProperties(template, ['\$flatten'])
+ value = renderValue(template['$flatten'], context)
+ if not isinstance(value, list):
+ raise TemplateError('$flatten value must evaluate to an array')
+
+ def gen():
+ for e in value:
+ if isinstance(e, list):
+ for e2 in e:
+ yield e2
+ else:
+ yield e
+ return list(gen())
+
+
+@operator('$flattenDeep')
+def flattenDeep(template, context):
+ checkUndefinedProperties(template, ['\$flattenDeep'])
+ value = renderValue(template['$flattenDeep'], context)
+ if not isinstance(value, list):
+ raise TemplateError('$flattenDeep value must evaluate to an array')
+
+ def gen(value):
+ if isinstance(value, list):
+ for e in value:
+ for sub in gen(e):
+ yield sub
+ else:
+ yield value
+
+ return list(gen(value))
+
+
+@operator('$fromNow')
+def fromNow(template, context):
+ checkUndefinedProperties(template, ['\$fromNow', 'from'])
+ offset = renderValue(template['$fromNow'], context)
+ reference = renderValue(
+ template['from'], context) if 'from' in template else context.get('now')
+
+ if not isinstance(offset, string):
+ raise TemplateError("$fromNow expects a string")
+ return shared.fromNow(offset, reference)
+
+
+@operator('$if')
+def ifConstruct(template, context):
+ checkUndefinedProperties(template, ['\$if', 'then', 'else'])
+ condition = evaluateExpression(template['$if'], context)
+ try:
+ if condition:
+ rv = template['then']
+ else:
+ rv = template['else']
+ except KeyError:
+ return DeleteMarker
+ return renderValue(rv, context)
+
+
+@operator('$json')
+def jsonConstruct(template, context):
+ checkUndefinedProperties(template, ['\$json'])
+ value = renderValue(template['$json'], context)
+ return json.dumps(value, separators=(',', ':'), sort_keys=True, ensure_ascii=False)
+
+
+@operator('$let')
+def let(template, context):
+ checkUndefinedProperties(template, ['\$let', 'in'])
+ if not isinstance(template['$let'], dict):
+ raise TemplateError("$let value must be an object")
+
+ subcontext = context.copy()
+ for k, v in template['$let'].items():
+ if not IDENTIFIER_RE.match(k):
+ raise TemplateError('top level keys of $let must follow /[a-zA-Z_][a-zA-Z0-9_]*/')
+ subcontext[k] = renderValue(v, context)
+
+ try:
+ in_expression = template['in']
+ except KeyError:
+ raise TemplateError("$let operator requires an `in` clause")
+ return renderValue(in_expression, subcontext)
+
+
+@operator('$map')
+def map(template, context):
+ EACH_RE = 'each\([a-zA-Z_][a-zA-Z0-9_]*\)'
+ checkUndefinedProperties(template, ['\$map', EACH_RE])
+ value = renderValue(template['$map'], context)
+ if not isinstance(value, list) and not isinstance(value, dict):
+ raise TemplateError("$map value must evaluate to an array or object")
+
+ is_obj = isinstance(value, dict)
+
+ each_keys = [k for k in template if k.startswith('each(')]
+ if len(each_keys) != 1:
+ raise TemplateError(
+ "$map requires exactly one other property, each(..)")
+ each_key = each_keys[0]
+ each_var = each_key[5:-1]
+ each_template = template[each_key]
+
+ def gen(val):
+ subcontext = context.copy()
+ for elt in val:
+ subcontext[each_var] = elt
+ elt = renderValue(each_template, subcontext)
+ if elt is not DeleteMarker:
+ yield elt
+ if is_obj:
+ value = [{'key': v[0], 'val': v[1]} for v in value.items()]
+ v = dict()
+ for e in gen(value):
+ if not isinstance(e, dict):
+ raise TemplateError(
+ "$map on objects expects {0} to evaluate to an object".format(each_key))
+ v.update(e)
+ return v
+ else:
+ return list(gen(value))
+
+
+@operator('$match')
+def matchConstruct(template, context):
+ checkUndefinedProperties(template, ['\$match'])
+
+ if not isinstance(template['$match'], dict):
+ raise TemplateError("$match can evaluate objects only")
+
+ result = []
+ for condition in template['$match']:
+ if evaluateExpression(condition, context):
+ result.append(renderValue(template['$match'][condition], context))
+
+ return result
+
+
+@operator('$merge')
+def merge(template, context):
+ checkUndefinedProperties(template, ['\$merge'])
+ value = renderValue(template['$merge'], context)
+ if not isinstance(value, list) or not all(isinstance(e, dict) for e in value):
+ raise TemplateError(
+ "$merge value must evaluate to an array of objects")
+ v = dict()
+ for e in value:
+ v.update(e)
+ return v
+
+
+@operator('$mergeDeep')
+def mergeDeep(template, context):
+ checkUndefinedProperties(template, ['\$mergeDeep'])
+ value = renderValue(template['$mergeDeep'], context)
+ if not isinstance(value, list) or not all(isinstance(e, dict) for e in value):
+ raise TemplateError(
+ "$mergeDeep value must evaluate to an array of objects")
+
+ def merge(l, r):
+ if isinstance(l, list) and isinstance(r, list):
+ return l + r
+ if isinstance(l, dict) and isinstance(r, dict):
+ res = l.copy()
+ for k, v in viewitems(r):
+ if k in l:
+ res[k] = merge(l[k], v)
+ else:
+ res[k] = v
+ return res
+ return r
+ if len(value) == 0:
+ return {}
+ return functools.reduce(merge, value[1:], value[0])
+
+
+@operator('$reverse')
+def reverse(template, context):
+ checkUndefinedProperties(template, ['\$reverse'])
+ value = renderValue(template['$reverse'], context)
+ if not isinstance(value, list):
+ raise TemplateError("$reverse value must evaluate to an array of objects")
+ return list(reversed(value))
+
+
+@operator('$sort')
+def sort(template, context):
+ BY_RE = 'by\([a-zA-Z_][a-zA-Z0-9_]*\)'
+ checkUndefinedProperties(template, ['\$sort', BY_RE])
+ value = renderValue(template['$sort'], context)
+ if not isinstance(value, list):
+ raise TemplateError('$sorted values to be sorted must have the same type')
+
+ # handle by(..) if given, applying the schwartzian transform
+ by_keys = [k for k in template if k.startswith('by(')]
+ if len(by_keys) == 1:
+ by_key = by_keys[0]
+ by_var = by_key[3:-1]
+ by_expr = template[by_key]
+
+ def xform():
+ subcontext = context.copy()
+ for e in value:
+ subcontext[by_var] = e
+ yield evaluateExpression(by_expr, subcontext), e
+ to_sort = list(xform())
+ elif len(by_keys) == 0:
+ to_sort = [(e, e) for e in value]
+ else:
+ raise TemplateError('only one by(..) is allowed')
+
+ # check types
+ try:
+ eltype = type(to_sort[0][0])
+ except IndexError:
+ return []
+ if eltype in (list, dict, bool, type(None)):
+ raise TemplateError('$sorted values to be sorted must have the same type')
+ if not all(isinstance(e[0], eltype) for e in to_sort):
+ raise TemplateError('$sorted values to be sorted must have the same type')
+
+ # unzip the schwartzian transform
+ return list(e[1] for e in sorted(to_sort))
+
+
+def renderValue(template, context):
+ if isinstance(template, string):
+ return interpolate(template, context)
+
+ elif isinstance(template, dict):
+ matches = [k for k in template if k in operators]
+ if matches:
+ if len(matches) > 1:
+ raise TemplateError("only one operator allowed")
+ return operators[matches[0]](template, context)
+
+ def updated():
+ for k, v in viewitems(template):
+ if k.startswith('$$'):
+ k = k[1:]
+ elif k.startswith('$') and IDENTIFIER_RE.match(k[1:]):
+ raise TemplateError(
+ '$<identifier> is reserved; use $$<identifier>')
+ else:
+ k = interpolate(k, context)
+
+ try:
+ v = renderValue(v, context)
+ except JSONTemplateError as e:
+ if IDENTIFIER_RE.match(k):
+ e.add_location('.{}'.format(k))
+ else:
+ e.add_location('[{}]'.format(json.dumps(k)))
+ raise
+ if v is not DeleteMarker:
+ yield k, v
+ return dict(updated())
+
+ elif isinstance(template, list):
+ def updated():
+ for i, e in enumerate(template):
+ try:
+ v = renderValue(e, context)
+ if v is not DeleteMarker:
+ yield v
+ except JSONTemplateError as e:
+ e.add_location('[{}]'.format(i))
+ raise
+ return list(updated())
+
+ else:
+ return template
diff --git a/third_party/python/json-e/jsone/shared.py b/third_party/python/json-e/jsone/shared.py
new file mode 100644
index 0000000000..0e70e21f81
--- /dev/null
+++ b/third_party/python/json-e/jsone/shared.py
@@ -0,0 +1,131 @@
+from __future__ import absolute_import, print_function, unicode_literals
+
+import re
+import datetime
+
+
+class DeleteMarker:
+ pass
+
+
+class JSONTemplateError(Exception):
+ def __init__(self, message):
+ super(JSONTemplateError, self).__init__(message)
+ self.location = []
+
+ def add_location(self, loc):
+ self.location.insert(0, loc)
+
+ def __str__(self):
+ location = ' at template' + ''.join(self.location)
+ return "{}{}: {}".format(
+ self.__class__.__name__,
+ location if self.location else '',
+ self.args[0])
+
+
+class TemplateError(JSONTemplateError):
+ pass
+
+
+class InterpreterError(JSONTemplateError):
+ pass
+
+
+# Regular expression matching offsets like: X years Y months Z weeks N days H hours M minutes S seconds
+# todo: support the "hr", "wk", and "yr" abbreviations
+FROMNOW_RE = re.compile(''.join([
+ '^(\s*(?P<years>\d+)\s*y(ears?)?)?',
+ '(\s*(?P<months>\d+)\s*mo(nths?)?)?',
+ '(\s*(?P<weeks>\d+)\s*w(eeks?)?)?',
+ '(\s*(?P<days>\d+)\s*d(ays?)?)?',
+ '(\s*(?P<hours>\d+)\s*h(ours?)?)?',
+ '(\s*(?P<minutes>\d+)\s*m(in(utes?)?)?)?\s*',
+ '(\s*(?P<seconds>\d+)\s*s(ec(onds?)?)?)?\s*$',
+]))
+
+
+def fromNow(offset, reference):
+ # copied from taskcluster-client.py
+ # We want to handle past dates as well as future
+ future = True
+ offset = offset.lstrip()
+ if offset.startswith('-'):
+ future = False
+ offset = offset[1:].lstrip()
+ if offset.startswith('+'):
+ offset = offset[1:].lstrip()
+
+ # Parse offset
+ m = FROMNOW_RE.match(offset)
+ if m is None:
+ raise ValueError("offset string: '%s' does not parse" % offset)
+
+ # In order to calculate years and months we need to calculate how many days
+ # to offset the offset by, since timedelta only goes as high as weeks
+ days = 0
+ hours = 0
+ minutes = 0
+ seconds = 0
+ if m.group('years'):
+ # forget leap years, a year is 365 days
+ years = int(m.group('years'))
+ days += 365 * years
+ if m.group('months'):
+ # assume "month" means 30 days
+ months = int(m.group('months'))
+ days += 30 * months
+ days += int(m.group('days') or 0)
+ hours += int(m.group('hours') or 0)
+ minutes += int(m.group('minutes') or 0)
+ seconds += int(m.group('seconds') or 0)
+
+ # Offset datetime from utc
+ delta = datetime.timedelta(
+ weeks=int(m.group('weeks') or 0),
+ days=days,
+ hours=hours,
+ minutes=minutes,
+ seconds=seconds,
+ )
+
+ if isinstance(reference, string):
+ reference = datetime.datetime.strptime(
+ reference, '%Y-%m-%dT%H:%M:%S.%fZ')
+ elif reference is None:
+ reference = datetime.datetime.utcnow()
+ return stringDate(reference + delta if future else reference - delta)
+
+
+datefmt_re = re.compile(r'(\.[0-9]{3})[0-9]*(\+00:00)?')
+
+
+def to_str(v):
+ if isinstance(v, bool):
+ return {True: 'true', False: 'false'}[v]
+ elif isinstance(v, list):
+ return ','.join(to_str(e) for e in v)
+ elif v is None:
+ return 'null'
+ else:
+ return str(v)
+
+
+def stringDate(date):
+ # Convert to isoFormat
+ try:
+ string = date.isoformat(timespec='microseconds')
+ # py2.7 to py3.5 does not have timespec
+ except TypeError as e:
+ string = date.isoformat()
+ if string.find('.') == -1:
+ string += '.000'
+ string = datefmt_re.sub(r'\1Z', string)
+ return string
+
+
+# the base class for strings, regardless of python version
+try:
+ string = basestring
+except NameError:
+ string = str
diff --git a/third_party/python/json-e/jsone/six.py b/third_party/python/json-e/jsone/six.py
new file mode 100644
index 0000000000..1ab9cd2d7d
--- /dev/null
+++ b/third_party/python/json-e/jsone/six.py
@@ -0,0 +1,23 @@
+import sys
+import operator
+
+# https://github.com/benjaminp/six/blob/2c3492a9f16d294cd5e6b43d6110c5a3a2e58b4c/six.py#L818
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(meta):
+
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+# https://github.com/benjaminp/six/blob/2c3492a9f16d294cd5e6b43d6110c5a3a2e58b4c/six.py#L578
+if sys.version_info[0] == 3:
+ viewitems = operator.methodcaller("items")
+else:
+ viewitems = operator.methodcaller("viewitems")
diff --git a/third_party/python/json-e/package.json b/third_party/python/json-e/package.json
new file mode 100644
index 0000000000..0c388d57db
--- /dev/null
+++ b/third_party/python/json-e/package.json
@@ -0,0 +1,35 @@
+{
+ "name": "json-e",
+ "version": "2.7.0",
+ "description": "json parameterization module inspired from json-parameterization",
+ "main": "./src/index.js",
+ "scripts": {
+ "lint": "eslint src/*.js test/*.js",
+ "test": "yarn lint && mocha test/*_test.js",
+ "build-demo": "cd demo && yarn && yarn build",
+ "start-demo": "cd demo && yarn && yarn start"
+ },
+ "files": [
+ "src"
+ ],
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/taskcluster/json-e"
+ },
+ "author": "",
+ "license": "MPL-2.0",
+ "dependencies": {
+ "json-stable-stringify": "^1.0.1"
+ },
+ "devDependencies": {
+ "assume": "^1.5.2",
+ "browserify": "^14.5.0",
+ "eslint-config-taskcluster": "^3.0.0",
+ "mocha": "^4.0.1",
+ "source-map-support": "^0.5.0",
+ "timekeeper": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=6.4.0"
+ }
+}
diff --git a/third_party/python/json-e/setup.cfg b/third_party/python/json-e/setup.cfg
new file mode 100644
index 0000000000..6410597b69
--- /dev/null
+++ b/third_party/python/json-e/setup.cfg
@@ -0,0 +1,8 @@
+[pep8]
+max-line-length = 100
+select = E,W
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/json-e/setup.py b/third_party/python/json-e/setup.py
new file mode 100644
index 0000000000..76299df9be
--- /dev/null
+++ b/third_party/python/json-e/setup.py
@@ -0,0 +1,31 @@
+import json
+import os
+from setuptools import setup, find_packages
+
+package_json = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'package.json')
+with open(package_json) as f:
+ version = json.load(f)['version']
+
+setup(name='json-e',
+ version=version,
+ description='A data-structure parameterization system written for embedding context in JSON objects',
+ author='Dustin J. Mitchell',
+ url='https://taskcluster.github.io/json-e/',
+ author_email='dustin@mozilla.com',
+ packages=['jsone'],
+ test_suite='nose.collector',
+ license='MPL2',
+ extras_require={
+ 'release': [
+ 'towncrier',
+ ],
+ },
+ tests_require=[
+ "freezegun",
+ "hypothesis",
+ "nose",
+ "PyYAML",
+ "python-dateutil",
+ 'pep8',
+ ]
+)
diff --git a/third_party/python/jsonschema/.appveyor.yml b/third_party/python/jsonschema/.appveyor.yml
new file mode 100644
index 0000000000..80d5aa5a49
--- /dev/null
+++ b/third_party/python/jsonschema/.appveyor.yml
@@ -0,0 +1,33 @@
+build: false
+environment:
+ VENV: "%APPVEYOR_BUILD_FOLDER%\\venv"
+
+ matrix:
+ - TOXENV: py35-tests
+ PYTHON: "C:\\Python35"
+
+ - TOXENV: py35-tests
+ PYTHON: "C:\\Python35-x64"
+
+ - TOXENV: py36-tests
+ PYTHON: "C:\\Python36"
+
+ - TOXENV: py36-tests
+ PYTHON: "C:\\Python36-x64"
+
+ - TOXENV: py37-tests
+ PYTHON: "C:\\Python37"
+
+ - TOXENV: py37-tests
+ PYTHON: "C:\\Python37-x64"
+
+init:
+ - echo "TOXENV - %TOXENV%"
+
+install:
+ - ps: Update-AppveyorBuild -Version "v$(python setup.py --version) b$Env:APPVEYOR_BUILD_NUMBER"
+ - virtualenv -p "%PYTHON%\\python.exe" "%VENV%"
+ - "%VENV%\\Scripts\\pip install tox"
+
+test_script:
+ - "%VENV%\\Scripts\\tox"
diff --git a/third_party/python/jsonschema/.coveragerc b/third_party/python/jsonschema/.coveragerc
new file mode 100644
index 0000000000..0f24d2f04c
--- /dev/null
+++ b/third_party/python/jsonschema/.coveragerc
@@ -0,0 +1,5 @@
+# vim: filetype=dosini:
+[run]
+branch = True
+source = jsonschema
+omit = */jsonschema/_reflect.py,*/jsonschema/__main__.py,*/jsonschema/benchmarks/*
diff --git a/third_party/python/jsonschema/.github/FUNDING.yml b/third_party/python/jsonschema/.github/FUNDING.yml
new file mode 100644
index 0000000000..fc40c007ca
--- /dev/null
+++ b/third_party/python/jsonschema/.github/FUNDING.yml
@@ -0,0 +1,5 @@
+# These are supported funding model platforms
+
+github: "Julian"
+patreon: "JulianWasTaken"
+tidelift: "pypi/jsonschema"
diff --git a/third_party/python/jsonschema/.github/SECURITY.md b/third_party/python/jsonschema/.github/SECURITY.md
new file mode 100644
index 0000000000..fd524e9476
--- /dev/null
+++ b/third_party/python/jsonschema/.github/SECURITY.md
@@ -0,0 +1,21 @@
+# Security Policy
+
+## Supported Versions
+
+In general, only the latest released ``jsonschema`` version is supported
+and will receive updates.
+
+## Reporting a Vulnerability
+
+To report a security vulnerability, please send an email to
+``Julian+Security@GrayVines.com`` with subject line ``SECURITY
+(jsonschema)``.
+
+I will do my best to respond within 48 hours to acknowledge the message
+and discuss further steps.
+
+If the vulnerability is accepted, an advisory will be sent out via
+GitHub's security advisory functionality.
+
+For non-sensitive discussion related to this policy itself, feel free to
+open an issue on the issue tracker.
diff --git a/third_party/python/jsonschema/.gitignore b/third_party/python/jsonschema/.gitignore
new file mode 100644
index 0000000000..31236db578
--- /dev/null
+++ b/third_party/python/jsonschema/.gitignore
@@ -0,0 +1,5 @@
+_cache
+_static
+_templates
+
+TODO
diff --git a/third_party/python/jsonschema/.travis.yml b/third_party/python/jsonschema/.travis.yml
new file mode 100644
index 0000000000..d31d77e842
--- /dev/null
+++ b/third_party/python/jsonschema/.travis.yml
@@ -0,0 +1,30 @@
+sudo: false
+
+language: python
+
+dist: xenial
+
+python:
+ - 3.5
+ - 3.6
+ - 3.7
+ - 3.8
+ - pypy
+ - pypy3
+
+install:
+ - pip install tox-travis
+
+script:
+ - tox
+
+after_success:
+ - tox -e codecov
+
+addons:
+ apt:
+ packages:
+ - libenchant-dev
+
+git:
+ depth: false
diff --git a/third_party/python/jsonschema/CHANGELOG.rst b/third_party/python/jsonschema/CHANGELOG.rst
new file mode 100644
index 0000000000..8f0a2700ea
--- /dev/null
+++ b/third_party/python/jsonschema/CHANGELOG.rst
@@ -0,0 +1,196 @@
+v3.2.0
+------
+
+* Added a ``format_nongpl`` setuptools extra, which installs only ``format``
+ dependencies that are non-GPL (#619).
+
+v3.1.1
+------
+
+* Temporarily revert the switch to ``js-regex`` until #611 and #612 are
+ resolved.
+
+v3.1.0
+------
+
+* Regular expressions throughout schemas now respect the ECMA 262 dialect, as
+ recommended by the specification (#609).
+
+v3.0.2
+------
+
+* Fixed a bug where ``0`` and ``False`` were considered equal by
+ ``const`` and ``enum`` (#575).
+
+v3.0.1
+------
+
+* Fixed a bug where extending validators did not preserve their notion
+ of which validator property contains ``$id`` information.
+
+v3.0.0
+------
+
+* Support for Draft 6 and Draft 7
+* Draft 7 is now the default
+* New ``TypeChecker`` object for more complex type definitions (and overrides)
+* Falling back to isodate for the date-time format checker is no longer
+ attempted, in accordance with the specification
+
+v2.6.0
+------
+
+* Support for Python 2.6 has been dropped.
+* Improve a few error messages for ``uniqueItems`` (#224) and
+ ``additionalProperties`` (#317)
+* Fixed an issue with ``ErrorTree``'s handling of multiple errors (#288)
+
+v2.5.0
+------
+
+* Improved performance on CPython by adding caching around ref resolution
+ (#203)
+
+v2.4.0
+------
+
+* Added a CLI (#134)
+* Added absolute path and absolute schema path to errors (#120)
+* Added ``relevance``
+* Meta-schemas are now loaded via ``pkgutil``
+
+v2.3.0
+------
+
+* Added ``by_relevance`` and ``best_match`` (#91)
+* Fixed ``format`` to allow adding formats for non-strings (#125)
+* Fixed the ``uri`` format to reject URI references (#131)
+
+v2.2.0
+------
+
+* Compile the host name regex (#127)
+* Allow arbitrary objects to be types (#129)
+
+v2.1.0
+------
+
+* Support RFC 3339 datetimes in conformance with the spec
+* Fixed error paths for additionalItems + items (#122)
+* Fixed wording for min / maxProperties (#117)
+
+
+v2.0.0
+------
+
+* Added ``create`` and ``extend`` to ``jsonschema.validators``
+* Removed ``ValidatorMixin``
+* Fixed array indices ref resolution (#95)
+* Fixed unknown scheme defragmenting and handling (#102)
+
+
+v1.3.0
+------
+
+* Better error tracebacks (#83)
+* Raise exceptions in ``ErrorTree``\s for keys not in the instance (#92)
+* __cause__ (#93)
+
+
+v1.2.0
+------
+
+* More attributes for ValidationError (#86)
+* Added ``ValidatorMixin.descend``
+* Fixed bad ``RefResolutionError`` message (#82)
+
+
+v1.1.0
+------
+
+* Canonicalize URIs (#70)
+* Allow attaching exceptions to ``format`` errors (#77)
+
+
+v1.0.0
+------
+
+* Support for Draft 4
+* Support for format
+* Longs are ints too!
+* Fixed a number of issues with ``$ref`` support (#66)
+* Draft4Validator is now the default
+* ``ValidationError.path`` is now in sequential order
+* Added ``ValidatorMixin``
+
+
+v0.8.0
+------
+
+* Full support for JSON References
+* ``validates`` for registering new validators
+* Documentation
+* Bugfixes
+
+ * uniqueItems not so unique (#34)
+ * Improper any (#47)
+
+
+v0.7
+----
+
+* Partial support for (JSON Pointer) ``$ref``
+* Deprecations
+
+ * ``Validator`` is replaced by ``Draft3Validator`` with a slightly different
+ interface
+ * ``validator(meta_validate=False)``
+
+
+v0.6
+----
+
+* Bugfixes
+
+ * Issue #30 - Wrong behavior for the dependencies property validation
+ * Fixed a miswritten test
+
+
+v0.5
+----
+
+* Bugfixes
+
+ * Issue #17 - require path for error objects
+ * Issue #18 - multiple type validation for non-objects
+
+
+v0.4
+----
+
+* Preliminary support for programmatic access to error details (Issue #5).
+ There are certainly some corner cases that don't do the right thing yet, but
+ this works mostly.
+
+ In order to make this happen (and also to clean things up a bit), a number
+ of deprecations are necessary:
+
+ * ``stop_on_error`` is deprecated in ``Validator.__init__``. Use
+ ``Validator.iter_errors()`` instead.
+ * ``number_types`` and ``string_types`` are deprecated there as well.
+ Use ``types={"number" : ..., "string" : ...}`` instead.
+ * ``meta_validate`` is also deprecated, and instead is now accepted as
+ an argument to ``validate``, ``iter_errors`` and ``is_valid``.
+
+* A bugfix or two
+
+
+v0.3
+----
+
+* Default for unknown types and properties is now to *not* error (consistent
+ with the schema).
+* Python 3 support
+* Removed dependency on SecureTypes now that the hash bug has been resolved.
+* "Numerous bug fixes" -- most notably, a divisibleBy error for floats and a
+ bunch of missing typechecks for irrelevant properties.
diff --git a/third_party/python/jsonschema/COPYING b/third_party/python/jsonschema/COPYING
new file mode 100644
index 0000000000..af9cfbdb13
--- /dev/null
+++ b/third_party/python/jsonschema/COPYING
@@ -0,0 +1,19 @@
+Copyright (c) 2013 Julian Berman
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/third_party/python/jsonschema/DEMO.ipynb b/third_party/python/jsonschema/DEMO.ipynb
new file mode 100644
index 0000000000..f008b793f0
--- /dev/null
+++ b/third_party/python/jsonschema/DEMO.ipynb
@@ -0,0 +1,167 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# jsonschema\n",
+ "---"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "`jsonschema` is an implementation of [JSON Schema](https://json-schema.org) for Python (supporting 2.7+ including Python 3)."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Usage"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from jsonschema import validate"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# A sample schema, like what we'd get from json.load()\n",
+ "schema = {\n",
+ " \"type\" : \"object\",\n",
+ " \"properties\" : {\n",
+ " \"price\" : {\"type\" : \"number\"},\n",
+ " \"name\" : {\"type\" : \"string\"},\n",
+ " },\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# If no exception is raised by validate(), the instance is valid.\n",
+ "validate(instance={\"name\" : \"Eggs\", \"price\" : 34.99}, schema=schema)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "ename": "ValidationError",
+ "evalue": "'Invalid' is not of type 'number'\n\nFailed validating 'type' in schema['properties']['price']:\n {'type': 'number'}\n\nOn instance['price']:\n 'Invalid'",
+ "output_type": "error",
+ "traceback": [
+ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+ "\u001b[0;31mValidationError\u001b[0m Traceback (most recent call last)",
+ "\u001b[0;32m<ipython-input-5-e1e543273d1f>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m validate(\n\u001b[1;32m 2\u001b[0m \u001b[0minstance\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m{\u001b[0m\u001b[0;34m\"name\"\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0;34m\"Eggs\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"price\"\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0;34m\"Invalid\"\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mschema\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mschema\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m )\n",
+ "\u001b[0;32m~/Development/jsonschema/jsonschema/validators.py\u001b[0m in \u001b[0;36mvalidate\u001b[0;34m(instance, schema, cls, *args, **kwargs)\u001b[0m\n\u001b[1;32m 899\u001b[0m \u001b[0merror\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mexceptions\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbest_match\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalidator\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0miter_errors\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minstance\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 900\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0merror\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 901\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0merror\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 902\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 903\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+ "\u001b[0;31mValidationError\u001b[0m: 'Invalid' is not of type 'number'\n\nFailed validating 'type' in schema['properties']['price']:\n {'type': 'number'}\n\nOn instance['price']:\n 'Invalid'"
+ ]
+ }
+ ],
+ "source": [
+ "validate(\n",
+ " instance={\"name\" : \"Eggs\", \"price\" : \"Invalid\"},\n",
+ " schema=schema,\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "It can also be used from console:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "!echo '{\"name\" : \"Eggs\", \"price\" : 34.99}' > /tmp/sample.json"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "!echo '{\"type\" : \"object\", \"properties\" : {\"price\" : {\"type\" : \"number\"}, \"name\" : {\"type\" : \"string\"}}}' > /tmp/sample.schema"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "!jsonschema -i /tmp/sample.json /tmp/sample.schema"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Do your own experiments here..."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Try `jsonschema` youself by adding your code below and running your own experiments 👇"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import jsonschema\n",
+ "\n",
+ "# your code here\n",
+ "jsonschema."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.7.4"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/third_party/python/jsonschema/MANIFEST.in b/third_party/python/jsonschema/MANIFEST.in
new file mode 100644
index 0000000000..a951c8a5e1
--- /dev/null
+++ b/third_party/python/jsonschema/MANIFEST.in
@@ -0,0 +1,4 @@
+include *.rst
+include COPYING
+include tox.ini
+recursive-include json *
diff --git a/third_party/python/jsonschema/PKG-INFO b/third_party/python/jsonschema/PKG-INFO
new file mode 100644
index 0000000000..cc0c592f96
--- /dev/null
+++ b/third_party/python/jsonschema/PKG-INFO
@@ -0,0 +1,206 @@
+Metadata-Version: 2.1
+Name: jsonschema
+Version: 3.2.0
+Summary: An implementation of JSON Schema validation for Python
+Home-page: https://github.com/Julian/jsonschema
+Author: Julian Berman
+Author-email: Julian@GrayVines.com
+License: UNKNOWN
+Project-URL: Docs, https://python-jsonschema.readthedocs.io/en/latest/
+Description: ==========
+ jsonschema
+ ==========
+
+ |PyPI| |Pythons| |Travis| |AppVeyor| |Codecov| |ReadTheDocs|
+
+ .. |PyPI| image:: https://img.shields.io/pypi/v/jsonschema.svg
+ :alt: PyPI version
+ :target: https://pypi.org/project/jsonschema/
+
+ .. |Pythons| image:: https://img.shields.io/pypi/pyversions/jsonschema.svg
+ :alt: Supported Python versions
+ :target: https://pypi.org/project/jsonschema/
+
+ .. |Travis| image:: https://travis-ci.com/Julian/jsonschema.svg?branch=master
+ :alt: Travis build status
+ :target: https://travis-ci.com/Julian/jsonschema
+
+ .. |AppVeyor| image:: https://ci.appveyor.com/api/projects/status/adtt0aiaihy6muyn/branch/master?svg=true
+ :alt: AppVeyor build status
+ :target: https://ci.appveyor.com/project/Julian/jsonschema
+
+ .. |Codecov| image:: https://codecov.io/gh/Julian/jsonschema/branch/master/graph/badge.svg
+ :alt: Codecov Code coverage
+ :target: https://codecov.io/gh/Julian/jsonschema
+
+ .. |ReadTheDocs| image:: https://readthedocs.org/projects/python-jsonschema/badge/?version=stable&style=flat
+ :alt: ReadTheDocs status
+ :target: https://python-jsonschema.readthedocs.io/en/stable/
+
+
+ ``jsonschema`` is an implementation of `JSON Schema <https://json-schema.org>`_
+ for Python (supporting 2.7+ including Python 3).
+
+ .. code-block:: python
+
+ >>> from jsonschema import validate
+
+ >>> # A sample schema, like what we'd get from json.load()
+ >>> schema = {
+ ... "type" : "object",
+ ... "properties" : {
+ ... "price" : {"type" : "number"},
+ ... "name" : {"type" : "string"},
+ ... },
+ ... }
+
+ >>> # If no exception is raised by validate(), the instance is valid.
+ >>> validate(instance={"name" : "Eggs", "price" : 34.99}, schema=schema)
+
+ >>> validate(
+ ... instance={"name" : "Eggs", "price" : "Invalid"}, schema=schema,
+ ... ) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValidationError: 'Invalid' is not of type 'number'
+
+ It can also be used from console:
+
+ .. code-block:: bash
+
+ $ jsonschema -i sample.json sample.schema
+
+ Features
+ --------
+
+ * Full support for
+ `Draft 7 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft7Validator>`_,
+ `Draft 6 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft6Validator>`_,
+ `Draft 4 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft4Validator>`_
+ and
+ `Draft 3 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft3Validator>`_
+
+ * `Lazy validation <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.IValidator.iter_errors>`_
+ that can iteratively report *all* validation errors.
+
+ * `Programmatic querying <https://python-jsonschema.readthedocs.io/en/latest/errors/>`_
+ of which properties or items failed validation.
+
+
+ Installation
+ ------------
+
+ ``jsonschema`` is available on `PyPI <https://pypi.org/project/jsonschema/>`_. You can install using `pip <https://pip.pypa.io/en/stable/>`_:
+
+ .. code-block:: bash
+
+ $ pip install jsonschema
+
+
+ Demo
+ ----
+
+ Try ``jsonschema`` interactively in this online demo:
+
+ .. image:: https://user-images.githubusercontent.com/1155573/56745335-8b158a00-6750-11e9-8776-83fa675939c4.png
+ :target: https://notebooks.ai/demo/gh/Julian/jsonschema
+ :alt: Open Live Demo
+
+
+ Online demo Notebook will look similar to this:
+
+
+ .. image:: https://user-images.githubusercontent.com/1155573/56820861-5c1c1880-6823-11e9-802a-ce01c5ec574f.gif
+ :alt: Open Live Demo
+ :width: 480 px
+
+
+ Release Notes
+ -------------
+
+ v3.1 brings support for ECMA 262 dialect regular expressions
+ throughout schemas, as recommended by the specification. Big
+ thanks to @Zac-HD for authoring support in a new `js-regex
+ <https://pypi.org/project/js-regex/>`_ library.
+
+
+ Running the Test Suite
+ ----------------------
+
+ If you have ``tox`` installed (perhaps via ``pip install tox`` or your
+ package manager), running ``tox`` in the directory of your source
+ checkout will run ``jsonschema``'s test suite on all of the versions
+ of Python ``jsonschema`` supports. If you don't have all of the
+ versions that ``jsonschema`` is tested under, you'll likely want to run
+ using ``tox``'s ``--skip-missing-interpreters`` option.
+
+ Of course you're also free to just run the tests on a single version with your
+ favorite test runner. The tests live in the ``jsonschema.tests`` package.
+
+
+ Benchmarks
+ ----------
+
+ ``jsonschema``'s benchmarks make use of `pyperf
+ <https://pyperf.readthedocs.io>`_.
+
+ Running them can be done via ``tox -e perf``, or by invoking the ``pyperf``
+ commands externally (after ensuring that both it and ``jsonschema`` itself are
+ installed)::
+
+ $ python -m pyperf jsonschema/benchmarks/test_suite.py --hist --output results.json
+
+ To compare to a previous run, use::
+
+ $ python -m pyperf compare_to --table reference.json results.json
+
+ See the ``pyperf`` documentation for more details.
+
+
+ Community
+ ---------
+
+ There's a `mailing list <https://groups.google.com/forum/#!forum/jsonschema>`_
+ for this implementation on Google Groups.
+
+ Please join, and feel free to send questions there.
+
+
+ Contributing
+ ------------
+
+ I'm Julian Berman.
+
+ ``jsonschema`` is on `GitHub <https://github.com/Julian/jsonschema>`_.
+
+ Get in touch, via GitHub or otherwise, if you've got something to contribute,
+ it'd be most welcome!
+
+ You can also generally find me on Freenode (nick: ``tos9``) in various
+ channels, including ``#python``.
+
+ If you feel overwhelmingly grateful, you can also woo me with beer money
+ via Google Pay with the email in my GitHub profile.
+
+ And for companies who appreciate ``jsonschema`` and its continued support
+ and growth, ``jsonschema`` is also now supportable via `TideLift
+ <https://tidelift.com/subscription/pkg/pypi-jsonschema?utm_source=pypi-j
+ sonschema&utm_medium=referral&utm_campaign=readme>`_.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Provides-Extra: format
+Provides-Extra: format_nongpl
diff --git a/third_party/python/jsonschema/README.rst b/third_party/python/jsonschema/README.rst
new file mode 100644
index 0000000000..ccfb55d02d
--- /dev/null
+++ b/third_party/python/jsonschema/README.rst
@@ -0,0 +1,179 @@
+==========
+jsonschema
+==========
+
+|PyPI| |Pythons| |Travis| |AppVeyor| |Codecov| |ReadTheDocs|
+
+.. |PyPI| image:: https://img.shields.io/pypi/v/jsonschema.svg
+ :alt: PyPI version
+ :target: https://pypi.org/project/jsonschema/
+
+.. |Pythons| image:: https://img.shields.io/pypi/pyversions/jsonschema.svg
+ :alt: Supported Python versions
+ :target: https://pypi.org/project/jsonschema/
+
+.. |Travis| image:: https://travis-ci.com/Julian/jsonschema.svg?branch=master
+ :alt: Travis build status
+ :target: https://travis-ci.com/Julian/jsonschema
+
+.. |AppVeyor| image:: https://ci.appveyor.com/api/projects/status/adtt0aiaihy6muyn/branch/master?svg=true
+ :alt: AppVeyor build status
+ :target: https://ci.appveyor.com/project/Julian/jsonschema
+
+.. |Codecov| image:: https://codecov.io/gh/Julian/jsonschema/branch/master/graph/badge.svg
+ :alt: Codecov Code coverage
+ :target: https://codecov.io/gh/Julian/jsonschema
+
+.. |ReadTheDocs| image:: https://readthedocs.org/projects/python-jsonschema/badge/?version=stable&style=flat
+ :alt: ReadTheDocs status
+ :target: https://python-jsonschema.readthedocs.io/en/stable/
+
+
+``jsonschema`` is an implementation of `JSON Schema <https://json-schema.org>`_
+for Python (supporting Python 2.7 and 3.5+).
+
+.. code-block:: python
+
+ >>> from jsonschema import validate
+
+ >>> # A sample schema, like what we'd get from json.load()
+ >>> schema = {
+ ... "type" : "object",
+ ... "properties" : {
+ ... "price" : {"type" : "number"},
+ ... "name" : {"type" : "string"},
+ ... },
+ ... }
+
+ >>> # If no exception is raised by validate(), the instance is valid.
+ >>> validate(instance={"name" : "Eggs", "price" : 34.99}, schema=schema)
+
+ >>> validate(
+ ... instance={"name" : "Eggs", "price" : "Invalid"}, schema=schema,
+ ... ) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValidationError: 'Invalid' is not of type 'number'
+
+It can also be used from the console:
+
+.. code-block:: bash
+
+ $ jsonschema -i sample.json sample.schema
+
+Features
+--------
+
+* Full support for
+ `Draft 7 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft7Validator>`_,
+ `Draft 6 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft6Validator>`_,
+ `Draft 4 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft4Validator>`_
+ and
+ `Draft 3 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft3Validator>`_
+
+* `Lazy validation <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.IValidator.iter_errors>`_
+  that can iteratively report *all* validation errors (a short sketch follows
+  this list).
+
+* `Programmatic querying <https://python-jsonschema.readthedocs.io/en/latest/errors/>`_
+ of which properties or items failed validation.
+
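+A minimal sketch of the lazy validation mentioned in the feature list above,
+reusing the ``schema`` object from the first example (the instance and the
+``key=str`` sort are illustrative only):
+
+.. code-block:: python
+
+    from jsonschema import Draft7Validator
+
+    validator = Draft7Validator(schema)
+
+    # Unlike validate(), iter_errors() keeps going after the first failure,
+    # yielding one ValidationError per problem (here: both "name" and "price").
+    errors = validator.iter_errors({"name": 123, "price": "fifty"})
+    for error in sorted(errors, key=str):
+        print(error.message)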
+
+Installation
+------------
+
+``jsonschema`` is available on `PyPI <https://pypi.org/project/jsonschema/>`_. You can install using `pip <https://pip.pypa.io/en/stable/>`_:
+
+.. code-block:: bash
+
+ $ pip install jsonschema
+
+
+Demo
+----
+
+Try ``jsonschema`` interactively in this online demo:
+
+.. image:: https://user-images.githubusercontent.com/1155573/56745335-8b158a00-6750-11e9-8776-83fa675939c4.png
+ :target: https://notebooks.ai/demo/gh/Julian/jsonschema
+ :alt: Open Live Demo
+
+
+The online demo notebook will look similar to this:
+
+
+.. image:: https://user-images.githubusercontent.com/1155573/56820861-5c1c1880-6823-11e9-802a-ce01c5ec574f.gif
+ :alt: Open Live Demo
+ :width: 480 px
+
+
+Release Notes
+-------------
+
+v3.1 brings support for ECMA 262 dialect regular expressions
+throughout schemas, as recommended by the specification. Big
+thanks to @Zac-HD for authoring support in a new `js-regex
+<https://pypi.org/project/js-regex/>`_ library.
+
+
+Running the Test Suite
+----------------------
+
+If you have ``tox`` installed (perhaps via ``pip install tox`` or your
+package manager), running ``tox`` in the directory of your source
+checkout will run ``jsonschema``'s test suite on all of the versions
+of Python ``jsonschema`` supports. If you don't have all of the
+versions that ``jsonschema`` is tested under, you'll likely want to run
+using ``tox``'s ``--skip-missing-interpreters`` option.
+
+Of course you're also free to just run the tests on a single version with your
+favorite test runner. The tests live in the ``jsonschema.tests`` package.
+
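+For example (a sketch only; environment names come from ``tox.ini``, and
+``pytest`` here merely stands in for whichever runner you prefer):
+
+.. code-block:: bash
+
+    $ tox --skip-missing-interpreters    # every configured Python you have installed
+    $ tox -l                             # list the individual environments
+    $ python -m pytest jsonschema/tests  # a single interpreter with your own runner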
+
+Benchmarks
+----------
+
+``jsonschema``'s benchmarks make use of `pyperf
+<https://pyperf.readthedocs.io>`_.
+
+Running them can be done via ``tox -e perf``, or by invoking the ``pyperf``
+commands externally (after ensuring that both it and ``jsonschema`` itself are
+installed)::
+
+ $ python -m pyperf jsonschema/benchmarks/test_suite.py --hist --output results.json
+
+To compare to a previous run, use::
+
+ $ python -m pyperf compare_to --table reference.json results.json
+
+See the ``pyperf`` documentation for more details.
+
+
+Community
+---------
+
+There's a `mailing list <https://groups.google.com/forum/#!forum/jsonschema>`_
+for this implementation on Google Groups.
+
+Please join, and feel free to send questions there.
+
+
+Contributing
+------------
+
+I'm Julian Berman.
+
+``jsonschema`` is on `GitHub <https://github.com/Julian/jsonschema>`_.
+
+Get in touch, via GitHub or otherwise, if you've got something to contribute,
+it'd be most welcome!
+
+You can also generally find me on Freenode (nick: ``tos9``) in various
+channels, including ``#python``.
+
+If you feel overwhelmingly grateful, you can also woo me with beer money
+via Google Pay with the email in my GitHub profile.
+
+And for companies that appreciate ``jsonschema`` and its continued support
+and growth, ``jsonschema`` is also now supportable via `Tidelift
+<https://tidelift.com/subscription/pkg/pypi-jsonschema?utm_source=pypi-jsonschema&utm_medium=referral&utm_campaign=readme>`_.
diff --git a/third_party/python/jsonschema/codecov.yml b/third_party/python/jsonschema/codecov.yml
new file mode 100644
index 0000000000..a370000d6b
--- /dev/null
+++ b/third_party/python/jsonschema/codecov.yml
@@ -0,0 +1,11 @@
+coverage:
+ precision: 2
+ round: down
+ status:
+ patch:
+ default:
+ target: 100%
+
+comment:
+ layout: "header, diff, uncovered"
+ behavior: default
diff --git a/third_party/python/jsonschema/demo.yml b/third_party/python/jsonschema/demo.yml
new file mode 100644
index 0000000000..a2d7e46f55
--- /dev/null
+++ b/third_party/python/jsonschema/demo.yml
@@ -0,0 +1,2 @@
+requirements:
+ - jsonschema==3.0.1
diff --git a/third_party/python/jsonschema/json/.gitignore b/third_party/python/jsonschema/json/.gitignore
new file mode 100644
index 0000000000..1333ed77b7
--- /dev/null
+++ b/third_party/python/jsonschema/json/.gitignore
@@ -0,0 +1 @@
+TODO
diff --git a/third_party/python/jsonschema/json/.travis.yml b/third_party/python/jsonschema/json/.travis.yml
new file mode 100644
index 0000000000..f65e40bb85
--- /dev/null
+++ b/third_party/python/jsonschema/json/.travis.yml
@@ -0,0 +1,8 @@
+language: python
+python: "3.7"
+node_js: "9"
+install:
+ - pip install tox
+script:
+ - tox
+ - npm install && npm test || true
diff --git a/third_party/python/jsonschema/json/LICENSE b/third_party/python/jsonschema/json/LICENSE
new file mode 100644
index 0000000000..c28adbadd9
--- /dev/null
+++ b/third_party/python/jsonschema/json/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2012 Julian Berman
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/third_party/python/jsonschema/json/README.md b/third_party/python/jsonschema/json/README.md
new file mode 100644
index 0000000000..f65934c4f5
--- /dev/null
+++ b/third_party/python/jsonschema/json/README.md
@@ -0,0 +1,181 @@
+JSON Schema Test Suite [![Build Status](https://travis-ci.org/json-schema-org/JSON-Schema-Test-Suite.svg?branch=master)](https://travis-ci.org/json-schema-org/JSON-Schema-Test-Suite)
+======================
+
+This repository contains a set of JSON objects that implementors of JSON Schema
+validation libraries can use to test their validators.
+
+It is meant to be language agnostic and should require only a JSON parser.
+
+The conversion of the JSON objects into tests within your test framework of
+choice is still the job of the validator implementor.
+
+Structure of a Test
+-------------------
+
+If you're going to use this suite, you need to know how tests are laid out. The
+tests are contained in the `tests` directory at the root of this repository.
+
+Inside that directory is a subdirectory for each draft or version of the
+schema.
+
+If you look inside a draft directory, there are a number of `.json` files,
+which logically group a set of test cases together. Often the grouping is by
+property under test, but not always, especially within the optional test files
+(those under each draft's `optional/` directory).
+
+Inside each `.json` file is a single array containing objects. It's easiest to
+illustrate the structure of these with an example:
+
+```json
+ {
+ "description": "the description of the test case",
+ "schema": {"the schema that should" : "be validated against"},
+ "tests": [
+ {
+ "description": "a specific test of a valid instance",
+ "data": "the instance",
+ "valid": true
+ },
+ {
+ "description": "another specific test this time, invalid",
+ "data": 15,
+ "valid": false
+ }
+ ]
+ }
+```
+
+In short: a description, a schema, and some tests, where `tests` is an array
+containing one or more objects, each with a description, some data, and a
+boolean indicating whether that data should be valid or invalid against the
+schema.
+
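+As a rough sketch of that conversion (using Python, the `jsonschema` package
+and `unittest` as one arbitrary choice; the helper name and paths below are
+illustrative and relative to this repository's root):
+
+```python
+import json
+import unittest
+from pathlib import Path
+
+from jsonschema import Draft7Validator
+
+
+def suite_cases(draft_dir):
+    """Yield (case, test) pairs from every .json file in one draft directory."""
+    for path in sorted(Path(draft_dir).glob("*.json")):
+        for case in json.loads(path.read_text()):
+            for test in case["tests"]:
+                yield case, test
+
+
+class TestDraft7(unittest.TestCase):
+    def test_suite(self):
+        # The remote-ref cases additionally expect the schemas under remotes/
+        # to be served on localhost:1234 (see `bin/jsonschema_suite serve`).
+        for case, test in suite_cases("tests/draft7"):
+            with self.subTest(case=case["description"], test=test["description"]):
+                errors = list(Draft7Validator(case["schema"]).iter_errors(test["data"]))
+                self.assertEqual(not errors, test["valid"])
+```
+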
+Coverage
+--------
+
+Drafts 03, 04, 06, and 07 should have full coverage, with drafts 06 and 07
+being considered current and actively supported. Bug fixes will be made as
+needed for draft-04 as it is still the most widely used, while draft-03
+is long since deprecated.
+
+If you see anything missing from the current supported drafts, or incorrect
+on any draft still accepting bug fixes, please file an issue or submit a PR.
+
+Who Uses the Test Suite
+-----------------------
+
+This suite is being used by:
+
+### Clojure ###
+
+* [jinx](https://github.com/juxt/jinx)
+* [json-schema](https://github.com/tatut/json-schema)
+
+### CoffeeScript ###
+
+* [jsck](https://github.com/pandastrike/jsck)
+
+### C++ ###
+
+* [Modern C++ JSON schema validator](https://github.com/pboettch/json-schema-validator)
+
+### Dart ###
+
+* [json_schema](https://github.com/patefacio/json_schema)
+
+### Elixir ###
+
+* [ex_json_schema](https://github.com/jonasschmidt/ex_json_schema)
+
+### Erlang ###
+
+* [jesse](https://github.com/for-GET/jesse)
+
+### Go ###
+
+* [gojsonschema](https://github.com/sigu-399/gojsonschema)
+* [validate-json](https://github.com/cesanta/validate-json)
+
+### Haskell ###
+
+* [aeson-schema](https://github.com/timjb/aeson-schema)
+* [hjsonschema](https://github.com/seagreen/hjsonschema)
+
+### Java ###
+
+* [json-schema-validator](https://github.com/daveclayton/json-schema-validator)
+* [everit-org/json-schema](https://github.com/everit-org/json-schema)
+* [networknt/json-schema-validator](https://github.com/networknt/json-schema-validator)
+* [Justify](https://github.com/leadpony/justify)
+
+### JavaScript ###
+
+* [json-schema-benchmark](https://github.com/Muscula/json-schema-benchmark)
+* [direct-schema](https://github.com/IreneKnapp/direct-schema)
+* [is-my-json-valid](https://github.com/mafintosh/is-my-json-valid)
+* [jassi](https://github.com/iclanzan/jassi)
+* [JaySchema](https://github.com/natesilva/jayschema)
+* [json-schema-valid](https://github.com/ericgj/json-schema-valid)
+* [Jsonary](https://github.com/jsonary-js/jsonary)
+* [jsonschema](https://github.com/tdegrunt/jsonschema)
+* [request-validator](https://github.com/bugventure/request-validator)
+* [skeemas](https://github.com/Prestaul/skeemas)
+* [tv4](https://github.com/geraintluff/tv4)
+* [z-schema](https://github.com/zaggino/z-schema)
+* [jsen](https://github.com/bugventure/jsen)
+* [ajv](https://github.com/epoberezkin/ajv)
+* [djv](https://github.com/korzio/djv)
+
+### Node.js ###
+
+For node.js developers, the suite is also available as an
+[npm](https://www.npmjs.com/package/@json-schema-org/tests) package.
+
+Node-specific support is maintained in a [separate
+repository](https://github.com/json-schema-org/json-schema-test-suite-npm)
+which also welcomes your contributions!
+
+### .NET ###
+
+* [Newtonsoft.Json.Schema](https://github.com/JamesNK/Newtonsoft.Json.Schema)
+* [Manatee.Json](https://github.com/gregsdennis/Manatee.Json)
+
+### PHP ###
+
+* [json-schema](https://github.com/justinrainbow/json-schema)
+* [json-guard](https://github.com/thephpleague/json-guard)
+
+### PostgreSQL ###
+
+* [postgres-json-schema](https://github.com/gavinwahl/postgres-json-schema)
+* [is_jsonb_valid](https://github.com/furstenheim/is_jsonb_valid)
+
+### Python ###
+
+* [jsonschema](https://github.com/Julian/jsonschema)
+* [fastjsonschema](https://github.com/seznam/python-fastjsonschema)
+* [hypothesis-jsonschema](https://github.com/Zac-HD/hypothesis-jsonschema)
+
+### Ruby ###
+
+* [json-schema](https://github.com/hoxworth/json-schema)
+* [json_schemer](https://github.com/davishmcclurg/json_schemer)
+
+### Rust ###
+
+* [valico](https://github.com/rustless/valico)
+
+### Swift ###
+
+* [JSONSchema](https://github.com/kylef/JSONSchema.swift)
+
+If you use it as well, please fork and send a pull request adding yourself to
+the list :).
+
+Contributing
+------------
+
+If you see something missing or incorrect, a pull request is most welcome!
+
+There are some sanity checks in place for testing the test suite. You can run
+them with `bin/jsonschema_suite check && npm test` or `tox && npm test`. They will be run automatically by
+[Travis CI](https://travis-ci.org/) as well.
diff --git a/third_party/python/jsonschema/json/bin/jsonschema_suite b/third_party/python/jsonschema/json/bin/jsonschema_suite
new file mode 100755
index 0000000000..6b1c486450
--- /dev/null
+++ b/third_party/python/jsonschema/json/bin/jsonschema_suite
@@ -0,0 +1,298 @@
+#! /usr/bin/env python3
+from __future__ import print_function
+from pprint import pformat
+import argparse
+import errno
+import fnmatch
+import json
+import os
+import random
+import shutil
+import sys
+import textwrap
+import unittest
+import warnings
+
+if getattr(unittest, "skipIf", None) is None:
+ unittest.skipIf = lambda cond, msg : lambda fn : fn
+
+try:
+ import jsonschema
+except ImportError:
+ jsonschema = None
+else:
+ validators = getattr(
+ jsonschema.validators, "validators", jsonschema.validators
+ )
+
+
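+# This script lives in bin/, so ROOT_DIR is the repository root and the test
+# cases themselves live under ROOT_DIR/tests/<draft>/.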
+ROOT_DIR = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), os.pardir).rstrip("__pycache__"),
+)
+SUITE_ROOT_DIR = os.path.join(ROOT_DIR, "tests")
+
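+# Schemas referenced by the remote-$ref tests: the `serve` command below serves
+# them at http://localhost:1234/<name>, and `dump_remotes` writes them out as
+# files (by default into REMOTES_DIR).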
+REMOTES = {
+ "integer.json": {u"type": u"integer"},
+ "name.json": {
+ u"type": "string",
+ u"definitions": {
+ u"orNull": {u"anyOf": [{u"type": u"null"}, {u"$ref": u"#"}]},
+ },
+ },
+ "name-defs.json": {
+ u"type": "string",
+ u"$defs": {
+ u"orNull": {u"anyOf": [{u"type": u"null"}, {u"$ref": u"#"}]},
+ },
+ },
+ "subSchemas.json": {
+ u"integer": {u"type": u"integer"},
+ u"refToInteger": {u"$ref": u"#/integer"},
+ },
+ "folder/folderInteger.json": {u"type": u"integer"}
+}
+REMOTES_DIR = os.path.join(ROOT_DIR, "remotes")
+
+with open(os.path.join(ROOT_DIR, "test-schema.json")) as schema:
+ TESTSUITE_SCHEMA = json.load(schema)
+
+def files(paths):
+ for path in paths:
+ with open(path) as test_file:
+ yield json.load(test_file)
+
+
+def groups(paths):
+ for test_file in files(paths):
+ for group in test_file:
+ yield group
+
+
+def cases(paths):
+ for test_group in groups(paths):
+ for test in test_group["tests"]:
+ test["schema"] = test_group["schema"]
+ yield test
+
+
+def collect(root_dir):
+ for root, dirs, files in os.walk(root_dir):
+ for filename in fnmatch.filter(files, "*.json"):
+ yield os.path.join(root, filename)
+
+
+class SanityTests(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ print("Looking for tests in %s" % SUITE_ROOT_DIR)
+ cls.test_files = list(collect(SUITE_ROOT_DIR))
+ print("Found %s test files" % len(cls.test_files))
+ assert cls.test_files, "Didn't find the test files!"
+
+ def test_all_files_are_valid_json(self):
+ for path in self.test_files:
+ with open(path) as test_file:
+ try:
+ json.load(test_file)
+ except ValueError as error:
+ self.fail("%s contains invalid JSON (%s)" % (path, error))
+
+ def test_all_descriptions_have_reasonable_length(self):
+ for case in cases(self.test_files):
+ description = case["description"]
+ self.assertLess(
+ len(description),
+ 70,
+ "%r is too long! (keep it to less than 70 chars)" % (
+ description,
+ ),
+ )
+
+ def test_all_descriptions_are_unique(self):
+ for group in groups(self.test_files):
+ descriptions = set(test["description"] for test in group["tests"])
+ self.assertEqual(
+ len(descriptions),
+ len(group["tests"]),
+ "%r contains a duplicate description" % (group,)
+ )
+
+ @unittest.skipIf(jsonschema is None, "Validation library not present!")
+ def test_all_schemas_are_valid(self):
+ for schema in os.listdir(SUITE_ROOT_DIR):
+ schema_validator = validators.get(schema)
+ if schema_validator is not None:
+ test_files = collect(os.path.join(SUITE_ROOT_DIR, schema))
+ for case in cases(test_files):
+ try:
+ schema_validator.check_schema(case["schema"])
+ except jsonschema.SchemaError as error:
+ self.fail("%s contains an invalid schema (%s)" %
+ (case, error))
+ else:
+ warnings.warn("No schema validator for %s" % schema)
+
+ @unittest.skipIf(jsonschema is None, "Validation library not present!")
+ def test_suites_are_valid(self):
+ validator = jsonschema.Draft4Validator(TESTSUITE_SCHEMA)
+ for tests in files(self.test_files):
+ try:
+ validator.validate(tests)
+ except jsonschema.ValidationError as error:
+ self.fail(str(error))
+
+ def test_remote_schemas_are_updated(self):
+ files = {}
+ for parent, _, paths in os.walk(REMOTES_DIR):
+ for path in paths:
+ absolute_path = os.path.join(parent, path)
+ with open(absolute_path) as schema_file:
+ files[absolute_path] = json.load(schema_file)
+
+ expected = {
+ os.path.join(REMOTES_DIR, path): contents
+ for path, contents in REMOTES.items()
+ }
+
+ missing = set(files).symmetric_difference(expected)
+ changed = {
+ path
+ for path, contents in expected.items()
+ if path in files
+ and contents != files[path]
+ }
+
+ self.assertEqual(
+ files,
+ expected,
+ msg=textwrap.dedent(
+ """
+ Remotes in the remotes/ directory do not match those in the
+ ``jsonschema_suite`` Python script.
+
+ Unfortunately for the minute, each remote file is duplicated in
+ two places.""" + ("""
+
+ Only present in one location:
+
+ {}""".format("\n".join(missing)) if missing else "") + ("""
+
+ Conflicting between the two:
+
+ {}""".format("\n".join(changed)) if changed else "")
+ )
+ )
+
+
+def main(arguments):
+ if arguments.command == "check":
+ suite = unittest.TestLoader().loadTestsFromTestCase(SanityTests)
+ result = unittest.TextTestRunner(verbosity=2).run(suite)
+ sys.exit(not result.wasSuccessful())
+ elif arguments.command == "flatten":
+ selected_cases = [case for case in cases(collect(arguments.version))]
+
+ if arguments.randomize:
+ random.shuffle(selected_cases)
+
+ json.dump(selected_cases, sys.stdout, indent=4, sort_keys=True)
+ elif arguments.command == "remotes":
+ json.dump(REMOTES, sys.stdout, indent=4, sort_keys=True)
+ elif arguments.command == "dump_remotes":
+ if arguments.update:
+ shutil.rmtree(arguments.out_dir, ignore_errors=True)
+
+ try:
+ os.makedirs(arguments.out_dir)
+ except OSError as e:
+ if e.errno == errno.EEXIST:
+ print("%s already exists. Aborting." % arguments.out_dir)
+ sys.exit(1)
+ raise
+
+ for url, schema in REMOTES.items():
+ filepath = os.path.join(arguments.out_dir, url)
+
+ try:
+ os.makedirs(os.path.dirname(filepath))
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ with open(filepath, "w") as out_file:
+ json.dump(schema, out_file, indent=4, sort_keys=True)
+ out_file.write("\n")
+ elif arguments.command == "serve":
+ try:
+ from flask import Flask, jsonify
+ except ImportError:
+ print(textwrap.dedent("""
+ The Flask library is required to serve the remote schemas.
+
+ You can install it by running `pip install Flask`.
+
+ Alternatively, see the `jsonschema_suite remotes` or
+ `jsonschema_suite dump_remotes` commands to create static files
+ that can be served with your own web server.
+ """.strip("\n")))
+ sys.exit(1)
+
+ app = Flask(__name__)
+
+ @app.route("/<path:path>")
+ def serve_path(path):
+ if path in REMOTES:
+ return jsonify(REMOTES[path])
+ return "Document does not exist.", 404
+
+ app.run(port=1234)
+
+
+parser = argparse.ArgumentParser(
+ description="JSON Schema Test Suite utilities",
+)
+subparsers = parser.add_subparsers(help="utility commands", dest="command")
+
+check = subparsers.add_parser("check", help="Sanity check the test suite.")
+
+flatten = subparsers.add_parser(
+ "flatten",
+ help="Output a flattened file containing a selected version's test cases."
+)
+flatten.add_argument(
+ "--randomize",
+ action="store_true",
+ help="Randomize the order of the outputted cases.",
+)
+flatten.add_argument(
+ "version", help="The directory containing the version to output",
+)
+
+remotes = subparsers.add_parser(
+ "remotes",
+ help="Output the expected URLs and their associated schemas for remote "
+ "ref tests as a JSON object."
+)
+
+dump_remotes = subparsers.add_parser(
+ "dump_remotes", help="Dump the remote ref schemas into a file tree",
+)
+dump_remotes.add_argument(
+ "--update",
+ action="store_true",
+ help="Update the remotes in an existing directory.",
+)
+dump_remotes.add_argument(
+ "--out-dir",
+ default=REMOTES_DIR,
+ type=os.path.abspath,
+ help="The output directory to create as the root of the file tree",
+)
+
+serve = subparsers.add_parser(
+ "serve",
+ help="Start a webserver to serve schemas used by remote ref tests."
+)
+
+if __name__ == "__main__":
+ main(parser.parse_args())
diff --git a/third_party/python/jsonschema/json/index.js b/third_party/python/jsonschema/json/index.js
new file mode 100644
index 0000000000..b138226b17
--- /dev/null
+++ b/third_party/python/jsonschema/json/index.js
@@ -0,0 +1,45 @@
+'use strict';
+
+const Ajv = require('ajv');
+const jsonSchemaTest = require('json-schema-test');
+const assert = require('assert');
+
+const refs = {
+ 'http://localhost:1234/integer.json': require('./remotes/integer.json'),
+ 'http://localhost:1234/subSchemas.json': require('./remotes/subSchemas.json'),
+ 'http://localhost:1234/folder/folderInteger.json': require('./remotes/folder/folderInteger.json'),
+ 'http://localhost:1234/name.json': require('./remotes/name.json'),
+ 'http://localhost:1234/name-defs.json': require('./remotes/name-defs.json')
+};
+
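+// Test suites skipped for each draft (typically optional features or formats
+// that ajv does not implement).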
+const SKIP = {
+ 4: ['optional/zeroTerminatedFloats'],
+ 7: [
+ 'format/idn-email',
+ 'format/idn-hostname',
+ 'format/iri',
+ 'format/iri-reference',
+ 'optional/content'
+ ]
+};
+
+[4, 6, 7].forEach((draft) => {
+ let ajv;
+ if (draft == 7) {
+ ajv = new Ajv({format: 'full'});
+ } else {
+ const schemaId = draft == 4 ? 'id' : '$id';
+ ajv = new Ajv({format: 'full', meta: false, schemaId});
+ ajv.addMetaSchema(require(`ajv/lib/refs/json-schema-draft-0${draft}.json`));
+ ajv._opts.defaultMeta = `http://json-schema.org/draft-0${draft}/schema#`;
+ }
+ for (const uri in refs) ajv.addSchema(refs[uri], uri);
+
+ jsonSchemaTest(ajv, {
+ description: `Test suite draft-0${draft}`,
+ suites: {tests: `./tests/draft${draft}/{**/,}*.json`},
+ skip: SKIP[draft],
+ cwd: __dirname,
+ hideFolder: 'tests/'
+ });
+});
diff --git a/third_party/python/jsonschema/json/package.json b/third_party/python/jsonschema/json/package.json
new file mode 100644
index 0000000000..3980136cc6
--- /dev/null
+++ b/third_party/python/jsonschema/json/package.json
@@ -0,0 +1,28 @@
+{
+ "name": "json-schema-test-suite",
+ "version": "0.1.0",
+ "description": "A language agnostic test suite for the JSON Schema specifications",
+ "main": "index.js",
+ "scripts": {
+ "test": "mocha index.js -R spec"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/json-schema-org/JSON-Schema-Test-Suite.git"
+ },
+ "keywords": [
+ "json-schema",
+ "tests"
+ ],
+ "author": "http://json-schema.org",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/json-schema-org/JSON-Schema-Test-Suite/issues"
+ },
+ "homepage": "https://github.com/json-schema-org/JSON-Schema-Test-Suite#readme",
+ "devDependencies": {
+ "ajv": "^6.0.0-rc.1",
+ "json-schema-test": "^2.0.0",
+ "mocha": "^3.2.0"
+ }
+}
diff --git a/third_party/python/jsonschema/json/remotes/folder/folderInteger.json b/third_party/python/jsonschema/json/remotes/folder/folderInteger.json
new file mode 100644
index 0000000000..8b50ea3085
--- /dev/null
+++ b/third_party/python/jsonschema/json/remotes/folder/folderInteger.json
@@ -0,0 +1,3 @@
+{
+ "type": "integer"
+}
diff --git a/third_party/python/jsonschema/json/remotes/integer.json b/third_party/python/jsonschema/json/remotes/integer.json
new file mode 100644
index 0000000000..8b50ea3085
--- /dev/null
+++ b/third_party/python/jsonschema/json/remotes/integer.json
@@ -0,0 +1,3 @@
+{
+ "type": "integer"
+}
diff --git a/third_party/python/jsonschema/json/remotes/name-defs.json b/third_party/python/jsonschema/json/remotes/name-defs.json
new file mode 100644
index 0000000000..1dab4a4343
--- /dev/null
+++ b/third_party/python/jsonschema/json/remotes/name-defs.json
@@ -0,0 +1,15 @@
+{
+ "$defs": {
+ "orNull": {
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "$ref": "#"
+ }
+ ]
+ }
+ },
+ "type": "string"
+}
diff --git a/third_party/python/jsonschema/json/remotes/name.json b/third_party/python/jsonschema/json/remotes/name.json
new file mode 100644
index 0000000000..fceacb8097
--- /dev/null
+++ b/third_party/python/jsonschema/json/remotes/name.json
@@ -0,0 +1,15 @@
+{
+ "definitions": {
+ "orNull": {
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "$ref": "#"
+ }
+ ]
+ }
+ },
+ "type": "string"
+}
diff --git a/third_party/python/jsonschema/json/remotes/subSchemas.json b/third_party/python/jsonschema/json/remotes/subSchemas.json
new file mode 100644
index 0000000000..9f8030bceb
--- /dev/null
+++ b/third_party/python/jsonschema/json/remotes/subSchemas.json
@@ -0,0 +1,8 @@
+{
+ "integer": {
+ "type": "integer"
+ },
+ "refToInteger": {
+ "$ref": "#/integer"
+ }
+}
diff --git a/third_party/python/jsonschema/json/test-schema.json b/third_party/python/jsonschema/json/test-schema.json
new file mode 100644
index 0000000000..670d2804cb
--- /dev/null
+++ b/third_party/python/jsonschema/json/test-schema.json
@@ -0,0 +1,59 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "definitions": {
+ "outputItem": {
+ "type": "object",
+ "properties": {
+ "valid": {"type": "boolean"},
+ "keywordLocation": {"type": "string"},
+ "absoluteKeywordLocation": {
+ "type": "string",
+ "format": "uri"
+ },
+ "instanceLocation": {"type": "string"},
+ "annotations": {
+ "type": "array",
+ "items": {"$ref": "#/definitions/outputItem"}
+ },
+ "errors": {
+ "type": "array",
+ "items": {"$ref": "#/definitions/outputItem"}
+ }
+ }
+ }
+ },
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": ["description", "schema", "tests"],
+ "properties": {
+ "description": {"type": "string"},
+ "schema": {},
+ "tests": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": ["description", "data", "valid"],
+ "properties": {
+ "description": {"type": "string"},
+ "data": {},
+ "valid": {"type": "boolean"},
+ "output": {
+ "type": "object",
+ "properties": {
+ "basic": {"$ref": "#/definitions/outputItem"},
+ "detailed": {"$ref": "#/definitions/outputItem"},
+ "verbose": {"$ref": "#/definitions/outputItem"}
+ },
+ "required": ["basic", "detailed", "verbose"]
+ }
+ },
+ "additionalProperties": false
+ },
+ "minItems": 1
+ }
+ },
+ "additionalProperties": false,
+ "minItems": 1
+ }
+}
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/additionalItems.json b/third_party/python/jsonschema/json/tests/draft2019-09/additionalItems.json
new file mode 100644
index 0000000000..abecc578be
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/additionalItems.json
@@ -0,0 +1,87 @@
+[
+ {
+ "description": "additionalItems as schema",
+ "schema": {
+ "items": [{}],
+ "additionalItems": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "additional items match schema",
+ "data": [ null, 2, 3, 4 ],
+ "valid": true
+ },
+ {
+ "description": "additional items do not match schema",
+ "data": [ null, 2, 3, "foo" ],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "items is schema, no additionalItems",
+ "schema": {
+ "items": {},
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "all items match schema",
+ "data": [ 1, 2, 3, 4, 5 ],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "array of items with no additionalItems",
+ "schema": {
+ "items": [{}, {}, {}],
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "fewer number of items present",
+ "data": [ 1, 2 ],
+ "valid": true
+ },
+ {
+ "description": "equal number of items present",
+ "data": [ 1, 2, 3 ],
+ "valid": true
+ },
+ {
+ "description": "additional items are not permitted",
+ "data": [ 1, 2, 3, 4 ],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "additionalItems as false without items",
+ "schema": {"additionalItems": false},
+ "tests": [
+ {
+ "description":
+ "items defaults to empty schema so everything is valid",
+ "data": [ 1, 2, 3, 4, 5 ],
+ "valid": true
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": {"foo" : "bar"},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "additionalItems are allowed by default",
+ "schema": {"items": [{"type": "integer"}]},
+ "tests": [
+ {
+ "description": "only the first item is validated",
+ "data": [1, "foo", false],
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/additionalProperties.json b/third_party/python/jsonschema/json/tests/draft2019-09/additionalProperties.json
new file mode 100644
index 0000000000..ffeac6b381
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/additionalProperties.json
@@ -0,0 +1,133 @@
+[
+ {
+ "description":
+ "additionalProperties being false does not allow other properties",
+ "schema": {
+ "properties": {"foo": {}, "bar": {}},
+ "patternProperties": { "^v": {} },
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "no additional properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "an additional property is invalid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : "boom"},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobarbaz",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "patternProperties are not additional properties",
+ "data": {"foo":1, "vroom": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "non-ASCII pattern with additionalProperties",
+ "schema": {
+ "patternProperties": {"^á": {}},
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "matching the pattern is valid",
+ "data": {"ármányos": 2},
+ "valid": true
+ },
+ {
+ "description": "not matching the pattern is invalid",
+ "data": {"élmény": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description":
+ "additionalProperties allows a schema which should validate",
+ "schema": {
+ "properties": {"foo": {}, "bar": {}},
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "no additional properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "an additional valid property is valid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : true},
+ "valid": true
+ },
+ {
+ "description": "an additional invalid property is invalid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : 12},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description":
+ "additionalProperties can exist by itself",
+ "schema": {
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "an additional valid property is valid",
+ "data": {"foo" : true},
+ "valid": true
+ },
+ {
+ "description": "an additional invalid property is invalid",
+ "data": {"foo" : 1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "additionalProperties are allowed by default",
+ "schema": {"properties": {"foo": {}, "bar": {}}},
+ "tests": [
+ {
+ "description": "additional properties are allowed",
+ "data": {"foo": 1, "bar": 2, "quux": true},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "additionalProperties should not look in applicators",
+ "schema": {
+ "allOf": [
+ {"properties": {"foo": {}}}
+ ],
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "properties defined in allOf are not allowed",
+ "data": {"foo": 1, "bar": true},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/allOf.json b/third_party/python/jsonschema/json/tests/draft2019-09/allOf.json
new file mode 100644
index 0000000000..eb612091aa
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/allOf.json
@@ -0,0 +1,218 @@
+[
+ {
+ "description": "allOf",
+ "schema": {
+ "allOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "allOf",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "mismatch second",
+ "data": {"foo": "baz"},
+ "valid": false
+ },
+ {
+ "description": "mismatch first",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "wrong type",
+ "data": {"foo": "baz", "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with base schema",
+ "schema": {
+ "properties": {"bar": {"type": "integer"}},
+ "required": ["bar"],
+ "allOf" : [
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ },
+ {
+ "properties": {
+ "baz": {"type": "null"}
+ },
+ "required": ["baz"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": {"foo": "quux", "bar": 2, "baz": null},
+ "valid": true
+ },
+ {
+ "description": "mismatch base schema",
+ "data": {"foo": "quux", "baz": null},
+ "valid": false
+ },
+ {
+ "description": "mismatch first allOf",
+ "data": {"bar": 2, "baz": null},
+ "valid": false
+ },
+ {
+ "description": "mismatch second allOf",
+ "data": {"foo": "quux", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "mismatch both",
+ "data": {"bar": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf simple types",
+ "schema": {
+ "allOf": [
+ {"maximum": 30},
+ {"minimum": 20}
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": 25,
+ "valid": true
+ },
+ {
+ "description": "mismatch one",
+ "data": 35,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with boolean schemas, all true",
+ "schema": {"allOf": [true, true]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with boolean schemas, some false",
+ "schema": {"allOf": [true, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with boolean schemas, all false",
+ "schema": {"allOf": [false, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with one empty schema",
+ "schema": {
+ "allOf": [
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "any data is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with two empty schemas",
+ "schema": {
+ "allOf": [
+ {},
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "any data is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with the first empty schema",
+ "schema": {
+ "allOf": [
+ {},
+ { "type": "number" }
+ ]
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with the last empty schema",
+ "schema": {
+ "allOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/anchor.json b/third_party/python/jsonschema/json/tests/draft2019-09/anchor.json
new file mode 100644
index 0000000000..06b0ba4d25
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/anchor.json
@@ -0,0 +1,87 @@
+[
+ {
+ "description": "Location-independent identifier",
+ "schema": {
+ "allOf": [{
+ "$ref": "#foo"
+ }],
+ "$defs": {
+ "A": {
+ "$anchor": "foo",
+ "type": "integer"
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Location-independent identifier with absolute URI",
+ "schema": {
+ "allOf": [{
+ "$ref": "http://localhost:1234/bar#foo"
+ }],
+ "$defs": {
+ "A": {
+ "$id": "http://localhost:1234/bar",
+ "$anchor": "foo",
+ "type": "integer"
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Location-independent identifier with base URI change in subschema",
+ "schema": {
+ "$id": "http://localhost:1234/root",
+ "allOf": [{
+ "$ref": "http://localhost:1234/nested.json#foo"
+ }],
+ "$defs": {
+ "A": {
+ "$id": "nested.json",
+ "$defs": {
+ "B": {
+ "$anchor": "foo",
+ "type": "integer"
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/anyOf.json b/third_party/python/jsonschema/json/tests/draft2019-09/anyOf.json
new file mode 100644
index 0000000000..ab5eb386b4
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/anyOf.json
@@ -0,0 +1,189 @@
+[
+ {
+ "description": "anyOf",
+ "schema": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "minimum": 2
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first anyOf valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "second anyOf valid",
+ "data": 2.5,
+ "valid": true
+ },
+ {
+ "description": "both anyOf valid",
+ "data": 3,
+ "valid": true
+ },
+ {
+ "description": "neither anyOf valid",
+ "data": 1.5,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with base schema",
+ "schema": {
+ "type": "string",
+ "anyOf" : [
+ {
+ "maxLength": 2
+ },
+ {
+ "minLength": 4
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "mismatch base schema",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "one anyOf valid",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "both anyOf invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with boolean schemas, all true",
+ "schema": {"anyOf": [true, true]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "anyOf with boolean schemas, some true",
+ "schema": {"anyOf": [true, false]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "anyOf with boolean schemas, all false",
+ "schema": {"anyOf": [false, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf complex types",
+ "schema": {
+ "anyOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first anyOf valid (complex)",
+ "data": {"bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second anyOf valid (complex)",
+ "data": {"foo": "baz"},
+ "valid": true
+ },
+ {
+ "description": "both anyOf valid (complex)",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "neither anyOf valid (complex)",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with one empty schema",
+ "schema": {
+ "anyOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "number is valid",
+ "data": 123,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested anyOf, to check validation semantics",
+ "schema": {
+ "anyOf": [
+ {
+ "anyOf": [
+ {
+ "type": "null"
+ }
+ ]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "anything non-null is invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/boolean_schema.json b/third_party/python/jsonschema/json/tests/draft2019-09/boolean_schema.json
new file mode 100644
index 0000000000..6d40f23f26
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/boolean_schema.json
@@ -0,0 +1,104 @@
+[
+ {
+ "description": "boolean schema 'true'",
+ "schema": true,
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "boolean true is valid",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "boolean false is valid",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": "bar"},
+ "valid": true
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "array is valid",
+ "data": ["foo"],
+ "valid": true
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "boolean schema 'false'",
+ "schema": false,
+ "tests": [
+ {
+ "description": "number is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "boolean true is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "boolean false is invalid",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ },
+ {
+ "description": "object is invalid",
+ "data": {"foo": "bar"},
+ "valid": false
+ },
+ {
+ "description": "empty object is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "array is invalid",
+ "data": ["foo"],
+ "valid": false
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/const.json b/third_party/python/jsonschema/json/tests/draft2019-09/const.json
new file mode 100644
index 0000000000..c089625dc4
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/const.json
@@ -0,0 +1,170 @@
+[
+ {
+ "description": "const validation",
+ "schema": {"const": 2},
+ "tests": [
+ {
+ "description": "same value is valid",
+ "data": 2,
+ "valid": true
+ },
+ {
+ "description": "another value is invalid",
+ "data": 5,
+ "valid": false
+ },
+ {
+ "description": "another type is invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with object",
+ "schema": {"const": {"foo": "bar", "baz": "bax"}},
+ "tests": [
+ {
+ "description": "same object is valid",
+ "data": {"foo": "bar", "baz": "bax"},
+ "valid": true
+ },
+ {
+ "description": "same object with different property order is valid",
+ "data": {"baz": "bax", "foo": "bar"},
+ "valid": true
+ },
+ {
+ "description": "another object is invalid",
+ "data": {"foo": "bar"},
+ "valid": false
+ },
+ {
+ "description": "another type is invalid",
+ "data": [1, 2],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with array",
+ "schema": {"const": [{ "foo": "bar" }]},
+ "tests": [
+ {
+ "description": "same array is valid",
+ "data": [{"foo": "bar"}],
+ "valid": true
+ },
+ {
+ "description": "another array item is invalid",
+ "data": [2],
+ "valid": false
+ },
+ {
+ "description": "array with additional items is invalid",
+ "data": [1, 2, 3],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with null",
+ "schema": {"const": null},
+ "tests": [
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "not null is invalid",
+ "data": 0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with false does not match 0",
+ "schema": {"const": false},
+ "tests": [
+ {
+ "description": "false is valid",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "integer zero is invalid",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "float zero is invalid",
+ "data": 0.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with true does not match 1",
+ "schema": {"const": true},
+ "tests": [
+ {
+ "description": "true is valid",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "integer one is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "float one is invalid",
+ "data": 1.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with 0 does not match false",
+ "schema": {"const": 0},
+ "tests": [
+ {
+ "description": "false is invalid",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "integer zero is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "float zero is valid",
+ "data": 0.0,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "const with 1 does not match true",
+ "schema": {"const": 1},
+ "tests": [
+ {
+ "description": "true is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "integer one is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "float one is valid",
+ "data": 1.0,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/contains.json b/third_party/python/jsonschema/json/tests/draft2019-09/contains.json
new file mode 100644
index 0000000000..b7ae5a25fe
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/contains.json
@@ -0,0 +1,95 @@
+[
+ {
+ "description": "contains keyword validation",
+ "schema": {
+ "contains": {"minimum": 5}
+ },
+ "tests": [
+ {
+ "description": "array with item matching schema (5) is valid",
+ "data": [3, 4, 5],
+ "valid": true
+ },
+ {
+ "description": "array with item matching schema (6) is valid",
+ "data": [3, 4, 6],
+ "valid": true
+ },
+ {
+ "description": "array with two items matching schema (5, 6) is valid",
+ "data": [3, 4, 5, 6],
+ "valid": true
+ },
+ {
+ "description": "array without items matching schema is invalid",
+ "data": [2, 3, 4],
+ "valid": false
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "not array is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "contains keyword with const keyword",
+ "schema": {
+ "contains": { "const": 5 }
+ },
+ "tests": [
+ {
+ "description": "array with item 5 is valid",
+ "data": [3, 4, 5],
+ "valid": true
+ },
+ {
+ "description": "array with two items 5 is valid",
+ "data": [3, 4, 5, 5],
+ "valid": true
+ },
+ {
+ "description": "array without item 5 is invalid",
+ "data": [1, 2, 3, 4],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "contains keyword with boolean schema true",
+ "schema": {"contains": true},
+ "tests": [
+ {
+ "description": "any non-empty array is valid",
+ "data": ["foo"],
+ "valid": true
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "contains keyword with boolean schema false",
+ "schema": {"contains": false},
+ "tests": [
+ {
+ "description": "any non-empty array is invalid",
+ "data": ["foo"],
+ "valid": false
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/default.json b/third_party/python/jsonschema/json/tests/draft2019-09/default.json
new file mode 100644
index 0000000000..17629779fb
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/default.json
@@ -0,0 +1,49 @@
+[
+ {
+ "description": "invalid type for default",
+ "schema": {
+ "properties": {
+ "foo": {
+ "type": "integer",
+ "default": []
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when property is specified",
+ "data": {"foo": 13},
+ "valid": true
+ },
+ {
+ "description": "still valid when the invalid default is used",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "invalid string value for default",
+ "schema": {
+ "properties": {
+ "bar": {
+ "type": "string",
+ "minLength": 4,
+ "default": "bad"
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when property is specified",
+ "data": {"bar": "good"},
+ "valid": true
+ },
+ {
+ "description": "still valid when the invalid default is used",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/defs.json b/third_party/python/jsonschema/json/tests/draft2019-09/defs.json
new file mode 100644
index 0000000000..f2fbec42b2
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/defs.json
@@ -0,0 +1,24 @@
+[
+ {
+ "description": "valid definition",
+ "schema": {"$ref": "https://json-schema.org/draft/2019-09/schema"},
+ "tests": [
+ {
+ "description": "valid definition schema",
+ "data": {"$defs": {"foo": {"type": "integer"}}},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "invalid definition",
+ "schema": {"$ref": "https://json-schema.org/draft/2019-09/schema"},
+ "tests": [
+ {
+ "description": "invalid definition schema",
+ "data": {"$defs": {"foo": {"type": 1}}},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/dependencies.json b/third_party/python/jsonschema/json/tests/draft2019-09/dependencies.json
new file mode 100644
index 0000000000..8dd78aa5d8
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/dependencies.json
@@ -0,0 +1,268 @@
+[
+ {
+ "description": "dependencies",
+ "schema": {
+ "dependencies": {"bar": ["foo"]}
+ },
+ "tests": [
+ {
+ "description": "neither",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "nondependant",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "with dependency",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "missing dependency",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": ["bar"],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "dependencies with empty array",
+ "schema": {
+ "dependencies": {"bar": []}
+ },
+ "tests": [
+ {
+ "description": "empty object",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "object with one property",
+ "data": {"bar": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple dependencies",
+ "schema": {
+ "dependencies": {"quux": ["foo", "bar"]}
+ },
+ "tests": [
+ {
+ "description": "neither",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "nondependants",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "with dependencies",
+ "data": {"foo": 1, "bar": 2, "quux": 3},
+ "valid": true
+ },
+ {
+ "description": "missing dependency",
+ "data": {"foo": 1, "quux": 2},
+ "valid": false
+ },
+ {
+ "description": "missing other dependency",
+ "data": {"bar": 1, "quux": 2},
+ "valid": false
+ },
+ {
+ "description": "missing both dependencies",
+ "data": {"quux": 1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "multiple dependencies subschema",
+ "schema": {
+ "dependencies": {
+ "bar": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"type": "integer"}
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "no dependency",
+ "data": {"foo": "quux"},
+ "valid": true
+ },
+ {
+ "description": "wrong type",
+ "data": {"foo": "quux", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "wrong type other",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ },
+ {
+ "description": "wrong type both",
+ "data": {"foo": "quux", "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "dependencies with boolean subschemas",
+ "schema": {
+ "dependencies": {
+ "foo": true,
+ "bar": false
+ }
+ },
+ "tests": [
+ {
+ "description": "object with property having schema true is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "object with property having schema false is invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "object with both properties is invalid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "empty array of dependencies",
+ "schema": {
+ "dependencies": {
+ "foo": []
+ }
+ },
+ "tests": [
+ {
+ "description": "object with property is valid",
+ "data": { "foo": 1 },
+ "valid": true
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "non-object is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "dependencies with escaped characters",
+ "schema": {
+ "dependencies": {
+ "foo\nbar": ["foo\rbar"],
+ "foo\tbar": {
+ "minProperties": 4
+ },
+ "foo'bar": {"required": ["foo\"bar"]},
+ "foo\"bar": ["foo'bar"]
+ }
+ },
+ "tests": [
+ {
+ "description": "valid object 1",
+ "data": {
+ "foo\nbar": 1,
+ "foo\rbar": 2
+ },
+ "valid": true
+ },
+ {
+ "description": "valid object 2",
+ "data": {
+ "foo\tbar": 1,
+ "a": 2,
+ "b": 3,
+ "c": 4
+ },
+ "valid": true
+ },
+ {
+ "description": "valid object 3",
+ "data": {
+ "foo'bar": 1,
+ "foo\"bar": 2
+ },
+ "valid": true
+ },
+ {
+ "description": "invalid object 1",
+ "data": {
+ "foo\nbar": 1,
+ "foo": 2
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 2",
+ "data": {
+ "foo\tbar": 1,
+ "a": 2
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 3",
+ "data": {
+ "foo'bar": 1
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 4",
+ "data": {
+ "foo\"bar": 2
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/enum.json b/third_party/python/jsonschema/json/tests/draft2019-09/enum.json
new file mode 100644
index 0000000000..32d79026e1
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/enum.json
@@ -0,0 +1,179 @@
+[
+ {
+ "description": "simple enum validation",
+ "schema": {"enum": [1, 2, 3]},
+ "tests": [
+ {
+ "description": "one of the enum is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "something else is invalid",
+ "data": 4,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "heterogeneous enum validation",
+ "schema": {"enum": [6, "foo", [], true, {"foo": 12}]},
+ "tests": [
+ {
+ "description": "one of the enum is valid",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "something else is invalid",
+ "data": null,
+ "valid": false
+ },
+ {
+ "description": "objects are deep compared",
+ "data": {"foo": false},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enums in properties",
+ "schema": {
+ "type":"object",
+ "properties": {
+ "foo": {"enum":["foo"]},
+ "bar": {"enum":["bar"]}
+ },
+ "required": ["bar"]
+ },
+ "tests": [
+ {
+ "description": "both properties are valid",
+ "data": {"foo":"foo", "bar":"bar"},
+ "valid": true
+ },
+ {
+ "description": "missing optional property is valid",
+ "data": {"bar":"bar"},
+ "valid": true
+ },
+ {
+ "description": "missing required property is invalid",
+ "data": {"foo":"foo"},
+ "valid": false
+ },
+ {
+ "description": "missing all properties is invalid",
+ "data": {},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with escaped characters",
+ "schema": {
+ "enum": ["foo\nbar", "foo\rbar"]
+ },
+ "tests": [
+ {
+ "description": "member 1 is valid",
+ "data": "foo\nbar",
+ "valid": true
+ },
+ {
+ "description": "member 2 is valid",
+ "data": "foo\rbar",
+ "valid": true
+ },
+ {
+ "description": "another string is invalid",
+ "data": "abc",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with false does not match 0",
+ "schema": {"enum": [false]},
+ "tests": [
+ {
+ "description": "false is valid",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "integer zero is invalid",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "float zero is invalid",
+ "data": 0.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with true does not match 1",
+ "schema": {"enum": [true]},
+ "tests": [
+ {
+ "description": "true is valid",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "integer one is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "float one is invalid",
+ "data": 1.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with 0 does not match false",
+ "schema": {"enum": [0]},
+ "tests": [
+ {
+ "description": "false is invalid",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "integer zero is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "float zero is valid",
+ "data": 0.0,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "enum with 1 does not match true",
+ "schema": {"enum": [1]},
+ "tests": [
+ {
+ "description": "true is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "integer one is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "float one is valid",
+ "data": 1.0,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/exclusiveMaximum.json b/third_party/python/jsonschema/json/tests/draft2019-09/exclusiveMaximum.json
new file mode 100644
index 0000000000..dc3cd709d3
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/exclusiveMaximum.json
@@ -0,0 +1,30 @@
+[
+ {
+ "description": "exclusiveMaximum validation",
+ "schema": {
+ "exclusiveMaximum": 3.0
+ },
+ "tests": [
+ {
+ "description": "below the exclusiveMaximum is valid",
+ "data": 2.2,
+ "valid": true
+ },
+ {
+ "description": "boundary point is invalid",
+ "data": 3.0,
+ "valid": false
+ },
+ {
+ "description": "above the exclusiveMaximum is invalid",
+ "data": 3.5,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/exclusiveMinimum.json b/third_party/python/jsonschema/json/tests/draft2019-09/exclusiveMinimum.json
new file mode 100644
index 0000000000..b38d7ecec6
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/exclusiveMinimum.json
@@ -0,0 +1,30 @@
+[
+ {
+ "description": "exclusiveMinimum validation",
+ "schema": {
+ "exclusiveMinimum": 1.1
+ },
+ "tests": [
+ {
+ "description": "above the exclusiveMinimum is valid",
+ "data": 1.2,
+ "valid": true
+ },
+ {
+ "description": "boundary point is invalid",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "below the exclusiveMinimum is invalid",
+ "data": 0.6,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/format.json b/third_party/python/jsonschema/json/tests/draft2019-09/format.json
new file mode 100644
index 0000000000..93305f5cd1
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/format.json
@@ -0,0 +1,614 @@
+[
+ {
+ "description": "validation of e-mail addresses",
+ "schema": {"format": "email"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IDN e-mail addresses",
+ "schema": {"format": "idn-email"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of regexes",
+ "schema": {"format": "regex"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IP addresses",
+ "schema": {"format": "ipv4"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IPv6 addresses",
+ "schema": {"format": "ipv6"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IDN hostnames",
+ "schema": {"format": "idn-hostname"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of hostnames",
+ "schema": {"format": "hostname"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of date strings",
+ "schema": {"format": "date"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of date-time strings",
+ "schema": {"format": "date-time"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of time strings",
+ "schema": {"format": "time"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of JSON pointers",
+ "schema": {"format": "json-pointer"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of relative JSON pointers",
+ "schema": {"format": "relative-json-pointer"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IRIs",
+ "schema": {"format": "iri"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IRI references",
+ "schema": {"format": "iri-reference"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URIs",
+ "schema": {"format": "uri"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URI references",
+ "schema": {"format": "uri-reference"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URI templates",
+ "schema": {"format": "uri-template"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/if-then-else.json b/third_party/python/jsonschema/json/tests/draft2019-09/if-then-else.json
new file mode 100644
index 0000000000..be7328163d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/if-then-else.json
@@ -0,0 +1,188 @@
+[
+ {
+ "description": "ignore if without then or else",
+ "schema": {
+ "if": {
+ "const": 0
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when valid against lone if",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "valid when invalid against lone if",
+ "data": "hello",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ignore then without if",
+ "schema": {
+ "then": {
+ "const": 0
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when valid against lone then",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "valid when invalid against lone then",
+ "data": "hello",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ignore else without if",
+ "schema": {
+ "else": {
+ "const": 0
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when valid against lone else",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "valid when invalid against lone else",
+ "data": "hello",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "if and then without else",
+ "schema": {
+ "if": {
+ "exclusiveMaximum": 0
+ },
+ "then": {
+ "minimum": -10
+ }
+ },
+ "tests": [
+ {
+ "description": "valid through then",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "invalid through then",
+ "data": -100,
+ "valid": false
+ },
+ {
+ "description": "valid when if test fails",
+ "data": 3,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "if and else without then",
+ "schema": {
+ "if": {
+ "exclusiveMaximum": 0
+ },
+ "else": {
+ "multipleOf": 2
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when if test passes",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "valid through else",
+ "data": 4,
+ "valid": true
+ },
+ {
+ "description": "invalid through else",
+ "data": 3,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validate against correct branch, then vs else",
+ "schema": {
+ "if": {
+ "exclusiveMaximum": 0
+ },
+ "then": {
+ "minimum": -10
+ },
+ "else": {
+ "multipleOf": 2
+ }
+ },
+ "tests": [
+ {
+ "description": "valid through then",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "invalid through then",
+ "data": -100,
+ "valid": false
+ },
+ {
+ "description": "valid through else",
+ "data": 4,
+ "valid": true
+ },
+ {
+ "description": "invalid through else",
+ "data": 3,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "non-interference across combined schemas",
+ "schema": {
+ "allOf": [
+ {
+ "if": {
+ "exclusiveMaximum": 0
+ }
+ },
+ {
+ "then": {
+ "minimum": -10
+ }
+ },
+ {
+ "else": {
+ "multipleOf": 2
+ }
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid, but would have been invalid through then",
+ "data": -100,
+ "valid": true
+ },
+ {
+ "description": "valid, but would have been invalid through else",
+ "data": 3,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/items.json b/third_party/python/jsonschema/json/tests/draft2019-09/items.json
new file mode 100644
index 0000000000..6e98ee82da
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/items.json
@@ -0,0 +1,250 @@
+[
+ {
+ "description": "a schema given for items",
+ "schema": {
+ "items": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "valid items",
+ "data": [ 1, 2, 3 ],
+ "valid": true
+ },
+ {
+ "description": "wrong type of items",
+ "data": [1, "x"],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": {"foo" : "bar"},
+ "valid": true
+ },
+ {
+ "description": "JavaScript pseudo-array is valid",
+ "data": {
+ "0": "invalid",
+ "length": 1
+ },
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "an array of schemas for items",
+ "schema": {
+ "items": [
+ {"type": "integer"},
+ {"type": "string"}
+ ]
+ },
+ "tests": [
+ {
+ "description": "correct types",
+ "data": [ 1, "foo" ],
+ "valid": true
+ },
+ {
+ "description": "wrong types",
+ "data": [ "foo", 1 ],
+ "valid": false
+ },
+ {
+ "description": "incomplete array of items",
+ "data": [ 1 ],
+ "valid": true
+ },
+ {
+ "description": "array with additional items",
+ "data": [ 1, "foo", true ],
+ "valid": true
+ },
+ {
+ "description": "empty array",
+ "data": [ ],
+ "valid": true
+ },
+ {
+ "description": "JavaScript pseudo-array is valid",
+ "data": {
+ "0": "invalid",
+ "1": "valid",
+ "length": 2
+ },
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items with boolean schema (true)",
+ "schema": {"items": true},
+ "tests": [
+ {
+ "description": "any array is valid",
+ "data": [ 1, "foo", true ],
+ "valid": true
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items with boolean schema (false)",
+ "schema": {"items": false},
+ "tests": [
+ {
+ "description": "any non-empty array is invalid",
+ "data": [ 1, "foo", true ],
+ "valid": false
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items with boolean schemas",
+ "schema": {
+ "items": [true, false]
+ },
+ "tests": [
+ {
+ "description": "array with one item is valid",
+ "data": [ 1 ],
+ "valid": true
+ },
+ {
+ "description": "array with two items is invalid",
+ "data": [ 1, "foo" ],
+ "valid": false
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items and subitems",
+ "schema": {
+ "$defs": {
+ "item": {
+ "type": "array",
+ "additionalItems": false,
+ "items": [
+ { "$ref": "#/$defs/sub-item" },
+ { "$ref": "#/$defs/sub-item" }
+ ]
+ },
+ "sub-item": {
+ "type": "object",
+ "required": ["foo"]
+ }
+ },
+ "type": "array",
+ "additionalItems": false,
+ "items": [
+ { "$ref": "#/$defs/item" },
+ { "$ref": "#/$defs/item" },
+ { "$ref": "#/$defs/item" }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid items",
+ "data": [
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": true
+ },
+ {
+ "description": "too many items",
+ "data": [
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "too many sub-items",
+ "data": [
+ [ {"foo": null}, {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "wrong item",
+ "data": [
+ {"foo": null},
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "wrong sub-item",
+ "data": [
+ [ {}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "fewer items is valid",
+ "data": [
+ [ {"foo": null} ],
+ [ {"foo": null} ]
+ ],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested items",
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "number"
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid nested array",
+ "data": [[[[1]], [[2],[3]]], [[[4], [5], [6]]]],
+ "valid": true
+ },
+ {
+ "description": "nested array with invalid type",
+ "data": [[[["1"]], [[2],[3]]], [[[4], [5], [6]]]],
+ "valid": false
+ },
+ {
+ "description": "not deep enough",
+ "data": [[[1], [2],[3]], [[4], [5], [6]]],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/maxItems.json b/third_party/python/jsonschema/json/tests/draft2019-09/maxItems.json
new file mode 100644
index 0000000000..3b53a6b371
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/maxItems.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "maxItems validation",
+ "schema": {"maxItems": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": [1],
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": [1, 2, 3],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": "foobar",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/maxLength.json b/third_party/python/jsonschema/json/tests/draft2019-09/maxLength.json
new file mode 100644
index 0000000000..811d35b253
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/maxLength.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "maxLength validation",
+ "schema": {"maxLength": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": "f",
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": "fo",
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ },
+ {
+ "description": "two supplementary Unicode code points is long enough",
+ "data": "\uD83D\uDCA9\uD83D\uDCA9",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/maxProperties.json b/third_party/python/jsonschema/json/tests/draft2019-09/maxProperties.json
new file mode 100644
index 0000000000..513731e4c8
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/maxProperties.json
@@ -0,0 +1,38 @@
+[
+ {
+ "description": "maxProperties validation",
+ "schema": {"maxProperties": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": {"foo": 1, "bar": 2, "baz": 3},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/maximum.json b/third_party/python/jsonschema/json/tests/draft2019-09/maximum.json
new file mode 100644
index 0000000000..8150984ee5
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/maximum.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "maximum validation",
+ "schema": {"maximum": 3.0},
+ "tests": [
+ {
+ "description": "below the maximum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": 3.0,
+ "valid": true
+ },
+ {
+ "description": "above the maximum is invalid",
+ "data": 3.5,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/minItems.json b/third_party/python/jsonschema/json/tests/draft2019-09/minItems.json
new file mode 100644
index 0000000000..ed5118815e
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/minItems.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "minItems validation",
+ "schema": {"minItems": 1},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": [1],
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": "",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/minLength.json b/third_party/python/jsonschema/json/tests/draft2019-09/minLength.json
new file mode 100644
index 0000000000..3f09158dee
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/minLength.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "minLength validation",
+ "schema": {"minLength": 2},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": "fo",
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": "f",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "one supplementary Unicode code point is not long enough",
+ "data": "\uD83D\uDCA9",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/minProperties.json b/third_party/python/jsonschema/json/tests/draft2019-09/minProperties.json
new file mode 100644
index 0000000000..49a0726e01
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/minProperties.json
@@ -0,0 +1,38 @@
+[
+ {
+ "description": "minProperties validation",
+ "schema": {"minProperties": 1},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/minimum.json b/third_party/python/jsonschema/json/tests/draft2019-09/minimum.json
new file mode 100644
index 0000000000..2a9c42b3c4
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/minimum.json
@@ -0,0 +1,59 @@
+[
+ {
+ "description": "minimum validation",
+ "schema": {"minimum": 1.1},
+ "tests": [
+ {
+ "description": "above the minimum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": 1.1,
+ "valid": true
+ },
+ {
+ "description": "below the minimum is invalid",
+ "data": 0.6,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "minimum validation with signed integer",
+ "schema": {"minimum": -2},
+ "tests": [
+ {
+ "description": "negative above the minimum is valid",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "positive above the minimum is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": -2,
+ "valid": true
+ },
+ {
+ "description": "below the minimum is invalid",
+ "data": -3,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/multipleOf.json b/third_party/python/jsonschema/json/tests/draft2019-09/multipleOf.json
new file mode 100644
index 0000000000..ca3b761805
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/multipleOf.json
@@ -0,0 +1,60 @@
+[
+ {
+ "description": "by int",
+ "schema": {"multipleOf": 2},
+ "tests": [
+ {
+ "description": "int by int",
+ "data": 10,
+ "valid": true
+ },
+ {
+ "description": "int by int fail",
+ "data": 7,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "by number",
+ "schema": {"multipleOf": 1.5},
+ "tests": [
+ {
+ "description": "zero is multiple of anything",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "4.5 is multiple of 1.5",
+ "data": 4.5,
+ "valid": true
+ },
+ {
+ "description": "35 is not multiple of 1.5",
+ "data": 35,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "by small number",
+ "schema": {"multipleOf": 0.0001},
+ "tests": [
+ {
+ "description": "0.0075 is multiple of 0.0001",
+ "data": 0.0075,
+ "valid": true
+ },
+ {
+ "description": "0.00751 is not multiple of 0.0001",
+ "data": 0.00751,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/not.json b/third_party/python/jsonschema/json/tests/draft2019-09/not.json
new file mode 100644
index 0000000000..98de0eda8d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/not.json
@@ -0,0 +1,117 @@
+[
+ {
+ "description": "not",
+ "schema": {
+ "not": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "allowed",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "disallowed",
+ "data": 1,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not multiple types",
+ "schema": {
+ "not": {"type": ["integer", "boolean"]}
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "other mismatch",
+ "data": true,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not more complex schema",
+ "schema": {
+ "not": {
+ "type": "object",
+ "properties": {
+ "foo": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "other match",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"foo": "bar"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "forbidden property",
+ "schema": {
+ "properties": {
+ "foo": {
+ "not": {}
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "property present",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "property absent",
+ "data": {"bar": 1, "baz": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "not with boolean schema true",
+ "schema": {"not": true},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not with boolean schema false",
+ "schema": {"not": false},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/oneOf.json b/third_party/python/jsonschema/json/tests/draft2019-09/oneOf.json
new file mode 100644
index 0000000000..57640b7afb
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/oneOf.json
@@ -0,0 +1,206 @@
+[
+ {
+ "description": "oneOf",
+ "schema": {
+ "oneOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "minimum": 2
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first oneOf valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "second oneOf valid",
+ "data": 2.5,
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "neither oneOf valid",
+ "data": 1.5,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with base schema",
+ "schema": {
+ "type": "string",
+ "oneOf" : [
+ {
+ "minLength": 2
+ },
+ {
+ "maxLength": 4
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "mismatch base schema",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "one oneOf valid",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, all true",
+ "schema": {"oneOf": [true, true, true]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, one true",
+ "schema": {"oneOf": [true, false, false]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, more than one true",
+ "schema": {"oneOf": [true, true, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, all false",
+ "schema": {"oneOf": [false, false, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf complex types",
+ "schema": {
+ "oneOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first oneOf valid (complex)",
+ "data": {"bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second oneOf valid (complex)",
+ "data": {"foo": "baz"},
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid (complex)",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "neither oneOf valid (complex)",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with empty schema",
+ "schema": {
+ "oneOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "one valid - valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "both valid - invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with required",
+ "schema": {
+ "type": "object",
+ "oneOf": [
+ { "required": ["foo", "bar"] },
+ { "required": ["foo", "baz"] }
+ ]
+ },
+ "tests": [
+ {
+ "description": "both invalid - invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "first valid - valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second valid - valid",
+ "data": {"foo": 1, "baz": 3},
+ "valid": true
+ },
+ {
+ "description": "both valid - invalid",
+ "data": {"foo": 1, "bar": 2, "baz" : 3},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/bignum.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/bignum.json
new file mode 100644
index 0000000000..fac275e21f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/bignum.json
@@ -0,0 +1,105 @@
+[
+ {
+ "description": "integer",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "a bignum is an integer",
+ "data": 12345678910111213141516171819202122232425262728293031,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "number",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "a bignum is a number",
+ "data": 98249283749234923498293171823948729348710298301928331,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "integer",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "a negative bignum is an integer",
+ "data": -12345678910111213141516171819202122232425262728293031,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "number",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "a negative bignum is a number",
+ "data": -98249283749234923498293171823948729348710298301928331,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "string",
+ "schema": {"type": "string"},
+ "tests": [
+ {
+ "description": "a bignum is not a string",
+ "data": 98249283749234923498293171823948729348710298301928331,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "integer comparison",
+ "schema": {"maximum": 18446744073709551615},
+ "tests": [
+ {
+ "description": "comparison works for high numbers",
+ "data": 18446744073709551600,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "float comparison with high precision",
+ "schema": {
+ "exclusiveMaximum": 972783798187987123879878123.18878137
+ },
+ "tests": [
+ {
+ "description": "comparison works for high numbers",
+ "data": 972783798187987123879878123.188781371,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "integer comparison",
+ "schema": {"minimum": -18446744073709551615},
+ "tests": [
+ {
+ "description": "comparison works for very negative numbers",
+ "data": -18446744073709551600,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "float comparison with high precision on negative numbers",
+ "schema": {
+ "exclusiveMinimum": -972783798187987123879878123.18878137
+ },
+ "tests": [
+ {
+ "description": "comparison works for very negative numbers",
+ "data": -972783798187987123879878123.188781371,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/content.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/content.json
new file mode 100644
index 0000000000..3f5a7430b2
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/content.json
@@ -0,0 +1,77 @@
+[
+ {
+ "description": "validation of string-encoded content based on media type",
+ "schema": {
+ "contentMediaType": "application/json"
+ },
+ "tests": [
+ {
+ "description": "a valid JSON document",
+ "data": "{\"foo\": \"bar\"}",
+ "valid": true
+ },
+ {
+ "description": "an invalid JSON document",
+ "data": "{:}",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of binary string-encoding",
+ "schema": {
+ "contentEncoding": "base64"
+ },
+ "tests": [
+ {
+ "description": "a valid base64 string",
+ "data": "eyJmb28iOiAiYmFyIn0K",
+ "valid": true
+ },
+ {
+ "description": "an invalid base64 string (% is not a valid character)",
+ "data": "eyJmb28iOi%iYmFyIn0K",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of binary-encoded media type documents",
+ "schema": {
+ "contentMediaType": "application/json",
+ "contentEncoding": "base64"
+ },
+ "tests": [
+ {
+ "description": "a valid base64-encoded JSON document",
+ "data": "eyJmb28iOiAiYmFyIn0K",
+ "valid": true
+ },
+ {
+ "description": "a validly-encoded invalid JSON document",
+ "data": "ezp9Cg==",
+ "valid": false
+ },
+ {
+ "description": "an invalid base64 string that is valid JSON",
+ "data": "{}",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/ecmascript-regex.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/ecmascript-regex.json
new file mode 100644
index 0000000000..d82e0feb03
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/ecmascript-regex.json
@@ -0,0 +1,213 @@
+[
+ {
+ "description": "ECMA 262 regex non-compliance",
+ "schema": { "format": "regex" },
+ "tests": [
+ {
+ "description": "ECMA 262 has no support for \\Z anchor from .NET",
+ "data": "^\\S(|(.|\\n)*\\S)\\Z",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex $ does not match trailing newline",
+ "schema": {
+ "type": "string",
+ "pattern": "^abc$"
+ },
+ "tests": [
+ {
+ "description": "matches in Python, but should not in jsonschema",
+ "data": "abc\n",
+ "valid": false
+ },
+ {
+ "description": "should match",
+ "data": "abc",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex converts \\a to ascii BEL",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\a$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\a",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0007",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex escapes control codes with \\c and upper letter",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\cC$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\cC",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0003",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex escapes control codes with \\c and lower letter",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\cc$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\cc",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0003",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\d matches ascii digits only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\d$"
+ },
+ "tests": [
+ {
+ "description": "ASCII zero matches",
+ "data": "0",
+ "valid": true
+ },
+ {
+ "description": "NKO DIGIT ZERO does not match (unlike e.g. Python)",
+ "data": "߀",
+ "valid": false
+ },
+ {
+ "description": "NKO DIGIT ZERO (as \\u escape) does not match",
+ "data": "\u07c0",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\D matches everything but ascii digits",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\D$"
+ },
+ "tests": [
+ {
+ "description": "ASCII zero does not match",
+ "data": "0",
+ "valid": false
+ },
+ {
+ "description": "NKO DIGIT ZERO matches (unlike e.g. Python)",
+ "data": "߀",
+ "valid": true
+ },
+ {
+ "description": "NKO DIGIT ZERO (as \\u escape) matches",
+ "data": "\u07c0",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\w matches ascii letters only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\w$"
+ },
+ "tests": [
+ {
+ "description": "ASCII 'a' matches",
+ "data": "a",
+ "valid": true
+ },
+ {
+ "description": "latin-1 e-acute does not match (unlike e.g. Python)",
+ "data": "é",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\w matches everything but ascii letters",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\W$"
+ },
+ "tests": [
+ {
+ "description": "ASCII 'a' does not match",
+ "data": "a",
+ "valid": false
+ },
+ {
+ "description": "latin-1 e-acute matches (unlike e.g. Python)",
+ "data": "é",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\s matches ascii whitespace only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\s$"
+ },
+ "tests": [
+ {
+ "description": "ASCII space matches",
+ "data": " ",
+ "valid": true
+ },
+ {
+ "description": "latin-1 non-breaking-space does not match (unlike e.g. Python)",
+ "data": "\u00a0",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\S matches everything but ascii whitespace",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\S$"
+ },
+ "tests": [
+ {
+ "description": "ASCII space does not match",
+ "data": " ",
+ "valid": false
+ },
+ {
+ "description": "latin-1 non-breaking-space matches (unlike e.g. Python)",
+ "data": "\u00a0",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/date-time.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/date-time.json
new file mode 100644
index 0000000000..dfccee6e67
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/date-time.json
@@ -0,0 +1,53 @@
+[
+ {
+ "description": "validation of date-time strings",
+ "schema": {"format": "date-time"},
+ "tests": [
+ {
+ "description": "a valid date-time string",
+ "data": "1963-06-19T08:30:06.283185Z",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string without second fraction",
+ "data": "1963-06-19T08:30:06Z",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string with plus offset",
+ "data": "1937-01-01T12:00:27.87+00:20",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string with minus offset",
+ "data": "1990-12-31T15:59:50.123-08:00",
+ "valid": true
+ },
+ {
+ "description": "a invalid day in date-time string",
+ "data": "1990-02-31T15:59:60.123-08:00",
+ "valid": false
+ },
+ {
+ "description": "an invalid offset in date-time string",
+ "data": "1990-12-31T15:59:60-24:00",
+ "valid": false
+ },
+ {
+ "description": "an invalid date-time string",
+ "data": "06/19/1963 08:30:06 PST",
+ "valid": false
+ },
+ {
+ "description": "case-insensitive T and Z",
+ "data": "1963-06-19t08:30:06.283185z",
+ "valid": true
+ },
+ {
+ "description": "only RFC3339 not all of ISO 8601 are valid",
+ "data": "2013-350T01:01:01",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/date.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/date.json
new file mode 100644
index 0000000000..cd23baae3a
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/date.json
@@ -0,0 +1,23 @@
+[
+ {
+ "description": "validation of date strings",
+ "schema": {"format": "date"},
+ "tests": [
+ {
+ "description": "a valid date string",
+ "data": "1963-06-19",
+ "valid": true
+ },
+ {
+ "description": "an invalid date-time string",
+ "data": "06/19/1963",
+ "valid": false
+ },
+ {
+ "description": "only RFC3339 not all of ISO 8601 are valid",
+ "data": "2013-350",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/email.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/email.json
new file mode 100644
index 0000000000..c837c84bc1
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/email.json
@@ -0,0 +1,18 @@
+[
+ {
+ "description": "validation of e-mail addresses",
+ "schema": {"format": "email"},
+ "tests": [
+ {
+ "description": "a valid e-mail address",
+ "data": "joe.bloggs@example.com",
+ "valid": true
+ },
+ {
+ "description": "an invalid e-mail address",
+ "data": "2962",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/hostname.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/hostname.json
new file mode 100644
index 0000000000..d22e57db03
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/hostname.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "validation of host names",
+ "schema": {"format": "hostname"},
+ "tests": [
+ {
+ "description": "a valid host name",
+ "data": "www.example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid punycoded IDN hostname",
+ "data": "xn--4gbwdl.xn--wgbh1c",
+ "valid": true
+ },
+ {
+ "description": "a host name starting with an illegal character",
+ "data": "-a-host-name-that-starts-with--",
+ "valid": false
+ },
+ {
+ "description": "a host name containing illegal characters",
+ "data": "not_a_valid_host_name",
+ "valid": false
+ },
+ {
+ "description": "a host name with a component too long",
+ "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/idn-email.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/idn-email.json
new file mode 100644
index 0000000000..637409ea8f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/idn-email.json
@@ -0,0 +1,18 @@
+[
+ {
+ "description": "validation of an internationalized e-mail addresses",
+ "schema": {"format": "idn-email"},
+ "tests": [
+ {
+ "description": "a valid idn e-mail (example@example.test in Hangul)",
+ "data": "실례@실례.테스트",
+ "valid": true
+ },
+ {
+ "description": "an invalid idn e-mail address",
+ "data": "2962",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/idn-hostname.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/idn-hostname.json
new file mode 100644
index 0000000000..3291820e6f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/idn-hostname.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "validation of internationalized host names",
+ "schema": {"format": "idn-hostname"},
+ "tests": [
+ {
+ "description": "a valid host name (example.test in Hangul)",
+ "data": "실례.테스트",
+ "valid": true
+ },
+ {
+ "description": "illegal first char U+302E Hangul single dot tone mark",
+ "data": "〮실례.테스트",
+ "valid": false
+ },
+ {
+ "description": "contains illegal char U+302E Hangul single dot tone mark",
+ "data": "실〮례.테스트",
+ "valid": false
+ },
+ {
+ "description": "a host name with a component too long",
+ "data": "실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실례례테스트례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례테스트례례실례.테스트",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/ipv4.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/ipv4.json
new file mode 100644
index 0000000000..661148a74d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/ipv4.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "validation of IP addresses",
+ "schema": {"format": "ipv4"},
+ "tests": [
+ {
+ "description": "a valid IP address",
+ "data": "192.168.0.1",
+ "valid": true
+ },
+ {
+ "description": "an IP address with too many components",
+ "data": "127.0.0.0.1",
+ "valid": false
+ },
+ {
+ "description": "an IP address with out-of-range values",
+ "data": "256.256.256.256",
+ "valid": false
+ },
+ {
+ "description": "an IP address without 4 components",
+ "data": "127.0",
+ "valid": false
+ },
+ {
+ "description": "an IP address as an integer",
+ "data": "0x7f000001",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/ipv6.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/ipv6.json
new file mode 100644
index 0000000000..f67559b35d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/ipv6.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "validation of IPv6 addresses",
+ "schema": {"format": "ipv6"},
+ "tests": [
+ {
+ "description": "a valid IPv6 address",
+ "data": "::1",
+ "valid": true
+ },
+ {
+ "description": "an IPv6 address with out-of-range values",
+ "data": "12345::",
+ "valid": false
+ },
+ {
+ "description": "an IPv6 address with too many components",
+ "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1",
+ "valid": false
+ },
+ {
+ "description": "an IPv6 address containing illegal characters",
+ "data": "::laptop",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/iri-reference.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/iri-reference.json
new file mode 100644
index 0000000000..1fd779c23c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/iri-reference.json
@@ -0,0 +1,43 @@
+[
+ {
+ "description": "validation of IRI References",
+ "schema": {"format": "iri-reference"},
+ "tests": [
+ {
+ "description": "a valid IRI",
+ "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx",
+ "valid": true
+ },
+ {
+ "description": "a valid protocol-relative IRI Reference",
+ "data": "//ƒøø.ßår/?∂éœ=πîx#πîüx",
+ "valid": true
+ },
+ {
+ "description": "a valid relative IRI Reference",
+ "data": "/âππ",
+ "valid": true
+ },
+ {
+ "description": "an invalid IRI Reference",
+ "data": "\\\\WINDOWS\\filëßåré",
+ "valid": false
+ },
+ {
+ "description": "a valid IRI Reference",
+ "data": "âππ",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI fragment",
+ "data": "#ƒrägmênt",
+ "valid": true
+ },
+ {
+ "description": "an invalid IRI fragment",
+ "data": "#ƒräg\\mênt",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/iri.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/iri.json
new file mode 100644
index 0000000000..ed54094c01
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/iri.json
@@ -0,0 +1,53 @@
+[
+ {
+ "description": "validation of IRIs",
+ "schema": {"format": "iri"},
+ "tests": [
+ {
+ "description": "a valid IRI with anchor tag",
+ "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI with anchor tag and parantheses",
+ "data": "http://ƒøø.com/blah_(wîkïpédiå)_blah#ßité-1",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI with URL-encoded stuff",
+ "data": "http://ƒøø.ßår/?q=Test%20URL-encoded%20stuff",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI with many special characters",
+ "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI based on IPv6",
+ "data": "http://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]",
+ "valid": true
+ },
+ {
+ "description": "an invalid IRI based on IPv6",
+ "data": "http://2001:0db8:85a3:0000:0000:8a2e:0370:7334",
+ "valid": false
+ },
+ {
+ "description": "an invalid relative IRI Reference",
+ "data": "/abc",
+ "valid": false
+ },
+ {
+ "description": "an invalid IRI",
+ "data": "\\\\WINDOWS\\filëßåré",
+ "valid": false
+ },
+ {
+ "description": "an invalid IRI though valid IRI reference",
+ "data": "âππ",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/json-pointer.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/json-pointer.json
new file mode 100644
index 0000000000..65c2f064f0
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/json-pointer.json
@@ -0,0 +1,168 @@
+[
+ {
+ "description": "validation of JSON-pointers (JSON String Representation)",
+ "schema": {"format": "json-pointer"},
+ "tests": [
+ {
+ "description": "a valid JSON-pointer",
+ "data": "/foo/bar~0/baz~1/%a",
+ "valid": true
+ },
+ {
+ "description": "not a valid JSON-pointer (~ not escaped)",
+ "data": "/foo/bar~",
+ "valid": false
+ },
+ {
+ "description": "valid JSON-pointer with empty segment",
+ "data": "/foo//bar",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer with the last empty segment",
+ "data": "/foo/bar/",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #1",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #2",
+ "data": "/foo",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #3",
+ "data": "/foo/0",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #4",
+ "data": "/",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #5",
+ "data": "/a~1b",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #6",
+ "data": "/c%d",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #7",
+ "data": "/e^f",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #8",
+ "data": "/g|h",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #9",
+ "data": "/i\\j",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #10",
+ "data": "/k\"l",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #11",
+ "data": "/ ",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #12",
+ "data": "/m~0n",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer used adding to the last array position",
+ "data": "/foo/-",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (- used as object member name)",
+ "data": "/foo/-/bar",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (multiple escaped characters)",
+ "data": "/~1~0~0~1~1",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (escaped with fraction part) #1",
+ "data": "/~1.1",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (escaped with fraction part) #2",
+ "data": "/~0.1",
+ "valid": true
+ },
+ {
+ "description": "not a valid JSON-pointer (URI Fragment Identifier) #1",
+ "data": "#",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (URI Fragment Identifier) #2",
+ "data": "#/",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (URI Fragment Identifier) #3",
+ "data": "#a",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (some escaped, but not all) #1",
+ "data": "/~0~",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (some escaped, but not all) #2",
+ "data": "/~0/~",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (wrong escape character) #1",
+ "data": "/~2",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (wrong escape character) #2",
+ "data": "/~-1",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (multiple characters not escaped)",
+ "data": "/~~",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (neither empty nor starting with /) #1",
+ "data": "a",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (neither empty nor starting with /) #2",
+ "data": "0",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (neither empty nor starting with /) #3",
+ "data": "a/a",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/regex.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/regex.json
new file mode 100644
index 0000000000..d99d021ec0
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/regex.json
@@ -0,0 +1,18 @@
+[
+ {
+ "description": "validation of regular expressions",
+ "schema": {"format": "regex"},
+ "tests": [
+ {
+ "description": "a valid regular expression",
+ "data": "([abc])+\\s+$",
+ "valid": true
+ },
+ {
+ "description": "a regular expression with unclosed parens is invalid",
+ "data": "^(abc]",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/relative-json-pointer.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/relative-json-pointer.json
new file mode 100644
index 0000000000..ceeb743a32
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/relative-json-pointer.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "validation of Relative JSON Pointers (RJP)",
+ "schema": {"format": "relative-json-pointer"},
+ "tests": [
+ {
+ "description": "a valid upwards RJP",
+ "data": "1",
+ "valid": true
+ },
+ {
+ "description": "a valid downwards RJP",
+ "data": "0/foo/bar",
+ "valid": true
+ },
+ {
+ "description": "a valid up and then down RJP, with array index",
+ "data": "2/0/baz/1/zip",
+ "valid": true
+ },
+ {
+ "description": "a valid RJP taking the member or index name",
+ "data": "0#",
+ "valid": true
+ },
+ {
+ "description": "an invalid RJP that is a valid JSON Pointer",
+ "data": "/foo/bar",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/time.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/time.json
new file mode 100644
index 0000000000..4ec8a01a3e
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/time.json
@@ -0,0 +1,23 @@
+[
+ {
+ "description": "validation of time strings",
+ "schema": {"format": "time"},
+ "tests": [
+ {
+ "description": "a valid time string",
+ "data": "08:30:06.283185Z",
+ "valid": true
+ },
+ {
+ "description": "an invalid time string",
+ "data": "08:30:06 PST",
+ "valid": false
+ },
+ {
+ "description": "only RFC 3339, not all of ISO 8601, is valid",
+ "data": "01:01:01,1111",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/uri-reference.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/uri-reference.json
new file mode 100644
index 0000000000..e4c9eef63c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/uri-reference.json
@@ -0,0 +1,43 @@
+[
+ {
+ "description": "validation of URI References",
+ "schema": {"format": "uri-reference"},
+ "tests": [
+ {
+ "description": "a valid URI",
+ "data": "http://foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+ "description": "a valid protocol-relative URI Reference",
+ "data": "//foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+ "description": "a valid relative URI Reference",
+ "data": "/abc",
+ "valid": true
+ },
+ {
+ "description": "an invalid URI Reference",
+ "data": "\\\\WINDOWS\\fileshare",
+ "valid": false
+ },
+ {
+ "description": "a valid URI Reference",
+ "data": "abc",
+ "valid": true
+ },
+ {
+ "description": "a valid URI fragment",
+ "data": "#fragment",
+ "valid": true
+ },
+ {
+ "description": "an invalid URI fragment",
+ "data": "#frag\\ment",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/uri-template.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/uri-template.json
new file mode 100644
index 0000000000..33ab76ee73
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/uri-template.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "format: uri-template",
+ "schema": {"format": "uri-template"},
+ "tests": [
+ {
+ "description": "a valid uri-template",
+ "data": "http://example.com/dictionary/{term:1}/{term}",
+ "valid": true
+ },
+ {
+ "description": "an invalid uri-template",
+ "data": "http://example.com/dictionary/{term:1}/{term",
+ "valid": false
+ },
+ {
+ "description": "a valid uri-template without variables",
+ "data": "http://example.com/dictionary",
+ "valid": true
+ },
+ {
+ "description": "a valid relative uri-template",
+ "data": "dictionary/{term:1}/{term}",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/uri.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/uri.json
new file mode 100644
index 0000000000..25cc40c80a
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/format/uri.json
@@ -0,0 +1,103 @@
+[
+ {
+ "description": "validation of URIs",
+ "schema": {"format": "uri"},
+ "tests": [
+ {
+ "description": "a valid URL with anchor tag",
+ "data": "http://foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with anchor tag and parentheses",
+ "data": "http://foo.com/blah_(wikipedia)_blah#cite-1",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with URL-encoded stuff",
+ "data": "http://foo.bar/?q=Test%20URL-encoded%20stuff",
+ "valid": true
+ },
+ {
+ "description": "a valid punycoded URL",
+ "data": "http://xn--nw2a.xn--j6w193g/",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with many special characters",
+ "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid URL based on IPv4",
+ "data": "http://223.255.255.254",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with ftp scheme",
+ "data": "ftp://ftp.is.co.za/rfc/rfc1808.txt",
+ "valid": true
+ },
+ {
+ "description": "a valid URL for a simple text file",
+ "data": "http://www.ietf.org/rfc/rfc2396.txt",
+ "valid": true
+ },
+ {
+ "description": "a valid URL",
+ "data": "ldap://[2001:db8::7]/c=GB?objectClass?one",
+ "valid": true
+ },
+ {
+ "description": "a valid mailto URI",
+ "data": "mailto:John.Doe@example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid newsgroup URI",
+ "data": "news:comp.infosystems.www.servers.unix",
+ "valid": true
+ },
+ {
+ "description": "a valid tel URI",
+ "data": "tel:+1-816-555-1212",
+ "valid": true
+ },
+ {
+ "description": "a valid URN",
+ "data": "urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
+ "valid": true
+ },
+ {
+ "description": "an invalid protocol-relative URI Reference",
+ "data": "//foo.bar/?baz=qux#quux",
+ "valid": false
+ },
+ {
+ "description": "an invalid relative URI Reference",
+ "data": "/abc",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI",
+ "data": "\\\\WINDOWS\\fileshare",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI though valid URI reference",
+ "data": "abc",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI with spaces",
+ "data": "http:// shouldfail.com",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI with spaces and missing scheme",
+ "data": ":// should fail",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/optional/zeroTerminatedFloats.json b/third_party/python/jsonschema/json/tests/draft2019-09/optional/zeroTerminatedFloats.json
new file mode 100644
index 0000000000..1bcdf96036
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/optional/zeroTerminatedFloats.json
@@ -0,0 +1,15 @@
+[
+ {
+ "description": "some languages do not distinguish between different types of numeric value",
+ "schema": {
+ "type": "integer"
+ },
+ "tests": [
+ {
+ "description": "a float without fractional part is an integer",
+ "data": 1.0,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/pattern.json b/third_party/python/jsonschema/json/tests/draft2019-09/pattern.json
new file mode 100644
index 0000000000..25e7299731
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/pattern.json
@@ -0,0 +1,34 @@
+[
+ {
+ "description": "pattern validation",
+ "schema": {"pattern": "^a*$"},
+ "tests": [
+ {
+ "description": "a matching pattern is valid",
+ "data": "aaa",
+ "valid": true
+ },
+ {
+ "description": "a non-matching pattern is invalid",
+ "data": "abc",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": true,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "pattern is not anchored",
+ "schema": {"pattern": "a+"},
+ "tests": [
+ {
+ "description": "matches a substring",
+ "data": "xxaayy",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/patternProperties.json b/third_party/python/jsonschema/json/tests/draft2019-09/patternProperties.json
new file mode 100644
index 0000000000..1d04a1675c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/patternProperties.json
@@ -0,0 +1,151 @@
+[
+ {
+ "description":
+ "patternProperties validates properties matching a regex",
+ "schema": {
+ "patternProperties": {
+ "f.*o": {"type": "integer"}
+ }
+ },
+ "tests": [
+ {
+ "description": "a single valid match is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "multiple valid matches is valid",
+ "data": {"foo": 1, "foooooo" : 2},
+ "valid": true
+ },
+ {
+ "description": "a single invalid match is invalid",
+ "data": {"foo": "bar", "fooooo": 2},
+ "valid": false
+ },
+ {
+ "description": "multiple invalid matches is invalid",
+ "data": {"foo": "bar", "foooooo" : "baz"},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": ["foo"],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple simultaneous patternProperties are validated",
+ "schema": {
+ "patternProperties": {
+ "a*": {"type": "integer"},
+ "aaa*": {"maximum": 20}
+ }
+ },
+ "tests": [
+ {
+ "description": "a single valid match is valid",
+ "data": {"a": 21},
+ "valid": true
+ },
+ {
+ "description": "a simultaneous match is valid",
+ "data": {"aaaa": 18},
+ "valid": true
+ },
+ {
+ "description": "multiple matches is valid",
+ "data": {"a": 21, "aaaa": 18},
+ "valid": true
+ },
+ {
+ "description": "an object invalid due to one is invalid",
+ "data": {"a": "bar"},
+ "valid": false
+ },
+ {
+ "description": "an object invalid due to the other is invalid",
+ "data": {"aaaa": 31},
+ "valid": false
+ },
+ {
+ "description": "an object invalid due to both is invalid",
+ "data": {"aaa": "foo", "aaaa": 31},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "regexes are not anchored by default and are case sensitive",
+ "schema": {
+ "patternProperties": {
+ "[0-9]{2,}": { "type": "boolean" },
+ "X_": { "type": "string" }
+ }
+ },
+ "tests": [
+ {
+ "description": "unrecognized members are ignored",
+ "data": { "answer 1": "42" },
+ "valid": true
+ },
+ {
+ "description": "recognized members are accounted for",
+ "data": { "a31b": null },
+ "valid": false
+ },
+ {
+ "description": "regexes are case sensitive",
+ "data": { "a_x_3": 3 },
+ "valid": true
+ },
+ {
+ "description": "regexes are case sensitive, 2",
+ "data": { "a_X_3": 3 },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "patternProperties with boolean schemas",
+ "schema": {
+ "patternProperties": {
+ "f.*": true,
+ "b.*": false
+ }
+ },
+ "tests": [
+ {
+ "description": "object with property matching schema true is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "object with property matching schema false is invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "object with both properties is invalid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/properties.json b/third_party/python/jsonschema/json/tests/draft2019-09/properties.json
new file mode 100644
index 0000000000..b86c181982
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/properties.json
@@ -0,0 +1,167 @@
+[
+ {
+ "description": "object properties validation",
+ "schema": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"type": "string"}
+ }
+ },
+ "tests": [
+ {
+ "description": "both properties present and valid is valid",
+ "data": {"foo": 1, "bar": "baz"},
+ "valid": true
+ },
+ {
+ "description": "one property invalid is invalid",
+ "data": {"foo": 1, "bar": {}},
+ "valid": false
+ },
+ {
+ "description": "both properties invalid is invalid",
+ "data": {"foo": [], "bar": {}},
+ "valid": false
+ },
+ {
+ "description": "doesn't invalidate other properties",
+ "data": {"quux": []},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description":
+ "properties, patternProperties, additionalProperties interaction",
+ "schema": {
+ "properties": {
+ "foo": {"type": "array", "maxItems": 3},
+ "bar": {"type": "array"}
+ },
+ "patternProperties": {"f.o": {"minItems": 2}},
+ "additionalProperties": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "property validates property",
+ "data": {"foo": [1, 2]},
+ "valid": true
+ },
+ {
+ "description": "property invalidates property",
+ "data": {"foo": [1, 2, 3, 4]},
+ "valid": false
+ },
+ {
+ "description": "patternProperty invalidates property",
+ "data": {"foo": []},
+ "valid": false
+ },
+ {
+ "description": "patternProperty validates nonproperty",
+ "data": {"fxo": [1, 2]},
+ "valid": true
+ },
+ {
+ "description": "patternProperty invalidates nonproperty",
+ "data": {"fxo": []},
+ "valid": false
+ },
+ {
+ "description": "additionalProperty ignores property",
+ "data": {"bar": []},
+ "valid": true
+ },
+ {
+ "description": "additionalProperty validates others",
+ "data": {"quux": 3},
+ "valid": true
+ },
+ {
+ "description": "additionalProperty invalidates others",
+ "data": {"quux": "foo"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "properties with boolean schema",
+ "schema": {
+ "properties": {
+ "foo": true,
+ "bar": false
+ }
+ },
+ "tests": [
+ {
+ "description": "no property present is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "only 'true' property present is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "only 'false' property present is invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "both properties present is invalid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "properties with escaped characters",
+ "schema": {
+ "properties": {
+ "foo\nbar": {"type": "number"},
+ "foo\"bar": {"type": "number"},
+ "foo\\bar": {"type": "number"},
+ "foo\rbar": {"type": "number"},
+ "foo\tbar": {"type": "number"},
+ "foo\fbar": {"type": "number"}
+ }
+ },
+ "tests": [
+ {
+ "description": "object with all numbers is valid",
+ "data": {
+ "foo\nbar": 1,
+ "foo\"bar": 1,
+ "foo\\bar": 1,
+ "foo\rbar": 1,
+ "foo\tbar": 1,
+ "foo\fbar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with strings is invalid",
+ "data": {
+ "foo\nbar": "1",
+ "foo\"bar": "1",
+ "foo\\bar": "1",
+ "foo\rbar": "1",
+ "foo\tbar": "1",
+ "foo\fbar": "1"
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/propertyNames.json b/third_party/python/jsonschema/json/tests/draft2019-09/propertyNames.json
new file mode 100644
index 0000000000..8423690d90
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/propertyNames.json
@@ -0,0 +1,78 @@
+[
+ {
+ "description": "propertyNames validation",
+ "schema": {
+ "propertyNames": {"maxLength": 3}
+ },
+ "tests": [
+ {
+ "description": "all property names valid",
+ "data": {
+ "f": {},
+ "foo": {}
+ },
+ "valid": true
+ },
+ {
+ "description": "some property names invalid",
+ "data": {
+ "foo": {},
+ "foobar": {}
+ },
+ "valid": false
+ },
+ {
+ "description": "object without properties is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3, 4],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "propertyNames with boolean schema true",
+ "schema": {"propertyNames": true},
+ "tests": [
+ {
+ "description": "object with any properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "propertyNames with boolean schema false",
+ "schema": {"propertyNames": false},
+ "tests": [
+ {
+ "description": "object with any properties is invalid",
+ "data": {"foo": 1},
+ "valid": false
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/ref.json b/third_party/python/jsonschema/json/tests/draft2019-09/ref.json
new file mode 100644
index 0000000000..285de55c06
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/ref.json
@@ -0,0 +1,359 @@
+[
+ {
+ "description": "root pointer ref",
+ "schema": {
+ "properties": {
+ "foo": {"$ref": "#"}
+ },
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": {"foo": false},
+ "valid": true
+ },
+ {
+ "description": "recursive match",
+ "data": {"foo": {"foo": false}},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"bar": false},
+ "valid": false
+ },
+ {
+ "description": "recursive mismatch",
+ "data": {"foo": {"bar": false}},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "relative pointer ref to object",
+ "schema": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"$ref": "#/properties/foo"}
+ }
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": {"bar": 3},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"bar": true},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "relative pointer ref to array",
+ "schema": {
+ "items": [
+ {"type": "integer"},
+ {"$ref": "#/items/0"}
+ ]
+ },
+ "tests": [
+ {
+ "description": "match array",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "mismatch array",
+ "data": [1, "foo"],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "escaped pointer ref",
+ "schema": {
+ "tilda~field": {"type": "integer"},
+ "slash/field": {"type": "integer"},
+ "percent%field": {"type": "integer"},
+ "properties": {
+ "tilda": {"$ref": "#/tilda~0field"},
+ "slash": {"$ref": "#/slash~1field"},
+ "percent": {"$ref": "#/percent%25field"}
+ }
+ },
+ "tests": [
+ {
+ "description": "slash invalid",
+ "data": {"slash": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "tilda invalid",
+ "data": {"tilda": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "percent invalid",
+ "data": {"percent": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "slash valid",
+ "data": {"slash": 123},
+ "valid": true
+ },
+ {
+ "description": "tilda valid",
+ "data": {"tilda": 123},
+ "valid": true
+ },
+ {
+ "description": "percent valid",
+ "data": {"percent": 123},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested refs",
+ "schema": {
+ "$defs": {
+ "a": {"type": "integer"},
+ "b": {"$ref": "#/$defs/a"},
+ "c": {"$ref": "#/$defs/b"}
+ },
+ "$ref": "#/$defs/c"
+ },
+ "tests": [
+ {
+ "description": "nested ref valid",
+ "data": 5,
+ "valid": true
+ },
+ {
+ "description": "nested ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ref overrides any sibling keywords",
+ "schema": {
+ "$defs": {
+ "reffed": {
+ "type": "array"
+ }
+ },
+ "properties": {
+ "foo": {
+ "$ref": "#/$defs/reffed",
+ "maxItems": 2
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "ref valid",
+ "data": { "foo": [] },
+ "valid": true
+ },
+ {
+ "description": "ref valid, maxItems ignored",
+ "data": { "foo": [ 1, 2, 3] },
+ "valid": true
+ },
+ {
+ "description": "ref invalid",
+ "data": { "foo": "string" },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "remote ref, containing refs itself",
+ "schema": {"$ref": "https://json-schema.org/draft/2019-09/schema"},
+ "tests": [
+ {
+ "description": "remote ref valid",
+ "data": {"minLength": 1},
+ "valid": true
+ },
+ {
+ "description": "remote ref invalid",
+ "data": {"minLength": -1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "property named $ref that is not a reference",
+ "schema": {
+ "properties": {
+ "$ref": {"type": "string"}
+ }
+ },
+ "tests": [
+ {
+ "description": "property named $ref valid",
+ "data": {"$ref": "a"},
+ "valid": true
+ },
+ {
+ "description": "property named $ref invalid",
+ "data": {"$ref": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "$ref to boolean schema true",
+ "schema": {
+ "$ref": "#/$defs/bool",
+ "$defs": {
+ "bool": true
+ }
+ },
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "$ref to boolean schema false",
+ "schema": {
+ "$ref": "#/$defs/bool",
+ "$defs": {
+ "bool": false
+ }
+ },
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Recursive references between schemas",
+ "schema": {
+ "$id": "http://localhost:1234/tree",
+ "description": "tree of nodes",
+ "type": "object",
+ "properties": {
+ "meta": {"type": "string"},
+ "nodes": {
+ "type": "array",
+ "items": {"$ref": "node"}
+ }
+ },
+ "required": ["meta", "nodes"],
+ "$defs": {
+ "node": {
+ "$id": "http://localhost:1234/node",
+ "description": "node",
+ "type": "object",
+ "properties": {
+ "value": {"type": "number"},
+ "subtree": {"$ref": "tree"}
+ },
+ "required": ["value"]
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid tree",
+ "data": {
+ "meta": "root",
+ "nodes": [
+ {
+ "value": 1,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 1.1},
+ {"value": 1.2}
+ ]
+ }
+ },
+ {
+ "value": 2,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 2.1},
+ {"value": 2.2}
+ ]
+ }
+ }
+ ]
+ },
+ "valid": true
+ },
+ {
+ "description": "invalid tree",
+ "data": {
+ "meta": "root",
+ "nodes": [
+ {
+ "value": 1,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": "string is invalid"},
+ {"value": 1.2}
+ ]
+ }
+ },
+ {
+ "value": 2,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 2.1},
+ {"value": 2.2}
+ ]
+ }
+ }
+ ]
+ },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "refs with quote",
+ "schema": {
+ "properties": {
+ "foo\"bar": {"$ref": "#/$defs/foo%22bar"}
+ },
+ "$defs": {
+ "foo\"bar": {"type": "number"}
+ }
+ },
+ "tests": [
+ {
+ "description": "object with numbers is valid",
+ "data": {
+ "foo\"bar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with strings is invalid",
+ "data": {
+ "foo\"bar": "1"
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/refRemote.json b/third_party/python/jsonschema/json/tests/draft2019-09/refRemote.json
new file mode 100644
index 0000000000..9cadc92666
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/refRemote.json
@@ -0,0 +1,167 @@
+[
+ {
+ "description": "remote ref",
+ "schema": {"$ref": "http://localhost:1234/integer.json"},
+ "tests": [
+ {
+ "description": "remote ref valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "remote ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "fragment within remote ref",
+ "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"},
+ "tests": [
+ {
+ "description": "remote fragment valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "remote fragment invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ref within remote ref",
+ "schema": {
+ "$ref": "http://localhost:1234/subSchemas.json#/refToInteger"
+ },
+ "tests": [
+ {
+ "description": "ref within ref valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "ref within ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change",
+ "schema": {
+ "$id": "http://localhost:1234/",
+ "items": {
+ "$id": "folder/",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ },
+ "tests": [
+ {
+ "description": "base URI change ref valid",
+ "data": [[1]],
+ "valid": true
+ },
+ {
+ "description": "base URI change ref invalid",
+ "data": [["a"]],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change - change folder",
+ "schema": {
+ "$id": "http://localhost:1234/scope_change_defs1.json",
+ "type" : "object",
+ "properties": {"list": {"$ref": "#/$defs/baz"}},
+ "$defs": {
+ "baz": {
+ "$id": "folder/",
+ "type": "array",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": {"list": [1]},
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": {"list": ["a"]},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change - change folder in subschema",
+ "schema": {
+ "$id": "http://localhost:1234/scope_change_defs2.json",
+ "type" : "object",
+ "properties": {"list": {"$ref": "#/$defs/baz/$defs/bar"}},
+ "$defs": {
+ "baz": {
+ "$id": "folder/",
+ "$defs": {
+ "bar": {
+ "type": "array",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": {"list": [1]},
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": {"list": ["a"]},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "root ref in remote ref",
+ "schema": {
+ "$id": "http://localhost:1234/object",
+ "type": "object",
+ "properties": {
+ "name": {"$ref": "name-defs.json#/$defs/orNull"}
+ }
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": {
+ "name": "foo"
+ },
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": {
+ "name": null
+ },
+ "valid": true
+ },
+ {
+ "description": "object is invalid",
+ "data": {
+ "name": {
+ "name": null
+ }
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/required.json b/third_party/python/jsonschema/json/tests/draft2019-09/required.json
new file mode 100644
index 0000000000..abf18f3459
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/required.json
@@ -0,0 +1,105 @@
+[
+ {
+ "description": "required validation",
+ "schema": {
+ "properties": {
+ "foo": {},
+ "bar": {}
+ },
+ "required": ["foo"]
+ },
+ "tests": [
+ {
+ "description": "present required property is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "non-present required property is invalid",
+ "data": {"bar": 1},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required default validation",
+ "schema": {
+ "properties": {
+ "foo": {}
+ }
+ },
+ "tests": [
+ {
+ "description": "not required by default",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required with empty array",
+ "schema": {
+ "properties": {
+ "foo": {}
+ },
+ "required": []
+ },
+ "tests": [
+ {
+ "description": "property not required",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required with escaped characters",
+ "schema": {
+ "required": [
+ "foo\nbar",
+ "foo\"bar",
+ "foo\\bar",
+ "foo\rbar",
+ "foo\tbar",
+ "foo\fbar"
+ ]
+ },
+ "tests": [
+ {
+ "description": "object with all properties present is valid",
+ "data": {
+ "foo\nbar": 1,
+ "foo\"bar": 1,
+ "foo\\bar": 1,
+ "foo\rbar": 1,
+ "foo\tbar": 1,
+ "foo\fbar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with some properties missing is invalid",
+ "data": {
+ "foo\nbar": "1",
+ "foo\"bar": "1"
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/type.json b/third_party/python/jsonschema/json/tests/draft2019-09/type.json
new file mode 100644
index 0000000000..ea33b1821f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/type.json
@@ -0,0 +1,464 @@
+[
+ {
+ "description": "integer type matches integers",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "an integer is an integer",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a float is not an integer",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an integer",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "a string is still not an integer, even if it looks like one",
+ "data": "1",
+ "valid": false
+ },
+ {
+ "description": "an object is not an integer",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not an integer",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not an integer",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an integer",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "number type matches numbers",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "an integer is a number",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a float is a number",
+ "data": 1.1,
+ "valid": true
+ },
+ {
+ "description": "a string is not a number",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "a string is still not a number, even if it looks like one",
+ "data": "1",
+ "valid": false
+ },
+ {
+ "description": "an object is not a number",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a number",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not a number",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not a number",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "string type matches strings",
+ "schema": {"type": "string"},
+ "tests": [
+ {
+ "description": "1 is not a string",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not a string",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is a string",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "a string is still a string, even if it looks like a number",
+ "data": "1",
+ "valid": true
+ },
+ {
+ "description": "an empty string is still a string",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "an object is not a string",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a string",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not a string",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not a string",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "object type matches objects",
+ "schema": {"type": "object"},
+ "tests": [
+ {
+ "description": "an integer is not an object",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not an object",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an object",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is an object",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "an array is not an object",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not an object",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an object",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "array type matches arrays",
+ "schema": {"type": "array"},
+ "tests": [
+ {
+ "description": "an integer is not an array",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not an array",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an array",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is not an array",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is an array",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "a boolean is not an array",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an array",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "boolean type matches booleans",
+ "schema": {"type": "boolean"},
+ "tests": [
+ {
+ "description": "an integer is not a boolean",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "zero is not a boolean",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "a float is not a boolean",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not a boolean",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an empty string is not a boolean",
+ "data": "",
+ "valid": false
+ },
+ {
+ "description": "an object is not a boolean",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a boolean",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "true is a boolean",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "false is a boolean",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "null is not a boolean",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "null type matches only the null object",
+ "schema": {"type": "null"},
+ "tests": [
+ {
+ "description": "an integer is not null",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not null",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "zero is not null",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "a string is not null",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an empty string is not null",
+ "data": "",
+ "valid": false
+ },
+ {
+ "description": "an object is not null",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not null",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "true is not null",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "false is not null",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "null is null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple types can be specified in an array",
+ "schema": {"type": ["integer", "string"]},
+ "tests": [
+ {
+ "description": "an integer is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "a float is invalid",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "an object is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type as array with one item",
+ "schema": {
+ "type": ["string"]
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type: array or object",
+ "schema": {
+ "type": ["array", "object"]
+ },
+ "tests": [
+ {
+ "description": "array is valid",
+ "data": [1,2,3],
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": 123},
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type: array, object or null",
+ "schema": {
+ "type": ["array", "object", "null"]
+ },
+ "tests": [
+ {
+ "description": "array is valid",
+ "data": [1,2,3],
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": 123},
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft2019-09/uniqueItems.json b/third_party/python/jsonschema/json/tests/draft2019-09/uniqueItems.json
new file mode 100644
index 0000000000..d312ad71ab
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft2019-09/uniqueItems.json
@@ -0,0 +1,173 @@
+[
+ {
+ "description": "uniqueItems validation",
+ "schema": {"uniqueItems": true},
+ "tests": [
+ {
+ "description": "unique array of integers is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of integers is invalid",
+ "data": [1, 1],
+ "valid": false
+ },
+ {
+ "description": "numbers are unique if mathematically unequal",
+ "data": [1.0, 1.00, 1],
+ "valid": false
+ },
+ {
+ "description": "false is not equal to zero",
+ "data": [0, false],
+ "valid": true
+ },
+ {
+ "description": "true is not equal to one",
+ "data": [1, true],
+ "valid": true
+ },
+ {
+ "description": "unique array of objects is valid",
+ "data": [{"foo": "bar"}, {"foo": "baz"}],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of objects is invalid",
+ "data": [{"foo": "bar"}, {"foo": "bar"}],
+ "valid": false
+ },
+ {
+ "description": "unique array of nested objects is valid",
+ "data": [
+ {"foo": {"bar" : {"baz" : true}}},
+ {"foo": {"bar" : {"baz" : false}}}
+ ],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of nested objects is invalid",
+ "data": [
+ {"foo": {"bar" : {"baz" : true}}},
+ {"foo": {"bar" : {"baz" : true}}}
+ ],
+ "valid": false
+ },
+ {
+ "description": "unique array of arrays is valid",
+ "data": [["foo"], ["bar"]],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of arrays is invalid",
+ "data": [["foo"], ["foo"]],
+ "valid": false
+ },
+ {
+ "description": "1 and true are unique",
+ "data": [1, true],
+ "valid": true
+ },
+ {
+ "description": "0 and false are unique",
+ "data": [0, false],
+ "valid": true
+ },
+ {
+ "description": "unique heterogeneous types are valid",
+ "data": [{}, [1], true, null, 1],
+ "valid": true
+ },
+ {
+ "description": "non-unique heterogeneous types are invalid",
+ "data": [{}, [1], true, null, {}, 1],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "uniqueItems with an array of items",
+ "schema": {
+ "items": [{"type": "boolean"}, {"type": "boolean"}],
+ "uniqueItems": true
+ },
+ "tests": [
+ {
+ "description": "[false, true] from items array is valid",
+ "data": [false, true],
+ "valid": true
+ },
+ {
+ "description": "[true, false] from items array is valid",
+ "data": [true, false],
+ "valid": true
+ },
+ {
+ "description": "[false, false] from items array is not valid",
+ "data": [false, false],
+ "valid": false
+ },
+ {
+ "description": "[true, true] from items array is not valid",
+ "data": [true, true],
+ "valid": false
+ },
+ {
+ "description": "unique array extended from [false, true] is valid",
+ "data": [false, true, "foo", "bar"],
+ "valid": true
+ },
+ {
+ "description": "unique array extended from [true, false] is valid",
+ "data": [true, false, "foo", "bar"],
+ "valid": true
+ },
+ {
+ "description": "non-unique array extended from [false, true] is not valid",
+ "data": [false, true, "foo", "foo"],
+ "valid": false
+ },
+ {
+ "description": "non-unique array extended from [true, false] is not valid",
+ "data": [true, false, "foo", "foo"],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "uniqueItems with an array of items and additionalItems=false",
+ "schema": {
+ "items": [{"type": "boolean"}, {"type": "boolean"}],
+ "uniqueItems": true,
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "[false, true] from items array is valid",
+ "data": [false, true],
+ "valid": true
+ },
+ {
+ "description": "[true, false] from items array is valid",
+ "data": [true, false],
+ "valid": true
+ },
+ {
+ "description": "[false, false] from items array is not valid",
+ "data": [false, false],
+ "valid": false
+ },
+ {
+ "description": "[true, true] from items array is not valid",
+ "data": [true, true],
+ "valid": false
+ },
+ {
+ "description": "extra items are invalid even if unique",
+ "data": [false, true, null],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/additionalItems.json b/third_party/python/jsonschema/json/tests/draft3/additionalItems.json
new file mode 100644
index 0000000000..6d4bff51cf
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/additionalItems.json
@@ -0,0 +1,82 @@
+[
+ {
+ "description": "additionalItems as schema",
+ "schema": {
+ "items": [],
+ "additionalItems": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "additional items match schema",
+ "data": [ 1, 2, 3, 4 ],
+ "valid": true
+ },
+ {
+ "description": "additional items do not match schema",
+ "data": [ 1, 2, 3, "foo" ],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "items is schema, no additionalItems",
+ "schema": {
+ "items": {},
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "all items match schema",
+ "data": [ 1, 2, 3, 4, 5 ],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "array of items with no additionalItems",
+ "schema": {
+ "items": [{}, {}, {}],
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "no additional items present",
+ "data": [ 1, 2, 3 ],
+ "valid": true
+ },
+ {
+ "description": "additional items are not permitted",
+ "data": [ 1, 2, 3, 4 ],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "additionalItems as false without items",
+ "schema": {"additionalItems": false},
+ "tests": [
+ {
+ "description":
+ "items defaults to empty schema so everything is valid",
+ "data": [ 1, 2, 3, 4, 5 ],
+ "valid": true
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": {"foo" : "bar"},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "additionalItems are allowed by default",
+ "schema": {"items": []},
+ "tests": [
+ {
+ "description": "only the first items are validated",
+ "data": [1, "foo", false],
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/additionalProperties.json b/third_party/python/jsonschema/json/tests/draft3/additionalProperties.json
new file mode 100644
index 0000000000..bfb0844a26
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/additionalProperties.json
@@ -0,0 +1,133 @@
+[
+ {
+ "description":
+ "additionalProperties being false does not allow other properties",
+ "schema": {
+ "properties": {"foo": {}, "bar": {}},
+ "patternProperties": { "^v": {} },
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "no additional properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "an additional property is invalid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : "boom"},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobarbaz",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "patternProperties are not additional properties",
+ "data": {"foo":1, "vroom": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "non-ASCII pattern with additionalProperties",
+ "schema": {
+ "patternProperties": {"^á": {}},
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "matching the pattern is valid",
+ "data": {"ármányos": 2},
+ "valid": true
+ },
+ {
+ "description": "not matching the pattern is invalid",
+ "data": {"élmény": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description":
+ "additionalProperties allows a schema which should validate",
+ "schema": {
+ "properties": {"foo": {}, "bar": {}},
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "no additional properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "an additional valid property is valid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : true},
+ "valid": true
+ },
+ {
+ "description": "an additional invalid property is invalid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : 12},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description":
+ "additionalProperties can exist by itself",
+ "schema": {
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "an additional valid property is valid",
+ "data": {"foo" : true},
+ "valid": true
+ },
+ {
+ "description": "an additional invalid property is invalid",
+ "data": {"foo" : 1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "additionalProperties are allowed by default",
+ "schema": {"properties": {"foo": {}, "bar": {}}},
+ "tests": [
+ {
+ "description": "additional properties are allowed",
+ "data": {"foo": 1, "bar": 2, "quux": true},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "additionalProperties should not look in applicators",
+ "schema": {
+ "extends": [
+ {"properties": {"foo": {}}}
+ ],
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "properties defined in extends are not allowed",
+ "data": {"foo": 1, "bar": true},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/default.json b/third_party/python/jsonschema/json/tests/draft3/default.json
new file mode 100644
index 0000000000..17629779fb
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/default.json
@@ -0,0 +1,49 @@
+[
+ {
+ "description": "invalid type for default",
+ "schema": {
+ "properties": {
+ "foo": {
+ "type": "integer",
+ "default": []
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when property is specified",
+ "data": {"foo": 13},
+ "valid": true
+ },
+ {
+ "description": "still valid when the invalid default is used",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "invalid string value for default",
+ "schema": {
+ "properties": {
+ "bar": {
+ "type": "string",
+ "minLength": 4,
+ "default": "bad"
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when property is specified",
+ "data": {"bar": "good"},
+ "valid": true
+ },
+ {
+ "description": "still valid when the invalid default is used",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/dependencies.json b/third_party/python/jsonschema/json/tests/draft3/dependencies.json
new file mode 100644
index 0000000000..d7e09256ab
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/dependencies.json
@@ -0,0 +1,118 @@
+[
+ {
+ "description": "dependencies",
+ "schema": {
+ "dependencies": {"bar": "foo"}
+ },
+ "tests": [
+ {
+ "description": "neither",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "nondependant",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "with dependency",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "missing dependency",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": ["bar"],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple dependencies",
+ "schema": {
+ "dependencies": {"quux": ["foo", "bar"]}
+ },
+ "tests": [
+ {
+ "description": "neither",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "nondependants",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "with dependencies",
+ "data": {"foo": 1, "bar": 2, "quux": 3},
+ "valid": true
+ },
+ {
+ "description": "missing dependency",
+ "data": {"foo": 1, "quux": 2},
+ "valid": false
+ },
+ {
+ "description": "missing other dependency",
+ "data": {"bar": 1, "quux": 2},
+ "valid": false
+ },
+ {
+ "description": "missing both dependencies",
+ "data": {"quux": 1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "multiple dependencies subschema",
+ "schema": {
+ "dependencies": {
+ "bar": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"type": "integer"}
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "wrong type",
+ "data": {"foo": "quux", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "wrong type other",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ },
+ {
+ "description": "wrong type both",
+ "data": {"foo": "quux", "bar": "quux"},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/disallow.json b/third_party/python/jsonschema/json/tests/draft3/disallow.json
new file mode 100644
index 0000000000..a5c9d90cce
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/disallow.json
@@ -0,0 +1,80 @@
+[
+ {
+ "description": "disallow",
+ "schema": {
+ "disallow": "integer"
+ },
+ "tests": [
+ {
+ "description": "allowed",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "disallowed",
+ "data": 1,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "multiple disallow",
+ "schema": {
+ "disallow": ["integer", "boolean"]
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "other mismatch",
+ "data": true,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "multiple disallow subschema",
+ "schema": {
+ "disallow":
+ ["string",
+ {
+ "type": "object",
+ "properties": {
+ "foo": {
+ "type": "string"
+ }
+ }
+ }]
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "other match",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "other mismatch",
+ "data": {"foo": "bar"},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/divisibleBy.json b/third_party/python/jsonschema/json/tests/draft3/divisibleBy.json
new file mode 100644
index 0000000000..ef7cc14890
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/divisibleBy.json
@@ -0,0 +1,60 @@
+[
+ {
+ "description": "by int",
+ "schema": {"divisibleBy": 2},
+ "tests": [
+ {
+ "description": "int by int",
+ "data": 10,
+ "valid": true
+ },
+ {
+ "description": "int by int fail",
+ "data": 7,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "by number",
+ "schema": {"divisibleBy": 1.5},
+ "tests": [
+ {
+ "description": "zero is divisible by anything (except 0)",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "4.5 is divisible by 1.5",
+ "data": 4.5,
+ "valid": true
+ },
+ {
+ "description": "35 is not divisible by 1.5",
+ "data": 35,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "by small number",
+ "schema": {"divisibleBy": 0.0001},
+ "tests": [
+ {
+ "description": "0.0075 is divisible by 0.0001",
+ "data": 0.0075,
+ "valid": true
+ },
+ {
+ "description": "0.00751 is not divisible by 0.0001",
+ "data": 0.00751,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/enum.json b/third_party/python/jsonschema/json/tests/draft3/enum.json
new file mode 100644
index 0000000000..fc3e070707
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/enum.json
@@ -0,0 +1,71 @@
+[
+ {
+ "description": "simple enum validation",
+ "schema": {"enum": [1, 2, 3]},
+ "tests": [
+ {
+ "description": "one of the enum is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "something else is invalid",
+ "data": 4,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "heterogeneous enum validation",
+ "schema": {"enum": [6, "foo", [], true, {"foo": 12}]},
+ "tests": [
+ {
+ "description": "one of the enum is valid",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "something else is invalid",
+ "data": null,
+ "valid": false
+ },
+ {
+ "description": "objects are deep compared",
+ "data": {"foo": false},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enums in properties",
+ "schema": {
+ "type":"object",
+ "properties": {
+ "foo": {"enum":["foo"]},
+ "bar": {"enum":["bar"], "required":true}
+ }
+ },
+ "tests": [
+ {
+ "description": "both properties are valid",
+ "data": {"foo":"foo", "bar":"bar"},
+ "valid": true
+ },
+ {
+ "description": "missing optional property is valid",
+ "data": {"bar":"bar"},
+ "valid": true
+ },
+ {
+ "description": "missing required property is invalid",
+ "data": {"foo":"foo"},
+ "valid": false
+ },
+ {
+ "description": "missing all properties is invalid",
+ "data": {},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/extends.json b/third_party/python/jsonschema/json/tests/draft3/extends.json
new file mode 100644
index 0000000000..909bce575a
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/extends.json
@@ -0,0 +1,94 @@
+[
+ {
+ "description": "extends",
+ "schema": {
+ "properties": {"bar": {"type": "integer", "required": true}},
+ "extends": {
+ "properties": {
+ "foo": {"type": "string", "required": true}
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "extends",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "mismatch extends",
+ "data": {"foo": "baz"},
+ "valid": false
+ },
+ {
+ "description": "mismatch extended",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "wrong type",
+ "data": {"foo": "baz", "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "multiple extends",
+ "schema": {
+ "properties": {"bar": {"type": "integer", "required": true}},
+ "extends" : [
+ {
+ "properties": {
+ "foo": {"type": "string", "required": true}
+ }
+ },
+ {
+ "properties": {
+ "baz": {"type": "null", "required": true}
+ }
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": {"foo": "quux", "bar": 2, "baz": null},
+ "valid": true
+ },
+ {
+ "description": "mismatch first extends",
+ "data": {"bar": 2, "baz": null},
+ "valid": false
+ },
+ {
+ "description": "mismatch second extends",
+ "data": {"foo": "quux", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "mismatch both",
+ "data": {"bar": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "extends simple types",
+ "schema": {
+ "minimum": 20,
+ "extends": {"maximum": 30}
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": 25,
+ "valid": true
+ },
+ {
+ "description": "mismatch extends",
+ "data": 35,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/format.json b/third_party/python/jsonschema/json/tests/draft3/format.json
new file mode 100644
index 0000000000..8279336294
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/format.json
@@ -0,0 +1,362 @@
+[
+ {
+ "description": "validation of e-mail addresses",
+ "schema": {"format": "email"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IP addresses",
+ "schema": {"format": "ip-address"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IPv6 addresses",
+ "schema": {"format": "ipv6"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of hostnames",
+ "schema": {"format": "host-name"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of date-time strings",
+ "schema": {"format": "date-time"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of regular expressions",
+ "schema": {"format": "regex"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of date strings",
+ "schema": {"format": "date"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of time strings",
+ "schema": {"format": "time"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of CSS colors",
+ "schema": {"format": "color"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URIs",
+ "schema": {"format": "uri"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/items.json b/third_party/python/jsonschema/json/tests/draft3/items.json
new file mode 100644
index 0000000000..f5e18a1384
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/items.json
@@ -0,0 +1,46 @@
+[
+ {
+ "description": "a schema given for items",
+ "schema": {
+ "items": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "valid items",
+ "data": [ 1, 2, 3 ],
+ "valid": true
+ },
+ {
+ "description": "wrong type of items",
+ "data": [1, "x"],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": {"foo" : "bar"},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "an array of schemas for items",
+ "schema": {
+ "items": [
+ {"type": "integer"},
+ {"type": "string"}
+ ]
+ },
+ "tests": [
+ {
+ "description": "correct types",
+ "data": [ 1, "foo" ],
+ "valid": true
+ },
+ {
+ "description": "wrong types",
+ "data": [ "foo", 1 ],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/maxItems.json b/third_party/python/jsonschema/json/tests/draft3/maxItems.json
new file mode 100644
index 0000000000..3b53a6b371
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/maxItems.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "maxItems validation",
+ "schema": {"maxItems": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": [1],
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": [1, 2, 3],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": "foobar",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/maxLength.json b/third_party/python/jsonschema/json/tests/draft3/maxLength.json
new file mode 100644
index 0000000000..4de42bcaba
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/maxLength.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "maxLength validation",
+ "schema": {"maxLength": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": "f",
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": "fo",
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 10,
+ "valid": true
+ },
+ {
+ "description": "two supplementary Unicode code points is long enough",
+ "data": "\uD83D\uDCA9\uD83D\uDCA9",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/maximum.json b/third_party/python/jsonschema/json/tests/draft3/maximum.json
new file mode 100644
index 0000000000..86c7b89c9a
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/maximum.json
@@ -0,0 +1,42 @@
+[
+ {
+ "description": "maximum validation",
+ "schema": {"maximum": 3.0},
+ "tests": [
+ {
+ "description": "below the maximum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "above the maximum is invalid",
+ "data": 3.5,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "exclusiveMaximum validation",
+ "schema": {
+ "maximum": 3.0,
+ "exclusiveMaximum": true
+ },
+ "tests": [
+ {
+ "description": "below the maximum is still valid",
+ "data": 2.2,
+ "valid": true
+ },
+ {
+ "description": "boundary point is invalid",
+ "data": 3.0,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/minItems.json b/third_party/python/jsonschema/json/tests/draft3/minItems.json
new file mode 100644
index 0000000000..ed5118815e
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/minItems.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "minItems validation",
+ "schema": {"minItems": 1},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": [1],
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": "",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/minLength.json b/third_party/python/jsonschema/json/tests/draft3/minLength.json
new file mode 100644
index 0000000000..3f09158dee
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/minLength.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "minLength validation",
+ "schema": {"minLength": 2},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": "fo",
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": "f",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "one supplementary Unicode code point is not long enough",
+ "data": "\uD83D\uDCA9",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/minimum.json b/third_party/python/jsonschema/json/tests/draft3/minimum.json
new file mode 100644
index 0000000000..5ac9feefef
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/minimum.json
@@ -0,0 +1,73 @@
+[
+ {
+ "description": "minimum validation",
+ "schema": {"minimum": 1.1},
+ "tests": [
+ {
+ "description": "above the minimum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "below the minimum is invalid",
+ "data": 0.6,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "exclusiveMinimum validation",
+ "schema": {
+ "minimum": 1.1,
+ "exclusiveMinimum": true
+ },
+ "tests": [
+ {
+ "description": "above the minimum is still valid",
+ "data": 1.2,
+ "valid": true
+ },
+ {
+ "description": "boundary point is invalid",
+ "data": 1.1,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "minimum validation with signed integer",
+ "schema": {"minimum": -2},
+ "tests": [
+ {
+ "description": "negative above the minimum is valid",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "positive above the minimum is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": -2,
+ "valid": true
+ },
+ {
+ "description": "below the minimum is invalid",
+ "data": -3,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/optional/bignum.json b/third_party/python/jsonschema/json/tests/draft3/optional/bignum.json
new file mode 100644
index 0000000000..ccc7c17fe8
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/optional/bignum.json
@@ -0,0 +1,107 @@
+[
+ {
+ "description": "integer",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "a bignum is an integer",
+ "data": 12345678910111213141516171819202122232425262728293031,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "number",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "a bignum is a number",
+ "data": 98249283749234923498293171823948729348710298301928331,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "integer",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "a negative bignum is an integer",
+ "data": -12345678910111213141516171819202122232425262728293031,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "number",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "a negative bignum is a number",
+ "data": -98249283749234923498293171823948729348710298301928331,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "string",
+ "schema": {"type": "string"},
+ "tests": [
+ {
+ "description": "a bignum is not a string",
+ "data": 98249283749234923498293171823948729348710298301928331,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "integer comparison",
+ "schema": {"maximum": 18446744073709551615},
+ "tests": [
+ {
+ "description": "comparison works for high numbers",
+ "data": 18446744073709551600,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "float comparison with high precision",
+ "schema": {
+ "maximum": 972783798187987123879878123.18878137,
+ "exclusiveMaximum": true
+ },
+ "tests": [
+ {
+ "description": "comparison works for high numbers",
+ "data": 972783798187987123879878123.188781371,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "integer comparison",
+ "schema": {"minimum": -18446744073709551615},
+ "tests": [
+ {
+ "description": "comparison works for very negative numbers",
+ "data": -18446744073709551600,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "float comparison with high precision on negative numbers",
+ "schema": {
+ "minimum": -972783798187987123879878123.18878137,
+ "exclusiveMinimum": true
+ },
+ "tests": [
+ {
+ "description": "comparison works for very negative numbers",
+ "data": -972783798187987123879878123.188781371,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/optional/ecmascript-regex.json b/third_party/python/jsonschema/json/tests/draft3/optional/ecmascript-regex.json
new file mode 100644
index 0000000000..03fe97724c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/optional/ecmascript-regex.json
@@ -0,0 +1,18 @@
+[
+ {
+ "description": "ECMA 262 regex dialect recognition",
+ "schema": { "format": "regex" },
+ "tests": [
+ {
+ "description": "[^] is a valid regex",
+ "data": "[^]",
+ "valid": true
+ },
+ {
+ "description": "ECMA 262 has no support for lookbehind",
+ "data": "(?<=foo)bar",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/optional/format.json b/third_party/python/jsonschema/json/tests/draft3/optional/format.json
new file mode 100644
index 0000000000..9864589dd2
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/optional/format.json
@@ -0,0 +1,227 @@
+[
+ {
+ "description": "validation of regular expressions",
+ "schema": {"format": "regex"},
+ "tests": [
+ {
+ "description": "a valid regular expression",
+ "data": "([abc])+\\s+$",
+ "valid": true
+ },
+ {
+ "description": "a regular expression with unclosed parens is invalid",
+ "data": "^(abc]",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of date-time strings",
+ "schema": {"format": "date-time"},
+ "tests": [
+ {
+ "description": "a valid date-time string",
+ "data": "1963-06-19T08:30:06.283185Z",
+ "valid": true
+ },
+ {
+ "description": "an invalid date-time string",
+ "data": "06/19/1963 08:30:06 PST",
+ "valid": false
+ },
+ {
+ "description": "case-insensitive T and Z",
+ "data": "1963-06-19t08:30:06.283185z",
+ "valid": true
+ },
+ {
+ "description": "only RFC3339 not all of ISO 8601 are valid",
+ "data": "2013-350T01:01:01",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of date strings",
+ "schema": {"format": "date"},
+ "tests": [
+ {
+ "description": "a valid date string",
+ "data": "1963-06-19",
+ "valid": true
+ },
+ {
+ "description": "an invalid date string",
+ "data": "06/19/1963",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of time strings",
+ "schema": {"format": "time"},
+ "tests": [
+ {
+ "description": "a valid time string",
+ "data": "08:30:06",
+ "valid": true
+ },
+ {
+ "description": "an invalid time string",
+ "data": "8:30 AM",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of URIs",
+ "schema": {"format": "uri"},
+ "tests": [
+ {
+ "description": "a valid URI",
+ "data": "http://foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+ "description": "an invalid protocol-relative URI Reference",
+ "data": "//foo.bar/?baz=qux#quux",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI",
+ "data": "\\\\WINDOWS\\fileshare",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI though valid URI reference",
+ "data": "abc",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of e-mail addresses",
+ "schema": {"format": "email"},
+ "tests": [
+ {
+ "description": "a valid e-mail address",
+ "data": "joe.bloggs@example.com",
+ "valid": true
+ },
+ {
+ "description": "an invalid e-mail address",
+ "data": "2962",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of IP addresses",
+ "schema": {"format": "ip-address"},
+ "tests": [
+ {
+ "description": "a valid IP address",
+ "data": "192.168.0.1",
+ "valid": true
+ },
+ {
+ "description": "an IP address with too many components",
+ "data": "127.0.0.0.1",
+ "valid": false
+ },
+ {
+ "description": "an IP address with out-of-range values",
+ "data": "256.256.256.256",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of IPv6 addresses",
+ "schema": {"format": "ipv6"},
+ "tests": [
+ {
+ "description": "a valid IPv6 address",
+ "data": "::1",
+ "valid": true
+ },
+ {
+ "description": "an IPv6 address with out-of-range values",
+ "data": "12345::",
+ "valid": false
+ },
+ {
+ "description": "an IPv6 address with too many components",
+ "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1",
+ "valid": false
+ },
+ {
+ "description": "an IPv6 address containing illegal characters",
+ "data": "::laptop",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of host names",
+ "schema": {"format": "host-name"},
+ "tests": [
+ {
+ "description": "a valid host name",
+ "data": "www.example.com",
+ "valid": true
+ },
+ {
+ "description": "a host name starting with an illegal character",
+ "data": "-a-host-name-that-starts-with--",
+ "valid": false
+ },
+ {
+ "description": "a host name containing illegal characters",
+ "data": "not_a_valid_host_name",
+ "valid": false
+ },
+ {
+ "description": "a host name with a component too long",
+ "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of CSS colors",
+ "schema": {"format": "color"},
+ "tests": [
+ {
+ "description": "a valid CSS color name",
+ "data": "fuchsia",
+ "valid": true
+ },
+ {
+ "description": "a valid six-digit CSS color code",
+ "data": "#CC8899",
+ "valid": true
+ },
+ {
+ "description": "a valid three-digit CSS color code",
+ "data": "#C89",
+ "valid": true
+ },
+ {
+ "description": "an invalid CSS color code",
+ "data": "#00332520",
+ "valid": false
+ },
+ {
+ "description": "an invalid CSS color name",
+ "data": "puce",
+ "valid": false
+ },
+ {
+ "description": "a CSS color name containing invalid characters",
+ "data": "light_grayish_red-violet",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/optional/zeroTerminatedFloats.json b/third_party/python/jsonschema/json/tests/draft3/optional/zeroTerminatedFloats.json
new file mode 100644
index 0000000000..9b50ea2776
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/optional/zeroTerminatedFloats.json
@@ -0,0 +1,15 @@
+[
+ {
+ "description": "some languages do not distinguish between different types of numeric value",
+ "schema": {
+ "type": "integer"
+ },
+ "tests": [
+ {
+ "description": "a float is not an integer even without fractional part",
+ "data": 1.0,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/pattern.json b/third_party/python/jsonschema/json/tests/draft3/pattern.json
new file mode 100644
index 0000000000..25e7299731
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/pattern.json
@@ -0,0 +1,34 @@
+[
+ {
+ "description": "pattern validation",
+ "schema": {"pattern": "^a*$"},
+ "tests": [
+ {
+ "description": "a matching pattern is valid",
+ "data": "aaa",
+ "valid": true
+ },
+ {
+ "description": "a non-matching pattern is invalid",
+ "data": "abc",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": true,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "pattern is not anchored",
+ "schema": {"pattern": "a+"},
+ "tests": [
+ {
+ "description": "matches a substring",
+ "data": "xxaayy",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/patternProperties.json b/third_party/python/jsonschema/json/tests/draft3/patternProperties.json
new file mode 100644
index 0000000000..2ca9aaebdd
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/patternProperties.json
@@ -0,0 +1,115 @@
+[
+ {
+ "description":
+ "patternProperties validates properties matching a regex",
+ "schema": {
+ "patternProperties": {
+ "f.*o": {"type": "integer"}
+ }
+ },
+ "tests": [
+ {
+ "description": "a single valid match is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "multiple valid matches is valid",
+ "data": {"foo": 1, "foooooo" : 2},
+ "valid": true
+ },
+ {
+ "description": "a single invalid match is invalid",
+ "data": {"foo": "bar", "fooooo": 2},
+ "valid": false
+ },
+ {
+ "description": "multiple invalid matches is invalid",
+ "data": {"foo": "bar", "foooooo" : "baz"},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple simultaneous patternProperties are validated",
+ "schema": {
+ "patternProperties": {
+ "a*": {"type": "integer"},
+ "aaa*": {"maximum": 20}
+ }
+ },
+ "tests": [
+ {
+ "description": "a single valid match is valid",
+ "data": {"a": 21},
+ "valid": true
+ },
+ {
+ "description": "a simultaneous match is valid",
+ "data": {"aaaa": 18},
+ "valid": true
+ },
+ {
+ "description": "multiple matches is valid",
+ "data": {"a": 21, "aaaa": 18},
+ "valid": true
+ },
+ {
+ "description": "an invalid due to one is invalid",
+ "data": {"a": "bar"},
+ "valid": false
+ },
+ {
+ "description": "an invalid due to the other is invalid",
+ "data": {"aaaa": 31},
+ "valid": false
+ },
+ {
+ "description": "an invalid due to both is invalid",
+ "data": {"aaa": "foo", "aaaa": 31},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "regexes are not anchored by default and are case sensitive",
+ "schema": {
+ "patternProperties": {
+ "[0-9]{2,}": { "type": "boolean" },
+ "X_": { "type": "string" }
+ }
+ },
+ "tests": [
+ {
+ "description": "non recognized members are ignored",
+ "data": { "answer 1": "42" },
+ "valid": true
+ },
+ {
+ "description": "recognized members are accounted for",
+ "data": { "a31b": null },
+ "valid": false
+ },
+ {
+ "description": "regexes are case sensitive",
+ "data": { "a_x_3": 3 },
+ "valid": true
+ },
+ {
+ "description": "regexes are case sensitive, 2",
+ "data": { "a_X_3": 3 },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/properties.json b/third_party/python/jsonschema/json/tests/draft3/properties.json
new file mode 100644
index 0000000000..a830c67e7b
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/properties.json
@@ -0,0 +1,97 @@
+[
+ {
+ "description": "object properties validation",
+ "schema": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"type": "string"}
+ }
+ },
+ "tests": [
+ {
+ "description": "both properties present and valid is valid",
+ "data": {"foo": 1, "bar": "baz"},
+ "valid": true
+ },
+ {
+ "description": "one property invalid is invalid",
+ "data": {"foo": 1, "bar": {}},
+ "valid": false
+ },
+ {
+ "description": "both properties invalid is invalid",
+ "data": {"foo": [], "bar": {}},
+ "valid": false
+ },
+ {
+ "description": "doesn't invalidate other properties",
+ "data": {"quux": []},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description":
+ "properties, patternProperties, additionalProperties interaction",
+ "schema": {
+ "properties": {
+ "foo": {"type": "array", "maxItems": 3},
+ "bar": {"type": "array"}
+ },
+ "patternProperties": {"f.o": {"minItems": 2}},
+ "additionalProperties": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "property validates property",
+ "data": {"foo": [1, 2]},
+ "valid": true
+ },
+ {
+ "description": "property invalidates property",
+ "data": {"foo": [1, 2, 3, 4]},
+ "valid": false
+ },
+ {
+ "description": "patternProperty invalidates property",
+ "data": {"foo": []},
+ "valid": false
+ },
+ {
+ "description": "patternProperty validates nonproperty",
+ "data": {"fxo": [1, 2]},
+ "valid": true
+ },
+ {
+ "description": "patternProperty invalidates nonproperty",
+ "data": {"fxo": []},
+ "valid": false
+ },
+ {
+ "description": "additionalProperty ignores property",
+ "data": {"bar": []},
+ "valid": true
+ },
+ {
+ "description": "additionalProperty validates others",
+ "data": {"quux": 3},
+ "valid": true
+ },
+ {
+ "description": "additionalProperty invalidates others",
+ "data": {"quux": "foo"},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/ref.json b/third_party/python/jsonschema/json/tests/draft3/ref.json
new file mode 100644
index 0000000000..31414ad6ba
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/ref.json
@@ -0,0 +1,192 @@
+[
+ {
+ "description": "root pointer ref",
+ "schema": {
+ "properties": {
+ "foo": {"$ref": "#"}
+ },
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": {"foo": false},
+ "valid": true
+ },
+ {
+ "description": "recursive match",
+ "data": {"foo": {"foo": false}},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"bar": false},
+ "valid": false
+ },
+ {
+ "description": "recursive mismatch",
+ "data": {"foo": {"bar": false}},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "relative pointer ref to object",
+ "schema": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"$ref": "#/properties/foo"}
+ }
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": {"bar": 3},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"bar": true},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "relative pointer ref to array",
+ "schema": {
+ "items": [
+ {"type": "integer"},
+ {"$ref": "#/items/0"}
+ ]
+ },
+ "tests": [
+ {
+ "description": "match array",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "mismatch array",
+ "data": [1, "foo"],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "escaped pointer ref",
+ "schema": {
+ "tilda~field": {"type": "integer"},
+ "slash/field": {"type": "integer"},
+ "percent%field": {"type": "integer"},
+ "properties": {
+ "tilda": {"$ref": "#/tilda~0field"},
+ "slash": {"$ref": "#/slash~1field"},
+ "percent": {"$ref": "#/percent%25field"}
+ }
+ },
+ "tests": [
+ {
+ "description": "slash invalid",
+ "data": {"slash": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "tilda invalid",
+ "data": {"tilda": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "percent invalid",
+ "data": {"percent": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "slash valid",
+ "data": {"slash": 123},
+ "valid": true
+ },
+ {
+ "description": "tilda valid",
+ "data": {"tilda": 123},
+ "valid": true
+ },
+ {
+ "description": "percent valid",
+ "data": {"percent": 123},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested refs",
+ "schema": {
+ "definitions": {
+ "a": {"type": "integer"},
+ "b": {"$ref": "#/definitions/a"},
+ "c": {"$ref": "#/definitions/b"}
+ },
+ "$ref": "#/definitions/c"
+ },
+ "tests": [
+ {
+ "description": "nested ref valid",
+ "data": 5,
+ "valid": true
+ },
+ {
+ "description": "nested ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ref overrides any sibling keywords",
+ "schema": {
+ "definitions": {
+ "reffed": {
+ "type": "array"
+ }
+ },
+ "properties": {
+ "foo": {
+ "$ref": "#/definitions/reffed",
+ "maxItems": 2
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "remote ref valid",
+ "data": { "foo": [] },
+ "valid": true
+ },
+ {
+ "description": "remote ref valid, maxItems ignored",
+ "data": { "foo": [ 1, 2, 3] },
+ "valid": true
+ },
+ {
+ "description": "ref invalid",
+ "data": { "foo": "string" },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "remote ref, containing refs itself",
+ "schema": {"$ref": "http://json-schema.org/draft-03/schema#"},
+ "tests": [
+ {
+ "description": "remote ref valid",
+ "data": {"items": {"type": "integer"}},
+ "valid": true
+ },
+ {
+ "description": "remote ref invalid",
+ "data": {"items": {"type": 1}},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/refRemote.json b/third_party/python/jsonschema/json/tests/draft3/refRemote.json
new file mode 100644
index 0000000000..4ca804732c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/refRemote.json
@@ -0,0 +1,74 @@
+[
+ {
+ "description": "remote ref",
+ "schema": {"$ref": "http://localhost:1234/integer.json"},
+ "tests": [
+ {
+ "description": "remote ref valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "remote ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "fragment within remote ref",
+ "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"},
+ "tests": [
+ {
+ "description": "remote fragment valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "remote fragment invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ref within remote ref",
+ "schema": {
+ "$ref": "http://localhost:1234/subSchemas.json#/refToInteger"
+ },
+ "tests": [
+ {
+ "description": "ref within ref valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "ref within ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "change resolution scope",
+ "schema": {
+ "id": "http://localhost:1234/",
+ "items": {
+ "id": "folder/",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ },
+ "tests": [
+ {
+ "description": "changed scope ref valid",
+ "data": [[1]],
+ "valid": true
+ },
+ {
+ "description": "changed scope ref invalid",
+ "data": [["a"]],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/required.json b/third_party/python/jsonschema/json/tests/draft3/required.json
new file mode 100644
index 0000000000..aaaf024273
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/required.json
@@ -0,0 +1,53 @@
+[
+ {
+ "description": "required validation",
+ "schema": {
+ "properties": {
+ "foo": {"required" : true},
+ "bar": {}
+ }
+ },
+ "tests": [
+ {
+ "description": "present required property is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "non-present required property is invalid",
+ "data": {"bar": 1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "required default validation",
+ "schema": {
+ "properties": {
+ "foo": {}
+ }
+ },
+ "tests": [
+ {
+ "description": "not required by default",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required explicitly false validation",
+ "schema": {
+ "properties": {
+ "foo": {"required": false}
+ }
+ },
+ "tests": [
+ {
+ "description": "not required if required is false",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/type.json b/third_party/python/jsonschema/json/tests/draft3/type.json
new file mode 100644
index 0000000000..49c9b40a99
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/type.json
@@ -0,0 +1,489 @@
+[
+ {
+ "description": "integer type matches integers",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "an integer is an integer",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a float is not an integer",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an integer",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "a string is still not an integer, even if it looks like one",
+ "data": "1",
+ "valid": false
+ },
+ {
+ "description": "an object is not an integer",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not an integer",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not an integer",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an integer",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "number type matches numbers",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "an integer is a number",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a float is a number",
+ "data": 1.1,
+ "valid": true
+ },
+ {
+ "description": "a string is not a number",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "a string is still not a number, even if it looks like one",
+ "data": "1",
+ "valid": false
+ },
+ {
+ "description": "an object is not a number",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a number",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not a number",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not a number",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "string type matches strings",
+ "schema": {"type": "string"},
+ "tests": [
+ {
+ "description": "1 is not a string",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not a string",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is a string",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "a string is still a string, even if it looks like a number",
+ "data": "1",
+ "valid": true
+ },
+ {
+ "description": "an object is not a string",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a string",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not a string",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not a string",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "object type matches objects",
+ "schema": {"type": "object"},
+ "tests": [
+ {
+ "description": "an integer is not an object",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not an object",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an object",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is an object",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "an array is not an object",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not an object",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an object",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "array type matches arrays",
+ "schema": {"type": "array"},
+ "tests": [
+ {
+ "description": "an integer is not an array",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not an array",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an array",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is not an array",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is an array",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "a boolean is not an array",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an array",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "boolean type matches booleans",
+ "schema": {"type": "boolean"},
+ "tests": [
+ {
+ "description": "an integer is not a boolean",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not a boolean",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not a boolean",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is not a boolean",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a boolean",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is a boolean",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "null is not a boolean",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "null type matches only the null object",
+ "schema": {"type": "null"},
+ "tests": [
+ {
+ "description": "an integer is not null",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not null",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not null",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is not null",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not null",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not null",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "any type matches any type",
+ "schema": {"type": "any"},
+ "tests": [
+ {
+ "description": "any type includes integers",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "any type includes float",
+ "data": 1.1,
+ "valid": true
+ },
+ {
+ "description": "any type includes string",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "any type includes object",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "any type includes array",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "any type includes boolean",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "any type includes null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple types can be specified in an array",
+ "schema": {"type": ["integer", "string"]},
+ "tests": [
+ {
+ "description": "an integer is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "a float is invalid",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "an object is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "types can include schemas",
+ "schema": {
+ "type": [
+ "array",
+ {"type": "object"}
+ ]
+ },
+ "tests": [
+ {
+ "description": "an integer is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a string is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "a float is invalid",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "an object is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "an array is valid",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "a boolean is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description":
+ "when types includes a schema it should fully validate the schema",
+ "schema": {
+ "type": [
+ "integer",
+ {
+ "properties": {
+ "foo": {"type": "null"}
+ }
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "an integer is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "an object is valid only if it is fully valid",
+ "data": {"foo": null},
+ "valid": true
+ },
+ {
+ "description": "an object is invalid otherwise",
+ "data": {"foo": "bar"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "types from separate schemas are merged",
+ "schema": {
+ "type": [
+ {"type": ["string"]},
+ {"type": ["array", "null"]}
+ ]
+ },
+ "tests": [
+ {
+ "description": "an integer is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "an array is valid",
+ "data": [1, 2, 3],
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft3/uniqueItems.json b/third_party/python/jsonschema/json/tests/draft3/uniqueItems.json
new file mode 100644
index 0000000000..59e3542cef
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft3/uniqueItems.json
@@ -0,0 +1,163 @@
+[
+ {
+ "description": "uniqueItems validation",
+ "schema": {"uniqueItems": true},
+ "tests": [
+ {
+ "description": "unique array of integers is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of integers is invalid",
+ "data": [1, 1],
+ "valid": false
+ },
+ {
+ "description": "numbers are unique if mathematically unequal",
+ "data": [1.0, 1.00, 1],
+ "valid": false
+ },
+ {
+ "description": "unique array of objects is valid",
+ "data": [{"foo": "bar"}, {"foo": "baz"}],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of objects is invalid",
+ "data": [{"foo": "bar"}, {"foo": "bar"}],
+ "valid": false
+ },
+ {
+ "description": "unique array of nested objects is valid",
+ "data": [
+ {"foo": {"bar" : {"baz" : true}}},
+ {"foo": {"bar" : {"baz" : false}}}
+ ],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of nested objects is invalid",
+ "data": [
+ {"foo": {"bar" : {"baz" : true}}},
+ {"foo": {"bar" : {"baz" : true}}}
+ ],
+ "valid": false
+ },
+ {
+ "description": "unique array of arrays is valid",
+ "data": [["foo"], ["bar"]],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of arrays is invalid",
+ "data": [["foo"], ["foo"]],
+ "valid": false
+ },
+ {
+ "description": "1 and true are unique",
+ "data": [1, true],
+ "valid": true
+ },
+ {
+ "description": "0 and false are unique",
+ "data": [0, false],
+ "valid": true
+ },
+ {
+ "description": "unique heterogeneous types are valid",
+ "data": [{}, [1], true, null, 1],
+ "valid": true
+ },
+ {
+ "description": "non-unique heterogeneous types are invalid",
+ "data": [{}, [1], true, null, {}, 1],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "uniqueItems with an array of items",
+ "schema": {
+ "items": [{"type": "boolean"}, {"type": "boolean"}],
+ "uniqueItems": true
+ },
+ "tests": [
+ {
+ "description": "[false, true] from items array is valid",
+ "data": [false, true],
+ "valid": true
+ },
+ {
+ "description": "[true, false] from items array is valid",
+ "data": [true, false],
+ "valid": true
+ },
+ {
+ "description": "[false, false] from items array is not valid",
+ "data": [false, false],
+ "valid": false
+ },
+ {
+ "description": "[true, true] from items array is not valid",
+ "data": [true, true],
+ "valid": false
+ },
+ {
+ "description": "unique array extended from [false, true] is valid",
+ "data": [false, true, "foo", "bar"],
+ "valid": true
+ },
+ {
+ "description": "unique array extended from [true, false] is valid",
+ "data": [true, false, "foo", "bar"],
+ "valid": true
+ },
+ {
+ "description": "non-unique array extended from [false, true] is not valid",
+ "data": [false, true, "foo", "foo"],
+ "valid": false
+ },
+ {
+ "description": "non-unique array extended from [true, false] is not valid",
+ "data": [true, false, "foo", "foo"],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "uniqueItems with an array of items and additionalItems=false",
+ "schema": {
+ "items": [{"type": "boolean"}, {"type": "boolean"}],
+ "uniqueItems": true,
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "[false, true] from items array is valid",
+ "data": [false, true],
+ "valid": true
+ },
+ {
+ "description": "[true, false] from items array is valid",
+ "data": [true, false],
+ "valid": true
+ },
+ {
+ "description": "[false, false] from items array is not valid",
+ "data": [false, false],
+ "valid": false
+ },
+ {
+ "description": "[true, true] from items array is not valid",
+ "data": [true, true],
+ "valid": false
+ },
+ {
+ "description": "extra items are invalid even if unique",
+ "data": [false, true, null],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/additionalItems.json b/third_party/python/jsonschema/json/tests/draft4/additionalItems.json
new file mode 100644
index 0000000000..abecc578be
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/additionalItems.json
@@ -0,0 +1,87 @@
+[
+ {
+ "description": "additionalItems as schema",
+ "schema": {
+ "items": [{}],
+ "additionalItems": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "additional items match schema",
+ "data": [ null, 2, 3, 4 ],
+ "valid": true
+ },
+ {
+ "description": "additional items do not match schema",
+ "data": [ null, 2, 3, "foo" ],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "items is schema, no additionalItems",
+ "schema": {
+ "items": {},
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "all items match schema",
+ "data": [ 1, 2, 3, 4, 5 ],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "array of items with no additionalItems",
+ "schema": {
+ "items": [{}, {}, {}],
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "fewer number of items present",
+ "data": [ 1, 2 ],
+ "valid": true
+ },
+ {
+ "description": "equal number of items present",
+ "data": [ 1, 2, 3 ],
+ "valid": true
+ },
+ {
+ "description": "additional items are not permitted",
+ "data": [ 1, 2, 3, 4 ],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "additionalItems as false without items",
+ "schema": {"additionalItems": false},
+ "tests": [
+ {
+ "description":
+ "items defaults to empty schema so everything is valid",
+ "data": [ 1, 2, 3, 4, 5 ],
+ "valid": true
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": {"foo" : "bar"},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "additionalItems are allowed by default",
+ "schema": {"items": [{"type": "integer"}]},
+ "tests": [
+ {
+ "description": "only the first item is validated",
+ "data": [1, "foo", false],
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/additionalProperties.json b/third_party/python/jsonschema/json/tests/draft4/additionalProperties.json
new file mode 100644
index 0000000000..ffeac6b381
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/additionalProperties.json
@@ -0,0 +1,133 @@
+[
+ {
+ "description":
+ "additionalProperties being false does not allow other properties",
+ "schema": {
+ "properties": {"foo": {}, "bar": {}},
+ "patternProperties": { "^v": {} },
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "no additional properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "an additional property is invalid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : "boom"},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobarbaz",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "patternProperties are not additional properties",
+ "data": {"foo":1, "vroom": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "non-ASCII pattern with additionalProperties",
+ "schema": {
+ "patternProperties": {"^á": {}},
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "matching the pattern is valid",
+ "data": {"ármányos": 2},
+ "valid": true
+ },
+ {
+ "description": "not matching the pattern is invalid",
+ "data": {"élmény": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description":
+ "additionalProperties allows a schema which should validate",
+ "schema": {
+ "properties": {"foo": {}, "bar": {}},
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "no additional properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "an additional valid property is valid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : true},
+ "valid": true
+ },
+ {
+ "description": "an additional invalid property is invalid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : 12},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description":
+ "additionalProperties can exist by itself",
+ "schema": {
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "an additional valid property is valid",
+ "data": {"foo" : true},
+ "valid": true
+ },
+ {
+ "description": "an additional invalid property is invalid",
+ "data": {"foo" : 1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "additionalProperties are allowed by default",
+ "schema": {"properties": {"foo": {}, "bar": {}}},
+ "tests": [
+ {
+ "description": "additional properties are allowed",
+ "data": {"foo": 1, "bar": 2, "quux": true},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "additionalProperties should not look in applicators",
+ "schema": {
+ "allOf": [
+ {"properties": {"foo": {}}}
+ ],
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "properties defined in allOf are not allowed",
+ "data": {"foo": 1, "bar": true},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/allOf.json b/third_party/python/jsonschema/json/tests/draft4/allOf.json
new file mode 100644
index 0000000000..ce9fdd466c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/allOf.json
@@ -0,0 +1,185 @@
+[
+ {
+ "description": "allOf",
+ "schema": {
+ "allOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "allOf",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "mismatch second",
+ "data": {"foo": "baz"},
+ "valid": false
+ },
+ {
+ "description": "mismatch first",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "wrong type",
+ "data": {"foo": "baz", "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with base schema",
+ "schema": {
+ "properties": {"bar": {"type": "integer"}},
+ "required": ["bar"],
+ "allOf" : [
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ },
+ {
+ "properties": {
+ "baz": {"type": "null"}
+ },
+ "required": ["baz"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": {"foo": "quux", "bar": 2, "baz": null},
+ "valid": true
+ },
+ {
+ "description": "mismatch base schema",
+ "data": {"foo": "quux", "baz": null},
+ "valid": false
+ },
+ {
+ "description": "mismatch first allOf",
+ "data": {"bar": 2, "baz": null},
+ "valid": false
+ },
+ {
+ "description": "mismatch second allOf",
+ "data": {"foo": "quux", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "mismatch both",
+ "data": {"bar": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf simple types",
+ "schema": {
+ "allOf": [
+ {"maximum": 30},
+ {"minimum": 20}
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": 25,
+ "valid": true
+ },
+ {
+ "description": "mismatch one",
+ "data": 35,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with one empty schema",
+ "schema": {
+ "allOf": [
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "any data is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with two empty schemas",
+ "schema": {
+ "allOf": [
+ {},
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "any data is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with the first empty schema",
+ "schema": {
+ "allOf": [
+ {},
+ { "type": "number" }
+ ]
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with the last empty schema",
+ "schema": {
+ "allOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/anyOf.json b/third_party/python/jsonschema/json/tests/draft4/anyOf.json
new file mode 100644
index 0000000000..09cc3c2f64
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/anyOf.json
@@ -0,0 +1,156 @@
+[
+ {
+ "description": "anyOf",
+ "schema": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "minimum": 2
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first anyOf valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "second anyOf valid",
+ "data": 2.5,
+ "valid": true
+ },
+ {
+ "description": "both anyOf valid",
+ "data": 3,
+ "valid": true
+ },
+ {
+ "description": "neither anyOf valid",
+ "data": 1.5,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with base schema",
+ "schema": {
+ "type": "string",
+ "anyOf" : [
+ {
+ "maxLength": 2
+ },
+ {
+ "minLength": 4
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "mismatch base schema",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "one anyOf valid",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "both anyOf invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf complex types",
+ "schema": {
+ "anyOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first anyOf valid (complex)",
+ "data": {"bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second anyOf valid (complex)",
+ "data": {"foo": "baz"},
+ "valid": true
+ },
+ {
+ "description": "both anyOf valid (complex)",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "neither anyOf valid (complex)",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with one empty schema",
+ "schema": {
+ "anyOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "number is valid",
+ "data": 123,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested anyOf, to check validation semantics",
+ "schema": {
+ "anyOf": [
+ {
+ "anyOf": [
+ {
+ "type": "null"
+ }
+ ]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "anything non-null is invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/default.json b/third_party/python/jsonschema/json/tests/draft4/default.json
new file mode 100644
index 0000000000..17629779fb
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/default.json
@@ -0,0 +1,49 @@
+[
+ {
+ "description": "invalid type for default",
+ "schema": {
+ "properties": {
+ "foo": {
+ "type": "integer",
+ "default": []
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when property is specified",
+ "data": {"foo": 13},
+ "valid": true
+ },
+ {
+ "description": "still valid when the invalid default is used",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "invalid string value for default",
+ "schema": {
+ "properties": {
+ "bar": {
+ "type": "string",
+ "minLength": 4,
+ "default": "bad"
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when property is specified",
+ "data": {"bar": "good"},
+ "valid": true
+ },
+ {
+ "description": "still valid when the invalid default is used",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/definitions.json b/third_party/python/jsonschema/json/tests/draft4/definitions.json
new file mode 100644
index 0000000000..cf935a3215
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/definitions.json
@@ -0,0 +1,32 @@
+[
+ {
+ "description": "valid definition",
+ "schema": {"$ref": "http://json-schema.org/draft-04/schema#"},
+ "tests": [
+ {
+ "description": "valid definition schema",
+ "data": {
+ "definitions": {
+ "foo": {"type": "integer"}
+ }
+ },
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "invalid definition",
+ "schema": {"$ref": "http://json-schema.org/draft-04/schema#"},
+ "tests": [
+ {
+ "description": "invalid definition schema",
+ "data": {
+ "definitions": {
+ "foo": {"type": 1}
+ }
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/dependencies.json b/third_party/python/jsonschema/json/tests/draft4/dependencies.json
new file mode 100644
index 0000000000..51eeddf32e
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/dependencies.json
@@ -0,0 +1,194 @@
+[
+ {
+ "description": "dependencies",
+ "schema": {
+ "dependencies": {"bar": ["foo"]}
+ },
+ "tests": [
+ {
+ "description": "neither",
+ "data": {},
+ "valid": true
+ },
+ {
+                "description": "nondependent",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "with dependency",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "missing dependency",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": ["bar"],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple dependencies",
+ "schema": {
+ "dependencies": {"quux": ["foo", "bar"]}
+ },
+ "tests": [
+ {
+ "description": "neither",
+ "data": {},
+ "valid": true
+ },
+ {
+                "description": "nondependents",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "with dependencies",
+ "data": {"foo": 1, "bar": 2, "quux": 3},
+ "valid": true
+ },
+ {
+ "description": "missing dependency",
+ "data": {"foo": 1, "quux": 2},
+ "valid": false
+ },
+ {
+ "description": "missing other dependency",
+ "data": {"bar": 1, "quux": 2},
+ "valid": false
+ },
+ {
+ "description": "missing both dependencies",
+ "data": {"quux": 1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "multiple dependencies subschema",
+ "schema": {
+ "dependencies": {
+ "bar": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"type": "integer"}
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "no dependency",
+ "data": {"foo": "quux"},
+ "valid": true
+ },
+ {
+ "description": "wrong type",
+ "data": {"foo": "quux", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "wrong type other",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ },
+ {
+ "description": "wrong type both",
+ "data": {"foo": "quux", "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "dependencies with escaped characters",
+ "schema": {
+ "dependencies": {
+ "foo\nbar": ["foo\rbar"],
+ "foo\tbar": {
+ "minProperties": 4
+ },
+ "foo'bar": {"required": ["foo\"bar"]},
+ "foo\"bar": ["foo'bar"]
+ }
+ },
+ "tests": [
+ {
+ "description": "valid object 1",
+ "data": {
+ "foo\nbar": 1,
+ "foo\rbar": 2
+ },
+ "valid": true
+ },
+ {
+ "description": "valid object 2",
+ "data": {
+ "foo\tbar": 1,
+ "a": 2,
+ "b": 3,
+ "c": 4
+ },
+ "valid": true
+ },
+ {
+ "description": "valid object 3",
+ "data": {
+ "foo'bar": 1,
+ "foo\"bar": 2
+ },
+ "valid": true
+ },
+ {
+ "description": "invalid object 1",
+ "data": {
+ "foo\nbar": 1,
+ "foo": 2
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 2",
+ "data": {
+ "foo\tbar": 1,
+ "a": 2
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 3",
+ "data": {
+ "foo'bar": 1
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 4",
+ "data": {
+ "foo\"bar": 2
+ },
+ "valid": false
+ }
+ ]
+ }
+]
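
The dependencies keyword appears above in both of its draft 4 forms: property dependencies (an array of property names that become required) and schema dependencies (a subschema applied when the trigger property is present); non-objects are ignored. A sketch of the array form, same assumptions as above:

    import jsonschema

    validator = jsonschema.Draft4Validator({"dependencies": {"bar": ["foo"]}})

    assert validator.is_valid({})                    # neither property present
    assert validator.is_valid({"foo": 1, "bar": 2})  # dependency satisfied
    assert not validator.is_valid({"bar": 2})        # "bar" present without "foo"
    assert validator.is_valid(["bar"])               # non-objects are ignored
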
diff --git a/third_party/python/jsonschema/json/tests/draft4/enum.json b/third_party/python/jsonschema/json/tests/draft4/enum.json
new file mode 100644
index 0000000000..32d79026e1
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/enum.json
@@ -0,0 +1,179 @@
+[
+ {
+ "description": "simple enum validation",
+ "schema": {"enum": [1, 2, 3]},
+ "tests": [
+ {
+ "description": "one of the enum is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "something else is invalid",
+ "data": 4,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "heterogeneous enum validation",
+ "schema": {"enum": [6, "foo", [], true, {"foo": 12}]},
+ "tests": [
+ {
+ "description": "one of the enum is valid",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "something else is invalid",
+ "data": null,
+ "valid": false
+ },
+ {
+ "description": "objects are deep compared",
+ "data": {"foo": false},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enums in properties",
+ "schema": {
+ "type":"object",
+ "properties": {
+ "foo": {"enum":["foo"]},
+ "bar": {"enum":["bar"]}
+ },
+ "required": ["bar"]
+ },
+ "tests": [
+ {
+ "description": "both properties are valid",
+ "data": {"foo":"foo", "bar":"bar"},
+ "valid": true
+ },
+ {
+ "description": "missing optional property is valid",
+ "data": {"bar":"bar"},
+ "valid": true
+ },
+ {
+ "description": "missing required property is invalid",
+ "data": {"foo":"foo"},
+ "valid": false
+ },
+ {
+ "description": "missing all properties is invalid",
+ "data": {},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with escaped characters",
+ "schema": {
+ "enum": ["foo\nbar", "foo\rbar"]
+ },
+ "tests": [
+ {
+ "description": "member 1 is valid",
+ "data": "foo\nbar",
+ "valid": true
+ },
+ {
+ "description": "member 2 is valid",
+ "data": "foo\rbar",
+ "valid": true
+ },
+ {
+ "description": "another string is invalid",
+ "data": "abc",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with false does not match 0",
+ "schema": {"enum": [false]},
+ "tests": [
+ {
+ "description": "false is valid",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "integer zero is invalid",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "float zero is invalid",
+ "data": 0.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with true does not match 1",
+ "schema": {"enum": [true]},
+ "tests": [
+ {
+ "description": "true is valid",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "integer one is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "float one is invalid",
+ "data": 1.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with 0 does not match false",
+ "schema": {"enum": [0]},
+ "tests": [
+ {
+ "description": "false is invalid",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "integer zero is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "float zero is valid",
+ "data": 0.0,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "enum with 1 does not match true",
+ "schema": {"enum": [1]},
+ "tests": [
+ {
+ "description": "true is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "integer one is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "float one is valid",
+ "data": 1.0,
+ "valid": true
+ }
+ ]
+ }
+]
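
The false/0 and true/1 pairs matter for Python, where 0 == False and 1 == True; the fixtures require a conforming validator to keep booleans and numbers distinct inside enum. Illustrative sketch, assuming jsonschema is importable:

    import jsonschema

    validator = jsonschema.Draft4Validator({"enum": [False]})

    assert validator.is_valid(False)
    # 0 == False in Python, but the enum must still reject plain numbers.
    assert not validator.is_valid(0)
    assert not validator.is_valid(0.0)
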
diff --git a/third_party/python/jsonschema/json/tests/draft4/format.json b/third_party/python/jsonschema/json/tests/draft4/format.json
new file mode 100644
index 0000000000..61e4b62a53
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/format.json
@@ -0,0 +1,218 @@
+[
+ {
+ "description": "validation of e-mail addresses",
+ "schema": {"format": "email"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IP addresses",
+ "schema": {"format": "ipv4"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IPv6 addresses",
+ "schema": {"format": "ipv6"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of hostnames",
+ "schema": {"format": "hostname"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of date-time strings",
+ "schema": {"format": "date-time"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URIs",
+ "schema": {"format": "uri"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/items.json b/third_party/python/jsonschema/json/tests/draft4/items.json
new file mode 100644
index 0000000000..7bf9f02ba1
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/items.json
@@ -0,0 +1,195 @@
+[
+ {
+ "description": "a schema given for items",
+ "schema": {
+ "items": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "valid items",
+ "data": [ 1, 2, 3 ],
+ "valid": true
+ },
+ {
+ "description": "wrong type of items",
+ "data": [1, "x"],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": {"foo" : "bar"},
+ "valid": true
+ },
+ {
+ "description": "JavaScript pseudo-array is valid",
+ "data": {
+ "0": "invalid",
+ "length": 1
+ },
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "an array of schemas for items",
+ "schema": {
+ "items": [
+ {"type": "integer"},
+ {"type": "string"}
+ ]
+ },
+ "tests": [
+ {
+ "description": "correct types",
+ "data": [ 1, "foo" ],
+ "valid": true
+ },
+ {
+ "description": "wrong types",
+ "data": [ "foo", 1 ],
+ "valid": false
+ },
+ {
+ "description": "incomplete array of items",
+ "data": [ 1 ],
+ "valid": true
+ },
+ {
+ "description": "array with additional items",
+ "data": [ 1, "foo", true ],
+ "valid": true
+ },
+ {
+ "description": "empty array",
+ "data": [ ],
+ "valid": true
+ },
+ {
+ "description": "JavaScript pseudo-array is valid",
+ "data": {
+ "0": "invalid",
+ "1": "valid",
+ "length": 2
+ },
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items and subitems",
+ "schema": {
+ "definitions": {
+ "item": {
+ "type": "array",
+ "additionalItems": false,
+ "items": [
+ { "$ref": "#/definitions/sub-item" },
+ { "$ref": "#/definitions/sub-item" }
+ ]
+ },
+ "sub-item": {
+ "type": "object",
+ "required": ["foo"]
+ }
+ },
+ "type": "array",
+ "additionalItems": false,
+ "items": [
+ { "$ref": "#/definitions/item" },
+ { "$ref": "#/definitions/item" },
+ { "$ref": "#/definitions/item" }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid items",
+ "data": [
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": true
+ },
+ {
+ "description": "too many items",
+ "data": [
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "too many sub-items",
+ "data": [
+ [ {"foo": null}, {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "wrong item",
+ "data": [
+ {"foo": null},
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "wrong sub-item",
+ "data": [
+ [ {}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "fewer items is valid",
+ "data": [
+ [ {"foo": null} ],
+ [ {"foo": null} ]
+ ],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested items",
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "number"
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid nested array",
+ "data": [[[[1]], [[2],[3]]], [[[4], [5], [6]]]],
+ "valid": true
+ },
+ {
+ "description": "nested array with invalid type",
+ "data": [[[["1"]], [[2],[3]]], [[[4], [5], [6]]]],
+ "valid": false
+ },
+ {
+ "description": "not deep enough",
+ "data": [[[1], [2],[3]], [[4], [5], [6]]],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/maxItems.json b/third_party/python/jsonschema/json/tests/draft4/maxItems.json
new file mode 100644
index 0000000000..3b53a6b371
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/maxItems.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "maxItems validation",
+ "schema": {"maxItems": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": [1],
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": [1, 2, 3],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": "foobar",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/maxLength.json b/third_party/python/jsonschema/json/tests/draft4/maxLength.json
new file mode 100644
index 0000000000..811d35b253
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/maxLength.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "maxLength validation",
+ "schema": {"maxLength": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": "f",
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": "fo",
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ },
+ {
+ "description": "two supplementary Unicode code points is long enough",
+ "data": "\uD83D\uDCA9\uD83D\uDCA9",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/maxProperties.json b/third_party/python/jsonschema/json/tests/draft4/maxProperties.json
new file mode 100644
index 0000000000..513731e4c8
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/maxProperties.json
@@ -0,0 +1,38 @@
+[
+ {
+ "description": "maxProperties validation",
+ "schema": {"maxProperties": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": {"foo": 1, "bar": 2, "baz": 3},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/maximum.json b/third_party/python/jsonschema/json/tests/draft4/maximum.json
new file mode 100644
index 0000000000..02581f62a2
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/maximum.json
@@ -0,0 +1,73 @@
+[
+ {
+ "description": "maximum validation",
+ "schema": {"maximum": 3.0},
+ "tests": [
+ {
+ "description": "below the maximum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": 3.0,
+ "valid": true
+ },
+ {
+ "description": "above the maximum is invalid",
+ "data": 3.5,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "maximum validation (explicit false exclusivity)",
+ "schema": {"maximum": 3.0, "exclusiveMaximum": false},
+ "tests": [
+ {
+ "description": "below the maximum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": 3.0,
+ "valid": true
+ },
+ {
+ "description": "above the maximum is invalid",
+ "data": 3.5,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "exclusiveMaximum validation",
+ "schema": {
+ "maximum": 3.0,
+ "exclusiveMaximum": true
+ },
+ "tests": [
+ {
+ "description": "below the maximum is still valid",
+ "data": 2.2,
+ "valid": true
+ },
+ {
+ "description": "boundary point is invalid",
+ "data": 3.0,
+ "valid": false
+ }
+ ]
+ }
+]
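
Draft 4 expresses exclusive bounds as the boolean exclusiveMaximum/exclusiveMinimum alongside maximum/minimum (later drafts moved to standalone numeric keywords). Sketch of the boundary behaviour, same assumptions as above:

    import jsonschema

    inclusive = jsonschema.Draft4Validator({"maximum": 3.0})
    exclusive = jsonschema.Draft4Validator({"maximum": 3.0, "exclusiveMaximum": True})

    assert inclusive.is_valid(3.0)       # boundary point allowed
    assert not exclusive.is_valid(3.0)   # boundary point rejected when exclusive
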
diff --git a/third_party/python/jsonschema/json/tests/draft4/minItems.json b/third_party/python/jsonschema/json/tests/draft4/minItems.json
new file mode 100644
index 0000000000..ed5118815e
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/minItems.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "minItems validation",
+ "schema": {"minItems": 1},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": [1],
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": "",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/minLength.json b/third_party/python/jsonschema/json/tests/draft4/minLength.json
new file mode 100644
index 0000000000..3f09158dee
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/minLength.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "minLength validation",
+ "schema": {"minLength": 2},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": "fo",
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": "f",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "one supplementary Unicode code point is not long enough",
+ "data": "\uD83D\uDCA9",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/minProperties.json b/third_party/python/jsonschema/json/tests/draft4/minProperties.json
new file mode 100644
index 0000000000..49a0726e01
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/minProperties.json
@@ -0,0 +1,38 @@
+[
+ {
+ "description": "minProperties validation",
+ "schema": {"minProperties": 1},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/minimum.json b/third_party/python/jsonschema/json/tests/draft4/minimum.json
new file mode 100644
index 0000000000..6becf2a881
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/minimum.json
@@ -0,0 +1,104 @@
+[
+ {
+ "description": "minimum validation",
+ "schema": {"minimum": 1.1},
+ "tests": [
+ {
+ "description": "above the minimum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": 1.1,
+ "valid": true
+ },
+ {
+ "description": "below the minimum is invalid",
+ "data": 0.6,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "minimum validation (explicit false exclusivity)",
+ "schema": {"minimum": 1.1, "exclusiveMinimum": false},
+ "tests": [
+ {
+ "description": "above the minimum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": 1.1,
+ "valid": true
+ },
+ {
+ "description": "below the minimum is invalid",
+ "data": 0.6,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "exclusiveMinimum validation",
+ "schema": {
+ "minimum": 1.1,
+ "exclusiveMinimum": true
+ },
+ "tests": [
+ {
+ "description": "above the minimum is still valid",
+ "data": 1.2,
+ "valid": true
+ },
+ {
+ "description": "boundary point is invalid",
+ "data": 1.1,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "minimum validation with signed integer",
+ "schema": {"minimum": -2},
+ "tests": [
+ {
+ "description": "negative above the minimum is valid",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "positive above the minimum is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": -2,
+ "valid": true
+ },
+ {
+ "description": "below the minimum is invalid",
+ "data": -3,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/multipleOf.json b/third_party/python/jsonschema/json/tests/draft4/multipleOf.json
new file mode 100644
index 0000000000..ca3b761805
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/multipleOf.json
@@ -0,0 +1,60 @@
+[
+ {
+ "description": "by int",
+ "schema": {"multipleOf": 2},
+ "tests": [
+ {
+ "description": "int by int",
+ "data": 10,
+ "valid": true
+ },
+ {
+ "description": "int by int fail",
+ "data": 7,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "by number",
+ "schema": {"multipleOf": 1.5},
+ "tests": [
+ {
+ "description": "zero is multiple of anything",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "4.5 is multiple of 1.5",
+ "data": 4.5,
+ "valid": true
+ },
+ {
+ "description": "35 is not multiple of 1.5",
+ "data": 35,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "by small number",
+ "schema": {"multipleOf": 0.0001},
+ "tests": [
+ {
+ "description": "0.0075 is multiple of 0.0001",
+ "data": 0.0075,
+ "valid": true
+ },
+ {
+ "description": "0.00751 is not multiple of 0.0001",
+ "data": 0.00751,
+ "valid": false
+ }
+ ]
+ }
+]
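
The "by small number" case (0.0075 against 0.0001) probes floating-point robustness, where a naive remainder check can misreport the result. Sketch of the straightforward cases, same assumptions as above:

    import jsonschema

    validator = jsonschema.Draft4Validator({"multipleOf": 1.5})

    assert validator.is_valid(0)        # zero is a multiple of anything
    assert validator.is_valid(4.5)      # 4.5 / 1.5 == 3 exactly
    assert not validator.is_valid(35)   # 35 / 1.5 is not an integer
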
diff --git a/third_party/python/jsonschema/json/tests/draft4/not.json b/third_party/python/jsonschema/json/tests/draft4/not.json
new file mode 100644
index 0000000000..cbb7f46bf8
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/not.json
@@ -0,0 +1,96 @@
+[
+ {
+ "description": "not",
+ "schema": {
+ "not": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "allowed",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "disallowed",
+ "data": 1,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not multiple types",
+ "schema": {
+ "not": {"type": ["integer", "boolean"]}
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "other mismatch",
+ "data": true,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not more complex schema",
+ "schema": {
+ "not": {
+ "type": "object",
+ "properties": {
+ "foo": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "other match",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"foo": "bar"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "forbidden property",
+ "schema": {
+ "properties": {
+ "foo": {
+ "not": {}
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "property present",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "property absent",
+ "data": {"bar": 1, "baz": 2},
+ "valid": true
+ }
+ ]
+ }
+
+]
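
The "forbidden property" case is a common idiom: "not": {} negates the empty schema, which matches everything, so any value supplied for that property fails. Sketch under the same assumptions:

    import jsonschema

    validator = jsonschema.Draft4Validator({"properties": {"foo": {"not": {}}}})

    assert not validator.is_valid({"foo": 1, "bar": 2})  # "foo" present, so invalid
    assert validator.is_valid({"bar": 1, "baz": 2})      # "foo" absent, so valid
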
diff --git a/third_party/python/jsonschema/json/tests/draft4/oneOf.json b/third_party/python/jsonschema/json/tests/draft4/oneOf.json
new file mode 100644
index 0000000000..9dfffe13b6
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/oneOf.json
@@ -0,0 +1,162 @@
+[
+ {
+ "description": "oneOf",
+ "schema": {
+ "oneOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "minimum": 2
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first oneOf valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "second oneOf valid",
+ "data": 2.5,
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "neither oneOf valid",
+ "data": 1.5,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with base schema",
+ "schema": {
+ "type": "string",
+ "oneOf" : [
+ {
+ "minLength": 2
+ },
+ {
+ "maxLength": 4
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "mismatch base schema",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "one oneOf valid",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf complex types",
+ "schema": {
+ "oneOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first oneOf valid (complex)",
+ "data": {"bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second oneOf valid (complex)",
+ "data": {"foo": "baz"},
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid (complex)",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "neither oneOf valid (complex)",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with empty schema",
+ "schema": {
+ "oneOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "one valid - valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "both valid - invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with required",
+ "schema": {
+ "type": "object",
+ "oneOf": [
+ { "required": ["foo", "bar"] },
+ { "required": ["foo", "baz"] }
+ ]
+ },
+ "tests": [
+ {
+ "description": "both invalid - invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "first valid - valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second valid - valid",
+ "data": {"foo": 1, "baz": 3},
+ "valid": true
+ },
+ {
+ "description": "both valid - invalid",
+ "data": {"foo": 1, "bar": 2, "baz" : 3},
+ "valid": false
+ }
+ ]
+ }
+]
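
Unlike anyOf, oneOf requires exactly one branch to validate, which is what the "both valid - invalid" cases exercise. Sketch, same assumptions as above:

    import jsonschema

    validator = jsonschema.Draft4Validator({
        "type": "object",
        "oneOf": [
            {"required": ["foo", "bar"]},
            {"required": ["foo", "baz"]},
        ],
    })

    assert validator.is_valid({"foo": 1, "bar": 2})                # exactly one branch matches
    assert not validator.is_valid({"foo": 1, "bar": 2, "baz": 3})  # both branches match
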
diff --git a/third_party/python/jsonschema/json/tests/draft4/optional/bignum.json b/third_party/python/jsonschema/json/tests/draft4/optional/bignum.json
new file mode 100644
index 0000000000..ccc7c17fe8
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/optional/bignum.json
@@ -0,0 +1,107 @@
+[
+ {
+ "description": "integer",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "a bignum is an integer",
+ "data": 12345678910111213141516171819202122232425262728293031,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "number",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "a bignum is a number",
+ "data": 98249283749234923498293171823948729348710298301928331,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "integer",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "a negative bignum is an integer",
+ "data": -12345678910111213141516171819202122232425262728293031,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "number",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "a negative bignum is a number",
+ "data": -98249283749234923498293171823948729348710298301928331,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "string",
+ "schema": {"type": "string"},
+ "tests": [
+ {
+ "description": "a bignum is not a string",
+ "data": 98249283749234923498293171823948729348710298301928331,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "integer comparison",
+ "schema": {"maximum": 18446744073709551615},
+ "tests": [
+ {
+ "description": "comparison works for high numbers",
+ "data": 18446744073709551600,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "float comparison with high precision",
+ "schema": {
+ "maximum": 972783798187987123879878123.18878137,
+ "exclusiveMaximum": true
+ },
+ "tests": [
+ {
+ "description": "comparison works for high numbers",
+ "data": 972783798187987123879878123.188781371,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "integer comparison",
+ "schema": {"minimum": -18446744073709551615},
+ "tests": [
+ {
+ "description": "comparison works for very negative numbers",
+ "data": -18446744073709551600,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "float comparison with high precision on negative numbers",
+ "schema": {
+ "minimum": -972783798187987123879878123.18878137,
+ "exclusiveMinimum": true
+ },
+ "tests": [
+ {
+ "description": "comparison works for very negative numbers",
+ "data": -972783798187987123879878123.188781371,
+ "valid": false
+ }
+ ]
+ }
+]
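
These cases live under optional/ because not every host language has arbitrary-precision integers; Python does, so comparisons on values beyond 2**63 stay exact. Sketch, same assumptions as above:

    import jsonschema

    validator = jsonschema.Draft4Validator({"maximum": 18446744073709551615})

    # Python ints are arbitrary precision, so the comparison loses no precision.
    assert validator.is_valid(18446744073709551600)
    assert not validator.is_valid(18446744073709551616)
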
diff --git a/third_party/python/jsonschema/json/tests/draft4/optional/ecmascript-regex.json b/third_party/python/jsonschema/json/tests/draft4/optional/ecmascript-regex.json
new file mode 100644
index 0000000000..d82e0feb03
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/optional/ecmascript-regex.json
@@ -0,0 +1,213 @@
+[
+ {
+ "description": "ECMA 262 regex non-compliance",
+ "schema": { "format": "regex" },
+ "tests": [
+ {
+ "description": "ECMA 262 has no support for \\Z anchor from .NET",
+ "data": "^\\S(|(.|\\n)*\\S)\\Z",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex $ does not match trailing newline",
+ "schema": {
+ "type": "string",
+ "pattern": "^abc$"
+ },
+ "tests": [
+ {
+ "description": "matches in Python, but should not in jsonschema",
+ "data": "abc\n",
+ "valid": false
+ },
+ {
+ "description": "should match",
+ "data": "abc",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex converts \\a to ascii BEL",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\a$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\a",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0007",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex escapes control codes with \\c and upper letter",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\cC$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\cC",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0003",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex escapes control codes with \\c and lower letter",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\cc$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\cc",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0003",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\d matches ascii digits only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\d$"
+ },
+ "tests": [
+ {
+ "description": "ASCII zero matches",
+ "data": "0",
+ "valid": true
+ },
+ {
+ "description": "NKO DIGIT ZERO does not match (unlike e.g. Python)",
+ "data": "߀",
+ "valid": false
+ },
+ {
+ "description": "NKO DIGIT ZERO (as \\u escape) does not match",
+ "data": "\u07c0",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\D matches everything but ascii digits",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\D$"
+ },
+ "tests": [
+ {
+ "description": "ASCII zero does not match",
+ "data": "0",
+ "valid": false
+ },
+ {
+ "description": "NKO DIGIT ZERO matches (unlike e.g. Python)",
+ "data": "߀",
+ "valid": true
+ },
+ {
+ "description": "NKO DIGIT ZERO (as \\u escape) matches",
+ "data": "\u07c0",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\w matches ascii letters only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\w$"
+ },
+ "tests": [
+ {
+ "description": "ASCII 'a' matches",
+ "data": "a",
+ "valid": true
+ },
+ {
+ "description": "latin-1 e-acute does not match (unlike e.g. Python)",
+ "data": "é",
+ "valid": false
+ }
+ ]
+ },
+ {
+        "description": "ECMA 262 \\W matches everything but ascii letters",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\W$"
+ },
+ "tests": [
+ {
+ "description": "ASCII 'a' does not match",
+ "data": "a",
+ "valid": false
+ },
+ {
+ "description": "latin-1 e-acute matches (unlike e.g. Python)",
+ "data": "é",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\s matches ascii whitespace only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\s$"
+ },
+ "tests": [
+ {
+ "description": "ASCII space matches",
+ "data": " ",
+ "valid": true
+ },
+ {
+ "description": "latin-1 non-breaking-space does not match (unlike e.g. Python)",
+ "data": "\u00a0",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\S matches everything but ascii whitespace",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\S$"
+ },
+ "tests": [
+ {
+ "description": "ASCII space does not match",
+ "data": " ",
+ "valid": false
+ },
+ {
+ "description": "latin-1 non-breaking-space matches (unlike e.g. Python)",
+ "data": "\u00a0",
+ "valid": true
+ }
+ ]
+ }
+]
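
These optional cases flag places where Python's re module diverges from ECMA 262: in Python 3, \d, \w and \s are Unicode-aware by default, while the ECMA 262 classes are ASCII-only. A small standalone illustration of the divergence (plain re, not the validator; example mine):

    import re

    # Python's Unicode-aware \d matches NKO DIGIT ZERO; ECMA 262 would not.
    assert re.search(r"^\d$", "\u07c0")
    # re.ASCII restricts \d to ASCII digits, approximating the ECMA behaviour.
    assert re.search(r"^\d$", "\u07c0", re.ASCII) is None
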
diff --git a/third_party/python/jsonschema/json/tests/draft4/optional/format.json b/third_party/python/jsonschema/json/tests/draft4/optional/format.json
new file mode 100644
index 0000000000..4bf4ea8e1f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/optional/format.json
@@ -0,0 +1,253 @@
+[
+ {
+ "description": "validation of date-time strings",
+ "schema": {"format": "date-time"},
+ "tests": [
+ {
+ "description": "a valid date-time string",
+ "data": "1963-06-19T08:30:06.283185Z",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string without second fraction",
+ "data": "1963-06-19T08:30:06Z",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string with plus offset",
+ "data": "1937-01-01T12:00:27.87+00:20",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string with minus offset",
+ "data": "1990-12-31T15:59:50.123-08:00",
+ "valid": true
+ },
+ {
+                "description": "an invalid day in date-time string",
+ "data": "1990-02-31T15:59:60.123-08:00",
+ "valid": false
+ },
+ {
+ "description": "an invalid offset in date-time string",
+ "data": "1990-12-31T15:59:60-24:00",
+ "valid": false
+ },
+ {
+ "description": "an invalid date-time string",
+ "data": "06/19/1963 08:30:06 PST",
+ "valid": false
+ },
+ {
+ "description": "case-insensitive T and Z",
+ "data": "1963-06-19t08:30:06.283185z",
+ "valid": true
+ },
+ {
+ "description": "only RFC3339 not all of ISO 8601 are valid",
+ "data": "2013-350T01:01:01",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of URIs",
+ "schema": {"format": "uri"},
+ "tests": [
+ {
+ "description": "a valid URL with anchor tag",
+ "data": "http://foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+                "description": "a valid URL with anchor tag and parentheses",
+ "data": "http://foo.com/blah_(wikipedia)_blah#cite-1",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with URL-encoded stuff",
+ "data": "http://foo.bar/?q=Test%20URL-encoded%20stuff",
+ "valid": true
+ },
+ {
+                "description": "a valid punycoded URL",
+ "data": "http://xn--nw2a.xn--j6w193g/",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with many special characters",
+ "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid URL based on IPv4",
+ "data": "http://223.255.255.254",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with ftp scheme",
+ "data": "ftp://ftp.is.co.za/rfc/rfc1808.txt",
+ "valid": true
+ },
+ {
+ "description": "a valid URL for a simple text file",
+ "data": "http://www.ietf.org/rfc/rfc2396.txt",
+ "valid": true
+ },
+ {
+ "description": "a valid URL ",
+ "data": "ldap://[2001:db8::7]/c=GB?objectClass?one",
+ "valid": true
+ },
+ {
+ "description": "a valid mailto URI",
+ "data": "mailto:John.Doe@example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid newsgroup URI",
+ "data": "news:comp.infosystems.www.servers.unix",
+ "valid": true
+ },
+ {
+ "description": "a valid tel URI",
+ "data": "tel:+1-816-555-1212",
+ "valid": true
+ },
+ {
+ "description": "a valid URN",
+ "data": "urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
+ "valid": true
+ },
+ {
+ "description": "an invalid protocol-relative URI Reference",
+ "data": "//foo.bar/?baz=qux#quux",
+ "valid": false
+ },
+ {
+ "description": "an invalid relative URI Reference",
+ "data": "/abc",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI",
+ "data": "\\\\WINDOWS\\fileshare",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI though valid URI reference",
+ "data": "abc",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI with spaces",
+ "data": "http:// shouldfail.com",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI with spaces and missing scheme",
+ "data": ":// should fail",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of e-mail addresses",
+ "schema": {"format": "email"},
+ "tests": [
+ {
+ "description": "a valid e-mail address",
+ "data": "joe.bloggs@example.com",
+ "valid": true
+ },
+ {
+ "description": "an invalid e-mail address",
+ "data": "2962",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of IP addresses",
+ "schema": {"format": "ipv4"},
+ "tests": [
+ {
+ "description": "a valid IP address",
+ "data": "192.168.0.1",
+ "valid": true
+ },
+ {
+ "description": "an IP address with too many components",
+ "data": "127.0.0.0.1",
+ "valid": false
+ },
+ {
+ "description": "an IP address with out-of-range values",
+ "data": "256.256.256.256",
+ "valid": false
+ },
+ {
+ "description": "an IP address without 4 components",
+ "data": "127.0",
+ "valid": false
+ },
+ {
+ "description": "an IP address as an integer",
+ "data": "0x7f000001",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of IPv6 addresses",
+ "schema": {"format": "ipv6"},
+ "tests": [
+ {
+ "description": "a valid IPv6 address",
+ "data": "::1",
+ "valid": true
+ },
+ {
+ "description": "an IPv6 address with out-of-range values",
+ "data": "12345::",
+ "valid": false
+ },
+ {
+ "description": "an IPv6 address with too many components",
+ "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1",
+ "valid": false
+ },
+ {
+ "description": "an IPv6 address containing illegal characters",
+ "data": "::laptop",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of host names",
+ "schema": {"format": "hostname"},
+ "tests": [
+ {
+ "description": "a valid host name",
+ "data": "www.example.com",
+ "valid": true
+ },
+ {
+ "description": "a host name starting with an illegal character",
+ "data": "-a-host-name-that-starts-with--",
+ "valid": false
+ },
+ {
+ "description": "a host name containing illegal characters",
+ "data": "not_a_valid_host_name",
+ "valid": false
+ },
+ {
+ "description": "a host name with a component too long",
+ "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component",
+ "valid": false
+ }
+ ]
+ }
+]
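
In the jsonschema library, format assertions are opt-in: without a FormatChecker the keyword is annotation-only (the non-optional format.json above only checks that non-strings are ignored). Sketch, assuming the checker needed for ipv4 is available in the vendored build:

    import jsonschema

    schema = {"format": "ipv4"}

    # Annotation-only without a checker.
    assert jsonschema.Draft4Validator(schema).is_valid("256.256.256.256")

    # With a FormatChecker, the optional format cases above apply.
    checked = jsonschema.Draft4Validator(schema, format_checker=jsonschema.FormatChecker())
    assert checked.is_valid("192.168.0.1")
    assert not checked.is_valid("256.256.256.256")
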
diff --git a/third_party/python/jsonschema/json/tests/draft4/optional/zeroTerminatedFloats.json b/third_party/python/jsonschema/json/tests/draft4/optional/zeroTerminatedFloats.json
new file mode 100644
index 0000000000..9b50ea2776
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/optional/zeroTerminatedFloats.json
@@ -0,0 +1,15 @@
+[
+ {
+ "description": "some languages do not distinguish between different types of numeric value",
+ "schema": {
+ "type": "integer"
+ },
+ "tests": [
+ {
+ "description": "a float is not an integer even without fractional part",
+ "data": 1.0,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/pattern.json b/third_party/python/jsonschema/json/tests/draft4/pattern.json
new file mode 100644
index 0000000000..25e7299731
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/pattern.json
@@ -0,0 +1,34 @@
+[
+ {
+ "description": "pattern validation",
+ "schema": {"pattern": "^a*$"},
+ "tests": [
+ {
+ "description": "a matching pattern is valid",
+ "data": "aaa",
+ "valid": true
+ },
+ {
+ "description": "a non-matching pattern is invalid",
+ "data": "abc",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": true,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "pattern is not anchored",
+ "schema": {"pattern": "a+"},
+ "tests": [
+ {
+ "description": "matches a substring",
+ "data": "xxaayy",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/patternProperties.json b/third_party/python/jsonschema/json/tests/draft4/patternProperties.json
new file mode 100644
index 0000000000..5f741dfca6
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/patternProperties.json
@@ -0,0 +1,120 @@
+[
+ {
+ "description":
+ "patternProperties validates properties matching a regex",
+ "schema": {
+ "patternProperties": {
+ "f.*o": {"type": "integer"}
+ }
+ },
+ "tests": [
+ {
+ "description": "a single valid match is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "multiple valid matches is valid",
+ "data": {"foo": 1, "foooooo" : 2},
+ "valid": true
+ },
+ {
+ "description": "a single invalid match is invalid",
+ "data": {"foo": "bar", "fooooo": 2},
+ "valid": false
+ },
+ {
+ "description": "multiple invalid matches is invalid",
+ "data": {"foo": "bar", "foooooo" : "baz"},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple simultaneous patternProperties are validated",
+ "schema": {
+ "patternProperties": {
+ "a*": {"type": "integer"},
+ "aaa*": {"maximum": 20}
+ }
+ },
+ "tests": [
+ {
+ "description": "a single valid match is valid",
+ "data": {"a": 21},
+ "valid": true
+ },
+ {
+ "description": "a simultaneous match is valid",
+ "data": {"aaaa": 18},
+ "valid": true
+ },
+ {
+ "description": "multiple matches is valid",
+ "data": {"a": 21, "aaaa": 18},
+ "valid": true
+ },
+ {
+ "description": "an invalid due to one is invalid",
+ "data": {"a": "bar"},
+ "valid": false
+ },
+ {
+ "description": "an invalid due to the other is invalid",
+ "data": {"aaaa": 31},
+ "valid": false
+ },
+ {
+ "description": "an invalid due to both is invalid",
+ "data": {"aaa": "foo", "aaaa": 31},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "regexes are not anchored by default and are case sensitive",
+ "schema": {
+ "patternProperties": {
+ "[0-9]{2,}": { "type": "boolean" },
+ "X_": { "type": "string" }
+ }
+ },
+ "tests": [
+ {
+                "description": "non-recognized members are ignored",
+ "data": { "answer 1": "42" },
+ "valid": true
+ },
+ {
+ "description": "recognized members are accounted for",
+ "data": { "a31b": null },
+ "valid": false
+ },
+ {
+ "description": "regexes are case sensitive",
+ "data": { "a_x_3": 3 },
+ "valid": true
+ },
+ {
+ "description": "regexes are case sensitive, 2",
+ "data": { "a_X_3": 3 },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/properties.json b/third_party/python/jsonschema/json/tests/draft4/properties.json
new file mode 100644
index 0000000000..688527bc66
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/properties.json
@@ -0,0 +1,136 @@
+[
+ {
+ "description": "object properties validation",
+ "schema": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"type": "string"}
+ }
+ },
+ "tests": [
+ {
+ "description": "both properties present and valid is valid",
+ "data": {"foo": 1, "bar": "baz"},
+ "valid": true
+ },
+ {
+ "description": "one property invalid is invalid",
+ "data": {"foo": 1, "bar": {}},
+ "valid": false
+ },
+ {
+ "description": "both properties invalid is invalid",
+ "data": {"foo": [], "bar": {}},
+ "valid": false
+ },
+ {
+ "description": "doesn't invalidate other properties",
+ "data": {"quux": []},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description":
+ "properties, patternProperties, additionalProperties interaction",
+ "schema": {
+ "properties": {
+ "foo": {"type": "array", "maxItems": 3},
+ "bar": {"type": "array"}
+ },
+ "patternProperties": {"f.o": {"minItems": 2}},
+ "additionalProperties": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "property validates property",
+ "data": {"foo": [1, 2]},
+ "valid": true
+ },
+ {
+ "description": "property invalidates property",
+ "data": {"foo": [1, 2, 3, 4]},
+ "valid": false
+ },
+ {
+ "description": "patternProperty invalidates property",
+ "data": {"foo": []},
+ "valid": false
+ },
+ {
+ "description": "patternProperty validates nonproperty",
+ "data": {"fxo": [1, 2]},
+ "valid": true
+ },
+ {
+ "description": "patternProperty invalidates nonproperty",
+ "data": {"fxo": []},
+ "valid": false
+ },
+ {
+ "description": "additionalProperty ignores property",
+ "data": {"bar": []},
+ "valid": true
+ },
+ {
+ "description": "additionalProperty validates others",
+ "data": {"quux": 3},
+ "valid": true
+ },
+ {
+ "description": "additionalProperty invalidates others",
+ "data": {"quux": "foo"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "properties with escaped characters",
+ "schema": {
+ "properties": {
+ "foo\nbar": {"type": "number"},
+ "foo\"bar": {"type": "number"},
+ "foo\\bar": {"type": "number"},
+ "foo\rbar": {"type": "number"},
+ "foo\tbar": {"type": "number"},
+ "foo\fbar": {"type": "number"}
+ }
+ },
+ "tests": [
+ {
+ "description": "object with all numbers is valid",
+ "data": {
+ "foo\nbar": 1,
+ "foo\"bar": 1,
+ "foo\\bar": 1,
+ "foo\rbar": 1,
+ "foo\tbar": 1,
+ "foo\fbar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with strings is invalid",
+ "data": {
+ "foo\nbar": "1",
+ "foo\"bar": "1",
+ "foo\\bar": "1",
+ "foo\rbar": "1",
+ "foo\tbar": "1",
+ "foo\fbar": "1"
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/ref.json b/third_party/python/jsonschema/json/tests/draft4/ref.json
new file mode 100644
index 0000000000..51e750fb6f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/ref.json
@@ -0,0 +1,411 @@
+[
+ {
+ "description": "root pointer ref",
+ "schema": {
+ "properties": {
+ "foo": {"$ref": "#"}
+ },
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": {"foo": false},
+ "valid": true
+ },
+ {
+ "description": "recursive match",
+ "data": {"foo": {"foo": false}},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"bar": false},
+ "valid": false
+ },
+ {
+ "description": "recursive mismatch",
+ "data": {"foo": {"bar": false}},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "relative pointer ref to object",
+ "schema": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"$ref": "#/properties/foo"}
+ }
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": {"bar": 3},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"bar": true},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "relative pointer ref to array",
+ "schema": {
+ "items": [
+ {"type": "integer"},
+ {"$ref": "#/items/0"}
+ ]
+ },
+ "tests": [
+ {
+ "description": "match array",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "mismatch array",
+ "data": [1, "foo"],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "escaped pointer ref",
+ "schema": {
+ "tilda~field": {"type": "integer"},
+ "slash/field": {"type": "integer"},
+ "percent%field": {"type": "integer"},
+ "properties": {
+ "tilda": {"$ref": "#/tilda~0field"},
+ "slash": {"$ref": "#/slash~1field"},
+ "percent": {"$ref": "#/percent%25field"}
+ }
+ },
+ "tests": [
+ {
+ "description": "slash invalid",
+ "data": {"slash": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "tilda invalid",
+ "data": {"tilda": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "percent invalid",
+ "data": {"percent": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "slash valid",
+ "data": {"slash": 123},
+ "valid": true
+ },
+ {
+ "description": "tilda valid",
+ "data": {"tilda": 123},
+ "valid": true
+ },
+ {
+ "description": "percent valid",
+ "data": {"percent": 123},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested refs",
+ "schema": {
+ "definitions": {
+ "a": {"type": "integer"},
+ "b": {"$ref": "#/definitions/a"},
+ "c": {"$ref": "#/definitions/b"}
+ },
+ "$ref": "#/definitions/c"
+ },
+ "tests": [
+ {
+ "description": "nested ref valid",
+ "data": 5,
+ "valid": true
+ },
+ {
+ "description": "nested ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ref overrides any sibling keywords",
+ "schema": {
+ "definitions": {
+ "reffed": {
+ "type": "array"
+ }
+ },
+ "properties": {
+ "foo": {
+ "$ref": "#/definitions/reffed",
+ "maxItems": 2
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "ref valid",
+ "data": { "foo": [] },
+ "valid": true
+ },
+ {
+ "description": "ref valid, maxItems ignored",
+ "data": { "foo": [ 1, 2, 3] },
+ "valid": true
+ },
+ {
+ "description": "ref invalid",
+ "data": { "foo": "string" },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "remote ref, containing refs itself",
+ "schema": {"$ref": "http://json-schema.org/draft-04/schema#"},
+ "tests": [
+ {
+ "description": "remote ref valid",
+ "data": {"minLength": 1},
+ "valid": true
+ },
+ {
+ "description": "remote ref invalid",
+ "data": {"minLength": -1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "property named $ref that is not a reference",
+ "schema": {
+ "properties": {
+ "$ref": {"type": "string"}
+ }
+ },
+ "tests": [
+ {
+ "description": "property named $ref valid",
+ "data": {"$ref": "a"},
+ "valid": true
+ },
+ {
+ "description": "property named $ref invalid",
+ "data": {"$ref": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Recursive references between schemas",
+ "schema": {
+ "id": "http://localhost:1234/tree",
+ "description": "tree of nodes",
+ "type": "object",
+ "properties": {
+ "meta": {"type": "string"},
+ "nodes": {
+ "type": "array",
+ "items": {"$ref": "node"}
+ }
+ },
+ "required": ["meta", "nodes"],
+ "definitions": {
+ "node": {
+ "id": "http://localhost:1234/node",
+ "description": "node",
+ "type": "object",
+ "properties": {
+ "value": {"type": "number"},
+ "subtree": {"$ref": "tree"}
+ },
+ "required": ["value"]
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid tree",
+ "data": {
+ "meta": "root",
+ "nodes": [
+ {
+ "value": 1,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 1.1},
+ {"value": 1.2}
+ ]
+ }
+ },
+ {
+ "value": 2,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 2.1},
+ {"value": 2.2}
+ ]
+ }
+ }
+ ]
+ },
+ "valid": true
+ },
+ {
+ "description": "invalid tree",
+ "data": {
+ "meta": "root",
+ "nodes": [
+ {
+ "value": 1,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": "string is invalid"},
+ {"value": 1.2}
+ ]
+ }
+ },
+ {
+ "value": 2,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 2.1},
+ {"value": 2.2}
+ ]
+ }
+ }
+ ]
+ },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "refs with quote",
+ "schema": {
+ "properties": {
+ "foo\"bar": {"$ref": "#/definitions/foo%22bar"}
+ },
+ "definitions": {
+ "foo\"bar": {"type": "number"}
+ }
+ },
+ "tests": [
+ {
+ "description": "object with numbers is valid",
+ "data": {
+ "foo\"bar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with strings is invalid",
+ "data": {
+ "foo\"bar": "1"
+ },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Location-independent identifier",
+ "schema": {
+ "allOf": [{
+ "$ref": "#foo"
+ }],
+ "definitions": {
+ "A": {
+ "id": "#foo",
+ "type": "integer"
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Location-independent identifier with absolute URI",
+ "schema": {
+ "allOf": [{
+ "$ref": "http://localhost:1234/bar#foo"
+ }],
+ "definitions": {
+ "A": {
+ "id": "http://localhost:1234/bar#foo",
+ "type": "integer"
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Location-independent identifier with base URI change in subschema",
+ "schema": {
+ "id": "http://localhost:1234/root",
+ "allOf": [{
+ "$ref": "http://localhost:1234/nested.json#foo"
+ }],
+ "definitions": {
+ "A": {
+ "id": "nested.json",
+ "definitions": {
+ "B": {
+ "id": "#foo",
+ "type": "integer"
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ }
+]
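
The ref cases exercise JSON Pointer resolution ("#/definitions/..."), pointer escaping (~0, ~1, %25), sibling-keyword override, and location-independent ids. Sketch of the plain pointer-chain case, same assumptions as above:

    import jsonschema

    schema = {
        "definitions": {
            "a": {"type": "integer"},
            "b": {"$ref": "#/definitions/a"},
            "c": {"$ref": "#/definitions/b"},
        },
        "$ref": "#/definitions/c",
    }
    validator = jsonschema.Draft4Validator(schema)

    assert validator.is_valid(5)        # resolves c -> b -> a -> integer
    assert not validator.is_valid("a")
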
diff --git a/third_party/python/jsonschema/json/tests/draft4/refRemote.json b/third_party/python/jsonschema/json/tests/draft4/refRemote.json
new file mode 100644
index 0000000000..8611fadc01
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/refRemote.json
@@ -0,0 +1,171 @@
+[
+ {
+ "description": "remote ref",
+ "schema": {"$ref": "http://localhost:1234/integer.json"},
+ "tests": [
+ {
+ "description": "remote ref valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "remote ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "fragment within remote ref",
+ "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"},
+ "tests": [
+ {
+ "description": "remote fragment valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "remote fragment invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ref within remote ref",
+ "schema": {
+ "$ref": "http://localhost:1234/subSchemas.json#/refToInteger"
+ },
+ "tests": [
+ {
+ "description": "ref within ref valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "ref within ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change",
+ "schema": {
+ "id": "http://localhost:1234/",
+ "items": {
+ "id": "folder/",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ },
+ "tests": [
+ {
+ "description": "base URI change ref valid",
+ "data": [[1]],
+ "valid": true
+ },
+ {
+ "description": "base URI change ref invalid",
+ "data": [["a"]],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change - change folder",
+ "schema": {
+ "id": "http://localhost:1234/scope_change_defs1.json",
+ "type" : "object",
+ "properties": {
+ "list": {"$ref": "#/definitions/baz"}
+ },
+ "definitions": {
+ "baz": {
+ "id": "folder/",
+ "type": "array",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": {"list": [1]},
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": {"list": ["a"]},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change - change folder in subschema",
+ "schema": {
+ "id": "http://localhost:1234/scope_change_defs2.json",
+ "type" : "object",
+ "properties": {
+ "list": {"$ref": "#/definitions/baz/definitions/bar"}
+ },
+ "definitions": {
+ "baz": {
+ "id": "folder/",
+ "definitions": {
+ "bar": {
+ "type": "array",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": {"list": [1]},
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": {"list": ["a"]},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "root ref in remote ref",
+ "schema": {
+ "id": "http://localhost:1234/object",
+ "type": "object",
+ "properties": {
+ "name": {"$ref": "name.json#/definitions/orNull"}
+ }
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": {
+ "name": "foo"
+ },
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": {
+ "name": null
+ },
+ "valid": true
+ },
+ {
+ "description": "object is invalid",
+ "data": {
+ "name": {
+ "name": null
+ }
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/required.json b/third_party/python/jsonschema/json/tests/draft4/required.json
new file mode 100644
index 0000000000..9b05318f7c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/required.json
@@ -0,0 +1,89 @@
+[
+ {
+ "description": "required validation",
+ "schema": {
+ "properties": {
+ "foo": {},
+ "bar": {}
+ },
+ "required": ["foo"]
+ },
+ "tests": [
+ {
+ "description": "present required property is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "non-present required property is invalid",
+ "data": {"bar": 1},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required default validation",
+ "schema": {
+ "properties": {
+ "foo": {}
+ }
+ },
+ "tests": [
+ {
+ "description": "not required by default",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required with escaped characters",
+ "schema": {
+ "required": [
+ "foo\nbar",
+ "foo\"bar",
+ "foo\\bar",
+ "foo\rbar",
+ "foo\tbar",
+ "foo\fbar"
+ ]
+ },
+ "tests": [
+ {
+ "description": "object with all properties present is valid",
+ "data": {
+ "foo\nbar": 1,
+ "foo\"bar": 1,
+ "foo\\bar": 1,
+ "foo\rbar": 1,
+ "foo\tbar": 1,
+ "foo\fbar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with some properties missing is invalid",
+ "data": {
+ "foo\nbar": "1",
+ "foo\"bar": "1"
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/type.json b/third_party/python/jsonschema/json/tests/draft4/type.json
new file mode 100644
index 0000000000..ea33b1821f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/type.json
@@ -0,0 +1,464 @@
+[
+ {
+ "description": "integer type matches integers",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "an integer is an integer",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a float is not an integer",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an integer",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "a string is still not an integer, even if it looks like one",
+ "data": "1",
+ "valid": false
+ },
+ {
+ "description": "an object is not an integer",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not an integer",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not an integer",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an integer",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "number type matches numbers",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "an integer is a number",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a float is a number",
+ "data": 1.1,
+ "valid": true
+ },
+ {
+ "description": "a string is not a number",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "a string is still not a number, even if it looks like one",
+ "data": "1",
+ "valid": false
+ },
+ {
+ "description": "an object is not a number",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a number",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not a number",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not a number",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "string type matches strings",
+ "schema": {"type": "string"},
+ "tests": [
+ {
+ "description": "1 is not a string",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not a string",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is a string",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "a string is still a string, even if it looks like a number",
+ "data": "1",
+ "valid": true
+ },
+ {
+ "description": "an empty string is still a string",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "an object is not a string",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a string",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not a string",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not a string",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "object type matches objects",
+ "schema": {"type": "object"},
+ "tests": [
+ {
+ "description": "an integer is not an object",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not an object",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an object",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is an object",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "an array is not an object",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not an object",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an object",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "array type matches arrays",
+ "schema": {"type": "array"},
+ "tests": [
+ {
+ "description": "an integer is not an array",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not an array",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an array",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is not an array",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is an array",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "a boolean is not an array",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an array",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "boolean type matches booleans",
+ "schema": {"type": "boolean"},
+ "tests": [
+ {
+ "description": "an integer is not a boolean",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "zero is not a boolean",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "a float is not a boolean",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not a boolean",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an empty string is not a boolean",
+ "data": "",
+ "valid": false
+ },
+ {
+ "description": "an object is not a boolean",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a boolean",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "true is a boolean",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "false is a boolean",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "null is not a boolean",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "null type matches only the null object",
+ "schema": {"type": "null"},
+ "tests": [
+ {
+ "description": "an integer is not null",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not null",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "zero is not null",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "a string is not null",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an empty string is not null",
+ "data": "",
+ "valid": false
+ },
+ {
+ "description": "an object is not null",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not null",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "true is not null",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "false is not null",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "null is null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple types can be specified in an array",
+ "schema": {"type": ["integer", "string"]},
+ "tests": [
+ {
+ "description": "an integer is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "a float is invalid",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "an object is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type as array with one item",
+ "schema": {
+ "type": ["string"]
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type: array or object",
+ "schema": {
+ "type": ["array", "object"]
+ },
+ "tests": [
+ {
+ "description": "array is valid",
+ "data": [1,2,3],
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": 123},
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type: array, object or null",
+ "schema": {
+ "type": ["array", "object", "null"]
+ },
+ "tests": [
+ {
+ "description": "array is valid",
+ "data": [1,2,3],
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": 123},
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft4/uniqueItems.json b/third_party/python/jsonschema/json/tests/draft4/uniqueItems.json
new file mode 100644
index 0000000000..d312ad71ab
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft4/uniqueItems.json
@@ -0,0 +1,173 @@
+[
+ {
+ "description": "uniqueItems validation",
+ "schema": {"uniqueItems": true},
+ "tests": [
+ {
+ "description": "unique array of integers is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of integers is invalid",
+ "data": [1, 1],
+ "valid": false
+ },
+ {
+ "description": "numbers are unique if mathematically unequal",
+ "data": [1.0, 1.00, 1],
+ "valid": false
+ },
+ {
+ "description": "false is not equal to zero",
+ "data": [0, false],
+ "valid": true
+ },
+ {
+ "description": "true is not equal to one",
+ "data": [1, true],
+ "valid": true
+ },
+ {
+ "description": "unique array of objects is valid",
+ "data": [{"foo": "bar"}, {"foo": "baz"}],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of objects is invalid",
+ "data": [{"foo": "bar"}, {"foo": "bar"}],
+ "valid": false
+ },
+ {
+ "description": "unique array of nested objects is valid",
+ "data": [
+ {"foo": {"bar" : {"baz" : true}}},
+ {"foo": {"bar" : {"baz" : false}}}
+ ],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of nested objects is invalid",
+ "data": [
+ {"foo": {"bar" : {"baz" : true}}},
+ {"foo": {"bar" : {"baz" : true}}}
+ ],
+ "valid": false
+ },
+ {
+ "description": "unique array of arrays is valid",
+ "data": [["foo"], ["bar"]],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of arrays is invalid",
+ "data": [["foo"], ["foo"]],
+ "valid": false
+ },
+ {
+ "description": "1 and true are unique",
+ "data": [1, true],
+ "valid": true
+ },
+ {
+ "description": "0 and false are unique",
+ "data": [0, false],
+ "valid": true
+ },
+ {
+ "description": "unique heterogeneous types are valid",
+ "data": [{}, [1], true, null, 1],
+ "valid": true
+ },
+ {
+ "description": "non-unique heterogeneous types are invalid",
+ "data": [{}, [1], true, null, {}, 1],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "uniqueItems with an array of items",
+ "schema": {
+ "items": [{"type": "boolean"}, {"type": "boolean"}],
+ "uniqueItems": true
+ },
+ "tests": [
+ {
+ "description": "[false, true] from items array is valid",
+ "data": [false, true],
+ "valid": true
+ },
+ {
+ "description": "[true, false] from items array is valid",
+ "data": [true, false],
+ "valid": true
+ },
+ {
+ "description": "[false, false] from items array is not valid",
+ "data": [false, false],
+ "valid": false
+ },
+ {
+ "description": "[true, true] from items array is not valid",
+ "data": [true, true],
+ "valid": false
+ },
+ {
+ "description": "unique array extended from [false, true] is valid",
+ "data": [false, true, "foo", "bar"],
+ "valid": true
+ },
+ {
+ "description": "unique array extended from [true, false] is valid",
+ "data": [true, false, "foo", "bar"],
+ "valid": true
+ },
+ {
+ "description": "non-unique array extended from [false, true] is not valid",
+ "data": [false, true, "foo", "foo"],
+ "valid": false
+ },
+ {
+ "description": "non-unique array extended from [true, false] is not valid",
+ "data": [true, false, "foo", "foo"],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "uniqueItems with an array of items and additionalItems=false",
+ "schema": {
+ "items": [{"type": "boolean"}, {"type": "boolean"}],
+ "uniqueItems": true,
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "[false, true] from items array is valid",
+ "data": [false, true],
+ "valid": true
+ },
+ {
+ "description": "[true, false] from items array is valid",
+ "data": [true, false],
+ "valid": true
+ },
+ {
+ "description": "[false, false] from items array is not valid",
+ "data": [false, false],
+ "valid": false
+ },
+ {
+ "description": "[true, true] from items array is not valid",
+ "data": [true, true],
+ "valid": false
+ },
+ {
+ "description": "extra items are invalid even if unique",
+ "data": [false, true, null],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/additionalItems.json b/third_party/python/jsonschema/json/tests/draft6/additionalItems.json
new file mode 100644
index 0000000000..abecc578be
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/additionalItems.json
@@ -0,0 +1,87 @@
+[
+ {
+ "description": "additionalItems as schema",
+ "schema": {
+ "items": [{}],
+ "additionalItems": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "additional items match schema",
+ "data": [ null, 2, 3, 4 ],
+ "valid": true
+ },
+ {
+ "description": "additional items do not match schema",
+ "data": [ null, 2, 3, "foo" ],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "items is schema, no additionalItems",
+ "schema": {
+ "items": {},
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "all items match schema",
+ "data": [ 1, 2, 3, 4, 5 ],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "array of items with no additionalItems",
+ "schema": {
+ "items": [{}, {}, {}],
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "fewer number of items present",
+ "data": [ 1, 2 ],
+ "valid": true
+ },
+ {
+ "description": "equal number of items present",
+ "data": [ 1, 2, 3 ],
+ "valid": true
+ },
+ {
+ "description": "additional items are not permitted",
+ "data": [ 1, 2, 3, 4 ],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "additionalItems as false without items",
+ "schema": {"additionalItems": false},
+ "tests": [
+ {
+ "description":
+ "items defaults to empty schema so everything is valid",
+ "data": [ 1, 2, 3, 4, 5 ],
+ "valid": true
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": {"foo" : "bar"},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "additionalItems are allowed by default",
+ "schema": {"items": [{"type": "integer"}]},
+ "tests": [
+ {
+ "description": "only the first item is validated",
+ "data": [1, "foo", false],
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/additionalProperties.json b/third_party/python/jsonschema/json/tests/draft6/additionalProperties.json
new file mode 100644
index 0000000000..ffeac6b381
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/additionalProperties.json
@@ -0,0 +1,133 @@
+[
+ {
+ "description":
+ "additionalProperties being false does not allow other properties",
+ "schema": {
+ "properties": {"foo": {}, "bar": {}},
+ "patternProperties": { "^v": {} },
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "no additional properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "an additional property is invalid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : "boom"},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobarbaz",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "patternProperties are not additional properties",
+ "data": {"foo":1, "vroom": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "non-ASCII pattern with additionalProperties",
+ "schema": {
+ "patternProperties": {"^á": {}},
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "matching the pattern is valid",
+ "data": {"ármányos": 2},
+ "valid": true
+ },
+ {
+ "description": "not matching the pattern is invalid",
+ "data": {"élmény": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description":
+ "additionalProperties allows a schema which should validate",
+ "schema": {
+ "properties": {"foo": {}, "bar": {}},
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "no additional properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "an additional valid property is valid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : true},
+ "valid": true
+ },
+ {
+ "description": "an additional invalid property is invalid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : 12},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description":
+ "additionalProperties can exist by itself",
+ "schema": {
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "an additional valid property is valid",
+ "data": {"foo" : true},
+ "valid": true
+ },
+ {
+ "description": "an additional invalid property is invalid",
+ "data": {"foo" : 1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "additionalProperties are allowed by default",
+ "schema": {"properties": {"foo": {}, "bar": {}}},
+ "tests": [
+ {
+ "description": "additional properties are allowed",
+ "data": {"foo": 1, "bar": 2, "quux": true},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "additionalProperties should not look in applicators",
+ "schema": {
+ "allOf": [
+ {"properties": {"foo": {}}}
+ ],
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "properties defined in allOf are not allowed",
+ "data": {"foo": 1, "bar": true},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/allOf.json b/third_party/python/jsonschema/json/tests/draft6/allOf.json
new file mode 100644
index 0000000000..eb612091aa
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/allOf.json
@@ -0,0 +1,218 @@
+[
+ {
+ "description": "allOf",
+ "schema": {
+ "allOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "allOf",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "mismatch second",
+ "data": {"foo": "baz"},
+ "valid": false
+ },
+ {
+ "description": "mismatch first",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "wrong type",
+ "data": {"foo": "baz", "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with base schema",
+ "schema": {
+ "properties": {"bar": {"type": "integer"}},
+ "required": ["bar"],
+ "allOf" : [
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ },
+ {
+ "properties": {
+ "baz": {"type": "null"}
+ },
+ "required": ["baz"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": {"foo": "quux", "bar": 2, "baz": null},
+ "valid": true
+ },
+ {
+ "description": "mismatch base schema",
+ "data": {"foo": "quux", "baz": null},
+ "valid": false
+ },
+ {
+ "description": "mismatch first allOf",
+ "data": {"bar": 2, "baz": null},
+ "valid": false
+ },
+ {
+ "description": "mismatch second allOf",
+ "data": {"foo": "quux", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "mismatch both",
+ "data": {"bar": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf simple types",
+ "schema": {
+ "allOf": [
+ {"maximum": 30},
+ {"minimum": 20}
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": 25,
+ "valid": true
+ },
+ {
+ "description": "mismatch one",
+ "data": 35,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with boolean schemas, all true",
+ "schema": {"allOf": [true, true]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with boolean schemas, some false",
+ "schema": {"allOf": [true, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with boolean schemas, all false",
+ "schema": {"allOf": [false, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with one empty schema",
+ "schema": {
+ "allOf": [
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "any data is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with two empty schemas",
+ "schema": {
+ "allOf": [
+ {},
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "any data is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with the first empty schema",
+ "schema": {
+ "allOf": [
+ {},
+ { "type": "number" }
+ ]
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with the last empty schema",
+ "schema": {
+ "allOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/anyOf.json b/third_party/python/jsonschema/json/tests/draft6/anyOf.json
new file mode 100644
index 0000000000..ab5eb386b4
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/anyOf.json
@@ -0,0 +1,189 @@
+[
+ {
+ "description": "anyOf",
+ "schema": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "minimum": 2
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first anyOf valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "second anyOf valid",
+ "data": 2.5,
+ "valid": true
+ },
+ {
+ "description": "both anyOf valid",
+ "data": 3,
+ "valid": true
+ },
+ {
+ "description": "neither anyOf valid",
+ "data": 1.5,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with base schema",
+ "schema": {
+ "type": "string",
+ "anyOf" : [
+ {
+ "maxLength": 2
+ },
+ {
+ "minLength": 4
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "mismatch base schema",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "one anyOf valid",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "both anyOf invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with boolean schemas, all true",
+ "schema": {"anyOf": [true, true]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "anyOf with boolean schemas, some true",
+ "schema": {"anyOf": [true, false]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "anyOf with boolean schemas, all false",
+ "schema": {"anyOf": [false, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf complex types",
+ "schema": {
+ "anyOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first anyOf valid (complex)",
+ "data": {"bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second anyOf valid (complex)",
+ "data": {"foo": "baz"},
+ "valid": true
+ },
+ {
+ "description": "both anyOf valid (complex)",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "neither anyOf valid (complex)",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with one empty schema",
+ "schema": {
+ "anyOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "number is valid",
+ "data": 123,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested anyOf, to check validation semantics",
+ "schema": {
+ "anyOf": [
+ {
+ "anyOf": [
+ {
+ "type": "null"
+ }
+ ]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "anything non-null is invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/boolean_schema.json b/third_party/python/jsonschema/json/tests/draft6/boolean_schema.json
new file mode 100644
index 0000000000..6d40f23f26
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/boolean_schema.json
@@ -0,0 +1,104 @@
+[
+ {
+ "description": "boolean schema 'true'",
+ "schema": true,
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "boolean true is valid",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "boolean false is valid",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": "bar"},
+ "valid": true
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "array is valid",
+ "data": ["foo"],
+ "valid": true
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "boolean schema 'false'",
+ "schema": false,
+ "tests": [
+ {
+ "description": "number is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "boolean true is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "boolean false is invalid",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ },
+ {
+ "description": "object is invalid",
+ "data": {"foo": "bar"},
+ "valid": false
+ },
+ {
+ "description": "empty object is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "array is invalid",
+ "data": ["foo"],
+ "valid": false
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/const.json b/third_party/python/jsonschema/json/tests/draft6/const.json
new file mode 100644
index 0000000000..c089625dc4
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/const.json
@@ -0,0 +1,170 @@
+[
+ {
+ "description": "const validation",
+ "schema": {"const": 2},
+ "tests": [
+ {
+ "description": "same value is valid",
+ "data": 2,
+ "valid": true
+ },
+ {
+ "description": "another value is invalid",
+ "data": 5,
+ "valid": false
+ },
+ {
+ "description": "another type is invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with object",
+ "schema": {"const": {"foo": "bar", "baz": "bax"}},
+ "tests": [
+ {
+ "description": "same object is valid",
+ "data": {"foo": "bar", "baz": "bax"},
+ "valid": true
+ },
+ {
+ "description": "same object with different property order is valid",
+ "data": {"baz": "bax", "foo": "bar"},
+ "valid": true
+ },
+ {
+ "description": "another object is invalid",
+ "data": {"foo": "bar"},
+ "valid": false
+ },
+ {
+ "description": "another type is invalid",
+ "data": [1, 2],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with array",
+ "schema": {"const": [{ "foo": "bar" }]},
+ "tests": [
+ {
+ "description": "same array is valid",
+ "data": [{"foo": "bar"}],
+ "valid": true
+ },
+ {
+ "description": "another array item is invalid",
+ "data": [2],
+ "valid": false
+ },
+ {
+ "description": "array with additional items is invalid",
+ "data": [1, 2, 3],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with null",
+ "schema": {"const": null},
+ "tests": [
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "not null is invalid",
+ "data": 0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with false does not match 0",
+ "schema": {"const": false},
+ "tests": [
+ {
+ "description": "false is valid",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "integer zero is invalid",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "float zero is invalid",
+ "data": 0.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with true does not match 1",
+ "schema": {"const": true},
+ "tests": [
+ {
+ "description": "true is valid",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "integer one is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "float one is invalid",
+ "data": 1.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with 0 does not match false",
+ "schema": {"const": 0},
+ "tests": [
+ {
+ "description": "false is invalid",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "integer zero is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "float zero is valid",
+ "data": 0.0,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "const with 1 does not match true",
+ "schema": {"const": 1},
+ "tests": [
+ {
+ "description": "true is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "integer one is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "float one is valid",
+ "data": 1.0,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/contains.json b/third_party/python/jsonschema/json/tests/draft6/contains.json
new file mode 100644
index 0000000000..67ecbd991a
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/contains.json
@@ -0,0 +1,100 @@
+[
+ {
+ "description": "contains keyword validation",
+ "schema": {
+ "contains": {"minimum": 5}
+ },
+ "tests": [
+ {
+ "description": "array with item matching schema (5) is valid",
+ "data": [3, 4, 5],
+ "valid": true
+ },
+ {
+ "description": "array with item matching schema (6) is valid",
+ "data": [3, 4, 6],
+ "valid": true
+ },
+ {
+ "description": "array with two items matching schema (5, 6) is valid",
+ "data": [3, 4, 5, 6],
+ "valid": true
+ },
+ {
+ "description": "array without items matching schema is invalid",
+ "data": [2, 3, 4],
+ "valid": false
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "not array is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "contains keyword with const keyword",
+ "schema": {
+ "contains": { "const": 5 }
+ },
+ "tests": [
+ {
+ "description": "array with item 5 is valid",
+ "data": [3, 4, 5],
+ "valid": true
+ },
+ {
+ "description": "array with two items 5 is valid",
+ "data": [3, 4, 5, 5],
+ "valid": true
+ },
+ {
+ "description": "array without item 5 is invalid",
+ "data": [1, 2, 3, 4],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "contains keyword with boolean schema true",
+ "schema": {"contains": true},
+ "tests": [
+ {
+ "description": "any non-empty array is valid",
+ "data": ["foo"],
+ "valid": true
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "contains keyword with boolean schema false",
+ "schema": {"contains": false},
+ "tests": [
+ {
+ "description": "any non-empty array is invalid",
+ "data": ["foo"],
+ "valid": false
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "non-arrays are valid",
+ "data": "contains does not apply to strings",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/default.json b/third_party/python/jsonschema/json/tests/draft6/default.json
new file mode 100644
index 0000000000..17629779fb
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/default.json
@@ -0,0 +1,49 @@
+[
+ {
+ "description": "invalid type for default",
+ "schema": {
+ "properties": {
+ "foo": {
+ "type": "integer",
+ "default": []
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when property is specified",
+ "data": {"foo": 13},
+ "valid": true
+ },
+ {
+ "description": "still valid when the invalid default is used",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "invalid string value for default",
+ "schema": {
+ "properties": {
+ "bar": {
+ "type": "string",
+ "minLength": 4,
+ "default": "bad"
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when property is specified",
+ "data": {"bar": "good"},
+ "valid": true
+ },
+ {
+ "description": "still valid when the invalid default is used",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/definitions.json b/third_party/python/jsonschema/json/tests/draft6/definitions.json
new file mode 100644
index 0000000000..7f3b8997d5
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/definitions.json
@@ -0,0 +1,32 @@
+[
+ {
+ "description": "valid definition",
+ "schema": {"$ref": "http://json-schema.org/draft-06/schema#"},
+ "tests": [
+ {
+ "description": "valid definition schema",
+ "data": {
+ "definitions": {
+ "foo": {"type": "integer"}
+ }
+ },
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "invalid definition",
+ "schema": {"$ref": "http://json-schema.org/draft-06/schema#"},
+ "tests": [
+ {
+ "description": "invalid definition schema",
+ "data": {
+ "definitions": {
+ "foo": {"type": 1}
+ }
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/dependencies.json b/third_party/python/jsonschema/json/tests/draft6/dependencies.json
new file mode 100644
index 0000000000..8dd78aa5d8
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/dependencies.json
@@ -0,0 +1,268 @@
+[
+ {
+ "description": "dependencies",
+ "schema": {
+ "dependencies": {"bar": ["foo"]}
+ },
+ "tests": [
+ {
+ "description": "neither",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "nondependant",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "with dependency",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "missing dependency",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": ["bar"],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "dependencies with empty array",
+ "schema": {
+ "dependencies": {"bar": []}
+ },
+ "tests": [
+ {
+ "description": "empty object",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "object with one property",
+ "data": {"bar": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple dependencies",
+ "schema": {
+ "dependencies": {"quux": ["foo", "bar"]}
+ },
+ "tests": [
+ {
+ "description": "neither",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "nondependants",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "with dependencies",
+ "data": {"foo": 1, "bar": 2, "quux": 3},
+ "valid": true
+ },
+ {
+ "description": "missing dependency",
+ "data": {"foo": 1, "quux": 2},
+ "valid": false
+ },
+ {
+ "description": "missing other dependency",
+ "data": {"bar": 1, "quux": 2},
+ "valid": false
+ },
+ {
+ "description": "missing both dependencies",
+ "data": {"quux": 1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "multiple dependencies subschema",
+ "schema": {
+ "dependencies": {
+ "bar": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"type": "integer"}
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "no dependency",
+ "data": {"foo": "quux"},
+ "valid": true
+ },
+ {
+ "description": "wrong type",
+ "data": {"foo": "quux", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "wrong type other",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ },
+ {
+ "description": "wrong type both",
+ "data": {"foo": "quux", "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "dependencies with boolean subschemas",
+ "schema": {
+ "dependencies": {
+ "foo": true,
+ "bar": false
+ }
+ },
+ "tests": [
+ {
+ "description": "object with property having schema true is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "object with property having schema false is invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "object with both properties is invalid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "empty array of dependencies",
+ "schema": {
+ "dependencies": {
+ "foo": []
+ }
+ },
+ "tests": [
+ {
+ "description": "object with property is valid",
+ "data": { "foo": 1 },
+ "valid": true
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "non-object is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "dependencies with escaped characters",
+ "schema": {
+ "dependencies": {
+ "foo\nbar": ["foo\rbar"],
+ "foo\tbar": {
+ "minProperties": 4
+ },
+ "foo'bar": {"required": ["foo\"bar"]},
+ "foo\"bar": ["foo'bar"]
+ }
+ },
+ "tests": [
+ {
+ "description": "valid object 1",
+ "data": {
+ "foo\nbar": 1,
+ "foo\rbar": 2
+ },
+ "valid": true
+ },
+ {
+ "description": "valid object 2",
+ "data": {
+ "foo\tbar": 1,
+ "a": 2,
+ "b": 3,
+ "c": 4
+ },
+ "valid": true
+ },
+ {
+ "description": "valid object 3",
+ "data": {
+ "foo'bar": 1,
+ "foo\"bar": 2
+ },
+ "valid": true
+ },
+ {
+ "description": "invalid object 1",
+ "data": {
+ "foo\nbar": 1,
+ "foo": 2
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 2",
+ "data": {
+ "foo\tbar": 1,
+ "a": 2
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 3",
+ "data": {
+ "foo'bar": 1
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 4",
+ "data": {
+ "foo\"bar": 2
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/enum.json b/third_party/python/jsonschema/json/tests/draft6/enum.json
new file mode 100644
index 0000000000..32d79026e1
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/enum.json
@@ -0,0 +1,179 @@
+[
+ {
+ "description": "simple enum validation",
+ "schema": {"enum": [1, 2, 3]},
+ "tests": [
+ {
+ "description": "one of the enum is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "something else is invalid",
+ "data": 4,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "heterogeneous enum validation",
+ "schema": {"enum": [6, "foo", [], true, {"foo": 12}]},
+ "tests": [
+ {
+ "description": "one of the enum is valid",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "something else is invalid",
+ "data": null,
+ "valid": false
+ },
+ {
+ "description": "objects are deep compared",
+ "data": {"foo": false},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enums in properties",
+ "schema": {
+ "type":"object",
+ "properties": {
+ "foo": {"enum":["foo"]},
+ "bar": {"enum":["bar"]}
+ },
+ "required": ["bar"]
+ },
+ "tests": [
+ {
+ "description": "both properties are valid",
+ "data": {"foo":"foo", "bar":"bar"},
+ "valid": true
+ },
+ {
+ "description": "missing optional property is valid",
+ "data": {"bar":"bar"},
+ "valid": true
+ },
+ {
+ "description": "missing required property is invalid",
+ "data": {"foo":"foo"},
+ "valid": false
+ },
+ {
+ "description": "missing all properties is invalid",
+ "data": {},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with escaped characters",
+ "schema": {
+ "enum": ["foo\nbar", "foo\rbar"]
+ },
+ "tests": [
+ {
+ "description": "member 1 is valid",
+ "data": "foo\nbar",
+ "valid": true
+ },
+ {
+ "description": "member 2 is valid",
+ "data": "foo\rbar",
+ "valid": true
+ },
+ {
+ "description": "another string is invalid",
+ "data": "abc",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with false does not match 0",
+ "schema": {"enum": [false]},
+ "tests": [
+ {
+ "description": "false is valid",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "integer zero is invalid",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "float zero is invalid",
+ "data": 0.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with true does not match 1",
+ "schema": {"enum": [true]},
+ "tests": [
+ {
+ "description": "true is valid",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "integer one is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "float one is invalid",
+ "data": 1.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with 0 does not match false",
+ "schema": {"enum": [0]},
+ "tests": [
+ {
+ "description": "false is invalid",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "integer zero is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "float zero is valid",
+ "data": 0.0,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "enum with 1 does not match true",
+ "schema": {"enum": [1]},
+ "tests": [
+ {
+ "description": "true is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "integer one is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "float one is valid",
+ "data": 1.0,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/exclusiveMaximum.json b/third_party/python/jsonschema/json/tests/draft6/exclusiveMaximum.json
new file mode 100644
index 0000000000..dc3cd709d3
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/exclusiveMaximum.json
@@ -0,0 +1,30 @@
+[
+ {
+ "description": "exclusiveMaximum validation",
+ "schema": {
+ "exclusiveMaximum": 3.0
+ },
+ "tests": [
+ {
+ "description": "below the exclusiveMaximum is valid",
+ "data": 2.2,
+ "valid": true
+ },
+ {
+ "description": "boundary point is invalid",
+ "data": 3.0,
+ "valid": false
+ },
+ {
+ "description": "above the exclusiveMaximum is invalid",
+ "data": 3.5,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/exclusiveMinimum.json b/third_party/python/jsonschema/json/tests/draft6/exclusiveMinimum.json
new file mode 100644
index 0000000000..b38d7ecec6
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/exclusiveMinimum.json
@@ -0,0 +1,30 @@
+[
+ {
+ "description": "exclusiveMinimum validation",
+ "schema": {
+ "exclusiveMinimum": 1.1
+ },
+ "tests": [
+ {
+ "description": "above the exclusiveMinimum is valid",
+ "data": 1.2,
+ "valid": true
+ },
+ {
+ "description": "boundary point is invalid",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "below the exclusiveMinimum is invalid",
+ "data": 0.6,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/format.json b/third_party/python/jsonschema/json/tests/draft6/format.json
new file mode 100644
index 0000000000..32e81524aa
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/format.json
@@ -0,0 +1,326 @@
+[
+ {
+ "description": "validation of e-mail addresses",
+ "schema": {"format": "email"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IP addresses",
+ "schema": {"format": "ipv4"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IPv6 addresses",
+ "schema": {"format": "ipv6"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of hostnames",
+ "schema": {"format": "hostname"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of date-time strings",
+ "schema": {"format": "date-time"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of JSON pointers",
+ "schema": {"format": "json-pointer"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URIs",
+ "schema": {"format": "uri"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URI references",
+ "schema": {"format": "uri-reference"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URI templates",
+ "schema": {"format": "uri-template"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/items.json b/third_party/python/jsonschema/json/tests/draft6/items.json
new file mode 100644
index 0000000000..67f11840a2
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/items.json
@@ -0,0 +1,250 @@
+[
+ {
+ "description": "a schema given for items",
+ "schema": {
+ "items": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "valid items",
+ "data": [ 1, 2, 3 ],
+ "valid": true
+ },
+ {
+ "description": "wrong type of items",
+ "data": [1, "x"],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": {"foo" : "bar"},
+ "valid": true
+ },
+ {
+ "description": "JavaScript pseudo-array is valid",
+ "data": {
+ "0": "invalid",
+ "length": 1
+ },
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "an array of schemas for items",
+ "schema": {
+ "items": [
+ {"type": "integer"},
+ {"type": "string"}
+ ]
+ },
+ "tests": [
+ {
+ "description": "correct types",
+ "data": [ 1, "foo" ],
+ "valid": true
+ },
+ {
+ "description": "wrong types",
+ "data": [ "foo", 1 ],
+ "valid": false
+ },
+ {
+ "description": "incomplete array of items",
+ "data": [ 1 ],
+ "valid": true
+ },
+ {
+ "description": "array with additional items",
+ "data": [ 1, "foo", true ],
+ "valid": true
+ },
+ {
+ "description": "empty array",
+ "data": [ ],
+ "valid": true
+ },
+ {
+ "description": "JavaScript pseudo-array is valid",
+ "data": {
+ "0": "invalid",
+ "1": "valid",
+ "length": 2
+ },
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items with boolean schema (true)",
+ "schema": {"items": true},
+ "tests": [
+ {
+ "description": "any array is valid",
+ "data": [ 1, "foo", true ],
+ "valid": true
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items with boolean schema (false)",
+ "schema": {"items": false},
+ "tests": [
+ {
+ "description": "any non-empty array is invalid",
+ "data": [ 1, "foo", true ],
+ "valid": false
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items with boolean schemas",
+ "schema": {
+ "items": [true, false]
+ },
+ "tests": [
+ {
+ "description": "array with one item is valid",
+ "data": [ 1 ],
+ "valid": true
+ },
+ {
+ "description": "array with two items is invalid",
+ "data": [ 1, "foo" ],
+ "valid": false
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items and subitems",
+ "schema": {
+ "definitions": {
+ "item": {
+ "type": "array",
+ "additionalItems": false,
+ "items": [
+ { "$ref": "#/definitions/sub-item" },
+ { "$ref": "#/definitions/sub-item" }
+ ]
+ },
+ "sub-item": {
+ "type": "object",
+ "required": ["foo"]
+ }
+ },
+ "type": "array",
+ "additionalItems": false,
+ "items": [
+ { "$ref": "#/definitions/item" },
+ { "$ref": "#/definitions/item" },
+ { "$ref": "#/definitions/item" }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid items",
+ "data": [
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": true
+ },
+ {
+ "description": "too many items",
+ "data": [
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "too many sub-items",
+ "data": [
+ [ {"foo": null}, {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "wrong item",
+ "data": [
+ {"foo": null},
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "wrong sub-item",
+ "data": [
+ [ {}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "fewer items is valid",
+ "data": [
+ [ {"foo": null} ],
+ [ {"foo": null} ]
+ ],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested items",
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "number"
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid nested array",
+ "data": [[[[1]], [[2],[3]]], [[[4], [5], [6]]]],
+ "valid": true
+ },
+ {
+ "description": "nested array with invalid type",
+ "data": [[[["1"]], [[2],[3]]], [[[4], [5], [6]]]],
+ "valid": false
+ },
+ {
+ "description": "not deep enough",
+ "data": [[[1], [2],[3]], [[4], [5], [6]]],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/maxItems.json b/third_party/python/jsonschema/json/tests/draft6/maxItems.json
new file mode 100644
index 0000000000..3b53a6b371
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/maxItems.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "maxItems validation",
+ "schema": {"maxItems": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": [1],
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": [1, 2, 3],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": "foobar",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/maxLength.json b/third_party/python/jsonschema/json/tests/draft6/maxLength.json
new file mode 100644
index 0000000000..811d35b253
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/maxLength.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "maxLength validation",
+ "schema": {"maxLength": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": "f",
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": "fo",
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ },
+ {
+                "description": "two supplementary Unicode code points is not too long",
+ "data": "\uD83D\uDCA9\uD83D\uDCA9",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/maxProperties.json b/third_party/python/jsonschema/json/tests/draft6/maxProperties.json
new file mode 100644
index 0000000000..513731e4c8
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/maxProperties.json
@@ -0,0 +1,38 @@
+[
+ {
+ "description": "maxProperties validation",
+ "schema": {"maxProperties": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": {"foo": 1, "bar": 2, "baz": 3},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/maximum.json b/third_party/python/jsonschema/json/tests/draft6/maximum.json
new file mode 100644
index 0000000000..8150984ee5
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/maximum.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "maximum validation",
+ "schema": {"maximum": 3.0},
+ "tests": [
+ {
+ "description": "below the maximum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": 3.0,
+ "valid": true
+ },
+ {
+ "description": "above the maximum is invalid",
+ "data": 3.5,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/minItems.json b/third_party/python/jsonschema/json/tests/draft6/minItems.json
new file mode 100644
index 0000000000..ed5118815e
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/minItems.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "minItems validation",
+ "schema": {"minItems": 1},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": [1],
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": "",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/minLength.json b/third_party/python/jsonschema/json/tests/draft6/minLength.json
new file mode 100644
index 0000000000..3f09158dee
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/minLength.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "minLength validation",
+ "schema": {"minLength": 2},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": "fo",
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": "f",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "one supplementary Unicode code point is not long enough",
+ "data": "\uD83D\uDCA9",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/minProperties.json b/third_party/python/jsonschema/json/tests/draft6/minProperties.json
new file mode 100644
index 0000000000..49a0726e01
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/minProperties.json
@@ -0,0 +1,38 @@
+[
+ {
+ "description": "minProperties validation",
+ "schema": {"minProperties": 1},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/minimum.json b/third_party/python/jsonschema/json/tests/draft6/minimum.json
new file mode 100644
index 0000000000..2a9c42b3c4
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/minimum.json
@@ -0,0 +1,59 @@
+[
+ {
+ "description": "minimum validation",
+ "schema": {"minimum": 1.1},
+ "tests": [
+ {
+ "description": "above the minimum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": 1.1,
+ "valid": true
+ },
+ {
+ "description": "below the minimum is invalid",
+ "data": 0.6,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "minimum validation with signed integer",
+ "schema": {"minimum": -2},
+ "tests": [
+ {
+ "description": "negative above the minimum is valid",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "positive above the minimum is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": -2,
+ "valid": true
+ },
+ {
+ "description": "below the minimum is invalid",
+ "data": -3,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/multipleOf.json b/third_party/python/jsonschema/json/tests/draft6/multipleOf.json
new file mode 100644
index 0000000000..ca3b761805
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/multipleOf.json
@@ -0,0 +1,60 @@
+[
+ {
+ "description": "by int",
+ "schema": {"multipleOf": 2},
+ "tests": [
+ {
+ "description": "int by int",
+ "data": 10,
+ "valid": true
+ },
+ {
+ "description": "int by int fail",
+ "data": 7,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "by number",
+ "schema": {"multipleOf": 1.5},
+ "tests": [
+ {
+ "description": "zero is multiple of anything",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "4.5 is multiple of 1.5",
+ "data": 4.5,
+ "valid": true
+ },
+ {
+ "description": "35 is not multiple of 1.5",
+ "data": 35,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "by small number",
+ "schema": {"multipleOf": 0.0001},
+ "tests": [
+ {
+ "description": "0.0075 is multiple of 0.0001",
+ "data": 0.0075,
+ "valid": true
+ },
+ {
+ "description": "0.00751 is not multiple of 0.0001",
+ "data": 0.00751,
+ "valid": false
+ }
+ ]
+ }
+]
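
The "by small number" group above is the usual trap for validators that decide multipleOf with plain floating-point arithmetic, since values such as 0.0075 and 0.0001 have no exact binary representation. A hedged sketch of one exact approach, not the vendored implementation:

from fractions import Fraction

def is_multiple_of(value, divisor):
    # Go through Fraction (via the decimal string form) so the comparison is
    # exact instead of depending on float rounding.
    return Fraction(str(value)) % Fraction(str(divisor)) == 0

assert is_multiple_of(4.5, 1.5)             # "4.5 is multiple of 1.5"
assert not is_multiple_of(35, 1.5)          # "35 is not multiple of 1.5"
assert is_multiple_of(0.0075, 0.0001)       # "0.0075 is multiple of 0.0001"
assert not is_multiple_of(0.00751, 0.0001)  # "0.00751 is not multiple of 0.0001"
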
diff --git a/third_party/python/jsonschema/json/tests/draft6/not.json b/third_party/python/jsonschema/json/tests/draft6/not.json
new file mode 100644
index 0000000000..98de0eda8d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/not.json
@@ -0,0 +1,117 @@
+[
+ {
+ "description": "not",
+ "schema": {
+ "not": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "allowed",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "disallowed",
+ "data": 1,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not multiple types",
+ "schema": {
+ "not": {"type": ["integer", "boolean"]}
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "other mismatch",
+ "data": true,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not more complex schema",
+ "schema": {
+ "not": {
+ "type": "object",
+ "properties": {
+ "foo": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "other match",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"foo": "bar"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "forbidden property",
+ "schema": {
+ "properties": {
+ "foo": {
+ "not": {}
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "property present",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "property absent",
+ "data": {"bar": 1, "baz": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "not with boolean schema true",
+ "schema": {"not": true},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not with boolean schema false",
+ "schema": {"not": false},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/oneOf.json b/third_party/python/jsonschema/json/tests/draft6/oneOf.json
new file mode 100644
index 0000000000..57640b7afb
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/oneOf.json
@@ -0,0 +1,206 @@
+[
+ {
+ "description": "oneOf",
+ "schema": {
+ "oneOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "minimum": 2
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first oneOf valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "second oneOf valid",
+ "data": 2.5,
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "neither oneOf valid",
+ "data": 1.5,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with base schema",
+ "schema": {
+ "type": "string",
+ "oneOf" : [
+ {
+ "minLength": 2
+ },
+ {
+ "maxLength": 4
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "mismatch base schema",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "one oneOf valid",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, all true",
+ "schema": {"oneOf": [true, true, true]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, one true",
+ "schema": {"oneOf": [true, false, false]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, more than one true",
+ "schema": {"oneOf": [true, true, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, all false",
+ "schema": {"oneOf": [false, false, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf complex types",
+ "schema": {
+ "oneOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first oneOf valid (complex)",
+ "data": {"bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second oneOf valid (complex)",
+ "data": {"foo": "baz"},
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid (complex)",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "neither oneOf valid (complex)",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with empty schema",
+ "schema": {
+ "oneOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "one valid - valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "both valid - invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with required",
+ "schema": {
+ "type": "object",
+ "oneOf": [
+ { "required": ["foo", "bar"] },
+ { "required": ["foo", "baz"] }
+ ]
+ },
+ "tests": [
+ {
+ "description": "both invalid - invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "first valid - valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second valid - valid",
+ "data": {"foo": 1, "baz": 3},
+ "valid": true
+ },
+ {
+ "description": "both valid - invalid",
+ "data": {"foo": 1, "bar": 2, "baz" : 3},
+ "valid": false
+ }
+ ]
+ }
+]
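
The oneOf keyword requires exactly one subschema to accept the instance, which is why every "both oneOf valid" case above is invalid. A minimal, hedged sketch of that counting rule, with hand-written predicates standing in for the first group's two subschemas (illustrative only):

def one_of(instance, subschema_checks):
    # Exactly one predicate may accept the instance.
    return sum(1 for accepts in subschema_checks if accepts(instance)) == 1

# Stand-ins for {"type": "integer"} and {"minimum": 2} from the first group.
checks = [
    lambda value: isinstance(value, int) and not isinstance(value, bool),
    lambda value: isinstance(value, (int, float)) and value >= 2,
]
assert one_of(1, checks)        # first oneOf valid
assert one_of(2.5, checks)      # second oneOf valid
assert not one_of(3, checks)    # both oneOf valid -> invalid
assert not one_of(1.5, checks)  # neither oneOf valid -> invalid
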
diff --git a/third_party/python/jsonschema/json/tests/draft6/optional/bignum.json b/third_party/python/jsonschema/json/tests/draft6/optional/bignum.json
new file mode 100644
index 0000000000..fac275e21f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/optional/bignum.json
@@ -0,0 +1,105 @@
+[
+ {
+ "description": "integer",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "a bignum is an integer",
+ "data": 12345678910111213141516171819202122232425262728293031,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "number",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "a bignum is a number",
+ "data": 98249283749234923498293171823948729348710298301928331,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "integer",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "a negative bignum is an integer",
+ "data": -12345678910111213141516171819202122232425262728293031,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "number",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "a negative bignum is a number",
+ "data": -98249283749234923498293171823948729348710298301928331,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "string",
+ "schema": {"type": "string"},
+ "tests": [
+ {
+ "description": "a bignum is not a string",
+ "data": 98249283749234923498293171823948729348710298301928331,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "integer comparison",
+ "schema": {"maximum": 18446744073709551615},
+ "tests": [
+ {
+ "description": "comparison works for high numbers",
+ "data": 18446744073709551600,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "float comparison with high precision",
+ "schema": {
+ "exclusiveMaximum": 972783798187987123879878123.18878137
+ },
+ "tests": [
+ {
+ "description": "comparison works for high numbers",
+ "data": 972783798187987123879878123.188781371,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "integer comparison",
+ "schema": {"minimum": -18446744073709551615},
+ "tests": [
+ {
+ "description": "comparison works for very negative numbers",
+ "data": -18446744073709551600,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "float comparison with high precision on negative numbers",
+ "schema": {
+ "exclusiveMinimum": -972783798187987123879878123.18878137
+ },
+ "tests": [
+ {
+ "description": "comparison works for very negative numbers",
+ "data": -972783798187987123879878123.188781371,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/optional/ecmascript-regex.json b/third_party/python/jsonschema/json/tests/draft6/optional/ecmascript-regex.json
new file mode 100644
index 0000000000..d82e0feb03
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/optional/ecmascript-regex.json
@@ -0,0 +1,213 @@
+[
+ {
+ "description": "ECMA 262 regex non-compliance",
+ "schema": { "format": "regex" },
+ "tests": [
+ {
+ "description": "ECMA 262 has no support for \\Z anchor from .NET",
+ "data": "^\\S(|(.|\\n)*\\S)\\Z",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex $ does not match trailing newline",
+ "schema": {
+ "type": "string",
+ "pattern": "^abc$"
+ },
+ "tests": [
+ {
+ "description": "matches in Python, but should not in jsonschema",
+ "data": "abc\n",
+ "valid": false
+ },
+ {
+ "description": "should match",
+ "data": "abc",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex converts \\a to ascii BEL",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\a$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\a",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0007",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex escapes control codes with \\c and upper letter",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\cC$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\cC",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0003",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex escapes control codes with \\c and lower letter",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\cc$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\cc",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0003",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\d matches ascii digits only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\d$"
+ },
+ "tests": [
+ {
+ "description": "ASCII zero matches",
+ "data": "0",
+ "valid": true
+ },
+ {
+ "description": "NKO DIGIT ZERO does not match (unlike e.g. Python)",
+ "data": "߀",
+ "valid": false
+ },
+ {
+ "description": "NKO DIGIT ZERO (as \\u escape) does not match",
+ "data": "\u07c0",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\D matches everything but ascii digits",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\D$"
+ },
+ "tests": [
+ {
+ "description": "ASCII zero does not match",
+ "data": "0",
+ "valid": false
+ },
+ {
+ "description": "NKO DIGIT ZERO matches (unlike e.g. Python)",
+ "data": "߀",
+ "valid": true
+ },
+ {
+ "description": "NKO DIGIT ZERO (as \\u escape) matches",
+ "data": "\u07c0",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\w matches ascii letters only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\w$"
+ },
+ "tests": [
+ {
+ "description": "ASCII 'a' matches",
+ "data": "a",
+ "valid": true
+ },
+ {
+ "description": "latin-1 e-acute does not match (unlike e.g. Python)",
+ "data": "é",
+ "valid": false
+ }
+ ]
+ },
+ {
+        "description": "ECMA 262 \\W matches everything but ascii letters",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\W$"
+ },
+ "tests": [
+ {
+ "description": "ASCII 'a' does not match",
+ "data": "a",
+ "valid": false
+ },
+ {
+ "description": "latin-1 e-acute matches (unlike e.g. Python)",
+ "data": "é",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\s matches ascii whitespace only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\s$"
+ },
+ "tests": [
+ {
+ "description": "ASCII space matches",
+ "data": " ",
+ "valid": true
+ },
+ {
+ "description": "latin-1 non-breaking-space does not match (unlike e.g. Python)",
+ "data": "\u00a0",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\S matches everything but ascii whitespace",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\S$"
+ },
+ "tests": [
+ {
+ "description": "ASCII space does not match",
+ "data": " ",
+ "valid": false
+ },
+ {
+ "description": "latin-1 non-breaking-space matches (unlike e.g. Python)",
+ "data": "\u00a0",
+ "valid": true
+ }
+ ]
+ }
+]
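
These optional cases pin down places where ECMA 262 regex semantics (which JSON Schema uses for "pattern" and "format": "regex") differ from Python's default Unicode-aware re behaviour. A hedged illustration of the gap, not the vendored implementation's actual handling:

import re

# Python's \d, \w and \s are Unicode-aware by default; the suite expects the
# ASCII-only ECMA 262 behaviour, which re.ASCII approximates.
assert re.fullmatch(r"\d", "\u07c0") is not None        # NKO DIGIT ZERO matches by default
assert re.fullmatch(r"\d", "\u07c0", re.ASCII) is None  # ASCII-only, as the tests expect
assert re.fullmatch(r"\s", "\u00a0") is not None        # no-break space is Unicode whitespace
assert re.fullmatch(r"\s", "\u00a0", re.ASCII) is None
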
diff --git a/third_party/python/jsonschema/json/tests/draft6/optional/format.json b/third_party/python/jsonschema/json/tests/draft6/optional/format.json
new file mode 100644
index 0000000000..3dd265fe4f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/optional/format.json
@@ -0,0 +1,491 @@
+[
+ {
+ "description": "validation of date-time strings",
+ "schema": {"format": "date-time"},
+ "tests": [
+ {
+ "description": "a valid date-time string",
+ "data": "1963-06-19T08:30:06.283185Z",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string without second fraction",
+ "data": "1963-06-19T08:30:06Z",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string with plus offset",
+ "data": "1937-01-01T12:00:27.87+00:20",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string with minus offset",
+ "data": "1990-12-31T15:59:50.123-08:00",
+ "valid": true
+ },
+ {
+                "description": "an invalid day in date-time string",
+ "data": "1990-02-31T15:59:60.123-08:00",
+ "valid": false
+ },
+ {
+ "description": "an invalid offset in date-time string",
+ "data": "1990-12-31T15:59:60-24:00",
+ "valid": false
+ },
+ {
+ "description": "an invalid closing Z after time-zone offset",
+ "data": "1963-06-19T08:30:06.28123+01:00Z",
+ "valid": false
+ },
+ {
+ "description": "an invalid date-time string",
+ "data": "06/19/1963 08:30:06 PST",
+ "valid": false
+ },
+ {
+ "description": "case-insensitive T and Z",
+ "data": "1963-06-19t08:30:06.283185z",
+ "valid": true
+ },
+ {
+                "description": "only RFC 3339, not all of ISO 8601, is valid",
+ "data": "2013-350T01:01:01",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of URIs",
+ "schema": {"format": "uri"},
+ "tests": [
+ {
+ "description": "a valid URL with anchor tag",
+ "data": "http://foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+                "description": "a valid URL with anchor tag and parentheses",
+ "data": "http://foo.com/blah_(wikipedia)_blah#cite-1",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with URL-encoded stuff",
+ "data": "http://foo.bar/?q=Test%20URL-encoded%20stuff",
+ "valid": true
+ },
+ {
+                "description": "a valid punycoded URL",
+ "data": "http://xn--nw2a.xn--j6w193g/",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with many special characters",
+ "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid URL based on IPv4",
+ "data": "http://223.255.255.254",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with ftp scheme",
+ "data": "ftp://ftp.is.co.za/rfc/rfc1808.txt",
+ "valid": true
+ },
+ {
+ "description": "a valid URL for a simple text file",
+ "data": "http://www.ietf.org/rfc/rfc2396.txt",
+ "valid": true
+ },
+ {
+                "description": "a valid LDAP URI",
+ "data": "ldap://[2001:db8::7]/c=GB?objectClass?one",
+ "valid": true
+ },
+ {
+ "description": "a valid mailto URI",
+ "data": "mailto:John.Doe@example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid newsgroup URI",
+ "data": "news:comp.infosystems.www.servers.unix",
+ "valid": true
+ },
+ {
+ "description": "a valid tel URI",
+ "data": "tel:+1-816-555-1212",
+ "valid": true
+ },
+ {
+ "description": "a valid URN",
+ "data": "urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
+ "valid": true
+ },
+ {
+ "description": "an invalid protocol-relative URI Reference",
+ "data": "//foo.bar/?baz=qux#quux",
+ "valid": false
+ },
+ {
+ "description": "an invalid relative URI Reference",
+ "data": "/abc",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI",
+ "data": "\\\\WINDOWS\\fileshare",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI though valid URI reference",
+ "data": "abc",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI with spaces",
+ "data": "http:// shouldfail.com",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI with spaces and missing scheme",
+ "data": ":// should fail",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of URI References",
+ "schema": {"format": "uri-reference"},
+ "tests": [
+ {
+ "description": "a valid URI",
+ "data": "http://foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+ "description": "a valid protocol-relative URI Reference",
+ "data": "//foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+ "description": "a valid relative URI Reference",
+ "data": "/abc",
+ "valid": true
+ },
+ {
+ "description": "an invalid URI Reference",
+ "data": "\\\\WINDOWS\\fileshare",
+ "valid": false
+ },
+ {
+ "description": "a valid URI Reference",
+ "data": "abc",
+ "valid": true
+ },
+ {
+ "description": "a valid URI fragment",
+ "data": "#fragment",
+ "valid": true
+ },
+ {
+ "description": "an invalid URI fragment",
+ "data": "#frag\\ment",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "format: uri-template",
+ "schema": {"format": "uri-template"},
+ "tests": [
+ {
+ "description": "a valid uri-template",
+ "data": "http://example.com/dictionary/{term:1}/{term}",
+ "valid": true
+ },
+ {
+ "description": "an invalid uri-template",
+ "data": "http://example.com/dictionary/{term:1}/{term",
+ "valid": false
+ },
+ {
+ "description": "a valid uri-template without variables",
+ "data": "http://example.com/dictionary",
+ "valid": true
+ },
+ {
+ "description": "a valid relative uri-template",
+ "data": "dictionary/{term:1}/{term}",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of e-mail addresses",
+ "schema": {"format": "email"},
+ "tests": [
+ {
+ "description": "a valid e-mail address",
+ "data": "joe.bloggs@example.com",
+ "valid": true
+ },
+ {
+ "description": "an invalid e-mail address",
+ "data": "2962",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of IP addresses",
+ "schema": {"format": "ipv4"},
+ "tests": [
+ {
+ "description": "a valid IP address",
+ "data": "192.168.0.1",
+ "valid": true
+ },
+ {
+ "description": "an IP address with too many components",
+ "data": "127.0.0.0.1",
+ "valid": false
+ },
+ {
+ "description": "an IP address with out-of-range values",
+ "data": "256.256.256.256",
+ "valid": false
+ },
+ {
+ "description": "an IP address without 4 components",
+ "data": "127.0",
+ "valid": false
+ },
+ {
+ "description": "an IP address as an integer",
+ "data": "0x7f000001",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of IPv6 addresses",
+ "schema": {"format": "ipv6"},
+ "tests": [
+ {
+ "description": "a valid IPv6 address",
+ "data": "::1",
+ "valid": true
+ },
+ {
+ "description": "an IPv6 address with out-of-range values",
+ "data": "12345::",
+ "valid": false
+ },
+ {
+ "description": "an IPv6 address with too many components",
+ "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1",
+ "valid": false
+ },
+ {
+ "description": "an IPv6 address containing illegal characters",
+ "data": "::laptop",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of host names",
+ "schema": {"format": "hostname"},
+ "tests": [
+ {
+ "description": "a valid host name",
+ "data": "www.example.com",
+ "valid": true
+ },
+ {
+ "description": "a host name starting with an illegal character",
+ "data": "-a-host-name-that-starts-with--",
+ "valid": false
+ },
+ {
+ "description": "a host name containing illegal characters",
+ "data": "not_a_valid_host_name",
+ "valid": false
+ },
+ {
+ "description": "a host name with a component too long",
+ "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validation of JSON-pointers (JSON String Representation)",
+ "schema": {"format": "json-pointer"},
+ "tests": [
+ {
+ "description": "a valid JSON-pointer",
+ "data": "/foo/bar~0/baz~1/%a",
+ "valid": true
+ },
+ {
+ "description": "not a valid JSON-pointer (~ not escaped)",
+ "data": "/foo/bar~",
+ "valid": false
+ },
+ {
+ "description": "valid JSON-pointer with empty segment",
+ "data": "/foo//bar",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer with the last empty segment",
+ "data": "/foo/bar/",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #1",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #2",
+ "data": "/foo",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #3",
+ "data": "/foo/0",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #4",
+ "data": "/",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #5",
+ "data": "/a~1b",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #6",
+ "data": "/c%d",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #7",
+ "data": "/e^f",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #8",
+ "data": "/g|h",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #9",
+ "data": "/i\\j",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #10",
+ "data": "/k\"l",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #11",
+ "data": "/ ",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #12",
+ "data": "/m~0n",
+ "valid": true
+ },
+ {
+                "description": "valid JSON-pointer used to add to the last array position",
+ "data": "/foo/-",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (- used as object member name)",
+ "data": "/foo/-/bar",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (multiple escaped characters)",
+ "data": "/~1~0~0~1~1",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (escaped with fraction part) #1",
+ "data": "/~1.1",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (escaped with fraction part) #2",
+ "data": "/~0.1",
+ "valid": true
+ },
+ {
+ "description": "not a valid JSON-pointer (URI Fragment Identifier) #1",
+ "data": "#",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (URI Fragment Identifier) #2",
+ "data": "#/",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (URI Fragment Identifier) #3",
+ "data": "#a",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (some escaped, but not all) #1",
+ "data": "/~0~",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (some escaped, but not all) #2",
+ "data": "/~0/~",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (wrong escape character) #1",
+ "data": "/~2",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (wrong escape character) #2",
+ "data": "/~-1",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (multiple characters not escaped)",
+ "data": "/~~",
+ "valid": false
+ },
+ {
+                "description": "not a valid JSON-pointer (neither empty nor starting with /) #1",
+ "data": "a",
+ "valid": false
+ },
+ {
+                "description": "not a valid JSON-pointer (neither empty nor starting with /) #2",
+ "data": "0",
+ "valid": false
+ },
+ {
+                "description": "not a valid JSON-pointer (neither empty nor starting with /) #3",
+ "data": "a/a",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/optional/zeroTerminatedFloats.json b/third_party/python/jsonschema/json/tests/draft6/optional/zeroTerminatedFloats.json
new file mode 100644
index 0000000000..1bcdf96036
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/optional/zeroTerminatedFloats.json
@@ -0,0 +1,15 @@
+[
+ {
+ "description": "some languages do not distinguish between different types of numeric value",
+ "schema": {
+ "type": "integer"
+ },
+ "tests": [
+ {
+ "description": "a float without fractional part is an integer",
+ "data": 1.0,
+ "valid": true
+ }
+ ]
+ }
+]
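
zeroTerminatedFloats lives under optional/ because JSON parsers disagree here, but draft 6 itself defines "integer" as any number with a zero fractional part, so 1.0 is accepted. A hedged sketch of that check as it might look in Python, where json.loads keeps 1.0 as a float:

def is_draft6_integer(value):
    # bool is a subclass of int in Python, so exclude it explicitly.
    if isinstance(value, bool):
        return False
    return isinstance(value, int) or (isinstance(value, float) and value.is_integer())

assert is_draft6_integer(1.0)   # the optional test above
assert is_draft6_integer(7)
assert not is_draft6_integer(1.5)
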
diff --git a/third_party/python/jsonschema/json/tests/draft6/pattern.json b/third_party/python/jsonschema/json/tests/draft6/pattern.json
new file mode 100644
index 0000000000..25e7299731
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/pattern.json
@@ -0,0 +1,34 @@
+[
+ {
+ "description": "pattern validation",
+ "schema": {"pattern": "^a*$"},
+ "tests": [
+ {
+ "description": "a matching pattern is valid",
+ "data": "aaa",
+ "valid": true
+ },
+ {
+ "description": "a non-matching pattern is invalid",
+ "data": "abc",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": true,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "pattern is not anchored",
+ "schema": {"pattern": "a+"},
+ "tests": [
+ {
+ "description": "matches a substring",
+ "data": "xxaayy",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/patternProperties.json b/third_party/python/jsonschema/json/tests/draft6/patternProperties.json
new file mode 100644
index 0000000000..1d04a1675c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/patternProperties.json
@@ -0,0 +1,151 @@
+[
+ {
+ "description":
+ "patternProperties validates properties matching a regex",
+ "schema": {
+ "patternProperties": {
+ "f.*o": {"type": "integer"}
+ }
+ },
+ "tests": [
+ {
+ "description": "a single valid match is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "multiple valid matches is valid",
+ "data": {"foo": 1, "foooooo" : 2},
+ "valid": true
+ },
+ {
+ "description": "a single invalid match is invalid",
+ "data": {"foo": "bar", "fooooo": 2},
+ "valid": false
+ },
+ {
+ "description": "multiple invalid matches is invalid",
+ "data": {"foo": "bar", "foooooo" : "baz"},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": ["foo"],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple simultaneous patternProperties are validated",
+ "schema": {
+ "patternProperties": {
+ "a*": {"type": "integer"},
+ "aaa*": {"maximum": 20}
+ }
+ },
+ "tests": [
+ {
+ "description": "a single valid match is valid",
+ "data": {"a": 21},
+ "valid": true
+ },
+ {
+ "description": "a simultaneous match is valid",
+ "data": {"aaaa": 18},
+ "valid": true
+ },
+ {
+ "description": "multiple matches is valid",
+ "data": {"a": 21, "aaaa": 18},
+ "valid": true
+ },
+ {
+ "description": "an invalid due to one is invalid",
+ "data": {"a": "bar"},
+ "valid": false
+ },
+ {
+ "description": "an invalid due to the other is invalid",
+ "data": {"aaaa": 31},
+ "valid": false
+ },
+ {
+ "description": "an invalid due to both is invalid",
+ "data": {"aaa": "foo", "aaaa": 31},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "regexes are not anchored by default and are case sensitive",
+ "schema": {
+ "patternProperties": {
+ "[0-9]{2,}": { "type": "boolean" },
+ "X_": { "type": "string" }
+ }
+ },
+ "tests": [
+ {
+                "description": "unrecognized members are ignored",
+ "data": { "answer 1": "42" },
+ "valid": true
+ },
+ {
+ "description": "recognized members are accounted for",
+ "data": { "a31b": null },
+ "valid": false
+ },
+ {
+ "description": "regexes are case sensitive",
+ "data": { "a_x_3": 3 },
+ "valid": true
+ },
+ {
+ "description": "regexes are case sensitive, 2",
+ "data": { "a_X_3": 3 },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "patternProperties with boolean schemas",
+ "schema": {
+ "patternProperties": {
+ "f.*": true,
+ "b.*": false
+ }
+ },
+ "tests": [
+ {
+ "description": "object with property matching schema true is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "object with property matching schema false is invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "object with both properties is invalid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/properties.json b/third_party/python/jsonschema/json/tests/draft6/properties.json
new file mode 100644
index 0000000000..b86c181982
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/properties.json
@@ -0,0 +1,167 @@
+[
+ {
+ "description": "object properties validation",
+ "schema": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"type": "string"}
+ }
+ },
+ "tests": [
+ {
+ "description": "both properties present and valid is valid",
+ "data": {"foo": 1, "bar": "baz"},
+ "valid": true
+ },
+ {
+ "description": "one property invalid is invalid",
+ "data": {"foo": 1, "bar": {}},
+ "valid": false
+ },
+ {
+ "description": "both properties invalid is invalid",
+ "data": {"foo": [], "bar": {}},
+ "valid": false
+ },
+ {
+ "description": "doesn't invalidate other properties",
+ "data": {"quux": []},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description":
+ "properties, patternProperties, additionalProperties interaction",
+ "schema": {
+ "properties": {
+ "foo": {"type": "array", "maxItems": 3},
+ "bar": {"type": "array"}
+ },
+ "patternProperties": {"f.o": {"minItems": 2}},
+ "additionalProperties": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "property validates property",
+ "data": {"foo": [1, 2]},
+ "valid": true
+ },
+ {
+ "description": "property invalidates property",
+ "data": {"foo": [1, 2, 3, 4]},
+ "valid": false
+ },
+ {
+ "description": "patternProperty invalidates property",
+ "data": {"foo": []},
+ "valid": false
+ },
+ {
+ "description": "patternProperty validates nonproperty",
+ "data": {"fxo": [1, 2]},
+ "valid": true
+ },
+ {
+ "description": "patternProperty invalidates nonproperty",
+ "data": {"fxo": []},
+ "valid": false
+ },
+ {
+ "description": "additionalProperty ignores property",
+ "data": {"bar": []},
+ "valid": true
+ },
+ {
+ "description": "additionalProperty validates others",
+ "data": {"quux": 3},
+ "valid": true
+ },
+ {
+ "description": "additionalProperty invalidates others",
+ "data": {"quux": "foo"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "properties with boolean schema",
+ "schema": {
+ "properties": {
+ "foo": true,
+ "bar": false
+ }
+ },
+ "tests": [
+ {
+ "description": "no property present is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "only 'true' property present is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "only 'false' property present is invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "both properties present is invalid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "properties with escaped characters",
+ "schema": {
+ "properties": {
+ "foo\nbar": {"type": "number"},
+ "foo\"bar": {"type": "number"},
+ "foo\\bar": {"type": "number"},
+ "foo\rbar": {"type": "number"},
+ "foo\tbar": {"type": "number"},
+ "foo\fbar": {"type": "number"}
+ }
+ },
+ "tests": [
+ {
+ "description": "object with all numbers is valid",
+ "data": {
+ "foo\nbar": 1,
+ "foo\"bar": 1,
+ "foo\\bar": 1,
+ "foo\rbar": 1,
+ "foo\tbar": 1,
+ "foo\fbar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with strings is invalid",
+ "data": {
+ "foo\nbar": "1",
+ "foo\"bar": "1",
+ "foo\\bar": "1",
+ "foo\rbar": "1",
+ "foo\tbar": "1",
+ "foo\fbar": "1"
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/propertyNames.json b/third_party/python/jsonschema/json/tests/draft6/propertyNames.json
new file mode 100644
index 0000000000..8423690d90
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/propertyNames.json
@@ -0,0 +1,78 @@
+[
+ {
+ "description": "propertyNames validation",
+ "schema": {
+ "propertyNames": {"maxLength": 3}
+ },
+ "tests": [
+ {
+ "description": "all property names valid",
+ "data": {
+ "f": {},
+ "foo": {}
+ },
+ "valid": true
+ },
+ {
+ "description": "some property names invalid",
+ "data": {
+ "foo": {},
+ "foobar": {}
+ },
+ "valid": false
+ },
+ {
+ "description": "object without properties is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3, 4],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "propertyNames with boolean schema true",
+ "schema": {"propertyNames": true},
+ "tests": [
+ {
+ "description": "object with any properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "propertyNames with boolean schema false",
+ "schema": {"propertyNames": false},
+ "tests": [
+ {
+ "description": "object with any properties is invalid",
+ "data": {"foo": 1},
+ "valid": false
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/ref.json b/third_party/python/jsonschema/json/tests/draft6/ref.json
new file mode 100644
index 0000000000..53f3a9e9d4
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/ref.json
@@ -0,0 +1,443 @@
+[
+ {
+ "description": "root pointer ref",
+ "schema": {
+ "properties": {
+ "foo": {"$ref": "#"}
+ },
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": {"foo": false},
+ "valid": true
+ },
+ {
+ "description": "recursive match",
+ "data": {"foo": {"foo": false}},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"bar": false},
+ "valid": false
+ },
+ {
+ "description": "recursive mismatch",
+ "data": {"foo": {"bar": false}},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "relative pointer ref to object",
+ "schema": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"$ref": "#/properties/foo"}
+ }
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": {"bar": 3},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"bar": true},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "relative pointer ref to array",
+ "schema": {
+ "items": [
+ {"type": "integer"},
+ {"$ref": "#/items/0"}
+ ]
+ },
+ "tests": [
+ {
+ "description": "match array",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "mismatch array",
+ "data": [1, "foo"],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "escaped pointer ref",
+ "schema": {
+ "tilda~field": {"type": "integer"},
+ "slash/field": {"type": "integer"},
+ "percent%field": {"type": "integer"},
+ "properties": {
+ "tilda": {"$ref": "#/tilda~0field"},
+ "slash": {"$ref": "#/slash~1field"},
+ "percent": {"$ref": "#/percent%25field"}
+ }
+ },
+ "tests": [
+ {
+ "description": "slash invalid",
+ "data": {"slash": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "tilda invalid",
+ "data": {"tilda": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "percent invalid",
+ "data": {"percent": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "slash valid",
+ "data": {"slash": 123},
+ "valid": true
+ },
+ {
+ "description": "tilda valid",
+ "data": {"tilda": 123},
+ "valid": true
+ },
+ {
+ "description": "percent valid",
+ "data": {"percent": 123},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested refs",
+ "schema": {
+ "definitions": {
+ "a": {"type": "integer"},
+ "b": {"$ref": "#/definitions/a"},
+ "c": {"$ref": "#/definitions/b"}
+ },
+ "$ref": "#/definitions/c"
+ },
+ "tests": [
+ {
+ "description": "nested ref valid",
+ "data": 5,
+ "valid": true
+ },
+ {
+ "description": "nested ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ref overrides any sibling keywords",
+ "schema": {
+ "definitions": {
+ "reffed": {
+ "type": "array"
+ }
+ },
+ "properties": {
+ "foo": {
+ "$ref": "#/definitions/reffed",
+ "maxItems": 2
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "ref valid",
+ "data": { "foo": [] },
+ "valid": true
+ },
+ {
+ "description": "ref valid, maxItems ignored",
+ "data": { "foo": [ 1, 2, 3] },
+ "valid": true
+ },
+ {
+ "description": "ref invalid",
+ "data": { "foo": "string" },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "remote ref, containing refs itself",
+ "schema": {"$ref": "http://json-schema.org/draft-06/schema#"},
+ "tests": [
+ {
+ "description": "remote ref valid",
+ "data": {"minLength": 1},
+ "valid": true
+ },
+ {
+ "description": "remote ref invalid",
+ "data": {"minLength": -1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "property named $ref that is not a reference",
+ "schema": {
+ "properties": {
+ "$ref": {"type": "string"}
+ }
+ },
+ "tests": [
+ {
+ "description": "property named $ref valid",
+ "data": {"$ref": "a"},
+ "valid": true
+ },
+ {
+ "description": "property named $ref invalid",
+ "data": {"$ref": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "$ref to boolean schema true",
+ "schema": {
+ "$ref": "#/definitions/bool",
+ "definitions": {
+ "bool": true
+ }
+ },
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "$ref to boolean schema false",
+ "schema": {
+ "$ref": "#/definitions/bool",
+ "definitions": {
+ "bool": false
+ }
+ },
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Recursive references between schemas",
+ "schema": {
+ "$id": "http://localhost:1234/tree",
+ "description": "tree of nodes",
+ "type": "object",
+ "properties": {
+ "meta": {"type": "string"},
+ "nodes": {
+ "type": "array",
+ "items": {"$ref": "node"}
+ }
+ },
+ "required": ["meta", "nodes"],
+ "definitions": {
+ "node": {
+ "$id": "http://localhost:1234/node",
+ "description": "node",
+ "type": "object",
+ "properties": {
+ "value": {"type": "number"},
+ "subtree": {"$ref": "tree"}
+ },
+ "required": ["value"]
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid tree",
+ "data": {
+ "meta": "root",
+ "nodes": [
+ {
+ "value": 1,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 1.1},
+ {"value": 1.2}
+ ]
+ }
+ },
+ {
+ "value": 2,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 2.1},
+ {"value": 2.2}
+ ]
+ }
+ }
+ ]
+ },
+ "valid": true
+ },
+ {
+ "description": "invalid tree",
+ "data": {
+ "meta": "root",
+ "nodes": [
+ {
+ "value": 1,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": "string is invalid"},
+ {"value": 1.2}
+ ]
+ }
+ },
+ {
+ "value": 2,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 2.1},
+ {"value": 2.2}
+ ]
+ }
+ }
+ ]
+ },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "refs with quote",
+ "schema": {
+ "properties": {
+ "foo\"bar": {"$ref": "#/definitions/foo%22bar"}
+ },
+ "definitions": {
+ "foo\"bar": {"type": "number"}
+ }
+ },
+ "tests": [
+ {
+ "description": "object with numbers is valid",
+ "data": {
+ "foo\"bar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with strings is invalid",
+ "data": {
+ "foo\"bar": "1"
+ },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Location-independent identifier",
+ "schema": {
+ "allOf": [{
+ "$ref": "#foo"
+ }],
+ "definitions": {
+ "A": {
+ "$id": "#foo",
+ "type": "integer"
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Location-independent identifier with absolute URI",
+ "schema": {
+ "allOf": [{
+ "$ref": "http://localhost:1234/bar#foo"
+ }],
+ "definitions": {
+ "A": {
+ "$id": "http://localhost:1234/bar#foo",
+ "type": "integer"
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Location-independent identifier with base URI change in subschema",
+ "schema": {
+ "$id": "http://localhost:1234/root",
+ "allOf": [{
+ "$ref": "http://localhost:1234/nested.json#foo"
+ }],
+ "definitions": {
+ "A": {
+ "$id": "nested.json",
+ "definitions": {
+ "B": {
+ "$id": "#foo",
+ "type": "integer"
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/refRemote.json b/third_party/python/jsonschema/json/tests/draft6/refRemote.json
new file mode 100644
index 0000000000..819d32678a
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/refRemote.json
@@ -0,0 +1,171 @@
+[
+ {
+ "description": "remote ref",
+ "schema": {"$ref": "http://localhost:1234/integer.json"},
+ "tests": [
+ {
+ "description": "remote ref valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "remote ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "fragment within remote ref",
+ "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"},
+ "tests": [
+ {
+ "description": "remote fragment valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "remote fragment invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ref within remote ref",
+ "schema": {
+ "$ref": "http://localhost:1234/subSchemas.json#/refToInteger"
+ },
+ "tests": [
+ {
+ "description": "ref within ref valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "ref within ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change",
+ "schema": {
+ "$id": "http://localhost:1234/",
+ "items": {
+ "$id": "folder/",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ },
+ "tests": [
+ {
+ "description": "base URI change ref valid",
+ "data": [[1]],
+ "valid": true
+ },
+ {
+ "description": "base URI change ref invalid",
+ "data": [["a"]],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change - change folder",
+ "schema": {
+ "$id": "http://localhost:1234/scope_change_defs1.json",
+ "type" : "object",
+ "properties": {
+ "list": {"$ref": "#/definitions/baz"}
+ },
+ "definitions": {
+ "baz": {
+ "$id": "folder/",
+ "type": "array",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": {"list": [1]},
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": {"list": ["a"]},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change - change folder in subschema",
+ "schema": {
+ "$id": "http://localhost:1234/scope_change_defs2.json",
+ "type" : "object",
+ "properties": {
+ "list": {"$ref": "#/definitions/baz/definitions/bar"}
+ },
+ "definitions": {
+ "baz": {
+ "$id": "folder/",
+ "definitions": {
+ "bar": {
+ "type": "array",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": {"list": [1]},
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": {"list": ["a"]},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "root ref in remote ref",
+ "schema": {
+ "$id": "http://localhost:1234/object",
+ "type": "object",
+ "properties": {
+ "name": {"$ref": "name.json#/definitions/orNull"}
+ }
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": {
+ "name": "foo"
+ },
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": {
+ "name": null
+ },
+ "valid": true
+ },
+ {
+ "description": "object is invalid",
+ "data": {
+ "name": {
+ "name": null
+ }
+ },
+ "valid": false
+ }
+ ]
+ }
+]
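
The refRemote cases all resolve against http://localhost:1234/..., which the upstream test suite expects to be backed by its remotes/ directory while tests run (a harness may equally pre-register those documents from disk). One way to make the URIs resolvable when experimenting by hand, assuming the remotes directory sits alongside tests/ as in the upstream layout:

import functools
import http.server

# Assumed path, mirroring the upstream JSON-Schema-Test-Suite layout.
REMOTES = "third_party/python/jsonschema/json/remotes"

handler = functools.partial(http.server.SimpleHTTPRequestHandler, directory=REMOTES)
http.server.ThreadingHTTPServer(("localhost", 1234), handler).serve_forever()
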
diff --git a/third_party/python/jsonschema/json/tests/draft6/required.json b/third_party/python/jsonschema/json/tests/draft6/required.json
new file mode 100644
index 0000000000..abf18f3459
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/required.json
@@ -0,0 +1,105 @@
+[
+ {
+ "description": "required validation",
+ "schema": {
+ "properties": {
+ "foo": {},
+ "bar": {}
+ },
+ "required": ["foo"]
+ },
+ "tests": [
+ {
+ "description": "present required property is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "non-present required property is invalid",
+ "data": {"bar": 1},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required default validation",
+ "schema": {
+ "properties": {
+ "foo": {}
+ }
+ },
+ "tests": [
+ {
+ "description": "not required by default",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required with empty array",
+ "schema": {
+ "properties": {
+ "foo": {}
+ },
+ "required": []
+ },
+ "tests": [
+ {
+ "description": "property not required",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required with escaped characters",
+ "schema": {
+ "required": [
+ "foo\nbar",
+ "foo\"bar",
+ "foo\\bar",
+ "foo\rbar",
+ "foo\tbar",
+ "foo\fbar"
+ ]
+ },
+ "tests": [
+ {
+ "description": "object with all properties present is valid",
+ "data": {
+ "foo\nbar": 1,
+ "foo\"bar": 1,
+ "foo\\bar": 1,
+ "foo\rbar": 1,
+ "foo\tbar": 1,
+ "foo\fbar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with some properties missing is invalid",
+ "data": {
+ "foo\nbar": "1",
+ "foo\"bar": "1"
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft6/type.json b/third_party/python/jsonschema/json/tests/draft6/type.json
new file mode 100644
index 0000000000..ea33b1821f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/type.json
@@ -0,0 +1,464 @@
+[
+ {
+ "description": "integer type matches integers",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "an integer is an integer",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a float is not an integer",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an integer",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "a string is still not an integer, even if it looks like one",
+ "data": "1",
+ "valid": false
+ },
+ {
+ "description": "an object is not an integer",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not an integer",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not an integer",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an integer",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "number type matches numbers",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "an integer is a number",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a float is a number",
+ "data": 1.1,
+ "valid": true
+ },
+ {
+ "description": "a string is not a number",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "a string is still not a number, even if it looks like one",
+ "data": "1",
+ "valid": false
+ },
+ {
+ "description": "an object is not a number",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a number",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not a number",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not a number",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "string type matches strings",
+ "schema": {"type": "string"},
+ "tests": [
+ {
+ "description": "1 is not a string",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not a string",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is a string",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "a string is still a string, even if it looks like a number",
+ "data": "1",
+ "valid": true
+ },
+ {
+ "description": "an empty string is still a string",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "an object is not a string",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a string",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not a string",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not a string",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "object type matches objects",
+ "schema": {"type": "object"},
+ "tests": [
+ {
+ "description": "an integer is not an object",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not an object",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an object",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is an object",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "an array is not an object",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not an object",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an object",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "array type matches arrays",
+ "schema": {"type": "array"},
+ "tests": [
+ {
+ "description": "an integer is not an array",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not an array",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an array",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is not an array",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is an array",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "a boolean is not an array",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an array",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "boolean type matches booleans",
+ "schema": {"type": "boolean"},
+ "tests": [
+ {
+ "description": "an integer is not a boolean",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "zero is not a boolean",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "a float is not a boolean",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not a boolean",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an empty string is not a boolean",
+ "data": "",
+ "valid": false
+ },
+ {
+ "description": "an object is not a boolean",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a boolean",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "true is a boolean",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "false is a boolean",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "null is not a boolean",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "null type matches only the null object",
+ "schema": {"type": "null"},
+ "tests": [
+ {
+ "description": "an integer is not null",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not null",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "zero is not null",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "a string is not null",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an empty string is not null",
+ "data": "",
+ "valid": false
+ },
+ {
+ "description": "an object is not null",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not null",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "true is not null",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "false is not null",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "null is null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple types can be specified in an array",
+ "schema": {"type": ["integer", "string"]},
+ "tests": [
+ {
+ "description": "an integer is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "a float is invalid",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "an object is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type as array with one item",
+ "schema": {
+ "type": ["string"]
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type: array or object",
+ "schema": {
+ "type": ["array", "object"]
+ },
+ "tests": [
+ {
+ "description": "array is valid",
+ "data": [1,2,3],
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": 123},
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type: array, object or null",
+ "schema": {
+ "type": ["array", "object", "null"]
+ },
+ "tests": [
+ {
+ "description": "array is valid",
+ "data": [1,2,3],
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": 123},
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ }
+]
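
Each fixture file in this suite follows the same layout: a top-level array of groups, where every group carries a `schema`, a human-readable `description`, and a list of `tests` pairing an input `data` value with the expected `valid` flag. Below is a minimal sketch of how one of these files can be replayed against the vendored jsonschema package; the file path simply mirrors the location this patch adds, and the choice of `Draft6Validator` is an assumption based on the `draft6` directory name.

```python
import json

from jsonschema import Draft6Validator

# Path mirrors the fixture location added by this patch (assumed to be
# opened from the root of the source tree).
with open("third_party/python/jsonschema/json/tests/draft6/type.json") as fh:
    groups = json.load(fh)

for group in groups:
    validator = Draft6Validator(group["schema"])
    for test in group["tests"]:
        # is_valid() returns a bool that should match the fixture's "valid" flag.
        assert validator.is_valid(test["data"]) == test["valid"], (
            group["description"],
            test["description"],
        )
```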
diff --git a/third_party/python/jsonschema/json/tests/draft6/uniqueItems.json b/third_party/python/jsonschema/json/tests/draft6/uniqueItems.json
new file mode 100644
index 0000000000..d312ad71ab
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft6/uniqueItems.json
@@ -0,0 +1,173 @@
+[
+ {
+ "description": "uniqueItems validation",
+ "schema": {"uniqueItems": true},
+ "tests": [
+ {
+ "description": "unique array of integers is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of integers is invalid",
+ "data": [1, 1],
+ "valid": false
+ },
+ {
+ "description": "numbers are unique if mathematically unequal",
+ "data": [1.0, 1.00, 1],
+ "valid": false
+ },
+ {
+ "description": "false is not equal to zero",
+ "data": [0, false],
+ "valid": true
+ },
+ {
+ "description": "true is not equal to one",
+ "data": [1, true],
+ "valid": true
+ },
+ {
+ "description": "unique array of objects is valid",
+ "data": [{"foo": "bar"}, {"foo": "baz"}],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of objects is invalid",
+ "data": [{"foo": "bar"}, {"foo": "bar"}],
+ "valid": false
+ },
+ {
+ "description": "unique array of nested objects is valid",
+ "data": [
+ {"foo": {"bar" : {"baz" : true}}},
+ {"foo": {"bar" : {"baz" : false}}}
+ ],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of nested objects is invalid",
+ "data": [
+ {"foo": {"bar" : {"baz" : true}}},
+ {"foo": {"bar" : {"baz" : true}}}
+ ],
+ "valid": false
+ },
+ {
+ "description": "unique array of arrays is valid",
+ "data": [["foo"], ["bar"]],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of arrays is invalid",
+ "data": [["foo"], ["foo"]],
+ "valid": false
+ },
+ {
+ "description": "1 and true are unique",
+ "data": [1, true],
+ "valid": true
+ },
+ {
+ "description": "0 and false are unique",
+ "data": [0, false],
+ "valid": true
+ },
+ {
+ "description": "unique heterogeneous types are valid",
+ "data": [{}, [1], true, null, 1],
+ "valid": true
+ },
+ {
+ "description": "non-unique heterogeneous types are invalid",
+ "data": [{}, [1], true, null, {}, 1],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "uniqueItems with an array of items",
+ "schema": {
+ "items": [{"type": "boolean"}, {"type": "boolean"}],
+ "uniqueItems": true
+ },
+ "tests": [
+ {
+ "description": "[false, true] from items array is valid",
+ "data": [false, true],
+ "valid": true
+ },
+ {
+ "description": "[true, false] from items array is valid",
+ "data": [true, false],
+ "valid": true
+ },
+ {
+ "description": "[false, false] from items array is not valid",
+ "data": [false, false],
+ "valid": false
+ },
+ {
+ "description": "[true, true] from items array is not valid",
+ "data": [true, true],
+ "valid": false
+ },
+ {
+ "description": "unique array extended from [false, true] is valid",
+ "data": [false, true, "foo", "bar"],
+ "valid": true
+ },
+ {
+ "description": "unique array extended from [true, false] is valid",
+ "data": [true, false, "foo", "bar"],
+ "valid": true
+ },
+ {
+ "description": "non-unique array extended from [false, true] is not valid",
+ "data": [false, true, "foo", "foo"],
+ "valid": false
+ },
+ {
+ "description": "non-unique array extended from [true, false] is not valid",
+ "data": [true, false, "foo", "foo"],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "uniqueItems with an array of items and additionalItems=false",
+ "schema": {
+ "items": [{"type": "boolean"}, {"type": "boolean"}],
+ "uniqueItems": true,
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "[false, true] from items array is valid",
+ "data": [false, true],
+ "valid": true
+ },
+ {
+ "description": "[true, false] from items array is valid",
+ "data": [true, false],
+ "valid": true
+ },
+ {
+ "description": "[false, false] from items array is not valid",
+ "data": [false, false],
+ "valid": false
+ },
+ {
+ "description": "[true, true] from items array is not valid",
+ "data": [true, true],
+ "valid": false
+ },
+ {
+ "description": "extra items are invalid even if unique",
+ "data": [false, true, null],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/additionalItems.json b/third_party/python/jsonschema/json/tests/draft7/additionalItems.json
new file mode 100644
index 0000000000..abecc578be
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/additionalItems.json
@@ -0,0 +1,87 @@
+[
+ {
+ "description": "additionalItems as schema",
+ "schema": {
+ "items": [{}],
+ "additionalItems": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "additional items match schema",
+ "data": [ null, 2, 3, 4 ],
+ "valid": true
+ },
+ {
+ "description": "additional items do not match schema",
+ "data": [ null, 2, 3, "foo" ],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "items is schema, no additionalItems",
+ "schema": {
+ "items": {},
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "all items match schema",
+ "data": [ 1, 2, 3, 4, 5 ],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "array of items with no additionalItems",
+ "schema": {
+ "items": [{}, {}, {}],
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "fewer number of items present",
+ "data": [ 1, 2 ],
+ "valid": true
+ },
+ {
+ "description": "equal number of items present",
+ "data": [ 1, 2, 3 ],
+ "valid": true
+ },
+ {
+ "description": "additional items are not permitted",
+ "data": [ 1, 2, 3, 4 ],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "additionalItems as false without items",
+ "schema": {"additionalItems": false},
+ "tests": [
+ {
+ "description":
+ "items defaults to empty schema so everything is valid",
+ "data": [ 1, 2, 3, 4, 5 ],
+ "valid": true
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": {"foo" : "bar"},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "additionalItems are allowed by default",
+ "schema": {"items": [{"type": "integer"}]},
+ "tests": [
+ {
+ "description": "only the first item is validated",
+ "data": [1, "foo", false],
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/additionalProperties.json b/third_party/python/jsonschema/json/tests/draft7/additionalProperties.json
new file mode 100644
index 0000000000..ffeac6b381
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/additionalProperties.json
@@ -0,0 +1,133 @@
+[
+ {
+ "description":
+ "additionalProperties being false does not allow other properties",
+ "schema": {
+ "properties": {"foo": {}, "bar": {}},
+ "patternProperties": { "^v": {} },
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "no additional properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "an additional property is invalid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : "boom"},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobarbaz",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "patternProperties are not additional properties",
+ "data": {"foo":1, "vroom": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "non-ASCII pattern with additionalProperties",
+ "schema": {
+ "patternProperties": {"^á": {}},
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "matching the pattern is valid",
+ "data": {"ármányos": 2},
+ "valid": true
+ },
+ {
+ "description": "not matching the pattern is invalid",
+ "data": {"élmény": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description":
+ "additionalProperties allows a schema which should validate",
+ "schema": {
+ "properties": {"foo": {}, "bar": {}},
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "no additional properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "an additional valid property is valid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : true},
+ "valid": true
+ },
+ {
+ "description": "an additional invalid property is invalid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : 12},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description":
+ "additionalProperties can exist by itself",
+ "schema": {
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "an additional valid property is valid",
+ "data": {"foo" : true},
+ "valid": true
+ },
+ {
+ "description": "an additional invalid property is invalid",
+ "data": {"foo" : 1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "additionalProperties are allowed by default",
+ "schema": {"properties": {"foo": {}, "bar": {}}},
+ "tests": [
+ {
+ "description": "additional properties are allowed",
+ "data": {"foo": 1, "bar": 2, "quux": true},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "additionalProperties should not look in applicators",
+ "schema": {
+ "allOf": [
+ {"properties": {"foo": {}}}
+ ],
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "properties defined in allOf are not allowed",
+ "data": {"foo": 1, "bar": true},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/allOf.json b/third_party/python/jsonschema/json/tests/draft7/allOf.json
new file mode 100644
index 0000000000..eb612091aa
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/allOf.json
@@ -0,0 +1,218 @@
+[
+ {
+ "description": "allOf",
+ "schema": {
+ "allOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "allOf",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "mismatch second",
+ "data": {"foo": "baz"},
+ "valid": false
+ },
+ {
+ "description": "mismatch first",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "wrong type",
+ "data": {"foo": "baz", "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with base schema",
+ "schema": {
+ "properties": {"bar": {"type": "integer"}},
+ "required": ["bar"],
+ "allOf" : [
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ },
+ {
+ "properties": {
+ "baz": {"type": "null"}
+ },
+ "required": ["baz"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": {"foo": "quux", "bar": 2, "baz": null},
+ "valid": true
+ },
+ {
+ "description": "mismatch base schema",
+ "data": {"foo": "quux", "baz": null},
+ "valid": false
+ },
+ {
+ "description": "mismatch first allOf",
+ "data": {"bar": 2, "baz": null},
+ "valid": false
+ },
+ {
+ "description": "mismatch second allOf",
+ "data": {"foo": "quux", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "mismatch both",
+ "data": {"bar": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf simple types",
+ "schema": {
+ "allOf": [
+ {"maximum": 30},
+ {"minimum": 20}
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": 25,
+ "valid": true
+ },
+ {
+ "description": "mismatch one",
+ "data": 35,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with boolean schemas, all true",
+ "schema": {"allOf": [true, true]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with boolean schemas, some false",
+ "schema": {"allOf": [true, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with boolean schemas, all false",
+ "schema": {"allOf": [false, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with one empty schema",
+ "schema": {
+ "allOf": [
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "any data is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with two empty schemas",
+ "schema": {
+ "allOf": [
+ {},
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "any data is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with the first empty schema",
+ "schema": {
+ "allOf": [
+ {},
+ { "type": "number" }
+ ]
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with the last empty schema",
+ "schema": {
+ "allOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/anyOf.json b/third_party/python/jsonschema/json/tests/draft7/anyOf.json
new file mode 100644
index 0000000000..ab5eb386b4
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/anyOf.json
@@ -0,0 +1,189 @@
+[
+ {
+ "description": "anyOf",
+ "schema": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "minimum": 2
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first anyOf valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "second anyOf valid",
+ "data": 2.5,
+ "valid": true
+ },
+ {
+ "description": "both anyOf valid",
+ "data": 3,
+ "valid": true
+ },
+ {
+ "description": "neither anyOf valid",
+ "data": 1.5,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with base schema",
+ "schema": {
+ "type": "string",
+ "anyOf" : [
+ {
+ "maxLength": 2
+ },
+ {
+ "minLength": 4
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "mismatch base schema",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "one anyOf valid",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "both anyOf invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with boolean schemas, all true",
+ "schema": {"anyOf": [true, true]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "anyOf with boolean schemas, some true",
+ "schema": {"anyOf": [true, false]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "anyOf with boolean schemas, all false",
+ "schema": {"anyOf": [false, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf complex types",
+ "schema": {
+ "anyOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first anyOf valid (complex)",
+ "data": {"bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second anyOf valid (complex)",
+ "data": {"foo": "baz"},
+ "valid": true
+ },
+ {
+ "description": "both anyOf valid (complex)",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "neither anyOf valid (complex)",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with one empty schema",
+ "schema": {
+ "anyOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "number is valid",
+ "data": 123,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested anyOf, to check validation semantics",
+ "schema": {
+ "anyOf": [
+ {
+ "anyOf": [
+ {
+ "type": "null"
+ }
+ ]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "anything non-null is invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/boolean_schema.json b/third_party/python/jsonschema/json/tests/draft7/boolean_schema.json
new file mode 100644
index 0000000000..6d40f23f26
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/boolean_schema.json
@@ -0,0 +1,104 @@
+[
+ {
+ "description": "boolean schema 'true'",
+ "schema": true,
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "boolean true is valid",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "boolean false is valid",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": "bar"},
+ "valid": true
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "array is valid",
+ "data": ["foo"],
+ "valid": true
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "boolean schema 'false'",
+ "schema": false,
+ "tests": [
+ {
+ "description": "number is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "boolean true is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "boolean false is invalid",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ },
+ {
+ "description": "object is invalid",
+ "data": {"foo": "bar"},
+ "valid": false
+ },
+ {
+ "description": "empty object is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "array is invalid",
+ "data": ["foo"],
+ "valid": false
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/const.json b/third_party/python/jsonschema/json/tests/draft7/const.json
new file mode 100644
index 0000000000..c089625dc4
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/const.json
@@ -0,0 +1,170 @@
+[
+ {
+ "description": "const validation",
+ "schema": {"const": 2},
+ "tests": [
+ {
+ "description": "same value is valid",
+ "data": 2,
+ "valid": true
+ },
+ {
+ "description": "another value is invalid",
+ "data": 5,
+ "valid": false
+ },
+ {
+ "description": "another type is invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with object",
+ "schema": {"const": {"foo": "bar", "baz": "bax"}},
+ "tests": [
+ {
+ "description": "same object is valid",
+ "data": {"foo": "bar", "baz": "bax"},
+ "valid": true
+ },
+ {
+ "description": "same object with different property order is valid",
+ "data": {"baz": "bax", "foo": "bar"},
+ "valid": true
+ },
+ {
+ "description": "another object is invalid",
+ "data": {"foo": "bar"},
+ "valid": false
+ },
+ {
+ "description": "another type is invalid",
+ "data": [1, 2],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with array",
+ "schema": {"const": [{ "foo": "bar" }]},
+ "tests": [
+ {
+ "description": "same array is valid",
+ "data": [{"foo": "bar"}],
+ "valid": true
+ },
+ {
+ "description": "another array item is invalid",
+ "data": [2],
+ "valid": false
+ },
+ {
+ "description": "array with additional items is invalid",
+ "data": [1, 2, 3],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with null",
+ "schema": {"const": null},
+ "tests": [
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "not null is invalid",
+ "data": 0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with false does not match 0",
+ "schema": {"const": false},
+ "tests": [
+ {
+ "description": "false is valid",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "integer zero is invalid",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "float zero is invalid",
+ "data": 0.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with true does not match 1",
+ "schema": {"const": true},
+ "tests": [
+ {
+ "description": "true is valid",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "integer one is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "float one is invalid",
+ "data": 1.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with 0 does not match false",
+ "schema": {"const": 0},
+ "tests": [
+ {
+ "description": "false is invalid",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "integer zero is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "float zero is valid",
+ "data": 0.0,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "const with 1 does not match true",
+ "schema": {"const": 1},
+ "tests": [
+ {
+ "description": "true is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "integer one is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "float one is valid",
+ "data": 1.0,
+ "valid": true
+ }
+ ]
+ }
+]
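
The const groups above depend on JSON keeping booleans and numbers distinct even where the host language does not (Python treats `True == 1` and `False == 0` as true). The sketch below restates the fixture's expected `valid` flags as asserts against a draft-7 validator; `Draft7Validator` and `is_valid` are standard jsonschema calls.

```python
from jsonschema import Draft7Validator

# JSON "const": false must not accept the number 0, and vice versa,
# even though Python itself considers False == 0.
assert not Draft7Validator({"const": False}).is_valid(0)
assert not Draft7Validator({"const": 0}).is_valid(False)
assert Draft7Validator({"const": False}).is_valid(False)
assert Draft7Validator({"const": 0}).is_valid(0)
```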
diff --git a/third_party/python/jsonschema/json/tests/draft7/contains.json b/third_party/python/jsonschema/json/tests/draft7/contains.json
new file mode 100644
index 0000000000..67ecbd991a
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/contains.json
@@ -0,0 +1,100 @@
+[
+ {
+ "description": "contains keyword validation",
+ "schema": {
+ "contains": {"minimum": 5}
+ },
+ "tests": [
+ {
+ "description": "array with item matching schema (5) is valid",
+ "data": [3, 4, 5],
+ "valid": true
+ },
+ {
+ "description": "array with item matching schema (6) is valid",
+ "data": [3, 4, 6],
+ "valid": true
+ },
+ {
+ "description": "array with two items matching schema (5, 6) is valid",
+ "data": [3, 4, 5, 6],
+ "valid": true
+ },
+ {
+ "description": "array without items matching schema is invalid",
+ "data": [2, 3, 4],
+ "valid": false
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "not array is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "contains keyword with const keyword",
+ "schema": {
+ "contains": { "const": 5 }
+ },
+ "tests": [
+ {
+ "description": "array with item 5 is valid",
+ "data": [3, 4, 5],
+ "valid": true
+ },
+ {
+ "description": "array with two items 5 is valid",
+ "data": [3, 4, 5, 5],
+ "valid": true
+ },
+ {
+ "description": "array without item 5 is invalid",
+ "data": [1, 2, 3, 4],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "contains keyword with boolean schema true",
+ "schema": {"contains": true},
+ "tests": [
+ {
+ "description": "any non-empty array is valid",
+ "data": ["foo"],
+ "valid": true
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "contains keyword with boolean schema false",
+ "schema": {"contains": false},
+ "tests": [
+ {
+ "description": "any non-empty array is invalid",
+ "data": ["foo"],
+ "valid": false
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "non-arrays are valid",
+ "data": "contains does not apply to strings",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/default.json b/third_party/python/jsonschema/json/tests/draft7/default.json
new file mode 100644
index 0000000000..17629779fb
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/default.json
@@ -0,0 +1,49 @@
+[
+ {
+ "description": "invalid type for default",
+ "schema": {
+ "properties": {
+ "foo": {
+ "type": "integer",
+ "default": []
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when property is specified",
+ "data": {"foo": 13},
+ "valid": true
+ },
+ {
+ "description": "still valid when the invalid default is used",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "invalid string value for default",
+ "schema": {
+ "properties": {
+ "bar": {
+ "type": "string",
+ "minLength": 4,
+ "default": "bad"
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when property is specified",
+ "data": {"bar": "good"},
+ "valid": true
+ },
+ {
+ "description": "still valid when the invalid default is used",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/definitions.json b/third_party/python/jsonschema/json/tests/draft7/definitions.json
new file mode 100644
index 0000000000..436040650a
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/definitions.json
@@ -0,0 +1,32 @@
+[
+ {
+ "description": "valid definition",
+ "schema": {"$ref": "http://json-schema.org/draft-07/schema#"},
+ "tests": [
+ {
+ "description": "valid definition schema",
+ "data": {
+ "definitions": {
+ "foo": {"type": "integer"}
+ }
+ },
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "invalid definition",
+ "schema": {"$ref": "http://json-schema.org/draft-07/schema#"},
+ "tests": [
+ {
+ "description": "invalid definition schema",
+ "data": {
+ "definitions": {
+ "foo": {"type": 1}
+ }
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/dependencies.json b/third_party/python/jsonschema/json/tests/draft7/dependencies.json
new file mode 100644
index 0000000000..8dd78aa5d8
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/dependencies.json
@@ -0,0 +1,268 @@
+[
+ {
+ "description": "dependencies",
+ "schema": {
+ "dependencies": {"bar": ["foo"]}
+ },
+ "tests": [
+ {
+ "description": "neither",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "nondependant",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "with dependency",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "missing dependency",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": ["bar"],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "dependencies with empty array",
+ "schema": {
+ "dependencies": {"bar": []}
+ },
+ "tests": [
+ {
+ "description": "empty object",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "object with one property",
+ "data": {"bar": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple dependencies",
+ "schema": {
+ "dependencies": {"quux": ["foo", "bar"]}
+ },
+ "tests": [
+ {
+ "description": "neither",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "nondependants",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "with dependencies",
+ "data": {"foo": 1, "bar": 2, "quux": 3},
+ "valid": true
+ },
+ {
+ "description": "missing dependency",
+ "data": {"foo": 1, "quux": 2},
+ "valid": false
+ },
+ {
+ "description": "missing other dependency",
+ "data": {"bar": 1, "quux": 2},
+ "valid": false
+ },
+ {
+ "description": "missing both dependencies",
+ "data": {"quux": 1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "multiple dependencies subschema",
+ "schema": {
+ "dependencies": {
+ "bar": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"type": "integer"}
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "no dependency",
+ "data": {"foo": "quux"},
+ "valid": true
+ },
+ {
+ "description": "wrong type",
+ "data": {"foo": "quux", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "wrong type other",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ },
+ {
+ "description": "wrong type both",
+ "data": {"foo": "quux", "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "dependencies with boolean subschemas",
+ "schema": {
+ "dependencies": {
+ "foo": true,
+ "bar": false
+ }
+ },
+ "tests": [
+ {
+ "description": "object with property having schema true is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "object with property having schema false is invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "object with both properties is invalid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "empty array of dependencies",
+ "schema": {
+ "dependencies": {
+ "foo": []
+ }
+ },
+ "tests": [
+ {
+ "description": "object with property is valid",
+ "data": { "foo": 1 },
+ "valid": true
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "non-object is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "dependencies with escaped characters",
+ "schema": {
+ "dependencies": {
+ "foo\nbar": ["foo\rbar"],
+ "foo\tbar": {
+ "minProperties": 4
+ },
+ "foo'bar": {"required": ["foo\"bar"]},
+ "foo\"bar": ["foo'bar"]
+ }
+ },
+ "tests": [
+ {
+ "description": "valid object 1",
+ "data": {
+ "foo\nbar": 1,
+ "foo\rbar": 2
+ },
+ "valid": true
+ },
+ {
+ "description": "valid object 2",
+ "data": {
+ "foo\tbar": 1,
+ "a": 2,
+ "b": 3,
+ "c": 4
+ },
+ "valid": true
+ },
+ {
+ "description": "valid object 3",
+ "data": {
+ "foo'bar": 1,
+ "foo\"bar": 2
+ },
+ "valid": true
+ },
+ {
+ "description": "invalid object 1",
+ "data": {
+ "foo\nbar": 1,
+ "foo": 2
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 2",
+ "data": {
+ "foo\tbar": 1,
+ "a": 2
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 3",
+ "data": {
+ "foo'bar": 1
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 4",
+ "data": {
+ "foo\"bar": 2
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/enum.json b/third_party/python/jsonschema/json/tests/draft7/enum.json
new file mode 100644
index 0000000000..32d79026e1
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/enum.json
@@ -0,0 +1,179 @@
+[
+ {
+ "description": "simple enum validation",
+ "schema": {"enum": [1, 2, 3]},
+ "tests": [
+ {
+ "description": "one of the enum is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "something else is invalid",
+ "data": 4,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "heterogeneous enum validation",
+ "schema": {"enum": [6, "foo", [], true, {"foo": 12}]},
+ "tests": [
+ {
+ "description": "one of the enum is valid",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "something else is invalid",
+ "data": null,
+ "valid": false
+ },
+ {
+ "description": "objects are deep compared",
+ "data": {"foo": false},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enums in properties",
+ "schema": {
+ "type":"object",
+ "properties": {
+ "foo": {"enum":["foo"]},
+ "bar": {"enum":["bar"]}
+ },
+ "required": ["bar"]
+ },
+ "tests": [
+ {
+ "description": "both properties are valid",
+ "data": {"foo":"foo", "bar":"bar"},
+ "valid": true
+ },
+ {
+ "description": "missing optional property is valid",
+ "data": {"bar":"bar"},
+ "valid": true
+ },
+ {
+ "description": "missing required property is invalid",
+ "data": {"foo":"foo"},
+ "valid": false
+ },
+ {
+ "description": "missing all properties is invalid",
+ "data": {},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with escaped characters",
+ "schema": {
+ "enum": ["foo\nbar", "foo\rbar"]
+ },
+ "tests": [
+ {
+ "description": "member 1 is valid",
+ "data": "foo\nbar",
+ "valid": true
+ },
+ {
+ "description": "member 2 is valid",
+ "data": "foo\rbar",
+ "valid": true
+ },
+ {
+ "description": "another string is invalid",
+ "data": "abc",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with false does not match 0",
+ "schema": {"enum": [false]},
+ "tests": [
+ {
+ "description": "false is valid",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "integer zero is invalid",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "float zero is invalid",
+ "data": 0.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with true does not match 1",
+ "schema": {"enum": [true]},
+ "tests": [
+ {
+ "description": "true is valid",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "integer one is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "float one is invalid",
+ "data": 1.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with 0 does not match false",
+ "schema": {"enum": [0]},
+ "tests": [
+ {
+ "description": "false is invalid",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "integer zero is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "float zero is valid",
+ "data": 0.0,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "enum with 1 does not match true",
+ "schema": {"enum": [1]},
+ "tests": [
+ {
+ "description": "true is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "integer one is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "float one is valid",
+ "data": 1.0,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/exclusiveMaximum.json b/third_party/python/jsonschema/json/tests/draft7/exclusiveMaximum.json
new file mode 100644
index 0000000000..dc3cd709d3
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/exclusiveMaximum.json
@@ -0,0 +1,30 @@
+[
+ {
+ "description": "exclusiveMaximum validation",
+ "schema": {
+ "exclusiveMaximum": 3.0
+ },
+ "tests": [
+ {
+ "description": "below the exclusiveMaximum is valid",
+ "data": 2.2,
+ "valid": true
+ },
+ {
+ "description": "boundary point is invalid",
+ "data": 3.0,
+ "valid": false
+ },
+ {
+ "description": "above the exclusiveMaximum is invalid",
+ "data": 3.5,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/exclusiveMinimum.json b/third_party/python/jsonschema/json/tests/draft7/exclusiveMinimum.json
new file mode 100644
index 0000000000..b38d7ecec6
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/exclusiveMinimum.json
@@ -0,0 +1,30 @@
+[
+ {
+ "description": "exclusiveMinimum validation",
+ "schema": {
+ "exclusiveMinimum": 1.1
+ },
+ "tests": [
+ {
+ "description": "above the exclusiveMinimum is valid",
+ "data": 1.2,
+ "valid": true
+ },
+ {
+ "description": "boundary point is invalid",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "below the exclusiveMinimum is invalid",
+ "data": 0.6,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/format.json b/third_party/python/jsonschema/json/tests/draft7/format.json
new file mode 100644
index 0000000000..93305f5cd1
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/format.json
@@ -0,0 +1,614 @@
+[
+ {
+ "description": "validation of e-mail addresses",
+ "schema": {"format": "email"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IDN e-mail addresses",
+ "schema": {"format": "idn-email"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of regexes",
+ "schema": {"format": "regex"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IP addresses",
+ "schema": {"format": "ipv4"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IPv6 addresses",
+ "schema": {"format": "ipv6"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IDN hostnames",
+ "schema": {"format": "idn-hostname"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of hostnames",
+ "schema": {"format": "hostname"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of date strings",
+ "schema": {"format": "date"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of date-time strings",
+ "schema": {"format": "date-time"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of time strings",
+ "schema": {"format": "time"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of JSON pointers",
+ "schema": {"format": "json-pointer"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of relative JSON pointers",
+ "schema": {"format": "relative-json-pointer"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IRIs",
+ "schema": {"format": "iri"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IRI references",
+ "schema": {"format": "iri-reference"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URIs",
+ "schema": {"format": "uri"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URI references",
+ "schema": {"format": "uri-reference"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URI templates",
+ "schema": {"format": "uri-template"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/if-then-else.json b/third_party/python/jsonschema/json/tests/draft7/if-then-else.json
new file mode 100644
index 0000000000..be7328163d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/if-then-else.json
@@ -0,0 +1,188 @@
+[
+ {
+ "description": "ignore if without then or else",
+ "schema": {
+ "if": {
+ "const": 0
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when valid against lone if",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "valid when invalid against lone if",
+ "data": "hello",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ignore then without if",
+ "schema": {
+ "then": {
+ "const": 0
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when valid against lone then",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "valid when invalid against lone then",
+ "data": "hello",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ignore else without if",
+ "schema": {
+ "else": {
+ "const": 0
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when valid against lone else",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "valid when invalid against lone else",
+ "data": "hello",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "if and then without else",
+ "schema": {
+ "if": {
+ "exclusiveMaximum": 0
+ },
+ "then": {
+ "minimum": -10
+ }
+ },
+ "tests": [
+ {
+ "description": "valid through then",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "invalid through then",
+ "data": -100,
+ "valid": false
+ },
+ {
+ "description": "valid when if test fails",
+ "data": 3,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "if and else without then",
+ "schema": {
+ "if": {
+ "exclusiveMaximum": 0
+ },
+ "else": {
+ "multipleOf": 2
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when if test passes",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "valid through else",
+ "data": 4,
+ "valid": true
+ },
+ {
+ "description": "invalid through else",
+ "data": 3,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validate against correct branch, then vs else",
+ "schema": {
+ "if": {
+ "exclusiveMaximum": 0
+ },
+ "then": {
+ "minimum": -10
+ },
+ "else": {
+ "multipleOf": 2
+ }
+ },
+ "tests": [
+ {
+ "description": "valid through then",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "invalid through then",
+ "data": -100,
+ "valid": false
+ },
+ {
+ "description": "valid through else",
+ "data": 4,
+ "valid": true
+ },
+ {
+ "description": "invalid through else",
+ "data": 3,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "non-interference across combined schemas",
+ "schema": {
+ "allOf": [
+ {
+ "if": {
+ "exclusiveMaximum": 0
+ }
+ },
+ {
+ "then": {
+ "minimum": -10
+ }
+ },
+ {
+ "else": {
+ "multipleOf": 2
+ }
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid, but would have been invalid through then",
+ "data": -100,
+ "valid": true
+ },
+ {
+ "description": "valid, but would have been invalid through else",
+ "data": 3,
+ "valid": true
+ }
+ ]
+ }
+]
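
The `if`/`then`/`else` groups above can be reproduced directly with a draft-7 validator. This sketch copies the schema from the "validate against correct branch, then vs else" group and checks the same four data points; the inline comments restate the fixture's expectations rather than adding new behaviour.

```python
from jsonschema import Draft7Validator

# Schema copied from the "validate against correct branch, then vs else" group.
schema = {
    "if": {"exclusiveMaximum": 0},
    "then": {"minimum": -10},
    "else": {"multipleOf": 2},
}
validator = Draft7Validator(schema)

assert validator.is_valid(-1)        # "if" passes, "then" (minimum -10) passes
assert not validator.is_valid(-100)  # "if" passes, "then" fails
assert validator.is_valid(4)         # "if" fails, "else" (multipleOf 2) passes
assert not validator.is_valid(3)     # "if" fails, "else" fails
```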
diff --git a/third_party/python/jsonschema/json/tests/draft7/items.json b/third_party/python/jsonschema/json/tests/draft7/items.json
new file mode 100644
index 0000000000..67f11840a2
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/items.json
@@ -0,0 +1,250 @@
+[
+ {
+ "description": "a schema given for items",
+ "schema": {
+ "items": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "valid items",
+ "data": [ 1, 2, 3 ],
+ "valid": true
+ },
+ {
+ "description": "wrong type of items",
+ "data": [1, "x"],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": {"foo" : "bar"},
+ "valid": true
+ },
+ {
+ "description": "JavaScript pseudo-array is valid",
+ "data": {
+ "0": "invalid",
+ "length": 1
+ },
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "an array of schemas for items",
+ "schema": {
+ "items": [
+ {"type": "integer"},
+ {"type": "string"}
+ ]
+ },
+ "tests": [
+ {
+ "description": "correct types",
+ "data": [ 1, "foo" ],
+ "valid": true
+ },
+ {
+ "description": "wrong types",
+ "data": [ "foo", 1 ],
+ "valid": false
+ },
+ {
+ "description": "incomplete array of items",
+ "data": [ 1 ],
+ "valid": true
+ },
+ {
+ "description": "array with additional items",
+ "data": [ 1, "foo", true ],
+ "valid": true
+ },
+ {
+ "description": "empty array",
+ "data": [ ],
+ "valid": true
+ },
+ {
+ "description": "JavaScript pseudo-array is valid",
+ "data": {
+ "0": "invalid",
+ "1": "valid",
+ "length": 2
+ },
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items with boolean schema (true)",
+ "schema": {"items": true},
+ "tests": [
+ {
+ "description": "any array is valid",
+ "data": [ 1, "foo", true ],
+ "valid": true
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items with boolean schema (false)",
+ "schema": {"items": false},
+ "tests": [
+ {
+ "description": "any non-empty array is invalid",
+ "data": [ 1, "foo", true ],
+ "valid": false
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items with boolean schemas",
+ "schema": {
+ "items": [true, false]
+ },
+ "tests": [
+ {
+ "description": "array with one item is valid",
+ "data": [ 1 ],
+ "valid": true
+ },
+ {
+ "description": "array with two items is invalid",
+ "data": [ 1, "foo" ],
+ "valid": false
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items and subitems",
+ "schema": {
+ "definitions": {
+ "item": {
+ "type": "array",
+ "additionalItems": false,
+ "items": [
+ { "$ref": "#/definitions/sub-item" },
+ { "$ref": "#/definitions/sub-item" }
+ ]
+ },
+ "sub-item": {
+ "type": "object",
+ "required": ["foo"]
+ }
+ },
+ "type": "array",
+ "additionalItems": false,
+ "items": [
+ { "$ref": "#/definitions/item" },
+ { "$ref": "#/definitions/item" },
+ { "$ref": "#/definitions/item" }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid items",
+ "data": [
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": true
+ },
+ {
+ "description": "too many items",
+ "data": [
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "too many sub-items",
+ "data": [
+ [ {"foo": null}, {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "wrong item",
+ "data": [
+ {"foo": null},
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "wrong sub-item",
+ "data": [
+ [ {}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "fewer items is valid",
+ "data": [
+ [ {"foo": null} ],
+ [ {"foo": null} ]
+ ],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested items",
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "number"
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid nested array",
+ "data": [[[[1]], [[2],[3]]], [[[4], [5], [6]]]],
+ "valid": true
+ },
+ {
+ "description": "nested array with invalid type",
+ "data": [[[["1"]], [[2],[3]]], [[[4], [5], [6]]]],
+ "valid": false
+ },
+ {
+ "description": "not deep enough",
+ "data": [[[1], [2],[3]], [[4], [5], [6]]],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/maxItems.json b/third_party/python/jsonschema/json/tests/draft7/maxItems.json
new file mode 100644
index 0000000000..3b53a6b371
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/maxItems.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "maxItems validation",
+ "schema": {"maxItems": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": [1],
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": [1, 2, 3],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": "foobar",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/maxLength.json b/third_party/python/jsonschema/json/tests/draft7/maxLength.json
new file mode 100644
index 0000000000..811d35b253
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/maxLength.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "maxLength validation",
+ "schema": {"maxLength": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": "f",
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": "fo",
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ },
+ {
+ "description": "two supplementary Unicode code points is long enough",
+ "data": "\uD83D\uDCA9\uD83D\uDCA9",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/maxProperties.json b/third_party/python/jsonschema/json/tests/draft7/maxProperties.json
new file mode 100644
index 0000000000..513731e4c8
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/maxProperties.json
@@ -0,0 +1,38 @@
+[
+ {
+ "description": "maxProperties validation",
+ "schema": {"maxProperties": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": {"foo": 1, "bar": 2, "baz": 3},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/maximum.json b/third_party/python/jsonschema/json/tests/draft7/maximum.json
new file mode 100644
index 0000000000..8150984ee5
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/maximum.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "maximum validation",
+ "schema": {"maximum": 3.0},
+ "tests": [
+ {
+ "description": "below the maximum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": 3.0,
+ "valid": true
+ },
+ {
+ "description": "above the maximum is invalid",
+ "data": 3.5,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/minItems.json b/third_party/python/jsonschema/json/tests/draft7/minItems.json
new file mode 100644
index 0000000000..ed5118815e
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/minItems.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "minItems validation",
+ "schema": {"minItems": 1},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": [1],
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": "",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/minLength.json b/third_party/python/jsonschema/json/tests/draft7/minLength.json
new file mode 100644
index 0000000000..3f09158dee
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/minLength.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "minLength validation",
+ "schema": {"minLength": 2},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": "fo",
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": "f",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "one supplementary Unicode code point is not long enough",
+ "data": "\uD83D\uDCA9",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/minProperties.json b/third_party/python/jsonschema/json/tests/draft7/minProperties.json
new file mode 100644
index 0000000000..49a0726e01
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/minProperties.json
@@ -0,0 +1,38 @@
+[
+ {
+ "description": "minProperties validation",
+ "schema": {"minProperties": 1},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/minimum.json b/third_party/python/jsonschema/json/tests/draft7/minimum.json
new file mode 100644
index 0000000000..2a9c42b3c4
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/minimum.json
@@ -0,0 +1,59 @@
+[
+ {
+ "description": "minimum validation",
+ "schema": {"minimum": 1.1},
+ "tests": [
+ {
+ "description": "above the minimum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": 1.1,
+ "valid": true
+ },
+ {
+ "description": "below the minimum is invalid",
+ "data": 0.6,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "minimum validation with signed integer",
+ "schema": {"minimum": -2},
+ "tests": [
+ {
+ "description": "negative above the minimum is valid",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "positive above the minimum is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": -2,
+ "valid": true
+ },
+ {
+ "description": "below the minimum is invalid",
+ "data": -3,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/multipleOf.json b/third_party/python/jsonschema/json/tests/draft7/multipleOf.json
new file mode 100644
index 0000000000..ca3b761805
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/multipleOf.json
@@ -0,0 +1,60 @@
+[
+ {
+ "description": "by int",
+ "schema": {"multipleOf": 2},
+ "tests": [
+ {
+ "description": "int by int",
+ "data": 10,
+ "valid": true
+ },
+ {
+ "description": "int by int fail",
+ "data": 7,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "by number",
+ "schema": {"multipleOf": 1.5},
+ "tests": [
+ {
+ "description": "zero is multiple of anything",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "4.5 is multiple of 1.5",
+ "data": 4.5,
+ "valid": true
+ },
+ {
+ "description": "35 is not multiple of 1.5",
+ "data": 35,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "by small number",
+ "schema": {"multipleOf": 0.0001},
+ "tests": [
+ {
+ "description": "0.0075 is multiple of 0.0001",
+ "data": 0.0075,
+ "valid": true
+ },
+ {
+ "description": "0.00751 is not multiple of 0.0001",
+ "data": 0.00751,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/not.json b/third_party/python/jsonschema/json/tests/draft7/not.json
new file mode 100644
index 0000000000..98de0eda8d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/not.json
@@ -0,0 +1,117 @@
+[
+ {
+ "description": "not",
+ "schema": {
+ "not": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "allowed",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "disallowed",
+ "data": 1,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not multiple types",
+ "schema": {
+ "not": {"type": ["integer", "boolean"]}
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "other mismatch",
+ "data": true,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not more complex schema",
+ "schema": {
+ "not": {
+ "type": "object",
+ "properties": {
+ "foo": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "other match",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"foo": "bar"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "forbidden property",
+ "schema": {
+ "properties": {
+ "foo": {
+ "not": {}
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "property present",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "property absent",
+ "data": {"bar": 1, "baz": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "not with boolean schema true",
+ "schema": {"not": true},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not with boolean schema false",
+ "schema": {"not": false},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/oneOf.json b/third_party/python/jsonschema/json/tests/draft7/oneOf.json
new file mode 100644
index 0000000000..57640b7afb
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/oneOf.json
@@ -0,0 +1,206 @@
+[
+ {
+ "description": "oneOf",
+ "schema": {
+ "oneOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "minimum": 2
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first oneOf valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "second oneOf valid",
+ "data": 2.5,
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "neither oneOf valid",
+ "data": 1.5,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with base schema",
+ "schema": {
+ "type": "string",
+ "oneOf" : [
+ {
+ "minLength": 2
+ },
+ {
+ "maxLength": 4
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "mismatch base schema",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "one oneOf valid",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, all true",
+ "schema": {"oneOf": [true, true, true]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, one true",
+ "schema": {"oneOf": [true, false, false]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, more than one true",
+ "schema": {"oneOf": [true, true, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, all false",
+ "schema": {"oneOf": [false, false, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf complex types",
+ "schema": {
+ "oneOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first oneOf valid (complex)",
+ "data": {"bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second oneOf valid (complex)",
+ "data": {"foo": "baz"},
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid (complex)",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "neither oneOf valid (complex)",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with empty schema",
+ "schema": {
+ "oneOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "one valid - valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "both valid - invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with required",
+ "schema": {
+ "type": "object",
+ "oneOf": [
+ { "required": ["foo", "bar"] },
+ { "required": ["foo", "baz"] }
+ ]
+ },
+ "tests": [
+ {
+ "description": "both invalid - invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "first valid - valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second valid - valid",
+ "data": {"foo": 1, "baz": 3},
+ "valid": true
+ },
+ {
+ "description": "both valid - invalid",
+ "data": {"foo": 1, "bar": 2, "baz" : 3},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/bignum.json b/third_party/python/jsonschema/json/tests/draft7/optional/bignum.json
new file mode 100644
index 0000000000..fac275e21f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/bignum.json
@@ -0,0 +1,105 @@
+[
+ {
+ "description": "integer",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "a bignum is an integer",
+ "data": 12345678910111213141516171819202122232425262728293031,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "number",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "a bignum is a number",
+ "data": 98249283749234923498293171823948729348710298301928331,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "integer",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "a negative bignum is an integer",
+ "data": -12345678910111213141516171819202122232425262728293031,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "number",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "a negative bignum is a number",
+ "data": -98249283749234923498293171823948729348710298301928331,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "string",
+ "schema": {"type": "string"},
+ "tests": [
+ {
+ "description": "a bignum is not a string",
+ "data": 98249283749234923498293171823948729348710298301928331,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "integer comparison",
+ "schema": {"maximum": 18446744073709551615},
+ "tests": [
+ {
+ "description": "comparison works for high numbers",
+ "data": 18446744073709551600,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "float comparison with high precision",
+ "schema": {
+ "exclusiveMaximum": 972783798187987123879878123.18878137
+ },
+ "tests": [
+ {
+ "description": "comparison works for high numbers",
+ "data": 972783798187987123879878123.188781371,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "integer comparison",
+ "schema": {"minimum": -18446744073709551615},
+ "tests": [
+ {
+ "description": "comparison works for very negative numbers",
+ "data": -18446744073709551600,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "float comparison with high precision on negative numbers",
+ "schema": {
+ "exclusiveMinimum": -972783798187987123879878123.18878137
+ },
+ "tests": [
+ {
+ "description": "comparison works for very negative numbers",
+ "data": -972783798187987123879878123.188781371,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/content.json b/third_party/python/jsonschema/json/tests/draft7/optional/content.json
new file mode 100644
index 0000000000..3f5a7430b2
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/content.json
@@ -0,0 +1,77 @@
+[
+ {
+ "description": "validation of string-encoded content based on media type",
+ "schema": {
+ "contentMediaType": "application/json"
+ },
+ "tests": [
+ {
+ "description": "a valid JSON document",
+ "data": "{\"foo\": \"bar\"}",
+ "valid": true
+ },
+ {
+ "description": "an invalid JSON document",
+ "data": "{:}",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of binary string-encoding",
+ "schema": {
+ "contentEncoding": "base64"
+ },
+ "tests": [
+ {
+ "description": "a valid base64 string",
+ "data": "eyJmb28iOiAiYmFyIn0K",
+ "valid": true
+ },
+ {
+ "description": "an invalid base64 string (% is not a valid character)",
+ "data": "eyJmb28iOi%iYmFyIn0K",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of binary-encoded media type documents",
+ "schema": {
+ "contentMediaType": "application/json",
+ "contentEncoding": "base64"
+ },
+ "tests": [
+ {
+ "description": "a valid base64-encoded JSON document",
+ "data": "eyJmb28iOiAiYmFyIn0K",
+ "valid": true
+ },
+ {
+ "description": "a validly-encoded invalid JSON document",
+ "data": "ezp9Cg==",
+ "valid": false
+ },
+ {
+ "description": "an invalid base64 string that is valid JSON",
+ "data": "{}",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/ecmascript-regex.json b/third_party/python/jsonschema/json/tests/draft7/optional/ecmascript-regex.json
new file mode 100644
index 0000000000..d82e0feb03
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/ecmascript-regex.json
@@ -0,0 +1,213 @@
+[
+ {
+ "description": "ECMA 262 regex non-compliance",
+ "schema": { "format": "regex" },
+ "tests": [
+ {
+ "description": "ECMA 262 has no support for \\Z anchor from .NET",
+ "data": "^\\S(|(.|\\n)*\\S)\\Z",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex $ does not match trailing newline",
+ "schema": {
+ "type": "string",
+ "pattern": "^abc$"
+ },
+ "tests": [
+ {
+ "description": "matches in Python, but should not in jsonschema",
+ "data": "abc\n",
+ "valid": false
+ },
+ {
+ "description": "should match",
+ "data": "abc",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex converts \\a to ascii BEL",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\a$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\a",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0007",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex escapes control codes with \\c and upper letter",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\cC$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\cC",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0003",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex escapes control codes with \\c and lower letter",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\cc$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\cc",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0003",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\d matches ascii digits only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\d$"
+ },
+ "tests": [
+ {
+ "description": "ASCII zero matches",
+ "data": "0",
+ "valid": true
+ },
+ {
+ "description": "NKO DIGIT ZERO does not match (unlike e.g. Python)",
+ "data": "߀",
+ "valid": false
+ },
+ {
+ "description": "NKO DIGIT ZERO (as \\u escape) does not match",
+ "data": "\u07c0",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\D matches everything but ascii digits",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\D$"
+ },
+ "tests": [
+ {
+ "description": "ASCII zero does not match",
+ "data": "0",
+ "valid": false
+ },
+ {
+ "description": "NKO DIGIT ZERO matches (unlike e.g. Python)",
+ "data": "߀",
+ "valid": true
+ },
+ {
+ "description": "NKO DIGIT ZERO (as \\u escape) matches",
+ "data": "\u07c0",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\w matches ascii letters only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\w$"
+ },
+ "tests": [
+ {
+ "description": "ASCII 'a' matches",
+ "data": "a",
+ "valid": true
+ },
+ {
+ "description": "latin-1 e-acute does not match (unlike e.g. Python)",
+ "data": "é",
+ "valid": false
+ }
+ ]
+ },
+ {
+        "description": "ECMA 262 \\W matches everything but ascii letters",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\W$"
+ },
+ "tests": [
+ {
+ "description": "ASCII 'a' does not match",
+ "data": "a",
+ "valid": false
+ },
+ {
+ "description": "latin-1 e-acute matches (unlike e.g. Python)",
+ "data": "é",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\s matches ascii whitespace only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\s$"
+ },
+ "tests": [
+ {
+ "description": "ASCII space matches",
+ "data": " ",
+ "valid": true
+ },
+ {
+ "description": "latin-1 non-breaking-space does not match (unlike e.g. Python)",
+ "data": "\u00a0",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\S matches everything but ascii whitespace",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\S$"
+ },
+ "tests": [
+ {
+ "description": "ASCII space does not match",
+ "data": " ",
+ "valid": false
+ },
+ {
+ "description": "latin-1 non-breaking-space matches (unlike e.g. Python)",
+ "data": "\u00a0",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/date-time.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/date-time.json
new file mode 100644
index 0000000000..dfccee6e67
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/date-time.json
@@ -0,0 +1,53 @@
+[
+ {
+ "description": "validation of date-time strings",
+ "schema": {"format": "date-time"},
+ "tests": [
+ {
+ "description": "a valid date-time string",
+ "data": "1963-06-19T08:30:06.283185Z",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string without second fraction",
+ "data": "1963-06-19T08:30:06Z",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string with plus offset",
+ "data": "1937-01-01T12:00:27.87+00:20",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string with minus offset",
+ "data": "1990-12-31T15:59:50.123-08:00",
+ "valid": true
+ },
+ {
+                "description": "an invalid day in date-time string",
+ "data": "1990-02-31T15:59:60.123-08:00",
+ "valid": false
+ },
+ {
+ "description": "an invalid offset in date-time string",
+ "data": "1990-12-31T15:59:60-24:00",
+ "valid": false
+ },
+ {
+ "description": "an invalid date-time string",
+ "data": "06/19/1963 08:30:06 PST",
+ "valid": false
+ },
+ {
+ "description": "case-insensitive T and Z",
+ "data": "1963-06-19t08:30:06.283185z",
+ "valid": true
+ },
+ {
+                "description": "only RFC3339, not all of ISO 8601, are valid",
+ "data": "2013-350T01:01:01",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/date.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/date.json
new file mode 100644
index 0000000000..cd23baae3a
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/date.json
@@ -0,0 +1,23 @@
+[
+ {
+ "description": "validation of date strings",
+ "schema": {"format": "date"},
+ "tests": [
+ {
+ "description": "a valid date string",
+ "data": "1963-06-19",
+ "valid": true
+ },
+ {
+ "description": "an invalid date-time string",
+ "data": "06/19/1963",
+ "valid": false
+ },
+ {
+                "description": "only RFC3339, not all of ISO 8601, are valid",
+ "data": "2013-350",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/email.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/email.json
new file mode 100644
index 0000000000..c837c84bc1
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/email.json
@@ -0,0 +1,18 @@
+[
+ {
+ "description": "validation of e-mail addresses",
+ "schema": {"format": "email"},
+ "tests": [
+ {
+ "description": "a valid e-mail address",
+ "data": "joe.bloggs@example.com",
+ "valid": true
+ },
+ {
+ "description": "an invalid e-mail address",
+ "data": "2962",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/hostname.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/hostname.json
new file mode 100644
index 0000000000..d22e57db03
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/hostname.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "validation of host names",
+ "schema": {"format": "hostname"},
+ "tests": [
+ {
+ "description": "a valid host name",
+ "data": "www.example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid punycoded IDN hostname",
+ "data": "xn--4gbwdl.xn--wgbh1c",
+ "valid": true
+ },
+ {
+ "description": "a host name starting with an illegal character",
+ "data": "-a-host-name-that-starts-with--",
+ "valid": false
+ },
+ {
+ "description": "a host name containing illegal characters",
+ "data": "not_a_valid_host_name",
+ "valid": false
+ },
+ {
+ "description": "a host name with a component too long",
+ "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/idn-email.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/idn-email.json
new file mode 100644
index 0000000000..637409ea8f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/idn-email.json
@@ -0,0 +1,18 @@
+[
+ {
+        "description": "validation of internationalized e-mail addresses",
+ "schema": {"format": "idn-email"},
+ "tests": [
+ {
+ "description": "a valid idn e-mail (example@example.test in Hangul)",
+ "data": "실례@실례.테스트",
+ "valid": true
+ },
+ {
+ "description": "an invalid idn e-mail address",
+ "data": "2962",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/idn-hostname.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/idn-hostname.json
new file mode 100644
index 0000000000..3291820e6f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/idn-hostname.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "validation of internationalized host names",
+ "schema": {"format": "idn-hostname"},
+ "tests": [
+ {
+ "description": "a valid host name (example.test in Hangul)",
+ "data": "실례.테스트",
+ "valid": true
+ },
+ {
+ "description": "illegal first char U+302E Hangul single dot tone mark",
+ "data": "〮실례.테스트",
+ "valid": false
+ },
+ {
+ "description": "contains illegal char U+302E Hangul single dot tone mark",
+ "data": "실〮례.테스트",
+ "valid": false
+ },
+ {
+ "description": "a host name with a component too long",
+ "data": "실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실례례테스트례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례테스트례례실례.테스트",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/ipv4.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/ipv4.json
new file mode 100644
index 0000000000..661148a74d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/ipv4.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "validation of IP addresses",
+ "schema": {"format": "ipv4"},
+ "tests": [
+ {
+ "description": "a valid IP address",
+ "data": "192.168.0.1",
+ "valid": true
+ },
+ {
+ "description": "an IP address with too many components",
+ "data": "127.0.0.0.1",
+ "valid": false
+ },
+ {
+ "description": "an IP address with out-of-range values",
+ "data": "256.256.256.256",
+ "valid": false
+ },
+ {
+ "description": "an IP address without 4 components",
+ "data": "127.0",
+ "valid": false
+ },
+ {
+ "description": "an IP address as an integer",
+ "data": "0x7f000001",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/ipv6.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/ipv6.json
new file mode 100644
index 0000000000..f67559b35d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/ipv6.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "validation of IPv6 addresses",
+ "schema": {"format": "ipv6"},
+ "tests": [
+ {
+ "description": "a valid IPv6 address",
+ "data": "::1",
+ "valid": true
+ },
+ {
+ "description": "an IPv6 address with out-of-range values",
+ "data": "12345::",
+ "valid": false
+ },
+ {
+ "description": "an IPv6 address with too many components",
+ "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1",
+ "valid": false
+ },
+ {
+ "description": "an IPv6 address containing illegal characters",
+ "data": "::laptop",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/iri-reference.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/iri-reference.json
new file mode 100644
index 0000000000..1fd779c23c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/iri-reference.json
@@ -0,0 +1,43 @@
+[
+ {
+ "description": "validation of IRI References",
+ "schema": {"format": "iri-reference"},
+ "tests": [
+ {
+ "description": "a valid IRI",
+ "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx",
+ "valid": true
+ },
+ {
+ "description": "a valid protocol-relative IRI Reference",
+ "data": "//ƒøø.ßår/?∂éœ=πîx#πîüx",
+ "valid": true
+ },
+ {
+ "description": "a valid relative IRI Reference",
+ "data": "/âππ",
+ "valid": true
+ },
+ {
+ "description": "an invalid IRI Reference",
+ "data": "\\\\WINDOWS\\filëßåré",
+ "valid": false
+ },
+ {
+ "description": "a valid IRI Reference",
+ "data": "âππ",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI fragment",
+ "data": "#ƒrägmênt",
+ "valid": true
+ },
+ {
+ "description": "an invalid IRI fragment",
+ "data": "#ƒräg\\mênt",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/iri.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/iri.json
new file mode 100644
index 0000000000..ed54094c01
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/iri.json
@@ -0,0 +1,53 @@
+[
+ {
+ "description": "validation of IRIs",
+ "schema": {"format": "iri"},
+ "tests": [
+ {
+ "description": "a valid IRI with anchor tag",
+ "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx",
+ "valid": true
+ },
+ {
+                "description": "a valid IRI with anchor tag and parentheses",
+ "data": "http://ƒøø.com/blah_(wîkïpédiå)_blah#ßité-1",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI with URL-encoded stuff",
+ "data": "http://ƒøø.ßår/?q=Test%20URL-encoded%20stuff",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI with many special characters",
+ "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI based on IPv6",
+ "data": "http://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]",
+ "valid": true
+ },
+ {
+ "description": "an invalid IRI based on IPv6",
+ "data": "http://2001:0db8:85a3:0000:0000:8a2e:0370:7334",
+ "valid": false
+ },
+ {
+ "description": "an invalid relative IRI Reference",
+ "data": "/abc",
+ "valid": false
+ },
+ {
+ "description": "an invalid IRI",
+ "data": "\\\\WINDOWS\\filëßåré",
+ "valid": false
+ },
+ {
+ "description": "an invalid IRI though valid IRI reference",
+ "data": "âππ",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/json-pointer.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/json-pointer.json
new file mode 100644
index 0000000000..65c2f064f0
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/json-pointer.json
@@ -0,0 +1,168 @@
+[
+ {
+ "description": "validation of JSON-pointers (JSON String Representation)",
+ "schema": {"format": "json-pointer"},
+ "tests": [
+ {
+ "description": "a valid JSON-pointer",
+ "data": "/foo/bar~0/baz~1/%a",
+ "valid": true
+ },
+ {
+ "description": "not a valid JSON-pointer (~ not escaped)",
+ "data": "/foo/bar~",
+ "valid": false
+ },
+ {
+ "description": "valid JSON-pointer with empty segment",
+ "data": "/foo//bar",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer with the last empty segment",
+ "data": "/foo/bar/",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #1",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #2",
+ "data": "/foo",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #3",
+ "data": "/foo/0",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #4",
+ "data": "/",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #5",
+ "data": "/a~1b",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #6",
+ "data": "/c%d",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #7",
+ "data": "/e^f",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #8",
+ "data": "/g|h",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #9",
+ "data": "/i\\j",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #10",
+ "data": "/k\"l",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #11",
+ "data": "/ ",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #12",
+ "data": "/m~0n",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer used adding to the last array position",
+ "data": "/foo/-",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (- used as object member name)",
+ "data": "/foo/-/bar",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (multiple escaped characters)",
+ "data": "/~1~0~0~1~1",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (escaped with fraction part) #1",
+ "data": "/~1.1",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (escaped with fraction part) #2",
+ "data": "/~0.1",
+ "valid": true
+ },
+ {
+ "description": "not a valid JSON-pointer (URI Fragment Identifier) #1",
+ "data": "#",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (URI Fragment Identifier) #2",
+ "data": "#/",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (URI Fragment Identifier) #3",
+ "data": "#a",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (some escaped, but not all) #1",
+ "data": "/~0~",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (some escaped, but not all) #2",
+ "data": "/~0/~",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (wrong escape character) #1",
+ "data": "/~2",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (wrong escape character) #2",
+ "data": "/~-1",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (multiple characters not escaped)",
+ "data": "/~~",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (isn't empty nor starts with /) #1",
+ "data": "a",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (isn't empty nor starts with /) #2",
+ "data": "0",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (isn't empty nor starts with /) #3",
+ "data": "a/a",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/regex.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/regex.json
new file mode 100644
index 0000000000..d99d021ec0
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/regex.json
@@ -0,0 +1,18 @@
+[
+ {
+ "description": "validation of regular expressions",
+ "schema": {"format": "regex"},
+ "tests": [
+ {
+ "description": "a valid regular expression",
+ "data": "([abc])+\\s+$",
+ "valid": true
+ },
+ {
+ "description": "a regular expression with unclosed parens is invalid",
+ "data": "^(abc]",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/relative-json-pointer.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/relative-json-pointer.json
new file mode 100644
index 0000000000..ceeb743a32
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/relative-json-pointer.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "validation of Relative JSON Pointers (RJP)",
+ "schema": {"format": "relative-json-pointer"},
+ "tests": [
+ {
+ "description": "a valid upwards RJP",
+ "data": "1",
+ "valid": true
+ },
+ {
+ "description": "a valid downwards RJP",
+ "data": "0/foo/bar",
+ "valid": true
+ },
+ {
+ "description": "a valid up and then down RJP, with array index",
+ "data": "2/0/baz/1/zip",
+ "valid": true
+ },
+ {
+ "description": "a valid RJP taking the member or index name",
+ "data": "0#",
+ "valid": true
+ },
+ {
+ "description": "an invalid RJP that is a valid JSON Pointer",
+ "data": "/foo/bar",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/time.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/time.json
new file mode 100644
index 0000000000..4ec8a01a3e
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/time.json
@@ -0,0 +1,23 @@
+[
+ {
+ "description": "validation of time strings",
+ "schema": {"format": "time"},
+ "tests": [
+ {
+ "description": "a valid time string",
+ "data": "08:30:06.283185Z",
+ "valid": true
+ },
+ {
+ "description": "an invalid time string",
+ "data": "08:30:06 PST",
+ "valid": false
+ },
+ {
+                "description": "only RFC3339, not all of ISO 8601, are valid",
+ "data": "01:01:01,1111",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/uri-reference.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/uri-reference.json
new file mode 100644
index 0000000000..e4c9eef63c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/uri-reference.json
@@ -0,0 +1,43 @@
+[
+ {
+ "description": "validation of URI References",
+ "schema": {"format": "uri-reference"},
+ "tests": [
+ {
+ "description": "a valid URI",
+ "data": "http://foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+ "description": "a valid protocol-relative URI Reference",
+ "data": "//foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+ "description": "a valid relative URI Reference",
+ "data": "/abc",
+ "valid": true
+ },
+ {
+ "description": "an invalid URI Reference",
+ "data": "\\\\WINDOWS\\fileshare",
+ "valid": false
+ },
+ {
+ "description": "a valid URI Reference",
+ "data": "abc",
+ "valid": true
+ },
+ {
+ "description": "a valid URI fragment",
+ "data": "#fragment",
+ "valid": true
+ },
+ {
+ "description": "an invalid URI fragment",
+ "data": "#frag\\ment",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/uri-template.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/uri-template.json
new file mode 100644
index 0000000000..33ab76ee73
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/uri-template.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "format: uri-template",
+ "schema": {"format": "uri-template"},
+ "tests": [
+ {
+ "description": "a valid uri-template",
+ "data": "http://example.com/dictionary/{term:1}/{term}",
+ "valid": true
+ },
+ {
+ "description": "an invalid uri-template",
+ "data": "http://example.com/dictionary/{term:1}/{term",
+ "valid": false
+ },
+ {
+ "description": "a valid uri-template without variables",
+ "data": "http://example.com/dictionary",
+ "valid": true
+ },
+ {
+ "description": "a valid relative uri-template",
+ "data": "dictionary/{term:1}/{term}",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/format/uri.json b/third_party/python/jsonschema/json/tests/draft7/optional/format/uri.json
new file mode 100644
index 0000000000..25cc40c80a
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/format/uri.json
@@ -0,0 +1,103 @@
+[
+ {
+ "description": "validation of URIs",
+ "schema": {"format": "uri"},
+ "tests": [
+ {
+ "description": "a valid URL with anchor tag",
+ "data": "http://foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+                "description": "a valid URL with anchor tag and parentheses",
+ "data": "http://foo.com/blah_(wikipedia)_blah#cite-1",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with URL-encoded stuff",
+ "data": "http://foo.bar/?q=Test%20URL-encoded%20stuff",
+ "valid": true
+ },
+ {
+ "description": "a valid puny-coded URL ",
+ "data": "http://xn--nw2a.xn--j6w193g/",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with many special characters",
+ "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid URL based on IPv4",
+ "data": "http://223.255.255.254",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with ftp scheme",
+ "data": "ftp://ftp.is.co.za/rfc/rfc1808.txt",
+ "valid": true
+ },
+ {
+ "description": "a valid URL for a simple text file",
+ "data": "http://www.ietf.org/rfc/rfc2396.txt",
+ "valid": true
+ },
+ {
+ "description": "a valid URL ",
+ "data": "ldap://[2001:db8::7]/c=GB?objectClass?one",
+ "valid": true
+ },
+ {
+ "description": "a valid mailto URI",
+ "data": "mailto:John.Doe@example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid newsgroup URI",
+ "data": "news:comp.infosystems.www.servers.unix",
+ "valid": true
+ },
+ {
+ "description": "a valid tel URI",
+ "data": "tel:+1-816-555-1212",
+ "valid": true
+ },
+ {
+ "description": "a valid URN",
+ "data": "urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
+ "valid": true
+ },
+ {
+ "description": "an invalid protocol-relative URI Reference",
+ "data": "//foo.bar/?baz=qux#quux",
+ "valid": false
+ },
+ {
+ "description": "an invalid relative URI Reference",
+ "data": "/abc",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI",
+ "data": "\\\\WINDOWS\\fileshare",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI though valid URI reference",
+ "data": "abc",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI with spaces",
+ "data": "http:// shouldfail.com",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI with spaces and missing scheme",
+ "data": ":// should fail",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/optional/zeroTerminatedFloats.json b/third_party/python/jsonschema/json/tests/draft7/optional/zeroTerminatedFloats.json
new file mode 100644
index 0000000000..1bcdf96036
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/optional/zeroTerminatedFloats.json
@@ -0,0 +1,15 @@
+[
+ {
+ "description": "some languages do not distinguish between different types of numeric value",
+ "schema": {
+ "type": "integer"
+ },
+ "tests": [
+ {
+ "description": "a float without fractional part is an integer",
+ "data": 1.0,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/pattern.json b/third_party/python/jsonschema/json/tests/draft7/pattern.json
new file mode 100644
index 0000000000..25e7299731
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/pattern.json
@@ -0,0 +1,34 @@
+[
+ {
+ "description": "pattern validation",
+ "schema": {"pattern": "^a*$"},
+ "tests": [
+ {
+ "description": "a matching pattern is valid",
+ "data": "aaa",
+ "valid": true
+ },
+ {
+ "description": "a non-matching pattern is invalid",
+ "data": "abc",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": true,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "pattern is not anchored",
+ "schema": {"pattern": "a+"},
+ "tests": [
+ {
+ "description": "matches a substring",
+ "data": "xxaayy",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/patternProperties.json b/third_party/python/jsonschema/json/tests/draft7/patternProperties.json
new file mode 100644
index 0000000000..1d04a1675c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/patternProperties.json
@@ -0,0 +1,151 @@
+[
+ {
+ "description":
+ "patternProperties validates properties matching a regex",
+ "schema": {
+ "patternProperties": {
+ "f.*o": {"type": "integer"}
+ }
+ },
+ "tests": [
+ {
+ "description": "a single valid match is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "multiple valid matches is valid",
+ "data": {"foo": 1, "foooooo" : 2},
+ "valid": true
+ },
+ {
+ "description": "a single invalid match is invalid",
+ "data": {"foo": "bar", "fooooo": 2},
+ "valid": false
+ },
+ {
+ "description": "multiple invalid matches is invalid",
+ "data": {"foo": "bar", "foooooo" : "baz"},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": ["foo"],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple simultaneous patternProperties are validated",
+ "schema": {
+ "patternProperties": {
+ "a*": {"type": "integer"},
+ "aaa*": {"maximum": 20}
+ }
+ },
+ "tests": [
+ {
+ "description": "a single valid match is valid",
+ "data": {"a": 21},
+ "valid": true
+ },
+ {
+ "description": "a simultaneous match is valid",
+ "data": {"aaaa": 18},
+ "valid": true
+ },
+ {
+ "description": "multiple matches is valid",
+ "data": {"a": 21, "aaaa": 18},
+ "valid": true
+ },
+ {
+ "description": "an invalid due to one is invalid",
+ "data": {"a": "bar"},
+ "valid": false
+ },
+ {
+ "description": "an invalid due to the other is invalid",
+ "data": {"aaaa": 31},
+ "valid": false
+ },
+ {
+ "description": "an invalid due to both is invalid",
+ "data": {"aaa": "foo", "aaaa": 31},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "regexes are not anchored by default and are case sensitive",
+ "schema": {
+ "patternProperties": {
+ "[0-9]{2,}": { "type": "boolean" },
+ "X_": { "type": "string" }
+ }
+ },
+ "tests": [
+ {
+ "description": "non recognized members are ignored",
+ "data": { "answer 1": "42" },
+ "valid": true
+ },
+ {
+ "description": "recognized members are accounted for",
+ "data": { "a31b": null },
+ "valid": false
+ },
+ {
+ "description": "regexes are case sensitive",
+ "data": { "a_x_3": 3 },
+ "valid": true
+ },
+ {
+ "description": "regexes are case sensitive, 2",
+ "data": { "a_X_3": 3 },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "patternProperties with boolean schemas",
+ "schema": {
+ "patternProperties": {
+ "f.*": true,
+ "b.*": false
+ }
+ },
+ "tests": [
+ {
+ "description": "object with property matching schema true is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "object with property matching schema false is invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "object with both properties is invalid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/properties.json b/third_party/python/jsonschema/json/tests/draft7/properties.json
new file mode 100644
index 0000000000..b86c181982
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/properties.json
@@ -0,0 +1,167 @@
+[
+ {
+ "description": "object properties validation",
+ "schema": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"type": "string"}
+ }
+ },
+ "tests": [
+ {
+ "description": "both properties present and valid is valid",
+ "data": {"foo": 1, "bar": "baz"},
+ "valid": true
+ },
+ {
+ "description": "one property invalid is invalid",
+ "data": {"foo": 1, "bar": {}},
+ "valid": false
+ },
+ {
+ "description": "both properties invalid is invalid",
+ "data": {"foo": [], "bar": {}},
+ "valid": false
+ },
+ {
+ "description": "doesn't invalidate other properties",
+ "data": {"quux": []},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description":
+ "properties, patternProperties, additionalProperties interaction",
+ "schema": {
+ "properties": {
+ "foo": {"type": "array", "maxItems": 3},
+ "bar": {"type": "array"}
+ },
+ "patternProperties": {"f.o": {"minItems": 2}},
+ "additionalProperties": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "property validates property",
+ "data": {"foo": [1, 2]},
+ "valid": true
+ },
+ {
+ "description": "property invalidates property",
+ "data": {"foo": [1, 2, 3, 4]},
+ "valid": false
+ },
+ {
+ "description": "patternProperty invalidates property",
+ "data": {"foo": []},
+ "valid": false
+ },
+ {
+ "description": "patternProperty validates nonproperty",
+ "data": {"fxo": [1, 2]},
+ "valid": true
+ },
+ {
+ "description": "patternProperty invalidates nonproperty",
+ "data": {"fxo": []},
+ "valid": false
+ },
+ {
+ "description": "additionalProperty ignores property",
+ "data": {"bar": []},
+ "valid": true
+ },
+ {
+ "description": "additionalProperty validates others",
+ "data": {"quux": 3},
+ "valid": true
+ },
+ {
+ "description": "additionalProperty invalidates others",
+ "data": {"quux": "foo"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "properties with boolean schema",
+ "schema": {
+ "properties": {
+ "foo": true,
+ "bar": false
+ }
+ },
+ "tests": [
+ {
+ "description": "no property present is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "only 'true' property present is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "only 'false' property present is invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "both properties present is invalid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "properties with escaped characters",
+ "schema": {
+ "properties": {
+ "foo\nbar": {"type": "number"},
+ "foo\"bar": {"type": "number"},
+ "foo\\bar": {"type": "number"},
+ "foo\rbar": {"type": "number"},
+ "foo\tbar": {"type": "number"},
+ "foo\fbar": {"type": "number"}
+ }
+ },
+ "tests": [
+ {
+ "description": "object with all numbers is valid",
+ "data": {
+ "foo\nbar": 1,
+ "foo\"bar": 1,
+ "foo\\bar": 1,
+ "foo\rbar": 1,
+ "foo\tbar": 1,
+ "foo\fbar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with strings is invalid",
+ "data": {
+ "foo\nbar": "1",
+ "foo\"bar": "1",
+ "foo\\bar": "1",
+ "foo\rbar": "1",
+ "foo\tbar": "1",
+ "foo\fbar": "1"
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/propertyNames.json b/third_party/python/jsonschema/json/tests/draft7/propertyNames.json
new file mode 100644
index 0000000000..8423690d90
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/propertyNames.json
@@ -0,0 +1,78 @@
+[
+ {
+ "description": "propertyNames validation",
+ "schema": {
+ "propertyNames": {"maxLength": 3}
+ },
+ "tests": [
+ {
+ "description": "all property names valid",
+ "data": {
+ "f": {},
+ "foo": {}
+ },
+ "valid": true
+ },
+ {
+ "description": "some property names invalid",
+ "data": {
+ "foo": {},
+ "foobar": {}
+ },
+ "valid": false
+ },
+ {
+ "description": "object without properties is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3, 4],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "propertyNames with boolean schema true",
+ "schema": {"propertyNames": true},
+ "tests": [
+ {
+ "description": "object with any properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "propertyNames with boolean schema false",
+ "schema": {"propertyNames": false},
+ "tests": [
+ {
+ "description": "object with any properties is invalid",
+ "data": {"foo": 1},
+ "valid": false
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/ref.json b/third_party/python/jsonschema/json/tests/draft7/ref.json
new file mode 100644
index 0000000000..44b8ed22f1
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/ref.json
@@ -0,0 +1,443 @@
+[
+ {
+ "description": "root pointer ref",
+ "schema": {
+ "properties": {
+ "foo": {"$ref": "#"}
+ },
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": {"foo": false},
+ "valid": true
+ },
+ {
+ "description": "recursive match",
+ "data": {"foo": {"foo": false}},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"bar": false},
+ "valid": false
+ },
+ {
+ "description": "recursive mismatch",
+ "data": {"foo": {"bar": false}},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "relative pointer ref to object",
+ "schema": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"$ref": "#/properties/foo"}
+ }
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": {"bar": 3},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"bar": true},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "relative pointer ref to array",
+ "schema": {
+ "items": [
+ {"type": "integer"},
+ {"$ref": "#/items/0"}
+ ]
+ },
+ "tests": [
+ {
+ "description": "match array",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "mismatch array",
+ "data": [1, "foo"],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "escaped pointer ref",
+ "schema": {
+ "tilda~field": {"type": "integer"},
+ "slash/field": {"type": "integer"},
+ "percent%field": {"type": "integer"},
+ "properties": {
+ "tilda": {"$ref": "#/tilda~0field"},
+ "slash": {"$ref": "#/slash~1field"},
+ "percent": {"$ref": "#/percent%25field"}
+ }
+ },
+ "tests": [
+ {
+ "description": "slash invalid",
+ "data": {"slash": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "tilda invalid",
+ "data": {"tilda": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "percent invalid",
+ "data": {"percent": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "slash valid",
+ "data": {"slash": 123},
+ "valid": true
+ },
+ {
+ "description": "tilda valid",
+ "data": {"tilda": 123},
+ "valid": true
+ },
+ {
+ "description": "percent valid",
+ "data": {"percent": 123},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested refs",
+ "schema": {
+ "definitions": {
+ "a": {"type": "integer"},
+ "b": {"$ref": "#/definitions/a"},
+ "c": {"$ref": "#/definitions/b"}
+ },
+ "$ref": "#/definitions/c"
+ },
+ "tests": [
+ {
+ "description": "nested ref valid",
+ "data": 5,
+ "valid": true
+ },
+ {
+ "description": "nested ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ref overrides any sibling keywords",
+ "schema": {
+ "definitions": {
+ "reffed": {
+ "type": "array"
+ }
+ },
+ "properties": {
+ "foo": {
+ "$ref": "#/definitions/reffed",
+ "maxItems": 2
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "ref valid",
+ "data": { "foo": [] },
+ "valid": true
+ },
+ {
+ "description": "ref valid, maxItems ignored",
+ "data": { "foo": [ 1, 2, 3] },
+ "valid": true
+ },
+ {
+ "description": "ref invalid",
+ "data": { "foo": "string" },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "remote ref, containing refs itself",
+ "schema": {"$ref": "http://json-schema.org/draft-07/schema#"},
+ "tests": [
+ {
+ "description": "remote ref valid",
+ "data": {"minLength": 1},
+ "valid": true
+ },
+ {
+ "description": "remote ref invalid",
+ "data": {"minLength": -1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "property named $ref that is not a reference",
+ "schema": {
+ "properties": {
+ "$ref": {"type": "string"}
+ }
+ },
+ "tests": [
+ {
+ "description": "property named $ref valid",
+ "data": {"$ref": "a"},
+ "valid": true
+ },
+ {
+ "description": "property named $ref invalid",
+ "data": {"$ref": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "$ref to boolean schema true",
+ "schema": {
+ "$ref": "#/definitions/bool",
+ "definitions": {
+ "bool": true
+ }
+ },
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "$ref to boolean schema false",
+ "schema": {
+ "$ref": "#/definitions/bool",
+ "definitions": {
+ "bool": false
+ }
+ },
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Recursive references between schemas",
+ "schema": {
+ "$id": "http://localhost:1234/tree",
+ "description": "tree of nodes",
+ "type": "object",
+ "properties": {
+ "meta": {"type": "string"},
+ "nodes": {
+ "type": "array",
+ "items": {"$ref": "node"}
+ }
+ },
+ "required": ["meta", "nodes"],
+ "definitions": {
+ "node": {
+ "$id": "http://localhost:1234/node",
+ "description": "node",
+ "type": "object",
+ "properties": {
+ "value": {"type": "number"},
+ "subtree": {"$ref": "tree"}
+ },
+ "required": ["value"]
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid tree",
+ "data": {
+ "meta": "root",
+ "nodes": [
+ {
+ "value": 1,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 1.1},
+ {"value": 1.2}
+ ]
+ }
+ },
+ {
+ "value": 2,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 2.1},
+ {"value": 2.2}
+ ]
+ }
+ }
+ ]
+ },
+ "valid": true
+ },
+ {
+ "description": "invalid tree",
+ "data": {
+ "meta": "root",
+ "nodes": [
+ {
+ "value": 1,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": "string is invalid"},
+ {"value": 1.2}
+ ]
+ }
+ },
+ {
+ "value": 2,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 2.1},
+ {"value": 2.2}
+ ]
+ }
+ }
+ ]
+ },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "refs with quote",
+ "schema": {
+ "properties": {
+ "foo\"bar": {"$ref": "#/definitions/foo%22bar"}
+ },
+ "definitions": {
+ "foo\"bar": {"type": "number"}
+ }
+ },
+ "tests": [
+ {
+ "description": "object with numbers is valid",
+ "data": {
+ "foo\"bar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with strings is invalid",
+ "data": {
+ "foo\"bar": "1"
+ },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Location-independent identifier",
+ "schema": {
+ "allOf": [{
+ "$ref": "#foo"
+ }],
+ "definitions": {
+ "A": {
+ "$id": "#foo",
+ "type": "integer"
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Location-independent identifier with absolute URI",
+ "schema": {
+ "allOf": [{
+ "$ref": "http://localhost:1234/bar#foo"
+ }],
+ "definitions": {
+ "A": {
+ "$id": "http://localhost:1234/bar#foo",
+ "type": "integer"
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Location-independent identifier with base URI change in subschema",
+ "schema": {
+ "$id": "http://localhost:1234/root",
+ "allOf": [{
+ "$ref": "http://localhost:1234/nested.json#foo"
+ }],
+ "definitions": {
+ "A": {
+ "$id": "nested.json",
+ "definitions": {
+ "B": {
+ "$id": "#foo",
+ "type": "integer"
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/refRemote.json b/third_party/python/jsonschema/json/tests/draft7/refRemote.json
new file mode 100644
index 0000000000..819d32678a
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/refRemote.json
@@ -0,0 +1,171 @@
+[
+ {
+ "description": "remote ref",
+ "schema": {"$ref": "http://localhost:1234/integer.json"},
+ "tests": [
+ {
+ "description": "remote ref valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "remote ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "fragment within remote ref",
+ "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"},
+ "tests": [
+ {
+ "description": "remote fragment valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "remote fragment invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ref within remote ref",
+ "schema": {
+ "$ref": "http://localhost:1234/subSchemas.json#/refToInteger"
+ },
+ "tests": [
+ {
+ "description": "ref within ref valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "ref within ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change",
+ "schema": {
+ "$id": "http://localhost:1234/",
+ "items": {
+ "$id": "folder/",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ },
+ "tests": [
+ {
+ "description": "base URI change ref valid",
+ "data": [[1]],
+ "valid": true
+ },
+ {
+ "description": "base URI change ref invalid",
+ "data": [["a"]],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change - change folder",
+ "schema": {
+ "$id": "http://localhost:1234/scope_change_defs1.json",
+ "type" : "object",
+ "properties": {
+ "list": {"$ref": "#/definitions/baz"}
+ },
+ "definitions": {
+ "baz": {
+ "$id": "folder/",
+ "type": "array",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": {"list": [1]},
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": {"list": ["a"]},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change - change folder in subschema",
+ "schema": {
+ "$id": "http://localhost:1234/scope_change_defs2.json",
+ "type" : "object",
+ "properties": {
+ "list": {"$ref": "#/definitions/baz/definitions/bar"}
+ },
+ "definitions": {
+ "baz": {
+ "$id": "folder/",
+ "definitions": {
+ "bar": {
+ "type": "array",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": {"list": [1]},
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": {"list": ["a"]},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "root ref in remote ref",
+ "schema": {
+ "$id": "http://localhost:1234/object",
+ "type": "object",
+ "properties": {
+ "name": {"$ref": "name.json#/definitions/orNull"}
+ }
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": {
+ "name": "foo"
+ },
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": {
+ "name": null
+ },
+ "valid": true
+ },
+ {
+ "description": "object is invalid",
+ "data": {
+ "name": {
+ "name": null
+ }
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/required.json b/third_party/python/jsonschema/json/tests/draft7/required.json
new file mode 100644
index 0000000000..abf18f3459
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/required.json
@@ -0,0 +1,105 @@
+[
+ {
+ "description": "required validation",
+ "schema": {
+ "properties": {
+ "foo": {},
+ "bar": {}
+ },
+ "required": ["foo"]
+ },
+ "tests": [
+ {
+ "description": "present required property is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "non-present required property is invalid",
+ "data": {"bar": 1},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required default validation",
+ "schema": {
+ "properties": {
+ "foo": {}
+ }
+ },
+ "tests": [
+ {
+ "description": "not required by default",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required with empty array",
+ "schema": {
+ "properties": {
+ "foo": {}
+ },
+ "required": []
+ },
+ "tests": [
+ {
+ "description": "property not required",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required with escaped characters",
+ "schema": {
+ "required": [
+ "foo\nbar",
+ "foo\"bar",
+ "foo\\bar",
+ "foo\rbar",
+ "foo\tbar",
+ "foo\fbar"
+ ]
+ },
+ "tests": [
+ {
+ "description": "object with all properties present is valid",
+ "data": {
+ "foo\nbar": 1,
+ "foo\"bar": 1,
+ "foo\\bar": 1,
+ "foo\rbar": 1,
+ "foo\tbar": 1,
+ "foo\fbar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with some properties missing is invalid",
+ "data": {
+ "foo\nbar": "1",
+ "foo\"bar": "1"
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/type.json b/third_party/python/jsonschema/json/tests/draft7/type.json
new file mode 100644
index 0000000000..ea33b1821f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/type.json
@@ -0,0 +1,464 @@
+[
+ {
+ "description": "integer type matches integers",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "an integer is an integer",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a float is not an integer",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an integer",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "a string is still not an integer, even if it looks like one",
+ "data": "1",
+ "valid": false
+ },
+ {
+ "description": "an object is not an integer",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not an integer",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not an integer",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an integer",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "number type matches numbers",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "an integer is a number",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a float is a number",
+ "data": 1.1,
+ "valid": true
+ },
+ {
+ "description": "a string is not a number",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "a string is still not a number, even if it looks like one",
+ "data": "1",
+ "valid": false
+ },
+ {
+ "description": "an object is not a number",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a number",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not a number",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not a number",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "string type matches strings",
+ "schema": {"type": "string"},
+ "tests": [
+ {
+ "description": "1 is not a string",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not a string",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is a string",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "a string is still a string, even if it looks like a number",
+ "data": "1",
+ "valid": true
+ },
+ {
+ "description": "an empty string is still a string",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "an object is not a string",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a string",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not a string",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not a string",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "object type matches objects",
+ "schema": {"type": "object"},
+ "tests": [
+ {
+ "description": "an integer is not an object",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not an object",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an object",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is an object",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "an array is not an object",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not an object",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an object",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "array type matches arrays",
+ "schema": {"type": "array"},
+ "tests": [
+ {
+ "description": "an integer is not an array",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not an array",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an array",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is not an array",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is an array",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "a boolean is not an array",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an array",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "boolean type matches booleans",
+ "schema": {"type": "boolean"},
+ "tests": [
+ {
+ "description": "an integer is not a boolean",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "zero is not a boolean",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "a float is not a boolean",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not a boolean",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an empty string is not a boolean",
+ "data": "",
+ "valid": false
+ },
+ {
+ "description": "an object is not a boolean",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a boolean",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "true is a boolean",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "false is a boolean",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "null is not a boolean",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "null type matches only the null object",
+ "schema": {"type": "null"},
+ "tests": [
+ {
+ "description": "an integer is not null",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not null",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "zero is not null",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "a string is not null",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an empty string is not null",
+ "data": "",
+ "valid": false
+ },
+ {
+ "description": "an object is not null",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not null",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "true is not null",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "false is not null",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "null is null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple types can be specified in an array",
+ "schema": {"type": ["integer", "string"]},
+ "tests": [
+ {
+ "description": "an integer is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "a float is invalid",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "an object is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type as array with one item",
+ "schema": {
+ "type": ["string"]
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type: array or object",
+ "schema": {
+ "type": ["array", "object"]
+ },
+ "tests": [
+ {
+ "description": "array is valid",
+ "data": [1,2,3],
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": 123},
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type: array, object or null",
+ "schema": {
+ "type": ["array", "object", "null"]
+ },
+ "tests": [
+ {
+ "description": "array is valid",
+ "data": [1,2,3],
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": 123},
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/draft7/uniqueItems.json b/third_party/python/jsonschema/json/tests/draft7/uniqueItems.json
new file mode 100644
index 0000000000..d0a94d8cf5
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/draft7/uniqueItems.json
@@ -0,0 +1,173 @@
+[
+ {
+ "description": "uniqueItems validation",
+ "schema": {"uniqueItems": true},
+ "tests": [
+ {
+ "description": "unique array of integers is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of integers is invalid",
+ "data": [1, 1],
+ "valid": false
+ },
+ {
+ "description": "numbers are unique if mathematically unequal",
+ "data": [1.0, 1.00, 1],
+ "valid": false
+ },
+ {
+ "description": "false is not equal to zero",
+ "data": [0, false],
+ "valid": true
+ },
+ {
+ "description": "true is not equal to one",
+ "data": [1, true],
+ "valid": true
+ },
+ {
+ "description": "unique array of objects is valid",
+ "data": [{"foo": "bar"}, {"foo": "baz"}],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of objects is invalid",
+ "data": [{"foo": "bar"}, {"foo": "bar"}],
+ "valid": false
+ },
+ {
+ "description": "unique array of nested objects is valid",
+ "data": [
+ {"foo": {"bar" : {"baz" : true}}},
+ {"foo": {"bar" : {"baz" : false}}}
+ ],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of nested objects is invalid",
+ "data": [
+ {"foo": {"bar" : {"baz" : true}}},
+ {"foo": {"bar" : {"baz" : true}}}
+ ],
+ "valid": false
+ },
+ {
+ "description": "unique array of arrays is valid",
+ "data": [["foo"], ["bar"]],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of arrays is invalid",
+ "data": [["foo"], ["foo"]],
+ "valid": false
+ },
+ {
+ "description": "1 and true are unique",
+ "data": [1, true],
+ "valid": true
+ },
+ {
+ "description": "0 and false are unique",
+ "data": [0, false],
+ "valid": true
+ },
+ {
+ "description": "unique heterogeneous types are valid",
+ "data": [{}, [1], true, null, 1],
+ "valid": true
+ },
+ {
+ "description": "non-unique heterogeneous types are invalid",
+ "data": [{}, [1], true, null, {}, 1],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "uniqueItems with an array of items",
+ "schema": {
+ "items": [{"type": "boolean"}, {"type": "boolean"}],
+ "uniqueItems": true
+ },
+ "tests": [
+ {
+ "description": "[false, true] from items array is valid",
+ "data": [false, true],
+ "valid": true
+ },
+ {
+ "description": "[true, false] from items array is valid",
+ "data": [true, false],
+ "valid": true
+ },
+ {
+ "description": "[false, false] from items array is not valid",
+ "data": [false, false],
+ "valid": false
+ },
+ {
+ "description": "[true, true] from items array is not valid",
+ "data": [true, true],
+ "valid": false
+ },
+ {
+ "description": "unique array extended from [false, true] is valid",
+ "data": [false, true, "foo", "bar"],
+ "valid": true
+ },
+ {
+ "description": "unique array extended from [true, false] is valid",
+ "data": [true, false, "foo", "bar"],
+ "valid": true
+ },
+ {
+ "description": "non-unique array extended from [false, true] is not valid",
+ "data": [false, true, "foo", "foo"],
+ "valid": false
+ },
+ {
+ "description": "non-unique array extended from [true, false] is not valid",
+ "data": [true, false, "foo", "foo"],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "uniqueItems with an array of items and additionalItems=false",
+ "schema": {
+ "items": [{"type": "boolean"}, {"type": "boolean"}],
+ "uniqueItems": true,
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "[false, true] from items array is valid",
+ "data": [false, true],
+ "valid": true
+ },
+ {
+ "description": "[true, false] from items array is valid",
+ "data": [true, false],
+ "valid": true
+ },
+ {
+ "description": "[false, false] from items array is not valid",
+ "data": [false, false],
+ "valid": false
+ },
+ {
+ "description": "[true, true] from items array is not valid",
+ "data": [true, true],
+ "valid": false
+ },
+ {
+ "description": "extra items are invalid even if unique",
+ "data": [false, true, null],
+ "valid": false
+ }
+ ]
+ }
+]
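
Every fixture file above follows the same shape: a JSON array of groups, each carrying a description, a schema, and a list of tests that pair a data instance with the expected valid flag. As a minimal sketch (not part of this commit; the file path and output format are illustrative assumptions), one of the vendored draft-07 fixture files without remote refs could be exercised against jsonschema's Draft7Validator like this:

    # Hypothetical runner sketch, not shipped in this tree: loads one vendored
    # draft-07 fixture file and checks each test case with Draft7Validator.
    import json

    from jsonschema import Draft7Validator

    # Illustrative path; adjust to wherever the vendored suite lives.
    FIXTURE = "third_party/python/jsonschema/json/tests/draft7/uniqueItems.json"

    with open(FIXTURE) as f:
        groups = json.load(f)

    for group in groups:
        validator = Draft7Validator(group["schema"])
        for test in group["tests"]:
            got = validator.is_valid(test["data"])
            status = "ok" if got == test["valid"] else "FAIL"
            print(f"{status}: {group['description']} - {test['description']}")

Files such as refRemote.json additionally assume a document server at http://localhost:1234/ serving the suite's remotes, so they are not runnable with this sketch as-is.
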
diff --git a/third_party/python/jsonschema/json/tests/latest/additionalItems.json b/third_party/python/jsonschema/json/tests/latest/additionalItems.json
new file mode 100644
index 0000000000..abecc578be
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/additionalItems.json
@@ -0,0 +1,87 @@
+[
+ {
+ "description": "additionalItems as schema",
+ "schema": {
+ "items": [{}],
+ "additionalItems": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "additional items match schema",
+ "data": [ null, 2, 3, 4 ],
+ "valid": true
+ },
+ {
+ "description": "additional items do not match schema",
+ "data": [ null, 2, 3, "foo" ],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "items is schema, no additionalItems",
+ "schema": {
+ "items": {},
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "all items match schema",
+ "data": [ 1, 2, 3, 4, 5 ],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "array of items with no additionalItems",
+ "schema": {
+ "items": [{}, {}, {}],
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "fewer number of items present",
+ "data": [ 1, 2 ],
+ "valid": true
+ },
+ {
+ "description": "equal number of items present",
+ "data": [ 1, 2, 3 ],
+ "valid": true
+ },
+ {
+ "description": "additional items are not permitted",
+ "data": [ 1, 2, 3, 4 ],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "additionalItems as false without items",
+ "schema": {"additionalItems": false},
+ "tests": [
+ {
+ "description":
+ "items defaults to empty schema so everything is valid",
+ "data": [ 1, 2, 3, 4, 5 ],
+ "valid": true
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": {"foo" : "bar"},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "additionalItems are allowed by default",
+ "schema": {"items": [{"type": "integer"}]},
+ "tests": [
+ {
+ "description": "only the first item is validated",
+ "data": [1, "foo", false],
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/additionalProperties.json b/third_party/python/jsonschema/json/tests/latest/additionalProperties.json
new file mode 100644
index 0000000000..ffeac6b381
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/additionalProperties.json
@@ -0,0 +1,133 @@
+[
+ {
+ "description":
+ "additionalProperties being false does not allow other properties",
+ "schema": {
+ "properties": {"foo": {}, "bar": {}},
+ "patternProperties": { "^v": {} },
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "no additional properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "an additional property is invalid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : "boom"},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobarbaz",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "patternProperties are not additional properties",
+ "data": {"foo":1, "vroom": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "non-ASCII pattern with additionalProperties",
+ "schema": {
+ "patternProperties": {"^á": {}},
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "matching the pattern is valid",
+ "data": {"ármányos": 2},
+ "valid": true
+ },
+ {
+ "description": "not matching the pattern is invalid",
+ "data": {"élmény": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description":
+ "additionalProperties allows a schema which should validate",
+ "schema": {
+ "properties": {"foo": {}, "bar": {}},
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "no additional properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "an additional valid property is valid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : true},
+ "valid": true
+ },
+ {
+ "description": "an additional invalid property is invalid",
+ "data": {"foo" : 1, "bar" : 2, "quux" : 12},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description":
+ "additionalProperties can exist by itself",
+ "schema": {
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "an additional valid property is valid",
+ "data": {"foo" : true},
+ "valid": true
+ },
+ {
+ "description": "an additional invalid property is invalid",
+ "data": {"foo" : 1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "additionalProperties are allowed by default",
+ "schema": {"properties": {"foo": {}, "bar": {}}},
+ "tests": [
+ {
+ "description": "additional properties are allowed",
+ "data": {"foo": 1, "bar": 2, "quux": true},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "additionalProperties should not look in applicators",
+ "schema": {
+ "allOf": [
+ {"properties": {"foo": {}}}
+ ],
+ "additionalProperties": {"type": "boolean"}
+ },
+ "tests": [
+ {
+ "description": "properties defined in allOf are not allowed",
+ "data": {"foo": 1, "bar": true},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/allOf.json b/third_party/python/jsonschema/json/tests/latest/allOf.json
new file mode 100644
index 0000000000..eb612091aa
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/allOf.json
@@ -0,0 +1,218 @@
+[
+ {
+ "description": "allOf",
+ "schema": {
+ "allOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "allOf",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "mismatch second",
+ "data": {"foo": "baz"},
+ "valid": false
+ },
+ {
+ "description": "mismatch first",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "wrong type",
+ "data": {"foo": "baz", "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with base schema",
+ "schema": {
+ "properties": {"bar": {"type": "integer"}},
+ "required": ["bar"],
+ "allOf" : [
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ },
+ {
+ "properties": {
+ "baz": {"type": "null"}
+ },
+ "required": ["baz"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": {"foo": "quux", "bar": 2, "baz": null},
+ "valid": true
+ },
+ {
+ "description": "mismatch base schema",
+ "data": {"foo": "quux", "baz": null},
+ "valid": false
+ },
+ {
+ "description": "mismatch first allOf",
+ "data": {"bar": 2, "baz": null},
+ "valid": false
+ },
+ {
+ "description": "mismatch second allOf",
+ "data": {"foo": "quux", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "mismatch both",
+ "data": {"bar": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf simple types",
+ "schema": {
+ "allOf": [
+ {"maximum": 30},
+ {"minimum": 20}
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": 25,
+ "valid": true
+ },
+ {
+ "description": "mismatch one",
+ "data": 35,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with boolean schemas, all true",
+ "schema": {"allOf": [true, true]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with boolean schemas, some false",
+ "schema": {"allOf": [true, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with boolean schemas, all false",
+ "schema": {"allOf": [false, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with one empty schema",
+ "schema": {
+ "allOf": [
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "any data is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with two empty schemas",
+ "schema": {
+ "allOf": [
+ {},
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "any data is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "allOf with the first empty schema",
+ "schema": {
+ "allOf": [
+ {},
+ { "type": "number" }
+ ]
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "allOf with the last empty schema",
+ "schema": {
+ "allOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/anchor.json b/third_party/python/jsonschema/json/tests/latest/anchor.json
new file mode 100644
index 0000000000..06b0ba4d25
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/anchor.json
@@ -0,0 +1,87 @@
+[
+ {
+ "description": "Location-independent identifier",
+ "schema": {
+ "allOf": [{
+ "$ref": "#foo"
+ }],
+ "$defs": {
+ "A": {
+ "$anchor": "foo",
+ "type": "integer"
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Location-independent identifier with absolute URI",
+ "schema": {
+ "allOf": [{
+ "$ref": "http://localhost:1234/bar#foo"
+ }],
+ "$defs": {
+ "A": {
+ "$id": "http://localhost:1234/bar",
+ "$anchor": "foo",
+ "type": "integer"
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Location-independent identifier with base URI change in subschema",
+ "schema": {
+ "$id": "http://localhost:1234/root",
+ "allOf": [{
+ "$ref": "http://localhost:1234/nested.json#foo"
+ }],
+ "$defs": {
+ "A": {
+ "$id": "nested.json",
+ "$defs": {
+ "B": {
+ "$anchor": "foo",
+ "type": "integer"
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "data": 1,
+ "description": "match",
+ "valid": true
+ },
+ {
+ "data": "a",
+ "description": "mismatch",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/anyOf.json b/third_party/python/jsonschema/json/tests/latest/anyOf.json
new file mode 100644
index 0000000000..ab5eb386b4
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/anyOf.json
@@ -0,0 +1,189 @@
+[
+ {
+ "description": "anyOf",
+ "schema": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "minimum": 2
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first anyOf valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "second anyOf valid",
+ "data": 2.5,
+ "valid": true
+ },
+ {
+ "description": "both anyOf valid",
+ "data": 3,
+ "valid": true
+ },
+ {
+ "description": "neither anyOf valid",
+ "data": 1.5,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with base schema",
+ "schema": {
+ "type": "string",
+ "anyOf" : [
+ {
+ "maxLength": 2
+ },
+ {
+ "minLength": 4
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "mismatch base schema",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "one anyOf valid",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "both anyOf invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with boolean schemas, all true",
+ "schema": {"anyOf": [true, true]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "anyOf with boolean schemas, some true",
+ "schema": {"anyOf": [true, false]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "anyOf with boolean schemas, all false",
+ "schema": {"anyOf": [false, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf complex types",
+ "schema": {
+ "anyOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first anyOf valid (complex)",
+ "data": {"bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second anyOf valid (complex)",
+ "data": {"foo": "baz"},
+ "valid": true
+ },
+ {
+ "description": "both anyOf valid (complex)",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "neither anyOf valid (complex)",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "anyOf with one empty schema",
+ "schema": {
+ "anyOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "number is valid",
+ "data": 123,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested anyOf, to check validation semantics",
+ "schema": {
+ "anyOf": [
+ {
+ "anyOf": [
+ {
+ "type": "null"
+ }
+ ]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "anything non-null is invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/boolean_schema.json b/third_party/python/jsonschema/json/tests/latest/boolean_schema.json
new file mode 100644
index 0000000000..6d40f23f26
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/boolean_schema.json
@@ -0,0 +1,104 @@
+[
+ {
+ "description": "boolean schema 'true'",
+ "schema": true,
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "boolean true is valid",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "boolean false is valid",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": "bar"},
+ "valid": true
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "array is valid",
+ "data": ["foo"],
+ "valid": true
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "boolean schema 'false'",
+ "schema": false,
+ "tests": [
+ {
+ "description": "number is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "boolean true is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "boolean false is invalid",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ },
+ {
+ "description": "object is invalid",
+ "data": {"foo": "bar"},
+ "valid": false
+ },
+ {
+ "description": "empty object is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "array is invalid",
+ "data": ["foo"],
+ "valid": false
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/const.json b/third_party/python/jsonschema/json/tests/latest/const.json
new file mode 100644
index 0000000000..c089625dc4
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/const.json
@@ -0,0 +1,170 @@
+[
+ {
+ "description": "const validation",
+ "schema": {"const": 2},
+ "tests": [
+ {
+ "description": "same value is valid",
+ "data": 2,
+ "valid": true
+ },
+ {
+ "description": "another value is invalid",
+ "data": 5,
+ "valid": false
+ },
+ {
+ "description": "another type is invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with object",
+ "schema": {"const": {"foo": "bar", "baz": "bax"}},
+ "tests": [
+ {
+ "description": "same object is valid",
+ "data": {"foo": "bar", "baz": "bax"},
+ "valid": true
+ },
+ {
+ "description": "same object with different property order is valid",
+ "data": {"baz": "bax", "foo": "bar"},
+ "valid": true
+ },
+ {
+ "description": "another object is invalid",
+ "data": {"foo": "bar"},
+ "valid": false
+ },
+ {
+ "description": "another type is invalid",
+ "data": [1, 2],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with array",
+ "schema": {"const": [{ "foo": "bar" }]},
+ "tests": [
+ {
+ "description": "same array is valid",
+ "data": [{"foo": "bar"}],
+ "valid": true
+ },
+ {
+ "description": "another array item is invalid",
+ "data": [2],
+ "valid": false
+ },
+ {
+ "description": "array with additional items is invalid",
+ "data": [1, 2, 3],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with null",
+ "schema": {"const": null},
+ "tests": [
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "not null is invalid",
+ "data": 0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with false does not match 0",
+ "schema": {"const": false},
+ "tests": [
+ {
+ "description": "false is valid",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "integer zero is invalid",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "float zero is invalid",
+ "data": 0.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with true does not match 1",
+ "schema": {"const": true},
+ "tests": [
+ {
+ "description": "true is valid",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "integer one is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "float one is invalid",
+ "data": 1.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "const with 0 does not match false",
+ "schema": {"const": 0},
+ "tests": [
+ {
+ "description": "false is invalid",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "integer zero is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "float zero is valid",
+ "data": 0.0,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "const with 1 does not match true",
+ "schema": {"const": 1},
+ "tests": [
+ {
+ "description": "true is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "integer one is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "float one is valid",
+ "data": 1.0,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/contains.json b/third_party/python/jsonschema/json/tests/latest/contains.json
new file mode 100644
index 0000000000..b7ae5a25fe
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/contains.json
@@ -0,0 +1,95 @@
+[
+ {
+ "description": "contains keyword validation",
+ "schema": {
+ "contains": {"minimum": 5}
+ },
+ "tests": [
+ {
+ "description": "array with item matching schema (5) is valid",
+ "data": [3, 4, 5],
+ "valid": true
+ },
+ {
+ "description": "array with item matching schema (6) is valid",
+ "data": [3, 4, 6],
+ "valid": true
+ },
+ {
+ "description": "array with two items matching schema (5, 6) is valid",
+ "data": [3, 4, 5, 6],
+ "valid": true
+ },
+ {
+ "description": "array without items matching schema is invalid",
+ "data": [2, 3, 4],
+ "valid": false
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "not array is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "contains keyword with const keyword",
+ "schema": {
+ "contains": { "const": 5 }
+ },
+ "tests": [
+ {
+ "description": "array with item 5 is valid",
+ "data": [3, 4, 5],
+ "valid": true
+ },
+ {
+ "description": "array with two items 5 is valid",
+ "data": [3, 4, 5, 5],
+ "valid": true
+ },
+ {
+ "description": "array without item 5 is invalid",
+ "data": [1, 2, 3, 4],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "contains keyword with boolean schema true",
+ "schema": {"contains": true},
+ "tests": [
+ {
+ "description": "any non-empty array is valid",
+ "data": ["foo"],
+ "valid": true
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "contains keyword with boolean schema false",
+ "schema": {"contains": false},
+ "tests": [
+ {
+ "description": "any non-empty array is invalid",
+ "data": ["foo"],
+ "valid": false
+ },
+ {
+ "description": "empty array is invalid",
+ "data": [],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/default.json b/third_party/python/jsonschema/json/tests/latest/default.json
new file mode 100644
index 0000000000..17629779fb
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/default.json
@@ -0,0 +1,49 @@
+[
+ {
+ "description": "invalid type for default",
+ "schema": {
+ "properties": {
+ "foo": {
+ "type": "integer",
+ "default": []
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when property is specified",
+ "data": {"foo": 13},
+ "valid": true
+ },
+ {
+ "description": "still valid when the invalid default is used",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "invalid string value for default",
+ "schema": {
+ "properties": {
+ "bar": {
+ "type": "string",
+ "minLength": 4,
+ "default": "bad"
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when property is specified",
+ "data": {"bar": "good"},
+ "valid": true
+ },
+ {
+ "description": "still valid when the invalid default is used",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/defs.json b/third_party/python/jsonschema/json/tests/latest/defs.json
new file mode 100644
index 0000000000..f2fbec42b2
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/defs.json
@@ -0,0 +1,24 @@
+[
+ {
+ "description": "valid definition",
+ "schema": {"$ref": "https://json-schema.org/draft/2019-09/schema"},
+ "tests": [
+ {
+ "description": "valid definition schema",
+ "data": {"$defs": {"foo": {"type": "integer"}}},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "invalid definition",
+ "schema": {"$ref": "https://json-schema.org/draft/2019-09/schema"},
+ "tests": [
+ {
+ "description": "invalid definition schema",
+ "data": {"$defs": {"foo": {"type": 1}}},
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/dependencies.json b/third_party/python/jsonschema/json/tests/latest/dependencies.json
new file mode 100644
index 0000000000..8dd78aa5d8
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/dependencies.json
@@ -0,0 +1,268 @@
+[
+ {
+ "description": "dependencies",
+ "schema": {
+ "dependencies": {"bar": ["foo"]}
+ },
+ "tests": [
+ {
+ "description": "neither",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "nondependant",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "with dependency",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "missing dependency",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": ["bar"],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "dependencies with empty array",
+ "schema": {
+ "dependencies": {"bar": []}
+ },
+ "tests": [
+ {
+ "description": "empty object",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "object with one property",
+ "data": {"bar": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple dependencies",
+ "schema": {
+ "dependencies": {"quux": ["foo", "bar"]}
+ },
+ "tests": [
+ {
+ "description": "neither",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "nondependants",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "with dependencies",
+ "data": {"foo": 1, "bar": 2, "quux": 3},
+ "valid": true
+ },
+ {
+ "description": "missing dependency",
+ "data": {"foo": 1, "quux": 2},
+ "valid": false
+ },
+ {
+ "description": "missing other dependency",
+ "data": {"bar": 1, "quux": 2},
+ "valid": false
+ },
+ {
+ "description": "missing both dependencies",
+ "data": {"quux": 1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "multiple dependencies subschema",
+ "schema": {
+ "dependencies": {
+ "bar": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"type": "integer"}
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "no dependency",
+ "data": {"foo": "quux"},
+ "valid": true
+ },
+ {
+ "description": "wrong type",
+ "data": {"foo": "quux", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "wrong type other",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ },
+ {
+ "description": "wrong type both",
+ "data": {"foo": "quux", "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "dependencies with boolean subschemas",
+ "schema": {
+ "dependencies": {
+ "foo": true,
+ "bar": false
+ }
+ },
+ "tests": [
+ {
+ "description": "object with property having schema true is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "object with property having schema false is invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "object with both properties is invalid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "empty array of dependencies",
+ "schema": {
+ "dependencies": {
+ "foo": []
+ }
+ },
+ "tests": [
+ {
+ "description": "object with property is valid",
+ "data": { "foo": 1 },
+ "valid": true
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "non-object is valid",
+ "data": 1,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "dependencies with escaped characters",
+ "schema": {
+ "dependencies": {
+ "foo\nbar": ["foo\rbar"],
+ "foo\tbar": {
+ "minProperties": 4
+ },
+ "foo'bar": {"required": ["foo\"bar"]},
+ "foo\"bar": ["foo'bar"]
+ }
+ },
+ "tests": [
+ {
+ "description": "valid object 1",
+ "data": {
+ "foo\nbar": 1,
+ "foo\rbar": 2
+ },
+ "valid": true
+ },
+ {
+ "description": "valid object 2",
+ "data": {
+ "foo\tbar": 1,
+ "a": 2,
+ "b": 3,
+ "c": 4
+ },
+ "valid": true
+ },
+ {
+ "description": "valid object 3",
+ "data": {
+ "foo'bar": 1,
+ "foo\"bar": 2
+ },
+ "valid": true
+ },
+ {
+ "description": "invalid object 1",
+ "data": {
+ "foo\nbar": 1,
+ "foo": 2
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 2",
+ "data": {
+ "foo\tbar": 1,
+ "a": 2
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 3",
+ "data": {
+ "foo'bar": 1
+ },
+ "valid": false
+ },
+ {
+ "description": "invalid object 4",
+ "data": {
+ "foo\"bar": 2
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/enum.json b/third_party/python/jsonschema/json/tests/latest/enum.json
new file mode 100644
index 0000000000..32d79026e1
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/enum.json
@@ -0,0 +1,179 @@
+[
+ {
+ "description": "simple enum validation",
+ "schema": {"enum": [1, 2, 3]},
+ "tests": [
+ {
+ "description": "one of the enum is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "something else is invalid",
+ "data": 4,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "heterogeneous enum validation",
+ "schema": {"enum": [6, "foo", [], true, {"foo": 12}]},
+ "tests": [
+ {
+ "description": "one of the enum is valid",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "something else is invalid",
+ "data": null,
+ "valid": false
+ },
+ {
+ "description": "objects are deep compared",
+ "data": {"foo": false},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enums in properties",
+ "schema": {
+ "type":"object",
+ "properties": {
+ "foo": {"enum":["foo"]},
+ "bar": {"enum":["bar"]}
+ },
+ "required": ["bar"]
+ },
+ "tests": [
+ {
+ "description": "both properties are valid",
+ "data": {"foo":"foo", "bar":"bar"},
+ "valid": true
+ },
+ {
+ "description": "missing optional property is valid",
+ "data": {"bar":"bar"},
+ "valid": true
+ },
+ {
+ "description": "missing required property is invalid",
+ "data": {"foo":"foo"},
+ "valid": false
+ },
+ {
+ "description": "missing all properties is invalid",
+ "data": {},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with escaped characters",
+ "schema": {
+ "enum": ["foo\nbar", "foo\rbar"]
+ },
+ "tests": [
+ {
+ "description": "member 1 is valid",
+ "data": "foo\nbar",
+ "valid": true
+ },
+ {
+ "description": "member 2 is valid",
+ "data": "foo\rbar",
+ "valid": true
+ },
+ {
+ "description": "another string is invalid",
+ "data": "abc",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with false does not match 0",
+ "schema": {"enum": [false]},
+ "tests": [
+ {
+ "description": "false is valid",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "integer zero is invalid",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "float zero is invalid",
+ "data": 0.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with true does not match 1",
+ "schema": {"enum": [true]},
+ "tests": [
+ {
+ "description": "true is valid",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "integer one is invalid",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "float one is invalid",
+ "data": 1.0,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "enum with 0 does not match false",
+ "schema": {"enum": [0]},
+ "tests": [
+ {
+ "description": "false is invalid",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "integer zero is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "float zero is valid",
+ "data": 0.0,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "enum with 1 does not match true",
+ "schema": {"enum": [1]},
+ "tests": [
+ {
+ "description": "true is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "integer one is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "float one is valid",
+ "data": 1.0,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/exclusiveMaximum.json b/third_party/python/jsonschema/json/tests/latest/exclusiveMaximum.json
new file mode 100644
index 0000000000..dc3cd709d3
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/exclusiveMaximum.json
@@ -0,0 +1,30 @@
+[
+ {
+ "description": "exclusiveMaximum validation",
+ "schema": {
+ "exclusiveMaximum": 3.0
+ },
+ "tests": [
+ {
+ "description": "below the exclusiveMaximum is valid",
+ "data": 2.2,
+ "valid": true
+ },
+ {
+ "description": "boundary point is invalid",
+ "data": 3.0,
+ "valid": false
+ },
+ {
+ "description": "above the exclusiveMaximum is invalid",
+ "data": 3.5,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/exclusiveMinimum.json b/third_party/python/jsonschema/json/tests/latest/exclusiveMinimum.json
new file mode 100644
index 0000000000..b38d7ecec6
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/exclusiveMinimum.json
@@ -0,0 +1,30 @@
+[
+ {
+ "description": "exclusiveMinimum validation",
+ "schema": {
+ "exclusiveMinimum": 1.1
+ },
+ "tests": [
+ {
+ "description": "above the exclusiveMinimum is valid",
+ "data": 1.2,
+ "valid": true
+ },
+ {
+ "description": "boundary point is invalid",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "below the exclusiveMinimum is invalid",
+ "data": 0.6,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/format.json b/third_party/python/jsonschema/json/tests/latest/format.json
new file mode 100644
index 0000000000..93305f5cd1
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/format.json
@@ -0,0 +1,614 @@
+[
+ {
+ "description": "validation of e-mail addresses",
+ "schema": {"format": "email"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IDN e-mail addresses",
+ "schema": {"format": "idn-email"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of regexes",
+ "schema": {"format": "regex"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IP addresses",
+ "schema": {"format": "ipv4"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IPv6 addresses",
+ "schema": {"format": "ipv6"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IDN hostnames",
+ "schema": {"format": "idn-hostname"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of hostnames",
+ "schema": {"format": "hostname"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of date strings",
+ "schema": {"format": "date"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of date-time strings",
+ "schema": {"format": "date-time"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of time strings",
+ "schema": {"format": "time"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of JSON pointers",
+ "schema": {"format": "json-pointer"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of relative JSON pointers",
+ "schema": {"format": "relative-json-pointer"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IRIs",
+ "schema": {"format": "iri"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of IRI references",
+ "schema": {"format": "iri-reference"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URIs",
+ "schema": {"format": "uri"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URI references",
+ "schema": {"format": "uri-reference"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of URI templates",
+ "schema": {"format": "uri-template"},
+ "tests": [
+ {
+ "description": "ignores integers",
+ "data": 12,
+ "valid": true
+ },
+ {
+ "description": "ignores floats",
+ "data": 13.7,
+ "valid": true
+ },
+ {
+ "description": "ignores objects",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores booleans",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "ignores null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ }
+]
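The cases above all make the same point: `format` only constrains strings, so integers, floats, objects, arrays, booleans and null pass regardless of which format is named. Below is a minimal sketch of exercising one such case with the bundled Python `jsonschema` package; the use of `Draft7Validator` and `FormatChecker` here is an illustrative assumption, since the `latest` suite may be run against a newer draft class.

    import jsonschema

    # One case from the suite above: {"format": "ipv4"} against a non-string.
    schema = {"format": "ipv4"}
    validator = jsonschema.Draft7Validator(
        schema, format_checker=jsonschema.FormatChecker()
    )

    # Non-strings are ignored by format assertions, so no errors are reported.
    assert list(validator.iter_errors(12)) == []

    # A string, by contrast, is actually checked against the named format.
    assert not validator.is_valid("999.999.999.999")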
diff --git a/third_party/python/jsonschema/json/tests/latest/if-then-else.json b/third_party/python/jsonschema/json/tests/latest/if-then-else.json
new file mode 100644
index 0000000000..be7328163d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/if-then-else.json
@@ -0,0 +1,188 @@
+[
+ {
+ "description": "ignore if without then or else",
+ "schema": {
+ "if": {
+ "const": 0
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when valid against lone if",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "valid when invalid against lone if",
+ "data": "hello",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ignore then without if",
+ "schema": {
+ "then": {
+ "const": 0
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when valid against lone then",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "valid when invalid against lone then",
+ "data": "hello",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ignore else without if",
+ "schema": {
+ "else": {
+ "const": 0
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when valid against lone else",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "valid when invalid against lone else",
+ "data": "hello",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "if and then without else",
+ "schema": {
+ "if": {
+ "exclusiveMaximum": 0
+ },
+ "then": {
+ "minimum": -10
+ }
+ },
+ "tests": [
+ {
+ "description": "valid through then",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "invalid through then",
+ "data": -100,
+ "valid": false
+ },
+ {
+ "description": "valid when if test fails",
+ "data": 3,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "if and else without then",
+ "schema": {
+ "if": {
+ "exclusiveMaximum": 0
+ },
+ "else": {
+ "multipleOf": 2
+ }
+ },
+ "tests": [
+ {
+ "description": "valid when if test passes",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "valid through else",
+ "data": 4,
+ "valid": true
+ },
+ {
+ "description": "invalid through else",
+ "data": 3,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "validate against correct branch, then vs else",
+ "schema": {
+ "if": {
+ "exclusiveMaximum": 0
+ },
+ "then": {
+ "minimum": -10
+ },
+ "else": {
+ "multipleOf": 2
+ }
+ },
+ "tests": [
+ {
+ "description": "valid through then",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "invalid through then",
+ "data": -100,
+ "valid": false
+ },
+ {
+ "description": "valid through else",
+ "data": 4,
+ "valid": true
+ },
+ {
+ "description": "invalid through else",
+ "data": 3,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "non-interference across combined schemas",
+ "schema": {
+ "allOf": [
+ {
+ "if": {
+ "exclusiveMaximum": 0
+ }
+ },
+ {
+ "then": {
+ "minimum": -10
+ }
+ },
+ {
+ "else": {
+ "multipleOf": 2
+ }
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid, but would have been invalid through then",
+ "data": -100,
+ "valid": true
+ },
+ {
+ "description": "valid, but would have been invalid through else",
+ "data": 3,
+ "valid": true
+ }
+ ]
+ }
+]
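The if-then-else.json cases above pin down the conditional keywords: `then` and `else` are ignored unless `if` is present, and only the branch selected by the outcome of `if` is asserted. A short sketch of the "if and then without else" schema, again assuming the bundled `jsonschema` package and its `Draft7Validator`:

    from jsonschema import Draft7Validator

    # "if and then without else" from the suite above.
    schema = {
        "if": {"exclusiveMaximum": 0},
        "then": {"minimum": -10},
    }
    v = Draft7Validator(schema)

    assert v.is_valid(-1)        # "if" passes and "then" passes
    assert not v.is_valid(-100)  # "if" passes but "then" fails
    assert v.is_valid(3)         # "if" fails and there is no "else", so anything goes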
diff --git a/third_party/python/jsonschema/json/tests/latest/items.json b/third_party/python/jsonschema/json/tests/latest/items.json
new file mode 100644
index 0000000000..6e98ee82da
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/items.json
@@ -0,0 +1,250 @@
+[
+ {
+ "description": "a schema given for items",
+ "schema": {
+ "items": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "valid items",
+ "data": [ 1, 2, 3 ],
+ "valid": true
+ },
+ {
+ "description": "wrong type of items",
+ "data": [1, "x"],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": {"foo" : "bar"},
+ "valid": true
+ },
+ {
+ "description": "JavaScript pseudo-array is valid",
+ "data": {
+ "0": "invalid",
+ "length": 1
+ },
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "an array of schemas for items",
+ "schema": {
+ "items": [
+ {"type": "integer"},
+ {"type": "string"}
+ ]
+ },
+ "tests": [
+ {
+ "description": "correct types",
+ "data": [ 1, "foo" ],
+ "valid": true
+ },
+ {
+ "description": "wrong types",
+ "data": [ "foo", 1 ],
+ "valid": false
+ },
+ {
+ "description": "incomplete array of items",
+ "data": [ 1 ],
+ "valid": true
+ },
+ {
+ "description": "array with additional items",
+ "data": [ 1, "foo", true ],
+ "valid": true
+ },
+ {
+ "description": "empty array",
+ "data": [ ],
+ "valid": true
+ },
+ {
+ "description": "JavaScript pseudo-array is valid",
+ "data": {
+ "0": "invalid",
+ "1": "valid",
+ "length": 2
+ },
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items with boolean schema (true)",
+ "schema": {"items": true},
+ "tests": [
+ {
+ "description": "any array is valid",
+ "data": [ 1, "foo", true ],
+ "valid": true
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items with boolean schema (false)",
+ "schema": {"items": false},
+ "tests": [
+ {
+ "description": "any non-empty array is invalid",
+ "data": [ 1, "foo", true ],
+ "valid": false
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items with boolean schemas",
+ "schema": {
+ "items": [true, false]
+ },
+ "tests": [
+ {
+ "description": "array with one item is valid",
+ "data": [ 1 ],
+ "valid": true
+ },
+ {
+ "description": "array with two items is invalid",
+ "data": [ 1, "foo" ],
+ "valid": false
+ },
+ {
+ "description": "empty array is valid",
+ "data": [],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "items and subitems",
+ "schema": {
+ "$defs": {
+ "item": {
+ "type": "array",
+ "additionalItems": false,
+ "items": [
+ { "$ref": "#/$defs/sub-item" },
+ { "$ref": "#/$defs/sub-item" }
+ ]
+ },
+ "sub-item": {
+ "type": "object",
+ "required": ["foo"]
+ }
+ },
+ "type": "array",
+ "additionalItems": false,
+ "items": [
+ { "$ref": "#/$defs/item" },
+ { "$ref": "#/$defs/item" },
+ { "$ref": "#/$defs/item" }
+ ]
+ },
+ "tests": [
+ {
+ "description": "valid items",
+ "data": [
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": true
+ },
+ {
+ "description": "too many items",
+ "data": [
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "too many sub-items",
+ "data": [
+ [ {"foo": null}, {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "wrong item",
+ "data": [
+ {"foo": null},
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "wrong sub-item",
+ "data": [
+ [ {}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ],
+ [ {"foo": null}, {"foo": null} ]
+ ],
+ "valid": false
+ },
+ {
+ "description": "fewer items is valid",
+ "data": [
+ [ {"foo": null} ],
+ [ {"foo": null} ]
+ ],
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested items",
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "number"
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid nested array",
+ "data": [[[[1]], [[2],[3]]], [[[4], [5], [6]]]],
+ "valid": true
+ },
+ {
+ "description": "nested array with invalid type",
+ "data": [[[["1"]], [[2],[3]]], [[[4], [5], [6]]]],
+ "valid": false
+ },
+ {
+ "description": "not deep enough",
+ "data": [[[1], [2],[3]], [[4], [5], [6]]],
+ "valid": false
+ }
+ ]
+ }
+]
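items.json above covers both forms of `items`: a single schema applied to every element, and an array of schemas applied positionally, where elements beyond the listed schemas are unconstrained unless `additionalItems` forbids them. A sketch of the two forms, under the same `Draft7Validator` assumption as the earlier sketch:

    from jsonschema import Draft7Validator

    # Single-schema form: every element must be an integer.
    every_item = Draft7Validator({"items": {"type": "integer"}})
    assert every_item.is_valid([1, 2, 3])
    assert not every_item.is_valid([1, "x"])

    # Positional form: schemas apply index by index; the trailing extra
    # element is allowed because additionalItems is not restricted here.
    positional = Draft7Validator(
        {"items": [{"type": "integer"}, {"type": "string"}]}
    )
    assert positional.is_valid([1, "foo"])
    assert positional.is_valid([1, "foo", True])
    assert not positional.is_valid(["foo", 1])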
diff --git a/third_party/python/jsonschema/json/tests/latest/maxItems.json b/third_party/python/jsonschema/json/tests/latest/maxItems.json
new file mode 100644
index 0000000000..3b53a6b371
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/maxItems.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "maxItems validation",
+ "schema": {"maxItems": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": [1],
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": [1, 2, 3],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": "foobar",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/maxLength.json b/third_party/python/jsonschema/json/tests/latest/maxLength.json
new file mode 100644
index 0000000000..811d35b253
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/maxLength.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "maxLength validation",
+ "schema": {"maxLength": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": "f",
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": "fo",
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ },
+ {
+ "description": "two supplementary Unicode code points is long enough",
+ "data": "\uD83D\uDCA9\uD83D\uDCA9",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/maxProperties.json b/third_party/python/jsonschema/json/tests/latest/maxProperties.json
new file mode 100644
index 0000000000..513731e4c8
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/maxProperties.json
@@ -0,0 +1,38 @@
+[
+ {
+ "description": "maxProperties validation",
+ "schema": {"maxProperties": 2},
+ "tests": [
+ {
+ "description": "shorter is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "too long is invalid",
+ "data": {"foo": 1, "bar": 2, "baz": 3},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/maximum.json b/third_party/python/jsonschema/json/tests/latest/maximum.json
new file mode 100644
index 0000000000..8150984ee5
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/maximum.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "maximum validation",
+ "schema": {"maximum": 3.0},
+ "tests": [
+ {
+ "description": "below the maximum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": 3.0,
+ "valid": true
+ },
+ {
+ "description": "above the maximum is invalid",
+ "data": 3.5,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/minItems.json b/third_party/python/jsonschema/json/tests/latest/minItems.json
new file mode 100644
index 0000000000..ed5118815e
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/minItems.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "minItems validation",
+ "schema": {"minItems": 1},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": [1],
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "ignores non-arrays",
+ "data": "",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/minLength.json b/third_party/python/jsonschema/json/tests/latest/minLength.json
new file mode 100644
index 0000000000..3f09158dee
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/minLength.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "minLength validation",
+ "schema": {"minLength": 2},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": "fo",
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": "f",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "one supplementary Unicode code point is not long enough",
+ "data": "\uD83D\uDCA9",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/minProperties.json b/third_party/python/jsonschema/json/tests/latest/minProperties.json
new file mode 100644
index 0000000000..49a0726e01
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/minProperties.json
@@ -0,0 +1,38 @@
+[
+ {
+ "description": "minProperties validation",
+ "schema": {"minProperties": 1},
+ "tests": [
+ {
+ "description": "longer is valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "exact length is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "too short is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/minimum.json b/third_party/python/jsonschema/json/tests/latest/minimum.json
new file mode 100644
index 0000000000..2a9c42b3c4
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/minimum.json
@@ -0,0 +1,59 @@
+[
+ {
+ "description": "minimum validation",
+ "schema": {"minimum": 1.1},
+ "tests": [
+ {
+ "description": "above the minimum is valid",
+ "data": 2.6,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": 1.1,
+ "valid": true
+ },
+ {
+ "description": "below the minimum is invalid",
+ "data": 0.6,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "minimum validation with signed integer",
+ "schema": {"minimum": -2},
+ "tests": [
+ {
+ "description": "negative above the minimum is valid",
+ "data": -1,
+ "valid": true
+ },
+ {
+ "description": "positive above the minimum is valid",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "boundary point is valid",
+ "data": -2,
+ "valid": true
+ },
+ {
+ "description": "below the minimum is invalid",
+ "data": -3,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "x",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/multipleOf.json b/third_party/python/jsonschema/json/tests/latest/multipleOf.json
new file mode 100644
index 0000000000..ca3b761805
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/multipleOf.json
@@ -0,0 +1,60 @@
+[
+ {
+ "description": "by int",
+ "schema": {"multipleOf": 2},
+ "tests": [
+ {
+ "description": "int by int",
+ "data": 10,
+ "valid": true
+ },
+ {
+ "description": "int by int fail",
+ "data": 7,
+ "valid": false
+ },
+ {
+ "description": "ignores non-numbers",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "by number",
+ "schema": {"multipleOf": 1.5},
+ "tests": [
+ {
+ "description": "zero is multiple of anything",
+ "data": 0,
+ "valid": true
+ },
+ {
+ "description": "4.5 is multiple of 1.5",
+ "data": 4.5,
+ "valid": true
+ },
+ {
+ "description": "35 is not multiple of 1.5",
+ "data": 35,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "by small number",
+ "schema": {"multipleOf": 0.0001},
+ "tests": [
+ {
+ "description": "0.0075 is multiple of 0.0001",
+ "data": 0.0075,
+ "valid": true
+ },
+ {
+ "description": "0.00751 is not multiple of 0.0001",
+ "data": 0.00751,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/not.json b/third_party/python/jsonschema/json/tests/latest/not.json
new file mode 100644
index 0000000000..98de0eda8d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/not.json
@@ -0,0 +1,117 @@
+[
+ {
+ "description": "not",
+ "schema": {
+ "not": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "allowed",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "disallowed",
+ "data": 1,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not multiple types",
+ "schema": {
+ "not": {"type": ["integer", "boolean"]}
+ },
+ "tests": [
+ {
+ "description": "valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "other mismatch",
+ "data": true,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not more complex schema",
+ "schema": {
+ "not": {
+ "type": "object",
+ "properties": {
+ "foo": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "other match",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"foo": "bar"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "forbidden property",
+ "schema": {
+ "properties": {
+ "foo": {
+ "not": {}
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "property present",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "property absent",
+ "data": {"bar": 1, "baz": 2},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "not with boolean schema true",
+ "schema": {"not": true},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "not with boolean schema false",
+ "schema": {"not": false},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/oneOf.json b/third_party/python/jsonschema/json/tests/latest/oneOf.json
new file mode 100644
index 0000000000..57640b7afb
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/oneOf.json
@@ -0,0 +1,206 @@
+[
+ {
+ "description": "oneOf",
+ "schema": {
+ "oneOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "minimum": 2
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first oneOf valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "second oneOf valid",
+ "data": 2.5,
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "neither oneOf valid",
+ "data": 1.5,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with base schema",
+ "schema": {
+ "type": "string",
+ "oneOf" : [
+ {
+ "minLength": 2
+ },
+ {
+ "maxLength": 4
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "mismatch base schema",
+ "data": 3,
+ "valid": false
+ },
+ {
+ "description": "one oneOf valid",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, all true",
+ "schema": {"oneOf": [true, true, true]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, one true",
+ "schema": {"oneOf": [true, false, false]},
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, more than one true",
+ "schema": {"oneOf": [true, true, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with boolean schemas, all false",
+ "schema": {"oneOf": [false, false, false]},
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf complex types",
+ "schema": {
+ "oneOf": [
+ {
+ "properties": {
+ "bar": {"type": "integer"}
+ },
+ "required": ["bar"]
+ },
+ {
+ "properties": {
+ "foo": {"type": "string"}
+ },
+ "required": ["foo"]
+ }
+ ]
+ },
+ "tests": [
+ {
+ "description": "first oneOf valid (complex)",
+ "data": {"bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second oneOf valid (complex)",
+ "data": {"foo": "baz"},
+ "valid": true
+ },
+ {
+ "description": "both oneOf valid (complex)",
+ "data": {"foo": "baz", "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "neither oneOf valid (complex)",
+ "data": {"foo": 2, "bar": "quux"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with empty schema",
+ "schema": {
+ "oneOf": [
+ { "type": "number" },
+ {}
+ ]
+ },
+ "tests": [
+ {
+ "description": "one valid - valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "both valid - invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "oneOf with required",
+ "schema": {
+ "type": "object",
+ "oneOf": [
+ { "required": ["foo", "bar"] },
+ { "required": ["foo", "baz"] }
+ ]
+ },
+ "tests": [
+ {
+ "description": "both invalid - invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "first valid - valid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": true
+ },
+ {
+ "description": "second valid - valid",
+ "data": {"foo": 1, "baz": 3},
+ "valid": true
+ },
+ {
+ "description": "both valid - invalid",
+ "data": {"foo": 1, "bar": 2, "baz" : 3},
+ "valid": false
+ }
+ ]
+ }
+]
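The oneOf.json cases above encode the exactly-one rule: an instance is valid only when precisely one subschema matches, so zero matches and two or more matches both fail. A short sketch of the first schema in the file, under the same `Draft7Validator` assumption as before:

    from jsonschema import Draft7Validator

    v = Draft7Validator({"oneOf": [{"type": "integer"}, {"minimum": 2}]})

    assert v.is_valid(1)        # only the first subschema matches
    assert v.is_valid(2.5)      # only the second subschema matches
    assert not v.is_valid(3)    # both match, so exactly-one fails
    assert not v.is_valid(1.5)  # neither matches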
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/bignum.json b/third_party/python/jsonschema/json/tests/latest/optional/bignum.json
new file mode 100644
index 0000000000..fac275e21f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/bignum.json
@@ -0,0 +1,105 @@
+[
+ {
+ "description": "integer",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "a bignum is an integer",
+ "data": 12345678910111213141516171819202122232425262728293031,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "number",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "a bignum is a number",
+ "data": 98249283749234923498293171823948729348710298301928331,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "integer",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "a negative bignum is an integer",
+ "data": -12345678910111213141516171819202122232425262728293031,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "number",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "a negative bignum is a number",
+ "data": -98249283749234923498293171823948729348710298301928331,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "string",
+ "schema": {"type": "string"},
+ "tests": [
+ {
+ "description": "a bignum is not a string",
+ "data": 98249283749234923498293171823948729348710298301928331,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "integer comparison",
+ "schema": {"maximum": 18446744073709551615},
+ "tests": [
+ {
+ "description": "comparison works for high numbers",
+ "data": 18446744073709551600,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "float comparison with high precision",
+ "schema": {
+ "exclusiveMaximum": 972783798187987123879878123.18878137
+ },
+ "tests": [
+ {
+ "description": "comparison works for high numbers",
+ "data": 972783798187987123879878123.188781371,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "integer comparison",
+ "schema": {"minimum": -18446744073709551615},
+ "tests": [
+ {
+ "description": "comparison works for very negative numbers",
+ "data": -18446744073709551600,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "float comparison with high precision on negative numbers",
+ "schema": {
+ "exclusiveMinimum": -972783798187987123879878123.18878137
+ },
+ "tests": [
+ {
+ "description": "comparison works for very negative numbers",
+ "data": -972783798187987123879878123.188781371,
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/content.json b/third_party/python/jsonschema/json/tests/latest/optional/content.json
new file mode 100644
index 0000000000..3f5a7430b2
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/content.json
@@ -0,0 +1,77 @@
+[
+ {
+ "description": "validation of string-encoded content based on media type",
+ "schema": {
+ "contentMediaType": "application/json"
+ },
+ "tests": [
+ {
+ "description": "a valid JSON document",
+ "data": "{\"foo\": \"bar\"}",
+ "valid": true
+ },
+ {
+ "description": "an invalid JSON document",
+ "data": "{:}",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of binary string-encoding",
+ "schema": {
+ "contentEncoding": "base64"
+ },
+ "tests": [
+ {
+ "description": "a valid base64 string",
+ "data": "eyJmb28iOiAiYmFyIn0K",
+ "valid": true
+ },
+ {
+ "description": "an invalid base64 string (% is not a valid character)",
+ "data": "eyJmb28iOi%iYmFyIn0K",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "validation of binary-encoded media type documents",
+ "schema": {
+ "contentMediaType": "application/json",
+ "contentEncoding": "base64"
+ },
+ "tests": [
+ {
+ "description": "a valid base64-encoded JSON document",
+ "data": "eyJmb28iOiAiYmFyIn0K",
+ "valid": true
+ },
+ {
+ "description": "a validly-encoded invalid JSON document",
+ "data": "ezp9Cg==",
+ "valid": false
+ },
+ {
+ "description": "an invalid base64 string that is valid JSON",
+ "data": "{}",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": 100,
+ "valid": true
+ }
+ ]
+ }
+]
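The optional content.json cases above describe string-encoded documents: `contentEncoding: base64` combined with `contentMediaType: application/json` means the string must base64-decode to parseable JSON, while non-strings are ignored. The helper below is a rough, self-contained approximation of that combined check, not the library's actual code path; the function name is invented for illustration.

    import base64
    import json

    def looks_like_base64_json(instance):
        # Approximates contentEncoding=base64 + contentMediaType=application/json.
        if not isinstance(instance, str):
            return True  # non-strings are ignored, as in the suite above
        try:
            decoded = base64.b64decode(instance, validate=True)
            json.loads(decoded)
        except ValueError:
            return False
        return True

    assert looks_like_base64_json("eyJmb28iOiAiYmFyIn0K")   # base64 of {"foo": "bar"}
    assert not looks_like_base64_json("ezp9Cg==")           # base64 of the invalid {:}
    assert not looks_like_base64_json("{}")                 # valid JSON but not base64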
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/ecmascript-regex.json b/third_party/python/jsonschema/json/tests/latest/optional/ecmascript-regex.json
new file mode 100644
index 0000000000..d82e0feb03
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/ecmascript-regex.json
@@ -0,0 +1,213 @@
+[
+ {
+ "description": "ECMA 262 regex non-compliance",
+ "schema": { "format": "regex" },
+ "tests": [
+ {
+ "description": "ECMA 262 has no support for \\Z anchor from .NET",
+ "data": "^\\S(|(.|\\n)*\\S)\\Z",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex $ does not match trailing newline",
+ "schema": {
+ "type": "string",
+ "pattern": "^abc$"
+ },
+ "tests": [
+ {
+ "description": "matches in Python, but should not in jsonschema",
+ "data": "abc\n",
+ "valid": false
+ },
+ {
+ "description": "should match",
+ "data": "abc",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex converts \\a to ascii BEL",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\a$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\a",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0007",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex escapes control codes with \\c and upper letter",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\cC$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\cC",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0003",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 regex escapes control codes with \\c and lower letter",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\cc$"
+ },
+ "tests": [
+ {
+ "description": "does not match",
+ "data": "\\cc",
+ "valid": false
+ },
+ {
+ "description": "matches",
+ "data": "\u0003",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\d matches ascii digits only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\d$"
+ },
+ "tests": [
+ {
+ "description": "ASCII zero matches",
+ "data": "0",
+ "valid": true
+ },
+ {
+ "description": "NKO DIGIT ZERO does not match (unlike e.g. Python)",
+ "data": "߀",
+ "valid": false
+ },
+ {
+ "description": "NKO DIGIT ZERO (as \\u escape) does not match",
+ "data": "\u07c0",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\D matches everything but ascii digits",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\D$"
+ },
+ "tests": [
+ {
+ "description": "ASCII zero does not match",
+ "data": "0",
+ "valid": false
+ },
+ {
+ "description": "NKO DIGIT ZERO matches (unlike e.g. Python)",
+ "data": "߀",
+ "valid": true
+ },
+ {
+ "description": "NKO DIGIT ZERO (as \\u escape) matches",
+ "data": "\u07c0",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\w matches ascii letters only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\w$"
+ },
+ "tests": [
+ {
+ "description": "ASCII 'a' matches",
+ "data": "a",
+ "valid": true
+ },
+ {
+ "description": "latin-1 e-acute does not match (unlike e.g. Python)",
+ "data": "é",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\w matches everything but ascii letters",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\W$"
+ },
+ "tests": [
+ {
+ "description": "ASCII 'a' does not match",
+ "data": "a",
+ "valid": false
+ },
+ {
+ "description": "latin-1 e-acute matches (unlike e.g. Python)",
+ "data": "é",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\s matches ascii whitespace only",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\s$"
+ },
+ "tests": [
+ {
+ "description": "ASCII space matches",
+ "data": " ",
+ "valid": true
+ },
+ {
+ "description": "latin-1 non-breaking-space does not match (unlike e.g. Python)",
+ "data": "\u00a0",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ECMA 262 \\S matches everything but ascii whitespace",
+ "schema": {
+ "type": "string",
+ "pattern": "^\\S$"
+ },
+ "tests": [
+ {
+ "description": "ASCII space does not match",
+ "data": " ",
+ "valid": false
+ },
+ {
+ "description": "latin-1 non-breaking-space matches (unlike e.g. Python)",
+ "data": "\u00a0",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/date-time.json b/third_party/python/jsonschema/json/tests/latest/optional/format/date-time.json
new file mode 100644
index 0000000000..dfccee6e67
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/date-time.json
@@ -0,0 +1,53 @@
+[
+ {
+ "description": "validation of date-time strings",
+ "schema": {"format": "date-time"},
+ "tests": [
+ {
+ "description": "a valid date-time string",
+ "data": "1963-06-19T08:30:06.283185Z",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string without second fraction",
+ "data": "1963-06-19T08:30:06Z",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string with plus offset",
+ "data": "1937-01-01T12:00:27.87+00:20",
+ "valid": true
+ },
+ {
+ "description": "a valid date-time string with minus offset",
+ "data": "1990-12-31T15:59:50.123-08:00",
+ "valid": true
+ },
+ {
+ "description": "a invalid day in date-time string",
+ "data": "1990-02-31T15:59:60.123-08:00",
+ "valid": false
+ },
+ {
+ "description": "an invalid offset in date-time string",
+ "data": "1990-12-31T15:59:60-24:00",
+ "valid": false
+ },
+ {
+ "description": "an invalid date-time string",
+ "data": "06/19/1963 08:30:06 PST",
+ "valid": false
+ },
+ {
+ "description": "case-insensitive T and Z",
+ "data": "1963-06-19t08:30:06.283185z",
+ "valid": true
+ },
+ {
+ "description": "only RFC3339 not all of ISO 8601 are valid",
+ "data": "2013-350T01:01:01",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/date.json b/third_party/python/jsonschema/json/tests/latest/optional/format/date.json
new file mode 100644
index 0000000000..cd23baae3a
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/date.json
@@ -0,0 +1,23 @@
+[
+ {
+ "description": "validation of date strings",
+ "schema": {"format": "date"},
+ "tests": [
+ {
+ "description": "a valid date string",
+ "data": "1963-06-19",
+ "valid": true
+ },
+ {
+ "description": "an invalid date-time string",
+ "data": "06/19/1963",
+ "valid": false
+ },
+ {
+ "description": "only RFC3339 not all of ISO 8601 are valid",
+ "data": "2013-350",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/email.json b/third_party/python/jsonschema/json/tests/latest/optional/format/email.json
new file mode 100644
index 0000000000..c837c84bc1
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/email.json
@@ -0,0 +1,18 @@
+[
+ {
+ "description": "validation of e-mail addresses",
+ "schema": {"format": "email"},
+ "tests": [
+ {
+ "description": "a valid e-mail address",
+ "data": "joe.bloggs@example.com",
+ "valid": true
+ },
+ {
+ "description": "an invalid e-mail address",
+ "data": "2962",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/hostname.json b/third_party/python/jsonschema/json/tests/latest/optional/format/hostname.json
new file mode 100644
index 0000000000..d22e57db03
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/hostname.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "validation of host names",
+ "schema": {"format": "hostname"},
+ "tests": [
+ {
+ "description": "a valid host name",
+ "data": "www.example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid punycoded IDN hostname",
+ "data": "xn--4gbwdl.xn--wgbh1c",
+ "valid": true
+ },
+ {
+ "description": "a host name starting with an illegal character",
+ "data": "-a-host-name-that-starts-with--",
+ "valid": false
+ },
+ {
+ "description": "a host name containing illegal characters",
+ "data": "not_a_valid_host_name",
+ "valid": false
+ },
+ {
+ "description": "a host name with a component too long",
+ "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/idn-email.json b/third_party/python/jsonschema/json/tests/latest/optional/format/idn-email.json
new file mode 100644
index 0000000000..637409ea8f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/idn-email.json
@@ -0,0 +1,18 @@
+[
+ {
+ "description": "validation of an internationalized e-mail addresses",
+ "schema": {"format": "idn-email"},
+ "tests": [
+ {
+ "description": "a valid idn e-mail (example@example.test in Hangul)",
+ "data": "실례@실례.테스트",
+ "valid": true
+ },
+ {
+ "description": "an invalid idn e-mail address",
+ "data": "2962",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/idn-hostname.json b/third_party/python/jsonschema/json/tests/latest/optional/format/idn-hostname.json
new file mode 100644
index 0000000000..3291820e6f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/idn-hostname.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "validation of internationalized host names",
+ "schema": {"format": "idn-hostname"},
+ "tests": [
+ {
+ "description": "a valid host name (example.test in Hangul)",
+ "data": "실례.테스트",
+ "valid": true
+ },
+ {
+ "description": "illegal first char U+302E Hangul single dot tone mark",
+ "data": "〮실례.테스트",
+ "valid": false
+ },
+ {
+ "description": "contains illegal char U+302E Hangul single dot tone mark",
+ "data": "실〮례.테스트",
+ "valid": false
+ },
+ {
+ "description": "a host name with a component too long",
+ "data": "실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실례례테스트례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례테스트례례실례.테스트",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/ipv4.json b/third_party/python/jsonschema/json/tests/latest/optional/format/ipv4.json
new file mode 100644
index 0000000000..661148a74d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/ipv4.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "validation of IP addresses",
+ "schema": {"format": "ipv4"},
+ "tests": [
+ {
+ "description": "a valid IP address",
+ "data": "192.168.0.1",
+ "valid": true
+ },
+ {
+ "description": "an IP address with too many components",
+ "data": "127.0.0.0.1",
+ "valid": false
+ },
+ {
+ "description": "an IP address with out-of-range values",
+ "data": "256.256.256.256",
+ "valid": false
+ },
+ {
+ "description": "an IP address without 4 components",
+ "data": "127.0",
+ "valid": false
+ },
+ {
+ "description": "an IP address as an integer",
+ "data": "0x7f000001",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/ipv6.json b/third_party/python/jsonschema/json/tests/latest/optional/format/ipv6.json
new file mode 100644
index 0000000000..f67559b35d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/ipv6.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "validation of IPv6 addresses",
+ "schema": {"format": "ipv6"},
+ "tests": [
+ {
+ "description": "a valid IPv6 address",
+ "data": "::1",
+ "valid": true
+ },
+ {
+ "description": "an IPv6 address with out-of-range values",
+ "data": "12345::",
+ "valid": false
+ },
+ {
+ "description": "an IPv6 address with too many components",
+ "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1",
+ "valid": false
+ },
+ {
+ "description": "an IPv6 address containing illegal characters",
+ "data": "::laptop",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/iri-reference.json b/third_party/python/jsonschema/json/tests/latest/optional/format/iri-reference.json
new file mode 100644
index 0000000000..1fd779c23c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/iri-reference.json
@@ -0,0 +1,43 @@
+[
+ {
+ "description": "validation of IRI References",
+ "schema": {"format": "iri-reference"},
+ "tests": [
+ {
+ "description": "a valid IRI",
+ "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx",
+ "valid": true
+ },
+ {
+ "description": "a valid protocol-relative IRI Reference",
+ "data": "//ƒøø.ßår/?∂éœ=πîx#πîüx",
+ "valid": true
+ },
+ {
+ "description": "a valid relative IRI Reference",
+ "data": "/âππ",
+ "valid": true
+ },
+ {
+ "description": "an invalid IRI Reference",
+ "data": "\\\\WINDOWS\\filëßåré",
+ "valid": false
+ },
+ {
+ "description": "a valid IRI Reference",
+ "data": "âππ",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI fragment",
+ "data": "#ƒrägmênt",
+ "valid": true
+ },
+ {
+ "description": "an invalid IRI fragment",
+ "data": "#ƒräg\\mênt",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/iri.json b/third_party/python/jsonschema/json/tests/latest/optional/format/iri.json
new file mode 100644
index 0000000000..ed54094c01
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/iri.json
@@ -0,0 +1,53 @@
+[
+ {
+ "description": "validation of IRIs",
+ "schema": {"format": "iri"},
+ "tests": [
+ {
+ "description": "a valid IRI with anchor tag",
+ "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI with anchor tag and parantheses",
+ "data": "http://ƒøø.com/blah_(wîkïpédiå)_blah#ßité-1",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI with URL-encoded stuff",
+ "data": "http://ƒøø.ßår/?q=Test%20URL-encoded%20stuff",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI with many special characters",
+ "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid IRI based on IPv6",
+ "data": "http://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]",
+ "valid": true
+ },
+ {
+ "description": "an invalid IRI based on IPv6",
+ "data": "http://2001:0db8:85a3:0000:0000:8a2e:0370:7334",
+ "valid": false
+ },
+ {
+ "description": "an invalid relative IRI Reference",
+ "data": "/abc",
+ "valid": false
+ },
+ {
+ "description": "an invalid IRI",
+ "data": "\\\\WINDOWS\\filëßåré",
+ "valid": false
+ },
+ {
+ "description": "an invalid IRI though valid IRI reference",
+ "data": "âππ",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/json-pointer.json b/third_party/python/jsonschema/json/tests/latest/optional/format/json-pointer.json
new file mode 100644
index 0000000000..65c2f064f0
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/json-pointer.json
@@ -0,0 +1,168 @@
+[
+ {
+ "description": "validation of JSON-pointers (JSON String Representation)",
+ "schema": {"format": "json-pointer"},
+ "tests": [
+ {
+ "description": "a valid JSON-pointer",
+ "data": "/foo/bar~0/baz~1/%a",
+ "valid": true
+ },
+ {
+ "description": "not a valid JSON-pointer (~ not escaped)",
+ "data": "/foo/bar~",
+ "valid": false
+ },
+ {
+ "description": "valid JSON-pointer with empty segment",
+ "data": "/foo//bar",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer with the last empty segment",
+ "data": "/foo/bar/",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #1",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #2",
+ "data": "/foo",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #3",
+ "data": "/foo/0",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #4",
+ "data": "/",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #5",
+ "data": "/a~1b",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #6",
+ "data": "/c%d",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #7",
+ "data": "/e^f",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #8",
+ "data": "/g|h",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #9",
+ "data": "/i\\j",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #10",
+ "data": "/k\"l",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #11",
+ "data": "/ ",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer as stated in RFC 6901 #12",
+ "data": "/m~0n",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer used adding to the last array position",
+ "data": "/foo/-",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (- used as object member name)",
+ "data": "/foo/-/bar",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (multiple escaped characters)",
+ "data": "/~1~0~0~1~1",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (escaped with fraction part) #1",
+ "data": "/~1.1",
+ "valid": true
+ },
+ {
+ "description": "valid JSON-pointer (escaped with fraction part) #2",
+ "data": "/~0.1",
+ "valid": true
+ },
+ {
+ "description": "not a valid JSON-pointer (URI Fragment Identifier) #1",
+ "data": "#",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (URI Fragment Identifier) #2",
+ "data": "#/",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (URI Fragment Identifier) #3",
+ "data": "#a",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (some escaped, but not all) #1",
+ "data": "/~0~",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (some escaped, but not all) #2",
+ "data": "/~0/~",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (wrong escape character) #1",
+ "data": "/~2",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (wrong escape character) #2",
+ "data": "/~-1",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (multiple characters not escaped)",
+ "data": "/~~",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (isn't empty nor starts with /) #1",
+ "data": "a",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (isn't empty nor starts with /) #2",
+ "data": "0",
+ "valid": false
+ },
+ {
+ "description": "not a valid JSON-pointer (isn't empty nor starts with /) #3",
+ "data": "a/a",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/regex.json b/third_party/python/jsonschema/json/tests/latest/optional/format/regex.json
new file mode 100644
index 0000000000..d99d021ec0
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/regex.json
@@ -0,0 +1,18 @@
+[
+ {
+ "description": "validation of regular expressions",
+ "schema": {"format": "regex"},
+ "tests": [
+ {
+ "description": "a valid regular expression",
+ "data": "([abc])+\\s+$",
+ "valid": true
+ },
+ {
+ "description": "a regular expression with unclosed parens is invalid",
+ "data": "^(abc]",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/relative-json-pointer.json b/third_party/python/jsonschema/json/tests/latest/optional/format/relative-json-pointer.json
new file mode 100644
index 0000000000..ceeb743a32
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/relative-json-pointer.json
@@ -0,0 +1,33 @@
+[
+ {
+ "description": "validation of Relative JSON Pointers (RJP)",
+ "schema": {"format": "relative-json-pointer"},
+ "tests": [
+ {
+ "description": "a valid upwards RJP",
+ "data": "1",
+ "valid": true
+ },
+ {
+ "description": "a valid downwards RJP",
+ "data": "0/foo/bar",
+ "valid": true
+ },
+ {
+ "description": "a valid up and then down RJP, with array index",
+ "data": "2/0/baz/1/zip",
+ "valid": true
+ },
+ {
+ "description": "a valid RJP taking the member or index name",
+ "data": "0#",
+ "valid": true
+ },
+ {
+ "description": "an invalid RJP that is a valid JSON Pointer",
+ "data": "/foo/bar",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/time.json b/third_party/python/jsonschema/json/tests/latest/optional/format/time.json
new file mode 100644
index 0000000000..4ec8a01a3e
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/time.json
@@ -0,0 +1,23 @@
+[
+ {
+ "description": "validation of time strings",
+ "schema": {"format": "time"},
+ "tests": [
+ {
+ "description": "a valid time string",
+ "data": "08:30:06.283185Z",
+ "valid": true
+ },
+ {
+ "description": "an invalid time string",
+ "data": "08:30:06 PST",
+ "valid": false
+ },
+ {
+ "description": "only RFC3339 not all of ISO 8601 are valid",
+ "data": "01:01:01,1111",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/uri-reference.json b/third_party/python/jsonschema/json/tests/latest/optional/format/uri-reference.json
new file mode 100644
index 0000000000..e4c9eef63c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/uri-reference.json
@@ -0,0 +1,43 @@
+[
+ {
+ "description": "validation of URI References",
+ "schema": {"format": "uri-reference"},
+ "tests": [
+ {
+ "description": "a valid URI",
+ "data": "http://foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+ "description": "a valid protocol-relative URI Reference",
+ "data": "//foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+ "description": "a valid relative URI Reference",
+ "data": "/abc",
+ "valid": true
+ },
+ {
+ "description": "an invalid URI Reference",
+ "data": "\\\\WINDOWS\\fileshare",
+ "valid": false
+ },
+ {
+ "description": "a valid URI Reference",
+ "data": "abc",
+ "valid": true
+ },
+ {
+ "description": "a valid URI fragment",
+ "data": "#fragment",
+ "valid": true
+ },
+ {
+ "description": "an invalid URI fragment",
+ "data": "#frag\\ment",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/uri-template.json b/third_party/python/jsonschema/json/tests/latest/optional/format/uri-template.json
new file mode 100644
index 0000000000..33ab76ee73
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/uri-template.json
@@ -0,0 +1,28 @@
+[
+ {
+ "description": "format: uri-template",
+ "schema": {"format": "uri-template"},
+ "tests": [
+ {
+ "description": "a valid uri-template",
+ "data": "http://example.com/dictionary/{term:1}/{term}",
+ "valid": true
+ },
+ {
+ "description": "an invalid uri-template",
+ "data": "http://example.com/dictionary/{term:1}/{term",
+ "valid": false
+ },
+ {
+ "description": "a valid uri-template without variables",
+ "data": "http://example.com/dictionary",
+ "valid": true
+ },
+ {
+ "description": "a valid relative uri-template",
+ "data": "dictionary/{term:1}/{term}",
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/format/uri.json b/third_party/python/jsonschema/json/tests/latest/optional/format/uri.json
new file mode 100644
index 0000000000..25cc40c80a
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/format/uri.json
@@ -0,0 +1,103 @@
+[
+ {
+ "description": "validation of URIs",
+ "schema": {"format": "uri"},
+ "tests": [
+ {
+ "description": "a valid URL with anchor tag",
+ "data": "http://foo.bar/?baz=qux#quux",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with anchor tag and parantheses",
+ "data": "http://foo.com/blah_(wikipedia)_blah#cite-1",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with URL-encoded stuff",
+ "data": "http://foo.bar/?q=Test%20URL-encoded%20stuff",
+ "valid": true
+ },
+ {
+ "description": "a valid puny-coded URL ",
+ "data": "http://xn--nw2a.xn--j6w193g/",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with many special characters",
+ "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid URL based on IPv4",
+ "data": "http://223.255.255.254",
+ "valid": true
+ },
+ {
+ "description": "a valid URL with ftp scheme",
+ "data": "ftp://ftp.is.co.za/rfc/rfc1808.txt",
+ "valid": true
+ },
+ {
+ "description": "a valid URL for a simple text file",
+ "data": "http://www.ietf.org/rfc/rfc2396.txt",
+ "valid": true
+ },
+ {
+ "description": "a valid URL ",
+ "data": "ldap://[2001:db8::7]/c=GB?objectClass?one",
+ "valid": true
+ },
+ {
+ "description": "a valid mailto URI",
+ "data": "mailto:John.Doe@example.com",
+ "valid": true
+ },
+ {
+ "description": "a valid newsgroup URI",
+ "data": "news:comp.infosystems.www.servers.unix",
+ "valid": true
+ },
+ {
+ "description": "a valid tel URI",
+ "data": "tel:+1-816-555-1212",
+ "valid": true
+ },
+ {
+ "description": "a valid URN",
+ "data": "urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
+ "valid": true
+ },
+ {
+ "description": "an invalid protocol-relative URI Reference",
+ "data": "//foo.bar/?baz=qux#quux",
+ "valid": false
+ },
+ {
+ "description": "an invalid relative URI Reference",
+ "data": "/abc",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI",
+ "data": "\\\\WINDOWS\\fileshare",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI though valid URI reference",
+ "data": "abc",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI with spaces",
+ "data": "http:// shouldfail.com",
+ "valid": false
+ },
+ {
+ "description": "an invalid URI with spaces and missing scheme",
+ "data": ":// should fail",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/optional/zeroTerminatedFloats.json b/third_party/python/jsonschema/json/tests/latest/optional/zeroTerminatedFloats.json
new file mode 100644
index 0000000000..1bcdf96036
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/optional/zeroTerminatedFloats.json
@@ -0,0 +1,15 @@
+[
+ {
+ "description": "some languages do not distinguish between different types of numeric value",
+ "schema": {
+ "type": "integer"
+ },
+ "tests": [
+ {
+ "description": "a float without fractional part is an integer",
+ "data": 1.0,
+ "valid": true
+ }
+ ]
+ }
+]
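
The fixture above captures the draft 6+ rule that a numeric value with a zero fractional part counts as an integer even when it arrives as a float. A one-line check against this vendored version (a sketch; the behaviour is expected rather than verified here):

    from jsonschema import Draft7Validator

    validator = Draft7Validator({"type": "integer"})
    print(validator.is_valid(1.0))  # expected True: no fractional part
    print(validator.is_valid(1.5))  # False
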
diff --git a/third_party/python/jsonschema/json/tests/latest/pattern.json b/third_party/python/jsonschema/json/tests/latest/pattern.json
new file mode 100644
index 0000000000..25e7299731
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/pattern.json
@@ -0,0 +1,34 @@
+[
+ {
+ "description": "pattern validation",
+ "schema": {"pattern": "^a*$"},
+ "tests": [
+ {
+ "description": "a matching pattern is valid",
+ "data": "aaa",
+ "valid": true
+ },
+ {
+ "description": "a non-matching pattern is invalid",
+ "data": "abc",
+ "valid": false
+ },
+ {
+ "description": "ignores non-strings",
+ "data": true,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "pattern is not anchored",
+ "schema": {"pattern": "a+"},
+ "tests": [
+ {
+ "description": "matches a substring",
+ "data": "xxaayy",
+ "valid": true
+ }
+ ]
+ }
+]
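
The second group above documents that ``pattern`` is a substring search, not a full match: the regex is applied with search semantics, so "xxaayy" satisfies "a+". A small sketch (illustrative only):

    import re
    from jsonschema import Draft7Validator

    print(Draft7Validator({"pattern": "a+"}).is_valid("xxaayy"))  # True: substring match
    print(bool(re.search("a+", "xxaayy")))                        # True, same semantics
    print(bool(re.fullmatch("a+", "xxaayy")))                     # False, what anchoring would mean
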
diff --git a/third_party/python/jsonschema/json/tests/latest/patternProperties.json b/third_party/python/jsonschema/json/tests/latest/patternProperties.json
new file mode 100644
index 0000000000..1d04a1675c
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/patternProperties.json
@@ -0,0 +1,151 @@
+[
+ {
+ "description":
+ "patternProperties validates properties matching a regex",
+ "schema": {
+ "patternProperties": {
+ "f.*o": {"type": "integer"}
+ }
+ },
+ "tests": [
+ {
+ "description": "a single valid match is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "multiple valid matches is valid",
+ "data": {"foo": 1, "foooooo" : 2},
+ "valid": true
+ },
+ {
+ "description": "a single invalid match is invalid",
+ "data": {"foo": "bar", "fooooo": 2},
+ "valid": false
+ },
+ {
+ "description": "multiple invalid matches is invalid",
+ "data": {"foo": "bar", "foooooo" : "baz"},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": ["foo"],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple simultaneous patternProperties are validated",
+ "schema": {
+ "patternProperties": {
+ "a*": {"type": "integer"},
+ "aaa*": {"maximum": 20}
+ }
+ },
+ "tests": [
+ {
+ "description": "a single valid match is valid",
+ "data": {"a": 21},
+ "valid": true
+ },
+ {
+ "description": "a simultaneous match is valid",
+ "data": {"aaaa": 18},
+ "valid": true
+ },
+ {
+ "description": "multiple matches is valid",
+ "data": {"a": 21, "aaaa": 18},
+ "valid": true
+ },
+ {
+ "description": "an invalid due to one is invalid",
+ "data": {"a": "bar"},
+ "valid": false
+ },
+ {
+ "description": "an invalid due to the other is invalid",
+ "data": {"aaaa": 31},
+ "valid": false
+ },
+ {
+ "description": "an invalid due to both is invalid",
+ "data": {"aaa": "foo", "aaaa": 31},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "regexes are not anchored by default and are case sensitive",
+ "schema": {
+ "patternProperties": {
+ "[0-9]{2,}": { "type": "boolean" },
+ "X_": { "type": "string" }
+ }
+ },
+ "tests": [
+ {
+ "description": "non recognized members are ignored",
+ "data": { "answer 1": "42" },
+ "valid": true
+ },
+ {
+ "description": "recognized members are accounted for",
+ "data": { "a31b": null },
+ "valid": false
+ },
+ {
+ "description": "regexes are case sensitive",
+ "data": { "a_x_3": 3 },
+ "valid": true
+ },
+ {
+ "description": "regexes are case sensitive, 2",
+ "data": { "a_X_3": 3 },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "patternProperties with boolean schemas",
+ "schema": {
+ "patternProperties": {
+ "f.*": true,
+ "b.*": false
+ }
+ },
+ "tests": [
+ {
+ "description": "object with property matching schema true is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "object with property matching schema false is invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "object with both properties is invalid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/properties.json b/third_party/python/jsonschema/json/tests/latest/properties.json
new file mode 100644
index 0000000000..b86c181982
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/properties.json
@@ -0,0 +1,167 @@
+[
+ {
+ "description": "object properties validation",
+ "schema": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"type": "string"}
+ }
+ },
+ "tests": [
+ {
+ "description": "both properties present and valid is valid",
+ "data": {"foo": 1, "bar": "baz"},
+ "valid": true
+ },
+ {
+ "description": "one property invalid is invalid",
+ "data": {"foo": 1, "bar": {}},
+ "valid": false
+ },
+ {
+ "description": "both properties invalid is invalid",
+ "data": {"foo": [], "bar": {}},
+ "valid": false
+ },
+ {
+ "description": "doesn't invalidate other properties",
+ "data": {"quux": []},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description":
+ "properties, patternProperties, additionalProperties interaction",
+ "schema": {
+ "properties": {
+ "foo": {"type": "array", "maxItems": 3},
+ "bar": {"type": "array"}
+ },
+ "patternProperties": {"f.o": {"minItems": 2}},
+ "additionalProperties": {"type": "integer"}
+ },
+ "tests": [
+ {
+ "description": "property validates property",
+ "data": {"foo": [1, 2]},
+ "valid": true
+ },
+ {
+ "description": "property invalidates property",
+ "data": {"foo": [1, 2, 3, 4]},
+ "valid": false
+ },
+ {
+ "description": "patternProperty invalidates property",
+ "data": {"foo": []},
+ "valid": false
+ },
+ {
+ "description": "patternProperty validates nonproperty",
+ "data": {"fxo": [1, 2]},
+ "valid": true
+ },
+ {
+ "description": "patternProperty invalidates nonproperty",
+ "data": {"fxo": []},
+ "valid": false
+ },
+ {
+ "description": "additionalProperty ignores property",
+ "data": {"bar": []},
+ "valid": true
+ },
+ {
+ "description": "additionalProperty validates others",
+ "data": {"quux": 3},
+ "valid": true
+ },
+ {
+ "description": "additionalProperty invalidates others",
+ "data": {"quux": "foo"},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "properties with boolean schema",
+ "schema": {
+ "properties": {
+ "foo": true,
+ "bar": false
+ }
+ },
+ "tests": [
+ {
+ "description": "no property present is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "only 'true' property present is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "only 'false' property present is invalid",
+ "data": {"bar": 2},
+ "valid": false
+ },
+ {
+ "description": "both properties present is invalid",
+ "data": {"foo": 1, "bar": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "properties with escaped characters",
+ "schema": {
+ "properties": {
+ "foo\nbar": {"type": "number"},
+ "foo\"bar": {"type": "number"},
+ "foo\\bar": {"type": "number"},
+ "foo\rbar": {"type": "number"},
+ "foo\tbar": {"type": "number"},
+ "foo\fbar": {"type": "number"}
+ }
+ },
+ "tests": [
+ {
+ "description": "object with all numbers is valid",
+ "data": {
+ "foo\nbar": 1,
+ "foo\"bar": 1,
+ "foo\\bar": 1,
+ "foo\rbar": 1,
+ "foo\tbar": 1,
+ "foo\fbar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with strings is invalid",
+ "data": {
+ "foo\nbar": "1",
+ "foo\"bar": "1",
+ "foo\\bar": "1",
+ "foo\rbar": "1",
+ "foo\tbar": "1",
+ "foo\fbar": "1"
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/propertyNames.json b/third_party/python/jsonschema/json/tests/latest/propertyNames.json
new file mode 100644
index 0000000000..8423690d90
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/propertyNames.json
@@ -0,0 +1,78 @@
+[
+ {
+ "description": "propertyNames validation",
+ "schema": {
+ "propertyNames": {"maxLength": 3}
+ },
+ "tests": [
+ {
+ "description": "all property names valid",
+ "data": {
+ "f": {},
+ "foo": {}
+ },
+ "valid": true
+ },
+ {
+ "description": "some property names invalid",
+ "data": {
+ "foo": {},
+ "foobar": {}
+ },
+ "valid": false
+ },
+ {
+ "description": "object without properties is valid",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "ignores arrays",
+ "data": [1, 2, 3, 4],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "foobar",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "propertyNames with boolean schema true",
+ "schema": {"propertyNames": true},
+ "tests": [
+ {
+ "description": "object with any properties is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "propertyNames with boolean schema false",
+ "schema": {"propertyNames": false},
+ "tests": [
+ {
+ "description": "object with any properties is invalid",
+ "data": {"foo": 1},
+ "valid": false
+ },
+ {
+ "description": "empty object is valid",
+ "data": {},
+ "valid": true
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/ref.json b/third_party/python/jsonschema/json/tests/latest/ref.json
new file mode 100644
index 0000000000..285de55c06
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/ref.json
@@ -0,0 +1,359 @@
+[
+ {
+ "description": "root pointer ref",
+ "schema": {
+ "properties": {
+ "foo": {"$ref": "#"}
+ },
+ "additionalProperties": false
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": {"foo": false},
+ "valid": true
+ },
+ {
+ "description": "recursive match",
+ "data": {"foo": {"foo": false}},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"bar": false},
+ "valid": false
+ },
+ {
+ "description": "recursive mismatch",
+ "data": {"foo": {"bar": false}},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "relative pointer ref to object",
+ "schema": {
+ "properties": {
+ "foo": {"type": "integer"},
+ "bar": {"$ref": "#/properties/foo"}
+ }
+ },
+ "tests": [
+ {
+ "description": "match",
+ "data": {"bar": 3},
+ "valid": true
+ },
+ {
+ "description": "mismatch",
+ "data": {"bar": true},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "relative pointer ref to array",
+ "schema": {
+ "items": [
+ {"type": "integer"},
+ {"$ref": "#/items/0"}
+ ]
+ },
+ "tests": [
+ {
+ "description": "match array",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "mismatch array",
+ "data": [1, "foo"],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "escaped pointer ref",
+ "schema": {
+ "tilda~field": {"type": "integer"},
+ "slash/field": {"type": "integer"},
+ "percent%field": {"type": "integer"},
+ "properties": {
+ "tilda": {"$ref": "#/tilda~0field"},
+ "slash": {"$ref": "#/slash~1field"},
+ "percent": {"$ref": "#/percent%25field"}
+ }
+ },
+ "tests": [
+ {
+ "description": "slash invalid",
+ "data": {"slash": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "tilda invalid",
+ "data": {"tilda": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "percent invalid",
+ "data": {"percent": "aoeu"},
+ "valid": false
+ },
+ {
+ "description": "slash valid",
+ "data": {"slash": 123},
+ "valid": true
+ },
+ {
+ "description": "tilda valid",
+ "data": {"tilda": 123},
+ "valid": true
+ },
+ {
+ "description": "percent valid",
+ "data": {"percent": 123},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "nested refs",
+ "schema": {
+ "$defs": {
+ "a": {"type": "integer"},
+ "b": {"$ref": "#/$defs/a"},
+ "c": {"$ref": "#/$defs/b"}
+ },
+ "$ref": "#/$defs/c"
+ },
+ "tests": [
+ {
+ "description": "nested ref valid",
+ "data": 5,
+ "valid": true
+ },
+ {
+ "description": "nested ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ref overrides any sibling keywords",
+ "schema": {
+ "$defs": {
+ "reffed": {
+ "type": "array"
+ }
+ },
+ "properties": {
+ "foo": {
+ "$ref": "#/$defs/reffed",
+ "maxItems": 2
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "ref valid",
+ "data": { "foo": [] },
+ "valid": true
+ },
+ {
+ "description": "ref valid, maxItems ignored",
+ "data": { "foo": [ 1, 2, 3] },
+ "valid": true
+ },
+ {
+ "description": "ref invalid",
+ "data": { "foo": "string" },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "remote ref, containing refs itself",
+ "schema": {"$ref": "https://json-schema.org/draft/2019-09/schema"},
+ "tests": [
+ {
+ "description": "remote ref valid",
+ "data": {"minLength": 1},
+ "valid": true
+ },
+ {
+ "description": "remote ref invalid",
+ "data": {"minLength": -1},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "property named $ref that is not a reference",
+ "schema": {
+ "properties": {
+ "$ref": {"type": "string"}
+ }
+ },
+ "tests": [
+ {
+ "description": "property named $ref valid",
+ "data": {"$ref": "a"},
+ "valid": true
+ },
+ {
+ "description": "property named $ref invalid",
+ "data": {"$ref": 2},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "$ref to boolean schema true",
+ "schema": {
+ "$ref": "#/$defs/bool",
+ "$defs": {
+ "bool": true
+ }
+ },
+ "tests": [
+ {
+ "description": "any value is valid",
+ "data": "foo",
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "$ref to boolean schema false",
+ "schema": {
+ "$ref": "#/$defs/bool",
+ "$defs": {
+ "bool": false
+ }
+ },
+ "tests": [
+ {
+ "description": "any value is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "Recursive references between schemas",
+ "schema": {
+ "$id": "http://localhost:1234/tree",
+ "description": "tree of nodes",
+ "type": "object",
+ "properties": {
+ "meta": {"type": "string"},
+ "nodes": {
+ "type": "array",
+ "items": {"$ref": "node"}
+ }
+ },
+ "required": ["meta", "nodes"],
+ "$defs": {
+ "node": {
+ "$id": "http://localhost:1234/node",
+ "description": "node",
+ "type": "object",
+ "properties": {
+ "value": {"type": "number"},
+ "subtree": {"$ref": "tree"}
+ },
+ "required": ["value"]
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "valid tree",
+ "data": {
+ "meta": "root",
+ "nodes": [
+ {
+ "value": 1,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 1.1},
+ {"value": 1.2}
+ ]
+ }
+ },
+ {
+ "value": 2,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 2.1},
+ {"value": 2.2}
+ ]
+ }
+ }
+ ]
+ },
+ "valid": true
+ },
+ {
+ "description": "invalid tree",
+ "data": {
+ "meta": "root",
+ "nodes": [
+ {
+ "value": 1,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": "string is invalid"},
+ {"value": 1.2}
+ ]
+ }
+ },
+ {
+ "value": 2,
+ "subtree": {
+ "meta": "child",
+ "nodes": [
+ {"value": 2.1},
+ {"value": 2.2}
+ ]
+ }
+ }
+ ]
+ },
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "refs with quote",
+ "schema": {
+ "properties": {
+ "foo\"bar": {"$ref": "#/$defs/foo%22bar"}
+ },
+ "$defs": {
+ "foo\"bar": {"type": "number"}
+ }
+ },
+ "tests": [
+ {
+ "description": "object with numbers is valid",
+ "data": {
+ "foo\"bar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with strings is invalid",
+ "data": {
+ "foo\"bar": "1"
+ },
+ "valid": false
+ }
+ ]
+ }
+]
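
The "escaped pointer ref" group above leans on RFC 6901 escaping: within a JSON Pointer, ``~0`` decodes to ``~`` and ``~1`` to ``/``, and ``%25`` is the percent-encoding of ``%`` in the URI fragment, so ``#/tilda~0field`` addresses the ``tilda~field`` member. A sketch of resolving such fragments with this package's RefResolver (property names copied from the fixture; the snippet is illustrative, not part of the vendored files):

    from jsonschema import RefResolver

    schema = {
        "tilda~field": {"type": "integer"},
        "slash/field": {"type": "integer"},
    }
    resolver = RefResolver.from_schema(schema)

    print(resolver.resolve_fragment(schema, "/tilda~0field"))  # {'type': 'integer'}
    print(resolver.resolve_fragment(schema, "/slash~1field"))  # {'type': 'integer'}
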
diff --git a/third_party/python/jsonschema/json/tests/latest/refRemote.json b/third_party/python/jsonschema/json/tests/latest/refRemote.json
new file mode 100644
index 0000000000..9cadc92666
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/refRemote.json
@@ -0,0 +1,167 @@
+[
+ {
+ "description": "remote ref",
+ "schema": {"$ref": "http://localhost:1234/integer.json"},
+ "tests": [
+ {
+ "description": "remote ref valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "remote ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "fragment within remote ref",
+ "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"},
+ "tests": [
+ {
+ "description": "remote fragment valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "remote fragment invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "ref within remote ref",
+ "schema": {
+ "$ref": "http://localhost:1234/subSchemas.json#/refToInteger"
+ },
+ "tests": [
+ {
+ "description": "ref within ref valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "ref within ref invalid",
+ "data": "a",
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change",
+ "schema": {
+ "$id": "http://localhost:1234/",
+ "items": {
+ "$id": "folder/",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ },
+ "tests": [
+ {
+ "description": "base URI change ref valid",
+ "data": [[1]],
+ "valid": true
+ },
+ {
+ "description": "base URI change ref invalid",
+ "data": [["a"]],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change - change folder",
+ "schema": {
+ "$id": "http://localhost:1234/scope_change_defs1.json",
+ "type" : "object",
+ "properties": {"list": {"$ref": "#/$defs/baz"}},
+ "$defs": {
+ "baz": {
+ "$id": "folder/",
+ "type": "array",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": {"list": [1]},
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": {"list": ["a"]},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "base URI change - change folder in subschema",
+ "schema": {
+ "$id": "http://localhost:1234/scope_change_defs2.json",
+ "type" : "object",
+ "properties": {"list": {"$ref": "#/$defs/baz/$defs/bar"}},
+ "$defs": {
+ "baz": {
+ "$id": "folder/",
+ "$defs": {
+ "bar": {
+ "type": "array",
+ "items": {"$ref": "folderInteger.json"}
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "number is valid",
+ "data": {"list": [1]},
+ "valid": true
+ },
+ {
+ "description": "string is invalid",
+ "data": {"list": ["a"]},
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "root ref in remote ref",
+ "schema": {
+ "$id": "http://localhost:1234/object",
+ "type": "object",
+ "properties": {
+ "name": {"$ref": "name-defs.json#/$defs/orNull"}
+ }
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": {
+ "name": "foo"
+ },
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": {
+ "name": null
+ },
+ "valid": true
+ },
+ {
+ "description": "object is invalid",
+ "data": {
+ "name": {
+ "name": null
+ }
+ },
+ "valid": false
+ }
+ ]
+ }
+]
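
The remote-ref cases above point at ``http://localhost:1234/...``, which the upstream suite serves from its ``json/remotes/`` directory while the tests run. A resolver can also be primed with an in-memory store so that no HTTP server is involved; a minimal sketch (the inlined integer.json content mirrors the suite's remote, and Draft7Validator stands in for the draft under test):

    from jsonschema import Draft7Validator, RefResolver

    schema = {"$ref": "http://localhost:1234/integer.json"}
    store = {"http://localhost:1234/integer.json": {"type": "integer"}}

    resolver = RefResolver(base_uri="", referrer=schema, store=store)
    validator = Draft7Validator(schema, resolver=resolver)

    print(validator.is_valid(1))    # True
    print(validator.is_valid("a"))  # False
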
diff --git a/third_party/python/jsonschema/json/tests/latest/required.json b/third_party/python/jsonschema/json/tests/latest/required.json
new file mode 100644
index 0000000000..abf18f3459
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/required.json
@@ -0,0 +1,105 @@
+[
+ {
+ "description": "required validation",
+ "schema": {
+ "properties": {
+ "foo": {},
+ "bar": {}
+ },
+ "required": ["foo"]
+ },
+ "tests": [
+ {
+ "description": "present required property is valid",
+ "data": {"foo": 1},
+ "valid": true
+ },
+ {
+ "description": "non-present required property is invalid",
+ "data": {"bar": 1},
+ "valid": false
+ },
+ {
+ "description": "ignores arrays",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "ignores strings",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "ignores other non-objects",
+ "data": 12,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required default validation",
+ "schema": {
+ "properties": {
+ "foo": {}
+ }
+ },
+ "tests": [
+ {
+ "description": "not required by default",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required with empty array",
+ "schema": {
+ "properties": {
+ "foo": {}
+ },
+ "required": []
+ },
+ "tests": [
+ {
+ "description": "property not required",
+ "data": {},
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "required with escaped characters",
+ "schema": {
+ "required": [
+ "foo\nbar",
+ "foo\"bar",
+ "foo\\bar",
+ "foo\rbar",
+ "foo\tbar",
+ "foo\fbar"
+ ]
+ },
+ "tests": [
+ {
+ "description": "object with all properties present is valid",
+ "data": {
+ "foo\nbar": 1,
+ "foo\"bar": 1,
+ "foo\\bar": 1,
+ "foo\rbar": 1,
+ "foo\tbar": 1,
+ "foo\fbar": 1
+ },
+ "valid": true
+ },
+ {
+ "description": "object with some properties missing is invalid",
+ "data": {
+ "foo\nbar": "1",
+ "foo\"bar": "1"
+ },
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/type.json b/third_party/python/jsonschema/json/tests/latest/type.json
new file mode 100644
index 0000000000..ea33b1821f
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/type.json
@@ -0,0 +1,464 @@
+[
+ {
+ "description": "integer type matches integers",
+ "schema": {"type": "integer"},
+ "tests": [
+ {
+ "description": "an integer is an integer",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a float is not an integer",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an integer",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "a string is still not an integer, even if it looks like one",
+ "data": "1",
+ "valid": false
+ },
+ {
+ "description": "an object is not an integer",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not an integer",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not an integer",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an integer",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "number type matches numbers",
+ "schema": {"type": "number"},
+ "tests": [
+ {
+ "description": "an integer is a number",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a float is a number",
+ "data": 1.1,
+ "valid": true
+ },
+ {
+ "description": "a string is not a number",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "a string is still not a number, even if it looks like one",
+ "data": "1",
+ "valid": false
+ },
+ {
+ "description": "an object is not a number",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a number",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not a number",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not a number",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "string type matches strings",
+ "schema": {"type": "string"},
+ "tests": [
+ {
+ "description": "1 is not a string",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not a string",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is a string",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "a string is still a string, even if it looks like a number",
+ "data": "1",
+ "valid": true
+ },
+ {
+ "description": "an empty string is still a string",
+ "data": "",
+ "valid": true
+ },
+ {
+ "description": "an object is not a string",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a string",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not a string",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not a string",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "object type matches objects",
+ "schema": {"type": "object"},
+ "tests": [
+ {
+ "description": "an integer is not an object",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not an object",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an object",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is an object",
+ "data": {},
+ "valid": true
+ },
+ {
+ "description": "an array is not an object",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is not an object",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an object",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "array type matches arrays",
+ "schema": {"type": "array"},
+ "tests": [
+ {
+ "description": "an integer is not an array",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not an array",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not an array",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an object is not an array",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is an array",
+ "data": [],
+ "valid": true
+ },
+ {
+ "description": "a boolean is not an array",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is not an array",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "boolean type matches booleans",
+ "schema": {"type": "boolean"},
+ "tests": [
+ {
+ "description": "an integer is not a boolean",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "zero is not a boolean",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "a float is not a boolean",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "a string is not a boolean",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an empty string is not a boolean",
+ "data": "",
+ "valid": false
+ },
+ {
+ "description": "an object is not a boolean",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not a boolean",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "true is a boolean",
+ "data": true,
+ "valid": true
+ },
+ {
+ "description": "false is a boolean",
+ "data": false,
+ "valid": true
+ },
+ {
+ "description": "null is not a boolean",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "null type matches only the null object",
+ "schema": {"type": "null"},
+ "tests": [
+ {
+ "description": "an integer is not null",
+ "data": 1,
+ "valid": false
+ },
+ {
+ "description": "a float is not null",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "zero is not null",
+ "data": 0,
+ "valid": false
+ },
+ {
+ "description": "a string is not null",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "an empty string is not null",
+ "data": "",
+ "valid": false
+ },
+ {
+ "description": "an object is not null",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is not null",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "true is not null",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "false is not null",
+ "data": false,
+ "valid": false
+ },
+ {
+ "description": "null is null",
+ "data": null,
+ "valid": true
+ }
+ ]
+ },
+ {
+ "description": "multiple types can be specified in an array",
+ "schema": {"type": ["integer", "string"]},
+ "tests": [
+ {
+ "description": "an integer is valid",
+ "data": 1,
+ "valid": true
+ },
+ {
+ "description": "a string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "a float is invalid",
+ "data": 1.1,
+ "valid": false
+ },
+ {
+ "description": "an object is invalid",
+ "data": {},
+ "valid": false
+ },
+ {
+ "description": "an array is invalid",
+ "data": [],
+ "valid": false
+ },
+ {
+ "description": "a boolean is invalid",
+ "data": true,
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type as array with one item",
+ "schema": {
+ "type": ["string"]
+ },
+ "tests": [
+ {
+ "description": "string is valid",
+ "data": "foo",
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type: array or object",
+ "schema": {
+ "type": ["array", "object"]
+ },
+ "tests": [
+ {
+ "description": "array is valid",
+ "data": [1,2,3],
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": 123},
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ },
+ {
+ "description": "null is invalid",
+ "data": null,
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "type: array, object or null",
+ "schema": {
+ "type": ["array", "object", "null"]
+ },
+ "tests": [
+ {
+ "description": "array is valid",
+ "data": [1,2,3],
+ "valid": true
+ },
+ {
+ "description": "object is valid",
+ "data": {"foo": 123},
+ "valid": true
+ },
+ {
+ "description": "null is valid",
+ "data": null,
+ "valid": true
+ },
+ {
+ "description": "number is invalid",
+ "data": 123,
+ "valid": false
+ },
+ {
+ "description": "string is invalid",
+ "data": "foo",
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tests/latest/uniqueItems.json b/third_party/python/jsonschema/json/tests/latest/uniqueItems.json
new file mode 100644
index 0000000000..d312ad71ab
--- /dev/null
+++ b/third_party/python/jsonschema/json/tests/latest/uniqueItems.json
@@ -0,0 +1,173 @@
+[
+ {
+ "description": "uniqueItems validation",
+ "schema": {"uniqueItems": true},
+ "tests": [
+ {
+ "description": "unique array of integers is valid",
+ "data": [1, 2],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of integers is invalid",
+ "data": [1, 1],
+ "valid": false
+ },
+ {
+ "description": "numbers are unique if mathematically unequal",
+ "data": [1.0, 1.00, 1],
+ "valid": false
+ },
+ {
+ "description": "false is not equal to zero",
+ "data": [0, false],
+ "valid": true
+ },
+ {
+ "description": "true is not equal to one",
+ "data": [1, true],
+ "valid": true
+ },
+ {
+ "description": "unique array of objects is valid",
+ "data": [{"foo": "bar"}, {"foo": "baz"}],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of objects is invalid",
+ "data": [{"foo": "bar"}, {"foo": "bar"}],
+ "valid": false
+ },
+ {
+ "description": "unique array of nested objects is valid",
+ "data": [
+ {"foo": {"bar" : {"baz" : true}}},
+ {"foo": {"bar" : {"baz" : false}}}
+ ],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of nested objects is invalid",
+ "data": [
+ {"foo": {"bar" : {"baz" : true}}},
+ {"foo": {"bar" : {"baz" : true}}}
+ ],
+ "valid": false
+ },
+ {
+ "description": "unique array of arrays is valid",
+ "data": [["foo"], ["bar"]],
+ "valid": true
+ },
+ {
+ "description": "non-unique array of arrays is invalid",
+ "data": [["foo"], ["foo"]],
+ "valid": false
+ },
+ {
+ "description": "1 and true are unique",
+ "data": [1, true],
+ "valid": true
+ },
+ {
+ "description": "0 and false are unique",
+ "data": [0, false],
+ "valid": true
+ },
+ {
+ "description": "unique heterogeneous types are valid",
+ "data": [{}, [1], true, null, 1],
+ "valid": true
+ },
+ {
+ "description": "non-unique heterogeneous types are invalid",
+ "data": [{}, [1], true, null, {}, 1],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "uniqueItems with an array of items",
+ "schema": {
+ "items": [{"type": "boolean"}, {"type": "boolean"}],
+ "uniqueItems": true
+ },
+ "tests": [
+ {
+ "description": "[false, true] from items array is valid",
+ "data": [false, true],
+ "valid": true
+ },
+ {
+ "description": "[true, false] from items array is valid",
+ "data": [true, false],
+ "valid": true
+ },
+ {
+ "description": "[false, false] from items array is not valid",
+ "data": [false, false],
+ "valid": false
+ },
+ {
+ "description": "[true, true] from items array is not valid",
+ "data": [true, true],
+ "valid": false
+ },
+ {
+ "description": "unique array extended from [false, true] is valid",
+ "data": [false, true, "foo", "bar"],
+ "valid": true
+ },
+ {
+ "description": "unique array extended from [true, false] is valid",
+ "data": [true, false, "foo", "bar"],
+ "valid": true
+ },
+ {
+ "description": "non-unique array extended from [false, true] is not valid",
+ "data": [false, true, "foo", "foo"],
+ "valid": false
+ },
+ {
+ "description": "non-unique array extended from [true, false] is not valid",
+ "data": [true, false, "foo", "foo"],
+ "valid": false
+ }
+ ]
+ },
+ {
+ "description": "uniqueItems with an array of items and additionalItems=false",
+ "schema": {
+ "items": [{"type": "boolean"}, {"type": "boolean"}],
+ "uniqueItems": true,
+ "additionalItems": false
+ },
+ "tests": [
+ {
+ "description": "[false, true] from items array is valid",
+ "data": [false, true],
+ "valid": true
+ },
+ {
+ "description": "[true, false] from items array is valid",
+ "data": [true, false],
+ "valid": true
+ },
+ {
+ "description": "[false, false] from items array is not valid",
+ "data": [false, false],
+ "valid": false
+ },
+ {
+ "description": "[true, true] from items array is not valid",
+ "data": [true, true],
+ "valid": false
+ },
+ {
+ "description": "extra items are invalid even if unique",
+ "data": [false, true, null],
+ "valid": false
+ }
+ ]
+ }
+]
diff --git a/third_party/python/jsonschema/json/tox.ini b/third_party/python/jsonschema/json/tox.ini
new file mode 100644
index 0000000000..9c4e94990d
--- /dev/null
+++ b/third_party/python/jsonschema/json/tox.ini
@@ -0,0 +1,9 @@
+[tox]
+minversion = 1.6
+envlist = sanity
+skipsdist = True
+
+[testenv:sanity]
+# used just for validating the structure of the test case files themselves
+deps = jsonschema
+commands = {envpython} bin/jsonschema_suite check
diff --git a/third_party/python/jsonschema/jsonschema.egg-info/PKG-INFO b/third_party/python/jsonschema/jsonschema.egg-info/PKG-INFO
new file mode 100644
index 0000000000..cc0c592f96
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema.egg-info/PKG-INFO
@@ -0,0 +1,206 @@
+Metadata-Version: 2.1
+Name: jsonschema
+Version: 3.2.0
+Summary: An implementation of JSON Schema validation for Python
+Home-page: https://github.com/Julian/jsonschema
+Author: Julian Berman
+Author-email: Julian@GrayVines.com
+License: UNKNOWN
+Project-URL: Docs, https://python-jsonschema.readthedocs.io/en/latest/
+Description: ==========
+ jsonschema
+ ==========
+
+ |PyPI| |Pythons| |Travis| |AppVeyor| |Codecov| |ReadTheDocs|
+
+ .. |PyPI| image:: https://img.shields.io/pypi/v/jsonschema.svg
+ :alt: PyPI version
+ :target: https://pypi.org/project/jsonschema/
+
+ .. |Pythons| image:: https://img.shields.io/pypi/pyversions/jsonschema.svg
+ :alt: Supported Python versions
+ :target: https://pypi.org/project/jsonschema/
+
+ .. |Travis| image:: https://travis-ci.com/Julian/jsonschema.svg?branch=master
+ :alt: Travis build status
+ :target: https://travis-ci.com/Julian/jsonschema
+
+ .. |AppVeyor| image:: https://ci.appveyor.com/api/projects/status/adtt0aiaihy6muyn/branch/master?svg=true
+ :alt: AppVeyor build status
+ :target: https://ci.appveyor.com/project/Julian/jsonschema
+
+ .. |Codecov| image:: https://codecov.io/gh/Julian/jsonschema/branch/master/graph/badge.svg
+ :alt: Codecov Code coverage
+ :target: https://codecov.io/gh/Julian/jsonschema
+
+ .. |ReadTheDocs| image:: https://readthedocs.org/projects/python-jsonschema/badge/?version=stable&style=flat
+ :alt: ReadTheDocs status
+ :target: https://python-jsonschema.readthedocs.io/en/stable/
+
+
+ ``jsonschema`` is an implementation of `JSON Schema <https://json-schema.org>`_
+ for Python (supporting 2.7+ including Python 3).
+
+ .. code-block:: python
+
+ >>> from jsonschema import validate
+
+ >>> # A sample schema, like what we'd get from json.load()
+ >>> schema = {
+ ... "type" : "object",
+ ... "properties" : {
+ ... "price" : {"type" : "number"},
+ ... "name" : {"type" : "string"},
+ ... },
+ ... }
+
+ >>> # If no exception is raised by validate(), the instance is valid.
+ >>> validate(instance={"name" : "Eggs", "price" : 34.99}, schema=schema)
+
+ >>> validate(
+ ... instance={"name" : "Eggs", "price" : "Invalid"}, schema=schema,
+ ... ) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValidationError: 'Invalid' is not of type 'number'
+
+ It can also be used from the console:
+
+ .. code-block:: bash
+
+ $ jsonschema -i sample.json sample.schema
+
+ Features
+ --------
+
+ * Full support for
+ `Draft 7 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft7Validator>`_,
+ `Draft 6 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft6Validator>`_,
+ `Draft 4 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft4Validator>`_
+ and
+ `Draft 3 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft3Validator>`_
+
+ * `Lazy validation <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.IValidator.iter_errors>`_
+ that can iteratively report *all* validation errors.
+
+ * `Programmatic querying <https://python-jsonschema.readthedocs.io/en/latest/errors/>`_
+ of which properties or items failed validation.
+
+
+ Installation
+ ------------
+
+ ``jsonschema`` is available on `PyPI <https://pypi.org/project/jsonschema/>`_. You can install using `pip <https://pip.pypa.io/en/stable/>`_:
+
+ .. code-block:: bash
+
+ $ pip install jsonschema
+
+
+ Demo
+ ----
+
+ Try ``jsonschema`` interactively in this online demo:
+
+ .. image:: https://user-images.githubusercontent.com/1155573/56745335-8b158a00-6750-11e9-8776-83fa675939c4.png
+ :target: https://notebooks.ai/demo/gh/Julian/jsonschema
+ :alt: Open Live Demo
+
+
+ The online demo notebook will look similar to this:
+
+
+ .. image:: https://user-images.githubusercontent.com/1155573/56820861-5c1c1880-6823-11e9-802a-ce01c5ec574f.gif
+ :alt: Open Live Demo
+ :width: 480 px
+
+
+ Release Notes
+ -------------
+
+ v3.1 brings support for ECMA 262 dialect regular expressions
+ throughout schemas, as recommended by the specification. Big
+ thanks to @Zac-HD for authoring support in a new `js-regex
+ <https://pypi.org/project/js-regex/>`_ library.
+
+
+ Running the Test Suite
+ ----------------------
+
+ If you have ``tox`` installed (perhaps via ``pip install tox`` or your
+ package manager), running ``tox`` in the directory of your source
+ checkout will run ``jsonschema``'s test suite on all of the versions
+ of Python ``jsonschema`` supports. If you don't have all of the
+ versions that ``jsonschema`` is tested under, you'll likely want to run
+ using ``tox``'s ``--skip-missing-interpreters`` option.
+
+ Of course you're also free to just run the tests on a single version with your
+ favorite test runner. The tests live in the ``jsonschema.tests`` package.
+
+
+ Benchmarks
+ ----------
+
+ ``jsonschema``'s benchmarks make use of `pyperf
+ <https://pyperf.readthedocs.io>`_.
+
+ Running them can be done via ``tox -e perf``, or by invoking the ``pyperf``
+ commands externally (after ensuring that both it and ``jsonschema`` itself are
+ installed)::
+
+ $ python -m pyperf jsonschema/benchmarks/test_suite.py --hist --output results.json
+
+ To compare to a previous run, use::
+
+ $ python -m pyperf compare_to --table reference.json results.json
+
+ See the ``pyperf`` documentation for more details.
+
+
+ Community
+ ---------
+
+ There's a `mailing list <https://groups.google.com/forum/#!forum/jsonschema>`_
+ for this implementation on Google Groups.
+
+ Please join, and feel free to send questions there.
+
+
+ Contributing
+ ------------
+
+ I'm Julian Berman.
+
+ ``jsonschema`` is on `GitHub <https://github.com/Julian/jsonschema>`_.
+
+ Get in touch, via GitHub or otherwise, if you've got something to contribute,
+ it'd be most welcome!
+
+ You can also generally find me on Freenode (nick: ``tos9``) in various
+ channels, including ``#python``.
+
+ If you feel overwhelmingly grateful, you can also woo me with beer money
+ via Google Pay with the email in my GitHub profile.
+
+ And for companies who appreciate ``jsonschema`` and its continued support
+ and growth, ``jsonschema`` is also now supportable via `TideLift
+ <https://tidelift.com/subscription/pkg/pypi-jsonschema?utm_source=pypi-jsonschema&utm_medium=referral&utm_campaign=readme>`_.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Provides-Extra: format
+Provides-Extra: format_nongpl
diff --git a/third_party/python/jsonschema/jsonschema.egg-info/SOURCES.txt b/third_party/python/jsonschema/jsonschema.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..70a10f20fc
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema.egg-info/SOURCES.txt
@@ -0,0 +1,354 @@
+.appveyor.yml
+.coveragerc
+.gitignore
+.travis.yml
+CHANGELOG.rst
+COPYING
+DEMO.ipynb
+MANIFEST.in
+README.rst
+codecov.yml
+demo.yml
+pyproject.toml
+setup.cfg
+setup.py
+test-requirements.txt
+tox.ini
+.github/FUNDING.yml
+.github/SECURITY.md
+docs/Makefile
+docs/conf.py
+docs/creating.rst
+docs/errors.rst
+docs/faq.rst
+docs/index.rst
+docs/jsonschema_role.py
+docs/make.bat
+docs/references.rst
+docs/requirements.in
+docs/requirements.txt
+docs/spelling-wordlist.txt
+docs/validate.rst
+json/.gitignore
+json/.travis.yml
+json/LICENSE
+json/README.md
+json/index.js
+json/package.json
+json/test-schema.json
+json/tox.ini
+json/bin/jsonschema_suite
+json/remotes/integer.json
+json/remotes/name-defs.json
+json/remotes/name.json
+json/remotes/subSchemas.json
+json/remotes/folder/folderInteger.json
+json/tests/latest
+json/tests/draft2019-09/additionalItems.json
+json/tests/draft2019-09/additionalProperties.json
+json/tests/draft2019-09/allOf.json
+json/tests/draft2019-09/anchor.json
+json/tests/draft2019-09/anyOf.json
+json/tests/draft2019-09/boolean_schema.json
+json/tests/draft2019-09/const.json
+json/tests/draft2019-09/contains.json
+json/tests/draft2019-09/default.json
+json/tests/draft2019-09/defs.json
+json/tests/draft2019-09/dependencies.json
+json/tests/draft2019-09/enum.json
+json/tests/draft2019-09/exclusiveMaximum.json
+json/tests/draft2019-09/exclusiveMinimum.json
+json/tests/draft2019-09/format.json
+json/tests/draft2019-09/if-then-else.json
+json/tests/draft2019-09/items.json
+json/tests/draft2019-09/maxItems.json
+json/tests/draft2019-09/maxLength.json
+json/tests/draft2019-09/maxProperties.json
+json/tests/draft2019-09/maximum.json
+json/tests/draft2019-09/minItems.json
+json/tests/draft2019-09/minLength.json
+json/tests/draft2019-09/minProperties.json
+json/tests/draft2019-09/minimum.json
+json/tests/draft2019-09/multipleOf.json
+json/tests/draft2019-09/not.json
+json/tests/draft2019-09/oneOf.json
+json/tests/draft2019-09/pattern.json
+json/tests/draft2019-09/patternProperties.json
+json/tests/draft2019-09/properties.json
+json/tests/draft2019-09/propertyNames.json
+json/tests/draft2019-09/ref.json
+json/tests/draft2019-09/refRemote.json
+json/tests/draft2019-09/required.json
+json/tests/draft2019-09/type.json
+json/tests/draft2019-09/uniqueItems.json
+json/tests/draft2019-09/optional/bignum.json
+json/tests/draft2019-09/optional/content.json
+json/tests/draft2019-09/optional/ecmascript-regex.json
+json/tests/draft2019-09/optional/zeroTerminatedFloats.json
+json/tests/draft2019-09/optional/format/date-time.json
+json/tests/draft2019-09/optional/format/date.json
+json/tests/draft2019-09/optional/format/email.json
+json/tests/draft2019-09/optional/format/hostname.json
+json/tests/draft2019-09/optional/format/idn-email.json
+json/tests/draft2019-09/optional/format/idn-hostname.json
+json/tests/draft2019-09/optional/format/ipv4.json
+json/tests/draft2019-09/optional/format/ipv6.json
+json/tests/draft2019-09/optional/format/iri-reference.json
+json/tests/draft2019-09/optional/format/iri.json
+json/tests/draft2019-09/optional/format/json-pointer.json
+json/tests/draft2019-09/optional/format/regex.json
+json/tests/draft2019-09/optional/format/relative-json-pointer.json
+json/tests/draft2019-09/optional/format/time.json
+json/tests/draft2019-09/optional/format/uri-reference.json
+json/tests/draft2019-09/optional/format/uri-template.json
+json/tests/draft2019-09/optional/format/uri.json
+json/tests/draft3/additionalItems.json
+json/tests/draft3/additionalProperties.json
+json/tests/draft3/default.json
+json/tests/draft3/dependencies.json
+json/tests/draft3/disallow.json
+json/tests/draft3/divisibleBy.json
+json/tests/draft3/enum.json
+json/tests/draft3/extends.json
+json/tests/draft3/format.json
+json/tests/draft3/items.json
+json/tests/draft3/maxItems.json
+json/tests/draft3/maxLength.json
+json/tests/draft3/maximum.json
+json/tests/draft3/minItems.json
+json/tests/draft3/minLength.json
+json/tests/draft3/minimum.json
+json/tests/draft3/pattern.json
+json/tests/draft3/patternProperties.json
+json/tests/draft3/properties.json
+json/tests/draft3/ref.json
+json/tests/draft3/refRemote.json
+json/tests/draft3/required.json
+json/tests/draft3/type.json
+json/tests/draft3/uniqueItems.json
+json/tests/draft3/optional/bignum.json
+json/tests/draft3/optional/ecmascript-regex.json
+json/tests/draft3/optional/format.json
+json/tests/draft3/optional/zeroTerminatedFloats.json
+json/tests/draft4/additionalItems.json
+json/tests/draft4/additionalProperties.json
+json/tests/draft4/allOf.json
+json/tests/draft4/anyOf.json
+json/tests/draft4/default.json
+json/tests/draft4/definitions.json
+json/tests/draft4/dependencies.json
+json/tests/draft4/enum.json
+json/tests/draft4/format.json
+json/tests/draft4/items.json
+json/tests/draft4/maxItems.json
+json/tests/draft4/maxLength.json
+json/tests/draft4/maxProperties.json
+json/tests/draft4/maximum.json
+json/tests/draft4/minItems.json
+json/tests/draft4/minLength.json
+json/tests/draft4/minProperties.json
+json/tests/draft4/minimum.json
+json/tests/draft4/multipleOf.json
+json/tests/draft4/not.json
+json/tests/draft4/oneOf.json
+json/tests/draft4/pattern.json
+json/tests/draft4/patternProperties.json
+json/tests/draft4/properties.json
+json/tests/draft4/ref.json
+json/tests/draft4/refRemote.json
+json/tests/draft4/required.json
+json/tests/draft4/type.json
+json/tests/draft4/uniqueItems.json
+json/tests/draft4/optional/bignum.json
+json/tests/draft4/optional/ecmascript-regex.json
+json/tests/draft4/optional/format.json
+json/tests/draft4/optional/zeroTerminatedFloats.json
+json/tests/draft6/additionalItems.json
+json/tests/draft6/additionalProperties.json
+json/tests/draft6/allOf.json
+json/tests/draft6/anyOf.json
+json/tests/draft6/boolean_schema.json
+json/tests/draft6/const.json
+json/tests/draft6/contains.json
+json/tests/draft6/default.json
+json/tests/draft6/definitions.json
+json/tests/draft6/dependencies.json
+json/tests/draft6/enum.json
+json/tests/draft6/exclusiveMaximum.json
+json/tests/draft6/exclusiveMinimum.json
+json/tests/draft6/format.json
+json/tests/draft6/items.json
+json/tests/draft6/maxItems.json
+json/tests/draft6/maxLength.json
+json/tests/draft6/maxProperties.json
+json/tests/draft6/maximum.json
+json/tests/draft6/minItems.json
+json/tests/draft6/minLength.json
+json/tests/draft6/minProperties.json
+json/tests/draft6/minimum.json
+json/tests/draft6/multipleOf.json
+json/tests/draft6/not.json
+json/tests/draft6/oneOf.json
+json/tests/draft6/pattern.json
+json/tests/draft6/patternProperties.json
+json/tests/draft6/properties.json
+json/tests/draft6/propertyNames.json
+json/tests/draft6/ref.json
+json/tests/draft6/refRemote.json
+json/tests/draft6/required.json
+json/tests/draft6/type.json
+json/tests/draft6/uniqueItems.json
+json/tests/draft6/optional/bignum.json
+json/tests/draft6/optional/ecmascript-regex.json
+json/tests/draft6/optional/format.json
+json/tests/draft6/optional/zeroTerminatedFloats.json
+json/tests/draft7/additionalItems.json
+json/tests/draft7/additionalProperties.json
+json/tests/draft7/allOf.json
+json/tests/draft7/anyOf.json
+json/tests/draft7/boolean_schema.json
+json/tests/draft7/const.json
+json/tests/draft7/contains.json
+json/tests/draft7/default.json
+json/tests/draft7/definitions.json
+json/tests/draft7/dependencies.json
+json/tests/draft7/enum.json
+json/tests/draft7/exclusiveMaximum.json
+json/tests/draft7/exclusiveMinimum.json
+json/tests/draft7/format.json
+json/tests/draft7/if-then-else.json
+json/tests/draft7/items.json
+json/tests/draft7/maxItems.json
+json/tests/draft7/maxLength.json
+json/tests/draft7/maxProperties.json
+json/tests/draft7/maximum.json
+json/tests/draft7/minItems.json
+json/tests/draft7/minLength.json
+json/tests/draft7/minProperties.json
+json/tests/draft7/minimum.json
+json/tests/draft7/multipleOf.json
+json/tests/draft7/not.json
+json/tests/draft7/oneOf.json
+json/tests/draft7/pattern.json
+json/tests/draft7/patternProperties.json
+json/tests/draft7/properties.json
+json/tests/draft7/propertyNames.json
+json/tests/draft7/ref.json
+json/tests/draft7/refRemote.json
+json/tests/draft7/required.json
+json/tests/draft7/type.json
+json/tests/draft7/uniqueItems.json
+json/tests/draft7/optional/bignum.json
+json/tests/draft7/optional/content.json
+json/tests/draft7/optional/ecmascript-regex.json
+json/tests/draft7/optional/zeroTerminatedFloats.json
+json/tests/draft7/optional/format/date-time.json
+json/tests/draft7/optional/format/date.json
+json/tests/draft7/optional/format/email.json
+json/tests/draft7/optional/format/hostname.json
+json/tests/draft7/optional/format/idn-email.json
+json/tests/draft7/optional/format/idn-hostname.json
+json/tests/draft7/optional/format/ipv4.json
+json/tests/draft7/optional/format/ipv6.json
+json/tests/draft7/optional/format/iri-reference.json
+json/tests/draft7/optional/format/iri.json
+json/tests/draft7/optional/format/json-pointer.json
+json/tests/draft7/optional/format/regex.json
+json/tests/draft7/optional/format/relative-json-pointer.json
+json/tests/draft7/optional/format/time.json
+json/tests/draft7/optional/format/uri-reference.json
+json/tests/draft7/optional/format/uri-template.json
+json/tests/draft7/optional/format/uri.json
+json/tests/latest/additionalItems.json
+json/tests/latest/additionalProperties.json
+json/tests/latest/allOf.json
+json/tests/latest/anchor.json
+json/tests/latest/anyOf.json
+json/tests/latest/boolean_schema.json
+json/tests/latest/const.json
+json/tests/latest/contains.json
+json/tests/latest/default.json
+json/tests/latest/defs.json
+json/tests/latest/dependencies.json
+json/tests/latest/enum.json
+json/tests/latest/exclusiveMaximum.json
+json/tests/latest/exclusiveMinimum.json
+json/tests/latest/format.json
+json/tests/latest/if-then-else.json
+json/tests/latest/items.json
+json/tests/latest/maxItems.json
+json/tests/latest/maxLength.json
+json/tests/latest/maxProperties.json
+json/tests/latest/maximum.json
+json/tests/latest/minItems.json
+json/tests/latest/minLength.json
+json/tests/latest/minProperties.json
+json/tests/latest/minimum.json
+json/tests/latest/multipleOf.json
+json/tests/latest/not.json
+json/tests/latest/oneOf.json
+json/tests/latest/pattern.json
+json/tests/latest/patternProperties.json
+json/tests/latest/properties.json
+json/tests/latest/propertyNames.json
+json/tests/latest/ref.json
+json/tests/latest/refRemote.json
+json/tests/latest/required.json
+json/tests/latest/type.json
+json/tests/latest/uniqueItems.json
+json/tests/latest/optional/bignum.json
+json/tests/latest/optional/content.json
+json/tests/latest/optional/ecmascript-regex.json
+json/tests/latest/optional/zeroTerminatedFloats.json
+json/tests/latest/optional/format/date-time.json
+json/tests/latest/optional/format/date.json
+json/tests/latest/optional/format/email.json
+json/tests/latest/optional/format/hostname.json
+json/tests/latest/optional/format/idn-email.json
+json/tests/latest/optional/format/idn-hostname.json
+json/tests/latest/optional/format/ipv4.json
+json/tests/latest/optional/format/ipv6.json
+json/tests/latest/optional/format/iri-reference.json
+json/tests/latest/optional/format/iri.json
+json/tests/latest/optional/format/json-pointer.json
+json/tests/latest/optional/format/regex.json
+json/tests/latest/optional/format/relative-json-pointer.json
+json/tests/latest/optional/format/time.json
+json/tests/latest/optional/format/uri-reference.json
+json/tests/latest/optional/format/uri-template.json
+json/tests/latest/optional/format/uri.json
+jsonschema/__init__.py
+jsonschema/__main__.py
+jsonschema/_format.py
+jsonschema/_legacy_validators.py
+jsonschema/_reflect.py
+jsonschema/_types.py
+jsonschema/_utils.py
+jsonschema/_validators.py
+jsonschema/cli.py
+jsonschema/compat.py
+jsonschema/exceptions.py
+jsonschema/validators.py
+jsonschema.egg-info/PKG-INFO
+jsonschema.egg-info/SOURCES.txt
+jsonschema.egg-info/dependency_links.txt
+jsonschema.egg-info/entry_points.txt
+jsonschema.egg-info/requires.txt
+jsonschema.egg-info/top_level.txt
+jsonschema/benchmarks/__init__.py
+jsonschema/benchmarks/issue232.py
+jsonschema/benchmarks/json_schema_test_suite.py
+jsonschema/benchmarks/issue232/issue.json
+jsonschema/schemas/draft3.json
+jsonschema/schemas/draft4.json
+jsonschema/schemas/draft6.json
+jsonschema/schemas/draft7.json
+jsonschema/tests/__init__.py
+jsonschema/tests/_helpers.py
+jsonschema/tests/_suite.py
+jsonschema/tests/test_cli.py
+jsonschema/tests/test_exceptions.py
+jsonschema/tests/test_format.py
+jsonschema/tests/test_jsonschema_test_suite.py
+jsonschema/tests/test_types.py
+jsonschema/tests/test_validators.py \ No newline at end of file
diff --git a/third_party/python/jsonschema/jsonschema.egg-info/dependency_links.txt b/third_party/python/jsonschema/jsonschema.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/third_party/python/jsonschema/jsonschema.egg-info/entry_points.txt b/third_party/python/jsonschema/jsonschema.egg-info/entry_points.txt
new file mode 100644
index 0000000000..c627b310cd
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema.egg-info/entry_points.txt
@@ -0,0 +1,3 @@
+[console_scripts]
+jsonschema = jsonschema.cli:main
+
diff --git a/third_party/python/jsonschema/jsonschema.egg-info/requires.txt b/third_party/python/jsonschema/jsonschema.egg-info/requires.txt
new file mode 100644
index 0000000000..8b31c58109
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema.egg-info/requires.txt
@@ -0,0 +1,24 @@
+attrs>=17.4.0
+pyrsistent>=0.14.0
+setuptools
+six>=1.11.0
+
+[:python_version < "3"]
+functools32
+
+[:python_version < "3.8"]
+importlib_metadata
+
+[format]
+idna
+jsonpointer>1.13
+rfc3987
+strict-rfc3339
+webcolors
+
+[format_nongpl]
+idna
+jsonpointer>1.13
+webcolors
+rfc3986-validator>0.1.0
+rfc3339-validator
diff --git a/third_party/python/jsonschema/jsonschema.egg-info/top_level.txt b/third_party/python/jsonschema/jsonschema.egg-info/top_level.txt
new file mode 100644
index 0000000000..d89304b1a8
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema.egg-info/top_level.txt
@@ -0,0 +1 @@
+jsonschema
diff --git a/third_party/python/jsonschema/jsonschema/__init__.py b/third_party/python/jsonschema/jsonschema/__init__.py
new file mode 100644
index 0000000000..6b630cdfbb
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/__init__.py
@@ -0,0 +1,34 @@
+"""
+An implementation of JSON Schema for Python
+
+The main functionality is provided by the validator classes for each of the
+supported JSON Schema versions.
+
+Most commonly, `validate` is the quickest way to simply validate a given
+instance under a schema, and will create a validator for you.
+"""
+
+from jsonschema.exceptions import (
+ ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError
+)
+from jsonschema._format import (
+ FormatChecker,
+ draft3_format_checker,
+ draft4_format_checker,
+ draft6_format_checker,
+ draft7_format_checker,
+)
+from jsonschema._types import TypeChecker
+from jsonschema.validators import (
+ Draft3Validator,
+ Draft4Validator,
+ Draft6Validator,
+ Draft7Validator,
+ RefResolver,
+ validate,
+)
+try:
+ from importlib import metadata
+except ImportError: # for Python<3.8
+ import importlib_metadata as metadata
+__version__ = metadata.version("jsonschema")
diff --git a/third_party/python/jsonschema/jsonschema/__main__.py b/third_party/python/jsonschema/jsonschema/__main__.py
new file mode 100644
index 0000000000..82c29fd39e
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/__main__.py
@@ -0,0 +1,2 @@
+from jsonschema.cli import main
+main()
diff --git a/third_party/python/jsonschema/jsonschema/_format.py b/third_party/python/jsonschema/jsonschema/_format.py
new file mode 100644
index 0000000000..281a7cfcff
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/_format.py
@@ -0,0 +1,425 @@
+import datetime
+import re
+import socket
+import struct
+
+from jsonschema.compat import str_types
+from jsonschema.exceptions import FormatError
+
+
+class FormatChecker(object):
+ """
+ A ``format`` property checker.
+
+ JSON Schema does not mandate that the ``format`` property actually do any
+ validation. If validation is desired however, instances of this class can
+ be hooked into validators to enable format validation.
+
+ `FormatChecker` objects always return ``True`` when asked about
+ formats that they do not know how to validate.
+
+ To check a custom format using a function that takes an instance and
+ returns a ``bool``, use the `FormatChecker.checks` or
+ `FormatChecker.cls_checks` decorators.
+
+ Arguments:
+
+ formats (~collections.Iterable):
+
+ The known formats to validate. This argument can be used to
+ limit which formats will be used during validation.
+ """
+
+ checkers = {}
+
+ def __init__(self, formats=None):
+ if formats is None:
+ self.checkers = self.checkers.copy()
+ else:
+ self.checkers = dict((k, self.checkers[k]) for k in formats)
+
+ def __repr__(self):
+ return "<FormatChecker checkers={}>".format(sorted(self.checkers))
+
+ def checks(self, format, raises=()):
+ """
+ Register a decorated function as validating a new format.
+
+ Arguments:
+
+ format (str):
+
+ The format that the decorated function will check.
+
+ raises (Exception):
+
+ The exception(s) raised by the decorated function when an
+ invalid instance is found.
+
+ The exception object will be accessible as the
+ `jsonschema.exceptions.ValidationError.cause` attribute of the
+ resulting validation error.
+ """
+
+ def _checks(func):
+ self.checkers[format] = (func, raises)
+ return func
+ return _checks
+
+ cls_checks = classmethod(checks)
+
+ def check(self, instance, format):
+ """
+ Check whether the instance conforms to the given format.
+
+ Arguments:
+
+ instance (*any primitive type*, i.e. str, number, bool):
+
+ The instance to check
+
+ format (str):
+
+ The format that instance should conform to
+
+
+ Raises:
+
+ FormatError: if the instance does not conform to ``format``
+ """
+
+ if format not in self.checkers:
+ return
+
+ func, raises = self.checkers[format]
+ result, cause = None, None
+ try:
+ result = func(instance)
+ except raises as e:
+ cause = e
+ if not result:
+ raise FormatError(
+ "%r is not a %r" % (instance, format), cause=cause,
+ )
+
+ def conforms(self, instance, format):
+ """
+ Check whether the instance conforms to the given format.
+
+ Arguments:
+
+ instance (*any primitive type*, i.e. str, number, bool):
+
+ The instance to check
+
+ format (str):
+
+ The format that instance should conform to
+
+ Returns:
+
+ bool: whether it conformed
+ """
+
+ try:
+ self.check(instance, format)
+ except FormatError:
+ return False
+ else:
+ return True
+
+
+draft3_format_checker = FormatChecker()
+draft4_format_checker = FormatChecker()
+draft6_format_checker = FormatChecker()
+draft7_format_checker = FormatChecker()
+
+
+_draft_checkers = dict(
+ draft3=draft3_format_checker,
+ draft4=draft4_format_checker,
+ draft6=draft6_format_checker,
+ draft7=draft7_format_checker,
+)
+
+
+def _checks_drafts(
+ name=None,
+ draft3=None,
+ draft4=None,
+ draft6=None,
+ draft7=None,
+ raises=(),
+):
+ draft3 = draft3 or name
+ draft4 = draft4 or name
+ draft6 = draft6 or name
+ draft7 = draft7 or name
+
+ def wrap(func):
+ if draft3:
+ func = _draft_checkers["draft3"].checks(draft3, raises)(func)
+ if draft4:
+ func = _draft_checkers["draft4"].checks(draft4, raises)(func)
+ if draft6:
+ func = _draft_checkers["draft6"].checks(draft6, raises)(func)
+ if draft7:
+ func = _draft_checkers["draft7"].checks(draft7, raises)(func)
+
+ # Oy. This is bad global state, but relied upon for now, until
+ # deprecation. See https://github.com/Julian/jsonschema/issues/519
+ # and test_format_checkers_come_with_defaults
+ FormatChecker.cls_checks(draft7 or draft6 or draft4 or draft3, raises)(
+ func,
+ )
+ return func
+ return wrap
+
+
+@_checks_drafts(name="idn-email")
+@_checks_drafts(name="email")
+def is_email(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return "@" in instance
+
+
+_ipv4_re = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
+
+
+@_checks_drafts(
+ draft3="ip-address", draft4="ipv4", draft6="ipv4", draft7="ipv4",
+)
+def is_ipv4(instance):
+ if not isinstance(instance, str_types):
+ return True
+ if not _ipv4_re.match(instance):
+ return False
+ return all(0 <= int(component) <= 255 for component in instance.split("."))
+
+
+if hasattr(socket, "inet_pton"):
+ # FIXME: Really this only should raise struct.error, but see the sadness
+ # that is https://twistedmatrix.com/trac/ticket/9409
+ @_checks_drafts(
+ name="ipv6", raises=(socket.error, struct.error, ValueError),
+ )
+ def is_ipv6(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return socket.inet_pton(socket.AF_INET6, instance)
+
+
+_host_name_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9\.\-]{1,255}$")
+
+
+@_checks_drafts(
+ draft3="host-name",
+ draft4="hostname",
+ draft6="hostname",
+ draft7="hostname",
+)
+def is_host_name(instance):
+ if not isinstance(instance, str_types):
+ return True
+ if not _host_name_re.match(instance):
+ return False
+ components = instance.split(".")
+ for component in components:
+ if len(component) > 63:
+ return False
+ return True
+
+
+try:
+    # The built-in `idna` codec only implements RFC 3490, so we go elsewhere.
+ import idna
+except ImportError:
+ pass
+else:
+ @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
+ def is_idn_host_name(instance):
+ if not isinstance(instance, str_types):
+ return True
+ idna.encode(instance)
+ return True
+
+
+try:
+ import rfc3987
+except ImportError:
+ try:
+ from rfc3986_validator import validate_rfc3986
+ except ImportError:
+ pass
+ else:
+ @_checks_drafts(name="uri")
+ def is_uri(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return validate_rfc3986(instance, rule="URI")
+
+ @_checks_drafts(
+ draft6="uri-reference",
+ draft7="uri-reference",
+ raises=ValueError,
+ )
+ def is_uri_reference(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return validate_rfc3986(instance, rule="URI_reference")
+
+else:
+ @_checks_drafts(draft7="iri", raises=ValueError)
+ def is_iri(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return rfc3987.parse(instance, rule="IRI")
+
+ @_checks_drafts(draft7="iri-reference", raises=ValueError)
+ def is_iri_reference(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return rfc3987.parse(instance, rule="IRI_reference")
+
+ @_checks_drafts(name="uri", raises=ValueError)
+ def is_uri(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return rfc3987.parse(instance, rule="URI")
+
+ @_checks_drafts(
+ draft6="uri-reference",
+ draft7="uri-reference",
+ raises=ValueError,
+ )
+ def is_uri_reference(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return rfc3987.parse(instance, rule="URI_reference")
+
+
+try:
+ from strict_rfc3339 import validate_rfc3339
+except ImportError:
+ try:
+ from rfc3339_validator import validate_rfc3339
+ except ImportError:
+ validate_rfc3339 = None
+
+if validate_rfc3339:
+ @_checks_drafts(name="date-time")
+ def is_datetime(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return validate_rfc3339(instance)
+
+ @_checks_drafts(draft7="time")
+ def is_time(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return is_datetime("1970-01-01T" + instance)
+
+
+@_checks_drafts(name="regex", raises=re.error)
+def is_regex(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return re.compile(instance)
+
+
+@_checks_drafts(draft3="date", draft7="date", raises=ValueError)
+def is_date(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return datetime.datetime.strptime(instance, "%Y-%m-%d")
+
+
+@_checks_drafts(draft3="time", raises=ValueError)
+def is_draft3_time(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return datetime.datetime.strptime(instance, "%H:%M:%S")
+
+
+try:
+ import webcolors
+except ImportError:
+ pass
+else:
+ def is_css_color_code(instance):
+ return webcolors.normalize_hex(instance)
+
+ @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
+ def is_css21_color(instance):
+ if (
+ not isinstance(instance, str_types) or
+ instance.lower() in webcolors.css21_names_to_hex
+ ):
+ return True
+ return is_css_color_code(instance)
+
+ def is_css3_color(instance):
+ if instance.lower() in webcolors.css3_names_to_hex:
+ return True
+ return is_css_color_code(instance)
+
+
+try:
+ import jsonpointer
+except ImportError:
+ pass
+else:
+ @_checks_drafts(
+ draft6="json-pointer",
+ draft7="json-pointer",
+ raises=jsonpointer.JsonPointerException,
+ )
+ def is_json_pointer(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return jsonpointer.JsonPointer(instance)
+
+ # TODO: I don't want to maintain this, so it
+ # needs to go either into jsonpointer (pending
+ # https://github.com/stefankoegl/python-json-pointer/issues/34) or
+ # into a new external library.
+ @_checks_drafts(
+ draft7="relative-json-pointer",
+ raises=jsonpointer.JsonPointerException,
+ )
+ def is_relative_json_pointer(instance):
+ # Definition taken from:
+ # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
+ if not isinstance(instance, str_types):
+ return True
+ non_negative_integer, rest = [], ""
+ for i, character in enumerate(instance):
+ if character.isdigit():
+ non_negative_integer.append(character)
+ continue
+
+ if not non_negative_integer:
+ return False
+
+ rest = instance[i:]
+ break
+ return (rest == "#") or jsonpointer.JsonPointer(rest)
+
+
+try:
+ import uritemplate.exceptions
+except ImportError:
+ pass
+else:
+ @_checks_drafts(
+ draft6="uri-template",
+ draft7="uri-template",
+ raises=uritemplate.exceptions.InvalidTemplate,
+ )
+ def is_uri_template(
+ instance,
+ template_validator=uritemplate.Validator().force_balanced_braces(),
+ ):
+ template = uritemplate.URITemplate(instance)
+ return template_validator.validate(template)
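The `FormatChecker.checks` / `cls_checks` decorators documented above register custom format functions on a checker. A hedged sketch of wiring one into a validator; the "even" format and the helper name are invented for illustration:

    from jsonschema import Draft7Validator, FormatChecker

    checker = FormatChecker()

    @checker.checks("even", raises=ValueError)
    def is_even(value):
        return int(value) % 2 == 0

    validator = Draft7Validator({"format": "even"}, format_checker=checker)
    print(validator.is_valid("4"))   # True
    print(validator.is_valid("3"))   # False: the FormatError surfaces as a ValidationError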
diff --git a/third_party/python/jsonschema/jsonschema/_legacy_validators.py b/third_party/python/jsonschema/jsonschema/_legacy_validators.py
new file mode 100644
index 0000000000..264ff7d713
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/_legacy_validators.py
@@ -0,0 +1,141 @@
+from jsonschema import _utils
+from jsonschema.compat import iteritems
+from jsonschema.exceptions import ValidationError
+
+
+def dependencies_draft3(validator, dependencies, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ for property, dependency in iteritems(dependencies):
+ if property not in instance:
+ continue
+
+ if validator.is_type(dependency, "object"):
+ for error in validator.descend(
+ instance, dependency, schema_path=property,
+ ):
+ yield error
+ elif validator.is_type(dependency, "string"):
+ if dependency not in instance:
+ yield ValidationError(
+ "%r is a dependency of %r" % (dependency, property)
+ )
+ else:
+ for each in dependency:
+ if each not in instance:
+ message = "%r is a dependency of %r"
+ yield ValidationError(message % (each, property))
+
+
+def disallow_draft3(validator, disallow, instance, schema):
+ for disallowed in _utils.ensure_list(disallow):
+ if validator.is_valid(instance, {"type": [disallowed]}):
+ yield ValidationError(
+ "%r is disallowed for %r" % (disallowed, instance)
+ )
+
+
+def extends_draft3(validator, extends, instance, schema):
+ if validator.is_type(extends, "object"):
+ for error in validator.descend(instance, extends):
+ yield error
+ return
+ for index, subschema in enumerate(extends):
+ for error in validator.descend(instance, subschema, schema_path=index):
+ yield error
+
+
+def items_draft3_draft4(validator, items, instance, schema):
+ if not validator.is_type(instance, "array"):
+ return
+
+ if validator.is_type(items, "object"):
+ for index, item in enumerate(instance):
+ for error in validator.descend(item, items, path=index):
+ yield error
+ else:
+ for (index, item), subschema in zip(enumerate(instance), items):
+ for error in validator.descend(
+ item, subschema, path=index, schema_path=index,
+ ):
+ yield error
+
+
+def minimum_draft3_draft4(validator, minimum, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if schema.get("exclusiveMinimum", False):
+ failed = instance <= minimum
+ cmp = "less than or equal to"
+ else:
+ failed = instance < minimum
+ cmp = "less than"
+
+ if failed:
+ yield ValidationError(
+ "%r is %s the minimum of %r" % (instance, cmp, minimum)
+ )
+
+
+def maximum_draft3_draft4(validator, maximum, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if schema.get("exclusiveMaximum", False):
+ failed = instance >= maximum
+ cmp = "greater than or equal to"
+ else:
+ failed = instance > maximum
+ cmp = "greater than"
+
+ if failed:
+ yield ValidationError(
+ "%r is %s the maximum of %r" % (instance, cmp, maximum)
+ )
+
+
+def properties_draft3(validator, properties, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ for property, subschema in iteritems(properties):
+ if property in instance:
+ for error in validator.descend(
+ instance[property],
+ subschema,
+ path=property,
+ schema_path=property,
+ ):
+ yield error
+ elif subschema.get("required", False):
+ error = ValidationError("%r is a required property" % property)
+ error._set(
+ validator="required",
+ validator_value=subschema["required"],
+ instance=instance,
+ schema=schema,
+ )
+ error.path.appendleft(property)
+ error.schema_path.extend([property, "required"])
+ yield error
+
+
+def type_draft3(validator, types, instance, schema):
+ types = _utils.ensure_list(types)
+
+ all_errors = []
+ for index, type in enumerate(types):
+ if validator.is_type(type, "object"):
+ errors = list(validator.descend(instance, type, schema_path=index))
+ if not errors:
+ return
+ all_errors.extend(errors)
+ else:
+ if validator.is_type(instance, type):
+ return
+ else:
+ yield ValidationError(
+ _utils.types_msg(instance, types), context=all_errors,
+ )
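`dependencies_draft3` above accepts a dependency given as a sub-schema, a single property name, or a list of names. A small sketch of the single-name form under the draft-3 validator; the schema is made up for illustration:

    from jsonschema import Draft3Validator

    schema = {"dependencies": {"credit_card": "billing_address"}}
    validator = Draft3Validator(schema)

    print(validator.is_valid({"credit_card": 5555}))                                  # False: dependency missing
    print(validator.is_valid({"credit_card": 5555, "billing_address": "1 Main St"}))  # True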
diff --git a/third_party/python/jsonschema/jsonschema/_reflect.py b/third_party/python/jsonschema/jsonschema/_reflect.py
new file mode 100644
index 0000000000..d09e38fbdc
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/_reflect.py
@@ -0,0 +1,155 @@
+# -*- test-case-name: twisted.test.test_reflect -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Standardized versions of various cool and/or strange things that you can do
+with Python's reflection capabilities.
+"""
+
+import sys
+
+from jsonschema.compat import PY3
+
+
+class _NoModuleFound(Exception):
+ """
+ No module was found because none exists.
+ """
+
+
+
+class InvalidName(ValueError):
+ """
+ The given name is not a dot-separated list of Python objects.
+ """
+
+
+
+class ModuleNotFound(InvalidName):
+ """
+ The module associated with the given name doesn't exist and it can't be
+ imported.
+ """
+
+
+
+class ObjectNotFound(InvalidName):
+ """
+ The object associated with the given name doesn't exist and it can't be
+ imported.
+ """
+
+
+
+if PY3:
+ def reraise(exception, traceback):
+ raise exception.with_traceback(traceback)
+else:
+ exec("""def reraise(exception, traceback):
+ raise exception.__class__, exception, traceback""")
+
+reraise.__doc__ = """
+Re-raise an exception, with an optional traceback, in a way that is compatible
+with both Python 2 and Python 3.
+
+Note that on Python 3, re-raised exceptions will be mutated, with their
+C{__traceback__} attribute being set.
+
+@param exception: The exception instance.
+@param traceback: The traceback to use, or C{None} indicating a new traceback.
+"""
+
+
+def _importAndCheckStack(importName):
+ """
+ Import the given name as a module, then walk the stack to determine whether
+ the failure was the module not existing, or some code in the module (for
+ example a dependent import) failing. This can be helpful to determine
+    whether any actual application code was run. For example, to distinguish
+    administrative error (entering the wrong module name) from programmer
+ error (writing buggy code in a module that fails to import).
+
+ @param importName: The name of the module to import.
+ @type importName: C{str}
+ @raise Exception: if something bad happens. This can be any type of
+ exception, since nobody knows what loading some arbitrary code might
+ do.
+ @raise _NoModuleFound: if no module was found.
+ """
+ try:
+ return __import__(importName)
+ except ImportError:
+ excType, excValue, excTraceback = sys.exc_info()
+ while excTraceback:
+ execName = excTraceback.tb_frame.f_globals["__name__"]
+ # in Python 2 execName is None when an ImportError is encountered,
+            # whereas in Python 3 execName is equal to the importName.
+ if execName is None or execName == importName:
+ reraise(excValue, excTraceback)
+ excTraceback = excTraceback.tb_next
+ raise _NoModuleFound()
+
+
+
+def namedAny(name):
+ """
+ Retrieve a Python object by its fully qualified name from the global Python
+    module namespace. The first part of the name, which describes a module,
+ will be discovered and imported. Each subsequent part of the name is
+ treated as the name of an attribute of the object specified by all of the
+ name which came before it. For example, the fully-qualified name of this
+ object is 'twisted.python.reflect.namedAny'.
+
+ @type name: L{str}
+ @param name: The name of the object to return.
+
+ @raise InvalidName: If the name is an empty string, starts or ends with
+ a '.', or is otherwise syntactically incorrect.
+
+ @raise ModuleNotFound: If the name is syntactically correct but the
+ module it specifies cannot be imported because it does not appear to
+ exist.
+
+ @raise ObjectNotFound: If the name is syntactically correct, includes at
+ least one '.', but the module it specifies cannot be imported because
+ it does not appear to exist.
+
+ @raise AttributeError: If an attribute of an object along the way cannot be
+ accessed, or a module along the way is not found.
+
+ @return: the Python object identified by 'name'.
+ """
+ if not name:
+ raise InvalidName('Empty module name')
+
+ names = name.split('.')
+
+ # if the name starts or ends with a '.' or contains '..', the __import__
+ # will raise an 'Empty module name' error. This will provide a better error
+ # message.
+ if '' in names:
+ raise InvalidName(
+ "name must be a string giving a '.'-separated list of Python "
+ "identifiers, not %r" % (name,))
+
+ topLevelPackage = None
+ moduleNames = names[:]
+ while not topLevelPackage:
+ if moduleNames:
+ trialname = '.'.join(moduleNames)
+ try:
+ topLevelPackage = _importAndCheckStack(trialname)
+ except _NoModuleFound:
+ moduleNames.pop()
+ else:
+ if len(names) == 1:
+ raise ModuleNotFound("No module named %r" % (name,))
+ else:
+ raise ObjectNotFound('%r does not name an object' % (name,))
+
+ obj = topLevelPackage
+ for n in names[1:]:
+ obj = getattr(obj, n)
+
+ return obj
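A short sketch of `namedAny` in use (not part of the module itself): it imports the module portion of a dotted name, then walks the remaining attributes.

    from jsonschema._reflect import namedAny

    dumps = namedAny("json.dumps")   # imports json, then resolves the attribute
    print(dumps({"a": 1}))           # {"a": 1}
    # namedAny("") raises InvalidName; an unimportable module raises ModuleNotFound.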
diff --git a/third_party/python/jsonschema/jsonschema/_types.py b/third_party/python/jsonschema/jsonschema/_types.py
new file mode 100644
index 0000000000..a71a4e34bd
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/_types.py
@@ -0,0 +1,188 @@
+import numbers
+
+from pyrsistent import pmap
+import attr
+
+from jsonschema.compat import int_types, str_types
+from jsonschema.exceptions import UndefinedTypeCheck
+
+
+def is_array(checker, instance):
+ return isinstance(instance, list)
+
+
+def is_bool(checker, instance):
+ return isinstance(instance, bool)
+
+
+def is_integer(checker, instance):
+ # bool inherits from int, so ensure bools aren't reported as ints
+ if isinstance(instance, bool):
+ return False
+ return isinstance(instance, int_types)
+
+
+def is_null(checker, instance):
+ return instance is None
+
+
+def is_number(checker, instance):
+ # bool inherits from int, so ensure bools aren't reported as ints
+ if isinstance(instance, bool):
+ return False
+ return isinstance(instance, numbers.Number)
+
+
+def is_object(checker, instance):
+ return isinstance(instance, dict)
+
+
+def is_string(checker, instance):
+ return isinstance(instance, str_types)
+
+
+def is_any(checker, instance):
+ return True
+
+
+@attr.s(frozen=True)
+class TypeChecker(object):
+ """
+ A ``type`` property checker.
+
+ A `TypeChecker` performs type checking for an `IValidator`. Type
+ checks to perform are updated using `TypeChecker.redefine` or
+ `TypeChecker.redefine_many` and removed via `TypeChecker.remove`.
+ Each of these return a new `TypeChecker` object.
+
+ Arguments:
+
+ type_checkers (dict):
+
+ The initial mapping of types to their checking functions.
+ """
+ _type_checkers = attr.ib(default=pmap(), converter=pmap)
+
+ def is_type(self, instance, type):
+ """
+ Check if the instance is of the appropriate type.
+
+ Arguments:
+
+ instance (object):
+
+ The instance to check
+
+ type (str):
+
+ The name of the type that is expected.
+
+ Returns:
+
+ bool: Whether it conformed.
+
+
+ Raises:
+
+ `jsonschema.exceptions.UndefinedTypeCheck`:
+ if type is unknown to this object.
+ """
+ try:
+ fn = self._type_checkers[type]
+ except KeyError:
+ raise UndefinedTypeCheck(type)
+
+ return fn(self, instance)
+
+ def redefine(self, type, fn):
+ """
+ Produce a new checker with the given type redefined.
+
+ Arguments:
+
+ type (str):
+
+ The name of the type to check.
+
+ fn (collections.Callable):
+
+ A function taking exactly two parameters - the type
+ checker calling the function and the instance to check.
+ The function should return true if instance is of this
+ type and false otherwise.
+
+ Returns:
+
+ A new `TypeChecker` instance.
+ """
+ return self.redefine_many({type: fn})
+
+ def redefine_many(self, definitions=()):
+ """
+ Produce a new checker with the given types redefined.
+
+ Arguments:
+
+ definitions (dict):
+
+ A dictionary mapping types to their checking functions.
+
+ Returns:
+
+ A new `TypeChecker` instance.
+ """
+ return attr.evolve(
+ self, type_checkers=self._type_checkers.update(definitions),
+ )
+
+ def remove(self, *types):
+ """
+ Produce a new checker with the given types forgotten.
+
+ Arguments:
+
+ types (~collections.Iterable):
+
+ the names of the types to remove.
+
+ Returns:
+
+ A new `TypeChecker` instance
+
+ Raises:
+
+ `jsonschema.exceptions.UndefinedTypeCheck`:
+
+ if any given type is unknown to this object
+ """
+
+ checkers = self._type_checkers
+ for each in types:
+ try:
+ checkers = checkers.remove(each)
+ except KeyError:
+ raise UndefinedTypeCheck(each)
+ return attr.evolve(self, type_checkers=checkers)
+
+
+draft3_type_checker = TypeChecker(
+ {
+ u"any": is_any,
+ u"array": is_array,
+ u"boolean": is_bool,
+ u"integer": is_integer,
+ u"object": is_object,
+ u"null": is_null,
+ u"number": is_number,
+ u"string": is_string,
+ },
+)
+draft4_type_checker = draft3_type_checker.remove(u"any")
+draft6_type_checker = draft4_type_checker.redefine(
+ u"integer",
+ lambda checker, instance: (
+ is_integer(checker, instance) or
+ isinstance(instance, float) and instance.is_integer()
+ ),
+)
+draft7_type_checker = draft6_type_checker
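`TypeChecker.redefine` returns a new checker rather than mutating the old one. A hedged sketch of the common pattern of teaching the draft-7 checker that `decimal.Decimal` counts as a "number" and building a validator class around it; the helper and class names are illustrative:

    from decimal import Decimal

    from jsonschema import Draft7Validator, validators

    def is_number_or_decimal(checker, instance):
        return (
            Draft7Validator.TYPE_CHECKER.is_type(instance, "number")
            or isinstance(instance, Decimal)
        )

    checker = Draft7Validator.TYPE_CHECKER.redefine("number", is_number_or_decimal)
    DecimalValidator = validators.extend(Draft7Validator, type_checker=checker)

    print(DecimalValidator({"type": "number"}).is_valid(Decimal("1.5")))  # True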
diff --git a/third_party/python/jsonschema/jsonschema/_utils.py b/third_party/python/jsonschema/jsonschema/_utils.py
new file mode 100644
index 0000000000..ceb880198d
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/_utils.py
@@ -0,0 +1,212 @@
+import itertools
+import json
+import pkgutil
+import re
+
+from jsonschema.compat import MutableMapping, str_types, urlsplit
+
+
+class URIDict(MutableMapping):
+ """
+ Dictionary which uses normalized URIs as keys.
+ """
+
+ def normalize(self, uri):
+ return urlsplit(uri).geturl()
+
+ def __init__(self, *args, **kwargs):
+ self.store = dict()
+ self.store.update(*args, **kwargs)
+
+ def __getitem__(self, uri):
+ return self.store[self.normalize(uri)]
+
+ def __setitem__(self, uri, value):
+ self.store[self.normalize(uri)] = value
+
+ def __delitem__(self, uri):
+ del self.store[self.normalize(uri)]
+
+ def __iter__(self):
+ return iter(self.store)
+
+ def __len__(self):
+ return len(self.store)
+
+ def __repr__(self):
+ return repr(self.store)
+
+
+class Unset(object):
+ """
+ An as-of-yet unset attribute or unprovided default parameter.
+ """
+
+ def __repr__(self):
+ return "<unset>"
+
+
+def load_schema(name):
+ """
+ Load a schema from ./schemas/``name``.json and return it.
+ """
+
+ data = pkgutil.get_data("jsonschema", "schemas/{0}.json".format(name))
+ return json.loads(data.decode("utf-8"))
+
+
+def indent(string, times=1):
+ """
+ A dumb version of `textwrap.indent` from Python 3.3.
+ """
+
+ return "\n".join(" " * (4 * times) + line for line in string.splitlines())
+
+
+def format_as_index(indices):
+ """
+ Construct a single string containing indexing operations for the indices.
+
+ For example, [1, 2, "foo"] -> [1][2]["foo"]
+
+ Arguments:
+
+ indices (sequence):
+
+ The indices to format.
+ """
+
+ if not indices:
+ return ""
+ return "[%s]" % "][".join(repr(index) for index in indices)
+
+
+def find_additional_properties(instance, schema):
+ """
+ Return the set of additional properties for the given ``instance``.
+
+ Weeds out properties that should have been validated by ``properties`` and
+ / or ``patternProperties``.
+
+ Assumes ``instance`` is dict-like already.
+ """
+
+ properties = schema.get("properties", {})
+ patterns = "|".join(schema.get("patternProperties", {}))
+ for property in instance:
+ if property not in properties:
+ if patterns and re.search(patterns, property):
+ continue
+ yield property
+
+
+def extras_msg(extras):
+ """
+ Create an error message for extra items or properties.
+ """
+
+ if len(extras) == 1:
+ verb = "was"
+ else:
+ verb = "were"
+ return ", ".join(repr(extra) for extra in extras), verb
+
+
+def types_msg(instance, types):
+ """
+ Create an error message for a failure to match the given types.
+
+ If the ``instance`` is an object and contains a ``name`` property, it will
+ be considered to be a description of that object and used as its type.
+
+ Otherwise the message is simply the reprs of the given ``types``.
+ """
+
+ reprs = []
+ for type in types:
+ try:
+ reprs.append(repr(type["name"]))
+ except Exception:
+ reprs.append(repr(type))
+ return "%r is not of type %s" % (instance, ", ".join(reprs))
+
+
+def flatten(suitable_for_isinstance):
+ """
+ isinstance() can accept a bunch of really annoying different types:
+ * a single type
+ * a tuple of types
+ * an arbitrary nested tree of tuples
+
+ Return a flattened tuple of the given argument.
+ """
+
+ types = set()
+
+ if not isinstance(suitable_for_isinstance, tuple):
+ suitable_for_isinstance = (suitable_for_isinstance,)
+ for thing in suitable_for_isinstance:
+ if isinstance(thing, tuple):
+ types.update(flatten(thing))
+ else:
+ types.add(thing)
+ return tuple(types)
+
+
+def ensure_list(thing):
+ """
+ Wrap ``thing`` in a list if it's a single str.
+
+ Otherwise, return it unchanged.
+ """
+
+ if isinstance(thing, str_types):
+ return [thing]
+ return thing
+
+
+def equal(one, two):
+ """
+ Check if two things are equal, but evade booleans and ints being equal.
+ """
+ return unbool(one) == unbool(two)
+
+
+def unbool(element, true=object(), false=object()):
+ """
+ A hack to make True and 1 and False and 0 unique for ``uniq``.
+ """
+
+ if element is True:
+ return true
+ elif element is False:
+ return false
+ return element
+
+
+def uniq(container):
+ """
+ Check if all of a container's elements are unique.
+
+    Successively tries first to rely on the elements being hashable, then
+ falls back on them being sortable, and finally falls back on brute
+ force.
+ """
+
+ try:
+ return len(set(unbool(i) for i in container)) == len(container)
+ except TypeError:
+ try:
+ sort = sorted(unbool(i) for i in container)
+ sliced = itertools.islice(sort, 1, None)
+ for i, j in zip(sort, sliced):
+ if i == j:
+ return False
+ except (NotImplementedError, TypeError):
+ seen = []
+ for e in container:
+ e = unbool(e)
+ if e in seen:
+ return False
+ seen.append(e)
+ return True
diff --git a/third_party/python/jsonschema/jsonschema/_validators.py b/third_party/python/jsonschema/jsonschema/_validators.py
new file mode 100644
index 0000000000..179fec09a9
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/_validators.py
@@ -0,0 +1,373 @@
+import re
+
+from jsonschema._utils import (
+ ensure_list,
+ equal,
+ extras_msg,
+ find_additional_properties,
+ types_msg,
+ unbool,
+ uniq,
+)
+from jsonschema.exceptions import FormatError, ValidationError
+from jsonschema.compat import iteritems
+
+
+def patternProperties(validator, patternProperties, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ for pattern, subschema in iteritems(patternProperties):
+ for k, v in iteritems(instance):
+ if re.search(pattern, k):
+ for error in validator.descend(
+ v, subschema, path=k, schema_path=pattern,
+ ):
+ yield error
+
+
+def propertyNames(validator, propertyNames, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ for property in instance:
+ for error in validator.descend(
+ instance=property,
+ schema=propertyNames,
+ ):
+ yield error
+
+
+def additionalProperties(validator, aP, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ extras = set(find_additional_properties(instance, schema))
+
+ if validator.is_type(aP, "object"):
+ for extra in extras:
+ for error in validator.descend(instance[extra], aP, path=extra):
+ yield error
+ elif not aP and extras:
+ if "patternProperties" in schema:
+ patterns = sorted(schema["patternProperties"])
+ if len(extras) == 1:
+ verb = "does"
+ else:
+ verb = "do"
+ error = "%s %s not match any of the regexes: %s" % (
+ ", ".join(map(repr, sorted(extras))),
+ verb,
+ ", ".join(map(repr, patterns)),
+ )
+ yield ValidationError(error)
+ else:
+ error = "Additional properties are not allowed (%s %s unexpected)"
+ yield ValidationError(error % extras_msg(extras))
+
+
+def items(validator, items, instance, schema):
+ if not validator.is_type(instance, "array"):
+ return
+
+ if validator.is_type(items, "array"):
+ for (index, item), subschema in zip(enumerate(instance), items):
+ for error in validator.descend(
+ item, subschema, path=index, schema_path=index,
+ ):
+ yield error
+ else:
+ for index, item in enumerate(instance):
+ for error in validator.descend(item, items, path=index):
+ yield error
+
+
+def additionalItems(validator, aI, instance, schema):
+ if (
+ not validator.is_type(instance, "array") or
+ validator.is_type(schema.get("items", {}), "object")
+ ):
+ return
+
+ len_items = len(schema.get("items", []))
+ if validator.is_type(aI, "object"):
+ for index, item in enumerate(instance[len_items:], start=len_items):
+ for error in validator.descend(item, aI, path=index):
+ yield error
+ elif not aI and len(instance) > len(schema.get("items", [])):
+ error = "Additional items are not allowed (%s %s unexpected)"
+ yield ValidationError(
+ error %
+ extras_msg(instance[len(schema.get("items", [])):])
+ )
+
+
+def const(validator, const, instance, schema):
+ if not equal(instance, const):
+ yield ValidationError("%r was expected" % (const,))
+
+
+def contains(validator, contains, instance, schema):
+ if not validator.is_type(instance, "array"):
+ return
+
+ if not any(validator.is_valid(element, contains) for element in instance):
+ yield ValidationError(
+ "None of %r are valid under the given schema" % (instance,)
+ )
+
+
+def exclusiveMinimum(validator, minimum, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if instance <= minimum:
+ yield ValidationError(
+ "%r is less than or equal to the minimum of %r" % (
+ instance, minimum,
+ ),
+ )
+
+
+def exclusiveMaximum(validator, maximum, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if instance >= maximum:
+ yield ValidationError(
+ "%r is greater than or equal to the maximum of %r" % (
+ instance, maximum,
+ ),
+ )
+
+
+def minimum(validator, minimum, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if instance < minimum:
+ yield ValidationError(
+ "%r is less than the minimum of %r" % (instance, minimum)
+ )
+
+
+def maximum(validator, maximum, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if instance > maximum:
+ yield ValidationError(
+ "%r is greater than the maximum of %r" % (instance, maximum)
+ )
+
+
+def multipleOf(validator, dB, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if isinstance(dB, float):
+ quotient = instance / dB
+ failed = int(quotient) != quotient
+ else:
+ failed = instance % dB
+
+ if failed:
+ yield ValidationError("%r is not a multiple of %r" % (instance, dB))
+
+
+def minItems(validator, mI, instance, schema):
+ if validator.is_type(instance, "array") and len(instance) < mI:
+ yield ValidationError("%r is too short" % (instance,))
+
+
+def maxItems(validator, mI, instance, schema):
+ if validator.is_type(instance, "array") and len(instance) > mI:
+ yield ValidationError("%r is too long" % (instance,))
+
+
+def uniqueItems(validator, uI, instance, schema):
+ if (
+ uI and
+ validator.is_type(instance, "array") and
+ not uniq(instance)
+ ):
+ yield ValidationError("%r has non-unique elements" % (instance,))
+
+
+def pattern(validator, patrn, instance, schema):
+ if (
+ validator.is_type(instance, "string") and
+ not re.search(patrn, instance)
+ ):
+ yield ValidationError("%r does not match %r" % (instance, patrn))
+
+
+def format(validator, format, instance, schema):
+ if validator.format_checker is not None:
+ try:
+ validator.format_checker.check(instance, format)
+ except FormatError as error:
+ yield ValidationError(error.message, cause=error.cause)
+
+
+def minLength(validator, mL, instance, schema):
+ if validator.is_type(instance, "string") and len(instance) < mL:
+ yield ValidationError("%r is too short" % (instance,))
+
+
+def maxLength(validator, mL, instance, schema):
+ if validator.is_type(instance, "string") and len(instance) > mL:
+ yield ValidationError("%r is too long" % (instance,))
+
+
+def dependencies(validator, dependencies, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ for property, dependency in iteritems(dependencies):
+ if property not in instance:
+ continue
+
+ if validator.is_type(dependency, "array"):
+ for each in dependency:
+ if each not in instance:
+ message = "%r is a dependency of %r"
+ yield ValidationError(message % (each, property))
+ else:
+ for error in validator.descend(
+ instance, dependency, schema_path=property,
+ ):
+ yield error
+
+
+def enum(validator, enums, instance, schema):
+ if instance == 0 or instance == 1:
+ unbooled = unbool(instance)
+ if all(unbooled != unbool(each) for each in enums):
+ yield ValidationError("%r is not one of %r" % (instance, enums))
+ elif instance not in enums:
+ yield ValidationError("%r is not one of %r" % (instance, enums))
+
+
+def ref(validator, ref, instance, schema):
+ resolve = getattr(validator.resolver, "resolve", None)
+ if resolve is None:
+ with validator.resolver.resolving(ref) as resolved:
+ for error in validator.descend(instance, resolved):
+ yield error
+ else:
+ scope, resolved = validator.resolver.resolve(ref)
+ validator.resolver.push_scope(scope)
+
+ try:
+ for error in validator.descend(instance, resolved):
+ yield error
+ finally:
+ validator.resolver.pop_scope()
+
+
+def type(validator, types, instance, schema):
+ types = ensure_list(types)
+
+ if not any(validator.is_type(instance, type) for type in types):
+ yield ValidationError(types_msg(instance, types))
+
+
+def properties(validator, properties, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ for property, subschema in iteritems(properties):
+ if property in instance:
+ for error in validator.descend(
+ instance[property],
+ subschema,
+ path=property,
+ schema_path=property,
+ ):
+ yield error
+
+
+def required(validator, required, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+ for property in required:
+ if property not in instance:
+ yield ValidationError("%r is a required property" % property)
+
+
+def minProperties(validator, mP, instance, schema):
+ if validator.is_type(instance, "object") and len(instance) < mP:
+ yield ValidationError(
+ "%r does not have enough properties" % (instance,)
+ )
+
+
+def maxProperties(validator, mP, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+ if validator.is_type(instance, "object") and len(instance) > mP:
+ yield ValidationError("%r has too many properties" % (instance,))
+
+
+def allOf(validator, allOf, instance, schema):
+ for index, subschema in enumerate(allOf):
+ for error in validator.descend(instance, subschema, schema_path=index):
+ yield error
+
+
+def anyOf(validator, anyOf, instance, schema):
+ all_errors = []
+ for index, subschema in enumerate(anyOf):
+ errs = list(validator.descend(instance, subschema, schema_path=index))
+ if not errs:
+ break
+ all_errors.extend(errs)
+ else:
+ yield ValidationError(
+ "%r is not valid under any of the given schemas" % (instance,),
+ context=all_errors,
+ )
+
+
+def oneOf(validator, oneOf, instance, schema):
+ subschemas = enumerate(oneOf)
+ all_errors = []
+ for index, subschema in subschemas:
+ errs = list(validator.descend(instance, subschema, schema_path=index))
+ if not errs:
+ first_valid = subschema
+ break
+ all_errors.extend(errs)
+ else:
+ yield ValidationError(
+ "%r is not valid under any of the given schemas" % (instance,),
+ context=all_errors,
+ )
+
+ more_valid = [s for i, s in subschemas if validator.is_valid(instance, s)]
+ if more_valid:
+ more_valid.append(first_valid)
+ reprs = ", ".join(repr(schema) for schema in more_valid)
+ yield ValidationError(
+ "%r is valid under each of %s" % (instance, reprs)
+ )
+
+
+def not_(validator, not_schema, instance, schema):
+ if validator.is_valid(instance, not_schema):
+ yield ValidationError(
+ "%r is not allowed for %r" % (not_schema, instance)
+ )
+
+
+def if_(validator, if_schema, instance, schema):
+ if validator.is_valid(instance, if_schema):
+ if u"then" in schema:
+ then = schema[u"then"]
+ for error in validator.descend(instance, then, schema_path="then"):
+ yield error
+ elif u"else" in schema:
+ else_ = schema[u"else"]
+ for error in validator.descend(instance, else_, schema_path="else"):
+ yield error
diff --git a/third_party/python/jsonschema/jsonschema/benchmarks/__init__.py b/third_party/python/jsonschema/jsonschema/benchmarks/__init__.py
new file mode 100644
index 0000000000..e3dcc68993
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/benchmarks/__init__.py
@@ -0,0 +1,5 @@
+"""
+Benchmarks for validation.
+
+This package is *not* public API.
+"""
diff --git a/third_party/python/jsonschema/jsonschema/benchmarks/issue232.py b/third_party/python/jsonschema/jsonschema/benchmarks/issue232.py
new file mode 100644
index 0000000000..65e3aedf79
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/benchmarks/issue232.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+"""
+A performance benchmark using the example from issue #232.
+
+See https://github.com/Julian/jsonschema/pull/232.
+"""
+from twisted.python.filepath import FilePath
+from pyperf import Runner
+from pyrsistent import m
+
+from jsonschema.tests._suite import Version
+import jsonschema
+
+
+issue232 = Version(
+ path=FilePath(__file__).sibling("issue232"),
+ remotes=m(),
+ name="issue232",
+)
+
+
+if __name__ == "__main__":
+ issue232.benchmark(
+ runner=Runner(),
+ Validator=jsonschema.Draft4Validator,
+ )
diff --git a/third_party/python/jsonschema/jsonschema/benchmarks/issue232/issue.json b/third_party/python/jsonschema/jsonschema/benchmarks/issue232/issue.json
new file mode 100644
index 0000000000..804c340845
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/benchmarks/issue232/issue.json
@@ -0,0 +1,2653 @@
+[
+ {
+ "description": "Petstore",
+ "schema": {
+ "title": "A JSON Schema for Swagger 2.0 API.",
+ "id": "http://swagger.io/v2/schema.json#",
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "type": "object",
+ "required": [
+ "swagger",
+ "info",
+ "paths"
+ ],
+ "additionalProperties": false,
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ },
+ "properties": {
+ "swagger": {
+ "type": "string",
+ "enum": [
+ "2.0"
+ ],
+ "description": "The Swagger version of this document."
+ },
+ "info": {
+ "$ref": "#/definitions/info"
+ },
+ "host": {
+ "type": "string",
+ "pattern": "^[^{}/ :\\\\]+(?::\\d+)?$",
+ "description": "The host (name or ip) of the API. Example: 'swagger.io'"
+ },
+ "basePath": {
+ "type": "string",
+ "pattern": "^/",
+ "description": "The base path to the API. Example: '/api'."
+ },
+ "schemes": {
+ "$ref": "#/definitions/schemesList"
+ },
+ "consumes": {
+ "description": "A list of MIME types accepted by the API.",
+ "allOf": [
+ {
+ "$ref": "#/definitions/mediaTypeList"
+ }
+ ]
+ },
+ "produces": {
+ "description": "A list of MIME types the API can produce.",
+ "allOf": [
+ {
+ "$ref": "#/definitions/mediaTypeList"
+ }
+ ]
+ },
+ "paths": {
+ "$ref": "#/definitions/paths"
+ },
+ "definitions": {
+ "$ref": "#/definitions/definitions"
+ },
+ "parameters": {
+ "$ref": "#/definitions/parameterDefinitions"
+ },
+ "responses": {
+ "$ref": "#/definitions/responseDefinitions"
+ },
+ "security": {
+ "$ref": "#/definitions/security"
+ },
+ "securityDefinitions": {
+ "$ref": "#/definitions/securityDefinitions"
+ },
+ "tags": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/tag"
+ },
+ "uniqueItems": true
+ },
+ "externalDocs": {
+ "$ref": "#/definitions/externalDocs"
+ }
+ },
+ "definitions": {
+ "info": {
+ "type": "object",
+ "description": "General information about the API.",
+ "required": [
+ "version",
+ "title"
+ ],
+ "additionalProperties": false,
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ },
+ "properties": {
+ "title": {
+ "type": "string",
+ "description": "A unique and precise title of the API."
+ },
+ "version": {
+ "type": "string",
+ "description": "A semantic version number of the API."
+ },
+ "description": {
+ "type": "string",
+ "description": "A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed."
+ },
+ "termsOfService": {
+ "type": "string",
+ "description": "The terms of service for the API."
+ },
+ "contact": {
+ "$ref": "#/definitions/contact"
+ },
+ "license": {
+ "$ref": "#/definitions/license"
+ }
+ }
+ },
+ "contact": {
+ "type": "object",
+ "description": "Contact information for the owners of the API.",
+ "additionalProperties": false,
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "The identifying name of the contact person/organization."
+ },
+ "url": {
+ "type": "string",
+ "description": "The URL pointing to the contact information.",
+ "format": "uri"
+ },
+ "email": {
+ "type": "string",
+ "description": "The email address of the contact person/organization.",
+ "format": "email"
+ }
+ },
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "license": {
+ "type": "object",
+ "required": [
+ "name"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "The name of the license type. It's encouraged to use an OSI compatible license."
+ },
+ "url": {
+ "type": "string",
+ "description": "The URL pointing to the license.",
+ "format": "uri"
+ }
+ },
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "paths": {
+ "type": "object",
+ "description": "Relative paths to the individual endpoints. They must be relative to the 'basePath'.",
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ },
+ "^/": {
+ "$ref": "#/definitions/pathItem"
+ }
+ },
+ "additionalProperties": false
+ },
+ "definitions": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/definitions/schema"
+ },
+ "description": "One or more JSON objects describing the schemas being consumed and produced by the API."
+ },
+ "parameterDefinitions": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/definitions/parameter"
+ },
+ "description": "One or more JSON representations for parameters"
+ },
+ "responseDefinitions": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/definitions/response"
+ },
+ "description": "One or more JSON representations for parameters"
+ },
+ "externalDocs": {
+ "type": "object",
+ "additionalProperties": false,
+ "description": "information about external documentation",
+ "required": [
+ "url"
+ ],
+ "properties": {
+ "description": {
+ "type": "string"
+ },
+ "url": {
+ "type": "string",
+ "format": "uri"
+ }
+ },
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "examples": {
+ "type": "object",
+ "additionalProperties": true
+ },
+ "mimeType": {
+ "type": "string",
+ "description": "The MIME type of the HTTP message."
+ },
+ "operation": {
+ "type": "object",
+ "required": [
+ "responses"
+ ],
+ "additionalProperties": false,
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ },
+ "properties": {
+ "tags": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "uniqueItems": true
+ },
+ "summary": {
+ "type": "string",
+ "description": "A brief summary of the operation."
+ },
+ "description": {
+ "type": "string",
+ "description": "A longer description of the operation, GitHub Flavored Markdown is allowed."
+ },
+ "externalDocs": {
+ "$ref": "#/definitions/externalDocs"
+ },
+ "operationId": {
+ "type": "string",
+ "description": "A unique identifier of the operation."
+ },
+ "produces": {
+ "description": "A list of MIME types the API can produce.",
+ "allOf": [
+ {
+ "$ref": "#/definitions/mediaTypeList"
+ }
+ ]
+ },
+ "consumes": {
+ "description": "A list of MIME types the API can consume.",
+ "allOf": [
+ {
+ "$ref": "#/definitions/mediaTypeList"
+ }
+ ]
+ },
+ "parameters": {
+ "$ref": "#/definitions/parametersList"
+ },
+ "responses": {
+ "$ref": "#/definitions/responses"
+ },
+ "schemes": {
+ "$ref": "#/definitions/schemesList"
+ },
+ "deprecated": {
+ "type": "boolean",
+ "default": false
+ },
+ "security": {
+ "$ref": "#/definitions/security"
+ }
+ }
+ },
+ "pathItem": {
+ "type": "object",
+ "additionalProperties": false,
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ },
+ "properties": {
+ "$ref": {
+ "type": "string"
+ },
+ "get": {
+ "$ref": "#/definitions/operation"
+ },
+ "put": {
+ "$ref": "#/definitions/operation"
+ },
+ "post": {
+ "$ref": "#/definitions/operation"
+ },
+ "delete": {
+ "$ref": "#/definitions/operation"
+ },
+ "options": {
+ "$ref": "#/definitions/operation"
+ },
+ "head": {
+ "$ref": "#/definitions/operation"
+ },
+ "patch": {
+ "$ref": "#/definitions/operation"
+ },
+ "parameters": {
+ "$ref": "#/definitions/parametersList"
+ }
+ }
+ },
+ "responses": {
+ "type": "object",
+ "description": "Response objects names can either be any valid HTTP status code or 'default'.",
+ "minProperties": 1,
+ "additionalProperties": false,
+ "patternProperties": {
+ "^([0-9]{3})$|^(default)$": {
+ "$ref": "#/definitions/responseValue"
+ },
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ },
+ "not": {
+ "type": "object",
+ "additionalProperties": false,
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ }
+ },
+ "responseValue": {
+ "oneOf": [
+ {
+ "$ref": "#/definitions/response"
+ },
+ {
+ "$ref": "#/definitions/jsonReference"
+ }
+ ]
+ },
+ "response": {
+ "type": "object",
+ "required": [
+ "description"
+ ],
+ "properties": {
+ "description": {
+ "type": "string"
+ },
+ "schema": {
+ "oneOf": [
+ {
+ "$ref": "#/definitions/schema"
+ },
+ {
+ "$ref": "#/definitions/fileSchema"
+ }
+ ]
+ },
+ "headers": {
+ "$ref": "#/definitions/headers"
+ },
+ "examples": {
+ "$ref": "#/definitions/examples"
+ }
+ },
+ "additionalProperties": false,
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "headers": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/definitions/header"
+ }
+ },
+ "header": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": [
+ "string",
+ "number",
+ "integer",
+ "boolean",
+ "array"
+ ]
+ },
+ "format": {
+ "type": "string"
+ },
+ "items": {
+ "$ref": "#/definitions/primitivesItems"
+ },
+ "collectionFormat": {
+ "$ref": "#/definitions/collectionFormat"
+ },
+ "default": {
+ "$ref": "#/definitions/default"
+ },
+ "maximum": {
+ "$ref": "#/definitions/maximum"
+ },
+ "exclusiveMaximum": {
+ "$ref": "#/definitions/exclusiveMaximum"
+ },
+ "minimum": {
+ "$ref": "#/definitions/minimum"
+ },
+ "exclusiveMinimum": {
+ "$ref": "#/definitions/exclusiveMinimum"
+ },
+ "maxLength": {
+ "$ref": "#/definitions/maxLength"
+ },
+ "minLength": {
+ "$ref": "#/definitions/minLength"
+ },
+ "pattern": {
+ "$ref": "#/definitions/pattern"
+ },
+ "maxItems": {
+ "$ref": "#/definitions/maxItems"
+ },
+ "minItems": {
+ "$ref": "#/definitions/minItems"
+ },
+ "uniqueItems": {
+ "$ref": "#/definitions/uniqueItems"
+ },
+ "enum": {
+ "$ref": "#/definitions/enum"
+ },
+ "multipleOf": {
+ "$ref": "#/definitions/multipleOf"
+ },
+ "description": {
+ "type": "string"
+ }
+ },
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "vendorExtension": {
+ "description": "Any property starting with x- is valid.",
+ "additionalProperties": true,
+ "additionalItems": true
+ },
+ "bodyParameter": {
+ "type": "object",
+ "required": [
+ "name",
+ "in",
+ "schema"
+ ],
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ },
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed."
+ },
+ "name": {
+ "type": "string",
+ "description": "The name of the parameter."
+ },
+ "in": {
+ "type": "string",
+ "description": "Determines the location of the parameter.",
+ "enum": [
+ "body"
+ ]
+ },
+ "required": {
+ "type": "boolean",
+ "description": "Determines whether or not this parameter is required or optional.",
+ "default": false
+ },
+ "schema": {
+ "$ref": "#/definitions/schema"
+ }
+ },
+ "additionalProperties": false
+ },
+ "headerParameterSubSchema": {
+ "additionalProperties": false,
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ },
+ "properties": {
+ "required": {
+ "type": "boolean",
+ "description": "Determines whether or not this parameter is required or optional.",
+ "default": false
+ },
+ "in": {
+ "type": "string",
+ "description": "Determines the location of the parameter.",
+ "enum": [
+ "header"
+ ]
+ },
+ "description": {
+ "type": "string",
+ "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed."
+ },
+ "name": {
+ "type": "string",
+ "description": "The name of the parameter."
+ },
+ "type": {
+ "type": "string",
+ "enum": [
+ "string",
+ "number",
+ "boolean",
+ "integer",
+ "array"
+ ]
+ },
+ "format": {
+ "type": "string"
+ },
+ "items": {
+ "$ref": "#/definitions/primitivesItems"
+ },
+ "collectionFormat": {
+ "$ref": "#/definitions/collectionFormat"
+ },
+ "default": {
+ "$ref": "#/definitions/default"
+ },
+ "maximum": {
+ "$ref": "#/definitions/maximum"
+ },
+ "exclusiveMaximum": {
+ "$ref": "#/definitions/exclusiveMaximum"
+ },
+ "minimum": {
+ "$ref": "#/definitions/minimum"
+ },
+ "exclusiveMinimum": {
+ "$ref": "#/definitions/exclusiveMinimum"
+ },
+ "maxLength": {
+ "$ref": "#/definitions/maxLength"
+ },
+ "minLength": {
+ "$ref": "#/definitions/minLength"
+ },
+ "pattern": {
+ "$ref": "#/definitions/pattern"
+ },
+ "maxItems": {
+ "$ref": "#/definitions/maxItems"
+ },
+ "minItems": {
+ "$ref": "#/definitions/minItems"
+ },
+ "uniqueItems": {
+ "$ref": "#/definitions/uniqueItems"
+ },
+ "enum": {
+ "$ref": "#/definitions/enum"
+ },
+ "multipleOf": {
+ "$ref": "#/definitions/multipleOf"
+ }
+ }
+ },
+ "queryParameterSubSchema": {
+ "additionalProperties": false,
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ },
+ "properties": {
+ "required": {
+ "type": "boolean",
+ "description": "Determines whether or not this parameter is required or optional.",
+ "default": false
+ },
+ "in": {
+ "type": "string",
+ "description": "Determines the location of the parameter.",
+ "enum": [
+ "query"
+ ]
+ },
+ "description": {
+ "type": "string",
+ "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed."
+ },
+ "name": {
+ "type": "string",
+ "description": "The name of the parameter."
+ },
+ "allowEmptyValue": {
+ "type": "boolean",
+ "default": false,
+ "description": "allows sending a parameter by name only or with an empty value."
+ },
+ "type": {
+ "type": "string",
+ "enum": [
+ "string",
+ "number",
+ "boolean",
+ "integer",
+ "array"
+ ]
+ },
+ "format": {
+ "type": "string"
+ },
+ "items": {
+ "$ref": "#/definitions/primitivesItems"
+ },
+ "collectionFormat": {
+ "$ref": "#/definitions/collectionFormatWithMulti"
+ },
+ "default": {
+ "$ref": "#/definitions/default"
+ },
+ "maximum": {
+ "$ref": "#/definitions/maximum"
+ },
+ "exclusiveMaximum": {
+ "$ref": "#/definitions/exclusiveMaximum"
+ },
+ "minimum": {
+ "$ref": "#/definitions/minimum"
+ },
+ "exclusiveMinimum": {
+ "$ref": "#/definitions/exclusiveMinimum"
+ },
+ "maxLength": {
+ "$ref": "#/definitions/maxLength"
+ },
+ "minLength": {
+ "$ref": "#/definitions/minLength"
+ },
+ "pattern": {
+ "$ref": "#/definitions/pattern"
+ },
+ "maxItems": {
+ "$ref": "#/definitions/maxItems"
+ },
+ "minItems": {
+ "$ref": "#/definitions/minItems"
+ },
+ "uniqueItems": {
+ "$ref": "#/definitions/uniqueItems"
+ },
+ "enum": {
+ "$ref": "#/definitions/enum"
+ },
+ "multipleOf": {
+ "$ref": "#/definitions/multipleOf"
+ }
+ }
+ },
+ "formDataParameterSubSchema": {
+ "additionalProperties": false,
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ },
+ "properties": {
+ "required": {
+ "type": "boolean",
+ "description": "Determines whether or not this parameter is required or optional.",
+ "default": false
+ },
+ "in": {
+ "type": "string",
+ "description": "Determines the location of the parameter.",
+ "enum": [
+ "formData"
+ ]
+ },
+ "description": {
+ "type": "string",
+ "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed."
+ },
+ "name": {
+ "type": "string",
+ "description": "The name of the parameter."
+ },
+ "allowEmptyValue": {
+ "type": "boolean",
+ "default": false,
+ "description": "allows sending a parameter by name only or with an empty value."
+ },
+ "type": {
+ "type": "string",
+ "enum": [
+ "string",
+ "number",
+ "boolean",
+ "integer",
+ "array",
+ "file"
+ ]
+ },
+ "format": {
+ "type": "string"
+ },
+ "items": {
+ "$ref": "#/definitions/primitivesItems"
+ },
+ "collectionFormat": {
+ "$ref": "#/definitions/collectionFormatWithMulti"
+ },
+ "default": {
+ "$ref": "#/definitions/default"
+ },
+ "maximum": {
+ "$ref": "#/definitions/maximum"
+ },
+ "exclusiveMaximum": {
+ "$ref": "#/definitions/exclusiveMaximum"
+ },
+ "minimum": {
+ "$ref": "#/definitions/minimum"
+ },
+ "exclusiveMinimum": {
+ "$ref": "#/definitions/exclusiveMinimum"
+ },
+ "maxLength": {
+ "$ref": "#/definitions/maxLength"
+ },
+ "minLength": {
+ "$ref": "#/definitions/minLength"
+ },
+ "pattern": {
+ "$ref": "#/definitions/pattern"
+ },
+ "maxItems": {
+ "$ref": "#/definitions/maxItems"
+ },
+ "minItems": {
+ "$ref": "#/definitions/minItems"
+ },
+ "uniqueItems": {
+ "$ref": "#/definitions/uniqueItems"
+ },
+ "enum": {
+ "$ref": "#/definitions/enum"
+ },
+ "multipleOf": {
+ "$ref": "#/definitions/multipleOf"
+ }
+ }
+ },
+ "pathParameterSubSchema": {
+ "additionalProperties": false,
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ },
+ "required": [
+ "required"
+ ],
+ "properties": {
+ "required": {
+ "type": "boolean",
+ "enum": [
+ true
+ ],
+ "description": "Determines whether or not this parameter is required or optional."
+ },
+ "in": {
+ "type": "string",
+ "description": "Determines the location of the parameter.",
+ "enum": [
+ "path"
+ ]
+ },
+ "description": {
+ "type": "string",
+ "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed."
+ },
+ "name": {
+ "type": "string",
+ "description": "The name of the parameter."
+ },
+ "type": {
+ "type": "string",
+ "enum": [
+ "string",
+ "number",
+ "boolean",
+ "integer",
+ "array"
+ ]
+ },
+ "format": {
+ "type": "string"
+ },
+ "items": {
+ "$ref": "#/definitions/primitivesItems"
+ },
+ "collectionFormat": {
+ "$ref": "#/definitions/collectionFormat"
+ },
+ "default": {
+ "$ref": "#/definitions/default"
+ },
+ "maximum": {
+ "$ref": "#/definitions/maximum"
+ },
+ "exclusiveMaximum": {
+ "$ref": "#/definitions/exclusiveMaximum"
+ },
+ "minimum": {
+ "$ref": "#/definitions/minimum"
+ },
+ "exclusiveMinimum": {
+ "$ref": "#/definitions/exclusiveMinimum"
+ },
+ "maxLength": {
+ "$ref": "#/definitions/maxLength"
+ },
+ "minLength": {
+ "$ref": "#/definitions/minLength"
+ },
+ "pattern": {
+ "$ref": "#/definitions/pattern"
+ },
+ "maxItems": {
+ "$ref": "#/definitions/maxItems"
+ },
+ "minItems": {
+ "$ref": "#/definitions/minItems"
+ },
+ "uniqueItems": {
+ "$ref": "#/definitions/uniqueItems"
+ },
+ "enum": {
+ "$ref": "#/definitions/enum"
+ },
+ "multipleOf": {
+ "$ref": "#/definitions/multipleOf"
+ }
+ }
+ },
+ "nonBodyParameter": {
+ "type": "object",
+ "required": [
+ "name",
+ "in",
+ "type"
+ ],
+ "oneOf": [
+ {
+ "$ref": "#/definitions/headerParameterSubSchema"
+ },
+ {
+ "$ref": "#/definitions/formDataParameterSubSchema"
+ },
+ {
+ "$ref": "#/definitions/queryParameterSubSchema"
+ },
+ {
+ "$ref": "#/definitions/pathParameterSubSchema"
+ }
+ ]
+ },
+ "parameter": {
+ "oneOf": [
+ {
+ "$ref": "#/definitions/bodyParameter"
+ },
+ {
+ "$ref": "#/definitions/nonBodyParameter"
+ }
+ ]
+ },
+ "schema": {
+ "type": "object",
+ "description": "A deterministic version of a JSON Schema object.",
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ },
+ "properties": {
+ "$ref": {
+ "type": "string"
+ },
+ "format": {
+ "type": "string"
+ },
+ "title": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/title"
+ },
+ "description": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/description"
+ },
+ "default": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/default"
+ },
+ "multipleOf": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf"
+ },
+ "maximum": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum"
+ },
+ "exclusiveMaximum": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum"
+ },
+ "minimum": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum"
+ },
+ "exclusiveMinimum": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum"
+ },
+ "maxLength": {
+ "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger"
+ },
+ "minLength": {
+ "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0"
+ },
+ "pattern": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern"
+ },
+ "maxItems": {
+ "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger"
+ },
+ "minItems": {
+ "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0"
+ },
+ "uniqueItems": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems"
+ },
+ "maxProperties": {
+ "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger"
+ },
+ "minProperties": {
+ "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0"
+ },
+ "required": {
+ "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray"
+ },
+ "enum": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/enum"
+ },
+ "additionalProperties": {
+ "anyOf": [
+ {
+ "$ref": "#/definitions/schema"
+ },
+ {
+ "type": "boolean"
+ }
+ ],
+ "default": {}
+ },
+ "type": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/type"
+ },
+ "items": {
+ "anyOf": [
+ {
+ "$ref": "#/definitions/schema"
+ },
+ {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "$ref": "#/definitions/schema"
+ }
+ }
+ ],
+ "default": {}
+ },
+ "allOf": {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "$ref": "#/definitions/schema"
+ }
+ },
+ "properties": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/definitions/schema"
+ },
+ "default": {}
+ },
+ "discriminator": {
+ "type": "string"
+ },
+ "readOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "xml": {
+ "$ref": "#/definitions/xml"
+ },
+ "externalDocs": {
+ "$ref": "#/definitions/externalDocs"
+ },
+ "example": {}
+ },
+ "additionalProperties": false
+ },
+ "fileSchema": {
+ "type": "object",
+ "description": "A deterministic version of a JSON Schema object.",
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ },
+ "required": [
+ "type"
+ ],
+ "properties": {
+ "format": {
+ "type": "string"
+ },
+ "title": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/title"
+ },
+ "description": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/description"
+ },
+ "default": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/default"
+ },
+ "required": {
+ "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray"
+ },
+ "type": {
+ "type": "string",
+ "enum": [
+ "file"
+ ]
+ },
+ "readOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "externalDocs": {
+ "$ref": "#/definitions/externalDocs"
+ },
+ "example": {}
+ },
+ "additionalProperties": false
+ },
+ "primitivesItems": {
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": [
+ "string",
+ "number",
+ "integer",
+ "boolean",
+ "array"
+ ]
+ },
+ "format": {
+ "type": "string"
+ },
+ "items": {
+ "$ref": "#/definitions/primitivesItems"
+ },
+ "collectionFormat": {
+ "$ref": "#/definitions/collectionFormat"
+ },
+ "default": {
+ "$ref": "#/definitions/default"
+ },
+ "maximum": {
+ "$ref": "#/definitions/maximum"
+ },
+ "exclusiveMaximum": {
+ "$ref": "#/definitions/exclusiveMaximum"
+ },
+ "minimum": {
+ "$ref": "#/definitions/minimum"
+ },
+ "exclusiveMinimum": {
+ "$ref": "#/definitions/exclusiveMinimum"
+ },
+ "maxLength": {
+ "$ref": "#/definitions/maxLength"
+ },
+ "minLength": {
+ "$ref": "#/definitions/minLength"
+ },
+ "pattern": {
+ "$ref": "#/definitions/pattern"
+ },
+ "maxItems": {
+ "$ref": "#/definitions/maxItems"
+ },
+ "minItems": {
+ "$ref": "#/definitions/minItems"
+ },
+ "uniqueItems": {
+ "$ref": "#/definitions/uniqueItems"
+ },
+ "enum": {
+ "$ref": "#/definitions/enum"
+ },
+ "multipleOf": {
+ "$ref": "#/definitions/multipleOf"
+ }
+ },
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "security": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/securityRequirement"
+ },
+ "uniqueItems": true
+ },
+ "securityRequirement": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "uniqueItems": true
+ }
+ },
+ "xml": {
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "namespace": {
+ "type": "string"
+ },
+ "prefix": {
+ "type": "string"
+ },
+ "attribute": {
+ "type": "boolean",
+ "default": false
+ },
+ "wrapped": {
+ "type": "boolean",
+ "default": false
+ }
+ },
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "tag": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name"
+ ],
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "externalDocs": {
+ "$ref": "#/definitions/externalDocs"
+ }
+ },
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "securityDefinitions": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "$ref": "#/definitions/basicAuthenticationSecurity"
+ },
+ {
+ "$ref": "#/definitions/apiKeySecurity"
+ },
+ {
+ "$ref": "#/definitions/oauth2ImplicitSecurity"
+ },
+ {
+ "$ref": "#/definitions/oauth2PasswordSecurity"
+ },
+ {
+ "$ref": "#/definitions/oauth2ApplicationSecurity"
+ },
+ {
+ "$ref": "#/definitions/oauth2AccessCodeSecurity"
+ }
+ ]
+ }
+ },
+ "basicAuthenticationSecurity": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": [
+ "basic"
+ ]
+ },
+ "description": {
+ "type": "string"
+ }
+ },
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "apiKeySecurity": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "name",
+ "in"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": [
+ "apiKey"
+ ]
+ },
+ "name": {
+ "type": "string"
+ },
+ "in": {
+ "type": "string",
+ "enum": [
+ "header",
+ "query"
+ ]
+ },
+ "description": {
+ "type": "string"
+ }
+ },
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "oauth2ImplicitSecurity": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "flow",
+ "authorizationUrl"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": [
+ "oauth2"
+ ]
+ },
+ "flow": {
+ "type": "string",
+ "enum": [
+ "implicit"
+ ]
+ },
+ "scopes": {
+ "$ref": "#/definitions/oauth2Scopes"
+ },
+ "authorizationUrl": {
+ "type": "string",
+ "format": "uri"
+ },
+ "description": {
+ "type": "string"
+ }
+ },
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "oauth2PasswordSecurity": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "flow",
+ "tokenUrl"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": [
+ "oauth2"
+ ]
+ },
+ "flow": {
+ "type": "string",
+ "enum": [
+ "password"
+ ]
+ },
+ "scopes": {
+ "$ref": "#/definitions/oauth2Scopes"
+ },
+ "tokenUrl": {
+ "type": "string",
+ "format": "uri"
+ },
+ "description": {
+ "type": "string"
+ }
+ },
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "oauth2ApplicationSecurity": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "flow",
+ "tokenUrl"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": [
+ "oauth2"
+ ]
+ },
+ "flow": {
+ "type": "string",
+ "enum": [
+ "application"
+ ]
+ },
+ "scopes": {
+ "$ref": "#/definitions/oauth2Scopes"
+ },
+ "tokenUrl": {
+ "type": "string",
+ "format": "uri"
+ },
+ "description": {
+ "type": "string"
+ }
+ },
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "oauth2AccessCodeSecurity": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "flow",
+ "authorizationUrl",
+ "tokenUrl"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": [
+ "oauth2"
+ ]
+ },
+ "flow": {
+ "type": "string",
+ "enum": [
+ "accessCode"
+ ]
+ },
+ "scopes": {
+ "$ref": "#/definitions/oauth2Scopes"
+ },
+ "authorizationUrl": {
+ "type": "string",
+ "format": "uri"
+ },
+ "tokenUrl": {
+ "type": "string",
+ "format": "uri"
+ },
+ "description": {
+ "type": "string"
+ }
+ },
+ "patternProperties": {
+ "^x-": {
+ "$ref": "#/definitions/vendorExtension"
+ }
+ }
+ },
+ "oauth2Scopes": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ }
+ },
+ "mediaTypeList": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/mimeType"
+ },
+ "uniqueItems": true
+ },
+ "parametersList": {
+ "type": "array",
+ "description": "The parameters needed to send a valid API call.",
+ "additionalItems": false,
+ "items": {
+ "oneOf": [
+ {
+ "$ref": "#/definitions/parameter"
+ },
+ {
+ "$ref": "#/definitions/jsonReference"
+ }
+ ]
+ },
+ "uniqueItems": true
+ },
+ "schemesList": {
+ "type": "array",
+ "description": "The transfer protocol of the API.",
+ "items": {
+ "type": "string",
+ "enum": [
+ "http",
+ "https",
+ "ws",
+ "wss"
+ ]
+ },
+ "uniqueItems": true
+ },
+ "collectionFormat": {
+ "type": "string",
+ "enum": [
+ "csv",
+ "ssv",
+ "tsv",
+ "pipes"
+ ],
+ "default": "csv"
+ },
+ "collectionFormatWithMulti": {
+ "type": "string",
+ "enum": [
+ "csv",
+ "ssv",
+ "tsv",
+ "pipes",
+ "multi"
+ ],
+ "default": "csv"
+ },
+ "title": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/title"
+ },
+ "description": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/description"
+ },
+ "default": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/default"
+ },
+ "multipleOf": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf"
+ },
+ "maximum": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum"
+ },
+ "exclusiveMaximum": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum"
+ },
+ "minimum": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum"
+ },
+ "exclusiveMinimum": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum"
+ },
+ "maxLength": {
+ "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger"
+ },
+ "minLength": {
+ "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0"
+ },
+ "pattern": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern"
+ },
+ "maxItems": {
+ "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger"
+ },
+ "minItems": {
+ "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0"
+ },
+ "uniqueItems": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems"
+ },
+ "enum": {
+ "$ref": "http://json-schema.org/draft-04/schema#/properties/enum"
+ },
+ "jsonReference": {
+ "type": "object",
+ "required": [
+ "$ref"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "$ref": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ },
+ "tests": [
+ {
+ "description": "Example petsore",
+ "data": {
+ "swagger": "2.0",
+ "info": {
+ "description": "This is a sample server Petstore server. You can find out more about Swagger at [http://swagger.io](http://swagger.io) or on [irc.freenode.net, #swagger](http://swagger.io/irc/). For this sample, you can use the api key `special-key` to test the authorization filters.",
+ "version": "1.0.0",
+ "title": "Swagger Petstore",
+ "termsOfService": "http://swagger.io/terms/",
+ "contact": {
+ "email": "apiteam@swagger.io"
+ },
+ "license": {
+ "name": "Apache 2.0",
+ "url": "http://www.apache.org/licenses/LICENSE-2.0.html"
+ }
+ },
+ "host": "petstore.swagger.io",
+ "basePath": "/v2",
+ "tags": [
+ {
+ "name": "pet",
+ "description": "Everything about your Pets",
+ "externalDocs": {
+ "description": "Find out more",
+ "url": "http://swagger.io"
+ }
+ },
+ {
+ "name": "store",
+ "description": "Access to Petstore orders"
+ },
+ {
+ "name": "user",
+ "description": "Operations about user",
+ "externalDocs": {
+ "description": "Find out more about our store",
+ "url": "http://swagger.io"
+ }
+ }
+ ],
+ "schemes": [
+ "http"
+ ],
+ "paths": {
+ "/pet": {
+ "post": {
+ "tags": [
+ "pet"
+ ],
+ "summary": "Add a new pet to the store",
+ "description": "",
+ "operationId": "addPet",
+ "consumes": [
+ "application/json",
+ "application/xml"
+ ],
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "in": "body",
+ "name": "body",
+ "description": "Pet object that needs to be added to the store",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/Pet"
+ }
+ }
+ ],
+ "responses": {
+ "405": {
+ "description": "Invalid input"
+ }
+ },
+ "security": [
+ {
+ "petstore_auth": [
+ "write:pets",
+ "read:pets"
+ ]
+ }
+ ]
+ },
+ "put": {
+ "tags": [
+ "pet"
+ ],
+ "summary": "Update an existing pet",
+ "description": "",
+ "operationId": "updatePet",
+ "consumes": [
+ "application/json",
+ "application/xml"
+ ],
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "in": "body",
+ "name": "body",
+ "description": "Pet object that needs to be added to the store",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/Pet"
+ }
+ }
+ ],
+ "responses": {
+ "400": {
+ "description": "Invalid ID supplied"
+ },
+ "404": {
+ "description": "Pet not found"
+ },
+ "405": {
+ "description": "Validation exception"
+ }
+ },
+ "security": [
+ {
+ "petstore_auth": [
+ "write:pets",
+ "read:pets"
+ ]
+ }
+ ]
+ }
+ },
+ "/pet/findByStatus": {
+ "get": {
+ "tags": [
+ "pet"
+ ],
+ "summary": "Finds Pets by status",
+ "description": "Multiple status values can be provided with comma separated strings",
+ "operationId": "findPetsByStatus",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "status",
+ "in": "query",
+ "description": "Status values that need to be considered for filter",
+ "required": true,
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": [
+ "available",
+ "pending",
+ "sold"
+ ],
+ "default": "available"
+ },
+ "collectionFormat": "multi"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "successful operation",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/Pet"
+ }
+ }
+ },
+ "400": {
+ "description": "Invalid status value"
+ }
+ },
+ "security": [
+ {
+ "petstore_auth": [
+ "write:pets",
+ "read:pets"
+ ]
+ }
+ ]
+ }
+ },
+ "/pet/findByTags": {
+ "get": {
+ "tags": [
+ "pet"
+ ],
+ "summary": "Finds Pets by tags",
+ "description": "Muliple tags can be provided with comma separated strings. Use tag1, tag2, tag3 for testing.",
+ "operationId": "findPetsByTags",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "tags",
+ "in": "query",
+ "description": "Tags to filter by",
+ "required": true,
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "collectionFormat": "multi"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "successful operation",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/Pet"
+ }
+ }
+ },
+ "400": {
+ "description": "Invalid tag value"
+ }
+ },
+ "security": [
+ {
+ "petstore_auth": [
+ "write:pets",
+ "read:pets"
+ ]
+ }
+ ],
+ "deprecated": true
+ }
+ },
+ "/pet/{petId}": {
+ "get": {
+ "tags": [
+ "pet"
+ ],
+ "summary": "Find pet by ID",
+ "description": "Returns a single pet",
+ "operationId": "getPetById",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "petId",
+ "in": "path",
+ "description": "ID of pet to return",
+ "required": true,
+ "type": "integer",
+ "format": "int64"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "successful operation",
+ "schema": {
+ "$ref": "#/definitions/Pet"
+ }
+ },
+ "400": {
+ "description": "Invalid ID supplied"
+ },
+ "404": {
+ "description": "Pet not found"
+ }
+ },
+ "security": [
+ {
+ "api_key": []
+ }
+ ]
+ },
+ "post": {
+ "tags": [
+ "pet"
+ ],
+ "summary": "Updates a pet in the store with form data",
+ "description": "",
+ "operationId": "updatePetWithForm",
+ "consumes": [
+ "application/x-www-form-urlencoded"
+ ],
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "petId",
+ "in": "path",
+ "description": "ID of pet that needs to be updated",
+ "required": true,
+ "type": "integer",
+ "format": "int64"
+ },
+ {
+ "name": "name",
+ "in": "formData",
+ "description": "Updated name of the pet",
+ "required": false,
+ "type": "string"
+ },
+ {
+ "name": "status",
+ "in": "formData",
+ "description": "Updated status of the pet",
+ "required": false,
+ "type": "string"
+ }
+ ],
+ "responses": {
+ "405": {
+ "description": "Invalid input"
+ }
+ },
+ "security": [
+ {
+ "petstore_auth": [
+ "write:pets",
+ "read:pets"
+ ]
+ }
+ ]
+ },
+ "delete": {
+ "tags": [
+ "pet"
+ ],
+ "summary": "Deletes a pet",
+ "description": "",
+ "operationId": "deletePet",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "api_key",
+ "in": "header",
+ "required": false,
+ "type": "string"
+ },
+ {
+ "name": "petId",
+ "in": "path",
+ "description": "Pet id to delete",
+ "required": true,
+ "type": "integer",
+ "format": "int64"
+ }
+ ],
+ "responses": {
+ "400": {
+ "description": "Invalid ID supplied"
+ },
+ "404": {
+ "description": "Pet not found"
+ }
+ },
+ "security": [
+ {
+ "petstore_auth": [
+ "write:pets",
+ "read:pets"
+ ]
+ }
+ ]
+ }
+ },
+ "/pet/{petId}/uploadImage": {
+ "post": {
+ "tags": [
+ "pet"
+ ],
+ "summary": "uploads an image",
+ "description": "",
+ "operationId": "uploadFile",
+ "consumes": [
+ "multipart/form-data"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "petId",
+ "in": "path",
+ "description": "ID of pet to update",
+ "required": true,
+ "type": "integer",
+ "format": "int64"
+ },
+ {
+ "name": "additionalMetadata",
+ "in": "formData",
+ "description": "Additional data to pass to server",
+ "required": false,
+ "type": "string"
+ },
+ {
+ "name": "file",
+ "in": "formData",
+ "description": "file to upload",
+ "required": false,
+ "type": "file"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "successful operation",
+ "schema": {
+ "$ref": "#/definitions/ApiResponse"
+ }
+ }
+ },
+ "security": [
+ {
+ "petstore_auth": [
+ "write:pets",
+ "read:pets"
+ ]
+ }
+ ]
+ }
+ },
+ "/store/inventory": {
+ "get": {
+ "tags": [
+ "store"
+ ],
+ "summary": "Returns pet inventories by status",
+ "description": "Returns a map of status codes to quantities",
+ "operationId": "getInventory",
+ "produces": [
+ "application/json"
+ ],
+ "parameters": [],
+ "responses": {
+ "200": {
+ "description": "successful operation",
+ "schema": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "integer",
+ "format": "int32"
+ }
+ }
+ }
+ },
+ "security": [
+ {
+ "api_key": []
+ }
+ ]
+ }
+ },
+ "/store/order": {
+ "post": {
+ "tags": [
+ "store"
+ ],
+ "summary": "Place an order for a pet",
+ "description": "",
+ "operationId": "placeOrder",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "in": "body",
+ "name": "body",
+ "description": "order placed for purchasing the pet",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/Order"
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "successful operation",
+ "schema": {
+ "$ref": "#/definitions/Order"
+ }
+ },
+ "400": {
+ "description": "Invalid Order"
+ }
+ }
+ }
+ },
+ "/store/order/{orderId}": {
+ "get": {
+ "tags": [
+ "store"
+ ],
+ "summary": "Find purchase order by ID",
+ "description": "For valid response try integer IDs with value >= 1 and <= 10. Other values will generated exceptions",
+ "operationId": "getOrderById",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "orderId",
+ "in": "path",
+ "description": "ID of pet that needs to be fetched",
+ "required": true,
+ "type": "integer",
+ "maximum": 10.0,
+ "minimum": 1.0,
+ "format": "int64"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "successful operation",
+ "schema": {
+ "$ref": "#/definitions/Order"
+ }
+ },
+ "400": {
+ "description": "Invalid ID supplied"
+ },
+ "404": {
+ "description": "Order not found"
+ }
+ }
+ },
+ "delete": {
+ "tags": [
+ "store"
+ ],
+ "summary": "Delete purchase order by ID",
+ "description": "For valid response try integer IDs with positive integer value. Negative or non-integer values will generate API errors",
+ "operationId": "deleteOrder",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "orderId",
+ "in": "path",
+ "description": "ID of the order that needs to be deleted",
+ "required": true,
+ "type": "integer",
+ "minimum": 1.0,
+ "format": "int64"
+ }
+ ],
+ "responses": {
+ "400": {
+ "description": "Invalid ID supplied"
+ },
+ "404": {
+ "description": "Order not found"
+ }
+ }
+ }
+ },
+ "/user": {
+ "post": {
+ "tags": [
+ "user"
+ ],
+ "summary": "Create user",
+ "description": "This can only be done by the logged in user.",
+ "operationId": "createUser",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "in": "body",
+ "name": "body",
+ "description": "Created user object",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/User"
+ }
+ }
+ ],
+ "responses": {
+ "default": {
+ "description": "successful operation"
+ }
+ }
+ }
+ },
+ "/user/createWithArray": {
+ "post": {
+ "tags": [
+ "user"
+ ],
+ "summary": "Creates list of users with given input array",
+ "description": "",
+ "operationId": "createUsersWithArrayInput",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "in": "body",
+ "name": "body",
+ "description": "List of user object",
+ "required": true,
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/User"
+ }
+ }
+ }
+ ],
+ "responses": {
+ "default": {
+ "description": "successful operation"
+ }
+ }
+ }
+ },
+ "/user/createWithList": {
+ "post": {
+ "tags": [
+ "user"
+ ],
+ "summary": "Creates list of users with given input array",
+ "description": "",
+ "operationId": "createUsersWithListInput",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "in": "body",
+ "name": "body",
+ "description": "List of user object",
+ "required": true,
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/User"
+ }
+ }
+ }
+ ],
+ "responses": {
+ "default": {
+ "description": "successful operation"
+ }
+ }
+ }
+ },
+ "/user/login": {
+ "get": {
+ "tags": [
+ "user"
+ ],
+ "summary": "Logs user into the system",
+ "description": "",
+ "operationId": "loginUser",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "username",
+ "in": "query",
+ "description": "The user name for login",
+ "required": true,
+ "type": "string"
+ },
+ {
+ "name": "password",
+ "in": "query",
+ "description": "The password for login in clear text",
+ "required": true,
+ "type": "string"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "successful operation",
+ "schema": {
+ "type": "string"
+ },
+ "headers": {
+ "X-Rate-Limit": {
+ "type": "integer",
+ "format": "int32",
+ "description": "calls per hour allowed by the user"
+ },
+ "X-Expires-After": {
+ "type": "string",
+ "format": "date-time",
+ "description": "date in UTC when token expires"
+ }
+ }
+ },
+ "400": {
+ "description": "Invalid username/password supplied"
+ }
+ }
+ }
+ },
+ "/user/logout": {
+ "get": {
+ "tags": [
+ "user"
+ ],
+ "summary": "Logs out current logged in user session",
+ "description": "",
+ "operationId": "logoutUser",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [],
+ "responses": {
+ "default": {
+ "description": "successful operation"
+ }
+ }
+ }
+ },
+ "/user/{username}": {
+ "get": {
+ "tags": [
+ "user"
+ ],
+ "summary": "Get user by user name",
+ "description": "",
+ "operationId": "getUserByName",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "username",
+ "in": "path",
+ "description": "The name that needs to be fetched. Use user1 for testing. ",
+ "required": true,
+ "type": "string"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "successful operation",
+ "schema": {
+ "$ref": "#/definitions/User"
+ }
+ },
+ "400": {
+ "description": "Invalid username supplied"
+ },
+ "404": {
+ "description": "User not found"
+ }
+ }
+ },
+ "put": {
+ "tags": [
+ "user"
+ ],
+ "summary": "Updated user",
+ "description": "This can only be done by the logged in user.",
+ "operationId": "updateUser",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "username",
+ "in": "path",
+ "description": "name that need to be updated",
+ "required": true,
+ "type": "string"
+ },
+ {
+ "in": "body",
+ "name": "body",
+ "description": "Updated user object",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/User"
+ }
+ }
+ ],
+ "responses": {
+ "400": {
+ "description": "Invalid user supplied"
+ },
+ "404": {
+ "description": "User not found"
+ }
+ }
+ },
+ "delete": {
+ "tags": [
+ "user"
+ ],
+ "summary": "Delete user",
+ "description": "This can only be done by the logged in user.",
+ "operationId": "deleteUser",
+ "produces": [
+ "application/xml",
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "username",
+ "in": "path",
+ "description": "The name that needs to be deleted",
+ "required": true,
+ "type": "string"
+ }
+ ],
+ "responses": {
+ "400": {
+ "description": "Invalid username supplied"
+ },
+ "404": {
+ "description": "User not found"
+ }
+ }
+ }
+ }
+ },
+ "securityDefinitions": {
+ "petstore_auth": {
+ "type": "oauth2",
+ "authorizationUrl": "http://petstore.swagger.io/oauth/dialog",
+ "flow": "implicit",
+ "scopes": {
+ "write:pets": "modify pets in your account",
+ "read:pets": "read your pets"
+ }
+ },
+ "api_key": {
+ "type": "apiKey",
+ "name": "api_key",
+ "in": "header"
+ }
+ },
+ "definitions": {
+ "Order": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "petId": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "quantity": {
+ "type": "integer",
+ "format": "int32"
+ },
+ "shipDate": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "status": {
+ "type": "string",
+ "description": "Order Status",
+ "enum": [
+ "placed",
+ "approved",
+ "delivered"
+ ]
+ },
+ "complete": {
+ "type": "boolean",
+ "default": false
+ }
+ },
+ "xml": {
+ "name": "Order"
+ }
+ },
+ "Category": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "name": {
+ "type": "string"
+ }
+ },
+ "xml": {
+ "name": "Category"
+ }
+ },
+ "User": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "username": {
+ "type": "string"
+ },
+ "firstName": {
+ "type": "string"
+ },
+ "lastName": {
+ "type": "string"
+ },
+ "email": {
+ "type": "string"
+ },
+ "password": {
+ "type": "string"
+ },
+ "phone": {
+ "type": "string"
+ },
+ "userStatus": {
+ "type": "integer",
+ "format": "int32",
+ "description": "User Status"
+ }
+ },
+ "xml": {
+ "name": "User"
+ }
+ },
+ "Tag": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "name": {
+ "type": "string"
+ }
+ },
+ "xml": {
+ "name": "Tag"
+ }
+ },
+ "Pet": {
+ "type": "object",
+ "required": [
+ "name",
+ "photoUrls"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "category": {
+ "$ref": "#/definitions/Category"
+ },
+ "name": {
+ "type": "string",
+ "example": "doggie"
+ },
+ "photoUrls": {
+ "type": "array",
+ "xml": {
+ "name": "photoUrl",
+ "wrapped": true
+ },
+ "items": {
+ "type": "string"
+ }
+ },
+ "tags": {
+ "type": "array",
+ "xml": {
+ "name": "tag",
+ "wrapped": true
+ },
+ "items": {
+ "$ref": "#/definitions/Tag"
+ }
+ },
+ "status": {
+ "type": "string",
+ "description": "pet status in the store",
+ "enum": [
+ "available",
+ "pending",
+ "sold"
+ ]
+ }
+ },
+ "xml": {
+ "name": "Pet"
+ }
+ },
+ "ApiResponse": {
+ "type": "object",
+ "properties": {
+ "code": {
+ "type": "integer",
+ "format": "int32"
+ },
+ "type": {
+ "type": "string"
+ },
+ "message": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ "externalDocs": {
+ "description": "Find out more about Swagger",
+ "url": "http://swagger.io"
+ }
+ },
+ "valid": true
+ }
+ ]
+ }
+]
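Editorial note (not part of the patch): the large document above pairs a Swagger 2.0 meta-schema with a petstore example in JSON-Schema-Test-Suite form. A hedged sketch of how such an entry could be exercised with jsonschema; the file path is a placeholder and the schema/tests layout is assumed from the structure shown above.

import json
from jsonschema import Draft4Validator

# Placeholder path: wherever this swagger/petstore test document is stored.
with open("swagger-petstore-test.json") as file:
    case = json.load(file)[0]  # the file is a JSON array of schema/test groups

schema = case["schema"]
for test in case["tests"]:
    errors = list(Draft4Validator(schema).iter_errors(test["data"]))
    # each test entry carries the expected outcome in its "valid" flag
    assert (not errors) == test["valid"]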
diff --git a/third_party/python/jsonschema/jsonschema/benchmarks/json_schema_test_suite.py b/third_party/python/jsonschema/jsonschema/benchmarks/json_schema_test_suite.py
new file mode 100644
index 0000000000..5add5051df
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/benchmarks/json_schema_test_suite.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+"""
+A performance benchmark using the official test suite.
+
+This benchmarks jsonschema using every valid example in the
+JSON-Schema-Test-Suite. It will take some time to complete.
+"""
+from pyperf import Runner
+
+from jsonschema.tests._suite import Suite
+
+
+if __name__ == "__main__":
+ Suite().benchmark(runner=Runner())
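Editorial note (not part of the patch): a minimal sketch of the same pyperf mechanism the script above drives across the whole test suite, assuming pyperf and jsonschema are importable; the schema and instance are illustrative.

from pyperf import Runner
from jsonschema import Draft7Validator

validator = Draft7Validator({"type": "integer", "minimum": 0})

def validate_once():
    # one validation pass, the unit of work pyperf will time repeatedly
    validator.validate(12)

if __name__ == "__main__":
    Runner().bench_func("single-validation", validate_once)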
diff --git a/third_party/python/jsonschema/jsonschema/cli.py b/third_party/python/jsonschema/jsonschema/cli.py
new file mode 100644
index 0000000000..ab3335b27c
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/cli.py
@@ -0,0 +1,90 @@
+"""
+The ``jsonschema`` command line.
+"""
+from __future__ import absolute_import
+import argparse
+import json
+import sys
+
+from jsonschema import __version__
+from jsonschema._reflect import namedAny
+from jsonschema.validators import validator_for
+
+
+def _namedAnyWithDefault(name):
+ if "." not in name:
+ name = "jsonschema." + name
+ return namedAny(name)
+
+
+def _json_file(path):
+ with open(path) as file:
+ return json.load(file)
+
+
+parser = argparse.ArgumentParser(
+ description="JSON Schema Validation CLI",
+)
+parser.add_argument(
+ "-i", "--instance",
+ action="append",
+ dest="instances",
+ type=_json_file,
+ help=(
+ "a path to a JSON instance (i.e. filename.json) "
+ "to validate (may be specified multiple times)"
+ ),
+)
+parser.add_argument(
+ "-F", "--error-format",
+ default="{error.instance}: {error.message}\n",
+ help=(
+ "the format to use for each error output message, specified in "
+ "a form suitable for passing to str.format, which will be called "
+ "with 'error' for each error"
+ ),
+)
+parser.add_argument(
+ "-V", "--validator",
+ type=_namedAnyWithDefault,
+ help=(
+ "the fully qualified object name of a validator to use, or, for "
+ "validators that are registered with jsonschema, simply the name "
+ "of the class."
+ ),
+)
+parser.add_argument(
+ "--version",
+ action="version",
+ version=__version__,
+)
+parser.add_argument(
+ "schema",
+ help="the JSON Schema to validate with (i.e. schema.json)",
+ type=_json_file,
+)
+
+
+def parse_args(args):
+ arguments = vars(parser.parse_args(args=args or ["--help"]))
+ if arguments["validator"] is None:
+ arguments["validator"] = validator_for(arguments["schema"])
+ return arguments
+
+
+def main(args=sys.argv[1:]):
+ sys.exit(run(arguments=parse_args(args=args)))
+
+
+def run(arguments, stdout=sys.stdout, stderr=sys.stderr):
+ error_format = arguments["error_format"]
+ validator = arguments["validator"](schema=arguments["schema"])
+
+ validator.check_schema(arguments["schema"])
+
+ errored = False
+ for instance in arguments["instances"] or ():
+ for error in validator.iter_errors(instance):
+ stderr.write(error_format.format(error=error))
+ errored = True
+ return errored
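Editorial note (not part of the patch): a hedged sketch of driving the CLI above programmatically via parse_args() and run(); "schema.json" and "instance.json" are placeholder file names, not files shipped by this patch.

import sys
from jsonschema.cli import parse_args, run

# parse_args loads the JSON files and picks a validator via validator_for()
arguments = parse_args(["-i", "instance.json", "schema.json"])
# run() returns a truthy value when any instance had validation errors
sys.exit(run(arguments, stdout=sys.stdout, stderr=sys.stderr))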
diff --git a/third_party/python/jsonschema/jsonschema/compat.py b/third_party/python/jsonschema/jsonschema/compat.py
new file mode 100644
index 0000000000..47e0980455
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/compat.py
@@ -0,0 +1,55 @@
+"""
+Python 2/3 compatibility helpers.
+
+Note: This module is *not* public API.
+"""
+import contextlib
+import operator
+import sys
+
+
+try:
+ from collections.abc import MutableMapping, Sequence # noqa
+except ImportError:
+ from collections import MutableMapping, Sequence # noqa
+
+PY3 = sys.version_info[0] >= 3
+
+if PY3:
+ zip = zip
+ from functools import lru_cache
+ from io import StringIO as NativeIO
+ from urllib.parse import (
+ unquote, urljoin, urlunsplit, SplitResult, urlsplit
+ )
+ from urllib.request import pathname2url, urlopen
+ str_types = str,
+ int_types = int,
+ iteritems = operator.methodcaller("items")
+else:
+ from itertools import izip as zip # noqa
+ from io import BytesIO as NativeIO
+ from urlparse import urljoin, urlunsplit, SplitResult, urlsplit
+ from urllib import pathname2url, unquote # noqa
+ import urllib2 # noqa
+ def urlopen(*args, **kwargs):
+ return contextlib.closing(urllib2.urlopen(*args, **kwargs))
+
+ str_types = basestring
+ int_types = int, long
+ iteritems = operator.methodcaller("iteritems")
+
+ from functools32 import lru_cache
+
+
+def urldefrag(url):
+ if "#" in url:
+ s, n, p, q, frag = urlsplit(url)
+ defrag = urlunsplit((s, n, p, q, ""))
+ else:
+ defrag = url
+ frag = ""
+ return defrag, frag
+
+
+# flake8: noqa
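Editorial note (not part of the patch): a small usage sketch of the urldefrag() helper above, whose behavior is inferred from the code (it splits the fragment off a reference URL).

from jsonschema.compat import urldefrag

base, fragment = urldefrag("http://example.com/schema.json#/definitions/Pet")
# the de-fragmented URL and the bare fragment come back as a pair
assert base == "http://example.com/schema.json"
assert fragment == "/definitions/Pet"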
diff --git a/third_party/python/jsonschema/jsonschema/exceptions.py b/third_party/python/jsonschema/jsonschema/exceptions.py
new file mode 100644
index 0000000000..691dcffe6c
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/exceptions.py
@@ -0,0 +1,374 @@
+"""
+Validation errors, and some surrounding helpers.
+"""
+from collections import defaultdict, deque
+import itertools
+import pprint
+import textwrap
+
+import attr
+
+from jsonschema import _utils
+from jsonschema.compat import PY3, iteritems
+
+
+WEAK_MATCHES = frozenset(["anyOf", "oneOf"])
+STRONG_MATCHES = frozenset()
+
+_unset = _utils.Unset()
+
+
+class _Error(Exception):
+ def __init__(
+ self,
+ message,
+ validator=_unset,
+ path=(),
+ cause=None,
+ context=(),
+ validator_value=_unset,
+ instance=_unset,
+ schema=_unset,
+ schema_path=(),
+ parent=None,
+ ):
+ super(_Error, self).__init__(
+ message,
+ validator,
+ path,
+ cause,
+ context,
+ validator_value,
+ instance,
+ schema,
+ schema_path,
+ parent,
+ )
+ self.message = message
+ self.path = self.relative_path = deque(path)
+ self.schema_path = self.relative_schema_path = deque(schema_path)
+ self.context = list(context)
+ self.cause = self.__cause__ = cause
+ self.validator = validator
+ self.validator_value = validator_value
+ self.instance = instance
+ self.schema = schema
+ self.parent = parent
+
+ for error in context:
+ error.parent = self
+
+ def __repr__(self):
+ return "<%s: %r>" % (self.__class__.__name__, self.message)
+
+ def __unicode__(self):
+ essential_for_verbose = (
+ self.validator, self.validator_value, self.instance, self.schema,
+ )
+ if any(m is _unset for m in essential_for_verbose):
+ return self.message
+
+ pschema = pprint.pformat(self.schema, width=72)
+ pinstance = pprint.pformat(self.instance, width=72)
+ return self.message + textwrap.dedent("""
+
+ Failed validating %r in %s%s:
+ %s
+
+ On %s%s:
+ %s
+ """.rstrip()
+ ) % (
+ self.validator,
+ self._word_for_schema_in_error_message,
+ _utils.format_as_index(list(self.relative_schema_path)[:-1]),
+ _utils.indent(pschema),
+ self._word_for_instance_in_error_message,
+ _utils.format_as_index(self.relative_path),
+ _utils.indent(pinstance),
+ )
+
+ if PY3:
+ __str__ = __unicode__
+ else:
+ def __str__(self):
+ return unicode(self).encode("utf-8")
+
+ @classmethod
+ def create_from(cls, other):
+ return cls(**other._contents())
+
+ @property
+ def absolute_path(self):
+ parent = self.parent
+ if parent is None:
+ return self.relative_path
+
+ path = deque(self.relative_path)
+ path.extendleft(reversed(parent.absolute_path))
+ return path
+
+ @property
+ def absolute_schema_path(self):
+ parent = self.parent
+ if parent is None:
+ return self.relative_schema_path
+
+ path = deque(self.relative_schema_path)
+ path.extendleft(reversed(parent.absolute_schema_path))
+ return path
+
+ def _set(self, **kwargs):
+ for k, v in iteritems(kwargs):
+ if getattr(self, k) is _unset:
+ setattr(self, k, v)
+
+ def _contents(self):
+ attrs = (
+ "message", "cause", "context", "validator", "validator_value",
+ "path", "schema_path", "instance", "schema", "parent",
+ )
+ return dict((attr, getattr(self, attr)) for attr in attrs)
+
+
+class ValidationError(_Error):
+ """
+ An instance was invalid under a provided schema.
+ """
+
+ _word_for_schema_in_error_message = "schema"
+ _word_for_instance_in_error_message = "instance"
+
+
+class SchemaError(_Error):
+ """
+ A schema was invalid under its corresponding metaschema.
+ """
+
+ _word_for_schema_in_error_message = "metaschema"
+ _word_for_instance_in_error_message = "schema"
+
+
+@attr.s(hash=True)
+class RefResolutionError(Exception):
+ """
+ A ref could not be resolved.
+ """
+
+ _cause = attr.ib()
+
+ def __str__(self):
+ return str(self._cause)
+
+
+class UndefinedTypeCheck(Exception):
+ """
+ A type checker was asked to check a type it did not have registered.
+ """
+
+ def __init__(self, type):
+ self.type = type
+
+ def __unicode__(self):
+ return "Type %r is unknown to this type checker" % self.type
+
+ if PY3:
+ __str__ = __unicode__
+ else:
+ def __str__(self):
+ return unicode(self).encode("utf-8")
+
+
+class UnknownType(Exception):
+ """
+ A validator was asked to validate an instance against an unknown type.
+ """
+
+ def __init__(self, type, instance, schema):
+ self.type = type
+ self.instance = instance
+ self.schema = schema
+
+ def __unicode__(self):
+ pschema = pprint.pformat(self.schema, width=72)
+ pinstance = pprint.pformat(self.instance, width=72)
+ return textwrap.dedent("""
+ Unknown type %r for validator with schema:
+ %s
+
+ While checking instance:
+ %s
+ """.rstrip()
+ ) % (self.type, _utils.indent(pschema), _utils.indent(pinstance))
+
+ if PY3:
+ __str__ = __unicode__
+ else:
+ def __str__(self):
+ return unicode(self).encode("utf-8")
+
+
+class FormatError(Exception):
+ """
+ Validating a format failed.
+ """
+
+ def __init__(self, message, cause=None):
+ super(FormatError, self).__init__(message, cause)
+ self.message = message
+ self.cause = self.__cause__ = cause
+
+ def __unicode__(self):
+ return self.message
+
+ if PY3:
+ __str__ = __unicode__
+ else:
+ def __str__(self):
+ return self.message.encode("utf-8")
+
+
+class ErrorTree(object):
+ """
+ ErrorTrees make it easier to check which validations failed.
+ """
+
+ _instance = _unset
+
+ def __init__(self, errors=()):
+ self.errors = {}
+ self._contents = defaultdict(self.__class__)
+
+ for error in errors:
+ container = self
+ for element in error.path:
+ container = container[element]
+ container.errors[error.validator] = error
+
+ container._instance = error.instance
+
+ def __contains__(self, index):
+ """
+ Check whether ``instance[index]`` has any errors.
+ """
+
+ return index in self._contents
+
+ def __getitem__(self, index):
+ """
+ Retrieve the child tree one level down at the given ``index``.
+
+ If the index is not in the instance that this tree corresponds to and
+ is not known by this tree, whatever error would be raised by
+ ``instance.__getitem__`` will be propagated (usually this is some
+ subclass of `exceptions.LookupError`).
+ """
+
+ if self._instance is not _unset and index not in self:
+ self._instance[index]
+ return self._contents[index]
+
+ def __setitem__(self, index, value):
+ """
+ Add an error to the tree at the given ``index``.
+ """
+ self._contents[index] = value
+
+ def __iter__(self):
+ """
+ Iterate (non-recursively) over the indices in the instance with errors.
+ """
+
+ return iter(self._contents)
+
+ def __len__(self):
+ """
+ Return the `total_errors`.
+ """
+ return self.total_errors
+
+ def __repr__(self):
+ return "<%s (%s total errors)>" % (self.__class__.__name__, len(self))
+
+ @property
+ def total_errors(self):
+ """
+ The total number of errors in the entire tree, including children.
+ """
+
+ child_errors = sum(len(tree) for _, tree in iteritems(self._contents))
+ return len(self.errors) + child_errors
+
+
+def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES):
+ """
+ Create a key function that can be used to sort errors by relevance.
+
+ Arguments:
+ weak (set):
+ a collection of validator names to consider to be "weak".
+ If there are two errors at the same level of the instance
+ and one is in the set of weak validator names, the other
+ error will take priority. By default, :validator:`anyOf` and
+ :validator:`oneOf` are considered weak validators and will
+ be superseded by other same-level validation errors.
+
+ strong (set):
+ a collection of validator names to consider to be "strong"
+ """
+ def relevance(error):
+ validator = error.validator
+ return -len(error.path), validator not in weak, validator in strong
+ return relevance
+
+
+relevance = by_relevance()
+
+
+def best_match(errors, key=relevance):
+ """
+ Try to find an error that appears to be the best match among given errors.
+
+ In general, errors that are higher up in the instance (i.e. for which
+ `ValidationError.path` is shorter) are considered better matches,
+ since they indicate "more" is wrong with the instance.
+
+ If the resulting match is either :validator:`oneOf` or :validator:`anyOf`,
+ the *opposite* assumption is made -- i.e. the deepest error is picked,
+ since these validators only need to match once, and any other errors may
+ not be relevant.
+
+ Arguments:
+ errors (collections.Iterable):
+
+ the errors to select from. Do not provide a mixture of
+ errors from different validation attempts (i.e. from
+ different instances or schemas), since it won't produce
+ sensible output.
+
+ key (collections.Callable):
+
+ the key to use when sorting errors. See `relevance` and
+ transitively `by_relevance` for more details (the default is
+ to sort with the defaults of that function). Changing the
+ default is only useful if you want to change the function
+ that rates errors but still want the error context descent
+ done by this function.
+
+ Returns:
+ the best matching error, or ``None`` if the iterable was empty
+
+ .. note::
+
+ This function is a heuristic. Its return value may change for a given
+ set of inputs from version to version if better heuristics are added.
+ """
+ errors = iter(errors)
+ best = next(errors, None)
+ if best is None:
+ return
+ best = max(itertools.chain([best], errors), key=key)
+
+ while best.context:
+ best = min(best.context, key=key)
+ return best
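Editorial note (not part of the patch): a hedged usage sketch of best_match() and ErrorTree defined above; the schema and instance are illustrative only.

from jsonschema import Draft7Validator
from jsonschema.exceptions import ErrorTree, best_match

schema = {"properties": {"age": {"type": "integer", "minimum": 0}}}
errors = list(Draft7Validator(schema).iter_errors({"age": -1}))

# best_match picks the most relevant error; ErrorTree indexes errors by path
print(best_match(errors).message)
tree = ErrorTree(errors)
print("age" in tree, list(tree["age"].errors))  # True, ['minimum']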
diff --git a/third_party/python/jsonschema/jsonschema/schemas/draft3.json b/third_party/python/jsonschema/jsonschema/schemas/draft3.json
new file mode 100644
index 0000000000..f8a09c563b
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/schemas/draft3.json
@@ -0,0 +1,199 @@
+{
+ "$schema": "http://json-schema.org/draft-03/schema#",
+ "dependencies": {
+ "exclusiveMaximum": "maximum",
+ "exclusiveMinimum": "minimum"
+ },
+ "id": "http://json-schema.org/draft-03/schema#",
+ "properties": {
+ "$ref": {
+ "format": "uri",
+ "type": "string"
+ },
+ "$schema": {
+ "format": "uri",
+ "type": "string"
+ },
+ "additionalItems": {
+ "default": {},
+ "type": [
+ {
+ "$ref": "#"
+ },
+ "boolean"
+ ]
+ },
+ "additionalProperties": {
+ "default": {},
+ "type": [
+ {
+ "$ref": "#"
+ },
+ "boolean"
+ ]
+ },
+ "default": {
+ "type": "any"
+ },
+ "dependencies": {
+ "additionalProperties": {
+ "items": {
+ "type": "string"
+ },
+ "type": [
+ "string",
+ "array",
+ {
+ "$ref": "#"
+ }
+ ]
+ },
+ "default": {},
+ "type": [
+ "string",
+ "array",
+ "object"
+ ]
+ },
+ "description": {
+ "type": "string"
+ },
+ "disallow": {
+ "items": {
+ "type": [
+ "string",
+ {
+ "$ref": "#"
+ }
+ ]
+ },
+ "type": [
+ "string",
+ "array"
+ ],
+ "uniqueItems": true
+ },
+ "divisibleBy": {
+ "default": 1,
+ "exclusiveMinimum": true,
+ "minimum": 0,
+ "type": "number"
+ },
+ "enum": {
+ "type": "array"
+ },
+ "exclusiveMaximum": {
+ "default": false,
+ "type": "boolean"
+ },
+ "exclusiveMinimum": {
+ "default": false,
+ "type": "boolean"
+ },
+ "extends": {
+ "default": {},
+ "items": {
+ "$ref": "#"
+ },
+ "type": [
+ {
+ "$ref": "#"
+ },
+ "array"
+ ]
+ },
+ "format": {
+ "type": "string"
+ },
+ "id": {
+ "format": "uri",
+ "type": "string"
+ },
+ "items": {
+ "default": {},
+ "items": {
+ "$ref": "#"
+ },
+ "type": [
+ {
+ "$ref": "#"
+ },
+ "array"
+ ]
+ },
+ "maxDecimal": {
+ "minimum": 0,
+ "type": "number"
+ },
+ "maxItems": {
+ "minimum": 0,
+ "type": "integer"
+ },
+ "maxLength": {
+ "type": "integer"
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "minItems": {
+ "default": 0,
+ "minimum": 0,
+ "type": "integer"
+ },
+ "minLength": {
+ "default": 0,
+ "minimum": 0,
+ "type": "integer"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "pattern": {
+ "format": "regex",
+ "type": "string"
+ },
+ "patternProperties": {
+ "additionalProperties": {
+ "$ref": "#"
+ },
+ "default": {},
+ "type": "object"
+ },
+ "properties": {
+ "additionalProperties": {
+ "$ref": "#",
+ "type": "object"
+ },
+ "default": {},
+ "type": "object"
+ },
+ "required": {
+ "default": false,
+ "type": "boolean"
+ },
+ "title": {
+ "type": "string"
+ },
+ "type": {
+ "default": "any",
+ "items": {
+ "type": [
+ "string",
+ {
+ "$ref": "#"
+ }
+ ]
+ },
+ "type": [
+ "string",
+ "array"
+ ],
+ "uniqueItems": true
+ },
+ "uniqueItems": {
+ "default": false,
+ "type": "boolean"
+ }
+ },
+ "type": "object"
+}
diff --git a/third_party/python/jsonschema/jsonschema/schemas/draft4.json b/third_party/python/jsonschema/jsonschema/schemas/draft4.json
new file mode 100644
index 0000000000..9b666cff88
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/schemas/draft4.json
@@ -0,0 +1,222 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "default": {},
+ "definitions": {
+ "positiveInteger": {
+ "minimum": 0,
+ "type": "integer"
+ },
+ "positiveIntegerDefault0": {
+ "allOf": [
+ {
+ "$ref": "#/definitions/positiveInteger"
+ },
+ {
+ "default": 0
+ }
+ ]
+ },
+ "schemaArray": {
+ "items": {
+ "$ref": "#"
+ },
+ "minItems": 1,
+ "type": "array"
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "items": {
+ "type": "string"
+ },
+ "minItems": 1,
+ "type": "array",
+ "uniqueItems": true
+ }
+ },
+ "dependencies": {
+ "exclusiveMaximum": [
+ "maximum"
+ ],
+ "exclusiveMinimum": [
+ "minimum"
+ ]
+ },
+ "description": "Core schema meta-schema",
+ "id": "http://json-schema.org/draft-04/schema#",
+ "properties": {
+ "$schema": {
+ "format": "uri",
+ "type": "string"
+ },
+ "additionalItems": {
+ "anyOf": [
+ {
+ "type": "boolean"
+ },
+ {
+ "$ref": "#"
+ }
+ ],
+ "default": {}
+ },
+ "additionalProperties": {
+ "anyOf": [
+ {
+ "type": "boolean"
+ },
+ {
+ "$ref": "#"
+ }
+ ],
+ "default": {}
+ },
+ "allOf": {
+ "$ref": "#/definitions/schemaArray"
+ },
+ "anyOf": {
+ "$ref": "#/definitions/schemaArray"
+ },
+ "default": {},
+ "definitions": {
+ "additionalProperties": {
+ "$ref": "#"
+ },
+ "default": {},
+ "type": "object"
+ },
+ "dependencies": {
+ "additionalProperties": {
+ "anyOf": [
+ {
+ "$ref": "#"
+ },
+ {
+ "$ref": "#/definitions/stringArray"
+ }
+ ]
+ },
+ "type": "object"
+ },
+ "description": {
+ "type": "string"
+ },
+ "enum": {
+ "type": "array"
+ },
+ "exclusiveMaximum": {
+ "default": false,
+ "type": "boolean"
+ },
+ "exclusiveMinimum": {
+ "default": false,
+ "type": "boolean"
+ },
+ "format": {
+ "type": "string"
+ },
+ "id": {
+ "format": "uri",
+ "type": "string"
+ },
+ "items": {
+ "anyOf": [
+ {
+ "$ref": "#"
+ },
+ {
+ "$ref": "#/definitions/schemaArray"
+ }
+ ],
+ "default": {}
+ },
+ "maxItems": {
+ "$ref": "#/definitions/positiveInteger"
+ },
+ "maxLength": {
+ "$ref": "#/definitions/positiveInteger"
+ },
+ "maxProperties": {
+ "$ref": "#/definitions/positiveInteger"
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "minItems": {
+ "$ref": "#/definitions/positiveIntegerDefault0"
+ },
+ "minLength": {
+ "$ref": "#/definitions/positiveIntegerDefault0"
+ },
+ "minProperties": {
+ "$ref": "#/definitions/positiveIntegerDefault0"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "multipleOf": {
+ "exclusiveMinimum": true,
+ "minimum": 0,
+ "type": "number"
+ },
+ "not": {
+ "$ref": "#"
+ },
+ "oneOf": {
+ "$ref": "#/definitions/schemaArray"
+ },
+ "pattern": {
+ "format": "regex",
+ "type": "string"
+ },
+ "patternProperties": {
+ "additionalProperties": {
+ "$ref": "#"
+ },
+ "default": {},
+ "type": "object"
+ },
+ "properties": {
+ "additionalProperties": {
+ "$ref": "#"
+ },
+ "default": {},
+ "type": "object"
+ },
+ "required": {
+ "$ref": "#/definitions/stringArray"
+ },
+ "title": {
+ "type": "string"
+ },
+ "type": {
+ "anyOf": [
+ {
+ "$ref": "#/definitions/simpleTypes"
+ },
+ {
+ "items": {
+ "$ref": "#/definitions/simpleTypes"
+ },
+ "minItems": 1,
+ "type": "array",
+ "uniqueItems": true
+ }
+ ]
+ },
+ "uniqueItems": {
+ "default": false,
+ "type": "boolean"
+ }
+ },
+ "type": "object"
+}
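Editorial note (not part of the patch): a hedged sketch of how a document's $schema URI selects one of these vendored meta-schemas through validator_for(); the example schema is illustrative.

from jsonschema.validators import validator_for

schema = {"$schema": "http://json-schema.org/draft-04/schema#", "type": "string"}

# validator_for inspects $schema and returns the matching validator class,
# Draft4Validator for the draft-04 meta-schema above.
cls = validator_for(schema)
cls.check_schema(schema)
cls(schema).validate("a pet name")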
diff --git a/third_party/python/jsonschema/jsonschema/schemas/draft6.json b/third_party/python/jsonschema/jsonschema/schemas/draft6.json
new file mode 100644
index 0000000000..a0d2bf7896
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/schemas/draft6.json
@@ -0,0 +1,153 @@
+{
+ "$schema": "http://json-schema.org/draft-06/schema#",
+ "$id": "http://json-schema.org/draft-06/schema#",
+ "title": "Core schema meta-schema",
+ "definitions": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$ref": "#" }
+ },
+ "nonNegativeInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "nonNegativeIntegerDefault0": {
+ "allOf": [
+ { "$ref": "#/definitions/nonNegativeInteger" },
+ { "default": 0 }
+ ]
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ "default": []
+ }
+ },
+ "type": ["object", "boolean"],
+ "properties": {
+ "$id": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$schema": {
+ "type": "string",
+ "format": "uri"
+ },
+ "$ref": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": {},
+ "examples": {
+ "type": "array",
+ "items": {}
+ },
+ "multipleOf": {
+ "type": "number",
+ "exclusiveMinimum": 0
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "number"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "number"
+ },
+ "maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "additionalItems": { "$ref": "#" },
+ "items": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/schemaArray" }
+ ],
+ "default": {}
+ },
+ "maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "contains": { "$ref": "#" },
+ "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "required": { "$ref": "#/definitions/stringArray" },
+ "additionalProperties": { "$ref": "#" },
+ "definitions": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "propertyNames": { "format": "regex" },
+ "default": {}
+ },
+ "dependencies": {
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/stringArray" }
+ ]
+ }
+ },
+ "propertyNames": { "$ref": "#" },
+ "const": {},
+ "enum": {
+ "type": "array"
+ },
+ "type": {
+ "anyOf": [
+ { "$ref": "#/definitions/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/definitions/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ },
+ "format": { "type": "string" },
+ "allOf": { "$ref": "#/definitions/schemaArray" },
+ "anyOf": { "$ref": "#/definitions/schemaArray" },
+ "oneOf": { "$ref": "#/definitions/schemaArray" },
+ "not": { "$ref": "#" }
+ },
+ "default": {}
+}
diff --git a/third_party/python/jsonschema/jsonschema/schemas/draft7.json b/third_party/python/jsonschema/jsonschema/schemas/draft7.json
new file mode 100644
index 0000000000..746cde9690
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/schemas/draft7.json
@@ -0,0 +1,166 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "$id": "http://json-schema.org/draft-07/schema#",
+ "title": "Core schema meta-schema",
+ "definitions": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$ref": "#" }
+ },
+ "nonNegativeInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "nonNegativeIntegerDefault0": {
+ "allOf": [
+ { "$ref": "#/definitions/nonNegativeInteger" },
+ { "default": 0 }
+ ]
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ "default": []
+ }
+ },
+ "type": ["object", "boolean"],
+ "properties": {
+ "$id": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$schema": {
+ "type": "string",
+ "format": "uri"
+ },
+ "$ref": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$comment": {
+ "type": "string"
+ },
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": true,
+ "readOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "examples": {
+ "type": "array",
+ "items": true
+ },
+ "multipleOf": {
+ "type": "number",
+ "exclusiveMinimum": 0
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "number"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "number"
+ },
+ "maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "additionalItems": { "$ref": "#" },
+ "items": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/schemaArray" }
+ ],
+ "default": true
+ },
+ "maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "contains": { "$ref": "#" },
+ "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "required": { "$ref": "#/definitions/stringArray" },
+ "additionalProperties": { "$ref": "#" },
+ "definitions": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "propertyNames": { "format": "regex" },
+ "default": {}
+ },
+ "dependencies": {
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/stringArray" }
+ ]
+ }
+ },
+ "propertyNames": { "$ref": "#" },
+ "const": true,
+ "enum": {
+ "type": "array",
+ "items": true
+ },
+ "type": {
+ "anyOf": [
+ { "$ref": "#/definitions/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/definitions/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ },
+ "format": { "type": "string" },
+ "contentMediaType": { "type": "string" },
+ "contentEncoding": { "type": "string" },
+ "if": {"$ref": "#"},
+ "then": {"$ref": "#"},
+ "else": {"$ref": "#"},
+ "allOf": { "$ref": "#/definitions/schemaArray" },
+ "anyOf": { "$ref": "#/definitions/schemaArray" },
+ "oneOf": { "$ref": "#/definitions/schemaArray" },
+ "not": { "$ref": "#" }
+ },
+ "default": true
+}
diff --git a/third_party/python/jsonschema/jsonschema/tests/__init__.py b/third_party/python/jsonschema/jsonschema/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/tests/__init__.py
diff --git a/third_party/python/jsonschema/jsonschema/tests/_helpers.py b/third_party/python/jsonschema/jsonschema/tests/_helpers.py
new file mode 100644
index 0000000000..70f291fe2a
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/tests/_helpers.py
@@ -0,0 +1,5 @@
+def bug(issue=None):
+ message = "A known bug."
+ if issue is not None:
+ message += " See issue #{issue}.".format(issue=issue)
+ return message
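Note (illustrative only, not part of the patch): the helper above simply builds a skip-reason string; the test-suite runner later in this diff passes its return value as the message for known-bug skips, e.g. skip(message=bug(371), ...). A minimal sketch of what it produces:

    # assumes the module above is importable as jsonschema.tests._helpers
    from jsonschema.tests._helpers import bug

    reason = bug(371)
    print(reason)  # "A known bug. See issue #371."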
diff --git a/third_party/python/jsonschema/jsonschema/tests/_suite.py b/third_party/python/jsonschema/jsonschema/tests/_suite.py
new file mode 100644
index 0000000000..b68a7b668c
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/tests/_suite.py
@@ -0,0 +1,239 @@
+"""
+Python representations of the JSON Schema Test Suite tests.
+"""
+
+from functools import partial
+import json
+import os
+import re
+import subprocess
+import sys
+import unittest
+
+from twisted.python.filepath import FilePath
+import attr
+
+from jsonschema.compat import PY3
+from jsonschema.validators import validators
+import jsonschema
+
+
+def _find_suite():
+ root = os.environ.get("JSON_SCHEMA_TEST_SUITE")
+ if root is not None:
+ return FilePath(root)
+
+ root = FilePath(jsonschema.__file__).parent().sibling("json")
+ if not root.isdir(): # pragma: no cover
+ raise ValueError(
+ (
+ "Can't find the JSON-Schema-Test-Suite directory. "
+ "Set the 'JSON_SCHEMA_TEST_SUITE' environment "
+ "variable or run the tests from alongside a checkout "
+ "of the suite."
+ ),
+ )
+ return root
+
+
+@attr.s(hash=True)
+class Suite(object):
+
+ _root = attr.ib(default=attr.Factory(_find_suite))
+
+ def _remotes(self):
+ jsonschema_suite = self._root.descendant(["bin", "jsonschema_suite"])
+ remotes = subprocess.check_output(
+ [sys.executable, jsonschema_suite.path, "remotes"],
+ )
+ return {
+ "http://localhost:1234/" + name: schema
+ for name, schema in json.loads(remotes.decode("utf-8")).items()
+ }
+
+ def benchmark(self, runner): # pragma: no cover
+ for name in validators:
+ self.version(name=name).benchmark(runner=runner)
+
+ def version(self, name):
+ return Version(
+ name=name,
+ path=self._root.descendant(["tests", name]),
+ remotes=self._remotes(),
+ )
+
+
+@attr.s(hash=True)
+class Version(object):
+
+ _path = attr.ib()
+ _remotes = attr.ib()
+
+ name = attr.ib()
+
+ def benchmark(self, runner, **kwargs): # pragma: no cover
+ for suite in self.tests():
+ for test in suite:
+ runner.bench_func(
+ test.fully_qualified_name,
+ partial(test.validate_ignoring_errors, **kwargs),
+ )
+
+ def tests(self):
+ return (
+ test
+ for child in self._path.globChildren("*.json")
+ for test in self._tests_in(
+ subject=child.basename()[:-5],
+ path=child,
+ )
+ )
+
+ def format_tests(self):
+ path = self._path.descendant(["optional", "format"])
+ return (
+ test
+ for child in path.globChildren("*.json")
+ for test in self._tests_in(
+ subject=child.basename()[:-5],
+ path=child,
+ )
+ )
+
+ def tests_of(self, name):
+ return self._tests_in(
+ subject=name,
+ path=self._path.child(name + ".json"),
+ )
+
+ def optional_tests_of(self, name):
+ return self._tests_in(
+ subject=name,
+ path=self._path.descendant(["optional", name + ".json"]),
+ )
+
+ def to_unittest_testcase(self, *suites, **kwargs):
+ name = kwargs.pop("name", "Test" + self.name.title())
+ methods = {
+ test.method_name: test.to_unittest_method(**kwargs)
+ for suite in suites
+ for tests in suite
+ for test in tests
+ }
+ cls = type(name, (unittest.TestCase,), methods)
+
+ try:
+ cls.__module__ = _someone_save_us_the_module_of_the_caller()
+ except Exception: # pragma: no cover
+ # We're doing crazy things, so if they go wrong, like a function
+ # behaving differently on some other interpreter, just make them
+ # not happen.
+ pass
+
+ return cls
+
+ def _tests_in(self, subject, path):
+ for each in json.loads(path.getContent().decode("utf-8")):
+ yield (
+ _Test(
+ version=self,
+ subject=subject,
+ case_description=each["description"],
+ schema=each["schema"],
+ remotes=self._remotes,
+ **test
+ ) for test in each["tests"]
+ )
+
+
+@attr.s(hash=True, repr=False)
+class _Test(object):
+
+ version = attr.ib()
+
+ subject = attr.ib()
+ case_description = attr.ib()
+ description = attr.ib()
+
+ data = attr.ib()
+ schema = attr.ib(repr=False)
+
+ valid = attr.ib()
+
+ _remotes = attr.ib()
+
+ def __repr__(self): # pragma: no cover
+ return "<Test {}>".format(self.fully_qualified_name)
+
+ @property
+ def fully_qualified_name(self): # pragma: no cover
+ return " > ".join(
+ [
+ self.version.name,
+ self.subject,
+ self.case_description,
+ self.description,
+ ]
+ )
+
+ @property
+ def method_name(self):
+ delimiters = r"[\W\- ]+"
+ name = "test_%s_%s_%s" % (
+ re.sub(delimiters, "_", self.subject),
+ re.sub(delimiters, "_", self.case_description),
+ re.sub(delimiters, "_", self.description),
+ )
+
+ if not PY3: # pragma: no cover
+ name = name.encode("utf-8")
+ return name
+
+ def to_unittest_method(self, skip=lambda test: None, **kwargs):
+ if self.valid:
+ def fn(this):
+ self.validate(**kwargs)
+ else:
+ def fn(this):
+ with this.assertRaises(jsonschema.ValidationError):
+ self.validate(**kwargs)
+
+ fn.__name__ = self.method_name
+ reason = skip(self)
+ return unittest.skipIf(reason is not None, reason)(fn)
+
+ def validate(self, Validator, **kwargs):
+ resolver = jsonschema.RefResolver.from_schema(
+ schema=self.schema,
+ store=self._remotes,
+ id_of=Validator.ID_OF,
+ )
+ jsonschema.validate(
+ instance=self.data,
+ schema=self.schema,
+ cls=Validator,
+ resolver=resolver,
+ **kwargs
+ )
+
+ def validate_ignoring_errors(self, Validator): # pragma: no cover
+ try:
+ self.validate(Validator=Validator)
+ except jsonschema.ValidationError:
+ pass
+
+
+def _someone_save_us_the_module_of_the_caller():
+ """
+ The FQON of the module two stack frames up from here.
+
+ This is intended to allow us to dynamically return test case classes that
+ are indistinguishable from being defined in the module that wants them.
+
+ Otherwise, trial will mis-print the FQON, and copy-pasting it won't re-run
+ the class that really is running.
+
+ Save us all, this is all so so so so so terrible.
+ """
+
+ return sys._getframe(2).f_globals["__name__"]
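Note (illustrative only, not part of the patch): this is a minimal sketch of how _suite.py is consumed, mirroring test_jsonschema_test_suite.py further down in this diff. It assumes a JSON-Schema-Test-Suite checkout is reachable, either via the JSON_SCHEMA_TEST_SUITE environment variable or as the sibling json/ directory that _find_suite() looks for.

    # Suite() locates the official test-suite checkout, version() wraps one
    # draft's tests, and to_unittest_testcase() turns them into a TestCase.
    from jsonschema import Draft7Validator, draft7_format_checker
    from jsonschema.tests._suite import Suite

    SUITE = Suite()
    DRAFT7 = SUITE.version(name="draft7")
    TestDraft7 = DRAFT7.to_unittest_testcase(
        DRAFT7.tests(),
        Validator=Draft7Validator,
        format_checker=draft7_format_checker,
    )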
diff --git a/third_party/python/jsonschema/jsonschema/tests/test_cli.py b/third_party/python/jsonschema/jsonschema/tests/test_cli.py
new file mode 100644
index 0000000000..ed820ba3f8
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/tests/test_cli.py
@@ -0,0 +1,151 @@
+from unittest import TestCase
+import json
+import subprocess
+import sys
+
+from jsonschema import Draft4Validator, ValidationError, cli, __version__
+from jsonschema.compat import NativeIO
+from jsonschema.exceptions import SchemaError
+
+
+def fake_validator(*errors):
+ errors = list(reversed(errors))
+
+ class FakeValidator(object):
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def iter_errors(self, instance):
+ if errors:
+ return errors.pop()
+ return []
+
+ def check_schema(self, schema):
+ pass
+
+ return FakeValidator
+
+
+class TestParser(TestCase):
+
+ FakeValidator = fake_validator()
+ instance_file = "foo.json"
+ schema_file = "schema.json"
+
+ def setUp(self):
+ cli.open = self.fake_open
+ self.addCleanup(delattr, cli, "open")
+
+ def fake_open(self, path):
+ if path == self.instance_file:
+ contents = ""
+ elif path == self.schema_file:
+ contents = {}
+ else: # pragma: no cover
+ self.fail("What is {!r}".format(path))
+ return NativeIO(json.dumps(contents))
+
+ def test_find_validator_by_fully_qualified_object_name(self):
+ arguments = cli.parse_args(
+ [
+ "--validator",
+ "jsonschema.tests.test_cli.TestParser.FakeValidator",
+ "--instance", self.instance_file,
+ self.schema_file,
+ ]
+ )
+ self.assertIs(arguments["validator"], self.FakeValidator)
+
+ def test_find_validator_in_jsonschema(self):
+ arguments = cli.parse_args(
+ [
+ "--validator", "Draft4Validator",
+ "--instance", self.instance_file,
+ self.schema_file,
+ ]
+ )
+ self.assertIs(arguments["validator"], Draft4Validator)
+
+
+class TestCLI(TestCase):
+ def test_draft3_schema_draft4_validator(self):
+ stdout, stderr = NativeIO(), NativeIO()
+ with self.assertRaises(SchemaError):
+ cli.run(
+ {
+ "validator": Draft4Validator,
+ "schema": {
+ "anyOf": [
+ {"minimum": 20},
+ {"type": "string"},
+ {"required": True},
+ ],
+ },
+ "instances": [1],
+ "error_format": "{error.message}",
+ },
+ stdout=stdout,
+ stderr=stderr,
+ )
+
+ def test_successful_validation(self):
+ stdout, stderr = NativeIO(), NativeIO()
+ exit_code = cli.run(
+ {
+ "validator": fake_validator(),
+ "schema": {},
+ "instances": [1],
+ "error_format": "{error.message}",
+ },
+ stdout=stdout,
+ stderr=stderr,
+ )
+ self.assertFalse(stdout.getvalue())
+ self.assertFalse(stderr.getvalue())
+ self.assertEqual(exit_code, 0)
+
+ def test_unsuccessful_validation(self):
+ error = ValidationError("I am an error!", instance=1)
+ stdout, stderr = NativeIO(), NativeIO()
+ exit_code = cli.run(
+ {
+ "validator": fake_validator([error]),
+ "schema": {},
+ "instances": [1],
+ "error_format": "{error.instance} - {error.message}",
+ },
+ stdout=stdout,
+ stderr=stderr,
+ )
+ self.assertFalse(stdout.getvalue())
+ self.assertEqual(stderr.getvalue(), "1 - I am an error!")
+ self.assertEqual(exit_code, 1)
+
+ def test_unsuccessful_validation_multiple_instances(self):
+ first_errors = [
+ ValidationError("9", instance=1),
+ ValidationError("8", instance=1),
+ ]
+ second_errors = [ValidationError("7", instance=2)]
+ stdout, stderr = NativeIO(), NativeIO()
+ exit_code = cli.run(
+ {
+ "validator": fake_validator(first_errors, second_errors),
+ "schema": {},
+ "instances": [1, 2],
+ "error_format": "{error.instance} - {error.message}\t",
+ },
+ stdout=stdout,
+ stderr=stderr,
+ )
+ self.assertFalse(stdout.getvalue())
+ self.assertEqual(stderr.getvalue(), "1 - 9\t1 - 8\t2 - 7\t")
+ self.assertEqual(exit_code, 1)
+
+ def test_version(self):
+ version = subprocess.check_output(
+ [sys.executable, "-m", "jsonschema", "--version"],
+ stderr=subprocess.STDOUT,
+ )
+ version = version.decode("utf-8").strip()
+ self.assertEqual(version, __version__)
diff --git a/third_party/python/jsonschema/jsonschema/tests/test_exceptions.py b/third_party/python/jsonschema/jsonschema/tests/test_exceptions.py
new file mode 100644
index 0000000000..eae00d76d7
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/tests/test_exceptions.py
@@ -0,0 +1,462 @@
+from unittest import TestCase
+import textwrap
+
+from jsonschema import Draft4Validator, exceptions
+from jsonschema.compat import PY3
+
+
+class TestBestMatch(TestCase):
+ def best_match(self, errors):
+ errors = list(errors)
+ best = exceptions.best_match(errors)
+ reversed_best = exceptions.best_match(reversed(errors))
+ msg = "Didn't return a consistent best match!\nGot: {0}\n\nThen: {1}"
+ self.assertEqual(
+ best._contents(), reversed_best._contents(),
+ msg=msg.format(best, reversed_best),
+ )
+ return best
+
+ def test_shallower_errors_are_better_matches(self):
+ validator = Draft4Validator(
+ {
+ "properties": {
+ "foo": {
+ "minProperties": 2,
+ "properties": {"bar": {"type": "object"}},
+ },
+ },
+ },
+ )
+ best = self.best_match(validator.iter_errors({"foo": {"bar": []}}))
+ self.assertEqual(best.validator, "minProperties")
+
+ def test_oneOf_and_anyOf_are_weak_matches(self):
+ """
+ A property you *must* match is probably better than one you have to
+ match a part of.
+ """
+
+ validator = Draft4Validator(
+ {
+ "minProperties": 2,
+ "anyOf": [{"type": "string"}, {"type": "number"}],
+ "oneOf": [{"type": "string"}, {"type": "number"}],
+ }
+ )
+ best = self.best_match(validator.iter_errors({}))
+ self.assertEqual(best.validator, "minProperties")
+
+ def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self):
+ """
+ If the most relevant error is an anyOf, then we traverse its context
+ and select the otherwise *least* relevant error, since in this case
+ that means the most specific, deepest error inside the instance.
+
+ I.e. since only one of the schemas must match, we look for the most
+ relevant one.
+ """
+
+ validator = Draft4Validator(
+ {
+ "properties": {
+ "foo": {
+ "anyOf": [
+ {"type": "string"},
+ {"properties": {"bar": {"type": "array"}}},
+ ],
+ },
+ },
+ },
+ )
+ best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
+ self.assertEqual(best.validator_value, "array")
+
+ def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self):
+ """
+ If the most relevant error is an oneOf, then we traverse its context
+ and select the otherwise *least* relevant error, since in this case
+ that means the most specific, deepest error inside the instance.
+
+ I.e. since only one of the schemas must match, we look for the most
+ relevant one.
+ """
+
+ validator = Draft4Validator(
+ {
+ "properties": {
+ "foo": {
+ "oneOf": [
+ {"type": "string"},
+ {"properties": {"bar": {"type": "array"}}},
+ ],
+ },
+ },
+ },
+ )
+ best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
+ self.assertEqual(best.validator_value, "array")
+
+ def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self):
+ """
+ Now, if the error is allOf, we traverse but select the *most* relevant
+ error from the context, because all schemas here must match anyway.
+ """
+
+ validator = Draft4Validator(
+ {
+ "properties": {
+ "foo": {
+ "allOf": [
+ {"type": "string"},
+ {"properties": {"bar": {"type": "array"}}},
+ ],
+ },
+ },
+ },
+ )
+ best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
+ self.assertEqual(best.validator_value, "string")
+
+ def test_nested_context_for_oneOf(self):
+ validator = Draft4Validator(
+ {
+ "properties": {
+ "foo": {
+ "oneOf": [
+ {"type": "string"},
+ {
+ "oneOf": [
+ {"type": "string"},
+ {
+ "properties": {
+ "bar": {"type": "array"},
+ },
+ },
+ ],
+ },
+ ],
+ },
+ },
+ },
+ )
+ best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
+ self.assertEqual(best.validator_value, "array")
+
+ def test_one_error(self):
+ validator = Draft4Validator({"minProperties": 2})
+ error, = validator.iter_errors({})
+ self.assertEqual(
+ exceptions.best_match(validator.iter_errors({})).validator,
+ "minProperties",
+ )
+
+ def test_no_errors(self):
+ validator = Draft4Validator({})
+ self.assertIsNone(exceptions.best_match(validator.iter_errors({})))
+
+
+class TestByRelevance(TestCase):
+ def test_short_paths_are_better_matches(self):
+ shallow = exceptions.ValidationError("Oh no!", path=["baz"])
+ deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"])
+ match = max([shallow, deep], key=exceptions.relevance)
+ self.assertIs(match, shallow)
+
+ match = max([deep, shallow], key=exceptions.relevance)
+ self.assertIs(match, shallow)
+
+ def test_global_errors_are_even_better_matches(self):
+ shallow = exceptions.ValidationError("Oh no!", path=[])
+ deep = exceptions.ValidationError("Oh yes!", path=["foo"])
+
+ errors = sorted([shallow, deep], key=exceptions.relevance)
+ self.assertEqual(
+ [list(error.path) for error in errors],
+ [["foo"], []],
+ )
+
+ errors = sorted([deep, shallow], key=exceptions.relevance)
+ self.assertEqual(
+ [list(error.path) for error in errors],
+ [["foo"], []],
+ )
+
+ def test_weak_validators_are_lower_priority(self):
+ weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
+ normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
+
+ best_match = exceptions.by_relevance(weak="a")
+
+ match = max([weak, normal], key=best_match)
+ self.assertIs(match, normal)
+
+ match = max([normal, weak], key=best_match)
+ self.assertIs(match, normal)
+
+ def test_strong_validators_are_higher_priority(self):
+ weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
+ normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
+ strong = exceptions.ValidationError("Oh fine!", path=[], validator="c")
+
+ best_match = exceptions.by_relevance(weak="a", strong="c")
+
+ match = max([weak, normal, strong], key=best_match)
+ self.assertIs(match, strong)
+
+ match = max([strong, normal, weak], key=best_match)
+ self.assertIs(match, strong)
+
+
+class TestErrorTree(TestCase):
+ def test_it_knows_how_many_total_errors_it_contains(self):
+ # FIXME: https://github.com/Julian/jsonschema/issues/442
+ errors = [
+ exceptions.ValidationError("Something", validator=i)
+ for i in range(8)
+ ]
+ tree = exceptions.ErrorTree(errors)
+ self.assertEqual(tree.total_errors, 8)
+
+ def test_it_contains_an_item_if_the_item_had_an_error(self):
+ errors = [exceptions.ValidationError("a message", path=["bar"])]
+ tree = exceptions.ErrorTree(errors)
+ self.assertIn("bar", tree)
+
+ def test_it_does_not_contain_an_item_if_the_item_had_no_error(self):
+ errors = [exceptions.ValidationError("a message", path=["bar"])]
+ tree = exceptions.ErrorTree(errors)
+ self.assertNotIn("foo", tree)
+
+ def test_validators_that_failed_appear_in_errors_dict(self):
+ error = exceptions.ValidationError("a message", validator="foo")
+ tree = exceptions.ErrorTree([error])
+ self.assertEqual(tree.errors, {"foo": error})
+
+ def test_it_creates_a_child_tree_for_each_nested_path(self):
+ errors = [
+ exceptions.ValidationError("a bar message", path=["bar"]),
+ exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]),
+ ]
+ tree = exceptions.ErrorTree(errors)
+ self.assertIn(0, tree["bar"])
+ self.assertNotIn(1, tree["bar"])
+
+ def test_children_have_their_errors_dicts_built(self):
+ e1, e2 = (
+ exceptions.ValidationError("1", validator="foo", path=["bar", 0]),
+ exceptions.ValidationError("2", validator="quux", path=["bar", 0]),
+ )
+ tree = exceptions.ErrorTree([e1, e2])
+ self.assertEqual(tree["bar"][0].errors, {"foo": e1, "quux": e2})
+
+ def test_multiple_errors_with_instance(self):
+ e1, e2 = (
+ exceptions.ValidationError(
+ "1",
+ validator="foo",
+ path=["bar", "bar2"],
+ instance="i1"),
+ exceptions.ValidationError(
+ "2",
+ validator="quux",
+ path=["foobar", 2],
+ instance="i2"),
+ )
+ exceptions.ErrorTree([e1, e2])
+
+ def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self):
+ error = exceptions.ValidationError("123", validator="foo", instance=[])
+ tree = exceptions.ErrorTree([error])
+
+ with self.assertRaises(IndexError):
+ tree[0]
+
+ def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self):
+ """
+ If a validator is dumb (like :validator:`required` in draft 3) and
+ refers to a path that isn't in the instance, the tree still properly
+ returns a subtree for that path.
+ """
+
+ error = exceptions.ValidationError(
+ "a message", validator="foo", instance={}, path=["foo"],
+ )
+ tree = exceptions.ErrorTree([error])
+ self.assertIsInstance(tree["foo"], exceptions.ErrorTree)
+
+
+class TestErrorInitReprStr(TestCase):
+ def make_error(self, **kwargs):
+ defaults = dict(
+ message=u"hello",
+ validator=u"type",
+ validator_value=u"string",
+ instance=5,
+ schema={u"type": u"string"},
+ )
+ defaults.update(kwargs)
+ return exceptions.ValidationError(**defaults)
+
+ def assertShows(self, expected, **kwargs):
+ if PY3: # pragma: no cover
+ expected = expected.replace("u'", "'")
+ expected = textwrap.dedent(expected).rstrip("\n")
+
+ error = self.make_error(**kwargs)
+ message_line, _, rest = str(error).partition("\n")
+ self.assertEqual(message_line, error.message)
+ self.assertEqual(rest, expected)
+
+ def test_it_calls_super_and_sets_args(self):
+ error = self.make_error()
+ self.assertGreater(len(error.args), 1)
+
+ def test_repr(self):
+ self.assertEqual(
+ repr(exceptions.ValidationError(message="Hello!")),
+ "<ValidationError: %r>" % "Hello!",
+ )
+
+ def test_unset_error(self):
+ error = exceptions.ValidationError("message")
+ self.assertEqual(str(error), "message")
+
+ kwargs = {
+ "validator": "type",
+ "validator_value": "string",
+ "instance": 5,
+ "schema": {"type": "string"},
+ }
+ # Just the message should show if any of the attributes are unset
+ for attr in kwargs:
+ k = dict(kwargs)
+ del k[attr]
+ error = exceptions.ValidationError("message", **k)
+ self.assertEqual(str(error), "message")
+
+ def test_empty_paths(self):
+ self.assertShows(
+ """
+ Failed validating u'type' in schema:
+ {u'type': u'string'}
+
+ On instance:
+ 5
+ """,
+ path=[],
+ schema_path=[],
+ )
+
+ def test_one_item_paths(self):
+ self.assertShows(
+ """
+ Failed validating u'type' in schema:
+ {u'type': u'string'}
+
+ On instance[0]:
+ 5
+ """,
+ path=[0],
+ schema_path=["items"],
+ )
+
+ def test_multiple_item_paths(self):
+ self.assertShows(
+ """
+ Failed validating u'type' in schema[u'items'][0]:
+ {u'type': u'string'}
+
+ On instance[0][u'a']:
+ 5
+ """,
+ path=[0, u"a"],
+ schema_path=[u"items", 0, 1],
+ )
+
+ def test_uses_pprint(self):
+ self.assertShows(
+ """
+ Failed validating u'maxLength' in schema:
+ {0: 0,
+ 1: 1,
+ 2: 2,
+ 3: 3,
+ 4: 4,
+ 5: 5,
+ 6: 6,
+ 7: 7,
+ 8: 8,
+ 9: 9,
+ 10: 10,
+ 11: 11,
+ 12: 12,
+ 13: 13,
+ 14: 14,
+ 15: 15,
+ 16: 16,
+ 17: 17,
+ 18: 18,
+ 19: 19}
+
+ On instance:
+ [0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24]
+ """,
+ instance=list(range(25)),
+ schema=dict(zip(range(20), range(20))),
+ validator=u"maxLength",
+ )
+
+ def test_str_works_with_instances_having_overriden_eq_operator(self):
+ """
+ Check for https://github.com/Julian/jsonschema/issues/164 which
+ rendered exceptions unusable when a `ValidationError` involved
+ instances with an `__eq__` method that returned truthy values.
+ """
+
+ class DontEQMeBro(object):
+ def __eq__(this, other): # pragma: no cover
+ self.fail("Don't!")
+
+ def __ne__(this, other): # pragma: no cover
+ self.fail("Don't!")
+
+ instance = DontEQMeBro()
+ error = exceptions.ValidationError(
+ "a message",
+ validator="foo",
+ instance=instance,
+ validator_value="some",
+ schema="schema",
+ )
+ self.assertIn(repr(instance), str(error))
+
+
+class TestHashable(TestCase):
+ def test_hashable(self):
+ set([exceptions.ValidationError("")])
+ set([exceptions.SchemaError("")])
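Note (illustrative only, not part of the patch): a compact sketch of the two error-handling helpers the tests above exercise. best_match() picks the most relevant error from an iterable of ValidationErrors, and ErrorTree indexes errors by the path into the instance at which they occurred.

    from jsonschema import Draft4Validator, exceptions

    validator = Draft4Validator({"properties": {"foo": {"type": "string"}}})
    errors = list(validator.iter_errors({"foo": 12}))

    best = exceptions.best_match(errors)
    print(best.validator)   # "type"

    tree = exceptions.ErrorTree(errors)
    print("foo" in tree)    # True: the error occurred under the "foo" key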
diff --git a/third_party/python/jsonschema/jsonschema/tests/test_format.py b/third_party/python/jsonschema/jsonschema/tests/test_format.py
new file mode 100644
index 0000000000..254985f615
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/tests/test_format.py
@@ -0,0 +1,89 @@
+"""
+Tests for the parts of jsonschema related to the :validator:`format` property.
+"""
+
+from unittest import TestCase
+
+from jsonschema import FormatError, ValidationError, FormatChecker
+from jsonschema.validators import Draft4Validator
+
+
+BOOM = ValueError("Boom!")
+BANG = ZeroDivisionError("Bang!")
+
+
+def boom(thing):
+ if thing == "bang":
+ raise BANG
+ raise BOOM
+
+
+class TestFormatChecker(TestCase):
+ def test_it_can_validate_no_formats(self):
+ checker = FormatChecker(formats=())
+ self.assertFalse(checker.checkers)
+
+ def test_it_raises_a_key_error_for_unknown_formats(self):
+ with self.assertRaises(KeyError):
+ FormatChecker(formats=["o noes"])
+
+ def test_it_can_register_cls_checkers(self):
+ original = dict(FormatChecker.checkers)
+ self.addCleanup(FormatChecker.checkers.pop, "boom")
+ FormatChecker.cls_checks("boom")(boom)
+ self.assertEqual(
+ FormatChecker.checkers,
+ dict(original, boom=(boom, ())),
+ )
+
+ def test_it_can_register_checkers(self):
+ checker = FormatChecker()
+ checker.checks("boom")(boom)
+ self.assertEqual(
+ checker.checkers,
+ dict(FormatChecker.checkers, boom=(boom, ()))
+ )
+
+ def test_it_catches_registered_errors(self):
+ checker = FormatChecker()
+ checker.checks("boom", raises=type(BOOM))(boom)
+
+ with self.assertRaises(FormatError) as cm:
+ checker.check(instance=12, format="boom")
+
+ self.assertIs(cm.exception.cause, BOOM)
+ self.assertIs(cm.exception.__cause__, BOOM)
+
+ # Unregistered errors should not be caught
+ with self.assertRaises(type(BANG)):
+ checker.check(instance="bang", format="boom")
+
+ def test_format_error_causes_become_validation_error_causes(self):
+ checker = FormatChecker()
+ checker.checks("boom", raises=ValueError)(boom)
+ validator = Draft4Validator({"format": "boom"}, format_checker=checker)
+
+ with self.assertRaises(ValidationError) as cm:
+ validator.validate("BOOM")
+
+ self.assertIs(cm.exception.cause, BOOM)
+ self.assertIs(cm.exception.__cause__, BOOM)
+
+ def test_format_checkers_come_with_defaults(self):
+ # This is bad :/ but relied upon.
+ # The docs for quite a while recommended people do things like
+ # validate(..., format_checker=FormatChecker())
+ # We should change that, but we can't without deprecation...
+ checker = FormatChecker()
+ with self.assertRaises(FormatError):
+ checker.check(instance="not-an-ipv4", format="ipv4")
+
+ def test_repr(self):
+ checker = FormatChecker(formats=())
+ checker.checks("foo")(lambda thing: True)
+ checker.checks("bar")(lambda thing: True)
+ checker.checks("baz")(lambda thing: True)
+ self.assertEqual(
+ repr(checker),
+ "<FormatChecker checkers=['bar', 'baz', 'foo']>",
+ )
diff --git a/third_party/python/jsonschema/jsonschema/tests/test_jsonschema_test_suite.py b/third_party/python/jsonschema/jsonschema/tests/test_jsonschema_test_suite.py
new file mode 100644
index 0000000000..ebccf29735
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/tests/test_jsonschema_test_suite.py
@@ -0,0 +1,277 @@
+"""
+Test runner for the JSON Schema official test suite
+
+Tests comprehensive correctness of each draft's validator.
+
+See https://github.com/json-schema-org/JSON-Schema-Test-Suite for details.
+"""
+
+import sys
+import warnings
+
+from jsonschema import (
+ Draft3Validator,
+ Draft4Validator,
+ Draft6Validator,
+ Draft7Validator,
+ draft3_format_checker,
+ draft4_format_checker,
+ draft6_format_checker,
+ draft7_format_checker,
+)
+from jsonschema.tests._helpers import bug
+from jsonschema.tests._suite import Suite
+from jsonschema.validators import _DEPRECATED_DEFAULT_TYPES, create
+
+
+SUITE = Suite()
+DRAFT3 = SUITE.version(name="draft3")
+DRAFT4 = SUITE.version(name="draft4")
+DRAFT6 = SUITE.version(name="draft6")
+DRAFT7 = SUITE.version(name="draft7")
+
+
+def skip(message, **kwargs):
+ def skipper(test):
+ if all(value == getattr(test, attr) for attr, value in kwargs.items()):
+ return message
+ return skipper
+
+
+def missing_format(checker):
+ def missing_format(test):
+ schema = test.schema
+ if schema is True or schema is False or "format" not in schema:
+ return
+
+ if schema["format"] not in checker.checkers:
+ return "Format checker {0!r} not found.".format(schema["format"])
+ return missing_format
+
+
+is_narrow_build = sys.maxunicode == 2 ** 16 - 1
+if is_narrow_build: # pragma: no cover
+ message = "Not running surrogate Unicode case, this Python is narrow."
+
+ def narrow_unicode_build(test): # pragma: no cover
+ return skip(
+ message=message,
+ description="one supplementary Unicode code point is not long enough",
+ )(test) or skip(
+ message=message,
+ description="two supplementary Unicode code points is long enough",
+ )(test)
+else:
+ def narrow_unicode_build(test): # pragma: no cover
+ return
+
+
+TestDraft3 = DRAFT3.to_unittest_testcase(
+ DRAFT3.tests(),
+ DRAFT3.optional_tests_of(name="bignum"),
+ DRAFT3.optional_tests_of(name="format"),
+ DRAFT3.optional_tests_of(name="zeroTerminatedFloats"),
+ Validator=Draft3Validator,
+ format_checker=draft3_format_checker,
+ skip=lambda test: (
+ narrow_unicode_build(test)
+ or missing_format(draft3_format_checker)(test)
+ or skip(
+ message="Upstream bug in strict_rfc3339",
+ subject="format",
+ description="case-insensitive T and Z",
+ )(test)
+ ),
+)
+
+
+TestDraft4 = DRAFT4.to_unittest_testcase(
+ DRAFT4.tests(),
+ DRAFT4.optional_tests_of(name="bignum"),
+ DRAFT4.optional_tests_of(name="format"),
+ DRAFT4.optional_tests_of(name="zeroTerminatedFloats"),
+ Validator=Draft4Validator,
+ format_checker=draft4_format_checker,
+ skip=lambda test: (
+ narrow_unicode_build(test)
+ or missing_format(draft4_format_checker)(test)
+ or skip(
+ message=bug(),
+ subject="ref",
+ case_description="Recursive references between schemas",
+ )(test)
+ or skip(
+ message=bug(371),
+ subject="ref",
+ case_description="Location-independent identifier",
+ )(test)
+ or skip(
+ message=bug(371),
+ subject="ref",
+ case_description=(
+ "Location-independent identifier with absolute URI"
+ ),
+ )(test)
+ or skip(
+ message=bug(371),
+ subject="ref",
+ case_description=(
+ "Location-independent identifier with base URI change in subschema"
+ ),
+ )(test)
+ or skip(
+ message=bug(),
+ subject="refRemote",
+ case_description="base URI change - change folder in subschema",
+ )(test)
+ or skip(
+ message="Upstream bug in strict_rfc3339",
+ subject="format",
+ description="case-insensitive T and Z",
+ )(test)
+ ),
+)
+
+
+TestDraft6 = DRAFT6.to_unittest_testcase(
+ DRAFT6.tests(),
+ DRAFT6.optional_tests_of(name="bignum"),
+ DRAFT6.optional_tests_of(name="format"),
+ DRAFT6.optional_tests_of(name="zeroTerminatedFloats"),
+ Validator=Draft6Validator,
+ format_checker=draft6_format_checker,
+ skip=lambda test: (
+ narrow_unicode_build(test)
+ or missing_format(draft6_format_checker)(test)
+ or skip(
+ message=bug(),
+ subject="ref",
+ case_description="Recursive references between schemas",
+ )(test)
+ or skip(
+ message=bug(371),
+ subject="ref",
+ case_description="Location-independent identifier",
+ )(test)
+ or skip(
+ message=bug(371),
+ subject="ref",
+ case_description=(
+ "Location-independent identifier with absolute URI"
+ ),
+ )(test)
+ or skip(
+ message=bug(371),
+ subject="ref",
+ case_description=(
+ "Location-independent identifier with base URI change in subschema"
+ ),
+ )(test)
+ or skip(
+ message=bug(),
+ subject="refRemote",
+ case_description="base URI change - change folder in subschema",
+ )(test)
+ or skip(
+ message="Upstream bug in strict_rfc3339",
+ subject="format",
+ description="case-insensitive T and Z",
+ )(test)
+ ),
+)
+
+
+TestDraft7 = DRAFT7.to_unittest_testcase(
+ DRAFT7.tests(),
+ DRAFT7.format_tests(),
+ DRAFT7.optional_tests_of(name="bignum"),
+ DRAFT7.optional_tests_of(name="content"),
+ DRAFT7.optional_tests_of(name="zeroTerminatedFloats"),
+ Validator=Draft7Validator,
+ format_checker=draft7_format_checker,
+ skip=lambda test: (
+ narrow_unicode_build(test)
+ or missing_format(draft7_format_checker)(test)
+ or skip(
+ message=bug(),
+ subject="ref",
+ case_description="Recursive references between schemas",
+ )(test)
+ or skip(
+ message=bug(371),
+ subject="ref",
+ case_description="Location-independent identifier",
+ )(test)
+ or skip(
+ message=bug(371),
+ subject="ref",
+ case_description=(
+ "Location-independent identifier with absolute URI"
+ ),
+ )(test)
+ or skip(
+ message=bug(371),
+ subject="ref",
+ case_description=(
+ "Location-independent identifier with base URI change in subschema"
+ ),
+ )(test)
+ or skip(
+ message=bug(),
+ subject="refRemote",
+ case_description="base URI change - change folder in subschema",
+ )(test)
+ or skip(
+ message="Upstream bug in strict_rfc3339",
+ subject="date-time",
+ description="case-insensitive T and Z",
+ )(test)
+ or skip(
+ message=bug(593),
+ subject="content",
+ case_description=(
+ "validation of string-encoded content based on media type"
+ ),
+ )(test)
+ or skip(
+ message=bug(593),
+ subject="content",
+ case_description="validation of binary string-encoding",
+ )(test)
+ or skip(
+ message=bug(593),
+ subject="content",
+ case_description=(
+ "validation of binary-encoded media type documents"
+ ),
+ )(test)
+ ),
+)
+
+
+with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+
+ TestDraft3LegacyTypeCheck = DRAFT3.to_unittest_testcase(
+ # Interestingly the any part couldn't really be done with the old API.
+ (
+ (test for test in each if test.schema != {"type": "any"})
+ for each in DRAFT3.tests_of(name="type")
+ ),
+ name="TestDraft3LegacyTypeCheck",
+ Validator=create(
+ meta_schema=Draft3Validator.META_SCHEMA,
+ validators=Draft3Validator.VALIDATORS,
+ default_types=_DEPRECATED_DEFAULT_TYPES,
+ ),
+ )
+
+ TestDraft4LegacyTypeCheck = DRAFT4.to_unittest_testcase(
+ DRAFT4.tests_of(name="type"),
+ name="TestDraft4LegacyTypeCheck",
+ Validator=create(
+ meta_schema=Draft4Validator.META_SCHEMA,
+ validators=Draft4Validator.VALIDATORS,
+ default_types=_DEPRECATED_DEFAULT_TYPES,
+ ),
+ )
diff --git a/third_party/python/jsonschema/jsonschema/tests/test_types.py b/third_party/python/jsonschema/jsonschema/tests/test_types.py
new file mode 100644
index 0000000000..2280cc395b
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/tests/test_types.py
@@ -0,0 +1,190 @@
+"""
+Tests on the new type interface. The actual correctness of the type checking
+is handled in test_jsonschema_test_suite; these tests check that TypeChecker
+functions correctly and can facilitate extensions to type checking.
+"""
+from collections import namedtuple
+from unittest import TestCase
+
+from jsonschema import ValidationError, _validators
+from jsonschema._types import TypeChecker
+from jsonschema.exceptions import UndefinedTypeCheck
+from jsonschema.validators import Draft4Validator, extend
+
+
+def equals_2(checker, instance):
+ return instance == 2
+
+
+def is_namedtuple(instance):
+ return isinstance(instance, tuple) and getattr(instance, "_fields", None)
+
+
+def is_object_or_named_tuple(checker, instance):
+ if Draft4Validator.TYPE_CHECKER.is_type(instance, "object"):
+ return True
+ return is_namedtuple(instance)
+
+
+def coerce_named_tuple(fn):
+ def coerced(validator, value, instance, schema):
+ if is_namedtuple(instance):
+ instance = instance._asdict()
+ return fn(validator, value, instance, schema)
+ return coerced
+
+
+required = coerce_named_tuple(_validators.required)
+properties = coerce_named_tuple(_validators.properties)
+
+
+class TestTypeChecker(TestCase):
+ def test_is_type(self):
+ checker = TypeChecker({"two": equals_2})
+ self.assertEqual(
+ (
+ checker.is_type(instance=2, type="two"),
+ checker.is_type(instance="bar", type="two"),
+ ),
+ (True, False),
+ )
+
+ def test_is_unknown_type(self):
+ with self.assertRaises(UndefinedTypeCheck) as context:
+ TypeChecker().is_type(4, "foobar")
+ self.assertIn("foobar", str(context.exception))
+
+ def test_checks_can_be_added_at_init(self):
+ checker = TypeChecker({"two": equals_2})
+ self.assertEqual(checker, TypeChecker().redefine("two", equals_2))
+
+ def test_redefine_existing_type(self):
+ self.assertEqual(
+ TypeChecker().redefine("two", object()).redefine("two", equals_2),
+ TypeChecker().redefine("two", equals_2),
+ )
+
+ def test_remove(self):
+ self.assertEqual(
+ TypeChecker({"two": equals_2}).remove("two"),
+ TypeChecker(),
+ )
+
+ def test_remove_unknown_type(self):
+ with self.assertRaises(UndefinedTypeCheck) as context:
+ TypeChecker().remove("foobar")
+ self.assertIn("foobar", str(context.exception))
+
+ def test_redefine_many(self):
+ self.assertEqual(
+ TypeChecker().redefine_many({"foo": int, "bar": str}),
+ TypeChecker().redefine("foo", int).redefine("bar", str),
+ )
+
+ def test_remove_multiple(self):
+ self.assertEqual(
+ TypeChecker({"foo": int, "bar": str}).remove("foo", "bar"),
+ TypeChecker(),
+ )
+
+ def test_type_check_can_raise_key_error(self):
+ """
+ Make sure no one writes:
+
+ try:
+ self._type_checkers[type](...)
+ except KeyError:
+
+ ignoring the fact that the function itself can raise that.
+ """
+
+ error = KeyError("Stuff")
+
+ def raises_keyerror(checker, instance):
+ raise error
+
+ with self.assertRaises(KeyError) as context:
+ TypeChecker({"foo": raises_keyerror}).is_type(4, "foo")
+
+ self.assertIs(context.exception, error)
+
+
+class TestCustomTypes(TestCase):
+ def test_simple_type_can_be_extended(self):
+ def int_or_str_int(checker, instance):
+ if not isinstance(instance, (int, str)):
+ return False
+ try:
+ int(instance)
+ except ValueError:
+ return False
+ return True
+
+ CustomValidator = extend(
+ Draft4Validator,
+ type_checker=Draft4Validator.TYPE_CHECKER.redefine(
+ "integer", int_or_str_int,
+ ),
+ )
+ validator = CustomValidator({"type": "integer"})
+
+ validator.validate(4)
+ validator.validate("4")
+
+ with self.assertRaises(ValidationError):
+ validator.validate(4.4)
+
+ def test_object_can_be_extended(self):
+ schema = {"type": "object"}
+
+ Point = namedtuple("Point", ["x", "y"])
+
+ type_checker = Draft4Validator.TYPE_CHECKER.redefine(
+ u"object", is_object_or_named_tuple,
+ )
+
+ CustomValidator = extend(Draft4Validator, type_checker=type_checker)
+ validator = CustomValidator(schema)
+
+ validator.validate(Point(x=4, y=5))
+
+ def test_object_extensions_require_custom_validators(self):
+ schema = {"type": "object", "required": ["x"]}
+
+ type_checker = Draft4Validator.TYPE_CHECKER.redefine(
+ u"object", is_object_or_named_tuple,
+ )
+
+ CustomValidator = extend(Draft4Validator, type_checker=type_checker)
+ validator = CustomValidator(schema)
+
+ Point = namedtuple("Point", ["x", "y"])
+ # Cannot handle required
+ with self.assertRaises(ValidationError):
+ validator.validate(Point(x=4, y=5))
+
+ def test_object_extensions_can_handle_custom_validators(self):
+ schema = {
+ "type": "object",
+ "required": ["x"],
+ "properties": {"x": {"type": "integer"}},
+ }
+
+ type_checker = Draft4Validator.TYPE_CHECKER.redefine(
+ u"object", is_object_or_named_tuple,
+ )
+
+ CustomValidator = extend(
+ Draft4Validator,
+ type_checker=type_checker,
+ validators={"required": required, "properties": properties},
+ )
+
+ validator = CustomValidator(schema)
+
+ Point = namedtuple("Point", ["x", "y"])
+ # Can now process required and properties
+ validator.validate(Point(x=4, y=5))
+
+ with self.assertRaises(ValidationError):
+ validator.validate(Point(x="not an integer", y=5))
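Note (illustrative only, not part of the patch): the extension pattern the tests above exercise, condensed. A redefined TypeChecker is attached to a validator class via extend(), here letting "integer" also accept numeric strings while still rejecting floats.

    from jsonschema import ValidationError
    from jsonschema.validators import Draft4Validator, extend

    def int_or_str_int(checker, instance):
        # Accept ints, or strings that parse as ints.
        if not isinstance(instance, (int, str)):
            return False
        try:
            int(instance)
        except ValueError:
            return False
        return True

    CustomValidator = extend(
        Draft4Validator,
        type_checker=Draft4Validator.TYPE_CHECKER.redefine(
            "integer", int_or_str_int,
        ),
    )
    validator = CustomValidator({"type": "integer"})
    validator.validate("4")          # passes
    try:
        validator.validate(4.4)      # still rejected
    except ValidationError:
        pass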
diff --git a/third_party/python/jsonschema/jsonschema/tests/test_validators.py b/third_party/python/jsonschema/jsonschema/tests/test_validators.py
new file mode 100644
index 0000000000..07be4f08bc
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/tests/test_validators.py
@@ -0,0 +1,1762 @@
+from collections import deque
+from contextlib import contextmanager
+from decimal import Decimal
+from io import BytesIO
+from unittest import TestCase
+import json
+import os
+import sys
+import tempfile
+import unittest
+
+from twisted.trial.unittest import SynchronousTestCase
+import attr
+
+from jsonschema import FormatChecker, TypeChecker, exceptions, validators
+from jsonschema.compat import PY3, pathname2url
+from jsonschema.tests._helpers import bug
+
+
+def startswith(validator, startswith, instance, schema):
+ if not instance.startswith(startswith):
+ yield exceptions.ValidationError(u"Whoops!")
+
+
+class TestCreateAndExtend(SynchronousTestCase):
+ def setUp(self):
+ self.addCleanup(
+ self.assertEqual,
+ validators.meta_schemas,
+ dict(validators.meta_schemas),
+ )
+
+ self.meta_schema = {u"$id": "some://meta/schema"}
+ self.validators = {u"startswith": startswith}
+ self.type_checker = TypeChecker()
+ self.Validator = validators.create(
+ meta_schema=self.meta_schema,
+ validators=self.validators,
+ type_checker=self.type_checker,
+ )
+
+ def test_attrs(self):
+ self.assertEqual(
+ (
+ self.Validator.VALIDATORS,
+ self.Validator.META_SCHEMA,
+ self.Validator.TYPE_CHECKER,
+ ), (
+ self.validators,
+ self.meta_schema,
+ self.type_checker,
+ ),
+ )
+
+ def test_init(self):
+ schema = {u"startswith": u"foo"}
+ self.assertEqual(self.Validator(schema).schema, schema)
+
+ def test_iter_errors(self):
+ schema = {u"startswith": u"hel"}
+ iter_errors = self.Validator(schema).iter_errors
+
+ errors = list(iter_errors(u"hello"))
+ self.assertEqual(errors, [])
+
+ expected_error = exceptions.ValidationError(
+ u"Whoops!",
+ instance=u"goodbye",
+ schema=schema,
+ validator=u"startswith",
+ validator_value=u"hel",
+ schema_path=deque([u"startswith"]),
+ )
+
+ errors = list(iter_errors(u"goodbye"))
+ self.assertEqual(len(errors), 1)
+ self.assertEqual(errors[0]._contents(), expected_error._contents())
+
+ def test_if_a_version_is_provided_it_is_registered(self):
+ Validator = validators.create(
+ meta_schema={u"$id": "something"},
+ version="my version",
+ )
+ self.addCleanup(validators.meta_schemas.pop, "something")
+ self.assertEqual(Validator.__name__, "MyVersionValidator")
+
+ def test_if_a_version_is_not_provided_it_is_not_registered(self):
+ original = dict(validators.meta_schemas)
+ validators.create(meta_schema={u"id": "id"})
+ self.assertEqual(validators.meta_schemas, original)
+
+ def test_validates_registers_meta_schema_id(self):
+ meta_schema_key = "meta schema id"
+ my_meta_schema = {u"id": meta_schema_key}
+
+ validators.create(
+ meta_schema=my_meta_schema,
+ version="my version",
+ id_of=lambda s: s.get("id", ""),
+ )
+ self.addCleanup(validators.meta_schemas.pop, meta_schema_key)
+
+ self.assertIn(meta_schema_key, validators.meta_schemas)
+
+ def test_validates_registers_meta_schema_draft6_id(self):
+ meta_schema_key = "meta schema $id"
+ my_meta_schema = {u"$id": meta_schema_key}
+
+ validators.create(
+ meta_schema=my_meta_schema,
+ version="my version",
+ )
+ self.addCleanup(validators.meta_schemas.pop, meta_schema_key)
+
+ self.assertIn(meta_schema_key, validators.meta_schemas)
+
+ def test_create_default_types(self):
+ Validator = validators.create(meta_schema={}, validators=())
+ self.assertTrue(
+ all(
+ Validator({}).is_type(instance=instance, type=type)
+ for type, instance in [
+ (u"array", []),
+ (u"boolean", True),
+ (u"integer", 12),
+ (u"null", None),
+ (u"number", 12.0),
+ (u"object", {}),
+ (u"string", u"foo"),
+ ]
+ ),
+ )
+
+ def test_extend(self):
+ original = dict(self.Validator.VALIDATORS)
+ new = object()
+
+ Extended = validators.extend(
+ self.Validator,
+ validators={u"new": new},
+ )
+ self.assertEqual(
+ (
+ Extended.VALIDATORS,
+ Extended.META_SCHEMA,
+ Extended.TYPE_CHECKER,
+ self.Validator.VALIDATORS,
+ ), (
+ dict(original, new=new),
+ self.Validator.META_SCHEMA,
+ self.Validator.TYPE_CHECKER,
+ original,
+ ),
+ )
+
+ def test_extend_idof(self):
+ """
+ Extending a validator preserves its notion of schema IDs.
+ """
+ def id_of(schema):
+ return schema.get(u"__test__", self.Validator.ID_OF(schema))
+ correct_id = "the://correct/id/"
+ meta_schema = {
+ u"$id": "the://wrong/id/",
+ u"__test__": correct_id,
+ }
+ Original = validators.create(
+ meta_schema=meta_schema,
+ validators=self.validators,
+ type_checker=self.type_checker,
+ id_of=id_of,
+ )
+ self.assertEqual(Original.ID_OF(Original.META_SCHEMA), correct_id)
+
+ Derived = validators.extend(Original)
+ self.assertEqual(Derived.ID_OF(Derived.META_SCHEMA), correct_id)
+
+
+class TestLegacyTypeChecking(SynchronousTestCase):
+ def test_create_default_types(self):
+ Validator = validators.create(meta_schema={}, validators=())
+ self.assertEqual(
+ set(Validator.DEFAULT_TYPES), {
+ u"array",
+ u"boolean",
+ u"integer",
+ u"null",
+ u"number",
+ u"object", u"string",
+ },
+ )
+ self.flushWarnings()
+
+ def test_extend(self):
+ Validator = validators.create(meta_schema={}, validators=())
+ original = dict(Validator.VALIDATORS)
+ new = object()
+
+ Extended = validators.extend(
+ Validator,
+ validators={u"new": new},
+ )
+ self.assertEqual(
+ (
+ Extended.VALIDATORS,
+ Extended.META_SCHEMA,
+ Extended.TYPE_CHECKER,
+ Validator.VALIDATORS,
+
+ Extended.DEFAULT_TYPES,
+ Extended({}).DEFAULT_TYPES,
+ self.flushWarnings()[0]["message"],
+ ), (
+ dict(original, new=new),
+ Validator.META_SCHEMA,
+ Validator.TYPE_CHECKER,
+ original,
+
+ Validator.DEFAULT_TYPES,
+ Validator.DEFAULT_TYPES,
+ self.flushWarnings()[0]["message"],
+ ),
+ )
+
+ def test_types_redefines_the_validators_type_checker(self):
+ schema = {"type": "string"}
+ self.assertFalse(validators.Draft7Validator(schema).is_valid(12))
+
+ validator = validators.Draft7Validator(
+ schema,
+ types={"string": (str, int)},
+ )
+ self.assertTrue(validator.is_valid(12))
+ self.flushWarnings()
+
+ def test_providing_default_types_warns(self):
+ self.assertWarns(
+ category=DeprecationWarning,
+ message=(
+ "The default_types argument is deprecated. "
+ "Use the type_checker argument instead."
+ ),
+ # https://tm.tl/9363 :'(
+ filename=sys.modules[self.assertWarns.__module__].__file__,
+
+ f=validators.create,
+ meta_schema={},
+ validators={},
+ default_types={"foo": object},
+ )
+
+ def test_cannot_ask_for_default_types_with_non_default_type_checker(self):
+ """
+ We raise an error when you ask a validator with a non-default
+ type checker for its DEFAULT_TYPES.
+
+ The type checker argument is new, so no one but this library
+ itself should be trying to use it, and doing so while also
+ asking for DEFAULT_TYPES makes no sense (not to mention being
+ deprecated), since type checkers are not strictly about Python
+ types.
+ """
+ Validator = validators.create(
+ meta_schema={},
+ validators={},
+ type_checker=TypeChecker(),
+ )
+ with self.assertRaises(validators._DontDoThat) as e:
+ Validator.DEFAULT_TYPES
+
+ self.assertIn(
+ "DEFAULT_TYPES cannot be used on Validators using TypeCheckers",
+ str(e.exception),
+ )
+ with self.assertRaises(validators._DontDoThat):
+ Validator({}).DEFAULT_TYPES
+
+ self.assertFalse(self.flushWarnings())
+
+ def test_providing_explicit_type_checker_does_not_warn(self):
+ Validator = validators.create(
+ meta_schema={},
+ validators={},
+ type_checker=TypeChecker(),
+ )
+ self.assertFalse(self.flushWarnings())
+
+ Validator({})
+ self.assertFalse(self.flushWarnings())
+
+ def test_providing_neither_does_not_warn(self):
+ Validator = validators.create(meta_schema={}, validators={})
+ self.assertFalse(self.flushWarnings())
+
+ Validator({})
+ self.assertFalse(self.flushWarnings())
+
+ def test_providing_default_types_with_type_checker_errors(self):
+ with self.assertRaises(TypeError) as e:
+ validators.create(
+ meta_schema={},
+ validators={},
+ default_types={"foo": object},
+ type_checker=TypeChecker(),
+ )
+
+ self.assertIn(
+ "Do not specify default_types when providing a type checker",
+ str(e.exception),
+ )
+ self.assertFalse(self.flushWarnings())
+
+ def test_extending_a_legacy_validator_with_a_type_checker_errors(self):
+ Validator = validators.create(
+ meta_schema={},
+ validators={},
+ default_types={u"array": list}
+ )
+ with self.assertRaises(TypeError) as e:
+ validators.extend(
+ Validator,
+ validators={},
+ type_checker=TypeChecker(),
+ )
+
+ self.assertIn(
+ (
+ "Cannot extend a validator created with default_types "
+ "with a type_checker. Update the validator to use a "
+ "type_checker when created."
+ ),
+ str(e.exception),
+ )
+ self.flushWarnings()
+
+ def test_extending_a_legacy_validator_does_not_rewarn(self):
+ Validator = validators.create(meta_schema={}, default_types={})
+ self.assertTrue(self.flushWarnings())
+
+ validators.extend(Validator)
+ self.assertFalse(self.flushWarnings())
+
+ def test_accessing_default_types_warns(self):
+ Validator = validators.create(meta_schema={}, validators={})
+ self.assertFalse(self.flushWarnings())
+
+ self.assertWarns(
+ DeprecationWarning,
+ (
+ "The DEFAULT_TYPES attribute is deprecated. "
+ "See the type checker attached to this validator instead."
+ ),
+ # https://tm.tl/9363 :'(
+ sys.modules[self.assertWarns.__module__].__file__,
+
+ getattr,
+ Validator,
+ "DEFAULT_TYPES",
+ )
+
+ def test_accessing_default_types_on_the_instance_warns(self):
+ Validator = validators.create(meta_schema={}, validators={})
+ self.assertFalse(self.flushWarnings())
+
+ self.assertWarns(
+ DeprecationWarning,
+ (
+ "The DEFAULT_TYPES attribute is deprecated. "
+ "See the type checker attached to this validator instead."
+ ),
+ # https://tm.tl/9363 :'(
+ sys.modules[self.assertWarns.__module__].__file__,
+
+ getattr,
+ Validator({}),
+ "DEFAULT_TYPES",
+ )
+
+ def test_providing_types_to_init_warns(self):
+ Validator = validators.create(meta_schema={}, validators={})
+ self.assertFalse(self.flushWarnings())
+
+ self.assertWarns(
+ category=DeprecationWarning,
+ message=(
+ "The types argument is deprecated. "
+ "Provide a type_checker to jsonschema.validators.extend "
+ "instead."
+ ),
+ # https://tm.tl/9363 :'(
+ filename=sys.modules[self.assertWarns.__module__].__file__,
+
+ f=Validator,
+ schema={},
+ types={"bar": object},
+ )
+
+
+class TestIterErrors(TestCase):
+ def setUp(self):
+ self.validator = validators.Draft3Validator({})
+
+ def test_iter_errors(self):
+ instance = [1, 2]
+ schema = {
+ u"disallow": u"array",
+ u"enum": [["a", "b", "c"], ["d", "e", "f"]],
+ u"minItems": 3,
+ }
+
+ got = (e.message for e in self.validator.iter_errors(instance, schema))
+ expected = [
+ "%r is disallowed for [1, 2]" % (schema["disallow"],),
+ "[1, 2] is too short",
+ "[1, 2] is not one of %r" % (schema["enum"],),
+ ]
+ self.assertEqual(sorted(got), sorted(expected))
+
+ def test_iter_errors_multiple_failures_one_validator(self):
+ instance = {"foo": 2, "bar": [1], "baz": 15, "quux": "spam"}
+ schema = {
+ u"properties": {
+ "foo": {u"type": "string"},
+ "bar": {u"minItems": 2},
+ "baz": {u"maximum": 10, u"enum": [2, 4, 6, 8]},
+ },
+ }
+
+ errors = list(self.validator.iter_errors(instance, schema))
+ self.assertEqual(len(errors), 4)
+
+
+class TestValidationErrorMessages(TestCase):
+ def message_for(self, instance, schema, *args, **kwargs):
+ kwargs.setdefault("cls", validators.Draft3Validator)
+ with self.assertRaises(exceptions.ValidationError) as e:
+ validators.validate(instance, schema, *args, **kwargs)
+ return e.exception.message
+
+ def test_single_type_failure(self):
+ message = self.message_for(instance=1, schema={u"type": u"string"})
+ self.assertEqual(message, "1 is not of type %r" % u"string")
+
+ def test_single_type_list_failure(self):
+ message = self.message_for(instance=1, schema={u"type": [u"string"]})
+ self.assertEqual(message, "1 is not of type %r" % u"string")
+
+ def test_multiple_type_failure(self):
+ types = u"string", u"object"
+ message = self.message_for(instance=1, schema={u"type": list(types)})
+ self.assertEqual(message, "1 is not of type %r, %r" % types)
+
+ def test_object_without_title_type_failure(self):
+ type = {u"type": [{u"minimum": 3}]}
+ message = self.message_for(instance=1, schema={u"type": [type]})
+ self.assertEqual(message, "1 is less than the minimum of 3")
+
+ def test_object_with_named_type_failure(self):
+ schema = {u"type": [{u"name": "Foo", u"minimum": 3}]}
+ message = self.message_for(instance=1, schema=schema)
+ self.assertEqual(message, "1 is less than the minimum of 3")
+
+ def test_minimum(self):
+ message = self.message_for(instance=1, schema={"minimum": 2})
+ self.assertEqual(message, "1 is less than the minimum of 2")
+
+ def test_maximum(self):
+ message = self.message_for(instance=1, schema={"maximum": 0})
+ self.assertEqual(message, "1 is greater than the maximum of 0")
+
+ def test_dependencies_single_element(self):
+ depend, on = "bar", "foo"
+ schema = {u"dependencies": {depend: on}}
+ message = self.message_for(
+ instance={"bar": 2},
+ schema=schema,
+ cls=validators.Draft3Validator,
+ )
+ self.assertEqual(message, "%r is a dependency of %r" % (on, depend))
+
+ def test_dependencies_list_draft3(self):
+ depend, on = "bar", "foo"
+ schema = {u"dependencies": {depend: [on]}}
+ message = self.message_for(
+ instance={"bar": 2},
+ schema=schema,
+ cls=validators.Draft3Validator,
+ )
+ self.assertEqual(message, "%r is a dependency of %r" % (on, depend))
+
+ def test_dependencies_list_draft7(self):
+ depend, on = "bar", "foo"
+ schema = {u"dependencies": {depend: [on]}}
+ message = self.message_for(
+ instance={"bar": 2},
+ schema=schema,
+ cls=validators.Draft7Validator,
+ )
+ self.assertEqual(message, "%r is a dependency of %r" % (on, depend))
+
+ def test_additionalItems_single_failure(self):
+ message = self.message_for(
+ instance=[2],
+ schema={u"items": [], u"additionalItems": False},
+ )
+ self.assertIn("(2 was unexpected)", message)
+
+ def test_additionalItems_multiple_failures(self):
+ message = self.message_for(
+ instance=[1, 2, 3],
+ schema={u"items": [], u"additionalItems": False}
+ )
+ self.assertIn("(1, 2, 3 were unexpected)", message)
+
+ def test_additionalProperties_single_failure(self):
+ additional = "foo"
+ schema = {u"additionalProperties": False}
+ message = self.message_for(instance={additional: 2}, schema=schema)
+ self.assertIn("(%r was unexpected)" % (additional,), message)
+
+ def test_additionalProperties_multiple_failures(self):
+ schema = {u"additionalProperties": False}
+ message = self.message_for(
+ instance=dict.fromkeys(["foo", "bar"]),
+ schema=schema,
+ )
+
+ self.assertIn(repr("foo"), message)
+ self.assertIn(repr("bar"), message)
+ self.assertIn("were unexpected)", message)
+
+ def test_const(self):
+ schema = {u"const": 12}
+ message = self.message_for(
+ instance={"foo": "bar"},
+ schema=schema,
+ cls=validators.Draft6Validator,
+ )
+ self.assertIn("12 was expected", message)
+
+ def test_contains(self):
+ schema = {u"contains": {u"const": 12}}
+ message = self.message_for(
+ instance=[2, {}, []],
+ schema=schema,
+ cls=validators.Draft6Validator,
+ )
+ self.assertIn(
+ "None of [2, {}, []] are valid under the given schema",
+ message,
+ )
+
+ def test_invalid_format_default_message(self):
+ checker = FormatChecker(formats=())
+ checker.checks(u"thing")(lambda value: False)
+
+ schema = {u"format": u"thing"}
+ message = self.message_for(
+ instance="bla",
+ schema=schema,
+ format_checker=checker,
+ )
+
+ self.assertIn(repr("bla"), message)
+ self.assertIn(repr("thing"), message)
+ self.assertIn("is not a", message)
+
+ def test_additionalProperties_false_patternProperties(self):
+ schema = {u"type": u"object",
+ u"additionalProperties": False,
+ u"patternProperties": {
+ u"^abc$": {u"type": u"string"},
+ u"^def$": {u"type": u"string"},
+ }}
+ message = self.message_for(
+ instance={u"zebra": 123},
+ schema=schema,
+ cls=validators.Draft4Validator,
+ )
+ self.assertEqual(
+ message,
+ "{} does not match any of the regexes: {}, {}".format(
+ repr(u"zebra"), repr(u"^abc$"), repr(u"^def$"),
+ ),
+ )
+ message = self.message_for(
+ instance={u"zebra": 123, u"fish": 456},
+ schema=schema,
+ cls=validators.Draft4Validator,
+ )
+ self.assertEqual(
+ message,
+ "{}, {} do not match any of the regexes: {}, {}".format(
+ repr(u"fish"), repr(u"zebra"), repr(u"^abc$"), repr(u"^def$")
+ ),
+ )
+
+ def test_False_schema(self):
+ message = self.message_for(
+ instance="something",
+ schema=False,
+ cls=validators.Draft7Validator,
+ )
+ self.assertIn("False schema does not allow 'something'", message)
+
+
+class TestValidationErrorDetails(TestCase):
+ # TODO: These really need unit tests for each individual validator, rather
+ # than just these higher level tests.
+ def test_anyOf(self):
+ instance = 5
+ schema = {
+ "anyOf": [
+ {"minimum": 20},
+ {"type": "string"},
+ ],
+ }
+
+ validator = validators.Draft4Validator(schema)
+ errors = list(validator.iter_errors(instance))
+ self.assertEqual(len(errors), 1)
+ e = errors[0]
+
+ self.assertEqual(e.validator, "anyOf")
+ self.assertEqual(e.validator_value, schema["anyOf"])
+ self.assertEqual(e.instance, instance)
+ self.assertEqual(e.schema, schema)
+ self.assertIsNone(e.parent)
+
+ self.assertEqual(e.path, deque([]))
+ self.assertEqual(e.relative_path, deque([]))
+ self.assertEqual(e.absolute_path, deque([]))
+
+ self.assertEqual(e.schema_path, deque(["anyOf"]))
+ self.assertEqual(e.relative_schema_path, deque(["anyOf"]))
+ self.assertEqual(e.absolute_schema_path, deque(["anyOf"]))
+
+ self.assertEqual(len(e.context), 2)
+
+ e1, e2 = sorted_errors(e.context)
+
+ self.assertEqual(e1.validator, "minimum")
+ self.assertEqual(e1.validator_value, schema["anyOf"][0]["minimum"])
+ self.assertEqual(e1.instance, instance)
+ self.assertEqual(e1.schema, schema["anyOf"][0])
+ self.assertIs(e1.parent, e)
+
+ self.assertEqual(e1.path, deque([]))
+ self.assertEqual(e1.absolute_path, deque([]))
+ self.assertEqual(e1.relative_path, deque([]))
+
+ self.assertEqual(e1.schema_path, deque([0, "minimum"]))
+ self.assertEqual(e1.relative_schema_path, deque([0, "minimum"]))
+ self.assertEqual(
+ e1.absolute_schema_path, deque(["anyOf", 0, "minimum"]),
+ )
+
+ self.assertFalse(e1.context)
+
+ self.assertEqual(e2.validator, "type")
+ self.assertEqual(e2.validator_value, schema["anyOf"][1]["type"])
+ self.assertEqual(e2.instance, instance)
+ self.assertEqual(e2.schema, schema["anyOf"][1])
+ self.assertIs(e2.parent, e)
+
+ self.assertEqual(e2.path, deque([]))
+ self.assertEqual(e2.relative_path, deque([]))
+ self.assertEqual(e2.absolute_path, deque([]))
+
+ self.assertEqual(e2.schema_path, deque([1, "type"]))
+ self.assertEqual(e2.relative_schema_path, deque([1, "type"]))
+ self.assertEqual(e2.absolute_schema_path, deque(["anyOf", 1, "type"]))
+
+ self.assertEqual(len(e2.context), 0)
+
+ def test_type(self):
+ instance = {"foo": 1}
+ schema = {
+ "type": [
+ {"type": "integer"},
+ {
+ "type": "object",
+ "properties": {"foo": {"enum": [2]}},
+ },
+ ],
+ }
+
+ validator = validators.Draft3Validator(schema)
+ errors = list(validator.iter_errors(instance))
+ self.assertEqual(len(errors), 1)
+ e = errors[0]
+
+ self.assertEqual(e.validator, "type")
+ self.assertEqual(e.validator_value, schema["type"])
+ self.assertEqual(e.instance, instance)
+ self.assertEqual(e.schema, schema)
+ self.assertIsNone(e.parent)
+
+ self.assertEqual(e.path, deque([]))
+ self.assertEqual(e.relative_path, deque([]))
+ self.assertEqual(e.absolute_path, deque([]))
+
+ self.assertEqual(e.schema_path, deque(["type"]))
+ self.assertEqual(e.relative_schema_path, deque(["type"]))
+ self.assertEqual(e.absolute_schema_path, deque(["type"]))
+
+ self.assertEqual(len(e.context), 2)
+
+ e1, e2 = sorted_errors(e.context)
+
+ self.assertEqual(e1.validator, "type")
+ self.assertEqual(e1.validator_value, schema["type"][0]["type"])
+ self.assertEqual(e1.instance, instance)
+ self.assertEqual(e1.schema, schema["type"][0])
+ self.assertIs(e1.parent, e)
+
+ self.assertEqual(e1.path, deque([]))
+ self.assertEqual(e1.relative_path, deque([]))
+ self.assertEqual(e1.absolute_path, deque([]))
+
+ self.assertEqual(e1.schema_path, deque([0, "type"]))
+ self.assertEqual(e1.relative_schema_path, deque([0, "type"]))
+ self.assertEqual(e1.absolute_schema_path, deque(["type", 0, "type"]))
+
+ self.assertFalse(e1.context)
+
+ self.assertEqual(e2.validator, "enum")
+ self.assertEqual(e2.validator_value, [2])
+ self.assertEqual(e2.instance, 1)
+ self.assertEqual(e2.schema, {u"enum": [2]})
+ self.assertIs(e2.parent, e)
+
+ self.assertEqual(e2.path, deque(["foo"]))
+ self.assertEqual(e2.relative_path, deque(["foo"]))
+ self.assertEqual(e2.absolute_path, deque(["foo"]))
+
+ self.assertEqual(
+ e2.schema_path, deque([1, "properties", "foo", "enum"]),
+ )
+ self.assertEqual(
+ e2.relative_schema_path, deque([1, "properties", "foo", "enum"]),
+ )
+ self.assertEqual(
+ e2.absolute_schema_path,
+ deque(["type", 1, "properties", "foo", "enum"]),
+ )
+
+ self.assertFalse(e2.context)
+
+ def test_single_nesting(self):
+ instance = {"foo": 2, "bar": [1], "baz": 15, "quux": "spam"}
+ schema = {
+ "properties": {
+ "foo": {"type": "string"},
+ "bar": {"minItems": 2},
+ "baz": {"maximum": 10, "enum": [2, 4, 6, 8]},
+ },
+ }
+
+ validator = validators.Draft3Validator(schema)
+ errors = validator.iter_errors(instance)
+ e1, e2, e3, e4 = sorted_errors(errors)
+
+ self.assertEqual(e1.path, deque(["bar"]))
+ self.assertEqual(e2.path, deque(["baz"]))
+ self.assertEqual(e3.path, deque(["baz"]))
+ self.assertEqual(e4.path, deque(["foo"]))
+
+ self.assertEqual(e1.relative_path, deque(["bar"]))
+ self.assertEqual(e2.relative_path, deque(["baz"]))
+ self.assertEqual(e3.relative_path, deque(["baz"]))
+ self.assertEqual(e4.relative_path, deque(["foo"]))
+
+ self.assertEqual(e1.absolute_path, deque(["bar"]))
+ self.assertEqual(e2.absolute_path, deque(["baz"]))
+ self.assertEqual(e3.absolute_path, deque(["baz"]))
+ self.assertEqual(e4.absolute_path, deque(["foo"]))
+
+ self.assertEqual(e1.validator, "minItems")
+ self.assertEqual(e2.validator, "enum")
+ self.assertEqual(e3.validator, "maximum")
+ self.assertEqual(e4.validator, "type")
+
+ def test_multiple_nesting(self):
+ instance = [1, {"foo": 2, "bar": {"baz": [1]}}, "quux"]
+ schema = {
+ "type": "string",
+ "items": {
+ "type": ["string", "object"],
+ "properties": {
+ "foo": {"enum": [1, 3]},
+ "bar": {
+ "type": "array",
+ "properties": {
+ "bar": {"required": True},
+ "baz": {"minItems": 2},
+ },
+ },
+ },
+ },
+ }
+
+ validator = validators.Draft3Validator(schema)
+ errors = validator.iter_errors(instance)
+ e1, e2, e3, e4, e5, e6 = sorted_errors(errors)
+
+ self.assertEqual(e1.path, deque([]))
+ self.assertEqual(e2.path, deque([0]))
+ self.assertEqual(e3.path, deque([1, "bar"]))
+ self.assertEqual(e4.path, deque([1, "bar", "bar"]))
+ self.assertEqual(e5.path, deque([1, "bar", "baz"]))
+ self.assertEqual(e6.path, deque([1, "foo"]))
+
+ self.assertEqual(e1.schema_path, deque(["type"]))
+ self.assertEqual(e2.schema_path, deque(["items", "type"]))
+ self.assertEqual(
+ list(e3.schema_path), ["items", "properties", "bar", "type"],
+ )
+ self.assertEqual(
+ list(e4.schema_path),
+ ["items", "properties", "bar", "properties", "bar", "required"],
+ )
+ self.assertEqual(
+ list(e5.schema_path),
+ ["items", "properties", "bar", "properties", "baz", "minItems"]
+ )
+ self.assertEqual(
+ list(e6.schema_path), ["items", "properties", "foo", "enum"],
+ )
+
+ self.assertEqual(e1.validator, "type")
+ self.assertEqual(e2.validator, "type")
+ self.assertEqual(e3.validator, "type")
+ self.assertEqual(e4.validator, "required")
+ self.assertEqual(e5.validator, "minItems")
+ self.assertEqual(e6.validator, "enum")
+
+ def test_recursive(self):
+ schema = {
+ "definitions": {
+ "node": {
+ "anyOf": [{
+ "type": "object",
+ "required": ["name", "children"],
+ "properties": {
+ "name": {
+ "type": "string",
+ },
+ "children": {
+ "type": "object",
+ "patternProperties": {
+ "^.*$": {
+ "$ref": "#/definitions/node",
+ },
+ },
+ },
+ },
+ }],
+ },
+ },
+ "type": "object",
+ "required": ["root"],
+ "properties": {"root": {"$ref": "#/definitions/node"}},
+ }
+
+ instance = {
+ "root": {
+ "name": "root",
+ "children": {
+ "a": {
+ "name": "a",
+ "children": {
+ "ab": {
+ "name": "ab",
+ # missing "children"
+ },
+ },
+ },
+ },
+ },
+ }
+ validator = validators.Draft4Validator(schema)
+
+ e, = validator.iter_errors(instance)
+ self.assertEqual(e.absolute_path, deque(["root"]))
+ self.assertEqual(
+ e.absolute_schema_path, deque(["properties", "root", "anyOf"]),
+ )
+
+ e1, = e.context
+ self.assertEqual(e1.absolute_path, deque(["root", "children", "a"]))
+ self.assertEqual(
+ e1.absolute_schema_path, deque(
+ [
+ "properties",
+ "root",
+ "anyOf",
+ 0,
+ "properties",
+ "children",
+ "patternProperties",
+ "^.*$",
+ "anyOf",
+ ],
+ ),
+ )
+
+ e2, = e1.context
+ self.assertEqual(
+ e2.absolute_path, deque(
+ ["root", "children", "a", "children", "ab"],
+ ),
+ )
+ self.assertEqual(
+ e2.absolute_schema_path, deque(
+ [
+ "properties",
+ "root",
+ "anyOf",
+ 0,
+ "properties",
+ "children",
+ "patternProperties",
+ "^.*$",
+ "anyOf",
+ 0,
+ "properties",
+ "children",
+ "patternProperties",
+ "^.*$",
+ "anyOf",
+ ],
+ ),
+ )
+
+ def test_additionalProperties(self):
+ instance = {"bar": "bar", "foo": 2}
+ schema = {"additionalProperties": {"type": "integer", "minimum": 5}}
+
+ validator = validators.Draft3Validator(schema)
+ errors = validator.iter_errors(instance)
+ e1, e2 = sorted_errors(errors)
+
+ self.assertEqual(e1.path, deque(["bar"]))
+ self.assertEqual(e2.path, deque(["foo"]))
+
+ self.assertEqual(e1.validator, "type")
+ self.assertEqual(e2.validator, "minimum")
+
+ def test_patternProperties(self):
+ instance = {"bar": 1, "foo": 2}
+ schema = {
+ "patternProperties": {
+ "bar": {"type": "string"},
+ "foo": {"minimum": 5},
+ },
+ }
+
+ validator = validators.Draft3Validator(schema)
+ errors = validator.iter_errors(instance)
+ e1, e2 = sorted_errors(errors)
+
+ self.assertEqual(e1.path, deque(["bar"]))
+ self.assertEqual(e2.path, deque(["foo"]))
+
+ self.assertEqual(e1.validator, "type")
+ self.assertEqual(e2.validator, "minimum")
+
+ def test_additionalItems(self):
+ instance = ["foo", 1]
+ schema = {
+ "items": [],
+ "additionalItems": {"type": "integer", "minimum": 5},
+ }
+
+ validator = validators.Draft3Validator(schema)
+ errors = validator.iter_errors(instance)
+ e1, e2 = sorted_errors(errors)
+
+ self.assertEqual(e1.path, deque([0]))
+ self.assertEqual(e2.path, deque([1]))
+
+ self.assertEqual(e1.validator, "type")
+ self.assertEqual(e2.validator, "minimum")
+
+ def test_additionalItems_with_items(self):
+ instance = ["foo", "bar", 1]
+ schema = {
+ "items": [{}],
+ "additionalItems": {"type": "integer", "minimum": 5},
+ }
+
+ validator = validators.Draft3Validator(schema)
+ errors = validator.iter_errors(instance)
+ e1, e2 = sorted_errors(errors)
+
+ self.assertEqual(e1.path, deque([1]))
+ self.assertEqual(e2.path, deque([2]))
+
+ self.assertEqual(e1.validator, "type")
+ self.assertEqual(e2.validator, "minimum")
+
+ def test_propertyNames(self):
+ instance = {"foo": 12}
+ schema = {"propertyNames": {"not": {"const": "foo"}}}
+
+ validator = validators.Draft7Validator(schema)
+ error, = validator.iter_errors(instance)
+
+ self.assertEqual(error.validator, "not")
+ self.assertEqual(
+ error.message,
+ "%r is not allowed for %r" % ({"const": "foo"}, "foo"),
+ )
+ self.assertEqual(error.path, deque([]))
+ self.assertEqual(error.schema_path, deque(["propertyNames", "not"]))
+
+ def test_if_then(self):
+ schema = {
+ "if": {"const": 12},
+ "then": {"const": 13},
+ }
+
+ validator = validators.Draft7Validator(schema)
+ error, = validator.iter_errors(12)
+
+ self.assertEqual(error.validator, "const")
+ self.assertEqual(error.message, "13 was expected")
+ self.assertEqual(error.path, deque([]))
+ self.assertEqual(error.schema_path, deque(["if", "then", "const"]))
+
+ def test_if_else(self):
+ schema = {
+ "if": {"const": 12},
+ "else": {"const": 13},
+ }
+
+ validator = validators.Draft7Validator(schema)
+ error, = validator.iter_errors(15)
+
+ self.assertEqual(error.validator, "const")
+ self.assertEqual(error.message, "13 was expected")
+ self.assertEqual(error.path, deque([]))
+ self.assertEqual(error.schema_path, deque(["if", "else", "const"]))
+
+ def test_boolean_schema_False(self):
+ validator = validators.Draft7Validator(False)
+ error, = validator.iter_errors(12)
+
+ self.assertEqual(
+ (
+ error.message,
+ error.validator,
+ error.validator_value,
+ error.instance,
+ error.schema,
+ error.schema_path,
+ ),
+ (
+ "False schema does not allow 12",
+ None,
+ None,
+ 12,
+ False,
+ deque([]),
+ ),
+ )
+
+ def test_ref(self):
+ ref, schema = "someRef", {"additionalProperties": {"type": "integer"}}
+ validator = validators.Draft7Validator(
+ {"$ref": ref},
+ resolver=validators.RefResolver("", {}, store={ref: schema}),
+ )
+ error, = validator.iter_errors({"foo": "notAnInteger"})
+
+ self.assertEqual(
+ (
+ error.message,
+ error.validator,
+ error.validator_value,
+ error.instance,
+ error.absolute_path,
+ error.schema,
+ error.schema_path,
+ ),
+ (
+ "'notAnInteger' is not of type 'integer'",
+ "type",
+ "integer",
+ "notAnInteger",
+ deque(["foo"]),
+ {"type": "integer"},
+ deque(["additionalProperties", "type"]),
+ ),
+ )
+
+
+class MetaSchemaTestsMixin(object):
+ # TODO: These all belong upstream
+ def test_invalid_properties(self):
+ with self.assertRaises(exceptions.SchemaError):
+ self.Validator.check_schema({"properties": {"test": object()}})
+
+ def test_minItems_invalid_string(self):
+ with self.assertRaises(exceptions.SchemaError):
+ # needs to be an integer
+ self.Validator.check_schema({"minItems": "1"})
+
+ def test_enum_allows_empty_arrays(self):
+ """
+ Technically, all the spec says is they SHOULD have elements, not MUST.
+
+ See https://github.com/Julian/jsonschema/issues/529.
+ """
+ self.Validator.check_schema({"enum": []})
+
+ def test_enum_allows_non_unique_items(self):
+ """
+ Technically, all the spec says is they SHOULD be unique, not MUST.
+
+ See https://github.com/Julian/jsonschema/issues/529.
+ """
+ self.Validator.check_schema({"enum": [12, 12]})
+
+
+class ValidatorTestMixin(MetaSchemaTestsMixin, object):
+ def test_valid_instances_are_valid(self):
+ schema, instance = self.valid
+ self.assertTrue(self.Validator(schema).is_valid(instance))
+
+ def test_invalid_instances_are_not_valid(self):
+ schema, instance = self.invalid
+ self.assertFalse(self.Validator(schema).is_valid(instance))
+
+ def test_non_existent_properties_are_ignored(self):
+ self.Validator({object(): object()}).validate(instance=object())
+
+ def test_it_creates_a_ref_resolver_if_not_provided(self):
+ self.assertIsInstance(
+ self.Validator({}).resolver,
+ validators.RefResolver,
+ )
+
+ def test_it_delegates_to_a_ref_resolver(self):
+ ref, schema = "someCoolRef", {"type": "integer"}
+ resolver = validators.RefResolver("", {}, store={ref: schema})
+ validator = self.Validator({"$ref": ref}, resolver=resolver)
+
+ with self.assertRaises(exceptions.ValidationError):
+ validator.validate(None)
+
+ def test_it_delegates_to_a_legacy_ref_resolver(self):
+ """
+ Legacy RefResolvers support only the context manager form of
+ resolution.
+ """
+
+ class LegacyRefResolver(object):
+ @contextmanager
+ def resolving(this, ref):
+ self.assertEqual(ref, "the ref")
+ yield {"type": "integer"}
+
+ resolver = LegacyRefResolver()
+ schema = {"$ref": "the ref"}
+
+ with self.assertRaises(exceptions.ValidationError):
+ self.Validator(schema, resolver=resolver).validate(None)
+
+ def test_is_type_is_true_for_valid_type(self):
+ self.assertTrue(self.Validator({}).is_type("foo", "string"))
+
+ def test_is_type_is_false_for_invalid_type(self):
+ self.assertFalse(self.Validator({}).is_type("foo", "array"))
+
+ def test_is_type_evades_bool_inheriting_from_int(self):
+ self.assertFalse(self.Validator({}).is_type(True, "integer"))
+ self.assertFalse(self.Validator({}).is_type(True, "number"))
+
+ @unittest.skipIf(PY3, "In Python 3 json.load always produces unicode")
+ def test_string_a_bytestring_is_a_string(self):
+ self.Validator({"type": "string"}).validate(b"foo")
+
+ def test_patterns_can_be_native_strings(self):
+ """
+ See https://github.com/Julian/jsonschema/issues/611.
+ """
+ self.Validator({"pattern": "foo"}).validate("foo")
+
+ def test_it_can_validate_with_decimals(self):
+ schema = {"items": {"type": "number"}}
+ Validator = validators.extend(
+ self.Validator,
+ type_checker=self.Validator.TYPE_CHECKER.redefine(
+ "number",
+ lambda checker, thing: isinstance(
+ thing, (int, float, Decimal),
+ ) and not isinstance(thing, bool),
+ )
+ )
+
+ validator = Validator(schema)
+ validator.validate([1, 1.1, Decimal(1) / Decimal(8)])
+
+ invalid = ["foo", {}, [], True, None]
+ self.assertEqual(
+ [error.instance for error in validator.iter_errors(invalid)],
+ invalid,
+ )
+
+ def test_it_returns_true_for_formats_it_does_not_know_about(self):
+ validator = self.Validator(
+ {"format": "carrot"}, format_checker=FormatChecker(),
+ )
+ validator.validate("bugs")
+
+ def test_it_does_not_validate_formats_by_default(self):
+ validator = self.Validator({})
+ self.assertIsNone(validator.format_checker)
+
+ def test_it_validates_formats_if_a_checker_is_provided(self):
+ checker = FormatChecker()
+ bad = ValueError("Bad!")
+
+ @checker.checks("foo", raises=ValueError)
+ def check(value):
+ if value == "good":
+ return True
+ elif value == "bad":
+ raise bad
+ else: # pragma: no cover
+ self.fail("What is {}? [Baby Don't Hurt Me]".format(value))
+
+ validator = self.Validator(
+ {"format": "foo"}, format_checker=checker,
+ )
+
+ validator.validate("good")
+ with self.assertRaises(exceptions.ValidationError) as cm:
+ validator.validate("bad")
+
+ # Make sure original cause is attached
+ self.assertIs(cm.exception.cause, bad)
+
+ def test_non_string_custom_type(self):
+ non_string_type = object()
+ schema = {"type": [non_string_type]}
+ Crazy = validators.extend(
+ self.Validator,
+ type_checker=self.Validator.TYPE_CHECKER.redefine(
+ non_string_type,
+ lambda checker, thing: isinstance(thing, int),
+ )
+ )
+ Crazy(schema).validate(15)
+
+ def test_it_properly_formats_tuples_in_errors(self):
+ """
+ A tuple instance properly formats validation errors for uniqueItems.
+
+ See https://github.com/Julian/jsonschema/pull/224
+ """
+ TupleValidator = validators.extend(
+ self.Validator,
+ type_checker=self.Validator.TYPE_CHECKER.redefine(
+ "array",
+ lambda checker, thing: isinstance(thing, tuple),
+ )
+ )
+ with self.assertRaises(exceptions.ValidationError) as e:
+ TupleValidator({"uniqueItems": True}).validate((1, 1))
+ self.assertIn("(1, 1) has non-unique elements", str(e.exception))
+
+
+class AntiDraft6LeakMixin(object):
+ """
+ Make sure functionality from draft 6 doesn't leak backwards in time.
+ """
+
+ def test_True_is_not_a_schema(self):
+ with self.assertRaises(exceptions.SchemaError) as e:
+ self.Validator.check_schema(True)
+ self.assertIn("True is not of type", str(e.exception))
+
+ def test_False_is_not_a_schema(self):
+ with self.assertRaises(exceptions.SchemaError) as e:
+ self.Validator.check_schema(False)
+ self.assertIn("False is not of type", str(e.exception))
+
+ @unittest.skip(bug(523))
+ def test_True_is_not_a_schema_even_if_you_forget_to_check(self):
+ resolver = validators.RefResolver("", {})
+ with self.assertRaises(Exception) as e:
+ self.Validator(True, resolver=resolver).validate(12)
+ self.assertNotIsInstance(e.exception, exceptions.ValidationError)
+
+ @unittest.skip(bug(523))
+ def test_False_is_not_a_schema_even_if_you_forget_to_check(self):
+ resolver = validators.RefResolver("", {})
+ with self.assertRaises(Exception) as e:
+ self.Validator(False, resolver=resolver).validate(12)
+ self.assertNotIsInstance(e.exception, exceptions.ValidationError)
+
+
+class TestDraft3Validator(AntiDraft6LeakMixin, ValidatorTestMixin, TestCase):
+ Validator = validators.Draft3Validator
+ valid = {}, {}
+ invalid = {"type": "integer"}, "foo"
+
+ def test_any_type_is_valid_for_type_any(self):
+ validator = self.Validator({"type": "any"})
+ validator.validate(object())
+
+ def test_any_type_is_redefinable(self):
+ """
+ Sigh, because why not.
+ """
+ Crazy = validators.extend(
+ self.Validator,
+ type_checker=self.Validator.TYPE_CHECKER.redefine(
+ "any", lambda checker, thing: isinstance(thing, int),
+ )
+ )
+ validator = Crazy({"type": "any"})
+ validator.validate(12)
+ with self.assertRaises(exceptions.ValidationError):
+ validator.validate("foo")
+
+ def test_is_type_is_true_for_any_type(self):
+ self.assertTrue(self.Validator({}).is_valid(object(), {"type": "any"}))
+
+ def test_is_type_does_not_evade_bool_if_it_is_being_tested(self):
+ self.assertTrue(self.Validator({}).is_type(True, "boolean"))
+ self.assertTrue(self.Validator({}).is_valid(True, {"type": "any"}))
+
+
+class TestDraft4Validator(AntiDraft6LeakMixin, ValidatorTestMixin, TestCase):
+ Validator = validators.Draft4Validator
+ valid = {}, {}
+ invalid = {"type": "integer"}, "foo"
+
+
+class TestDraft6Validator(ValidatorTestMixin, TestCase):
+ Validator = validators.Draft6Validator
+ valid = {}, {}
+ invalid = {"type": "integer"}, "foo"
+
+
+class TestDraft7Validator(ValidatorTestMixin, TestCase):
+ Validator = validators.Draft7Validator
+ valid = {}, {}
+ invalid = {"type": "integer"}, "foo"
+
+
+class TestValidatorFor(SynchronousTestCase):
+ def test_draft_3(self):
+ schema = {"$schema": "http://json-schema.org/draft-03/schema"}
+ self.assertIs(
+ validators.validator_for(schema),
+ validators.Draft3Validator,
+ )
+
+ schema = {"$schema": "http://json-schema.org/draft-03/schema#"}
+ self.assertIs(
+ validators.validator_for(schema),
+ validators.Draft3Validator,
+ )
+
+ def test_draft_4(self):
+ schema = {"$schema": "http://json-schema.org/draft-04/schema"}
+ self.assertIs(
+ validators.validator_for(schema),
+ validators.Draft4Validator,
+ )
+
+ schema = {"$schema": "http://json-schema.org/draft-04/schema#"}
+ self.assertIs(
+ validators.validator_for(schema),
+ validators.Draft4Validator,
+ )
+
+ def test_draft_6(self):
+ schema = {"$schema": "http://json-schema.org/draft-06/schema"}
+ self.assertIs(
+ validators.validator_for(schema),
+ validators.Draft6Validator,
+ )
+
+ schema = {"$schema": "http://json-schema.org/draft-06/schema#"}
+ self.assertIs(
+ validators.validator_for(schema),
+ validators.Draft6Validator,
+ )
+
+ def test_draft_7(self):
+ schema = {"$schema": "http://json-schema.org/draft-07/schema"}
+ self.assertIs(
+ validators.validator_for(schema),
+ validators.Draft7Validator,
+ )
+
+ schema = {"$schema": "http://json-schema.org/draft-07/schema#"}
+ self.assertIs(
+ validators.validator_for(schema),
+ validators.Draft7Validator,
+ )
+
+ def test_True(self):
+ self.assertIs(
+ validators.validator_for(True),
+ validators._LATEST_VERSION,
+ )
+
+ def test_False(self):
+ self.assertIs(
+ validators.validator_for(False),
+ validators._LATEST_VERSION,
+ )
+
+ def test_custom_validator(self):
+ Validator = validators.create(
+ meta_schema={"id": "meta schema id"},
+ version="12",
+ id_of=lambda s: s.get("id", ""),
+ )
+ schema = {"$schema": "meta schema id"}
+ self.assertIs(
+ validators.validator_for(schema),
+ Validator,
+ )
+
+ def test_custom_validator_draft6(self):
+ Validator = validators.create(
+ meta_schema={"$id": "meta schema $id"},
+ version="13",
+ )
+ schema = {"$schema": "meta schema $id"}
+ self.assertIs(
+ validators.validator_for(schema),
+ Validator,
+ )
+
+ def test_validator_for_jsonschema_default(self):
+ self.assertIs(validators.validator_for({}), validators._LATEST_VERSION)
+
+ def test_validator_for_custom_default(self):
+ self.assertIs(validators.validator_for({}, default=None), None)
+
+ def test_warns_if_meta_schema_specified_was_not_found(self):
+ self.assertWarns(
+ category=DeprecationWarning,
+ message=(
+ "The metaschema specified by $schema was not found. "
+ "Using the latest draft to validate, but this will raise "
+ "an error in the future."
+ ),
+ # https://tm.tl/9363 :'(
+ filename=sys.modules[self.assertWarns.__module__].__file__,
+
+ f=validators.validator_for,
+ schema={u"$schema": "unknownSchema"},
+ default={},
+ )
+
+ def test_does_not_warn_if_meta_schema_is_unspecified(self):
+ validators.validator_for(schema={}, default={}),
+ self.assertFalse(self.flushWarnings())
+
+
+class TestValidate(SynchronousTestCase):
+ def assertUses(self, schema, Validator):
+ result = []
+ self.patch(Validator, "check_schema", result.append)
+ validators.validate({}, schema)
+ self.assertEqual(result, [schema])
+
+ def test_draft3_validator_is_chosen(self):
+ self.assertUses(
+ schema={"$schema": "http://json-schema.org/draft-03/schema#"},
+ Validator=validators.Draft3Validator,
+ )
+ # Make sure it works without the empty fragment
+ self.assertUses(
+ schema={"$schema": "http://json-schema.org/draft-03/schema"},
+ Validator=validators.Draft3Validator,
+ )
+
+ def test_draft4_validator_is_chosen(self):
+ self.assertUses(
+ schema={"$schema": "http://json-schema.org/draft-04/schema#"},
+ Validator=validators.Draft4Validator,
+ )
+ # Make sure it works without the empty fragment
+ self.assertUses(
+ schema={"$schema": "http://json-schema.org/draft-04/schema"},
+ Validator=validators.Draft4Validator,
+ )
+
+ def test_draft6_validator_is_chosen(self):
+ self.assertUses(
+ schema={"$schema": "http://json-schema.org/draft-06/schema#"},
+ Validator=validators.Draft6Validator,
+ )
+ # Make sure it works without the empty fragment
+ self.assertUses(
+ schema={"$schema": "http://json-schema.org/draft-06/schema"},
+ Validator=validators.Draft6Validator,
+ )
+
+ def test_draft7_validator_is_chosen(self):
+ self.assertUses(
+ schema={"$schema": "http://json-schema.org/draft-07/schema#"},
+ Validator=validators.Draft7Validator,
+ )
+ # Make sure it works without the empty fragment
+ self.assertUses(
+ schema={"$schema": "http://json-schema.org/draft-07/schema"},
+ Validator=validators.Draft7Validator,
+ )
+
+ def test_draft7_validator_is_the_default(self):
+ self.assertUses(schema={}, Validator=validators.Draft7Validator)
+
+ def test_validation_error_message(self):
+ with self.assertRaises(exceptions.ValidationError) as e:
+ validators.validate(12, {"type": "string"})
+ self.assertRegexpMatches(
+ str(e.exception),
+ "(?s)Failed validating u?'.*' in schema.*On instance",
+ )
+
+ def test_schema_error_message(self):
+ with self.assertRaises(exceptions.SchemaError) as e:
+ validators.validate(12, {"type": 12})
+ self.assertRegexpMatches(
+ str(e.exception),
+ "(?s)Failed validating u?'.*' in metaschema.*On schema",
+ )
+
+ def test_it_uses_best_match(self):
+ # This is a schema that best_match will recurse into
+ schema = {"oneOf": [{"type": "string"}, {"type": "array"}]}
+ with self.assertRaises(exceptions.ValidationError) as e:
+ validators.validate(12, schema)
+ self.assertIn("12 is not of type", str(e.exception))
+
+
+class TestRefResolver(SynchronousTestCase):
+
+ base_uri = ""
+ stored_uri = "foo://stored"
+ stored_schema = {"stored": "schema"}
+
+ def setUp(self):
+ self.referrer = {}
+ self.store = {self.stored_uri: self.stored_schema}
+ self.resolver = validators.RefResolver(
+ self.base_uri, self.referrer, self.store,
+ )
+
+ def test_it_does_not_retrieve_schema_urls_from_the_network(self):
+ ref = validators.Draft3Validator.META_SCHEMA["id"]
+ self.patch(
+ self.resolver,
+ "resolve_remote",
+ lambda *args, **kwargs: self.fail("Should not have been called!"),
+ )
+ with self.resolver.resolving(ref) as resolved:
+ pass
+ self.assertEqual(resolved, validators.Draft3Validator.META_SCHEMA)
+
+ def test_it_resolves_local_refs(self):
+ ref = "#/properties/foo"
+ self.referrer["properties"] = {"foo": object()}
+ with self.resolver.resolving(ref) as resolved:
+ self.assertEqual(resolved, self.referrer["properties"]["foo"])
+
+ def test_it_resolves_local_refs_with_id(self):
+ schema = {"id": "http://bar/schema#", "a": {"foo": "bar"}}
+ resolver = validators.RefResolver.from_schema(
+ schema,
+ id_of=lambda schema: schema.get(u"id", u""),
+ )
+ with resolver.resolving("#/a") as resolved:
+ self.assertEqual(resolved, schema["a"])
+ with resolver.resolving("http://bar/schema#/a") as resolved:
+ self.assertEqual(resolved, schema["a"])
+
+ def test_it_retrieves_stored_refs(self):
+ with self.resolver.resolving(self.stored_uri) as resolved:
+ self.assertIs(resolved, self.stored_schema)
+
+ self.resolver.store["cached_ref"] = {"foo": 12}
+ with self.resolver.resolving("cached_ref#/foo") as resolved:
+ self.assertEqual(resolved, 12)
+
+ def test_it_retrieves_unstored_refs_via_requests(self):
+ ref = "http://bar#baz"
+ schema = {"baz": 12}
+
+ if "requests" in sys.modules:
+ self.addCleanup(
+ sys.modules.__setitem__, "requests", sys.modules["requests"],
+ )
+ sys.modules["requests"] = ReallyFakeRequests({"http://bar": schema})
+
+ with self.resolver.resolving(ref) as resolved:
+ self.assertEqual(resolved, 12)
+
+ def test_it_retrieves_unstored_refs_via_urlopen(self):
+ ref = "http://bar#baz"
+ schema = {"baz": 12}
+
+ if "requests" in sys.modules:
+ self.addCleanup(
+ sys.modules.__setitem__, "requests", sys.modules["requests"],
+ )
+ sys.modules["requests"] = None
+
+ @contextmanager
+ def fake_urlopen(url):
+ self.assertEqual(url, "http://bar")
+ yield BytesIO(json.dumps(schema).encode("utf8"))
+
+ self.addCleanup(setattr, validators, "urlopen", validators.urlopen)
+ validators.urlopen = fake_urlopen
+
+ with self.resolver.resolving(ref) as resolved:
+ pass
+ self.assertEqual(resolved, 12)
+
+ def test_it_retrieves_local_refs_via_urlopen(self):
+ with tempfile.NamedTemporaryFile(delete=False, mode="wt") as tempf:
+ self.addCleanup(os.remove, tempf.name)
+ json.dump({"foo": "bar"}, tempf)
+
+ ref = "file://{}#foo".format(pathname2url(tempf.name))
+ with self.resolver.resolving(ref) as resolved:
+ self.assertEqual(resolved, "bar")
+
+ def test_it_can_construct_a_base_uri_from_a_schema(self):
+ schema = {"id": "foo"}
+ resolver = validators.RefResolver.from_schema(
+ schema,
+ id_of=lambda schema: schema.get(u"id", u""),
+ )
+ self.assertEqual(resolver.base_uri, "foo")
+ self.assertEqual(resolver.resolution_scope, "foo")
+ with resolver.resolving("") as resolved:
+ self.assertEqual(resolved, schema)
+ with resolver.resolving("#") as resolved:
+ self.assertEqual(resolved, schema)
+ with resolver.resolving("foo") as resolved:
+ self.assertEqual(resolved, schema)
+ with resolver.resolving("foo#") as resolved:
+ self.assertEqual(resolved, schema)
+
+ def test_it_can_construct_a_base_uri_from_a_schema_without_id(self):
+ schema = {}
+ resolver = validators.RefResolver.from_schema(schema)
+ self.assertEqual(resolver.base_uri, "")
+ self.assertEqual(resolver.resolution_scope, "")
+ with resolver.resolving("") as resolved:
+ self.assertEqual(resolved, schema)
+ with resolver.resolving("#") as resolved:
+ self.assertEqual(resolved, schema)
+
+ def test_custom_uri_scheme_handlers(self):
+ def handler(url):
+ self.assertEqual(url, ref)
+ return schema
+
+ schema = {"foo": "bar"}
+ ref = "foo://bar"
+ resolver = validators.RefResolver("", {}, handlers={"foo": handler})
+ with resolver.resolving(ref) as resolved:
+ self.assertEqual(resolved, schema)
+
+ def test_cache_remote_on(self):
+ response = [object()]
+
+ def handler(url):
+ try:
+ return response.pop()
+ except IndexError: # pragma: no cover
+ self.fail("Response must not have been cached!")
+
+ ref = "foo://bar"
+ resolver = validators.RefResolver(
+ "", {}, cache_remote=True, handlers={"foo": handler},
+ )
+ with resolver.resolving(ref):
+ pass
+ with resolver.resolving(ref):
+ pass
+
+ def test_cache_remote_off(self):
+ response = [object()]
+
+ def handler(url):
+ try:
+ return response.pop()
+ except IndexError: # pragma: no cover
+ self.fail("Handler called twice!")
+
+ ref = "foo://bar"
+ resolver = validators.RefResolver(
+ "", {}, cache_remote=False, handlers={"foo": handler},
+ )
+ with resolver.resolving(ref):
+ pass
+
+ def test_if_you_give_it_junk_you_get_a_resolution_error(self):
+ error = ValueError("Oh no! What's this?")
+
+ def handler(url):
+ raise error
+
+ ref = "foo://bar"
+ resolver = validators.RefResolver("", {}, handlers={"foo": handler})
+ with self.assertRaises(exceptions.RefResolutionError) as err:
+ with resolver.resolving(ref):
+ self.fail("Shouldn't get this far!") # pragma: no cover
+ self.assertEqual(err.exception, exceptions.RefResolutionError(error))
+
+ def test_helpful_error_message_on_failed_pop_scope(self):
+ resolver = validators.RefResolver("", {})
+ resolver.pop_scope()
+ with self.assertRaises(exceptions.RefResolutionError) as exc:
+ resolver.pop_scope()
+ self.assertIn("Failed to pop the scope", str(exc.exception))
+
+
+def sorted_errors(errors):
+ def key(error):
+ return (
+ [str(e) for e in error.path],
+ [str(e) for e in error.schema_path],
+ )
+ return sorted(errors, key=key)
+
+
+@attr.s
+class ReallyFakeRequests(object):
+
+ _responses = attr.ib()
+
+ def get(self, url):
+ response = self._responses.get(url)
+ if response is None: # pragma: no cover
+ raise ValueError("Unknown URL: " + repr(url))
+ return _ReallyFakeJSONResponse(json.dumps(response))
+
+
+@attr.s
+class _ReallyFakeJSONResponse(object):
+
+ _response = attr.ib()
+
+ def json(self):
+ return json.loads(self._response)
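For readers skimming the error-detail tests above, a minimal sketch of the pattern they exercise; the schema and instance mirror test_anyOf, and nothing beyond the public jsonschema API shown in this file is assumed.

from jsonschema import validators

schema = {"anyOf": [{"minimum": 20}, {"type": "string"}]}
validator = validators.Draft4Validator(schema)

for error in validator.iter_errors(5):
    # The top-level error is for "anyOf"; the per-branch failures
    # ("minimum" and "type") hang off error.context.
    print(error.validator, list(error.schema_path))
    for sub in error.context:
        print("  ", sub.validator, list(sub.absolute_schema_path))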
diff --git a/third_party/python/jsonschema/jsonschema/validators.py b/third_party/python/jsonschema/jsonschema/validators.py
new file mode 100644
index 0000000000..1dc420c70d
--- /dev/null
+++ b/third_party/python/jsonschema/jsonschema/validators.py
@@ -0,0 +1,970 @@
+"""
+Creation and extension of validators, with implementations for existing drafts.
+"""
+from __future__ import division
+
+from warnings import warn
+import contextlib
+import json
+import numbers
+
+from six import add_metaclass
+
+from jsonschema import (
+ _legacy_validators,
+ _types,
+ _utils,
+ _validators,
+ exceptions,
+)
+from jsonschema.compat import (
+ Sequence,
+ int_types,
+ iteritems,
+ lru_cache,
+ str_types,
+ unquote,
+ urldefrag,
+ urljoin,
+ urlopen,
+ urlsplit,
+)
+
+# Sigh. https://gitlab.com/pycqa/flake8/issues/280
+# https://github.com/pyga/ebb-lint/issues/7
+# Imported for backwards compatibility.
+from jsonschema.exceptions import ErrorTree
+ErrorTree
+
+
+class _DontDoThat(Exception):
+ """
+ Raised when a Validator with a non-default type checker is misused.
+
+ Asking one for DEFAULT_TYPES doesn't make sense, since type checkers
+ exist for the unrepresentable cases where DEFAULT_TYPES can't
+ represent the type relationship.
+ """
+
+ def __str__(self):
+ return "DEFAULT_TYPES cannot be used on Validators using TypeCheckers"
+
+
+validators = {}
+meta_schemas = _utils.URIDict()
+
+
+def _generate_legacy_type_checks(types=()):
+ """
+ Generate newer-style type checks out of JSON-type-name-to-type mappings.
+
+ Arguments:
+
+ types (dict):
+
+ A mapping of type names to their Python types
+
+ Returns:
+
+ A dictionary of definitions to pass to `TypeChecker`
+ """
+ types = dict(types)
+
+ def gen_type_check(pytypes):
+ pytypes = _utils.flatten(pytypes)
+
+ def type_check(checker, instance):
+ if isinstance(instance, bool):
+ if bool not in pytypes:
+ return False
+ return isinstance(instance, pytypes)
+
+ return type_check
+
+ definitions = {}
+ for typename, pytypes in iteritems(types):
+ definitions[typename] = gen_type_check(pytypes)
+
+ return definitions
+
+
+_DEPRECATED_DEFAULT_TYPES = {
+ u"array": list,
+ u"boolean": bool,
+ u"integer": int_types,
+ u"null": type(None),
+ u"number": numbers.Number,
+ u"object": dict,
+ u"string": str_types,
+}
+_TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES = _types.TypeChecker(
+ type_checkers=_generate_legacy_type_checks(_DEPRECATED_DEFAULT_TYPES),
+)
+
+
+def validates(version):
+ """
+ Register the decorated validator for a ``version`` of the specification.
+
+ Registered validators and their meta schemas will be considered when
+ parsing ``$schema`` properties' URIs.
+
+ Arguments:
+
+ version (str):
+
+ An identifier to use as the version's name
+
+ Returns:
+
+ collections.Callable:
+
+ a class decorator to decorate the validator with the version
+ """
+
+ def _validates(cls):
+ validators[version] = cls
+ meta_schema_id = cls.ID_OF(cls.META_SCHEMA)
+ if meta_schema_id:
+ meta_schemas[meta_schema_id] = cls
+ return cls
+ return _validates
+
+
+def _DEFAULT_TYPES(self):
+ if self._CREATED_WITH_DEFAULT_TYPES is None:
+ raise _DontDoThat()
+
+ warn(
+ (
+ "The DEFAULT_TYPES attribute is deprecated. "
+ "See the type checker attached to this validator instead."
+ ),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self._DEFAULT_TYPES
+
+
+class _DefaultTypesDeprecatingMetaClass(type):
+ DEFAULT_TYPES = property(_DEFAULT_TYPES)
+
+
+def _id_of(schema):
+ if schema is True or schema is False:
+ return u""
+ return schema.get(u"$id", u"")
+
+
+def create(
+ meta_schema,
+ validators=(),
+ version=None,
+ default_types=None,
+ type_checker=None,
+ id_of=_id_of,
+):
+ """
+ Create a new validator class.
+
+ Arguments:
+
+ meta_schema (collections.Mapping):
+
+ the meta schema for the new validator class
+
+ validators (collections.Mapping):
+
+ a mapping from names to callables, where each callable will
+ validate the schema property with the given name.
+
+ Each callable should take 4 arguments:
+
+ 1. a validator instance,
+ 2. the value of the property being validated within the
+ instance
+ 3. the instance
+ 4. the schema
+
+ version (str):
+
+ an identifier for the version that this validator class will
+ validate. If provided, the returned validator class will
+ have its ``__name__`` set to include the version, and also
+ will have `jsonschema.validators.validates` automatically
+ called for the given version.
+
+ type_checker (jsonschema.TypeChecker):
+
+ a type checker, used when applying the :validator:`type` validator.
+
+ If unprovided, a `jsonschema.TypeChecker` will be created
+ with a set of default types typical of JSON Schema drafts.
+
+ default_types (collections.Mapping):
+
+ .. deprecated:: 3.0.0
+
+ Please use the type_checker argument instead.
+
+ If set, it provides mappings of JSON types to Python types
+ that will be converted to functions and redefined in this
+ object's `jsonschema.TypeChecker`.
+
+ id_of (collections.Callable):
+
+ A function that given a schema, returns its ID.
+
+ Returns:
+
+ a new `jsonschema.IValidator` class
+ """
+
+ if default_types is not None:
+ if type_checker is not None:
+ raise TypeError(
+ "Do not specify default_types when providing a type checker.",
+ )
+ _created_with_default_types = True
+ warn(
+ (
+ "The default_types argument is deprecated. "
+ "Use the type_checker argument instead."
+ ),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ type_checker = _types.TypeChecker(
+ type_checkers=_generate_legacy_type_checks(default_types),
+ )
+ else:
+ default_types = _DEPRECATED_DEFAULT_TYPES
+ if type_checker is None:
+ _created_with_default_types = False
+ type_checker = _TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES
+ elif type_checker is _TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES:
+ _created_with_default_types = False
+ else:
+ _created_with_default_types = None
+
+ @add_metaclass(_DefaultTypesDeprecatingMetaClass)
+ class Validator(object):
+
+ VALIDATORS = dict(validators)
+ META_SCHEMA = dict(meta_schema)
+ TYPE_CHECKER = type_checker
+ ID_OF = staticmethod(id_of)
+
+ DEFAULT_TYPES = property(_DEFAULT_TYPES)
+ _DEFAULT_TYPES = dict(default_types)
+ _CREATED_WITH_DEFAULT_TYPES = _created_with_default_types
+
+ def __init__(
+ self,
+ schema,
+ types=(),
+ resolver=None,
+ format_checker=None,
+ ):
+ if types:
+ warn(
+ (
+ "The types argument is deprecated. Provide "
+ "a type_checker to jsonschema.validators.extend "
+ "instead."
+ ),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ self.TYPE_CHECKER = self.TYPE_CHECKER.redefine_many(
+ _generate_legacy_type_checks(types),
+ )
+
+ if resolver is None:
+ resolver = RefResolver.from_schema(schema, id_of=id_of)
+
+ self.resolver = resolver
+ self.format_checker = format_checker
+ self.schema = schema
+
+ @classmethod
+ def check_schema(cls, schema):
+ for error in cls(cls.META_SCHEMA).iter_errors(schema):
+ raise exceptions.SchemaError.create_from(error)
+
+ def iter_errors(self, instance, _schema=None):
+ if _schema is None:
+ _schema = self.schema
+
+ if _schema is True:
+ return
+ elif _schema is False:
+ yield exceptions.ValidationError(
+ "False schema does not allow %r" % (instance,),
+ validator=None,
+ validator_value=None,
+ instance=instance,
+ schema=_schema,
+ )
+ return
+
+ scope = id_of(_schema)
+ if scope:
+ self.resolver.push_scope(scope)
+ try:
+ ref = _schema.get(u"$ref")
+ if ref is not None:
+ validators = [(u"$ref", ref)]
+ else:
+ validators = iteritems(_schema)
+
+ for k, v in validators:
+ validator = self.VALIDATORS.get(k)
+ if validator is None:
+ continue
+
+ errors = validator(self, v, instance, _schema) or ()
+ for error in errors:
+ # set details if not already set by the called fn
+ error._set(
+ validator=k,
+ validator_value=v,
+ instance=instance,
+ schema=_schema,
+ )
+ if k != u"$ref":
+ error.schema_path.appendleft(k)
+ yield error
+ finally:
+ if scope:
+ self.resolver.pop_scope()
+
+ def descend(self, instance, schema, path=None, schema_path=None):
+ for error in self.iter_errors(instance, schema):
+ if path is not None:
+ error.path.appendleft(path)
+ if schema_path is not None:
+ error.schema_path.appendleft(schema_path)
+ yield error
+
+ def validate(self, *args, **kwargs):
+ for error in self.iter_errors(*args, **kwargs):
+ raise error
+
+ def is_type(self, instance, type):
+ try:
+ return self.TYPE_CHECKER.is_type(instance, type)
+ except exceptions.UndefinedTypeCheck:
+ raise exceptions.UnknownType(type, instance, self.schema)
+
+ def is_valid(self, instance, _schema=None):
+ error = next(self.iter_errors(instance, _schema), None)
+ return error is None
+
+ if version is not None:
+ Validator = validates(version)(Validator)
+ Validator.__name__ = version.title().replace(" ", "") + "Validator"
+
+ return Validator
+
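A small sketch of how `create` can be used from outside this module; the meta schema, the `even` callable, the version string, and the `$id` URI below are hypothetical, chosen only to show the calling convention described in the docstring (each validator callable receives the validator, the schema property's value, the instance, and the schema, and yields ValidationErrors). Passing `version=` also registers the class via `validates`, as the code above does for the bundled drafts.

from jsonschema import exceptions, validators

def even(validator, value, instance, schema):
    # `value` is the schema value for the hypothetical "even" keyword.
    if value and isinstance(instance, int) and instance % 2:
        yield exceptions.ValidationError("%r is not even" % (instance,))

EvenValidator = validators.create(
    meta_schema={"$id": "https://example.invalid/even-schema"},
    validators={"even": even},
    version="even-draft",
)

EvenValidator({"even": True}).validate(4)    # passes silently
# EvenValidator({"even": True}).validate(3)  # would raise ValidationError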
+
+def extend(validator, validators=(), version=None, type_checker=None):
+ """
+ Create a new validator class by extending an existing one.
+
+ Arguments:
+
+ validator (jsonschema.IValidator):
+
+ an existing validator class
+
+ validators (collections.Mapping):
+
+ a mapping of new validator callables to extend with, whose
+ structure is as in `create`.
+
+ .. note::
+
+ Any validator callables with the same name as an
+ existing one will (silently) replace the old validator
+ callable entirely, effectively overriding any validation
+ done in the "parent" validator class.
+
+ If you wish to instead extend the behavior of a parent's
+ validator callable, delegate and call it directly in
+ the new validator function by retrieving it using
+ ``OldValidator.VALIDATORS["validator_name"]``.
+
+ version (str):
+
+ a version for the new validator class
+
+ type_checker (jsonschema.TypeChecker):
+
+ a type checker, used when applying the :validator:`type` validator.
+
+ If unprovided, the type checker of the extended
+ `jsonschema.IValidator` will be carried along.
+
+ Returns:
+
+ a new `jsonschema.IValidator` class extending the one provided
+
+ .. note:: Meta Schemas
+
+ The new validator class will have its parent's meta schema.
+
+ If you wish to change or extend the meta schema in the new
+ validator class, modify ``META_SCHEMA`` directly on the returned
+ class. Note that no implicit copying is done, so a copy should
+ likely be made before modifying it, in order to not affect the
+ old validator.
+ """
+
+ all_validators = dict(validator.VALIDATORS)
+ all_validators.update(validators)
+
+ if type_checker is None:
+ type_checker = validator.TYPE_CHECKER
+ elif validator._CREATED_WITH_DEFAULT_TYPES:
+ raise TypeError(
+ "Cannot extend a validator created with default_types "
+ "with a type_checker. Update the validator to use a "
+ "type_checker when created."
+ )
+ return create(
+ meta_schema=validator.META_SCHEMA,
+ validators=all_validators,
+ version=version,
+ type_checker=type_checker,
+ id_of=validator.ID_OF,
+ )
+
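A sketch of the extension pattern the test suite above leans on: widening Draft7Validator's notion of "array" to include tuples, the same trick used in test_it_properly_formats_tuples_in_errors. The class name is illustrative; only the documented `extend` and `TypeChecker.redefine` calls are used.

from jsonschema import validators

TupleFriendlyValidator = validators.extend(
    validators.Draft7Validator,
    type_checker=validators.Draft7Validator.TYPE_CHECKER.redefine(
        "array",
        lambda checker, thing: isinstance(thing, (list, tuple)),
    ),
)

# Tuples now validate against array schemas.
TupleFriendlyValidator({"type": "array", "minItems": 2}).validate((1, 2))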
+
+Draft3Validator = create(
+ meta_schema=_utils.load_schema("draft3"),
+ validators={
+ u"$ref": _validators.ref,
+ u"additionalItems": _validators.additionalItems,
+ u"additionalProperties": _validators.additionalProperties,
+ u"dependencies": _legacy_validators.dependencies_draft3,
+ u"disallow": _legacy_validators.disallow_draft3,
+ u"divisibleBy": _validators.multipleOf,
+ u"enum": _validators.enum,
+ u"extends": _legacy_validators.extends_draft3,
+ u"format": _validators.format,
+ u"items": _legacy_validators.items_draft3_draft4,
+ u"maxItems": _validators.maxItems,
+ u"maxLength": _validators.maxLength,
+ u"maximum": _legacy_validators.maximum_draft3_draft4,
+ u"minItems": _validators.minItems,
+ u"minLength": _validators.minLength,
+ u"minimum": _legacy_validators.minimum_draft3_draft4,
+ u"pattern": _validators.pattern,
+ u"patternProperties": _validators.patternProperties,
+ u"properties": _legacy_validators.properties_draft3,
+ u"type": _legacy_validators.type_draft3,
+ u"uniqueItems": _validators.uniqueItems,
+ },
+ type_checker=_types.draft3_type_checker,
+ version="draft3",
+ id_of=lambda schema: schema.get(u"id", ""),
+)
+
+Draft4Validator = create(
+ meta_schema=_utils.load_schema("draft4"),
+ validators={
+ u"$ref": _validators.ref,
+ u"additionalItems": _validators.additionalItems,
+ u"additionalProperties": _validators.additionalProperties,
+ u"allOf": _validators.allOf,
+ u"anyOf": _validators.anyOf,
+ u"dependencies": _validators.dependencies,
+ u"enum": _validators.enum,
+ u"format": _validators.format,
+ u"items": _legacy_validators.items_draft3_draft4,
+ u"maxItems": _validators.maxItems,
+ u"maxLength": _validators.maxLength,
+ u"maxProperties": _validators.maxProperties,
+ u"maximum": _legacy_validators.maximum_draft3_draft4,
+ u"minItems": _validators.minItems,
+ u"minLength": _validators.minLength,
+ u"minProperties": _validators.minProperties,
+ u"minimum": _legacy_validators.minimum_draft3_draft4,
+ u"multipleOf": _validators.multipleOf,
+ u"not": _validators.not_,
+ u"oneOf": _validators.oneOf,
+ u"pattern": _validators.pattern,
+ u"patternProperties": _validators.patternProperties,
+ u"properties": _validators.properties,
+ u"required": _validators.required,
+ u"type": _validators.type,
+ u"uniqueItems": _validators.uniqueItems,
+ },
+ type_checker=_types.draft4_type_checker,
+ version="draft4",
+ id_of=lambda schema: schema.get(u"id", ""),
+)
+
+Draft6Validator = create(
+ meta_schema=_utils.load_schema("draft6"),
+ validators={
+ u"$ref": _validators.ref,
+ u"additionalItems": _validators.additionalItems,
+ u"additionalProperties": _validators.additionalProperties,
+ u"allOf": _validators.allOf,
+ u"anyOf": _validators.anyOf,
+ u"const": _validators.const,
+ u"contains": _validators.contains,
+ u"dependencies": _validators.dependencies,
+ u"enum": _validators.enum,
+ u"exclusiveMaximum": _validators.exclusiveMaximum,
+ u"exclusiveMinimum": _validators.exclusiveMinimum,
+ u"format": _validators.format,
+ u"items": _validators.items,
+ u"maxItems": _validators.maxItems,
+ u"maxLength": _validators.maxLength,
+ u"maxProperties": _validators.maxProperties,
+ u"maximum": _validators.maximum,
+ u"minItems": _validators.minItems,
+ u"minLength": _validators.minLength,
+ u"minProperties": _validators.minProperties,
+ u"minimum": _validators.minimum,
+ u"multipleOf": _validators.multipleOf,
+ u"not": _validators.not_,
+ u"oneOf": _validators.oneOf,
+ u"pattern": _validators.pattern,
+ u"patternProperties": _validators.patternProperties,
+ u"properties": _validators.properties,
+ u"propertyNames": _validators.propertyNames,
+ u"required": _validators.required,
+ u"type": _validators.type,
+ u"uniqueItems": _validators.uniqueItems,
+ },
+ type_checker=_types.draft6_type_checker,
+ version="draft6",
+)
+
+Draft7Validator = create(
+ meta_schema=_utils.load_schema("draft7"),
+ validators={
+ u"$ref": _validators.ref,
+ u"additionalItems": _validators.additionalItems,
+ u"additionalProperties": _validators.additionalProperties,
+ u"allOf": _validators.allOf,
+ u"anyOf": _validators.anyOf,
+ u"const": _validators.const,
+ u"contains": _validators.contains,
+ u"dependencies": _validators.dependencies,
+ u"enum": _validators.enum,
+ u"exclusiveMaximum": _validators.exclusiveMaximum,
+ u"exclusiveMinimum": _validators.exclusiveMinimum,
+ u"format": _validators.format,
+ u"if": _validators.if_,
+ u"items": _validators.items,
+ u"maxItems": _validators.maxItems,
+ u"maxLength": _validators.maxLength,
+ u"maxProperties": _validators.maxProperties,
+ u"maximum": _validators.maximum,
+ u"minItems": _validators.minItems,
+ u"minLength": _validators.minLength,
+ u"minProperties": _validators.minProperties,
+ u"minimum": _validators.minimum,
+ u"multipleOf": _validators.multipleOf,
+ u"oneOf": _validators.oneOf,
+ u"not": _validators.not_,
+ u"pattern": _validators.pattern,
+ u"patternProperties": _validators.patternProperties,
+ u"properties": _validators.properties,
+ u"propertyNames": _validators.propertyNames,
+ u"required": _validators.required,
+ u"type": _validators.type,
+ u"uniqueItems": _validators.uniqueItems,
+ },
+ type_checker=_types.draft7_type_checker,
+ version="draft7",
+)
+
+_LATEST_VERSION = Draft7Validator
+
+
+class RefResolver(object):
+ """
+ Resolve JSON References.
+
+ Arguments:
+
+ base_uri (str):
+
+ The URI of the referring document
+
+ referrer:
+
+ The actual referring document
+
+ store (dict):
+
+ A mapping from URIs to documents to cache
+
+ cache_remote (bool):
+
+ Whether remote refs should be cached after first resolution
+
+ handlers (dict):
+
+ A mapping from URI schemes to functions that should be used
+ to retrieve them
+
+ urljoin_cache (:func:`functools.lru_cache`):
+
+ A cache that will be used for caching the results of joining
+ the resolution scope to subscopes.
+
+ remote_cache (:func:`functools.lru_cache`):
+
+ A cache that will be used for caching the results of
+ resolved remote URLs.
+
+ Attributes:
+
+ cache_remote (bool):
+
+ Whether remote refs should be cached after first resolution
+ """
+
+ def __init__(
+ self,
+ base_uri,
+ referrer,
+ store=(),
+ cache_remote=True,
+ handlers=(),
+ urljoin_cache=None,
+ remote_cache=None,
+ ):
+ if urljoin_cache is None:
+ urljoin_cache = lru_cache(1024)(urljoin)
+ if remote_cache is None:
+ remote_cache = lru_cache(1024)(self.resolve_from_url)
+
+ self.referrer = referrer
+ self.cache_remote = cache_remote
+ self.handlers = dict(handlers)
+
+ self._scopes_stack = [base_uri]
+ self.store = _utils.URIDict(
+ (id, validator.META_SCHEMA)
+ for id, validator in iteritems(meta_schemas)
+ )
+ self.store.update(store)
+ self.store[base_uri] = referrer
+
+ self._urljoin_cache = urljoin_cache
+ self._remote_cache = remote_cache
+
+ @classmethod
+ def from_schema(cls, schema, id_of=_id_of, *args, **kwargs):
+ """
+ Construct a resolver from a JSON schema object.
+
+ Arguments:
+
+ schema:
+
+ the referring schema
+
+ Returns:
+
+ `RefResolver`
+ """
+
+ return cls(base_uri=id_of(schema), referrer=schema, *args, **kwargs)
+
+ def push_scope(self, scope):
+ """
+ Enter a given sub-scope.
+
+ Treats further dereferences as being performed underneath the
+ given scope.
+ """
+ self._scopes_stack.append(
+ self._urljoin_cache(self.resolution_scope, scope),
+ )
+
+ def pop_scope(self):
+ """
+ Exit the most recently entered scope.
+
+ Treats further dereferences as being performed underneath the
+ original scope.
+
+ Don't call this method more times than `push_scope` has been
+ called.
+ """
+ try:
+ self._scopes_stack.pop()
+ except IndexError:
+ raise exceptions.RefResolutionError(
+ "Failed to pop the scope from an empty stack. "
+ "`pop_scope()` should only be called once for every "
+ "`push_scope()`"
+ )
+
+ @property
+ def resolution_scope(self):
+ """
+ Retrieve the current resolution scope.
+ """
+ return self._scopes_stack[-1]
+
+ @property
+ def base_uri(self):
+ """
+ Retrieve the current base URI, not including any fragment.
+ """
+ uri, _ = urldefrag(self.resolution_scope)
+ return uri
+
+ @contextlib.contextmanager
+ def in_scope(self, scope):
+ """
+ Temporarily enter the given scope for the duration of the context.
+ """
+ self.push_scope(scope)
+ try:
+ yield
+ finally:
+ self.pop_scope()
+
+ @contextlib.contextmanager
+ def resolving(self, ref):
+ """
+ Resolve the given ``ref`` and enter its resolution scope.
+
+ Exits the scope on exit of this context manager.
+
+ Arguments:
+
+ ref (str):
+
+ The reference to resolve
+ """
+
+ url, resolved = self.resolve(ref)
+ self.push_scope(url)
+ try:
+ yield resolved
+ finally:
+ self.pop_scope()
+
+ def resolve(self, ref):
+ """
+ Resolve the given reference.
+ """
+ url = self._urljoin_cache(self.resolution_scope, ref)
+ return url, self._remote_cache(url)
+
+ def resolve_from_url(self, url):
+ """
+ Resolve the given remote URL.
+ """
+ url, fragment = urldefrag(url)
+ try:
+ document = self.store[url]
+ except KeyError:
+ try:
+ document = self.resolve_remote(url)
+ except Exception as exc:
+ raise exceptions.RefResolutionError(exc)
+
+ return self.resolve_fragment(document, fragment)
+
+ def resolve_fragment(self, document, fragment):
+ """
+ Resolve a ``fragment`` within the referenced ``document``.
+
+ Arguments:
+
+ document:
+
+ The referent document
+
+ fragment (str):
+
+ a URI fragment to resolve within it
+ """
+
+ fragment = fragment.lstrip(u"/")
+ parts = unquote(fragment).split(u"/") if fragment else []
+
+ for part in parts:
+ part = part.replace(u"~1", u"/").replace(u"~0", u"~")
+
+ if isinstance(document, Sequence):
+ # Array indexes should be turned into integers
+ try:
+ part = int(part)
+ except ValueError:
+ pass
+ try:
+ document = document[part]
+ except (TypeError, LookupError):
+ raise exceptions.RefResolutionError(
+ "Unresolvable JSON pointer: %r" % fragment
+ )
+
+ return document
+
+ def resolve_remote(self, uri):
+ """
+ Resolve a remote ``uri``.
+
+ If called directly, does not check the store first, but after
+ retrieving the document at the specified URI it will be saved in
+ the store if :attr:`cache_remote` is True.
+
+ .. note::
+
+ If the requests_ library is present, ``jsonschema`` will use it to
+ request the remote ``uri``, so that the correct encoding is
+ detected and used.
+
+ If it isn't, or if the scheme of the ``uri`` is not ``http`` or
+ ``https``, UTF-8 is assumed.
+
+ Arguments:
+
+ uri (str):
+
+ The URI to resolve
+
+ Returns:
+
+ The retrieved document
+
+ .. _requests: https://pypi.org/project/requests/
+ """
+ try:
+ import requests
+ except ImportError:
+ requests = None
+
+ scheme = urlsplit(uri).scheme
+
+ if scheme in self.handlers:
+ result = self.handlers[scheme](uri)
+ elif scheme in [u"http", u"https"] and requests:
+ # Requests has support for detecting the correct encoding of
+ # json over http
+ result = requests.get(uri).json()
+ else:
+ # Otherwise, pass off to urllib and assume utf-8
+ with urlopen(uri) as url:
+ result = json.loads(url.read().decode("utf-8"))
+
+ if self.cache_remote:
+ self.store[uri] = result
+ return result
+
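A minimal sketch of wiring a preloaded resolver into a validator, mirroring the test_ref case earlier in this patch; the "someRef" name is arbitrary.

from jsonschema import validators

ref, subschema = "someRef", {"additionalProperties": {"type": "integer"}}
resolver = validators.RefResolver("", {}, store={ref: subschema})
validator = validators.Draft7Validator({"$ref": ref}, resolver=resolver)

for error in validator.iter_errors({"foo": "notAnInteger"}):
    # Reports "'notAnInteger' is not of type 'integer'" at path ["foo"].
    print(error.message, list(error.absolute_path))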
+
+def validate(instance, schema, cls=None, *args, **kwargs):
+ """
+ Validate an instance under the given schema.
+
+ >>> validate([2, 3, 4], {"maxItems": 2})
+ Traceback (most recent call last):
+ ...
+ ValidationError: [2, 3, 4] is too long
+
+ :func:`validate` will first verify that the provided schema is
+ itself valid, since not doing so can lead to less obvious error
+ messages and cause validation to fail in less obvious or consistent ways.
+
+ If you know you have a valid schema already, especially if you
+ intend to validate multiple instances with the same schema, you
+ likely would prefer using the `IValidator.validate` method directly
+ on a specific validator (e.g. ``Draft7Validator.validate``).
+
+
+ Arguments:
+
+ instance:
+
+ The instance to validate
+
+ schema:
+
+ The schema to validate with
+
+ cls (IValidator):
+
+ The class that will be used to validate the instance.
+
+ If the ``cls`` argument is not provided, two things will happen
+ in accordance with the specification. First, if the schema has a
+ :validator:`$schema` property containing a known meta-schema [#]_
+ then the proper validator will be used. The specification recommends
+ that all schemas contain :validator:`$schema` properties for this
+ reason. If no :validator:`$schema` property is found, the default
+ validator class is the latest released draft.
+
+ Any other provided positional and keyword arguments will be passed
+ on when instantiating the ``cls``.
+
+ Raises:
+
+ `jsonschema.exceptions.ValidationError` if the instance
+ is invalid
+
+ `jsonschema.exceptions.SchemaError` if the schema itself
+ is invalid
+
+ .. rubric:: Footnotes
+ .. [#] known by a validator registered with
+ `jsonschema.validators.validates`
+ """
+ if cls is None:
+ cls = validator_for(schema)
+
+ cls.check_schema(schema)
+ validator = cls(schema, *args, **kwargs)
+ error = exceptions.best_match(validator.iter_errors(instance))
+ if error is not None:
+ raise error
+
+
+def validator_for(schema, default=_LATEST_VERSION):
+ """
+ Retrieve the validator class appropriate for validating the given schema.
+
+ Uses the :validator:`$schema` property that should be present in the
+ given schema to look up the appropriate validator class.
+
+ Arguments:
+
+ schema (collections.Mapping or bool):
+
+ the schema to look at
+
+ default:
+
+ the default to return if the appropriate validator class
+ cannot be determined.
+
+ If unprovided, the default is to return the latest supported
+ draft.
+ """
+ if schema is True or schema is False or u"$schema" not in schema:
+ return default
+ if schema[u"$schema"] not in meta_schemas:
+ warn(
+ (
+ "The metaschema specified by $schema was not found. "
+ "Using the latest draft to validate, but this will raise "
+ "an error in the future."
+ ),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return meta_schemas.get(schema[u"$schema"], _LATEST_VERSION)
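
For orientation, the two public entry points above fit together as follows. This is an illustrative sketch rather than part of the vendored file, and the schema and instance literals are invented: ``validate`` is the one-shot helper that checks the schema itself before checking the instance, while resolving the class via ``validator_for`` and reusing one validator instance avoids re-checking the schema when validating many instances::

    import jsonschema
    from jsonschema.validators import validator_for

    schema = {
        "$schema": "http://json-schema.org/draft-07/schema#",
        "maxItems": 2,
    }

    # One-shot helper: verifies the schema itself, then the instance.
    jsonschema.validate([1, 2], schema)

    # Reusable validator: $schema selects Draft7Validator here. Check the
    # schema once, then validate as many instances as needed.
    cls = validator_for(schema)
    cls.check_schema(schema)
    validator = cls(schema)
    print(validator.is_valid([1, 2, 3]))   # False -- the array is too long
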
diff --git a/third_party/python/jsonschema/pyproject.toml b/third_party/python/jsonschema/pyproject.toml
new file mode 100644
index 0000000000..8fac02e398
--- /dev/null
+++ b/third_party/python/jsonschema/pyproject.toml
@@ -0,0 +1,8 @@
+[build-system]
+requires = [
+ # The minimum setuptools version is specific to the PEP 517 backend,
+ # and may be stricter than the version required in `setup.py`
+ "setuptools>=40.6.0",
+ "wheel",
+]
+build-backend = "setuptools.build_meta"
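
The ``[build-system]`` table above is what a PEP 517 front end such as pip reads before building this vendored copy: it installs ``setuptools>=40.6.0`` and ``wheel`` into an isolated environment and then calls ``setuptools.build_meta``. A rough sketch of triggering that build from Python, assuming it is run from the root of this tree::

    import subprocess
    import sys

    # pip reads pyproject.toml, sets up the isolated build environment, and
    # invokes setuptools.build_meta to produce the wheel.
    subprocess.run(
        [sys.executable, "-m", "pip", "wheel", "--no-deps",
         "third_party/python/jsonschema", "-w", "dist/"],
        check=True,
    )
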
diff --git a/third_party/python/jsonschema/setup.cfg b/third_party/python/jsonschema/setup.cfg
new file mode 100644
index 0000000000..285e3b228f
--- /dev/null
+++ b/third_party/python/jsonschema/setup.cfg
@@ -0,0 +1,81 @@
+[metadata]
+name = jsonschema
+url = https://github.com/Julian/jsonschema
+project_urls =
+ Docs = https://python-jsonschema.readthedocs.io/en/latest/
+description = An implementation of JSON Schema validation for Python
+long_description = file: README.rst
+author = Julian Berman
+author_email = Julian@GrayVines.com
+classifiers =
+ Development Status :: 5 - Production/Stable
+ Intended Audience :: Developers
+ License :: OSI Approved :: MIT License
+ Operating System :: OS Independent
+ Programming Language :: Python
+ Programming Language :: Python :: 2
+ Programming Language :: Python :: 2.7
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3.5
+ Programming Language :: Python :: 3.6
+ Programming Language :: Python :: 3.7
+ Programming Language :: Python :: 3.8
+ Programming Language :: Python :: Implementation :: CPython
+ Programming Language :: Python :: Implementation :: PyPy
+
+[options]
+packages = find:
+setup_requires = setuptools_scm
+install_requires =
+ attrs>=17.4.0
+ importlib_metadata;python_version<'3.8'
+ pyrsistent>=0.14.0
+ setuptools
+ six>=1.11.0
+ functools32;python_version<'3'
+
+[options.extras_require]
+format =
+ idna
+ jsonpointer>1.13
+ rfc3987
+ strict-rfc3339
+ webcolors
+format_nongpl =
+ idna
+ jsonpointer>1.13
+ webcolors
+ rfc3986-validator>0.1.0
+ rfc3339-validator
+
+[options.entry_points]
+console_scripts =
+ jsonschema = jsonschema.cli:main
+
+[options.package_data]
+jsonschema = schemas/*.json
+
+[bdist_wheel]
+universal = 1
+
+[flake8]
+builtins = unicode
+exclude =
+ jsonschema/__init__.py
+ jsonschema/_reflect.py
+
+[pydocstyle]
+match = (?!(test_|_|compat|cli)).*\.py # see PyCQA/pydocstyle#323
+add-select =
+ D410, # Trailing whitespace plz
+add-ignore =
+ D107, # Hah, no
+ D200, # 1-line docstrings don't need to be on one line
+ D202, # One line is fine.
+ D412, # Trailing whitespace plz
+ D413, # No trailing whitespace plz
+
+[egg_info]
+tag_build =
+tag_date = 0
+
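
For reference, the ``console_scripts`` entry above maps the ``jsonschema`` command onto ``jsonschema.cli:main``, and the ``format``/``format_nongpl`` extras pull in the optional format-checking dependencies. The same code path can be driven from Python; a hedged sketch in which the file names are hypothetical::

    from jsonschema.cli import main

    # Equivalent to running `jsonschema -i instance.json schema.json` from a
    # shell; main() follows argparse conventions and exits via SystemExit.
    try:
        main(["-i", "instance.json", "schema.json"])
    except SystemExit as exit_status:
        print("exit code:", exit_status.code)
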
diff --git a/third_party/python/jsonschema/setup.py b/third_party/python/jsonschema/setup.py
new file mode 100644
index 0000000000..460aabeb2a
--- /dev/null
+++ b/third_party/python/jsonschema/setup.py
@@ -0,0 +1,2 @@
+from setuptools import setup
+setup(use_scm_version=True)
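
Because ``setup.cfg`` lists ``setup_requires = setuptools_scm`` and this ``setup.py`` passes ``use_scm_version=True``, the package version is derived from the SCM tag at build time rather than being hard-coded. An illustrative sketch of the same mechanism, which only works inside a tagged checkout (a bare source tree raises ``LookupError``)::

    from setuptools_scm import get_version

    # Read the nearest VCS tag and turn it into a PEP 440 version string,
    # e.g. "1.2.3" for a tagged release or "1.2.4.dev4+g1a2b3c4" after it.
    print(get_version())
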
diff --git a/third_party/python/jsonschema/test-requirements.txt b/third_party/python/jsonschema/test-requirements.txt
new file mode 100644
index 0000000000..082785c979
--- /dev/null
+++ b/third_party/python/jsonschema/test-requirements.txt
@@ -0,0 +1 @@
+Twisted
diff --git a/third_party/python/jsonschema/tox.ini b/third_party/python/jsonschema/tox.ini
new file mode 100644
index 0000000000..4ed385bd40
--- /dev/null
+++ b/third_party/python/jsonschema/tox.ini
@@ -0,0 +1,153 @@
+[tox]
+envlist =
+ py{35,36,37,38,py,py3}-{build,tests,tests_nongpl},
+ demo
+ readme
+ safety
+ secrets
+ style
+ docs-{html,doctest,linkcheck,spelling,style}
+skipsdist = True
+
+[testenv]
+changedir =
+ !build: {envtmpdir}
+setenv =
+ JSON_SCHEMA_TEST_SUITE = {toxinidir}/json
+whitelist_externals =
+ python2.7
+ mkdir
+ rm
+ sh
+ virtualenv
+commands =
+ perf,tests: {envbindir}/python -m pip install '{toxinidir}[format]'
+ tests_nongpl: {envbindir}/python -m pip install '{toxinidir}[format_nongpl]'
+
+ tests,tests_nongpl: {envbindir}/trial {posargs:jsonschema}
+ tests: {envpython} -m doctest {toxinidir}/README.rst
+
+ perf: mkdir {envtmpdir}/benchmarks/
+ perf: {envpython} {toxinidir}/jsonschema/benchmarks/issue232.py --inherit-environ JSON_SCHEMA_TEST_SUITE --output {envtmpdir}/benchmarks/issue232.json
+ perf: {envpython} {toxinidir}/jsonschema/benchmarks/json_schema_test_suite.py --inherit-environ JSON_SCHEMA_TEST_SUITE --output {envtmpdir}/benchmarks/json_schema_test_suite.json
+
+ # Check to make sure that releases build and install properly
+ build: virtualenv --quiet --python=python2.7 {envtmpdir}/venv
+ build: {envtmpdir}/venv/bin/pip install --quiet wheel
+
+ build: {envtmpdir}/venv/bin/python {toxinidir}/setup.py --quiet bdist_wheel --dist-dir={envtmpdir}/wheel
+ build: sh -c '{envbindir}/pip install --quiet --upgrade --force-reinstall {envtmpdir}/wheel/jsonschema*.whl'
+
+ build: python2.7 {toxinidir}/setup.py --quiet sdist --dist-dir={envtmpdir}/sdist --format=gztar,zip
+ build: sh -c '{envbindir}/pip install --quiet --upgrade --force-reinstall {envtmpdir}/sdist/jsonschema*.tar.gz'
+ build: sh -c '{envbindir}/pip install --quiet --upgrade --force-reinstall {envtmpdir}/sdist/jsonschema*.zip'
+
+ build: {envbindir}/python -m pep517.check {toxinidir}
+
+ # FIXME: This has side effects! But it's not my fault... I can't
+ # figure out yet how to get setuptools to not create this directory
+    # here. But whatever, probably this will change to pep517.build
+    # soon anyway.
+ build: rm -rf {toxinidir}/jsonschema.egg-info
+deps =
+ build: pep517
+
+ perf: pyperf
+
+ tests,tests_nongpl,coverage,codecov: -r{toxinidir}/test-requirements.txt
+
+ coverage,codecov: coverage
+ codecov: codecov
+
+[testenv:bandit]
+deps = bandit
+commands = {envbindir}/bandit --recursive {toxinidir}/jsonschema
+
+[testenv:demo]
+deps = jupyter
+commands =
+ {envbindir}/jupyter nbconvert --output-dir {envtmpdir} {toxinidir}/DEMO.ipynb
+
+[testenv:readme]
+changedir = {toxinidir}
+deps = readme_renderer
+commands =
+ {envbindir}/python setup.py check --restructuredtext --strict
+
+[testenv:safety]
+deps = safety
+commands =
+ {envbindir}/pip install '{toxinidir}[format]'
+ {envbindir}/safety check
+
+[testenv:secrets]
+deps = detect-secrets
+commands = {envbindir}/detect-secrets scan {toxinidir}
+
+[testenv:style]
+basepython = pypy3
+deps =
+ ebb-lint>=0.19.1.0
+commands =
+ {envbindir}/flake8 {posargs} {toxinidir}/jsonschema {toxinidir}/docs {toxinidir}/setup.py
+
+[testenv:coverage]
+setenv =
+ {[testenv]setenv}
+ COVERAGE_DEBUG_FILE={envtmpdir}/coverage-debug
+ COVERAGE_FILE={envtmpdir}/coverage-data
+commands =
+ {envbindir}/python -m pip install '{toxinidir}[format]'
+ {envbindir}/coverage run --rcfile={toxinidir}/.coveragerc {envbindir}/trial jsonschema
+ {envbindir}/coverage report --rcfile={toxinidir}/.coveragerc --show-missing
+ {envbindir}/coverage html --directory={envtmpdir}/htmlcov --rcfile={toxinidir}/.coveragerc {posargs}
+
+[testenv:docs-html]
+basepython = pypy3
+commands = {envpython} -m sphinx -b html {toxinidir}/docs/ {envtmpdir}/build {posargs:-a -n -q -T -W}
+deps =
+ -r{toxinidir}/docs/requirements.txt
+ {toxinidir}
+
+[testenv:docs-doctest]
+basepython = pypy3
+commands = {envpython} -m sphinx -b doctest {toxinidir}/docs/ {envtmpdir}/build {posargs:-a -n -q -T -W}
+deps =
+ -r{toxinidir}/docs/requirements.txt
+ {toxinidir}
+
+[testenv:docs-linkcheck]
+basepython = pypy3
+commands = {envpython} -m sphinx -b linkcheck {toxinidir}/docs/ {envtmpdir}/build {posargs:-a -n -q -T -W}
+deps =
+ -r{toxinidir}/docs/requirements.txt
+ {toxinidir}
+
+[testenv:docs-spelling]
+basepython = pypy3
+commands = {envpython} -m sphinx -b spelling {toxinidir}/docs/ {envtmpdir}/build {posargs:-a -n -q -T -W}
+deps =
+ -r{toxinidir}/docs/requirements.txt
+ {toxinidir}
+
+[testenv:docs-style]
+basepython = pypy3
+commands = doc8 {posargs} {toxinidir}/docs
+deps =
+ doc8
+ pygments
+ pygments-github-lexers
+
+[testenv:codecov]
+passenv = CODECOV* CI TRAVIS TRAVIS_*
+setenv = {[testenv:coverage]setenv}
+commands =
+ {envbindir}/python -m pip install '{toxinidir}[format]'
+ {envbindir}/coverage run --rcfile={toxinidir}/.coveragerc {envbindir}/trial jsonschema
+ {envbindir}/coverage xml -o {envtmpdir}/coverage.xml
+ codecov --required --disable gcov --file {envtmpdir}/coverage.xml
+
+[travis]
+python =
+ pypy: pypy, readme, safety, secrets
+ pypy3: pypy3, demo, docs, style
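
Condensed, the ``tests`` factor above installs the package with its ``format`` extra, runs the suite under Twisted's ``trial`` (hence the Twisted entry in ``test-requirements.txt`` below), and doctests the README, with ``JSON_SCHEMA_TEST_SUITE`` pointing at the JSON-Schema-Test-Suite checkout (the ``json`` directory in the upstream repository, which may not be present in this vendored copy). Roughly the same steps outside of tox, as an illustrative sketch run from the root of this tree::

    import os
    import subprocess
    import sys

    pkg = "third_party/python/jsonschema"
    env = dict(os.environ, JSON_SCHEMA_TEST_SUITE=pkg + "/json")

    subprocess.run([sys.executable, "-m", "pip", "install", pkg + "[format]"],
                   check=True)
    subprocess.run(["trial", "jsonschema"], check=True, env=env)
    subprocess.run([sys.executable, "-m", "doctest", pkg + "/README.rst"],
                   check=True)
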
diff --git a/third_party/python/mock-1.0.0/LICENSE.txt b/third_party/python/mock-1.0.0/LICENSE.txt
new file mode 100644
index 0000000000..7891703b13
--- /dev/null
+++ b/third_party/python/mock-1.0.0/LICENSE.txt
@@ -0,0 +1,26 @@
+Copyright (c) 2003-2012, Michael Foord
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/mock-1.0.0/MANIFEST.in b/third_party/python/mock-1.0.0/MANIFEST.in
new file mode 100644
index 0000000000..d52b301de6
--- /dev/null
+++ b/third_party/python/mock-1.0.0/MANIFEST.in
@@ -0,0 +1,2 @@
+include LICENSE.txt tox.ini tests/*.py
+recursive-include docs *.txt *.py *.png *.css *.html *.js
diff --git a/third_party/python/mock-1.0.0/PKG-INFO b/third_party/python/mock-1.0.0/PKG-INFO
new file mode 100644
index 0000000000..4c7309c713
--- /dev/null
+++ b/third_party/python/mock-1.0.0/PKG-INFO
@@ -0,0 +1,208 @@
+Metadata-Version: 1.0
+Name: mock
+Version: 1.0.0
+Summary: A Python Mocking and Patching Library for Testing
+Home-page: http://www.voidspace.org.uk/python/mock/
+Author: Michael Foord
+Author-email: michael@voidspace.org.uk
+License: UNKNOWN
+Description: mock is a library for testing in Python. It allows you to replace parts of
+ your system under test with mock objects and make assertions about how they
+ have been used.
+
+ mock is now part of the Python standard library, available as `unittest.mock <
+ http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock>`_
+ in Python 3.3 onwards.
+
+ mock provides a core `MagicMock` class removing the need to create a host of
+ stubs throughout your test suite. After performing an action, you can make
+ assertions about which methods / attributes were used and arguments they were
+ called with. You can also specify return values and set needed attributes in
+ the normal way.
+
+ mock is tested on Python versions 2.4-2.7 and Python 3. mock is also tested
+ with the latest versions of Jython and pypy.
+
+ The mock module also provides utility functions / objects to assist with
+ testing, particularly monkey patching.
+
+ * `PDF documentation for 1.0 beta 1
+ <http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf>`_
+ * `mock on google code (repository and issue tracker)
+ <http://code.google.com/p/mock/>`_
+ * `mock documentation
+ <http://www.voidspace.org.uk/python/mock/>`_
+ * `mock on PyPI <http://pypi.python.org/pypi/mock/>`_
+ * `Mailing list (testing-in-python@lists.idyll.org)
+ <http://lists.idyll.org/listinfo/testing-in-python>`_
+
+ Mock is very easy to use and is designed for use with
+ `unittest <http://pypi.python.org/pypi/unittest2>`_. Mock is based on
+ the 'action -> assertion' pattern instead of 'record -> replay' used by many
+ mocking frameworks. See the `mock documentation`_ for full details.
+
+ Mock objects create all attributes and methods as you access them and store
+ details of how they have been used. You can configure them, to specify return
+ values or limit what attributes are available, and then make assertions about
+ how they have been used::
+
+ >>> from mock import Mock
+ >>> real = ProductionClass()
+ >>> real.method = Mock(return_value=3)
+ >>> real.method(3, 4, 5, key='value')
+ 3
+ >>> real.method.assert_called_with(3, 4, 5, key='value')
+
+ `side_effect` allows you to perform side effects, return different values or
+ raise an exception when a mock is called::
+
+ >>> mock = Mock(side_effect=KeyError('foo'))
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ KeyError: 'foo'
+ >>> values = {'a': 1, 'b': 2, 'c': 3}
+ >>> def side_effect(arg):
+ ... return values[arg]
+ ...
+ >>> mock.side_effect = side_effect
+ >>> mock('a'), mock('b'), mock('c')
+            (1, 2, 3)
+ >>> mock.side_effect = [5, 4, 3, 2, 1]
+ >>> mock(), mock(), mock()
+ (5, 4, 3)
+
+ Mock has many other ways you can configure it and control its behaviour. For
+ example the `spec` argument configures the mock to take its specification from
+ another object. Attempting to access attributes or methods on the mock that
+ don't exist on the spec will fail with an `AttributeError`.
+
+ The `patch` decorator / context manager makes it easy to mock classes or
+ objects in a module under test. The object you specify will be replaced with a
+ mock (or other object) during the test and restored when the test ends::
+
+ >>> from mock import patch
+ >>> @patch('test_module.ClassName1')
+ ... @patch('test_module.ClassName2')
+ ... def test(MockClass2, MockClass1):
+ ... test_module.ClassName1()
+ ... test_module.ClassName2()
+ ... assert MockClass1.called
+ ... assert MockClass2.called
+ ...
+ >>> test()
+
+ .. note::
+
+ When you nest patch decorators the mocks are passed in to the decorated
+        function in the same order they are applied (the normal *python* order that
+ decorators are applied). This means from the bottom up, so in the example
+ above the mock for `test_module.ClassName2` is passed in first.
+
+ With `patch` it matters that you patch objects in the namespace where they
+ are looked up. This is normally straightforward, but for a quick guide
+ read `where to patch
+ <http://www.voidspace.org.uk/python/mock/patch.html#where-to-patch>`_.
+
+ As well as a decorator `patch` can be used as a context manager in a with
+ statement::
+
+ >>> with patch.object(ProductionClass, 'method') as mock_method:
+ ... mock_method.return_value = None
+ ... real = ProductionClass()
+ ... real.method(1, 2, 3)
+ ...
+ >>> mock_method.assert_called_once_with(1, 2, 3)
+
+ There is also `patch.dict` for setting values in a dictionary just during the
+ scope of a test and restoring the dictionary to its original state when the
+ test ends::
+
+ >>> foo = {'key': 'value'}
+ >>> original = foo.copy()
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == original
+
+ Mock supports the mocking of Python magic methods. The easiest way of
+ using magic methods is with the `MagicMock` class. It allows you to do
+ things like::
+
+ >>> from mock import MagicMock
+ >>> mock = MagicMock()
+ >>> mock.__str__.return_value = 'foobarbaz'
+ >>> str(mock)
+ 'foobarbaz'
+ >>> mock.__str__.assert_called_once_with()
+
+ Mock allows you to assign functions (or other Mock instances) to magic methods
+ and they will be called appropriately. The MagicMock class is just a Mock
+ variant that has all of the magic methods pre-created for you (well - all the
+ useful ones anyway).
+
+ The following is an example of using magic methods with the ordinary Mock
+ class::
+
+ >>> from mock import Mock
+ >>> mock = Mock()
+ >>> mock.__str__ = Mock(return_value = 'wheeeeee')
+ >>> str(mock)
+ 'wheeeeee'
+
+ For ensuring that the mock objects your tests use have the same api as the
+ objects they are replacing, you can use "auto-speccing". Auto-speccing can
+ be done through the `autospec` argument to patch, or the `create_autospec`
+ function. Auto-speccing creates mock objects that have the same attributes
+ and methods as the objects they are replacing, and any functions and methods
+ (including constructors) have the same call signature as the real object.
+
+ This ensures that your mocks will fail in the same way as your production
+ code if they are used incorrectly::
+
+ >>> from mock import create_autospec
+ >>> def function(a, b, c):
+ ... pass
+ ...
+ >>> mock_function = create_autospec(function, return_value='fishy')
+ >>> mock_function(1, 2, 3)
+ 'fishy'
+ >>> mock_function.assert_called_once_with(1, 2, 3)
+ >>> mock_function('wrong arguments')
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes exactly 3 arguments (1 given)
+
+ `create_autospec` can also be used on classes, where it copies the signature of
+ the `__init__` method, and on callable objects where it copies the signature of
+ the `__call__` method.
+
+ The distribution contains tests and documentation. The tests require
+ `unittest2 <http://pypi.python.org/pypi/unittest2>`_ to run.
+
+ Docs from the in-development version of `mock` can be found at
+ `mock.readthedocs.org <http://mock.readthedocs.org>`_.
+
+Keywords: testing,test,mock,mocking,unittest,patching,stubs,fakes,doubles
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: Implementation :: Jython
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Testing
diff --git a/third_party/python/mock-1.0.0/README.txt b/third_party/python/mock-1.0.0/README.txt
new file mode 100644
index 0000000000..385db3cae6
--- /dev/null
+++ b/third_party/python/mock-1.0.0/README.txt
@@ -0,0 +1,177 @@
+mock is a library for testing in Python. It allows you to replace parts of
+your system under test with mock objects and make assertions about how they
+have been used.
+
+mock is now part of the Python standard library, available as `unittest.mock <
+http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock>`_
+in Python 3.3 onwards.
+
+mock provides a core `MagicMock` class removing the need to create a host of
+stubs throughout your test suite. After performing an action, you can make
+assertions about which methods / attributes were used and arguments they were
+called with. You can also specify return values and set needed attributes in
+the normal way.
+
+mock is tested on Python versions 2.4-2.7 and Python 3. mock is also tested
+with the latest versions of Jython and pypy.
+
+The mock module also provides utility functions / objects to assist with
+testing, particularly monkey patching.
+
+* `PDF documentation for 1.0 beta 1
+ <http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf>`_
+* `mock on google code (repository and issue tracker)
+ <http://code.google.com/p/mock/>`_
+* `mock documentation
+ <http://www.voidspace.org.uk/python/mock/>`_
+* `mock on PyPI <http://pypi.python.org/pypi/mock/>`_
+* `Mailing list (testing-in-python@lists.idyll.org)
+ <http://lists.idyll.org/listinfo/testing-in-python>`_
+
+Mock is very easy to use and is designed for use with
+`unittest <http://pypi.python.org/pypi/unittest2>`_. Mock is based on
+the 'action -> assertion' pattern instead of 'record -> replay' used by many
+mocking frameworks. See the `mock documentation`_ for full details.
+
+Mock objects create all attributes and methods as you access them and store
+details of how they have been used. You can configure them, to specify return
+values or limit what attributes are available, and then make assertions about
+how they have been used::
+
+ >>> from mock import Mock
+ >>> real = ProductionClass()
+ >>> real.method = Mock(return_value=3)
+ >>> real.method(3, 4, 5, key='value')
+ 3
+ >>> real.method.assert_called_with(3, 4, 5, key='value')
+
+`side_effect` allows you to perform side effects, return different values or
+raise an exception when a mock is called::
+
+ >>> mock = Mock(side_effect=KeyError('foo'))
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ KeyError: 'foo'
+ >>> values = {'a': 1, 'b': 2, 'c': 3}
+ >>> def side_effect(arg):
+ ... return values[arg]
+ ...
+ >>> mock.side_effect = side_effect
+ >>> mock('a'), mock('b'), mock('c')
+    (1, 2, 3)
+ >>> mock.side_effect = [5, 4, 3, 2, 1]
+ >>> mock(), mock(), mock()
+ (5, 4, 3)
+
+Mock has many other ways you can configure it and control its behaviour. For
+example the `spec` argument configures the mock to take its specification from
+another object. Attempting to access attributes or methods on the mock that
+don't exist on the spec will fail with an `AttributeError`.
+
+The `patch` decorator / context manager makes it easy to mock classes or
+objects in a module under test. The object you specify will be replaced with a
+mock (or other object) during the test and restored when the test ends::
+
+ >>> from mock import patch
+ >>> @patch('test_module.ClassName1')
+ ... @patch('test_module.ClassName2')
+ ... def test(MockClass2, MockClass1):
+ ... test_module.ClassName1()
+ ... test_module.ClassName2()
+ ... assert MockClass1.called
+ ... assert MockClass2.called
+ ...
+ >>> test()
+
+.. note::
+
+ When you nest patch decorators the mocks are passed in to the decorated
+   function in the same order they are applied (the normal *python* order that
+ decorators are applied). This means from the bottom up, so in the example
+ above the mock for `test_module.ClassName2` is passed in first.
+
+ With `patch` it matters that you patch objects in the namespace where they
+ are looked up. This is normally straightforward, but for a quick guide
+ read `where to patch
+ <http://www.voidspace.org.uk/python/mock/patch.html#where-to-patch>`_.
+
+As well as a decorator `patch` can be used as a context manager in a with
+statement::
+
+ >>> with patch.object(ProductionClass, 'method') as mock_method:
+ ... mock_method.return_value = None
+ ... real = ProductionClass()
+ ... real.method(1, 2, 3)
+ ...
+ >>> mock_method.assert_called_once_with(1, 2, 3)
+
+There is also `patch.dict` for setting values in a dictionary just during the
+scope of a test and restoring the dictionary to its original state when the
+test ends::
+
+ >>> foo = {'key': 'value'}
+ >>> original = foo.copy()
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == original
+
+Mock supports the mocking of Python magic methods. The easiest way of
+using magic methods is with the `MagicMock` class. It allows you to do
+things like::
+
+ >>> from mock import MagicMock
+ >>> mock = MagicMock()
+ >>> mock.__str__.return_value = 'foobarbaz'
+ >>> str(mock)
+ 'foobarbaz'
+ >>> mock.__str__.assert_called_once_with()
+
+Mock allows you to assign functions (or other Mock instances) to magic methods
+and they will be called appropriately. The MagicMock class is just a Mock
+variant that has all of the magic methods pre-created for you (well - all the
+useful ones anyway).
+
+The following is an example of using magic methods with the ordinary Mock
+class::
+
+ >>> from mock import Mock
+ >>> mock = Mock()
+ >>> mock.__str__ = Mock(return_value = 'wheeeeee')
+ >>> str(mock)
+ 'wheeeeee'
+
+For ensuring that the mock objects your tests use have the same api as the
+objects they are replacing, you can use "auto-speccing". Auto-speccing can
+be done through the `autospec` argument to patch, or the `create_autospec`
+function. Auto-speccing creates mock objects that have the same attributes
+and methods as the objects they are replacing, and any functions and methods
+(including constructors) have the same call signature as the real object.
+
+This ensures that your mocks will fail in the same way as your production
+code if they are used incorrectly::
+
+ >>> from mock import create_autospec
+ >>> def function(a, b, c):
+ ... pass
+ ...
+ >>> mock_function = create_autospec(function, return_value='fishy')
+ >>> mock_function(1, 2, 3)
+ 'fishy'
+ >>> mock_function.assert_called_once_with(1, 2, 3)
+ >>> mock_function('wrong arguments')
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes exactly 3 arguments (1 given)
+
+`create_autospec` can also be used on classes, where it copies the signature of
+the `__init__` method, and on callable objects where it copies the signature of
+the `__call__` method.
+
+The distribution contains tests and documentation. The tests require
+`unittest2 <http://pypi.python.org/pypi/unittest2>`_ to run.
+
+Docs from the in-development version of `mock` can be found at
+`mock.readthedocs.org <http://mock.readthedocs.org>`_.
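
One thing the examples above do not show is the ``autospec`` argument to ``patch``, which applies the same speccing as `create_autospec` while patching in place. A small illustrative sketch, in which ``mymodule`` and its three-argument ``function`` are hypothetical::

    >>> from mock import patch
    >>> with patch('mymodule.function', autospec=True) as mock_function:
    ...     mock_function.return_value = 'fishy'
    ...     mymodule.function(1, 2, 3)
    ...
    'fishy'
    >>> mock_function.assert_called_once_with(1, 2, 3)
    >>> mock_function('wrong arguments')
    Traceback (most recent call last):
      ...
    TypeError: <lambda>() takes exactly 3 arguments (1 given)
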
diff --git a/third_party/python/mock-1.0.0/docs/changelog.txt b/third_party/python/mock-1.0.0/docs/changelog.txt
new file mode 100644
index 0000000000..a605be3d97
--- /dev/null
+++ b/third_party/python/mock-1.0.0/docs/changelog.txt
@@ -0,0 +1,725 @@
+.. currentmodule:: mock
+
+
+CHANGELOG
+=========
+
+2012/10/07 Version 1.0.0
+------------------------
+
+No changes since 1.0.0 beta 1. This version has feature parity with
+`unittest.mock
+<http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock>`_
+in Python 3.3.
+
+Full list of changes since 0.8:
+
+* `mocksignature`, along with the `mocksignature` argument to `patch`, removed
+* Support for deleting attributes (accessing deleted attributes will raise an
+ `AttributeError`)
+* Added the `mock_open` helper function for mocking the builtin `open`
+* `__class__` is assignable, so a mock can pass an `isinstance` check without
+ requiring a spec
+* Addition of `PropertyMock`, for mocking properties
+* `MagicMocks` made unorderable by default (in Python 3). The comparison
+ methods (other than equality and inequality) now return `NotImplemented`
+* Propagate traceback info to support subclassing of `_patch` by other
+ libraries
+* `create_autospec` works with attributes present in results of `dir` that
+ can't be fetched from the object's class. Contributed by Konstantine Rybnikov
+* Any exceptions in an iterable `side_effect` will be raised instead of
+ returned
+* In Python 3, `create_autospec` now supports keyword only arguments
+* Added `patch.stopall` method to stop all active patches created by `start`
+* BUGFIX: calling `MagicMock.reset_mock` wouldn't reset magic method mocks
+* BUGFIX: calling `reset_mock` on a `MagicMock` created with autospec could
+ raise an exception
+* BUGFIX: passing multiple spec arguments to patchers (`spec` , `spec_set` and
+ `autospec`) had unpredictable results, now it is an error
+* BUGFIX: using `spec=True` *and* `create=True` as arguments to patchers could
+ result in using `DEFAULT` as the spec. Now it is an error instead
+* BUGFIX: using `spec` or `autospec` arguments to patchers, along with
+ `spec_set=True` did not work correctly
+* BUGFIX: using an object that evaluates to False as a spec could be ignored
+* BUGFIX: a list as the `spec` argument to a patcher would always result in a
+ non-callable mock. Now if `__call__` is in the spec the mock is callable
+
+
+2012/07/13 Version 1.0.0 beta 1
+--------------------------------
+
+* Added `patch.stopall` method to stop all active patches created by `start`
+* BUGFIX: calling `MagicMock.reset_mock` wouldn't reset magic method mocks
+* BUGFIX: calling `reset_mock` on a `MagicMock` created with autospec could
+ raise an exception
+
+
+2012/05/04 Version 1.0.0 alpha 2
+--------------------------------
+
+* `PropertyMock` attributes are now standard `MagicMocks`
+* `create_autospec` works with attributes present in results of `dir` that
+ can't be fetched from the object's class. Contributed by Konstantine Rybnikov
+* Any exceptions in an iterable `side_effect` will be raised instead of
+ returned
+* In Python 3, `create_autospec` now supports keyword only arguments
+
+
+2012/03/25 Version 1.0.0 alpha 1
+--------------------------------
+
+The standard library version!
+
+* `mocksignature`, along with the `mocksignature` argument to `patch`, removed
+* Support for deleting attributes (accessing deleted attributes will raise an
+ `AttributeError`)
+* Added the `mock_open` helper function for mocking the builtin `open`
+* `__class__` is assignable, so a mock can pass an `isinstance` check without
+ requiring a spec
+* Addition of `PropertyMock`, for mocking properties
+* `MagicMocks` made unorderable by default (in Python 3). The comparison
+ methods (other than equality and inequality) now return `NotImplemented`
+* Propagate traceback info to support subclassing of `_patch` by other
+ libraries
+* BUGFIX: passing multiple spec arguments to patchers (`spec` , `spec_set` and
+ `autospec`) had unpredictable results, now it is an error
+* BUGFIX: using `spec=True` *and* `create=True` as arguments to patchers could
+ result in using `DEFAULT` as the spec. Now it is an error instead
+* BUGFIX: using `spec` or `autospec` arguments to patchers, along with
+ `spec_set=True` did not work correctly
+* BUGFIX: using an object that evaluates to False as a spec could be ignored
+* BUGFIX: a list as the `spec` argument to a patcher would always result in a
+ non-callable mock. Now if `__call__` is in the spec the mock is callable
+
+
+2012/02/13 Version 0.8.0
+------------------------
+
+The only changes since 0.8rc2 are:
+
+* Improved repr of :data:`sentinel` objects
+* :data:`ANY` can be used for comparisons against :data:`call` objects
+* The return value of `MagicMock.__iter__` method can be set to
+ any iterable and isn't required to be an iterator
+
+Full List of changes since 0.7:
+
+mock 0.8.0 is the last version that will support Python 2.4.
+
+* Addition of :attr:`~Mock.mock_calls` list for *all* calls (including magic
+ methods and chained calls)
+* :func:`patch` and :func:`patch.object` now create a :class:`MagicMock`
+ instead of a :class:`Mock` by default
+* The patchers (`patch`, `patch.object` and `patch.dict`), plus `Mock` and
+ `MagicMock`, take arbitrary keyword arguments for configuration
+* New mock method :meth:`~Mock.configure_mock` for setting attributes and
+ return values / side effects on the mock and its attributes
+* New mock assert methods :meth:`~Mock.assert_any_call` and
+ :meth:`~Mock.assert_has_calls`
+* Implemented :ref:`auto-speccing` (recursive, lazy speccing of mocks with
+ mocked signatures for functions/methods), as the `autospec` argument to
+ `patch`
+* Added the :func:`create_autospec` function for manually creating
+ 'auto-specced' mocks
+* :func:`patch.multiple` for doing multiple patches in a single call, using
+ keyword arguments
+* Setting :attr:`~Mock.side_effect` to an iterable will cause calls to the mock
+ to return the next value from the iterable
+* New `new_callable` argument to `patch` and `patch.object` allowing you to
+ pass in a class or callable object (instead of `MagicMock`) that will be
+ called to replace the object being patched
+* Addition of :class:`NonCallableMock` and :class:`NonCallableMagicMock`, mocks
+ without a `__call__` method
+* Addition of :meth:`~Mock.mock_add_spec` method for adding (or changing) a
+ spec on an existing mock
+* Protocol methods on :class:`MagicMock` are magic mocks, and are created
+ lazily on first lookup. This means the result of calling a protocol method is
+ a `MagicMock` instead of a `Mock` as it was previously
+* Addition of :meth:`~Mock.attach_mock` method
+* Added :data:`ANY` for ignoring arguments in :meth:`~Mock.assert_called_with`
+ calls
+* Addition of :data:`call` helper object
+* Improved repr for mocks
+* Improved repr for :attr:`Mock.call_args` and entries in
+ :attr:`Mock.call_args_list`, :attr:`Mock.method_calls` and
+ :attr:`Mock.mock_calls`
+* Improved repr for :data:`sentinel` objects
+* `patch` lookup is done at use time not at decoration time
+* In Python 2.6 or more recent, `dir` on a mock will report all the dynamically
+ created attributes (or the full list of attributes if there is a spec) as
+ well as all the mock methods and attributes.
+* Module level :data:`FILTER_DIR` added to control whether `dir(mock)` filters
+ private attributes. `True` by default.
+* `patch.TEST_PREFIX` for controlling how patchers recognise test methods when
+ used to decorate a class
+* Support for using Java exceptions as a :attr:`~Mock.side_effect` on Jython
+* `Mock` call lists (`call_args_list`, `method_calls` & `mock_calls`) are now
+ custom list objects that allow membership tests for "sub lists" and have
+ a nicer representation if you `str` or `print` them
+* Mocks attached as attributes or return values to other mocks have calls
+ recorded in `method_calls` and `mock_calls` of the parent (unless a name is
+ already set on the child)
+* Improved failure messages for `assert_called_with` and
+ `assert_called_once_with`
+* The return value of the :class:`MagicMock` `__iter__` method can be set to
+ any iterable and isn't required to be an iterator
+* Added the Mock API (`assert_called_with` etc) to functions created by
+ :func:`mocksignature`
+* Tuples as well as lists can be used to specify allowed methods for `spec` &
+ `spec_set` arguments
+* Calling `stop` on an unstarted patcher fails with a more meaningful error
+ message
+* Renamed the internal classes `Sentinel` and `SentinelObject` to prevent abuse
+* BUGFIX: an error creating a patch, with nested patch decorators, won't leave
+ patches in place
+* BUGFIX: `__truediv__` and `__rtruediv__` not available as magic methods on
+ mocks in Python 3
+* BUGFIX: `assert_called_with` / `assert_called_once_with` can be used with
+ `self` as a keyword argument
+* BUGFIX: when patching a class with an explicit spec / spec_set (not a
+ boolean) it applies "spec inheritance" to the return value of the created
+ mock (the "instance")
+* BUGFIX: remove the `__unittest` marker causing traceback truncation
+* Removal of deprecated `patch_object`
+* Private attributes `_name`, `_methods`, `_children`, `_wraps` and `_parent`
+ (etc) renamed to reduce likelihood of clash with user attributes.
+* Added license file to the distribution
+
+
+2012/01/10 Version 0.8.0 release candidate 2
+--------------------------------------------
+
+* Removed the `configure` keyword argument to `create_autospec` and allow
+ arbitrary keyword arguments (for the `Mock` constructor) instead
+* Fixed `ANY` equality with some types in `assert_called_with` calls
+* Switched to a standard Sphinx theme (compatible with
+ `readthedocs.org <http://mock.readthedocs.org>`_)
+
+
+2011/12/29 Version 0.8.0 release candidate 1
+--------------------------------------------
+
+* `create_autospec` on the return value of a mocked class will use `__call__`
+ for the signature rather than `__init__`
+* Performance improvement instantiating `Mock` and `MagicMock`
+* Mocks used as magic methods have the same type as their parent instead of
+ being hardcoded to `MagicMock`
+
+Special thanks to Julian Berman for his help with diagnosing and improving
+performance in this release.
+
+
+2011/10/09 Version 0.8.0 beta 4
+-------------------------------
+
+* `patch` lookup is done at use time not at decoration time
+* When attaching a Mock to another Mock as a magic method, calls are recorded
+ in mock_calls
+* Addition of `attach_mock` method
+* Renamed the internal classes `Sentinel` and `SentinelObject` to prevent abuse
+* BUGFIX: various issues around circular references with mocks (setting a mock
+ return value to be itself etc)
+
+
+2011/08/15 Version 0.8.0 beta 3
+-------------------------------
+
+* Mocks attached as attributes or return values to other mocks have calls
+ recorded in `method_calls` and `mock_calls` of the parent (unless a name is
+ already set on the child)
+* Addition of `mock_add_spec` method for adding (or changing) a spec on an
+ existing mock
+* Improved repr for `Mock.call_args` and entries in `Mock.call_args_list`,
+ `Mock.method_calls` and `Mock.mock_calls`
+* Improved repr for mocks
+* BUGFIX: minor fixes in the way `mock_calls` is worked out,
+ especially for "intermediate" mocks in a call chain
+
+
+2011/08/05 Version 0.8.0 beta 2
+-------------------------------
+
+* Setting `side_effect` to an iterable will cause calls to the mock to return
+ the next value from the iterable
+* Added `assert_any_call` method
+* Moved `assert_has_calls` from call lists onto mocks
+* BUGFIX: `call_args` and all members of `call_args_list` are two tuples of
+ `(args, kwargs)` again instead of three tuples of `(name, args, kwargs)`
+
+
+2011/07/25 Version 0.8.0 beta 1
+-------------------------------
+
+* `patch.TEST_PREFIX` for controlling how patchers recognise test methods when
+ used to decorate a class
+* `Mock` call lists (`call_args_list`, `method_calls` & `mock_calls`) are now
+ custom list objects that allow membership tests for "sub lists" and have
+ an `assert_has_calls` method for unordered call checks
+* `callargs` changed to *always* be a three-tuple of `(name, args, kwargs)`
+* Addition of `mock_calls` list for *all* calls (including magic methods and
+ chained calls)
+* Extension of `call` object to support chained calls and `callargs` for better
+ comparisons with or without names. `call` object has a `call_list` method for
+ chained calls
+* Added the public `instance` argument to `create_autospec`
+* Support for using Java exceptions as a `side_effect` on Jython
+* Improved failure messages for `assert_called_with` and
+ `assert_called_once_with`
+* Tuples as well as lists can be used to specify allowed methods for `spec` &
+ `spec_set` arguments
+* BUGFIX: Fixed bug in `patch.multiple` for argument passing when creating
+ mocks
+* Added license file to the distribution
+
+
+2011/07/16 Version 0.8.0 alpha 2
+--------------------------------
+
+* `patch.multiple` for doing multiple patches in a single call, using keyword
+ arguments
+* New `new_callable` argument to `patch` and `patch.object` allowing you to
+ pass in a class or callable object (instead of `MagicMock`) that will be
+ called to replace the object being patched
+* Addition of `NonCallableMock` and `NonCallableMagicMock`, mocks without a
+ `__call__` method
+* Mocks created by `patch` have a `MagicMock` as the `return_value` where a
+ class is being patched
+* `create_autospec` can create non-callable mocks for non-callable objects.
+ `return_value` mocks of classes will be non-callable unless the class has
+ a `__call__` method
+* `autospec` creates a `MagicMock` without a spec for properties and slot
+ descriptors, because we don't know the type of object they return
+* Removed the "inherit" argument from `create_autospec`
+* Calling `stop` on an unstarted patcher fails with a more meaningful error
+ message
+* BUGFIX: an error creating a patch, with nested patch decorators, won't leave
+ patches in place
+* BUGFIX: `__truediv__` and `__rtruediv__` not available as magic methods on
+ mocks in Python 3
+* BUGFIX: `assert_called_with` / `assert_called_once_with` can be used with
+ `self` as a keyword argument
+* BUGFIX: autospec for functions / methods with an argument named self that
+ isn't the first argument no longer broken
+* BUGFIX: when patching a class with an explicit spec / spec_set (not a
+ boolean) it applies "spec inheritance" to the return value of the created
+ mock (the "instance")
+* BUGFIX: remove the `__unittest` marker causing traceback truncation
+
+
+2011/06/14 Version 0.8.0 alpha 1
+--------------------------------
+
+mock 0.8.0 is the last version that will support Python 2.4.
+
+* The patchers (`patch`, `patch.object` and `patch.dict`), plus `Mock` and
+ `MagicMock`, take arbitrary keyword arguments for configuration
+* New mock method `configure_mock` for setting attributes and return values /
+ side effects on the mock and its attributes
+* In Python 2.6 or more recent, `dir` on a mock will report all the dynamically
+ created attributes (or the full list of attributes if there is a spec) as
+ well as all the mock methods and attributes.
+* Module level `FILTER_DIR` added to control whether `dir(mock)` filters
+ private attributes. `True` by default. Note that `vars(Mock())` can still be
+  used to get all instance attributes and `dir(type(Mock()))` will still return
+ all the other attributes (irrespective of `FILTER_DIR`)
+* `patch` and `patch.object` now create a `MagicMock` instead of a `Mock` by
+ default
+* Added `ANY` for ignoring arguments in `assert_called_with` calls
+* Addition of `call` helper object
+* Protocol methods on `MagicMock` are magic mocks, and are created lazily on
+ first lookup. This means the result of calling a protocol method is a
+ MagicMock instead of a Mock as it was previously
+* Added the Mock API (`assert_called_with` etc) to functions created by
+ `mocksignature`
+* Private attributes `_name`, `_methods`, `_children`, `_wraps` and `_parent`
+ (etc) renamed to reduce likelihood of clash with user attributes.
+* Implemented auto-speccing (recursive, lazy speccing of mocks with mocked
+ signatures for functions/methods)
+
+ Limitations:
+
+ - Doesn't mock magic methods or attributes (it creates MagicMocks, so the
+ magic methods are *there*, they just don't have the signature mocked nor
+ are attributes followed)
+ - Doesn't mock function / method attributes
+ - Uses object traversal on the objects being mocked to determine types - so
+ properties etc may be triggered
+ - The return value of mocked classes (the 'instance') has the same call
+ signature as the class __init__ (as they share the same spec)
+
+ You create auto-specced mocks by passing `autospec=True` to `patch`.
+
+ Note that attributes that are None are special cased and mocked without a
+ spec (so any attribute / method can be used). This is because None is
+ typically used as a default value for attributes that may be of some other
+ type, and as we don't know what type that may be we allow all access.
+
+ Note that the `autospec` option to `patch` obsoletes the `mocksignature`
+ option.
+
+* Added the `create_autospec` function for manually creating 'auto-specced'
+ mocks
+* Removal of deprecated `patch_object`
+
+
+2011/05/30 Version 0.7.2
+------------------------
+
+* BUGFIX: instances of list subclasses can now be used as mock specs
+* BUGFIX: MagicMock equality / inequality protocol methods changed to use the
+ default equality / inequality. This is done through a `side_effect` on
+ the mocks used for `__eq__` / `__ne__`
+
+
+2011/05/06 Version 0.7.1
+------------------------
+
+Package fixes contributed by Michael Fladischer. No code changes.
+
+* Include template in package
+* Use isolated binaries for the tox tests
+* Unset executable bit on docs
+* Fix DOS line endings in getting-started.txt
+
+
+2011/03/05 Version 0.7.0
+------------------------
+
+No API changes since 0.7.0 rc1. Many documentation changes including a stylish
+new `Sphinx theme <https://github.com/coordt/ADCtheme/>`_.
+
+The full set of changes since 0.6.0 are:
+
+* Python 3 compatibility
+* Ability to mock magic methods with `Mock` and addition of `MagicMock`
+ with pre-created magic methods
+* Addition of `mocksignature` and `mocksignature` argument to `patch` and
+ `patch.object`
+* Addition of `patch.dict` for changing dictionaries during a test
+* Ability to use `patch`, `patch.object` and `patch.dict` as class decorators
+* Renamed ``patch_object`` to `patch.object` (``patch_object`` is
+ deprecated)
+* Addition of soft comparisons: `call_args`, `call_args_list` and `method_calls`
+ now return tuple-like objects which compare equal even when empty args
+ or kwargs are skipped
+* patchers (`patch`, `patch.object` and `patch.dict`) have start and stop
+ methods
+* Addition of `assert_called_once_with` method
+* Mocks can now be named (`name` argument to constructor) and the name is used
+ in the repr
+* repr of a mock with a spec includes the class name of the spec
+* `assert_called_with` works with `python -OO`
+* New `spec_set` keyword argument to `Mock` and `patch`. If used,
+ attempting to *set* an attribute on a mock not on the spec will raise an
+ `AttributeError`
+* Mocks created with a spec can now pass `isinstance` tests (`__class__`
+ returns the type of the spec)
+* Added docstrings to all objects
+* Improved failure message for `Mock.assert_called_with` when the mock
+ has not been called at all
+* Decorated functions / methods have their docstring and `__module__`
+ preserved on Python 2.4.
+* BUGFIX: `mock.patch` now works correctly with certain types of objects that
+ proxy attribute access, like the django settings object
+* BUGFIX: mocks are now copyable (thanks to Ned Batchelder for reporting and
+ diagnosing this)
+* BUGFIX: `spec=True` works with old style classes
+* BUGFIX: ``help(mock)`` works now (on the module). Can no longer use ``__bases__``
+ as a valid sentinel name (thanks to Stephen Emslie for reporting and
+ diagnosing this)
+* BUGFIX: ``side_effect`` now works with ``BaseException`` exceptions like
+ ``KeyboardInterrupt``
+* BUGFIX: `reset_mock` caused infinite recursion when a mock is set as its own
+ return value
+* BUGFIX: patching the same object twice now restores the patches correctly
+* with statement tests now skipped on Python 2.4
+* Tests require unittest2 (or unittest2-py3k) to run
+* Tested with `tox <http://pypi.python.org/pypi/tox>`_ on Python 2.4 - 3.2,
+ jython and pypy (excluding 3.0)
+* Added 'build_sphinx' command to setup.py (requires setuptools or distribute)
+ Thanks to Florian Bauer
+* Switched from subversion to mercurial for source code control
+* `Konrad Delong <http://konryd.blogspot.com/>`_ added as co-maintainer
+
+
+2011/02/16 Version 0.7.0 RC 1
+-----------------------------
+
+Changes since beta 4:
+
+* Tested with jython, pypy and Python 3.2 and 3.1
+* Decorated functions / methods have their docstring and `__module__`
+ preserved on Python 2.4
+* BUGFIX: `mock.patch` now works correctly with certain types of objects that
+ proxy attribute access, like the django settings object
+* BUGFIX: `reset_mock` caused infinite recursion when a mock is set as its own
+ return value
+
+
+2010/11/12 Version 0.7.0 beta 4
+-------------------------------
+
+* patchers (`patch`, `patch.object` and `patch.dict`) have start and stop
+ methods
+* Addition of `assert_called_once_with` method
+* repr of a mock with a spec includes the class name of the spec
+* `assert_called_with` works with `python -OO`
+* New `spec_set` keyword argument to `Mock` and `patch`. If used,
+ attempting to *set* an attribute on a mock not on the spec will raise an
+ `AttributeError`
+* Attributes and return value of a `MagicMock` are `MagicMock` objects
+* Attempting to set an unsupported magic method now raises an `AttributeError`
+* `patch.dict` works as a class decorator
+* Switched from subversion to mercurial for source code control
+* BUGFIX: mocks are now copyable (thanks to Ned Batchelder for reporting and
+ diagnosing this)
+* BUGFIX: `spec=True` works with old style classes
+* BUGFIX: `mocksignature=True` can now patch instance methods via
+ `patch.object`
+
+
+2010/09/18 Version 0.7.0 beta 3
+-------------------------------
+
+* Using spec with :class:`MagicMock` only pre-creates magic methods in the spec
+* Setting a magic method on a mock with a ``spec`` can only be done if the
+ spec has that method
+* Mocks can now be named (`name` argument to constructor) and the name is used
+ in the repr
+* `mocksignature` can now be used with classes (signature based on `__init__`)
+ and callable objects (signature based on `__call__`)
+* Mocks created with a spec can now pass `isinstance` tests (`__class__`
+ returns the type of the spec)
+* Default numeric value for MagicMock is 1 rather than zero (because the
+ MagicMock bool defaults to True and 0 is False)
+* Improved failure message for :meth:`~Mock.assert_called_with` when the mock
+ has not been called at all
+* Adding the following to the set of supported magic methods:
+
+ - ``__getformat__`` and ``__setformat__``
+ - pickle methods
+ - ``__trunc__``, ``__ceil__`` and ``__floor__``
+ - ``__sizeof__``
+
+* Added 'build_sphinx' command to setup.py (requires setuptools or distribute)
+ Thanks to Florian Bauer
+* with statement tests now skipped on Python 2.4
+* Tests require unittest2 to run on Python 2.7
+* Improved several docstrings and documentation
+
+
+2010/06/23 Version 0.7.0 beta 2
+-------------------------------
+
+* :func:`patch.dict` works as a context manager as well as a decorator
+* ``patch.dict`` takes a string to specify dictionary as well as a dictionary
+ object. If a string is supplied the name specified is imported
+* BUGFIX: ``patch.dict`` restores dictionary even when an exception is raised
+
+
+2010/06/22 Version 0.7.0 beta 1
+-------------------------------
+
+* Addition of :func:`mocksignature`
+* Ability to mock magic methods
+* Ability to use ``patch`` and ``patch.object`` as class decorators
+* Renamed ``patch_object`` to :func:`patch.object` (``patch_object`` is
+ deprecated)
+* Addition of :class:`MagicMock` class with all magic methods pre-created for you
+* Python 3 compatibility (tested with 3.2 but should work with 3.0 & 3.1 as
+ well)
+* Addition of :func:`patch.dict` for changing dictionaries during a test
+* Addition of ``mocksignature`` argument to ``patch`` and ``patch.object``
+* ``help(mock)`` works now (on the module). Can no longer use ``__bases__``
+ as a valid sentinel name (thanks to Stephen Emslie for reporting and
+ diagnosing this)
+* Addition of soft comparisons: `call_args`, `call_args_list` and `method_calls`
+ now return tuple-like objects which compare equal even when empty args
+ or kwargs are skipped
+* Added docstrings.
+* BUGFIX: ``side_effect`` now works with ``BaseException`` exceptions like
+ ``KeyboardInterrupt``
+* BUGFIX: patching the same object twice now restores the patches correctly
+* The tests now require `unittest2 <http://pypi.python.org/pypi/unittest2>`_
+ to run
+* `Konrad Delong <http://konryd.blogspot.com/>`_ added as co-maintainer
+
+
+2009/08/22 Version 0.6.0
+------------------------
+
+* New test layout compatible with test discovery
+* Descriptors (static methods / class methods etc) can now be patched and
+ restored correctly
+* Mocks can raise exceptions when called by setting ``side_effect`` to an
+ exception class or instance
+* Mocks that wrap objects will not pass on calls to the underlying object if
+ an explicit return_value is set
+
+
+2009/04/17 Version 0.5.0
+------------------------
+
+* Made DEFAULT part of the public api.
+* Documentation built with Sphinx.
+* ``side_effect`` is now called with the same arguments as the mock is called with and,
+  if it returns a non-DEFAULT value, that value is automatically set as the ``mock.return_value``.
+* ``wraps`` keyword argument used for wrapping objects (and passing calls through to the wrapped object).
+* ``Mock.reset`` renamed to ``Mock.reset_mock``, as reset is a common API name.
+* ``patch`` / ``patch_object`` are now context managers and can be used with ``with``.
+* A new 'create' keyword argument to patch and patch_object that allows them to patch
+ (and unpatch) attributes that don't exist. (Potentially unsafe to use - it can allow
+ you to have tests that pass when they are testing an API that doesn't exist - use at
+ your own risk!)
+* The methods keyword argument to Mock has been removed and merged with spec. The spec
+ argument can now be a list of methods or an object to take the spec from.
+* Nested patches may now be applied in a different order (created mocks passed
+ in the opposite order). This is actually a bugfix.
+* patch and patch_object now take a spec keyword argument. If spec is
+ passed in as 'True' then the Mock created will take the object it is replacing
+ as its spec object. If the object being replaced is a class, then the return
+ value for the mock will also use the class as a spec.
+* A Mock created without a spec will not attempt to mock any magic methods / attributes
+ (they will raise an ``AttributeError`` instead).
+
+
+2008/10/12 Version 0.4.0
+------------------------
+
+* Default return value is now a new mock rather than None
+* return_value added as a keyword argument to the constructor
+* New method 'assert_called_with'
+* Added 'side_effect' attribute / keyword argument called when mock is called
+* patch decorator split into two decorators:
+
+ - ``patch_object`` which takes an object and an attribute name to patch
+ (plus optionally a value to patch with which defaults to a mock object)
+ - ``patch`` which takes a string specifying a target to patch; in the form
+ 'package.module.Class.attribute'. (plus optionally a value to
+ patch with which defaults to a mock object)
+
+* Can now patch objects with ``None``
+* Change to patch for nose compatibility with error reporting in wrapped functions
+* Reset no longer clears children / return value etc - it just resets
+ call count and call args. It also calls reset on all children (and
+ the return value if it is a mock).
+
+Thanks to Konrad Delong, Kevin Dangoor and others for patches and suggestions.
+
+
+2007/12/03 Version 0.3.1
+-------------------------
+
+``patch`` maintains the name of decorated functions for compatibility with nose
+test autodiscovery.
+
+Tests decorated with ``patch`` that use the two argument form (implicit mock
+creation) will receive the mock(s) passed in as extra arguments.
+
+Thanks to Kevin Dangoor for these changes.
+
+
+2007/11/30 Version 0.3.0
+-------------------------
+
+Removed ``patch_module``. ``patch`` can now take a string as the first
+argument for patching modules.
+
+The third argument to ``patch`` is optional - a mock will be created by
+default if it is not passed in.
+
+
+2007/11/21 Version 0.2.1
+-------------------------
+
+Bug fix, allows reuse of functions decorated with ``patch`` and ``patch_module``.
+
+
+2007/11/20 Version 0.2.0
+-------------------------
+
+Added ``spec`` keyword argument for creating ``Mock`` objects from a
+specification object.
+
+Added ``patch`` and ``patch_module`` monkey patching decorators.
+
+Added ``sentinel`` for convenient access to unique objects.
+
+Distribution includes unit tests.
+
+
+2007/11/19 Version 0.1.0
+-------------------------
+
+Initial release.
+
+
+TODO and Limitations
+====================
+
+Contributions, bug reports and comments welcomed!
+
+Feature requests and bug reports are handled on the issue tracker:
+
+ * `mock issue tracker <http://code.google.com/p/mock/issues/list>`_
+
+`wraps` is not integrated with magic methods.
+
+`patch` could do the patching automatically in the constructor and unpatch in
+the destructor. This would be useful in itself, but it violates TOOWTDI and
+would be unsafe for IronPython & PyPy (non-deterministic calling of
+destructors). Destructors aren't called in CPython where there are reference
+cycles, but a weak reference with a callback can be used to get round this.
+
+`Mock` has several attributes. This makes it unsuitable for mocking objects
+that use these attribute names. A way round this would be to provide methods
+that *hide* these attributes when needed. In 0.8 many, but not all, of these
+attributes are renamed to gain a `_mock` prefix, making it less likely that
+they will clash. Any outstanding attributes that haven't been modified with
+the prefix should be changed.
+
+If a patch is started using `patch.start` and then not stopped correctly, the
+unpatching is not done. Using weak references it would be possible to
+detect and fix this when the patch object itself is garbage collected. This
+would be tricky to get right though.
+
+When a `Mock` is created by `patch`, arbitrary keywords can be used to set
+attributes. If `patch` is created with a `spec`, and is replacing a class, then
+a `return_value` mock is created. The keyword arguments are not applied to the
+child mock, but could be.
+
+When mocking a class with `patch`, passing in `spec=True` or `autospec=True`,
+the mock class has an instance created from the same spec. Should this be the
+default behaviour for mocks anyway (mock return values inheriting the spec
+from their parent), or should it be controlled by an additional keyword
+argument (`inherit`) to the Mock constructor? `create_autospec` does this, so
+an additional keyword argument to Mock is probably unnecessary.
+
+The `mocksignature` argument to `patch` with a non `Mock` passed into
+`new_callable` will *probably* cause an error. Should it just be invalid?
+
+Note that `NonCallableMock` and `NonCallableMagicMock` still have the unused
+(and unusable) attributes: `return_value`, `side_effect`, `call_count`,
+`call_args` and `call_args_list`. These could be removed or raise errors on
+getting / setting. They also have the `assert_called_with` and
+`assert_called_once_with` methods. Removing these would be pointless as
+fetching them would create a mock (attribute) that could be called without
+error.
+
+There is some outstanding technical debt. The way that autospeccing mocks
+function signatures was copied and modified from `mocksignature`. This could
+all be refactored into one set of functions instead of two. The ways we tell
+if patchers are started and if a patcher is being used for a `patch.multiple`
+call are both horrible. There are now a host of helper functions that should
+be rationalised. (Probably time to split mock into a package instead of a
+module.)
+
+Passing arbitrary keyword arguments to `create_autospec`, or to `patch` with
+`autospec`, works fine when mocking a *function*. However, the arbitrary
+attributes are set on the created mock - but `create_autospec` returns a
+real function (which doesn't have those attributes). Then again, what is the
+use case for using autospec to create functions with attributes that don't
+exist on the original?
+
+`mocksignature`, plus the `call_args_list` and `method_calls` attributes of
+`Mock` could all be deprecated.
diff --git a/third_party/python/mock-1.0.0/docs/compare.txt b/third_party/python/mock-1.0.0/docs/compare.txt
new file mode 100644
index 0000000000..41555308e2
--- /dev/null
+++ b/third_party/python/mock-1.0.0/docs/compare.txt
@@ -0,0 +1,628 @@
+=========================
+ Mock Library Comparison
+=========================
+
+
+.. testsetup::
+
+ def assertEqual(a, b):
+ assert a == b, ("%r != %r" % (a, b))
+
+ def assertRaises(Exc, func):
+ try:
+ func()
+ except Exc:
+ return
+ assert False, ("%s not raised" % Exc)
+
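+    # stand-in objects: a fake 'somemodule' plus minimal classes used by the
+    # comparison examples below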
+ sys.modules['somemodule'] = somemodule = mock.Mock(name='somemodule')
+ class SomeException(Exception):
+ some_method = method1 = method2 = None
+ some_other_object = SomeObject = SomeException
+
+
+A side-by-side comparison of how to accomplish some basic tasks with mock and
+some other popular Python mocking libraries and frameworks.
+
+These are:
+
+* `flexmock <http://pypi.python.org/pypi/flexmock>`_
+* `mox <http://pypi.python.org/pypi/mox>`_
+* `Mocker <http://niemeyer.net/mocker>`_
+* `dingus <http://pypi.python.org/pypi/dingus>`_
+* `fudge <http://pypi.python.org/pypi/fudge>`_
+
+Popular Python mocking frameworks not yet represented here include
+`MiniMock <http://pypi.python.org/pypi/MiniMock>`_.
+
+`pMock <http://pmock.sourceforge.net/>`_ (last release 2004 and doesn't import
+in recent versions of Python) and
+`python-mock <http://python-mock.sourceforge.net/>`_ (last release 2005) are
+intentionally omitted.
+
+.. note::
+
+    A more up-to-date version of this comparison, tested against all the mock
+    libraries (only the mock examples on this page can be executed as
+    doctests), is maintained by Gary Bernhardt:
+
+ * `Python Mock Library Comparison
+ <http://garybernhardt.github.com/python-mock-comparison/>`_
+
+This comparison is by no means complete, and also may not be fully idiomatic
+for all the libraries represented. *Please* contribute corrections, missing
+comparisons, or comparisons for additional libraries to the `mock issue
+tracker <https://code.google.com/p/mock/issues/list>`_.
+
+This comparison page was originally created by the `Mox project
+<https://code.google.com/p/pymox/wiki/MoxComparison>`_ and then extended for
+`flexmock and mock <http://has207.github.com/flexmock/compare.html>`_ by
+Herman Sheremetyev. Dingus examples written by `Gary Bernhardt
+<http://garybernhardt.github.com/python-mock-comparison/>`_. fudge examples
+provided by `Kumar McMillan <http://farmdev.com/>`_.
+
+.. note::
+
+    The example tasks here were originally created by Mox, which is a mocking
+    *framework* rather than a library like mock. The tasks shown naturally
+    exemplify tasks that frameworks are good at and not the ones they make
+    harder. In particular you can take a `Mock` or `MagicMock` object and use
+    it in any way you want with no up-front configuration. The same is also
+    true for Dingus.
+
+ The examples for mock here assume version 0.7.0.
+
+
+Simple fake object
+~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method.return_value = "calculated value"
+ >>> my_mock.some_attribute = "value"
+ >>> assertEqual("calculated value", my_mock.some_method())
+ >>> assertEqual("value", my_mock.some_attribute)
+
+::
+
+ # Flexmock
+ mock = flexmock(some_method=lambda: "calculated value", some_attribute="value")
+ assertEqual("calculated value", mock.some_method())
+ assertEqual("value", mock.some_attribute)
+
+ # Mox
+ mock = mox.MockAnything()
+ mock.some_method().AndReturn("calculated value")
+ mock.some_attribute = "value"
+ mox.Replay(mock)
+ assertEqual("calculated value", mock.some_method())
+ assertEqual("value", mock.some_attribute)
+
+ # Mocker
+ mock = mocker.mock()
+ mock.some_method()
+ mocker.result("calculated value")
+ mocker.replay()
+ mock.some_attribute = "value"
+ assertEqual("calculated value", mock.some_method())
+ assertEqual("value", mock.some_attribute)
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus(some_attribute="value",
+ ... some_method__returns="calculated value")
+ >>> assertEqual("calculated value", my_dingus.some_method())
+ >>> assertEqual("value", my_dingus.some_attribute)
+
+::
+
+ >>> # fudge
+ >>> my_fake = (fudge.Fake()
+ ... .provides('some_method')
+ ... .returns("calculated value")
+ ... .has_attr(some_attribute="value"))
+ ...
+ >>> assertEqual("calculated value", my_fake.some_method())
+ >>> assertEqual("value", my_fake.some_attribute)
+
+
+Simple mock
+~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method.return_value = "value"
+ >>> assertEqual("value", my_mock.some_method())
+ >>> my_mock.some_method.assert_called_once_with()
+
+::
+
+ # Flexmock
+ mock = flexmock()
+ mock.should_receive("some_method").and_return("value").once
+ assertEqual("value", mock.some_method())
+
+ # Mox
+ mock = mox.MockAnything()
+ mock.some_method().AndReturn("value")
+ mox.Replay(mock)
+ assertEqual("value", mock.some_method())
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock()
+ mock.some_method()
+ mocker.result("value")
+ mocker.replay()
+ assertEqual("value", mock.some_method())
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus(some_method__returns="value")
+ >>> assertEqual("value", my_dingus.some_method())
+ >>> assert my_dingus.some_method.calls().once()
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = (fudge.Fake()
+ ... .expects('some_method')
+ ... .returns("value")
+ ... .times_called(1))
+ ...
+ >>> test()
+ Traceback (most recent call last):
+ ...
+ AssertionError: fake:my_fake.some_method() was not called
+
+
+Creating partial mocks
+~~~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> SomeObject.some_method = mock.Mock(return_value='value')
+ >>> assertEqual("value", SomeObject.some_method())
+
+::
+
+ # Flexmock
+ flexmock(SomeObject).should_receive("some_method").and_return('value')
+ assertEqual("value", mock.some_method())
+
+ # Mox
+ mock = mox.MockObject(SomeObject)
+ mock.some_method().AndReturn("value")
+ mox.Replay(mock)
+ assertEqual("value", mock.some_method())
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock(SomeObject)
+    mock.some_method()
+ mocker.result("value")
+ mocker.replay()
+ assertEqual("value", mock.some_method())
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> object = SomeObject
+ >>> object.some_method = dingus.Dingus(return_value="value")
+ >>> assertEqual("value", object.some_method())
+
+::
+
+ >>> # fudge
+ >>> fake = fudge.Fake().is_callable().returns("<fudge-value>")
+ >>> with fudge.patched_context(SomeObject, 'some_method', fake):
+ ... s = SomeObject()
+ ... assertEqual("<fudge-value>", s.some_method())
+ ...
+
+
+Ensure calls are made in specific order
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock(spec=SomeObject)
+ >>> my_mock.method1()
+ <Mock name='mock.method1()' id='...'>
+ >>> my_mock.method2()
+ <Mock name='mock.method2()' id='...'>
+ >>> assertEqual(my_mock.mock_calls, [call.method1(), call.method2()])
+
+::
+
+ # Flexmock
+ mock = flexmock(SomeObject)
+ mock.should_receive('method1').once.ordered.and_return('first thing')
+ mock.should_receive('method2').once.ordered.and_return('second thing')
+
+ # Mox
+ mock = mox.MockObject(SomeObject)
+ mock.method1().AndReturn('first thing')
+ mock.method2().AndReturn('second thing')
+ mox.Replay(mock)
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock()
+ with mocker.order():
+ mock.method1()
+ mocker.result('first thing')
+ mock.method2()
+ mocker.result('second thing')
+ mocker.replay()
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> my_dingus.method1()
+ <Dingus ...>
+ >>> my_dingus.method2()
+ <Dingus ...>
+ >>> assertEqual(['method1', 'method2'], [call.name for call in my_dingus.calls])
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = (fudge.Fake()
+ ... .remember_order()
+ ... .expects('method1')
+ ... .expects('method2'))
+ ... my_fake.method2()
+ ... my_fake.method1()
+ ...
+ >>> test()
+ Traceback (most recent call last):
+ ...
+ AssertionError: Call #1 was fake:my_fake.method2(); Expected: #1 fake:my_fake.method1(), #2 fake:my_fake.method2(), end
+
+
+Raising exceptions
+~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method.side_effect = SomeException("message")
+ >>> assertRaises(SomeException, my_mock.some_method)
+
+::
+
+ # Flexmock
+ mock = flexmock()
+ mock.should_receive("some_method").and_raise(SomeException("message"))
+ assertRaises(SomeException, mock.some_method)
+
+ # Mox
+ mock = mox.MockAnything()
+ mock.some_method().AndRaise(SomeException("message"))
+ mox.Replay(mock)
+ assertRaises(SomeException, mock.some_method)
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock()
+ mock.some_method()
+ mocker.throw(SomeException("message"))
+ mocker.replay()
+ assertRaises(SomeException, mock.some_method)
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> my_dingus.some_method = dingus.exception_raiser(SomeException)
+ >>> assertRaises(SomeException, my_dingus.some_method)
+
+::
+
+ >>> # fudge
+ >>> my_fake = (fudge.Fake()
+ ... .is_callable()
+ ... .raises(SomeException("message")))
+ ...
+ >>> my_fake()
+ Traceback (most recent call last):
+ ...
+ SomeException: message
+
+
+Override new instances of a class
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> with mock.patch('somemodule.Someclass') as MockClass:
+ ... MockClass.return_value = some_other_object
+ ... assertEqual(some_other_object, somemodule.Someclass())
+ ...
+
+
+::
+
+ # Flexmock
+ flexmock(some_module.SomeClass, new_instances=some_other_object)
+ assertEqual(some_other_object, some_module.SomeClass())
+
+ # Mox
+ # (you will probably have mox.Mox() available as self.mox in a real test)
+ mox.Mox().StubOutWithMock(some_module, 'SomeClass', use_mock_anything=True)
+ some_module.SomeClass().AndReturn(some_other_object)
+ mox.ReplayAll()
+ assertEqual(some_other_object, some_module.SomeClass())
+
+ # Mocker
+ instance = mocker.mock()
+ klass = mocker.replace(SomeClass, spec=None)
+ klass('expected', 'args')
+ mocker.result(instance)
+
+::
+
+ >>> # Dingus
+ >>> MockClass = dingus.Dingus(return_value=some_other_object)
+ >>> with dingus.patch('somemodule.SomeClass', MockClass):
+ ... assertEqual(some_other_object, somemodule.SomeClass())
+ ...
+
+::
+
+ >>> # fudge
+ >>> @fudge.patch('somemodule.SomeClass')
+ ... def test(FakeClass):
+ ... FakeClass.is_callable().returns(some_other_object)
+ ... assertEqual(some_other_object, somemodule.SomeClass())
+ ...
+ >>> test()
+
+
+Call the same method multiple times
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. note::
+
+ You don't need to do *any* configuration to call `mock.Mock()` methods
+ multiple times. Attributes like `call_count`, `call_args_list` and
+ `method_calls` provide various different ways of making assertions about
+ how the mock was used.
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method()
+ <Mock name='mock.some_method()' id='...'>
+ >>> my_mock.some_method()
+ <Mock name='mock.some_method()' id='...'>
+ >>> assert my_mock.some_method.call_count >= 2
+
+::
+
+ # Flexmock # (verifies that the method gets called at least twice)
+ flexmock(some_object).should_receive('some_method').at_least.twice
+
+ # Mox
+ # (does not support variable number of calls, so you need to create a new entry for each explicit call)
+ mock = mox.MockObject(some_object)
+ mock.some_method(mox.IgnoreArg(), mox.IgnoreArg())
+ mock.some_method(mox.IgnoreArg(), mox.IgnoreArg())
+ mox.Replay(mock)
+ mox.Verify(mock)
+
+ # Mocker
+ # (TODO)
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> my_dingus.some_method()
+ <Dingus ...>
+ >>> my_dingus.some_method()
+ <Dingus ...>
+ >>> assert len(my_dingus.calls('some_method')) == 2
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = fudge.Fake().expects('some_method').times_called(2)
+ ... my_fake.some_method()
+ ...
+ >>> test()
+ Traceback (most recent call last):
+ ...
+ AssertionError: fake:my_fake.some_method() was called 1 time(s). Expected 2.
+
+
+Mock chained methods
+~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> method3 = my_mock.method1.return_value.method2.return_value.method3
+ >>> method3.return_value = 'some value'
+ >>> assertEqual('some value', my_mock.method1().method2().method3(1, 2))
+ >>> method3.assert_called_once_with(1, 2)
+
+::
+
+ # Flexmock
+ # (intermediate method calls are automatically assigned to temporary fake objects
+ # and can be called with any arguments)
+ flexmock(some_object).should_receive(
+ 'method1.method2.method3'
+ ).with_args(arg1, arg2).and_return('some value')
+    assertEqual('some value', some_object.method1().method2().method3(arg1, arg2))
+
+::
+
+ # Mox
+ mock = mox.MockObject(some_object)
+ mock2 = mox.MockAnything()
+ mock3 = mox.MockAnything()
+    mock.method1().AndReturn(mock2)
+    mock2.method2().AndReturn(mock3)
+ mock3.method3(arg1, arg2).AndReturn('some_value')
+ self.mox.ReplayAll()
+ assertEqual("some_value", some_object.method1().method2().method3(arg1, arg2))
+ self.mox.VerifyAll()
+
+ # Mocker
+ # (TODO)
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> method3 = my_dingus.method1.return_value.method2.return_value.method3
+ >>> method3.return_value = 'some value'
+ >>> assertEqual('some value', my_dingus.method1().method2().method3(1, 2))
+ >>> assert method3.calls('()', 1, 2).once()
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = fudge.Fake()
+ ... (my_fake
+ ... .expects('method1')
+ ... .returns_fake()
+ ... .expects('method2')
+ ... .returns_fake()
+ ... .expects('method3')
+ ... .with_args(1, 2)
+ ... .returns('some value'))
+ ... assertEqual('some value', my_fake.method1().method2().method3(1, 2))
+ ...
+ >>> test()
+
+
+Mocking a context manager
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Examples for mock, Dingus and fudge only (so far):
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.MagicMock()
+ >>> with my_mock:
+ ... pass
+ ...
+ >>> my_mock.__enter__.assert_called_with()
+ >>> my_mock.__exit__.assert_called_with(None, None, None)
+
+::
+
+
+ >>> # Dingus (nothing special here; all dinguses are "magic mocks")
+ >>> my_dingus = dingus.Dingus()
+ >>> with my_dingus:
+ ... pass
+ ...
+ >>> assert my_dingus.__enter__.calls()
+ >>> assert my_dingus.__exit__.calls('()', None, None, None)
+
+::
+
+ >>> # fudge
+ >>> my_fake = fudge.Fake().provides('__enter__').provides('__exit__')
+ >>> with my_fake:
+ ... pass
+ ...
+
+
+Mocking the builtin open used as a context manager
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Example for mock only (so far):
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.MagicMock()
+ >>> with mock.patch('__builtin__.open', my_mock):
+ ... manager = my_mock.return_value.__enter__.return_value
+ ... manager.read.return_value = 'some data'
+ ... with open('foo') as h:
+ ... data = h.read()
+ ...
+ >>> data
+ 'some data'
+ >>> my_mock.assert_called_once_with('foo')
+
+*or*:
+
+.. doctest::
+
+ >>> # mock
+ >>> with mock.patch('__builtin__.open') as my_mock:
+ ... my_mock.return_value.__enter__ = lambda s: s
+ ... my_mock.return_value.__exit__ = mock.Mock()
+ ... my_mock.return_value.read.return_value = 'some data'
+ ... with open('foo') as h:
+ ... data = h.read()
+ ...
+ >>> data
+ 'some data'
+ >>> my_mock.assert_called_once_with('foo')
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> with dingus.patch('__builtin__.open', my_dingus):
+ ... file_ = open.return_value.__enter__.return_value
+ ... file_.read.return_value = 'some data'
+ ... with open('foo') as h:
+    ...         data = h.read()
+ ...
+ >>> data
+ 'some data'
+ >>> assert my_dingus.calls('()', 'foo').once()
+
+::
+
+ >>> # fudge
+ >>> from contextlib import contextmanager
+ >>> from StringIO import StringIO
+ >>> @contextmanager
+ ... def fake_file(filename):
+ ... yield StringIO('sekrets')
+ ...
+ >>> with fudge.patch('__builtin__.open') as fake_open:
+ ... fake_open.is_callable().calls(fake_file)
+ ... with open('/etc/password') as f:
+ ... data = f.read()
+ ...
+ fake:__builtin__.open
+ >>> data
+    'sekrets'
\ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/docs/conf.py b/third_party/python/mock-1.0.0/docs/conf.py
new file mode 100644
index 0000000000..62f0491cca
--- /dev/null
+++ b/third_party/python/mock-1.0.0/docs/conf.py
@@ -0,0 +1,209 @@
+# -*- coding: utf-8 -*-
+#
+# Mock documentation build configuration file, created by
+# sphinx-quickstart on Mon Nov 17 18:12:00 2008.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# The contents of this file are pickled, so don't put values in the namespace
+# that aren't pickleable (module imports are okay, they're removed automatically).
+#
+# All configuration values have a default value; values that are commented out
+# serve to show the default value.
+
+import sys, os
+sys.path.insert(0, os.path.abspath('..'))
+from mock import __version__
+
+# If your extensions are in another directory, add it here. If the directory
+# is relative to the documentation root, use os.path.abspath to make it
+# absolute, like shown here.
+#sys.path.append(os.path.abspath('some/directory'))
+
+# General configuration
+# ---------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.doctest']
+
+doctest_global_setup = """
+import os
+import sys
+import mock
+from mock import * # yeah, I know :-/
+import unittest2
+import __main__
+
+if os.getcwd() not in sys.path:
+ sys.path.append(os.getcwd())
+
+# keep a reference to __main__
+sys.modules['__main'] = __main__
+
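+# expose the doctest global namespace as the __main__ module (the real
+# __main__ is restored in doctest_global_cleanup below)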
+class ProxyModule(object):
+ def __init__(self):
+ self.__dict__ = globals()
+
+sys.modules['__main__'] = ProxyModule()
+"""
+
+doctest_global_cleanup = """
+sys.modules['__main__'] = sys.modules['__main']
+"""
+
+html_theme = 'nature'
+html_theme_options = {}
+
+# Add any paths that contain templates here, relative to this directory.
+#templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.txt'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General substitutions.
+project = u'Mock'
+copyright = u'2007-2012, Michael Foord & the mock team'
+
+# The default replacements for |version| and |release|, also used in various
+# other places throughout the built documents.
+#
+# The short X.Y version.
+version = __version__[:3]
+# The full version, including alpha/beta/rc tags.
+release = __version__
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+#unused_docs = []
+
+# List of directories, relative to source directories, that shouldn't be searched
+# for source files.
+exclude_trees = []
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+add_module_names = False
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'friendly'
+
+
+# Options for HTML output
+# -----------------------
+
+# The style sheet to use for HTML and HTML Help pages. A file of that name
+# must exist either in Sphinx' static/ path, or in one of the custom paths
+# given in html_static_path.
+#html_style = 'adctheme.css'
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+#html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+html_use_modindex = False
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, the reST sources are included in the HTML build as _sources/<name>.
+#html_copy_source = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Mockdoc'
+
+
+# Options for LaTeX output
+# ------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+latex_font_size = '12pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, document class [howto/manual]).
+latex_documents = [
+ ('index', 'Mock.tex', u'Mock Documentation',
+ u'Michael Foord', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+latex_use_modindex = False
\ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/docs/examples.txt b/third_party/python/mock-1.0.0/docs/examples.txt
new file mode 100644
index 0000000000..ecb994b156
--- /dev/null
+++ b/third_party/python/mock-1.0.0/docs/examples.txt
@@ -0,0 +1,1063 @@
+.. _further-examples:
+
+==================
+ Further Examples
+==================
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ from datetime import date
+
+ BackendProvider = Mock()
+ sys.modules['mymodule'] = mymodule = Mock(name='mymodule')
+
+ def grob(val):
+ "First frob and then clear val"
+ mymodule.frob(val)
+ val.clear()
+
+ mymodule.frob = lambda val: val
+ mymodule.grob = grob
+ mymodule.date = date
+
+ class TestCase(unittest2.TestCase):
+ def run(self):
+ result = unittest2.TestResult()
+ out = unittest2.TestCase.run(self, result)
+ assert result.wasSuccessful()
+
+ from mock import inPy3k
+
+
+
+For comprehensive examples, see the unit tests included in the full source
+distribution.
+
+Here are some more examples for some slightly more advanced scenarios than in
+the :ref:`getting started <getting-started>` guide.
+
+
+Mocking chained calls
+=====================
+
+Mocking chained calls is actually straightforward with mock once you
+understand the :attr:`~Mock.return_value` attribute. When a mock is called for
+the first time, or you fetch its `return_value` before it has been called, a
+new `Mock` is created.
+
+This means that you can see how the object returned from a call to a mocked
+object has been used by interrogating the `return_value` mock:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock().foo(a=2, b=3)
+ <Mock name='mock().foo()' id='...'>
+ >>> mock.return_value.foo.assert_called_with(a=2, b=3)
+
+From here it is a simple step to configure and then make assertions about
+chained calls. Of course another alternative is writing your code in a more
+testable way in the first place...
+
+So, suppose we have some code that looks a little bit like this:
+
+.. doctest::
+
+ >>> class Something(object):
+ ... def __init__(self):
+ ... self.backend = BackendProvider()
+ ... def method(self):
+ ... response = self.backend.get_endpoint('foobar').create_call('spam', 'eggs').start_call()
+ ... # more code
+
+Assuming that `BackendProvider` is already well tested, how do we test
+`method()`? Specifically, we want to test that the code section `# more
+code` uses the response object in the correct way.
+
+As this chain of calls is made from an instance attribute we can monkey patch
+the `backend` attribute on a `Something` instance. In this particular case
+we are only interested in the return value from the final call to
+`start_call` so we don't have much configuration to do. Let's assume the
+object it returns is 'file-like', so we'll ensure that our response object
+uses the builtin `file` as its `spec`.
+
+To do this we create a mock instance as our mock backend and create a mock
+response object for it. To set the response as the return value for that final
+`start_call` we could do this:
+
+ `mock_backend.get_endpoint.return_value.create_call.return_value.start_call.return_value = mock_response`.
+
+We can do that in a slightly nicer way using the :meth:`~Mock.configure_mock`
+method to directly set the return value for us:
+
+.. doctest::
+
+ >>> something = Something()
+ >>> mock_response = Mock(spec=file)
+ >>> mock_backend = Mock()
+ >>> config = {'get_endpoint.return_value.create_call.return_value.start_call.return_value': mock_response}
+ >>> mock_backend.configure_mock(**config)
+
+With these we monkey patch the "mock backend" in place and can make the real
+call:
+
+.. doctest::
+
+ >>> something.backend = mock_backend
+ >>> something.method()
+
+Using :attr:`~Mock.mock_calls` we can check the chained call with a single
+assert. A chained call is several calls in one line of code, so there will be
+several entries in `mock_calls`. We can use :meth:`call.call_list` to create
+this list of calls for us:
+
+.. doctest::
+
+ >>> chained = call.get_endpoint('foobar').create_call('spam', 'eggs').start_call()
+ >>> call_list = chained.call_list()
+ >>> assert mock_backend.mock_calls == call_list
+
+
+Partial mocking
+===============
+
+In some tests I wanted to mock out a call to `datetime.date.today()
+<http://docs.python.org/library/datetime.html#datetime.date.today>`_ to return
+a known date, but I didn't want to prevent the code under test from
+creating new date objects. Unfortunately `datetime.date` is written in C, and
+so I couldn't just monkey-patch out the static `date.today` method.
+
+I found a simple way of doing this that involved effectively wrapping the date
+class with a mock, but passing through calls to the constructor to the real
+class (and returning real instances).
+
+The :func:`patch decorator <patch>` is used here to
+mock out the `date` class in the module under test. The :attr:`side_effect`
+attribute on the mock date class is then set to a lambda function that returns
+a real date. When the mock date class is called a real date will be
+constructed and returned by `side_effect`.
+
+.. doctest::
+
+ >>> from datetime import date
+ >>> with patch('mymodule.date') as mock_date:
+ ... mock_date.today.return_value = date(2010, 10, 8)
+ ... mock_date.side_effect = lambda *args, **kw: date(*args, **kw)
+ ...
+ ... assert mymodule.date.today() == date(2010, 10, 8)
+ ... assert mymodule.date(2009, 6, 8) == date(2009, 6, 8)
+ ...
+
+Note that we don't patch `datetime.date` globally, we patch `date` in the
+module that *uses* it. See :ref:`where to patch <where-to-patch>`.
+
+When `date.today()` is called a known date is returned, but calls to the
+`date(...)` constructor still return normal dates. Without this you can find
+yourself having to calculate an expected result using exactly the same
+algorithm as the code under test, which is a classic testing anti-pattern.
+
+Calls to the date constructor are recorded in the `mock_date` attributes
+(`call_count` and friends) which may also be useful for your tests.
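+
+For example, a minimal sketch (patching `mymodule.date` exactly as above) that
+inspects the recorded constructor calls:
+
+.. doctest::
+
+    >>> with patch('mymodule.date') as mock_date:
+    ...     mock_date.side_effect = lambda *args, **kw: date(*args, **kw)
+    ...     d = mymodule.date(2009, 6, 8)
+    ...     mock_date.call_count
+    ...     mock_date.call_args
+    ...
+    1
+    call(2009, 6, 8)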
+
+An alternative way of dealing with mocking dates, or other builtin classes,
+is discussed in `this blog entry
+<http://williamjohnbert.com/2011/07/how-to-unit-testing-in-django-with-mocking-and-patching/>`_.
+
+
+Mocking a Generator Method
+==========================
+
+A Python generator is a function or method that uses the `yield statement
+<http://docs.python.org/reference/simple_stmts.html#the-yield-statement>`_ to
+return a series of values when iterated over [#]_.
+
+A generator method / function is called to return the generator object. It is
+the generator object that is then iterated over. The protocol method for
+iteration is `__iter__
+<http://docs.python.org/library/stdtypes.html#container.__iter__>`_, so we can
+mock this using a `MagicMock`.
+
+Here's an example class with an "iter" method implemented as a generator:
+
+.. doctest::
+
+ >>> class Foo(object):
+ ... def iter(self):
+ ... for i in [1, 2, 3]:
+ ... yield i
+ ...
+ >>> foo = Foo()
+ >>> list(foo.iter())
+ [1, 2, 3]
+
+
+How would we mock this class, and in particular its "iter" method?
+
+To configure the values returned from the iteration (implicit in the call to
+`list`), we need to configure the object returned by the call to `foo.iter()`.
+
+.. doctest::
+
+ >>> mock_foo = MagicMock()
+ >>> mock_foo.iter.return_value = iter([1, 2, 3])
+ >>> list(mock_foo.iter())
+ [1, 2, 3]
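+
+Since the object returned by `mock_foo.iter()` is itself a `MagicMock`, an
+equivalent configuration (a minimal sketch of the same idea) is to set the
+`__iter__` protocol method on that return value directly:
+
+.. doctest::
+
+    >>> mock_foo = MagicMock()
+    >>> mock_foo.iter.return_value.__iter__.return_value = iter([1, 2, 3])
+    >>> list(mock_foo.iter())
+    [1, 2, 3]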
+
+.. [#] There are also generator expressions and more `advanced uses
+ <http://www.dabeaz.com/coroutines/index.html>`_ of generators, but we aren't
+ concerned about them here. A very good introduction to generators and how
+ powerful they are is: `Generator Tricks for Systems Programmers
+ <http://www.dabeaz.com/generators/>`_.
+
+
+Applying the same patch to every test method
+============================================
+
+If you want several patches in place for multiple test methods the obvious way
+is to apply the patch decorators to every method. This can feel like unnecessary
+repetition. For Python 2.6 or more recent you can use `patch` (in all its
+various forms) as a class decorator. This applies the patches to all test
+methods on the class. A test method is identified by a name that starts with
+`test`:
+
+.. doctest::
+
+ >>> @patch('mymodule.SomeClass')
+ ... class MyTest(TestCase):
+ ...
+ ... def test_one(self, MockSomeClass):
+ ... self.assertTrue(mymodule.SomeClass is MockSomeClass)
+ ...
+ ... def test_two(self, MockSomeClass):
+ ... self.assertTrue(mymodule.SomeClass is MockSomeClass)
+ ...
+ ... def not_a_test(self):
+ ... return 'something'
+ ...
+ >>> MyTest('test_one').test_one()
+ >>> MyTest('test_two').test_two()
+ >>> MyTest('test_two').not_a_test()
+ 'something'
+
+An alternative way of managing patches is to use the :ref:`start-and-stop`.
+These allow you to move the patching into your `setUp` and `tearDown` methods.
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... self.patcher = patch('mymodule.foo')
+ ... self.mock_foo = self.patcher.start()
+ ...
+ ... def test_foo(self):
+ ... self.assertTrue(mymodule.foo is self.mock_foo)
+ ...
+ ... def tearDown(self):
+ ... self.patcher.stop()
+ ...
+ >>> MyTest('test_foo').run()
+
+If you use this technique you must ensure that the patching is "undone" by
+calling `stop`. This can be fiddlier than you might think, because if an
+exception is raised in the setUp then tearDown is not called. `unittest2
+<http://pypi.python.org/pypi/unittest2>`_ cleanup functions make this simpler:
+
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... patcher = patch('mymodule.foo')
+ ... self.addCleanup(patcher.stop)
+ ... self.mock_foo = patcher.start()
+ ...
+ ... def test_foo(self):
+ ... self.assertTrue(mymodule.foo is self.mock_foo)
+ ...
+ >>> MyTest('test_foo').run()
+
+
+Mocking Unbound Methods
+=======================
+
+Whilst writing tests today I needed to patch an *unbound method* (patching the
+method on the class rather than on the instance). I needed self to be passed
+in as the first argument because I wanted to make asserts about which objects
+were calling this particular method. The issue is that you can't patch with a
+mock for this, because if you replace an unbound method with a mock it doesn't
+become a bound method when fetched from the instance, and so it doesn't get
+self passed in. The workaround is to patch the unbound method with a real
+function instead. The :func:`patch` decorator makes it so simple to
+patch out methods with a mock that having to create a real function becomes a
+nuisance.
+
+If you pass `autospec=True` to patch then it does the patching with a
+*real* function object. This function object has the same signature as the one
+it is replacing, but delegates to a mock under the hood. You still get your
+mock auto-created in exactly the same way as before. What it means though, is
+that if you use it to patch out an unbound method on a class the mocked
+function will be turned into a bound method if it is fetched from an instance.
+It will have `self` passed in as the first argument, which is exactly what I
+wanted:
+
+.. doctest::
+
+ >>> class Foo(object):
+ ... def foo(self):
+ ... pass
+ ...
+ >>> with patch.object(Foo, 'foo', autospec=True) as mock_foo:
+ ... mock_foo.return_value = 'foo'
+ ... foo = Foo()
+ ... foo.foo()
+ ...
+ 'foo'
+ >>> mock_foo.assert_called_once_with(foo)
+
+If we don't use `autospec=True` then the unbound method is patched out
+with a Mock instance instead, and isn't called with `self`.
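+
+For contrast, here is a minimal sketch of patching the same method *without*
+`autospec`: the replacement is a plain mock, so the instance is not passed in:
+
+.. doctest::
+
+    >>> with patch.object(Foo, 'foo') as mock_foo:
+    ...     mock_foo.return_value = 'foo'
+    ...     foo = Foo()
+    ...     foo.foo()
+    ...
+    'foo'
+    >>> mock_foo.assert_called_once_with()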
+
+
+Checking multiple calls with mock
+=================================
+
+mock has a nice API for making assertions about how your mock objects are used.
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.foo_bar.return_value = None
+ >>> mock.foo_bar('baz', spam='eggs')
+ >>> mock.foo_bar.assert_called_with('baz', spam='eggs')
+
+If your mock is only being called once you can use the
+:meth:`assert_called_once_with` method that also asserts that the
+:attr:`call_count` is one.
+
+.. doctest::
+
+ >>> mock.foo_bar.assert_called_once_with('baz', spam='eggs')
+ >>> mock.foo_bar()
+ >>> mock.foo_bar.assert_called_once_with('baz', spam='eggs')
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected to be called once. Called 2 times.
+
+Both `assert_called_with` and `assert_called_once_with` make assertions about
+the *most recent* call. If your mock is going to be called several times, and
+you want to make assertions about *all* those calls you can use
+:attr:`~Mock.call_args_list`:
+
+.. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock(1, 2, 3)
+ >>> mock(4, 5, 6)
+ >>> mock()
+ >>> mock.call_args_list
+ [call(1, 2, 3), call(4, 5, 6), call()]
+
+The :data:`call` helper makes it easy to make assertions about these calls. You
+can build up a list of expected calls and compare it to `call_args_list`. This
+looks remarkably similar to the repr of the `call_args_list`:
+
+.. doctest::
+
+ >>> expected = [call(1, 2, 3), call(4, 5, 6), call()]
+ >>> mock.call_args_list == expected
+ True
+
+
+Coping with mutable arguments
+=============================
+
+Another situation, rare but one that can bite you, is when your mock is called with
+mutable arguments. `call_args` and `call_args_list` store *references* to the
+arguments. If the arguments are mutated by the code under test then you can no
+longer make assertions about what the values were when the mock was called.
+
+Here's some example code that shows the problem. Imagine the following functions
+defined in 'mymodule'::
+
+ def frob(val):
+ pass
+
+ def grob(val):
+ "First frob and then clear val"
+ frob(val)
+ val.clear()
+
+When we try to test that `grob` calls `frob` with the correct argument, look
+what happens:
+
+.. doctest::
+
+ >>> with patch('mymodule.frob') as mock_frob:
+ ... val = set([6])
+ ... mymodule.grob(val)
+ ...
+ >>> val
+ set([])
+ >>> mock_frob.assert_called_with(set([6]))
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected: ((set([6]),), {})
+ Called with: ((set([]),), {})
+
+One possibility would be for mock to copy the arguments you pass in. This
+could then cause problems if you do assertions that rely on object identity
+for equality.
+
+Here's one solution that uses the :attr:`side_effect`
+functionality. If you provide a `side_effect` function for a mock then
+`side_effect` will be called with the same args as the mock. This gives us an
+opportunity to copy the arguments and store them for later assertions. In this
+example I'm using *another* mock to store the arguments so that I can use the
+mock methods for doing the assertion. Again a helper function sets this up for
+me.
+
+.. doctest::
+
+ >>> from copy import deepcopy
+ >>> from mock import Mock, patch, DEFAULT
+ >>> def copy_call_args(mock):
+ ... new_mock = Mock()
+ ... def side_effect(*args, **kwargs):
+ ... args = deepcopy(args)
+ ... kwargs = deepcopy(kwargs)
+ ... new_mock(*args, **kwargs)
+ ... return DEFAULT
+ ... mock.side_effect = side_effect
+ ... return new_mock
+ ...
+ >>> with patch('mymodule.frob') as mock_frob:
+ ... new_mock = copy_call_args(mock_frob)
+ ... val = set([6])
+ ... mymodule.grob(val)
+ ...
+ >>> new_mock.assert_called_with(set([6]))
+ >>> new_mock.call_args
+ call(set([6]))
+
+`copy_call_args` is called with the mock that will be called. It returns a new
+mock that we do the assertion on. The `side_effect` function makes a copy of
+the args and calls our `new_mock` with the copy.
+
+.. note::
+
+ If your mock is only going to be used once there is an easier way of
+ checking arguments at the point they are called. You can simply do the
+ checking inside a `side_effect` function.
+
+ .. doctest::
+
+ >>> def side_effect(arg):
+ ... assert arg == set([6])
+ ...
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock(set([6]))
+ >>> mock(set())
+ Traceback (most recent call last):
+ ...
+ AssertionError
+
+An alternative approach is to create a subclass of `Mock` or `MagicMock` that
+copies (using `copy.deepcopy
+<http://docs.python.org/library/copy.html#copy.deepcopy>`_) the arguments.
+Here's an example implementation:
+
+.. doctest::
+
+ >>> from copy import deepcopy
+ >>> class CopyingMock(MagicMock):
+ ... def __call__(self, *args, **kwargs):
+ ... args = deepcopy(args)
+ ... kwargs = deepcopy(kwargs)
+ ... return super(CopyingMock, self).__call__(*args, **kwargs)
+ ...
+ >>> c = CopyingMock(return_value=None)
+ >>> arg = set()
+ >>> c(arg)
+ >>> arg.add(1)
+ >>> c.assert_called_with(set())
+ >>> c.assert_called_with(arg)
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected call: mock(set([1]))
+ Actual call: mock(set([]))
+ >>> c.foo
+ <CopyingMock name='mock.foo' id='...'>
+
+When you subclass `Mock` or `MagicMock`, all dynamically created attributes,
+and the `return_value`, will use your subclass automatically. That means all
+children of a `CopyingMock` will also have the type `CopyingMock`.
+
+
+Raising exceptions on attribute access
+======================================
+
+You can use :class:`PropertyMock` to mimic the behaviour of properties. This
+includes raising exceptions when an attribute is accessed.
+
+Here's an example raising a `ValueError` when the 'foo' attribute is accessed:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> p = PropertyMock(side_effect=ValueError)
+ >>> type(m).foo = p
+ >>> m.foo
+ Traceback (most recent call last):
+ ....
+ ValueError
+
+Because every mock object has its own type, a new subclass of whichever mock
+class you're using, all mock objects are isolated from each other. You can
+safely attach properties (or other descriptors or whatever you want in fact)
+to `type(mock)` without affecting other mock objects.
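+
+A minimal sketch of that isolation:
+
+.. doctest::
+
+    >>> m = MagicMock()
+    >>> other = MagicMock()
+    >>> type(m).foo = PropertyMock(return_value='patched')
+    >>> m.foo
+    'patched'
+    >>> other.foo
+    <MagicMock name='mock.foo' id='...'>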
+
+
+Multiple calls with different effects
+=====================================
+
+.. note::
+
+ In mock 1.0 the handling of iterable `side_effect` was changed. Any
+ exceptions in the iterable will be raised instead of returned.
+
+Handling code that needs to behave differently on subsequent calls during the
+test can be tricky. For example you may have a function that needs to raise
+an exception the first time it is called but return a response on the second
+call (testing retry behaviour).
+
+One approach is to use a :attr:`side_effect` function that replaces itself. The
+first time it is called the `side_effect` sets a new `side_effect` that will
+be used for the second call. It then raises an exception:
+
+.. doctest::
+
+ >>> def side_effect(*args):
+ ... def second_call(*args):
+ ... return 'response'
+ ... mock.side_effect = second_call
+ ... raise Exception('boom')
+ ...
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock('first')
+ Traceback (most recent call last):
+ ...
+ Exception: boom
+ >>> mock('second')
+ 'response'
+ >>> mock.assert_called_with('second')
+
+Another perfectly valid way would be to pop return values from a list. If the
+return value is an exception, raise it instead of returning it:
+
+.. doctest::
+
+ >>> returns = [Exception('boom'), 'response']
+ >>> def side_effect(*args):
+ ... result = returns.pop(0)
+ ... if isinstance(result, Exception):
+ ... raise result
+ ... return result
+ ...
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock('first')
+ Traceback (most recent call last):
+ ...
+ Exception: boom
+ >>> mock('second')
+ 'response'
+ >>> mock.assert_called_with('second')
+
+Which approach you prefer is a matter of taste. The first approach is actually
+a line shorter but maybe the second approach is more readable.
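+
+As the note above says, in mock 1.0 `side_effect` can also be an iterable, so a
+third option (a minimal sketch) is to let mock pop the values - and raise the
+exceptions - for you:
+
+.. doctest::
+
+    >>> mock = Mock(side_effect=[Exception('boom'), 'response'])
+    >>> mock('first')
+    Traceback (most recent call last):
+    ...
+    Exception: boom
+    >>> mock('second')
+    'response'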
+
+
+Nesting Patches
+===============
+
+Using patch as a context manager is nice, but if you do multiple patches you
+can end up with nested with statements indenting further and further to the
+right:
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ...
+ ... def test_foo(self):
+ ... with patch('mymodule.Foo') as mock_foo:
+ ... with patch('mymodule.Bar') as mock_bar:
+ ... with patch('mymodule.Spam') as mock_spam:
+ ... assert mymodule.Foo is mock_foo
+ ... assert mymodule.Bar is mock_bar
+ ... assert mymodule.Spam is mock_spam
+ ...
+ >>> original = mymodule.Foo
+ >>> MyTest('test_foo').test_foo()
+ >>> assert mymodule.Foo is original
+
+With unittest2_ `cleanup` functions and the :ref:`start-and-stop` we can
+achieve the same effect without the nested indentation. A simple helper
+method, `create_patch`, puts the patch in place and returns the created mock
+for us:
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ...
+ ... def create_patch(self, name):
+ ... patcher = patch(name)
+ ... thing = patcher.start()
+ ... self.addCleanup(patcher.stop)
+ ... return thing
+ ...
+ ... def test_foo(self):
+ ... mock_foo = self.create_patch('mymodule.Foo')
+ ... mock_bar = self.create_patch('mymodule.Bar')
+ ... mock_spam = self.create_patch('mymodule.Spam')
+ ...
+ ... assert mymodule.Foo is mock_foo
+ ... assert mymodule.Bar is mock_bar
+ ... assert mymodule.Spam is mock_spam
+ ...
+ >>> original = mymodule.Foo
+ >>> MyTest('test_foo').run()
+ >>> assert mymodule.Foo is original
+
+
+Mocking a dictionary with MagicMock
+===================================
+
+You may want to mock a dictionary, or other container object, recording all
+access to it whilst having it still behave like a dictionary.
+
+We can do this with :class:`MagicMock`, which will behave like a dictionary,
+and using :data:`~Mock.side_effect` to delegate dictionary access to a real
+underlying dictionary that is under our control.
+
+When the `__getitem__` and `__setitem__` methods of our `MagicMock` are called
+(normal dictionary access) then `side_effect` is called with the key (and in
+the case of `__setitem__` the value too). We can also control what is returned.
+
+After the `MagicMock` has been used we can use attributes like
+:data:`~Mock.call_args_list` to assert about how the dictionary was used:
+
+.. doctest::
+
+ >>> my_dict = {'a': 1, 'b': 2, 'c': 3}
+ >>> def getitem(name):
+ ... return my_dict[name]
+ ...
+ >>> def setitem(name, val):
+ ... my_dict[name] = val
+ ...
+ >>> mock = MagicMock()
+ >>> mock.__getitem__.side_effect = getitem
+ >>> mock.__setitem__.side_effect = setitem
+
+.. note::
+
+ An alternative to using `MagicMock` is to use `Mock` and *only* provide
+ the magic methods you specifically want:
+
+ .. doctest::
+
+ >>> mock = Mock()
+        >>> mock.__setitem__ = Mock(side_effect=setitem)
+        >>> mock.__getitem__ = Mock(side_effect=getitem)
+
+ A *third* option is to use `MagicMock` but passing in `dict` as the `spec`
+ (or `spec_set`) argument so that the `MagicMock` created only has
+ dictionary magic methods available:
+
+ .. doctest::
+
+ >>> mock = MagicMock(spec_set=dict)
+ >>> mock.__getitem__.side_effect = getitem
+ >>> mock.__setitem__.side_effect = setitem
+
+With these side effect functions in place, the `mock` will behave like a normal
+dictionary while recording the access. It even raises a `KeyError` if you try
+to access a key that doesn't exist.
+
+.. doctest::
+
+ >>> mock['a']
+ 1
+ >>> mock['c']
+ 3
+ >>> mock['d']
+ Traceback (most recent call last):
+ ...
+ KeyError: 'd'
+ >>> mock['b'] = 'fish'
+ >>> mock['d'] = 'eggs'
+ >>> mock['b']
+ 'fish'
+ >>> mock['d']
+ 'eggs'
+
+After it has been used you can make assertions about the access using the normal
+mock methods and attributes:
+
+.. doctest::
+
+ >>> mock.__getitem__.call_args_list
+ [call('a'), call('c'), call('d'), call('b'), call('d')]
+ >>> mock.__setitem__.call_args_list
+ [call('b', 'fish'), call('d', 'eggs')]
+ >>> my_dict
+ {'a': 1, 'c': 3, 'b': 'fish', 'd': 'eggs'}
+
+
+Mock subclasses and their attributes
+====================================
+
+There are various reasons why you might want to subclass `Mock`. One reason
+might be to add helper methods. Here's a silly example:
+
+.. doctest::
+
+ >>> class MyMock(MagicMock):
+ ... def has_been_called(self):
+ ... return self.called
+ ...
+ >>> mymock = MyMock(return_value=None)
+ >>> mymock
+ <MyMock id='...'>
+ >>> mymock.has_been_called()
+ False
+ >>> mymock()
+ >>> mymock.has_been_called()
+ True
+
+The standard behaviour for `Mock` instances is that attributes and the return
+value mocks are of the same type as the mock they are accessed on. This ensures
+that `Mock` attributes are `Mocks` and `MagicMock` attributes are `MagicMocks`
+[#]_. So if you're subclassing to add helper methods then they'll also be
+available on the attributes and return value mock of instances of your
+subclass.
+
+.. doctest::
+
+ >>> mymock.foo
+ <MyMock name='mock.foo' id='...'>
+ >>> mymock.foo.has_been_called()
+ False
+ >>> mymock.foo()
+ <MyMock name='mock.foo()' id='...'>
+ >>> mymock.foo.has_been_called()
+ True
+
+Sometimes this is inconvenient. For example, `one user
+<https://code.google.com/p/mock/issues/detail?id=105>`_ is subclassing mock to
+create a `Twisted adaptor
+<http://twistedmatrix.com/documents/11.0.0/api/twisted.python.components.html>`_.
+Having this applied to attributes too actually causes errors.
+
+`Mock` (in all its flavours) uses a method called `_get_child_mock` to create
+these "sub-mocks" for attributes and return values. You can prevent your
+subclass being used for attributes by overriding this method. The signature is
+that it takes arbitrary keyword arguments (`**kwargs`) which are then passed
+onto the mock constructor:
+
+.. doctest::
+
+ >>> class Subclass(MagicMock):
+ ... def _get_child_mock(self, **kwargs):
+ ... return MagicMock(**kwargs)
+ ...
+ >>> mymock = Subclass()
+ >>> mymock.foo
+ <MagicMock name='mock.foo' id='...'>
+ >>> assert isinstance(mymock, Subclass)
+ >>> assert not isinstance(mymock.foo, Subclass)
+ >>> assert not isinstance(mymock(), Subclass)
+
+.. [#] An exception to this rule are the non-callable mocks. Attributes use the
+ callable variant because otherwise non-callable mocks couldn't have callable
+ methods.
+
+
+Mocking imports with patch.dict
+===============================
+
+One situation where mocking can be hard is where you have a local import inside
+a function. These are harder to mock because they aren't using an object from
+the module namespace that we can patch out.
+
+Generally local imports are to be avoided. They are sometimes done to prevent
+circular dependencies, for which there is *usually* a much better way to solve
+the problem (refactor the code), or to avoid "up front costs" by delaying the
+import. This too can usually be solved in better ways than an unconditional
+local import (store the module as a class or module attribute and only do the
+import on first use).
+
+That aside there is a way to use `mock` to affect the results of an import.
+Importing fetches an *object* from the `sys.modules` dictionary. Note that it
+fetches an *object*, which need not be a module. Importing a module for the
+first time results in a module object being put in `sys.modules`, so usually
+when you import something you get a module back. This need not be the case
+however.
+
+This means you can use :func:`patch.dict` to *temporarily* put a mock in place
+in `sys.modules`. Any imports whilst this patch is active will fetch the mock.
+When the patch is complete (the decorated function exits, the with statement
+body is complete or `patcher.stop()` is called) then whatever was there
+previously will be restored safely.
+
+Here's an example that mocks out the 'fooble' module.
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> with patch.dict('sys.modules', {'fooble': mock}):
+ ... import fooble
+ ... fooble.blob()
+ ...
+ <Mock name='mock.blob()' id='...'>
+ >>> assert 'fooble' not in sys.modules
+ >>> mock.blob.assert_called_once_with()
+
+As you can see the `import fooble` succeeds, but on exit there is no 'fooble'
+left in `sys.modules`.
+
+This also works for the `from module import name` form:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> with patch.dict('sys.modules', {'fooble': mock}):
+ ... from fooble import blob
+ ... blob.blip()
+ ...
+ <Mock name='mock.blob.blip()' id='...'>
+ >>> mock.blob.blip.assert_called_once_with()
+
+With slightly more work you can also mock package imports:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> modules = {'package': mock, 'package.module': mock.module}
+ >>> with patch.dict('sys.modules', modules):
+ ... from package.module import fooble
+ ... fooble()
+ ...
+ <Mock name='mock.module.fooble()' id='...'>
+ >>> mock.module.fooble.assert_called_once_with()
+
+
+Tracking order of calls and less verbose call assertions
+========================================================
+
+The :class:`Mock` class allows you to track the *order* of method calls on
+your mock objects through the :attr:`~Mock.method_calls` attribute. This
+doesn't allow you to track the order of calls between separate mock objects,
+however we can use :attr:`~Mock.mock_calls` to achieve the same effect.
+
+Because mocks track calls to child mocks in `mock_calls`, and accessing an
+arbitrary attribute of a mock creates a child mock, we can create our separate
+mocks from a parent one. Calls to those child mocks will then all be recorded,
+in order, in the `mock_calls` of the parent:
+
+.. doctest::
+
+ >>> manager = Mock()
+ >>> mock_foo = manager.foo
+ >>> mock_bar = manager.bar
+
+ >>> mock_foo.something()
+ <Mock name='mock.foo.something()' id='...'>
+ >>> mock_bar.other.thing()
+ <Mock name='mock.bar.other.thing()' id='...'>
+
+ >>> manager.mock_calls
+ [call.foo.something(), call.bar.other.thing()]
+
+We can then assert about the calls, including the order, by comparing with
+the `mock_calls` attribute on the manager mock:
+
+.. doctest::
+
+ >>> expected_calls = [call.foo.something(), call.bar.other.thing()]
+ >>> manager.mock_calls == expected_calls
+ True
+
+If `patch` is creating, and putting in place, your mocks then you can attach
+them to a manager mock using the :meth:`~Mock.attach_mock` method. After
+attaching, calls will be recorded in the `mock_calls` of the manager.
+
+.. doctest::
+
+ >>> manager = MagicMock()
+ >>> with patch('mymodule.Class1') as MockClass1:
+ ... with patch('mymodule.Class2') as MockClass2:
+ ... manager.attach_mock(MockClass1, 'MockClass1')
+ ... manager.attach_mock(MockClass2, 'MockClass2')
+ ... MockClass1().foo()
+ ... MockClass2().bar()
+ ...
+ <MagicMock name='mock.MockClass1().foo()' id='...'>
+ <MagicMock name='mock.MockClass2().bar()' id='...'>
+ >>> manager.mock_calls
+ [call.MockClass1(),
+ call.MockClass1().foo(),
+ call.MockClass2(),
+ call.MockClass2().bar()]
+
+If many calls have been made, but you're only interested in a particular
+sequence of them then an alternative is to use the
+:meth:`~Mock.assert_has_calls` method. This takes a list of calls (constructed
+with the :data:`call` object). If that sequence of calls is in
+:attr:`~Mock.mock_calls` then the assert succeeds.
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> m().foo().bar().baz()
+ <MagicMock name='mock().foo().bar().baz()' id='...'>
+ >>> m.one().two().three()
+ <MagicMock name='mock.one().two().three()' id='...'>
+ >>> calls = call.one().two().three().call_list()
+ >>> m.assert_has_calls(calls)
+
+Even though the chained call `m.one().two().three()` isn't the only call that
+has been made to the mock, the assert still succeeds.
+
+Sometimes a mock may have several calls made to it, and you are only interested
+in asserting about *some* of those calls. You may not even care about the
+order. In this case you can pass `any_order=True` to `assert_has_calls`:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> m(1), m.two(2, 3), m.seven(7), m.fifty('50')
+ (...)
+ >>> calls = [call.fifty('50'), call(1), call.seven(7)]
+ >>> m.assert_has_calls(calls, any_order=True)
+
+
+More complex argument matching
+==============================
+
+Using the same basic concept as `ANY` we can implement matchers to do more
+complex assertions on objects used as arguments to mocks.
+
+Suppose we expect some object to be passed to a mock that by default
+compares equal based on object identity (which is the Python default for
+user-defined classes). To use :meth:`~Mock.assert_called_with` we would need to pass
+in the exact same object. If we are only interested in some of the attributes
+of this object then we can create a matcher that will check these attributes
+for us.
+
+You can see in this example how a 'standard' call to `assert_called_with` isn't
+sufficient:
+
+.. doctest::
+
+ >>> class Foo(object):
+ ... def __init__(self, a, b):
+ ... self.a, self.b = a, b
+ ...
+ >>> mock = Mock(return_value=None)
+ >>> mock(Foo(1, 2))
+ >>> mock.assert_called_with(Foo(1, 2))
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected: call(<__main__.Foo object at 0x...>)
+ Actual call: call(<__main__.Foo object at 0x...>)
+
+A comparison function for our `Foo` class might look something like this:
+
+.. doctest::
+
+ >>> def compare(self, other):
+ ... if not type(self) == type(other):
+ ... return False
+ ... if self.a != other.a:
+ ... return False
+ ... if self.b != other.b:
+ ... return False
+ ... return True
+ ...
+
+And a matcher object that can use comparison functions like this for its
+equality operation would look something like this:
+
+.. doctest::
+
+ >>> class Matcher(object):
+ ... def __init__(self, compare, some_obj):
+ ... self.compare = compare
+ ... self.some_obj = some_obj
+ ... def __eq__(self, other):
+ ... return self.compare(self.some_obj, other)
+ ...
+
+Putting all this together:
+
+.. doctest::
+
+ >>> match_foo = Matcher(compare, Foo(1, 2))
+ >>> mock.assert_called_with(match_foo)
+
+The `Matcher` is instantiated with our compare function and the `Foo` object
+we want to compare against. In `assert_called_with` the `Matcher` equality
+method will be called, which compares the object the mock was called with
+against the one we created our matcher with. If they match then
+`assert_called_with` passes, and if they don't an `AssertionError` is raised:
+
+.. doctest::
+
+ >>> match_wrong = Matcher(compare, Foo(3, 4))
+ >>> mock.assert_called_with(match_wrong)
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected: ((<Matcher object at 0x...>,), {})
+ Called with: ((<Foo object at 0x...>,), {})
+
+With a bit of tweaking you could have the comparison function raise the
+`AssertionError` directly and provide a more useful failure message.
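+
+As a rough sketch of that idea (the `RaisingMatcher` name and its failure
+message are purely illustrative, not part of mock itself), the matcher's
+`__eq__` can do the comparison and raise on a mismatch:
+
+.. code-block:: pycon
+
+ >>> class RaisingMatcher(object):
+ ...     def __init__(self, compare, some_obj):
+ ...         self.compare = compare
+ ...         self.some_obj = some_obj
+ ...     def __eq__(self, other):
+ ...         if not self.compare(self.some_obj, other):
+ ...             raise AssertionError(
+ ...                 'expected (a=%s, b=%s), got (a=%s, b=%s)' % (
+ ...                     self.some_obj.a, self.some_obj.b, other.a, other.b))
+ ...         return True
+ ...
+ >>> mock = Mock(return_value=None)
+ >>> mock(Foo(1, 2))
+ >>> mock.assert_called_with(RaisingMatcher(compare, Foo(3, 4)))
+ Traceback (most recent call last):
+  ...
+ AssertionError: expected (a=3, b=4), got (a=1, b=2)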
+
+As of version 1.5, the Python testing library `PyHamcrest
+<http://pypi.python.org/pypi/PyHamcrest>`_ provides similar functionality
+that may be useful here, in the form of its equality matcher
+(`hamcrest.library.integration.match_equality
+<http://packages.python.org/PyHamcrest/integration.html#hamcrest.library.integration.match_equality>`_).
+
+
+Less verbose configuration of mock objects
+==========================================
+
+This recipe, for easier configuration of mock objects, is now part of `Mock`.
+See the :meth:`~Mock.configure_mock` method.
+
+
+Matching any argument in assertions
+===================================
+
+This example is now built in to mock. See :data:`ANY`.
+
+
+Mocking Properties
+==================
+
+This example is now built in to mock. See :class:`PropertyMock`.
+
+
+Mocking open
+============
+
+This example is now built in to mock. See :func:`mock_open`.
+
+
+Mocks without some attributes
+=============================
+
+This example is now built in to mock. See :ref:`deleting-attributes`.
diff --git a/third_party/python/mock-1.0.0/docs/getting-started.txt b/third_party/python/mock-1.0.0/docs/getting-started.txt
new file mode 100644
index 0000000000..1b5d289ebe
--- /dev/null
+++ b/third_party/python/mock-1.0.0/docs/getting-started.txt
@@ -0,0 +1,479 @@
+===========================
+ Getting Started with Mock
+===========================
+
+.. _getting-started:
+
+.. index:: Getting Started
+
+.. testsetup::
+
+ class SomeClass(object):
+ static_method = None
+ class_method = None
+ attribute = None
+
+ sys.modules['package'] = package = Mock(name='package')
+ sys.modules['package.module'] = module = package.module
+ sys.modules['module'] = package.module
+
+
+Using Mock
+==========
+
+Mock Patching Methods
+---------------------
+
+Common uses for :class:`Mock` objects include:
+
+* Patching methods
+* Recording method calls on objects
+
+You might want to replace a method on an object to check that
+it is called with the correct arguments by another part of the system:
+
+.. doctest::
+
+ >>> real = SomeClass()
+ >>> real.method = MagicMock(name='method')
+ >>> real.method(3, 4, 5, key='value')
+ <MagicMock name='method()' id='...'>
+
+Once our mock has been used (`real.method` in this example) it has methods
+and attributes that allow you to make assertions about how it has been used.
+
+.. note::
+
+ In most of these examples the :class:`Mock` and :class:`MagicMock` classes
+ are interchangeable. As the `MagicMock` is the more capable class it makes
+ a sensible one to use by default.
+
+Once the mock has been called its :attr:`~Mock.called` attribute is set to
+`True`. More importantly we can use the :meth:`~Mock.assert_called_with` or
+:meth:`~Mock.assert_called_once_with` method to check that it was called with
+the correct arguments.
+
+This example tests that calling `ProductionClass().method` results in a call to
+the `something` method:
+
+.. doctest::
+
+ >>> from mock import MagicMock
+ >>> class ProductionClass(object):
+ ... def method(self):
+ ... self.something(1, 2, 3)
+ ... def something(self, a, b, c):
+ ... pass
+ ...
+ >>> real = ProductionClass()
+ >>> real.something = MagicMock()
+ >>> real.method()
+ >>> real.something.assert_called_once_with(1, 2, 3)
+
+
+
+Mock for Method Calls on an Object
+----------------------------------
+
+In the last example we patched a method directly on an object to check that it
+was called correctly. Another common use case is to pass an object into a
+method (or some part of the system under test) and then check that it is used
+in the correct way.
+
+The simple `ProductionClass` below has a `closer` method. If it is called with
+an object then it calls `close` on it.
+
+.. doctest::
+
+ >>> class ProductionClass(object):
+ ... def closer(self, something):
+ ... something.close()
+ ...
+
+So to test it we need to pass in an object with a `close` method and check
+that it was called correctly.
+
+.. doctest::
+
+ >>> real = ProductionClass()
+ >>> mock = Mock()
+ >>> real.closer(mock)
+ >>> mock.close.assert_called_with()
+
+We don't have to do any work to provide the 'close' method on our mock.
+Accessing close creates it. So, if 'close' hasn't already been called then
+accessing it in the test will create it, but :meth:`~Mock.assert_called_with`
+will raise a failure exception.
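+
+For example, here is a rough sketch of that failure mode (the exact wording of
+the failure message is indicative rather than exact):
+
+.. code-block:: pycon
+
+ >>> real = ProductionClass()
+ >>> mock = Mock()
+ >>> # note that real.closer(mock) is never called here
+ >>> mock.close.called
+ False
+ >>> mock.close.assert_called_with()
+ Traceback (most recent call last):
+  ...
+ AssertionError: Expected call: close()
+ Not called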
+
+
+Mocking Classes
+---------------
+
+A common use case is to mock out classes instantiated by your code under test.
+When you patch a class, that class is replaced with a mock. Instances
+are created by *calling the class*. This means you access the "mock instance"
+by looking at the return value of the mocked class.
+
+In the example below we have a function `some_function` that instantiates `Foo`
+and calls a method on it. The call to `patch` replaces the class `Foo` with a
+mock. The `Foo` instance is the result of calling the mock, so it is configured
+by modifying the mock :attr:`~Mock.return_value`.
+
+.. doctest::
+
+ >>> def some_function():
+ ... instance = module.Foo()
+ ... return instance.method()
+ ...
+ >>> with patch('module.Foo') as mock:
+ ... instance = mock.return_value
+ ... instance.method.return_value = 'the result'
+ ... result = some_function()
+ ... assert result == 'the result'
+
+
+Naming your mocks
+-----------------
+
+It can be useful to give your mocks a name. The name is shown in the repr of
+the mock and can be helpful when the mock appears in test failure messages. The
+name is also propagated to attributes or methods of the mock:
+
+.. doctest::
+
+ >>> mock = MagicMock(name='foo')
+ >>> mock
+ <MagicMock name='foo' id='...'>
+ >>> mock.method
+ <MagicMock name='foo.method' id='...'>
+
+
+Tracking all Calls
+------------------
+
+Often you want to track more than a single call to a method. The
+:attr:`~Mock.mock_calls` attribute records all calls
+to child attributes of the mock - and also to their children.
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock.method()
+ <MagicMock name='mock.method()' id='...'>
+ >>> mock.attribute.method(10, x=53)
+ <MagicMock name='mock.attribute.method()' id='...'>
+ >>> mock.mock_calls
+ [call.method(), call.attribute.method(10, x=53)]
+
+If you make an assertion about `mock_calls` and any unexpected methods
+have been called, then the assertion will fail. This is useful because as well
+as asserting that the calls you expected have been made, you are also checking
+that they were made in the right order and with no additional calls.
+
+You use the :data:`call` object to construct lists for comparing with
+`mock_calls`:
+
+.. doctest::
+
+ >>> expected = [call.method(), call.attribute.method(10, x=53)]
+ >>> mock.mock_calls == expected
+ True
+
+
+Setting Return Values and Attributes
+------------------------------------
+
+Setting the return values on a mock object is trivially easy:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.return_value = 3
+ >>> mock()
+ 3
+
+Of course you can do the same for methods on the mock:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.method.return_value = 3
+ >>> mock.method()
+ 3
+
+The return value can also be set in the constructor:
+
+.. doctest::
+
+ >>> mock = Mock(return_value=3)
+ >>> mock()
+ 3
+
+If you need an attribute setting on your mock, just do it:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.x = 3
+ >>> mock.x
+ 3
+
+Sometimes you want to mock up a more complex situation, for example
+`mock.connection.cursor().execute("SELECT 1")`. If we wanted this call to
+return a list, then we have to configure the result of the nested call.
+
+We can use :data:`call` to construct the set of calls in a "chained call" like
+this for easy assertion afterwards:
+
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> cursor = mock.connection.cursor.return_value
+ >>> cursor.execute.return_value = ['foo']
+ >>> mock.connection.cursor().execute("SELECT 1")
+ ['foo']
+ >>> expected = call.connection.cursor().execute("SELECT 1").call_list()
+ >>> mock.mock_calls
+ [call.connection.cursor(), call.connection.cursor().execute('SELECT 1')]
+ >>> mock.mock_calls == expected
+ True
+
+It is the call to `.call_list()` that turns our call object into a list of
+calls representing the chained calls.
+
+
+
+Raising exceptions with mocks
+-----------------------------
+
+A useful attribute is :attr:`~Mock.side_effect`. If you set this to an
+exception class or instance then the exception will be raised when the mock
+is called.
+
+.. doctest::
+
+ >>> mock = Mock(side_effect=Exception('Boom!'))
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ Exception: Boom!
+
+
+Side effect functions and iterables
+-----------------------------------
+
+`side_effect` can also be set to a function or an iterable. The use case for
+`side_effect` as an iterable is where your mock is going to be called several
+times, and you want each call to return a different value. When you set
+`side_effect` to an iterable every call to the mock returns the next value
+from the iterable:
+
+.. doctest::
+
+ >>> mock = MagicMock(side_effect=[4, 5, 6])
+ >>> mock()
+ 4
+ >>> mock()
+ 5
+ >>> mock()
+ 6
+
+
+For more advanced use cases, like dynamically varying the return values
+depending on what the mock is called with, `side_effect` can be a function.
+The function will be called with the same arguments as the mock. Whatever the
+function returns is what the call returns:
+
+.. doctest::
+
+ >>> vals = {(1, 2): 1, (2, 3): 2}
+ >>> def side_effect(*args):
+ ... return vals[args]
+ ...
+ >>> mock = MagicMock(side_effect=side_effect)
+ >>> mock(1, 2)
+ 1
+ >>> mock(2, 3)
+ 2
+
+
+Creating a Mock from an Existing Object
+---------------------------------------
+
+One problem with overuse of mocking is that it couples your tests to the
+implementation of your mocks rather than your real code. Suppose you have a
+class that implements `some_method`. In a test for another class, you
+provide a mock of this object that *also* provides `some_method`. If later
+you refactor the first class, so that it no longer has `some_method` - then
+your tests will continue to pass even though your code is now broken!
+
+`Mock` allows you to provide an object as a specification for the mock,
+using the `spec` keyword argument. Accessing methods / attributes on the
+mock that don't exist on your specification object will immediately raise an
+attribute error. If you change the implementation of your specification, then
+tests that use that class will start failing immediately without you having to
+instantiate the class in those tests.
+
+.. doctest::
+
+ >>> mock = Mock(spec=SomeClass)
+ >>> mock.old_method()
+ Traceback (most recent call last):
+ ...
+ AttributeError: object has no attribute 'old_method'
+
+If you want a stronger form of specification that prevents the setting
+of arbitrary attributes as well as the getting of them then you can use
+`spec_set` instead of `spec`.
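+
+For example, a minimal sketch using the `SomeClass` from the examples above
+(the `not_on_the_spec` attribute name is just illustrative):
+
+.. code-block:: pycon
+
+ >>> mock = Mock(spec_set=SomeClass)
+ >>> mock.attribute = 'configured'
+ >>> mock.not_on_the_spec = 3
+ Traceback (most recent call last):
+  ...
+ AttributeError: Mock object has no attribute 'not_on_the_spec'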
+
+
+
+Patch Decorators
+================
+
+.. note::
+
+ With `patch` it matters that you patch objects in the namespace where they
+ are looked up. This is normally straightforward, but for a quick guide
+ read :ref:`where to patch <where-to-patch>`.
+
+
+A common need in tests is to patch a class attribute or a module attribute,
+for example patching a builtin or patching a class in a module to test that it
+is instantiated. Modules and classes are effectively global, so patching on
+them has to be undone after the test or the patch will persist into other
+tests and cause hard to diagnose problems.
+
+mock provides three convenient decorators for this: `patch`, `patch.object` and
+`patch.dict`. `patch` takes a single string, of the form
+`package.module.Class.attribute` to specify the attribute you are patching. It
+also optionally takes a value that you want the attribute (or class or
+whatever) to be replaced with. `patch.object` takes an object and the name of
+the attribute you would like patched, plus optionally the value to patch it
+with.
+
+`patch.object`:
+
+.. doctest::
+
+ >>> original = SomeClass.attribute
+ >>> @patch.object(SomeClass, 'attribute', sentinel.attribute)
+ ... def test():
+ ... assert SomeClass.attribute == sentinel.attribute
+ ...
+ >>> test()
+ >>> assert SomeClass.attribute == original
+
+ >>> @patch('package.module.attribute', sentinel.attribute)
+ ... def test():
+ ... from package.module import attribute
+ ... assert attribute is sentinel.attribute
+ ...
+ >>> test()
+
+If you are patching a module (including `__builtin__`) then use `patch`
+instead of `patch.object`:
+
+.. doctest::
+
+ >>> mock = MagicMock(return_value=sentinel.file_handle)
+ >>> with patch('__builtin__.open', mock):
+ ... handle = open('filename', 'r')
+ ...
+ >>> mock.assert_called_with('filename', 'r')
+ >>> assert handle == sentinel.file_handle, "incorrect file handle returned"
+
+The module name can be 'dotted', in the form `package.module` if needed:
+
+.. doctest::
+
+ >>> @patch('package.module.ClassName.attribute', sentinel.attribute)
+ ... def test():
+ ... from package.module import ClassName
+ ... assert ClassName.attribute == sentinel.attribute
+ ...
+ >>> test()
+
+A nice pattern is to actually decorate test methods themselves:
+
+.. doctest::
+
+ >>> class MyTest(unittest2.TestCase):
+ ... @patch.object(SomeClass, 'attribute', sentinel.attribute)
+ ... def test_something(self):
+ ... self.assertEqual(SomeClass.attribute, sentinel.attribute)
+ ...
+ >>> original = SomeClass.attribute
+ >>> MyTest('test_something').test_something()
+ >>> assert SomeClass.attribute == original
+
+If you want to patch with a Mock, you can use `patch` with only one argument
+(or `patch.object` with two arguments). The mock will be created for you and
+passed into the test function / method:
+
+.. doctest::
+
+ >>> class MyTest(unittest2.TestCase):
+ ... @patch.object(SomeClass, 'static_method')
+ ... def test_something(self, mock_method):
+ ... SomeClass.static_method()
+ ... mock_method.assert_called_with()
+ ...
+ >>> MyTest('test_something').test_something()
+
+You can stack up multiple patch decorators using this pattern:
+
+.. doctest::
+
+ >>> class MyTest(unittest2.TestCase):
+ ... @patch('package.module.ClassName1')
+ ... @patch('package.module.ClassName2')
+ ... def test_something(self, MockClass2, MockClass1):
+ ... self.assertTrue(package.module.ClassName1 is MockClass1)
+ ... self.assertTrue(package.module.ClassName2 is MockClass2)
+ ...
+ >>> MyTest('test_something').test_something()
+
+When you nest patch decorators the mocks are passed in to the decorated
+function in the same order they are applied (the normal *python* order that
+decorators are applied). This means from the bottom up, so in the example
+above the mock for `package.module.ClassName2` is passed in first.
+
+There is also :func:`patch.dict` for setting values in a dictionary just
+during a scope and restoring the dictionary to its original state when the test
+ends:
+
+.. doctest::
+
+ >>> foo = {'key': 'value'}
+ >>> original = foo.copy()
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == original
+
+`patch`, `patch.object` and `patch.dict` can all be used as context managers.
+
+Where you use `patch` to create a mock for you, you can get a reference to the
+mock using the "as" form of the with statement:
+
+.. doctest::
+
+ >>> class ProductionClass(object):
+ ... def method(self):
+ ... pass
+ ...
+ >>> with patch.object(ProductionClass, 'method') as mock_method:
+ ... mock_method.return_value = None
+ ... real = ProductionClass()
+ ... real.method(1, 2, 3)
+ ...
+ >>> mock_method.assert_called_with(1, 2, 3)
+
+
+As an alternative, `patch`, `patch.object` and `patch.dict` can be used as
+class decorators. When used in this way it is the same as applying the
+decorator individually to every method whose name starts with "test".
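+
+For example, something like the following sketch, where both test methods see
+the patched attribute:
+
+.. code-block:: pycon
+
+ >>> @patch.object(SomeClass, 'attribute', sentinel.attribute)
+ ... class MyTest(unittest2.TestCase):
+ ...     def test_one(self):
+ ...         self.assertEqual(SomeClass.attribute, sentinel.attribute)
+ ...     def test_two(self):
+ ...         self.assertEqual(SomeClass.attribute, sentinel.attribute)
+ ...
+ >>> MyTest('test_one').test_one()
+ >>> MyTest('test_two').test_two()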
+
+For some more advanced examples, see the :ref:`further-examples` page.
diff --git a/third_party/python/mock-1.0.0/docs/helpers.txt b/third_party/python/mock-1.0.0/docs/helpers.txt
new file mode 100644
index 0000000000..571b71d5eb
--- /dev/null
+++ b/third_party/python/mock-1.0.0/docs/helpers.txt
@@ -0,0 +1,583 @@
+=========
+ Helpers
+=========
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ mock.FILTER_DIR = True
+ from pprint import pprint as pp
+ original_dir = dir
+ def dir(obj):
+ print pp(original_dir(obj))
+
+ import urllib2
+ __main__.urllib2 = urllib2
+
+.. testcleanup::
+
+ dir = original_dir
+ mock.FILTER_DIR = True
+
+
+
+call
+====
+
+.. function:: call(*args, **kwargs)
+
+ `call` is a helper object for making simpler assertions, for comparing
+ with :attr:`~Mock.call_args`, :attr:`~Mock.call_args_list`,
+ :attr:`~Mock.mock_calls` and :attr:`~Mock.method_calls`. `call` can also be
+ used with :meth:`~Mock.assert_has_calls`.
+
+ .. doctest::
+
+ >>> m = MagicMock(return_value=None)
+ >>> m(1, 2, a='foo', b='bar')
+ >>> m()
+ >>> m.call_args_list == [call(1, 2, a='foo', b='bar'), call()]
+ True
+
+.. method:: call.call_list()
+
+ For a call object that represents multiple calls, `call_list`
+ returns a list of all the intermediate calls as well as the
+ final call.
+
+`call_list` is particularly useful for making assertions on "chained calls". A
+chained call is multiple calls on a single line of code. This results in
+multiple entries in :attr:`~Mock.mock_calls` on a mock. Manually constructing
+the sequence of calls can be tedious.
+
+:meth:`~call.call_list` can construct the sequence of calls from the same
+chained call:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> m(1).method(arg='foo').other('bar')(2.0)
+ <MagicMock name='mock().method().other()()' id='...'>
+ >>> kall = call(1).method(arg='foo').other('bar')(2.0)
+ >>> kall.call_list()
+ [call(1),
+ call().method(arg='foo'),
+ call().method().other('bar'),
+ call().method().other()(2.0)]
+ >>> m.mock_calls == kall.call_list()
+ True
+
+.. _calls-as-tuples:
+
+A `call` object is either a tuple of (positional args, keyword args) or
+(name, positional args, keyword args) depending on how it was constructed. When
+you construct them yourself this isn't particularly interesting, but the `call`
+objects that are in the :attr:`Mock.call_args`, :attr:`Mock.call_args_list` and
+:attr:`Mock.mock_calls` attributes can be introspected to get at the individual
+arguments they contain.
+
+The `call` objects in :attr:`Mock.call_args` and :attr:`Mock.call_args_list`
+are two-tuples of (positional args, keyword args) whereas the `call` objects
+in :attr:`Mock.mock_calls`, along with ones you construct yourself, are
+three-tuples of (name, positional args, keyword args).
+
+You can use their "tupleness" to pull out the individual arguments for more
+complex introspection and assertions. The positional arguments are a tuple
+(an empty tuple if there are no positional arguments) and the keyword
+arguments are a dictionary:
+
+.. doctest::
+
+ >>> m = MagicMock(return_value=None)
+ >>> m(1, 2, 3, arg='one', arg2='two')
+ >>> kall = m.call_args
+ >>> args, kwargs = kall
+ >>> args
+ (1, 2, 3)
+ >>> kwargs
+ {'arg2': 'two', 'arg': 'one'}
+ >>> args is kall[0]
+ True
+ >>> kwargs is kall[1]
+ True
+
+ >>> m = MagicMock()
+ >>> m.foo(4, 5, 6, arg='two', arg2='three')
+ <MagicMock name='mock.foo()' id='...'>
+ >>> kall = m.mock_calls[0]
+ >>> name, args, kwargs = kall
+ >>> name
+ 'foo'
+ >>> args
+ (4, 5, 6)
+ >>> kwargs
+ {'arg2': 'three', 'arg': 'two'}
+ >>> name is m.mock_calls[0][0]
+ True
+
+
+create_autospec
+===============
+
+.. function:: create_autospec(spec, spec_set=False, instance=False, **kwargs)
+
+ Create a mock object using another object as a spec. Attributes on the
+ mock will use the corresponding attribute on the `spec` object as their
+ spec.
+
+ Functions or methods being mocked will have their arguments checked to
+ ensure that they are called with the correct signature.
+
+ If `spec_set` is `True` then attempting to set attributes that don't exist
+ on the spec object will raise an `AttributeError`.
+
+ If a class is used as a spec then the return value of the mock (the
+ instance of the class) will have the same spec. You can use a class as the
+ spec for an instance object by passing `instance=True`. The returned mock
+ will only be callable if instances of the mock are callable.
+
+ `create_autospec` also takes arbitrary keyword arguments that are passed to
+ the constructor of the created mock.
+
+See :ref:`auto-speccing` for examples of how to use auto-speccing with
+`create_autospec` and the `autospec` argument to :func:`patch`.
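+
+For a quick flavour of the signature checking, here is a sketch that mirrors
+the quick guide example:
+
+.. code-block:: pycon
+
+ >>> def function(a, b, c):
+ ...     pass
+ ...
+ >>> mock_function = create_autospec(function, return_value='fishy')
+ >>> mock_function(1, 2, 3)
+ 'fishy'
+ >>> mock_function.assert_called_once_with(1, 2, 3)
+ >>> mock_function('wrong arguments')
+ Traceback (most recent call last):
+  ...
+ TypeError: <lambda>() takes exactly 3 arguments (1 given)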
+
+
+ANY
+===
+
+.. data:: ANY
+
+Sometimes you may need to make assertions about *some* of the arguments in a
+call to mock, but either not care about some of the arguments or want to pull
+them individually out of :attr:`~Mock.call_args` and make more complex
+assertions on them.
+
+To ignore certain arguments you can pass in objects that compare equal to
+*everything*. Calls to :meth:`~Mock.assert_called_with` and
+:meth:`~Mock.assert_called_once_with` will then succeed no matter what was
+passed in.
+
+.. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock('foo', bar=object())
+ >>> mock.assert_called_once_with('foo', bar=ANY)
+
+`ANY` can also be used in comparisons with call lists like
+:attr:`~Mock.mock_calls`:
+
+.. doctest::
+
+ >>> m = MagicMock(return_value=None)
+ >>> m(1)
+ >>> m(1, 2)
+ >>> m(object())
+ >>> m.mock_calls == [call(1), call(1, 2), ANY]
+ True
+
+
+
+FILTER_DIR
+==========
+
+.. data:: FILTER_DIR
+
+`FILTER_DIR` is a module level variable that controls the way mock objects
+respond to `dir` (only for Python 2.6 or more recent). The default is `True`,
+which uses the filtering described below to show only useful members. If you
+dislike this filtering, or need to switch it off for diagnostic purposes, then
+set `mock.FILTER_DIR = False`.
+
+With filtering on, `dir(some_mock)` shows only useful attributes and will
+include any dynamically created attributes that wouldn't normally be shown.
+If the mock was created with a `spec` (or `autospec` of course) then all the
+attributes from the original are shown, even if they haven't been accessed
+yet:
+
+.. doctest::
+
+ >>> dir(Mock())
+ ['assert_any_call',
+ 'assert_called_once_with',
+ 'assert_called_with',
+ 'assert_has_calls',
+ 'attach_mock',
+ ...
+ >>> import urllib2
+ >>> dir(Mock(spec=urllib2))
+ ['AbstractBasicAuthHandler',
+ 'AbstractDigestAuthHandler',
+ 'AbstractHTTPHandler',
+ 'BaseHandler',
+ ...
+
+Many of the not-very-useful (private to `Mock` rather than the thing being
+mocked) underscore and double underscore prefixed attributes have been
+filtered from the result of calling `dir` on a `Mock`. If you dislike this
+behaviour you can switch it off by setting the module level switch
+`FILTER_DIR`:
+
+.. doctest::
+
+ >>> import mock
+ >>> mock.FILTER_DIR = False
+ >>> dir(mock.Mock())
+ ['_NonCallableMock__get_return_value',
+ '_NonCallableMock__get_side_effect',
+ '_NonCallableMock__return_value_doc',
+ '_NonCallableMock__set_return_value',
+ '_NonCallableMock__set_side_effect',
+ '__call__',
+ '__class__',
+ ...
+
+Alternatively you can just use `vars(my_mock)` (instance members) and
+`dir(type(my_mock))` (type members) to bypass the filtering irrespective of
+`mock.FILTER_DIR`.
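+
+For example (a rough sketch - the `interesting_attribute` name is just
+illustrative):
+
+.. code-block:: pycon
+
+ >>> my_mock = mock.Mock()
+ >>> my_mock.interesting_attribute = 3
+ >>> 'interesting_attribute' in vars(my_mock)
+ True
+ >>> 'assert_called_with' in dir(type(my_mock))
+ True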
+
+
+mock_open
+=========
+
+.. function:: mock_open(mock=None, read_data=None)
+
+ A helper function to create a mock to replace the use of `open`. It works
+ for `open` called directly or used as a context manager.
+
+ The `mock` argument is the mock object to configure. If `None` (the
+ default) then a `MagicMock` will be created for you, with the API limited
+ to methods or attributes available on standard file handles.
+
+ `read_data` is a string for the `read` method of the file handle to return.
+ This is an empty string by default.
+
+Using `open` as a context manager is a great way to ensure your file handles
+are closed properly and is becoming common::
+
+ with open('/some/path', 'w') as f:
+ f.write('something')
+
+The issue is that even if you mock out the call to `open` it is the
+*returned object* that is used as a context manager (and has `__enter__` and
+`__exit__` called).
+
+Mocking context managers with a :class:`MagicMock` is common enough and fiddly
+enough that a helper function is useful.
+
+.. doctest::
+
+ >>> from mock import mock_open
+ >>> m = mock_open()
+ >>> with patch('__main__.open', m, create=True):
+ ... with open('foo', 'w') as h:
+ ... h.write('some stuff')
+ ...
+ >>> m.mock_calls
+ [call('foo', 'w'),
+ call().__enter__(),
+ call().write('some stuff'),
+ call().__exit__(None, None, None)]
+ >>> m.assert_called_once_with('foo', 'w')
+ >>> handle = m()
+ >>> handle.write.assert_called_once_with('some stuff')
+
+And for reading files:
+
+.. doctest::
+
+ >>> with patch('__main__.open', mock_open(read_data='bibble'), create=True) as m:
+ ... with open('foo') as h:
+ ... result = h.read()
+ ...
+ >>> m.assert_called_once_with('foo')
+ >>> assert result == 'bibble'
+
+
+.. _auto-speccing:
+
+Autospeccing
+============
+
+Autospeccing is based on the existing `spec` feature of mock. It limits the
+api of mocks to the api of an original object (the spec), but it is recursive
+(implemented lazily) so that attributes of mocks only have the same api as
+the attributes of the spec. In addition mocked functions / methods have the
+same call signature as the original so they raise a `TypeError` if they are
+called incorrectly.
+
+Before I explain how auto-speccing works, here's why it is needed.
+
+`Mock` is a very powerful and flexible object, but it suffers from two flaws
+when used to mock out objects from a system under test. One of these flaws is
+specific to the `Mock` api and the other is a more general problem with using
+mock objects.
+
+First the problem specific to `Mock`. `Mock` has two assert methods that are
+extremely handy: :meth:`~Mock.assert_called_with` and
+:meth:`~Mock.assert_called_once_with`.
+
+.. doctest::
+
+ >>> mock = Mock(name='Thing', return_value=None)
+ >>> mock(1, 2, 3)
+ >>> mock.assert_called_once_with(1, 2, 3)
+ >>> mock(1, 2, 3)
+ >>> mock.assert_called_once_with(1, 2, 3)
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected to be called once. Called 2 times.
+
+Because mocks auto-create attributes on demand, and allow you to call them
+with arbitrary arguments, if you misspell one of these assert methods then
+your assertion is gone:
+
+.. code-block:: pycon
+
+ >>> mock = Mock(name='Thing', return_value=None)
+ >>> mock(1, 2, 3)
+ >>> mock.assret_called_once_with(4, 5, 6)
+
+Your tests can pass silently and incorrectly because of the typo.
+
+The second issue is more general to mocking. If you refactor some of your
+code, rename members and so on, any tests for code that is still using the
+*old api* but uses mocks instead of the real objects will still pass. This
+means your tests can all pass even though your code is broken.
+
+Note that this is another reason why you need integration tests as well as
+unit tests. Testing everything in isolation is all fine and dandy, but if you
+don't test how your units are "wired together" there is still lots of room
+for bugs that tests might have caught.
+
+`mock` already provides a feature to help with this, called speccing. If you
+use a class or instance as the `spec` for a mock then you can only access
+attributes on the mock that exist on the real class:
+
+.. doctest::
+
+ >>> import urllib2
+ >>> mock = Mock(spec=urllib2.Request)
+ >>> mock.assret_called_with
+ Traceback (most recent call last):
+ ...
+ AttributeError: Mock object has no attribute 'assret_called_with'
+
+The spec only applies to the mock itself, so we still have the same issue
+with any methods on the mock:
+
+.. code-block:: pycon
+
+ >>> mock.has_data()
+ <mock.Mock object at 0x...>
+ >>> mock.has_data.assret_called_with()
+
+Auto-speccing solves this problem. You can either pass `autospec=True` to
+`patch` / `patch.object` or use the `create_autospec` function to create a
+mock with a spec. If you use the `autospec=True` argument to `patch` then the
+object that is being replaced will be used as the spec object. Because the
+speccing is done "lazily" (the spec is created as attributes on the mock are
+accessed) you can use it with very complex or deeply nested objects (like
+modules that import modules that import modules) without a big performance
+hit.
+
+Here's an example of it in use:
+
+.. doctest::
+
+ >>> import urllib2
+ >>> patcher = patch('__main__.urllib2', autospec=True)
+ >>> mock_urllib2 = patcher.start()
+ >>> urllib2 is mock_urllib2
+ True
+ >>> urllib2.Request
+ <MagicMock name='urllib2.Request' spec='Request' id='...'>
+
+You can see that `urllib2.Request` has a spec. `urllib2.Request` takes two
+arguments in the constructor (one of which is `self`). Here's what happens if
+we try to call it incorrectly:
+
+.. doctest::
+
+ >>> req = urllib2.Request()
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes at least 2 arguments (1 given)
+
+The spec also applies to instantiated classes (i.e. the return value of
+specced mocks):
+
+.. doctest::
+
+ >>> req = urllib2.Request('foo')
+ >>> req
+ <NonCallableMagicMock name='urllib2.Request()' spec='Request' id='...'>
+
+`Request` objects are not callable, so the return value of instantiating our
+mocked out `urllib2.Request` is a non-callable mock. With the spec in place
+any typos in our asserts will raise the correct error:
+
+.. doctest::
+
+ >>> req.add_header('spam', 'eggs')
+ <MagicMock name='urllib2.Request().add_header()' id='...'>
+ >>> req.add_header.assret_called_with
+ Traceback (most recent call last):
+ ...
+ AttributeError: Mock object has no attribute 'assret_called_with'
+ >>> req.add_header.assert_called_with('spam', 'eggs')
+
+In many cases you will just be able to add `autospec=True` to your existing
+`patch` calls and then be protected against bugs due to typos and api
+changes.
+
+As well as using `autospec` through `patch` there is a
+:func:`create_autospec` for creating autospecced mocks directly:
+
+.. doctest::
+
+ >>> import urllib2
+ >>> mock_urllib2 = create_autospec(urllib2)
+ >>> mock_urllib2.Request('foo', 'bar')
+ <NonCallableMagicMock name='mock.Request()' spec='Request' id='...'>
+
+This isn't without caveats and limitations however, which is why it is not
+the default behaviour. In order to know what attributes are available on the
+spec object, autospec has to introspect (access attributes) the spec. As you
+traverse attributes on the mock a corresponding traversal of the original
+object is happening under the hood. If any of your specced objects have
+properties or descriptors that can trigger code execution then you may not be
+able to use autospec. On the other hand it is much better to design your
+objects so that introspection is safe [#]_.
+
+A more serious problem is that it is common for instance attributes to be
+created in the `__init__` method and not to exist on the class at all.
+`autospec` can't know about any dynamically created attributes and restricts
+the api to visible attributes.
+
+.. doctest::
+
+ >>> class Something(object):
+ ... def __init__(self):
+ ... self.a = 33
+ ...
+ >>> with patch('__main__.Something', autospec=True):
+ ... thing = Something()
+ ... thing.a
+ ...
+ Traceback (most recent call last):
+ ...
+ AttributeError: Mock object has no attribute 'a'
+
+There are a few different ways of resolving this problem. The easiest, but
+not necessarily the least annoying, way is to simply set the required
+attributes on the mock after creation. Just because `autospec` doesn't allow
+you to fetch attributes that don't exist on the spec it doesn't prevent you
+setting them:
+
+.. doctest::
+
+ >>> with patch('__main__.Something', autospec=True):
+ ... thing = Something()
+ ... thing.a = 33
+ ...
+
+There is a more aggressive version of both `spec` and `autospec` that *does*
+prevent you setting non-existent attributes. This is useful if you want to
+ensure your code only *sets* valid attributes too, but obviously it prevents
+this particular scenario:
+
+.. doctest::
+
+ >>> with patch('__main__.Something', autospec=True, spec_set=True):
+ ... thing = Something()
+ ... thing.a = 33
+ ...
+ Traceback (most recent call last):
+ ...
+ AttributeError: Mock object has no attribute 'a'
+
+Probably the best way of solving the problem is to add class attributes as
+default values for instance members initialised in `__init__`. Note that if
+you are only setting default attributes in `__init__` then providing them via
+class attributes (shared between instances of course) is faster too. e.g.
+
+.. code-block:: python
+
+ class Something(object):
+ a = 33
+
+This brings up another issue. It is relatively common to provide a default
+value of `None` for members that will later be an object of a different type.
+`None` would be useless as a spec because it wouldn't let you access *any*
+attributes or methods on it. As `None` is *never* going to be useful as a
+spec, and probably indicates a member that will normally be of some other type,
+`autospec` doesn't use a spec for members that are set to `None`. These will
+just be ordinary mocks (well - `MagicMocks`):
+
+.. doctest::
+
+ >>> class Something(object):
+ ... member = None
+ ...
+ >>> mock = create_autospec(Something)
+ >>> mock.member.foo.bar.baz()
+ <MagicMock name='mock.member.foo.bar.baz()' id='...'>
+
+If modifying your production classes to add defaults isn't to your liking
+then there are more options. One of these is simply to use an instance as the
+spec rather than the class. The other is to create a subclass of the
+production class and add the defaults to the subclass without affecting the
+production class. Both of these require you to use an alternative object as
+the spec. Thankfully `patch` supports this - you can simply pass the
+alternative object as the `autospec` argument:
+
+.. doctest::
+
+ >>> class Something(object):
+ ... def __init__(self):
+ ... self.a = 33
+ ...
+ >>> class SomethingForTest(Something):
+ ... a = 33
+ ...
+ >>> p = patch('__main__.Something', autospec=SomethingForTest)
+ >>> mock = p.start()
+ >>> mock.a
+ <NonCallableMagicMock name='Something.a' spec='int' id='...'>
+
+.. note::
+
+ An additional limitation (currently) with `autospec` is that unbound
+ methods on mocked classes *don't* take an "explicit self" as the first
+ argument - so this usage will fail with `autospec`.
+
+ .. doctest::
+
+ >>> class Foo(object):
+ ... def foo(self):
+ ... pass
+ ...
+ >>> Foo.foo(Foo())
+ >>> MockFoo = create_autospec(Foo)
+ >>> MockFoo.foo(MockFoo())
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes exactly 1 argument (2 given)
+
+ The reason is that it's very hard to tell the difference between functions,
+ unbound methods and staticmethods across Python 2 & 3 and the alternative
+ implementations. This restriction may be fixed in future versions.
+
+
+------
+
+.. [#] This only applies to classes or already instantiated objects. Calling
+ a mocked class to create a mock instance *does not* create a real instance.
+ It is only attribute lookups - along with calls to `dir` - that are done. A
+ way round this problem would have been to use `getattr_static
+ <http://docs.python.org/dev/library/inspect.html#inspect.getattr_static>`_,
+ which can fetch attributes without triggering code execution. Descriptors
+ like `classmethod` and `staticmethod` *need* to be fetched correctly though,
+ so that their signatures can be mocked correctly.
diff --git a/third_party/python/mock-1.0.0/docs/index.txt b/third_party/python/mock-1.0.0/docs/index.txt
new file mode 100644
index 0000000000..7e4a8daca6
--- /dev/null
+++ b/third_party/python/mock-1.0.0/docs/index.txt
@@ -0,0 +1,411 @@
+====================================
+ Mock - Mocking and Testing Library
+====================================
+
+.. currentmodule:: mock
+
+:Author: `Michael Foord
+ <http://www.voidspace.org.uk/python/weblog/index.shtml>`_
+:Version: |release|
+:Date: 2012/10/07
+:Homepage: `Mock Homepage`_
+:Download: `Mock on PyPI`_
+:Documentation: `PDF Documentation
+ <http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf>`_
+:License: `BSD License`_
+:Support: `Mailing list (testing-in-python@lists.idyll.org)
+ <http://lists.idyll.org/listinfo/testing-in-python>`_
+:Issue tracker: `Google code project
+ <http://code.google.com/p/mock/issues/list>`_
+
+.. _Mock Homepage: http://www.voidspace.org.uk/python/mock/
+.. _BSD License: http://www.voidspace.org.uk/python/license.shtml
+
+
+.. currentmodule:: mock
+
+.. module:: mock
+ :synopsis: Mock object and testing library.
+
+.. index:: introduction
+
+mock is a library for testing in Python. It allows you to replace parts of
+your system under test with mock objects and make assertions about how they
+have been used.
+
+mock is now part of the Python standard library, available as `unittest.mock
+<http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock>`_
+in Python 3.3 onwards.
+
+mock provides a core :class:`Mock` class removing the need to create a host
+of stubs throughout your test suite. After performing an action, you can make
+assertions about which methods / attributes were used and arguments they were
+called with. You can also specify return values and set needed attributes in
+the normal way.
+
+Additionally, mock provides a :func:`patch` decorator that handles patching
+module and class level attributes within the scope of a test, along with
+:const:`sentinel` for creating unique objects. See the `quick guide`_ for
+some examples of how to use :class:`Mock`, :class:`MagicMock` and
+:func:`patch`.
+
+Mock is very easy to use and is designed for use with
+`unittest <http://pypi.python.org/pypi/unittest2>`_. Mock is based on
+the 'action -> assertion' pattern instead of `'record -> replay'` used by many
+mocking frameworks.
+
+mock is tested on Python versions 2.4-2.7 and Python 3, plus the latest
+versions of Jython and PyPy.
+
+
+.. testsetup::
+
+ class ProductionClass(object):
+ def method(self, *args):
+ pass
+
+ module = sys.modules['module'] = ProductionClass
+ ProductionClass.ClassName1 = ProductionClass
+ ProductionClass.ClassName2 = ProductionClass
+
+
+
+API Documentation
+=================
+
+.. toctree::
+ :maxdepth: 2
+
+ mock
+ patch
+ helpers
+ sentinel
+ magicmock
+
+
+User Guide
+==========
+
+.. toctree::
+ :maxdepth: 2
+
+ getting-started
+ examples
+ compare
+ changelog
+
+
+.. index:: installing
+
+Installing
+==========
+
+The current version is |release|. Mock is stable and widely used. If you do
+find any bugs, or have suggestions for improvements / extensions
+then please contact us.
+
+* `mock on PyPI <http://pypi.python.org/pypi/mock>`_
+* `mock documentation as PDF
+ <http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf>`_
+* `Google Code Home & Mercurial Repository <http://code.google.com/p/mock/>`_
+
+.. index:: repository
+.. index:: hg
+
+You can check out the latest development version from the Google Code Mercurial
+repository with the following command:
+
+ ``hg clone https://mock.googlecode.com/hg/ mock``
+
+
+.. index:: pip
+.. index:: easy_install
+.. index:: setuptools
+
+If you have pip, setuptools or distribute you can install mock with:
+
+ | ``easy_install -U mock``
+ | ``pip install -U mock``
+
+Alternatively you can download the mock distribution from PyPI and after
+unpacking run:
+
+ ``python setup.py install``
+
+
+Quick Guide
+===========
+
+:class:`Mock` and :class:`MagicMock` objects create all attributes and
+methods as you access them and store details of how they have been used. You
+can configure them, to specify return values or limit what attributes are
+available, and then make assertions about how they have been used:
+
+.. doctest::
+
+ >>> from mock import MagicMock
+ >>> thing = ProductionClass()
+ >>> thing.method = MagicMock(return_value=3)
+ >>> thing.method(3, 4, 5, key='value')
+ 3
+ >>> thing.method.assert_called_with(3, 4, 5, key='value')
+
+:attr:`side_effect` allows you to perform side effects, including raising an
+exception when a mock is called:
+
+.. doctest::
+
+ >>> mock = Mock(side_effect=KeyError('foo'))
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ KeyError: 'foo'
+
+ >>> values = {'a': 1, 'b': 2, 'c': 3}
+ >>> def side_effect(arg):
+ ... return values[arg]
+ ...
+ >>> mock.side_effect = side_effect
+ >>> mock('a'), mock('b'), mock('c')
+ (1, 2, 3)
+ >>> mock.side_effect = [5, 4, 3, 2, 1]
+ >>> mock(), mock(), mock()
+ (5, 4, 3)
+
+Mock has many other ways you can configure it and control its behaviour. For
+example the `spec` argument configures the mock to take its specification
+from another object. Attempting to access attributes or methods on the mock
+that don't exist on the spec will fail with an `AttributeError`.
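+
+For example, a brief sketch using the `ProductionClass` above as the spec (the
+`no_such_method` name is illustrative):
+
+.. code-block:: pycon
+
+ >>> mock = Mock(spec=ProductionClass)
+ >>> mock.method(1, 2, 3)
+ <Mock name='mock.method()' id='...'>
+ >>> mock.no_such_method()
+ Traceback (most recent call last):
+  ...
+ AttributeError: Mock object has no attribute 'no_such_method'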
+
+The :func:`patch` decorator / context manager makes it easy to mock classes or
+objects in a module under test. The object you specify will be replaced with a
+mock (or other object) during the test and restored when the test ends:
+
+.. doctest::
+
+ >>> from mock import patch
+ >>> @patch('module.ClassName2')
+ ... @patch('module.ClassName1')
+ ... def test(MockClass1, MockClass2):
+ ... module.ClassName1()
+ ... module.ClassName2()
+ ... assert MockClass1 is module.ClassName1
+ ... assert MockClass2 is module.ClassName2
+ ... assert MockClass1.called
+ ... assert MockClass2.called
+ ...
+ >>> test()
+
+.. note::
+
+ When you nest patch decorators the mocks are passed in to the decorated
+ function in the same order they are applied (the normal *python* order that
+ decorators are applied). This means from the bottom up, so in the example
+ above the mock for `module.ClassName1` is passed in first.
+
+ With `patch` it matters that you patch objects in the namespace where they
+ are looked up. This is normally straightforward, but for a quick guide
+ read :ref:`where to patch <where-to-patch>`.
+
+As well as a decorator `patch` can be used as a context manager in a with
+statement:
+
+.. doctest::
+
+ >>> with patch.object(ProductionClass, 'method', return_value=None) as mock_method:
+ ... thing = ProductionClass()
+ ... thing.method(1, 2, 3)
+ ...
+ >>> mock_method.assert_called_once_with(1, 2, 3)
+
+
+There is also :func:`patch.dict` for setting values in a dictionary just
+during a scope and restoring the dictionary to its original state when the test
+ends:
+
+.. doctest::
+
+ >>> foo = {'key': 'value'}
+ >>> original = foo.copy()
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == original
+
+Mock supports the mocking of Python :ref:`magic methods <magic-methods>`. The
+easiest way of using magic methods is with the :class:`MagicMock` class. It
+allows you to do things like:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock.__str__.return_value = 'foobarbaz'
+ >>> str(mock)
+ 'foobarbaz'
+ >>> mock.__str__.assert_called_with()
+
+Mock allows you to assign functions (or other Mock instances) to magic methods
+and they will be called appropriately. The `MagicMock` class is just a Mock
+variant that has all of the magic methods pre-created for you (well, all the
+useful ones anyway).
+
+The following is an example of using magic methods with the ordinary Mock
+class:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.__str__ = Mock(return_value='wheeeeee')
+ >>> str(mock)
+ 'wheeeeee'
+
+For ensuring that the mock objects in your tests have the same api as the
+objects they are replacing, you can use :ref:`auto-speccing <auto-speccing>`.
+Auto-speccing can be done through the `autospec` argument to patch, or the
+:func:`create_autospec` function. Auto-speccing creates mock objects that
+have the same attributes and methods as the objects they are replacing, and
+any functions and methods (including constructors) have the same call
+signature as the real object.
+
+This ensures that your mocks will fail in the same way as your production
+code if they are used incorrectly:
+
+.. doctest::
+
+ >>> from mock import create_autospec
+ >>> def function(a, b, c):
+ ... pass
+ ...
+ >>> mock_function = create_autospec(function, return_value='fishy')
+ >>> mock_function(1, 2, 3)
+ 'fishy'
+ >>> mock_function.assert_called_once_with(1, 2, 3)
+ >>> mock_function('wrong arguments')
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes exactly 3 arguments (1 given)
+
+`create_autospec` can also be used on classes, where it copies the signature of
+the `__init__` method, and on callable objects where it copies the signature of
+the `__call__` method.
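+
+A short sketch of the class case (the `Thing` class here is purely
+illustrative):
+
+.. code-block:: pycon
+
+ >>> class Thing(object):
+ ...     def __init__(self, a, b):
+ ...         self.a = a
+ ...         self.b = b
+ ...
+ >>> MockThing = create_autospec(Thing)
+ >>> instance = MockThing(1, 2)      # matches __init__(self, a, b)
+ >>> MockThing.assert_called_once_with(1, 2)
+ >>> # calling MockThing with the wrong number of arguments raises a TypeError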
+
+
+.. index:: references
+.. index:: articles
+
+References
+==========
+
+Articles, blog entries and other stuff related to testing with Mock:
+
+* `Imposing a No DB Discipline on Django unit tests
+ <https://github.com/carljm/django-testing-slides/blob/master/models/30_no_database.md>`_
+* `mock-django: tools for mocking the Django ORM and models
+ <https://github.com/dcramer/mock-django>`_
+* `PyCon 2011 Video: Testing with mock <https://blip.tv/file/4881513>`_
+* `Mock objects in Python
+ <http://noopenblockers.com/2012/01/06/mock-objects-in-python/>`_
+* `Python: Injecting Mock Objects for Powerful Testing
+ <http://blueprintforge.com/blog/2012/01/08/python-injecting-mock-objects-for-powerful-testing/>`_
+* `Python Mock: How to assert a substring of logger output
+ <http://www.michaelpollmeier.com/python-mock-how-to-assert-a-substring-of-logger-output/>`_
+* `Mocking Django <http://www.mattjmorrison.com/2011/09/mocking-django.html>`_
+* `Mocking dates and other classes that can't be modified
+ <http://williamjohnbert.com/2011/07/how-to-unit-testing-in-django-with-mocking-and-patching/>`_
+* `Mock recipes <http://konryd.blogspot.com/2010/06/mock-recipies.html>`_
+* `Mockity mock mock - some love for the mock module
+ <http://konryd.blogspot.com/2010/05/mockity-mock-mock-some-love-for-mock.html>`_
+* `Coverage and Mock (with django)
+ <http://mattsnider.com/python/mock-and-coverage/>`_
+* `Python Unit Testing with Mock <http://www.insomnihack.com/?p=194>`_
+* `Getting started with Python Mock
+ <http://myadventuresincoding.wordpress.com/2011/02/26/python-python-mock-cheat-sheet/>`_
+* `Smart Parameter Checks with mock
+ <http://tobyho.com/2011/03/24/smart-parameter-checks-in/>`_
+* `Python mock testing techniques and tools
+ <http://agiletesting.blogspot.com/2009/07/python-mock-testing-techniques-and.html>`_
+* `How To Test Django Template Tags
+ <http://techblog.ironfroggy.com/2008/10/how-to-test.html>`_
+* `A presentation on Unit Testing with Mock
+ <http://pypap.blogspot.com/2008/10/newbie-nugget-unit-testing-with-mock.html>`_
+* `Mocking with Django and Google AppEngine
+ <http://michael-a-nelson.blogspot.com/2008/09/mocking-with-django-and-google-app.html>`_
+
+
+.. index:: tests
+.. index:: unittest2
+
+Tests
+=====
+
+Mock uses `unittest2 <http://pypi.python.org/pypi/unittest2>`_ for its own
+test suite. In order to run it, use the `unit2` script that comes with
+the `unittest2` module, on a checkout of the source repository:
+
+ `unit2 discover`
+
+If you have `setuptools <http://pypi.python.org/pypi/distribute>`_ as well as
+unittest2 you can run:
+
+ ``python setup.py test``
+
+On Python 3.2 you can use the ``unittest`` module from the standard library:
+
+ ``python3.2 -m unittest discover``
+
+.. index:: Python 3
+
+On Python 3 the tests for unicode are skipped as they are not relevant. On
+Python 2.4 tests that use the with statement are skipped as the with statement
+is invalid syntax on Python 2.4.
+
+
+.. index:: older versions
+
+Older Versions
+==============
+
+Documentation for older versions of mock:
+
+* `mock 0.8 <http://www.voidspace.org.uk/python/mock/0.8/>`_
+* `mock 0.7 <http://www.voidspace.org.uk/python/mock/0.7/>`_
+* `mock 0.6 <http://www.voidspace.org.uk/python/mock/0.6.0/>`_
+
+Docs from the in-development version of `mock` can be found at
+`mock.readthedocs.org <http://mock.readthedocs.org>`_.
+
+
+Terminology
+===========
+
+Terminology for objects used to replace other ones can be confusing. Terms
+like double, fake, mock, stub, and spy are all used with varying meanings.
+
+In `classic mock terminology
+<http://xunitpatterns.com/Mocks,%20Fakes,%20Stubs%20and%20Dummies.html>`_
+:class:`mock.Mock` is a `spy <http://xunitpatterns.com/Test%20Spy.html>`_ that
+allows for *post-mortem* examination. This is what I call the "action ->
+assertion" [#]_ pattern of testing.
+
+I'm not however a fan of this "statically typed mocking terminology"
+promulgated by `Martin Fowler
+<http://martinfowler.com/articles/mocksArentStubs.html>`_. It confuses usage
+patterns with implementation and prevents you from using natural terminology
+when discussing mocking.
+
+I much prefer duck typing, if an object used in your test suite looks like a
+mock object and quacks like a mock object then it's fine to call it a mock, no
+matter what the implementation looks like.
+
+This terminology is perhaps more useful in less capable languages where
+different usage patterns will *require* different implementations.
+`mock.Mock()` is capable of being used in most of the different roles
+described by Fowler, except (annoyingly / frustratingly / ironically) a Mock
+itself!
+
+How about a simpler definition: a "mock object" is an object used to replace a
+real one in a system under test.
+
+.. [#] This pattern is called "AAA" by some members of the testing community;
+ "Arrange - Act - Assert".
diff --git a/third_party/python/mock-1.0.0/docs/magicmock.txt b/third_party/python/mock-1.0.0/docs/magicmock.txt
new file mode 100644
index 0000000000..42b2ed9db1
--- /dev/null
+++ b/third_party/python/mock-1.0.0/docs/magicmock.txt
@@ -0,0 +1,258 @@
+
+.. currentmodule:: mock
+
+
+.. _magic-methods:
+
+Mocking Magic Methods
+=====================
+
+.. currentmodule:: mock
+
+:class:`Mock` supports mocking `magic methods
+<http://www.ironpythoninaction.com/magic-methods.html>`_. This allows mock
+objects to replace containers or other objects that implement Python
+protocols.
+
+Because magic methods are looked up differently from normal methods [#]_, this
+support has been specially implemented. This means that only specific magic
+methods are supported. The supported list includes *almost* all of them. If
+there are any missing that you need please let us know!
+
+You mock magic methods by setting the method you are interested in to a function
+or a mock instance. If you are using a function then it *must* take ``self`` as
+the first argument [#]_.
+
+.. doctest::
+
+ >>> def __str__(self):
+ ... return 'fooble'
+ ...
+ >>> mock = Mock()
+ >>> mock.__str__ = __str__
+ >>> str(mock)
+ 'fooble'
+
+ >>> mock = Mock()
+ >>> mock.__str__ = Mock()
+ >>> mock.__str__.return_value = 'fooble'
+ >>> str(mock)
+ 'fooble'
+
+ >>> mock = Mock()
+ >>> mock.__iter__ = Mock(return_value=iter([]))
+ >>> list(mock)
+ []
+
+One use case for this is for mocking objects used as context managers in a
+`with` statement:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.__enter__ = Mock(return_value='foo')
+ >>> mock.__exit__ = Mock(return_value=False)
+ >>> with mock as m:
+ ... assert m == 'foo'
+ ...
+ >>> mock.__enter__.assert_called_with()
+ >>> mock.__exit__.assert_called_with(None, None, None)
+
+Calls to magic methods do not appear in :attr:`~Mock.method_calls`, but they
+are recorded in :attr:`~Mock.mock_calls`.
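+
+A quick sketch of the difference (the `ordinary_method` name is illustrative):
+
+.. code-block:: pycon
+
+ >>> mock = MagicMock()
+ >>> mock.ordinary_method()
+ <MagicMock name='mock.ordinary_method()' id='...'>
+ >>> int(mock)
+ 1
+ >>> mock.method_calls
+ [call.ordinary_method()]
+ >>> mock.mock_calls == [call.ordinary_method(), call.__int__()]
+ True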
+
+.. note::
+
+ If you use the `spec` keyword argument to create a mock then attempting to
+ set a magic method that isn't in the spec will raise an `AttributeError`.
+
+The full list of supported magic methods is:
+
+* ``__hash__``, ``__sizeof__``, ``__repr__`` and ``__str__``
+* ``__dir__``, ``__format__`` and ``__subclasses__``
+* ``__floor__``, ``__trunc__`` and ``__ceil__``
+* Comparisons: ``__cmp__``, ``__lt__``, ``__gt__``, ``__le__``, ``__ge__``,
+ ``__eq__`` and ``__ne__``
+* Container methods: ``__getitem__``, ``__setitem__``, ``__delitem__``,
+ ``__contains__``, ``__len__``, ``__iter__``, ``__getslice__``,
+ ``__setslice__``, ``__reversed__`` and ``__missing__``
+* Context manager: ``__enter__`` and ``__exit__``
+* Unary numeric methods: ``__neg__``, ``__pos__`` and ``__invert__``
+* The numeric methods (including right hand and in-place variants):
+ ``__add__``, ``__sub__``, ``__mul__``, ``__div__``,
+ ``__floordiv__``, ``__mod__``, ``__divmod__``, ``__lshift__``,
+ ``__rshift__``, ``__and__``, ``__xor__``, ``__or__``, and ``__pow__``
+* Numeric conversion methods: ``__complex__``, ``__int__``, ``__float__``,
+ ``__index__`` and ``__coerce__``
+* Descriptor methods: ``__get__``, ``__set__`` and ``__delete__``
+* Pickling: ``__reduce__``, ``__reduce_ex__``, ``__getinitargs__``,
+ ``__getnewargs__``, ``__getstate__`` and ``__setstate__``
+
+
+The following methods are supported in Python 2 but don't exist in Python 3:
+
+* ``__unicode__``, ``__long__``, ``__oct__``, ``__hex__`` and ``__nonzero__``
+* ``__truediv__`` and ``__rtruediv__``
+
+The following methods are supported in Python 3 but don't exist in Python 2:
+
+* ``__bool__`` and ``__next__``
+
+The following methods exist but are *not* supported as they are either in use by
+mock, can't be set dynamically, or can cause problems:
+
+* ``__getattr__``, ``__setattr__``, ``__init__`` and ``__new__``
+* ``__prepare__``, ``__instancecheck__``, ``__subclasscheck__``, ``__del__``
+
+
+
+Magic Mock
+==========
+
+There are two `MagicMock` variants: `MagicMock` and `NonCallableMagicMock`.
+
+
+.. class:: MagicMock(*args, **kw)
+
+ ``MagicMock`` is a subclass of :class:`Mock` with default implementations
+ of most of the magic methods. You can use ``MagicMock`` without having to
+ configure the magic methods yourself.
+
+ The constructor parameters have the same meaning as for :class:`Mock`.
+
+ If you use the `spec` or `spec_set` arguments then *only* magic methods
+ that exist in the spec will be created.
+
+
+.. class:: NonCallableMagicMock(*args, **kw)
+
+ A non-callable version of `MagicMock`.
+
+ The constructor parameters have the same meaning as for
+ :class:`MagicMock`, with the exception of `return_value` and
+ `side_effect` which have no meaning on a non-callable mock.
+
+The magic methods are set up with `MagicMock` objects, so you can configure them
+and use them in the usual way:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock[3] = 'fish'
+ >>> mock.__setitem__.assert_called_with(3, 'fish')
+ >>> mock.__getitem__.return_value = 'result'
+ >>> mock[2]
+ 'result'
+
+By default many of the protocol methods are required to return objects of a
+specific type. These methods are preconfigured with a default return value, so
+that they can be used without you having to do anything if you aren't interested
+in the return value. You can still *set* the return value manually if you want
+to change the default.
+
+Methods and their defaults:
+
+* ``__lt__``: NotImplemented
+* ``__gt__``: NotImplemented
+* ``__le__``: NotImplemented
+* ``__ge__``: NotImplemented
+* ``__int__`` : 1
+* ``__contains__`` : False
+* ``__len__`` : 0
+* ``__iter__`` : iter([])
+* ``__exit__`` : False
+* ``__complex__`` : 1j
+* ``__float__`` : 1.0
+* ``__bool__`` : True
+* ``__nonzero__`` : True
+* ``__oct__`` : '1'
+* ``__hex__`` : '0x1'
+* ``__long__`` : long(1)
+* ``__index__`` : 1
+* ``__hash__`` : default hash for the mock
+* ``__str__`` : default str for the mock
+* ``__unicode__`` : default unicode for the mock
+* ``__sizeof__``: default sizeof for the mock
+
+For example:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> int(mock)
+ 1
+ >>> len(mock)
+ 0
+ >>> hex(mock)
+ '0x1'
+ >>> list(mock)
+ []
+ >>> object() in mock
+ False
+
+The two equality methods, `__eq__` and `__ne__`, are special (changed in
+0.7.2). They do the default equality comparison on identity, using a side
+effect, unless you change their return value to return something else:
+
+.. doctest::
+
+ >>> MagicMock() == 3
+ False
+ >>> MagicMock() != 3
+ True
+ >>> mock = MagicMock()
+ >>> mock.__eq__.return_value = True
+ >>> mock == 3
+ True
+
+In `0.8`, `__iter__` also gained special handling, implemented with a
+side effect. The return value of `MagicMock.__iter__` can be any iterable
+object and isn't required to be an iterator:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock.__iter__.return_value = ['a', 'b', 'c']
+ >>> list(mock)
+ ['a', 'b', 'c']
+ >>> list(mock)
+ ['a', 'b', 'c']
+
+If the return value *is* an iterator, then iterating over it once will consume
+it and subsequent iterations will result in an empty list:
+
+.. doctest::
+
+ >>> mock.__iter__.return_value = iter(['a', 'b', 'c'])
+ >>> list(mock)
+ ['a', 'b', 'c']
+ >>> list(mock)
+ []
+
+``MagicMock`` has all of the supported magic methods configured except for some
+of the obscure and obsolete ones. You can still set these up if you want.
+
+Magic methods that are supported but not setup by default in ``MagicMock`` are:
+
+* ``__cmp__``
+* ``__getslice__`` and ``__setslice__``
+* ``__coerce__``
+* ``__subclasses__``
+* ``__dir__``
+* ``__format__``
+* ``__get__``, ``__set__`` and ``__delete__``
+* ``__reversed__`` and ``__missing__``
+* ``__reduce__``, ``__reduce_ex__``, ``__getinitargs__``, ``__getnewargs__``,
+ ``__getstate__`` and ``__setstate__``
+* ``__getformat__`` and ``__setformat__``
+
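+For example, one of the methods from the list above can still be configured
+explicitly (a minimal sketch):
+
+.. doctest::
+
+    >>> mock = MagicMock()
+    >>> mock.__reversed__ = MagicMock(return_value=iter([3, 2, 1]))
+    >>> list(reversed(mock))
+    [3, 2, 1]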
+
+
+------------
+
+.. [#] Magic methods *should* be looked up on the class rather than the
+ instance. Different versions of Python are inconsistent about applying this
+ rule. The supported protocol methods should work with all supported versions
+ of Python.
+.. [#] The function is basically hooked up to the class, but each ``Mock``
+ instance is kept isolated from the others.
diff --git a/third_party/python/mock-1.0.0/docs/mock.txt b/third_party/python/mock-1.0.0/docs/mock.txt
new file mode 100644
index 0000000000..58712b21a6
--- /dev/null
+++ b/third_party/python/mock-1.0.0/docs/mock.txt
@@ -0,0 +1,842 @@
+The Mock Class
+==============
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ class SomeClass:
+ pass
+
+
+`Mock` is a flexible mock object intended to replace the use of stubs and
+test doubles throughout your code. Mocks are callable and create attributes as
+new mocks when you access them [#]_. Accessing the same attribute will always
+return the same mock. Mocks record how you use them, allowing you to make
+assertions about what your code has done to them.
+
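+For example, a minimal illustration of attribute auto-creation:
+
+.. doctest::
+
+    >>> mock = Mock()
+    >>> mock.some_attribute
+    <Mock name='mock.some_attribute' id='...'>
+    >>> mock.some_attribute is mock.some_attribute
+    True
+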
+:class:`MagicMock` is a subclass of `Mock` with all the magic methods
+pre-created and ready to use. There are also non-callable variants, useful
+when you are mocking out objects that aren't callable:
+:class:`NonCallableMock` and :class:`NonCallableMagicMock`.
+
+The :func:`patch` decorators make it easy to temporarily replace classes
+in a particular module with a `Mock` object. By default `patch` will create
+a `MagicMock` for you. You can specify an alternative class of `Mock` using
+the `new_callable` argument to `patch`.
+
+
+.. index:: side_effect
+.. index:: return_value
+.. index:: wraps
+.. index:: name
+.. index:: spec
+
+.. class:: Mock(spec=None, side_effect=None, return_value=DEFAULT, wraps=None, name=None, spec_set=None, **kwargs)
+
+ Create a new `Mock` object. `Mock` takes several optional arguments
+ that specify the behaviour of the Mock object:
+
+ * `spec`: This can be either a list of strings or an existing object (a
+ class or instance) that acts as the specification for the mock object. If
+ you pass in an object then a list of strings is formed by calling dir on
+ the object (excluding unsupported magic attributes and methods).
+ Accessing any attribute not in this list will raise an `AttributeError`.
+
+ If `spec` is an object (rather than a list of strings) then
+ :attr:`__class__` returns the class of the spec object. This allows mocks
+ to pass `isinstance` tests.
+
+ * `spec_set`: A stricter variant of `spec`. If used, attempting to *set*
+ or get an attribute on the mock that isn't on the object passed as
+ `spec_set` will raise an `AttributeError`.
+
+ * `side_effect`: A function to be called whenever the Mock is called. See
+ the :attr:`~Mock.side_effect` attribute. Useful for raising exceptions or
+ dynamically changing return values. The function is called with the same
+ arguments as the mock, and unless it returns :data:`DEFAULT`, the return
+ value of this function is used as the return value.
+
+ Alternatively `side_effect` can be an exception class or instance. In
+ this case the exception will be raised when the mock is called.
+
+ If `side_effect` is an iterable then each call to the mock will return
+ the next value from the iterable. If any of the members of the iterable
+ are exceptions they will be raised instead of returned.
+
+ A `side_effect` can be cleared by setting it to `None`.
+
+ * `return_value`: The value returned when the mock is called. By default
+ this is a new Mock (created on first access). See the
+ :attr:`return_value` attribute.
+
+ * `wraps`: Item for the mock object to wrap. If `wraps` is not None then
+ calling the Mock will pass the call through to the wrapped object
+ (returning the real result and ignoring `return_value`). Attribute access
+ on the mock will return a Mock object that wraps the corresponding
+ attribute of the wrapped object (so attempting to access an attribute
+ that doesn't exist will raise an `AttributeError`).
+
+ If the mock has an explicit `return_value` set then calls are not passed
+ to the wrapped object and the `return_value` is returned instead.
+
+ * `name`: If the mock has a name then it will be used in the repr of the
+ mock. This can be useful for debugging. The name is propagated to child
+ mocks.
+
+ Mocks can also be called with arbitrary keyword arguments. These will be
+ used to set attributes on the mock after it is created. See the
+ :meth:`configure_mock` method for details.
+
+
+ .. method:: assert_called_with(*args, **kwargs)
+
+ This method is a convenient way of asserting that calls are made in a
+ particular way:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.method(1, 2, 3, test='wow')
+ <Mock name='mock.method()' id='...'>
+ >>> mock.method.assert_called_with(1, 2, 3, test='wow')
+
+
+ .. method:: assert_called_once_with(*args, **kwargs)
+
+ Assert that the mock was called exactly once and with the specified
+ arguments.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock('foo', bar='baz')
+ >>> mock.assert_called_once_with('foo', bar='baz')
+ >>> mock('foo', bar='baz')
+ >>> mock.assert_called_once_with('foo', bar='baz')
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected to be called once. Called 2 times.
+
+
+ .. method:: assert_any_call(*args, **kwargs)
+
+ assert the mock has been called with the specified arguments.
+
+ The assert passes if the mock has *ever* been called, unlike
+ :meth:`assert_called_with` and :meth:`assert_called_once_with` that
+ only pass if the call is the most recent one.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock(1, 2, arg='thing')
+ >>> mock('some', 'thing', 'else')
+ >>> mock.assert_any_call(1, 2, arg='thing')
+
+
+ .. method:: assert_has_calls(calls, any_order=False)
+
+ assert the mock has been called with the specified calls.
+ The `mock_calls` list is checked for the calls.
+
+ If `any_order` is False (the default) then the calls must be
+ sequential. There can be extra calls before or after the
+ specified calls.
+
+ If `any_order` is True then the calls can be in any order, but
+ they must all appear in :attr:`mock_calls`.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock(1)
+ >>> mock(2)
+ >>> mock(3)
+ >>> mock(4)
+ >>> calls = [call(2), call(3)]
+ >>> mock.assert_has_calls(calls)
+ >>> calls = [call(4), call(2), call(3)]
+ >>> mock.assert_has_calls(calls, any_order=True)
+
+
+ .. method:: reset_mock()
+
+ The reset_mock method resets all the call attributes on a mock object:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock('hello')
+ >>> mock.called
+ True
+ >>> mock.reset_mock()
+ >>> mock.called
+ False
+
+ This can be useful where you want to make a series of assertions that
+ reuse the same object. Note that `reset_mock` *doesn't* clear the
+ return value, :attr:`side_effect` or any child attributes you have
+ set using normal assignment. Child mocks and the return value mock
+ (if any) are reset as well.
+
+
+ .. method:: mock_add_spec(spec, spec_set=False)
+
+ Add a spec to a mock. `spec` can either be an object or a
+ list of strings. Only attributes on the `spec` can be fetched as
+ attributes from the mock.
+
+ If `spec_set` is `True` then only attributes on the spec can be set.
+
+
+ .. method:: attach_mock(mock, attribute)
+
+ Attach a mock as an attribute of this one, replacing its name and
+ parent. Calls to the attached mock will be recorded in the
+ :attr:`method_calls` and :attr:`mock_calls` attributes of this one.
+
+
+ .. method:: configure_mock(**kwargs)
+
+ Set attributes on the mock through keyword arguments.
+
+ Attributes plus return values and side effects can be set on child
+ mocks using standard dot notation and unpacking a dictionary in the
+ method call:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock.configure_mock(**attrs)
+ >>> mock.method()
+ 3
+ >>> mock.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+ The same thing can be achieved in the constructor call to mocks:
+
+ .. doctest::
+
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock = Mock(some_attribute='eggs', **attrs)
+ >>> mock.some_attribute
+ 'eggs'
+ >>> mock.method()
+ 3
+ >>> mock.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+ `configure_mock` exists to make it easier to do configuration
+ after the mock has been created.
+
+
+ .. method:: __dir__()
+
+ `Mock` objects limit the results of `dir(some_mock)` to useful results.
+ For mocks with a `spec` this includes all the permitted attributes
+ for the mock.
+
+ See :data:`FILTER_DIR` for what this filtering does, and how to
+ switch it off.
+
+
+ .. method:: _get_child_mock(**kw)
+
+ Create the child mocks for attributes and return value.
+ By default child mocks will be the same type as the parent.
+ Subclasses of Mock may want to override this to customize the way
+ child mocks are made.
+
+ For non-callable mocks the callable variant will be used (rather than
+ any custom subclass).
+
+
+ .. attribute:: called
+
+ A boolean representing whether or not the mock object has been called:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock.called
+ False
+ >>> mock()
+ >>> mock.called
+ True
+
+ .. attribute:: call_count
+
+ An integer telling you how many times the mock object has been called:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock.call_count
+ 0
+ >>> mock()
+ >>> mock()
+ >>> mock.call_count
+ 2
+
+
+ .. attribute:: return_value
+
+ Set this to configure the value returned by calling the mock:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.return_value = 'fish'
+ >>> mock()
+ 'fish'
+
+ The default return value is a mock object and you can configure it in
+ the normal way:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.return_value.attribute = sentinel.Attribute
+ >>> mock.return_value()
+ <Mock name='mock()()' id='...'>
+ >>> mock.return_value.assert_called_with()
+
+ `return_value` can also be set in the constructor:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=3)
+ >>> mock.return_value
+ 3
+ >>> mock()
+ 3
+
+
+ .. attribute:: side_effect
+
+ This can either be a function to be called when the mock is called,
+ or an exception (class or instance) to be raised.
+
+ If you pass in a function it will be called with same arguments as the
+ mock and unless the function returns the :data:`DEFAULT` singleton the
+ call to the mock will then return whatever the function returns. If the
+ function returns :data:`DEFAULT` then the mock will return its normal
+        value (from the :attr:`return_value`).
+
+ An example of a mock that raises an exception (to test exception
+ handling of an API):
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.side_effect = Exception('Boom!')
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ Exception: Boom!
+
+ Using `side_effect` to return a sequence of values:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.side_effect = [3, 2, 1]
+ >>> mock(), mock(), mock()
+ (3, 2, 1)
+
+ The `side_effect` function is called with the same arguments as the
+ mock (so it is wise for it to take arbitrary args and keyword
+ arguments) and whatever it returns is used as the return value for
+ the call. The exception is if `side_effect` returns :data:`DEFAULT`,
+ in which case the normal :attr:`return_value` is used.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=3)
+ >>> def side_effect(*args, **kwargs):
+ ... return DEFAULT
+ ...
+ >>> mock.side_effect = side_effect
+ >>> mock()
+ 3
+
+ `side_effect` can be set in the constructor. Here's an example that
+ adds one to the value the mock is called with and returns it:
+
+ .. doctest::
+
+ >>> side_effect = lambda value: value + 1
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock(3)
+ 4
+ >>> mock(-8)
+ -7
+
+ Setting `side_effect` to `None` clears it:
+
+ .. doctest::
+
+ >>> from mock import Mock
+ >>> m = Mock(side_effect=KeyError, return_value=3)
+ >>> m()
+ Traceback (most recent call last):
+ ...
+ KeyError
+ >>> m.side_effect = None
+ >>> m()
+ 3
+
+
+ .. attribute:: call_args
+
+ This is either `None` (if the mock hasn't been called), or the
+ arguments that the mock was last called with. This will be in the
+ form of a tuple: the first member is any ordered arguments the mock
+ was called with (or an empty tuple) and the second member is any
+ keyword arguments (or an empty dictionary).
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> print mock.call_args
+ None
+ >>> mock()
+ >>> mock.call_args
+ call()
+ >>> mock.call_args == ()
+ True
+ >>> mock(3, 4)
+ >>> mock.call_args
+ call(3, 4)
+ >>> mock.call_args == ((3, 4),)
+ True
+ >>> mock(3, 4, 5, key='fish', next='w00t!')
+ >>> mock.call_args
+ call(3, 4, 5, key='fish', next='w00t!')
+
+ `call_args`, along with members of the lists :attr:`call_args_list`,
+ :attr:`method_calls` and :attr:`mock_calls` are :data:`call` objects.
+ These are tuples, so they can be unpacked to get at the individual
+ arguments and make more complex assertions. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: call_args_list
+
+ This is a list of all the calls made to the mock object in sequence
+ (so the length of the list is the number of times it has been
+ called). Before any calls have been made it is an empty list. The
+ :data:`call` object can be used for conveniently constructing lists of
+ calls to compare with `call_args_list`.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock()
+ >>> mock(3, 4)
+ >>> mock(key='fish', next='w00t!')
+ >>> mock.call_args_list
+ [call(), call(3, 4), call(key='fish', next='w00t!')]
+ >>> expected = [(), ((3, 4),), ({'key': 'fish', 'next': 'w00t!'},)]
+ >>> mock.call_args_list == expected
+ True
+
+ Members of `call_args_list` are :data:`call` objects. These can be
+ unpacked as tuples to get at the individual arguments. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: method_calls
+
+ As well as tracking calls to themselves, mocks also track calls to
+ methods and attributes, and *their* methods and attributes:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.method()
+ <Mock name='mock.method()' id='...'>
+ >>> mock.property.method.attribute()
+ <Mock name='mock.property.method.attribute()' id='...'>
+ >>> mock.method_calls
+ [call.method(), call.property.method.attribute()]
+
+ Members of `method_calls` are :data:`call` objects. These can be
+ unpacked as tuples to get at the individual arguments. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: mock_calls
+
+ `mock_calls` records *all* calls to the mock object, its methods, magic
+ methods *and* return value mocks.
+
+ .. doctest::
+
+ >>> mock = MagicMock()
+ >>> result = mock(1, 2, 3)
+ >>> mock.first(a=3)
+ <MagicMock name='mock.first()' id='...'>
+ >>> mock.second()
+ <MagicMock name='mock.second()' id='...'>
+ >>> int(mock)
+ 1
+ >>> result(1)
+ <MagicMock name='mock()()' id='...'>
+ >>> expected = [call(1, 2, 3), call.first(a=3), call.second(),
+ ... call.__int__(), call()(1)]
+ >>> mock.mock_calls == expected
+ True
+
+ Members of `mock_calls` are :data:`call` objects. These can be
+ unpacked as tuples to get at the individual arguments. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: __class__
+
+ Normally the `__class__` attribute of an object will return its type.
+ For a mock object with a `spec` `__class__` returns the spec class
+ instead. This allows mock objects to pass `isinstance` tests for the
+ object they are replacing / masquerading as:
+
+ .. doctest::
+
+ >>> mock = Mock(spec=3)
+ >>> isinstance(mock, int)
+ True
+
+ `__class__` is assignable to, this allows a mock to pass an
+ `isinstance` check without forcing you to use a spec:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.__class__ = dict
+ >>> isinstance(mock, dict)
+ True
+
+.. class:: NonCallableMock(spec=None, wraps=None, name=None, spec_set=None, **kwargs)
+
+ A non-callable version of `Mock`. The constructor parameters have the same
+ meaning of `Mock`, with the exception of `return_value` and `side_effect`
+ which have no meaning on a non-callable mock.
+
+Mock objects that use a class or an instance as a `spec` or `spec_set` are able
+to pass `isinstance` tests:
+
+.. doctest::
+
+ >>> mock = Mock(spec=SomeClass)
+ >>> isinstance(mock, SomeClass)
+ True
+ >>> mock = Mock(spec_set=SomeClass())
+ >>> isinstance(mock, SomeClass)
+ True
+
+The `Mock` classes have support for mocking magic methods. See :ref:`magic
+methods <magic-methods>` for the full details.
+
+The mock classes and the :func:`patch` decorators all take arbitrary keyword
+arguments for configuration. For the `patch` decorators the keywords are
+passed to the constructor of the mock being created. The keyword arguments
+are for configuring attributes of the mock:
+
+.. doctest::
+
+ >>> m = MagicMock(attribute=3, other='fish')
+ >>> m.attribute
+ 3
+ >>> m.other
+ 'fish'
+
+The return value and side effect of child mocks can be set in the same way,
+using dotted notation. As you can't use dotted names directly in a call you
+have to create a dictionary and unpack it using `**`:
+
+.. doctest::
+
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock = Mock(some_attribute='eggs', **attrs)
+ >>> mock.some_attribute
+ 'eggs'
+ >>> mock.method()
+ 3
+ >>> mock.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+
+.. class:: PropertyMock(*args, **kwargs)
+
+ A mock intended to be used as a property, or other descriptor, on a class.
+ `PropertyMock` provides `__get__` and `__set__` methods so you can specify
+ a return value when it is fetched.
+
+ Fetching a `PropertyMock` instance from an object calls the mock, with
+ no args. Setting it calls the mock with the value being set.
+
+ .. doctest::
+
+ >>> class Foo(object):
+ ... @property
+ ... def foo(self):
+ ... return 'something'
+ ... @foo.setter
+ ... def foo(self, value):
+ ... pass
+ ...
+ >>> with patch('__main__.Foo.foo', new_callable=PropertyMock) as mock_foo:
+ ... mock_foo.return_value = 'mockity-mock'
+ ... this_foo = Foo()
+ ... print this_foo.foo
+ ... this_foo.foo = 6
+ ...
+ mockity-mock
+ >>> mock_foo.mock_calls
+ [call(), call(6)]
+
+Because of the way mock attributes are stored you can't directly attach a
+`PropertyMock` to a mock object. Instead you can attach it to the mock type
+object:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> p = PropertyMock(return_value=3)
+ >>> type(m).foo = p
+ >>> m.foo
+ 3
+ >>> p.assert_called_once_with()
+
+
+.. index:: __call__
+.. index:: calling
+
+Calling
+=======
+
+Mock objects are callable. The call will return the value set as the
+:attr:`~Mock.return_value` attribute. The default return value is a new Mock
+object; it is created the first time the return value is accessed (either
+explicitly or by calling the Mock) - but it is stored and the same one
+returned each time.
+
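+A minimal illustration of this behaviour:
+
+.. doctest::
+
+    >>> mock = Mock()
+    >>> mock() is mock.return_value
+    True
+    >>> mock() is mock()
+    True
+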
+Calls made to the object will be recorded in the attributes
+like :attr:`~Mock.call_args` and :attr:`~Mock.call_args_list`.
+
+If :attr:`~Mock.side_effect` is set then it will be called after the call has
+been recorded, so if `side_effect` raises an exception the call is still
+recorded.
+
+The simplest way to make a mock raise an exception when called is to make
+:attr:`~Mock.side_effect` an exception class or instance:
+
+.. doctest::
+
+ >>> m = MagicMock(side_effect=IndexError)
+ >>> m(1, 2, 3)
+ Traceback (most recent call last):
+ ...
+ IndexError
+ >>> m.mock_calls
+ [call(1, 2, 3)]
+ >>> m.side_effect = KeyError('Bang!')
+ >>> m('two', 'three', 'four')
+ Traceback (most recent call last):
+ ...
+ KeyError: 'Bang!'
+ >>> m.mock_calls
+ [call(1, 2, 3), call('two', 'three', 'four')]
+
+If `side_effect` is a function then whatever that function returns is what
+calls to the mock return. The `side_effect` function is called with the
+same arguments as the mock. This allows you to vary the return value of the
+call dynamically, based on the input:
+
+.. doctest::
+
+ >>> def side_effect(value):
+ ... return value + 1
+ ...
+ >>> m = MagicMock(side_effect=side_effect)
+ >>> m(1)
+ 2
+ >>> m(2)
+ 3
+ >>> m.mock_calls
+ [call(1), call(2)]
+
+If you want the mock to still return the default return value (a new mock), or
+any set return value, then there are two ways of doing this. Either return
+`mock.return_value` from inside `side_effect`, or return :data:`DEFAULT`:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> def side_effect(*args, **kwargs):
+ ... return m.return_value
+ ...
+ >>> m.side_effect = side_effect
+ >>> m.return_value = 3
+ >>> m()
+ 3
+ >>> def side_effect(*args, **kwargs):
+ ... return DEFAULT
+ ...
+ >>> m.side_effect = side_effect
+ >>> m()
+ 3
+
+To remove a `side_effect`, and return to the default behaviour, set the
+`side_effect` to `None`:
+
+.. doctest::
+
+ >>> m = MagicMock(return_value=6)
+ >>> def side_effect(*args, **kwargs):
+ ... return 3
+ ...
+ >>> m.side_effect = side_effect
+ >>> m()
+ 3
+ >>> m.side_effect = None
+ >>> m()
+ 6
+
+The `side_effect` can also be any iterable object. Repeated calls to the mock
+will return values from the iterable (until the iterable is exhausted and
+a `StopIteration` is raised):
+
+.. doctest::
+
+ >>> m = MagicMock(side_effect=[1, 2, 3])
+ >>> m()
+ 1
+ >>> m()
+ 2
+ >>> m()
+ 3
+ >>> m()
+ Traceback (most recent call last):
+ ...
+ StopIteration
+
+If any members of the iterable are exceptions they will be raised instead of
+returned:
+
+.. doctest::
+
+ >>> iterable = (33, ValueError, 66)
+ >>> m = MagicMock(side_effect=iterable)
+ >>> m()
+ 33
+ >>> m()
+ Traceback (most recent call last):
+ ...
+ ValueError
+ >>> m()
+ 66
+
+
+.. _deleting-attributes:
+
+Deleting Attributes
+===================
+
+Mock objects create attributes on demand. This allows them to pretend to be
+objects of any type.
+
+You may want a mock object to return `False` to a `hasattr` call, or raise an
+`AttributeError` when an attribute is fetched. You can do this by providing
+an object as a `spec` for a mock, but that isn't always convenient.
+
+You "block" attributes by deleting them. Once deleted, accessing an attribute
+will raise an `AttributeError`.
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> hasattr(mock, 'm')
+ True
+ >>> del mock.m
+ >>> hasattr(mock, 'm')
+ False
+ >>> del mock.f
+ >>> mock.f
+ Traceback (most recent call last):
+ ...
+ AttributeError: f
+
+
+Attaching Mocks as Attributes
+=============================
+
+When you attach a mock as an attribute of another mock (or as the return
+value) it becomes a "child" of that mock. Calls to the child are recorded in
+the :attr:`~Mock.method_calls` and :attr:`~Mock.mock_calls` attributes of the
+parent. This is useful for configuring child mocks and then attaching them to
+the parent, or for attaching mocks to a parent that records all calls to the
+children and allows you to make assertions about the order of calls between
+mocks:
+
+.. doctest::
+
+ >>> parent = MagicMock()
+ >>> child1 = MagicMock(return_value=None)
+ >>> child2 = MagicMock(return_value=None)
+ >>> parent.child1 = child1
+ >>> parent.child2 = child2
+ >>> child1(1)
+ >>> child2(2)
+ >>> parent.mock_calls
+ [call.child1(1), call.child2(2)]
+
+The exception to this is if the mock has a name. This allows you to prevent
+the "parenting" if for some reason you don't want it to happen.
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> not_a_child = MagicMock(name='not-a-child')
+ >>> mock.attribute = not_a_child
+ >>> mock.attribute()
+ <MagicMock name='not-a-child()' id='...'>
+ >>> mock.mock_calls
+ []
+
+Mocks created for you by :func:`patch` are automatically given names. To
+attach mocks that have names to a parent you use the :meth:`~Mock.attach_mock`
+method:
+
+.. doctest::
+
+ >>> thing1 = object()
+ >>> thing2 = object()
+ >>> parent = MagicMock()
+ >>> with patch('__main__.thing1', return_value=None) as child1:
+ ... with patch('__main__.thing2', return_value=None) as child2:
+ ... parent.attach_mock(child1, 'child1')
+ ... parent.attach_mock(child2, 'child2')
+ ... child1('one')
+ ... child2('two')
+ ...
+ >>> parent.mock_calls
+ [call.child1('one'), call.child2('two')]
+
+
+-----
+
+.. [#] The only exceptions are magic methods and attributes (those that have
+ leading and trailing double underscores). Mock doesn't create these but
+   instead raises an ``AttributeError``. This is because the interpreter
+ will often implicitly request these methods, and gets *very* confused to
+ get a new Mock object when it expects a magic method. If you need magic
+ method support see :ref:`magic methods <magic-methods>`.
diff --git a/third_party/python/mock-1.0.0/docs/patch.txt b/third_party/python/mock-1.0.0/docs/patch.txt
new file mode 100644
index 0000000000..3d56264fbb
--- /dev/null
+++ b/third_party/python/mock-1.0.0/docs/patch.txt
@@ -0,0 +1,636 @@
+==================
+ Patch Decorators
+==================
+
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ class SomeClass(object):
+ static_method = None
+ class_method = None
+ attribute = None
+
+ sys.modules['package'] = package = Mock(name='package')
+ sys.modules['package.module'] = package.module
+
+ class TestCase(unittest2.TestCase):
+ def run(self):
+ result = unittest2.TestResult()
+ super(unittest2.TestCase, self).run(result)
+ assert result.wasSuccessful()
+
+.. testcleanup::
+
+ patch.TEST_PREFIX = 'test'
+
+
+The patch decorators are used for patching objects only within the scope of
+the function they decorate. They automatically handle the unpatching for you,
+even if exceptions are raised. All of these functions can also be used in with
+statements or as class decorators.
+
+
+patch
+=====
+
+.. note::
+
+ `patch` is straightforward to use. The key is to do the patching in the
+ right namespace. See the section `where to patch`_.
+
+.. function:: patch(target, new=DEFAULT, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ `patch` acts as a function decorator, class decorator or a context
+ manager. Inside the body of the function or with statement, the `target`
+ is patched with a `new` object. When the function/with statement exits
+ the patch is undone.
+
+ If `new` is omitted, then the target is replaced with a
+ :class:`MagicMock`. If `patch` is used as a decorator and `new` is
+ omitted, the created mock is passed in as an extra argument to the
+ decorated function. If `patch` is used as a context manager the created
+ mock is returned by the context manager.
+
+ `target` should be a string in the form `'package.module.ClassName'`. The
+ `target` is imported and the specified object replaced with the `new`
+ object, so the `target` must be importable from the environment you are
+ calling `patch` from. The target is imported when the decorated function
+ is executed, not at decoration time.
+
+ The `spec` and `spec_set` keyword arguments are passed to the `MagicMock`
+ if patch is creating one for you.
+
+ In addition you can pass `spec=True` or `spec_set=True`, which causes
+ patch to pass in the object being mocked as the spec/spec_set object.
+
+ `new_callable` allows you to specify a different class, or callable object,
+ that will be called to create the `new` object. By default `MagicMock` is
+ used.
+
+ A more powerful form of `spec` is `autospec`. If you set `autospec=True`
+    then the mock will be created with a spec from the object being replaced.
+ All attributes of the mock will also have the spec of the corresponding
+ attribute of the object being replaced. Methods and functions being mocked
+ will have their arguments checked and will raise a `TypeError` if they are
+ called with the wrong signature. For mocks
+ replacing a class, their return value (the 'instance') will have the same
+ spec as the class. See the :func:`create_autospec` function and
+ :ref:`auto-speccing`.
+
+ Instead of `autospec=True` you can pass `autospec=some_object` to use an
+ arbitrary object as the spec instead of the one being replaced.
+
+ By default `patch` will fail to replace attributes that don't exist. If
+ you pass in `create=True`, and the attribute doesn't exist, patch will
+ create the attribute for you when the patched function is called, and
+ delete it again afterwards. This is useful for writing tests against
+    attributes that your production code creates at runtime. It is off by
+ default because it can be dangerous. With it switched on you can write
+ passing tests against APIs that don't actually exist!
+
+ Patch can be used as a `TestCase` class decorator. It works by
+ decorating each test method in the class. This reduces the boilerplate
+ code when your test methods share a common patchings set. `patch` finds
+ tests by looking for method names that start with `patch.TEST_PREFIX`.
+ By default this is `test`, which matches the way `unittest` finds tests.
+ You can specify an alternative prefix by setting `patch.TEST_PREFIX`.
+
+ Patch can be used as a context manager, with the with statement. Here the
+ patching applies to the indented block after the with statement. If you
+ use "as" then the patched object will be bound to the name after the
+ "as"; very useful if `patch` is creating a mock object for you.
+
+ `patch` takes arbitrary keyword arguments. These will be passed to
+ the `Mock` (or `new_callable`) on construction.
+
+ `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
+ available for alternate use-cases.
+
+`patch` as function decorator, creating the mock for you and passing it into
+the decorated function:
+
+.. doctest::
+
+ >>> @patch('__main__.SomeClass')
+ ... def function(normal_argument, mock_class):
+ ... print mock_class is SomeClass
+ ...
+ >>> function(None)
+ True
+
+
+Patching a class replaces the class with a `MagicMock` *instance*. If the
+class is instantiated in the code under test then it will be the
+:attr:`~Mock.return_value` of the mock that will be used.
+
+If the class is instantiated multiple times you could use
+:attr:`~Mock.side_effect` to return a new mock each time. Alternatively you
+can set the `return_value` to be anything you want.
+
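+A minimal sketch of the `side_effect` approach (illustrative only; it uses a
+`MagicMock` directly rather than `patch`, but a patched class behaves the same
+way):
+
+.. doctest::
+
+    >>> MockClass = MagicMock(side_effect=lambda: MagicMock())
+    >>> MockClass() is MockClass()
+    False
+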
+To configure return values on methods of *instances* on the patched class
+you must do this on the `return_value`. For example:
+
+.. doctest::
+
+ >>> class Class(object):
+ ... def method(self):
+ ... pass
+ ...
+ >>> with patch('__main__.Class') as MockClass:
+ ... instance = MockClass.return_value
+ ... instance.method.return_value = 'foo'
+ ... assert Class() is instance
+ ... assert Class().method() == 'foo'
+ ...
+
+If you use `spec` or `spec_set` and `patch` is replacing a *class*, then the
+return value of the created mock will have the same spec.
+
+.. doctest::
+
+ >>> Original = Class
+ >>> patcher = patch('__main__.Class', spec=True)
+ >>> MockClass = patcher.start()
+ >>> instance = MockClass()
+ >>> assert isinstance(instance, Original)
+ >>> patcher.stop()
+
+The `new_callable` argument is useful where you want to use an alternative
+class to the default :class:`MagicMock` for the created mock. For example, if
+you wanted a :class:`NonCallableMock` to be used:
+
+.. doctest::
+
+ >>> thing = object()
+ >>> with patch('__main__.thing', new_callable=NonCallableMock) as mock_thing:
+ ... assert thing is mock_thing
+ ... thing()
+ ...
+ Traceback (most recent call last):
+ ...
+ TypeError: 'NonCallableMock' object is not callable
+
+Another use case might be to replace an object with a `StringIO` instance:
+
+.. doctest::
+
+ >>> from StringIO import StringIO
+ >>> def foo():
+ ... print 'Something'
+ ...
+ >>> @patch('sys.stdout', new_callable=StringIO)
+ ... def test(mock_stdout):
+ ... foo()
+ ... assert mock_stdout.getvalue() == 'Something\n'
+ ...
+ >>> test()
+
+When `patch` is creating a mock for you, it is common that the first thing
+you need to do is to configure the mock. Some of that configuration can be done
+in the call to patch. Any arbitrary keywords you pass into the call will be
+used to set attributes on the created mock:
+
+.. doctest::
+
+ >>> patcher = patch('__main__.thing', first='one', second='two')
+ >>> mock_thing = patcher.start()
+ >>> mock_thing.first
+ 'one'
+ >>> mock_thing.second
+ 'two'
+
+As well as attributes on the created mock, attributes like the
+:attr:`~Mock.return_value` and :attr:`~Mock.side_effect` of child mocks can
+also be configured. These aren't syntactically valid to pass in directly as
+keyword arguments, but a dictionary with these as keys can still be expanded
+into a `patch` call using `**`:
+
+.. doctest::
+
+ >>> config = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> patcher = patch('__main__.thing', **config)
+ >>> mock_thing = patcher.start()
+ >>> mock_thing.method()
+ 3
+ >>> mock_thing.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+
+patch.object
+============
+
+.. function:: patch.object(target, attribute, new=DEFAULT, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ patch the named member (`attribute`) on an object (`target`) with a mock
+ object.
+
+ `patch.object` can be used as a decorator, class decorator or a context
+ manager. Arguments `new`, `spec`, `create`, `spec_set`, `autospec` and
+ `new_callable` have the same meaning as for `patch`. Like `patch`,
+ `patch.object` takes arbitrary keyword arguments for configuring the mock
+ object it creates.
+
+ When used as a class decorator `patch.object` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
+
+You can either call `patch.object` with three arguments or two arguments. The
+three argument form takes the object to be patched, the attribute name and the
+object to replace the attribute with.
+
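+For example, a small sketch of the three argument form, temporarily replacing
+`class_method` with a plain string:
+
+.. doctest::
+
+    >>> @patch.object(SomeClass, 'class_method', 'frogs')
+    ... def test():
+    ...     assert SomeClass.class_method == 'frogs'
+    ...
+    >>> test()
+    >>> SomeClass.class_method is None
+    True
+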
+When calling with the two argument form you omit the replacement object, and a
+mock is created for you and passed in as an extra argument to the decorated
+function:
+
+.. doctest::
+
+ >>> @patch.object(SomeClass, 'class_method')
+ ... def test(mock_method):
+ ... SomeClass.class_method(3)
+ ... mock_method.assert_called_with(3)
+ ...
+ >>> test()
+
+`spec`, `create` and the other arguments to `patch.object` have the same
+meaning as they do for `patch`.
+
+
+patch.dict
+==========
+
+.. function:: patch.dict(in_dict, values=(), clear=False, **kwargs)
+
+ Patch a dictionary, or dictionary like object, and restore the dictionary
+ to its original state after the test.
+
+ `in_dict` can be a dictionary or a mapping like container. If it is a
+ mapping then it must at least support getting, setting and deleting items
+ plus iterating over keys.
+
+ `in_dict` can also be a string specifying the name of the dictionary, which
+ will then be fetched by importing it.
+
+ `values` can be a dictionary of values to set in the dictionary. `values`
+ can also be an iterable of `(key, value)` pairs.
+
+ If `clear` is True then the dictionary will be cleared before the new
+ values are set.
+
+ `patch.dict` can also be called with arbitrary keyword arguments to set
+ values in the dictionary.
+
+ `patch.dict` can be used as a context manager, decorator or class
+ decorator. When used as a class decorator `patch.dict` honours
+ `patch.TEST_PREFIX` for choosing which methods to wrap.
+
+`patch.dict` can be used to add members to a dictionary, or simply let a test
+change a dictionary, and ensure the dictionary is restored when the test
+ends.
+
+.. doctest::
+
+ >>> from mock import patch
+ >>> foo = {}
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == {}
+
+ >>> import os
+ >>> with patch.dict('os.environ', {'newkey': 'newvalue'}):
+ ... print os.environ['newkey']
+ ...
+ newvalue
+ >>> assert 'newkey' not in os.environ
+
+Keywords can be used in the `patch.dict` call to set values in the dictionary:
+
+.. doctest::
+
+ >>> mymodule = MagicMock()
+ >>> mymodule.function.return_value = 'fish'
+ >>> with patch.dict('sys.modules', mymodule=mymodule):
+ ... import mymodule
+ ... mymodule.function('some', 'args')
+ ...
+ 'fish'
+
+`patch.dict` can be used with dictionary like objects that aren't actually
+dictionaries. At the very minimum they must support item getting, setting,
+deleting and either iteration or membership test. This corresponds to the
+magic methods `__getitem__`, `__setitem__`, `__delitem__` and either
+`__iter__` or `__contains__`.
+
+.. doctest::
+
+ >>> class Container(object):
+ ... def __init__(self):
+ ... self.values = {}
+ ... def __getitem__(self, name):
+ ... return self.values[name]
+ ... def __setitem__(self, name, value):
+ ... self.values[name] = value
+ ... def __delitem__(self, name):
+ ... del self.values[name]
+ ... def __iter__(self):
+ ... return iter(self.values)
+ ...
+ >>> thing = Container()
+ >>> thing['one'] = 1
+ >>> with patch.dict(thing, one=2, two=3):
+ ... assert thing['one'] == 2
+ ... assert thing['two'] == 3
+ ...
+ >>> assert thing['one'] == 1
+ >>> assert list(thing) == ['one']
+
+
+patch.multiple
+==============
+
+.. function:: patch.multiple(target, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ Perform multiple patches in a single call. It takes the object to be
+ patched (either as an object or a string to fetch the object by importing)
+ and keyword arguments for the patches::
+
+ with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'):
+ ...
+
+ Use :data:`DEFAULT` as the value if you want `patch.multiple` to create
+ mocks for you. In this case the created mocks are passed into a decorated
+ function by keyword, and a dictionary is returned when `patch.multiple` is
+ used as a context manager.
+
+ `patch.multiple` can be used as a decorator, class decorator or a context
+ manager. The arguments `spec`, `spec_set`, `create`, `autospec` and
+ `new_callable` have the same meaning as for `patch`. These arguments will
+ be applied to *all* patches done by `patch.multiple`.
+
+ When used as a class decorator `patch.multiple` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
+
+If you want `patch.multiple` to create mocks for you, then you can use
+:data:`DEFAULT` as the value. If you use `patch.multiple` as a decorator
+then the created mocks are passed into the decorated function by keyword.
+
+.. doctest::
+
+ >>> thing = object()
+ >>> other = object()
+
+ >>> @patch.multiple('__main__', thing=DEFAULT, other=DEFAULT)
+ ... def test_function(thing, other):
+ ... assert isinstance(thing, MagicMock)
+ ... assert isinstance(other, MagicMock)
+ ...
+ >>> test_function()
+
+`patch.multiple` can be nested with other `patch` decorators, but put arguments
+passed by keyword *after* any of the standard arguments created by `patch`:
+
+.. doctest::
+
+ >>> @patch('sys.exit')
+ ... @patch.multiple('__main__', thing=DEFAULT, other=DEFAULT)
+ ... def test_function(mock_exit, other, thing):
+ ... assert 'other' in repr(other)
+ ... assert 'thing' in repr(thing)
+ ... assert 'exit' in repr(mock_exit)
+ ...
+ >>> test_function()
+
+If `patch.multiple` is used as a context manager, the value returned by the
+context manager is a dictionary where created mocks are keyed by name:
+
+.. doctest::
+
+ >>> with patch.multiple('__main__', thing=DEFAULT, other=DEFAULT) as values:
+ ... assert 'other' in repr(values['other'])
+ ... assert 'thing' in repr(values['thing'])
+ ... assert values['thing'] is thing
+ ... assert values['other'] is other
+ ...
+
+
+.. _start-and-stop:
+
+patch methods: start and stop
+=============================
+
+All the patchers have `start` and `stop` methods. These make it simpler to do
+patching in `setUp` methods or where you want to do multiple patches without
+nesting decorators or with statements.
+
+To use them call `patch`, `patch.object` or `patch.dict` as normal and keep a
+reference to the returned `patcher` object. You can then call `start` to put
+the patch in place and `stop` to undo it.
+
+If you are using `patch` to create a mock for you then it will be returned by
+the call to `patcher.start`.
+
+.. doctest::
+
+ >>> patcher = patch('package.module.ClassName')
+ >>> from package import module
+ >>> original = module.ClassName
+ >>> new_mock = patcher.start()
+ >>> assert module.ClassName is not original
+ >>> assert module.ClassName is new_mock
+ >>> patcher.stop()
+ >>> assert module.ClassName is original
+ >>> assert module.ClassName is not new_mock
+
+
+A typical use case for this might be for doing multiple patches in the `setUp`
+method of a `TestCase`:
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... self.patcher1 = patch('package.module.Class1')
+ ... self.patcher2 = patch('package.module.Class2')
+ ... self.MockClass1 = self.patcher1.start()
+ ... self.MockClass2 = self.patcher2.start()
+ ...
+ ... def tearDown(self):
+ ... self.patcher1.stop()
+ ... self.patcher2.stop()
+ ...
+ ... def test_something(self):
+ ... assert package.module.Class1 is self.MockClass1
+ ... assert package.module.Class2 is self.MockClass2
+ ...
+ >>> MyTest('test_something').run()
+
+.. caution::
+
+ If you use this technique you must ensure that the patching is "undone" by
+ calling `stop`. This can be fiddlier than you might think, because if an
+ exception is raised in the setUp then tearDown is not called. `unittest2
+ <http://pypi.python.org/pypi/unittest2>`_ cleanup functions make this
+ easier.
+
+ .. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... patcher = patch('package.module.Class')
+ ... self.MockClass = patcher.start()
+ ... self.addCleanup(patcher.stop)
+ ...
+ ... def test_something(self):
+ ... assert package.module.Class is self.MockClass
+ ...
+ >>> MyTest('test_something').run()
+
+ As an added bonus you no longer need to keep a reference to the `patcher`
+ object.
+
+It is also possible to stop all patches which have been started by using
+`patch.stopall`.
+
+.. function:: patch.stopall
+
+ Stop all active patches. Only stops patches started with `start`.
+
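+A minimal sketch, reusing the `package.module` mock from the examples above:
+
+.. doctest::
+
+    >>> patcher1 = patch('package.module.Class1')
+    >>> patcher2 = patch('package.module.Class2')
+    >>> MockClass1 = patcher1.start()
+    >>> MockClass2 = patcher2.start()
+    >>> assert package.module.Class1 is MockClass1
+    >>> assert package.module.Class2 is MockClass2
+    >>> patch.stopall()
+    >>> assert package.module.Class1 is not MockClass1
+    >>> assert package.module.Class2 is not MockClass2
+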
+
+TEST_PREFIX
+===========
+
+All of the patchers can be used as class decorators. When used in this way
+they wrap every test method on the class. The patchers recognise methods that
+start with `test` as being test methods. This is the same way that the
+`unittest.TestLoader` finds test methods by default.
+
+It is possible that you want to use a different prefix for your tests. You can
+inform the patchers of the different prefix by setting `patch.TEST_PREFIX`:
+
+.. doctest::
+
+ >>> patch.TEST_PREFIX = 'foo'
+ >>> value = 3
+ >>>
+ >>> @patch('__main__.value', 'not three')
+ ... class Thing(object):
+ ... def foo_one(self):
+ ... print value
+ ... def foo_two(self):
+ ... print value
+ ...
+ >>>
+ >>> Thing().foo_one()
+ not three
+ >>> Thing().foo_two()
+ not three
+ >>> value
+ 3
+
+
+Nesting Patch Decorators
+========================
+
+If you want to perform multiple patches then you can simply stack up the
+decorators, using this pattern:
+
+.. doctest::
+
+ >>> @patch.object(SomeClass, 'class_method')
+ ... @patch.object(SomeClass, 'static_method')
+ ... def test(mock1, mock2):
+ ... assert SomeClass.static_method is mock1
+ ... assert SomeClass.class_method is mock2
+ ... SomeClass.static_method('foo')
+ ... SomeClass.class_method('bar')
+ ... return mock1, mock2
+ ...
+ >>> mock1, mock2 = test()
+ >>> mock1.assert_called_once_with('foo')
+ >>> mock2.assert_called_once_with('bar')
+
+
+Note that the decorators are applied from the bottom upwards. This is the
+standard way that Python applies decorators. The order of the created mocks
+passed into your test function matches this order.
+
+Like all context managers, patches can be nested using contextlib's `nested`
+function; *every* patching will appear in the tuple after "as":
+
+.. doctest::
+
+ >>> from contextlib import nested
+ >>> with nested(
+ ... patch('package.module.ClassName1'),
+ ... patch('package.module.ClassName2')
+ ... ) as (MockClass1, MockClass2):
+ ... assert package.module.ClassName1 is MockClass1
+ ... assert package.module.ClassName2 is MockClass2
+ ...
+
+
+.. _where-to-patch:
+
+Where to patch
+==============
+
+`patch` works by (temporarily) changing the object that a *name* points to with
+another one. There can be many names pointing to any individual object, so
+for patching to work you must ensure that you patch the name used by the system
+under test.
+
+The basic principle is that you patch where an object is *looked up*, which
+is not necessarily the same place as where it is defined. A couple of
+examples will help to clarify this.
+
+Imagine we have a project that we want to test with the following structure::
+
+ a.py
+ -> Defines SomeClass
+
+ b.py
+ -> from a import SomeClass
+ -> some_function instantiates SomeClass
+
+Now we want to test `some_function` but we want to mock out `SomeClass` using
+`patch`. The problem is that when we import module b, which we will have to
+do in order to test `some_function`, it imports `SomeClass` from module a. If
+we use `patch` to mock out
+`a.SomeClass` then it will have no effect on our test; module b already has a
+reference to the *real* `SomeClass` and it looks like our patching had no
+effect.
+
+The key is to patch out `SomeClass` where it is used (or where it is looked
+up). In this case `some_function` will actually look up `SomeClass` in module b,
+where we have imported it. The patching should look like:
+
+ `@patch('b.SomeClass')`
+
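+A hypothetical test for this first scenario might look like the following
+sketch (it assumes `some_function` simply instantiates `SomeClass` with no
+arguments)::
+
+    from mock import patch
+    import b
+
+    @patch('b.SomeClass')
+    def test_some_function(MockSomeClass):
+        b.some_function()
+        MockSomeClass.assert_called_once_with()
+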
+However, consider the alternative scenario where instead of `from a import
+SomeClass` module b does `import a` and `some_function` uses `a.SomeClass`. Both
+of these import forms are common. In this case the class we want to patch is
+being looked up on the a module and so we have to patch `a.SomeClass` instead:
+
+ `@patch('a.SomeClass')`
+
+
+Patching Descriptors and Proxy Objects
+======================================
+
+Since version 0.6.0 both patch_ and patch.object_ have been able to correctly
+patch and restore descriptors: class methods, static methods and properties.
+You should patch these on the *class* rather than an instance.
+
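+For example, a small sketch of patching a property on the class (the `Sample`
+class here is purely illustrative):
+
+.. doctest::
+
+    >>> class Sample(object):
+    ...     @property
+    ...     def value(self):
+    ...         return 'real'
+    ...
+    >>> with patch.object(Sample, 'value', 'patched'):
+    ...     assert Sample().value == 'patched'
+    ...
+    >>> Sample().value
+    'real'
+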
+Since version 0.7.0 patch_ and patch.object_ work correctly with some objects
+that proxy attribute access, like the `django settings object
+<http://www.voidspace.org.uk/python/weblog/arch_d7_2010_12_04.shtml#e1198>`_.
+
+.. note::
+
+ In django `import settings` and `from django.conf import settings`
+ return different objects. If you are using libraries / apps that do both you
+ may have to patch both. Grrr...
diff --git a/third_party/python/mock-1.0.0/docs/sentinel.txt b/third_party/python/mock-1.0.0/docs/sentinel.txt
new file mode 100644
index 0000000000..1c5223da0e
--- /dev/null
+++ b/third_party/python/mock-1.0.0/docs/sentinel.txt
@@ -0,0 +1,58 @@
+==========
+ Sentinel
+==========
+
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ class ProductionClass(object):
+ def something(self):
+ return self.method()
+
+ class Test(unittest2.TestCase):
+ def testSomething(self):
+ pass
+ self = Test('testSomething')
+
+
+.. data:: sentinel
+
+ The ``sentinel`` object provides a convenient way of providing unique
+ objects for your tests.
+
+ Attributes are created on demand when you access them by name. Accessing
+ the same attribute will always return the same object. The objects
+ returned have a sensible repr so that test failure messages are readable.
+
+
+.. data:: DEFAULT
+
+ The `DEFAULT` object is a pre-created sentinel (actually
+ `sentinel.DEFAULT`). It can be used by :attr:`~Mock.side_effect`
+ functions to indicate that the normal return value should be used.
+
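+For example, `DEFAULT` is itself just a sentinel attribute:
+
+.. doctest::
+
+    >>> DEFAULT is sentinel.DEFAULT
+    True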
+
+Sentinel Example
+================
+
+Sometimes when testing you need to test that a specific object is passed as an
+argument to another method, or returned. It can be common to create named
+sentinel objects to test this. `sentinel` provides a convenient way of
+creating and testing the identity of objects like this.
+
+In this example we monkey patch `method` to return
+`sentinel.some_object`:
+
+.. doctest::
+
+ >>> real = ProductionClass()
+ >>> real.method = Mock(name="method")
+ >>> real.method.return_value = sentinel.some_object
+ >>> result = real.method()
+ >>> assert result is sentinel.some_object
+ >>> sentinel.some_object
+ sentinel.some_object
+
+
diff --git a/third_party/python/mock-1.0.0/html/.doctrees/changelog.doctree b/third_party/python/mock-1.0.0/html/.doctrees/changelog.doctree
new file mode 100644
index 0000000000..6356303021
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/.doctrees/changelog.doctree
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/.doctrees/compare.doctree b/third_party/python/mock-1.0.0/html/.doctrees/compare.doctree
new file mode 100644
index 0000000000..2a961fce4a
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/.doctrees/compare.doctree
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/.doctrees/examples.doctree b/third_party/python/mock-1.0.0/html/.doctrees/examples.doctree
new file mode 100644
index 0000000000..40e4fded53
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/.doctrees/examples.doctree
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/.doctrees/getting-started.doctree b/third_party/python/mock-1.0.0/html/.doctrees/getting-started.doctree
new file mode 100644
index 0000000000..ba82a89984
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/.doctrees/getting-started.doctree
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/.doctrees/index.doctree b/third_party/python/mock-1.0.0/html/.doctrees/index.doctree
new file mode 100644
index 0000000000..4c53f11a5a
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/.doctrees/index.doctree
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/.doctrees/magicmock.doctree b/third_party/python/mock-1.0.0/html/.doctrees/magicmock.doctree
new file mode 100644
index 0000000000..d57214e989
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/.doctrees/magicmock.doctree
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/.doctrees/mock.doctree b/third_party/python/mock-1.0.0/html/.doctrees/mock.doctree
new file mode 100644
index 0000000000..44dc3043ce
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/.doctrees/mock.doctree
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/.doctrees/mocksignature.doctree b/third_party/python/mock-1.0.0/html/.doctrees/mocksignature.doctree
new file mode 100644
index 0000000000..58313c2cf7
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/.doctrees/mocksignature.doctree
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/.doctrees/patch.doctree b/third_party/python/mock-1.0.0/html/.doctrees/patch.doctree
new file mode 100644
index 0000000000..100ef505db
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/.doctrees/patch.doctree
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/.doctrees/sentinel.doctree b/third_party/python/mock-1.0.0/html/.doctrees/sentinel.doctree
new file mode 100644
index 0000000000..e0312ed607
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/.doctrees/sentinel.doctree
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_sources/changelog.txt b/third_party/python/mock-1.0.0/html/_sources/changelog.txt
new file mode 100644
index 0000000000..a605be3d97
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_sources/changelog.txt
@@ -0,0 +1,725 @@
+.. currentmodule:: mock
+
+
+CHANGELOG
+=========
+
+2012/10/07 Version 1.0.0
+------------------------
+
+No changes since 1.0.0 beta 1. This version has feature parity with
+`unittest.mock
+<http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock>`_
+in Python 3.3.
+
+Full list of changes since 0.8:
+
+* `mocksignature`, along with the `mocksignature` argument to `patch`, removed
+* Support for deleting attributes (accessing deleted attributes will raise an
+ `AttributeError`)
+* Added the `mock_open` helper function for mocking the builtin `open`
+* `__class__` is assignable, so a mock can pass an `isinstance` check without
+ requiring a spec
+* Addition of `PropertyMock`, for mocking properties
+* `MagicMocks` made unorderable by default (in Python 3). The comparison
+ methods (other than equality and inequality) now return `NotImplemented`
+* Propagate traceback info to support subclassing of `_patch` by other
+ libraries
+* `create_autospec` works with attributes present in results of `dir` that
+ can't be fetched from the object's class. Contributed by Konstantine Rybnikov
+* Any exceptions in an iterable `side_effect` will be raised instead of
+ returned
+* In Python 3, `create_autospec` now supports keyword only arguments
+* Added `patch.stopall` method to stop all active patches created by `start`
+* BUGFIX: calling `MagicMock.reset_mock` wouldn't reset magic method mocks
+* BUGFIX: calling `reset_mock` on a `MagicMock` created with autospec could
+ raise an exception
+* BUGFIX: passing multiple spec arguments to patchers (`spec`, `spec_set` and
+ `autospec`) had unpredictable results, now it is an error
+* BUGFIX: using `spec=True` *and* `create=True` as arguments to patchers could
+ result in using `DEFAULT` as the spec. Now it is an error instead
+* BUGFIX: using `spec` or `autospec` arguments to patchers, along with
+ `spec_set=True` did not work correctly
+* BUGFIX: using an object that evaluates to False as a spec could be ignored
+* BUGFIX: a list as the `spec` argument to a patcher would always result in a
+ non-callable mock. Now if `__call__` is in the spec the mock is callable
+
+
+2012/07/13 Version 1.0.0 beta 1
+--------------------------------
+
+* Added `patch.stopall` method to stop all active patches created by `start`
+* BUGFIX: calling `MagicMock.reset_mock` wouldn't reset magic method mocks
+* BUGFIX: calling `reset_mock` on a `MagicMock` created with autospec could
+ raise an exception
+
+
+2012/05/04 Version 1.0.0 alpha 2
+--------------------------------
+
+* `PropertyMock` attributes are now standard `MagicMocks`
+* `create_autospec` works with attributes present in results of `dir` that
+ can't be fetched from the object's class. Contributed by Konstantine Rybnikov
+* Any exceptions in an iterable `side_effect` will be raised instead of
+ returned
+* In Python 3, `create_autospec` now supports keyword only arguments
+
+
+2012/03/25 Version 1.0.0 alpha 1
+--------------------------------
+
+The standard library version!
+
+* `mocksignature`, along with the `mocksignature` argument to `patch`, removed
+* Support for deleting attributes (accessing deleted attributes will raise an
+ `AttributeError`)
+* Added the `mock_open` helper function for mocking the builtin `open`
+* `__class__` is assignable, so a mock can pass an `isinstance` check without
+ requiring a spec
+* Addition of `PropertyMock`, for mocking properties
+* `MagicMocks` made unorderable by default (in Python 3). The comparison
+ methods (other than equality and inequality) now return `NotImplemented`
+* Propagate traceback info to support subclassing of `_patch` by other
+ libraries
+* BUGFIX: passing multiple spec arguments to patchers (`spec`, `spec_set` and
+ `autospec`) had unpredictable results, now it is an error
+* BUGFIX: using `spec=True` *and* `create=True` as arguments to patchers could
+ result in using `DEFAULT` as the spec. Now it is an error instead
+* BUGFIX: using `spec` or `autospec` arguments to patchers, along with
+ `spec_set=True` did not work correctly
+* BUGFIX: using an object that evaluates to False as a spec could be ignored
+* BUGFIX: a list as the `spec` argument to a patcher would always result in a
+ non-callable mock. Now if `__call__` is in the spec the mock is callable
+
+
+2012/02/13 Version 0.8.0
+------------------------
+
+The only changes since 0.8rc2 are:
+
+* Improved repr of :data:`sentinel` objects
+* :data:`ANY` can be used for comparisons against :data:`call` objects
+* The return value of the `MagicMock.__iter__` method can be set to
+ any iterable and isn't required to be an iterator
+
+Full list of changes since 0.7:
+
+mock 0.8.0 is the last version that will support Python 2.4.
+
+* Addition of :attr:`~Mock.mock_calls` list for *all* calls (including magic
+ methods and chained calls)
+* :func:`patch` and :func:`patch.object` now create a :class:`MagicMock`
+ instead of a :class:`Mock` by default
+* The patchers (`patch`, `patch.object` and `patch.dict`), plus `Mock` and
+ `MagicMock`, take arbitrary keyword arguments for configuration
+* New mock method :meth:`~Mock.configure_mock` for setting attributes and
+ return values / side effects on the mock and its attributes
+* New mock assert methods :meth:`~Mock.assert_any_call` and
+ :meth:`~Mock.assert_has_calls`
+* Implemented :ref:`auto-speccing` (recursive, lazy speccing of mocks with
+ mocked signatures for functions/methods), as the `autospec` argument to
+ `patch`
+* Added the :func:`create_autospec` function for manually creating
+ 'auto-specced' mocks
+* :func:`patch.multiple` for doing multiple patches in a single call, using
+ keyword arguments
+* Setting :attr:`~Mock.side_effect` to an iterable will cause calls to the mock
+ to return the next value from the iterable
+* New `new_callable` argument to `patch` and `patch.object` allowing you to
+ pass in a class or callable object (instead of `MagicMock`) that will be
+ called to replace the object being patched
+* Addition of :class:`NonCallableMock` and :class:`NonCallableMagicMock`, mocks
+ without a `__call__` method
+* Addition of :meth:`~Mock.mock_add_spec` method for adding (or changing) a
+ spec on an existing mock
+* Protocol methods on :class:`MagicMock` are magic mocks, and are created
+ lazily on first lookup. This means the result of calling a protocol method is
+ a `MagicMock` instead of a `Mock` as it was previously
+* Addition of :meth:`~Mock.attach_mock` method
+* Added :data:`ANY` for ignoring arguments in :meth:`~Mock.assert_called_with`
+ calls
+* Addition of :data:`call` helper object
+* Improved repr for mocks
+* Improved repr for :attr:`Mock.call_args` and entries in
+ :attr:`Mock.call_args_list`, :attr:`Mock.method_calls` and
+ :attr:`Mock.mock_calls`
+* Improved repr for :data:`sentinel` objects
+* `patch` lookup is done at use time not at decoration time
+* In Python 2.6 or more recent, `dir` on a mock will report all the dynamically
+ created attributes (or the full list of attributes if there is a spec) as
+ well as all the mock methods and attributes.
+* Module level :data:`FILTER_DIR` added to control whether `dir(mock)` filters
+ private attributes. `True` by default.
+* `patch.TEST_PREFIX` for controlling how patchers recognise test methods when
+ used to decorate a class
+* Support for using Java exceptions as a :attr:`~Mock.side_effect` on Jython
+* `Mock` call lists (`call_args_list`, `method_calls` & `mock_calls`) are now
+ custom list objects that allow membership tests for "sub lists" and have
+ a nicer representation if you `str` or `print` them
+* Mocks attached as attributes or return values to other mocks have calls
+ recorded in `method_calls` and `mock_calls` of the parent (unless a name is
+ already set on the child)
+* Improved failure messages for `assert_called_with` and
+ `assert_called_once_with`
+* The return value of the :class:`MagicMock` `__iter__` method can be set to
+ any iterable and isn't required to be an iterator
+* Added the Mock API (`assert_called_with` etc) to functions created by
+ :func:`mocksignature`
+* Tuples as well as lists can be used to specify allowed methods for `spec` &
+ `spec_set` arguments
+* Calling `stop` on an unstarted patcher fails with a more meaningful error
+ message
+* Renamed the internal classes `Sentinel` and `SentinelObject` to prevent abuse
+* BUGFIX: an error creating a patch, with nested patch decorators, won't leave
+ patches in place
+* BUGFIX: `__truediv__` and `__rtruediv__` not available as magic methods on
+ mocks in Python 3
+* BUGFIX: `assert_called_with` / `assert_called_once_with` can be used with
+ `self` as a keyword argument
+* BUGFIX: when patching a class with an explicit spec / spec_set (not a
+ boolean) it applies "spec inheritance" to the return value of the created
+ mock (the "instance")
+* BUGFIX: remove the `__unittest` marker causing traceback truncation
+* Removal of deprecated `patch_object`
+* Private attributes `_name`, `_methods`, `_children`, `_wraps` and `_parent`
+ (etc) renamed to reduce likelihood of clash with user attributes.
+* Added license file to the distribution
+
+
+2012/01/10 Version 0.8.0 release candidate 2
+--------------------------------------------
+
+* Removed the `configure` keyword argument to `create_autospec` and allow
+ arbitrary keyword arguments (for the `Mock` constructor) instead
+* Fixed `ANY` equality with some types in `assert_called_with` calls
+* Switched to a standard Sphinx theme (compatible with
+ `readthedocs.org <http://mock.readthedocs.org>`_)
+
+
+2011/12/29 Version 0.8.0 release candidate 1
+--------------------------------------------
+
+* `create_autospec` on the return value of a mocked class will use `__call__`
+ for the signature rather than `__init__`
+* Performance improvement instantiating `Mock` and `MagicMock`
+* Mocks used as magic methods have the same type as their parent instead of
+ being hardcoded to `MagicMock`
+
+Special thanks to Julian Berman for his help with diagnosing and improving
+performance in this release.
+
+
+2011/10/09 Version 0.8.0 beta 4
+-------------------------------
+
+* `patch` lookup is done at use time not at decoration time
+* When attaching a Mock to another Mock as a magic method, calls are recorded
+ in mock_calls
+* Addition of `attach_mock` method
+* Renamed the internal classes `Sentinel` and `SentinelObject` to prevent abuse
+* BUGFIX: various issues around circular references with mocks (setting a mock
+ return value to be itself etc)
+
+
+2011/08/15 Version 0.8.0 beta 3
+-------------------------------
+
+* Mocks attached as attributes or return values to other mocks have calls
+ recorded in `method_calls` and `mock_calls` of the parent (unless a name is
+ already set on the child)
+* Addition of `mock_add_spec` method for adding (or changing) a spec on an
+ existing mock
+* Improved repr for `Mock.call_args` and entries in `Mock.call_args_list`,
+ `Mock.method_calls` and `Mock.mock_calls`
+* Improved repr for mocks
+* BUGFIX: minor fixes in the way `mock_calls` is worked out,
+ especially for "intermediate" mocks in a call chain
+
+
+2011/08/05 Version 0.8.0 beta 2
+-------------------------------
+
+* Setting `side_effect` to an iterable will cause calls to the mock to return
+ the next value from the iterable
+* Added `assert_any_call` method
+* Moved `assert_has_calls` from call lists onto mocks
+* BUGFIX: `call_args` and all members of `call_args_list` are two tuples of
+ `(args, kwargs)` again instead of three tuples of `(name, args, kwargs)`
+
+
+2011/07/25 Version 0.8.0 beta 1
+-------------------------------
+
+* `patch.TEST_PREFIX` for controlling how patchers recognise test methods when
+ used to decorate a class
+* `Mock` call lists (`call_args_list`, `method_calls` & `mock_calls`) are now
+ custom list objects that allow membership tests for "sub lists" and have
+ an `assert_has_calls` method for unordered call checks
+* `callargs` changed to *always* be a three-tuple of `(name, args, kwargs)`
+* Addition of `mock_calls` list for *all* calls (including magic methods and
+ chained calls)
+* Extension of `call` object to support chained calls and `callargs` for better
+ comparisons with or without names. `call` object has a `call_list` method for
+ chained calls
+* Added the public `instance` argument to `create_autospec`
+* Support for using Java exceptions as a `side_effect` on Jython
+* Improved failure messages for `assert_called_with` and
+ `assert_called_once_with`
+* Tuples as well as lists can be used to specify allowed methods for `spec` &
+ `spec_set` arguments
+* BUGFIX: Fixed bug in `patch.multiple` for argument passing when creating
+ mocks
+* Added license file to the distribution
+
+
+2011/07/16 Version 0.8.0 alpha 2
+--------------------------------
+
+* `patch.multiple` for doing multiple patches in a single call, using keyword
+ arguments
+* New `new_callable` argument to `patch` and `patch.object` allowing you to
+ pass in a class or callable object (instead of `MagicMock`) that will be
+ called to replace the object being patched
+* Addition of `NonCallableMock` and `NonCallableMagicMock`, mocks without a
+ `__call__` method
+* Mocks created by `patch` have a `MagicMock` as the `return_value` where a
+ class is being patched
+* `create_autospec` can create non-callable mocks for non-callable objects.
+ `return_value` mocks of classes will be non-callable unless the class has
+ a `__call__` method
+* `autospec` creates a `MagicMock` without a spec for properties and slot
+ descriptors, because we don't know the type of object they return
+* Removed the "inherit" argument from `create_autospec`
+* Calling `stop` on an unstarted patcher fails with a more meaningful error
+ message
+* BUGFIX: an error creating a patch, with nested patch decorators, won't leave
+ patches in place
+* BUGFIX: `__truediv__` and `__rtruediv__` not available as magic methods on
+ mocks in Python 3
+* BUGFIX: `assert_called_with` / `assert_called_once_with` can be used with
+ `self` as a keyword argument
+* BUGFIX: autospec for functions / methods with an argument named self that
+ isn't the first argument no longer broken
+* BUGFIX: when patching a class with an explicit spec / spec_set (not a
+ boolean) it applies "spec inheritance" to the return value of the created
+ mock (the "instance")
+* BUGFIX: remove the `__unittest` marker causing traceback truncation
+
+
+2011/06/14 Version 0.8.0 alpha 1
+--------------------------------
+
+mock 0.8.0 is the last version that will support Python 2.4.
+
+* The patchers (`patch`, `patch.object` and `patch.dict`), plus `Mock` and
+ `MagicMock`, take arbitrary keyword arguments for configuration
+* New mock method `configure_mock` for setting attributes and return values /
+ side effects on the mock and its attributes
+* In Python 2.6 or more recent, `dir` on a mock will report all the dynamically
+ created attributes (or the full list of attributes if there is a spec) as
+ well as all the mock methods and attributes.
+* Module level `FILTER_DIR` added to control whether `dir(mock)` filters
+ private attributes. `True` by default. Note that `vars(Mock())` can still be
+  used to get all instance attributes and `dir(type(Mock()))` will still return
+ all the other attributes (irrespective of `FILTER_DIR`)
+* `patch` and `patch.object` now create a `MagicMock` instead of a `Mock` by
+ default
+* Added `ANY` for ignoring arguments in `assert_called_with` calls
+* Addition of `call` helper object
+* Protocol methods on `MagicMock` are magic mocks, and are created lazily on
+ first lookup. This means the result of calling a protocol method is a
+ MagicMock instead of a Mock as it was previously
+* Added the Mock API (`assert_called_with` etc) to functions created by
+ `mocksignature`
+* Private attributes `_name`, `_methods`, `_children`, `_wraps` and `_parent`
+ (etc) renamed to reduce likelihood of clash with user attributes.
+* Implemented auto-speccing (recursive, lazy speccing of mocks with mocked
+ signatures for functions/methods)
+
+ Limitations:
+
+ - Doesn't mock magic methods or attributes (it creates MagicMocks, so the
+ magic methods are *there*, they just don't have the signature mocked nor
+ are attributes followed)
+ - Doesn't mock function / method attributes
+ - Uses object traversal on the objects being mocked to determine types - so
+ properties etc may be triggered
+ - The return value of mocked classes (the 'instance') has the same call
+ signature as the class __init__ (as they share the same spec)
+
+ You create auto-specced mocks by passing `autospec=True` to `patch`.
+
+ Note that attributes that are None are special cased and mocked without a
+ spec (so any attribute / method can be used). This is because None is
+ typically used as a default value for attributes that may be of some other
+ type, and as we don't know what type that may be we allow all access.
+
+ Note that the `autospec` option to `patch` obsoletes the `mocksignature`
+ option.
+
+* Added the `create_autospec` function for manually creating 'auto-specced'
+ mocks
+* Removal of deprecated `patch_object`
+
+
+2011/05/30 Version 0.7.2
+------------------------
+
+* BUGFIX: instances of list subclasses can now be used as mock specs
+* BUGFIX: MagicMock equality / inequality protocol methods changed to use the
+ default equality / inequality. This is done through a `side_effect` on
+ the mocks used for `__eq__` / `__ne__`
+
+
+2011/05/06 Version 0.7.1
+------------------------
+
+Package fixes contributed by Michael Fladischer. No code changes.
+
+* Include template in package
+* Use isolated binaries for the tox tests
+* Unset executable bit on docs
+* Fix DOS line endings in getting-started.txt
+
+
+2011/03/05 Version 0.7.0
+------------------------
+
+No API changes since 0.7.0 rc1. Many documentation changes including a stylish
+new `Sphinx theme <https://github.com/coordt/ADCtheme/>`_.
+
+The full set of changes since 0.6.0 are:
+
+* Python 3 compatibility
+* Ability to mock magic methods with `Mock` and addition of `MagicMock`
+ with pre-created magic methods
+* Addition of `mocksignature` and `mocksignature` argument to `patch` and
+ `patch.object`
+* Addition of `patch.dict` for changing dictionaries during a test
+* Ability to use `patch`, `patch.object` and `patch.dict` as class decorators
+* Renamed ``patch_object`` to `patch.object` (``patch_object`` is
+ deprecated)
+* Addition of soft comparisons: `call_args`, `call_args_list` and `method_calls`
+ now return tuple-like objects which compare equal even when empty args
+ or kwargs are skipped
+* patchers (`patch`, `patch.object` and `patch.dict`) have start and stop
+ methods
+* Addition of `assert_called_once_with` method
+* Mocks can now be named (`name` argument to constructor) and the name is used
+ in the repr
+* repr of a mock with a spec includes the class name of the spec
+* `assert_called_with` works with `python -OO`
+* New `spec_set` keyword argument to `Mock` and `patch`. If used,
+ attempting to *set* an attribute on a mock not on the spec will raise an
+ `AttributeError`
+* Mocks created with a spec can now pass `isinstance` tests (`__class__`
+ returns the type of the spec)
+* Added docstrings to all objects
+* Improved failure message for `Mock.assert_called_with` when the mock
+ has not been called at all
+* Decorated functions / methods have their docstring and `__module__`
+ preserved on Python 2.4.
+* BUGFIX: `mock.patch` now works correctly with certain types of objects that
+ proxy attribute access, like the django settings object
+* BUGFIX: mocks are now copyable (thanks to Ned Batchelder for reporting and
+ diagnosing this)
+* BUGFIX: `spec=True` works with old style classes
+* BUGFIX: ``help(mock)`` works now (on the module). Can no longer use ``__bases__``
+ as a valid sentinel name (thanks to Stephen Emslie for reporting and
+ diagnosing this)
+* BUGFIX: ``side_effect`` now works with ``BaseException`` exceptions like
+ ``KeyboardInterrupt``
+* BUGFIX: `reset_mock` caused infinite recursion when a mock is set as its own
+ return value
+* BUGFIX: patching the same object twice now restores the patches correctly
+* with statement tests now skipped on Python 2.4
+* Tests require unittest2 (or unittest2-py3k) to run
+* Tested with `tox <http://pypi.python.org/pypi/tox>`_ on Python 2.4 - 3.2,
+ jython and pypy (excluding 3.0)
+* Added 'build_sphinx' command to setup.py (requires setuptools or distribute).
+  Thanks to Florian Bauer.
+* Switched from subversion to mercurial for source code control
+* `Konrad Delong <http://konryd.blogspot.com/>`_ added as co-maintainer
+
+
+2011/02/16 Version 0.7.0 RC 1
+-----------------------------
+
+Changes since beta 4:
+
+* Tested with jython, pypy and Python 3.2 and 3.1
+* Decorated functions / methods have their docstring and `__module__`
+ preserved on Python 2.4
+* BUGFIX: `mock.patch` now works correctly with certain types of objects that
+ proxy attribute access, like the django settings object
+* BUGFIX: `reset_mock` caused infinite recursion when a mock is set as its own
+ return value
+
+
+2010/11/12 Version 0.7.0 beta 4
+-------------------------------
+
+* patchers (`patch`, `patch.object` and `patch.dict`) have start and stop
+ methods
+* Addition of `assert_called_once_with` method
+* repr of a mock with a spec includes the class name of the spec
+* `assert_called_with` works with `python -OO`
+* New `spec_set` keyword argument to `Mock` and `patch`. If used,
+ attempting to *set* an attribute on a mock not on the spec will raise an
+ `AttributeError`
+* Attributes and return value of a `MagicMock` are `MagicMock` objects
+* Attempting to set an unsupported magic method now raises an `AttributeError`
+* `patch.dict` works as a class decorator
+* Switched from subversion to mercurial for source code control
+* BUGFIX: mocks are now copyable (thanks to Ned Batchelder for reporting and
+ diagnosing this)
+* BUGFIX: `spec=True` works with old style classes
+* BUGFIX: `mocksignature=True` can now patch instance methods via
+ `patch.object`
+
+
+2010/09/18 Version 0.7.0 beta 3
+-------------------------------
+
+* Using spec with :class:`MagicMock` only pre-creates magic methods in the spec
+* Setting a magic method on a mock with a ``spec`` can only be done if the
+ spec has that method
+* Mocks can now be named (`name` argument to constructor) and the name is used
+ in the repr
+* `mocksignature` can now be used with classes (signature based on `__init__`)
+ and callable objects (signature based on `__call__`)
+* Mocks created with a spec can now pass `isinstance` tests (`__class__`
+ returns the type of the spec)
+* Default numeric value for MagicMock is 1 rather than zero (because the
+ MagicMock bool defaults to True and 0 is False)
+* Improved failure message for :meth:`~Mock.assert_called_with` when the mock
+ has not been called at all
+* Adding the following to the set of supported magic methods:
+
+ - ``__getformat__`` and ``__setformat__``
+ - pickle methods
+ - ``__trunc__``, ``__ceil__`` and ``__floor__``
+ - ``__sizeof__``
+
+* Added 'build_sphinx' command to setup.py (requires setuptools or distribute).
+  Thanks to Florian Bauer.
+* with statement tests now skipped on Python 2.4
+* Tests require unittest2 to run on Python 2.7
+* Improved several docstrings and documentation
+
+
+2010/06/23 Version 0.7.0 beta 2
+-------------------------------
+
+* :func:`patch.dict` works as a context manager as well as a decorator
+* ``patch.dict`` takes a string to specify dictionary as well as a dictionary
+ object. If a string is supplied the name specified is imported
+* BUGFIX: ``patch.dict`` restores dictionary even when an exception is raised
+
+
+2010/06/22 Version 0.7.0 beta 1
+-------------------------------
+
+* Addition of :func:`mocksignature`
+* Ability to mock magic methods
+* Ability to use ``patch`` and ``patch.object`` as class decorators
+* Renamed ``patch_object`` to :func:`patch.object` (``patch_object`` is
+ deprecated)
+* Addition of :class:`MagicMock` class with all magic methods pre-created for you
+* Python 3 compatibility (tested with 3.2 but should work with 3.0 & 3.1 as
+ well)
+* Addition of :func:`patch.dict` for changing dictionaries during a test
+* Addition of ``mocksignature`` argument to ``patch`` and ``patch.object``
+* ``help(mock)`` works now (on the module). Can no longer use ``__bases__``
+ as a valid sentinel name (thanks to Stephen Emslie for reporting and
+ diagnosing this)
+* Addition of soft comparisons: `call_args`, `call_args_list` and `method_calls`
+ now return tuple-like objects which compare equal even when empty args
+ or kwargs are skipped
+* Added docstrings.
+* BUGFIX: ``side_effect`` now works with ``BaseException`` exceptions like
+ ``KeyboardInterrupt``
+* BUGFIX: patching the same object twice now restores the patches correctly
+* The tests now require `unittest2 <http://pypi.python.org/pypi/unittest2>`_
+ to run
+* `Konrad Delong <http://konryd.blogspot.com/>`_ added as co-maintainer
+
+
+2009/08/22 Version 0.6.0
+------------------------
+
+* New test layout compatible with test discovery
+* Descriptors (static methods / class methods etc) can now be patched and
+ restored correctly
+* Mocks can raise exceptions when called by setting ``side_effect`` to an
+ exception class or instance
+* Mocks that wrap objects will not pass on calls to the underlying object if
+ an explicit return_value is set
+
+
+2009/04/17 Version 0.5.0
+------------------------
+
+* Made DEFAULT part of the public api.
+* Documentation built with Sphinx.
+* ``side_effect`` is now called with the same arguments as the mock is called with and,
+  if it returns a non-DEFAULT value, that value is automatically set as the ``mock.return_value``.
+* ``wraps`` keyword argument used for wrapping objects (and passing calls through to the wrapped object).
+* ``Mock.reset`` renamed to ``Mock.reset_mock``, as reset is a common API name.
+* ``patch`` / ``patch_object`` are now context managers and can be used with ``with``.
+* A new 'create' keyword argument to patch and patch_object that allows them to patch
+ (and unpatch) attributes that don't exist. (Potentially unsafe to use - it can allow
+ you to have tests that pass when they are testing an API that doesn't exist - use at
+ your own risk!)
+* The methods keyword argument to Mock has been removed and merged with spec. The spec
+ argument can now be a list of methods or an object to take the spec from.
+* Nested patches may now be applied in a different order (created mocks passed
+ in the opposite order). This is actually a bugfix.
+* patch and patch_object now take a spec keyword argument. If spec is
+ passed in as 'True' then the Mock created will take the object it is replacing
+ as its spec object. If the object being replaced is a class, then the return
+ value for the mock will also use the class as a spec.
+* A Mock created without a spec will not attempt to mock any magic methods / attributes
+ (they will raise an ``AttributeError`` instead).
+
+
+2008/10/12 Version 0.4.0
+------------------------
+
+* Default return value is now a new mock rather than None
+* return_value added as a keyword argument to the constructor
+* New method 'assert_called_with'
+* Added 'side_effect' attribute / keyword argument called when mock is called
+* patch decorator split into two decorators:
+
+ - ``patch_object`` which takes an object and an attribute name to patch
+ (plus optionally a value to patch with which defaults to a mock object)
+ - ``patch`` which takes a string specifying a target to patch; in the form
+ 'package.module.Class.attribute'. (plus optionally a value to
+ patch with which defaults to a mock object)
+
+* Can now patch objects with ``None``
+* Change to patch for nose compatibility with error reporting in wrapped functions
+* Reset no longer clears children / return value etc - it just resets
+ call count and call args. It also calls reset on all children (and
+ the return value if it is a mock).
+
+Thanks to Konrad Delong, Kevin Dangoor and others for patches and suggestions.
+
+
+2007/12/03 Version 0.3.1
+-------------------------
+
+``patch`` maintains the name of decorated functions for compatibility with nose
+test autodiscovery.
+
+Tests decorated with ``patch`` that use the two argument form (implicit mock
+creation) will receive the mock(s) passed in as extra arguments.
+
+Thanks to Kevin Dangoor for these changes.
+
+
+2007/11/30 Version 0.3.0
+-------------------------
+
+Removed ``patch_module``. ``patch`` can now take a string as the first
+argument for patching modules.
+
+The third argument to ``patch`` is optional - a mock will be created by
+default if it is not passed in.
+
+
+2007/11/21 Version 0.2.1
+-------------------------
+
+Bug fix, allows reuse of functions decorated with ``patch`` and ``patch_module``.
+
+
+2007/11/20 Version 0.2.0
+-------------------------
+
+Added ``spec`` keyword argument for creating ``Mock`` objects from a
+specification object.
+
+Added ``patch`` and ``patch_module`` monkey patching decorators.
+
+Added ``sentinel`` for convenient access to unique objects.
+
+Distribution includes unit tests.
+
+
+2007/11/19 Version 0.1.0
+-------------------------
+
+Initial release.
+
+
+TODO and Limitations
+====================
+
+Contributions, bug reports and comments welcomed!
+
+Feature requests and bug reports are handled on the issue tracker:
+
+ * `mock issue tracker <http://code.google.com/p/mock/issues/list>`_
+
+`wraps` is not integrated with magic methods.
+
+`patch` could do the patching automatically in the constructor and unpatch in
+the destructor. This would be useful in itself, but violates TOOWTDI and would be
+unsafe for IronPython & PyPy (non-deterministic calling of destructors).
+Destructors aren't called in CPython where there are cycles, but a weak
+reference with a callback can be used to get round this.
+
+`Mock` has several attributes. This makes it unsuitable for mocking objects
+that use these attribute names. A way round this would be to provide methods
+that *hide* these attributes when needed. In 0.8 many, but not all, of these
+attributes are renamed to gain a `_mock` prefix, making it less likely that
+they will clash. Any outstanding attributes that haven't been modified with
+the prefix should be changed.
+
+If a patch is started using `patch.start` and then not stopped correctly then
+the unpatching is not done. Using weak references it would be possible to
+detect and fix this when the patch object itself is garbage collected. This
+would be tricky to get right though.
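+
+To make the failure mode concrete, a minimal sketch (using `os.getcwd` purely
+as an illustrative target)::
+
+    from mock import patch
+
+    patcher = patch('os.getcwd')
+    mock_getcwd = patcher.start()
+    # until patcher.stop() is called, os.getcwd stays replaced by the mock
+    # for the whole process - if a test fails before reaching stop, later
+    # tests see the mock too
+    patcher.stop()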
+
+When a `Mock` is created by `patch`, arbitrary keywords can be used to set
+attributes. If `patch` is created with a `spec`, and is replacing a class, then
+a `return_value` mock is created. The keyword arguments are not applied to the
+child mock, but could be.
+
+When mocking a class with `patch`, passing in `spec=True` or `autospec=True`,
+the mock class has an instance created from the same spec. Should this be the
+default behaviour for mocks anyway (mock return values inheriting the spec
+from their parent), or should it be controlled by an additional keyword
+argument (`inherit`) to the Mock constructor? `create_autospec` does this, so
+an additional keyword argument to Mock is probably unnecessary.
+
+The `mocksignature` argument to `patch` with a non `Mock` passed into
+`new_callable` will *probably* cause an error. Should it just be invalid?
+
+Note that `NonCallableMock` and `NonCallableMagicMock` still have the unused
+(and unusable) attributes: `return_value`, `side_effect`, `call_count`,
+`call_args` and `call_args_list`. These could be removed or raise errors on
+getting / setting. They also have the `assert_called_with` and
+`assert_called_once_with` methods. Removing these would be pointless as
+fetching them would create a mock (attribute) that could be called without
+error.
+
+There is some outstanding technical debt. The way autospeccing mocks function
+signatures was copied and modified from `mocksignature`. This could all be
+refactored into one set of functions instead of two. The ways we tell whether
+patchers are started and whether a patcher is being used for a `patch.multiple`
+call are both horrible. There are now a host of helper functions that should
+be rationalised. (It is probably time to split mock into a package instead of a
+module.)
+
+Passing arbitrary keyword arguments to `create_autospec`, or `patch` with
+`autospec`, when mocking a *function* works fine. However, the arbitrary
+attributes are set on the created mock - but `create_autospec` returns a
+real function (which doesn't have those attributes). Then again, what is the
+use case for using autospec to create functions with attributes that don't
+exist on the original?
+
+`mocksignature`, plus the `call_args_list` and `method_calls` attributes of
+`Mock` could all be deprecated.
diff --git a/third_party/python/mock-1.0.0/html/_sources/compare.txt b/third_party/python/mock-1.0.0/html/_sources/compare.txt
new file mode 100644
index 0000000000..41555308e2
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_sources/compare.txt
@@ -0,0 +1,628 @@
+=========================
+ Mock Library Comparison
+=========================
+
+
+.. testsetup::
+
+ def assertEqual(a, b):
+ assert a == b, ("%r != %r" % (a, b))
+
+ def assertRaises(Exc, func):
+ try:
+ func()
+ except Exc:
+ return
+ assert False, ("%s not raised" % Exc)
+
+ sys.modules['somemodule'] = somemodule = mock.Mock(name='somemodule')
+ class SomeException(Exception):
+ some_method = method1 = method2 = None
+ some_other_object = SomeObject = SomeException
+
+
+A side-by-side comparison of how to accomplish some basic tasks with mock and
+some other popular Python mocking libraries and frameworks.
+
+These are:
+
+* `flexmock <http://pypi.python.org/pypi/flexmock>`_
+* `mox <http://pypi.python.org/pypi/mox>`_
+* `Mocker <http://niemeyer.net/mocker>`_
+* `dingus <http://pypi.python.org/pypi/dingus>`_
+* `fudge <http://pypi.python.org/pypi/fudge>`_
+
+Popular python mocking frameworks not yet represented here include
+`MiniMock <http://pypi.python.org/pypi/MiniMock>`_.
+
+`pMock <http://pmock.sourceforge.net/>`_ (last release 2004 and doesn't import
+in recent versions of Python) and
+`python-mock <http://python-mock.sourceforge.net/>`_ (last release 2005) are
+intentionally omitted.
+
+.. note::
+
+   A more up to date version of this comparison, tested against all the mock
+   libraries (only the mock examples on this page can be executed as
+   doctests), is maintained by Gary Bernhardt:
+
+ * `Python Mock Library Comparison
+ <http://garybernhardt.github.com/python-mock-comparison/>`_
+
+This comparison is by no means complete, and also may not be fully idiomatic
+for all the libraries represented. *Please* contribute corrections, missing
+comparisons, or comparisons for additional libraries to the `mock issue
+tracker <https://code.google.com/p/mock/issues/list>`_.
+
+This comparison page was originally created by the `Mox project
+<https://code.google.com/p/pymox/wiki/MoxComparison>`_ and then extended for
+`flexmock and mock <http://has207.github.com/flexmock/compare.html>`_ by
+Herman Sheremetyev. Dingus examples written by `Gary Bernhardt
+<http://garybernhardt.github.com/python-mock-comparison/>`_. fudge examples
+provided by `Kumar McMillan <http://farmdev.com/>`_.
+
+.. note::
+
+   The example tasks here were originally created by Mox, which is a mocking
+ *framework* rather than a library like mock. The tasks shown naturally
+ exemplify tasks that frameworks are good at and not the ones they make
+ harder. In particular you can take a `Mock` or `MagicMock` object and use
+ it in any way you want with no up-front configuration. The same is also
+ true for Dingus.
+
+ The examples for mock here assume version 0.7.0.
+
+
+Simple fake object
+~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method.return_value = "calculated value"
+ >>> my_mock.some_attribute = "value"
+ >>> assertEqual("calculated value", my_mock.some_method())
+ >>> assertEqual("value", my_mock.some_attribute)
+
+::
+
+ # Flexmock
+ mock = flexmock(some_method=lambda: "calculated value", some_attribute="value")
+ assertEqual("calculated value", mock.some_method())
+ assertEqual("value", mock.some_attribute)
+
+ # Mox
+ mock = mox.MockAnything()
+ mock.some_method().AndReturn("calculated value")
+ mock.some_attribute = "value"
+ mox.Replay(mock)
+ assertEqual("calculated value", mock.some_method())
+ assertEqual("value", mock.some_attribute)
+
+ # Mocker
+ mock = mocker.mock()
+ mock.some_method()
+ mocker.result("calculated value")
+ mocker.replay()
+ mock.some_attribute = "value"
+ assertEqual("calculated value", mock.some_method())
+ assertEqual("value", mock.some_attribute)
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus(some_attribute="value",
+ ... some_method__returns="calculated value")
+ >>> assertEqual("calculated value", my_dingus.some_method())
+ >>> assertEqual("value", my_dingus.some_attribute)
+
+::
+
+ >>> # fudge
+ >>> my_fake = (fudge.Fake()
+ ... .provides('some_method')
+ ... .returns("calculated value")
+ ... .has_attr(some_attribute="value"))
+ ...
+ >>> assertEqual("calculated value", my_fake.some_method())
+ >>> assertEqual("value", my_fake.some_attribute)
+
+
+Simple mock
+~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method.return_value = "value"
+ >>> assertEqual("value", my_mock.some_method())
+ >>> my_mock.some_method.assert_called_once_with()
+
+::
+
+ # Flexmock
+ mock = flexmock()
+ mock.should_receive("some_method").and_return("value").once
+ assertEqual("value", mock.some_method())
+
+ # Mox
+ mock = mox.MockAnything()
+ mock.some_method().AndReturn("value")
+ mox.Replay(mock)
+ assertEqual("value", mock.some_method())
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock()
+ mock.some_method()
+ mocker.result("value")
+ mocker.replay()
+ assertEqual("value", mock.some_method())
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus(some_method__returns="value")
+ >>> assertEqual("value", my_dingus.some_method())
+ >>> assert my_dingus.some_method.calls().once()
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = (fudge.Fake()
+ ... .expects('some_method')
+ ... .returns("value")
+ ... .times_called(1))
+ ...
+ >>> test()
+ Traceback (most recent call last):
+ ...
+ AssertionError: fake:my_fake.some_method() was not called
+
+
+Creating partial mocks
+~~~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> SomeObject.some_method = mock.Mock(return_value='value')
+ >>> assertEqual("value", SomeObject.some_method())
+
+::
+
+ # Flexmock
+ flexmock(SomeObject).should_receive("some_method").and_return('value')
+ assertEqual("value", mock.some_method())
+
+ # Mox
+ mock = mox.MockObject(SomeObject)
+ mock.some_method().AndReturn("value")
+ mox.Replay(mock)
+ assertEqual("value", mock.some_method())
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock(SomeObject)
+ mock.Get()
+ mocker.result("value")
+ mocker.replay()
+ assertEqual("value", mock.some_method())
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> object = SomeObject
+ >>> object.some_method = dingus.Dingus(return_value="value")
+ >>> assertEqual("value", object.some_method())
+
+::
+
+ >>> # fudge
+ >>> fake = fudge.Fake().is_callable().returns("<fudge-value>")
+ >>> with fudge.patched_context(SomeObject, 'some_method', fake):
+ ... s = SomeObject()
+ ... assertEqual("<fudge-value>", s.some_method())
+ ...
+
+
+Ensure calls are made in specific order
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock(spec=SomeObject)
+ >>> my_mock.method1()
+ <Mock name='mock.method1()' id='...'>
+ >>> my_mock.method2()
+ <Mock name='mock.method2()' id='...'>
+ >>> assertEqual(my_mock.mock_calls, [call.method1(), call.method2()])
+
+::
+
+ # Flexmock
+ mock = flexmock(SomeObject)
+ mock.should_receive('method1').once.ordered.and_return('first thing')
+ mock.should_receive('method2').once.ordered.and_return('second thing')
+
+ # Mox
+ mock = mox.MockObject(SomeObject)
+ mock.method1().AndReturn('first thing')
+ mock.method2().AndReturn('second thing')
+ mox.Replay(mock)
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock()
+ with mocker.order():
+ mock.method1()
+ mocker.result('first thing')
+ mock.method2()
+ mocker.result('second thing')
+ mocker.replay()
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> my_dingus.method1()
+ <Dingus ...>
+ >>> my_dingus.method2()
+ <Dingus ...>
+ >>> assertEqual(['method1', 'method2'], [call.name for call in my_dingus.calls])
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = (fudge.Fake()
+ ... .remember_order()
+ ... .expects('method1')
+ ... .expects('method2'))
+ ... my_fake.method2()
+ ... my_fake.method1()
+ ...
+ >>> test()
+ Traceback (most recent call last):
+ ...
+ AssertionError: Call #1 was fake:my_fake.method2(); Expected: #1 fake:my_fake.method1(), #2 fake:my_fake.method2(), end
+
+
+Raising exceptions
+~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method.side_effect = SomeException("message")
+ >>> assertRaises(SomeException, my_mock.some_method)
+
+::
+
+ # Flexmock
+ mock = flexmock()
+ mock.should_receive("some_method").and_raise(SomeException("message"))
+ assertRaises(SomeException, mock.some_method)
+
+ # Mox
+ mock = mox.MockAnything()
+ mock.some_method().AndRaise(SomeException("message"))
+ mox.Replay(mock)
+ assertRaises(SomeException, mock.some_method)
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock()
+ mock.some_method()
+ mocker.throw(SomeException("message"))
+ mocker.replay()
+ assertRaises(SomeException, mock.some_method)
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> my_dingus.some_method = dingus.exception_raiser(SomeException)
+ >>> assertRaises(SomeException, my_dingus.some_method)
+
+::
+
+ >>> # fudge
+ >>> my_fake = (fudge.Fake()
+ ... .is_callable()
+ ... .raises(SomeException("message")))
+ ...
+ >>> my_fake()
+ Traceback (most recent call last):
+ ...
+ SomeException: message
+
+
+Override new instances of a class
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> with mock.patch('somemodule.Someclass') as MockClass:
+ ... MockClass.return_value = some_other_object
+ ... assertEqual(some_other_object, somemodule.Someclass())
+ ...
+
+
+::
+
+ # Flexmock
+ flexmock(some_module.SomeClass, new_instances=some_other_object)
+ assertEqual(some_other_object, some_module.SomeClass())
+
+ # Mox
+ # (you will probably have mox.Mox() available as self.mox in a real test)
+ mox.Mox().StubOutWithMock(some_module, 'SomeClass', use_mock_anything=True)
+ some_module.SomeClass().AndReturn(some_other_object)
+ mox.ReplayAll()
+ assertEqual(some_other_object, some_module.SomeClass())
+
+ # Mocker
+ instance = mocker.mock()
+ klass = mocker.replace(SomeClass, spec=None)
+ klass('expected', 'args')
+ mocker.result(instance)
+
+::
+
+ >>> # Dingus
+ >>> MockClass = dingus.Dingus(return_value=some_other_object)
+ >>> with dingus.patch('somemodule.SomeClass', MockClass):
+ ... assertEqual(some_other_object, somemodule.SomeClass())
+ ...
+
+::
+
+ >>> # fudge
+ >>> @fudge.patch('somemodule.SomeClass')
+ ... def test(FakeClass):
+ ... FakeClass.is_callable().returns(some_other_object)
+ ... assertEqual(some_other_object, somemodule.SomeClass())
+ ...
+ >>> test()
+
+
+Call the same method multiple times
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. note::
+
+ You don't need to do *any* configuration to call `mock.Mock()` methods
+ multiple times. Attributes like `call_count`, `call_args_list` and
+ `method_calls` provide various different ways of making assertions about
+ how the mock was used.
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method()
+ <Mock name='mock.some_method()' id='...'>
+ >>> my_mock.some_method()
+ <Mock name='mock.some_method()' id='...'>
+ >>> assert my_mock.some_method.call_count >= 2
+
+::
+
+    # Flexmock
+    # (verifies that the method gets called at least twice)
+ flexmock(some_object).should_receive('some_method').at_least.twice
+
+ # Mox
+ # (does not support variable number of calls, so you need to create a new entry for each explicit call)
+ mock = mox.MockObject(some_object)
+ mock.some_method(mox.IgnoreArg(), mox.IgnoreArg())
+ mock.some_method(mox.IgnoreArg(), mox.IgnoreArg())
+ mox.Replay(mock)
+ mox.Verify(mock)
+
+ # Mocker
+ # (TODO)
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> my_dingus.some_method()
+ <Dingus ...>
+ >>> my_dingus.some_method()
+ <Dingus ...>
+ >>> assert len(my_dingus.calls('some_method')) == 2
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = fudge.Fake().expects('some_method').times_called(2)
+ ... my_fake.some_method()
+ ...
+ >>> test()
+ Traceback (most recent call last):
+ ...
+ AssertionError: fake:my_fake.some_method() was called 1 time(s). Expected 2.
+
+
+Mock chained methods
+~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> method3 = my_mock.method1.return_value.method2.return_value.method3
+ >>> method3.return_value = 'some value'
+ >>> assertEqual('some value', my_mock.method1().method2().method3(1, 2))
+ >>> method3.assert_called_once_with(1, 2)
+
+::
+
+ # Flexmock
+ # (intermediate method calls are automatically assigned to temporary fake objects
+ # and can be called with any arguments)
+ flexmock(some_object).should_receive(
+ 'method1.method2.method3'
+ ).with_args(arg1, arg2).and_return('some value')
+    assertEqual('some value', some_object.method1().method2().method3(arg1, arg2))
+
+::
+
+ # Mox
+ mock = mox.MockObject(some_object)
+ mock2 = mox.MockAnything()
+ mock3 = mox.MockAnything()
+    mock.method1().AndReturn(mock2)
+    mock2.method2().AndReturn(mock3)
+ mock3.method3(arg1, arg2).AndReturn('some_value')
+ self.mox.ReplayAll()
+ assertEqual("some_value", some_object.method1().method2().method3(arg1, arg2))
+ self.mox.VerifyAll()
+
+ # Mocker
+ # (TODO)
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> method3 = my_dingus.method1.return_value.method2.return_value.method3
+ >>> method3.return_value = 'some value'
+ >>> assertEqual('some value', my_dingus.method1().method2().method3(1, 2))
+ >>> assert method3.calls('()', 1, 2).once()
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = fudge.Fake()
+ ... (my_fake
+ ... .expects('method1')
+ ... .returns_fake()
+ ... .expects('method2')
+ ... .returns_fake()
+ ... .expects('method3')
+ ... .with_args(1, 2)
+ ... .returns('some value'))
+ ... assertEqual('some value', my_fake.method1().method2().method3(1, 2))
+ ...
+ >>> test()
+
+
+Mocking a context manager
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Examples for mock, Dingus and fudge only (so far):
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.MagicMock()
+ >>> with my_mock:
+ ... pass
+ ...
+ >>> my_mock.__enter__.assert_called_with()
+ >>> my_mock.__exit__.assert_called_with(None, None, None)
+
+::
+
+
+ >>> # Dingus (nothing special here; all dinguses are "magic mocks")
+ >>> my_dingus = dingus.Dingus()
+ >>> with my_dingus:
+ ... pass
+ ...
+ >>> assert my_dingus.__enter__.calls()
+ >>> assert my_dingus.__exit__.calls('()', None, None, None)
+
+::
+
+ >>> # fudge
+ >>> my_fake = fudge.Fake().provides('__enter__').provides('__exit__')
+ >>> with my_fake:
+ ... pass
+ ...
+
+
+Mocking the builtin open used as a context manager
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Examples for mock, Dingus and fudge only (so far):
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.MagicMock()
+ >>> with mock.patch('__builtin__.open', my_mock):
+ ... manager = my_mock.return_value.__enter__.return_value
+ ... manager.read.return_value = 'some data'
+ ... with open('foo') as h:
+ ... data = h.read()
+ ...
+ >>> data
+ 'some data'
+ >>> my_mock.assert_called_once_with('foo')
+
+*or*:
+
+.. doctest::
+
+ >>> # mock
+ >>> with mock.patch('__builtin__.open') as my_mock:
+ ... my_mock.return_value.__enter__ = lambda s: s
+ ... my_mock.return_value.__exit__ = mock.Mock()
+ ... my_mock.return_value.read.return_value = 'some data'
+ ... with open('foo') as h:
+ ... data = h.read()
+ ...
+ >>> data
+ 'some data'
+ >>> my_mock.assert_called_once_with('foo')
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> with dingus.patch('__builtin__.open', my_dingus):
+ ... file_ = open.return_value.__enter__.return_value
+ ... file_.read.return_value = 'some data'
+ ... with open('foo') as h:
+ ...     data = h.read()
+ ...
+ >>> data
+ 'some data'
+ >>> assert my_dingus.calls('()', 'foo').once()
+
+::
+
+ >>> # fudge
+ >>> from contextlib import contextmanager
+ >>> from StringIO import StringIO
+ >>> @contextmanager
+ ... def fake_file(filename):
+ ... yield StringIO('sekrets')
+ ...
+ >>> with fudge.patch('__builtin__.open') as fake_open:
+ ... fake_open.is_callable().calls(fake_file)
+ ... with open('/etc/password') as f:
+ ... data = f.read()
+ ...
+ fake:__builtin__.open
+ >>> data
+    'sekrets'
\ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/_sources/examples.txt b/third_party/python/mock-1.0.0/html/_sources/examples.txt
new file mode 100644
index 0000000000..ecb994b156
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_sources/examples.txt
@@ -0,0 +1,1063 @@
+.. _further-examples:
+
+==================
+ Further Examples
+==================
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ from datetime import date
+
+ BackendProvider = Mock()
+ sys.modules['mymodule'] = mymodule = Mock(name='mymodule')
+
+ def grob(val):
+ "First frob and then clear val"
+ mymodule.frob(val)
+ val.clear()
+
+ mymodule.frob = lambda val: val
+ mymodule.grob = grob
+ mymodule.date = date
+
+ class TestCase(unittest2.TestCase):
+ def run(self):
+ result = unittest2.TestResult()
+ out = unittest2.TestCase.run(self, result)
+ assert result.wasSuccessful()
+
+ from mock import inPy3k
+
+
+
+For comprehensive examples, see the unit tests included in the full source
+distribution.
+
+Here are some more examples for some slightly more advanced scenarios than in
+the :ref:`getting started <getting-started>` guide.
+
+
+Mocking chained calls
+=====================
+
+Mocking chained calls is actually straightforward with mock once you
+understand the :attr:`~Mock.return_value` attribute. When a mock is called for
+the first time, or you fetch its `return_value` before it has been called, a
+new `Mock` is created.
+
+This means that you can see how the object returned from a call to a mocked
+object has been used by interrogating the `return_value` mock:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock().foo(a=2, b=3)
+ <Mock name='mock().foo()' id='...'>
+ >>> mock.return_value.foo.assert_called_with(a=2, b=3)
+
+From here it is a simple step to configure and then make assertions about
+chained calls. Of course another alternative is writing your code in a more
+testable way in the first place...
+
+So, suppose we have some code that looks a little bit like this:
+
+.. doctest::
+
+ >>> class Something(object):
+ ... def __init__(self):
+ ... self.backend = BackendProvider()
+ ... def method(self):
+ ... response = self.backend.get_endpoint('foobar').create_call('spam', 'eggs').start_call()
+ ... # more code
+
+Assuming that `BackendProvider` is already well tested, how do we test
+`method()`? Specifically, we want to test that the code section `# more
+code` uses the response object in the correct way.
+
+As this chain of calls is made from an instance attribute we can monkey patch
+the `backend` attribute on a `Something` instance. In this particular case
+we are only interested in the return value from the final call to
+`start_call` so we don't have much configuration to do. Let's assume the
+object it returns is 'file-like', so we'll ensure that our response object
+uses the builtin `file` as its `spec`.
+
+To do this we create a mock instance as our mock backend and create a mock
+response object for it. To set the response as the return value for that final
+`start_call` we could do this:
+
+ `mock_backend.get_endpoint.return_value.create_call.return_value.start_call.return_value = mock_response`.
+
+We can do that in a slightly nicer way using the :meth:`~Mock.configure_mock`
+method to directly set the return value for us:
+
+.. doctest::
+
+ >>> something = Something()
+ >>> mock_response = Mock(spec=file)
+ >>> mock_backend = Mock()
+ >>> config = {'get_endpoint.return_value.create_call.return_value.start_call.return_value': mock_response}
+ >>> mock_backend.configure_mock(**config)
+
+With these we monkey patch the "mock backend" in place and can make the real
+call:
+
+.. doctest::
+
+ >>> something.backend = mock_backend
+ >>> something.method()
+
+Using :attr:`~Mock.mock_calls` we can check the chained call with a single
+assert. A chained call is several calls in one line of code, so there will be
+several entries in `mock_calls`. We can use :meth:`call.call_list` to create
+this list of calls for us:
+
+.. doctest::
+
+ >>> chained = call.get_endpoint('foobar').create_call('spam', 'eggs').start_call()
+ >>> call_list = chained.call_list()
+ >>> assert mock_backend.mock_calls == call_list
+
+
+Partial mocking
+===============
+
+In some tests I wanted to mock out a call to `datetime.date.today()
+<http://docs.python.org/library/datetime.html#datetime.date.today>`_ to return
+a known date, but I didn't want to prevent the code under test from
+creating new date objects. Unfortunately `datetime.date` is written in C, and
+so I couldn't just monkey-patch out the static `date.today` method.
+
+I found a simple way of doing this that involved effectively wrapping the date
+class with a mock, but passing through calls to the constructor to the real
+class (and returning real instances).
+
+The :func:`patch decorator <patch>` is used here to
+mock out the `date` class in the module under test. The :attr:`side_effect`
+attribute on the mock date class is then set to a lambda function that returns
+a real date. When the mock date class is called a real date will be
+constructed and returned by `side_effect`.
+
+.. doctest::
+
+ >>> from datetime import date
+ >>> with patch('mymodule.date') as mock_date:
+ ... mock_date.today.return_value = date(2010, 10, 8)
+ ... mock_date.side_effect = lambda *args, **kw: date(*args, **kw)
+ ...
+ ... assert mymodule.date.today() == date(2010, 10, 8)
+ ... assert mymodule.date(2009, 6, 8) == date(2009, 6, 8)
+ ...
+
+Note that we don't patch `datetime.date` globally, we patch `date` in the
+module that *uses* it. See :ref:`where to patch <where-to-patch>`.
+
+When `date.today()` is called a known date is returned, but calls to the
+`date(...)` constructor still return normal dates. Without this you can find
+yourself having to calculate an expected result using exactly the same
+algorithm as the code under test, which is a classic testing anti-pattern.
+
+Calls to the date constructor are recorded in the `mock_date` attributes
+(`call_count` and friends) which may also be useful for your tests.
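+
+For instance, continuing the example above, the single call made to the `date`
+constructor inside the `with` block can be checked afterwards; a small sketch
+of the kind of assertion this enables::
+
+    >>> mock_date.call_count
+    1
+    >>> mock_date.assert_called_with(2009, 6, 8)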
+
+An alternative way of dealing with mocking dates, or other builtin classes,
+is discussed in `this blog entry
+<http://williamjohnbert.com/2011/07/how-to-unit-testing-in-django-with-mocking-and-patching/>`_.
+
+
+Mocking a Generator Method
+==========================
+
+A Python generator is a function or method that uses the `yield statement
+<http://docs.python.org/reference/simple_stmts.html#the-yield-statement>`_ to
+return a series of values when iterated over [#]_.
+
+A generator method / function is called to return the generator object. It is
+the generator object that is then iterated over. The protocol method for
+iteration is `__iter__
+<http://docs.python.org/library/stdtypes.html#container.__iter__>`_, so we can
+mock this using a `MagicMock`.
+
+Here's an example class with an "iter" method implemented as a generator:
+
+.. doctest::
+
+ >>> class Foo(object):
+ ... def iter(self):
+ ... for i in [1, 2, 3]:
+ ... yield i
+ ...
+ >>> foo = Foo()
+ >>> list(foo.iter())
+ [1, 2, 3]
+
+
+How would we mock this class, and in particular its "iter" method?
+
+To configure the values returned from the iteration (implicit in the call to
+`list`), we need to configure the object returned by the call to `foo.iter()`.
+
+.. doctest::
+
+ >>> mock_foo = MagicMock()
+ >>> mock_foo.iter.return_value = iter([1, 2, 3])
+ >>> list(mock_foo.iter())
+ [1, 2, 3]
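+
+Because `MagicMock` already provides `__iter__`, you can also configure the
+iteration protocol directly when the object itself is iterated over rather
+than a method on it being called. A minimal sketch:
+
+.. doctest::
+
+    >>> mock_iterable = MagicMock()
+    >>> mock_iterable.__iter__.return_value = iter([1, 2, 3])
+    >>> list(mock_iterable)
+    [1, 2, 3]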
+
+.. [#] There are also generator expressions and more `advanced uses
+ <http://www.dabeaz.com/coroutines/index.html>`_ of generators, but we aren't
+ concerned about them here. A very good introduction to generators and how
+ powerful they are is: `Generator Tricks for Systems Programmers
+ <http://www.dabeaz.com/generators/>`_.
+
+
+Applying the same patch to every test method
+============================================
+
+If you want several patches in place for multiple test methods the obvious way
+is to apply the patch decorators to every method. This can feel like unnecessary
+repetition. For Python 2.6 or more recent you can use `patch` (in all its
+various forms) as a class decorator. This applies the patches to all test
+methods on the class. Test methods are identified by names that start
+with `test`:
+
+.. doctest::
+
+ >>> @patch('mymodule.SomeClass')
+ ... class MyTest(TestCase):
+ ...
+ ... def test_one(self, MockSomeClass):
+ ... self.assertTrue(mymodule.SomeClass is MockSomeClass)
+ ...
+ ... def test_two(self, MockSomeClass):
+ ... self.assertTrue(mymodule.SomeClass is MockSomeClass)
+ ...
+ ... def not_a_test(self):
+ ... return 'something'
+ ...
+ >>> MyTest('test_one').test_one()
+ >>> MyTest('test_two').test_two()
+ >>> MyTest('test_two').not_a_test()
+ 'something'
+
+An alternative way of managing patches is to use the :ref:`start-and-stop`.
+These allow you to move the patching into your `setUp` and `tearDown` methods.
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... self.patcher = patch('mymodule.foo')
+ ... self.mock_foo = self.patcher.start()
+ ...
+ ... def test_foo(self):
+ ... self.assertTrue(mymodule.foo is self.mock_foo)
+ ...
+ ... def tearDown(self):
+ ... self.patcher.stop()
+ ...
+ >>> MyTest('test_foo').run()
+
+If you use this technique you must ensure that the patching is "undone" by
+calling `stop`. This can be fiddlier than you might think, because if an
+exception is raised in the setUp then tearDown is not called. `unittest2
+<http://pypi.python.org/pypi/unittest2>`_ cleanup functions make this simpler:
+
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... patcher = patch('mymodule.foo')
+ ... self.addCleanup(patcher.stop)
+ ... self.mock_foo = patcher.start()
+ ...
+ ... def test_foo(self):
+ ... self.assertTrue(mymodule.foo is self.mock_foo)
+ ...
+ >>> MyTest('test_foo').run()
+
+
+Mocking Unbound Methods
+=======================
+
+Whilst writing tests today I needed to patch an *unbound method* (patching the
+method on the class rather than on the instance). I needed self to be passed
+in as the first argument because I want to make asserts about which objects
+were calling this particular method. The issue is that you can't patch with a
+mock for this, because if you replace an unbound method with a mock it doesn't
+become a bound method when fetched from the instance, and so it doesn't get
+self passed in. The workaround is to patch the unbound method with a real
+function instead. The :func:`patch` decorator makes it so simple to
+patch out methods with a mock that having to create a real function becomes a
+nuisance.
+
+If you pass `autospec=True` to patch then it does the patching with a
+*real* function object. This function object has the same signature as the one
+it is replacing, but delegates to a mock under the hood. You still get your
+mock auto-created in exactly the same way as before. What it means, though, is
+that if you use it to patch out an unbound method on a class the mocked
+function will be turned into a bound method if it is fetched from an instance.
+It will have `self` passed in as the first argument, which is exactly what I
+wanted:
+
+.. doctest::
+
+ >>> class Foo(object):
+ ... def foo(self):
+ ... pass
+ ...
+ >>> with patch.object(Foo, 'foo', autospec=True) as mock_foo:
+ ... mock_foo.return_value = 'foo'
+ ... foo = Foo()
+ ... foo.foo()
+ ...
+ 'foo'
+ >>> mock_foo.assert_called_once_with(foo)
+
+If we don't use `autospec=True` then the unbound method is patched out
+with a Mock instance instead, and isn't called with `self`.
+
+
+Checking multiple calls with mock
+=================================
+
+mock has a nice API for making assertions about how your mock objects are used.
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.foo_bar.return_value = None
+ >>> mock.foo_bar('baz', spam='eggs')
+ >>> mock.foo_bar.assert_called_with('baz', spam='eggs')
+
+If your mock is only being called once you can use the
+:meth:`assert_called_once_with` method that also asserts that the
+:attr:`call_count` is one.
+
+.. doctest::
+
+ >>> mock.foo_bar.assert_called_once_with('baz', spam='eggs')
+ >>> mock.foo_bar()
+ >>> mock.foo_bar.assert_called_once_with('baz', spam='eggs')
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected to be called once. Called 2 times.
+
+Both `assert_called_with` and `assert_called_once_with` make assertions about
+the *most recent* call. If your mock is going to be called several times, and
+you want to make assertions about *all* those calls you can use
+:attr:`~Mock.call_args_list`:
+
+.. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock(1, 2, 3)
+ >>> mock(4, 5, 6)
+ >>> mock()
+ >>> mock.call_args_list
+ [call(1, 2, 3), call(4, 5, 6), call()]
+
+The :data:`call` helper makes it easy to make assertions about these calls. You
+can build up a list of expected calls and compare it to `call_args_list`. This
+looks remarkably similar to the repr of the `call_args_list`:
+
+.. doctest::
+
+ >>> expected = [call(1, 2, 3), call(4, 5, 6), call()]
+ >>> mock.call_args_list == expected
+ True
+
+
+Coping with mutable arguments
+=============================
+
+Another situation that is rare, but can bite you, is when your mock is called with
+mutable arguments. `call_args` and `call_args_list` store *references* to the
+arguments. If the arguments are mutated by the code under test then you can no
+longer make assertions about what the values were when the mock was called.
+
+Here's some example code that shows the problem. Imagine the following functions
+defined in 'mymodule'::
+
+ def frob(val):
+ pass
+
+ def grob(val):
+ "First frob and then clear val"
+ frob(val)
+ val.clear()
+
+When we try to test that `grob` calls `frob` with the correct argument look
+what happens:
+
+.. doctest::
+
+ >>> with patch('mymodule.frob') as mock_frob:
+ ... val = set([6])
+ ... mymodule.grob(val)
+ ...
+ >>> val
+ set([])
+ >>> mock_frob.assert_called_with(set([6]))
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected: ((set([6]),), {})
+ Called with: ((set([]),), {})
+
+One possibility would be for mock to copy the arguments you pass in. This
+could then cause problems if you do assertions that rely on object identity
+for equality.
+
+Here's one solution that uses the :attr:`side_effect`
+functionality. If you provide a `side_effect` function for a mock then
+`side_effect` will be called with the same args as the mock. This gives us an
+opportunity to copy the arguments and store them for later assertions. In this
+example I'm using *another* mock to store the arguments so that I can use the
+mock methods for doing the assertion. Again a helper function sets this up for
+me.
+
+.. doctest::
+
+ >>> from copy import deepcopy
+ >>> from mock import Mock, patch, DEFAULT
+ >>> def copy_call_args(mock):
+ ... new_mock = Mock()
+ ... def side_effect(*args, **kwargs):
+ ... args = deepcopy(args)
+ ... kwargs = deepcopy(kwargs)
+ ... new_mock(*args, **kwargs)
+ ... return DEFAULT
+ ... mock.side_effect = side_effect
+ ... return new_mock
+ ...
+ >>> with patch('mymodule.frob') as mock_frob:
+ ... new_mock = copy_call_args(mock_frob)
+ ... val = set([6])
+ ... mymodule.grob(val)
+ ...
+ >>> new_mock.assert_called_with(set([6]))
+ >>> new_mock.call_args
+ call(set([6]))
+
+`copy_call_args` is called with the mock that will be called. It returns a new
+mock that we do the assertion on. The `side_effect` function makes a copy of
+the args and calls our `new_mock` with the copy.
+
+.. note::
+
+ If your mock is only going to be used once there is an easier way of
+ checking arguments at the point they are called. You can simply do the
+ checking inside a `side_effect` function.
+
+ .. doctest::
+
+ >>> def side_effect(arg):
+ ... assert arg == set([6])
+ ...
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock(set([6]))
+ >>> mock(set())
+ Traceback (most recent call last):
+ ...
+ AssertionError
+
+An alternative approach is to create a subclass of `Mock` or `MagicMock` that
+copies (using `copy.deepcopy
+<http://docs.python.org/library/copy.html#copy.deepcopy>`_) the arguments.
+Here's an example implementation:
+
+.. doctest::
+
+ >>> from copy import deepcopy
+ >>> class CopyingMock(MagicMock):
+ ... def __call__(self, *args, **kwargs):
+ ... args = deepcopy(args)
+ ... kwargs = deepcopy(kwargs)
+ ... return super(CopyingMock, self).__call__(*args, **kwargs)
+ ...
+ >>> c = CopyingMock(return_value=None)
+ >>> arg = set()
+ >>> c(arg)
+ >>> arg.add(1)
+ >>> c.assert_called_with(set())
+ >>> c.assert_called_with(arg)
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected call: mock(set([1]))
+ Actual call: mock(set([]))
+ >>> c.foo
+ <CopyingMock name='mock.foo' id='...'>
+
+When you subclass `Mock` or `MagicMock`, all dynamically created attributes
+and the `return_value` will use your subclass automatically. That means all
+children of a `CopyingMock` will also have the type `CopyingMock`.
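+
+For example, building on the `CopyingMock` defined above:
+
+.. doctest::
+
+    >>> c = CopyingMock()
+    >>> isinstance(c.child_attribute, CopyingMock)
+    True
+    >>> isinstance(c(), CopyingMock)
+    True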
+
+
+Raising exceptions on attribute access
+======================================
+
+You can use :class:`PropertyMock` to mimic the behaviour of properties. This
+includes raising exceptions when an attribute is accessed.
+
+Here's an example raising a `ValueError` when the 'foo' attribute is accessed:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> p = PropertyMock(side_effect=ValueError)
+ >>> type(m).foo = p
+ >>> m.foo
+ Traceback (most recent call last):
+ ....
+ ValueError
+
+Because every mock object has its own type, a new subclass of whichever mock
+class you're using, all mock objects are isolated from each other. You can
+safely attach properties (or other descriptors or whatever you want in fact)
+to `type(mock)` without affecting other mock objects.
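+
+For example, attaching a property to one mock's type leaves other mocks
+untouched (a minimal sketch):
+
+.. doctest::
+
+    >>> m1 = MagicMock()
+    >>> m2 = MagicMock()
+    >>> type(m1).foo = PropertyMock(return_value=3)
+    >>> m1.foo
+    3
+    >>> m2.foo
+    <MagicMock name='mock.foo' id='...'>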
+
+
+Multiple calls with different effects
+=====================================
+
+.. note::
+
+ In mock 1.0 the handling of iterable `side_effect` was changed. Any
+ exceptions in the iterable will be raised instead of returned.
+
+Handling code that needs to behave differently on subsequent calls during the
+test can be tricky. For example you may have a function that needs to raise
+an exception the first time it is called but return a response on the second
+call (testing retry behaviour).
+
+One approach is to use a :attr:`side_effect` function that replaces itself. The
+first time it is called the `side_effect` sets a new `side_effect` that will
+be used for the second call. It then raises an exception:
+
+.. doctest::
+
+ >>> def side_effect(*args):
+ ... def second_call(*args):
+ ... return 'response'
+ ... mock.side_effect = second_call
+ ... raise Exception('boom')
+ ...
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock('first')
+ Traceback (most recent call last):
+ ...
+ Exception: boom
+ >>> mock('second')
+ 'response'
+ >>> mock.assert_called_with('second')
+
+Another perfectly valid way would be to pop return values from a list. If the
+return value is an exception, raise it instead of returning it:
+
+.. doctest::
+
+ >>> returns = [Exception('boom'), 'response']
+ >>> def side_effect(*args):
+ ... result = returns.pop(0)
+ ... if isinstance(result, Exception):
+ ... raise result
+ ... return result
+ ...
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock('first')
+ Traceback (most recent call last):
+ ...
+ Exception: boom
+ >>> mock('second')
+ 'response'
+ >>> mock.assert_called_with('second')
+
+Which approach you prefer is a matter of taste. The first approach is a line
+shorter, but the second is arguably more readable.
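+
+As the note above says, since mock 1.0 an exception in an iterable
+`side_effect` is raised rather than returned, so the list of results can also
+be passed straight to `side_effect`:
+
+.. doctest::
+
+    >>> mock = Mock(side_effect=[Exception('boom'), 'response'])
+    >>> mock('first')
+    Traceback (most recent call last):
+      ...
+    Exception: boom
+    >>> mock('second')
+    'response'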
+
+
+Nesting Patches
+===============
+
+Using patch as a context manager is nice, but if you do multiple patches you
+can end up with nested with statements indenting further and further to the
+right:
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ...
+ ... def test_foo(self):
+ ... with patch('mymodule.Foo') as mock_foo:
+ ... with patch('mymodule.Bar') as mock_bar:
+ ... with patch('mymodule.Spam') as mock_spam:
+ ... assert mymodule.Foo is mock_foo
+ ... assert mymodule.Bar is mock_bar
+ ... assert mymodule.Spam is mock_spam
+ ...
+ >>> original = mymodule.Foo
+ >>> MyTest('test_foo').test_foo()
+ >>> assert mymodule.Foo is original
+
+With unittest2_ `cleanup` functions and the :ref:`start-and-stop` we can
+achieve the same effect without the nested indentation. A simple helper
+method, `create_patch`, puts the patch in place and returns the created mock
+for us:
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ...
+ ... def create_patch(self, name):
+ ... patcher = patch(name)
+ ... thing = patcher.start()
+ ... self.addCleanup(patcher.stop)
+ ... return thing
+ ...
+ ... def test_foo(self):
+ ... mock_foo = self.create_patch('mymodule.Foo')
+ ... mock_bar = self.create_patch('mymodule.Bar')
+ ... mock_spam = self.create_patch('mymodule.Spam')
+ ...
+ ... assert mymodule.Foo is mock_foo
+ ... assert mymodule.Bar is mock_bar
+ ... assert mymodule.Spam is mock_spam
+ ...
+ >>> original = mymodule.Foo
+ >>> MyTest('test_foo').run()
+ >>> assert mymodule.Foo is original
+
+
+Mocking a dictionary with MagicMock
+===================================
+
+You may want to mock a dictionary, or other container object, recording all
+access to it whilst having it still behave like a dictionary.
+
+We can do this with :class:`MagicMock`, which will behave like a dictionary,
+and using :data:`~Mock.side_effect` to delegate dictionary access to a real
+underlying dictionary that is under our control.
+
+When the `__getitem__` and `__setitem__` methods of our `MagicMock` are called
+(normal dictionary access) then `side_effect` is called with the key (and in
+the case of `__setitem__` the value too). We can also control what is returned.
+
+After the `MagicMock` has been used we can use attributes like
+:data:`~Mock.call_args_list` to assert about how the dictionary was used:
+
+.. doctest::
+
+ >>> my_dict = {'a': 1, 'b': 2, 'c': 3}
+ >>> def getitem(name):
+ ... return my_dict[name]
+ ...
+ >>> def setitem(name, val):
+ ... my_dict[name] = val
+ ...
+ >>> mock = MagicMock()
+ >>> mock.__getitem__.side_effect = getitem
+ >>> mock.__setitem__.side_effect = setitem
+
+.. note::
+
+ An alternative to using `MagicMock` is to use `Mock` and *only* provide
+ the magic methods you specifically want:
+
+ .. doctest::
+
+ >>> mock = Mock()
+        >>> mock.__setitem__ = Mock(side_effect=setitem)
+        >>> mock.__getitem__ = Mock(side_effect=getitem)
+
+ A *third* option is to use `MagicMock` but passing in `dict` as the `spec`
+ (or `spec_set`) argument so that the `MagicMock` created only has
+ dictionary magic methods available:
+
+ .. doctest::
+
+ >>> mock = MagicMock(spec_set=dict)
+ >>> mock.__getitem__.side_effect = getitem
+ >>> mock.__setitem__.side_effect = setitem
+
+With these side effect functions in place, the `mock` will behave like a normal
+dictionary while recording the access. It even raises a `KeyError` if you try
+to access a key that doesn't exist.
+
+.. doctest::
+
+ >>> mock['a']
+ 1
+ >>> mock['c']
+ 3
+ >>> mock['d']
+ Traceback (most recent call last):
+ ...
+ KeyError: 'd'
+ >>> mock['b'] = 'fish'
+ >>> mock['d'] = 'eggs'
+ >>> mock['b']
+ 'fish'
+ >>> mock['d']
+ 'eggs'
+
+After it has been used you can make assertions about the access using the normal
+mock methods and attributes:
+
+.. doctest::
+
+ >>> mock.__getitem__.call_args_list
+ [call('a'), call('c'), call('d'), call('b'), call('d')]
+ >>> mock.__setitem__.call_args_list
+ [call('b', 'fish'), call('d', 'eggs')]
+ >>> my_dict
+ {'a': 1, 'c': 3, 'b': 'fish', 'd': 'eggs'}
+
+
+Mock subclasses and their attributes
+====================================
+
+There are various reasons why you might want to subclass `Mock`. One reason
+might be to add helper methods. Here's a silly example:
+
+.. doctest::
+
+ >>> class MyMock(MagicMock):
+ ... def has_been_called(self):
+ ... return self.called
+ ...
+ >>> mymock = MyMock(return_value=None)
+ >>> mymock
+ <MyMock id='...'>
+ >>> mymock.has_been_called()
+ False
+ >>> mymock()
+ >>> mymock.has_been_called()
+ True
+
+The standard behaviour for `Mock` instances is that attributes and the return
+value mocks are of the same type as the mock they are accessed on. This ensures
+that `Mock` attributes are `Mocks` and `MagicMock` attributes are `MagicMocks`
+[#]_. So if you're subclassing to add helper methods then they'll also be
+available on the attributes and return value mock of instances of your
+subclass.
+
+.. doctest::
+
+ >>> mymock.foo
+ <MyMock name='mock.foo' id='...'>
+ >>> mymock.foo.has_been_called()
+ False
+ >>> mymock.foo()
+ <MyMock name='mock.foo()' id='...'>
+ >>> mymock.foo.has_been_called()
+ True
+
+Sometimes this is inconvenient. For example, `one user
+<https://code.google.com/p/mock/issues/detail?id=105>`_ is subclassing mock to
+create a `Twisted adaptor
+<http://twistedmatrix.com/documents/11.0.0/api/twisted.python.components.html>`_.
+Having this applied to attributes too actually causes errors.
+
+`Mock` (in all its flavours) uses a method called `_get_child_mock` to create
+these "sub-mocks" for attributes and return values. You can prevent your
+subclass being used for attributes by overriding this method. The signature is
+that it takes arbitrary keyword arguments (`**kwargs`) which are then passed
+onto the mock constructor:
+
+.. doctest::
+
+ >>> class Subclass(MagicMock):
+ ... def _get_child_mock(self, **kwargs):
+ ... return MagicMock(**kwargs)
+ ...
+ >>> mymock = Subclass()
+ >>> mymock.foo
+ <MagicMock name='mock.foo' id='...'>
+ >>> assert isinstance(mymock, Subclass)
+ >>> assert not isinstance(mymock.foo, Subclass)
+ >>> assert not isinstance(mymock(), Subclass)
+
+.. [#] An exception to this rule are the non-callable mocks. Attributes use the
+ callable variant because otherwise non-callable mocks couldn't have callable
+ methods.
+
+
+Mocking imports with patch.dict
+===============================
+
+One situation where mocking can be hard is where you have a local import inside
+a function. These are harder to mock because they aren't using an object from
+the module namespace that we can patch out.
+
+Generally local imports are to be avoided. They are sometimes done to prevent
+circular dependencies, for which there is *usually* a much better way to solve
+the problem (refactor the code) or to prevent "up front costs" by delaying the
+import. This can also be solved in better ways than an unconditional local
+import (store the module as a class or module attribute and only do the import
+on first use).
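+
+For example, the "import on first use" alternative might look something like
+this (an illustrative sketch, not code from any real module)::
+
+    class Thing(object):
+        json = None
+
+        def method(self):
+            if Thing.json is None:
+                import json
+                Thing.json = json
+            return Thing.json.dumps({'answer': 42})
+
+A test can then replace `Thing.json` with a mock directly, without touching
+the import machinery at all.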
+
+That aside, there is a way to use `mock` to affect the results of an import.
+Importing fetches an *object* from the `sys.modules` dictionary. Note that it
+fetches an *object*, which need not be a module. Importing a module for the
+first time results in a module object being put in `sys.modules`, so usually
+when you import something you get a module back. This need not be the case
+however.
+
+This means you can use :func:`patch.dict` to *temporarily* put a mock in place
+in `sys.modules`. Any imports whilst this patch is active will fetch the mock.
+When the patch is complete (the decorated function exits, the with statement
+body is complete or `patcher.stop()` is called) then whatever was there
+previously will be restored safely.
+
+Here's an example that mocks out the 'fooble' module.
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> with patch.dict('sys.modules', {'fooble': mock}):
+ ... import fooble
+ ... fooble.blob()
+ ...
+ <Mock name='mock.blob()' id='...'>
+ >>> assert 'fooble' not in sys.modules
+ >>> mock.blob.assert_called_once_with()
+
+As you can see the `import fooble` succeeds, but on exit there is no 'fooble'
+left in `sys.modules`.
+
+This also works for the `from module import name` form:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> with patch.dict('sys.modules', {'fooble': mock}):
+ ... from fooble import blob
+ ... blob.blip()
+ ...
+ <Mock name='mock.blob.blip()' id='...'>
+ >>> mock.blob.blip.assert_called_once_with()
+
+With slightly more work you can also mock package imports:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> modules = {'package': mock, 'package.module': mock.module}
+ >>> with patch.dict('sys.modules', modules):
+ ... from package.module import fooble
+ ... fooble()
+ ...
+ <Mock name='mock.module.fooble()' id='...'>
+ >>> mock.module.fooble.assert_called_once_with()
+
+
+Tracking order of calls and less verbose call assertions
+========================================================
+
+The :class:`Mock` class allows you to track the *order* of method calls on
+your mock objects through the :attr:`~Mock.method_calls` attribute. This
+doesn't allow you to track the order of calls between separate mock objects;
+however, we can use :attr:`~Mock.mock_calls` to achieve the same effect.
+
+Because mocks track calls to child mocks in `mock_calls`, and accessing an
+arbitrary attribute of a mock creates a child mock, we can create our separate
+mocks from a parent one. Calls to those child mocks will then all be recorded,
+in order, in the `mock_calls` of the parent:
+
+.. doctest::
+
+ >>> manager = Mock()
+ >>> mock_foo = manager.foo
+ >>> mock_bar = manager.bar
+
+ >>> mock_foo.something()
+ <Mock name='mock.foo.something()' id='...'>
+ >>> mock_bar.other.thing()
+ <Mock name='mock.bar.other.thing()' id='...'>
+
+ >>> manager.mock_calls
+ [call.foo.something(), call.bar.other.thing()]
+
+We can then assert about the calls, including the order, by comparing with
+the `mock_calls` attribute on the manager mock:
+
+.. doctest::
+
+ >>> expected_calls = [call.foo.something(), call.bar.other.thing()]
+ >>> manager.mock_calls == expected_calls
+ True
+
+If `patch` is creating, and putting in place, your mocks then you can attach
+them to a manager mock using the :meth:`~Mock.attach_mock` method. After
+attaching calls will be recorded in `mock_calls` of the manager.
+
+.. doctest::
+
+ >>> manager = MagicMock()
+ >>> with patch('mymodule.Class1') as MockClass1:
+ ... with patch('mymodule.Class2') as MockClass2:
+ ... manager.attach_mock(MockClass1, 'MockClass1')
+ ... manager.attach_mock(MockClass2, 'MockClass2')
+ ... MockClass1().foo()
+ ... MockClass2().bar()
+ ...
+ <MagicMock name='mock.MockClass1().foo()' id='...'>
+ <MagicMock name='mock.MockClass2().bar()' id='...'>
+ >>> manager.mock_calls
+ [call.MockClass1(),
+ call.MockClass1().foo(),
+ call.MockClass2(),
+ call.MockClass2().bar()]
+
+If many calls have been made, but you're only interested in a particular
+sequence of them then an alternative is to use the
+:meth:`~Mock.assert_has_calls` method. This takes a list of calls (constructed
+with the :data:`call` object). If that sequence of calls is in
+:attr:`~Mock.mock_calls` then the assert succeeds.
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> m().foo().bar().baz()
+ <MagicMock name='mock().foo().bar().baz()' id='...'>
+ >>> m.one().two().three()
+ <MagicMock name='mock.one().two().three()' id='...'>
+ >>> calls = call.one().two().three().call_list()
+ >>> m.assert_has_calls(calls)
+
+Even though the chained call `m.one().two().three()` isn't the only call that
+has been made to the mock, the assert still succeeds.
+
+Sometimes a mock may have several calls made to it, and you are only interested
+in asserting about *some* of those calls. You may not even care about the
+order. In this case you can pass `any_order=True` to `assert_has_calls`:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> m(1), m.two(2, 3), m.seven(7), m.fifty('50')
+ (...)
+ >>> calls = [call.fifty('50'), call(1), call.seven(7)]
+ >>> m.assert_has_calls(calls, any_order=True)
+
+
+More complex argument matching
+==============================
+
+Using the same basic concept as `ANY` we can implement matchers to do more
+complex assertions on objects used as arguments to mocks.
+
+Suppose we expect some object to be passed to a mock that by default
+compares equal based on object identity (which is the Python default for user
+defined classes). To use :meth:`~Mock.assert_called_with` we would need to pass
+in the exact same object. If we are only interested in some of the attributes
+of this object then we can create a matcher that will check these attributes
+for us.
+
+You can see in this example how a 'standard' call to `assert_called_with` isn't
+sufficient:
+
+.. doctest::
+
+ >>> class Foo(object):
+ ... def __init__(self, a, b):
+ ... self.a, self.b = a, b
+ ...
+ >>> mock = Mock(return_value=None)
+ >>> mock(Foo(1, 2))
+ >>> mock.assert_called_with(Foo(1, 2))
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected: call(<__main__.Foo object at 0x...>)
+ Actual call: call(<__main__.Foo object at 0x...>)
+
+A comparison function for our `Foo` class might look something like this:
+
+.. doctest::
+
+ >>> def compare(self, other):
+ ... if not type(self) == type(other):
+ ... return False
+ ... if self.a != other.a:
+ ... return False
+ ... if self.b != other.b:
+ ... return False
+ ... return True
+ ...
+
+And a matcher object that can use comparison functions like this for its
+equality operation would look something like this:
+
+.. doctest::
+
+ >>> class Matcher(object):
+ ... def __init__(self, compare, some_obj):
+ ... self.compare = compare
+ ... self.some_obj = some_obj
+ ... def __eq__(self, other):
+ ... return self.compare(self.some_obj, other)
+ ...
+
+Putting all this together:
+
+.. doctest::
+
+ >>> match_foo = Matcher(compare, Foo(1, 2))
+ >>> mock.assert_called_with(match_foo)
+
+The `Matcher` is instantiated with our compare function and the `Foo` object
+we want to compare against. In `assert_called_with` the `Matcher` equality
+method will be called, which compares the object the mock was called with
+against the one we created our matcher with. If they match then
+`assert_called_with` passes, and if they don't an `AssertionError` is raised:
+
+.. doctest::
+
+ >>> match_wrong = Matcher(compare, Foo(3, 4))
+ >>> mock.assert_called_with(match_wrong)
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected: ((<Matcher object at 0x...>,), {})
+ Called with: ((<Foo object at 0x...>,), {})
+
+With a bit of tweaking you could have the comparison function raise the
+`AssertionError` directly and provide a more useful failure message.
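+
+For example, a version of the comparison function rewritten along those lines
+(purely illustrative)::
+
+    def compare(self, other):
+        if type(self) is not type(other):
+            raise AssertionError('types do not match')
+        if self.a != other.a:
+            raise AssertionError('a attributes differ: %r != %r' % (other.a, self.a))
+        if self.b != other.b:
+            raise AssertionError('b attributes differ: %r != %r' % (other.b, self.b))
+        return True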
+
+As of version 1.5, the Python testing library `PyHamcrest
+<http://pypi.python.org/pypi/PyHamcrest>`_ provides similar functionality,
+that may be useful here, in the form of its equality matcher
+(`hamcrest.library.integration.match_equality
+<http://packages.python.org/PyHamcrest/integration.html#hamcrest.library.integration.match_equality>`_).
+
+
+Less verbose configuration of mock objects
+==========================================
+
+This recipe, for easier configuration of mock objects, is now part of `Mock`.
+See the :meth:`~Mock.configure_mock` method.
+
+
+Matching any argument in assertions
+===================================
+
+This example is now built in to mock. See :data:`ANY`.
+
+
+Mocking Properties
+==================
+
+This example is now built in to mock. See :class:`PropertyMock`.
+
+
+Mocking open
+============
+
+This example is now built in to mock. See :func:`mock_open`.
+
+
+Mocks without some attributes
+=============================
+
+This example is now built in to mock. See :ref:`deleting-attributes`.
diff --git a/third_party/python/mock-1.0.0/html/_sources/getting-started.txt b/third_party/python/mock-1.0.0/html/_sources/getting-started.txt
new file mode 100644
index 0000000000..1b5d289ebe
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_sources/getting-started.txt
@@ -0,0 +1,479 @@
+===========================
+ Getting Started with Mock
+===========================
+
+.. _getting-started:
+
+.. index:: Getting Started
+
+.. testsetup::
+
+ class SomeClass(object):
+ static_method = None
+ class_method = None
+ attribute = None
+
+ sys.modules['package'] = package = Mock(name='package')
+ sys.modules['package.module'] = module = package.module
+ sys.modules['module'] = package.module
+
+
+Using Mock
+==========
+
+Mock Patching Methods
+---------------------
+
+Common uses for :class:`Mock` objects include:
+
+* Patching methods
+* Recording method calls on objects
+
+You might want to replace a method on an object to check that
+it is called with the correct arguments by another part of the system:
+
+.. doctest::
+
+ >>> real = SomeClass()
+ >>> real.method = MagicMock(name='method')
+ >>> real.method(3, 4, 5, key='value')
+ <MagicMock name='method()' id='...'>
+
+Once our mock has been used (`real.method` in this example) it has methods
+and attributes that allow you to make assertions about how it has been used.
+
+.. note::
+
+ In most of these examples the :class:`Mock` and :class:`MagicMock` classes
+    are interchangeable. As `MagicMock` is the more capable class, it makes
+ a sensible one to use by default.
+
+Once the mock has been called its :attr:`~Mock.called` attribute is set to
+`True`. More importantly we can use the :meth:`~Mock.assert_called_with` or
+:meth:`~Mock.assert_called_once_with` method to check that it was called with
+the correct arguments.
+
+This example tests that calling `ProductionClass().method` results in a call to
+the `something` method:
+
+.. doctest::
+
+ >>> from mock import MagicMock
+ >>> class ProductionClass(object):
+ ... def method(self):
+ ... self.something(1, 2, 3)
+ ... def something(self, a, b, c):
+ ... pass
+ ...
+ >>> real = ProductionClass()
+ >>> real.something = MagicMock()
+ >>> real.method()
+ >>> real.something.assert_called_once_with(1, 2, 3)
+
+
+
+Mock for Method Calls on an Object
+----------------------------------
+
+In the last example we patched a method directly on an object to check that it
+was called correctly. Another common use case is to pass an object into a
+method (or some part of the system under test) and then check that it is used
+in the correct way.
+
+The simple `ProductionClass` below has a `closer` method. If it is called with
+an object then it calls `close` on it.
+
+.. doctest::
+
+ >>> class ProductionClass(object):
+ ... def closer(self, something):
+ ... something.close()
+ ...
+
+So to test it we need to pass in an object with a `close` method and check
+that it was called correctly.
+
+.. doctest::
+
+ >>> real = ProductionClass()
+ >>> mock = Mock()
+ >>> real.closer(mock)
+ >>> mock.close.assert_called_with()
+
+We don't have to do any work to provide the 'close' method on our mock.
+Accessing close creates it. So, if 'close' hasn't already been called then
+accessing it in the test will create it, but :meth:`~Mock.assert_called_with`
+will raise a failure exception.
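+
+For instance, merely accessing an attribute creates a child mock that has not
+yet been called:
+
+.. doctest::
+
+    >>> mock = Mock()
+    >>> mock.close.called
+    False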
+
+
+Mocking Classes
+---------------
+
+A common use case is to mock out classes instantiated by your code under test.
+When you patch a class, then that class is replaced with a mock. Instances
+are created by *calling the class*. This means you access the "mock instance"
+by looking at the return value of the mocked class.
+
+In the example below we have a function `some_function` that instantiates `Foo`
+and calls a method on it. The call to `patch` replaces the class `Foo` with a
+mock. The `Foo` instance is the result of calling the mock, so it is configured
+by modifying the mock :attr:`~Mock.return_value`.
+
+.. doctest::
+
+ >>> def some_function():
+ ... instance = module.Foo()
+ ... return instance.method()
+ ...
+ >>> with patch('module.Foo') as mock:
+ ... instance = mock.return_value
+ ... instance.method.return_value = 'the result'
+ ... result = some_function()
+ ... assert result == 'the result'
+
+
+Naming your mocks
+-----------------
+
+It can be useful to give your mocks a name. The name is shown in the repr of
+the mock and can be helpful when the mock appears in test failure messages. The
+name is also propagated to attributes or methods of the mock:
+
+.. doctest::
+
+ >>> mock = MagicMock(name='foo')
+ >>> mock
+ <MagicMock name='foo' id='...'>
+ >>> mock.method
+ <MagicMock name='foo.method' id='...'>
+
+
+Tracking all Calls
+------------------
+
+Often you want to track more than a single call to a method. The
+:attr:`~Mock.mock_calls` attribute records all calls
+to child attributes of the mock, and also to their children.
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock.method()
+ <MagicMock name='mock.method()' id='...'>
+ >>> mock.attribute.method(10, x=53)
+ <MagicMock name='mock.attribute.method()' id='...'>
+ >>> mock.mock_calls
+ [call.method(), call.attribute.method(10, x=53)]
+
+If you make an assertion about `mock_calls` and any unexpected methods
+have been called, then the assertion will fail. This is useful because as well
+as asserting that the calls you expected have been made, you are also checking
+that they were made in the right order and with no additional calls.
+
+You use the :data:`call` object to construct lists for comparing with
+`mock_calls`:
+
+.. doctest::
+
+ >>> expected = [call.method(), call.attribute.method(10, x=53)]
+ >>> mock.mock_calls == expected
+ True
+
+
+Setting Return Values and Attributes
+------------------------------------
+
+Setting the return values on a mock object is trivially easy:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.return_value = 3
+ >>> mock()
+ 3
+
+Of course you can do the same for methods on the mock:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.method.return_value = 3
+ >>> mock.method()
+ 3
+
+The return value can also be set in the constructor:
+
+.. doctest::
+
+ >>> mock = Mock(return_value=3)
+ >>> mock()
+ 3
+
+If you need an attribute setting on your mock, just do it:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.x = 3
+ >>> mock.x
+ 3
+
+Sometimes you want to mock up a more complex situation, for example
+`mock.connection.cursor().execute("SELECT 1")`. If we want this call to
+return a list, then we have to configure the result of the nested call.
+
+We can use :data:`call` to construct the set of calls in a "chained call" like
+this for easy assertion afterwards:
+
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> cursor = mock.connection.cursor.return_value
+ >>> cursor.execute.return_value = ['foo']
+ >>> mock.connection.cursor().execute("SELECT 1")
+ ['foo']
+ >>> expected = call.connection.cursor().execute("SELECT 1").call_list()
+ >>> mock.mock_calls
+ [call.connection.cursor(), call.connection.cursor().execute('SELECT 1')]
+ >>> mock.mock_calls == expected
+ True
+
+It is the call to `.call_list()` that turns our call object into a list of
+calls representing the chained calls.
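+
+You can see the list it generates by looking at the result of `call_list()`
+directly:
+
+.. doctest::
+
+    >>> call.connection.cursor().execute("SELECT 1").call_list()
+    [call.connection.cursor(), call.connection.cursor().execute('SELECT 1')]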
+
+
+
+Raising exceptions with mocks
+-----------------------------
+
+A useful attribute is :attr:`~Mock.side_effect`. If you set this to an
+exception class or instance then the exception will be raised when the mock
+is called.
+
+.. doctest::
+
+ >>> mock = Mock(side_effect=Exception('Boom!'))
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ Exception: Boom!
+
+
+Side effect functions and iterables
+-----------------------------------
+
+`side_effect` can also be set to a function or an iterable. The use case for
+`side_effect` as an iterable is where your mock is going to be called several
+times, and you want each call to return a different value. When you set
+`side_effect` to an iterable every call to the mock returns the next value
+from the iterable:
+
+.. doctest::
+
+ >>> mock = MagicMock(side_effect=[4, 5, 6])
+ >>> mock()
+ 4
+ >>> mock()
+ 5
+ >>> mock()
+ 6
+
+
+For more advanced use cases, like dynamically varying the return values
+depending on what the mock is called with, `side_effect` can be a function.
+The function will be called with the same arguments as the mock. Whatever the
+function returns is what the call returns:
+
+.. doctest::
+
+ >>> vals = {(1, 2): 1, (2, 3): 2}
+ >>> def side_effect(*args):
+ ... return vals[args]
+ ...
+ >>> mock = MagicMock(side_effect=side_effect)
+ >>> mock(1, 2)
+ 1
+ >>> mock(2, 3)
+ 2
+
+
+Creating a Mock from an Existing Object
+---------------------------------------
+
+One problem with overuse of mocking is that it couples your tests to the
+implementation of your mocks rather than your real code. Suppose you have a
+class that implements `some_method`. In a test for another class, you
+provide a mock of this object that *also* provides `some_method`. If later
+you refactor the first class so that it no longer has `some_method`, then
+your tests will continue to pass even though your code is now broken!
+
+`Mock` allows you to provide an object as a specification for the mock,
+using the `spec` keyword argument. Accessing methods / attributes on the
+mock that don't exist on your specification object will immediately raise an
+attribute error. If you change the implementation of your specification, then
+tests that use that class will start failing immediately without you having to
+instantiate the class in those tests.
+
+.. doctest::
+
+ >>> mock = Mock(spec=SomeClass)
+ >>> mock.old_method()
+ Traceback (most recent call last):
+ ...
+ AttributeError: object has no attribute 'old_method'
+
+If you want a stronger form of specification that prevents the setting
+of arbitrary attributes as well as the getting of them then you can use
+`spec_set` instead of `spec`.
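+
+A quick sketch of the difference, using the same `SomeClass`::
+
+    >>> mock = Mock(spec_set=SomeClass)
+    >>> mock.attribute = 3          # fine: 'attribute' exists on SomeClass
+    >>> mock.no_such_attribute = 3  # raises AttributeError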
+
+
+
+Patch Decorators
+================
+
+.. note::
+
+ With `patch` it matters that you patch objects in the namespace where they
+ are looked up. This is normally straightforward, but for a quick guide
+ read :ref:`where to patch <where-to-patch>`.
+
+
+A common need in tests is to patch a class attribute or a module attribute,
+for example patching a builtin or patching a class in a module to test that it
+is instantiated. Modules and classes are effectively global, so patching on
+them has to be undone after the test or the patch will persist into other
+tests and cause hard to diagnose problems.
+
+mock provides three convenient decorators for this: `patch`, `patch.object` and
+`patch.dict`. `patch` takes a single string, of the form
+`package.module.Class.attribute`, to specify the attribute you are patching. It
+also optionally takes a value that you want the attribute (or class or
+whatever) to be replaced with. `patch.object` takes an object and the name of
+the attribute you would like patched, plus optionally the value to patch it
+with.
+
+`patch.object`:
+
+.. doctest::
+
+ >>> original = SomeClass.attribute
+ >>> @patch.object(SomeClass, 'attribute', sentinel.attribute)
+ ... def test():
+ ... assert SomeClass.attribute == sentinel.attribute
+ ...
+ >>> test()
+ >>> assert SomeClass.attribute == original
+
+ >>> @patch('package.module.attribute', sentinel.attribute)
+ ... def test():
+ ... from package.module import attribute
+ ... assert attribute is sentinel.attribute
+ ...
+ >>> test()
+
+If you are patching a module (including `__builtin__`) then use `patch`
+instead of `patch.object`:
+
+.. doctest::
+
+    >>> mock = MagicMock(return_value=sentinel.file_handle)
+ >>> with patch('__builtin__.open', mock):
+ ... handle = open('filename', 'r')
+ ...
+ >>> mock.assert_called_with('filename', 'r')
+ >>> assert handle == sentinel.file_handle, "incorrect file handle returned"
+
+The module name can be 'dotted', in the form `package.module` if needed:
+
+.. doctest::
+
+ >>> @patch('package.module.ClassName.attribute', sentinel.attribute)
+ ... def test():
+ ... from package.module import ClassName
+ ... assert ClassName.attribute == sentinel.attribute
+ ...
+ >>> test()
+
+A nice pattern is to actually decorate test methods themselves:
+
+.. doctest::
+
+ >>> class MyTest(unittest2.TestCase):
+ ... @patch.object(SomeClass, 'attribute', sentinel.attribute)
+ ... def test_something(self):
+ ... self.assertEqual(SomeClass.attribute, sentinel.attribute)
+ ...
+ >>> original = SomeClass.attribute
+ >>> MyTest('test_something').test_something()
+ >>> assert SomeClass.attribute == original
+
+If you want to patch with a Mock, you can use `patch` with only one argument
+(or `patch.object` with two arguments). The mock will be created for you and
+passed into the test function / method:
+
+.. doctest::
+
+ >>> class MyTest(unittest2.TestCase):
+ ... @patch.object(SomeClass, 'static_method')
+ ... def test_something(self, mock_method):
+ ... SomeClass.static_method()
+ ... mock_method.assert_called_with()
+ ...
+ >>> MyTest('test_something').test_something()
+
+You can stack up multiple patch decorators using this pattern:
+
+.. doctest::
+
+ >>> class MyTest(unittest2.TestCase):
+ ... @patch('package.module.ClassName1')
+ ... @patch('package.module.ClassName2')
+ ... def test_something(self, MockClass2, MockClass1):
+ ... self.assertTrue(package.module.ClassName1 is MockClass1)
+ ... self.assertTrue(package.module.ClassName2 is MockClass2)
+ ...
+ >>> MyTest('test_something').test_something()
+
+When you nest patch decorators the mocks are passed in to the decorated
+function in the same order they are applied (the normal *Python* order in which
+decorators are applied). This means from the bottom up, so in the example
+above the mock for `package.module.ClassName2` is passed in first.
+
+There is also :func:`patch.dict` for setting values in a dictionary just
+during a scope and restoring the dictionary to its original state when the test
+ends:
+
+.. doctest::
+
+ >>> foo = {'key': 'value'}
+ >>> original = foo.copy()
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == original
+
+`patch`, `patch.object` and `patch.dict` can all be used as context managers.
+
+Where you use `patch` to create a mock for you, you can get a reference to the
+mock using the "as" form of the with statement:
+
+.. doctest::
+
+ >>> class ProductionClass(object):
+ ... def method(self):
+ ... pass
+ ...
+ >>> with patch.object(ProductionClass, 'method') as mock_method:
+ ... mock_method.return_value = None
+ ... real = ProductionClass()
+ ... real.method(1, 2, 3)
+ ...
+ >>> mock_method.assert_called_with(1, 2, 3)
+
+
+As an alternative `patch`, `patch.object` and `patch.dict` can be used as
+class decorators. When used in this way it is the same as applying the
+decorator individually to every method whose name starts with "test".
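+
+For example, a small sketch along the same lines as the method-decorator
+example above:
+
+.. doctest::
+
+    >>> @patch.object(SomeClass, 'attribute', sentinel.attribute)
+    ... class MyTest(unittest2.TestCase):
+    ...     def test_something(self):
+    ...         self.assertEqual(SomeClass.attribute, sentinel.attribute)
+    ...
+    >>> original = SomeClass.attribute
+    >>> MyTest('test_something').test_something()
+    >>> assert SomeClass.attribute == original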
+
+For some more advanced examples, see the :ref:`further-examples` page.
diff --git a/third_party/python/mock-1.0.0/html/_sources/index.txt b/third_party/python/mock-1.0.0/html/_sources/index.txt
new file mode 100644
index 0000000000..7e4a8daca6
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_sources/index.txt
@@ -0,0 +1,411 @@
+====================================
+ Mock - Mocking and Testing Library
+====================================
+
+.. currentmodule:: mock
+
+:Author: `Michael Foord
+ <http://www.voidspace.org.uk/python/weblog/index.shtml>`_
+:Version: |release|
+:Date: 2012/10/07
+:Homepage: `Mock Homepage`_
+:Download: `Mock on PyPI`_
+:Documentation: `PDF Documentation
+ <http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf>`_
+:License: `BSD License`_
+:Support: `Mailing list (testing-in-python@lists.idyll.org)
+ <http://lists.idyll.org/listinfo/testing-in-python>`_
+:Issue tracker: `Google code project
+ <http://code.google.com/p/mock/issues/list>`_
+
+.. _Mock Homepage: http://www.voidspace.org.uk/python/mock/
+.. _BSD License: http://www.voidspace.org.uk/python/license.shtml
+
+
+.. currentmodule:: mock
+
+.. module:: mock
+ :synopsis: Mock object and testing library.
+
+.. index:: introduction
+
+mock is a library for testing in Python. It allows you to replace parts of
+your system under test with mock objects and make assertions about how they
+have been used.
+
+mock is now part of the Python standard library, available as `unittest.mock
+<http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock>`_
+in Python 3.3 onwards.
+
+mock provides a core :class:`Mock` class removing the need to create a host
+of stubs throughout your test suite. After performing an action, you can make
+assertions about which methods / attributes were used and arguments they were
+called with. You can also specify return values and set needed attributes in
+the normal way.
+
+Additionally, mock provides a :func:`patch` decorator that handles patching
+module and class level attributes within the scope of a test, along with
+:const:`sentinel` for creating unique objects. See the `quick guide`_ for
+some examples of how to use :class:`Mock`, :class:`MagicMock` and
+:func:`patch`.
+
+Mock is very easy to use and is designed for use with
+`unittest <http://pypi.python.org/pypi/unittest2>`_. Mock is based on
+the 'action -> assertion' pattern instead of `'record -> replay'` used by many
+mocking frameworks.
+
+mock is tested on Python versions 2.4-2.7 and Python 3, plus the latest
+versions of Jython and PyPy.
+
+
+.. testsetup::
+
+ class ProductionClass(object):
+ def method(self, *args):
+ pass
+
+ module = sys.modules['module'] = ProductionClass
+ ProductionClass.ClassName1 = ProductionClass
+ ProductionClass.ClassName2 = ProductionClass
+
+
+
+API Documentation
+=================
+
+.. toctree::
+ :maxdepth: 2
+
+ mock
+ patch
+ helpers
+ sentinel
+ magicmock
+
+
+User Guide
+==========
+
+.. toctree::
+ :maxdepth: 2
+
+ getting-started
+ examples
+ compare
+ changelog
+
+
+.. index:: installing
+
+Installing
+==========
+
+The current version is |release|. Mock is stable and widely used. If you do
+find any bugs, or have suggestions for improvements / extensions
+then please contact us.
+
+* `mock on PyPI <http://pypi.python.org/pypi/mock>`_
+* `mock documentation as PDF
+ <http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf>`_
+* `Google Code Home & Mercurial Repository <http://code.google.com/p/mock/>`_
+
+.. index:: repository
+.. index:: hg
+
+You can checkout the latest development version from the Google Code Mercurial
+repository with the following command:
+
+ ``hg clone https://mock.googlecode.com/hg/ mock``
+
+
+.. index:: pip
+.. index:: easy_install
+.. index:: setuptools
+
+If you have pip, setuptools or distribute you can install mock with:
+
+ | ``easy_install -U mock``
+ | ``pip install -U mock``
+
+Alternatively you can download the mock distribution from PyPI and after
+unpacking run:
+
+ ``python setup.py install``
+
+
+Quick Guide
+===========
+
+:class:`Mock` and :class:`MagicMock` objects create all attributes and
+methods as you access them and store details of how they have been used. You
+can configure them to specify return values or limit what attributes are
+available, and then make assertions about how they have been used:
+
+.. doctest::
+
+ >>> from mock import MagicMock
+ >>> thing = ProductionClass()
+ >>> thing.method = MagicMock(return_value=3)
+ >>> thing.method(3, 4, 5, key='value')
+ 3
+ >>> thing.method.assert_called_with(3, 4, 5, key='value')
+
+:attr:`side_effect` allows you to perform side effects, including raising an
+exception when a mock is called:
+
+.. doctest::
+
+ >>> mock = Mock(side_effect=KeyError('foo'))
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ KeyError: 'foo'
+
+ >>> values = {'a': 1, 'b': 2, 'c': 3}
+ >>> def side_effect(arg):
+ ... return values[arg]
+ ...
+ >>> mock.side_effect = side_effect
+ >>> mock('a'), mock('b'), mock('c')
+ (1, 2, 3)
+ >>> mock.side_effect = [5, 4, 3, 2, 1]
+ >>> mock(), mock(), mock()
+ (5, 4, 3)
+
+Mock has many other ways you can configure it and control its behaviour. For
+example the `spec` argument configures the mock to take its specification
+from another object. Attempting to access attributes or methods on the mock
+that don't exist on the spec will fail with an `AttributeError`.
+
+The :func:`patch` decorator / context manager makes it easy to mock classes or
+objects in a module under test. The object you specify will be replaced with a
+mock (or other object) during the test and restored when the test ends:
+
+.. doctest::
+
+ >>> from mock import patch
+ >>> @patch('module.ClassName2')
+ ... @patch('module.ClassName1')
+ ... def test(MockClass1, MockClass2):
+ ... module.ClassName1()
+ ... module.ClassName2()
+ ... assert MockClass1 is module.ClassName1
+ ... assert MockClass2 is module.ClassName2
+ ... assert MockClass1.called
+ ... assert MockClass2.called
+ ...
+ >>> test()
+
+.. note::
+
+ When you nest patch decorators the mocks are passed in to the decorated
+ function in the same order they applied (the normal *python* order that
+ decorators are applied). This means from the bottom up, so in the example
+ above the mock for `module.ClassName1` is passed in first.
+
+ With `patch` it matters that you patch objects in the namespace where they
+ are looked up. This is normally straightforward, but for a quick guide
+ read :ref:`where to patch <where-to-patch>`.
+
+As well as a decorator `patch` can be used as a context manager in a with
+statement:
+
+.. doctest::
+
+ >>> with patch.object(ProductionClass, 'method', return_value=None) as mock_method:
+ ... thing = ProductionClass()
+ ... thing.method(1, 2, 3)
+ ...
+ >>> mock_method.assert_called_once_with(1, 2, 3)
+
+
+There is also :func:`patch.dict` for setting values in a dictionary just
+during a scope and restoring the dictionary to its original state when the test
+ends:
+
+.. doctest::
+
+ >>> foo = {'key': 'value'}
+ >>> original = foo.copy()
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == original
+
+Mock supports the mocking of Python :ref:`magic methods <magic-methods>`. The
+easiest way of using magic methods is with the :class:`MagicMock` class. It
+allows you to do things like:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock.__str__.return_value = 'foobarbaz'
+ >>> str(mock)
+ 'foobarbaz'
+ >>> mock.__str__.assert_called_with()
+
+Mock allows you to assign functions (or other Mock instances) to magic methods
+and they will be called appropriately. The `MagicMock` class is just a Mock
+variant that has all of the magic methods pre-created for you (well, all the
+useful ones anyway).
+
+The following is an example of using magic methods with the ordinary Mock
+class:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.__str__ = Mock(return_value='wheeeeee')
+ >>> str(mock)
+ 'wheeeeee'
+
+For ensuring that the mock objects in your tests have the same api as the
+objects they are replacing, you can use :ref:`auto-speccing <auto-speccing>`.
+Auto-speccing can be done through the `autospec` argument to patch, or the
+:func:`create_autospec` function. Auto-speccing creates mock objects that
+have the same attributes and methods as the objects they are replacing, and
+any functions and methods (including constructors) have the same call
+signature as the real object.
+
+This ensures that your mocks will fail in the same way as your production
+code if they are used incorrectly:
+
+.. doctest::
+
+ >>> from mock import create_autospec
+ >>> def function(a, b, c):
+ ... pass
+ ...
+ >>> mock_function = create_autospec(function, return_value='fishy')
+ >>> mock_function(1, 2, 3)
+ 'fishy'
+ >>> mock_function.assert_called_once_with(1, 2, 3)
+ >>> mock_function('wrong arguments')
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes exactly 3 arguments (1 given)
+
+`create_autospec` can also be used on classes, where it copies the signature of
+the `__init__` method, and on callable objects where it copies the signature of
+the `__call__` method.
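+
+For example, a brief sketch using a made-up `Thing` class:
+
+.. doctest::
+
+    >>> class Thing(object):
+    ...     def __init__(self, a, b):
+    ...         pass
+    ...
+    >>> MockThing = create_autospec(Thing)
+    >>> instance = MockThing(1, 2)
+    >>> MockThing.assert_called_once_with(1, 2)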
+
+
+.. index:: references
+.. index:: articles
+
+References
+==========
+
+Articles, blog entries and other stuff related to testing with Mock:
+
+* `Imposing a No DB Discipline on Django unit tests
+ <https://github.com/carljm/django-testing-slides/blob/master/models/30_no_database.md>`_
+* `mock-django: tools for mocking the Django ORM and models
+ <https://github.com/dcramer/mock-django>`_
+* `PyCon 2011 Video: Testing with mock <https://blip.tv/file/4881513>`_
+* `Mock objects in Python
+ <http://noopenblockers.com/2012/01/06/mock-objects-in-python/>`_
+* `Python: Injecting Mock Objects for Powerful Testing
+ <http://blueprintforge.com/blog/2012/01/08/python-injecting-mock-objects-for-powerful-testing/>`_
+* `Python Mock: How to assert a substring of logger output
+ <http://www.michaelpollmeier.com/python-mock-how-to-assert-a-substring-of-logger-output/>`_
+* `Mocking Django <http://www.mattjmorrison.com/2011/09/mocking-django.html>`_
+* `Mocking dates and other classes that can't be modified
+ <http://williamjohnbert.com/2011/07/how-to-unit-testing-in-django-with-mocking-and-patching/>`_
+* `Mock recipes <http://konryd.blogspot.com/2010/06/mock-recipies.html>`_
+* `Mockity mock mock - some love for the mock module
+ <http://konryd.blogspot.com/2010/05/mockity-mock-mock-some-love-for-mock.html>`_
+* `Coverage and Mock (with django)
+ <http://mattsnider.com/python/mock-and-coverage/>`_
+* `Python Unit Testing with Mock <http://www.insomnihack.com/?p=194>`_
+* `Getting started with Python Mock
+ <http://myadventuresincoding.wordpress.com/2011/02/26/python-python-mock-cheat-sheet/>`_
+* `Smart Parameter Checks with mock
+ <http://tobyho.com/2011/03/24/smart-parameter-checks-in/>`_
+* `Python mock testing techniques and tools
+ <http://agiletesting.blogspot.com/2009/07/python-mock-testing-techniques-and.html>`_
+* `How To Test Django Template Tags
+ <http://techblog.ironfroggy.com/2008/10/how-to-test.html>`_
+* `A presentation on Unit Testing with Mock
+ <http://pypap.blogspot.com/2008/10/newbie-nugget-unit-testing-with-mock.html>`_
+* `Mocking with Django and Google AppEngine
+ <http://michael-a-nelson.blogspot.com/2008/09/mocking-with-django-and-google-app.html>`_
+
+
+.. index:: tests
+.. index:: unittest2
+
+Tests
+=====
+
+Mock uses `unittest2 <http://pypi.python.org/pypi/unittest2>`_ for its own
+test suite. In order to run it, use the `unit2` script that comes with
+the `unittest2` module, on a checkout of the source repository:
+
+ `unit2 discover`
+
+If you have `setuptools <http://pypi.python.org/pypi/distribute>`_ as well as
+unittest2 you can run:
+
+ ``python setup.py test``
+
+On Python 3.2 you can use the ``unittest`` module from the standard library:
+
+ ``python3.2 -m unittest discover``
+
+.. index:: Python 3
+
+On Python 3 the tests for unicode are skipped as they are not relevant. On
+Python 2.4 tests that use the with statement are skipped, as the with statement
+is invalid syntax on Python 2.4.
+
+
+.. index:: older versions
+
+Older Versions
+==============
+
+Documentation for older versions of mock:
+
+* `mock 0.8 <http://www.voidspace.org.uk/python/mock/0.8/>`_
+* `mock 0.7 <http://www.voidspace.org.uk/python/mock/0.7/>`_
+* `mock 0.6 <http://www.voidspace.org.uk/python/mock/0.6.0/>`_
+
+Docs from the in-development version of `mock` can be found at
+`mock.readthedocs.org <http://mock.readthedocs.org>`_.
+
+
+Terminology
+===========
+
+Terminology for objects used to replace other ones can be confusing. Terms
+like double, fake, mock, stub, and spy are all used with varying meanings.
+
+In `classic mock terminology
+<http://xunitpatterns.com/Mocks,%20Fakes,%20Stubs%20and%20Dummies.html>`_
+:class:`mock.Mock` is a `spy <http://xunitpatterns.com/Test%20Spy.html>`_ that
+allows for *post-mortem* examination. This is what I call the "action ->
+assertion" [#]_ pattern of testing.
+
+I'm not however a fan of this "statically typed mocking terminology"
+promulgated by `Martin Fowler
+<http://martinfowler.com/articles/mocksArentStubs.html>`_. It confuses usage
+patterns with implementation and prevents you from using natural terminology
+when discussing mocking.
+
+I much prefer duck typing: if an object used in your test suite looks like a
+mock object and quacks like a mock object then it's fine to call it a mock, no
+matter what the implementation looks like.
+
+This terminology is perhaps more useful in less capable languages where
+different usage patterns will *require* different implementations.
+`mock.Mock()` is capable of being used in most of the different roles
+described by Fowler, except (annoyingly / frustratingly / ironically) a Mock
+itself!
+
+How about a simpler definition: a "mock object" is an object used to replace a
+real one in a system under test.
+
+.. [#] This pattern is called "AAA" by some members of the testing community;
+ "Arrange - Act - Assert".
diff --git a/third_party/python/mock-1.0.0/html/_sources/magicmock.txt b/third_party/python/mock-1.0.0/html/_sources/magicmock.txt
new file mode 100644
index 0000000000..42b2ed9db1
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_sources/magicmock.txt
@@ -0,0 +1,258 @@
+
+.. currentmodule:: mock
+
+
+.. _magic-methods:
+
+Mocking Magic Methods
+=====================
+
+.. currentmodule:: mock
+
+:class:`Mock` supports mocking `magic methods
+<http://www.ironpythoninaction.com/magic-methods.html>`_. This allows mock
+objects to replace containers or other objects that implement Python
+protocols.
+
+Because magic methods are looked up differently from normal methods [#]_, this
+support has been specially implemented. This means that only specific magic
+methods are supported. The supported list includes *almost* all of them. If
+there are any missing that you need please let us know!
+
+You mock magic methods by setting the method you are interested in to a function
+or a mock instance. If you are using a function then it *must* take ``self`` as
+the first argument [#]_.
+
+.. doctest::
+
+ >>> def __str__(self):
+ ... return 'fooble'
+ ...
+ >>> mock = Mock()
+ >>> mock.__str__ = __str__
+ >>> str(mock)
+ 'fooble'
+
+ >>> mock = Mock()
+ >>> mock.__str__ = Mock()
+ >>> mock.__str__.return_value = 'fooble'
+ >>> str(mock)
+ 'fooble'
+
+ >>> mock = Mock()
+ >>> mock.__iter__ = Mock(return_value=iter([]))
+ >>> list(mock)
+ []
+
+One use case for this is for mocking objects used as context managers in a
+`with` statement:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.__enter__ = Mock(return_value='foo')
+ >>> mock.__exit__ = Mock(return_value=False)
+ >>> with mock as m:
+ ... assert m == 'foo'
+ ...
+ >>> mock.__enter__.assert_called_with()
+ >>> mock.__exit__.assert_called_with(None, None, None)
+
+Calls to magic methods do not appear in :attr:`~Mock.method_calls`, but they
+are recorded in :attr:`~Mock.mock_calls`.
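+
+For example (a small sketch of the behaviour described above):
+
+.. doctest::
+
+    >>> mock = MagicMock()
+    >>> _ = str(mock)
+    >>> mock.method_calls
+    []
+    >>> mock.mock_calls
+    [call.__str__()]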
+
+.. note::
+
+ If you use the `spec` keyword argument to create a mock then attempting to
+ set a magic method that isn't in the spec will raise an `AttributeError`.
+
+The full list of supported magic methods is:
+
+* ``__hash__``, ``__sizeof__``, ``__repr__`` and ``__str__``
+* ``__dir__``, ``__format__`` and ``__subclasses__``
+* ``__floor__``, ``__trunc__`` and ``__ceil__``
+* Comparisons: ``__cmp__``, ``__lt__``, ``__gt__``, ``__le__``, ``__ge__``,
+ ``__eq__`` and ``__ne__``
+* Container methods: ``__getitem__``, ``__setitem__``, ``__delitem__``,
+ ``__contains__``, ``__len__``, ``__iter__``, ``__getslice__``,
+ ``__setslice__``, ``__reversed__`` and ``__missing__``
+* Context manager: ``__enter__`` and ``__exit__``
+* Unary numeric methods: ``__neg__``, ``__pos__`` and ``__invert__``
+* The numeric methods (including right hand and in-place variants):
+ ``__add__``, ``__sub__``, ``__mul__``, ``__div__``,
+ ``__floordiv__``, ``__mod__``, ``__divmod__``, ``__lshift__``,
+ ``__rshift__``, ``__and__``, ``__xor__``, ``__or__``, and ``__pow__``
+* Numeric conversion methods: ``__complex__``, ``__int__``, ``__float__``,
+ ``__index__`` and ``__coerce__``
+* Descriptor methods: ``__get__``, ``__set__`` and ``__delete__``
+* Pickling: ``__reduce__``, ``__reduce_ex__``, ``__getinitargs__``,
+ ``__getnewargs__``, ``__getstate__`` and ``__setstate__``
+
+
+The following methods are supported in Python 2 but don't exist in Python 3:
+
+* ``__unicode__``, ``__long__``, ``__oct__``, ``__hex__`` and ``__nonzero__``
+* ``__truediv__`` and ``__rtruediv__``
+
+The following methods are supported in Python 3 but don't exist in Python 2:
+
+* ``__bool__`` and ``__next__``
+
+The following methods exist but are *not* supported as they are either in use by
+mock, can't be set dynamically, or can cause problems:
+
+* ``__getattr__``, ``__setattr__``, ``__init__`` and ``__new__``
+* ``__prepare__``, ``__instancecheck__``, ``__subclasscheck__``, ``__del__``
+
+
+
+Magic Mock
+==========
+
+There are two `MagicMock` variants: `MagicMock` and `NonCallableMagicMock`.
+
+
+.. class:: MagicMock(*args, **kw)
+
+ ``MagicMock`` is a subclass of :class:`Mock` with default implementations
+ of most of the magic methods. You can use ``MagicMock`` without having to
+ configure the magic methods yourself.
+
+ The constructor parameters have the same meaning as for :class:`Mock`.
+
+ If you use the `spec` or `spec_set` arguments then *only* magic methods
+ that exist in the spec will be created.
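+
+   For instance (a minimal sketch: a plain `object()` has no `__len__`, so no
+   `__len__` is created on the mock):
+
+   .. doctest::
+
+      >>> mock = MagicMock(spec=object())
+      >>> hasattr(mock, '__len__')
+      False
+      >>> hasattr(MagicMock(), '__len__')
+      True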
+
+
+.. class:: NonCallableMagicMock(*args, **kw)
+
+ A non-callable version of `MagicMock`.
+
+ The constructor parameters have the same meaning as for
+ :class:`MagicMock`, with the exception of `return_value` and
+ `side_effect` which have no meaning on a non-callable mock.
+
+The magic methods are set up with `MagicMock` objects, so you can configure them
+and use them in the usual way:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock[3] = 'fish'
+ >>> mock.__setitem__.assert_called_with(3, 'fish')
+ >>> mock.__getitem__.return_value = 'result'
+ >>> mock[2]
+ 'result'
+
+By default many of the protocol methods are required to return objects of a
+specific type. These methods are preconfigured with a default return value, so
+that they can be used without you having to do anything if you aren't interested
+in the return value. You can still *set* the return value manually if you want
+to change the default.
+
+Methods and their defaults:
+
+* ``__lt__``: NotImplemented
+* ``__gt__``: NotImplemented
+* ``__le__``: NotImplemented
+* ``__ge__``: NotImplemented
+* ``__int__`` : 1
+* ``__contains__`` : False
+* ``__len__`` : 0
+* ``__iter__`` : iter([])
+* ``__exit__`` : False
+* ``__complex__`` : 1j
+* ``__float__`` : 1.0
+* ``__bool__`` : True
+* ``__nonzero__`` : True
+* ``__oct__`` : '1'
+* ``__hex__`` : '0x1'
+* ``__long__`` : long(1)
+* ``__index__`` : 1
+* ``__hash__`` : default hash for the mock
+* ``__str__`` : default str for the mock
+* ``__unicode__`` : default unicode for the mock
+* ``__sizeof__``: default sizeof for the mock
+
+For example:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> int(mock)
+ 1
+ >>> len(mock)
+ 0
+ >>> hex(mock)
+ '0x1'
+ >>> list(mock)
+ []
+ >>> object() in mock
+ False
+
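+You can change any of these defaults by setting the return value yourself, for
+example (a minimal sketch):
+
+.. doctest::
+
+    >>> mock = MagicMock()
+    >>> mock.__len__.return_value = 42
+    >>> len(mock)
+    42
+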
+The two equality methods, `__eq__` and `__ne__`, are special (changed in
+0.7.2). They do the default equality comparison on identity, using a side
+effect, unless you change their return value to return something else:
+
+.. doctest::
+
+ >>> MagicMock() == 3
+ False
+ >>> MagicMock() != 3
+ True
+ >>> mock = MagicMock()
+ >>> mock.__eq__.return_value = True
+ >>> mock == 3
+ True
+
+In `0.8` `__iter__` also gained special handling, implemented with a
+side effect. The return value of `MagicMock.__iter__` can be any iterable
+object and isn't required to be an iterator:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock.__iter__.return_value = ['a', 'b', 'c']
+ >>> list(mock)
+ ['a', 'b', 'c']
+ >>> list(mock)
+ ['a', 'b', 'c']
+
+If the return value *is* an iterator, then iterating over it once will consume
+it and subsequent iterations will result in an empty list:
+
+.. doctest::
+
+ >>> mock.__iter__.return_value = iter(['a', 'b', 'c'])
+ >>> list(mock)
+ ['a', 'b', 'c']
+ >>> list(mock)
+ []
+
+``MagicMock`` has all of the supported magic methods configured except for some
+of the obscure and obsolete ones. You can still set these up if you want.
+
+Magic methods that are supported but not setup by default in ``MagicMock`` are:
+
+* ``__cmp__``
+* ``__getslice__`` and ``__setslice__``
+* ``__coerce__``
+* ``__subclasses__``
+* ``__dir__``
+* ``__format__``
+* ``__get__``, ``__set__`` and ``__delete__``
+* ``__reversed__`` and ``__missing__``
+* ``__reduce__``, ``__reduce_ex__``, ``__getinitargs__``, ``__getnewargs__``,
+ ``__getstate__`` and ``__setstate__``
+* ``__getformat__`` and ``__setformat__``
+
+
+
+------------
+
+.. [#] Magic methods *should* be looked up on the class rather than the
+ instance. Different versions of Python are inconsistent about applying this
+ rule. The supported protocol methods should work with all supported versions
+ of Python.
+.. [#] The function is basically hooked up to the class, but each ``Mock``
+ instance is kept isolated from the others.
diff --git a/third_party/python/mock-1.0.0/html/_sources/mock.txt b/third_party/python/mock-1.0.0/html/_sources/mock.txt
new file mode 100644
index 0000000000..58712b21a6
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_sources/mock.txt
@@ -0,0 +1,842 @@
+The Mock Class
+==============
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ class SomeClass:
+ pass
+
+
+`Mock` is a flexible mock object intended to replace the use of stubs and
+test doubles throughout your code. Mocks are callable and create attributes as
+new mocks when you access them [#]_. Accessing the same attribute will always
+return the same mock. Mocks record how you use them, allowing you to make
+assertions about what your code has done to them.
+
+:class:`MagicMock` is a subclass of `Mock` with all the magic methods
+pre-created and ready to use. There are also non-callable variants, useful
+when you are mocking out objects that aren't callable:
+:class:`NonCallableMock` and :class:`NonCallableMagicMock`
+
+The :func:`patch` decorators make it easy to temporarily replace classes
+in a particular module with a `Mock` object. By default `patch` will create
+a `MagicMock` for you. You can specify an alternative class of `Mock` using
+the `new_callable` argument to `patch`.
+
+
+.. index:: side_effect
+.. index:: return_value
+.. index:: wraps
+.. index:: name
+.. index:: spec
+
+.. class:: Mock(spec=None, side_effect=None, return_value=DEFAULT, wraps=None, name=None, spec_set=None, **kwargs)
+
+ Create a new `Mock` object. `Mock` takes several optional arguments
+ that specify the behaviour of the Mock object:
+
+ * `spec`: This can be either a list of strings or an existing object (a
+ class or instance) that acts as the specification for the mock object. If
+ you pass in an object then a list of strings is formed by calling dir on
+ the object (excluding unsupported magic attributes and methods).
+ Accessing any attribute not in this list will raise an `AttributeError`.
+
+ If `spec` is an object (rather than a list of strings) then
+ :attr:`__class__` returns the class of the spec object. This allows mocks
+ to pass `isinstance` tests.
+
+ * `spec_set`: A stricter variant of `spec`. If used, attempting to *set*
+ or get an attribute on the mock that isn't on the object passed as
+ `spec_set` will raise an `AttributeError`.
+
+ * `side_effect`: A function to be called whenever the Mock is called. See
+ the :attr:`~Mock.side_effect` attribute. Useful for raising exceptions or
+ dynamically changing return values. The function is called with the same
+ arguments as the mock, and unless it returns :data:`DEFAULT`, the return
+ value of this function is used as the return value.
+
+ Alternatively `side_effect` can be an exception class or instance. In
+ this case the exception will be raised when the mock is called.
+
+ If `side_effect` is an iterable then each call to the mock will return
+ the next value from the iterable. If any of the members of the iterable
+ are exceptions they will be raised instead of returned.
+
+ A `side_effect` can be cleared by setting it to `None`.
+
+ * `return_value`: The value returned when the mock is called. By default
+ this is a new Mock (created on first access). See the
+ :attr:`return_value` attribute.
+
+ * `wraps`: Item for the mock object to wrap. If `wraps` is not None then
+ calling the Mock will pass the call through to the wrapped object
+ (returning the real result and ignoring `return_value`). Attribute access
+ on the mock will return a Mock object that wraps the corresponding
+ attribute of the wrapped object (so attempting to access an attribute
+ that doesn't exist will raise an `AttributeError`).
+
+      If the mock has an explicit `return_value` set then calls are not passed
+      to the wrapped object and the `return_value` is returned instead (see the
+      sketch after this argument list).
+
+ * `name`: If the mock has a name then it will be used in the repr of the
+ mock. This can be useful for debugging. The name is propagated to child
+ mocks.
+
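+   As a brief illustration of the `wraps` argument (a minimal sketch using a
+   trivial class defined inline):
+
+   .. doctest::
+
+      >>> class Thing(object):
+      ...     def method(self):
+      ...         return 'real result'
+      ...
+      >>> mock = Mock(wraps=Thing())
+      >>> mock.method()
+      'real result'
+      >>> mock.method.assert_called_with()
+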
+ Mocks can also be called with arbitrary keyword arguments. These will be
+ used to set attributes on the mock after it is created. See the
+ :meth:`configure_mock` method for details.
+
+
+ .. method:: assert_called_with(*args, **kwargs)
+
+ This method is a convenient way of asserting that calls are made in a
+ particular way:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.method(1, 2, 3, test='wow')
+ <Mock name='mock.method()' id='...'>
+ >>> mock.method.assert_called_with(1, 2, 3, test='wow')
+
+
+ .. method:: assert_called_once_with(*args, **kwargs)
+
+ Assert that the mock was called exactly once and with the specified
+ arguments.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock('foo', bar='baz')
+ >>> mock.assert_called_once_with('foo', bar='baz')
+ >>> mock('foo', bar='baz')
+ >>> mock.assert_called_once_with('foo', bar='baz')
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected to be called once. Called 2 times.
+
+
+ .. method:: assert_any_call(*args, **kwargs)
+
+ assert the mock has been called with the specified arguments.
+
+ The assert passes if the mock has *ever* been called, unlike
+ :meth:`assert_called_with` and :meth:`assert_called_once_with` that
+ only pass if the call is the most recent one.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock(1, 2, arg='thing')
+ >>> mock('some', 'thing', 'else')
+ >>> mock.assert_any_call(1, 2, arg='thing')
+
+
+ .. method:: assert_has_calls(calls, any_order=False)
+
+ assert the mock has been called with the specified calls.
+ The `mock_calls` list is checked for the calls.
+
+ If `any_order` is False (the default) then the calls must be
+ sequential. There can be extra calls before or after the
+ specified calls.
+
+ If `any_order` is True then the calls can be in any order, but
+ they must all appear in :attr:`mock_calls`.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock(1)
+ >>> mock(2)
+ >>> mock(3)
+ >>> mock(4)
+ >>> calls = [call(2), call(3)]
+ >>> mock.assert_has_calls(calls)
+ >>> calls = [call(4), call(2), call(3)]
+ >>> mock.assert_has_calls(calls, any_order=True)
+
+
+ .. method:: reset_mock()
+
+ The reset_mock method resets all the call attributes on a mock object:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock('hello')
+ >>> mock.called
+ True
+ >>> mock.reset_mock()
+ >>> mock.called
+ False
+
+ This can be useful where you want to make a series of assertions that
+ reuse the same object. Note that `reset_mock` *doesn't* clear the
+ return value, :attr:`side_effect` or any child attributes you have
+ set using normal assignment. Child mocks and the return value mock
+ (if any) are reset as well.
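+
+      For example (a minimal sketch):
+
+      .. doctest::
+
+         >>> mock = Mock(return_value=3)
+         >>> mock()
+         3
+         >>> mock.reset_mock()
+         >>> mock.called
+         False
+         >>> mock.return_value
+         3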
+
+
+ .. method:: mock_add_spec(spec, spec_set=False)
+
+ Add a spec to a mock. `spec` can either be an object or a
+ list of strings. Only attributes on the `spec` can be fetched as
+ attributes from the mock.
+
+ If `spec_set` is `True` then only attributes on the spec can be set.
+
+
+ .. method:: attach_mock(mock, attribute)
+
+ Attach a mock as an attribute of this one, replacing its name and
+ parent. Calls to the attached mock will be recorded in the
+ :attr:`method_calls` and :attr:`mock_calls` attributes of this one.
+
+
+ .. method:: configure_mock(**kwargs)
+
+ Set attributes on the mock through keyword arguments.
+
+ Attributes plus return values and side effects can be set on child
+ mocks using standard dot notation and unpacking a dictionary in the
+ method call:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock.configure_mock(**attrs)
+ >>> mock.method()
+ 3
+ >>> mock.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+ The same thing can be achieved in the constructor call to mocks:
+
+ .. doctest::
+
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock = Mock(some_attribute='eggs', **attrs)
+ >>> mock.some_attribute
+ 'eggs'
+ >>> mock.method()
+ 3
+ >>> mock.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+ `configure_mock` exists to make it easier to do configuration
+ after the mock has been created.
+
+
+ .. method:: __dir__()
+
+ `Mock` objects limit the results of `dir(some_mock)` to useful results.
+ For mocks with a `spec` this includes all the permitted attributes
+ for the mock.
+
+ See :data:`FILTER_DIR` for what this filtering does, and how to
+ switch it off.
+
+
+ .. method:: _get_child_mock(**kw)
+
+ Create the child mocks for attributes and return value.
+ By default child mocks will be the same type as the parent.
+ Subclasses of Mock may want to override this to customize the way
+ child mocks are made.
+
+ For non-callable mocks the callable variant will be used (rather than
+ any custom subclass).
+
+
+ .. attribute:: called
+
+ A boolean representing whether or not the mock object has been called:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock.called
+ False
+ >>> mock()
+ >>> mock.called
+ True
+
+ .. attribute:: call_count
+
+ An integer telling you how many times the mock object has been called:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock.call_count
+ 0
+ >>> mock()
+ >>> mock()
+ >>> mock.call_count
+ 2
+
+
+ .. attribute:: return_value
+
+ Set this to configure the value returned by calling the mock:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.return_value = 'fish'
+ >>> mock()
+ 'fish'
+
+ The default return value is a mock object and you can configure it in
+ the normal way:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.return_value.attribute = sentinel.Attribute
+ >>> mock.return_value()
+ <Mock name='mock()()' id='...'>
+ >>> mock.return_value.assert_called_with()
+
+ `return_value` can also be set in the constructor:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=3)
+ >>> mock.return_value
+ 3
+ >>> mock()
+ 3
+
+
+ .. attribute:: side_effect
+
+ This can either be a function to be called when the mock is called,
+ or an exception (class or instance) to be raised.
+
+      If you pass in a function it will be called with the same arguments as the
+ mock and unless the function returns the :data:`DEFAULT` singleton the
+ call to the mock will then return whatever the function returns. If the
+ function returns :data:`DEFAULT` then the mock will return its normal
+      value (from the :attr:`return_value`).
+
+ An example of a mock that raises an exception (to test exception
+ handling of an API):
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.side_effect = Exception('Boom!')
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ Exception: Boom!
+
+ Using `side_effect` to return a sequence of values:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.side_effect = [3, 2, 1]
+ >>> mock(), mock(), mock()
+ (3, 2, 1)
+
+ The `side_effect` function is called with the same arguments as the
+ mock (so it is wise for it to take arbitrary args and keyword
+ arguments) and whatever it returns is used as the return value for
+ the call. The exception is if `side_effect` returns :data:`DEFAULT`,
+ in which case the normal :attr:`return_value` is used.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=3)
+ >>> def side_effect(*args, **kwargs):
+ ... return DEFAULT
+ ...
+ >>> mock.side_effect = side_effect
+ >>> mock()
+ 3
+
+ `side_effect` can be set in the constructor. Here's an example that
+ adds one to the value the mock is called with and returns it:
+
+ .. doctest::
+
+ >>> side_effect = lambda value: value + 1
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock(3)
+ 4
+ >>> mock(-8)
+ -7
+
+ Setting `side_effect` to `None` clears it:
+
+ .. doctest::
+
+ >>> from mock import Mock
+ >>> m = Mock(side_effect=KeyError, return_value=3)
+ >>> m()
+ Traceback (most recent call last):
+ ...
+ KeyError
+ >>> m.side_effect = None
+ >>> m()
+ 3
+
+
+ .. attribute:: call_args
+
+ This is either `None` (if the mock hasn't been called), or the
+ arguments that the mock was last called with. This will be in the
+ form of a tuple: the first member is any ordered arguments the mock
+ was called with (or an empty tuple) and the second member is any
+ keyword arguments (or an empty dictionary).
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> print mock.call_args
+ None
+ >>> mock()
+ >>> mock.call_args
+ call()
+ >>> mock.call_args == ()
+ True
+ >>> mock(3, 4)
+ >>> mock.call_args
+ call(3, 4)
+ >>> mock.call_args == ((3, 4),)
+ True
+ >>> mock(3, 4, 5, key='fish', next='w00t!')
+ >>> mock.call_args
+ call(3, 4, 5, key='fish', next='w00t!')
+
+ `call_args`, along with members of the lists :attr:`call_args_list`,
+ :attr:`method_calls` and :attr:`mock_calls` are :data:`call` objects.
+ These are tuples, so they can be unpacked to get at the individual
+ arguments and make more complex assertions. See
+ :ref:`calls as tuples <calls-as-tuples>`.
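+
+      A brief sketch of unpacking `call_args` into the positional and keyword
+      arguments:
+
+      .. doctest::
+
+         >>> mock = Mock(return_value=None)
+         >>> mock(1, 2, key='value')
+         >>> args, kwargs = mock.call_args
+         >>> args
+         (1, 2)
+         >>> kwargs
+         {'key': 'value'}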
+
+
+ .. attribute:: call_args_list
+
+ This is a list of all the calls made to the mock object in sequence
+ (so the length of the list is the number of times it has been
+ called). Before any calls have been made it is an empty list. The
+ :data:`call` object can be used for conveniently constructing lists of
+ calls to compare with `call_args_list`.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock()
+ >>> mock(3, 4)
+ >>> mock(key='fish', next='w00t!')
+ >>> mock.call_args_list
+ [call(), call(3, 4), call(key='fish', next='w00t!')]
+ >>> expected = [(), ((3, 4),), ({'key': 'fish', 'next': 'w00t!'},)]
+ >>> mock.call_args_list == expected
+ True
+
+ Members of `call_args_list` are :data:`call` objects. These can be
+ unpacked as tuples to get at the individual arguments. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: method_calls
+
+ As well as tracking calls to themselves, mocks also track calls to
+ methods and attributes, and *their* methods and attributes:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.method()
+ <Mock name='mock.method()' id='...'>
+ >>> mock.property.method.attribute()
+ <Mock name='mock.property.method.attribute()' id='...'>
+ >>> mock.method_calls
+ [call.method(), call.property.method.attribute()]
+
+ Members of `method_calls` are :data:`call` objects. These can be
+ unpacked as tuples to get at the individual arguments. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: mock_calls
+
+ `mock_calls` records *all* calls to the mock object, its methods, magic
+ methods *and* return value mocks.
+
+ .. doctest::
+
+ >>> mock = MagicMock()
+ >>> result = mock(1, 2, 3)
+ >>> mock.first(a=3)
+ <MagicMock name='mock.first()' id='...'>
+ >>> mock.second()
+ <MagicMock name='mock.second()' id='...'>
+ >>> int(mock)
+ 1
+ >>> result(1)
+ <MagicMock name='mock()()' id='...'>
+ >>> expected = [call(1, 2, 3), call.first(a=3), call.second(),
+ ... call.__int__(), call()(1)]
+ >>> mock.mock_calls == expected
+ True
+
+ Members of `mock_calls` are :data:`call` objects. These can be
+ unpacked as tuples to get at the individual arguments. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: __class__
+
+ Normally the `__class__` attribute of an object will return its type.
+ For a mock object with a `spec` `__class__` returns the spec class
+ instead. This allows mock objects to pass `isinstance` tests for the
+ object they are replacing / masquerading as:
+
+ .. doctest::
+
+ >>> mock = Mock(spec=3)
+ >>> isinstance(mock, int)
+ True
+
+      `__class__` is assignable to; this allows a mock to pass an
+ `isinstance` check without forcing you to use a spec:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.__class__ = dict
+ >>> isinstance(mock, dict)
+ True
+
+.. class:: NonCallableMock(spec=None, wraps=None, name=None, spec_set=None, **kwargs)
+
+ A non-callable version of `Mock`. The constructor parameters have the same
+   meaning as for `Mock`, with the exception of `return_value` and `side_effect`
+ which have no meaning on a non-callable mock.
+
+Mock objects that use a class or an instance as a `spec` or `spec_set` are able
+to pass `isinstance` tests:
+
+.. doctest::
+
+ >>> mock = Mock(spec=SomeClass)
+ >>> isinstance(mock, SomeClass)
+ True
+ >>> mock = Mock(spec_set=SomeClass())
+ >>> isinstance(mock, SomeClass)
+ True
+
+The `Mock` classes have support for mocking magic methods. See :ref:`magic
+methods <magic-methods>` for the full details.
+
+The mock classes and the :func:`patch` decorators all take arbitrary keyword
+arguments for configuration. For the `patch` decorators the keywords are
+passed to the constructor of the mock being created. The keyword arguments
+are for configuring attributes of the mock:
+
+.. doctest::
+
+ >>> m = MagicMock(attribute=3, other='fish')
+ >>> m.attribute
+ 3
+ >>> m.other
+ 'fish'
+
+The return value and side effect of child mocks can be set in the same way,
+using dotted notation. As you can't use dotted names directly in a call you
+have to create a dictionary and unpack it using `**`:
+
+.. doctest::
+
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock = Mock(some_attribute='eggs', **attrs)
+ >>> mock.some_attribute
+ 'eggs'
+ >>> mock.method()
+ 3
+ >>> mock.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+
+.. class:: PropertyMock(*args, **kwargs)
+
+ A mock intended to be used as a property, or other descriptor, on a class.
+ `PropertyMock` provides `__get__` and `__set__` methods so you can specify
+ a return value when it is fetched.
+
+ Fetching a `PropertyMock` instance from an object calls the mock, with
+ no args. Setting it calls the mock with the value being set.
+
+ .. doctest::
+
+ >>> class Foo(object):
+ ... @property
+ ... def foo(self):
+ ... return 'something'
+ ... @foo.setter
+ ... def foo(self, value):
+ ... pass
+ ...
+ >>> with patch('__main__.Foo.foo', new_callable=PropertyMock) as mock_foo:
+ ... mock_foo.return_value = 'mockity-mock'
+ ... this_foo = Foo()
+ ... print this_foo.foo
+ ... this_foo.foo = 6
+ ...
+ mockity-mock
+ >>> mock_foo.mock_calls
+ [call(), call(6)]
+
+Because of the way mock attributes are stored you can't directly attach a
+`PropertyMock` to a mock object. Instead you can attach it to the mock type
+object:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> p = PropertyMock(return_value=3)
+ >>> type(m).foo = p
+ >>> m.foo
+ 3
+ >>> p.assert_called_once_with()
+
+
+.. index:: __call__
+.. index:: calling
+
+Calling
+=======
+
+Mock objects are callable. The call will return the value set as the
+:attr:`~Mock.return_value` attribute. The default return value is a new Mock
+object; it is created the first time the return value is accessed (either
+explicitly or by calling the Mock) - but it is stored and the same one
+returned each time.
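+
+For example (a quick sketch):
+
+.. doctest::
+
+    >>> mock = Mock()
+    >>> mock() is mock.return_value
+    True
+    >>> mock() is mock()
+    True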
+
+Calls made to the object will be recorded in the attributes
+like :attr:`~Mock.call_args` and :attr:`~Mock.call_args_list`.
+
+If :attr:`~Mock.side_effect` is set then it will be called after the call has
+been recorded, so if `side_effect` raises an exception the call is still
+recorded.
+
+The simplest way to make a mock raise an exception when called is to make
+:attr:`~Mock.side_effect` an exception class or instance:
+
+.. doctest::
+
+ >>> m = MagicMock(side_effect=IndexError)
+ >>> m(1, 2, 3)
+ Traceback (most recent call last):
+ ...
+ IndexError
+ >>> m.mock_calls
+ [call(1, 2, 3)]
+ >>> m.side_effect = KeyError('Bang!')
+ >>> m('two', 'three', 'four')
+ Traceback (most recent call last):
+ ...
+ KeyError: 'Bang!'
+ >>> m.mock_calls
+ [call(1, 2, 3), call('two', 'three', 'four')]
+
+If `side_effect` is a function then whatever that function returns is what
+calls to the mock return. The `side_effect` function is called with the
+same arguments as the mock. This allows you to vary the return value of the
+call dynamically, based on the input:
+
+.. doctest::
+
+ >>> def side_effect(value):
+ ... return value + 1
+ ...
+ >>> m = MagicMock(side_effect=side_effect)
+ >>> m(1)
+ 2
+ >>> m(2)
+ 3
+ >>> m.mock_calls
+ [call(1), call(2)]
+
+If you want the mock to still return the default return value (a new mock), or
+any set return value, then there are two ways of doing this. Either return
+`mock.return_value` from inside `side_effect`, or return :data:`DEFAULT`:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> def side_effect(*args, **kwargs):
+ ... return m.return_value
+ ...
+ >>> m.side_effect = side_effect
+ >>> m.return_value = 3
+ >>> m()
+ 3
+ >>> def side_effect(*args, **kwargs):
+ ... return DEFAULT
+ ...
+ >>> m.side_effect = side_effect
+ >>> m()
+ 3
+
+To remove a `side_effect`, and return to the default behaviour, set the
+`side_effect` to `None`:
+
+.. doctest::
+
+ >>> m = MagicMock(return_value=6)
+ >>> def side_effect(*args, **kwargs):
+ ... return 3
+ ...
+ >>> m.side_effect = side_effect
+ >>> m()
+ 3
+ >>> m.side_effect = None
+ >>> m()
+ 6
+
+The `side_effect` can also be any iterable object. Repeated calls to the mock
+will return values from the iterable (until the iterable is exhausted and
+a `StopIteration` is raised):
+
+.. doctest::
+
+ >>> m = MagicMock(side_effect=[1, 2, 3])
+ >>> m()
+ 1
+ >>> m()
+ 2
+ >>> m()
+ 3
+ >>> m()
+ Traceback (most recent call last):
+ ...
+ StopIteration
+
+If any members of the iterable are exceptions they will be raised instead of
+returned:
+
+.. doctest::
+
+ >>> iterable = (33, ValueError, 66)
+ >>> m = MagicMock(side_effect=iterable)
+ >>> m()
+ 33
+ >>> m()
+ Traceback (most recent call last):
+ ...
+ ValueError
+ >>> m()
+ 66
+
+
+.. _deleting-attributes:
+
+Deleting Attributes
+===================
+
+Mock objects create attributes on demand. This allows them to pretend to be
+objects of any type.
+
+You may want a mock object to return `False` to a `hasattr` call, or raise an
+`AttributeError` when an attribute is fetched. You can do this by providing
+an object as a `spec` for a mock, but that isn't always convenient.
+
+You "block" attributes by deleting them. Once deleted, accessing an attribute
+will raise an `AttributeError`.
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> hasattr(mock, 'm')
+ True
+ >>> del mock.m
+ >>> hasattr(mock, 'm')
+ False
+ >>> del mock.f
+ >>> mock.f
+ Traceback (most recent call last):
+ ...
+ AttributeError: f
+
+
+Attaching Mocks as Attributes
+=============================
+
+When you attach a mock as an attribute of another mock (or as the return
+value) it becomes a "child" of that mock. Calls to the child are recorded in
+the :attr:`~Mock.method_calls` and :attr:`~Mock.mock_calls` attributes of the
+parent. This is useful for configuring child mocks and then attaching them to
+the parent, or for attaching mocks to a parent that records all calls to the
+children and allows you to make assertions about the order of calls between
+mocks:
+
+.. doctest::
+
+ >>> parent = MagicMock()
+ >>> child1 = MagicMock(return_value=None)
+ >>> child2 = MagicMock(return_value=None)
+ >>> parent.child1 = child1
+ >>> parent.child2 = child2
+ >>> child1(1)
+ >>> child2(2)
+ >>> parent.mock_calls
+ [call.child1(1), call.child2(2)]
+
+The exception to this is if the mock has a name. This allows you to prevent
+the "parenting" if for some reason you don't want it to happen.
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> not_a_child = MagicMock(name='not-a-child')
+ >>> mock.attribute = not_a_child
+ >>> mock.attribute()
+ <MagicMock name='not-a-child()' id='...'>
+ >>> mock.mock_calls
+ []
+
+Mocks created for you by :func:`patch` are automatically given names. To
+attach mocks that have names to a parent you use the :meth:`~Mock.attach_mock`
+method:
+
+.. doctest::
+
+ >>> thing1 = object()
+ >>> thing2 = object()
+ >>> parent = MagicMock()
+ >>> with patch('__main__.thing1', return_value=None) as child1:
+ ... with patch('__main__.thing2', return_value=None) as child2:
+ ... parent.attach_mock(child1, 'child1')
+ ... parent.attach_mock(child2, 'child2')
+ ... child1('one')
+ ... child2('two')
+ ...
+ >>> parent.mock_calls
+ [call.child1('one'), call.child2('two')]
+
+
+-----
+
+.. [#] The only exceptions are magic methods and attributes (those that have
+ leading and trailing double underscores). Mock doesn't create these but
+   instead raises an ``AttributeError``. This is because the interpreter
+   will often implicitly request these methods, and gets *very* confused if it
+   receives a new Mock object when it expects a magic method. If you need magic
+ method support see :ref:`magic methods <magic-methods>`.
diff --git a/third_party/python/mock-1.0.0/html/_sources/mocksignature.txt b/third_party/python/mock-1.0.0/html/_sources/mocksignature.txt
new file mode 100644
index 0000000000..dbb5019fbb
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_sources/mocksignature.txt
@@ -0,0 +1,262 @@
+mocksignature
+=============
+
+.. currentmodule:: mock
+
+.. note::
+
+ :ref:`auto-speccing`, added in mock 0.8, is a more advanced version of
+ `mocksignature` and can be used for many of the same use cases.
+
+A problem with using mock objects to replace real objects in your tests is that
+:class:`Mock` can be *too* flexible. Your code can treat the mock objects in
+any way and you have to manually check that they were called correctly. If your
+code calls functions or methods with the wrong number of arguments then mocks
+don't complain.
+
+The solution to this is `mocksignature`, which creates functions with the
+same signature as the original that delegate to a mock. You can interrogate
+the mock in the usual way to check it has been called with the *right*
+arguments, but if it is called with the wrong number of arguments it will
+raise a `TypeError` in the same way your production code would.
+
+Another advantage is that your mocked objects are real functions, which can
+be useful when your code uses
+`inspect <http://docs.python.org/library/inspect.html>`_ or depends on
+functions being function objects.
+
+.. function:: mocksignature(func, mock=None, skipfirst=False)
+
+ Create a new function with the same signature as `func` that delegates
+ to `mock`. If `skipfirst` is True the first argument is skipped, useful
+ for methods where `self` needs to be omitted from the new function.
+
+ If you don't pass in a `mock` then one will be created for you.
+
+ Functions returned by `mocksignature` have many of the same attributes
+ and assert methods as a mock object.
+
+ The mock is set as the `mock` attribute of the returned function for easy
+ access.
+
+ `mocksignature` can also be used with classes. It copies the signature of
+ the `__init__` method.
+
+ When used with callable objects (instances) it copies the signature of the
+ `__call__` method.
+
+`mocksignature` will work out whether it is mocking the signature of a method
+on an instance or a method on a class and do the "right thing" with the `self`
+argument in both cases.
+
+Because of a limitation in the way that arguments are collected by functions
+created by `mocksignature` they are *always* passed as positional arguments
+(including defaults) and not keyword arguments.
+
+
+mocksignature api
+-----------------
+
+Although the objects returned by `mocksignature` are real function objects,
+they have much of the same api as the :class:`Mock` class. This includes the
+assert methods:
+
+.. doctest::
+
+ >>> def func(a, b, c):
+ ... pass
+ ...
+ >>> func2 = mocksignature(func)
+ >>> func2.called
+ False
+ >>> func2.return_value = 3
+ >>> func2(1, 2, 3)
+ 3
+ >>> func2.called
+ True
+ >>> func2.assert_called_once_with(1, 2, 3)
+ >>> func2.assert_called_with(1, 2, 4)
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected call: mock(1, 2, 4)
+ Actual call: mock(1, 2, 3)
+ >>> func2.call_count
+ 1
+ >>> func2.side_effect = IndexError
+ >>> func2(4, 5, 6)
+ Traceback (most recent call last):
+ ...
+ IndexError
+
+The mock object that is being delegated to is available as the `mock` attribute
+of the function created by `mocksignature`.
+
+.. doctest::
+
+ >>> func2.mock.mock_calls
+ [call(1, 2, 3), call(4, 5, 6)]
+
+The methods and attributes available on functions returned by `mocksignature`
+are:
+
+ :meth:`~Mock.assert_any_call`, :meth:`~Mock.assert_called_once_with`,
+ :meth:`~Mock.assert_called_with`, :meth:`~Mock.assert_has_calls`,
+ :attr:`~Mock.call_args`, :attr:`~Mock.call_args_list`,
+ :attr:`~Mock.call_count`, :attr:`~Mock.called`,
+ :attr:`~Mock.method_calls`, `mock`, :attr:`~Mock.mock_calls`,
+ :meth:`~Mock.reset_mock`, :attr:`~Mock.return_value`, and
+ :attr:`~Mock.side_effect`.
+
+
+Example use
+-----------
+
+Basic use
+~~~~~~~~~
+
+.. doctest::
+
+ >>> def function(a, b, c=None):
+ ... pass
+ ...
+ >>> mock = Mock()
+ >>> function = mocksignature(function, mock)
+ >>> function()
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes at least 2 arguments (0 given)
+ >>> function.return_value = 'some value'
+ >>> function(1, 2, 'foo')
+ 'some value'
+ >>> function.assert_called_with(1, 2, 'foo')
+
+
+Keyword arguments
+~~~~~~~~~~~~~~~~~
+
+Note that arguments to functions created by `mocksignature` are always passed
+in to the underlying mock by position even when called with keywords:
+
+.. doctest::
+
+ >>> def function(a, b, c=None):
+ ... pass
+ ...
+ >>> function = mocksignature(function)
+ >>> function.return_value = None
+ >>> function(1, 2)
+ >>> function.assert_called_with(1, 2, None)
+
+
+Mocking methods and self
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+When you use `mocksignature` to replace a method on a class then `self`
+will be included in the method signature - and you will need to include
+the instance when you do your asserts.
+
+As a quirk of the way Python (2) wraps methods fetched from a class,
+we can *get* the `return_value` from a function set on a class, but we can't
+set it. We have to do this through the exposed `mock` attribute instead:
+
+.. doctest::
+
+ >>> class SomeClass(object):
+ ... def method(self, a, b, c=None):
+ ... pass
+ ...
+ >>> SomeClass.method = mocksignature(SomeClass.method)
+ >>> SomeClass.method.mock.return_value = None
+ >>> instance = SomeClass()
+ >>> instance.method()
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes at least 4 arguments (1 given)
+ >>> instance.method(1, 2, 3)
+ >>> instance.method.assert_called_with(instance, 1, 2, 3)
+
+When you use `mocksignature` on instance methods `self` isn't included (and we
+can set the `return_value` etc directly):
+
+.. doctest::
+
+ >>> class SomeClass(object):
+ ... def method(self, a, b, c=None):
+ ... pass
+ ...
+ >>> instance = SomeClass()
+ >>> instance.method = mocksignature(instance.method)
+ >>> instance.method.return_value = None
+ >>> instance.method(1, 2, 3)
+ >>> instance.method.assert_called_with(1, 2, 3)
+
+
+mocksignature with classes
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When used with a class `mocksignature` copies the signature of the `__init__`
+method.
+
+.. doctest::
+
+ >>> class Something(object):
+ ... def __init__(self, foo, bar):
+ ... pass
+ ...
+ >>> MockSomething = mocksignature(Something)
+ >>> instance = MockSomething(10, 9)
+ >>> assert instance is MockSomething.return_value
+ >>> MockSomething.assert_called_with(10, 9)
+ >>> MockSomething()
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes at least 2 arguments (0 given)
+
+Because the object returned by `mocksignature` is a function rather than a
+`Mock` you lose the other capabilities of `Mock`, like dynamic attribute
+creation.
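+
+For instance (a small sketch; `some_attribute` is an arbitrary, made-up name):
+
+.. doctest::
+
+    >>> hasattr(MockSomething, 'some_attribute')
+    False
+    >>> hasattr(Mock(), 'some_attribute')
+    True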
+
+
+mocksignature with callable objects
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When used with a callable object `mocksignature` copies the signature of the
+`__call__` method.
+
+.. doctest::
+
+ >>> class Something(object):
+ ... def __call__(self, spam, eggs):
+ ... pass
+ ...
+ >>> something = Something()
+ >>> mock_something = mocksignature(something)
+ >>> result = mock_something(10, 9)
+ >>> mock_something.assert_called_with(10, 9)
+ >>> mock_something()
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes at least 2 arguments (0 given)
+
+
+mocksignature argument to patch
+-------------------------------
+
+`mocksignature` is available as a keyword argument to :func:`patch` or
+:func:`patch.object`. It can be used with functions / methods / classes and
+callable objects.
+
+.. doctest::
+
+ >>> class SomeClass(object):
+ ... def method(self, a, b, c=None):
+ ... pass
+ ...
+ >>> @patch.object(SomeClass, 'method', mocksignature=True)
+ ... def test(mock_method):
+ ... instance = SomeClass()
+ ... mock_method.return_value = None
+ ... instance.method(1, 2)
+ ... mock_method.assert_called_with(instance, 1, 2, None)
+ ...
+ >>> test()
diff --git a/third_party/python/mock-1.0.0/html/_sources/patch.txt b/third_party/python/mock-1.0.0/html/_sources/patch.txt
new file mode 100644
index 0000000000..3d56264fbb
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_sources/patch.txt
@@ -0,0 +1,636 @@
+==================
+ Patch Decorators
+==================
+
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ class SomeClass(object):
+ static_method = None
+ class_method = None
+ attribute = None
+
+ sys.modules['package'] = package = Mock(name='package')
+ sys.modules['package.module'] = package.module
+
+ class TestCase(unittest2.TestCase):
+ def run(self):
+ result = unittest2.TestResult()
+ super(unittest2.TestCase, self).run(result)
+ assert result.wasSuccessful()
+
+.. testcleanup::
+
+ patch.TEST_PREFIX = 'test'
+
+
+The patch decorators are used for patching objects only within the scope of
+the function they decorate. They automatically handle the unpatching for you,
+even if exceptions are raised. All of these functions can also be used in with
+statements or as class decorators.
+
+
+patch
+=====
+
+.. note::
+
+ `patch` is straightforward to use. The key is to do the patching in the
+ right namespace. See the section `where to patch`_.
+
+.. function:: patch(target, new=DEFAULT, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ `patch` acts as a function decorator, class decorator or a context
+ manager. Inside the body of the function or with statement, the `target`
+ is patched with a `new` object. When the function/with statement exits
+ the patch is undone.
+
+ If `new` is omitted, then the target is replaced with a
+ :class:`MagicMock`. If `patch` is used as a decorator and `new` is
+ omitted, the created mock is passed in as an extra argument to the
+ decorated function. If `patch` is used as a context manager the created
+ mock is returned by the context manager.
+
+ `target` should be a string in the form `'package.module.ClassName'`. The
+ `target` is imported and the specified object replaced with the `new`
+ object, so the `target` must be importable from the environment you are
+ calling `patch` from. The target is imported when the decorated function
+ is executed, not at decoration time.
+
+ The `spec` and `spec_set` keyword arguments are passed to the `MagicMock`
+ if patch is creating one for you.
+
+ In addition you can pass `spec=True` or `spec_set=True`, which causes
+ patch to pass in the object being mocked as the spec/spec_set object.
+
+ `new_callable` allows you to specify a different class, or callable object,
+ that will be called to create the `new` object. By default `MagicMock` is
+ used.
+
+ A more powerful form of `spec` is `autospec`. If you set `autospec=True`
+   then the mock will be created with a spec from the object being replaced.
+ All attributes of the mock will also have the spec of the corresponding
+ attribute of the object being replaced. Methods and functions being mocked
+ will have their arguments checked and will raise a `TypeError` if they are
+ called with the wrong signature. For mocks
+ replacing a class, their return value (the 'instance') will have the same
+ spec as the class. See the :func:`create_autospec` function and
+ :ref:`auto-speccing`.
+
+ Instead of `autospec=True` you can pass `autospec=some_object` to use an
+ arbitrary object as the spec instead of the one being replaced.
+
+ By default `patch` will fail to replace attributes that don't exist. If
+ you pass in `create=True`, and the attribute doesn't exist, patch will
+ create the attribute for you when the patched function is called, and
+ delete it again afterwards. This is useful for writing tests against
+   attributes that your production code creates at runtime. It is off by
+ default because it can be dangerous. With it switched on you can write
+ passing tests against APIs that don't actually exist!
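+
+   A brief sketch of `create=True` (the attribute name `frooble` is invented
+   for illustration and does not exist on `SomeClass`):
+
+   .. doctest::
+
+      >>> @patch('__main__.SomeClass.frooble', create=True)
+      ... def test(mock_frooble):
+      ...     assert SomeClass.frooble is mock_frooble
+      ...
+      >>> test()
+      >>> hasattr(SomeClass, 'frooble')
+      False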
+
+ Patch can be used as a `TestCase` class decorator. It works by
+ decorating each test method in the class. This reduces the boilerplate
+   code when your test methods share a common set of patches. `patch` finds
+ tests by looking for method names that start with `patch.TEST_PREFIX`.
+ By default this is `test`, which matches the way `unittest` finds tests.
+ You can specify an alternative prefix by setting `patch.TEST_PREFIX`.
+
+ Patch can be used as a context manager, with the with statement. Here the
+ patching applies to the indented block after the with statement. If you
+ use "as" then the patched object will be bound to the name after the
+ "as"; very useful if `patch` is creating a mock object for you.
+
+ `patch` takes arbitrary keyword arguments. These will be passed to
+ the `Mock` (or `new_callable`) on construction.
+
+ `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
+ available for alternate use-cases.
+
+`patch` as function decorator, creating the mock for you and passing it into
+the decorated function:
+
+.. doctest::
+
+ >>> @patch('__main__.SomeClass')
+ ... def function(normal_argument, mock_class):
+ ... print mock_class is SomeClass
+ ...
+ >>> function(None)
+ True
+
+
+Patching a class replaces the class with a `MagicMock` *instance*. If the
+class is instantiated in the code under test then it will be the
+:attr:`~Mock.return_value` of the mock that will be used.
+
+If the class is instantiated multiple times you could use
+:attr:`~Mock.side_effect` to return a new mock each time. Alternatively you
+can set the `return_value` to be anything you want.
+
+To configure return values on methods of *instances* on the patched class
+you must do this on the `return_value`. For example:
+
+.. doctest::
+
+ >>> class Class(object):
+ ... def method(self):
+ ... pass
+ ...
+ >>> with patch('__main__.Class') as MockClass:
+ ... instance = MockClass.return_value
+ ... instance.method.return_value = 'foo'
+ ... assert Class() is instance
+ ... assert Class().method() == 'foo'
+ ...
+
+If you use `spec` or `spec_set` and `patch` is replacing a *class*, then the
+return value of the created mock will have the same spec.
+
+.. doctest::
+
+ >>> Original = Class
+ >>> patcher = patch('__main__.Class', spec=True)
+ >>> MockClass = patcher.start()
+ >>> instance = MockClass()
+ >>> assert isinstance(instance, Original)
+ >>> patcher.stop()
+
+The `new_callable` argument is useful where you want to use an alternative
+class to the default :class:`MagicMock` for the created mock. For example, if
+you wanted a :class:`NonCallableMock` to be used:
+
+.. doctest::
+
+ >>> thing = object()
+ >>> with patch('__main__.thing', new_callable=NonCallableMock) as mock_thing:
+ ... assert thing is mock_thing
+ ... thing()
+ ...
+ Traceback (most recent call last):
+ ...
+ TypeError: 'NonCallableMock' object is not callable
+
+Another use case might be to replace an object with a `StringIO` instance:
+
+.. doctest::
+
+ >>> from StringIO import StringIO
+ >>> def foo():
+ ... print 'Something'
+ ...
+ >>> @patch('sys.stdout', new_callable=StringIO)
+ ... def test(mock_stdout):
+ ... foo()
+ ... assert mock_stdout.getvalue() == 'Something\n'
+ ...
+ >>> test()
+
+When `patch` is creating a mock for you, it is common that the first thing
+you need to do is to configure the mock. Some of that configuration can be done
+in the call to patch. Any arbitrary keywords you pass into the call will be
+used to set attributes on the created mock:
+
+.. doctest::
+
+ >>> patcher = patch('__main__.thing', first='one', second='two')
+ >>> mock_thing = patcher.start()
+ >>> mock_thing.first
+ 'one'
+ >>> mock_thing.second
+ 'two'
+
+As well as attributes on the created mock, attributes of child mocks, like
+the :attr:`~Mock.return_value` and :attr:`~Mock.side_effect`, can also be
+configured. These aren't syntactically valid to pass in directly as
+keyword arguments, but a dictionary with these as keys can still be expanded
+into a `patch` call using `**`:
+
+.. doctest::
+
+ >>> config = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> patcher = patch('__main__.thing', **config)
+ >>> mock_thing = patcher.start()
+ >>> mock_thing.method()
+ 3
+ >>> mock_thing.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+
+patch.object
+============
+
+.. function:: patch.object(target, attribute, new=DEFAULT, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ patch the named member (`attribute`) on an object (`target`) with a mock
+ object.
+
+ `patch.object` can be used as a decorator, class decorator or a context
+ manager. Arguments `new`, `spec`, `create`, `spec_set`, `autospec` and
+ `new_callable` have the same meaning as for `patch`. Like `patch`,
+ `patch.object` takes arbitrary keyword arguments for configuring the mock
+ object it creates.
+
+ When used as a class decorator `patch.object` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
+
+You can either call `patch.object` with three arguments or two arguments. The
+three argument form takes the object to be patched, the attribute name and the
+object to replace the attribute with.
+
+When calling with the two argument form you omit the replacement object, and a
+mock is created for you and passed in as an extra argument to the decorated
+function:
+
+.. doctest::
+
+ >>> @patch.object(SomeClass, 'class_method')
+ ... def test(mock_method):
+ ... SomeClass.class_method(3)
+ ... mock_method.assert_called_with(3)
+ ...
+ >>> test()
+
+`spec`, `create` and the other arguments to `patch.object` have the same
+meaning as they do for `patch`.
+
+
+patch.dict
+==========
+
+.. function:: patch.dict(in_dict, values=(), clear=False, **kwargs)
+
+ Patch a dictionary, or dictionary like object, and restore the dictionary
+ to its original state after the test.
+
+ `in_dict` can be a dictionary or a mapping like container. If it is a
+ mapping then it must at least support getting, setting and deleting items
+ plus iterating over keys.
+
+ `in_dict` can also be a string specifying the name of the dictionary, which
+ will then be fetched by importing it.
+
+ `values` can be a dictionary of values to set in the dictionary. `values`
+ can also be an iterable of `(key, value)` pairs.
+
+ If `clear` is True then the dictionary will be cleared before the new
+ values are set.
+
+ `patch.dict` can also be called with arbitrary keyword arguments to set
+ values in the dictionary.
+
+ `patch.dict` can be used as a context manager, decorator or class
+ decorator. When used as a class decorator `patch.dict` honours
+ `patch.TEST_PREFIX` for choosing which methods to wrap.
+
+`patch.dict` can be used to add members to a dictionary, or simply let a test
+change a dictionary, and ensure the dictionary is restored when the test
+ends.
+
+.. doctest::
+
+ >>> from mock import patch
+ >>> foo = {}
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == {}
+
+ >>> import os
+ >>> with patch.dict('os.environ', {'newkey': 'newvalue'}):
+ ... print os.environ['newkey']
+ ...
+ newvalue
+ >>> assert 'newkey' not in os.environ
+
+Keywords can be used in the `patch.dict` call to set values in the dictionary:
+
+.. doctest::
+
+ >>> mymodule = MagicMock()
+ >>> mymodule.function.return_value = 'fish'
+ >>> with patch.dict('sys.modules', mymodule=mymodule):
+ ... import mymodule
+ ... mymodule.function('some', 'args')
+ ...
+ 'fish'
+
+`patch.dict` can be used with dictionary like objects that aren't actually
+dictionaries. At the very minimum they must support item getting, setting,
+deleting and either iteration or membership test. This corresponds to the
+magic methods `__getitem__`, `__setitem__`, `__delitem__` and either
+`__iter__` or `__contains__`.
+
+.. doctest::
+
+ >>> class Container(object):
+ ... def __init__(self):
+ ... self.values = {}
+ ... def __getitem__(self, name):
+ ... return self.values[name]
+ ... def __setitem__(self, name, value):
+ ... self.values[name] = value
+ ... def __delitem__(self, name):
+ ... del self.values[name]
+ ... def __iter__(self):
+ ... return iter(self.values)
+ ...
+ >>> thing = Container()
+ >>> thing['one'] = 1
+ >>> with patch.dict(thing, one=2, two=3):
+ ... assert thing['one'] == 2
+ ... assert thing['two'] == 3
+ ...
+ >>> assert thing['one'] == 1
+ >>> assert list(thing) == ['one']
+
+
+patch.multiple
+==============
+
+.. function:: patch.multiple(target, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ Perform multiple patches in a single call. It takes the object to be
+ patched (either as an object or a string to fetch the object by importing)
+ and keyword arguments for the patches::
+
+ with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'):
+ ...
+
+ Use :data:`DEFAULT` as the value if you want `patch.multiple` to create
+ mocks for you. In this case the created mocks are passed into a decorated
+ function by keyword, and a dictionary is returned when `patch.multiple` is
+ used as a context manager.
+
+ `patch.multiple` can be used as a decorator, class decorator or a context
+ manager. The arguments `spec`, `spec_set`, `create`, `autospec` and
+ `new_callable` have the same meaning as for `patch`. These arguments will
+ be applied to *all* patches done by `patch.multiple`.
+
+ When used as a class decorator `patch.multiple` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
+
+If you want `patch.multiple` to create mocks for you, then you can use
+:data:`DEFAULT` as the value. If you use `patch.multiple` as a decorator
+then the created mocks are passed into the decorated function by keyword.
+
+.. doctest::
+
+ >>> thing = object()
+ >>> other = object()
+
+ >>> @patch.multiple('__main__', thing=DEFAULT, other=DEFAULT)
+ ... def test_function(thing, other):
+ ... assert isinstance(thing, MagicMock)
+ ... assert isinstance(other, MagicMock)
+ ...
+ >>> test_function()
+
+`patch.multiple` can be nested with other `patch` decorators, but put arguments
+passed by keyword *after* any of the standard arguments created by `patch`:
+
+.. doctest::
+
+ >>> @patch('sys.exit')
+ ... @patch.multiple('__main__', thing=DEFAULT, other=DEFAULT)
+ ... def test_function(mock_exit, other, thing):
+ ... assert 'other' in repr(other)
+ ... assert 'thing' in repr(thing)
+ ... assert 'exit' in repr(mock_exit)
+ ...
+ >>> test_function()
+
+If `patch.multiple` is used as a context manager, the value returned by the
+context manager is a dictionary where created mocks are keyed by name:
+
+.. doctest::
+
+ >>> with patch.multiple('__main__', thing=DEFAULT, other=DEFAULT) as values:
+ ... assert 'other' in repr(values['other'])
+ ... assert 'thing' in repr(values['thing'])
+ ... assert values['thing'] is thing
+ ... assert values['other'] is other
+ ...
+
+
+.. _start-and-stop:
+
+patch methods: start and stop
+=============================
+
+All the patchers have `start` and `stop` methods. These make it simpler to do
+patching in `setUp` methods or where you want to do multiple patches without
+nesting decorators or with statements.
+
+To use them call `patch`, `patch.object` or `patch.dict` as normal and keep a
+reference to the returned `patcher` object. You can then call `start` to put
+the patch in place and `stop` to undo it.
+
+If you are using `patch` to create a mock for you then it will be returned by
+the call to `patcher.start`.
+
+.. doctest::
+
+ >>> patcher = patch('package.module.ClassName')
+ >>> from package import module
+ >>> original = module.ClassName
+ >>> new_mock = patcher.start()
+ >>> assert module.ClassName is not original
+ >>> assert module.ClassName is new_mock
+ >>> patcher.stop()
+ >>> assert module.ClassName is original
+ >>> assert module.ClassName is not new_mock
+
+
+A typical use case for this might be for doing multiple patches in the `setUp`
+method of a `TestCase`:
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... self.patcher1 = patch('package.module.Class1')
+ ... self.patcher2 = patch('package.module.Class2')
+ ... self.MockClass1 = self.patcher1.start()
+ ... self.MockClass2 = self.patcher2.start()
+ ...
+ ... def tearDown(self):
+ ... self.patcher1.stop()
+ ... self.patcher2.stop()
+ ...
+ ... def test_something(self):
+ ... assert package.module.Class1 is self.MockClass1
+ ... assert package.module.Class2 is self.MockClass2
+ ...
+ >>> MyTest('test_something').run()
+
+.. caution::
+
+ If you use this technique you must ensure that the patching is "undone" by
+ calling `stop`. This can be fiddlier than you might think, because if an
+ exception is raised in the setUp then tearDown is not called. `unittest2
+ <http://pypi.python.org/pypi/unittest2>`_ cleanup functions make this
+ easier.
+
+ .. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... patcher = patch('package.module.Class')
+ ... self.MockClass = patcher.start()
+ ... self.addCleanup(patcher.stop)
+ ...
+ ... def test_something(self):
+ ... assert package.module.Class is self.MockClass
+ ...
+ >>> MyTest('test_something').run()
+
+ As an added bonus you no longer need to keep a reference to the `patcher`
+ object.
+
+It is also possible to stop all patches which have been started by using
+`patch.stopall`.
+
+.. function:: patch.stopall
+
+ Stop all active patches. Only stops patches started with `start`.
+
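+For example (a brief sketch, reusing the `package.module` names from the
+examples above):
+
+.. doctest::
+
+    >>> patcher1 = patch('package.module.Class1')
+    >>> patcher2 = patch('package.module.Class2')
+    >>> MockClass1 = patcher1.start()
+    >>> MockClass2 = patcher2.start()
+    >>> assert package.module.Class1 is MockClass1
+    >>> assert package.module.Class2 is MockClass2
+    >>> patch.stopall()
+    >>> assert package.module.Class1 is not MockClass1
+    >>> assert package.module.Class2 is not MockClass2
+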
+
+TEST_PREFIX
+===========
+
+All of the patchers can be used as class decorators. When used in this way
+they wrap every test method on the class. The patchers recognise methods that
+start with `test` as being test methods. This is the same way that the
+`unittest.TestLoader` finds test methods by default.
+
+It is possible that you want to use a different prefix for your tests. You can
+inform the patchers of the different prefix by setting `patch.TEST_PREFIX`:
+
+.. doctest::
+
+ >>> patch.TEST_PREFIX = 'foo'
+ >>> value = 3
+ >>>
+ >>> @patch('__main__.value', 'not three')
+ ... class Thing(object):
+ ... def foo_one(self):
+ ... print value
+ ... def foo_two(self):
+ ... print value
+ ...
+ >>>
+ >>> Thing().foo_one()
+ not three
+ >>> Thing().foo_two()
+ not three
+ >>> value
+ 3
+
+
+Nesting Patch Decorators
+========================
+
+If you want to perform multiple patches then you can simply stack up the
+decorators, using this pattern:
+
+.. doctest::
+
+ >>> @patch.object(SomeClass, 'class_method')
+ ... @patch.object(SomeClass, 'static_method')
+ ... def test(mock1, mock2):
+ ... assert SomeClass.static_method is mock1
+ ... assert SomeClass.class_method is mock2
+ ... SomeClass.static_method('foo')
+ ... SomeClass.class_method('bar')
+ ... return mock1, mock2
+ ...
+ >>> mock1, mock2 = test()
+ >>> mock1.assert_called_once_with('foo')
+ >>> mock2.assert_called_once_with('bar')
+
+
+Note that the decorators are applied from the bottom upwards. This is the
+standard way that Python applies decorators. The order of the created mocks
+passed into your test function matches this order.
+
+Like all context-managers, patches can be nested using contextlib's `nested`
+function; *every* patch will appear in the tuple after "as":
+
+.. doctest::
+
+ >>> from contextlib import nested
+ >>> with nested(
+ ... patch('package.module.ClassName1'),
+ ... patch('package.module.ClassName2')
+ ... ) as (MockClass1, MockClass2):
+ ... assert package.module.ClassName1 is MockClass1
+ ... assert package.module.ClassName2 is MockClass2
+ ...
+
+
+.. _where-to-patch:
+
+Where to patch
+==============
+
+`patch` works by (temporarily) replacing the object that a *name* points to with
+another one. There can be many names pointing to any individual object, so
+for patching to work you must ensure that you patch the name used by the system
+under test.
+
+The basic principle is that you patch where an object is *looked up*, which
+is not necessarily the same place as where it is defined. A couple of
+examples will help to clarify this.
+
+Imagine we have a project that we want to test with the following structure::
+
+ a.py
+ -> Defines SomeClass
+
+ b.py
+ -> from a import SomeClass
+ -> some_function instantiates SomeClass
+
+Now we want to test `some_function` but we want to mock out `SomeClass` using
+`patch`. The problem is that when we import module b, which we will have to
+do, it then imports `SomeClass` from module a. If we use `patch` to mock out
+`a.SomeClass` then it will have no effect on our test; module b already has a
+reference to the *real* `SomeClass` and it looks like our patching had no
+effect.
+
+The key is to patch out `SomeClass` where it is used (or where it is looked
+up). In this case `some_function` will actually look up `SomeClass` in module b,
+where we have imported it. The patching should look like:
+
+ `@patch('b.SomeClass')`
+
+However, consider the alternative scenario where instead of `from a import
+SomeClass` module b does `import a` and `some_function` uses `a.SomeClass`. Both
+of these import forms are common. In this case the class we want to patch is
+being looked up in module a, and so we have to patch `a.SomeClass` instead:
+
+ `@patch('a.SomeClass')`
+
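+As a sketch (assuming the hypothetical modules above, and assuming
+`some_function` calls `SomeClass()` with no arguments), the two cases would be
+tested like this::
+
+    import b
+
+    # when b.py does `from a import SomeClass`
+    @patch('b.SomeClass')
+    def test_uses_from_import(MockSomeClass):
+        b.some_function()
+        MockSomeClass.assert_called_once_with()
+
+    # when b.py does `import a` and calls a.SomeClass()
+    @patch('a.SomeClass')
+    def test_uses_module_attribute(MockSomeClass):
+        b.some_function()
+        MockSomeClass.assert_called_once_with()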
+
+Patching Descriptors and Proxy Objects
+======================================
+
+Since version 0.6.0 both patch_ and patch.object_ have been able to correctly
+patch and restore descriptors: class methods, static methods and properties.
+You should patch these on the *class* rather than an instance.
+
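+For example, a static method patched on the class is replaced for the duration
+of the patch and then restored; the `Helper` class here is just an illustrative
+sketch:
+
+.. doctest::
+
+    >>> class Helper(object):
+    ...     @staticmethod
+    ...     def static_method():
+    ...         return 'original'
+    ...
+    >>> with patch.object(Helper, 'static_method') as mock_method:
+    ...     mock_method.return_value = 'patched'
+    ...     assert Helper.static_method() == 'patched'
+    ...
+    >>> Helper.static_method()
+    'original'
+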
+Since version 0.7.0 patch_ and patch.object_ work correctly with some objects
+that proxy attribute access, like the `django settings object
+<http://www.voidspace.org.uk/python/weblog/arch_d7_2010_12_04.shtml#e1198>`_.
+
+.. note::
+
+ In django `import settings` and `from django.conf import settings`
+ return different objects. If you are using libraries / apps that do both you
+ may have to patch both. Grrr...
diff --git a/third_party/python/mock-1.0.0/html/_sources/sentinel.txt b/third_party/python/mock-1.0.0/html/_sources/sentinel.txt
new file mode 100644
index 0000000000..1c5223da0e
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_sources/sentinel.txt
@@ -0,0 +1,58 @@
+==========
+ Sentinel
+==========
+
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ class ProductionClass(object):
+ def something(self):
+ return self.method()
+
+ class Test(unittest2.TestCase):
+ def testSomething(self):
+ pass
+ self = Test('testSomething')
+
+
+.. data:: sentinel
+
+ The ``sentinel`` object provides a convenient way of providing unique
+ objects for your tests.
+
+ Attributes are created on demand when you access them by name. Accessing
+ the same attribute will always return the same object. The objects
+ returned have a sensible repr so that test failure messages are readable.
+
+
+.. data:: DEFAULT
+
+ The `DEFAULT` object is a pre-created sentinel (actually
+ `sentinel.DEFAULT`). It can be used by :attr:`~Mock.side_effect`
+ functions to indicate that the normal return value should be used.
+
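+A minimal sketch of this (the attribute names are illustrative): a
+:attr:`~Mock.side_effect` function that returns `DEFAULT` falls back to the
+mock's normal `return_value`:
+
+.. doctest::
+
+    >>> mock = Mock(return_value=sentinel.RETURN)
+    >>> def side_effect(arg):
+    ...     if arg is sentinel.SPECIAL:
+    ...         return sentinel.OVERRIDE
+    ...     return DEFAULT
+    ...
+    >>> mock.side_effect = side_effect
+    >>> mock(sentinel.SPECIAL)
+    sentinel.OVERRIDE
+    >>> mock(sentinel.ORDINARY)
+    sentinel.RETURN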
+
+Sentinel Example
+================
+
+Sometimes when testing you need to test that a specific object is passed as an
+argument to another method, or returned. It can be common to create named
+sentinel objects to test this. `sentinel` provides a convenient way of
+creating and testing the identity of objects like this.
+
+In this example we monkey patch `method` to return
+`sentinel.some_object`:
+
+.. doctest::
+
+ >>> real = ProductionClass()
+ >>> real.method = Mock(name="method")
+ >>> real.method.return_value = sentinel.some_object
+ >>> result = real.method()
+ >>> assert result is sentinel.some_object
+ >>> sentinel.some_object
+ sentinel.some_object
+
+
diff --git a/third_party/python/mock-1.0.0/html/_static/adctheme.css b/third_party/python/mock-1.0.0/html/_static/adctheme.css
new file mode 100644
index 0000000000..60395bcefb
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/adctheme.css
@@ -0,0 +1,757 @@
+/**
+ * Sphinx stylesheet -- basic theme
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ */
+ h3 {
+ color:#000000;
+ font-size: 17px;
+ margin-bottom:0.5em;
+ margin-top:2em;
+ }
+/* -- main layout ----------------------------------------------------------- */
+
+div.clearer {
+ clear: both;
+}
+
+/* -- header ---------------------------------------------------------------- */
+
+#header #title {
+ background:#29334F url(title_background.png) repeat-x scroll 0 0;
+ border-bottom:1px solid #B6B6B6;
+ height:25px;
+ overflow:hidden;
+}
+#headerButtons {
+ position: absolute;
+ list-style: none outside;
+ top: 26px;
+ left: 0px;
+ right: 0px;
+ margin: 0px;
+ padding: 0px;
+ border-top: 1px solid #2B334F;
+ border-bottom: 1px solid #EDEDED;
+ height: 20px;
+ font-size: 8pt;
+ overflow: hidden;
+ background-color: #D8D8D8;
+}
+
+#headerButtons li {
+ background-repeat:no-repeat;
+ display:inline;
+ margin-top:0;
+ padding:0;
+}
+
+.headerButton {
+ display: inline;
+ height:20px;
+}
+
+.headerButton a {
+ text-decoration: none;
+ float: right;
+ height: 20px;
+ padding: 4px 15px;
+ border-left: 1px solid #ACACAC;
+ font-family:'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
+ color: black;
+}
+.headerButton a:hover {
+ color: white;
+ background-color: #787878;
+
+}
+
+li#toc_button {
+ text-align:left;
+}
+
+li#toc_button .headerButton a {
+ width:198px;
+ padding-top: 4px;
+ font-family:'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
+ color: black;
+ float: left;
+ padding-left:15px;
+ border-right:1px solid #ACACAC;
+ background:transparent url(triangle_open.png) no-repeat scroll 4px 6px;
+}
+
+li#toc_button .headerButton a:hover {
+ background-color: #787878;
+ color: white;
+}
+
+li#page_buttons {
+position:absolute;
+right:0;
+}
+
+#breadcrumbs {
+ color: black;
+ background-image:url(breadcrumb_background.png);
+ border-top:1px solid #2B334F;
+ bottom:0;
+ font-size:10px;
+ height:15px;
+ left:0;
+ overflow:hidden;
+ padding:3px 10px 0;
+ position:absolute;
+ right:0;
+ white-space:nowrap;
+ z-index:901;
+}
+#breadcrumbs a {
+ color: black;
+ text-decoration: none;
+}
+#breadcrumbs a:hover {
+ text-decoration: underline;
+}
+#breadcrumbs img {
+ padding-left: 3px;
+}
+/* -- sidebar --------------------------------------------------------------- */
+#sphinxsidebar {
+ position: absolute;
+ top: 84px;
+ bottom: 19px;
+ left: 0px;
+ width: 229px;
+ background-color: #E4EBF7;
+ border-right: 1px solid #ACACAC;
+ border-top: 1px solid #2B334F;
+ overflow-x: hidden;
+ overflow-y: auto;
+ padding: 0px 0px 0px 0px;
+ font-size:11px;
+}
+
+div.sphinxsidebarwrapper {
+ padding: 10px 5px 0 10px;
+}
+
+#sphinxsidebar li {
+ margin: 0px;
+ padding: 0px;
+ font-weight: normal;
+ margin: 0px 0px 7px 0px;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ font-size: 11px;
+}
+
+#sphinxsidebar ul {
+ list-style: none;
+ margin: 0px 0px 0px 0px;
+ padding: 0px 5px 0px 5px;
+}
+
+#sphinxsidebar ul ul,
+#sphinxsidebar ul.want-points {
+ list-style: square;
+}
+
+#sphinxsidebar ul ul {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+#sphinxsidebar form {
+ margin-top: 10px;
+}
+
+#sphinxsidebar input {
+ border: 1px solid #787878;
+ font-family: sans-serif;
+ font-size: 1em;
+}
+
+img {
+ border: 0;
+}
+
+#sphinxsidebar li.toctree-l1 a {
+ font-weight: bold;
+ color: #000;
+ text-decoration: none;
+}
+
+#sphinxsidebar li.toctree-l2 a {
+ font-weight: bold;
+ color: #4f4f4f;
+ text-decoration: none;
+}
+
+/* -- search page ----------------------------------------------------------- */
+
+ul.search {
+ margin: 10px 0 0 20px;
+ padding: 0;
+}
+
+ul.search li {
+ padding: 5px 0 5px 20px;
+ background-image: url(file.png);
+ background-repeat: no-repeat;
+ background-position: 0 7px;
+}
+
+ul.search li a {
+ font-weight: bold;
+}
+
+ul.search li div.context {
+ color: #888;
+ margin: 2px 0 0 30px;
+ text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+ font-weight: bold;
+}
+#sphinxsidebar input.prettysearch {border:none;}
+input.searchbutton {
+ float: right;
+}
+.search-wrapper {width: 100%; height: 25px;}
+.search-wrapper input.prettysearch { border: none; width:200px; height: 16px; background: url(searchfield_repeat.png) center top repeat-x; border: 0px; margin: 0; padding: 3px 0 0 0; font: 11px "Lucida Grande", "Lucida Sans Unicode", Arial, sans-serif; }
+.search-wrapper input.prettysearch { width: 184px; margin-left: 20px; *margin-top:-1px; *margin-right:-2px; *margin-left:10px; }
+.search-wrapper .search-left { display: block; position: absolute; width: 20px; height: 19px; background: url(searchfield_leftcap.png) left top no-repeat; }
+.search-wrapper .search-right { display: block; position: relative; left: 204px; top: -19px; width: 10px; height: 19px; background: url(searchfield_rightcap.png) right top no-repeat; }
+
+/* -- index page ------------------------------------------------------------ */
+
+table.contentstable {
+ width: 90%;
+}
+
+table.contentstable p.biglink {
+ line-height: 150%;
+}
+
+a.biglink {
+ font-size: 1.3em;
+}
+
+span.linkdescr {
+ font-style: italic;
+ padding-top: 5px;
+ font-size: 90%;
+}
+
+/* -- general index --------------------------------------------------------- */
+
+table.indextable td {
+ text-align: left;
+ vertical-align: top;
+}
+
+table.indextable dl, table.indextable dd {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+table.indextable tr.pcap {
+ height: 10px;
+}
+
+table.indextable tr.cap {
+ margin-top: 10px;
+ background-color: #f2f2f2;
+}
+
+img.toggler {
+ margin-right: 3px;
+ margin-top: 3px;
+ cursor: pointer;
+}
+
+/* -- general body styles --------------------------------------------------- */
+.document {
+ border-top:1px solid #2B334F;
+ overflow:auto;
+ padding-left:2em;
+ padding-right:2em;
+ position:absolute;
+ z-index:1;
+ top:84px;
+ bottom:19px;
+ right:0;
+ left:230px;
+}
+
+a.headerlink {
+ visibility: hidden;
+}
+
+h1:hover > a.headerlink,
+h2:hover > a.headerlink,
+h3:hover > a.headerlink,
+h4:hover > a.headerlink,
+h5:hover > a.headerlink,
+h6:hover > a.headerlink,
+dt:hover > a.headerlink {
+ visibility: visible;
+}
+
+div.body p.caption {
+ text-align: inherit;
+}
+
+div.body td {
+ text-align: left;
+}
+
+.field-list ul {
+ padding-left: 1em;
+}
+
+.first {
+ margin-top: 0 !important;
+}
+
+p.rubric {
+ margin-top: 30px;
+ font-weight: bold;
+}
+
+/* -- sidebars -------------------------------------------------------------- */
+
+/*div.sidebar {
+ margin: 0 0 0.5em 1em;
+ border: 1px solid #ddb;
+ padding: 7px 7px 0 7px;
+ background-color: #ffe;
+ width: 40%;
+ float: right;
+}
+
+p.sidebar-title {
+ font-weight: bold;
+}
+*/
+/* -- topics ---------------------------------------------------------------- */
+
+div.topic {
+ border: 1px solid #ccc;
+ padding: 7px 7px 0 7px;
+ margin: 10px 0 10px 0;
+}
+
+p.topic-title {
+ font-size: 1.1em;
+ font-weight: bold;
+ margin-top: 10px;
+}
+
+/* -- admonitions ----------------------------------------------------------- */
+.admonition {
+ border: 1px solid #a1a5a9;
+ background-color: #f7f7f7;
+ margin: 20px;
+ padding: 0px 8px 7px 9px;
+ text-align: left;
+}
+.warning {
+ background-color:#E8E8E8;
+ border:1px solid #111111;
+ margin:30px;
+}
+.admonition p {
+ font: 12px 'Lucida Grande', Geneva, Helvetica, Arial, sans-serif;
+ margin-top: 7px;
+ margin-bottom: 0px;
+}
+
+div.admonition dt {
+ font-weight: bold;
+}
+
+div.admonition dl {
+ margin-bottom: 0;
+}
+
+p.admonition-title {
+ margin: 0px 10px 5px 0px;
+ font-weight: bold;
+ padding-top: 3px;
+}
+
+div.body p.centered {
+ text-align: center;
+ margin-top: 25px;
+}
+
+/* -- tables ---------------------------------------------------------------- */
+
+table.docutils {
+ border-collapse: collapse;
+ border-top: 1px solid #919699;
+ border-left: 1px solid #919699;
+ border-right: 1px solid #919699;
+ font-size:12px;
+ padding:8px;
+ text-align:left;
+ vertical-align:top;
+}
+
+table.docutils td, table.docutils th {
+ padding: 8px;
+ font-size: 12px;
+ text-align: left;
+ vertical-align: top;
+ border-bottom: 1px solid #919699;
+}
+
+table.docutils th {
+ font-weight: bold;
+}
+/* This alternates colors in up to six table rows (light blue for odd, white for even)*/
+.docutils tr {
+ background: #F0F5F9;
+}
+
+.docutils tr + tr {
+ background: #FFFFFF;
+}
+
+.docutils tr + tr + tr {
+ background: #F0F5F9;
+}
+
+.docutils tr + tr + tr + tr {
+ background: #FFFFFF;
+}
+
+.docutils tr + tr + tr +tr + tr {
+ background: #F0F5F9;
+}
+
+.docutils tr + tr + tr + tr + tr + tr {
+ background: #FFFFFF;
+}
+
+.docutils tr + tr + tr + tr + tr + tr + tr {
+ background: #F0F5F9;
+}
+
+table.footnote td, table.footnote th {
+ border: 0 !important;
+}
+
+th {
+ text-align: left;
+ padding-right: 5px;
+}
+
+/* -- other body styles ----------------------------------------------------- */
+
+dl {
+ margin-bottom: 15px;
+}
+
+dd p {
+ margin-top: 0px;
+ font-size: 12px;
+}
+
+dd ul, dd table {
+ margin-bottom: 10px;
+}
+
+dd {
+ margin-top: 3px;
+ margin-bottom: 10px;
+ margin-left: 30px;
+ font-size: 12px;
+}
+
+dt:target, .highlight {
+ background-color: #fbe54e;
+}
+
+dl.glossary dt {
+ font-weight: bold;
+ font-size: 0.8em;
+}
+
+dl.glossary dd {
+ font-size:12px;
+}
+.field-list ul {
+ vertical-align: top;
+ margin: 0;
+ padding-bottom: 0;
+ list-style: none inside;
+}
+
+.field-list ul li {
+ margin-top: 0;
+}
+
+.field-list p {
+ margin: 0;
+}
+
+.refcount {
+ color: #060;
+}
+
+.optional {
+ font-size: 1.3em;
+}
+
+.versionmodified {
+ font-style: italic;
+}
+
+.system-message {
+ background-color: #fda;
+ padding: 5px;
+ border: 3px solid red;
+}
+
+.footnote:target {
+ background-color: #ffa
+}
+
+/* -- code displays --------------------------------------------------------- */
+
+pre {
+ overflow: auto;
+ background-color:#F1F5F9;
+ border:1px solid #C9D1D7;
+ border-spacing:0;
+ font-family:"Bitstream Vera Sans Mono",Monaco,"Lucida Console",Courier,Consolas,monospace;
+ font-size:11px;
+ padding: 10px;
+}
+
+td.linenos pre {
+ padding: 5px 0px;
+ border: 0;
+ background-color: transparent;
+ color: #aaa;
+}
+
+table.highlighttable {
+ margin-left: 0.5em;
+}
+
+table.highlighttable td {
+ padding: 0 0.5em 0 0.5em;
+}
+
+tt {
+ font-family:"Bitstream Vera Sans Mono",Monaco,"Lucida Console",Courier,Consolas,monospace;
+
+}
+
+tt.descname {
+ background-color: transparent;
+ font-weight: bold;
+ font-size: 1em;
+}
+
+tt.descclassname {
+ background-color: transparent;
+}
+
+tt.xref, a tt {
+ background-color: transparent;
+ font-weight: bold;
+}
+
+h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
+ background-color: transparent;
+}
+
+/* -- math display ---------------------------------------------------------- */
+
+img.math {
+ vertical-align: middle;
+}
+
+div.body div.math p {
+ text-align: center;
+}
+
+span.eqno {
+ float: right;
+}
+
+/* -- printout stylesheet --------------------------------------------------- */
+
+@media print {
+ div.document,
+ div.documentwrapper,
+ div.bodywrapper {
+ margin: 0;
+ width: 100%;
+ }
+
+ div.sphinxsidebar,
+ div.related,
+ div.footer,
+ #top-link {
+ display: none;
+ }
+}
+
+body {
+ font-family:'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
+}
+
+dl.class dt {
+ padding: 3px;
+/* border-top: 2px solid #999;*/
+}
+
+em.property {
+ font-style: normal;
+}
+
+dl.class dd p {
+ margin-top: 6px;
+}
+
+dl.class dd dl.exception dt {
+ padding: 3px;
+ background-color: #FFD6D6;
+ border-top: none;
+}
+
+dl.class dd dl.method dt {
+ padding: 3px;
+ background-color: #e9e9e9;
+ border-top: none;
+
+}
+
+dl.function dt {
+ padding: 3px;
+ border-top: 2px solid #999;
+}
+
+ul {
+list-style-image:none;
+list-style-position:outside;
+list-style-type:square;
+margin:0 0 0 30px;
+padding:0 0 12px 6px;
+}
+#docstitle {
+ height: 36px;
+ background-image: url(header_sm_mid.png);
+ left: 0;
+ top: 0;
+ position: absolute;
+ width: 100%;
+}
+#docstitle p {
+ padding:7px 0 0 45px;
+ margin: 0;
+ color: white;
+ text-shadow:0 1px 0 #787878;
+ background: transparent url(documentation.png) no-repeat scroll 10px 3px;
+ height: 36px;
+ font-size: 15px;
+}
+#header {
+height:45px;
+left:0;
+position:absolute;
+right:0;
+top:36px;
+z-index:900;
+}
+
+#header h1 {
+font-size:10pt;
+margin:0;
+padding:5px 0 0 10px;
+text-shadow:0 1px 0 #D5D5D5;
+white-space:nowrap;
+}
+
+h1 {
+-x-system-font:none;
+color:#000000;
+font-family:'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
+font-size:30px;
+font-size-adjust:none;
+font-stretch:normal;
+font-style:normal;
+font-variant:normal;
+font-weight:bold;
+line-height:normal;
+margin-bottom:25px;
+margin-top:1em;
+}
+
+.footer {
+border-top:1px solid #DDDDDD;
+clear:both;
+padding-top:9px;
+width:100%;
+font-size:10px;
+}
+
+p {
+-x-system-font:none;
+font-family:'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
+font-size:12px;
+font-size-adjust:none;
+font-stretch:normal;
+font-style:normal;
+font-variant:normal;
+font-weight:normal;
+line-height:normal;
+margin-bottom:10px;
+margin-top:0;
+}
+
+h2 {
+border-bottom:1px solid #919699;
+color:#000000;
+font-size:24px;
+margin-top:2.5em;
+padding-bottom:2px;
+}
+
+a:link:hover {
+color:#093D92;
+text-decoration:underline;
+}
+
+a:link {
+color:#093D92;
+text-decoration:none;
+}
+
+
+ol {
+list-style-position:outside;
+list-style-type:decimal;
+margin:0 0 0 30px;
+padding:0 0 12px 6px;
+}
+li {
+margin-top:7px;
+font-family:'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
+font-size:12px;
+font-size-adjust:none;
+font-stretch:normal;
+font-style:normal;
+font-variant:normal;
+font-weight:normal;
+line-height:normal;
+}
+li p {
+margin-top:8px;
+} \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/_static/basic.css b/third_party/python/mock-1.0.0/html/_static/basic.css
new file mode 100644
index 0000000000..43e8bafaf3
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/basic.css
@@ -0,0 +1,540 @@
+/*
+ * basic.css
+ * ~~~~~~~~~
+ *
+ * Sphinx stylesheet -- basic theme.
+ *
+ * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/* -- main layout ----------------------------------------------------------- */
+
+div.clearer {
+ clear: both;
+}
+
+/* -- relbar ---------------------------------------------------------------- */
+
+div.related {
+ width: 100%;
+ font-size: 90%;
+}
+
+div.related h3 {
+ display: none;
+}
+
+div.related ul {
+ margin: 0;
+ padding: 0 0 0 10px;
+ list-style: none;
+}
+
+div.related li {
+ display: inline;
+}
+
+div.related li.right {
+ float: right;
+ margin-right: 5px;
+}
+
+/* -- sidebar --------------------------------------------------------------- */
+
+div.sphinxsidebarwrapper {
+ padding: 10px 5px 0 10px;
+}
+
+div.sphinxsidebar {
+ float: left;
+ width: 230px;
+ margin-left: -100%;
+ font-size: 90%;
+}
+
+div.sphinxsidebar ul {
+ list-style: none;
+}
+
+div.sphinxsidebar ul ul,
+div.sphinxsidebar ul.want-points {
+ margin-left: 20px;
+ list-style: square;
+}
+
+div.sphinxsidebar ul ul {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+div.sphinxsidebar form {
+ margin-top: 10px;
+}
+
+div.sphinxsidebar input {
+ border: 1px solid #98dbcc;
+ font-family: sans-serif;
+ font-size: 1em;
+}
+
+div.sphinxsidebar #searchbox input[type="text"] {
+ width: 170px;
+}
+
+div.sphinxsidebar #searchbox input[type="submit"] {
+ width: 30px;
+}
+
+img {
+ border: 0;
+}
+
+/* -- search page ----------------------------------------------------------- */
+
+ul.search {
+ margin: 10px 0 0 20px;
+ padding: 0;
+}
+
+ul.search li {
+ padding: 5px 0 5px 20px;
+ background-image: url(file.png);
+ background-repeat: no-repeat;
+ background-position: 0 7px;
+}
+
+ul.search li a {
+ font-weight: bold;
+}
+
+ul.search li div.context {
+ color: #888;
+ margin: 2px 0 0 30px;
+ text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+ font-weight: bold;
+}
+
+/* -- index page ------------------------------------------------------------ */
+
+table.contentstable {
+ width: 90%;
+}
+
+table.contentstable p.biglink {
+ line-height: 150%;
+}
+
+a.biglink {
+ font-size: 1.3em;
+}
+
+span.linkdescr {
+ font-style: italic;
+ padding-top: 5px;
+ font-size: 90%;
+}
+
+/* -- general index --------------------------------------------------------- */
+
+table.indextable {
+ width: 100%;
+}
+
+table.indextable td {
+ text-align: left;
+ vertical-align: top;
+}
+
+table.indextable dl, table.indextable dd {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+table.indextable tr.pcap {
+ height: 10px;
+}
+
+table.indextable tr.cap {
+ margin-top: 10px;
+ background-color: #f2f2f2;
+}
+
+img.toggler {
+ margin-right: 3px;
+ margin-top: 3px;
+ cursor: pointer;
+}
+
+div.modindex-jumpbox {
+ border-top: 1px solid #ddd;
+ border-bottom: 1px solid #ddd;
+ margin: 1em 0 1em 0;
+ padding: 0.4em;
+}
+
+div.genindex-jumpbox {
+ border-top: 1px solid #ddd;
+ border-bottom: 1px solid #ddd;
+ margin: 1em 0 1em 0;
+ padding: 0.4em;
+}
+
+/* -- general body styles --------------------------------------------------- */
+
+a.headerlink {
+ visibility: hidden;
+}
+
+h1:hover > a.headerlink,
+h2:hover > a.headerlink,
+h3:hover > a.headerlink,
+h4:hover > a.headerlink,
+h5:hover > a.headerlink,
+h6:hover > a.headerlink,
+dt:hover > a.headerlink {
+ visibility: visible;
+}
+
+div.body p.caption {
+ text-align: inherit;
+}
+
+div.body td {
+ text-align: left;
+}
+
+.field-list ul {
+ padding-left: 1em;
+}
+
+.first {
+ margin-top: 0 !important;
+}
+
+p.rubric {
+ margin-top: 30px;
+ font-weight: bold;
+}
+
+img.align-left, .figure.align-left, object.align-left {
+ clear: left;
+ float: left;
+ margin-right: 1em;
+}
+
+img.align-right, .figure.align-right, object.align-right {
+ clear: right;
+ float: right;
+ margin-left: 1em;
+}
+
+img.align-center, .figure.align-center, object.align-center {
+ display: block;
+ margin-left: auto;
+ margin-right: auto;
+}
+
+.align-left {
+ text-align: left;
+}
+
+.align-center {
+ text-align: center;
+}
+
+.align-right {
+ text-align: right;
+}
+
+/* -- sidebars -------------------------------------------------------------- */
+
+div.sidebar {
+ margin: 0 0 0.5em 1em;
+ border: 1px solid #ddb;
+ padding: 7px 7px 0 7px;
+ background-color: #ffe;
+ width: 40%;
+ float: right;
+}
+
+p.sidebar-title {
+ font-weight: bold;
+}
+
+/* -- topics ---------------------------------------------------------------- */
+
+div.topic {
+ border: 1px solid #ccc;
+ padding: 7px 7px 0 7px;
+ margin: 10px 0 10px 0;
+}
+
+p.topic-title {
+ font-size: 1.1em;
+ font-weight: bold;
+ margin-top: 10px;
+}
+
+/* -- admonitions ----------------------------------------------------------- */
+
+div.admonition {
+ margin-top: 10px;
+ margin-bottom: 10px;
+ padding: 7px;
+}
+
+div.admonition dt {
+ font-weight: bold;
+}
+
+div.admonition dl {
+ margin-bottom: 0;
+}
+
+p.admonition-title {
+ margin: 0px 10px 5px 0px;
+ font-weight: bold;
+}
+
+div.body p.centered {
+ text-align: center;
+ margin-top: 25px;
+}
+
+/* -- tables ---------------------------------------------------------------- */
+
+table.docutils {
+ border: 0;
+ border-collapse: collapse;
+}
+
+table.docutils td, table.docutils th {
+ padding: 1px 8px 1px 5px;
+ border-top: 0;
+ border-left: 0;
+ border-right: 0;
+ border-bottom: 1px solid #aaa;
+}
+
+table.field-list td, table.field-list th {
+ border: 0 !important;
+}
+
+table.footnote td, table.footnote th {
+ border: 0 !important;
+}
+
+th {
+ text-align: left;
+ padding-right: 5px;
+}
+
+table.citation {
+ border-left: solid 1px gray;
+ margin-left: 1px;
+}
+
+table.citation td {
+ border-bottom: none;
+}
+
+/* -- other body styles ----------------------------------------------------- */
+
+ol.arabic {
+ list-style: decimal;
+}
+
+ol.loweralpha {
+ list-style: lower-alpha;
+}
+
+ol.upperalpha {
+ list-style: upper-alpha;
+}
+
+ol.lowerroman {
+ list-style: lower-roman;
+}
+
+ol.upperroman {
+ list-style: upper-roman;
+}
+
+dl {
+ margin-bottom: 15px;
+}
+
+dd p {
+ margin-top: 0px;
+}
+
+dd ul, dd table {
+ margin-bottom: 10px;
+}
+
+dd {
+ margin-top: 3px;
+ margin-bottom: 10px;
+ margin-left: 30px;
+}
+
+dt:target, .highlighted {
+ background-color: #fbe54e;
+}
+
+dl.glossary dt {
+ font-weight: bold;
+ font-size: 1.1em;
+}
+
+.field-list ul {
+ margin: 0;
+ padding-left: 1em;
+}
+
+.field-list p {
+ margin: 0;
+}
+
+.refcount {
+ color: #060;
+}
+
+.optional {
+ font-size: 1.3em;
+}
+
+.versionmodified {
+ font-style: italic;
+}
+
+.system-message {
+ background-color: #fda;
+ padding: 5px;
+ border: 3px solid red;
+}
+
+.footnote:target {
+ background-color: #ffa;
+}
+
+.line-block {
+ display: block;
+ margin-top: 1em;
+ margin-bottom: 1em;
+}
+
+.line-block .line-block {
+ margin-top: 0;
+ margin-bottom: 0;
+ margin-left: 1.5em;
+}
+
+.guilabel, .menuselection {
+ font-family: sans-serif;
+}
+
+.accelerator {
+ text-decoration: underline;
+}
+
+.classifier {
+ font-style: oblique;
+}
+
+abbr, acronym {
+ border-bottom: dotted 1px;
+ cursor: help;
+}
+
+/* -- code displays --------------------------------------------------------- */
+
+pre {
+ overflow: auto;
+ overflow-y: hidden; /* fixes display issues on Chrome browsers */
+}
+
+td.linenos pre {
+ padding: 5px 0px;
+ border: 0;
+ background-color: transparent;
+ color: #aaa;
+}
+
+table.highlighttable {
+ margin-left: 0.5em;
+}
+
+table.highlighttable td {
+ padding: 0 0.5em 0 0.5em;
+}
+
+tt.descname {
+ background-color: transparent;
+ font-weight: bold;
+ font-size: 1.2em;
+}
+
+tt.descclassname {
+ background-color: transparent;
+}
+
+tt.xref, a tt {
+ background-color: transparent;
+ font-weight: bold;
+}
+
+h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
+ background-color: transparent;
+}
+
+.viewcode-link {
+ float: right;
+}
+
+.viewcode-back {
+ float: right;
+ font-family: sans-serif;
+}
+
+div.viewcode-block:target {
+ margin: -1px -10px;
+ padding: 0 10px;
+}
+
+/* -- math display ---------------------------------------------------------- */
+
+img.math {
+ vertical-align: middle;
+}
+
+div.body div.math p {
+ text-align: center;
+}
+
+span.eqno {
+ float: right;
+}
+
+/* -- printout stylesheet --------------------------------------------------- */
+
+@media print {
+ div.document,
+ div.documentwrapper,
+ div.bodywrapper {
+ margin: 0 !important;
+ width: 100%;
+ }
+
+ div.sphinxsidebar,
+ div.related,
+ div.footer,
+ #top-link {
+ display: none;
+ }
+} \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/_static/breadcrumb_background.png b/third_party/python/mock-1.0.0/html/_static/breadcrumb_background.png
new file mode 100644
index 0000000000..9b45910e0b
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/breadcrumb_background.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/default.css b/third_party/python/mock-1.0.0/html/_static/default.css
new file mode 100644
index 0000000000..2a3ac13316
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/default.css
@@ -0,0 +1,256 @@
+/*
+ * default.css_t
+ * ~~~~~~~~~~~~~
+ *
+ * Sphinx stylesheet -- default theme.
+ *
+ * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+@import url("basic.css");
+
+/* -- page layout ----------------------------------------------------------- */
+
+body {
+ font-family: sans-serif;
+ font-size: 100%;
+ background-color: #11303d;
+ color: #000;
+ margin: 0;
+ padding: 0;
+}
+
+div.document {
+ background-color: #1c4e63;
+}
+
+div.documentwrapper {
+ float: left;
+ width: 100%;
+}
+
+div.bodywrapper {
+ margin: 0 0 0 230px;
+}
+
+div.body {
+ background-color: #ffffff;
+ color: #000000;
+ padding: 0 20px 30px 20px;
+}
+
+div.footer {
+ color: #ffffff;
+ width: 100%;
+ padding: 9px 0 9px 0;
+ text-align: center;
+ font-size: 75%;
+}
+
+div.footer a {
+ color: #ffffff;
+ text-decoration: underline;
+}
+
+div.related {
+ background-color: #133f52;
+ line-height: 30px;
+ color: #ffffff;
+}
+
+div.related a {
+ color: #ffffff;
+}
+
+div.sphinxsidebar {
+}
+
+div.sphinxsidebar h3 {
+ font-family: 'Trebuchet MS', sans-serif;
+ color: #ffffff;
+ font-size: 1.4em;
+ font-weight: normal;
+ margin: 0;
+ padding: 0;
+}
+
+div.sphinxsidebar h3 a {
+ color: #ffffff;
+}
+
+div.sphinxsidebar h4 {
+ font-family: 'Trebuchet MS', sans-serif;
+ color: #ffffff;
+ font-size: 1.3em;
+ font-weight: normal;
+ margin: 5px 0 0 0;
+ padding: 0;
+}
+
+div.sphinxsidebar p {
+ color: #ffffff;
+}
+
+div.sphinxsidebar p.topless {
+ margin: 5px 10px 10px 10px;
+}
+
+div.sphinxsidebar ul {
+ margin: 10px;
+ padding: 0;
+ color: #ffffff;
+}
+
+div.sphinxsidebar a {
+ color: #98dbcc;
+}
+
+div.sphinxsidebar input {
+ border: 1px solid #98dbcc;
+ font-family: sans-serif;
+ font-size: 1em;
+}
+
+
+
+/* -- hyperlink styles ------------------------------------------------------ */
+
+a {
+ color: #355f7c;
+ text-decoration: none;
+}
+
+a:visited {
+ color: #355f7c;
+ text-decoration: none;
+}
+
+a:hover {
+ text-decoration: underline;
+}
+
+
+
+/* -- body styles ----------------------------------------------------------- */
+
+div.body h1,
+div.body h2,
+div.body h3,
+div.body h4,
+div.body h5,
+div.body h6 {
+ font-family: 'Trebuchet MS', sans-serif;
+ background-color: #f2f2f2;
+ font-weight: normal;
+ color: #20435c;
+ border-bottom: 1px solid #ccc;
+ margin: 20px -20px 10px -20px;
+ padding: 3px 0 3px 10px;
+}
+
+div.body h1 { margin-top: 0; font-size: 200%; }
+div.body h2 { font-size: 160%; }
+div.body h3 { font-size: 140%; }
+div.body h4 { font-size: 120%; }
+div.body h5 { font-size: 110%; }
+div.body h6 { font-size: 100%; }
+
+a.headerlink {
+ color: #c60f0f;
+ font-size: 0.8em;
+ padding: 0 4px 0 4px;
+ text-decoration: none;
+}
+
+a.headerlink:hover {
+ background-color: #c60f0f;
+ color: white;
+}
+
+div.body p, div.body dd, div.body li {
+ text-align: justify;
+ line-height: 130%;
+}
+
+div.admonition p.admonition-title + p {
+ display: inline;
+}
+
+div.admonition p {
+ margin-bottom: 5px;
+}
+
+div.admonition pre {
+ margin-bottom: 5px;
+}
+
+div.admonition ul, div.admonition ol {
+ margin-bottom: 5px;
+}
+
+div.note {
+ background-color: #eee;
+ border: 1px solid #ccc;
+}
+
+div.seealso {
+ background-color: #ffc;
+ border: 1px solid #ff6;
+}
+
+div.topic {
+ background-color: #eee;
+}
+
+div.warning {
+ background-color: #ffe4e4;
+ border: 1px solid #f66;
+}
+
+p.admonition-title {
+ display: inline;
+}
+
+p.admonition-title:after {
+ content: ":";
+}
+
+pre {
+ padding: 5px;
+ background-color: #eeffcc;
+ color: #333333;
+ line-height: 120%;
+ border: 1px solid #ac9;
+ border-left: none;
+ border-right: none;
+}
+
+tt {
+ background-color: #ecf0f3;
+ padding: 0 1px 0 1px;
+ font-size: 0.95em;
+}
+
+th {
+ background-color: #ede;
+}
+
+.warning tt {
+ background: #efc2c2;
+}
+
+.note tt {
+ background: #d6d6d6;
+}
+
+.viewcode-back {
+ font-family: sans-serif;
+}
+
+div.viewcode-block:target {
+ background-color: #f4debf;
+ border-top: 1px solid #ac9;
+ border-bottom: 1px solid #ac9;
+} \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/_static/doctools.js b/third_party/python/mock-1.0.0/html/_static/doctools.js
new file mode 100644
index 0000000000..d4619fdfb1
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/doctools.js
@@ -0,0 +1,247 @@
+/*
+ * doctools.js
+ * ~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for all documentation.
+ *
+ * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/**
+ * select a different prefix for underscore
+ */
+$u = _.noConflict();
+
+/**
+ * make the code below compatible with browsers without
+ * an installed firebug like debugger
+if (!window.console || !console.firebug) {
+ var names = ["log", "debug", "info", "warn", "error", "assert", "dir",
+ "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace",
+ "profile", "profileEnd"];
+ window.console = {};
+ for (var i = 0; i < names.length; ++i)
+ window.console[names[i]] = function() {};
+}
+ */
+
+/**
+ * small helper function to urldecode strings
+ */
+jQuery.urldecode = function(x) {
+ return decodeURIComponent(x).replace(/\+/g, ' ');
+}
+
+/**
+ * small helper function to urlencode strings
+ */
+jQuery.urlencode = encodeURIComponent;
+
+/**
+ * This function returns the parsed url parameters of the
+ * current request. Multiple values per key are supported,
+ * it will always return arrays of strings for the value parts.
+ */
+jQuery.getQueryParameters = function(s) {
+ if (typeof s == 'undefined')
+ s = document.location.search;
+ var parts = s.substr(s.indexOf('?') + 1).split('&');
+ var result = {};
+ for (var i = 0; i < parts.length; i++) {
+ var tmp = parts[i].split('=', 2);
+ var key = jQuery.urldecode(tmp[0]);
+ var value = jQuery.urldecode(tmp[1]);
+ if (key in result)
+ result[key].push(value);
+ else
+ result[key] = [value];
+ }
+ return result;
+};
+
+/**
+ * small function to check if an array contains
+ * a given item.
+ */
+jQuery.contains = function(arr, item) {
+ for (var i = 0; i < arr.length; i++) {
+ if (arr[i] == item)
+ return true;
+ }
+ return false;
+};
+
+/**
+ * highlight a given string on a jquery object by wrapping it in
+ * span elements with the given class name.
+ */
+jQuery.fn.highlightText = function(text, className) {
+ function highlight(node) {
+ if (node.nodeType == 3) {
+ var val = node.nodeValue;
+ var pos = val.toLowerCase().indexOf(text);
+ if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) {
+ var span = document.createElement("span");
+ span.className = className;
+ span.appendChild(document.createTextNode(val.substr(pos, text.length)));
+ node.parentNode.insertBefore(span, node.parentNode.insertBefore(
+ document.createTextNode(val.substr(pos + text.length)),
+ node.nextSibling));
+ node.nodeValue = val.substr(0, pos);
+ }
+ }
+ else if (!jQuery(node).is("button, select, textarea")) {
+ jQuery.each(node.childNodes, function() {
+ highlight(this);
+ });
+ }
+ }
+ return this.each(function() {
+ highlight(this);
+ });
+};
+
+/**
+ * Small JavaScript module for the documentation.
+ */
+var Documentation = {
+
+ init : function() {
+ this.fixFirefoxAnchorBug();
+ this.highlightSearchWords();
+ this.initIndexTable();
+ },
+
+ /**
+ * i18n support
+ */
+ TRANSLATIONS : {},
+ PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; },
+ LOCALE : 'unknown',
+
+ // gettext and ngettext don't access this so that the functions
+ // can safely bound to a different name (_ = Documentation.gettext)
+ gettext : function(string) {
+ var translated = Documentation.TRANSLATIONS[string];
+ if (typeof translated == 'undefined')
+ return string;
+ return (typeof translated == 'string') ? translated : translated[0];
+ },
+
+ ngettext : function(singular, plural, n) {
+ var translated = Documentation.TRANSLATIONS[singular];
+ if (typeof translated == 'undefined')
+ return (n == 1) ? singular : plural;
+ return translated[Documentation.PLURALEXPR(n)];
+ },
+
+ addTranslations : function(catalog) {
+ for (var key in catalog.messages)
+ this.TRANSLATIONS[key] = catalog.messages[key];
+ this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')');
+ this.LOCALE = catalog.locale;
+ },
+
+ /**
+ * add context elements like header anchor links
+ */
+ addContextElements : function() {
+ $('div[id] > :header:first').each(function() {
+ $('<a class="headerlink">\u00B6</a>').
+ attr('href', '#' + this.id).
+ attr('title', _('Permalink to this headline')).
+ appendTo(this);
+ });
+ $('dt[id]').each(function() {
+ $('<a class="headerlink">\u00B6</a>').
+ attr('href', '#' + this.id).
+ attr('title', _('Permalink to this definition')).
+ appendTo(this);
+ });
+ },
+
+ /**
+ * workaround a firefox stupidity
+ */
+ fixFirefoxAnchorBug : function() {
+ if (document.location.hash && $.browser.mozilla)
+ window.setTimeout(function() {
+ document.location.href += '';
+ }, 10);
+ },
+
+ /**
+ * highlight the search words provided in the url in the text
+ */
+ highlightSearchWords : function() {
+ var params = $.getQueryParameters();
+ var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : [];
+ if (terms.length) {
+ var body = $('div.body');
+ window.setTimeout(function() {
+ $.each(terms, function() {
+ body.highlightText(this.toLowerCase(), 'highlighted');
+ });
+ }, 10);
+ $('<p class="highlight-link"><a href="javascript:Documentation.' +
+ 'hideSearchWords()">' + _('Hide Search Matches') + '</a></p>')
+ .appendTo($('#searchbox'));
+ }
+ },
+
+ /**
+ * init the domain index toggle buttons
+ */
+ initIndexTable : function() {
+ var togglers = $('img.toggler').click(function() {
+ var src = $(this).attr('src');
+ var idnum = $(this).attr('id').substr(7);
+ $('tr.cg-' + idnum).toggle();
+ if (src.substr(-9) == 'minus.png')
+ $(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
+ else
+ $(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
+ }).css('display', '');
+ if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) {
+ togglers.click();
+ }
+ },
+
+ /**
+ * helper function to hide the search marks again
+ */
+ hideSearchWords : function() {
+ $('#searchbox .highlight-link').fadeOut(300);
+ $('span.highlighted').removeClass('highlighted');
+ },
+
+ /**
+ * make the url absolute
+ */
+ makeURL : function(relativeURL) {
+ return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL;
+ },
+
+ /**
+ * get the current relative url
+ */
+ getCurrentURL : function() {
+ var path = document.location.pathname;
+ var parts = path.split(/\//);
+ $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
+ if (this == '..')
+ parts.pop();
+ });
+ var url = parts.join('/');
+ return path.substring(url.lastIndexOf('/') + 1, path.length - 1);
+ }
+};
+
+// quick alias for translations
+_ = Documentation.gettext;
+
+$(document).ready(function() {
+ Documentation.init();
+});
diff --git a/third_party/python/mock-1.0.0/html/_static/documentation.png b/third_party/python/mock-1.0.0/html/_static/documentation.png
new file mode 100644
index 0000000000..f0d334b57a
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/documentation.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/file.png b/third_party/python/mock-1.0.0/html/_static/file.png
new file mode 100644
index 0000000000..d18082e397
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/file.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/header_sm_mid.png b/third_party/python/mock-1.0.0/html/_static/header_sm_mid.png
new file mode 100644
index 0000000000..dce5a40e98
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/header_sm_mid.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/jquery.js b/third_party/python/mock-1.0.0/html/_static/jquery.js
new file mode 100644
index 0000000000..7c24308023
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/jquery.js
@@ -0,0 +1,154 @@
+/*!
+ * jQuery JavaScript Library v1.4.2
+ * http://jquery.com/
+ *
+ * Copyright 2010, John Resig
+ * Dual licensed under the MIT or GPL Version 2 licenses.
+ * http://jquery.org/license
+ *
+ * Includes Sizzle.js
+ * http://sizzlejs.com/
+ * Copyright 2010, The Dojo Foundation
+ * Released under the MIT, BSD, and GPL Licenses.
+ *
+ * Date: Sat Feb 13 22:33:48 2010 -0500
+ */
+(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o<i;o++)e(a[o],b,f?d.call(a[o],o,e(a[o],b)):d,j);return a}return i?
+e(a[0],b):w}function J(){return(new Date).getTime()}function Y(){return false}function Z(){return true}function na(a,b,d){d[0].type=a;return c.event.handle.apply(b,d)}function oa(a){var b,d=[],f=[],e=arguments,j,i,o,k,n,r;i=c.data(this,"events");if(!(a.liveFired===this||!i||!i.live||a.button&&a.type==="click")){a.liveFired=this;var u=i.live.slice(0);for(k=0;k<u.length;k++){i=u[k];i.origType.replace(O,"")===a.type?f.push(i.selector):u.splice(k--,1)}j=c(a.target).closest(f,a.currentTarget);n=0;for(r=
+j.length;n<r;n++)for(k=0;k<u.length;k++){i=u[k];if(j[n].selector===i.selector){o=j[n].elem;f=null;if(i.preType==="mouseenter"||i.preType==="mouseleave")f=c(a.relatedTarget).closest(i.selector)[0];if(!f||f!==o)d.push({elem:o,handleObj:i})}}n=0;for(r=d.length;n<r;n++){j=d[n];a.currentTarget=j.elem;a.data=j.handleObj.data;a.handleObj=j.handleObj;if(j.handleObj.origHandler.apply(j.elem,e)===false){b=false;break}}return b}}function pa(a,b){return"live."+(a&&a!=="*"?a+".":"")+b.replace(/\./g,"`").replace(/ /g,
+"&")}function qa(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function ra(a,b){var d=0;b.each(function(){if(this.nodeName===(a[d]&&a[d].nodeName)){var f=c.data(a[d++]),e=c.data(this,f);if(f=f&&f.events){delete e.handle;e.events={};for(var j in f)for(var i in f[j])c.event.add(this,j,f[j][i],f[j][i].data)}}})}function sa(a,b,d){var f,e,j;b=b&&b[0]?b[0].ownerDocument||b[0]:s;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===s&&!ta.test(a[0])&&(c.support.checkClone||!ua.test(a[0]))){e=
+true;if(j=c.fragments[a[0]])if(j!==1)f=j}if(!f){f=b.createDocumentFragment();c.clean(a,b,f,d)}if(e)c.fragments[a[0]]=j?f:1;return{fragment:f,cacheable:e}}function K(a,b){var d={};c.each(va.concat.apply([],va.slice(0,b)),function(){d[this]=a});return d}function wa(a){return"scrollTo"in a&&a.document?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var c=function(a,b){return new c.fn.init(a,b)},Ra=A.jQuery,Sa=A.$,s=A.document,T,Ta=/^[^<]*(<[\w\W]+>)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/,
+Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if((d=Ta.exec(a))&&
+(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this,
+a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b===
+"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this,
+function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b<d;b++)if((e=arguments[b])!=null)for(j in e){i=a[j];o=e[j];if(a!==o)if(f&&o&&(c.isPlainObject(o)||c.isArray(o))){i=i&&(c.isPlainObject(i)||
+c.isArray(i))?i:c.isArray(o)?[]:{};a[j]=c.extend(f,i,o)}else if(o!==w)a[j]=o}return a};c.extend({noConflict:function(a){A.$=Sa;if(a)A.jQuery=Ra;return c},isReady:false,ready:function(){if(!c.isReady){if(!s.body)return setTimeout(c.ready,13);c.isReady=true;if(Q){for(var a,b=0;a=Q[b++];)a.call(s,c);Q=null}c.fn.triggerHandler&&c(s).triggerHandler("ready")}},bindReady:function(){if(!xa){xa=true;if(s.readyState==="complete")return c.ready();if(s.addEventListener){s.addEventListener("DOMContentLoaded",
+L,false);A.addEventListener("load",c.ready,false)}else if(s.attachEvent){s.attachEvent("onreadystatechange",L);A.attachEvent("onload",c.ready);var a=false;try{a=A.frameElement==null}catch(b){}s.documentElement.doScroll&&a&&ma()}}},isFunction:function(a){return $.call(a)==="[object Function]"},isArray:function(a){return $.call(a)==="[object Array]"},isPlainObject:function(a){if(!a||$.call(a)!=="[object Object]"||a.nodeType||a.setInterval)return false;if(a.constructor&&!aa.call(a,"constructor")&&!aa.call(a.constructor.prototype,
+"isPrototypeOf"))return false;var b;for(b in a);return b===w||aa.call(a,b)},isEmptyObject:function(a){for(var b in a)return false;return true},error:function(a){throw a;},parseJSON:function(a){if(typeof a!=="string"||!a)return null;a=c.trim(a);if(/^[\],:{}\s]*$/.test(a.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,"")))return A.JSON&&A.JSON.parse?A.JSON.parse(a):(new Function("return "+
+a))();else c.error("Invalid JSON: "+a)},noop:function(){},globalEval:function(a){if(a&&Va.test(a)){var b=s.getElementsByTagName("head")[0]||s.documentElement,d=s.createElement("script");d.type="text/javascript";if(c.support.scriptEval)d.appendChild(s.createTextNode(a));else d.text=a;b.insertBefore(d,b.firstChild);b.removeChild(d)}},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,b,d){var f,e=0,j=a.length,i=j===w||c.isFunction(a);if(d)if(i)for(f in a){if(b.apply(a[f],
+d)===false)break}else for(;e<j;){if(b.apply(a[e++],d)===false)break}else if(i)for(f in a){if(b.call(a[f],f,a[f])===false)break}else for(d=a[0];e<j&&b.call(d,e,d)!==false;d=a[++e]);return a},trim:function(a){return(a||"").replace(Wa,"")},makeArray:function(a,b){b=b||[];if(a!=null)a.length==null||typeof a==="string"||c.isFunction(a)||typeof a!=="function"&&a.setInterval?ba.call(b,a):c.merge(b,a);return b},inArray:function(a,b){if(b.indexOf)return b.indexOf(a);for(var d=0,f=b.length;d<f;d++)if(b[d]===
+a)return d;return-1},merge:function(a,b){var d=a.length,f=0;if(typeof b.length==="number")for(var e=b.length;f<e;f++)a[d++]=b[f];else for(;b[f]!==w;)a[d++]=b[f++];a.length=d;return a},grep:function(a,b,d){for(var f=[],e=0,j=a.length;e<j;e++)!d!==!b(a[e],e)&&f.push(a[e]);return f},map:function(a,b,d){for(var f=[],e,j=0,i=a.length;j<i;j++){e=b(a[j],j,d);if(e!=null)f[f.length]=e}return f.concat.apply([],f)},guid:1,proxy:function(a,b,d){if(arguments.length===2)if(typeof b==="string"){d=a;a=d[b];b=w}else if(b&&
+!c.isFunction(b)){d=b;b=w}if(!b&&a)b=function(){return a.apply(d||this,arguments)};if(a)b.guid=a.guid=a.guid||b.guid||c.guid++;return b},uaMatch:function(a){a=a.toLowerCase();a=/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version)?[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||!/compatible/.test(a)&&/(mozilla)(?:.*? rv:([\w.]+))?/.exec(a)||[];return{browser:a[1]||"",version:a[2]||"0"}},browser:{}});P=c.uaMatch(P);if(P.browser){c.browser[P.browser]=true;c.browser.version=P.version}if(c.browser.webkit)c.browser.safari=
+true;if(ya)c.inArray=function(a,b){return ya.call(b,a)};T=c(s);if(s.addEventListener)L=function(){s.removeEventListener("DOMContentLoaded",L,false);c.ready()};else if(s.attachEvent)L=function(){if(s.readyState==="complete"){s.detachEvent("onreadystatechange",L);c.ready()}};(function(){c.support={};var a=s.documentElement,b=s.createElement("script"),d=s.createElement("div"),f="script"+J();d.style.display="none";d.innerHTML=" <link/><table></table><a href='/a' style='color:red;float:left;opacity:.55;'>a</a><input type='checkbox'/>";
+var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected,
+parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent=
+false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="<input type='radio' name='radiotest' checked='checked'/>";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n=
+s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true,
+applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando];
+else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this,
+a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b===
+w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i,
+cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1)if(e.className){for(var j=" "+e.className+" ",
+i=e.className,o=0,k=b.length;o<k;o++)if(j.indexOf(" "+b[o]+" ")<0)i+=" "+b[o];e.className=c.trim(i)}else e.className=a}return this},removeClass:function(a){if(c.isFunction(a))return this.each(function(k){var n=c(this);n.removeClass(a.call(this,k,n.attr("class")))});if(a&&typeof a==="string"||a===w)for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1&&e.className)if(a){for(var j=(" "+e.className+" ").replace(Aa," "),i=0,o=b.length;i<o;i++)j=j.replace(" "+b[i]+" ",
+" ");e.className=c.trim(j)}else e.className=""}return this},toggleClass:function(a,b){var d=typeof a,f=typeof b==="boolean";if(c.isFunction(a))return this.each(function(e){var j=c(this);j.toggleClass(a.call(this,e,j.attr("class"),b),b)});return this.each(function(){if(d==="string")for(var e,j=0,i=c(this),o=b,k=a.split(ca);e=k[j++];){o=f?o:!i.hasClass(e);i[o?"addClass":"removeClass"](e)}else if(d==="undefined"||d==="boolean"){this.className&&c.data(this,"__className__",this.className);this.className=
+this.className||a===false?"":c.data(this,"__className__")||""}})},hasClass:function(a){a=" "+a+" ";for(var b=0,d=this.length;b<d;b++)if((" "+this[b].className+" ").replace(Aa," ").indexOf(a)>-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j<d;j++){var i=
+e[j];if(i.selected){a=c(i).val();if(b)return a;f.push(a)}}return f}if(Ba.test(b.type)&&!c.support.checkOn)return b.getAttribute("value")===null?"on":b.value;return(b.value||"").replace(Za,"")}return w}var o=c.isFunction(a);return this.each(function(k){var n=c(this),r=a;if(this.nodeType===1){if(o)r=a.call(this,k,n.val());if(typeof r==="number")r+="";if(c.isArray(r)&&Ba.test(this.type))this.checked=c.inArray(n.val(),r)>=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected=
+c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed");
+a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g,
+function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split(".");
+k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a),
+C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B<r.length;B++){u=r[B];if(d.guid===u.guid){if(i||k.test(u.namespace)){f==null&&r.splice(B--,1);n.remove&&n.remove.call(a,u)}if(f!=
+null)break}}if(r.length===0||f!=null&&r.length===1){if(!n.teardown||n.teardown.call(a,o)===false)Ca(a,e,z.handle);delete C[e]}}else for(var B=0;B<r.length;B++){u=r[B];if(i||k.test(u.namespace)){c.event.remove(a,n,u.handler,B);r.splice(B--,1)}}}if(c.isEmptyObject(C)){if(b=z.handle)b.elem=null;delete z.events;delete z.handle;c.isEmptyObject(z)&&c.removeData(a)}}}}},trigger:function(a,b,d,f){var e=a.type||a;if(!f){a=typeof a==="object"?a[G]?a:c.extend(c.Event(e),a):c.Event(e);if(e.indexOf("!")>=0){a.type=
+e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&&
+f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;
+if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e<j;e++){var i=d[e];if(b||f.test(i.namespace)){a.handler=i.handler;a.data=i.data;a.handleObj=i;i=i.handler.apply(this,arguments);if(i!==w){a.result=i;if(i===false){a.preventDefault();a.stopPropagation()}}if(a.isImmediatePropagationStopped())break}}}return a.result},props:"altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),
+fix:function(a){if(a[G])return a;var b=a;a=c.Event(b);for(var d=this.props.length,f;d;){f=this.props[--d];a[f]=b[f]}if(!a.target)a.target=a.srcElement||s;if(a.target.nodeType===3)a.target=a.target.parentNode;if(!a.relatedTarget&&a.fromElement)a.relatedTarget=a.fromElement===a.target?a.toElement:a.fromElement;if(a.pageX==null&&a.clientX!=null){b=s.documentElement;d=s.body;a.pageX=a.clientX+(b&&b.scrollLeft||d&&d.scrollLeft||0)-(b&&b.clientLeft||d&&d.clientLeft||0);a.pageY=a.clientY+(b&&b.scrollTop||
+d&&d.scrollTop||0)-(b&&b.clientTop||d&&d.clientTop||0)}if(!a.which&&(a.charCode||a.charCode===0?a.charCode:a.keyCode))a.which=a.charCode||a.keyCode;if(!a.metaKey&&a.ctrlKey)a.metaKey=a.ctrlKey;if(!a.which&&a.button!==w)a.which=a.button&1?1:a.button&2?3:a.button&4?2:0;return a},guid:1E8,proxy:c.proxy,special:{ready:{setup:c.bindReady,teardown:c.noop},live:{add:function(a){c.event.add(this,a.origType,c.extend({},a,{handler:oa}))},remove:function(a){var b=true,d=a.origType.replace(O,"");c.each(c.data(this,
+"events").live||[],function(){if(d===this.origType.replace(O,""))return b=false});b&&c.event.remove(this,a.origType,oa)}},beforeunload:{setup:function(a,b,d){if(this.setInterval)this.onbeforeunload=d;return false},teardown:function(a,b){if(this.onbeforeunload===b)this.onbeforeunload=null}}}};var Ca=s.removeEventListener?function(a,b,d){a.removeEventListener(b,d,false)}:function(a,b,d){a.detachEvent("on"+b,d)};c.Event=function(a){if(!this.preventDefault)return new c.Event(a);if(a&&a.type){this.originalEvent=
+a;this.type=a.type}else this.type=a;this.timeStamp=J();this[G]=true};c.Event.prototype={preventDefault:function(){this.isDefaultPrevented=Z;var a=this.originalEvent;if(a){a.preventDefault&&a.preventDefault();a.returnValue=false}},stopPropagation:function(){this.isPropagationStopped=Z;var a=this.originalEvent;if(a){a.stopPropagation&&a.stopPropagation();a.cancelBubble=true}},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=Z;this.stopPropagation()},isDefaultPrevented:Y,isPropagationStopped:Y,
+isImmediatePropagationStopped:Y};var Da=function(a){var b=a.relatedTarget;try{for(;b&&b!==this;)b=b.parentNode;if(b!==this){a.type=a.data;c.event.handle.apply(this,arguments)}}catch(d){}},Ea=function(a){a.type=a.data;c.event.handle.apply(this,arguments)};c.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){c.event.special[a]={setup:function(d){c.event.add(this,b,d&&d.selector?Ea:Da,a)},teardown:function(d){c.event.remove(this,b,d&&d.selector?Ea:Da)}}});if(!c.support.submitBubbles)c.event.special.submit=
+{setup:function(){if(this.nodeName.toLowerCase()!=="form"){c.event.add(this,"click.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="submit"||d==="image")&&c(b).closest("form").length)return na("submit",this,arguments)});c.event.add(this,"keypress.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="text"||d==="password")&&c(b).closest("form").length&&a.keyCode===13)return na("submit",this,arguments)})}else return false},teardown:function(){c.event.remove(this,".specialSubmit")}};
+if(!c.support.changeBubbles){var da=/textarea|input|select/i,ea,Fa=function(a){var b=a.type,d=a.value;if(b==="radio"||b==="checkbox")d=a.checked;else if(b==="select-multiple")d=a.selectedIndex>-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",
+e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,
+"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a,
+d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j<o;j++)c.event.add(this[j],d,i,f)}return this}});c.fn.extend({unbind:function(a,b){if(typeof a==="object"&&
+!a.preventDefault)for(var d in a)this.unbind(d,a[d]);else{d=0;for(var f=this.length;d<f;d++)c.event.remove(this[d],a,b)}return this},delegate:function(a,b,d,f){return this.live(b,d,f,a)},undelegate:function(a,b,d){return arguments.length===0?this.unbind("live"):this.die(b,null,d,a)},trigger:function(a,b){return this.each(function(){c.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0]){a=c.Event(a);a.preventDefault();a.stopPropagation();c.event.trigger(a,b,this[0]);return a.result}},
+toggle:function(a){for(var b=arguments,d=1;d<b.length;)c.proxy(a,b[d++]);return this.click(c.proxy(a,function(f){var e=(c.data(this,"lastToggle"+a.guid)||0)%d;c.data(this,"lastToggle"+a.guid,e+1);f.preventDefault();return b[e].apply(this,arguments)||false}))},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var Ga={focus:"focusin",blur:"focusout",mouseenter:"mouseover",mouseleave:"mouseout"};c.each(["live","die"],function(a,b){c.fn[b]=function(d,f,e,j){var i,o=0,k,n,r=j||this.selector,
+u=j?this:c(this.context);if(c.isFunction(f)){e=f;f=w}for(d=(d||"").split(" ");(i=d[o++])!=null;){j=O.exec(i);k="";if(j){k=j[0];i=i.replace(O,"")}if(i==="hover")d.push("mouseenter"+k,"mouseleave"+k);else{n=i;if(i==="focus"||i==="blur"){d.push(Ga[i]+k);i+=k}else i=(Ga[i]||i)+k;b==="live"?u.each(function(){c.event.add(this,pa(i,r),{data:f,selector:r,handler:e,origType:i,origHandler:e,preType:n})}):u.unbind(pa(i,r),e)}}return this}});c.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error".split(" "),
+function(a,b){c.fn[b]=function(d){return d?this.bind(b,d):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});A.attachEvent&&!A.addEventListener&&A.attachEvent("onunload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}});(function(){function a(g){for(var h="",l,m=0;g[m];m++){l=g[m];if(l.nodeType===3||l.nodeType===4)h+=l.nodeValue;else if(l.nodeType!==8)h+=a(l.childNodes)}return h}function b(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];
+if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1&&!p){t.sizcache=l;t.sizset=q}if(t.nodeName.toLowerCase()===h){y=t;break}t=t[g]}m[q]=y}}}function d(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1){if(!p){t.sizcache=l;t.sizset=q}if(typeof h!=="string"){if(t===h){y=true;break}}else if(k.filter(h,[t]).length>0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,
+e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift();
+t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D||
+g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h<g.length;h++)g[h]===g[h-1]&&g.splice(h--,1)}return g};k.matches=function(g,h){return k(g,null,null,h)};k.find=function(g,h,l){var m,q;if(!g)return[];
+for(var p=0,v=n.order.length;p<v;p++){var t=n.order[p];if(q=n.leftMatch[t].exec(g)){var y=q[1];q.splice(1,1);if(y.substr(y.length-1)!=="\\"){q[1]=(q[1]||"").replace(/\\/g,"");m=n.find[t](q,h,l);if(m!=null){g=g.replace(n.match[t],"");break}}}}m||(m=h.getElementsByTagName("*"));return{set:m,expr:g}};k.filter=function(g,h,l,m){for(var q=g,p=[],v=h,t,y,S=h&&h[0]&&x(h[0]);g&&h.length;){for(var H in n.filter)if((t=n.leftMatch[H].exec(g))!=null&&t[2]){var M=n.filter[H],I,D;D=t[1];y=false;t.splice(1,1);if(D.substr(D.length-
+1)!=="\\"){if(v===p)p=[];if(n.preFilter[H])if(t=n.preFilter[H](t,v,l,p,m,S)){if(t===true)continue}else y=I=true;if(t)for(var U=0;(D=v[U])!=null;U++)if(D){I=M(D,t,U,v);var Ha=m^!!I;if(l&&I!=null)if(Ha)y=true;else v[U]=false;else if(Ha){p.push(D);y=true}}if(I!==w){l||(v=p);g=g.replace(n.match[H],"");if(!y)return[];break}}}if(g===q)if(y==null)k.error(g);else break;q=g}return v};k.error=function(g){throw"Syntax error, unrecognized expression: "+g;};var n=k.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF-]|\\.)+)/,
+CLASS:/\.((?:[\w\u00c0-\uFFFF-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\((even|odd|[\dn+-]*)\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(g){return g.getAttribute("href")}},
+relative:{"+":function(g,h){var l=typeof h==="string",m=l&&!/\W/.test(h);l=l&&!m;if(m)h=h.toLowerCase();m=0;for(var q=g.length,p;m<q;m++)if(p=g[m]){for(;(p=p.previousSibling)&&p.nodeType!==1;);g[m]=l||p&&p.nodeName.toLowerCase()===h?p||false:p===h}l&&k.filter(h,g,true)},">":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m<q;m++){var p=g[m];if(p){l=p.parentNode;g[m]=l.nodeName.toLowerCase()===h?l:false}}}else{m=0;for(q=g.length;m<q;m++)if(p=g[m])g[m]=
+l?p.parentNode:p.parentNode===h;l&&k.filter(h,g,true)}},"":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("parentNode",h,m,g,p,l)},"~":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("previousSibling",h,m,g,p,l)}},find:{ID:function(g,h,l){if(typeof h.getElementById!=="undefined"&&!l)return(g=h.getElementById(g[1]))?[g]:[]},NAME:function(g,h){if(typeof h.getElementsByName!=="undefined"){var l=[];
+h=h.getElementsByName(g[1]);for(var m=0,q=h.length;m<q;m++)h[m].getAttribute("name")===g[1]&&l.push(h[m]);return l.length===0?null:l}},TAG:function(g,h){return h.getElementsByTagName(g[1])}},preFilter:{CLASS:function(g,h,l,m,q,p){g=" "+g[1].replace(/\\/g,"")+" ";if(p)return g;p=0;for(var v;(v=h[p])!=null;p++)if(v)if(q^(v.className&&(" "+v.className+" ").replace(/[\t\n]/g," ").indexOf(g)>=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()},
+CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m,
+g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)},
+text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}},
+setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return h<l[3]-0},gt:function(g,h,l){return h>l[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h=
+h[3];l=0;for(m=h.length;l<m;l++)if(h[l]===g)return false;return true}else k.error("Syntax error, unrecognized expression: "+q)},CHILD:function(g,h){var l=h[1],m=g;switch(l){case "only":case "first":for(;m=m.previousSibling;)if(m.nodeType===1)return false;if(l==="first")return true;m=g;case "last":for(;m=m.nextSibling;)if(m.nodeType===1)return false;return true;case "nth":l=h[2];var q=h[3];if(l===1&&q===0)return true;h=h[0];var p=g.parentNode;if(p&&(p.sizcache!==h||!g.nodeIndex)){var v=0;for(m=p.firstChild;m;m=
+m.nextSibling)if(m.nodeType===1)m.nodeIndex=++v;p.sizcache=h}g=g.nodeIndex-q;return l===0?g===0:g%l===0&&g/l>=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m===
+"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g,
+h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l<m;l++)h.push(g[l]);else for(l=0;g[l];l++)h.push(g[l]);return h}}var B;if(s.documentElement.compareDocumentPosition)B=function(g,h){if(!g.compareDocumentPosition||
+!h.compareDocumentPosition){if(g==h)i=true;return g.compareDocumentPosition?-1:1}g=g.compareDocumentPosition(h)&4?-1:g===h?0:1;if(g===0)i=true;return g};else if("sourceIndex"in s.documentElement)B=function(g,h){if(!g.sourceIndex||!h.sourceIndex){if(g==h)i=true;return g.sourceIndex?-1:1}g=g.sourceIndex-h.sourceIndex;if(g===0)i=true;return g};else if(s.createRange)B=function(g,h){if(!g.ownerDocument||!h.ownerDocument){if(g==h)i=true;return g.ownerDocument?-1:1}var l=g.ownerDocument.createRange(),m=
+h.ownerDocument.createRange();l.setStart(g,0);l.setEnd(g,0);m.setStart(h,0);m.setEnd(h,0);g=l.compareBoundaryPoints(Range.START_TO_END,m);if(g===0)i=true;return g};(function(){var g=s.createElement("div"),h="script"+(new Date).getTime();g.innerHTML="<a name='"+h+"'/>";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&&
+q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML="<a href='#'></a>";
+if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="<p class='TEST'></p>";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}();
+(function(){var g=s.createElement("div");g.innerHTML="<div class='test e'></div><div class='test'></div>";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}:
+function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q<p;q++)k(g,h[q],l);return k.filter(m,l)};c.find=k;c.expr=k.selectors;c.expr[":"]=c.expr.filters;c.unique=k.uniqueSort;c.text=a;c.isXMLDoc=x;c.contains=E})();var eb=/Until$/,fb=/^(?:parents|prevUntil|prevAll)/,
+gb=/,/;R=Array.prototype.slice;var Ia=function(a,b,d){if(c.isFunction(b))return c.grep(a,function(e,j){return!!b.call(e,j,e)===d});else if(b.nodeType)return c.grep(a,function(e){return e===b===d});else if(typeof b==="string"){var f=c.grep(a,function(e){return e.nodeType===1});if(Ua.test(b))return c.filter(b,f,!d);else b=c.filter(b,f)}return c.grep(a,function(e){return c.inArray(e,b)>=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f<e;f++){d=b.length;
+c.find(a,this[f],b);if(f>0)for(var j=d;j<b.length;j++)for(var i=0;i<d;i++)if(b[i]===b[j]){b.splice(j--,1);break}}return b},has:function(a){var b=c(a);return this.filter(function(){for(var d=0,f=b.length;d<f;d++)if(c.contains(this,b[d]))return true})},not:function(a){return this.pushStack(Ia(this,a,false),"not",a)},filter:function(a){return this.pushStack(Ia(this,a,true),"filter",a)},is:function(a){return!!a&&c.filter(a,this).length>0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j=
+{},i;if(f&&a.length){e=0;for(var o=a.length;e<o;e++){i=a[e];j[i]||(j[i]=c.expr.match.POS.test(i)?c(i,b||this.context):i)}for(;f&&f.ownerDocument&&f!==b;){for(i in j){e=j[i];if(e.jquery?e.index(f)>-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a===
+"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",
+d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?
+a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType===
+1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/<tbody/i,jb=/<|&#?\w+;/,ta=/<script|<object|<embed|<option|<style/i,ua=/checked\s*(?:[^=]|=\s*.checked.)/i,Ma=function(a,b,d){return hb.test(d)?
+a:b+"></"+d+">"},F={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div<div>","</div>"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d=
+c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this},
+wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})},
+prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,
+this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild);
+return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja,
+""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b<d;b++)if(this[b].nodeType===1){c.cleanData(this[b].getElementsByTagName("*"));this[b].innerHTML=a}}catch(f){this.empty().append(a)}}else c.isFunction(a)?this.each(function(e){var j=c(this),i=j.html();j.empty().append(function(){return a.call(this,e,i)})}):this.empty().append(a);return this},replaceWith:function(a){if(this[0]&&
+this[0].parentNode){if(c.isFunction(a))return this.each(function(b){var d=c(this),f=d.html();d.replaceWith(a.call(this,b,f))});if(typeof a!=="string")a=c(a).detach();return this.each(function(){var b=this.nextSibling,d=this.parentNode;c(this).remove();b?c(b).before(a):c(d).append(a)})}else return this.pushStack(c(c.isFunction(a)?a():a),"replaceWith",a)},detach:function(a){return this.remove(a,true)},domManip:function(a,b,d){function f(u){return c.nodeName(u,"table")?u.getElementsByTagName("tbody")[0]||
+u.appendChild(u.ownerDocument.createElement("tbody")):u}var e,j,i=a[0],o=[],k;if(!c.support.checkClone&&arguments.length===3&&typeof i==="string"&&ua.test(i))return this.each(function(){c(this).domManip(a,b,d,true)});if(c.isFunction(i))return this.each(function(u){var z=c(this);a[0]=i.call(this,u,b?z.html():w);z.domManip(a,b,d)});if(this[0]){e=i&&i.parentNode;e=c.support.parentNode&&e&&e.nodeType===11&&e.childNodes.length===this.length?{fragment:e}:sa(a,this,o);k=e.fragment;if(j=k.childNodes.length===
+1?(k=k.firstChild):k.firstChild){b=b&&c.nodeName(j,"tr");for(var n=0,r=this.length;n<r;n++)d.call(b?f(this[n],j):this[n],n>0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]);
+return this}else{e=0;for(var j=d.length;e<j;e++){var i=(e>0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["",
+""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]==="<table>"&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e=
+c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]?
+c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja=
+function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter=
+Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a,
+"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedStyle(a,null))f=
+a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=
+a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=/<script(.|\s)*?\/script>/gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!==
+"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("<div />").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this},
+serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),
+function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,
+global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&&
+e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)?
+"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache===
+false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B=
+false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since",
+c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E||
+d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x);
+g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===
+1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b===
+"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional;
+if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");
+this[a].style.display=d||"";if(c.css(this[a],"display")==="none"){d=this[a].nodeName;var f;if(la[d])f=la[d];else{var e=c("<"+d+" />").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a<b;a++)this[a].style.display=c.data(this[a],"olddisplay")||"";return this}},hide:function(a,b){if(a||a===0)return this.animate(K("hide",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");!d&&d!=="none"&&c.data(this[a],
+"olddisplay",c.css(this[a],"display"))}a=0;for(b=this.length;a<b;a++)this[a].style.display="none";return this}},_toggle:c.fn.toggle,toggle:function(a,b){var d=typeof a==="boolean";if(c.isFunction(a)&&c.isFunction(b))this._toggle.apply(this,arguments);else a==null||d?this.each(function(){var f=d?a:c(this).is(":hidden");c(this)[f?"show":"hide"]()}):this.animate(K("toggle",3),a,b);return this},fadeTo:function(a,b,d){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:b},a,d)},
+animate:function(a,b,d,f){var e=c.speed(b,d,f);if(c.isEmptyObject(a))return this.each(e.complete);return this[e.queue===false?"each":"queue"](function(){var j=c.extend({},e),i,o=this.nodeType===1&&c(this).is(":hidden"),k=this;for(i in a){var n=i.replace(ia,ja);if(i!==n){a[n]=a[i];delete a[i];i=n}if(a[i]==="hide"&&o||a[i]==="show"&&!o)return j.complete.call(this);if((i==="height"||i==="width")&&this.style){j.display=c.css(this,"display");j.overflow=this.style.overflow}if(c.isArray(a[i])){(j.specialEasing=
+j.specialEasing||{})[i]=a[i][1];a[i]=a[i][0]}}if(j.overflow!=null)this.style.overflow="hidden";j.curAnim=c.extend({},a);c.each(a,function(r,u){var z=new c.fx(k,j,r);if(Ab.test(u))z[u==="toggle"?o?"show":"hide":u](a);else{var C=Bb.exec(u),B=z.cur(true)||0;if(C){u=parseFloat(C[2]);var E=C[3]||"px";if(E!=="px"){k.style[r]=(u||1)+E;B=(u||1)/z.cur(true)*B;k.style[r]=B+E}if(C[1])u=(C[1]==="-="?-1:1)*u+B;z.custom(B,u,E)}else z.custom(B,u,"")}});return true})},stop:function(a,b){var d=c.timers;a&&this.queue([]);
+this.each(function(){for(var f=d.length-1;f>=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration===
+"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]||
+c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start;
+this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now=
+this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem,
+e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b<a.length;b++)a[b]()||a.splice(b--,1);a.length||
+c.fx.stop()},stop:function(){clearInterval(W);W=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){c.style(a.elem,"opacity",a.now)},_default:function(a){if(a.elem.style&&a.elem.style[a.prop]!=null)a.elem.style[a.prop]=(a.prop==="width"||a.prop==="height"?Math.max(0,a.now):a.now)+a.unit;else a.elem[a.prop]=a.now}}});if(c.expr&&c.expr.filters)c.expr.filters.animated=function(a){return c.grep(c.timers,function(b){return a===b.elem}).length};c.fn.offset="getBoundingClientRect"in s.documentElement?
+function(a){var b=this[0];if(a)return this.each(function(e){c.offset.setOffset(this,a,e)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);var d=b.getBoundingClientRect(),f=b.ownerDocument;b=f.body;f=f.documentElement;return{top:d.top+(self.pageYOffset||c.support.boxModel&&f.scrollTop||b.scrollTop)-(f.clientTop||b.clientTop||0),left:d.left+(self.pageXOffset||c.support.boxModel&&f.scrollLeft||b.scrollLeft)-(f.clientLeft||b.clientLeft||0)}}:function(a){var b=
+this[0];if(a)return this.each(function(r){c.offset.setOffset(this,a,r)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);c.offset.initialize();var d=b.offsetParent,f=b,e=b.ownerDocument,j,i=e.documentElement,o=e.body;f=(e=e.defaultView)?e.getComputedStyle(b,null):b.currentStyle;for(var k=b.offsetTop,n=b.offsetLeft;(b=b.parentNode)&&b!==o&&b!==i;){if(c.offset.supportsFixedPosition&&f.position==="fixed")break;j=e?e.getComputedStyle(b,null):b.currentStyle;
+k-=b.scrollTop;n-=b.scrollLeft;if(b===d){k+=b.offsetTop;n+=b.offsetLeft;if(c.offset.doesNotAddBorder&&!(c.offset.doesAddBorderForTableAndCells&&/^t(able|d|h)$/i.test(b.nodeName))){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=d;d=b.offsetParent}if(c.offset.subtractsBorderForOverflowNotVisible&&j.overflow!=="visible"){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=j}if(f.position==="relative"||f.position==="static"){k+=o.offsetTop;n+=o.offsetLeft}if(c.offset.supportsFixedPosition&&
+f.position==="fixed"){k+=Math.max(i.scrollTop,o.scrollTop);n+=Math.max(i.scrollLeft,o.scrollLeft)}return{top:k,left:n}};c.offset={initialize:function(){var a=s.body,b=s.createElement("div"),d,f,e,j=parseFloat(c.curCSS(a,"marginTop",true))||0;c.extend(b.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});b.innerHTML="<div style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;'><div></div></div><table style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;' cellpadding='0' cellspacing='0'><tr><td></td></tr></table>";
+a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b);
+c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a,
+d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top-
+f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset":
+"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in
+e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window);
diff --git a/third_party/python/mock-1.0.0/html/_static/minus.png b/third_party/python/mock-1.0.0/html/_static/minus.png
new file mode 100644
index 0000000000..da1c5620d1
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/minus.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/mobile.css b/third_party/python/mock-1.0.0/html/_static/mobile.css
new file mode 100644
index 0000000000..0cfe799b22
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/mobile.css
@@ -0,0 +1,17 @@
+/*
+ * CSS adjustments (overrides) for mobile browsers that cannot handle
+ * fixed-position divs very well.
+ * This makes long pages scrollable on mobile browsers.
+ */
+
+#breadcrumbs {
+ display: none !important;
+}
+
+.document {
+ bottom: inherit !important;
+}
+
+#sphinxsidebar {
+ bottom: inherit !important;
+}
diff --git a/third_party/python/mock-1.0.0/html/_static/plus.png b/third_party/python/mock-1.0.0/html/_static/plus.png
new file mode 100644
index 0000000000..b3cb37425e
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/plus.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/pygments.css b/third_party/python/mock-1.0.0/html/_static/pygments.css
new file mode 100644
index 0000000000..f07b654ba2
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/pygments.css
@@ -0,0 +1,62 @@
+.highlight .hll { background-color: #ffffcc }
+.highlight { background: #f0f0f0; }
+.highlight .c { color: #60a0b0; font-style: italic } /* Comment */
+.highlight .err { border: 1px solid #FF0000 } /* Error */
+.highlight .k { color: #007020; font-weight: bold } /* Keyword */
+.highlight .o { color: #666666 } /* Operator */
+.highlight .cm { color: #60a0b0; font-style: italic } /* Comment.Multiline */
+.highlight .cp { color: #007020 } /* Comment.Preproc */
+.highlight .c1 { color: #60a0b0; font-style: italic } /* Comment.Single */
+.highlight .cs { color: #60a0b0; background-color: #fff0f0 } /* Comment.Special */
+.highlight .gd { color: #A00000 } /* Generic.Deleted */
+.highlight .ge { font-style: italic } /* Generic.Emph */
+.highlight .gr { color: #FF0000 } /* Generic.Error */
+.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
+.highlight .gi { color: #00A000 } /* Generic.Inserted */
+.highlight .go { color: #808080 } /* Generic.Output */
+.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
+.highlight .gs { font-weight: bold } /* Generic.Strong */
+.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
+.highlight .gt { color: #0040D0 } /* Generic.Traceback */
+.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */
+.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */
+.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */
+.highlight .kp { color: #007020 } /* Keyword.Pseudo */
+.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */
+.highlight .kt { color: #902000 } /* Keyword.Type */
+.highlight .m { color: #40a070 } /* Literal.Number */
+.highlight .s { color: #4070a0 } /* Literal.String */
+.highlight .na { color: #4070a0 } /* Name.Attribute */
+.highlight .nb { color: #007020 } /* Name.Builtin */
+.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */
+.highlight .no { color: #60add5 } /* Name.Constant */
+.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */
+.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */
+.highlight .ne { color: #007020 } /* Name.Exception */
+.highlight .nf { color: #06287e } /* Name.Function */
+.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */
+.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
+.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */
+.highlight .nv { color: #bb60d5 } /* Name.Variable */
+.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */
+.highlight .w { color: #bbbbbb } /* Text.Whitespace */
+.highlight .mf { color: #40a070 } /* Literal.Number.Float */
+.highlight .mh { color: #40a070 } /* Literal.Number.Hex */
+.highlight .mi { color: #40a070 } /* Literal.Number.Integer */
+.highlight .mo { color: #40a070 } /* Literal.Number.Oct */
+.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */
+.highlight .sc { color: #4070a0 } /* Literal.String.Char */
+.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */
+.highlight .s2 { color: #4070a0 } /* Literal.String.Double */
+.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */
+.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */
+.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */
+.highlight .sx { color: #c65d09 } /* Literal.String.Other */
+.highlight .sr { color: #235388 } /* Literal.String.Regex */
+.highlight .s1 { color: #4070a0 } /* Literal.String.Single */
+.highlight .ss { color: #517918 } /* Literal.String.Symbol */
+.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */
+.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */
+.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */
+.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */
+.highlight .il { color: #40a070 } /* Literal.Number.Integer.Long */ \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/_static/scrn1.png b/third_party/python/mock-1.0.0/html/_static/scrn1.png
new file mode 100644
index 0000000000..6499b3cf76
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/scrn1.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/scrn2.png b/third_party/python/mock-1.0.0/html/_static/scrn2.png
new file mode 100644
index 0000000000..2a60215d03
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/scrn2.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/searchfield_leftcap.png b/third_party/python/mock-1.0.0/html/_static/searchfield_leftcap.png
new file mode 100644
index 0000000000..cc00c22b05
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/searchfield_leftcap.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/searchfield_repeat.png b/third_party/python/mock-1.0.0/html/_static/searchfield_repeat.png
new file mode 100644
index 0000000000..b429a16ba6
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/searchfield_repeat.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/searchfield_rightcap.png b/third_party/python/mock-1.0.0/html/_static/searchfield_rightcap.png
new file mode 100644
index 0000000000..8e13620ecb
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/searchfield_rightcap.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/searchtools.js b/third_party/python/mock-1.0.0/html/_static/searchtools.js
new file mode 100644
index 0000000000..663be4c909
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/searchtools.js
@@ -0,0 +1,560 @@
+/*
+ * searchtools.js_t
+ * ~~~~~~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for the full-text search.
+ *
+ * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/**
+ * helper function to return a node containing the
+ * search summary for a given text. keywords is a list
+ * of stemmed words, hlwords is the list of normal, unstemmed
+ * words. The first one is used to find the occurrence, the
+ * latter for highlighting it.
+ */
+
+jQuery.makeSearchSummary = function(text, keywords, hlwords) {
+ var textLower = text.toLowerCase();
+ var start = 0;
+ $.each(keywords, function() {
+ var i = textLower.indexOf(this.toLowerCase());
+ if (i > -1)
+ start = i;
+ });
+ start = Math.max(start - 120, 0);
+ var excerpt = ((start > 0) ? '...' : '') +
+ $.trim(text.substr(start, 240)) +
+    ((start + 240 < text.length) ? '...' : '');
+ var rv = $('<div class="context"></div>').text(excerpt);
+ $.each(hlwords, function() {
+ rv = rv.highlightText(this, 'highlighted');
+ });
+ return rv;
+}
+
+
+/**
+ * Porter Stemmer
+ */
+var Stemmer = function() {
+
+ var step2list = {
+ ational: 'ate',
+ tional: 'tion',
+ enci: 'ence',
+ anci: 'ance',
+ izer: 'ize',
+ bli: 'ble',
+ alli: 'al',
+ entli: 'ent',
+ eli: 'e',
+ ousli: 'ous',
+ ization: 'ize',
+ ation: 'ate',
+ ator: 'ate',
+ alism: 'al',
+ iveness: 'ive',
+ fulness: 'ful',
+ ousness: 'ous',
+ aliti: 'al',
+ iviti: 'ive',
+ biliti: 'ble',
+ logi: 'log'
+ };
+
+ var step3list = {
+ icate: 'ic',
+ ative: '',
+ alize: 'al',
+ iciti: 'ic',
+ ical: 'ic',
+ ful: '',
+ ness: ''
+ };
+
+ var c = "[^aeiou]"; // consonant
+ var v = "[aeiouy]"; // vowel
+ var C = c + "[^aeiouy]*"; // consonant sequence
+ var V = v + "[aeiou]*"; // vowel sequence
+
+ var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
+ var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
+ var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
+ var s_v = "^(" + C + ")?" + v; // vowel in stem
+
+ this.stemWord = function (w) {
+ var stem;
+ var suffix;
+ var firstch;
+ var origword = w;
+
+ if (w.length < 3)
+ return w;
+
+ var re;
+ var re2;
+ var re3;
+ var re4;
+
+ firstch = w.substr(0,1);
+ if (firstch == "y")
+ w = firstch.toUpperCase() + w.substr(1);
+
+ // Step 1a
+ re = /^(.+?)(ss|i)es$/;
+ re2 = /^(.+?)([^s])s$/;
+
+ if (re.test(w))
+ w = w.replace(re,"$1$2");
+ else if (re2.test(w))
+ w = w.replace(re2,"$1$2");
+
+ // Step 1b
+ re = /^(.+?)eed$/;
+ re2 = /^(.+?)(ed|ing)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ re = new RegExp(mgr0);
+ if (re.test(fp[1])) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+ }
+ else if (re2.test(w)) {
+ var fp = re2.exec(w);
+ stem = fp[1];
+ re2 = new RegExp(s_v);
+ if (re2.test(stem)) {
+ w = stem;
+ re2 = /(at|bl|iz)$/;
+ re3 = new RegExp("([^aeiouylsz])\\1$");
+ re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+ if (re2.test(w))
+ w = w + "e";
+ else if (re3.test(w)) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+ else if (re4.test(w))
+ w = w + "e";
+ }
+ }
+
+ // Step 1c
+ re = /^(.+?)y$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(s_v);
+ if (re.test(stem))
+ w = stem + "i";
+ }
+
+ // Step 2
+ re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ suffix = fp[2];
+ re = new RegExp(mgr0);
+ if (re.test(stem))
+ w = stem + step2list[suffix];
+ }
+
+ // Step 3
+ re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ suffix = fp[2];
+ re = new RegExp(mgr0);
+ if (re.test(stem))
+ w = stem + step3list[suffix];
+ }
+
+ // Step 4
+ re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
+ re2 = /^(.+?)(s|t)(ion)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(mgr1);
+ if (re.test(stem))
+ w = stem;
+ }
+ else if (re2.test(w)) {
+ var fp = re2.exec(w);
+ stem = fp[1] + fp[2];
+ re2 = new RegExp(mgr1);
+ if (re2.test(stem))
+ w = stem;
+ }
+
+ // Step 5
+ re = /^(.+?)e$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(mgr1);
+ re2 = new RegExp(meq1);
+ re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+ if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
+ w = stem;
+ }
+ re = /ll$/;
+ re2 = new RegExp(mgr1);
+ if (re.test(w) && re2.test(w)) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+
+ // and turn initial Y back to y
+ if (firstch == "y")
+ w = firstch.toLowerCase() + w.substr(1);
+ return w;
+ }
+}
+
+
+/**
+ * Search Module
+ */
+var Search = {
+
+ _index : null,
+ _queued_query : null,
+ _pulse_status : -1,
+
+ init : function() {
+ var params = $.getQueryParameters();
+ if (params.q) {
+ var query = params.q[0];
+ $('input[name="q"]')[0].value = query;
+ this.performSearch(query);
+ }
+ },
+
+ loadIndex : function(url) {
+ $.ajax({type: "GET", url: url, data: null, success: null,
+ dataType: "script", cache: true});
+ },
+
+ setIndex : function(index) {
+ var q;
+ this._index = index;
+ if ((q = this._queued_query) !== null) {
+ this._queued_query = null;
+ Search.query(q);
+ }
+ },
+
+ hasIndex : function() {
+ return this._index !== null;
+ },
+
+ deferQuery : function(query) {
+ this._queued_query = query;
+ },
+
+ stopPulse : function() {
+ this._pulse_status = 0;
+ },
+
+ startPulse : function() {
+ if (this._pulse_status >= 0)
+ return;
+ function pulse() {
+ Search._pulse_status = (Search._pulse_status + 1) % 4;
+ var dotString = '';
+ for (var i = 0; i < Search._pulse_status; i++)
+ dotString += '.';
+ Search.dots.text(dotString);
+ if (Search._pulse_status > -1)
+ window.setTimeout(pulse, 500);
+ };
+ pulse();
+ },
+
+ /**
+ * perform a search for something
+ */
+ performSearch : function(query) {
+ // create the required interface elements
+ this.out = $('#search-results');
+ this.title = $('<h2>' + _('Searching') + '</h2>').appendTo(this.out);
+ this.dots = $('<span></span>').appendTo(this.title);
+ this.status = $('<p style="display: none"></p>').appendTo(this.out);
+ this.output = $('<ul class="search"/>').appendTo(this.out);
+
+ $('#search-progress').text(_('Preparing search...'));
+ this.startPulse();
+
+ // index already loaded, the browser was quick!
+ if (this.hasIndex())
+ this.query(query);
+ else
+ this.deferQuery(query);
+ },
+
+ query : function(query) {
+ var stopwords = ["and","then","into","it","as","are","in","if","for","no","there","their","was","is","be","to","that","but","they","not","such","with","by","a","on","these","of","will","this","near","the","or","at"];
+
+ // Stem the searchterms and add them to the correct list
+ var stemmer = new Stemmer();
+ var searchterms = [];
+ var excluded = [];
+ var hlterms = [];
+ var tmp = query.split(/\s+/);
+ var objectterms = [];
+ for (var i = 0; i < tmp.length; i++) {
+ if (tmp[i] != "") {
+ objectterms.push(tmp[i].toLowerCase());
+ }
+
+ if ($u.indexOf(stopwords, tmp[i]) != -1 || tmp[i].match(/^\d+$/) ||
+ tmp[i] == "") {
+ // skip this "word"
+ continue;
+ }
+ // stem the word
+ var word = stemmer.stemWord(tmp[i]).toLowerCase();
+ // select the correct list
+ if (word[0] == '-') {
+ var toAppend = excluded;
+ word = word.substr(1);
+ }
+ else {
+ var toAppend = searchterms;
+ hlterms.push(tmp[i].toLowerCase());
+ }
+ // only add if not already in the list
+ if (!$.contains(toAppend, word))
+ toAppend.push(word);
+ };
+ var highlightstring = '?highlight=' + $.urlencode(hlterms.join(" "));
+
+ // console.debug('SEARCH: searching for:');
+ // console.info('required: ', searchterms);
+ // console.info('excluded: ', excluded);
+
+ // prepare search
+ var filenames = this._index.filenames;
+ var titles = this._index.titles;
+ var terms = this._index.terms;
+ var fileMap = {};
+ var files = null;
+ // different result priorities
+ var importantResults = [];
+ var objectResults = [];
+ var regularResults = [];
+ var unimportantResults = [];
+ $('#search-progress').empty();
+
+ // lookup as object
+ for (var i = 0; i < objectterms.length; i++) {
+ var others = [].concat(objectterms.slice(0,i),
+ objectterms.slice(i+1, objectterms.length))
+ var results = this.performObjectSearch(objectterms[i], others);
+ // Assume first word is most likely to be the object,
+ // other words more likely to be in description.
+ // Therefore put matches for earlier words first.
+ // (Results are eventually used in reverse order).
+ objectResults = results[0].concat(objectResults);
+ importantResults = results[1].concat(importantResults);
+ unimportantResults = results[2].concat(unimportantResults);
+ }
+
+ // perform the search on the required terms
+ for (var i = 0; i < searchterms.length; i++) {
+ var word = searchterms[i];
+ // no match but word was a required one
+ if ((files = terms[word]) == null)
+ break;
+ if (files.length == undefined) {
+ files = [files];
+ }
+ // create the mapping
+ for (var j = 0; j < files.length; j++) {
+ var file = files[j];
+ if (file in fileMap)
+ fileMap[file].push(word);
+ else
+ fileMap[file] = [word];
+ }
+ }
+
+ // now check if the files don't contain excluded terms
+ for (var file in fileMap) {
+ var valid = true;
+
+ // check if all requirements are matched
+ if (fileMap[file].length != searchterms.length)
+ continue;
+
+ // ensure that none of the excluded terms is in the
+ // search result.
+ for (var i = 0; i < excluded.length; i++) {
+ if (terms[excluded[i]] == file ||
+ $.contains(terms[excluded[i]] || [], file)) {
+ valid = false;
+ break;
+ }
+ }
+
+ // if we have still a valid result we can add it
+ // to the result list
+ if (valid)
+ regularResults.push([filenames[file], titles[file], '', null]);
+ }
+
+ // delete unused variables in order to not waste
+ // memory until list is retrieved completely
+ delete filenames, titles, terms;
+
+ // now sort the regular results descending by title
+ regularResults.sort(function(a, b) {
+ var left = a[1].toLowerCase();
+ var right = b[1].toLowerCase();
+ return (left > right) ? -1 : ((left < right) ? 1 : 0);
+ });
+
+ // combine all results
+ var results = unimportantResults.concat(regularResults)
+ .concat(objectResults).concat(importantResults);
+
+ // print the results
+ var resultCount = results.length;
+ function displayNextItem() {
+ // results left, load the summary and display it
+ if (results.length) {
+ var item = results.pop();
+ var listItem = $('<li style="display:none"></li>');
+ if (DOCUMENTATION_OPTIONS.FILE_SUFFIX == '') {
+ // dirhtml builder
+ var dirname = item[0] + '/';
+ if (dirname.match(/\/index\/$/)) {
+ dirname = dirname.substring(0, dirname.length-6);
+ } else if (dirname == 'index/') {
+ dirname = '';
+ }
+ listItem.append($('<a/>').attr('href',
+ DOCUMENTATION_OPTIONS.URL_ROOT + dirname +
+ highlightstring + item[2]).html(item[1]));
+ } else {
+ // normal html builders
+ listItem.append($('<a/>').attr('href',
+ item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX +
+ highlightstring + item[2]).html(item[1]));
+ }
+ if (item[3]) {
+ listItem.append($('<span> (' + item[3] + ')</span>'));
+ Search.output.append(listItem);
+ listItem.slideDown(5, function() {
+ displayNextItem();
+ });
+ } else if (DOCUMENTATION_OPTIONS.HAS_SOURCE) {
+ $.get(DOCUMENTATION_OPTIONS.URL_ROOT + '_sources/' +
+ item[0] + '.txt', function(data) {
+ if (data != '') {
+ listItem.append($.makeSearchSummary(data, searchterms, hlterms));
+ Search.output.append(listItem);
+ }
+ listItem.slideDown(5, function() {
+ displayNextItem();
+ });
+ }, "text");
+ } else {
+ // no source available, just display title
+ Search.output.append(listItem);
+ listItem.slideDown(5, function() {
+ displayNextItem();
+ });
+ }
+ }
+ // search finished, update title and status message
+ else {
+ Search.stopPulse();
+ Search.title.text(_('Search Results'));
+ if (!resultCount)
+ Search.status.text(_('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.'));
+ else
+ Search.status.text(_('Search finished, found %s page(s) matching the search query.').replace('%s', resultCount));
+ Search.status.fadeIn(500);
+ }
+ }
+ displayNextItem();
+ },
+
+ performObjectSearch : function(object, otherterms) {
+ var filenames = this._index.filenames;
+ var objects = this._index.objects;
+ var objnames = this._index.objnames;
+ var titles = this._index.titles;
+
+ var importantResults = [];
+ var objectResults = [];
+ var unimportantResults = [];
+
+ for (var prefix in objects) {
+ for (var name in objects[prefix]) {
+ var fullname = (prefix ? prefix + '.' : '') + name;
+ if (fullname.toLowerCase().indexOf(object) > -1) {
+ var match = objects[prefix][name];
+ var objname = objnames[match[1]][2];
+ var title = titles[match[0]];
+ // If more than one term searched for, we require other words to be
+ // found in the name/title/description
+ if (otherterms.length > 0) {
+ var haystack = (prefix + ' ' + name + ' ' +
+ objname + ' ' + title).toLowerCase();
+ var allfound = true;
+ for (var i = 0; i < otherterms.length; i++) {
+ if (haystack.indexOf(otherterms[i]) == -1) {
+ allfound = false;
+ break;
+ }
+ }
+ if (!allfound) {
+ continue;
+ }
+ }
+ var descr = objname + _(', in ') + title;
+ anchor = match[3];
+ if (anchor == '')
+ anchor = fullname;
+ else if (anchor == '-')
+ anchor = objnames[match[1]][1] + '-' + fullname;
+ result = [filenames[match[0]], fullname, '#'+anchor, descr];
+ switch (match[2]) {
+ case 1: objectResults.push(result); break;
+ case 0: importantResults.push(result); break;
+ case 2: unimportantResults.push(result); break;
+ }
+ }
+ }
+ }
+
+ // sort results descending
+ objectResults.sort(function(a, b) {
+ return (a[1] > b[1]) ? -1 : ((a[1] < b[1]) ? 1 : 0);
+ });
+
+ importantResults.sort(function(a, b) {
+ return (a[1] > b[1]) ? -1 : ((a[1] < b[1]) ? 1 : 0);
+ });
+
+ unimportantResults.sort(function(a, b) {
+ return (a[1] > b[1]) ? -1 : ((a[1] < b[1]) ? 1 : 0);
+ });
+
+ return [importantResults, objectResults, unimportantResults]
+ }
+}
+
+$(document).ready(function() {
+ Search.init();
+}); \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/_static/sidebar.js b/third_party/python/mock-1.0.0/html/_static/sidebar.js
new file mode 100644
index 0000000000..7318517111
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/sidebar.js
@@ -0,0 +1,148 @@
+/*
+ * sidebar.js
+ * ~~~~~~~~~~
+ *
+ * This script makes the Sphinx sidebar collapsible.
+ *
+ * .sphinxsidebar contains .sphinxsidebarwrapper. This script adds
+ * in .sphinxsidebar, after .sphinxsidebarwrapper, the #sidebarbutton
+ * used to collapse and expand the sidebar.
+ *
+ * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden
+ * and the width of the sidebar and the margin-left of the document
+ * are decreased. When the sidebar is expanded the opposite happens.
+ * This script saves a per-browser/per-session cookie used to
+ * remember the position of the sidebar among the pages.
+ * Once the browser is closed the cookie is deleted and the position
+ * reset to the default (expanded).
+ *
+ * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+$(function() {
+ // global elements used by the functions.
+ // the 'sidebarbutton' element is defined as global after its
+ // creation, in the add_sidebar_button function
+ var bodywrapper = $('.bodywrapper');
+ var sidebar = $('.sphinxsidebar');
+ var sidebarwrapper = $('.sphinxsidebarwrapper');
+
+ // original margin-left of the bodywrapper and width of the sidebar
+ // with the sidebar expanded
+ var bw_margin_expanded = bodywrapper.css('margin-left');
+ var ssb_width_expanded = sidebar.width();
+
+ // margin-left of the bodywrapper and width of the sidebar
+ // with the sidebar collapsed
+ var bw_margin_collapsed = '.8em';
+ var ssb_width_collapsed = '.8em';
+
+ // colors used by the current theme
+ var dark_color = $('.related').css('background-color');
+ var light_color = $('.document').css('background-color');
+
+ function sidebar_is_collapsed() {
+ return sidebarwrapper.is(':not(:visible)');
+ }
+
+ function toggle_sidebar() {
+ if (sidebar_is_collapsed())
+ expand_sidebar();
+ else
+ collapse_sidebar();
+ }
+
+ function collapse_sidebar() {
+ sidebarwrapper.hide();
+ sidebar.css('width', ssb_width_collapsed);
+ bodywrapper.css('margin-left', bw_margin_collapsed);
+ sidebarbutton.css({
+ 'margin-left': '0',
+ 'height': bodywrapper.height()
+ });
+ sidebarbutton.find('span').text('»');
+ sidebarbutton.attr('title', _('Expand sidebar'));
+ document.cookie = 'sidebar=collapsed';
+ }
+
+ function expand_sidebar() {
+ bodywrapper.css('margin-left', bw_margin_expanded);
+ sidebar.css('width', ssb_width_expanded);
+ sidebarwrapper.show();
+ sidebarbutton.css({
+ 'margin-left': ssb_width_expanded-12,
+ 'height': bodywrapper.height()
+ });
+ sidebarbutton.find('span').text('«');
+ sidebarbutton.attr('title', _('Collapse sidebar'));
+ document.cookie = 'sidebar=expanded';
+ }
+
+ function add_sidebar_button() {
+ sidebarwrapper.css({
+ 'float': 'left',
+ 'margin-right': '0',
+ 'width': ssb_width_expanded - 28
+ });
+ // create the button
+ sidebar.append(
+ '<div id="sidebarbutton"><span>&laquo;</span></div>'
+ );
+ var sidebarbutton = $('#sidebarbutton');
+ light_color = sidebarbutton.css('background-color');
+ // find the height of the viewport to center the '<<' in the page
+ var viewport_height;
+ if (window.innerHeight)
+ viewport_height = window.innerHeight;
+ else
+ viewport_height = $(window).height();
+ sidebarbutton.find('span').css({
+ 'display': 'block',
+ 'margin-top': (viewport_height - sidebar.position().top - 20) / 2
+ });
+
+ sidebarbutton.click(toggle_sidebar);
+ sidebarbutton.attr('title', _('Collapse sidebar'));
+ sidebarbutton.css({
+ 'color': '#FFFFFF',
+ 'border-left': '1px solid ' + dark_color,
+ 'font-size': '1.2em',
+ 'cursor': 'pointer',
+ 'height': bodywrapper.height(),
+ 'padding-top': '1px',
+ 'margin-left': ssb_width_expanded - 12
+ });
+
+ sidebarbutton.hover(
+ function () {
+ $(this).css('background-color', dark_color);
+ },
+ function () {
+ $(this).css('background-color', light_color);
+ }
+ );
+ }
+
+ function set_position_from_cookie() {
+ if (!document.cookie)
+ return;
+ var items = document.cookie.split(';');
+ for(var k=0; k<items.length; k++) {
+ var key_val = items[k].split('=');
+ var key = key_val[0];
+ if (key == 'sidebar') {
+ var value = key_val[1];
+ if ((value == 'collapsed') && (!sidebar_is_collapsed()))
+ collapse_sidebar();
+ else if ((value == 'expanded') && (sidebar_is_collapsed()))
+ expand_sidebar();
+ }
+ }
+ }
+
+ add_sidebar_button();
+ var sidebarbutton = $('#sidebarbutton');
+ set_position_from_cookie();
+});
diff --git a/third_party/python/mock-1.0.0/html/_static/title_background.png b/third_party/python/mock-1.0.0/html/_static/title_background.png
new file mode 100644
index 0000000000..6fcd1cda87
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/title_background.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/toc.js b/third_party/python/mock-1.0.0/html/_static/toc.js
new file mode 100644
index 0000000000..7b709785d4
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/toc.js
@@ -0,0 +1,20 @@
+var TOC = {
+ load: function () {
+ $('#toc_button').click(TOC.toggle);
+ },
+
+ toggle: function () {
+ if ($('#sphinxsidebar').toggle().is(':hidden')) {
+ $('div.document').css('left', "0px");
+      $('#toc_button').removeClass("open");
+ } else {
+ $('div.document').css('left', "230px");
+ $('#toc_button').addClass("open");
+ }
+ return $('#sphinxsidebar');
+ }
+};
+
+$(document).ready(function () {
+ TOC.load();
+}); \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/_static/triangle_closed.png b/third_party/python/mock-1.0.0/html/_static/triangle_closed.png
new file mode 100644
index 0000000000..1e7f7bba20
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/triangle_closed.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/triangle_left.png b/third_party/python/mock-1.0.0/html/_static/triangle_left.png
new file mode 100644
index 0000000000..2d86be7dfe
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/triangle_left.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/triangle_open.png b/third_party/python/mock-1.0.0/html/_static/triangle_open.png
new file mode 100644
index 0000000000..e5d3bfdad1
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/triangle_open.png
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/_static/underscore.js b/third_party/python/mock-1.0.0/html/_static/underscore.js
new file mode 100644
index 0000000000..5d89914340
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/_static/underscore.js
@@ -0,0 +1,23 @@
+// Underscore.js 0.5.5
+// (c) 2009 Jeremy Ashkenas, DocumentCloud Inc.
+// Underscore is freely distributable under the terms of the MIT license.
+// Portions of Underscore are inspired by or borrowed from Prototype.js,
+// Oliver Steele's Functional, and John Resig's Micro-Templating.
+// For all details and documentation:
+// http://documentcloud.github.com/underscore/
+(function(){var j=this,n=j._,i=function(a){this._wrapped=a},m=typeof StopIteration!=="undefined"?StopIteration:"__break__",b=j._=function(a){return new i(a)};if(typeof exports!=="undefined")exports._=b;var k=Array.prototype.slice,o=Array.prototype.unshift,p=Object.prototype.toString,q=Object.prototype.hasOwnProperty,r=Object.prototype.propertyIsEnumerable;b.VERSION="0.5.5";b.each=function(a,c,d){try{if(a.forEach)a.forEach(c,d);else if(b.isArray(a)||b.isArguments(a))for(var e=0,f=a.length;e<f;e++)c.call(d,
+a[e],e,a);else{var g=b.keys(a);f=g.length;for(e=0;e<f;e++)c.call(d,a[g[e]],g[e],a)}}catch(h){if(h!=m)throw h;}return a};b.map=function(a,c,d){if(a&&b.isFunction(a.map))return a.map(c,d);var e=[];b.each(a,function(f,g,h){e.push(c.call(d,f,g,h))});return e};b.reduce=function(a,c,d,e){if(a&&b.isFunction(a.reduce))return a.reduce(b.bind(d,e),c);b.each(a,function(f,g,h){c=d.call(e,c,f,g,h)});return c};b.reduceRight=function(a,c,d,e){if(a&&b.isFunction(a.reduceRight))return a.reduceRight(b.bind(d,e),c);
+var f=b.clone(b.toArray(a)).reverse();b.each(f,function(g,h){c=d.call(e,c,g,h,a)});return c};b.detect=function(a,c,d){var e;b.each(a,function(f,g,h){if(c.call(d,f,g,h)){e=f;b.breakLoop()}});return e};b.select=function(a,c,d){if(a&&b.isFunction(a.filter))return a.filter(c,d);var e=[];b.each(a,function(f,g,h){c.call(d,f,g,h)&&e.push(f)});return e};b.reject=function(a,c,d){var e=[];b.each(a,function(f,g,h){!c.call(d,f,g,h)&&e.push(f)});return e};b.all=function(a,c,d){c=c||b.identity;if(a&&b.isFunction(a.every))return a.every(c,
+d);var e=true;b.each(a,function(f,g,h){(e=e&&c.call(d,f,g,h))||b.breakLoop()});return e};b.any=function(a,c,d){c=c||b.identity;if(a&&b.isFunction(a.some))return a.some(c,d);var e=false;b.each(a,function(f,g,h){if(e=c.call(d,f,g,h))b.breakLoop()});return e};b.include=function(a,c){if(b.isArray(a))return b.indexOf(a,c)!=-1;var d=false;b.each(a,function(e){if(d=e===c)b.breakLoop()});return d};b.invoke=function(a,c){var d=b.rest(arguments,2);return b.map(a,function(e){return(c?e[c]:e).apply(e,d)})};b.pluck=
+function(a,c){return b.map(a,function(d){return d[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);var e={computed:-Infinity};b.each(a,function(f,g,h){g=c?c.call(d,f,g,h):f;g>=e.computed&&(e={value:f,computed:g})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);var e={computed:Infinity};b.each(a,function(f,g,h){g=c?c.call(d,f,g,h):f;g<e.computed&&(e={value:f,computed:g})});return e.value};b.sortBy=function(a,c,d){return b.pluck(b.map(a,
+function(e,f,g){return{value:e,criteria:c.call(d,e,f,g)}}).sort(function(e,f){e=e.criteria;f=f.criteria;return e<f?-1:e>f?1:0}),"value")};b.sortedIndex=function(a,c,d){d=d||b.identity;for(var e=0,f=a.length;e<f;){var g=e+f>>1;d(a[g])<d(c)?(e=g+1):(f=g)}return e};b.toArray=function(a){if(!a)return[];if(a.toArray)return a.toArray();if(b.isArray(a))return a;if(b.isArguments(a))return k.call(a);return b.values(a)};b.size=function(a){return b.toArray(a).length};b.first=function(a,c,d){return c&&!d?k.call(a,
+0,c):a[0]};b.rest=function(a,c,d){return k.call(a,b.isUndefined(c)||d?1:c)};b.last=function(a){return a[a.length-1]};b.compact=function(a){return b.select(a,function(c){return!!c})};b.flatten=function(a){return b.reduce(a,[],function(c,d){if(b.isArray(d))return c.concat(b.flatten(d));c.push(d);return c})};b.without=function(a){var c=b.rest(arguments);return b.select(a,function(d){return!b.include(c,d)})};b.uniq=function(a,c){return b.reduce(a,[],function(d,e,f){if(0==f||(c===true?b.last(d)!=e:!b.include(d,
+e)))d.push(e);return d})};b.intersect=function(a){var c=b.rest(arguments);return b.select(b.uniq(a),function(d){return b.all(c,function(e){return b.indexOf(e,d)>=0})})};b.zip=function(){for(var a=b.toArray(arguments),c=b.max(b.pluck(a,"length")),d=new Array(c),e=0;e<c;e++)d[e]=b.pluck(a,String(e));return d};b.indexOf=function(a,c){if(a.indexOf)return a.indexOf(c);for(var d=0,e=a.length;d<e;d++)if(a[d]===c)return d;return-1};b.lastIndexOf=function(a,c){if(a.lastIndexOf)return a.lastIndexOf(c);for(var d=
+a.length;d--;)if(a[d]===c)return d;return-1};b.range=function(a,c,d){var e=b.toArray(arguments),f=e.length<=1;a=f?0:e[0];c=f?e[0]:e[1];d=e[2]||1;e=Math.ceil((c-a)/d);if(e<=0)return[];e=new Array(e);f=a;for(var g=0;1;f+=d){if((d>0?f-c:c-f)>=0)return e;e[g++]=f}};b.bind=function(a,c){var d=b.rest(arguments,2);return function(){return a.apply(c||j,d.concat(b.toArray(arguments)))}};b.bindAll=function(a){var c=b.rest(arguments);if(c.length==0)c=b.functions(a);b.each(c,function(d){a[d]=b.bind(a[d],a)});
+return a};b.delay=function(a,c){var d=b.rest(arguments,2);return setTimeout(function(){return a.apply(a,d)},c)};b.defer=function(a){return b.delay.apply(b,[a,1].concat(b.rest(arguments)))};b.wrap=function(a,c){return function(){var d=[a].concat(b.toArray(arguments));return c.apply(c,d)}};b.compose=function(){var a=b.toArray(arguments);return function(){for(var c=b.toArray(arguments),d=a.length-1;d>=0;d--)c=[a[d].apply(this,c)];return c[0]}};b.keys=function(a){if(b.isArray(a))return b.range(0,a.length);
+var c=[];for(var d in a)q.call(a,d)&&c.push(d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=function(a){return b.select(b.keys(a),function(c){return b.isFunction(a[c])}).sort()};b.extend=function(a,c){for(var d in c)a[d]=c[d];return a};b.clone=function(a){if(b.isArray(a))return a.slice(0);return b.extend({},a)};b.tap=function(a,c){c(a);return a};b.isEqual=function(a,c){if(a===c)return true;var d=typeof a;if(d!=typeof c)return false;if(a==c)return true;if(!a&&c||a&&!c)return false;
+if(a.isEqual)return a.isEqual(c);if(b.isDate(a)&&b.isDate(c))return a.getTime()===c.getTime();if(b.isNaN(a)&&b.isNaN(c))return true;if(b.isRegExp(a)&&b.isRegExp(c))return a.source===c.source&&a.global===c.global&&a.ignoreCase===c.ignoreCase&&a.multiline===c.multiline;if(d!=="object")return false;if(a.length&&a.length!==c.length)return false;d=b.keys(a);var e=b.keys(c);if(d.length!=e.length)return false;for(var f in a)if(!b.isEqual(a[f],c[f]))return false;return true};b.isEmpty=function(a){return b.keys(a).length==
+0};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=function(a){return!!(a&&a.concat&&a.unshift)};b.isArguments=function(a){return a&&b.isNumber(a.length)&&!b.isArray(a)&&!r.call(a,"length")};b.isFunction=function(a){return!!(a&&a.constructor&&a.call&&a.apply)};b.isString=function(a){return!!(a===""||a&&a.charCodeAt&&a.substr)};b.isNumber=function(a){return p.call(a)==="[object Number]"};b.isDate=function(a){return!!(a&&a.getTimezoneOffset&&a.setUTCFullYear)};b.isRegExp=function(a){return!!(a&&
+a.test&&a.exec&&(a.ignoreCase||a.ignoreCase===false))};b.isNaN=function(a){return b.isNumber(a)&&isNaN(a)};b.isNull=function(a){return a===null};b.isUndefined=function(a){return typeof a=="undefined"};b.noConflict=function(){j._=n;return this};b.identity=function(a){return a};b.breakLoop=function(){throw m;};var s=0;b.uniqueId=function(a){var c=s++;return a?a+c:c};b.template=function(a,c){a=new Function("obj","var p=[],print=function(){p.push.apply(p,arguments);};with(obj){p.push('"+a.replace(/[\r\t\n]/g,
+" ").replace(/'(?=[^%]*%>)/g,"\t").split("'").join("\\'").split("\t").join("'").replace(/<%=(.+?)%>/g,"',$1,'").split("<%").join("');").split("%>").join("p.push('")+"');}return p.join('');");return c?a(c):a};b.forEach=b.each;b.foldl=b.inject=b.reduce;b.foldr=b.reduceRight;b.filter=b.select;b.every=b.all;b.some=b.any;b.head=b.first;b.tail=b.rest;b.methods=b.functions;var l=function(a,c){return c?b(a).chain():a};b.each(b.functions(b),function(a){var c=b[a];i.prototype[a]=function(){var d=b.toArray(arguments);
+o.call(d,this._wrapped);return l(c.apply(b,d),this._chain)}});b.each(["pop","push","reverse","shift","sort","splice","unshift"],function(a){var c=Array.prototype[a];i.prototype[a]=function(){c.apply(this._wrapped,arguments);return l(this._wrapped,this._chain)}});b.each(["concat","join","slice"],function(a){var c=Array.prototype[a];i.prototype[a]=function(){return l(c.apply(this._wrapped,arguments),this._chain)}});i.prototype.chain=function(){this._chain=true;return this};i.prototype.value=function(){return this._wrapped}})();
diff --git a/third_party/python/mock-1.0.0/html/changelog.html b/third_party/python/mock-1.0.0/html/changelog.html
new file mode 100644
index 0000000000..c4b935d2cc
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/changelog.html
@@ -0,0 +1,839 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>CHANGELOG &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="prev" title="Mock Library Comparison" href="compare.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="compare.html" title="Mock Library Comparison"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="changelog">
+<h1>CHANGELOG<a class="headerlink" href="#changelog" title="Permalink to this headline">¶</a></h1>
+<div class="section" id="version-1-0-0">
+<h2>2012/10/07 Version 1.0.0<a class="headerlink" href="#version-1-0-0" title="Permalink to this headline">¶</a></h2>
+<p>No changes since 1.0.0 beta 1. This version has feature parity with
+<a class="reference external" href="http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock">unittest.mock</a>
+in Python 3.3.</p>
+<p>Full list of changes since 0.8:</p>
+<ul class="simple">
+<li><cite>mocksignature</cite>, along with the <cite>mocksignature</cite> argument to <cite>patch</cite>, removed</li>
+<li>Support for deleting attributes (accessing deleted attributes will raise an
+<cite>AttributeError</cite>)</li>
+<li>Added the <cite>mock_open</cite> helper function for mocking the builtin <cite>open</cite></li>
+<li><cite>__class__</cite> is assignable, so a mock can pass an <cite>isinstance</cite> check without
+requiring a spec</li>
+<li>Addition of <cite>PropertyMock</cite>, for mocking properties</li>
+<li><cite>MagicMocks</cite> made unorderable by default (in Python 3). The comparison
+methods (other than equality and inequality) now return <cite>NotImplemented</cite></li>
+<li>Propagate traceback info to support subclassing of <cite>_patch</cite> by other
+libraries</li>
+<li><cite>create_autospec</cite> works with attributes present in results of <cite>dir</cite> that
+can&#8217;t be fetched from the object&#8217;s class. Contributed by Konstantine Rybnikov</li>
+<li>Any exceptions in an iterable <cite>side_effect</cite> will be raised instead of
+returned</li>
+<li>In Python 3, <cite>create_autospec</cite> now supports keyword only arguments</li>
+<li>Added <cite>patch.stopall</cite> method to stop all active patches created by <cite>start</cite></li>
+<li>BUGFIX: calling <cite>MagicMock.reset_mock</cite> wouldn&#8217;t reset magic method mocks</li>
+<li>BUGFIX: calling <cite>reset_mock</cite> on a <cite>MagicMock</cite> created with autospec could
+raise an exception</li>
+<li>BUGFIX: passing multiple spec arguments to patchers (<cite>spec</cite> , <cite>spec_set</cite> and
+<cite>autospec</cite>) had unpredictable results, now it is an error</li>
+<li>BUGFIX: using <cite>spec=True</cite> <em>and</em> <cite>create=True</cite> as arguments to patchers could
+result in using <cite>DEFAULT</cite> as the spec. Now it is an error instead</li>
+<li>BUGFIX: using <cite>spec</cite> or <cite>autospec</cite> arguments to patchers, along with
+<cite>spec_set=True</cite> did not work correctly</li>
+<li>BUGFIX: using an object that evaluates to False as a spec could be ignored</li>
+<li>BUGFIX: a list as the <cite>spec</cite> argument to a patcher would always result in a
+non-callable mock. Now if <cite>__call__</cite> is in the spec the mock is callable</li>
+</ul>
+</div>
+<div class="section" id="version-1-0-0-beta-1">
+<h2>2012/07/13 Version 1.0.0 beta 1<a class="headerlink" href="#version-1-0-0-beta-1" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Added <cite>patch.stopall</cite> method to stop all active patches created by <cite>start</cite></li>
+<li>BUGFIX: calling <cite>MagicMock.reset_mock</cite> wouldn&#8217;t reset magic method mocks</li>
+<li>BUGFIX: calling <cite>reset_mock</cite> on a <cite>MagicMock</cite> created with autospec could
+raise an exception</li>
+</ul>
+</div>
+<div class="section" id="version-1-0-0-alpha-2">
+<h2>2012/05/04 Version 1.0.0 alpha 2<a class="headerlink" href="#version-1-0-0-alpha-2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><cite>PropertyMock</cite> attributes are now standard <cite>MagicMocks</cite></li>
+<li><cite>create_autospec</cite> works with attributes present in results of <cite>dir</cite> that
+can&#8217;t be fetched from the object&#8217;s class. Contributed by Konstantine Rybnikov</li>
+<li>Any exceptions in an iterable <cite>side_effect</cite> will be raised instead of
+returned</li>
+<li>In Python 3, <cite>create_autospec</cite> now supports keyword only arguments</li>
+</ul>
+</div>
+<div class="section" id="version-1-0-0-alpha-1">
+<h2>2012/03/25 Version 1.0.0 alpha 1<a class="headerlink" href="#version-1-0-0-alpha-1" title="Permalink to this headline">¶</a></h2>
+<p>The standard library version!</p>
+<ul class="simple">
+<li><cite>mocksignature</cite>, along with the <cite>mocksignature</cite> argument to <cite>patch</cite>, removed</li>
+<li>Support for deleting attributes (accessing deleted attributes will raise an
+<cite>AttributeError</cite>)</li>
+<li>Added the <cite>mock_open</cite> helper function for mocking the builtin <cite>open</cite></li>
+<li><cite>__class__</cite> is assignable, so a mock can pass an <cite>isinstance</cite> check without
+requiring a spec</li>
+<li>Addition of <cite>PropertyMock</cite>, for mocking properties</li>
+<li><cite>MagicMocks</cite> made unorderable by default (in Python 3). The comparison
+methods (other than equality and inequality) now return <cite>NotImplemented</cite></li>
+<li>Propagate traceback info to support subclassing of <cite>_patch</cite> by other
+libraries</li>
+<li>BUGFIX: passing multiple spec arguments to patchers (<cite>spec</cite> , <cite>spec_set</cite> and
+<cite>autospec</cite>) had unpredictable results, now it is an error</li>
+<li>BUGFIX: using <cite>spec=True</cite> <em>and</em> <cite>create=True</cite> as arguments to patchers could
+result in using <cite>DEFAULT</cite> as the spec. Now it is an error instead</li>
+<li>BUGFIX: using <cite>spec</cite> or <cite>autospec</cite> arguments to patchers, along with
+<cite>spec_set=True</cite> did not work correctly</li>
+<li>BUGFIX: using an object that evaluates to False as a spec could be ignored</li>
+<li>BUGFIX: a list as the <cite>spec</cite> argument to a patcher would always result in a
+non-callable mock. Now if <cite>__call__</cite> is in the spec the mock is callable</li>
+</ul>
+</div>
+<div class="section" id="version-0-8-0">
+<h2>2012/02/13 Version 0.8.0<a class="headerlink" href="#version-0-8-0" title="Permalink to this headline">¶</a></h2>
+<p>The only changes since 0.8rc2 are:</p>
+<ul class="simple">
+<li>Improved repr of <a class="reference internal" href="sentinel.html#mock.sentinel" title="mock.sentinel"><tt class="xref py py-data docutils literal"><span class="pre">sentinel</span></tt></a> objects</li>
+<li><a class="reference internal" href="helpers.html#mock.ANY" title="mock.ANY"><tt class="xref py py-data docutils literal"><span class="pre">ANY</span></tt></a> can be used for comparisons against <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> objects</li>
+<li>The return value of <cite>MagicMock.__iter__</cite> method can be set to
+any iterable and isn&#8217;t required to be an iterator</li>
+</ul>
+<p>Full List of changes since 0.7:</p>
+<p>mock 0.8.0 is the last version that will support Python 2.4.</p>
+<ul class="simple">
+<li>Addition of <a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> list for <em>all</em> calls (including magic
+methods and chained calls)</li>
+<li><a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> and <a class="reference internal" href="patch.html#mock.patch.object" title="mock.patch.object"><tt class="xref py py-func docutils literal"><span class="pre">patch.object()</span></tt></a> now create a <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a>
+instead of a <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> by default</li>
+<li>The patchers (<cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite>), plus <cite>Mock</cite> and
+<cite>MagicMock</cite>, take arbitrary keyword arguments for configuration</li>
+<li>New mock method <a class="reference internal" href="mock.html#mock.Mock.configure_mock" title="mock.Mock.configure_mock"><tt class="xref py py-meth docutils literal"><span class="pre">configure_mock()</span></tt></a> for setting attributes and
+return values / side effects on the mock and its attributes</li>
+<li>New mock assert methods <a class="reference internal" href="mock.html#mock.Mock.assert_any_call" title="mock.Mock.assert_any_call"><tt class="xref py py-meth docutils literal"><span class="pre">assert_any_call()</span></tt></a> and
+<a class="reference internal" href="mock.html#mock.Mock.assert_has_calls" title="mock.Mock.assert_has_calls"><tt class="xref py py-meth docutils literal"><span class="pre">assert_has_calls()</span></tt></a></li>
+<li>Implemented <a class="reference internal" href="helpers.html#auto-speccing"><em>Autospeccing</em></a> (recursive, lazy speccing of mocks with
+mocked signatures for functions/methods), as the <cite>autospec</cite> argument to
+<cite>patch</cite></li>
+<li>Added the <a class="reference internal" href="helpers.html#mock.create_autospec" title="mock.create_autospec"><tt class="xref py py-func docutils literal"><span class="pre">create_autospec()</span></tt></a> function for manually creating
+&#8216;auto-specced&#8217; mocks</li>
+<li><a class="reference internal" href="patch.html#mock.patch.multiple" title="mock.patch.multiple"><tt class="xref py py-func docutils literal"><span class="pre">patch.multiple()</span></tt></a> for doing multiple patches in a single call, using
+keyword arguments</li>
+<li>Setting <a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> to an iterable will cause calls to the mock
+to return the next value from the iterable</li>
+<li>New <cite>new_callable</cite> argument to <cite>patch</cite> and <cite>patch.object</cite> allowing you to
+pass in a class or callable object (instead of <cite>MagicMock</cite>) that will be
+called to replace the object being patched</li>
+<li>Addition of <a class="reference internal" href="mock.html#mock.NonCallableMock" title="mock.NonCallableMock"><tt class="xref py py-class docutils literal"><span class="pre">NonCallableMock</span></tt></a> and <a class="reference internal" href="magicmock.html#mock.NonCallableMagicMock" title="mock.NonCallableMagicMock"><tt class="xref py py-class docutils literal"><span class="pre">NonCallableMagicMock</span></tt></a>, mocks
+without a <cite>__call__</cite> method</li>
+<li>Addition of <a class="reference internal" href="mock.html#mock.Mock.mock_add_spec" title="mock.Mock.mock_add_spec"><tt class="xref py py-meth docutils literal"><span class="pre">mock_add_spec()</span></tt></a> method for adding (or changing) a
+spec on an existing mock</li>
+<li>Protocol methods on <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> are magic mocks, and are created
+lazily on first lookup. This means the result of calling a protocol method is
+a <cite>MagicMock</cite> instead of a <cite>Mock</cite> as it was previously</li>
+<li>Addition of <a class="reference internal" href="mock.html#mock.Mock.attach_mock" title="mock.Mock.attach_mock"><tt class="xref py py-meth docutils literal"><span class="pre">attach_mock()</span></tt></a> method</li>
+<li>Added <a class="reference internal" href="helpers.html#mock.ANY" title="mock.ANY"><tt class="xref py py-data docutils literal"><span class="pre">ANY</span></tt></a> for ignoring arguments in <a class="reference internal" href="mock.html#mock.Mock.assert_called_with" title="mock.Mock.assert_called_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt></a>
+calls</li>
+<li>Addition of <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> helper object</li>
+<li>Improved repr for mocks</li>
+<li>Improved repr for <a class="reference internal" href="mock.html#mock.Mock.call_args" title="mock.Mock.call_args"><tt class="xref py py-attr docutils literal"><span class="pre">Mock.call_args</span></tt></a> and entries in
+<a class="reference internal" href="mock.html#mock.Mock.call_args_list" title="mock.Mock.call_args_list"><tt class="xref py py-attr docutils literal"><span class="pre">Mock.call_args_list</span></tt></a>, <a class="reference internal" href="mock.html#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">Mock.method_calls</span></tt></a> and
+<a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">Mock.mock_calls</span></tt></a></li>
+<li>Improved repr for <a class="reference internal" href="sentinel.html#mock.sentinel" title="mock.sentinel"><tt class="xref py py-data docutils literal"><span class="pre">sentinel</span></tt></a> objects</li>
+<li><cite>patch</cite> lookup is done at use time not at decoration time</li>
+<li>In Python 2.6 or more recent, <cite>dir</cite> on a mock will report all the dynamically
+created attributes (or the full list of attributes if there is a spec) as
+well as all the mock methods and attributes.</li>
+<li>Module level <a class="reference internal" href="helpers.html#mock.FILTER_DIR" title="mock.FILTER_DIR"><tt class="xref py py-data docutils literal"><span class="pre">FILTER_DIR</span></tt></a> added to control whether <cite>dir(mock)</cite> filters
+private attributes. <cite>True</cite> by default.</li>
+<li><cite>patch.TEST_PREFIX</cite> for controlling how patchers recognise test methods when
+used to decorate a class</li>
+<li>Support for using Java exceptions as a <a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> on Jython</li>
+<li><cite>Mock</cite> call lists (<cite>call_args_list</cite>, <cite>method_calls</cite> &amp; <cite>mock_calls</cite>) are now
+custom list objects that allow membership tests for &#8220;sub lists&#8221; and have
+a nicer representation if you <cite>str</cite> or <cite>print</cite> them</li>
+<li>Mocks attached as attributes or return values to other mocks have calls
+recorded in <cite>method_calls</cite> and <cite>mock_calls</cite> of the parent (unless a name is
+already set on the child)</li>
+<li>Improved failure messages for <cite>assert_called_with</cite> and
+<cite>assert_called_once_with</cite></li>
+<li>The return value of the <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> <cite>__iter__</cite> method can be set to
+any iterable and isn&#8217;t required to be an iterator</li>
+<li>Added the Mock API (<cite>assert_called_with</cite> etc) to functions created by
+<tt class="xref py py-func docutils literal"><span class="pre">mocksignature()</span></tt></li>
+<li>Tuples as well as lists can be used to specify allowed methods for <cite>spec</cite> &amp;
+<cite>spec_set</cite> arguments</li>
+<li>Calling <cite>stop</cite> on an unstarted patcher fails with a more meaningful error
+message</li>
+<li>Renamed the internal classes <cite>Sentinel</cite> and <cite>SentinelObject</cite> to prevent abuse</li>
+<li>BUGFIX: an error creating a patch, with nested patch decorators, won&#8217;t leave
+patches in place</li>
+<li>BUGFIX: <cite>__truediv__</cite> and <cite>__rtruediv__</cite> not available as magic methods on
+mocks in Python 3</li>
+<li>BUGFIX: <cite>assert_called_with</cite> / <cite>assert_called_once_with</cite> can be used with
+<cite>self</cite> as a keyword argument</li>
+<li>BUGFIX: when patching a class with an explicit spec / spec_set (not a
+boolean) it applies &#8220;spec inheritance&#8221; to the return value of the created
+mock (the &#8220;instance&#8221;)</li>
+<li>BUGFIX: remove the <cite>__unittest</cite> marker causing traceback truncation</li>
+<li>Removal of deprecated <cite>patch_object</cite></li>
+<li>Private attributes <cite>_name</cite>, <cite>_methods</cite>, &#8216;_children&#8217;, <cite>_wraps</cite> and <cite>_parent</cite>
+(etc) renamed to reduce likelihood of clash with user attributes.</li>
+<li>Added license file to the distribution</li>
+</ul>
+</div>
+<div class="section" id="version-0-8-0-release-candidate-2">
+<h2>2012/01/10 Version 0.8.0 release candidate 2<a class="headerlink" href="#version-0-8-0-release-candidate-2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Removed the <cite>configure</cite> keyword argument to <cite>create_autospec</cite> and allow
+arbitrary keyword arguments (for the <cite>Mock</cite> constructor) instead</li>
+<li>Fixed <cite>ANY</cite> equality with some types in <cite>assert_called_with</cite> calls</li>
+<li>Switched to a standard Sphinx theme (compatible with
+<a class="reference external" href="http://mock.readthedocs.org">readthedocs.org</a>)</li>
+</ul>
+</div>
+<div class="section" id="version-0-8-0-release-candidate-1">
+<h2>2011/12/29 Version 0.8.0 release candidate 1<a class="headerlink" href="#version-0-8-0-release-candidate-1" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><cite>create_autospec</cite> on the return value of a mocked class will use <cite>__call__</cite>
+for the signature rather than <cite>__init__</cite></li>
+<li>Performance improvement instantiating <cite>Mock</cite> and <cite>MagicMock</cite></li>
+<li>Mocks used as magic methods have the same type as their parent instead of
+being hardcoded to <cite>MagicMock</cite></li>
+</ul>
+<p>Special thanks to Julian Berman for his help with diagnosing and improving
+performance in this release.</p>
+</div>
+<div class="section" id="version-0-8-0-beta-4">
+<h2>2011/10/09 Version 0.8.0 beta 4<a class="headerlink" href="#version-0-8-0-beta-4" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><cite>patch</cite> lookup is done at use time not at decoration time</li>
+<li>When attaching a Mock to another Mock as a magic method, calls are recorded
+in mock_calls</li>
+<li>Addition of <cite>attach_mock</cite> method</li>
+<li>Renamed the internal classes <cite>Sentinel</cite> and <cite>SentinelObject</cite> to prevent abuse</li>
+<li>BUGFIX: various issues around circular references with mocks (setting a mock
+return value to be itself etc)</li>
+</ul>
+</div>
+<div class="section" id="version-0-8-0-beta-3">
+<h2>2011/08/15 Version 0.8.0 beta 3<a class="headerlink" href="#version-0-8-0-beta-3" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Mocks attached as attributes or return values to other mocks have calls
+recorded in <cite>method_calls</cite> and <cite>mock_calls</cite> of the parent (unless a name is
+already set on the child)</li>
+<li>Addition of <cite>mock_add_spec</cite> method for adding (or changing) a spec on an
+existing mock</li>
+<li>Improved repr for <cite>Mock.call_args</cite> and entries in <cite>Mock.call_args_list</cite>,
+<cite>Mock.method_calls</cite> and <cite>Mock.mock_calls</cite></li>
+<li>Improved repr for mocks</li>
+<li>BUGFIX: minor fixes in the way <cite>mock_calls</cite> is worked out,
+especially for &#8220;intermediate&#8221; mocks in a call chain</li>
+</ul>
+</div>
+<div class="section" id="version-0-8-0-beta-2">
+<h2>2011/08/05 Version 0.8.0 beta 2<a class="headerlink" href="#version-0-8-0-beta-2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Setting <cite>side_effect</cite> to an iterable will cause calls to the mock to return
+the next value from the iterable (see the sketch after this list)</li>
+<li>Added <cite>assert_any_call</cite> method</li>
+<li>Moved <cite>assert_has_calls</cite> from call lists onto mocks</li>
+<li>BUGFIX: <cite>call_args</cite> and all members of <cite>call_args_list</cite> are two tuples of
+<cite>(args, kwargs)</cite> again instead of three tuples of <cite>(name, args, kwargs)</cite></li>
+</ul>
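+<p>A minimal sketch of the iterable <cite>side_effect</cite> and <cite>assert_any_call</cite>
+behaviour listed above (values and arguments are illustrative only; assumes the
+final mock 1.0 API):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import Mock
+
+m = Mock(side_effect=[1, 2, 3])
+
+# each call consumes the next value from the iterable
+assert m() == 1
+assert m(42) == 2
+
+# assert_any_call passes if the mock was ever called with these arguments
+m.assert_any_call(42)
+</pre></div></div>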
+</div>
+<div class="section" id="version-0-8-0-beta-1">
+<h2>2011/07/25 Version 0.8.0 beta 1<a class="headerlink" href="#version-0-8-0-beta-1" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><cite>patch.TEST_PREFIX</cite> for controlling how patchers recognise test methods when
+used to decorate a class</li>
+<li><cite>Mock</cite> call lists (<cite>call_args_list</cite>, <cite>method_calls</cite> &amp; <cite>mock_calls</cite>) are now
+custom list objects that allow membership tests for &#8220;sub lists&#8221; and have
+an <cite>assert_has_calls</cite> method for unordered call checks (see the sketch after this list)</li>
+<li><cite>callargs</cite> changed to <em>always</em> be a three-tuple of <cite>(name, args, kwargs)</cite></li>
+<li>Addition of <cite>mock_calls</cite> list for <em>all</em> calls (including magic methods and
+chained calls)</li>
+<li>Extension of <cite>call</cite> object to support chained calls and <cite>callargs</cite> for better
+comparisons with or without names. <cite>call</cite> object has a <cite>call_list</cite> method for
+chained calls</li>
+<li>Added the public <cite>instance</cite> argument to <cite>create_autospec</cite></li>
+<li>Support for using Java exceptions as a <cite>side_effect</cite> on Jython</li>
+<li>Improved failure messages for <cite>assert_called_with</cite> and
+<cite>assert_called_once_with</cite></li>
+<li>Tuples as well as lists can be used to specify allowed methods for <cite>spec</cite> &amp;
+<cite>spec_set</cite> arguments</li>
+<li>BUGFIX: Fixed bug in <cite>patch.multiple</cite> for argument passing when creating
+mocks</li>
+<li>Added license file to the distribution</li>
+</ul>
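+<p>A minimal sketch of the <cite>call</cite> helper, <cite>mock_calls</cite> and
+<cite>assert_has_calls</cite> features listed above (assumes the final mock 1.0 API):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import MagicMock, call
+
+m = MagicMock()
+m.method(1)
+m.method(2)
+
+# mock_calls records every call made on the mock and its children
+assert m.mock_calls == [call.method(1), call.method(2)]
+
+# assert_has_calls checks for a sub-list of calls; any_order relaxes ordering
+m.method.assert_has_calls([call(2), call(1)], any_order=True)
+</pre></div></div>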
+</div>
+<div class="section" id="version-0-8-0-alpha-2">
+<h2>2011/07/16 Version 0.8.0 alpha 2<a class="headerlink" href="#version-0-8-0-alpha-2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><cite>patch.multiple</cite> for doing multiple patches in a single call, using keyword
+arguments (see the sketch after this list)</li>
+<li>New <cite>new_callable</cite> argument to <cite>patch</cite> and <cite>patch.object</cite> allowing you to
+pass in a class or callable object (instead of <cite>MagicMock</cite>) that will be
+called to replace the object being patched</li>
+<li>Addition of <cite>NonCallableMock</cite> and <cite>NonCallableMagicMock</cite>, mocks without a
+<cite>__call__</cite> method</li>
+<li>Mocks created by <cite>patch</cite> have a <cite>MagicMock</cite> as the <cite>return_value</cite> where a
+class is being patched</li>
+<li><cite>create_autospec</cite> can create non-callable mocks for non-callable objects.
+<cite>return_value</cite> mocks of classes will be non-callable unless the class has
+a <cite>__call__</cite> method</li>
+<li><cite>autospec</cite> creates a <cite>MagicMock</cite> without a spec for properties and slot
+descriptors, because we don&#8217;t know the type of object they return</li>
+<li>Removed the &#8220;inherit&#8221; argument from <cite>create_autospec</cite></li>
+<li>Calling <cite>stop</cite> on an unstarted patcher fails with a more meaningful error
+message</li>
+<li>BUGFIX: an error creating a patch, with nested patch decorators, won&#8217;t leave
+patches in place</li>
+<li>BUGFIX: <cite>__truediv__</cite> and <cite>__rtruediv__</cite> not available as magic methods on
+mocks in Python 3</li>
+<li>BUGFIX: <cite>assert_called_with</cite> / <cite>assert_called_once_with</cite> can be used with
+<cite>self</cite> as a keyword argument</li>
+<li>BUGFIX: autospec no longer breaks for functions / methods with an argument
+named self that isn&#8217;t the first argument</li>
+<li>BUGFIX: when patching a class with an explicit spec / spec_set (not a
+boolean) it applies &#8220;spec inheritance&#8221; to the return value of the created
+mock (the &#8220;instance&#8221;)</li>
+<li>BUGFIX: remove the <cite>__unittest</cite> marker causing traceback truncation</li>
+</ul>
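+<p>A rough sketch of <cite>patch.multiple</cite> and <cite>new_callable</cite> as listed above.
+Standard-library names are patched purely for illustration, and the example assumes
+the final mock 1.0 API:</p>
+<div class="highlight-python"><div class="highlight"><pre>import os.path
+from mock import DEFAULT, MagicMock, patch
+
+# patch.multiple replaces several attributes of one target in a single call;
+# DEFAULT means "replace with a MagicMock and hand it back in the dict"
+with patch.multiple('os.path', exists=DEFAULT, isdir=DEFAULT) as mocks:
+    mocks['exists'].return_value = True
+    assert os.path.exists('nowhere')
+
+# new_callable controls which class (or factory) supplies the replacement object
+with patch('os.getcwd', new_callable=MagicMock) as cwd:
+    cwd.return_value = '/tmp'
+    assert os.getcwd() == '/tmp'
+</pre></div></div>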
+</div>
+<div class="section" id="version-0-8-0-alpha-1">
+<h2>2011/06/14 Version 0.8.0 alpha 1<a class="headerlink" href="#version-0-8-0-alpha-1" title="Permalink to this headline">¶</a></h2>
+<p>mock 0.8.0 is the last version that will support Python 2.4.</p>
+<ul>
+<li><p class="first">The patchers (<cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite>), plus <cite>Mock</cite> and
+<cite>MagicMock</cite>, take arbitrary keyword arguments for configuration</p>
+</li>
+<li><p class="first">New mock method <cite>configure_mock</cite> for setting attributes and return values /
+side effects on the mock and its attributes</p>
+</li>
+<li><p class="first">In Python 2.6 or more recent, <cite>dir</cite> on a mock will report all the dynamically
+created attributes (or the full list of attributes if there is a spec) as
+well as all the mock methods and attributes.</p>
+</li>
+<li><p class="first">Module level <cite>FILTER_DIR</cite> added to control whether <cite>dir(mock)</cite> filters
+private attributes. <cite>True</cite> by default. Note that <cite>vars(Mock())</cite> can still be
+used to get all instance attributes and <cite>dir(type(Mock()))</cite> will still return
+all the other attributes (irrespective of <cite>FILTER_DIR</cite>)</p>
+</li>
+<li><p class="first"><cite>patch</cite> and <cite>patch.object</cite> now create a <cite>MagicMock</cite> instead of a <cite>Mock</cite> by
+default</p>
+</li>
+<li><p class="first">Added <cite>ANY</cite> for ignoring arguments in <cite>assert_called_with</cite> calls</p>
+</li>
+<li><p class="first">Addition of <cite>call</cite> helper object</p>
+</li>
+<li><p class="first">Protocol methods on <cite>MagicMock</cite> are magic mocks, and are created lazily on
+first lookup. This means the result of calling a protocol method is a
+MagicMock instead of a Mock as it was previously</p>
+</li>
+<li><p class="first">Added the Mock API (<cite>assert_called_with</cite> etc) to functions created by
+<cite>mocksignature</cite></p>
+</li>
+<li><p class="first">Private attributes <cite>_name</cite>, <cite>_methods</cite>, &#8216;_children&#8217;, <cite>_wraps</cite> and <cite>_parent</cite>
+(etc) renamed to reduce likelihood of clash with user attributes.</p>
+</li>
+<li><p class="first">Implemented auto-speccing (recursive, lazy speccing of mocks with mocked
+signatures for functions/methods)</p>
+<p>Limitations:</p>
+<ul class="simple">
+<li>Doesn&#8217;t mock magic methods or attributes (it creates MagicMocks, so the
+magic methods are <em>there</em>, they just don&#8217;t have the signature mocked nor
+are attributes followed)</li>
+<li>Doesn&#8217;t mock function / method attributes</li>
+<li>Uses object traversal on the objects being mocked to determine types - so
+properties etc may be triggered</li>
+<li>The return value of mocked classes (the &#8216;instance&#8217;) has the same call
+signature as the class __init__ (as they share the same spec)</li>
+</ul>
+<p>You create auto-specced mocks by passing <cite>autospec=True</cite> to <cite>patch</cite> (a short sketch follows this list).</p>
+<p>Note that attributes that are None are special cased and mocked without a
+spec (so any attribute / method can be used). This is because None is
+typically used as a default value for attributes that may be of some other
+type, and as we don&#8217;t know what type that may be we allow all access.</p>
+<p>Note that the <cite>autospec</cite> option to <cite>patch</cite> obsoletes the <cite>mocksignature</cite>
+option.</p>
+</li>
+<li><p class="first">Added the <cite>create_autospec</cite> function for manually creating &#8216;auto-specced&#8217;
+mocks</p>
+</li>
+<li><p class="first">Removal of deprecated <cite>patch_object</cite></p>
+</li>
+</ul>
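+<p>The auto-speccing described above can be sketched as follows. A standard-library
+function is patched purely for illustration, and the example assumes the final
+mock 1.0 API:</p>
+<div class="highlight-python"><div class="highlight"><pre>import os.path
+from mock import patch
+
+# autospec=True copies the signature of the real object onto the mock,
+# so calls with the wrong arguments fail instead of passing silently
+with patch('os.path.join', autospec=True) as join:
+    join.return_value = 'x/y'
+    assert os.path.join('x', 'y') == 'x/y'
+    join.assert_called_once_with('x', 'y')
+</pre></div></div>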
+</div>
+<div class="section" id="version-0-7-2">
+<h2>2011/05/30 Version 0.7.2<a class="headerlink" href="#version-0-7-2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>BUGFIX: instances of list subclasses can now be used as mock specs</li>
+<li>BUGFIX: MagicMock equality / inequality protocol methods changed to use the
+default equality / inequality. This is done through a <cite>side_effect</cite> on
+the mocks used for <cite>__eq__</cite> / <cite>__ne__</cite></li>
+</ul>
+</div>
+<div class="section" id="version-0-7-1">
+<h2>2011/05/06 Version 0.7.1<a class="headerlink" href="#version-0-7-1" title="Permalink to this headline">¶</a></h2>
+<p>Package fixes contributed by Michael Fladischer. No code changes.</p>
+<ul class="simple">
+<li>Include template in package</li>
+<li>Use isolated binaries for the tox tests</li>
+<li>Unset executable bit on docs</li>
+<li>Fix DOS line endings in getting-started.txt</li>
+</ul>
+</div>
+<div class="section" id="version-0-7-0">
+<h2>2011/03/05 Version 0.7.0<a class="headerlink" href="#version-0-7-0" title="Permalink to this headline">¶</a></h2>
+<p>No API changes since 0.7.0 rc1. Many documentation changes including a stylish
+new <a class="reference external" href="https://github.com/coordt/ADCtheme/">Sphinx theme</a>.</p>
+<p>The full set of changes since 0.6.0 are:</p>
+<ul class="simple">
+<li>Python 3 compatibility</li>
+<li>Ability to mock magic methods with <cite>Mock</cite> and addition of <cite>MagicMock</cite>
+with pre-created magic methods</li>
+<li>Addition of <cite>mocksignature</cite> and <cite>mocksignature</cite> argument to <cite>patch</cite> and
+<cite>patch.object</cite></li>
+<li>Addition of <cite>patch.dict</cite> for changing dictionaries during a test (see the sketch after this list)</li>
+<li>Ability to use <cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite> as class decorators</li>
+<li>Renamed <tt class="docutils literal"><span class="pre">patch_object</span></tt> to <cite>patch.object</cite> (<tt class="docutils literal"><span class="pre">patch_object</span></tt> is
+deprecated)</li>
+<li>Addition of soft comparisons: <cite>call_args</cite>, <cite>call_args_list</cite> and <cite>method_calls</cite>
+now return tuple-like objects which compare equal even when empty args
+or kwargs are skipped</li>
+<li>patchers (<cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite>) have start and stop
+methods</li>
+<li>Addition of <cite>assert_called_once_with</cite> method</li>
+<li>Mocks can now be named (<cite>name</cite> argument to constructor) and the name is used
+in the repr</li>
+<li>repr of a mock with a spec includes the class name of the spec</li>
+<li><cite>assert_called_with</cite> works with <cite>python -OO</cite></li>
+<li>New <cite>spec_set</cite> keyword argument to <cite>Mock</cite> and <cite>patch</cite>. If used,
+attempting to <em>set</em> an attribute on a mock not on the spec will raise an
+<cite>AttributeError</cite></li>
+<li>Mocks created with a spec can now pass <cite>isinstance</cite> tests (<cite>__class__</cite>
+returns the type of the spec)</li>
+<li>Added docstrings to all objects</li>
+<li>Improved failure message for <cite>Mock.assert_called_with</cite> when the mock
+has not been called at all</li>
+<li>Decorated functions / methods have their docstring and <cite>__module__</cite>
+preserved on Python 2.4.</li>
+<li>BUGFIX: <cite>mock.patch</cite> now works correctly with certain types of objects that
+proxy attribute access, like the django settings object</li>
+<li>BUGFIX: mocks are now copyable (thanks to Ned Batchelder for reporting and
+diagnosing this)</li>
+<li>BUGFIX: <cite>spec=True</cite> works with old style classes</li>
+<li>BUGFIX: <tt class="docutils literal"><span class="pre">help(mock)</span></tt> works now (on the module). Can no longer use <tt class="docutils literal"><span class="pre">__bases__</span></tt>
+as a valid sentinel name (thanks to Stephen Emslie for reporting and
+diagnosing this)</li>
+<li>BUGFIX: <tt class="docutils literal"><span class="pre">side_effect</span></tt> now works with <tt class="docutils literal"><span class="pre">BaseException</span></tt> exceptions like
+<tt class="docutils literal"><span class="pre">KeyboardInterrupt</span></tt></li>
+<li>BUGFIX: <cite>reset_mock</cite> caused infinite recursion when a mock is set as its own
+return value</li>
+<li>BUGFIX: patching the same object twice now restores the patches correctly</li>
+<li>with statement tests now skipped on Python 2.4</li>
+<li>Tests require unittest2 (or unittest2-py3k) to run</li>
+<li>Tested with <a class="reference external" href="http://pypi.python.org/pypi/tox">tox</a> on Python 2.4 - 3.2,
+jython and pypy (excluding 3.0)</li>
+<li>Added &#8216;build_sphinx&#8217; command to setup.py (requires setuptools or distribute).
+Thanks to Florian Bauer</li>
+<li>Switched from subversion to mercurial for source code control</li>
+<li><a class="reference external" href="http://konryd.blogspot.com/">Konrad Delong</a> added as co-maintainer</li>
+</ul>
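+<p>A small sketch of the <cite>patch.dict</cite> and <cite>spec_set</cite> behaviour listed
+above (the environment variable and attribute names are illustrative only; assumes
+the final mock 1.0 API):</p>
+<div class="highlight-python"><div class="highlight"><pre>import os
+from mock import Mock, patch
+
+# patch.dict changes a dictionary for the duration of the block, then restores it
+with patch.dict(os.environ, {'MOCK_DEMO': '1'}):
+    assert os.environ['MOCK_DEMO'] == '1'
+assert 'MOCK_DEMO' not in os.environ
+
+# spec_set: setting an attribute that is not on the spec raises AttributeError
+m = Mock(spec_set=['frobnicate'])
+m.frobnicate()
+try:
+    m.other = 1
+except AttributeError:
+    pass
+</pre></div></div>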
+</div>
+<div class="section" id="version-0-7-0-rc-1">
+<h2>2011/02/16 Version 0.7.0 RC 1<a class="headerlink" href="#version-0-7-0-rc-1" title="Permalink to this headline">¶</a></h2>
+<p>Changes since beta 4:</p>
+<ul class="simple">
+<li>Tested with jython, pypy and Python 3.2 and 3.1</li>
+<li>Decorated functions / methods have their docstring and <cite>__module__</cite>
+preserved on Python 2.4</li>
+<li>BUGFIX: <cite>mock.patch</cite> now works correctly with certain types of objects that
+proxy attribute access, like the django settings object</li>
+<li>BUGFIX: <cite>reset_mock</cite> caused infinite recursion when a mock is set as its own
+return value</li>
+</ul>
+</div>
+<div class="section" id="version-0-7-0-beta-4">
+<h2>2010/11/12 Version 0.7.0 beta 4<a class="headerlink" href="#version-0-7-0-beta-4" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>patchers (<cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite>) have start and stop
+methods</li>
+<li>Addition of <cite>assert_called_once_with</cite> method</li>
+<li>repr of a mock with a spec includes the class name of the spec</li>
+<li><cite>assert_called_with</cite> works with <cite>python -OO</cite></li>
+<li>New <cite>spec_set</cite> keyword argument to <cite>Mock</cite> and <cite>patch</cite>. If used,
+attempting to <em>set</em> an attribute on a mock not on the spec will raise an
+<cite>AttributeError</cite></li>
+<li>Attributes and return value of a <cite>MagicMock</cite> are <cite>MagicMock</cite> objects</li>
+<li>Attempting to set an unsupported magic method now raises an <cite>AttributeError</cite></li>
+<li><cite>patch.dict</cite> works as a class decorator</li>
+<li>Switched from subversion to mercurial for source code control</li>
+<li>BUGFIX: mocks are now copyable (thanks to Ned Batchelder for reporting and
+diagnosing this)</li>
+<li>BUGFIX: <cite>spec=True</cite> works with old style classes</li>
+<li>BUGFIX: <cite>mocksignature=True</cite> can now patch instance methods via
+<cite>patch.object</cite></li>
+</ul>
+</div>
+<div class="section" id="version-0-7-0-beta-3">
+<h2>2010/09/18 Version 0.7.0 beta 3<a class="headerlink" href="#version-0-7-0-beta-3" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Using spec with <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> only pre-creates magic methods in the spec</li>
+<li>Setting a magic method on a mock with a <tt class="docutils literal"><span class="pre">spec</span></tt> can only be done if the
+spec has that method</li>
+<li>Mocks can now be named (<cite>name</cite> argument to constructor) and the name is used
+in the repr</li>
+<li><cite>mocksignature</cite> can now be used with classes (signature based on <cite>__init__</cite>)
+and callable objects (signature based on <cite>__call__</cite>)</li>
+<li>Mocks created with a spec can now pass <cite>isinstance</cite> tests (<cite>__class__</cite>
+returns the type of the spec); see the sketch after this list</li>
+<li>Default numeric value for MagicMock is 1 rather than zero (because the
+MagicMock bool defaults to True and 0 is False)</li>
+<li>Improved failure message for <a class="reference internal" href="mock.html#mock.Mock.assert_called_with" title="mock.Mock.assert_called_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt></a> when the mock
+has not been called at all</li>
+<li>Adding the following to the set of supported magic methods:<ul>
+<li><tt class="docutils literal"><span class="pre">__getformat__</span></tt> and <tt class="docutils literal"><span class="pre">__setformat__</span></tt></li>
+<li>pickle methods</li>
+<li><tt class="docutils literal"><span class="pre">__trunc__</span></tt>, <tt class="docutils literal"><span class="pre">__ceil__</span></tt> and <tt class="docutils literal"><span class="pre">__floor__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__sizeof__</span></tt></li>
+</ul>
+</li>
+<li>Added &#8216;build_sphinx&#8217; command to setup.py (requires setuptools or distribute).
+Thanks to Florian Bauer</li>
+<li>with statement tests now skipped on Python 2.4</li>
+<li>Tests require unittest2 to run on Python 2.7</li>
+<li>Improved several docstrings and documentation</li>
+</ul>
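+<p>A minimal sketch of named mocks and spec-based <cite>isinstance</cite> checks as listed
+above (the <cite>Thing</cite> class is an illustrative stand-in; assumes the final mock 1.0 API):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import Mock
+
+class Thing(object):
+    def frob(self):
+        pass
+
+# a mock created with a spec passes isinstance checks against the spec class
+m = Mock(spec=Thing, name='thing-mock')
+assert isinstance(m, Thing)
+
+# the name appears in the repr, which helps when reading test failures
+assert 'thing-mock' in repr(m)
+</pre></div></div>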
+</div>
+<div class="section" id="version-0-7-0-beta-2">
+<h2>2010/06/23 Version 0.7.0 beta 2<a class="headerlink" href="#version-0-7-0-beta-2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><a class="reference internal" href="patch.html#mock.patch.dict" title="mock.patch.dict"><tt class="xref py py-func docutils literal"><span class="pre">patch.dict()</span></tt></a> works as a context manager as well as a decorator</li>
+<li><tt class="docutils literal"><span class="pre">patch.dict</span></tt> takes a string to specify dictionary as well as a dictionary
+object. If a string is supplied the name specified is imported</li>
+<li>BUGFIX: <tt class="docutils literal"><span class="pre">patch.dict</span></tt> restores dictionary even when an exception is raised</li>
+</ul>
+</div>
+<div class="section" id="version-0-7-0-beta-1">
+<h2>2010/06/22 Version 0.7.0 beta 1<a class="headerlink" href="#version-0-7-0-beta-1" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Addition of <tt class="xref py py-func docutils literal"><span class="pre">mocksignature()</span></tt></li>
+<li>Ability to mock magic methods</li>
+<li>Ability to use <tt class="docutils literal"><span class="pre">patch</span></tt> and <tt class="docutils literal"><span class="pre">patch.object</span></tt> as class decorators</li>
+<li>Renamed <tt class="docutils literal"><span class="pre">patch_object</span></tt> to <a class="reference internal" href="patch.html#mock.patch.object" title="mock.patch.object"><tt class="xref py py-func docutils literal"><span class="pre">patch.object()</span></tt></a> (<tt class="docutils literal"><span class="pre">patch_object</span></tt> is
+deprecated)</li>
+<li>Addition of <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> class with all magic methods pre-created for you</li>
+<li>Python 3 compatibility (tested with 3.2 but should work with 3.0 &amp; 3.1 as
+well)</li>
+<li>Addition of <a class="reference internal" href="patch.html#mock.patch.dict" title="mock.patch.dict"><tt class="xref py py-func docutils literal"><span class="pre">patch.dict()</span></tt></a> for changing dictionaries during a test</li>
+<li>Addition of <tt class="docutils literal"><span class="pre">mocksignature</span></tt> argument to <tt class="docutils literal"><span class="pre">patch</span></tt> and <tt class="docutils literal"><span class="pre">patch.object</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">help(mock)</span></tt> works now (on the module). Can no longer use <tt class="docutils literal"><span class="pre">__bases__</span></tt>
+as a valid sentinel name (thanks to Stephen Emslie for reporting and
+diagnosing this)</li>
+<li>Addition of soft comparisons: <cite>call_args</cite>, <cite>call_args_list</cite> and <cite>method_calls</cite>
+now return tuple-like objects which compare equal even when empty args
+or kwargs are skipped</li>
+<li>Added docstrings.</li>
+<li>BUGFIX: <tt class="docutils literal"><span class="pre">side_effect</span></tt> now works with <tt class="docutils literal"><span class="pre">BaseException</span></tt> exceptions like
+<tt class="docutils literal"><span class="pre">KeyboardInterrupt</span></tt></li>
+<li>BUGFIX: patching the same object twice now restores the patches correctly</li>
+<li>The tests now require <a class="reference external" href="http://pypi.python.org/pypi/unittest2">unittest2</a>
+to run</li>
+<li><a class="reference external" href="http://konryd.blogspot.com/">Konrad Delong</a> added as co-maintainer</li>
+</ul>
+</div>
+<div class="section" id="version-0-6-0">
+<h2>2009/08/22 Version 0.6.0<a class="headerlink" href="#version-0-6-0" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>New test layout compatible with test discovery</li>
+<li>Descriptors (static methods / class methods etc) can now be patched and
+restored correctly</li>
+<li>Mocks can raise exceptions when called by setting <tt class="docutils literal"><span class="pre">side_effect</span></tt> to an
+exception class or instance</li>
+<li>Mocks that wrap objects will not pass on calls to the underlying object if
+an explicit return_value is set</li>
+</ul>
+</div>
+<div class="section" id="version-0-5-0">
+<h2>2009/04/17 Version 0.5.0<a class="headerlink" href="#version-0-5-0" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Made DEFAULT part of the public api.</li>
+<li>Documentation built with Sphinx.</li>
+<li><tt class="docutils literal"><span class="pre">side_effect</span></tt> is now called with the same arguments as the mock is called with and
+if returns a non-DEFAULT value that is automatically set as the <tt class="docutils literal"><span class="pre">mock.return_value</span></tt>.</li>
+<li><tt class="docutils literal"><span class="pre">wraps</span></tt> keyword argument used for wrapping objects (and passing calls through to the wrapped object).</li>
+<li><tt class="docutils literal"><span class="pre">Mock.reset</span></tt> renamed to <tt class="docutils literal"><span class="pre">Mock.reset_mock</span></tt>, as reset is a common API name.</li>
+<li><tt class="docutils literal"><span class="pre">patch</span></tt> / <tt class="docutils literal"><span class="pre">patch_object</span></tt> are now context managers and can be used with <tt class="docutils literal"><span class="pre">with</span></tt>.</li>
+<li>A new &#8216;create&#8217; keyword argument to patch and patch_object that allows them to patch
+(and unpatch) attributes that don&#8217;t exist. (Potentially unsafe to use - it can allow
+you to have tests that pass when they are testing an API that doesn&#8217;t exist - use at
+your own risk!)</li>
+<li>The methods keyword argument to Mock has been removed and merged with spec. The spec
+argument can now be a list of methods or an object to take the spec from.</li>
+<li>Nested patches may now be applied in a different order (created mocks passed
+in the opposite order). This is actually a bugfix.</li>
+<li>patch and patch_object now take a spec keyword argument. If spec is
+passed in as &#8216;True&#8217; then the Mock created will take the object it is replacing
+as its spec object. If the object being replaced is a class, then the return
+value for the mock will also use the class as a spec.</li>
+<li>A Mock created without a spec will not attempt to mock any magic methods / attributes
+(they will raise an <tt class="docutils literal"><span class="pre">AttributeError</span></tt> instead).</li>
+</ul>
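+<p>A minimal sketch of the <cite>wraps</cite> behaviour listed above (the wrapped list is
+illustrative only; assumes the final mock 1.0 API):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import Mock
+
+real = ['a', 'b']
+m = Mock(wraps=real)
+
+# calls pass through to the wrapped object unless an explicit return_value is set
+assert m.index('b') == 1
+m.index.assert_called_with('b')
+</pre></div></div>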
+</div>
+<div class="section" id="version-0-4-0">
+<h2>2008/10/12 Version 0.4.0<a class="headerlink" href="#version-0-4-0" title="Permalink to this headline">¶</a></h2>
+<ul>
+<li><p class="first">Default return value is now a new mock rather than None</p>
+</li>
+<li><p class="first">return_value added as a keyword argument to the constructor</p>
+</li>
+<li><p class="first">New method &#8216;assert_called_with&#8217;</p>
+</li>
+<li><p class="first">Added &#8216;side_effect&#8217; attribute / keyword argument called when mock is called</p>
+</li>
+<li><p class="first">patch decorator split into two decorators:</p>
+<blockquote>
+<div><ul class="simple">
+<li><tt class="docutils literal"><span class="pre">patch_object</span></tt> which takes an object and an attribute name to patch
+(plus optionally a value to patch with which defaults to a mock object)</li>
+<li><tt class="docutils literal"><span class="pre">patch</span></tt> which takes a string specifying a target to patch; in the form
+&#8216;package.module.Class.attribute&#8217;. (plus optionally a value to
+patch with which defaults to a mock object)</li>
+</ul>
+</div></blockquote>
+</li>
+<li><p class="first">Can now patch objects with <tt class="docutils literal"><span class="pre">None</span></tt></p>
+</li>
+<li><p class="first">Change to patch for nose compatibility with error reporting in wrapped functions</p>
+</li>
+<li><p class="first">Reset no longer clears children / return value etc - it just resets
+call count and call args. It also calls reset on all children (and
+the return value if it is a mock).</p>
+</li>
+</ul>
+<p>Thanks to Konrad Delong, Kevin Dangoor and others for patches and suggestions.</p>
+</div>
+<div class="section" id="version-0-3-1">
+<h2>2007/12/03 Version 0.3.1<a class="headerlink" href="#version-0-3-1" title="Permalink to this headline">¶</a></h2>
+<p><tt class="docutils literal"><span class="pre">patch</span></tt> maintains the name of decorated functions for compatibility with nose
+test autodiscovery.</p>
+<p>Tests decorated with <tt class="docutils literal"><span class="pre">patch</span></tt> that use the two argument form (implicit mock
+creation) will receive the mock(s) passed in as extra arguments.</p>
+<p>Thanks to Kevin Dangoor for these changes.</p>
+</div>
+<div class="section" id="version-0-3-0">
+<h2>2007/11/30 Version 0.3.0<a class="headerlink" href="#version-0-3-0" title="Permalink to this headline">¶</a></h2>
+<p>Removed <tt class="docutils literal"><span class="pre">patch_module</span></tt>. <tt class="docutils literal"><span class="pre">patch</span></tt> can now take a string as the first
+argument for patching modules.</p>
+<p>The third argument to <tt class="docutils literal"><span class="pre">patch</span></tt> is optional - a mock will be created by
+default if it is not passed in.</p>
+</div>
+<div class="section" id="version-0-2-1">
+<h2>2007/11/21 Version 0.2.1<a class="headerlink" href="#version-0-2-1" title="Permalink to this headline">¶</a></h2>
+<p>Bug fix, allows reuse of functions decorated with <tt class="docutils literal"><span class="pre">patch</span></tt> and <tt class="docutils literal"><span class="pre">patch_module</span></tt>.</p>
+</div>
+<div class="section" id="version-0-2-0">
+<h2>2007/11/20 Version 0.2.0<a class="headerlink" href="#version-0-2-0" title="Permalink to this headline">¶</a></h2>
+<p>Added <tt class="docutils literal"><span class="pre">spec</span></tt> keyword argument for creating <tt class="docutils literal"><span class="pre">Mock</span></tt> objects from a
+specification object.</p>
+<p>Added <tt class="docutils literal"><span class="pre">patch</span></tt> and <tt class="docutils literal"><span class="pre">patch_module</span></tt> monkey patching decorators.</p>
+<p>Added <tt class="docutils literal"><span class="pre">sentinel</span></tt> for convenient access to unique objects.</p>
+<p>Distribution includes unit tests.</p>
+</div>
+<div class="section" id="version-0-1-0">
+<h2>2007/11/19 Version 0.1.0<a class="headerlink" href="#version-0-1-0" title="Permalink to this headline">¶</a></h2>
+<p>Initial release.</p>
+</div>
+</div>
+<div class="section" id="todo-and-limitations">
+<h1>TODO and Limitations<a class="headerlink" href="#todo-and-limitations" title="Permalink to this headline">¶</a></h1>
+<p>Contributions, bug reports and comments welcomed!</p>
+<p>Feature requests and bug reports are handled on the issue tracker:</p>
+<blockquote>
+<div><ul class="simple">
+<li><a class="reference external" href="http://code.google.com/p/mock/issues/list">mock issue tracker</a></li>
+</ul>
+</div></blockquote>
+<p><cite>wraps</cite> is not integrated with magic methods.</p>
+<p><cite>patch</cite> could auto-do the patching in the constructor and unpatch in the
+destructor. This would be useful in itself, but violates TOOWTDI and would be
+unsafe for IronPython &amp; PyPy (non-deterministic calling of destructors).
+Destructors aren&#8217;t called in CPython where there are cycles, but a weak
+reference with a callback can be used to get round this.</p>
+<p><cite>Mock</cite> has several attributes. This makes it unsuitable for mocking objects
+that use these attribute names. A way round this would be to provide methods
+that <em>hide</em> these attributes when needed. In 0.8 many, but not all, of these
+attributes are renamed to gain a <cite>_mock</cite> prefix, making it less likely that
+they will clash. Any outstanding attributes that haven&#8217;t been modified with
+the prefix should be changed.</p>
+<p>If a patch is started using <cite>patch.start</cite> and then not stopped correctly,
+the unpatching is not done. Using weak references it would be possible to
+detect and fix this when the patch object itself is garbage collected. This
+would be tricky to get right though.</p>
+<p>When a <cite>Mock</cite> is created by <cite>patch</cite>, arbitrary keywords can be used to set
+attributes. If <cite>patch</cite> is created with a <cite>spec</cite>, and is replacing a class, then
+a <cite>return_value</cite> mock is created. The keyword arguments are not applied to the
+child mock, but could be.</p>
+<p>When mocking a class with <cite>patch</cite>, passing in <cite>spec=True</cite> or <cite>autospec=True</cite>,
+the mock class has an instance created from the same spec. Should this be the
+default behaviour for mocks anyway (mock return values inheriting the spec
+from their parent), or should it be controlled by an additional keyword
+argument (<cite>inherit</cite>) to the Mock constructor? <cite>create_autospec</cite> does this, so
+an additional keyword argument to Mock is probably unnecessary.</p>
+<p>The <cite>mocksignature</cite> argument to <cite>patch</cite> with a non-<cite>Mock</cite> passed into
+<cite>new_callable</cite> will <em>probably</em> cause an error. Should it just be invalid?</p>
+<p>Note that <cite>NonCallableMock</cite> and <cite>NonCallableMagicMock</cite> still have the unused
+(and unusable) attributes: <cite>return_value</cite>, <cite>side_effect</cite>, <cite>call_count</cite>,
+<cite>call_args</cite> and <cite>call_args_list</cite>. These could be removed or raise errors on
+getting / setting. They also have the <cite>assert_called_with</cite> and
+<cite>assert_called_once_with</cite> methods. Removing these would be pointless as
+fetching them would create a mock (attribute) that could be called without
+error.</p>
+<p>There is some outstanding technical debt. The way autospeccing mocks function
+signatures was copied and modified from <cite>mocksignature</cite>; this could all be
+refactored into one set of functions instead of two. The ways we tell whether
+patchers are started and whether a patcher is being used for a <cite>patch.multiple</cite>
+call are both horrible. There is now a host of helper functions that should
+be rationalised. (It is probably time to split mock into a package instead of a
+module.)</p>
+<p>Passing arbitrary keyword arguments to <cite>create_autospec</cite>, or <cite>patch</cite> with
+<cite>autospec</cite>, when mocking a <em>function</em> works fine. However, the arbitrary
+attributes are set on the created mock, while <cite>create_autospec</cite> returns a
+real function (which doesn&#8217;t have those attributes). Then again, what is the use
+case for using autospec to create functions with attributes that don&#8217;t exist
+on the original?</p>
+<p><cite>mocksignature</cite>, plus the <cite>call_args_list</cite> and <cite>method_calls</cite> attributes of
+<cite>Mock</cite> could all be deprecated.</p>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">CHANGELOG</a><ul>
+<li><a class="reference internal" href="#version-1-0-0">2012/10/07 Version 1.0.0</a></li>
+<li><a class="reference internal" href="#version-1-0-0-beta-1">2012/07/13 Version 1.0.0 beta 1</a></li>
+<li><a class="reference internal" href="#version-1-0-0-alpha-2">2012/05/04 Version 1.0.0 alpha 2</a></li>
+<li><a class="reference internal" href="#version-1-0-0-alpha-1">2012/03/25 Version 1.0.0 alpha 1</a></li>
+<li><a class="reference internal" href="#version-0-8-0">2012/02/13 Version 0.8.0</a></li>
+<li><a class="reference internal" href="#version-0-8-0-release-candidate-2">2012/01/10 Version 0.8.0 release candidate 2</a></li>
+<li><a class="reference internal" href="#version-0-8-0-release-candidate-1">2011/12/29 Version 0.8.0 release candidate 1</a></li>
+<li><a class="reference internal" href="#version-0-8-0-beta-4">2011/10/09 Version 0.8.0 beta 4</a></li>
+<li><a class="reference internal" href="#version-0-8-0-beta-3">2011/08/15 Version 0.8.0 beta 3</a></li>
+<li><a class="reference internal" href="#version-0-8-0-beta-2">2011/08/05 Version 0.8.0 beta 2</a></li>
+<li><a class="reference internal" href="#version-0-8-0-beta-1">2011/07/25 Version 0.8.0 beta 1</a></li>
+<li><a class="reference internal" href="#version-0-8-0-alpha-2">2011/07/16 Version 0.8.0 alpha 2</a></li>
+<li><a class="reference internal" href="#version-0-8-0-alpha-1">2011/06/14 Version 0.8.0 alpha 1</a></li>
+<li><a class="reference internal" href="#version-0-7-2">2011/05/30 Version 0.7.2</a></li>
+<li><a class="reference internal" href="#version-0-7-1">2011/05/06 Version 0.7.1</a></li>
+<li><a class="reference internal" href="#version-0-7-0">2011/03/05 Version 0.7.0</a></li>
+<li><a class="reference internal" href="#version-0-7-0-rc-1">2011/02/16 Version 0.7.0 RC 1</a></li>
+<li><a class="reference internal" href="#version-0-7-0-beta-4">2010/11/12 Version 0.7.0 beta 4</a></li>
+<li><a class="reference internal" href="#version-0-7-0-beta-3">2010/09/18 Version 0.7.0 beta 3</a></li>
+<li><a class="reference internal" href="#version-0-7-0-beta-2">2010/06/23 Version 0.7.0 beta 2</a></li>
+<li><a class="reference internal" href="#version-0-7-0-beta-1">2010/06/22 Version 0.7.0 beta 1</a></li>
+<li><a class="reference internal" href="#version-0-6-0">2009/08/22 Version 0.6.0</a></li>
+<li><a class="reference internal" href="#version-0-5-0">2009/04/17 Version 0.5.0</a></li>
+<li><a class="reference internal" href="#version-0-4-0">2008/10/12 Version 0.4.0</a></li>
+<li><a class="reference internal" href="#version-0-3-1">2007/12/03 Version 0.3.1</a></li>
+<li><a class="reference internal" href="#version-0-3-0">2007/11/30 Version 0.3.0</a></li>
+<li><a class="reference internal" href="#version-0-2-1">2007/11/21 Version 0.2.1</a></li>
+<li><a class="reference internal" href="#version-0-2-0">2007/11/20 Version 0.2.0</a></li>
+<li><a class="reference internal" href="#version-0-1-0">2007/11/19 Version 0.1.0</a></li>
+</ul>
+</li>
+<li><a class="reference internal" href="#todo-and-limitations">TODO and Limitations</a></li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="compare.html"
+ title="previous chapter">Mock Library Comparison</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/changelog.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="compare.html" title="Mock Library Comparison"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/compare.html b/third_party/python/mock-1.0.0/html/compare.html
new file mode 100644
index 0000000000..bfc9d519a8
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/compare.html
@@ -0,0 +1,672 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Mock Library Comparison &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="CHANGELOG" href="changelog.html" />
+ <link rel="prev" title="Further Examples" href="examples.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="changelog.html" title="CHANGELOG"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="examples.html" title="Further Examples"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="mock-library-comparison">
+<h1>Mock Library Comparison<a class="headerlink" href="#mock-library-comparison" title="Permalink to this headline">¶</a></h1>
+<p>A side-by-side comparison of how to accomplish some basic tasks with mock and
+some other popular Python mocking libraries and frameworks.</p>
+<p>These are:</p>
+<ul class="simple">
+<li><a class="reference external" href="http://pypi.python.org/pypi/flexmock">flexmock</a></li>
+<li><a class="reference external" href="http://pypi.python.org/pypi/mox">mox</a></li>
+<li><a class="reference external" href="http://niemeyer.net/mocker">Mocker</a></li>
+<li><a class="reference external" href="http://pypi.python.org/pypi/dingus">dingus</a></li>
+<li><a class="reference external" href="http://pypi.python.org/pypi/fudge">fudge</a></li>
+</ul>
+<p>Popular python mocking frameworks not yet represented here include
+<a class="reference external" href="http://pypi.python.org/pypi/MiniMock">MiniMock</a>.</p>
+<p><a class="reference external" href="http://pmock.sourceforge.net/">pMock</a> (last release 2004 and doesn&#8217;t import
+in recent versions of Python) and
+<a class="reference external" href="http://python-mock.sourceforge.net/">python-mock</a> (last release 2005) are
+intentionally omitted.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>A more up-to-date version of this comparison, tested against all of the mock
+libraries (only the mock examples on this page can be executed as doctests),
+is maintained by Gary Bernhardt:</p>
+<ul class="last simple">
+<li><a class="reference external" href="http://garybernhardt.github.com/python-mock-comparison/">Python Mock Library Comparison</a></li>
+</ul>
+</div>
+<p>This comparison is by no means complete, and also may not be fully idiomatic
+for all the libraries represented. <em>Please</em> contribute corrections, missing
+comparisons, or comparisons for additional libraries to the <a class="reference external" href="https://code.google.com/p/mock/issues/list">mock issue
+tracker</a>.</p>
+<p>This comparison page was originally created by the <a class="reference external" href="https://code.google.com/p/pymox/wiki/MoxComparison">Mox project</a> and then extended for
+<a class="reference external" href="http://has207.github.com/flexmock/compare.html">flexmock and mock</a> by
+Herman Sheremetyev. Dingus examples written by <a class="reference external" href="http://garybernhardt.github.com/python-mock-comparison/">Gary Bernhardt</a>. fudge examples
+provided by <a class="reference external" href="http://farmdev.com/">Kumar McMillan</a>.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>The example tasks here were originally created by Mox, which is a mocking
+<em>framework</em> rather than a library like mock. The tasks shown naturally
+exemplify tasks that frameworks are good at and not the ones they make
+harder. In particular you can take a <cite>Mock</cite> or <cite>MagicMock</cite> object and use
+it in any way you want with no up-front configuration. The same is also
+true for Dingus.</p>
+<p class="last">The examples for mock here assume version 0.7.0.</p>
+</div>
+<div class="section" id="simple-fake-object">
+<h2>Simple fake object<a class="headerlink" href="#simple-fake-object" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&quot;calculated value&quot;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_attribute</span> <span class="o">=</span> <span class="s">&quot;value&quot;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">some_attribute</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">flexmock</span><span class="p">(</span><span class="n">some_method</span><span class="o">=</span><span class="k">lambda</span><span class="p">:</span> <span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">some_attribute</span><span class="o">=</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span><span class="p">)</span>
+
+<span class="c"># Mox</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockAnything</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span> <span class="o">=</span> <span class="s">&quot;value&quot;</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Replay</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span><span class="p">)</span>
+
+<span class="c"># Mocker</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">mock</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">result</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">)</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">replay</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span> <span class="o">=</span> <span class="s">&quot;value&quot;</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">(</span><span class="n">some_attribute</span><span class="o">=</span><span class="s">&quot;value&quot;</span><span class="p">,</span>
+<span class="gp">... </span> <span class="n">some_method__returns</span><span class="o">=</span><span class="s">&quot;calculated value&quot;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">some_attribute</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_fake</span> <span class="o">=</span> <span class="p">(</span><span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">provides</span><span class="p">(</span><span class="s">&#39;some_method&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">returns</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">has_attr</span><span class="p">(</span><span class="n">some_attribute</span><span class="o">=</span><span class="s">&quot;value&quot;</span><span class="p">))</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">my_fake</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">my_fake</span><span class="o">.</span><span class="n">some_attribute</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="simple-mock">
+<h2>Simple mock<a class="headerlink" href="#simple-mock" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&quot;value&quot;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">flexmock</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span><span class="s">&quot;some_method&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">and_return</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">once</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+
+<span class="c"># Mox</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockAnything</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Replay</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Verify</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+
+<span class="c"># Mocker</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">mock</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">result</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">replay</span><span class="p">()</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">verify</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">(</span><span class="n">some_method__returns</span><span class="o">=</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span><span class="o">.</span><span class="n">calls</span><span class="p">()</span><span class="o">.</span><span class="n">once</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@fudge.test</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="n">my_fake</span> <span class="o">=</span> <span class="p">(</span><span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;some_method&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">returns</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">times_called</span><span class="p">(</span><span class="mi">1</span><span class="p">))</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+<span class="c">...</span>
+<span class="gr">AssertionError</span>: <span class="n">fake:my_fake.some_method() was not called</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="creating-partial-mocks">
+<h2>Creating partial mocks<a class="headerlink" href="#creating-partial-mocks" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">SomeObject</span><span class="o">.</span><span class="n">some_method</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="s">&#39;value&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">SomeObject</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="n">flexmock</span><span class="p">(</span><span class="n">SomeObject</span><span class="p">)</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span><span class="s">&quot;some_method&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">and_return</span><span class="p">(</span><span class="s">&#39;value&#39;</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+
+<span class="c"># Mox</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockObject</span><span class="p">(</span><span class="n">SomeObject</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Replay</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Verify</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+
+<span class="c"># Mocker</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">mock</span><span class="p">(</span><span class="n">SomeObject</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">Get</span><span class="p">()</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">result</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">replay</span><span class="p">()</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">verify</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">object</span> <span class="o">=</span> <span class="n">SomeObject</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">object</span><span class="o">.</span><span class="n">some_method</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="nb">object</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">fake</span> <span class="o">=</span> <span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span><span class="o">.</span><span class="n">is_callable</span><span class="p">()</span><span class="o">.</span><span class="n">returns</span><span class="p">(</span><span class="s">&quot;&lt;fudge-value&gt;&quot;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">fudge</span><span class="o">.</span><span class="n">patched_context</span><span class="p">(</span><span class="n">SomeObject</span><span class="p">,</span> <span class="s">&#39;some_method&#39;</span><span class="p">,</span> <span class="n">fake</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">s</span> <span class="o">=</span> <span class="n">SomeObject</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;&lt;fudge-value&gt;&quot;</span><span class="p">,</span> <span class="n">s</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="ensure-calls-are-made-in-specific-order">
+<h2>Ensure calls are made in specific order<a class="headerlink" href="#ensure-calls-are-made-in-specific-order" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">(</span><span class="n">spec</span><span class="o">=</span><span class="n">SomeObject</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.method1()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.method2()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="n">my_mock</span><span class="o">.</span><span class="n">mock_calls</span><span class="p">,</span> <span class="p">[</span><span class="n">call</span><span class="o">.</span><span class="n">method1</span><span class="p">(),</span> <span class="n">call</span><span class="o">.</span><span class="n">method2</span><span class="p">()])</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">flexmock</span><span class="p">(</span><span class="n">SomeObject</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span><span class="s">&#39;method1&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">once</span><span class="o">.</span><span class="n">ordered</span><span class="o">.</span><span class="n">and_return</span><span class="p">(</span><span class="s">&#39;first thing&#39;</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span><span class="s">&#39;method2&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">once</span><span class="o">.</span><span class="n">ordered</span><span class="o">.</span><span class="n">and_return</span><span class="p">(</span><span class="s">&#39;second thing&#39;</span><span class="p">)</span>
+
+<span class="c"># Mox</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockObject</span><span class="p">(</span><span class="n">SomeObject</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="s">&#39;first thing&#39;</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="s">&#39;second thing&#39;</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Replay</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Verify</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+
+<span class="c"># Mocker</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">mock</span><span class="p">()</span>
+<span class="k">with</span> <span class="n">mocker</span><span class="o">.</span><span class="n">order</span><span class="p">():</span>
+ <span class="n">mock</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span>
+ <span class="n">mocker</span><span class="o">.</span><span class="n">result</span><span class="p">(</span><span class="s">&#39;first thing&#39;</span><span class="p">)</span>
+ <span class="n">mock</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span>
+ <span class="n">mocker</span><span class="o">.</span><span class="n">result</span><span class="p">(</span><span class="s">&#39;second thing&#39;</span><span class="p">)</span>
+ <span class="n">mocker</span><span class="o">.</span><span class="n">replay</span><span class="p">()</span>
+ <span class="n">mocker</span><span class="o">.</span><span class="n">verify</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span>
+<span class="go">&lt;Dingus ...&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span>
+<span class="go">&lt;Dingus ...&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">([</span><span class="s">&#39;method1&#39;</span><span class="p">,</span> <span class="s">&#39;method2&#39;</span><span class="p">],</span> <span class="p">[</span><span class="n">call</span><span class="o">.</span><span class="n">name</span> <span class="k">for</span> <span class="n">call</span> <span class="ow">in</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">calls</span><span class="p">])</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@fudge.test</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="n">my_fake</span> <span class="o">=</span> <span class="p">(</span><span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">remember_order</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;method1&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;method2&#39;</span><span class="p">))</span>
+<span class="gp">... </span> <span class="n">my_fake</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">my_fake</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+<span class="c">...</span>
+<span class="gr">AssertionError: Call #1 was fake:my_fake.method2(); Expected</span>: <span class="n">#1 fake:my_fake.method1(), #2 fake:my_fake.method2(), end</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="raising-exceptions">
+<h2>Raising exceptions<a class="headerlink" href="#raising-exceptions" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">SomeException</span><span class="p">(</span><span class="s">&quot;message&quot;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertRaises</span><span class="p">(</span><span class="n">SomeException</span><span class="p">,</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">flexmock</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span><span class="s">&quot;some_method&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">and_raise</span><span class="p">(</span><span class="n">SomeException</span><span class="p">(</span><span class="s">&quot;message&quot;</span><span class="p">))</span>
+<span class="n">assertRaises</span><span class="p">(</span><span class="n">SomeException</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">)</span>
+
+<span class="c"># Mox</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockAnything</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span><span class="o">.</span><span class="n">AndRaise</span><span class="p">(</span><span class="n">SomeException</span><span class="p">(</span><span class="s">&quot;message&quot;</span><span class="p">))</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Replay</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="n">assertRaises</span><span class="p">(</span><span class="n">SomeException</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Verify</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+
+<span class="c"># Mocker</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">mock</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">throw</span><span class="p">(</span><span class="n">SomeException</span><span class="p">(</span><span class="s">&quot;message&quot;</span><span class="p">))</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">replay</span><span class="p">()</span>
+<span class="n">assertRaises</span><span class="p">(</span><span class="n">SomeException</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">)</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">verify</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">exception_raiser</span><span class="p">(</span><span class="n">SomeException</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertRaises</span><span class="p">(</span><span class="n">SomeException</span><span class="p">,</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_fake</span> <span class="o">=</span> <span class="p">(</span><span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">is_callable</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">raises</span><span class="p">(</span><span class="n">SomeException</span><span class="p">(</span><span class="s">&quot;message&quot;</span><span class="p">)))</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_fake</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+<span class="c">...</span>
+<span class="gr">SomeException</span>: <span class="n">message</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="override-new-instances-of-a-class">
+<h2>Override new instances of a class<a class="headerlink" href="#override-new-instances-of-a-class" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">mock</span><span class="o">.</span><span class="n">patch</span><span class="p">(</span><span class="s">&#39;somemodule.Someclass&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">MockClass</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">MockClass</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="n">some_other_object</span>
+<span class="gp">... </span> <span class="n">assertEqual</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">,</span> <span class="n">somemodule</span><span class="o">.</span><span class="n">Someclass</span><span class="p">())</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="n">flexmock</span><span class="p">(</span><span class="n">some_module</span><span class="o">.</span><span class="n">SomeClass</span><span class="p">,</span> <span class="n">new_instances</span><span class="o">=</span><span class="n">some_other_object</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">,</span> <span class="n">some_module</span><span class="o">.</span><span class="n">SomeClass</span><span class="p">())</span>
+
+<span class="c"># Mox</span>
+<span class="c"># (you will probably have mox.Mox() available as self.mox in a real test)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Mox</span><span class="p">()</span><span class="o">.</span><span class="n">StubOutWithMock</span><span class="p">(</span><span class="n">some_module</span><span class="p">,</span> <span class="s">&#39;SomeClass&#39;</span><span class="p">,</span> <span class="n">use_mock_anything</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
+<span class="n">some_module</span><span class="o">.</span><span class="n">SomeClass</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">ReplayAll</span><span class="p">()</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">,</span> <span class="n">some_module</span><span class="o">.</span><span class="n">SomeClass</span><span class="p">())</span>
+
+<span class="c"># Mocker</span>
+<span class="n">instance</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">mock</span><span class="p">()</span>
+<span class="n">klass</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">replace</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="n">spec</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="n">klass</span><span class="p">(</span><span class="s">&#39;expected&#39;</span><span class="p">,</span> <span class="s">&#39;args&#39;</span><span class="p">)</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">result</span><span class="p">(</span><span class="n">instance</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MockClass</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="n">some_other_object</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">dingus</span><span class="o">.</span><span class="n">patch</span><span class="p">(</span><span class="s">&#39;somemodule.SomeClass&#39;</span><span class="p">,</span> <span class="n">MockClass</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">assertEqual</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">,</span> <span class="n">somemodule</span><span class="o">.</span><span class="n">SomeClass</span><span class="p">())</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@fudge.patch</span><span class="p">(</span><span class="s">&#39;somemodule.SomeClass&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">(</span><span class="n">FakeClass</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">FakeClass</span><span class="o">.</span><span class="n">is_callable</span><span class="p">()</span><span class="o">.</span><span class="n">returns</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">assertEqual</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">,</span> <span class="n">somemodule</span><span class="o">.</span><span class="n">SomeClass</span><span class="p">())</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="call-the-same-method-multiple-times">
+<h2>Call the same method multiple times<a class="headerlink" href="#call-the-same-method-multiple-times" title="Permalink to this headline">¶</a></h2>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">You don&#8217;t need to do <em>any</em> configuration to call <cite>mock.Mock()</cite> methods
+multiple times. Attributes like <cite>call_count</cite>, <cite>call_args_list</cite> and
+<cite>method_calls</cite> provide various different ways of making assertions about
+how the mock was used.</p>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.some_method()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.some_method()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="o">.</span><span class="n">call_count</span> <span class="o">&gt;=</span> <span class="mi">2</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock # (verifies that the method gets called at least twice)</span>
+<span class="n">flexmock</span><span class="p">(</span><span class="n">some_object</span><span class="p">)</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span><span class="s">&#39;some_method&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">at_least</span><span class="o">.</span><span class="n">twice</span>
+
+<span class="c"># Mox</span>
+<span class="c"># (does not support variable number of calls, so you need to create a new entry for each explicit call)</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockObject</span><span class="p">(</span><span class="n">some_object</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">(</span><span class="n">mox</span><span class="o">.</span><span class="n">IgnoreArg</span><span class="p">(),</span> <span class="n">mox</span><span class="o">.</span><span class="n">IgnoreArg</span><span class="p">())</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">(</span><span class="n">mox</span><span class="o">.</span><span class="n">IgnoreArg</span><span class="p">(),</span> <span class="n">mox</span><span class="o">.</span><span class="n">IgnoreArg</span><span class="p">())</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Replay</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Verify</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+
+<span class="c"># Mocker</span>
+<span class="c"># (TODO)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="go">&lt;Dingus ...&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="go">&lt;Dingus ...&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="n">my_dingus</span><span class="o">.</span><span class="n">calls</span><span class="p">(</span><span class="s">&#39;some_method&#39;</span><span class="p">))</span> <span class="o">==</span> <span class="mi">2</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@fudge.test</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="n">my_fake</span> <span class="o">=</span> <span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span><span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;some_method&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">times_called</span><span class="p">(</span><span class="mi">2</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">my_fake</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+<span class="c">...</span>
+<span class="gr">AssertionError</span>: <span class="n">fake:my_fake.some_method() was called 1 time(s). Expected 2.</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mock-chained-methods">
+<h2>Mock chained methods<a class="headerlink" href="#mock-chained-methods" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">method3</span> <span class="o">=</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">method1</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">method2</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">method3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">method3</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;some value&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&#39;some value&#39;</span><span class="p">,</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">method3</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">method3</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="c"># (intermediate method calls are automatically assigned to temporary fake objects</span>
+<span class="c"># and can be called with any arguments)</span>
+<span class="n">flexmock</span><span class="p">(</span><span class="n">some_object</span><span class="p">)</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span>
+ <span class="s">&#39;method1.method2.method3&#39;</span>
+<span class="p">)</span><span class="o">.</span><span class="n">with_args</span><span class="p">(</span><span class="n">arg1</span><span class="p">,</span> <span class="n">arg2</span><span class="p">)</span><span class="o">.</span><span class="n">and_return</span><span class="p">(</span><span class="s">&#39;some value&#39;</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&#39;some_value&#39;</span><span class="p">,</span> <span class="n">some_object</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">method3</span><span class="p">(</span><span class="n">arg1</span><span class="p">,</span> <span class="n">arg2</span><span class="p">))</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Mox</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockObject</span><span class="p">(</span><span class="n">some_object</span><span class="p">)</span>
+<span class="n">mock2</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockAnything</span><span class="p">()</span>
+<span class="n">mock3</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockAnything</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="n">mock1</span><span class="p">)</span>
+<span class="n">mock2</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="n">mock2</span><span class="p">)</span>
+<span class="n">mock3</span><span class="o">.</span><span class="n">method3</span><span class="p">(</span><span class="n">arg1</span><span class="p">,</span> <span class="n">arg2</span><span class="p">)</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="s">&#39;some_value&#39;</span><span class="p">)</span>
+<span class="bp">self</span><span class="o">.</span><span class="n">mox</span><span class="o">.</span><span class="n">ReplayAll</span><span class="p">()</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;some_value&quot;</span><span class="p">,</span> <span class="n">some_object</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">method3</span><span class="p">(</span><span class="n">arg1</span><span class="p">,</span> <span class="n">arg2</span><span class="p">))</span>
+<span class="bp">self</span><span class="o">.</span><span class="n">mox</span><span class="o">.</span><span class="n">VerifyAll</span><span class="p">()</span>
+
+<span class="c"># Mocker</span>
+<span class="c"># (TODO)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">method3</span> <span class="o">=</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">method1</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">method2</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">method3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">method3</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;some value&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&#39;some value&#39;</span><span class="p">,</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">method3</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">method3</span><span class="o">.</span><span class="n">calls</span><span class="p">(</span><span class="s">&#39;()&#39;</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span><span class="o">.</span><span class="n">once</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@fudge.test</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="n">my_fake</span> <span class="o">=</span> <span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="p">(</span><span class="n">my_fake</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;method1&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">returns_fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;method2&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">returns_fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;method3&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">with_args</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">returns</span><span class="p">(</span><span class="s">&#39;some value&#39;</span><span class="p">))</span>
+<span class="gp">... </span> <span class="n">assertEqual</span><span class="p">(</span><span class="s">&#39;some value&#39;</span><span class="p">,</span> <span class="n">my_fake</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">method3</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mocking-a-context-manager">
+<h2>Mocking a context manager<a class="headerlink" href="#mocking-a-context-manager" title="Permalink to this headline">¶</a></h2>
+<p>Examples for mock, Dingus and fudge only (so far):</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">my_mock</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">__enter__</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">__exit__</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="bp">None</span><span class="p">,</span> <span class="bp">None</span><span class="p">,</span> <span class="bp">None</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus (nothing special here; all dinguses are &quot;magic mocks&quot;)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">my_dingus</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">__enter__</span><span class="o">.</span><span class="n">calls</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">__exit__</span><span class="o">.</span><span class="n">calls</span><span class="p">(</span><span class="s">&#39;()&#39;</span><span class="p">,</span> <span class="bp">None</span><span class="p">,</span> <span class="bp">None</span><span class="p">,</span> <span class="bp">None</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_fake</span> <span class="o">=</span> <span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span><span class="o">.</span><span class="n">provides</span><span class="p">(</span><span class="s">&#39;__enter__&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">provides</span><span class="p">(</span><span class="s">&#39;__exit__&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">my_fake</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mocking-the-builtin-open-used-as-a-context-manager">
+<h2>Mocking the builtin open used as a context manager<a class="headerlink" href="#mocking-the-builtin-open-used-as-a-context-manager" title="Permalink to this headline">¶</a></h2>
+<p>Examples for mock, Dingus and fudge only (so far):</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">mock</span><span class="o">.</span><span class="n">patch</span><span class="p">(</span><span class="s">&#39;__builtin__.open&#39;</span><span class="p">,</span> <span class="n">my_mock</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">manager</span> <span class="o">=</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">__enter__</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">... </span> <span class="n">manager</span><span class="o">.</span><span class="n">read</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;some data&#39;</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="nb">open</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">h</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">data</span> <span class="o">=</span> <span class="n">h</span><span class="o">.</span><span class="n">read</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">data</span>
+<span class="go">&#39;some data&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p><em>or</em>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">mock</span><span class="o">.</span><span class="n">patch</span><span class="p">(</span><span class="s">&#39;__builtin__.open&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">my_mock</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">my_mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">__enter__</span> <span class="o">=</span> <span class="k">lambda</span> <span class="n">s</span><span class="p">:</span> <span class="n">s</span>
+<span class="gp">... </span> <span class="n">my_mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">__exit__</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">my_mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">read</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;some data&#39;</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="nb">open</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">h</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">data</span> <span class="o">=</span> <span class="n">h</span><span class="o">.</span><span class="n">read</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">data</span>
+<span class="go">&#39;some data&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">dingus</span><span class="o">.</span><span class="n">patch</span><span class="p">(</span><span class="s">&#39;__builtin__.open&#39;</span><span class="p">,</span> <span class="n">my_dingus</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">file_</span> <span class="o">=</span> <span class="nb">open</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">__enter__</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">... </span> <span class="n">file_</span><span class="o">.</span><span class="n">read</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;some data&#39;</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="nb">open</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">h</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">data</span> <span class="o">=</span> <span class="n">f</span><span class="o">.</span><span class="n">read</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">data</span>
+<span class="go">&#39;some data&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">calls</span><span class="p">(</span><span class="s">&#39;()&#39;</span><span class="p">,</span> <span class="s">&#39;foo&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">once</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">contextlib</span> <span class="kn">import</span> <span class="n">contextmanager</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">StringIO</span> <span class="kn">import</span> <span class="n">StringIO</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@contextmanager</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">fake_file</span><span class="p">(</span><span class="n">filename</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">yield</span> <span class="n">StringIO</span><span class="p">(</span><span class="s">&#39;sekrets&#39;</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">fudge</span><span class="o">.</span><span class="n">patch</span><span class="p">(</span><span class="s">&#39;__builtin__.open&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">fake_open</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">fake_open</span><span class="o">.</span><span class="n">is_callable</span><span class="p">()</span><span class="o">.</span><span class="n">calls</span><span class="p">(</span><span class="n">fake_file</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="nb">open</span><span class="p">(</span><span class="s">&#39;/etc/password&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">f</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">data</span> <span class="o">=</span> <span class="n">f</span><span class="o">.</span><span class="n">read</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="go">fake:__builtin__.open</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">data</span>
+<span class="go">&#39;sekrets&#39;</span>
+</pre></div>
+</div>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Mock Library Comparison</a><ul>
+<li><a class="reference internal" href="#simple-fake-object">Simple fake object</a></li>
+<li><a class="reference internal" href="#simple-mock">Simple mock</a></li>
+<li><a class="reference internal" href="#creating-partial-mocks">Creating partial mocks</a></li>
+<li><a class="reference internal" href="#ensure-calls-are-made-in-specific-order">Ensure calls are made in specific order</a></li>
+<li><a class="reference internal" href="#raising-exceptions">Raising exceptions</a></li>
+<li><a class="reference internal" href="#override-new-instances-of-a-class">Override new instances of a class</a></li>
+<li><a class="reference internal" href="#call-the-same-method-multiple-times">Call the same method multiple times</a></li>
+<li><a class="reference internal" href="#mock-chained-methods">Mock chained methods</a></li>
+<li><a class="reference internal" href="#mocking-a-context-manager">Mocking a context manager</a></li>
+<li><a class="reference internal" href="#mocking-the-builtin-open-used-as-a-context-manager">Mocking the builtin open used as a context manager</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="examples.html"
+ title="previous chapter">Further Examples</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="changelog.html"
+ title="next chapter">CHANGELOG</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/compare.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="changelog.html" title="CHANGELOG"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="examples.html" title="Further Examples"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/examples.html b/third_party/python/mock-1.0.0/html/examples.html
new file mode 100644
index 0000000000..8d8113e58c
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/examples.html
@@ -0,0 +1,1006 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Further Examples &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="Mock Library Comparison" href="compare.html" />
+ <link rel="prev" title="Getting Started with Mock" href="getting-started.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="compare.html" title="Mock Library Comparison"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="getting-started.html" title="Getting Started with Mock"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="further-examples">
+<span id="id1"></span><h1>Further Examples<a class="headerlink" href="#further-examples" title="Permalink to this headline">¶</a></h1>
+<p>For comprehensive examples, see the unit tests included in the full source
+distribution.</p>
+<p>Here are some more examples for some slightly more advanced scenarios than in
+the <a class="reference internal" href="getting-started.html#getting-started"><em>getting started</em></a> guide.</p>
+<div class="section" id="mocking-chained-calls">
+<h2>Mocking chained calls<a class="headerlink" href="#mocking-chained-calls" title="Permalink to this headline">¶</a></h2>
+<p>Mocking chained calls is actually straightforward with mock once you
+understand the <a class="reference internal" href="mock.html#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a> attribute. When a mock is called for
+the first time, or you fetch its <cite>return_value</cite> before it has been called, a
+new <cite>Mock</cite> is created.</p>
+<p>This means that you can see how the object returned from a call to a mocked
+object has been used by interrogating the <cite>return_value</cite> mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span><span class="o">.</span><span class="n">foo</span><span class="p">(</span><span class="n">a</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">b</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="go">&lt;Mock name=&#39;mock().foo()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">foo</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">a</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">b</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>From here it is a simple step to configure and then make assertions about
+chained calls. Of course another alternative is writing your code in a more
+testable way in the first place...</p>
+<p>So, suppose we have some code that looks a little bit like this:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Something</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">backend</span> <span class="o">=</span> <span class="n">BackendProvider</span><span class="p">()</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">response</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">backend</span><span class="o">.</span><span class="n">get_endpoint</span><span class="p">(</span><span class="s">&#39;foobar&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">create_call</span><span class="p">(</span><span class="s">&#39;spam&#39;</span><span class="p">,</span> <span class="s">&#39;eggs&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">start_call</span><span class="p">()</span>
+<span class="gp">... </span> <span class="c"># more code</span>
+</pre></div>
+</div>
+<p>Assuming that <cite>BackendProvider</cite> is already well tested, how do we test
+<cite>method()</cite>? Specifically, we want to test that the code section <cite># more
+code</cite> uses the response object in the correct way.</p>
+<p>As this chain of calls is made from an instance attribute we can monkey patch
+the <cite>backend</cite> attribute on a <cite>Something</cite> instance. In this particular case
+we are only interested in the return value from the final call to
+<cite>start_call</cite> so we don&#8217;t have much configuration to do. Let&#8217;s assume the
+object it returns is &#8216;file-like&#8217;, so we&#8217;ll ensure that our response object
+uses the builtin <cite>file</cite> as its <cite>spec</cite>.</p>
+<p>To do this we create a mock instance as our mock backend and create a mock
+response object for it. To set the response as the return value for that final
+<cite>start_call</cite> we could do this:</p>
+<blockquote>
+<div><cite>mock_backend.get_endpoint.return_value.create_call.return_value.start_call.return_value = mock_response</cite>.</div></blockquote>
+<p>We can do that in a slightly nicer way using the <a class="reference internal" href="mock.html#mock.Mock.configure_mock" title="mock.Mock.configure_mock"><tt class="xref py py-meth docutils literal"><span class="pre">configure_mock()</span></tt></a>
+method to directly set the return value for us:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">something</span> <span class="o">=</span> <span class="n">Something</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_response</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">spec</span><span class="o">=</span><span class="nb">file</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_backend</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">config</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;get_endpoint.return_value.create_call.return_value.start_call.return_value&#39;</span><span class="p">:</span> <span class="n">mock_response</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_backend</span><span class="o">.</span><span class="n">configure_mock</span><span class="p">(</span><span class="o">**</span><span class="n">config</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>With these we monkey patch the &#8220;mock backend&#8221; in place and can make the real
+call:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">something</span><span class="o">.</span><span class="n">backend</span> <span class="o">=</span> <span class="n">mock_backend</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">something</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>Using <a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> we can check the chained call with a single
+assert. A chained call is several calls in one line of code, so there will be
+several entries in <cite>mock_calls</cite>. We can use <a class="reference internal" href="helpers.html#mock.call.call_list" title="mock.call.call_list"><tt class="xref py py-meth docutils literal"><span class="pre">call.call_list()</span></tt></a> to create
+this list of calls for us:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">chained</span> <span class="o">=</span> <span class="n">call</span><span class="o">.</span><span class="n">get_endpoint</span><span class="p">(</span><span class="s">&#39;foobar&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">create_call</span><span class="p">(</span><span class="s">&#39;spam&#39;</span><span class="p">,</span> <span class="s">&#39;eggs&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">start_call</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">call_list</span> <span class="o">=</span> <span class="n">chained</span><span class="o">.</span><span class="n">call_list</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">mock_backend</span><span class="o">.</span><span class="n">mock_calls</span> <span class="o">==</span> <span class="n">call_list</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="partial-mocking">
+<h2>Partial mocking<a class="headerlink" href="#partial-mocking" title="Permalink to this headline">¶</a></h2>
+<p>In some tests I wanted to mock out a call to <a class="reference external" href="http://docs.python.org/library/datetime.html#datetime.date.today">datetime.date.today()</a> to return
+a known date, but I didn&#8217;t want to prevent the code under test from
+creating new date objects. Unfortunately <cite>datetime.date</cite> is written in C, and
+so I couldn&#8217;t just monkey-patch out the static <cite>date.today</cite> method.</p>
+<p>I found a simple way of doing this that involved effectively wrapping the date
+class with a mock, but passing through calls to the constructor to the real
+class (and returning real instances).</p>
+<p>The <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch</span> <span class="pre">decorator</span></tt></a> is used here to
+mock out the <cite>date</cite> class in the module under test. The <tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt>
+attribute on the mock date class is then set to a lambda function that returns
+a real date. When the mock date class is called a real date will be
+constructed and returned by <cite>side_effect</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">datetime</span> <span class="kn">import</span> <span class="n">date</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.date&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_date</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">mock_date</span><span class="o">.</span><span class="n">today</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="n">date</span><span class="p">(</span><span class="mi">2010</span><span class="p">,</span> <span class="mi">10</span><span class="p">,</span> <span class="mi">8</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">mock_date</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="k">lambda</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kw</span><span class="p">:</span> <span class="n">date</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kw</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">date</span><span class="o">.</span><span class="n">today</span><span class="p">()</span> <span class="o">==</span> <span class="n">date</span><span class="p">(</span><span class="mi">2010</span><span class="p">,</span> <span class="mi">10</span><span class="p">,</span> <span class="mi">8</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">date</span><span class="p">(</span><span class="mi">2009</span><span class="p">,</span> <span class="mi">6</span><span class="p">,</span> <span class="mi">8</span><span class="p">)</span> <span class="o">==</span> <span class="n">date</span><span class="p">(</span><span class="mi">2009</span><span class="p">,</span> <span class="mi">6</span><span class="p">,</span> <span class="mi">8</span><span class="p">)</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<p>Note that we don&#8217;t patch <cite>datetime.date</cite> globally, we patch <cite>date</cite> in the
+module that <em>uses</em> it. See <a class="reference internal" href="patch.html#where-to-patch"><em>where to patch</em></a>.</p>
+<p>When <cite>date.today()</cite> is called a known date is returned, but calls to the
+<cite>date(...)</cite> constructor still return normal dates. Without this you can find
+yourself having to calculate an expected result using exactly the same
+algorithm as the code under test, which is a classic testing anti-pattern.</p>
+<p>Calls to the date constructor are recorded in the <cite>mock_date</cite> attributes
+(<cite>call_count</cite> and friends), which may also be useful for your tests.</p>
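+<p>For example (a minimal sketch, reusing the <cite>mymodule</cite> and the <cite>date</cite> import
+from the example above), you could check the recorded constructor calls like this:</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; with patch('mymodule.date') as mock_date:
+...     mock_date.side_effect = lambda *args, **kw: date(*args, **kw)
+...     d = mymodule.date(2009, 6, 8)
+...     assert mock_date.call_count == 1
+...     mock_date.assert_called_with(2009, 6, 8)
+...</pre>
+</div>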
+<p>An alternative way of dealing with mocking dates, or other builtin classes,
+is discussed in <a class="reference external" href="http://williamjohnbert.com/2011/07/how-to-unit-testing-in-django-with-mocking-and-patching/">this blog entry</a>.</p>
+</div>
+<div class="section" id="mocking-a-generator-method">
+<h2>Mocking a Generator Method<a class="headerlink" href="#mocking-a-generator-method" title="Permalink to this headline">¶</a></h2>
+<p>A Python generator is a function or method that uses the <a class="reference external" href="http://docs.python.org/reference/simple_stmts.html#the-yield-statement">yield statement</a> to
+return a series of values when iterated over <a class="footnote-reference" href="#id3" id="id2">[1]</a>.</p>
+<p>A generator method / function is called to return the generator object. It is
+the generator object that is then iterated over. The protocol method for
+iteration is <a class="reference external" href="http://docs.python.org/library/stdtypes.html#container.__iter__">__iter__</a>, so we can
+mock this using a <cite>MagicMock</cite>.</p>
+<p>Here&#8217;s an example class with an &#8220;iter&#8221; method implemented as a generator:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Foo</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">iter</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="p">[</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">]:</span>
+<span class="gp">... </span> <span class="k">yield</span> <span class="n">i</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">foo</span> <span class="o">=</span> <span class="n">Foo</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">foo</span><span class="o">.</span><span class="n">iter</span><span class="p">())</span>
+<span class="go">[1, 2, 3]</span>
+</pre></div>
+</div>
+<p>How would we mock this class, and in particular its &#8220;iter&#8221; method?</p>
+<p>To configure the values returned from the iteration (implicit in the call to
+<cite>list</cite>), we need to configure the object returned by the call to <cite>foo.iter()</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock_foo</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_foo</span><span class="o">.</span><span class="n">iter</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="nb">iter</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock_foo</span><span class="o">.</span><span class="n">iter</span><span class="p">())</span>
+<span class="go">[1, 2, 3]</span>
+</pre></div>
+</div>
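+<p>Since the iteration protocol method mentioned above is <cite>__iter__</cite>, you can also
+configure iteration on the <cite>MagicMock</cite> itself rather than on an <cite>iter</cite> method;
+a minimal sketch:</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; m = MagicMock()
+&gt;&gt;&gt; m.__iter__.return_value = iter([1, 2, 3])
+&gt;&gt;&gt; list(m)
+[1, 2, 3]</pre>
+</div>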
+<table class="docutils footnote" frame="void" id="id3" rules="none">
+<colgroup><col class="label" /><col /></colgroup>
+<tbody valign="top">
+<tr><td class="label"><a class="fn-backref" href="#id2">[1]</a></td><td>There are also generator expressions and more <a class="reference external" href="http://www.dabeaz.com/coroutines/index.html">advanced uses</a> of generators, but we aren&#8217;t
+concerned about them here. A very good introduction to generators and how
+powerful they are is: <a class="reference external" href="http://www.dabeaz.com/generators/">Generator Tricks for Systems Programmers</a>.</td></tr>
+</tbody>
+</table>
+</div>
+<div class="section" id="applying-the-same-patch-to-every-test-method">
+<h2>Applying the same patch to every test method<a class="headerlink" href="#applying-the-same-patch-to-every-test-method" title="Permalink to this headline">¶</a></h2>
+<p>If you want several patches in place for multiple test methods the obvious way
+is to apply the patch decorators to every method. This can feel like unnecessary
+repetition. On Python 2.6 or more recent you can use <cite>patch</cite> (in all its
+various forms) as a class decorator. This applies the patches to all test
+methods on the class. Test methods are identified as those whose names start
+with <cite>test</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;mymodule.SomeClass&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_one</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">MockSomeClass</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertTrue</span><span class="p">(</span><span class="n">mymodule</span><span class="o">.</span><span class="n">SomeClass</span> <span class="ow">is</span> <span class="n">MockSomeClass</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_two</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">MockSomeClass</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertTrue</span><span class="p">(</span><span class="n">mymodule</span><span class="o">.</span><span class="n">SomeClass</span> <span class="ow">is</span> <span class="n">MockSomeClass</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">not_a_test</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="s">&#39;something&#39;</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_one&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">test_one</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_two&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">test_two</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_two&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">not_a_test</span><span class="p">()</span>
+<span class="go">&#39;something&#39;</span>
+</pre></div>
+</div>
+<p>An alternative way of managing patches is to use the <a class="reference internal" href="patch.html#start-and-stop"><em>patch methods: start and stop</em></a>.
+These allow you to move the patching into your <cite>setUp</cite> and <cite>tearDown</cite> methods.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">setUp</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.foo&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">mock_foo</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_foo</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertTrue</span><span class="p">(</span><span class="n">mymodule</span><span class="o">.</span><span class="n">foo</span> <span class="ow">is</span> <span class="bp">self</span><span class="o">.</span><span class="n">mock_foo</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">tearDown</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_foo&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">run</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>If you use this technique you must ensure that the patching is &#8220;undone&#8221; by
+calling <cite>stop</cite>. This can be fiddlier than you might think, because if an
+exception is raised in the setUp then tearDown is not called. <a class="reference external" href="http://pypi.python.org/pypi/unittest2">unittest2</a> cleanup functions make this simpler:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">setUp</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.foo&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">addCleanup</span><span class="p">(</span><span class="n">patcher</span><span class="o">.</span><span class="n">stop</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">mock_foo</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_foo</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertTrue</span><span class="p">(</span><span class="n">mymodule</span><span class="o">.</span><span class="n">foo</span> <span class="ow">is</span> <span class="bp">self</span><span class="o">.</span><span class="n">mock_foo</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_foo&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">run</span><span class="p">()</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mocking-unbound-methods">
+<h2>Mocking Unbound Methods<a class="headerlink" href="#mocking-unbound-methods" title="Permalink to this headline">¶</a></h2>
+<p>Whilst writing tests today I needed to patch an <em>unbound method</em> (patching the
+method on the class rather than on the instance). I needed self to be passed
+in as the first argument because I wanted to make asserts about which objects
+were calling this particular method. The issue is that you can&#8217;t patch with a
+mock for this, because if you replace an unbound method with a mock it doesn&#8217;t
+become a bound method when fetched from the instance, and so it doesn&#8217;t get
+self passed in. The workaround is to patch the unbound method with a real
+function instead. The <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> decorator makes it so simple to
+patch out methods with a mock that having to create a real function becomes a
+nuisance.</p>
+<p>If you pass <cite>autospec=True</cite> to patch then it does the patching with a
+<em>real</em> function object. This function object has the same signature as the one
+it is replacing, but delegates to a mock under the hood. You still get your
+mock auto-created in exactly the same way as before. What it means though, is
+that if you use it to patch out an unbound method on a class the mocked
+function will be turned into a bound method if it is fetched from an instance.
+It will have <cite>self</cite> passed in as the first argument, which is exactly what I
+wanted:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Foo</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">foo</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">object</span><span class="p">(</span><span class="n">Foo</span><span class="p">,</span> <span class="s">&#39;foo&#39;</span><span class="p">,</span> <span class="n">autospec</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_foo</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">mock_foo</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;foo&#39;</span>
+<span class="gp">... </span> <span class="n">foo</span> <span class="o">=</span> <span class="n">Foo</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">foo</span><span class="o">.</span><span class="n">foo</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="go">&#39;foo&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_foo</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="n">foo</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>If we don&#8217;t use <cite>autospec=True</cite> then the unbound method is patched out
+with a Mock instance instead, and isn&#8217;t called with <cite>self</cite>.</p>
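+<p>A minimal sketch of the difference: without <cite>autospec</cite> the patched attribute is a
+plain <cite>MagicMock</cite>, so the recorded call has no <cite>self</cite> argument:</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; with patch.object(Foo, 'foo') as mock_foo:
+...     mock_foo.return_value = 'foo'
+...     foo = Foo()
+...     foo.foo()
+...
+'foo'
+&gt;&gt;&gt; mock_foo.assert_called_once_with()</pre>
+</div>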
+</div>
+<div class="section" id="checking-multiple-calls-with-mock">
+<h2>Checking multiple calls with mock<a class="headerlink" href="#checking-multiple-calls-with-mock" title="Permalink to this headline">¶</a></h2>
+<p>mock has a nice API for making assertions about how your mock objects are used.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">foo_bar</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">foo_bar</span><span class="p">(</span><span class="s">&#39;baz&#39;</span><span class="p">,</span> <span class="n">spam</span><span class="o">=</span><span class="s">&#39;eggs&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">foo_bar</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="s">&#39;baz&#39;</span><span class="p">,</span> <span class="n">spam</span><span class="o">=</span><span class="s">&#39;eggs&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>If your mock is only being called once you can use the
+<tt class="xref py py-meth docutils literal"><span class="pre">assert_called_once_with()</span></tt> method that also asserts that the
+<tt class="xref py py-attr docutils literal"><span class="pre">call_count</span></tt> is one.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">foo_bar</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;baz&#39;</span><span class="p">,</span> <span class="n">spam</span><span class="o">=</span><span class="s">&#39;eggs&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">foo_bar</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">foo_bar</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;baz&#39;</span><span class="p">,</span> <span class="n">spam</span><span class="o">=</span><span class="s">&#39;eggs&#39;</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AssertionError</span>: <span class="n">Expected to be called once. Called 2 times.</span>
+</pre></div>
+</div>
+<p>Both <cite>assert_called_with</cite> and <cite>assert_called_once_with</cite> make assertions about
+the <em>most recent</em> call. If your mock is going to be called several times, and
+you want to make assertions about <em>all</em> those calls you can use
+<a class="reference internal" href="mock.html#mock.Mock.call_args_list" title="mock.Mock.call_args_list"><tt class="xref py py-attr docutils literal"><span class="pre">call_args_list</span></tt></a>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="mi">6</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args_list</span>
+<span class="go">[call(1, 2, 3), call(4, 5, 6), call()]</span>
+</pre></div>
+</div>
+<p>The <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> helper makes it easy to make assertions about these calls. You
+can build up a list of expected calls and compare it to <cite>call_args_list</cite>. This
+looks remarkably similar to the repr of the <cite>call_args_list</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">expected</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">),</span> <span class="n">call</span><span class="p">(</span><span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="mi">6</span><span class="p">),</span> <span class="n">call</span><span class="p">()]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args_list</span> <span class="o">==</span> <span class="n">expected</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="coping-with-mutable-arguments">
+<h2>Coping with mutable arguments<a class="headerlink" href="#coping-with-mutable-arguments" title="Permalink to this headline">¶</a></h2>
+<p>Another situation that is rare, but can bite you, is when your mock is called with
+mutable arguments. <cite>call_args</cite> and <cite>call_args_list</cite> store <em>references</em> to the
+arguments. If the arguments are mutated by the code under test then you can no
+longer make assertions about what the values were when the mock was called.</p>
+<p>Here&#8217;s some example code that shows the problem. Imagine the following functions
+defined in &#8216;mymodule&#8217;:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="k">def</span> <span class="nf">frob</span><span class="p">(</span><span class="n">val</span><span class="p">):</span>
+ <span class="k">pass</span>
+
+<span class="k">def</span> <span class="nf">grob</span><span class="p">(</span><span class="n">val</span><span class="p">):</span>
+ <span class="s">&quot;First frob and then clear val&quot;</span>
+ <span class="n">frob</span><span class="p">(</span><span class="n">val</span><span class="p">)</span>
+ <span class="n">val</span><span class="o">.</span><span class="n">clear</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>When we try to test that <cite>grob</cite> calls <cite>frob</cite> with the correct argument, look
+what happens:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.frob&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_frob</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">val</span> <span class="o">=</span> <span class="nb">set</span><span class="p">([</span><span class="mi">6</span><span class="p">])</span>
+<span class="gp">... </span> <span class="n">mymodule</span><span class="o">.</span><span class="n">grob</span><span class="p">(</span><span class="n">val</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">val</span>
+<span class="go">set([])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_frob</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="nb">set</span><span class="p">([</span><span class="mi">6</span><span class="p">]))</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AssertionError: Expected</span>: <span class="n">((set([6]),), {})</span>
+<span class="go">Called with: ((set([]),), {})</span>
+</pre></div>
+</div>
+<p>One possibility would be for mock to copy the arguments you pass in. This
+could then cause problems if you do assertions that rely on object identity
+for equality.</p>
+<p>Here&#8217;s one solution that uses the <tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt>
+functionality. If you provide a <cite>side_effect</cite> function for a mock then
+<cite>side_effect</cite> will be called with the same args as the mock. This gives us an
+opportunity to copy the arguments and store them for later assertions. In this
+example I&#8217;m using <em>another</em> mock to store the arguments so that I can use the
+mock methods for doing the assertion. Again a helper function sets this up for
+me.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">copy</span> <span class="kn">import</span> <span class="n">deepcopy</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">Mock</span><span class="p">,</span> <span class="n">patch</span><span class="p">,</span> <span class="n">DEFAULT</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">copy_call_args</span><span class="p">(</span><span class="n">mock</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">new_mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">args</span> <span class="o">=</span> <span class="n">deepcopy</span><span class="p">(</span><span class="n">args</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">kwargs</span> <span class="o">=</span> <span class="n">deepcopy</span><span class="p">(</span><span class="n">kwargs</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">new_mock</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">DEFAULT</span>
+<span class="gp">... </span> <span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">side_effect</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">new_mock</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.frob&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_frob</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">new_mock</span> <span class="o">=</span> <span class="n">copy_call_args</span><span class="p">(</span><span class="n">mock_frob</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">val</span> <span class="o">=</span> <span class="nb">set</span><span class="p">([</span><span class="mi">6</span><span class="p">])</span>
+<span class="gp">... </span> <span class="n">mymodule</span><span class="o">.</span><span class="n">grob</span><span class="p">(</span><span class="n">val</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">new_mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="nb">set</span><span class="p">([</span><span class="mi">6</span><span class="p">]))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">new_mock</span><span class="o">.</span><span class="n">call_args</span>
+<span class="go">call(set([6]))</span>
+</pre></div>
+</div>
+<p><cite>copy_call_args</cite> is called with the mock that will be called. It returns a new
+mock that we do the assertion on. The <cite>side_effect</cite> function makes a copy of
+the args and calls our <cite>new_mock</cite> with the copy.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>If your mock is only going to be used once there is an easier way of
+checking arguments at the point they are called. You can simply do the
+checking inside a <cite>side_effect</cite> function.</p>
+<div class="last highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="n">arg</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">arg</span> <span class="o">==</span> <span class="nb">set</span><span class="p">([</span><span class="mi">6</span><span class="p">])</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">side_effect</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="nb">set</span><span class="p">([</span><span class="mi">6</span><span class="p">]))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="nb">set</span><span class="p">())</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AssertionError</span>
+</pre></div>
+</div>
+</div>
+<p>An alternative approach is to create a subclass of <cite>Mock</cite> or <cite>MagicMock</cite> that
+copies (using <a class="reference external" href="http://docs.python.org/library/copy.html#copy.deepcopy">copy.deepcopy</a>) the arguments.
+Here&#8217;s an example implementation:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">copy</span> <span class="kn">import</span> <span class="n">deepcopy</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">CopyingMock</span><span class="p">(</span><span class="n">MagicMock</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__call__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">args</span> <span class="o">=</span> <span class="n">deepcopy</span><span class="p">(</span><span class="n">args</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">kwargs</span> <span class="o">=</span> <span class="n">deepcopy</span><span class="p">(</span><span class="n">kwargs</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">CopyingMock</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="n">__call__</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">c</span> <span class="o">=</span> <span class="n">CopyingMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">arg</span> <span class="o">=</span> <span class="nb">set</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">c</span><span class="p">(</span><span class="n">arg</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">arg</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">c</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="nb">set</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">c</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">arg</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AssertionError: Expected call</span>: <span class="n">mock(set([1]))</span>
+<span class="go">Actual call: mock(set([]))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">c</span><span class="o">.</span><span class="n">foo</span>
+<span class="go">&lt;CopyingMock name=&#39;mock.foo&#39; id=&#39;...&#39;&gt;</span>
+</pre></div>
+</div>
+<p>When you subclass <cite>Mock</cite> or <cite>MagicMock</cite> all dynamically created attributes,
+and the <cite>return_value</cite> will use your subclass automatically. That means all
+children of a <cite>CopyingMock</cite> will also have the type <cite>CopyingMock</cite>.</p>
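+<p>A quick check of that claim, assuming the <cite>CopyingMock</cite> class defined above:</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; cm = CopyingMock()
+&gt;&gt;&gt; isinstance(cm.some_attribute, CopyingMock)
+True
+&gt;&gt;&gt; isinstance(cm.return_value, CopyingMock)
+True</pre>
+</div>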
+</div>
+<div class="section" id="raising-exceptions-on-attribute-access">
+<h2>Raising exceptions on attribute access<a class="headerlink" href="#raising-exceptions-on-attribute-access" title="Permalink to this headline">¶</a></h2>
+<p>You can use <a class="reference internal" href="mock.html#mock.PropertyMock" title="mock.PropertyMock"><tt class="xref py py-class docutils literal"><span class="pre">PropertyMock</span></tt></a> to mimic the behaviour of properties. This
+includes raising exceptions when an attribute is accessed.</p>
+<p>Here&#8217;s an example raising a <cite>ValueError</cite> when the &#8216;foo&#8217; attribute is accessed:</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; m = MagicMock()
+&gt;&gt;&gt; p = PropertyMock(side_effect=ValueError)
+&gt;&gt;&gt; type(m).foo = p
+&gt;&gt;&gt; m.foo
+Traceback (most recent call last):
+....
+ValueError</pre>
+</div>
+<p>Because every mock object has its own type, a new subclass of whichever mock
+class you&#8217;re using, all mock objects are isolated from each other. You can
+safely attach properties (or other descriptors or whatever you want in fact)
+to <cite>type(mock)</cite> without affecting other mock objects.</p>
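+<p>A minimal sketch of that isolation: attaching a property to one mock&#8217;s type leaves a
+second, unrelated mock untouched:</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; m = MagicMock()
+&gt;&gt;&gt; other = MagicMock()
+&gt;&gt;&gt; type(m).foo = PropertyMock(return_value='patched')
+&gt;&gt;&gt; m.foo
+'patched'
+&gt;&gt;&gt; other.foo
+&lt;MagicMock name='mock.foo' id='...'&gt;</pre>
+</div>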
+</div>
+<div class="section" id="multiple-calls-with-different-effects">
+<h2>Multiple calls with different effects<a class="headerlink" href="#multiple-calls-with-different-effects" title="Permalink to this headline">¶</a></h2>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">In mock 1.0 the handling of iterable <cite>side_effect</cite> was changed. Any
+exceptions in the iterable will be raised instead of returned.</p>
+</div>
+<p>Handling code that needs to behave differently on subsequent calls during the
+test can be tricky. For example you may have a function that needs to raise
+an exception the first time it is called but return a response on the second
+call (testing retry behaviour).</p>
+<p>One approach is to use a <tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt> function that replaces itself. The
+first time it is called the <cite>side_effect</cite> sets a new <cite>side_effect</cite> that will
+be used for the second call. It then raises an exception:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">second_call</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="s">&#39;response&#39;</span>
+<span class="gp">... </span> <span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">second_call</span>
+<span class="gp">... </span> <span class="k">raise</span> <span class="ne">Exception</span><span class="p">(</span><span class="s">&#39;boom&#39;</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">side_effect</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;first&#39;</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">Exception</span>: <span class="n">boom</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;second&#39;</span><span class="p">)</span>
+<span class="go">&#39;response&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="s">&#39;second&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>Another perfectly valid way would be to pop return values from a list. If the
+return value is an exception, raise it instead of returning it:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">returns</span> <span class="o">=</span> <span class="p">[</span><span class="ne">Exception</span><span class="p">(</span><span class="s">&#39;boom&#39;</span><span class="p">),</span> <span class="s">&#39;response&#39;</span><span class="p">]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">result</span> <span class="o">=</span> <span class="n">returns</span><span class="o">.</span><span class="n">pop</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">result</span><span class="p">,</span> <span class="ne">Exception</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">raise</span> <span class="n">result</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">result</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">side_effect</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;first&#39;</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">Exception</span>: <span class="n">boom</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;second&#39;</span><span class="p">)</span>
+<span class="go">&#39;response&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="s">&#39;second&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>Which approach you prefer is a matter of taste. The first approach is actually
+a line shorter, but the second is arguably more readable.</p>
+</div>
+<div class="section" id="nesting-patches">
+<h2>Nesting Patches<a class="headerlink" href="#nesting-patches" title="Permalink to this headline">¶</a></h2>
+<p>Using patch as a context manager is nice, but if you do multiple patches you
+can end up with nested with statements indenting further and further to the
+right:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_foo</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.Foo&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_foo</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.Bar&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_bar</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.Spam&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_spam</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Foo</span> <span class="ow">is</span> <span class="n">mock_foo</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Bar</span> <span class="ow">is</span> <span class="n">mock_bar</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Spam</span> <span class="ow">is</span> <span class="n">mock_spam</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Foo</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_foo&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">test_foo</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Foo</span> <span class="ow">is</span> <span class="n">original</span>
+</pre></div>
+</div>
+<p>With <a class="reference external" href="http://pypi.python.org/pypi/unittest2">unittest2</a> <cite>cleanup</cite> functions and the <a class="reference internal" href="patch.html#start-and-stop"><em>patch methods: start and stop</em></a> we can
+achieve the same effect without the nested indentation. A simple helper
+method, <cite>create_patch</cite>, puts the patch in place and returns the created mock
+for us:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">create_patch</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="n">name</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">thing</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">addCleanup</span><span class="p">(</span><span class="n">patcher</span><span class="o">.</span><span class="n">stop</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">thing</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_foo</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">mock_foo</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">create_patch</span><span class="p">(</span><span class="s">&#39;mymodule.Foo&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">mock_bar</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">create_patch</span><span class="p">(</span><span class="s">&#39;mymodule.Bar&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">mock_spam</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">create_patch</span><span class="p">(</span><span class="s">&#39;mymodule.Spam&#39;</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Foo</span> <span class="ow">is</span> <span class="n">mock_foo</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Bar</span> <span class="ow">is</span> <span class="n">mock_bar</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Spam</span> <span class="ow">is</span> <span class="n">mock_spam</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Foo</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_foo&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">run</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Foo</span> <span class="ow">is</span> <span class="n">original</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mocking-a-dictionary-with-magicmock">
+<h2>Mocking a dictionary with MagicMock<a class="headerlink" href="#mocking-a-dictionary-with-magicmock" title="Permalink to this headline">¶</a></h2>
+<p>You may want to mock a dictionary, or other container object, recording all
+access to it whilst having it still behave like a dictionary.</p>
+<p>We can do this with <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a>, which will behave like a dictionary,
+and using <a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-data docutils literal"><span class="pre">side_effect</span></tt></a> to delegate dictionary access to a real
+underlying dictionary that is under our control.</p>
+<p>When the <cite>__getitem__</cite> and <cite>__setitem__</cite> methods of our <cite>MagicMock</cite> are called
+(normal dictionary access) then <cite>side_effect</cite> is called with the key (and in
+the case of <cite>__setitem__</cite> the value too). We can also control what is returned.</p>
+<p>After the <cite>MagicMock</cite> has been used we can use attributes like
+<a class="reference internal" href="mock.html#mock.Mock.call_args_list" title="mock.Mock.call_args_list"><tt class="xref py py-data docutils literal"><span class="pre">call_args_list</span></tt></a> to assert about how the dictionary was used:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">my_dict</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;a&#39;</span><span class="p">:</span> <span class="mi">1</span><span class="p">,</span> <span class="s">&#39;b&#39;</span><span class="p">:</span> <span class="mi">2</span><span class="p">,</span> <span class="s">&#39;c&#39;</span><span class="p">:</span> <span class="mi">3</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">getitem</span><span class="p">(</span><span class="n">name</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">my_dict</span><span class="p">[</span><span class="n">name</span><span class="p">]</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">setitem</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">val</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">my_dict</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="o">=</span> <span class="n">val</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__getitem__</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">getitem</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__setitem__</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">setitem</span>
+</pre></div>
+</div>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>An alternative to using <cite>MagicMock</cite> is to use <cite>Mock</cite> and <em>only</em> provide
+the magic methods you specifically want:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__setitem__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">getitem</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__getitem__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">setitem</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>A <em>third</em> option is to use <cite>MagicMock</cite> but passing in <cite>dict</cite> as the <cite>spec</cite>
+(or <cite>spec_set</cite>) argument so that the <cite>MagicMock</cite> created only has
+dictionary magic methods available:</p>
+<div class="last highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">spec_set</span><span class="o">=</span><span class="nb">dict</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__getitem__</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">getitem</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__setitem__</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">setitem</span>
+</pre></div>
+</div>
+</div>
+<p>With these side effect functions in place, the <cite>mock</cite> will behave like a normal
+dictionary while recording the access. It even raises a <cite>KeyError</cite> if you try
+to access a key that doesn&#8217;t exist.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;a&#39;</span><span class="p">]</span>
+<span class="go">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;c&#39;</span><span class="p">]</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;d&#39;</span><span class="p">]</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">KeyError</span>: <span class="n">&#39;d&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;b&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="s">&#39;fish&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;d&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="s">&#39;eggs&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;b&#39;</span><span class="p">]</span>
+<span class="go">&#39;fish&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;d&#39;</span><span class="p">]</span>
+<span class="go">&#39;eggs&#39;</span>
+</pre></div>
+</div>
+<p>After it has been used you can make assertions about the access using the normal
+mock methods and attributes:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__getitem__</span><span class="o">.</span><span class="n">call_args_list</span>
+<span class="go">[call(&#39;a&#39;), call(&#39;c&#39;), call(&#39;d&#39;), call(&#39;b&#39;), call(&#39;d&#39;)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__setitem__</span><span class="o">.</span><span class="n">call_args_list</span>
+<span class="go">[call(&#39;b&#39;, &#39;fish&#39;), call(&#39;d&#39;, &#39;eggs&#39;)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dict</span>
+<span class="go">{&#39;a&#39;: 1, &#39;c&#39;: 3, &#39;b&#39;: &#39;fish&#39;, &#39;d&#39;: &#39;eggs&#39;}</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mock-subclasses-and-their-attributes">
+<h2>Mock subclasses and their attributes<a class="headerlink" href="#mock-subclasses-and-their-attributes" title="Permalink to this headline">¶</a></h2>
+<p>There are various reasons why you might want to subclass <cite>Mock</cite>. One reason
+might be to add helper methods. Here&#8217;s a silly example:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyMock</span><span class="p">(</span><span class="n">MagicMock</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">has_been_called</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">called</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span> <span class="o">=</span> <span class="n">MyMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span>
+<span class="go">&lt;MyMock id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">has_been_called</span><span class="p">()</span>
+<span class="go">False</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">has_been_called</span><span class="p">()</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>The standard behaviour for <cite>Mock</cite> instances is that attributes and the return
+value mocks are of the same type as the mock they are accessed on. This ensures
+that <cite>Mock</cite> attributes are <cite>Mocks</cite> and <cite>MagicMock</cite> attributes are <cite>MagicMocks</cite>
+<a class="footnote-reference" href="#id5" id="id4">[2]</a>. So if you&#8217;re subclassing to add helper methods then they&#8217;ll also be
+available on the attributes and return value mock of instances of your
+subclass.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">foo</span>
+<span class="go">&lt;MyMock name=&#39;mock.foo&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">foo</span><span class="o">.</span><span class="n">has_been_called</span><span class="p">()</span>
+<span class="go">False</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">foo</span><span class="p">()</span>
+<span class="go">&lt;MyMock name=&#39;mock.foo()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">foo</span><span class="o">.</span><span class="n">has_been_called</span><span class="p">()</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>Sometimes this is inconvenient. For example, <a class="reference external" href="https://code.google.com/p/mock/issues/detail?id=105">one user</a> is subclassing mock to
+create a <a class="reference external" href="http://twistedmatrix.com/documents/11.0.0/api/twisted.python.components.html">Twisted adaptor</a>.
+Having this applied to attributes too actually causes errors.</p>
+<p><cite>Mock</cite> (in all its flavours) uses a method called <cite>_get_child_mock</cite> to create
+these &#8220;sub-mocks&#8221; for attributes and return values. You can prevent your
+subclass from being used for attributes by overriding this method. Its signature
+takes arbitrary keyword arguments (<cite>**kwargs</cite>), which are then passed
+on to the mock constructor:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Subclass</span><span class="p">(</span><span class="n">MagicMock</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">_get_child_mock</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">MagicMock</span><span class="p">(</span><span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span> <span class="o">=</span> <span class="n">Subclass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">foo</span>
+<span class="go">&lt;MagicMock name=&#39;mock.foo&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">mymock</span><span class="p">,</span> <span class="n">Subclass</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">mymock</span><span class="o">.</span><span class="n">foo</span><span class="p">,</span> <span class="n">Subclass</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">mymock</span><span class="p">(),</span> <span class="n">Subclass</span><span class="p">)</span>
+</pre></div>
+</div>
+<table class="docutils footnote" frame="void" id="id5" rules="none">
+<colgroup><col class="label" /><col /></colgroup>
+<tbody valign="top">
+<tr><td class="label"><a class="fn-backref" href="#id4">[2]</a></td><td>An exception to this rule are the non-callable mocks. Attributes use the
+callable variant because otherwise non-callable mocks couldn&#8217;t have callable
+methods.</td></tr>
+</tbody>
+</table>
+</div>
+<div class="section" id="mocking-imports-with-patch-dict">
+<h2>Mocking imports with patch.dict<a class="headerlink" href="#mocking-imports-with-patch-dict" title="Permalink to this headline">¶</a></h2>
+<p>One situation where mocking can be hard is where you have a local import inside
+a function. These are harder to mock because they aren&#8217;t using an object from
+the module namespace that we can patch out.</p>
+<p>Generally local imports are to be avoided. They are sometimes done to prevent
+circular dependencies, for which there is <em>usually</em> a much better solution
+(refactor the code), or to avoid &#8220;up front costs&#8221; by delaying the
+import. The latter can also be solved in better ways than an unconditional local
+import: store the module as a class or module attribute and only do the import
+on first use, as sketched below.</p>
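+<p>As a quick sketch of that last alternative (illustrative only and not part of the
+original recipe; the standard library <cite>json</cite> module stands in for an expensive
+import), the module can be stored as a class attribute and imported the first time
+it is needed:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; class Report(object):
+...     _json = None
+...     def dumps(self, data):
+...         # defer the import until the first call, then cache the module
+...         if Report._json is None:
+...             import json
+...             Report._json = json
+...         return Report._json.dumps(data)
+...
+&gt;&gt;&gt; Report().dumps({'a': 1})
+'{"a": 1}'
+</pre></div>
+</div>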
+<p>That aside there is a way to use <cite>mock</cite> to affect the results of an import.
+Importing fetches an <em>object</em> from the <cite>sys.modules</cite> dictionary. Note that it
+fetches an <em>object</em>, which need not be a module. Importing a module for the
+first time results in a module object being put in <cite>sys.modules</cite>, so usually
+when you import something you get a module back. This need not be the case
+however.</p>
+<p>This means you can use <a class="reference internal" href="patch.html#mock.patch.dict" title="mock.patch.dict"><tt class="xref py py-func docutils literal"><span class="pre">patch.dict()</span></tt></a> to <em>temporarily</em> put a mock in place
+in <cite>sys.modules</cite>. Any imports whilst this patch is active will fetch the mock.
+When the patch is complete (the decorated function exits, the with statement
+body is complete or <cite>patcher.stop()</cite> is called) then whatever was there
+previously will be restored safely.</p>
+<p>Here&#8217;s an example that mocks out the &#8216;fooble&#8217; module.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="s">&#39;sys.modules&#39;</span><span class="p">,</span> <span class="p">{</span><span class="s">&#39;fooble&#39;</span><span class="p">:</span> <span class="n">mock</span><span class="p">}):</span>
+<span class="gp">... </span> <span class="kn">import</span> <span class="nn">fooble</span>
+<span class="gp">... </span> <span class="n">fooble</span><span class="o">.</span><span class="n">blob</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="go">&lt;Mock name=&#39;mock.blob()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="s">&#39;fooble&#39;</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">sys</span><span class="o">.</span><span class="n">modules</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">blob</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>As you can see, the <cite>import fooble</cite> succeeds, but on exit there is no &#8216;fooble&#8217;
+left in <cite>sys.modules</cite>.</p>
+<p>This also works for the <cite>from module import name</cite> form:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="s">&#39;sys.modules&#39;</span><span class="p">,</span> <span class="p">{</span><span class="s">&#39;fooble&#39;</span><span class="p">:</span> <span class="n">mock</span><span class="p">}):</span>
+<span class="gp">... </span> <span class="kn">from</span> <span class="nn">fooble</span> <span class="kn">import</span> <span class="n">blob</span>
+<span class="gp">... </span> <span class="n">blob</span><span class="o">.</span><span class="n">blip</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="go">&lt;Mock name=&#39;mock.blob.blip()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">blob</span><span class="o">.</span><span class="n">blip</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>With slightly more work you can also mock package imports:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">modules</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;package&#39;</span><span class="p">:</span> <span class="n">mock</span><span class="p">,</span> <span class="s">&#39;package.module&#39;</span><span class="p">:</span> <span class="n">mock</span><span class="o">.</span><span class="n">module</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="s">&#39;sys.modules&#39;</span><span class="p">,</span> <span class="n">modules</span><span class="p">):</span>
+<span class="gp">... </span> <span class="kn">from</span> <span class="nn">package.module</span> <span class="kn">import</span> <span class="n">fooble</span>
+<span class="gp">... </span> <span class="n">fooble</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="go">&lt;Mock name=&#39;mock.module.fooble()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">fooble</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">()</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="tracking-order-of-calls-and-less-verbose-call-assertions">
+<h2>Tracking order of calls and less verbose call assertions<a class="headerlink" href="#tracking-order-of-calls-and-less-verbose-call-assertions" title="Permalink to this headline">¶</a></h2>
+<p>The <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> class allows you to track the <em>order</em> of method calls on
+your mock objects through the <a class="reference internal" href="mock.html#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">method_calls</span></tt></a> attribute. This
+doesn&#8217;t allow you to track the order of calls between separate mock objects,
+however we can use <a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> to achieve the same effect.</p>
+<p>Because mocks track calls to child mocks in <cite>mock_calls</cite>, and accessing an
+arbitrary attribute of a mock creates a child mock, we can create our separate
+mocks from a parent one. Calls to those child mocks will then all be recorded,
+in order, in the <cite>mock_calls</cite> of the parent:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">manager</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_foo</span> <span class="o">=</span> <span class="n">manager</span><span class="o">.</span><span class="n">foo</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_bar</span> <span class="o">=</span> <span class="n">manager</span><span class="o">.</span><span class="n">bar</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_foo</span><span class="o">.</span><span class="n">something</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.foo.something()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_bar</span><span class="o">.</span><span class="n">other</span><span class="o">.</span><span class="n">thing</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.bar.other.thing()&#39; id=&#39;...&#39;&gt;</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">manager</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call.foo.something(), call.bar.other.thing()]</span>
+</pre></div>
+</div>
+<p>We can then assert about the calls, including the order, by comparing with
+the <cite>mock_calls</cite> attribute on the manager mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">expected_calls</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="o">.</span><span class="n">foo</span><span class="o">.</span><span class="n">something</span><span class="p">(),</span> <span class="n">call</span><span class="o">.</span><span class="n">bar</span><span class="o">.</span><span class="n">other</span><span class="o">.</span><span class="n">thing</span><span class="p">()]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">manager</span><span class="o">.</span><span class="n">mock_calls</span> <span class="o">==</span> <span class="n">expected_calls</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>If <cite>patch</cite> is creating, and putting in place, your mocks then you can attach
+them to a manager mock using the <a class="reference internal" href="mock.html#mock.Mock.attach_mock" title="mock.Mock.attach_mock"><tt class="xref py py-meth docutils literal"><span class="pre">attach_mock()</span></tt></a> method. After
+attaching, calls will be recorded in <cite>mock_calls</cite> of the manager.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">manager</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.Class1&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">MockClass1</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.Class2&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">MockClass2</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">manager</span><span class="o">.</span><span class="n">attach_mock</span><span class="p">(</span><span class="n">MockClass1</span><span class="p">,</span> <span class="s">&#39;MockClass1&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">manager</span><span class="o">.</span><span class="n">attach_mock</span><span class="p">(</span><span class="n">MockClass2</span><span class="p">,</span> <span class="s">&#39;MockClass2&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">MockClass1</span><span class="p">()</span><span class="o">.</span><span class="n">foo</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">MockClass2</span><span class="p">()</span><span class="o">.</span><span class="n">bar</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="go">&lt;MagicMock name=&#39;mock.MockClass1().foo()&#39; id=&#39;...&#39;&gt;</span>
+<span class="go">&lt;MagicMock name=&#39;mock.MockClass2().bar()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">manager</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call.MockClass1(),</span>
+<span class="go"> call.MockClass1().foo(),</span>
+<span class="go"> call.MockClass2(),</span>
+<span class="go"> call.MockClass2().bar()]</span>
+</pre></div>
+</div>
+<p>If many calls have been made, but you&#8217;re only interested in a particular
+sequence of them then an alternative is to use the
+<a class="reference internal" href="mock.html#mock.Mock.assert_has_calls" title="mock.Mock.assert_has_calls"><tt class="xref py py-meth docutils literal"><span class="pre">assert_has_calls()</span></tt></a> method. This takes a list of calls (constructed
+with the <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> object). If that sequence of calls are in
+<a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> then the assert succeeds.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span><span class="o">.</span><span class="n">foo</span><span class="p">()</span><span class="o">.</span><span class="n">bar</span><span class="p">()</span><span class="o">.</span><span class="n">baz</span><span class="p">()</span>
+<span class="go">&lt;MagicMock name=&#39;mock().foo().bar().baz()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">one</span><span class="p">()</span><span class="o">.</span><span class="n">two</span><span class="p">()</span><span class="o">.</span><span class="n">three</span><span class="p">()</span>
+<span class="go">&lt;MagicMock name=&#39;mock.one().two().three()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">calls</span> <span class="o">=</span> <span class="n">call</span><span class="o">.</span><span class="n">one</span><span class="p">()</span><span class="o">.</span><span class="n">two</span><span class="p">()</span><span class="o">.</span><span class="n">three</span><span class="p">()</span><span class="o">.</span><span class="n">call_list</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">assert_has_calls</span><span class="p">(</span><span class="n">calls</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>Even though the chained call <cite>m.one().two().three()</cite> isn&#8217;t the only call that
+has been made to the mock, the assert still succeeds.</p>
+<p>Sometimes a mock may have several calls made to it, and you are only interested
+in asserting about <em>some</em> of those calls. You may not even care about the
+order. In this case you can pass <cite>any_order=True</cite> to <cite>assert_has_calls</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">(</span><span class="mi">1</span><span class="p">),</span> <span class="n">m</span><span class="o">.</span><span class="n">two</span><span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">),</span> <span class="n">m</span><span class="o">.</span><span class="n">seven</span><span class="p">(</span><span class="mi">7</span><span class="p">),</span> <span class="n">m</span><span class="o">.</span><span class="n">fifty</span><span class="p">(</span><span class="s">&#39;50&#39;</span><span class="p">)</span>
+<span class="go">(...)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">calls</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="o">.</span><span class="n">fifty</span><span class="p">(</span><span class="s">&#39;50&#39;</span><span class="p">),</span> <span class="n">call</span><span class="p">(</span><span class="mi">1</span><span class="p">),</span> <span class="n">call</span><span class="o">.</span><span class="n">seven</span><span class="p">(</span><span class="mi">7</span><span class="p">)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">assert_has_calls</span><span class="p">(</span><span class="n">calls</span><span class="p">,</span> <span class="n">any_order</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="more-complex-argument-matching">
+<h2>More complex argument matching<a class="headerlink" href="#more-complex-argument-matching" title="Permalink to this headline">¶</a></h2>
+<p>Using the same basic concept as <cite>ANY</cite> we can implement matchers to do more
+complex assertions on objects used as arguments to mocks.</p>
+<p>Suppose we expect some object to be passed to a mock that by default
+compares equal based on object identity (which is the Python default for user
+defined classes). To use <a class="reference internal" href="mock.html#mock.Mock.assert_called_with" title="mock.Mock.assert_called_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt></a> we would need to pass
+in the exact same object. If we are only interested in some of the attributes
+of this object then we can create a matcher that will check these attributes
+for us.</p>
+<p>You can see in this example how a &#8216;standard&#8217; call to <cite>assert_called_with</cite> isn&#8217;t
+sufficient:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Foo</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">a</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">b</span> <span class="o">=</span> <span class="n">a</span><span class="p">,</span> <span class="n">b</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="n">Foo</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">Foo</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AssertionError: Expected</span>: <span class="n">call(&lt;__main__.Foo object at 0x...&gt;)</span>
+<span class="go">Actual call: call(&lt;__main__.Foo object at 0x...&gt;)</span>
+</pre></div>
+</div>
+<p>A comparison function for our <cite>Foo</cite> class might look something like this:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">compare</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">if</span> <span class="ow">not</span> <span class="nb">type</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span> <span class="o">==</span> <span class="nb">type</span><span class="p">(</span><span class="n">other</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">False</span>
+<span class="gp">... </span> <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">a</span> <span class="o">!=</span> <span class="n">other</span><span class="o">.</span><span class="n">a</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">False</span>
+<span class="gp">... </span> <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">b</span> <span class="o">!=</span> <span class="n">other</span><span class="o">.</span><span class="n">b</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">False</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">True</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<p>And a matcher object that can use comparison functions like this for its
+equality operation would look something like this:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Matcher</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">compare</span><span class="p">,</span> <span class="n">some_obj</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">compare</span> <span class="o">=</span> <span class="n">compare</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">some_obj</span> <span class="o">=</span> <span class="n">some_obj</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__eq__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">compare</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">some_obj</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<p>Putting all this together:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">match_foo</span> <span class="o">=</span> <span class="n">Matcher</span><span class="p">(</span><span class="n">compare</span><span class="p">,</span> <span class="n">Foo</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">match_foo</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>The <cite>Matcher</cite> is instantiated with our compare function and the <cite>Foo</cite> object
+we want to compare against. In <cite>assert_called_with</cite> the <cite>Matcher</cite> equality
+method will be called, which compares the object the mock was called with
+against the one we created our matcher with. If they match then
+<cite>assert_called_with</cite> passes, and if they don&#8217;t an <cite>AssertionError</cite> is raised:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">match_wrong</span> <span class="o">=</span> <span class="n">Matcher</span><span class="p">(</span><span class="n">compare</span><span class="p">,</span> <span class="n">Foo</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">match_wrong</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AssertionError: Expected</span>: <span class="n">((&lt;Matcher object at 0x...&gt;,), {})</span>
+<span class="go">Called with: ((&lt;Foo object at 0x...&gt;,), {})</span>
+</pre></div>
+</div>
+<p>With a bit of tweaking you could have the comparison function raise the
+<cite>AssertionError</cite> directly and provide a more useful failure message.</p>
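+<p>As an illustrative sketch (not part of mock itself), a variant of the <cite>Matcher</cite>
+above could raise the <cite>AssertionError</cite> from its equality method and include the
+compared objects in the message:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; class RaisingMatcher(object):
+...     def __init__(self, compare, some_obj):
+...         self.compare = compare
+...         self.some_obj = some_obj
+...     def __eq__(self, other):
+...         # raise with a descriptive message instead of returning False
+...         if not self.compare(self.some_obj, other):
+...             raise AssertionError('%r does not match %r' % (other, self.some_obj))
+...         return True
+...
+</pre></div>
+</div>
+<p>Used in place of <cite>Matcher</cite>, a failing <cite>assert_called_with</cite> would then report
+the two mismatching objects rather than the generic expected and actual message.</p>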
+<p>As of version 1.5, the Python testing library <a class="reference external" href="http://pypi.python.org/pypi/PyHamcrest">PyHamcrest</a> provides similar functionality
+that may be useful here, in the form of its equality matcher
+(<a class="reference external" href="http://packages.python.org/PyHamcrest/integration.html#hamcrest.library.integration.match_equality">hamcrest.library.integration.match_equality</a>).</p>
+</div>
+<div class="section" id="less-verbose-configuration-of-mock-objects">
+<h2>Less verbose configuration of mock objects<a class="headerlink" href="#less-verbose-configuration-of-mock-objects" title="Permalink to this headline">¶</a></h2>
+<p>This recipe, for easier configuration of mock objects, is now part of <cite>Mock</cite>.
+See the <a class="reference internal" href="mock.html#mock.Mock.configure_mock" title="mock.Mock.configure_mock"><tt class="xref py py-meth docutils literal"><span class="pre">configure_mock()</span></tt></a> method.</p>
+</div>
+<div class="section" id="matching-any-argument-in-assertions">
+<h2>Matching any argument in assertions<a class="headerlink" href="#matching-any-argument-in-assertions" title="Permalink to this headline">¶</a></h2>
+<p>This example is now built in to mock. See <a class="reference internal" href="helpers.html#mock.ANY" title="mock.ANY"><tt class="xref py py-data docutils literal"><span class="pre">ANY</span></tt></a>.</p>
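+<p>A brief sketch of its use, with <cite>ANY</cite> imported from <cite>mock</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = Mock(return_value=None)
+&gt;&gt;&gt; mock('foo', bar=object())
+&gt;&gt;&gt; mock.assert_called_with('foo', bar=ANY)
+</pre></div>
+</div>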
+</div>
+<div class="section" id="mocking-properties">
+<h2>Mocking Properties<a class="headerlink" href="#mocking-properties" title="Permalink to this headline">¶</a></h2>
+<p>This example is now built in to mock. See <a class="reference internal" href="mock.html#mock.PropertyMock" title="mock.PropertyMock"><tt class="xref py py-class docutils literal"><span class="pre">PropertyMock</span></tt></a>.</p>
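+<p>A brief sketch of its use (the class and property names here are illustrative
+only):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; class Box(object):
+...     @property
+...     def contents(self):
+...         return 'original'
+...
+&gt;&gt;&gt; with patch('__main__.Box.contents', new_callable=PropertyMock) as mock_contents:
+...     mock_contents.return_value = 'mocked'
+...     Box().contents
+...
+'mocked'
+</pre></div>
+</div>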
+</div>
+<div class="section" id="mocking-open">
+<h2>Mocking open<a class="headerlink" href="#mocking-open" title="Permalink to this headline">¶</a></h2>
+<p>This example is now built in to mock. See <a class="reference internal" href="helpers.html#mock.mock_open" title="mock.mock_open"><tt class="xref py py-func docutils literal"><span class="pre">mock_open()</span></tt></a>.</p>
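+<p>A brief sketch of the helper, assuming <cite>mock_open</cite> and <cite>patch</cite> are imported
+from <cite>mock</cite> as elsewhere on this page (the file name is illustrative only):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; m = mock_open(read_data='some data')
+&gt;&gt;&gt; with patch('__main__.open', m, create=True):
+...     with open('any-file') as handle:
+...         result = handle.read()
+...
+&gt;&gt;&gt; m.assert_called_once_with('any-file')
+&gt;&gt;&gt; result
+'some data'
+</pre></div>
+</div>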
+</div>
+<div class="section" id="mocks-without-some-attributes">
+<h2>Mocks without some attributes<a class="headerlink" href="#mocks-without-some-attributes" title="Permalink to this headline">¶</a></h2>
+<p>This example is now built in to mock. See <a class="reference internal" href="mock.html#deleting-attributes"><em>Deleting Attributes</em></a>.</p>
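+<p>A brief sketch of the behaviour:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = MagicMock()
+&gt;&gt;&gt; hasattr(mock, 'method')
+True
+&gt;&gt;&gt; del mock.method
+&gt;&gt;&gt; hasattr(mock, 'method')
+False
+&gt;&gt;&gt; mock.method
+Traceback (most recent call last):
+  ...
+AttributeError: method
+</pre></div>
+</div>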
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Further Examples</a><ul>
+<li><a class="reference internal" href="#mocking-chained-calls">Mocking chained calls</a></li>
+<li><a class="reference internal" href="#partial-mocking">Partial mocking</a></li>
+<li><a class="reference internal" href="#mocking-a-generator-method">Mocking a Generator Method</a></li>
+<li><a class="reference internal" href="#applying-the-same-patch-to-every-test-method">Applying the same patch to every test method</a></li>
+<li><a class="reference internal" href="#mocking-unbound-methods">Mocking Unbound Methods</a></li>
+<li><a class="reference internal" href="#checking-multiple-calls-with-mock">Checking multiple calls with mock</a></li>
+<li><a class="reference internal" href="#coping-with-mutable-arguments">Coping with mutable arguments</a></li>
+<li><a class="reference internal" href="#raising-exceptions-on-attribute-access">Raising exceptions on attribute access</a></li>
+<li><a class="reference internal" href="#multiple-calls-with-different-effects">Multiple calls with different effects</a></li>
+<li><a class="reference internal" href="#nesting-patches">Nesting Patches</a></li>
+<li><a class="reference internal" href="#mocking-a-dictionary-with-magicmock">Mocking a dictionary with MagicMock</a></li>
+<li><a class="reference internal" href="#mock-subclasses-and-their-attributes">Mock subclasses and their attributes</a></li>
+<li><a class="reference internal" href="#mocking-imports-with-patch-dict">Mocking imports with patch.dict</a></li>
+<li><a class="reference internal" href="#tracking-order-of-calls-and-less-verbose-call-assertions">Tracking order of calls and less verbose call assertions</a></li>
+<li><a class="reference internal" href="#more-complex-argument-matching">More complex argument matching</a></li>
+<li><a class="reference internal" href="#less-verbose-configuration-of-mock-objects">Less verbose configuration of mock objects</a></li>
+<li><a class="reference internal" href="#matching-any-argument-in-assertions">Matching any argument in assertions</a></li>
+<li><a class="reference internal" href="#mocking-properties">Mocking Properties</a></li>
+<li><a class="reference internal" href="#mocking-open">Mocking open</a></li>
+<li><a class="reference internal" href="#mocks-without-some-attributes">Mocks without some attributes</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="getting-started.html"
+ title="previous chapter">Getting Started with Mock</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="compare.html"
+ title="next chapter">Mock Library Comparison</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/examples.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="compare.html" title="Mock Library Comparison"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="getting-started.html" title="Getting Started with Mock"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/genindex.html b/third_party/python/mock-1.0.0/html/genindex.html
new file mode 100644
index 0000000000..2d1acf0b40
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/genindex.html
@@ -0,0 +1,479 @@
+
+
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Index &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="#" title="General Index"
+ accesskey="I">index</a></li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+
+<h1 id="index">Index</h1>
+
+<div class="genindex-jumpbox">
+ <a href="#_"><strong>_</strong></a>
+ | <a href="#A"><strong>A</strong></a>
+ | <a href="#C"><strong>C</strong></a>
+ | <a href="#D"><strong>D</strong></a>
+ | <a href="#E"><strong>E</strong></a>
+ | <a href="#F"><strong>F</strong></a>
+ | <a href="#G"><strong>G</strong></a>
+ | <a href="#H"><strong>H</strong></a>
+ | <a href="#I"><strong>I</strong></a>
+ | <a href="#M"><strong>M</strong></a>
+ | <a href="#N"><strong>N</strong></a>
+ | <a href="#O"><strong>O</strong></a>
+ | <a href="#P"><strong>P</strong></a>
+ | <a href="#R"><strong>R</strong></a>
+ | <a href="#S"><strong>S</strong></a>
+ | <a href="#T"><strong>T</strong></a>
+ | <a href="#U"><strong>U</strong></a>
+ | <a href="#W"><strong>W</strong></a>
+
+</div>
+<h2 id="_">_</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#index-5">__call__</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.__class__">__class__ (Mock attribute)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#mock.Mock.__dir__">__dir__() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock._get_child_mock">_get_child_mock() (Mock method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="A">A</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="helpers.html#mock.ANY">ANY (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="index.html#index-8">articles</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.assert_any_call">assert_any_call() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.assert_called_once_with">assert_called_once_with() (Mock method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#mock.Mock.assert_called_with">assert_called_with() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.assert_has_calls">assert_has_calls() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.attach_mock">attach_mock() (Mock method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="C">C</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="helpers.html#mock.call">call() (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.call_args">call_args (Mock attribute)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.call_args_list">call_args_list (Mock attribute)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.call_count">call_count (Mock attribute)</a>
+ </dt>
+
+
+ <dt><a href="helpers.html#mock.call.call_list">call_list() (call method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#mock.Mock.called">called (Mock attribute)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#index-6">calling</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.configure_mock">configure_mock() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="helpers.html#mock.create_autospec">create_autospec() (in module mock)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="D">D</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="sentinel.html#mock.DEFAULT">DEFAULT (in module mock)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="E">E</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-5">easy_install</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="F">F</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="helpers.html#mock.FILTER_DIR">FILTER_DIR (in module mock)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="G">G</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="getting-started.html#index-0">Getting Started</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="H">H</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-3">hg</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="I">I</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-1">installing</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-0">introduction</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="M">M</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="magicmock.html#mock.MagicMock">MagicMock (class in mock)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.method_calls">method_calls (Mock attribute)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock">Mock (class in mock)</a>
+ </dt>
+
+
+ <dt><a href="index.html#module-mock">mock (module)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#mock.Mock.mock_add_spec">mock_add_spec() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.mock_calls">mock_calls (Mock attribute)</a>
+ </dt>
+
+
+ <dt><a href="helpers.html#mock.mock_open">mock_open() (in module mock)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="N">N</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#index-3">name</a>
+ </dt>
+
+
+ <dt><a href="magicmock.html#mock.NonCallableMagicMock">NonCallableMagicMock (class in mock)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#mock.NonCallableMock">NonCallableMock (class in mock)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="O">O</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-12">older versions</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="P">P</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="patch.html#mock.patch">patch() (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="patch.html#mock.patch.dict">patch.dict() (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="patch.html#mock.patch.multiple">patch.multiple() (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="patch.html#mock.patch.object">patch.object() (in module mock)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="patch.html#mock.patch.stopall">patch.stopall() (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="index.html#index-4">pip</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.PropertyMock">PropertyMock (class in mock)</a>
+ </dt>
+
+
+ <dt><a href="index.html#index-11">Python 3</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="R">R</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-7">references</a>
+ </dt>
+
+
+ <dt><a href="index.html#index-2">repository</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#mock.Mock.reset_mock">reset_mock() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#index-1">return_value</a>
+ </dt>
+
+ <dd><dl>
+
+ <dt><a href="mock.html#mock.Mock.return_value">(Mock attribute)</a>
+ </dt>
+
+ </dl></dd>
+ </dl></td>
+</tr></table>
+
+<h2 id="S">S</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="sentinel.html#mock.sentinel">sentinel (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="index.html#index-6">setuptools</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#index-0">side_effect</a>
+ </dt>
+
+ <dd><dl>
+
+ <dt><a href="mock.html#mock.Mock.side_effect">(Mock attribute)</a>
+ </dt>
+
+ </dl></dd>
+
+ <dt><a href="mock.html#index-4">spec</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="T">T</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-9">tests</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="U">U</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-10">unittest2</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="W">W</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#index-2">wraps</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+
+
+
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="#" title="General Index"
+ >index</a></li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/getting-started.html b/third_party/python/mock-1.0.0/html/getting-started.html
new file mode 100644
index 0000000000..73d708ac9e
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/getting-started.html
@@ -0,0 +1,510 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Getting Started with Mock &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="Further Examples" href="examples.html" />
+ <link rel="prev" title="Mocking Magic Methods" href="magicmock.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="examples.html" title="Further Examples"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="magicmock.html" title="Mocking Magic Methods"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="getting-started-with-mock">
+<h1>Getting Started with Mock<a class="headerlink" href="#getting-started-with-mock" title="Permalink to this headline">¶</a></h1>
+<span class="target" id="getting-started"></span><span class="target" id="index-0"></span><div class="section" id="using-mock">
+<h2>Using Mock<a class="headerlink" href="#using-mock" title="Permalink to this headline">¶</a></h2>
+<div class="section" id="mock-patching-methods">
+<h3>Mock Patching Methods<a class="headerlink" href="#mock-patching-methods" title="Permalink to this headline">¶</a></h3>
+<p>Common uses for <tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt> objects include:</p>
+<ul class="simple">
+<li>Patching methods</li>
+<li>Recording method calls on objects</li>
+</ul>
+<p>You might want to replace a method on an object to check that
+it is called with the correct arguments by another part of the system:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">real</span> <span class="o">=</span> <span class="n">SomeClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">method</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s">&#39;method&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">key</span><span class="o">=</span><span class="s">&#39;value&#39;</span><span class="p">)</span>
+<span class="go">&lt;MagicMock name=&#39;method()&#39; id=&#39;...&#39;&gt;</span>
+</pre></div>
+</div>
+<p>Once our mock has been used (<cite>real.method</cite> in this example) it has methods
+and attributes that allow you to make assertions about how it has been used.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">In most of these examples the <tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt> and <tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt> classes
+are interchangeable. As <cite>MagicMock</cite> is the more capable class, it is
+a sensible one to use by default.</p>
+</div>
+<p>Once the mock has been called, its <tt class="xref py py-attr docutils literal"><span class="pre">called</span></tt> attribute is set to
+<cite>True</cite>. More importantly, we can use the <tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt> or
+<tt class="xref py py-meth docutils literal"><span class="pre">assert_called_once_with()</span></tt> method to check that it was called with
+the correct arguments.</p>
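+<p>For instance, continuing the first example above (where <cite>real.method</cite> was
+called with <cite>3, 4, 5, key=&#39;value&#39;</cite>), a minimal check might look like this:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; real.method.assert_called_with(3, 4, 5, key=&#39;value&#39;)
+</pre></div>
+</div>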
+<p>This example tests that calling <cite>ProductionClass().method</cite> results in a call to
+the <cite>something</cite> method:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">MagicMock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">ProductionClass</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">something</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">something</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span> <span class="o">=</span> <span class="n">ProductionClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">something</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">something</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mock-for-method-calls-on-an-object">
+<h3>Mock for Method Calls on an Object<a class="headerlink" href="#mock-for-method-calls-on-an-object" title="Permalink to this headline">¶</a></h3>
+<p>In the last example we patched a method directly on an object to check that it
+was called correctly. Another common use case is to pass an object into a
+method (or some part of the system under test) and then check that it is used
+in the correct way.</p>
+<p>The simple <cite>ProductionClass</cite> below has a <cite>closer</cite> method. If it is called with
+an object then it calls <cite>close</cite> on it.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">ProductionClass</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">closer</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">something</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">something</span><span class="o">.</span><span class="n">close</span><span class="p">()</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<p>So to test it we need to pass in an object with a <cite>close</cite> method and check
+that it was called correctly.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">real</span> <span class="o">=</span> <span class="n">ProductionClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">closer</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">close</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>We don&#8217;t have to do any work to provide the &#8216;close&#8217; method on our mock.
+Accessing close creates it. So, if &#8216;close&#8217; hasn&#8217;t already been called then
+accessing it in the test will create it, but <tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt>
+will raise a failure exception.</p>
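+<p>A rough sketch of that failure mode, using a fresh mock whose <cite>close</cite> has
+never been called (the exact failure message varies between mock versions):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = Mock()
+&gt;&gt;&gt; mock.close.assert_called_with()
+Traceback (most recent call last):
+  ...
+AssertionError: Expected call: close()
+Not called
+</pre></div>
+</div>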
+</div>
+<div class="section" id="mocking-classes">
+<h3>Mocking Classes<a class="headerlink" href="#mocking-classes" title="Permalink to this headline">¶</a></h3>
+<p>A common use case is to mock out classes instantiated by your code under test.
+When you patch a class, that class is replaced with a mock. Instances
+are created by <em>calling the class</em>. This means you access the &#8220;mock instance&#8221;
+by looking at the return value of the mocked class.</p>
+<p>In the example below we have a function <cite>some_function</cite> that instantiates <cite>Foo</cite>
+and calls a method on it. The call to <cite>patch</cite> replaces the class <cite>Foo</cite> with a
+mock. The <cite>Foo</cite> instance is the result of calling the mock, so it is configured
+by modifying the mock <tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">some_function</span><span class="p">():</span>
+<span class="gp">... </span> <span class="n">instance</span> <span class="o">=</span> <span class="n">module</span><span class="o">.</span><span class="n">Foo</span><span class="p">()</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;module.Foo&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">instance</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">... </span> <span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;the result&#39;</span>
+<span class="gp">... </span> <span class="n">result</span> <span class="o">=</span> <span class="n">some_function</span><span class="p">()</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">result</span> <span class="o">==</span> <span class="s">&#39;the result&#39;</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="naming-your-mocks">
+<h3>Naming your mocks<a class="headerlink" href="#naming-your-mocks" title="Permalink to this headline">¶</a></h3>
+<p>It can be useful to give your mocks a name. The name is shown in the repr of
+the mock and can be helpful when the mock appears in test failure messages. The
+name is also propagated to attributes or methods of the mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s">&#39;foo&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span>
+<span class="go">&lt;MagicMock name=&#39;foo&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span>
+<span class="go">&lt;MagicMock name=&#39;foo.method&#39; id=&#39;...&#39;&gt;</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="tracking-all-calls">
+<h3>Tracking all Calls<a class="headerlink" href="#tracking-all-calls" title="Permalink to this headline">¶</a></h3>
+<p>Often you want to track more than a single call to a method. The
+<tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt> attribute records all calls
+to child attributes of the mock - and also to their children.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">&lt;MagicMock name=&#39;mock.method()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">attribute</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">10</span><span class="p">,</span> <span class="n">x</span><span class="o">=</span><span class="mi">53</span><span class="p">)</span>
+<span class="go">&lt;MagicMock name=&#39;mock.attribute.method()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call.method(), call.attribute.method(10, x=53)]</span>
+</pre></div>
+</div>
+<p>If you make an assertion about <cite>mock_calls</cite> and any unexpected methods
+have been called, then the assertion will fail. This is useful because as well
+as asserting that the calls you expected have been made, you are also checking
+that they were made in the right order and with no additional calls.</p>
+<p>You use the <tt class="xref py py-data docutils literal"><span class="pre">call</span></tt> object to construct lists for comparing with
+<cite>mock_calls</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">expected</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="o">.</span><span class="n">method</span><span class="p">(),</span> <span class="n">call</span><span class="o">.</span><span class="n">attribute</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">10</span><span class="p">,</span> <span class="n">x</span><span class="o">=</span><span class="mi">53</span><span class="p">)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span> <span class="o">==</span> <span class="n">expected</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="setting-return-values-and-attributes">
+<h3>Setting Return Values and Attributes<a class="headerlink" href="#setting-return-values-and-attributes" title="Permalink to this headline">¶</a></h3>
+<p>Setting the return values on a mock object is trivially easy:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="mi">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+<p>Of course you can do the same for methods on the mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="mi">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+<p>The return value can also be set in the constructor:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+<p>If you need an attribute set on your mock, just do it:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">x</span> <span class="o">=</span> <span class="mi">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">x</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+<p>Sometimes you want to mock up a more complex situation, for example
+<cite>mock.connection.cursor().execute(&#8220;SELECT 1&#8221;)</cite>. If we want this call to
+return a list, then we have to configure the result of the nested call.</p>
+<p>We can use <tt class="xref py py-data docutils literal"><span class="pre">call</span></tt> to construct the set of calls in a &#8220;chained call&#8221; like
+this for easy assertion afterwards:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">cursor</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">connection</span><span class="o">.</span><span class="n">cursor</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">cursor</span><span class="o">.</span><span class="n">execute</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="p">[</span><span class="s">&#39;foo&#39;</span><span class="p">]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">connection</span><span class="o">.</span><span class="n">cursor</span><span class="p">()</span><span class="o">.</span><span class="n">execute</span><span class="p">(</span><span class="s">&quot;SELECT 1&quot;</span><span class="p">)</span>
+<span class="go">[&#39;foo&#39;]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">expected</span> <span class="o">=</span> <span class="n">call</span><span class="o">.</span><span class="n">connection</span><span class="o">.</span><span class="n">cursor</span><span class="p">()</span><span class="o">.</span><span class="n">execute</span><span class="p">(</span><span class="s">&quot;SELECT 1&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">call_list</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call.connection.cursor(), call.connection.cursor().execute(&#39;SELECT 1&#39;)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span> <span class="o">==</span> <span class="n">expected</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>It is the call to <cite>.call_list()</cite> that turns our call object into a list of
+calls representing the chained calls.</p>
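+<p>Viewed on its own, the chained call object expands into the same list we saw
+in <cite>mock_calls</cite> above:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; call.connection.cursor().execute(&quot;SELECT 1&quot;).call_list()
+[call.connection.cursor(), call.connection.cursor().execute(&#39;SELECT 1&#39;)]
+</pre></div>
+</div>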
+</div>
+<div class="section" id="raising-exceptions-with-mocks">
+<h3>Raising exceptions with mocks<a class="headerlink" href="#raising-exceptions-with-mocks" title="Permalink to this headline">¶</a></h3>
+<p>A useful attribute is <tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt>. If you set this to an
+exception class or instance then the exception will be raised when the mock
+is called.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="ne">Exception</span><span class="p">(</span><span class="s">&#39;Boom!&#39;</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">Exception</span>: <span class="n">Boom!</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="side-effect-functions-and-iterables">
+<h3>Side effect functions and iterables<a class="headerlink" href="#side-effect-functions-and-iterables" title="Permalink to this headline">¶</a></h3>
+<p><cite>side_effect</cite> can also be set to a function or an iterable. The use case for
+<cite>side_effect</cite> as an iterable is where your mock is going to be called several
+times, and you want each call to return a different value. When you set
+<cite>side_effect</cite> to an iterable every call to the mock returns the next value
+from the iterable:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="p">[</span><span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="mi">6</span><span class="p">])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">4</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">5</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">6</span>
+</pre></div>
+</div>
+<p>For more advanced use cases, like dynamically varying the return values
+depending on what the mock is called with, <cite>side_effect</cite> can be a function.
+The function will be called with the same arguments as the mock. Whatever the
+function returns is what the call returns:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">vals</span> <span class="o">=</span> <span class="p">{(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">):</span> <span class="mi">1</span><span class="p">,</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">):</span> <span class="mi">2</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">vals</span><span class="p">[</span><span class="n">args</span><span class="p">]</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">side_effect</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
+<span class="go">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="go">2</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="creating-a-mock-from-an-existing-object">
+<h3>Creating a Mock from an Existing Object<a class="headerlink" href="#creating-a-mock-from-an-existing-object" title="Permalink to this headline">¶</a></h3>
+<p>One problem with overuse of mocking is that it couples your tests to the
+implementation of your mocks rather than your real code. Suppose you have a
+class that implements <cite>some_method</cite>. In a test for another class, you
+provide a mock of this object that <em>also</em> provides <cite>some_method</cite>. If you later
+refactor the first class so that it no longer has <cite>some_method</cite>, then
+your tests will continue to pass even though your code is now broken!</p>
+<p><cite>Mock</cite> allows you to provide an object as a specification for the mock,
+using the <cite>spec</cite> keyword argument. Accessing methods / attributes on the
+mock that don&#8217;t exist on your specification object will immediately raise an
+attribute error. If you change the implementation of your specification, then
+tests that use that class will start failing immediately without you having to
+instantiate the class in those tests.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">spec</span><span class="o">=</span><span class="n">SomeClass</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">old_method</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">AttributeError</span>: <span class="n">object has no attribute &#39;old_method&#39;</span>
+</pre></div>
+</div>
+<p>If you want a stronger form of specification that prevents the setting
+of arbitrary attributes as well as the getting of them then you can use
+<cite>spec_set</cite> instead of <cite>spec</cite>.</p>
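+<p>A minimal sketch of the difference, reusing <cite>SomeClass</cite> from the example
+above: setting an attribute the spec object does not define (the made-up
+<cite>not_a_real_attribute</cite> here) fails, where a plain <cite>spec</cite> mock would allow it
+(the exact error message may differ slightly):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = Mock(spec_set=SomeClass)
+&gt;&gt;&gt; mock.not_a_real_attribute = &#39;something&#39;
+Traceback (most recent call last):
+  ...
+AttributeError: Mock object has no attribute &#39;not_a_real_attribute&#39;
+</pre></div>
+</div>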
+</div>
+</div>
+<div class="section" id="patch-decorators">
+<h2>Patch Decorators<a class="headerlink" href="#patch-decorators" title="Permalink to this headline">¶</a></h2>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">With <cite>patch</cite> it matters that you patch objects in the namespace where they
+are looked up. This is normally straightforward, but for a quick guide
+read <a class="reference internal" href="patch.html#where-to-patch"><em>where to patch</em></a>.</p>
+</div>
+<p>A common need in tests is to patch a class attribute or a module attribute,
+for example patching a builtin or patching a class in a module to test that it
+is instantiated. Modules and classes are effectively global, so patching on
+them has to be undone after the test or the patch will persist into other
+tests and cause hard-to-diagnose problems.</p>
+<p>mock provides three convenient decorators for this: <cite>patch</cite>, <cite>patch.object</cite> and
+<cite>patch.dict</cite>. <cite>patch</cite> takes a single string, of the form
+<cite>package.module.Class.attribute</cite>, to specify the attribute you are patching. It
+also optionally takes a value that you want the attribute (or class or
+whatever) to be replaced with. <cite>patch.object</cite> takes an object and the name of
+the attribute you would like patched, plus optionally the value to patch it
+with.</p>
+<p><cite>patch.object</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">attribute</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;attribute&#39;</span><span class="p">,</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">attribute</span> <span class="o">==</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">attribute</span> <span class="o">==</span> <span class="n">original</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;package.module.attribute&#39;</span><span class="p">,</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="kn">from</span> <span class="nn">package.module</span> <span class="kn">import</span> <span class="n">attribute</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">attribute</span> <span class="ow">is</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>If you are patching a module (including <cite>__builtin__</cite>) then use <cite>patch</cite>
+instead of <cite>patch.object</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">return_value</span> <span class="o">=</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">file_handle</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__builtin__.open&#39;</span><span class="p">,</span> <span class="n">mock</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">handle</span> <span class="o">=</span> <span class="nb">open</span><span class="p">(</span><span class="s">&#39;filename&#39;</span><span class="p">,</span> <span class="s">&#39;r&#39;</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="s">&#39;filename&#39;</span><span class="p">,</span> <span class="s">&#39;r&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">handle</span> <span class="o">==</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">file_handle</span><span class="p">,</span> <span class="s">&quot;incorrect file handle returned&quot;</span>
+</pre></div>
+</div>
+<p>The module name can be &#8216;dotted&#8217;, in the form <cite>package.module</cite> if needed:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;package.module.ClassName.attribute&#39;</span><span class="p">,</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="kn">from</span> <span class="nn">package.module</span> <span class="kn">import</span> <span class="n">ClassName</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">ClassName</span><span class="o">.</span><span class="n">attribute</span> <span class="o">==</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>A nice pattern is to actually decorate test methods themselves:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">unittest2</span><span class="o">.</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;attribute&#39;</span><span class="p">,</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_something</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertEqual</span><span class="p">(</span><span class="n">SomeClass</span><span class="o">.</span><span class="n">attribute</span><span class="p">,</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">attribute</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_something&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">test_something</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">attribute</span> <span class="o">==</span> <span class="n">original</span>
+</pre></div>
+</div>
+<p>If you want to patch with a Mock, you can use <cite>patch</cite> with only one argument
+(or <cite>patch.object</cite> with two arguments). The mock will be created for you and
+passed into the test function / method:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">unittest2</span><span class="o">.</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;static_method&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_something</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">mock_method</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">static_method</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">mock_method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_something&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">test_something</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>You can stack up multiple patch decorators using this pattern:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">unittest2</span><span class="o">.</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;package.module.ClassName1&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;package.module.ClassName2&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_something</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">MockClass2</span><span class="p">,</span> <span class="n">MockClass1</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertTrue</span><span class="p">(</span><span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">ClassName1</span> <span class="ow">is</span> <span class="n">MockClass1</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertTrue</span><span class="p">(</span><span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">ClassName2</span> <span class="ow">is</span> <span class="n">MockClass2</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_something&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">test_something</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>When you nest patch decorators the mocks are passed in to the decorated
+function in the same order they are applied (the normal <em>Python</em> order in which
+decorators are applied). This means from the bottom up, so in the example
+above the mock for <cite>package.module.ClassName2</cite> is passed in first.</p>
+<p>There is also <tt class="xref py py-func docutils literal"><span class="pre">patch.dict()</span></tt> for setting values in a dictionary just
+during a scope and restoring the dictionary to its original state when the test
+ends:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">foo</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;key&#39;</span><span class="p">:</span> <span class="s">&#39;value&#39;</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">foo</span><span class="o">.</span><span class="n">copy</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="n">foo</span><span class="p">,</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">},</span> <span class="n">clear</span><span class="o">=</span><span class="bp">True</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">foo</span> <span class="o">==</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">}</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">foo</span> <span class="o">==</span> <span class="n">original</span>
+</pre></div>
+</div>
+<p><cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite> can all be used as context managers.</p>
+<p>Where you use <cite>patch</cite> to create a mock for you, you can get a reference to the
+mock using the &#8220;as&#8221; form of the with statement:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">ProductionClass</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">object</span><span class="p">(</span><span class="n">ProductionClass</span><span class="p">,</span> <span class="s">&#39;method&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_method</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">mock_method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">... </span> <span class="n">real</span> <span class="o">=</span> <span class="n">ProductionClass</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">real</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>As an alternative, <cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite> can be used as
+class decorators. When used in this way it is the same as applying the
+decorator individually to every method whose name starts with &#8220;test&#8221;.</p>
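+<p>A rough sketch of the class decorator form, reusing <cite>package.module.ClassName1</cite>
+from the stacked decorator example above (the <cite>test_one</cite> and <cite>test_two</cite>
+method names are just illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; @patch(&#39;package.module.ClassName1&#39;)
+... class MyTest(unittest2.TestCase):
+...     def test_one(self, MockClass):
+...         self.assertTrue(package.module.ClassName1 is MockClass)
+...     def test_two(self, MockClass):
+...         self.assertTrue(package.module.ClassName1 is MockClass)
+...
+&gt;&gt;&gt; MyTest(&#39;test_one&#39;).test_one()
+&gt;&gt;&gt; MyTest(&#39;test_two&#39;).test_two()
+</pre></div>
+</div>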
+<p>For some more advanced examples, see the <a class="reference internal" href="examples.html#further-examples"><em>Further Examples</em></a> page.</p>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Getting Started with Mock</a><ul>
+<li><a class="reference internal" href="#using-mock">Using Mock</a><ul>
+<li><a class="reference internal" href="#mock-patching-methods">Mock Patching Methods</a></li>
+<li><a class="reference internal" href="#mock-for-method-calls-on-an-object">Mock for Method Calls on an Object</a></li>
+<li><a class="reference internal" href="#mocking-classes">Mocking Classes</a></li>
+<li><a class="reference internal" href="#naming-your-mocks">Naming your mocks</a></li>
+<li><a class="reference internal" href="#tracking-all-calls">Tracking all Calls</a></li>
+<li><a class="reference internal" href="#setting-return-values-and-attributes">Setting Return Values and Attributes</a></li>
+<li><a class="reference internal" href="#raising-exceptions-with-mocks">Raising exceptions with mocks</a></li>
+<li><a class="reference internal" href="#side-effect-functions-and-iterables">Side effect functions and iterables</a></li>
+<li><a class="reference internal" href="#creating-a-mock-from-an-existing-object">Creating a Mock from an Existing Object</a></li>
+</ul>
+</li>
+<li><a class="reference internal" href="#patch-decorators">Patch Decorators</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="magicmock.html"
+ title="previous chapter">Mocking Magic Methods</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="examples.html"
+ title="next chapter">Further Examples</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/getting-started.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="examples.html" title="Further Examples"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="magicmock.html" title="Mocking Magic Methods"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/index.html b/third_party/python/mock-1.0.0/html/index.html
new file mode 100644
index 0000000000..2cdf89d5c4
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/index.html
@@ -0,0 +1,529 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Mock - Mocking and Testing Library &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="#" />
+ <link rel="next" title="The Mock Class" href="mock.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="mock.html" title="The Mock Class"
+ accesskey="N">next</a> |</li>
+ <li><a href="#">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="mock-mocking-and-testing-library">
+<h1>Mock - Mocking and Testing Library<a class="headerlink" href="#mock-mocking-and-testing-library" title="Permalink to this headline">¶</a></h1>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name" />
+<col class="field-body" />
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Author:</th><td class="field-body"><a class="reference external" href="http://www.voidspace.org.uk/python/weblog/index.shtml">Michael Foord</a></td>
+</tr>
+<tr class="field-even field"><th class="field-name">Version:</th><td class="field-body">1.0.0</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Date:</th><td class="field-body">2012/10/07</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Homepage:</th><td class="field-body"><a class="reference external" href="http://www.voidspace.org.uk/python/mock/">Mock Homepage</a></td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Download:</th><td class="field-body"><a class="reference external" href="http://pypi.python.org/pypi/mock">Mock on PyPI</a></td>
+</tr>
+<tr class="field-even field"><th class="field-name">Documentation:</th><td class="field-body"><a class="reference external" href="http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf">PDF Documentation</a></td>
+</tr>
+<tr class="field-odd field"><th class="field-name">License:</th><td class="field-body"><a class="reference external" href="http://www.voidspace.org.uk/python/license.shtml">BSD License</a></td>
+</tr>
+<tr class="field-even field"><th class="field-name">Support:</th><td class="field-body"><a class="reference external" href="http://lists.idyll.org/listinfo/testing-in-python">Mailing list (testing-in-python&#64;lists.idyll.org)</a></td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Issue tracker:</th><td class="field-body"><a class="reference external" href="http://code.google.com/p/mock/issues/list">Google code project</a></td>
+</tr>
+</tbody>
+</table>
+<span class="target" id="module-mock"></span><p id="index-0">mock is a library for testing in Python. It allows you to replace parts of
+your system under test with mock objects and make assertions about how they
+have been used.</p>
+<p>mock is now part of the Python standard library, available as <a class="reference external" href="http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock">unittest.mock</a>
+in Python 3.3 onwards.</p>
+<p>mock provides a core <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> class removing the need to create a host
+of stubs throughout your test suite. After performing an action, you can make
+assertions about which methods / attributes were used and the arguments they were
+called with. You can also specify return values and set needed attributes in
+the normal way.</p>
+<p>Additionally, mock provides a <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> decorator that handles patching
+module and class level attributes within the scope of a test, along with
+<a class="reference internal" href="sentinel.html#mock.sentinel" title="mock.sentinel"><tt class="xref py py-const docutils literal"><span class="pre">sentinel</span></tt></a> for creating unique objects. See the <a class="reference internal" href="#quick-guide">quick guide</a> for
+some examples of how to use <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a>, <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> and
+<a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a>.</p>
+<p>Mock is very easy to use and is designed for use with
+<a class="reference external" href="http://pypi.python.org/pypi/unittest2">unittest</a>. Mock is based on
+the &#8216;action -&gt; assertion&#8217; pattern instead of <cite>&#8216;record -&gt; replay&#8217;</cite> used by many
+mocking frameworks.</p>
+<p>mock is tested on Python versions 2.4-2.7 and Python 3, plus the latest
+versions of Jython and PyPy.</p>
+<div class="section" id="api-documentation">
+<h2>API Documentation<a class="headerlink" href="#api-documentation" title="Permalink to this headline">¶</a></h2>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="mock.html">The Mock Class</a></li>
+<li class="toctree-l1"><a class="reference internal" href="mock.html#calling">Calling</a></li>
+<li class="toctree-l1"><a class="reference internal" href="mock.html#deleting-attributes">Deleting Attributes</a></li>
+<li class="toctree-l1"><a class="reference internal" href="mock.html#attaching-mocks-as-attributes">Attaching Mocks as Attributes</a></li>
+<li class="toctree-l1"><a class="reference internal" href="patch.html">Patch Decorators</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#patch">patch</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#patch-object">patch.object</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#patch-dict">patch.dict</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#patch-multiple">patch.multiple</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#patch-methods-start-and-stop">patch methods: start and stop</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#test-prefix">TEST_PREFIX</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#nesting-patch-decorators">Nesting Patch Decorators</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#where-to-patch">Where to patch</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#patching-descriptors-and-proxy-objects">Patching Descriptors and Proxy Objects</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="helpers.html">Helpers</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="helpers.html#call">call</a></li>
+<li class="toctree-l2"><a class="reference internal" href="helpers.html#create-autospec">create_autospec</a></li>
+<li class="toctree-l2"><a class="reference internal" href="helpers.html#any">ANY</a></li>
+<li class="toctree-l2"><a class="reference internal" href="helpers.html#filter-dir">FILTER_DIR</a></li>
+<li class="toctree-l2"><a class="reference internal" href="helpers.html#mock-open">mock_open</a></li>
+<li class="toctree-l2"><a class="reference internal" href="helpers.html#autospeccing">Autospeccing</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="sentinel.html">Sentinel</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="sentinel.html#sentinel-example">Sentinel Example</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="magicmock.html">Mocking Magic Methods</a></li>
+<li class="toctree-l1"><a class="reference internal" href="magicmock.html#magic-mock">Magic Mock</a></li>
+</ul>
+</div>
+</div>
+<div class="section" id="user-guide">
+<h2>User Guide<a class="headerlink" href="#user-guide" title="Permalink to this headline">¶</a></h2>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="getting-started.html">Getting Started with Mock</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="getting-started.html#using-mock">Using Mock</a></li>
+<li class="toctree-l2"><a class="reference internal" href="getting-started.html#patch-decorators">Patch Decorators</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="examples.html">Further Examples</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-chained-calls">Mocking chained calls</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#partial-mocking">Partial mocking</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-a-generator-method">Mocking a Generator Method</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#applying-the-same-patch-to-every-test-method">Applying the same patch to every test method</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-unbound-methods">Mocking Unbound Methods</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#checking-multiple-calls-with-mock">Checking multiple calls with mock</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#coping-with-mutable-arguments">Coping with mutable arguments</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#raising-exceptions-on-attribute-access">Raising exceptions on attribute access</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#multiple-calls-with-different-effects">Multiple calls with different effects</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#nesting-patches">Nesting Patches</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-a-dictionary-with-magicmock">Mocking a dictionary with MagicMock</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mock-subclasses-and-their-attributes">Mock subclasses and their attributes</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-imports-with-patch-dict">Mocking imports with patch.dict</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#tracking-order-of-calls-and-less-verbose-call-assertions">Tracking order of calls and less verbose call assertions</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#more-complex-argument-matching">More complex argument matching</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#less-verbose-configuration-of-mock-objects">Less verbose configuration of mock objects</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#matching-any-argument-in-assertions">Matching any argument in assertions</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-properties">Mocking Properties</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-open">Mocking open</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocks-without-some-attributes">Mocks without some attributes</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="compare.html">Mock Library Comparison</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#simple-fake-object">Simple fake object</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#simple-mock">Simple mock</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#creating-partial-mocks">Creating partial mocks</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#ensure-calls-are-made-in-specific-order">Ensure calls are made in specific order</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#raising-exceptions">Raising exceptions</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#override-new-instances-of-a-class">Override new instances of a class</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#call-the-same-method-multiple-times">Call the same method multiple times</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#mock-chained-methods">Mock chained methods</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#mocking-a-context-manager">Mocking a context manager</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#mocking-the-builtin-open-used-as-a-context-manager">Mocking the builtin open used as a context manager</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="changelog.html">CHANGELOG</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-1-0-0">2012/10/07 Version 1.0.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-1-0-0-beta-1">2012/07/13 Version 1.0.0 beta 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-1-0-0-alpha-2">2012/05/04 Version 1.0.0 alpha 2</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-1-0-0-alpha-1">2012/03/25 Version 1.0.0 alpha 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0">2012/02/13 Version 0.8.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-release-candidate-2">2012/01/10 Version 0.8.0 release candidate 2</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-release-candidate-1">2011/12/29 Version 0.8.0 release candidate 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-beta-4">2011/10/09 Version 0.8.0 beta 4</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-beta-3">2011/08/15 Version 0.8.0 beta 3</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-beta-2">2011/08/05 Version 0.8.0 beta 2</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-beta-1">2011/07/25 Version 0.8.0 beta 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-alpha-2">2011/07/16 Version 0.8.0 alpha 2</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-alpha-1">2011/06/14 Version 0.8.0 alpha 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-2">2011/05/30 Version 0.7.2</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-1">2011/05/06 Version 0.7.1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-0">2011/03/05 Version 0.7.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-0-rc-1">2011/02/16 Version 0.7.0 RC 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-0-beta-4">2010/11/12 Version 0.7.0 beta 4</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-0-beta-3">2010/09/18 Version 0.7.0 beta 3</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-0-beta-2">2010/06/23 Version 0.7.0 beta 2</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-0-beta-1">2010/06/22 Version 0.7.0 beta 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-6-0">2009/08/22 Version 0.6.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-5-0">2009/04/17 Version 0.5.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-4-0">2008/10/12 Version 0.4.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-3-1">2007/12/03 Version 0.3.1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-3-0">2007/11/30 Version 0.3.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-2-1">2007/11/21 Version 0.2.1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-2-0">2007/11/20 Version 0.2.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-1-0">2007/11/19 Version 0.1.0</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="changelog.html#todo-and-limitations">TODO and Limitations</a></li>
+</ul>
+</div>
+</div>
+<div class="section" id="installing">
+<span id="index-1"></span><h2>Installing<a class="headerlink" href="#installing" title="Permalink to this headline">¶</a></h2>
+<p>The current version is 1.0.0. Mock is stable and widely used. If you do
+find any bugs, or have suggestions for improvements / extensions,
+then please contact us.</p>
+<ul class="simple">
+<li><a class="reference external" href="http://pypi.python.org/pypi/mock">mock on PyPI</a></li>
+<li><a class="reference external" href="http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf">mock documentation as PDF</a></li>
+<li><a class="reference external" href="http://code.google.com/p/mock/">Google Code Home &amp; Mercurial Repository</a></li>
+</ul>
+<span class="target" id="index-2"></span><p id="index-3">You can checkout the latest development version from the Google Code Mercurial
+repository with the following command:</p>
+<blockquote>
+<div><tt class="docutils literal"><span class="pre">hg</span> <span class="pre">clone</span> <span class="pre">https://mock.googlecode.com/hg/</span> <span class="pre">mock</span></tt></div></blockquote>
+<span class="target" id="index-4"></span><span class="target" id="index-5"></span><p id="index-6">If you have pip, setuptools or distribute you can install mock with:</p>
+<blockquote>
+<div><div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">easy_install</span> <span class="pre">-U</span> <span class="pre">mock</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">pip</span> <span class="pre">install</span> <span class="pre">-U</span> <span class="pre">mock</span></tt></div>
+</div>
+</div></blockquote>
+<p>Alternatively you can download the mock distribution from PyPI and after
+unpacking run:</p>
+<blockquote>
+<div><tt class="docutils literal"><span class="pre">python</span> <span class="pre">setup.py</span> <span class="pre">install</span></tt></div></blockquote>
+</div>
+<div class="section" id="quick-guide">
+<h2>Quick Guide<a class="headerlink" href="#quick-guide" title="Permalink to this headline">¶</a></h2>
+<p><a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> and <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> objects create all attributes and
+methods as you access them and store details of how they have been used. You
+can configure them to specify return values or limit what attributes are
+available, and then make assertions about how they have been used:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">MagicMock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span> <span class="o">=</span> <span class="n">ProductionClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span><span class="o">.</span><span class="n">method</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">key</span><span class="o">=</span><span class="s">&#39;value&#39;</span><span class="p">)</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">key</span><span class="o">=</span><span class="s">&#39;value&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt> allows you to perform side effects, including raising an
+exception when a mock is called:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="ne">KeyError</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>: <span class="n">&#39;foo&#39;</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">values</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;a&#39;</span><span class="p">:</span> <span class="mi">1</span><span class="p">,</span> <span class="s">&#39;b&#39;</span><span class="p">:</span> <span class="mi">2</span><span class="p">,</span> <span class="s">&#39;c&#39;</span><span class="p">:</span> <span class="mi">3</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="n">arg</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">values</span><span class="p">[</span><span class="n">arg</span><span class="p">]</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">side_effect</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;a&#39;</span><span class="p">),</span> <span class="n">mock</span><span class="p">(</span><span class="s">&#39;b&#39;</span><span class="p">),</span> <span class="n">mock</span><span class="p">(</span><span class="s">&#39;c&#39;</span><span class="p">)</span>
+<span class="go">(1, 2, 3)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="p">[</span><span class="mi">5</span><span class="p">,</span> <span class="mi">4</span><span class="p">,</span> <span class="mi">3</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">1</span><span class="p">]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(),</span> <span class="n">mock</span><span class="p">(),</span> <span class="n">mock</span><span class="p">()</span>
+<span class="go">(5, 4, 3)</span>
+</pre></div>
+</div>
+<p>Mock has many other ways you can configure it and control its behaviour. For
+example, the <cite>spec</cite> argument configures the mock to take its specification
+from another object. Attempting to access attributes or methods on the mock
+that don&#8217;t exist on the spec will fail with an <cite>AttributeError</cite>.</p>
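+<p>A minimal sketch of <cite>spec</cite> in action (the <cite>SomeClass</cite> class below is an assumed
+example, not part of the library):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; from mock import Mock
+&gt;&gt;&gt; class SomeClass(object):
+...     def existing_method(self):
+...         pass
+...
+&gt;&gt;&gt; mock = Mock(spec=SomeClass)
+&gt;&gt;&gt; result = mock.existing_method()   # fine: the attribute exists on the spec
+&gt;&gt;&gt; mock.missing_method()             # not on the spec, so this raises
+Traceback (most recent call last):
+  ...
+AttributeError: Mock object has no attribute 'missing_method'
+</pre></div>
+</div>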
+<p>The <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> decorator / context manager makes it easy to mock classes or
+objects in a module under test. The object you specify will be replaced with a
+mock (or other object) during the test and restored when the test ends:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">patch</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;module.ClassName2&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;module.ClassName1&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">(</span><span class="n">MockClass1</span><span class="p">,</span> <span class="n">MockClass2</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName1</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName2</span><span class="p">()</span>
+
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">MockClass1</span> <span class="ow">is</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName1</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">MockClass2</span> <span class="ow">is</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName2</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">MockClass1</span><span class="o">.</span><span class="n">called</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">MockClass2</span><span class="o">.</span><span class="n">called</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>When you nest patch decorators the mocks are passed in to the decorated
+function in the same order they are applied (the normal <em>Python</em> order in
+which decorators are applied). This means from the bottom up, so in the example
+above the mock for <cite>module.ClassName1</cite> is passed in first.</p>
+<p class="last">With <cite>patch</cite> it matters that you patch objects in the namespace where they
+are looked up. This is normally straightforward, but for a quick guide
+read <a class="reference internal" href="patch.html#where-to-patch"><em>where to patch</em></a>.</p>
+</div>
+<p>As well as a decorator, <cite>patch</cite> can be used as a context manager in a with
+statement:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">object</span><span class="p">(</span><span class="n">ProductionClass</span><span class="p">,</span> <span class="s">&#39;method&#39;</span><span class="p">,</span> <span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_method</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">thing</span> <span class="o">=</span> <span class="n">ProductionClass</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">thing</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_method</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>There is also <a class="reference internal" href="patch.html#mock.patch.dict" title="mock.patch.dict"><tt class="xref py py-func docutils literal"><span class="pre">patch.dict()</span></tt></a> for setting values in a dictionary just
+during a scope and restoring the dictionary to its original state when the test
+ends:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">foo</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;key&#39;</span><span class="p">:</span> <span class="s">&#39;value&#39;</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">foo</span><span class="o">.</span><span class="n">copy</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="n">foo</span><span class="p">,</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">},</span> <span class="n">clear</span><span class="o">=</span><span class="bp">True</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">foo</span> <span class="o">==</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">}</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">foo</span> <span class="o">==</span> <span class="n">original</span>
+</pre></div>
+</div>
+<p>Mock supports the mocking of Python <a class="reference internal" href="magicmock.html#magic-methods"><em>magic methods</em></a>. The
+easiest way of using magic methods is with the <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> class. It
+allows you to do things like:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__str__</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;foobarbaz&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">str</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">&#39;foobarbaz&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__str__</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>Mock allows you to assign functions (or other Mock instances) to magic methods
+and they will be called appropriately. The <cite>MagicMock</cite> class is just a Mock
+variant that has all of the magic methods pre-created for you (well, all the
+useful ones anyway).</p>
+<p>The following is an example of using magic methods with the ordinary Mock
+class:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__str__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="s">&#39;wheeeeee&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">str</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">&#39;wheeeeee&#39;</span>
+</pre></div>
+</div>
+<p>To ensure that the mock objects in your tests have the same API as the
+objects they are replacing, you can use <a class="reference internal" href="helpers.html#auto-speccing"><em>auto-speccing</em></a>.
+Auto-speccing can be done through the <cite>autospec</cite> argument to patch, or the
+<a class="reference internal" href="helpers.html#mock.create_autospec" title="mock.create_autospec"><tt class="xref py py-func docutils literal"><span class="pre">create_autospec()</span></tt></a> function. Auto-speccing creates mock objects that
+have the same attributes and methods as the objects they are replacing, and
+any functions and methods (including constructors) have the same call
+signature as the real object.</p>
+<p>This ensures that your mocks will fail in the same way as your production
+code if they are used incorrectly:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">create_autospec</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">function</span><span class="p">(</span><span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_function</span> <span class="o">=</span> <span class="n">create_autospec</span><span class="p">(</span><span class="n">function</span><span class="p">,</span> <span class="n">return_value</span><span class="o">=</span><span class="s">&#39;fishy&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_function</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="go">&#39;fishy&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_function</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_function</span><span class="p">(</span><span class="s">&#39;wrong arguments&#39;</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">TypeError</span>: <span class="n">&lt;lambda&gt;() takes exactly 3 arguments (1 given)</span>
+</pre></div>
+</div>
+<p><cite>create_autospec</cite> can also be used on classes, where it copies the signature of
+the <cite>__init__</cite> method, and on callable objects where it copies the signature of
+the <cite>__call__</cite> method.</p>
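+<p>A short sketch of this on a class (the <cite>Thing</cite> class below is assumed just for
+illustration):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; from mock import create_autospec
+&gt;&gt;&gt; class Thing(object):
+...     def __init__(self, name):
+...         self.name = name
+...
+&gt;&gt;&gt; MockThing = create_autospec(Thing)
+&gt;&gt;&gt; instance = MockThing('a name')    # accepted: matches __init__(self, name)
+&gt;&gt;&gt; MockThing.assert_called_once_with('a name')
+&gt;&gt;&gt; # calling MockThing with the wrong number of arguments raises a TypeError
+</pre></div>
+</div>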
+<span class="target" id="index-7"></span></div>
+<div class="section" id="references">
+<span id="index-8"></span><h2>References<a class="headerlink" href="#references" title="Permalink to this headline">¶</a></h2>
+<p>Articles, blog entries and other stuff related to testing with Mock:</p>
+<ul class="simple">
+<li><a class="reference external" href="https://github.com/carljm/django-testing-slides/blob/master/models/30_no_database.md">Imposing a No DB Discipline on Django unit tests</a></li>
+<li><a class="reference external" href="https://github.com/dcramer/mock-django">mock-django: tools for mocking the Django ORM and models</a></li>
+<li><a class="reference external" href="https://blip.tv/file/4881513">PyCon 2011 Video: Testing with mock</a></li>
+<li><a class="reference external" href="http://noopenblockers.com/2012/01/06/mock-objects-in-python/">Mock objects in Python</a></li>
+<li><a class="reference external" href="http://blueprintforge.com/blog/2012/01/08/python-injecting-mock-objects-for-powerful-testing/">Python: Injecting Mock Objects for Powerful Testing</a></li>
+<li><a class="reference external" href="http://www.michaelpollmeier.com/python-mock-how-to-assert-a-substring-of-logger-output/">Python Mock: How to assert a substring of logger output</a></li>
+<li><a class="reference external" href="http://www.mattjmorrison.com/2011/09/mocking-django.html">Mocking Django</a></li>
+<li><a class="reference external" href="http://williamjohnbert.com/2011/07/how-to-unit-testing-in-django-with-mocking-and-patching/">Mocking dates and other classes that can&#8217;t be modified</a></li>
+<li><a class="reference external" href="http://konryd.blogspot.com/2010/06/mock-recipies.html">Mock recipes</a></li>
+<li><a class="reference external" href="http://konryd.blogspot.com/2010/05/mockity-mock-mock-some-love-for-mock.html">Mockity mock mock - some love for the mock module</a></li>
+<li><a class="reference external" href="http://mattsnider.com/python/mock-and-coverage/">Coverage and Mock (with django)</a></li>
+<li><a class="reference external" href="http://www.insomnihack.com/?p=194">Python Unit Testing with Mock</a></li>
+<li><a class="reference external" href="http://myadventuresincoding.wordpress.com/2011/02/26/python-python-mock-cheat-sheet/">Getting started with Python Mock</a></li>
+<li><a class="reference external" href="http://tobyho.com/2011/03/24/smart-parameter-checks-in/">Smart Parameter Checks with mock</a></li>
+<li><a class="reference external" href="http://agiletesting.blogspot.com/2009/07/python-mock-testing-techniques-and.html">Python mock testing techniques and tools</a></li>
+<li><a class="reference external" href="http://techblog.ironfroggy.com/2008/10/how-to-test.html">How To Test Django Template Tags</a></li>
+<li><a class="reference external" href="http://pypap.blogspot.com/2008/10/newbie-nugget-unit-testing-with-mock.html">A presentation on Unit Testing with Mock</a></li>
+<li><a class="reference external" href="http://michael-a-nelson.blogspot.com/2008/09/mocking-with-django-and-google-app.html">Mocking with Django and Google AppEngine</a></li>
+</ul>
+<span class="target" id="index-9"></span></div>
+<div class="section" id="tests">
+<span id="index-10"></span><h2>Tests<a class="headerlink" href="#tests" title="Permalink to this headline">¶</a></h2>
+<p>Mock uses <a class="reference external" href="http://pypi.python.org/pypi/unittest2">unittest2</a> for its own
+test suite. In order to run it, use the <cite>unit2</cite> script that comes with the
+<cite>unittest2</cite> module on a checkout of the source repository:</p>
+<blockquote>
+<div><cite>unit2 discover</cite></div></blockquote>
+<p>If you have <a class="reference external" href="http://pypi.python.org/pypi/distribute">setuptools</a> as well as
+unittest2, you can run:</p>
+<blockquote>
+<div><tt class="docutils literal"><span class="pre">python</span> <span class="pre">setup.py</span> <span class="pre">test</span></tt></div></blockquote>
+<p>On Python 3.2 you can use the <tt class="docutils literal"><span class="pre">unittest</span></tt> module from the standard library:</p>
+<blockquote>
+<div><tt class="docutils literal"><span class="pre">python3.2</span> <span class="pre">-m</span> <span class="pre">unittest</span> <span class="pre">discover</span></tt></div></blockquote>
+<p id="index-11">On Python 3 the tests for unicode are skipped as they are not relevant. On
+Python 2.4, tests that use the with statement are skipped, as the with statement
+is invalid syntax on Python 2.4.</p>
+</div>
+<div class="section" id="older-versions">
+<span id="index-12"></span><h2>Older Versions<a class="headerlink" href="#older-versions" title="Permalink to this headline">¶</a></h2>
+<p>Documentation for older versions of mock:</p>
+<ul class="simple">
+<li><a class="reference external" href="http://www.voidspace.org.uk/python/mock/0.8/">mock 0.8</a></li>
+<li><a class="reference external" href="http://www.voidspace.org.uk/python/mock/0.7/">mock 0.7</a></li>
+<li><a class="reference external" href="http://www.voidspace.org.uk/python/mock/0.6.0/">mock 0.6</a></li>
+</ul>
+<p>Docs from the in-development version of <cite>mock</cite> can be found at
+<a class="reference external" href="http://mock.readthedocs.org">mock.readthedocs.org</a>.</p>
+</div>
+<div class="section" id="terminology">
+<h2>Terminology<a class="headerlink" href="#terminology" title="Permalink to this headline">¶</a></h2>
+<p>Terminology for objects used to replace other ones can be confusing. Terms
+like double, fake, mock, stub, and spy are all used with varying meanings.</p>
+<p>In <a class="reference external" href="http://xunitpatterns.com/Mocks,%20Fakes,%20Stubs%20and%20Dummies.html">classic mock terminology</a>
+<a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">mock.Mock</span></tt></a> is a <a class="reference external" href="http://xunitpatterns.com/Test%20Spy.html">spy</a> that
+allows for <em>post-mortem</em> examination. This is what I call the &#8220;action -&gt;
+assertion&#8221; <a class="footnote-reference" href="#id2" id="id1">[1]</a> pattern of testing.</p>
+<p>I&#8217;m not however a fan of this &#8220;statically typed mocking terminology&#8221;
+promulgated by <a class="reference external" href="http://martinfowler.com/articles/mocksArentStubs.html">Martin Fowler</a>. It confuses usage
+patterns with implementation and prevents you from using natural terminology
+when discussing mocking.</p>
+<p>I much prefer duck typing, if an object used in your test suite looks like a
+mock object and quacks like a mock object then it&#8217;s fine to call it a mock, no
+matter what the implementation looks like.</p>
+<p>This terminology is perhaps more useful in less capable languages where
+different usage patterns will <em>require</em> different implementations.
+<cite>mock.Mock()</cite> is capable of being used in most of the different roles
+described by Fowler, except (annoyingly / frustratingly / ironically) a Mock
+itself!</p>
+<p>How about a simpler definition: a &#8220;mock object&#8221; is an object used to replace a
+real one in a system under test.</p>
+<table class="docutils footnote" frame="void" id="id2" rules="none">
+<colgroup><col class="label" /><col /></colgroup>
+<tbody valign="top">
+<tr><td class="label"><a class="fn-backref" href="#id1">[1]</a></td><td>This pattern is called &#8220;AAA&#8221; by some members of the testing community;
+&#8220;Arrange - Act - Assert&#8221;.</td></tr>
+</tbody>
+</table>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="#">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Mock - Mocking and Testing Library</a><ul>
+<li><a class="reference internal" href="#api-documentation">API Documentation</a><ul>
+</ul>
+</li>
+<li><a class="reference internal" href="#user-guide">User Guide</a><ul>
+</ul>
+</li>
+<li><a class="reference internal" href="#installing">Installing</a></li>
+<li><a class="reference internal" href="#quick-guide">Quick Guide</a></li>
+<li><a class="reference internal" href="#references">References</a></li>
+<li><a class="reference internal" href="#tests">Tests</a></li>
+<li><a class="reference internal" href="#older-versions">Older Versions</a></li>
+<li><a class="reference internal" href="#terminology">Terminology</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Next topic</h4>
+ <p class="topless"><a href="mock.html"
+ title="next chapter">The Mock Class</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/index.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="mock.html" title="The Mock Class"
+ >next</a> |</li>
+ <li><a href="#">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/magicmock.html b/third_party/python/mock-1.0.0/html/magicmock.html
new file mode 100644
index 0000000000..f49fae7633
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/magicmock.html
@@ -0,0 +1,347 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Mocking Magic Methods &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="Getting Started with Mock" href="getting-started.html" />
+ <link rel="prev" title="Sentinel" href="sentinel.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="getting-started.html" title="Getting Started with Mock"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="sentinel.html" title="Sentinel"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="mocking-magic-methods">
+<span id="magic-methods"></span><h1>Mocking Magic Methods<a class="headerlink" href="#mocking-magic-methods" title="Permalink to this headline">¶</a></h1>
+<p><a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> supports mocking <a class="reference external" href="http://www.ironpythoninaction.com/magic-methods.html">magic methods</a>. This allows mock
+objects to replace containers or other objects that implement Python
+protocols.</p>
+<p>Because magic methods are looked up differently from normal methods <a class="footnote-reference" href="#id4" id="id2">[1]</a>, this
+support has been specially implemented. This means that only specific magic
+methods are supported. The supported list includes <em>almost</em> all of them. If
+there are any missing that you need, please let us know!</p>
+<p>You mock magic methods by setting the method you are interested in to a function
+or a mock instance. If you are using a function then it <em>must</em> take <tt class="docutils literal"><span class="pre">self</span></tt> as
+the first argument <a class="footnote-reference" href="#id5" id="id3">[2]</a>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">__str__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="s">&#39;fooble&#39;</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__str__</span> <span class="o">=</span> <span class="n">__str__</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">str</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">&#39;fooble&#39;</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__str__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__str__</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;fooble&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">str</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">&#39;fooble&#39;</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__iter__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="nb">iter</span><span class="p">([]))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">[]</span>
+</pre></div>
+</div>
+<p>One use case for this is for mocking objects used as context managers in a
+<cite>with</cite> statement:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__enter__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="s">&#39;foo&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__exit__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">False</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">mock</span> <span class="k">as</span> <span class="n">m</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">m</span> <span class="o">==</span> <span class="s">&#39;foo&#39;</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__enter__</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__exit__</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="bp">None</span><span class="p">,</span> <span class="bp">None</span><span class="p">,</span> <span class="bp">None</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>Calls to magic methods do not appear in <a class="reference internal" href="mock.html#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">method_calls</span></tt></a>, but they
+are recorded in <a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a>.</p>
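+<p>A small sketch of the difference (using an arbitrary <cite>method</cite> name for
+illustration):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; from mock import MagicMock
+&gt;&gt;&gt; mock = MagicMock()
+&gt;&gt;&gt; result = mock.method()        # an ordinary method call
+&gt;&gt;&gt; text = str(mock)              # a magic method call
+&gt;&gt;&gt; mock.method_calls             # the magic method call is not listed here
+[call.method()]
+&gt;&gt;&gt; mock.mock_calls               # but it is recorded here
+[call.method(), call.__str__()]
+</pre></div>
+</div>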
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">If you use the <cite>spec</cite> keyword argument to create a mock then attempting to
+set a magic method that isn&#8217;t in the spec will raise an <cite>AttributeError</cite>.</p>
+</div>
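+<p>A quick sketch of that behaviour (using <cite>object</cite> as the spec here, since it
+defines no <cite>__len__</cite>):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = Mock(spec=object)
+&gt;&gt;&gt; mock.__len__ = Mock(return_value=0)   # __len__ is not in the spec
+Traceback (most recent call last):
+  ...
+AttributeError: Mock object has no attribute '__len__'
+</pre></div>
+</div>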
+<p>The full list of supported magic methods is:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">__hash__</span></tt>, <tt class="docutils literal"><span class="pre">__sizeof__</span></tt>, <tt class="docutils literal"><span class="pre">__repr__</span></tt> and <tt class="docutils literal"><span class="pre">__str__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__dir__</span></tt>, <tt class="docutils literal"><span class="pre">__format__</span></tt> and <tt class="docutils literal"><span class="pre">__subclasses__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__floor__</span></tt>, <tt class="docutils literal"><span class="pre">__trunc__</span></tt> and <tt class="docutils literal"><span class="pre">__ceil__</span></tt></li>
+<li>Comparisons: <tt class="docutils literal"><span class="pre">__cmp__</span></tt>, <tt class="docutils literal"><span class="pre">__lt__</span></tt>, <tt class="docutils literal"><span class="pre">__gt__</span></tt>, <tt class="docutils literal"><span class="pre">__le__</span></tt>, <tt class="docutils literal"><span class="pre">__ge__</span></tt>,
+<tt class="docutils literal"><span class="pre">__eq__</span></tt> and <tt class="docutils literal"><span class="pre">__ne__</span></tt></li>
+<li>Container methods: <tt class="docutils literal"><span class="pre">__getitem__</span></tt>, <tt class="docutils literal"><span class="pre">__setitem__</span></tt>, <tt class="docutils literal"><span class="pre">__delitem__</span></tt>,
+<tt class="docutils literal"><span class="pre">__contains__</span></tt>, <tt class="docutils literal"><span class="pre">__len__</span></tt>, <tt class="docutils literal"><span class="pre">__iter__</span></tt>, <tt class="docutils literal"><span class="pre">__getslice__</span></tt>,
+<tt class="docutils literal"><span class="pre">__setslice__</span></tt>, <tt class="docutils literal"><span class="pre">__reversed__</span></tt> and <tt class="docutils literal"><span class="pre">__missing__</span></tt></li>
+<li>Context manager: <tt class="docutils literal"><span class="pre">__enter__</span></tt> and <tt class="docutils literal"><span class="pre">__exit__</span></tt></li>
+<li>Unary numeric methods: <tt class="docutils literal"><span class="pre">__neg__</span></tt>, <tt class="docutils literal"><span class="pre">__pos__</span></tt> and <tt class="docutils literal"><span class="pre">__invert__</span></tt></li>
+<li>The numeric methods (including right hand and in-place variants):
+<tt class="docutils literal"><span class="pre">__add__</span></tt>, <tt class="docutils literal"><span class="pre">__sub__</span></tt>, <tt class="docutils literal"><span class="pre">__mul__</span></tt>, <tt class="docutils literal"><span class="pre">__div__</span></tt>,
+<tt class="docutils literal"><span class="pre">__floordiv__</span></tt>, <tt class="docutils literal"><span class="pre">__mod__</span></tt>, <tt class="docutils literal"><span class="pre">__divmod__</span></tt>, <tt class="docutils literal"><span class="pre">__lshift__</span></tt>,
+<tt class="docutils literal"><span class="pre">__rshift__</span></tt>, <tt class="docutils literal"><span class="pre">__and__</span></tt>, <tt class="docutils literal"><span class="pre">__xor__</span></tt>, <tt class="docutils literal"><span class="pre">__or__</span></tt>, and <tt class="docutils literal"><span class="pre">__pow__</span></tt></li>
+<li>Numeric conversion methods: <tt class="docutils literal"><span class="pre">__complex__</span></tt>, <tt class="docutils literal"><span class="pre">__int__</span></tt>, <tt class="docutils literal"><span class="pre">__float__</span></tt>,
+<tt class="docutils literal"><span class="pre">__index__</span></tt> and <tt class="docutils literal"><span class="pre">__coerce__</span></tt></li>
+<li>Descriptor methods: <tt class="docutils literal"><span class="pre">__get__</span></tt>, <tt class="docutils literal"><span class="pre">__set__</span></tt> and <tt class="docutils literal"><span class="pre">__delete__</span></tt></li>
+<li>Pickling: <tt class="docutils literal"><span class="pre">__reduce__</span></tt>, <tt class="docutils literal"><span class="pre">__reduce_ex__</span></tt>, <tt class="docutils literal"><span class="pre">__getinitargs__</span></tt>,
+<tt class="docutils literal"><span class="pre">__getnewargs__</span></tt>, <tt class="docutils literal"><span class="pre">__getstate__</span></tt> and <tt class="docutils literal"><span class="pre">__setstate__</span></tt></li>
+</ul>
+<p>The following methods are supported in Python 2 but don&#8217;t exist in Python 3:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">__unicode__</span></tt>, <tt class="docutils literal"><span class="pre">__long__</span></tt>, <tt class="docutils literal"><span class="pre">__oct__</span></tt>, <tt class="docutils literal"><span class="pre">__hex__</span></tt> and <tt class="docutils literal"><span class="pre">__nonzero__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__truediv__</span></tt> and <tt class="docutils literal"><span class="pre">__rtruediv__</span></tt></li>
+</ul>
+<p>The following methods are supported in Python 3 but don&#8217;t exist in Python 2:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">__bool__</span></tt> and <tt class="docutils literal"><span class="pre">__next__</span></tt></li>
+</ul>
+<p>The following methods exist but are <em>not</em> supported as they are either in use by
+mock, can&#8217;t be set dynamically, or can cause problems:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">__getattr__</span></tt>, <tt class="docutils literal"><span class="pre">__setattr__</span></tt>, <tt class="docutils literal"><span class="pre">__init__</span></tt> and <tt class="docutils literal"><span class="pre">__new__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__prepare__</span></tt>, <tt class="docutils literal"><span class="pre">__instancecheck__</span></tt>, <tt class="docutils literal"><span class="pre">__subclasscheck__</span></tt>, <tt class="docutils literal"><span class="pre">__del__</span></tt></li>
+</ul>
+</div>
+<div class="section" id="magic-mock">
+<h1>Magic Mock<a class="headerlink" href="#magic-mock" title="Permalink to this headline">¶</a></h1>
+<p>There are two <cite>MagicMock</cite> variants: <cite>MagicMock</cite> and <cite>NonCallableMagicMock</cite>.</p>
+<dl class="class">
+<dt id="mock.MagicMock">
+<em class="property">class </em><tt class="descname">MagicMock</tt><big>(</big><em>*args</em>, <em>**kw</em><big>)</big><a class="headerlink" href="#mock.MagicMock" title="Permalink to this definition">¶</a></dt>
+<dd><p><tt class="docutils literal"><span class="pre">MagicMock</span></tt> is a subclass of <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> with default implementations
+of most of the magic methods. You can use <tt class="docutils literal"><span class="pre">MagicMock</span></tt> without having to
+configure the magic methods yourself.</p>
+<p>The constructor parameters have the same meaning as for <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a>.</p>
+<p>If you use the <cite>spec</cite> or <cite>spec_set</cite> arguments then <em>only</em> magic methods
+that exist in the spec will be created.</p>
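+<p>For example, a brief sketch using <cite>dict</cite> as the spec; magic methods
+that <cite>dict</cite> does not define (such as <cite>__int__</cite>) are not created:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = MagicMock(spec=dict)
+&gt;&gt;&gt; isinstance(mock, dict)
+True
+&gt;&gt;&gt; mock[&#39;key&#39;] = &#39;value&#39;
+&gt;&gt;&gt; mock.__setitem__.assert_called_with(&#39;key&#39;, &#39;value&#39;)
+</pre></div>
+</div>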
+</dd></dl>
+
+<dl class="class">
+<dt id="mock.NonCallableMagicMock">
+<em class="property">class </em><tt class="descname">NonCallableMagicMock</tt><big>(</big><em>*args</em>, <em>**kw</em><big>)</big><a class="headerlink" href="#mock.NonCallableMagicMock" title="Permalink to this definition">¶</a></dt>
+<dd><p>A non-callable version of <cite>MagicMock</cite>.</p>
+<p>The constructor parameters have the same meaning as for
+<a class="reference internal" href="#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a>, with the exception of <cite>return_value</cite> and
+<cite>side_effect</cite> which have no meaning on a non-callable mock.</p>
+</dd></dl>
+
+<p>The magic methods are set up with <cite>MagicMock</cite> objects, so you can configure them
+and use them in the usual way:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="mi">3</span><span class="p">]</span> <span class="o">=</span> <span class="s">&#39;fish&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__setitem__</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="s">&#39;fish&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__getitem__</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;result&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="mi">2</span><span class="p">]</span>
+<span class="go">&#39;result&#39;</span>
+</pre></div>
+</div>
+<p>By default many of the protocol methods are required to return objects of a
+specific type. These methods are preconfigured with a default return value, so
+that they can be used without you having to do anything if you aren&#8217;t interested
+in the return value. You can still <em>set</em> the return value manually if you want
+to change the default.</p>
+<p>Methods and their defaults:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">__lt__</span></tt>: NotImplemented</li>
+<li><tt class="docutils literal"><span class="pre">__gt__</span></tt>: NotImplemented</li>
+<li><tt class="docutils literal"><span class="pre">__le__</span></tt>: NotImplemented</li>
+<li><tt class="docutils literal"><span class="pre">__ge__</span></tt>: NotImplemented</li>
+<li><tt class="docutils literal"><span class="pre">__int__</span></tt> : 1</li>
+<li><tt class="docutils literal"><span class="pre">__contains__</span></tt> : False</li>
+<li><tt class="docutils literal"><span class="pre">__len__</span></tt> : 0</li>
+<li><tt class="docutils literal"><span class="pre">__iter__</span></tt> : iter([])</li>
+<li><tt class="docutils literal"><span class="pre">__exit__</span></tt> : False</li>
+<li><tt class="docutils literal"><span class="pre">__complex__</span></tt> : 1j</li>
+<li><tt class="docutils literal"><span class="pre">__float__</span></tt> : 1.0</li>
+<li><tt class="docutils literal"><span class="pre">__bool__</span></tt> : True</li>
+<li><tt class="docutils literal"><span class="pre">__nonzero__</span></tt> : True</li>
+<li><tt class="docutils literal"><span class="pre">__oct__</span></tt> : &#8216;1&#8217;</li>
+<li><tt class="docutils literal"><span class="pre">__hex__</span></tt> : &#8216;0x1&#8217;</li>
+<li><tt class="docutils literal"><span class="pre">__long__</span></tt> : long(1)</li>
+<li><tt class="docutils literal"><span class="pre">__index__</span></tt> : 1</li>
+<li><tt class="docutils literal"><span class="pre">__hash__</span></tt> : default hash for the mock</li>
+<li><tt class="docutils literal"><span class="pre">__str__</span></tt> : default str for the mock</li>
+<li><tt class="docutils literal"><span class="pre">__unicode__</span></tt> : default unicode for the mock</li>
+<li><tt class="docutils literal"><span class="pre">__sizeof__</span></tt>: default sizeof for the mock</li>
+</ul>
+<p>For example:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">int</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">len</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">0</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">hex</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">&#39;0x1&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">[]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">object</span><span class="p">()</span> <span class="ow">in</span> <span class="n">mock</span>
+<span class="go">False</span>
+</pre></div>
+</div>
+<p>The two equality methods, <cite>__eq__</cite> and <cite>__ne__</cite>, are special (changed in
+0.7.2). By default they compare on identity, using a side effect, unless you
+change their return value to return something else:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">MagicMock</span><span class="p">()</span> <span class="o">==</span> <span class="mi">3</span>
+<span class="go">False</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MagicMock</span><span class="p">()</span> <span class="o">!=</span> <span class="mi">3</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__eq__</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">==</span> <span class="mi">3</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>In <cite>0.8</cite>, <cite>__iter__</cite> also gained special handling, implemented with a
+side effect. The return value of <cite>MagicMock.__iter__</cite> can be any iterable
+object and isn&#8217;t required to be an iterator:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__iter__</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="p">[</span><span class="s">&#39;a&#39;</span><span class="p">,</span> <span class="s">&#39;b&#39;</span><span class="p">,</span> <span class="s">&#39;c&#39;</span><span class="p">]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;]</span>
+</pre></div>
+</div>
+<p>If the return value <em>is</em> an iterator, then iterating over it once will consume
+it and subsequent iterations will result in an empty list:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__iter__</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="nb">iter</span><span class="p">([</span><span class="s">&#39;a&#39;</span><span class="p">,</span> <span class="s">&#39;b&#39;</span><span class="p">,</span> <span class="s">&#39;c&#39;</span><span class="p">])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">[]</span>
+</pre></div>
+</div>
+<p><tt class="docutils literal"><span class="pre">MagicMock</span></tt> has all of the supported magic methods configured except for some
+of the obscure and obsolete ones. You can still set these up yourself if you want,
+as the example after the following list shows.</p>
+<p>Magic methods that are supported but not set up by default in <tt class="docutils literal"><span class="pre">MagicMock</span></tt> are:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">__cmp__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__getslice__</span></tt> and <tt class="docutils literal"><span class="pre">__setslice__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__coerce__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__subclasses__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__dir__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__format__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__get__</span></tt>, <tt class="docutils literal"><span class="pre">__set__</span></tt> and <tt class="docutils literal"><span class="pre">__delete__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__reversed__</span></tt> and <tt class="docutils literal"><span class="pre">__missing__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__reduce__</span></tt>, <tt class="docutils literal"><span class="pre">__reduce_ex__</span></tt>, <tt class="docutils literal"><span class="pre">__getinitargs__</span></tt>, <tt class="docutils literal"><span class="pre">__getnewargs__</span></tt>,
+<tt class="docutils literal"><span class="pre">__getstate__</span></tt> and <tt class="docutils literal"><span class="pre">__setstate__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__getformat__</span></tt> and <tt class="docutils literal"><span class="pre">__setformat__</span></tt></li>
+</ul>
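+<p>A minimal sketch of configuring one of these yourself (here <cite>__reversed__</cite>,
+which is supported but not preconfigured):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = MagicMock()
+&gt;&gt;&gt; mock.__reversed__ = Mock(return_value=iter([3, 2, 1]))
+&gt;&gt;&gt; list(reversed(mock))
+[3, 2, 1]
+</pre></div>
+</div>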
+<hr class="docutils" />
+<table class="docutils footnote" frame="void" id="id4" rules="none">
+<colgroup><col class="label" /><col /></colgroup>
+<tbody valign="top">
+<tr><td class="label"><a class="fn-backref" href="#id2">[1]</a></td><td>Magic methods <em>should</em> be looked up on the class rather than the
+instance. Different versions of Python are inconsistent about applying this
+rule. The supported protocol methods should work with all supported versions
+of Python.</td></tr>
+</tbody>
+</table>
+<table class="docutils footnote" frame="void" id="id5" rules="none">
+<colgroup><col class="label" /><col /></colgroup>
+<tbody valign="top">
+<tr><td class="label"><a class="fn-backref" href="#id3">[2]</a></td><td>The function is basically hooked up to the class, but each <tt class="docutils literal"><span class="pre">Mock</span></tt>
+instance is kept isolated from the others.</td></tr>
+</tbody>
+</table>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Mocking Magic Methods</a></li>
+<li><a class="reference internal" href="#magic-mock">Magic Mock</a></li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="sentinel.html"
+ title="previous chapter">Sentinel</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="getting-started.html"
+ title="next chapter">Getting Started with Mock</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/magicmock.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="getting-started.html" title="Getting Started with Mock"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="sentinel.html" title="Sentinel"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/mock.html b/third_party/python/mock-1.0.0/html/mock.html
new file mode 100644
index 0000000000..84ba37b2b7
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/mock.html
@@ -0,0 +1,875 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>The Mock Class &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="Patch Decorators" href="patch.html" />
+ <link rel="prev" title="Mock - Mocking and Testing Library" href="index.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="patch.html" title="Patch Decorators"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="index.html" title="Mock - Mocking and Testing Library"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="the-mock-class">
+<h1>The Mock Class<a class="headerlink" href="#the-mock-class" title="Permalink to this headline">¶</a></h1>
+<p><cite>Mock</cite> is a flexible mock object intended to replace the use of stubs and
+test doubles throughout your code. Mocks are callable and create attributes as
+new mocks when you access them <a class="footnote-reference" href="#id3" id="id1">[1]</a>. Accessing the same attribute will always
+return the same mock. Mocks record how you use them, allowing you to make
+assertions about what your code has done to them.</p>
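+<p>For example, attributes are created lazily and cached, so the same child mock
+is returned on every access:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = Mock()
+&gt;&gt;&gt; mock.some_method
+&lt;Mock name=&#39;mock.some_method&#39; id=&#39;...&#39;&gt;
+&gt;&gt;&gt; mock.some_method is mock.some_method
+True
+</pre></div>
+</div>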
+<p><a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> is a subclass of <cite>Mock</cite> with all the magic methods
+pre-created and ready to use. There are also non-callable variants, useful
+when you are mocking out objects that aren&#8217;t callable:
+<a class="reference internal" href="#mock.NonCallableMock" title="mock.NonCallableMock"><tt class="xref py py-class docutils literal"><span class="pre">NonCallableMock</span></tt></a> and <a class="reference internal" href="magicmock.html#mock.NonCallableMagicMock" title="mock.NonCallableMagicMock"><tt class="xref py py-class docutils literal"><span class="pre">NonCallableMagicMock</span></tt></a></p>
+<p>The <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> decorators make it easy to temporarily replace classes
+in a particular module with a <cite>Mock</cite> object. By default <cite>patch</cite> will create
+a <cite>MagicMock</cite> for you. You can specify an alternative class of <cite>Mock</cite> using
+the <cite>new_callable</cite> argument to <cite>patch</cite>.</p>
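+<p>A small sketch of that default behaviour; the patched target here
+(<cite>os.getcwd</cite>) is chosen purely for illustration:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; from mock import patch
+&gt;&gt;&gt; import os
+&gt;&gt;&gt; with patch(&#39;os.getcwd&#39;, return_value=&#39;/sandbox&#39;) as mock_getcwd:
+...     os.getcwd()
+...
+&#39;/sandbox&#39;
+&gt;&gt;&gt; mock_getcwd.assert_called_once_with()
+</pre></div>
+</div>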
+<span class="target" id="index-0"></span><span class="target" id="index-1"></span><span class="target" id="index-2"></span><span class="target" id="index-3"></span><span class="target" id="index-4"></span><dl class="class">
+<dt id="mock.Mock">
+<em class="property">class </em><tt class="descname">Mock</tt><big>(</big><em>spec=None</em>, <em>side_effect=None</em>, <em>return_value=DEFAULT</em>, <em>wraps=None</em>, <em>name=None</em>, <em>spec_set=None</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.Mock" title="Permalink to this definition">¶</a></dt>
+<dd><p>Create a new <cite>Mock</cite> object. <cite>Mock</cite> takes several optional arguments
+that specify the behaviour of the Mock object:</p>
+<ul>
+<li><p class="first"><cite>spec</cite>: This can be either a list of strings or an existing object (a
+class or instance) that acts as the specification for the mock object. If
+you pass in an object then a list of strings is formed by calling dir on
+the object (excluding unsupported magic attributes and methods).
+Accessing any attribute not in this list will raise an <cite>AttributeError</cite>.</p>
+<p>If <cite>spec</cite> is an object (rather than a list of strings) then
+<a class="reference internal" href="#mock.Mock.__class__" title="mock.Mock.__class__"><tt class="xref py py-attr docutils literal"><span class="pre">__class__</span></tt></a> returns the class of the spec object. This allows mocks
+to pass <cite>isinstance</cite> tests.</p>
+</li>
+<li><p class="first"><cite>spec_set</cite>: A stricter variant of <cite>spec</cite>. If used, attempting to <em>set</em>
+or get an attribute on the mock that isn&#8217;t on the object passed as
+<cite>spec_set</cite> will raise an <cite>AttributeError</cite>.</p>
+</li>
+<li><p class="first"><cite>side_effect</cite>: A function to be called whenever the Mock is called. See
+the <a class="reference internal" href="#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> attribute. Useful for raising exceptions or
+dynamically changing return values. The function is called with the same
+arguments as the mock, and unless it returns <a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a>, the return
+value of this function is used as the return value.</p>
+<p>Alternatively <cite>side_effect</cite> can be an exception class or instance. In
+this case the exception will be raised when the mock is called.</p>
+<p>If <cite>side_effect</cite> is an iterable then each call to the mock will return
+the next value from the iterable. If any of the members of the iterable
+are exceptions they will be raised instead of returned.</p>
+<p>A <cite>side_effect</cite> can be cleared by setting it to <cite>None</cite>.</p>
+</li>
+<li><p class="first"><cite>return_value</cite>: The value returned when the mock is called. By default
+this is a new Mock (created on first access). See the
+<a class="reference internal" href="#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a> attribute.</p>
+</li>
+<li><p class="first"><cite>wraps</cite>: Item for the mock object to wrap. If <cite>wraps</cite> is not None then
+calling the Mock will pass the call through to the wrapped object
+(returning the real result and ignoring <cite>return_value</cite>). Attribute access
+on the mock will return a Mock object that wraps the corresponding
+attribute of the wrapped object (so attempting to access an attribute
+that doesn&#8217;t exist will raise an <cite>AttributeError</cite>).</p>
+<p>If the mock has an explicit <cite>return_value</cite> set then calls are not passed
+to the wrapped object and the <cite>return_value</cite> is returned instead (see the example after this list).</p>
+</li>
+<li><p class="first"><cite>name</cite>: If the mock has a name then it will be used in the repr of the
+mock. This can be useful for debugging. The name is propagated to child
+mocks.</p>
+</li>
+</ul>
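+<p>A short sketch of the <cite>wraps</cite> and <cite>name</cite> arguments described
+above; the wrapped list is arbitrary:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; real = [1, 2, 3]
+&gt;&gt;&gt; mock = Mock(wraps=real, name=&#39;wrapped_list&#39;)
+&gt;&gt;&gt; mock.append(4)
+&gt;&gt;&gt; real
+[1, 2, 3, 4]
+&gt;&gt;&gt; mock.append.assert_called_with(4)
+&gt;&gt;&gt; mock
+&lt;Mock name=&#39;wrapped_list&#39; id=&#39;...&#39;&gt;
+</pre></div>
+</div>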
+<p>Mocks can also be called with arbitrary keyword arguments. These will be
+used to set attributes on the mock after it is created. See the
+<a class="reference internal" href="#mock.Mock.configure_mock" title="mock.Mock.configure_mock"><tt class="xref py py-meth docutils literal"><span class="pre">configure_mock()</span></tt></a> method for details.</p>
+<dl class="method">
+<dt id="mock.Mock.assert_called_with">
+<tt class="descname">assert_called_with</tt><big>(</big><em>*args</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.Mock.assert_called_with" title="Permalink to this definition">¶</a></dt>
+<dd><p>This method is a convenient way of asserting that calls are made in a
+particular way:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">,</span> <span class="n">test</span><span class="o">=</span><span class="s">&#39;wow&#39;</span><span class="p">)</span>
+<span class="go">&lt;Mock name=&#39;mock.method()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">,</span> <span class="n">test</span><span class="o">=</span><span class="s">&#39;wow&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.assert_called_once_with">
+<tt class="descname">assert_called_once_with</tt><big>(</big><em>*args</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.Mock.assert_called_once_with" title="Permalink to this definition">¶</a></dt>
+<dd><p>Assert that the mock was called exactly once and with the specified
+arguments.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">,</span> <span class="n">bar</span><span class="o">=</span><span class="s">&#39;baz&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">,</span> <span class="n">bar</span><span class="o">=</span><span class="s">&#39;baz&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">,</span> <span class="n">bar</span><span class="o">=</span><span class="s">&#39;baz&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">,</span> <span class="n">bar</span><span class="o">=</span><span class="s">&#39;baz&#39;</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">AssertionError</span>: <span class="n">Expected to be called once. Called 2 times.</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.assert_any_call">
+<tt class="descname">assert_any_call</tt><big>(</big><em>*args</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.Mock.assert_any_call" title="Permalink to this definition">¶</a></dt>
+<dd><p>Assert that the mock has been called with the specified arguments.</p>
+<p>The assert passes if the mock has <em>ever</em> been called, unlike
+<a class="reference internal" href="#mock.Mock.assert_called_with" title="mock.Mock.assert_called_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt></a> and <a class="reference internal" href="#mock.Mock.assert_called_once_with" title="mock.Mock.assert_called_once_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_once_with()</span></tt></a> that
+only pass if the call is the most recent one.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="n">arg</span><span class="o">=</span><span class="s">&#39;thing&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;some&#39;</span><span class="p">,</span> <span class="s">&#39;thing&#39;</span><span class="p">,</span> <span class="s">&#39;else&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_any_call</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="n">arg</span><span class="o">=</span><span class="s">&#39;thing&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.assert_has_calls">
+<tt class="descname">assert_has_calls</tt><big>(</big><em>calls</em>, <em>any_order=False</em><big>)</big><a class="headerlink" href="#mock.Mock.assert_has_calls" title="Permalink to this definition">¶</a></dt>
+<dd><p>Assert that the mock has been called with the specified calls.
+The <cite>mock_calls</cite> list is checked for the calls.</p>
+<p>If <cite>any_order</cite> is False (the default) then the calls must be
+sequential. There can be extra calls before or after the
+specified calls.</p>
+<p>If <cite>any_order</cite> is True then the calls can be in any order, but
+they must all appear in <a class="reference internal" href="#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">2</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">4</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">calls</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="p">(</span><span class="mi">2</span><span class="p">),</span> <span class="n">call</span><span class="p">(</span><span class="mi">3</span><span class="p">)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_has_calls</span><span class="p">(</span><span class="n">calls</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">calls</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="p">(</span><span class="mi">4</span><span class="p">),</span> <span class="n">call</span><span class="p">(</span><span class="mi">2</span><span class="p">),</span> <span class="n">call</span><span class="p">(</span><span class="mi">3</span><span class="p">)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_has_calls</span><span class="p">(</span><span class="n">calls</span><span class="p">,</span> <span class="n">any_order</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.reset_mock">
+<tt class="descname">reset_mock</tt><big>(</big><big>)</big><a class="headerlink" href="#mock.Mock.reset_mock" title="Permalink to this definition">¶</a></dt>
+<dd><p>The reset_mock method resets all the call attributes on a mock object:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;hello&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">called</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">reset_mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">called</span>
+<span class="go">False</span>
+</pre></div>
+</div>
+<p>This can be useful where you want to make a series of assertions that
+reuse the same object. Note that <cite>reset_mock</cite> <em>doesn&#8217;t</em> clear the
+return value, <a class="reference internal" href="#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> or any child attributes you have
+set using normal assignment. Child mocks and the return value mock
+(if any) are reset as well.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.mock_add_spec">
+<tt class="descname">mock_add_spec</tt><big>(</big><em>spec</em>, <em>spec_set=False</em><big>)</big><a class="headerlink" href="#mock.Mock.mock_add_spec" title="Permalink to this definition">¶</a></dt>
+<dd><p>Add a spec to a mock. <cite>spec</cite> can either be an object or a
+list of strings. Only attributes on the <cite>spec</cite> can be fetched as
+attributes from the mock.</p>
+<p>If <cite>spec_set</cite> is <cite>True</cite> then only attributes on the spec can be set.</p>
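+<p>For example (the attribute names here are arbitrary):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = Mock()
+&gt;&gt;&gt; mock.mock_add_spec([&#39;connect&#39;, &#39;close&#39;])
+&gt;&gt;&gt; mock.connect
+&lt;Mock name=&#39;mock.connect&#39; id=&#39;...&#39;&gt;
+&gt;&gt;&gt; mock.query
+Traceback (most recent call last):
+  ...
+AttributeError: Mock object has no attribute &#39;query&#39;
+</pre></div>
+</div>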
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.attach_mock">
+<tt class="descname">attach_mock</tt><big>(</big><em>mock</em>, <em>attribute</em><big>)</big><a class="headerlink" href="#mock.Mock.attach_mock" title="Permalink to this definition">¶</a></dt>
+<dd><p>Attach a mock as an attribute of this one, replacing its name and
+parent. Calls to the attached mock will be recorded in the
+<a class="reference internal" href="#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">method_calls</span></tt></a> and <a class="reference internal" href="#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> attributes of this one.</p>
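+<p>For example:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; parent = Mock()
+&gt;&gt;&gt; child = Mock(return_value=None)
+&gt;&gt;&gt; parent.attach_mock(child, &#39;child&#39;)
+&gt;&gt;&gt; parent.child(1)
+&gt;&gt;&gt; parent.mock_calls
+[call.child(1)]
+</pre></div>
+</div>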
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.configure_mock">
+<tt class="descname">configure_mock</tt><big>(</big><em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.Mock.configure_mock" title="Permalink to this definition">¶</a></dt>
+<dd><p>Set attributes on the mock through keyword arguments.</p>
+<p>Attributes plus return values and side effects can be set on child
+mocks using standard dot notation and unpacking a dictionary in the
+method call:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">attrs</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;method.return_value&#39;</span><span class="p">:</span> <span class="mi">3</span><span class="p">,</span> <span class="s">&#39;other.side_effect&#39;</span><span class="p">:</span> <span class="ne">KeyError</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">configure_mock</span><span class="p">(</span><span class="o">**</span><span class="n">attrs</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">other</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>
+</pre></div>
+</div>
+<p>The same thing can be achieved in the constructor call to mocks:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">attrs</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;method.return_value&#39;</span><span class="p">:</span> <span class="mi">3</span><span class="p">,</span> <span class="s">&#39;other.side_effect&#39;</span><span class="p">:</span> <span class="ne">KeyError</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">some_attribute</span><span class="o">=</span><span class="s">&#39;eggs&#39;</span><span class="p">,</span> <span class="o">**</span><span class="n">attrs</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span>
+<span class="go">&#39;eggs&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">other</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>
+</pre></div>
+</div>
+<p><cite>configure_mock</cite> exists to make it easier to do configuration
+after the mock has been created.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.__dir__">
+<tt class="descname">__dir__</tt><big>(</big><big>)</big><a class="headerlink" href="#mock.Mock.__dir__" title="Permalink to this definition">¶</a></dt>
+<dd><p><cite>Mock</cite> objects limit the results of <cite>dir(some_mock)</cite> to useful values.
+For mocks with a <cite>spec</cite> this includes all the permitted attributes
+for the mock.</p>
+<p>See <a class="reference internal" href="helpers.html#mock.FILTER_DIR" title="mock.FILTER_DIR"><tt class="xref py py-data docutils literal"><span class="pre">FILTER_DIR</span></tt></a> for what this filtering does, and how to
+switch it off.</p>
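+<p>For example (the attribute names in the spec are arbitrary):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = Mock(spec=[&#39;connect&#39;, &#39;close&#39;])
+&gt;&gt;&gt; &#39;connect&#39; in dir(mock)
+True
+&gt;&gt;&gt; &#39;arbitrary_attribute&#39; in dir(mock)
+False
+</pre></div>
+</div>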
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock._get_child_mock">
+<tt class="descname">_get_child_mock</tt><big>(</big><em>**kw</em><big>)</big><a class="headerlink" href="#mock.Mock._get_child_mock" title="Permalink to this definition">¶</a></dt>
+<dd><p>Create the child mocks for attributes and return value.
+By default child mocks will be the same type as the parent.
+Subclasses of Mock may want to override this to customize the way
+child mocks are made.</p>
+<p>For non-callable mocks the callable variant will be used (rather than
+any custom subclass).</p>
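+<p>A minimal sketch of such a subclass (the class name is hypothetical); the
+override makes every child a plain <cite>MagicMock</cite> instead of the parent&#8217;s type:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; class PlainChildMock(MagicMock):
+...     def _get_child_mock(self, **kw):
+...         # children are plain MagicMocks rather than PlainChildMock instances
+...         return MagicMock(**kw)
+...
+&gt;&gt;&gt; parent = PlainChildMock()
+&gt;&gt;&gt; isinstance(parent.child, PlainChildMock)
+False
+&gt;&gt;&gt; isinstance(parent.child, MagicMock)
+True
+</pre></div>
+</div>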
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.called">
+<tt class="descname">called</tt><a class="headerlink" href="#mock.Mock.called" title="Permalink to this definition">¶</a></dt>
+<dd><p>A boolean representing whether or not the mock object has been called:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">called</span>
+<span class="go">False</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">called</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.call_count">
+<tt class="descname">call_count</tt><a class="headerlink" href="#mock.Mock.call_count" title="Permalink to this definition">¶</a></dt>
+<dd><p>An integer telling you how many times the mock object has been called:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_count</span>
+<span class="go">0</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_count</span>
+<span class="go">2</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.return_value">
+<tt class="descname">return_value</tt><a class="headerlink" href="#mock.Mock.return_value" title="Permalink to this definition">¶</a></dt>
+<dd><p>Set this to configure the value returned by calling the mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;fish&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">&#39;fish&#39;</span>
+</pre></div>
+</div>
+<p>The default return value is a mock object and you can configure it in
+the normal way:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">attribute</span> <span class="o">=</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">Attribute</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock()()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<p><cite>return_value</cite> can also be set in the constructor:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.side_effect">
+<tt class="descname">side_effect</tt><a class="headerlink" href="#mock.Mock.side_effect" title="Permalink to this definition">¶</a></dt>
+<dd><p>This can either be a function to be called when the mock is called,
+or an exception (class or instance) to be raised.</p>
+<p>If you pass in a function it will be called with same arguments as the
+mock and unless the function returns the <a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a> singleton the
+call to the mock will then return whatever the function returns. If the
+function returns <a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a> then the mock will return its normal
+value (from the <a class="reference internal" href="#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a>).</p>
+<p>An example of a mock that raises an exception (to test exception
+handling of an API):</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="ne">Exception</span><span class="p">(</span><span class="s">&#39;Boom!&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">Exception</span>: <span class="n">Boom!</span>
+</pre></div>
+</div>
+<p>Using <cite>side_effect</cite> to return a sequence of values:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="p">[</span><span class="mi">3</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">1</span><span class="p">]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(),</span> <span class="n">mock</span><span class="p">(),</span> <span class="n">mock</span><span class="p">()</span>
+<span class="go">(3, 2, 1)</span>
+</pre></div>
+</div>
+<p>The <cite>side_effect</cite> function is called with the same arguments as the
+mock (so it is wise for it to take arbitrary args and keyword
+arguments) and whatever it returns is used as the return value for
+the call. The exception is if <cite>side_effect</cite> returns <a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a>,
+in which case the normal <a class="reference internal" href="#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a> is used.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">DEFAULT</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">side_effect</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+<p><cite>side_effect</cite> can be set in the constructor. Here&#8217;s an example that
+adds one to the value the mock is called with and returns it:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">side_effect</span> <span class="o">=</span> <span class="k">lambda</span> <span class="n">value</span><span class="p">:</span> <span class="n">value</span> <span class="o">+</span> <span class="mi">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">side_effect</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">3</span><span class="p">)</span>
+<span class="go">4</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="o">-</span><span class="mi">8</span><span class="p">)</span>
+<span class="go">-7</span>
+</pre></div>
+</div>
+<p>Setting <cite>side_effect</cite> to <cite>None</cite> clears it:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">Mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="ne">KeyError</span><span class="p">,</span> <span class="n">return_value</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.call_args">
+<tt class="descname">call_args</tt><a class="headerlink" href="#mock.Mock.call_args" title="Permalink to this definition">¶</a></dt>
+<dd><p>This is either <cite>None</cite> (if the mock hasn&#8217;t been called), or the
+arguments that the mock was last called with. This will be in the
+form of a tuple: the first member is any positional arguments the mock
+was called with (or an empty tuple) and the second member is any
+keyword arguments (or an empty dictionary).</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">print</span> <span class="n">mock</span><span class="o">.</span><span class="n">call_args</span>
+<span class="go">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args</span>
+<span class="go">call()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args</span> <span class="o">==</span> <span class="p">()</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args</span>
+<span class="go">call(3, 4)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args</span> <span class="o">==</span> <span class="p">((</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">),)</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">key</span><span class="o">=</span><span class="s">&#39;fish&#39;</span><span class="p">,</span> <span class="nb">next</span><span class="o">=</span><span class="s">&#39;w00t!&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args</span>
+<span class="go">call(3, 4, 5, key=&#39;fish&#39;, next=&#39;w00t!&#39;)</span>
+</pre></div>
+</div>
+<p><cite>call_args</cite>, along with members of the lists <a class="reference internal" href="#mock.Mock.call_args_list" title="mock.Mock.call_args_list"><tt class="xref py py-attr docutils literal"><span class="pre">call_args_list</span></tt></a>,
+<a class="reference internal" href="#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">method_calls</span></tt></a> and <a class="reference internal" href="#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> are <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> objects.
+These are tuples, so they can be unpacked to get at the individual
+arguments and make more complex assertions. See
+<a class="reference internal" href="helpers.html#calls-as-tuples"><em>calls as tuples</em></a>.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.call_args_list">
+<tt class="descname">call_args_list</tt><a class="headerlink" href="#mock.Mock.call_args_list" title="Permalink to this definition">¶</a></dt>
+<dd><p>This is a list of all the calls made to the mock object in sequence
+(so the length of the list is the number of times it has been
+called). Before any calls have been made it is an empty list. The
+<a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> object can be used for conveniently constructing lists of
+calls to compare with <cite>call_args_list</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="n">key</span><span class="o">=</span><span class="s">&#39;fish&#39;</span><span class="p">,</span> <span class="nb">next</span><span class="o">=</span><span class="s">&#39;w00t!&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args_list</span>
+<span class="go">[call(), call(3, 4), call(key=&#39;fish&#39;, next=&#39;w00t!&#39;)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">expected</span> <span class="o">=</span> <span class="p">[(),</span> <span class="p">((</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">),),</span> <span class="p">({</span><span class="s">&#39;key&#39;</span><span class="p">:</span> <span class="s">&#39;fish&#39;</span><span class="p">,</span> <span class="s">&#39;next&#39;</span><span class="p">:</span> <span class="s">&#39;w00t!&#39;</span><span class="p">},)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args_list</span> <span class="o">==</span> <span class="n">expected</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>Members of <cite>call_args_list</cite> are <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> objects. These can be
+unpacked as tuples to get at the individual arguments. See
+<a class="reference internal" href="helpers.html#calls-as-tuples"><em>calls as tuples</em></a>.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.method_calls">
+<tt class="descname">method_calls</tt><a class="headerlink" href="#mock.Mock.method_calls" title="Permalink to this definition">¶</a></dt>
+<dd><p>As well as tracking calls to themselves, mocks also track calls to
+methods and attributes, and <em>their</em> methods and attributes:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.method()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">property</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">attribute</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.property.method.attribute()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method_calls</span>
+<span class="go">[call.method(), call.property.method.attribute()]</span>
+</pre></div>
+</div>
+<p>Members of <cite>method_calls</cite> are <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> objects. These can be
+unpacked as tuples to get at the individual arguments. See
+<a class="reference internal" href="helpers.html#calls-as-tuples"><em>calls as tuples</em></a>.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.mock_calls">
+<tt class="descname">mock_calls</tt><a class="headerlink" href="#mock.Mock.mock_calls" title="Permalink to this definition">¶</a></dt>
+<dd><p><cite>mock_calls</cite> records <em>all</em> calls to the mock object, its methods, magic
+methods <em>and</em> return value mocks.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">result</span> <span class="o">=</span> <span class="n">mock</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">first</span><span class="p">(</span><span class="n">a</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="go">&lt;MagicMock name=&#39;mock.first()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">second</span><span class="p">()</span>
+<span class="go">&lt;MagicMock name=&#39;mock.second()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">int</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">result</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
+<span class="go">&lt;MagicMock name=&#39;mock()()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">expected</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">),</span> <span class="n">call</span><span class="o">.</span><span class="n">first</span><span class="p">(</span><span class="n">a</span><span class="o">=</span><span class="mi">3</span><span class="p">),</span> <span class="n">call</span><span class="o">.</span><span class="n">second</span><span class="p">(),</span>
+<span class="gp">... </span><span class="n">call</span><span class="o">.</span><span class="n">__int__</span><span class="p">(),</span> <span class="n">call</span><span class="p">()(</span><span class="mi">1</span><span class="p">)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span> <span class="o">==</span> <span class="n">expected</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>Members of <cite>mock_calls</cite> are <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> objects. These can be
+unpacked as tuples to get at the individual arguments. See
+<a class="reference internal" href="helpers.html#calls-as-tuples"><em>calls as tuples</em></a>.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.__class__">
+<tt class="descname">__class__</tt><a class="headerlink" href="#mock.Mock.__class__" title="Permalink to this definition">¶</a></dt>
+<dd><p>Normally the <cite>__class__</cite> attribute of an object will return its type.
+For a mock object with a <cite>spec</cite> <cite>__class__</cite> returns the spec class
+instead. This allows mock objects to pass <cite>isinstance</cite> tests for the
+object they are replacing / masquerading as:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">spec</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">isinstance</span><span class="p">(</span><span class="n">mock</span><span class="p">,</span> <span class="nb">int</span><span class="p">)</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p><cite>__class__</cite> is assignable to; this allows a mock to pass an
+<cite>isinstance</cite> check without forcing you to use a spec:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__class__</span> <span class="o">=</span> <span class="nb">dict</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">isinstance</span><span class="p">(</span><span class="n">mock</span><span class="p">,</span> <span class="nb">dict</span><span class="p">)</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="mock.NonCallableMock">
+<em class="property">class </em><tt class="descname">NonCallableMock</tt><big>(</big><em>spec=None</em>, <em>wraps=None</em>, <em>name=None</em>, <em>spec_set=None</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.NonCallableMock" title="Permalink to this definition">¶</a></dt>
+<dd><p>A non-callable version of <cite>Mock</cite>. The constructor parameters have the same
+meaning as <cite>Mock</cite>, with the exception of <cite>return_value</cite> and <cite>side_effect</cite>
+which have no meaning on a non-callable mock.</p>
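+<p>A minimal sketch of the difference: configured attributes work as normal,
+but calling the mock itself fails:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = NonCallableMock()
+&gt;&gt;&gt; mock.some_attribute = 3
+&gt;&gt;&gt; mock.some_attribute
+3
+&gt;&gt;&gt; mock()
+Traceback (most recent call last):
+  ...
+TypeError: &#39;NonCallableMock&#39; object is not callable
+</pre></div>
+</div>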
+</dd></dl>
+
+<p>Mock objects that use a class or an instance as a <cite>spec</cite> or <cite>spec_set</cite> are able
+to pass <cite>isinstance</cite> tests:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">spec</span><span class="o">=</span><span class="n">SomeClass</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">isinstance</span><span class="p">(</span><span class="n">mock</span><span class="p">,</span> <span class="n">SomeClass</span><span class="p">)</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">spec_set</span><span class="o">=</span><span class="n">SomeClass</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">isinstance</span><span class="p">(</span><span class="n">mock</span><span class="p">,</span> <span class="n">SomeClass</span><span class="p">)</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>The <cite>Mock</cite> classes have support for mocking magic methods. See <a class="reference internal" href="magicmock.html#magic-methods"><em>magic
+methods</em></a> for the full details.</p>
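+<p>As a quick sketch, <cite>MagicMock</cite> pre-configures the supported magic methods so
+they can be configured like any other attribute:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = MagicMock()
+&gt;&gt;&gt; mock.__len__.return_value = 3
+&gt;&gt;&gt; len(mock)
+3
+&gt;&gt;&gt; mock.__contains__.return_value = True
+&gt;&gt;&gt; &#39;fish&#39; in mock
+True
+</pre></div>
+</div>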
+<p>The mock classes and the <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> decorators all take arbitrary keyword
+arguments for configuration. For the <cite>patch</cite> decorators the keywords are
+passed to the constructor of the mock being created. The keyword arguments
+are for configuring attributes of the mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">attribute</span><span class="o">=</span><span class="mi">3</span><span class="p">,</span> <span class="n">other</span><span class="o">=</span><span class="s">&#39;fish&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">attribute</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">other</span>
+<span class="go">&#39;fish&#39;</span>
+</pre></div>
+</div>
+<p>The return value and side effect of child mocks can be set in the same way,
+using dotted notation. As you can&#8217;t use dotted names directly in a call you
+have to create a dictionary and unpack it using <cite>**</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">attrs</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;method.return_value&#39;</span><span class="p">:</span> <span class="mi">3</span><span class="p">,</span> <span class="s">&#39;other.side_effect&#39;</span><span class="p">:</span> <span class="ne">KeyError</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">some_attribute</span><span class="o">=</span><span class="s">&#39;eggs&#39;</span><span class="p">,</span> <span class="o">**</span><span class="n">attrs</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span>
+<span class="go">&#39;eggs&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">other</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>
+</pre></div>
+</div>
+<dl class="class">
+<dt id="mock.PropertyMock">
+<em class="property">class </em><tt class="descname">PropertyMock</tt><big>(</big><em>*args</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.PropertyMock" title="Permalink to this definition">¶</a></dt>
+<dd><p>A mock intended to be used as a property, or other descriptor, on a class.
+<cite>PropertyMock</cite> provides <cite>__get__</cite> and <cite>__set__</cite> methods so you can specify
+a return value when it is fetched.</p>
+<p>Fetching a <cite>PropertyMock</cite> instance from an object calls the mock, with
+no args. Setting it calls the mock with the value being set.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Foo</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="nd">@property</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">foo</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="s">&#39;something&#39;</span>
+<span class="gp">... </span> <span class="nd">@foo.setter</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">foo</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">value</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.Foo.foo&#39;</span><span class="p">,</span> <span class="n">new_callable</span><span class="o">=</span><span class="n">PropertyMock</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_foo</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">mock_foo</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;mockity-mock&#39;</span>
+<span class="gp">... </span> <span class="n">this_foo</span> <span class="o">=</span> <span class="n">Foo</span><span class="p">()</span>
+<span class="gp">... </span> <span class="k">print</span> <span class="n">this_foo</span><span class="o">.</span><span class="n">foo</span>
+<span class="gp">... </span> <span class="n">this_foo</span><span class="o">.</span><span class="n">foo</span> <span class="o">=</span> <span class="mi">6</span>
+<span class="gp">...</span>
+<span class="go">mockity-mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_foo</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call(), call(6)]</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<p>Because of the way mock attributes are stored you can&#8217;t directly attach a
+<cite>PropertyMock</cite> to a mock object. Instead you can attach it to the mock type
+object:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">p</span> <span class="o">=</span> <span class="n">PropertyMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">type</span><span class="p">(</span><span class="n">m</span><span class="p">)</span><span class="o">.</span><span class="n">foo</span> <span class="o">=</span> <span class="n">p</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">foo</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">p</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<span class="target" id="index-5"></span></div>
+<div class="section" id="calling">
+<span id="index-6"></span><h1>Calling<a class="headerlink" href="#calling" title="Permalink to this headline">¶</a></h1>
+<p>Mock objects are callable. The call will return the value set as the
+<a class="reference internal" href="#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a> attribute. The default return value is a new Mock
+object; it is created the first time the return value is accessed (either
+explicitly or by calling the Mock) - but it is stored and the same one
+returned each time.</p>
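+<p>A short sketch of this behaviour: the same default mock is returned on every
+call:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; m = Mock()
+&gt;&gt;&gt; m.return_value is m()
+True
+&gt;&gt;&gt; m() is m()
+True
+</pre></div>
+</div>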
+<p>Calls made to the object will be recorded in the attributes
+like <a class="reference internal" href="#mock.Mock.call_args" title="mock.Mock.call_args"><tt class="xref py py-attr docutils literal"><span class="pre">call_args</span></tt></a> and <a class="reference internal" href="#mock.Mock.call_args_list" title="mock.Mock.call_args_list"><tt class="xref py py-attr docutils literal"><span class="pre">call_args_list</span></tt></a>.</p>
+<p>If <a class="reference internal" href="#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> is set then it will be called after the call has
+been recorded, so if <cite>side_effect</cite> raises an exception the call is still
+recorded.</p>
+<p>The simplest way to make a mock raise an exception when called is to make
+<a class="reference internal" href="#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> an exception class or instance:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="ne">IndexError</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">IndexError</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call(1, 2, 3)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="ne">KeyError</span><span class="p">(</span><span class="s">&#39;Bang!&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">(</span><span class="s">&#39;two&#39;</span><span class="p">,</span> <span class="s">&#39;three&#39;</span><span class="p">,</span> <span class="s">&#39;four&#39;</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>: <span class="n">&#39;Bang!&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call(1, 2, 3), call(&#39;two&#39;, &#39;three&#39;, &#39;four&#39;)]</span>
+</pre></div>
+</div>
+<p>If <cite>side_effect</cite> is a function then whatever that function returns is what
+calls to the mock return. The <cite>side_effect</cite> function is called with the
+same arguments as the mock. This allows you to vary the return value of the
+call dynamically, based on the input:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="n">value</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">value</span> <span class="o">+</span> <span class="mi">1</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">side_effect</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
+<span class="go">2</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">(</span><span class="mi">2</span><span class="p">)</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call(1), call(2)]</span>
+</pre></div>
+</div>
+<p>If you want the mock to still return the default return value (a new mock), or
+any set return value, then there are two ways of doing this. Either return
+<cite>mock.return_value</cite> from inside <cite>side_effect</cite>, or return <a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">m</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">side_effect</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="mi">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">DEFAULT</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">side_effect</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+<p>To remove a <cite>side_effect</cite>, and return to the default behaviour, set the
+<cite>side_effect</cite> to <cite>None</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="mi">6</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="mi">3</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">side_effect</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">6</span>
+</pre></div>
+</div>
+<p>The <cite>side_effect</cite> can also be any iterable object. Repeated calls to the mock
+will return values from the iterable (until the iterable is exhausted and
+a <cite>StopIteration</cite> is raised):</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="p">[</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">2</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">StopIteration</span>
+</pre></div>
+</div>
+<p>If any members of the iterable are exceptions they will be raised instead of
+returned:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">iterable</span> <span class="o">=</span> <span class="p">(</span><span class="mi">33</span><span class="p">,</span> <span class="ne">ValueError</span><span class="p">,</span> <span class="mi">66</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">iterable</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">33</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">ValueError</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">66</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="deleting-attributes">
+<span id="id2"></span><h1>Deleting Attributes<a class="headerlink" href="#deleting-attributes" title="Permalink to this headline">¶</a></h1>
+<p>Mock objects create attributes on demand. This allows them to pretend to be
+objects of any type.</p>
+<p>You may want a mock object to return <cite>False</cite> to a <cite>hasattr</cite> call, or raise an
+<cite>AttributeError</cite> when an attribute is fetched. You can do this by providing
+an object as a <cite>spec</cite> for a mock, but that isn&#8217;t always convenient.</p>
+<p>You &#8220;block&#8221; attributes by deleting them. Once deleted, accessing an attribute
+will raise an <cite>AttributeError</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">hasattr</span><span class="p">(</span><span class="n">mock</span><span class="p">,</span> <span class="s">&#39;m&#39;</span><span class="p">)</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">del</span> <span class="n">mock</span><span class="o">.</span><span class="n">m</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">hasattr</span><span class="p">(</span><span class="n">mock</span><span class="p">,</span> <span class="s">&#39;m&#39;</span><span class="p">)</span>
+<span class="go">False</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">del</span> <span class="n">mock</span><span class="o">.</span><span class="n">f</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">f</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AttributeError</span>: <span class="n">f</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="attaching-mocks-as-attributes">
+<h1>Attaching Mocks as Attributes<a class="headerlink" href="#attaching-mocks-as-attributes" title="Permalink to this headline">¶</a></h1>
+<p>When you attach a mock as an attribute of another mock (or as the return
+value) it becomes a &#8220;child&#8221; of that mock. Calls to the child are recorded in
+the <a class="reference internal" href="#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">method_calls</span></tt></a> and <a class="reference internal" href="#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> attributes of the
+parent. This is useful for configuring child mocks and then attaching them to
+the parent, or for attaching mocks to a parent that records all calls to the
+children and allows you to make assertions about the order of calls between
+mocks:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">child1</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">child2</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span><span class="o">.</span><span class="n">child1</span> <span class="o">=</span> <span class="n">child1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span><span class="o">.</span><span class="n">child2</span> <span class="o">=</span> <span class="n">child2</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">child1</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">child2</span><span class="p">(</span><span class="mi">2</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call.child1(1), call.child2(2)]</span>
+</pre></div>
+</div>
+<p>The exception to this is if the mock has a name. This allows you to prevent
+the &#8220;parenting&#8221; if for some reason you don&#8217;t want it to happen.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">not_a_child</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s">&#39;not-a-child&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">attribute</span> <span class="o">=</span> <span class="n">not_a_child</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">attribute</span><span class="p">()</span>
+<span class="go">&lt;MagicMock name=&#39;not-a-child()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[]</span>
+</pre></div>
+</div>
+<p>Mocks created for you by <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> are automatically given names. To
+attach mocks that have names to a parent you use the <a class="reference internal" href="#mock.Mock.attach_mock" title="mock.Mock.attach_mock"><tt class="xref py py-meth docutils literal"><span class="pre">attach_mock()</span></tt></a>
+method:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">thing1</span> <span class="o">=</span> <span class="nb">object</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing2</span> <span class="o">=</span> <span class="nb">object</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.thing1&#39;</span><span class="p">,</span> <span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span> <span class="k">as</span> <span class="n">child1</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.thing2&#39;</span><span class="p">,</span> <span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span> <span class="k">as</span> <span class="n">child2</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">parent</span><span class="o">.</span><span class="n">attach_mock</span><span class="p">(</span><span class="n">child1</span><span class="p">,</span> <span class="s">&#39;child1&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">parent</span><span class="o">.</span><span class="n">attach_mock</span><span class="p">(</span><span class="n">child2</span><span class="p">,</span> <span class="s">&#39;child2&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">child1</span><span class="p">(</span><span class="s">&#39;one&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">child2</span><span class="p">(</span><span class="s">&#39;two&#39;</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call.child1(&#39;one&#39;), call.child2(&#39;two&#39;)]</span>
+</pre></div>
+</div>
+<hr class="docutils" />
+<table class="docutils footnote" frame="void" id="id3" rules="none">
+<colgroup><col class="label" /><col /></colgroup>
+<tbody valign="top">
+<tr><td class="label"><a class="fn-backref" href="#id1">[1]</a></td><td>The only exceptions are magic methods and attributes (those that have
+leading and trailing double underscores). Mock doesn&#8217;t create these but
+instead raises an <tt class="docutils literal"><span class="pre">AttributeError</span></tt>. This is because the interpreter
+will often implicitly request these methods, and gets <em>very</em> confused if it
+gets a new Mock object when it expects a magic method. If you need magic
+method support see <a class="reference internal" href="magicmock.html#magic-methods"><em>magic methods</em></a>.</td></tr>
+</tbody>
+</table>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">The Mock Class</a></li>
+<li><a class="reference internal" href="#calling">Calling</a></li>
+<li><a class="reference internal" href="#deleting-attributes">Deleting Attributes</a></li>
+<li><a class="reference internal" href="#attaching-mocks-as-attributes">Attaching Mocks as Attributes</a></li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="index.html"
+ title="previous chapter">Mock - Mocking and Testing Library</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="patch.html"
+ title="next chapter">Patch Decorators</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/mock.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="patch.html" title="Patch Decorators"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="index.html" title="Mock - Mocking and Testing Library"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/mocksignature.html b/third_party/python/mock-1.0.0/html/mocksignature.html
new file mode 100644
index 0000000000..5b266f0310
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/mocksignature.html
@@ -0,0 +1,352 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>mocksignature &mdash; Mock 0.8.1alpha1 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '0.8.1alpha1',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 0.8.1alpha1 documentation" href="index.html" />
+ <link rel="next" title="Getting Started with Mock" href="getting-started.html" />
+ <link rel="prev" title="Mocking Magic Methods" href="magicmock.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="getting-started.html" title="Getting Started with Mock"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="magicmock.html" title="Mocking Magic Methods"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 0.8.1alpha1 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="mocksignature">
+<h1>mocksignature<a class="headerlink" href="#mocksignature" title="Permalink to this headline">¶</a></h1>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last"><a class="reference internal" href="helpers.html#auto-speccing"><em>Autospeccing</em></a>, added in mock 0.8, is a more advanced version of
+<cite>mocksignature</cite> and can be used for many of the same use cases.</p>
+</div>
+<p>A problem with using mock objects to replace real objects in your tests is that
+<a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> can be <em>too</em> flexible. Your code can treat the mock objects in
+any way and you have to manually check that they were called correctly. If your
+code calls functions or methods with the wrong number of arguments then mocks
+don&#8217;t complain.</p>
+<p>The solution to this is <cite>mocksignature</cite>, which creates functions with the
+same signature as the original that delegate to a mock. You can interrogate
+the mock in the usual way to check it has been called with the <em>right</em>
+arguments, but if it is called with the wrong number of arguments it will
+raise a <cite>TypeError</cite> in the same way your production code would.</p>
+<p>Another advantage is that your mocked objects are real functions, which can
+be useful when your code uses
+<a class="reference external" href="http://docs.python.org/library/inspect.html">inspect</a> or depends on
+functions being function objects.</p>
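+<p>As a rough sketch of what this means in practice, assuming <cite>mocksignature</cite>
+builds an ordinary function object with the copied argument names:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; import inspect
+&gt;&gt;&gt; def func(a, b, c=None):
+...     pass
+...
+&gt;&gt;&gt; checked = mocksignature(func)
+&gt;&gt;&gt; inspect.isfunction(checked)
+True
+&gt;&gt;&gt; inspect.getargspec(checked)[0]
+[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;]
+</pre></div>
+</div>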
+<dl class="function">
+<dt id="mock.mocksignature">
+<tt class="descname">mocksignature</tt><big>(</big><em>func</em>, <em>mock=None</em>, <em>skipfirst=False</em><big>)</big><a class="headerlink" href="#mock.mocksignature" title="Permalink to this definition">¶</a></dt>
+<dd><p>Create a new function with the same signature as <cite>func</cite> that delegates
+to <cite>mock</cite>. If <cite>skipfirst</cite> is True the first argument is skipped, useful
+for methods where <cite>self</cite> needs to be omitted from the new function.</p>
+<p>If you don&#8217;t pass in a <cite>mock</cite> then one will be created for you.</p>
+<p>Functions returned by <cite>mocksignature</cite> have many of the same attributes
+and assert methods as a mock object.</p>
+<p>The mock is set as the <cite>mock</cite> attribute of the returned function for easy
+access.</p>
+<p><cite>mocksignature</cite> can also be used with classes. It copies the signature of
+the <cite>__init__</cite> method.</p>
+<p>When used with callable objects (instances) it copies the signature of the
+<cite>__call__</cite> method.</p>
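+<p>A small sketch of the class case (<cite>Point</cite> here is just a hypothetical example
+class):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; class Point(object):
+...     def __init__(self, x, y):
+...         pass
+...
+&gt;&gt;&gt; FakePoint = mocksignature(Point)
+&gt;&gt;&gt; FakePoint.return_value = &#39;a point&#39;
+&gt;&gt;&gt; FakePoint(1, 2)
+&#39;a point&#39;
+&gt;&gt;&gt; FakePoint.assert_called_with(1, 2)
+</pre></div>
+</div>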
+</dd></dl>
+
+<p><cite>mocksignature</cite> will work out if it is mocking the signature of a method on
+an instance or a method on a class and do the &#8220;right thing&#8221; with the <cite>self</cite>
+argument in both cases.</p>
+<p>Because of a limitation in the way that arguments are collected by functions
+created by <cite>mocksignature</cite>, they are <em>always</em> passed as positional arguments
+(including defaults) and not keyword arguments.</p>
+<div class="section" id="mocksignature-api">
+<h2>mocksignature api<a class="headerlink" href="#mocksignature-api" title="Permalink to this headline">¶</a></h2>
+<p>Although the objects returned by <cite>mocksignature</cite> are real function objects,
+they have much of the same api as the <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> class. This includes the
+assert methods:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">func</span><span class="p">(</span><span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">func</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">called</span>
+<span class="go">False</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="mi">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">called</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">4</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">AssertionError: Expected call</span>: <span class="n">mock(1, 2, 4)</span>
+<span class="go">Actual call: mock(1, 2, 3)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">call_count</span>
+<span class="go">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="ne">IndexError</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="p">(</span><span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="mi">6</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">IndexError</span>
+</pre></div>
+</div>
+<p>The mock object that is being delegated to is available as the <cite>mock</cite> attribute
+of the function created by <cite>mocksignature</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call(1, 2, 3), call(4, 5, 6)]</span>
+</pre></div>
+</div>
+<p>The methods and attributes available on functions returned by <cite>mocksignature</cite>
+are:</p>
+<blockquote>
+<div><a class="reference internal" href="mock.html#mock.Mock.assert_any_call" title="mock.Mock.assert_any_call"><tt class="xref py py-meth docutils literal"><span class="pre">assert_any_call()</span></tt></a>, <a class="reference internal" href="mock.html#mock.Mock.assert_called_once_with" title="mock.Mock.assert_called_once_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_once_with()</span></tt></a>,
+<a class="reference internal" href="mock.html#mock.Mock.assert_called_with" title="mock.Mock.assert_called_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt></a>, <a class="reference internal" href="mock.html#mock.Mock.assert_has_calls" title="mock.Mock.assert_has_calls"><tt class="xref py py-meth docutils literal"><span class="pre">assert_has_calls()</span></tt></a>,
+<a class="reference internal" href="mock.html#mock.Mock.call_args" title="mock.Mock.call_args"><tt class="xref py py-attr docutils literal"><span class="pre">call_args</span></tt></a>, <a class="reference internal" href="mock.html#mock.Mock.call_args_list" title="mock.Mock.call_args_list"><tt class="xref py py-attr docutils literal"><span class="pre">call_args_list</span></tt></a>,
+<a class="reference internal" href="mock.html#mock.Mock.call_count" title="mock.Mock.call_count"><tt class="xref py py-attr docutils literal"><span class="pre">call_count</span></tt></a>, <a class="reference internal" href="mock.html#mock.Mock.called" title="mock.Mock.called"><tt class="xref py py-attr docutils literal"><span class="pre">called</span></tt></a>,
+<a class="reference internal" href="mock.html#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">method_calls</span></tt></a>, <cite>mock</cite>, <a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a>,
+<a class="reference internal" href="mock.html#mock.Mock.reset_mock" title="mock.Mock.reset_mock"><tt class="xref py py-meth docutils literal"><span class="pre">reset_mock()</span></tt></a>, <a class="reference internal" href="mock.html#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a>, and
+<a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a>.</div></blockquote>
+</div>
+<div class="section" id="example-use">
+<h2>Example use<a class="headerlink" href="#example-use" title="Permalink to this headline">¶</a></h2>
+<div class="section" id="basic-use">
+<h3>Basic use<a class="headerlink" href="#basic-use" title="Permalink to this headline">¶</a></h3>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">function</span><span class="p">(</span><span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">function</span><span class="p">,</span> <span class="n">mock</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">TypeError</span>: <span class="n">&lt;lambda&gt;() takes at least 2 arguments (0 given)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;some value&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="s">&#39;foo&#39;</span><span class="p">)</span>
+<span class="go">&#39;some value&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="s">&#39;foo&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="keyword-arguments">
+<h3>Keyword arguments<a class="headerlink" href="#keyword-arguments" title="Permalink to this headline">¶</a></h3>
+<p>Note that arguments to functions created by <cite>mocksignature</cite> are always passed
+in to the underlying mock by position even when called with keywords:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">function</span><span class="p">(</span><span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">function</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="bp">None</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mocking-methods-and-self">
+<h3>Mocking methods and self<a class="headerlink" href="#mocking-methods-and-self" title="Permalink to this headline">¶</a></h3>
+<p>When you use <cite>mocksignature</cite> to replace a method on a class then <cite>self</cite>
+will be included in the method signature - and you will need to include
+the instance when you do your asserts.</p>
+<p>Because of a quirk of the way Python 2 wraps methods fetched from a class,
+we can <em>get</em> the <cite>return_value</cite> from a function set on a class, but we can&#8217;t
+set it. We have to do this through the exposed <cite>mock</cite> attribute instead:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">SomeClass</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">SomeClass</span><span class="o">.</span><span class="n">method</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">SomeClass</span><span class="o">.</span><span class="n">method</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">SomeClass</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span> <span class="o">=</span> <span class="n">SomeClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">TypeError</span>: <span class="n">&lt;lambda&gt;() takes at least 4 arguments (1 given)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">instance</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>When you use <cite>mocksignature</cite> on instance methods <cite>self</cite> isn&#8217;t included (and we
+can set the <cite>return_value</cite> etc directly):</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">SomeClass</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span> <span class="o">=</span> <span class="n">SomeClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mocksignature-with-classes">
+<h3>mocksignature with classes<a class="headerlink" href="#mocksignature-with-classes" title="Permalink to this headline">¶</a></h3>
+<p>When used with a class <cite>mocksignature</cite> copies the signature of the <cite>__init__</cite>
+method.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Something</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">foo</span><span class="p">,</span> <span class="n">bar</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MockSomething</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">Something</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span> <span class="o">=</span> <span class="n">MockSomething</span><span class="p">(</span><span class="mi">10</span><span class="p">,</span> <span class="mi">9</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">instance</span> <span class="ow">is</span> <span class="n">MockSomething</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MockSomething</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">10</span><span class="p">,</span> <span class="mi">9</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MockSomething</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">TypeError</span>: <span class="n">&lt;lambda&gt;() takes at least 2 arguments (0 given)</span>
+</pre></div>
+</div>
+<p>Because the object returned by <cite>mocksignature</cite> is a function rather than a
+<cite>Mock</cite> you lose the other capabilities of <cite>Mock</cite>, like dynamic attribute
+creation.</p>
+</div>
+<div class="section" id="mocksignature-with-callable-objects">
+<h3>mocksignature with callable objects<a class="headerlink" href="#mocksignature-with-callable-objects" title="Permalink to this headline">¶</a></h3>
+<p>When used with a callable object <cite>mocksignature</cite> copies the signature of the
+<cite>__call__</cite> method.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Something</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__call__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">spam</span><span class="p">,</span> <span class="n">eggs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">something</span> <span class="o">=</span> <span class="n">Something</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_something</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">something</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">result</span> <span class="o">=</span> <span class="n">mock_something</span><span class="p">(</span><span class="mi">10</span><span class="p">,</span> <span class="mi">9</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_something</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">10</span><span class="p">,</span> <span class="mi">9</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_something</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">TypeError</span>: <span class="n">&lt;lambda&gt;() takes at least 2 arguments (0 given)</span>
+</pre></div>
+</div>
+</div>
+</div>
+<div class="section" id="mocksignature-argument-to-patch">
+<h2>mocksignature argument to patch<a class="headerlink" href="#mocksignature-argument-to-patch" title="Permalink to this headline">¶</a></h2>
+<p><cite>mocksignature</cite> is available as a keyword argument to <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> or
+<a class="reference internal" href="patch.html#mock.patch.object" title="mock.patch.object"><tt class="xref py py-func docutils literal"><span class="pre">patch.object()</span></tt></a>. It can be used with functions / methods / classes and
+callable objects.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">SomeClass</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;method&#39;</span><span class="p">,</span> <span class="n">mocksignature</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">(</span><span class="n">mock_method</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">instance</span> <span class="o">=</span> <span class="n">SomeClass</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">mock_method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">... </span> <span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">mock_method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">instance</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="bp">None</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">mocksignature</a><ul>
+<li><a class="reference internal" href="#mocksignature-api">mocksignature api</a></li>
+<li><a class="reference internal" href="#example-use">Example use</a><ul>
+<li><a class="reference internal" href="#basic-use">Basic use</a></li>
+<li><a class="reference internal" href="#keyword-arguments">Keyword arguments</a></li>
+<li><a class="reference internal" href="#mocking-methods-and-self">Mocking methods and self</a></li>
+<li><a class="reference internal" href="#mocksignature-with-classes">mocksignature with classes</a></li>
+<li><a class="reference internal" href="#mocksignature-with-callable-objects">mocksignature with callable objects</a></li>
+</ul>
+</li>
+<li><a class="reference internal" href="#mocksignature-argument-to-patch">mocksignature argument to patch</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="magicmock.html"
+ title="previous chapter">Mocking Magic Methods</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="getting-started.html"
+ title="next chapter">Getting Started with Mock</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/mocksignature.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="getting-started.html" title="Getting Started with Mock"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="magicmock.html" title="Mocking Magic Methods"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 0.8.1alpha1 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Feb 16, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.2.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/objects.inv b/third_party/python/mock-1.0.0/html/objects.inv
new file mode 100644
index 0000000000..3638892571
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/objects.inv
Binary files differ
diff --git a/third_party/python/mock-1.0.0/html/output.txt b/third_party/python/mock-1.0.0/html/output.txt
new file mode 100644
index 0000000000..56093e7502
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/output.txt
@@ -0,0 +1,126 @@
+Results of doctest builder run on 2012-10-07 18:33:27
+=====================================================
+
+Document: index
+---------------
+1 items passed all tests:
+ 35 tests in default
+35 tests in 1 items.
+35 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Document: compare
+-----------------
+1 items passed all tests:
+ 39 tests in default
+39 tests in 1 items.
+39 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Document: getting-started
+-------------------------
+1 items passed all tests:
+ 83 tests in default
+83 tests in 1 items.
+83 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Document: magicmock
+-------------------
+1 items passed all tests:
+ 40 tests in default
+40 tests in 1 items.
+40 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Document: patch
+---------------
+1 items passed all tests:
+ 75 tests in default
+75 tests in 1 items.
+75 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 2 tests in default (cleanup code)
+2 tests in 1 items.
+2 passed and 0 failed.
+Test passed.
+
+Document: helpers
+-----------------
+1 items passed all tests:
+ 87 tests in default
+87 tests in 1 items.
+87 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 2 tests in default (cleanup code)
+2 tests in 1 items.
+2 passed and 0 failed.
+Test passed.
+
+Document: examples
+------------------
+1 items passed all tests:
+ 171 tests in default
+171 tests in 1 items.
+171 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Document: sentinel
+------------------
+1 items passed all tests:
+ 6 tests in default
+6 tests in 1 items.
+6 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Document: mock
+--------------
+1 items passed all tests:
+ 187 tests in default
+187 tests in 1 items.
+187 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Doctest summary
+===============
+ 723 tests
+ 0 failures in tests
+ 0 failures in setup code
+ 0 failures in cleanup code
diff --git a/third_party/python/mock-1.0.0/html/patch.html b/third_party/python/mock-1.0.0/html/patch.html
new file mode 100644
index 0000000000..e7164d147d
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/patch.html
@@ -0,0 +1,648 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Patch Decorators &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="Helpers" href="helpers.html" />
+ <link rel="prev" title="The Mock Class" href="mock.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="helpers.html" title="Helpers"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="mock.html" title="The Mock Class"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="patch-decorators">
+<h1>Patch Decorators<a class="headerlink" href="#patch-decorators" title="Permalink to this headline">¶</a></h1>
+<p>The patch decorators are used for patching objects only within the scope of
+the function they decorate. They automatically handle the unpatching for you,
+even if exceptions are raised. All of these functions can also be used in with
+statements or as class decorators.</p>
+<div class="section" id="patch">
+<h2>patch<a class="headerlink" href="#patch" title="Permalink to this headline">¶</a></h2>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last"><cite>patch</cite> is straightforward to use. The key is to do the patching in the
+right namespace. See the section <a class="reference internal" href="#id1">where to patch</a>.</p>
+</div>
+<dl class="function">
+<dt id="mock.patch">
+<tt class="descname">patch</tt><big>(</big><em>target</em>, <em>new=DEFAULT</em>, <em>spec=None</em>, <em>create=False</em>, <em>spec_set=None</em>, <em>autospec=None</em>, <em>new_callable=None</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.patch" title="Permalink to this definition">¶</a></dt>
+<dd><p><cite>patch</cite> acts as a function decorator, class decorator or a context
+manager. Inside the body of the function or with statement, the <cite>target</cite>
+is patched with a <cite>new</cite> object. When the function/with statement exits
+the patch is undone.</p>
+<p>If <cite>new</cite> is omitted, then the target is replaced with a
+<a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a>. If <cite>patch</cite> is used as a decorator and <cite>new</cite> is
+omitted, the created mock is passed in as an extra argument to the
+decorated function. If <cite>patch</cite> is used as a context manager the created
+mock is returned by the context manager.</p>
+<p><cite>target</cite> should be a string in the form <cite>&#8216;package.module.ClassName&#8217;</cite>. The
+<cite>target</cite> is imported and the specified object replaced with the <cite>new</cite>
+object, so the <cite>target</cite> must be importable from the environment you are
+calling <cite>patch</cite> from. The target is imported when the decorated function
+is executed, not at decoration time.</p>
+<p>The <cite>spec</cite> and <cite>spec_set</cite> keyword arguments are passed to the <cite>MagicMock</cite>
+if patch is creating one for you.</p>
+<p>In addition you can pass <cite>spec=True</cite> or <cite>spec_set=True</cite>, which causes
+patch to pass in the object being mocked as the spec/spec_set object.</p>
+<p><cite>new_callable</cite> allows you to specify a different class, or callable object,
+that will be called to create the <cite>new</cite> object. By default <cite>MagicMock</cite> is
+used.</p>
+<p>A more powerful form of <cite>spec</cite> is <cite>autospec</cite>. If you set <cite>autospec=True</cite>
+then the mock will be created with a spec from the object being replaced.
+All attributes of the mock will also have the spec of the corresponding
+attribute of the object being replaced. Methods and functions being mocked
+will have their arguments checked and will raise a <cite>TypeError</cite> if they are
+called with the wrong signature. For mocks
+replacing a class, their return value (the &#8216;instance&#8217;) will have the same
+spec as the class. See the <a class="reference internal" href="helpers.html#mock.create_autospec" title="mock.create_autospec"><tt class="xref py py-func docutils literal"><span class="pre">create_autospec()</span></tt></a> function and
+<a class="reference internal" href="helpers.html#auto-speccing"><em>Autospeccing</em></a>.</p>
+<p>Instead of <cite>autospec=True</cite> you can pass <cite>autospec=some_object</cite> to use an
+arbitrary object as the spec instead of the one being replaced.</p>
+<p>By default <cite>patch</cite> will fail to replace attributes that don&#8217;t exist. If
+you pass in <cite>create=True</cite>, and the attribute doesn&#8217;t exist, patch will
+create the attribute for you when the patched function is called, and
+delete it again afterwards. This is useful for writing tests against
+attributes that your production code creates at runtime. It is off by
+default because it can be dangerous. With it switched on you can write
+passing tests against APIs that don&#8217;t actually exist!</p>
+<p>Patch can be used as a <cite>TestCase</cite> class decorator. It works by
+decorating each test method in the class. This reduces the boilerplate
+code when your test methods share a common set of patches. <cite>patch</cite> finds
+tests by looking for method names that start with <cite>patch.TEST_PREFIX</cite>.
+By default this is <cite>test</cite>, which matches the way <cite>unittest</cite> finds tests.
+You can specify an alternative prefix by setting <cite>patch.TEST_PREFIX</cite>.</p>
+<p>Patch can be used as a context manager, with the with statement. Here the
+patching applies to the indented block after the with statement. If you
+use &#8220;as&#8221; then the patched object will be bound to the name after the
+&#8220;as&#8221;; very useful if <cite>patch</cite> is creating a mock object for you.</p>
+<p><cite>patch</cite> takes arbitrary keyword arguments. These will be passed to
+the <cite>Mock</cite> (or <cite>new_callable</cite>) on construction.</p>
+<p><cite>patch.dict(...)</cite>, <cite>patch.multiple(...)</cite> and <cite>patch.object(...)</cite> are
+available for alternate use-cases.</p>
+</dd></dl>
+
+<p><cite>patch</cite> as function decorator, creating the mock for you and passing it into
+the decorated function:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;__main__.SomeClass&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">function</span><span class="p">(</span><span class="n">normal_argument</span><span class="p">,</span> <span class="n">mock_class</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">print</span> <span class="n">mock_class</span> <span class="ow">is</span> <span class="n">SomeClass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="p">(</span><span class="bp">None</span><span class="p">)</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>Patching a class replaces the class with a <cite>MagicMock</cite> <em>instance</em>. If the
+class is instantiated in the code under test then it will be the
+<a class="reference internal" href="mock.html#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a> of the mock that will be used.</p>
+<p>If the class is instantiated multiple times you could use
+<a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> to return a new mock each time. Alternatively you
+can set the <cite>return_value</cite> to be anything you want.</p>
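+<p>As a rough sketch of that approach (assuming <cite>patch</cite> and <cite>MagicMock</cite> have
+been imported from <cite>mock</cite>): because the return value of a <cite>side_effect</cite>
+callable is used as the result of the call, a callable that builds a new mock
+gives you a different &#8216;instance&#8217; for every instantiation. The <cite>Thing</cite> class
+here is purely illustrative.</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; class Thing(object):
+...     pass
+...
+&gt;&gt;&gt; with patch('__main__.Thing') as MockThing:
+...     MockThing.side_effect = lambda: MagicMock()
+...     first = Thing()
+...     second = Thing()
+...     assert first is not second
+...
+</pre></div>
+</div>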
+<p>To configure return values on methods of <em>instances</em> on the patched class
+you must do this on the <cite>return_value</cite>. For example:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Class</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.Class&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">MockClass</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">instance</span> <span class="o">=</span> <span class="n">MockClass</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">... </span> <span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;foo&#39;</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">Class</span><span class="p">()</span> <span class="ow">is</span> <span class="n">instance</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">Class</span><span class="p">()</span><span class="o">.</span><span class="n">method</span><span class="p">()</span> <span class="o">==</span> <span class="s">&#39;foo&#39;</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<p>If you use <cite>spec</cite> or <cite>spec_set</cite> and <cite>patch</cite> is replacing a <em>class</em>, then the
+return value of the created mock will have the same spec.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">Original</span> <span class="o">=</span> <span class="n">Class</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.Class&#39;</span><span class="p">,</span> <span class="n">spec</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MockClass</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span> <span class="o">=</span> <span class="n">MockClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">instance</span><span class="p">,</span> <span class="n">Original</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">patcher</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span>
+</pre></div>
+</div>
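+<p>A small illustrative sketch of the <cite>autospec</cite> behaviour described above
+(the <cite>Something</cite> class is only an example): the methods on the
+&#8216;instance&#8217; take their spec from the real class, so calls on them can be
+asserted in the usual way.</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; class Something(object):
+...     def method(self, a, b=None):
+...         pass
+...
+&gt;&gt;&gt; with patch('__main__.Something', autospec=True) as MockSomething:
+...     instance = MockSomething()
+...     instance.method(1, b=2)
+...     instance.method.assert_called_once_with(1, b=2)
+...
+</pre></div>
+</div>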
+<p>The <cite>new_callable</cite> argument is useful where you want to use an alternative
+class to the default <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> for the created mock. For example, if
+you wanted a <a class="reference internal" href="mock.html#mock.NonCallableMock" title="mock.NonCallableMock"><tt class="xref py py-class docutils literal"><span class="pre">NonCallableMock</span></tt></a> to be used:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span> <span class="o">=</span> <span class="nb">object</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.thing&#39;</span><span class="p">,</span> <span class="n">new_callable</span><span class="o">=</span><span class="n">NonCallableMock</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_thing</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">thing</span> <span class="ow">is</span> <span class="n">mock_thing</span>
+<span class="gp">... </span> <span class="n">thing</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">TypeError</span>: <span class="n">&#39;NonCallableMock&#39; object is not callable</span>
+</pre></div>
+</div>
+<p>Another use case might be to replace an object with a <cite>StringIO</cite> instance:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">StringIO</span> <span class="kn">import</span> <span class="n">StringIO</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">foo</span><span class="p">():</span>
+<span class="gp">... </span> <span class="k">print</span> <span class="s">&#39;Something&#39;</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;sys.stdout&#39;</span><span class="p">,</span> <span class="n">new_callable</span><span class="o">=</span><span class="n">StringIO</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">(</span><span class="n">mock_stdout</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">foo</span><span class="p">()</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mock_stdout</span><span class="o">.</span><span class="n">getvalue</span><span class="p">()</span> <span class="o">==</span> <span class="s">&#39;Something</span><span class="se">\n</span><span class="s">&#39;</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>When <cite>patch</cite> is creating a mock for you, it is common that the first thing
+you need to do is to configure the mock. Some of that configuration can be done
+in the call to patch. Any arbitrary keywords you pass into the call will be
+used to set attributes on the created mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.thing&#39;</span><span class="p">,</span> <span class="n">first</span><span class="o">=</span><span class="s">&#39;one&#39;</span><span class="p">,</span> <span class="n">second</span><span class="o">=</span><span class="s">&#39;two&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_thing</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_thing</span><span class="o">.</span><span class="n">first</span>
+<span class="go">&#39;one&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_thing</span><span class="o">.</span><span class="n">second</span>
+<span class="go">&#39;two&#39;</span>
+</pre></div>
+</div>
+<p>As well as attributes on the created mock itself, attributes of child mocks, like the
+<a class="reference internal" href="mock.html#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a> and <a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a>, can
+also be configured. These aren&#8217;t syntactically valid to pass in directly as
+keyword arguments, but a dictionary with these as keys can still be expanded
+into a <cite>patch</cite> call using <cite>**</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">config</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;method.return_value&#39;</span><span class="p">:</span> <span class="mi">3</span><span class="p">,</span> <span class="s">&#39;other.side_effect&#39;</span><span class="p">:</span> <span class="ne">KeyError</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.thing&#39;</span><span class="p">,</span> <span class="o">**</span><span class="n">config</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_thing</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_thing</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_thing</span><span class="o">.</span><span class="n">other</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>
+</pre></div>
+</div>
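+<p>A minimal sketch of the class decorator behaviour described above, reusing
+the <cite>thing</cite> attribute of <cite>__main__</cite> from the earlier examples (the
+<cite>ThingTest</cite> class and its methods are only illustrative): methods whose
+names start with <cite>patch.TEST_PREFIX</cite> (<cite>test</cite> by default) are wrapped and
+receive the created mock as an extra argument; other methods are left alone.</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; import unittest
+&gt;&gt;&gt; @patch('__main__.thing')
+... class ThingTest(unittest.TestCase):
+...     def test_is_patched(self, mock_thing):
+...         assert thing is mock_thing
+...     def helper(self):
+...         # not wrapped: the name does not start with patch.TEST_PREFIX
+...         return thing
+...
+&gt;&gt;&gt; result = unittest.TestResult()
+&gt;&gt;&gt; _ = ThingTest('test_is_patched').run(result)
+&gt;&gt;&gt; assert result.wasSuccessful()
+</pre></div>
+</div>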
+</div>
+<div class="section" id="patch-object">
+<h2>patch.object<a class="headerlink" href="#patch-object" title="Permalink to this headline">¶</a></h2>
+<dl class="function">
+<dt id="mock.patch.object">
+<tt class="descclassname">patch.</tt><tt class="descname">object</tt><big>(</big><em>target</em>, <em>attribute</em>, <em>new=DEFAULT</em>, <em>spec=None</em>, <em>create=False</em>, <em>spec_set=None</em>, <em>autospec=None</em>, <em>new_callable=None</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.patch.object" title="Permalink to this definition">¶</a></dt>
+<dd><p>patch the named member (<cite>attribute</cite>) on an object (<cite>target</cite>) with a mock
+object.</p>
+<p><cite>patch.object</cite> can be used as a decorator, class decorator or a context
+manager. Arguments <cite>new</cite>, <cite>spec</cite>, <cite>create</cite>, <cite>spec_set</cite>, <cite>autospec</cite> and
+<cite>new_callable</cite> have the same meaning as for <cite>patch</cite>. Like <cite>patch</cite>,
+<cite>patch.object</cite> takes arbitrary keyword arguments for configuring the mock
+object it creates.</p>
+<p>When used as a class decorator <cite>patch.object</cite> honours <cite>patch.TEST_PREFIX</cite>
+for choosing which methods to wrap.</p>
+</dd></dl>
+
+<p>You can call <cite>patch.object</cite> with either three arguments or two arguments. The
+three argument form takes the object to be patched, the attribute name and the
+object to replace the attribute with.</p>
+<p>When calling with the two argument form you omit the replacement object, and a
+mock is created for you and passed in as an extra argument to the decorated
+function:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;class_method&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">(</span><span class="n">mock_method</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">class_method</span><span class="p">(</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">mock_method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+<p><cite>spec</cite>, <cite>create</cite> and the other arguments to <cite>patch.object</cite> have the same
+meaning as they do for <cite>patch</cite>.</p>
+</div>
+<div class="section" id="patch-dict">
+<h2>patch.dict<a class="headerlink" href="#patch-dict" title="Permalink to this headline">¶</a></h2>
+<dl class="function">
+<dt id="mock.patch.dict">
+<tt class="descclassname">patch.</tt><tt class="descname">dict</tt><big>(</big><em>in_dict</em>, <em>values=()</em>, <em>clear=False</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.patch.dict" title="Permalink to this definition">¶</a></dt>
+<dd><p>Patch a dictionary, or dictionary like object, and restore the dictionary
+to its original state after the test.</p>
+<p><cite>in_dict</cite> can be a dictionary or a mapping like container. If it is a
+mapping then it must at least support getting, setting and deleting items
+plus iterating over keys.</p>
+<p><cite>in_dict</cite> can also be a string specifying the name of the dictionary, which
+will then be fetched by importing it.</p>
+<p><cite>values</cite> can be a dictionary of values to set in the dictionary. <cite>values</cite>
+can also be an iterable of <cite>(key, value)</cite> pairs.</p>
+<p>If <cite>clear</cite> is True then the dictionary will be cleared before the new
+values are set.</p>
+<p><cite>patch.dict</cite> can also be called with arbitrary keyword arguments to set
+values in the dictionary.</p>
+<p><cite>patch.dict</cite> can be used as a context manager, decorator or class
+decorator. When used as a class decorator <cite>patch.dict</cite> honours
+<cite>patch.TEST_PREFIX</cite> for choosing which methods to wrap.</p>
+</dd></dl>
+
+<p><cite>patch.dict</cite> can be used to add members to a dictionary, or simply let a test
+change a dictionary, and ensure the dictionary is restored when the test
+ends.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">patch</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">foo</span> <span class="o">=</span> <span class="p">{}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="n">foo</span><span class="p">,</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">}):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">foo</span> <span class="o">==</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">}</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">foo</span> <span class="o">==</span> <span class="p">{}</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">os</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="s">&#39;os.environ&#39;</span><span class="p">,</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">}):</span>
+<span class="gp">... </span> <span class="k">print</span> <span class="n">os</span><span class="o">.</span><span class="n">environ</span><span class="p">[</span><span class="s">&#39;newkey&#39;</span><span class="p">]</span>
+<span class="gp">...</span>
+<span class="go">newvalue</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="s">&#39;newkey&#39;</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">os</span><span class="o">.</span><span class="n">environ</span>
+</pre></div>
+</div>
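+<p>A short sketch of the <cite>clear</cite> argument described above: the dictionary is
+emptied before the new values are set, and the original contents are still
+restored when the block ends.</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; foo = {'existing': 'value'}
+&gt;&gt;&gt; with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+...     assert foo == {'newkey': 'newvalue'}
+...
+&gt;&gt;&gt; assert foo == {'existing': 'value'}
+</pre></div>
+</div>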
+<p>Keywords can be used in the <cite>patch.dict</cite> call to set values in the dictionary:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mymodule</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymodule</span><span class="o">.</span><span class="n">function</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;fish&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="s">&#39;sys.modules&#39;</span><span class="p">,</span> <span class="n">mymodule</span><span class="o">=</span><span class="n">mymodule</span><span class="p">):</span>
+<span class="gp">... </span> <span class="kn">import</span> <span class="nn">mymodule</span>
+<span class="gp">... </span> <span class="n">mymodule</span><span class="o">.</span><span class="n">function</span><span class="p">(</span><span class="s">&#39;some&#39;</span><span class="p">,</span> <span class="s">&#39;args&#39;</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="go">&#39;fish&#39;</span>
+</pre></div>
+</div>
+<p><cite>patch.dict</cite> can be used with dictionary like objects that aren&#8217;t actually
+dictionaries. At the very minimum they must support item getting, setting,
+deleting and either iteration or membership test. This corresponds to the
+magic methods <cite>__getitem__</cite>, <cite>__setitem__</cite>, <cite>__delitem__</cite> and either
+<cite>__iter__</cite> or <cite>__contains__</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Container</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">values</span> <span class="o">=</span> <span class="p">{}</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__getitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">values</span><span class="p">[</span><span class="n">name</span><span class="p">]</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__setitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="n">value</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">values</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="o">=</span> <span class="n">value</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__delitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">del</span> <span class="bp">self</span><span class="o">.</span><span class="n">values</span><span class="p">[</span><span class="n">name</span><span class="p">]</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__iter__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="nb">iter</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">values</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span> <span class="o">=</span> <span class="n">Container</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span><span class="p">[</span><span class="s">&#39;one&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="mi">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="n">thing</span><span class="p">,</span> <span class="n">one</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">two</span><span class="o">=</span><span class="mi">3</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">thing</span><span class="p">[</span><span class="s">&#39;one&#39;</span><span class="p">]</span> <span class="o">==</span> <span class="mi">2</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">thing</span><span class="p">[</span><span class="s">&#39;two&#39;</span><span class="p">]</span> <span class="o">==</span> <span class="mi">3</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">thing</span><span class="p">[</span><span class="s">&#39;one&#39;</span><span class="p">]</span> <span class="o">==</span> <span class="mi">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="nb">list</span><span class="p">(</span><span class="n">thing</span><span class="p">)</span> <span class="o">==</span> <span class="p">[</span><span class="s">&#39;one&#39;</span><span class="p">]</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="patch-multiple">
+<h2>patch.multiple<a class="headerlink" href="#patch-multiple" title="Permalink to this headline">¶</a></h2>
+<dl class="function">
+<dt id="mock.patch.multiple">
+<tt class="descclassname">patch.</tt><tt class="descname">multiple</tt><big>(</big><em>target</em>, <em>spec=None</em>, <em>create=False</em>, <em>spec_set=None</em>, <em>autospec=None</em>, <em>new_callable=None</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.patch.multiple" title="Permalink to this definition">¶</a></dt>
+<dd><p>Perform multiple patches in a single call. It takes the object to be
+patched (either as an object or a string to fetch the object by importing)
+and keyword arguments for the patches:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">multiple</span><span class="p">(</span><span class="n">settings</span><span class="p">,</span> <span class="n">FIRST_PATCH</span><span class="o">=</span><span class="s">&#39;one&#39;</span><span class="p">,</span> <span class="n">SECOND_PATCH</span><span class="o">=</span><span class="s">&#39;two&#39;</span><span class="p">):</span>
+ <span class="o">...</span>
+</pre></div>
+</div>
+<p>Use <a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a> as the value if you want <cite>patch.multiple</cite> to create
+mocks for you. In this case the created mocks are passed into a decorated
+function by keyword, and a dictionary is returned when <cite>patch.multiple</cite> is
+used as a context manager.</p>
+<p><cite>patch.multiple</cite> can be used as a decorator, class decorator or a context
+manager. The arguments <cite>spec</cite>, <cite>spec_set</cite>, <cite>create</cite>, <cite>autospec</cite> and
+<cite>new_callable</cite> have the same meaning as for <cite>patch</cite>. These arguments will
+be applied to <em>all</em> patches done by <cite>patch.multiple</cite>.</p>
+<p>When used as a class decorator <cite>patch.multiple</cite> honours <cite>patch.TEST_PREFIX</cite>
+for choosing which methods to wrap.</p>
+</dd></dl>
+
+<p>If you want <cite>patch.multiple</cite> to create mocks for you, then you can use
+<a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a> as the value. If you use <cite>patch.multiple</cite> as a decorator
+then the created mocks are passed into the decorated function by keyword.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span> <span class="o">=</span> <span class="nb">object</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">other</span> <span class="o">=</span> <span class="nb">object</span><span class="p">()</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch.multiple</span><span class="p">(</span><span class="s">&#39;__main__&#39;</span><span class="p">,</span> <span class="n">thing</span><span class="o">=</span><span class="n">DEFAULT</span><span class="p">,</span> <span class="n">other</span><span class="o">=</span><span class="n">DEFAULT</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test_function</span><span class="p">(</span><span class="n">thing</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">thing</span><span class="p">,</span> <span class="n">MagicMock</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">MagicMock</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test_function</span><span class="p">()</span>
+</pre></div>
+</div>
+<p><cite>patch.multiple</cite> can be nested with other <cite>patch</cite> decorators, but put arguments
+passed by keyword <em>after</em> any of the standard arguments created by <cite>patch</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;sys.exit&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="nd">@patch.multiple</span><span class="p">(</span><span class="s">&#39;__main__&#39;</span><span class="p">,</span> <span class="n">thing</span><span class="o">=</span><span class="n">DEFAULT</span><span class="p">,</span> <span class="n">other</span><span class="o">=</span><span class="n">DEFAULT</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test_function</span><span class="p">(</span><span class="n">mock_exit</span><span class="p">,</span> <span class="n">other</span><span class="p">,</span> <span class="n">thing</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="s">&#39;other&#39;</span> <span class="ow">in</span> <span class="nb">repr</span><span class="p">(</span><span class="n">other</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="s">&#39;thing&#39;</span> <span class="ow">in</span> <span class="nb">repr</span><span class="p">(</span><span class="n">thing</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="s">&#39;exit&#39;</span> <span class="ow">in</span> <span class="nb">repr</span><span class="p">(</span><span class="n">mock_exit</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test_function</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>If <cite>patch.multiple</cite> is used as a context manager, the value returned by the
+context manager is a dictionary where created mocks are keyed by name:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">multiple</span><span class="p">(</span><span class="s">&#39;__main__&#39;</span><span class="p">,</span> <span class="n">thing</span><span class="o">=</span><span class="n">DEFAULT</span><span class="p">,</span> <span class="n">other</span><span class="o">=</span><span class="n">DEFAULT</span><span class="p">)</span> <span class="k">as</span> <span class="n">values</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="s">&#39;other&#39;</span> <span class="ow">in</span> <span class="nb">repr</span><span class="p">(</span><span class="n">values</span><span class="p">[</span><span class="s">&#39;other&#39;</span><span class="p">])</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="s">&#39;thing&#39;</span> <span class="ow">in</span> <span class="nb">repr</span><span class="p">(</span><span class="n">values</span><span class="p">[</span><span class="s">&#39;thing&#39;</span><span class="p">])</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">values</span><span class="p">[</span><span class="s">&#39;thing&#39;</span><span class="p">]</span> <span class="ow">is</span> <span class="n">thing</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">values</span><span class="p">[</span><span class="s">&#39;other&#39;</span><span class="p">]</span> <span class="ow">is</span> <span class="n">other</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="patch-methods-start-and-stop">
+<span id="start-and-stop"></span><h2>patch methods: start and stop<a class="headerlink" href="#patch-methods-start-and-stop" title="Permalink to this headline">¶</a></h2>
+<p>All the patchers have <cite>start</cite> and <cite>stop</cite> methods. These make it simpler to do
+patching in <cite>setUp</cite> methods or where you want to do multiple patches without
+nesting decorators or with statements.</p>
+<p>To use them call <cite>patch</cite>, <cite>patch.object</cite> or <cite>patch.dict</cite> as normal and keep a
+reference to the returned <cite>patcher</cite> object. You can then call <cite>start</cite> to put
+the patch in place and <cite>stop</cite> to undo it.</p>
+<p>If you are using <cite>patch</cite> to create a mock for you then it will be returned by
+the call to <cite>patcher.start</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;package.module.ClassName&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">package</span> <span class="kn">import</span> <span class="n">module</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">new_mock</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName</span> <span class="ow">is</span> <span class="ow">not</span> <span class="n">original</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName</span> <span class="ow">is</span> <span class="n">new_mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">patcher</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName</span> <span class="ow">is</span> <span class="n">original</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName</span> <span class="ow">is</span> <span class="ow">not</span> <span class="n">new_mock</span>
+</pre></div>
+</div>
+<p>A typical use case for this might be for doing multiple patches in the <cite>setUp</cite>
+method of a <cite>TestCase</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">setUp</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher1</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;package.module.Class1&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher2</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;package.module.Class2&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">MockClass1</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher1</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">MockClass2</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher2</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">tearDown</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher1</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher2</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_something</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">Class1</span> <span class="ow">is</span> <span class="bp">self</span><span class="o">.</span><span class="n">MockClass1</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">Class2</span> <span class="ow">is</span> <span class="bp">self</span><span class="o">.</span><span class="n">MockClass2</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_something&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">run</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="admonition caution">
+<p class="first admonition-title">Caution</p>
+<p>If you use this technique you must ensure that the patching is &#8220;undone&#8221; by
+calling <cite>stop</cite>. This can be fiddlier than you might think, because if an
+exception is raised in the setUp then tearDown is not called. <a class="reference external" href="http://pypi.python.org/pypi/unittest2">unittest2</a> cleanup functions make this
+easier.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">setUp</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;package.module.Class&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">MockClass</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">addCleanup</span><span class="p">(</span><span class="n">patcher</span><span class="o">.</span><span class="n">stop</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_something</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">Class</span> <span class="ow">is</span> <span class="bp">self</span><span class="o">.</span><span class="n">MockClass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_something&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">run</span><span class="p">()</span>
+</pre></div>
+</div>
+<p class="last">As an added bonus you no longer need to keep a reference to the <cite>patcher</cite>
+object.</p>
+</div>
+<p>It is also possible to stop all patches which have been started by using
+<cite>patch.stopall</cite>.</p>
+<dl class="function">
+<dt id="mock.patch.stopall">
+<tt class="descclassname">patch.</tt><tt class="descname">stopall</tt><big>(</big><big>)</big><a class="headerlink" href="#mock.patch.stopall" title="Permalink to this definition">¶</a></dt>
+<dd><p>Stop all active patches. Only stops patches started with <cite>start</cite>.</p>
+</dd></dl>
+
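+<p>As an illustrative sketch, reusing the hypothetical <cite>package.module</cite> from the
+example above, several patchers started in a row can all be undone with a single
+call to <cite>patch.stopall</cite>:</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; from package import module
+&gt;&gt;&gt; patcher1 = patch('package.module.Class1')
+&gt;&gt;&gt; patcher2 = patch('package.module.Class2')
+&gt;&gt;&gt; MockClass1 = patcher1.start()
+&gt;&gt;&gt; MockClass2 = patcher2.start()
+&gt;&gt;&gt; assert module.Class1 is MockClass1
+&gt;&gt;&gt; patch.stopall()
+&gt;&gt;&gt; assert module.Class1 is not MockClass1
+&gt;&gt;&gt; assert module.Class2 is not MockClass2</pre>
+</div>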
+</div>
+<div class="section" id="test-prefix">
+<h2>TEST_PREFIX<a class="headerlink" href="#test-prefix" title="Permalink to this headline">¶</a></h2>
+<p>All of the patchers can be used as class decorators. When used in this way
+they wrap every test method on the class. The patchers recognise methods that
+start with <cite>test</cite> as being test methods. This is the same way that the
+<cite>unittest.TestLoader</cite> finds test methods by default.</p>
+<p>You may want to use a different prefix for your tests. You can
+inform the patchers of the different prefix by setting <cite>patch.TEST_PREFIX</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">patch</span><span class="o">.</span><span class="n">TEST_PREFIX</span> <span class="o">=</span> <span class="s">&#39;foo&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">value</span> <span class="o">=</span> <span class="mi">3</span>
+<span class="go">&gt;&gt;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;__main__.value&#39;</span><span class="p">,</span> <span class="s">&#39;not three&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">class</span> <span class="nc">Thing</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">foo_one</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">print</span> <span class="n">value</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">foo_two</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">print</span> <span class="n">value</span>
+<span class="gp">...</span>
+<span class="go">&gt;&gt;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">Thing</span><span class="p">()</span><span class="o">.</span><span class="n">foo_one</span><span class="p">()</span>
+<span class="go">not three</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">Thing</span><span class="p">()</span><span class="o">.</span><span class="n">foo_two</span><span class="p">()</span>
+<span class="go">not three</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">value</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="nesting-patch-decorators">
+<h2>Nesting Patch Decorators<a class="headerlink" href="#nesting-patch-decorators" title="Permalink to this headline">¶</a></h2>
+<p>If you want to perform multiple patches then you can simply stack up the
+decorators, using this pattern:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;class_method&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;static_method&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">(</span><span class="n">mock1</span><span class="p">,</span> <span class="n">mock2</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">static_method</span> <span class="ow">is</span> <span class="n">mock1</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">class_method</span> <span class="ow">is</span> <span class="n">mock2</span>
+<span class="gp">... </span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">static_method</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">class_method</span><span class="p">(</span><span class="s">&#39;bar&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">mock1</span><span class="p">,</span> <span class="n">mock2</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock1</span><span class="p">,</span> <span class="n">mock2</span> <span class="o">=</span> <span class="n">test</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock1</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock2</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;bar&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>Note that the decorators are applied from the bottom upwards. This is the
+standard way that Python applies decorators. The order of the created mocks
+passed into your test function matches this order.</p>
+<p>Like all context managers, patches can be nested using contextlib&#8217;s <cite>nested</cite>
+function; <em>every</em> patch will appear in the tuple after &#8220;as&#8221;:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">contextlib</span> <span class="kn">import</span> <span class="n">nested</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">nested</span><span class="p">(</span>
+<span class="gp">... </span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;package.module.ClassName1&#39;</span><span class="p">),</span>
+<span class="gp">... </span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;package.module.ClassName2&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="p">)</span> <span class="k">as</span> <span class="p">(</span><span class="n">MockClass1</span><span class="p">,</span> <span class="n">MockClass2</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">ClassName1</span> <span class="ow">is</span> <span class="n">MockClass1</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">ClassName2</span> <span class="ow">is</span> <span class="n">MockClass2</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="where-to-patch">
+<span id="id1"></span><h2>Where to patch<a class="headerlink" href="#where-to-patch" title="Permalink to this headline">¶</a></h2>
+<p><cite>patch</cite> works by (temporarily) replacing the object that a <em>name</em> points to with
+another one. There can be many names pointing to any individual object, so
+for patching to work you must ensure that you patch the name used by the system
+under test.</p>
+<p>The basic principle is that you patch where an object is <em>looked up</em>, which
+is not necessarily the same place as where it is defined. A couple of
+examples will help to clarify this.</p>
+<p>Imagine we have a project that we want to test with the following structure:</p>
+<div class="highlight-python"><pre>a.py
+ -&gt; Defines SomeClass
+
+b.py
+ -&gt; from a import SomeClass
+ -&gt; some_function instantiates SomeClass</pre>
+</div>
+<p>Now we want to test <cite>some_function</cite> but we want to mock out <cite>SomeClass</cite> using
+<cite>patch</cite>. The problem is that when we import module b, which we will have to
+do, it imports <cite>SomeClass</cite> from module a. If we use <cite>patch</cite> to mock out
+<cite>a.SomeClass</cite> then it will have no effect on our test; module b already has a
+reference to the <em>real</em> <cite>SomeClass</cite> and it looks like our patching had no
+effect.</p>
+<p>The key is to patch out <cite>SomeClass</cite> where it is used (or where it is looked
+up). In this case <cite>some_function</cite> will actually look up <cite>SomeClass</cite> in module b,
+where we have imported it. The patching should look like:</p>
+<blockquote>
+<div><cite>&#64;patch(&#8216;b.SomeClass&#8217;)</cite></div></blockquote>
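+<p>A minimal sketch of such a test (assuming only, as in the layout above, that
+<cite>some_function</cite> instantiates <cite>SomeClass</cite>) might be:</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; import b
+&gt;&gt;&gt; with patch('b.SomeClass') as MockSomeClass:
+...     b.some_function()
+...     assert MockSomeClass.called
+...</pre>
+</div>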
+<p>However, consider the alternative scenario where instead of <cite>from a import
+SomeClass</cite> module b does <cite>import a</cite> and <cite>some_function</cite> uses <cite>a.SomeClass</cite>. Both
+of these import forms are common. In this case the class we want to patch is
+being looked up on the a module and so we have to patch <cite>a.SomeClass</cite> instead:</p>
+<blockquote>
+<div><cite>&#64;patch(&#8216;a.SomeClass&#8217;)</cite></div></blockquote>
+</div>
+<div class="section" id="patching-descriptors-and-proxy-objects">
+<h2>Patching Descriptors and Proxy Objects<a class="headerlink" href="#patching-descriptors-and-proxy-objects" title="Permalink to this headline">¶</a></h2>
+<p>Since version 0.6.0 both <a class="reference internal" href="#patch">patch</a> and <a class="reference internal" href="#patch-object">patch.object</a> have been able to correctly
+patch and restore descriptors: class methods, static methods and properties.
+You should patch these on the <em>class</em> rather than an instance.</p>
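+<p>For example, a sketch of patching a static method on a (hypothetical) class,
+patching the class itself so that the original descriptor is restored afterwards:</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; class SomeClass(object):
+...     @staticmethod
+...     def static_method():
+...         return 'original'
+...
+&gt;&gt;&gt; with patch.object(SomeClass, 'static_method') as mock_method:
+...     mock_method.return_value = 'patched'
+...     assert SomeClass.static_method() == 'patched'
+...
+&gt;&gt;&gt; assert SomeClass.static_method() == 'original'</pre>
+</div>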
+<p>Since version 0.7.0 <a class="reference internal" href="#patch">patch</a> and <a class="reference internal" href="#patch-object">patch.object</a> work correctly with some objects
+that proxy attribute access, like the <a class="reference external" href="http://www.voidspace.org.uk/python/weblog/arch_d7_2010_12_04.shtml#e1198">django settings object</a>.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">In django <cite>import settings</cite> and <cite>from django.conf import settings</cite>
+return different objects. If you are using libraries / apps that do both you
+may have to patch both. Grrr...</p>
+</div>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Patch Decorators</a><ul>
+<li><a class="reference internal" href="#patch">patch</a></li>
+<li><a class="reference internal" href="#patch-object">patch.object</a></li>
+<li><a class="reference internal" href="#patch-dict">patch.dict</a></li>
+<li><a class="reference internal" href="#patch-multiple">patch.multiple</a></li>
+<li><a class="reference internal" href="#patch-methods-start-and-stop">patch methods: start and stop</a></li>
+<li><a class="reference internal" href="#test-prefix">TEST_PREFIX</a></li>
+<li><a class="reference internal" href="#nesting-patch-decorators">Nesting Patch Decorators</a></li>
+<li><a class="reference internal" href="#where-to-patch">Where to patch</a></li>
+<li><a class="reference internal" href="#patching-descriptors-and-proxy-objects">Patching Descriptors and Proxy Objects</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="mock.html"
+ title="previous chapter">The Mock Class</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="helpers.html"
+ title="next chapter">Helpers</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/patch.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="helpers.html" title="Helpers"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="mock.html" title="The Mock Class"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/search.html b/third_party/python/mock-1.0.0/html/search.html
new file mode 100644
index 0000000000..8e0a907cae
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/search.html
@@ -0,0 +1,99 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Search &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <script type="text/javascript" src="_static/searchtools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <script type="text/javascript">
+ jQuery(function() { Search.loadIndex("searchindex.js"); });
+ </script>
+
+
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <h1 id="search-documentation">Search</h1>
+ <div id="fallback" class="admonition warning">
+ <script type="text/javascript">$('#fallback').hide();</script>
+ <p>
+ Please activate JavaScript to enable the search
+ functionality.
+ </p>
+ </div>
+ <p>
+ From here you can search these documents. Enter your search
+ words into the box below and click "search". Note that the search
+ function will automatically search for all of the words. Pages
+ containing fewer words won't appear in the result list.
+ </p>
+ <form action="" method="get">
+ <input type="text" name="q" value="" />
+ <input type="submit" value="search" />
+ <span id="search-progress" style="padding-left: 10px"></span>
+ </form>
+
+ <div id="search-results">
+
+ </div>
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/searchindex.js b/third_party/python/mock-1.0.0/html/searchindex.js
new file mode 100644
index 0000000000..a71918b937
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/searchindex.js
@@ -0,0 +1 @@
+Search.setIndex({objects:{"":{mock:[0,0,1,""]},"mock.patch":{dict:[5,2,1,""],object:[5,2,1,""],multiple:[5,2,1,""],stopall:[5,2,1,""]},"mock.Mock":{reset_mock:[9,5,1,""],method_calls:[9,4,1,""],attach_mock:[9,5,1,""],assert_called_with:[9,5,1,""],assert_has_calls:[9,5,1,""],"_get_child_mock":[9,5,1,""],mock_calls:[9,4,1,""],side_effect:[9,4,1,""],"__class__":[9,4,1,""],assert_called_once_with:[9,5,1,""],call_args_list:[9,4,1,""],configure_mock:[9,5,1,""],return_value:[9,4,1,""],call_count:[9,4,1,""],assert_any_call:[9,5,1,""],mock_add_spec:[9,5,1,""],called:[9,4,1,""],"__dir__":[9,5,1,""],call_args:[9,4,1,""]},mock:{create_autospec:[6,2,1,""],MagicMock:[3,3,1,""],DEFAULT:[8,1,1,""],PropertyMock:[9,3,1,""],NonCallableMock:[9,3,1,""],patch:[5,2,1,""],FILTER_DIR:[6,1,1,""],NonCallableMagicMock:[3,3,1,""],call:[6,2,1,""],sentinel:[8,1,1,""],mock_open:[6,2,1,""],ANY:[6,1,1,""],Mock:[9,3,1,""]},"mock.call":{call_list:[6,5,1,""]}},terms:{"__lshift__":3,"_children":4,demand:[8,6,9],interchang:2,four:9,prefix:[5,6,4],"1alpha1":[],repetit:7,whose:[2,7],typeerror:[0,6,5],patcher:[5,6,7,4],test_foo:7,not_a_child:9,under:[0,2,6,7,5],testabl:7,spec:[0,1,2,3,4,5,6,7,9],everi:[0,2,7,5],risk:4,"__instancecheck__":3,"__oct__":3,"__nonzero__":3,noncallablemock:[5,9,4],sentinelobject:4,readthedoc:[0,4],initialis:6,request:[6,9,4],second:[1,6,7,9,5],even:[5,2,6,7,4],hide:4,"20spy":[],ned:4,introspect:6,calcul:[1,7],"new":[0,1,4,5,7,9],ever:9,never:6,here:[1,6,7,9,5],path:6,interpret:9,datetim:7,permit:9,aka:[],contextmanag:1,propag:[2,9,4],substr:0,articl:0,abstracthttphandl:6,describ:[0,6],would:[2,6,7,4],call:[0,1,2,3,4,5,6,7,9],typo:6,type:[0,3,4,6,7,9],tell:[6,9,4],mock_db:[],relat:0,"__iter__":[5,7,3,4],isatti:[],unpack:[0,9],must:[9,7,3,5],kumar:1,word:[],room:6,restor:[0,2,7,5,4],setup:[0,5,7,3,4],work:[2,3,4,5,6,7],line_buff:[],overrid:[0,1,7,9],give:[2,7],indic:[8,6],caution:5,want:[1,2,3,5,6,7,9],masquerad:9,unstart:4,end:[0,1,2,4,5,7],turn:[2,7],noncallablemagicmock:[6,9,3,4],ordinari:[0,6],how:[0,1,2,4,6,7,9],mockobject:1,mock_cal:[1,2,3,4,6,7,9],verifi:1,config:[7,5],earlier:[],befor:[6,7,9,5],wrong:[0,5],attempt:[0,6,9,3,4],third:[7,4],classmethod:6,obsolet:[3,4],foord:0,delong:4,maintain:[1,4],environ:5,emsli:4,lambda:[0,1,6,7,9],order:[0,1,2,4,5,6,7,9],create_autospec:[0,5,6,4],oper:7,diagnos:[2,4],over:[2,7,3,5],becaus:[2,3,4,5,6,7,9],affect:[6,7],flexibl:[6,9],vari:[0,2,9],fix:[6,4],"__class__":[6,9,4],better:[6,7,4],persist:2,comprehens:7,easier:[7,9,5],them:[0,2,3,4,5,6,7,8,9],thei:[0,1,2,3,4,5,6,7,9],"__setstate__":3,safe:[6,7],"__subclasscheck__":3,bang:9,changelog:[0,4],bonu:5,getvalu:5,each:[1,2,3,5,7,9],debug:9,complet:[1,7],dingu:1,side:[0,1,2,3,4,7,9],mean:[0,1,2,3,4,5,6,7,9],colleagu:[],test_method_calls_someth:[],appengin:0,unbound:[0,6,7],foo_bar:7,got:[],situat:[2,7],standard:[0,4,5,6,7,9],test_someth:[2,5],"__setitem__":[7,3,5],call_list:[2,6,7,4],bernhadt:1,traceback:[0,1,2,4,5,6,7,9],"_patch":4,filter:[6,9,4],isn:[9,6,7,3,4],onto:[7,4],bite:7,user:[0,7,4],restrict:6,hook:3,unlik:9,alreadi:[5,2,6,7,4],hood:[6,7],seekabl:[],tox:4,top:[],sometim:[8,2,6,7],mercuri:[0,4],too:[6,7],skipfirst:[],with_arg:1,namespac:[0,2,7,5],tool:0,setuptool:[0,4],"0alpha3":[],"0alpha2":[],"0alpha1":[],technic:4,silli:7,target:[5,4],keyword:[2,3,4,5,6,7,9],provid:[0,1,2,4,6,7,8,9],zero:4,file_spec:[],project:[0,1,5],matter:[0,2,6,7],some_method__return:1,iron:0,add_head:6,modern:[],raw:[],"__main__":[6,7,9,5],seen:[],seem:[],seek:[],"__new__":3,especi:4,adaptor:7,"__getstate__":3,"_new_nam":[],though
:[2,6,7,4],object:[0,1,2,3,4,5,6,7,8,9],what:[0,2,4,6,7,9],monkeypatch:[],bsd:0,konstantin:4,don:[0,1,2,3,4,5,6,7,9],doc:[0,4],doe:[1,4,5,6,7,9],dot:[2,9],asid:7,opposit:4,"__str__":[0,3],syntax:0,somethingfortest:6,identifi:7,involv:7,despit:[],layout:4,explain:6,configur:[0,1,2,3,4,5,6,7,9],theme:4,"__call__":[0,6,7,4],plate:[],stop:[0,5,7,4],assertrais:1,report:4,reconstruct:[],softli:[],bar:[6,7,9,5],baz:[6,7,9],method:[0,1,2,3,4,5,6,7,8,9],twice:[1,4],respond:6,assert_called_with:[0,1,2,3,4,5,6,7,9],result:[1,2,3,4,6,7,8,9],respons:7,fail:[0,2,6,5,4],themselv:[2,9],best:6,hopefulli:[],discoveri:4,outstand:4,simplest:9,approach:7,attribut:[0,1,2,4,5,6,7,8,9],extend:1,weak:4,autodiscoveri:4,posit:6,lazi:4,returns_fak:1,debt:4,"__unicode__":3,protect:6,expos:[],"_noncallablemock__get_side_effect":6,howev:[0,5,6,7,4],against:[5,6,7,4],py3k:4,com:0,callarg:4,foobar:7,exception_rais:1,assum:[1,7],three:[2,4,5,6,7,9],been:[0,2,3,4,5,6,7,9],much:[0,6,7],interest:[6,7,3],basic:[1,7,3,5],mocksignatur:4,"__len__":3,xxx:[],uncommon:[],ani:[0,1,2,3,4,5,6,7,9],prop_mock:[],child:[5,2,7,9,4],"catch":[],ugli:[],ident:[8,7,3],properti:[0,4,5,6,7,9],weren:[],build_sphinx:4,suffici:7,"__ge__":3,seven:7,kwarg:[5,6,7,9,4],conf:5,tediou:6,somemodul:1,file_handl:2,incorrectli:[0,6],perform:[0,5,6,4],suggest:[0,4],make:[0,1,2,4,5,6,7,9],"0beta3":[],complex:[0,2,6,7,9],split:4,nosetest:[],idyl:0,hand:[6,3],rais:[0,1,2,3,4,5,6,7,9],boiler:[],second_cal:7,kept:3,scenario:[6,7,5],fiddlier:[7,5],thing2:9,thing1:9,inherit:4,contact:0,thi:[0,1,2,3,4,5,6,7,8,9],programm:7,everyth:6,left:7,protocol:[7,3,4],just:[0,2,6,7,4],kall:6,mock_fish:[],"__dict__":[],testwith:[],yet:[1,6],languag:0,previous:[7,4],easi:[0,2,7,9],interfer:[],had:[5,4],els:[9,3],"0beta4":[],"0beta2":[],mayb:7,preserv:4,mocker:1,measur:[],specif:[0,1,2,3,4,6,7,8,9],arbitrari:[2,4,5,6,7,9],manual:[6,3,4],noth:1,"__long__":3,unnecessari:[7,4],underli:[7,4],right:[5,2,7,3,4],old:[6,4],deal:7,stopiter:9,intern:4,test_method:[],txt:4,implement:[0,2,3,4,6,7],bottom:[0,2,5],subclass:[0,3,4,6,7,9],buffer:[],tracker:[0,1,4],unit:[0,6,7,4],foo:[0,1,2,3,5,6,7,9],core:0,plu:[0,2,9,5,4],sensibl:[8,2],idiomat:1,someclass:[1,2,7,9,5],repositori:0,post:0,"super":7,normal_argu:5,unsaf:4,obj:[],slightli:7,unfortun:7,"__mul__":3,some_object:[8,1,5],curiou:[],encod:[],bound:[7,5],"__and__":3,opportun:7,wai:[0,1,2,3,4,5,6,7,8,9],support:[0,1,3,4,5,6,9],herman:1,avail:[0,1,4,5,6,7],reli:7,mymock:7,creation:[6,4],form:[5,2,7,9,4],forc:9,"true":[0,1,2,3,4,5,6,7,9],something_els:[],reset:[9,4],flavour:7,attr:[6,9],bugfix:4,until:9,testutil:[],featur:[6,4],classic:[0,7],stronger:2,diagnost:6,exist:[0,2,3,4,5,6,7,9],check:[0,2,4,5,6,7,9],"__reduce_ex__":3,when:[0,2,4,5,6,7,8,9],refactor:[2,6,7,4],"8rc2":4,role:0,test:[0,1,2,4,5,6,7,8,9],intend:9,stringio:[1,5],assret_called_with:6,urllib2:6,consid:5,in_dict:5,receiv:4,faster:6,ignor:[6,9,4],time:[0,1,2,4,5,6,7,9],preconfigur:3,my_dict:7,concept:7,"__ne__":[3,4],chain:[0,1,2,4,6,7],skip:[0,4],global:[2,7],"__rshift__":3,depend:[2,6,7],readabl:[8,7],certainli:[],isinst:[5,7,9,4],child1:9,child2:9,sourc:[0,7,4],"__exit__":[1,6,3],string:[5,2,6,9,4],addcleanup:[7,5],"__bool__":3,exact:7,w00t:9,level:[0,6,4],did:4,iter:[2,3,4,5,7,9],assertequ:[1,2],item:[9,5],setitem:7,unsupport:[9,4],quick:[0,2],round:[6,4],dir:[6,9,4],inpy3k:[],prevent:[0,2,4,6,7,9],peek:[],cost:7,run:[0,5,7,4],lazili:[6,4],"_checkwrit":[],abstractbasicauthhandl:6,"_mock_children":[],current:[0,6],suspect:[],del:[9,5],fifti:7,honour:5,gener:[0,6,7],"__hex__":3,a
long:[0,6,9,4],mock_backend:7,has_attr:1,behav:7,extrem:6,weird:[],someobject:1,semant:[],love:0,"__getslice__":3,extra:[5,9,4],tweak:7,modul:[0,2,4,5,6,7,9],prefer:[0,7],fake:[0,1],marker:4,instal:0,ignorearg:1,method_cal:[1,3,4,6,7,9],baseexcept:4,todai:7,subvers:4,scope:[0,2,5],checkout:0,remember_ord:1,python:[0,1,2,3,4,5,6,7],peopl:[],asserttru:[2,7],prototyp:[],examin:0,easiest:[0,6],pretend:9,uniqu:[8,0,4],descriptor:[0,3,4,5,6,7,9],whatev:[2,7,9],purpos:6,boilerpl:5,spy:0,topic:[],konrad:4,mock3:1,mock2:[1,5],"__sizeof__":[3,4],occur:[],alwai:[8,9,4],multipl:[0,1,2,4,5,6,7],modulenam:[],write:[6,7,5],classname2:[0,2,5],map:5,product:[0,6,5],grob:7,clone:0,"__next__":3,membership:[5,4],mai:[1,4,5,6,7,9],underscor:[6,9],data:1,grow:[],toowtdi:4,"_chunk_siz":[],explicit:[1,6,9,4],inform:5,"switch":[5,6,9,4],cannot:[],combin:[],block:[9,5],callabl:[0,3,4,5,6,7,9],still:[3,4,5,6,7,9],dynam:[2,3,4,6,7,9],thank:4,match_foo:7,mail:0,main:[],non:[9,6,7,3,4],matcher:7,initi:4,now:[0,2,7,5,4],"_checkclos":[],discuss:[0,7],nor:4,introduct:7,term:0,name:[1,2,4,5,6,7,8,9],didn:7,separ:7,getitem:7,attributeerror:[0,2,3,4,6,9],replai:[0,1],arg1:1,individu:[6,9,5],continu:2,wrap:[5,7,9,4],year:[],happen:[6,7,9],shown:[1,2,6],accomplish:1,correct:[1,2,6,7],some_mock:[6,9],"_checkread":[],runtimeerror:[],orm:0,org:[0,4],unpredict:4,care:[6,7],couldn:7,"_get_child_mock":[7,9],class_method:5,refus:[],thing:[0,1,5,6,7,9],place:[5,6,7,3,4],principl:5,think:[7,5],first:[0,1,2,3,4,5,6,7,9],origin:[0,1,2,4,5,6,7],directli:[2,6,7,9,5],onc:[1,2,3,6,7,9],yourself:[6,7,3],open:[0,1,2,4,6,7],given:[0,6,9],"__subclasses__":3,silent:6,workaround:7,teardown:[7,5],caught:6,necessarili:[6,5],conveni:[8,2,9,4],friend:7,spec_set:[2,3,4,5,6,7,9],cope:[0,7],copi:[0,2,7,4],specifi:[0,2,9,5,4],than:[1,2,3,4,5,6,7,9],wide:0,setattr:[],autospec:[0,5,6,7,4],were:[0,1,2,7],bauer:4,nuisanc:7,seri:[7,9],pre:[8,0,9,4],nicer:[7,4],argument:[0,1,2,3,4,5,6,7,8,9],exclud:[9,4],kevin:4,"__builtin__":[1,2],techniqu:[0,7,5],alias:[],note:[0,1,2,3,4,5,6,7,9],take:[0,1,2,3,4,5,6,7,9],new_mock:[7,5],wonder:[],unittest2:[0,2,7,5,4],sure:[],importantli:2,normal:[0,2,3,5,6,7,8,9],track:[0,2,7,9],beta:[0,4],wire:6,abus:4,pair:5,homepag:0,renam:[6,4],later:[2,6,7],runtim:5,stopal:[5,4],uncondit:7,show:[6,7],"_new_par":[],slot:4,onli:[1,2,3,4,5,6,7,9],explicitli:9,activ:[5,7,4],written:[1,7],dict:[0,2,4,5,7,9],variou:[1,7,4],get:[0,1,2,4,5,6,7,9],repr:[2,4,5,7,8,9],soon:[],newkei:[0,2,5],attach_mock:[6,7,9,4],should_rec:1,requir:[0,6,3,4],fileno:[],yield:[1,7],email:[],irrespect:[6,4],mocksomeclass:7,where:[0,2,4,5,7,9],"__module__":4,dangoor:4,testcas:[2,7,5],concern:7,infinit:4,detect:4,getattr:[],between:[6,7,9],"import":[0,1,2,4,5,6,7,9],across:6,parent:[7,9,4],cycl:4,mock_add_spec:[9,4],come:0,quack:0,copyabl:4,inconsist:3,mani:[0,3,4,5,6,7,9],unittest:[0,5,4],pop:7,anti:7,sentinel:[8,0,2,9,4],typic:[5,4],"0a2":[],"0a3":[],coupl:[2,5],mock_someth:[],valueerror:[7,9],fake_open:1,ironpython:4,"__eq__":[7,3,4],those:[2,7,9,4],"case":[2,3,4,5,6,7,9],"__mod__":3,trick:7,ran_emo:[],advantag:[],stdout:5,side_effect:[0,1,2,3,4,5,7,8,9],"__getformat__":[3,4],"__init__":[0,3,4,5,6,7],develop:0,author:0,same:[0,1,2,3,4,5,6,7,8,9],binari:4,html:[],document:[0,4],exhaust:9,nest:[0,2,4,5,6,7],companion:[],capabl:[0,2],improv:[0,4],extern:[],"_spec_stat":[],appropri:0,without:[0,2,3,4,5,6,7,9],model:0,"__getinitargs__":3,"__int__":[9,3],execut:[5,1,2,6,4],aspect:[],is_cal:1,testmethod:[],any_ord:[7,9],except:[0,1,2,3,4,5,7,9],littl:7,blog:[0,7],blob:7,real
:[0,1,2,4,5,6,7,8,9],mox:1,around:4,read:[0,1,2,6],read_data:6,some_funct:[2,5],integ:9,benefit:[],either:[6,9,3,5],output:0,manag:[0,1,2,3,4,5,6,7],assertionerror:[1,6,7,9],definit:0,exit:[7,5],inject:0,overli:[],refer:[0,2,7,5,4],power:[0,6,7,5],garbag:4,inspect:[],broken:[2,6,4],fulli:1,"__name__":[],"throw":1,comparison:[0,1,3,4,6,7],"__setslice__":3,call_args_list:[1,6,7,9,4],act:[0,9,5],gari:1,terminolog:0,call_arg:[6,7,9,4],patcher1:5,patcher2:5,your:[0,2,4,5,6,7,8,9],aren:[9,5,7,3,4],hex:3,start:[0,2,4,5,6,7],lot:6,replayal:1,"__invert__":3,enough:6,tupl:[5,6,9,4],mock_frob:7,"__get__":[9,3],verifyal:1,notat:9,copy_call_arg:7,mockmethod:[],abstractdigestauthhandl:6,possibl:[5,7,4],"default":[2,3,4,5,6,7,8,9],unusu:[],expect:[1,2,6,7,9],gone:6,creat:[0,1,2,3,4,5,6,7,8,9],certain:[6,4],strongli:[],"__floor__":[3,4],file:[2,6,7,4],incorrect:2,again:[5,7,4],googl:0,readinto:[],compel:[],pmock:1,valid:[5,6,7,4],writabl:[],you:[0,1,2,3,4,5,6,7,8,9],isint:9,sequenc:[6,7,9],minimock:1,docstr:4,testload:5,reduc:[5,4],tricki:[7,4],mimic:7,nelson:[],potenti:4,unset:4,represent:4,all:[0,1,2,3,4,5,6,7,9],test_closer_closes_someth:[],month:[],abil:4,follow:[0,5,7,3,4],children:[2,7,9,4],"__cmp__":3,hasattr:9,foo_on:5,patch_modul:4,introduc:[],consum:3,bibbl:6,open_nam:[],straightforward:[0,2,7,5],fals:[3,4,5,6,7,9],pop_last_cal:[],xunitpattern:[],candid:[0,4],fan:0,failur:[8,2,7,4],veri:[0,6,7,9,5],reset_mock:[9,4],list:[0,2,3,4,5,6,7,9],productionclass:[8,0,2],sane:[],"__missing__":3,mockanyth:1,"__truediv__":[3,4],design:[0,6],pass:[0,1,2,4,5,6,7,8,9],further:[0,2,7],cursor:2,deleg:7,sub:[7,4],section:[7,5],"_noncallablemock__set_side_effect":6,abl:[6,9,5],delet:[0,5,7,9,4],version:[0,1,3,4,5,6,7,9],deepli:6,some_method:[1,2],"public":4,hasn:[2,9],full:[9,7,3,4],hash:3,misspel:6,behaviour:[0,6,7,9,4],shouldn:[],modifi:[0,2,6,4],valu:[0,1,2,3,4,5,6,7,8,9],test_two:7,not_a_test:7,"_noncallablemock__return_value_doc":6,"__xor__":3,doctest:1,action:0,via:[6,4],packagenam:[],intermedi:[1,6,4],ask:[],"__setformat__":[3,4],thankfulli:6,decrement:[],select:2,aggress:6,etc:[1,4],two:[2,3,4,5,6,7,9],coverag:0,almost:3,minor:4,more:[0,1,2,4,5,6,7,9],flaw:6,"__coerce__":3,particular:[1,6,7,9],known:7,cach:[],none:[0,1,2,3,4,5,6,7,9],copyingmock:7,caveat:6,def:[0,1,2,3,5,6,7,9],frustratingli:0,share:[5,6,4],templat:[0,4],minimum:5,cours:[2,6,7],xxxx:[],newlin:[],rather:[1,2,3,4,5,6,7,9],anoth:[0,2,4,5,6,7,8,9],assert_called_once_with:[0,1,2,4,5,6,7,9],simpl:[0,1,2,7],variant:[0,7,3,9],confus:[0,9],django:[0,5,4],caus:[5,2,7,3,4],callback:4,mortem:0,"__le__":3,egg:[6,7,9],help:[5,2,6,4],mockiti:[0,9],through:[0,6,7,9,4],suffer:6,annoyingli:0,paramet:[0,9,3],style:4,mockstat:[],times_cal:1,bypass:6,stephen:4,might:[2,6,7,5],wouldn:[6,4],good:[1,7],"return":[0,1,2,3,4,5,6,7,8,9],framework:[0,1],detach:[],complain:[],achiev:[7,9],found:[0,7],unicod:[0,3],mock_respons:7,truncat:4,obj_typ:[],michael:[0,4],fish:[9,7,3,5],hard:[2,6,7],realli:[],some_modul:1,connect:2,horribl:4,todo:[0,1,4],some_valu:1,mock_foo:[7,9],print:[5,9,4],proxi:[0,5,4],advanc:[2,7],reason:[6,7,9],base:[0,6,7,9,4],put:[7,5],new_inst:1,omit:[1,5],perhap:0,assign:[0,1,9,4],singleton:9,obviou:7,feel:7,number:[1,9],done:[0,4,5,6,7,9],construct:[2,6,7,9,5],stabl:0,miss:[1,3],"__float__":3,differ:[0,1,2,3,4,5,6,7],script:0,interact:[],least:[1,6,5],fladisch:4,"__dir__":[9,3],statement:[0,2,3,4,5,7],store:[0,7,9],option:[2,6,7,9,4],propertymock:[7,9,4],part:[0,2,7,4],aaa:0,whenev:9,remov:[0,9,4],my_dingu:1,reus:[9,4],str:[0,3,4],arrang:0,toward:[],
well:[0,2,4,5,6,7,9],packag:[5,2,7,4],use_mock_anyth:1,imagin:[7,5],built:[7,4],florian:4,self:[1,2,3,4,5,6,7,9],violat:4,also:[0,1,2,3,4,5,6,7,9],useless:6,distribut:[0,7,4],previou:[],most:[0,1,2,3,5,6,7,9],alpha:[0,4],promulg:0,appear:[2,9,3,5],clear:[0,2,4,5,7,9],and_return:1,newvalu:[0,2,5],usual:[7,3],visibl:6,favour:[],particularli:6,and_rais:1,mock_open:[0,6,7,4],find:[0,7,5],has_data:6,indexerror:9,solut:7,factor:[],hit:6,unus:4,express:7,mock_method:[0,2,5],foobarbaz:0,"_ani":[],common:[8,5,2,6,4],manger:5,set:[0,2,3,4,5,6,7,9],get_endpoint:7,mutabl:[0,7],see:[0,2,5,6,7,9],arg:[0,1,2,3,4,5,6,7,9],close:[2,6],inconveni:7,wow:9,won:4,has_been_cal:7,altern:[0,2,5,6,7,9],signatur:[0,5,6,7,4],syntact:5,numer:[3,4],isol:[6,7,3,4],"__divmod__":3,solv:[6,7],nervou:[],classnam:[2,5],both:[5,6,7,4],last:[0,1,2,4,5,6,7,9],"__trunc__":[3,4],someexcept:1,context:[0,1,2,3,4,5,6,7],pdf:0,whole:[],onward:0,simpli:[6,7,5],point:[7,5],instanti:[5,2,6,7,4],suppli:4,throughout:[0,9],batcheld:4,pycon:0,backend:7,sever:[2,7,9,4],java:4,due:6,empti:[6,9,3,4],sinc:[5,4],andrais:1,remark:7,understand:7,func:[],"_old_nam":[],unpatch:[5,4],look:[0,2,7,3,5],keywarg:[],unifi:[],match:[0,7,5],abov:[0,2],error:[2,6,7,4],"__hash__":3,sheremetyev:1,expected_cal:7,readi:9,flexmock:1,itself:[0,6,7,4],decor:[0,2,4,5,7,9],mockclass:[1,5],minim:[],shorter:7,lenient:[],conflict:[],"__repr__":3,wherea:6,read1:[],temporari:1,"__or__":3,"__add__":3,stack:[2,5],recent:[0,1,2,4,5,6,7,9],travers:[6,4],task:1,older:0,entri:[0,1,6,7,4],pickl:[3,4],raise_except:[],"__neg__":3,elev:[],"_wrap":4,tast:7,match_equ:7,obscur:3,stabilis:[],"_parent":4,input:9,subsequ:[7,3],hamcrest:7,big:6,inequ:4,"__gt__":3,bit:[7,4],"_method":4,resolv:6,collect:4,"boolean":[9,4],popular:1,stylish:4,often:[2,9],"1st":[],some:[0,1,2,3,4,5,6,7,9],back:7,sizeof:3,retri:7,undon:[2,7,5],recognis:[5,4],nose:4,method2:1,method3:1,method1:1,martin:0,step:7,impos:0,patched_context:1,doubl:[0,6,9],within:[0,5],ensur:[0,1,6,7,5],spam:[6,7],question:[],"long":3,custom:[9,4],includ:[0,1,2,3,4,6,7,9],suit:0,assert_has_cal:[6,7,9,4],properli:6,fishi:0,line:[6,7,4],info:4,test_modul:2,file_:1,readlin:[],similar:7,match_wrong:7,doesn:[1,4,5,6,7,9],repres:[1,2,6,9],wheeeeee:0,deletingmock:[],titl:[],sequenti:9,"__format__":3,test_prefix:[0,5,4],invalid:[0,4],mock:[0,1,2,3,4,5,6,7,8,9],nice:[2,7],test_funct:5,meaning:4,eval:[],mymodul:[7,5],dingus:1,algorithm:7,berman:4,far:1,hello:9,code:[0,2,4,5,6,7,9],partial:[0,1,7],privat:[6,4],old_method:2,this_foo:9,mock_stdout:5,func2:[],whichev:7,dandi:6,implicitli:9,relev:0,recip:[0,7],magic:[0,1,3,4,5,7,9],"__prepare__":3,mocksetexpect:[],"try":[6,7],"_mock":4,pleas:[0,1,3],"_checkseek":[],natur:[0,1],"0x1":3,video:0,mock_spam:7,download:0,"__ceil__":[3,4],compat:4,compar:[2,6,7,9,4],fine:[0,6,4],access:[0,2,4,5,6,7,8,9],experiment:[],can:[0,1,2,3,4,5,6,7,8,9],len:[1,3],bodi:[7,5],let:[6,7,3,5],becom:[6,7,9],implicit:[7,4],great:6,convers:3,"_name":4,staticmethod:6,opinion:[],chang:[2,3,4,5,6,7,9],"__enter__":[1,6,3],danger:5,appli:[0,2,3,4,5,6,7],app:5,disciplin:0,mockfoo:6,api:[0,4,5,6,7,9],duck:0,from:[0,1,2,3,4,5,6,7,9],commun:0,assret_called_once_with:6,frob:7,next:[2,9,4],few:6,sort:[],bernhardt:1,trail:9,actual:[8,5,2,7,4],annoi:6,obvious:6,create_patch:7,fetch:[5,6,7,9,4],control:[0,6,7,4],deepcopi:7,tag:0,mock1:[1,5],delai:7,foo_two:5,six:[],"__pos__":3,instead:[0,2,4,5,6,7,9],class2:[7,5],circular:[7,4],exemplifi:1,seriou:6,correspond:[6,9,5],element:[],issu:[0,1,6,7,4],allow:[0,2,3,4,5,6,7,9],move:[7,4],whilst:7,m
ock_class:5,bunch:[],classname1:[0,2,5],static_method:[2,5],"__getitem__":[7,3,5],handl:[0,2,3,4,5,6,7,9],auto:[0,6,7,4],handi:6,configure_mock:[7,9,4],"__pow__":3,front:[1,7],"__delitem__":[3,5],anyth:[3,5],mytest:[2,7,5],mode:[],fiddli:6,reserv:[],class1:[7,5],upward:5,second_patch:5,"static":[0,5,7,4],our:[2,6,7,5],meth:[],patch:[0,1,2,4,5,6,7,8,9],special:[1,3,4],out:[2,4,5,6,7,9],variabl:[1,6],req:6,stub:[0,9],rel:6,new_cal:[5,9,4],ref:[],clarifi:5,insid:[7,9,5],manipul:[],undo:5,dictionari:[0,2,4,5,6,7,9],releas:[0,1,4],likelihood:4,afterward:[2,5],indent:[7,5],could:[5,7,4],keep:5,keen:[],length:9,"__delete__":3,date:[0,1,7],my_fak:1,owner:[],licens:[0,4],perfectli:7,system:[0,2,6,7,5],messag:[8,1,2,7,4],attach:[0,7,9,4],monkei:[8,7,4],"final":[6,7],"__del__":3,getattr_stat:6,mmckclass:[],exactli:[0,6,7,9],haven:[6,4],cpython:4,structur:5,rybnikov:4,sens:[],stricter:9,"__rtruediv__":[3,4],julian:4,deprec:4,liner:[],blip:7,have:[0,1,2,3,4,5,6,7,8,9],disadvantag:[],need:[0,1,2,3,4,5,6,7,8,9],unexpect:2,"__unittest":4,mix:[],builtin:[0,1,2,7,4],patch_object:4,which:[0,1,3,4,5,6,7,9],singl:[5,2,6,7,4],unless:[9,3,4],writelin:[],clash:4,sekret:1,who:[],discov:0,why:[6,7],magicmock:[0,1,2,3,4,5,6,7,9],hardcod:4,some_other_object:1,basehandl:6,determin:4,fowler:0,someth:[2,3,5,6,7,9],fact:7,"_calllist":[],verbos:[0,7],bring:6,mockcheckcal:[],longer:[5,2,7,4],trivial:2,anywai:[0,4],setter:9,mock_bar:7,should:[8,5,3,4],suppos:[2,7],start_cal:7,local:7,contribut:[1,4],"0b4":[],pypi:[0,4],notimpl:[3,4],stuff:[0,6],integr:[6,7,4],contain:[6,7,3,5],noodl:[],temporarili:[7,9,5],mock_th:5,closer:2,test_on:7,correctli:[5,2,6,4],pattern:[0,2,7,5],mock_dat:7,state:[0,2,5],"__bases__":4,expectexcept:[],stuboutwithmock:1,kei:[0,2,7,9,5],dislik:6,job:[],addit:[5,1,2,6,4],indvidu:2,extens:[0,4],equal:[6,7,3,4],"__delattr__":[],instanc:[0,1,2,3,4,5,6,7,9],comment:4,"__complex__":3,unorder:4,quit:[],addition:0,"__set__":[9,3],some_attribut:[1,9],treat:[],foobl:[7,3],"0rc2":[],"0rc1":[],immedi:2,create_cal:7,assert:[0,1,2,3,4,5,6,7,8,9],togeth:[6,7],fake_fil:1,present:[0,4],determinist:4,harder:[1,7],unsuit:4,defin:[7,5],intro:[],"__floordiv__":3,"__sub__":3,helper:[0,6,7,4],fakeclass:1,"__reduce__":3,mock_urllib2:6,mutat:7,welcom:4,backendprovid:7,member:[0,5,6,9,4],mock_exit:5,andreturn:1,call_count:[1,7,9,4],"_noncallablemock__get_return_valu":6,http:0,interrog:7,effect:[0,2,3,4,5,7,9],dai:[],build:7,at_least:1,expand:5,pull:6,off:[6,9,5],keyboardinterrupt:4,pyhamcrest:7,sett:5,"__div__":3,exampl:[0,1,2,3,5,6,7,8,9],command:[0,4],choos:5,unit2:0,latest:0,unari:3,less:[0,7,4],first_patch:5,"__lt__":3,simultan:[],mock_funct:0,add:[6,7,9,5],cleanup:[7,5],bool:4,logger:0,smart:0,futur:6,rationalis:4,jython:[0,4],assert_any_cal:[6,9,4],know:[6,3,4],password:1,recurs:[6,4],python3:0,"__reversed__":3,like:[0,1,2,4,5,6,7,8,9],unord:4,lose:[],soft:4,page:[1,2],revers:[],pariti:4,flush:[],home:0,"__getnewargs__":3,librari:[0,1,5,7,4],"__setattr__":3,guid:[0,2,7],lead:9,"__contains__":[3,5],avoid:7,"__getattr__":3,leav:4,grrr:5,some_obj:7,usag:[0,6],host:[0,4],although:[],after:[0,2,5,6,7,9],simpler:[0,6,7,5],about:[0,1,2,3,6,7,9],"_noncallablemock__set_return_valu":6,rare:7,fudg:1,constructor:[0,2,3,4,6,7,9],own:[0,7,4],easy_instal:0,automat:[1,5,7,9,4],destructor:4,pointless:4,merg:4,val:[2,7],mcmillan:1,intention:1,trigger:[6,4],return_valu:[0,1,2,3,4,5,6,7,8,9],replac:[0,1,2,3,4,5,6,7,9],"var":[6,4],arg2:[1,6],"function":[0,2,3,4,5,6,7,8,9],mockclass1:[0,2,7,5],mockclass2:[0,2,7,5],filter_dir:[0,6,9,4],keyerror:
[0,7,9,5],gain:[3,4],sphinx:4,bug:[0,6,4],count:4,succe:[6,7],made:[0,1,2,4,7,9],wise:9,whether:[9,4],googlecod:0,rc1:4,record:[0,2,3,4,7,9],below:[2,6],limit:[0,6,9,4],otherwis:7,problem:[2,6,7,3,5],evalu:4,"int":[6,9,3],dure:[0,2,7,4],filenam:[1,2],twist:7,contextlib:[1,5],pip:0,probabl:[1,6,4],detail:[0,9],other:[0,1,2,3,4,5,6,7,9],lookup:[6,4],boom:[2,7,9],mocksometh:[],repeat:9,"class":[0,1,2,3,4,5,6,7,9],function2:[],my_mock:[1,6],rule:[7,3],klass:1,"__index__":3},objtypes:{"0":"py:module","1":"py:data","2":"py:function","3":"py:class","4":"py:attribute","5":"py:method"},titles:["Mock - Mocking and Testing Library","Mock Library Comparison","Getting Started with Mock","Mocking Magic Methods","CHANGELOG","Patch Decorators","Helpers","Further Examples","Sentinel","The Mock Class"],objnames:{"0":["py","module","Python module"],"1":["py","data","Python data"],"2":["py","function","Python function"],"3":["py","class","Python class"],"4":["py","attribute","Python attribute"],"5":["py","method","Python method"]},filenames:["index","compare","getting-started","magicmock","changelog","patch","helpers","examples","sentinel","mock"]}) \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/html/sentinel.html b/third_party/python/mock-1.0.0/html/sentinel.html
new file mode 100644
index 0000000000..5b28deb2c5
--- /dev/null
+++ b/third_party/python/mock-1.0.0/html/sentinel.html
@@ -0,0 +1,156 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Sentinel &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="Mocking Magic Methods" href="magicmock.html" />
+ <link rel="prev" title="Helpers" href="helpers.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="magicmock.html" title="Mocking Magic Methods"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="helpers.html" title="Helpers"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="sentinel">
+<h1>Sentinel<a class="headerlink" href="#sentinel" title="Permalink to this headline">¶</a></h1>
+<dl class="data">
+<dt id="mock.sentinel">
+<tt class="descname">sentinel</tt><a class="headerlink" href="#mock.sentinel" title="Permalink to this definition">¶</a></dt>
+<dd><p>The <tt class="docutils literal"><span class="pre">sentinel</span></tt> object provides a convenient way of creating unique
+objects for your tests.</p>
+<p>Attributes are created on demand when you access them by name. Accessing
+the same attribute will always return the same object. The objects
+returned have a sensible repr so that test failure messages are readable.</p>
+</dd></dl>
+
+<dl class="data">
+<dt id="mock.DEFAULT">
+<tt class="descname">DEFAULT</tt><a class="headerlink" href="#mock.DEFAULT" title="Permalink to this definition">¶</a></dt>
+<dd><p>The <cite>DEFAULT</cite> object is a pre-created sentinel (actually
+<cite>sentinel.DEFAULT</cite>). It can be used by <a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a>
+functions to indicate that the normal return value should be used.</p>
+</dd></dl>
+
+<div class="section" id="sentinel-example">
+<h2>Sentinel Example<a class="headerlink" href="#sentinel-example" title="Permalink to this headline">¶</a></h2>
+<p>Sometimes when testing you need to test that a specific object is passed as an
+argument to another method, or returned. It can be common to create named
+sentinel objects to test this. <cite>sentinel</cite> provides a convenient way of
+creating and testing the identity of objects like this.</p>
+<p>In this example we monkey patch <cite>method</cite> to return
+<cite>sentinel.some_object</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">real</span> <span class="o">=</span> <span class="n">ProductionClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">method</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s">&quot;method&quot;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">some_object</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">result</span> <span class="o">=</span> <span class="n">real</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">result</span> <span class="ow">is</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">some_object</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">sentinel</span><span class="o">.</span><span class="n">some_object</span>
+<span class="go">sentinel.some_object</span>
+</pre></div>
+</div>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Sentinel</a><ul>
+<li><a class="reference internal" href="#sentinel-example">Sentinel Example</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="helpers.html"
+ title="previous chapter">Helpers</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="magicmock.html"
+ title="next chapter">Mocking Magic Methods</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/sentinel.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="magicmock.html" title="Mocking Magic Methods"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="helpers.html" title="Helpers"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/mock.egg-info/PKG-INFO b/third_party/python/mock-1.0.0/mock.egg-info/PKG-INFO
new file mode 100644
index 0000000000..4c7309c713
--- /dev/null
+++ b/third_party/python/mock-1.0.0/mock.egg-info/PKG-INFO
@@ -0,0 +1,208 @@
+Metadata-Version: 1.0
+Name: mock
+Version: 1.0.0
+Summary: A Python Mocking and Patching Library for Testing
+Home-page: http://www.voidspace.org.uk/python/mock/
+Author: Michael Foord
+Author-email: michael@voidspace.org.uk
+License: UNKNOWN
+Description: mock is a library for testing in Python. It allows you to replace parts of
+ your system under test with mock objects and make assertions about how they
+ have been used.
+
+ mock is now part of the Python standard library, available as `unittest.mock <
+ http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock>`_
+ in Python 3.3 onwards.
+
+ mock provides a core `MagicMock` class removing the need to create a host of
+ stubs throughout your test suite. After performing an action, you can make
+ assertions about which methods / attributes were used and arguments they were
+ called with. You can also specify return values and set needed attributes in
+ the normal way.
+
+ mock is tested on Python versions 2.4-2.7 and Python 3. mock is also tested
+ with the latest versions of Jython and pypy.
+
+ The mock module also provides utility functions / objects to assist with
+ testing, particularly monkey patching.
+
+ * `PDF documentation for 1.0.0
+ <http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf>`_
+ * `mock on google code (repository and issue tracker)
+ <http://code.google.com/p/mock/>`_
+ * `mock documentation
+ <http://www.voidspace.org.uk/python/mock/>`_
+ * `mock on PyPI <http://pypi.python.org/pypi/mock/>`_
+ * `Mailing list (testing-in-python@lists.idyll.org)
+ <http://lists.idyll.org/listinfo/testing-in-python>`_
+
+ Mock is very easy to use and is designed for use with
+ `unittest <http://pypi.python.org/pypi/unittest2>`_. Mock is based on
+ the 'action -> assertion' pattern instead of 'record -> replay' used by many
+ mocking frameworks. See the `mock documentation`_ for full details.
+
+ Mock objects create all attributes and methods as you access them and store
+ details of how they have been used. You can configure them, to specify return
+ values or limit what attributes are available, and then make assertions about
+ how they have been used::
+
+ >>> from mock import Mock
+ >>> real = ProductionClass()
+ >>> real.method = Mock(return_value=3)
+ >>> real.method(3, 4, 5, key='value')
+ 3
+ >>> real.method.assert_called_with(3, 4, 5, key='value')
+
+ `side_effect` allows you to perform side effects, return different values or
+ raise an exception when a mock is called::
+
+ >>> mock = Mock(side_effect=KeyError('foo'))
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ KeyError: 'foo'
+ >>> values = {'a': 1, 'b': 2, 'c': 3}
+ >>> def side_effect(arg):
+ ... return values[arg]
+ ...
+ >>> mock.side_effect = side_effect
+ >>> mock('a'), mock('b'), mock('c')
+ (1, 2, 3)
+ >>> mock.side_effect = [5, 4, 3, 2, 1]
+ >>> mock(), mock(), mock()
+ (5, 4, 3)
+
+ Mock has many other ways you can configure it and control its behaviour. For
+ example the `spec` argument configures the mock to take its specification from
+ another object. Attempting to access attributes or methods on the mock that
+ don't exist on the spec will fail with an `AttributeError`.
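+
+ A minimal illustrative sketch of `spec` restricting attribute access (the
+ attribute names used here are only examples)::
+
+ >>> mock = Mock(spec=['frobnicate'])
+ >>> result = mock.frobnicate()
+ >>> mock.other_method()
+ Traceback (most recent call last):
+ ...
+ AttributeError: Mock object has no attribute 'other_method'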
+
+ The `patch` decorator / context manager makes it easy to mock classes or
+ objects in a module under test. The object you specify will be replaced with a
+ mock (or other object) during the test and restored when the test ends::
+
+ >>> from mock import patch
+ >>> @patch('test_module.ClassName1')
+ ... @patch('test_module.ClassName2')
+ ... def test(MockClass2, MockClass1):
+ ... test_module.ClassName1()
+ ... test_module.ClassName2()
+ ... assert MockClass1.called
+ ... assert MockClass2.called
+ ...
+ >>> test()
+
+ .. note::
+
+ When you nest patch decorators the mocks are passed in to the decorated
+ function in the same order they are applied (the normal *Python* order in
+ which decorators are applied). This means from the bottom up, so in the example
+ above the mock for `test_module.ClassName2` is passed in first.
+
+ With `patch` it matters that you patch objects in the namespace where they
+ are looked up. This is normally straightforward, but for a quick guide
+ read `where to patch
+ <http://www.voidspace.org.uk/python/mock/patch.html#where-to-patch>`_.
+
+ As well as a decorator `patch` can be used as a context manager in a with
+ statement::
+
+ >>> with patch.object(ProductionClass, 'method') as mock_method:
+ ... mock_method.return_value = None
+ ... real = ProductionClass()
+ ... real.method(1, 2, 3)
+ ...
+ >>> mock_method.assert_called_once_with(1, 2, 3)
+
+ There is also `patch.dict` for setting values in a dictionary just during the
+ scope of a test and restoring the dictionary to its original state when the
+ test ends::
+
+ >>> foo = {'key': 'value'}
+ >>> original = foo.copy()
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == original
+
+ Mock supports the mocking of Python magic methods. The easiest way of
+ using magic methods is with the `MagicMock` class. It allows you to do
+ things like::
+
+ >>> from mock import MagicMock
+ >>> mock = MagicMock()
+ >>> mock.__str__.return_value = 'foobarbaz'
+ >>> str(mock)
+ 'foobarbaz'
+ >>> mock.__str__.assert_called_once_with()
+
+ Mock allows you to assign functions (or other Mock instances) to magic methods
+ and they will be called appropriately. The MagicMock class is just a Mock
+ variant that has all of the magic methods pre-created for you (well - all the
+ useful ones anyway).
+
+ The following is an example of using magic methods with the ordinary Mock
+ class::
+
+ >>> from mock import Mock
+ >>> mock = Mock()
+ >>> mock.__str__ = Mock(return_value = 'wheeeeee')
+ >>> str(mock)
+ 'wheeeeee'
+
+ To ensure that the mock objects your tests use have the same API as the
+ objects they are replacing, you can use "auto-speccing". Auto-speccing can
+ be done through the `autospec` argument to patch, or the `create_autospec`
+ function. Auto-speccing creates mock objects that have the same attributes
+ and methods as the objects they are replacing, and any functions and methods
+ (including constructors) have the same call signature as the real object.
+
+ This ensures that your mocks will fail in the same way as your production
+ code if they are used incorrectly::
+
+ >>> from mock import create_autospec
+ >>> def function(a, b, c):
+ ... pass
+ ...
+ >>> mock_function = create_autospec(function, return_value='fishy')
+ >>> mock_function(1, 2, 3)
+ 'fishy'
+ >>> mock_function.assert_called_once_with(1, 2, 3)
+ >>> mock_function('wrong arguments')
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes exactly 3 arguments (1 given)
+
+ `create_autospec` can also be used on classes, where it copies the signature of
+ the `__init__` method, and on callable objects where it copies the signature of
+ the `__call__` method.
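+
+ A minimal sketch of autospeccing a class (the `Thing` class and its
+ signature are only illustrative)::
+
+ >>> class Thing(object):
+ ... def __init__(self, a, b):
+ ... pass
+ ...
+ >>> mock_thing = create_autospec(Thing)
+ >>> instance = mock_thing(1, 2)
+ >>> mock_thing.assert_called_once_with(1, 2)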
+
+ The distribution contains tests and documentation. The tests require
+ `unittest2 <http://pypi.python.org/pypi/unittest2>`_ to run.
+
+ Docs from the in-development version of `mock` can be found at
+ `mock.readthedocs.org <http://mock.readthedocs.org>`_.
+
+Keywords: testing,test,mock,mocking,unittest,patching,stubs,fakes,doubles
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: Implementation :: Jython
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Testing
diff --git a/third_party/python/mock-1.0.0/mock.egg-info/SOURCES.txt b/third_party/python/mock-1.0.0/mock.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..fd73542b37
--- /dev/null
+++ b/third_party/python/mock-1.0.0/mock.egg-info/SOURCES.txt
@@ -0,0 +1,94 @@
+LICENSE.txt
+MANIFEST.in
+README.txt
+mock.py
+setup.cfg
+setup.py
+tox.ini
+docs/changelog.txt
+docs/compare.txt
+docs/conf.py
+docs/examples.txt
+docs/getting-started.txt
+docs/helpers.txt
+docs/index.txt
+docs/magicmock.txt
+docs/mock.txt
+docs/patch.txt
+docs/sentinel.txt
+html/changelog.html
+html/compare.html
+html/examples.html
+html/genindex.html
+html/getting-started.html
+html/index.html
+html/magicmock.html
+html/mock.html
+html/mocksignature.html
+html/objects.inv
+html/output.txt
+html/patch.html
+html/search.html
+html/searchindex.js
+html/sentinel.html
+html/.doctrees/changelog.doctree
+html/.doctrees/compare.doctree
+html/.doctrees/examples.doctree
+html/.doctrees/getting-started.doctree
+html/.doctrees/index.doctree
+html/.doctrees/magicmock.doctree
+html/.doctrees/mock.doctree
+html/.doctrees/mocksignature.doctree
+html/.doctrees/patch.doctree
+html/.doctrees/sentinel.doctree
+html/_sources/changelog.txt
+html/_sources/compare.txt
+html/_sources/examples.txt
+html/_sources/getting-started.txt
+html/_sources/index.txt
+html/_sources/magicmock.txt
+html/_sources/mock.txt
+html/_sources/mocksignature.txt
+html/_sources/patch.txt
+html/_sources/sentinel.txt
+html/_static/adctheme.css
+html/_static/basic.css
+html/_static/breadcrumb_background.png
+html/_static/default.css
+html/_static/doctools.js
+html/_static/documentation.png
+html/_static/file.png
+html/_static/header_sm_mid.png
+html/_static/jquery.js
+html/_static/minus.png
+html/_static/mobile.css
+html/_static/plus.png
+html/_static/pygments.css
+html/_static/scrn1.png
+html/_static/scrn2.png
+html/_static/searchfield_leftcap.png
+html/_static/searchfield_repeat.png
+html/_static/searchfield_rightcap.png
+html/_static/searchtools.js
+html/_static/sidebar.js
+html/_static/title_background.png
+html/_static/toc.js
+html/_static/triangle_closed.png
+html/_static/triangle_left.png
+html/_static/triangle_open.png
+html/_static/underscore.js
+mock.egg-info/PKG-INFO
+mock.egg-info/SOURCES.txt
+mock.egg-info/dependency_links.txt
+mock.egg-info/top_level.txt
+tests/__init__.py
+tests/_testwith.py
+tests/support.py
+tests/support_with.py
+tests/testcallable.py
+tests/testhelpers.py
+tests/testmagicmethods.py
+tests/testmock.py
+tests/testpatch.py
+tests/testsentinel.py
+tests/testwith.py \ No newline at end of file
diff --git a/third_party/python/mock-1.0.0/mock.egg-info/dependency_links.txt b/third_party/python/mock-1.0.0/mock.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/mock-1.0.0/mock.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/third_party/python/mock-1.0.0/mock.egg-info/top_level.txt b/third_party/python/mock-1.0.0/mock.egg-info/top_level.txt
new file mode 100644
index 0000000000..932a8957f7
--- /dev/null
+++ b/third_party/python/mock-1.0.0/mock.egg-info/top_level.txt
@@ -0,0 +1 @@
+mock
diff --git a/third_party/python/mock-1.0.0/mock.py b/third_party/python/mock-1.0.0/mock.py
new file mode 100644
index 0000000000..1be4e6e458
--- /dev/null
+++ b/third_party/python/mock-1.0.0/mock.py
@@ -0,0 +1,2356 @@
+# mock.py
+# Test tools for mocking and patching.
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+
+# mock 1.0
+# http://www.voidspace.org.uk/python/mock/
+
+# Released subject to the BSD License
+# Please see http://www.voidspace.org.uk/python/license.shtml
+
+# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
+# Comments, suggestions and bug reports welcome.
+
+
+__all__ = (
+ 'Mock',
+ 'MagicMock',
+ 'patch',
+ 'sentinel',
+ 'DEFAULT',
+ 'ANY',
+ 'call',
+ 'create_autospec',
+ 'FILTER_DIR',
+ 'NonCallableMock',
+ 'NonCallableMagicMock',
+ 'mock_open',
+ 'PropertyMock',
+)
+
+
+__version__ = '1.0.0'
+
+
+import pprint
+import sys
+
+try:
+ import inspect
+except ImportError:
+ # for alternative platforms that
+ # may not have inspect
+ inspect = None
+
+try:
+ from functools import wraps
+except ImportError:
+ # Python 2.4 compatibility
+ def wraps(original):
+ def inner(f):
+ f.__name__ = original.__name__
+ f.__doc__ = original.__doc__
+ f.__module__ = original.__module__
+ return f
+ return inner
+
+try:
+ unicode
+except NameError:
+ # Python 3
+ basestring = unicode = str
+
+try:
+ long
+except NameError:
+ # Python 3
+ long = int
+
+try:
+ BaseException
+except NameError:
+ # Python 2.4 compatibility
+ BaseException = Exception
+
+try:
+ next
+except NameError:
+ def next(obj):
+ return obj.next()
+
+
+BaseExceptions = (BaseException,)
+if 'java' in sys.platform:
+ # jython
+ import java
+ BaseExceptions = (BaseException, java.lang.Throwable)
+
+try:
+ _isidentifier = str.isidentifier
+except AttributeError:
+ # Python 2.X
+ import keyword
+ import re
+ regex = re.compile(r'^[a-z_][a-z0-9_]*$', re.I)
+ def _isidentifier(string):
+ if string in keyword.kwlist:
+ return False
+ return regex.match(string)
+
+
+inPy3k = sys.version_info[0] == 3
+
+# Needed to work around Python 3 bug where use of "super" interferes with
+# defining __class__ as a descriptor
+_super = super
+
+self = 'im_self'
+builtin = '__builtin__'
+if inPy3k:
+ self = '__self__'
+ builtin = 'builtins'
+
+FILTER_DIR = True
+
+
+def _is_instance_mock(obj):
+ # can't use isinstance on Mock objects because they override __class__
+ # The base class for all mocks is NonCallableMock
+ return issubclass(type(obj), NonCallableMock)
+
+
+def _is_exception(obj):
+ return (
+ isinstance(obj, BaseExceptions) or
+ isinstance(obj, ClassTypes) and issubclass(obj, BaseExceptions)
+ )
+
+
+class _slotted(object):
+ __slots__ = ['a']
+
+
+DescriptorTypes = (
+ type(_slotted.a),
+ property,
+)
+
+
+def _getsignature(func, skipfirst, instance=False):
+ if inspect is None:
+ raise ImportError('inspect module not available')
+
+ if isinstance(func, ClassTypes) and not instance:
+ try:
+ func = func.__init__
+ except AttributeError:
+ return
+ skipfirst = True
+ elif not isinstance(func, FunctionTypes):
+ # for classes where instance is True we end up here too
+ try:
+ func = func.__call__
+ except AttributeError:
+ return
+
+ if inPy3k:
+ try:
+ argspec = inspect.getfullargspec(func)
+ except TypeError:
+ # C function / method, possibly inherited object().__init__
+ return
+ regargs, varargs, varkw, defaults, kwonly, kwonlydef, ann = argspec
+ else:
+ try:
+ regargs, varargs, varkwargs, defaults = inspect.getargspec(func)
+ except TypeError:
+ # C function / method, possibly inherited object().__init__
+ return
+
+ # instance methods and classmethods need to lose the self argument
+ if getattr(func, self, None) is not None:
+ regargs = regargs[1:]
+ if skipfirst:
+ # this condition and the above one are never both True - why?
+ regargs = regargs[1:]
+
+ if inPy3k:
+ signature = inspect.formatargspec(
+ regargs, varargs, varkw, defaults,
+ kwonly, kwonlydef, ann, formatvalue=lambda value: "")
+ else:
+ signature = inspect.formatargspec(
+ regargs, varargs, varkwargs, defaults,
+ formatvalue=lambda value: "")
+ return signature[1:-1], func
+
+
+def _check_signature(func, mock, skipfirst, instance=False):
+ if not _callable(func):
+ return
+
+ result = _getsignature(func, skipfirst, instance)
+ if result is None:
+ return
+ signature, func = result
+
+ # can't use self because "self" is common as an argument name
+ # and unfortunately it is not always even the first argument
+ src = "lambda _mock_self, %s: None" % signature
+ checksig = eval(src, {})
+ _copy_func_details(func, checksig)
+ type(mock)._mock_check_sig = checksig
+
+
+def _copy_func_details(func, funcopy):
+ funcopy.__name__ = func.__name__
+ funcopy.__doc__ = func.__doc__
+ #funcopy.__dict__.update(func.__dict__)
+ funcopy.__module__ = func.__module__
+ if not inPy3k:
+ funcopy.func_defaults = func.func_defaults
+ return
+ funcopy.__defaults__ = func.__defaults__
+ funcopy.__kwdefaults__ = func.__kwdefaults__
+
+
+def _callable(obj):
+ if isinstance(obj, ClassTypes):
+ return True
+ if getattr(obj, '__call__', None) is not None:
+ return True
+ return False
+
+
+def _is_list(obj):
+ # checks for list or tuples
+ # XXXX badly named!
+ return type(obj) in (list, tuple)
+
+
+def _instance_callable(obj):
+ """Given an object, return True if the object is callable.
+ For classes, return True if instances would be callable."""
+ if not isinstance(obj, ClassTypes):
+ # already an instance
+ return getattr(obj, '__call__', None) is not None
+
+ klass = obj
+ # uses __bases__ instead of __mro__ so that we work with old style classes
+ if klass.__dict__.get('__call__') is not None:
+ return True
+
+ for base in klass.__bases__:
+ if _instance_callable(base):
+ return True
+ return False
+
+
+def _set_signature(mock, original, instance=False):
+ # creates a function with signature (*args, **kwargs) that delegates to a
+ # mock. It still does signature checking by calling a lambda with the same
+ # signature as the original.
+ if not _callable(original):
+ return
+
+ skipfirst = isinstance(original, ClassTypes)
+ result = _getsignature(original, skipfirst, instance)
+ if result is None:
+ # was a C function (e.g. object().__init__ ) that can't be mocked
+ return
+
+ signature, func = result
+
+ src = "lambda %s: None" % signature
+ checksig = eval(src, {})
+ _copy_func_details(func, checksig)
+
+ name = original.__name__
+ if not _isidentifier(name):
+ name = 'funcopy'
+ context = {'_checksig_': checksig, 'mock': mock}
+ src = """def %s(*args, **kwargs):
+ _checksig_(*args, **kwargs)
+ return mock(*args, **kwargs)""" % name
+ exec (src, context)
+ funcopy = context[name]
+ _setup_func(funcopy, mock)
+ return funcopy
+
+
+def _setup_func(funcopy, mock):
+ funcopy.mock = mock
+
+ # can't use isinstance with mocks
+ if not _is_instance_mock(mock):
+ return
+
+ def assert_called_with(*args, **kwargs):
+ return mock.assert_called_with(*args, **kwargs)
+ def assert_called_once_with(*args, **kwargs):
+ return mock.assert_called_once_with(*args, **kwargs)
+ def assert_has_calls(*args, **kwargs):
+ return mock.assert_has_calls(*args, **kwargs)
+ def assert_any_call(*args, **kwargs):
+ return mock.assert_any_call(*args, **kwargs)
+ def reset_mock():
+ funcopy.method_calls = _CallList()
+ funcopy.mock_calls = _CallList()
+ mock.reset_mock()
+ ret = funcopy.return_value
+ if _is_instance_mock(ret) and not ret is mock:
+ ret.reset_mock()
+
+ funcopy.called = False
+ funcopy.call_count = 0
+ funcopy.call_args = None
+ funcopy.call_args_list = _CallList()
+ funcopy.method_calls = _CallList()
+ funcopy.mock_calls = _CallList()
+
+ funcopy.return_value = mock.return_value
+ funcopy.side_effect = mock.side_effect
+ funcopy._mock_children = mock._mock_children
+
+ funcopy.assert_called_with = assert_called_with
+ funcopy.assert_called_once_with = assert_called_once_with
+ funcopy.assert_has_calls = assert_has_calls
+ funcopy.assert_any_call = assert_any_call
+ funcopy.reset_mock = reset_mock
+
+ mock._mock_delegate = funcopy
+
+
+def _is_magic(name):
+ return '__%s__' % name[2:-2] == name
+
+
+class _SentinelObject(object):
+ "A unique, named, sentinel object."
+ def __init__(self, name):
+ self.name = name
+
+ def __repr__(self):
+ return 'sentinel.%s' % self.name
+
+
+class _Sentinel(object):
+ """Access attributes to return a named object, usable as a sentinel."""
+ def __init__(self):
+ self._sentinels = {}
+
+ def __getattr__(self, name):
+ if name == '__bases__':
+ # Without this help(mock) raises an exception
+ raise AttributeError
+ return self._sentinels.setdefault(name, _SentinelObject(name))
+
+
+sentinel = _Sentinel()
+
+DEFAULT = sentinel.DEFAULT
+_missing = sentinel.MISSING
+_deleted = sentinel.DELETED
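+
+# A brief illustrative note on the behaviour above (the attribute name is
+# only an example): sentinel attributes are created on demand and the same
+# name always returns the identical object, e.g.
+#
+#     sentinel.some_object is sentinel.some_object    # -> True
+#     repr(sentinel.some_object)                      # -> 'sentinel.some_object'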
+
+
+class OldStyleClass:
+ pass
+ClassType = type(OldStyleClass)
+
+
+def _copy(value):
+ if type(value) in (dict, list, tuple, set):
+ return type(value)(value)
+ return value
+
+
+ClassTypes = (type,)
+if not inPy3k:
+ ClassTypes = (type, ClassType)
+
+_allowed_names = set(
+ [
+ 'return_value', '_mock_return_value', 'side_effect',
+ '_mock_side_effect', '_mock_parent', '_mock_new_parent',
+ '_mock_name', '_mock_new_name'
+ ]
+)
+
+
+def _delegating_property(name):
+ _allowed_names.add(name)
+ _the_name = '_mock_' + name
+ def _get(self, name=name, _the_name=_the_name):
+ sig = self._mock_delegate
+ if sig is None:
+ return getattr(self, _the_name)
+ return getattr(sig, name)
+ def _set(self, value, name=name, _the_name=_the_name):
+ sig = self._mock_delegate
+ if sig is None:
+ self.__dict__[_the_name] = value
+ else:
+ setattr(sig, name, value)
+
+ return property(_get, _set)
+
+
+
+class _CallList(list):
+
+ def __contains__(self, value):
+ if not isinstance(value, list):
+ return list.__contains__(self, value)
+ len_value = len(value)
+ len_self = len(self)
+ if len_value > len_self:
+ return False
+
+ for i in range(0, len_self - len_value + 1):
+ sub_list = self[i:i+len_value]
+ if sub_list == value:
+ return True
+ return False
+
+ def __repr__(self):
+ return pprint.pformat(list(self))
+
+
+def _check_and_set_parent(parent, value, name, new_name):
+ if not _is_instance_mock(value):
+ return False
+ if ((value._mock_name or value._mock_new_name) or
+ (value._mock_parent is not None) or
+ (value._mock_new_parent is not None)):
+ return False
+
+ _parent = parent
+ while _parent is not None:
+ # setting a mock (value) as a child or return value of itself
+ # should not modify the mock
+ if _parent is value:
+ return False
+ _parent = _parent._mock_new_parent
+
+ if new_name:
+ value._mock_new_parent = parent
+ value._mock_new_name = new_name
+ if name:
+ value._mock_parent = parent
+ value._mock_name = name
+ return True
+
+
+
+class Base(object):
+ _mock_return_value = DEFAULT
+ _mock_side_effect = None
+ def __init__(self, *args, **kwargs):
+ pass
+
+
+
+class NonCallableMock(Base):
+ """A non-callable version of `Mock`"""
+
+ def __new__(cls, *args, **kw):
+ # every instance has its own class
+ # so we can create magic methods on the
+ # class without stomping on other mocks
+ new = type(cls.__name__, (cls,), {'__doc__': cls.__doc__})
+ instance = object.__new__(new)
+ return instance
+
+
+ def __init__(
+ self, spec=None, wraps=None, name=None, spec_set=None,
+ parent=None, _spec_state=None, _new_name='', _new_parent=None,
+ **kwargs
+ ):
+ if _new_parent is None:
+ _new_parent = parent
+
+ __dict__ = self.__dict__
+ __dict__['_mock_parent'] = parent
+ __dict__['_mock_name'] = name
+ __dict__['_mock_new_name'] = _new_name
+ __dict__['_mock_new_parent'] = _new_parent
+
+ if spec_set is not None:
+ spec = spec_set
+ spec_set = True
+
+ self._mock_add_spec(spec, spec_set)
+
+ __dict__['_mock_children'] = {}
+ __dict__['_mock_wraps'] = wraps
+ __dict__['_mock_delegate'] = None
+
+ __dict__['_mock_called'] = False
+ __dict__['_mock_call_args'] = None
+ __dict__['_mock_call_count'] = 0
+ __dict__['_mock_call_args_list'] = _CallList()
+ __dict__['_mock_mock_calls'] = _CallList()
+
+ __dict__['method_calls'] = _CallList()
+
+ if kwargs:
+ self.configure_mock(**kwargs)
+
+ _super(NonCallableMock, self).__init__(
+ spec, wraps, name, spec_set, parent,
+ _spec_state
+ )
+
+
+ def attach_mock(self, mock, attribute):
+ """
+ Attach a mock as an attribute of this one, replacing its name and
+ parent. Calls to the attached mock will be recorded in the
+ `method_calls` and `mock_calls` attributes of this one."""
+ mock._mock_parent = None
+ mock._mock_new_parent = None
+ mock._mock_name = ''
+ mock._mock_new_name = None
+
+ setattr(self, attribute, mock)
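+
+ # A small sketch of the behaviour documented above (the names are only
+ # illustrative): once attached, calls to the child are recorded on the
+ # parent, e.g.
+ #
+ #     parent = Mock()
+ #     child = Mock()
+ #     parent.attach_mock(child, 'child')
+ #     child(1)
+ #     parent.mock_calls        # -> [call.child(1)]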
+
+
+ def mock_add_spec(self, spec, spec_set=False):
+ """Add a spec to a mock. `spec` can either be an object or a
+ list of strings. Only attributes on the `spec` can be fetched as
+ attributes from the mock.
+
+ If `spec_set` is True then only attributes on the spec can be set."""
+ self._mock_add_spec(spec, spec_set)
+
+
+ def _mock_add_spec(self, spec, spec_set):
+ _spec_class = None
+
+ if spec is not None and not _is_list(spec):
+ if isinstance(spec, ClassTypes):
+ _spec_class = spec
+ else:
+ _spec_class = _get_class(spec)
+
+ spec = dir(spec)
+
+ __dict__ = self.__dict__
+ __dict__['_spec_class'] = _spec_class
+ __dict__['_spec_set'] = spec_set
+ __dict__['_mock_methods'] = spec
+
+
+ def __get_return_value(self):
+ ret = self._mock_return_value
+ if self._mock_delegate is not None:
+ ret = self._mock_delegate.return_value
+
+ if ret is DEFAULT:
+ ret = self._get_child_mock(
+ _new_parent=self, _new_name='()'
+ )
+ self.return_value = ret
+ return ret
+
+
+ def __set_return_value(self, value):
+ if self._mock_delegate is not None:
+ self._mock_delegate.return_value = value
+ else:
+ self._mock_return_value = value
+ _check_and_set_parent(self, value, None, '()')
+
+ __return_value_doc = "The value to be returned when the mock is called."
+ return_value = property(__get_return_value, __set_return_value,
+ __return_value_doc)
+
+
+ @property
+ def __class__(self):
+ if self._spec_class is None:
+ return type(self)
+ return self._spec_class
+
+ called = _delegating_property('called')
+ call_count = _delegating_property('call_count')
+ call_args = _delegating_property('call_args')
+ call_args_list = _delegating_property('call_args_list')
+ mock_calls = _delegating_property('mock_calls')
+
+
+ def __get_side_effect(self):
+ sig = self._mock_delegate
+ if sig is None:
+ return self._mock_side_effect
+ return sig.side_effect
+
+ def __set_side_effect(self, value):
+ value = _try_iter(value)
+ sig = self._mock_delegate
+ if sig is None:
+ self._mock_side_effect = value
+ else:
+ sig.side_effect = value
+
+ side_effect = property(__get_side_effect, __set_side_effect)
+
+
+ def reset_mock(self):
+ "Restore the mock object to its initial state."
+ self.called = False
+ self.call_args = None
+ self.call_count = 0
+ self.mock_calls = _CallList()
+ self.call_args_list = _CallList()
+ self.method_calls = _CallList()
+
+ for child in self._mock_children.values():
+ if isinstance(child, _SpecState):
+ continue
+ child.reset_mock()
+
+ ret = self._mock_return_value
+ if _is_instance_mock(ret) and ret is not self:
+ ret.reset_mock()
+
+
+ def configure_mock(self, **kwargs):
+ """Set attributes on the mock through keyword arguments.
+
+ Attributes plus return values and side effects can be set on child
+ mocks using standard dot notation and unpacking a dictionary in the
+ method call:
+
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock.configure_mock(**attrs)"""
+ for arg, val in sorted(kwargs.items(),
+ # we sort on the number of dots so that
+ # attributes are set before we set attributes on
+ # attributes
+ key=lambda entry: entry[0].count('.')):
+ args = arg.split('.')
+ final = args.pop()
+ obj = self
+ for entry in args:
+ obj = getattr(obj, entry)
+ setattr(obj, final, val)
+
+
+ def __getattr__(self, name):
+ if name == '_mock_methods':
+ raise AttributeError(name)
+ elif self._mock_methods is not None:
+ if name not in self._mock_methods or name in _all_magics:
+ raise AttributeError("Mock object has no attribute %r" % name)
+ elif _is_magic(name):
+ raise AttributeError(name)
+
+ result = self._mock_children.get(name)
+ if result is _deleted:
+ raise AttributeError(name)
+ elif result is None:
+ wraps = None
+ if self._mock_wraps is not None:
+ # XXXX should we get the attribute without triggering code
+ # execution?
+ wraps = getattr(self._mock_wraps, name)
+
+ result = self._get_child_mock(
+ parent=self, name=name, wraps=wraps, _new_name=name,
+ _new_parent=self
+ )
+ self._mock_children[name] = result
+
+ elif isinstance(result, _SpecState):
+ result = create_autospec(
+ result.spec, result.spec_set, result.instance,
+ result.parent, result.name
+ )
+ self._mock_children[name] = result
+
+ return result
+
+
+ def __repr__(self):
+ _name_list = [self._mock_new_name]
+ _parent = self._mock_new_parent
+ last = self
+
+ dot = '.'
+ if _name_list == ['()']:
+ dot = ''
+ seen = set()
+ while _parent is not None:
+ last = _parent
+
+ _name_list.append(_parent._mock_new_name + dot)
+ dot = '.'
+ if _parent._mock_new_name == '()':
+ dot = ''
+
+ _parent = _parent._mock_new_parent
+
+ # use ids here so as not to call __hash__ on the mocks
+ if id(_parent) in seen:
+ break
+ seen.add(id(_parent))
+
+ _name_list = list(reversed(_name_list))
+ _first = last._mock_name or 'mock'
+ if len(_name_list) > 1:
+ if _name_list[1] not in ('()', '().'):
+ _first += '.'
+ _name_list[0] = _first
+ name = ''.join(_name_list)
+
+ name_string = ''
+ if name not in ('mock', 'mock.'):
+ name_string = ' name=%r' % name
+
+ spec_string = ''
+ if self._spec_class is not None:
+ spec_string = ' spec=%r'
+ if self._spec_set:
+ spec_string = ' spec_set=%r'
+ spec_string = spec_string % self._spec_class.__name__
+ return "<%s%s%s id='%s'>" % (
+ type(self).__name__,
+ name_string,
+ spec_string,
+ id(self)
+ )
+
+
+ def __dir__(self):
+ """Filter the output of `dir(mock)` to only useful members.
+ XXXX
+ """
+ extras = self._mock_methods or []
+ from_type = dir(type(self))
+ from_dict = list(self.__dict__)
+
+ if FILTER_DIR:
+ from_type = [e for e in from_type if not e.startswith('_')]
+ from_dict = [e for e in from_dict if not e.startswith('_') or
+ _is_magic(e)]
+ return sorted(set(extras + from_type + from_dict +
+ list(self._mock_children)))
+
+
+ def __setattr__(self, name, value):
+ if name in _allowed_names:
+ # property setters go through here
+ return object.__setattr__(self, name, value)
+ elif (self._spec_set and self._mock_methods is not None and
+ name not in self._mock_methods and
+ name not in self.__dict__):
+ raise AttributeError("Mock object has no attribute '%s'" % name)
+ elif name in _unsupported_magics:
+ msg = 'Attempting to set unsupported magic method %r.' % name
+ raise AttributeError(msg)
+ elif name in _all_magics:
+ if self._mock_methods is not None and name not in self._mock_methods:
+ raise AttributeError("Mock object has no attribute '%s'" % name)
+
+ if not _is_instance_mock(value):
+ setattr(type(self), name, _get_method(name, value))
+ original = value
+ value = lambda *args, **kw: original(self, *args, **kw)
+ else:
+ # only set _new_name and not name so that mock_calls is tracked
+ # but not method calls
+ _check_and_set_parent(self, value, None, name)
+ setattr(type(self), name, value)
+ self._mock_children[name] = value
+ elif name == '__class__':
+ self._spec_class = value
+ return
+ else:
+ if _check_and_set_parent(self, value, name, name):
+ self._mock_children[name] = value
+ return object.__setattr__(self, name, value)
+
+
+ def __delattr__(self, name):
+ if name in _all_magics and name in type(self).__dict__:
+ delattr(type(self), name)
+ if name not in self.__dict__:
+ # for magic methods that are still MagicProxy objects and
+ # not set on the instance itself
+ return
+
+ if name in self.__dict__:
+ object.__delattr__(self, name)
+
+ obj = self._mock_children.get(name, _missing)
+ if obj is _deleted:
+ raise AttributeError(name)
+ if obj is not _missing:
+ del self._mock_children[name]
+ self._mock_children[name] = _deleted
+
+
+
+ def _format_mock_call_signature(self, args, kwargs):
+ name = self._mock_name or 'mock'
+ return _format_call_signature(name, args, kwargs)
+
+
+ def _format_mock_failure_message(self, args, kwargs):
+ message = 'Expected call: %s\nActual call: %s'
+ expected_string = self._format_mock_call_signature(args, kwargs)
+ call_args = self.call_args
+ if len(call_args) == 3:
+ call_args = call_args[1:]
+ actual_string = self._format_mock_call_signature(*call_args)
+ return message % (expected_string, actual_string)
+
+
+ def assert_called_with(_mock_self, *args, **kwargs):
+ """assert that the mock was called with the specified arguments.
+
+ Raises an AssertionError if the args and keyword args passed in are
+ different to the last call to the mock."""
+ self = _mock_self
+ if self.call_args is None:
+ expected = self._format_mock_call_signature(args, kwargs)
+ raise AssertionError('Expected call: %s\nNot called' % (expected,))
+
+ if self.call_args != (args, kwargs):
+ msg = self._format_mock_failure_message(args, kwargs)
+ raise AssertionError(msg)
+
+
+ def assert_called_once_with(_mock_self, *args, **kwargs):
+ """assert that the mock was called exactly once and with the specified
+ arguments."""
+ self = _mock_self
+ if not self.call_count == 1:
+ msg = ("Expected to be called once. Called %s times." %
+ self.call_count)
+ raise AssertionError(msg)
+ return self.assert_called_with(*args, **kwargs)
+
+
+ def assert_has_calls(self, calls, any_order=False):
+ """assert the mock has been called with the specified calls.
+ The `mock_calls` list is checked for the calls.
+
+ If `any_order` is False (the default) then the calls must be
+ sequential. There can be extra calls before or after the
+ specified calls.
+
+ If `any_order` is True then the calls can be in any order, but
+ they must all appear in `mock_calls`."""
+ if not any_order:
+ if calls not in self.mock_calls:
+ raise AssertionError(
+ 'Calls not found.\nExpected: %r\n'
+ 'Actual: %r' % (calls, self.mock_calls)
+ )
+ return
+
+ all_calls = list(self.mock_calls)
+
+ not_found = []
+ for kall in calls:
+ try:
+ all_calls.remove(kall)
+ except ValueError:
+ not_found.append(kall)
+ if not_found:
+ raise AssertionError(
+ '%r not all found in call list' % (tuple(not_found),)
+ )
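+
+ # A minimal sketch of the `any_order` distinction described in the
+ # docstring above (the argument values are only illustrative):
+ #
+ #     m = Mock()
+ #     m(1); m(2); m(3)
+ #     m.assert_has_calls([call(2), call(3)])                  # consecutive, in order
+ #     m.assert_has_calls([call(3), call(1)], any_order=True)  # order ignored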
+
+
+ def assert_any_call(self, *args, **kwargs):
+ """assert the mock has been called with the specified arguments.
+
+ The assert passes if the mock has *ever* been called, unlike
+ `assert_called_with` and `assert_called_once_with` that only pass if
+ the call is the most recent one."""
+ kall = call(*args, **kwargs)
+ if kall not in self.call_args_list:
+ expected_string = self._format_mock_call_signature(args, kwargs)
+ raise AssertionError(
+ '%s call not found' % expected_string
+ )
+
+
+ def _get_child_mock(self, **kw):
+ """Create the child mocks for attributes and return value.
+ By default child mocks will be the same type as the parent.
+ Subclasses of Mock may want to override this to customize the way
+ child mocks are made.
+
+ For non-callable mocks the callable variant will be used (rather than
+ any custom subclass)."""
+ _type = type(self)
+ if not issubclass(_type, CallableMixin):
+ if issubclass(_type, NonCallableMagicMock):
+ klass = MagicMock
+ elif issubclass(_type, NonCallableMock) :
+ klass = Mock
+ else:
+ klass = _type.__mro__[1]
+ return klass(**kw)
+
+
+
+def _try_iter(obj):
+ if obj is None:
+ return obj
+ if _is_exception(obj):
+ return obj
+ if _callable(obj):
+ return obj
+ try:
+ return iter(obj)
+ except TypeError:
+ # XXXX backwards compatibility
+ # but this will blow up on first call - so maybe we should fail early?
+ return obj
+
+
+
+class CallableMixin(Base):
+
+ def __init__(self, spec=None, side_effect=None, return_value=DEFAULT,
+ wraps=None, name=None, spec_set=None, parent=None,
+ _spec_state=None, _new_name='', _new_parent=None, **kwargs):
+ self.__dict__['_mock_return_value'] = return_value
+
+ _super(CallableMixin, self).__init__(
+ spec, wraps, name, spec_set, parent,
+ _spec_state, _new_name, _new_parent, **kwargs
+ )
+
+ self.side_effect = side_effect
+
+
+ def _mock_check_sig(self, *args, **kwargs):
+ # stub method that can be replaced with one with a specific signature
+ pass
+
+
+ def __call__(_mock_self, *args, **kwargs):
+ # can't use self in case a function / method we are mocking uses self
+ # in the signature
+ _mock_self._mock_check_sig(*args, **kwargs)
+ return _mock_self._mock_call(*args, **kwargs)
+
+
+ def _mock_call(_mock_self, *args, **kwargs):
+ self = _mock_self
+ self.called = True
+ self.call_count += 1
+ self.call_args = _Call((args, kwargs), two=True)
+ self.call_args_list.append(_Call((args, kwargs), two=True))
+
+ _new_name = self._mock_new_name
+ _new_parent = self._mock_new_parent
+ self.mock_calls.append(_Call(('', args, kwargs)))
+
+ seen = set()
+ skip_next_dot = _new_name == '()'
+ do_method_calls = self._mock_parent is not None
+ name = self._mock_name
+ while _new_parent is not None:
+ this_mock_call = _Call((_new_name, args, kwargs))
+ if _new_parent._mock_new_name:
+ dot = '.'
+ if skip_next_dot:
+ dot = ''
+
+ skip_next_dot = False
+ if _new_parent._mock_new_name == '()':
+ skip_next_dot = True
+
+ _new_name = _new_parent._mock_new_name + dot + _new_name
+
+ if do_method_calls:
+ if _new_name == name:
+ this_method_call = this_mock_call
+ else:
+ this_method_call = _Call((name, args, kwargs))
+ _new_parent.method_calls.append(this_method_call)
+
+ do_method_calls = _new_parent._mock_parent is not None
+ if do_method_calls:
+ name = _new_parent._mock_name + '.' + name
+
+ _new_parent.mock_calls.append(this_mock_call)
+ _new_parent = _new_parent._mock_new_parent
+
+ # use ids here so as not to call __hash__ on the mocks
+ _new_parent_id = id(_new_parent)
+ if _new_parent_id in seen:
+ break
+ seen.add(_new_parent_id)
+
+ ret_val = DEFAULT
+ effect = self.side_effect
+ if effect is not None:
+ if _is_exception(effect):
+ raise effect
+
+ if not _callable(effect):
+ result = next(effect)
+ if _is_exception(result):
+ raise result
+ return result
+
+ ret_val = effect(*args, **kwargs)
+ if ret_val is DEFAULT:
+ ret_val = self.return_value
+
+ if (self._mock_wraps is not None and
+ self._mock_return_value is DEFAULT):
+ return self._mock_wraps(*args, **kwargs)
+ if ret_val is DEFAULT:
+ ret_val = self.return_value
+ return ret_val
+
+
+
+class Mock(CallableMixin, NonCallableMock):
+ """
+ Create a new `Mock` object. `Mock` takes several optional arguments
+ that specify the behaviour of the Mock object:
+
+ * `spec`: This can be either a list of strings or an existing object (a
+ class or instance) that acts as the specification for the mock object. If
+ you pass in an object then a list of strings is formed by calling dir on
+ the object (excluding unsupported magic attributes and methods). Accessing
+ any attribute not in this list will raise an `AttributeError`.
+
+ If `spec` is an object (rather than a list of strings) then
+ `mock.__class__` returns the class of the spec object. This allows mocks
+ to pass `isinstance` tests.
+
+ * `spec_set`: A stricter variant of `spec`. If used, attempting to *set*
+ or get an attribute on the mock that isn't on the object passed as
+ `spec_set` will raise an `AttributeError`.
+
+ * `side_effect`: A function to be called whenever the Mock is called. See
+ the `side_effect` attribute. Useful for raising exceptions or
+ dynamically changing return values. The function is called with the same
+ arguments as the mock, and unless it returns `DEFAULT`, the return
+ value of this function is used as the return value.
+
+ Alternatively `side_effect` can be an exception class or instance. In
+ this case the exception will be raised when the mock is called.
+
+ If `side_effect` is an iterable then each call to the mock will return
+ the next value from the iterable. If any of the members of the iterable
+ are exceptions they will be raised instead of returned.
+
+ * `return_value`: The value returned when the mock is called. By default
+ this is a new Mock (created on first access). See the
+ `return_value` attribute.
+
+ * `wraps`: Item for the mock object to wrap. If `wraps` is not None then
+ calling the Mock will pass the call through to the wrapped object
+ (returning the real result). Attribute access on the mock will return a
+ Mock object that wraps the corresponding attribute of the wrapped object
+ (so attempting to access an attribute that doesn't exist will raise an
+ `AttributeError`).
+
+ If the mock has an explicit `return_value` set then calls are not passed
+ to the wrapped object and the `return_value` is returned instead.
+
+ * `name`: If the mock has a name then it will be used in the repr of the
+ mock. This can be useful for debugging. The name is propagated to child
+ mocks.
+
+ Mocks can also be called with arbitrary keyword arguments. These will be
+ used to set attributes on the mock after it is created.
+ """
+
+
+
+def _dot_lookup(thing, comp, import_path):
+ try:
+ return getattr(thing, comp)
+ except AttributeError:
+ __import__(import_path)
+ return getattr(thing, comp)
+
+
+def _importer(target):
+ components = target.split('.')
+ import_path = components.pop(0)
+ thing = __import__(import_path)
+
+ for comp in components:
+ import_path += ".%s" % comp
+ thing = _dot_lookup(thing, comp, import_path)
+ return thing
+
+
+def _is_started(patcher):
+ # XXXX horrible
+ return hasattr(patcher, 'is_local')
+
+
+class _patch(object):
+
+ attribute_name = None
+ _active_patches = set()
+
+ def __init__(
+ self, getter, attribute, new, spec, create,
+ spec_set, autospec, new_callable, kwargs
+ ):
+ if new_callable is not None:
+ if new is not DEFAULT:
+ raise ValueError(
+ "Cannot use 'new' and 'new_callable' together"
+ )
+ if autospec is not None:
+ raise ValueError(
+ "Cannot use 'autospec' and 'new_callable' together"
+ )
+
+ self.getter = getter
+ self.attribute = attribute
+ self.new = new
+ self.new_callable = new_callable
+ self.spec = spec
+ self.create = create
+ self.has_local = False
+ self.spec_set = spec_set
+ self.autospec = autospec
+ self.kwargs = kwargs
+ self.additional_patchers = []
+
+
+ def copy(self):
+ patcher = _patch(
+ self.getter, self.attribute, self.new, self.spec,
+ self.create, self.spec_set,
+ self.autospec, self.new_callable, self.kwargs
+ )
+ patcher.attribute_name = self.attribute_name
+ patcher.additional_patchers = [
+ p.copy() for p in self.additional_patchers
+ ]
+ return patcher
+
+
+ def __call__(self, func):
+ if isinstance(func, ClassTypes):
+ return self.decorate_class(func)
+ return self.decorate_callable(func)
+
+
+ def decorate_class(self, klass):
+ for attr in dir(klass):
+ if not attr.startswith(patch.TEST_PREFIX):
+ continue
+
+ attr_value = getattr(klass, attr)
+ if not hasattr(attr_value, "__call__"):
+ continue
+
+ patcher = self.copy()
+ setattr(klass, attr, patcher(attr_value))
+ return klass
+
+
+ def decorate_callable(self, func):
+ if hasattr(func, 'patchings'):
+ func.patchings.append(self)
+ return func
+
+ @wraps(func)
+ def patched(*args, **keywargs):
+ # don't use a with here (backwards compatibility with Python 2.4)
+ extra_args = []
+ entered_patchers = []
+
+ # can't use try...except...finally because of Python 2.4
+ # compatibility
+ exc_info = tuple()
+ try:
+ try:
+ for patching in patched.patchings:
+ arg = patching.__enter__()
+ entered_patchers.append(patching)
+ if patching.attribute_name is not None:
+ keywargs.update(arg)
+ elif patching.new is DEFAULT:
+ extra_args.append(arg)
+
+ args += tuple(extra_args)
+ return func(*args, **keywargs)
+ except:
+ if (patching not in entered_patchers and
+ _is_started(patching)):
+ # the patcher may have been started, but an exception
+ # raised whilst entering one of its additional_patchers
+ entered_patchers.append(patching)
+ # Pass the exception to __exit__
+ exc_info = sys.exc_info()
+ # re-raise the exception
+ raise
+ finally:
+ for patching in reversed(entered_patchers):
+ patching.__exit__(*exc_info)
+
+ patched.patchings = [self]
+ if hasattr(func, 'func_code'):
+ # not in Python 3
+ patched.compat_co_firstlineno = getattr(
+ func, "compat_co_firstlineno",
+ func.func_code.co_firstlineno
+ )
+ return patched
+
+
+ def get_original(self):
+ target = self.getter()
+ name = self.attribute
+
+ original = DEFAULT
+ local = False
+
+ try:
+ original = target.__dict__[name]
+ except (AttributeError, KeyError):
+ original = getattr(target, name, DEFAULT)
+ else:
+ local = True
+
+ if not self.create and original is DEFAULT:
+ raise AttributeError(
+ "%s does not have the attribute %r" % (target, name)
+ )
+ return original, local
+
+
+ def __enter__(self):
+ """Perform the patch."""
+ new, spec, spec_set = self.new, self.spec, self.spec_set
+ autospec, kwargs = self.autospec, self.kwargs
+ new_callable = self.new_callable
+ self.target = self.getter()
+
+ # normalise False to None
+ if spec is False:
+ spec = None
+ if spec_set is False:
+ spec_set = None
+ if autospec is False:
+ autospec = None
+
+ if spec is not None and autospec is not None:
+ raise TypeError("Can't specify spec and autospec")
+ if ((spec is not None or autospec is not None) and
+ spec_set not in (True, None)):
+ raise TypeError("Can't provide explicit spec_set *and* spec or autospec")
+
+ original, local = self.get_original()
+
+ if new is DEFAULT and autospec is None:
+ inherit = False
+ if spec is True:
+ # set spec to the object we are replacing
+ spec = original
+ if spec_set is True:
+ spec_set = original
+ spec = None
+ elif spec is not None:
+ if spec_set is True:
+ spec_set = spec
+ spec = None
+ elif spec_set is True:
+ spec_set = original
+
+ if spec is not None or spec_set is not None:
+ if original is DEFAULT:
+ raise TypeError("Can't use 'spec' with create=True")
+ if isinstance(original, ClassTypes):
+ # If we're patching out a class and there is a spec
+ inherit = True
+
+ Klass = MagicMock
+ _kwargs = {}
+ if new_callable is not None:
+ Klass = new_callable
+ elif spec is not None or spec_set is not None:
+ this_spec = spec
+ if spec_set is not None:
+ this_spec = spec_set
+ if _is_list(this_spec):
+ not_callable = '__call__' not in this_spec
+ else:
+ not_callable = not _callable(this_spec)
+ if not_callable:
+ Klass = NonCallableMagicMock
+
+ if spec is not None:
+ _kwargs['spec'] = spec
+ if spec_set is not None:
+ _kwargs['spec_set'] = spec_set
+
+ # add a name to mocks
+ if (isinstance(Klass, type) and
+ issubclass(Klass, NonCallableMock) and self.attribute):
+ _kwargs['name'] = self.attribute
+
+ _kwargs.update(kwargs)
+ new = Klass(**_kwargs)
+
+ if inherit and _is_instance_mock(new):
+ # we can only tell if the instance should be callable if the
+ # spec is not a list
+ this_spec = spec
+ if spec_set is not None:
+ this_spec = spec_set
+ if (not _is_list(this_spec) and not
+ _instance_callable(this_spec)):
+ Klass = NonCallableMagicMock
+
+ _kwargs.pop('name')
+ new.return_value = Klass(_new_parent=new, _new_name='()',
+ **_kwargs)
+ elif autospec is not None:
+ # spec is ignored, new *must* be default, spec_set is treated
+ # as a boolean. Should we check spec is not None and that spec_set
+ # is a bool?
+ if new is not DEFAULT:
+ raise TypeError(
+ "autospec creates the mock for you. Can't specify "
+ "autospec and new."
+ )
+ if original is DEFAULT:
+ raise TypeError("Can't use 'autospec' with create=True")
+ spec_set = bool(spec_set)
+ if autospec is True:
+ autospec = original
+
+ new = create_autospec(autospec, spec_set=spec_set,
+ _name=self.attribute, **kwargs)
+ elif kwargs:
+ # can't set keyword args when we aren't creating the mock
+ # XXXX If new is a Mock we could call new.configure_mock(**kwargs)
+ raise TypeError("Can't pass kwargs to a mock we aren't creating")
+
+ new_attr = new
+
+ self.temp_original = original
+ self.is_local = local
+ setattr(self.target, self.attribute, new_attr)
+ if self.attribute_name is not None:
+ extra_args = {}
+ if self.new is DEFAULT:
+ extra_args[self.attribute_name] = new
+ for patching in self.additional_patchers:
+ arg = patching.__enter__()
+ if patching.new is DEFAULT:
+ extra_args.update(arg)
+ return extra_args
+
+ return new
+
+
+ def __exit__(self, *exc_info):
+ """Undo the patch."""
+ if not _is_started(self):
+ raise RuntimeError('stop called on unstarted patcher')
+
+ if self.is_local and self.temp_original is not DEFAULT:
+ setattr(self.target, self.attribute, self.temp_original)
+ else:
+ delattr(self.target, self.attribute)
+ if not self.create and not hasattr(self.target, self.attribute):
+ # needed for proxy objects like django settings
+ setattr(self.target, self.attribute, self.temp_original)
+
+ del self.temp_original
+ del self.is_local
+ del self.target
+ for patcher in reversed(self.additional_patchers):
+ if _is_started(patcher):
+ patcher.__exit__(*exc_info)
+
+
+ def start(self):
+ """Activate a patch, returning any created mock."""
+ result = self.__enter__()
+ self._active_patches.add(self)
+ return result
+
+
+ def stop(self):
+ """Stop an active patch."""
+ self._active_patches.discard(self)
+ return self.__exit__()
+
+
+
+def _get_target(target):
+ try:
+ target, attribute = target.rsplit('.', 1)
+ except (TypeError, ValueError):
+ raise TypeError("Need a valid target to patch. You supplied: %r" %
+ (target,))
+ getter = lambda: _importer(target)
+ return getter, attribute
+
+
+def _patch_object(
+ target, attribute, new=DEFAULT, spec=None,
+ create=False, spec_set=None, autospec=None,
+ new_callable=None, **kwargs
+ ):
+ """
+ patch.object(target, attribute, new=DEFAULT, spec=None, create=False,
+ spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ patch the named member (`attribute`) on an object (`target`) with a mock
+ object.
+
+ `patch.object` can be used as a decorator, class decorator or a context
+ manager. Arguments `new`, `spec`, `create`, `spec_set`,
+ `autospec` and `new_callable` have the same meaning as for `patch`. Like
+ `patch`, `patch.object` takes arbitrary keyword arguments for configuring
+ the mock object it creates.
+
+ When used as a class decorator `patch.object` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
+ """
+ getter = lambda: target
+ return _patch(
+ getter, attribute, new, spec, create,
+ spec_set, autospec, new_callable, kwargs
+ )
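+
+
+# A small sketch of `patch.object` used as a decorator, per the docstring
+# above (the class and method names are only illustrative). With `new`
+# omitted, the created MagicMock is passed to the decorated function:
+#
+#     @patch.object(ProductionClass, 'method')
+#     def test(mock_method):
+#         ProductionClass().method(1)
+#         mock_method.assert_called_once_with(1)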
+
+
+def _patch_multiple(target, spec=None, create=False, spec_set=None,
+ autospec=None, new_callable=None, **kwargs):
+ """Perform multiple patches in a single call. It takes the object to be
+ patched (either as an object or a string to fetch the object by importing)
+ and keyword arguments for the patches::
+
+ with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'):
+ ...
+
+ Use `DEFAULT` as the value if you want `patch.multiple` to create
+ mocks for you. In this case the created mocks are passed into a decorated
+ function by keyword, and a dictionary is returned when `patch.multiple` is
+ used as a context manager.
+
+ `patch.multiple` can be used as a decorator, class decorator or a context
+ manager. The arguments `spec`, `spec_set`, `create`,
+ `autospec` and `new_callable` have the same meaning as for `patch`. These
+ arguments will be applied to *all* patches done by `patch.multiple`.
+
+ When used as a class decorator `patch.multiple` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
+ """
+ if type(target) in (unicode, str):
+ getter = lambda: _importer(target)
+ else:
+ getter = lambda: target
+
+ if not kwargs:
+ raise ValueError(
+ 'Must supply at least one keyword argument with patch.multiple'
+ )
+ # need to wrap in a list for python 3, where items is a view
+ items = list(kwargs.items())
+ attribute, new = items[0]
+ patcher = _patch(
+ getter, attribute, new, spec, create, spec_set,
+ autospec, new_callable, {}
+ )
+ patcher.attribute_name = attribute
+ for attribute, new in items[1:]:
+ this_patcher = _patch(
+ getter, attribute, new, spec, create, spec_set,
+ autospec, new_callable, {}
+ )
+ this_patcher.attribute_name = attribute
+ patcher.additional_patchers.append(this_patcher)
+ return patcher
+
+
+def patch(
+ target, new=DEFAULT, spec=None, create=False,
+ spec_set=None, autospec=None, new_callable=None, **kwargs
+ ):
+ """
+ `patch` acts as a function decorator, class decorator or a context
+ manager. Inside the body of the function or with statement, the `target`
+ is patched with a `new` object. When the function/with statement exits
+ the patch is undone.
+
+ If `new` is omitted, then the target is replaced with a
+ `MagicMock`. If `patch` is used as a decorator and `new` is
+ omitted, the created mock is passed in as an extra argument to the
+ decorated function. If `patch` is used as a context manager the created
+ mock is returned by the context manager.
+
+ `target` should be a string in the form `'package.module.ClassName'`. The
+ `target` is imported and the specified object replaced with the `new`
+ object, so the `target` must be importable from the environment you are
+ calling `patch` from. The target is imported when the decorated function
+ is executed, not at decoration time.
+
+ The `spec` and `spec_set` keyword arguments are passed to the `MagicMock`
+ if patch is creating one for you.
+
+ In addition you can pass `spec=True` or `spec_set=True`, which causes
+ patch to pass in the object being mocked as the spec/spec_set object.
+
+ `new_callable` allows you to specify a different class, or callable object,
+ that will be called to create the `new` object. By default `MagicMock` is
+ used.
+
+ A more powerful form of `spec` is `autospec`. If you set `autospec=True`
+ then the mock will be created with a spec from the object being replaced.
+ All attributes of the mock will also have the spec of the corresponding
+ attribute of the object being replaced. Methods and functions being
+ mocked will have their arguments checked and will raise a `TypeError` if
+ they are called with the wrong signature. For mocks replacing a class,
+ their return value (the 'instance') will have the same spec as the class.
+
+ Instead of `autospec=True` you can pass `autospec=some_object` to use an
+ arbitrary object as the spec instead of the one being replaced.
+
+ By default `patch` will fail to replace attributes that don't exist. If
+ you pass in `create=True`, and the attribute doesn't exist, patch will
+ create the attribute for you when the patched function is called, and
+ delete it again afterwards. This is useful for writing tests against
+ attributes that your production code creates at runtime. It is off by
+ default because it can be dangerous. With it switched on you can write
+ passing tests against APIs that don't actually exist!
+
+ Patch can be used as a `TestCase` class decorator. It works by
+ decorating each test method in the class. This reduces the boilerplate
+ code when your test methods share a common set of patches. `patch` finds
+ tests by looking for method names that start with `patch.TEST_PREFIX`.
+ By default this is `test`, which matches the way `unittest` finds tests.
+ You can specify an alternative prefix by setting `patch.TEST_PREFIX`.
+
+ Patch can be used as a context manager, with the with statement. Here the
+ patching applies to the indented block after the with statement. If you
+ use "as" then the patched object will be bound to the name after the
+ "as"; very useful if `patch` is creating a mock object for you.
+
+ `patch` takes arbitrary keyword arguments. These will be passed to
+ the `Mock` (or `new_callable`) on construction.
+
+ `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
+ available for alternate use-cases.
+ """
+ getter, attribute = _get_target(target)
+ return _patch(
+ getter, attribute, new, spec, create,
+ spec_set, autospec, new_callable, kwargs
+ )
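+
+# Minimal usage sketches of `patch` as a context manager and as a decorator,
+# following the docstring above (the targets shown are ordinary stdlib
+# callables, chosen purely for illustration).
+#
+#     with patch('os.path.exists', return_value=True) as mock_exists:
+#         assert os.path.exists('anything')
+#         mock_exists.assert_called_with('anything')
+#
+#     @patch('os.getcwd')
+#     def test(mock_getcwd):
+#         mock_getcwd.return_value = '/tmp'
+#         assert os.getcwd() == '/tmp'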
+
+
+class _patch_dict(object):
+ """
+ Patch a dictionary, or dictionary like object, and restore the dictionary
+ to its original state after the test.
+
+ `in_dict` can be a dictionary or a mapping like container. If it is a
+ mapping then it must at least support getting, setting and deleting items
+ plus iterating over keys.
+
+ `in_dict` can also be a string specifying the name of the dictionary, which
+ will then be fetched by importing it.
+
+ `values` can be a dictionary of values to set in the dictionary. `values`
+ can also be an iterable of `(key, value)` pairs.
+
+ If `clear` is True then the dictionary will be cleared before the new
+ values are set.
+
+ `patch.dict` can also be called with arbitrary keyword arguments to set
+ values in the dictionary::
+
+ with patch.dict('sys.modules', mymodule=Mock(), other_module=Mock()):
+ ...
+
+ `patch.dict` can be used as a context manager, decorator or class
+ decorator. When used as a class decorator `patch.dict` honours
+ `patch.TEST_PREFIX` for choosing which methods to wrap.
+ """
+
+ def __init__(self, in_dict, values=(), clear=False, **kwargs):
+ if isinstance(in_dict, basestring):
+ in_dict = _importer(in_dict)
+ self.in_dict = in_dict
+ # support any argument supported by dict(...) constructor
+ self.values = dict(values)
+ self.values.update(kwargs)
+ self.clear = clear
+ self._original = None
+
+
+ def __call__(self, f):
+ if isinstance(f, ClassTypes):
+ return self.decorate_class(f)
+ @wraps(f)
+ def _inner(*args, **kw):
+ self._patch_dict()
+ try:
+ return f(*args, **kw)
+ finally:
+ self._unpatch_dict()
+
+ return _inner
+
+
+ def decorate_class(self, klass):
+ for attr in dir(klass):
+ attr_value = getattr(klass, attr)
+ if (attr.startswith(patch.TEST_PREFIX) and
+ hasattr(attr_value, "__call__")):
+ decorator = _patch_dict(self.in_dict, self.values, self.clear)
+ decorated = decorator(attr_value)
+ setattr(klass, attr, decorated)
+ return klass
+
+
+ def __enter__(self):
+ """Patch the dict."""
+ self._patch_dict()
+
+
+ def _patch_dict(self):
+ values = self.values
+ in_dict = self.in_dict
+ clear = self.clear
+
+ try:
+ original = in_dict.copy()
+ except AttributeError:
+ # dict like object with no copy method
+ # must support iteration over keys
+ original = {}
+ for key in in_dict:
+ original[key] = in_dict[key]
+ self._original = original
+
+ if clear:
+ _clear_dict(in_dict)
+
+ try:
+ in_dict.update(values)
+ except AttributeError:
+ # dict like object with no update method
+ for key in values:
+ in_dict[key] = values[key]
+
+
+ def _unpatch_dict(self):
+ in_dict = self.in_dict
+ original = self._original
+
+ _clear_dict(in_dict)
+
+ try:
+ in_dict.update(original)
+ except AttributeError:
+ for key in original:
+ in_dict[key] = original[key]
+
+
+ def __exit__(self, *args):
+ """Unpatch the dict."""
+ self._unpatch_dict()
+ return False
+
+ start = __enter__
+ stop = __exit__
+
+
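+# A minimal usage sketch of `patch.dict`: the mapping is restored on exit,
+# whether the block succeeds or raises.
+#
+#     config = {'user': 'alice'}
+#     with patch.dict(config, {'user': 'bob', 'debug': True}):
+#         assert config == {'user': 'bob', 'debug': True}
+#     assert config == {'user': 'alice'}
+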
+def _clear_dict(in_dict):
+ try:
+ in_dict.clear()
+ except AttributeError:
+ keys = list(in_dict)
+ for key in keys:
+ del in_dict[key]
+
+
+def _patch_stopall():
+ """Stop all active patches."""
+ for patch in list(_patch._active_patches):
+ patch.stop()
+
+
+patch.object = _patch_object
+patch.dict = _patch_dict
+patch.multiple = _patch_multiple
+patch.stopall = _patch_stopall
+patch.TEST_PREFIX = 'test'
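+
+# A minimal sketch of the start/stop pattern (typically driven from
+# setUp/tearDown); `patch.stopall` undoes every patch started via `start`
+# that is still active.
+#
+#     patcher = patch('os.getcwd', return_value='/tmp')
+#     mocked = patcher.start()
+#     try:
+#         assert os.getcwd() == '/tmp'
+#     finally:
+#         patcher.stop()  # or patch.stopall()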
+
+magic_methods = (
+ "lt le gt ge eq ne "
+ "getitem setitem delitem "
+ "len contains iter "
+ "hash str sizeof "
+ "enter exit "
+ "divmod neg pos abs invert "
+ "complex int float index "
+ "trunc floor ceil "
+)
+
+numerics = "add sub mul div floordiv mod lshift rshift and xor or pow "
+inplace = ' '.join('i%s' % n for n in numerics.split())
+right = ' '.join('r%s' % n for n in numerics.split())
+extra = ''
+if inPy3k:
+ extra = 'bool next '
+else:
+ extra = 'unicode long nonzero oct hex truediv rtruediv '
+
+# not including __prepare__, __instancecheck__, __subclasscheck__
+# (as they are metaclass methods)
+# __del__ is not supported at all as it causes problems if it exists
+
+_non_defaults = set('__%s__' % method for method in [
+ 'cmp', 'getslice', 'setslice', 'coerce', 'subclasses',
+ 'format', 'get', 'set', 'delete', 'reversed',
+ 'missing', 'reduce', 'reduce_ex', 'getinitargs',
+ 'getnewargs', 'getstate', 'setstate', 'getformat',
+ 'setformat', 'repr', 'dir'
+])
+
+
+def _get_method(name, func):
+ "Turns a callable object (like a mock) into a real function"
+ def method(self, *args, **kw):
+ return func(self, *args, **kw)
+ method.__name__ = name
+ return method
+
+
+_magics = set(
+ '__%s__' % method for method in
+ ' '.join([magic_methods, numerics, inplace, right, extra]).split()
+)
+
+_all_magics = _magics | _non_defaults
+
+_unsupported_magics = set([
+ '__getattr__', '__setattr__',
+ '__init__', '__new__', '__prepare__',
+ '__instancecheck__', '__subclasscheck__',
+ '__del__'
+])
+
+_calculate_return_value = {
+ '__hash__': lambda self: object.__hash__(self),
+ '__str__': lambda self: object.__str__(self),
+ '__sizeof__': lambda self: object.__sizeof__(self),
+ '__unicode__': lambda self: unicode(object.__str__(self)),
+}
+
+_return_values = {
+ '__lt__': NotImplemented,
+ '__gt__': NotImplemented,
+ '__le__': NotImplemented,
+ '__ge__': NotImplemented,
+ '__int__': 1,
+ '__contains__': False,
+ '__len__': 0,
+ '__exit__': False,
+ '__complex__': 1j,
+ '__float__': 1.0,
+ '__bool__': True,
+ '__nonzero__': True,
+ '__oct__': '1',
+ '__hex__': '0x1',
+ '__long__': long(1),
+ '__index__': 1,
+}
+
+
+def _get_eq(self):
+ def __eq__(other):
+ ret_val = self.__eq__._mock_return_value
+ if ret_val is not DEFAULT:
+ return ret_val
+ return self is other
+ return __eq__
+
+def _get_ne(self):
+ def __ne__(other):
+ ret_val = self.__ne__._mock_return_value
+ if ret_val is not DEFAULT:
+ return ret_val
+ return self is not other
+ return __ne__
+
+def _get_iter(self):
+ def __iter__():
+ ret_val = self.__iter__._mock_return_value
+ if ret_val is DEFAULT:
+ return iter([])
+ # if ret_val was already an iterator, then calling iter on it should
+ # return the iterator unchanged
+ return iter(ret_val)
+ return __iter__
+
+_side_effect_methods = {
+ '__eq__': _get_eq,
+ '__ne__': _get_ne,
+ '__iter__': _get_iter,
+}
+
+
+
+def _set_return_value(mock, method, name):
+ fixed = _return_values.get(name, DEFAULT)
+ if fixed is not DEFAULT:
+ method.return_value = fixed
+ return
+
+ return_calculator = _calculate_return_value.get(name)
+ if return_calculator is not None:
+ try:
+ return_value = return_calculator(mock)
+ except AttributeError:
+ # XXXX why do we return AttributeError here?
+ # set it as a side_effect instead?
+ return_value = AttributeError(name)
+ method.return_value = return_value
+ return
+
+ side_effector = _side_effect_methods.get(name)
+ if side_effector is not None:
+ method.side_effect = side_effector(mock)
+
+
+
+class MagicMixin(object):
+ def __init__(self, *args, **kw):
+ _super(MagicMixin, self).__init__(*args, **kw)
+ self._mock_set_magics()
+
+
+ def _mock_set_magics(self):
+ these_magics = _magics
+
+ if self._mock_methods is not None:
+ these_magics = _magics.intersection(self._mock_methods)
+
+ remove_magics = set()
+ remove_magics = _magics - these_magics
+
+ for entry in remove_magics:
+ if entry in type(self).__dict__:
+ # remove unneeded magic methods
+ delattr(self, entry)
+
+ # don't overwrite existing attributes if called a second time
+ these_magics = these_magics - set(type(self).__dict__)
+
+ _type = type(self)
+ for entry in these_magics:
+ setattr(_type, entry, MagicProxy(entry, self))
+
+
+
+class NonCallableMagicMock(MagicMixin, NonCallableMock):
+ """A version of `MagicMock` that isn't callable."""
+ def mock_add_spec(self, spec, spec_set=False):
+ """Add a spec to a mock. `spec` can either be an object or a
+ list of strings. Only attributes on the `spec` can be fetched as
+ attributes from the mock.
+
+ If `spec_set` is True then only attributes on the spec can be set."""
+ self._mock_add_spec(spec, spec_set)
+ self._mock_set_magics()
+
+
+
+class MagicMock(MagicMixin, Mock):
+ """
+ MagicMock is a subclass of Mock with default implementations
+ of most of the magic methods. You can use MagicMock without having to
+ configure the magic methods yourself.
+
+ If you use the `spec` or `spec_set` arguments then *only* magic
+ methods that exist in the spec will be created.
+
+ Attributes and the return value of a `MagicMock` will also be `MagicMocks`.
+ """
+ def mock_add_spec(self, spec, spec_set=False):
+ """Add a spec to a mock. `spec` can either be an object or a
+ list of strings. Only attributes on the `spec` can be fetched as
+ attributes from the mock.
+
+ If `spec_set` is True then only attributes on the spec can be set."""
+ self._mock_add_spec(spec, spec_set)
+ self._mock_set_magics()
+
+
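+# A minimal sketch of the preconfigured magic methods on `MagicMock`: the
+# defaults come from `_return_values` above and can be overridden per method.
+#
+#     m = MagicMock()
+#     assert len(m) == 0          # __len__ defaults to 0
+#     assert list(m) == []        # __iter__ defaults to an empty iterator
+#     m.__len__.return_value = 3
+#     assert len(m) == 3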
+
+class MagicProxy(object):
+ def __init__(self, name, parent):
+ self.name = name
+ self.parent = parent
+
+ def __call__(self, *args, **kwargs):
+ m = self.create_mock()
+ return m(*args, **kwargs)
+
+ def create_mock(self):
+ entry = self.name
+ parent = self.parent
+ m = parent._get_child_mock(name=entry, _new_name=entry,
+ _new_parent=parent)
+ setattr(parent, entry, m)
+ _set_return_value(parent, m, entry)
+ return m
+
+ def __get__(self, obj, _type=None):
+ return self.create_mock()
+
+
+
+class _ANY(object):
+ "A helper object that compares equal to everything."
+
+ def __eq__(self, other):
+ return True
+
+ def __ne__(self, other):
+ return False
+
+ def __repr__(self):
+ return '<ANY>'
+
+ANY = _ANY()
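+
+# A minimal sketch of using `ANY` in call assertions to ignore arguments the
+# test does not care about.
+#
+#     m = MagicMock()
+#     m('payload', timestamp=1234567890)
+#     m.assert_called_with('payload', timestamp=ANY)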
+
+
+
+def _format_call_signature(name, args, kwargs):
+ message = '%s(%%s)' % name
+ formatted_args = ''
+ args_string = ', '.join([repr(arg) for arg in args])
+ kwargs_string = ', '.join([
+ '%s=%r' % (key, value) for key, value in kwargs.items()
+ ])
+ if args_string:
+ formatted_args = args_string
+ if kwargs_string:
+ if formatted_args:
+ formatted_args += ', '
+ formatted_args += kwargs_string
+
+ return message % formatted_args
+
+
+
+class _Call(tuple):
+ """
+ A tuple for holding the results of a call to a mock, either in the form
+ `(args, kwargs)` or `(name, args, kwargs)`.
+
+ If args or kwargs are empty then a call tuple will compare equal to
+ a tuple without those values. This makes comparisons less verbose::
+
+ _Call(('name', (), {})) == ('name',)
+ _Call(('name', (1,), {})) == ('name', (1,))
+ _Call(((), {'a': 'b'})) == ({'a': 'b'},)
+
+ The `_Call` object provides a useful shortcut for comparing with call::
+
+ _Call(((1, 2), {'a': 3})) == call(1, 2, a=3)
+ _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3)
+
+ If the _Call has no name then it will match any name.
+ """
+ def __new__(cls, value=(), name=None, parent=None, two=False,
+ from_kall=True):
+ name = ''
+ args = ()
+ kwargs = {}
+ _len = len(value)
+ if _len == 3:
+ name, args, kwargs = value
+ elif _len == 2:
+ first, second = value
+ if isinstance(first, basestring):
+ name = first
+ if isinstance(second, tuple):
+ args = second
+ else:
+ kwargs = second
+ else:
+ args, kwargs = first, second
+ elif _len == 1:
+ value, = value
+ if isinstance(value, basestring):
+ name = value
+ elif isinstance(value, tuple):
+ args = value
+ else:
+ kwargs = value
+
+ if two:
+ return tuple.__new__(cls, (args, kwargs))
+
+ return tuple.__new__(cls, (name, args, kwargs))
+
+
+ def __init__(self, value=(), name=None, parent=None, two=False,
+ from_kall=True):
+ self.name = name
+ self.parent = parent
+ self.from_kall = from_kall
+
+
+ def __eq__(self, other):
+ if other is ANY:
+ return True
+ try:
+ len_other = len(other)
+ except TypeError:
+ return False
+
+ self_name = ''
+ if len(self) == 2:
+ self_args, self_kwargs = self
+ else:
+ self_name, self_args, self_kwargs = self
+
+ other_name = ''
+ if len_other == 0:
+ other_args, other_kwargs = (), {}
+ elif len_other == 3:
+ other_name, other_args, other_kwargs = other
+ elif len_other == 1:
+ value, = other
+ if isinstance(value, tuple):
+ other_args = value
+ other_kwargs = {}
+ elif isinstance(value, basestring):
+ other_name = value
+ other_args, other_kwargs = (), {}
+ else:
+ other_args = ()
+ other_kwargs = value
+ else:
+ # len 2
+ # could be (name, args) or (name, kwargs) or (args, kwargs)
+ first, second = other
+ if isinstance(first, basestring):
+ other_name = first
+ if isinstance(second, tuple):
+ other_args, other_kwargs = second, {}
+ else:
+ other_args, other_kwargs = (), second
+ else:
+ other_args, other_kwargs = first, second
+
+ if self_name and other_name != self_name:
+ return False
+
+ # this order is important for ANY to work!
+ return (other_args, other_kwargs) == (self_args, self_kwargs)
+
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
+ def __call__(self, *args, **kwargs):
+ if self.name is None:
+ return _Call(('', args, kwargs), name='()')
+
+ name = self.name + '()'
+ return _Call((self.name, args, kwargs), name=name, parent=self)
+
+
+ def __getattr__(self, attr):
+ if self.name is None:
+ return _Call(name=attr, from_kall=False)
+ name = '%s.%s' % (self.name, attr)
+ return _Call(name=name, parent=self, from_kall=False)
+
+
+ def __repr__(self):
+ if not self.from_kall:
+ name = self.name or 'call'
+ if name.startswith('()'):
+ name = 'call%s' % name
+ return name
+
+ if len(self) == 2:
+ name = 'call'
+ args, kwargs = self
+ else:
+ name, args, kwargs = self
+ if not name:
+ name = 'call'
+ elif not name.startswith('()'):
+ name = 'call.%s' % name
+ else:
+ name = 'call%s' % name
+ return _format_call_signature(name, args, kwargs)
+
+
+ def call_list(self):
+ """For a call object that represents multiple calls, `call_list`
+ returns a list of all the intermediate calls as well as the
+ final call."""
+ vals = []
+ thing = self
+ while thing is not None:
+ if thing.from_kall:
+ vals.append(thing)
+ thing = thing.parent
+ return _CallList(reversed(vals))
+
+
+call = _Call(from_kall=False)
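+
+# A minimal sketch of comparing recorded calls with `call` objects.
+#
+#     m = MagicMock()
+#     m(1, 2)
+#     m.method(a=3)
+#     assert m.mock_calls == [call(1, 2), call.method(a=3)]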
+
+
+
+def create_autospec(spec, spec_set=False, instance=False, _parent=None,
+ _name=None, **kwargs):
+ """Create a mock object using another object as a spec. Attributes on the
+ mock will use the corresponding attribute on the `spec` object as their
+ spec.
+
+ Functions or methods being mocked will have their arguments checked
+ to ensure that they are called with the correct signature.
+
+ If `spec_set` is True then attempting to set attributes that don't exist
+ on the spec object will raise an `AttributeError`.
+
+ If a class is used as a spec then the return value of the mock (the
+ instance of the class) will have the same spec. You can use a class as the
+ spec for an instance object by passing `instance=True`. The returned mock
+ will only be callable if instances of the mock are callable.
+
+ `create_autospec` also takes arbitrary keyword arguments that are passed to
+ the constructor of the created mock."""
+ if _is_list(spec):
+ # can't pass a list instance to the mock constructor as it will be
+ # interpreted as a list of strings
+ spec = type(spec)
+
+ is_type = isinstance(spec, ClassTypes)
+
+ _kwargs = {'spec': spec}
+ if spec_set:
+ _kwargs = {'spec_set': spec}
+ elif spec is None:
+ # spec is None: mock with a normal mock without a spec
+ _kwargs = {}
+
+ _kwargs.update(kwargs)
+
+ Klass = MagicMock
+ if type(spec) in DescriptorTypes:
+ # descriptors don't have a spec
+ # because we don't know what type they return
+ _kwargs = {}
+ elif not _callable(spec):
+ Klass = NonCallableMagicMock
+ elif is_type and instance and not _instance_callable(spec):
+ Klass = NonCallableMagicMock
+
+ _new_name = _name
+ if _parent is None:
+ # for a top level object no _new_name should be set
+ _new_name = ''
+
+ mock = Klass(parent=_parent, _new_parent=_parent, _new_name=_new_name,
+ name=_name, **_kwargs)
+
+ if isinstance(spec, FunctionTypes):
+ # should only happen at the top level because we don't
+ # recurse for functions
+ mock = _set_signature(mock, spec)
+ else:
+ _check_signature(spec, mock, is_type, instance)
+
+ if _parent is not None and not instance:
+ _parent._mock_children[_name] = mock
+
+ if is_type and not instance and 'return_value' not in kwargs:
+ mock.return_value = create_autospec(spec, spec_set, instance=True,
+ _name='()', _parent=mock)
+
+ for entry in dir(spec):
+ if _is_magic(entry):
+ # MagicMock already does the useful magic methods for us
+ continue
+
+ if isinstance(spec, FunctionTypes) and entry in FunctionAttributes:
+ # allow a mock to actually be a function
+ continue
+
+ # XXXX do we need a better way of getting attributes without
+ # triggering code execution (?) Probably not - we need the actual
+ # object to mock it so we would rather trigger a property than mock
+ # the property descriptor. Likewise we want to mock out dynamically
+ # provided attributes.
+ # XXXX what about attributes that raise exceptions other than
+ # AttributeError on being fetched?
+ # we could be resilient against it, or catch and propagate the
+ # exception when the attribute is fetched from the mock
+ try:
+ original = getattr(spec, entry)
+ except AttributeError:
+ continue
+
+ kwargs = {'spec': original}
+ if spec_set:
+ kwargs = {'spec_set': original}
+
+ if not isinstance(original, FunctionTypes):
+ new = _SpecState(original, spec_set, mock, entry, instance)
+ mock._mock_children[entry] = new
+ else:
+ parent = mock
+ if isinstance(spec, FunctionTypes):
+ parent = mock.mock
+
+ new = MagicMock(parent=parent, name=entry, _new_name=entry,
+ _new_parent=parent, **kwargs)
+ mock._mock_children[entry] = new
+ skipfirst = _must_skip(spec, entry, is_type)
+ _check_signature(original, new, skipfirst=skipfirst)
+
+ # so functions created with _set_signature become instance attributes,
+ # *plus* their underlying mock exists in _mock_children of the parent
+ # mock. Adding to _mock_children may be unnecessary where we are also
+ # setting as an instance attribute?
+ if isinstance(new, FunctionTypes):
+ setattr(mock, entry, new)
+
+ return mock
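+
+# A minimal usage sketch of `create_autospec`; `SomeClass` is a placeholder
+# class assumed to define `method(self, arg)`.
+#
+#     mock = create_autospec(SomeClass)
+#     instance = mock()
+#     instance.method(1)
+#     instance.method.assert_called_once_with(1)
+#     instance.method(1, 2, 3)   # wrong signature -> raises TypeError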
+
+
+def _must_skip(spec, entry, is_type):
+ if not isinstance(spec, ClassTypes):
+ if entry in getattr(spec, '__dict__', {}):
+ # instance attribute - shouldn't skip
+ return False
+ spec = spec.__class__
+ if not hasattr(spec, '__mro__'):
+ # old style class: can't have descriptors anyway
+ return is_type
+
+ for klass in spec.__mro__:
+ result = klass.__dict__.get(entry, DEFAULT)
+ if result is DEFAULT:
+ continue
+ if isinstance(result, (staticmethod, classmethod)):
+ return False
+ return is_type
+
+ # shouldn't get here unless function is a dynamically provided attribute
+ # XXXX untested behaviour
+ return is_type
+
+
+def _get_class(obj):
+ try:
+ return obj.__class__
+ except AttributeError:
+ # in Python 2, _sre.SRE_Pattern objects have no __class__
+ return type(obj)
+
+
+class _SpecState(object):
+
+ def __init__(self, spec, spec_set=False, parent=None,
+ name=None, ids=None, instance=False):
+ self.spec = spec
+ self.ids = ids
+ self.spec_set = spec_set
+ self.parent = parent
+ self.instance = instance
+ self.name = name
+
+
+FunctionTypes = (
+ # python function
+ type(create_autospec),
+ # instance method
+ type(ANY.__eq__),
+ # unbound method
+ type(_ANY.__eq__),
+)
+
+FunctionAttributes = set([
+ 'func_closure',
+ 'func_code',
+ 'func_defaults',
+ 'func_dict',
+ 'func_doc',
+ 'func_globals',
+ 'func_name',
+])
+
+
+file_spec = None
+
+
+def mock_open(mock=None, read_data=''):
+ """
+ A helper function to create a mock to replace the use of `open`. It works
+ for `open` called directly or used as a context manager.
+
+ The `mock` argument is the mock object to configure. If `None` (the
+ default) then a `MagicMock` will be created for you, with the API limited
+ to methods or attributes available on standard file handles.
+
+ `read_data` is a string for the `read` method of the file handle to return.
+ This is an empty string by default.
+ """
+ global file_spec
+ if file_spec is None:
+ # set on first use
+ if inPy3k:
+ import _io
+ file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
+ else:
+ file_spec = file
+
+ if mock is None:
+ mock = MagicMock(name='open', spec=open)
+
+ handle = MagicMock(spec=file_spec)
+ handle.write.return_value = None
+ handle.__enter__.return_value = handle
+ handle.read.return_value = read_data
+
+ mock.return_value = handle
+ return mock
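+
+# A minimal usage sketch of `mock_open`, patching the builtin `open` in the
+# calling module (the same pattern the test suite uses); 'config.txt' and
+# the `read_data` value are placeholders.
+#
+#     m = mock_open(read_data='first line')
+#     with patch('%s.open' % __name__, m, create=True):
+#         with open('config.txt') as handle:
+#             assert handle.read() == 'first line'
+#     m.assert_called_once_with('config.txt')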
+
+
+class PropertyMock(Mock):
+ """
+ A mock intended to be used as a property, or other descriptor, on a class.
+ `PropertyMock` provides `__get__` and `__set__` methods so you can specify
+ a return value when it is fetched.
+
+ Fetching a `PropertyMock` instance from an object calls the mock, with
+ no args. Setting it calls the mock with the value being set.
+ """
+ def _get_child_mock(self, **kwargs):
+ return MagicMock(**kwargs)
+
+ def __get__(self, obj, obj_type):
+ return self()
+ def __set__(self, obj, val):
+ self(val)
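+
+# A minimal usage sketch of `PropertyMock`; `Foo` is a placeholder class with
+# a `name` property. The mock is attached to the class, because descriptors
+# are looked up on the type, not the instance.
+#
+#     with patch.object(Foo, 'name', new_callable=PropertyMock) as mock_name:
+#         mock_name.return_value = 'mocked'
+#         assert Foo().name == 'mocked'
+#         mock_name.assert_called_once_with()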
diff --git a/third_party/python/mock-1.0.0/setup.cfg b/third_party/python/mock-1.0.0/setup.cfg
new file mode 100644
index 0000000000..00948b7e4a
--- /dev/null
+++ b/third_party/python/mock-1.0.0/setup.cfg
@@ -0,0 +1,12 @@
+[build_sphinx]
+source-dir = docs
+build-dir = html
+
+[sdist]
+force-manifest = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/third_party/python/mock-1.0.0/setup.py b/third_party/python/mock-1.0.0/setup.py
new file mode 100755
index 0000000000..7c25e8f2dd
--- /dev/null
+++ b/third_party/python/mock-1.0.0/setup.py
@@ -0,0 +1,72 @@
+#! /usr/bin/env python
+
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from mock import __version__
+
+import os
+
+
+NAME = 'mock'
+MODULES = ['mock']
+DESCRIPTION = 'A Python Mocking and Patching Library for Testing'
+
+URL = "http://www.voidspace.org.uk/python/mock/"
+
+readme = os.path.join(os.path.dirname(__file__), 'README.txt')
+LONG_DESCRIPTION = open(readme).read()
+
+CLASSIFIERS = [
+ 'Development Status :: 5 - Production/Stable',
+ 'Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: BSD License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 2.4',
+ 'Programming Language :: Python :: 2.5',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3.1',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+ 'Programming Language :: Python :: Implementation :: Jython',
+ 'Operating System :: OS Independent',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+ 'Topic :: Software Development :: Testing',
+]
+
+AUTHOR = 'Michael Foord'
+AUTHOR_EMAIL = 'michael@voidspace.org.uk'
+KEYWORDS = ("testing test mock mocking unittest patching "
+ "stubs fakes doubles").split(' ')
+
+params = dict(
+ name=NAME,
+ version=__version__,
+ py_modules=MODULES,
+
+ # metadata for upload to PyPI
+ author=AUTHOR,
+ author_email=AUTHOR_EMAIL,
+ description=DESCRIPTION,
+ long_description=LONG_DESCRIPTION,
+ keywords=KEYWORDS,
+ url=URL,
+ classifiers=CLASSIFIERS,
+)
+
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+else:
+ params['tests_require'] = ['unittest2']
+ params['test_suite'] = 'unittest2.collector'
+
+setup(**params)
diff --git a/third_party/python/mock-1.0.0/tests/__init__.py b/third_party/python/mock-1.0.0/tests/__init__.py
new file mode 100644
index 0000000000..54ddf2ecc1
--- /dev/null
+++ b/third_party/python/mock-1.0.0/tests/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
diff --git a/third_party/python/mock-1.0.0/tests/_testwith.py b/third_party/python/mock-1.0.0/tests/_testwith.py
new file mode 100644
index 0000000000..0b54780b80
--- /dev/null
+++ b/third_party/python/mock-1.0.0/tests/_testwith.py
@@ -0,0 +1,181 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from __future__ import with_statement
+
+from tests.support import unittest2, is_instance
+
+from mock import MagicMock, Mock, patch, sentinel, mock_open, call
+
+from tests.support_with import catch_warnings, nested
+
+something = sentinel.Something
+something_else = sentinel.SomethingElse
+
+
+
+class WithTest(unittest2.TestCase):
+
+ def test_with_statement(self):
+ with patch('tests._testwith.something', sentinel.Something2):
+ self.assertEqual(something, sentinel.Something2, "unpatched")
+ self.assertEqual(something, sentinel.Something)
+
+
+ def test_with_statement_exception(self):
+ try:
+ with patch('tests._testwith.something', sentinel.Something2):
+ self.assertEqual(something, sentinel.Something2, "unpatched")
+ raise Exception('pow')
+ except Exception:
+ pass
+ else:
+ self.fail("patch swallowed exception")
+ self.assertEqual(something, sentinel.Something)
+
+
+ def test_with_statement_as(self):
+ with patch('tests._testwith.something') as mock_something:
+ self.assertEqual(something, mock_something, "unpatched")
+ self.assertTrue(is_instance(mock_something, MagicMock),
+ "patching wrong type")
+ self.assertEqual(something, sentinel.Something)
+
+
+ def test_patch_object_with_statement(self):
+ class Foo(object):
+ something = 'foo'
+ original = Foo.something
+ with patch.object(Foo, 'something'):
+ self.assertNotEqual(Foo.something, original, "unpatched")
+ self.assertEqual(Foo.something, original)
+
+
+ def test_with_statement_nested(self):
+ with catch_warnings(record=True):
+ # nested is deprecated in Python 2.7
+ with nested(patch('tests._testwith.something'),
+ patch('tests._testwith.something_else')) as (mock_something, mock_something_else):
+ self.assertEqual(something, mock_something, "unpatched")
+ self.assertEqual(something_else, mock_something_else,
+ "unpatched")
+ self.assertEqual(something, sentinel.Something)
+ self.assertEqual(something_else, sentinel.SomethingElse)
+
+
+ def test_with_statement_specified(self):
+ with patch('tests._testwith.something', sentinel.Patched) as mock_something:
+ self.assertEqual(something, mock_something, "unpatched")
+ self.assertEqual(mock_something, sentinel.Patched, "wrong patch")
+ self.assertEqual(something, sentinel.Something)
+
+
+ def testContextManagerMocking(self):
+ mock = Mock()
+ mock.__enter__ = Mock()
+ mock.__exit__ = Mock()
+ mock.__exit__.return_value = False
+
+ with mock as m:
+ self.assertEqual(m, mock.__enter__.return_value)
+ mock.__enter__.assert_called_with()
+ mock.__exit__.assert_called_with(None, None, None)
+
+
+ def test_context_manager_with_magic_mock(self):
+ mock = MagicMock()
+
+ with self.assertRaises(TypeError):
+ with mock:
+ 'foo' + 3
+ mock.__enter__.assert_called_with()
+ self.assertTrue(mock.__exit__.called)
+
+
+ def test_with_statement_same_attribute(self):
+ with patch('tests._testwith.something', sentinel.Patched) as mock_something:
+ self.assertEqual(something, mock_something, "unpatched")
+
+ with patch('tests._testwith.something') as mock_again:
+ self.assertEqual(something, mock_again, "unpatched")
+
+ self.assertEqual(something, mock_something,
+ "restored with wrong instance")
+
+ self.assertEqual(something, sentinel.Something, "not restored")
+
+
+ def test_with_statement_imbricated(self):
+ with patch('tests._testwith.something') as mock_something:
+ self.assertEqual(something, mock_something, "unpatched")
+
+ with patch('tests._testwith.something_else') as mock_something_else:
+ self.assertEqual(something_else, mock_something_else,
+ "unpatched")
+
+ self.assertEqual(something, sentinel.Something)
+ self.assertEqual(something_else, sentinel.SomethingElse)
+
+
+ def test_dict_context_manager(self):
+ foo = {}
+ with patch.dict(foo, {'a': 'b'}):
+ self.assertEqual(foo, {'a': 'b'})
+ self.assertEqual(foo, {})
+
+ with self.assertRaises(NameError):
+ with patch.dict(foo, {'a': 'b'}):
+ self.assertEqual(foo, {'a': 'b'})
+ raise NameError('Konrad')
+
+ self.assertEqual(foo, {})
+
+
+
+class TestMockOpen(unittest2.TestCase):
+
+ def test_mock_open(self):
+ mock = mock_open()
+ with patch('%s.open' % __name__, mock, create=True) as patched:
+ self.assertIs(patched, mock)
+ open('foo')
+
+ mock.assert_called_once_with('foo')
+
+
+ def test_mock_open_context_manager(self):
+ mock = mock_open()
+ handle = mock.return_value
+ with patch('%s.open' % __name__, mock, create=True):
+ with open('foo') as f:
+ f.read()
+
+ expected_calls = [call('foo'), call().__enter__(), call().read(),
+ call().__exit__(None, None, None)]
+ self.assertEqual(mock.mock_calls, expected_calls)
+ self.assertIs(f, handle)
+
+
+ def test_explicit_mock(self):
+ mock = MagicMock()
+ mock_open(mock)
+
+ with patch('%s.open' % __name__, mock, create=True) as patched:
+ self.assertIs(patched, mock)
+ open('foo')
+
+ mock.assert_called_once_with('foo')
+
+
+ def test_read_data(self):
+ mock = mock_open(read_data='foo')
+ with patch('%s.open' % __name__, mock, create=True):
+ h = open('bar')
+ result = h.read()
+
+ self.assertEqual(result, 'foo')
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/third_party/python/mock-1.0.0/tests/support.py b/third_party/python/mock-1.0.0/tests/support.py
new file mode 100644
index 0000000000..1b10c3428f
--- /dev/null
+++ b/third_party/python/mock-1.0.0/tests/support.py
@@ -0,0 +1,41 @@
+import sys
+
+info = sys.version_info
+if info[:3] >= (3, 2, 0):
+ # for Python 3.2 ordinary unittest is fine
+ import unittest as unittest2
+else:
+ import unittest2
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return hasattr(obj, '__call__')
+
+
+inPy3k = sys.version_info[0] == 3
+with_available = sys.version_info[:2] >= (2, 5)
+
+
+def is_instance(obj, klass):
+ """Version of is_instance that doesn't access __class__"""
+ return issubclass(type(obj), klass)
+
+
+class SomeClass(object):
+ class_attribute = None
+
+ def wibble(self):
+ pass
+
+
+class X(object):
+ pass
+
+try:
+ next = next
+except NameError:
+ def next(obj):
+ return obj.next()
diff --git a/third_party/python/mock-1.0.0/tests/support_with.py b/third_party/python/mock-1.0.0/tests/support_with.py
new file mode 100644
index 0000000000..fa286122ce
--- /dev/null
+++ b/third_party/python/mock-1.0.0/tests/support_with.py
@@ -0,0 +1,93 @@
+from __future__ import with_statement
+
+import sys
+
+__all__ = ['nested', 'catch_warnings', 'examine_warnings']
+
+
+try:
+ from contextlib import nested
+except ImportError:
+ from contextlib import contextmanager
+ @contextmanager
+ def nested(*managers):
+ exits = []
+ vars = []
+ exc = (None, None, None)
+ try:
+ for mgr in managers:
+ exit = mgr.__exit__
+ enter = mgr.__enter__
+ vars.append(enter())
+ exits.append(exit)
+ yield vars
+ except:
+ exc = sys.exc_info()
+ finally:
+ while exits:
+ exit = exits.pop()
+ try:
+ if exit(*exc):
+ exc = (None, None, None)
+ except:
+ exc = sys.exc_info()
+ if exc != (None, None, None):
+ raise exc[1]
+
+# copied from Python 2.6
+try:
+ from warnings import catch_warnings
+except ImportError:
+ class catch_warnings(object):
+ def __init__(self, record=False, module=None):
+ self._record = record
+ self._module = sys.modules['warnings']
+ self._entered = False
+
+ def __repr__(self):
+ args = []
+ if self._record:
+ args.append("record=True")
+ name = type(self).__name__
+ return "%s(%s)" % (name, ", ".join(args))
+
+ def __enter__(self):
+ if self._entered:
+ raise RuntimeError("Cannot enter %r twice" % self)
+ self._entered = True
+ self._filters = self._module.filters
+ self._module.filters = self._filters[:]
+ self._showwarning = self._module.showwarning
+ if self._record:
+ log = []
+ def showwarning(*args, **kwargs):
+ log.append(WarningMessage(*args, **kwargs))
+ self._module.showwarning = showwarning
+ return log
+ else:
+ return None
+
+ def __exit__(self, *exc_info):
+ if not self._entered:
+ raise RuntimeError("Cannot exit %r without entering first" % self)
+ self._module.filters = self._filters
+ self._module.showwarning = self._showwarning
+
+ class WarningMessage(object):
+ _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
+ "line")
+ def __init__(self, message, category, filename, lineno, file=None,
+ line=None):
+ local_values = locals()
+ for attr in self._WARNING_DETAILS:
+ setattr(self, attr, local_values[attr])
+ self._category_name = None
+ if category:
+ self._category_name = category.__name__
+
+
+def examine_warnings(func):
+ def wrapper():
+ with catch_warnings(record=True) as ws:
+ func(ws)
+ return wrapper
diff --git a/third_party/python/mock-1.0.0/tests/testcallable.py b/third_party/python/mock-1.0.0/tests/testcallable.py
new file mode 100644
index 0000000000..f7dcd5e1df
--- /dev/null
+++ b/third_party/python/mock-1.0.0/tests/testcallable.py
@@ -0,0 +1,158 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from tests.support import is_instance, unittest2, X, SomeClass
+
+from mock import (
+ Mock, MagicMock, NonCallableMagicMock,
+ NonCallableMock, patch, create_autospec,
+ CallableMixin
+)
+
+
+
+class TestCallable(unittest2.TestCase):
+
+ def assertNotCallable(self, mock):
+ self.assertTrue(is_instance(mock, NonCallableMagicMock))
+ self.assertFalse(is_instance(mock, CallableMixin))
+
+
+ def test_non_callable(self):
+ for mock in NonCallableMagicMock(), NonCallableMock():
+ self.assertRaises(TypeError, mock)
+ self.assertFalse(hasattr(mock, '__call__'))
+ self.assertIn(mock.__class__.__name__, repr(mock))
+
+
+ def test_hierarchy(self):
+ self.assertTrue(issubclass(MagicMock, Mock))
+ self.assertTrue(issubclass(NonCallableMagicMock, NonCallableMock))
+
+
+ def test_attributes(self):
+ one = NonCallableMock()
+ self.assertTrue(issubclass(type(one.one), Mock))
+
+ two = NonCallableMagicMock()
+ self.assertTrue(issubclass(type(two.two), MagicMock))
+
+
+ def test_subclasses(self):
+ class MockSub(Mock):
+ pass
+
+ one = MockSub()
+ self.assertTrue(issubclass(type(one.one), MockSub))
+
+ class MagicSub(MagicMock):
+ pass
+
+ two = MagicSub()
+ self.assertTrue(issubclass(type(two.two), MagicSub))
+
+
+ def test_patch_spec(self):
+ patcher = patch('%s.X' % __name__, spec=True)
+ mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ instance = mock()
+ mock.assert_called_once_with()
+
+ self.assertNotCallable(instance)
+ self.assertRaises(TypeError, instance)
+
+
+ def test_patch_spec_set(self):
+ patcher = patch('%s.X' % __name__, spec_set=True)
+ mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ instance = mock()
+ mock.assert_called_once_with()
+
+ self.assertNotCallable(instance)
+ self.assertRaises(TypeError, instance)
+
+
+ def test_patch_spec_instance(self):
+ patcher = patch('%s.X' % __name__, spec=X())
+ mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ self.assertNotCallable(mock)
+ self.assertRaises(TypeError, mock)
+
+
+ def test_patch_spec_set_instance(self):
+ patcher = patch('%s.X' % __name__, spec_set=X())
+ mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ self.assertNotCallable(mock)
+ self.assertRaises(TypeError, mock)
+
+
+ def test_patch_spec_callable_class(self):
+ class CallableX(X):
+ def __call__(self):
+ pass
+
+ class Sub(CallableX):
+ pass
+
+ class Multi(SomeClass, Sub):
+ pass
+
+ class OldStyle:
+ def __call__(self):
+ pass
+
+ class OldStyleSub(OldStyle):
+ pass
+
+ for arg in 'spec', 'spec_set':
+ for Klass in CallableX, Sub, Multi, OldStyle, OldStyleSub:
+ patcher = patch('%s.X' % __name__, **{arg: Klass})
+ mock = patcher.start()
+
+ try:
+ instance = mock()
+ mock.assert_called_once_with()
+
+ self.assertTrue(is_instance(instance, MagicMock))
+ # inherited spec
+ self.assertRaises(AttributeError, getattr, instance,
+ 'foobarbaz')
+
+ result = instance()
+ # instance is callable, result has no spec
+ instance.assert_called_once_with()
+
+ result(3, 2, 1)
+ result.assert_called_once_with(3, 2, 1)
+ result.foo(3, 2, 1)
+ result.foo.assert_called_once_with(3, 2, 1)
+ finally:
+ patcher.stop()
+
+
+ def test_create_autospec(self):
+ mock = create_autospec(X)
+ instance = mock()
+ self.assertRaises(TypeError, instance)
+
+ mock = create_autospec(X())
+ self.assertRaises(TypeError, mock)
+
+
+ def test_create_autospec_instance(self):
+ mock = create_autospec(SomeClass, instance=True)
+
+ self.assertRaises(TypeError, mock)
+ mock.wibble()
+ mock.wibble.assert_called_once_with()
+
+ self.assertRaises(TypeError, mock.wibble, 'some', 'args')
diff --git a/third_party/python/mock-1.0.0/tests/testhelpers.py b/third_party/python/mock-1.0.0/tests/testhelpers.py
new file mode 100644
index 0000000000..e788da8447
--- /dev/null
+++ b/third_party/python/mock-1.0.0/tests/testhelpers.py
@@ -0,0 +1,940 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from tests.support import unittest2, inPy3k
+
+from mock import (
+ call, _Call, create_autospec, MagicMock,
+ Mock, ANY, _CallList, patch, PropertyMock
+)
+
+from datetime import datetime
+
+class SomeClass(object):
+ def one(self, a, b):
+ pass
+ def two(self):
+ pass
+ def three(self, a=None):
+ pass
+
+
+
+class AnyTest(unittest2.TestCase):
+
+ def test_any(self):
+ self.assertEqual(ANY, object())
+
+ mock = Mock()
+ mock(ANY)
+ mock.assert_called_with(ANY)
+
+ mock = Mock()
+ mock(foo=ANY)
+ mock.assert_called_with(foo=ANY)
+
+ def test_repr(self):
+ self.assertEqual(repr(ANY), '<ANY>')
+ self.assertEqual(str(ANY), '<ANY>')
+
+
+ def test_any_and_datetime(self):
+ mock = Mock()
+ mock(datetime.now(), foo=datetime.now())
+
+ mock.assert_called_with(ANY, foo=ANY)
+
+
+ def test_any_mock_calls_comparison_order(self):
+ mock = Mock()
+ d = datetime.now()
+ class Foo(object):
+ def __eq__(self, other):
+ return False
+ def __ne__(self, other):
+ return True
+
+ for d in datetime.now(), Foo():
+ mock.reset_mock()
+
+ mock(d, foo=d, bar=d)
+ mock.method(d, zinga=d, alpha=d)
+ mock().method(a1=d, z99=d)
+
+ expected = [
+ call(ANY, foo=ANY, bar=ANY),
+ call.method(ANY, zinga=ANY, alpha=ANY),
+ call(), call().method(a1=ANY, z99=ANY)
+ ]
+ self.assertEqual(expected, mock.mock_calls)
+ self.assertEqual(mock.mock_calls, expected)
+
+
+
+class CallTest(unittest2.TestCase):
+
+ def test_call_with_call(self):
+ kall = _Call()
+ self.assertEqual(kall, _Call())
+ self.assertEqual(kall, _Call(('',)))
+ self.assertEqual(kall, _Call(((),)))
+ self.assertEqual(kall, _Call(({},)))
+ self.assertEqual(kall, _Call(('', ())))
+ self.assertEqual(kall, _Call(('', {})))
+ self.assertEqual(kall, _Call(('', (), {})))
+ self.assertEqual(kall, _Call(('foo',)))
+ self.assertEqual(kall, _Call(('bar', ())))
+ self.assertEqual(kall, _Call(('baz', {})))
+ self.assertEqual(kall, _Call(('spam', (), {})))
+
+ kall = _Call(((1, 2, 3),))
+ self.assertEqual(kall, _Call(((1, 2, 3),)))
+ self.assertEqual(kall, _Call(('', (1, 2, 3))))
+ self.assertEqual(kall, _Call(((1, 2, 3), {})))
+ self.assertEqual(kall, _Call(('', (1, 2, 3), {})))
+
+ kall = _Call(((1, 2, 4),))
+ self.assertNotEqual(kall, _Call(('', (1, 2, 3))))
+ self.assertNotEqual(kall, _Call(('', (1, 2, 3), {})))
+
+ kall = _Call(('foo', (1, 2, 4),))
+ self.assertNotEqual(kall, _Call(('', (1, 2, 4))))
+ self.assertNotEqual(kall, _Call(('', (1, 2, 4), {})))
+ self.assertNotEqual(kall, _Call(('bar', (1, 2, 4))))
+ self.assertNotEqual(kall, _Call(('bar', (1, 2, 4), {})))
+
+ kall = _Call(({'a': 3},))
+ self.assertEqual(kall, _Call(('', (), {'a': 3})))
+ self.assertEqual(kall, _Call(('', {'a': 3})))
+ self.assertEqual(kall, _Call(((), {'a': 3})))
+ self.assertEqual(kall, _Call(({'a': 3},)))
+
+
+ def test_empty__Call(self):
+ args = _Call()
+
+ self.assertEqual(args, ())
+ self.assertEqual(args, ('foo',))
+ self.assertEqual(args, ((),))
+ self.assertEqual(args, ('foo', ()))
+ self.assertEqual(args, ('foo',(), {}))
+ self.assertEqual(args, ('foo', {}))
+ self.assertEqual(args, ({},))
+
+
+ def test_named_empty_call(self):
+ args = _Call(('foo', (), {}))
+
+ self.assertEqual(args, ('foo',))
+ self.assertEqual(args, ('foo', ()))
+ self.assertEqual(args, ('foo',(), {}))
+ self.assertEqual(args, ('foo', {}))
+
+ self.assertNotEqual(args, ((),))
+ self.assertNotEqual(args, ())
+ self.assertNotEqual(args, ({},))
+ self.assertNotEqual(args, ('bar',))
+ self.assertNotEqual(args, ('bar', ()))
+ self.assertNotEqual(args, ('bar', {}))
+
+
+ def test_call_with_args(self):
+ args = _Call(((1, 2, 3), {}))
+
+ self.assertEqual(args, ((1, 2, 3),))
+ self.assertEqual(args, ('foo', (1, 2, 3)))
+ self.assertEqual(args, ('foo', (1, 2, 3), {}))
+ self.assertEqual(args, ((1, 2, 3), {}))
+
+
+ def test_named_call_with_args(self):
+ args = _Call(('foo', (1, 2, 3), {}))
+
+ self.assertEqual(args, ('foo', (1, 2, 3)))
+ self.assertEqual(args, ('foo', (1, 2, 3), {}))
+
+ self.assertNotEqual(args, ((1, 2, 3),))
+ self.assertNotEqual(args, ((1, 2, 3), {}))
+
+
+ def test_call_with_kwargs(self):
+ args = _Call(((), dict(a=3, b=4)))
+
+ self.assertEqual(args, (dict(a=3, b=4),))
+ self.assertEqual(args, ('foo', dict(a=3, b=4)))
+ self.assertEqual(args, ('foo', (), dict(a=3, b=4)))
+ self.assertEqual(args, ((), dict(a=3, b=4)))
+
+
+ def test_named_call_with_kwargs(self):
+ args = _Call(('foo', (), dict(a=3, b=4)))
+
+ self.assertEqual(args, ('foo', dict(a=3, b=4)))
+ self.assertEqual(args, ('foo', (), dict(a=3, b=4)))
+
+ self.assertNotEqual(args, (dict(a=3, b=4),))
+ self.assertNotEqual(args, ((), dict(a=3, b=4)))
+
+
+ def test_call_with_args_call_empty_name(self):
+ args = _Call(((1, 2, 3), {}))
+ self.assertEqual(args, call(1, 2, 3))
+ self.assertEqual(call(1, 2, 3), args)
+ self.assertTrue(call(1, 2, 3) in [args])
+
+
+ def test_call_ne(self):
+ self.assertNotEqual(_Call(((1, 2, 3),)), call(1, 2))
+ self.assertFalse(_Call(((1, 2, 3),)) != call(1, 2, 3))
+ self.assertTrue(_Call(((1, 2), {})) != call(1, 2, 3))
+
+
+ def test_call_non_tuples(self):
+ kall = _Call(((1, 2, 3),))
+ for value in 1, None, self, int:
+ self.assertNotEqual(kall, value)
+ self.assertFalse(kall == value)
+
+
+ def test_repr(self):
+ self.assertEqual(repr(_Call()), 'call()')
+ self.assertEqual(repr(_Call(('foo',))), 'call.foo()')
+
+ self.assertEqual(repr(_Call(((1, 2, 3), {'a': 'b'}))),
+ "call(1, 2, 3, a='b')")
+ self.assertEqual(repr(_Call(('bar', (1, 2, 3), {'a': 'b'}))),
+ "call.bar(1, 2, 3, a='b')")
+
+ self.assertEqual(repr(call), 'call')
+ self.assertEqual(str(call), 'call')
+
+ self.assertEqual(repr(call()), 'call()')
+ self.assertEqual(repr(call(1)), 'call(1)')
+ self.assertEqual(repr(call(zz='thing')), "call(zz='thing')")
+
+ self.assertEqual(repr(call().foo), 'call().foo')
+ self.assertEqual(repr(call(1).foo.bar(a=3).bing),
+ 'call().foo.bar().bing')
+ self.assertEqual(
+ repr(call().foo(1, 2, a=3)),
+ "call().foo(1, 2, a=3)"
+ )
+ self.assertEqual(repr(call()()), "call()()")
+ self.assertEqual(repr(call(1)(2)), "call()(2)")
+ self.assertEqual(
+ repr(call()().bar().baz.beep(1)),
+ "call()().bar().baz.beep(1)"
+ )
+
+
+ def test_call(self):
+ self.assertEqual(call(), ('', (), {}))
+ self.assertEqual(call('foo', 'bar', one=3, two=4),
+ ('', ('foo', 'bar'), {'one': 3, 'two': 4}))
+
+ mock = Mock()
+ mock(1, 2, 3)
+ mock(a=3, b=6)
+ self.assertEqual(mock.call_args_list,
+ [call(1, 2, 3), call(a=3, b=6)])
+
+ def test_attribute_call(self):
+ self.assertEqual(call.foo(1), ('foo', (1,), {}))
+ self.assertEqual(call.bar.baz(fish='eggs'),
+ ('bar.baz', (), {'fish': 'eggs'}))
+
+ mock = Mock()
+ mock.foo(1, 2, 3)
+ mock.bar.baz(a=3, b=6)
+ self.assertEqual(mock.method_calls,
+ [call.foo(1, 2, 3), call.bar.baz(a=3, b=6)])
+
+
+ def test_extended_call(self):
+ result = call(1).foo(2).bar(3, a=4)
+ self.assertEqual(result, ('().foo().bar', (3,), dict(a=4)))
+
+ mock = MagicMock()
+ mock(1, 2, a=3, b=4)
+ self.assertEqual(mock.call_args, call(1, 2, a=3, b=4))
+ self.assertNotEqual(mock.call_args, call(1, 2, 3))
+
+ self.assertEqual(mock.call_args_list, [call(1, 2, a=3, b=4)])
+ self.assertEqual(mock.mock_calls, [call(1, 2, a=3, b=4)])
+
+ mock = MagicMock()
+ mock.foo(1).bar()().baz.beep(a=6)
+
+ last_call = call.foo(1).bar()().baz.beep(a=6)
+ self.assertEqual(mock.mock_calls[-1], last_call)
+ self.assertEqual(mock.mock_calls, last_call.call_list())
+
+
+ def test_call_list(self):
+ mock = MagicMock()
+ mock(1)
+ self.assertEqual(call(1).call_list(), mock.mock_calls)
+
+ mock = MagicMock()
+ mock(1).method(2)
+ self.assertEqual(call(1).method(2).call_list(),
+ mock.mock_calls)
+
+ mock = MagicMock()
+ mock(1).method(2)(3)
+ self.assertEqual(call(1).method(2)(3).call_list(),
+ mock.mock_calls)
+
+ mock = MagicMock()
+ int(mock(1).method(2)(3).foo.bar.baz(4)(5))
+ kall = call(1).method(2)(3).foo.bar.baz(4)(5).__int__()
+ self.assertEqual(kall.call_list(), mock.mock_calls)
+
+
+ def test_call_any(self):
+ self.assertEqual(call, ANY)
+
+ m = MagicMock()
+ int(m)
+ self.assertEqual(m.mock_calls, [ANY])
+ self.assertEqual([ANY], m.mock_calls)
+
+
+ def test_two_args_call(self):
+ args = _Call(((1, 2), {'a': 3}), two=True)
+ self.assertEqual(len(args), 2)
+ self.assertEqual(args[0], (1, 2))
+ self.assertEqual(args[1], {'a': 3})
+
+ other_args = _Call(((1, 2), {'a': 3}))
+ self.assertEqual(args, other_args)
+
+
+class SpecSignatureTest(unittest2.TestCase):
+
+ def _check_someclass_mock(self, mock):
+ self.assertRaises(AttributeError, getattr, mock, 'foo')
+ mock.one(1, 2)
+ mock.one.assert_called_with(1, 2)
+ self.assertRaises(AssertionError,
+ mock.one.assert_called_with, 3, 4)
+ self.assertRaises(TypeError, mock.one, 1)
+
+ mock.two()
+ mock.two.assert_called_with()
+ self.assertRaises(AssertionError,
+ mock.two.assert_called_with, 3)
+ self.assertRaises(TypeError, mock.two, 1)
+
+ mock.three()
+ mock.three.assert_called_with()
+ self.assertRaises(AssertionError,
+ mock.three.assert_called_with, 3)
+ self.assertRaises(TypeError, mock.three, 3, 2)
+
+ mock.three(1)
+ mock.three.assert_called_with(1)
+
+ mock.three(a=1)
+ mock.three.assert_called_with(a=1)
+
+
+ def test_basic(self):
+ for spec in (SomeClass, SomeClass()):
+ mock = create_autospec(spec)
+ self._check_someclass_mock(mock)
+
+
+ def test_create_autospec_return_value(self):
+ def f():
+ pass
+ mock = create_autospec(f, return_value='foo')
+ self.assertEqual(mock(), 'foo')
+
+ class Foo(object):
+ pass
+
+ mock = create_autospec(Foo, return_value='foo')
+ self.assertEqual(mock(), 'foo')
+
+
+ def test_autospec_reset_mock(self):
+ m = create_autospec(int)
+ int(m)
+ m.reset_mock()
+ self.assertEqual(m.__int__.call_count, 0)
+
+
+ def test_mocking_unbound_methods(self):
+ class Foo(object):
+ def foo(self, foo):
+ pass
+ p = patch.object(Foo, 'foo')
+ mock_foo = p.start()
+ Foo().foo(1)
+
+ mock_foo.assert_called_with(1)
+
+
+ @unittest2.expectedFailure
+ def test_create_autospec_unbound_methods(self):
+ # see issue 128
+ class Foo(object):
+ def foo(self):
+ pass
+
+ klass = create_autospec(Foo)
+ instance = klass()
+ self.assertRaises(TypeError, instance.foo, 1)
+
+ # Note: no type checking on the "self" parameter
+ klass.foo(1)
+ klass.foo.assert_called_with(1)
+ self.assertRaises(TypeError, klass.foo)
+
+
+ def test_create_autospec_keyword_arguments(self):
+ class Foo(object):
+ a = 3
+ m = create_autospec(Foo, a='3')
+ self.assertEqual(m.a, '3')
+
+ @unittest2.skipUnless(inPy3k, "Keyword only arguments Python 3 specific")
+ def test_create_autospec_keyword_only_arguments(self):
+ func_def = "def foo(a, *, b=None):\n pass\n"
+ namespace = {}
+ exec (func_def, namespace)
+ foo = namespace['foo']
+
+ m = create_autospec(foo)
+ m(1)
+ m.assert_called_with(1)
+ self.assertRaises(TypeError, m, 1, 2)
+
+ m(2, b=3)
+ m.assert_called_with(2, b=3)
+
+ def test_function_as_instance_attribute(self):
+ obj = SomeClass()
+ def f(a):
+ pass
+ obj.f = f
+
+ mock = create_autospec(obj)
+ mock.f('bing')
+ mock.f.assert_called_with('bing')
+
+
+ def test_spec_as_list(self):
+ # because spec as a list of strings in the mock constructor means
+ # something very different we treat a list instance as the type.
+ mock = create_autospec([])
+ mock.append('foo')
+ mock.append.assert_called_with('foo')
+
+ self.assertRaises(AttributeError, getattr, mock, 'foo')
+
+ class Foo(object):
+ foo = []
+
+ mock = create_autospec(Foo)
+ mock.foo.append(3)
+ mock.foo.append.assert_called_with(3)
+ self.assertRaises(AttributeError, getattr, mock.foo, 'foo')
+
+
+ def test_attributes(self):
+ class Sub(SomeClass):
+ attr = SomeClass()
+
+ sub_mock = create_autospec(Sub)
+
+ for mock in (sub_mock, sub_mock.attr):
+ self._check_someclass_mock(mock)
+
+
+ def test_builtin_functions_types(self):
+ # we could replace builtin functions / methods with a function
+ # with *args / **kwargs signature. Using the builtin method type
+ # as a spec seems to work fairly well though.
+ class BuiltinSubclass(list):
+ def bar(self, arg):
+ pass
+ sorted = sorted
+ attr = {}
+
+ mock = create_autospec(BuiltinSubclass)
+ mock.append(3)
+ mock.append.assert_called_with(3)
+ self.assertRaises(AttributeError, getattr, mock.append, 'foo')
+
+ mock.bar('foo')
+ mock.bar.assert_called_with('foo')
+ self.assertRaises(TypeError, mock.bar, 'foo', 'bar')
+ self.assertRaises(AttributeError, getattr, mock.bar, 'foo')
+
+ mock.sorted([1, 2])
+ mock.sorted.assert_called_with([1, 2])
+ self.assertRaises(AttributeError, getattr, mock.sorted, 'foo')
+
+ mock.attr.pop(3)
+ mock.attr.pop.assert_called_with(3)
+ self.assertRaises(AttributeError, getattr, mock.attr, 'foo')
+
+
+ def test_method_calls(self):
+ class Sub(SomeClass):
+ attr = SomeClass()
+
+ mock = create_autospec(Sub)
+ mock.one(1, 2)
+ mock.two()
+ mock.three(3)
+
+ expected = [call.one(1, 2), call.two(), call.three(3)]
+ self.assertEqual(mock.method_calls, expected)
+
+ mock.attr.one(1, 2)
+ mock.attr.two()
+ mock.attr.three(3)
+
+ expected.extend(
+ [call.attr.one(1, 2), call.attr.two(), call.attr.three(3)]
+ )
+ self.assertEqual(mock.method_calls, expected)
+
+
+ def test_magic_methods(self):
+ class BuiltinSubclass(list):
+ attr = {}
+
+ mock = create_autospec(BuiltinSubclass)
+ self.assertEqual(list(mock), [])
+ self.assertRaises(TypeError, int, mock)
+ self.assertRaises(TypeError, int, mock.attr)
+ self.assertEqual(list(mock), [])
+
+ self.assertIsInstance(mock['foo'], MagicMock)
+ self.assertIsInstance(mock.attr['foo'], MagicMock)
+
+
+ def test_spec_set(self):
+ class Sub(SomeClass):
+ attr = SomeClass()
+
+ for spec in (Sub, Sub()):
+ mock = create_autospec(spec, spec_set=True)
+ self._check_someclass_mock(mock)
+
+ self.assertRaises(AttributeError, setattr, mock, 'foo', 'bar')
+ self.assertRaises(AttributeError, setattr, mock.attr, 'foo', 'bar')
+
+
+ def test_descriptors(self):
+ class Foo(object):
+ @classmethod
+ def f(cls, a, b):
+ pass
+ @staticmethod
+ def g(a, b):
+ pass
+
+ class Bar(Foo):
+ pass
+
+ class Baz(SomeClass, Bar):
+ pass
+
+ for spec in (Foo, Foo(), Bar, Bar(), Baz, Baz()):
+ mock = create_autospec(spec)
+ mock.f(1, 2)
+ mock.f.assert_called_once_with(1, 2)
+
+ mock.g(3, 4)
+ mock.g.assert_called_once_with(3, 4)
+
+
+ @unittest2.skipIf(inPy3k, "No old style classes in Python 3")
+ def test_old_style_classes(self):
+ class Foo:
+ def f(self, a, b):
+ pass
+
+ class Bar(Foo):
+ g = Foo()
+
+ for spec in (Foo, Foo(), Bar, Bar()):
+ mock = create_autospec(spec)
+ mock.f(1, 2)
+ mock.f.assert_called_once_with(1, 2)
+
+ self.assertRaises(AttributeError, getattr, mock, 'foo')
+ self.assertRaises(AttributeError, getattr, mock.f, 'foo')
+
+ mock.g.f(1, 2)
+ mock.g.f.assert_called_once_with(1, 2)
+ self.assertRaises(AttributeError, getattr, mock.g, 'foo')
+
+
+ def test_recursive(self):
+ class A(object):
+ def a(self):
+ pass
+ foo = 'foo bar baz'
+ bar = foo
+
+ A.B = A
+ mock = create_autospec(A)
+
+ mock()
+ self.assertFalse(mock.B.called)
+
+ mock.a()
+ mock.B.a()
+ self.assertEqual(mock.method_calls, [call.a(), call.B.a()])
+
+ self.assertIs(A.foo, A.bar)
+ self.assertIsNot(mock.foo, mock.bar)
+ mock.foo.lower()
+ self.assertRaises(AssertionError, mock.bar.lower.assert_called_with)
+
+
+ def test_spec_inheritance_for_classes(self):
+ class Foo(object):
+ def a(self):
+ pass
+ class Bar(object):
+ def f(self):
+ pass
+
+ class_mock = create_autospec(Foo)
+
+ self.assertIsNot(class_mock, class_mock())
+
+ for this_mock in class_mock, class_mock():
+ this_mock.a()
+ this_mock.a.assert_called_with()
+ self.assertRaises(TypeError, this_mock.a, 'foo')
+ self.assertRaises(AttributeError, getattr, this_mock, 'b')
+
+ instance_mock = create_autospec(Foo())
+ instance_mock.a()
+ instance_mock.a.assert_called_with()
+ self.assertRaises(TypeError, instance_mock.a, 'foo')
+ self.assertRaises(AttributeError, getattr, instance_mock, 'b')
+
+ # The return value isn't callable
+ self.assertRaises(TypeError, instance_mock)
+
+ instance_mock.Bar.f()
+ instance_mock.Bar.f.assert_called_with()
+ self.assertRaises(AttributeError, getattr, instance_mock.Bar, 'g')
+
+ instance_mock.Bar().f()
+ instance_mock.Bar().f.assert_called_with()
+ self.assertRaises(AttributeError, getattr, instance_mock.Bar(), 'g')
+
+
+ def test_inherit(self):
+ class Foo(object):
+ a = 3
+
+ Foo.Foo = Foo
+
+ # class
+ mock = create_autospec(Foo)
+ instance = mock()
+ self.assertRaises(AttributeError, getattr, instance, 'b')
+
+ attr_instance = mock.Foo()
+ self.assertRaises(AttributeError, getattr, attr_instance, 'b')
+
+ # instance
+ mock = create_autospec(Foo())
+ self.assertRaises(AttributeError, getattr, mock, 'b')
+ self.assertRaises(TypeError, mock)
+
+ # attribute instance
+ call_result = mock.Foo()
+ self.assertRaises(AttributeError, getattr, call_result, 'b')
+
+
+ def test_builtins(self):
+ # used to fail with infinite recursion
+ create_autospec(1)
+
+ create_autospec(int)
+ create_autospec('foo')
+ create_autospec(str)
+ create_autospec({})
+ create_autospec(dict)
+ create_autospec([])
+ create_autospec(list)
+ create_autospec(set())
+ create_autospec(set)
+ create_autospec(1.0)
+ create_autospec(float)
+ create_autospec(1j)
+ create_autospec(complex)
+ create_autospec(False)
+ create_autospec(True)
+
+
+ def test_function(self):
+ def f(a, b):
+ pass
+
+ mock = create_autospec(f)
+ self.assertRaises(TypeError, mock)
+ mock(1, 2)
+ mock.assert_called_with(1, 2)
+
+ f.f = f
+ mock = create_autospec(f)
+ self.assertRaises(TypeError, mock.f)
+ mock.f(3, 4)
+ mock.f.assert_called_with(3, 4)
+
+
+ def test_skip_attributeerrors(self):
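+        # descriptors that raise AttributeError when accessed on the class
+        # are skipped during speccing and end up as plain, unspecced attributes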
+ class Raiser(object):
+ def __get__(self, obj, type=None):
+ if obj is None:
+ raise AttributeError('Can only be accessed via an instance')
+
+ class RaiserClass(object):
+ raiser = Raiser()
+
+ @staticmethod
+ def existing(a, b):
+ return a + b
+
+ s = create_autospec(RaiserClass)
+        self.assertRaises(TypeError, lambda: s.existing(1, 2, 3))
+ s.existing(1, 2)
+ self.assertRaises(AttributeError, lambda: s.nonexisting)
+
+ # check we can fetch the raiser attribute and it has no spec
+ obj = s.raiser
+ obj.foo, obj.bar
+
+
+ def test_signature_class(self):
+ class Foo(object):
+ def __init__(self, a, b=3):
+ pass
+
+ mock = create_autospec(Foo)
+
+ self.assertRaises(TypeError, mock)
+ mock(1)
+ mock.assert_called_once_with(1)
+
+ mock(4, 5)
+ mock.assert_called_with(4, 5)
+
+
+ @unittest2.skipIf(inPy3k, 'no old style classes in Python 3')
+ def test_signature_old_style_class(self):
+ class Foo:
+ def __init__(self, a, b=3):
+ pass
+
+ mock = create_autospec(Foo)
+
+ self.assertRaises(TypeError, mock)
+ mock(1)
+ mock.assert_called_once_with(1)
+
+ mock(4, 5)
+ mock.assert_called_with(4, 5)
+
+
+ def test_class_with_no_init(self):
+ # this used to raise an exception
+ # due to trying to get a signature from object.__init__
+ class Foo(object):
+ pass
+ create_autospec(Foo)
+
+
+ @unittest2.skipIf(inPy3k, 'no old style classes in Python 3')
+ def test_old_style_class_with_no_init(self):
+ # this used to raise an exception
+ # due to Foo.__init__ raising an AttributeError
+ class Foo:
+ pass
+ create_autospec(Foo)
+
+
+ def test_signature_callable(self):
+ class Callable(object):
+ def __init__(self):
+ pass
+ def __call__(self, a):
+ pass
+
+ mock = create_autospec(Callable)
+ mock()
+ mock.assert_called_once_with()
+ self.assertRaises(TypeError, mock, 'a')
+
+ instance = mock()
+ self.assertRaises(TypeError, instance)
+ instance(a='a')
+ instance.assert_called_once_with(a='a')
+ instance('a')
+ instance.assert_called_with('a')
+
+ mock = create_autospec(Callable())
+ mock(a='a')
+ mock.assert_called_once_with(a='a')
+ self.assertRaises(TypeError, mock)
+ mock('a')
+ mock.assert_called_with('a')
+
+
+ def test_signature_noncallable(self):
+ class NonCallable(object):
+ def __init__(self):
+ pass
+
+ mock = create_autospec(NonCallable)
+ instance = mock()
+ mock.assert_called_once_with()
+ self.assertRaises(TypeError, mock, 'a')
+ self.assertRaises(TypeError, instance)
+ self.assertRaises(TypeError, instance, 'a')
+
+ mock = create_autospec(NonCallable())
+ self.assertRaises(TypeError, mock)
+ self.assertRaises(TypeError, mock, 'a')
+
+
+ def test_create_autospec_none(self):
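+        # attributes whose value is None get an ordinary, unrestricted mock
+        # rather than being specced as NoneType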
+ class Foo(object):
+ bar = None
+
+ mock = create_autospec(Foo)
+ none = mock.bar
+ self.assertNotIsInstance(none, type(None))
+
+ none.foo()
+ none.foo.assert_called_once_with()
+
+
+ def test_autospec_functions_with_self_in_odd_place(self):
+ class Foo(object):
+ def f(a, self):
+ pass
+
+ a = create_autospec(Foo)
+ a.f(self=10)
+ a.f.assert_called_with(self=10)
+
+
+ def test_autospec_property(self):
+ class Foo(object):
+ @property
+ def foo(self):
+ return 3
+
+ foo = create_autospec(Foo)
+ mock_property = foo.foo
+
+ # no spec on properties
+ self.assertTrue(isinstance(mock_property, MagicMock))
+ mock_property(1, 2, 3)
+ mock_property.abc(4, 5, 6)
+ mock_property.assert_called_once_with(1, 2, 3)
+ mock_property.abc.assert_called_once_with(4, 5, 6)
+
+
+ def test_autospec_slots(self):
+ class Foo(object):
+ __slots__ = ['a']
+
+ foo = create_autospec(Foo)
+ mock_slot = foo.a
+
+ # no spec on slots
+ mock_slot(1, 2, 3)
+ mock_slot.abc(4, 5, 6)
+ mock_slot.assert_called_once_with(1, 2, 3)
+ mock_slot.abc.assert_called_once_with(4, 5, 6)
+
+
+class TestCallList(unittest2.TestCase):
+
+ def test_args_list_contains_call_list(self):
+ mock = Mock()
+ self.assertIsInstance(mock.call_args_list, _CallList)
+
+ mock(1, 2)
+ mock(a=3)
+ mock(3, 4)
+ mock(b=6)
+
+ for kall in call(1, 2), call(a=3), call(3, 4), call(b=6):
+ self.assertTrue(kall in mock.call_args_list)
+
+ calls = [call(a=3), call(3, 4)]
+ self.assertTrue(calls in mock.call_args_list)
+ calls = [call(1, 2), call(a=3)]
+ self.assertTrue(calls in mock.call_args_list)
+ calls = [call(3, 4), call(b=6)]
+ self.assertTrue(calls in mock.call_args_list)
+ calls = [call(3, 4)]
+ self.assertTrue(calls in mock.call_args_list)
+
+ self.assertFalse(call('fish') in mock.call_args_list)
+ self.assertFalse([call('fish')] in mock.call_args_list)
+
+
+ def test_call_list_str(self):
+ mock = Mock()
+ mock(1, 2)
+ mock.foo(a=3)
+ mock.foo.bar().baz('fish', cat='dog')
+
+ expected = (
+ "[call(1, 2),\n"
+ " call.foo(a=3),\n"
+ " call.foo.bar(),\n"
+ " call.foo.bar().baz('fish', cat='dog')]"
+ )
+ self.assertEqual(str(mock.mock_calls), expected)
+
+
+ def test_propertymock(self):
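+        # reading the patched attribute calls the PropertyMock with no args;
+        # assigning to it calls the PropertyMock with the assigned value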
+ p = patch('%s.SomeClass.one' % __name__, new_callable=PropertyMock)
+ mock = p.start()
+ try:
+ SomeClass.one
+ mock.assert_called_once_with()
+
+ s = SomeClass()
+ s.one
+ mock.assert_called_with()
+ self.assertEqual(mock.mock_calls, [call(), call()])
+
+ s.one = 3
+ self.assertEqual(mock.mock_calls, [call(), call(), call(3)])
+ finally:
+ p.stop()
+
+
+ def test_propertymock_returnvalue(self):
+ m = MagicMock()
+ p = PropertyMock()
+ type(m).foo = p
+
+ returned = m.foo
+ p.assert_called_once_with()
+ self.assertIsInstance(returned, MagicMock)
+ self.assertNotIsInstance(returned, PropertyMock)
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/third_party/python/mock-1.0.0/tests/testmagicmethods.py b/third_party/python/mock-1.0.0/tests/testmagicmethods.py
new file mode 100644
index 0000000000..ef0f16d826
--- /dev/null
+++ b/third_party/python/mock-1.0.0/tests/testmagicmethods.py
@@ -0,0 +1,486 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from tests.support import unittest2, inPy3k
+
+try:
+ unicode
+except NameError:
+ # Python 3
+ unicode = str
+ long = int
+
+import inspect
+import sys
+from mock import Mock, MagicMock, _magics
+
+
+
+class TestMockingMagicMethods(unittest2.TestCase):
+
+ def test_deleting_magic_methods(self):
+ mock = Mock()
+ self.assertFalse(hasattr(mock, '__getitem__'))
+
+ mock.__getitem__ = Mock()
+ self.assertTrue(hasattr(mock, '__getitem__'))
+
+ del mock.__getitem__
+ self.assertFalse(hasattr(mock, '__getitem__'))
+
+
+ def test_magicmock_del(self):
+ mock = MagicMock()
+ # before using getitem
+ del mock.__getitem__
+ self.assertRaises(TypeError, lambda: mock['foo'])
+
+ mock = MagicMock()
+ # this time use it first
+ mock['foo']
+ del mock.__getitem__
+ self.assertRaises(TypeError, lambda: mock['foo'])
+
+
+ def test_magic_method_wrapping(self):
+ mock = Mock()
+ def f(self, name):
+ return self, 'fish'
+
+ mock.__getitem__ = f
+ self.assertFalse(mock.__getitem__ is f)
+ self.assertEqual(mock['foo'], (mock, 'fish'))
+ self.assertEqual(mock.__getitem__('foo'), (mock, 'fish'))
+
+ mock.__getitem__ = mock
+ self.assertTrue(mock.__getitem__ is mock)
+
+
+ def test_magic_methods_isolated_between_mocks(self):
+ mock1 = Mock()
+ mock2 = Mock()
+
+ mock1.__iter__ = Mock(return_value=iter([]))
+ self.assertEqual(list(mock1), [])
+ self.assertRaises(TypeError, lambda: list(mock2))
+
+
+ def test_repr(self):
+ mock = Mock()
+ self.assertEqual(repr(mock), "<Mock id='%s'>" % id(mock))
+ mock.__repr__ = lambda s: 'foo'
+ self.assertEqual(repr(mock), 'foo')
+
+
+ def test_str(self):
+ mock = Mock()
+ self.assertEqual(str(mock), object.__str__(mock))
+ mock.__str__ = lambda s: 'foo'
+ self.assertEqual(str(mock), 'foo')
+
+
+ @unittest2.skipIf(inPy3k, "no unicode in Python 3")
+ def test_unicode(self):
+ mock = Mock()
+ self.assertEqual(unicode(mock), unicode(str(mock)))
+
+ mock.__unicode__ = lambda s: unicode('foo')
+ self.assertEqual(unicode(mock), unicode('foo'))
+
+
+ def test_dict_methods(self):
+ mock = Mock()
+
+ self.assertRaises(TypeError, lambda: mock['foo'])
+ def _del():
+ del mock['foo']
+ def _set():
+ mock['foo'] = 3
+ self.assertRaises(TypeError, _del)
+ self.assertRaises(TypeError, _set)
+
+ _dict = {}
+ def getitem(s, name):
+ return _dict[name]
+ def setitem(s, name, value):
+ _dict[name] = value
+ def delitem(s, name):
+ del _dict[name]
+
+ mock.__setitem__ = setitem
+ mock.__getitem__ = getitem
+ mock.__delitem__ = delitem
+
+ self.assertRaises(KeyError, lambda: mock['foo'])
+ mock['foo'] = 'bar'
+ self.assertEqual(_dict, {'foo': 'bar'})
+ self.assertEqual(mock['foo'], 'bar')
+ del mock['foo']
+ self.assertEqual(_dict, {})
+
+
+ def test_numeric(self):
+ original = mock = Mock()
+ mock.value = 0
+
+ self.assertRaises(TypeError, lambda: mock + 3)
+
+ def add(self, other):
+ mock.value += other
+ return self
+ mock.__add__ = add
+ self.assertEqual(mock + 3, mock)
+ self.assertEqual(mock.value, 3)
+
+ del mock.__add__
+ def iadd(mock):
+ mock += 3
+ self.assertRaises(TypeError, iadd, mock)
+ mock.__iadd__ = add
+ mock += 6
+ self.assertEqual(mock, original)
+ self.assertEqual(mock.value, 9)
+
+ self.assertRaises(TypeError, lambda: 3 + mock)
+ mock.__radd__ = add
+ self.assertEqual(7 + mock, mock)
+ self.assertEqual(mock.value, 16)
+
+
+ @unittest2.skipIf(inPy3k, 'no truediv in Python 3')
+ def test_truediv(self):
+ mock = MagicMock()
+ mock.__truediv__.return_value = 6
+
+ context = {'mock': mock}
+ code = 'from __future__ import division\nresult = mock / 7\n'
+ exec(code, context)
+ self.assertEqual(context['result'], 6)
+
+ mock.__rtruediv__.return_value = 3
+ code = 'from __future__ import division\nresult = 2 / mock\n'
+ exec(code, context)
+ self.assertEqual(context['result'], 3)
+
+
+ @unittest2.skipIf(not inPy3k, 'truediv is available in Python 2')
+ def test_no_truediv(self):
+ self.assertRaises(
+ AttributeError, getattr, MagicMock(), '__truediv__'
+ )
+ self.assertRaises(
+ AttributeError, getattr, MagicMock(), '__rtruediv__'
+ )
+
+
+ def test_hash(self):
+ mock = Mock()
+ # test delegation
+ self.assertEqual(hash(mock), Mock.__hash__(mock))
+
+ def _hash(s):
+ return 3
+ mock.__hash__ = _hash
+ self.assertEqual(hash(mock), 3)
+
+
+ def test_nonzero(self):
+ m = Mock()
+ self.assertTrue(bool(m))
+
+ nonzero = lambda s: False
+ if not inPy3k:
+ m.__nonzero__ = nonzero
+ else:
+ m.__bool__ = nonzero
+
+ self.assertFalse(bool(m))
+
+
+ def test_comparison(self):
+ # note: this test fails with Jython 2.5.1 due to a Jython bug
+ # it is fixed in jython 2.5.2
+ if not inPy3k:
+ # incomparable in Python 3
+            self.assertEqual(Mock() < 3, object() < 3)
+            self.assertEqual(Mock() > 3, object() > 3)
+            self.assertEqual(Mock() <= 3, object() <= 3)
+            self.assertEqual(Mock() >= 3, object() >= 3)
+ else:
+ self.assertRaises(TypeError, lambda: MagicMock() < object())
+ self.assertRaises(TypeError, lambda: object() < MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() < MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() > object())
+ self.assertRaises(TypeError, lambda: object() > MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() > MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() <= object())
+ self.assertRaises(TypeError, lambda: object() <= MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() <= MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() >= object())
+ self.assertRaises(TypeError, lambda: object() >= MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() >= MagicMock())
+
+ mock = Mock()
+ def comp(s, o):
+ return True
+ mock.__lt__ = mock.__gt__ = mock.__le__ = mock.__ge__ = comp
+        self.assertTrue(mock < 3)
+        self.assertTrue(mock > 3)
+        self.assertTrue(mock <= 3)
+        self.assertTrue(mock >= 3)
+
+
+ def test_equality(self):
+ for mock in Mock(), MagicMock():
+ self.assertEqual(mock == mock, True)
+ self.assertIsInstance(mock == mock, bool)
+ self.assertEqual(mock != mock, False)
+ self.assertIsInstance(mock != mock, bool)
+ self.assertEqual(mock == object(), False)
+ self.assertEqual(mock != object(), True)
+
+ def eq(self, other):
+ return other == 3
+ mock.__eq__ = eq
+ self.assertTrue(mock == 3)
+ self.assertFalse(mock == 4)
+
+ def ne(self, other):
+ return other == 3
+ mock.__ne__ = ne
+ self.assertTrue(mock != 3)
+ self.assertFalse(mock != 4)
+
+ mock = MagicMock()
+ mock.__eq__.return_value = True
+ self.assertIsInstance(mock == 3, bool)
+ self.assertEqual(mock == 3, True)
+
+ mock.__ne__.return_value = False
+ self.assertIsInstance(mock != 3, bool)
+ self.assertEqual(mock != 3, False)
+
+
+ def test_len_contains_iter(self):
+ mock = Mock()
+
+ self.assertRaises(TypeError, len, mock)
+ self.assertRaises(TypeError, iter, mock)
+ self.assertRaises(TypeError, lambda: 'foo' in mock)
+
+ mock.__len__ = lambda s: 6
+ self.assertEqual(len(mock), 6)
+
+ mock.__contains__ = lambda s, o: o == 3
+ self.assertTrue(3 in mock)
+ self.assertFalse(6 in mock)
+
+ mock.__iter__ = lambda s: iter('foobarbaz')
+ self.assertEqual(list(mock), list('foobarbaz'))
+
+
+ def test_magicmock(self):
+ mock = MagicMock()
+
+ mock.__iter__.return_value = iter([1, 2, 3])
+ self.assertEqual(list(mock), [1, 2, 3])
+
+ name = '__nonzero__'
+ other = '__bool__'
+ if inPy3k:
+ name, other = other, name
+ getattr(mock, name).return_value = False
+ self.assertFalse(hasattr(mock, other))
+ self.assertFalse(bool(mock))
+
+ for entry in _magics:
+ self.assertTrue(hasattr(mock, entry))
+ self.assertFalse(hasattr(mock, '__imaginery__'))
+
+
+ def test_magic_mock_equality(self):
+ mock = MagicMock()
+ self.assertIsInstance(mock == object(), bool)
+ self.assertIsInstance(mock != object(), bool)
+
+ self.assertEqual(mock == object(), False)
+ self.assertEqual(mock != object(), True)
+ self.assertEqual(mock == mock, True)
+ self.assertEqual(mock != mock, False)
+
+
+ def test_magicmock_defaults(self):
+ mock = MagicMock()
+ self.assertEqual(int(mock), 1)
+ self.assertEqual(complex(mock), 1j)
+ self.assertEqual(float(mock), 1.0)
+ self.assertEqual(long(mock), long(1))
+ self.assertNotIn(object(), mock)
+ self.assertEqual(len(mock), 0)
+ self.assertEqual(list(mock), [])
+ self.assertEqual(hash(mock), object.__hash__(mock))
+ self.assertEqual(str(mock), object.__str__(mock))
+ self.assertEqual(unicode(mock), object.__str__(mock))
+ self.assertIsInstance(unicode(mock), unicode)
+ self.assertTrue(bool(mock))
+ if not inPy3k:
+ self.assertEqual(oct(mock), '1')
+ else:
+ # in Python 3 oct and hex use __index__
+ # so these tests are for __index__ in py3k
+ self.assertEqual(oct(mock), '0o1')
+ self.assertEqual(hex(mock), '0x1')
+ # how to test __sizeof__ ?
+
+
+ @unittest2.skipIf(inPy3k, "no __cmp__ in Python 3")
+ def test_non_default_magic_methods(self):
+ mock = MagicMock()
+ self.assertRaises(AttributeError, lambda: mock.__cmp__)
+
+ mock = Mock()
+ mock.__cmp__ = lambda s, o: 0
+
+ self.assertEqual(mock, object())
+
+
+ def test_magic_methods_and_spec(self):
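+        # a plain Mock never configures magic methods, even with a spec;
+        # they may only be set (or, for MagicMock, preconfigured) when the
+        # spec itself defines them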
+ class Iterable(object):
+ def __iter__(self):
+ pass
+
+ mock = Mock(spec=Iterable)
+ self.assertRaises(AttributeError, lambda: mock.__iter__)
+
+ mock.__iter__ = Mock(return_value=iter([]))
+ self.assertEqual(list(mock), [])
+
+ class NonIterable(object):
+ pass
+ mock = Mock(spec=NonIterable)
+ self.assertRaises(AttributeError, lambda: mock.__iter__)
+
+ def set_int():
+ mock.__int__ = Mock(return_value=iter([]))
+ self.assertRaises(AttributeError, set_int)
+
+ mock = MagicMock(spec=Iterable)
+ self.assertEqual(list(mock), [])
+ self.assertRaises(AttributeError, set_int)
+
+
+ def test_magic_methods_and_spec_set(self):
+ class Iterable(object):
+ def __iter__(self):
+ pass
+
+ mock = Mock(spec_set=Iterable)
+ self.assertRaises(AttributeError, lambda: mock.__iter__)
+
+ mock.__iter__ = Mock(return_value=iter([]))
+ self.assertEqual(list(mock), [])
+
+ class NonIterable(object):
+ pass
+ mock = Mock(spec_set=NonIterable)
+ self.assertRaises(AttributeError, lambda: mock.__iter__)
+
+ def set_int():
+ mock.__int__ = Mock(return_value=iter([]))
+ self.assertRaises(AttributeError, set_int)
+
+ mock = MagicMock(spec_set=Iterable)
+ self.assertEqual(list(mock), [])
+ self.assertRaises(AttributeError, set_int)
+
+
+ def test_setting_unsupported_magic_method(self):
+ mock = MagicMock()
+ def set_setattr():
+ mock.__setattr__ = lambda self, name: None
+ self.assertRaisesRegexp(AttributeError,
+ "Attempting to set unsupported magic method '__setattr__'.",
+ set_setattr
+ )
+
+
+ def test_attributes_and_return_value(self):
+ mock = MagicMock()
+ attr = mock.foo
+ def _get_type(obj):
+ # the type of every mock (or magicmock) is a custom subclass
+ # so the real type is the second in the mro
+ return type(obj).__mro__[1]
+ self.assertEqual(_get_type(attr), MagicMock)
+
+ returned = mock()
+ self.assertEqual(_get_type(returned), MagicMock)
+
+
+ def test_magic_methods_are_magic_mocks(self):
+ mock = MagicMock()
+ self.assertIsInstance(mock.__getitem__, MagicMock)
+
+ mock[1][2].__getitem__.return_value = 3
+ self.assertEqual(mock[1][2][3], 3)
+
+
+ def test_magic_method_reset_mock(self):
+ mock = MagicMock()
+ str(mock)
+ self.assertTrue(mock.__str__.called)
+ mock.reset_mock()
+ self.assertFalse(mock.__str__.called)
+
+
+ @unittest2.skipUnless(sys.version_info[:2] >= (2, 6),
+ "__dir__ not available until Python 2.6 or later")
+ def test_dir(self):
+ # overriding the default implementation
+ for mock in Mock(), MagicMock():
+ def _dir(self):
+ return ['foo']
+ mock.__dir__ = _dir
+ self.assertEqual(dir(mock), ['foo'])
+
+
+ @unittest2.skipIf('PyPy' in sys.version, "This fails differently on pypy")
+ def test_bound_methods(self):
+ m = Mock()
+
+ # XXXX should this be an expected failure instead?
+
+ # this seems like it should work, but is hard to do without introducing
+ # other api inconsistencies. Failure message could be better though.
+ m.__iter__ = [3].__iter__
+ self.assertRaises(TypeError, iter, m)
+
+
+ def test_magic_method_type(self):
+ class Foo(MagicMock):
+ pass
+
+ foo = Foo()
+ self.assertIsInstance(foo.__int__, Foo)
+
+
+ def test_descriptor_from_class(self):
+ m = MagicMock()
+ type(m).__str__.return_value = 'foo'
+ self.assertEqual(str(m), 'foo')
+
+
+ def test_iterable_as_iter_return_value(self):
+ m = MagicMock()
+ m.__iter__.return_value = [1, 2, 3]
+ self.assertEqual(list(m), [1, 2, 3])
+ self.assertEqual(list(m), [1, 2, 3])
+
+ m.__iter__.return_value = iter([4, 5, 6])
+ self.assertEqual(list(m), [4, 5, 6])
+ self.assertEqual(list(m), [])
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/third_party/python/mock-1.0.0/tests/testmock.py b/third_party/python/mock-1.0.0/tests/testmock.py
new file mode 100644
index 0000000000..f3ceea9955
--- /dev/null
+++ b/third_party/python/mock-1.0.0/tests/testmock.py
@@ -0,0 +1,1351 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from tests.support import (
+ callable, unittest2, inPy3k, is_instance, next
+)
+
+import copy
+import pickle
+import sys
+
+import mock
+from mock import (
+ call, DEFAULT, patch, sentinel,
+ MagicMock, Mock, NonCallableMock,
+ NonCallableMagicMock, _CallList,
+ create_autospec
+)
+
+
+try:
+ unicode
+except NameError:
+ unicode = str
+
+
+class Iter(object):
+ def __init__(self):
+ self.thing = iter(['this', 'is', 'an', 'iter'])
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ return next(self.thing)
+
+ __next__ = next
+
+
+class Subclass(MagicMock):
+ pass
+
+
+class Thing(object):
+ attribute = 6
+ foo = 'bar'
+
+
+
+class MockTest(unittest2.TestCase):
+
+ def test_all(self):
+ # if __all__ is badly defined then import * will raise an error
+ # We have to exec it because you can't import * inside a method
+ # in Python 3
+ exec("from mock import *")
+
+
+ def test_constructor(self):
+ mock = Mock()
+
+ self.assertFalse(mock.called, "called not initialised correctly")
+ self.assertEqual(mock.call_count, 0,
+ "call_count not initialised correctly")
+ self.assertTrue(is_instance(mock.return_value, Mock),
+ "return_value not initialised correctly")
+
+ self.assertEqual(mock.call_args, None,
+ "call_args not initialised correctly")
+ self.assertEqual(mock.call_args_list, [],
+ "call_args_list not initialised correctly")
+ self.assertEqual(mock.method_calls, [],
+ "method_calls not initialised correctly")
+
+ # Can't use hasattr for this test as it always returns True on a mock
+ self.assertFalse('_items' in mock.__dict__,
+ "default mock should not have '_items' attribute")
+
+ self.assertIsNone(mock._mock_parent,
+ "parent not initialised correctly")
+ self.assertIsNone(mock._mock_methods,
+ "methods not initialised correctly")
+ self.assertEqual(mock._mock_children, {},
+                         "children not initialised correctly")
+
+
+ def test_unicode_not_broken(self):
+ # This used to raise an exception with Python 2.5 and Mock 0.4
+ unicode(Mock())
+
+
+ def test_return_value_in_constructor(self):
+ mock = Mock(return_value=None)
+ self.assertIsNone(mock.return_value,
+ "return value in constructor not honoured")
+
+
+ def test_repr(self):
+ mock = Mock(name='foo')
+ self.assertIn('foo', repr(mock))
+ self.assertIn("'%s'" % id(mock), repr(mock))
+
+ mocks = [(Mock(), 'mock'), (Mock(name='bar'), 'bar')]
+ for mock, name in mocks:
+ self.assertIn('%s.bar' % name, repr(mock.bar))
+ self.assertIn('%s.foo()' % name, repr(mock.foo()))
+ self.assertIn('%s.foo().bing' % name, repr(mock.foo().bing))
+ self.assertIn('%s()' % name, repr(mock()))
+ self.assertIn('%s()()' % name, repr(mock()()))
+ self.assertIn('%s()().foo.bar.baz().bing' % name,
+ repr(mock()().foo.bar.baz().bing))
+
+
+ def test_repr_with_spec(self):
+ class X(object):
+ pass
+
+ mock = Mock(spec=X)
+ self.assertIn(" spec='X' ", repr(mock))
+
+ mock = Mock(spec=X())
+ self.assertIn(" spec='X' ", repr(mock))
+
+ mock = Mock(spec_set=X)
+ self.assertIn(" spec_set='X' ", repr(mock))
+
+ mock = Mock(spec_set=X())
+ self.assertIn(" spec_set='X' ", repr(mock))
+
+ mock = Mock(spec=X, name='foo')
+ self.assertIn(" spec='X' ", repr(mock))
+ self.assertIn(" name='foo' ", repr(mock))
+
+ mock = Mock(name='foo')
+ self.assertNotIn("spec", repr(mock))
+
+ mock = Mock()
+ self.assertNotIn("spec", repr(mock))
+
+ mock = Mock(spec=['foo'])
+ self.assertNotIn("spec", repr(mock))
+
+
+ def test_side_effect(self):
+ mock = Mock()
+
+ def effect(*args, **kwargs):
+ raise SystemError('kablooie')
+
+ mock.side_effect = effect
+ self.assertRaises(SystemError, mock, 1, 2, fish=3)
+ mock.assert_called_with(1, 2, fish=3)
+
+ results = [1, 2, 3]
+ def effect():
+ return results.pop()
+ mock.side_effect = effect
+
+ self.assertEqual([mock(), mock(), mock()], [3, 2, 1],
+ "side effect not used correctly")
+
+ mock = Mock(side_effect=sentinel.SideEffect)
+ self.assertEqual(mock.side_effect, sentinel.SideEffect,
+ "side effect in constructor not used")
+
+ def side_effect():
+ return DEFAULT
+ mock = Mock(side_effect=side_effect, return_value=sentinel.RETURN)
+ self.assertEqual(mock(), sentinel.RETURN)
+
+
+ @unittest2.skipUnless('java' in sys.platform,
+ 'This test only applies to Jython')
+ def test_java_exception_side_effect(self):
+ import java
+ mock = Mock(side_effect=java.lang.RuntimeException("Boom!"))
+
+ # can't use assertRaises with java exceptions
+ try:
+ mock(1, 2, fish=3)
+ except java.lang.RuntimeException:
+ pass
+ else:
+ self.fail('java exception not raised')
+        mock.assert_called_with(1, 2, fish=3)
+
+
+ def test_reset_mock(self):
+ parent = Mock()
+ spec = ["something"]
+ mock = Mock(name="child", parent=parent, spec=spec)
+ mock(sentinel.Something, something=sentinel.SomethingElse)
+ something = mock.something
+ mock.something()
+ mock.side_effect = sentinel.SideEffect
+ return_value = mock.return_value
+ return_value()
+
+ mock.reset_mock()
+
+ self.assertEqual(mock._mock_name, "child",
+ "name incorrectly reset")
+ self.assertEqual(mock._mock_parent, parent,
+ "parent incorrectly reset")
+ self.assertEqual(mock._mock_methods, spec,
+ "methods incorrectly reset")
+
+ self.assertFalse(mock.called, "called not reset")
+ self.assertEqual(mock.call_count, 0, "call_count not reset")
+ self.assertEqual(mock.call_args, None, "call_args not reset")
+ self.assertEqual(mock.call_args_list, [], "call_args_list not reset")
+ self.assertEqual(mock.method_calls, [],
+ "method_calls not initialised correctly: %r != %r" %
+ (mock.method_calls, []))
+ self.assertEqual(mock.mock_calls, [])
+
+ self.assertEqual(mock.side_effect, sentinel.SideEffect,
+ "side_effect incorrectly reset")
+ self.assertEqual(mock.return_value, return_value,
+ "return_value incorrectly reset")
+ self.assertFalse(return_value.called, "return value mock not reset")
+ self.assertEqual(mock._mock_children, {'something': something},
+ "children reset incorrectly")
+ self.assertEqual(mock.something, something,
+ "children incorrectly cleared")
+ self.assertFalse(mock.something.called, "child not reset")
+
+
+ def test_reset_mock_recursion(self):
+ mock = Mock()
+ mock.return_value = mock
+
+ # used to cause recursion
+ mock.reset_mock()
+
+
+ def test_call(self):
+ mock = Mock()
+ self.assertTrue(is_instance(mock.return_value, Mock),
+ "Default return_value should be a Mock")
+
+ result = mock()
+ self.assertEqual(mock(), result,
+ "different result from consecutive calls")
+ mock.reset_mock()
+
+ ret_val = mock(sentinel.Arg)
+ self.assertTrue(mock.called, "called not set")
+        self.assertEqual(mock.call_count, 1, "call_count incorrect")
+ self.assertEqual(mock.call_args, ((sentinel.Arg,), {}),
+ "call_args not set")
+ self.assertEqual(mock.call_args_list, [((sentinel.Arg,), {})],
+ "call_args_list not initialised correctly")
+
+ mock.return_value = sentinel.ReturnValue
+ ret_val = mock(sentinel.Arg, key=sentinel.KeyArg)
+ self.assertEqual(ret_val, sentinel.ReturnValue,
+ "incorrect return value")
+
+ self.assertEqual(mock.call_count, 2, "call_count incorrect")
+ self.assertEqual(mock.call_args,
+ ((sentinel.Arg,), {'key': sentinel.KeyArg}),
+ "call_args not set")
+ self.assertEqual(mock.call_args_list, [
+ ((sentinel.Arg,), {}),
+ ((sentinel.Arg,), {'key': sentinel.KeyArg})
+ ],
+ "call_args_list not set")
+
+
+ def test_call_args_comparison(self):
+ mock = Mock()
+ mock()
+ mock(sentinel.Arg)
+ mock(kw=sentinel.Kwarg)
+ mock(sentinel.Arg, kw=sentinel.Kwarg)
+ self.assertEqual(mock.call_args_list, [
+ (),
+ ((sentinel.Arg,),),
+ ({"kw": sentinel.Kwarg},),
+ ((sentinel.Arg,), {"kw": sentinel.Kwarg})
+ ])
+ self.assertEqual(mock.call_args,
+ ((sentinel.Arg,), {"kw": sentinel.Kwarg}))
+
+
+ def test_assert_called_with(self):
+ mock = Mock()
+ mock()
+
+ # Will raise an exception if it fails
+ mock.assert_called_with()
+ self.assertRaises(AssertionError, mock.assert_called_with, 1)
+
+ mock.reset_mock()
+ self.assertRaises(AssertionError, mock.assert_called_with)
+
+ mock(1, 2, 3, a='fish', b='nothing')
+ mock.assert_called_with(1, 2, 3, a='fish', b='nothing')
+
+
+ def test_assert_called_once_with(self):
+ mock = Mock()
+ mock()
+
+ # Will raise an exception if it fails
+ mock.assert_called_once_with()
+
+ mock()
+ self.assertRaises(AssertionError, mock.assert_called_once_with)
+
+ mock.reset_mock()
+ self.assertRaises(AssertionError, mock.assert_called_once_with)
+
+ mock('foo', 'bar', baz=2)
+ mock.assert_called_once_with('foo', 'bar', baz=2)
+
+ mock.reset_mock()
+ mock('foo', 'bar', baz=2)
+ self.assertRaises(
+ AssertionError,
+ lambda: mock.assert_called_once_with('bob', 'bar', baz=2)
+ )
+
+
+ def test_attribute_access_returns_mocks(self):
+ mock = Mock()
+ something = mock.something
+ self.assertTrue(is_instance(something, Mock), "attribute isn't a mock")
+ self.assertEqual(mock.something, something,
+ "different attributes returned for same name")
+
+ # Usage example
+ mock = Mock()
+ mock.something.return_value = 3
+
+ self.assertEqual(mock.something(), 3, "method returned wrong value")
+ self.assertTrue(mock.something.called,
+ "method didn't record being called")
+
+
+ def test_attributes_have_name_and_parent_set(self):
+ mock = Mock()
+ something = mock.something
+
+ self.assertEqual(something._mock_name, "something",
+ "attribute name not set correctly")
+ self.assertEqual(something._mock_parent, mock,
+ "attribute parent not set correctly")
+
+
+ def test_method_calls_recorded(self):
+ mock = Mock()
+ mock.something(3, fish=None)
+ mock.something_else.something(6, cake=sentinel.Cake)
+
+ self.assertEqual(mock.something_else.method_calls,
+ [("something", (6,), {'cake': sentinel.Cake})],
+ "method calls not recorded correctly")
+ self.assertEqual(mock.method_calls, [
+ ("something", (3,), {'fish': None}),
+ ("something_else.something", (6,), {'cake': sentinel.Cake})
+ ],
+ "method calls not recorded correctly")
+
+
+ def test_method_calls_compare_easily(self):
+ mock = Mock()
+ mock.something()
+ self.assertEqual(mock.method_calls, [('something',)])
+ self.assertEqual(mock.method_calls, [('something', (), {})])
+
+ mock = Mock()
+ mock.something('different')
+ self.assertEqual(mock.method_calls, [('something', ('different',))])
+ self.assertEqual(mock.method_calls,
+ [('something', ('different',), {})])
+
+ mock = Mock()
+ mock.something(x=1)
+ self.assertEqual(mock.method_calls, [('something', {'x': 1})])
+ self.assertEqual(mock.method_calls, [('something', (), {'x': 1})])
+
+ mock = Mock()
+ mock.something('different', some='more')
+ self.assertEqual(mock.method_calls, [
+ ('something', ('different',), {'some': 'more'})
+ ])
+
+
+ def test_only_allowed_methods_exist(self):
+ for spec in ['something'], ('something',):
+ for arg in 'spec', 'spec_set':
+ mock = Mock(**{arg: spec})
+
+ # this should be allowed
+ mock.something
+ self.assertRaisesRegexp(
+ AttributeError,
+ "Mock object has no attribute 'something_else'",
+ getattr, mock, 'something_else'
+ )
+
+
+ def test_from_spec(self):
+ class Something(object):
+ x = 3
+ __something__ = None
+ def y(self):
+ pass
+
+ def test_attributes(mock):
+ # should work
+ mock.x
+ mock.y
+ mock.__something__
+ self.assertRaisesRegexp(
+ AttributeError,
+ "Mock object has no attribute 'z'",
+ getattr, mock, 'z'
+ )
+ self.assertRaisesRegexp(
+ AttributeError,
+ "Mock object has no attribute '__foobar__'",
+ getattr, mock, '__foobar__'
+ )
+
+ test_attributes(Mock(spec=Something))
+ test_attributes(Mock(spec=Something()))
+
+
+ def test_wraps_calls(self):
+ real = Mock()
+
+ mock = Mock(wraps=real)
+ self.assertEqual(mock(), real())
+
+ real.reset_mock()
+
+ mock(1, 2, fish=3)
+ real.assert_called_with(1, 2, fish=3)
+
+
+ def test_wraps_call_with_nondefault_return_value(self):
+ real = Mock()
+
+ mock = Mock(wraps=real)
+ mock.return_value = 3
+
+ self.assertEqual(mock(), 3)
+ self.assertFalse(real.called)
+
+
+ def test_wraps_attributes(self):
+ class Real(object):
+ attribute = Mock()
+
+ real = Real()
+
+ mock = Mock(wraps=real)
+ self.assertEqual(mock.attribute(), real.attribute())
+ self.assertRaises(AttributeError, lambda: mock.fish)
+
+ self.assertNotEqual(mock.attribute, real.attribute)
+ result = mock.attribute.frog(1, 2, fish=3)
+ Real.attribute.frog.assert_called_with(1, 2, fish=3)
+ self.assertEqual(result, Real.attribute.frog())
+
+
+ def test_exceptional_side_effect(self):
+ mock = Mock(side_effect=AttributeError)
+ self.assertRaises(AttributeError, mock)
+
+ mock = Mock(side_effect=AttributeError('foo'))
+ self.assertRaises(AttributeError, mock)
+
+
+ def test_baseexceptional_side_effect(self):
+ mock = Mock(side_effect=KeyboardInterrupt)
+ self.assertRaises(KeyboardInterrupt, mock)
+
+ mock = Mock(side_effect=KeyboardInterrupt('foo'))
+ self.assertRaises(KeyboardInterrupt, mock)
+
+
+ def test_assert_called_with_message(self):
+ mock = Mock()
+ self.assertRaisesRegexp(AssertionError, 'Not called',
+ mock.assert_called_with)
+
+
+ def test__name__(self):
+ mock = Mock()
+ self.assertRaises(AttributeError, lambda: mock.__name__)
+
+ mock.__name__ = 'foo'
+ self.assertEqual(mock.__name__, 'foo')
+
+
+ def test_spec_list_subclass(self):
+ class Sub(list):
+ pass
+ mock = Mock(spec=Sub(['foo']))
+
+ mock.append(3)
+ mock.append.assert_called_with(3)
+ self.assertRaises(AttributeError, getattr, mock, 'foo')
+
+
+ def test_spec_class(self):
+ class X(object):
+ pass
+
+ mock = Mock(spec=X)
+ self.assertTrue(isinstance(mock, X))
+
+ mock = Mock(spec=X())
+ self.assertTrue(isinstance(mock, X))
+
+ self.assertIs(mock.__class__, X)
+ self.assertEqual(Mock().__class__.__name__, 'Mock')
+
+ mock = Mock(spec_set=X)
+ self.assertTrue(isinstance(mock, X))
+
+ mock = Mock(spec_set=X())
+ self.assertTrue(isinstance(mock, X))
+
+
+ def test_setting_attribute_with_spec_set(self):
+ class X(object):
+ y = 3
+
+ mock = Mock(spec=X)
+ mock.x = 'foo'
+
+ mock = Mock(spec_set=X)
+ def set_attr():
+ mock.x = 'foo'
+
+ mock.y = 'foo'
+ self.assertRaises(AttributeError, set_attr)
+
+
+ def test_copy(self):
+ current = sys.getrecursionlimit()
+ self.addCleanup(sys.setrecursionlimit, current)
+
+ # can't use sys.maxint as this doesn't exist in Python 3
+ sys.setrecursionlimit(int(10e8))
+ # this segfaults without the fix in place
+ copy.copy(Mock())
+
+
+ @unittest2.skipIf(inPy3k, "no old style classes in Python 3")
+ def test_spec_old_style_classes(self):
+ class Foo:
+ bar = 7
+
+ mock = Mock(spec=Foo)
+ mock.bar = 6
+ self.assertRaises(AttributeError, lambda: mock.foo)
+
+ mock = Mock(spec=Foo())
+ mock.bar = 6
+ self.assertRaises(AttributeError, lambda: mock.foo)
+
+
+ @unittest2.skipIf(inPy3k, "no old style classes in Python 3")
+ def test_spec_set_old_style_classes(self):
+ class Foo:
+ bar = 7
+
+ mock = Mock(spec_set=Foo)
+ mock.bar = 6
+ self.assertRaises(AttributeError, lambda: mock.foo)
+
+ def _set():
+ mock.foo = 3
+ self.assertRaises(AttributeError, _set)
+
+ mock = Mock(spec_set=Foo())
+ mock.bar = 6
+ self.assertRaises(AttributeError, lambda: mock.foo)
+
+ def _set():
+ mock.foo = 3
+ self.assertRaises(AttributeError, _set)
+
+
+ def test_subclass_with_properties(self):
+ class SubClass(Mock):
+ def _get(self):
+ return 3
+ def _set(self, value):
+ raise NameError('strange error')
+ some_attribute = property(_get, _set)
+
+ s = SubClass(spec_set=SubClass)
+ self.assertEqual(s.some_attribute, 3)
+
+ def test():
+ s.some_attribute = 3
+ self.assertRaises(NameError, test)
+
+ def test():
+ s.foo = 'bar'
+ self.assertRaises(AttributeError, test)
+
+
+ def test_setting_call(self):
+ mock = Mock()
+ def __call__(self, a):
+ return self._mock_call(a)
+
+ type(mock).__call__ = __call__
+ mock('one')
+ mock.assert_called_with('one')
+
+ self.assertRaises(TypeError, mock, 'one', 'two')
+
+
+ @unittest2.skipUnless(sys.version_info[:2] >= (2, 6),
+ "__dir__ not available until Python 2.6 or later")
+ def test_dir(self):
+ mock = Mock()
+ attrs = set(dir(mock))
+ type_attrs = set([m for m in dir(Mock) if not m.startswith('_')])
+
+ # all public attributes from the type are included
+ self.assertEqual(set(), type_attrs - attrs)
+
+ # creates these attributes
+ mock.a, mock.b
+ self.assertIn('a', dir(mock))
+ self.assertIn('b', dir(mock))
+
+ # instance attributes
+ mock.c = mock.d = None
+ self.assertIn('c', dir(mock))
+ self.assertIn('d', dir(mock))
+
+ # magic methods
+ mock.__iter__ = lambda s: iter([])
+ self.assertIn('__iter__', dir(mock))
+
+
+ @unittest2.skipUnless(sys.version_info[:2] >= (2, 6),
+ "__dir__ not available until Python 2.6 or later")
+ def test_dir_from_spec(self):
+ mock = Mock(spec=unittest2.TestCase)
+ testcase_attrs = set(dir(unittest2.TestCase))
+ attrs = set(dir(mock))
+
+ # all attributes from the spec are included
+ self.assertEqual(set(), testcase_attrs - attrs)
+
+        # setting an attribute on the mock should not duplicate it in dir()
+ mock.version = 3
+ self.assertEqual(dir(mock).count('version'), 1)
+
+
+ @unittest2.skipUnless(sys.version_info[:2] >= (2, 6),
+ "__dir__ not available until Python 2.6 or later")
+ def test_filter_dir(self):
+ patcher = patch.object(mock, 'FILTER_DIR', False)
+ patcher.start()
+ try:
+ attrs = set(dir(Mock()))
+ type_attrs = set(dir(Mock))
+
+ # ALL attributes from the type are included
+ self.assertEqual(set(), type_attrs - attrs)
+ finally:
+ patcher.stop()
+
+
+ def test_configure_mock(self):
+ mock = Mock(foo='bar')
+ self.assertEqual(mock.foo, 'bar')
+
+ mock = MagicMock(foo='bar')
+ self.assertEqual(mock.foo, 'bar')
+
+ kwargs = {'side_effect': KeyError, 'foo.bar.return_value': 33,
+ 'foo': MagicMock()}
+ mock = Mock(**kwargs)
+ self.assertRaises(KeyError, mock)
+ self.assertEqual(mock.foo.bar(), 33)
+ self.assertIsInstance(mock.foo, MagicMock)
+
+ mock = Mock()
+ mock.configure_mock(**kwargs)
+ self.assertRaises(KeyError, mock)
+ self.assertEqual(mock.foo.bar(), 33)
+ self.assertIsInstance(mock.foo, MagicMock)
+
+
+ def assertRaisesWithMsg(self, exception, message, func, *args, **kwargs):
+ # needed because assertRaisesRegex doesn't work easily with newlines
+ try:
+ func(*args, **kwargs)
+ except:
+ instance = sys.exc_info()[1]
+ self.assertIsInstance(instance, exception)
+ else:
+ self.fail('Exception %r not raised' % (exception,))
+
+ msg = str(instance)
+ self.assertEqual(msg, message)
+
+
+ def test_assert_called_with_failure_message(self):
+ mock = NonCallableMock()
+
+ expected = "mock(1, '2', 3, bar='foo')"
+ message = 'Expected call: %s\nNot called'
+ self.assertRaisesWithMsg(
+ AssertionError, message % (expected,),
+ mock.assert_called_with, 1, '2', 3, bar='foo'
+ )
+
+ mock.foo(1, '2', 3, foo='foo')
+
+
+ asserters = [
+ mock.foo.assert_called_with, mock.foo.assert_called_once_with
+ ]
+ for meth in asserters:
+ actual = "foo(1, '2', 3, foo='foo')"
+ expected = "foo(1, '2', 3, bar='foo')"
+ message = 'Expected call: %s\nActual call: %s'
+ self.assertRaisesWithMsg(
+ AssertionError, message % (expected, actual),
+ meth, 1, '2', 3, bar='foo'
+ )
+
+ # just kwargs
+ for meth in asserters:
+ actual = "foo(1, '2', 3, foo='foo')"
+ expected = "foo(bar='foo')"
+ message = 'Expected call: %s\nActual call: %s'
+ self.assertRaisesWithMsg(
+ AssertionError, message % (expected, actual),
+ meth, bar='foo'
+ )
+
+ # just args
+ for meth in asserters:
+ actual = "foo(1, '2', 3, foo='foo')"
+ expected = "foo(1, 2, 3)"
+ message = 'Expected call: %s\nActual call: %s'
+ self.assertRaisesWithMsg(
+ AssertionError, message % (expected, actual),
+ meth, 1, 2, 3
+ )
+
+ # empty
+ for meth in asserters:
+ actual = "foo(1, '2', 3, foo='foo')"
+ expected = "foo()"
+ message = 'Expected call: %s\nActual call: %s'
+ self.assertRaisesWithMsg(
+ AssertionError, message % (expected, actual), meth
+ )
+
+
+ def test_mock_calls(self):
+ mock = MagicMock()
+
+ # need to do this because MagicMock.mock_calls used to just return
+ # a MagicMock which also returned a MagicMock when __eq__ was called
+ self.assertIs(mock.mock_calls == [], True)
+
+ mock = MagicMock()
+ mock()
+ expected = [('', (), {})]
+ self.assertEqual(mock.mock_calls, expected)
+
+ mock.foo()
+ expected.append(call.foo())
+ self.assertEqual(mock.mock_calls, expected)
+ # intermediate mock_calls work too
+ self.assertEqual(mock.foo.mock_calls, [('', (), {})])
+
+ mock = MagicMock()
+ mock().foo(1, 2, 3, a=4, b=5)
+ expected = [
+ ('', (), {}), ('().foo', (1, 2, 3), dict(a=4, b=5))
+ ]
+ self.assertEqual(mock.mock_calls, expected)
+ self.assertEqual(mock.return_value.foo.mock_calls,
+ [('', (1, 2, 3), dict(a=4, b=5))])
+ self.assertEqual(mock.return_value.mock_calls,
+ [('foo', (1, 2, 3), dict(a=4, b=5))])
+
+ mock = MagicMock()
+ mock().foo.bar().baz()
+ expected = [
+ ('', (), {}), ('().foo.bar', (), {}),
+ ('().foo.bar().baz', (), {})
+ ]
+ self.assertEqual(mock.mock_calls, expected)
+ self.assertEqual(mock().mock_calls,
+ call.foo.bar().baz().call_list())
+
+ for kwargs in dict(), dict(name='bar'):
+ mock = MagicMock(**kwargs)
+ int(mock.foo)
+ expected = [('foo.__int__', (), {})]
+ self.assertEqual(mock.mock_calls, expected)
+
+ mock = MagicMock(**kwargs)
+ mock.a()()
+ expected = [('a', (), {}), ('a()', (), {})]
+ self.assertEqual(mock.mock_calls, expected)
+ self.assertEqual(mock.a().mock_calls, [call()])
+
+ mock = MagicMock(**kwargs)
+ mock(1)(2)(3)
+ self.assertEqual(mock.mock_calls, call(1)(2)(3).call_list())
+ self.assertEqual(mock().mock_calls, call(2)(3).call_list())
+ self.assertEqual(mock()().mock_calls, call(3).call_list())
+
+ mock = MagicMock(**kwargs)
+ mock(1)(2)(3).a.b.c(4)
+ self.assertEqual(mock.mock_calls,
+ call(1)(2)(3).a.b.c(4).call_list())
+ self.assertEqual(mock().mock_calls,
+ call(2)(3).a.b.c(4).call_list())
+ self.assertEqual(mock()().mock_calls,
+ call(3).a.b.c(4).call_list())
+
+ mock = MagicMock(**kwargs)
+ int(mock().foo.bar().baz())
+ last_call = ('().foo.bar().baz().__int__', (), {})
+ self.assertEqual(mock.mock_calls[-1], last_call)
+ self.assertEqual(mock().mock_calls,
+ call.foo.bar().baz().__int__().call_list())
+ self.assertEqual(mock().foo.bar().mock_calls,
+ call.baz().__int__().call_list())
+ self.assertEqual(mock().foo.bar().baz.mock_calls,
+ call().__int__().call_list())
+
+
+ def test_subclassing(self):
+ class Subclass(Mock):
+ pass
+
+ mock = Subclass()
+ self.assertIsInstance(mock.foo, Subclass)
+ self.assertIsInstance(mock(), Subclass)
+
+ class Subclass(Mock):
+ def _get_child_mock(self, **kwargs):
+ return Mock(**kwargs)
+
+ mock = Subclass()
+ self.assertNotIsInstance(mock.foo, Subclass)
+ self.assertNotIsInstance(mock(), Subclass)
+
+
+ def test_arg_lists(self):
+ mocks = [
+ Mock(),
+ MagicMock(),
+ NonCallableMock(),
+ NonCallableMagicMock()
+ ]
+
+ def assert_attrs(mock):
+ names = 'call_args_list', 'method_calls', 'mock_calls'
+ for name in names:
+ attr = getattr(mock, name)
+ self.assertIsInstance(attr, _CallList)
+ self.assertIsInstance(attr, list)
+ self.assertEqual(attr, [])
+
+ for mock in mocks:
+ assert_attrs(mock)
+
+ if callable(mock):
+ mock()
+ mock(1, 2)
+ mock(a=3)
+
+ mock.reset_mock()
+ assert_attrs(mock)
+
+ mock.foo()
+ mock.foo.bar(1, a=3)
+ mock.foo(1).bar().baz(3)
+
+ mock.reset_mock()
+ assert_attrs(mock)
+
+
+ def test_call_args_two_tuple(self):
+ mock = Mock()
+ mock(1, a=3)
+ mock(2, b=4)
+
+ self.assertEqual(len(mock.call_args), 2)
+ args, kwargs = mock.call_args
+ self.assertEqual(args, (2,))
+ self.assertEqual(kwargs, dict(b=4))
+
+ expected_list = [((1,), dict(a=3)), ((2,), dict(b=4))]
+ for expected, call_args in zip(expected_list, mock.call_args_list):
+ self.assertEqual(len(call_args), 2)
+ self.assertEqual(expected[0], call_args[0])
+ self.assertEqual(expected[1], call_args[1])
+
+
+ def test_side_effect_iterator(self):
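+        # an iterable side_effect yields one value per call and raises
+        # StopIteration once it is exhausted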
+ mock = Mock(side_effect=iter([1, 2, 3]))
+ self.assertEqual([mock(), mock(), mock()], [1, 2, 3])
+ self.assertRaises(StopIteration, mock)
+
+ mock = MagicMock(side_effect=['a', 'b', 'c'])
+ self.assertEqual([mock(), mock(), mock()], ['a', 'b', 'c'])
+ self.assertRaises(StopIteration, mock)
+
+ mock = Mock(side_effect='ghi')
+ self.assertEqual([mock(), mock(), mock()], ['g', 'h', 'i'])
+ self.assertRaises(StopIteration, mock)
+
+ class Foo(object):
+ pass
+ mock = MagicMock(side_effect=Foo)
+ self.assertIsInstance(mock(), Foo)
+
+ mock = Mock(side_effect=Iter())
+ self.assertEqual([mock(), mock(), mock(), mock()],
+ ['this', 'is', 'an', 'iter'])
+ self.assertRaises(StopIteration, mock)
+
+
+ def test_side_effect_setting_iterator(self):
+ mock = Mock()
+ mock.side_effect = iter([1, 2, 3])
+ self.assertEqual([mock(), mock(), mock()], [1, 2, 3])
+ self.assertRaises(StopIteration, mock)
+ side_effect = mock.side_effect
+ self.assertIsInstance(side_effect, type(iter([])))
+
+ mock.side_effect = ['a', 'b', 'c']
+ self.assertEqual([mock(), mock(), mock()], ['a', 'b', 'c'])
+ self.assertRaises(StopIteration, mock)
+ side_effect = mock.side_effect
+ self.assertIsInstance(side_effect, type(iter([])))
+
+ this_iter = Iter()
+ mock.side_effect = this_iter
+ self.assertEqual([mock(), mock(), mock(), mock()],
+ ['this', 'is', 'an', 'iter'])
+ self.assertRaises(StopIteration, mock)
+ self.assertIs(mock.side_effect, this_iter)
+
+
+ def test_side_effect_iterator_exceptions(self):
+ for Klass in Mock, MagicMock:
+ iterable = (ValueError, 3, KeyError, 6)
+ m = Klass(side_effect=iterable)
+ self.assertRaises(ValueError, m)
+ self.assertEqual(m(), 3)
+ self.assertRaises(KeyError, m)
+ self.assertEqual(m(), 6)
+
+
+ def test_assert_has_calls_any_order(self):
+ mock = Mock()
+ mock(1, 2)
+ mock(a=3)
+ mock(3, 4)
+ mock(b=6)
+ mock(b=6)
+
+ kalls = [
+ call(1, 2), ({'a': 3},),
+ ((3, 4),), ((), {'a': 3}),
+ ('', (1, 2)), ('', {'a': 3}),
+ ('', (1, 2), {}), ('', (), {'a': 3})
+ ]
+ for kall in kalls:
+ mock.assert_has_calls([kall], any_order=True)
+
+ for kall in call(1, '2'), call(b=3), call(), 3, None, 'foo':
+ self.assertRaises(
+ AssertionError, mock.assert_has_calls,
+ [kall], any_order=True
+ )
+
+ kall_lists = [
+ [call(1, 2), call(b=6)],
+ [call(3, 4), call(1, 2)],
+ [call(b=6), call(b=6)],
+ ]
+
+ for kall_list in kall_lists:
+ mock.assert_has_calls(kall_list, any_order=True)
+
+ kall_lists = [
+ [call(b=6), call(b=6), call(b=6)],
+ [call(1, 2), call(1, 2)],
+ [call(3, 4), call(1, 2), call(5, 7)],
+ [call(b=6), call(3, 4), call(b=6), call(1, 2), call(b=6)],
+ ]
+ for kall_list in kall_lists:
+ self.assertRaises(
+ AssertionError, mock.assert_has_calls,
+ kall_list, any_order=True
+ )
+
+ def test_assert_has_calls(self):
+ kalls1 = [
+ call(1, 2), ({'a': 3},),
+ ((3, 4),), call(b=6),
+ ('', (1,), {'b': 6}),
+ ]
+ kalls2 = [call.foo(), call.bar(1)]
+ kalls2.extend(call.spam().baz(a=3).call_list())
+ kalls2.extend(call.bam(set(), foo={}).fish([1]).call_list())
+
+ mocks = []
+ for mock in Mock(), MagicMock():
+ mock(1, 2)
+ mock(a=3)
+ mock(3, 4)
+ mock(b=6)
+ mock(1, b=6)
+ mocks.append((mock, kalls1))
+
+ mock = Mock()
+ mock.foo()
+ mock.bar(1)
+ mock.spam().baz(a=3)
+ mock.bam(set(), foo={}).fish([1])
+ mocks.append((mock, kalls2))
+
+ for mock, kalls in mocks:
+ for i in range(len(kalls)):
+ for step in 1, 2, 3:
+ these = kalls[i:i+step]
+ mock.assert_has_calls(these)
+
+ if len(these) > 1:
+ self.assertRaises(
+ AssertionError,
+ mock.assert_has_calls,
+ list(reversed(these))
+ )
+
+
+ def test_assert_any_call(self):
+ mock = Mock()
+ mock(1, 2)
+ mock(a=3)
+ mock(1, b=6)
+
+ mock.assert_any_call(1, 2)
+ mock.assert_any_call(a=3)
+ mock.assert_any_call(1, b=6)
+
+ self.assertRaises(
+ AssertionError,
+ mock.assert_any_call
+ )
+ self.assertRaises(
+ AssertionError,
+ mock.assert_any_call,
+ 1, 3
+ )
+ self.assertRaises(
+ AssertionError,
+ mock.assert_any_call,
+ a=4
+ )
+
+
+ def test_mock_calls_create_autospec(self):
+ def f(a, b):
+ pass
+ obj = Iter()
+ obj.f = f
+
+ funcs = [
+ create_autospec(f),
+ create_autospec(obj).f
+ ]
+ for func in funcs:
+ func(1, 2)
+ func(3, 4)
+
+ self.assertEqual(
+ func.mock_calls, [call(1, 2), call(3, 4)]
+ )
+
+
+ def test_mock_add_spec(self):
+ class _One(object):
+ one = 1
+ class _Two(object):
+ two = 2
+ class Anything(object):
+ one = two = three = 'four'
+
+ klasses = [
+ Mock, MagicMock, NonCallableMock, NonCallableMagicMock
+ ]
+ for Klass in list(klasses):
+ klasses.append(lambda K=Klass: K(spec=Anything))
+ klasses.append(lambda K=Klass: K(spec_set=Anything))
+
+ for Klass in klasses:
+ for kwargs in dict(), dict(spec_set=True):
+ mock = Klass()
+ #no error
+ mock.one, mock.two, mock.three
+
+ for One, Two in [(_One, _Two), (['one'], ['two'])]:
+ for kwargs in dict(), dict(spec_set=True):
+ mock.mock_add_spec(One, **kwargs)
+
+ mock.one
+ self.assertRaises(
+ AttributeError, getattr, mock, 'two'
+ )
+ self.assertRaises(
+ AttributeError, getattr, mock, 'three'
+ )
+ if 'spec_set' in kwargs:
+ self.assertRaises(
+ AttributeError, setattr, mock, 'three', None
+ )
+
+ mock.mock_add_spec(Two, **kwargs)
+ self.assertRaises(
+ AttributeError, getattr, mock, 'one'
+ )
+ mock.two
+ self.assertRaises(
+ AttributeError, getattr, mock, 'three'
+ )
+ if 'spec_set' in kwargs:
+ self.assertRaises(
+ AttributeError, setattr, mock, 'three', None
+ )
+ # note that creating a mock, setting an instance attribute, and
+ # *then* setting a spec doesn't work. Not the intended use case
+
+
+ def test_mock_add_spec_magic_methods(self):
+ for Klass in MagicMock, NonCallableMagicMock:
+ mock = Klass()
+ int(mock)
+
+ mock.mock_add_spec(object)
+ self.assertRaises(TypeError, int, mock)
+
+ mock = Klass()
+ mock['foo']
+            mock.__int__.return_value = 4
+
+ mock.mock_add_spec(int)
+ self.assertEqual(int(mock), 4)
+ self.assertRaises(TypeError, lambda: mock['foo'])
+
+
+ def test_adding_child_mock(self):
+ for Klass in NonCallableMock, Mock, MagicMock, NonCallableMagicMock:
+ mock = Klass()
+
+ mock.foo = Mock()
+ mock.foo()
+
+ self.assertEqual(mock.method_calls, [call.foo()])
+ self.assertEqual(mock.mock_calls, [call.foo()])
+
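+            # a mock created with an explicit name is not treated as a child,
+            # so its calls are not recorded on the parent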
+ mock = Klass()
+ mock.bar = Mock(name='name')
+ mock.bar()
+ self.assertEqual(mock.method_calls, [])
+ self.assertEqual(mock.mock_calls, [])
+
+ # mock with an existing _new_parent but no name
+ mock = Klass()
+ mock.baz = MagicMock()()
+ mock.baz()
+ self.assertEqual(mock.method_calls, [])
+ self.assertEqual(mock.mock_calls, [])
+
+
+ def test_adding_return_value_mock(self):
+ for Klass in Mock, MagicMock:
+ mock = Klass()
+ mock.return_value = MagicMock()
+
+ mock()()
+ self.assertEqual(mock.mock_calls, [call(), call()()])
+
+
+ def test_manager_mock(self):
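+        # attaching independently patched mocks to a manager mock allows the
+        # relative order of their calls to be asserted in one place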
+ class Foo(object):
+ one = 'one'
+ two = 'two'
+ manager = Mock()
+ p1 = patch.object(Foo, 'one')
+ p2 = patch.object(Foo, 'two')
+
+ mock_one = p1.start()
+ self.addCleanup(p1.stop)
+ mock_two = p2.start()
+ self.addCleanup(p2.stop)
+
+ manager.attach_mock(mock_one, 'one')
+ manager.attach_mock(mock_two, 'two')
+
+ Foo.two()
+ Foo.one()
+
+ self.assertEqual(manager.mock_calls, [call.two(), call.one()])
+
+
+ def test_magic_methods_mock_calls(self):
+ for Klass in Mock, MagicMock:
+ m = Klass()
+ m.__int__ = Mock(return_value=3)
+ m.__float__ = MagicMock(return_value=3.0)
+ int(m)
+ float(m)
+
+ self.assertEqual(m.mock_calls, [call.__int__(), call.__float__()])
+ self.assertEqual(m.method_calls, [])
+
+
+ def test_attribute_deletion(self):
+ # this behaviour isn't *useful*, but at least it's now tested...
+ for Klass in Mock, MagicMock, NonCallableMagicMock, NonCallableMock:
+ m = Klass()
+ original = m.foo
+ m.foo = 3
+ del m.foo
+ self.assertEqual(m.foo, original)
+
+ new = m.foo = Mock()
+ del m.foo
+ self.assertEqual(m.foo, new)
+
+
+ def test_mock_parents(self):
+ for Klass in Mock, MagicMock:
+ m = Klass()
+ original_repr = repr(m)
+ m.return_value = m
+ self.assertIs(m(), m)
+ self.assertEqual(repr(m), original_repr)
+
+ m.reset_mock()
+ self.assertIs(m(), m)
+ self.assertEqual(repr(m), original_repr)
+
+ m = Klass()
+ m.b = m.a
+ self.assertIn("name='mock.a'", repr(m.b))
+ self.assertIn("name='mock.a'", repr(m.a))
+ m.reset_mock()
+ self.assertIn("name='mock.a'", repr(m.b))
+ self.assertIn("name='mock.a'", repr(m.a))
+
+ m = Klass()
+ original_repr = repr(m)
+ m.a = m()
+ m.a.return_value = m
+
+ self.assertEqual(repr(m), original_repr)
+ self.assertEqual(repr(m.a()), original_repr)
+
+
+ def test_attach_mock(self):
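+        # attach_mock reparents and renames the attached mock so its calls
+        # are recorded in the parent's mock_calls and method_calls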
+ classes = Mock, MagicMock, NonCallableMagicMock, NonCallableMock
+ for Klass in classes:
+ for Klass2 in classes:
+ m = Klass()
+
+ m2 = Klass2(name='foo')
+ m.attach_mock(m2, 'bar')
+
+ self.assertIs(m.bar, m2)
+ self.assertIn("name='mock.bar'", repr(m2))
+
+ m.bar.baz(1)
+ self.assertEqual(m.mock_calls, [call.bar.baz(1)])
+ self.assertEqual(m.method_calls, [call.bar.baz(1)])
+
+
+ def test_attach_mock_return_value(self):
+ classes = Mock, MagicMock, NonCallableMagicMock, NonCallableMock
+ for Klass in Mock, MagicMock:
+ for Klass2 in classes:
+ m = Klass()
+
+ m2 = Klass2(name='foo')
+ m.attach_mock(m2, 'return_value')
+
+ self.assertIs(m(), m2)
+ self.assertIn("name='mock()'", repr(m2))
+
+ m2.foo()
+ self.assertEqual(m.mock_calls, call().foo().call_list())
+
+
+ def test_attribute_deletion(self):
+ for mock in Mock(), MagicMock():
+ self.assertTrue(hasattr(mock, 'm'))
+
+ del mock.m
+ self.assertFalse(hasattr(mock, 'm'))
+
+ del mock.f
+ self.assertFalse(hasattr(mock, 'f'))
+ self.assertRaises(AttributeError, getattr, mock, 'f')
+
+
+ def test_class_assignable(self):
+ for mock in Mock(), MagicMock():
+ self.assertNotIsInstance(mock, int)
+
+ mock.__class__ = int
+ self.assertIsInstance(mock, int)
+
+
+ @unittest2.expectedFailure
+ def test_pickle(self):
+ for Klass in (MagicMock, Mock, Subclass, NonCallableMagicMock):
+ mock = Klass(name='foo', attribute=3)
+ mock.foo(1, 2, 3)
+ data = pickle.dumps(mock)
+ new = pickle.loads(data)
+
+ new.foo.assert_called_once_with(1, 2, 3)
+ self.assertFalse(new.called)
+ self.assertTrue(is_instance(new, Klass))
+ self.assertIsInstance(new, Thing)
+ self.assertIn('name="foo"', repr(new))
+ self.assertEqual(new.attribute, 3)
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/third_party/python/mock-1.0.0/tests/testpatch.py b/third_party/python/mock-1.0.0/tests/testpatch.py
new file mode 100644
index 0000000000..1ebe671065
--- /dev/null
+++ b/third_party/python/mock-1.0.0/tests/testpatch.py
@@ -0,0 +1,1790 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+import os
+import sys
+
+from tests import support
+from tests.support import unittest2, inPy3k, SomeClass, is_instance, callable
+
+from mock import (
+ NonCallableMock, CallableMixin, patch, sentinel,
+ MagicMock, Mock, NonCallableMagicMock, patch, _patch,
+ DEFAULT, call, _get_target
+)
+
+builtin_string = '__builtin__'
+if inPy3k:
+ builtin_string = 'builtins'
+ unicode = str
+
+PTModule = sys.modules[__name__]
+MODNAME = '%s.PTModule' % __name__
+
+
+def _get_proxy(obj, get_only=True):
+ class Proxy(object):
+ def __getattr__(self, name):
+ return getattr(obj, name)
+ if not get_only:
+ def __setattr__(self, name, value):
+ setattr(obj, name, value)
+ def __delattr__(self, name):
+ delattr(obj, name)
+ Proxy.__setattr__ = __setattr__
+ Proxy.__delattr__ = __delattr__
+ return Proxy()
+
+
+# for use in the test
+something = sentinel.Something
+something_else = sentinel.SomethingElse
+
+
+class Foo(object):
+ def __init__(self, a):
+ pass
+ def f(self, a):
+ pass
+ def g(self):
+ pass
+ foo = 'bar'
+
+ class Bar(object):
+ def a(self):
+ pass
+
+foo_name = '%s.Foo' % __name__
+
+
+def function(a, b=Foo):
+ pass
+
+
+class Container(object):
+ def __init__(self):
+ self.values = {}
+
+ def __getitem__(self, name):
+ return self.values[name]
+
+ def __setitem__(self, name, value):
+ self.values[name] = value
+
+ def __delitem__(self, name):
+ del self.values[name]
+
+ def __iter__(self):
+ return iter(self.values)
+
+
+
+class PatchTest(unittest2.TestCase):
+
+ def assertNotCallable(self, obj, magic=True):
+ MockClass = NonCallableMagicMock
+ if not magic:
+ MockClass = NonCallableMock
+
+ self.assertRaises(TypeError, obj)
+ self.assertTrue(is_instance(obj, MockClass))
+ self.assertFalse(is_instance(obj, CallableMixin))
+
+
+ def test_single_patchobject(self):
+ class Something(object):
+ attribute = sentinel.Original
+
+ @patch.object(Something, 'attribute', sentinel.Patched)
+ def test():
+ self.assertEqual(Something.attribute, sentinel.Patched, "unpatched")
+
+ test()
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "patch not restored")
+
+
+ def test_patchobject_with_none(self):
+ class Something(object):
+ attribute = sentinel.Original
+
+ @patch.object(Something, 'attribute', None)
+ def test():
+ self.assertIsNone(Something.attribute, "unpatched")
+
+ test()
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "patch not restored")
+
+
+ def test_multiple_patchobject(self):
+ class Something(object):
+ attribute = sentinel.Original
+ next_attribute = sentinel.Original2
+
+ @patch.object(Something, 'attribute', sentinel.Patched)
+ @patch.object(Something, 'next_attribute', sentinel.Patched2)
+ def test():
+ self.assertEqual(Something.attribute, sentinel.Patched,
+ "unpatched")
+ self.assertEqual(Something.next_attribute, sentinel.Patched2,
+ "unpatched")
+
+ test()
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "patch not restored")
+ self.assertEqual(Something.next_attribute, sentinel.Original2,
+ "patch not restored")
+
+
+ def test_object_lookup_is_quite_lazy(self):
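+        # the patch target is resolved when the patcher starts, not when the
+        # decorator is applied, so a later rebinding of the name is what gets
+        # patched and then restored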
+ global something
+ original = something
+ @patch('%s.something' % __name__, sentinel.Something2)
+ def test():
+ pass
+
+ try:
+ something = sentinel.replacement_value
+ test()
+ self.assertEqual(something, sentinel.replacement_value)
+ finally:
+ something = original
+
+
+ def test_patch(self):
+ @patch('%s.something' % __name__, sentinel.Something2)
+ def test():
+ self.assertEqual(PTModule.something, sentinel.Something2,
+ "unpatched")
+
+ test()
+ self.assertEqual(PTModule.something, sentinel.Something,
+ "patch not restored")
+
+ @patch('%s.something' % __name__, sentinel.Something2)
+ @patch('%s.something_else' % __name__, sentinel.SomethingElse)
+ def test():
+ self.assertEqual(PTModule.something, sentinel.Something2,
+ "unpatched")
+ self.assertEqual(PTModule.something_else, sentinel.SomethingElse,
+ "unpatched")
+
+ self.assertEqual(PTModule.something, sentinel.Something,
+ "patch not restored")
+ self.assertEqual(PTModule.something_else, sentinel.SomethingElse,
+ "patch not restored")
+
+ # Test the patching and restoring works a second time
+ test()
+
+ self.assertEqual(PTModule.something, sentinel.Something,
+ "patch not restored")
+ self.assertEqual(PTModule.something_else, sentinel.SomethingElse,
+ "patch not restored")
+
+ mock = Mock()
+ mock.return_value = sentinel.Handle
+ @patch('%s.open' % builtin_string, mock)
+ def test():
+ self.assertEqual(open('filename', 'r'), sentinel.Handle,
+ "open not patched")
+ test()
+ test()
+
+ self.assertNotEqual(open, mock, "patch not restored")
+
+
+ def test_patch_class_attribute(self):
+ @patch('%s.SomeClass.class_attribute' % __name__,
+ sentinel.ClassAttribute)
+ def test():
+ self.assertEqual(PTModule.SomeClass.class_attribute,
+ sentinel.ClassAttribute, "unpatched")
+ test()
+
+ self.assertIsNone(PTModule.SomeClass.class_attribute,
+ "patch not restored")
+
+
+ def test_patchobject_with_default_mock(self):
+ class Test(object):
+ something = sentinel.Original
+ something2 = sentinel.Original2
+
+ @patch.object(Test, 'something')
+ def test(mock):
+ self.assertEqual(mock, Test.something,
+ "Mock not passed into test function")
+ self.assertIsInstance(mock, MagicMock,
+ "patch with two arguments did not create a mock")
+
+ test()
+
+ @patch.object(Test, 'something')
+ @patch.object(Test, 'something2')
+ def test(this1, this2, mock1, mock2):
+ self.assertEqual(this1, sentinel.this1,
+ "Patched function didn't receive initial argument")
+ self.assertEqual(this2, sentinel.this2,
+ "Patched function didn't receive second argument")
+ self.assertEqual(mock1, Test.something2,
+ "Mock not passed into test function")
+ self.assertEqual(mock2, Test.something,
+ "Second Mock not passed into test function")
+ self.assertIsInstance(mock2, MagicMock,
+ "patch with two arguments did not create a mock")
+ self.assertIsInstance(mock1, MagicMock,
+ "patch with two arguments did not create a mock")
+
+ # A hack to test that new mocks are passed the second time
+ self.assertNotEqual(outerMock1, mock1, "unexpected value for mock1")
+ self.assertNotEqual(outerMock2, mock2, "unexpected value for mock2")
+ return mock1, mock2
+
+ outerMock1 = outerMock2 = None
+ outerMock1, outerMock2 = test(sentinel.this1, sentinel.this2)
+
+ # Test that executing a second time creates new mocks
+ test(sentinel.this1, sentinel.this2)
+
+
+ def test_patch_with_spec(self):
+ @patch('%s.SomeClass' % __name__, spec=SomeClass)
+ def test(MockSomeClass):
+ self.assertEqual(SomeClass, MockSomeClass)
+ self.assertTrue(is_instance(SomeClass.wibble, MagicMock))
+ self.assertRaises(AttributeError, lambda: SomeClass.not_wibble)
+
+ test()
+
+
+ def test_patchobject_with_spec(self):
+ @patch.object(SomeClass, 'class_attribute', spec=SomeClass)
+ def test(MockAttribute):
+ self.assertEqual(SomeClass.class_attribute, MockAttribute)
+ self.assertTrue(is_instance(SomeClass.class_attribute.wibble,
+ MagicMock))
+ self.assertRaises(AttributeError,
+ lambda: SomeClass.class_attribute.not_wibble)
+
+ test()
+
+
+ def test_patch_with_spec_as_list(self):
+ @patch('%s.SomeClass' % __name__, spec=['wibble'])
+ def test(MockSomeClass):
+ self.assertEqual(SomeClass, MockSomeClass)
+ self.assertTrue(is_instance(SomeClass.wibble, MagicMock))
+ self.assertRaises(AttributeError, lambda: SomeClass.not_wibble)
+
+ test()
+
+
+ def test_patchobject_with_spec_as_list(self):
+ @patch.object(SomeClass, 'class_attribute', spec=['wibble'])
+ def test(MockAttribute):
+ self.assertEqual(SomeClass.class_attribute, MockAttribute)
+ self.assertTrue(is_instance(SomeClass.class_attribute.wibble,
+ MagicMock))
+ self.assertRaises(AttributeError,
+ lambda: SomeClass.class_attribute.not_wibble)
+
+ test()
+
+
+ def test_nested_patch_with_spec_as_list(self):
+ # regression test for nested decorators
+ @patch('%s.open' % builtin_string)
+ @patch('%s.SomeClass' % __name__, spec=['wibble'])
+ def test(MockSomeClass, MockOpen):
+ self.assertEqual(SomeClass, MockSomeClass)
+ self.assertTrue(is_instance(SomeClass.wibble, MagicMock))
+ self.assertRaises(AttributeError, lambda: SomeClass.not_wibble)
+ test()
+
+
+ def test_patch_with_spec_as_boolean(self):
+ @patch('%s.SomeClass' % __name__, spec=True)
+ def test(MockSomeClass):
+ self.assertEqual(SomeClass, MockSomeClass)
+ # Should not raise attribute error
+ MockSomeClass.wibble
+
+ self.assertRaises(AttributeError, lambda: MockSomeClass.not_wibble)
+
+ test()
+
+
+ def test_patch_object_with_spec_as_boolean(self):
+ @patch.object(PTModule, 'SomeClass', spec=True)
+ def test(MockSomeClass):
+ self.assertEqual(SomeClass, MockSomeClass)
+ # Should not raise attribute error
+ MockSomeClass.wibble
+
+ self.assertRaises(AttributeError, lambda: MockSomeClass.not_wibble)
+
+ test()
+
+
+ def test_patch_class_acts_with_spec_is_inherited(self):
+ @patch('%s.SomeClass' % __name__, spec=True)
+ def test(MockSomeClass):
+ self.assertTrue(is_instance(MockSomeClass, MagicMock))
+ instance = MockSomeClass()
+ self.assertNotCallable(instance)
+ # Should not raise attribute error
+ instance.wibble
+
+ self.assertRaises(AttributeError, lambda: instance.not_wibble)
+
+ test()
+
+
+ def test_patch_with_create_mocks_non_existent_attributes(self):
+ @patch('%s.frooble' % builtin_string, sentinel.Frooble, create=True)
+ def test():
+ self.assertEqual(frooble, sentinel.Frooble)
+
+ test()
+ self.assertRaises(NameError, lambda: frooble)
+
+
+ def test_patchobject_with_create_mocks_non_existent_attributes(self):
+ @patch.object(SomeClass, 'frooble', sentinel.Frooble, create=True)
+ def test():
+ self.assertEqual(SomeClass.frooble, sentinel.Frooble)
+
+ test()
+ self.assertFalse(hasattr(SomeClass, 'frooble'))
+
+
+ def test_patch_wont_create_by_default(self):
+ try:
+ @patch('%s.frooble' % builtin_string, sentinel.Frooble)
+ def test():
+ self.assertEqual(frooble, sentinel.Frooble)
+
+ test()
+ except AttributeError:
+ pass
+ else:
+ self.fail('Patching non existent attributes should fail')
+
+ self.assertRaises(NameError, lambda: frooble)
+
+
+ def test_patchobject_wont_create_by_default(self):
+ try:
+ @patch.object(SomeClass, 'frooble', sentinel.Frooble)
+ def test():
+ self.fail('Patching non existent attributes should fail')
+
+ test()
+ except AttributeError:
+ pass
+ else:
+ self.fail('Patching non existent attributes should fail')
+ self.assertFalse(hasattr(SomeClass, 'frooble'))
+
+
+ def test_patch_with_static_methods(self):
+ class Foo(object):
+ @staticmethod
+ def woot():
+ return sentinel.Static
+
+ @patch.object(Foo, 'woot', staticmethod(lambda: sentinel.Patched))
+ def anonymous():
+ self.assertEqual(Foo.woot(), sentinel.Patched)
+ anonymous()
+
+ self.assertEqual(Foo.woot(), sentinel.Static)
+
+
+ def test_patch_local(self):
+ foo = sentinel.Foo
+ @patch.object(sentinel, 'Foo', 'Foo')
+ def anonymous():
+ self.assertEqual(sentinel.Foo, 'Foo')
+ anonymous()
+
+ self.assertEqual(sentinel.Foo, foo)
+
+
+ def test_patch_slots(self):
+ class Foo(object):
+ __slots__ = ('Foo',)
+
+ foo = Foo()
+ foo.Foo = sentinel.Foo
+
+ @patch.object(foo, 'Foo', 'Foo')
+ def anonymous():
+ self.assertEqual(foo.Foo, 'Foo')
+ anonymous()
+
+ self.assertEqual(foo.Foo, sentinel.Foo)
+
+
+ def test_patchobject_class_decorator(self):
+ class Something(object):
+ attribute = sentinel.Original
+
+ class Foo(object):
+ def test_method(other_self):
+ self.assertEqual(Something.attribute, sentinel.Patched,
+ "unpatched")
+ def not_test_method(other_self):
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "non-test method patched")
+
+ Foo = patch.object(Something, 'attribute', sentinel.Patched)(Foo)
+
+ f = Foo()
+ f.test_method()
+ f.not_test_method()
+
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "patch not restored")
+
+
+ def test_patch_class_decorator(self):
+ class Something(object):
+ attribute = sentinel.Original
+
+ class Foo(object):
+ def test_method(other_self, mock_something):
+ self.assertEqual(PTModule.something, mock_something,
+ "unpatched")
+ def not_test_method(other_self):
+ self.assertEqual(PTModule.something, sentinel.Something,
+ "non-test method patched")
+ Foo = patch('%s.something' % __name__)(Foo)
+
+ f = Foo()
+ f.test_method()
+ f.not_test_method()
+
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "patch not restored")
+ self.assertEqual(PTModule.something, sentinel.Something,
+ "patch not restored")
+
+
+ def test_patchobject_twice(self):
+ class Something(object):
+ attribute = sentinel.Original
+ next_attribute = sentinel.Original2
+
+ @patch.object(Something, 'attribute', sentinel.Patched)
+ @patch.object(Something, 'attribute', sentinel.Patched)
+ def test():
+ self.assertEqual(Something.attribute, sentinel.Patched, "unpatched")
+
+ test()
+
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "patch not restored")
+
+
+ def test_patch_dict(self):
+ foo = {'initial': object(), 'other': 'something'}
+ original = foo.copy()
+
+ @patch.dict(foo)
+ def test():
+ foo['a'] = 3
+ del foo['initial']
+ foo['other'] = 'something else'
+
+ test()
+
+ self.assertEqual(foo, original)
+
+ @patch.dict(foo, {'a': 'b'})
+ def test():
+ self.assertEqual(len(foo), 3)
+ self.assertEqual(foo['a'], 'b')
+
+ test()
+
+ self.assertEqual(foo, original)
+
+ @patch.dict(foo, [('a', 'b')])
+ def test():
+ self.assertEqual(len(foo), 3)
+ self.assertEqual(foo['a'], 'b')
+
+ test()
+
+ self.assertEqual(foo, original)
+
+
+ def test_patch_dict_with_container_object(self):
+ foo = Container()
+ foo['initial'] = object()
+ foo['other'] = 'something'
+
+ original = foo.values.copy()
+
+ @patch.dict(foo)
+ def test():
+ foo['a'] = 3
+ del foo['initial']
+ foo['other'] = 'something else'
+
+ test()
+
+ self.assertEqual(foo.values, original)
+
+ @patch.dict(foo, {'a': 'b'})
+ def test():
+ self.assertEqual(len(foo.values), 3)
+ self.assertEqual(foo['a'], 'b')
+
+ test()
+
+ self.assertEqual(foo.values, original)
+
+
+ def test_patch_dict_with_clear(self):
+ foo = {'initial': object(), 'other': 'something'}
+ original = foo.copy()
+
+ @patch.dict(foo, clear=True)
+ def test():
+ self.assertEqual(foo, {})
+ foo['a'] = 3
+ foo['other'] = 'something else'
+
+ test()
+
+ self.assertEqual(foo, original)
+
+ @patch.dict(foo, {'a': 'b'}, clear=True)
+ def test():
+ self.assertEqual(foo, {'a': 'b'})
+
+ test()
+
+ self.assertEqual(foo, original)
+
+ @patch.dict(foo, [('a', 'b')], clear=True)
+ def test():
+ self.assertEqual(foo, {'a': 'b'})
+
+ test()
+
+ self.assertEqual(foo, original)
+
+
+ def test_patch_dict_with_container_object_and_clear(self):
+ foo = Container()
+ foo['initial'] = object()
+ foo['other'] = 'something'
+
+ original = foo.values.copy()
+
+ @patch.dict(foo, clear=True)
+ def test():
+ self.assertEqual(foo.values, {})
+ foo['a'] = 3
+ foo['other'] = 'something else'
+
+ test()
+
+ self.assertEqual(foo.values, original)
+
+ @patch.dict(foo, {'a': 'b'}, clear=True)
+ def test():
+ self.assertEqual(foo.values, {'a': 'b'})
+
+ test()
+
+ self.assertEqual(foo.values, original)
+
+
+ def test_name_preserved(self):
+ foo = {}
+
+ @patch('%s.SomeClass' % __name__, object())
+ @patch('%s.SomeClass' % __name__, object(), autospec=True)
+ @patch.object(SomeClass, object())
+ @patch.dict(foo)
+ def some_name():
+ pass
+
+ self.assertEqual(some_name.__name__, 'some_name')
+
+
+ def test_patch_with_exception(self):
+ foo = {}
+
+ @patch.dict(foo, {'a': 'b'})
+ def test():
+ raise NameError('Konrad')
+ try:
+ test()
+ except NameError:
+ pass
+ else:
+ self.fail('NameError not raised by test')
+
+ self.assertEqual(foo, {})
+
+
+ def test_patch_dict_with_string(self):
+ @patch.dict('os.environ', {'konrad_delong': 'some value'})
+ def test():
+ self.assertIn('konrad_delong', os.environ)
+
+ test()
+
+
+ @unittest2.expectedFailure
+ def test_patch_descriptor(self):
+ # would be some effort to fix this - we could special case the
+ # builtin descriptors: classmethod, property, staticmethod
+ class Nothing(object):
+ foo = None
+
+ class Something(object):
+ foo = {}
+
+ @patch.object(Nothing, 'foo', 2)
+ @classmethod
+ def klass(cls):
+ self.assertIs(cls, Something)
+
+ @patch.object(Nothing, 'foo', 2)
+ @staticmethod
+ def static(arg):
+ return arg
+
+ @patch.dict(foo)
+ @classmethod
+ def klass_dict(cls):
+ self.assertIs(cls, Something)
+
+ @patch.dict(foo)
+ @staticmethod
+ def static_dict(arg):
+ return arg
+
+ # these will raise exceptions if patching descriptors is broken
+ self.assertEqual(Something.static('f00'), 'f00')
+ Something.klass()
+ self.assertEqual(Something.static_dict('f00'), 'f00')
+ Something.klass_dict()
+
+ something = Something()
+ self.assertEqual(something.static('f00'), 'f00')
+ something.klass()
+ self.assertEqual(something.static_dict('f00'), 'f00')
+ something.klass_dict()
+
+
+ def test_patch_spec_set(self):
+ @patch('%s.SomeClass' % __name__, spec_set=SomeClass)
+ def test(MockClass):
+ MockClass.z = 'foo'
+
+ self.assertRaises(AttributeError, test)
+
+ @patch.object(support, 'SomeClass', spec_set=SomeClass)
+ def test(MockClass):
+ MockClass.z = 'foo'
+
+ self.assertRaises(AttributeError, test)
+ @patch('%s.SomeClass' % __name__, spec_set=True)
+ def test(MockClass):
+ MockClass.z = 'foo'
+
+ self.assertRaises(AttributeError, test)
+
+ @patch.object(support, 'SomeClass', spec_set=True)
+ def test(MockClass):
+ MockClass.z = 'foo'
+
+ self.assertRaises(AttributeError, test)
+
+
+ def test_spec_set_inherit(self):
+ @patch('%s.SomeClass' % __name__, spec_set=True)
+ def test(MockClass):
+ instance = MockClass()
+ instance.z = 'foo'
+
+ self.assertRaises(AttributeError, test)
+
+
+ def test_patch_start_stop(self):
+ original = something
+ patcher = patch('%s.something' % __name__)
+ self.assertIs(something, original)
+ mock = patcher.start()
+ try:
+ self.assertIsNot(mock, original)
+ self.assertIs(something, mock)
+ finally:
+ patcher.stop()
+ self.assertIs(something, original)
+
+
+ def test_stop_without_start(self):
+ patcher = patch(foo_name, 'bar', 3)
+
+ # calling stop without start used to produce a very obscure error
+ self.assertRaises(RuntimeError, patcher.stop)
+
+
+ def test_patchobject_start_stop(self):
+ original = something
+ patcher = patch.object(PTModule, 'something', 'foo')
+ self.assertIs(something, original)
+ replaced = patcher.start()
+ try:
+ self.assertEqual(replaced, 'foo')
+ self.assertIs(something, replaced)
+ finally:
+ patcher.stop()
+ self.assertIs(something, original)
+
+
+ def test_patch_dict_start_stop(self):
+ d = {'foo': 'bar'}
+ original = d.copy()
+ patcher = patch.dict(d, [('spam', 'eggs')], clear=True)
+ self.assertEqual(d, original)
+
+ patcher.start()
+ try:
+ self.assertEqual(d, {'spam': 'eggs'})
+ finally:
+ patcher.stop()
+ self.assertEqual(d, original)
+
+
+ def test_patch_dict_class_decorator(self):
+ this = self
+ d = {'spam': 'eggs'}
+ original = d.copy()
+
+ class Test(object):
+ def test_first(self):
+ this.assertEqual(d, {'foo': 'bar'})
+ def test_second(self):
+ this.assertEqual(d, {'foo': 'bar'})
+
+ Test = patch.dict(d, {'foo': 'bar'}, clear=True)(Test)
+ self.assertEqual(d, original)
+
+ test = Test()
+
+ test.test_first()
+ self.assertEqual(d, original)
+
+ test.test_second()
+ self.assertEqual(d, original)
+
+ test = Test()
+
+ test.test_first()
+ self.assertEqual(d, original)
+
+ test.test_second()
+ self.assertEqual(d, original)
+
+
+ def test_get_only_proxy(self):
+ class Something(object):
+ foo = 'foo'
+ class SomethingElse:
+ foo = 'foo'
+
+ for thing in Something, SomethingElse, Something(), SomethingElse:
+ proxy = _get_proxy(thing)
+
+ @patch.object(proxy, 'foo', 'bar')
+ def test():
+ self.assertEqual(proxy.foo, 'bar')
+ test()
+ self.assertEqual(proxy.foo, 'foo')
+ self.assertEqual(thing.foo, 'foo')
+ self.assertNotIn('foo', proxy.__dict__)
+
+
+ def test_get_set_delete_proxy(self):
+ class Something(object):
+ foo = 'foo'
+ class SomethingElse:
+ foo = 'foo'
+
+ for thing in Something, SomethingElse, Something(), SomethingElse:
+ proxy = _get_proxy(Something, get_only=False)
+
+ @patch.object(proxy, 'foo', 'bar')
+ def test():
+ self.assertEqual(proxy.foo, 'bar')
+ test()
+ self.assertEqual(proxy.foo, 'foo')
+ self.assertEqual(thing.foo, 'foo')
+ self.assertNotIn('foo', proxy.__dict__)
+
+
+ def test_patch_keyword_args(self):
+ kwargs = {'side_effect': KeyError, 'foo.bar.return_value': 33,
+ 'foo': MagicMock()}
+
+ patcher = patch(foo_name, **kwargs)
+ mock = patcher.start()
+ patcher.stop()
+
+ self.assertRaises(KeyError, mock)
+ self.assertEqual(mock.foo.bar(), 33)
+ self.assertIsInstance(mock.foo, MagicMock)
+
+
+ def test_patch_object_keyword_args(self):
+ kwargs = {'side_effect': KeyError, 'foo.bar.return_value': 33,
+ 'foo': MagicMock()}
+
+ patcher = patch.object(Foo, 'f', **kwargs)
+ mock = patcher.start()
+ patcher.stop()
+
+ self.assertRaises(KeyError, mock)
+ self.assertEqual(mock.foo.bar(), 33)
+ self.assertIsInstance(mock.foo, MagicMock)
+
+
+ def test_patch_dict_keyword_args(self):
+ original = {'foo': 'bar'}
+ copy = original.copy()
+
+ patcher = patch.dict(original, foo=3, bar=4, baz=5)
+ patcher.start()
+
+ try:
+ self.assertEqual(original, dict(foo=3, bar=4, baz=5))
+ finally:
+ patcher.stop()
+
+ self.assertEqual(original, copy)
+
+
+ def test_autospec(self):
+ class Boo(object):
+ def __init__(self, a):
+ pass
+ def f(self, a):
+ pass
+ def g(self):
+ pass
+ foo = 'bar'
+
+ class Bar(object):
+ def a(self):
+ pass
+
+ def _test(mock):
+ mock(1)
+ mock.assert_called_with(1)
+ self.assertRaises(TypeError, mock)
+
+ def _test2(mock):
+ mock.f(1)
+ mock.f.assert_called_with(1)
+ self.assertRaises(TypeError, mock.f)
+
+ mock.g()
+ mock.g.assert_called_with()
+ self.assertRaises(TypeError, mock.g, 1)
+
+ self.assertRaises(AttributeError, getattr, mock, 'h')
+
+ mock.foo.lower()
+ mock.foo.lower.assert_called_with()
+ self.assertRaises(AttributeError, getattr, mock.foo, 'bar')
+
+ mock.Bar()
+ mock.Bar.assert_called_with()
+
+ mock.Bar.a()
+ mock.Bar.a.assert_called_with()
+ self.assertRaises(TypeError, mock.Bar.a, 1)
+
+ mock.Bar().a()
+ mock.Bar().a.assert_called_with()
+ self.assertRaises(TypeError, mock.Bar().a, 1)
+
+ self.assertRaises(AttributeError, getattr, mock.Bar, 'b')
+ self.assertRaises(AttributeError, getattr, mock.Bar(), 'b')
+
+ def function(mock):
+ _test(mock)
+ _test2(mock)
+ _test2(mock(1))
+ self.assertIs(mock, Foo)
+ return mock
+
+ test = patch(foo_name, autospec=True)(function)
+
+ mock = test()
+ self.assertIsNot(Foo, mock)
+ # test patching a second time works
+ test()
+
+ module = sys.modules[__name__]
+ test = patch.object(module, 'Foo', autospec=True)(function)
+
+ mock = test()
+ self.assertIsNot(Foo, mock)
+ # test patching a second time works
+ test()
+
+
+ def test_autospec_function(self):
+ @patch('%s.function' % __name__, autospec=True)
+ def test(mock):
+ function(1)
+ function.assert_called_with(1)
+ function(2, 3)
+ function.assert_called_with(2, 3)
+
+ self.assertRaises(TypeError, function)
+ self.assertRaises(AttributeError, getattr, function, 'foo')
+
+ test()
+
+
+ def test_autospec_keywords(self):
+ @patch('%s.function' % __name__, autospec=True,
+ return_value=3)
+ def test(mock_function):
+ #self.assertEqual(function.abc, 'foo')
+ return function(1, 2)
+
+ result = test()
+ self.assertEqual(result, 3)
+
+
+ def test_autospec_with_new(self):
+ patcher = patch('%s.function' % __name__, new=3, autospec=True)
+ self.assertRaises(TypeError, patcher.start)
+
+ module = sys.modules[__name__]
+ patcher = patch.object(module, 'function', new=3, autospec=True)
+ self.assertRaises(TypeError, patcher.start)
+
+
+ def test_autospec_with_object(self):
+ class Bar(Foo):
+ extra = []
+
+ patcher = patch(foo_name, autospec=Bar)
+ mock = patcher.start()
+ try:
+ self.assertIsInstance(mock, Bar)
+ self.assertIsInstance(mock.extra, list)
+ finally:
+ patcher.stop()
+
+
+ def test_autospec_inherits(self):
+ FooClass = Foo
+ patcher = patch(foo_name, autospec=True)
+ mock = patcher.start()
+ try:
+ self.assertIsInstance(mock, FooClass)
+ self.assertIsInstance(mock(3), FooClass)
+ finally:
+ patcher.stop()
+
+
+ def test_autospec_name(self):
+ patcher = patch(foo_name, autospec=True)
+ mock = patcher.start()
+
+ try:
+ self.assertIn(" name='Foo'", repr(mock))
+ self.assertIn(" name='Foo.f'", repr(mock.f))
+ self.assertIn(" name='Foo()'", repr(mock(None)))
+ self.assertIn(" name='Foo().f'", repr(mock(None).f))
+ finally:
+ patcher.stop()
+
+
+ def test_tracebacks(self):
+ @patch.object(Foo, 'f', object())
+ def test():
+ raise AssertionError
+ try:
+ test()
+ except:
+ err = sys.exc_info()
+
+ result = unittest2.TextTestResult(None, None, 0)
+ traceback = result._exc_info_to_string(err, self)
+ self.assertIn('raise AssertionError', traceback)
+
+
+ def test_new_callable_patch(self):
+ patcher = patch(foo_name, new_callable=NonCallableMagicMock)
+
+ m1 = patcher.start()
+ patcher.stop()
+ m2 = patcher.start()
+ patcher.stop()
+
+ self.assertIsNot(m1, m2)
+ for mock in m1, m2:
+ self.assertNotCallable(mock)
+
+
+ def test_new_callable_patch_object(self):
+ patcher = patch.object(Foo, 'f', new_callable=NonCallableMagicMock)
+
+ m1 = patcher.start()
+ patcher.stop()
+ m2 = patcher.start()
+ patcher.stop()
+
+ self.assertIsNot(m1, m2)
+ for mock in m1, m2:
+ self.assertNotCallable(mock)
+
+
+ def test_new_callable_keyword_arguments(self):
+ class Bar(object):
+ kwargs = None
+ def __init__(self, **kwargs):
+ Bar.kwargs = kwargs
+
+ patcher = patch(foo_name, new_callable=Bar, arg1=1, arg2=2)
+ m = patcher.start()
+ try:
+ self.assertIs(type(m), Bar)
+ self.assertEqual(Bar.kwargs, dict(arg1=1, arg2=2))
+ finally:
+ patcher.stop()
+
+
+ def test_new_callable_spec(self):
+ class Bar(object):
+ kwargs = None
+ def __init__(self, **kwargs):
+ Bar.kwargs = kwargs
+
+ patcher = patch(foo_name, new_callable=Bar, spec=Bar)
+ patcher.start()
+ try:
+ self.assertEqual(Bar.kwargs, dict(spec=Bar))
+ finally:
+ patcher.stop()
+
+ patcher = patch(foo_name, new_callable=Bar, spec_set=Bar)
+ patcher.start()
+ try:
+ self.assertEqual(Bar.kwargs, dict(spec_set=Bar))
+ finally:
+ patcher.stop()
+
+
+ def test_new_callable_create(self):
+ non_existent_attr = '%s.weeeee' % foo_name
+ p = patch(non_existent_attr, new_callable=NonCallableMock)
+ self.assertRaises(AttributeError, p.start)
+
+ p = patch(non_existent_attr, new_callable=NonCallableMock,
+ create=True)
+ m = p.start()
+ try:
+ self.assertNotCallable(m, magic=False)
+ finally:
+ p.stop()
+
+
+ def test_new_callable_incompatible_with_new(self):
+ self.assertRaises(
+ ValueError, patch, foo_name, new=object(), new_callable=MagicMock
+ )
+ self.assertRaises(
+ ValueError, patch.object, Foo, 'f', new=object(),
+ new_callable=MagicMock
+ )
+
+
+ def test_new_callable_incompatible_with_autospec(self):
+ self.assertRaises(
+ ValueError, patch, foo_name, new_callable=MagicMock,
+ autospec=True
+ )
+ self.assertRaises(
+ ValueError, patch.object, Foo, 'f', new_callable=MagicMock,
+ autospec=True
+ )
+
+
+ def test_new_callable_inherit_for_mocks(self):
+ class MockSub(Mock):
+ pass
+
+ MockClasses = (
+ NonCallableMock, NonCallableMagicMock, MagicMock, Mock, MockSub
+ )
+ for Klass in MockClasses:
+ for arg in 'spec', 'spec_set':
+ kwargs = {arg: True}
+ p = patch(foo_name, new_callable=Klass, **kwargs)
+ m = p.start()
+ try:
+ instance = m.return_value
+ self.assertRaises(AttributeError, getattr, instance, 'x')
+ finally:
+ p.stop()
+
+
+ def test_new_callable_inherit_non_mock(self):
+ class NotAMock(object):
+ def __init__(self, spec):
+ self.spec = spec
+
+ p = patch(foo_name, new_callable=NotAMock, spec=True)
+ m = p.start()
+ try:
+ self.assertTrue(is_instance(m, NotAMock))
+ self.assertRaises(AttributeError, getattr, m, 'return_value')
+ finally:
+ p.stop()
+
+ self.assertEqual(m.spec, Foo)
+
+
+ def test_new_callable_class_decorating(self):
+ test = self
+ original = Foo
+ class SomeTest(object):
+
+ def _test(self, mock_foo):
+ test.assertIsNot(Foo, original)
+ test.assertIs(Foo, mock_foo)
+ test.assertIsInstance(Foo, SomeClass)
+
+ def test_two(self, mock_foo):
+ self._test(mock_foo)
+ def test_one(self, mock_foo):
+ self._test(mock_foo)
+
+ SomeTest = patch(foo_name, new_callable=SomeClass)(SomeTest)
+ SomeTest().test_one()
+ SomeTest().test_two()
+ self.assertIs(Foo, original)
+
+
+ def test_patch_multiple(self):
+ original_foo = Foo
+ original_f = Foo.f
+ original_g = Foo.g
+
+ patcher1 = patch.multiple(foo_name, f=1, g=2)
+ patcher2 = patch.multiple(Foo, f=1, g=2)
+
+ for patcher in patcher1, patcher2:
+ patcher.start()
+ try:
+ self.assertIs(Foo, original_foo)
+ self.assertEqual(Foo.f, 1)
+ self.assertEqual(Foo.g, 2)
+ finally:
+ patcher.stop()
+
+ self.assertIs(Foo, original_foo)
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ @patch.multiple(foo_name, f=3, g=4)
+ def test():
+ self.assertIs(Foo, original_foo)
+ self.assertEqual(Foo.f, 3)
+ self.assertEqual(Foo.g, 4)
+
+ test()
+
+
+ def test_patch_multiple_no_kwargs(self):
+ self.assertRaises(ValueError, patch.multiple, foo_name)
+ self.assertRaises(ValueError, patch.multiple, Foo)
+
+
+ def test_patch_multiple_create_mocks(self):
+ original_foo = Foo
+ original_f = Foo.f
+ original_g = Foo.g
+
+ @patch.multiple(foo_name, f=DEFAULT, g=3, foo=DEFAULT)
+ def test(f, foo):
+ self.assertIs(Foo, original_foo)
+ self.assertIs(Foo.f, f)
+ self.assertEqual(Foo.g, 3)
+ self.assertIs(Foo.foo, foo)
+ self.assertTrue(is_instance(f, MagicMock))
+ self.assertTrue(is_instance(foo, MagicMock))
+
+ test()
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_patch_multiple_create_mocks_different_order(self):
+ # bug revealed by Jython!
+ original_f = Foo.f
+ original_g = Foo.g
+
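+ # Setting attribute_name makes these patchers behave like patch.multiple:
+ # mocks created with DEFAULT are handed to the decorated function as
+ # keyword arguments, keyed by attribute_name.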
+ patcher = patch.object(Foo, 'f', 3)
+ patcher.attribute_name = 'f'
+
+ other = patch.object(Foo, 'g', DEFAULT)
+ other.attribute_name = 'g'
+ patcher.additional_patchers = [other]
+
+ @patcher
+ def test(g):
+ self.assertIs(Foo.g, g)
+ self.assertEqual(Foo.f, 3)
+
+ test()
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_patch_multiple_stacked_decorators(self):
+ original_foo = Foo
+ original_f = Foo.f
+ original_g = Foo.g
+
+ @patch.multiple(foo_name, f=DEFAULT)
+ @patch.multiple(foo_name, foo=DEFAULT)
+ @patch(foo_name + '.g')
+ def test1(g, **kwargs):
+ _test(g, **kwargs)
+
+ @patch.multiple(foo_name, f=DEFAULT)
+ @patch(foo_name + '.g')
+ @patch.multiple(foo_name, foo=DEFAULT)
+ def test2(g, **kwargs):
+ _test(g, **kwargs)
+
+ @patch(foo_name + '.g')
+ @patch.multiple(foo_name, f=DEFAULT)
+ @patch.multiple(foo_name, foo=DEFAULT)
+ def test3(g, **kwargs):
+ _test(g, **kwargs)
+
+ def _test(g, **kwargs):
+ f = kwargs.pop('f')
+ foo = kwargs.pop('foo')
+ self.assertFalse(kwargs)
+
+ self.assertIs(Foo, original_foo)
+ self.assertIs(Foo.f, f)
+ self.assertIs(Foo.g, g)
+ self.assertIs(Foo.foo, foo)
+ self.assertTrue(is_instance(f, MagicMock))
+ self.assertTrue(is_instance(g, MagicMock))
+ self.assertTrue(is_instance(foo, MagicMock))
+
+ test1()
+ test2()
+ test3()
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_patch_multiple_create_mocks_patcher(self):
+ original_foo = Foo
+ original_f = Foo.f
+ original_g = Foo.g
+
+ patcher = patch.multiple(foo_name, f=DEFAULT, g=3, foo=DEFAULT)
+
+ result = patcher.start()
+ try:
+ f = result['f']
+ foo = result['foo']
+ self.assertEqual(set(result), set(['f', 'foo']))
+
+ self.assertIs(Foo, original_foo)
+ self.assertIs(Foo.f, f)
+ self.assertIs(Foo.foo, foo)
+ self.assertTrue(is_instance(f, MagicMock))
+ self.assertTrue(is_instance(foo, MagicMock))
+ finally:
+ patcher.stop()
+
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_patch_multiple_decorating_class(self):
+ test = self
+ original_foo = Foo
+ original_f = Foo.f
+ original_g = Foo.g
+
+ class SomeTest(object):
+
+ def _test(self, f, foo):
+ test.assertIs(Foo, original_foo)
+ test.assertIs(Foo.f, f)
+ test.assertEqual(Foo.g, 3)
+ test.assertIs(Foo.foo, foo)
+ test.assertTrue(is_instance(f, MagicMock))
+ test.assertTrue(is_instance(foo, MagicMock))
+
+ def test_two(self, f, foo):
+ self._test(f, foo)
+ def test_one(self, f, foo):
+ self._test(f, foo)
+
+ SomeTest = patch.multiple(
+ foo_name, f=DEFAULT, g=3, foo=DEFAULT
+ )(SomeTest)
+
+ thing = SomeTest()
+ thing.test_one()
+ thing.test_two()
+
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_patch_multiple_create(self):
+ patcher = patch.multiple(Foo, blam='blam')
+ self.assertRaises(AttributeError, patcher.start)
+
+ patcher = patch.multiple(Foo, blam='blam', create=True)
+ patcher.start()
+ try:
+ self.assertEqual(Foo.blam, 'blam')
+ finally:
+ patcher.stop()
+
+ self.assertFalse(hasattr(Foo, 'blam'))
+
+
+ def test_patch_multiple_spec_set(self):
+ # if spec_set works then we can assume that spec and autospec also
+ # work as the underlying machinery is the same
+ patcher = patch.multiple(Foo, foo=DEFAULT, spec_set=['a', 'b'])
+ result = patcher.start()
+ try:
+ self.assertEqual(Foo.foo, result['foo'])
+ Foo.foo.a(1)
+ Foo.foo.b(2)
+ Foo.foo.a.assert_called_with(1)
+ Foo.foo.b.assert_called_with(2)
+ self.assertRaises(AttributeError, setattr, Foo.foo, 'c', None)
+ finally:
+ patcher.stop()
+
+
+ def test_patch_multiple_new_callable(self):
+ class Thing(object):
+ pass
+
+ patcher = patch.multiple(
+ Foo, f=DEFAULT, g=DEFAULT, new_callable=Thing
+ )
+ result = patcher.start()
+ try:
+ self.assertIs(Foo.f, result['f'])
+ self.assertIs(Foo.g, result['g'])
+ self.assertIsInstance(Foo.f, Thing)
+ self.assertIsInstance(Foo.g, Thing)
+ self.assertIsNot(Foo.f, Foo.g)
+ finally:
+ patcher.stop()
+
+
+ def test_nested_patch_failure(self):
+ original_f = Foo.f
+ original_g = Foo.g
+
+ @patch.object(Foo, 'g', 1)
+ @patch.object(Foo, 'missing', 1)
+ @patch.object(Foo, 'f', 1)
+ def thing1():
+ pass
+
+ @patch.object(Foo, 'missing', 1)
+ @patch.object(Foo, 'g', 1)
+ @patch.object(Foo, 'f', 1)
+ def thing2():
+ pass
+
+ @patch.object(Foo, 'g', 1)
+ @patch.object(Foo, 'f', 1)
+ @patch.object(Foo, 'missing', 1)
+ def thing3():
+ pass
+
+ for func in thing1, thing2, thing3:
+ self.assertRaises(AttributeError, func)
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_new_callable_failure(self):
+ original_f = Foo.f
+ original_g = Foo.g
+ original_foo = Foo.foo
+
+ def crasher():
+ raise NameError('crasher')
+
+ @patch.object(Foo, 'g', 1)
+ @patch.object(Foo, 'foo', new_callable=crasher)
+ @patch.object(Foo, 'f', 1)
+ def thing1():
+ pass
+
+ @patch.object(Foo, 'foo', new_callable=crasher)
+ @patch.object(Foo, 'g', 1)
+ @patch.object(Foo, 'f', 1)
+ def thing2():
+ pass
+
+ @patch.object(Foo, 'g', 1)
+ @patch.object(Foo, 'f', 1)
+ @patch.object(Foo, 'foo', new_callable=crasher)
+ def thing3():
+ pass
+
+ for func in thing1, thing2, thing3:
+ self.assertRaises(NameError, func)
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+ self.assertEqual(Foo.foo, original_foo)
+
+
+ def test_patch_multiple_failure(self):
+ original_f = Foo.f
+ original_g = Foo.g
+
+ patcher = patch.object(Foo, 'f', 1)
+ patcher.attribute_name = 'f'
+
+ good = patch.object(Foo, 'g', 1)
+ good.attribute_name = 'g'
+
+ bad = patch.object(Foo, 'missing', 1)
+ bad.attribute_name = 'missing'
+
+ for additionals in [good, bad], [bad, good]:
+ patcher.additional_patchers = additionals
+
+ @patcher
+ def func():
+ pass
+
+ self.assertRaises(AttributeError, func)
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_patch_multiple_new_callable_failure(self):
+ original_f = Foo.f
+ original_g = Foo.g
+ original_foo = Foo.foo
+
+ def crasher():
+ raise NameError('crasher')
+
+ patcher = patch.object(Foo, 'f', 1)
+ patcher.attribute_name = 'f'
+
+ good = patch.object(Foo, 'g', 1)
+ good.attribute_name = 'g'
+
+ bad = patch.object(Foo, 'foo', new_callable=crasher)
+ bad.attribute_name = 'foo'
+
+ for additionals in [good, bad], [bad, good]:
+ patcher.additional_patchers = additionals
+
+ @patcher
+ def func():
+ pass
+
+ self.assertRaises(NameError, func)
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+ self.assertEqual(Foo.foo, original_foo)
+
+
+ def test_patch_multiple_string_subclasses(self):
+ for base in (str, unicode):
+ Foo = type('Foo', (base,), {'fish': 'tasty'})
+ foo = Foo()
+ @patch.multiple(foo, fish='nearly gone')
+ def test():
+ self.assertEqual(foo.fish, 'nearly gone')
+
+ test()
+ self.assertEqual(foo.fish, 'tasty')
+
+
+ @patch('mock.patch.TEST_PREFIX', 'foo')
+ def test_patch_test_prefix(self):
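+ # With TEST_PREFIX patched to 'foo', the class decorator below only
+ # wraps methods whose names start with 'foo', so test_one and test_two
+ # keep seeing the original value.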
+ class Foo(object):
+ thing = 'original'
+
+ def foo_one(self):
+ return self.thing
+ def foo_two(self):
+ return self.thing
+ def test_one(self):
+ return self.thing
+ def test_two(self):
+ return self.thing
+
+ Foo = patch.object(Foo, 'thing', 'changed')(Foo)
+
+ foo = Foo()
+ self.assertEqual(foo.foo_one(), 'changed')
+ self.assertEqual(foo.foo_two(), 'changed')
+ self.assertEqual(foo.test_one(), 'original')
+ self.assertEqual(foo.test_two(), 'original')
+
+
+ @patch('mock.patch.TEST_PREFIX', 'bar')
+ def test_patch_dict_test_prefix(self):
+ class Foo(object):
+ def bar_one(self):
+ return dict(the_dict)
+ def bar_two(self):
+ return dict(the_dict)
+ def test_one(self):
+ return dict(the_dict)
+ def test_two(self):
+ return dict(the_dict)
+
+ the_dict = {'key': 'original'}
+ Foo = patch.dict(the_dict, key='changed')(Foo)
+
+ foo = Foo()
+ self.assertEqual(foo.bar_one(), {'key': 'changed'})
+ self.assertEqual(foo.bar_two(), {'key': 'changed'})
+ self.assertEqual(foo.test_one(), {'key': 'original'})
+ self.assertEqual(foo.test_two(), {'key': 'original'})
+
+
+ def test_patch_with_spec_mock_repr(self):
+ for arg in ('spec', 'autospec', 'spec_set'):
+ p = patch('%s.SomeClass' % __name__, **{arg: True})
+ m = p.start()
+ try:
+ self.assertIn(" name='SomeClass'", repr(m))
+ self.assertIn(" name='SomeClass.class_attribute'",
+ repr(m.class_attribute))
+ self.assertIn(" name='SomeClass()'", repr(m()))
+ self.assertIn(" name='SomeClass().class_attribute'",
+ repr(m().class_attribute))
+ finally:
+ p.stop()
+
+
+ def test_patch_nested_autospec_repr(self):
+ p = patch('tests.support', autospec=True)
+ m = p.start()
+ try:
+ self.assertIn(" name='support.SomeClass.wibble()'",
+ repr(m.SomeClass.wibble()))
+ self.assertIn(" name='support.SomeClass().wibble()'",
+ repr(m.SomeClass().wibble()))
+ finally:
+ p.stop()
+
+
+ def test_mock_calls_with_patch(self):
+ for arg in ('spec', 'autospec', 'spec_set'):
+ p = patch('%s.SomeClass' % __name__, **{arg: True})
+ m = p.start()
+ try:
+ m.wibble()
+
+ kalls = [call.wibble()]
+ self.assertEqual(m.mock_calls, kalls)
+ self.assertEqual(m.method_calls, kalls)
+ self.assertEqual(m.wibble.mock_calls, [call()])
+
+ result = m()
+ kalls.append(call())
+ self.assertEqual(m.mock_calls, kalls)
+
+ result.wibble()
+ kalls.append(call().wibble())
+ self.assertEqual(m.mock_calls, kalls)
+
+ self.assertEqual(result.mock_calls, [call.wibble()])
+ self.assertEqual(result.wibble.mock_calls, [call()])
+ self.assertEqual(result.method_calls, [call.wibble()])
+ finally:
+ p.stop()
+
+
+ def test_patch_imports_lazily(self):
+ sys.modules.pop('squizz', None)
+
+ p1 = patch('squizz.squozz')
+ self.assertRaises(ImportError, p1.start)
+
+ squizz = Mock()
+ squizz.squozz = 6
+ sys.modules['squizz'] = squizz
+ p1 = patch('squizz.squozz')
+ squizz.squozz = 3
+ p1.start()
+ p1.stop()
+ self.assertEqual(squizz.squozz, 3)
+
+
+ def test_patch_propogrates_exc_on_exit(self):
+ class holder:
+ exc_info = None, None, None
+
+ class custom_patch(_patch):
+ def __exit__(self, etype=None, val=None, tb=None):
+ _patch.__exit__(self, etype, val, tb)
+ holder.exc_info = etype, val, tb
+ stop = __exit__
+
+ def with_custom_patch(target):
+ getter, attribute = _get_target(target)
+ return custom_patch(
+ getter, attribute, DEFAULT, None, False, None,
+ None, None, {}
+ )
+
+ @with_custom_patch('squizz.squozz')
+ def test(mock):
+ raise RuntimeError
+
+ self.assertRaises(RuntimeError, test)
+ self.assertIs(holder.exc_info[0], RuntimeError)
+ self.assertIsNotNone(holder.exc_info[1],
+ 'exception value not propagated')
+ self.assertIsNotNone(holder.exc_info[2],
+ 'exception traceback not propagated')
+
+
+ def test_create_and_specs(self):
+ for kwarg in ('spec', 'spec_set', 'autospec'):
+ p = patch('%s.doesnotexist' % __name__, create=True,
+ **{kwarg: True})
+ self.assertRaises(TypeError, p.start)
+ self.assertRaises(NameError, lambda: doesnotexist)
+
+ # check that spec with create is innocuous if the original exists
+ p = patch(MODNAME, create=True, **{kwarg: True})
+ p.start()
+ p.stop()
+
+
+ def test_multiple_specs(self):
+ original = PTModule
+ for kwarg in ('spec', 'spec_set'):
+ p = patch(MODNAME, autospec=0, **{kwarg: 0})
+ self.assertRaises(TypeError, p.start)
+ self.assertIs(PTModule, original)
+
+ for kwarg in ('spec', 'autospec'):
+ p = patch(MODNAME, spec_set=0, **{kwarg: 0})
+ self.assertRaises(TypeError, p.start)
+ self.assertIs(PTModule, original)
+
+ for kwarg in ('spec_set', 'autospec'):
+ p = patch(MODNAME, spec=0, **{kwarg: 0})
+ self.assertRaises(TypeError, p.start)
+ self.assertIs(PTModule, original)
+
+
+ def test_specs_false_instead_of_none(self):
+ p = patch(MODNAME, spec=False, spec_set=False, autospec=False)
+ mock = p.start()
+ try:
+ # no spec should have been set, so attribute access should not fail
+ mock.does_not_exist
+ mock.does_not_exist = 3
+ finally:
+ p.stop()
+
+
+ def test_falsey_spec(self):
+ for kwarg in ('spec', 'autospec', 'spec_set'):
+ p = patch(MODNAME, **{kwarg: 0})
+ m = p.start()
+ try:
+ self.assertRaises(AttributeError, getattr, m, 'doesnotexit')
+ finally:
+ p.stop()
+
+
+ def test_spec_set_true(self):
+ for kwarg in ('spec', 'autospec'):
+ p = patch(MODNAME, spec_set=True, **{kwarg: True})
+ m = p.start()
+ try:
+ self.assertRaises(AttributeError, setattr, m,
+ 'doesnotexist', 'something')
+ self.assertRaises(AttributeError, getattr, m, 'doesnotexist')
+ finally:
+ p.stop()
+
+
+ def test_callable_spec_as_list(self):
+ spec = ('__call__',)
+ p = patch(MODNAME, spec=spec)
+ m = p.start()
+ try:
+ self.assertTrue(callable(m))
+ finally:
+ p.stop()
+
+
+ def test_not_callable_spec_as_list(self):
+ spec = ('foo', 'bar')
+ p = patch(MODNAME, spec=spec)
+ m = p.start()
+ try:
+ self.assertFalse(callable(m))
+ finally:
+ p.stop()
+
+
+ def test_patch_stopall(self):
+ unlink = os.unlink
+ chdir = os.chdir
+ path = os.path
+ patch('os.unlink', something).start()
+ patch('os.chdir', something_else).start()
+
+ @patch('os.path')
+ def patched(mock_path):
+ patch.stopall()
+ self.assertIs(os.path, mock_path)
+ self.assertIs(os.unlink, unlink)
+ self.assertIs(os.chdir, chdir)
+
+ patched()
+ self.assertIs(os.path, path)
+
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/third_party/python/mock-1.0.0/tests/testsentinel.py b/third_party/python/mock-1.0.0/tests/testsentinel.py
new file mode 100644
index 0000000000..981171a450
--- /dev/null
+++ b/third_party/python/mock-1.0.0/tests/testsentinel.py
@@ -0,0 +1,33 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from tests.support import unittest2
+
+from mock import sentinel, DEFAULT
+
+
+class SentinelTest(unittest2.TestCase):
+
+ def testSentinels(self):
+ self.assertEqual(sentinel.whatever, sentinel.whatever,
+ 'sentinel not stored')
+ self.assertNotEqual(sentinel.whatever, sentinel.whateverelse,
+ 'sentinel should be unique')
+
+
+ def testSentinelName(self):
+ self.assertEqual(str(sentinel.whatever), 'sentinel.whatever',
+ 'sentinel name incorrect')
+
+
+ def testDEFAULT(self):
+ self.assertTrue(DEFAULT is sentinel.DEFAULT)
+
+ def testBases(self):
+ # If this doesn't raise an AttributeError then help(mock) is broken
+ self.assertRaises(AttributeError, lambda: sentinel.__bases__)
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/third_party/python/mock-1.0.0/tests/testwith.py b/third_party/python/mock-1.0.0/tests/testwith.py
new file mode 100644
index 0000000000..34529eb9fc
--- /dev/null
+++ b/third_party/python/mock-1.0.0/tests/testwith.py
@@ -0,0 +1,16 @@
+import sys
+
+if sys.version_info[:2] >= (2, 5):
+ from tests._testwith import *
+else:
+ from tests.support import unittest2
+
+ class TestWith(unittest2.TestCase):
+
+ @unittest2.skip('tests using with statement skipped on Python 2.4')
+ def testWith(self):
+ pass
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/third_party/python/mock-1.0.0/tox.ini b/third_party/python/mock-1.0.0/tox.ini
new file mode 100644
index 0000000000..554f87096f
--- /dev/null
+++ b/third_party/python/mock-1.0.0/tox.ini
@@ -0,0 +1,40 @@
+[tox]
+envlist = py24,py25,py26,py27,py31,pypy,py32,py33,jython
+
+[testenv]
+deps=unittest2
+commands={envbindir}/unit2 discover []
+
+[testenv:py26]
+commands=
+ {envbindir}/unit2 discover []
+ {envbindir}/sphinx-build -E -b doctest docs html
+ {envbindir}/sphinx-build -E docs html
+deps =
+ unittest2
+ sphinx
+
+[testenv:py27]
+commands=
+ {envbindir}/unit2 discover []
+ {envbindir}/sphinx-build -E -b doctest docs html
+deps =
+ unittest2
+ sphinx
+
+[testenv:py31]
+deps =
+ unittest2py3k
+
+[testenv:py32]
+commands=
+ {envbindir}/python -m unittest discover []
+deps =
+
+[testenv:py33]
+commands=
+ {envbindir}/python -m unittest discover []
+deps =
+
+# Note for Jython: execute the following in the tests directory:
+# rm `find . -name '*$py.class'` \ No newline at end of file
diff --git a/third_party/python/mohawk/PKG-INFO b/third_party/python/mohawk/PKG-INFO
new file mode 100644
index 0000000000..131f03cfc5
--- /dev/null
+++ b/third_party/python/mohawk/PKG-INFO
@@ -0,0 +1,19 @@
+Metadata-Version: 1.1
+Name: mohawk
+Version: 0.3.4
+Summary: Library for Hawk HTTP authorization
+Home-page: https://github.com/kumar303/mohawk
+Author: Kumar McMillan, Austin King
+Author-email: kumar.mcmillan@gmail.com
+License: MPL 2.0 (Mozilla Public License)
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Topic :: Internet :: WWW/HTTP
diff --git a/third_party/python/mohawk/README.rst b/third_party/python/mohawk/README.rst
new file mode 100644
index 0000000000..e53a8f7e3e
--- /dev/null
+++ b/third_party/python/mohawk/README.rst
@@ -0,0 +1,25 @@
+======
+Mohawk
+======
+.. image:: https://img.shields.io/pypi/v/mohawk.svg
+ :target: https://pypi.python.org/pypi/mohawk
+ :alt: Latest PyPI release
+
+.. image:: https://img.shields.io/pypi/dm/mohawk.svg
+ :target: https://pypi.python.org/pypi/mohawk
+ :alt: PyPI monthly download stats
+
+.. image:: https://travis-ci.org/kumar303/mohawk.svg?branch=master
+ :target: https://travis-ci.org/kumar303/mohawk
+ :alt: Travis master branch status
+
+.. image:: https://readthedocs.org/projects/mohawk/badge/?version=latest
+ :target: https://mohawk.readthedocs.io/en/latest/?badge=latest
+ :alt: Documentation status
+
+Mohawk is an alternate Python implementation of the
+`Hawk HTTP authorization scheme`_.
+
+Full documentation: https://mohawk.readthedocs.io/
+
+.. _`Hawk HTTP authorization scheme`: https://github.com/hueniverse/hawk
diff --git a/third_party/python/mohawk/mohawk/__init__.py b/third_party/python/mohawk/mohawk/__init__.py
new file mode 100644
index 0000000000..a79e7b7164
--- /dev/null
+++ b/third_party/python/mohawk/mohawk/__init__.py
@@ -0,0 +1,2 @@
+from .sender import *
+from .receiver import *
diff --git a/third_party/python/mohawk/mohawk/base.py b/third_party/python/mohawk/mohawk/base.py
new file mode 100644
index 0000000000..4935110568
--- /dev/null
+++ b/third_party/python/mohawk/mohawk/base.py
@@ -0,0 +1,230 @@
+import logging
+import math
+import pprint
+
+import six
+from six.moves.urllib.parse import urlparse
+
+from .exc import (AlreadyProcessed,
+ MacMismatch,
+ MisComputedContentHash,
+ TokenExpired)
+from .util import (calculate_mac,
+ calculate_payload_hash,
+ calculate_ts_mac,
+ prepare_header_val,
+ random_string,
+ strings_match,
+ utc_now)
+
+default_ts_skew_in_seconds = 60
+log = logging.getLogger(__name__)
+
+
+class HawkAuthority:
+
+ def _authorize(self, mac_type, parsed_header, resource,
+ their_timestamp=None,
+ timestamp_skew_in_seconds=default_ts_skew_in_seconds,
+ localtime_offset_in_seconds=0,
+ accept_untrusted_content=False):
+
+ now = utc_now(offset_in_seconds=localtime_offset_in_seconds)
+
+ their_hash = parsed_header.get('hash', '')
+ their_mac = parsed_header.get('mac', '')
+ mac = calculate_mac(mac_type, resource, their_hash)
+ if not strings_match(mac, their_mac):
+ raise MacMismatch('MACs do not match; ours: {ours}; '
+ 'theirs: {theirs}'
+ .format(ours=mac, theirs=their_mac))
+
+ if 'hash' not in parsed_header and accept_untrusted_content:
+ # The request did not hash its content.
+ log.debug('NOT calculating/verifying payload hash '
+ '(no hash in header)')
+ check_hash = False
+ content_hash = None
+ else:
+ check_hash = True
+ content_hash = resource.gen_content_hash()
+
+ if check_hash and not their_hash:
+ log.info('request unexpectedly did not hash its content')
+
+ if check_hash:
+ if not strings_match(content_hash, their_hash):
+ # The hash declared in the header is incorrect.
+ # Content could have been tampered with.
+ log.debug('mismatched content: {content}'
+ .format(content=repr(resource.content)))
+ log.debug('mismatched content-type: {typ}'
+ .format(typ=repr(resource.content_type)))
+ raise MisComputedContentHash(
+ 'Our hash {ours} ({algo}) did not '
+ 'match theirs {theirs}'
+ .format(ours=content_hash,
+ theirs=their_hash,
+ algo=resource.credentials['algorithm']))
+
+ if resource.seen_nonce:
+ if resource.seen_nonce(resource.credentials['id'],
+ parsed_header['nonce'],
+ parsed_header['ts']):
+ raise AlreadyProcessed('Nonce {nonce} with timestamp {ts} '
+ 'has already been processed for {id}'
+ .format(nonce=parsed_header['nonce'],
+ ts=parsed_header['ts'],
+ id=resource.credentials['id']))
+ else:
+ log.warn('seen_nonce was None; not checking nonce. '
+ 'You may be vulnerable to replay attacks')
+
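+ # Reject the request when its timestamp differs from local time by
+ # more than the allowed skew (default_ts_skew_in_seconds, 60 seconds).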
+ their_ts = int(their_timestamp or parsed_header['ts'])
+
+ if math.fabs(their_ts - now) > timestamp_skew_in_seconds:
+ message = ('token with UTC timestamp {ts} has expired; '
+ 'it was compared to {now}'
+ .format(ts=their_ts, now=now))
+ tsm = calculate_ts_mac(now, resource.credentials)
+ if isinstance(tsm, six.binary_type):
+ tsm = tsm.decode('ascii')
+ www_authenticate = ('Hawk ts="{ts}", tsm="{tsm}", error="{error}"'
+ .format(ts=now, tsm=tsm, error=message))
+ raise TokenExpired(message,
+ localtime_in_seconds=now,
+ www_authenticate=www_authenticate)
+
+ log.debug('authorized OK')
+
+ def _make_header(self, resource, mac, additional_keys=None):
+ keys = additional_keys
+ if not keys:
+ # These are the default header keys that you'd send with a
+ # request header. Response headers are odd because they
+ # exclude a bunch of keys.
+ keys = ('id', 'ts', 'nonce', 'ext', 'app', 'dlg')
+
+ header = u'Hawk mac="{mac}"'.format(mac=prepare_header_val(mac))
+
+ if resource.content_hash:
+ header = u'{header}, hash="{hash}"'.format(
+ header=header,
+ hash=prepare_header_val(resource.content_hash))
+
+ if 'id' in keys:
+ header = u'{header}, id="{id}"'.format(
+ header=header,
+ id=prepare_header_val(resource.credentials['id']))
+
+ if 'ts' in keys:
+ header = u'{header}, ts="{ts}"'.format(
+ header=header, ts=prepare_header_val(resource.timestamp))
+
+ if 'nonce' in keys:
+ header = u'{header}, nonce="{nonce}"'.format(
+ header=header, nonce=prepare_header_val(resource.nonce))
+
+ # These are optional so we need to check if they have values first.
+
+ if 'ext' in keys and resource.ext:
+ header = u'{header}, ext="{ext}"'.format(
+ header=header, ext=prepare_header_val(resource.ext))
+
+ if 'app' in keys and resource.app:
+ header = u'{header}, app="{app}"'.format(
+ header=header, app=prepare_header_val(resource.app))
+
+ if 'dlg' in keys and resource.dlg:
+ header = u'{header}, dlg="{dlg}"'.format(
+ header=header, dlg=prepare_header_val(resource.dlg))
+
+ log.debug('Hawk header for URL={url} method={method}: {header}'
+ .format(url=resource.url, method=resource.method,
+ header=header))
+ return header
+
+
+class Resource:
+ """
+ Normalized request/response resource.
+ """
+
+ def __init__(self, **kw):
+ self.credentials = kw.pop('credentials')
+ self.method = kw.pop('method').upper()
+ self.content = kw.pop('content', None)
+ self.content_type = kw.pop('content_type', None)
+ self.always_hash_content = kw.pop('always_hash_content', True)
+ self.ext = kw.pop('ext', None)
+ self.app = kw.pop('app', None)
+ self.dlg = kw.pop('dlg', None)
+
+ self.timestamp = str(kw.pop('timestamp', None) or utc_now())
+
+ self.nonce = kw.pop('nonce', None)
+ if self.nonce is None:
+ self.nonce = random_string(6)
+
+ # This is a lookup function for checking nonces.
+ self.seen_nonce = kw.pop('seen_nonce', None)
+
+ self.url = kw.pop('url')
+ if not self.url:
+ raise ValueError('url was empty')
+ url_parts = self.parse_url(self.url)
+ log.debug('parsed URL parts: \n{parts}'
+ .format(parts=pprint.pformat(url_parts)))
+
+ self.name = url_parts['resource'] or ''
+ self.host = url_parts['hostname'] or ''
+ self.port = str(url_parts['port'])
+
+ if kw.keys():
+ raise TypeError('Unknown keyword argument(s): {0}'
+ .format(kw.keys()))
+
+ @property
+ def content_hash(self):
+ if not hasattr(self, '_content_hash'):
+ raise AttributeError(
+ 'Cannot access content_hash because it has not been generated')
+ return self._content_hash
+
+ def gen_content_hash(self):
+ if self.content is None or self.content_type is None:
+ if self.always_hash_content:
+ # Be really strict about allowing developers to skip content
+ # hashing. If they get this far they may be unintentionally
+ # skipping it.
+ raise ValueError(
+ 'payload content and/or content_type cannot be '
+ 'empty without an explicit allowance')
+ log.debug('NOT hashing content')
+ self._content_hash = None
+ else:
+ self._content_hash = calculate_payload_hash(
+ self.content, self.credentials['algorithm'],
+ self.content_type)
+ return self.content_hash
+
+ def parse_url(self, url):
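+ # Break the URL into the pieces Hawk signs: 'resource' is the path
+ # plus any query string, and the port falls back to 80/443 when the
+ # URL omits it for http/https.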
+ url_parts = urlparse(url)
+ url_dict = {
+ 'scheme': url_parts.scheme,
+ 'hostname': url_parts.hostname,
+ 'port': url_parts.port,
+ 'path': url_parts.path,
+ 'resource': url_parts.path,
+ 'query': url_parts.query,
+ }
+ if len(url_dict['query']) > 0:
+ url_dict['resource'] = '%s?%s' % (url_dict['resource'],
+ url_dict['query'])
+
+ if url_parts.port is None:
+ if url_parts.scheme == 'http':
+ url_dict['port'] = 80
+ elif url_parts.scheme == 'https':
+ url_dict['port'] = 443
+ return url_dict
diff --git a/third_party/python/mohawk/mohawk/bewit.py b/third_party/python/mohawk/mohawk/bewit.py
new file mode 100644
index 0000000000..ec83923655
--- /dev/null
+++ b/third_party/python/mohawk/mohawk/bewit.py
@@ -0,0 +1,167 @@
+from base64 import urlsafe_b64encode, b64decode
+from collections import namedtuple
+import logging
+import re
+
+import six
+
+from .base import Resource
+from .util import (calculate_mac,
+ utc_now)
+from .exc import (CredentialsLookupError,
+ InvalidBewit,
+ MacMismatch,
+ TokenExpired)
+
+log = logging.getLogger(__name__)
+
+
+def get_bewit(resource):
+ """
+ Returns a bewit identifier for the resource as a string.
+
+ :param resource:
+ Resource to generate a bewit for
+ :type resource: `mohawk.base.Resource`
+ """
+ if resource.method != 'GET':
+ raise ValueError('bewits can only be generated for GET requests')
+ if resource.nonce != '':
+ raise ValueError('bewits must use an empty nonce')
+ mac = calculate_mac(
+ 'bewit',
+ resource,
+ None,
+ )
+
+ if isinstance(mac, six.binary_type):
+ mac = mac.decode('ascii')
+
+ if resource.ext is None:
+ ext = ''
+ else:
+ ext = resource.ext
+
+ # Strip out \ from the client id
+ # since that can break parsing the response
+ # NB that the canonical implementation does not do this as of
+ # Oct 28, 2015, so this could break compat.
+ # We can leave \ in ext since validators can limit how many \ they split
+ # on (although again, the canonical implementation does not do this)
+ client_id = six.text_type(resource.credentials['id'])
+ if "\\" in client_id:
+ log.warn("Stripping backslash character(s) '\\' from client_id")
+ client_id = client_id.replace("\\", "")
+
+ # b64encode works only with bytes in python3, but all of our parameters are
+ # in unicode, so we need to encode them. The cleanest way to do this that
+ # works in both python 2 and 3 is to use string formatting to get a
+ # unicode string, and then explicitly encode it to bytes.
+ inner_bewit = u"{id}\\{exp}\\{mac}\\{ext}".format(
+ id=client_id,
+ exp=resource.timestamp,
+ mac=mac,
+ ext=ext,
+ )
+ inner_bewit_bytes = inner_bewit.encode('ascii')
+ bewit_bytes = urlsafe_b64encode(inner_bewit_bytes)
+ # Now decode the resulting bytes back to a unicode string
+ return bewit_bytes.decode('ascii')
+
+
+bewittuple = namedtuple('bewittuple', 'id expiration mac ext')
+
+
+def parse_bewit(bewit):
+ """
+ Returns a `bewittuple` representing the parts of an encoded bewit string.
+ This has the following named attributes:
+ (id, expiration, mac, ext)
+
+ :param bewit:
+ A base64 encoded bewit string
+ :type bewit: str
+ """
+ decoded_bewit = b64decode(bewit).decode('ascii')
+ bewit_parts = decoded_bewit.split("\\", 3)
+ if len(bewit_parts) != 4:
+ raise InvalidBewit('Expected 4 parts to bewit: %s' % decoded_bewit)
+ return bewittuple(*bewit_parts)
+
+
+def strip_bewit(url):
+ """
+ Strips the bewit parameter out of a url.
+
+ Returns (encoded_bewit, stripped_url)
+
+ Raises InvalidBewit if no bewit found.
+
+ :param url:
+ The url containing a bewit parameter
+ :type url: str
+ """
+ m = re.search('[?&]bewit=([^&]+)', url)
+ if not m:
+ raise InvalidBewit('no bewit data found')
+ bewit = m.group(1)
+ stripped_url = url[:m.start()] + url[m.end():]
+ return bewit, stripped_url
+
+
+def check_bewit(url, credential_lookup, now=None):
+ """
+ Validates the given bewit.
+
+ Returns True if the resource has a valid bewit parameter attached,
+ or raises a subclass of HawkFail otherwise.
+
+ :param credential_lookup:
+ Callable to look up the credentials dict by sender ID.
+ The credentials dict must have the keys:
+ ``id``, ``key``, and ``algorithm``.
+ See :ref:`receiving-request` for an example.
+ :type credential_lookup: callable
+
+ :param now=None:
+ Unix epoch time for the current time to determine if bewit has expired.
+ If None, then the current time as given by utc_now() is used.
+ :type now=None: integer
+ """
+ raw_bewit, stripped_url = strip_bewit(url)
+ bewit = parse_bewit(raw_bewit)
+ try:
+ credentials = credential_lookup(bewit.id)
+ except LookupError:
+ raise CredentialsLookupError('Could not find credentials for ID {0}'
+ .format(bewit.id))
+
+ res = Resource(url=stripped_url,
+ method='GET',
+ credentials=credentials,
+ timestamp=bewit.expiration,
+ nonce='',
+ ext=bewit.ext,
+ )
+ mac = calculate_mac('bewit', res, None)
+ mac = mac.decode('ascii')
+
+ if mac != bewit.mac:
+ raise MacMismatch('bewit with mac {bewit_mac} did not match expected mac {expected_mac}'
+ .format(bewit_mac=bewit.mac,
+ expected_mac=mac))
+
+ # Check that the timestamp isn't expired
+ if now is None:
+ # TODO: Add offset/skew
+ now = utc_now()
+ if int(bewit.expiration) < now:
+ # TODO: Refactor TokenExpired to handle this better
+ raise TokenExpired('bewit with UTC timestamp {ts} has expired; '
+ 'it was compared to {now}'
+ .format(ts=bewit.expiration, now=now),
+ localtime_in_seconds=now,
+ www_authenticate=''
+ )
+
+ return True
diff --git a/third_party/python/mohawk/mohawk/exc.py b/third_party/python/mohawk/mohawk/exc.py
new file mode 100644
index 0000000000..9376995f2c
--- /dev/null
+++ b/third_party/python/mohawk/mohawk/exc.py
@@ -0,0 +1,98 @@
+"""
+If you want to catch any exception that might be raised,
+catch :class:`mohawk.exc.HawkFail`.
+"""
+
+
+class HawkFail(Exception):
+ """
+ All Mohawk exceptions derive from this base.
+ """
+
+
+class MissingAuthorization(HawkFail):
+ """
+ No authorization header was sent by the client.
+ """
+
+
+class InvalidCredentials(HawkFail):
+ """
+ The specified Hawk credentials are invalid.
+
+ For example, the dict could be formatted incorrectly.
+ """
+
+
+class CredentialsLookupError(HawkFail):
+ """
+ A :class:`mohawk.Receiver` could not look up the
+ credentials for an incoming request.
+ """
+
+
+class BadHeaderValue(HawkFail):
+ """
+ There was an error with an attribute or value when parsing
+ or creating a Hawk header.
+ """
+
+
+class MacMismatch(HawkFail):
+ """
+ The locally calculated MAC did not match the MAC that was sent.
+ """
+
+
+class MisComputedContentHash(HawkFail):
+ """
+ The signature of the content did not match the actual content.
+ """
+
+
+class TokenExpired(HawkFail):
+ """
+ The timestamp on a message received has expired.
+
+ You may also receive this message if your server clock is out of sync.
+ Consider synchronizing it with something like `TLSdate`_.
+
+ If you are unable to synchronize your clock universally,
+    the `Hawk`_ spec mentions how you can `adjust`_
+ your sender's time to match that of the receiver in the case
+ of unexpected expiration.
+
+ The ``www_authenticate`` attribute of this exception is a header
+ that can be returned to the client. If the value is not None, it
+ will include a timestamp HMAC'd with the sender's credentials.
+ This will allow the client
+ to verify the value and safely apply an offset.
+
+ .. _`Hawk`: https://github.com/hueniverse/hawk
+ .. _`adjust`: https://github.com/hueniverse/hawk#future-time-manipulation
+ .. _`TLSdate`: http://linux-audit.com/tlsdate-the-secure-alternative-for-ntpd-ntpdate-and-rdate/
+ """
+ #: Current local time in seconds that was used to compare timestamps.
+ localtime_in_seconds = None
+    #: A header containing an HMAC'd server timestamp that the sender can verify.
+ www_authenticate = None
+
+ def __init__(self, *args, **kw):
+ self.localtime_in_seconds = kw.pop('localtime_in_seconds')
+ self.www_authenticate = kw.pop('www_authenticate')
+ super(HawkFail, self).__init__(*args, **kw)
+
+
+class AlreadyProcessed(HawkFail):
+ """
+ The message has already been processed and cannot be re-processed.
+
+ See :ref:`nonce` for details.
+ """
+
+
+class InvalidBewit(HawkFail):
+ """
+ The bewit is invalid; e.g. it doesn't contain the right number of
+ parameters.
+ """
diff --git a/third_party/python/mohawk/mohawk/receiver.py b/third_party/python/mohawk/mohawk/receiver.py
new file mode 100644
index 0000000000..509729ea8d
--- /dev/null
+++ b/third_party/python/mohawk/mohawk/receiver.py
@@ -0,0 +1,170 @@
+import logging
+import sys
+
+from .base import default_ts_skew_in_seconds, HawkAuthority, Resource
+from .exc import CredentialsLookupError, MissingAuthorization
+from .util import (calculate_mac,
+ parse_authorization_header,
+ validate_credentials)
+
+__all__ = ['Receiver']
+log = logging.getLogger(__name__)
+
+
+class Receiver(HawkAuthority):
+ """
+ A Hawk authority that will receive and respond to requests.
+
+ :param credentials_map:
+ Callable to look up the credentials dict by sender ID.
+ The credentials dict must have the keys:
+ ``id``, ``key``, and ``algorithm``.
+ See :ref:`receiving-request` for an example.
+ :type credentials_map: callable
+
+ :param request_header:
+ A `Hawk`_ ``Authorization`` header
+ such as one created by :class:`mohawk.Sender`.
+ :type request_header: str
+
+ :param url: Absolute URL of the request.
+ :type url: str
+
+    :param method: Method of the request, e.g. POST or GET.
+ :type method: str
+
+ :param content=None: Byte string of request body.
+ :type content=None: str
+
+ :param content_type=None: content-type header value for request.
+ :type content_type=None: str
+
+ :param accept_untrusted_content=False:
+ When True, allow requests that do not hash their content or
+ allow None type ``content`` and ``content_type``
+ arguments. Read :ref:`skipping-content-checks`
+ to learn more.
+ :type accept_untrusted_content=False: bool
+
+ :param localtime_offset_in_seconds=0:
+ Seconds to add to local time in case it's out of sync.
+ :type localtime_offset_in_seconds=0: float
+
+ :param timestamp_skew_in_seconds=60:
+ Max seconds until a message expires. Upon expiry,
+ :class:`mohawk.exc.TokenExpired` is raised.
+ :type timestamp_skew_in_seconds=60: float
+
+ .. _`Hawk`: https://github.com/hueniverse/hawk
+ """
+ #: Value suitable for a ``Server-Authorization`` header.
+ response_header = None
+
+ def __init__(self,
+ credentials_map,
+ request_header,
+ url,
+ method,
+ content=None,
+ content_type=None,
+ seen_nonce=None,
+ localtime_offset_in_seconds=0,
+ accept_untrusted_content=False,
+ timestamp_skew_in_seconds=default_ts_skew_in_seconds,
+ **auth_kw):
+
+ self.response_header = None # make into property that can raise exc?
+ self.credentials_map = credentials_map
+ self.seen_nonce = seen_nonce
+
+ log.debug('accepting request {header}'.format(header=request_header))
+
+ if not request_header:
+ raise MissingAuthorization()
+
+ parsed_header = parse_authorization_header(request_header)
+
+ try:
+ credentials = self.credentials_map(parsed_header['id'])
+ except LookupError:
+ etype, val, tb = sys.exc_info()
+ log.debug('Catching {etype}: {val}'.format(etype=etype, val=val))
+ raise CredentialsLookupError(
+ 'Could not find credentials for ID {0}'
+ .format(parsed_header['id']))
+ validate_credentials(credentials)
+
+ resource = Resource(url=url,
+ method=method,
+ ext=parsed_header.get('ext', None),
+ app=parsed_header.get('app', None),
+ dlg=parsed_header.get('dlg', None),
+ credentials=credentials,
+ nonce=parsed_header['nonce'],
+ seen_nonce=self.seen_nonce,
+ content=content,
+ timestamp=parsed_header['ts'],
+ content_type=content_type)
+
+ self._authorize(
+ 'header', parsed_header, resource,
+ timestamp_skew_in_seconds=timestamp_skew_in_seconds,
+ localtime_offset_in_seconds=localtime_offset_in_seconds,
+ accept_untrusted_content=accept_untrusted_content,
+ **auth_kw)
+
+ # Now that we verified an incoming request, we can re-use some of its
+ # properties to build our response header.
+
+ self.parsed_header = parsed_header
+ self.resource = resource
+
+ def respond(self,
+ content=None,
+ content_type=None,
+ always_hash_content=True,
+ ext=None):
+ """
+ Respond to the request.
+
+ This generates the :attr:`mohawk.Receiver.response_header`
+ attribute.
+
+ :param content=None: Byte string of response body that will be sent.
+ :type content=None: str
+
+ :param content_type=None: content-type header value for response.
+ :type content_type=None: str
+
+ :param always_hash_content=True:
+ When True, ``content`` and ``content_type`` cannot be None.
+ Read :ref:`skipping-content-checks` to learn more.
+ :type always_hash_content=True: bool
+
+ :param ext=None:
+ An external `Hawk`_ string. If not None, this value will be
+ signed so that the sender can trust it.
+ :type ext=None: str
+
+ .. _`Hawk`: https://github.com/hueniverse/hawk
+ """
+
+ log.debug('generating response header')
+
+ resource = Resource(url=self.resource.url,
+ credentials=self.resource.credentials,
+ ext=ext,
+ app=self.parsed_header.get('app', None),
+ dlg=self.parsed_header.get('dlg', None),
+ method=self.resource.method,
+ content=content,
+ content_type=content_type,
+ always_hash_content=always_hash_content,
+ nonce=self.parsed_header['nonce'],
+ timestamp=self.parsed_header['ts'])
+
+ mac = calculate_mac('response', resource, resource.gen_content_hash())
+
+ self.response_header = self._make_header(resource, mac,
+ additional_keys=['ext'])
+ return self.response_header
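
The class above is one half of a full exchange; paired with mohawk.Sender (added in the next file), the round trip looks roughly like the TestSendAndReceive case later in this commit. An illustrative sketch with made-up credentials and content:

    from mohawk import Receiver, Sender

    credentials = {'id': 'some-id', 'key': 'some secret', 'algorithm': 'sha256'}
    url, method = 'https://my-site.com/', 'POST'
    content, content_type = 'foo=bar&baz=nooz', 'application/x-www-form-urlencoded'

    # Client side: sign the outgoing request.
    sender = Sender(credentials, url, method,
                    content=content, content_type=content_type)

    # Server side: authorize the request, then sign the response body.
    receiver = Receiver(lambda sender_id: credentials,
                        sender.request_header, url, method,
                        content=content, content_type=content_type)
    receiver.respond(content='we are friends', content_type='text/plain')

    # Client side: verify the Server-Authorization header before trusting the body.
    sender.accept_response(receiver.response_header,
                           content='we are friends', content_type='text/plain')
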
diff --git a/third_party/python/mohawk/mohawk/sender.py b/third_party/python/mohawk/mohawk/sender.py
new file mode 100644
index 0000000000..b6f3edc170
--- /dev/null
+++ b/third_party/python/mohawk/mohawk/sender.py
@@ -0,0 +1,178 @@
+import logging
+
+from .base import default_ts_skew_in_seconds, HawkAuthority, Resource
+from .util import (calculate_mac,
+ parse_authorization_header,
+ validate_credentials)
+
+__all__ = ['Sender']
+log = logging.getLogger(__name__)
+
+
+class Sender(HawkAuthority):
+ """
+ A Hawk authority that will emit requests and verify responses.
+
+ :param credentials: Dict of credentials with keys ``id``, ``key``,
+ and ``algorithm``. See :ref:`usage` for an example.
+ :type credentials: dict
+
+ :param url: Absolute URL of the request.
+ :type url: str
+
+    :param method: Method of the request, e.g. POST or GET.
+ :type method: str
+
+ :param content=None: Byte string of request body.
+ :type content=None: str
+
+ :param content_type=None: content-type header value for request.
+ :type content_type=None: str
+
+ :param always_hash_content=True:
+ When True, ``content`` and ``content_type`` cannot be None.
+ Read :ref:`skipping-content-checks` to learn more.
+ :type always_hash_content=True: bool
+
+ :param nonce=None:
+ A string that when coupled with the timestamp will
+ uniquely identify this request to prevent replays.
+ If None, a nonce will be generated for you.
+ :type nonce=None: str
+
+ :param ext=None:
+ An external `Hawk`_ string. If not None, this value will be signed
+ so that the receiver can trust it.
+ :type ext=None: str
+
+ :param app=None:
+ A `Hawk`_ application string. If not None, this value will be signed
+ so that the receiver can trust it.
+ :type app=None: str
+
+ :param dlg=None:
+ A `Hawk`_ delegation string. If not None, this value will be signed
+ so that the receiver can trust it.
+ :type dlg=None: str
+
+ :param seen_nonce=None:
+ A callable that returns True if a nonce has been seen.
+ See :ref:`nonce` for details.
+ :type seen_nonce=None: callable
+
+ .. _`Hawk`: https://github.com/hueniverse/hawk
+ """
+ #: Value suitable for an ``Authorization`` header.
+ request_header = None
+
+ def __init__(self, credentials,
+ url,
+ method,
+ content=None,
+ content_type=None,
+ always_hash_content=True,
+ nonce=None,
+ ext=None,
+ app=None,
+ dlg=None,
+ seen_nonce=None,
+ # For easier testing:
+ _timestamp=None):
+
+ self.reconfigure(credentials)
+ self.request_header = None
+ self.seen_nonce = seen_nonce
+
+ log.debug('generating request header')
+ self.req_resource = Resource(url=url,
+ credentials=self.credentials,
+ ext=ext,
+ app=app,
+ dlg=dlg,
+ nonce=nonce,
+ method=method,
+ content=content,
+ always_hash_content=always_hash_content,
+ timestamp=_timestamp,
+ content_type=content_type)
+
+ mac = calculate_mac('header', self.req_resource,
+ self.req_resource.gen_content_hash())
+ self.request_header = self._make_header(self.req_resource, mac)
+
+ def accept_response(self,
+ response_header,
+ content=None,
+ content_type=None,
+ accept_untrusted_content=False,
+ localtime_offset_in_seconds=0,
+ timestamp_skew_in_seconds=default_ts_skew_in_seconds,
+ **auth_kw):
+ """
+ Accept a response to this request.
+
+ :param response_header:
+ A `Hawk`_ ``Server-Authorization`` header
+ such as one created by :class:`mohawk.Receiver`.
+ :type response_header: str
+
+ :param content=None: Byte string of the response body received.
+ :type content=None: str
+
+ :param content_type=None:
+ Content-Type header value of the response received.
+ :type content_type=None: str
+
+ :param accept_untrusted_content=False:
+ When True, allow responses that do not hash their content or
+ allow None type ``content`` and ``content_type``
+ arguments. Read :ref:`skipping-content-checks`
+ to learn more.
+ :type accept_untrusted_content=False: bool
+
+ :param localtime_offset_in_seconds=0:
+ Seconds to add to local time in case it's out of sync.
+ :type localtime_offset_in_seconds=0: float
+
+ :param timestamp_skew_in_seconds=60:
+ Max seconds until a message expires. Upon expiry,
+ :class:`mohawk.exc.TokenExpired` is raised.
+ :type timestamp_skew_in_seconds=60: float
+
+ .. _`Hawk`: https://github.com/hueniverse/hawk
+ """
+ log.debug('accepting response {header}'
+ .format(header=response_header))
+
+ parsed_header = parse_authorization_header(response_header)
+
+ resource = Resource(ext=parsed_header.get('ext', None),
+ content=content,
+ content_type=content_type,
+ # The following response attributes are
+ # in reference to the original request,
+                            # not to the response header:
+ timestamp=self.req_resource.timestamp,
+ nonce=self.req_resource.nonce,
+ url=self.req_resource.url,
+ method=self.req_resource.method,
+ app=self.req_resource.app,
+ dlg=self.req_resource.dlg,
+ credentials=self.credentials,
+ seen_nonce=self.seen_nonce)
+
+ self._authorize(
+ 'response', parsed_header, resource,
+ # Per Node lib, a responder macs the *sender's* timestamp.
+ # It does not create its own timestamp.
+ # I suppose a slow response could time out here. Maybe only check
+ # mac failures, not timeouts?
+ their_timestamp=resource.timestamp,
+ timestamp_skew_in_seconds=timestamp_skew_in_seconds,
+ localtime_offset_in_seconds=localtime_offset_in_seconds,
+ accept_untrusted_content=accept_untrusted_content,
+ **auth_kw)
+
+ def reconfigure(self, credentials):
+ validate_credentials(credentials)
+ self.credentials = credentials
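
In practice, request_header is sent as the Authorization header of the outbound request and accept_response is fed the Server-Authorization header that comes back. A sketch of that wiring, assuming the third-party requests library and a server that returns a signed response (neither is part of this package):

    import requests  # assumed to be available; not a dependency of mohawk

    from mohawk import Sender

    credentials = {'id': 'my-id', 'key': 'my secret', 'algorithm': 'sha256'}
    body, ctype = '{"hello": "world"}', 'application/json'

    sender = Sender(credentials, 'https://example.com/api', 'POST',
                    content=body, content_type=ctype)

    resp = requests.post('https://example.com/api', data=body,
                         headers={'Authorization': sender.request_header,
                                  'Content-Type': ctype})

    # Verify the signed response before trusting its body.
    sender.accept_response(resp.headers['Server-Authorization'],
                           content=resp.content,
                           content_type=resp.headers['Content-Type'])
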
diff --git a/third_party/python/mohawk/mohawk/tests.py b/third_party/python/mohawk/mohawk/tests.py
new file mode 100644
index 0000000000..eeb71506d1
--- /dev/null
+++ b/third_party/python/mohawk/mohawk/tests.py
@@ -0,0 +1,823 @@
+import sys
+from unittest import TestCase
+from base64 import b64decode, urlsafe_b64encode
+
+import mock
+from nose.tools import eq_, raises
+import six
+
+from . import Receiver, Sender
+from .base import Resource
+from .exc import (AlreadyProcessed,
+ BadHeaderValue,
+ CredentialsLookupError,
+ InvalidCredentials,
+ MacMismatch,
+ MisComputedContentHash,
+ MissingAuthorization,
+ TokenExpired,
+ InvalidBewit)
+from .util import (parse_authorization_header,
+ utc_now,
+ calculate_ts_mac,
+ validate_credentials)
+from .bewit import (get_bewit,
+ check_bewit,
+ strip_bewit,
+ parse_bewit)
+
+
+class Base(TestCase):
+
+ def setUp(self):
+ self.credentials = {
+ 'id': 'my-hawk-id',
+ 'key': 'my hAwK sekret',
+ 'algorithm': 'sha256',
+ }
+
+ # This callable might be replaced by tests.
+ def seen_nonce(id, nonce, ts):
+ return False
+ self.seen_nonce = seen_nonce
+
+ def credentials_map(self, id):
+ # Pretend this is doing something more interesting like looking up
+        # credentials by ID in a database.
+ if self.credentials['id'] != id:
+            raise LookupError('No credentials for Hawk ID {id}'
+ .format(id=id))
+ return self.credentials
+
+
+class TestConfig(Base):
+
+ @raises(InvalidCredentials)
+ def test_no_id(self):
+ c = self.credentials.copy()
+ del c['id']
+ validate_credentials(c)
+
+ @raises(InvalidCredentials)
+ def test_no_key(self):
+ c = self.credentials.copy()
+ del c['key']
+ validate_credentials(c)
+
+ @raises(InvalidCredentials)
+ def test_no_algo(self):
+ c = self.credentials.copy()
+ del c['algorithm']
+ validate_credentials(c)
+
+ @raises(InvalidCredentials)
+ def test_no_credentials(self):
+ validate_credentials(None)
+
+ def test_non_dict_credentials(self):
+ class WeirdThing(object):
+ def __getitem__(self, key):
+ return 'whatever'
+ validate_credentials(WeirdThing())
+
+
+class TestSender(Base):
+
+ def setUp(self):
+ super(TestSender, self).setUp()
+ self.url = 'http://site.com/foo?bar=1'
+
+ def Sender(self, method='GET', **kw):
+ credentials = kw.pop('credentials', self.credentials)
+ kw.setdefault('content', '')
+ kw.setdefault('content_type', '')
+ sender = Sender(credentials, self.url, method, **kw)
+ return sender
+
+ def receive(self, request_header, url=None, method='GET', **kw):
+ credentials_map = kw.pop('credentials_map', self.credentials_map)
+ kw.setdefault('content', '')
+ kw.setdefault('content_type', '')
+ kw.setdefault('seen_nonce', self.seen_nonce)
+ return Receiver(credentials_map, request_header,
+ url or self.url, method, **kw)
+
+ def test_get_ok(self):
+ method = 'GET'
+ sn = self.Sender(method=method)
+ self.receive(sn.request_header, method=method)
+
+ def test_post_ok(self):
+ method = 'POST'
+ sn = self.Sender(method=method)
+ self.receive(sn.request_header, method=method)
+
+ def test_post_content_ok(self):
+ method = 'POST'
+ content = 'foo=bar&baz=2'
+ sn = self.Sender(method=method, content=content)
+ self.receive(sn.request_header, method=method, content=content)
+
+ def test_post_content_type_ok(self):
+ method = 'POST'
+ content = '{"bar": "foobs"}'
+ content_type = 'application/json'
+ sn = self.Sender(method=method, content=content,
+ content_type=content_type)
+ self.receive(sn.request_header, method=method, content=content,
+ content_type=content_type)
+
+ def test_post_content_type_with_trailing_charset(self):
+ method = 'POST'
+ content = '{"bar": "foobs"}'
+ content_type = 'application/json; charset=utf8'
+ sn = self.Sender(method=method, content=content,
+ content_type=content_type)
+ self.receive(sn.request_header, method=method, content=content,
+ content_type='application/json; charset=other')
+
+ @raises(ValueError)
+ def test_missing_payload_details(self):
+ self.Sender(method='POST', content=None, content_type=None)
+
+ def test_skip_payload_hashing(self):
+ method = 'POST'
+ content = '{"bar": "foobs"}'
+ content_type = 'application/json'
+ sn = self.Sender(method=method, content=None, content_type=None,
+ always_hash_content=False)
+ self.receive(sn.request_header, method=method, content=content,
+ content_type=content_type,
+ accept_untrusted_content=True)
+
+ @raises(ValueError)
+ def test_cannot_skip_content_only(self):
+ self.Sender(method='POST', content=None,
+ content_type='application/json')
+
+ @raises(ValueError)
+ def test_cannot_skip_content_type_only(self):
+ self.Sender(method='POST', content='{"foo": "bar"}',
+ content_type=None)
+
+ @raises(MacMismatch)
+ def test_tamper_with_host(self):
+ sn = self.Sender()
+ self.receive(sn.request_header, url='http://TAMPERED-WITH.com')
+
+ @raises(MacMismatch)
+ def test_tamper_with_method(self):
+ sn = self.Sender(method='GET')
+ self.receive(sn.request_header, method='POST')
+
+ @raises(MacMismatch)
+ def test_tamper_with_path(self):
+ sn = self.Sender()
+ self.receive(sn.request_header,
+ url='http://site.com/TAMPERED?bar=1')
+
+ @raises(MacMismatch)
+ def test_tamper_with_query(self):
+ sn = self.Sender()
+ self.receive(sn.request_header,
+ url='http://site.com/foo?bar=TAMPERED')
+
+ @raises(MacMismatch)
+ def test_tamper_with_scheme(self):
+ sn = self.Sender()
+ self.receive(sn.request_header, url='https://site.com/foo?bar=1')
+
+ @raises(MacMismatch)
+ def test_tamper_with_port(self):
+ sn = self.Sender()
+ self.receive(sn.request_header,
+ url='http://site.com:8000/foo?bar=1')
+
+ @raises(MisComputedContentHash)
+ def test_tamper_with_content(self):
+ sn = self.Sender()
+ self.receive(sn.request_header, content='stuff=nope')
+
+ def test_non_ascii_content(self):
+ content = u'Ivan Kristi\u0107'
+ sn = self.Sender(content=content)
+ self.receive(sn.request_header, content=content)
+
+ @raises(MacMismatch)
+ def test_tamper_with_content_type(self):
+ sn = self.Sender(method='POST')
+ self.receive(sn.request_header, content_type='application/json')
+
+ @raises(AlreadyProcessed)
+ def test_nonce_fail(self):
+
+ def seen_nonce(id, nonce, ts):
+ return True
+
+ sn = self.Sender()
+
+ self.receive(sn.request_header, seen_nonce=seen_nonce)
+
+ def test_nonce_ok(self):
+
+ def seen_nonce(id, nonce, ts):
+ return False
+
+ sn = self.Sender(seen_nonce=seen_nonce)
+ self.receive(sn.request_header)
+
+ @raises(TokenExpired)
+ def test_expired_ts(self):
+ now = utc_now() - 120
+ sn = self.Sender(_timestamp=now)
+ self.receive(sn.request_header)
+
+ def test_expired_exception_reports_localtime(self):
+ now = utc_now()
+ ts = now - 120
+ sn = self.Sender(_timestamp=ts) # force expiry
+
+ exc = None
+ with mock.patch('mohawk.base.utc_now') as fake_now:
+ fake_now.return_value = now
+ try:
+ self.receive(sn.request_header)
+ except:
+ etype, exc, tb = sys.exc_info()
+
+ eq_(type(exc), TokenExpired)
+ eq_(exc.localtime_in_seconds, now)
+
+ def test_localtime_offset(self):
+ now = utc_now() - 120
+ sn = self.Sender(_timestamp=now)
+ # Without an offset this will raise an expired exception.
+ self.receive(sn.request_header, localtime_offset_in_seconds=-120)
+
+ def test_localtime_skew(self):
+ now = utc_now() - 120
+ sn = self.Sender(_timestamp=now)
+ # Without an offset this will raise an expired exception.
+ self.receive(sn.request_header, timestamp_skew_in_seconds=120)
+
+ @raises(MacMismatch)
+ def test_hash_tampering(self):
+ sn = self.Sender()
+ header = sn.request_header.replace('hash="', 'hash="nope')
+ self.receive(header)
+
+ @raises(MacMismatch)
+ def test_bad_secret(self):
+ cfg = {
+ 'id': 'my-hawk-id',
+ 'key': 'INCORRECT; YOU FAIL',
+ 'algorithm': 'sha256',
+ }
+ sn = self.Sender(credentials=cfg)
+ self.receive(sn.request_header)
+
+ @raises(MacMismatch)
+ def test_unexpected_algorithm(self):
+ cr = self.credentials.copy()
+ cr['algorithm'] = 'sha512'
+ sn = self.Sender(credentials=cr)
+
+ # Validate with mismatched credentials (sha256).
+ self.receive(sn.request_header)
+
+ @raises(InvalidCredentials)
+ def test_invalid_credentials(self):
+ cfg = self.credentials.copy()
+        # Create invalid credentials.
+ del cfg['algorithm']
+
+ self.Sender(credentials=cfg)
+
+ @raises(CredentialsLookupError)
+ def test_unknown_id(self):
+ cr = self.credentials.copy()
+ cr['id'] = 'someone-else'
+ sn = self.Sender(credentials=cr)
+
+ self.receive(sn.request_header)
+
+ @raises(MacMismatch)
+ def test_bad_ext(self):
+ sn = self.Sender(ext='my external data')
+
+ header = sn.request_header.replace('my external data', 'TAMPERED')
+ self.receive(header)
+
+ def test_ext_with_quotes(self):
+ sn = self.Sender(ext='quotes=""')
+ self.receive(sn.request_header)
+ parsed = parse_authorization_header(sn.request_header)
+ eq_(parsed['ext'], 'quotes=""')
+
+ def test_ext_with_new_line(self):
+ sn = self.Sender(ext="new line \n in the middle")
+ self.receive(sn.request_header)
+ parsed = parse_authorization_header(sn.request_header)
+ eq_(parsed['ext'], "new line \n in the middle")
+
+ def test_ext_with_equality_sign(self):
+ sn = self.Sender(ext="foo=bar&foo2=bar2;foo3=bar3")
+ self.receive(sn.request_header)
+ parsed = parse_authorization_header(sn.request_header)
+ eq_(parsed['ext'], "foo=bar&foo2=bar2;foo3=bar3")
+
+ @raises(BadHeaderValue)
+ def test_ext_with_illegal_chars(self):
+ self.Sender(ext="something like \t is illegal")
+
+ @raises(BadHeaderValue)
+ def test_ext_with_illegal_unicode(self):
+ self.Sender(ext=u'Ivan Kristi\u0107')
+
+ @raises(BadHeaderValue)
+ def test_ext_with_illegal_utf8(self):
+ # This isn't allowed because the escaped byte chars are out of
+ # range. It's a little odd but this is what the Node lib does
+ # implicitly with its regex.
+ self.Sender(ext=u'Ivan Kristi\u0107'.encode('utf8'))
+
+ def test_app_ok(self):
+ app = 'custom-app'
+ sn = self.Sender(app=app)
+ self.receive(sn.request_header)
+ parsed = parse_authorization_header(sn.request_header)
+ eq_(parsed['app'], app)
+
+ @raises(MacMismatch)
+ def test_tampered_app(self):
+ app = 'custom-app'
+ sn = self.Sender(app=app)
+ header = sn.request_header.replace(app, 'TAMPERED-WITH')
+ self.receive(header)
+
+ def test_dlg_ok(self):
+ dlg = 'custom-dlg'
+ sn = self.Sender(dlg=dlg)
+ self.receive(sn.request_header)
+ parsed = parse_authorization_header(sn.request_header)
+ eq_(parsed['dlg'], dlg)
+
+ @raises(MacMismatch)
+ def test_tampered_dlg(self):
+ dlg = 'custom-dlg'
+ sn = self.Sender(dlg=dlg, app='some-app')
+ header = sn.request_header.replace(dlg, 'TAMPERED-WITH')
+ self.receive(header)
+
+
+class TestReceiver(Base):
+
+ def setUp(self):
+ super(TestReceiver, self).setUp()
+ self.url = 'http://site.com/'
+ self.sender = None
+ self.receiver = None
+
+ def receive(self, method='GET', **kw):
+ url = kw.pop('url', self.url)
+ sender = kw.pop('sender', None)
+ sender_kw = kw.pop('sender_kw', {})
+ sender_kw.setdefault('content', '')
+ sender_kw.setdefault('content_type', '')
+ sender_url = kw.pop('sender_url', url)
+
+ credentials_map = kw.pop('credentials_map',
+ lambda id: self.credentials)
+
+ if sender:
+ self.sender = sender
+ else:
+ self.sender = Sender(self.credentials, sender_url, method,
+ **sender_kw)
+
+ kw.setdefault('content', '')
+ kw.setdefault('content_type', '')
+ self.receiver = Receiver(credentials_map,
+ self.sender.request_header, url, method,
+ **kw)
+
+ def respond(self, **kw):
+ accept_kw = kw.pop('accept_kw', {})
+ accept_kw.setdefault('content', '')
+ accept_kw.setdefault('content_type', '')
+ receiver = kw.pop('receiver', self.receiver)
+
+ kw.setdefault('content', '')
+ kw.setdefault('content_type', '')
+ receiver.respond(**kw)
+ self.sender.accept_response(receiver.response_header, **accept_kw)
+
+ return receiver.response_header
+
+ @raises(InvalidCredentials)
+ def test_invalid_credentials_lookup(self):
+ # Return invalid credentials.
+ self.receive(credentials_map=lambda *a: {})
+
+ def test_get_ok(self):
+ method = 'GET'
+ self.receive(method=method)
+ self.respond()
+
+ def test_post_ok(self):
+ method = 'POST'
+ self.receive(method=method)
+ self.respond()
+
+ @raises(MisComputedContentHash)
+ def test_respond_with_wrong_content(self):
+ self.receive()
+ self.respond(content='real content',
+ accept_kw=dict(content='TAMPERED WITH'))
+
+ @raises(MisComputedContentHash)
+ def test_respond_with_wrong_content_type(self):
+ self.receive()
+ self.respond(content_type='text/html',
+ accept_kw=dict(content_type='application/json'))
+
+ @raises(MissingAuthorization)
+ def test_missing_authorization(self):
+ Receiver(lambda id: self.credentials, None, '/', 'GET')
+
+ @raises(MacMismatch)
+ def test_respond_with_wrong_url(self):
+ self.receive(url='http://fakesite.com')
+ wrong_receiver = self.receiver
+
+ self.receive(url='http://realsite.com')
+
+ self.respond(receiver=wrong_receiver)
+
+ @raises(MacMismatch)
+ def test_respond_with_wrong_method(self):
+ self.receive(method='GET')
+ wrong_receiver = self.receiver
+
+ self.receive(method='POST')
+
+ self.respond(receiver=wrong_receiver)
+
+ @raises(MacMismatch)
+ def test_respond_with_wrong_nonce(self):
+ self.receive(sender_kw=dict(nonce='another-nonce'))
+ wrong_receiver = self.receiver
+
+ self.receive()
+
+ # The nonce must match the one sent in the original request.
+ self.respond(receiver=wrong_receiver)
+
+ def test_respond_with_unhashed_content(self):
+ self.receive()
+
+ self.respond(always_hash_content=False, content=None,
+ content_type=None,
+ accept_kw=dict(accept_untrusted_content=True))
+
+ @raises(TokenExpired)
+ def test_respond_with_expired_ts(self):
+ self.receive()
+ hdr = self.receiver.respond(content='', content_type='')
+
+ with mock.patch('mohawk.base.utc_now') as fn:
+ fn.return_value = 0 # force an expiry
+ try:
+ self.sender.accept_response(hdr, content='', content_type='')
+ except TokenExpired:
+ etype, exc, tb = sys.exc_info()
+ hdr = parse_authorization_header(exc.www_authenticate)
+ calculated = calculate_ts_mac(fn(), self.credentials)
+ if isinstance(calculated, six.binary_type):
+ calculated = calculated.decode('ascii')
+ eq_(hdr['tsm'], calculated)
+ raise
+
+ def test_respond_with_bad_ts_skew_ok(self):
+ now = utc_now() - 120
+
+ self.receive()
+ hdr = self.receiver.respond(content='', content_type='')
+
+ with mock.patch('mohawk.base.utc_now') as fn:
+ fn.return_value = now
+
+ # Without an offset this will raise an expired exception.
+ self.sender.accept_response(hdr, content='', content_type='',
+ timestamp_skew_in_seconds=120)
+
+ def test_respond_with_ext(self):
+ self.receive()
+
+ ext = 'custom-ext'
+ self.respond(ext=ext)
+ header = parse_authorization_header(self.receiver.response_header)
+ eq_(header['ext'], ext)
+
+ @raises(MacMismatch)
+ def test_respond_with_wrong_app(self):
+ self.receive(sender_kw=dict(app='TAMPERED-WITH', dlg='delegation'))
+ self.receiver.respond(content='', content_type='')
+ wrong_receiver = self.receiver
+
+ self.receive(sender_kw=dict(app='real-app', dlg='delegation'))
+
+ self.sender.accept_response(wrong_receiver.response_header,
+ content='', content_type='')
+
+ @raises(MacMismatch)
+ def test_respond_with_wrong_dlg(self):
+ self.receive(sender_kw=dict(app='app', dlg='TAMPERED-WITH'))
+ self.receiver.respond(content='', content_type='')
+ wrong_receiver = self.receiver
+
+ self.receive(sender_kw=dict(app='app', dlg='real-dlg'))
+
+ self.sender.accept_response(wrong_receiver.response_header,
+ content='', content_type='')
+
+ @raises(MacMismatch)
+ def test_receive_wrong_method(self):
+ self.receive(method='GET')
+ wrong_sender = self.sender
+ self.receive(method='POST', sender=wrong_sender)
+
+ @raises(MacMismatch)
+ def test_receive_wrong_url(self):
+ self.receive(url='http://fakesite.com/')
+ wrong_sender = self.sender
+ self.receive(url='http://realsite.com/', sender=wrong_sender)
+
+ @raises(MisComputedContentHash)
+ def test_receive_wrong_content(self):
+ self.receive(sender_kw=dict(content='real request'),
+ content='real request')
+ wrong_sender = self.sender
+ self.receive(content='TAMPERED WITH', sender=wrong_sender)
+
+ @raises(MisComputedContentHash)
+ def test_unexpected_unhashed_content(self):
+ self.receive(sender_kw=dict(content=None, content_type=None,
+ always_hash_content=False))
+
+ @raises(ValueError)
+ def test_cannot_receive_empty_content_only(self):
+ content_type = 'text/plain'
+ self.receive(sender_kw=dict(content='<content>',
+ content_type=content_type),
+ content=None, content_type=content_type)
+
+ @raises(ValueError)
+ def test_cannot_receive_empty_content_type_only(self):
+ content = '<content>'
+ self.receive(sender_kw=dict(content=content,
+ content_type='text/plain'),
+ content=content, content_type=None)
+
+ @raises(MisComputedContentHash)
+ def test_receive_wrong_content_type(self):
+ self.receive(sender_kw=dict(content_type='text/html'),
+ content_type='text/html')
+ wrong_sender = self.sender
+
+ self.receive(content_type='application/json',
+ sender=wrong_sender)
+
+
+class TestSendAndReceive(Base):
+
+ def test(self):
+ credentials = {
+ 'id': 'some-id',
+ 'key': 'some secret',
+ 'algorithm': 'sha256'
+ }
+
+ url = 'https://my-site.com/'
+ method = 'POST'
+
+ # The client sends a request with a Hawk header.
+ content = 'foo=bar&baz=nooz'
+ content_type = 'application/x-www-form-urlencoded'
+
+ sender = Sender(credentials,
+ url, method,
+ content=content,
+ content_type=content_type)
+
+ # The server receives a request and authorizes access.
+ receiver = Receiver(lambda id: credentials,
+ sender.request_header,
+ url, method,
+ content=content,
+ content_type=content_type)
+
+ # The server responds with a similar Hawk header.
+ content = 'we are friends'
+ content_type = 'text/plain'
+ receiver.respond(content=content,
+ content_type=content_type)
+
+ # The client receives a response and authorizes access.
+ sender.accept_response(receiver.response_header,
+ content=content,
+ content_type=content_type)
+
+
+class TestBewit(Base):
+
+ # Test cases copied from
+ # https://github.com/hueniverse/hawk/blob/492632da51ecedd5f59ce96f081860ad24ce6532/test/uri.js
+
+ def setUp(self):
+ self.credentials = {
+ 'id': '123456',
+ 'key': '2983d45yun89q',
+ 'algorithm': 'sha256',
+ }
+
+ def make_credential_lookup(self, credentials_map):
+ # Helper function to make a lookup function given a dictionary of
+ # credentials
+ def lookup(client_id):
+ # Will raise a KeyError if missing; which is a subclass of
+ # LookupError
+ return credentials_map[client_id]
+ return lookup
+
+ def test_bewit(self):
+ res = Resource(url='https://example.com/somewhere/over/the/rainbow',
+ method='GET', credentials=self.credentials,
+ timestamp=1356420407 + 300,
+ nonce='',
+ )
+ bewit = get_bewit(res)
+
+ expected = '123456\\1356420707\\IGYmLgIqLrCe8CxvKPs4JlWIA+UjWJJouwgARiVhCAg=\\'
+ eq_(b64decode(bewit).decode('ascii'), expected)
+
+ def test_bewit_with_binary_id(self):
+ # Check for exceptions in get_bewit call with binary id
+ binary_credentials = self.credentials.copy()
+ binary_credentials['id'] = binary_credentials['id'].encode('ascii')
+ res = Resource(url='https://example.com/somewhere/over/the/rainbow',
+ method='GET', credentials=binary_credentials,
+ timestamp=1356420407 + 300,
+ nonce='',
+ )
+ get_bewit(res)
+
+ def test_bewit_with_ext(self):
+ res = Resource(url='https://example.com/somewhere/over/the/rainbow',
+ method='GET', credentials=self.credentials,
+ timestamp=1356420407 + 300,
+ nonce='',
+ ext='xandyandz'
+ )
+ bewit = get_bewit(res)
+
+ expected = '123456\\1356420707\\kscxwNR2tJpP1T1zDLNPbB5UiKIU9tOSJXTUdG7X9h8=\\xandyandz'
+ eq_(b64decode(bewit).decode('ascii'), expected)
+
+ def test_bewit_with_ext_and_backslashes(self):
+ credentials = self.credentials
+ credentials['id'] = '123\\456'
+ res = Resource(url='https://example.com/somewhere/over/the/rainbow',
+ method='GET', credentials=self.credentials,
+ timestamp=1356420407 + 300,
+ nonce='',
+ ext='xand\\yandz'
+ )
+ bewit = get_bewit(res)
+
+ expected = '123456\\1356420707\\b82LLIxG5UDkaChLU953mC+SMrbniV1sb8KiZi9cSsc=\\xand\\yandz'
+ eq_(b64decode(bewit).decode('ascii'), expected)
+
+ def test_bewit_with_port(self):
+ res = Resource(url='https://example.com:8080/somewhere/over/the/rainbow',
+ method='GET', credentials=self.credentials,
+ timestamp=1356420407 + 300, nonce='', ext='xandyandz')
+ bewit = get_bewit(res)
+
+ expected = '123456\\1356420707\\hZbJ3P2cKEo4ky0C8jkZAkRyCZueg4WSNbxV7vq3xHU=\\xandyandz'
+ eq_(b64decode(bewit).decode('ascii'), expected)
+
+ @raises(ValueError)
+ def test_bewit_with_nonce(self):
+ res = Resource(url='https://example.com/somewhere/over/the/rainbow',
+ method='GET', credentials=self.credentials,
+ timestamp=1356420407 + 300,
+ nonce='n1')
+ get_bewit(res)
+
+ @raises(ValueError)
+ def test_bewit_invalid_method(self):
+ res = Resource(url='https://example.com:8080/somewhere/over/the/rainbow',
+ method='POST', credentials=self.credentials,
+ timestamp=1356420407 + 300, nonce='')
+ get_bewit(res)
+
+ def test_strip_bewit(self):
+ bewit = b'123456\\1356420707\\IGYmLgIqLrCe8CxvKPs4JlWIA+UjWJJouwgARiVhCAg=\\'
+ bewit = urlsafe_b64encode(bewit).decode('ascii')
+ url = "https://example.com/somewhere/over/the/rainbow?bewit={bewit}".format(bewit=bewit)
+
+ raw_bewit, stripped_url = strip_bewit(url)
+ self.assertEquals(raw_bewit, bewit)
+ self.assertEquals(stripped_url, "https://example.com/somewhere/over/the/rainbow")
+
+ @raises(InvalidBewit)
+ def test_strip_url_without_bewit(self):
+ url = "https://example.com/somewhere/over/the/rainbow"
+ strip_bewit(url)
+
+ def test_parse_bewit(self):
+ bewit = b'123456\\1356420707\\IGYmLgIqLrCe8CxvKPs4JlWIA+UjWJJouwgARiVhCAg=\\'
+ bewit = urlsafe_b64encode(bewit).decode('ascii')
+ bewit = parse_bewit(bewit)
+ self.assertEquals(bewit.id, '123456')
+ self.assertEquals(bewit.expiration, '1356420707')
+ self.assertEquals(bewit.mac, 'IGYmLgIqLrCe8CxvKPs4JlWIA+UjWJJouwgARiVhCAg=')
+ self.assertEquals(bewit.ext, '')
+
+ def test_parse_bewit_with_ext(self):
+ bewit = b'123456\\1356420707\\IGYmLgIqLrCe8CxvKPs4JlWIA+UjWJJouwgARiVhCAg=\\xandyandz'
+ bewit = urlsafe_b64encode(bewit).decode('ascii')
+ bewit = parse_bewit(bewit)
+ self.assertEquals(bewit.id, '123456')
+ self.assertEquals(bewit.expiration, '1356420707')
+ self.assertEquals(bewit.mac, 'IGYmLgIqLrCe8CxvKPs4JlWIA+UjWJJouwgARiVhCAg=')
+ self.assertEquals(bewit.ext, 'xandyandz')
+
+ def test_parse_bewit_with_ext_and_backslashes(self):
+ bewit = b'123456\\1356420707\\IGYmLgIqLrCe8CxvKPs4JlWIA+UjWJJouwgARiVhCAg=\\xand\\yandz'
+ bewit = urlsafe_b64encode(bewit).decode('ascii')
+ bewit = parse_bewit(bewit)
+ self.assertEquals(bewit.id, '123456')
+ self.assertEquals(bewit.expiration, '1356420707')
+ self.assertEquals(bewit.mac, 'IGYmLgIqLrCe8CxvKPs4JlWIA+UjWJJouwgARiVhCAg=')
+ self.assertEquals(bewit.ext, 'xand\\yandz')
+
+ @raises(InvalidBewit)
+ def test_parse_invalid_bewit_with_only_one_part(self):
+ bewit = b'12345'
+ bewit = urlsafe_b64encode(bewit).decode('ascii')
+ bewit = parse_bewit(bewit)
+
+ @raises(InvalidBewit)
+ def test_parse_invalid_bewit_with_only_two_parts(self):
+ bewit = b'1\\2'
+ bewit = urlsafe_b64encode(bewit).decode('ascii')
+ bewit = parse_bewit(bewit)
+
+ def test_validate_bewit(self):
+ bewit = b'123456\\1356420707\\IGYmLgIqLrCe8CxvKPs4JlWIA+UjWJJouwgARiVhCAg=\\'
+ bewit = urlsafe_b64encode(bewit).decode('ascii')
+ url = "https://example.com/somewhere/over/the/rainbow?bewit={bewit}".format(bewit=bewit)
+ credential_lookup = self.make_credential_lookup({
+ self.credentials['id']: self.credentials,
+ })
+ self.assertTrue(check_bewit(url, credential_lookup=credential_lookup, now=1356420407 + 10))
+
+ def test_validate_bewit_with_ext(self):
+ bewit = b'123456\\1356420707\\kscxwNR2tJpP1T1zDLNPbB5UiKIU9tOSJXTUdG7X9h8=\\xandyandz'
+ bewit = urlsafe_b64encode(bewit).decode('ascii')
+ url = "https://example.com/somewhere/over/the/rainbow?bewit={bewit}".format(bewit=bewit)
+ credential_lookup = self.make_credential_lookup({
+ self.credentials['id']: self.credentials,
+ })
+ self.assertTrue(check_bewit(url, credential_lookup=credential_lookup, now=1356420407 + 10))
+
+ def test_validate_bewit_with_ext_and_backslashes(self):
+ bewit = b'123456\\1356420707\\b82LLIxG5UDkaChLU953mC+SMrbniV1sb8KiZi9cSsc=\\xand\\yandz'
+ bewit = urlsafe_b64encode(bewit).decode('ascii')
+ url = "https://example.com/somewhere/over/the/rainbow?bewit={bewit}".format(bewit=bewit)
+ credential_lookup = self.make_credential_lookup({
+ self.credentials['id']: self.credentials,
+ })
+ self.assertTrue(check_bewit(url, credential_lookup=credential_lookup, now=1356420407 + 10))
+
+ @raises(TokenExpired)
+ def test_validate_expired_bewit(self):
+ bewit = b'123456\\1356420707\\IGYmLgIqLrCe8CxvKPs4JlWIA+UjWJJouwgARiVhCAg=\\'
+ bewit = urlsafe_b64encode(bewit).decode('ascii')
+ url = "https://example.com/somewhere/over/the/rainbow?bewit={bewit}".format(bewit=bewit)
+ credential_lookup = self.make_credential_lookup({
+ self.credentials['id']: self.credentials,
+ })
+ check_bewit(url, credential_lookup=credential_lookup, now=1356420407 + 1000)
+
+ @raises(CredentialsLookupError)
+ def test_validate_bewit_with_unknown_credentials(self):
+ bewit = b'123456\\1356420707\\IGYmLgIqLrCe8CxvKPs4JlWIA+UjWJJouwgARiVhCAg=\\'
+ bewit = urlsafe_b64encode(bewit).decode('ascii')
+ url = "https://example.com/somewhere/over/the/rainbow?bewit={bewit}".format(bewit=bewit)
+ credential_lookup = self.make_credential_lookup({
+ 'other_id': self.credentials,
+ })
+ check_bewit(url, credential_lookup=credential_lookup, now=1356420407 + 10)
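
The seen_nonce stubs used throughout these tests hint at what a real deployment needs: a store that remembers (sender_id, nonce, timestamp) tuples long enough to reject replays. A minimal in-memory sketch of that callable; a production setup would use a shared cache with a TTL rather than a plain set:

    _seen = set()

    def seen_nonce(sender_id, nonce, timestamp):
        # Return True if this nonce was already used, recording it otherwise.
        key = (sender_id, nonce, timestamp)
        if key in _seen:
            return True
        _seen.add(key)
        return False
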
diff --git a/third_party/python/mohawk/mohawk/util.py b/third_party/python/mohawk/mohawk/util.py
new file mode 100644
index 0000000000..46a28e94ce
--- /dev/null
+++ b/third_party/python/mohawk/mohawk/util.py
@@ -0,0 +1,267 @@
+from base64 import b64encode, urlsafe_b64encode
+import calendar
+import hashlib
+import hmac
+import logging
+import math
+import os
+import pprint
+import re
+import sys
+import time
+
+import six
+
+from .exc import (
+ BadHeaderValue,
+ HawkFail,
+ InvalidCredentials)
+
+
+HAWK_VER = 1
+log = logging.getLogger(__name__)
+allowable_header_keys = set(['id', 'ts', 'tsm', 'nonce', 'hash',
+ 'error', 'ext', 'mac', 'app', 'dlg'])
+
+
+def validate_credentials(creds):
+ if not hasattr(creds, '__getitem__'):
+ raise InvalidCredentials('credentials must be a dict-like object')
+ try:
+ creds['id']
+ creds['key']
+ creds['algorithm']
+ except KeyError:
+ etype, val, tb = sys.exc_info()
+ raise InvalidCredentials('{etype}: {val}'
+ .format(etype=etype, val=val))
+
+
+def random_string(length):
+ """Generates a random string for a given length."""
+ # this conservatively gets 8*length bits and then returns 6*length of
+ # them. Grabbing (6/8)*length bits could lose some entropy off the ends.
+ return urlsafe_b64encode(os.urandom(length))[:length]
+
+
+def calculate_payload_hash(payload, algorithm, content_type):
+ """Calculates a hash for a given payload."""
+ p_hash = hashlib.new(algorithm)
+
+ parts = []
+ parts.append('hawk.' + str(HAWK_VER) + '.payload\n')
+ parts.append(parse_content_type(content_type) + '\n')
+ parts.append(payload or '')
+ parts.append('\n')
+
+ for i, p in enumerate(parts):
+ # Make sure we are about to hash binary strings.
+ if not isinstance(p, six.binary_type):
+ p = p.encode('utf8')
+ p_hash.update(p)
+ parts[i] = p
+
+ log.debug('calculating payload hash from:\n{parts}'
+ .format(parts=pprint.pformat(parts)))
+
+ return b64encode(p_hash.digest())
+
+
+def calculate_mac(mac_type, resource, content_hash):
+ """Calculates a message authorization code (MAC)."""
+ normalized = normalize_string(mac_type, resource, content_hash)
+ log.debug(u'normalized resource for mac calc: {norm}'
+ .format(norm=normalized))
+ digestmod = getattr(hashlib, resource.credentials['algorithm'])
+
+ # Make sure we are about to hash binary strings.
+
+ if not isinstance(normalized, six.binary_type):
+ normalized = normalized.encode('utf8')
+ key = resource.credentials['key']
+ if not isinstance(key, six.binary_type):
+ key = key.encode('ascii')
+
+ result = hmac.new(key, normalized, digestmod)
+ return b64encode(result.digest())
+
+
+def calculate_ts_mac(ts, credentials):
+ """Calculates a message authorization code (MAC) for a timestamp."""
+ normalized = ('hawk.{hawk_ver}.ts\n{ts}\n'
+ .format(hawk_ver=HAWK_VER, ts=ts))
+ log.debug(u'normalized resource for ts mac calc: {norm}'
+ .format(norm=normalized))
+ digestmod = getattr(hashlib, credentials['algorithm'])
+
+ if not isinstance(normalized, six.binary_type):
+ normalized = normalized.encode('utf8')
+ key = credentials['key']
+ if not isinstance(key, six.binary_type):
+ key = key.encode('ascii')
+
+ result = hmac.new(key, normalized, digestmod)
+ return b64encode(result.digest())
+
+
+def normalize_string(mac_type, resource, content_hash):
+ """Serializes mac_type and resource into a HAWK string."""
+
+ normalized = [
+ 'hawk.' + str(HAWK_VER) + '.' + mac_type,
+ normalize_header_attr(resource.timestamp),
+ normalize_header_attr(resource.nonce),
+ normalize_header_attr(resource.method or ''),
+ normalize_header_attr(resource.name or ''),
+ normalize_header_attr(resource.host),
+ normalize_header_attr(resource.port),
+ normalize_header_attr(content_hash or '')
+ ]
+
+ # The blank lines are important. They follow what the Node Hawk lib does.
+
+ normalized.append(normalize_header_attr(resource.ext or ''))
+
+ if resource.app:
+ normalized.append(normalize_header_attr(resource.app))
+ normalized.append(normalize_header_attr(resource.dlg or ''))
+
+ # Add trailing new line.
+ normalized.append('')
+
+ normalized = '\n'.join(normalized)
+
+ return normalized
+
+
+def parse_content_type(content_type):
+ """Cleans up content_type."""
+ if content_type:
+ return content_type.split(';')[0].strip().lower()
+ else:
+ return ''
+
+
+def parse_authorization_header(auth_header):
+ """
+ Example Authorization header:
+
+ 'Hawk id="dh37fgj492je", ts="1367076201", nonce="NPHgnG", ext="and
+ welcome!", mac="CeWHy4d9kbLGhDlkyw2Nh3PJ7SDOdZDa267KH4ZaNMY="'
+ """
+ attributes = {}
+
+ # Make sure we have a unicode object for consistency.
+ if isinstance(auth_header, six.binary_type):
+ auth_header = auth_header.decode('utf8')
+
+ parts = auth_header.split(',')
+ auth_scheme_parts = parts[0].split(' ')
+ if 'hawk' != auth_scheme_parts[0].lower():
+ raise HawkFail("Unknown scheme '{scheme}' when parsing header"
+ .format(scheme=auth_scheme_parts[0].lower()))
+
+ # Replace 'Hawk key: value' with 'key: value'
+ # which matches the rest of parts
+ parts[0] = auth_scheme_parts[1]
+
+ for part in parts:
+ attr_parts = part.split('=')
+ key = attr_parts[0].strip()
+ if key not in allowable_header_keys:
+ raise HawkFail("Unknown Hawk key '{key}' when parsing header"
+ .format(key=key))
+
+ if len(attr_parts) > 2:
+ attr_parts[1] = '='.join(attr_parts[1:])
+
+        # Chop off quotation marks
+ value = attr_parts[1]
+
+ if attr_parts[1].find('"') == 0:
+ value = attr_parts[1][1:]
+
+ if value.find('"') > -1:
+ value = value[0:-1]
+
+ validate_header_attr(value, name=key)
+ value = unescape_header_attr(value)
+ attributes[key] = value
+
+ log.debug('parsed Hawk header: {header} into: \n{parsed}'
+ .format(header=auth_header, parsed=pprint.pformat(attributes)))
+ return attributes
+
+
+def strings_match(a, b):
+    # Constant-time string comparison; mitigates side-channel attacks.
+ if len(a) != len(b):
+ return False
+ result = 0
+
+ def byte_ints(buf):
+ for ch in buf:
+ # In Python 3, if we have a bytes object, iterating it will
+ # already get the integer value. In older pythons, we need
+ # to use ord().
+ if not isinstance(ch, int):
+ ch = ord(ch)
+ yield ch
+
+ for x, y in zip(byte_ints(a), byte_ints(b)):
+ result |= x ^ y
+ return result == 0
+
+
+def utc_now(offset_in_seconds=0.0):
+ # TODO: add support for SNTP server? See ntplib module.
+ return int(math.floor(calendar.timegm(time.gmtime()) +
+ float(offset_in_seconds)))
+
+
+# Allowed value characters:
+# !#$%&'()*+,-./:;<=>?@[]^_`{|}~ and space, a-z, A-Z, 0-9, \, "
+_header_attribute_chars = re.compile(
+ r"^[ a-zA-Z0-9_\!#\$%&'\(\)\*\+,\-\./\:;<\=>\?@\[\]\^`\{\|\}~\"\\]*$")
+
+
+def validate_header_attr(val, name=None):
+ if not _header_attribute_chars.match(val):
+ raise BadHeaderValue('header value name={name} value={val} '
+ 'contained an illegal character'
+ .format(name=name or '?', val=repr(val)))
+
+
+def escape_header_attr(val):
+
+ # Ensure we are working with Unicode for consistency.
+ if isinstance(val, six.binary_type):
+ val = val.decode('utf8')
+
+ # Escape quotes and slash like the hawk reference code.
+ val = val.replace('\\', '\\\\')
+ val = val.replace('"', '\\"')
+ val = val.replace('\n', '\\n')
+ return val
+
+
+def unescape_header_attr(val):
+ # Un-do the hawk escaping.
+ val = val.replace('\\n', '\n')
+ val = val.replace('\\\\', '\\').replace('\\"', '"')
+ return val
+
+
+def prepare_header_val(val):
+ val = escape_header_attr(val)
+ validate_header_attr(val)
+ return val
+
+
+def normalize_header_attr(val):
+ if not val:
+ val = ''
+
+ # Normalize like the hawk reference code.
+ val = escape_header_attr(val)
+ return val
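
Most of these helpers are internal, but calculate_payload_hash and parse_authorization_header are easy to exercise directly, which can help when debugging a MAC mismatch. An illustrative sketch reusing the header format from the docstring above (the id/ts/nonce/mac values are placeholders):

    from mohawk.util import calculate_payload_hash, parse_authorization_header

    # Hash a request body the same way Sender/Receiver do internally; the
    # charset suffix is dropped by parse_content_type() before hashing.
    payload_hash = calculate_payload_hash(
        '{"bar": "foobs"}', 'sha256', 'application/json; charset=utf8')

    # Split a Hawk Authorization header into its attributes.
    attrs = parse_authorization_header(
        'Hawk id="dh37fgj492je", ts="1367076201", nonce="NPHgnG", '
        'mac="CeWHy4d9kbLGhDlkyw2Nh3PJ7SDOdZDa267KH4ZaNMY="')
    print(attrs['id'], attrs['ts'], attrs['nonce'])
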
diff --git a/third_party/python/mohawk/setup.cfg b/third_party/python/mohawk/setup.cfg
new file mode 100644
index 0000000000..861a9f5542
--- /dev/null
+++ b/third_party/python/mohawk/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/third_party/python/mohawk/setup.py b/third_party/python/mohawk/setup.py
new file mode 100644
index 0000000000..ddaf9026c2
--- /dev/null
+++ b/third_party/python/mohawk/setup.py
@@ -0,0 +1,25 @@
+from setuptools import setup, find_packages
+
+
+setup(name='mohawk',
+ version='0.3.4',
+ description="Library for Hawk HTTP authorization",
+ long_description='',
+ author='Kumar McMillan, Austin King',
+ author_email='kumar.mcmillan@gmail.com',
+ license='MPL 2.0 (Mozilla Public License)',
+ url='https://github.com/kumar303/mohawk',
+ include_package_data=True,
+ classifiers=[
+ 'Intended Audience :: Developers',
+ 'Natural Language :: English',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3.3',
+ 'Topic :: Internet :: WWW/HTTP',
+ ],
+ packages=find_packages(exclude=['tests']),
+ install_requires=['six'])
diff --git a/third_party/python/more-itertools/LICENSE b/third_party/python/more-itertools/LICENSE
new file mode 100644
index 0000000000..0a523bece3
--- /dev/null
+++ b/third_party/python/more-itertools/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2012 Erik Rose
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/python/more-itertools/MANIFEST.in b/third_party/python/more-itertools/MANIFEST.in
new file mode 100644
index 0000000000..21d6742586
--- /dev/null
+++ b/third_party/python/more-itertools/MANIFEST.in
@@ -0,0 +1,9 @@
+include README.rst
+include LICENSE
+include docs/*.rst
+include docs/Makefile
+include docs/make.bat
+include docs/conf.py
+include docs/_static/*
+include fabfile.py
+include tox.ini
diff --git a/third_party/python/more-itertools/PKG-INFO b/third_party/python/more-itertools/PKG-INFO
new file mode 100644
index 0000000000..95d111bf6b
--- /dev/null
+++ b/third_party/python/more-itertools/PKG-INFO
@@ -0,0 +1,430 @@
+Metadata-Version: 1.1
+Name: more-itertools
+Version: 4.3.0
+Summary: More routines for operating on iterables, beyond itertools
+Home-page: https://github.com/erikrose/more-itertools
+Author: Erik Rose
+Author-email: erikrose@grinchcentral.com
+License: MIT
+Description: ==============
+ More Itertools
+ ==============
+
+ .. image:: https://coveralls.io/repos/github/erikrose/more-itertools/badge.svg?branch=master
+ :target: https://coveralls.io/github/erikrose/more-itertools?branch=master
+
+ Python's ``itertools`` library is a gem - you can compose elegant solutions
+ for a variety of problems with the functions it provides. In ``more-itertools``
+ we collect additional building blocks, recipes, and routines for working with
+ Python iterables.
+
+ ----
+
+ +------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+ | Grouping | `chunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.chunked>`_, |
+ | | `sliced <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sliced>`_, |
+ | | `distribute <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distribute>`_, |
+ | | `divide <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.divide>`_, |
+ | | `split_at <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_at>`_, |
+ | | `split_before <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_before>`_, |
+ | | `split_after <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_after>`_, |
+ | | `bucket <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.bucket>`_, |
+ | | `grouper <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.grouper>`_, |
+ | | `partition <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partition>`_ |
+ +------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+ | Lookahead and lookback | `spy <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.spy>`_, |
+ | | `peekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.peekable>`_, |
+ | | `seekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.seekable>`_ |
+ +------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+ | Windowing | `windowed <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed>`_, |
+ | | `stagger <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.stagger>`_, |
+ | | `pairwise <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.pairwise>`_ |
+ +------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+ | Augmenting | `count_cycle <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.count_cycle>`_, |
+ | | `intersperse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.intersperse>`_, |
+ | | `padded <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padded>`_, |
+ | | `adjacent <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.adjacent>`_, |
+ | | `groupby_transform <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.groupby_transform>`_, |
+ | | `padnone <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padnone>`_, |
+ | | `ncycles <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ncycles>`_ |
+ +------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+ | Combining | `collapse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.collapse>`_, |
+ | | `sort_together <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sort_together>`_, |
+ | | `interleave <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave>`_, |
+ | | `interleave_longest <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave_longest>`_, |
+ | | `collate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.collate>`_, |
+ | | `zip_offset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_offset>`_, |
+ | | `dotproduct <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dotproduct>`_, |
+ | | `flatten <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.flatten>`_, |
+ | | `roundrobin <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.roundrobin>`_, |
+ | | `prepend <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.prepend>`_ |
+ +------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+ | Summarizing | `ilen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ilen>`_, |
+ | | `first <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first>`_, |
+ | | `last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.last>`_, |
+ | | `one <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.one>`_, |
+ | | `unique_to_each <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_to_each>`_, |
+ | | `locate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.locate>`_, |
+ | | `rlocate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rlocate>`_, |
+ | | `consecutive_groups <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consecutive_groups>`_, |
+ | | `exactly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.exactly_n>`_, |
+ | | `run_length <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.run_length>`_, |
+ | | `map_reduce <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_reduce>`_, |
+ | | `all_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_equal>`_, |
+ | | `first_true <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first_true>`_, |
+ | | `nth <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth>`_, |
+ | | `quantify <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.quantify>`_ |
+ +------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+ | Selecting | `islice_extended <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.islice_extended>`_, |
+ | | `strip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.strip>`_, |
+ | | `lstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.lstrip>`_, |
+ | | `rstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rstrip>`_, |
+ | | `take <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.take>`_, |
+ | | `tail <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tail>`_, |
+ | | `unique_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_everseen>`_,                                                                                                    |
+ | | `unique_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_justseen>`_ |
+ +------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+ | Combinatorics | `distinct_permutations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_permutations>`_, |
+ | | `circular_shifts <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.circular_shifts>`_, |
+ | | `powerset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.powerset>`_, |
+ | | `random_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_product>`_, |
+ | | `random_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_permutation>`_, |
+ | | `random_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination>`_, |
+ | | `random_combination_with_replacement <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination_with_replacement>`_, |
+ | | `nth_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_combination>`_ |
+ +------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+ | Wrapping | `always_iterable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_iterable>`_, |
+ | | `consumer <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consumer>`_, |
+ | | `with_iter <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.with_iter>`_, |
+ | | `iter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iter_except>`_ |
+ +------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+ | Others | `replace <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.replace>`_, |
+ | | `numeric_range <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.numeric_range>`_, |
+ | | `always_reversible <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_reversible>`_, |
+ | | `side_effect <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.side_effect>`_, |
+ | | `iterate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iterate>`_, |
+ | | `difference <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.difference>`_, |
+ | | `make_decorator <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.make_decorator>`_, |
+ | | `SequenceView <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.SequenceView>`_, |
+ | | `consume <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consume>`_, |
+ | | `accumulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.accumulate>`_, |
+ | | `tabulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tabulate>`_, |
+ | | `repeatfunc <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeatfunc>`_ |
+ +------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
+
+ Getting started
+ ===============
+
+ To get started, install the library with `pip <https://pip.pypa.io/en/stable/>`_:
+
+ .. code-block:: shell
+
+ pip install more-itertools
+
+ The recipes from the `itertools docs <https://docs.python.org/3/library/itertools.html#itertools-recipes>`_
+ are included in the top-level package:
+
+ .. code-block:: python
+
+ >>> from more_itertools import flatten
+ >>> iterable = [(0, 1), (2, 3)]
+ >>> list(flatten(iterable))
+ [0, 1, 2, 3]
+
+ Several new recipes are available as well:
+
+ .. code-block:: python
+
+ >>> from more_itertools import chunked
+ >>> iterable = [0, 1, 2, 3, 4, 5, 6, 7, 8]
+ >>> list(chunked(iterable, 3))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ >>> from more_itertools import spy
+ >>> iterable = (x * x for x in range(1, 6))
+ >>> head, iterable = spy(iterable, n=3)
+ >>> list(head)
+ [1, 4, 9]
+ >>> list(iterable)
+ [1, 4, 9, 16, 25]
+
+
+
+ For the full listing of functions, see the `API documentation <https://more-itertools.readthedocs.io/en/latest/api.html>`_.
+
+ Development
+ ===========
+
+ ``more-itertools`` is maintained by `@erikrose <https://github.com/erikrose>`_
+ and `@bbayles <https://github.com/bbayles>`_, with help from `many others <https://github.com/erikrose/more-itertools/graphs/contributors>`_.
+ If you have a problem or suggestion, please file a bug or pull request in this
+ repository. Thanks for contributing!
+
+
+ Version History
+ ===============
+
+
+
+ 4.3.0
+ -----
+
+ * New itertools:
+ * last (thanks to tmshn)
+ * replace (thanks to pylang)
+ * rlocate (thanks to jferard and pylang)
+
+ * Improvements to existing itertools:
+ * locate can now search for multiple items
+
+ * Other changes:
+ * The docs now include a nice table of tools (thanks MSeifert04)
+
+ 4.2.0
+ -----
+
+ * New itertools:
+ * map_reduce (thanks to pylang)
+ * prepend (from the `Python 3.7 docs <https://docs.python.org/3.7/library/itertools.html#itertools-recipes>`_)
+
+ * Improvements to existing itertools:
+ * bucket now complies with PEP 479 (thanks to irmen)
+
+ * Other changes:
+ * Python 3.7 is now supported (thanks to irmen)
+ * Python 3.3 is no longer supported
+ * The test suite no longer requires third-party modules to run
+ * The API docs now include links to source code
+
+ 4.1.0
+ -----
+
+ * New itertools:
+ * split_at (thanks to michael-celani)
+ * circular_shifts (thanks to hiqua)
+ * make_decorator - see the blog post `Yo, I heard you like decorators <https://sites.google.com/site/bbayles/index/decorator_factory>`_
+ for a tour (thanks to pylang)
+ * always_reversible (thanks to michael-celani)
+ * nth_combination (from the `Python 3.7 docs <https://docs.python.org/3.7/library/itertools.html#itertools-recipes>`_)
+
+ * Improvements to existing itertools:
+ * seekable now has an ``elements`` method to return cached items.
+ * The performance tradeoffs between roundrobin and
+ interleave_longest are now documented (thanks michael-celani,
+ pylang, and MSeifert04)
+
+ 4.0.1
+ -----
+
+ * No code changes - this release fixes how the docs display on PyPI.
+
+ 4.0.0
+ -----
+
+ * New itertools:
+ * consecutive_groups (Based on the example in the `Python 2.4 docs <https://docs.python.org/release/2.4.4/lib/itertools-example.html>`_)
+ * seekable (If you're looking for how to "reset" an iterator,
+ you're in luck!)
+ * exactly_n (thanks to michael-celani)
+ * run_length.encode and run_length.decode
+ * difference
+
+ * Improvements to existing itertools:
+ * The number of items between filler elements in intersperse can
+ now be specified (thanks to pylang)
+ * distinct_permutations and peekable got some minor
+ adjustments (thanks to MSeifert04)
+ * always_iterable now returns an iterator object. It also now
+ allows different types to be considered iterable (thanks to jaraco)
+ * bucket can now limit the keys it stores in memory
+ * one now allows for custom exceptions (thanks to kalekundert)
+
+ * Other changes:
+ * A few typos were fixed (thanks to EdwardBetts)
+ * All tests can now be run with ``python setup.py test``
+
+ The major version update is due to the change in the return value of always_iterable.
+ It now always returns iterator objects:
+
+ .. code-block:: python
+
+ >>> from more_itertools import always_iterable
+ # Non-iterable objects are wrapped with iter(tuple(obj))
+ >>> always_iterable(12345)
+ <tuple_iterator object at 0x7fb24c9488d0>
+ >>> list(always_iterable(12345))
+ [12345]
+ # Iterable objects are wrapped with iter()
+ >>> always_iterable([1, 2, 3, 4, 5])
+ <list_iterator object at 0x7fb24c948c50>
+
+ 3.2.0
+ -----
+
+ * New itertools:
+ * lstrip, rstrip, and strip
+ (thanks to MSeifert04 and pylang)
+ * islice_extended
+ * Improvements to existing itertools:
+ * Some bugs with slicing peekable-wrapped iterables were fixed
+
+ 3.1.0
+ -----
+
+ * New itertools:
+ * numeric_range (Thanks to BebeSparkelSparkel and MSeifert04)
+ * count_cycle (Thanks to BebeSparkelSparkel)
+ * locate (Thanks to pylang and MSeifert04)
+ * Improvements to existing itertools:
+ * A few itertools are now slightly faster due to some function
+ optimizations. (Thanks to MSeifert04)
+ * The docs have been substantially revised with installation notes,
+ categories for library functions, links, and more. (Thanks to pylang)
+
+
+ 3.0.0
+ -----
+
+ * Removed itertools:
+ * ``context`` has been removed due to a design flaw - see below for
+ replacement options. (thanks to NeilGirdhar)
+ * Improvements to existing itertools:
+ * ``side_effect`` now supports ``before`` and ``after`` keyword
+ arguments. (Thanks to yardsale8)
+ * PyPy and PyPy3 are now supported.
+
+ The major version change is due to the removal of the ``context`` function.
+ Replace it with standard ``with`` statement context management:
+
+ .. code-block:: python
+
+ # Don't use context() anymore
+ file_obj = StringIO()
+ consume(print(x, file=f) for f in context(file_obj) for x in u'123')
+
+ # Use a with statement instead
+ file_obj = StringIO()
+ with file_obj as f:
+ consume(print(x, file=f) for x in u'123')
+
+ 2.6.0
+ -----
+
+ * New itertools:
+ * ``adjacent`` and ``groupby_transform`` (Thanks to diazona)
+ * ``always_iterable`` (Thanks to jaraco)
+ * (Removed in 3.0.0) ``context`` (Thanks to yardsale8)
+ * ``divide`` (Thanks to mozbhearsum)
+ * Improvements to existing itertools:
+ * ``ilen`` is now slightly faster. (Thanks to wbolster)
+ * ``peekable`` can now prepend items to an iterable. (Thanks to diazona)
+
+ 2.5.0
+ -----
+
+ * New itertools:
+ * ``distribute`` (Thanks to mozbhearsum and coady)
+ * ``sort_together`` (Thanks to clintval)
+ * ``stagger`` and ``zip_offset`` (Thanks to joshbode)
+ * ``padded``
+ * Improvements to existing itertools:
+ * ``peekable`` now handles negative indexes and slices with negative
+ components properly.
+ * ``intersperse`` is now slightly faster. (Thanks to pylang)
+ * ``windowed`` now accepts a ``step`` keyword argument.
+ (Thanks to pylang)
+ * Python 3.6 is now supported.
+
+ 2.4.1
+ -----
+
+ * Move docs 100% to readthedocs.io.
+
+ 2.4
+ -----
+
+ * New itertools:
+ * ``accumulate``, ``all_equal``, ``first_true``, ``partition``, and
+ ``tail`` from the itertools documentation.
+ * ``bucket`` (Thanks to Rosuav and cvrebert)
+ * ``collapse`` (Thanks to abarnet)
+ * ``interleave`` and ``interleave_longest`` (Thanks to abarnet)
+ * ``side_effect`` (Thanks to nvie)
+ * ``sliced`` (Thanks to j4mie and coady)
+ * ``split_before`` and ``split_after`` (Thanks to astronouth7303)
+ * ``spy`` (Thanks to themiurgo and mathieulongtin)
+ * Improvements to existing itertools:
+ * ``chunked`` is now simpler and more friendly to garbage collection.
+ (Contributed by coady, with thanks to piskvorky)
+ * ``collate`` now delegates to ``heapq.merge`` when possible.
+ (Thanks to kmike and julianpistorius)
+ * ``peekable``-wrapped iterables are now indexable and sliceable.
+ Iterating through ``peekable``-wrapped iterables is also faster.
+ * ``one`` and ``unique_to_each`` have been simplified.
+ (Thanks to coady)
+
+
+ 2.3
+ -----
+
+ * Added ``one`` from ``jaraco.util.itertools``. (Thanks, jaraco!)
+ * Added ``distinct_permutations`` and ``unique_to_each``. (Contributed by
+ bbayles)
+ * Added ``windowed``. (Contributed by bbayles, with thanks to buchanae,
+ jaraco, and abarnert)
+ * Simplified the implementation of ``chunked``. (Thanks, nvie!)
+ * Python 3.5 is now supported. Python 2.6 is no longer supported.
+ * Python 3 is now supported directly; there is no 2to3 step.
+
+ 2.2
+ -----
+
+ * Added ``iterate`` and ``with_iter``. (Thanks, abarnert!)
+
+ 2.1
+ -----
+
+ * Added (tested!) implementations of the recipes from the itertools
+ documentation. (Thanks, Chris Lonnen!)
+ * Added ``ilen``. (Thanks for the inspiration, Matt Basta!)
+
+ 2.0
+ -----
+
+ * ``chunked`` now returns lists rather than tuples. After all, they're
+ homogeneous. This slightly backward-incompatible change is the reason for
+ the major version bump.
+ * Added ``@consumer``.
+ * Improved test machinery.
+
+ 1.1
+ -----
+
+ * Added ``first`` function.
+ * Added Python 3 support.
+ * Added a default arg to ``peekable.peek()``.
+ * Noted how to easily test whether a peekable iterator is exhausted.
+ * Rewrote documentation.
+
+ 1.0
+ -----
+
+ * Initial release, with ``collate``, ``peekable``, and ``chunked``. Could
+ really use better docs.
+Keywords: itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Software Development :: Libraries
diff --git a/third_party/python/more-itertools/README.rst b/third_party/python/more-itertools/README.rst
new file mode 100644
index 0000000000..d918eb684f
--- /dev/null
+++ b/third_party/python/more-itertools/README.rst
@@ -0,0 +1,154 @@
+==============
+More Itertools
+==============
+
+.. image:: https://coveralls.io/repos/github/erikrose/more-itertools/badge.svg?branch=master
+ :target: https://coveralls.io/github/erikrose/more-itertools?branch=master
+
+Python's ``itertools`` library is a gem - you can compose elegant solutions
+for a variety of problems with the functions it provides. In ``more-itertools``
+we collect additional building blocks, recipes, and routines for working with
+Python iterables.
+
+----
+
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Grouping | `chunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.chunked>`_, |
+| | `sliced <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sliced>`_, |
+| | `distribute <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distribute>`_, |
+| | `divide <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.divide>`_, |
+| | `split_at <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_at>`_, |
+| | `split_before <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_before>`_, |
+| | `split_after <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_after>`_, |
+| | `bucket <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.bucket>`_, |
+| | `grouper <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.grouper>`_, |
+| | `partition <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partition>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Lookahead and lookback | `spy <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.spy>`_, |
+| | `peekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.peekable>`_, |
+| | `seekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.seekable>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Windowing | `windowed <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed>`_, |
+| | `stagger <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.stagger>`_, |
+| | `pairwise <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.pairwise>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Augmenting | `count_cycle <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.count_cycle>`_, |
+| | `intersperse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.intersperse>`_, |
+| | `padded <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padded>`_, |
+| | `adjacent <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.adjacent>`_, |
+| | `groupby_transform <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.groupby_transform>`_, |
+| | `padnone <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padnone>`_, |
+| | `ncycles <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ncycles>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combining | `collapse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.collapse>`_, |
+| | `sort_together <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sort_together>`_, |
+| | `interleave <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave>`_, |
+| | `interleave_longest <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave_longest>`_, |
+| | `collate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.collate>`_, |
+| | `zip_offset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_offset>`_, |
+| | `dotproduct <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dotproduct>`_, |
+| | `flatten <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.flatten>`_, |
+| | `roundrobin <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.roundrobin>`_, |
+| | `prepend <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.prepend>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Summarizing | `ilen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ilen>`_, |
+| | `first <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first>`_, |
+| | `last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.last>`_, |
+| | `one <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.one>`_, |
+| | `unique_to_each <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_to_each>`_, |
+| | `locate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.locate>`_, |
+| | `rlocate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rlocate>`_, |
+| | `consecutive_groups <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consecutive_groups>`_, |
+| | `exactly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.exactly_n>`_, |
+| | `run_length <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.run_length>`_, |
+| | `map_reduce <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_reduce>`_, |
+| | `all_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_equal>`_, |
+| | `first_true <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first_true>`_, |
+| | `nth <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth>`_, |
+| | `quantify <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.quantify>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Selecting | `islice_extended <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.islice_extended>`_, |
+| | `strip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.strip>`_, |
+| | `lstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.lstrip>`_, |
+| | `rstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rstrip>`_, |
+| | `take <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.take>`_, |
+| | `tail <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tail>`_, |
+| | `unique_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_everseen>`_,                                                                                                    |
+| | `unique_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_justseen>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combinatorics | `distinct_permutations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_permutations>`_, |
+| | `circular_shifts <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.circular_shifts>`_, |
+| | `powerset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.powerset>`_, |
+| | `random_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_product>`_, |
+| | `random_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_permutation>`_, |
+| | `random_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination>`_, |
+| | `random_combination_with_replacement <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination_with_replacement>`_, |
+| | `nth_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_combination>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Wrapping | `always_iterable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_iterable>`_, |
+| | `consumer <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consumer>`_, |
+| | `with_iter <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.with_iter>`_, |
+| | `iter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iter_except>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Others | `replace <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.replace>`_, |
+| | `numeric_range <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.numeric_range>`_, |
+| | `always_reversible <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_reversible>`_, |
+| | `side_effect <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.side_effect>`_, |
+| | `iterate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iterate>`_, |
+| | `difference <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.difference>`_, |
+| | `make_decorator <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.make_decorator>`_, |
+| | `SequenceView <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.SequenceView>`_, |
+| | `consume <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consume>`_, |
+| | `accumulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.accumulate>`_, |
+| | `tabulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tabulate>`_, |
+| | `repeatfunc <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeatfunc>`_ |
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
+
+Getting started
+===============
+
+To get started, install the library with `pip <https://pip.pypa.io/en/stable/>`_:
+
+.. code-block:: shell
+
+ pip install more-itertools
+
+The recipes from the `itertools docs <https://docs.python.org/3/library/itertools.html#itertools-recipes>`_
+are included in the top-level package:
+
+.. code-block:: python
+
+ >>> from more_itertools import flatten
+ >>> iterable = [(0, 1), (2, 3)]
+ >>> list(flatten(iterable))
+ [0, 1, 2, 3]
+
+Several new recipes are available as well:
+
+.. code-block:: python
+
+ >>> from more_itertools import chunked
+ >>> iterable = [0, 1, 2, 3, 4, 5, 6, 7, 8]
+ >>> list(chunked(iterable, 3))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ >>> from more_itertools import spy
+ >>> iterable = (x * x for x in range(1, 6))
+ >>> head, iterable = spy(iterable, n=3)
+ >>> list(head)
+ [1, 4, 9]
+ >>> list(iterable)
+ [1, 4, 9, 16, 25]
+
+
+
+For the full listing of functions, see the `API documentation <https://more-itertools.readthedocs.io/en/latest/api.html>`_.
+
+Development
+===========
+
+``more-itertools`` is maintained by `@erikrose <https://github.com/erikrose>`_
+and `@bbayles <https://github.com/bbayles>`_, with help from `many others <https://github.com/erikrose/more-itertools/graphs/contributors>`_.
+If you have a problem or suggestion, please file a bug or pull request in this
+repository. Thanks for contributing!
diff --git a/third_party/python/more-itertools/more_itertools/__init__.py b/third_party/python/more-itertools/more_itertools/__init__.py
new file mode 100644
index 0000000000..bba462c3db
--- /dev/null
+++ b/third_party/python/more-itertools/more_itertools/__init__.py
@@ -0,0 +1,2 @@
+from more_itertools.more import * # noqa
+from more_itertools.recipes import * # noqa
diff --git a/third_party/python/more-itertools/more_itertools/more.py b/third_party/python/more-itertools/more_itertools/more.py
new file mode 100644
index 0000000000..05e851eefa
--- /dev/null
+++ b/third_party/python/more-itertools/more_itertools/more.py
@@ -0,0 +1,2211 @@
+from __future__ import print_function
+
+from collections import Counter, defaultdict, deque
+from functools import partial, wraps
+from heapq import merge
+from itertools import (
+ chain,
+ compress,
+ count,
+ cycle,
+ dropwhile,
+ groupby,
+ islice,
+ repeat,
+ starmap,
+ takewhile,
+ tee
+)
+from operator import itemgetter, lt, gt, sub
+from sys import maxsize, version_info
+try:
+ from collections.abc import Sequence
+except ImportError:
+ from collections import Sequence
+
+from six import binary_type, string_types, text_type
+from six.moves import filter, map, range, zip, zip_longest
+
+from .recipes import consume, flatten, take
+
+__all__ = [
+ 'adjacent',
+ 'always_iterable',
+ 'always_reversible',
+ 'bucket',
+ 'chunked',
+ 'circular_shifts',
+ 'collapse',
+ 'collate',
+ 'consecutive_groups',
+ 'consumer',
+ 'count_cycle',
+ 'difference',
+ 'distinct_permutations',
+ 'distribute',
+ 'divide',
+ 'exactly_n',
+ 'first',
+ 'groupby_transform',
+ 'ilen',
+ 'interleave_longest',
+ 'interleave',
+ 'intersperse',
+ 'islice_extended',
+ 'iterate',
+ 'last',
+ 'locate',
+ 'lstrip',
+ 'make_decorator',
+ 'map_reduce',
+ 'numeric_range',
+ 'one',
+ 'padded',
+ 'peekable',
+ 'replace',
+ 'rlocate',
+ 'rstrip',
+ 'run_length',
+ 'seekable',
+ 'SequenceView',
+ 'side_effect',
+ 'sliced',
+ 'sort_together',
+ 'split_at',
+ 'split_after',
+ 'split_before',
+ 'spy',
+ 'stagger',
+ 'strip',
+ 'unique_to_each',
+ 'windowed',
+ 'with_iter',
+ 'zip_offset',
+]
+
+_marker = object()
+
+
+def chunked(iterable, n):
+ """Break *iterable* into lists of length *n*:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6], 3))
+ [[1, 2, 3], [4, 5, 6]]
+
+ If the length of *iterable* is not evenly divisible by *n*, the last
+ returned list will be shorter:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6, 7, 8], 3))
+ [[1, 2, 3], [4, 5, 6], [7, 8]]
+
+ To use a fill-in value instead, see the :func:`grouper` recipe.
+
+ :func:`chunked` is useful for splitting up a computation on a large number
+ of keys into batches, to be pickled and sent off to worker processes. One
+ example is operations on rows in MySQL, which does not implement
+ server-side cursors properly and would otherwise load the entire dataset
+ into RAM on the client.
+
+ """
+ return iter(partial(take, n, iter(iterable)), [])
+
+
+def first(iterable, default=_marker):
+ """Return the first item of *iterable*, or *default* if *iterable* is
+ empty.
+
+ >>> first([0, 1, 2, 3])
+ 0
+ >>> first([], 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+
+ :func:`first` is useful when you have a generator of expensive-to-retrieve
+ values and want any arbitrary one. It is marginally shorter than
+ ``next(iter(iterable), default)``.
+
+ """
+ try:
+ return next(iter(iterable))
+ except StopIteration:
+ # I'm on the edge about raising ValueError instead of StopIteration. At
+ # the moment, ValueError wins, because the caller could conceivably
+ # want to do something different with flow control when I raise the
+ # exception, and it's weird to explicitly catch StopIteration.
+ if default is _marker:
+ raise ValueError('first() was called on an empty iterable, and no '
+ 'default value was provided.')
+ return default
+
+
+def last(iterable, default=_marker):
+ """Return the last item of *iterable*, or *default* if *iterable* is
+ empty.
+
+ >>> last([0, 1, 2, 3])
+ 3
+ >>> last([], 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+ """
+ try:
+ try:
+ # Try to access the last item directly
+ return iterable[-1]
+ except (TypeError, AttributeError, KeyError):
+ # If not slice-able, iterate entirely using length-1 deque
+ return deque(iterable, maxlen=1)[0]
+ except IndexError: # If the iterable was empty
+ if default is _marker:
+ raise ValueError('last() was called on an empty iterable, and no '
+ 'default value was provided.')
+ return default
+
+
+class peekable(object):
+ """Wrap an iterator to allow lookahead and prepending elements.
+
+ Call :meth:`peek` on the result to get the value that will be returned
+ by :func:`next`. This won't advance the iterator:
+
+ >>> p = peekable(['a', 'b'])
+ >>> p.peek()
+ 'a'
+ >>> next(p)
+ 'a'
+
+ Pass :meth:`peek` a default value to return that instead of raising
+ ``StopIteration`` when the iterator is exhausted.
+
+ >>> p = peekable([])
+ >>> p.peek('hi')
+ 'hi'
+
+ peekables also offer a :meth:`prepend` method, which "inserts" items
+ at the head of the iterable:
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> p.peek()
+ 11
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ peekables can be indexed. Index 0 is the item that will be returned by
+ :func:`next`, index 1 is the item after that, and so on.
+ The values up to the given index will be cached.
+
+ >>> p = peekable(['a', 'b', 'c', 'd'])
+ >>> p[0]
+ 'a'
+ >>> p[1]
+ 'b'
+ >>> next(p)
+ 'a'
+
+ Negative indexes are supported, but be aware that they will cache the
+ remaining items in the source iterator, which may require significant
+ storage.
+
+ To check whether a peekable is exhausted, check its truth value:
+
+ >>> p = peekable(['a', 'b'])
+ >>> if p: # peekable has items
+ ... list(p)
+ ['a', 'b']
+ >>> if not p: # peekable is exhausted
+ ... list(p)
+ []
+
+ """
+ def __init__(self, iterable):
+ self._it = iter(iterable)
+ self._cache = deque()
+
+ def __iter__(self):
+ return self
+
+ def __bool__(self):
+ try:
+ self.peek()
+ except StopIteration:
+ return False
+ return True
+
+ def __nonzero__(self):
+ # For Python 2 compatibility
+ return self.__bool__()
+
+ def peek(self, default=_marker):
+ """Return the item that will be next returned from ``next()``.
+
+ Return ``default`` if there are no items left. If ``default`` is not
+ provided, raise ``StopIteration``.
+
+ """
+ if not self._cache:
+ try:
+ self._cache.append(next(self._it))
+ except StopIteration:
+ if default is _marker:
+ raise
+ return default
+ return self._cache[0]
+
+ def prepend(self, *items):
+ """Stack up items to be the next ones returned from ``next()`` or
+ ``self.peek()``. The items will be returned in
+ first in, first out order::
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ It is possible, by prepending items, to "resurrect" a peekable that
+ previously raised ``StopIteration``.
+
+ >>> p = peekable([])
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+ >>> p.prepend(1)
+ >>> next(p)
+ 1
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+
+ """
+ self._cache.extendleft(reversed(items))
+
+ def __next__(self):
+ if self._cache:
+ return self._cache.popleft()
+
+ return next(self._it)
+
+ next = __next__ # For Python 2 compatibility
+
+ def _get_slice(self, index):
+ # Normalize the slice's arguments
+ step = 1 if (index.step is None) else index.step
+ if step > 0:
+ start = 0 if (index.start is None) else index.start
+ stop = maxsize if (index.stop is None) else index.stop
+ elif step < 0:
+ start = -1 if (index.start is None) else index.start
+ stop = (-maxsize - 1) if (index.stop is None) else index.stop
+ else:
+ raise ValueError('slice step cannot be zero')
+
+ # If either the start or stop index is negative, we'll need to cache
+ # the rest of the iterable in order to slice from the right side.
+ if (start < 0) or (stop < 0):
+ self._cache.extend(self._it)
+ # Otherwise we'll need to find the rightmost index and cache to that
+ # point.
+ else:
+ n = min(max(start, stop) + 1, maxsize)
+ cache_len = len(self._cache)
+ if n >= cache_len:
+ self._cache.extend(islice(self._it, n - cache_len))
+
+ return list(self._cache)[index]
+
+ def __getitem__(self, index):
+ if isinstance(index, slice):
+ return self._get_slice(index)
+
+ cache_len = len(self._cache)
+ if index < 0:
+ self._cache.extend(self._it)
+ elif index >= cache_len:
+ self._cache.extend(islice(self._it, index + 1 - cache_len))
+
+ return self._cache[index]
+
+
+def _collate(*iterables, **kwargs):
+ """Helper for ``collate()``, called when the user is using the ``reverse``
+ or ``key`` keyword arguments on Python versions below 3.5.
+
+ """
+ key = kwargs.pop('key', lambda a: a)
+ reverse = kwargs.pop('reverse', False)
+
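+ # Repeatedly yield the smallest (or largest, if reverse=True) head value
+ # among the wrapped iterables, dropping each iterable once it is empty.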
+ min_or_max = partial(max if reverse else min, key=itemgetter(0))
+ peekables = [peekable(it) for it in iterables]
+ peekables = [p for p in peekables if p] # Kill empties.
+ while peekables:
+ _, p = min_or_max((key(p.peek()), p) for p in peekables)
+ yield next(p)
+ peekables = [x for x in peekables if x]
+
+
+def collate(*iterables, **kwargs):
+ """Return a sorted merge of the items from each of several already-sorted
+ *iterables*.
+
+ >>> list(collate('ACDZ', 'AZ', 'JKL'))
+ ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z']
+
+ Works lazily, keeping only the next value from each iterable in memory. Use
+ :func:`collate` to, for example, perform an n-way mergesort of items that
+ don't fit in memory.
+
+ If a *key* function is specified, the iterables will be sorted according
+ to its result:
+
+ >>> key = lambda s: int(s) # Sort by numeric value, not by string
+ >>> list(collate(['1', '10'], ['2', '11'], key=key))
+ ['1', '2', '10', '11']
+
+
+ If the *iterables* are sorted in descending order, set *reverse* to
+ ``True``:
+
+ >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True))
+ [5, 4, 3, 2, 1, 0]
+
+ If the elements of the passed-in iterables are out of order, you might get
+ unexpected results.
+
+ On Python 2.7, this function delegates to :func:`heapq.merge` if neither
+ of the keyword arguments are specified. On Python 3.5+, this function
+ is an alias for :func:`heapq.merge`.
+
+ """
+ if not kwargs:
+ return merge(*iterables)
+
+ return _collate(*iterables, **kwargs)
+
+
+# If using Python version 3.5 or greater, heapq.merge() will be faster than
+# collate - use that instead.
+if version_info >= (3, 5, 0):
+ _collate_docstring = collate.__doc__
+ collate = partial(merge)
+ collate.__doc__ = _collate_docstring
+
+
+def consumer(func):
+ """Decorator that automatically advances a PEP-342-style "reverse iterator"
+ to its first yield point so you don't have to call ``next()`` on it
+ manually.
+
+ >>> @consumer
+ ... def tally():
+ ... i = 0
+ ... while True:
+ ... print('Thing number %s is %s.' % (i, (yield)))
+ ... i += 1
+ ...
+ >>> t = tally()
+ >>> t.send('red')
+ Thing number 0 is red.
+ >>> t.send('fish')
+ Thing number 1 is fish.
+
+ Without the decorator, you would have to call ``next(t)`` before
+ ``t.send()`` could be used.
+
+ """
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ gen = func(*args, **kwargs)
+ next(gen)
+ return gen
+ return wrapper
+
+
+def ilen(iterable):
+ """Return the number of items in *iterable*.
+
+ >>> ilen(x for x in range(1000000) if x % 3 == 0)
+ 333334
+
+ This consumes the iterable, so handle with care.
+
+ """
+ # maxlen=1 only stores the last item in the deque
+ d = deque(enumerate(iterable, 1), maxlen=1)
+ # since we started enumerate at 1,
+ # the first item of the last pair will be the length of the iterable
+ # (assuming there were items)
+ return d[0][0] if d else 0
+
+
+def iterate(func, start):
+ """Return ``start``, ``func(start)``, ``func(func(start))``, ...
+
+ >>> from itertools import islice
+ >>> list(islice(iterate(lambda x: 2*x, 1), 10))
+ [1, 2, 4, 8, 16, 32, 64, 128, 256, 512]
+
+ """
+ while True:
+ yield start
+ start = func(start)
+
+
+def with_iter(context_manager):
+ """Wrap an iterable in a ``with`` statement, so it closes once exhausted.
+
+ For example, this will close the file when the iterator is exhausted::
+
+ upper_lines = (line.upper() for line in with_iter(open('foo')))
+
+ Any context manager which returns an iterable is a candidate for
+ ``with_iter``.
+
+ """
+ with context_manager as iterable:
+ for item in iterable:
+ yield item
+
+
+def one(iterable, too_short=None, too_long=None):
+ """Return the first item from *iterable*, which is expected to contain only
+ that item. Raise an exception if *iterable* is empty or has more than one
+ item.
+
+ :func:`one` is useful for ensuring that an iterable contains only one item.
+ For example, it can be used to retrieve the result of a database query
+ that is expected to return a single row.
+
+ If *iterable* is empty, ``ValueError`` will be raised. You may specify a
+ different exception with the *too_short* keyword:
+
+ >>> it = []
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: too few items in iterable (expected 1)
+ >>> too_short = IndexError('too few items')
+ >>> one(it, too_short=too_short) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ IndexError: too few items
+
+ Similarly, if *iterable* contains more than one item, ``ValueError`` will
+ be raised. You may specify a different exception with the *too_long*
+ keyword:
+
+ >>> it = ['too', 'many']
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: too many items in iterable (expected 1)
+ >>> too_long = RuntimeError
+ >>> one(it, too_long=too_long) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+
+ Note that :func:`one` attempts to advance *iterable* twice to ensure there
+ is only one item. If there is more than one, both items will be discarded.
+ See :func:`spy` or :func:`peekable` to check iterable contents less
+ destructively.
+
+ """
+ it = iter(iterable)
+
+ try:
+ value = next(it)
+ except StopIteration:
+ raise too_short or ValueError('too few items in iterable (expected 1)')
+
+ try:
+ next(it)
+ except StopIteration:
+ pass
+ else:
+ raise too_long or ValueError('too many items in iterable (expected 1)')
+
+ return value
+
+
+def distinct_permutations(iterable):
+ """Yield successive distinct permutations of the elements in *iterable*.
+
+ >>> sorted(distinct_permutations([1, 0, 1]))
+ [(0, 1, 1), (1, 0, 1), (1, 1, 0)]
+
+ Equivalent to ``set(permutations(iterable))``, except duplicates are not
+ generated and thrown away. For larger input sequences this is much more
+ efficient.
+
+ Duplicate permutations arise when there are duplicated elements in the
+ input iterable. The number of items returned is
+ `n! / (x_1! * x_2! * ... * x_n!)`, where `n` is the total number of
+ items input, and each `x_i` is the count of a distinct item in the input
+ sequence.
+
+ """
+ def perm_unique_helper(item_counts, perm, i):
+ """Internal helper function
+
+ :arg item_counts: Stores the unique items in ``iterable`` and how many
+ times they are repeated
+ :arg perm: The permutation that is being built for output
+ :arg i: The index of the permutation being modified
+
+ The output permutations are built up recursively; the distinct items
+ are placed until their repetitions are exhausted.
+ """
+ if i < 0:
+ yield tuple(perm)
+ else:
+ for item in item_counts:
+ if item_counts[item] <= 0:
+ continue
+ perm[i] = item
+ item_counts[item] -= 1
+ for x in perm_unique_helper(item_counts, perm, i - 1):
+ yield x
+ item_counts[item] += 1
+
+ item_counts = Counter(iterable)
+ length = sum(item_counts.values())
+
+ return perm_unique_helper(item_counts, [None] * length, length - 1)
+
+
+def intersperse(e, iterable, n=1):
+ """Intersperse filler element *e* among the items in *iterable*, leaving
+ *n* items between each filler element.
+
+ >>> list(intersperse('!', [1, 2, 3, 4, 5]))
+ [1, '!', 2, '!', 3, '!', 4, '!', 5]
+
+ >>> list(intersperse(None, [1, 2, 3, 4, 5], n=2))
+ [1, 2, None, 3, 4, None, 5]
+
+ """
+ if n == 0:
+ raise ValueError('n must be > 0')
+ elif n == 1:
+ # interleave(repeat(e), iterable) -> e, x_0, e, x_1, e, x_2...
+ # islice(..., 1, None) -> x_0, e, x_1, e, x_2...
+ return islice(interleave(repeat(e), iterable), 1, None)
+ else:
+ # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]...
+ # islice(..., 1, None) -> [x_0, x_1], [e], [x_2, x_3]...
+ # flatten(...) -> x_0, x_1, e, x_2, x_3...
+ filler = repeat([e])
+ chunks = chunked(iterable, n)
+ return flatten(islice(interleave(filler, chunks), 1, None))
+
+
+def unique_to_each(*iterables):
+ """Return the elements from each of the input iterables that aren't in the
+ other input iterables.
+
+ For example, suppose you have a set of packages, each with a set of
+ dependencies::
+
+ {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}}
+
+ If you remove one package, which dependencies can also be removed?
+
+ If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not
+ associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for
+ ``pkg_2``, and ``D`` is only needed for ``pkg_3``::
+
+ >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'})
+ [['A'], ['C'], ['D']]
+
+ If there are duplicates in one input iterable that aren't in the others
+ they will be duplicated in the output. Input order is preserved::
+
+ >>> unique_to_each("mississippi", "missouri")
+ [['p', 'p'], ['o', 'u', 'r']]
+
+ It is assumed that the elements of each iterable are hashable.
+
+ """
+ pool = [list(it) for it in iterables]
+ counts = Counter(chain.from_iterable(map(set, pool)))
+ uniques = {element for element in counts if counts[element] == 1}
+ return [list(filter(uniques.__contains__, it)) for it in pool]
+
+
+def windowed(seq, n, fillvalue=None, step=1):
+ """Return a sliding window of width *n* over the given iterable.
+
+ >>> all_windows = windowed([1, 2, 3, 4, 5], 3)
+ >>> list(all_windows)
+ [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
+
+ When the window is larger than the iterable, *fillvalue* is used in place
+ of missing values::
+
+ >>> list(windowed([1, 2, 3], 4))
+ [(1, 2, 3, None)]
+
+ Each window will advance in increments of *step*:
+
+ >>> list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2))
+ [(1, 2, 3), (3, 4, 5), (5, 6, '!')]
+
+ """
+ if n < 0:
+ raise ValueError('n must be >= 0')
+ if n == 0:
+ yield tuple()
+ return
+ if step < 1:
+ raise ValueError('step must be >= 1')
+
+ it = iter(seq)
+ window = deque([], n)
+ append = window.append
+
+ # Initial deque fill
+ for _ in range(n):
+ append(next(it, fillvalue))
+ yield tuple(window)
+
+ # Appending new items to the right causes old items to fall off the left
+ i = 0
+ for item in it:
+ append(item)
+ i = (i + 1) % step
+ if i % step == 0:
+ yield tuple(window)
+
+ # If there are items from the iterable in the window, pad with the given
+ # value and emit them.
+ if (i % step) and (step - i < n):
+ for _ in range(step - i):
+ append(fillvalue)
+ yield tuple(window)
+
+
+class bucket(object):
+ """Wrap *iterable* and return an object that buckets it iterable into
+ child iterables based on a *key* function.
+
+ >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
+ >>> s = bucket(iterable, key=lambda x: x[0])
+ >>> a_iterable = s['a']
+ >>> next(a_iterable)
+ 'a1'
+ >>> next(a_iterable)
+ 'a2'
+ >>> list(s['b'])
+ ['b1', 'b2', 'b3']
+
+ The original iterable will be advanced and its items will be cached until
+ they are used by the child iterables. This may require significant storage.
+
+ By default, attempting to select a bucket to which no items belong will
+ exhaust the iterable and cache all values.
+ If you specify a *validator* function, selected buckets will instead be
+ checked against it.
+
+ >>> from itertools import count
+ >>> it = count(1, 2) # Infinite sequence of odd numbers
+ >>> key = lambda x: x % 10 # Bucket by last digit
+ >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only
+ >>> s = bucket(it, key=key, validator=validator)
+ >>> 2 in s
+ False
+ >>> list(s[2])
+ []
+
+ """
+ def __init__(self, iterable, key, validator=None):
+ self._it = iter(iterable)
+ self._key = key
+ self._cache = defaultdict(deque)
+ self._validator = validator or (lambda x: True)
+
+ def __contains__(self, value):
+ if not self._validator(value):
+ return False
+
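+ # Pull one matching item from the child iterator; if it exists, push it
+ # back onto the cache so the membership test does not lose it.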
+ try:
+ item = next(self[value])
+ except StopIteration:
+ return False
+ else:
+ self._cache[value].appendleft(item)
+
+ return True
+
+ def _get_values(self, value):
+ """
+ Helper to yield items from the parent iterator that match *value*.
+ Items that don't match are stored in the local cache as they
+ are encountered.
+ """
+ while True:
+ # If we've cached some items that match the target value, emit
+ # the first one and evict it from the cache.
+ if self._cache[value]:
+ yield self._cache[value].popleft()
+ # Otherwise we need to advance the parent iterator to search for
+ # a matching item, caching the rest.
+ else:
+ while True:
+ try:
+ item = next(self._it)
+ except StopIteration:
+ return
+ item_value = self._key(item)
+ if item_value == value:
+ yield item
+ break
+ elif self._validator(item_value):
+ self._cache[item_value].append(item)
+
+ def __getitem__(self, value):
+ if not self._validator(value):
+ return iter(())
+
+ return self._get_values(value)
+
+
+def spy(iterable, n=1):
+ """Return a 2-tuple with a list containing the first *n* elements of
+ *iterable*, and an iterator with the same items as *iterable*.
+ This allows you to "look ahead" at the items in the iterable without
+ advancing it.
+
+ There is one item in the list by default:
+
+ >>> iterable = 'abcdefg'
+ >>> head, iterable = spy(iterable)
+ >>> head
+ ['a']
+ >>> list(iterable)
+ ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+
+ You may use unpacking to retrieve items instead of lists:
+
+ >>> (head,), iterable = spy('abcdefg')
+ >>> head
+ 'a'
+ >>> (first, second), iterable = spy('abcdefg', 2)
+ >>> first
+ 'a'
+ >>> second
+ 'b'
+
+ The number of items requested can be larger than the number of items in
+ the iterable:
+
+ >>> iterable = [1, 2, 3, 4, 5]
+ >>> head, iterable = spy(iterable, 10)
+ >>> head
+ [1, 2, 3, 4, 5]
+ >>> list(iterable)
+ [1, 2, 3, 4, 5]
+
+ """
+ it = iter(iterable)
+ head = take(n, it)
+
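+ # Re-attach the items consumed for the lookahead so the returned iterator
+ # still yields the original sequence from the start.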
+ return head, chain(head, it)
+
+
+def interleave(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ until the shortest is exhausted.
+
+ >>> list(interleave([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7]
+
+ For a version that doesn't terminate after the shortest iterable is
+ exhausted, see :func:`interleave_longest`.
+
+ """
+ return chain.from_iterable(zip(*iterables))
+
+
+def interleave_longest(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ skipping any that are exhausted.
+
+ >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7, 3, 8]
+
+ This function produces the same output as :func:`roundrobin`, but may
+ perform better for some inputs (in particular when the number of iterables
+ is large).
+
+ """
+ i = chain.from_iterable(zip_longest(*iterables, fillvalue=_marker))
+ return (x for x in i if x is not _marker)
+
+
+def collapse(iterable, base_type=None, levels=None):
+ """Flatten an iterable with multiple levels of nesting (e.g., a list of
+ lists of tuples) into non-iterable types.
+
+ >>> iterable = [(1, 2), ([3, 4], [[5], [6]])]
+ >>> list(collapse(iterable))
+ [1, 2, 3, 4, 5, 6]
+
+ String types are not considered iterable and will not be collapsed.
+ To avoid collapsing other types, specify *base_type*:
+
+ >>> iterable = ['ab', ('cd', 'ef'), ['gh', 'ij']]
+ >>> list(collapse(iterable, base_type=tuple))
+ ['ab', ('cd', 'ef'), 'gh', 'ij']
+
+ Specify *levels* to stop flattening after a certain level:
+
+ >>> iterable = [('a', ['b']), ('c', ['d'])]
+ >>> list(collapse(iterable)) # Fully flattened
+ ['a', 'b', 'c', 'd']
+ >>> list(collapse(iterable, levels=1)) # Only one level flattened
+ ['a', ['b'], 'c', ['d']]
+
+ """
+ def walk(node, level):
+ if (
+ ((levels is not None) and (level > levels)) or
+ isinstance(node, string_types) or
+ ((base_type is not None) and isinstance(node, base_type))
+ ):
+ yield node
+ return
+
+ try:
+ tree = iter(node)
+ except TypeError:
+ yield node
+ return
+ else:
+ for child in tree:
+ for x in walk(child, level + 1):
+ yield x
+
+ for x in walk(iterable, 0):
+ yield x
+
+
+def side_effect(func, iterable, chunk_size=None, before=None, after=None):
+ """Invoke *func* on each item in *iterable* (or on each *chunk_size* group
+ of items) before yielding the item.
+
+ `func` must be a function that takes a single argument. Its return value
+ will be discarded.
+
+ *before* and *after* are optional functions that take no arguments. They
+ will be executed before iteration starts and after it ends, respectively.
+
+ `side_effect` can be used for logging, updating progress bars, or anything
+ that is not functionally "pure."
+
+ Emitting a status message:
+
+ >>> from more_itertools import consume
+ >>> func = lambda item: print('Received {}'.format(item))
+ >>> consume(side_effect(func, range(2)))
+ Received 0
+ Received 1
+
+ Operating on chunks of items:
+
+ >>> pair_sums = []
+ >>> func = lambda chunk: pair_sums.append(sum(chunk))
+ >>> list(side_effect(func, [0, 1, 2, 3, 4, 5], 2))
+ [0, 1, 2, 3, 4, 5]
+ >>> list(pair_sums)
+ [1, 5, 9]
+
+ Writing to a file-like object:
+
+ >>> from io import StringIO
+ >>> from more_itertools import consume
+ >>> f = StringIO()
+ >>> func = lambda x: print(x, file=f)
+ >>> before = lambda: print(u'HEADER', file=f)
+ >>> after = f.close
+ >>> it = [u'a', u'b', u'c']
+ >>> consume(side_effect(func, it, before=before, after=after))
+ >>> f.closed
+ True
+
+ """
+ try:
+ if before is not None:
+ before()
+
+ if chunk_size is None:
+ for item in iterable:
+ func(item)
+ yield item
+ else:
+ for chunk in chunked(iterable, chunk_size):
+ func(chunk)
+ for item in chunk:
+ yield item
+ finally:
+ if after is not None:
+ after()
+
+
+def sliced(seq, n):
+ """Yield slices of length *n* from the sequence *seq*.
+
+ >>> list(sliced((1, 2, 3, 4, 5, 6), 3))
+ [(1, 2, 3), (4, 5, 6)]
+
+ If the length of the sequence is not divisible by the requested slice
+ length, the last slice will be shorter.
+
+ >>> list(sliced((1, 2, 3, 4, 5, 6, 7, 8), 3))
+ [(1, 2, 3), (4, 5, 6), (7, 8)]
+
+ This function will only work for iterables that support slicing.
+ For non-sliceable iterables, see :func:`chunked`.
+
+ """
+ return takewhile(bool, (seq[i: i + n] for i in count(0, n)))
+
+
+def split_at(iterable, pred):
+ """Yield lists of items from *iterable*, where each list is delimited by
+ an item where callable *pred* returns ``True``. The lists do not include
+ the delimiting items.
+
+ >>> list(split_at('abcdcba', lambda x: x == 'b'))
+ [['a'], ['c', 'd', 'c'], ['a']]
+
+ >>> list(split_at(range(10), lambda n: n % 2 == 1))
+ [[0], [2], [4], [6], [8], []]
+ """
+ buf = []
+ for item in iterable:
+ if pred(item):
+ yield buf
+ buf = []
+ else:
+ buf.append(item)
+ yield buf
+
+
+def split_before(iterable, pred):
+ """Yield lists of items from *iterable*, where each list starts with an
+ item where callable *pred* returns ``True``:
+
+ >>> list(split_before('OneTwo', lambda s: s.isupper()))
+ [['O', 'n', 'e'], ['T', 'w', 'o']]
+
+ >>> list(split_before(range(10), lambda n: n % 3 == 0))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
+
+ """
+ buf = []
+ for item in iterable:
+ if pred(item) and buf:
+ yield buf
+ buf = []
+ buf.append(item)
+ yield buf
+
+
+def split_after(iterable, pred):
+ """Yield lists of items from *iterable*, where each list ends with an
+ item where callable *pred* returns ``True``:
+
+ >>> list(split_after('one1two2', lambda s: s.isdigit()))
+ [['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']]
+
+ >>> list(split_after(range(10), lambda n: n % 3 == 0))
+ [[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]]
+
+ """
+ buf = []
+ for item in iterable:
+ buf.append(item)
+ if pred(item) and buf:
+ yield buf
+ buf = []
+ if buf:
+ yield buf
+
+
+def padded(iterable, fillvalue=None, n=None, next_multiple=False):
+ """Yield the elements from *iterable*, followed by *fillvalue*, such that
+ at least *n* items are emitted.
+
+ >>> list(padded([1, 2, 3], '?', 5))
+ [1, 2, 3, '?', '?']
+
+ If *next_multiple* is ``True``, *fillvalue* will be emitted until the
+ number of items emitted is a multiple of *n*::
+
+ >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True))
+ [1, 2, 3, 4, None, None]
+
+ If *n* is ``None``, *fillvalue* will be emitted indefinitely.
+
+ """
+ it = iter(iterable)
+ if n is None:
+ for item in chain(it, repeat(fillvalue)):
+ yield item
+ elif n < 1:
+ raise ValueError('n must be at least 1')
+ else:
+ item_count = 0
+ for item in it:
+ yield item
+ item_count += 1
+
+ remaining = (n - item_count) % n if next_multiple else n - item_count
+ for _ in range(remaining):
+ yield fillvalue
+
+
+def distribute(n, iterable):
+ """Distribute the items from *iterable* among *n* smaller iterables.
+
+ >>> group_1, group_2 = distribute(2, [1, 2, 3, 4, 5, 6])
+ >>> list(group_1)
+ [1, 3, 5]
+ >>> list(group_2)
+ [2, 4, 6]
+
+ If the length of *iterable* is not evenly divisible by *n*, then the
+ length of the returned iterables will not be identical:
+
+ >>> children = distribute(3, [1, 2, 3, 4, 5, 6, 7])
+ >>> [list(c) for c in children]
+ [[1, 4, 7], [2, 5], [3, 6]]
+
+ If the length of *iterable* is smaller than *n*, then the last returned
+ iterables will be empty:
+
+ >>> children = distribute(5, [1, 2, 3])
+ >>> [list(c) for c in children]
+ [[1], [2], [3], [], []]
+
+ This function uses :func:`itertools.tee` and may require significant
+ storage. If you need the order of items in the smaller iterables to match the
+ original iterable, see :func:`divide`.
+
+ """
+ if n < 1:
+ raise ValueError('n must be at least 1')
+
+ children = tee(iterable, n)
+ return [islice(it, index, None, n) for index, it in enumerate(children)]
+
+
+def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None):
+ """Yield tuples whose elements are offset from *iterable*.
+ The amount by which the `i`-th item in each tuple is offset is given by
+ the `i`-th item in *offsets*.
+
+ >>> list(stagger([0, 1, 2, 3]))
+ [(None, 0, 1), (0, 1, 2), (1, 2, 3)]
+ >>> list(stagger(range(8), offsets=(0, 2, 4)))
+ [(0, 2, 4), (1, 3, 5), (2, 4, 6), (3, 5, 7)]
+
+ By default, the sequence will end when the final element of a tuple is the
+ last item in the iterable. To continue until the first element of a tuple
+ is the last item in the iterable, set *longest* to ``True``::
+
+ >>> list(stagger([0, 1, 2, 3], longest=True))
+ [(None, 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, None), (3, None, None)]
+
+ By default, ``None`` will be used to replace offsets beyond the end of the
+ sequence. Specify *fillvalue* to use some other value.
+
+ """
+ children = tee(iterable, len(offsets))
+
+ return zip_offset(
+ *children, offsets=offsets, longest=longest, fillvalue=fillvalue
+ )
+
+
+def zip_offset(*iterables, **kwargs):
+ """``zip`` the input *iterables* together, but offset the `i`-th iterable
+ by the `i`-th item in *offsets*.
+
+ >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1)))
+ [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')]
+
+ This can be used as a lightweight alternative to SciPy or pandas to analyze
+ data sets in which some series have a lead or lag relationship.
+
+ By default, the sequence will end when the shortest iterable is exhausted.
+ To continue until the longest iterable is exhausted, set *longest* to
+ ``True``.
+
+ >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1), longest=True))
+ [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')]
+
+ By default, ``None`` will be used to replace offsets beyond the end of the
+ sequence. Specify *fillvalue* to use some other value.
+
+ """
+ offsets = kwargs['offsets']
+ longest = kwargs.get('longest', False)
+ fillvalue = kwargs.get('fillvalue', None)
+
+ if len(iterables) != len(offsets):
+ raise ValueError("Number of iterables and offsets didn't match")
+
+ staggered = []
+ for it, n in zip(iterables, offsets):
+ if n < 0:
+ staggered.append(chain(repeat(fillvalue, -n), it))
+ elif n > 0:
+ staggered.append(islice(it, n, None))
+ else:
+ staggered.append(it)
+
+ if longest:
+ return zip_longest(*staggered, fillvalue=fillvalue)
+
+ return zip(*staggered)
+
+
+def sort_together(iterables, key_list=(0,), reverse=False):
+ """Return the input iterables sorted together, with *key_list* as the
+ priority for sorting. All iterables are trimmed to the length of the
+ shortest one.
+
+ This can be used like the sorting function in a spreadsheet. If each
+ iterable represents a column of data, the key list determines which
+ columns are used for sorting.
+
+ By default, all iterables are sorted using the ``0``-th iterable::
+
+ >>> iterables = [(4, 3, 2, 1), ('a', 'b', 'c', 'd')]
+ >>> sort_together(iterables)
+ [(1, 2, 3, 4), ('d', 'c', 'b', 'a')]
+
+ Set a different key list to sort according to another iterable.
+ Specifying multiple keys dictates how ties are broken::
+
+ >>> iterables = [(3, 1, 2), (0, 1, 0), ('c', 'b', 'a')]
+ >>> sort_together(iterables, key_list=(1, 2))
+ [(2, 3, 1), (0, 0, 1), ('a', 'c', 'b')]
+
+ Set *reverse* to ``True`` to sort in descending order.
+
+ >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True)
+ [(3, 2, 1), ('a', 'b', 'c')]
+
+ """
+ return list(zip(*sorted(zip(*iterables),
+ key=itemgetter(*key_list),
+ reverse=reverse)))
+
+
+def divide(n, iterable):
+ """Divide the elements from *iterable* into *n* parts, maintaining
+ order.
+
+ >>> group_1, group_2 = divide(2, [1, 2, 3, 4, 5, 6])
+ >>> list(group_1)
+ [1, 2, 3]
+ >>> list(group_2)
+ [4, 5, 6]
+
+ If the length of *iterable* is not evenly divisible by *n*, then the
+ length of the returned iterables will not be identical:
+
+ >>> children = divide(3, [1, 2, 3, 4, 5, 6, 7])
+ >>> [list(c) for c in children]
+ [[1, 2, 3], [4, 5], [6, 7]]
+
+ If the length of *iterable* is smaller than *n*, then the last returned
+ iterables will be empty:
+
+ >>> children = divide(5, [1, 2, 3])
+ >>> [list(c) for c in children]
+ [[1], [2], [3], [], []]
+
+ This function will exhaust the iterable before returning and may require
+ significant storage. If order is not important, see :func:`distribute`,
+ which does not first pull the iterable into memory.
+
+ """
+ if n < 1:
+ raise ValueError('n must be at least 1')
+
+ seq = tuple(iterable)
+ q, r = divmod(len(seq), n)
+
+ ret = []
+ for i in range(n):
+ start = (i * q) + (i if i < r else r)
+ stop = ((i + 1) * q) + (i + 1 if i + 1 < r else r)
+ ret.append(iter(seq[start:stop]))
+
+ return ret
+
+
+def always_iterable(obj, base_type=(text_type, binary_type)):
+ """If *obj* is iterable, return an iterator over its items::
+
+ >>> obj = (1, 2, 3)
+ >>> list(always_iterable(obj))
+ [1, 2, 3]
+
+ If *obj* is not iterable, return a one-item iterable containing *obj*::
+
+ >>> obj = 1
+ >>> list(always_iterable(obj))
+ [1]
+
+ If *obj* is ``None``, return an empty iterable:
+
+ >>> obj = None
+ >>> list(always_iterable(None))
+ []
+
+ By default, binary and text strings are not considered iterable::
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj))
+ ['foo']
+
+ If *base_type* is set, objects for which ``isinstance(obj, base_type)``
+ returns ``True`` won't be considered iterable.
+
+ >>> obj = {'a': 1}
+ >>> list(always_iterable(obj)) # Iterate over the dict's keys
+ ['a']
+ >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit
+ [{'a': 1}]
+
+ Set *base_type* to ``None`` to avoid any special handling and treat objects
+ Python considers iterable as iterable:
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj, base_type=None))
+ ['f', 'o', 'o']
+ """
+ if obj is None:
+ return iter(())
+
+ if (base_type is not None) and isinstance(obj, base_type):
+ return iter((obj,))
+
+ try:
+ return iter(obj)
+ except TypeError:
+ return iter((obj,))
+
+
+def adjacent(predicate, iterable, distance=1):
+ """Return an iterable over `(bool, item)` tuples where the `item` is
+ drawn from *iterable* and the `bool` indicates whether
+ that item satisfies the *predicate* or is adjacent to an item that does.
+
+ For example, to find whether items are adjacent to a ``3``::
+
+ >>> list(adjacent(lambda x: x == 3, range(6)))
+ [(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)]
+
+ Set *distance* to change what counts as adjacent. For example, to find
+ whether items are two places away from a ``3``:
+
+ >>> list(adjacent(lambda x: x == 3, range(6), distance=2))
+ [(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)]
+
+ This is useful for contextualizing the results of a search function.
+ For example, a code comparison tool might want to identify lines that
+ have changed, but also surrounding lines to give the viewer of the diff
+ context.
+
+ The predicate function will only be called once for each item in the
+ iterable.
+
+ See also :func:`groupby_transform`, which can be used with this function
+ to group ranges of items with the same `bool` value.
+
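+ For example, one way to combine the two, using only functions defined in
+ this module:
+
+ >>> from operator import itemgetter
+ >>> it = adjacent(lambda x: x == 3, range(6))
+ >>> grouper = groupby_transform(it, itemgetter(0), itemgetter(1))
+ >>> [(k, list(g)) for k, g in grouper]
+ [(False, [0, 1]), (True, [2, 3, 4]), (False, [5])]
+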
+ """
+ # Allow distance=0 mainly for testing that it reproduces results with map()
+ if distance < 0:
+ raise ValueError('distance must be at least 0')
+
+ i1, i2 = tee(iterable)
+ padding = [False] * distance
+ selected = chain(padding, map(predicate, i1), padding)
+ adjacent_to_selected = map(any, windowed(selected, 2 * distance + 1))
+ return zip(adjacent_to_selected, i2)
+
+
+def groupby_transform(iterable, keyfunc=None, valuefunc=None):
+ """An extension of :func:`itertools.groupby` that transforms the values of
+ *iterable* after grouping them.
+ *keyfunc* is a function used to compute a grouping key for each item.
+ *valuefunc* is a function for transforming the items after grouping.
+
+ >>> iterable = 'AaaABbBCcA'
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: x.lower()
+ >>> grouper = groupby_transform(iterable, keyfunc, valuefunc)
+ >>> [(k, ''.join(g)) for k, g in grouper]
+ [('A', 'aaaa'), ('B', 'bbb'), ('C', 'cc'), ('A', 'a')]
+
+ *keyfunc* and *valuefunc* default to identity functions if they are not
+ specified.
+
+ :func:`groupby_transform` is useful when grouping elements of an iterable
+ using a separate iterable as the key. To do this, :func:`zip` the iterables
+ and pass a *keyfunc* that extracts the first element and a *valuefunc*
+ that extracts the second element::
+
+ >>> from operator import itemgetter
+ >>> keys = [0, 0, 1, 1, 1, 2, 2, 2, 3]
+ >>> values = 'abcdefghi'
+ >>> iterable = zip(keys, values)
+ >>> grouper = groupby_transform(iterable, itemgetter(0), itemgetter(1))
+ >>> [(k, ''.join(g)) for k, g in grouper]
+ [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')]
+
+ Note that the order of items in the iterable is significant.
+ Only adjacent items are grouped together, so if you don't want any
+ duplicate groups, you should sort the iterable by the key function.
+
+ """
+ valuefunc = (lambda x: x) if valuefunc is None else valuefunc
+ return ((k, map(valuefunc, g)) for k, g in groupby(iterable, keyfunc))
+
+
+def numeric_range(*args):
+ """An extension of the built-in ``range()`` function whose arguments can
+ be any orderable numeric type.
+
+ With only *stop* specified, *start* defaults to ``0`` and *step*
+ defaults to ``1``. The output items will match the type of *stop*:
+
+ >>> list(numeric_range(3.5))
+ [0.0, 1.0, 2.0, 3.0]
+
+ With only *start* and *stop* specified, *step* defaults to ``1``. The
+ output items will match the type of *start*:
+
+ >>> from decimal import Decimal
+ >>> start = Decimal('2.1')
+ >>> stop = Decimal('5.1')
+ >>> list(numeric_range(start, stop))
+ [Decimal('2.1'), Decimal('3.1'), Decimal('4.1')]
+
+ With *start*, *stop*, and *step* specified the output items will match
+ the type of ``start + step``:
+
+ >>> from fractions import Fraction
+ >>> start = Fraction(1, 2) # Start at 1/2
+ >>> stop = Fraction(5, 2) # End at 5/2
+ >>> step = Fraction(1, 2) # Count by 1/2
+ >>> list(numeric_range(start, stop, step))
+ [Fraction(1, 2), Fraction(1, 1), Fraction(3, 2), Fraction(2, 1)]
+
+ If *step* is zero, ``ValueError`` is raised. Negative steps are supported:
+
+ >>> list(numeric_range(3, -1, -1.0))
+ [3.0, 2.0, 1.0, 0.0]
+
+ Be aware of the limitations of floating point numbers; the representation
+ of the yielded numbers may be surprising.
+
+ """
+ argc = len(args)
+ if argc == 1:
+ stop, = args
+ start = type(stop)(0)
+ step = 1
+ elif argc == 2:
+ start, stop = args
+ step = 1
+ elif argc == 3:
+ start, stop, step = args
+ else:
+ err_msg = 'numeric_range takes at most 3 arguments, got {}'
+ raise TypeError(err_msg.format(argc))
+
+ values = (start + (step * n) for n in count())
+ if step > 0:
+ return takewhile(partial(gt, stop), values)
+ elif step < 0:
+ return takewhile(partial(lt, stop), values)
+ else:
+ raise ValueError('numeric_range arg 3 must not be zero')
+
+
+def count_cycle(iterable, n=None):
+ """Cycle through the items from *iterable* up to *n* times, yielding
+ the number of completed cycles along with each item. If *n* is omitted the
+ process repeats indefinitely.
+
+ >>> list(count_cycle('AB', 3))
+ [(0, 'A'), (0, 'B'), (1, 'A'), (1, 'B'), (2, 'A'), (2, 'B')]
+
+ """
+ iterable = tuple(iterable)
+ if not iterable:
+ return iter(())
+ counter = count() if n is None else range(n)
+ return ((i, item) for i in counter for item in iterable)
+
+
+def locate(iterable, pred=bool, window_size=None):
+ """Yield the index of each item in *iterable* for which *pred* returns
+ ``True``.
+
+ *pred* defaults to :func:`bool`, which will select truthy items:
+
+ >>> list(locate([0, 1, 1, 0, 1, 0, 0]))
+ [1, 2, 4]
+
+ Set *pred* to a custom function to, e.g., find the indexes for a particular
+ item.
+
+ >>> list(locate(['a', 'b', 'c', 'b'], lambda x: x == 'b'))
+ [1, 3]
+
+ If *window_size* is given, then the *pred* function will be called with
+ that many items. This enables searching for sub-sequences:
+
+ >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
+ >>> pred = lambda *args: args == (1, 2, 3)
+ >>> list(locate(iterable, pred=pred, window_size=3))
+ [1, 5, 9]
+
+ Use with :func:`seekable` to find indexes and then retrieve the associated
+ items:
+
+ >>> from itertools import count
+ >>> from more_itertools import seekable
+ >>> source = (3 * n + 1 if (n % 2) else n // 2 for n in count())
+ >>> it = seekable(source)
+ >>> pred = lambda x: x > 100
+ >>> indexes = locate(it, pred=pred)
+ >>> i = next(indexes)
+ >>> it.seek(i)
+ >>> next(it)
+ 106
+
+ """
+ if window_size is None:
+ return compress(count(), map(pred, iterable))
+
+ if window_size < 1:
+ raise ValueError('window size must be at least 1')
+
+ it = windowed(iterable, window_size, fillvalue=_marker)
+ return compress(count(), starmap(pred, it))
+
+
+def lstrip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the beginning
+ for which *pred* returns ``True``.
+
+ For example, to remove a set of items from the start of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(lstrip(iterable, pred))
+ [1, 2, None, 3, False, None]
+
+ This function is analogous to :func:`str.lstrip`, and is essentially
+ a wrapper for :func:`itertools.dropwhile`.
+
+ """
+ return dropwhile(pred, iterable)
+
+
+def rstrip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the end
+ for which *pred* returns ``True``.
+
+ For example, to remove a set of items from the end of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(rstrip(iterable, pred))
+ [None, False, None, 1, 2, None, 3]
+
+ This function is analogous to :func:`str.rstrip`.
+
+ """
+ cache = []
+ cache_append = cache.append
+ for x in iterable:
+ if pred(x):
+ cache_append(x)
+ else:
+ for y in cache:
+ yield y
+ del cache[:]
+ yield x
+
+
+def strip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the
+ beginning and end for which *pred* returns ``True``.
+
+ For example, to remove a set of items from both ends of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(strip(iterable, pred))
+ [1, 2, None, 3]
+
+ This function is analogous to :func:`str.strip`.
+
+ """
+ return rstrip(lstrip(iterable, pred), pred)
+
+
+def islice_extended(iterable, *args):
+ """An extension of :func:`itertools.islice` that supports negative values
+ for *stop*, *start*, and *step*.
+
+ >>> iterable = iter('abcdefgh')
+ >>> list(islice_extended(iterable, -4, -1))
+ ['e', 'f', 'g']
+
+ Slices with negative values require some caching of *iterable*, but this
+ function takes care to minimize the amount of memory required.
+
+ For example, you can use a negative step with an infinite iterator:
+
+ >>> from itertools import count
+ >>> list(islice_extended(count(), 110, 99, -2))
+ [110, 108, 106, 104, 102, 100]
+
+ """
+ s = slice(*args)
+ start = s.start
+ stop = s.stop
+ if s.step == 0:
+ raise ValueError('step argument must be a non-zero integer or None.')
+ step = s.step or 1
+
+ it = iter(iterable)
+
+ if step > 0:
+ start = 0 if (start is None) else start
+
+ if (start < 0):
+ # Consume all but the last -start items
+ cache = deque(enumerate(it, 1), maxlen=-start)
+ len_iter = cache[-1][0] if cache else 0
+
+ # Adjust start to be positive
+ i = max(len_iter + start, 0)
+
+ # Adjust stop to be positive
+ if stop is None:
+ j = len_iter
+ elif stop >= 0:
+ j = min(stop, len_iter)
+ else:
+ j = max(len_iter + stop, 0)
+
+ # Slice the cache
+ n = j - i
+ if n <= 0:
+ return
+
+ for index, item in islice(cache, 0, n, step):
+ yield item
+ elif (stop is not None) and (stop < 0):
+ # Advance to the start position
+ next(islice(it, start, start), None)
+
+ # When stop is negative, we have to carry -stop items while
+ # iterating
+ cache = deque(islice(it, -stop), maxlen=-stop)
+
+ for index, item in enumerate(it):
+ cached_item = cache.popleft()
+ if index % step == 0:
+ yield cached_item
+ cache.append(item)
+ else:
+ # When both start and stop are positive we have the normal case
+ for item in islice(it, start, stop, step):
+ yield item
+ else:
+ start = -1 if (start is None) else start
+
+ if (stop is not None) and (stop < 0):
+ # Consume all but the last items
+ n = -stop - 1
+ cache = deque(enumerate(it, 1), maxlen=n)
+ len_iter = cache[-1][0] if cache else 0
+
+ # If start and stop are both negative they are comparable and
+ # we can just slice. Otherwise we can adjust start to be negative
+ # and then slice.
+ if start < 0:
+ i, j = start, stop
+ else:
+ i, j = min(start - len_iter, -1), None
+
+ for index, item in list(cache)[i:j:step]:
+ yield item
+ else:
+ # Advance to the stop position
+ if stop is not None:
+ m = stop + 1
+ next(islice(it, m, m), None)
+
+ # stop is positive, so if start is negative they are not comparable
+ # and we need the rest of the items.
+ if start < 0:
+ i = start
+ n = None
+ # stop is None and start is positive, so we just need items up to
+ # the start index.
+ elif stop is None:
+ i = None
+ n = start + 1
+ # Both stop and start are positive, so they are comparable.
+ else:
+ i = None
+ n = start - stop
+ if n <= 0:
+ return
+
+ cache = list(islice(it, n))
+
+ for item in cache[i::step]:
+ yield item
+
+
+def always_reversible(iterable):
+ """An extension of :func:`reversed` that supports all iterables, not
+ just those which implement the ``Reversible`` or ``Sequence`` protocols.
+
+ >>> print(*always_reversible(x for x in range(3)))
+ 2 1 0
+
+ If the iterable is already reversible, this function returns the
+ result of :func:`reversed()`. If the iterable is not reversible,
+ this function will cache the remaining items in the iterable and
+ yield them in reverse order, which may require significant storage.
+ """
+ try:
+ return reversed(iterable)
+ except TypeError:
+ return reversed(list(iterable))
+
+
+def consecutive_groups(iterable, ordering=lambda x: x):
+ """Yield groups of consecutive items using :func:`itertools.groupby`.
+ The *ordering* function determines whether two items are adjacent by
+ returning their position.
+
+ By default, the ordering function is the identity function. This is
+ suitable for finding runs of numbers:
+
+ >>> iterable = [1, 10, 11, 12, 20, 30, 31, 32, 33, 40]
+ >>> for group in consecutive_groups(iterable):
+ ... print(list(group))
+ [1]
+ [10, 11, 12]
+ [20]
+ [30, 31, 32, 33]
+ [40]
+
+ For finding runs of adjacent letters, try using the :meth:`index` method
+ of a string of letters:
+
+ >>> from string import ascii_lowercase
+ >>> iterable = 'abcdfgilmnop'
+ >>> ordering = ascii_lowercase.index
+ >>> for group in consecutive_groups(iterable, ordering):
+ ... print(list(group))
+ ['a', 'b', 'c', 'd']
+ ['f', 'g']
+ ['i']
+ ['l', 'm', 'n', 'o', 'p']
+
+ """
+ for k, g in groupby(
+ enumerate(iterable), key=lambda x: x[0] - ordering(x[1])
+ ):
+ yield map(itemgetter(1), g)
+
+
+def difference(iterable, func=sub):
+ """By default, compute the first difference of *iterable* using
+ :func:`operator.sub`.
+
+ >>> iterable = [0, 1, 3, 6, 10]
+ >>> list(difference(iterable))
+ [0, 1, 2, 3, 4]
+
+ This is the opposite of :func:`accumulate`'s default behavior:
+
+ >>> from more_itertools import accumulate
+ >>> iterable = [0, 1, 2, 3, 4]
+ >>> list(accumulate(iterable))
+ [0, 1, 3, 6, 10]
+ >>> list(difference(accumulate(iterable)))
+ [0, 1, 2, 3, 4]
+
+ By default *func* is :func:`operator.sub`, but other functions can be
+ specified. They will be applied as follows::
+
+ A, B, C, D, ... --> A, func(B, A), func(C, B), func(D, C), ...
+
+ For example, to do progressive division:
+
+ >>> iterable = [1, 2, 6, 24, 120] # Factorial sequence
+ >>> func = lambda x, y: x // y
+ >>> list(difference(iterable, func))
+ [1, 2, 3, 4, 5]
+
+ """
+ a, b = tee(iterable)
+ try:
+ item = next(b)
+ except StopIteration:
+ return iter([])
+ return chain([item], map(lambda x: func(x[1], x[0]), zip(a, b)))
+
+
+class SequenceView(Sequence):
+ """Return a read-only view of the sequence object *target*.
+
+ :class:`SequenceView` objects are analogous to Python's built-in
+ "dictionary view" types. They provide a dynamic view of a sequence's items,
+ meaning that when the sequence updates, so does the view.
+
+ >>> seq = ['0', '1', '2']
+ >>> view = SequenceView(seq)
+ >>> view
+ SequenceView(['0', '1', '2'])
+ >>> seq.append('3')
+ >>> view
+ SequenceView(['0', '1', '2', '3'])
+
+ Sequence views support indexing, slicing, and length queries. They act
+ like the underlying sequence, except they don't allow assignment:
+
+ >>> view[1]
+ '1'
+ >>> view[1:-1]
+ ['1', '2']
+ >>> len(view)
+ 4
+
+ Sequence views are useful as an alternative to copying, as they don't
+ require (much) extra storage.
+
+ """
+ def __init__(self, target):
+ if not isinstance(target, Sequence):
+ raise TypeError
+ self._target = target
+
+ def __getitem__(self, index):
+ return self._target[index]
+
+ def __len__(self):
+ return len(self._target)
+
+ def __repr__(self):
+ return '{}({})'.format(self.__class__.__name__, repr(self._target))
+
+
+class seekable(object):
+ """Wrap an iterator to allow for seeking backward and forward. This
+ progressively caches the items in the source iterable so they can be
+ re-visited.
+
+ Call :meth:`seek` with an index to seek to that position in the source
+ iterable.
+
+ To "reset" an iterator, seek to ``0``:
+
+ >>> from itertools import count
+ >>> it = seekable((str(n) for n in count()))
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> it.seek(0)
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> next(it)
+ '3'
+
+ You can also seek forward:
+
+ >>> it = seekable((str(n) for n in range(20)))
+ >>> it.seek(10)
+ >>> next(it)
+ '10'
+ >>> it.seek(20) # Seeking past the end of the source isn't a problem
+ >>> list(it)
+ []
+ >>> it.seek(0) # Resetting works even after hitting the end
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+
+ The cache grows as the source iterable progresses, so beware of wrapping
+ very large or infinite iterables.
+
+ You may view the contents of the cache with the :meth:`elements` method.
+ That returns a :class:`SequenceView`, a view that updates automatically:
+
+ >>> it = seekable((str(n) for n in range(10)))
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> elements = it.elements()
+ >>> elements
+ SequenceView(['0', '1', '2'])
+ >>> next(it)
+ '3'
+ >>> elements
+ SequenceView(['0', '1', '2', '3'])
+
+ """
+
+ def __init__(self, iterable):
+ self._source = iter(iterable)
+ self._cache = []
+ self._index = None
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if self._index is not None:
+ try:
+ item = self._cache[self._index]
+ except IndexError:
+ self._index = None
+ else:
+ self._index += 1
+ return item
+
+ item = next(self._source)
+ self._cache.append(item)
+ return item
+
+ next = __next__
+
+ def elements(self):
+ return SequenceView(self._cache)
+
+ def seek(self, index):
+ self._index = index
+ remainder = index - len(self._cache)
+ if remainder > 0:
+ consume(self, remainder)
+
+
+class run_length(object):
+ """
+ :func:`run_length.encode` compresses an iterable with run-length encoding.
+ It yields groups of repeated items with the count of how many times they
+ were repeated:
+
+ >>> uncompressed = 'abbcccdddd'
+ >>> list(run_length.encode(uncompressed))
+ [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+
+ :func:`run_length.decode` decompresses an iterable that was previously
+ compressed with run-length encoding. It yields the items of the
+ decompressed iterable:
+
+ >>> compressed = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+ >>> list(run_length.decode(compressed))
+ ['a', 'b', 'b', 'c', 'c', 'c', 'd', 'd', 'd', 'd']
+
+ """
+
+ @staticmethod
+ def encode(iterable):
+ return ((k, ilen(g)) for k, g in groupby(iterable))
+
+ @staticmethod
+ def decode(iterable):
+ return chain.from_iterable(repeat(k, n) for k, n in iterable)
+
+
+def exactly_n(iterable, n, predicate=bool):
+ """Return ``True`` if exactly ``n`` items in the iterable are ``True``
+ according to the *predicate* function.
+
+ >>> exactly_n([True, True, False], 2)
+ True
+ >>> exactly_n([True, True, False], 1)
+ False
+ >>> exactly_n([0, 1, 2, 3, 4, 5], 3, lambda x: x < 3)
+ True
+
+ The iterable will be advanced until ``n + 1`` truthy items are encountered,
+ so avoid calling it on infinite iterables.
+
+ """
+ return len(take(n + 1, filter(predicate, iterable))) == n
+
+
+def circular_shifts(iterable):
+ """Return a list of circular shifts of *iterable*.
+
+ >>> circular_shifts(range(4))
+ [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)]
+ """
+ lst = list(iterable)
+ return take(len(lst), windowed(cycle(lst), len(lst)))
+
+
+def make_decorator(wrapping_func, result_index=0):
+ """Return a decorator version of *wrapping_func*, which is a function that
+ modifies an iterable. *result_index* is the position in that function's
+ signature where the iterable goes.
+
+ This lets you use itertools on the "production end," i.e. at function
+ definition. This can augment what the function returns without changing the
+ function's code.
+
+ For example, to produce a decorator version of :func:`chunked`:
+
+ >>> from more_itertools import chunked
+ >>> chunker = make_decorator(chunked, result_index=0)
+ >>> @chunker(3)
+ ... def iter_range(n):
+ ... return iter(range(n))
+ ...
+ >>> list(iter_range(9))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ To only allow truthy items to be returned:
+
+ >>> truth_serum = make_decorator(filter, result_index=1)
+ >>> @truth_serum(bool)
+ ... def boolean_test():
+ ... return [0, 1, '', ' ', False, True]
+ ...
+ >>> list(boolean_test())
+ [1, ' ', True]
+
+ The :func:`peekable` and :func:`seekable` wrappers make for practical
+ decorators:
+
+ >>> from more_itertools import peekable
+ >>> peekable_function = make_decorator(peekable)
+ >>> @peekable_function()
+ ... def str_range(*args):
+ ... return (str(x) for x in range(*args))
+ ...
+ >>> it = str_range(1, 20, 2)
+ >>> next(it), next(it), next(it)
+ ('1', '3', '5')
+ >>> it.peek()
+ '7'
+ >>> next(it)
+ '7'
+
+ """
+ # See https://sites.google.com/site/bbayles/index/decorator_factory for
+ # notes on how this works.
+ def decorator(*wrapping_args, **wrapping_kwargs):
+ def outer_wrapper(f):
+ def inner_wrapper(*args, **kwargs):
+ result = f(*args, **kwargs)
+ wrapping_args_ = list(wrapping_args)
+ wrapping_args_.insert(result_index, result)
+ return wrapping_func(*wrapping_args_, **wrapping_kwargs)
+
+ return inner_wrapper
+
+ return outer_wrapper
+
+ return decorator
+
+
+def map_reduce(iterable, keyfunc, valuefunc=None, reducefunc=None):
+ """Return a dictionary that maps the items in *iterable* to categories
+ defined by *keyfunc*, transforms them with *valuefunc*, and
+ then summarizes them by category with *reducefunc*.
+
+ *valuefunc* defaults to the identity function if it is unspecified.
+ If *reducefunc* is unspecified, no summarization takes place:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> result = map_reduce('abbccc', keyfunc)
+ >>> sorted(result.items())
+ [('A', ['a']), ('B', ['b', 'b']), ('C', ['c', 'c', 'c'])]
+
+ Specifying *valuefunc* transforms the categorized items:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: 1
+ >>> result = map_reduce('abbccc', keyfunc, valuefunc)
+ >>> sorted(result.items())
+ [('A', [1]), ('B', [1, 1]), ('C', [1, 1, 1])]
+
+ Specifying *reducefunc* summarizes the categorized items:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: 1
+ >>> reducefunc = sum
+ >>> result = map_reduce('abbccc', keyfunc, valuefunc, reducefunc)
+ >>> sorted(result.items())
+ [('A', 1), ('B', 2), ('C', 3)]
+
+ You may want to filter the input iterable before applying the map/reduce
+ procedure:
+
+ >>> all_items = range(30)
+ >>> items = [x for x in all_items if 10 <= x <= 20] # Filter
+ >>> keyfunc = lambda x: x % 2 # Evens map to 0; odds to 1
+ >>> categories = map_reduce(items, keyfunc=keyfunc)
+ >>> sorted(categories.items())
+ [(0, [10, 12, 14, 16, 18, 20]), (1, [11, 13, 15, 17, 19])]
+ >>> summaries = map_reduce(items, keyfunc=keyfunc, reducefunc=sum)
+ >>> sorted(summaries.items())
+ [(0, 90), (1, 75)]
+
+ Note that all items in the iterable are gathered into a list before the
+ summarization step, which may require significant storage.
+
+ The returned object is a :obj:`collections.defaultdict` with the
+ ``default_factory`` set to ``None``, such that it behaves like a normal
+ dictionary.
+
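+ For example, looking up a missing key raises ``KeyError`` instead of
+ creating a new entry:
+
+ >>> result = map_reduce('abbccc', keyfunc=lambda x: x.upper())
+ >>> result['Z']
+ Traceback (most recent call last):
+     ...
+ KeyError: 'Z'
+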
+ """
+ valuefunc = (lambda x: x) if (valuefunc is None) else valuefunc
+
+ ret = defaultdict(list)
+ for item in iterable:
+ key = keyfunc(item)
+ value = valuefunc(item)
+ ret[key].append(value)
+
+ if reducefunc is not None:
+ for key, value_list in ret.items():
+ ret[key] = reducefunc(value_list)
+
+ ret.default_factory = None
+ return ret
+
+
+def rlocate(iterable, pred=bool, window_size=None):
+ """Yield the index of each item in *iterable* for which *pred* returns
+ ``True``, starting from the right and moving left.
+
+ *pred* defaults to :func:`bool`, which will select truthy items:
+
+ >>> list(rlocate([0, 1, 1, 0, 1, 0, 0])) # Truthy at 1, 2, and 4
+ [4, 2, 1]
+
+ Set *pred* to a custom function to, e.g., find the indexes for a particular
+ item:
+
+ >>> iterable = iter('abcb')
+ >>> pred = lambda x: x == 'b'
+ >>> list(rlocate(iterable, pred))
+ [3, 1]
+
+ If *window_size* is given, then the *pred* function will be called with
+ that many items. This enables searching for sub-sequences:
+
+ >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
+ >>> pred = lambda *args: args == (1, 2, 3)
+ >>> list(rlocate(iterable, pred=pred, window_size=3))
+ [9, 5, 1]
+
+ Beware, this function won't return anything for infinite iterables.
+ If *iterable* is reversible, ``rlocate`` will reverse it and search from
+ the right. Otherwise, it will search from the left and return the results
+ in reverse order.
+
+ See :func:`locate` for other example applications.
+
+ """
+ if window_size is None:
+ try:
+ len_iter = len(iterable)
+ return (
+ len_iter - i - 1 for i in locate(reversed(iterable), pred)
+ )
+ except TypeError:
+ pass
+
+ return reversed(list(locate(iterable, pred, window_size)))
+
+
+def replace(iterable, pred, substitutes, count=None, window_size=1):
+ """Yield the items from *iterable*, replacing the items for which *pred*
+ returns ``True`` with the items from the iterable *substitutes*.
+
+ >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1]
+ >>> pred = lambda x: x == 0
+ >>> substitutes = (2, 3)
+ >>> list(replace(iterable, pred, substitutes))
+ [1, 1, 2, 3, 1, 1, 2, 3, 1, 1]
+
+ If *count* is given, the number of replacements will be limited:
+
+ >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1, 0]
+ >>> pred = lambda x: x == 0
+ >>> substitutes = [None]
+ >>> list(replace(iterable, pred, substitutes, count=2))
+ [1, 1, None, 1, 1, None, 1, 1, 0]
+
+ Use *window_size* to control the number of items passed as arguments to
+ *pred*. This allows for locating and replacing subsequences.
+
+ >>> iterable = [0, 1, 2, 5, 0, 1, 2, 5]
+ >>> window_size = 3
+ >>> pred = lambda *args: args == (0, 1, 2) # 3 items passed to pred
+ >>> substitutes = [3, 4] # Splice in these items
+ >>> list(replace(iterable, pred, substitutes, window_size=window_size))
+ [3, 4, 5, 3, 4, 5]
+
+ """
+ if window_size < 1:
+ raise ValueError('window_size must be at least 1')
+
+ # Save the substitutes iterable, since it's used more than once
+ substitutes = tuple(substitutes)
+
+ # Add padding such that the number of windows matches the length of the
+ # iterable
+ it = chain(iterable, [_marker] * (window_size - 1))
+ windows = windowed(it, window_size)
+
+ n = 0
+ for w in windows:
+ # If the current window matches our predicate (and we haven't hit
+ # our maximum number of replacements), splice in the substitutes
+ # and then consume the following windows that overlap with this one.
+ # For example, if the iterable is (0, 1, 2, 3, 4...)
+ # and the window size is 2, we have (0, 1), (1, 2), (2, 3)...
+ # If the predicate matches on (0, 1), we need to zap (0, 1) and (1, 2)
+ if pred(*w):
+ if (count is None) or (n < count):
+ n += 1
+ for s in substitutes:
+ yield s
+ consume(windows, window_size - 1)
+ continue
+
+ # If there was no match (or we've reached the replacement limit),
+ # yield the first item from the window.
+ if w and (w[0] is not _marker):
+ yield w[0]
diff --git a/third_party/python/more-itertools/more_itertools/recipes.py b/third_party/python/more-itertools/more_itertools/recipes.py
new file mode 100644
index 0000000000..3a7706cb91
--- /dev/null
+++ b/third_party/python/more-itertools/more_itertools/recipes.py
@@ -0,0 +1,565 @@
+"""Imported from the recipes section of the itertools documentation.
+
+All functions taken from the recipes section of the itertools library docs
+[1]_.
+Some backward-compatible usability improvements have been made.
+
+.. [1] http://docs.python.org/library/itertools.html#recipes
+
+"""
+from collections import deque
+from itertools import (
+ chain, combinations, count, cycle, groupby, islice, repeat, starmap, tee
+)
+import operator
+from random import randrange, sample, choice
+
+from six import PY2
+from six.moves import filter, filterfalse, map, range, zip, zip_longest
+
+__all__ = [
+ 'accumulate',
+ 'all_equal',
+ 'consume',
+ 'dotproduct',
+ 'first_true',
+ 'flatten',
+ 'grouper',
+ 'iter_except',
+ 'ncycles',
+ 'nth',
+ 'nth_combination',
+ 'padnone',
+ 'pairwise',
+ 'partition',
+ 'powerset',
+ 'prepend',
+ 'quantify',
+ 'random_combination_with_replacement',
+ 'random_combination',
+ 'random_permutation',
+ 'random_product',
+ 'repeatfunc',
+ 'roundrobin',
+ 'tabulate',
+ 'tail',
+ 'take',
+ 'unique_everseen',
+ 'unique_justseen',
+]
+
+
+def accumulate(iterable, func=operator.add):
+ """
+ Return an iterator whose items are the accumulated results of a function
+ (specified by the optional *func* argument) that takes two arguments.
+ By default, returns accumulated sums with :func:`operator.add`.
+
+ >>> list(accumulate([1, 2, 3, 4, 5])) # Running sum
+ [1, 3, 6, 10, 15]
+ >>> list(accumulate([1, 2, 3], func=operator.mul)) # Running product
+ [1, 2, 6]
+ >>> list(accumulate([0, 1, -1, 2, 3, 2], func=max)) # Running maximum
+ [0, 1, 1, 2, 3, 3]
+
+ This function is available in the ``itertools`` module for Python 3.2 and
+ greater.
+
+ """
+ it = iter(iterable)
+ try:
+ total = next(it)
+ except StopIteration:
+ return
+ else:
+ yield total
+
+ for element in it:
+ total = func(total, element)
+ yield total
+
+
+def take(n, iterable):
+ """Return first *n* items of the iterable as a list.
+
+ >>> take(3, range(10))
+ [0, 1, 2]
+ >>> take(5, range(3))
+ [0, 1, 2]
+
+ Effectively a short replacement for ``next`` based iterator consumption
+ when you want more than one item, but less than the whole iterator.
+
+ """
+ return list(islice(iterable, n))
+
+
+def tabulate(function, start=0):
+ """Return an iterator over the results of ``func(start)``,
+ ``func(start + 1)``, ``func(start + 2)``...
+
+ *func* should be a function that accepts one integer argument.
+
+ If *start* is not specified it defaults to 0. It will be incremented each
+ time the iterator is advanced.
+
+ >>> square = lambda x: x ** 2
+ >>> iterator = tabulate(square, -3)
+ >>> take(4, iterator)
+ [9, 4, 1, 0]
+
+ """
+ return map(function, count(start))
+
+
+def tail(n, iterable):
+ """Return an iterator over the last *n* items of *iterable*.
+
+ >>> t = tail(3, 'ABCDEFG')
+ >>> list(t)
+ ['E', 'F', 'G']
+
+ """
+ return iter(deque(iterable, maxlen=n))
+
+
+def consume(iterator, n=None):
+ """Advance *iterable* by *n* steps. If *n* is ``None``, consume it
+ entirely.
+
+ Efficiently exhausts an iterator without returning values. Defaults to
+ consuming the whole iterator, but an optional second argument may be
+ provided to limit consumption.
+
+ >>> i = (x for x in range(10))
+ >>> next(i)
+ 0
+ >>> consume(i, 3)
+ >>> next(i)
+ 4
+ >>> consume(i)
+ >>> next(i)
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ StopIteration
+
+ If the iterator has fewer items remaining than the provided limit, the
+ whole iterator will be consumed.
+
+ >>> i = (x for x in range(3))
+ >>> consume(i, 5)
+ >>> next(i)
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ StopIteration
+
+ """
+ # Use functions that consume iterators at C speed.
+ if n is None:
+ # feed the entire iterator into a zero-length deque
+ deque(iterator, maxlen=0)
+ else:
+ # advance to the empty slice starting at position n
+ next(islice(iterator, n, n), None)
+
+
+def nth(iterable, n, default=None):
+ """Returns the nth item or a default value.
+
+ >>> l = range(10)
+ >>> nth(l, 3)
+ 3
+ >>> nth(l, 20, "zebra")
+ 'zebra'
+
+ """
+ return next(islice(iterable, n, None), default)
+
+
+def all_equal(iterable):
+ """
+ Returns ``True`` if all the elements are equal to each other.
+
+ >>> all_equal('aaaa')
+ True
+ >>> all_equal('aaab')
+ False
+
+ """
+ g = groupby(iterable)
+ return next(g, True) and not next(g, False)
+
+
+def quantify(iterable, pred=bool):
+ """Return the how many times the predicate is true.
+
+ >>> quantify([True, False, True])
+ 2
+
+ """
+ return sum(map(pred, iterable))
+
+
+def padnone(iterable):
+ """Returns the sequence of elements and then returns ``None`` indefinitely.
+
+ >>> take(5, padnone(range(3)))
+ [0, 1, 2, None, None]
+
+ Useful for emulating the behavior of the built-in :func:`map` function.
+
+ See also :func:`padded`.
+
+ """
+ return chain(iterable, repeat(None))
+
+
+def ncycles(iterable, n):
+ """Returns the sequence elements *n* times
+
+ >>> list(ncycles(["a", "b"], 3))
+ ['a', 'b', 'a', 'b', 'a', 'b']
+
+ """
+ return chain.from_iterable(repeat(tuple(iterable), n))
+
+
+def dotproduct(vec1, vec2):
+ """Returns the dot product of the two iterables.
+
+ >>> dotproduct([10, 10], [20, 20])
+ 400
+
+ """
+ return sum(map(operator.mul, vec1, vec2))
+
+
+def flatten(listOfLists):
+ """Return an iterator flattening one level of nesting in a list of lists.
+
+ >>> list(flatten([[0, 1], [2, 3]]))
+ [0, 1, 2, 3]
+
+ See also :func:`collapse`, which can flatten multiple levels of nesting.
+
+ """
+ return chain.from_iterable(listOfLists)
+
+
+def repeatfunc(func, times=None, *args):
+ """Call *func* with *args* repeatedly, returning an iterable over the
+ results.
+
+ If *times* is specified, the iterable will terminate after that many
+ repetitions:
+
+ >>> from operator import add
+ >>> times = 4
+ >>> args = 3, 5
+ >>> list(repeatfunc(add, times, *args))
+ [8, 8, 8, 8]
+
+ If *times* is ``None`` the iterable will not terminate:
+
+ >>> from random import randrange
+ >>> times = None
+ >>> args = 1, 11
+ >>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP
+ [2, 4, 8, 1, 8, 4]
+
+ """
+ if times is None:
+ return starmap(func, repeat(args))
+ return starmap(func, repeat(args, times))
+
+
+def pairwise(iterable):
+ """Returns an iterator of paired items, overlapping, from the original
+
+ >>> take(4, pairwise(count()))
+ [(0, 1), (1, 2), (2, 3), (3, 4)]
+
+ """
+ a, b = tee(iterable)
+ next(b, None)
+ return zip(a, b)
+
+
+def grouper(n, iterable, fillvalue=None):
+ """Collect data into fixed-length chunks or blocks.
+
+ >>> list(grouper(3, 'ABCDEFG', 'x'))
+ [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
+
+ """
+ args = [iter(iterable)] * n
+ return zip_longest(fillvalue=fillvalue, *args)
+
+
+def roundrobin(*iterables):
+ """Yields an item from each iterable, alternating between them.
+
+ >>> list(roundrobin('ABC', 'D', 'EF'))
+ ['A', 'D', 'E', 'B', 'F', 'C']
+
+ This function produces the same output as :func:`interleave_longest`, but
+ may perform better for some inputs (in particular when the number of
+ iterables is small).
+
+ """
+ # Recipe credited to George Sakkis
+ pending = len(iterables)
+ if PY2:
+ nexts = cycle(iter(it).next for it in iterables)
+ else:
+ nexts = cycle(iter(it).__next__ for it in iterables)
+ while pending:
+ try:
+ for next in nexts:
+ yield next()
+ except StopIteration:
+ pending -= 1
+ nexts = cycle(islice(nexts, pending))
+
+
+def partition(pred, iterable):
+ """
+ Returns a 2-tuple of iterables derived from the input iterable.
+ The first yields the items that have ``pred(item) == False``.
+ The second yields the items that have ``pred(item) == True``.
+
+ >>> is_odd = lambda x: x % 2 != 0
+ >>> iterable = range(10)
+ >>> even_items, odd_items = partition(is_odd, iterable)
+ >>> list(even_items), list(odd_items)
+ ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
+
+ """
+ # partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9
+ t1, t2 = tee(iterable)
+ return filterfalse(pred, t1), filter(pred, t2)
+
+
+def powerset(iterable):
+ """Yields all possible subsets of the iterable.
+
+ >>> list(powerset([1,2,3]))
+ [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
+
+ """
+ s = list(iterable)
+ return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))
+
+
+def unique_everseen(iterable, key=None):
+ """
+ Yield unique elements, preserving order.
+
+ >>> list(unique_everseen('AAAABBBCCDAABBB'))
+ ['A', 'B', 'C', 'D']
+ >>> list(unique_everseen('ABBCcAD', str.lower))
+ ['A', 'B', 'C', 'D']
+
+ Sequences with a mix of hashable and unhashable items can be used.
+ The function will be slower (i.e., `O(n^2)`) for unhashable items.
+
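+ For example, a small sketch with unhashable (list) elements:
+
+ >>> iterable = ([1, 2], [2, 3], [1, 2])
+ >>> list(unique_everseen(iterable))
+ [[1, 2], [2, 3]]
+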
+ """
+ seenset = set()
+ seenset_add = seenset.add
+ seenlist = []
+ seenlist_add = seenlist.append
+ if key is None:
+ for element in iterable:
+ try:
+ if element not in seenset:
+ seenset_add(element)
+ yield element
+ except TypeError:
+ if element not in seenlist:
+ seenlist_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ try:
+ if k not in seenset:
+ seenset_add(k)
+ yield element
+ except TypeError:
+ if k not in seenlist:
+ seenlist_add(k)
+ yield element
+
+
+def unique_justseen(iterable, key=None):
+ """Yields elements in order, ignoring serial duplicates
+
+ >>> list(unique_justseen('AAAABBBCCDAABBB'))
+ ['A', 'B', 'C', 'D', 'A', 'B']
+ >>> list(unique_justseen('ABBCcAD', str.lower))
+ ['A', 'B', 'C', 'A', 'D']
+
+ """
+ return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
+
+
+def iter_except(func, exception, first=None):
+ """Yields results from a function repeatedly until an exception is raised.
+
+ Converts a call-until-exception interface to an iterator interface.
+ Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel
+ to end the loop.
+
+ >>> l = [0, 1, 2]
+ >>> list(iter_except(l.pop, IndexError))
+ [2, 1, 0]
+
+ """
+ try:
+ if first is not None:
+ yield first()
+ while 1:
+ yield func()
+ except exception:
+ pass
+
+
+def first_true(iterable, default=False, pred=None):
+ """
+ Returns the first true value in the iterable.
+
+ If no true value is found, returns *default*
+
+ If *pred* is not ``None``, returns the first item for which
+ ``pred(item) == True``.
+
+ >>> first_true(range(10))
+ 1
+ >>> first_true(range(10), pred=lambda x: x > 5)
+ 6
+ >>> first_true(range(10), default='missing', pred=lambda x: x > 9)
+ 'missing'
+
+ """
+ return next(filter(pred, iterable), default)
+
+
+def random_product(*args, **kwds):
+ """Draw an item at random from each of the input iterables.
+
+ >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP
+ ('c', 3, 'Z')
+
+ If *repeat* is provided as a keyword argument, that many items will be
+ drawn from each iterable.
+
+ >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP
+ ('a', 2, 'd', 3)
+
+ This is equivalent to taking a random selection from
+ ``itertools.product(*args, **kwargs)``.
+
+ """
+ pools = [tuple(pool) for pool in args] * kwds.get('repeat', 1)
+ return tuple(choice(pool) for pool in pools)
+
+
+def random_permutation(iterable, r=None):
+ """Return a random *r* length permutation of the elements in *iterable*.
+
+ If *r* is not specified or is ``None``, then *r* defaults to the length of
+ *iterable*.
+
+ >>> random_permutation(range(5)) # doctest:+SKIP
+ (3, 4, 0, 1, 2)
+
+ This is equivalent to taking a random selection from
+ ``itertools.permutations(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ r = len(pool) if r is None else r
+ return tuple(sample(pool, r))
+
+
+def random_combination(iterable, r):
+ """Return a random *r* length subsequence of the elements in *iterable*.
+
+ >>> random_combination(range(5), 3) # doctest:+SKIP
+ (2, 3, 4)
+
+ This is equivalent to taking a random selection from
+ ``itertools.combinations(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ indices = sorted(sample(range(n), r))
+ return tuple(pool[i] for i in indices)
+
+
+def random_combination_with_replacement(iterable, r):
+ """Return a random *r* length subsequence of elements in *iterable*,
+ allowing individual elements to be repeated.
+
+ >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
+ (0, 0, 1, 2, 2)
+
+ This is equivalent to taking a random selection from
+ ``itertools.combinations_with_replacement(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ indices = sorted(randrange(n) for i in range(r))
+ return tuple(pool[i] for i in indices)
+
+
+def nth_combination(iterable, r, index):
+ """Equivalent to ``list(combinations(iterable, r))[index]``.
+
+ The subsequences of *iterable* that are of length *r* can be ordered
+ lexicographically. :func:`nth_combination` computes the subsequence at
+ sort position *index* directly, without computing the previous
+ subsequences.
+
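+ For example, the combination at lexicographic index ``5`` of
+ ``combinations(range(5), 3)``:
+
+ >>> nth_combination(range(5), 3, 5)
+ (0, 3, 4)
+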
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ if (r < 0) or (r > n):
+ raise ValueError
+
+ c = 1
+ k = min(r, n - r)
+ for i in range(1, k + 1):
+ c = c * (n - k + i) // i
+
+ if index < 0:
+ index += c
+
+ if (index < 0) or (index >= c):
+ raise IndexError
+
+ result = []
+ while r:
+ c, n, r = c * r // n, n - 1, r - 1
+ while index >= c:
+ index -= c
+ c, n = c * (n - r) // n, n - 1
+ result.append(pool[-1 - n])
+
+ return tuple(result)
+
+
+def prepend(value, iterator):
+ """Yield *value*, followed by the elements in *iterator*.
+
+ >>> value = '0'
+ >>> iterator = ['1', '2', '3']
+ >>> list(prepend(value, iterator))
+ ['0', '1', '2', '3']
+
+ To prepend multiple values, see :func:`itertools.chain`.
+
+ """
+ return chain([value], iterator)
diff --git a/third_party/python/more-itertools/more_itertools/tests/__init__.py b/third_party/python/more-itertools/more_itertools/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/more-itertools/more_itertools/tests/__init__.py
diff --git a/third_party/python/more-itertools/more_itertools/tests/test_more.py b/third_party/python/more-itertools/more_itertools/tests/test_more.py
new file mode 100644
index 0000000000..a1b1e43198
--- /dev/null
+++ b/third_party/python/more-itertools/more_itertools/tests/test_more.py
@@ -0,0 +1,2074 @@
+from __future__ import division, print_function, unicode_literals
+
+from collections import OrderedDict
+from decimal import Decimal
+from doctest import DocTestSuite
+from fractions import Fraction
+from functools import partial, reduce
+from heapq import merge
+from io import StringIO
+from itertools import (
+ chain,
+ count,
+ groupby,
+ islice,
+ permutations,
+ product,
+ repeat,
+)
+from operator import add, mul, itemgetter
+from unittest import TestCase
+
+from six.moves import filter, map, range, zip
+
+import more_itertools as mi
+
+
+def load_tests(loader, tests, ignore):
+ # Add the doctests
+ tests.addTests(DocTestSuite('more_itertools.more'))
+ return tests
+
+
+class CollateTests(TestCase):
+ """Unit tests for ``collate()``"""
+ # Also accidentally tests peekable, though that could use its own tests
+
+ def test_default(self):
+ """Test with the default `key` function."""
+ iterables = [range(4), range(7), range(3, 6)]
+ self.assertEqual(
+ sorted(reduce(list.__add__, [list(it) for it in iterables])),
+ list(mi.collate(*iterables))
+ )
+
+ def test_key(self):
+ """Test using a custom `key` function."""
+ iterables = [range(5, 0, -1), range(4, 0, -1)]
+ actual = sorted(
+ reduce(list.__add__, [list(it) for it in iterables]), reverse=True
+ )
+ expected = list(mi.collate(*iterables, key=lambda x: -x))
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ """Be nice if passed an empty list of iterables."""
+ self.assertEqual([], list(mi.collate()))
+
+ def test_one(self):
+ """Work when only 1 iterable is passed."""
+ self.assertEqual([0, 1], list(mi.collate(range(2))))
+
+ def test_reverse(self):
+ """Test the `reverse` kwarg."""
+ iterables = [range(4, 0, -1), range(7, 0, -1), range(3, 6, -1)]
+
+ actual = sorted(
+ reduce(list.__add__, [list(it) for it in iterables]), reverse=True
+ )
+ expected = list(mi.collate(*iterables, reverse=True))
+ self.assertEqual(actual, expected)
+
+ def test_alias(self):
+ self.assertNotEqual(merge.__doc__, mi.collate.__doc__)
+ self.assertNotEqual(partial.__doc__, mi.collate.__doc__)
+
+
+class ChunkedTests(TestCase):
+ """Tests for ``chunked()``"""
+
+ def test_even(self):
+ """Test when ``n`` divides evenly into the length of the iterable."""
+ self.assertEqual(
+ list(mi.chunked('ABCDEF', 3)), [['A', 'B', 'C'], ['D', 'E', 'F']]
+ )
+
+ def test_odd(self):
+ """Test when ``n`` does not divide evenly into the length of the
+ iterable.
+
+ """
+ self.assertEqual(
+ list(mi.chunked('ABCDE', 3)), [['A', 'B', 'C'], ['D', 'E']]
+ )
+
+
+class FirstTests(TestCase):
+ """Tests for ``first()``"""
+
+ def test_many(self):
+ """Test that it works on many-item iterables."""
+ # Also try it on a generator expression to make sure it works on
+ # whatever those return, across Python versions.
+ self.assertEqual(mi.first(x for x in range(4)), 0)
+
+ def test_one(self):
+ """Test that it doesn't raise StopIteration prematurely."""
+ self.assertEqual(mi.first([3]), 3)
+
+ def test_empty_stop_iteration(self):
+ """It should raise StopIteration for empty iterables."""
+ self.assertRaises(ValueError, lambda: mi.first([]))
+
+ def test_default(self):
+ """It should return the provided default arg for empty iterables."""
+ self.assertEqual(mi.first([], 'boo'), 'boo')
+
+
+class IterOnlyRange:
+ """User-defined iterable class which only support __iter__.
+
+ It is not specified to inherit ``object``, so indexing on an instance will
+ raise an ``AttributeError`` rather than ``TypeError`` in Python 2.
+
+ >>> r = IterOnlyRange(5)
+ >>> r[0]
+ AttributeError: IterOnlyRange instance has no attribute '__getitem__'
+
+ Note: In Python 3, ``TypeError`` will be raised because ``object`` is
+ inherited implicitly by default.
+
+ >>> r[0]
+ TypeError: 'IterOnlyRange' object does not support indexing
+ """
+ def __init__(self, n):
+ """Set the length of the range."""
+ self.n = n
+
+ def __iter__(self):
+ """Works same as range()."""
+ return iter(range(self.n))
+
+
+class LastTests(TestCase):
+ """Tests for ``last()``"""
+
+ def test_many_nonsliceable(self):
+ """Test that it works on many-item non-slice-able iterables."""
+ # Also try it on a generator expression to make sure it works on
+ # whatever those return, across Python versions.
+ self.assertEqual(mi.last(x for x in range(4)), 3)
+
+ def test_one_nonsliceable(self):
+ """Test that it doesn't raise StopIteration prematurely."""
+ self.assertEqual(mi.last(x for x in range(1)), 0)
+
+ def test_empty_stop_iteration_nonsliceable(self):
+ """It should raise ValueError for empty non-slice-able iterables."""
+ self.assertRaises(ValueError, lambda: mi.last(x for x in range(0)))
+
+ def test_default_nonsliceable(self):
+ """It should return the provided default arg for empty non-slice-able
+ iterables.
+ """
+ self.assertEqual(mi.last((x for x in range(0)), 'boo'), 'boo')
+
+ def test_many_sliceable(self):
+ """Test that it works on many-item slice-able iterables."""
+ self.assertEqual(mi.last([0, 1, 2, 3]), 3)
+
+ def test_one_sliceable(self):
+ """Test that it doesn't raise StopIteration prematurely."""
+ self.assertEqual(mi.last([3]), 3)
+
+ def test_empty_stop_iteration_sliceable(self):
+ """It should raise ValueError for empty slice-able iterables."""
+ self.assertRaises(ValueError, lambda: mi.last([]))
+
+ def test_default_sliceable(self):
+ """It should return the provided default arg for empty slice-able
+ iterables.
+ """
+ self.assertEqual(mi.last([], 'boo'), 'boo')
+
+ def test_dict(self):
+ """last(dic) and last(dic.keys()) should return same result."""
+ dic = {'a': 1, 'b': 2, 'c': 3}
+ self.assertEqual(mi.last(dic), mi.last(dic.keys()))
+
+ def test_ordereddict(self):
+ """last(dic) should return the last key."""
+ od = OrderedDict()
+ od['a'] = 1
+ od['b'] = 2
+ od['c'] = 3
+ self.assertEqual(mi.last(od), 'c')
+
+ def test_customrange(self):
+ """It should work on custom class where [] raises AttributeError."""
+ self.assertEqual(mi.last(IterOnlyRange(5)), 4)
+
+
+class PeekableTests(TestCase):
+ """Tests for ``peekable()`` behavor not incidentally covered by testing
+ ``collate()``
+
+ """
+ def test_peek_default(self):
+ """Make sure passing a default into ``peek()`` works."""
+ p = mi.peekable([])
+ self.assertEqual(p.peek(7), 7)
+
+ def test_truthiness(self):
+ """Make sure a ``peekable`` tests true iff there are items remaining in
+ the iterable.
+
+ """
+ p = mi.peekable([])
+ self.assertFalse(p)
+
+ p = mi.peekable(range(3))
+ self.assertTrue(p)
+
+ def test_simple_peeking(self):
+ """Make sure ``next`` and ``peek`` advance and don't advance the
+ iterator, respectively.
+
+ """
+ p = mi.peekable(range(10))
+ self.assertEqual(next(p), 0)
+ self.assertEqual(p.peek(), 1)
+ self.assertEqual(next(p), 1)
+
+ def test_indexing(self):
+ """
+ Indexing into the peekable shouldn't advance the iterator.
+ """
+ p = mi.peekable('abcdefghijkl')
+
+ # The 0th index is what ``next()`` will return
+ self.assertEqual(p[0], 'a')
+ self.assertEqual(next(p), 'a')
+
+ # Indexing further into the peekable shouldn't advance the iterator
+ self.assertEqual(p[2], 'd')
+ self.assertEqual(next(p), 'b')
+
+ # The 0th index moves up with the iterator; the last index follows
+ self.assertEqual(p[0], 'c')
+ self.assertEqual(p[9], 'l')
+
+ self.assertEqual(next(p), 'c')
+ self.assertEqual(p[8], 'l')
+
+ # Negative indexing should work too
+ self.assertEqual(p[-2], 'k')
+ self.assertEqual(p[-9], 'd')
+ self.assertRaises(IndexError, lambda: p[-10])
+
+ def test_slicing(self):
+ """Slicing the peekable shouldn't advance the iterator."""
+ seq = list('abcdefghijkl')
+ p = mi.peekable(seq)
+
+ # Slicing the peekable should just be like slicing a re-iterable
+ self.assertEqual(p[1:4], seq[1:4])
+
+ # Advancing the iterator moves the slices up also
+ self.assertEqual(next(p), 'a')
+ self.assertEqual(p[1:4], seq[1:][1:4])
+
+ # Implicit start and stop values should work
+ self.assertEqual(p[:5], seq[1:][:5])
+ self.assertEqual(p[:], seq[1:][:])
+
+ # Slicing past the end should work
+ self.assertEqual(p[:100], seq[1:][:100])
+
+ # Steps should work, including negative
+ self.assertEqual(p[::2], seq[1:][::2])
+ self.assertEqual(p[::-1], seq[1:][::-1])
+
+ def test_slicing_reset(self):
+ """Test slicing on a fresh iterable each time"""
+ iterable = ['0', '1', '2', '3', '4', '5']
+ indexes = list(range(-4, len(iterable) + 4)) + [None]
+ steps = [1, 2, 3, 4, -1, -2, -3, 4]
+ for slice_args in product(indexes, indexes, steps):
+ it = iter(iterable)
+ p = mi.peekable(it)
+ next(p)
+ index = slice(*slice_args)
+ actual = p[index]
+ expected = iterable[1:][index]
+ self.assertEqual(actual, expected, slice_args)
+
+ def test_slicing_error(self):
+ iterable = '01234567'
+ p = mi.peekable(iter(iterable))
+
+ # Prime the cache
+ p.peek()
+ old_cache = list(p._cache)
+
+ # Illegal slice
+ with self.assertRaises(ValueError):
+ p[1:-1:0]
+
+ # Neither the cache nor the iteration should be affected
+ self.assertEqual(old_cache, list(p._cache))
+ self.assertEqual(list(p), list(iterable))
+
+ def test_passthrough(self):
+ """Iterating a peekable without using ``peek()`` or ``prepend()``
+ should just give the underlying iterable's elements (a trivial test but
+ useful to set a baseline in case something goes wrong)"""
+ expected = [1, 2, 3, 4, 5]
+ actual = list(mi.peekable(expected))
+ self.assertEqual(actual, expected)
+
+ # prepend() behavior tests
+
+ def test_prepend(self):
+ """Tests intersperesed ``prepend()`` and ``next()`` calls"""
+ it = mi.peekable(range(2))
+ actual = []
+
+ # Test prepend() before next()
+ it.prepend(10)
+ actual += [next(it), next(it)]
+
+ # Test prepend() between next()s
+ it.prepend(11)
+ actual += [next(it), next(it)]
+
+ # Test prepend() after source iterable is consumed
+ it.prepend(12)
+ actual += [next(it)]
+
+ expected = [10, 0, 11, 1, 12]
+ self.assertEqual(actual, expected)
+
+ def test_multi_prepend(self):
+ """Tests prepending multiple items and getting them in proper order"""
+ it = mi.peekable(range(5))
+ actual = [next(it), next(it)]
+ it.prepend(10, 11, 12)
+ it.prepend(20, 21)
+ actual += list(it)
+ expected = [0, 1, 20, 21, 10, 11, 12, 2, 3, 4]
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ """Tests prepending in front of an empty iterable"""
+ it = mi.peekable([])
+ it.prepend(10)
+ actual = list(it)
+ expected = [10]
+ self.assertEqual(actual, expected)
+
+ def test_prepend_truthiness(self):
+ """Tests that ``__bool__()`` or ``__nonzero__()`` works properly
+ with ``prepend()``"""
+ it = mi.peekable(range(5))
+ self.assertTrue(it)
+ actual = list(it)
+ self.assertFalse(it)
+ it.prepend(10)
+ self.assertTrue(it)
+ actual += [next(it)]
+ self.assertFalse(it)
+ expected = [0, 1, 2, 3, 4, 10]
+ self.assertEqual(actual, expected)
+
+ def test_multi_prepend_peek(self):
+ """Tests prepending multiple elements and getting them in reverse order
+ while peeking"""
+ it = mi.peekable(range(5))
+ actual = [next(it), next(it)]
+ self.assertEqual(it.peek(), 2)
+ it.prepend(10, 11, 12)
+ self.assertEqual(it.peek(), 10)
+ it.prepend(20, 21)
+ self.assertEqual(it.peek(), 20)
+ actual += list(it)
+ self.assertFalse(it)
+ expected = [0, 1, 20, 21, 10, 11, 12, 2, 3, 4]
+ self.assertEqual(actual, expected)
+
+ def test_prepend_after_stop(self):
+ """Test resuming iteration after a previous exhaustion"""
+ it = mi.peekable(range(3))
+ self.assertEqual(list(it), [0, 1, 2])
+ self.assertRaises(StopIteration, lambda: next(it))
+ it.prepend(10)
+ self.assertEqual(next(it), 10)
+ self.assertRaises(StopIteration, lambda: next(it))
+
+ def test_prepend_slicing(self):
+ """Tests interaction between prepending and slicing"""
+ seq = list(range(20))
+ p = mi.peekable(seq)
+
+ p.prepend(30, 40, 50)
+ pseq = [30, 40, 50] + seq # pseq for prepended_seq
+
+ # adapt the specific tests from test_slicing
+ self.assertEqual(p[0], 30)
+ self.assertEqual(p[1:8], pseq[1:8])
+ self.assertEqual(p[1:], pseq[1:])
+ self.assertEqual(p[:5], pseq[:5])
+ self.assertEqual(p[:], pseq[:])
+ self.assertEqual(p[:100], pseq[:100])
+ self.assertEqual(p[::2], pseq[::2])
+ self.assertEqual(p[::-1], pseq[::-1])
+
+ def test_prepend_indexing(self):
+ """Tests interaction between prepending and indexing"""
+ seq = list(range(20))
+ p = mi.peekable(seq)
+
+ p.prepend(30, 40, 50)
+
+ self.assertEqual(p[0], 30)
+ self.assertEqual(next(p), 30)
+ self.assertEqual(p[2], 0)
+ self.assertEqual(next(p), 40)
+ self.assertEqual(p[0], 50)
+ self.assertEqual(p[9], 8)
+ self.assertEqual(next(p), 50)
+ self.assertEqual(p[8], 8)
+ self.assertEqual(p[-2], 18)
+ self.assertEqual(p[-9], 11)
+ self.assertRaises(IndexError, lambda: p[-21])
+
+ def test_prepend_iterable(self):
+ """Tests prepending from an iterable"""
+ it = mi.peekable(range(5))
+ # Don't directly use the range() object to avoid any range-specific
+ # optimizations
+ it.prepend(*(x for x in range(5)))
+ actual = list(it)
+ expected = list(chain(range(5), range(5)))
+ self.assertEqual(actual, expected)
+
+ def test_prepend_many(self):
+ """Tests that prepending a huge number of elements works"""
+ it = mi.peekable(range(5))
+ # Don't directly use the range() object to avoid any range-specific
+ # optimizations
+ it.prepend(*(x for x in range(20000)))
+ actual = list(it)
+ expected = list(chain(range(20000), range(5)))
+ self.assertEqual(actual, expected)
+
+ def test_prepend_reversed(self):
+ """Tests prepending from a reversed iterable"""
+ it = mi.peekable(range(3))
+ it.prepend(*reversed((10, 11, 12)))
+ actual = list(it)
+ expected = [12, 11, 10, 0, 1, 2]
+ self.assertEqual(actual, expected)
+
+
+class ConsumerTests(TestCase):
+ """Tests for ``consumer()``"""
+
+ def test_consumer(self):
+ @mi.consumer
+ def eater():
+ while True:
+ x = yield # noqa
+
+ e = eater()
+ e.send('hi') # without @consumer, would raise TypeError
+
+
+class DistinctPermutationsTests(TestCase):
+ def test_distinct_permutations(self):
+ """Make sure the output for ``distinct_permutations()`` is the same as
+ set(permutations(it)).
+
+ """
+ iterable = ['z', 'a', 'a', 'q', 'q', 'q', 'y']
+ test_output = sorted(mi.distinct_permutations(iterable))
+ ref_output = sorted(set(permutations(iterable)))
+ self.assertEqual(test_output, ref_output)
+
+ def test_other_iterables(self):
+ """Make sure ``distinct_permutations()`` accepts a different type of
+ iterables.
+
+ """
+ # a generator
+ iterable = (c for c in ['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ test_output = sorted(mi.distinct_permutations(iterable))
+ # "reload" it
+ iterable = (c for c in ['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ ref_output = sorted(set(permutations(iterable)))
+ self.assertEqual(test_output, ref_output)
+
+ # an iterator
+ iterable = iter(['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ test_output = sorted(mi.distinct_permutations(iterable))
+ # "reload" it
+ iterable = iter(['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ ref_output = sorted(set(permutations(iterable)))
+ self.assertEqual(test_output, ref_output)
+
+
+class IlenTests(TestCase):
+ def test_ilen(self):
+ """Sanity-checks for ``ilen()``."""
+ # Non-empty
+ self.assertEqual(
+ mi.ilen(filter(lambda x: x % 10 == 0, range(101))), 11
+ )
+
+ # Empty
+ self.assertEqual(mi.ilen((x for x in range(0))), 0)
+
+ # Iterable with __len__
+ self.assertEqual(mi.ilen(list(range(6))), 6)
+
+
+class WithIterTests(TestCase):
+ def test_with_iter(self):
+ s = StringIO('One fish\nTwo fish')
+ initial_words = [line.split()[0] for line in mi.with_iter(s)]
+
+ # Iterable's items should be faithfully represented
+ self.assertEqual(initial_words, ['One', 'Two'])
+ # The file object should be closed
+ self.assertEqual(s.closed, True)
+
+
+class OneTests(TestCase):
+ def test_basic(self):
+ it = iter(['item'])
+ self.assertEqual(mi.one(it), 'item')
+
+ def test_too_short(self):
+ it = iter([])
+ self.assertRaises(ValueError, lambda: mi.one(it))
+ self.assertRaises(IndexError, lambda: mi.one(it, too_short=IndexError))
+
+ def test_too_long(self):
+ it = count()
+ self.assertRaises(ValueError, lambda: mi.one(it)) # burn 0 and 1
+ self.assertEqual(next(it), 2)
+ self.assertRaises(
+ OverflowError, lambda: mi.one(it, too_long=OverflowError)
+ )
+
+
+class IntersperseTest(TestCase):
+ """ Tests for intersperse() """
+
+ def test_even(self):
+ iterable = (x for x in '01')
+ self.assertEqual(
+ list(mi.intersperse(None, iterable)), ['0', None, '1']
+ )
+
+ def test_odd(self):
+ iterable = (x for x in '012')
+ self.assertEqual(
+ list(mi.intersperse(None, iterable)), ['0', None, '1', None, '2']
+ )
+
+ def test_nested(self):
+ element = ('a', 'b')
+ iterable = (x for x in '012')
+ actual = list(mi.intersperse(element, iterable))
+ expected = ['0', ('a', 'b'), '1', ('a', 'b'), '2']
+ self.assertEqual(actual, expected)
+
+ def test_not_iterable(self):
+ self.assertRaises(TypeError, lambda: mi.intersperse('x', 1))
+
+ def test_n(self):
+ for n, element, expected in [
+ (1, '_', ['0', '_', '1', '_', '2', '_', '3', '_', '4', '_', '5']),
+ (2, '_', ['0', '1', '_', '2', '3', '_', '4', '5']),
+ (3, '_', ['0', '1', '2', '_', '3', '4', '5']),
+ (4, '_', ['0', '1', '2', '3', '_', '4', '5']),
+ (5, '_', ['0', '1', '2', '3', '4', '_', '5']),
+ (6, '_', ['0', '1', '2', '3', '4', '5']),
+ (7, '_', ['0', '1', '2', '3', '4', '5']),
+ (3, ['a', 'b'], ['0', '1', '2', ['a', 'b'], '3', '4', '5']),
+ ]:
+ iterable = (x for x in '012345')
+ actual = list(mi.intersperse(element, iterable, n=n))
+ self.assertEqual(actual, expected)
+
+ def test_n_zero(self):
+ self.assertRaises(
+ ValueError, lambda: list(mi.intersperse('x', '012', n=0))
+ )
+
+
+class UniqueToEachTests(TestCase):
+ """Tests for ``unique_to_each()``"""
+
+ def test_all_unique(self):
+ """When all the input iterables are unique the output should match
+ the input."""
+ iterables = [[1, 2], [3, 4, 5], [6, 7, 8]]
+ self.assertEqual(mi.unique_to_each(*iterables), iterables)
+
+ def test_duplicates(self):
+ """When there are duplicates in any of the input iterables that aren't
+ in the rest, those duplicates should be emitted."""
+ iterables = ["mississippi", "missouri"]
+ self.assertEqual(
+ mi.unique_to_each(*iterables), [['p', 'p'], ['o', 'u', 'r']]
+ )
+
+ def test_mixed(self):
+ """When the input iterables contain different types the function should
+ still behave properly"""
+ iterables = ['x', (i for i in range(3)), [1, 2, 3], tuple()]
+ self.assertEqual(mi.unique_to_each(*iterables), [['x'], [0], [3], []])
+
+
+class WindowedTests(TestCase):
+ """Tests for ``windowed()``"""
+
+ def test_basic(self):
+ actual = list(mi.windowed([1, 2, 3, 4, 5], 3))
+ expected = [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
+ self.assertEqual(actual, expected)
+
+ def test_large_size(self):
+ """
+ When the window size is larger than the iterable, and no fill value is
+ given, ``None`` should be filled in.
+ """
+ actual = list(mi.windowed([1, 2, 3, 4, 5], 6))
+ expected = [(1, 2, 3, 4, 5, None)]
+ self.assertEqual(actual, expected)
+
+ def test_fillvalue(self):
+ """
+ When sizes don't match evenly, the given fill value should be used.
+ """
+ iterable = [1, 2, 3, 4, 5]
+
+ for n, kwargs, expected in [
+ (6, {}, [(1, 2, 3, 4, 5, '!')]), # n > len(iterable)
+ (3, {'step': 3}, [(1, 2, 3), (4, 5, '!')]), # using ``step``
+ ]:
+ actual = list(mi.windowed(iterable, n, fillvalue='!', **kwargs))
+ self.assertEqual(actual, expected)
+
+ def test_zero(self):
+ """When the window size is zero, an empty tuple should be emitted."""
+ actual = list(mi.windowed([1, 2, 3, 4, 5], 0))
+ expected = [tuple()]
+ self.assertEqual(actual, expected)
+
+ def test_negative(self):
+ """When the window size is negative, ValueError should be raised."""
+ with self.assertRaises(ValueError):
+ list(mi.windowed([1, 2, 3, 4, 5], -1))
+
+ def test_step(self):
+ """The window should advance by the number of steps provided"""
+ iterable = [1, 2, 3, 4, 5, 6, 7]
+ for n, step, expected in [
+ (3, 2, [(1, 2, 3), (3, 4, 5), (5, 6, 7)]), # n > step
+ (3, 3, [(1, 2, 3), (4, 5, 6), (7, None, None)]), # n == step
+ (3, 4, [(1, 2, 3), (5, 6, 7)]), # line up nicely
+ (3, 5, [(1, 2, 3), (6, 7, None)]), # off by one
+ (3, 6, [(1, 2, 3), (7, None, None)]), # off by two
+ (3, 7, [(1, 2, 3)]), # step past the end
+ (7, 8, [(1, 2, 3, 4, 5, 6, 7)]), # step > len(iterable)
+ ]:
+ actual = list(mi.windowed(iterable, n, step=step))
+ self.assertEqual(actual, expected)
+
+ # Step must be greater than or equal to 1
+ with self.assertRaises(ValueError):
+ list(mi.windowed(iterable, 3, step=0))
+
+
+class BucketTests(TestCase):
+ """Tests for ``bucket()``"""
+
+ def test_basic(self):
+ iterable = [10, 20, 30, 11, 21, 31, 12, 22, 23, 33]
+ D = mi.bucket(iterable, key=lambda x: 10 * (x // 10))
+
+ # In-order access
+ self.assertEqual(list(D[10]), [10, 11, 12])
+
+ # Out of order access
+ self.assertEqual(list(D[30]), [30, 31, 33])
+ self.assertEqual(list(D[20]), [20, 21, 22, 23])
+
+ self.assertEqual(list(D[40]), []) # Nothing in here!
+
+ def test_in(self):
+ iterable = [10, 20, 30, 11, 21, 31, 12, 22, 23, 33]
+ D = mi.bucket(iterable, key=lambda x: 10 * (x // 10))
+
+ self.assertTrue(10 in D)
+ self.assertFalse(40 in D)
+ self.assertTrue(20 in D)
+ self.assertFalse(21 in D)
+
+ # Checking in-ness shouldn't advance the iterator
+ self.assertEqual(next(D[10]), 10)
+
+ def test_validator(self):
+ iterable = count(0)
+ key = lambda x: int(str(x)[0]) # First digit of each number
+ validator = lambda x: 0 < x < 10 # No leading zeros
+ D = mi.bucket(iterable, key, validator=validator)
+ self.assertEqual(mi.take(3, D[1]), [1, 10, 11])
+ self.assertNotIn(0, D) # Non-valid entries don't return True
+ self.assertNotIn(0, D._cache) # Don't store non-valid entries
+ self.assertEqual(list(D[0]), [])
+
+
+class SpyTests(TestCase):
+ """Tests for ``spy()``"""
+
+ def test_basic(self):
+ original_iterable = iter('abcdefg')
+ head, new_iterable = mi.spy(original_iterable)
+ self.assertEqual(head, ['a'])
+ self.assertEqual(
+ list(new_iterable), ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+ )
+
+ def test_unpacking(self):
+ original_iterable = iter('abcdefg')
+ (first, second, third), new_iterable = mi.spy(original_iterable, 3)
+ self.assertEqual(first, 'a')
+ self.assertEqual(second, 'b')
+ self.assertEqual(third, 'c')
+ self.assertEqual(
+ list(new_iterable), ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+ )
+
+ def test_too_many(self):
+ original_iterable = iter('abc')
+ head, new_iterable = mi.spy(original_iterable, 4)
+ self.assertEqual(head, ['a', 'b', 'c'])
+ self.assertEqual(list(new_iterable), ['a', 'b', 'c'])
+
+ def test_zero(self):
+ original_iterable = iter('abc')
+ head, new_iterable = mi.spy(original_iterable, 0)
+ self.assertEqual(head, [])
+ self.assertEqual(list(new_iterable), ['a', 'b', 'c'])
+
+
+class InterleaveTests(TestCase):
+ def test_even(self):
+ actual = list(mi.interleave([1, 4, 7], [2, 5, 8], [3, 6, 9]))
+ expected = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_short(self):
+ actual = list(mi.interleave([1, 4], [2, 5, 7], [3, 6, 8]))
+ expected = [1, 2, 3, 4, 5, 6]
+ self.assertEqual(actual, expected)
+
+ def test_mixed_types(self):
+ it_list = ['a', 'b', 'c', 'd']
+ it_str = '12345'
+ it_inf = count()
+ actual = list(mi.interleave(it_list, it_str, it_inf))
+ expected = ['a', '1', 0, 'b', '2', 1, 'c', '3', 2, 'd', '4', 3]
+ self.assertEqual(actual, expected)
+
+
+class InterleaveLongestTests(TestCase):
+ def test_even(self):
+ actual = list(mi.interleave_longest([1, 4, 7], [2, 5, 8], [3, 6, 9]))
+ expected = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_short(self):
+ actual = list(mi.interleave_longest([1, 4], [2, 5, 7], [3, 6, 8]))
+ expected = [1, 2, 3, 4, 5, 6, 7, 8]
+ self.assertEqual(actual, expected)
+
+ def test_mixed_types(self):
+ it_list = ['a', 'b', 'c', 'd']
+ it_str = '12345'
+ it_gen = (x for x in range(3))
+ actual = list(mi.interleave_longest(it_list, it_str, it_gen))
+ expected = ['a', '1', 0, 'b', '2', 1, 'c', '3', 2, 'd', '4', '5']
+ self.assertEqual(actual, expected)
+
+
+class TestCollapse(TestCase):
+ """Tests for ``collapse()``"""
+
+ def test_collapse(self):
+ l = [[1], 2, [[3], 4], [[[5]]]]
+ self.assertEqual(list(mi.collapse(l)), [1, 2, 3, 4, 5])
+
+ def test_collapse_to_string(self):
+ l = [["s1"], "s2", [["s3"], "s4"], [[["s5"]]]]
+ self.assertEqual(list(mi.collapse(l)), ["s1", "s2", "s3", "s4", "s5"])
+
+ def test_collapse_flatten(self):
+ l = [[1], [2], [[3], 4], [[[5]]]]
+ self.assertEqual(list(mi.collapse(l, levels=1)), list(mi.flatten(l)))
+
+ def test_collapse_to_level(self):
+ l = [[1], 2, [[3], 4], [[[5]]]]
+ self.assertEqual(list(mi.collapse(l, levels=2)), [1, 2, 3, 4, [5]])
+ self.assertEqual(
+ list(mi.collapse(mi.collapse(l, levels=1), levels=1)),
+ list(mi.collapse(l, levels=2))
+ )
+
+ def test_collapse_to_list(self):
+ l = (1, [2], (3, [4, (5,)], 'ab'))
+ actual = list(mi.collapse(l, base_type=list))
+ expected = [1, [2], 3, [4, (5,)], 'ab']
+ self.assertEqual(actual, expected)
+
+
+class SideEffectTests(TestCase):
+ """Tests for ``side_effect()``"""
+
+ def test_individual(self):
+ # The function increments the counter for each call
+ counter = [0]
+
+ def func(arg):
+ counter[0] += 1
+
+ result = list(mi.side_effect(func, range(10)))
+ self.assertEqual(result, list(range(10)))
+ self.assertEqual(counter[0], 10)
+
+ def test_chunked(self):
+ # The function increments the counter for each call
+ counter = [0]
+
+ def func(arg):
+ counter[0] += 1
+
+ result = list(mi.side_effect(func, range(10), 2))
+ self.assertEqual(result, list(range(10)))
+ self.assertEqual(counter[0], 5)
+
+ def test_before_after(self):
+ f = StringIO()
+ collector = []
+
+ def func(item):
+ print(item, file=f)
+ collector.append(f.getvalue())
+
+ def it():
+ yield u'a'
+ yield u'b'
+ raise RuntimeError('kaboom')
+
+ before = lambda: print('HEADER', file=f)
+ after = f.close
+
+ try:
+ mi.consume(mi.side_effect(func, it(), before=before, after=after))
+ except RuntimeError:
+ pass
+
+ # The iterable should have been written to the file
+ self.assertEqual(collector, [u'HEADER\na\n', u'HEADER\na\nb\n'])
+
+ # The file should be closed even though something bad happened
+ self.assertTrue(f.closed)
+
+ def test_before_fails(self):
+ f = StringIO()
+ func = lambda x: print(x, file=f)
+
+ def before():
+ raise RuntimeError('ouch')
+
+ try:
+ mi.consume(
+ mi.side_effect(func, u'abc', before=before, after=f.close)
+ )
+ except RuntimeError:
+ pass
+
+ # The file should be closed even though something bad happened in the
+ # before function
+ self.assertTrue(f.closed)
+
+
+class SlicedTests(TestCase):
+ """Tests for ``sliced()``"""
+
+ def test_even(self):
+ """Test when the length of the sequence is divisible by *n*"""
+ seq = 'ABCDEFGHI'
+ self.assertEqual(list(mi.sliced(seq, 3)), ['ABC', 'DEF', 'GHI'])
+
+ def test_odd(self):
+ """Test when the length of the sequence is not divisible by *n*"""
+ seq = 'ABCDEFGHI'
+ self.assertEqual(list(mi.sliced(seq, 4)), ['ABCD', 'EFGH', 'I'])
+
+ def test_not_sliceable(self):
+ seq = (x for x in 'ABCDEFGHI')
+
+ with self.assertRaises(TypeError):
+ list(mi.sliced(seq, 3))
+
+
+class SplitAtTests(TestCase):
+ """Tests for ``split()``"""
+
+ def comp_with_str_split(self, str_to_split, delim):
+ pred = lambda c: c == delim
+ actual = list(map(''.join, mi.split_at(str_to_split, pred)))
+ expected = str_to_split.split(delim)
+ self.assertEqual(actual, expected)
+
+ def test_separators(self):
+ test_strs = ['', 'abcba', 'aaabbbcccddd', 'e']
+ for s, delim in product(test_strs, 'abcd'):
+ self.comp_with_str_split(s, delim)
+
+
+class SplitBeforeTest(TestCase):
+ """Tests for ``split_before()``"""
+
+ def test_starts_with_sep(self):
+ actual = list(mi.split_before('xooxoo', lambda c: c == 'x'))
+ expected = [['x', 'o', 'o'], ['x', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+ def test_ends_with_sep(self):
+ actual = list(mi.split_before('ooxoox', lambda c: c == 'x'))
+ expected = [['o', 'o'], ['x', 'o', 'o'], ['x']]
+ self.assertEqual(actual, expected)
+
+ def test_no_sep(self):
+ actual = list(mi.split_before('ooo', lambda c: c == 'x'))
+ expected = [['o', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+
+class SplitAfterTest(TestCase):
+ """Tests for ``split_after()``"""
+
+ def test_starts_with_sep(self):
+ actual = list(mi.split_after('xooxoo', lambda c: c == 'x'))
+ expected = [['x'], ['o', 'o', 'x'], ['o', 'o']]
+ self.assertEqual(actual, expected)
+
+ def test_ends_with_sep(self):
+ actual = list(mi.split_after('ooxoox', lambda c: c == 'x'))
+ expected = [['o', 'o', 'x'], ['o', 'o', 'x']]
+ self.assertEqual(actual, expected)
+
+ def test_no_sep(self):
+ actual = list(mi.split_after('ooo', lambda c: c == 'x'))
+ expected = [['o', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+
+class PaddedTest(TestCase):
+ """Tests for ``padded()``"""
+
+ def test_no_n(self):
+ seq = [1, 2, 3]
+
+ # No fillvalue
+ self.assertEqual(mi.take(5, mi.padded(seq)), [1, 2, 3, None, None])
+
+ # With fillvalue
+ self.assertEqual(
+ mi.take(5, mi.padded(seq, fillvalue='')), [1, 2, 3, '', '']
+ )
+
+ def test_invalid_n(self):
+ self.assertRaises(ValueError, lambda: list(mi.padded([1, 2, 3], n=-1)))
+ self.assertRaises(ValueError, lambda: list(mi.padded([1, 2, 3], n=0)))
+
+ def test_valid_n(self):
+ seq = [1, 2, 3, 4, 5]
+
+ # No need for padding: len(seq) <= n
+ self.assertEqual(list(mi.padded(seq, n=4)), [1, 2, 3, 4, 5])
+ self.assertEqual(list(mi.padded(seq, n=5)), [1, 2, 3, 4, 5])
+
+ # No fillvalue
+ self.assertEqual(
+ list(mi.padded(seq, n=7)), [1, 2, 3, 4, 5, None, None]
+ )
+
+ # With fillvalue
+ self.assertEqual(
+ list(mi.padded(seq, fillvalue='', n=7)), [1, 2, 3, 4, 5, '', '']
+ )
+
+ def test_next_multiple(self):
+ seq = [1, 2, 3, 4, 5, 6]
+
+ # No need for padding: len(seq) % n == 0
+ self.assertEqual(
+ list(mi.padded(seq, n=3, next_multiple=True)), [1, 2, 3, 4, 5, 6]
+ )
+
+ # Padding needed: len(seq) < n
+ self.assertEqual(
+ list(mi.padded(seq, n=8, next_multiple=True)),
+ [1, 2, 3, 4, 5, 6, None, None]
+ )
+
+ # No padding needed: len(seq) == n
+ self.assertEqual(
+ list(mi.padded(seq, n=6, next_multiple=True)), [1, 2, 3, 4, 5, 6]
+ )
+
+ # Padding needed: len(seq) > n
+ self.assertEqual(
+ list(mi.padded(seq, n=4, next_multiple=True)),
+ [1, 2, 3, 4, 5, 6, None, None]
+ )
+
+ # With fillvalue
+ self.assertEqual(
+ list(mi.padded(seq, fillvalue='', n=4, next_multiple=True)),
+ [1, 2, 3, 4, 5, 6, '', '']
+ )
+
+
+class DistributeTest(TestCase):
+ """Tests for distribute()"""
+
+ def test_invalid_n(self):
+ self.assertRaises(ValueError, lambda: mi.distribute(-1, [1, 2, 3]))
+ self.assertRaises(ValueError, lambda: mi.distribute(0, [1, 2, 3]))
+
+ def test_basic(self):
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+
+ for n, expected in [
+ (1, [iterable]),
+ (2, [[1, 3, 5, 7, 9], [2, 4, 6, 8, 10]]),
+ (3, [[1, 4, 7, 10], [2, 5, 8], [3, 6, 9]]),
+ (10, [[n] for n in range(1, 10 + 1)]),
+ ]:
+ self.assertEqual(
+ [list(x) for x in mi.distribute(n, iterable)], expected
+ )
+
+ def test_large_n(self):
+ iterable = [1, 2, 3, 4]
+ self.assertEqual(
+ [list(x) for x in mi.distribute(6, iterable)],
+ [[1], [2], [3], [4], [], []]
+ )
+
+
+class StaggerTest(TestCase):
+ """Tests for ``stagger()``"""
+
+ def test_default(self):
+ iterable = [0, 1, 2, 3]
+ actual = list(mi.stagger(iterable))
+ expected = [(None, 0, 1), (0, 1, 2), (1, 2, 3)]
+ self.assertEqual(actual, expected)
+
+ def test_offsets(self):
+ iterable = [0, 1, 2, 3]
+ for offsets, expected in [
+ ((-2, 0, 2), [('', 0, 2), ('', 1, 3)]),
+ ((-2, -1), [('', ''), ('', 0), (0, 1), (1, 2), (2, 3)]),
+ ((1, 2), [(1, 2), (2, 3)]),
+ ]:
+ all_groups = mi.stagger(iterable, offsets=offsets, fillvalue='')
+ self.assertEqual(list(all_groups), expected)
+
+ def test_longest(self):
+ iterable = [0, 1, 2, 3]
+ for offsets, expected in [
+ (
+ (-1, 0, 1),
+ [('', 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, ''), (3, '', '')]
+ ),
+ ((-2, -1), [('', ''), ('', 0), (0, 1), (1, 2), (2, 3), (3, '')]),
+ ((1, 2), [(1, 2), (2, 3), (3, '')]),
+ ]:
+ all_groups = mi.stagger(
+ iterable, offsets=offsets, fillvalue='', longest=True
+ )
+ self.assertEqual(list(all_groups), expected)
+
+
+class ZipOffsetTest(TestCase):
+ """Tests for ``zip_offset()``"""
+
+ def test_shortest(self):
+ a_1 = [0, 1, 2, 3]
+ a_2 = [0, 1, 2, 3, 4, 5]
+ a_3 = [0, 1, 2, 3, 4, 5, 6, 7]
+ actual = list(
+ mi.zip_offset(a_1, a_2, a_3, offsets=(-1, 0, 1), fillvalue='')
+ )
+ expected = [('', 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, 4), (3, 4, 5)]
+ self.assertEqual(actual, expected)
+
+ def test_longest(self):
+ a_1 = [0, 1, 2, 3]
+ a_2 = [0, 1, 2, 3, 4, 5]
+ a_3 = [0, 1, 2, 3, 4, 5, 6, 7]
+ actual = list(
+ mi.zip_offset(a_1, a_2, a_3, offsets=(-1, 0, 1), longest=True)
+ )
+ expected = [
+ (None, 0, 1),
+ (0, 1, 2),
+ (1, 2, 3),
+ (2, 3, 4),
+ (3, 4, 5),
+ (None, 5, 6),
+ (None, None, 7),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_mismatch(self):
+ iterables = [0, 1, 2], [2, 3, 4]
+ offsets = (-1, 0, 1)
+ self.assertRaises(
+ ValueError,
+ lambda: list(mi.zip_offset(*iterables, offsets=offsets))
+ )
+
+
+class SortTogetherTest(TestCase):
+ """Tests for sort_together()"""
+
+ def test_key_list(self):
+ """tests `key_list` including default, iterables include duplicates"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20]
+ ]
+
+ self.assertEqual(
+ mi.sort_together(iterables),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('June', 'July', 'July', 'May', 'Aug.', 'May'),
+ (70, 100, 20, 97, 20, 100)
+ ]
+ )
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(0, 1)),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('July', 'July', 'June', 'Aug.', 'May', 'May'),
+ (100, 20, 70, 20, 97, 100)
+ ]
+ )
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(0, 1, 2)),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('July', 'July', 'June', 'Aug.', 'May', 'May'),
+ (20, 100, 70, 20, 97, 100)
+ ]
+ )
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(2,)),
+ [
+ ('GA', 'CT', 'CT', 'GA', 'GA', 'CT'),
+ ('Aug.', 'July', 'June', 'May', 'May', 'July'),
+ (20, 20, 70, 97, 100, 100)
+ ]
+ )
+
+ def test_invalid_key_list(self):
+ """tests `key_list` for indexes not available in `iterables`"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20]
+ ]
+
+ self.assertRaises(
+ IndexError, lambda: mi.sort_together(iterables, key_list=(5,))
+ )
+
+ def test_reverse(self):
+ """tests `reverse` to ensure a reverse sort for `key_list` iterables"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20]
+ ]
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(0, 1, 2), reverse=True),
+ [('GA', 'GA', 'GA', 'CT', 'CT', 'CT'),
+ ('May', 'May', 'Aug.', 'June', 'July', 'July'),
+ (100, 97, 20, 70, 100, 20)]
+ )
+
+ def test_uneven_iterables(self):
+ """tests trimming of iterables to the shortest length before sorting"""
+ iterables = [['GA', 'GA', 'GA', 'CT', 'CT', 'CT', 'MA'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20, 0]]
+
+ self.assertEqual(
+ mi.sort_together(iterables),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('June', 'July', 'July', 'May', 'Aug.', 'May'),
+ (70, 100, 20, 97, 20, 100)
+ ]
+ )
+
+
+class DivideTest(TestCase):
+ """Tests for divide()"""
+
+ def test_invalid_n(self):
+ self.assertRaises(ValueError, lambda: mi.divide(-1, [1, 2, 3]))
+ self.assertRaises(ValueError, lambda: mi.divide(0, [1, 2, 3]))
+
+ def test_basic(self):
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+
+ for n, expected in [
+ (1, [iterable]),
+ (2, [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]),
+ (3, [[1, 2, 3, 4], [5, 6, 7], [8, 9, 10]]),
+ (10, [[n] for n in range(1, 10 + 1)]),
+ ]:
+ self.assertEqual(
+ [list(x) for x in mi.divide(n, iterable)], expected
+ )
+
+ def test_large_n(self):
+ iterable = [1, 2, 3, 4]
+ self.assertEqual(
+ [list(x) for x in mi.divide(6, iterable)],
+ [[1], [2], [3], [4], [], []]
+ )
+
+
+class TestAlwaysIterable(TestCase):
+ """Tests for always_iterable()"""
+ def test_single(self):
+ self.assertEqual(list(mi.always_iterable(1)), [1])
+
+ def test_strings(self):
+ for obj in ['foo', b'bar', u'baz']:
+ actual = list(mi.always_iterable(obj))
+ expected = [obj]
+ self.assertEqual(actual, expected)
+
+ def test_base_type(self):
+ dict_obj = {'a': 1, 'b': 2}
+ str_obj = '123'
+
+ # Default: dicts are iterable like they normally are
+ default_actual = list(mi.always_iterable(dict_obj))
+ default_expected = list(dict_obj)
+ self.assertEqual(default_actual, default_expected)
+
+ # Unitary types set: dicts are not iterable
+ custom_actual = list(mi.always_iterable(dict_obj, base_type=dict))
+ custom_expected = [dict_obj]
+ self.assertEqual(custom_actual, custom_expected)
+
+ # With base_type=None, strings are iterated like any other iterable
+ str_actual = list(mi.always_iterable(str_obj, base_type=None))
+ str_expected = list(str_obj)
+ self.assertEqual(str_actual, str_expected)
+
+ def test_iterables(self):
+ self.assertEqual(list(mi.always_iterable([0, 1])), [0, 1])
+ self.assertEqual(
+ list(mi.always_iterable([0, 1], base_type=list)), [[0, 1]]
+ )
+ self.assertEqual(
+ list(mi.always_iterable(iter('foo'))), ['f', 'o', 'o']
+ )
+ self.assertEqual(list(mi.always_iterable([])), [])
+
+ def test_none(self):
+ self.assertEqual(list(mi.always_iterable(None)), [])
+
+ def test_generator(self):
+ def _gen():
+ yield 0
+ yield 1
+
+ self.assertEqual(list(mi.always_iterable(_gen())), [0, 1])
+
+
+class AdjacentTests(TestCase):
+ def test_typical(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10)))
+ expected = [(True, 0), (True, 1), (False, 2), (False, 3), (True, 4),
+ (True, 5), (True, 6), (False, 7), (False, 8), (False, 9)]
+ self.assertEqual(actual, expected)
+
+ def test_empty_iterable(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, []))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_length_one(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, [0]))
+ expected = [(True, 0)]
+ self.assertEqual(actual, expected)
+
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, [1]))
+ expected = [(False, 1)]
+ self.assertEqual(actual, expected)
+
+ def test_consecutive_true(self):
+ """Test that when the predicate matches multiple consecutive elements
+ it doesn't repeat elements in the output"""
+ actual = list(mi.adjacent(lambda x: x % 5 < 2, range(10)))
+ expected = [(True, 0), (True, 1), (True, 2), (False, 3), (True, 4),
+ (True, 5), (True, 6), (True, 7), (False, 8), (False, 9)]
+ self.assertEqual(actual, expected)
+
+ def test_distance(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10), distance=2))
+ expected = [(True, 0), (True, 1), (True, 2), (True, 3), (True, 4),
+ (True, 5), (True, 6), (True, 7), (False, 8), (False, 9)]
+ self.assertEqual(actual, expected)
+
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10), distance=3))
+ expected = [(True, 0), (True, 1), (True, 2), (True, 3), (True, 4),
+ (True, 5), (True, 6), (True, 7), (True, 8), (False, 9)]
+ self.assertEqual(actual, expected)
+
+ def test_large_distance(self):
+ """Test distance larger than the length of the iterable"""
+ iterable = range(10)
+ actual = list(mi.adjacent(lambda x: x % 5 == 4, iterable, distance=20))
+ expected = list(zip(repeat(True), iterable))
+ self.assertEqual(actual, expected)
+
+ actual = list(mi.adjacent(lambda x: False, iterable, distance=20))
+ expected = list(zip(repeat(False), iterable))
+ self.assertEqual(actual, expected)
+
+ def test_zero_distance(self):
+ """Test that adjacent() reduces to zip+map when distance is 0"""
+ iterable = range(1000)
+ predicate = lambda x: x % 4 == 2
+ actual = mi.adjacent(predicate, iterable, 0)
+ expected = zip(map(predicate, iterable), iterable)
+ self.assertTrue(all(a == e for a, e in zip(actual, expected)))
+
+ def test_negative_distance(self):
+ """Test that adjacent() raises an error with negative distance"""
+ pred = lambda x: x
+ self.assertRaises(
+ ValueError, lambda: mi.adjacent(pred, range(1000), -1)
+ )
+ self.assertRaises(
+ ValueError, lambda: mi.adjacent(pred, range(10), -10)
+ )
+
+ def test_grouping(self):
+ """Test interaction of adjacent() with groupby_transform()"""
+ iterable = mi.adjacent(lambda x: x % 5 == 0, range(10))
+ grouper = mi.groupby_transform(iterable, itemgetter(0), itemgetter(1))
+ actual = [(k, list(g)) for k, g in grouper]
+ expected = [
+ (True, [0, 1]),
+ (False, [2, 3]),
+ (True, [4, 5, 6]),
+ (False, [7, 8, 9]),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_call_once(self):
+ """Test that the predicate is only called once per item."""
+ already_seen = set()
+ iterable = range(10)
+
+ def predicate(item):
+ self.assertNotIn(item, already_seen)
+ already_seen.add(item)
+ return True
+
+ actual = list(mi.adjacent(predicate, iterable))
+ expected = [(True, x) for x in iterable]
+ self.assertEqual(actual, expected)
+
+
+class GroupByTransformTests(TestCase):
+ def assertAllGroupsEqual(self, groupby1, groupby2):
+ """Compare two groupby objects for equality, both keys and groups."""
+ for a, b in zip(groupby1, groupby2):
+ key1, group1 = a
+ key2, group2 = b
+ self.assertEqual(key1, key2)
+ self.assertListEqual(list(group1), list(group2))
+ self.assertRaises(StopIteration, lambda: next(groupby1))
+ self.assertRaises(StopIteration, lambda: next(groupby2))
+
+ def test_default_funcs(self):
+ """Test that groupby_transform() with default args mimics groupby()"""
+ iterable = [(x // 5, x) for x in range(1000)]
+ actual = mi.groupby_transform(iterable)
+ expected = groupby(iterable)
+ self.assertAllGroupsEqual(actual, expected)
+
+ def test_valuefunc(self):
+ iterable = [(int(x / 5), int(x / 3), x) for x in range(10)]
+
+ # Test the standard usage of grouping one iterable using another's keys
+ grouper = mi.groupby_transform(
+ iterable, keyfunc=itemgetter(0), valuefunc=itemgetter(-1)
+ )
+ actual = [(k, list(g)) for k, g in grouper]
+ expected = [(0, [0, 1, 2, 3, 4]), (1, [5, 6, 7, 8, 9])]
+ self.assertEqual(actual, expected)
+
+ grouper = mi.groupby_transform(
+ iterable, keyfunc=itemgetter(1), valuefunc=itemgetter(-1)
+ )
+ actual = [(k, list(g)) for k, g in grouper]
+ expected = [(0, [0, 1, 2]), (1, [3, 4, 5]), (2, [6, 7, 8]), (3, [9])]
+ self.assertEqual(actual, expected)
+
+ # and now for something a little different
+ d = dict(zip(range(10), 'abcdefghij'))
+ grouper = mi.groupby_transform(
+ range(10), keyfunc=lambda x: x // 5, valuefunc=d.get
+ )
+ actual = [(k, ''.join(g)) for k, g in grouper]
+ expected = [(0, 'abcde'), (1, 'fghij')]
+ self.assertEqual(actual, expected)
+
+ def test_no_valuefunc(self):
+ iterable = range(1000)
+
+ def key(x):
+ return x // 5
+
+ actual = mi.groupby_transform(iterable, key, valuefunc=None)
+ expected = groupby(iterable, key)
+ self.assertAllGroupsEqual(actual, expected)
+
+ actual = mi.groupby_transform(iterable, key) # default valuefunc
+ expected = groupby(iterable, key)
+ self.assertAllGroupsEqual(actual, expected)
+
+
+class NumericRangeTests(TestCase):
+ def test_basic(self):
+ for args, expected in [
+ ((4,), [0, 1, 2, 3]),
+ ((4.0,), [0.0, 1.0, 2.0, 3.0]),
+ ((1.0, 4), [1.0, 2.0, 3.0]),
+ ((1, 4.0), [1, 2, 3]),
+ ((1.0, 5), [1.0, 2.0, 3.0, 4.0]),
+ ((0, 20, 5), [0, 5, 10, 15]),
+ ((0, 20, 5.0), [0.0, 5.0, 10.0, 15.0]),
+ ((0, 10, 3), [0, 3, 6, 9]),
+ ((0, 10, 3.0), [0.0, 3.0, 6.0, 9.0]),
+ ((0, -5, -1), [0, -1, -2, -3, -4]),
+ ((0.0, -5, -1), [0.0, -1.0, -2.0, -3.0, -4.0]),
+ ((1, 2, Fraction(1, 2)), [Fraction(1, 1), Fraction(3, 2)]),
+ ((0,), []),
+ ((0.0,), []),
+ ((1, 0), []),
+ ((1.0, 0.0), []),
+ ((Fraction(2, 1),), [Fraction(0, 1), Fraction(1, 1)]),
+ ((Decimal('2.0'),), [Decimal('0.0'), Decimal('1.0')]),
+ ]:
+ actual = list(mi.numeric_range(*args))
+ self.assertEqual(actual, expected)
+ self.assertTrue(
+ all(type(a) == type(e) for a, e in zip(actual, expected))
+ )
+
+ def test_arg_count(self):
+ self.assertRaises(TypeError, lambda: list(mi.numeric_range()))
+ self.assertRaises(
+ TypeError, lambda: list(mi.numeric_range(0, 1, 2, 3))
+ )
+
+ def test_zero_step(self):
+ self.assertRaises(
+ ValueError, lambda: list(mi.numeric_range(1, 2, 0))
+ )
+
+
+class CountCycleTests(TestCase):
+ def test_basic(self):
+ expected = [
+ (0, 'a'), (0, 'b'), (0, 'c'),
+ (1, 'a'), (1, 'b'), (1, 'c'),
+ (2, 'a'), (2, 'b'), (2, 'c'),
+ ]
+ for actual in [
+ mi.take(9, mi.count_cycle('abc')), # n=None
+ list(mi.count_cycle('abc', 3)), # n=3
+ ]:
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ self.assertEqual(list(mi.count_cycle('')), [])
+ self.assertEqual(list(mi.count_cycle('', 2)), [])
+
+ def test_negative(self):
+ self.assertEqual(list(mi.count_cycle('abc', -3)), [])
+
+
+class LocateTests(TestCase):
+ def test_default_pred(self):
+ iterable = [0, 1, 1, 0, 1, 0, 0]
+ actual = list(mi.locate(iterable))
+ expected = [1, 2, 4]
+ self.assertEqual(actual, expected)
+
+ def test_no_matches(self):
+ iterable = [0, 0, 0]
+ actual = list(mi.locate(iterable))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_custom_pred(self):
+ iterable = ['0', 1, 1, '0', 1, '0', '0']
+ pred = lambda x: x == '0'
+ actual = list(mi.locate(iterable, pred))
+ expected = [0, 3, 5, 6]
+ self.assertEqual(actual, expected)
+
+ def test_window_size(self):
+ iterable = ['0', 1, 1, '0', 1, '0', '0']
+ pred = lambda *args: args == ('0', 1)
+ actual = list(mi.locate(iterable, pred, window_size=2))
+ expected = [0, 3]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_large(self):
+ iterable = [1, 2, 3, 4]
+ pred = lambda a, b, c, d, e: True
+ actual = list(mi.locate(iterable, pred, window_size=5))
+ expected = [0]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_zero(self):
+ iterable = [1, 2, 3, 4]
+ pred = lambda: True
+ with self.assertRaises(ValueError):
+ list(mi.locate(iterable, pred, window_size=0))
+
+
+class StripFunctionTests(TestCase):
+ def test_hashable(self):
+ iterable = list('www.example.com')
+ pred = lambda x: x in set('cmowz.')
+
+ self.assertEqual(list(mi.lstrip(iterable, pred)), list('example.com'))
+ self.assertEqual(list(mi.rstrip(iterable, pred)), list('www.example'))
+ self.assertEqual(list(mi.strip(iterable, pred)), list('example'))
+
+ def test_not_hashable(self):
+ iterable = [
+ list('http://'), list('www'), list('.example'), list('.com')
+ ]
+ pred = lambda x: x in [list('http://'), list('www'), list('.com')]
+
+ self.assertEqual(list(mi.lstrip(iterable, pred)), iterable[2:])
+ self.assertEqual(list(mi.rstrip(iterable, pred)), iterable[:3])
+ self.assertEqual(list(mi.strip(iterable, pred)), iterable[2: 3])
+
+ def test_math(self):
+ iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2]
+ pred = lambda x: x <= 2
+
+ self.assertEqual(list(mi.lstrip(iterable, pred)), iterable[3:])
+ self.assertEqual(list(mi.rstrip(iterable, pred)), iterable[:-3])
+ self.assertEqual(list(mi.strip(iterable, pred)), iterable[3:-3])
+
+
+class IsliceExtendedTests(TestCase):
+ def test_all(self):
+ iterable = ['0', '1', '2', '3', '4', '5']
+ indexes = list(range(-4, len(iterable) + 4)) + [None]
+ steps = [1, 2, 3, 4, -1, -2, -3, 4]
+ for slice_args in product(indexes, indexes, steps):
+ try:
+ actual = list(mi.islice_extended(iterable, *slice_args))
+ except Exception as e:
+ self.fail((slice_args, e))
+
+ expected = iterable[slice(*slice_args)]
+ self.assertEqual(actual, expected, slice_args)
+
+ def test_zero_step(self):
+ with self.assertRaises(ValueError):
+ list(mi.islice_extended([1, 2, 3], 0, 1, 0))
+
+
+class ConsecutiveGroupsTest(TestCase):
+ def test_numbers(self):
+ iterable = [-10, -8, -7, -6, 1, 2, 4, 5, -1, 7]
+ actual = [list(g) for g in mi.consecutive_groups(iterable)]
+ expected = [[-10], [-8, -7, -6], [1, 2], [4, 5], [-1], [7]]
+ self.assertEqual(actual, expected)
+
+ def test_custom_ordering(self):
+ iterable = ['1', '10', '11', '20', '21', '22', '30', '31']
+ ordering = lambda x: int(x)
+ actual = [list(g) for g in mi.consecutive_groups(iterable, ordering)]
+ expected = [['1'], ['10', '11'], ['20', '21', '22'], ['30', '31']]
+ self.assertEqual(actual, expected)
+
+ def test_exotic_ordering(self):
+ iterable = [
+ ('a', 'b', 'c', 'd'),
+ ('a', 'c', 'b', 'd'),
+ ('a', 'c', 'd', 'b'),
+ ('a', 'd', 'b', 'c'),
+ ('d', 'b', 'c', 'a'),
+ ('d', 'c', 'a', 'b'),
+ ]
+ ordering = list(permutations('abcd')).index
+ actual = [list(g) for g in mi.consecutive_groups(iterable, ordering)]
+ expected = [
+ [('a', 'b', 'c', 'd')],
+ [('a', 'c', 'b', 'd'), ('a', 'c', 'd', 'b'), ('a', 'd', 'b', 'c')],
+ [('d', 'b', 'c', 'a'), ('d', 'c', 'a', 'b')],
+ ]
+ self.assertEqual(actual, expected)
+
+
+class DifferenceTest(TestCase):
+ def test_normal(self):
+ iterable = [10, 20, 30, 40, 50]
+ actual = list(mi.difference(iterable))
+ expected = [10, 10, 10, 10, 10]
+ self.assertEqual(actual, expected)
+
+ def test_custom(self):
+ iterable = [10, 20, 30, 40, 50]
+ actual = list(mi.difference(iterable, add))
+ expected = [10, 30, 50, 70, 90]
+ self.assertEqual(actual, expected)
+
+ def test_roundtrip(self):
+ original = list(range(100))
+ accumulated = mi.accumulate(original)
+ actual = list(mi.difference(accumulated))
+ self.assertEqual(actual, original)
+
+ def test_one(self):
+ self.assertEqual(list(mi.difference([0])), [0])
+
+ def test_empty(self):
+ self.assertEqual(list(mi.difference([])), [])
+
+
+class SeekableTest(TestCase):
+ def test_exhaustion_reset(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(list(s), iterable) # Normal iteration
+ self.assertEqual(list(s), []) # Iterable is exhausted
+
+ s.seek(0)
+ self.assertEqual(list(s), iterable) # Back in action
+
+ def test_partial_reset(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(mi.take(5, s), iterable[:5]) # Normal iteration
+
+ s.seek(1)
+ self.assertEqual(list(s), iterable[1:]) # Get the rest of the iterable
+
+ def test_forward(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(mi.take(1, s), iterable[:1]) # Normal iteration
+
+ s.seek(3) # Skip over index 2
+ self.assertEqual(list(s), iterable[3:]) # Result is similar to slicing
+
+ s.seek(0) # Back to 0
+ self.assertEqual(list(s), iterable) # No difference in result
+
+ def test_past_end(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(mi.take(1, s), iterable[:1]) # Normal iteration
+
+ s.seek(20)
+ self.assertEqual(list(s), []) # Iterable is exhausted
+
+ s.seek(0) # Back to 0
+ self.assertEqual(list(s), iterable) # No difference in result
+
+ def test_elements(self):
+ iterable = map(str, count())
+
+ s = mi.seekable(iterable)
+ mi.take(10, s)
+
+ elements = s.elements()
+ self.assertEqual(
+ [elements[i] for i in range(10)], [str(n) for n in range(10)]
+ )
+ self.assertEqual(len(elements), 10)
+
+ mi.take(10, s)
+ self.assertEqual(list(elements), [str(n) for n in range(20)])
+
+
+class SequenceViewTests(TestCase):
+ def test_init(self):
+ view = mi.SequenceView((1, 2, 3))
+ self.assertEqual(repr(view), "SequenceView((1, 2, 3))")
+ self.assertRaises(TypeError, lambda: mi.SequenceView({}))
+
+ def test_update(self):
+ seq = [1, 2, 3]
+ view = mi.SequenceView(seq)
+ self.assertEqual(len(view), 3)
+ self.assertEqual(repr(view), "SequenceView([1, 2, 3])")
+
+ seq.pop()
+ self.assertEqual(len(view), 2)
+ self.assertEqual(repr(view), "SequenceView([1, 2])")
+
+ def test_indexing(self):
+ seq = ('a', 'b', 'c', 'd', 'e', 'f')
+ view = mi.SequenceView(seq)
+ for i in range(-len(seq), len(seq)):
+ self.assertEqual(view[i], seq[i])
+
+ def test_slicing(self):
+ seq = ('a', 'b', 'c', 'd', 'e', 'f')
+ view = mi.SequenceView(seq)
+ n = len(seq)
+ indexes = list(range(-n - 1, n + 1)) + [None]
+ steps = list(range(-n, n + 1))
+ steps.remove(0)
+ for slice_args in product(indexes, indexes, steps):
+ i = slice(*slice_args)
+ self.assertEqual(view[i], seq[i])
+
+ def test_abc_methods(self):
+ # collections.Sequence should provide all of this functionality
+ seq = ('a', 'b', 'c', 'd', 'e', 'f', 'f')
+ view = mi.SequenceView(seq)
+
+ # __contains__
+ self.assertIn('b', view)
+ self.assertNotIn('g', view)
+
+ # __iter__
+ self.assertEqual(list(iter(view)), list(seq))
+
+ # __reversed__
+ self.assertEqual(list(reversed(view)), list(reversed(seq)))
+
+ # index
+ self.assertEqual(view.index('b'), 1)
+
+ # count
+ self.assertEqual(view.count('f'), 2)
+
+
+class RunLengthTest(TestCase):
+ def test_encode(self):
+ iterable = (int(str(n)[0]) for n in count(800))
+ actual = mi.take(4, mi.run_length.encode(iterable))
+ expected = [(8, 100), (9, 100), (1, 1000), (2, 1000)]
+ self.assertEqual(actual, expected)
+
+ def test_decode(self):
+ iterable = [('d', 4), ('c', 3), ('b', 2), ('a', 1)]
+ actual = ''.join(mi.run_length.decode(iterable))
+ expected = 'ddddcccbba'
+ self.assertEqual(actual, expected)
+
+
+class ExactlyNTests(TestCase):
+ """Tests for ``exactly_n()``"""
+
+ def test_true(self):
+ """Iterable has ``n`` ``True`` elements"""
+ self.assertTrue(mi.exactly_n([True, False, True], 2))
+ self.assertTrue(mi.exactly_n([1, 1, 1, 0], 3))
+ self.assertTrue(mi.exactly_n([False, False], 0))
+ self.assertTrue(mi.exactly_n(range(100), 10, lambda x: x < 10))
+
+ def test_false(self):
+ """Iterable does not have ``n`` ``True`` elements"""
+ self.assertFalse(mi.exactly_n([True, False, False], 2))
+ self.assertFalse(mi.exactly_n([True, True, False], 1))
+ self.assertFalse(mi.exactly_n([False], 1))
+ self.assertFalse(mi.exactly_n([True], -1))
+ self.assertFalse(mi.exactly_n(repeat(True), 100))
+
+ def test_empty(self):
+ """Return ``True`` if the iterable is empty and ``n`` is 0"""
+ self.assertTrue(mi.exactly_n([], 0))
+ self.assertFalse(mi.exactly_n([], 1))
+
+
+class AlwaysReversibleTests(TestCase):
+ """Tests for ``always_reversible()``"""
+
+ def test_regular_reversed(self):
+ self.assertEqual(list(reversed(range(10))),
+ list(mi.always_reversible(range(10))))
+ self.assertEqual(list(reversed([1, 2, 3])),
+ list(mi.always_reversible([1, 2, 3])))
+ self.assertEqual(reversed([1, 2, 3]).__class__,
+ mi.always_reversible([1, 2, 3]).__class__)
+
+ def test_nonseq_reversed(self):
+ # Create a non-reversible generator from a sequence
+ with self.assertRaises(TypeError):
+ reversed(x for x in range(10))
+
+ self.assertEqual(list(reversed(range(10))),
+ list(mi.always_reversible(x for x in range(10))))
+ self.assertEqual(list(reversed([1, 2, 3])),
+ list(mi.always_reversible(x for x in [1, 2, 3])))
+ self.assertNotEqual(reversed((1, 2)).__class__,
+ mi.always_reversible(x for x in (1, 2)).__class__)
+
+
+class CircularShiftsTests(TestCase):
+ def test_empty(self):
+ # empty iterable -> empty list
+ self.assertEqual(list(mi.circular_shifts([])), [])
+
+ def test_simple_circular_shifts(self):
+ # test a simple iterable case
+ self.assertEqual(
+ mi.circular_shifts(range(4)),
+ [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)]
+ )
+
+ def test_duplicates(self):
+ # test non-distinct entries
+ self.assertEqual(
+ mi.circular_shifts([0, 1, 0, 1]),
+ [(0, 1, 0, 1), (1, 0, 1, 0), (0, 1, 0, 1), (1, 0, 1, 0)]
+ )
+
+
+class MakeDecoratorTests(TestCase):
+ def test_basic(self):
+ slicer = mi.make_decorator(islice)
+
+ @slicer(1, 10, 2)
+ def user_function(arg_1, arg_2, kwarg_1=None):
+ self.assertEqual(arg_1, 'arg_1')
+ self.assertEqual(arg_2, 'arg_2')
+ self.assertEqual(kwarg_1, 'kwarg_1')
+ return map(str, count())
+
+ it = user_function('arg_1', 'arg_2', kwarg_1='kwarg_1')
+ actual = list(it)
+ expected = ['1', '3', '5', '7', '9']
+ self.assertEqual(actual, expected)
+
+ def test_result_index(self):
+ def stringify(*args, **kwargs):
+ self.assertEqual(args[0], 'arg_0')
+ iterable = args[1]
+ self.assertEqual(args[2], 'arg_2')
+ self.assertEqual(kwargs['kwarg_1'], 'kwarg_1')
+ return map(str, iterable)
+
+ stringifier = mi.make_decorator(stringify, result_index=1)
+
+ @stringifier('arg_0', 'arg_2', kwarg_1='kwarg_1')
+ def user_function(n):
+ return count(n)
+
+ it = user_function(1)
+ actual = mi.take(5, it)
+ expected = ['1', '2', '3', '4', '5']
+ self.assertEqual(actual, expected)
+
+ def test_wrap_class(self):
+ seeker = mi.make_decorator(mi.seekable)
+
+ @seeker()
+ def user_function(n):
+ return map(str, range(n))
+
+ it = user_function(5)
+ self.assertEqual(list(it), ['0', '1', '2', '3', '4'])
+
+ it.seek(0)
+ self.assertEqual(list(it), ['0', '1', '2', '3', '4'])
+
+
+class MapReduceTests(TestCase):
+ def test_default(self):
+ iterable = (str(x) for x in range(5))
+ keyfunc = lambda x: int(x) // 2
+ actual = sorted(mi.map_reduce(iterable, keyfunc).items())
+ expected = [(0, ['0', '1']), (1, ['2', '3']), (2, ['4'])]
+ self.assertEqual(actual, expected)
+
+ def test_valuefunc(self):
+ iterable = (str(x) for x in range(5))
+ keyfunc = lambda x: int(x) // 2
+ valuefunc = int
+ actual = sorted(mi.map_reduce(iterable, keyfunc, valuefunc).items())
+ expected = [(0, [0, 1]), (1, [2, 3]), (2, [4])]
+ self.assertEqual(actual, expected)
+
+ def test_reducefunc(self):
+ iterable = (str(x) for x in range(5))
+ keyfunc = lambda x: int(x) // 2
+ valuefunc = int
+ reducefunc = lambda value_list: reduce(mul, value_list, 1)
+ actual = sorted(
+ mi.map_reduce(iterable, keyfunc, valuefunc, reducefunc).items()
+ )
+ expected = [(0, 0), (1, 6), (2, 4)]
+ self.assertEqual(actual, expected)
+
+ def test_ret(self):
+ d = mi.map_reduce([1, 0, 2, 0, 1, 0], bool)
+ self.assertEqual(d, {False: [0, 0, 0], True: [1, 2, 1]})
+ self.assertRaises(KeyError, lambda: d[None].append(1))
+
+
+class RlocateTests(TestCase):
+ def test_default_pred(self):
+ iterable = [0, 1, 1, 0, 1, 0, 0]
+ for it in (iterable[:], iter(iterable)):
+ actual = list(mi.rlocate(it))
+ expected = [4, 2, 1]
+ self.assertEqual(actual, expected)
+
+ def test_no_matches(self):
+ iterable = [0, 0, 0]
+ for it in (iterable[:], iter(iterable)):
+ actual = list(mi.rlocate(it))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_custom_pred(self):
+ iterable = ['0', 1, 1, '0', 1, '0', '0']
+ pred = lambda x: x == '0'
+ for it in (iterable[:], iter(iterable)):
+ actual = list(mi.rlocate(it, pred))
+ expected = [6, 5, 3, 0]
+ self.assertEqual(actual, expected)
+
+ def test_efficient_reversal(self):
+ iterable = range(10 ** 10) # Is efficiently reversible
+ target = 10 ** 10 - 2
+ pred = lambda x: x == target # Find-able from the right
+ actual = next(mi.rlocate(iterable, pred))
+ self.assertEqual(actual, target)
+
+ def test_window_size(self):
+ iterable = ['0', 1, 1, '0', 1, '0', '0']
+ pred = lambda *args: args == ('0', 1)
+ for it in (iterable, iter(iterable)):
+ actual = list(mi.rlocate(it, pred, window_size=2))
+ expected = [3, 0]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_large(self):
+ iterable = [1, 2, 3, 4]
+ pred = lambda a, b, c, d, e: True
+ for it in (iterable, iter(iterable)):
+ actual = list(mi.rlocate(it, pred, window_size=5))
+ expected = [0]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_zero(self):
+ iterable = [1, 2, 3, 4]
+ pred = lambda: True
+ for it in (iterable, iter(iterable)):
+ with self.assertRaises(ValueError):
+ list(mi.rlocate(it, pred, window_size=0))
+
+
+class ReplaceTests(TestCase):
+ def test_basic(self):
+ iterable = range(10)
+ pred = lambda x: x % 2 == 0
+ substitutes = []
+ actual = list(mi.replace(iterable, pred, substitutes))
+ expected = [1, 3, 5, 7, 9]
+ self.assertEqual(actual, expected)
+
+ def test_count(self):
+ iterable = range(10)
+ pred = lambda x: x % 2 == 0
+ substitutes = []
+ actual = list(mi.replace(iterable, pred, substitutes, count=4))
+ expected = [1, 3, 5, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_window_size(self):
+ iterable = range(10)
+ pred = lambda *args: args == (0, 1, 2)
+ substitutes = []
+ actual = list(mi.replace(iterable, pred, substitutes, window_size=3))
+ expected = [3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_end(self):
+ iterable = range(10)
+ pred = lambda *args: args == (7, 8, 9)
+ substitutes = []
+ actual = list(mi.replace(iterable, pred, substitutes, window_size=3))
+ expected = [0, 1, 2, 3, 4, 5, 6]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_count(self):
+ iterable = range(10)
+ pred = lambda *args: (args == (0, 1, 2)) or (args == (7, 8, 9))
+ substitutes = []
+ actual = list(
+ mi.replace(iterable, pred, substitutes, count=1, window_size=3)
+ )
+ expected = [3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_large(self):
+ iterable = range(4)
+ pred = lambda a, b, c, d, e: True
+ substitutes = [5, 6, 7]
+ actual = list(mi.replace(iterable, pred, substitutes, window_size=5))
+ expected = [5, 6, 7]
+ self.assertEqual(actual, expected)
+
+ def test_window_size_zero(self):
+ iterable = range(10)
+ pred = lambda *args: True
+ substitutes = []
+ with self.assertRaises(ValueError):
+ list(mi.replace(iterable, pred, substitutes, window_size=0))
+
+ def test_iterable_substitutes(self):
+ iterable = range(5)
+ pred = lambda x: x % 2 == 0
+ substitutes = iter('__')
+ actual = list(mi.replace(iterable, pred, substitutes))
+ expected = ['_', '_', 1, '_', '_', 3, '_', '_']
+ self.assertEqual(actual, expected)
diff --git a/third_party/python/more-itertools/more_itertools/tests/test_recipes.py b/third_party/python/more-itertools/more_itertools/tests/test_recipes.py
new file mode 100644
index 0000000000..98981fe8e6
--- /dev/null
+++ b/third_party/python/more-itertools/more_itertools/tests/test_recipes.py
@@ -0,0 +1,616 @@
+from doctest import DocTestSuite
+from unittest import TestCase
+
+from itertools import combinations
+from six.moves import range
+
+import more_itertools as mi
+
+
+def load_tests(loader, tests, ignore):
+ # Add the doctests
+ tests.addTests(DocTestSuite('more_itertools.recipes'))
+ return tests
+
+
+class AccumulateTests(TestCase):
+ """Tests for ``accumulate()``"""
+
+ def test_empty(self):
+ """Test that an empty input returns an empty output"""
+ self.assertEqual(list(mi.accumulate([])), [])
+
+ def test_default(self):
+ """Test accumulate with the default function (addition)"""
+ self.assertEqual(list(mi.accumulate([1, 2, 3])), [1, 3, 6])
+
+ def test_bogus_function(self):
+ """Test accumulate with an invalid function"""
+ with self.assertRaises(TypeError):
+ list(mi.accumulate([1, 2, 3], func=lambda x: x))
+
+ def test_custom_function(self):
+ """Test accumulate with a custom function"""
+ self.assertEqual(
+ list(mi.accumulate((1, 2, 3, 2, 1), func=max)), [1, 2, 3, 3, 3]
+ )
+
+
+class TakeTests(TestCase):
+ """Tests for ``take()``"""
+
+ def test_simple_take(self):
+ """Test basic usage"""
+ t = mi.take(5, range(10))
+ self.assertEqual(t, [0, 1, 2, 3, 4])
+
+ def test_null_take(self):
+ """Check the null case"""
+ t = mi.take(0, range(10))
+ self.assertEqual(t, [])
+
+ def test_negative_take(self):
+ """Make sure taking negative items results in a ValueError"""
+ self.assertRaises(ValueError, lambda: mi.take(-3, range(10)))
+
+ def test_take_too_much(self):
+ """Taking more than an iterator has remaining should return what the
+ iterator has remaining.
+
+ """
+ t = mi.take(10, range(5))
+ self.assertEqual(t, [0, 1, 2, 3, 4])
+
+
+class TabulateTests(TestCase):
+ """Tests for ``tabulate()``"""
+
+ def test_simple_tabulate(self):
+ """Test the happy path"""
+ t = mi.tabulate(lambda x: x)
+ f = tuple([next(t) for _ in range(3)])
+ self.assertEqual(f, (0, 1, 2))
+
+ def test_count(self):
+ """Ensure tabulate accepts specific count"""
+ t = mi.tabulate(lambda x: 2 * x, -1)
+ f = (next(t), next(t), next(t))
+ self.assertEqual(f, (-2, 0, 2))
+
+
+class TailTests(TestCase):
+ """Tests for ``tail()``"""
+
+ def test_greater(self):
+ """Length of iterable is greather than requested tail"""
+ self.assertEqual(list(mi.tail(3, 'ABCDEFG')), ['E', 'F', 'G'])
+
+ def test_equal(self):
+ """Length of iterable is equal to the requested tail"""
+ self.assertEqual(
+ list(mi.tail(7, 'ABCDEFG')), ['A', 'B', 'C', 'D', 'E', 'F', 'G']
+ )
+
+ def test_less(self):
+ """Length of iterable is less than requested tail"""
+ self.assertEqual(
+ list(mi.tail(8, 'ABCDEFG')), ['A', 'B', 'C', 'D', 'E', 'F', 'G']
+ )
+
+
+class ConsumeTests(TestCase):
+ """Tests for ``consume()``"""
+
+ def test_sanity(self):
+ """Test basic functionality"""
+ r = (x for x in range(10))
+ mi.consume(r, 3)
+ self.assertEqual(3, next(r))
+
+ def test_null_consume(self):
+ """Check the null case"""
+ r = (x for x in range(10))
+ mi.consume(r, 0)
+ self.assertEqual(0, next(r))
+
+ def test_negative_consume(self):
+ """Check that negative consumsion throws an error"""
+ r = (x for x in range(10))
+ self.assertRaises(ValueError, lambda: mi.consume(r, -1))
+
+ def test_total_consume(self):
+ """Check that iterator is totally consumed by default"""
+ r = (x for x in range(10))
+ mi.consume(r)
+ self.assertRaises(StopIteration, lambda: next(r))
+
+
+class NthTests(TestCase):
+ """Tests for ``nth()``"""
+
+ def test_basic(self):
+ """Make sure the nth item is returned"""
+ l = range(10)
+ for i, v in enumerate(l):
+ self.assertEqual(mi.nth(l, i), v)
+
+ def test_default(self):
+ """Ensure a default value is returned when nth item not found"""
+ l = range(3)
+ self.assertEqual(mi.nth(l, 100, "zebra"), "zebra")
+
+ def test_negative_item_raises(self):
+ """Ensure asking for a negative item raises an exception"""
+ self.assertRaises(ValueError, lambda: mi.nth(range(10), -3))
+
+
+class AllEqualTests(TestCase):
+ """Tests for ``all_equal()``"""
+
+ def test_true(self):
+ """Everything is equal"""
+ self.assertTrue(mi.all_equal('aaaaaa'))
+ self.assertTrue(mi.all_equal([0, 0, 0, 0]))
+
+ def test_false(self):
+ """Not everything is equal"""
+ self.assertFalse(mi.all_equal('aaaaab'))
+ self.assertFalse(mi.all_equal([0, 0, 0, 1]))
+
+ def test_tricky(self):
+ """Not everything is identical, but everything is equal"""
+ items = [1, complex(1, 0), 1.0]
+ self.assertTrue(mi.all_equal(items))
+
+ def test_empty(self):
+ """Return True if the iterable is empty"""
+ self.assertTrue(mi.all_equal(''))
+ self.assertTrue(mi.all_equal([]))
+
+ def test_one(self):
+ """Return True if the iterable is singular"""
+ self.assertTrue(mi.all_equal('0'))
+ self.assertTrue(mi.all_equal([0]))
+
+
+class QuantifyTests(TestCase):
+ """Tests for ``quantify()``"""
+
+ def test_happy_path(self):
+ """Make sure True count is returned"""
+ q = [True, False, True]
+ self.assertEqual(mi.quantify(q), 2)
+
+ def test_custom_predicate(self):
+ """Ensure non-default predicates return as expected"""
+ q = range(10)
+ self.assertEqual(mi.quantify(q, lambda x: x % 2 == 0), 5)
+
+
+class PadnoneTests(TestCase):
+ """Tests for ``padnone()``"""
+
+ def test_happy_path(self):
+ """wrapper iterator should return None indefinitely"""
+ r = range(2)
+ p = mi.padnone(r)
+ self.assertEqual([0, 1, None, None], [next(p) for _ in range(4)])
+
+
+class NcyclesTests(TestCase):
+ """Tests for ``nyclces()``"""
+
+ def test_happy_path(self):
+ """cycle a sequence three times"""
+ r = ["a", "b", "c"]
+ n = mi.ncycles(r, 3)
+ self.assertEqual(
+ ["a", "b", "c", "a", "b", "c", "a", "b", "c"],
+ list(n)
+ )
+
+ def test_null_case(self):
+ """asking for 0 cycles should return an empty iterator"""
+ n = mi.ncycles(range(100), 0)
+ self.assertRaises(StopIteration, lambda: next(n))
+
+    def test_pathological_case(self):
+ """asking for negative cycles should return an empty iterator"""
+ n = mi.ncycles(range(100), -10)
+ self.assertRaises(StopIteration, lambda: next(n))
+
+
+class DotproductTests(TestCase):
+ """Tests for ``dotproduct()``'"""
+
+ def test_happy_path(self):
+ """simple dotproduct example"""
+ self.assertEqual(400, mi.dotproduct([10, 10], [20, 20]))
+
+
+class FlattenTests(TestCase):
+ """Tests for ``flatten()``"""
+
+ def test_basic_usage(self):
+ """ensure list of lists is flattened one level"""
+ f = [[0, 1, 2], [3, 4, 5]]
+ self.assertEqual(list(range(6)), list(mi.flatten(f)))
+
+ def test_single_level(self):
+ """ensure list of lists is flattened only one level"""
+ f = [[0, [1, 2]], [[3, 4], 5]]
+ self.assertEqual([0, [1, 2], [3, 4], 5], list(mi.flatten(f)))
+
+
+class RepeatfuncTests(TestCase):
+ """Tests for ``repeatfunc()``"""
+
+ def test_simple_repeat(self):
+ """test simple repeated functions"""
+ r = mi.repeatfunc(lambda: 5)
+ self.assertEqual([5, 5, 5, 5, 5], [next(r) for _ in range(5)])
+
+ def test_finite_repeat(self):
+ """ensure limited repeat when times is provided"""
+ r = mi.repeatfunc(lambda: 5, times=5)
+ self.assertEqual([5, 5, 5, 5, 5], list(r))
+
+ def test_added_arguments(self):
+ """ensure arguments are applied to the function"""
+ r = mi.repeatfunc(lambda x: x, 2, 3)
+ self.assertEqual([3, 3], list(r))
+
+ def test_null_times(self):
+ """repeat 0 should return an empty iterator"""
+ r = mi.repeatfunc(range, 0, 3)
+ self.assertRaises(StopIteration, lambda: next(r))
+
+
+class PairwiseTests(TestCase):
+ """Tests for ``pairwise()``"""
+
+ def test_base_case(self):
+ """ensure an iterable will return pairwise"""
+ p = mi.pairwise([1, 2, 3])
+ self.assertEqual([(1, 2), (2, 3)], list(p))
+
+ def test_short_case(self):
+ """ensure an empty iterator if there's not enough values to pair"""
+ p = mi.pairwise("a")
+ self.assertRaises(StopIteration, lambda: next(p))
+
+
+class GrouperTests(TestCase):
+ """Tests for ``grouper()``"""
+
+ def test_even(self):
+ """Test when group size divides evenly into the length of
+ the iterable.
+
+ """
+ self.assertEqual(
+ list(mi.grouper(3, 'ABCDEF')), [('A', 'B', 'C'), ('D', 'E', 'F')]
+ )
+
+ def test_odd(self):
+ """Test when group size does not divide evenly into the length of the
+ iterable.
+
+ """
+ self.assertEqual(
+ list(mi.grouper(3, 'ABCDE')), [('A', 'B', 'C'), ('D', 'E', None)]
+ )
+
+ def test_fill_value(self):
+ """Test that the fill value is used to pad the final group"""
+ self.assertEqual(
+ list(mi.grouper(3, 'ABCDE', 'x')),
+ [('A', 'B', 'C'), ('D', 'E', 'x')]
+ )
+
+
+class RoundrobinTests(TestCase):
+ """Tests for ``roundrobin()``"""
+
+ def test_even_groups(self):
+ """Ensure ordered output from evenly populated iterables"""
+ self.assertEqual(
+ list(mi.roundrobin('ABC', [1, 2, 3], range(3))),
+ ['A', 1, 0, 'B', 2, 1, 'C', 3, 2]
+ )
+
+ def test_uneven_groups(self):
+ """Ensure ordered output from unevenly populated iterables"""
+ self.assertEqual(
+ list(mi.roundrobin('ABCD', [1, 2], range(0))),
+ ['A', 1, 'B', 2, 'C', 'D']
+ )
+
+
+class PartitionTests(TestCase):
+ """Tests for ``partition()``"""
+
+ def test_bool(self):
+ """Test when pred() returns a boolean"""
+ lesser, greater = mi.partition(lambda x: x > 5, range(10))
+ self.assertEqual(list(lesser), [0, 1, 2, 3, 4, 5])
+ self.assertEqual(list(greater), [6, 7, 8, 9])
+
+ def test_arbitrary(self):
+ """Test when pred() returns an integer"""
+ divisibles, remainders = mi.partition(lambda x: x % 3, range(10))
+ self.assertEqual(list(divisibles), [0, 3, 6, 9])
+ self.assertEqual(list(remainders), [1, 2, 4, 5, 7, 8])
+
+
+class PowersetTests(TestCase):
+ """Tests for ``powerset()``"""
+
+ def test_combinatorics(self):
+ """Ensure a proper enumeration"""
+ p = mi.powerset([1, 2, 3])
+ self.assertEqual(
+ list(p),
+ [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
+ )
+
+
+class UniqueEverseenTests(TestCase):
+ """Tests for ``unique_everseen()``"""
+
+ def test_everseen(self):
+ """ensure duplicate elements are ignored"""
+ u = mi.unique_everseen('AAAABBBBCCDAABBB')
+ self.assertEqual(
+ ['A', 'B', 'C', 'D'],
+ list(u)
+ )
+
+ def test_custom_key(self):
+ """ensure the custom key comparison works"""
+ u = mi.unique_everseen('aAbACCc', key=str.lower)
+ self.assertEqual(list('abC'), list(u))
+
+ def test_unhashable(self):
+ """ensure things work for unhashable items"""
+ iterable = ['a', [1, 2, 3], [1, 2, 3], 'a']
+ u = mi.unique_everseen(iterable)
+ self.assertEqual(list(u), ['a', [1, 2, 3]])
+
+ def test_unhashable_key(self):
+ """ensure things work for unhashable items with a custom key"""
+ iterable = ['a', [1, 2, 3], [1, 2, 3], 'a']
+ u = mi.unique_everseen(iterable, key=lambda x: x)
+ self.assertEqual(list(u), ['a', [1, 2, 3]])
+
+
+class UniqueJustseenTests(TestCase):
+ """Tests for ``unique_justseen()``"""
+
+ def test_justseen(self):
+ """ensure only last item is remembered"""
+ u = mi.unique_justseen('AAAABBBCCDABB')
+ self.assertEqual(list('ABCDAB'), list(u))
+
+ def test_custom_key(self):
+ """ensure the custom key comparison works"""
+ u = mi.unique_justseen('AABCcAD', str.lower)
+ self.assertEqual(list('ABCAD'), list(u))
+
+
+class IterExceptTests(TestCase):
+ """Tests for ``iter_except()``"""
+
+ def test_exact_exception(self):
+ """ensure the exact specified exception is caught"""
+ l = [1, 2, 3]
+ i = mi.iter_except(l.pop, IndexError)
+ self.assertEqual(list(i), [3, 2, 1])
+
+ def test_generic_exception(self):
+ """ensure the generic exception can be caught"""
+ l = [1, 2]
+ i = mi.iter_except(l.pop, Exception)
+ self.assertEqual(list(i), [2, 1])
+
+ def test_uncaught_exception_is_raised(self):
+ """ensure a non-specified exception is raised"""
+ l = [1, 2, 3]
+ i = mi.iter_except(l.pop, KeyError)
+ self.assertRaises(IndexError, lambda: list(i))
+
+ def test_first(self):
+ """ensure first is run before the function"""
+ l = [1, 2, 3]
+ f = lambda: 25
+ i = mi.iter_except(l.pop, IndexError, f)
+ self.assertEqual(list(i), [25, 3, 2, 1])
+
+
+class FirstTrueTests(TestCase):
+ """Tests for ``first_true()``"""
+
+ def test_something_true(self):
+ """Test with no keywords"""
+ self.assertEqual(mi.first_true(range(10)), 1)
+
+ def test_nothing_true(self):
+ """Test default return value."""
+ self.assertEqual(mi.first_true([0, 0, 0]), False)
+
+ def test_default(self):
+ """Test with a default keyword"""
+ self.assertEqual(mi.first_true([0, 0, 0], default='!'), '!')
+
+ def test_pred(self):
+ """Test with a custom predicate"""
+ self.assertEqual(
+ mi.first_true([2, 4, 6], pred=lambda x: x % 3 == 0), 6
+ )
+
+
+class RandomProductTests(TestCase):
+ """Tests for ``random_product()``
+
+ Since random.choice() has different results with the same seed across
+    Python versions 2.x and 3.x, these tests use highly probable events to
+ create predictable outcomes across platforms.
+ """
+
+ def test_simple_lists(self):
+ """Ensure that one item is chosen from each list in each pair.
+ Also ensure that each item from each list eventually appears in
+ the chosen combinations.
+
+ Odds are roughly 1 in 7.1 * 10e16 that one item from either list will
+ not be chosen after 100 samplings of one item from each list. Just to
+        be safe, it is better to use a known random seed as well.
+
+ """
+ nums = [1, 2, 3]
+ lets = ['a', 'b', 'c']
+ n, m = zip(*[mi.random_product(nums, lets) for _ in range(100)])
+ n, m = set(n), set(m)
+ self.assertEqual(n, set(nums))
+ self.assertEqual(m, set(lets))
+ self.assertEqual(len(n), len(nums))
+ self.assertEqual(len(m), len(lets))
+
+ def test_list_with_repeat(self):
+ """ensure multiple items are chosen, and that they appear to be chosen
+ from one list then the next, in proper order.
+
+ """
+ nums = [1, 2, 3]
+ lets = ['a', 'b', 'c']
+ r = list(mi.random_product(nums, lets, repeat=100))
+ self.assertEqual(2 * 100, len(r))
+ n, m = set(r[::2]), set(r[1::2])
+ self.assertEqual(n, set(nums))
+ self.assertEqual(m, set(lets))
+ self.assertEqual(len(n), len(nums))
+ self.assertEqual(len(m), len(lets))
+
+
+class RandomPermutationTests(TestCase):
+ """Tests for ``random_permutation()``"""
+
+ def test_full_permutation(self):
+ """ensure every item from the iterable is returned in a new ordering
+
+        15 elements have a 1 in 1.3 * 10e12 chance of appearing in sorted
+        order, so we fix a seed value just to be sure.
+
+ """
+ i = range(15)
+ r = mi.random_permutation(i)
+ self.assertEqual(set(i), set(r))
+        if tuple(i) == r:  # compare like types; random_permutation returns a tuple
+ raise AssertionError("Values were not permuted")
+
+ def test_partial_permutation(self):
+ """ensure all returned items are from the iterable, that the returned
+ permutation is of the desired length, and that all items eventually
+ get returned.
+
+ Sampling 100 permutations of length 5 from a set of 15 leaves a
+ (2/3)^100 chance that an item will not be chosen. Multiplied by 15
+ items, there is a 1 in 2.6e16 chance that at least 1 item will not
+ show up in the resulting output. Using a random seed will fix that.
+
+ """
+ items = range(15)
+ item_set = set(items)
+ all_items = set()
+ for _ in range(100):
+ permutation = mi.random_permutation(items, 5)
+ self.assertEqual(len(permutation), 5)
+ permutation_set = set(permutation)
+ self.assertLessEqual(permutation_set, item_set)
+ all_items |= permutation_set
+ self.assertEqual(all_items, item_set)
+
+
+class RandomCombinationTests(TestCase):
+ """Tests for ``random_combination()``"""
+
+    def test_pseudorandomness(self):
+ """ensure different subsets of the iterable get returned over many
+ samplings of random combinations"""
+ items = range(15)
+ all_items = set()
+ for _ in range(50):
+ combination = mi.random_combination(items, 5)
+ all_items |= set(combination)
+ self.assertEqual(all_items, set(items))
+
+ def test_no_replacement(self):
+ """ensure that elements are sampled without replacement"""
+ items = range(15)
+ for _ in range(50):
+ combination = mi.random_combination(items, len(items))
+ self.assertEqual(len(combination), len(set(combination)))
+ self.assertRaises(
+ ValueError, lambda: mi.random_combination(items, len(items) + 1)
+ )
+
+
+class RandomCombinationWithReplacementTests(TestCase):
+ """Tests for ``random_combination_with_replacement()``"""
+
+ def test_replacement(self):
+ """ensure that elements are sampled with replacement"""
+ items = range(5)
+ combo = mi.random_combination_with_replacement(items, len(items) * 2)
+ self.assertEqual(2 * len(items), len(combo))
+ if len(set(combo)) == len(combo):
+ raise AssertionError("Combination contained no duplicates")
+
+ def test_pseudorandomness(self):
+ """ensure different subsets of the iterable get returned over many
+ samplings of random combinations"""
+ items = range(15)
+ all_items = set()
+ for _ in range(50):
+ combination = mi.random_combination_with_replacement(items, 5)
+ all_items |= set(combination)
+ self.assertEqual(all_items, set(items))
+
+
+class NthCombinationTests(TestCase):
+ def test_basic(self):
+ iterable = 'abcdefg'
+ r = 4
+ for index, expected in enumerate(combinations(iterable, r)):
+ actual = mi.nth_combination(iterable, r, index)
+ self.assertEqual(actual, expected)
+
+ def test_long(self):
+ actual = mi.nth_combination(range(180), 4, 2000000)
+ expected = (2, 12, 35, 126)
+ self.assertEqual(actual, expected)
+
+ def test_invalid_r(self):
+ for r in (-1, 3):
+ with self.assertRaises(ValueError):
+ mi.nth_combination([], r, 0)
+
+ def test_invalid_index(self):
+ with self.assertRaises(IndexError):
+ mi.nth_combination('abcdefg', 3, -36)
+
+
+class PrependTests(TestCase):
+ def test_basic(self):
+ value = 'a'
+ iterator = iter('bcdefg')
+ actual = list(mi.prepend(value, iterator))
+ expected = list('abcdefg')
+ self.assertEqual(actual, expected)
+
+ def test_multiple(self):
+ value = 'ab'
+ iterator = iter('cdefg')
+ actual = tuple(mi.prepend(value, iterator))
+ expected = ('ab',) + tuple('cdefg')
+ self.assertEqual(actual, expected)
diff --git a/third_party/python/more-itertools/setup.cfg b/third_party/python/more-itertools/setup.cfg
new file mode 100644
index 0000000000..86c9d8ce2f
--- /dev/null
+++ b/third_party/python/more-itertools/setup.cfg
@@ -0,0 +1,8 @@
+[flake8]
+exclude = ./docs/conf.py, .eggs/
+ignore = E731, E741, F999
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/more-itertools/setup.py b/third_party/python/more-itertools/setup.py
new file mode 100644
index 0000000000..2772653875
--- /dev/null
+++ b/third_party/python/more-itertools/setup.py
@@ -0,0 +1,59 @@
+# Hack to prevent stupid error on exit of `python setup.py test`. (See
+# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html.)
+try:
+ import multiprocessing # noqa
+except ImportError:
+ pass
+from re import sub
+
+from setuptools import setup, find_packages
+
+
+def get_long_description():
+ # Fix display issues on PyPI caused by RST markup
+ readme = open('README.rst').read()
+
+ version_lines = []
+ with open('docs/versions.rst') as infile:
+ next(infile)
+ for line in infile:
+ line = line.rstrip().replace('.. automodule:: more_itertools', '')
+ version_lines.append(line)
+ version_history = '\n'.join(version_lines)
+ version_history = sub(r':func:`([a-zA-Z0-9._]+)`', r'\1', version_history)
+
+ ret = readme + '\n\n' + version_history
+ return ret
+
+
+setup(
+ name='more-itertools',
+ version='4.3.0',
+ description='More routines for operating on iterables, beyond itertools',
+ long_description=get_long_description(),
+ author='Erik Rose',
+ author_email='erikrose@grinchcentral.com',
+ license='MIT',
+ packages=find_packages(exclude=['ez_setup']),
+ install_requires=['six>=1.0.0,<2.0.0'],
+ test_suite='more_itertools.tests',
+ url='https://github.com/erikrose/more-itertools',
+ include_package_data=True,
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'Natural Language :: English',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Topic :: Software Development :: Libraries'],
+ keywords=['itertools', 'iterator', 'iteration', 'filter', 'peek',
+ 'peekable', 'collate', 'chunk', 'chunked'],
+)
diff --git a/third_party/python/more-itertools/tox.ini b/third_party/python/more-itertools/tox.ini
new file mode 100644
index 0000000000..70c68c058d
--- /dev/null
+++ b/third_party/python/more-itertools/tox.ini
@@ -0,0 +1,5 @@
+[tox]
+envlist = py27, py34, py35, py36, py37
+
+[testenv]
+commands = {envbindir}/python -m unittest discover -v
diff --git a/third_party/python/moz.build b/third_party/python/moz.build
new file mode 100644
index 0000000000..a9c0275c28
--- /dev/null
+++ b/third_party/python/moz.build
@@ -0,0 +1,97 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Default extra components to build config
+with Files('**'):
+ BUG_COMPONENT = ('Firefox Build System', 'General')
+
+with Files('attrs/**'):
+ BUG_COMPONENT = ('Firefox Build System', 'Task Configuration')
+
+with Files('blessings/**'):
+ BUG_COMPONENT = ('Firefox Build System', 'General')
+
+with Files('compare-locales/**'):
+ BUG_COMPONENT = ('Localization Infrastructure and Tools', 'compare-locales')
+
+with Files('dlmanager/**'):
+ BUG_COMPONENT = ('Firefox Build System', 'General')
+
+with Files('enum34/**'):
+ BUG_COMPONENT = ('Release Engineering', 'General')
+
+with Files('fluent.migrate/**'):
+ BUG_COMPONENT = ('Localization Infrastructure and Tools', 'Fluent Migration')
+
+# Actually, https://github.com/projectfluent/python-fluent/issues
+with Files('fluent.syntax/**'):
+ BUG_COMPONENT = ('Localization Infrastructure and Tools', 'General')
+
+with Files('futures/**'):
+ BUG_COMPONENT = ('Firefox Build System', 'General')
+
+with Files('jsmin/**'):
+ BUG_COMPONENT = ('GeckoView', 'General')
+
+with Files('mock-1.0.0/**'):
+ BUG_COMPONENT = ('Firefox Build System', 'General')
+
+with Files('mohawk/**'):
+ BUG_COMPONENT = ('Taskcluster', 'Platform Libraries')
+
+with Files('mozilla-version/**'):
+ BUG_COMPONENT = ('Release Engineering', 'General')
+
+with Files('psutil/**'):
+ BUG_COMPONENT = ('Firefox Build System', 'General')
+
+with Files('py/**'):
+ BUG_COMPONENT = ('Firefox Build System', 'General')
+
+with Files('pyasn1/**'):
+ BUG_COMPONENT = ('Release Engineering', 'General')
+
+with Files('pyasn1-modules/**'):
+ BUG_COMPONENT = ('Core', 'Security: PSM')
+
+with Files('pylru/**'):
+ BUG_COMPONENT = ('mozilla.org', 'MozillaBuild')
+
+with Files('pystache/**'):
+ BUG_COMPONENT = ('Taskcluster', 'General')
+
+with Files('pytest/**'):
+ BUG_COMPONENT = ('Testing', 'General')
+
+with Files('pytoml/**'):
+ BUG_COMPONENT = ('Firefox Build System', 'General')
+
+with Files('pyyaml/**'):
+ BUG_COMPONENT = ('Taskcluster', 'General')
+
+with Files('redo/**'):
+ BUG_COMPONENT = ('Firefox Build System', 'General')
+
+with Files('requests*/**'):
+ BUG_COMPONENT = ('Firefox Build System', 'General')
+
+with Files('requirements.*'):
+ BUG_COMPONENT = ('Firefox Build System', 'General')
+
+with Files('rsa/**'):
+ BUG_COMPONENT = ('Core', 'Security: PSM')
+
+with Files('slugid/**'):
+ BUG_COMPONENT = ('Taskcluster', 'Platform Libraries')
+
+with Files('taskcluster/**'):
+ BUG_COMPONENT = ('Taskcluster', 'Platform Libraries')
+
+with Files('virtualenv/**'):
+ BUG_COMPONENT = ('Firefox Build System', 'General')
+
+with Files('voluptuous/**'):
+ BUG_COMPONENT = ('Firefox Build System', 'Task Configuration')
diff --git a/third_party/python/mozilla-version/LICENSE b/third_party/python/mozilla-version/LICENSE
new file mode 100644
index 0000000000..e87a115e46
--- /dev/null
+++ b/third_party/python/mozilla-version/LICENSE
@@ -0,0 +1,363 @@
+Mozilla Public License, version 2.0
+
+1. Definitions
+
+1.1. "Contributor"
+
+ means each individual or legal entity that creates, contributes to the
+ creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+
+ means the combination of the Contributions of others (if any) used by a
+ Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+
+ means Source Code Form to which the initial Contributor has attached the
+ notice in Exhibit A, the Executable Form of such Source Code Form, and
+ Modifications of such Source Code Form, in each case including portions
+ thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ a. that the initial Contributor has attached the notice described in
+ Exhibit B to the Covered Software; or
+
+ b. that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the terms of
+ a Secondary License.
+
+1.6. "Executable Form"
+
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+
+ means a work that combines Covered Software with other material, in a
+ separate file or files, that is not Covered Software.
+
+1.8. "License"
+
+ means this document.
+
+1.9. "Licensable"
+
+ means having the right to grant, to the maximum extent possible, whether
+ at the time of the initial grant or subsequently, any and all of the
+ rights conveyed by this License.
+
+1.10. "Modifications"
+
+ means any of the following:
+
+ a. any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered Software; or
+
+ b. any new file in Source Code Form that contains any Covered Software.
+
+1.11. "Patent Claims" of a Contributor
+
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the License,
+ by the making, using, selling, offering for sale, having made, import,
+ or transfer of either its Contributions or its Contributor Version.
+
+1.12. "Secondary License"
+
+ means either the GNU General Public License, Version 2.0, the GNU Lesser
+ General Public License, Version 2.1, the GNU Affero General Public
+ License, Version 3.0, or any later versions of those licenses.
+
+1.13. "Source Code Form"
+
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that controls, is
+ controlled by, or is under common control with You. For purposes of this
+ definition, "control" means (a) the power, direct or indirect, to cause
+ the direction or management of such entity, whether by contract or
+ otherwise, or (b) ownership of more than fifty percent (50%) of the
+ outstanding shares or beneficial ownership of such entity.
+
+
+2. License Grants and Conditions
+
+2.1. Grants
+
+ Each Contributor hereby grants You a world-wide, royalty-free,
+ non-exclusive license:
+
+ a. under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+ b. under Patent Claims of such Contributor to make, use, sell, offer for
+ sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+ The licenses granted in Section 2.1 with respect to any Contribution
+ become effective for each Contribution on the date the Contributor first
+ distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+ The licenses granted in this Section 2 are the only rights granted under
+ this License. No additional rights or licenses will be implied from the
+ distribution or licensing of Covered Software under this License.
+ Notwithstanding Section 2.1(b) above, no patent license is granted by a
+ Contributor:
+
+ a. for any code that a Contributor has removed from Covered Software; or
+
+ b. for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+ c. under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+ This License does not grant any rights in the trademarks, service marks,
+ or logos of any Contributor (except as may be necessary to comply with
+ the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+ No Contributor makes additional grants as a result of Your choice to
+ distribute the Covered Software under a subsequent version of this
+ License (see Section 10.2) or under the terms of a Secondary License (if
+ permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+ Each Contributor represents that the Contributor believes its
+ Contributions are its original creation(s) or it has sufficient rights to
+ grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+ This License is not intended to limit any rights You have under
+ applicable copyright doctrines of fair use, fair dealing, or other
+ equivalents.
+
+2.7. Conditions
+
+ Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
+ Section 2.1.
+
+
+3. Responsibilities
+
+3.1. Distribution of Source Form
+
+ All distribution of Covered Software in Source Code Form, including any
+ Modifications that You create or to which You contribute, must be under
+ the terms of this License. You must inform recipients that the Source
+ Code Form of the Covered Software is governed by the terms of this
+ License, and how they can obtain a copy of this License. You may not
+ attempt to alter or restrict the recipients' rights in the Source Code
+ Form.
+
+3.2. Distribution of Executable Form
+
+ If You distribute Covered Software in Executable Form then:
+
+ a. such Covered Software must also be made available in Source Code Form,
+ as described in Section 3.1, and You must inform recipients of the
+ Executable Form how they can obtain a copy of such Source Code Form by
+ reasonable means in a timely manner, at a charge no more than the cost
+ of distribution to the recipient; and
+
+ b. You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter the
+ recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+ You may create and distribute a Larger Work under terms of Your choice,
+ provided that You also comply with the requirements of this License for
+ the Covered Software. If the Larger Work is a combination of Covered
+ Software with a work governed by one or more Secondary Licenses, and the
+ Covered Software is not Incompatible With Secondary Licenses, this
+ License permits You to additionally distribute such Covered Software
+ under the terms of such Secondary License(s), so that the recipient of
+ the Larger Work may, at their option, further distribute the Covered
+ Software under the terms of either this License or such Secondary
+ License(s).
+
+3.4. Notices
+
+ You may not remove or alter the substance of any license notices
+ (including copyright notices, patent notices, disclaimers of warranty, or
+ limitations of liability) contained within the Source Code Form of the
+ Covered Software, except that You may alter any license notices to the
+ extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+ You may choose to offer, and to charge a fee for, warranty, support,
+ indemnity or liability obligations to one or more recipients of Covered
+ Software. However, You may do so only on Your own behalf, and not on
+ behalf of any Contributor. You must make it absolutely clear that any
+ such warranty, support, indemnity, or liability obligation is offered by
+ You alone, and You hereby agree to indemnify every Contributor for any
+ liability incurred by such Contributor as a result of warranty, support,
+ indemnity or liability terms You offer. You may include additional
+ disclaimers of warranty and limitations of liability specific to any
+ jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+
+ If it is impossible for You to comply with any of the terms of this License
+ with respect to some or all of the Covered Software due to statute,
+ judicial order, or regulation then You must: (a) comply with the terms of
+ this License to the maximum extent possible; and (b) describe the
+ limitations and the code they affect. Such description must be placed in a
+ text file included with all distributions of the Covered Software under
+ this License. Except to the extent prohibited by statute or regulation,
+ such description must be sufficiently detailed for a recipient of ordinary
+ skill to be able to understand it.
+
+5. Termination
+
+5.1. The rights granted under this License will terminate automatically if You
+ fail to comply with any of its terms. However, if You become compliant,
+ then the rights granted under this License from a particular Contributor
+ are reinstated (a) provisionally, unless and until such Contributor
+ explicitly and finally terminates Your grants, and (b) on an ongoing
+ basis, if such Contributor fails to notify You of the non-compliance by
+ some reasonable means prior to 60 days after You have come back into
+ compliance. Moreover, Your grants from a particular Contributor are
+ reinstated on an ongoing basis if such Contributor notifies You of the
+ non-compliance by some reasonable means, this is the first time You have
+ received notice of non-compliance with this License from such
+ Contributor, and You become compliant prior to 30 days after Your receipt
+ of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+ infringement claim (excluding declaratory judgment actions,
+ counter-claims, and cross-claims) alleging that a Contributor Version
+ directly or indirectly infringes any patent, then the rights granted to
+ You by any and all Contributors for the Covered Software under Section
+ 2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
+ license agreements (excluding distributors and resellers) which have been
+ validly granted by You or Your distributors under this License prior to
+ termination shall survive termination.
+
+6. Disclaimer of Warranty
+
+ Covered Software is provided under this License on an "as is" basis,
+ without warranty of any kind, either expressed, implied, or statutory,
+ including, without limitation, warranties that the Covered Software is free
+ of defects, merchantable, fit for a particular purpose or non-infringing.
+ The entire risk as to the quality and performance of the Covered Software
+ is with You. Should any Covered Software prove defective in any respect,
+ You (not any Contributor) assume the cost of any necessary servicing,
+ repair, or correction. This disclaimer of warranty constitutes an essential
+ part of this License. No use of any Covered Software is authorized under
+ this License except under this disclaimer.
+
+7. Limitation of Liability
+
+ Under no circumstances and under no legal theory, whether tort (including
+ negligence), contract, or otherwise, shall any Contributor, or anyone who
+ distributes Covered Software as permitted above, be liable to You for any
+ direct, indirect, special, incidental, or consequential damages of any
+ character including, without limitation, damages for lost profits, loss of
+ goodwill, work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses, even if such party shall have been
+ informed of the possibility of such damages. This limitation of liability
+ shall not apply to liability for death or personal injury resulting from
+ such party's negligence to the extent applicable law prohibits such
+ limitation. Some jurisdictions do not allow the exclusion or limitation of
+ incidental or consequential damages, so this exclusion and limitation may
+ not apply to You.
+
+8. Litigation
+
+ Any litigation relating to this License may be brought only in the courts
+ of a jurisdiction where the defendant maintains its principal place of
+ business and such litigation shall be governed by laws of that
+ jurisdiction, without reference to its conflict-of-law provisions. Nothing
+ in this Section shall prevent a party's ability to bring cross-claims or
+ counter-claims.
+
+9. Miscellaneous
+
+ This License represents the complete agreement concerning the subject
+ matter hereof. If any provision of this License is held to be
+ unenforceable, such provision shall be reformed only to the extent
+ necessary to make it enforceable. Any law or regulation which provides that
+ the language of a contract shall be construed against the drafter shall not
+ be used to construe this License against a Contributor.
+
+
+10. Versions of the License
+
+10.1. New Versions
+
+ Mozilla Foundation is the license steward. Except as provided in Section
+ 10.3, no one other than the license steward has the right to modify or
+ publish new versions of this License. Each version will be given a
+ distinguishing version number.
+
+10.2. Effect of New Versions
+
+ You may distribute the Covered Software under the terms of the version
+ of the License under which You originally received the Covered Software,
+ or under the terms of any subsequent version published by the license
+ steward.
+
+10.3. Modified Versions
+
+ If you create software not governed by this License, and you want to
+ create a new license for such software, you may create and use a
+ modified version of this License if you rename the license and remove
+ any references to the name of the license steward (except to note that
+ such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+ Licenses If You choose to distribute Source Code Form that is
+ Incompatible With Secondary Licenses under the terms of this version of
+ the License, the notice described in Exhibit B of this License must be
+ attached.
+
+Exhibit A - Source Code Form License Notice
+
+ This Source Code Form is subject to the
+ terms of the Mozilla Public License, v.
+ 2.0. If a copy of the MPL was not
+ distributed with this file, You can
+ obtain one at
+ http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular file,
+then You may include the notice in a location (such as a LICENSE file in a
+relevant directory) where a recipient would be likely to look for such a
+notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+
+ This Source Code Form is "Incompatible
+ With Secondary Licenses", as defined by
+ the Mozilla Public License, v. 2.0.
+
diff --git a/third_party/python/mozilla-version/MANIFEST.in b/third_party/python/mozilla-version/MANIFEST.in
new file mode 100644
index 0000000000..44c07d4f05
--- /dev/null
+++ b/third_party/python/mozilla-version/MANIFEST.in
@@ -0,0 +1,8 @@
+include LICENSE
+include README.md
+include version.txt
+include requirements*.txt
+include requirements.txt.in # Used by setup.py
+
+recursive-exclude * __pycache__
+recursive-exclude * *.py[co]
diff --git a/third_party/python/mozilla-version/PKG-INFO b/third_party/python/mozilla-version/PKG-INFO
new file mode 100644
index 0000000000..fc5fecbf73
--- /dev/null
+++ b/third_party/python/mozilla-version/PKG-INFO
@@ -0,0 +1,13 @@
+Metadata-Version: 1.1
+Name: mozilla-version
+Version: 0.3.4
+Summary: Process Firefox version numbers. Tells whether they are valid or not, whether they are nightlies or regular releases, whether this version precedes that other.
+
+Home-page: https://github.com/mozilla-releng/mozilla-version
+Author: Mozilla Release Engineering
+Author-email: release+python@mozilla.com
+License: MPL2
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
diff --git a/third_party/python/mozilla-version/README.md b/third_party/python/mozilla-version/README.md
new file mode 100644
index 0000000000..59cde67f63
--- /dev/null
+++ b/third_party/python/mozilla-version/README.md
@@ -0,0 +1,28 @@
+# mozilla-version
+
+[![Build Status](https://travis-ci.org/mozilla-releng/mozilla-version.svg?branch=master)](https://travis-ci.org/mozilla-releng/mozilla-version) [![Coverage Status](https://coveralls.io/repos/github/mozilla-releng/mozilla-version/badge.svg?branch=master)](https://coveralls.io/github/mozilla-releng/mozilla-version?branch=master)[![Documentation Status](https://readthedocs.org/projects/mozilla-version/badge/?version=latest)](https://mozilla-version.readthedocs.io/en/latest/?badge=latest)
+
+
+Process Firefox version numbers. Tell whether they are valid or not, whether they are nightlies or regular releases, and whether one version precedes another.
+
+## Documentation
+
+https://mozilla-version.readthedocs.io/en/latest/
+
+## Get the code
+
+Just install it from pip:
+
+```sh
+pip install mozilla-version
+```
+
+
+## Hack on the code
+```sh
+virtualenv venv # create the virtualenv in ./venv
+. venv/bin/activate # activate it
+git clone https://github.com/mozilla-releng/mozilla-version
+cd mozilla-version
+pip install mozilla-version
+```
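A minimal usage sketch of the API the README describes, based on the docstring examples in `mozilla_version/gecko.py` later in this diff (assuming the package is installed):

```python
from mozilla_version.gecko import FirefoxVersion

# Parse and inspect a valid release version.
version = FirefoxVersion.parse('60.0.1')
print(version.is_release)   # True

# Betas order before the corresponding release.
older = FirefoxVersion.parse('60.0b14')
print(older < version)      # True

# Invalid strings raise PatternNotMatchedError, e.g.:
# FirefoxVersion.parse('60.0.0')  # patch number 0 is not allowed when the minor number is 0
```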
diff --git a/third_party/python/mozilla-version/mozilla_version/__init__.py b/third_party/python/mozilla-version/mozilla_version/__init__.py
new file mode 100644
index 0000000000..ba46ee264d
--- /dev/null
+++ b/third_party/python/mozilla-version/mozilla_version/__init__.py
@@ -0,0 +1 @@
+"""Defines characteristics of Mozilla's version numbers."""
diff --git a/third_party/python/mozilla-version/mozilla_version/balrog.py b/third_party/python/mozilla-version/mozilla_version/balrog.py
new file mode 100644
index 0000000000..0860d5698a
--- /dev/null
+++ b/third_party/python/mozilla-version/mozilla_version/balrog.py
@@ -0,0 +1,142 @@
+"""Defines characteristics of a Balrog release name.
+
+Balrog is the server that delivers Firefox and Thunderbird updates. Release names follow
+the pattern "{product}-{version}-build{build_number}"
+
+Examples:
+ .. code-block:: python
+
+ from mozilla_version.balrog import BalrogReleaseName
+
+ balrog_release = BalrogReleaseName.parse('firefox-60.0.1-build1')
+
+ balrog_release.product # firefox
+ balrog_release.version.major_number # 60
+ str(balrog_release) # 'firefox-60.0.1-build1'
+
+ previous_release = BalrogReleaseName.parse('firefox-60.0-build2')
+ previous_release < balrog_release # True
+
+ invalid = BalrogReleaseName.parse('60.0.1') # raises PatternNotMatchedError
+ invalid = BalrogReleaseName.parse('firefox-60.0.1') # raises PatternNotMatchedError
+
+ # Releases can be built thanks to version classes like FirefoxVersion
+ BalrogReleaseName('firefox', FirefoxVersion(60, 0, 1, 1)) # 'firefox-60.0.1-build1'
+
+"""
+
+import attr
+import re
+
+from mozilla_version.errors import PatternNotMatchedError
+from mozilla_version.parser import get_value_matched_by_regex
+from mozilla_version.gecko import (
+ GeckoVersion, FirefoxVersion, DeveditionVersion, FennecVersion, ThunderbirdVersion
+)
+
+
+_VALID_ENOUGH_BALROG_RELEASE_PATTERN = re.compile(
+ r"^(?P<product>[a-z]+)-(?P<version>.+)$", re.IGNORECASE
+)
+
+
+_SUPPORTED_PRODUCTS = {
+ 'firefox': FirefoxVersion,
+ 'devedition': DeveditionVersion,
+ 'fennec': FennecVersion,
+ 'thunderbird': ThunderbirdVersion,
+}
+
+
+def _supported_product(string):
+ product = string.lower()
+ if product not in _SUPPORTED_PRODUCTS:
+ raise PatternNotMatchedError(string, pattern='unknown product')
+ return product
+
+
+def _products_must_be_identical(method):
+ def checker(this, other):
+ if this.product != other.product:
+ raise ValueError('Cannot compare "{}" and "{}"'.format(this.product, other.product))
+ return method(this, other)
+ return checker
+
+
+@attr.s(frozen=True, cmp=False, hash=True)
+class BalrogReleaseName(object):
+ """Class that validates and handles Balrog release names.
+
+ Raises:
+ PatternNotMatchedError: if a parsed string doesn't match the pattern of a valid release
+ MissingFieldError: if a mandatory field is missing in the string. Mandatory fields are
+ `product`, `major_number`, `minor_number`, and `build_number`
+ ValueError: if an integer can't be cast or is not (strictly) positive
+ TooManyTypesError: if the string matches more than 1 `VersionType`
+ NoVersionTypeError: if the string matches none.
+
+ """
+
+ product = attr.ib(type=str, converter=_supported_product)
+ version = attr.ib(type=GeckoVersion)
+
+ def __attrs_post_init__(self):
+ """Ensure attributes are sane all together."""
+ if self.version.build_number is None:
+ raise PatternNotMatchedError(self, pattern='build_number must exist')
+
+ @classmethod
+ def parse(cls, release_string):
+ """Construct an object representing a valid Firefox version number."""
+ regex_matches = _VALID_ENOUGH_BALROG_RELEASE_PATTERN.match(release_string)
+ if regex_matches is None:
+ raise PatternNotMatchedError(release_string, _VALID_ENOUGH_BALROG_RELEASE_PATTERN)
+
+ product = get_value_matched_by_regex('product', regex_matches, release_string)
+ try:
+ VersionClass = _SUPPORTED_PRODUCTS[product.lower()]
+ except KeyError:
+ raise PatternNotMatchedError(release_string, pattern='unknown product')
+
+ version_string = get_value_matched_by_regex('version', regex_matches, release_string)
+ version = VersionClass.parse(version_string)
+
+ return cls(product, version)
+
+ def __str__(self):
+ """Implement string representation.
+
+ Computes a new string based on the given attributes.
+ """
+ version_string = str(self.version).replace('build', '-build')
+ return '{}-{}'.format(self.product, version_string)
+
+ @_products_must_be_identical
+ def __eq__(self, other):
+ """Implement `==` operator."""
+ return self.version == other.version
+
+ @_products_must_be_identical
+ def __ne__(self, other):
+ """Implement `!=` operator."""
+ return self.version != other.version
+
+ @_products_must_be_identical
+ def __lt__(self, other):
+ """Implement `<` operator."""
+ return self.version < other.version
+
+ @_products_must_be_identical
+ def __le__(self, other):
+ """Implement `<=` operator."""
+ return self.version <= other.version
+
+ @_products_must_be_identical
+ def __gt__(self, other):
+ """Implement `>` operator."""
+ return self.version > other.version
+
+ @_products_must_be_identical
+ def __ge__(self, other):
+ """Implement `>=` operator."""
+ return self.version >= other.version
diff --git a/third_party/python/mozilla-version/mozilla_version/errors.py b/third_party/python/mozilla-version/mozilla_version/errors.py
new file mode 100644
index 0000000000..84cd2169f5
--- /dev/null
+++ b/third_party/python/mozilla-version/mozilla_version/errors.py
@@ -0,0 +1,64 @@
+"""Defines all errors reported by mozilla-version."""
+
+
+class PatternNotMatchedError(ValueError):
+ """Error when a string doesn't match an expected pattern.
+
+ Args:
+ string (str): The string it was unable to match.
+        pattern (str): The pattern it tried to match against.
+ """
+
+ def __init__(self, string, pattern):
+ """Constructor."""
+ super(PatternNotMatchedError, self).__init__(
+ '"{}" does not match the pattern: {}'.format(string, pattern)
+ )
+
+
+class NoVersionTypeError(ValueError):
+ """Error when `version_string` matched the pattern, but was unable to find its type.
+
+ Args:
+        version_string (str): The string whose version type could not be determined.
+ """
+
+ def __init__(self, version_string):
+ """Constructor."""
+ super(NoVersionTypeError, self).__init__(
+ 'Version "{}" matched the pattern of a valid version, but it is unable to find what type it is. \
+This is likely a bug in mozilla-version'.format(version_string)
+ )
+
+
+class MissingFieldError(ValueError):
+ """Error when `version_string` lacks an expected field.
+
+ Args:
+        version_string (str): The string from which the given field could not be extracted.
+ field_name (str): The name of the missing field.
+ """
+
+ def __init__(self, version_string, field_name):
+ """Constructor."""
+ super(MissingFieldError, self).__init__(
+ 'Release "{}" does not contain a valid {}'.format(version_string, field_name)
+ )
+
+
+class TooManyTypesError(ValueError):
+ """Error when `version_string` has too many types."""
+
+ def __init__(self, version_string, first_matched_type, second_matched_type):
+ """Constructor.
+
+ Args:
+ version_string (str): The string that gave too many types.
+ first_matched_type (str): The name of the first detected type.
+            second_matched_type (str): The name of the second detected type.
+ """
+ super(TooManyTypesError, self).__init__(
+ 'Release "{}" cannot match types "{}" and "{}"'.format(
+ version_string, first_matched_type, second_matched_type
+ )
+ )
diff --git a/third_party/python/mozilla-version/mozilla_version/gecko.py b/third_party/python/mozilla-version/mozilla_version/gecko.py
new file mode 100644
index 0000000000..dd14dda6b0
--- /dev/null
+++ b/third_party/python/mozilla-version/mozilla_version/gecko.py
@@ -0,0 +1,435 @@
+"""Defines characteristics of a Gecko version number, including Firefox.
+
+Examples:
+ .. code-block:: python
+
+ from mozilla_version.gecko import FirefoxVersion
+
+ version = FirefoxVersion.parse('60.0.1')
+
+ version.major_number # 60
+ version.minor_number # 0
+ version.patch_number # 1
+
+ version.is_release # True
+ version.is_beta # False
+ version.is_nightly # False
+
+ str(version) # '60.0.1'
+
+ previous_version = FirefoxVersion.parse('60.0b14')
+ previous_version < version # True
+
+ previous_version.beta_number # 14
+ previous_version.major_number # 60
+ previous_version.minor_number # 0
+ previous_version.patch_number # raises AttributeError
+
+ previous_version.is_beta # True
+ previous_version.is_release # False
+ previous_version.is_nightly # False
+
+ invalid_version = FirefoxVersion.parse('60.1') # raises PatternNotMatchedError
+ invalid_version = FirefoxVersion.parse('60.0.0') # raises PatternNotMatchedError
+ version = FirefoxVersion.parse('60.0') # valid
+
+    # Versions can be built from raw values
+    FirefoxVersion(60, 0) # '60.0'
+    FirefoxVersion(60, 0, 1) # '60.0.1'
+    FirefoxVersion(60, 1, 0) # '60.1.0'
+    FirefoxVersion(60, 0, 1, 1) # '60.0.1build1'
+    FirefoxVersion(60, 0, beta_number=1) # '60.0b1'
+    FirefoxVersion(60, 0, is_nightly=True) # '60.0a1'
+    FirefoxVersion(60, 0, is_aurora_or_devedition=True) # '60.0a2'
+    FirefoxVersion(60, 0, is_esr=True) # '60.0esr'
+    FirefoxVersion(60, 0, 1, is_esr=True) # '60.0.1esr'
+
+"""
+
+import attr
+import re
+
+from mozilla_version.errors import (
+ PatternNotMatchedError, TooManyTypesError, NoVersionTypeError
+)
+from mozilla_version.parser import strictly_positive_int_or_none
+from mozilla_version.version import BaseVersion, VersionType
+
+
+def _find_type(version):
+ version_type = None
+
+ def ensure_version_type_is_not_already_defined(previous_type, candidate_type):
+ if previous_type is not None:
+ raise TooManyTypesError(
+ str(version), previous_type, candidate_type
+ )
+
+ if version.is_nightly:
+ version_type = VersionType.NIGHTLY
+ if version.is_aurora_or_devedition:
+ ensure_version_type_is_not_already_defined(
+ version_type, VersionType.AURORA_OR_DEVEDITION
+ )
+ version_type = VersionType.AURORA_OR_DEVEDITION
+ if version.is_beta:
+ ensure_version_type_is_not_already_defined(version_type, VersionType.BETA)
+ version_type = VersionType.BETA
+ if version.is_esr:
+ ensure_version_type_is_not_already_defined(version_type, VersionType.ESR)
+ version_type = VersionType.ESR
+ if version.is_release:
+ ensure_version_type_is_not_already_defined(version_type, VersionType.RELEASE)
+ version_type = VersionType.RELEASE
+
+ if version_type is None:
+ raise NoVersionTypeError(str(version))
+
+ return version_type
+
+
+@attr.s(frozen=True, cmp=False, hash=True)
+class GeckoVersion(BaseVersion):
+ """Class that validates and handles version numbers for Gecko-based products.
+
+ You may want to use specific classes like FirefoxVersion. These classes define edge cases
+ that were shipped.
+
+ Raises:
+ PatternNotMatchedError: if the string doesn't match the pattern of a valid version number
+ MissingFieldError: if a mandatory field is missing in the string. Mandatory fields are
+ `major_number` and `minor_number`
+ ValueError: if an integer can't be cast or is not (strictly) positive
+ TooManyTypesError: if the string matches more than 1 `VersionType`
+ NoVersionTypeError: if the string matches none.
+
+ """
+
+ # XXX This pattern doesn't catch all subtleties of a Firefox version (like 32.5 isn't valid).
+ # This regex is intended to assign numbers. Then checks are done by attrs and
+ # __attrs_post_init__()
+ _VALID_ENOUGH_VERSION_PATTERN = re.compile(r"""
+ ^(?P<major_number>\d+)
+ \.(?P<minor_number>\d+)
+ (\.(?P<patch_number>\d+))?
+ (
+ (?P<is_nightly>a1)
+ |(?P<is_aurora_or_devedition>a2)
+ |b(?P<beta_number>\d+)
+ |(?P<is_esr>esr)
+ )?
+ -?(build(?P<build_number>\d+))?$""", re.VERBOSE)
+
+ _ALL_VERSION_NUMBERS_TYPES = (
+ 'major_number', 'minor_number', 'patch_number', 'beta_number',
+ )
+
+ _OPTIONAL_NUMBERS = BaseVersion._OPTIONAL_NUMBERS + ('beta_number', 'build_number')
+
+ build_number = attr.ib(type=int, converter=strictly_positive_int_or_none, default=None)
+ beta_number = attr.ib(type=int, converter=strictly_positive_int_or_none, default=None)
+ is_nightly = attr.ib(type=bool, default=False)
+ is_aurora_or_devedition = attr.ib(type=bool, default=False)
+ is_esr = attr.ib(type=bool, default=False)
+ version_type = attr.ib(init=False, default=attr.Factory(_find_type, takes_self=True))
+
+ def __attrs_post_init__(self):
+ """Ensure attributes are sane all together."""
+ if self.minor_number == 0 and self.patch_number == 0:
+ raise PatternNotMatchedError(
+ self, pattern='Minor number and patch number cannot be both equal to 0'
+ )
+
+ if self.minor_number != 0 and self.patch_number is None:
+ raise PatternNotMatchedError(
+ self, pattern='Patch number cannot be undefined if minor number is greater than 0'
+ )
+
+ if self.beta_number is not None and self.patch_number is not None:
+ raise PatternNotMatchedError(
+ self, pattern='Beta number and patch number cannot be both defined'
+ )
+
+ if self.patch_number is not None and self.is_nightly:
+ raise PatternNotMatchedError(
+ self, pattern='Patch number cannot be defined on a nightly version'
+ )
+
+ if self.patch_number is not None and self.is_aurora_or_devedition:
+ raise PatternNotMatchedError(
+ self, pattern='Patch number cannot be defined on an aurora version'
+ )
+
+ @classmethod
+ def parse(cls, version_string):
+ """Construct an object representing a valid Firefox version number."""
+ return super(GeckoVersion, cls).parse(
+ version_string, regex_groups=('is_nightly', 'is_aurora_or_devedition', 'is_esr')
+ )
+
+ @property
+ def is_beta(self):
+ """Return `True` if `FirefoxVersion` was built with a string matching a beta version."""
+ return self.beta_number is not None
+
+ @property
+ def is_release(self):
+ """Return `True` if `FirefoxVersion` was built with a string matching a release version."""
+ return not (self.is_nightly or self.is_aurora_or_devedition or self.is_beta or self.is_esr)
+
+ def __str__(self):
+ """Implement string representation.
+
+ Computes a new string based on the given attributes.
+ """
+ string = super(GeckoVersion, self).__str__()
+
+ if self.is_nightly:
+ string = '{}a1'.format(string)
+ elif self.is_aurora_or_devedition:
+ string = '{}a2'.format(string)
+ elif self.is_beta:
+ string = '{}b{}'.format(string, self.beta_number)
+ elif self.is_esr:
+ string = '{}esr'.format(string)
+
+ if self.build_number is not None:
+ string = '{}build{}'.format(string, self.build_number)
+
+ return string
+
+ def __eq__(self, other):
+ """Implement `==` operator.
+
+ A version is considered equal to another if all numbers match and if they are of the same
+ `VersionType`. Like said in `VersionType`, release and ESR are considered equal (if they
+ share the same numbers). If a version contains a build number but not the other, the build
+ number won't be considered in the comparison.
+
+ Examples:
+ .. code-block:: python
+
+ assert GeckoVersion.parse('60.0') == GeckoVersion.parse('60.0')
+ assert GeckoVersion.parse('60.0') == GeckoVersion.parse('60.0esr')
+ assert GeckoVersion.parse('60.0') == GeckoVersion.parse('60.0build1')
+ assert GeckoVersion.parse('60.0build1') == GeckoVersion.parse('60.0build1')
+
+ assert GeckoVersion.parse('60.0') != GeckoVersion.parse('61.0')
+ assert GeckoVersion.parse('60.0') != GeckoVersion.parse('60.1.0')
+ assert GeckoVersion.parse('60.0') != GeckoVersion.parse('60.0.1')
+ assert GeckoVersion.parse('60.0') != GeckoVersion.parse('60.0a1')
+ assert GeckoVersion.parse('60.0') != GeckoVersion.parse('60.0a2')
+ assert GeckoVersion.parse('60.0') != GeckoVersion.parse('60.0b1')
+ assert GeckoVersion.parse('60.0build1') != GeckoVersion.parse('60.0build2')
+
+ """
+ return super(GeckoVersion, self).__eq__(other)
+
+ def _compare(self, other):
+ """Compare this release with another.
+
+ Returns:
+ 0 if equal
+            < 0 if this precedes the other
+ > 0 if the other precedes this
+
+ """
+ if isinstance(other, str):
+ other = GeckoVersion.parse(other)
+ elif not isinstance(other, GeckoVersion):
+ raise ValueError('Cannot compare "{}", type not supported!'.format(other))
+
+ difference = super(GeckoVersion, self)._compare(other)
+ if difference != 0:
+ return difference
+
+ channel_difference = self._compare_version_type(other)
+ if channel_difference != 0:
+ return channel_difference
+
+ if self.is_beta and other.is_beta:
+ beta_difference = self.beta_number - other.beta_number
+ if beta_difference != 0:
+ return beta_difference
+
+ # Build numbers are a special case. We might compare a regular version number
+ # (like "32.0b8") versus a release build (as in "32.0b8build1"). As a consequence,
+ # we only compare build_numbers when we both have them.
+ try:
+ return self.build_number - other.build_number
+ except TypeError:
+ pass
+
+ return 0
+
+ def _compare_version_type(self, other):
+ return self.version_type.compare(other.version_type)
+
+
+class _VersionWithEdgeCases(GeckoVersion):
+ def __attrs_post_init__(self):
+ for edge_case in self._RELEASED_EDGE_CASES:
+ if all(
+ getattr(self, number_type) == edge_case.get(number_type, None)
+ for number_type in self._ALL_VERSION_NUMBERS_TYPES
+ ):
+ if self.build_number is None:
+ return
+ elif self.build_number == edge_case.get('build_number', None):
+ return
+
+ super(_VersionWithEdgeCases, self).__attrs_post_init__()
+
+
+class FirefoxVersion(_VersionWithEdgeCases):
+ """Class that validates and handles Firefox version numbers."""
+
+ _RELEASED_EDGE_CASES = ({
+ 'major_number': 33,
+ 'minor_number': 1,
+ 'build_number': 1,
+ }, {
+ 'major_number': 33,
+ 'minor_number': 1,
+ 'build_number': 2,
+ }, {
+ 'major_number': 33,
+ 'minor_number': 1,
+ 'build_number': 3,
+ }, {
+ 'major_number': 38,
+ 'minor_number': 0,
+ 'patch_number': 5,
+ 'beta_number': 1,
+ 'build_number': 1,
+ }, {
+ 'major_number': 38,
+ 'minor_number': 0,
+ 'patch_number': 5,
+ 'beta_number': 1,
+ 'build_number': 2,
+ }, {
+ 'major_number': 38,
+ 'minor_number': 0,
+ 'patch_number': 5,
+ 'beta_number': 2,
+ 'build_number': 1,
+ }, {
+ 'major_number': 38,
+ 'minor_number': 0,
+ 'patch_number': 5,
+ 'beta_number': 3,
+ 'build_number': 1,
+ })
+
+
+class DeveditionVersion(GeckoVersion):
+ """Class that validates and handles Devedition after it became an equivalent to beta."""
+
+    # No edge cases were shipped
+
+ def __attrs_post_init__(self):
+ """Ensure attributes are sane all together."""
+ if (
+ (not self.is_beta) or
+ (self.major_number < 54) or
+ (self.major_number == 54 and self.beta_number < 11)
+ ):
+ raise PatternNotMatchedError(
+ self, pattern='Devedition as a product must be a beta >= 54.0b11'
+ )
+
+
+class FennecVersion(_VersionWithEdgeCases):
+ """Class that validates and handles Fennec (Firefox for Android) version numbers."""
+
+ _RELEASED_EDGE_CASES = ({
+ 'major_number': 33,
+ 'minor_number': 1,
+ 'build_number': 1,
+ }, {
+ 'major_number': 33,
+ 'minor_number': 1,
+ 'build_number': 2,
+ }, {
+ 'major_number': 38,
+ 'minor_number': 0,
+ 'patch_number': 5,
+ 'beta_number': 4,
+ 'build_number': 1,
+ })
+
+ def __attrs_post_init__(self):
+        """Ensure attributes are sane when taken together."""
+ # Versions matching 68.Xa1, 68.XbN, or simply 68.X are expected since bug 1523402. The
+        # latter is needed because of the version.txt used on beta.
+ if (
+ self.major_number == 68 and
+ self.minor_number > 0 and
+ self.patch_number is None
+ ):
+ return
+
+ if self.major_number >= 69:
+ raise PatternNotMatchedError(self, pattern='Last Fennec version is 68')
+
+ super(FennecVersion, self).__attrs_post_init__()
+
+
+class ThunderbirdVersion(_VersionWithEdgeCases):
+ """Class that validates and handles Thunderbird version numbers."""
+
+ _RELEASED_EDGE_CASES = ({
+ 'major_number': 45,
+ 'minor_number': 1,
+ 'beta_number': 1,
+ 'build_number': 1,
+ }, {
+ 'major_number': 45,
+ 'minor_number': 2,
+ 'build_number': 1,
+ }, {
+ 'major_number': 45,
+ 'minor_number': 2,
+ 'build_number': 2,
+ }, {
+ 'major_number': 45,
+ 'minor_number': 2,
+ 'beta_number': 1,
+ 'build_number': 2,
+ })
+
+
+class GeckoSnapVersion(GeckoVersion):
+ """Class that validates and handles Gecko's Snap version numbers.
+
+    Snap is a Linux packaging format developed by Canonical. Valid numbers look like "63.0b7-1",
+    where "-1" stands for "build1". Release Engineering set this scheme at the beginning of Snap
+    support, and published snaps cannot be renamed to the regular pattern like "63.0b7-build1".
+ """
+
+    # Our Snaps are recent enough not to list any edge cases yet.
+
+ # Differences between this regex and the one in GeckoVersion:
+ # * no a2
+ # * no "build"
+ # * but mandatory dash and build number.
+ # Example: 63.0b7-1
+ _VALID_ENOUGH_VERSION_PATTERN = re.compile(r"""
+ ^(?P<major_number>\d+)
+ \.(?P<minor_number>\d+)
+ (\.(?P<patch_number>\d+))?
+ (
+ (?P<is_nightly>a1)
+ |b(?P<beta_number>\d+)
+ |(?P<is_esr>esr)
+ )?
+ -(?P<build_number>\d+)$""", re.VERBOSE)
+
+ def __str__(self):
+ """Implement string representation.
+
+        Returns a string in a format like "63.0b7-1".
+ """
+ string = super(GeckoSnapVersion, self).__str__()
+ return string.replace('build', '-')
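+
+
+# A minimal, self-contained sanity sketch of the ordering rules implemented above. It only relies
+# on classes defined in this module and mirrors cases also exercised by the test suite; it is
+# illustrative rather than exhaustive.
+if __name__ == '__main__':
+    # Version types sort as: nightly < beta < release < esr.
+    nightly, beta, release, esr = [
+        GeckoVersion.parse(string) for string in ('32.0a1', '32.0b1', '32.0', '32.0esr')
+    ]
+    assert nightly < beta < release < esr
+    # Build numbers only take part in a comparison when both versions carry one.
+    assert GeckoVersion.parse('32.0') == GeckoVersion.parse('32.0build1')
+    assert GeckoVersion.parse('32.0build1') != GeckoVersion.parse('32.0build2')
+    # Snap versions keep their dash-based build suffix in their string form.
+    assert str(GeckoSnapVersion.parse('63.0b7-1')) == '63.0b7-1'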
diff --git a/third_party/python/mozilla-version/mozilla_version/maven.py b/third_party/python/mozilla-version/mozilla_version/maven.py
new file mode 100644
index 0000000000..91cecb3431
--- /dev/null
+++ b/third_party/python/mozilla-version/mozilla_version/maven.py
@@ -0,0 +1,56 @@
+"""Defines characteristics of a Maven version at Mozilla."""
+
+import attr
+import re
+
+from mozilla_version.version import BaseVersion
+
+
+@attr.s(frozen=True, cmp=False, hash=True)
+class MavenVersion(BaseVersion):
+ """Class that validates and handles Maven version numbers.
+
+ At Mozilla, Maven packages are used in projects like "GeckoView" or "Android-Components".
+ """
+
+ is_snapshot = attr.ib(type=bool, default=False)
+
+ _VALID_ENOUGH_VERSION_PATTERN = re.compile(r"""
+ ^(?P<major_number>\d+)
+ \.(?P<minor_number>\d+)
+ (\.(?P<patch_number>\d+))?
+ (?P<is_snapshot>-SNAPSHOT)?$""", re.VERBOSE)
+
+ @classmethod
+ def parse(cls, version_string):
+ """Construct an object representing a valid Maven version number."""
+ return super(MavenVersion, cls).parse(version_string, regex_groups=('is_snapshot', ))
+
+ def __str__(self):
+ """Implement string representation.
+
+ Computes a new string based on the given attributes.
+ """
+ string = super(MavenVersion, self).__str__()
+
+ if self.is_snapshot:
+ string = '{}-SNAPSHOT'.format(string)
+
+ return string
+
+ def _compare(self, other):
+ if isinstance(other, str):
+ other = MavenVersion.parse(other)
+ elif not isinstance(other, MavenVersion):
+ raise ValueError('Cannot compare "{}", type not supported!'.format(other))
+
+ difference = super(MavenVersion, self)._compare(other)
+ if difference != 0:
+ return difference
+
+ if not self.is_snapshot and other.is_snapshot:
+ return 1
+ elif self.is_snapshot and not other.is_snapshot:
+ return -1
+ else:
+ return 0
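+
+
+# A minimal sanity sketch of the SNAPSHOT ordering implemented above; illustrative only and
+# limited to names defined in this module.
+if __name__ == '__main__':
+    # A snapshot precedes the release it targets, but follows any earlier release.
+    assert MavenVersion.parse('32.0-SNAPSHOT') < MavenVersion.parse('32.0')
+    assert MavenVersion.parse('31.0') < MavenVersion.parse('32.0-SNAPSHOT')
+    # The "-SNAPSHOT" suffix survives the round trip through the string representation.
+    assert str(MavenVersion(32, 0, 1, is_snapshot=True)) == '32.0.1-SNAPSHOT'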
diff --git a/third_party/python/mozilla-version/mozilla_version/parser.py b/third_party/python/mozilla-version/mozilla_version/parser.py
new file mode 100644
index 0000000000..d3a6ef6bb6
--- /dev/null
+++ b/third_party/python/mozilla-version/mozilla_version/parser.py
@@ -0,0 +1,48 @@
+"""Defines parser helpers."""
+
+from mozilla_version.errors import MissingFieldError
+
+
+def get_value_matched_by_regex(field_name, regex_matches, string):
+ """Ensure value stored in regex group exists."""
+ try:
+ value = regex_matches.group(field_name)
+ if value is not None:
+ return value
+ except IndexError:
+ pass
+
+ raise MissingFieldError(string, field_name)
+
+
+def does_regex_have_group(regex_matches, group_name):
+ """Return a boolean depending on whether a regex group is matched."""
+ try:
+ return regex_matches.group(group_name) is not None
+ except IndexError:
+ return False
+
+
+def positive_int(val):
+ """Parse `val` into a positive integer."""
+ if isinstance(val, float):
+ raise ValueError('"{}" must not be a float'.format(val))
+ val = int(val)
+ if val >= 0:
+ return val
+ raise ValueError('"{}" must be positive'.format(val))
+
+
+def positive_int_or_none(val):
+ """Parse `val` into either `None` or a positive integer."""
+ if val is None:
+ return val
+ return positive_int(val)
+
+
+def strictly_positive_int_or_none(val):
+ """Parse `val` into either `None` or a strictly positive integer."""
+ val = positive_int_or_none(val)
+ if val is None or val > 0:
+ return val
+ raise ValueError('"{}" must be strictly positive'.format(val))
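+
+
+# Illustrative behaviour of the converters above (the version classes elsewhere in this package
+# use them as attrs converters); a quick sanity sketch, not an exhaustive check.
+if __name__ == '__main__':
+    assert positive_int('10') == 10            # strings are coerced through int()
+    assert positive_int_or_none(None) is None  # None passes through untouched
+    assert strictly_positive_int_or_none(3) == 3
+    try:
+        strictly_positive_int_or_none(0)       # zero is positive, but not strictly positive
+    except ValueError:
+        pass
+    else:
+        raise AssertionError('0 should have been rejected')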
diff --git a/third_party/python/mozilla-version/mozilla_version/test/__init__.py b/third_party/python/mozilla-version/mozilla_version/test/__init__.py
new file mode 100644
index 0000000000..f094a83248
--- /dev/null
+++ b/third_party/python/mozilla-version/mozilla_version/test/__init__.py
@@ -0,0 +1,5 @@
+from contextlib import contextmanager
+
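+# A lightweight stand-in for contextlib.nullcontext() (Python 3.7+): a context manager that
+# expects no exception, used in parametrized tests alongside pytest.raises().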
+@contextmanager
+def does_not_raise():
+ yield
diff --git a/third_party/python/mozilla-version/mozilla_version/test/test_balrog.py b/third_party/python/mozilla-version/mozilla_version/test/test_balrog.py
new file mode 100644
index 0000000000..dd9dc24d3c
--- /dev/null
+++ b/third_party/python/mozilla-version/mozilla_version/test/test_balrog.py
@@ -0,0 +1,172 @@
+import pytest
+
+from mozilla_version.balrog import BalrogReleaseName
+from mozilla_version.errors import PatternNotMatchedError
+from mozilla_version.gecko import FirefoxVersion
+
+
+@pytest.mark.parametrize(
+ 'product, major_number, minor_number, patch_number, beta_number, build_number, is_nightly, \
+is_aurora_or_devedition, is_esr, expected_output_string', ((
+ 'firefox', 32, 0, None, None, 1, False, False, False, 'firefox-32.0-build1'
+), (
+ 'firefox', 32, 0, 1, None, 2, False, False, False, 'firefox-32.0.1-build2'
+), (
+ 'firefox', 32, 0, None, 3, 4, False, False, False, 'firefox-32.0b3-build4'
+), (
+ 'firefox', 32, 0, None, None, 5, True, False, False, 'firefox-32.0a1-build5'
+), (
+ 'firefox', 32, 0, None, None, 6, False, True, False, 'firefox-32.0a2-build6'
+), (
+ 'firefox', 32, 0, None, None, 7, False, False, True, 'firefox-32.0esr-build7'
+), (
+ 'firefox', 32, 0, 1, None, 8, False, False, True, 'firefox-32.0.1esr-build8'
+), (
+ 'devedition', 54, 0, None, 12, 1, False, False, False, 'devedition-54.0b12-build1'
+), (
+ 'fennec', 32, 0, None, None, 1, False, False, False, 'fennec-32.0-build1'
+), (
+ 'thunderbird', 32, 0, None, None, 1, False, False, False, 'thunderbird-32.0-build1'
+)))
+def test_balrog_release_name_constructor_and_str(
+ product, major_number, minor_number, patch_number, beta_number, build_number, is_nightly,
+ is_aurora_or_devedition, is_esr, expected_output_string
+):
+ assert str(BalrogReleaseName(product, FirefoxVersion(
+ major_number=major_number,
+ minor_number=minor_number,
+ patch_number=patch_number,
+ build_number=build_number,
+ beta_number=beta_number,
+ is_nightly=is_nightly,
+ is_aurora_or_devedition=is_aurora_or_devedition,
+ is_esr=is_esr
+ ))) == expected_output_string
+
+
+@pytest.mark.parametrize('product, major_number, minor_number, patch_number, beta_number, build_number, is_nightly, is_aurora_or_devedition, is_esr, ExpectedErrorType', ((
+ ('nonexistingproduct', 32, 0, None, None, 1, False, False, False, PatternNotMatchedError),
+ ('firefox', 32, 0, None, None, None, False, False, False, PatternNotMatchedError),
+)))
+def test_fail_balrog_release_constructor(product, major_number, minor_number, patch_number, beta_number, build_number, is_nightly, is_aurora_or_devedition, is_esr, ExpectedErrorType):
+ with pytest.raises(ExpectedErrorType):
+ BalrogReleaseName(product, FirefoxVersion(
+ major_number=major_number,
+ minor_number=minor_number,
+ patch_number=patch_number,
+ beta_number=beta_number,
+ build_number=build_number,
+ is_nightly=is_nightly,
+ is_aurora_or_devedition=is_aurora_or_devedition,
+ is_esr=is_esr
+ ))
+
+
+@pytest.mark.parametrize('string, expected_string', ((
+ ('firefox-32.0-build1', 'firefox-32.0-build1'),
+ ('firefox-32.0.1-build2', 'firefox-32.0.1-build2'),
+ ('firefox-32.0b3-build4', 'firefox-32.0b3-build4'),
+ ('firefox-32.0a1-build5', 'firefox-32.0a1-build5'),
+ ('firefox-32.0a2-build6', 'firefox-32.0a2-build6'),
+ ('firefox-32.0esr-build7', 'firefox-32.0esr-build7'),
+ ('firefox-32.0.1esr-build8', 'firefox-32.0.1esr-build8'),
+
+ ('firefox-32.0build1', 'firefox-32.0-build1'),
+)))
+def test_balrog_release_name_parse(string, expected_string):
+ assert str(BalrogReleaseName.parse(string)) == expected_string
+
+
+@pytest.mark.parametrize('string, ExpectedErrorType', (
+ ('firefox-32.0', PatternNotMatchedError),
+
+ ('firefox32.0-build1', PatternNotMatchedError),
+ ('firefox32.0build1', PatternNotMatchedError),
+ ('firefox-32.0--build1', PatternNotMatchedError),
+ ('firefox-build1', PatternNotMatchedError),
+ ('nonexistingproduct-32.0-build1', PatternNotMatchedError),
+
+ ('firefox-32-build1', PatternNotMatchedError),
+ ('firefox-32.b2-build1', PatternNotMatchedError),
+ ('firefox-.1-build1', PatternNotMatchedError),
+ ('firefox-32.0.0-build1', PatternNotMatchedError),
+ ('firefox-32.2-build1', PatternNotMatchedError),
+ ('firefox-32.02-build1', PatternNotMatchedError),
+ ('firefox-32.0a0-build1', ValueError),
+ ('firefox-32.0b0-build1', ValueError),
+ ('firefox-32.0.1a1-build1', PatternNotMatchedError),
+ ('firefox-32.0.1a2-build1', PatternNotMatchedError),
+ ('firefox-32.0.1b2-build1', PatternNotMatchedError),
+ ('firefox-32.0-build0', ValueError),
+ ('firefox-32.0a1a2-build1', PatternNotMatchedError),
+ ('firefox-32.0a1b2-build1', PatternNotMatchedError),
+ ('firefox-32.0b2esr-build1', PatternNotMatchedError),
+ ('firefox-32.0esrb2-build1', PatternNotMatchedError),
+))
+def test_firefox_version_raises_when_invalid_version_is_given(string, ExpectedErrorType):
+ with pytest.raises(ExpectedErrorType):
+ BalrogReleaseName.parse(string)
+
+
+@pytest.mark.parametrize('previous, next', (
+ ('firefox-32.0-build1', 'firefox-33.0-build1'),
+ ('firefox-32.0-build1', 'firefox-32.1.0-build1'),
+ ('firefox-32.0-build1', 'firefox-32.0.1-build1'),
+ ('firefox-32.0-build1', 'firefox-32.0-build2'),
+
+ ('firefox-32.0a1-build1', 'firefox-32.0-build1'),
+ ('firefox-32.0a2-build1', 'firefox-32.0-build1'),
+ ('firefox-32.0b1-build1', 'firefox-32.0-build1'),
+
+ ('firefox-32.0.1-build1', 'firefox-33.0-build1'),
+ ('firefox-32.0.1-build1', 'firefox-32.1.0-build1'),
+ ('firefox-32.0.1-build1', 'firefox-32.0.2-build1'),
+ ('firefox-32.0.1-build1', 'firefox-32.0.1-build2'),
+
+ ('firefox-32.1.0-build1', 'firefox-33.0-build1'),
+ ('firefox-32.1.0-build1', 'firefox-32.2.0-build1'),
+ ('firefox-32.1.0-build1', 'firefox-32.1.1-build1'),
+ ('firefox-32.1.0-build1', 'firefox-32.1.0-build2'),
+
+ ('firefox-32.0b1-build1', 'firefox-33.0b1-build1'),
+ ('firefox-32.0b1-build1', 'firefox-32.0b2-build1'),
+ ('firefox-32.0b1-build1', 'firefox-32.0b1-build2'),
+
+ ('firefox-2.0-build1', 'firefox-10.0-build1'),
+ ('firefox-10.2.0-build1', 'firefox-10.10.0-build1'),
+ ('firefox-10.0.2-build1', 'firefox-10.0.10-build1'),
+ ('firefox-10.10.1-build1', 'firefox-10.10.10-build1'),
+ ('firefox-10.0-build2', 'firefox-10.0-build10'),
+ ('firefox-10.0b2-build1', 'firefox-10.0b10-build1'),
+))
+def test_balrog_release_implements_lt_operator(previous, next):
+ assert BalrogReleaseName.parse(previous) < BalrogReleaseName.parse(next)
+
+
+def test_fail_balrog_release_lt_operator():
+ with pytest.raises(ValueError):
+ assert BalrogReleaseName.parse('thunderbird-32.0-build1') < BalrogReleaseName.parse('Firefox-32.0-build2')
+
+
+def test_balrog_release_implements_remaining_comparison_operators():
+ assert BalrogReleaseName.parse('firefox-32.0-build1') == BalrogReleaseName.parse('firefox-32.0-build1')
+ assert BalrogReleaseName.parse('firefox-32.0-build1') != BalrogReleaseName.parse('firefox-33.0-build1')
+
+ assert BalrogReleaseName.parse('firefox-32.0-build1') <= BalrogReleaseName.parse('firefox-32.0-build1')
+ assert BalrogReleaseName.parse('firefox-32.0-build1') <= BalrogReleaseName.parse('firefox-33.0-build1')
+
+ assert BalrogReleaseName.parse('firefox-33.0-build1') >= BalrogReleaseName.parse('firefox-32.0-build1')
+ assert BalrogReleaseName.parse('firefox-33.0-build1') >= BalrogReleaseName.parse('firefox-33.0-build1')
+
+ assert BalrogReleaseName.parse('firefox-33.0-build1') > BalrogReleaseName.parse('firefox-32.0-build1')
+ assert not BalrogReleaseName.parse('firefox-33.0-build1') > BalrogReleaseName.parse('firefox-33.0-build1')
+
+ assert not BalrogReleaseName.parse('firefox-32.0-build1') < BalrogReleaseName.parse('firefox-32.0-build1')
+
+ assert BalrogReleaseName.parse('firefox-33.0-build1') != BalrogReleaseName.parse('firefox-32.0-build1')
+
+def test_balrog_release_hashable():
+ """
+    It is possible to hash `BalrogReleaseName`.
+ """
+ hash(BalrogReleaseName.parse('firefox-63.0-build1'))
diff --git a/third_party/python/mozilla-version/mozilla_version/test/test_gecko.py b/third_party/python/mozilla-version/mozilla_version/test/test_gecko.py
new file mode 100644
index 0000000000..020e959e81
--- /dev/null
+++ b/third_party/python/mozilla-version/mozilla_version/test/test_gecko.py
@@ -0,0 +1,411 @@
+import pytest
+import re
+
+from distutils.version import StrictVersion, LooseVersion
+
+import mozilla_version.gecko
+
+from mozilla_version.errors import PatternNotMatchedError, TooManyTypesError, NoVersionTypeError
+from mozilla_version.gecko import (
+ GeckoVersion, FirefoxVersion, DeveditionVersion,
+ ThunderbirdVersion, FennecVersion, GeckoSnapVersion,
+)
+from mozilla_version.test import does_not_raise
+
+
+VALID_VERSIONS = {
+ '32.0a1': 'nightly',
+ '32.0a2': 'aurora_or_devedition',
+ '32.0b2': 'beta',
+ '32.0b10': 'beta',
+ '32.0': 'release',
+ '32.0.1': 'release',
+ '32.0esr': 'esr',
+ '32.0.1esr': 'esr',
+}
+
+
+@pytest.mark.parametrize('major_number, minor_number, patch_number, beta_number, build_number, is_nightly, is_aurora_or_devedition, is_esr, expected_output_string', ((
+ 32, 0, None, None, None, False, False, False, '32.0'
+), (
+ 32, 0, 1, None, None, False, False, False, '32.0.1'
+), (
+ 32, 0, None, 3, None, False, False, False, '32.0b3'
+), (
+ 32, 0, None, None, 10, False, False, False, '32.0build10'
+), (
+ 32, 0, None, None, None, True, False, False, '32.0a1'
+), (
+ 32, 0, None, None, None, False, True, False, '32.0a2'
+), (
+ 32, 0, None, None, None, False, False, True, '32.0esr'
+), (
+ 32, 0, 1, None, None, False, False, True, '32.0.1esr'
+)))
+def test_firefox_version_constructor_and_str(major_number, minor_number, patch_number, beta_number, build_number, is_nightly, is_aurora_or_devedition, is_esr, expected_output_string):
+ assert str(FirefoxVersion(
+ major_number=major_number,
+ minor_number=minor_number,
+ patch_number=patch_number,
+ beta_number=beta_number,
+ build_number=build_number,
+ is_nightly=is_nightly,
+ is_aurora_or_devedition=is_aurora_or_devedition,
+ is_esr=is_esr
+ )) == expected_output_string
+
+
+@pytest.mark.parametrize('major_number, minor_number, patch_number, beta_number, build_number, is_nightly, is_aurora_or_devedition, is_esr, ExpectedErrorType', ((
+ 32, 0, None, 1, None, True, False, False, TooManyTypesError
+), (
+ 32, 0, None, 1, None, False, True, False, TooManyTypesError
+), (
+ 32, 0, None, 1, None, False, False, True, TooManyTypesError
+), (
+ 32, 0, None, None, None, True, True, False, TooManyTypesError
+), (
+ 32, 0, None, None, None, True, False, True, TooManyTypesError
+), (
+ 32, 0, None, None, None, False, True, True, TooManyTypesError
+), (
+ 32, 0, None, None, None, True, True, True, TooManyTypesError
+), (
+ 32, 0, 0, None, None, False, False, False, PatternNotMatchedError
+), (
+ 32, 0, None, 0, None, False, False, False, ValueError
+), (
+ 32, 0, None, None, 0, False, False, False, ValueError
+), (
+ 32, 0, 1, 1, None, False, False, False, PatternNotMatchedError
+), (
+ 32, 0, 1, None, None, True, False, False, PatternNotMatchedError
+), (
+ 32, 0, 1, None, None, False, True, False, PatternNotMatchedError
+), (
+ -1, 0, None, None, None, False, False, False, ValueError
+), (
+ 32, -1, None, None, None, False, False, False, ValueError
+), (
+ 32, 0, -1, None, None, False, False, False, ValueError
+), (
+ 2.2, 0, 0, None, None, False, False, False, ValueError
+), (
+ 'some string', 0, 0, None, None, False, False, False, ValueError
+)))
+def test_fail_firefox_version_constructor(major_number, minor_number, patch_number, beta_number, build_number, is_nightly, is_aurora_or_devedition, is_esr, ExpectedErrorType):
+ with pytest.raises(ExpectedErrorType):
+ FirefoxVersion(
+ major_number=major_number,
+ minor_number=minor_number,
+ patch_number=patch_number,
+ beta_number=beta_number,
+ build_number=build_number,
+ is_nightly=is_nightly,
+ is_aurora_or_devedition=is_aurora_or_devedition,
+ is_esr=is_esr
+ )
+
+
+def test_firefox_version_constructor_minimum_kwargs():
+ assert str(FirefoxVersion(32, 0)) == '32.0'
+ assert str(FirefoxVersion(32, 0, 1)) == '32.0.1'
+ assert str(FirefoxVersion(32, 1, 0)) == '32.1.0'
+ assert str(FirefoxVersion(32, 0, 1, 1)) == '32.0.1build1'
+ assert str(FirefoxVersion(32, 0, beta_number=1)) == '32.0b1'
+ assert str(FirefoxVersion(32, 0, is_nightly=True)) == '32.0a1'
+ assert str(FirefoxVersion(32, 0, is_aurora_or_devedition=True)) == '32.0a2'
+ assert str(FirefoxVersion(32, 0, is_esr=True)) == '32.0esr'
+ assert str(FirefoxVersion(32, 0, 1, is_esr=True)) == '32.0.1esr'
+
+
+@pytest.mark.parametrize('version_string, ExpectedErrorType', (
+ ('32', PatternNotMatchedError),
+ ('32.b2', PatternNotMatchedError),
+ ('.1', PatternNotMatchedError),
+ ('32.0.0', PatternNotMatchedError),
+ ('32.2', PatternNotMatchedError),
+ ('32.02', PatternNotMatchedError),
+ ('32.0a0', ValueError),
+ ('32.0b0', ValueError),
+ ('32.0.1a1', PatternNotMatchedError),
+ ('32.0.1a2', PatternNotMatchedError),
+ ('32.0.1b2', PatternNotMatchedError),
+ ('32.0build0', ValueError),
+ ('32.0a1a2', PatternNotMatchedError),
+ ('32.0a1b2', PatternNotMatchedError),
+ ('32.0b2esr', PatternNotMatchedError),
+ ('32.0esrb2', PatternNotMatchedError),
+))
+def test_firefox_version_raises_when_invalid_version_is_given(version_string, ExpectedErrorType):
+ with pytest.raises(ExpectedErrorType):
+ FirefoxVersion.parse(version_string)
+
+
+@pytest.mark.parametrize('version_string, expected_type', VALID_VERSIONS.items())
+def test_firefox_version_is_of_a_defined_type(version_string, expected_type):
+ release = FirefoxVersion.parse(version_string)
+ assert getattr(release, 'is_{}'.format(expected_type))
+
+
+@pytest.mark.parametrize('previous, next', (
+ ('32.0', '33.0'),
+ ('32.0', '32.1.0'),
+ ('32.0', '32.0.1'),
+ ('32.0build1', '32.0build2'),
+
+ ('32.0.1', '33.0'),
+ ('32.0.1', '32.1.0'),
+ ('32.0.1', '32.0.2'),
+ ('32.0.1build1', '32.0.1build2'),
+
+ ('32.1.0', '33.0'),
+ ('32.1.0', '32.2.0'),
+ ('32.1.0', '32.1.1'),
+ ('32.1.0build1', '32.1.0build2'),
+
+ ('32.0b1', '33.0b1'),
+ ('32.0b1', '32.0b2'),
+ ('32.0b1build1', '32.0b1build2'),
+
+ ('32.0a1', '32.0a2'),
+ ('32.0a1', '32.0b1'),
+ ('32.0a1', '32.0'),
+ ('32.0a1', '32.0esr'),
+
+ ('32.0a2', '32.0b1'),
+ ('32.0a2', '32.0'),
+ ('32.0a2', '32.0esr'),
+
+ ('32.0b1', '32.0'),
+ ('32.0b1', '32.0esr'),
+
+ ('32.0', '32.0esr'),
+
+ ('2.0', '10.0'),
+ ('10.2.0', '10.10.0'),
+ ('10.0.2', '10.0.10'),
+ ('10.10.1', '10.10.10'),
+ ('10.0build2', '10.0build10'),
+ ('10.0b2', '10.0b10'),
+))
+def test_firefox_version_implements_lt_operator(previous, next):
+ assert FirefoxVersion.parse(previous) < FirefoxVersion.parse(next)
+
+
+@pytest.mark.parametrize('equivalent_version_string', (
+ '32.0', '032.0', '32.0build1', '32.0build01', '32.0-build1', '32.0build2',
+))
+def test_firefox_version_implements_eq_operator(equivalent_version_string):
+ assert FirefoxVersion.parse('32.0') == FirefoxVersion.parse(equivalent_version_string)
+ # raw strings are also converted
+ assert FirefoxVersion.parse('32.0') == equivalent_version_string
+
+
+@pytest.mark.parametrize('wrong_type', (
+ 32,
+ 32.0,
+ ('32', '0', '1'),
+ ['32', '0', '1'],
+ LooseVersion('32.0'),
+ StrictVersion('32.0'),
+))
+def test_firefox_version_raises_eq_operator(wrong_type):
+ with pytest.raises(ValueError):
+ assert FirefoxVersion.parse('32.0') == wrong_type
+ # AttributeError is raised by LooseVersion and StrictVersion
+ with pytest.raises((ValueError, AttributeError)):
+ assert wrong_type == FirefoxVersion.parse('32.0')
+
+
+def test_firefox_version_implements_remaining_comparison_operators():
+ assert FirefoxVersion.parse('32.0') <= FirefoxVersion.parse('32.0')
+ assert FirefoxVersion.parse('32.0') <= FirefoxVersion.parse('33.0')
+
+ assert FirefoxVersion.parse('33.0') >= FirefoxVersion.parse('32.0')
+ assert FirefoxVersion.parse('33.0') >= FirefoxVersion.parse('33.0')
+
+ assert FirefoxVersion.parse('33.0') > FirefoxVersion.parse('32.0')
+ assert not FirefoxVersion.parse('33.0') > FirefoxVersion.parse('33.0')
+
+ assert not FirefoxVersion.parse('32.0') < FirefoxVersion.parse('32.0')
+
+ assert FirefoxVersion.parse('33.0') != FirefoxVersion.parse('32.0')
+
+
+@pytest.mark.parametrize('version_string, expected_output', (
+ ('32.0', '32.0'),
+ ('032.0', '32.0'),
+ ('32.0build1', '32.0build1'),
+ ('32.0build01', '32.0build1'),
+ ('32.0.1', '32.0.1'),
+ ('32.0a1', '32.0a1'),
+ ('32.0a2', '32.0a2'),
+ ('32.0b1', '32.0b1'),
+ ('32.0b01', '32.0b1'),
+ ('32.0esr', '32.0esr'),
+ ('32.0.1esr', '32.0.1esr'),
+))
+def test_firefox_version_implements_str_operator(version_string, expected_output):
+ assert str(FirefoxVersion.parse(version_string)) == expected_output
+
+
+_SUPER_PERMISSIVE_PATTERN = re.compile(r"""
+(?P<major_number>\d+)\.(?P<minor_number>\d+)(\.(\d+))*
+(?P<is_nightly>a1)?(?P<is_aurora_or_devedition>a2)?(b(?P<beta_number>\d+))?
+(?P<is_esr>esr)?
+""", re.VERBOSE)
+
+
+@pytest.mark.parametrize('version_string', (
+ '32.0a1a2', '32.0a1b2', '32.0b2esr'
+))
+def test_firefox_version_ensures_it_does_not_have_multiple_type(monkeypatch, version_string):
+ # Let's make sure the sanity checks detect a broken regular expression
+ original_pattern = FirefoxVersion._VALID_ENOUGH_VERSION_PATTERN
+ FirefoxVersion._VALID_ENOUGH_VERSION_PATTERN = _SUPER_PERMISSIVE_PATTERN
+
+ with pytest.raises(TooManyTypesError):
+ FirefoxVersion.parse(version_string)
+
+ FirefoxVersion._VALID_ENOUGH_VERSION_PATTERN = original_pattern
+
+
+def test_firefox_version_ensures_a_new_added_release_type_is_caught(monkeypatch):
+ # Let's make sure the sanity checks detect a broken regular expression
+ original_pattern = FirefoxVersion._VALID_ENOUGH_VERSION_PATTERN
+ FirefoxVersion._VALID_ENOUGH_VERSION_PATTERN = _SUPER_PERMISSIVE_PATTERN
+
+ # And a broken type detection
+ original_is_release = FirefoxVersion.is_release
+ FirefoxVersion.is_release = False
+
+ with pytest.raises(NoVersionTypeError):
+ mozilla_version.gecko.FirefoxVersion.parse('32.0.0.0')
+
+ FirefoxVersion.is_release = original_is_release
+ FirefoxVersion._VALID_ENOUGH_VERSION_PATTERN = original_pattern
+
+
+@pytest.mark.parametrize('version_string', (
+ '33.1', '33.1build1', '33.1build2', '33.1build3',
+ '38.0.5b1', '38.0.5b1build1', '38.0.5b1build2',
+ '38.0.5b2', '38.0.5b2build1',
+ '38.0.5b3', '38.0.5b3build1',
+))
+def test_firefox_version_supports_released_edge_cases(version_string):
+ assert str(FirefoxVersion.parse(version_string)) == version_string
+ for Class in (DeveditionVersion, FennecVersion, ThunderbirdVersion):
+ if Class == FennecVersion and version_string in ('33.1', '33.1build1', '33.1build2'):
+ # These edge cases also exist in Fennec
+ continue
+ with pytest.raises(PatternNotMatchedError):
+ Class.parse(version_string)
+
+
+@pytest.mark.parametrize('version_string', (
+ '54.0b11', '54.0b12', '55.0b1'
+))
+def test_devedition_version(version_string):
+ DeveditionVersion.parse(version_string)
+
+
+@pytest.mark.parametrize('version_string', (
+ '53.0a1', '53.0b1', '54.0b10', '55.0', '55.0a1', '60.0esr'
+))
+def test_devedition_version_bails_on_wrong_version(version_string):
+ with pytest.raises(PatternNotMatchedError):
+ DeveditionVersion.parse(version_string)
+
+
+@pytest.mark.parametrize('version_string', (
+ '33.1', '33.1build1', '33.1build2',
+ '38.0.5b4', '38.0.5b4build1'
+))
+def test_fennec_version_supports_released_edge_cases(version_string):
+ assert str(FennecVersion.parse(version_string)) == version_string
+ for Class in (FirefoxVersion, DeveditionVersion, ThunderbirdVersion):
+ if Class == FirefoxVersion and version_string in ('33.1', '33.1build1', '33.1build2'):
+ # These edge cases also exist in Firefox
+ continue
+ with pytest.raises(PatternNotMatchedError):
+ Class.parse(version_string)
+
+
+@pytest.mark.parametrize('version_string, expectation', (
+ ('68.0a1', does_not_raise()),
+ ('68.0b3', does_not_raise()),
+ ('68.0b17', does_not_raise()),
+ ('68.0', does_not_raise()),
+ ('68.0.1', does_not_raise()),
+ ('68.1a1', does_not_raise()),
+ ('68.1b2', does_not_raise()),
+ ('68.1.0', does_not_raise()),
+ ('68.1', does_not_raise()),
+ ('68.1b3', does_not_raise()),
+ ('68.1.1', does_not_raise()),
+ ('68.2a1', does_not_raise()),
+ ('68.2b1', does_not_raise()),
+ ('68.2', does_not_raise()),
+
+ ('67.1', pytest.raises(PatternNotMatchedError)),
+ ('68.0.1a1', pytest.raises(PatternNotMatchedError)),
+ ('68.1a1b1', pytest.raises(PatternNotMatchedError)),
+ ('68.0.1b1', pytest.raises(PatternNotMatchedError)),
+ ('68.1.0a1', pytest.raises(PatternNotMatchedError)),
+ ('68.1.0b1', pytest.raises(PatternNotMatchedError)),
+ ('68.1.1a1', pytest.raises(PatternNotMatchedError)),
+ ('68.1.1b2', pytest.raises(PatternNotMatchedError)),
+
+ ('69.0a1', pytest.raises(PatternNotMatchedError)),
+ ('69.0b3', pytest.raises(PatternNotMatchedError)),
+ ('69.0', pytest.raises(PatternNotMatchedError)),
+ ('69.0.1', pytest.raises(PatternNotMatchedError)),
+ ('69.1', pytest.raises(PatternNotMatchedError)),
+
+ ('70.0', pytest.raises(PatternNotMatchedError)),
+))
+def test_fennec_version_ends_at_68(version_string, expectation):
+ with expectation:
+ FennecVersion.parse(version_string)
+
+
+@pytest.mark.parametrize('version_string', (
+ '45.1b1', '45.1b1build1',
+ '45.2', '45.2build1', '45.2build2',
+ '45.2b1', '45.2b1build2',
+))
+def test_thunderbird_version_supports_released_edge_cases(version_string):
+ assert str(ThunderbirdVersion.parse(version_string)) == version_string
+ for Class in (FirefoxVersion, DeveditionVersion, FennecVersion):
+ with pytest.raises(PatternNotMatchedError):
+ Class.parse(version_string)
+
+
+@pytest.mark.parametrize('version_string', (
+ '63.0b7-1', '63.0b7-2',
+ '62.0-1', '62.0-2',
+ '60.2.1esr-1', '60.2.0esr-2',
+ '60.0esr-1', '60.0esr-13',
+    # TODO Bug 1451694: Figure out what nightly version numbers look like
+))
+def test_gecko_snap_version(version_string):
+ GeckoSnapVersion.parse(version_string)
+
+
+@pytest.mark.parametrize('version_string', (
+ '32.0a2', '32.0esr1', '32.0-build1',
+))
+def test_gecko_snap_version_bails_on_wrong_version(version_string):
+ with pytest.raises(PatternNotMatchedError):
+ GeckoSnapVersion.parse(version_string)
+
+
+def test_gecko_snap_version_implements_its_own_string():
+ assert str(GeckoSnapVersion.parse('63.0b7-1')) == '63.0b7-1'
+
+
+def test_gecko_version_hashable():
+ """
+ It is possible to hash `GeckoVersion`.
+ """
+ hash(GeckoVersion.parse('63.0'))
diff --git a/third_party/python/mozilla-version/mozilla_version/test/test_maven.py b/third_party/python/mozilla-version/mozilla_version/test/test_maven.py
new file mode 100644
index 0000000000..f8d539872d
--- /dev/null
+++ b/third_party/python/mozilla-version/mozilla_version/test/test_maven.py
@@ -0,0 +1,87 @@
+import pytest
+
+from distutils.version import LooseVersion, StrictVersion
+
+from mozilla_version.errors import PatternNotMatchedError
+from mozilla_version.maven import MavenVersion
+
+@pytest.mark.parametrize('major_number, minor_number, patch_number, is_snapshot, expected_output_string', ((
+ 32, 0, None, False, '32.0'
+), (
+ 32, 0, 1, False, '32.0.1'
+), (
+ 32, 0, None, True, '32.0-SNAPSHOT'
+), (
+ 32, 0, 1, True, '32.0.1-SNAPSHOT'
+)))
+def test_maven_version_constructor_and_str(major_number, minor_number, patch_number, is_snapshot, expected_output_string):
+ assert str(MavenVersion(
+ major_number=major_number,
+ minor_number=minor_number,
+ patch_number=patch_number,
+ is_snapshot=is_snapshot,
+ )) == expected_output_string
+
+
+def test_maven_version_constructor_minimum_kwargs():
+ assert str(MavenVersion(32, 0)) == '32.0'
+ assert str(MavenVersion(32, 0, 1)) == '32.0.1'
+ assert str(MavenVersion(32, 1, 0)) == '32.1.0'
+ assert str(MavenVersion(32, 1, 0, False)) == '32.1.0'
+ assert str(MavenVersion(32, 1, 0, True)) == '32.1.0-SNAPSHOT'
+
+
+@pytest.mark.parametrize('version_string, ExpectedErrorType', (
+ ('32.0SNAPSHOT', PatternNotMatchedError),
+ ('32.1.0SNAPSHOT', PatternNotMatchedError),
+))
+def test_maven_version_raises_when_invalid_version_is_given(version_string, ExpectedErrorType):
+ with pytest.raises(ExpectedErrorType):
+ MavenVersion.parse(version_string)
+
+
+@pytest.mark.parametrize('previous, next', (
+ ('32.0-SNAPSHOT', '32.0'),
+ ('31.0', '32.0-SNAPSHOT'),
+ ('32.0', '32.0.1-SNAPSHOT'),
+ ('32.0.1-SNAPSHOT', '32.1.0'),
+ ('32.0.1-SNAPSHOT', '33.0'),
+))
+def test_maven_version_implements_lt_operator(previous, next):
+ assert MavenVersion.parse(previous) < MavenVersion.parse(next)
+
+
+@pytest.mark.parametrize('previous, next', (
+ ('32.0', '32.0-SNAPSHOT'),
+ ('32.0-SNAPSHOT', '31.0'),
+ ('32.0.1-SNAPSHOT', '32.0'),
+ ('32.1.0', '32.0.1-SNAPSHOT'),
+))
+def test_maven_version_implements_gt_operator(previous, next):
+ assert MavenVersion.parse(previous) > MavenVersion.parse(next)
+
+
+@pytest.mark.parametrize('wrong_type', (
+ 32,
+ 32.0,
+ ('32', '0', '1'),
+ ['32', '0', '1'],
+ LooseVersion('32.0'),
+ StrictVersion('32.0'),
+))
+def test_base_version_raises_eq_operator(wrong_type):
+ with pytest.raises(ValueError):
+ assert MavenVersion.parse('32.0') == wrong_type
+ # AttributeError is raised by LooseVersion and StrictVersion
+ with pytest.raises((ValueError, AttributeError)):
+ assert wrong_type == MavenVersion.parse('32.0')
+
+
+def test_maven_version_implements_eq_operator():
+ assert MavenVersion.parse('32.0-SNAPSHOT') == MavenVersion.parse('32.0-SNAPSHOT')
+ # raw strings are also converted
+ assert MavenVersion.parse('32.0-SNAPSHOT') == '32.0-SNAPSHOT'
+
+
+def test_maven_version_hashable():
+ hash(MavenVersion.parse('32.0.1'))
diff --git a/third_party/python/mozilla-version/mozilla_version/test/test_version.py b/third_party/python/mozilla-version/mozilla_version/test/test_version.py
new file mode 100644
index 0000000000..c74bdb735a
--- /dev/null
+++ b/third_party/python/mozilla-version/mozilla_version/test/test_version.py
@@ -0,0 +1,171 @@
+import pytest
+
+from distutils.version import LooseVersion, StrictVersion
+
+from mozilla_version.errors import PatternNotMatchedError
+from mozilla_version.version import BaseVersion, VersionType
+
+
+@pytest.mark.parametrize('major_number, minor_number, patch_number, expected_output_string', ((
+ 32, 0, None, '32.0'
+), (
+ 32, 0, 1, '32.0.1'
+)))
+def test_base_version_constructor_and_str(major_number, minor_number, patch_number, expected_output_string):
+ assert str(BaseVersion(
+ major_number=major_number,
+ minor_number=minor_number,
+ patch_number=patch_number,
+ )) == expected_output_string
+
+
+@pytest.mark.parametrize('major_number, minor_number, patch_number, ExpectedErrorType', ((
+ -1, 0, None, ValueError
+), (
+ 32, -1, None, ValueError
+), (
+ 32, 0, -1, ValueError
+), (
+ 2.2, 0, 0, ValueError
+), (
+ 'some string', 0, 0, ValueError
+)))
+def test_fail_base_version_constructor(major_number, minor_number, patch_number, ExpectedErrorType):
+ with pytest.raises(ExpectedErrorType):
+ BaseVersion(
+ major_number=major_number,
+ minor_number=minor_number,
+ patch_number=patch_number,
+ )
+
+
+def test_base_version_constructor_minimum_kwargs():
+ assert str(BaseVersion(32, 0)) == '32.0'
+ assert str(BaseVersion(32, 0, 1)) == '32.0.1'
+ assert str(BaseVersion(32, 1, 0)) == '32.1.0'
+
+
+@pytest.mark.parametrize('version_string, ExpectedErrorType', (
+ ('32', PatternNotMatchedError),
+ ('.1', PatternNotMatchedError),
+))
+def test_base_version_raises_when_invalid_version_is_given(version_string, ExpectedErrorType):
+ with pytest.raises(ExpectedErrorType):
+ BaseVersion.parse(version_string)
+
+
+@pytest.mark.parametrize('previous, next', (
+ ('32.0', '33.0'),
+ ('32.0', '32.1.0'),
+ ('32.0', '32.0.1'),
+
+ ('32.0.1', '33.0'),
+ ('32.0.1', '32.1.0'),
+ ('32.0.1', '32.0.2'),
+
+ ('32.1.0', '33.0'),
+ ('32.1.0', '32.2.0'),
+ ('32.1.0', '32.1.1'),
+
+ ('2.0', '10.0'),
+ ('10.2.0', '10.10.0'),
+ ('10.0.2', '10.0.10'),
+ ('10.10.1', '10.10.10'),
+))
+def test_base_version_implements_lt_operator(previous, next):
+ assert BaseVersion.parse(previous) < BaseVersion.parse(next)
+
+
+@pytest.mark.parametrize('equivalent_version_string', (
+ '32.0', '032.0', '32.00'
+))
+def test_base_version_implements_eq_operator(equivalent_version_string):
+ assert BaseVersion.parse('32.0') == BaseVersion.parse(equivalent_version_string)
+ # raw strings are also converted
+ assert BaseVersion.parse('32.0') == equivalent_version_string
+
+
+@pytest.mark.parametrize('wrong_type', (
+ 32,
+ 32.0,
+ ('32', '0', '1'),
+ ['32', '0', '1'],
+ LooseVersion('32.0'),
+ StrictVersion('32.0'),
+))
+def test_base_version_raises_eq_operator(wrong_type):
+ with pytest.raises(ValueError):
+ assert BaseVersion.parse('32.0') == wrong_type
+ # AttributeError is raised by LooseVersion and StrictVersion
+ with pytest.raises((ValueError, AttributeError)):
+ assert wrong_type == BaseVersion.parse('32.0')
+
+
+def test_base_version_implements_remaining_comparison_operators():
+ assert BaseVersion.parse('32.0') <= BaseVersion.parse('32.0')
+ assert BaseVersion.parse('32.0') <= BaseVersion.parse('33.0')
+
+ assert BaseVersion.parse('33.0') >= BaseVersion.parse('32.0')
+ assert BaseVersion.parse('33.0') >= BaseVersion.parse('33.0')
+
+ assert BaseVersion.parse('33.0') > BaseVersion.parse('32.0')
+ assert not BaseVersion.parse('33.0') > BaseVersion.parse('33.0')
+
+ assert not BaseVersion.parse('32.0') < BaseVersion.parse('32.0')
+
+ assert BaseVersion.parse('33.0') != BaseVersion.parse('32.0')
+
+
+def test_base_version_hashable():
+ hash(BaseVersion.parse('63.0'))
+
+
+@pytest.mark.parametrize('previous, next', (
+ (VersionType.NIGHTLY, VersionType.AURORA_OR_DEVEDITION),
+ (VersionType.NIGHTLY, VersionType.BETA),
+ (VersionType.NIGHTLY, VersionType.RELEASE),
+ (VersionType.NIGHTLY, VersionType.ESR),
+
+ (VersionType.AURORA_OR_DEVEDITION, VersionType.BETA),
+ (VersionType.AURORA_OR_DEVEDITION, VersionType.RELEASE),
+ (VersionType.AURORA_OR_DEVEDITION, VersionType.ESR),
+
+ (VersionType.BETA, VersionType.RELEASE),
+ (VersionType.BETA, VersionType.ESR),
+
+ (VersionType.RELEASE, VersionType.ESR),
+))
+def test_version_type_implements_lt_operator(previous, next):
+ assert previous < next
+
+
+@pytest.mark.parametrize('first, second', (
+ (VersionType.NIGHTLY, VersionType.NIGHTLY),
+ (VersionType.AURORA_OR_DEVEDITION, VersionType.AURORA_OR_DEVEDITION),
+ (VersionType.BETA, VersionType.BETA),
+ (VersionType.RELEASE, VersionType.RELEASE),
+ (VersionType.ESR, VersionType.ESR),
+))
+def test_version_type_implements_eq_operator(first, second):
+ assert first == second
+
+
+def test_version_type_implements_remaining_comparison_operators():
+ assert VersionType.NIGHTLY <= VersionType.NIGHTLY
+ assert VersionType.NIGHTLY <= VersionType.BETA
+
+ assert VersionType.NIGHTLY >= VersionType.NIGHTLY
+ assert VersionType.BETA >= VersionType.NIGHTLY
+
+ assert not VersionType.NIGHTLY > VersionType.NIGHTLY
+ assert VersionType.BETA > VersionType.NIGHTLY
+
+ assert not VersionType.BETA < VersionType.NIGHTLY
+
+ assert VersionType.NIGHTLY != VersionType.BETA
+
+
+def test_version_type_compare():
+ assert VersionType.NIGHTLY.compare(VersionType.NIGHTLY) == 0
+ assert VersionType.NIGHTLY.compare(VersionType.BETA) < 0
+ assert VersionType.BETA.compare(VersionType.NIGHTLY) > 0
diff --git a/third_party/python/mozilla-version/mozilla_version/version.py b/third_party/python/mozilla-version/mozilla_version/version.py
new file mode 100644
index 0000000000..1f78bec590
--- /dev/null
+++ b/third_party/python/mozilla-version/mozilla_version/version.py
@@ -0,0 +1,177 @@
+"""Defines common characteristics of a version at Mozilla."""
+
+import attr
+import re
+
+from enum import Enum
+
+from mozilla_version.errors import MissingFieldError, PatternNotMatchedError
+from mozilla_version.parser import (
+ get_value_matched_by_regex,
+ does_regex_have_group,
+ positive_int,
+ positive_int_or_none
+)
+
+
+@attr.s(frozen=True, cmp=False, hash=True)
+class BaseVersion(object):
+ """Class that validates and handles general version numbers."""
+
+ major_number = attr.ib(type=int, converter=positive_int)
+ minor_number = attr.ib(type=int, converter=positive_int)
+ patch_number = attr.ib(type=int, converter=positive_int_or_none, default=None)
+
+ _MANDATORY_NUMBERS = ('major_number', 'minor_number')
+ _OPTIONAL_NUMBERS = ('patch_number', )
+ _ALL_NUMBERS = _MANDATORY_NUMBERS + _OPTIONAL_NUMBERS
+
+ _VALID_ENOUGH_VERSION_PATTERN = re.compile(r"""
+ ^(?P<major_number>\d+)
+ \.(?P<minor_number>\d+)
+ (\.(?P<patch_number>\d+))?$""", re.VERBOSE)
+
+ @classmethod
+ def parse(cls, version_string, regex_groups=()):
+ """Construct an object representing a valid version number."""
+ regex_matches = cls._VALID_ENOUGH_VERSION_PATTERN.match(version_string)
+
+ if regex_matches is None:
+ raise PatternNotMatchedError(version_string, cls._VALID_ENOUGH_VERSION_PATTERN)
+
+ kwargs = {}
+
+ for field in cls._MANDATORY_NUMBERS:
+ kwargs[field] = get_value_matched_by_regex(field, regex_matches, version_string)
+ for field in cls._OPTIONAL_NUMBERS:
+ try:
+ kwargs[field] = get_value_matched_by_regex(field, regex_matches, version_string)
+ except MissingFieldError:
+ pass
+
+ for regex_group in regex_groups:
+ kwargs[regex_group] = does_regex_have_group(regex_matches, regex_group)
+
+ return cls(**kwargs)
+
+ def __str__(self):
+ """Implement string representation.
+
+ Computes a new string based on the given attributes.
+ """
+ semvers = [str(self.major_number), str(self.minor_number)]
+ if self.patch_number is not None:
+ semvers.append(str(self.patch_number))
+
+ return '.'.join(semvers)
+
+ def __eq__(self, other):
+ """Implement `==` operator."""
+ return self._compare(other) == 0
+
+ def __ne__(self, other):
+ """Implement `!=` operator."""
+ return self._compare(other) != 0
+
+ def __lt__(self, other):
+ """Implement `<` operator."""
+ return self._compare(other) < 0
+
+ def __le__(self, other):
+ """Implement `<=` operator."""
+ return self._compare(other) <= 0
+
+ def __gt__(self, other):
+ """Implement `>` operator."""
+ return self._compare(other) > 0
+
+ def __ge__(self, other):
+ """Implement `>=` operator."""
+ return self._compare(other) >= 0
+
+ def _compare(self, other):
+ """Compare this release with another.
+
+ Returns:
+ 0 if equal
+            < 0 if this precedes the other
+ > 0 if the other precedes this
+
+ """
+ if isinstance(other, str):
+ other = BaseVersion.parse(other)
+ elif not isinstance(other, BaseVersion):
+ raise ValueError('Cannot compare "{}", type not supported!'.format(other))
+
+ for field in ('major_number', 'minor_number', 'patch_number'):
+ this_number = getattr(self, field)
+ this_number = 0 if this_number is None else this_number
+ other_number = getattr(other, field)
+ other_number = 0 if other_number is None else other_number
+
+ difference = this_number - other_number
+
+ if difference != 0:
+ return difference
+
+ return 0
+
+
+class VersionType(Enum):
+ """Enum that sorts types of versions (e.g.: nightly, beta, release, esr).
+
+ Supports comparison. `ESR` is considered higher than `RELEASE` (even if they technically have
+ the same codebase). For instance: 60.0.1 < 60.0.1esr but 61.0 > 60.0.1esr.
+    This choice has a practical use case: if you have a list of Release and ESR versions, you
+    can easily extract one kind or the other thanks to the `VersionType`.
+
+ Examples:
+ .. code-block:: python
+
+ assert VersionType.NIGHTLY == VersionType.NIGHTLY
+ assert VersionType.ESR > VersionType.RELEASE
+
+ """
+
+ NIGHTLY = 1
+ AURORA_OR_DEVEDITION = 2
+ BETA = 3
+ RELEASE = 4
+ ESR = 5
+
+ def __eq__(self, other):
+ """Implement `==` operator."""
+ return self.compare(other) == 0
+
+ def __ne__(self, other):
+ """Implement `!=` operator."""
+ return self.compare(other) != 0
+
+ def __lt__(self, other):
+ """Implement `<` operator."""
+ return self.compare(other) < 0
+
+ def __le__(self, other):
+ """Implement `<=` operator."""
+ return self.compare(other) <= 0
+
+ def __gt__(self, other):
+ """Implement `>` operator."""
+ return self.compare(other) > 0
+
+ def __ge__(self, other):
+ """Implement `>=` operator."""
+ return self.compare(other) >= 0
+
+ def compare(self, other):
+        """Compare this `VersionType` with another.
+
+ Returns:
+ 0 if equal
+            < 0 if this precedes the other
+ > 0 if the other precedes this
+
+ """
+ return self.value - other.value
+
+ __hash__ = Enum.__hash__
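+
+
+# A minimal sanity sketch of the comparison rules above; it only relies on names defined in this
+# module and mirrors cases also exercised by the test suite.
+if __name__ == '__main__':
+    # Leading zeros are normalised away and raw strings are parsed before being compared.
+    assert BaseVersion.parse('32.0') == '032.0'
+    assert BaseVersion.parse('32.0') < BaseVersion.parse('32.0.1')
+    # ESR sorts above RELEASE even though both ship the same codebase.
+    assert VersionType.ESR > VersionType.RELEASE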
diff --git a/third_party/python/mozilla-version/requirements-coveralls.txt b/third_party/python/mozilla-version/requirements-coveralls.txt
new file mode 100644
index 0000000000..fdc6b1bd04
--- /dev/null
+++ b/third_party/python/mozilla-version/requirements-coveralls.txt
@@ -0,0 +1,140 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+# pip-compile --generate-hashes --output-file requirements-coveralls.txt requirements-coveralls.txt.in
+#
+asn1crypto==0.24.0 \
+ --hash=sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87 \
+ --hash=sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49 \
+ # via cryptography
+certifi==2018.11.29 \
+ --hash=sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7 \
+ --hash=sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033 \
+ # via requests, urllib3
+cffi==1.12.1 \
+ --hash=sha256:0b5f895714a7a9905148fc51978c62e8a6cbcace30904d39dcd0d9e2265bb2f6 \
+ --hash=sha256:27cdc7ba35ee6aa443271d11583b50815c4bb52be89a909d0028e86c21961709 \
+ --hash=sha256:2d4a38049ea93d5ce3c7659210393524c1efc3efafa151bd85d196fa98fce50a \
+ --hash=sha256:3262573d0d60fc6b9d0e0e6e666db0e5045cbe8a531779aa0deb3b425ec5a282 \
+ --hash=sha256:358e96cfffc185ab8f6e7e425c7bb028931ed08d65402fbcf3f4e1bff6e66556 \
+ --hash=sha256:37c7db824b5687fbd7ea5519acfd054c905951acc53503547c86be3db0580134 \
+ --hash=sha256:39b9554dfe60f878e0c6ff8a460708db6e1b1c9cc6da2c74df2955adf83e355d \
+ --hash=sha256:42b96a77acf8b2d06821600fa87c208046decc13bd22a4a0e65c5c973443e0da \
+ --hash=sha256:5b37dde5035d3c219324cac0e69d96495970977f310b306fa2df5910e1f329a1 \
+ --hash=sha256:5d35819f5566d0dd254f273d60cf4a2dcdd3ae3003dfd412d40b3fe8ffd87509 \
+ --hash=sha256:5df73aa465e53549bd03c819c1bc69fb85529a5e1a693b7b6cb64408dd3970d1 \
+ --hash=sha256:7075b361f7a4d0d4165439992d0b8a3cdfad1f302bf246ed9308a2e33b046bd3 \
+ --hash=sha256:7678b5a667b0381c173abe530d7bdb0e6e3b98e062490618f04b80ca62686d96 \
+ --hash=sha256:7dfd996192ff8a535458c17f22ff5eb78b83504c34d10eefac0c77b1322609e2 \
+ --hash=sha256:8a3be5d31d02c60f84c4fd4c98c5e3a97b49f32e16861367f67c49425f955b28 \
+ --hash=sha256:9812e53369c469506b123aee9dcb56d50c82fad60c5df87feb5ff59af5b5f55c \
+ --hash=sha256:9b6f7ba4e78c52c1a291d0c0c0bd745d19adde1a9e1c03cb899f0c6efd6f8033 \
+ --hash=sha256:a85bc1d7c3bba89b3d8c892bc0458de504f8b3bcca18892e6ed15b5f7a52ad9d \
+ --hash=sha256:aa6b9c843ad645ebb12616de848cc4e25a40f633ccc293c3c9fe34107c02c2ea \
+ --hash=sha256:bae1aa56ee00746798beafe486daa7cfb586cd395c6ce822ba3068e48d761bc0 \
+ --hash=sha256:bae96e26510e4825d5910a196bf6b5a11a18b87d9278db6d08413be8ea799469 \
+ --hash=sha256:bd78df3b594013b227bf31d0301566dc50ba6f40df38a70ded731d5a8f2cb071 \
+ --hash=sha256:c2711197154f46d06f73542c539a0ff5411f1951fab391e0a4ac8359badef719 \
+ --hash=sha256:d998c20e3deed234fca993fd6c8314cb7cbfda05fd170f1bd75bb5d7421c3c5a \
+ --hash=sha256:df4f840d77d9e37136f8e6b432fecc9d6b8730f18f896e90628712c793466ce6 \
+ --hash=sha256:f5653c2581acb038319e6705d4e3593677676df14b112f13e0b5b44b6a18df1a \
+ --hash=sha256:f7c7aa485a2e2250d455148470ffd0195eecc3d845122635202d7467d6f7b4cf \
+ --hash=sha256:f9e2c66a6493147de835f207f198540a56b26745ce4f272fbc7c2f2cfebeb729 # via cryptography
+chardet==3.0.4 \
+ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
+ --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \
+ # via requests
+coverage==4.5.2 \
+ --hash=sha256:09e47c529ff77bf042ecfe858fb55c3e3eb97aac2c87f0349ab5a7efd6b3939f \
+ --hash=sha256:0a1f9b0eb3aa15c990c328535655847b3420231af299386cfe5efc98f9c250fe \
+ --hash=sha256:0cc941b37b8c2ececfed341444a456912e740ecf515d560de58b9a76562d966d \
+ --hash=sha256:10e8af18d1315de936d67775d3a814cc81d0747a1a0312d84e27ae5610e313b0 \
+ --hash=sha256:1b4276550b86caa60606bd3572b52769860a81a70754a54acc8ba789ce74d607 \
+ --hash=sha256:1e8a2627c48266c7b813975335cfdea58c706fe36f607c97d9392e61502dc79d \
+ --hash=sha256:2b224052bfd801beb7478b03e8a66f3f25ea56ea488922e98903914ac9ac930b \
+ --hash=sha256:447c450a093766744ab53bf1e7063ec82866f27bcb4f4c907da25ad293bba7e3 \
+ --hash=sha256:46101fc20c6f6568561cdd15a54018bb42980954b79aa46da8ae6f008066a30e \
+ --hash=sha256:4710dc676bb4b779c4361b54eb308bc84d64a2fa3d78e5f7228921eccce5d815 \
+ --hash=sha256:510986f9a280cd05189b42eee2b69fecdf5bf9651d4cd315ea21d24a964a3c36 \
+ --hash=sha256:5535dda5739257effef56e49a1c51c71f1d37a6e5607bb25a5eee507c59580d1 \
+ --hash=sha256:5a7524042014642b39b1fcae85fb37556c200e64ec90824ae9ecf7b667ccfc14 \
+ --hash=sha256:5f55028169ef85e1fa8e4b8b1b91c0b3b0fa3297c4fb22990d46ff01d22c2d6c \
+ --hash=sha256:6694d5573e7790a0e8d3d177d7a416ca5f5c150742ee703f3c18df76260de794 \
+ --hash=sha256:6831e1ac20ac52634da606b658b0b2712d26984999c9d93f0c6e59fe62ca741b \
+ --hash=sha256:77f0d9fa5e10d03aa4528436e33423bfa3718b86c646615f04616294c935f840 \
+ --hash=sha256:828ad813c7cdc2e71dcf141912c685bfe4b548c0e6d9540db6418b807c345ddd \
+ --hash=sha256:85a06c61598b14b015d4df233d249cd5abfa61084ef5b9f64a48e997fd829a82 \
+ --hash=sha256:8cb4febad0f0b26c6f62e1628f2053954ad2c555d67660f28dfb1b0496711952 \
+ --hash=sha256:a5c58664b23b248b16b96253880b2868fb34358911400a7ba39d7f6399935389 \
+ --hash=sha256:aaa0f296e503cda4bc07566f592cd7a28779d433f3a23c48082af425d6d5a78f \
+ --hash=sha256:ab235d9fe64833f12d1334d29b558aacedfbca2356dfb9691f2d0d38a8a7bfb4 \
+ --hash=sha256:b3b0c8f660fae65eac74fbf003f3103769b90012ae7a460863010539bb7a80da \
+ --hash=sha256:bab8e6d510d2ea0f1d14f12642e3f35cefa47a9b2e4c7cea1852b52bc9c49647 \
+ --hash=sha256:c45297bbdbc8bb79b02cf41417d63352b70bcb76f1bbb1ee7d47b3e89e42f95d \
+ --hash=sha256:d19bca47c8a01b92640c614a9147b081a1974f69168ecd494687c827109e8f42 \
+ --hash=sha256:d64b4340a0c488a9e79b66ec9f9d77d02b99b772c8b8afd46c1294c1d39ca478 \
+ --hash=sha256:da969da069a82bbb5300b59161d8d7c8d423bc4ccd3b410a9b4d8932aeefc14b \
+ --hash=sha256:ed02c7539705696ecb7dc9d476d861f3904a8d2b7e894bd418994920935d36bb \
+ --hash=sha256:ee5b8abc35b549012e03a7b1e86c09491457dba6c94112a2482b18589cc2bdb9 \
+ # via coveralls
+coveralls==1.6.0 \
+ --hash=sha256:6f213e461390973f4a97fb9e9d4ebd4956af296ff0a4d868e622108145835cb7 \
+ --hash=sha256:a7d0078c9e9b5692c03dcd3884647e837836c265c01e98094632feadef767d36
+cryptography==2.5 \
+ --hash=sha256:05b3ded5e88747d28ee3ef493f2b92cbb947c1e45cf98cfef22e6d38bb67d4af \
+ --hash=sha256:06826e7f72d1770e186e9c90e76b4f84d90cdb917b47ff88d8dc59a7b10e2b1e \
+ --hash=sha256:08b753df3672b7066e74376f42ce8fc4683e4fd1358d34c80f502e939ee944d2 \
+ --hash=sha256:2cd29bd1911782baaee890544c653bb03ec7d95ebeb144d714b0f5c33deb55c7 \
+ --hash=sha256:31e5637e9036d966824edaa91bf0aa39dc6f525a1c599f39fd5c50340264e079 \
+ --hash=sha256:42fad67d7072216a49e34f923d8cbda9edacbf6633b19a79655e88a1b4857063 \
+ --hash=sha256:4946b67235b9d2ea7d31307be9d5ad5959d6c4a8f98f900157b47abddf698401 \
+ --hash=sha256:522fdb2809603ee97a4d0ef2f8d617bc791eb483313ba307cb9c0a773e5e5695 \
+ --hash=sha256:6f841c7272645dd7c65b07b7108adfa8af0aaea57f27b7f59e01d41f75444c85 \
+ --hash=sha256:7d335e35306af5b9bc0560ca39f740dfc8def72749645e193dd35be11fb323b3 \
+ --hash=sha256:8504661ffe324837f5c4607347eeee4cf0fcad689163c6e9c8d3b18cf1f4a4ad \
+ --hash=sha256:9260b201ce584d7825d900c88700aa0bd6b40d4ebac7b213857bd2babee9dbca \
+ --hash=sha256:9a30384cc402eac099210ab9b8801b2ae21e591831253883decdb4513b77a3cd \
+ --hash=sha256:9e29af877c29338f0cab5f049ccc8bd3ead289a557f144376c4fbc7d1b98914f \
+ --hash=sha256:ab50da871bc109b2d9389259aac269dd1b7c7413ee02d06fe4e486ed26882159 \
+ --hash=sha256:b13c80b877e73bcb6f012813c6f4a9334fcf4b0e96681c5a15dac578f2eedfa0 \
+ --hash=sha256:bfe66b577a7118e05b04141f0f1ed0959552d45672aa7ecb3d91e319d846001e \
+ --hash=sha256:e091bd424567efa4b9d94287a952597c05d22155a13716bf5f9f746b9dc906d3 \
+ --hash=sha256:fa2b38c8519c5a3aa6e2b4e1cf1a549b54acda6adb25397ff542068e73d1ed00 \
+ # via pyopenssl, urllib3
+docopt==0.6.2 \
+ --hash=sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491 \
+ # via coveralls
+enum34==1.1.6 \
+ --hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
+ --hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
+ --hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
+ --hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1 \
+ # via cryptography
+idna==2.8 \
+ --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
+ --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
+ # via requests, urllib3
+ipaddress==1.0.22 \
+ --hash=sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794 \
+ --hash=sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c \
+ # via cryptography, urllib3
+pycparser==2.19 \
+ --hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 \
+ # via cffi
+pyopenssl==19.0.0 \
+ --hash=sha256:aeca66338f6de19d1aa46ed634c3b9ae519a64b458f8468aec688e7e3c20f200 \
+ --hash=sha256:c727930ad54b10fc157015014b666f2d8b41f70c0d03e83ab67624fd3dd5d1e6 \
+ # via urllib3
+requests==2.21.0 \
+ --hash=sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e \
+ --hash=sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b \
+ # via coveralls
+six==1.12.0 \
+ --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
+ --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
+ # via cryptography, pyopenssl
+urllib3[secure]==1.24.2 \
+ --hash=sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0 \
+ --hash=sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3 \
+ # via coveralls, requests
diff --git a/third_party/python/mozilla-version/requirements-docs.txt b/third_party/python/mozilla-version/requirements-docs.txt
new file mode 100644
index 0000000000..bc3ba6a5e3
--- /dev/null
+++ b/third_party/python/mozilla-version/requirements-docs.txt
@@ -0,0 +1,122 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+# pip-compile --generate-hashes --output-file requirements-docs.txt requirements-docs.txt.in
+#
+alabaster==0.7.12 \
+ --hash=sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359 \
+ --hash=sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02 \
+ # via sphinx
+babel==2.6.0 \
+ --hash=sha256:6778d85147d5d85345c14a26aada5e478ab04e39b078b0745ee6870c2b5cf669 \
+ --hash=sha256:8cba50f48c529ca3fa18cf81fa9403be176d374ac4d60738b839122dfaaa3d23 \
+ # via sphinx
+certifi==2018.11.29 \
+ --hash=sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7 \
+ --hash=sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033 \
+ # via requests
+chardet==3.0.4 \
+ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
+ --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \
+ # via requests
+docutils==0.14 \
+ --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
+ --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
+ --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6 \
+ # via m2r, sphinx
+idna==2.8 \
+ --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
+ --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
+ # via requests
+imagesize==1.1.0 \
+ --hash=sha256:3f349de3eb99145973fefb7dbe38554414e5c30abd0c8e4b970a7c9d09f3a1d8 \
+ --hash=sha256:f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5 \
+ # via sphinx
+jinja2==2.10 \
+ --hash=sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd \
+ --hash=sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4 \
+ # via sphinx
+m2r==0.2.1 \
+ --hash=sha256:bf90bad66cda1164b17e5ba4a037806d2443f2a4d5ddc9f6a5554a0322aaed99
+markupsafe==1.1.0 \
+ --hash=sha256:048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432 \
+ --hash=sha256:130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b \
+ --hash=sha256:19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9 \
+ --hash=sha256:1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af \
+ --hash=sha256:1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834 \
+ --hash=sha256:1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd \
+ --hash=sha256:1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d \
+ --hash=sha256:31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7 \
+ --hash=sha256:3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b \
+ --hash=sha256:4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3 \
+ --hash=sha256:525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c \
+ --hash=sha256:52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2 \
+ --hash=sha256:52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7 \
+ --hash=sha256:5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36 \
+ --hash=sha256:5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1 \
+ --hash=sha256:5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e \
+ --hash=sha256:7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1 \
+ --hash=sha256:83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c \
+ --hash=sha256:857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856 \
+ --hash=sha256:98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550 \
+ --hash=sha256:bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492 \
+ --hash=sha256:d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672 \
+ --hash=sha256:e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401 \
+ --hash=sha256:edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6 \
+ --hash=sha256:efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6 \
+ --hash=sha256:f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c \
+ --hash=sha256:f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd \
+ --hash=sha256:fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1 \
+ # via jinja2
+mistune==0.8.4 \
+ --hash=sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e \
+ --hash=sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4 \
+ # via m2r
+packaging==19.0 \
+ --hash=sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af \
+ --hash=sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3 \
+ # via sphinx
+pygments==2.3.1 \
+ --hash=sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a \
+ --hash=sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d \
+ # via sphinx
+pyparsing==2.3.1 \
+ --hash=sha256:66c9268862641abcac4a96ba74506e594c884e3f57690a696d21ad8210ed667a \
+ --hash=sha256:f6c5ef0d7480ad048c054c37632c67fca55299990fff127850181659eea33fc3 \
+ # via packaging
+pytz==2018.9 \
+ --hash=sha256:32b0891edff07e28efe91284ed9c31e123d84bea3fd98e1f72be2508f43ef8d9 \
+ --hash=sha256:d5f05e487007e29e03409f9398d074e158d920d36eb82eaf66fb1136b0c5374c \
+ # via babel
+readthedocs-sphinx-ext==0.5.16 \
+ --hash=sha256:42b1c63d63dd483a188b541599bd08a540b2d08ec2b166660179618b6ccc3bb0 \
+ --hash=sha256:e73770f53a226f6db8199916a12bcee1808e0c0cbe028422668e8c1c7f2fa80c
+requests==2.21.0 \
+ --hash=sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e \
+ --hash=sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b \
+ # via readthedocs-sphinx-ext, sphinx
+six==1.12.0 \
+ --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
+ --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
+ # via packaging, sphinx
+snowballstemmer==1.2.1 \
+ --hash=sha256:919f26a68b2c17a7634da993d91339e288964f93c274f1343e3bbbe2096e1128 \
+ --hash=sha256:9f3bcd3c401c3e862ec0ebe6d2c069ebc012ce142cce209c098ccb5b09136e89 \
+ # via sphinx
+sphinx==1.8.4 \
+ --hash=sha256:b53904fa7cb4b06a39409a492b949193a1b68cc7241a1a8ce9974f86f0d24287 \
+ --hash=sha256:c1c00fc4f6e8b101a0d037065043460dffc2d507257f2f11acaed71fd2b0c83c
+sphinxcontrib-websupport==1.1.0 \
+ --hash=sha256:68ca7ff70785cbe1e7bccc71a48b5b6d965d79ca50629606c7861a21b206d9dd \
+ --hash=sha256:9de47f375baf1ea07cdb3436ff39d7a9c76042c10a769c52353ec46e4e8fc3b9 \
+ # via sphinx
+typing==3.6.6 \
+ --hash=sha256:4027c5f6127a6267a435201981ba156de91ad0d1d98e9ddc2aa173453453492d \
+ --hash=sha256:57dcf675a99b74d64dacf6fba08fb17cf7e3d5fdff53d4a30ea2a5e7e52543d4 \
+ --hash=sha256:a4c8473ce11a65999c8f59cb093e70686b6c84c98df58c1dae9b3b196089858a \
+ # via sphinx
+urllib3==1.24.2 \
+ --hash=sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0 \
+ --hash=sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3 \
+ # via requests
diff --git a/third_party/python/mozilla-version/requirements-test.txt b/third_party/python/mozilla-version/requirements-test.txt
new file mode 100644
index 0000000000..d56c6cd582
--- /dev/null
+++ b/third_party/python/mozilla-version/requirements-test.txt
@@ -0,0 +1,140 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+# pip-compile --generate-hashes --output-file requirements-test.txt requirements-test.txt.in
+#
+atomicwrites==1.3.0 \
+ --hash=sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4 \
+ --hash=sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6 \
+ # via pytest
+attrs==18.2.0 \
+ --hash=sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69 \
+ --hash=sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb
+configparser==3.7.1 \
+ --hash=sha256:5bd5fa2a491dc3cfe920a3f2a107510d65eceae10e9c6e547b90261a4710df32 \
+ --hash=sha256:c114ff90ee2e762db972fa205f02491b1f5cf3ff950decd8542c62970c9bedac \
+ --hash=sha256:df28e045fbff307a28795b18df6ac8662be3219435560ddb068c283afab1ea7a ; python_version < "3.2"
+coverage==4.5.2 \
+ --hash=sha256:09e47c529ff77bf042ecfe858fb55c3e3eb97aac2c87f0349ab5a7efd6b3939f \
+ --hash=sha256:0a1f9b0eb3aa15c990c328535655847b3420231af299386cfe5efc98f9c250fe \
+ --hash=sha256:0cc941b37b8c2ececfed341444a456912e740ecf515d560de58b9a76562d966d \
+ --hash=sha256:10e8af18d1315de936d67775d3a814cc81d0747a1a0312d84e27ae5610e313b0 \
+ --hash=sha256:1b4276550b86caa60606bd3572b52769860a81a70754a54acc8ba789ce74d607 \
+ --hash=sha256:1e8a2627c48266c7b813975335cfdea58c706fe36f607c97d9392e61502dc79d \
+ --hash=sha256:2b224052bfd801beb7478b03e8a66f3f25ea56ea488922e98903914ac9ac930b \
+ --hash=sha256:447c450a093766744ab53bf1e7063ec82866f27bcb4f4c907da25ad293bba7e3 \
+ --hash=sha256:46101fc20c6f6568561cdd15a54018bb42980954b79aa46da8ae6f008066a30e \
+ --hash=sha256:4710dc676bb4b779c4361b54eb308bc84d64a2fa3d78e5f7228921eccce5d815 \
+ --hash=sha256:510986f9a280cd05189b42eee2b69fecdf5bf9651d4cd315ea21d24a964a3c36 \
+ --hash=sha256:5535dda5739257effef56e49a1c51c71f1d37a6e5607bb25a5eee507c59580d1 \
+ --hash=sha256:5a7524042014642b39b1fcae85fb37556c200e64ec90824ae9ecf7b667ccfc14 \
+ --hash=sha256:5f55028169ef85e1fa8e4b8b1b91c0b3b0fa3297c4fb22990d46ff01d22c2d6c \
+ --hash=sha256:6694d5573e7790a0e8d3d177d7a416ca5f5c150742ee703f3c18df76260de794 \
+ --hash=sha256:6831e1ac20ac52634da606b658b0b2712d26984999c9d93f0c6e59fe62ca741b \
+ --hash=sha256:77f0d9fa5e10d03aa4528436e33423bfa3718b86c646615f04616294c935f840 \
+ --hash=sha256:828ad813c7cdc2e71dcf141912c685bfe4b548c0e6d9540db6418b807c345ddd \
+ --hash=sha256:85a06c61598b14b015d4df233d249cd5abfa61084ef5b9f64a48e997fd829a82 \
+ --hash=sha256:8cb4febad0f0b26c6f62e1628f2053954ad2c555d67660f28dfb1b0496711952 \
+ --hash=sha256:a5c58664b23b248b16b96253880b2868fb34358911400a7ba39d7f6399935389 \
+ --hash=sha256:aaa0f296e503cda4bc07566f592cd7a28779d433f3a23c48082af425d6d5a78f \
+ --hash=sha256:ab235d9fe64833f12d1334d29b558aacedfbca2356dfb9691f2d0d38a8a7bfb4 \
+ --hash=sha256:b3b0c8f660fae65eac74fbf003f3103769b90012ae7a460863010539bb7a80da \
+ --hash=sha256:bab8e6d510d2ea0f1d14f12642e3f35cefa47a9b2e4c7cea1852b52bc9c49647 \
+ --hash=sha256:c45297bbdbc8bb79b02cf41417d63352b70bcb76f1bbb1ee7d47b3e89e42f95d \
+ --hash=sha256:d19bca47c8a01b92640c614a9147b081a1974f69168ecd494687c827109e8f42 \
+ --hash=sha256:d64b4340a0c488a9e79b66ec9f9d77d02b99b772c8b8afd46c1294c1d39ca478 \
+ --hash=sha256:da969da069a82bbb5300b59161d8d7c8d423bc4ccd3b410a9b4d8932aeefc14b \
+ --hash=sha256:ed02c7539705696ecb7dc9d476d861f3904a8d2b7e894bd418994920935d36bb \
+ --hash=sha256:ee5b8abc35b549012e03a7b1e86c09491457dba6c94112a2482b18589cc2bdb9
+entrypoints==0.3 \
+ --hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \
+ --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 \
+ # via flake8
+enum34==1.1.6 \
+ --hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
+ --hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
+ --hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
+ --hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1
+flake8-docstrings==1.3.0 \
+ --hash=sha256:4e0ce1476b64e6291520e5570cf12b05016dd4e8ae454b8a8a9a48bc5f84e1cd \
+ --hash=sha256:8436396b5ecad51a122a2c99ba26e5b4e623bf6e913b0fea0cb6c2c4050f91eb
+flake8-polyfill==1.0.2 \
+ --hash=sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9 \
+ --hash=sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda \
+ # via flake8-docstrings
+flake8==3.7.6 \
+ --hash=sha256:6d8c66a65635d46d54de59b027a1dda40abbe2275b3164b634835ac9c13fd048 \
+ --hash=sha256:6eab21c6e34df2c05416faa40d0c59963008fff29b6f0ccfe8fa28152ab3e383
+funcsigs==1.0.2 \
+ --hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
+ --hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50 ; python_version < "3.0"
+# For an unknown reason, putting the python_version statement at the end doesn't work
+functools32==3.2.3.post2 ; python_version < "3.0" \
+ --hash=sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0 \
+ --hash=sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d
+mccabe==0.6.1 \
+ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
+ --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \
+ # via flake8
+more-itertools==5.0.0 \
+ --hash=sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4 \
+ --hash=sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc \
+ --hash=sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9 \
+ # via pytest
+pathlib2==2.3.3 \
+ --hash=sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742 \
+ --hash=sha256:5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7 ; python_version < "3.6"
+pluggy==0.8.1 \
+ --hash=sha256:8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616 \
+ --hash=sha256:980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a \
+ # via pytest
+py==1.7.0 \
+ --hash=sha256:bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694 \
+ --hash=sha256:e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6 \
+ # via pytest
+pycodestyle==2.5.0 \
+ --hash=sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56 \
+ --hash=sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c \
+ # via flake8
+pydocstyle==3.0.0 \
+ --hash=sha256:2258f9b0df68b97bf3a6c29003edc5238ff8879f1efb6f1999988d934e432bd8 \
+ --hash=sha256:5741c85e408f9e0ddf873611085e819b809fca90b619f5fd7f34bd4959da3dd4 \
+ --hash=sha256:ed79d4ec5e92655eccc21eb0c6cf512e69512b4a97d215ace46d17e4990f2039 \
+ # via flake8-docstrings
+pyflakes==2.1.0 \
+ --hash=sha256:5e8c00e30c464c99e0b501dc160b13a14af7f27d4dffb529c556e30a159e231d \
+ --hash=sha256:f277f9ca3e55de669fba45b7393a1449009cff5a37d1af10ebb76c52765269cd \
+ # via flake8
+pytest-cov==2.6.1 \
+ --hash=sha256:0ab664b25c6aa9716cbf203b17ddb301932383046082c081b9848a0edf5add33 \
+ --hash=sha256:230ef817450ab0699c6cc3c9c8f7a829c34674456f2ed8df1fe1d39780f7c87f
+pytest==4.3.0 \
+ --hash=sha256:067a1d4bf827ffdd56ad21bd46674703fce77c5957f6c1eef731f6146bfcef1c \
+ --hash=sha256:9687049d53695ad45cf5fdc7bbd51f0c49f1ea3ecfc4b7f3fde7501b541f17f4
+scandir==1.9.0 \
+ --hash=sha256:04b8adb105f2ed313a7c2ef0f1cf7aff4871aa7a1883fa4d8c44b5551ab052d6 \
+ --hash=sha256:1444134990356c81d12f30e4b311379acfbbcd03e0bab591de2696a3b126d58e \
+ --hash=sha256:1b5c314e39f596875e5a95dd81af03730b338c277c54a454226978d5ba95dbb6 \
+ --hash=sha256:346619f72eb0ddc4cf355ceffd225fa52506c92a2ff05318cfabd02a144e7c4e \
+ --hash=sha256:44975e209c4827fc18a3486f257154d34ec6eaec0f90fef0cca1caa482db7064 \
+ --hash=sha256:61859fd7e40b8c71e609c202db5b0c1dbec0d5c7f1449dec2245575bdc866792 \
+ --hash=sha256:a5e232a0bf188362fa00123cc0bb842d363a292de7126126df5527b6a369586a \
+ --hash=sha256:c14701409f311e7a9b7ec8e337f0815baf7ac95776cc78b419a1e6d49889a383 \
+ --hash=sha256:c7708f29d843fc2764310732e41f0ce27feadde453261859ec0fca7865dfc41b \
+ --hash=sha256:c9009c527929f6e25604aec39b0a43c3f831d2947d89d6caaab22f057b7055c8 \
+ --hash=sha256:f5c71e29b4e2af7ccdc03a020c626ede51da471173b4a6ad1e904f2b2e04b4bd \
+ # via pathlib2
+six==1.12.0 \
+ --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
+ --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
+ # via more-itertools, pathlib2, pydocstyle, pytest
+snowballstemmer==1.2.1 \
+ --hash=sha256:919f26a68b2c17a7634da993d91339e288964f93c274f1343e3bbbe2096e1128 \
+ --hash=sha256:9f3bcd3c401c3e862ec0ebe6d2c069ebc012ce142cce209c098ccb5b09136e89 \
+ # via pydocstyle
+typing==3.6.6 \
+ --hash=sha256:4027c5f6127a6267a435201981ba156de91ad0d1d98e9ddc2aa173453453492d \
+ --hash=sha256:57dcf675a99b74d64dacf6fba08fb17cf7e3d5fdff53d4a30ea2a5e7e52543d4 \
+ --hash=sha256:a4c8473ce11a65999c8f59cb093e70686b6c84c98df58c1dae9b3b196089858a \
+ # via flake8
diff --git a/third_party/python/mozilla-version/requirements.txt b/third_party/python/mozilla-version/requirements.txt
new file mode 100644
index 0000000000..d08788894a
--- /dev/null
+++ b/third_party/python/mozilla-version/requirements.txt
@@ -0,0 +1,14 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+# pip-compile --generate-hashes --output-file requirements.txt requirements.txt.in
+#
+attrs==18.2.0 \
+ --hash=sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69 \
+ --hash=sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb
+enum34==1.1.6 \
+ --hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
+ --hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
+ --hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
+ --hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1 ; python_version < "3.4"
diff --git a/third_party/python/mozilla-version/requirements.txt.in b/third_party/python/mozilla-version/requirements.txt.in
new file mode 100644
index 0000000000..1b356989ac
--- /dev/null
+++ b/third_party/python/mozilla-version/requirements.txt.in
@@ -0,0 +1,3 @@
+# pyup: ignore file
+enum34; python_version < '3.4'
+attrs
diff --git a/third_party/python/mozilla-version/setup.cfg b/third_party/python/mozilla-version/setup.cfg
new file mode 100644
index 0000000000..8bfd5a12f8
--- /dev/null
+++ b/third_party/python/mozilla-version/setup.cfg
@@ -0,0 +1,4 @@
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/mozilla-version/setup.py b/third_party/python/mozilla-version/setup.py
new file mode 100644
index 0000000000..4755bc7aed
--- /dev/null
+++ b/third_party/python/mozilla-version/setup.py
@@ -0,0 +1,38 @@
+import os
+import re
+from setuptools import setup, find_packages
+
+
+project_dir = os.path.abspath(os.path.dirname(__file__))
+
+with open(os.path.join(project_dir, 'version.txt')) as f:
+ version = f.read().rstrip()
+
+# We use the .in file because a library shouldn't pin versions; pinning breaks consumers' updates.
+# We allow commented lines in this file
+with open(os.path.join(project_dir, 'requirements.txt.in')) as f:
+ requirements_raw = f.readlines()
+
+requirements_without_comments = [
+ line for line in requirements_raw if line and not line.startswith('#')
+]
+
+setup(
+ name='mozilla-version',
+ version=version,
+ description="""Process Firefox versions numbers. Tells whether they are valid or not, whether \
+they are nightlies or regular releases, whether this version precedes that other.
+ """,
+ author='Mozilla Release Engineering',
+ author_email='release+python@mozilla.com',
+ url='https://github.com/mozilla-releng/mozilla-version',
+ packages=find_packages(),
+ include_package_data=True,
+ zip_safe=False,
+ license='MPL2',
+ install_requires=requirements_without_comments,
+ classifiers=(
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ ),
+)
diff --git a/third_party/python/mozilla-version/version.txt b/third_party/python/mozilla-version/version.txt
new file mode 100644
index 0000000000..42045acae2
--- /dev/null
+++ b/third_party/python/mozilla-version/version.txt
@@ -0,0 +1 @@
+0.3.4
diff --git a/third_party/python/pathlib2/CHANGELOG.rst b/third_party/python/pathlib2/CHANGELOG.rst
new file mode 100644
index 0000000000..8739b2dc73
--- /dev/null
+++ b/third_party/python/pathlib2/CHANGELOG.rst
@@ -0,0 +1,137 @@
+History
+-------
+
+Version 2.3.2
+^^^^^^^^^^^^^
+
+- Hotfix for broken setup.py.
+
+Version 2.3.1
+^^^^^^^^^^^^^
+
+- Fix tests for systems where filesystem encoding only supports ascii
+ (reported by yurivict, fixed with help of honnibal, see issue #30).
+
+- Use modern setuptools syntax for specifying conditional scandir
+ dependency (see issue #31).
+
+- Remove legacy use of support module from old pathlib module (see
+ issue #39). This fixes the tests for Python 3.6.
+
+- Drop the "from __future__ import unicode_literals" and -Qnew tests
+ as it introduced subtle bugs in the tests, and maintaining separate
+ test modules for these legacy features seems not worth the effort.
+
+- Drop Python 3.2 support, as scandir no longer supports it.
+
+Version 2.3.0
+^^^^^^^^^^^^^
+
+- Sync with upstream pathlib from CPython 3.6.1 (7d1017d).
+
+Version 2.2.1
+^^^^^^^^^^^^^
+
+- Fix conditional scandir dependency in wheel (reported by AvdN, see
+ issue #20 and pull request #21).
+
+Version 2.2.0
+^^^^^^^^^^^^^
+
+- Sync with upstream pathlib from CPython 3.5.2 and 3.6.0: fix various
+ exceptions, empty glob pattern, scandir, __fspath__.
+
+- Support unicode strings to be used to construct paths in Python 2
+ (reported by native-api, see issue #13 and pull request #15).
+
+Version 2.1.0
+^^^^^^^^^^^^^
+
+- Sync with upstream pathlib from CPython 3.5.0: gethomedir, home,
+ expanduser.
+
+Version 2.0.1
+^^^^^^^^^^^^^
+
+- Fix TypeError exceptions in write_bytes and write_text (contributed
+ by Emanuele Gaifas, see pull request #2).
+
+Version 2.0
+^^^^^^^^^^^
+
+- Sync with upstream pathlib from CPython: read_text, write_text,
+ read_bytes, write_bytes, __enter__, __exit__, samefile.
+- Use travis and appveyor for continuous integration.
+- Fixed some bugs in test code.
+
+Version 1.0.1
+^^^^^^^^^^^^^
+
+- Pull request #4: Python 2.6 compatibility by eevee.
+
+Version 1.0
+^^^^^^^^^^^
+
+This version brings ``pathlib`` up to date with the official Python 3.4
+release, and also fixes a couple of 2.7-specific issues.
+
+- Python issue #20765: Add missing documentation for PurePath.with_name()
+ and PurePath.with_suffix().
+- Fix test_mkdir_parents when the working directory has additional bits
+ set (such as the setgid or sticky bits).
+- Python issue #20111: pathlib.Path.with_suffix() now sanity checks the
+ given suffix.
+- Python issue #19918: Fix PurePath.relative_to() under Windows.
+- Python issue #19921: When Path.mkdir() is called with parents=True, any
+ missing parent is created with the default permissions, ignoring the mode
+ argument (mimicking the POSIX "mkdir -p" command).
+- Python issue #19887: Improve the Path.resolve() algorithm to support
+ certain symlink chains.
+- Make pathlib usable under Python 2.7 with unicode pathnames (only pure
+ ASCII, though).
+- Issue #21: fix TypeError under Python 2.7 when using new division.
+- Add tox support for easier testing.
+
+Version 0.97
+^^^^^^^^^^^^
+
+This version brings ``pathlib`` up to date with the final API specified
+in :pep:`428`. The changes are too long to list here; it is recommended
+to read the `documentation <https://pathlib.readthedocs.org/>`_.
+
+.. warning::
+ The API in this version is partially incompatible with pathlib 0.8 and
+ earlier. Be sure to check your code for possible breakage!
+
+Version 0.8
+^^^^^^^^^^^
+
+- Add PurePath.name and PurePath.anchor.
+- Add Path.owner and Path.group.
+- Add Path.replace().
+- Add Path.as_uri().
+- Issue #10: when creating a file with Path.open(), don't set the executable
+ bit.
+- Issue #11: fix comparisons with non-Path objects.
+
+Version 0.7
+^^^^^^^^^^^
+
+- Add '**' (recursive) patterns to Path.glob().
+- Fix openat() support after the API refactoring in Python 3.3 beta1.
+- Add a *target_is_directory* argument to Path.symlink_to()
+
+Version 0.6
+^^^^^^^^^^^
+
+- Add Path.is_file() and Path.is_symlink()
+- Add Path.glob() and Path.rglob()
+- Add PurePath.match()
+
+Version 0.5
+^^^^^^^^^^^
+
+- Add Path.mkdir().
+- Add Python 2.7 compatibility by Michele Lacchia.
+- Make parent() raise ValueError when the level is greater than the path
+ length.
diff --git a/third_party/python/pathlib2/LICENSE.rst b/third_party/python/pathlib2/LICENSE.rst
new file mode 100644
index 0000000000..1715d3d7a2
--- /dev/null
+++ b/third_party/python/pathlib2/LICENSE.rst
@@ -0,0 +1,23 @@
+The MIT License (MIT)
+
+Copyright (c) 2014-2017 Matthias C. M. Troffaes
+Copyright (c) 2012-2014 Antoine Pitrou and contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/third_party/python/pathlib2/MANIFEST.in b/third_party/python/pathlib2/MANIFEST.in
new file mode 100644
index 0000000000..2f03369dc6
--- /dev/null
+++ b/third_party/python/pathlib2/MANIFEST.in
@@ -0,0 +1,10 @@
+include *.py
+recursive-include pathlib2 *.py
+recursive-include tests *.py
+include *.rst
+include VERSION
+include requirements.txt
+exclude .travis.yml
+exclude appveyor.yml
+exclude codecov.yml
+prune appveyor
diff --git a/third_party/python/pathlib2/PKG-INFO b/third_party/python/pathlib2/PKG-INFO
new file mode 100644
index 0000000000..f32d0caec2
--- /dev/null
+++ b/third_party/python/pathlib2/PKG-INFO
@@ -0,0 +1,72 @@
+Metadata-Version: 1.1
+Name: pathlib2
+Version: 2.3.2
+Summary: Object-oriented filesystem paths
+Home-page: https://pypi.python.org/pypi/pathlib2/
+Author: Matthias C. M. Troffaes
+Author-email: matthias.troffaes@gmail.com
+License: MIT
+Download-URL: https://pypi.python.org/pypi/pathlib2/
+Description-Content-Type: UNKNOWN
+Description: The `old pathlib <https://bitbucket.org/pitrou/pathlib>`_
+ module on bitbucket is in bugfix-only mode.
+ The goal of pathlib2 is to provide a backport of
+ `standard pathlib <http://docs.python.org/dev/library/pathlib.html>`_
+ module which tracks the standard library module,
+ so all the newest features of the standard pathlib can be
+ used also on older Python versions.
+
+ Download
+ --------
+
+ Standalone releases are available on PyPI:
+ http://pypi.python.org/pypi/pathlib2/
+
+ Development
+ -----------
+
+ The main development takes place in the Python standard library: see
+ the `Python developer's guide <http://docs.python.org/devguide/>`_.
+ In particular, new features should be submitted to the
+ `Python bug tracker <http://bugs.python.org/>`_.
+
+ Issues that occur in this backport, but that do not occur in the
+ standard Python pathlib module can be submitted on
+ the `pathlib2 bug tracker <https://github.com/mcmtroffaes/pathlib2/issues>`_.
+
+ Documentation
+ -------------
+
+ Refer to the
+ `standard pathlib <http://docs.python.org/dev/library/pathlib.html>`_
+ documentation.
+
+ .. |travis| image:: https://travis-ci.org/mcmtroffaes/pathlib2.png?branch=develop
+ :target: https://travis-ci.org/mcmtroffaes/pathlib2
+ :alt: travis-ci
+
+ .. |appveyor| image:: https://ci.appveyor.com/api/projects/status/baddx3rpet2wyi2c?svg=true
+ :target: https://ci.appveyor.com/project/mcmtroffaes/pathlib2
+ :alt: appveyor
+
+ .. |codecov| image:: https://codecov.io/gh/mcmtroffaes/pathlib2/branch/develop/graph/badge.svg
+ :target: https://codecov.io/gh/mcmtroffaes/pathlib2
+ :alt: codecov
+
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: System :: Filesystems
diff --git a/third_party/python/pathlib2/README.rst b/third_party/python/pathlib2/README.rst
new file mode 100644
index 0000000000..d276fdcd03
--- /dev/null
+++ b/third_party/python/pathlib2/README.rst
@@ -0,0 +1,52 @@
+pathlib2
+========
+
+|appveyor| |travis| |codecov|
+
+Fork of pathlib aiming to support the full stdlib Python API.
+
+The `old pathlib <https://bitbucket.org/pitrou/pathlib>`_
+module on bitbucket is in bugfix-only mode.
+The goal of pathlib2 is to provide a backport of
+`standard pathlib <http://docs.python.org/dev/library/pathlib.html>`_
+module which tracks the standard library module,
+so all the newest features of the standard pathlib can be
+used also on older Python versions.
+
+Download
+--------
+
+Standalone releases are available on PyPI:
+http://pypi.python.org/pypi/pathlib2/
+
+Development
+-----------
+
+The main development takes place in the Python standard library: see
+the `Python developer's guide <http://docs.python.org/devguide/>`_.
+In particular, new features should be submitted to the
+`Python bug tracker <http://bugs.python.org/>`_.
+
+Issues that occur in this backport, but that do not occur in the
+standard Python pathlib module can be submitted on
+the `pathlib2 bug tracker <https://github.com/mcmtroffaes/pathlib2/issues>`_.
+
+Documentation
+-------------
+
+Refer to the
+`standard pathlib <http://docs.python.org/dev/library/pathlib.html>`_
+documentation.
+
+.. |travis| image:: https://travis-ci.org/mcmtroffaes/pathlib2.png?branch=develop
+ :target: https://travis-ci.org/mcmtroffaes/pathlib2
+ :alt: travis-ci
+
+.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/baddx3rpet2wyi2c?svg=true
+ :target: https://ci.appveyor.com/project/mcmtroffaes/pathlib2
+ :alt: appveyor
+
+.. |codecov| image:: https://codecov.io/gh/mcmtroffaes/pathlib2/branch/develop/graph/badge.svg
+ :target: https://codecov.io/gh/mcmtroffaes/pathlib2
+ :alt: codecov
+
diff --git a/third_party/python/pathlib2/VERSION b/third_party/python/pathlib2/VERSION
new file mode 100644
index 0000000000..f90b1afc08
--- /dev/null
+++ b/third_party/python/pathlib2/VERSION
@@ -0,0 +1 @@
+2.3.2
diff --git a/third_party/python/pathlib2/pathlib2/__init__.py b/third_party/python/pathlib2/pathlib2/__init__.py
new file mode 100644
index 0000000000..2eb41e309e
--- /dev/null
+++ b/third_party/python/pathlib2/pathlib2/__init__.py
@@ -0,0 +1,1670 @@
+# Copyright (c) 2014-2017 Matthias C. M. Troffaes
+# Copyright (c) 2012-2014 Antoine Pitrou and contributors
+# Distributed under the terms of the MIT License.
+
+import ctypes
+import fnmatch
+import functools
+import io
+import ntpath
+import os
+import posixpath
+import re
+import six
+import sys
+from collections import Sequence
+from errno import EINVAL, ENOENT, ENOTDIR, EEXIST, EPERM, EACCES
+from operator import attrgetter
+
+from stat import (
+ S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO)
+try:
+ from urllib import quote as urlquote_from_bytes
+except ImportError:
+ from urllib.parse import quote_from_bytes as urlquote_from_bytes
+
+
+try:
+ intern = intern
+except NameError:
+ intern = sys.intern
+
+supports_symlinks = True
+if os.name == 'nt':
+ import nt
+ if sys.getwindowsversion()[:2] >= (6, 0) and sys.version_info >= (3, 2):
+ from nt import _getfinalpathname
+ else:
+ supports_symlinks = False
+ _getfinalpathname = None
+else:
+ nt = None
+
+try:
+ from os import scandir as os_scandir
+except ImportError:
+ from scandir import scandir as os_scandir
+
+__all__ = [
+ "PurePath", "PurePosixPath", "PureWindowsPath",
+ "Path", "PosixPath", "WindowsPath",
+ ]
+
+#
+# Internals
+#
+
+
+def _py2_fsencode(parts):
+ # py2 => minimal unicode support
+ assert six.PY2
+ return [part.encode('ascii') if isinstance(part, six.text_type)
+ else part for part in parts]
+
+
+def _try_except_fileexistserror(try_func, except_func, else_func=None):
+ if sys.version_info >= (3, 3):
+ try:
+ try_func()
+ except FileExistsError as exc:
+ except_func(exc)
+ else:
+ if else_func is not None:
+ else_func()
+ else:
+ try:
+ try_func()
+ except EnvironmentError as exc:
+ if exc.errno != EEXIST:
+ raise
+ else:
+ except_func(exc)
+ else:
+ if else_func is not None:
+ else_func()
+
+
+def _try_except_filenotfounderror(try_func, except_func):
+ if sys.version_info >= (3, 3):
+ try:
+ try_func()
+ except FileNotFoundError as exc:
+ except_func(exc)
+ else:
+ try:
+ try_func()
+ except EnvironmentError as exc:
+ if exc.errno != ENOENT:
+ raise
+ else:
+ except_func(exc)
+
+
+def _try_except_permissionerror_iter(try_iter, except_iter):
+ if sys.version_info >= (3, 3):
+ try:
+ for x in try_iter():
+ yield x
+ except PermissionError as exc:
+ for x in except_iter(exc):
+ yield x
+ else:
+ try:
+ for x in try_iter():
+ yield x
+ except EnvironmentError as exc:
+ if exc.errno not in (EPERM, EACCES):
+ raise
+ else:
+ for x in except_iter(exc):
+ yield x
+
+
+def _win32_get_unique_path_id(path):
+ # get file information, needed for samefile on older Python versions
+ # see http://timgolden.me.uk/python/win32_how_do_i/
+ # see_if_two_files_are_the_same_file.html
+ from ctypes import POINTER, Structure, WinError
+ from ctypes.wintypes import DWORD, HANDLE, BOOL
+
+ class FILETIME(Structure):
+ _fields_ = [("datetime_lo", DWORD),
+ ("datetime_hi", DWORD),
+ ]
+
+ class BY_HANDLE_FILE_INFORMATION(Structure):
+ _fields_ = [("attributes", DWORD),
+ ("created_at", FILETIME),
+ ("accessed_at", FILETIME),
+ ("written_at", FILETIME),
+ ("volume", DWORD),
+ ("file_hi", DWORD),
+ ("file_lo", DWORD),
+ ("n_links", DWORD),
+ ("index_hi", DWORD),
+ ("index_lo", DWORD),
+ ]
+
+ CreateFile = ctypes.windll.kernel32.CreateFileW
+ CreateFile.argtypes = [ctypes.c_wchar_p, DWORD, DWORD, ctypes.c_void_p,
+ DWORD, DWORD, HANDLE]
+ CreateFile.restype = HANDLE
+ GetFileInformationByHandle = (
+ ctypes.windll.kernel32.GetFileInformationByHandle)
+ GetFileInformationByHandle.argtypes = [
+ HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)]
+ GetFileInformationByHandle.restype = BOOL
+ CloseHandle = ctypes.windll.kernel32.CloseHandle
+ CloseHandle.argtypes = [HANDLE]
+ CloseHandle.restype = BOOL
+ GENERIC_READ = 0x80000000
+ FILE_SHARE_READ = 0x00000001
+ FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
+ OPEN_EXISTING = 3
+ if os.path.isdir(path):
+ flags = FILE_FLAG_BACKUP_SEMANTICS
+ else:
+ flags = 0
+ hfile = CreateFile(path, GENERIC_READ, FILE_SHARE_READ,
+ None, OPEN_EXISTING, flags, None)
+ if hfile == 0xffffffff:
+ if sys.version_info >= (3, 3):
+ raise FileNotFoundError(path)
+ else:
+ exc = OSError("file not found: path")
+ exc.errno = ENOENT
+ raise exc
+ info = BY_HANDLE_FILE_INFORMATION()
+ success = GetFileInformationByHandle(hfile, info)
+ CloseHandle(hfile)
+ if success == 0:
+ raise WinError()
+ return info.volume, info.index_hi, info.index_lo
+
+
+def _is_wildcard_pattern(pat):
+ # Whether this pattern needs actual matching using fnmatch, or can
+ # be looked up directly as a file.
+ return "*" in pat or "?" in pat or "[" in pat
+
+
+class _Flavour(object):
+
+ """A flavour implements a particular (platform-specific) set of path
+ semantics."""
+
+ def __init__(self):
+ self.join = self.sep.join
+
+ def parse_parts(self, parts):
+ if six.PY2:
+ parts = _py2_fsencode(parts)
+ parsed = []
+ sep = self.sep
+ altsep = self.altsep
+ drv = root = ''
+ it = reversed(parts)
+ for part in it:
+ if not part:
+ continue
+ if altsep:
+ part = part.replace(altsep, sep)
+ drv, root, rel = self.splitroot(part)
+ if sep in rel:
+ for x in reversed(rel.split(sep)):
+ if x and x != '.':
+ parsed.append(intern(x))
+ else:
+ if rel and rel != '.':
+ parsed.append(intern(rel))
+ if drv or root:
+ if not drv:
+ # If no drive is present, try to find one in the previous
+ # parts. This makes the result of parsing e.g.
+ # ("C:", "/", "a") reasonably intuitive.
+ for part in it:
+ if not part:
+ continue
+ if altsep:
+ part = part.replace(altsep, sep)
+ drv = self.splitroot(part)[0]
+ if drv:
+ break
+ break
+ if drv or root:
+ parsed.append(drv + root)
+ parsed.reverse()
+ return drv, root, parsed
+
+ def join_parsed_parts(self, drv, root, parts, drv2, root2, parts2):
+ """
+ Join the two paths represented by the respective
+ (drive, root, parts) tuples. Return a new (drive, root, parts) tuple.
+ """
+ if root2:
+ if not drv2 and drv:
+ return drv, root2, [drv + root2] + parts2[1:]
+ elif drv2:
+ if drv2 == drv or self.casefold(drv2) == self.casefold(drv):
+ # Same drive => second path is relative to the first
+ return drv, root, parts + parts2[1:]
+ else:
+ # Second path is non-anchored (common case)
+ return drv, root, parts + parts2
+ return drv2, root2, parts2
+
+
+class _WindowsFlavour(_Flavour):
+ # Reference for Windows paths can be found at
+ # http://msdn.microsoft.com/en-us/library/aa365247%28v=vs.85%29.aspx
+
+ sep = '\\'
+ altsep = '/'
+ has_drv = True
+ pathmod = ntpath
+
+ is_supported = (os.name == 'nt')
+
+ drive_letters = set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ')
+ ext_namespace_prefix = '\\\\?\\'
+
+ reserved_names = (
+ set(['CON', 'PRN', 'AUX', 'NUL']) |
+ set(['COM%d' % i for i in range(1, 10)]) |
+ set(['LPT%d' % i for i in range(1, 10)])
+ )
+
+ # Interesting findings about extended paths:
+ # - '\\?\c:\a', '//?/c:\a' and '//?/c:/a' are all supported
+ # but '\\?\c:/a' is not
+ # - extended paths are always absolute; "relative" extended paths will
+ # fail.
+
+ def splitroot(self, part, sep=sep):
+ first = part[0:1]
+ second = part[1:2]
+ if (second == sep and first == sep):
+ # XXX extended paths should also disable the collapsing of "."
+ # components (according to MSDN docs).
+ prefix, part = self._split_extended_path(part)
+ first = part[0:1]
+ second = part[1:2]
+ else:
+ prefix = ''
+ third = part[2:3]
+ if (second == sep and first == sep and third != sep):
+ # is a UNC path:
+ # vvvvvvvvvvvvvvvvvvvvv root
+ # \\machine\mountpoint\directory\etc\...
+ # directory ^^^^^^^^^^^^^^
+ index = part.find(sep, 2)
+ if index != -1:
+ index2 = part.find(sep, index + 1)
+ # a UNC path can't have two slashes in a row
+ # (after the initial two)
+ if index2 != index + 1:
+ if index2 == -1:
+ index2 = len(part)
+ if prefix:
+ return prefix + part[1:index2], sep, part[index2 + 1:]
+ else:
+ return part[:index2], sep, part[index2 + 1:]
+ drv = root = ''
+ if second == ':' and first in self.drive_letters:
+ drv = part[:2]
+ part = part[2:]
+ first = third
+ if first == sep:
+ root = first
+ part = part.lstrip(sep)
+ return prefix + drv, root, part
+
+ def casefold(self, s):
+ return s.lower()
+
+ def casefold_parts(self, parts):
+ return [p.lower() for p in parts]
+
+ def resolve(self, path, strict=False):
+ s = str(path)
+ if not s:
+ return os.getcwd()
+ previous_s = None
+ if _getfinalpathname is not None:
+ if strict:
+ return self._ext_to_normal(_getfinalpathname(s))
+ else:
+ # End of the path after the first one not found
+ tail_parts = []
+ while True:
+ try:
+ s = self._ext_to_normal(_getfinalpathname(s))
+ except FileNotFoundError:
+ previous_s = s
+ s, tail = os.path.split(s)
+ tail_parts.append(tail)
+ if previous_s == s:
+ return path
+ else:
+ return os.path.join(s, *reversed(tail_parts))
+ # Means fallback on absolute
+ return None
+
+ def _split_extended_path(self, s, ext_prefix=ext_namespace_prefix):
+ prefix = ''
+ if s.startswith(ext_prefix):
+ prefix = s[:4]
+ s = s[4:]
+ if s.startswith('UNC\\'):
+ prefix += s[:3]
+ s = '\\' + s[3:]
+ return prefix, s
+
+ def _ext_to_normal(self, s):
+ # Turn back an extended path into a normal DOS-like path
+ return self._split_extended_path(s)[1]
+
+ def is_reserved(self, parts):
+ # NOTE: the rules for reserved names seem somewhat complicated
+ # (e.g. r"..\NUL" is reserved but not r"foo\NUL").
+ # We err on the side of caution and return True for paths which are
+ # not considered reserved by Windows.
+ if not parts:
+ return False
+ if parts[0].startswith('\\\\'):
+ # UNC paths are never reserved
+ return False
+ return parts[-1].partition('.')[0].upper() in self.reserved_names
+
+ def make_uri(self, path):
+ # Under Windows, file URIs use the UTF-8 encoding.
+ drive = path.drive
+ if len(drive) == 2 and drive[1] == ':':
+ # It's a path on a local drive => 'file:///c:/a/b'
+ rest = path.as_posix()[2:].lstrip('/')
+ return 'file:///%s/%s' % (
+ drive, urlquote_from_bytes(rest.encode('utf-8')))
+ else:
+ # It's a path on a network drive => 'file://host/share/a/b'
+ return 'file:' + urlquote_from_bytes(
+ path.as_posix().encode('utf-8'))
+
+ def gethomedir(self, username):
+ if 'HOME' in os.environ:
+ userhome = os.environ['HOME']
+ elif 'USERPROFILE' in os.environ:
+ userhome = os.environ['USERPROFILE']
+ elif 'HOMEPATH' in os.environ:
+ try:
+ drv = os.environ['HOMEDRIVE']
+ except KeyError:
+ drv = ''
+ userhome = drv + os.environ['HOMEPATH']
+ else:
+ raise RuntimeError("Can't determine home directory")
+
+ if username:
+ # Try to guess user home directory. By default all users
+ # directories are located in the same place and are named by
+ # corresponding usernames. If current user home directory points
+ # to nonstandard place, this guess is likely wrong.
+ if os.environ['USERNAME'] != username:
+ drv, root, parts = self.parse_parts((userhome,))
+ if parts[-1] != os.environ['USERNAME']:
+ raise RuntimeError("Can't determine home directory "
+ "for %r" % username)
+ parts[-1] = username
+ if drv or root:
+ userhome = drv + root + self.join(parts[1:])
+ else:
+ userhome = self.join(parts)
+ return userhome
+
+
+class _PosixFlavour(_Flavour):
+ sep = '/'
+ altsep = ''
+ has_drv = False
+ pathmod = posixpath
+
+ is_supported = (os.name != 'nt')
+
+ def splitroot(self, part, sep=sep):
+ if part and part[0] == sep:
+ stripped_part = part.lstrip(sep)
+ # According to POSIX path resolution:
+ # http://pubs.opengroup.org/onlinepubs/009695399/basedefs/
+ # xbd_chap04.html#tag_04_11
+ # "A pathname that begins with two successive slashes may be
+ # interpreted in an implementation-defined manner, although more
+ # than two leading slashes shall be treated as a single slash".
+ if len(part) - len(stripped_part) == 2:
+ return '', sep * 2, stripped_part
+ else:
+ return '', sep, stripped_part
+ else:
+ return '', '', part
+
+ def casefold(self, s):
+ return s
+
+ def casefold_parts(self, parts):
+ return parts
+
+ def resolve(self, path, strict=False):
+ sep = self.sep
+ accessor = path._accessor
+ seen = {}
+
+ def _resolve(path, rest):
+ if rest.startswith(sep):
+ path = ''
+
+ for name in rest.split(sep):
+ if not name or name == '.':
+ # current dir
+ continue
+ if name == '..':
+ # parent dir
+ path, _, _ = path.rpartition(sep)
+ continue
+ newpath = path + sep + name
+ if newpath in seen:
+ # Already seen this path
+ path = seen[newpath]
+ if path is not None:
+ # use cached value
+ continue
+ # The symlink is not resolved, so we must have a symlink
+ # loop.
+ raise RuntimeError("Symlink loop from %r" % newpath)
+ # Resolve the symbolic link
+ try:
+ target = accessor.readlink(newpath)
+ except OSError as e:
+ if e.errno != EINVAL and strict:
+ raise
+ # Not a symlink, or non-strict mode. We just leave the path
+ # untouched.
+ path = newpath
+ else:
+ seen[newpath] = None # not resolved symlink
+ path = _resolve(path, target)
+ seen[newpath] = path # resolved symlink
+
+ return path
+ # NOTE: according to POSIX, getcwd() cannot contain path components
+ # which are symlinks.
+ base = '' if path.is_absolute() else os.getcwd()
+ return _resolve(base, str(path)) or sep
+
+ def is_reserved(self, parts):
+ return False
+
+ def make_uri(self, path):
+ # We represent the path using the local filesystem encoding,
+ # for portability to other applications.
+ bpath = bytes(path)
+ return 'file://' + urlquote_from_bytes(bpath)
+
+ def gethomedir(self, username):
+ if not username:
+ try:
+ return os.environ['HOME']
+ except KeyError:
+ import pwd
+ return pwd.getpwuid(os.getuid()).pw_dir
+ else:
+ import pwd
+ try:
+ return pwd.getpwnam(username).pw_dir
+ except KeyError:
+ raise RuntimeError("Can't determine home directory "
+ "for %r" % username)
+
+
+_windows_flavour = _WindowsFlavour()
+_posix_flavour = _PosixFlavour()
+
+
+class _Accessor:
+
+ """An accessor implements a particular (system-specific or not) way of
+ accessing paths on the filesystem."""
+
+
+class _NormalAccessor(_Accessor):
+
+ def _wrap_strfunc(strfunc):
+ @functools.wraps(strfunc)
+ def wrapped(pathobj, *args):
+ return strfunc(str(pathobj), *args)
+ return staticmethod(wrapped)
+
+ def _wrap_binary_strfunc(strfunc):
+ @functools.wraps(strfunc)
+ def wrapped(pathobjA, pathobjB, *args):
+ return strfunc(str(pathobjA), str(pathobjB), *args)
+ return staticmethod(wrapped)
+
+ stat = _wrap_strfunc(os.stat)
+
+ lstat = _wrap_strfunc(os.lstat)
+
+ open = _wrap_strfunc(os.open)
+
+ listdir = _wrap_strfunc(os.listdir)
+
+ scandir = _wrap_strfunc(os_scandir)
+
+ chmod = _wrap_strfunc(os.chmod)
+
+ if hasattr(os, "lchmod"):
+ lchmod = _wrap_strfunc(os.lchmod)
+ else:
+ def lchmod(self, pathobj, mode):
+ raise NotImplementedError("lchmod() not available on this system")
+
+ mkdir = _wrap_strfunc(os.mkdir)
+
+ unlink = _wrap_strfunc(os.unlink)
+
+ rmdir = _wrap_strfunc(os.rmdir)
+
+ rename = _wrap_binary_strfunc(os.rename)
+
+ if sys.version_info >= (3, 3):
+ replace = _wrap_binary_strfunc(os.replace)
+
+ if nt:
+ if supports_symlinks:
+ symlink = _wrap_binary_strfunc(os.symlink)
+ else:
+ def symlink(a, b, target_is_directory):
+ raise NotImplementedError(
+ "symlink() not available on this system")
+ else:
+ # Under POSIX, os.symlink() takes two args
+ @staticmethod
+ def symlink(a, b, target_is_directory):
+ return os.symlink(str(a), str(b))
+
+ utime = _wrap_strfunc(os.utime)
+
+ # Helper for resolve()
+ def readlink(self, path):
+ return os.readlink(path)
+
+
+_normal_accessor = _NormalAccessor()
+
+
+#
+# Globbing helpers
+#
+
+def _make_selector(pattern_parts):
+ pat = pattern_parts[0]
+ child_parts = pattern_parts[1:]
+ if pat == '**':
+ cls = _RecursiveWildcardSelector
+ elif '**' in pat:
+ raise ValueError(
+ "Invalid pattern: '**' can only be an entire path component")
+ elif _is_wildcard_pattern(pat):
+ cls = _WildcardSelector
+ else:
+ cls = _PreciseSelector
+ return cls(pat, child_parts)
+
+
+if hasattr(functools, "lru_cache"):
+ _make_selector = functools.lru_cache()(_make_selector)
+
+
+class _Selector:
+
+ """A selector matches a specific glob pattern part against the children
+ of a given path."""
+
+ def __init__(self, child_parts):
+ self.child_parts = child_parts
+ if child_parts:
+ self.successor = _make_selector(child_parts)
+ self.dironly = True
+ else:
+ self.successor = _TerminatingSelector()
+ self.dironly = False
+
+ def select_from(self, parent_path):
+ """Iterate over all child paths of `parent_path` matched by this
+ selector. This can contain parent_path itself."""
+ path_cls = type(parent_path)
+ is_dir = path_cls.is_dir
+ exists = path_cls.exists
+ scandir = parent_path._accessor.scandir
+ if not is_dir(parent_path):
+ return iter([])
+ return self._select_from(parent_path, is_dir, exists, scandir)
+
+
+class _TerminatingSelector:
+
+ def _select_from(self, parent_path, is_dir, exists, scandir):
+ yield parent_path
+
+
+class _PreciseSelector(_Selector):
+
+ def __init__(self, name, child_parts):
+ self.name = name
+ _Selector.__init__(self, child_parts)
+
+ def _select_from(self, parent_path, is_dir, exists, scandir):
+ def try_iter():
+ path = parent_path._make_child_relpath(self.name)
+ if (is_dir if self.dironly else exists)(path):
+ for p in self.successor._select_from(
+ path, is_dir, exists, scandir):
+ yield p
+
+ def except_iter(exc):
+ return
+ yield
+
+ for x in _try_except_permissionerror_iter(try_iter, except_iter):
+ yield x
+
+
+class _WildcardSelector(_Selector):
+
+ def __init__(self, pat, child_parts):
+ self.pat = re.compile(fnmatch.translate(pat))
+ _Selector.__init__(self, child_parts)
+
+ def _select_from(self, parent_path, is_dir, exists, scandir):
+ def try_iter():
+ cf = parent_path._flavour.casefold
+ entries = list(scandir(parent_path))
+ for entry in entries:
+ if not self.dironly or entry.is_dir():
+ name = entry.name
+ casefolded = cf(name)
+ if self.pat.match(casefolded):
+ path = parent_path._make_child_relpath(name)
+ for p in self.successor._select_from(
+ path, is_dir, exists, scandir):
+ yield p
+
+ def except_iter(exc):
+ return
+ yield
+
+ for x in _try_except_permissionerror_iter(try_iter, except_iter):
+ yield x
+
+
+class _RecursiveWildcardSelector(_Selector):
+
+ def __init__(self, pat, child_parts):
+ _Selector.__init__(self, child_parts)
+
+ def _iterate_directories(self, parent_path, is_dir, scandir):
+ yield parent_path
+
+ def try_iter():
+ entries = list(scandir(parent_path))
+ for entry in entries:
+ if entry.is_dir() and not entry.is_symlink():
+ path = parent_path._make_child_relpath(entry.name)
+ for p in self._iterate_directories(path, is_dir, scandir):
+ yield p
+
+ def except_iter(exc):
+ return
+ yield
+
+ for x in _try_except_permissionerror_iter(try_iter, except_iter):
+ yield x
+
+ def _select_from(self, parent_path, is_dir, exists, scandir):
+ def try_iter():
+ yielded = set()
+ try:
+ successor_select = self.successor._select_from
+ for starting_point in self._iterate_directories(
+ parent_path, is_dir, scandir):
+ for p in successor_select(
+ starting_point, is_dir, exists, scandir):
+ if p not in yielded:
+ yield p
+ yielded.add(p)
+ finally:
+ yielded.clear()
+
+ def except_iter(exc):
+ return
+ yield
+
+ for x in _try_except_permissionerror_iter(try_iter, except_iter):
+ yield x
+
+
+#
+# Public API
+#
+
+class _PathParents(Sequence):
+
+ """This object provides sequence-like access to the logical ancestors
+ of a path. Don't try to construct it yourself."""
+ __slots__ = ('_pathcls', '_drv', '_root', '_parts')
+
+ def __init__(self, path):
+ # We don't store the instance to avoid reference cycles
+ self._pathcls = type(path)
+ self._drv = path._drv
+ self._root = path._root
+ self._parts = path._parts
+
+ def __len__(self):
+ if self._drv or self._root:
+ return len(self._parts) - 1
+ else:
+ return len(self._parts)
+
+ def __getitem__(self, idx):
+ if idx < 0 or idx >= len(self):
+ raise IndexError(idx)
+ return self._pathcls._from_parsed_parts(self._drv, self._root,
+ self._parts[:-idx - 1])
+
+ def __repr__(self):
+ return "<{0}.parents>".format(self._pathcls.__name__)
+
+
+class PurePath(object):
+
+ """PurePath represents a filesystem path and offers operations which
+ don't imply any actual filesystem I/O. Depending on your system,
+ instantiating a PurePath will return either a PurePosixPath or a
+ PureWindowsPath object. You can also instantiate either of these classes
+ directly, regardless of your system.
+ """
+ __slots__ = (
+ '_drv', '_root', '_parts',
+ '_str', '_hash', '_pparts', '_cached_cparts',
+ )
+
+ def __new__(cls, *args):
+ """Construct a PurePath from one or several strings and or existing
+ PurePath objects. The strings and path objects are combined so as
+ to yield a canonicalized path, which is incorporated into the
+ new PurePath object.
+ """
+ if cls is PurePath:
+ cls = PureWindowsPath if os.name == 'nt' else PurePosixPath
+ return cls._from_parts(args)
+
+ def __reduce__(self):
+ # Using the parts tuple helps share interned path parts
+ # when pickling related paths.
+ return (self.__class__, tuple(self._parts))
+
+ @classmethod
+ def _parse_args(cls, args):
+ # This is useful when you don't want to create an instance, just
+ # canonicalize some constructor arguments.
+ parts = []
+ for a in args:
+ if isinstance(a, PurePath):
+ parts += a._parts
+ else:
+ if sys.version_info >= (3, 6):
+ a = os.fspath(a)
+ else:
+ # duck typing for older Python versions
+ if hasattr(a, "__fspath__"):
+ a = a.__fspath__()
+ if isinstance(a, str):
+ # Force-cast str subclasses to str (issue #21127)
+ parts.append(str(a))
+ # also handle unicode for PY2 (six.text_type = unicode)
+ elif six.PY2 and isinstance(a, six.text_type):
+ # cast to str using filesystem encoding
+ parts.append(a.encode(sys.getfilesystemencoding()))
+ else:
+ raise TypeError(
+ "argument should be a str object or an os.PathLike "
+ "object returning str, not %r"
+ % type(a))
+ return cls._flavour.parse_parts(parts)
+
+ @classmethod
+ def _from_parts(cls, args, init=True):
+ # We need to call _parse_args on the instance, so as to get the
+ # right flavour.
+ self = object.__new__(cls)
+ drv, root, parts = self._parse_args(args)
+ self._drv = drv
+ self._root = root
+ self._parts = parts
+ if init:
+ self._init()
+ return self
+
+ @classmethod
+ def _from_parsed_parts(cls, drv, root, parts, init=True):
+ self = object.__new__(cls)
+ self._drv = drv
+ self._root = root
+ self._parts = parts
+ if init:
+ self._init()
+ return self
+
+ @classmethod
+ def _format_parsed_parts(cls, drv, root, parts):
+ if drv or root:
+ return drv + root + cls._flavour.join(parts[1:])
+ else:
+ return cls._flavour.join(parts)
+
+ def _init(self):
+ # Overridden in concrete Path
+ pass
+
+ def _make_child(self, args):
+ drv, root, parts = self._parse_args(args)
+ drv, root, parts = self._flavour.join_parsed_parts(
+ self._drv, self._root, self._parts, drv, root, parts)
+ return self._from_parsed_parts(drv, root, parts)
+
+ def __str__(self):
+ """Return the string representation of the path, suitable for
+ passing to system calls."""
+ try:
+ return self._str
+ except AttributeError:
+ self._str = self._format_parsed_parts(self._drv, self._root,
+ self._parts) or '.'
+ return self._str
+
+ def __fspath__(self):
+ return str(self)
+
+ def as_posix(self):
+ """Return the string representation of the path with forward (/)
+ slashes."""
+ f = self._flavour
+ return str(self).replace(f.sep, '/')
+
+ def __bytes__(self):
+ """Return the bytes representation of the path. This is only
+ recommended to use under Unix."""
+ if sys.version_info < (3, 2):
+ raise NotImplementedError("needs Python 3.2 or later")
+ return os.fsencode(str(self))
+
+ def __repr__(self):
+ return "{0}({1!r})".format(self.__class__.__name__, self.as_posix())
+
+ def as_uri(self):
+ """Return the path as a 'file' URI."""
+ if not self.is_absolute():
+ raise ValueError("relative path can't be expressed as a file URI")
+ return self._flavour.make_uri(self)
+
+ @property
+ def _cparts(self):
+ # Cached casefolded parts, for hashing and comparison
+ try:
+ return self._cached_cparts
+ except AttributeError:
+ self._cached_cparts = self._flavour.casefold_parts(self._parts)
+ return self._cached_cparts
+
+ def __eq__(self, other):
+ if not isinstance(other, PurePath):
+ return NotImplemented
+ return (
+ self._cparts == other._cparts
+ and self._flavour is other._flavour)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ try:
+ return self._hash
+ except AttributeError:
+ self._hash = hash(tuple(self._cparts))
+ return self._hash
+
+ def __lt__(self, other):
+ if (not isinstance(other, PurePath)
+ or self._flavour is not other._flavour):
+ return NotImplemented
+ return self._cparts < other._cparts
+
+ def __le__(self, other):
+ if (not isinstance(other, PurePath)
+ or self._flavour is not other._flavour):
+ return NotImplemented
+ return self._cparts <= other._cparts
+
+ def __gt__(self, other):
+ if (not isinstance(other, PurePath)
+ or self._flavour is not other._flavour):
+ return NotImplemented
+ return self._cparts > other._cparts
+
+ def __ge__(self, other):
+ if (not isinstance(other, PurePath)
+ or self._flavour is not other._flavour):
+ return NotImplemented
+ return self._cparts >= other._cparts
+
+ drive = property(attrgetter('_drv'),
+ doc="""The drive prefix (letter or UNC path), if any.""")
+
+ root = property(attrgetter('_root'),
+ doc="""The root of the path, if any.""")
+
+ @property
+ def anchor(self):
+ """The concatenation of the drive and root, or ''."""
+ anchor = self._drv + self._root
+ return anchor
+
+ @property
+ def name(self):
+ """The final path component, if any."""
+ parts = self._parts
+ if len(parts) == (1 if (self._drv or self._root) else 0):
+ return ''
+ return parts[-1]
+
+ @property
+ def suffix(self):
+ """The final component's last suffix, if any."""
+ name = self.name
+ i = name.rfind('.')
+ if 0 < i < len(name) - 1:
+ return name[i:]
+ else:
+ return ''
+
+ @property
+ def suffixes(self):
+ """A list of the final component's suffixes, if any."""
+ name = self.name
+ if name.endswith('.'):
+ return []
+ name = name.lstrip('.')
+ return ['.' + suffix for suffix in name.split('.')[1:]]
+
+ @property
+ def stem(self):
+ """The final path component, minus its last suffix."""
+ name = self.name
+ i = name.rfind('.')
+ if 0 < i < len(name) - 1:
+ return name[:i]
+ else:
+ return name
+
+ def with_name(self, name):
+ """Return a new path with the file name changed."""
+ if not self.name:
+ raise ValueError("%r has an empty name" % (self,))
+ drv, root, parts = self._flavour.parse_parts((name,))
+ if (not name or name[-1] in [self._flavour.sep, self._flavour.altsep]
+ or drv or root or len(parts) != 1):
+ raise ValueError("Invalid name %r" % (name))
+ return self._from_parsed_parts(self._drv, self._root,
+ self._parts[:-1] + [name])
+
+ def with_suffix(self, suffix):
+ """Return a new path with the file suffix changed (or added, if
+ none).
+ """
+ # XXX if suffix is None, should the current suffix be removed?
+ f = self._flavour
+ if f.sep in suffix or f.altsep and f.altsep in suffix:
+ raise ValueError("Invalid suffix %r" % (suffix))
+ if suffix and not suffix.startswith('.') or suffix == '.':
+ raise ValueError("Invalid suffix %r" % (suffix))
+ name = self.name
+ if not name:
+ raise ValueError("%r has an empty name" % (self,))
+ old_suffix = self.suffix
+ if not old_suffix:
+ name = name + suffix
+ else:
+ name = name[:-len(old_suffix)] + suffix
+ return self._from_parsed_parts(self._drv, self._root,
+ self._parts[:-1] + [name])
+
+ def relative_to(self, *other):
+ """Return the relative path to another path identified by the passed
+ arguments. If the operation is not possible (because this is not
+ a subpath of the other path), raise ValueError.
+ """
+ # For the purpose of this method, drive and root are considered
+ # separate parts, i.e.:
+ # Path('c:/').relative_to('c:') gives Path('/')
+ # Path('c:/').relative_to('/') raise ValueError
+ if not other:
+ raise TypeError("need at least one argument")
+ parts = self._parts
+ drv = self._drv
+ root = self._root
+ if root:
+ abs_parts = [drv, root] + parts[1:]
+ else:
+ abs_parts = parts
+ to_drv, to_root, to_parts = self._parse_args(other)
+ if to_root:
+ to_abs_parts = [to_drv, to_root] + to_parts[1:]
+ else:
+ to_abs_parts = to_parts
+ n = len(to_abs_parts)
+ cf = self._flavour.casefold_parts
+ if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts):
+ formatted = self._format_parsed_parts(to_drv, to_root, to_parts)
+ raise ValueError("{0!r} does not start with {1!r}"
+ .format(str(self), str(formatted)))
+ return self._from_parsed_parts('', root if n == 1 else '',
+ abs_parts[n:])
+
+ @property
+ def parts(self):
+ """An object providing sequence-like access to the
+ components in the filesystem path."""
+ # We cache the tuple to avoid building a new one each time .parts
+ # is accessed. XXX is this necessary?
+ try:
+ return self._pparts
+ except AttributeError:
+ self._pparts = tuple(self._parts)
+ return self._pparts
+
+ def joinpath(self, *args):
+ """Combine this path with one or several arguments, and return a
+ new path representing either a subpath (if all arguments are relative
+ paths) or a totally different path (if one of the arguments is
+ anchored).
+ """
+ return self._make_child(args)
+
+ def __truediv__(self, key):
+ return self._make_child((key,))
+
+ def __rtruediv__(self, key):
+ return self._from_parts([key] + self._parts)
+
+ if six.PY2:
+ __div__ = __truediv__
+ __rdiv__ = __rtruediv__
+
+ @property
+ def parent(self):
+ """The logical parent of the path."""
+ drv = self._drv
+ root = self._root
+ parts = self._parts
+ if len(parts) == 1 and (drv or root):
+ return self
+ return self._from_parsed_parts(drv, root, parts[:-1])
+
+ @property
+ def parents(self):
+ """A sequence of this path's logical parents."""
+ return _PathParents(self)
+
+ def is_absolute(self):
+ """True if the path is absolute (has both a root and, if applicable,
+ a drive)."""
+ if not self._root:
+ return False
+ return not self._flavour.has_drv or bool(self._drv)
+
+ def is_reserved(self):
+ """Return True if the path contains one of the special names reserved
+ by the system, if any."""
+ return self._flavour.is_reserved(self._parts)
+
+ def match(self, path_pattern):
+ """
+ Return True if this path matches the given pattern.
+ """
+ cf = self._flavour.casefold
+ path_pattern = cf(path_pattern)
+ drv, root, pat_parts = self._flavour.parse_parts((path_pattern,))
+ if not pat_parts:
+ raise ValueError("empty pattern")
+ if drv and drv != cf(self._drv):
+ return False
+ if root and root != cf(self._root):
+ return False
+ parts = self._cparts
+ if drv or root:
+ if len(pat_parts) != len(parts):
+ return False
+ pat_parts = pat_parts[1:]
+ elif len(pat_parts) > len(parts):
+ return False
+ for part, pat in zip(reversed(parts), reversed(pat_parts)):
+ if not fnmatch.fnmatchcase(part, pat):
+ return False
+ return True
+
+
+# Can't subclass os.PathLike from PurePath and keep the constructor
+# optimizations in PurePath._parse_args().
+if sys.version_info >= (3, 6):
+ os.PathLike.register(PurePath)
+
+
+class PurePosixPath(PurePath):
+ _flavour = _posix_flavour
+ __slots__ = ()
+
+
+class PureWindowsPath(PurePath):
+ _flavour = _windows_flavour
+ __slots__ = ()
+
+
+# Filesystem-accessing classes
+
+
+class Path(PurePath):
+ __slots__ = (
+ '_accessor',
+ '_closed',
+ )
+
+ def __new__(cls, *args, **kwargs):
+ if cls is Path:
+ cls = WindowsPath if os.name == 'nt' else PosixPath
+ self = cls._from_parts(args, init=False)
+ if not self._flavour.is_supported:
+ raise NotImplementedError("cannot instantiate %r on your system"
+ % (cls.__name__,))
+ self._init()
+ return self
+
+ def _init(self,
+ # Private non-constructor arguments
+ template=None,
+ ):
+ self._closed = False
+ if template is not None:
+ self._accessor = template._accessor
+ else:
+ self._accessor = _normal_accessor
+
+ def _make_child_relpath(self, part):
+ # This is an optimization used for dir walking. `part` must be
+ # a single part relative to this path.
+ parts = self._parts + [part]
+ return self._from_parsed_parts(self._drv, self._root, parts)
+
+ def __enter__(self):
+ if self._closed:
+ self._raise_closed()
+ return self
+
+ def __exit__(self, t, v, tb):
+ self._closed = True
+
+ def _raise_closed(self):
+ raise ValueError("I/O operation on closed path")
+
+ def _opener(self, name, flags, mode=0o666):
+ # A stub for the opener argument to built-in open()
+ return self._accessor.open(self, flags, mode)
+
+ def _raw_open(self, flags, mode=0o777):
+ """
+        Open the file pointed to by this path and return a file descriptor,
+ as os.open() does.
+ """
+ if self._closed:
+ self._raise_closed()
+ return self._accessor.open(self, flags, mode)
+
+ # Public API
+
+ @classmethod
+ def cwd(cls):
+ """Return a new path pointing to the current working directory
+ (as returned by os.getcwd()).
+ """
+ return cls(os.getcwd())
+
+ @classmethod
+ def home(cls):
+ """Return a new path pointing to the user's home directory (as
+ returned by os.path.expanduser('~')).
+ """
+ return cls(cls()._flavour.gethomedir(None))
+
+ def samefile(self, other_path):
+        """Return whether other_path refers to the same file as this path
+ (as returned by os.path.samefile()).
+ """
+ if hasattr(os.path, "samestat"):
+ st = self.stat()
+ try:
+ other_st = other_path.stat()
+ except AttributeError:
+ other_st = os.stat(other_path)
+ return os.path.samestat(st, other_st)
+ else:
+ filename1 = six.text_type(self)
+ filename2 = six.text_type(other_path)
+ st1 = _win32_get_unique_path_id(filename1)
+ st2 = _win32_get_unique_path_id(filename2)
+ return st1 == st2
+
+ def iterdir(self):
+ """Iterate over the files in this directory. Does not yield any
+ result for the special paths '.' and '..'.
+ """
+ if self._closed:
+ self._raise_closed()
+ for name in self._accessor.listdir(self):
+ if name in ('.', '..'):
+ # Yielding a path object for these makes little sense
+ continue
+ yield self._make_child_relpath(name)
+ if self._closed:
+ self._raise_closed()
+
+ def glob(self, pattern):
+ """Iterate over this subtree and yield all existing files (of any
+ kind, including directories) matching the given pattern.
+ """
+ if not pattern:
+ raise ValueError("Unacceptable pattern: {0!r}".format(pattern))
+ pattern = self._flavour.casefold(pattern)
+ drv, root, pattern_parts = self._flavour.parse_parts((pattern,))
+ if drv or root:
+ raise NotImplementedError("Non-relative patterns are unsupported")
+ selector = _make_selector(tuple(pattern_parts))
+ for p in selector.select_from(self):
+ yield p
+
+ def rglob(self, pattern):
+ """Recursively yield all existing files (of any kind, including
+ directories) matching the given pattern, anywhere in this subtree.
+ """
+ pattern = self._flavour.casefold(pattern)
+ drv, root, pattern_parts = self._flavour.parse_parts((pattern,))
+ if drv or root:
+ raise NotImplementedError("Non-relative patterns are unsupported")
+ selector = _make_selector(("**",) + tuple(pattern_parts))
+ for p in selector.select_from(self):
+ yield p
+
+ def absolute(self):
+ """Return an absolute version of this path. This function works
+ even if the path doesn't point to anything.
+
+ No normalization is done, i.e. all '.' and '..' will be kept along.
+ Use resolve() to get the canonical path to a file.
+ """
+ # XXX untested yet!
+ if self._closed:
+ self._raise_closed()
+ if self.is_absolute():
+ return self
+ # FIXME this must defer to the specific flavour (and, under Windows,
+ # use nt._getfullpathname())
+ obj = self._from_parts([os.getcwd()] + self._parts, init=False)
+ obj._init(template=self)
+ return obj
+
+ def resolve(self, strict=False):
+ """
+ Make the path absolute, resolving all symlinks on the way and also
+ normalizing it (for example turning slashes into backslashes under
+ Windows).
+ """
+ if self._closed:
+ self._raise_closed()
+ s = self._flavour.resolve(self, strict=strict)
+ if s is None:
+ # No symlink resolution => for consistency, raise an error if
+ # the path doesn't exist or is forbidden
+ self.stat()
+ s = str(self.absolute())
+ # Now we have no symlinks in the path, it's safe to normalize it.
+ normed = self._flavour.pathmod.normpath(s)
+ obj = self._from_parts((normed,), init=False)
+ obj._init(template=self)
+ return obj
+
+ def stat(self):
+ """
+ Return the result of the stat() system call on this path, like
+ os.stat() does.
+ """
+ return self._accessor.stat(self)
+
+ def owner(self):
+ """
+ Return the login name of the file owner.
+ """
+ import pwd
+ return pwd.getpwuid(self.stat().st_uid).pw_name
+
+ def group(self):
+ """
+        Return the group name of the file's gid.
+ """
+ import grp
+ return grp.getgrgid(self.stat().st_gid).gr_name
+
+ def open(self, mode='r', buffering=-1, encoding=None,
+ errors=None, newline=None):
+ """
+        Open the file pointed to by this path and return a file object, as
+ the built-in open() function does.
+ """
+ if self._closed:
+ self._raise_closed()
+ if sys.version_info >= (3, 3):
+ return io.open(
+ str(self), mode, buffering, encoding, errors, newline,
+ opener=self._opener)
+ else:
+ return io.open(str(self), mode, buffering,
+ encoding, errors, newline)
+
+ def read_bytes(self):
+ """
+ Open the file in bytes mode, read it, and close the file.
+ """
+ with self.open(mode='rb') as f:
+ return f.read()
+
+ def read_text(self, encoding=None, errors=None):
+ """
+ Open the file in text mode, read it, and close the file.
+ """
+ with self.open(mode='r', encoding=encoding, errors=errors) as f:
+ return f.read()
+
+ def write_bytes(self, data):
+ """
+ Open the file in bytes mode, write to it, and close the file.
+ """
+ if not isinstance(data, six.binary_type):
+ raise TypeError(
+ 'data must be %s, not %s' %
+ (six.binary_type.__name__, data.__class__.__name__))
+ with self.open(mode='wb') as f:
+ return f.write(data)
+
+ def write_text(self, data, encoding=None, errors=None):
+ """
+ Open the file in text mode, write to it, and close the file.
+ """
+ if not isinstance(data, six.text_type):
+ raise TypeError(
+ 'data must be %s, not %s' %
+ (six.text_type.__name__, data.__class__.__name__))
+ with self.open(mode='w', encoding=encoding, errors=errors) as f:
+ return f.write(data)
+
+ def touch(self, mode=0o666, exist_ok=True):
+ """
+ Create this file with the given access mode, if it doesn't exist.
+ """
+ if self._closed:
+ self._raise_closed()
+ if exist_ok:
+ # First try to bump modification time
+ # Implementation note: GNU touch uses the UTIME_NOW option of
+ # the utimensat() / futimens() functions.
+ try:
+ self._accessor.utime(self, None)
+ except OSError:
+ # Avoid exception chaining
+ pass
+ else:
+ return
+ flags = os.O_CREAT | os.O_WRONLY
+ if not exist_ok:
+ flags |= os.O_EXCL
+ fd = self._raw_open(flags, mode)
+ os.close(fd)
+
+ def mkdir(self, mode=0o777, parents=False, exist_ok=False):
+ """
+ Create a new directory at this given path.
+ """
+ if self._closed:
+ self._raise_closed()
+
+ def _try_func():
+ self._accessor.mkdir(self, mode)
+
+ def _exc_func(exc):
+ if not parents or self.parent == self:
+ raise exc
+ self.parent.mkdir(parents=True, exist_ok=True)
+ self.mkdir(mode, parents=False, exist_ok=exist_ok)
+
+ try:
+ _try_except_filenotfounderror(_try_func, _exc_func)
+ except OSError:
+ if not exist_ok or not self.is_dir():
+ raise
+
+ def chmod(self, mode):
+ """
+ Change the permissions of the path, like os.chmod().
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.chmod(self, mode)
+
+ def lchmod(self, mode):
+ """
+ Like chmod(), except if the path points to a symlink, the symlink's
+ permissions are changed, rather than its target's.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.lchmod(self, mode)
+
+ def unlink(self):
+ """
+ Remove this file or link.
+ If the path is a directory, use rmdir() instead.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.unlink(self)
+
+ def rmdir(self):
+ """
+ Remove this directory. The directory must be empty.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.rmdir(self)
+
+ def lstat(self):
+ """
+ Like stat(), except if the path points to a symlink, the symlink's
+ status information is returned, rather than its target's.
+ """
+ if self._closed:
+ self._raise_closed()
+ return self._accessor.lstat(self)
+
+ def rename(self, target):
+ """
+ Rename this path to the given path.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.rename(self, target)
+
+ def replace(self, target):
+ """
+ Rename this path to the given path, clobbering the existing
+ destination if it exists.
+ """
+ if sys.version_info < (3, 3):
+ raise NotImplementedError("replace() is only available "
+ "with Python 3.3 and later")
+ if self._closed:
+ self._raise_closed()
+ self._accessor.replace(self, target)
+
+ def symlink_to(self, target, target_is_directory=False):
+ """
+ Make this path a symlink pointing to the given path.
+ Note the order of arguments (self, target) is the reverse of
+ os.symlink's.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.symlink(target, self, target_is_directory)
+
+ # Convenience functions for querying the stat results
+
+ def exists(self):
+ """
+ Whether this path exists.
+ """
+ try:
+ self.stat()
+ except OSError as e:
+ if e.errno not in (ENOENT, ENOTDIR):
+ raise
+ return False
+ return True
+
+ def is_dir(self):
+ """
+ Whether this path is a directory.
+ """
+ try:
+ return S_ISDIR(self.stat().st_mode)
+ except OSError as e:
+ if e.errno not in (ENOENT, ENOTDIR):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+
+ def is_file(self):
+ """
+ Whether this path is a regular file (also True for symlinks pointing
+ to regular files).
+ """
+ try:
+ return S_ISREG(self.stat().st_mode)
+ except OSError as e:
+ if e.errno not in (ENOENT, ENOTDIR):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+
+ def is_symlink(self):
+ """
+ Whether this path is a symbolic link.
+ """
+ try:
+ return S_ISLNK(self.lstat().st_mode)
+ except OSError as e:
+ if e.errno not in (ENOENT, ENOTDIR):
+ raise
+ # Path doesn't exist
+ return False
+
+ def is_block_device(self):
+ """
+ Whether this path is a block device.
+ """
+ try:
+ return S_ISBLK(self.stat().st_mode)
+ except OSError as e:
+ if e.errno not in (ENOENT, ENOTDIR):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+
+ def is_char_device(self):
+ """
+ Whether this path is a character device.
+ """
+ try:
+ return S_ISCHR(self.stat().st_mode)
+ except OSError as e:
+ if e.errno not in (ENOENT, ENOTDIR):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+
+ def is_fifo(self):
+ """
+ Whether this path is a FIFO.
+ """
+ try:
+ return S_ISFIFO(self.stat().st_mode)
+ except OSError as e:
+ if e.errno not in (ENOENT, ENOTDIR):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+
+ def is_socket(self):
+ """
+ Whether this path is a socket.
+ """
+ try:
+ return S_ISSOCK(self.stat().st_mode)
+ except OSError as e:
+ if e.errno not in (ENOENT, ENOTDIR):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+
+ def expanduser(self):
+ """ Return a new path with expanded ~ and ~user constructs
+ (as returned by os.path.expanduser)
+ """
+ if (not (self._drv or self._root)
+ and self._parts and self._parts[0][:1] == '~'):
+ homedir = self._flavour.gethomedir(self._parts[0][1:])
+ return self._from_parts([homedir] + self._parts[1:])
+
+ return self
+
+
+class PosixPath(Path, PurePosixPath):
+ __slots__ = ()
+
+
+class WindowsPath(Path, PureWindowsPath):
+ __slots__ = ()
+
+ def owner(self):
+ raise NotImplementedError("Path.owner() is unsupported on this system")
+
+ def group(self):
+ raise NotImplementedError("Path.group() is unsupported on this system")
diff --git a/third_party/python/pathlib2/requirements.txt b/third_party/python/pathlib2/requirements.txt
new file mode 100644
index 0000000000..9d43212790
--- /dev/null
+++ b/third_party/python/pathlib2/requirements.txt
@@ -0,0 +1,3 @@
+six
+scandir; python_version < '3.5'
+mock; python_version < '3.3'
diff --git a/third_party/python/pathlib2/setup.cfg b/third_party/python/pathlib2/setup.cfg
new file mode 100644
index 0000000000..81f27fcfa3
--- /dev/null
+++ b/third_party/python/pathlib2/setup.cfg
@@ -0,0 +1,13 @@
+[nosetests]
+with-coverage = 1
+cover-package = pathlib2
+cover-branches = 1
+cover-html = 1
+
+[wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/pathlib2/setup.py b/third_party/python/pathlib2/setup.py
new file mode 100644
index 0000000000..2004ab0be1
--- /dev/null
+++ b/third_party/python/pathlib2/setup.py
@@ -0,0 +1,49 @@
+# Copyright (c) 2014-2017 Matthias C. M. Troffaes
+# Copyright (c) 2012-2014 Antoine Pitrou and contributors
+# Distributed under the terms of the MIT License.
+
+import io
+from setuptools import setup, find_packages
+
+
+def readfile(filename):
+ with io.open(filename, encoding="utf-8") as stream:
+ return stream.read().split("\n")
+
+
+readme = readfile("README.rst")[5:] # skip title and badges
+version = readfile("VERSION")[0].strip()
+
+setup(
+ name='pathlib2',
+ version=version,
+ packages=find_packages(),
+ license='MIT',
+ description='Object-oriented filesystem paths',
+ long_description="\n".join(readme[2:]),
+ author='Matthias C. M. Troffaes',
+ author_email='matthias.troffaes@gmail.com',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: System :: Filesystems',
+ ],
+ download_url='https://pypi.python.org/pypi/pathlib2/',
+ url='https://pypi.python.org/pypi/pathlib2/',
+ install_requires=[
+ 'six',
+ 'scandir;python_version<"3.5"',
+ ],
+)
diff --git a/third_party/python/pathspec/CHANGES.rst b/third_party/python/pathspec/CHANGES.rst
new file mode 100644
index 0000000000..c92a56f537
--- /dev/null
+++ b/third_party/python/pathspec/CHANGES.rst
@@ -0,0 +1,202 @@
+
+Change History
+==============
+
+
+0.8.0 (2020-04-09)
+------------------
+
+- `Issue #30`_: Expose what patterns matched paths. Added `util.detailed_match_files()`.
+- `Issue #31`_: `match_tree()` doesn't return symlinks.
+- Add `PathSpec.match_tree_entries` and `util.iter_tree_entries()` to support directories and symlinks.
+- API change: `match_tree()` has been renamed to `match_tree_files()`. The old name `match_tree()` is still available as an alias.
+- API change: `match_tree_files()` now returns symlinks. This is a bug fix but it will change the returned results.
+
+.. _`Issue #30`: https://github.com/cpburnz/python-path-specification/issues/30
+.. _`Issue #31`: https://github.com/cpburnz/python-path-specification/issues/31
+
+
+0.7.0 (2019-12-27)
+------------------
+
+- `Issue #28`_: Add support for Python 3.8, and drop Python 3.4.
+- `Issue #29`_: Publish bdist wheel.
+
+.. _`Issue #28`: https://github.com/cpburnz/python-path-specification/pull/28
+.. _`Issue #29`: https://github.com/cpburnz/python-path-specification/pull/29
+
+
+0.6.0 (2019-10-03)
+------------------
+
+- `Issue #24`_: Drop support for Python 2.6, 3.2, and 3.3.
+- `Issue #25`_: Update README.rst.
+- `Issue #26`_: Method to escape gitwildmatch.
+
+.. _`Issue #24`: https://github.com/cpburnz/python-path-specification/pull/24
+.. _`Issue #25`: https://github.com/cpburnz/python-path-specification/pull/25
+.. _`Issue #26`: https://github.com/cpburnz/python-path-specification/pull/26
+
+
+0.5.9 (2018-09-15)
+------------------
+
+- Fixed file system error handling.
+
+
+0.5.8 (2018-09-15)
+------------------
+
+- Improved type checking.
+- Created scripts to test Python 2.6 because Tox removed support for it.
+- Improved byte string handling in Python 3.
+- `Issue #22`_: Handle dangling symlinks.
+
+.. _`Issue #22`: https://github.com/cpburnz/python-path-specification/issues/22
+
+
+0.5.7 (2018-08-14)
+------------------
+
+- `Issue #21`_: Fix collections deprecation warning.
+
+.. _`Issue #21`: https://github.com/cpburnz/python-path-specification/issues/21
+
+
+0.5.6 (2018-04-06)
+------------------
+
+- Improved unit tests.
+- Improved type checking.
+- `Issue #20`_: Support current directory prefix.
+
+.. _`Issue #20`: https://github.com/cpburnz/python-path-specification/issues/20
+
+
+0.5.5 (2017-09-09)
+------------------
+
+- Add documentation link to README.
+
+
+0.5.4 (2017-09-09)
+------------------
+
+- `Issue #17`_: Add link to Ruby implementation of *pathspec*.
+- Add sphinx documentation.
+
+.. _`Issue #17`: https://github.com/cpburnz/python-path-specification/pull/17
+
+
+0.5.3 (2017-07-01)
+------------------
+
+- `Issue #14`_: Fix byte strings for Python 3.
+- `Issue #15`_: Include "LICENSE" in source package.
+- `Issue #16`_: Support Python 2.6.
+
+.. _`Issue #14`: https://github.com/cpburnz/python-path-specification/issues/14
+.. _`Issue #15`: https://github.com/cpburnz/python-path-specification/pull/15
+.. _`Issue #16`: https://github.com/cpburnz/python-path-specification/issues/16
+
+
+0.5.2 (2017-04-04)
+------------------
+
+- Fixed change log.
+
+
+0.5.1 (2017-04-04)
+------------------
+
+- `Issue #13`_: Add equality methods to `PathSpec` and `RegexPattern`.
+
+.. _`Issue #13`: https://github.com/cpburnz/python-path-specification/pull/13
+
+
+0.5.0 (2016-08-22)
+------------------
+
+- `Issue #12`_: Add `PathSpec.match_file()`.
+- Renamed `gitignore.GitIgnorePattern` to `patterns.gitwildmatch.GitWildMatchPattern`.
+- Deprecated `gitignore.GitIgnorePattern`.
+
+.. _`Issue #12`: https://github.com/cpburnz/python-path-specification/issues/12
+
+
+0.4.0 (2016-07-15)
+------------------
+
+- `Issue #11`_: Support converting patterns into regular expressions without compiling them.
+- API change: Subclasses of `RegexPattern` should implement `pattern_to_regex()`.
+
+.. _`Issue #11`: https://github.com/cpburnz/python-path-specification/issues/11
+
+
+0.3.4 (2015-08-24)
+------------------
+
+- `Issue #7`_: Fixed non-recursive links.
+- `Issue #8`_: Fixed edge cases in gitignore patterns.
+- `Issue #9`_: Fixed minor usage documentation.
+- Fixed recursion detection.
+- Fixed trivial incompatibility with Python 3.2.
+
+.. _`Issue #7`: https://github.com/cpburnz/python-path-specification/pull/7
+.. _`Issue #8`: https://github.com/cpburnz/python-path-specification/pull/8
+.. _`Issue #9`: https://github.com/cpburnz/python-path-specification/pull/9
+
+
+0.3.3 (2014-11-21)
+------------------
+
+- Improved documentation.
+
+
+0.3.2 (2014-11-08)
+------------------
+
+- `Issue #5`_: Use tox for testing.
+- `Issue #6`_: Fixed matching Windows paths.
+- Improved documentation.
+- API change: `spec.match_tree()` and `spec.match_files()` now return iterators instead of sets.
+
+.. _`Issue #5`: https://github.com/cpburnz/python-path-specification/pull/5
+.. _`Issue #6`: https://github.com/cpburnz/python-path-specification/issues/6
+
+
+0.3.1 (2014-09-17)
+------------------
+
+- Updated README.
+
+
+0.3.0 (2014-09-17)
+------------------
+
+- `Issue #3`_: Fixed trailing slash in gitignore patterns.
+- `Issue #4`_: Fixed test for trailing slash in gitignore patterns.
+- Added registered patterns.
+
+.. _`Issue #3`: https://github.com/cpburnz/python-path-specification/pull/3
+.. _`Issue #4`: https://github.com/cpburnz/python-path-specification/pull/4
+
+
+0.2.2 (2013-12-17)
+------------------
+
+- Fixed setup.py.
+
+
+0.2.1 (2013-12-17)
+------------------
+
+- Added tests.
+- Fixed comment gitignore patterns.
+- Fixed relative path gitignore patterns.
+
+
+0.2.0 (2013-12-07)
+------------------
+
+- Initial release.
diff --git a/third_party/python/pathspec/LICENSE b/third_party/python/pathspec/LICENSE
new file mode 100644
index 0000000000..14e2f777f6
--- /dev/null
+++ b/third_party/python/pathspec/LICENSE
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+ means each individual or legal entity that creates, contributes to
+ the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+ means the combination of the Contributions of others (if any) used
+ by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+ means Source Code Form to which the initial Contributor has attached
+ the notice in Exhibit A, the Executable Form of such Source Code
+ Form, and Modifications of such Source Code Form, in each case
+ including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ (a) that the initial Contributor has attached the notice described
+ in Exhibit B to the Covered Software; or
+
+ (b) that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the
+ terms of a Secondary License.
+
+1.6. "Executable Form"
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+ means a work that combines Covered Software with other material, in
+ a separate file or files, that is not Covered Software.
+
+1.8. "License"
+ means this document.
+
+1.9. "Licensable"
+ means having the right to grant, to the maximum extent possible,
+ whether at the time of the initial grant or subsequently, any and
+ all of the rights conveyed by this License.
+
+1.10. "Modifications"
+ means any of the following:
+
+ (a) any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered
+ Software; or
+
+ (b) any new file in Source Code Form that contains any Covered
+ Software.
+
+1.11. "Patent Claims" of a Contributor
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the
+ License, by the making, using, selling, offering for sale, having
+ made, import, or transfer of either its Contributions or its
+ Contributor Version.
+
+1.12. "Secondary License"
+ means either the GNU General Public License, Version 2.0, the GNU
+ Lesser General Public License, Version 2.1, the GNU Affero General
+ Public License, Version 3.0, or any later versions of those
+ licenses.
+
+1.13. "Source Code Form"
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that
+ controls, is controlled by, or is under common control with You. For
+ purposes of this definition, "control" means (a) the power, direct
+ or indirect, to cause the direction or management of such entity,
+ whether by contract or otherwise, or (b) ownership of more than
+ fifty percent (50%) of the outstanding shares or beneficial
+ ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+ for sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+ or
+
+(b) for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+ Form, as described in Section 3.1, and You must inform recipients of
+ the Executable Form how they can obtain a copy of such Source Code
+ Form by reasonable means in a timely manner, at a charge no more
+ than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter
+ the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+* *
+* 6. Disclaimer of Warranty *
+* ------------------------- *
+* *
+* Covered Software is provided under this License on an "as is" *
+* basis, without warranty of any kind, either expressed, implied, or *
+* statutory, including, without limitation, warranties that the *
+* Covered Software is free of defects, merchantable, fit for a *
+* particular purpose or non-infringing. The entire risk as to the *
+* quality and performance of the Covered Software is with You. *
+* Should any Covered Software prove defective in any respect, You *
+* (not any Contributor) assume the cost of any necessary servicing, *
+* repair, or correction. This disclaimer of warranty constitutes an *
+* essential part of this License. No use of any Covered Software is *
+* authorized under this License except under this disclaimer. *
+* *
+************************************************************************
+
+************************************************************************
+* *
+* 7. Limitation of Liability *
+* -------------------------- *
+* *
+* Under no circumstances and under no legal theory, whether tort *
+* (including negligence), contract, or otherwise, shall any *
+* Contributor, or anyone who distributes Covered Software as *
+* permitted above, be liable to You for any direct, indirect, *
+* special, incidental, or consequential damages of any character *
+* including, without limitation, damages for lost profits, loss of *
+* goodwill, work stoppage, computer failure or malfunction, or any *
+* and all other commercial damages or losses, even if such party *
+* shall have been informed of the possibility of such damages. This *
+* limitation of liability shall not apply to liability for death or *
+* personal injury resulting from such party's negligence to the *
+* extent applicable law prohibits such limitation. Some *
+* jurisdictions do not allow the exclusion or limitation of *
+* incidental or consequential damages, so this exclusion and *
+* limitation may not apply to You. *
+* *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+ This Source Code Form is "Incompatible With Secondary Licenses", as
+ defined by the Mozilla Public License, v. 2.0.
diff --git a/third_party/python/pathspec/MANIFEST.in b/third_party/python/pathspec/MANIFEST.in
new file mode 100644
index 0000000000..9173153f57
--- /dev/null
+++ b/third_party/python/pathspec/MANIFEST.in
@@ -0,0 +1,2 @@
+include *.rst
+include LICENSE
diff --git a/third_party/python/pathspec/PKG-INFO b/third_party/python/pathspec/PKG-INFO
new file mode 100644
index 0000000000..6070abfe03
--- /dev/null
+++ b/third_party/python/pathspec/PKG-INFO
@@ -0,0 +1,380 @@
+Metadata-Version: 1.2
+Name: pathspec
+Version: 0.8.0
+Summary: Utility library for gitignore style pattern matching of file paths.
+Home-page: https://github.com/cpburnz/python-path-specification
+Author: Caleb P. Burns
+Author-email: cpburnz@gmail.com
+License: MPL 2.0
+Description: *pathspec*: Path Specification
+ ==============================
+
+ *pathspec* is a utility library for pattern matching of file paths. So
+ far this only includes Git's wildmatch pattern matching which itself is
+ derived from Rsync's wildmatch. Git uses wildmatch for its `gitignore`_
+ files.
+
+ .. _`gitignore`: http://git-scm.com/docs/gitignore
+
+
+ Tutorial
+ --------
+
+ Say you have a "Projects" directory and you want to back it up, but only
+ certain files, and ignore others depending on certain conditions::
+
+ >>> import pathspec
+ >>> # The gitignore-style patterns for files to select, but we're including
+ >>> # instead of ignoring.
+ >>> spec = """
+ ...
+ ... # This is a comment because the line begins with a hash: "#"
+ ...
+ ... # Include several project directories (and all descendants) relative to
+ ... # the current directory. To reference a directory you must end with a
+ ... # slash: "/"
+ ... /project-a/
+ ... /project-b/
+ ... /project-c/
+ ...
+ ... # Patterns can be negated by prefixing with exclamation mark: "!"
+ ...
+ ... # Ignore temporary files beginning or ending with "~" and ending with
+ ... # ".swp".
+ ... !~*
+ ... !*~
+ ... !*.swp
+ ...
+ ... # These are python projects so ignore compiled python files from
+ ... # testing.
+ ... !*.pyc
+ ...
+ ... # Ignore the build directories but only directly under the project
+ ... # directories.
+ ... !/*/build/
+ ...
+ ... """
+
+ We want to use the ``GitWildMatchPattern`` class to compile our patterns. The
+ ``PathSpec`` class provides an interface around pattern implementations::
+
+ >>> spec = pathspec.PathSpec.from_lines(pathspec.patterns.GitWildMatchPattern, spec.splitlines())
+
+ That may be a mouthful but it allows for additional patterns to be implemented
+ in the future without them having to deal with anything but matching the paths
+ sent to them. ``GitWildMatchPattern`` is the implementation of the actual
+ pattern which internally gets converted into a regular expression.
+ ``PathSpec`` is a simple wrapper around a list of compiled patterns.
+
+ To make things simpler, we can use the registered name for a pattern class
+ instead of always having to provide a reference to the class itself. The
+ ``GitWildMatchPattern`` class is registered as **gitwildmatch**::
+
+ >>> spec = pathspec.PathSpec.from_lines('gitwildmatch', spec.splitlines())
+
+ If we wanted to manually compile the patterns we can just do the following::
+
+ >>> patterns = map(pathspec.patterns.GitWildMatchPattern, spec.splitlines())
+            >>> spec = pathspec.PathSpec(patterns)
+
+ ``PathSpec.from_lines()`` is simply a class method which does just that.
+
+ If you want to load the patterns from file, you can pass the file instance
+ directly as well::
+
+            >>> with open('patterns.list', 'r') as fh:
+            ...     spec = pathspec.PathSpec.from_lines('gitwildmatch', fh)
+
+ You can perform matching on a whole directory tree with::
+
+ >>> matches = spec.match_tree('path/to/directory')
+
+ Or you can perform matching on a specific set of file paths with::
+
+ >>> matches = spec.match_files(file_paths)
+
+ Or check to see if an individual file matches::
+
+ >>> is_matched = spec.match_file(file_path)
+
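+        A hypothetical end-to-end sketch of ``match_tree()`` (the directory and
+        file names below are invented for illustration)::
+
+            >>> import os, tempfile, pathspec
+            >>> spec = pathspec.PathSpec.from_lines('gitwildmatch', ['*.py', '!*_test.py'])
+            >>> root = tempfile.mkdtemp()
+            >>> for name in ('app.py', 'app_test.py', 'notes.txt'):
+            ...     open(os.path.join(root, name), 'w').close()
+            >>> sorted(spec.match_tree(root))
+            ['app.py']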
+
+ License
+ -------
+
+ *pathspec* is licensed under the `Mozilla Public License Version 2.0`_. See
+ `LICENSE`_ or the `FAQ`_ for more information.
+
+ In summary, you may use *pathspec* with any closed or open source project
+ without affecting the license of the larger work so long as you:
+
+ - give credit where credit is due,
+
+ - and release any custom changes made to *pathspec*.
+
+ .. _`Mozilla Public License Version 2.0`: http://www.mozilla.org/MPL/2.0
+ .. _`LICENSE`: LICENSE
+ .. _`FAQ`: http://www.mozilla.org/MPL/2.0/FAQ.html
+
+
+ Source
+ ------
+
+ The source code for *pathspec* is available from the GitHub repo
+ `cpburnz/python-path-specification`_.
+
+ .. _`cpburnz/python-path-specification`: https://github.com/cpburnz/python-path-specification
+
+
+ Installation
+ ------------
+
+ *pathspec* requires the following packages:
+
+ - `setuptools`_
+
+ *pathspec* can be installed from source with::
+
+ python setup.py install
+
+ *pathspec* is also available for install through `PyPI`_::
+
+ pip install pathspec
+
+ .. _`setuptools`: https://pypi.python.org/pypi/setuptools
+ .. _`PyPI`: http://pypi.python.org/pypi/pathspec
+
+
+ Documentation
+ -------------
+
+ Documentation for *pathspec* is available on `Read the Docs`_.
+
+ .. _`Read the Docs`: http://python-path-specification.readthedocs.io
+
+
+ Other Languages
+ ---------------
+
+ *pathspec* is also available as a `Ruby gem`_.
+
+ .. _`Ruby gem`: https://github.com/highb/pathspec-ruby
+
+ Change History
+ ==============
+
+
+ 0.8.0 (2020-04-09)
+ ------------------
+
+ - `Issue #30`_: Expose what patterns matched paths. Added `util.detailed_match_files()`.
+ - `Issue #31`_: `match_tree()` doesn't return symlinks.
+ - Add `PathSpec.match_tree_entries` and `util.iter_tree_entries()` to support directories and symlinks.
+ - API change: `match_tree()` has been renamed to `match_tree_files()`. The old name `match_tree()` is still available as an alias.
+ - API change: `match_tree_files()` now returns symlinks. This is a bug fix but it will change the returned results.
+
+ .. _`Issue #30`: https://github.com/cpburnz/python-path-specification/issues/30
+ .. _`Issue #31`: https://github.com/cpburnz/python-path-specification/issues/31
+
+
+ 0.7.0 (2019-12-27)
+ ------------------
+
+ - `Issue #28`_: Add support for Python 3.8, and drop Python 3.4.
+ - `Issue #29`_: Publish bdist wheel.
+
+ .. _`Issue #28`: https://github.com/cpburnz/python-path-specification/pull/28
+ .. _`Issue #29`: https://github.com/cpburnz/python-path-specification/pull/29
+
+
+ 0.6.0 (2019-10-03)
+ ------------------
+
+ - `Issue #24`_: Drop support for Python 2.6, 3.2, and 3.3.
+ - `Issue #25`_: Update README.rst.
+ - `Issue #26`_: Method to escape gitwildmatch.
+
+ .. _`Issue #24`: https://github.com/cpburnz/python-path-specification/pull/24
+ .. _`Issue #25`: https://github.com/cpburnz/python-path-specification/pull/25
+ .. _`Issue #26`: https://github.com/cpburnz/python-path-specification/pull/26
+
+
+ 0.5.9 (2018-09-15)
+ ------------------
+
+ - Fixed file system error handling.
+
+
+ 0.5.8 (2018-09-15)
+ ------------------
+
+ - Improved type checking.
+ - Created scripts to test Python 2.6 because Tox removed support for it.
+ - Improved byte string handling in Python 3.
+ - `Issue #22`_: Handle dangling symlinks.
+
+ .. _`Issue #22`: https://github.com/cpburnz/python-path-specification/issues/22
+
+
+ 0.5.7 (2018-08-14)
+ ------------------
+
+ - `Issue #21`_: Fix collections deprecation warning.
+
+ .. _`Issue #21`: https://github.com/cpburnz/python-path-specification/issues/21
+
+
+ 0.5.6 (2018-04-06)
+ ------------------
+
+ - Improved unit tests.
+ - Improved type checking.
+ - `Issue #20`_: Support current directory prefix.
+
+ .. _`Issue #20`: https://github.com/cpburnz/python-path-specification/issues/20
+
+
+ 0.5.5 (2017-09-09)
+ ------------------
+
+ - Add documentation link to README.
+
+
+ 0.5.4 (2017-09-09)
+ ------------------
+
+ - `Issue #17`_: Add link to Ruby implementation of *pathspec*.
+ - Add sphinx documentation.
+
+ .. _`Issue #17`: https://github.com/cpburnz/python-path-specification/pull/17
+
+
+ 0.5.3 (2017-07-01)
+ ------------------
+
+ - `Issue #14`_: Fix byte strings for Python 3.
+ - `Issue #15`_: Include "LICENSE" in source package.
+ - `Issue #16`_: Support Python 2.6.
+
+ .. _`Issue #14`: https://github.com/cpburnz/python-path-specification/issues/14
+ .. _`Issue #15`: https://github.com/cpburnz/python-path-specification/pull/15
+ .. _`Issue #16`: https://github.com/cpburnz/python-path-specification/issues/16
+
+
+ 0.5.2 (2017-04-04)
+ ------------------
+
+ - Fixed change log.
+
+
+ 0.5.1 (2017-04-04)
+ ------------------
+
+ - `Issue #13`_: Add equality methods to `PathSpec` and `RegexPattern`.
+
+ .. _`Issue #13`: https://github.com/cpburnz/python-path-specification/pull/13
+
+
+ 0.5.0 (2016-08-22)
+ ------------------
+
+ - `Issue #12`_: Add `PathSpec.match_file()`.
+ - Renamed `gitignore.GitIgnorePattern` to `patterns.gitwildmatch.GitWildMatchPattern`.
+ - Deprecated `gitignore.GitIgnorePattern`.
+
+ .. _`Issue #12`: https://github.com/cpburnz/python-path-specification/issues/12
+
+
+ 0.4.0 (2016-07-15)
+ ------------------
+
+ - `Issue #11`_: Support converting patterns into regular expressions without compiling them.
+ - API change: Subclasses of `RegexPattern` should implement `pattern_to_regex()`.
+
+ .. _`Issue #11`: https://github.com/cpburnz/python-path-specification/issues/11
+
+
+ 0.3.4 (2015-08-24)
+ ------------------
+
+ - `Issue #7`_: Fixed non-recursive links.
+ - `Issue #8`_: Fixed edge cases in gitignore patterns.
+ - `Issue #9`_: Fixed minor usage documentation.
+ - Fixed recursion detection.
+ - Fixed trivial incompatibility with Python 3.2.
+
+ .. _`Issue #7`: https://github.com/cpburnz/python-path-specification/pull/7
+ .. _`Issue #8`: https://github.com/cpburnz/python-path-specification/pull/8
+ .. _`Issue #9`: https://github.com/cpburnz/python-path-specification/pull/9
+
+
+ 0.3.3 (2014-11-21)
+ ------------------
+
+ - Improved documentation.
+
+
+ 0.3.2 (2014-11-08)
+ ------------------
+
+ - `Issue #5`_: Use tox for testing.
+ - `Issue #6`_: Fixed matching Windows paths.
+ - Improved documentation.
+ - API change: `spec.match_tree()` and `spec.match_files()` now return iterators instead of sets.
+
+ .. _`Issue #5`: https://github.com/cpburnz/python-path-specification/pull/5
+ .. _`Issue #6`: https://github.com/cpburnz/python-path-specification/issues/6
+
+
+ 0.3.1 (2014-09-17)
+ ------------------
+
+ - Updated README.
+
+
+ 0.3.0 (2014-09-17)
+ ------------------
+
+ - `Issue #3`_: Fixed trailing slash in gitignore patterns.
+ - `Issue #4`_: Fixed test for trailing slash in gitignore patterns.
+ - Added registered patterns.
+
+ .. _`Issue #3`: https://github.com/cpburnz/python-path-specification/pull/3
+ .. _`Issue #4`: https://github.com/cpburnz/python-path-specification/pull/4
+
+
+ 0.2.2 (2013-12-17)
+ ------------------
+
+ - Fixed setup.py.
+
+
+ 0.2.1 (2013-12-17)
+ ------------------
+
+ - Added tests.
+ - Fixed comment gitignore patterns.
+ - Fixed relative path gitignore patterns.
+
+
+ 0.2.0 (2013-12-07)
+ ------------------
+
+ - Initial release.
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Utilities
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
diff --git a/third_party/python/pathspec/README.rst b/third_party/python/pathspec/README.rst
new file mode 100644
index 0000000000..e8ac70a14a
--- /dev/null
+++ b/third_party/python/pathspec/README.rst
@@ -0,0 +1,153 @@
+
+*pathspec*: Path Specification
+==============================
+
+*pathspec* is a utility library for pattern matching of file paths. So
+far this only includes Git's wildmatch pattern matching which itself is
+derived from Rsync's wildmatch. Git uses wildmatch for its `gitignore`_
+files.
+
+.. _`gitignore`: http://git-scm.com/docs/gitignore
+
+
+Tutorial
+--------
+
+Say you have a "Projects" directory and you want to back it up, but only
+certain files, and ignore others depending on certain conditions::
+
+ >>> import pathspec
+ >>> # The gitignore-style patterns for files to select, but we're including
+ >>> # instead of ignoring.
+ >>> spec = """
+ ...
+ ... # This is a comment because the line begins with a hash: "#"
+ ...
+ ... # Include several project directories (and all descendants) relative to
+ ... # the current directory. To reference a directory you must end with a
+ ... # slash: "/"
+ ... /project-a/
+ ... /project-b/
+ ... /project-c/
+ ...
+ ... # Patterns can be negated by prefixing with exclamation mark: "!"
+ ...
+ ... # Ignore temporary files beginning or ending with "~" and ending with
+ ... # ".swp".
+ ... !~*
+ ... !*~
+ ... !*.swp
+ ...
+ ... # These are python projects so ignore compiled python files from
+ ... # testing.
+ ... !*.pyc
+ ...
+ ... # Ignore the build directories but only directly under the project
+ ... # directories.
+ ... !/*/build/
+ ...
+ ... """
+
+We want to use the ``GitWildMatchPattern`` class to compile our patterns. The
+``PathSpec`` class provides an interface around pattern implementations::
+
+ >>> spec = pathspec.PathSpec.from_lines(pathspec.patterns.GitWildMatchPattern, spec.splitlines())
+
+That may be a mouthful but it allows for additional patterns to be implemented
+in the future without them having to deal with anything but matching the paths
+sent to them. ``GitWildMatchPattern`` is the implementation of the actual
+pattern which internally gets converted into a regular expression.
+``PathSpec`` is a simple wrapper around a list of compiled patterns.
+
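+For instance, the regular expression a single pattern compiles down to can be
+inspected directly (a sketch; the exact regular expression text is an
+implementation detail and may differ between versions)::
+
+    >>> regex, include = pathspec.patterns.GitWildMatchPattern.pattern_to_regex('*.py')
+    >>> include
+    True
+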
+To make things simpler, we can use the registered name for a pattern class
+instead of always having to provide a reference to the class itself. The
+``GitWildMatchPattern`` class is registered as **gitwildmatch**::
+
+ >>> spec = pathspec.PathSpec.from_lines('gitwildmatch', spec.splitlines())
+
+If we wanted to manually compile the patterns we can just do the following::
+
+ >>> patterns = map(pathspec.patterns.GitWildMatchPattern, spec.splitlines())
+    >>> spec = pathspec.PathSpec(patterns)
+
+``PathSpec.from_lines()`` is simply a class method which does just that.
+
+If you want to load the patterns from file, you can pass the file instance
+directly as well::
+
+    >>> with open('patterns.list', 'r') as fh:
+    ...     spec = pathspec.PathSpec.from_lines('gitwildmatch', fh)
+
+You can perform matching on a whole directory tree with::
+
+ >>> matches = spec.match_tree('path/to/directory')
+
+Or you can perform matching on a specific set of file paths with::
+
+ >>> matches = spec.match_files(file_paths)
+
+Or check to see if an individual file matches::
+
+ >>> is_matched = spec.match_file(file_path)
+
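+Putting the pieces together, here is a short, self-contained sketch (the
+pattern lines and file names are invented for illustration and assume
+*pathspec* 0.8.x)::
+
+    >>> import pathspec
+    >>> spec = pathspec.PathSpec.from_lines('gitwildmatch', [
+    ...     '*.py',        # select Python sources...
+    ...     '!*_test.py',  # ...but not test modules
+    ... ])
+    >>> spec.match_file('app.py')
+    True
+    >>> spec.match_file('app_test.py')
+    False
+    >>> list(spec.match_files(['app.py', 'app_test.py', 'notes.txt']))
+    ['app.py']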
+
+License
+-------
+
+*pathspec* is licensed under the `Mozilla Public License Version 2.0`_. See
+`LICENSE`_ or the `FAQ`_ for more information.
+
+In summary, you may use *pathspec* with any closed or open source project
+without affecting the license of the larger work so long as you:
+
+- give credit where credit is due,
+
+- and release any custom changes made to *pathspec*.
+
+.. _`Mozilla Public License Version 2.0`: http://www.mozilla.org/MPL/2.0
+.. _`LICENSE`: LICENSE
+.. _`FAQ`: http://www.mozilla.org/MPL/2.0/FAQ.html
+
+
+Source
+------
+
+The source code for *pathspec* is available from the GitHub repo
+`cpburnz/python-path-specification`_.
+
+.. _`cpburnz/python-path-specification`: https://github.com/cpburnz/python-path-specification
+
+
+Installation
+------------
+
+*pathspec* requires the following packages:
+
+- `setuptools`_
+
+*pathspec* can be installed from source with::
+
+ python setup.py install
+
+*pathspec* is also available for install through `PyPI`_::
+
+ pip install pathspec
+
+.. _`setuptools`: https://pypi.python.org/pypi/setuptools
+.. _`PyPI`: http://pypi.python.org/pypi/pathspec
+
+
+Documentation
+-------------
+
+Documentation for *pathspec* is available on `Read the Docs`_.
+
+.. _`Read the Docs`: http://python-path-specification.readthedocs.io
+
+
+Other Languages
+---------------
+
+*pathspec* is also available as a `Ruby gem`_.
+
+.. _`Ruby gem`: https://github.com/highb/pathspec-ruby
diff --git a/third_party/python/pathspec/pathspec/__init__.py b/third_party/python/pathspec/pathspec/__init__.py
new file mode 100644
index 0000000000..2400402197
--- /dev/null
+++ b/third_party/python/pathspec/pathspec/__init__.py
@@ -0,0 +1,68 @@
+# encoding: utf-8
+"""
+The *pathspec* package provides pattern matching for file paths. So far
+this only includes Git's wildmatch pattern matching (the style used for
+".gitignore" files).
+
+The following classes are imported and made available from the root of
+the `pathspec` package:
+
+- :class:`pathspec.pathspec.PathSpec`
+
+- :class:`pathspec.pattern.Pattern`
+
+- :class:`pathspec.pattern.RegexPattern`
+
+- :class:`pathspec.util.RecursionError`
+
+The following functions are also imported:
+
+- :func:`pathspec.util.iter_tree`
+- :func:`pathspec.util.lookup_pattern`
+- :func:`pathspec.util.match_files`
+"""
+from __future__ import unicode_literals
+
+__author__ = "Caleb P. Burns"
+__copyright__ = "Copyright © 2013-2020 Caleb P. Burns"
+__created__ = "2013-10-12"
+__credits__ = [
+ "dahlia <https://github.com/dahlia>",
+ "highb <https://github.com/highb>",
+ "029xue <https://github.com/029xue>",
+ "mikexstudios <https://github.com/mikexstudios>",
+ "nhumrich <https://github.com/nhumrich>",
+ "davidfraser <https://github.com/davidfraser>",
+ "demurgos <https://github.com/demurgos>",
+ "ghickman <https://github.com/ghickman>",
+ "nvie <https://github.com/nvie>",
+ "adrienverge <https://github.com/adrienverge>",
+ "AndersBlomdell <https://github.com/AndersBlomdell>",
+ "highb <https://github.com/highb>",
+ "thmxv <https://github.com/thmxv>",
+ "wimglenn <https://github.com/wimglenn>",
+ "hugovk <https://github.com/hugovk>",
+ "dcecile <https://github.com/dcecile>",
+ "mroutis <https://github.com/mroutis>",
+ "jdufresne <https://github.com/jdufresne>",
+ "groodt <https://github.com/groodt>",
+ "ftrofin <https://github.com/ftrofin>",
+ "pykong <https://github.com/pykong>",
+]
+__email__ = "cpburnz@gmail.com"
+__license__ = "MPL 2.0"
+__project__ = "pathspec"
+__status__ = "Development"
+__updated__ = "2020-04-09"
+__version__ = "0.8.0"
+
+from .pathspec import PathSpec
+from .pattern import Pattern, RegexPattern
+from .util import iter_tree, lookup_pattern, match_files, RecursionError
+
+# Load pattern implementations.
+from . import patterns
+
+# Expose `GitIgnorePattern` class in the root module for backward
+# compatibility with v0.4.
+from .patterns.gitwildmatch import GitIgnorePattern
diff --git a/third_party/python/pathspec/pathspec/compat.py b/third_party/python/pathspec/pathspec/compat.py
new file mode 100644
index 0000000000..37c6480510
--- /dev/null
+++ b/third_party/python/pathspec/pathspec/compat.py
@@ -0,0 +1,38 @@
+# encoding: utf-8
+"""
+This module provides compatibility between Python 2 and 3. Too little
+of it is needed by this project to justify depending on `six`_.
+
+.. _`six`: http://pythonhosted.org/six
+"""
+
+import sys
+
+if sys.version_info[0] < 3:
+ # Python 2.
+ unicode = unicode
+ string_types = (basestring,)
+
+ from collections import Iterable
+ from itertools import izip_longest
+
+ def iterkeys(mapping):
+ return mapping.iterkeys()
+
+else:
+ # Python 3.
+ unicode = str
+ string_types = (unicode,)
+
+ from collections.abc import Iterable
+ from itertools import zip_longest as izip_longest
+
+ def iterkeys(mapping):
+ return mapping.keys()
+
+try:
+ # Python 3.6+.
+ from collections.abc import Collection
+except ImportError:
+ # Python 2.7 - 3.5.
+ from collections import Container as Collection
diff --git a/third_party/python/pathspec/pathspec/pathspec.py b/third_party/python/pathspec/pathspec/pathspec.py
new file mode 100644
index 0000000000..be058ffc87
--- /dev/null
+++ b/third_party/python/pathspec/pathspec/pathspec.py
@@ -0,0 +1,185 @@
+# encoding: utf-8
+"""
+This module provides an object oriented interface for pattern matching
+of files.
+"""
+
+from . import util
+from .compat import Collection, iterkeys, izip_longest, string_types, unicode
+
+
+class PathSpec(object):
+ """
+ The :class:`PathSpec` class is a wrapper around a list of compiled
+ :class:`.Pattern` instances.
+ """
+
+ def __init__(self, patterns):
+ """
+ Initializes the :class:`PathSpec` instance.
+
+ *patterns* (:class:`~collections.abc.Collection` or :class:`~collections.abc.Iterable`)
+ yields each compiled pattern (:class:`.Pattern`).
+ """
+
+ self.patterns = patterns if isinstance(patterns, Collection) else list(patterns)
+ """
+ *patterns* (:class:`~collections.abc.Collection` of :class:`.Pattern`)
+ contains the compiled patterns.
+ """
+
+ def __eq__(self, other):
+ """
+ Tests the equality of this path-spec with *other* (:class:`PathSpec`)
+ by comparing their :attr:`~PathSpec.patterns` attributes.
+ """
+ if isinstance(other, PathSpec):
+ paired_patterns = izip_longest(self.patterns, other.patterns)
+ return all(a == b for a, b in paired_patterns)
+ else:
+ return NotImplemented
+
+ def __len__(self):
+ """
+ Returns the number of compiled patterns this path-spec contains
+ (:class:`int`).
+ """
+ return len(self.patterns)
+
+ @classmethod
+ def from_lines(cls, pattern_factory, lines):
+ """
+ Compiles the pattern lines.
+
+ *pattern_factory* can be either the name of a registered pattern
+ factory (:class:`str`), or a :class:`~collections.abc.Callable` used
+ to compile patterns. It must accept an uncompiled pattern (:class:`str`)
+ and return the compiled pattern (:class:`.Pattern`).
+
+ *lines* (:class:`~collections.abc.Iterable`) yields each uncompiled
+ pattern (:class:`str`). This simply has to yield each line so it can
+ be a :class:`file` (e.g., from :func:`open` or :class:`io.StringIO`)
+ or the result from :meth:`str.splitlines`.
+
+ Returns the :class:`PathSpec` instance.
+ """
+ if isinstance(pattern_factory, string_types):
+ pattern_factory = util.lookup_pattern(pattern_factory)
+ if not callable(pattern_factory):
+ raise TypeError("pattern_factory:{!r} is not callable.".format(pattern_factory))
+
+ if not util._is_iterable(lines):
+ raise TypeError("lines:{!r} is not an iterable.".format(lines))
+
+ lines = [pattern_factory(line) for line in lines if line]
+ return cls(lines)
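+
+ # A usage sketch (illustrative only; the pattern lines are made up):
+ #
+ #   spec = PathSpec.from_lines('gitwildmatch', ['*.pyc', '!keep.pyc'])
+ #   spec.match_file('cache/module.pyc')  # -> True
+ #   spec.match_file('keep.pyc')          # -> False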
+
+ def match_file(self, file, separators=None):
+ """
+ Matches the file to this path-spec.
+
+ *file* (:class:`str` or :class:`~pathlib.PurePath`) is the file path
+ to be matched against :attr:`self.patterns <PathSpec.patterns>`.
+
+ *separators* (:class:`~collections.abc.Collection` of :class:`str`)
+ optionally contains the path separators to normalize. See
+ :func:`~pathspec.util.normalize_file` for more information.
+
+ Returns :data:`True` if *file* matched; otherwise, :data:`False`.
+ """
+ norm_file = util.normalize_file(file, separators=separators)
+ return util.match_file(self.patterns, norm_file)
+
+ def match_entries(self, entries, separators=None):
+ """
+ Matches the entries to this path-spec.
+
+ *entries* (:class:`~collections.abc.Iterable` of :class:`~util.TreeEntry`)
+ contains the entries to be matched against :attr:`self.patterns <PathSpec.patterns>`.
+
+ *separators* (:class:`~collections.abc.Collection` of :class:`str`;
+ or :data:`None`) optionally contains the path separators to
+ normalize. See :func:`~pathspec.util.normalize_file` for more
+ information.
+
+ Returns the matched entries (:class:`~collections.abc.Iterable` of
+ :class:`~util.TreeEntry`).
+ """
+ if not util._is_iterable(entries):
+ raise TypeError("entries:{!r} is not an iterable.".format(entries))
+
+ entry_map = util._normalize_entries(entries, separators=separators)
+ match_paths = util.match_files(self.patterns, iterkeys(entry_map))
+ for path in match_paths:
+ yield entry_map[path]
+
+ def match_files(self, files, separators=None):
+ """
+ Matches the files to this path-spec.
+
+ *files* (:class:`~collections.abc.Iterable` of :class:`str`; or
+ :class:`pathlib.PurePath`) contains the file paths to be matched
+ against :attr:`self.patterns <PathSpec.patterns>`.
+
+ *separators* (:class:`~collections.abc.Collection` of :class:`str`;
+ or :data:`None`) optionally contains the path separators to
+ normalize. See :func:`~pathspec.util.normalize_file` for more
+ information.
+
+ Returns the matched files (:class:`~collections.abc.Iterable` of
+ :class:`str`).
+ """
+ if not util._is_iterable(files):
+ raise TypeError("files:{!r} is not an iterable.".format(files))
+
+ file_map = util.normalize_files(files, separators=separators)
+ matched_files = util.match_files(self.patterns, iterkeys(file_map))
+ for path in matched_files:
+ yield file_map[path]
+
+ def match_tree_entries(self, root, on_error=None, follow_links=None):
+ """
+ Walks the specified root path for all files and directories, and
+ matches them to this path-spec.
+
+ *root* (:class:`str`; or :class:`pathlib.PurePath`) is the root
+ directory to search.
+
+ *on_error* (:class:`~collections.abc.Callable` or :data:`None`)
+ optionally is the error handler for file-system exceptions. See
+ :func:`~pathspec.util.iter_tree_entries` for more information.
+
+ *follow_links* (:class:`bool` or :data:`None`) optionally is whether
+ to walk symbolic links that resolve to directories. See
+ :func:`~pathspec.util.iter_tree_files` for more information.
+
+ Returns the matched entries (:class:`~collections.abc.Iterable` of
+ :class:`.TreeEntry`).
+ """
+ entries = util.iter_tree_entries(root, on_error=on_error, follow_links=follow_links)
+ return self.match_entries(entries)
+
+ def match_tree_files(self, root, on_error=None, follow_links=None):
+ """
+ Walks the specified root path for all files and matches them to this
+ path-spec.
+
+ *root* (:class:`str`; or :class:`pathlib.PurePath`) is the root
+ directory to search for files.
+
+ *on_error* (:class:`~collections.abc.Callable` or :data:`None`)
+ optionally is the error handler for file-system exceptions. See
+ :func:`~pathspec.util.iter_tree_files` for more information.
+
+ *follow_links* (:class:`bool` or :data:`None`) optionally is whether
+ to walk symbolic links that resolve to directories. See
+ :func:`~pathspec.util.iter_tree_files` for more information.
+
+ Returns the matched files (:class:`~collections.abc.Iterable` of
+ :class:`str`).
+ """
+ files = util.iter_tree_files(root, on_error=on_error, follow_links=follow_links)
+ return self.match_files(files)
+
+ # Alias `match_tree_files()` as `match_tree()`.
+ match_tree = match_tree_files
diff --git a/third_party/python/pathspec/pathspec/pattern.py b/third_party/python/pathspec/pathspec/pattern.py
new file mode 100644
index 0000000000..4ba4edf790
--- /dev/null
+++ b/third_party/python/pathspec/pathspec/pattern.py
@@ -0,0 +1,146 @@
+# encoding: utf-8
+"""
+This module provides the base definition for patterns.
+"""
+
+import re
+
+from .compat import unicode
+
+
+class Pattern(object):
+ """
+ The :class:`Pattern` class is the abstract definition of a pattern.
+ """
+
+ # Make the class dict-less.
+ __slots__ = ('include',)
+
+ def __init__(self, include):
+ """
+ Initializes the :class:`Pattern` instance.
+
+ *include* (:class:`bool` or :data:`None`) is whether the matched
+ files should be included (:data:`True`), excluded (:data:`False`),
+ or is a null-operation (:data:`None`).
+ """
+
+ self.include = include
+ """
+ *include* (:class:`bool` or :data:`None`) is whether the matched
+ files should be included (:data:`True`), excluded (:data:`False`),
+ or is a null-operation (:data:`None`).
+ """
+
+ def match(self, files):
+ """
+ Matches this pattern against the specified files.
+
+ *files* (:class:`~collections.abc.Iterable` of :class:`str`) contains
+ each file relative to the root directory (e.g., ``"relative/path/to/file"``).
+
+ Returns an :class:`~collections.abc.Iterable` yielding each matched
+ file path (:class:`str`).
+ """
+ raise NotImplementedError("{}.{} must override match().".format(self.__class__.__module__, self.__class__.__name__))
+
+
+class RegexPattern(Pattern):
+ """
+ The :class:`RegexPattern` class is an implementation of a pattern
+ using regular expressions.
+ """
+
+ # Make the class dict-less.
+ __slots__ = ('regex',)
+
+ def __init__(self, pattern, include=None):
+ """
+ Initializes the :class:`RegexPattern` instance.
+
+ *pattern* (:class:`unicode`, :class:`bytes`, :class:`re.RegexObject`,
+ or :data:`None`) is the pattern to compile into a regular
+ expression.
+
+ *include* (:class:`bool` or :data:`None`) must be :data:`None`
+ unless *pattern* is a precompiled regular expression (:class:`re.RegexObject`)
+ in which case it is whether matched files should be included
+ (:data:`True`), excluded (:data:`False`), or is a null operation
+ (:data:`None`).
+
+ .. NOTE:: Subclasses do not need to support the *include*
+ parameter.
+ """
+
+ self.regex = None
+ """
+ *regex* (:class:`re.RegexObject`) is the regular expression for the
+ pattern.
+ """
+
+ if isinstance(pattern, (unicode, bytes)):
+ assert include is None, "include:{!r} must be null when pattern:{!r} is a string.".format(include, pattern)
+ regex, include = self.pattern_to_regex(pattern)
+ # NOTE: Make sure to allow a null regular expression to be
+ # returned for a null-operation.
+ if include is not None:
+ regex = re.compile(regex)
+
+ elif pattern is not None and hasattr(pattern, 'match'):
+ # Assume pattern is a precompiled regular expression.
+ # - NOTE: Use the specified *include*.
+ regex = pattern
+
+ elif pattern is None:
+ # NOTE: Make sure to allow a null pattern to be passed for a
+ # null-operation.
+ assert include is None, "include:{!r} must be null when pattern:{!r} is null.".format(include, pattern)
+
+ else:
+ raise TypeError("pattern:{!r} is not a string, RegexObject, or None.".format(pattern))
+
+ super(RegexPattern, self).__init__(include)
+ self.regex = regex
+
+ def __eq__(self, other):
+ """
+ Tests the equality of this regex pattern with *other* (:class:`RegexPattern`)
+ by comparing their :attr:`~Pattern.include` and :attr:`~RegexPattern.regex`
+ attributes.
+ """
+ if isinstance(other, RegexPattern):
+ return self.include == other.include and self.regex == other.regex
+ else:
+ return NotImplemented
+
+ def match(self, files):
+ """
+ Matches this pattern against the specified files.
+
+ *files* (:class:`~collections.abc.Iterable` of :class:`str`)
+ contains each file relative to the root directory (e.g., "relative/path/to/file").
+
+ Returns an :class:`~collections.abc.Iterable` yielding each matched
+ file path (:class:`str`).
+ """
+ if self.include is not None:
+ for path in files:
+ if self.regex.match(path) is not None:
+ yield path
+
+ @classmethod
+ def pattern_to_regex(cls, pattern):
+ """
+ Convert the pattern into an uncompiled regular expression.
+
+ *pattern* (:class:`str`) is the pattern to convert into a regular
+ expression.
+
+ Returns the uncompiled regular expression (:class:`str` or :data:`None`),
+ and whether matched files should be included (:data:`True`),
+ excluded (:data:`False`), or is a null-operation (:data:`None`).
+
+ .. NOTE:: The default implementation simply returns *pattern* and
+ :data:`True`.
+ """
+ return pattern, True
diff --git a/third_party/python/pathspec/pathspec/patterns/__init__.py b/third_party/python/pathspec/pathspec/patterns/__init__.py
new file mode 100644
index 0000000000..1a0d55ec74
--- /dev/null
+++ b/third_party/python/pathspec/pathspec/patterns/__init__.py
@@ -0,0 +1,8 @@
+# encoding: utf-8
+"""
+The *pathspec.patterns* package contains the pattern matching
+implementations.
+"""
+
+# Load pattern implementations.
+from .gitwildmatch import GitWildMatchPattern
diff --git a/third_party/python/pathspec/pathspec/patterns/gitwildmatch.py b/third_party/python/pathspec/pathspec/patterns/gitwildmatch.py
new file mode 100644
index 0000000000..07fd03880a
--- /dev/null
+++ b/third_party/python/pathspec/pathspec/patterns/gitwildmatch.py
@@ -0,0 +1,330 @@
+# encoding: utf-8
+"""
+This module implements Git's wildmatch pattern matching which itself is
+derived from Rsync's wildmatch. Git uses wildmatch for its ".gitignore"
+files.
+"""
+from __future__ import unicode_literals
+
+import re
+import warnings
+
+from .. import util
+from ..compat import unicode
+from ..pattern import RegexPattern
+
+#: The encoding to use when parsing a byte string pattern.
+_BYTES_ENCODING = 'latin1'
+
+
+class GitWildMatchPattern(RegexPattern):
+ """
+ The :class:`GitWildMatchPattern` class represents a compiled Git
+ wildmatch pattern.
+ """
+
+ # Keep the dict-less class hierarchy.
+ __slots__ = ()
+
+ @classmethod
+ def pattern_to_regex(cls, pattern):
+ """
+ Convert the pattern into a regular expression.
+
+ *pattern* (:class:`unicode` or :class:`bytes`) is the pattern to
+ convert into a regular expression.
+
+ Returns the uncompiled regular expression (:class:`unicode`, :class:`bytes`,
+ or :data:`None`), and whether matched files should be included
+ (:data:`True`), excluded (:data:`False`), or if it is a
+ null-operation (:data:`None`).
+ """
+ if isinstance(pattern, unicode):
+ return_type = unicode
+ elif isinstance(pattern, bytes):
+ return_type = bytes
+ pattern = pattern.decode(_BYTES_ENCODING)
+ else:
+ raise TypeError("pattern:{!r} is not a unicode or byte string.".format(pattern))
+
+ pattern = pattern.strip()
+
+ if pattern.startswith('#'):
+ # A pattern starting with a hash ('#') serves as a comment
+ # (neither includes nor excludes files). Escape the hash with a
+ # back-slash to match a literal hash (i.e., '\#').
+ regex = None
+ include = None
+
+ elif pattern == '/':
+ # EDGE CASE: According to `git check-ignore` (v2.4.1), a single
+ # '/' does not match any file.
+ regex = None
+ include = None
+
+ elif pattern:
+
+ if pattern.startswith('!'):
+ # A pattern starting with an exclamation mark ('!') negates the
+ # pattern (exclude instead of include). Escape the exclamation
+ # mark with a back-slash to match a literal exclamation mark
+ # (i.e., '\!').
+ include = False
+ # Remove leading exclamation mark.
+ pattern = pattern[1:]
+ else:
+ include = True
+
+ if pattern.startswith('\\'):
+ # Remove leading back-slash escape for escaped hash ('#') or
+ # exclamation mark ('!').
+ pattern = pattern[1:]
+
+ # Split pattern into segments.
+ pattern_segs = pattern.split('/')
+
+ # Normalize pattern to make processing easier.
+
+ if not pattern_segs[0]:
+ # A pattern beginning with a slash ('/') will only match paths
+ # directly on the root directory instead of any descendant
+ # paths. So, remove empty first segment to make pattern relative
+ # to root.
+ del pattern_segs[0]
+
+ elif len(pattern_segs) == 1 or (len(pattern_segs) == 2 and not pattern_segs[1]):
+ # A single pattern without a beginning slash ('/') will match
+ # any descendant path. This is equivalent to "**/{pattern}". So,
+ # prepend with double-asterisks to make pattern relative to
+ # root.
+ # EDGE CASE: This also holds for a single pattern with a
+ # trailing slash (e.g. dir/).
+ if pattern_segs[0] != '**':
+ pattern_segs.insert(0, '**')
+
+ else:
+ # EDGE CASE: A pattern without a beginning slash ('/') but
+ # contains at least one prepended directory (e.g.
+ # "dir/{pattern}") should not match "**/dir/{pattern}",
+ # according to `git check-ignore` (v2.4.1).
+ pass
+
+ if not pattern_segs[-1] and len(pattern_segs) > 1:
+ # A pattern ending with a slash ('/') will match all descendant
+ # paths if it is a directory but not if it is a regular file.
+ # This is equivalent to "{pattern}/**". So, set last segment to
+ # double asterisks to include all descendants.
+ pattern_segs[-1] = '**'
+
+ # Build regular expression from pattern.
+ output = ['^']
+ need_slash = False
+ end = len(pattern_segs) - 1
+ for i, seg in enumerate(pattern_segs):
+ if seg == '**':
+ if i == 0 and i == end:
+ # A pattern consisting solely of double-asterisks ('**')
+ # will match every path.
+ output.append('.+')
+ elif i == 0:
+ # A normalized pattern beginning with double-asterisks
+ # ('**') will match any leading path segments.
+ output.append('(?:.+/)?')
+ need_slash = False
+ elif i == end:
+ # A normalized pattern ending with double-asterisks ('**')
+ # will match any trailing path segments.
+ output.append('/.*')
+ else:
+ # A pattern with inner double-asterisks ('**') will match
+ # multiple (or zero) inner path segments.
+ output.append('(?:/.+)?')
+ need_slash = True
+ elif seg == '*':
+ # Match single path segment.
+ if need_slash:
+ output.append('/')
+ output.append('[^/]+')
+ need_slash = True
+ else:
+ # Match segment glob pattern.
+ if need_slash:
+ output.append('/')
+ output.append(cls._translate_segment_glob(seg))
+ if i == end and include is True:
+ # A pattern ending without a slash ('/') will match a file
+ # or a directory (with paths underneath it). E.g., "foo"
+ # matches "foo", "foo/bar", "foo/bar/baz", etc.
+ # EDGE CASE: However, this does not hold for exclusion cases
+ # according to `git check-ignore` (v2.4.1).
+ output.append('(?:/.*)?')
+ need_slash = True
+ output.append('$')
+ regex = ''.join(output)
+
+ else:
+ # A blank pattern is a null-operation (neither includes nor
+ # excludes files).
+ regex = None
+ include = None
+
+ if regex is not None and return_type is bytes:
+ regex = regex.encode(_BYTES_ENCODING)
+
+ return regex, include
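+
+ # Illustrative results (mirroring the expectations in
+ # pathspec/tests/test_gitwildmatch.py):
+ #
+ #   pattern_to_regex('*.py')  -> ('^(?:.+/)?[^/]*\.py(?:/.*)?$', True)
+ #   pattern_to_regex('!temp') -> ('^(?:.+/)?temp$', False)
+ #   pattern_to_regex('# ...') -> (None, None)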
+
+ @staticmethod
+ def _translate_segment_glob(pattern):
+ """
+ Translates the glob pattern to a regular expression. This is used in
+ the constructor to translate a path segment glob pattern to its
+ corresponding regular expression.
+
+ *pattern* (:class:`str`) is the glob pattern.
+
+ Returns the regular expression (:class:`str`).
+ """
+ # NOTE: This is derived from `fnmatch.translate()` and is similar to
+ # the POSIX function `fnmatch()` with the `FNM_PATHNAME` flag set.
+
+ escape = False
+ regex = ''
+ i, end = 0, len(pattern)
+ while i < end:
+ # Get next character.
+ char = pattern[i]
+ i += 1
+
+ if escape:
+ # Escape the character.
+ escape = False
+ regex += re.escape(char)
+
+ elif char == '\\':
+ # Escape character, escape next character.
+ escape = True
+
+ elif char == '*':
+ # Multi-character wildcard. Match any string (except slashes),
+ # including an empty string.
+ regex += '[^/]*'
+
+ elif char == '?':
+ # Single-character wildcard. Match any single character (except
+ # a slash).
+ regex += '[^/]'
+
+ elif char == '[':
+ # Bracket expression wildcard. Except for the beginning
+ # exclamation mark, the whole bracket expression can be used
+ # directly as regex, but we have to find where the expression
+ # ends.
+ # - "[][!]" matches ']', '[' and '!'.
+ # - "[]-]" matches ']' and '-'.
+ # - "[!]a-]" matches any character except ']', 'a' and '-'.
+ j = i
+ # Pass the bracket expression negation.
+ if j < end and pattern[j] == '!':
+ j += 1
+ # Pass the first closing bracket if it is at the beginning of the
+ # expression.
+ if j < end and pattern[j] == ']':
+ j += 1
+ # Find the closing bracket. Stop once we reach the end or find it.
+ while j < end and pattern[j] != ']':
+ j += 1
+
+ if j < end:
+ # Found the end of the bracket expression. Increment j to be one
+ # past the closing bracket:
+ #
+ # [...]
+ # ^ ^
+ # i j
+ #
+ j += 1
+ expr = '['
+
+ if pattern[i] == '!':
+ # Bracket expression needs to be negated.
+ expr += '^'
+ i += 1
+ elif pattern[i] == '^':
+ # POSIX declares that the regex bracket expression negation
+ # "[^...]" is undefined in a glob pattern. Python's
+ # `fnmatch.translate()` escapes the caret ('^') as a
+ # literal. To maintain consistency with undefined behavior,
+ # I am escaping the '^' as well.
+ expr += '\\^'
+ i += 1
+
+ # Build regex bracket expression. Escape slashes so they are
+ # treated as literal slashes by regex as defined by POSIX.
+ expr += pattern[i:j].replace('\\', '\\\\')
+
+ # Add regex bracket expression to regex result.
+ regex += expr
+
+ # Set i to one past the closing bracket.
+ i = j
+
+ else:
+ # Failed to find the closing bracket, so treat the opening bracket
+ # as a bracket literal instead of as an expression.
+ regex += '\\['
+
+ else:
+ # Regular character, escape it for regex.
+ regex += re.escape(char)
+
+ return regex
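+
+ # Illustrative segment translations (consistent with the full-pattern
+ # regexes expected by the unit tests):
+ #
+ #   _translate_segment_glob('*.py')      -> '[^/]*\.py'
+ #   _translate_segment_glob('foo-*-bar') -> 'foo\-[^/]*\-bar'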
+
+ @staticmethod
+ def escape(s):
+ """
+ Escape special characters in the given string.
+
+ *s* (:class:`unicode` or :class:`bytes`) is a filename or a string
+ that you want to escape, usually before adding it to a ".gitignore".
+
+ Returns the escaped string (:class:`unicode` or :class:`bytes`).
+ """
+ # Reference: https://git-scm.com/docs/gitignore#_pattern_format
+ meta_characters = r"[]!*#?"
+
+ return "".join("\\" + x if x in meta_characters else x for x in s)
+
+util.register_pattern('gitwildmatch', GitWildMatchPattern)
+
+
+class GitIgnorePattern(GitWildMatchPattern):
+ """
+ The :class:`GitIgnorePattern` class is deprecated in favor of :class:`GitWildMatchPattern`.
+ This class only exists to maintain compatibility with v0.4.
+ """
+
+ def __init__(self, *args, **kw):
+ """
+ Warn about deprecation.
+ """
+ self._deprecated()
+ return super(GitIgnorePattern, self).__init__(*args, **kw)
+
+ @staticmethod
+ def _deprecated():
+ """
+ Warn about deprecation.
+ """
+ warnings.warn("GitIgnorePattern ('gitignore') is deprecated. Use GitWildMatchPattern ('gitwildmatch') instead.", DeprecationWarning, stacklevel=3)
+
+ @classmethod
+ def pattern_to_regex(cls, *args, **kw):
+ """
+ Warn about deprecation.
+ """
+ cls._deprecated()
+ return super(GitIgnorePattern, cls).pattern_to_regex(*args, **kw)
+
+# Register `GitIgnorePattern` as "gitignore" for backward compatibility
+# with v0.4.
+util.register_pattern('gitignore', GitIgnorePattern)
diff --git a/third_party/python/pathspec/pathspec/tests/__init__.py b/third_party/python/pathspec/pathspec/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/pathspec/pathspec/tests/__init__.py
diff --git a/third_party/python/pathspec/pathspec/tests/test_gitwildmatch.py b/third_party/python/pathspec/pathspec/tests/test_gitwildmatch.py
new file mode 100644
index 0000000000..e552d5ef53
--- /dev/null
+++ b/third_party/python/pathspec/pathspec/tests/test_gitwildmatch.py
@@ -0,0 +1,474 @@
+# encoding: utf-8
+"""
+This script tests ``GitWildMatchPattern``.
+"""
+from __future__ import unicode_literals
+
+import re
+import sys
+import unittest
+
+import pathspec.patterns.gitwildmatch
+import pathspec.util
+from pathspec.patterns.gitwildmatch import GitWildMatchPattern
+
+if sys.version_info[0] >= 3:
+ unichr = chr
+
+
+class GitWildMatchTest(unittest.TestCase):
+ """
+ The ``GitWildMatchTest`` class tests the ``GitWildMatchPattern``
+ implementation.
+ """
+
+ def test_00_empty(self):
+ """
+ Tests an empty pattern.
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('')
+ self.assertIsNone(include)
+ self.assertIsNone(regex)
+
+ def test_01_absolute(self):
+ """
+ Tests an absolute path pattern.
+
+ This should match:
+
+ an/absolute/file/path
+ an/absolute/file/path/foo
+
+ This should NOT match:
+
+ foo/an/absolute/file/path
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('/an/absolute/file/path')
+ self.assertTrue(include)
+ self.assertEqual(regex, '^an/absolute/file/path(?:/.*)?$')
+
+ pattern = GitWildMatchPattern(re.compile(regex), include)
+ results = set(pattern.match([
+ 'an/absolute/file/path',
+ 'an/absolute/file/path/foo',
+ 'foo/an/absolute/file/path',
+ ]))
+ self.assertEqual(results, {
+ 'an/absolute/file/path',
+ 'an/absolute/file/path/foo',
+ })
+
+ def test_01_absolute_root(self):
+ """
+ Tests a single root absolute path pattern.
+
+ This should NOT match any file (according to git check-ignore
+ (v2.4.1)).
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('/')
+ self.assertIsNone(include)
+ self.assertIsNone(regex)
+
+ def test_01_relative(self):
+ """
+ Tests a relative path pattern.
+
+ This should match:
+
+ spam
+ spam/
+ foo/spam
+ spam/foo
+ foo/spam/bar
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('spam')
+ self.assertTrue(include)
+ self.assertEqual(regex, '^(?:.+/)?spam(?:/.*)?$')
+
+ pattern = GitWildMatchPattern(re.compile(regex), include)
+ results = set(pattern.match([
+ 'spam',
+ 'spam/',
+ 'foo/spam',
+ 'spam/foo',
+ 'foo/spam/bar',
+ ]))
+ self.assertEqual(results, {
+ 'spam',
+ 'spam/',
+ 'foo/spam',
+ 'spam/foo',
+ 'foo/spam/bar',
+ })
+
+ def test_01_relative_nested(self):
+ """
+ Tests a relative nested path pattern.
+
+ This should match:
+
+ foo/spam
+ foo/spam/bar
+
+ This should **not** match (according to git check-ignore (v2.4.1)):
+
+ bar/foo/spam
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('foo/spam')
+ self.assertTrue(include)
+ self.assertEqual(regex, '^foo/spam(?:/.*)?$')
+
+ pattern = GitWildMatchPattern(re.compile(regex), include)
+ results = set(pattern.match([
+ 'foo/spam',
+ 'foo/spam/bar',
+ 'bar/foo/spam',
+ ]))
+ self.assertEqual(results, {
+ 'foo/spam',
+ 'foo/spam/bar',
+ })
+
+ def test_02_comment(self):
+ """
+ Tests a comment pattern.
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('# Cork soakers.')
+ self.assertIsNone(include)
+ self.assertIsNone(regex)
+
+ def test_02_ignore(self):
+ """
+ Tests an exclude pattern.
+
+ This should NOT match (according to git check-ignore (v2.4.1)):
+
+ temp/foo
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('!temp')
+ self.assertIsNotNone(include)
+ self.assertFalse(include)
+ self.assertEqual(regex, '^(?:.+/)?temp$')
+
+ pattern = GitWildMatchPattern(re.compile(regex), include)
+ results = set(pattern.match(['temp/foo']))
+ self.assertEqual(results, set())
+
+ def test_03_child_double_asterisk(self):
+ """
+ Tests a directory name with a double-asterisk child
+ directory.
+
+ This should match:
+
+ spam/bar
+
+ This should **not** match (according to git check-ignore (v2.4.1)):
+
+ foo/spam/bar
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('spam/**')
+ self.assertTrue(include)
+ self.assertEqual(regex, '^spam/.*$')
+
+ pattern = GitWildMatchPattern(re.compile(regex), include)
+ results = set(pattern.match([
+ 'spam/bar',
+ 'foo/spam/bar',
+ ]))
+ self.assertEqual(results, {'spam/bar'})
+
+ def test_03_inner_double_asterisk(self):
+ """
+ Tests a path with an inner double-asterisk directory.
+
+ This should match:
+
+ left/bar/right
+ left/foo/bar/right
+ left/bar/right/foo
+
+ This should **not** match (according to git check-ignore (v2.4.1)):
+
+ foo/left/bar/right
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('left/**/right')
+ self.assertTrue(include)
+ self.assertEqual(regex, '^left(?:/.+)?/right(?:/.*)?$')
+
+ pattern = GitWildMatchPattern(re.compile(regex), include)
+ results = set(pattern.match([
+ 'left/bar/right',
+ 'left/foo/bar/right',
+ 'left/bar/right/foo',
+ 'foo/left/bar/right',
+ ]))
+ self.assertEqual(results, {
+ 'left/bar/right',
+ 'left/foo/bar/right',
+ 'left/bar/right/foo',
+ })
+
+ def test_03_only_double_asterisk(self):
+ """
+ Tests a double-asterisk pattern which matches everything.
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('**')
+ self.assertTrue(include)
+ self.assertEqual(regex, '^.+$')
+
+ def test_03_parent_double_asterisk(self):
+ """
+ Tests a file name with a double-asterisk parent directory.
+
+ This should match:
+
+ foo/spam
+ foo/spam/bar
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('**/spam')
+ self.assertTrue(include)
+ self.assertEqual(regex, '^(?:.+/)?spam(?:/.*)?$')
+
+ pattern = GitWildMatchPattern(re.compile(regex), include)
+ results = set(pattern.match([
+ 'foo/spam',
+ 'foo/spam/bar',
+ ]))
+ self.assertEqual(results, {
+ 'foo/spam',
+ 'foo/spam/bar',
+ })
+
+ def test_04_infix_wildcard(self):
+ """
+ Tests a pattern with an infix wildcard.
+
+ This should match:
+
+ foo--bar
+ foo-hello-bar
+ a/foo-hello-bar
+ foo-hello-bar/b
+ a/foo-hello-bar/b
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('foo-*-bar')
+ self.assertTrue(include)
+ self.assertEqual(regex, '^(?:.+/)?foo\\-[^/]*\\-bar(?:/.*)?$')
+
+ pattern = GitWildMatchPattern(re.compile(regex), include)
+ results = set(pattern.match([
+ 'foo--bar',
+ 'foo-hello-bar',
+ 'a/foo-hello-bar',
+ 'foo-hello-bar/b',
+ 'a/foo-hello-bar/b',
+ ]))
+ self.assertEqual(results, {
+ 'foo--bar',
+ 'foo-hello-bar',
+ 'a/foo-hello-bar',
+ 'foo-hello-bar/b',
+ 'a/foo-hello-bar/b',
+ })
+
+ def test_04_postfix_wildcard(self):
+ """
+ Tests a pattern with a postfix wildcard.
+
+ This should match:
+
+ ~temp-
+ ~temp-foo
+ ~temp-foo/bar
+ foo/~temp-bar
+ foo/~temp-bar/baz
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('~temp-*')
+ self.assertTrue(include)
+ self.assertEqual(regex, '^(?:.+/)?\\~temp\\-[^/]*(?:/.*)?$')
+
+ pattern = GitWildMatchPattern(re.compile(regex), include)
+ results = set(pattern.match([
+ '~temp-',
+ '~temp-foo',
+ '~temp-foo/bar',
+ 'foo/~temp-bar',
+ 'foo/~temp-bar/baz',
+ ]))
+ self.assertEqual(results, {
+ '~temp-',
+ '~temp-foo',
+ '~temp-foo/bar',
+ 'foo/~temp-bar',
+ 'foo/~temp-bar/baz',
+ })
+
+ def test_04_prefix_wildcard(self):
+ """
+ Tests a pattern with a prefix wildcard.
+
+ This should match:
+
+ bar.py
+ bar.py/
+ foo/bar.py
+ foo/bar.py/baz
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('*.py')
+ self.assertTrue(include)
+ self.assertEqual(regex, '^(?:.+/)?[^/]*\\.py(?:/.*)?$')
+
+ pattern = GitWildMatchPattern(re.compile(regex), include)
+ results = set(pattern.match([
+ 'bar.py',
+ 'bar.py/',
+ 'foo/bar.py',
+ 'foo/bar.py/baz',
+ ]))
+ self.assertEqual(results, {
+ 'bar.py',
+ 'bar.py/',
+ 'foo/bar.py',
+ 'foo/bar.py/baz',
+ })
+
+ def test_05_directory(self):
+ """
+ Tests a directory pattern.
+
+ This should match:
+
+ dir/
+ foo/dir/
+ foo/dir/bar
+
+ This should **not** match:
+
+ dir
+ """
+ regex, include = GitWildMatchPattern.pattern_to_regex('dir/')
+ self.assertTrue(include)
+ self.assertEqual(regex, '^(?:.+/)?dir/.*$')
+
+ pattern = GitWildMatchPattern(re.compile(regex), include)
+ results = set(pattern.match([
+ 'dir/',
+ 'foo/dir/',
+ 'foo/dir/bar',
+ 'dir',
+ ]))
+ self.assertEqual(results, {
+ 'dir/',
+ 'foo/dir/',
+ 'foo/dir/bar',
+ })
+
+ def test_06_registered(self):
+ """
+ Tests that the pattern is registered.
+ """
+ self.assertIs(pathspec.util.lookup_pattern('gitwildmatch'), GitWildMatchPattern)
+
+ def test_06_access_deprecated(self):
+ """
+ Tests that the pattern is accessible from the root module using the
+ deprecated alias.
+ """
+ self.assertTrue(hasattr(pathspec, 'GitIgnorePattern'))
+ self.assertTrue(issubclass(pathspec.GitIgnorePattern, GitWildMatchPattern))
+
+ def test_06_registered_deprecated(self):
+ """
+ Tests that the pattern is registered under the deprecated alias.
+ """
+ self.assertIs(pathspec.util.lookup_pattern('gitignore'), pathspec.GitIgnorePattern)
+
+ def test_07_encode_bytes(self):
+ """
+ Test encoding bytes.
+ """
+ encoded = "".join(map(unichr, range(0,256))).encode(pathspec.patterns.gitwildmatch._BYTES_ENCODING)
+ expected = b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ self.assertEqual(encoded, expected)
+
+ def test_07_decode_bytes(self):
+ """
+ Test decoding bytes.
+ """
+ decoded = bytes(bytearray(range(0,256))).decode(pathspec.patterns.gitwildmatch._BYTES_ENCODING)
+ expected = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ self.assertEqual(decoded, expected)
+
+ def test_07_match_bytes_and_bytes(self):
+ """
+ Test byte string patterns matching byte string paths.
+ """
+ pattern = GitWildMatchPattern(b'*.py')
+ results = set(pattern.match([b'a.py']))
+ self.assertEqual(results, {b'a.py'})
+
+ def test_07_match_bytes_and_bytes_complete(self):
+ """
+ Test byte string patterns matching byte string paths.
+ """
+ encoded = bytes(bytearray(range(0,256)))
+ escaped = b"".join(b"\\" + encoded[i:i+1] for i in range(len(encoded)))
+ pattern = GitWildMatchPattern(escaped)
+ results = set(pattern.match([encoded]))
+ self.assertEqual(results, {encoded})
+
+ @unittest.skipIf(sys.version_info[0] >= 3, "Python 3 is strict")
+ def test_07_match_bytes_and_unicode(self):
+ """
+ Test byte string patterns matching unicode paths.
+ """
+ pattern = GitWildMatchPattern(b'*.py')
+ results = set(pattern.match(['a.py']))
+ self.assertEqual(results, {'a.py'})
+
+ @unittest.skipIf(sys.version_info[0] == 2, "Python 2 is lenient")
+ def test_07_match_bytes_and_unicode_fail(self):
+ """
+ Test that byte string patterns fail to match unicode paths.
+ """
+ pattern = GitWildMatchPattern(b'*.py')
+ with self.assertRaises(TypeError):
+ for _ in pattern.match(['a.py']):
+ pass
+
+ @unittest.skipIf(sys.version_info[0] >= 3, "Python 3 is strict")
+ def test_07_match_unicode_and_bytes(self):
+ """
+ Test unicode patterns with byte paths.
+ """
+ pattern = GitWildMatchPattern('*.py')
+ results = set(pattern.match([b'a.py']))
+ self.assertEqual(results, {b'a.py'})
+
+ @unittest.skipIf(sys.version_info[0] == 2, "Python 2 is lenient")
+ def test_07_match_unicode_and_bytes_fail(self):
+ """
+ Test unicode patterns with byte paths.
+ """
+ pattern = GitWildMatchPattern('*.py')
+ with self.assertRaises(TypeError):
+ for _ in pattern.match([b'a.py']):
+ pass
+
+ def test_07_match_unicode_and_unicode(self):
+ """
+ Test unicode patterns with unicode paths.
+ """
+ pattern = GitWildMatchPattern('*.py')
+ results = set(pattern.match(['a.py']))
+ self.assertEqual(results, {'a.py'})
+
+ def test_08_escape(self):
+ """
+ Test escaping a string with meta-characters
+ """
+ fname = "file!with*weird#naming_[1].t?t"
+ escaped = r"file\!with\*weird\#naming_\[1\].t\?t"
+ result = GitWildMatchPattern.escape(fname)
+ self.assertEqual(result, escaped)
diff --git a/third_party/python/pathspec/pathspec/tests/test_pathspec.py b/third_party/python/pathspec/pathspec/tests/test_pathspec.py
new file mode 100644
index 0000000000..1f5bb8ba11
--- /dev/null
+++ b/third_party/python/pathspec/pathspec/tests/test_pathspec.py
@@ -0,0 +1,129 @@
+# encoding: utf-8
+"""
+This script tests ``PathSpec``.
+"""
+
+import unittest
+
+import pathspec
+
+
+class PathSpecTest(unittest.TestCase):
+ """
+ The ``PathSpecTest`` class tests the ``PathSpec`` class.
+ """
+
+ def test_01_current_dir_paths(self):
+ """
+ Tests that paths referencing the current directory will be properly
+ normalized and matched.
+ """
+ spec = pathspec.PathSpec.from_lines('gitwildmatch', [
+ '*.txt',
+ '!test1/',
+ ])
+ results = set(spec.match_files([
+ './src/test1/a.txt',
+ './src/test1/b.txt',
+ './src/test1/c/c.txt',
+ './src/test2/a.txt',
+ './src/test2/b.txt',
+ './src/test2/c/c.txt',
+ ]))
+ self.assertEqual(results, {
+ './src/test2/a.txt',
+ './src/test2/b.txt',
+ './src/test2/c/c.txt',
+ })
+
+ def test_01_match_files(self):
+ """
+ Tests that matching files one at a time yields the same results as
+ matching multiple files at once.
+ """
+ spec = pathspec.PathSpec.from_lines('gitwildmatch', [
+ '*.txt',
+ '!test1/',
+ ])
+ test_files = [
+ 'src/test1/a.txt',
+ 'src/test1/b.txt',
+ 'src/test1/c/c.txt',
+ 'src/test2/a.txt',
+ 'src/test2/b.txt',
+ 'src/test2/c/c.txt',
+ ]
+ single_results = set(filter(spec.match_file, test_files))
+ multi_results = set(spec.match_files(test_files))
+ self.assertEqual(single_results, multi_results)
+
+ def test_01_windows_current_dir_paths(self):
+ """
+ Tests that paths referencing the current directory will be properly
+ normalized and matched.
+ """
+ spec = pathspec.PathSpec.from_lines('gitwildmatch', [
+ '*.txt',
+ '!test1/',
+ ])
+ results = set(spec.match_files([
+ '.\\src\\test1\\a.txt',
+ '.\\src\\test1\\b.txt',
+ '.\\src\\test1\\c\\c.txt',
+ '.\\src\\test2\\a.txt',
+ '.\\src\\test2\\b.txt',
+ '.\\src\\test2\\c\\c.txt',
+ ], separators=('\\',)))
+ self.assertEqual(results, {
+ '.\\src\\test2\\a.txt',
+ '.\\src\\test2\\b.txt',
+ '.\\src\\test2\\c\\c.txt',
+ })
+
+ def test_01_windows_paths(self):
+ """
+ Tests that Windows paths will be properly normalized and matched.
+ """
+ spec = pathspec.PathSpec.from_lines('gitwildmatch', [
+ '*.txt',
+ '!test1/',
+ ])
+ results = set(spec.match_files([
+ 'src\\test1\\a.txt',
+ 'src\\test1\\b.txt',
+ 'src\\test1\\c\\c.txt',
+ 'src\\test2\\a.txt',
+ 'src\\test2\\b.txt',
+ 'src\\test2\\c\\c.txt',
+ ], separators=('\\',)))
+ self.assertEqual(results, {
+ 'src\\test2\\a.txt',
+ 'src\\test2\\b.txt',
+ 'src\\test2\\c\\c.txt',
+ })
+
+ def test_02_eq(self):
+ """
+ Tests equality.
+ """
+ first_spec = pathspec.PathSpec.from_lines('gitwildmatch', [
+ '*.txt',
+ '!test1/',
+ ])
+ second_spec = pathspec.PathSpec.from_lines('gitwildmatch', [
+ '*.txt',
+ '!test1/',
+ ])
+ self.assertEqual(first_spec, second_spec)
+
+ def test_02_ne(self):
+ """
+ Tests inequality.
+ """
+ first_spec = pathspec.PathSpec.from_lines('gitwildmatch', [
+ '*.txt',
+ ])
+ second_spec = pathspec.PathSpec.from_lines('gitwildmatch', [
+ '!*.txt',
+ ])
+ self.assertNotEqual(first_spec, second_spec)
diff --git a/third_party/python/pathspec/pathspec/tests/test_util.py b/third_party/python/pathspec/pathspec/tests/test_util.py
new file mode 100644
index 0000000000..943bde259c
--- /dev/null
+++ b/third_party/python/pathspec/pathspec/tests/test_util.py
@@ -0,0 +1,380 @@
+# encoding: utf-8
+"""
+This script tests utility functions.
+"""
+
+import errno
+import os
+import os.path
+import shutil
+import sys
+import tempfile
+import unittest
+
+from pathspec.util import iter_tree_entries, iter_tree_files, RecursionError, normalize_file
+
+
+class IterTreeTest(unittest.TestCase):
+ """
+ The ``IterTreeTest`` class tests `pathspec.util.iter_tree_files()`.
+ """
+
+ def make_dirs(self, dirs):
+ """
+ Create the specified directories.
+ """
+ for dir in dirs:
+ os.mkdir(os.path.join(self.temp_dir, self.ospath(dir)))
+
+ def make_files(self, files):
+ """
+ Create the specified files.
+ """
+ for file in files:
+ self.mkfile(os.path.join(self.temp_dir, self.ospath(file)))
+
+ def make_links(self, links):
+ """
+ Create the specified links.
+ """
+ for link, node in links:
+ os.symlink(os.path.join(self.temp_dir, self.ospath(node)), os.path.join(self.temp_dir, self.ospath(link)))
+
+ @staticmethod
+ def mkfile(file):
+ """
+ Creates an empty file.
+ """
+ with open(file, 'wb'):
+ pass
+
+ @staticmethod
+ def ospath(path):
+ """
+ Convert the POSIX path to a native OS path.
+ """
+ return os.path.join(*path.split('/'))
+
+ def require_realpath(self):
+ """
+ Skips the test if `os.path.realpath` does not properly support
+ symlinks.
+ """
+ if self.broken_realpath:
+ raise unittest.SkipTest("`os.path.realpath` is broken.")
+
+ def require_symlink(self):
+ """
+ Skips the test if `os.symlink` is not supported.
+ """
+ if self.no_symlink:
+ raise unittest.SkipTest("`os.symlink` is not supported.")
+
+ def setUp(self):
+ """
+ Called before each test.
+ """
+ self.temp_dir = tempfile.mkdtemp()
+
+ def tearDown(self):
+ """
+ Called after each test.
+ """
+ shutil.rmtree(self.temp_dir)
+
+ def test_1_files(self):
+ """
+ Tests to make sure all files are found.
+ """
+ self.make_dirs([
+ 'Empty',
+ 'Dir',
+ 'Dir/Inner',
+ ])
+ self.make_files([
+ 'a',
+ 'b',
+ 'Dir/c',
+ 'Dir/d',
+ 'Dir/Inner/e',
+ 'Dir/Inner/f',
+ ])
+ results = set(iter_tree_files(self.temp_dir))
+ self.assertEqual(results, set(map(self.ospath, [
+ 'a',
+ 'b',
+ 'Dir/c',
+ 'Dir/d',
+ 'Dir/Inner/e',
+ 'Dir/Inner/f',
+ ])))
+
+ def test_2_0_check_symlink(self):
+ """
+ Tests whether links can be created.
+ """
+ # NOTE: Windows does not support `os.symlink` for Python 2. Windows
+ # Vista and later support `os.symlink` for Python 3.2+.
+ no_symlink = None
+ try:
+ file = os.path.join(self.temp_dir, 'file')
+ link = os.path.join(self.temp_dir, 'link')
+ self.mkfile(file)
+
+ try:
+ os.symlink(file, link)
+ except (AttributeError, NotImplementedError):
+ no_symlink = True
+ raise
+ no_symlink = False
+
+ finally:
+ self.__class__.no_symlink = no_symlink
+
+ def test_2_1_check_realpath(self):
+ """
+ Tests whether `os.path.realpath` works properly with symlinks.
+ """
+ # NOTE: Windows does not follow symlinks with `os.path.realpath`
+ # which is what we use to detect recursion. See <https://bugs.python.org/issue9949>
+ # for details.
+ broken_realpath = None
+ try:
+ self.require_symlink()
+ file = os.path.join(self.temp_dir, 'file')
+ link = os.path.join(self.temp_dir, 'link')
+ self.mkfile(file)
+ os.symlink(file, link)
+
+ try:
+ self.assertEqual(os.path.realpath(file), os.path.realpath(link))
+ except AssertionError:
+ broken_realpath = True
+ raise
+ broken_realpath = False
+
+ finally:
+ self.__class__.broken_realpath = broken_realpath
+
+ def test_2_2_links(self):
+ """
+ Tests to make sure links to directories and files work.
+ """
+ self.require_symlink()
+ self.make_dirs([
+ 'Dir',
+ ])
+ self.make_files([
+ 'a',
+ 'b',
+ 'Dir/c',
+ 'Dir/d',
+ ])
+ self.make_links([
+ ('ax', 'a'),
+ ('bx', 'b'),
+ ('Dir/cx', 'Dir/c'),
+ ('Dir/dx', 'Dir/d'),
+ ('DirX', 'Dir'),
+ ])
+ results = set(iter_tree_files(self.temp_dir))
+ self.assertEqual(results, set(map(self.ospath, [
+ 'a',
+ 'ax',
+ 'b',
+ 'bx',
+ 'Dir/c',
+ 'Dir/cx',
+ 'Dir/d',
+ 'Dir/dx',
+ 'DirX/c',
+ 'DirX/cx',
+ 'DirX/d',
+ 'DirX/dx',
+ ])))
+
+ def test_2_3_sideways_links(self):
+ """
+ Tests to make sure the same directory can be encountered multiple
+ times via links.
+ """
+ self.require_symlink()
+ self.make_dirs([
+ 'Dir',
+ 'Dir/Target',
+ ])
+ self.make_files([
+ 'Dir/Target/file',
+ ])
+ self.make_links([
+ ('Ax', 'Dir'),
+ ('Bx', 'Dir'),
+ ('Cx', 'Dir/Target'),
+ ('Dx', 'Dir/Target'),
+ ('Dir/Ex', 'Dir/Target'),
+ ('Dir/Fx', 'Dir/Target'),
+ ])
+ results = set(iter_tree_files(self.temp_dir))
+ self.assertEqual(results, set(map(self.ospath, [
+ 'Ax/Ex/file',
+ 'Ax/Fx/file',
+ 'Ax/Target/file',
+ 'Bx/Ex/file',
+ 'Bx/Fx/file',
+ 'Bx/Target/file',
+ 'Cx/file',
+ 'Dx/file',
+ 'Dir/Ex/file',
+ 'Dir/Fx/file',
+ 'Dir/Target/file',
+ ])))
+
+ def test_2_4_recursive_links(self):
+ """
+ Tests detection of recursive links.
+ """
+ self.require_symlink()
+ self.require_realpath()
+ self.make_dirs([
+ 'Dir',
+ ])
+ self.make_files([
+ 'Dir/file',
+ ])
+ self.make_links([
+ ('Dir/Self', 'Dir'),
+ ])
+ with self.assertRaises(RecursionError) as context:
+ set(iter_tree_files(self.temp_dir))
+ self.assertEqual(context.exception.first_path, 'Dir')
+ self.assertEqual(context.exception.second_path, self.ospath('Dir/Self'))
+
+ def test_2_5_recursive_circular_links(self):
+ """
+ Tests detection of recursion through circular links.
+ """
+ self.require_symlink()
+ self.require_realpath()
+ self.make_dirs([
+ 'A',
+ 'B',
+ 'C',
+ ])
+ self.make_files([
+ 'A/d',
+ 'B/e',
+ 'C/f',
+ ])
+ self.make_links([
+ ('A/Bx', 'B'),
+ ('B/Cx', 'C'),
+ ('C/Ax', 'A'),
+ ])
+ with self.assertRaises(RecursionError) as context:
+ set(iter_tree_files(self.temp_dir))
+ self.assertIn(context.exception.first_path, ('A', 'B', 'C'))
+ self.assertEqual(context.exception.second_path, {
+ 'A': self.ospath('A/Bx/Cx/Ax'),
+ 'B': self.ospath('B/Cx/Ax/Bx'),
+ 'C': self.ospath('C/Ax/Bx/Cx'),
+ }[context.exception.first_path])
+
+ def test_2_6_detect_broken_links(self):
+ """
+ Tests that broken links are detected.
+ """
+ def reraise(e):
+ raise e
+
+ self.require_symlink()
+ self.make_links([
+ ('A', 'DOES_NOT_EXIST'),
+ ])
+ with self.assertRaises(OSError) as context:
+ set(iter_tree_files(self.temp_dir, on_error=reraise))
+ self.assertEqual(context.exception.errno, errno.ENOENT)
+
+ def test_2_7_ignore_broken_links(self):
+ """
+ Tests that broken links are ignored.
+ """
+ self.require_symlink()
+ self.make_links([
+ ('A', 'DOES_NOT_EXIST'),
+ ])
+ results = set(iter_tree_files(self.temp_dir))
+ self.assertEqual(results, set())
+
+ def test_2_8_no_follow_links(self):
+ """
+ Tests to make sure directory links can be ignored.
+ """
+ self.require_symlink()
+ self.make_dirs([
+ 'Dir',
+ ])
+ self.make_files([
+ 'A',
+ 'B',
+ 'Dir/C',
+ 'Dir/D',
+ ])
+ self.make_links([
+ ('Ax', 'A'),
+ ('Bx', 'B'),
+ ('Dir/Cx', 'Dir/C'),
+ ('Dir/Dx', 'Dir/D'),
+ ('DirX', 'Dir'),
+ ])
+ results = set(iter_tree_files(self.temp_dir, follow_links=False))
+ self.assertEqual(results, set(map(self.ospath, [
+ 'A',
+ 'Ax',
+ 'B',
+ 'Bx',
+ 'Dir/C',
+ 'Dir/Cx',
+ 'Dir/D',
+ 'Dir/Dx',
+ 'DirX',
+ ])))
+
+ def test_3_entries(self):
+ """
+ Tests to make sure all files and directories are found.
+ """
+ self.make_dirs([
+ 'Empty',
+ 'Dir',
+ 'Dir/Inner',
+ ])
+ self.make_files([
+ 'a',
+ 'b',
+ 'Dir/c',
+ 'Dir/d',
+ 'Dir/Inner/e',
+ 'Dir/Inner/f',
+ ])
+ results = {entry.path for entry in iter_tree_entries(self.temp_dir)}
+ self.assertEqual(results, set(map(self.ospath, [
+ 'a',
+ 'b',
+ 'Dir',
+ 'Dir/c',
+ 'Dir/d',
+ 'Dir/Inner',
+ 'Dir/Inner/e',
+ 'Dir/Inner/f',
+ 'Empty',
+ ])))
+
+ @unittest.skipIf(sys.version_info < (3, 4), "pathlib entered stdlib in Python 3.4")
+ def test_4_normalizing_pathlib_path(self):
+ """
+ Tests passing pathlib.Path as argument.
+ """
+ from pathlib import Path
+ first_spec = normalize_file(Path('a.txt'))
+ second_spec = normalize_file('a.txt')
+ self.assertEqual(first_spec, second_spec)
diff --git a/third_party/python/pathspec/pathspec/util.py b/third_party/python/pathspec/pathspec/util.py
new file mode 100644
index 0000000000..bcba8783b6
--- /dev/null
+++ b/third_party/python/pathspec/pathspec/util.py
@@ -0,0 +1,600 @@
+# encoding: utf-8
+"""
+This module provides utility methods for dealing with path-specs.
+"""
+
+import os
+import os.path
+import posixpath
+import stat
+
+from .compat import Collection, Iterable, string_types, unicode
+
+NORMALIZE_PATH_SEPS = [sep for sep in [os.sep, os.altsep] if sep and sep != posixpath.sep]
+"""
+*NORMALIZE_PATH_SEPS* (:class:`list` of :class:`str`) contains the path
+separators that need to be normalized to the POSIX separator for the
+current operating system. The separators are determined by examining
+:data:`os.sep` and :data:`os.altsep`.
+"""
+
+_registered_patterns = {}
+"""
+*_registered_patterns* (:class:`dict`) maps a name (:class:`str`) to the
+registered pattern factory (:class:`~collections.abc.Callable`).
+"""
+
+
+def detailed_match_files(patterns, files, all_matches=None):
+ """
+ Matches the files to the patterns, and returns which patterns matched
+ the files.
+
+ *patterns* (:class:`~collections.abc.Iterable` of :class:`~pathspec.pattern.Pattern`)
+ contains the patterns to use.
+
+ *files* (:class:`~collections.abc.Iterable` of :class:`str`) contains
+ the normalized file paths to be matched against *patterns*.
+
+ *all_matches* (:class:`bool` or :data:`None`) is whether to return all
+ matched patterns (:data:`True`), or only the last matched pattern
+ (:data:`False`). Default is :data:`None` for :data:`False`.
+
+ Returns the matched files (:class:`dict`) which maps each matched file
+ (:class:`str`) to the patterns that matched in order (:class:`.MatchDetail`).
+ """
+ all_files = files if isinstance(files, Collection) else list(files)
+ return_files = {}
+ for pattern in patterns:
+ if pattern.include is not None:
+ result_files = pattern.match(all_files)
+ if pattern.include:
+ # Add files and record pattern.
+ for result_file in result_files:
+ if result_file in return_files:
+ if all_matches:
+ return_files[result_file].patterns.append(pattern)
+ else:
+ return_files[result_file].patterns[0] = pattern
+ else:
+ return_files[result_file] = MatchDetail([pattern])
+
+ else:
+ # Remove files.
+ for file in result_files:
+ del return_files[file]
+
+ return return_files
+
+
+def _is_iterable(value):
+ """
+ Check whether the value is an iterable (excludes strings).
+
+ *value* is the value to check.
+
+ Returns whether *value* is an iterable (:class:`bool`).
+ """
+ return isinstance(value, Iterable) and not isinstance(value, (unicode, bytes))
+
+
+def iter_tree_entries(root, on_error=None, follow_links=None):
+ """
+ Walks the specified directory for all files and directories.
+
+ *root* (:class:`str`) is the root directory to search.
+
+ *on_error* (:class:`~collections.abc.Callable` or :data:`None`)
+ optionally is the error handler for file-system exceptions. It will be
+ called with the exception (:exc:`OSError`). Reraise the exception to
+ abort the walk. Default is :data:`None` to ignore file-system
+ exceptions.
+
+ *follow_links* (:class:`bool` or :data:`None`) optionally is whether
+ to walk symbolic links that resolve to directories. Default is
+ :data:`None` for :data:`True`.
+
+ Raises :exc:`RecursionError` if recursion is detected.
+
+ Returns an :class:`~collections.abc.Iterable` yielding each file or
+ directory entry (:class:`.TreeEntry`) relative to *root*.
+ """
+ if on_error is not None and not callable(on_error):
+ raise TypeError("on_error:{!r} is not callable.".format(on_error))
+
+ if follow_links is None:
+ follow_links = True
+
+ for entry in _iter_tree_entries_next(os.path.abspath(root), '', {}, on_error, follow_links):
+ yield entry
+
+
+def iter_tree_files(root, on_error=None, follow_links=None):
+ """
+ Walks the specified directory for all files.
+
+ *root* (:class:`str`) is the root directory to search for files.
+
+ *on_error* (:class:`~collections.abc.Callable` or :data:`None`)
+ optionally is the error handler for file-system exceptions. It will be
+ called with the exception (:exc:`OSError`). Reraise the exception to
+ abort the walk. Default is :data:`None` to ignore file-system
+ exceptions.
+
+ *follow_links* (:class:`bool` or :data:`None`) optionally is whether
+ to walk symbolic links that resolve to directories. Default is
+ :data:`None` for :data:`True`.
+
+ Raises :exc:`RecursionError` if recursion is detected.
+
+ Returns an :class:`~collections.abc.Iterable` yielding the path to
+ each file (:class:`str`) relative to *root*.
+ """
+ if on_error is not None and not callable(on_error):
+ raise TypeError("on_error:{!r} is not callable.".format(on_error))
+
+ if follow_links is None:
+ follow_links = True
+
+ for entry in _iter_tree_entries_next(os.path.abspath(root), '', {}, on_error, follow_links):
+ if not entry.is_dir(follow_links):
+ yield entry.path
+
+
+# Alias `iter_tree_files()` as `iter_tree()`.
+iter_tree = iter_tree_files
+
+
+def _iter_tree_entries_next(root_full, dir_rel, memo, on_error, follow_links):
+ """
+ Scan the directory for all descendant entries.
+
+ *root_full* (:class:`str`) is the absolute path to the root directory.
+
+ *dir_rel* (:class:`str`) is the path to the directory to scan relative
+ to *root_full*.
+
+ *memo* (:class:`dict`) keeps track of ancestor directories
+ encountered. Maps each ancestor real path (:class:`str`) to relative
+ path (:class:`str`).
+
+ *on_error* (:class:`~collections.abc.Callable` or :data:`None`)
+ optionally is the error handler for file-system exceptions.
+
+ *follow_links* (:class:`bool`) is whether to walk symbolic links that
+ resolve to directories.
+
+ Yields each entry (:class:`.TreeEntry`).
+ """
+ dir_full = os.path.join(root_full, dir_rel)
+ dir_real = os.path.realpath(dir_full)
+
+ # Remember each encountered ancestor directory and its canonical
+ # (real) path. If a canonical path is encountered more than once,
+ # recursion has occurred.
+ if dir_real not in memo:
+ memo[dir_real] = dir_rel
+ else:
+ raise RecursionError(real_path=dir_real, first_path=memo[dir_real], second_path=dir_rel)
+
+ for node_name in os.listdir(dir_full):
+ node_rel = os.path.join(dir_rel, node_name)
+ node_full = os.path.join(root_full, node_rel)
+
+ # Inspect child node.
+ try:
+ node_lstat = os.lstat(node_full)
+ except OSError as e:
+ if on_error is not None:
+ on_error(e)
+ continue
+
+ if stat.S_ISLNK(node_lstat.st_mode):
+ # Child node is a link, inspect the target node.
+ is_link = True
+ try:
+ node_stat = os.stat(node_full)
+ except OSError as e:
+ if on_error is not None:
+ on_error(e)
+ continue
+ else:
+ is_link = False
+ node_stat = node_lstat
+
+ if stat.S_ISDIR(node_stat.st_mode) and (follow_links or not is_link):
+ # Child node is a directory, recurse into it and yield its
+ # descendant files.
+ yield TreeEntry(node_name, node_rel, node_lstat, node_stat)
+
+ for entry in _iter_tree_entries_next(root_full, node_rel, memo, on_error, follow_links):
+ yield entry
+
+ elif stat.S_ISREG(node_stat.st_mode) or is_link:
+ # Child node is either a file or an unfollowed link, yield it.
+ yield TreeEntry(node_name, node_rel, node_lstat, node_stat)
+
+ # NOTE: Make sure to remove the canonical (real) path of the directory
+ # from the ancestors memo once we are done with it. This allows the
+ # same directory to appear multiple times. If this is not done, the
+ # second occurrence of the directory will be incorrectly interpreted
+ # as a recursion. See <https://github.com/cpburnz/python-path-specification/pull/7>.
+ del memo[dir_real]
+
+
+def lookup_pattern(name):
+ """
+ Looks up a registered pattern factory by name.
+
+ *name* (:class:`str`) is the name of the pattern factory.
+
+ Returns the registered pattern factory (:class:`~collections.abc.Callable`).
+ If no pattern factory is registered, raises :exc:`KeyError`.
+ """
+ return _registered_patterns[name]
+
+
+def match_file(patterns, file):
+ """
+ Matches the file to the patterns.
+
+ *patterns* (:class:`~collections.abc.Iterable` of :class:`~pathspec.pattern.Pattern`)
+ contains the patterns to use.
+
+ *file* (:class:`str`) is the normalized file path to be matched
+ against *patterns*.
+
+ Returns :data:`True` if *file* matched; otherwise, :data:`False`.
+ """
+ matched = False
+ for pattern in patterns:
+ if pattern.include is not None:
+ if file in pattern.match((file,)):
+ matched = pattern.include
+ return matched
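+
+ # A sketch of the last-match-wins behaviour, with patterns compiled via
+ # PathSpec.from_lines('gitwildmatch', ['*.txt', '!test1/']).patterns:
+ #
+ #   match_file(patterns, 'src/test2/a.txt')  # -> True  ('*.txt' matches)
+ #   match_file(patterns, 'src/test1/a.txt')  # -> False ('!test1/' matches last)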
+
+
+def match_files(patterns, files):
+ """
+ Matches the files to the patterns.
+
+ *patterns* (:class:`~collections.abc.Iterable` of :class:`~pathspec.pattern.Pattern`)
+ contains the patterns to use.
+
+ *files* (:class:`~collections.abc.Iterable` of :class:`str`) contains
+ the normalized file paths to be matched against *patterns*.
+
+ Returns the matched files (:class:`set` of :class:`str`).
+ """
+ all_files = files if isinstance(files, Collection) else list(files)
+ return_files = set()
+ for pattern in patterns:
+ if pattern.include is not None:
+ result_files = pattern.match(all_files)
+ if pattern.include:
+ return_files.update(result_files)
+ else:
+ return_files.difference_update(result_files)
+ return return_files
+
+
+def _normalize_entries(entries, separators=None):
+ """
+ Normalizes the entry paths to use the POSIX path separator.
+
+ *entries* (:class:`~collections.abc.Iterable` of :class:`.TreeEntry`)
+ contains the entries to be normalized.
+
+ *separators* (:class:`~collections.abc.Collection` of :class:`str`; or
+ :data:`None`) optionally contains the path separators to normalize.
+ See :func:`normalize_file` for more information.
+
+ Returns a :class:`dict` mapping each normalized file path (:class:`str`)
+ to its entry (:class:`.TreeEntry`).
+ """
+ norm_files = {}
+ for entry in entries:
+ norm_files[normalize_file(entry.path, separators=separators)] = entry
+ return norm_files
+
+
+def normalize_file(file, separators=None):
+ """
+ Normalizes the file path to use the POSIX path separator (i.e., ``'/'``).
+
+ *file* (:class:`str` or :class:`pathlib.PurePath`) is the file path.
+
+ *separators* (:class:`~collections.abc.Collection` of :class:`str`; or
+ :data:`None`) optionally contains the path separators to normalize.
+ This does not need to include the POSIX path separator (``'/'``), but
+ including it will not affect the results. Default is :data:`None` for
+ :data:`NORMALIZE_PATH_SEPS`. To prevent normalization, pass an empty
+ container (e.g., an empty tuple ``()``).
+
+ Returns the normalized file path (:class:`str`).
+ """
+ # Normalize path separators.
+ if separators is None:
+ separators = NORMALIZE_PATH_SEPS
+
+ # Convert path object to string.
+ norm_file = str(file)
+
+ for sep in separators:
+ norm_file = norm_file.replace(sep, posixpath.sep)
+
+ # Remove current directory prefix.
+ if norm_file.startswith('./'):
+ norm_file = norm_file[2:]
+
+ return norm_file
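+
+
+# Illustrative note (comments only, not executed): on Windows, where the
+# default NORMALIZE_PATH_SEPS is assumed to include the backslash,
+# normalize_file('.\\docs\\readme.md') would return 'docs/readme.md' -- the
+# alternate separators are replaced with '/' and the leading './' prefix is
+# stripped. Passing separators=() skips the separator replacement (the './'
+# prefix is still removed).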
+
+
+def normalize_files(files, separators=None):
+ """
+ Normalizes the file paths to use the POSIX path separator.
+
+ *files* (:class:`~collections.abc.Iterable` of :class:`str` or
+ :class:`pathlib.PurePath`) contains the file paths to be normalized.
+
+ *separators* (:class:`~collections.abc.Collection` of :class:`str`; or
+ :data:`None`) optionally contains the path separators to normalize.
+ See :func:`normalize_file` for more information.
+
+ Returns a :class:`dict` mapping each normalized file path (:class:`str`)
+ to the original file path (:class:`str`).
+ """
+ norm_files = {}
+ for path in files:
+ norm_files[normalize_file(path, separators=separators)] = path
+ return norm_files
+
+
+def register_pattern(name, pattern_factory, override=None):
+ """
+ Registers the specified pattern factory.
+
+ *name* (:class:`str`) is the name to register the pattern factory
+ under.
+
+ *pattern_factory* (:class:`~collections.abc.Callable`) is used to
+ compile patterns. It must accept an uncompiled pattern (:class:`str`)
+ and return the compiled pattern (:class:`.Pattern`).
+
+ *override* (:class:`bool` or :data:`None`) optionally is whether to
+ allow overriding an already registered pattern under the same name
+ (:data:`True`), instead of raising an :exc:`AlreadyRegisteredError`
+ (:data:`False`). Default is :data:`None` for :data:`False`.
+ """
+ if not isinstance(name, string_types):
+ raise TypeError("name:{!r} is not a string.".format(name))
+ if not callable(pattern_factory):
+ raise TypeError("pattern_factory:{!r} is not callable.".format(pattern_factory))
+ if name in _registered_patterns and not override:
+ raise AlreadyRegisteredError(name, _registered_patterns[name])
+ _registered_patterns[name] = pattern_factory
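+
+
+# Illustrative note (comments only, not executed): a hypothetical factory,
+# e.g. register_pattern('myformat', MyFormatPattern), can later be retrieved
+# with lookup_pattern('myformat'). Registering the same name again without
+# override=True raises AlreadyRegisteredError.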
+
+
+class AlreadyRegisteredError(Exception):
+ """
+ The :exc:`AlreadyRegisteredError` exception is raised when a pattern
+ factory is registered under a name already in use.
+ """
+
+ def __init__(self, name, pattern_factory):
+ """
+ Initializes the :exc:`AlreadyRegisteredError` instance.
+
+ *name* (:class:`str`) is the name of the registered pattern.
+
+ *pattern_factory* (:class:`~collections.abc.Callable`) is the
+ registered pattern factory.
+ """
+ super(AlreadyRegisteredError, self).__init__(name, pattern_factory)
+
+ @property
+ def message(self):
+ """
+ *message* (:class:`str`) is the error message.
+ """
+ return "{name!r} is already registered for pattern factory:{pattern_factory!r}.".format(
+ name=self.name,
+ pattern_factory=self.pattern_factory,
+ )
+
+ @property
+ def name(self):
+ """
+ *name* (:class:`str`) is the name of the registered pattern.
+ """
+ return self.args[0]
+
+ @property
+ def pattern_factory(self):
+ """
+ *pattern_factory* (:class:`~collections.abc.Callable`) is the
+ registered pattern factory.
+ """
+ return self.args[1]
+
+
+class RecursionError(Exception):
+ """
+ The :exc:`RecursionError` exception is raised when recursion is
+ detected.
+ """
+
+ def __init__(self, real_path, first_path, second_path):
+ """
+ Initializes the :exc:`RecursionError` instance.
+
+ *real_path* (:class:`str`) is the real path that recursion was
+ encountered on.
+
+ *first_path* (:class:`str`) is the first path encountered for
+ *real_path*.
+
+ *second_path* (:class:`str`) is the second path encountered for
+ *real_path*.
+ """
+ super(RecursionError, self).__init__(real_path, first_path, second_path)
+
+ @property
+ def first_path(self):
+ """
+ *first_path* (:class:`str`) is the first path encountered for
+ :attr:`self.real_path <RecursionError.real_path>`.
+ """
+ return self.args[1]
+
+ @property
+ def message(self):
+ """
+ *message* (:class:`str`) is the error message.
+ """
+ return "Real path {real!r} was encountered at {first!r} and then {second!r}.".format(
+ real=self.real_path,
+ first=self.first_path,
+ second=self.second_path,
+ )
+
+ @property
+ def real_path(self):
+ """
+ *real_path* (:class:`str`) is the real path that recursion was
+ encountered on.
+ """
+ return self.args[0]
+
+ @property
+ def second_path(self):
+ """
+ *second_path* (:class:`str`) is the second path encountered for
+ :attr:`self.real_path <RecursionError.real_path>`.
+ """
+ return self.args[2]
+
+
+class MatchDetail(object):
+ """
+ The :class:`.MatchDetail` class contains information about a file that
+ matched the patterns.
+ """
+
+ #: Make the class dict-less.
+ __slots__ = ('patterns',)
+
+ def __init__(self, patterns):
+ """
+ Initialize the :class:`.MatchDetail` instance.
+
+ *patterns* (:class:`~collections.abc.Sequence` of :class:`~pathspec.pattern.Pattern`)
+ contains the patterns that matched the file in the order they were
+ encountered.
+ """
+
+ self.patterns = patterns
+ """
+ *patterns* (:class:`~collections.abc.Sequence` of :class:`~pathspec.pattern.Pattern`)
+ contains the patterns that matched the file in the order they were
+ encountered.
+ """
+
+
+class TreeEntry(object):
+ """
+ The :class:`.TreeEntry` class contains information about a file-system
+ entry.
+ """
+
+ #: Make the class dict-less.
+ __slots__ = ('_lstat', 'name', 'path', '_stat')
+
+ def __init__(self, name, path, lstat, stat):
+ """
+ Initialize the :class:`.TreeEntry` instance.
+
+ *name* (:class:`str`) is the base name of the entry.
+
+ *path* (:class:`str`) is the relative path of the entry.
+
+ *lstat* (:class:`~os.stat_result`) is the stat result of the direct
+ entry.
+
+ *stat* (:class:`~os.stat_result`) is the stat result of the entry,
+ potentially linked.
+ """
+
+ self._lstat = lstat
+ """
+ *_lstat* (:class:`~os.stat_result`) is the stat result of the direct
+ entry.
+ """
+
+ self.name = name
+ """
+ *name* (:class:`str`) is the base name of the entry.
+ """
+
+ self.path = path
+ """
+ *path* (:class:`str`) is the relative path of the entry.
+ """
+
+ self._stat = stat
+ """
+ *_stat* (:class:`~os.stat_result`) is the stat result of the linked
+ entry.
+ """
+
+ def is_dir(self, follow_links=None):
+ """
+ Get whether the entry is a directory.
+
+ *follow_links* (:class:`bool` or :data:`None`) is whether to follow
+ symbolic links. If this is :data:`True`, a symlink to a directory
+ will result in :data:`True`. Default is :data:`None` for :data:`True`.
+
+ Returns whether the entry is a directory (:class:`bool`).
+ """
+ if follow_links is None:
+ follow_links = True
+
+ node_stat = self._stat if follow_links else self._lstat
+ return stat.S_ISDIR(node_stat.st_mode)
+
+ def is_file(self, follow_links=None):
+ """
+ Get whether the entry is a regular file.
+
+ *follow_links* (:class:`bool` or :data:`None`) is whether to follow
+ symbolic links. If this is :data:`True`, a symlink to a regular file
+ will result in :data:`True`. Default is :data:`None` for :data:`True`.
+
+ Returns whether the entry is a regular file (:class:`bool`).
+ """
+ if follow_links is None:
+ follow_links = True
+
+ node_stat = self._stat if follow_links else self._lstat
+ return stat.S_ISREG(node_stat.st_mode)
+
+ def is_symlink(self):
+ """
+ Returns whether the entry is a symbolic link (:class:`bool`).
+ """
+ return stat.S_ISLNK(self._lstat.st_mode)
+
+ def stat(self, follow_links=None):
+ """
+ Get the cached stat result for the entry.
+
+ *follow_links* (:class:`bool` or :data:`None`) is whether to follow
+ symbolic links. If this is :data:`True`, the stat result of the
+ linked file will be returned. Default is :data:`None` for :data:`True`.
+
+ Returns the stat result (:class:`~os.stat_result`).
+ """
+ if follow_links is None:
+ follow_links = True
+
+ return self._stat if follow_links else self._lstat
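+
+
+# Illustrative note (comments only, not executed): for a TreeEntry wrapping a
+# symbolic link that points at a directory, entry.is_symlink() is True,
+# entry.is_dir() is True (the link target is followed by default), and
+# entry.is_dir(follow_links=False) is False because only the link itself is
+# considered.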
diff --git a/third_party/python/pathspec/setup.cfg b/third_party/python/pathspec/setup.cfg
new file mode 100644
index 0000000000..adf5ed72aa
--- /dev/null
+++ b/third_party/python/pathspec/setup.cfg
@@ -0,0 +1,7 @@
+[bdist_wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/pathspec/setup.py b/third_party/python/pathspec/setup.py
new file mode 100644
index 0000000000..130d416166
--- /dev/null
+++ b/third_party/python/pathspec/setup.py
@@ -0,0 +1,44 @@
+# encoding: utf-8
+
+import io
+from setuptools import setup, find_packages
+
+from pathspec import __author__, __email__, __license__, __project__, __version__
+
+# Read readme and changes files.
+with io.open("README.rst", mode='r', encoding='UTF-8') as fh:
+ readme = fh.read().strip()
+with io.open("CHANGES.rst", mode='r', encoding='UTF-8') as fh:
+ changes = fh.read().strip()
+
+setup(
+ name=__project__,
+ version=__version__,
+ author=__author__,
+ author_email=__email__,
+ url="https://github.com/cpburnz/python-path-specification",
+ description="Utility library for gitignore style pattern matching of file paths.",
+ long_description=readme + "\n\n" + changes,
+ python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
+ classifiers=[
+ "Development Status :: 4 - Beta",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ "Topic :: Utilities",
+ ],
+ license=__license__,
+ packages=find_packages(),
+ test_suite='pathspec.tests',
+)
diff --git a/third_party/python/pep487/PKG-INFO b/third_party/python/pep487/PKG-INFO
new file mode 100644
index 0000000000..c44d702629
--- /dev/null
+++ b/third_party/python/pep487/PKG-INFO
@@ -0,0 +1,87 @@
+Metadata-Version: 2.1
+Name: pep487
+Version: 1.0.1
+Summary: PEP487 - Simpler customisation of class creation
+Home-page: https://github.com/zaehlwerk/pep487
+Author: Gregor Giesen
+Author-email: giesen@zaehlwerk.net
+License: GPLv3
+Description: ===============================================
+ PEP487: Simpler customisation of class creation
+ ===============================================
+
+ This is a backport of PEP487's simpler customisation of class
+ creation by Martin Teichmann <https://www.python.org/dev/peps/pep-0487/>
+ for Python versions before 3.6.
+
+ PEP487 is free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published
+ by the Free Software Foundation, either version 3 of the License,
+ or (at your option) any later version.
+
+
+ Subclass init
+ =============
+
+ >>> from pep487 import PEP487Object
+ >>> class FooBase(PEP487Object):
+ ... foos = set()
+ ...
+ ... def __init_subclass__(cls, **kwargs):
+ ... cls.foos.add(cls.__name__)
+
+ Using `PEP487Object` as a base class, all subclasses of FooBase
+ will add their name to the common class variable 'foos'.
+
+ >>> class Foo1(FooBase):
+ ... pass
+ >>> class Foo2(FooBase):
+ ... pass
+
+ Hence:
+
+ >>> FooBase.foos
+ {'Foo1', 'Foo2'}
+
+
+
+ Property names and owner
+ ========================
+
+ If a class attribute has a method `__set_name__`, it will be called
+ upon declaration of a PEP487Object class:
+
+ >>> class NamedProperty:
+ ... def __set_name__(self, owner, name):
+ ... self.context = owner
+ ... self.name = name
+
+ >>> class Bar(PEP487Object):
+ ... foo = NamedProperty()
+ ... bar = NamedProperty()
+
+ Consequently:
+
+ >>> Bar.foo.name is 'foo' and Bar.foo.context is Bar
+ True
+ >>> Bar.bar.name is 'bar' and Bar.bar.context is Bar
+ True
+
+
+ Abstract base classes
+ =====================
+
+ Since `PEP487Object` has a custom metaclass, it is incompatible
+ with `abc.ABC`. Therefore `pep487` contains patched versions of
+ `ABC` and `ABCMeta`.
+
+Keywords: pep487
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Provides-Extra: test
diff --git a/third_party/python/pep487/README.rst b/third_party/python/pep487/README.rst
new file mode 100644
index 0000000000..4190c77f79
--- /dev/null
+++ b/third_party/python/pep487/README.rst
@@ -0,0 +1,68 @@
+===============================================
+PEP487: Simpler customisation of class creation
+===============================================
+
+This is a backport of PEP487's simpler customisation of class
+creation by Martin Teichmann <https://www.python.org/dev/peps/pep-0487/>
+for Python versions before 3.6.
+
+PEP487 is free software: you can redistribute it and/or modify it
+under the terms of the GNU General Public License as published
+by the Free Software Foundation, either version 3 of the License,
+or (at your option) any later version.
+
+
+Subclass init
+=============
+
+>>> from pep487 import PEP487Object
+>>> class FooBase(PEP487Object):
+... foos = set()
+...
+... def __init_subclass__(cls, **kwargs):
+... cls.foos.add(cls.__name__)
+
+Using `PEP487Object` as a base class, all subclasses of FooBase
+will add their name to the common class variable 'foos'.
+
+>>> class Foo1(FooBase):
+... pass
+>>> class Foo2(FooBase):
+... pass
+
+Hence:
+
+>>> FooBase.foos
+{'Foo1', 'Foo2'}
+
+
+
+Property names and owner
+========================
+
+If a class attribute has a method `__set_name__`, it will be called
+upon declaration of a PEP487Object class:
+
+>>> class NamedProperty:
+... def __set_name__(self, owner, name):
+... self.context = owner
+... self.name = name
+
+>>> class Bar(PEP487Object):
+... foo = NamedProperty()
+... bar = NamedProperty()
+
+Consequently:
+
+>>> Bar.foo.name is 'foo' and Bar.foo.context is Bar
+True
+>>> Bar.bar.name is 'bar' and Bar.bar.context is Bar
+True
+
+
+Abstract base classes
+=====================
+
+Since `PEP487Object` has a custom metaclass, it is incompatible
+with `abc.ABC`. Therefore `pep487` contains patched versions of `ABC`
+and `ABCMeta`.
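+
+For illustration only (`PluginBase` and `MyPlugin` are hypothetical names,
+not part of this package), the patched `ABC` supports `__init_subclass__`
+in the same way as `PEP487Object`:
+
+>>> import abc
+>>> from pep487 import ABC
+>>> class PluginBase(ABC):
+...     registry = []
+...
+...     def __init_subclass__(cls, **kwargs):
+...         cls.registry.append(cls.__name__)
+...
+...     @abc.abstractmethod
+...     def run(self):
+...         pass
+
+>>> class MyPlugin(PluginBase):
+...     def run(self):
+...         return 'ran'
+
+>>> PluginBase.registry
+['MyPlugin']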
diff --git a/third_party/python/pep487/lib/pep487/__init__.py b/third_party/python/pep487/lib/pep487/__init__.py
new file mode 100644
index 0000000000..1bfcec0036
--- /dev/null
+++ b/third_party/python/pep487/lib/pep487/__init__.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2017 by Gregor Giesen
+#
+# This is a backport of PEP487's simpler customisation of class
+# creation by Martin Teichmann <https://www.python.org/dev/peps/pep-0487/>
+# for Python versions before 3.6.
+#
+# PEP487 is free software: you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License,
+# or (at your option) any later version.
+#
+# PEP487 is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with PEP487. If not, see <http://www.gnu.org/licenses/>.
+#
+"""pep487.py: Simpler customisation of class creation"""
+
+import abc
+import sys
+import types
+
+__all__ = ('PEP487Meta', 'PEP487Object', 'ABCMeta', 'ABC')
+
+HAS_PY36 = sys.version_info >= (3, 6)
+HAS_PEP487 = HAS_PY36
+
+if HAS_PEP487:
+ PEP487Meta = type # pragma: no cover
+ ABCMeta = abc.ABCMeta # pragma: no cover
+ ABC = abc.ABC # pragma: no cover
+ PEP487Base = object # pragma: no cover
+ PEP487Object = object # pragma: no cover
+else:
+ class PEP487Meta(type):
+ def __new__(mcls, name, bases, ns, **kwargs):
+ init = ns.get('__init_subclass__')
+ if isinstance(init, types.FunctionType):
+ ns['__init_subclass__'] = classmethod(init)
+ cls = super().__new__(mcls, name, bases, ns)
+ for key, value in cls.__dict__.items():
+ func = getattr(value, '__set_name__', None)
+ if func is not None:
+ func(cls, key)
+ super(cls, cls).__init_subclass__(**kwargs)
+ return cls
+
+ def __init__(cls, name, bases, ns, **kwargs):
+ super().__init__(name, bases, ns)
+
+ class ABCMeta(abc.ABCMeta):
+ def __new__(mcls, name, bases, ns, **kwargs):
+ init = ns.get('__init_subclass__')
+ if isinstance(init, types.FunctionType):
+ ns['__init_subclass__'] = classmethod(init)
+ cls = super().__new__(mcls, name, bases, ns)
+ for key, value in cls.__dict__.items():
+ func = getattr(value, '__set_name__', None)
+ if func is not None:
+ func(cls, key)
+ super(cls, cls).__init_subclass__(**kwargs)
+ return cls
+
+ def __init__(cls, name, bases, ns, **kwargs):
+ super().__init__(name, bases, ns)
+
+ class PEP487Base:
+ @classmethod
+ def __init_subclass__(cls, **kwargs):
+ pass
+
+ class PEP487Object(PEP487Base, metaclass=PEP487Meta):
+ pass
+
+ class ABC(PEP487Base, metaclass=ABCMeta):
+ pass
diff --git a/third_party/python/pep487/lib/pep487/version.py b/third_party/python/pep487/lib/pep487/version.py
new file mode 100644
index 0000000000..02a2605ea5
--- /dev/null
+++ b/third_party/python/pep487/lib/pep487/version.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2017 by Gregor Giesen
+#
+# This file is part of PEP487.
+#
+# PEP487 is free software: you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License,
+# or (at your option) any later version.
+#
+# PEP487 is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with PEP487. If not, see <http://www.gnu.org/licenses/>.
+#
+"""pep487/version.py: version info"""
+
+__author__ = "Gregor Giesen"
+__copyright__ = "Copyright 2017, Gregor Giesen"
+__credits__ = ["Martin Teichmann", "Gregor Giesen"]
+__license__ = "GPLv3"
+__version__ = "1.0.1"
+__maintainer__ = "Gregor Giesen"
diff --git a/third_party/python/pep487/setup.cfg b/third_party/python/pep487/setup.cfg
new file mode 100644
index 0000000000..f80f35b7f5
--- /dev/null
+++ b/third_party/python/pep487/setup.cfg
@@ -0,0 +1,12 @@
+[bdist_wheel]
+universal = 1
+
+[aliases]
+test = pytest
+
+[tool:pytest]
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/pep487/setup.py b/third_party/python/pep487/setup.py
new file mode 100644
index 0000000000..a3d833202b
--- /dev/null
+++ b/third_party/python/pep487/setup.py
@@ -0,0 +1,121 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2017 by Gregor Giesen
+#
+# This file is part of PEP487.
+#
+# PEP487 is free software: you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License,
+# or (at your option) any later version.
+#
+# PEP487 is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with PEP487. If not, see <http://www.gnu.org/licenses/>.
+#
+
+# Always prefer setuptools over distutils
+from setuptools import setup, find_packages
+# To use a consistent encoding
+from codecs import open
+import re
+import os.path
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+version_file = os.path.join(here, 'lib', 'pep487', 'version.py')
+with open(version_file, 'rt') as fp:
+ re_version = re.compile(
+ r"""^__version__[ ]*=[ ]*["']{1,3}(.+)["']{1,3}$""")
+ for line in fp:
+ r = re_version.match(line)
+ if r is not None:
+ version = r.group(1)
+ break
+ else:
+ raise RuntimeError("Cannot find version string in %s" % version_file)
+
+# Get the long description from the README file
+with open(os.path.join(here, 'README.rst'), encoding='utf-8') as f:
+ long_description = f.read()
+
+setup(
+ name='pep487',
+
+ # Versions should comply with PEP440. For a discussion on single-sourcing
+ # the version across setup.py and the project code, see
+ # https://packaging.python.org/en/latest/single_source_version.html
+ version=version,
+
+ description='PEP487 - Simpler customisation of class creation',
+ long_description=long_description,
+
+ # The project's main homepage.
+ url='https://github.com/zaehlwerk/pep487',
+
+ # Author details
+ author='Gregor Giesen',
+ author_email='giesen@zaehlwerk.net',
+
+ # Choose your license
+ license="GPLv3",
+
+ # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ ],
+
+ # What does your project relate to?
+ keywords='pep487',
+
+ # You can just specify the packages manually here if your project is
+ # simple. Or you can use find_packages().
+ package_dir={'': 'lib'},
+ packages=find_packages('lib', exclude=['tests']),
+
+ # List run-time dependencies here. These will be installed by pip when
+ # your project is installed. For an analysis of "install_requires" vs pip's
+ # requirements files see:
+ # https://packaging.python.org/en/latest/requirements.html
+ install_requires=[],
+
+ # List additional groups of dependencies here (e.g. development
+ # dependencies). You can install these using the following syntax,
+ # for example:
+ # $ pip install -e .[dev,test]
+ extras_require={
+ 'test': ['pytest',
+ 'pytest-cov',
+ 'pytest-flakes',
+ 'pytest-mock',
+ 'pytest-pep8',
+ 'pytest-runner'],
+ },
+
+ # If there are data files included in your packages that need to be
+ # installed, specify them here. If using Python 2.6 or less, then these
+ # have to be included in MANIFEST.in as well.
+ package_data={},
+
+ setup_requires=['pytest-runner'],
+ tests_require=['pytest',
+ 'pytest-cov',
+ 'pytest-flakes',
+ 'pytest-pep8',
+ 'pytest-mock'],
+
+ # To provide executable scripts, use entry points in preference to the
+ # "scripts" keyword. Entry points provide cross-platform support and allow
+ # pip to create the appropriate form of executable for the target platform.
+ entry_points={},
+)
diff --git a/third_party/python/pip-tools/.appveyor.yml b/third_party/python/pip-tools/.appveyor.yml
new file mode 100644
index 0000000000..5df681cda4
--- /dev/null
+++ b/third_party/python/pip-tools/.appveyor.yml
@@ -0,0 +1,52 @@
+environment:
+ global:
+ PYTHON: "C:\\Python36"
+
+ matrix:
+ - TOXENV: py27-pip20.0-coverage
+ PIP: 20.0
+ - TOXENV: py27-piplatest-coverage
+ PIP: latest
+
+ - TOXENV: py35-pip20.0
+ PIP: 20.0
+ - TOXENV: py35-piplatest
+ PIP: latest
+
+ - TOXENV: py36-pip20.0
+ PIP: 20.0
+ - TOXENV: py36-piplatest
+ PIP: latest
+
+ - TOXENV: py37-pip20.0
+ PIP: 20.0
+ - TOXENV: py37-piplatest
+ PIP: latest
+
+ - TOXENV: py38-pip20.0-coverage
+ PIP: 20.0
+ - TOXENV: py38-piplatest-coverage
+ PIP: latest
+
+matrix:
+ fast_finish: true
+ allow_failures:
+ - PIP: master
+
+install:
+ - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
+ - python -m pip install -U tox virtualenv
+
+build: false
+
+test_script:
+ - tox
+
+after_test:
+ # Add tox environment to PATH.
+ - "SET PATH=%CD%\\.tox\\%TOXENV%\\scripts;%PATH%"
+ - IF NOT "x%TOXENV:-coverage=%"=="x%TOXENV%" (
+ pip install codecov &&
+ coverage xml &&
+ appveyor-retry codecov --required -X gcov pycov search -f coverage.xml -n %TOXENV%-windows
+ )
diff --git a/third_party/python/pip-tools/.bandit b/third_party/python/pip-tools/.bandit
new file mode 100644
index 0000000000..b9d346ecac
--- /dev/null
+++ b/third_party/python/pip-tools/.bandit
@@ -0,0 +1,2 @@
+[bandit]
+exclude: tests,.tox,.eggs,.venv,.git
diff --git a/third_party/python/pip-tools/.coveragerc b/third_party/python/pip-tools/.coveragerc
new file mode 100644
index 0000000000..26c6762a74
--- /dev/null
+++ b/third_party/python/pip-tools/.coveragerc
@@ -0,0 +1,8 @@
+[run]
+branch = True
+source = .
+omit =
+ piptools/_compat/*
+
+[report]
+include = piptools/*, tests/*
diff --git a/third_party/python/pip-tools/.fussyfox.yml b/third_party/python/pip-tools/.fussyfox.yml
new file mode 100644
index 0000000000..c488ca3525
--- /dev/null
+++ b/third_party/python/pip-tools/.fussyfox.yml
@@ -0,0 +1,3 @@
+- bandit
+- isort
+- flake8
diff --git a/third_party/python/pip-tools/.github/ISSUE_TEMPLATE/bug-report.md b/third_party/python/pip-tools/.github/ISSUE_TEMPLATE/bug-report.md
new file mode 100644
index 0000000000..d8be0c5674
--- /dev/null
+++ b/third_party/python/pip-tools/.github/ISSUE_TEMPLATE/bug-report.md
@@ -0,0 +1,28 @@
+---
+name: Bug report
+about: Create a report to help us improve
+
+---
+
+<!-- Describe the issue briefly here. -->
+
+#### Environment Versions
+
+1. OS Type
+1. Python version: `$ python -V`
+1. pip version: `$ pip --version`
+1. pip-tools version: `$ pip-compile --version`
+
+#### Steps to replicate
+
+1. ...
+2. ...
+3. ...
+
+#### Expected result
+
+...
+
+#### Actual result
+
+...
diff --git a/third_party/python/pip-tools/.github/ISSUE_TEMPLATE/feature-request.md b/third_party/python/pip-tools/.github/ISSUE_TEMPLATE/feature-request.md
new file mode 100644
index 0000000000..e2c7c5b4ed
--- /dev/null
+++ b/third_party/python/pip-tools/.github/ISSUE_TEMPLATE/feature-request.md
@@ -0,0 +1,19 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+
+---
+
+#### What's the problem this feature will solve?
+<!-- What are you trying to do, that you are unable to achieve with pip-tools as it currently stands? -->
+
+#### Describe the solution you'd like
+<!-- A clear and concise description of what you want to happen. -->
+
+<!-- Provide examples of real-world use cases that this would enable and how it solves the problem described above. -->
+
+#### Alternative Solutions
+<!-- Have you tried to workaround the problem using pip-tools or other tools? Or a different approach to solving this issue? Please elaborate here. -->
+
+#### Additional context
+<!-- Add any other context, links, etc. about the feature here. -->
diff --git a/third_party/python/pip-tools/.github/PULL_REQUEST_TEMPLATE.md b/third_party/python/pip-tools/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000..430ad24772
--- /dev/null
+++ b/third_party/python/pip-tools/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,9 @@
+<!--- Describe the changes here. --->
+
+**Changelog-friendly one-liner**: <!-- One-liner description here -->
+
+##### Contributor checklist
+
+- [ ] Provided the tests for the changes.
+- [ ] Gave a clear one-line description in the PR (that the maintainers can add to CHANGELOG.md on release).
+- [ ] Assigned the PR to an existing or new milestone for the target version (following [Semantic Versioning](https://blog.versioneye.com/2014/01/16/semantic-versioning/)).
diff --git a/third_party/python/pip-tools/.github/workflows/ci.yml b/third_party/python/pip-tools/.github/workflows/ci.yml
new file mode 100644
index 0000000000..b5da29dde1
--- /dev/null
+++ b/third_party/python/pip-tools/.github/workflows/ci.yml
@@ -0,0 +1,66 @@
+name: CI
+
+on:
+ pull_request:
+ push:
+ branches:
+ - master
+ tags:
+ schedule:
+ # Run everyday at 03:53 UTC
+ - cron: 53 3 * * *
+
+jobs:
+ test:
+ name: ${{ matrix.os }} / ${{ matrix.python-version }} / ${{ matrix.pip-version }}
+ runs-on: ${{ matrix.os }}-latest
+ strategy:
+ matrix:
+ os:
+ - Ubuntu
+ - Windows
+ - macOS
+ python-version:
+ - 3.8
+ - 2.7
+ - 3.5
+ - 3.6
+ - 3.7
+ pip-version:
+ - "latest"
+ - "20.2" # TODO: update to 20.1 after pip-20.2 being released
+ - "20.0"
+ include:
+ - os: Ubuntu
+ python-version: 3.9-dev
+ pip-version: latest
+
+ env:
+ PY_COLORS: 1
+ TOXENV: pip${{ matrix.pip-version }}-coverage
+ TOX_PARALLEL_NO_SPINNER: 1
+ steps:
+ - uses: actions/checkout@master
+ - name: Set up Python ${{ matrix.python-version }} from GitHub
+ if: "!endsWith(matrix.python-version, '-dev')"
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Set up Python ${{ matrix.python-version }} from deadsnakes
+ if: endsWith(matrix.python-version, '-dev')
+ uses: deadsnakes/action@v1.0.0
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Log python version info (${{ matrix.python-version }})
+ run: python --version --version
+ - name: Install test dependencies
+ run: python -m pip install -U tox virtualenv
+ - name: Prepare test environment
+ run: tox --notest -p auto --parallel-live
+ - name: Test pip ${{ matrix.pip-version }}
+ run: tox
+ - name: Upload coverage to Codecov
+ uses: codecov/codecov-action@v1.0.6
+ with:
+ file: ./coverage.xml
+ name: ${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.pip-version }}
diff --git a/third_party/python/pip-tools/.github/workflows/cron.yml b/third_party/python/pip-tools/.github/workflows/cron.yml
new file mode 100644
index 0000000000..149825a1d2
--- /dev/null
+++ b/third_party/python/pip-tools/.github/workflows/cron.yml
@@ -0,0 +1,73 @@
+name: Cron
+
+on:
+ schedule:
+ # Run every day at 00:00 UTC
+ - cron: 0 0 * * *
+
+jobs:
+ master:
+ name: ${{ matrix.os }} / ${{ matrix.python-version }} / ${{ matrix.pip-version }}
+ runs-on: ${{ matrix.os }}-latest
+ strategy:
+ matrix:
+ os:
+ - Ubuntu
+ - Windows
+ - MacOS
+ python-version:
+ - 3.8
+ - 2.7
+ - 3.5
+ - 3.6
+ - 3.7
+ pip-version:
+ - master
+ env:
+ PY_COLORS: 1
+ TOXENV: pip${{ matrix.pip-version }}
+ TOX_PARALLEL_NO_SPINNER: 1
+ steps:
+ - uses: actions/checkout@master
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install test dependencies
+ run: python -m pip install -U tox virtualenv
+ - name: Prepare test environment
+ run: tox --notest -p auto --parallel-live
+ - name: Test pip ${{ matrix.pip-version }}
+ run: tox
+
+ pypy:
+ name: ${{ matrix.os }} / ${{ matrix.python-version }} / ${{ matrix.pip-version }}
+ runs-on: ${{ matrix.os }}-latest
+ strategy:
+ matrix:
+ os:
+ - Ubuntu
+ - MacOS
+ # TODO: fix test_realistic_complex_sub_dependencies test on Windows
+ # - Windows
+ python-version:
+ - pypy3
+ - pypy2
+ pip-version:
+ - latest
+ env:
+ PY_COLORS: 1
+ TOXENV: pip${{ matrix.pip-version }}
+ TOX_PARALLEL_NO_SPINNER: 1
+ steps:
+ - uses: actions/checkout@master
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install tox
+ run: pip install tox
+ - name: Prepare test environment
+ run: tox --notest -p auto --parallel-live
+ - name: Test pip ${{ matrix.pip-version }}
+ run: tox
diff --git a/third_party/python/pip-tools/.github/workflows/qa.yml b/third_party/python/pip-tools/.github/workflows/qa.yml
new file mode 100644
index 0000000000..2c5d6f5c59
--- /dev/null
+++ b/third_party/python/pip-tools/.github/workflows/qa.yml
@@ -0,0 +1,43 @@
+name: QA
+
+on:
+ pull_request:
+ push:
+ branches:
+ - master
+ tags:
+
+jobs:
+ qa:
+ name: ${{ matrix.toxenv }}
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ toxenv:
+ - checkqa
+ - readme
+ python-version:
+ - "3.x"
+ env:
+ PY_COLORS: 1
+ TOXENV: ${{ matrix.toxenv }}
+ TOX_PARALLEL_NO_SPINNER: 1
+ steps:
+ - uses: actions/checkout@master
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Prepare cache key
+ id: cache-key
+ run: echo "::set-output name=sha-256::$(python -VV | sha256sum | cut -d' ' -f1)"
+ - uses: actions/cache@v1
+ with:
+ path: ~/.cache/pre-commit
+ key: pre-commit|${{ steps.cache-key.outputs.sha-256 }}|${{ hashFiles('.pre-commit-config.yaml') }}
+ - name: Install tox
+ run: pip install tox
+ - name: Prepare test environment
+ run: tox --notest -p auto --parallel-live
+ - name: Test ${{ matrix.toxenv }}
+ run: tox
diff --git a/third_party/python/pip-tools/.gitignore b/third_party/python/pip-tools/.gitignore
new file mode 100644
index 0000000000..bc17d7d80b
--- /dev/null
+++ b/third_party/python/pip-tools/.gitignore
@@ -0,0 +1,31 @@
+# Ignore cram test output
+*.t.err
+
+# Python cruft
+*.pyc
+
+# Virtualenvs
+.envrc
+.direnv
+.venv
+venv/
+
+# Testing
+.pytest_cache/
+.tox
+htmlcov
+
+# Build output
+build
+dist
+*.egg-info
+.coverage
+.cache
+
+# IDE
+.idea
+
+# Test files
+requirements.in
+requirements.txt
+.eggs/
diff --git a/third_party/python/pip-tools/.pre-commit-config.yaml b/third_party/python/pip-tools/.pre-commit-config.yaml
new file mode 100644
index 0000000000..cb06e17f07
--- /dev/null
+++ b/third_party/python/pip-tools/.pre-commit-config.yaml
@@ -0,0 +1,24 @@
+repos:
+ - repo: https://github.com/psf/black
+ rev: 19.10b0
+ hooks:
+ - id: black
+ language_version: python3
+ - repo: https://github.com/pre-commit/mirrors-isort
+ rev: v4.3.21
+ hooks:
+ - id: isort
+ language_version: python3
+ - repo: https://gitlab.com/pycqa/flake8
+ rev: 3.8.1
+ hooks:
+ - id: flake8
+ language_version: python3
+ additional_dependencies:
+ - flake8-pytest-style
+ - repo: https://github.com/PyCQA/bandit
+ rev: 1.6.2
+ hooks:
+ - id: bandit
+ language_version: python3
+ exclude: ^tests/
diff --git a/third_party/python/pip-tools/.pre-commit-hooks.yaml b/third_party/python/pip-tools/.pre-commit-hooks.yaml
new file mode 100644
index 0000000000..c70f0382ff
--- /dev/null
+++ b/third_party/python/pip-tools/.pre-commit-hooks.yaml
@@ -0,0 +1,7 @@
+- id: pip-compile
+ name: pip-compile
+ description: Automatically compile requirements.
+ entry: pip-compile
+ language: python
+ files: ^requirements\.(in|txt)$
+ pass_filenames: false
diff --git a/third_party/python/pip-tools/.travis.yml b/third_party/python/pip-tools/.travis.yml
new file mode 100644
index 0000000000..7ed071a83c
--- /dev/null
+++ b/third_party/python/pip-tools/.travis.yml
@@ -0,0 +1,72 @@
+dist: xenial
+sudo: false
+language: python
+python:
+ - "2.7"
+ - "3.5"
+ - "3.6"
+ - "3.7"
+ - "3.8"
+
+env:
+ # NOTE: keep this in sync with envlist in tox.ini for tox-travis.
+ - PIP=latest
+ - PIP=20.2 # TODO: update to 20.1 after pip-20.2 being released
+ - PIP=20.0
+
+cache: false
+install:
+ - travis_retry python -m pip install -U tox-travis virtualenv
+script:
+ - tox
+
+stages:
+- test
+- name: deploy
+ if: repo = jazzband/pip-tools AND tag IS present
+
+jobs:
+ include:
+ # QA checks.
+ - env: TOXENV=checkqa
+ python: 3.7
+ after_success: skip # No need coverage
+ cache:
+ directories:
+ - $HOME/.cache/pre-commit
+ - env: TOXENV=readme
+ python: 2.7
+ after_success: skip # No need coverage
+
+ # Only test pypy/pypy3 with latest pip.
+ - env: PIP=latest
+ python: "pypy2.7-6.0"
+ - env: PIP=latest
+ python: "pypy3.5-6.0"
+
+ - stage: deploy
+ install: skip # No need to install tox-travis on deploy.
+ script: skip # No test on the deploy stage.
+ python: 2.7
+ env: skip # No special env required.
+ after_success: true # No need coverage
+ deploy:
+ provider: pypi
+ user: jazzband
+ server: https://jazzband.co/projects/pip-tools/upload
+ distributions: sdist bdist_wheel
+ password:
+ secure: TCG9beQgarL/EDHiwSCgEf1JnofTroA5QRp2OTL3QC+eaar6FftqxcJQw3FwnHJ7NarI6E7bcxn9wDRs6tXqiLcyGOzWUnR4jQ94w/7YaLQjzLr8g1toRnb9dNwU1l14z2hmnzc4oRqu7+pi4wIpBx+11Ke9JXNcrC+cRFwCdLQ=
+ on:
+ tags: true
+ repo: jazzband/pip-tools
+ allow_failures:
+ - env: PIP=master
+
+after_success:
+ - travis_retry pip install codecov coveralls
+ - travis_retry codecov --required -n "py${TRAVIS_PYTHON_VERSION}-pip${PIP}-${TRAVIS_OS_NAME}"
+ - "COVERALLS_PARALLEL=true coveralls"
+
+notifications:
+ webhooks: https://coveralls.io/webhook
diff --git a/third_party/python/pip-tools/CHANGELOG.md b/third_party/python/pip-tools/CHANGELOG.md
new file mode 100644
index 0000000000..75b4960ebb
--- /dev/null
+++ b/third_party/python/pip-tools/CHANGELOG.md
@@ -0,0 +1,607 @@
+# 5.3.1 (2020-07-31)
+
+Bug Fixes:
+- Fix `pip-20.2` compatibility issue that caused `pip-tools` to sometimes fail to
+stabilize in a constant number of rounds
+([1194](https://github.com/jazzband/pip-tools/pull/1194)). Thanks @vphilippon
+
+# 5.3.0 (2020-07-26)
+
+Features:
+- Add `-h` alias for `--help` option to `pip-sync` and `pip-compile`
+([1163](https://github.com/jazzband/pip-tools/pull/1163)). Thanks @jan25
+- Add `pip>=20.2` support
+([1168](https://github.com/jazzband/pip-tools/pull/1168)). Thanks @atugushev
+- `pip-sync` now exits with code `1` on `--dry-run`
+([1172](https://github.com/jazzband/pip-tools/pull/1172)). Thanks @francisbrito
+- `pip-compile` no longer resolves constraints from `-c constraints.txt` that are not
+(yet) requirements
+ ([1175](https://github.com/jazzband/pip-tools/pull/1175)). Thanks @clslgrnc
+- Add `--reuse-hashes/--no-reuse-hashes` options to `pip-compile`
+ ([1177](https://github.com/jazzband/pip-tools/pull/1177)). Thanks @graingert
+
+# 5.2.1 (2020-06-09)
+
+Bug Fixes:
+- Fix a bug where `pip-compile` would lose some dependencies when updating
+a `requirements.txt`
+([1159](https://github.com/jazzband/pip-tools/pull/1159)). Thanks @richafrank
+
+
+# 5.2.0 (2020-05-27)
+
+Features:
+- Show basename of URLs when `pip-compile` generates hashes in a verbose mode
+([1113](https://github.com/jazzband/pip-tools/pull/1113)). Thanks @atugushev
+- Add `--emit-index-url/--no-emit-index-url` options to `pip-compile`
+([1130](https://github.com/jazzband/pip-tools/pull/1130)). Thanks @atugushev
+
+Bug Fixes:
+- Fix a bug where `pip-compile` would ignore some package versions when
+`PIP_PREFER_BINARY` is set
+([1119](https://github.com/jazzband/pip-tools/pull/1119)). Thanks @atugushev
+- Fix leaked URLs with credentials in the debug output of `pip-compile`.
+([1146](https://github.com/jazzband/pip-tools/pull/1146)). Thanks @atugushev
+- Fix a bug where URL requirements would have name collisions
+([1149](https://github.com/jazzband/pip-tools/pull/1149)). Thanks @geokala
+
+Deprecations:
+- Deprecate `--index/--no-index` in favor of `--emit-index-url/--no-emit-index-url`
+options in `pip-compile`
+([1130](https://github.com/jazzband/pip-tools/pull/1130)). Thanks @atugushev
+
+Other Changes:
+
+- Switch to `setuptools` declarative syntax through `setup.cfg`
+([1141](https://github.com/jazzband/pip-tools/pull/1141)). Thanks @jdufresne
+
+# 5.1.2 (2020-05-05)
+
+Bug Fixes:
+- Fix grouping of editables and non-editables requirements
+([1132](https://github.com/jazzband/pip-tools/pull/1132)). Thanks @richafrank
+
+# 5.1.1 (2020-05-01)
+
+Bug Fixes:
+- Fix a bug where `pip-compile` would generate hashes for `*.egg` files
+([#1122](https://github.com/jazzband/pip-tools/pull/1122)). Thanks @atugushev
+
+# 5.1.0 (2020-04-27)
+
+Features:
+- Show progress bar when downloading packages in `pip-compile` verbose mode
+([#949](https://github.com/jazzband/pip-tools/pull/949)). Thanks @atugushev
+- `pip-compile` now gets hashes from `PyPI` JSON API (if available) which significantly
+increases the speed of hashes generation
+([#1109](https://github.com/jazzband/pip-tools/pull/1109)). Thanks @atugushev
+
+# 5.0.0 (2020-04-16)
+
+Backwards Incompatible Changes:
+- `pip-tools` now requires `pip>=20.0` (previously `8.1.x` - `20.0.x`). Windows users,
+ make sure to use `python -m pip install pip-tools` to avoid issues with `pip`
+ self-update from now on
+([#1055](https://github.com/jazzband/pip-tools/pull/1055)). Thanks @atugushev
+- The `--build-isolation` option is now enabled by default for `pip-compile`
+([#1060](https://github.com/jazzband/pip-tools/pull/1060)). Thanks @hramezani
+
+Features:
+- Exclude requirements with non-matching markers from `pip-sync`
+([#927](https://github.com/jazzband/pip-tools/pull/927)). Thanks @AndydeCleyre
+- Add `pre-commit` hook for `pip-compile`
+([#976](https://github.com/jazzband/pip-tools/pull/976)). Thanks @atugushev
+- `pip-compile` and `pip-sync` now pass anything provided to the new `--pip-args` option on to `pip`
+([#1080](https://github.com/jazzband/pip-tools/pull/1080)). Thanks @AndydeCleyre
+- `pip-compile` output headers are now more accurate when `--` is used to escape filenames
+([#1080](https://github.com/jazzband/pip-tools/pull/1080)). Thanks @AndydeCleyre
+- Add `pip>=20.1` support
+([#1088](https://github.com/jazzband/pip-tools/pull/1088)). Thanks @atugushev
+
+Bug Fixes:
+- Fix a bug where editables that are both direct requirements and constraints wouldn't appear in `pip-compile` output
+([#1093](https://github.com/jazzband/pip-tools/pull/1093)). Thanks @richafrank
+- `pip-compile` now sorts format controls (`--no-binary/--only-binary`) to ensure consistent results
+([#1098](https://github.com/jazzband/pip-tools/pull/1098)). Thanks @richafrank
+
+Improved Documentation:
+- Add cross-environment usage documentation to `README`
+([#651](https://github.com/jazzband/pip-tools/pull/651)). Thanks @vphilippon
+- Add versions compatibility table to `README`
+([#1106](https://github.com/jazzband/pip-tools/pull/1106)). Thanks @atugushev
+
+# 4.5.1 (2020-02-26)
+
+Bug Fixes:
+- Strip line number annotations such as "(line XX)" from file requirements, to prevent diff noise when modifying input requirement files
+([#1075](https://github.com/jazzband/pip-tools/pull/1075)). Thanks @adamchainz
+
+Improved Documentation:
+- Updated `README` example outputs for primary requirement annotations
+([#1072](https://github.com/jazzband/pip-tools/pull/1072)). Thanks @richafrank
+
+# 4.5.0 (2020-02-20)
+
+Features:
+- Primary requirements and VCS dependencies are now annotated with any source `.in` files and reverse dependencies
+([#1058](https://github.com/jazzband/pip-tools/pull/1058)). Thanks @AndydeCleyre
+
+Bug Fixes:
+- Always use normalized path for cache directory as it is required in newer versions of `pip`
+([#1062](https://github.com/jazzband/pip-tools/pull/1062)). Thanks @kammala
+
+Improved Documentation:
+- Replace outdated link in the `README` with rationale for pinning
+([#1053](https://github.com/jazzband/pip-tools/pull/1053)). Thanks @m-aciek
+
+# 4.4.1 (2020-01-31)
+
+Bug Fixes:
+- Fix a bug where `pip-compile` would keep outdated options from `requirements.txt`
+([#1029](https://github.com/jazzband/pip-tools/pull/1029)). Thanks @atugushev
+- Fix the `No handlers could be found for logger "pip.*"` error by configuring the builtin logging module
+([#1035](https://github.com/jazzband/pip-tools/pull/1035)). Thanks @vphilippon
+- Fix a bug where dependencies of relevant constraints may be missing from output file
+([#1037](https://github.com/jazzband/pip-tools/pull/1037)). Thanks @jeevb
+- Upgrade the minimum version of `click` from `6.0` to `7.0` in `setup.py`
+([#1039](https://github.com/jazzband/pip-tools/pull/1039)). Thanks @hramezani
+- Ensure that depcache considers the python implementation such that (for example) `cpython3.6` does not poison the results of `pypy3.6`
+([#1050](https://github.com/jazzband/pip-tools/pull/1050)). Thanks @asottile
+
+Improved Documentation:
+- Make the `README` more imperative about installing into a project's virtual environment to avoid confusion
+([#1023](https://github.com/jazzband/pip-tools/pull/1023)). Thanks @tekumara
+- Add a note to the `README` about how to install requirements on different stages to [Workflow for layered requirements](https://github.com/jazzband/pip-tools#workflow-for-layered-requirements) section
+([#1044](https://github.com/jazzband/pip-tools/pull/1044)). Thanks @hramezani
+
+# 4.4.0 (2020-01-21)
+
+Features:
+- Add `--cache-dir` option to `pip-compile`
+([#1022](https://github.com/jazzband/pip-tools/pull/1022)). Thanks @richafrank
+- Add `pip>=20.0` support
+([#1024](https://github.com/jazzband/pip-tools/pull/1024)). Thanks @atugushev
+
+Bug Fixes:
+- Fix a bug where `pip-compile --upgrade-package` would upgrade those passed packages not already required according to the `*.in` and `*.txt` files
+([#1031](https://github.com/jazzband/pip-tools/pull/1031)). Thanks @AndydeCleyre
+
+# 4.3.0 (2019-11-25)
+
+Features:
+- Add Python 3.8 support
+([#956](https://github.com/jazzband/pip-tools/pull/956)). Thanks @hramezani
+- Unpin commented out unsafe packages in `requirements.txt`
+([#975](https://github.com/jazzband/pip-tools/pull/975)). Thanks @atugushev
+
+Bug Fixes:
+- Fix `pip-compile` not copying `--trusted-host` from `requirements.in` to `requirements.txt`
+([#964](https://github.com/jazzband/pip-tools/pull/964)). Thanks @atugushev
+- Add compatibility with `pip>=20.0`
+([#953](https://github.com/jazzband/pip-tools/pull/953) and [#978](https://github.com/jazzband/pip-tools/pull/978)). Thanks @atugushev
+- Fix a bug where the resolver wouldn't clean up the ephemeral wheel cache
+([#968](https://github.com/jazzband/pip-tools/pull/968)). Thanks @atugushev
+
+Improved Documentation:
+- Add a note to `README` about `requirements.txt` file, which would possibly interfere if you're compiling from scratch
+([#959](https://github.com/jazzband/pip-tools/pull/959)). Thanks @hramezani
+
+# 4.2.0 (2019-10-12)
+
+Features:
+- Add `--ask` option to `pip-sync`
+([#913](https://github.com/jazzband/pip-tools/pull/913)). Thanks @georgek
+
+Bug Fixes:
+- Add compatibility with `pip>=19.3`
+([#864](https://github.com/jazzband/pip-tools/pull/864), [#904](https://github.com/jazzband/pip-tools/pull/904), [#910](https://github.com/jazzband/pip-tools/pull/910), [#912](https://github.com/jazzband/pip-tools/pull/912) and [#915](https://github.com/jazzband/pip-tools/pull/915)). Thanks @atugushev
+- Ensure `pip-compile --no-header <blank requirements.in>` creates/overwrites `requirements.txt`
+([#909](https://github.com/jazzband/pip-tools/pull/909)). Thanks @AndydeCleyre
+- Fix `pip-compile --upgrade-package` removing the «via» annotation
+([#931](https://github.com/jazzband/pip-tools/pull/931)). Thanks @hramezani
+
+Improved Documentation:
+- Add info to `README` about layered requirements files and `-c` flag
+([#905](https://github.com/jazzband/pip-tools/pull/905)). Thanks @jamescooke
+
+# 4.1.0 (2019-08-26)
+
+Features:
+- Add `--no-emit-find-links` option to `pip-compile`
+([#873](https://github.com/jazzband/pip-tools/pull/873)). Thanks @jacobtolar
+
+Bug Fixes:
+- Prevent `--dry-run` log message from being printed with `--quiet` option in `pip-compile`
+([#861](https://github.com/jazzband/pip-tools/pull/861)). Thanks @ddormer
+- Fix resolution of requirements from Git URLs without `-e`
+([#879](https://github.com/jazzband/pip-tools/pull/879)). Thanks @andersk
+
+# 4.0.0 (2019-07-25)
+
+Backwards Incompatible Changes:
+- Drop support for EOL Python 3.4
+([#803](https://github.com/jazzband/pip-tools/pull/803)). Thanks @auvipy
+
+Bug Fixes:
+- Fix `pip>=19.2` compatibility
+([#857](https://github.com/jazzband/pip-tools/pull/857)). Thanks @atugushev
+
+# 3.9.0 (2019-07-17)
+
+Features:
+- Print provenance information when `pip-compile` fails
+([#837](https://github.com/jazzband/pip-tools/pull/837)). Thanks @jakevdp
+
+Bug Fixes:
+- Output all logging to stderr instead of stdout
+([#834](https://github.com/jazzband/pip-tools/pull/834)). Thanks @georgek
+- Fix output file update with `--dry-run` option in `pip-compile`
+([#842](https://github.com/jazzband/pip-tools/pull/842)). Thanks @shipmints and @atugushev
+
+# 3.8.0 (2019-06-06)
+
+Features:
+- Options `--upgrade` and `--upgrade-package` are no longer mutually exclusive
+([#831](https://github.com/jazzband/pip-tools/pull/831)). Thanks @adamchainz
+
+Bug Fixes:
+- Fix `--generate-hashes` with bare VCS URLs
+([#812](https://github.com/jazzband/pip-tools/pull/812)). Thanks @jcushman
+- Fix issues with `UnicodeError` when installing `pip-tools` from source in some systems
+([#816](https://github.com/jazzband/pip-tools/pull/816)). Thanks @AbdealiJK
+- Respect `--pre` option in the input file
+([#822](https://github.com/jazzband/pip-tools/pull/822)). Thanks @atugushev
+- Option `--upgrade-package` now works even if the output file does not exist
+([#831](https://github.com/jazzband/pip-tools/pull/831)). Thanks @adamchainz
+
+
+# 3.7.0 (2019-05-09)
+
+Features:
+- Show a progress bar when generating hashes in `pip-compile` verbose mode
+([#743](https://github.com/jazzband/pip-tools/pull/743)). Thanks @atugushev
+- Add options `--cert` and `--client-cert` to `pip-sync`
+([#798](https://github.com/jazzband/pip-tools/pull/798)). Thanks @atugushev
+- Add support for `--find-links` in `pip-compile` output
+([#793](https://github.com/jazzband/pip-tools/pull/793)). Thanks @estan and @atugushev
+- Normalize «command to run» in `pip-compile` headers
+([#800](https://github.com/jazzband/pip-tools/pull/800)). Thanks @atugushev
+- Support URLs as packages
+([#807](https://github.com/jazzband/pip-tools/pull/807)). Thanks @jcushman, @nim65s and @toejough
+
+Bug Fixes:
+- Fix replacing password to asterisks in `pip-compile`
+([#808](https://github.com/jazzband/pip-tools/pull/808)). Thanks @atugushev
+
+# 3.6.1 (2019-04-24)
+
+Bug Fixes:
+- Fix `pip>=19.1` compatibility
+([#795](https://github.com/jazzband/pip-tools/pull/795)). Thanks @atugushev
+
+# 3.6.0 (2019-04-03)
+
+Features:
+- Show less output on `pip-sync` with `--quiet` option
+([#765](https://github.com/jazzband/pip-tools/pull/765)). Thanks @atugushev
+- Support the flag `--trusted-host` in `pip-sync`
+([#777](https://github.com/jazzband/pip-tools/pull/777)). Thanks @firebirdberlin
+
+# 3.5.0 (2019-03-13)
+
+Features:
+- Show default index url provided by `pip`
+([#735](https://github.com/jazzband/pip-tools/pull/735)). Thanks @atugushev
+- Add an option to allow enabling/disabling build isolation
+([#758](https://github.com/jazzband/pip-tools/pull/758)). Thanks @atugushev
+
+Bug Fixes:
+- Fix the output file for `pip-compile` with an explicit `setup.py` as source file
+([#731](https://github.com/jazzband/pip-tools/pull/731)). Thanks @atugushev
+- Fix order issue with generated lock file when `hashes` and `markers` are used together
+([#763](https://github.com/jazzband/pip-tools/pull/763)). Thanks @milind-shakya-sp
+
+# 3.4.0 (2019-02-19)
+
+Features:
+- Add option `--quiet` to `pip-compile`
+([#720](https://github.com/jazzband/pip-tools/pull/720)). Thanks @bendikro
+- Emit the original command to the `pip-compile`'s header
+([#733](https://github.com/jazzband/pip-tools/pull/733)). Thanks @atugushev
+
+Bug Fixes:
+- Fix `pip-sync` to use the pip script corresponding to the Python version
+([#737](https://github.com/jazzband/pip-tools/pull/737)). Thanks @atugushev
+
+# 3.3.2 (2019-01-26)
+
+Bug Fixes:
+- Fix `pip-sync` with a temporary requirement file on Windows
+([#723](https://github.com/jazzband/pip-tools/pull/723)). Thanks @atugushev
+- Fix `pip-sync` to prevent uninstall of stdlib and dev packages
+([#718](https://github.com/jazzband/pip-tools/pull/718)). Thanks @atugushev
+
+# 3.3.1 (2019-01-24)
+
+- Re-release of 3.3.0 after fixing the deployment pipeline
+([#716](https://github.com/jazzband/pip-tools/issues/716)). Thanks @atugushev
+
+# 3.3.0 (2019-01-23)
+(Unreleased - Deployment pipeline issue, see 3.3.1)
+
+Features:
+- Added support for `pip` 19.0
+([#715](https://github.com/jazzband/pip-tools/pull/715)). Thanks @atugushev
+- Add `--allow-unsafe` to update instructions in the generated `requirements.txt`
+([#708](https://github.com/jazzband/pip-tools/pull/708)). Thanks @richafrank
+
+Bug Fixes:
+- Fix `pip-sync` to check hashes
+([#706](https://github.com/jazzband/pip-tools/pull/706)). Thanks @atugushev
+
+# 3.2.0 (2018-12-18)
+
+Features:
+- Apply version constraints specified with package upgrade option (`-P, --upgrade-package`)
+([#694](https://github.com/jazzband/pip-tools/pull/694)). Thanks @richafrank
+
+# 3.1.0 (2018-10-05)
+
+Features:
+- Added support for `pip` 18.1
+([#689](https://github.com/jazzband/pip-tools/pull/689)). Thanks @vphilippon
+
+# 3.0.0 (2018-09-24)
+
+Major changes:
+- Update `pip-tools` for native `pip` 8, 9, 10 and 18 compatibility, un-vendoring `pip` to use the user-installed `pip`
+([#657](https://github.com/jazzband/pip-tools/pull/657) and [#672](https://github.com/jazzband/pip-tools/pull/672)).
+Thanks to @techalchemy, @suutari, @tysonclugg and @vphilippon for contributing on this.
+
+Features:
+- Removed the dependency on the external library `first`
+([#676](https://github.com/jazzband/pip-tools/pull/676)). Thanks @jdufresne
+
+# 2.0.2 (2018-04-28)
+
+Bug Fixes:
+- Added clearer error reporting when skipping pre-releases
+([#655](https://github.com/jazzband/pip-tools/pull/655)). Thanks @WoLpH
+
+# 2.0.1 (2018-04-15)
+
+Bug Fixes:
+- Added missing package data from vendored pip, such as missing cacert.pem file. Thanks @vphilippon
+
+# 2.0.0 (2018-04-15)
+
+Major changes:
+- Vendored `pip` 9.0.3 to keep compatibility for users with `pip` 10.0.0
+([#644](https://github.com/jazzband/pip-tools/pull/644)). Thanks @vphilippon
+
+Features:
+- Improved the speed of pip-compile --generate-hashes by caching the hashes from an existing output file
+([#641](https://github.com/jazzband/pip-tools/pull/641)). Thanks @justicz
+- Added a `pip-sync --user` option to restrict attention to user-local directory
+([#642](https://github.com/jazzband/pip-tools/pull/642)). Thanks @jbergknoff-10e
+- Removed the hard dependency on setuptools
+([#645](https://github.com/jazzband/pip-tools/pull/645)). Thanks @vphilippon
+
+Bug fixes:
+- The pip environment markers on top-level requirements in the source file (requirements.in)
+are now properly handled and will only be processed in the right environment
+([#647](https://github.com/jazzband/pip-tools/pull/647)). Thanks @JoergRittinger
+
+# 1.11.0 (2017-11-30)
+
+Features:
+- Allow editable packages in requirements.in with `pip-compile --generate-hashes` ([#524](https://github.com/jazzband/pip-tools/pull/524)). Thanks @jdufresne
+- Allow for CA bundles with `pip-compile --cert` ([#612](https://github.com/jazzband/pip-tools/pull/612)). Thanks @khwilson
+- Improved `pip-compile` duration with large locally available editable requirement by skipping a copy to the cache
+([#583](https://github.com/jazzband/pip-tools/pull/583)). Thanks @costypetrisor
+- Slightly improved the `NoCandidateFound` error message on potential causes ([#614](https://github.com/jazzband/pip-tools/pull/614)). Thanks @vphilippon
+
+Bug Fixes:
+- Add `-markerlib` to the list of `PACKAGES_TO_IGNORE` of `pip-sync` ([#613](https://github.com/jazzband/pip-tools/pull/613)).
+
+# 1.10.2 (2017-11-22)
+
+Bug Fixes:
+- Fixed bug causing dependencies from invalid wheels for the current platform to be included ([#571](https://github.com/jazzband/pip-tools/pull/571)).
+- `pip-sync` will respect environment markers in the requirements.txt ([#600](https://github.com/jazzband/pip-tools/pull/600)). Thanks @hazmat345
+- Converted the ReadMe to have a nice description rendering on PyPI. Thanks @bittner
+
+# 1.10.1 (2017-09-27)
+
+Bug Fixes:
+- Fixed bug breaking `pip-sync` on Python 3, raising `TypeError: '<' not supported between instances of 'InstallRequirement' and 'InstallRequirement'` ([#570](https://github.com/jazzband/pip-tools/pull/570)).
+
+# 1.10.0 (2017-09-27)
+
+Features:
+- `--generate-hashes` now generates hashes for all wheels,
+not only wheels for the currently running platform ([#520](https://github.com/jazzband/pip-tools/pull/520)). Thanks @jdufresne
+- Added a `-q`/`--quiet` argument to the pip-sync command to reduce log output.
+
+Bug Fixes:
+- Fixed bug where unsafe packages would get pinned in generated requirements files
+when `--allow-unsafe` was not set. ([#517](https://github.com/jazzband/pip-tools/pull/517)). Thanks @dschaller
+- Fixed bug where editable PyPI dependencies would have a `download_dir` and be exposed to `git-checkout-index`
+(thus losing their VCS directory), causing `python setup.py egg_info` to fail ([#385](https://github.com/jazzband/pip-tools/pull/385) and [#538](https://github.com/jazzband/pip-tools/pull/538)). Thanks @blueyed and @dfee
+- Fixed bug where some primary dependencies were annotated with "via" info comments. ([#542](https://github.com/jazzband/pip-tools/pull/542)). Thanks @quantus
+- Fixed bug where pkg-resources would be removed by pip-sync in Ubuntu. ([#555](https://github.com/jazzband/pip-tools/pull/555)). Thanks @cemsbr
+- Fixed bug where the resolver would sometimes not stabilize on requirements specifying extras. ([#566](https://github.com/jazzband/pip-tools/pull/566)). Thanks @vphilippon
+- Fixed a Unicode encoding error when a distribution package contains non-ASCII file names ([#567](https://github.com/jazzband/pip-tools/pull/567)). Thanks @suutari
+- Fixed package hashing doing unnecessary unpacking ([#557](https://github.com/jazzband/pip-tools/pull/557)). Thanks @suutari-ai
+
+# 1.9.0 (2017-04-12)
+
+Features:
+- Added ability to read requirements from `setup.py` instead of just `requirements.in` ([#418](https://github.com/jazzband/pip-tools/pull/418)). Thanks to @tysonclugg and @majuscule.
+- Added a `--max-rounds` argument to the pip-compile command to allow for solving large requirement sets ([#472](https://github.com/jazzband/pip-tools/pull/472)). Thanks @derek-miller.
+- Exclude unsafe packages' dependencies when `--allow-unsafe` is not in use ([#441](https://github.com/jazzband/pip-tools/pull/441)). Thanks @jdufresne.
+- Exclude irrelevant pip constraints ([#471](https://github.com/jazzband/pip-tools/pull/471)). Thanks @derek-miller.
+- Allow control over emitting trusted-host to the compiled requirements. ([#448](https://github.com/jazzband/pip-tools/pull/448)). Thanks @tonyseek.
+- Allow running as a Python module ([#461](https://github.com/jazzband/pip-tools/pull/461)). Thanks @AndreLouisCaron.
+- Preserve environment markers in generated requirements.txt. ([#460](https://github.com/jazzband/pip-tools/pull/460)). Thanks @barrywhart.
+
+Bug Fixes:
+- Fixed the --upgrade-package option to respect the given package list to update ([#491](https://github.com/jazzband/pip-tools/pull/491)).
+- Fixed the default output file name when the source file has no extension ([#488](https://github.com/jazzband/pip-tools/pull/488)). Thanks @vphilippon
+- Fixed crash on editable requirements introduced in 1.8.2.
+- Fixed duplicated --trusted-host, --extra-index-url and --index-url in the generated requirements.
+
+# 1.8.2 (2017-03-28)
+
+- Regression fix: editable reqs were losing their dependencies after the first round ([#476](https://github.com/jazzband/pip-tools/pull/476))
+ Thanks @mattlong
+- Remove duplicate index urls in generated requirements.txt ([#468](https://github.com/jazzband/pip-tools/pull/468))
+ Thanks @majuscule
+
+# 1.8.1 (2017-03-22)
+
+- Recalculate secondary dependencies between rounds (#378)
+- Calculated dependencies could be left with wrong candidates when
+  top-level requirements happen to also be pinned in sub-dependencies (#450)
+- Fix duplicate entries that could happen in generated requirements.txt (#427)
+- Gracefully report invalid pip version (#457)
+- Fix capitalization in the generated requirements.txt; packages will always be lowercased (#452)
+
+# 1.8.0 (2016-11-17)
+
+- Adds support for upgrading individual packages with a new option
+ `--upgrade-package`. To upgrade a _specific_ package to the latest or
+ a specific version use `--upgrade-package <pkg>`. To upgrade all packages,
+ you can still use `pip-compile --upgrade`. (#409)
+- Adds support for pinning dependencies even further by including the hashes
+ found on PyPI at compilation time, which will be re-checked when dependencies
+ are installed at installation time. This adds protection against packages
+ that are tampered with. (#383)
+- Improve support for extras, like `hypothesis[django]`
+- Drop support for pip < 8
+
+
+# 1.7.1 (2016-10-20)
+
+- Add `--allow-unsafe` option (#377)
+
+
+# 1.7.0 (2016-07-06)
+
+- Add compatibility with pip >= 8.1.2 (#374)
+ Thanks so much, @jmbowman!
+
+
+# 1.6.5 (2016-05-11)
+
+- Add warning that pip >= 8.1.2 is not supported until 1.7.x is out
+
+
+# 1.6.4 (2016-05-03)
+
+- Incorporate fix for atomic file saving behaviour on the Windows platform
+ (see #351)
+
+
+# 1.6.3 (2016-05-02)
+
+- PyPI won't let me upload 1.6.2
+
+
+# 1.6.2 (2016-05-02)
+
+- Respect pip configuration from pip.{ini,conf}
+- Fixes for atomic-saving of output files on Windows (see #351)
+
+
+# 1.6.1 (2016-04-06)
+
+Minor changes:
+- pip-sync now supports being invoked from within and outside an activated
+ virtualenv (see #317)
+- pip-compile: support -U as a shorthand for --upgrade
+- pip-compile: support pip's --no-binary and --only-binary flags
+
+Fixes:
+- Change header format of output files to mention all input files
+
+
+# 1.6 (2016-02-05)
+
+Major change:
+- pip-compile will by default try to fulfill package specs by looking at
+ a previously compiled output file first, before checking PyPI. This means
+ pip-compile will only update the requirements.txt when it absolutely has to.
+ To get the old behaviour (picking the latest version of all packages from
+ PyPI), use the new `--upgrade` option.
+
+Minor changes:
+- Bugfix where pip-compile would lose "via" info when on pip 8 (see #313)
+- Ensure cache dir exists (see #315)
+
+
+# 1.5 (2016-01-23)
+
+- Add support for pip >= 8
+- Drop support for pip < 7
+- Fix bug where `pip-sync` fails to uninstall packages if you're using the
+ `--no-index` (or other) flags
+
+
+# 1.4.5 (2016-01-20)
+
+- Add `--no-index` flag to `pip-compile` to avoid emitting `--index-url` into
+ the output (useful if you have configured a different index in your global
+ ~/.pip/pip.conf, for example)
+- Fix: ignore stdlib backport packages, like `argparse`, when listing which
+ packages will be installed/uninstalled (#286)
+- Fix pip-sync failed uninstalling packages when using `--find-links` (#298)
+- Explicitly error when pip-tools is used with pip 8.0+ (for now)
+
+
+# 1.4.4 (2016-01-11)
+
+- Fix: unintended change in behaviour where packages installed by `pip-sync`
+ could accidentally get upgraded under certain conditions, even though the
+ requirements.txt would dictate otherwise (see #290)
+
+
+# 1.4.3 (2016-01-06)
+
+- Fix: add `--index-url` and `--extra-index-url` options to `pip-sync`
+- Fix: always install using `--upgrade` flag when running `pip-sync`
+
+
+# 1.4.2 (2015-12-13)
+
+- Fix bug where umask was ignored when writing requirement files (#268)
+
+
+# 1.4.1 (2015-12-13)
+
+- Fix bug where successive invocations of pip-sync with editables kept
+ uninstalling/installing them (fixes #270)
+
+
+# 1.4.0 (2015-12-13)
+
+- Add command line option -f / --find-links
+- Add command line option --no-index
+- Add command line alias -n (for --dry-run)
+- Fix a unicode issue
+
+
+# 1.3.0 (2015-12-08)
+
+- Support multiple requirement files to pip-compile
+- Support requirements from stdin for pip-compile
+- Support --output-file option on pip-compile, to redirect output to a file (or stdout)
+
+
+# 1.2.0 (2015-11-30)
+
+- Add CHANGELOG :)
+- Support pip-sync'ing editable requirements
+- Support extras properly (i.e. package[foo] syntax)
+
+(Anything before 1.2.0 was not recorded.)
diff --git a/third_party/python/pip-tools/CONTRIBUTING.md b/third_party/python/pip-tools/CONTRIBUTING.md
new file mode 100644
index 0000000000..7a3c83bf9a
--- /dev/null
+++ b/third_party/python/pip-tools/CONTRIBUTING.md
@@ -0,0 +1,49 @@
+[![Jazzband](https://jazzband.co/static/img/jazzband.svg)](https://jazzband.co/)
+
+This is a [Jazzband](https://jazzband.co/) project. By contributing you agree
+to abide by the [Contributor Code of Conduct](https://jazzband.co/about/conduct)
+and follow the [guidelines](https://jazzband.co/about/guidelines).
+
+## Project Contribution Guidelines
+
+Here are a few additional or emphasized guidelines to follow when contributing to pip-tools:
+- Install pip-tools in development mode and its test dependencies with `pip install -e .[testing]`.
+- Check with `tox -e checkqa` to make sure your changes do not break the style conventions (see the example workflow after this list).
+- Always provide tests for your changes.
+- Give a clear one-line description in the PR (that the maintainers can add to [CHANGELOG](CHANGELOG.md) afterwards).
+- Wait for the review of at least one other contributor before merging (even if you're a Jazzband member).
+- Before merging, assign the PR to a milestone for a version to help with the release process.
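+
+As a quick sketch of a typical local workflow (this assumes an activated
+virtualenv, and that a plain `tox` run executes the default test environments
+defined in the project's `tox.ini`):
+
+```bash
+# install pip-tools in development mode together with its test dependencies
+pip install -e .[testing]
+
+# check that your changes do not break the style conventions
+tox -e checkqa
+
+# run the test suite
+tox
+```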
+
+The only exception to those guidelines is for trivial changes, such as
+documentation corrections or contributions that do not change pip-tools itself.
+
+Contributions following these guidelines are always welcomed, encouraged and appreciated.
+
+## Project Release Process
+
+Jazzband aims to give full access to all members, including performing releases, as described in the
+[Jazzband Releases documentation](https://jazzband.co/about/releases).
+
+To help keep track of the releases and their changes, here's the current release process:
+- Check to see if any recently merged PRs are missing from the milestone of the version about to be released.
+- Create a branch for the release. *Ex: release-3.4.0*.
+- Update the [CHANGELOG](CHANGELOG.md) with the version, date and using the one-line descriptions
+ from the PRs included in the milestone of the version.
+ Check the previous release changelog format for an example. Don't forget the "Thanks @contributor" mentions.
+- Push the branch to your fork and create a pull request.
+- Merge the pull request after the changes have been approved.
+- Make sure that the tests/CI still pass.
+- Once ready, go to `Github pip-tools Homepage > releases tab > Draft a new release` and type in:
+ - *Tag version:* The exact version number, following [Semantic Versioning](https://blog.versioneye.com/2014/01/16/semantic-versioning/). *Ex: 3.4.0*
+ - *Target:* master. As a general rule, the HEAD commit of the master branch should be the release target.
+ - *Release title:* Same as the tag. *Ex: 3.4.0*
+ - *Describe this release:* Copy of this release's changelog segment.
+- Publish release. This will push a tag on the HEAD of master, trigger the CI pipeline and
+ deploy a pip-tools release in the **Jazzband private package index** upon success.
+- The pip-tools "lead" project members will receive an email notification to review the release and
+ deploy it to the public PyPI if all is correct.
+- Once the release to the public PyPI is confirmed, close the milestone.
+
+Please be mindful of others before and while performing a release, and use this access responsibly.
+
+Do not hesitate to ask questions if you have any before performing a release.
diff --git a/third_party/python/pip-tools/LICENSE b/third_party/python/pip-tools/LICENSE
new file mode 100644
index 0000000000..64719ca9f5
--- /dev/null
+++ b/third_party/python/pip-tools/LICENSE
@@ -0,0 +1,26 @@
+Copyright (c). All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of pip-tools nor the names of its contributors may be
+ used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/pip-tools/PKG-INFO b/third_party/python/pip-tools/PKG-INFO
new file mode 100644
index 0000000000..ea065f0495
--- /dev/null
+++ b/third_party/python/pip-tools/PKG-INFO
@@ -0,0 +1,495 @@
+Metadata-Version: 2.1
+Name: pip-tools
+Version: 5.3.1
+Summary: pip-tools keeps your pinned dependencies fresh.
+Home-page: https://github.com/jazzband/pip-tools/
+Author: Vincent Driessen
+Author-email: me@nvie.com
+License: BSD
+Description: |jazzband| |pypi| |pyversions| |buildstatus-travis| |buildstatus-appveyor| |codecov|
+
+ ==================================
+ pip-tools = pip-compile + pip-sync
+ ==================================
+
+ A set of command line tools to help you keep your ``pip``-based packages fresh,
+        even when you've pinned them. `You do pin them, right?`_ (In building your Python application and its dependencies for production, you want to make sure that your builds are predictable and deterministic.)
+
+ .. image:: https://github.com/jazzband/pip-tools/raw/master/img/pip-tools-overview.png
+ :alt: pip-tools overview for phase II
+
+ .. |buildstatus-travis| image:: https://img.shields.io/travis/jazzband/pip-tools/master.svg?logo=travis
+ :alt: Travis CI build status
+ :target: https://travis-ci.org/jazzband/pip-tools
+ .. |buildstatus-appveyor| image:: https://img.shields.io/appveyor/ci/jazzband/pip-tools/master.svg?logo=appveyor
+ :alt: AppVeyor build status
+ :target: https://ci.appveyor.com/project/jazzband/pip-tools
+ .. |codecov| image:: https://codecov.io/gh/jazzband/pip-tools/branch/master/graph/badge.svg
+ :alt: Coverage
+ :target: https://codecov.io/gh/jazzband/pip-tools
+ .. |jazzband| image:: https://jazzband.co/static/img/badge.svg
+ :alt: Jazzband
+ :target: https://jazzband.co/
+ .. |pypi| image:: https://img.shields.io/pypi/v/pip-tools.svg
+ :alt: PyPI version
+ :target: https://pypi.org/project/pip-tools/
+ .. |pyversions| image:: https://img.shields.io/pypi/pyversions/pip-tools.svg
+ :alt: Supported Python versions
+ :target: https://pypi.org/project/pip-tools/
+ .. _You do pin them, right?: http://nvie.com/posts/pin-your-packages/
+
+
+ Installation
+ ============
+
+ Similar to ``pip``, ``pip-tools`` must be installed in each of your project's
+ `virtual environments`_:
+
+ .. code-block:: bash
+
+ $ source /path/to/venv/bin/activate
+ (venv)$ python -m pip install pip-tools
+
+ **Note**: all of the remaining example commands assume you've activated your
+ project's virtual environment.
+
+ .. _virtual environments: https://packaging.python.org/tutorials/installing-packages/#creating-virtual-environments
+
+ Example usage for ``pip-compile``
+ =================================
+
+ The ``pip-compile`` command lets you compile a ``requirements.txt`` file from
+ your dependencies, specified in either ``setup.py`` or ``requirements.in``.
+
+ Run it with ``pip-compile`` or ``python -m piptools compile``. If you use
+ multiple Python versions, you can run ``pip-compile`` as ``py -X.Y -m piptools
+ compile`` on Windows and ``pythonX.Y -m piptools compile`` on other systems.
+
+ ``pip-compile`` should be run from the same virtual environment as your
+ project so conditional dependencies that require a specific Python version,
+ or other environment markers, resolve relative to your project's
+ environment.
+
+ **Note**: ensure you don't have ``requirements.txt`` if you compile
+        ``setup.py`` or ``requirements.in`` from scratch; otherwise, it might
+ interfere.
+
+ Requirements from ``setup.py``
+ ------------------------------
+
+ Suppose you have a Django project, and want to pin it for production.
+ If you have a ``setup.py`` with ``install_requires=['django']``, then run
+ ``pip-compile`` without any arguments:
+
+ .. code-block:: bash
+
+ $ pip-compile
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile
+ #
+ asgiref==3.2.3 # via django
+ django==3.0.3 # via my_django_project (setup.py)
+ pytz==2019.3 # via django
+ sqlparse==0.3.0 # via django
+
+ ``pip-compile`` will produce your ``requirements.txt``, with all the Django
+ dependencies (and all underlying dependencies) pinned.
+
+ Without ``setup.py``
+ --------------------
+
+ If you don't use ``setup.py`` (`it's easy to write one`_), you can create a
+ ``requirements.in`` file to declare the Django dependency:
+
+ .. code-block:: ini
+
+ # requirements.in
+ django
+
+ Now, run ``pip-compile requirements.in``:
+
+ .. code-block:: bash
+
+ $ pip-compile requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile requirements.in
+ #
+ asgiref==3.2.3 # via django
+ django==3.0.3 # via -r requirements.in
+ pytz==2019.3 # via django
+ sqlparse==0.3.0 # via django
+
+ And it will produce your ``requirements.txt``, with all the Django dependencies
+ (and all underlying dependencies) pinned.
+
+ .. _it's easy to write one: https://packaging.python.org/guides/distributing-packages-using-setuptools/#configuring-your-project
+
+ Using hashes
+ ------------
+
+ If you would like to use *Hash-Checking Mode* available in ``pip`` since
+ version 8.0, ``pip-compile`` offers ``--generate-hashes`` flag:
+
+ .. code-block:: bash
+
+ $ pip-compile --generate-hashes requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile --generate-hashes requirements.in
+ #
+ asgiref==3.2.3 \
+ --hash=sha256:7e06d934a7718bf3975acbf87780ba678957b87c7adc056f13b6215d610695a0 \
+ --hash=sha256:ea448f92fc35a0ef4b1508f53a04c4670255a3f33d22a81c8fc9c872036adbe5 \
+ # via django
+ django==3.0.3 \
+ --hash=sha256:2f1ba1db8648484dd5c238fb62504777b7ad090c81c5f1fd8d5eb5ec21b5f283 \
+ --hash=sha256:c91c91a7ad6ef67a874a4f76f58ba534f9208412692a840e1d125eb5c279cb0a \
+ # via -r requirements.in
+ pytz==2019.3 \
+ --hash=sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d \
+ --hash=sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be \
+ # via django
+ sqlparse==0.3.0 \
+ --hash=sha256:40afe6b8d4b1117e7dff5504d7a8ce07d9a1b15aeeade8a2d10f130a834f8177 \
+ --hash=sha256:7c3dca29c022744e95b547e867cee89f4fce4373f3549ccd8797d8eb52cdb873 \
+ # via django
+
+ Updating requirements
+ ---------------------
+
+ To update all packages, periodically re-run ``pip-compile --upgrade``.
+
+ To update a specific package to the latest or a specific version use the
+ ``--upgrade-package`` or ``-P`` flag:
+
+ .. code-block:: bash
+
+ # only update the django package
+ $ pip-compile --upgrade-package django
+
+ # update both the django and requests packages
+ $ pip-compile --upgrade-package django --upgrade-package requests
+
+ # update the django package to the latest, and requests to v2.0.0
+ $ pip-compile --upgrade-package django --upgrade-package requests==2.0.0
+
+ You can combine ``--upgrade`` and ``--upgrade-package`` in one command, to
+ provide constraints on the allowed upgrades. For example to upgrade all
+ packages whilst constraining requests to the latest version less than 3.0:
+
+ .. code-block:: bash
+
+ $ pip-compile --upgrade --upgrade-package 'requests<3.0'
+
+ Output File
+ -----------
+
+ To output the pinned requirements in a filename other than
+ ``requirements.txt``, use ``--output-file``. This might be useful for compiling
+ multiple files, for example with different constraints on django to test a
+ library with both versions using `tox <https://tox.readthedocs.io/en/latest/>`__:
+
+ .. code-block:: bash
+
+ $ pip-compile --upgrade-package 'django<1.0' --output-file requirements-django0x.txt
+ $ pip-compile --upgrade-package 'django<2.0' --output-file requirements-django1x.txt
+
+ Or to output to standard output, use ``--output-file=-``:
+
+ .. code-block:: bash
+
+ $ pip-compile --output-file=- > requirements.txt
+ $ pip-compile - --output-file=- < requirements.in > requirements.txt
+
+ Forwarding options to ``pip``
+ -----------------------------
+
+ Any valid ``pip`` flags or arguments may be passed on with ``pip-compile``'s
+ ``--pip-args`` option, e.g.
+
+ .. code-block:: bash
+
+ $ pip-compile requirements.in --pip-args '--retries 10 --timeout 30'
+
+ Configuration
+ -------------
+
+ You might be wrapping the ``pip-compile`` command in another script. To avoid
+ confusing consumers of your custom script you can override the update command
+ generated at the top of requirements files by setting the
+ ``CUSTOM_COMPILE_COMMAND`` environment variable.
+
+ .. code-block:: bash
+
+ $ CUSTOM_COMPILE_COMMAND="./pipcompilewrapper" pip-compile requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # ./pipcompilewrapper
+ #
+ asgiref==3.2.3 # via django
+ django==3.0.3 # via -r requirements.in
+ pytz==2019.3 # via django
+ sqlparse==0.3.0 # via django
+
+ Workflow for layered requirements
+ ---------------------------------
+
+ If you have different environments that you need to install different but
+ compatible packages for, then you can create layered requirements files and use
+ one layer to constrain the other.
+
+ For example, if you have a Django project where you want the newest ``2.1``
+ release in production and when developing you want to use the Django debug
+ toolbar, then you can create two ``*.in`` files, one for each layer:
+
+ .. code-block:: ini
+
+ # requirements.in
+ django<2.2
+
+ At the top of the development requirements ``dev-requirements.in`` you use ``-c
+ requirements.txt`` to constrain the dev requirements to packages already
+ selected for production in ``requirements.txt``.
+
+ .. code-block:: ini
+
+ # dev-requirements.in
+ -c requirements.txt
+ django-debug-toolbar
+
+ First, compile ``requirements.txt`` as usual:
+
+ .. code-block:: bash
+
+ $ pip-compile
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile
+ #
+ django==2.1.15 # via -r requirements.in
+ pytz==2019.3 # via django
+
+
+        Now compile the dev requirements; the ``requirements.txt`` file is used as
+ a constraint:
+
+ .. code-block:: bash
+
+ $ pip-compile dev-requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile dev-requirements.in
+ #
+ django-debug-toolbar==2.2 # via -r dev-requirements.in
+ django==2.1.15 # via -c requirements.txt, django-debug-toolbar
+ pytz==2019.3 # via -c requirements.txt, django
+ sqlparse==0.3.0 # via django-debug-toolbar
+
+ As you can see above, even though a ``2.2`` release of Django is available, the
+ dev requirements only include a ``2.1`` version of Django because they were
+ constrained. Now both compiled requirements files can be installed safely in
+ the dev environment.
+
+        To install requirements in the production stage, use:
+
+ .. code-block:: bash
+
+ $ pip-sync
+
+        You can install requirements in the development stage with:
+
+ .. code-block:: bash
+
+ $ pip-sync requirements.txt dev-requirements.txt
+
+
+ Version control integration
+ ---------------------------
+
+ You might use ``pip-compile`` as a hook for the `pre-commit <https://github.com/pre-commit/pre-commit>`_.
+ See `pre-commit docs <https://pre-commit.com/>`_ for instructions.
+ Sample ``.pre-commit-config.yaml``:
+
+ .. code-block:: yaml
+
+ repos:
+ - repo: https://github.com/jazzband/pip-tools
+ rev: 5.0.0
+ hooks:
+ - id: pip-compile
+
+ You might want to customize ``pip-compile`` args by configuring ``args`` and/or ``files``, for example:
+
+ .. code-block:: yaml
+
+ repos:
+ - repo: https://github.com/jazzband/pip-tools
+ rev: 5.0.0
+ hooks:
+ - id: pip-compile
+ files: ^requirements/production\.(in|txt)$
+ args: [--index-url=https://example.com, requirements/production.in]
+
+
+ Example usage for ``pip-sync``
+ ==============================
+
+ Now that you have a ``requirements.txt``, you can use ``pip-sync`` to update
+ your virtual environment to reflect exactly what's in there. This will
+ install/upgrade/uninstall everything necessary to match the
+ ``requirements.txt`` contents.
+
+ Run it with ``pip-sync`` or ``python -m piptools sync``. If you use multiple
+ Python versions, you can also run ``py -X.Y -m piptools sync`` on Windows and
+ ``pythonX.Y -m piptools sync`` on other systems.
+
+ ``pip-sync`` must be installed into and run from the same virtual
+ environment as your project to identify which packages to install
+ or upgrade.
+
+ **Be careful**: ``pip-sync`` is meant to be used only with a
+ ``requirements.txt`` generated by ``pip-compile``.
+
+ .. code-block:: bash
+
+ $ pip-sync
+ Uninstalling flake8-2.4.1:
+ Successfully uninstalled flake8-2.4.1
+ Collecting click==4.1
+ Downloading click-4.1-py2.py3-none-any.whl (62kB)
+ 100% |................................| 65kB 1.8MB/s
+ Found existing installation: click 4.0
+ Uninstalling click-4.0:
+ Successfully uninstalled click-4.0
+ Successfully installed click-4.1
+
+ To sync multiple ``*.txt`` dependency lists, just pass them in via command
+ line arguments, e.g.
+
+ .. code-block:: bash
+
+ $ pip-sync dev-requirements.txt requirements.txt
+
+        If no files are passed in, it defaults to ``requirements.txt``.
+
+ Any valid ``pip install`` flags or arguments may be passed with ``pip-sync``'s
+ ``--pip-args`` option, e.g.
+
+ .. code-block:: bash
+
+ $ pip-sync requirements.txt --pip-args '--no-cache-dir --no-deps'
+
+ If you use multiple Python versions, you can run ``pip-sync`` as
+ ``py -X.Y -m piptools sync ...`` on Windows and
+ ``pythonX.Y -m piptools sync ...`` on other systems.
+
+ **Note**: ``pip-sync`` will not upgrade or uninstall packaging tools like
+ ``setuptools``, ``pip``, or ``pip-tools`` itself. Use ``python -m pip install --upgrade``
+ to upgrade those packages.
+
+ Should I commit ``requirements.in`` and ``requirements.txt`` to source control?
+ ===============================================================================
+
+ Generally, yes. If you want a reproducible environment installation available from your source control,
+        you should commit both ``requirements.in`` and ``requirements.txt`` to source control.
+
+ Note that if you are deploying on multiple Python environments (read the section below),
+        then you must commit a separate output file for each Python environment.
+        We suggest using the ``{env}-requirements.txt`` format
+ (ex: ``win32-py2.7-requirements.txt``, ``macos-py3.6-requirements.txt``, etc.).
+
+
+ Cross-environment usage of ``requirements.in``/``requirements.txt`` and ``pip-compile``
+ =======================================================================================
+
+ The dependencies of a package can change depending on the Python environment in which it
+ is installed. Here, we define a Python environment as the combination of Operating
+ System, Python version (2.7, 3.6, etc.), and Python implementation (CPython, PyPy,
+ etc.). For an exact definition, refer to the possible combinations of `PEP 508
+ environment markers`_.
+
+ As the resulting ``requirements.txt`` can differ for each environment, users must
+ execute ``pip-compile`` **on each Python environment separately** to generate a
+ ``requirements.txt`` valid for each said environment. The same ``requirements.in`` can
+ be used as the source file for all environments, using `PEP 508 environment markers`_ as
+ needed, the same way it would be done for regular ``pip`` cross-environment usage.
+
+ If the generated ``requirements.txt`` remains exactly the same for all Python
+ environments, then it can be used across Python environments safely. **But** users
+        should be careful as any package update can introduce environment-dependent
+        dependencies, making any newly generated ``requirements.txt`` environment-dependent too.
+        As a general rule, users should still always execute ``pip-compile``
+ on each targeted Python environment to avoid issues.
+
+ .. _PEP 508 environment markers: https://www.python.org/dev/peps/pep-0508/#environment-markers
+
+ Other useful tools
+ ==================
+
+ - `pipdeptree`_ to print the dependency tree of the installed packages.
+ - ``requirements.in``/``requirements.txt`` syntax highlighting:
+
+ * `requirements.txt.vim`_ for Vim.
+ * `Python extension for VS Code`_ for VS Code.
+
+ .. _pipdeptree: https://github.com/naiquevin/pipdeptree
+ .. _requirements.txt.vim: https://github.com/raimon49/requirements.txt.vim
+ .. _Python extension for VS Code: https://marketplace.visualstudio.com/items?itemName=ms-python.python
+
+
+ Deprecations
+ ============
+
+ This section lists ``pip-tools`` features that are currently deprecated.
+
+        - ``--index/--no-index`` command-line options, use
+          ``--emit-index-url/--no-emit-index-url`` instead (since 5.2.0).
+
+ Versions and compatibility
+ ==========================
+
+ The table below summarizes the latest ``pip-tools`` versions with the required ``pip``
+ versions.
+
+ +-----------+-----------------+
+ | pip-tools | pip |
+ +===========+=================+
+ | 4.5.x | 8.1.3 - 20.0.x |
+ +-----------+-----------------+
+ | 5.x | 20.0.x - 20.1.x |
+ +-----------+-----------------+
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: System :: Systems Administration
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7
+Provides-Extra: testing
+Provides-Extra: coverage
diff --git a/third_party/python/pip-tools/README.rst b/third_party/python/pip-tools/README.rst
new file mode 100644
index 0000000000..8f045a989a
--- /dev/null
+++ b/third_party/python/pip-tools/README.rst
@@ -0,0 +1,466 @@
+|jazzband| |pypi| |pyversions| |buildstatus-travis| |buildstatus-appveyor| |codecov|
+
+==================================
+pip-tools = pip-compile + pip-sync
+==================================
+
+A set of command line tools to help you keep your ``pip``-based packages fresh,
+even when you've pinned them. `You do pin them, right?`_ (In building your Python application and its dependencies for production, you want to make sure that your builds are predictable and deterministic.)
+
+.. image:: https://github.com/jazzband/pip-tools/raw/master/img/pip-tools-overview.png
+ :alt: pip-tools overview for phase II
+
+.. |buildstatus-travis| image:: https://img.shields.io/travis/jazzband/pip-tools/master.svg?logo=travis
+ :alt: Travis CI build status
+ :target: https://travis-ci.org/jazzband/pip-tools
+.. |buildstatus-appveyor| image:: https://img.shields.io/appveyor/ci/jazzband/pip-tools/master.svg?logo=appveyor
+ :alt: AppVeyor build status
+ :target: https://ci.appveyor.com/project/jazzband/pip-tools
+.. |codecov| image:: https://codecov.io/gh/jazzband/pip-tools/branch/master/graph/badge.svg
+ :alt: Coverage
+ :target: https://codecov.io/gh/jazzband/pip-tools
+.. |jazzband| image:: https://jazzband.co/static/img/badge.svg
+ :alt: Jazzband
+ :target: https://jazzband.co/
+.. |pypi| image:: https://img.shields.io/pypi/v/pip-tools.svg
+ :alt: PyPI version
+ :target: https://pypi.org/project/pip-tools/
+.. |pyversions| image:: https://img.shields.io/pypi/pyversions/pip-tools.svg
+ :alt: Supported Python versions
+ :target: https://pypi.org/project/pip-tools/
+.. _You do pin them, right?: http://nvie.com/posts/pin-your-packages/
+
+
+Installation
+============
+
+Similar to ``pip``, ``pip-tools`` must be installed in each of your project's
+`virtual environments`_:
+
+.. code-block:: bash
+
+ $ source /path/to/venv/bin/activate
+ (venv)$ python -m pip install pip-tools
+
+**Note**: all of the remaining example commands assume you've activated your
+project's virtual environment.
+
+.. _virtual environments: https://packaging.python.org/tutorials/installing-packages/#creating-virtual-environments
+
+Example usage for ``pip-compile``
+=================================
+
+The ``pip-compile`` command lets you compile a ``requirements.txt`` file from
+your dependencies, specified in either ``setup.py`` or ``requirements.in``.
+
+Run it with ``pip-compile`` or ``python -m piptools compile``. If you use
+multiple Python versions, you can run ``pip-compile`` as ``py -X.Y -m piptools
+compile`` on Windows and ``pythonX.Y -m piptools compile`` on other systems.
+
+``pip-compile`` should be run from the same virtual environment as your
+project so conditional dependencies that require a specific Python version,
+or other environment markers, resolve relative to your project's
+environment.
+
+**Note**: ensure you don't have ``requirements.txt`` if you compile
+``setup.py`` or ``requirements.in`` from scratch; otherwise, it might
+interfere.
+
+Requirements from ``setup.py``
+------------------------------
+
+Suppose you have a Django project, and want to pin it for production.
+If you have a ``setup.py`` with ``install_requires=['django']``, then run
+``pip-compile`` without any arguments:
+
+.. code-block:: bash
+
+ $ pip-compile
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile
+ #
+ asgiref==3.2.3 # via django
+ django==3.0.3 # via my_django_project (setup.py)
+ pytz==2019.3 # via django
+ sqlparse==0.3.0 # via django
+
+``pip-compile`` will produce your ``requirements.txt``, with all the Django
+dependencies (and all underlying dependencies) pinned.
+
+Without ``setup.py``
+--------------------
+
+If you don't use ``setup.py`` (`it's easy to write one`_), you can create a
+``requirements.in`` file to declare the Django dependency:
+
+.. code-block:: ini
+
+ # requirements.in
+ django
+
+Now, run ``pip-compile requirements.in``:
+
+.. code-block:: bash
+
+ $ pip-compile requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile requirements.in
+ #
+ asgiref==3.2.3 # via django
+ django==3.0.3 # via -r requirements.in
+ pytz==2019.3 # via django
+ sqlparse==0.3.0 # via django
+
+And it will produce your ``requirements.txt``, with all the Django dependencies
+(and all underlying dependencies) pinned.
+
+.. _it's easy to write one: https://packaging.python.org/guides/distributing-packages-using-setuptools/#configuring-your-project
+
+Using hashes
+------------
+
+If you would like to use *Hash-Checking Mode* available in ``pip`` since
+version 8.0, ``pip-compile`` offers ``--generate-hashes`` flag:
+
+.. code-block:: bash
+
+ $ pip-compile --generate-hashes requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile --generate-hashes requirements.in
+ #
+ asgiref==3.2.3 \
+ --hash=sha256:7e06d934a7718bf3975acbf87780ba678957b87c7adc056f13b6215d610695a0 \
+ --hash=sha256:ea448f92fc35a0ef4b1508f53a04c4670255a3f33d22a81c8fc9c872036adbe5 \
+ # via django
+ django==3.0.3 \
+ --hash=sha256:2f1ba1db8648484dd5c238fb62504777b7ad090c81c5f1fd8d5eb5ec21b5f283 \
+ --hash=sha256:c91c91a7ad6ef67a874a4f76f58ba534f9208412692a840e1d125eb5c279cb0a \
+ # via -r requirements.in
+ pytz==2019.3 \
+ --hash=sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d \
+ --hash=sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be \
+ # via django
+ sqlparse==0.3.0 \
+ --hash=sha256:40afe6b8d4b1117e7dff5504d7a8ce07d9a1b15aeeade8a2d10f130a834f8177 \
+ --hash=sha256:7c3dca29c022744e95b547e867cee89f4fce4373f3549ccd8797d8eb52cdb873 \
+ # via django
+
+Updating requirements
+---------------------
+
+To update all packages, periodically re-run ``pip-compile --upgrade``.
+
+To update a specific package to the latest or a specific version use the
+``--upgrade-package`` or ``-P`` flag:
+
+.. code-block:: bash
+
+ # only update the django package
+ $ pip-compile --upgrade-package django
+
+ # update both the django and requests packages
+ $ pip-compile --upgrade-package django --upgrade-package requests
+
+ # update the django package to the latest, and requests to v2.0.0
+ $ pip-compile --upgrade-package django --upgrade-package requests==2.0.0
+
+You can combine ``--upgrade`` and ``--upgrade-package`` in one command, to
+provide constraints on the allowed upgrades. For example to upgrade all
+packages whilst constraining requests to the latest version less than 3.0:
+
+.. code-block:: bash
+
+ $ pip-compile --upgrade --upgrade-package 'requests<3.0'
+
+Output File
+-----------
+
+To output the pinned requirements in a filename other than
+``requirements.txt``, use ``--output-file``. This might be useful for compiling
+multiple files, for example with different constraints on django to test a
+library with both versions using `tox <https://tox.readthedocs.io/en/latest/>`__:
+
+.. code-block:: bash
+
+ $ pip-compile --upgrade-package 'django<1.0' --output-file requirements-django0x.txt
+ $ pip-compile --upgrade-package 'django<2.0' --output-file requirements-django1x.txt
+
+Or to output to standard output, use ``--output-file=-``:
+
+.. code-block:: bash
+
+ $ pip-compile --output-file=- > requirements.txt
+ $ pip-compile - --output-file=- < requirements.in > requirements.txt
+
+Forwarding options to ``pip``
+-----------------------------
+
+Any valid ``pip`` flags or arguments may be passed on with ``pip-compile``'s
+``--pip-args`` option, e.g.
+
+.. code-block:: bash
+
+ $ pip-compile requirements.in --pip-args '--retries 10 --timeout 30'
+
+Configuration
+-------------
+
+You might be wrapping the ``pip-compile`` command in another script. To avoid
+confusing consumers of your custom script you can override the update command
+generated at the top of requirements files by setting the
+``CUSTOM_COMPILE_COMMAND`` environment variable.
+
+.. code-block:: bash
+
+ $ CUSTOM_COMPILE_COMMAND="./pipcompilewrapper" pip-compile requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # ./pipcompilewrapper
+ #
+ asgiref==3.2.3 # via django
+ django==3.0.3 # via -r requirements.in
+ pytz==2019.3 # via django
+ sqlparse==0.3.0 # via django
+
+Workflow for layered requirements
+---------------------------------
+
+If you have different environments that you need to install different but
+compatible packages for, then you can create layered requirements files and use
+one layer to constrain the other.
+
+For example, if you have a Django project where you want the newest ``2.1``
+release in production and when developing you want to use the Django debug
+toolbar, then you can create two ``*.in`` files, one for each layer:
+
+.. code-block:: ini
+
+ # requirements.in
+ django<2.2
+
+At the top of the development requirements ``dev-requirements.in`` you use ``-c
+requirements.txt`` to constrain the dev requirements to packages already
+selected for production in ``requirements.txt``.
+
+.. code-block:: ini
+
+ # dev-requirements.in
+ -c requirements.txt
+ django-debug-toolbar
+
+First, compile ``requirements.txt`` as usual:
+
+.. code-block:: bash
+
+ $ pip-compile
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile
+ #
+ django==2.1.15 # via -r requirements.in
+ pytz==2019.3 # via django
+
+
+Now compile the dev requirements; the ``requirements.txt`` file is used as
+a constraint:
+
+.. code-block:: bash
+
+ $ pip-compile dev-requirements.in
+ #
+ # This file is autogenerated by pip-compile
+ # To update, run:
+ #
+ # pip-compile dev-requirements.in
+ #
+ django-debug-toolbar==2.2 # via -r dev-requirements.in
+ django==2.1.15 # via -c requirements.txt, django-debug-toolbar
+ pytz==2019.3 # via -c requirements.txt, django
+ sqlparse==0.3.0 # via django-debug-toolbar
+
+As you can see above, even though a ``2.2`` release of Django is available, the
+dev requirements only include a ``2.1`` version of Django because they were
+constrained. Now both compiled requirements files can be installed safely in
+the dev environment.
+
+To install requirements in the production stage, use:
+
+.. code-block:: bash
+
+ $ pip-sync
+
+You can install requirements in the development stage with:
+
+.. code-block:: bash
+
+ $ pip-sync requirements.txt dev-requirements.txt
+
+
+Version control integration
+---------------------------
+
+You might use ``pip-compile`` as a hook for the `pre-commit <https://github.com/pre-commit/pre-commit>`_.
+See `pre-commit docs <https://pre-commit.com/>`_ for instructions.
+Sample ``.pre-commit-config.yaml``:
+
+.. code-block:: yaml
+
+ repos:
+ - repo: https://github.com/jazzband/pip-tools
+ rev: 5.0.0
+ hooks:
+ - id: pip-compile
+
+You might want to customize ``pip-compile`` args by configuring ``args`` and/or ``files``, for example:
+
+.. code-block:: yaml
+
+ repos:
+ - repo: https://github.com/jazzband/pip-tools
+ rev: 5.0.0
+ hooks:
+ - id: pip-compile
+ files: ^requirements/production\.(in|txt)$
+ args: [--index-url=https://example.com, requirements/production.in]
+
+
+Example usage for ``pip-sync``
+==============================
+
+Now that you have a ``requirements.txt``, you can use ``pip-sync`` to update
+your virtual environment to reflect exactly what's in there. This will
+install/upgrade/uninstall everything necessary to match the
+``requirements.txt`` contents.
+
+Run it with ``pip-sync`` or ``python -m piptools sync``. If you use multiple
+Python versions, you can also run ``py -X.Y -m piptools sync`` on Windows and
+``pythonX.Y -m piptools sync`` on other systems.
+
+``pip-sync`` must be installed into and run from the same virtual
+environment as your project to identify which packages to install
+or upgrade.
+
+**Be careful**: ``pip-sync`` is meant to be used only with a
+``requirements.txt`` generated by ``pip-compile``.
+
+.. code-block:: bash
+
+ $ pip-sync
+ Uninstalling flake8-2.4.1:
+ Successfully uninstalled flake8-2.4.1
+ Collecting click==4.1
+ Downloading click-4.1-py2.py3-none-any.whl (62kB)
+ 100% |................................| 65kB 1.8MB/s
+ Found existing installation: click 4.0
+ Uninstalling click-4.0:
+ Successfully uninstalled click-4.0
+ Successfully installed click-4.1
+
+To sync multiple ``*.txt`` dependency lists, just pass them in via command
+line arguments, e.g.
+
+.. code-block:: bash
+
+ $ pip-sync dev-requirements.txt requirements.txt
+
+If no files are passed in, it defaults to ``requirements.txt``.
+
+Any valid ``pip install`` flags or arguments may be passed with ``pip-sync``'s
+``--pip-args`` option, e.g.
+
+.. code-block:: bash
+
+ $ pip-sync requirements.txt --pip-args '--no-cache-dir --no-deps'
+
+If you use multiple Python versions, you can run ``pip-sync`` as
+``py -X.Y -m piptools sync ...`` on Windows and
+``pythonX.Y -m piptools sync ...`` on other systems.
+
+**Note**: ``pip-sync`` will not upgrade or uninstall packaging tools like
+``setuptools``, ``pip``, or ``pip-tools`` itself. Use ``python -m pip install --upgrade``
+to upgrade those packages.
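+
+For example, a plain ``pip`` invocation (an illustrative sketch, not something
+``pip-sync`` runs for you) can be used to bring those tools up to date:
+
+.. code-block:: bash
+
+    $ python -m pip install --upgrade pip setuptools pip-tools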
+
+Should I commit ``requirements.in`` and ``requirements.txt`` to source control?
+===============================================================================
+
+Generally, yes. If you want a reproducible environment installation available from your source control,
+you should commit both ``requirements.in`` and ``requirements.txt`` to source control.
+
+Note that if you are deploying on multiple Python environments (read the section below),
+then you must commit a separate output file for each Python environment.
+We suggest using the ``{env}-requirements.txt`` format
+(ex: ``win32-py2.7-requirements.txt``, ``macos-py3.6-requirements.txt``, etc.).
+
+
+Cross-environment usage of ``requirements.in``/``requirements.txt`` and ``pip-compile``
+=======================================================================================
+
+The dependencies of a package can change depending on the Python environment in which it
+is installed. Here, we define a Python environment as the combination of Operating
+System, Python version (2.7, 3.6, etc.), and Python implementation (CPython, PyPy,
+etc.). For an exact definition, refer to the possible combinations of `PEP 508
+environment markers`_.
+
+As the resulting ``requirements.txt`` can differ for each environment, users must
+execute ``pip-compile`` **on each Python environment separately** to generate a
+``requirements.txt`` valid for each said environment. The same ``requirements.in`` can
+be used as the source file for all environments, using `PEP 508 environment markers`_ as
+needed, the same way it would be done for regular ``pip`` cross-environment usage.
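+
+As an illustration (the package names below are only hypothetical examples),
+a single ``requirements.in`` shared across environments could use markers like
+this:
+
+.. code-block:: ini
+
+    # requirements.in
+    django
+    dataclasses ; python_version < "3.7"  # backport needed only on older Pythons
+    pywin32 ; sys_platform == "win32"     # Windows-only dependency
+
+Compiling this file on each target environment then produces a
+``requirements.txt`` containing only the packages relevant to that environment.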
+
+If the generated ``requirements.txt`` remains exactly the same for all Python
+environments, then it can be used across Python environments safely. **But** users
+should be careful as any package update can introduce environment-dependent
+dependencies, making any newly generated ``requirements.txt`` environment-dependent too.
+As a general rule, users should still always execute ``pip-compile``
+on each targeted Python environment to avoid issues.
+
+.. _PEP 508 environment markers: https://www.python.org/dev/peps/pep-0508/#environment-markers
+
+Other useful tools
+==================
+
+- `pipdeptree`_ to print the dependency tree of the installed packages.
+- ``requirements.in``/``requirements.txt`` syntax highlighting:
+
+ * `requirements.txt.vim`_ for Vim.
+ * `Python extension for VS Code`_ for VS Code.
+
+.. _pipdeptree: https://github.com/naiquevin/pipdeptree
+.. _requirements.txt.vim: https://github.com/raimon49/requirements.txt.vim
+.. _Python extension for VS Code: https://marketplace.visualstudio.com/items?itemName=ms-python.python
+
+
+Deprecations
+============
+
+This section lists ``pip-tools`` features that are currently deprecated.
+
+- ``--index/--no-index`` command-line options, use
+  ``--emit-index-url/--no-emit-index-url`` instead (since 5.2.0), as shown below.
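+
+A minimal before/after sketch (assuming the deprecated flag was only used to
+keep the index URL out of the generated file):
+
+.. code-block:: bash
+
+    # deprecated spelling
+    $ pip-compile --no-index requirements.in
+
+    # preferred spelling since 5.2.0
+    $ pip-compile --no-emit-index-url requirements.in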
+
+Versions and compatibility
+==========================
+
+The table below summarizes the latest ``pip-tools`` versions with the required ``pip``
+versions.
+
++-----------+-----------------+
+| pip-tools | pip |
++===========+=================+
+| 4.5.x | 8.1.3 - 20.0.x |
++-----------+-----------------+
+| 5.x | 20.0.x - 20.1.x |
++-----------+-----------------+
diff --git a/third_party/python/pip-tools/examples/django.in b/third_party/python/pip-tools/examples/django.in
new file mode 100644
index 0000000000..6b472eee4a
--- /dev/null
+++ b/third_party/python/pip-tools/examples/django.in
@@ -0,0 +1,3 @@
+# This file includes the Django project, and the debug toolbar
+Django<2.2.1 # suppose some version requirement
+django-debug-toolbar
diff --git a/third_party/python/pip-tools/examples/flask.in b/third_party/python/pip-tools/examples/flask.in
new file mode 100644
index 0000000000..8da22a8df9
--- /dev/null
+++ b/third_party/python/pip-tools/examples/flask.in
@@ -0,0 +1,2 @@
+# Flask has 2nd and 3rd level dependencies
+Flask
diff --git a/third_party/python/pip-tools/examples/hypothesis.in b/third_party/python/pip-tools/examples/hypothesis.in
new file mode 100644
index 0000000000..dcd4b2acd1
--- /dev/null
+++ b/third_party/python/pip-tools/examples/hypothesis.in
@@ -0,0 +1 @@
+hypothesis[django]
diff --git a/third_party/python/pip-tools/examples/protection.in b/third_party/python/pip-tools/examples/protection.in
new file mode 100644
index 0000000000..0080fa230e
--- /dev/null
+++ b/third_party/python/pip-tools/examples/protection.in
@@ -0,0 +1,3 @@
+# This package depends on setuptools, which should not end up in the compiled
+# requirements, because it may cause conflicts with pip itself
+python-levenshtein>=0.12.0
diff --git a/third_party/python/pip-tools/examples/sentry.in b/third_party/python/pip-tools/examples/sentry.in
new file mode 100644
index 0000000000..976bbdaab0
--- /dev/null
+++ b/third_party/python/pip-tools/examples/sentry.in
@@ -0,0 +1,2 @@
+# Sentry has a very large dependency tree
+sentry
diff --git a/third_party/python/pip-tools/img/pip-tools-overview.png b/third_party/python/pip-tools/img/pip-tools-overview.png
new file mode 100644
index 0000000000..a4849ad5e0
--- /dev/null
+++ b/third_party/python/pip-tools/img/pip-tools-overview.png
Binary files differ
diff --git a/third_party/python/pip-tools/piptools/__init__.py b/third_party/python/pip-tools/piptools/__init__.py
new file mode 100644
index 0000000000..9f0c95aa56
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/__init__.py
@@ -0,0 +1,11 @@
+import locale
+
+from piptools.click import secho
+
+# Needed for locale.getpreferredencoding(False) to work
+# in pip._internal.utils.encoding.auto_decode
+try:
+ locale.setlocale(locale.LC_ALL, "")
+except locale.Error as e: # pragma: no cover
+    # setlocale can apparently crash if locales are uninitialized
+ secho("Ignoring error when setting locale: {}".format(e), fg="red")
diff --git a/third_party/python/pip-tools/piptools/__main__.py b/third_party/python/pip-tools/piptools/__main__.py
new file mode 100644
index 0000000000..2d8b75e85d
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/__main__.py
@@ -0,0 +1,17 @@
+import click
+
+from piptools.scripts import compile, sync
+
+
+@click.group()
+def cli():
+ pass
+
+
+cli.add_command(compile.cli, "compile")
+cli.add_command(sync.cli, "sync")
+
+
+# Enable ``python -m piptools ...``.
+if __name__ == "__main__": # pragma: no branch
+ cli()
diff --git a/third_party/python/pip-tools/piptools/_compat/__init__.py b/third_party/python/pip-tools/piptools/_compat/__init__.py
new file mode 100644
index 0000000000..f67f0949ad
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/_compat/__init__.py
@@ -0,0 +1,12 @@
+# coding: utf-8
+# flake8: noqa
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import six
+
+from .pip_compat import BAR_TYPES, PIP_VERSION, parse_requirements
+
+if six.PY2:
+ from .tempfile import TemporaryDirectory
+else:
+ from tempfile import TemporaryDirectory
diff --git a/third_party/python/pip-tools/piptools/_compat/contextlib.py b/third_party/python/pip-tools/piptools/_compat/contextlib.py
new file mode 100644
index 0000000000..04039ccb01
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/_compat/contextlib.py
@@ -0,0 +1,18 @@
+# Ported from python 3.7 contextlib.py
+class nullcontext(object):
+ """Context manager that does no additional processing.
+ Used as a stand-in for a normal context manager, when a particular
+ block of code is only sometimes used with a normal context manager:
+ cm = optional_cm if condition else nullcontext()
+ with cm:
+ # Perform operation, using optional_cm if condition is True
+ """
+
+ def __init__(self, enter_result=None):
+ self.enter_result = enter_result
+
+ def __enter__(self):
+ return self.enter_result
+
+ def __exit__(self, *excinfo):
+ pass
diff --git a/third_party/python/pip-tools/piptools/_compat/pip_compat.py b/third_party/python/pip-tools/piptools/_compat/pip_compat.py
new file mode 100644
index 0000000000..543593ad9a
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/_compat/pip_compat.py
@@ -0,0 +1,29 @@
+# -*- coding=utf-8 -*-
+from __future__ import absolute_import
+
+import pip
+from pip._internal.req import parse_requirements as _parse_requirements
+from pip._vendor.packaging.version import parse as parse_version
+
+PIP_VERSION = tuple(map(int, parse_version(pip.__version__).base_version.split(".")))
+
+
+if PIP_VERSION[:2] <= (20, 0):
+
+ def install_req_from_parsed_requirement(req, **kwargs):
+ return req
+
+ from pip._internal.utils.ui import BAR_TYPES
+
+else:
+ from pip._internal.req.constructors import install_req_from_parsed_requirement
+ from pip._internal.cli.progress_bars import BAR_TYPES
+
+
+def parse_requirements(
+ filename, session, finder=None, options=None, constraint=False, isolated=False
+):
+ for parsed_req in _parse_requirements(
+ filename, session, finder=finder, options=options, constraint=constraint
+ ):
+ yield install_req_from_parsed_requirement(parsed_req, isolated=isolated)
diff --git a/third_party/python/pip-tools/piptools/_compat/tempfile.py b/third_party/python/pip-tools/piptools/_compat/tempfile.py
new file mode 100644
index 0000000000..dc7e9ef997
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/_compat/tempfile.py
@@ -0,0 +1,88 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function
+
+import os as _os
+import sys as _sys
+import warnings as _warnings
+from tempfile import mkdtemp
+
+
+class TemporaryDirectory(object):
+ """Create and return a temporary directory. This has the same
+ behavior as mkdtemp but can be used as a context manager. For
+ example:
+
+ with TemporaryDirectory() as tmpdir:
+ ...
+
+ Upon exiting the context, the directory and everything contained
+ in it are removed.
+ """
+
+ def __init__(self, suffix="", prefix="tmp", dir=None):
+ self._closed = False
+ self.name = None # Handle mkdtemp raising an exception
+ self.name = mkdtemp(suffix, prefix, dir)
+
+ def __repr__(self):
+ return "<{} {!r}>".format(self.__class__.__name__, self.name)
+
+ def __enter__(self):
+ return self.name
+
+ def cleanup(self):
+ if self.name and not self._closed:
+ try:
+ self._rmtree(self.name)
+ except (TypeError, AttributeError) as ex:
+ # Issue #10188: Emit a warning on stderr
+ # if the directory could not be cleaned
+ # up due to missing globals
+ if "None" not in str(ex):
+ raise
+ print(
+ "ERROR: {!r} while cleaning up {!r}".format(ex, self),
+ file=_sys.stderr,
+ )
+ return
+ self._closed = True
+
+ def __exit__(self, exc, value, tb):
+ self.cleanup()
+
+ def __del__(self):
+ # Issue a ResourceWarning if implicit cleanup needed
+ self.cleanup()
+
+ # XXX (ncoghlan): The following code attempts to make
+ # this class tolerant of the module nulling out process
+ # that happens during CPython interpreter shutdown
+ # Alas, it doesn't actually manage it. See issue #10188
+ _listdir = staticmethod(_os.listdir)
+ _path_join = staticmethod(_os.path.join)
+ _isdir = staticmethod(_os.path.isdir)
+ _islink = staticmethod(_os.path.islink)
+ _remove = staticmethod(_os.remove)
+ _rmdir = staticmethod(_os.rmdir)
+ _warn = _warnings.warn
+
+ def _rmtree(self, path):
+ # Essentially a stripped down version of shutil.rmtree. We can't
+ # use globals because they may be None'ed out at shutdown.
+ for name in self._listdir(path):
+ fullname = self._path_join(path, name)
+ try:
+ isdir = self._isdir(fullname) and not self._islink(fullname)
+ except OSError:
+ isdir = False
+ if isdir:
+ self._rmtree(fullname)
+ else:
+ try:
+ self._remove(fullname)
+ except OSError:
+ pass
+ try:
+ self._rmdir(path)
+ except OSError:
+ pass
diff --git a/third_party/python/pip-tools/piptools/cache.py b/third_party/python/pip-tools/piptools/cache.py
new file mode 100644
index 0000000000..9b6bf550cc
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/cache.py
@@ -0,0 +1,170 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import json
+import os
+import platform
+import sys
+
+from pip._vendor.packaging.requirements import Requirement
+
+from .exceptions import PipToolsError
+from .utils import as_tuple, key_from_req, lookup_table
+
+_PEP425_PY_TAGS = {"cpython": "cp", "pypy": "pp", "ironpython": "ip", "jython": "jy"}
+
+
+def _implementation_name():
+ """similar to PEP 425, however the minor version is separated from the
+ major to differentation "3.10" and "31.0".
+ """
+ implementation_name = platform.python_implementation().lower()
+ implementation = _PEP425_PY_TAGS.get(implementation_name, "??")
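+ # For example, CPython 3.8 yields "cp3.8"; unknown implementations map to "??".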
+ return "{}{}.{}".format(implementation, *sys.version_info)
+
+
+class CorruptCacheError(PipToolsError):
+ def __init__(self, path):
+ self.path = path
+
+ def __str__(self):
+ lines = [
+ "The dependency cache seems to have been corrupted.",
+ "Inspect, or delete, the following file:",
+ " {}".format(self.path),
+ ]
+ return os.linesep.join(lines)
+
+
+def read_cache_file(cache_file_path):
+ with open(cache_file_path, "r") as cache_file:
+ try:
+ doc = json.load(cache_file)
+ except ValueError:
+ raise CorruptCacheError(cache_file_path)
+
+ # Check version and load the contents
+ if doc["__format__"] != 1:
+ raise AssertionError("Unknown cache file format")
+ return doc["dependencies"]
+
+
+class DependencyCache(object):
+ """
+ Creates a new persistent dependency cache for the current Python version.
+ The cache file is written to the appropriate user cache dir for the
+ current platform, i.e.
+
+ ~/.cache/pip-tools/depcache-pyX.Y.json
+
+ where "py" indicates the Python implementation and "X.Y" the Python version.
+ """
+
+ def __init__(self, cache_dir):
+ if not os.path.isdir(cache_dir):
+ os.makedirs(cache_dir)
+ cache_filename = "depcache-{}.json".format(_implementation_name())
+
+ self._cache_file = os.path.join(cache_dir, cache_filename)
+ self._cache = None
+
+ @property
+ def cache(self):
+ """
+ The dictionary that is the actual in-memory cache. This property
+ lazily loads the cache from disk.
+ """
+ if self._cache is None:
+ self.read_cache()
+ return self._cache
+
+ def as_cache_key(self, ireq):
+ """
+ Given a requirement, return its cache key. This behavior is a little weird
+ in order to allow backwards compatibility with cache files. For a requirement
+ without extras, this will return, for example:
+
+ ("ipython", "2.1.0")
+
+ For a requirement with extras, the extras will be comma-separated and appended
+ to the version, inside brackets, like so:
+
+ ("ipython", "2.1.0[nbconvert,notebook]")
+ """
+ name, version, extras = as_tuple(ireq)
+ if not extras:
+ extras_string = ""
+ else:
+ extras_string = "[{}]".format(",".join(extras))
+ return name, "{}{}".format(version, extras_string)
+
+ def read_cache(self):
+ """Reads the cached contents into memory."""
+ if os.path.exists(self._cache_file):
+ self._cache = read_cache_file(self._cache_file)
+ else:
+ self._cache = {}
+
+ def write_cache(self):
+ """Writes the cache to disk as JSON."""
+ doc = {"__format__": 1, "dependencies": self._cache}
+ with open(self._cache_file, "w") as f:
+ json.dump(doc, f, sort_keys=True)
+
+ def clear(self):
+ self._cache = {}
+ self.write_cache()
+
+ def __contains__(self, ireq):
+ pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+ return pkgversion_and_extras in self.cache.get(pkgname, {})
+
+ def __getitem__(self, ireq):
+ pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+ return self.cache[pkgname][pkgversion_and_extras]
+
+ def __setitem__(self, ireq, values):
+ pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+ self.cache.setdefault(pkgname, {})
+ self.cache[pkgname][pkgversion_and_extras] = values
+ self.write_cache()
+
+ def reverse_dependencies(self, ireqs):
+ """
+ Returns a lookup table of reverse dependencies for all the given ireqs.
+
+ Since this is all static, it only works if the dependency cache
+ contains the complete data, otherwise you end up with a partial view.
+ This is typically no problem if you use this function after the entire
+ dependency tree is resolved.
+ """
+ ireqs_as_cache_values = [self.as_cache_key(ireq) for ireq in ireqs]
+ return self._reverse_dependencies(ireqs_as_cache_values)
+
+ def _reverse_dependencies(self, cache_keys):
+ """
+ Returns a lookup table of reverse dependencies for all the given cache keys.
+
+ Example input:
+
+ [('pep8', '1.5.7'),
+ ('flake8', '2.4.0'),
+ ('mccabe', '0.3'),
+ ('pyflakes', '0.8.1')]
+
+ Example output:
+
+ {'pep8': ['flake8'],
+ 'flake8': [],
+ 'mccabe': ['flake8'],
+ 'pyflakes': ['flake8']}
+
+ """
+ # First, collect all the dependencies into a sequence of (parent, child)
+ # tuples, like [('flake8', 'pep8'), ('flake8', 'mccabe'), ...]
+ return lookup_table(
+ (key_from_req(Requirement(dep_name)), name)
+ for name, version_and_extras in cache_keys
+ for dep_name in self.cache[name][version_and_extras]
+ )
diff --git a/third_party/python/pip-tools/piptools/click.py b/third_party/python/pip-tools/piptools/click.py
new file mode 100644
index 0000000000..86f1612c6a
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/click.py
@@ -0,0 +1,6 @@
+from __future__ import absolute_import
+
+import click
+from click import * # noqa
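+# Re-exporting click's public names lets the rest of piptools do
+# "from . import click" and use this module as a drop-in for click itself.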
+
+click.disable_unicode_literals_warning = True
diff --git a/third_party/python/pip-tools/piptools/exceptions.py b/third_party/python/pip-tools/piptools/exceptions.py
new file mode 100644
index 0000000000..5278972741
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/exceptions.py
@@ -0,0 +1,66 @@
+from pip._internal.utils.misc import redact_auth_from_url
+
+
+class PipToolsError(Exception):
+ pass
+
+
+class NoCandidateFound(PipToolsError):
+ def __init__(self, ireq, candidates_tried, finder):
+ self.ireq = ireq
+ self.candidates_tried = candidates_tried
+ self.finder = finder
+
+ def __str__(self):
+ versions = []
+ pre_versions = []
+
+ for candidate in sorted(self.candidates_tried):
+ version = str(candidate.version)
+ if candidate.version.is_prerelease:
+ pre_versions.append(version)
+ else:
+ versions.append(version)
+
+ lines = ["Could not find a version that matches {}".format(self.ireq)]
+
+ if versions:
+ lines.append("Tried: {}".format(", ".join(versions)))
+
+ if pre_versions:
+ if self.finder.allow_all_prereleases:
+ line = "Tried"
+ else:
+ line = "Skipped"
+
+ line += " pre-versions: {}".format(", ".join(pre_versions))
+ lines.append(line)
+
+ if versions or pre_versions:
+ lines.append(
+ "There are incompatible versions in the resolved dependencies:"
+ )
+ source_ireqs = getattr(self.ireq, "_source_ireqs", [])
+ lines.extend(" {}".format(ireq) for ireq in source_ireqs)
+ else:
+ redacted_urls = tuple(
+ redact_auth_from_url(url) for url in self.finder.index_urls
+ )
+ lines.append("No versions found")
+ lines.append(
+ "{} {} reachable?".format(
+ "Were" if len(redacted_urls) > 1 else "Was",
+ " or ".join(redacted_urls),
+ )
+ )
+ return "\n".join(lines)
+
+
+class IncompatibleRequirements(PipToolsError):
+ def __init__(self, ireq_a, ireq_b):
+ self.ireq_a = ireq_a
+ self.ireq_b = ireq_b
+
+ def __str__(self):
+ message = "Incompatible requirements found: {} and {}"
+ return message.format(self.ireq_a, self.ireq_b)
diff --git a/third_party/python/pip-tools/piptools/locations.py b/third_party/python/pip-tools/piptools/locations.py
new file mode 100644
index 0000000000..9ca0ffe436
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/locations.py
@@ -0,0 +1,25 @@
+import os
+from shutil import rmtree
+
+from pip._internal.utils.appdirs import user_cache_dir
+
+from .click import secho
+
+# The user_cache_dir helper comes straight from pip itself
+CACHE_DIR = user_cache_dir("pip-tools")
+
+# NOTE
+# We used to store the cache dir under ~/.pip-tools, which is not the
+# preferred place to store caches for any platform. This has been addressed
+# in pip-tools==1.0.5, but to be good citizens, we point this out explicitly
+# to the user when this directory is still found.
+LEGACY_CACHE_DIR = os.path.expanduser("~/.pip-tools")
+
+if os.path.exists(LEGACY_CACHE_DIR):
+ secho(
+ "Removing old cache dir {} (new cache dir is {})".format(
+ LEGACY_CACHE_DIR, CACHE_DIR
+ ),
+ fg="yellow",
+ )
+ rmtree(LEGACY_CACHE_DIR)
diff --git a/third_party/python/pip-tools/piptools/logging.py b/third_party/python/pip-tools/piptools/logging.py
new file mode 100644
index 0000000000..dcf068f7a2
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/logging.py
@@ -0,0 +1,62 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import contextlib
+import logging
+import sys
+
+from . import click
+
+# Initialise the builtin logging module for other components using it,
+# e.g. pip.
+logging.basicConfig()
+
+
+class LogContext(object):
+ stream = sys.stderr
+
+ def __init__(self, verbosity=0, indent_width=2):
+ self.verbosity = verbosity
+ self.current_indent = 0
+ self._indent_width = indent_width
+
+ def log(self, message, *args, **kwargs):
+ kwargs.setdefault("err", True)
+ prefix = " " * self.current_indent
+ click.secho(prefix + message, *args, **kwargs)
+
+ def debug(self, *args, **kwargs):
+ if self.verbosity >= 1:
+ self.log(*args, **kwargs)
+
+ def info(self, *args, **kwargs):
+ if self.verbosity >= 0:
+ self.log(*args, **kwargs)
+
+ def warning(self, *args, **kwargs):
+ kwargs.setdefault("fg", "yellow")
+ self.log(*args, **kwargs)
+
+ def error(self, *args, **kwargs):
+ kwargs.setdefault("fg", "red")
+ self.log(*args, **kwargs)
+
+ def _indent(self):
+ self.current_indent += self._indent_width
+
+ def _dedent(self):
+ self.current_indent -= self._indent_width
+
+ @contextlib.contextmanager
+ def indentation(self):
+ """
+ Increase indentation.
+ """
+ self._indent()
+ try:
+ yield
+ finally:
+ self._dedent()
+
+
+log = LogContext()
diff --git a/third_party/python/pip-tools/piptools/repositories/__init__.py b/third_party/python/pip-tools/piptools/repositories/__init__.py
new file mode 100644
index 0000000000..ce5142e8c6
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/repositories/__init__.py
@@ -0,0 +1,3 @@
+# flake8: noqa
+from .local import LocalRequirementsRepository
+from .pypi import PyPIRepository
diff --git a/third_party/python/pip-tools/piptools/repositories/base.py b/third_party/python/pip-tools/piptools/repositories/base.py
new file mode 100644
index 0000000000..0343fe7d79
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/repositories/base.py
@@ -0,0 +1,55 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+from abc import ABCMeta, abstractmethod
+from contextlib import contextmanager
+
+from six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class BaseRepository(object):
+ def clear_caches(self):
+ """Should clear any caches used by the implementation."""
+
+ def freshen_build_caches(self):
+ """Should start with fresh build/source caches."""
+
+ @abstractmethod
+ def find_best_match(self, ireq):
+ """
+ Return a Version object that indicates the best match for the given
+ InstallRequirement according to the repository.
+ """
+
+ @abstractmethod
+ def get_dependencies(self, ireq):
+ """
+ Given a pinned, URL, or editable InstallRequirement, returns a set of
+ dependencies (also InstallRequirements, but not necessarily pinned).
+ They indicate the secondary dependencies for the given requirement.
+ """
+
+ @abstractmethod
+ def get_hashes(self, ireq):
+ """
+ Given a pinned InstallRequirement, returns a set of hashes that represent
+ all of the files for a given requirement. It is not acceptable for an
+ editable or unpinned requirement to be passed to this function.
+ """
+
+ @abstractmethod
+ @contextmanager
+ def allow_all_wheels(self):
+ """
+ Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.
+ """
+
+ @abstractmethod
+ def copy_ireq_dependencies(self, source, dest):
+ """
+ Notifies the repository that `dest` is a copy of `source`, and so it
+ has the same dependencies. Otherwise, once we prepare an ireq to assign
+ it its name, we would lose track of those dependencies on combining
+ that ireq with others.
+ """
diff --git a/third_party/python/pip-tools/piptools/repositories/local.py b/third_party/python/pip-tools/piptools/repositories/local.py
new file mode 100644
index 0000000000..6c91d1b4f2
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/repositories/local.py
@@ -0,0 +1,99 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+from contextlib import contextmanager
+
+from pip._internal.utils.hashes import FAVORITE_HASH
+
+from .._compat import PIP_VERSION
+from .base import BaseRepository
+
+from piptools.utils import as_tuple, key_from_ireq, make_install_requirement
+
+
+def ireq_satisfied_by_existing_pin(ireq, existing_pin):
+ """
+ Return True if the given InstallRequirement is satisfied by the
+ previously encountered version pin.
+ """
+ version = next(iter(existing_pin.req.specifier)).version
+ return ireq.req.specifier.contains(
+ version, prereleases=existing_pin.req.specifier.prereleases
+ )
+
+
+class LocalRequirementsRepository(BaseRepository):
+ """
+ The LocalRequirementsRepository proxies the _real_ repository by first
+ checking if a requirement can be satisfied by existing pins (i.e. the
+ result of a previous compile step).
+
+ In effect, if a requirement can be satisfied with a version pinned in the
+ requirements file, we prefer that version over the best match found in
+ PyPI. This keeps updates to the requirements.txt down to a minimum.
+ """
+
+ def __init__(self, existing_pins, proxied_repository, reuse_hashes=True):
+ self._reuse_hashes = reuse_hashes
+ self.repository = proxied_repository
+ self.existing_pins = existing_pins
+
+ @property
+ def options(self):
+ return self.repository.options
+
+ @property
+ def finder(self):
+ return self.repository.finder
+
+ @property
+ def session(self):
+ return self.repository.session
+
+ @property
+ def DEFAULT_INDEX_URL(self):
+ return self.repository.DEFAULT_INDEX_URL
+
+ def clear_caches(self):
+ self.repository.clear_caches()
+
+ def freshen_build_caches(self):
+ self.repository.freshen_build_caches()
+
+ def find_best_match(self, ireq, prereleases=None):
+ key = key_from_ireq(ireq)
+ existing_pin = self.existing_pins.get(key)
+ if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin):
+ project, version, _ = as_tuple(existing_pin)
+ return make_install_requirement(
+ project, version, ireq.extras, constraint=ireq.constraint
+ )
+ else:
+ return self.repository.find_best_match(ireq, prereleases)
+
+ def get_dependencies(self, ireq):
+ return self.repository.get_dependencies(ireq)
+
+ def get_hashes(self, ireq):
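+ # When hash reuse is disabled, the "and" short-circuits so we fall through to the proxied repository below.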
+ existing_pin = self._reuse_hashes and self.existing_pins.get(
+ key_from_ireq(ireq)
+ )
+ if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin):
+ if PIP_VERSION[:2] <= (20, 0):
+ hashes = existing_pin.options.get("hashes", {})
+ else:
+ hashes = existing_pin.hash_options
+ hexdigests = hashes.get(FAVORITE_HASH)
+ if hexdigests:
+ return {
+ ":".join([FAVORITE_HASH, hexdigest]) for hexdigest in hexdigests
+ }
+ return self.repository.get_hashes(ireq)
+
+ @contextmanager
+ def allow_all_wheels(self):
+ with self.repository.allow_all_wheels():
+ yield
+
+ def copy_ireq_dependencies(self, source, dest):
+ self.repository.copy_ireq_dependencies(source, dest)
diff --git a/third_party/python/pip-tools/piptools/repositories/pypi.py b/third_party/python/pip-tools/piptools/repositories/pypi.py
new file mode 100644
index 0000000000..7480b5e855
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/repositories/pypi.py
@@ -0,0 +1,524 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import collections
+import hashlib
+import itertools
+import logging
+import os
+from contextlib import contextmanager
+from shutil import rmtree
+
+from pip._internal.cache import WheelCache
+from pip._internal.commands import create_command
+from pip._internal.models.index import PackageIndex, PyPI
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.req import RequirementSet
+from pip._internal.req.req_tracker import get_requirement_tracker
+from pip._internal.utils.hashes import FAVORITE_HASH
+from pip._internal.utils.logging import indent_log, setup_logging
+from pip._internal.utils.misc import normalize_path
+from pip._internal.utils.temp_dir import TempDirectory, global_tempdir_manager
+from pip._internal.utils.urls import path_to_url, url_to_path
+from pip._vendor.requests import RequestException
+
+from .._compat import BAR_TYPES, PIP_VERSION, TemporaryDirectory, contextlib
+from ..click import progressbar
+from ..exceptions import NoCandidateFound
+from ..logging import log
+from ..utils import (
+ as_tuple,
+ fs_str,
+ is_pinned_requirement,
+ is_url_requirement,
+ lookup_table,
+ make_install_requirement,
+)
+from .base import BaseRepository
+
+FILE_CHUNK_SIZE = 4096
+FileStream = collections.namedtuple("FileStream", "stream size")
+
+
+class PyPIRepository(BaseRepository):
+ DEFAULT_INDEX_URL = PyPI.simple_url
+ HASHABLE_PACKAGE_TYPES = {"bdist_wheel", "sdist"}
+
+ """
+ The PyPIRepository will use the provided Finder instance to look up
+ packages. Typically, it looks up packages on PyPI (the default implicit
+ config), but any other PyPI mirror can be used if index_urls is
+ changed/configured on the Finder.
+ """
+
+ def __init__(self, pip_args, cache_dir):
+ # Use pip's parser for pip.conf management and defaults.
+ # General options (find_links, index_url, extra_index_url, trusted_host,
+ # and pre) are deferred to pip.
+ self.command = create_command("install")
+ self.options, _ = self.command.parse_args(pip_args)
+ if self.options.cache_dir:
+ self.options.cache_dir = normalize_path(self.options.cache_dir)
+
+ self.options.require_hashes = False
+ self.options.ignore_dependencies = False
+
+ self.session = self.command._build_session(self.options)
+ self.finder = self.command._build_package_finder(
+ options=self.options, session=self.session
+ )
+
+ # Caches
+ # stores project_name => InstallationCandidate mappings for all
+ # versions reported by PyPI, so we only have to ask once for each
+ # project
+ self._available_candidates_cache = {}
+
+ # stores InstallRequirement => list(InstallRequirement) mappings
+ # of all secondary dependencies for the given requirement, so we
+ # only have to go to disk once for each requirement
+ self._dependencies_cache = {}
+
+ # Setup file paths
+ self.freshen_build_caches()
+ self._cache_dir = normalize_path(cache_dir)
+ self._download_dir = fs_str(os.path.join(self._cache_dir, "pkgs"))
+ self._wheel_download_dir = fs_str(os.path.join(self._cache_dir, "wheels"))
+
+ self._setup_logging()
+
+ def freshen_build_caches(self):
+ """
+ Start with fresh build/source caches. Will remove any old build
+ caches from disk automatically.
+ """
+ self._build_dir = TemporaryDirectory(fs_str("build"))
+ self._source_dir = TemporaryDirectory(fs_str("source"))
+
+ @property
+ def build_dir(self):
+ return self._build_dir.name
+
+ @property
+ def source_dir(self):
+ return self._source_dir.name
+
+ def clear_caches(self):
+ rmtree(self._download_dir, ignore_errors=True)
+ rmtree(self._wheel_download_dir, ignore_errors=True)
+
+ def find_all_candidates(self, req_name):
+ if req_name not in self._available_candidates_cache:
+ candidates = self.finder.find_all_candidates(req_name)
+ self._available_candidates_cache[req_name] = candidates
+ return self._available_candidates_cache[req_name]
+
+ def find_best_match(self, ireq, prereleases=None):
+ """
+ Returns a Version object that indicates the best match for the given
+ InstallRequirement according to the external repository.
+ """
+ if ireq.editable or is_url_requirement(ireq):
+ return ireq # return itself as the best match
+
+ all_candidates = self.find_all_candidates(ireq.name)
+ candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
+ matching_versions = ireq.specifier.filter(
+ (candidate.version for candidate in all_candidates), prereleases=prereleases
+ )
+
+ matching_candidates = list(
+ itertools.chain.from_iterable(
+ candidates_by_version[ver] for ver in matching_versions
+ )
+ )
+ if not matching_candidates:
+ raise NoCandidateFound(ireq, all_candidates, self.finder)
+
+ evaluator = self.finder.make_candidate_evaluator(ireq.name)
+ best_candidate_result = evaluator.compute_best_candidate(matching_candidates)
+ best_candidate = best_candidate_result.best_candidate
+
+ # Turn the candidate into a pinned InstallRequirement
+ return make_install_requirement(
+ best_candidate.name,
+ best_candidate.version,
+ ireq.extras,
+ constraint=ireq.constraint,
+ )
+
+ def resolve_reqs(self, download_dir, ireq, wheel_cache):
+ with get_requirement_tracker() as req_tracker, TempDirectory(
+ kind="resolver"
+ ) as temp_dir, indent_log():
+ preparer = self.command.make_requirement_preparer(
+ temp_build_dir=temp_dir,
+ options=self.options,
+ req_tracker=req_tracker,
+ session=self.session,
+ finder=self.finder,
+ use_user_site=False,
+ download_dir=download_dir,
+ wheel_download_dir=self._wheel_download_dir,
+ )
+
+ reqset = RequirementSet()
+ if PIP_VERSION[:2] <= (20, 1):
+ ireq.is_direct = True
+ else:
+ ireq.user_supplied = True
+ reqset.add_requirement(ireq)
+
+ resolver = self.command.make_resolver(
+ preparer=preparer,
+ finder=self.finder,
+ options=self.options,
+ wheel_cache=wheel_cache,
+ use_user_site=False,
+ ignore_installed=True,
+ ignore_requires_python=False,
+ force_reinstall=False,
+ upgrade_strategy="to-satisfy-only",
+ )
+ results = resolver._resolve_one(reqset, ireq)
+ if not ireq.prepared:
+ # If still not prepared, e.g. a constraint, do enough to assign
+ # the ireq a name:
+ resolver._get_abstract_dist_for(ireq)
+
+ if PIP_VERSION[:2] <= (20, 0):
+ reqset.cleanup_files()
+
+ return set(results)
+
+ def get_dependencies(self, ireq):
+ """
+ Given a pinned, URL, or editable InstallRequirement, returns a set of
+ dependencies (also InstallRequirements, but not necessarily pinned).
+ They indicate the secondary dependencies for the given requirement.
+ """
+ if not (
+ ireq.editable or is_url_requirement(ireq) or is_pinned_requirement(ireq)
+ ):
+ raise TypeError(
+ "Expected url, pinned or editable InstallRequirement, got {}".format(
+ ireq
+ )
+ )
+
+ if ireq not in self._dependencies_cache:
+ if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)):
+ # No download_dir for locally available editable requirements.
+ # If a download_dir is passed, pip will unnecessarily
+ # archive the entire source directory
+ download_dir = None
+ elif ireq.link and ireq.link.is_vcs:
+ # No download_dir for VCS sources. This also works around pip
+ # using git-checkout-index, which gets rid of the .git dir.
+ download_dir = None
+ else:
+ download_dir = self._get_download_path(ireq)
+ if not os.path.isdir(download_dir):
+ os.makedirs(download_dir)
+ if not os.path.isdir(self._wheel_download_dir):
+ os.makedirs(self._wheel_download_dir)
+
+ with global_tempdir_manager():
+ wheel_cache = WheelCache(self._cache_dir, self.options.format_control)
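+ # Remember any pre-existing requirement tracker so the environment can be restored once resolution is done.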
+ prev_tracker = os.environ.get("PIP_REQ_TRACKER")
+ try:
+ self._dependencies_cache[ireq] = self.resolve_reqs(
+ download_dir, ireq, wheel_cache
+ )
+ finally:
+ if "PIP_REQ_TRACKER" in os.environ:
+ if prev_tracker:
+ os.environ["PIP_REQ_TRACKER"] = prev_tracker
+ else:
+ del os.environ["PIP_REQ_TRACKER"]
+
+ if PIP_VERSION[:2] <= (20, 0):
+ wheel_cache.cleanup()
+
+ return self._dependencies_cache[ireq]
+
+ def copy_ireq_dependencies(self, source, dest):
+ try:
+ self._dependencies_cache[dest] = self._dependencies_cache[source]
+ except KeyError:
+ # `source` may not be in cache yet.
+ pass
+
+ def _get_project(self, ireq):
+ """
+ Return a dict of project info from the PyPI JSON API for a given
+ InstallRequirement. Return None on HTTP/JSON error or if the package
+ is not found on the PyPI server.
+
+ API reference: https://warehouse.readthedocs.io/api-reference/json/
+ """
+ package_indexes = (
+ PackageIndex(url=index_url, file_storage_domain="")
+ for index_url in self.finder.search_scope.index_urls
+ )
+ for package_index in package_indexes:
+ url = "{url}/{name}/json".format(url=package_index.pypi_url, name=ireq.name)
+ try:
+ response = self.session.get(url)
+ except RequestException as e:
+ log.debug(
+ "Fetch package info from PyPI failed: {url}: {e}".format(
+ url=url, e=e
+ )
+ )
+ continue
+
+ # Skip this PyPI server: either the package is not there
+ # or the JSON API might not be supported
+ if response.status_code == 404:
+ continue
+
+ try:
+ data = response.json()
+ except ValueError as e:
+ log.debug(
+ "Cannot parse JSON response from PyPI: {url}: {e}".format(
+ url=url, e=e
+ )
+ )
+ continue
+ return data
+ return None
+
+ def _get_download_path(self, ireq):
+ """
+ Determine the download dir location in a way which avoids name
+ collisions.
+ """
+ if ireq.link:
+ salt = hashlib.sha224(ireq.link.url_without_fragment.encode()).hexdigest()
+ # Nest directories to avoid running out of top level dirs on some FS
+ # (see pypi _get_cache_path_parts, which inspired this)
+ salt = [salt[:2], salt[2:4], salt[4:6], salt[6:]]
+ return os.path.join(self._download_dir, *salt)
+ else:
+ return self._download_dir
+
+ def get_hashes(self, ireq):
+ """
+ Given an InstallRequirement, return a set of hashes that represent all
+ of the files for a given requirement. Unhashable requirements return an
+ empty set. Unpinned requirements raise a TypeError.
+ """
+
+ if ireq.link:
+ link = ireq.link
+
+ if link.is_vcs or (link.is_file and link.is_existing_dir()):
+ # Return empty set for unhashable requirements.
+ # Unhashable logic modeled on pip's
+ # RequirementPreparer.prepare_linked_requirement
+ return set()
+
+ if is_url_requirement(ireq):
+ # Directly hash URL requirements.
+ # URL requirements may have been previously downloaded and cached
+ # locally by self.resolve_reqs()
+ cached_path = os.path.join(self._get_download_path(ireq), link.filename)
+ if os.path.exists(cached_path):
+ cached_link = Link(path_to_url(cached_path))
+ else:
+ cached_link = link
+ return {self._get_file_hash(cached_link)}
+
+ if not is_pinned_requirement(ireq):
+ raise TypeError("Expected pinned requirement, got {}".format(ireq))
+
+ log.debug("{}".format(ireq.name))
+
+ with log.indentation():
+ hashes = self._get_hashes_from_pypi(ireq)
+ if hashes is None:
+ log.log("Couldn't get hashes from PyPI, fallback to hashing files")
+ return self._get_hashes_from_files(ireq)
+
+ return hashes
+
+ def _get_hashes_from_pypi(self, ireq):
+ """
+ Return a set of hashes from PyPI JSON API for a given InstallRequirement.
+ Return None if fetching the data failed or digests are missing.
+ """
+ project = self._get_project(ireq)
+ if project is None:
+ return None
+
+ _, version, _ = as_tuple(ireq)
+
+ try:
+ release_files = project["releases"][version]
+ except KeyError:
+ log.debug("Missing release files on PyPI")
+ return None
+
+ try:
+ hashes = {
+ "{algo}:{digest}".format(
+ algo=FAVORITE_HASH, digest=file_["digests"][FAVORITE_HASH]
+ )
+ for file_ in release_files
+ if file_["packagetype"] in self.HASHABLE_PACKAGE_TYPES
+ }
+ except KeyError:
+ log.debug("Missing digests of release files on PyPI")
+ return None
+
+ return hashes
+
+ def _get_hashes_from_files(self, ireq):
+ """
+ Return a set of hashes for all release files of a given InstallRequirement.
+ """
+ # We need to get all of the candidates that match our current version
+ # pin, these will represent all of the files that could possibly
+ # satisfy this constraint.
+ all_candidates = self.find_all_candidates(ireq.name)
+ candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
+ matching_versions = list(
+ ireq.specifier.filter((candidate.version for candidate in all_candidates))
+ )
+ matching_candidates = candidates_by_version[matching_versions[0]]
+
+ return {
+ self._get_file_hash(candidate.link) for candidate in matching_candidates
+ }
+
+ def _get_file_hash(self, link):
+ log.debug("Hashing {}".format(link.show_url))
+ h = hashlib.new(FAVORITE_HASH)
+ with open_local_or_remote_file(link, self.session) as f:
+ # Chunks to iterate
+ chunks = iter(lambda: f.stream.read(FILE_CHUNK_SIZE), b"")
+
+ # Choose a context manager depending on verbosity
+ if log.verbosity >= 1:
+ iter_length = f.size / FILE_CHUNK_SIZE if f.size else None
+ bar_template = "{prefix} |%(bar)s| %(info)s".format(
+ prefix=" " * log.current_indent
+ )
+ context_manager = progressbar(
+ chunks,
+ length=iter_length,
+ # Make it look like default pip progress bar
+ fill_char="█",
+ empty_char=" ",
+ bar_template=bar_template,
+ width=32,
+ )
+ else:
+ context_manager = contextlib.nullcontext(chunks)
+
+ # Iterate over the chosen context manager
+ with context_manager as bar:
+ for chunk in bar:
+ h.update(chunk)
+ return ":".join([FAVORITE_HASH, h.hexdigest()])
+
+ @contextmanager
+ def allow_all_wheels(self):
+ """
+ Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.
+
+ This also saves the candidate cache and sets a new one, or else the results from
+ the previous non-patched calls will interfere.
+ """
+
+ def _wheel_supported(self, tags=None):
+ # Ignore current platform. Support everything.
+ return True
+
+ def _wheel_support_index_min(self, tags=None):
+ # All wheels are equal priority for sorting.
+ return 0
+
+ original_wheel_supported = Wheel.supported
+ original_support_index_min = Wheel.support_index_min
+ original_cache = self._available_candidates_cache
+
+ Wheel.supported = _wheel_supported
+ Wheel.support_index_min = _wheel_support_index_min
+ self._available_candidates_cache = {}
+
+ try:
+ yield
+ finally:
+ Wheel.supported = original_wheel_supported
+ Wheel.support_index_min = original_support_index_min
+ self._available_candidates_cache = original_cache
+
+ def _setup_logging(self):
+ """
+ Set up pip's logger. Ensure pip's verbosity matches pip-tools' and sync
+ pip's log stream with LogContext.stream.
+ """
+ # Pip's default logger is noisy, so decrease its verbosity
+ setup_logging(
+ verbosity=log.verbosity - 1,
+ no_color=self.options.no_color,
+ user_log_file=self.options.log,
+ )
+
+ # Sync pip's console handler stream with LogContext.stream
+ logger = logging.getLogger()
+ for handler in logger.handlers:
+ if handler.name == "console": # pragma: no branch
+ handler.stream = log.stream
+ break
+ else: # pragma: no cover
+ # There is always a console handler. This warning would be a signal that
+ # this block should be removed/revisited, because pip may have
+ # refactored out its logging config.
+ log.warning("Couldn't find a 'console' logging handler")
+
+ # Sync pip's progress bars stream with LogContext.stream
+ for bar_cls in itertools.chain(*BAR_TYPES.values()):
+ bar_cls.file = log.stream
+
+
+@contextmanager
+def open_local_or_remote_file(link, session):
+ """
+ Open local or remote file for reading.
+
+ :type link: pip.index.Link
+ :type session: requests.Session
+ :raises ValueError: If link points to a local directory.
+ :return: a context manager to a FileStream with the opened file-like object
+ """
+ url = link.url_without_fragment
+
+ if link.is_file:
+ # Local URL
+ local_path = url_to_path(url)
+ if os.path.isdir(local_path):
+ raise ValueError("Cannot open directory for read: {}".format(url))
+ else:
+ st = os.stat(local_path)
+ with open(local_path, "rb") as local_file:
+ yield FileStream(stream=local_file, size=st.st_size)
+ else:
+ # Remote URL
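+ # "identity" asks the server not to compress the response, so content-length matches the bytes actually streamed.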
+ headers = {"Accept-Encoding": "identity"}
+ response = session.get(url, headers=headers, stream=True)
+
+ # Content length must be int or None
+ try:
+ content_length = int(response.headers["content-length"])
+ except (ValueError, KeyError, TypeError):
+ content_length = None
+
+ try:
+ yield FileStream(stream=response.raw, size=content_length)
+ finally:
+ response.close()
diff --git a/third_party/python/pip-tools/piptools/resolver.py b/third_party/python/pip-tools/piptools/resolver.py
new file mode 100644
index 0000000000..954f751ab9
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/resolver.py
@@ -0,0 +1,408 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import copy
+import os
+from functools import partial
+from itertools import chain, count, groupby
+
+from pip._internal.req.constructors import install_req_from_line
+
+from . import click
+from .logging import log
+from .utils import (
+ UNSAFE_PACKAGES,
+ format_requirement,
+ format_specifier,
+ is_pinned_requirement,
+ is_url_requirement,
+ key_from_ireq,
+)
+
+green = partial(click.style, fg="green")
+magenta = partial(click.style, fg="magenta")
+
+
+class RequirementSummary(object):
+ """
+ Summary of a requirement's properties for comparison purposes.
+ """
+
+ def __init__(self, ireq):
+ self.req = ireq.req
+ self.key = key_from_ireq(ireq)
+ self.extras = frozenset(ireq.extras)
+ self.specifier = ireq.specifier
+
+ def __eq__(self, other):
+ return (
+ self.key == other.key
+ and self.specifier == other.specifier
+ and self.extras == other.extras
+ )
+
+ def __hash__(self):
+ return hash((self.key, self.specifier, self.extras))
+
+ def __str__(self):
+ return repr((self.key, str(self.specifier), sorted(self.extras)))
+
+
+def combine_install_requirements(repository, ireqs):
+ """
+ Return a single install requirement that reflects a combination of
+ all the inputs.
+ """
+ # We will store the source ireqs in a _source_ireqs attribute;
+ # if any of the inputs have this, then use those sources directly.
+ source_ireqs = []
+ for ireq in ireqs:
+ source_ireqs.extend(getattr(ireq, "_source_ireqs", [ireq]))
+
+ # Optimization. Don't bother with combination logic.
+ if len(source_ireqs) == 1:
+ return source_ireqs[0]
+
+ # deepcopy the accumulator so as to not modify the inputs
+ combined_ireq = copy.deepcopy(source_ireqs[0])
+ repository.copy_ireq_dependencies(source_ireqs[0], combined_ireq)
+
+ for ireq in source_ireqs[1:]:
+ # NOTE we may be losing some info on dropped reqs here
+ combined_ireq.req.specifier &= ireq.req.specifier
+ if combined_ireq.constraint:
+ # We don't find dependencies for constraint ireqs, so copy them
+ # from non-constraints:
+ repository.copy_ireq_dependencies(ireq, combined_ireq)
+ combined_ireq.constraint &= ireq.constraint
+ # Return a sorted, de-duped tuple of extras
+ combined_ireq.extras = tuple(
+ sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras)))
+ )
+
+ # InstallRequirements objects are assumed to come from only one source, and
+ # so they support only a single comes_from entry. This function breaks this
+ # model. As a workaround, we deterministically choose a single source for
+ # the comes_from entry, and add an extra _source_ireqs attribute to keep
+ # track of multiple sources for use within pip-tools.
+ if len(source_ireqs) > 1:
+ if any(ireq.comes_from is None for ireq in source_ireqs):
+ # None indicates package was directly specified.
+ combined_ireq.comes_from = None
+ else:
+ # Populate the comes_from field from one of the sources.
+ # Requirement input order is not stable, so we need to sort:
+ # We choose the shortest entry in order to keep the printed
+ # representation as concise as possible.
+ combined_ireq.comes_from = min(
+ (ireq.comes_from for ireq in source_ireqs),
+ key=lambda x: (len(str(x)), str(x)),
+ )
+ combined_ireq._source_ireqs = source_ireqs
+ return combined_ireq
+
+
+class Resolver(object):
+ def __init__(
+ self,
+ constraints,
+ repository,
+ cache,
+ prereleases=False,
+ clear_caches=False,
+ allow_unsafe=False,
+ ):
+ """
+ This class resolves a given set of constraints (a collection of
+ InstallRequirement objects) by consulting the given Repository and the
+ DependencyCache.
+ """
+ self.our_constraints = set(constraints)
+ self.their_constraints = set()
+ self.repository = repository
+ self.dependency_cache = cache
+ self.prereleases = prereleases
+ self.clear_caches = clear_caches
+ self.allow_unsafe = allow_unsafe
+ self.unsafe_constraints = set()
+
+ @property
+ def constraints(self):
+ return set(
+ self._group_constraints(chain(self.our_constraints, self.their_constraints))
+ )
+
+ def resolve_hashes(self, ireqs):
+ """
+ Finds acceptable hashes for all of the given InstallRequirements.
+ """
+ log.debug("")
+ log.debug("Generating hashes:")
+ with self.repository.allow_all_wheels(), log.indentation():
+ return {ireq: self.repository.get_hashes(ireq) for ireq in ireqs}
+
+ def resolve(self, max_rounds=10):
+ """
+ Finds concrete package versions for all the given InstallRequirements
+ and their recursive dependencies. The end result is a flat list of
+ (name, version) tuples. (Or an editable package.)
+
+ Resolves constraints one round at a time, until they don't change
+ anymore. Protects against infinite loops by breaking out after a
+ maximum number of rounds.
+ """
+ if self.clear_caches:
+ self.dependency_cache.clear()
+ self.repository.clear_caches()
+
+ # Ignore existing packages
+ os.environ[str("PIP_EXISTS_ACTION")] = str(
+ "i"
+ ) # NOTE: str() wrapping necessary for Python 2/3 compat
+ for current_round in count(start=1): # pragma: no branch
+ if current_round > max_rounds:
+ raise RuntimeError(
+ "No stable configuration of concrete packages "
+ "could be found for the given constraints after "
+ "{max_rounds} rounds of resolving.\n"
+ "This is likely a bug.".format(max_rounds=max_rounds)
+ )
+
+ log.debug("")
+ log.debug(magenta("{:^60}".format("ROUND {}".format(current_round))))
+ has_changed, best_matches = self._resolve_one_round()
+ log.debug("-" * 60)
+ log.debug(
+ "Result of round {}: {}".format(
+ current_round, "not stable" if has_changed else "stable, done"
+ )
+ )
+ if not has_changed:
+ break
+
+ # If a package version (foo==2.0) was built in a previous round,
+ # and in this round a different version of foo needs to be built
+ # (i.e. foo==1.0), the directory will exist already, which will
+ # cause a pip build failure. The trick is to start with a new
+ # build cache dir for every round, so this can never happen.
+ self.repository.freshen_build_caches()
+
+ del os.environ["PIP_EXISTS_ACTION"]
+
+ # Only include hard requirements and not pip constraints
+ results = {req for req in best_matches if not req.constraint}
+
+ # Filter out unsafe requirements.
+ self.unsafe_constraints = set()
+ if not self.allow_unsafe:
+ # reverse_dependencies is used to filter out packages that are only
+ # required by unsafe packages. This logic is incomplete, as it would
+ # fail to filter sub-sub-dependencies of unsafe packages. None of the
+ # UNSAFE_PACKAGES currently have any dependencies at all (which makes
+ # sense for installation tools) so this seems sufficient.
+ reverse_dependencies = self.reverse_dependencies(results)
+ for req in results.copy():
+ required_by = reverse_dependencies.get(req.name.lower(), [])
+ if req.name in UNSAFE_PACKAGES or (
+ required_by and all(name in UNSAFE_PACKAGES for name in required_by)
+ ):
+ self.unsafe_constraints.add(req)
+ results.remove(req)
+
+ return results
+
+ def _group_constraints(self, constraints):
+ """
+ Groups constraints (remember, InstallRequirements!) by their key name,
+ combining their SpecifierSets into a single InstallRequirement per
+ package. For example, given the following constraints:
+
+ Django<1.9,>=1.4.2
+ django~=1.5
+ Flask~=0.7
+
+ This will be combined into a single entry per package:
+
+ django~=1.5,<1.9,>=1.4.2
+ flask~=0.7
+
+ """
+ constraints = list(constraints)
+ for ireq in constraints:
+ if ireq.name is None:
+ # get_dependencies has side-effect of assigning name to ireq
+ # (so we can group by the name below).
+ self.repository.get_dependencies(ireq)
+
+ # Sort first by name, i.e. the groupby key. Then within each group,
+ # sort editables first.
+ # This way, we don't bother with combining editables, since the first
+ # ireq will be editable, if one exists.
+ for _, ireqs in groupby(
+ sorted(constraints, key=(lambda x: (key_from_ireq(x), not x.editable))),
+ key=key_from_ireq,
+ ):
+ yield combine_install_requirements(self.repository, ireqs)
+
+ def _resolve_one_round(self):
+ """
+ Resolves one level of the current constraints, by finding the best
+ match for each package in the repository and adding all requirements
+ for those best package versions. Some of these constraints may be new
+ or updated.
+
+ Returns whether new constraints appeared in this round. If no
+ constraints were added or changed, this indicates a stable
+ configuration.
+ """
+ # Sort this list for readability of terminal output
+ constraints = sorted(self.constraints, key=key_from_ireq)
+
+ log.debug("Current constraints:")
+ with log.indentation():
+ for constraint in constraints:
+ log.debug(str(constraint))
+
+ log.debug("")
+ log.debug("Finding the best candidates:")
+ with log.indentation():
+ best_matches = {self.get_best_match(ireq) for ireq in constraints}
+
+ # Find the new set of secondary dependencies
+ log.debug("")
+ log.debug("Finding secondary dependencies:")
+
+ their_constraints = []
+ with log.indentation():
+ for best_match in best_matches:
+ their_constraints.extend(self._iter_dependencies(best_match))
+ # Grouping constraints to make clean diff between rounds
+ theirs = set(self._group_constraints(their_constraints))
+
+ # NOTE: We need to compare RequirementSummary objects, since
+ # InstallRequirement does not define equality
+ diff = {RequirementSummary(t) for t in theirs} - {
+ RequirementSummary(t) for t in self.their_constraints
+ }
+ removed = {RequirementSummary(t) for t in self.their_constraints} - {
+ RequirementSummary(t) for t in theirs
+ }
+
+ has_changed = len(diff) > 0 or len(removed) > 0
+ if has_changed:
+ log.debug("")
+ log.debug("New dependencies found in this round:")
+ with log.indentation():
+ for new_dependency in sorted(diff, key=key_from_ireq):
+ log.debug("adding {}".format(new_dependency))
+ log.debug("Removed dependencies in this round:")
+ with log.indentation():
+ for removed_dependency in sorted(removed, key=key_from_ireq):
+ log.debug("removing {}".format(removed_dependency))
+
+ # Store the last round's results in the their_constraints
+ self.their_constraints = theirs
+ return has_changed, best_matches
+
+ def get_best_match(self, ireq):
+ """
+ Returns a (pinned or editable) InstallRequirement, indicating the best
+ match to use for the given InstallRequirement.
+
+ Example:
+ Given the constraint Flask>=0.10, may return Flask==0.10.1 at
+ a certain moment in time.
+
+ Pinned requirements will always return themselves, i.e.
+
+ Flask==0.10.1 => Flask==0.10.1
+
+ """
+ if ireq.editable or is_url_requirement(ireq):
+ # NOTE: it's much quicker to immediately return instead of
+ # hitting the index server
+ best_match = ireq
+ elif is_pinned_requirement(ireq):
+ # NOTE: it's much quicker to immediately return instead of
+ # hitting the index server
+ best_match = ireq
+ elif ireq.constraint:
+ # NOTE: This is not a requirement (yet) and does not need
+ # to be resolved
+ best_match = ireq
+ else:
+ best_match = self.repository.find_best_match(
+ ireq, prereleases=self.prereleases
+ )
+
+ # Format the best match
+ log.debug(
+ "found candidate {} (constraint was {})".format(
+ format_requirement(best_match), format_specifier(ireq)
+ )
+ )
+ best_match.comes_from = ireq.comes_from
+ if hasattr(ireq, "_source_ireqs"):
+ best_match._source_ireqs = ireq._source_ireqs
+ return best_match
+
+ def _iter_dependencies(self, ireq):
+ """
+ Given a pinned, url, or editable InstallRequirement, collects all the
+ secondary dependencies for them, either by looking them up in a local
+ cache, or by reaching out to the repository.
+
+ Editable requirements will never be looked up, as they may have
+ changed at any time.
+ """
+ # Pip does not resolve dependencies of constraints. We skip handling
+ # constraints here as well to prevent the cache from being polluted.
+ # Constraints that are later determined to be dependencies will be
+ # marked as non-constraints in later rounds by
+ # `combine_install_requirements`, and will be properly resolved.
+ # See https://github.com/pypa/pip/
+ # blob/6896dfcd831330c13e076a74624d95fa55ff53f4/src/pip/_internal/
+ # legacy_resolve.py#L325
+ if ireq.constraint:
+ return
+
+ if ireq.editable or is_url_requirement(ireq):
+ for dependency in self.repository.get_dependencies(ireq):
+ yield dependency
+ return
+ elif not is_pinned_requirement(ireq):
+ raise TypeError(
+ "Expected pinned or editable requirement, got {}".format(ireq)
+ )
+
+ # Now, either get the dependencies from the dependency cache (for
+ # speed), or reach out to the external repository to
+ # download and inspect the package version and get dependencies
+ # from there
+ if ireq not in self.dependency_cache:
+ log.debug(
+ "{} not in cache, need to check index".format(format_requirement(ireq)),
+ fg="yellow",
+ )
+ dependencies = self.repository.get_dependencies(ireq)
+ self.dependency_cache[ireq] = sorted(str(ireq.req) for ireq in dependencies)
+
+ # Example: ['Werkzeug>=0.9', 'Jinja2>=2.4']
+ dependency_strings = self.dependency_cache[ireq]
+ log.debug(
+ "{:25} requires {}".format(
+ format_requirement(ireq),
+ ", ".join(sorted(dependency_strings, key=lambda s: s.lower())) or "-",
+ )
+ )
+ for dependency_string in dependency_strings:
+ yield install_req_from_line(
+ dependency_string, constraint=ireq.constraint, comes_from=ireq
+ )
+
+ def reverse_dependencies(self, ireqs):
+ non_editable = [
+ ireq for ireq in ireqs if not (ireq.editable or is_url_requirement(ireq))
+ ]
+ return self.dependency_cache.reverse_dependencies(non_editable)
diff --git a/third_party/python/pip-tools/piptools/scripts/__init__.py b/third_party/python/pip-tools/piptools/scripts/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/scripts/__init__.py
diff --git a/third_party/python/pip-tools/piptools/scripts/compile.py b/third_party/python/pip-tools/piptools/scripts/compile.py
new file mode 100755
index 0000000000..785afb9bb7
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/scripts/compile.py
@@ -0,0 +1,501 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import os
+import shlex
+import sys
+import tempfile
+import warnings
+
+from click import Command
+from click.utils import safecall
+from pip._internal.commands import create_command
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.utils.misc import redact_auth_from_url
+
+from .. import click
+from .._compat import parse_requirements
+from ..cache import DependencyCache
+from ..exceptions import PipToolsError
+from ..locations import CACHE_DIR
+from ..logging import log
+from ..repositories import LocalRequirementsRepository, PyPIRepository
+from ..resolver import Resolver
+from ..utils import UNSAFE_PACKAGES, dedup, is_pinned_requirement, key_from_ireq
+from ..writer import OutputWriter
+
+DEFAULT_REQUIREMENTS_FILE = "requirements.in"
+DEFAULT_REQUIREMENTS_OUTPUT_FILE = "requirements.txt"
+
+
+def _get_default_option(option_name):
+ """
+ Get default value of the pip's option (including option from pip.conf)
+ by a given option name.
+ """
+ install_command = create_command("install")
+ default_values = install_command.parser.get_default_values()
+ return getattr(default_values, option_name)
+
+
+class BaseCommand(Command):
+ _os_args = None
+
+ def parse_args(self, ctx, args):
+ """
+ Override base `parse_args` to store the argument part of `sys.argv`.
+ """
+ self._os_args = set(args)
+ return super(BaseCommand, self).parse_args(ctx, args)
+
+ def has_arg(self, arg_name):
+ """
+ Detect whether a given arg name (including negative counterparts
+ to the arg, e.g. --no-arg) is present in the argument part of `sys.argv`.
+ """
+ command_options = {option.name: option for option in self.params}
+ option = command_options[arg_name]
+ args = set(option.opts + option.secondary_opts)
+ return bool(self._os_args & args)
+
+
+@click.command(
+ cls=BaseCommand, context_settings={"help_option_names": ("-h", "--help")}
+)
+@click.version_option()
+@click.pass_context
+@click.option("-v", "--verbose", count=True, help="Show more output")
+@click.option("-q", "--quiet", count=True, help="Give less output")
+@click.option(
+ "-n",
+ "--dry-run",
+ is_flag=True,
+ help="Only show what would happen, don't change anything",
+)
+@click.option(
+ "-p",
+ "--pre",
+ is_flag=True,
+ default=None,
+ help="Allow resolving to prereleases (default is not)",
+)
+@click.option(
+ "-r",
+ "--rebuild",
+ is_flag=True,
+ help="Clear any caches upfront, rebuild from scratch",
+)
+@click.option(
+ "-f",
+ "--find-links",
+ multiple=True,
+ help="Look for archives in this directory or on this HTML page",
+ envvar="PIP_FIND_LINKS",
+)
+@click.option(
+ "-i",
+ "--index-url",
+ help="Change index URL (defaults to {index_url})".format(
+ index_url=redact_auth_from_url(_get_default_option("index_url"))
+ ),
+ envvar="PIP_INDEX_URL",
+)
+@click.option(
+ "--extra-index-url",
+ multiple=True,
+ help="Add additional index URL to search",
+ envvar="PIP_EXTRA_INDEX_URL",
+)
+@click.option("--cert", help="Path to alternate CA bundle.")
+@click.option(
+ "--client-cert",
+ help="Path to SSL client certificate, a single file containing "
+ "the private key and the certificate in PEM format.",
+)
+@click.option(
+ "--trusted-host",
+ multiple=True,
+ envvar="PIP_TRUSTED_HOST",
+ help="Mark this host as trusted, even though it does not have "
+ "valid or any HTTPS.",
+)
+@click.option(
+ "--header/--no-header",
+ is_flag=True,
+ default=True,
+ help="Add header to generated file",
+)
+@click.option(
+ "--index/--no-index",
+ is_flag=True,
+ default=True,
+ help="DEPRECATED: Add index URL to generated file",
+)
+@click.option(
+ "--emit-trusted-host/--no-emit-trusted-host",
+ is_flag=True,
+ default=True,
+ help="Add trusted host option to generated file",
+)
+@click.option(
+ "--annotate/--no-annotate",
+ is_flag=True,
+ default=True,
+ help="Annotate results, indicating where dependencies come from",
+)
+@click.option(
+ "-U",
+ "--upgrade",
+ is_flag=True,
+ default=False,
+ help="Try to upgrade all dependencies to their latest versions",
+)
+@click.option(
+ "-P",
+ "--upgrade-package",
+ "upgrade_packages",
+ nargs=1,
+ multiple=True,
+ help="Specify particular packages to upgrade.",
+)
+@click.option(
+ "-o",
+ "--output-file",
+ nargs=1,
+ default=None,
+ type=click.File("w+b", atomic=True, lazy=True),
+ help=(
+ "Output file name. Required if more than one input file is given. "
+ "Will be derived from input file otherwise."
+ ),
+)
+@click.option(
+ "--allow-unsafe",
+ is_flag=True,
+ default=False,
+ help="Pin packages considered unsafe: {}".format(
+ ", ".join(sorted(UNSAFE_PACKAGES))
+ ),
+)
+@click.option(
+ "--generate-hashes",
+ is_flag=True,
+ default=False,
+ help="Generate pip 8 style hashes in the resulting requirements file.",
+)
+@click.option(
+ "--reuse-hashes/--no-reuse-hashes",
+ is_flag=True,
+ default=True,
+ help=(
+ "Improve the speed of --generate-hashes by reusing the hashes from an "
+ "existing output file."
+ ),
+)
+@click.option(
+ "--max-rounds",
+ default=10,
+ help="Maximum number of rounds before resolving the requirements aborts.",
+)
+@click.argument("src_files", nargs=-1, type=click.Path(exists=True, allow_dash=True))
+@click.option(
+ "--build-isolation/--no-build-isolation",
+ is_flag=True,
+ default=True,
+ help="Enable isolation when building a modern source distribution. "
+ "Build dependencies specified by PEP 518 must be already installed "
+ "if build isolation is disabled.",
+)
+@click.option(
+ "--emit-find-links/--no-emit-find-links",
+ is_flag=True,
+ default=True,
+ help="Add the find-links option to generated file",
+)
+@click.option(
+ "--cache-dir",
+ help="Store the cache data in DIRECTORY.",
+ default=CACHE_DIR,
+ envvar="PIP_TOOLS_CACHE_DIR",
+ show_default=True,
+ show_envvar=True,
+ type=click.Path(file_okay=False, writable=True),
+)
+@click.option("--pip-args", help="Arguments to pass directly to the pip command.")
+@click.option(
+ "--emit-index-url/--no-emit-index-url",
+ is_flag=True,
+ default=True,
+ help="Add index URL to generated file",
+)
+def cli(
+ ctx,
+ verbose,
+ quiet,
+ dry_run,
+ pre,
+ rebuild,
+ find_links,
+ index_url,
+ extra_index_url,
+ cert,
+ client_cert,
+ trusted_host,
+ header,
+ index,
+ emit_trusted_host,
+ annotate,
+ upgrade,
+ upgrade_packages,
+ output_file,
+ allow_unsafe,
+ generate_hashes,
+ reuse_hashes,
+ src_files,
+ max_rounds,
+ build_isolation,
+ emit_find_links,
+ cache_dir,
+ pip_args,
+ emit_index_url,
+):
+ """Compiles requirements.txt from requirements.in specs."""
+ log.verbosity = verbose - quiet
+
+ if len(src_files) == 0:
+ if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
+ src_files = (DEFAULT_REQUIREMENTS_FILE,)
+ elif os.path.exists("setup.py"):
+ src_files = ("setup.py",)
+ else:
+ raise click.BadParameter(
+ (
+ "If you do not specify an input file, "
+ "the default is {} or setup.py"
+ ).format(DEFAULT_REQUIREMENTS_FILE)
+ )
+
+ if not output_file:
+ # An output file must be provided for stdin
+ if src_files == ("-",):
+ raise click.BadParameter("--output-file is required if input is from stdin")
+ # Use the default requirements output file if setup.py is the source file
+ elif src_files == ("setup.py",):
+ file_name = DEFAULT_REQUIREMENTS_OUTPUT_FILE
+ # An output file must be provided if there are multiple source files
+ elif len(src_files) > 1:
+ raise click.BadParameter(
+ "--output-file is required if two or more input files are given."
+ )
+ # Otherwise derive the output file from the source file
+ else:
+ base_name = src_files[0].rsplit(".", 1)[0]
+ file_name = base_name + ".txt"
+
+ output_file = click.open_file(file_name, "w+b", atomic=True, lazy=True)
+
+ # Close the file at the end of the context execution
+ ctx.call_on_close(safecall(output_file.close_intelligently))
+
+ if cli.has_arg("index") and cli.has_arg("emit_index_url"):
+ raise click.BadParameter(
+ "--index/--no-index and --emit-index-url/--no-emit-index-url "
+ "are mutually exclusive."
+ )
+ elif cli.has_arg("index"):
+ warnings.warn(
+ "--index and --no-index are deprecated and will be removed "
+ "in future versions. Use --emit-index-url/--no-emit-index-url instead.",
+ category=FutureWarning,
+ )
+ emit_index_url = index
+
+ ###
+ # Setup
+ ###
+
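+ # Tokenise the raw --pip-args string; the tokens are appended after the flags derived from the individual CLI options below.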
+ right_args = shlex.split(pip_args or "")
+ pip_args = []
+ if find_links:
+ for link in find_links:
+ pip_args.extend(["-f", link])
+ if index_url:
+ pip_args.extend(["-i", index_url])
+ if extra_index_url:
+ for extra_index in extra_index_url:
+ pip_args.extend(["--extra-index-url", extra_index])
+ if cert:
+ pip_args.extend(["--cert", cert])
+ if client_cert:
+ pip_args.extend(["--client-cert", client_cert])
+ if pre:
+ pip_args.extend(["--pre"])
+ if trusted_host:
+ for host in trusted_host:
+ pip_args.extend(["--trusted-host", host])
+
+ if not build_isolation:
+ pip_args.append("--no-build-isolation")
+ pip_args.extend(right_args)
+
+ repository = PyPIRepository(pip_args, cache_dir=cache_dir)
+
+ # Parse all constraints coming from --upgrade-package/-P
+ upgrade_reqs_gen = (install_req_from_line(pkg) for pkg in upgrade_packages)
+ upgrade_install_reqs = {
+ key_from_ireq(install_req): install_req for install_req in upgrade_reqs_gen
+ }
+
+ existing_pins_to_upgrade = set()
+
+ # Proxy with a LocalRequirementsRepository if --upgrade is not specified
+ # (= default invocation)
+ if not upgrade and os.path.exists(output_file.name):
+ # Use a temporary repository to ensure outdated (removed) options from
+ # the existing requirements.txt don't get into the current repository.
+ tmp_repository = PyPIRepository(pip_args, cache_dir=cache_dir)
+ ireqs = parse_requirements(
+ output_file.name,
+ finder=tmp_repository.finder,
+ session=tmp_repository.session,
+ options=tmp_repository.options,
+ )
+
+ # Exclude packages from --upgrade-package/-P from the existing
+ # constraints, and separately gather pins to be upgraded
+ existing_pins = {}
+ for ireq in filter(is_pinned_requirement, ireqs):
+ key = key_from_ireq(ireq)
+ if key in upgrade_install_reqs:
+ existing_pins_to_upgrade.add(key)
+ else:
+ existing_pins[key] = ireq
+ repository = LocalRequirementsRepository(
+ existing_pins, repository, reuse_hashes=reuse_hashes
+ )
+
+ ###
+ # Parsing/collecting initial requirements
+ ###
+
+ constraints = []
+ for src_file in src_files:
+ is_setup_file = os.path.basename(src_file) == "setup.py"
+ if is_setup_file or src_file == "-":
+ # pip requires filenames and not files. Since we want to support
+ # piping from stdin, we need to briefly save the input from stdin
+            # to a temporary file and have pip read that. This is also used for
+            # reading requirements from install_requires in setup.py.
+ tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
+ if is_setup_file:
+ from distutils.core import run_setup
+
+ dist = run_setup(src_file)
+ tmpfile.write("\n".join(dist.install_requires))
+ comes_from = "{name} ({filename})".format(
+ name=dist.get_name(), filename=src_file
+ )
+ else:
+ tmpfile.write(sys.stdin.read())
+ comes_from = "-r -"
+ tmpfile.flush()
+ reqs = list(
+ parse_requirements(
+ tmpfile.name,
+ finder=repository.finder,
+ session=repository.session,
+ options=repository.options,
+ )
+ )
+ for req in reqs:
+ req.comes_from = comes_from
+ constraints.extend(reqs)
+ else:
+ constraints.extend(
+ parse_requirements(
+ src_file,
+ finder=repository.finder,
+ session=repository.session,
+ options=repository.options,
+ )
+ )
+
+ primary_packages = {
+ key_from_ireq(ireq) for ireq in constraints if not ireq.constraint
+ }
+
+ allowed_upgrades = primary_packages | existing_pins_to_upgrade
+ constraints.extend(
+ ireq for key, ireq in upgrade_install_reqs.items() if key in allowed_upgrades
+ )
+
+ # Filter out pip environment markers which do not match (PEP496)
+ constraints = [
+ req for req in constraints if req.markers is None or req.markers.evaluate()
+ ]
+
+ log.debug("Using indexes:")
+ with log.indentation():
+ for index_url in dedup(repository.finder.index_urls):
+ log.debug(redact_auth_from_url(index_url))
+
+ if repository.finder.find_links:
+ log.debug("")
+ log.debug("Using links:")
+ with log.indentation():
+ for find_link in dedup(repository.finder.find_links):
+ log.debug(redact_auth_from_url(find_link))
+
+ try:
+ resolver = Resolver(
+ constraints,
+ repository,
+ prereleases=repository.finder.allow_all_prereleases or pre,
+ cache=DependencyCache(cache_dir),
+ clear_caches=rebuild,
+ allow_unsafe=allow_unsafe,
+ )
+ results = resolver.resolve(max_rounds=max_rounds)
+ if generate_hashes:
+ hashes = resolver.resolve_hashes(results)
+ else:
+ hashes = None
+ except PipToolsError as e:
+ log.error(str(e))
+ sys.exit(2)
+
+ log.debug("")
+
+ ##
+ # Output
+ ##
+
+ writer = OutputWriter(
+ src_files,
+ output_file,
+ click_ctx=ctx,
+ dry_run=dry_run,
+ emit_header=header,
+ emit_index_url=emit_index_url,
+ emit_trusted_host=emit_trusted_host,
+ annotate=annotate,
+ generate_hashes=generate_hashes,
+ default_index_url=repository.DEFAULT_INDEX_URL,
+ index_urls=repository.finder.index_urls,
+ trusted_hosts=repository.finder.trusted_hosts,
+ format_control=repository.finder.format_control,
+ allow_unsafe=allow_unsafe,
+ find_links=repository.finder.find_links,
+ emit_find_links=emit_find_links,
+ )
+ writer.write(
+ results=results,
+ unsafe_requirements=resolver.unsafe_constraints,
+ markers={
+ key_from_ireq(ireq): ireq.markers for ireq in constraints if ireq.markers
+ },
+ hashes=hashes,
+ )
+
+ if dry_run:
+ log.info("Dry-run, so nothing updated.")
diff --git a/third_party/python/pip-tools/piptools/scripts/sync.py b/third_party/python/pip-tools/piptools/scripts/sync.py
new file mode 100755
index 0000000000..fbad5463cd
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/scripts/sync.py
@@ -0,0 +1,217 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import itertools
+import os
+import shlex
+import sys
+
+from pip._internal.commands import create_command
+from pip._internal.utils.misc import get_installed_distributions
+
+from .. import click, sync
+from .._compat import parse_requirements
+from ..exceptions import PipToolsError
+from ..logging import log
+from ..repositories import PyPIRepository
+from ..utils import flat_map
+
+DEFAULT_REQUIREMENTS_FILE = "requirements.txt"
+
+
+@click.command(context_settings={"help_option_names": ("-h", "--help")})
+@click.version_option()
+@click.option(
+ "-a",
+ "--ask",
+ is_flag=True,
+ help="Show what would happen, then ask whether to continue",
+)
+@click.option(
+ "-n",
+ "--dry-run",
+ is_flag=True,
+ help="Only show what would happen, don't change anything",
+)
+@click.option("--force", is_flag=True, help="Proceed even if conflicts are found")
+@click.option(
+ "-f",
+ "--find-links",
+ multiple=True,
+ help="Look for archives in this directory or on this HTML page",
+ envvar="PIP_FIND_LINKS",
+)
+@click.option(
+ "-i",
+ "--index-url",
+ help="Change index URL (defaults to PyPI)",
+ envvar="PIP_INDEX_URL",
+)
+@click.option(
+ "--extra-index-url",
+ multiple=True,
+ help="Add additional index URL to search",
+ envvar="PIP_EXTRA_INDEX_URL",
+)
+@click.option(
+ "--trusted-host",
+ multiple=True,
+ help="Mark this host as trusted, even though it does not have valid or any HTTPS.",
+)
+@click.option(
+ "--no-index",
+ is_flag=True,
+ help="Ignore package index (only looking at --find-links URLs instead)",
+)
+@click.option("-q", "--quiet", default=False, is_flag=True, help="Give less output")
+@click.option(
+ "--user", "user_only", is_flag=True, help="Restrict attention to user directory"
+)
+@click.option("--cert", help="Path to alternate CA bundle.")
+@click.option(
+ "--client-cert",
+ help="Path to SSL client certificate, a single file containing "
+ "the private key and the certificate in PEM format.",
+)
+@click.argument("src_files", required=False, type=click.Path(exists=True), nargs=-1)
+@click.option("--pip-args", help="Arguments to pass directly to pip install.")
+def cli(
+ ask,
+ dry_run,
+ force,
+ find_links,
+ index_url,
+ extra_index_url,
+ trusted_host,
+ no_index,
+ quiet,
+ user_only,
+ cert,
+ client_cert,
+ src_files,
+ pip_args,
+):
+ """Synchronize virtual environment with requirements.txt."""
+ if not src_files:
+ if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
+ src_files = (DEFAULT_REQUIREMENTS_FILE,)
+ else:
+ msg = "No requirement files given and no {} found in the current directory"
+ log.error(msg.format(DEFAULT_REQUIREMENTS_FILE))
+ sys.exit(2)
+
+ if any(src_file.endswith(".in") for src_file in src_files):
+ msg = (
+ "Some input files have the .in extension, which is most likely an error "
+ "and can cause weird behaviour. You probably meant to use "
+ "the corresponding *.txt file?"
+ )
+ if force:
+ log.warning("WARNING: " + msg)
+ else:
+ log.error("ERROR: " + msg)
+ sys.exit(2)
+
+ install_command = create_command("install")
+ options, _ = install_command.parse_args([])
+ session = install_command._build_session(options)
+ finder = install_command._build_package_finder(options=options, session=session)
+
+ # Parse requirements file. Note, all options inside requirements file
+ # will be collected by the finder.
+ requirements = flat_map(
+ lambda src: parse_requirements(src, finder=finder, session=session), src_files
+ )
+
+ try:
+ requirements = sync.merge(requirements, ignore_conflicts=force)
+ except PipToolsError as e:
+ log.error(str(e))
+ sys.exit(2)
+
+ installed_dists = get_installed_distributions(skip=[], user_only=user_only)
+ to_install, to_uninstall = sync.diff(requirements, installed_dists)
+
+ install_flags = _compose_install_flags(
+ finder,
+ no_index=no_index,
+ index_url=index_url,
+ extra_index_url=extra_index_url,
+ trusted_host=trusted_host,
+ find_links=find_links,
+ user_only=user_only,
+ cert=cert,
+ client_cert=client_cert,
+ ) + shlex.split(pip_args or "")
+ sys.exit(
+ sync.sync(
+ to_install,
+ to_uninstall,
+ verbose=(not quiet),
+ dry_run=dry_run,
+ install_flags=install_flags,
+ ask=ask,
+ )
+ )
+
+
+def _compose_install_flags(
+ finder,
+ no_index=False,
+ index_url=None,
+ extra_index_url=None,
+ trusted_host=None,
+ find_links=None,
+ user_only=False,
+ cert=None,
+ client_cert=None,
+):
+ """
+ Compose install flags with the given finder and CLI options.
+ """
+ result = []
+
+ # Build --index-url/--extra-index-url/--no-index
+ if no_index:
+ result.append("--no-index")
+ elif index_url:
+ result.extend(["--index-url", index_url])
+ elif finder.index_urls:
+ finder_index_url = finder.index_urls[0]
+ if finder_index_url != PyPIRepository.DEFAULT_INDEX_URL:
+ result.extend(["--index-url", finder_index_url])
+ for extra_index in finder.index_urls[1:]:
+ result.extend(["--extra-index-url", extra_index])
+ else:
+ result.append("--no-index")
+
+ for extra_index in extra_index_url or []:
+ result.extend(["--extra-index-url", extra_index])
+
+ # Build --trusted-hosts
+ for host in itertools.chain(trusted_host or [], finder.trusted_hosts):
+ result.extend(["--trusted-host", host])
+
+ # Build --find-links
+ for link in itertools.chain(find_links or [], finder.find_links):
+ result.extend(["--find-links", link])
+
+ # Build format controls --no-binary/--only-binary
+ for format_control in ("no_binary", "only_binary"):
+ formats = getattr(finder.format_control, format_control)
+ if not formats:
+ continue
+ result.extend(
+ ["--" + format_control.replace("_", "-"), ",".join(sorted(formats))]
+ )
+
+ if user_only:
+ result.append("--user")
+
+ if cert:
+ result.extend(["--cert", cert])
+
+ if client_cert:
+ result.extend(["--client-cert", client_cert])
+
+ return result
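+
+
+# A rough usage sketch for _compose_install_flags (all values below are hypothetical
+# and only illustrate the shape of the result; the real output also picks up the
+# finder's extra index URLs, trusted hosts, find-links and format controls):
+#
+#   _compose_install_flags(finder, index_url="https://pypi.example.org/simple",
+#                          trusted_host=["pypi.example.org"])
+#   # -> ["--index-url", "https://pypi.example.org/simple",
+#   #     "--trusted-host", "pypi.example.org", ...]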
diff --git a/third_party/python/pip-tools/piptools/sync.py b/third_party/python/pip-tools/piptools/sync.py
new file mode 100644
index 0000000000..4e2bb49401
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/sync.py
@@ -0,0 +1,223 @@
+import collections
+import os
+import sys
+import tempfile
+from subprocess import check_call # nosec
+
+from pip._internal.commands.freeze import DEV_PKGS
+from pip._internal.utils.compat import stdlib_pkgs
+
+from . import click
+from .exceptions import IncompatibleRequirements
+from .utils import (
+ flat_map,
+ format_requirement,
+ get_hashes_from_ireq,
+ is_url_requirement,
+ key_from_ireq,
+ key_from_req,
+)
+
+PACKAGES_TO_IGNORE = (
+ ["-markerlib", "pip", "pip-tools", "pip-review", "pkg-resources"]
+ + list(stdlib_pkgs)
+ + list(DEV_PKGS)
+)
+
+
+def dependency_tree(installed_keys, root_key):
+ """
+ Calculate the dependency tree for the package `root_key` and return
+    a collection of all its dependencies. Uses a breadth-first traversal.
+
+ `installed_keys` should be a {key: requirement} mapping, e.g.
+ {'django': from_line('django==1.8')}
+ `root_key` should be the key to return the dependency tree for.
+ """
+ dependencies = set()
+ queue = collections.deque()
+
+ if root_key in installed_keys:
+ dep = installed_keys[root_key]
+ queue.append(dep)
+
+ while queue:
+ v = queue.popleft()
+ key = key_from_req(v)
+ if key in dependencies:
+ continue
+
+ dependencies.add(key)
+
+ for dep_specifier in v.requires():
+ dep_name = key_from_req(dep_specifier)
+ if dep_name in installed_keys:
+ dep = installed_keys[dep_name]
+
+ if dep_specifier.specifier.contains(dep.version):
+ queue.append(dep)
+
+ return dependencies
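+
+
+# Illustrative sketch (package names hypothetical): with installed_keys mapping
+# "a" -> a 1.0 (requires b), "b" -> b 2.0 (requires c) and "c" -> c 3.0,
+# dependency_tree(installed_keys, "a") walks a -> b -> c and returns {"a", "b", "c"};
+# a dependency whose installed version does not satisfy its specifier is not followed.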
+
+
+def get_dists_to_ignore(installed):
+ """
+ Returns a collection of package names to ignore when performing pip-sync,
+ based on the currently installed environment. For example, when pip-tools
+ is installed in the local environment, it should be ignored, including all
+ of its dependencies (e.g. click). When pip-tools is not installed
+ locally, click should also be installed/uninstalled depending on the given
+ requirements.
+ """
+ installed_keys = {key_from_req(r): r for r in installed}
+ return list(
+ flat_map(lambda req: dependency_tree(installed_keys, req), PACKAGES_TO_IGNORE)
+ )
+
+
+def merge(requirements, ignore_conflicts):
+ by_key = {}
+
+ for ireq in requirements:
+ # Limitation: URL requirements are merged by precise string match, so
+ # "file:///example.zip#egg=example", "file:///example.zip", and
+ # "example==1.0" will not merge with each other
+ if ireq.match_markers():
+ key = key_from_ireq(ireq)
+
+ if not ignore_conflicts:
+ existing_ireq = by_key.get(key)
+ if existing_ireq:
+ # NOTE: We check equality here since we can assume that the
+ # requirements are all pinned
+ if ireq.specifier != existing_ireq.specifier:
+ raise IncompatibleRequirements(ireq, existing_ireq)
+
+ # TODO: Always pick the largest specifier in case of a conflict
+ by_key[key] = ireq
+ return by_key.values()
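+
+
+# Illustrative sketch (requirement strings hypothetical): merging pinned requirements
+# foo==1.0, foo==1.0 and bar==2.0 yields one ireq per key, while foo==1.0 together
+# with foo==1.1 raises IncompatibleRequirements unless ignore_conflicts is true.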
+
+
+def diff_key_from_ireq(ireq):
+ """
+ Calculate a key for comparing a compiled requirement with installed modules.
+ For URL requirements, only provide a useful key if the url includes
+ #egg=name==version, which will set ireq.req.name and ireq.specifier.
+ Otherwise return ireq.link so the key will not match and the package will
+ reinstall. Reinstall is necessary to ensure that packages will reinstall
+ if the URL is changed but the version is not.
+ """
+ if is_url_requirement(ireq):
+ if (
+ ireq.req
+ and (getattr(ireq.req, "key", None) or getattr(ireq.req, "name", None))
+ and ireq.specifier
+ ):
+ return key_from_ireq(ireq)
+ return str(ireq.link)
+ return key_from_ireq(ireq)
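+
+
+# Illustrative sketch (URLs hypothetical): "https://example.org/foo.zip#egg=foo==1.0"
+# keys as "foo" (name and specifier are available), while "https://example.org/foo.zip"
+# keys as the link string itself, so a changed URL never matches an installed
+# distribution and the package is reinstalled.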
+
+
+def diff(compiled_requirements, installed_dists):
+ """
+ Calculate which packages should be installed or uninstalled, given a set
+ of compiled requirements and a list of currently installed modules.
+ """
+ requirements_lut = {diff_key_from_ireq(r): r for r in compiled_requirements}
+
+ satisfied = set() # holds keys
+ to_install = set() # holds InstallRequirement objects
+ to_uninstall = set() # holds keys
+
+ pkgs_to_ignore = get_dists_to_ignore(installed_dists)
+ for dist in installed_dists:
+ key = key_from_req(dist)
+ if key not in requirements_lut or not requirements_lut[key].match_markers():
+ to_uninstall.add(key)
+ elif requirements_lut[key].specifier.contains(dist.version):
+ satisfied.add(key)
+
+ for key, requirement in requirements_lut.items():
+ if key not in satisfied and requirement.match_markers():
+ to_install.add(requirement)
+
+ # Make sure to not uninstall any packages that should be ignored
+ to_uninstall -= set(pkgs_to_ignore)
+
+ return (to_install, to_uninstall)
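+
+
+# Illustrative sketch (names hypothetical): with compiled requirements foo==1.0 and
+# bar==2.0 and an environment containing foo 1.0 and baz 3.0, foo is already
+# satisfied, bar ends up in to_install and baz in to_uninstall (unless baz is part
+# of the ignore list computed by get_dists_to_ignore).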
+
+
+def sync(
+ to_install,
+ to_uninstall,
+ verbose=False,
+ dry_run=False,
+ install_flags=None,
+ ask=False,
+):
+ """
+    Install and uninstall the given sets of modules.
+ """
+ exit_code = 0
+
+ if not to_uninstall and not to_install:
+ if verbose:
+ click.echo("Everything up-to-date")
+ return exit_code
+
+ pip_flags = []
+ if not verbose:
+ pip_flags += ["-q"]
+
+ if ask:
+ dry_run = True
+
+ if dry_run:
+ if to_uninstall:
+ click.echo("Would uninstall:")
+ for pkg in sorted(to_uninstall):
+ click.echo(" {}".format(pkg))
+
+ if to_install:
+ click.echo("Would install:")
+ for ireq in sorted(to_install, key=key_from_ireq):
+ click.echo(" {}".format(format_requirement(ireq)))
+
+ exit_code = 1
+
+ if ask and click.confirm("Would you like to proceed with these changes?"):
+ dry_run = False
+ exit_code = 0
+
+ if not dry_run:
+ if to_uninstall:
+ check_call( # nosec
+ [sys.executable, "-m", "pip", "uninstall", "-y"]
+ + pip_flags
+ + sorted(to_uninstall)
+ )
+
+ if to_install:
+ if install_flags is None:
+ install_flags = []
+ # prepare requirement lines
+ req_lines = []
+ for ireq in sorted(to_install, key=key_from_ireq):
+ ireq_hashes = get_hashes_from_ireq(ireq)
+ req_lines.append(format_requirement(ireq, hashes=ireq_hashes))
+
+ # save requirement lines to a temporary file
+ tmp_req_file = tempfile.NamedTemporaryFile(mode="wt", delete=False)
+ tmp_req_file.write("\n".join(req_lines))
+ tmp_req_file.close()
+
+ try:
+ check_call( # nosec
+ [sys.executable, "-m", "pip", "install", "-r", tmp_req_file.name]
+ + pip_flags
+ + install_flags
+ )
+ finally:
+ os.unlink(tmp_req_file.name)
+
+ return exit_code
diff --git a/third_party/python/pip-tools/piptools/utils.py b/third_party/python/pip-tools/piptools/utils.py
new file mode 100644
index 0000000000..b0eca76a6e
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/utils.py
@@ -0,0 +1,388 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import sys
+from collections import OrderedDict
+from itertools import chain
+
+import six
+from click.utils import LazyFile
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.utils.misc import redact_auth_from_url
+from pip._internal.vcs import is_url
+from six.moves import shlex_quote
+
+from ._compat import PIP_VERSION
+from .click import style
+
+UNSAFE_PACKAGES = {"setuptools", "distribute", "pip"}
+COMPILE_EXCLUDE_OPTIONS = {
+ "--dry-run",
+ "--quiet",
+ "--rebuild",
+ "--upgrade",
+ "--upgrade-package",
+ "--verbose",
+ "--cache-dir",
+}
+
+
+def key_from_ireq(ireq):
+ """Get a standardized key for an InstallRequirement."""
+ if ireq.req is None and ireq.link is not None:
+ return str(ireq.link)
+ else:
+ return key_from_req(ireq.req)
+
+
+def key_from_req(req):
+ """Get an all-lowercase version of the requirement's name."""
+ if hasattr(req, "key"):
+ # from pkg_resources, such as installed dists for pip-sync
+ key = req.key
+ else:
+ # from packaging, such as install requirements from requirements.txt
+ key = req.name
+
+ key = key.replace("_", "-").lower()
+ return key
+
+
+def comment(text):
+ return style(text, fg="green")
+
+
+def make_install_requirement(name, version, extras, constraint=False):
+ # If no extras are specified, the extras string is blank
+ extras_string = ""
+ if extras:
+ # Sort extras for stability
+ extras_string = "[{}]".format(",".join(sorted(extras)))
+
+ return install_req_from_line(
+ str("{}{}=={}".format(name, extras_string, version)), constraint=constraint
+ )
+
+
+def is_url_requirement(ireq):
+ """
+ Return True if requirement was specified as a path or URL.
+ ireq.original_link will have been set by InstallRequirement.__init__
+ """
+ return bool(ireq.original_link)
+
+
+def format_requirement(ireq, marker=None, hashes=None):
+ """
+ Generic formatter for pretty printing InstallRequirements to the terminal
+ in a less verbose way than using its `__str__` method.
+ """
+ if ireq.editable:
+ line = "-e {}".format(ireq.link.url)
+ elif is_url_requirement(ireq):
+ line = ireq.link.url
+ else:
+ line = str(ireq.req).lower()
+
+ if marker:
+ line = "{} ; {}".format(line, marker)
+
+ if hashes:
+ for hash_ in sorted(hashes):
+ line += " \\\n --hash={}".format(hash_)
+
+ return line
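+
+
+# Illustrative sketch of the formatting (values hypothetical):
+#   pinned requirement    -> "foo==1.0"
+#   with a marker         -> "foo==1.0 ; python_version < '3.8'"
+#   editable requirement  -> "-e git+https://example.org/foo.git#egg=foo"
+#   with hashes, each hash is appended on its own backslash-continued "--hash=..." line.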
+
+
+def format_specifier(ireq):
+ """
+ Generic formatter for pretty printing the specifier part of
+ InstallRequirements to the terminal.
+ """
+ # TODO: Ideally, this is carried over to the pip library itself
+ specs = ireq.specifier._specs if ireq.req is not None else []
+ specs = sorted(specs, key=lambda x: x._spec[1])
+ return ",".join(str(s) for s in specs) or "<any>"
+
+
+def is_pinned_requirement(ireq):
+ """
+ Returns whether an InstallRequirement is a "pinned" requirement.
+
+ An InstallRequirement is considered pinned if:
+
+    - It is not editable
+ - It has exactly one specifier
+ - That specifier is "=="
+ - The version does not contain a wildcard
+
+ Examples:
+ django==1.8 # pinned
+ django>1.8 # NOT pinned
+ django~=1.8 # NOT pinned
+ django==1.* # NOT pinned
+ """
+ if ireq.editable:
+ return False
+
+ if ireq.req is None or len(ireq.specifier._specs) != 1:
+ return False
+
+ op, version = next(iter(ireq.specifier._specs))._spec
+ return (op == "==" or op == "===") and not version.endswith(".*")
+
+
+def as_tuple(ireq):
+ """
+    Pulls out the (name: str, version: str, extras: (str)) tuple from
+ the pinned InstallRequirement.
+ """
+ if not is_pinned_requirement(ireq):
+ raise TypeError("Expected a pinned InstallRequirement, got {}".format(ireq))
+
+ name = key_from_ireq(ireq)
+ version = next(iter(ireq.specifier._specs))._spec[1]
+ extras = tuple(sorted(ireq.extras))
+ return name, version, extras
+
+
+def flat_map(fn, collection):
+ """Map a function over a collection and flatten the result by one-level"""
+ return chain.from_iterable(map(fn, collection))
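+
+
+# Illustrative sketch: flat_map(lambda x: [x, x * 10], [1, 2]) yields 1, 10, 2, 20
+# as a lazy iterator, flattened by exactly one level.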
+
+
+def lookup_table(values, key=None, keyval=None, unique=False, use_lists=False):
+ """
+ Builds a dict-based lookup table (index) elegantly.
+
+ Supports building normal and unique lookup tables. For example:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0]) == {
+ ... 'b': {'bar', 'baz'},
+ ... 'f': {'foo'},
+ ... 'q': {'quux', 'qux'}
+ ... }
+
+ For key functions that uniquely identify values, set unique=True:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],
+ ... unique=True) == {
+ ... 'b': 'baz',
+ ... 'f': 'foo',
+ ... 'q': 'quux'
+ ... }
+
+    To have the values represented as lists, set use_lists=True:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],
+ ... use_lists=True) == {
+ ... 'b': ['bar', 'baz'],
+ ... 'f': ['foo'],
+ ... 'q': ['qux', 'quux']
+ ... }
+
+ The values of the resulting lookup table will be lists, not sets.
+
+ For extra power, you can even change the values while building up the LUT.
+ To do so, use the `keyval` function instead of the `key` arg:
+
+ >>> assert lookup_table(
+ ... ['foo', 'bar', 'baz', 'qux', 'quux'],
+ ... keyval=lambda s: (s[0], s[1:])) == {
+ ... 'b': {'ar', 'az'},
+ ... 'f': {'oo'},
+ ... 'q': {'uux', 'ux'}
+ ... }
+
+ """
+ if keyval is None:
+ if key is None:
+
+ def keyval(v):
+ return v
+
+ else:
+
+ def keyval(v):
+ return (key(v), v)
+
+ if unique:
+ return dict(keyval(v) for v in values)
+
+ lut = {}
+ for value in values:
+ k, v = keyval(value)
+ try:
+ s = lut[k]
+ except KeyError:
+ if use_lists:
+ s = lut[k] = list()
+ else:
+ s = lut[k] = set()
+ if use_lists:
+ s.append(v)
+ else:
+ s.add(v)
+ return dict(lut)
+
+
+def dedup(iterable):
+    """Deduplicate an iterable object like iter(set(iterable)) but
+    preserve the original order.
+ """
+ return iter(OrderedDict.fromkeys(iterable))
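+
+
+# Illustrative sketch: list(dedup(["b", "a", "b", "c"])) == ["b", "a", "c"];
+# the first occurrence wins and the original order is preserved.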
+
+
+def name_from_req(req):
+ """Get the name of the requirement"""
+ if hasattr(req, "project_name"):
+ # from pkg_resources, such as installed dists for pip-sync
+ return req.project_name
+ else:
+ # from packaging, such as install requirements from requirements.txt
+ return req.name
+
+
+def fs_str(string):
+ """
+ Convert given string to a correctly encoded filesystem string.
+
+ On Python 2, if the input string is unicode, converts it to bytes
+ encoded with the filesystem encoding.
+
+ On Python 3 returns the string as is, since Python 3 uses unicode
+ paths and the input string shouldn't be bytes.
+
+ :type string: str|unicode
+ :rtype: str
+ """
+ if isinstance(string, str):
+ return string
+ if isinstance(string, bytes):
+ raise AssertionError
+ return string.encode(_fs_encoding)
+
+
+_fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
+
+
+def get_hashes_from_ireq(ireq):
+ """
+ Given an InstallRequirement, return a list of string hashes in
+ the format "{algorithm}:{hash}". Return an empty list if there are no hashes
+ in the requirement options.
+ """
+ result = []
+ if PIP_VERSION[:2] <= (20, 0):
+ ireq_hashes = ireq.options.get("hashes", {})
+ else:
+ ireq_hashes = ireq.hash_options
+ for algorithm, hexdigests in ireq_hashes.items():
+ for hash_ in hexdigests:
+ result.append("{}:{}".format(algorithm, hash_))
+ return result
+
+
+def force_text(s):
+ """
+ Return a string representing `s`.
+ """
+ if s is None:
+ return ""
+ if not isinstance(s, six.string_types):
+ return six.text_type(s)
+ return s
+
+
+def get_compile_command(click_ctx):
+ """
+ Returns a normalized compile command depending on cli context.
+
+ The command will be normalized by:
+    - expanding short options to their long form
+ - removing values that are already default
+ - sorting the arguments
+ - removing one-off arguments like '--upgrade'
+ - removing arguments that don't change build behaviour like '--verbose'
+ """
+ from piptools.scripts.compile import cli
+
+ # Map of the compile cli options (option name -> click.Option)
+ compile_options = {option.name: option for option in cli.params}
+
+ left_args = []
+ right_args = []
+
+ for option_name, value in click_ctx.params.items():
+ option = compile_options[option_name]
+
+ # Get the latest option name (usually it'll be a long name)
+ option_long_name = option.opts[-1]
+
+ # Collect variadic args separately, they will be added
+ # at the end of the command later
+ if option.nargs < 0:
+ # These will necessarily be src_files
+ # Re-add click-stripped '--' if any start with '-'
+ if any(val.startswith("-") and val != "-" for val in value):
+ right_args.append("--")
+ right_args.extend([shlex_quote(force_text(val)) for val in value])
+ continue
+
+ # Exclude one-off options (--upgrade/--upgrade-package/--rebuild/...)
+ # or options that don't change compile behaviour (--verbose/--dry-run/...)
+ if option_long_name in COMPILE_EXCLUDE_OPTIONS:
+ continue
+
+ # Skip options without a value
+ if option.default is None and not value:
+ continue
+
+ # Skip options with a default value
+ if option.default == value:
+ continue
+
+ # Use a file name for file-like objects
+ if isinstance(value, LazyFile):
+ value = value.name
+
+ # Convert value to the list
+ if not isinstance(value, (tuple, list)):
+ value = [value]
+
+ for val in value:
+            # Flags don't carry a value, so append the true or the false option long name to args
+ if option.is_flag:
+ # If there are false-options, choose an option name depending on a value
+ if option.secondary_opts:
+ # Get the latest false-option
+ secondary_option_long_name = option.secondary_opts[-1]
+ arg = option_long_name if val else secondary_option_long_name
+ # There are no false-options, use true-option
+ else:
+ arg = option_long_name
+ left_args.append(shlex_quote(arg))
+ # Append to args the option with a value
+ else:
+ if isinstance(val, six.string_types) and is_url(val):
+ val = redact_auth_from_url(val)
+ if option.name == "pip_args":
+ # shlex_quote would produce functional but noisily quoted results,
+ # e.g. --pip-args='--cache-dir='"'"'/tmp/with spaces'"'"''
+ # Instead, we try to get more legible quoting via repr:
+ left_args.append(
+ "{option}={value}".format(
+ option=option_long_name, value=repr(fs_str(force_text(val)))
+ )
+ )
+ else:
+ left_args.append(
+ "{option}={value}".format(
+ option=option_long_name, value=shlex_quote(force_text(val))
+ )
+ )
+
+ return " ".join(["pip-compile"] + sorted(left_args) + sorted(right_args))
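+
+
+# Illustrative sketch (flags hypothetical): an invocation such as
+#   pip-compile --generate-hashes -o requirements.txt requirements.in
+# is normalized to something like
+#   pip-compile --generate-hashes --output-file=requirements.txt requirements.in
+# with excluded one-off options (e.g. --upgrade, --rebuild) dropped, short options
+# expanded to their long form, and the remaining options sorted.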
diff --git a/third_party/python/pip-tools/piptools/writer.py b/third_party/python/pip-tools/piptools/writer.py
new file mode 100644
index 0000000000..3bb2325fc1
--- /dev/null
+++ b/third_party/python/pip-tools/piptools/writer.py
@@ -0,0 +1,239 @@
+from __future__ import unicode_literals
+
+import os
+import re
+from itertools import chain
+
+import six
+
+from .click import unstyle
+from .logging import log
+from .utils import (
+ UNSAFE_PACKAGES,
+ comment,
+ dedup,
+ format_requirement,
+ get_compile_command,
+ key_from_ireq,
+)
+
+MESSAGE_UNHASHED_PACKAGE = comment(
+ "# WARNING: pip install will require the following package to be hashed."
+ "\n# Consider using a hashable URL like "
+ "https://github.com/jazzband/pip-tools/archive/SOMECOMMIT.zip"
+)
+
+MESSAGE_UNSAFE_PACKAGES_UNPINNED = comment(
+ "# WARNING: The following packages were not pinned, but pip requires them to be"
+ "\n# pinned when the requirements file includes hashes. "
+ "Consider using the --allow-unsafe flag."
+)
+
+MESSAGE_UNSAFE_PACKAGES = comment(
+ "# The following packages are considered to be unsafe in a requirements file:"
+)
+
+MESSAGE_UNINSTALLABLE = (
+ "The generated requirements file may be rejected by pip install. "
+ "See # WARNING lines for details."
+)
+
+
+strip_comes_from_line_re = re.compile(r" \(line \d+\)$")
+
+
+def _comes_from_as_string(ireq):
+ if isinstance(ireq.comes_from, six.string_types):
+ return strip_comes_from_line_re.sub("", ireq.comes_from)
+ return key_from_ireq(ireq.comes_from)
+
+
+class OutputWriter(object):
+ def __init__(
+ self,
+ src_files,
+ dst_file,
+ click_ctx,
+ dry_run,
+ emit_header,
+ emit_index_url,
+ emit_trusted_host,
+ annotate,
+ generate_hashes,
+ default_index_url,
+ index_urls,
+ trusted_hosts,
+ format_control,
+ allow_unsafe,
+ find_links,
+ emit_find_links,
+ ):
+ self.src_files = src_files
+ self.dst_file = dst_file
+ self.click_ctx = click_ctx
+ self.dry_run = dry_run
+ self.emit_header = emit_header
+ self.emit_index_url = emit_index_url
+ self.emit_trusted_host = emit_trusted_host
+ self.annotate = annotate
+ self.generate_hashes = generate_hashes
+ self.default_index_url = default_index_url
+ self.index_urls = index_urls
+ self.trusted_hosts = trusted_hosts
+ self.format_control = format_control
+ self.allow_unsafe = allow_unsafe
+ self.find_links = find_links
+ self.emit_find_links = emit_find_links
+
+ def _sort_key(self, ireq):
+ return (not ireq.editable, str(ireq.req).lower())
+
+ def write_header(self):
+ if self.emit_header:
+ yield comment("#")
+ yield comment("# This file is autogenerated by pip-compile")
+ yield comment("# To update, run:")
+ yield comment("#")
+ compile_command = os.environ.get(
+ "CUSTOM_COMPILE_COMMAND"
+ ) or get_compile_command(self.click_ctx)
+ yield comment("# {}".format(compile_command))
+ yield comment("#")
+
+ def write_index_options(self):
+ if self.emit_index_url:
+ for index, index_url in enumerate(dedup(self.index_urls)):
+ if index_url.rstrip("/") == self.default_index_url:
+ continue
+ flag = "--index-url" if index == 0 else "--extra-index-url"
+ yield "{} {}".format(flag, index_url)
+
+ def write_trusted_hosts(self):
+ if self.emit_trusted_host:
+ for trusted_host in dedup(self.trusted_hosts):
+ yield "--trusted-host {}".format(trusted_host)
+
+ def write_format_controls(self):
+ for nb in dedup(sorted(self.format_control.no_binary)):
+ yield "--no-binary {}".format(nb)
+ for ob in dedup(sorted(self.format_control.only_binary)):
+ yield "--only-binary {}".format(ob)
+
+ def write_find_links(self):
+ if self.emit_find_links:
+ for find_link in dedup(self.find_links):
+ yield "--find-links {}".format(find_link)
+
+ def write_flags(self):
+ emitted = False
+ for line in chain(
+ self.write_index_options(),
+ self.write_find_links(),
+ self.write_trusted_hosts(),
+ self.write_format_controls(),
+ ):
+ emitted = True
+ yield line
+ if emitted:
+ yield ""
+
+ def _iter_lines(self, results, unsafe_requirements=None, markers=None, hashes=None):
+ # default values
+ unsafe_requirements = unsafe_requirements or []
+ markers = markers or {}
+ hashes = hashes or {}
+
+ # Check for unhashed or unpinned packages if at least one package does have
+ # hashes, which will trigger pip install's --require-hashes mode.
+ warn_uninstallable = False
+ has_hashes = hashes and any(hash for hash in hashes.values())
+
+ yielded = False
+
+ for line in self.write_header():
+ yield line
+ yielded = True
+ for line in self.write_flags():
+ yield line
+ yielded = True
+
+ unsafe_requirements = (
+ {r for r in results if r.name in UNSAFE_PACKAGES}
+ if not unsafe_requirements
+ else unsafe_requirements
+ )
+ packages = {r for r in results if r.name not in UNSAFE_PACKAGES}
+
+ if packages:
+ packages = sorted(packages, key=self._sort_key)
+ for ireq in packages:
+ if has_hashes and not hashes.get(ireq):
+ yield MESSAGE_UNHASHED_PACKAGE
+ warn_uninstallable = True
+ line = self._format_requirement(
+ ireq, markers.get(key_from_ireq(ireq)), hashes=hashes
+ )
+ yield line
+ yielded = True
+
+ if unsafe_requirements:
+ unsafe_requirements = sorted(unsafe_requirements, key=self._sort_key)
+ yield ""
+ yielded = True
+ if has_hashes and not self.allow_unsafe:
+ yield MESSAGE_UNSAFE_PACKAGES_UNPINNED
+ warn_uninstallable = True
+ else:
+ yield MESSAGE_UNSAFE_PACKAGES
+
+ for ireq in unsafe_requirements:
+ ireq_key = key_from_ireq(ireq)
+ if not self.allow_unsafe:
+ yield comment("# {}".format(ireq_key))
+ else:
+ line = self._format_requirement(
+ ireq, marker=markers.get(ireq_key), hashes=hashes
+ )
+ yield line
+
+ # Yield even when there's no real content, so that blank files are written
+ if not yielded:
+ yield ""
+
+ if warn_uninstallable:
+ log.warning(MESSAGE_UNINSTALLABLE)
+
+ def write(self, results, unsafe_requirements, markers, hashes):
+
+ for line in self._iter_lines(results, unsafe_requirements, markers, hashes):
+ log.info(line)
+ if not self.dry_run:
+ self.dst_file.write(unstyle(line).encode("utf-8"))
+ self.dst_file.write(os.linesep.encode("utf-8"))
+
+ def _format_requirement(self, ireq, marker=None, hashes=None):
+ ireq_hashes = (hashes if hashes is not None else {}).get(ireq)
+
+ line = format_requirement(ireq, marker=marker, hashes=ireq_hashes)
+
+ if not self.annotate:
+ return line
+
+        # Annotate which packages or requirements (.in) files this package is required by
+ required_by = set()
+ if hasattr(ireq, "_source_ireqs"):
+ required_by |= {
+ _comes_from_as_string(src_ireq)
+ for src_ireq in ireq._source_ireqs
+ if src_ireq.comes_from
+ }
+ elif ireq.comes_from:
+ required_by.add(_comes_from_as_string(ireq))
+ if required_by:
+ annotation = ", ".join(sorted(required_by))
+ line = "{:24}{}{}".format(
+ line,
+ " \\\n " if ireq_hashes else " ",
+ comment("# via " + annotation),
+ )
+ return line
diff --git a/third_party/python/pip-tools/setup.cfg b/third_party/python/pip-tools/setup.cfg
new file mode 100644
index 0000000000..3c597b0632
--- /dev/null
+++ b/third_party/python/pip-tools/setup.cfg
@@ -0,0 +1,85 @@
+[metadata]
+name = pip-tools
+url = https://github.com/jazzband/pip-tools/
+license = BSD
+author = Vincent Driessen
+author_email = me@nvie.com
+description = pip-tools keeps your pinned dependencies fresh.
+long_description = file: README.rst
+classifiers =
+ Development Status :: 5 - Production/Stable
+ Intended Audience :: Developers
+ Intended Audience :: System Administrators
+ License :: OSI Approved :: BSD License
+ Operating System :: OS Independent
+ Programming Language :: Python
+ Programming Language :: Python :: 2
+ Programming Language :: Python :: 2.7
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3.5
+ Programming Language :: Python :: 3.6
+ Programming Language :: Python :: 3.7
+ Programming Language :: Python :: 3.8
+ Programming Language :: Python :: Implementation :: CPython
+ Programming Language :: Python :: Implementation :: PyPy
+ Topic :: System :: Systems Administration
+
+[options]
+python_requires = >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
+setup_requires = setuptools_scm
+packages = find:
+zip_safe = false
+install_requires =
+ click >= 7
+ six
+ pip >= 20.0
+
+[options.packages.find]
+exclude = tests
+
+[options.extras_require]
+testing =
+ mock
+ pytest
+ pytest-rerunfailures
+coverage = pytest-cov
+
+[options.entry_points]
+console_scripts =
+ pip-compile = piptools.scripts.compile:cli
+ pip-sync = piptools.scripts.sync:cli
+
+[bdist_wheel]
+universal = 1
+
+[tool:pytest]
+norecursedirs = .* build dist venv test_data piptools/_compat/*
+testpaths = tests piptools
+filterwarnings =
+ ignore::PendingDeprecationWarning:pip\._vendor.+
+ ignore::DeprecationWarning:pip\._vendor.+
+markers =
+ network: mark tests that require internet access
+
+[flake8]
+max-line-length = 88
+exclude = build/*, dist/*, pip_tools.egg-info/*, piptools/_compat/*, .tox/*, .venv/*, .git/*, .eggs/*
+extend-ignore = E203 # E203 conflicts with PEP8; see https://github.com/psf/black#slices
+pytest-fixture-no-parentheses = true
+pytest-parametrize-names-type = tuple
+pytest-parametrize-values-type = tuple
+pytest-parametrize-values-row-type = tuple
+
+[isort]
+combine_as_imports = True
+forced_separate = piptools
+include_trailing_comma = True
+line_length = 88
+multi_line_output = 3
+default_section = THIRDPARTY
+known_first_party = piptools, tests, examples
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/pip-tools/setup.py b/third_party/python/pip-tools/setup.py
new file mode 100644
index 0000000000..d5d43d7c93
--- /dev/null
+++ b/third_party/python/pip-tools/setup.py
@@ -0,0 +1,3 @@
+from setuptools import setup
+
+setup(use_scm_version=True)
diff --git a/third_party/python/pip-tools/tox.ini b/third_party/python/pip-tools/tox.ini
new file mode 100644
index 0000000000..f0439eaf70
--- /dev/null
+++ b/third_party/python/pip-tools/tox.ini
@@ -0,0 +1,52 @@
+[tox]
+envlist =
+ # NOTE: keep this in sync with the env list in .travis.yml for tox-travis.
+ py{27,35,36,37,38,39,py,py3}-pip{20.0,20.1,20.2,latest,master}-coverage
+ checkqa
+ readme
+skip_missing_interpreters = True
+
+[testenv]
+extras =
+ testing
+ coverage: coverage
+deps =
+ pipmaster: -e git+https://github.com/pypa/pip.git@master#egg=pip
+; TODO: remove all 20.0 mentions after pip 20.2 is released
+ pip20.0: pip==20.0.*
+ pip20.1: pip==20.1.*
+ pip20.2: pip==20.2.*
+setenv =
+ piplatest: PIP=latest
+ pipmaster: PIP=master
+ pip20.0: PIP==20.0
+ pip20.1: PIP==20.1
+ pip20.2: PIP==20.2
+
+ coverage: PYTEST_ADDOPTS=--strict --doctest-modules --cov --cov-report=term-missing --cov-report=xml {env:PYTEST_ADDOPTS:}
+commands_pre =
+ piplatest: python -m pip install -U pip
+ pip --version
+commands = pytest {posargs}
+passenv = CI GITHUB_ACTIONS
+pip_pre=True
+
+[testenv:checkqa]
+basepython = python3
+skip_install = True
+deps = pre-commit
+commands_pre =
+commands = pre-commit run --all-files --show-diff-on-failure
+
+[testenv:readme]
+deps = twine
+commands_pre =
+commands = twine check {distdir}/*
+
+[travis:env]
+PIP =
+ 20.0: pip20.0
+ 20.1: pip20.1
+ 20.2: pip20.2
+ latest: piplatest
+ master: pipmaster
diff --git a/third_party/python/pluggy/LICENSE b/third_party/python/pluggy/LICENSE
new file mode 100644
index 0000000000..121017d086
--- /dev/null
+++ b/third_party/python/pluggy/LICENSE
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 holger krekel (rather uses bitbucket/hpk42)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/third_party/python/pluggy/MANIFEST.in b/third_party/python/pluggy/MANIFEST.in
new file mode 100644
index 0000000000..0cf8f3e088
--- /dev/null
+++ b/third_party/python/pluggy/MANIFEST.in
@@ -0,0 +1,7 @@
+include CHANGELOG
+include README.rst
+include setup.py
+include tox.ini
+include LICENSE
+graft testing
+recursive-exclude * *.pyc *.pyo
diff --git a/third_party/python/pluggy/PKG-INFO b/third_party/python/pluggy/PKG-INFO
new file mode 100644
index 0000000000..6e2f59f1d3
--- /dev/null
+++ b/third_party/python/pluggy/PKG-INFO
@@ -0,0 +1,112 @@
+Metadata-Version: 1.2
+Name: pluggy
+Version: 0.6.0
+Summary: plugin and hook calling mechanisms for python
+Home-page: https://github.com/pytest-dev/pluggy
+Author: Holger Krekel
+Author-email: holger@merlinux.eu
+License: MIT license
+Description-Content-Type: UNKNOWN
+Description: pluggy - A minimalist production ready plugin system
+ ====================================================
+ |pypi| |anaconda| |versions| |travis| |appveyor|
+
+
+ This is the core framework used by the `pytest`_, `tox`_, and `devpi`_ projects.
+
+ Please `read the docs`_ to learn more!
+
+ A definitive example
+ ********************
+ .. code-block:: python
+
+ import pluggy
+
+ hookspec = pluggy.HookspecMarker("myproject")
+ hookimpl = pluggy.HookimplMarker("myproject")
+
+
+ class MySpec(object):
+ """A hook specification namespace.
+ """
+ @hookspec
+ def myhook(self, arg1, arg2):
+ """My special little hook that you can customize.
+ """
+
+
+ class Plugin_1(object):
+ """A hook implementation namespace.
+ """
+ @hookimpl
+ def myhook(self, arg1, arg2):
+ print("inside Plugin_1.myhook()")
+ return arg1 + arg2
+
+
+ class Plugin_2(object):
+ """A 2nd hook implementation namespace.
+ """
+ @hookimpl
+ def myhook(self, arg1, arg2):
+ print("inside Plugin_2.myhook()")
+ return arg1 - arg2
+
+
+ # create a manager and add the spec
+ pm = pluggy.PluginManager("myproject")
+ pm.add_hookspecs(MySpec)
+
+ # register plugins
+ pm.register(Plugin_1())
+ pm.register(Plugin_2())
+
+ # call our `myhook` hook
+ results = pm.hook.myhook(arg1=1, arg2=2)
+ print(results)
+
+
+ .. badges
+ .. |pypi| image:: https://img.shields.io/pypi/v/pluggy.svg
+ :target: https://pypi.python.org/pypi/pluggy
+ .. |versions| image:: https://img.shields.io/pypi/pyversions/pluggy.svg
+ :target: https://pypi.python.org/pypi/pluggy
+ .. |travis| image:: https://img.shields.io/travis/pytest-dev/pluggy/master.svg
+ :target: https://travis-ci.org/pytest-dev/pluggy
+ .. |appveyor| image:: https://img.shields.io/appveyor/ci/pytestbot/pluggy/master.svg
+ :target: https://ci.appveyor.com/project/pytestbot/pluggy
+ .. |anaconda| image:: https://anaconda.org/conda-forge/pluggy/badges/version.svg
+ :target: https://anaconda.org/conda-forge/pluggy
+
+ .. links
+ .. _pytest:
+ http://pytest.org
+ .. _tox:
+ https://tox.readthedocs.org
+ .. _devpi:
+ http://doc.devpi.net
+ .. _read the docs:
+ https://pluggy.readthedocs.io/en/latest/
+
+Platform: unix
+Platform: linux
+Platform: osx
+Platform: win32
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Topic :: Software Development :: Testing
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
diff --git a/third_party/python/pluggy/README.rst b/third_party/python/pluggy/README.rst
new file mode 100644
index 0000000000..3636b6ec64
--- /dev/null
+++ b/third_party/python/pluggy/README.rst
@@ -0,0 +1,80 @@
+pluggy - A minimalist production ready plugin system
+====================================================
+|pypi| |anaconda| |versions| |travis| |appveyor|
+
+
+This is the core framework used by the `pytest`_, `tox`_, and `devpi`_ projects.
+
+Please `read the docs`_ to learn more!
+
+A definitive example
+********************
+.. code-block:: python
+
+ import pluggy
+
+ hookspec = pluggy.HookspecMarker("myproject")
+ hookimpl = pluggy.HookimplMarker("myproject")
+
+
+ class MySpec(object):
+ """A hook specification namespace.
+ """
+ @hookspec
+ def myhook(self, arg1, arg2):
+ """My special little hook that you can customize.
+ """
+
+
+ class Plugin_1(object):
+ """A hook implementation namespace.
+ """
+ @hookimpl
+ def myhook(self, arg1, arg2):
+ print("inside Plugin_1.myhook()")
+ return arg1 + arg2
+
+
+ class Plugin_2(object):
+ """A 2nd hook implementation namespace.
+ """
+ @hookimpl
+ def myhook(self, arg1, arg2):
+ print("inside Plugin_2.myhook()")
+ return arg1 - arg2
+
+
+ # create a manager and add the spec
+ pm = pluggy.PluginManager("myproject")
+ pm.add_hookspecs(MySpec)
+
+ # register plugins
+ pm.register(Plugin_1())
+ pm.register(Plugin_2())
+
+ # call our `myhook` hook
+ results = pm.hook.myhook(arg1=1, arg2=2)
+ print(results)
+
+
+.. badges
+.. |pypi| image:: https://img.shields.io/pypi/v/pluggy.svg
+ :target: https://pypi.python.org/pypi/pluggy
+.. |versions| image:: https://img.shields.io/pypi/pyversions/pluggy.svg
+ :target: https://pypi.python.org/pypi/pluggy
+.. |travis| image:: https://img.shields.io/travis/pytest-dev/pluggy/master.svg
+ :target: https://travis-ci.org/pytest-dev/pluggy
+.. |appveyor| image:: https://img.shields.io/appveyor/ci/pytestbot/pluggy/master.svg
+ :target: https://ci.appveyor.com/project/pytestbot/pluggy
+.. |anaconda| image:: https://anaconda.org/conda-forge/pluggy/badges/version.svg
+ :target: https://anaconda.org/conda-forge/pluggy
+
+.. links
+.. _pytest:
+ http://pytest.org
+.. _tox:
+ https://tox.readthedocs.org
+.. _devpi:
+ http://doc.devpi.net
+.. _read the docs:
+ https://pluggy.readthedocs.io/en/latest/
diff --git a/third_party/python/pluggy/pluggy/__init__.py b/third_party/python/pluggy/pluggy/__init__.py
new file mode 100644
index 0000000000..42d2220be4
--- /dev/null
+++ b/third_party/python/pluggy/pluggy/__init__.py
@@ -0,0 +1,684 @@
+import inspect
+import warnings
+from .callers import _multicall, HookCallError, _Result, _legacymulticall
+
+__version__ = '0.6.0'
+
+__all__ = ["PluginManager", "PluginValidationError", "HookCallError",
+ "HookspecMarker", "HookimplMarker"]
+
+
+class PluginValidationError(Exception):
+ """ plugin failed validation. """
+
+
+class HookspecMarker(object):
+ """ Decorator helper class for marking functions as hook specifications.
+
+ You can instantiate it with a project_name to get a decorator.
+ Calling PluginManager.add_hookspecs later will discover all marked functions
+ if the PluginManager uses the same project_name.
+ """
+
+ def __init__(self, project_name):
+ self.project_name = project_name
+
+ def __call__(self, function=None, firstresult=False, historic=False):
+ """ if passed a function, directly sets attributes on the function
+ which will make it discoverable to add_hookspecs(). If passed no
+ function, returns a decorator which can be applied to a function
+ later using the attributes supplied.
+
+ If firstresult is True the 1:N hook call (N being the number of registered
+ hook implementation functions) will stop at I<=N when the I'th function
+ returns a non-None result.
+
+ If historic is True calls to a hook will be memorized and replayed
+ on later registered plugins.
+
+ """
+ def setattr_hookspec_opts(func):
+ if historic and firstresult:
+ raise ValueError("cannot have a historic firstresult hook")
+ setattr(func, self.project_name + "_spec",
+ dict(firstresult=firstresult, historic=historic))
+ return func
+
+ if function is not None:
+ return setattr_hookspec_opts(function)
+ else:
+ return setattr_hookspec_opts
+
+
+class HookimplMarker(object):
+ """ Decorator helper class for marking functions as hook implementations.
+
+    You can instantiate it with a project_name to get a decorator.
+ Calling PluginManager.register later will discover all marked functions
+ if the PluginManager uses the same project_name.
+ """
+ def __init__(self, project_name):
+ self.project_name = project_name
+
+ def __call__(self, function=None, hookwrapper=False, optionalhook=False,
+ tryfirst=False, trylast=False):
+
+ """ if passed a function, directly sets attributes on the function
+ which will make it discoverable to register(). If passed no function,
+ returns a decorator which can be applied to a function later using
+ the attributes supplied.
+
+ If optionalhook is True a missing matching hook specification will not result
+ in an error (by default it is an error if no matching spec is found).
+
+ If tryfirst is True this hook implementation will run as early as possible
+    in the chain of N hook implementations for a specification.
+
+ If trylast is True this hook implementation will run as late as possible
+ in the chain of N hook implementations.
+
+    If hookwrapper is True the hook implementation needs to execute exactly
+    one "yield". The code before the yield is run early, before any non-hookwrapper
+    function is run. The code after the yield is run after all non-hookwrapper
+    functions have run. The yield receives a ``_Result`` object representing
+ the exception or result outcome of the inner calls (including other hookwrapper
+ calls).
+
+ """
+ def setattr_hookimpl_opts(func):
+ setattr(func, self.project_name + "_impl",
+ dict(hookwrapper=hookwrapper, optionalhook=optionalhook,
+ tryfirst=tryfirst, trylast=trylast))
+ return func
+
+ if function is None:
+ return setattr_hookimpl_opts
+ else:
+ return setattr_hookimpl_opts(function)
+
+
+def normalize_hookimpl_opts(opts):
+ opts.setdefault("tryfirst", False)
+ opts.setdefault("trylast", False)
+ opts.setdefault("hookwrapper", False)
+ opts.setdefault("optionalhook", False)
+
+
+class _TagTracer(object):
+ def __init__(self):
+ self._tag2proc = {}
+ self.writer = None
+ self.indent = 0
+
+ def get(self, name):
+ return _TagTracerSub(self, (name,))
+
+ def format_message(self, tags, args):
+ if isinstance(args[-1], dict):
+ extra = args[-1]
+ args = args[:-1]
+ else:
+ extra = {}
+
+ content = " ".join(map(str, args))
+ indent = " " * self.indent
+
+ lines = [
+ "%s%s [%s]\n" % (indent, content, ":".join(tags))
+ ]
+
+ for name, value in extra.items():
+ lines.append("%s %s: %s\n" % (indent, name, value))
+ return lines
+
+ def processmessage(self, tags, args):
+ if self.writer is not None and args:
+ lines = self.format_message(tags, args)
+ self.writer(''.join(lines))
+ try:
+ self._tag2proc[tags](tags, args)
+ except KeyError:
+ pass
+
+ def setwriter(self, writer):
+ self.writer = writer
+
+ def setprocessor(self, tags, processor):
+ if isinstance(tags, str):
+ tags = tuple(tags.split(":"))
+ else:
+ assert isinstance(tags, tuple)
+ self._tag2proc[tags] = processor
+
+
+class _TagTracerSub(object):
+ def __init__(self, root, tags):
+ self.root = root
+ self.tags = tags
+
+ def __call__(self, *args):
+ self.root.processmessage(self.tags, args)
+
+ def setmyprocessor(self, processor):
+ self.root.setprocessor(self.tags, processor)
+
+ def get(self, name):
+ return self.__class__(self.root, self.tags + (name,))
+
+
+class _TracedHookExecution(object):
+ def __init__(self, pluginmanager, before, after):
+ self.pluginmanager = pluginmanager
+ self.before = before
+ self.after = after
+ self.oldcall = pluginmanager._inner_hookexec
+ assert not isinstance(self.oldcall, _TracedHookExecution)
+ self.pluginmanager._inner_hookexec = self
+
+ def __call__(self, hook, hook_impls, kwargs):
+ self.before(hook.name, hook_impls, kwargs)
+ outcome = _Result.from_call(lambda: self.oldcall(hook, hook_impls, kwargs))
+ self.after(outcome, hook.name, hook_impls, kwargs)
+ return outcome.get_result()
+
+ def undo(self):
+ self.pluginmanager._inner_hookexec = self.oldcall
+
+
+class PluginManager(object):
+ """ Core Pluginmanager class which manages registration
+ of plugin objects and 1:N hook calling.
+
+    You can register new hooks by calling ``add_hookspecs(module_or_class)``.
+ You can register plugin objects (which contain hooks) by calling
+ ``register(plugin)``. The Pluginmanager is initialized with a
+ prefix that is searched for in the names of the dict of registered
+    plugin objects. An optional excludefunc allows blacklisting names so that
+    they are not considered as hooks despite a matching prefix.
+
+ For debugging purposes you can call ``enable_tracing()``
+ which will subsequently send debug information to the trace helper.
+ """
+
+ def __init__(self, project_name, implprefix=None):
+ """ if implprefix is given implementation functions
+ will be recognized if their name matches the implprefix. """
+ self.project_name = project_name
+ self._name2plugin = {}
+ self._plugin2hookcallers = {}
+ self._plugin_distinfo = []
+ self.trace = _TagTracer().get("pluginmanage")
+ self.hook = _HookRelay(self.trace.root.get("hook"))
+ self._implprefix = implprefix
+ self._inner_hookexec = lambda hook, methods, kwargs: \
+ hook.multicall(
+ methods, kwargs,
+ firstresult=hook.spec_opts.get('firstresult'),
+ )
+
+ def _hookexec(self, hook, methods, kwargs):
+ # called from all hookcaller instances.
+ # enable_tracing will set its own wrapping function at self._inner_hookexec
+ return self._inner_hookexec(hook, methods, kwargs)
+
+ def register(self, plugin, name=None):
+ """ Register a plugin and return its canonical name or None if the name
+ is blocked from registering. Raise a ValueError if the plugin is already
+ registered. """
+ plugin_name = name or self.get_canonical_name(plugin)
+
+ if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
+ if self._name2plugin.get(plugin_name, -1) is None:
+ return # blocked plugin, return None to indicate no registration
+ raise ValueError("Plugin already registered: %s=%s\n%s" %
+ (plugin_name, plugin, self._name2plugin))
+
+ # XXX if an error happens we should make sure no state has been
+ # changed at point of return
+ self._name2plugin[plugin_name] = plugin
+
+ # register matching hook implementations of the plugin
+ self._plugin2hookcallers[plugin] = hookcallers = []
+ for name in dir(plugin):
+ hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
+ if hookimpl_opts is not None:
+ normalize_hookimpl_opts(hookimpl_opts)
+ method = getattr(plugin, name)
+ hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
+ hook = getattr(self.hook, name, None)
+ if hook is None:
+ hook = _HookCaller(name, self._hookexec)
+ setattr(self.hook, name, hook)
+ elif hook.has_spec():
+ self._verify_hook(hook, hookimpl)
+ hook._maybe_apply_history(hookimpl)
+ hook._add_hookimpl(hookimpl)
+ hookcallers.append(hook)
+ return plugin_name
+
+ def parse_hookimpl_opts(self, plugin, name):
+ method = getattr(plugin, name)
+ if not inspect.isroutine(method):
+ return
+ try:
+ res = getattr(method, self.project_name + "_impl", None)
+ except Exception:
+ res = {}
+ if res is not None and not isinstance(res, dict):
+ # false positive
+ res = None
+ elif res is None and self._implprefix and name.startswith(self._implprefix):
+ res = {}
+ return res
+
+ def unregister(self, plugin=None, name=None):
+ """ unregister a plugin object and all its contained hook implementations
+ from internal data structures. """
+ if name is None:
+ assert plugin is not None, "one of name or plugin needs to be specified"
+ name = self.get_name(plugin)
+
+ if plugin is None:
+ plugin = self.get_plugin(name)
+
+ # if self._name2plugin[name] == None registration was blocked: ignore
+ if self._name2plugin.get(name):
+ del self._name2plugin[name]
+
+ for hookcaller in self._plugin2hookcallers.pop(plugin, []):
+ hookcaller._remove_plugin(plugin)
+
+ return plugin
+
+ def set_blocked(self, name):
+ """ block registrations of the given name, unregister if already registered. """
+ self.unregister(name=name)
+ self._name2plugin[name] = None
+
+ def is_blocked(self, name):
+        """ return True if the given name blocks registering plugins of that name. """
+ return name in self._name2plugin and self._name2plugin[name] is None
+
+ def add_hookspecs(self, module_or_class):
+ """ add new hook specifications defined in the given module_or_class.
+ Functions are recognized if they have been decorated accordingly. """
+ names = []
+ for name in dir(module_or_class):
+ spec_opts = self.parse_hookspec_opts(module_or_class, name)
+ if spec_opts is not None:
+ hc = getattr(self.hook, name, None)
+ if hc is None:
+ hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
+ setattr(self.hook, name, hc)
+ else:
+ # plugins registered this hook without knowing the spec
+ hc.set_specification(module_or_class, spec_opts)
+ for hookfunction in (hc._wrappers + hc._nonwrappers):
+ self._verify_hook(hc, hookfunction)
+ names.append(name)
+
+ if not names:
+ raise ValueError("did not find any %r hooks in %r" %
+ (self.project_name, module_or_class))
+
+ def parse_hookspec_opts(self, module_or_class, name):
+ method = getattr(module_or_class, name)
+ return getattr(method, self.project_name + "_spec", None)
+
+ def get_plugins(self):
+ """ return the set of registered plugins. """
+ return set(self._plugin2hookcallers)
+
+ def is_registered(self, plugin):
+ """ Return True if the plugin is already registered. """
+ return plugin in self._plugin2hookcallers
+
+ def get_canonical_name(self, plugin):
+ """ Return canonical name for a plugin object. Note that a plugin
+ may be registered under a different name which was specified
+ by the caller of register(plugin, name). To obtain the name
+        of a registered plugin use ``get_name(plugin)`` instead."""
+ return getattr(plugin, "__name__", None) or str(id(plugin))
+
+ def get_plugin(self, name):
+ """ Return a plugin or None for the given name. """
+ return self._name2plugin.get(name)
+
+ def has_plugin(self, name):
+ """ Return True if a plugin with the given name is registered. """
+ return self.get_plugin(name) is not None
+
+ def get_name(self, plugin):
+ """ Return name for registered plugin or None if not registered. """
+ for name, val in self._name2plugin.items():
+ if plugin == val:
+ return name
+
+ def _verify_hook(self, hook, hookimpl):
+ if hook.is_historic() and hookimpl.hookwrapper:
+ raise PluginValidationError(
+ "Plugin %r\nhook %r\nhistoric incompatible to hookwrapper" %
+ (hookimpl.plugin_name, hook.name))
+
+ # positional arg checking
+ notinspec = set(hookimpl.argnames) - set(hook.argnames)
+ if notinspec:
+ raise PluginValidationError(
+ "Plugin %r for hook %r\nhookimpl definition: %s\n"
+ "Argument(s) %s are declared in the hookimpl but "
+ "can not be found in the hookspec" %
+ (hookimpl.plugin_name, hook.name,
+ _formatdef(hookimpl.function), notinspec)
+ )
+
+ def check_pending(self):
+ """ Verify that all hooks which have not been verified against
+ a hook specification are optional, otherwise raise PluginValidationError"""
+ for name in self.hook.__dict__:
+ if name[0] != "_":
+ hook = getattr(self.hook, name)
+ if not hook.has_spec():
+ for hookimpl in (hook._wrappers + hook._nonwrappers):
+ if not hookimpl.optionalhook:
+ raise PluginValidationError(
+ "unknown hook %r in plugin %r" %
+ (name, hookimpl.plugin))
+
+ def load_setuptools_entrypoints(self, entrypoint_name):
+ """ Load modules from querying the specified setuptools entrypoint name.
+ Return the number of loaded plugins. """
+ from pkg_resources import (iter_entry_points, DistributionNotFound,
+ VersionConflict)
+ for ep in iter_entry_points(entrypoint_name):
+ # is the plugin registered or blocked?
+ if self.get_plugin(ep.name) or self.is_blocked(ep.name):
+ continue
+ try:
+ plugin = ep.load()
+ except DistributionNotFound:
+ continue
+ except VersionConflict as e:
+ raise PluginValidationError(
+ "Plugin %r could not be loaded: %s!" % (ep.name, e))
+ self.register(plugin, name=ep.name)
+ self._plugin_distinfo.append((plugin, ep.dist))
+ return len(self._plugin_distinfo)
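+
+ # Illustrative sketch (not part of upstream pluggy): a hypothetical distribution
+ # could expose a plugin module under the queried entry point group, e.g. in its
+ # setup.py, after which load_setuptools_entrypoints("example") would register it:
+ #
+ #     from setuptools import setup
+ #
+ #     setup(
+ #         name="example-plugin",   # hypothetical project name
+ #         entry_points={"example": ["myplugin = example_plugin.module"]},
+ #     )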
+
+ def list_plugin_distinfo(self):
+ """ return list of distinfo/plugin tuples for all setuptools registered
+ plugins. """
+ return list(self._plugin_distinfo)
+
+ def list_name_plugin(self):
+ """ return list of name/plugin pairs. """
+ return list(self._name2plugin.items())
+
+ def get_hookcallers(self, plugin):
+ """ get all hook callers for the specified plugin. """
+ return self._plugin2hookcallers.get(plugin)
+
+ def add_hookcall_monitoring(self, before, after):
+ """ add before/after tracing functions for all hooks
+ and return an undo function which, when called,
+ will remove the added tracers.
+
+ ``before(hook_name, hook_impls, kwargs)`` will be called ahead
+ of all hook calls and receive a hookcaller instance, a list
+ of HookImpl instances and the keyword arguments for the hook call.
+
+ ``after(outcome, hook_name, hook_impls, kwargs)`` receives the
+ same arguments as ``before`` but also a :py:class:`_Result` object
+ which represents the result of the overall hook call.
+ """
+ return _TracedHookExecution(self, before, after).undo
+
+ def enable_tracing(self):
+ """ enable tracing of hook calls and return an undo function. """
+ hooktrace = self.hook._trace
+
+ def before(hook_name, methods, kwargs):
+ hooktrace.root.indent += 1
+ hooktrace(hook_name, kwargs)
+
+ def after(outcome, hook_name, methods, kwargs):
+ if outcome.excinfo is None:
+ hooktrace("finish", hook_name, "-->", outcome.get_result())
+ hooktrace.root.indent -= 1
+
+ return self.add_hookcall_monitoring(before, after)
+
+ def subset_hook_caller(self, name, remove_plugins):
+ """ Return a new _HookCaller instance for the named method
+ which manages calls to all registered plugins except the
+ ones from remove_plugins. """
+ orig = getattr(self.hook, name)
+ plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
+ if plugins_to_remove:
+ hc = _HookCaller(orig.name, orig._hookexec, orig._specmodule_or_class,
+ orig.spec_opts)
+ for hookimpl in (orig._wrappers + orig._nonwrappers):
+ plugin = hookimpl.plugin
+ if plugin not in plugins_to_remove:
+ hc._add_hookimpl(hookimpl)
+ # we also keep track of this hook caller so it
+ # gets properly removed on plugin unregistration
+ self._plugin2hookcallers.setdefault(plugin, []).append(hc)
+ return hc
+ return orig
+
+
+def varnames(func):
+ """Return tuple of positional and keywrord argument names for a function,
+ method, class or callable.
+
+ In case of a class, its ``__init__`` method is considered.
+ For methods the ``self`` parameter is not included.
+ """
+ cache = getattr(func, "__dict__", {})
+ try:
+ return cache["_varnames"]
+ except KeyError:
+ pass
+
+ if inspect.isclass(func):
+ try:
+ func = func.__init__
+ except AttributeError:
+ return (), ()
+ elif not inspect.isroutine(func): # callable object?
+ try:
+ func = getattr(func, '__call__', func)
+ except Exception:
+ return ()
+
+ try: # func MUST be a function or method here or we won't parse any args
+ spec = _getargspec(func)
+ except TypeError:
+ return (), ()
+
+ args, defaults = tuple(spec.args), spec.defaults
+ if defaults:
+ index = -len(defaults)
+ args, defaults = args[:index], tuple(args[index:])
+ else:
+ defaults = ()
+
+ # strip any implicit instance arg
+ if args:
+ if inspect.ismethod(func) or (
+ '.' in getattr(func, '__qualname__', ()) and args[0] == 'self'
+ ):
+ args = args[1:]
+
+ assert "self" not in args # best naming practises check?
+ try:
+ cache["_varnames"] = args, defaults
+ except TypeError:
+ pass
+ return args, defaults
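+
+# Illustrative sketch (not part of upstream pluggy): expected varnames() results,
+# given the logic above (positional names first, names with defaults second):
+#
+#     def greet(name, greeting="hi"):      # hypothetical function
+#         pass
+#
+#     varnames(greet)   # -> (("name",), ("greeting",))
+#
+#     class Plugin(object):                # hypothetical class; "self" is stripped
+#         def hello(self, arg):
+#             pass
+#
+#     varnames(Plugin().hello)   # -> (("arg",), ())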
+
+
+class _HookRelay(object):
+ """ hook holder object for performing 1:N hook calls where N is the number
+ of registered plugins.
+
+ """
+
+ def __init__(self, trace):
+ self._trace = trace
+
+
+class _HookCaller(object):
+ def __init__(self, name, hook_execute, specmodule_or_class=None,
+ spec_opts=None):
+ self.name = name
+ self._wrappers = []
+ self._nonwrappers = []
+ self._hookexec = hook_execute
+ self._specmodule_or_class = None
+ self.argnames = None
+ self.kwargnames = None
+ self.multicall = _multicall
+ self.spec_opts = spec_opts or {}
+ if specmodule_or_class is not None:
+ self.set_specification(specmodule_or_class, spec_opts)
+
+ def has_spec(self):
+ return self._specmodule_or_class is not None
+
+ def set_specification(self, specmodule_or_class, spec_opts):
+ assert not self.has_spec()
+ self._specmodule_or_class = specmodule_or_class
+ specfunc = getattr(specmodule_or_class, self.name)
+ # get spec arg signature
+ argnames, self.kwargnames = varnames(specfunc)
+ self.argnames = ["__multicall__"] + list(argnames)
+ self.spec_opts.update(spec_opts)
+ if spec_opts.get("historic"):
+ self._call_history = []
+
+ def is_historic(self):
+ return hasattr(self, "_call_history")
+
+ def _remove_plugin(self, plugin):
+ def remove(wrappers):
+ for i, method in enumerate(wrappers):
+ if method.plugin == plugin:
+ del wrappers[i]
+ return True
+ if remove(self._wrappers) is None:
+ if remove(self._nonwrappers) is None:
+ raise ValueError("plugin %r not found" % (plugin,))
+
+ def _add_hookimpl(self, hookimpl):
+ """A an implementation to the callback chain.
+ """
+ if hookimpl.hookwrapper:
+ methods = self._wrappers
+ else:
+ methods = self._nonwrappers
+
+ if hookimpl.trylast:
+ methods.insert(0, hookimpl)
+ elif hookimpl.tryfirst:
+ methods.append(hookimpl)
+ else:
+ # find last non-tryfirst method
+ i = len(methods) - 1
+ while i >= 0 and methods[i].tryfirst:
+ i -= 1
+ methods.insert(i + 1, hookimpl)
+
+ if '__multicall__' in hookimpl.argnames:
+ warnings.warn(
+ "Support for __multicall__ is now deprecated and will be"
+ "removed in an upcoming release.",
+ DeprecationWarning
+ )
+ self.multicall = _legacymulticall
+
+ def __repr__(self):
+ return "<_HookCaller %r>" % (self.name,)
+
+ def __call__(self, *args, **kwargs):
+ if args:
+ raise TypeError("hook calling supports only keyword arguments")
+ assert not self.is_historic()
+ if self.argnames:
+ notincall = set(self.argnames) - set(['__multicall__']) - set(
+ kwargs.keys())
+ if notincall:
+ warnings.warn(
+ "Argument(s) {} which are declared in the hookspec "
+ "can not be found in this hook call"
+ .format(tuple(notincall)),
+ stacklevel=2,
+ )
+ return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
+
+ def call_historic(self, proc=None, kwargs=None):
+ """ call the hook with given ``kwargs`` for all registered plugins and
+ for all plugins which will be registered afterwards.
+
+ If ``proc`` is not None it will be called for each non-None result
+ obtained from a hook implementation.
+ """
+ self._call_history.append((kwargs or {}, proc))
+ # historizing hooks don't return results
+ res = self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
+ for x in res or []:
+ proc(x)
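+
+ # Illustrative sketch (not part of upstream pluggy), assuming a spec declared
+ # with ``@hookspec(historic=True)`` on a PluginManager ``pm``:
+ #
+ #     pm.hook.he_method1.call_historic(kwargs=dict(arg=1))
+ #     pm.register(LatePlugin())   # hypothetical plugin; its he_method1 impl
+ #                                 # is replayed with arg=1 upon registration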
+
+ def call_extra(self, methods, kwargs):
+ """ Call the hook with some additional temporarily participating
+ methods using the specified kwargs as call parameters. """
+ old = list(self._nonwrappers), list(self._wrappers)
+ for method in methods:
+ opts = dict(hookwrapper=False, trylast=False, tryfirst=False)
+ hookimpl = HookImpl(None, "<temp>", method, opts)
+ self._add_hookimpl(hookimpl)
+ try:
+ return self(**kwargs)
+ finally:
+ self._nonwrappers, self._wrappers = old
+
+ def _maybe_apply_history(self, method):
+ """Apply call history to a new hookimpl if it is marked as historic.
+ """
+ if self.is_historic():
+ for kwargs, proc in self._call_history:
+ res = self._hookexec(self, [method], kwargs)
+ if res and proc is not None:
+ proc(res[0])
+
+
+class HookImpl(object):
+ def __init__(self, plugin, plugin_name, function, hook_impl_opts):
+ self.function = function
+ self.argnames, self.kwargnames = varnames(self.function)
+ self.plugin = plugin
+ self.opts = hook_impl_opts
+ self.plugin_name = plugin_name
+ self.__dict__.update(hook_impl_opts)
+
+
+if hasattr(inspect, 'getfullargspec'):
+ def _getargspec(func):
+ return inspect.getfullargspec(func)
+else:
+ def _getargspec(func):
+ return inspect.getargspec(func)
+
+
+if hasattr(inspect, 'signature'):
+ def _formatdef(func):
+ return "%s%s" % (
+ func.__name__,
+ str(inspect.signature(func))
+ )
+else:
+ def _formatdef(func):
+ return "%s%s" % (
+ func.__name__,
+ inspect.formatargspec(*inspect.getargspec(func))
+ )
diff --git a/third_party/python/pluggy/pluggy/callers.py b/third_party/python/pluggy/pluggy/callers.py
new file mode 100644
index 0000000000..3ff67becff
--- /dev/null
+++ b/third_party/python/pluggy/pluggy/callers.py
@@ -0,0 +1,201 @@
+'''
+Call loop machinery
+'''
+import sys
+import warnings
+
+_py3 = sys.version_info > (3, 0)
+
+
+if not _py3:
+ exec("""
+def _reraise(cls, val, tb):
+ raise cls, val, tb
+""")
+
+
+def _raise_wrapfail(wrap_controller, msg):
+ co = wrap_controller.gi_code
+ raise RuntimeError("wrap_controller at %r %s:%d %s" %
+ (co.co_name, co.co_filename, co.co_firstlineno, msg))
+
+
+class HookCallError(Exception):
+ """ Hook was called wrongly. """
+
+
+class _Result(object):
+ def __init__(self, result, excinfo):
+ self._result = result
+ self._excinfo = excinfo
+
+ @property
+ def excinfo(self):
+ return self._excinfo
+
+ @property
+ def result(self):
+ """Get the result(s) for this hook call (DEPRECATED in favor of ``get_result()``)."""
+ msg = 'Use get_result() which forces correct exception handling'
+ warnings.warn(DeprecationWarning(msg), stacklevel=2)
+ return self._result
+
+ @classmethod
+ def from_call(cls, func):
+ __tracebackhide__ = True
+ result = excinfo = None
+ try:
+ result = func()
+ except BaseException:
+ excinfo = sys.exc_info()
+
+ return cls(result, excinfo)
+
+ def force_result(self, result):
+ """Force the result(s) to ``result``.
+
+ If the hook was marked as a ``firstresult``, a single value should
+ be set; otherwise, set a (modified) list of results. Any exceptions
+ found during invocation will be deleted.
+ """
+ self._result = result
+ self._excinfo = None
+
+ def get_result(self):
+ """Get the result(s) for this hook call.
+
+ If the hook was marked as a ``firstresult``, only a single value
+ will be returned; otherwise, a list of results.
+ """
+ __tracebackhide__ = True
+ if self._excinfo is None:
+ return self._result
+ else:
+ ex = self._excinfo
+ if _py3:
+ raise ex[1].with_traceback(ex[2])
+ _reraise(*ex) # noqa
+
+
+def _wrapped_call(wrap_controller, func):
+ """ Wrap calling to a function with a generator which needs to yield
+ exactly once. The yield point will trigger calling the wrapped function
+ and return its ``_Result`` to the yield point. The generator then needs
+ to finish (raise StopIteration) in order for the wrapped call to complete.
+ """
+ try:
+ next(wrap_controller) # first yield
+ except StopIteration:
+ _raise_wrapfail(wrap_controller, "did not yield")
+ call_outcome = _Result.from_call(func)
+ try:
+ wrap_controller.send(call_outcome)
+ _raise_wrapfail(wrap_controller, "has second yield")
+ except StopIteration:
+ pass
+ return call_outcome.get_result()
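+
+# Illustrative sketch (not part of upstream pluggy): the wrap_controller protocol
+# expected above is a generator that yields exactly once; the yield point receives
+# the wrapped call's _Result for optional inspection or modification:
+#
+#     def controller():                 # hypothetical hookwrapper-style generator
+#         # setup code runs before the wrapped call
+#         outcome = yield               # _Result of the wrapped function
+#         # teardown code runs after; a second yield would raise RuntimeError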
+
+
+class _LegacyMultiCall(object):
+ """ execute a call into multiple python functions/methods. """
+
+ # XXX note that the __multicall__ argument is supported only
+ # for pytest compatibility reasons. It was never officially
+ # supported there and is explicitly deprecated since 2.8,
+ # so we can remove it soon, allowing us to avoid the recursion below
+ # in execute() and simplify/speed up the execute loop.
+
+ def __init__(self, hook_impls, kwargs, firstresult=False):
+ self.hook_impls = hook_impls
+ self.caller_kwargs = kwargs # come from _HookCaller.__call__()
+ self.caller_kwargs["__multicall__"] = self
+ self.firstresult = firstresult
+
+ def execute(self):
+ caller_kwargs = self.caller_kwargs
+ self.results = results = []
+ firstresult = self.firstresult
+
+ while self.hook_impls:
+ hook_impl = self.hook_impls.pop()
+ try:
+ args = [caller_kwargs[argname] for argname in hook_impl.argnames]
+ except KeyError:
+ for argname in hook_impl.argnames:
+ if argname not in caller_kwargs:
+ raise HookCallError(
+ "hook call must provide argument %r" % (argname,))
+ if hook_impl.hookwrapper:
+ return _wrapped_call(hook_impl.function(*args), self.execute)
+ res = hook_impl.function(*args)
+ if res is not None:
+ if firstresult:
+ return res
+ results.append(res)
+
+ if not firstresult:
+ return results
+
+ def __repr__(self):
+ status = "%d meths" % (len(self.hook_impls),)
+ if hasattr(self, "results"):
+ status = ("%d results, " % len(self.results)) + status
+ return "<_MultiCall %s, kwargs=%r>" % (status, self.caller_kwargs)
+
+
+def _legacymulticall(hook_impls, caller_kwargs, firstresult=False):
+ return _LegacyMultiCall(
+ hook_impls, caller_kwargs, firstresult=firstresult).execute()
+
+
+def _multicall(hook_impls, caller_kwargs, firstresult=False):
+ """Execute a call into multiple python functions/methods and return the
+ result(s).
+
+ ``caller_kwargs`` comes from _HookCaller.__call__().
+ """
+ __tracebackhide__ = True
+ results = []
+ excinfo = None
+ try: # run impl and wrapper setup functions in a loop
+ teardowns = []
+ try:
+ for hook_impl in reversed(hook_impls):
+ try:
+ args = [caller_kwargs[argname] for argname in hook_impl.argnames]
+ except KeyError:
+ for argname in hook_impl.argnames:
+ if argname not in caller_kwargs:
+ raise HookCallError(
+ "hook call must provide argument %r" % (argname,))
+
+ if hook_impl.hookwrapper:
+ try:
+ gen = hook_impl.function(*args)
+ next(gen) # first yield
+ teardowns.append(gen)
+ except StopIteration:
+ _raise_wrapfail(gen, "did not yield")
+ else:
+ res = hook_impl.function(*args)
+ if res is not None:
+ results.append(res)
+ if firstresult: # halt further impl calls
+ break
+ except BaseException:
+ excinfo = sys.exc_info()
+ finally:
+ if firstresult: # first result hooks return a single value
+ outcome = _Result(results[0] if results else None, excinfo)
+ else:
+ outcome = _Result(results, excinfo)
+
+ # run all wrapper post-yield blocks
+ for gen in reversed(teardowns):
+ try:
+ gen.send(outcome)
+ _raise_wrapfail(gen, "has second yield")
+ except StopIteration:
+ pass
+
+ return outcome.get_result()
diff --git a/third_party/python/pluggy/setup.cfg b/third_party/python/pluggy/setup.cfg
new file mode 100644
index 0000000000..ead73f887a
--- /dev/null
+++ b/third_party/python/pluggy/setup.cfg
@@ -0,0 +1,13 @@
+[bdist_wheel]
+universal = 1
+
+[metadata]
+license_file = LICENSE
+
+[devpi:upload]
+formats = sdist.tgz,bdist_wheel
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/pluggy/setup.py b/third_party/python/pluggy/setup.py
new file mode 100644
index 0000000000..b7c0f69712
--- /dev/null
+++ b/third_party/python/pluggy/setup.py
@@ -0,0 +1,51 @@
+import os
+from setuptools import setup
+
+classifiers = [
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: POSIX',
+ 'Operating System :: Microsoft :: Windows',
+ 'Operating System :: MacOS :: MacOS X',
+ 'Topic :: Software Development :: Testing',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: Utilities',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy'] + [
+ ('Programming Language :: Python :: %s' % x) for x in
+ '2 2.7 3 3.4 3.5 3.6'.split()]
+
+with open('README.rst') as fd:
+ long_description = fd.read()
+
+
+def get_version():
+ p = os.path.join(os.path.dirname(
+ os.path.abspath(__file__)), "pluggy/__init__.py")
+ with open(p) as f:
+ for line in f.readlines():
+ if "__version__" in line:
+ return line.strip().split("=")[-1].strip(" '")
+ raise ValueError("could not read version")
+
+
+def main():
+ setup(
+ name='pluggy',
+ description='plugin and hook calling mechanisms for python',
+ long_description=long_description,
+ version=get_version(),
+ license='MIT license',
+ platforms=['unix', 'linux', 'osx', 'win32'],
+ author='Holger Krekel',
+ author_email='holger@merlinux.eu',
+ url='https://github.com/pytest-dev/pluggy',
+ python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
+ classifiers=classifiers,
+ packages=['pluggy'],
+ )
+
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/python/pluggy/testing/benchmark.py b/third_party/python/pluggy/testing/benchmark.py
new file mode 100644
index 0000000000..5a913e9d41
--- /dev/null
+++ b/third_party/python/pluggy/testing/benchmark.py
@@ -0,0 +1,59 @@
+"""
+Benchmarking and performance tests.
+"""
+import pytest
+from pluggy import (_multicall, _legacymulticall, HookImpl, HookspecMarker,
+ HookimplMarker)
+
+hookspec = HookspecMarker("example")
+hookimpl = HookimplMarker("example")
+
+
+def MC(methods, kwargs, callertype, firstresult=False):
+ hookfuncs = []
+ for method in methods:
+ f = HookImpl(None, "<temp>", method, method.example_impl)
+ hookfuncs.append(f)
+ return callertype(hookfuncs, kwargs, {"firstresult": firstresult})
+
+
+@hookimpl
+def hook(arg1, arg2, arg3):
+ return arg1, arg2, arg3
+
+
+@hookimpl(hookwrapper=True)
+def wrapper(arg1, arg2, arg3):
+ yield
+
+
+@pytest.fixture(
+ params=[10, 100],
+ ids="hooks={}".format,
+)
+def hooks(request):
+ return [hook for i in range(request.param)]
+
+
+@pytest.fixture(
+ params=[10, 100],
+ ids="wrappers={}".format,
+)
+def wrappers(request):
+ return [wrapper for i in range(request.param)]
+
+
+@pytest.fixture(
+ params=[_multicall, _legacymulticall],
+ ids=lambda item: item.__name__
+)
+def callertype(request):
+ return request.param
+
+
+def inner_exec(methods, callertype):
+ return MC(methods, {'arg1': 1, 'arg2': 2, 'arg3': 3}, callertype)
+
+
+def test_hook_and_wrappers_speed(benchmark, hooks, wrappers, callertype):
+ benchmark(inner_exec, hooks + wrappers, callertype)
diff --git a/third_party/python/pluggy/testing/conftest.py b/third_party/python/pluggy/testing/conftest.py
new file mode 100644
index 0000000000..3d61a349c8
--- /dev/null
+++ b/third_party/python/pluggy/testing/conftest.py
@@ -0,0 +1,30 @@
+import pytest
+
+
+@pytest.fixture(
+ params=[
+ lambda spec: spec,
+ lambda spec: spec()
+ ],
+ ids=[
+ "spec-is-class",
+ "spec-is-instance"
+ ],
+)
+def he_pm(request, pm):
+ from pluggy import HookspecMarker
+ hookspec = HookspecMarker("example")
+
+ class Hooks(object):
+ @hookspec
+ def he_method1(self, arg):
+ return arg + 1
+
+ pm.add_hookspecs(request.param(Hooks))
+ return pm
+
+
+@pytest.fixture
+def pm():
+ from pluggy import PluginManager
+ return PluginManager("example")
diff --git a/third_party/python/pluggy/testing/test_details.py b/third_party/python/pluggy/testing/test_details.py
new file mode 100644
index 0000000000..2fad198d95
--- /dev/null
+++ b/third_party/python/pluggy/testing/test_details.py
@@ -0,0 +1,103 @@
+import warnings
+
+import pytest
+
+from pluggy import PluginManager, HookimplMarker, HookspecMarker, _Result
+
+hookspec = HookspecMarker("example")
+hookimpl = HookimplMarker("example")
+
+
+def test_parse_hookimpl_override():
+ class MyPluginManager(PluginManager):
+ def parse_hookimpl_opts(self, module_or_class, name):
+ opts = PluginManager.parse_hookimpl_opts(
+ self, module_or_class, name)
+ if opts is None:
+ if name.startswith("x1"):
+ opts = {}
+ return opts
+
+ class Plugin(object):
+ def x1meth(self):
+ pass
+
+ @hookimpl(hookwrapper=True, tryfirst=True)
+ def x1meth2(self):
+ pass
+
+ class Spec(object):
+ @hookspec
+ def x1meth(self):
+ pass
+
+ @hookspec
+ def x1meth2(self):
+ pass
+
+ pm = MyPluginManager(hookspec.project_name)
+ pm.register(Plugin())
+ pm.add_hookspecs(Spec)
+ assert not pm.hook.x1meth._nonwrappers[0].hookwrapper
+ assert not pm.hook.x1meth._nonwrappers[0].tryfirst
+ assert not pm.hook.x1meth._nonwrappers[0].trylast
+ assert not pm.hook.x1meth._nonwrappers[0].optionalhook
+
+ assert pm.hook.x1meth2._wrappers[0].tryfirst
+ assert pm.hook.x1meth2._wrappers[0].hookwrapper
+
+
+def test_plugin_getattr_raises_errors():
+ """Pluggy must be able to handle plugins which raise weird exceptions
+ when getattr() gets called (#11).
+ """
+ class DontTouchMe(object):
+ def __getattr__(self, x):
+ raise Exception('cant touch me')
+
+ class Module(object):
+ pass
+
+ module = Module()
+ module.x = DontTouchMe()
+
+ pm = PluginManager(hookspec.project_name)
+ # register() would raise an error
+ pm.register(module, 'donttouch')
+ assert pm.get_plugin('donttouch') is module
+
+
+def test_warning_on_call_vs_hookspec_arg_mismatch():
+ """Verify that is a hook is called with less arguments then defined in the
+ spec that a warning is emitted.
+ """
+ class Spec:
+ @hookspec
+ def myhook(self, arg1, arg2):
+ pass
+
+ class Plugin:
+ @hookimpl
+ def myhook(self, arg1):
+ pass
+
+ pm = PluginManager(hookspec.project_name)
+ pm.register(Plugin())
+ pm.add_hookspecs(Spec())
+
+ with warnings.catch_warnings(record=True) as warns:
+ warnings.simplefilter('always')
+
+ # calling should trigger a warning
+ pm.hook.myhook(arg1=1)
+
+ assert len(warns) == 1
+ warning = warns[-1]
+ assert issubclass(warning.category, Warning)
+ assert "Argument(s) ('arg2',)" in str(warning.message)
+
+
+def test_result_deprecated():
+ r = _Result(10, None)
+ with pytest.deprecated_call():
+ assert r.result == 10
diff --git a/third_party/python/pluggy/testing/test_helpers.py b/third_party/python/pluggy/testing/test_helpers.py
new file mode 100644
index 0000000000..b178096844
--- /dev/null
+++ b/third_party/python/pluggy/testing/test_helpers.py
@@ -0,0 +1,68 @@
+from pluggy import _formatdef, varnames
+
+
+def test_varnames():
+ def f(x):
+ i = 3 # noqa
+
+ class A(object):
+ def f(self, y):
+ pass
+
+ class B(object):
+ def __call__(self, z):
+ pass
+
+ assert varnames(f) == (("x",), ())
+ assert varnames(A().f) == (('y',), ())
+ assert varnames(B()) == (('z',), ())
+
+
+def test_varnames_default():
+ def f(x, y=3):
+ pass
+
+ assert varnames(f) == (("x",), ("y",))
+
+
+def test_varnames_class():
+ class C(object):
+ def __init__(self, x):
+ pass
+
+ class D(object):
+ pass
+
+ class E(object):
+ def __init__(self, x):
+ pass
+
+ class F(object):
+ pass
+
+ assert varnames(C) == (("x",), ())
+ assert varnames(D) == ((), ())
+ assert varnames(E) == (("x",), ())
+ assert varnames(F) == ((), ())
+
+
+def test_formatdef():
+ def function1():
+ pass
+
+ assert _formatdef(function1) == 'function1()'
+
+ def function2(arg1):
+ pass
+
+ assert _formatdef(function2) == "function2(arg1)"
+
+ def function3(arg1, arg2="qwe"):
+ pass
+
+ assert _formatdef(function3) == "function3(arg1, arg2='qwe')"
+
+ def function4(arg1, *args, **kwargs):
+ pass
+
+ assert _formatdef(function4) == "function4(arg1, *args, **kwargs)"
diff --git a/third_party/python/pluggy/testing/test_hookrelay.py b/third_party/python/pluggy/testing/test_hookrelay.py
new file mode 100644
index 0000000000..5e7821bed8
--- /dev/null
+++ b/third_party/python/pluggy/testing/test_hookrelay.py
@@ -0,0 +1,210 @@
+import pytest
+from pluggy import PluginValidationError, HookimplMarker, HookspecMarker
+
+
+hookspec = HookspecMarker("example")
+hookimpl = HookimplMarker("example")
+
+
+def test_happypath(pm):
+ class Api(object):
+ @hookspec
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+ hook = pm.hook
+ assert hasattr(hook, 'hello')
+ assert repr(hook.hello).find("hello") != -1
+
+ class Plugin(object):
+ @hookimpl
+ def hello(self, arg):
+ return arg + 1
+
+ plugin = Plugin()
+ pm.register(plugin)
+ out = hook.hello(arg=3)
+ assert out == [4]
+ assert not hasattr(hook, 'world')
+ pm.unregister(plugin)
+ assert hook.hello(arg=3) == []
+
+
+def test_argmismatch(pm):
+ class Api(object):
+ @hookspec
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+
+ class Plugin(object):
+ @hookimpl
+ def hello(self, argwrong):
+ pass
+
+ with pytest.raises(PluginValidationError) as exc:
+ pm.register(Plugin())
+
+ assert "argwrong" in str(exc.value)
+
+
+def test_only_kwargs(pm):
+ class Api(object):
+ @hookspec
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+ with pytest.raises(TypeError) as exc:
+ pm.hook.hello(3)
+
+ comprehensible = "hook calling supports only keyword arguments"
+ assert comprehensible in str(exc.value)
+
+
+def test_call_order(pm):
+ class Api(object):
+ @hookspec
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+
+ class Plugin1(object):
+ @hookimpl
+ def hello(self, arg):
+ return 1
+
+ class Plugin2(object):
+ @hookimpl
+ def hello(self, arg):
+ return 2
+
+ class Plugin3(object):
+ @hookimpl
+ def hello(self, arg):
+ return 3
+
+ class Plugin4(object):
+ @hookimpl(hookwrapper=True)
+ def hello(self, arg):
+ assert arg == 0
+ outcome = yield
+ assert outcome.get_result() == [3, 2, 1]
+
+ pm.register(Plugin1())
+ pm.register(Plugin2())
+ pm.register(Plugin3())
+ pm.register(Plugin4()) # hookwrapper should get same list result
+ res = pm.hook.hello(arg=0)
+ assert res == [3, 2, 1]
+
+
+def test_firstresult_definition(pm):
+ class Api(object):
+ @hookspec(firstresult=True)
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+
+ class Plugin1(object):
+ @hookimpl
+ def hello(self, arg):
+ return arg + 1
+
+ class Plugin2(object):
+ @hookimpl
+ def hello(self, arg):
+ return arg - 1
+
+ class Plugin3(object):
+ @hookimpl
+ def hello(self, arg):
+ return None
+
+ class Plugin4(object):
+ @hookimpl(hookwrapper=True)
+ def hello(self, arg):
+ assert arg == 3
+ outcome = yield
+ assert outcome.get_result() == 2
+
+ pm.register(Plugin1()) # discarded - not the last registered plugin
+ pm.register(Plugin2()) # used as result
+ pm.register(Plugin3()) # None result is ignored
+ pm.register(Plugin4()) # hookwrapper should get same non-list result
+ res = pm.hook.hello(arg=3)
+ assert res == 2
+
+
+def test_firstresult_force_result(pm):
+ """Verify forcing a result in a wrapper.
+ """
+ class Api(object):
+ @hookspec(firstresult=True)
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+
+ class Plugin1(object):
+ @hookimpl
+ def hello(self, arg):
+ return arg + 1
+
+ class Plugin2(object):
+ @hookimpl(hookwrapper=True)
+ def hello(self, arg):
+ assert arg == 3
+ outcome = yield
+ assert outcome.get_result() == 4
+ outcome.force_result(0)
+
+ class Plugin3(object):
+ @hookimpl
+ def hello(self, arg):
+ return None
+
+ pm.register(Plugin1())
+ pm.register(Plugin2()) # wrapper
+ pm.register(Plugin3()) # ignored since returns None
+ res = pm.hook.hello(arg=3)
+ assert res == 0 # this result is forced and not a list
+
+
+def test_firstresult_returns_none(pm):
+ """If None results are returned by underlying implementations ensure
+ the multi-call loop returns a None value.
+ """
+ class Api(object):
+ @hookspec(firstresult=True)
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+
+ class Plugin1(object):
+ @hookimpl
+ def hello(self, arg):
+ return None
+
+ pm.register(Plugin1())
+ res = pm.hook.hello(arg=3)
+ assert res is None
+
+
+def test_firstresult_no_plugin(pm):
+ """If no implementations/plugins have been registered for a firstresult
+ hook the multi-call loop should return a None value.
+ """
+ class Api(object):
+ @hookspec(firstresult=True)
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+ res = pm.hook.hello(arg=3)
+ assert res is None
diff --git a/third_party/python/pluggy/testing/test_method_ordering.py b/third_party/python/pluggy/testing/test_method_ordering.py
new file mode 100644
index 0000000000..9584a0ae5a
--- /dev/null
+++ b/third_party/python/pluggy/testing/test_method_ordering.py
@@ -0,0 +1,322 @@
+import pytest
+
+
+import sys
+import types
+
+from pluggy import PluginManager, HookImpl, HookimplMarker, HookspecMarker
+
+hookspec = HookspecMarker("example")
+hookimpl = HookimplMarker("example")
+
+
+@pytest.fixture
+def hc(pm):
+ class Hooks(object):
+ @hookspec
+ def he_method1(self, arg):
+ pass
+ pm.add_hookspecs(Hooks)
+ return pm.hook.he_method1
+
+
+@pytest.fixture
+def addmeth(hc):
+ def addmeth(tryfirst=False, trylast=False, hookwrapper=False):
+ def wrap(func):
+ hookimpl(tryfirst=tryfirst, trylast=trylast,
+ hookwrapper=hookwrapper)(func)
+ hc._add_hookimpl(HookImpl(None, "<temp>", func, func.example_impl))
+ return func
+ return wrap
+ return addmeth
+
+
+def funcs(hookmethods):
+ return [hookmethod.function for hookmethod in hookmethods]
+
+
+def test_adding_nonwrappers(hc, addmeth):
+ @addmeth()
+ def he_method1():
+ pass
+
+ @addmeth()
+ def he_method2():
+ pass
+
+ @addmeth()
+ def he_method3():
+ pass
+ assert funcs(hc._nonwrappers) == [he_method1, he_method2, he_method3]
+
+
+def test_adding_nonwrappers_trylast(hc, addmeth):
+ @addmeth()
+ def he_method1_middle():
+ pass
+
+ @addmeth(trylast=True)
+ def he_method1():
+ pass
+
+ @addmeth()
+ def he_method1_b():
+ pass
+ assert funcs(hc._nonwrappers) == [he_method1, he_method1_middle, he_method1_b]
+
+
+def test_adding_nonwrappers_trylast3(hc, addmeth):
+ @addmeth()
+ def he_method1_a():
+ pass
+
+ @addmeth(trylast=True)
+ def he_method1_b():
+ pass
+
+ @addmeth()
+ def he_method1_c():
+ pass
+
+ @addmeth(trylast=True)
+ def he_method1_d():
+ pass
+ assert funcs(hc._nonwrappers) == \
+ [he_method1_d, he_method1_b, he_method1_a, he_method1_c]
+
+
+def test_adding_nonwrappers_trylast2(hc, addmeth):
+ @addmeth()
+ def he_method1_middle():
+ pass
+
+ @addmeth()
+ def he_method1_b():
+ pass
+
+ @addmeth(trylast=True)
+ def he_method1():
+ pass
+ assert funcs(hc._nonwrappers) == \
+ [he_method1, he_method1_middle, he_method1_b]
+
+
+def test_adding_nonwrappers_tryfirst(hc, addmeth):
+ @addmeth(tryfirst=True)
+ def he_method1():
+ pass
+
+ @addmeth()
+ def he_method1_middle():
+ pass
+
+ @addmeth()
+ def he_method1_b():
+ pass
+ assert funcs(hc._nonwrappers) == [
+ he_method1_middle, he_method1_b, he_method1]
+
+
+def test_adding_wrappers_ordering(hc, addmeth):
+ @addmeth(hookwrapper=True)
+ def he_method1():
+ pass
+
+ @addmeth()
+ def he_method1_middle():
+ pass
+
+ @addmeth(hookwrapper=True)
+ def he_method3():
+ pass
+
+ assert funcs(hc._nonwrappers) == [he_method1_middle]
+ assert funcs(hc._wrappers) == [he_method1, he_method3]
+
+
+def test_adding_wrappers_ordering_tryfirst(hc, addmeth):
+ @addmeth(hookwrapper=True, tryfirst=True)
+ def he_method1():
+ pass
+
+ @addmeth(hookwrapper=True)
+ def he_method2():
+ pass
+
+ assert hc._nonwrappers == []
+ assert funcs(hc._wrappers) == [he_method2, he_method1]
+
+
+def test_hookspec(pm):
+ class HookSpec(object):
+ @hookspec()
+ def he_myhook1(arg1):
+ pass
+
+ @hookspec(firstresult=True)
+ def he_myhook2(arg1):
+ pass
+
+ @hookspec(firstresult=False)
+ def he_myhook3(arg1):
+ pass
+
+ pm.add_hookspecs(HookSpec)
+ assert not pm.hook.he_myhook1.spec_opts["firstresult"]
+ assert pm.hook.he_myhook2.spec_opts["firstresult"]
+ assert not pm.hook.he_myhook3.spec_opts["firstresult"]
+
+
+@pytest.mark.parametrize('name', ["hookwrapper", "optionalhook", "tryfirst", "trylast"])
+@pytest.mark.parametrize('val', [True, False])
+def test_hookimpl(name, val):
+ @hookimpl(**{name: val})
+ def he_myhook1(arg1):
+ pass
+ if val:
+ assert he_myhook1.example_impl.get(name)
+ else:
+ assert not hasattr(he_myhook1, name)
+
+
+def test_load_setuptools_instantiation(monkeypatch, pm):
+ pkg_resources = pytest.importorskip("pkg_resources")
+
+ def my_iter(name):
+ assert name == "hello"
+
+ class EntryPoint(object):
+ name = "myname"
+ dist = None
+
+ def load(self):
+ class PseudoPlugin(object):
+ x = 42
+ return PseudoPlugin()
+
+ return iter([EntryPoint()])
+
+ monkeypatch.setattr(pkg_resources, 'iter_entry_points', my_iter)
+ num = pm.load_setuptools_entrypoints("hello")
+ assert num == 1
+ plugin = pm.get_plugin("myname")
+ assert plugin.x == 42
+ assert pm.list_plugin_distinfo() == [(plugin, None)]
+
+
+def test_load_setuptools_not_installed(monkeypatch, pm):
+ monkeypatch.setitem(
+ sys.modules, 'pkg_resources',
+ types.ModuleType("pkg_resources"))
+
+ with pytest.raises(ImportError):
+ pm.load_setuptools_entrypoints("qwe")
+
+
+def test_add_tracefuncs(he_pm):
+ out = []
+
+ class api1(object):
+ @hookimpl
+ def he_method1(self):
+ out.append("he_method1-api1")
+
+ class api2(object):
+ @hookimpl
+ def he_method1(self):
+ out.append("he_method1-api2")
+
+ he_pm.register(api1())
+ he_pm.register(api2())
+
+ def before(hook_name, hook_impls, kwargs):
+ out.append((hook_name, list(hook_impls), kwargs))
+
+ def after(outcome, hook_name, hook_impls, kwargs):
+ out.append((outcome, hook_name, list(hook_impls), kwargs))
+
+ undo = he_pm.add_hookcall_monitoring(before, after)
+
+ he_pm.hook.he_method1(arg=1)
+ assert len(out) == 4
+ assert out[0][0] == "he_method1"
+ assert len(out[0][1]) == 2
+ assert isinstance(out[0][2], dict)
+ assert out[1] == "he_method1-api2"
+ assert out[2] == "he_method1-api1"
+ assert len(out[3]) == 4
+ assert out[3][1] == out[0][0]
+
+ undo()
+ he_pm.hook.he_method1(arg=1)
+ assert len(out) == 4 + 2
+
+
+def test_hook_tracing(he_pm):
+ saveindent = []
+
+ class api1(object):
+ @hookimpl
+ def he_method1(self):
+ saveindent.append(he_pm.trace.root.indent)
+
+ class api2(object):
+ @hookimpl
+ def he_method1(self):
+ saveindent.append(he_pm.trace.root.indent)
+ raise ValueError()
+
+ he_pm.register(api1())
+ out = []
+ he_pm.trace.root.setwriter(out.append)
+ undo = he_pm.enable_tracing()
+ try:
+ indent = he_pm.trace.root.indent
+ he_pm.hook.he_method1(arg=1)
+ assert indent == he_pm.trace.root.indent
+ assert len(out) == 2
+ assert 'he_method1' in out[0]
+ assert 'finish' in out[1]
+
+ out[:] = []
+ he_pm.register(api2())
+
+ with pytest.raises(ValueError):
+ he_pm.hook.he_method1(arg=1)
+ assert he_pm.trace.root.indent == indent
+ assert saveindent[0] > indent
+ finally:
+ undo()
+
+
+@pytest.mark.parametrize('include_hookspec', [True, False])
+def test_prefix_hookimpl(include_hookspec):
+ pm = PluginManager(hookspec.project_name, "hello_")
+
+ if include_hookspec:
+ class HookSpec(object):
+ @hookspec
+ def hello_myhook(self, arg1):
+ """ add to arg1 """
+
+ pm.add_hookspecs(HookSpec)
+
+ class Plugin(object):
+ def hello_myhook(self, arg1):
+ return arg1 + 1
+
+ pm.register(Plugin())
+ pm.register(Plugin())
+ results = pm.hook.hello_myhook(arg1=17)
+ assert results == [18, 18]
+
+
+def test_prefix_hookimpl_dontmatch_module():
+ pm = PluginManager(hookspec.project_name, "hello_")
+
+ class BadPlugin(object):
+ hello_module = __import__('email')
+
+ pm.register(BadPlugin())
+ pm.check_pending()
diff --git a/third_party/python/pluggy/testing/test_multicall.py b/third_party/python/pluggy/testing/test_multicall.py
new file mode 100644
index 0000000000..860a209b66
--- /dev/null
+++ b/third_party/python/pluggy/testing/test_multicall.py
@@ -0,0 +1,194 @@
+import pytest
+
+from pluggy import _multicall, _legacymulticall, HookImpl, HookCallError
+from pluggy.callers import _LegacyMultiCall
+from pluggy import HookspecMarker, HookimplMarker
+
+
+hookspec = HookspecMarker("example")
+hookimpl = HookimplMarker("example")
+
+
+def test_uses_copy_of_methods():
+ out = [lambda: 42]
+ mc = _LegacyMultiCall(out, {})
+ repr(mc)
+ out[:] = []
+ res = mc.execute()
+ return res == 42
+
+
+def MC(methods, kwargs, firstresult=False):
+ caller = _multicall
+ hookfuncs = []
+ for method in methods:
+ f = HookImpl(None, "<temp>", method, method.example_impl)
+ hookfuncs.append(f)
+ if '__multicall__' in f.argnames:
+ caller = _legacymulticall
+ return caller(hookfuncs, kwargs, firstresult=firstresult)
+
+
+def test_call_passing():
+ class P1(object):
+ @hookimpl
+ def m(self, __multicall__, x):
+ assert len(__multicall__.results) == 1
+ assert not __multicall__.hook_impls
+ return 17
+
+ class P2(object):
+ @hookimpl
+ def m(self, __multicall__, x):
+ assert __multicall__.results == []
+ assert __multicall__.hook_impls
+ return 23
+
+ p1 = P1()
+ p2 = P2()
+ reslist = MC([p1.m, p2.m], {"x": 23})
+ assert len(reslist) == 2
+ # ensure reversed order
+ assert reslist == [23, 17]
+
+
+def test_keyword_args():
+ @hookimpl
+ def f(x):
+ return x + 1
+
+ class A(object):
+ @hookimpl
+ def f(self, x, y):
+ return x + y
+
+ reslist = MC([f, A().f], dict(x=23, y=24))
+ assert reslist == [24 + 23, 24]
+
+
+def test_keyword_args_with_defaultargs():
+ @hookimpl
+ def f(x, z=1):
+ return x + z
+ reslist = MC([f], dict(x=23, y=24))
+ assert reslist == [24]
+
+
+def test_tags_call_error():
+ @hookimpl
+ def f(x):
+ return x
+ with pytest.raises(HookCallError):
+ MC([f], {})
+
+
+def test_call_subexecute():
+ @hookimpl
+ def m(__multicall__):
+ subresult = __multicall__.execute()
+ return subresult + 1
+
+ @hookimpl
+ def n():
+ return 1
+
+ res = MC([n, m], {}, firstresult=True)
+ assert res == 2
+
+
+def test_call_none_is_no_result():
+ @hookimpl
+ def m1():
+ return 1
+
+ @hookimpl
+ def m2():
+ return None
+
+ res = MC([m1, m2], {}, firstresult=True)
+ assert res == 1
+ res = MC([m1, m2], {}, {})
+ assert res == [1]
+
+
+def test_hookwrapper():
+ out = []
+
+ @hookimpl(hookwrapper=True)
+ def m1():
+ out.append("m1 init")
+ yield None
+ out.append("m1 finish")
+
+ @hookimpl
+ def m2():
+ out.append("m2")
+ return 2
+
+ res = MC([m2, m1], {})
+ assert res == [2]
+ assert out == ["m1 init", "m2", "m1 finish"]
+ out[:] = []
+ res = MC([m2, m1], {}, firstresult=True)
+ assert res == 2
+ assert out == ["m1 init", "m2", "m1 finish"]
+
+
+def test_hookwrapper_order():
+ out = []
+
+ @hookimpl(hookwrapper=True)
+ def m1():
+ out.append("m1 init")
+ yield 1
+ out.append("m1 finish")
+
+ @hookimpl(hookwrapper=True)
+ def m2():
+ out.append("m2 init")
+ yield 2
+ out.append("m2 finish")
+
+ res = MC([m2, m1], {})
+ assert res == []
+ assert out == ["m1 init", "m2 init", "m2 finish", "m1 finish"]
+
+
+def test_hookwrapper_not_yield():
+ @hookimpl(hookwrapper=True)
+ def m1():
+ pass
+
+ with pytest.raises(TypeError):
+ MC([m1], {})
+
+
+def test_hookwrapper_too_many_yield():
+ @hookimpl(hookwrapper=True)
+ def m1():
+ yield 1
+ yield 2
+
+ with pytest.raises(RuntimeError) as ex:
+ MC([m1], {})
+ assert "m1" in str(ex.value)
+ assert (__file__ + ':') in str(ex.value)
+
+
+@pytest.mark.parametrize("exc", [ValueError, SystemExit])
+def test_hookwrapper_exception(exc):
+ out = []
+
+ @hookimpl(hookwrapper=True)
+ def m1():
+ out.append("m1 init")
+ yield None
+ out.append("m1 finish")
+
+ @hookimpl
+ def m2():
+ raise exc
+
+ with pytest.raises(exc):
+ MC([m2, m1], {})
+ assert out == ["m1 init", "m1 finish"]
diff --git a/third_party/python/pluggy/testing/test_pluginmanager.py b/third_party/python/pluggy/testing/test_pluginmanager.py
new file mode 100644
index 0000000000..e2c86cc644
--- /dev/null
+++ b/third_party/python/pluggy/testing/test_pluginmanager.py
@@ -0,0 +1,374 @@
+import pytest
+import types
+
+from pluggy import (PluginValidationError,
+ HookCallError, HookimplMarker, HookspecMarker)
+
+
+hookspec = HookspecMarker("example")
+hookimpl = HookimplMarker("example")
+
+
+def test_plugin_double_register(pm):
+ pm.register(42, name="abc")
+ with pytest.raises(ValueError):
+ pm.register(42, name="abc")
+ with pytest.raises(ValueError):
+ pm.register(42, name="def")
+
+
+def test_pm(pm):
+ class A(object):
+ pass
+
+ a1, a2 = A(), A()
+ pm.register(a1)
+ assert pm.is_registered(a1)
+ pm.register(a2, "hello")
+ assert pm.is_registered(a2)
+ out = pm.get_plugins()
+ assert a1 in out
+ assert a2 in out
+ assert pm.get_plugin('hello') == a2
+ assert pm.unregister(a1) == a1
+ assert not pm.is_registered(a1)
+
+ out = pm.list_name_plugin()
+ assert len(out) == 1
+ assert out == [("hello", a2)]
+
+
+def test_has_plugin(pm):
+ class A(object):
+ pass
+
+ a1 = A()
+ pm.register(a1, 'hello')
+ assert pm.is_registered(a1)
+ assert pm.has_plugin('hello')
+
+
+def test_register_dynamic_attr(he_pm):
+ class A(object):
+ def __getattr__(self, name):
+ if name[0] != "_":
+ return 42
+ raise AttributeError()
+
+ a = A()
+ he_pm.register(a)
+ assert not he_pm.get_hookcallers(a)
+
+
+def test_pm_name(pm):
+ class A(object):
+ pass
+
+ a1 = A()
+ name = pm.register(a1, name="hello")
+ assert name == "hello"
+ pm.unregister(a1)
+ assert pm.get_plugin(a1) is None
+ assert not pm.is_registered(a1)
+ assert not pm.get_plugins()
+ name2 = pm.register(a1, name="hello")
+ assert name2 == name
+ pm.unregister(name="hello")
+ assert pm.get_plugin(a1) is None
+ assert not pm.is_registered(a1)
+ assert not pm.get_plugins()
+
+
+def test_set_blocked(pm):
+ class A(object):
+ pass
+
+ a1 = A()
+ name = pm.register(a1)
+ assert pm.is_registered(a1)
+ assert not pm.is_blocked(name)
+ pm.set_blocked(name)
+ assert pm.is_blocked(name)
+ assert not pm.is_registered(a1)
+
+ pm.set_blocked("somename")
+ assert pm.is_blocked("somename")
+ assert not pm.register(A(), "somename")
+ pm.unregister(name="somename")
+ assert pm.is_blocked("somename")
+
+
+def test_register_mismatch_method(he_pm):
+ class hello(object):
+ @hookimpl
+ def he_method_notexists(self):
+ pass
+
+ he_pm.register(hello())
+ with pytest.raises(PluginValidationError):
+ he_pm.check_pending()
+
+
+def test_register_mismatch_arg(he_pm):
+ class hello(object):
+ @hookimpl
+ def he_method1(self, qlwkje):
+ pass
+
+ with pytest.raises(PluginValidationError):
+ he_pm.register(hello())
+
+
+def test_register(pm):
+ class MyPlugin(object):
+ pass
+ my = MyPlugin()
+ pm.register(my)
+ assert my in pm.get_plugins()
+ my2 = MyPlugin()
+ pm.register(my2)
+ assert set([my, my2]).issubset(pm.get_plugins())
+
+ assert pm.is_registered(my)
+ assert pm.is_registered(my2)
+ pm.unregister(my)
+ assert not pm.is_registered(my)
+ assert my not in pm.get_plugins()
+
+
+def test_register_unknown_hooks(pm):
+ class Plugin1(object):
+ @hookimpl
+ def he_method1(self, arg):
+ return arg + 1
+
+ pname = pm.register(Plugin1())
+
+ class Hooks(object):
+ @hookspec
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+ # assert not pm._unverified_hooks
+ assert pm.hook.he_method1(arg=1) == [2]
+ assert len(pm.get_hookcallers(pm.get_plugin(pname))) == 1
+
+
+def test_register_historic(pm):
+ class Hooks(object):
+ @hookspec(historic=True)
+ def he_method1(self, arg):
+ pass
+ pm.add_hookspecs(Hooks)
+
+ pm.hook.he_method1.call_historic(kwargs=dict(arg=1))
+ out = []
+
+ class Plugin(object):
+ @hookimpl
+ def he_method1(self, arg):
+ out.append(arg)
+
+ pm.register(Plugin())
+ assert out == [1]
+
+ class Plugin2(object):
+ @hookimpl
+ def he_method1(self, arg):
+ out.append(arg * 10)
+
+ pm.register(Plugin2())
+ assert out == [1, 10]
+ pm.hook.he_method1.call_historic(kwargs=dict(arg=12))
+ assert out == [1, 10, 120, 12]
+
+
+def test_with_result_memorized(pm):
+ class Hooks(object):
+ @hookspec(historic=True)
+ def he_method1(self, arg):
+ pass
+ pm.add_hookspecs(Hooks)
+
+ he_method1 = pm.hook.he_method1
+ he_method1.call_historic(lambda res: out.append(res), dict(arg=1))
+ out = []
+
+ class Plugin(object):
+ @hookimpl
+ def he_method1(self, arg):
+ return arg * 10
+
+ pm.register(Plugin())
+ assert out == [10]
+
+
+def test_with_callbacks_immediately_executed(pm):
+ class Hooks(object):
+ @hookspec(historic=True)
+ def he_method1(self, arg):
+ pass
+ pm.add_hookspecs(Hooks)
+
+ class Plugin1(object):
+ @hookimpl
+ def he_method1(self, arg):
+ return arg * 10
+
+ class Plugin2(object):
+ @hookimpl
+ def he_method1(self, arg):
+ return arg * 20
+
+ class Plugin3(object):
+ @hookimpl
+ def he_method1(self, arg):
+ return arg * 30
+
+ out = []
+ pm.register(Plugin1())
+ pm.register(Plugin2())
+
+ he_method1 = pm.hook.he_method1
+ he_method1.call_historic(lambda res: out.append(res), dict(arg=1))
+ assert out == [20, 10]
+ pm.register(Plugin3())
+ assert out == [20, 10, 30]
+
+
+def test_register_historic_incompat_hookwrapper(pm):
+ class Hooks(object):
+ @hookspec(historic=True)
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+
+ out = []
+
+ class Plugin(object):
+ @hookimpl(hookwrapper=True)
+ def he_method1(self, arg):
+ out.append(arg)
+
+ with pytest.raises(PluginValidationError):
+ pm.register(Plugin())
+
+
+def test_call_extra(pm):
+ class Hooks(object):
+ @hookspec
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+
+ def he_method1(arg):
+ return arg * 10
+
+ out = pm.hook.he_method1.call_extra([he_method1], dict(arg=1))
+ assert out == [10]
+
+
+def test_call_with_too_few_args(pm):
+ class Hooks(object):
+ @hookspec
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+
+ class Plugin1(object):
+ @hookimpl
+ def he_method1(self, arg):
+ 0 / 0
+ pm.register(Plugin1())
+ with pytest.raises(HookCallError):
+ with pytest.warns(UserWarning):
+ pm.hook.he_method1()
+
+
+def test_subset_hook_caller(pm):
+ class Hooks(object):
+ @hookspec
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+
+ out = []
+
+ class Plugin1(object):
+ @hookimpl
+ def he_method1(self, arg):
+ out.append(arg)
+
+ class Plugin2(object):
+ @hookimpl
+ def he_method1(self, arg):
+ out.append(arg * 10)
+
+ class PluginNo(object):
+ pass
+
+ plugin1, plugin2, plugin3 = Plugin1(), Plugin2(), PluginNo()
+ pm.register(plugin1)
+ pm.register(plugin2)
+ pm.register(plugin3)
+ pm.hook.he_method1(arg=1)
+ assert out == [10, 1]
+ out[:] = []
+
+ hc = pm.subset_hook_caller("he_method1", [plugin1])
+ hc(arg=2)
+ assert out == [20]
+ out[:] = []
+
+ hc = pm.subset_hook_caller("he_method1", [plugin2])
+ hc(arg=2)
+ assert out == [2]
+ out[:] = []
+
+ pm.unregister(plugin1)
+ hc(arg=2)
+ assert out == []
+ out[:] = []
+
+ pm.hook.he_method1(arg=1)
+ assert out == [10]
+
+
+def test_multicall_deprecated(pm):
+ class P1(object):
+ @hookimpl
+ def m(self, __multicall__, x):
+ pass
+
+ pytest.deprecated_call(pm.register, P1())
+
+
+def test_add_hookspecs_nohooks(pm):
+ with pytest.raises(ValueError):
+ pm.add_hookspecs(10)
+
+
+def test_reject_prefixed_module(pm):
+ """Verify that a module type attribute that contains the project
+ prefix in its name (in this case `'example_*'`) isn't collected
+ when registering a module which imports it.
+ """
+ pm._implprefix = 'example'
+ conftest = types.ModuleType("conftest")
+ src = ("""
+def example_hook():
+ pass
+""")
+ exec(src, conftest.__dict__)
+ conftest.example_blah = types.ModuleType("example_blah")
+ name = pm.register(conftest)
+ assert name == 'conftest'
+ assert getattr(pm.hook, 'example_blah', None) is None
+ assert getattr(pm.hook, 'example_hook', None) # conftest.example_hook should be collected
+ assert pm.parse_hookimpl_opts(conftest, 'example_blah') is None
+ assert pm.parse_hookimpl_opts(conftest, 'example_hook') == {}
diff --git a/third_party/python/pluggy/testing/test_tracer.py b/third_party/python/pluggy/testing/test_tracer.py
new file mode 100644
index 0000000000..4a3e16cec4
--- /dev/null
+++ b/third_party/python/pluggy/testing/test_tracer.py
@@ -0,0 +1,89 @@
+
+from pluggy import _TagTracer
+
+
+def test_simple():
+ rootlogger = _TagTracer()
+ log = rootlogger.get("pytest")
+ log("hello")
+ out = []
+ rootlogger.setwriter(out.append)
+ log("world")
+ assert len(out) == 1
+ assert out[0] == "world [pytest]\n"
+ sublog = log.get("collection")
+ sublog("hello")
+ assert out[1] == "hello [pytest:collection]\n"
+
+
+def test_indent():
+ rootlogger = _TagTracer()
+ log = rootlogger.get("1")
+ out = []
+ log.root.setwriter(lambda arg: out.append(arg))
+ log("hello")
+ log.root.indent += 1
+ log("line1")
+ log("line2")
+ log.root.indent += 1
+ log("line3")
+ log("line4")
+ log.root.indent -= 1
+ log("line5")
+ log.root.indent -= 1
+ log("last")
+ assert len(out) == 7
+ names = [x[:x.rfind(' [')] for x in out]
+ assert names == [
+ 'hello', ' line1', ' line2',
+ ' line3', ' line4', ' line5', 'last']
+
+
+def test_readable_output_dictargs():
+ rootlogger = _TagTracer()
+
+ out = rootlogger.format_message(['test'], [1])
+ assert out == ['1 [test]\n']
+
+ out2 = rootlogger.format_message(['test'], ['test', {'a': 1}])
+ assert out2 == [
+ 'test [test]\n',
+ ' a: 1\n'
+ ]
+
+
+def test_setprocessor():
+ rootlogger = _TagTracer()
+ log = rootlogger.get("1")
+ log2 = log.get("2")
+ assert log2.tags == tuple("12")
+ out = []
+ rootlogger.setprocessor(tuple("12"), lambda *args: out.append(args))
+ log("not seen")
+ log2("seen")
+ assert len(out) == 1
+ tags, args = out[0]
+ assert "1" in tags
+ assert "2" in tags
+ assert args == ("seen",)
+ l2 = []
+ rootlogger.setprocessor("1:2", lambda *args: l2.append(args))
+ log2("seen")
+ tags, args = l2[0]
+ assert args == ("seen",)
+
+
+def test_setmyprocessor():
+ rootlogger = _TagTracer()
+ log = rootlogger.get("1")
+ log2 = log.get("2")
+ out = []
+ log2.setmyprocessor(lambda *args: out.append(args))
+ log("not seen")
+ assert not out
+ log2(42)
+ assert len(out) == 1
+ tags, args = out[0]
+ assert "1" in tags
+ assert "2" in tags
+ assert args == (42,)
diff --git a/third_party/python/pluggy/tox.ini b/third_party/python/pluggy/tox.ini
new file mode 100644
index 0000000000..89d44e352d
--- /dev/null
+++ b/third_party/python/pluggy/tox.ini
@@ -0,0 +1,44 @@
+[tox]
+envlist=check,docs,py{27,34,35,36,py}-pytestrelease,py{27,36}-pytest{master,features}
+
+[testenv]
+commands=py.test {posargs:testing/}
+setenv=
+ _PYTEST_SETUP_SKIP_PLUGGY_DEP=1
+deps=
+ pytestrelease: pytest
+ pytestmaster: git+https://github.com/pytest-dev/pytest.git@master
+ pytestfeatures: git+https://github.com/pytest-dev/pytest.git@features
+
+[testenv:benchmark]
+commands=py.test {posargs:testing/benchmark.py}
+deps=
+ pytest
+ pytest-benchmark
+
+[testenv:check]
+deps =
+ flake8
+ restructuredtext_lint
+ pygments
+commands =
+ flake8 pluggy.py setup.py testing
+ rst-lint CHANGELOG.rst README.rst
+
+[testenv:docs]
+deps =
+ sphinx
+ pygments
+commands =
+ sphinx-build -b html {toxinidir}/docs {toxinidir}/build/html-docs
+
+[pytest]
+minversion=2.0
+#--pyargs --doctest-modules --ignore=.tox
+addopts=-rxsX
+norecursedirs=.tox ja .hg .env*
+filterwarnings =
+ error
+
+[flake8]
+max-line-length=99
diff --git a/third_party/python/ply/ANNOUNCE b/third_party/python/ply/ANNOUNCE
new file mode 100644
index 0000000000..c430051cf4
--- /dev/null
+++ b/third_party/python/ply/ANNOUNCE
@@ -0,0 +1,40 @@
+January 31, 2017
+
+ Announcing : PLY-3.10 (Python Lex-Yacc)
+
+ http://www.dabeaz.com/ply
+
+I'm pleased to announce PLY-3.10--a pure Python implementation of the
+common parsing tools lex and yacc. PLY-3.10 is a minor bug fix
+release. It supports both Python 2 and Python 3.
+
+If you are new to PLY, here are a few highlights:
+
+- PLY is closely modeled after traditional lex/yacc. If you know how
+ to use these or similar tools in other languages, you will find
+ PLY to be comparable.
+
+- PLY provides very extensive error reporting and diagnostic
+ information to assist in parser construction. The original
+ implementation was developed for instructional purposes. As
+ a result, the system tries to identify the most common types
+ of errors made by novice users.
+
+- PLY provides full support for empty productions, error recovery,
+ precedence rules, and ambiguous grammars.
+
+- Parsing is based on LR-parsing which is fast, memory efficient,
+ better suited to large grammars, and which has a number of nice
+ properties when dealing with syntax errors and other parsing
+ problems. Currently, PLY can build its parsing tables using
+ either SLR or LALR(1) algorithms.
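+
+A minimal, illustrative lexer sketch in this style (untested; names are
+placeholders):
+
+    import ply.lex as lex
+
+    tokens = ('NUMBER', 'PLUS')
+    t_PLUS = r'\+'
+    t_ignore = ' \t'
+
+    def t_NUMBER(t):
+        r'\d+'
+        t.value = int(t.value)
+        return t
+
+    def t_error(t):
+        t.lexer.skip(1)
+
+    lexer = lex.lex()
+    lexer.input("3 + 4")
+    for tok in lexer:
+        print(tok.type, tok.value)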
+
+More information about PLY can be obtained on the PLY webpage at:
+
+ http://www.dabeaz.com/ply
+
+PLY is freely available.
+
+Cheers,
+
+David Beazley (http://www.dabeaz.com) \ No newline at end of file
diff --git a/third_party/python/ply/CHANGES b/third_party/python/ply/CHANGES
new file mode 100644
index 0000000000..815c23184e
--- /dev/null
+++ b/third_party/python/ply/CHANGES
@@ -0,0 +1,1394 @@
+Version 3.10
+---------------------
+01/31/17: beazley
+ Changed grammar signature computation to not involve hashing
+ functions. Parts are just combined into a big string.
+
+10/07/16: beazley
+ Fixed Issue #101: Incorrect shift-reduce conflict resolution with
+ precedence specifier.
+
+ PLY was incorrectly resolving shift-reduce conflicts in certain
+ cases. For example, in the example/calc/calc.py example, you
+ could trigger it doing this:
+
+ calc > -3 - 4
+ 1 (correct answer should be -7)
+ calc >
+
+ Issue and suggested patch contributed by https://github.com/RomaVis
+
+Version 3.9
+---------------------
+08/30/16: beazley
+ Exposed the parser state number as the parser.state attribute
+ in productions and error functions. For example:
+
+ def p_somerule(p):
+ '''
+ rule : A B C
+ '''
+ print('State:', p.parser.state)
+
+ May address issue #65 (publish current state in error callback).
+
+08/30/16: beazley
+ Fixed Issue #88. Python3 compatibility with ply/cpp.
+
+08/30/16: beazley
+ Fixed Issue #93. Ply can crash if SyntaxError is raised inside
+ a production. Not actually sure if the original implementation
+ worked as documented at all. Yacc has been modified to follow
+ the spec as outlined in the CHANGES noted for 11/27/07 below.
+
+08/30/16: beazley
+ Fixed Issue #97. Failure with code validation when the original
+ source files aren't present. Validation step now ignores
+ the missing file.
+
+08/30/16: beazley
+ Minor fixes to version numbers.
+
+Version 3.8
+---------------------
+10/02/15: beazley
+ Fixed issues related to Python 3.5. Patch contributed by Barry Warsaw.
+
+Version 3.7
+---------------------
+08/25/15: beazley
+ Fixed problems when reading table files from pickled data.
+
+05/07/15: beazley
+ Fixed regression in handling of table modules if specified as module
+ objects. See https://github.com/dabeaz/ply/issues/63
+
+Version 3.6
+---------------------
+04/25/15: beazley
+ If PLY is unable to create the 'parser.out' or 'parsetab.py' files due
+ to permission issues, it now just issues a warning message and
+ continues to operate. This could happen if a module using PLY
+ is installed in a funny way where tables have to be regenerated, but
+ for whatever reason, the user doesn't have write permission on
+ the directory where PLY wants to put them.
+
+04/24/15: beazley
+ Fixed some issues related to use of packages and table file
+ modules. Just to emphasize, PLY now generates its special
+ files such as 'parsetab.py' and 'lextab.py' in the *SAME*
+ directory as the source file that uses lex() and yacc().
+
+ If for some reason, you want to change the name of the table
+ module, use the tabmodule and lextab options:
+
+ lexer = lex.lex(lextab='spamlextab')
+ parser = yacc.yacc(tabmodule='spamparsetab')
+
+ If you specify a simple name as shown, the module will still be
+ created in the same directory as the file invoking lex() or yacc().
+ If you want the table files to be placed into a different package,
+ then give a fully qualified package name. For example:
+
+ lexer = lex.lex(lextab='pkgname.files.lextab')
+ parser = yacc.yacc(tabmodule='pkgname.files.parsetab')
+
+ For this to work, 'pkgname.files' must already exist as a valid
+ Python package (i.e., the directories must already exist and be
+ set up with the proper __init__.py files, etc.).
+
+Version 3.5
+---------------------
+04/21/15: beazley
+ Added support for defaulted_states in the parser. A
+ defaulted_state is a state where the only legal action is a
+ reduction of a single grammar rule across all valid input
+ tokens. For such states, the rule is reduced and the
+ reading of the next lookahead token is delayed until it is
+ actually needed at a later point in time.
+
+ This delay in consuming the next lookahead token is a
+ potentially important feature in advanced parsing
+ applications that require tight interaction between the
+          lexer and the parser. For example, a grammar rule can
+          modify the lexer state upon reduction and have such changes
+ take effect before the next input token is read.
+
+ *** POTENTIAL INCOMPATIBILITY ***
+ One potential danger of defaulted_states is that syntax
+          errors might be deferred to a later point of processing
+ than where they were detected in past versions of PLY.
+ Thus, it's possible that your error handling could change
+ slightly on the same inputs. defaulted_states do not change
+ the overall parsing of the input (i.e., the same grammar is
+ accepted).
+
+ If for some reason, you need to disable defaulted states,
+ you can do this:
+
+ parser = yacc.yacc()
+ parser.defaulted_states = {}
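+
+          As a rough sketch of the lexer interaction described above (the
+          token, rule, and state names here are purely illustrative, not
+          part of PLY itself), a production can switch the lexer's start
+          state when it reduces, and the delayed lookahead ensures the
+          switch applies before the next token is read:
+
+              def p_begin_verbatim(p):
+                  'begin_verbatim : BEGINVERB'
+                  p.lexer.begin('verbatim')   # next token is lexed in the 'verbatim' state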
+
+04/21/15: beazley
+ Fixed debug logging in the parser. It wasn't properly reporting goto states
+ on grammar rule reductions.
+
+04/20/15: beazley
+ Added actions to be defined to character literals (Issue #32). For example:
+
+ literals = [ '{', '}' ]
+
+ def t_lbrace(t):
+ r'\{'
+ # Some action
+ t.type = '{'
+ return t
+
+ def t_rbrace(t):
+ r'\}'
+ # Some action
+ t.type = '}'
+ return t
+
+04/19/15: beazley
+ Import of the 'parsetab.py' file is now constrained to only consider the
+ directory specified by the outputdir argument to yacc(). If not supplied,
+ the import will only consider the directory in which the grammar is defined.
+ This should greatly reduce problems with the wrong parsetab.py file being
+ imported by mistake. For example, if it's found somewhere else on the path
+ by accident.
+
+ *** POTENTIAL INCOMPATIBILITY *** It's possible that this might break some
+ packaging/deployment setup if PLY was instructed to place its parsetab.py
+ in a different location. You'll have to specify a proper outputdir= argument
+ to yacc() to fix this if needed.
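+
+          For example (the directory name here is just an illustration):
+
+              parser = yacc.yacc(outputdir='generated')   # parsetab.py is written to ./generated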
+
+04/19/15: beazley
+ Changed default output directory to be the same as that in which the
+ yacc grammar is defined. If your grammar is in a file 'calc.py',
+ then the parsetab.py and parser.out files should be generated in the
+ same directory as that file. The destination directory can be changed
+ using the outputdir= argument to yacc().
+
+04/19/15: beazley
+ Changed the parsetab.py file signature slightly so that the parsetab won't
+          regenerate if created on a different major version of Python (i.e., a
+ parsetab created on Python 2 will work with Python 3).
+
+04/16/15: beazley
+ Fixed Issue #44 call_errorfunc() should return the result of errorfunc()
+
+04/16/15: beazley
+ Support for versions of Python <2.7 is officially dropped. PLY may work, but
+          the unit tests require Python 2.7 or newer.
+
+04/16/15: beazley
+ Fixed bug related to calling yacc(start=...). PLY wasn't regenerating the
+ table file correctly for this case.
+
+04/16/15: beazley
+ Added skipped tests for PyPy and Java. Related to use of Python's -O option.
+
+05/29/13: beazley
+ Added filter to make unit tests pass under 'python -3'.
+ Reported by Neil Muller.
+
+05/29/13: beazley
+ Fixed CPP_INTEGER regex in ply/cpp.py (Issue 21).
+ Reported by @vbraun.
+
+05/29/13: beazley
+ Fixed yacc validation bugs when from __future__ import unicode_literals
+ is being used. Reported by Kenn Knowles.
+
+05/29/13: beazley
+ Added support for Travis-CI. Contributed by Kenn Knowles.
+
+05/29/13: beazley
+ Added a .gitignore file. Suggested by Kenn Knowles.
+
+05/29/13: beazley
+ Fixed validation problems for source files that include a
+ different source code encoding specifier. Fix relies on
+ the inspect module. Should work on Python 2.6 and newer.
+ Not sure about older versions of Python.
+ Contributed by Michael Droettboom
+
+05/21/13: beazley
+ Fixed unit tests for yacc to eliminate random failures due to dict hash value
+ randomization in Python 3.3
+ Reported by Arfrever
+
+10/15/12: beazley
+ Fixed comment whitespace processing bugs in ply/cpp.py.
+ Reported by Alexei Pososin.
+
+10/15/12: beazley
+ Fixed token names in ply/ctokens.py to match rule names.
+ Reported by Alexei Pososin.
+
+04/26/12: beazley
+          Changes to functions available in panic mode error recovery. In previous versions
+ of PLY, the following global functions were available for use in the p_error() rule:
+
+ yacc.errok() # Reset error state
+ yacc.token() # Get the next token
+ yacc.restart() # Reset the parsing stack
+
+ The use of global variables was problematic for code involving multiple parsers
+ and frankly was a poor design overall. These functions have been moved to methods
+ of the parser instance created by the yacc() function. You should write code like
+ this:
+
+ def p_error(p):
+ ...
+ parser.errok()
+
+ parser = yacc.yacc()
+
+ *** POTENTIAL INCOMPATIBILITY *** The original global functions now issue a
+ DeprecationWarning.
+
+04/19/12: beazley
+ Fixed some problems with line and position tracking and the use of error
+ symbols. If you have a grammar rule involving an error rule like this:
+
+ def p_assignment_bad(p):
+ '''assignment : location EQUALS error SEMI'''
+ ...
+
+ You can now do line and position tracking on the error token. For example:
+
+ def p_assignment_bad(p):
+ '''assignment : location EQUALS error SEMI'''
+ start_line = p.lineno(3)
+ start_pos = p.lexpos(3)
+
+          If the tracking=True option is supplied to parse(), you can additionally get
+ spans:
+
+ def p_assignment_bad(p):
+ '''assignment : location EQUALS error SEMI'''
+ start_line, end_line = p.linespan(3)
+ start_pos, end_pos = p.lexspan(3)
+
+ Note that error handling is still a hairy thing in PLY. This won't work
+ unless your lexer is providing accurate information. Please report bugs.
+ Suggested by a bug reported by Davis Herring.
+
+04/18/12: beazley
+ Change to doc string handling in lex module. Regex patterns are now first
+ pulled from a function's .regex attribute. If that doesn't exist, then
+          the function's docstring is checked as a fallback. The @TOKEN decorator now sets the .regex
+ attribute of a function instead of its doc string.
+ Changed suggested by Kristoffer Ellersgaard Koch.
+
+04/18/12: beazley
+ Fixed issue #1: Fixed _tabversion. It should use __tabversion__ instead of __version__
+ Reported by Daniele Tricoli
+
+04/18/12: beazley
+ Fixed issue #8: Literals empty list causes IndexError
+ Reported by Walter Nissen.
+
+04/18/12: beazley
+ Fixed issue #12: Typo in code snippet in documentation
+ Reported by florianschanda.
+
+04/18/12: beazley
+ Fixed issue #10: Correctly escape t_XOREQUAL pattern.
+ Reported by Andy Kittner.
+
+Version 3.4
+---------------------
+02/17/11: beazley
+ Minor patch to make cpp.py compatible with Python 3. Note: This
+ is an experimental file not currently used by the rest of PLY.
+
+02/17/11: beazley
+ Fixed setup.py trove classifiers to properly list PLY as
+ Python 3 compatible.
+
+01/02/11: beazley
+ Migration of repository to github.
+
+Version 3.3
+-----------------------------
+08/25/09: beazley
+ Fixed issue 15 related to the set_lineno() method in yacc. Reported by
+ mdsherry.
+
+08/25/09: beazley
+ Fixed a bug related to regular expression compilation flags not being
+ properly stored in lextab.py files created by the lexer when running
+ in optimize mode. Reported by Bruce Frederiksen.
+
+
+Version 3.2
+-----------------------------
+03/24/09: beazley
+ Added an extra check to not print duplicated warning messages
+ about reduce/reduce conflicts.
+
+03/24/09: beazley
+ Switched PLY over to a BSD-license.
+
+03/23/09: beazley
+ Performance optimization. Discovered a few places to make
+ speedups in LR table generation.
+
+03/23/09: beazley
+ New warning message. PLY now warns about rules never
+ reduced due to reduce/reduce conflicts. Suggested by
+ Bruce Frederiksen.
+
+03/23/09: beazley
+ Some clean-up of warning messages related to reduce/reduce errors.
+
+03/23/09: beazley
+ Added a new picklefile option to yacc() to write the parsing
+ tables to a filename using the pickle module. Here is how
+ it works:
+
+ yacc(picklefile="parsetab.p")
+
+ This option can be used if the normal parsetab.py file is
+ extremely large. For example, on jython, it is impossible
+ to read parsing tables if the parsetab.py exceeds a certain
+ threshold.
+
+ The filename supplied to the picklefile option is opened
+ relative to the current working directory of the Python
+ interpreter. If you need to refer to the file elsewhere,
+ you will need to supply an absolute or relative path.
+
+ For maximum portability, the pickle file is written
+ using protocol 0.
+
+03/13/09: beazley
+ Fixed a bug in parser.out generation where the rule numbers
+          were off by one.
+
+03/13/09: beazley
+ Fixed a string formatting bug with one of the error messages.
+ Reported by Richard Reitmeyer
+
+Version 3.1
+-----------------------------
+02/28/09: beazley
+ Fixed broken start argument to yacc(). PLY-3.0 broke this
+ feature by accident.
+
+02/28/09: beazley
+ Fixed debugging output. yacc() no longer reports shift/reduce
+ or reduce/reduce conflicts if debugging is turned off. This
+ restores similar behavior in PLY-2.5. Reported by Andrew Waters.
+
+Version 3.0
+-----------------------------
+02/03/09: beazley
+ Fixed missing lexer attribute on certain tokens when
+ invoking the parser p_error() function. Reported by
+ Bart Whiteley.
+
+02/02/09: beazley
+          The lex() command now does all error-reporting and diagnostics
+ using the logging module interface. Pass in a Logger object
+ using the errorlog parameter to specify a different logger.
+
+02/02/09: beazley
+ Refactored ply.lex to use a more object-oriented and organized
+ approach to collecting lexer information.
+
+02/01/09: beazley
+ Removed the nowarn option from lex(). All output is controlled
+ by passing in a logger object. Just pass in a logger with a high
+ level setting to suppress output. This argument was never
+ documented to begin with so hopefully no one was relying upon it.
+
+02/01/09: beazley
+ Discovered and removed a dead if-statement in the lexer. This
+ resulted in a 6-7% speedup in lexing when I tested it.
+
+01/13/09: beazley
+ Minor change to the procedure for signalling a syntax error in a
+ production rule. A normal SyntaxError exception should be raised
+ instead of yacc.SyntaxError.
+
+01/13/09: beazley
+ Added a new method p.set_lineno(n,lineno) that can be used to set the
+ line number of symbol n in grammar rules. This simplifies manual
+ tracking of line numbers.
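+
+          For example (a minimal sketch; the rule itself is illustrative):
+
+              def p_statement_assign(p):
+                  'statement : NAME EQUALS expression'
+                  p.set_lineno(0, p.lineno(1))   # give the result the line number of NAME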
+
+01/11/09: beazley
+ Vastly improved debugging support for yacc.parse(). Instead of passing
+ debug as an integer, you can supply a Logging object (see the logging
+ module). Messages will be generated at the ERROR, INFO, and DEBUG
+ logging levels, each level providing progressively more information.
+          The debugging trace also shows states, grammar rules, values passed
+ into grammar rules, and the result of each reduction.
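+
+          For example (a sketch; the logger configuration and filename are
+          up to the application):
+
+              import logging
+              logging.basicConfig(level=logging.DEBUG, filename='parselog.txt')
+              parser.parse(data, debug=logging.getLogger())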
+
+01/09/09: beazley
+ The yacc() command now does all error-reporting and diagnostics using
+ the interface of the logging module. Use the errorlog parameter to
+ specify a logging object for error messages. Use the debuglog parameter
+ to specify a logging object for the 'parser.out' output.
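+
+          For example (a sketch; the logger names are illustrative):
+
+              import logging
+              parser = yacc.yacc(errorlog=logging.getLogger('ply.errors'),
+                                 debuglog=logging.getLogger('ply.debug'))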
+
+01/09/09: beazley
+          *HUGE* refactoring of the ply.yacc() implementation. The high-level
+ user interface is backwards compatible, but the internals are completely
+ reorganized into classes. No more global variables. The internals
+ are also more extensible. For example, you can use the classes to
+ construct a LALR(1) parser in an entirely different manner than
+ what is currently the case. Documentation is forthcoming.
+
+01/07/09: beazley
+ Various cleanup and refactoring of yacc internals.
+
+01/06/09: beazley
+ Fixed a bug with precedence assignment. yacc was assigning the precedence
+          of each rule based on the left-most token, when in fact, it should have been
+ using the right-most token. Reported by Bruce Frederiksen.
+
+11/27/08: beazley
+ Numerous changes to support Python 3.0 including removal of deprecated
+          statements (e.g., has_key) and the addition of compatibility code
+ to emulate features from Python 2 that have been removed, but which
+ are needed. Fixed the unit testing suite to work with Python 3.0.
+ The code should be backwards compatible with Python 2.
+
+11/26/08: beazley
+ Loosened the rules on what kind of objects can be passed in as the
+ "module" parameter to lex() and yacc(). Previously, you could only use
+ a module or an instance. Now, PLY just uses dir() to get a list of
+ symbols on whatever the object is without regard for its type.
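+
+          For example, the instance form looks roughly like this (a condensed
+          sketch; the class and rules are illustrative):
+
+              class CalcRules(object):
+                  tokens = ('NUMBER',)
+                  t_NUMBER = r'\d+'
+                  t_ignore = ' '
+                  def t_error(self, t):
+                      t.lexer.skip(1)
+
+              lexer = lex.lex(module=CalcRules())   # rules pulled from the instance via dir()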
+
+11/26/08: beazley
+ Changed all except: statements to be compatible with Python2.x/3.x syntax.
+
+11/26/08: beazley
+ Changed all raise Exception, value statements to raise Exception(value) for
+ forward compatibility.
+
+11/26/08: beazley
+ Removed all print statements from lex and yacc, using sys.stdout and sys.stderr
+ directly. Preparation for Python 3.0 support.
+
+11/04/08: beazley
+          Fixed a bug with referring to symbols on the parsing stack using negative
+ indices.
+
+05/29/08: beazley
+ Completely revamped the testing system to use the unittest module for everything.
+ Added additional tests to cover new errors/warnings.
+
+Version 2.5
+-----------------------------
+05/28/08: beazley
+ Fixed a bug with writing lex-tables in optimized mode and start states.
+ Reported by Kevin Henry.
+
+Version 2.4
+-----------------------------
+05/04/08: beazley
+ A version number is now embedded in the table file signature so that
+          yacc can more gracefully accommodate changes to the output format
+ in the future.
+
+05/04/08: beazley
+ Removed undocumented .pushback() method on grammar productions. I'm
+ not sure this ever worked and can't recall ever using it. Might have
+ been an abandoned idea that never really got fleshed out. This
+ feature was never described or tested so removing it is hopefully
+ harmless.
+
+05/04/08: beazley
+ Added extra error checking to yacc() to detect precedence rules defined
+ for undefined terminal symbols. This allows yacc() to detect a potential
+ problem that can be really tricky to debug if no warning message or error
+ message is generated about it.
+
+05/04/08: beazley
+ lex() now has an outputdir that can specify the output directory for
+ tables when running in optimize mode. For example:
+
+ lexer = lex.lex(optimize=True, lextab="ltab", outputdir="foo/bar")
+
+ The behavior of specifying a table module and output directory are
+ more aligned with the behavior of yacc().
+
+05/04/08: beazley
+ [Issue 9]
+          Fixed filename bug when specifying the modulename in lex() and yacc().
+ If you specified options such as the following:
+
+ parser = yacc.yacc(tabmodule="foo.bar.parsetab",outputdir="foo/bar")
+
+ yacc would create a file "foo.bar.parsetab.py" in the given directory.
+ Now, it simply generates a file "parsetab.py" in that directory.
+ Bug reported by cptbinho.
+
+05/04/08: beazley
+ Slight modification to lex() and yacc() to allow their table files
+ to be loaded from a previously loaded module. This might make
+ it easier to load the parsing tables from a complicated package
+ structure. For example:
+
+ import foo.bar.spam.parsetab as parsetab
+ parser = yacc.yacc(tabmodule=parsetab)
+
+ Note: lex and yacc will never regenerate the table file if used
+          in this form---you will get a warning message instead.
+ This idea suggested by Brian Clapper.
+
+
+04/28/08: beazley
+          Fixed a bug with p_error() functions not being picked up correctly
+ when running in yacc(optimize=1) mode. Patch contributed by
+ Bart Whiteley.
+
+02/28/08: beazley
+ Fixed a bug with 'nonassoc' precedence rules. Basically the
+          'nonassoc' precedence was being ignored and not producing the correct
+ run-time behavior in the parser.
+
+02/16/08: beazley
+ Slight relaxation of what the input() method to a lexer will
+ accept as a string. Instead of testing the input to see
+ if the input is a string or unicode string, it checks to see
+ if the input object looks like it contains string data.
+ This change makes it possible to pass string-like objects
+ in as input. For example, the object returned by mmap.
+
+ import mmap, os
+ data = mmap.mmap(os.open(filename,os.O_RDONLY),
+ os.path.getsize(filename),
+ access=mmap.ACCESS_READ)
+ lexer.input(data)
+
+
+11/29/07: beazley
+          Modification of ply.lex to allow token functions to be aliased.
+ This is subtle, but it makes it easier to create libraries and
+ to reuse token specifications. For example, suppose you defined
+ a function like this:
+
+ def number(t):
+ r'\d+'
+ t.value = int(t.value)
+ return t
+
+ This change would allow you to define a token rule as follows:
+
+ t_NUMBER = number
+
+ In this case, the token type will be set to 'NUMBER' and use
+ the associated number() function to process tokens.
+
+11/28/07: beazley
+ Slight modification to lex and yacc to grab symbols from both
+ the local and global dictionaries of the caller. This
+ modification allows lexers and parsers to be defined using
+ inner functions and closures.
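+
+          For example (a condensed sketch; the rules are illustrative):
+
+              def make_lexer():
+                  tokens = ('NUMBER',)
+                  t_NUMBER = r'\d+'
+                  t_ignore = ' '
+                  def t_error(t):
+                      t.lexer.skip(1)
+                  return lex.lex()    # picks up the rules defined inside this function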
+
+11/28/07: beazley
+ Performance optimization: The lexer.lexmatch and t.lexer
+ attributes are no longer set for lexer tokens that are not
+ defined by functions. The only normal use of these attributes
+ would be in lexer rules that need to perform some kind of
+ special processing. Thus, it doesn't make any sense to set
+ them on every token.
+
+ *** POTENTIAL INCOMPATIBILITY *** This might break code
+ that is mucking around with internal lexer state in some
+ sort of magical way.
+
+11/27/07: beazley
+ Added the ability to put the parser into error-handling mode
+ from within a normal production. To do this, simply raise
+ a yacc.SyntaxError exception like this:
+
+ def p_some_production(p):
+ 'some_production : prod1 prod2'
+ ...
+ raise yacc.SyntaxError # Signal an error
+
+ A number of things happen after this occurs:
+
+ - The last symbol shifted onto the symbol stack is discarded
+            and the parser state backed up to what it was before
+            the rule reduction.
+
+ - The current lookahead symbol is saved and replaced by
+ the 'error' symbol.
+
+ - The parser enters error recovery mode where it tries
+ to either reduce the 'error' rule or it starts
+ discarding items off of the stack until the parser
+ resets.
+
+ When an error is manually set, the parser does *not* call
+ the p_error() function (if any is defined).
+ *** NEW FEATURE *** Suggested on the mailing list
+
+11/27/07: beazley
+ Fixed structure bug in examples/ansic. Reported by Dion Blazakis.
+
+11/27/07: beazley
+ Fixed a bug in the lexer related to start conditions and ignored
+ token rules. If a rule was defined that changed state, but
+ returned no token, the lexer could be left in an inconsistent
+ state. Reported by
+
+11/27/07: beazley
+ Modified setup.py to support Python Eggs. Patch contributed by
+ Simon Cross.
+
+11/09/07: beazley
+ Fixed a bug in error handling in yacc. If a syntax error occurred and the
+          parser rolled the entire parse stack back, the parser would be left in an
+ inconsistent state that would cause it to trigger incorrect actions on
+ subsequent input. Reported by Ton Biegstraaten, Justin King, and others.
+
+11/09/07: beazley
+ Fixed a bug when passing empty input strings to yacc.parse(). This
+ would result in an error message about "No input given". Reported
+ by Andrew Dalke.
+
+Version 2.3
+-----------------------------
+02/20/07: beazley
+ Fixed a bug with character literals if the literal '.' appeared as the
+ last symbol of a grammar rule. Reported by Ales Smrcka.
+
+02/19/07: beazley
+ Warning messages are now redirected to stderr instead of being printed
+ to standard output.
+
+02/19/07: beazley
+ Added a warning message to lex.py if it detects a literal backslash
+          character inside the t_ignore declaration. This is to help catch
+ problems that might occur if someone accidentally defines t_ignore
+ as a Python raw string. For example:
+
+ t_ignore = r' \t'
+
+ The idea for this is from an email I received from David Cimimi who
+ reported bizarre behavior in lexing as a result of defining t_ignore
+ as a raw string by accident.
+
+02/18/07: beazley
+ Performance improvements. Made some changes to the internal
+ table organization and LR parser to improve parsing performance.
+
+02/18/07: beazley
+ Automatic tracking of line number and position information must now be
+ enabled by a special flag to parse(). For example:
+
+ yacc.parse(data,tracking=True)
+
+ In many applications, it's just not that important to have the
+ parser automatically track all line numbers. By making this an
+ optional feature, it allows the parser to run significantly faster
+ (more than a 20% speed increase in many cases). Note: positional
+ information is always available for raw tokens---this change only
+ applies to positional information associated with nonterminal
+ grammar symbols.
+ *** POTENTIAL INCOMPATIBILITY ***
+
+02/18/07: beazley
+ Yacc no longer supports extended slices of grammar productions.
+ However, it does support regular slices. For example:
+
+ def p_foo(p):
+ '''foo: a b c d e'''
+ p[0] = p[1:3]
+
+ This change is a performance improvement to the parser--it streamlines
+ normal access to the grammar values since slices are now handled in
+ a __getslice__() method as opposed to __getitem__().
+
+02/12/07: beazley
+ Fixed a bug in the handling of token names when combined with
+ start conditions. Bug reported by Todd O'Bryan.
+
+Version 2.2
+------------------------------
+11/01/06: beazley
+ Added lexpos() and lexspan() methods to grammar symbols. These
+ mirror the same functionality of lineno() and linespan(). For
+ example:
+
+ def p_expr(p):
+ 'expr : expr PLUS expr'
+ p.lexpos(1) # Lexing position of left-hand-expression
+              p.lexpos(2)       # Lexing position of PLUS
+ start,end = p.lexspan(3) # Lexing range of right hand expression
+
+11/01/06: beazley
+ Minor change to error handling. The recommended way to skip characters
+ in the input is to use t.lexer.skip() as shown here:
+
+ def t_error(t):
+ print "Illegal character '%s'" % t.value[0]
+ t.lexer.skip(1)
+
+ The old approach of just using t.skip(1) will still work, but won't
+ be documented.
+
+10/31/06: beazley
+ Discarded tokens can now be specified as simple strings instead of
+ functions. To do this, simply include the text "ignore_" in the
+ token declaration. For example:
+
+ t_ignore_cppcomment = r'//.*'
+
+ Previously, this had to be done with a function. For example:
+
+ def t_ignore_cppcomment(t):
+ r'//.*'
+ pass
+
+ If start conditions/states are being used, state names should appear
+ before the "ignore_" text.
+
+10/19/06: beazley
+ The Lex module now provides support for flex-style start conditions
+ as described at http://www.gnu.org/software/flex/manual/html_chapter/flex_11.html.
+ Please refer to this document to understand this change note. Refer to
+ the PLY documentation for PLY-specific explanation of how this works.
+
+ To use start conditions, you first need to declare a set of states in
+ your lexer file:
+
+ states = (
+ ('foo','exclusive'),
+ ('bar','inclusive')
+ )
+
+ This serves the same role as the %s and %x specifiers in flex.
+
+          Once a state has been declared, tokens for that state can be
+ declared by defining rules of the form t_state_TOK. For example:
+
+ t_PLUS = '\+' # Rule defined in INITIAL state
+ t_foo_NUM = '\d+' # Rule defined in foo state
+ t_bar_NUM = '\d+' # Rule defined in bar state
+
+ t_foo_bar_NUM = '\d+' # Rule defined in both foo and bar
+ t_ANY_NUM = '\d+' # Rule defined in all states
+
+ In addition to defining tokens for each state, the t_ignore and t_error
+ specifications can be customized for specific states. For example:
+
+ t_foo_ignore = " " # Ignored characters for foo state
+ def t_bar_error(t):
+ # Handle errors in bar state
+
+ With token rules, the following methods can be used to change states
+
+ def t_TOKNAME(t):
+ t.lexer.begin('foo') # Begin state 'foo'
+ t.lexer.push_state('foo') # Begin state 'foo', push old state
+ # onto a stack
+ t.lexer.pop_state() # Restore previous state
+ t.lexer.current_state() # Returns name of current state
+
+ These methods mirror the BEGIN(), yy_push_state(), yy_pop_state(), and
+ yy_top_state() functions in flex.
+
+          Start states can be used as one way to write sub-lexers.
+ For example, the lexer or parser might instruct the lexer to start
+ generating a different set of tokens depending on the context.
+
+ example/yply/ylex.py shows the use of start states to grab C/C++
+ code fragments out of traditional yacc specification files.
+
+ *** NEW FEATURE *** Suggested by Daniel Larraz with whom I also
+ discussed various aspects of the design.
+
+10/19/06: beazley
+ Minor change to the way in which yacc.py was reporting shift/reduce
+ conflicts. Although the underlying LALR(1) algorithm was correct,
+ PLY was under-reporting the number of conflicts compared to yacc/bison
+ when precedence rules were in effect. This change should make PLY
+ report the same number of conflicts as yacc.
+
+10/19/06: beazley
+ Modified yacc so that grammar rules could also include the '-'
+ character. For example:
+
+ def p_expr_list(p):
+ 'expression-list : expression-list expression'
+
+ Suggested by Oldrich Jedlicka.
+
+10/18/06: beazley
+ Attribute lexer.lexmatch added so that token rules can access the re
+ match object that was generated. For example:
+
+ def t_FOO(t):
+ r'some regex'
+ m = t.lexer.lexmatch
+ # Do something with m
+
+
+ This may be useful if you want to access named groups specified within
+ the regex for a specific token. Suggested by Oldrich Jedlicka.
+
+10/16/06: beazley
+ Changed the error message that results if an illegal character
+ is encountered and no default error function is defined in lex.
+ The exception is now more informative about the actual cause of
+ the error.
+
+Version 2.1
+------------------------------
+10/02/06: beazley
+ The last Lexer object built by lex() can be found in lex.lexer.
+ The last Parser object built by yacc() can be found in yacc.parser.
+
+10/02/06: beazley
+ New example added: examples/yply
+
+ This example uses PLY to convert Unix-yacc specification files to
+ PLY programs with the same grammar. This may be useful if you
+ want to convert a grammar from bison/yacc to use with PLY.
+
+10/02/06: beazley
+ Added support for a start symbol to be specified in the yacc
+ input file itself. Just do this:
+
+ start = 'name'
+
+ where 'name' matches some grammar rule. For example:
+
+ def p_name(p):
+ 'name : A B C'
+ ...
+
+ This mirrors the functionality of the yacc %start specifier.
+
+09/30/06: beazley
+          Some new examples added:
+
+ examples/GardenSnake : A simple indentation based language similar
+ to Python. Shows how you might handle
+ whitespace. Contributed by Andrew Dalke.
+
+ examples/BASIC : An implementation of 1964 Dartmouth BASIC.
+ Contributed by Dave against his better
+ judgement.
+
+09/28/06: beazley
+ Minor patch to allow named groups to be used in lex regular
+ expression rules. For example:
+
+ t_QSTRING = r'''(?P<quote>['"]).*?(?P=quote)'''
+
+ Patch submitted by Adam Ring.
+
+09/28/06: beazley
+ LALR(1) is now the default parsing method. To use SLR, use
+ yacc.yacc(method="SLR"). Note: there is no performance impact
+ on parsing when using LALR(1) instead of SLR. However, constructing
+ the parsing tables will take a little longer.
+
+09/26/06: beazley
+ Change to line number tracking. To modify line numbers, modify
+ the line number of the lexer itself. For example:
+
+ def t_NEWLINE(t):
+ r'\n'
+ t.lexer.lineno += 1
+
+ This modification is both cleanup and a performance optimization.
+ In past versions, lex was monitoring every token for changes in
+ the line number. This extra processing is unnecessary for a vast
+ majority of tokens. Thus, this new approach cleans it up a bit.
+
+ *** POTENTIAL INCOMPATIBILITY ***
+ You will need to change code in your lexer that updates the line
+ number. For example, "t.lineno += 1" becomes "t.lexer.lineno += 1"
+
+09/26/06: beazley
+ Added the lexing position to tokens as an attribute lexpos. This
+ is the raw index into the input text at which a token appears.
+ This information can be used to compute column numbers and other
+ details (e.g., scan backwards from lexpos to the first newline
+ to get a column position).
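+
+          For example, a small helper along those lines (a sketch; it assumes
+          the original input text is still available):
+
+              def find_column(text, token):
+                  line_start = text.rfind('\n', 0, token.lexpos) + 1
+                  return (token.lexpos - line_start) + 1    # 1-based column number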
+
+09/25/06: beazley
+ Changed the name of the __copy__() method on the Lexer class
+ to clone(). This is used to clone a Lexer object (e.g., if
+ you're running different lexers at the same time).
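+
+          For example:
+
+              lexer = lex.lex()
+              lexer2 = lexer.clone()        # independent lexer sharing the same rules
+              lexer.input("first input")
+              lexer2.input("second input")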
+
+09/21/06: beazley
+ Limitations related to the use of the re module have been eliminated.
+ Several users reported problems with regular expressions exceeding
+ more than 100 named groups. To solve this, lex.py is now capable
+          of automatically splitting its master regular expression into
+ smaller expressions as needed. This should, in theory, make it
+ possible to specify an arbitrarily large number of tokens.
+
+09/21/06: beazley
+ Improved error checking in lex.py. Rules that match the empty string
+ are now rejected (otherwise they cause the lexer to enter an infinite
+ loop). An extra check for rules containing '#' has also been added.
+ Since lex compiles regular expressions in verbose mode, '#' is interpreted
+          as a regex comment, so it is critical to use '\#' instead.
+
+09/18/06: beazley
+ Added a @TOKEN decorator function to lex.py that can be used to
+ define token rules where the documentation string might be computed
+ in some way.
+
+ digit = r'([0-9])'
+ nondigit = r'([_A-Za-z])'
+ identifier = r'(' + nondigit + r'(' + digit + r'|' + nondigit + r')*)'
+
+ from ply.lex import TOKEN
+
+ @TOKEN(identifier)
+ def t_ID(t):
+ # Do whatever
+
+ The @TOKEN decorator merely sets the documentation string of the
+ associated token function as needed for lex to work.
+
+ Note: An alternative solution is the following:
+
+ def t_ID(t):
+ # Do whatever
+
+ t_ID.__doc__ = identifier
+
+ Note: Decorators require the use of Python 2.4 or later. If compatibility
+ with old versions is needed, use the latter solution.
+
+ The need for this feature was suggested by Cem Karan.
+
+09/14/06: beazley
+ Support for single-character literal tokens has been added to yacc.
+ These literals must be enclosed in quotes. For example:
+
+ def p_expr(p):
+ "expr : expr '+' expr"
+ ...
+
+ def p_expr(p):
+ 'expr : expr "-" expr'
+ ...
+
+ In addition to this, it is necessary to tell the lexer module about
+ literal characters. This is done by defining the variable 'literals'
+ as a list of characters. This should be defined in the module that
+ invokes the lex.lex() function. For example:
+
+ literals = ['+','-','*','/','(',')','=']
+
+ or simply
+
+ literals = '+=*/()='
+
+ It is important to note that literals can only be a single character.
+ When the lexer fails to match a token using its normal regular expression
+ rules, it will check the current character against the literal list.
+ If found, it will be returned with a token type set to match the literal
+ character. Otherwise, an illegal character will be signalled.
+
+
+09/14/06: beazley
+ Modified PLY to install itself as a proper Python package called 'ply'.
+ This will make it a little more friendly to other modules. This
+ changes the usage of PLY only slightly. Just do this to import the
+ modules
+
+ import ply.lex as lex
+ import ply.yacc as yacc
+
+ Alternatively, you can do this:
+
+ from ply import *
+
+ Which imports both the lex and yacc modules.
+ Change suggested by Lee June.
+
+09/13/06: beazley
+ Changed the handling of negative indices when used in production rules.
+ A negative production index now accesses already parsed symbols on the
+ parsing stack. For example,
+
+ def p_foo(p):
+ "foo: A B C D"
+ print p[1] # Value of 'A' symbol
+ print p[2] # Value of 'B' symbol
+ print p[-1] # Value of whatever symbol appears before A
+ # on the parsing stack.
+
+              p[0] = some_val           # Sets the value of the 'foo' grammar symbol
+
+ This behavior makes it easier to work with embedded actions within the
+ parsing rules. For example, in C-yacc, it is possible to write code like
+ this:
+
+ bar: A { printf("seen an A = %d\n", $1); } B { do_stuff; }
+
+ In this example, the printf() code executes immediately after A has been
+ parsed. Within the embedded action code, $1 refers to the A symbol on
+ the stack.
+
+ To perform this equivalent action in PLY, you need to write a pair
+ of rules like this:
+
+ def p_bar(p):
+ "bar : A seen_A B"
+ do_stuff
+
+ def p_seen_A(p):
+ "seen_A :"
+ print "seen an A =", p[-1]
+
+          The second rule "seen_A" is merely an empty production which should be
+ reduced as soon as A is parsed in the "bar" rule above. The use
+ of the negative index p[-1] is used to access whatever symbol appeared
+ before the seen_A symbol.
+
+ This feature also makes it possible to support inherited attributes.
+ For example:
+
+ def p_decl(p):
+ "decl : scope name"
+
+ def p_scope(p):
+ """scope : GLOBAL
+ | LOCAL"""
+ p[0] = p[1]
+
+ def p_name(p):
+ "name : ID"
+ if p[-1] == "GLOBAL":
+ # ...
+              elif p[-1] == "LOCAL":
+ #...
+
+ In this case, the name rule is inheriting an attribute from the
+ scope declaration that precedes it.
+
+ *** POTENTIAL INCOMPATIBILITY ***
+ If you are currently using negative indices within existing grammar rules,
+ your code will break. This should be extremely rare if non-existent in
+          most cases. The argument to various grammar rules is not usually
+ processed in the same way as a list of items.
+
+Version 2.0
+------------------------------
+09/07/06: beazley
+ Major cleanup and refactoring of the LR table generation code. Both SLR
+ and LALR(1) table generation is now performed by the same code base with
+ only minor extensions for extra LALR(1) processing.
+
+09/07/06: beazley
+ Completely reimplemented the entire LALR(1) parsing engine to use the
+ DeRemer and Pennello algorithm for calculating lookahead sets. This
+ significantly improves the performance of generating LALR(1) tables
+ and has the added feature of actually working correctly! If you
+ experienced weird behavior with LALR(1) in prior releases, this should
+ hopefully resolve all of those problems. Many thanks to
+ Andrew Waters and Markus Schoepflin for submitting bug reports
+ and helping me test out the revised LALR(1) support.
+
+Version 1.8
+------------------------------
+08/02/06: beazley
+ Fixed a problem related to the handling of default actions in LALR(1)
+ parsing. If you experienced subtle and/or bizarre behavior when trying
+ to use the LALR(1) engine, this may correct those problems. Patch
+          contributed by Russ Cox. Note: This patch has been superseded by
+ revisions for LALR(1) parsing in Ply-2.0.
+
+08/02/06: beazley
+ Added support for slicing of productions in yacc.
+ Patch contributed by Patrick Mezard.
+
+Version 1.7
+------------------------------
+03/02/06: beazley
+          Fixed infinite recursion problem in the ReduceToTerminals() function that
+ would sometimes come up in LALR(1) table generation. Reported by
+ Markus Schoepflin.
+
+03/01/06: beazley
+ Added "reflags" argument to lex(). For example:
+
+ lex.lex(reflags=re.UNICODE)
+
+ This can be used to specify optional flags to the re.compile() function
+ used inside the lexer. This may be necessary for special situations such
+ as processing Unicode (e.g., if you want escapes like \w and \b to consult
+ the Unicode character property database). The need for this suggested by
+ Andreas Jung.
+
+03/01/06: beazley
+ Fixed a bug with an uninitialized variable on repeated instantiations of parser
+ objects when the write_tables=0 argument was used. Reported by Michael Brown.
+
+03/01/06: beazley
+ Modified lex.py to accept Unicode strings both as the regular expressions for
+ tokens and as input. Hopefully this is the only change needed for Unicode support.
+ Patch contributed by Johan Dahl.
+
+03/01/06: beazley
+ Modified the class-based interface to work with new-style or old-style classes.
+ Patch contributed by Michael Brown (although I tweaked it slightly so it would work
+ with older versions of Python).
+
+Version 1.6
+------------------------------
+05/27/05: beazley
+ Incorporated patch contributed by Christopher Stawarz to fix an extremely
+ devious bug in LALR(1) parser generation. This patch should fix problems
+ numerous people reported with LALR parsing.
+
+05/27/05: beazley
+ Fixed problem with lex.py copy constructor. Reported by Dave Aitel, Aaron Lav,
+ and Thad Austin.
+
+05/27/05: beazley
+ Added outputdir option to yacc() to control output directory. Contributed
+ by Christopher Stawarz.
+
+05/27/05: beazley
+ Added rununit.py test script to run tests using the Python unittest module.
+ Contributed by Miki Tebeka.
+
+Version 1.5
+------------------------------
+05/26/04: beazley
+ Major enhancement. LALR(1) parsing support is now working.
+ This feature was implemented by Elias Ioup (ezioup@alumni.uchicago.edu)
+ and optimized by David Beazley. To use LALR(1) parsing do
+ the following:
+
+ yacc.yacc(method="LALR")
+
+ Computing LALR(1) parsing tables takes about twice as long as
+ the default SLR method. However, LALR(1) allows you to handle
+ more complex grammars. For example, the ANSI C grammar
+ (in example/ansic) has 13 shift-reduce conflicts with SLR, but
+ only has 1 shift-reduce conflict with LALR(1).
+
+05/20/04: beazley
+ Added a __len__ method to parser production lists. Can
+ be used in parser rules like this:
+
+ def p_somerule(p):
+ """a : B C D
+                 | E F"""
+ if (len(p) == 3):
+ # Must have been first rule
+ elif (len(p) == 2):
+ # Must be second rule
+
+ Suggested by Joshua Gerth and others.
+
+Version 1.4
+------------------------------
+04/23/04: beazley
+ Incorporated a variety of patches contributed by Eric Raymond.
+ These include:
+
+ 0. Cleans up some comments so they don't wrap on an 80-column display.
+ 1. Directs compiler errors to stderr where they belong.
+ 2. Implements and documents automatic line counting when \n is ignored.
+ 3. Changes the way progress messages are dumped when debugging is on.
+ The new format is both less verbose and conveys more information than
+ the old, including shift and reduce actions.
+
+04/23/04: beazley
+          Added a Python setup.py file to simplify installation. Contributed
+ by Adam Kerrison.
+
+04/23/04: beazley
+ Added patches contributed by Adam Kerrison.
+
+ - Some output is now only shown when debugging is enabled. This
+ means that PLY will be completely silent when not in debugging mode.
+
+ - An optional parameter "write_tables" can be passed to yacc() to
+ control whether or not parsing tables are written. By default,
+ it is true, but it can be turned off if you don't want the yacc
+ table file. Note: disabling this will cause yacc() to regenerate
+ the parsing table each time.
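+
+            For example (a sketch):
+
+                parser = yacc.yacc(write_tables=False)   # regenerate tables on every run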
+
+04/23/04: beazley
+          Added patches contributed by David McNab. This patch adds two
+ features:
+
+ - The parser can be supplied as a class instead of a module.
+ For an example of this, see the example/classcalc directory.
+
+ - Debugging output can be directed to a filename of the user's
+ choice. Use
+
+ yacc(debugfile="somefile.out")
+
+
+Version 1.3
+------------------------------
+12/10/02: jmdyck
+ Various minor adjustments to the code that Dave checked in today.
+ Updated test/yacc_{inf,unused}.exp to reflect today's changes.
+
+12/10/02: beazley
+ Incorporated a variety of minor bug fixes to empty production
+ handling and infinite recursion checking. Contributed by
+ Michael Dyck.
+
+12/10/02: beazley
+ Removed bogus recover() method call in yacc.restart()
+
+Version 1.2
+------------------------------
+11/27/02: beazley
+ Lexer and parser objects are now available as an attribute
+ of tokens and slices respectively. For example:
+
+ def t_NUMBER(t):
+ r'\d+'
+ print t.lexer
+
+ def p_expr_plus(t):
+ 'expr: expr PLUS expr'
+ print t.lexer
+ print t.parser
+
+ This can be used for state management (if needed).
+
+10/31/02: beazley
+ Modified yacc.py to work with Python optimize mode. To make
+ this work, you need to use
+
+ yacc.yacc(optimize=1)
+
+ Furthermore, you need to first run Python in normal mode
+ to generate the necessary parsetab.py files. After that,
+ you can use python -O or python -OO.
+
+ Note: optimized mode turns off a lot of error checking.
+ Only use when you are sure that your grammar is working.
+ Make sure parsetab.py is up to date!
+
+10/30/02: beazley
+ Added cloning of Lexer objects. For example:
+
+ import copy
+ l = lex.lex()
+ lc = copy.copy(l)
+
+ l.input("Some text")
+ lc.input("Some other text")
+ ...
+
+ This might be useful if the same "lexer" is meant to
+ be used in different contexts---or if multiple lexers
+ are running concurrently.
+
+10/30/02: beazley
+ Fixed subtle bug with first set computation and empty productions.
+ Patch submitted by Michael Dyck.
+
+10/30/02: beazley
+ Fixed error messages to use "filename:line: message" instead
+ of "filename:line. message". This makes error reporting more
+          friendly to emacs. Patch submitted by François Pinard.
+
+10/30/02: beazley
+ Improvements to parser.out file. Terminals and nonterminals
+ are sorted instead of being printed in random order.
+          Patch submitted by François Pinard.
+
+10/30/02: beazley
+ Improvements to parser.out file output. Rules are now printed
+ in a way that's easier to understand. Contributed by Russ Cox.
+
+10/30/02: beazley
+ Added 'nonassoc' associativity support. This can be used
+ to disable the chaining of operators like a < b < c.
+ To use, simply specify 'nonassoc' in the precedence table
+
+ precedence = (
+ ('nonassoc', 'LESSTHAN', 'GREATERTHAN'), # Nonassociative operators
+ ('left', 'PLUS', 'MINUS'),
+ ('left', 'TIMES', 'DIVIDE'),
+ ('right', 'UMINUS'), # Unary minus operator
+ )
+
+ Patch contributed by Russ Cox.
+
+10/30/02: beazley
+ Modified the lexer to provide optional support for Python -O and -OO
+ modes. To make this work, Python *first* needs to be run in
+ unoptimized mode. This reads the lexing information and creates a
+ file "lextab.py". Then, run lex like this:
+
+ # module foo.py
+ ...
+ ...
+ lex.lex(optimize=1)
+
+ Once the lextab file has been created, subsequent calls to
+ lex.lex() will read data from the lextab file instead of using
+ introspection. In optimized mode (-O, -OO) everything should
+ work normally despite the loss of doc strings.
+
+ To change the name of the file 'lextab.py' use the following:
+
+ lex.lex(lextab="footab")
+
+ (this creates a file footab.py)
+
+
+Version 1.1 October 25, 2001
+------------------------------
+
+10/25/01: beazley
+ Modified the table generator to produce much more compact data.
+ This should greatly reduce the size of the parsetab.py[c] file.
+ Caveat: the tables still need to be constructed so a little more
+ work is done in parsetab on import.
+
+10/25/01: beazley
+ There may be a possible bug in the cycle detector that reports errors
+ about infinite recursion. I'm having a little trouble tracking it
+ down, but if you get this problem, you can disable the cycle
+ detector as follows:
+
+ yacc.yacc(check_recursion = 0)
+
+10/25/01: beazley
+ Fixed a bug in lex.py that sometimes caused illegal characters to be
+          reported incorrectly. Reported by Sverre Jørgensen.
+
+7/8/01 : beazley
+ Added a reference to the underlying lexer object when tokens are handled by
+ functions. The lexer is available as the 'lexer' attribute. This
+ was added to provide better lexing support for languages such as Fortran
+ where certain types of tokens can't be conveniently expressed as regular
+ expressions (and where the tokenizing function may want to perform a
+ little backtracking). Suggested by Pearu Peterson.
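+
+          For example (a sketch of the idea; the comment-scanning rule is
+          illustrative, not part of PLY):
+
+              def t_COMMENT(t):
+                  r'/\*'
+                  # Scan ahead manually using the underlying lexer object
+                  end = t.lexer.lexdata.find('*/', t.lexer.lexpos)
+                  t.lexer.lexpos = len(t.lexer.lexdata) if end < 0 else end + 2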
+
+6/20/01 : beazley
+ Modified yacc() function so that an optional starting symbol can be specified.
+ For example:
+
+ yacc.yacc(start="statement")
+
+ Normally yacc always treats the first production rule as the starting symbol.
+ However, if you are debugging your grammar it may be useful to specify
+ an alternative starting symbol. Idea suggested by Rich Salz.
+
+Version 1.0 June 18, 2001
+--------------------------
+Initial public offering
+
diff --git a/third_party/python/ply/MANIFEST.in b/third_party/python/ply/MANIFEST.in
new file mode 100644
index 0000000000..0d37431b0b
--- /dev/null
+++ b/third_party/python/ply/MANIFEST.in
@@ -0,0 +1,8 @@
+recursive-include example *
+recursive-include doc *
+recursive-include test *
+include ANNOUNCE
+include README.md
+include CHANGES
+include TODO
+global-exclude *.pyc
diff --git a/third_party/python/ply/PKG-INFO b/third_party/python/ply/PKG-INFO
new file mode 100644
index 0000000000..6eedf42595
--- /dev/null
+++ b/third_party/python/ply/PKG-INFO
@@ -0,0 +1,22 @@
+Metadata-Version: 1.1
+Name: ply
+Version: 3.10
+Summary: Python Lex & Yacc
+Home-page: http://www.dabeaz.com/ply/
+Author: David Beazley
+Author-email: dave@dabeaz.com
+License: BSD
+Description:
+ PLY is yet another implementation of lex and yacc for Python. Some notable
+        features include the fact that it's implemented entirely in Python and it
+ uses LALR(1) parsing which is efficient and well suited for larger grammars.
+
+ PLY provides most of the standard lex/yacc features including support for empty
+ productions, precedence rules, error recovery, and support for ambiguous grammars.
+
+ PLY is extremely easy to use and provides very extensive error checking.
+ It is compatible with both Python 2 and Python 3.
+
+Platform: UNKNOWN
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 2
diff --git a/third_party/python/ply/README.md b/third_party/python/ply/README.md
new file mode 100644
index 0000000000..e428f1b14a
--- /dev/null
+++ b/third_party/python/ply/README.md
@@ -0,0 +1,273 @@
+PLY (Python Lex-Yacc) Version 3.10
+
+Copyright (C) 2001-2017
+David M. Beazley (Dabeaz LLC)
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+* Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+* Neither the name of the David Beazley or Dabeaz LLC may be used to
+ endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Introduction
+============
+
+PLY is a 100% Python implementation of the common parsing tools lex
+and yacc. Here are a few highlights:
+
+ - PLY is very closely modeled after traditional lex/yacc.
+ If you know how to use these tools in C, you will find PLY
+ to be similar.
+
+ - PLY provides *very* extensive error reporting and diagnostic
+ information to assist in parser construction. The original
+ implementation was developed for instructional purposes. As
+ a result, the system tries to identify the most common types
+ of errors made by novice users.
+
+ - PLY provides full support for empty productions, error recovery,
+ precedence specifiers, and moderately ambiguous grammars.
+
+ - Parsing is based on LR-parsing which is fast, memory efficient,
+ better suited to large grammars, and which has a number of nice
+ properties when dealing with syntax errors and other parsing problems.
+ Currently, PLY builds its parsing tables using the LALR(1)
+ algorithm used in yacc.
+
+ - PLY uses Python introspection features to build lexers and parsers.
+ This greatly simplifies the task of parser construction since it reduces
+ the number of files and eliminates the need to run a separate lex/yacc
+ tool before running your program.
+
+ - PLY can be used to build parsers for "real" programming languages.
+ Although it is not ultra-fast due to its Python implementation,
+ PLY can be used to parse grammars consisting of several hundred
+ rules (as might be found for a language like C). The lexer and LR
+ parser are also reasonably efficient when parsing typically
+ sized programs. People have used PLY to build parsers for
+ C, C++, ADA, and other real programming languages.
+
+How to Use
+==========
+
+PLY consists of two files: lex.py and yacc.py. These are contained
+within the 'ply' directory which may also be used as a Python package.
+To use PLY, simply copy the 'ply' directory to your project and import
+lex and yacc from the associated 'ply' package. For example:
+
+ import ply.lex as lex
+ import ply.yacc as yacc
+
+Alternatively, you can copy just the files lex.py and yacc.py
+individually and use them as modules. For example:
+
+ import lex
+ import yacc
+
+The file setup.py can be used to install ply using distutils.
+
+The file doc/ply.html contains complete documentation on how to use
+the system.
+
+The example directory contains several different examples including a
+PLY specification for ANSI C as given in K&R 2nd Ed.
+
+A simple example is found at the end of this document.
+
+Requirements
+============
+PLY requires the use of Python 2.6 or greater. However, you should
+use the latest Python release if possible. It should work on just
+about any platform. PLY has been tested with both CPython and Jython.
+It also seems to work with IronPython.
+
+Resources
+=========
+More information about PLY can be obtained on the PLY webpage at:
+
+ http://www.dabeaz.com/ply
+
+For a detailed overview of parsing theory, consult the excellent
+book "Compilers : Principles, Techniques, and Tools" by Aho, Sethi, and
+Ullman. The topics found in "Lex & Yacc" by Levine, Mason, and Brown
+may also be useful.
+
+The GitHub page for PLY can be found at:
+
+ https://github.com/dabeaz/ply
+
+An old and relatively inactive discussion group for PLY is found at:
+
+ http://groups.google.com/group/ply-hack
+
+Acknowledgments
+===============
+A special thanks is in order for all of the students in CS326 who
+suffered through about 25 different versions of these tools :-).
+
+The CHANGES file acknowledges those who have contributed patches.
+
+Elias Ioup did the first implementation of LALR(1) parsing in PLY-1.x.
+Andrew Waters and Markus Schoepflin were instrumental in reporting bugs
+and testing a revised LALR(1) implementation for PLY-2.0.
+
+Special Note for PLY-3.0
+========================
+PLY-3.0 is the first PLY release to support Python 3. However, backwards
+compatibility with Python 2.6 is still preserved. PLY provides dual
+Python 2/3 compatibility by restricting its implementation to a common
+subset of basic language features. You should not convert PLY using
+2to3--it is not necessary and may in fact break the implementation.
+
+Example
+=======
+
+Here is a simple example showing a PLY implementation of a calculator
+with variables.
+
+ # -----------------------------------------------------------------------------
+ # calc.py
+ #
+ # A simple calculator with variables.
+ # -----------------------------------------------------------------------------
+
+ tokens = (
+ 'NAME','NUMBER',
+ 'PLUS','MINUS','TIMES','DIVIDE','EQUALS',
+ 'LPAREN','RPAREN',
+ )
+
+ # Tokens
+
+ t_PLUS = r'\+'
+ t_MINUS = r'-'
+ t_TIMES = r'\*'
+ t_DIVIDE = r'/'
+ t_EQUALS = r'='
+ t_LPAREN = r'\('
+ t_RPAREN = r'\)'
+ t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+ def t_NUMBER(t):
+ r'\d+'
+ t.value = int(t.value)
+ return t
+
+ # Ignored characters
+ t_ignore = " \t"
+
+ def t_newline(t):
+ r'\n+'
+ t.lexer.lineno += t.value.count("\n")
+
+ def t_error(t):
+ print("Illegal character '%s'" % t.value[0])
+ t.lexer.skip(1)
+
+ # Build the lexer
+ import ply.lex as lex
+ lex.lex()
+
+ # Precedence rules for the arithmetic operators
+ precedence = (
+ ('left','PLUS','MINUS'),
+ ('left','TIMES','DIVIDE'),
+ ('right','UMINUS'),
+ )
+
+ # dictionary of names (for storing variables)
+ names = { }
+
+ def p_statement_assign(p):
+ 'statement : NAME EQUALS expression'
+ names[p[1]] = p[3]
+
+ def p_statement_expr(p):
+ 'statement : expression'
+ print(p[1])
+
+ def p_expression_binop(p):
+ '''expression : expression PLUS expression
+ | expression MINUS expression
+ | expression TIMES expression
+ | expression DIVIDE expression'''
+ if p[2] == '+' : p[0] = p[1] + p[3]
+ elif p[2] == '-': p[0] = p[1] - p[3]
+ elif p[2] == '*': p[0] = p[1] * p[3]
+ elif p[2] == '/': p[0] = p[1] / p[3]
+
+ def p_expression_uminus(p):
+ 'expression : MINUS expression %prec UMINUS'
+ p[0] = -p[2]
+
+ def p_expression_group(p):
+ 'expression : LPAREN expression RPAREN'
+ p[0] = p[2]
+
+ def p_expression_number(p):
+ 'expression : NUMBER'
+ p[0] = p[1]
+
+ def p_expression_name(p):
+ 'expression : NAME'
+ try:
+ p[0] = names[p[1]]
+ except LookupError:
+ print("Undefined name '%s'" % p[1])
+ p[0] = 0
+
+ def p_error(p):
+ print("Syntax error at '%s'" % p.value)
+
+ import ply.yacc as yacc
+ yacc.yacc()
+
+ while True:
+ try:
+ s = raw_input('calc > ') # use input() on Python 3
+ except EOFError:
+ break
+ yacc.parse(s)
+
+
+Bug Reports and Patches
+=======================
+My goal with PLY is to simply have a decent lex/yacc implementation
+for Python. As a general rule, I don't spend huge amounts of time
+working on it unless I receive very specific bug reports and/or
+patches to fix problems. I also try to incorporate submitted feature
+requests and enhancements into each new version. Please visit the PLY
+github page at https://github.com/dabeaz/ply to submit issues and pull
+requests. To contact me about bugs and/or new features, please send
+email to dave@dabeaz.com.
+
+-- Dave
+
+
+
+
+
+
+
+
+
diff --git a/third_party/python/ply/TODO b/third_party/python/ply/TODO
new file mode 100644
index 0000000000..f4800aacf4
--- /dev/null
+++ b/third_party/python/ply/TODO
@@ -0,0 +1,16 @@
+The PLY to-do list:
+
+1. Finish writing the C Preprocessor module. Started in the
+ file ply/cpp.py
+
+2. Create and document libraries of useful tokens.
+
+3. Expand the examples/yply tool that parses bison/yacc
+ files.
+
+4. Think of various diabolical things to do with the
+ new yacc internals. For example, it is now possible
+   to specify grammars using completely different schemes
+ than the reflection approach used by PLY.
+
+
diff --git a/third_party/python/ply/example/BASIC/README b/third_party/python/ply/example/BASIC/README
new file mode 100644
index 0000000000..be24a3005e
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/README
@@ -0,0 +1,79 @@
+Inspired by a September 14, 2006 Salon article "Why Johnny Can't Code" by
+David Brin (http://www.salon.com/tech/feature/2006/09/14/basic/index.html),
+I thought that a fully working BASIC interpreter might be an interesting,
+if not questionable, PLY example. Uh, okay, so maybe it's just a bad idea,
+but in any case, here it is.
+
+In this example, you'll find a rough implementation of 1964 Dartmouth BASIC
+as described in the manual at:
+
+ http://www.bitsavers.org/pdf/dartmouth/BASIC_Oct64.pdf
+
+See also:
+
+ http://en.wikipedia.org/wiki/Dartmouth_BASIC
+
+This dialect is downright primitive---there are no string variables
+and no facilities for interactive input. Moreover, subroutines and functions
+are brain-dead even more than they usually are for BASIC. Of course,
+the GOTO statement is provided.
+
+Nevertheless, there are a few interesting aspects of this example:
+
+ - It illustrates a fully working interpreter including lexing, parsing,
+ and interpretation of instructions.
+
+ - The parser shows how to catch and report various kinds of parsing
+ errors in a more graceful way.
+
+ - The example both parses files (supplied on command line) and
+ interactive input entered line by line.
+
+ - It shows how you might represent parsed information. In this case,
+ each BASIC statement is encoded into a Python tuple containing the
+ statement type and parameters. These tuples are then stored in
+ a dictionary indexed by program line numbers (see the sketch below).
+
+ - Even though it's just BASIC, the parser contains more than 80
+ rules and 150 parsing states. Thus, it's a little more meaty than
+ the calculator example.
+
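+As a rough sketch of that tuple encoding (following the rules in
+basparse.py), a two-line program such as:
+
+ 10 PRINT "HELLO WORLD"
+ 20 END
+
+is parsed into roughly this dictionary:
+
+ {10: ('PRINT', [('HELLO WORLD', None)], None),
+ 20: ('END',)}
+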
+To use the example, run it as follows:
+
+ % python basic.py hello.bas
+ HELLO WORLD
+ %
+
+or use it interactively:
+
+ % python basic.py
+ [BASIC] 10 PRINT "HELLO WORLD"
+ [BASIC] 20 END
+ [BASIC] RUN
+ HELLO WORLD
+ [BASIC]
+
+The following files are defined:
+
+ basic.py - High level script that controls everything
+ basiclex.py - BASIC tokenizer
+ basparse.py - BASIC parser
+ basinterp.py - BASIC interpreter that runs parsed programs.
+
+In addition, a number of sample BASIC programs (.bas suffix) are
+provided. These were taken out of the Dartmouth manual.
+
+Disclaimer: I haven't spent a ton of time testing this and it's likely that
+I've skimped here and there on a few finer details (e.g., strictly enforcing
+variable naming rules). However, the interpreter seems to be able to run
+the examples in the BASIC manual.
+
+Have fun!
+
+-Dave
+
+
+
+
+
+
diff --git a/third_party/python/ply/example/BASIC/basic.py b/third_party/python/ply/example/BASIC/basic.py
new file mode 100644
index 0000000000..70ac9e7c74
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/basic.py
@@ -0,0 +1,65 @@
+# An implementation of Dartmouth BASIC (1964)
+#
+
+import sys
+sys.path.insert(0, "../..")
+
+if sys.version_info[0] >= 3:
+ raw_input = input
+
+import basiclex
+import basparse
+import basinterp
+
+# If a filename has been specified, we try to run it.
+# If a runtime error occurs, we bail out and enter
+# interactive mode below
+if len(sys.argv) == 2:
+ data = open(sys.argv[1]).read()
+ prog = basparse.parse(data)
+ if not prog:
+ raise SystemExit
+ b = basinterp.BasicInterpreter(prog)
+ try:
+ b.run()
+ raise SystemExit
+ except RuntimeError:
+ pass
+
+else:
+ b = basinterp.BasicInterpreter({})
+
+# Interactive mode. This incrementally adds/deletes statements
+# from the program stored in the BasicInterpreter object. In
+# addition, special commands 'NEW', 'LIST', and 'RUN' are added.
+# Specifying a line number with no code deletes that line from
+# the program.
+
+while 1:
+ try:
+ line = raw_input("[BASIC] ")
+ except EOFError:
+ raise SystemExit
+ if not line:
+ continue
+ line += "\n"
+ prog = basparse.parse(line)
+ if not prog:
+ continue
+
+ keys = list(prog)
+ if keys[0] > 0:
+ b.add_statements(prog)
+ else:
+ stat = prog[keys[0]]
+ if stat[0] == 'RUN':
+ try:
+ b.run()
+ except RuntimeError:
+ pass
+ elif stat[0] == 'LIST':
+ b.list()
+ elif stat[0] == 'BLANK':
+ b.del_line(stat[1])
+ elif stat[0] == 'NEW':
+ b.new()
diff --git a/third_party/python/ply/example/BASIC/basiclex.py b/third_party/python/ply/example/BASIC/basiclex.py
new file mode 100644
index 0000000000..4151f4c34f
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/basiclex.py
@@ -0,0 +1,61 @@
+# An implementation of Dartmouth BASIC (1964)
+
+from ply import *
+
+keywords = (
+ 'LET', 'READ', 'DATA', 'PRINT', 'GOTO', 'IF', 'THEN', 'FOR', 'NEXT', 'TO', 'STEP',
+ 'END', 'STOP', 'DEF', 'GOSUB', 'DIM', 'REM', 'RETURN', 'RUN', 'LIST', 'NEW',
+)
+
+tokens = keywords + (
+ 'EQUALS', 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'POWER',
+ 'LPAREN', 'RPAREN', 'LT', 'LE', 'GT', 'GE', 'NE',
+ 'COMMA', 'SEMI', 'INTEGER', 'FLOAT', 'STRING',
+ 'ID', 'NEWLINE'
+)
+
+t_ignore = ' \t'
+
+
+def t_REM(t):
+ r'REM .*'
+ return t
+
+
+def t_ID(t):
+ r'[A-Z][A-Z0-9]*'
+ if t.value in keywords:
+ t.type = t.value
+ return t
+
+t_EQUALS = r'='
+t_PLUS = r'\+'
+t_MINUS = r'-'
+t_TIMES = r'\*'
+t_POWER = r'\^'
+t_DIVIDE = r'/'
+t_LPAREN = r'\('
+t_RPAREN = r'\)'
+t_LT = r'<'
+t_LE = r'<='
+t_GT = r'>'
+t_GE = r'>='
+t_NE = r'<>'
+t_COMMA = r'\,'
+t_SEMI = r';'
+t_INTEGER = r'\d+'
+t_FLOAT = r'((\d*\.\d+)(E[\+-]?\d+)?|([1-9]\d*E[\+-]?\d+))'
+t_STRING = r'\".*?\"'
+
+
+def t_NEWLINE(t):
+ r'\n'
+ t.lexer.lineno += 1
+ return t
+
+
+def t_error(t):
+ print("Illegal character %s" % t.value[0])
+ t.lexer.skip(1)
+
+lex.lex(debug=0)
diff --git a/third_party/python/ply/example/BASIC/basiclog.py b/third_party/python/ply/example/BASIC/basiclog.py
new file mode 100644
index 0000000000..9dcc7feda6
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/basiclog.py
@@ -0,0 +1,73 @@
+# An implementation of Dartmouth BASIC (1964)
+#
+
+import sys
+sys.path.insert(0, "../..")
+
+if sys.version_info[0] >= 3:
+ raw_input = input
+
+import logging
+logging.basicConfig(
+ level=logging.INFO,
+ filename="parselog.txt",
+ filemode="w"
+)
+log = logging.getLogger()
+
+import basiclex
+import basparse
+import basinterp
+
+# If a filename has been specified, we try to run it.
+# If a runtime error occurs, we bail out and enter
+# interactive mode below
+if len(sys.argv) == 2:
+ data = open(sys.argv[1]).read()
+ prog = basparse.parse(data, debug=log)
+ if not prog:
+ raise SystemExit
+ b = basinterp.BasicInterpreter(prog)
+ try:
+ b.run()
+ raise SystemExit
+ except RuntimeError:
+ pass
+
+else:
+ b = basinterp.BasicInterpreter({})
+
+# Interactive mode. This incrementally adds/deletes statements
+# from the program stored in the BasicInterpreter object. In
+# addition, special commands 'NEW', 'LIST', and 'RUN' are added.
+# Specifying a line number with no code deletes that line from
+# the program.
+
+while 1:
+ try:
+ line = raw_input("[BASIC] ")
+ except EOFError:
+ raise SystemExit
+ if not line:
+ continue
+ line += "\n"
+ prog = basparse.parse(line, debug=log)
+ if not prog:
+ continue
+
+ keys = list(prog)
+ if keys[0] > 0:
+ b.add_statements(prog)
+ else:
+ stat = prog[keys[0]]
+ if stat[0] == 'RUN':
+ try:
+ b.run()
+ except RuntimeError:
+ pass
+ elif stat[0] == 'LIST':
+ b.list()
+ elif stat[0] == 'BLANK':
+ b.del_line(stat[1])
+ elif stat[0] == 'NEW':
+ b.new()
diff --git a/third_party/python/ply/example/BASIC/basinterp.py b/third_party/python/ply/example/BASIC/basinterp.py
new file mode 100644
index 0000000000..67762c797b
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/basinterp.py
@@ -0,0 +1,496 @@
+# This file provides the runtime support for running a basic program
+# Assumes the program has been parsed using basparse.py
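+#
+# Typical usage (an illustrative sketch, mirroring what basic.py does):
+#
+# prog = basparse.parse(open("hello.bas").read())
+# BasicInterpreter(prog).run()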
+
+import sys
+import math
+import random
+
+
+class BasicInterpreter:
+
+ # Initialize the interpreter. prog is a dictionary
+ # containing (line,statement) mappings
+ def __init__(self, prog):
+ self.prog = prog
+
+ self.functions = { # Built-in function table
+ 'SIN': lambda z: math.sin(self.eval(z)),
+ 'COS': lambda z: math.cos(self.eval(z)),
+ 'TAN': lambda z: math.tan(self.eval(z)),
+ 'ATN': lambda z: math.atan(self.eval(z)),
+ 'EXP': lambda z: math.exp(self.eval(z)),
+ 'ABS': lambda z: abs(self.eval(z)),
+ 'LOG': lambda z: math.log(self.eval(z)),
+ 'SQR': lambda z: math.sqrt(self.eval(z)),
+ 'INT': lambda z: int(self.eval(z)),
+ 'RND': lambda z: random.random()
+ }
+
+ # Collect all data statements
+ def collect_data(self):
+ self.data = []
+ for lineno in self.stat:
+ if self.prog[lineno][0] == 'DATA':
+ self.data = self.data + self.prog[lineno][1]
+ self.dc = 0 # Initialize the data counter
+
+ # Check for end statements
+ def check_end(self):
+ has_end = 0
+ for lineno in self.stat:
+ if self.prog[lineno][0] == 'END' and not has_end:
+ has_end = lineno
+ if not has_end:
+ print("NO END INSTRUCTION")
+ self.error = 1
+ return
+ if has_end != lineno:
+ print("END IS NOT LAST")
+ self.error = 1
+
+ # Check loops
+ def check_loops(self):
+ for pc in range(len(self.stat)):
+ lineno = self.stat[pc]
+ if self.prog[lineno][0] == 'FOR':
+ forinst = self.prog[lineno]
+ loopvar = forinst[1]
+ for i in range(pc + 1, len(self.stat)):
+ if self.prog[self.stat[i]][0] == 'NEXT':
+ nextvar = self.prog[self.stat[i]][1]
+ if nextvar != loopvar:
+ continue
+ self.loopend[pc] = i
+ break
+ else:
+ print("FOR WITHOUT NEXT AT LINE %s" % self.stat[pc])
+ self.error = 1
+
+ # Evaluate an expression
+ def eval(self, expr):
+ etype = expr[0]
+ if etype == 'NUM':
+ return expr[1]
+ elif etype == 'GROUP':
+ return self.eval(expr[1])
+ elif etype == 'UNARY':
+ if expr[1] == '-':
+ return -self.eval(expr[2])
+ elif etype == 'BINOP':
+ if expr[1] == '+':
+ return self.eval(expr[2]) + self.eval(expr[3])
+ elif expr[1] == '-':
+ return self.eval(expr[2]) - self.eval(expr[3])
+ elif expr[1] == '*':
+ return self.eval(expr[2]) * self.eval(expr[3])
+ elif expr[1] == '/':
+ return float(self.eval(expr[2])) / self.eval(expr[3])
+ elif expr[1] == '^':
+ return abs(self.eval(expr[2]))**self.eval(expr[3])
+ elif etype == 'VAR':
+ var, dim1, dim2 = expr[1]
+ if not dim1 and not dim2:
+ if var in self.vars:
+ return self.vars[var]
+ else:
+ print("UNDEFINED VARIABLE %s AT LINE %s" %
+ (var, self.stat[self.pc]))
+ raise RuntimeError
+ # May be a list lookup or a function evaluation
+ if dim1 and not dim2:
+ if var in self.functions:
+ # A function
+ return self.functions[var](dim1)
+ else:
+ # A list evaluation
+ if var in self.lists:
+ dim1val = self.eval(dim1)
+ if dim1val < 1 or dim1val > len(self.lists[var]):
+ print("LIST INDEX OUT OF BOUNDS AT LINE %s" %
+ self.stat[self.pc])
+ raise RuntimeError
+ return self.lists[var][dim1val - 1]
+ if dim1 and dim2:
+ if var in self.tables:
+ dim1val = self.eval(dim1)
+ dim2val = self.eval(dim2)
+ if dim1val < 1 or dim1val > len(self.tables[var]) or dim2val < 1 or dim2val > len(self.tables[var][0]):
+ print("TABLE INDEX OUT OUT BOUNDS AT LINE %s" %
+ self.stat[self.pc])
+ raise RuntimeError
+ return self.tables[var][dim1val - 1][dim2val - 1]
+ print("UNDEFINED VARIABLE %s AT LINE %s" %
+ (var, self.stat[self.pc]))
+ raise RuntimeError
+
+ # Evaluate a relational expression
+ def releval(self, expr):
+ etype = expr[1]
+ lhs = self.eval(expr[2])
+ rhs = self.eval(expr[3])
+ if etype == '<':
+ if lhs < rhs:
+ return 1
+ else:
+ return 0
+
+ elif etype == '<=':
+ if lhs <= rhs:
+ return 1
+ else:
+ return 0
+
+ elif etype == '>':
+ if lhs > rhs:
+ return 1
+ else:
+ return 0
+
+ elif etype == '>=':
+ if lhs >= rhs:
+ return 1
+ else:
+ return 0
+
+ elif etype == '=':
+ if lhs == rhs:
+ return 1
+ else:
+ return 0
+
+ elif etype == '<>':
+ if lhs != rhs:
+ return 1
+ else:
+ return 0
+
+ # Assignment
+ def assign(self, target, value):
+ var, dim1, dim2 = target
+ if not dim1 and not dim2:
+ self.vars[var] = self.eval(value)
+ elif dim1 and not dim2:
+ # List assignment
+ dim1val = self.eval(dim1)
+ if not var in self.lists:
+ self.lists[var] = [0] * 10
+
+ if dim1val > len(self.lists[var]):
+ print ("DIMENSION TOO LARGE AT LINE %s" % self.stat[self.pc])
+ raise RuntimeError
+ self.lists[var][dim1val - 1] = self.eval(value)
+ elif dim1 and dim2:
+ dim1val = self.eval(dim1)
+ dim2val = self.eval(dim2)
+ if not var in self.tables:
+ temp = [0] * 10
+ v = []
+ for i in range(10):
+ v.append(temp[:])
+ self.tables[var] = v
+ # Variable already exists
+ if dim1val > len(self.tables[var]) or dim2val > len(self.tables[var][0]):
+ print("DIMENSION TOO LARGE AT LINE %s" % self.stat[self.pc])
+ raise RuntimeError
+ self.tables[var][dim1val - 1][dim2val - 1] = self.eval(value)
+
+ # Change the current line number
+ def goto(self, linenum):
+ if not linenum in self.prog:
+ print("UNDEFINED LINE NUMBER %d AT LINE %d" %
+ (linenum, self.stat[self.pc]))
+ raise RuntimeError
+ self.pc = self.stat.index(linenum)
+
+ # Run it
+ def run(self):
+ self.vars = {} # All variables
+ self.lists = {} # List variables
+ self.tables = {} # Tables
+ self.loops = [] # Currently active loops
+ self.loopend = {} # Mapping saying where loops end
+ self.gosub = None # Gosub return point (if any)
+ self.error = 0 # Indicates program error
+
+ self.stat = list(self.prog) # Ordered list of all line numbers
+ self.stat.sort()
+ self.pc = 0 # Current program counter
+
+ # Processing prior to running
+
+ self.collect_data() # Collect all of the data statements
+ self.check_end()
+ self.check_loops()
+
+ if self.error:
+ raise RuntimeError
+
+ while 1:
+ line = self.stat[self.pc]
+ instr = self.prog[line]
+
+ op = instr[0]
+
+ # END and STOP statements
+ if op == 'END' or op == 'STOP':
+ break # We're done
+
+ # GOTO statement
+ elif op == 'GOTO':
+ newline = instr[1]
+ self.goto(newline)
+ continue
+
+ # PRINT statement
+ elif op == 'PRINT':
+ plist = instr[1]
+ out = ""
+ for label, val in plist:
+ if out:
+ out += ' ' * (15 - (len(out) % 15))
+ out += label
+ if val:
+ if label:
+ out += " "
+ eval = self.eval(val)
+ out += str(eval)
+ sys.stdout.write(out)
+ end = instr[2]
+ if not (end == ',' or end == ';'):
+ sys.stdout.write("\n")
+ if end == ',':
+ sys.stdout.write(" " * (15 - (len(out) % 15)))
+ if end == ';':
+ sys.stdout.write(" " * (3 - (len(out) % 3)))
+
+ # LET statement
+ elif op == 'LET':
+ target = instr[1]
+ value = instr[2]
+ self.assign(target, value)
+
+ # READ statement
+ elif op == 'READ':
+ for target in instr[1]:
+ if self.dc < len(self.data):
+ value = ('NUM', self.data[self.dc])
+ self.assign(target, value)
+ self.dc += 1
+ else:
+ # No more data. Program ends
+ return
+ elif op == 'IF':
+ relop = instr[1]
+ newline = instr[2]
+ if (self.releval(relop)):
+ self.goto(newline)
+ continue
+
+ elif op == 'FOR':
+ loopvar = instr[1]
+ initval = instr[2]
+ finval = instr[3]
+ stepval = instr[4]
+
+ # Check to see if this is a new loop
+ if not self.loops or self.loops[-1][0] != self.pc:
+ # Looks like a new loop. Make the initial assignment
+ newvalue = initval
+ self.assign((loopvar, None, None), initval)
+ if not stepval:
+ stepval = ('NUM', 1)
+ stepval = self.eval(stepval) # Evaluate step here
+ self.loops.append((self.pc, stepval))
+ else:
+ # It's a repeat of the previous loop
+ # Update the value of the loop variable according to the
+ # step
+ stepval = ('NUM', self.loops[-1][1])
+ newvalue = (
+ 'BINOP', '+', ('VAR', (loopvar, None, None)), stepval)
+
+ if self.loops[-1][1] < 0:
+ relop = '>='
+ else:
+ relop = '<='
+ if not self.releval(('RELOP', relop, newvalue, finval)):
+ # Loop is done. Jump to the NEXT
+ self.pc = self.loopend[self.pc]
+ self.loops.pop()
+ else:
+ self.assign((loopvar, None, None), newvalue)
+
+ elif op == 'NEXT':
+ if not self.loops:
+ print("NEXT WITHOUT FOR AT LINE %s" % line)
+ return
+
+ nextvar = instr[1]
+ self.pc = self.loops[-1][0]
+ loopinst = self.prog[self.stat[self.pc]]
+ forvar = loopinst[1]
+ if nextvar != forvar:
+ print("NEXT DOESN'T MATCH FOR AT LINE %s" % line)
+ return
+ continue
+ elif op == 'GOSUB':
+ newline = instr[1]
+ if self.gosub:
+ print("ALREADY IN A SUBROUTINE AT LINE %s" % line)
+ return
+ self.gosub = self.stat[self.pc]
+ self.goto(newline)
+ continue
+
+ elif op == 'RETURN':
+ if not self.gosub:
+ print("RETURN WITHOUT A GOSUB AT LINE %s" % line)
+ return
+ self.goto(self.gosub)
+ self.gosub = None
+
+ elif op == 'FUNC':
+ fname = instr[1]
+ pname = instr[2]
+ expr = instr[3]
+
+ def eval_func(pvalue, name=pname, self=self, expr=expr):
+ self.assign((pname, None, None), pvalue)
+ return self.eval(expr)
+ self.functions[fname] = eval_func
+
+ elif op == 'DIM':
+ for vname, x, y in instr[1]:
+ if y == 0:
+ # Single dimension variable
+ self.lists[vname] = [0] * x
+ else:
+ # Double dimension variable
+ temp = [0] * y
+ v = []
+ for i in range(x):
+ v.append(temp[:])
+ self.tables[vname] = v
+
+ self.pc += 1
+
+ # Utility functions for program listing
+ def expr_str(self, expr):
+ etype = expr[0]
+ if etype == 'NUM':
+ return str(expr[1])
+ elif etype == 'GROUP':
+ return "(%s)" % self.expr_str(expr[1])
+ elif etype == 'UNARY':
+ if expr[1] == '-':
+ return "-" + str(expr[2])
+ elif etype == 'BINOP':
+ return "%s %s %s" % (self.expr_str(expr[2]), expr[1], self.expr_str(expr[3]))
+ elif etype == 'VAR':
+ return self.var_str(expr[1])
+
+ def relexpr_str(self, expr):
+ return "%s %s %s" % (self.expr_str(expr[2]), expr[1], self.expr_str(expr[3]))
+
+ def var_str(self, var):
+ varname, dim1, dim2 = var
+ if not dim1 and not dim2:
+ return varname
+ if dim1 and not dim2:
+ return "%s(%s)" % (varname, self.expr_str(dim1))
+ return "%s(%s,%s)" % (varname, self.expr_str(dim1), self.expr_str(dim2))
+
+ # Create a program listing
+ def list(self):
+ stat = list(self.prog) # Ordered list of all line numbers
+ stat.sort()
+ for line in stat:
+ instr = self.prog[line]
+ op = instr[0]
+ if op in ['END', 'STOP', 'RETURN']:
+ print("%s %s" % (line, op))
+ continue
+ elif op == 'REM':
+ print("%s %s" % (line, instr[1]))
+ elif op == 'PRINT':
+ _out = "%s %s " % (line, op)
+ first = 1
+ for p in instr[1]:
+ if not first:
+ _out += ", "
+ if p[0] and p[1]:
+ _out += '"%s"%s' % (p[0], self.expr_str(p[1]))
+ elif p[1]:
+ _out += self.expr_str(p[1])
+ else:
+ _out += '"%s"' % (p[0],)
+ first = 0
+ if instr[2]:
+ _out += instr[2]
+ print(_out)
+ elif op == 'LET':
+ print("%s LET %s = %s" %
+ (line, self.var_str(instr[1]), self.expr_str(instr[2])))
+ elif op == 'READ':
+ _out = "%s READ " % line
+ first = 1
+ for r in instr[1]:
+ if not first:
+ _out += ","
+ _out += self.var_str(r)
+ first = 0
+ print(_out)
+ elif op == 'IF':
+ print("%s IF %s THEN %d" %
+ (line, self.relexpr_str(instr[1]), instr[2]))
+ elif op == 'GOTO' or op == 'GOSUB':
+ print("%s %s %s" % (line, op, instr[1]))
+ elif op == 'FOR':
+ _out = "%s FOR %s = %s TO %s" % (
+ line, instr[1], self.expr_str(instr[2]), self.expr_str(instr[3]))
+ if instr[4]:
+ _out += " STEP %s" % (self.expr_str(instr[4]))
+ print(_out)
+ elif op == 'NEXT':
+ print("%s NEXT %s" % (line, instr[1]))
+ elif op == 'FUNC':
+ print("%s DEF %s(%s) = %s" %
+ (line, instr[1], instr[2], self.expr_str(instr[3])))
+ elif op == 'DIM':
+ _out = "%s DIM " % line
+ first = 1
+ for vname, x, y in instr[1]:
+ if not first:
+ _out += ","
+ first = 0
+ if y == 0:
+ _out += "%s(%d)" % (vname, x)
+ else:
+ _out += "%s(%d,%d)" % (vname, x, y)
+
+ print(_out)
+ elif op == 'DATA':
+ _out = "%s DATA " % line
+ first = 1
+ for v in instr[1]:
+ if not first:
+ _out += ","
+ first = 0
+ _out += v
+ print(_out)
+
+ # Erase the current program
+ def new(self):
+ self.prog = {}
+
+ # Insert statements
+ def add_statements(self, prog):
+ for line, stat in prog.items():
+ self.prog[line] = stat
+
+ # Delete a statement
+ def del_line(self, lineno):
+ try:
+ del self.prog[lineno]
+ except KeyError:
+ pass
diff --git a/third_party/python/ply/example/BASIC/basparse.py b/third_party/python/ply/example/BASIC/basparse.py
new file mode 100644
index 0000000000..d610c7d909
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/basparse.py
@@ -0,0 +1,474 @@
+# An implementation of Dartmouth BASIC (1964)
+#
+
+from ply import *
+import basiclex
+
+tokens = basiclex.tokens
+
+precedence = (
+ ('left', 'PLUS', 'MINUS'),
+ ('left', 'TIMES', 'DIVIDE'),
+ ('left', 'POWER'),
+ ('right', 'UMINUS')
+)
+
+# A BASIC program is a series of statements. We represent the program as a
+# dictionary of tuples indexed by line number.
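+# For example (illustrative only), the line 10 PRINT "HELLO WORLD" is
+# stored as {10: ('PRINT', [('HELLO WORLD', None)], None)}.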
+
+
+def p_program(p):
+ '''program : program statement
+ | statement'''
+
+ if len(p) == 2 and p[1]:
+ p[0] = {}
+ line, stat = p[1]
+ p[0][line] = stat
+ elif len(p) == 3:
+ p[0] = p[1]
+ if not p[0]:
+ p[0] = {}
+ if p[2]:
+ line, stat = p[2]
+ p[0][line] = stat
+
+# This catch-all rule is used for any catastrophic errors. In this case,
+# we simply return nothing
+
+
+def p_program_error(p):
+ '''program : error'''
+ p[0] = None
+ p.parser.error = 1
+
+# Format of all BASIC statements.
+
+
+def p_statement(p):
+ '''statement : INTEGER command NEWLINE'''
+ if isinstance(p[2], str):
+ print("%s %s %s" % (p[2], "AT LINE", p[1]))
+ p[0] = None
+ p.parser.error = 1
+ else:
+ lineno = int(p[1])
+ p[0] = (lineno, p[2])
+
+# Interactive statements.
+
+
+def p_statement_interactive(p):
+ '''statement : RUN NEWLINE
+ | LIST NEWLINE
+ | NEW NEWLINE'''
+ p[0] = (0, (p[1], 0))
+
+# Blank line number
+
+
+def p_statement_blank(p):
+ '''statement : INTEGER NEWLINE'''
+ p[0] = (0, ('BLANK', int(p[1])))
+
+# Error handling for malformed statements
+
+
+def p_statement_bad(p):
+ '''statement : INTEGER error NEWLINE'''
+ print("MALFORMED STATEMENT AT LINE %s" % p[1])
+ p[0] = None
+ p.parser.error = 1
+
+# Blank line
+
+
+def p_statement_newline(p):
+ '''statement : NEWLINE'''
+ p[0] = None
+
+# LET statement
+
+
+def p_command_let(p):
+ '''command : LET variable EQUALS expr'''
+ p[0] = ('LET', p[2], p[4])
+
+
+def p_command_let_bad(p):
+ '''command : LET variable EQUALS error'''
+ p[0] = "BAD EXPRESSION IN LET"
+
+# READ statement
+
+
+def p_command_read(p):
+ '''command : READ varlist'''
+ p[0] = ('READ', p[2])
+
+
+def p_command_read_bad(p):
+ '''command : READ error'''
+ p[0] = "MALFORMED VARIABLE LIST IN READ"
+
+# DATA statement
+
+
+def p_command_data(p):
+ '''command : DATA numlist'''
+ p[0] = ('DATA', p[2])
+
+
+def p_command_data_bad(p):
+ '''command : DATA error'''
+ p[0] = "MALFORMED NUMBER LIST IN DATA"
+
+# PRINT statement
+
+
+def p_command_print(p):
+ '''command : PRINT plist optend'''
+ p[0] = ('PRINT', p[2], p[3])
+
+
+def p_command_print_bad(p):
+ '''command : PRINT error'''
+ p[0] = "MALFORMED PRINT STATEMENT"
+
+# Optional ending on PRINT. Either a comma (,) or semicolon (;)
+
+
+def p_optend(p):
+ '''optend : COMMA
+ | SEMI
+ |'''
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = None
+
+# PRINT statement with no arguments
+
+
+def p_command_print_empty(p):
+ '''command : PRINT'''
+ p[0] = ('PRINT', [], None)
+
+# GOTO statement
+
+
+def p_command_goto(p):
+ '''command : GOTO INTEGER'''
+ p[0] = ('GOTO', int(p[2]))
+
+
+def p_command_goto_bad(p):
+ '''command : GOTO error'''
+ p[0] = "INVALID LINE NUMBER IN GOTO"
+
+# IF-THEN statement
+
+
+def p_command_if(p):
+ '''command : IF relexpr THEN INTEGER'''
+ p[0] = ('IF', p[2], int(p[4]))
+
+
+def p_command_if_bad(p):
+ '''command : IF error THEN INTEGER'''
+ p[0] = "BAD RELATIONAL EXPRESSION"
+
+
+def p_command_if_bad2(p):
+ '''command : IF relexpr THEN error'''
+ p[0] = "INVALID LINE NUMBER IN THEN"
+
+# FOR statement
+
+
+def p_command_for(p):
+ '''command : FOR ID EQUALS expr TO expr optstep'''
+ p[0] = ('FOR', p[2], p[4], p[6], p[7])
+
+
+def p_command_for_bad_initial(p):
+ '''command : FOR ID EQUALS error TO expr optstep'''
+ p[0] = "BAD INITIAL VALUE IN FOR STATEMENT"
+
+
+def p_command_for_bad_final(p):
+ '''command : FOR ID EQUALS expr TO error optstep'''
+ p[0] = "BAD FINAL VALUE IN FOR STATEMENT"
+
+
+def p_command_for_bad_step(p):
+ '''command : FOR ID EQUALS expr TO expr STEP error'''
+ p[0] = "MALFORMED STEP IN FOR STATEMENT"
+
+# Optional STEP qualifier on FOR statement
+
+
+def p_optstep(p):
+ '''optstep : STEP expr
+ | empty'''
+ if len(p) == 3:
+ p[0] = p[2]
+ else:
+ p[0] = None
+
+# NEXT statement
+
+
+def p_command_next(p):
+ '''command : NEXT ID'''
+
+ p[0] = ('NEXT', p[2])
+
+
+def p_command_next_bad(p):
+ '''command : NEXT error'''
+ p[0] = "MALFORMED NEXT"
+
+# END statement
+
+
+def p_command_end(p):
+ '''command : END'''
+ p[0] = ('END',)
+
+# REM statement
+
+
+def p_command_rem(p):
+ '''command : REM'''
+ p[0] = ('REM', p[1])
+
+# STOP statement
+
+
+def p_command_stop(p):
+ '''command : STOP'''
+ p[0] = ('STOP',)
+
+# DEF statement
+
+
+def p_command_def(p):
+ '''command : DEF ID LPAREN ID RPAREN EQUALS expr'''
+ p[0] = ('FUNC', p[2], p[4], p[7])
+
+
+def p_command_def_bad_rhs(p):
+ '''command : DEF ID LPAREN ID RPAREN EQUALS error'''
+ p[0] = "BAD EXPRESSION IN DEF STATEMENT"
+
+
+def p_command_def_bad_arg(p):
+ '''command : DEF ID LPAREN error RPAREN EQUALS expr'''
+ p[0] = "BAD ARGUMENT IN DEF STATEMENT"
+
+# GOSUB statement
+
+
+def p_command_gosub(p):
+ '''command : GOSUB INTEGER'''
+ p[0] = ('GOSUB', int(p[2]))
+
+
+def p_command_gosub_bad(p):
+ '''command : GOSUB error'''
+ p[0] = "INVALID LINE NUMBER IN GOSUB"
+
+# RETURN statement
+
+
+def p_command_return(p):
+ '''command : RETURN'''
+ p[0] = ('RETURN',)
+
+# DIM statement
+
+
+def p_command_dim(p):
+ '''command : DIM dimlist'''
+ p[0] = ('DIM', p[2])
+
+
+def p_command_dim_bad(p):
+ '''command : DIM error'''
+ p[0] = "MALFORMED VARIABLE LIST IN DIM"
+
+# List of variables supplied to DIM statement
+
+
+def p_dimlist(p):
+ '''dimlist : dimlist COMMA dimitem
+ | dimitem'''
+ if len(p) == 4:
+ p[0] = p[1]
+ p[0].append(p[3])
+ else:
+ p[0] = [p[1]]
+
+# DIM items
+
+
+def p_dimitem_single(p):
+ '''dimitem : ID LPAREN INTEGER RPAREN'''
+ p[0] = (p[1], eval(p[3]), 0)
+
+
+def p_dimitem_double(p):
+ '''dimitem : ID LPAREN INTEGER COMMA INTEGER RPAREN'''
+ p[0] = (p[1], eval(p[3]), eval(p[5]))
+
+# Arithmetic expressions
+
+
+def p_expr_binary(p):
+ '''expr : expr PLUS expr
+ | expr MINUS expr
+ | expr TIMES expr
+ | expr DIVIDE expr
+ | expr POWER expr'''
+
+ p[0] = ('BINOP', p[2], p[1], p[3])
+
+
+def p_expr_number(p):
+ '''expr : INTEGER
+ | FLOAT'''
+ p[0] = ('NUM', eval(p[1]))
+
+
+def p_expr_variable(p):
+ '''expr : variable'''
+ p[0] = ('VAR', p[1])
+
+
+def p_expr_group(p):
+ '''expr : LPAREN expr RPAREN'''
+ p[0] = ('GROUP', p[2])
+
+
+def p_expr_unary(p):
+ '''expr : MINUS expr %prec UMINUS'''
+ p[0] = ('UNARY', '-', p[2])
+
+# Relational expressions
+
+
+def p_relexpr(p):
+ '''relexpr : expr LT expr
+ | expr LE expr
+ | expr GT expr
+ | expr GE expr
+ | expr EQUALS expr
+ | expr NE expr'''
+ p[0] = ('RELOP', p[2], p[1], p[3])
+
+# Variables
+
+
+def p_variable(p):
+ '''variable : ID
+ | ID LPAREN expr RPAREN
+ | ID LPAREN expr COMMA expr RPAREN'''
+ if len(p) == 2:
+ p[0] = (p[1], None, None)
+ elif len(p) == 5:
+ p[0] = (p[1], p[3], None)
+ else:
+ p[0] = (p[1], p[3], p[5])
+
+# Builds a list of variable targets as a Python list
+
+
+def p_varlist(p):
+ '''varlist : varlist COMMA variable
+ | variable'''
+ if len(p) > 2:
+ p[0] = p[1]
+ p[0].append(p[3])
+ else:
+ p[0] = [p[1]]
+
+
+# Builds a list of numbers as a Python list
+
+def p_numlist(p):
+ '''numlist : numlist COMMA number
+ | number'''
+
+ if len(p) > 2:
+ p[0] = p[1]
+ p[0].append(p[3])
+ else:
+ p[0] = [p[1]]
+
+# A number. May be an integer or a float
+
+
+def p_number(p):
+ '''number : INTEGER
+ | FLOAT'''
+ p[0] = eval(p[1])
+
+# A signed number.
+
+
+def p_number_signed(p):
+ '''number : MINUS INTEGER
+ | MINUS FLOAT'''
+ p[0] = eval("-" + p[2])
+
+# List of targets for a print statement
+# Returns a list of tuples (label,expr)
+
+
+def p_plist(p):
+ '''plist : plist COMMA pitem
+ | pitem'''
+ if len(p) > 3:
+ p[0] = p[1]
+ p[0].append(p[3])
+ else:
+ p[0] = [p[1]]
+
+
+def p_item_string(p):
+ '''pitem : STRING'''
+ p[0] = (p[1][1:-1], None)
+
+
+def p_item_string_expr(p):
+ '''pitem : STRING expr'''
+ p[0] = (p[1][1:-1], p[2])
+
+
+def p_item_expr(p):
+ '''pitem : expr'''
+ p[0] = ("", p[1])
+
+# Empty
+
+
+def p_empty(p):
+ '''empty : '''
+
+# Catastrophic error handler
+
+
+def p_error(p):
+ if not p:
+ print("SYNTAX ERROR AT EOF")
+
+bparser = yacc.yacc()
+
+
+def parse(data, debug=0):
+ bparser.error = 0
+ p = bparser.parse(data, debug=debug)
+ if bparser.error:
+ return None
+ return p
diff --git a/third_party/python/ply/example/BASIC/dim.bas b/third_party/python/ply/example/BASIC/dim.bas
new file mode 100644
index 0000000000..87bd95b32e
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/dim.bas
@@ -0,0 +1,14 @@
+5 DIM A(50,15)
+10 FOR I = 1 TO 50
+20 FOR J = 1 TO 15
+30 LET A(I,J) = I + J
+35 REM PRINT I,J, A(I,J)
+40 NEXT J
+50 NEXT I
+100 FOR I = 1 TO 50
+110 FOR J = 1 TO 15
+120 PRINT A(I,J),
+130 NEXT J
+140 PRINT
+150 NEXT I
+999 END
diff --git a/third_party/python/ply/example/BASIC/func.bas b/third_party/python/ply/example/BASIC/func.bas
new file mode 100644
index 0000000000..447ee16a92
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/func.bas
@@ -0,0 +1,5 @@
+10 DEF FDX(X) = 2*X
+20 FOR I = 0 TO 100
+30 PRINT FDX(I)
+40 NEXT I
+50 END
diff --git a/third_party/python/ply/example/BASIC/gcd.bas b/third_party/python/ply/example/BASIC/gcd.bas
new file mode 100644
index 0000000000..d0b7746089
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/gcd.bas
@@ -0,0 +1,22 @@
+10 PRINT "A","B","C","GCD"
+20 READ A,B,C
+30 LET X = A
+40 LET Y = B
+50 GOSUB 200
+60 LET X = G
+70 LET Y = C
+80 GOSUB 200
+90 PRINT A, B, C, G
+100 GOTO 20
+110 DATA 60, 90, 120
+120 DATA 38456, 64872, 98765
+130 DATA 32, 384, 72
+200 LET Q = INT(X/Y)
+210 LET R = X - Q*Y
+220 IF R = 0 THEN 300
+230 LET X = Y
+240 LET Y = R
+250 GOTO 200
+300 LET G = Y
+310 RETURN
+999 END
diff --git a/third_party/python/ply/example/BASIC/gosub.bas b/third_party/python/ply/example/BASIC/gosub.bas
new file mode 100644
index 0000000000..99737b16f1
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/gosub.bas
@@ -0,0 +1,13 @@
+100 LET X = 3
+110 GOSUB 400
+120 PRINT U, V, W
+200 LET X = 5
+210 GOSUB 400
+220 LET Z = U + 2*V + 3*W
+230 PRINT Z
+240 GOTO 999
+400 LET U = X*X
+410 LET V = X*X*X
+420 LET W = X*X*X*X + X*X*X + X*X + X
+430 RETURN
+999 END
diff --git a/third_party/python/ply/example/BASIC/hello.bas b/third_party/python/ply/example/BASIC/hello.bas
new file mode 100644
index 0000000000..cc6f0b0b51
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/hello.bas
@@ -0,0 +1,4 @@
+5 REM HELLO WORLD PROGRAM
+10 PRINT "HELLO WORLD"
+99 END
+
diff --git a/third_party/python/ply/example/BASIC/linear.bas b/third_party/python/ply/example/BASIC/linear.bas
new file mode 100644
index 0000000000..56c08220b3
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/linear.bas
@@ -0,0 +1,17 @@
+1 REM ::: SOLVE A SYSTEM OF LINEAR EQUATIONS
+2 REM ::: A1*X1 + A2*X2 = B1
+3 REM ::: A3*X1 + A4*X2 = B2
+4 REM --------------------------------------
+10 READ A1, A2, A3, A4
+15 LET D = A1 * A4 - A3 * A2
+20 IF D = 0 THEN 65
+30 READ B1, B2
+37 LET X1 = (B1*A4 - B2*A2) / D
+42 LET X2 = (A1*B2 - A3*B1) / D
+55 PRINT X1, X2
+60 GOTO 30
+65 PRINT "NO UNIQUE SOLUTION"
+70 DATA 1, 2, 4
+80 DATA 2, -7, 5
+85 DATA 1, 3, 4, -7
+90 END
diff --git a/third_party/python/ply/example/BASIC/maxsin.bas b/third_party/python/ply/example/BASIC/maxsin.bas
new file mode 100644
index 0000000000..b96901530c
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/maxsin.bas
@@ -0,0 +1,12 @@
+5 PRINT "X VALUE", "SINE", "RESOLUTION"
+10 READ D
+20 LET M = -1
+30 FOR X = 0 TO 3 STEP D
+40 IF SIN(X) <= M THEN 80
+50 LET X0 = X
+60 LET M = SIN(X)
+80 NEXT X
+85 PRINT X0, M, D
+90 GOTO 10
+100 DATA .1, .01, .001
+110 END
diff --git a/third_party/python/ply/example/BASIC/powers.bas b/third_party/python/ply/example/BASIC/powers.bas
new file mode 100644
index 0000000000..a454dc3e21
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/powers.bas
@@ -0,0 +1,13 @@
+5 PRINT "THIS PROGRAM COMPUTES AND PRINTS THE NTH POWERS"
+6 PRINT "OF THE NUMBERS LESS THAN OR EQUAL TO N FOR VARIOUS"
+7 PRINT "N FROM 1 THROUGH 7"
+8 PRINT
+10 FOR N = 1 TO 7
+15 PRINT "N = "N
+20 FOR I = 1 TO N
+30 PRINT I^N,
+40 NEXT I
+50 PRINT
+60 PRINT
+70 NEXT N
+80 END
diff --git a/third_party/python/ply/example/BASIC/rand.bas b/third_party/python/ply/example/BASIC/rand.bas
new file mode 100644
index 0000000000..4ff7a14670
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/rand.bas
@@ -0,0 +1,4 @@
+10 FOR I = 1 TO 20
+20 PRINT INT(10*RND(0))
+30 NEXT I
+40 END
diff --git a/third_party/python/ply/example/BASIC/sales.bas b/third_party/python/ply/example/BASIC/sales.bas
new file mode 100644
index 0000000000..a39aefb762
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/sales.bas
@@ -0,0 +1,20 @@
+10 FOR I = 1 TO 3
+20 READ P(I)
+30 NEXT I
+40 FOR I = 1 TO 3
+50 FOR J = 1 TO 5
+60 READ S(I,J)
+70 NEXT J
+80 NEXT I
+90 FOR J = 1 TO 5
+100 LET S = 0
+110 FOR I = 1 TO 3
+120 LET S = S + P(I) * S(I,J)
+130 NEXT I
+140 PRINT "TOTAL SALES FOR SALESMAN"J, "$"S
+150 NEXT J
+200 DATA 1.25, 4.30, 2.50
+210 DATA 40, 20, 37, 29, 42
+220 DATA 10, 16, 3, 21, 8
+230 DATA 35, 47, 29, 16, 33
+300 END
diff --git a/third_party/python/ply/example/BASIC/sears.bas b/third_party/python/ply/example/BASIC/sears.bas
new file mode 100644
index 0000000000..5ced3974e2
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/sears.bas
@@ -0,0 +1,18 @@
+1 REM :: THIS PROGRAM COMPUTES HOW MANY TIMES YOU HAVE TO FOLD
+2 REM :: A PIECE OF PAPER SO THAT IT IS TALLER THAN THE
+3 REM :: SEARS TOWER.
+4 REM :: S = HEIGHT OF TOWER (METERS)
+5 REM :: T = THICKNESS OF PAPER (MILLIMETERS)
+10 LET S = 442
+20 LET T = 0.1
+30 REM CONVERT T TO METERS
+40 LET T = T * .001
+50 LET F = 1
+60 LET H = T
+100 IF H > S THEN 200
+120 LET H = 2 * H
+125 LET F = F + 1
+130 GOTO 100
+200 PRINT "NUMBER OF FOLDS ="F
+220 PRINT "FINAL HEIGHT ="H
+999 END
diff --git a/third_party/python/ply/example/BASIC/sqrt1.bas b/third_party/python/ply/example/BASIC/sqrt1.bas
new file mode 100644
index 0000000000..6673a91524
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/sqrt1.bas
@@ -0,0 +1,5 @@
+10 LET X = 0
+20 LET X = X + 1
+30 PRINT X, SQR(X)
+40 IF X < 100 THEN 20
+50 END
diff --git a/third_party/python/ply/example/BASIC/sqrt2.bas b/third_party/python/ply/example/BASIC/sqrt2.bas
new file mode 100644
index 0000000000..862d85ef26
--- /dev/null
+++ b/third_party/python/ply/example/BASIC/sqrt2.bas
@@ -0,0 +1,4 @@
+10 FOR X = 1 TO 100
+20 PRINT X, SQR(X)
+30 NEXT X
+40 END
diff --git a/third_party/python/ply/example/GardenSnake/GardenSnake.py b/third_party/python/ply/example/GardenSnake/GardenSnake.py
new file mode 100644
index 0000000000..8b493b40dc
--- /dev/null
+++ b/third_party/python/ply/example/GardenSnake/GardenSnake.py
@@ -0,0 +1,777 @@
+# GardenSnake - a parser generator demonstration program
+#
+# This implements a modified version of a subset of Python:
+# - only 'def', 'return' and 'if' statements
+# - 'if' only has 'then' clause (no elif nor else)
+# - single-quoted strings only, content in raw format
+# - numbers are decimal.Decimal instances (not integers or floats)
+# - no print statement; use the built-in 'print' function
+# - only < > == + - / * implemented (and unary + -)
+# - assignment and tuple assignment work
+# - no generators of any sort
+# - no ... well, no quite a lot
+
+# Why? I'm thinking about a new indentation-based configuration
+# language for a project and wanted to figure out how to do it. Once
+# I got that working I needed a way to test it out. My original AST
+# was dumb so I decided to target Python's AST and compile it into
+# Python code. Plus, it's pretty cool that it only took a day or so
+# from sitting down with Ply to having working code.
+
+# This uses David Beazley's Ply from http://www.dabeaz.com/ply/
+
+# This work is hereby released into the Public Domain. To view a copy of
+# the public domain dedication, visit
+# http://creativecommons.org/licenses/publicdomain/ or send a letter to
+# Creative Commons, 543 Howard Street, 5th Floor, San Francisco,
+# California, 94105, USA.
+#
+# Portions of this work are derived from Python's Grammar definition
+# and may be covered under the Python copyright and license
+#
+# Andrew Dalke / Dalke Scientific Software, LLC
+# 30 August 2006 / Cape Town, South Africa
+
+# Changelog:
+# 30 August - added link to CC license; removed the "swapcase" encoding
+
+# Modifications for inclusion in PLY distribution
+import sys
+sys.path.insert(0, "../..")
+from ply import *
+
+##### Lexer ######
+#import lex
+import decimal
+
+tokens = (
+ 'DEF',
+ 'IF',
+ 'NAME',
+ 'NUMBER', # Python decimals
+ 'STRING', # single quoted strings only; syntax of raw strings
+ 'LPAR',
+ 'RPAR',
+ 'COLON',
+ 'EQ',
+ 'ASSIGN',
+ 'LT',
+ 'GT',
+ 'PLUS',
+ 'MINUS',
+ 'MULT',
+ 'DIV',
+ 'RETURN',
+ 'WS',
+ 'NEWLINE',
+ 'COMMA',
+ 'SEMICOLON',
+ 'INDENT',
+ 'DEDENT',
+ 'ENDMARKER',
+)
+
+#t_NUMBER = r'\d+'
+# taken from decimal.py but without the leading sign
+
+
+def t_NUMBER(t):
+ r"""(\d+(\.\d*)?|\.\d+)([eE][-+]? \d+)?"""
+ t.value = decimal.Decimal(t.value)
+ return t
+
+
+def t_STRING(t):
+ r"'([^\\']+|\\'|\\\\)*'" # I think this is right ...
+ t.value = t.value[1:-1].decode("string-escape") # .swapcase() # for fun
+ return t
+
+t_COLON = r':'
+t_EQ = r'=='
+t_ASSIGN = r'='
+t_LT = r'<'
+t_GT = r'>'
+t_PLUS = r'\+'
+t_MINUS = r'-'
+t_MULT = r'\*'
+t_DIV = r'/'
+t_COMMA = r','
+t_SEMICOLON = r';'
+
+# Ply nicely documented how to do this.
+
+RESERVED = {
+ "def": "DEF",
+ "if": "IF",
+ "return": "RETURN",
+}
+
+
+def t_NAME(t):
+ r'[a-zA-Z_][a-zA-Z0-9_]*'
+ t.type = RESERVED.get(t.value, "NAME")
+ return t
+
+# Putting this before t_WS lets it consume lines with only comments in
+# them so the latter code never sees the WS part. Not consuming the
+# newline. Needed for "if 1: #comment"
+
+
+def t_comment(t):
+ r"[ ]*\043[^\n]*" # \043 is '#'
+ pass
+
+
+# Whitespace
+def t_WS(t):
+ r' [ ]+ '
+ if t.lexer.at_line_start and t.lexer.paren_count == 0:
+ return t
+
+# Don't generate newline tokens when inside of parentheses, e.g.
+# a = (1,
+# 2, 3)
+
+
+def t_newline(t):
+ r'\n+'
+ t.lexer.lineno += len(t.value)
+ t.type = "NEWLINE"
+ if t.lexer.paren_count == 0:
+ return t
+
+
+def t_LPAR(t):
+ r'\('
+ t.lexer.paren_count += 1
+ return t
+
+
+def t_RPAR(t):
+ r'\)'
+ # check for underflow? should be the job of the parser
+ t.lexer.paren_count -= 1
+ return t
+
+
+def t_error(t):
+ raise SyntaxError("Unknown symbol %r" % (t.value[0],))
+ print "Skipping", repr(t.value[0])
+ t.lexer.skip(1)
+
+# I implemented INDENT / DEDENT generation as a post-processing filter
+
+# The original lex token stream contains WS and NEWLINE characters.
+# WS will only occur before any other tokens on a line.
+
+# I have three filters. One tags tokens by adding two attributes.
+# "must_indent" is True if the token must be indented from the
+# previous code. The other is "at_line_start" which is True for WS
+# and the first non-WS/non-NEWLINE on a line. It flags the check to
+# see if the new line has changed the indentation level.
+
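+# A rough sketch of the combined effect: for the source "if 1:\n    go()\n",
+# the raw stream IF NUMBER COLON NEWLINE WS NAME LPAR RPAR NEWLINE comes out
+# of the filters as
+# IF NUMBER COLON NEWLINE INDENT NAME LPAR RPAR NEWLINE DEDENT ENDMARKER
+# (ENDMARKER is appended by filter() below).
+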
+# Python's syntax has three INDENT states
+# 0) no colon hence no need to indent
+# 1) "if 1: go()" - simple statements have a COLON but no need for an indent
+# 2) "if 1:\n go()" - complex statements have a COLON NEWLINE and must indent
+NO_INDENT = 0
+MAY_INDENT = 1
+MUST_INDENT = 2
+
+# only care about whitespace at the start of a line
+
+
+def track_tokens_filter(lexer, tokens):
+ lexer.at_line_start = at_line_start = True
+ indent = NO_INDENT
+ saw_colon = False
+ for token in tokens:
+ token.at_line_start = at_line_start
+
+ if token.type == "COLON":
+ at_line_start = False
+ indent = MAY_INDENT
+ token.must_indent = False
+
+ elif token.type == "NEWLINE":
+ at_line_start = True
+ if indent == MAY_INDENT:
+ indent = MUST_INDENT
+ token.must_indent = False
+
+ elif token.type == "WS":
+ assert token.at_line_start == True
+ at_line_start = True
+ token.must_indent = False
+
+ else:
+ # A real token; only indent after COLON NEWLINE
+ if indent == MUST_INDENT:
+ token.must_indent = True
+ else:
+ token.must_indent = False
+ at_line_start = False
+ indent = NO_INDENT
+
+ yield token
+ lexer.at_line_start = at_line_start
+
+
+def _new_token(type, lineno):
+ tok = lex.LexToken()
+ tok.type = type
+ tok.value = None
+ tok.lineno = lineno
+ return tok
+
+# Synthesize a DEDENT tag
+
+
+def DEDENT(lineno):
+ return _new_token("DEDENT", lineno)
+
+# Synthesize an INDENT tag
+
+
+def INDENT(lineno):
+ return _new_token("INDENT", lineno)
+
+
+# Track the indentation level and emit the right INDENT / DEDENT events.
+def indentation_filter(tokens):
+ # A stack of indentation levels; will never pop item 0
+ levels = [0]
+ token = None
+ depth = 0
+ prev_was_ws = False
+ for token in tokens:
+ # if 1:
+ # print "Process", token,
+ # if token.at_line_start:
+ # print "at_line_start",
+ # if token.must_indent:
+ # print "must_indent",
+ # print
+
+ # WS only occurs at the start of the line
+ # There may be WS followed by NEWLINE so
+ # only track the depth here. Don't indent/dedent
+ # until there's something real.
+ if token.type == "WS":
+ assert depth == 0
+ depth = len(token.value)
+ prev_was_ws = True
+ # WS tokens are never passed to the parser
+ continue
+
+ if token.type == "NEWLINE":
+ depth = 0
+ if prev_was_ws or token.at_line_start:
+ # ignore blank lines
+ continue
+ # pass the other cases on through
+ yield token
+ continue
+
+ # then it must be a real token (not WS, not NEWLINE)
+ # which can affect the indentation level
+
+ prev_was_ws = False
+ if token.must_indent:
+ # The current depth must be larger than the previous level
+ if not (depth > levels[-1]):
+ raise IndentationError("expected an indented block")
+
+ levels.append(depth)
+ yield INDENT(token.lineno)
+
+ elif token.at_line_start:
+ # Must be on the same level or one of the previous levels
+ if depth == levels[-1]:
+ # At the same level
+ pass
+ elif depth > levels[-1]:
+ raise IndentationError(
+ "indentation increase but not in new block")
+ else:
+ # Back up; but only if it matches a previous level
+ try:
+ i = levels.index(depth)
+ except ValueError:
+ raise IndentationError("inconsistent indentation")
+ for _ in range(i + 1, len(levels)):
+ yield DEDENT(token.lineno)
+ levels.pop()
+
+ yield token
+
+ ### Finished processing ###
+
+ # Must dedent any remaining levels
+ if len(levels) > 1:
+ assert token is not None
+ for _ in range(1, len(levels)):
+ yield DEDENT(token.lineno)
+
+
+# The top-level filter adds an ENDMARKER, if requested.
+# Python's grammar uses it.
+def filter(lexer, add_endmarker=True):
+ token = None
+ tokens = iter(lexer.token, None)
+ tokens = track_tokens_filter(lexer, tokens)
+ for token in indentation_filter(tokens):
+ yield token
+
+ if add_endmarker:
+ lineno = 1
+ if token is not None:
+ lineno = token.lineno
+ yield _new_token("ENDMARKER", lineno)
+
+# Combine Ply and my filters into a new lexer
+
+
+class IndentLexer(object):
+
+ def __init__(self, debug=0, optimize=0, lextab='lextab', reflags=0):
+ self.lexer = lex.lex(debug=debug, optimize=optimize,
+ lextab=lextab, reflags=reflags)
+ self.token_stream = None
+
+ def input(self, s, add_endmarker=True):
+ self.lexer.paren_count = 0
+ self.lexer.input(s)
+ self.token_stream = filter(self.lexer, add_endmarker)
+
+ def token(self):
+ try:
+ return self.token_stream.next()
+ except StopIteration:
+ return None
+
+########## Parser (tokens -> AST) ######
+
+# also part of Ply
+#import yacc
+
+# I use the Python AST
+from compiler import ast
+
+# Helper function
+
+
+def Assign(left, right):
+ names = []
+ if isinstance(left, ast.Name):
+ # Single assignment on left
+ return ast.Assign([ast.AssName(left.name, 'OP_ASSIGN')], right)
+ elif isinstance(left, ast.Tuple):
+ # List of things - make sure they are Name nodes
+ names = []
+ for child in left.getChildren():
+ if not isinstance(child, ast.Name):
+ raise SyntaxError("that assignment not supported")
+ names.append(child.name)
+ ass_list = [ast.AssName(name, 'OP_ASSIGN') for name in names]
+ return ast.Assign([ast.AssTuple(ass_list)], right)
+ else:
+ raise SyntaxError("Can't do that yet")
+
+
+# The grammar comments come from Python's Grammar/Grammar file
+
+# NB: compound_stmt in single_input is followed by extra NEWLINE!
+# file_input: (NEWLINE | stmt)* ENDMARKER
+def p_file_input_end(p):
+ """file_input_end : file_input ENDMARKER"""
+ p[0] = ast.Stmt(p[1])
+
+
+def p_file_input(p):
+ """file_input : file_input NEWLINE
+ | file_input stmt
+ | NEWLINE
+ | stmt"""
+ if isinstance(p[len(p) - 1], basestring):
+ if len(p) == 3:
+ p[0] = p[1]
+ else:
+ p[0] = [] # len(p) == 2 --> only a blank line
+ else:
+ if len(p) == 3:
+ p[0] = p[1] + p[2]
+ else:
+ p[0] = p[1]
+
+
+# funcdef: [decorators] 'def' NAME parameters ':' suite
+# ignoring decorators
+def p_funcdef(p):
+ "funcdef : DEF NAME parameters COLON suite"
+ p[0] = ast.Function(None, p[2], tuple(p[3]), (), 0, None, p[5])
+
+# parameters: '(' [varargslist] ')'
+
+
+def p_parameters(p):
+ """parameters : LPAR RPAR
+ | LPAR varargslist RPAR"""
+ if len(p) == 3:
+ p[0] = []
+ else:
+ p[0] = p[2]
+
+
+# varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] | '**' NAME) |
+# highly simplified
+def p_varargslist(p):
+ """varargslist : varargslist COMMA NAME
+ | NAME"""
+ if len(p) == 4:
+ p[0] = p[1] + p[3]
+ else:
+ p[0] = [p[1]]
+
+# stmt: simple_stmt | compound_stmt
+
+
+def p_stmt_simple(p):
+ """stmt : simple_stmt"""
+ # simple_stmt is a list
+ p[0] = p[1]
+
+
+def p_stmt_compound(p):
+ """stmt : compound_stmt"""
+ p[0] = [p[1]]
+
+# simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
+
+
+def p_simple_stmt(p):
+ """simple_stmt : small_stmts NEWLINE
+ | small_stmts SEMICOLON NEWLINE"""
+ p[0] = p[1]
+
+
+def p_small_stmts(p):
+ """small_stmts : small_stmts SEMICOLON small_stmt
+ | small_stmt"""
+ if len(p) == 4:
+ p[0] = p[1] + [p[3]]
+ else:
+ p[0] = [p[1]]
+
+# small_stmt: expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
+# import_stmt | global_stmt | exec_stmt | assert_stmt
+
+
+def p_small_stmt(p):
+ """small_stmt : flow_stmt
+ | expr_stmt"""
+ p[0] = p[1]
+
+# expr_stmt: testlist (augassign (yield_expr|testlist) |
+# ('=' (yield_expr|testlist))*)
+# augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
+# '<<=' | '>>=' | '**=' | '//=')
+
+
+def p_expr_stmt(p):
+ """expr_stmt : testlist ASSIGN testlist
+ | testlist """
+ if len(p) == 2:
+ # a list of expressions
+ p[0] = ast.Discard(p[1])
+ else:
+ p[0] = Assign(p[1], p[3])
+
+
+def p_flow_stmt(p):
+ "flow_stmt : return_stmt"
+ p[0] = p[1]
+
+# return_stmt: 'return' [testlist]
+
+
+def p_return_stmt(p):
+ "return_stmt : RETURN testlist"
+ p[0] = ast.Return(p[2])
+
+
+def p_compound_stmt(p):
+ """compound_stmt : if_stmt
+ | funcdef"""
+ p[0] = p[1]
+
+
+def p_if_stmt(p):
+ 'if_stmt : IF test COLON suite'
+ p[0] = ast.If([(p[2], p[4])], None)
+
+
+def p_suite(p):
+ """suite : simple_stmt
+ | NEWLINE INDENT stmts DEDENT"""
+ if len(p) == 2:
+ p[0] = ast.Stmt(p[1])
+ else:
+ p[0] = ast.Stmt(p[3])
+
+
+def p_stmts(p):
+ """stmts : stmts stmt
+ | stmt"""
+ if len(p) == 3:
+ p[0] = p[1] + p[2]
+ else:
+ p[0] = p[1]
+
+# Not using Python's approach because Ply supports precedence
+
+# comparison: expr (comp_op expr)*
+# arith_expr: term (('+'|'-') term)*
+# term: factor (('*'|'/'|'%'|'//') factor)*
+# factor: ('+'|'-'|'~') factor | power
+# comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
+
+
+def make_lt_compare((left, right)):
+ return ast.Compare(left, [('<', right), ])
+
+
+def make_gt_compare((left, right)):
+ return ast.Compare(left, [('>', right), ])
+
+
+def make_eq_compare((left, right)):
+ return ast.Compare(left, [('==', right), ])
+
+
+binary_ops = {
+ "+": ast.Add,
+ "-": ast.Sub,
+ "*": ast.Mul,
+ "/": ast.Div,
+ "<": make_lt_compare,
+ ">": make_gt_compare,
+ "==": make_eq_compare,
+}
+unary_ops = {
+ "+": ast.UnaryAdd,
+ "-": ast.UnarySub,
+}
+precedence = (
+ ("left", "EQ", "GT", "LT"),
+ ("left", "PLUS", "MINUS"),
+ ("left", "MULT", "DIV"),
+)
+
+
+def p_comparison(p):
+ """comparison : comparison PLUS comparison
+ | comparison MINUS comparison
+ | comparison MULT comparison
+ | comparison DIV comparison
+ | comparison LT comparison
+ | comparison EQ comparison
+ | comparison GT comparison
+ | PLUS comparison
+ | MINUS comparison
+ | power"""
+ if len(p) == 4:
+ p[0] = binary_ops[p[2]]((p[1], p[3]))
+ elif len(p) == 3:
+ p[0] = unary_ops[p[1]](p[2])
+ else:
+ p[0] = p[1]
+
+# power: atom trailer* ['**' factor]
+# trailers enable function calls. I only allow one level of calls
+# so this is 'trailer'
+
+
+def p_power(p):
+ """power : atom
+ | atom trailer"""
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ if p[2][0] == "CALL":
+ p[0] = ast.CallFunc(p[1], p[2][1], None, None)
+ else:
+ raise AssertionError("not implemented")
+
+
+def p_atom_name(p):
+ """atom : NAME"""
+ p[0] = ast.Name(p[1])
+
+
+def p_atom_number(p):
+ """atom : NUMBER
+ | STRING"""
+ p[0] = ast.Const(p[1])
+
+
+def p_atom_tuple(p):
+ """atom : LPAR testlist RPAR"""
+ p[0] = p[2]
+
+# trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
+
+
+def p_trailer(p):
+ "trailer : LPAR arglist RPAR"
+ p[0] = ("CALL", p[2])
+
+# testlist: test (',' test)* [',']
+# Contains shift/reduce error
+
+
+def p_testlist(p):
+ """testlist : testlist_multi COMMA
+ | testlist_multi """
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ # May need to promote singleton to tuple
+ if isinstance(p[1], list):
+ p[0] = p[1]
+ else:
+ p[0] = [p[1]]
+ # Convert into a tuple?
+ if isinstance(p[0], list):
+ p[0] = ast.Tuple(p[0])
+
+
+def p_testlist_multi(p):
+ """testlist_multi : testlist_multi COMMA test
+ | test"""
+ if len(p) == 2:
+ # singleton
+ p[0] = p[1]
+ else:
+ if isinstance(p[1], list):
+ p[0] = p[1] + [p[3]]
+ else:
+ # singleton -> tuple
+ p[0] = [p[1], p[3]]
+
+
+# test: or_test ['if' or_test 'else' test] | lambdef
+# as I don't support 'and', 'or', and 'not' this works down to 'comparison'
+def p_test(p):
+ "test : comparison"
+ p[0] = p[1]
+
+
+# arglist: (argument ',')* (argument [',']| '*' test [',' '**' test] | '**' test)
+# XXX INCOMPLETE: this doesn't allow the trailing comma
+def p_arglist(p):
+ """arglist : arglist COMMA argument
+ | argument"""
+ if len(p) == 4:
+ p[0] = p[1] + [p[3]]
+ else:
+ p[0] = [p[1]]
+
+# argument: test [gen_for] | test '=' test # Really [keyword '='] test
+
+
+def p_argument(p):
+ "argument : test"
+ p[0] = p[1]
+
+
+def p_error(p):
+ # print "Error!", repr(p)
+ raise SyntaxError(p)
+
+
+class GardenSnakeParser(object):
+
+ def __init__(self, lexer=None):
+ if lexer is None:
+ lexer = IndentLexer()
+ self.lexer = lexer
+ self.parser = yacc.yacc(start="file_input_end")
+
+ def parse(self, code):
+ self.lexer.input(code)
+ result = self.parser.parse(lexer=self.lexer)
+ return ast.Module(None, result)
+
+
+###### Code generation ######
+
+from compiler import misc, syntax, pycodegen
+
+
+class GardenSnakeCompiler(object):
+
+ def __init__(self):
+ self.parser = GardenSnakeParser()
+
+ def compile(self, code, filename="<string>"):
+ tree = self.parser.parse(code)
+ # print tree
+ misc.set_filename(filename, tree)
+ syntax.check(tree)
+ gen = pycodegen.ModuleCodeGenerator(tree)
+ code = gen.getCode()
+ return code
+
+####### Test code #######
+
+compile = GardenSnakeCompiler().compile
+
+code = r"""
+
+print('LET\'S TRY THIS \\OUT')
+
+#Comment here
+def x(a):
+ print('called with',a)
+ if a == 1:
+ return 2
+ if a*2 > 10: return 999 / 4
+ # Another comment here
+
+ return a+2*3
+
+ints = (1, 2,
+ 3, 4,
+5)
+print('multiline-expression', ints)
+
+t = 4+1/3*2+6*(9-5+1)
+print('precedence test; should be 34+2/3:', t, t==(34+2/3))
+
+print('numbers', 1,2,3,4,5)
+if 1:
+ 8
+ a=9
+ print(x(a))
+
+print(x(1))
+print(x(2))
+print(x(8),'3')
+print('this is decimal', 1/5)
+print('BIG DECIMAL', 1.234567891234567e12345)
+
+"""
+
+# Set up the GardenSnake run-time environment
+
+
+def print_(*args):
+ print "-->", " ".join(map(str, args))
+
+globals()["print"] = print_
+
+compiled_code = compile(code)
+
+exec compiled_code in globals()
+print "Done"
diff --git a/third_party/python/ply/example/GardenSnake/README b/third_party/python/ply/example/GardenSnake/README
new file mode 100644
index 0000000000..4d8be2db05
--- /dev/null
+++ b/third_party/python/ply/example/GardenSnake/README
@@ -0,0 +1,5 @@
+This example is Andrew Dalke's GardenSnake language. It shows how to process an
+indentation-based language like Python. Further details can be found here:
+
+http://dalkescientific.com/writings/diary/archive/2006/08/30/gardensnake_language.html
+
diff --git a/third_party/python/ply/example/README b/third_party/python/ply/example/README
new file mode 100644
index 0000000000..63519b557f
--- /dev/null
+++ b/third_party/python/ply/example/README
@@ -0,0 +1,10 @@
+Simple examples:
+ calc - Simple calculator
+ classcalc - Simple calculator defined as a class
+
+Complex examples:
+ ansic - ANSI C grammar from K&R
+ BASIC - A small BASIC interpreter
+ GardenSnake - A simple python-like language
+ yply - Converts Unix yacc files to PLY programs.
+
diff --git a/third_party/python/ply/example/ansic/README b/third_party/python/ply/example/ansic/README
new file mode 100644
index 0000000000..e049d3b4e4
--- /dev/null
+++ b/third_party/python/ply/example/ansic/README
@@ -0,0 +1,2 @@
+This example is incomplete. It was going to specify an ANSI C parser;
+this is part of it.
diff --git a/third_party/python/ply/example/ansic/clex.py b/third_party/python/ply/example/ansic/clex.py
new file mode 100644
index 0000000000..4bde1d730b
--- /dev/null
+++ b/third_party/python/ply/example/ansic/clex.py
@@ -0,0 +1,168 @@
+# ----------------------------------------------------------------------
+# clex.py
+#
+# A lexer for ANSI C.
+# ----------------------------------------------------------------------
+
+import sys
+sys.path.insert(0, "../..")
+
+import ply.lex as lex
+
+# Reserved words
+reserved = (
+ 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST', 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE',
+ 'ELSE', 'ENUM', 'EXTERN', 'FLOAT', 'FOR', 'GOTO', 'IF', 'INT', 'LONG', 'REGISTER',
+ 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT', 'SWITCH', 'TYPEDEF',
+ 'UNION', 'UNSIGNED', 'VOID', 'VOLATILE', 'WHILE',
+)
+
+tokens = reserved + (
+ # Literals (identifier, integer constant, float constant, string constant,
+ # char const)
+ 'ID', 'TYPEID', 'ICONST', 'FCONST', 'SCONST', 'CCONST',
+
+ # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
+ 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
+ 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
+ 'LOR', 'LAND', 'LNOT',
+ 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
+
+ # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
+ 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
+ 'LSHIFTEQUAL', 'RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',
+
+ # Increment/decrement (++,--)
+ 'PLUSPLUS', 'MINUSMINUS',
+
+ # Structure dereference (->)
+ 'ARROW',
+
+ # Conditional operator (?)
+ 'CONDOP',
+
+ # Delimiters ( ) [ ] { } , . ; :
+ 'LPAREN', 'RPAREN',
+ 'LBRACKET', 'RBRACKET',
+ 'LBRACE', 'RBRACE',
+ 'COMMA', 'PERIOD', 'SEMI', 'COLON',
+
+ # Ellipsis (...)
+ 'ELLIPSIS',
+)
+
+# Completely ignored characters
+t_ignore = ' \t\x0c'
+
+# Newlines
+
+
+def t_NEWLINE(t):
+ r'\n+'
+ t.lexer.lineno += t.value.count("\n")
+
+# Operators
+t_PLUS = r'\+'
+t_MINUS = r'-'
+t_TIMES = r'\*'
+t_DIVIDE = r'/'
+t_MOD = r'%'
+t_OR = r'\|'
+t_AND = r'&'
+t_NOT = r'~'
+t_XOR = r'\^'
+t_LSHIFT = r'<<'
+t_RSHIFT = r'>>'
+t_LOR = r'\|\|'
+t_LAND = r'&&'
+t_LNOT = r'!'
+t_LT = r'<'
+t_GT = r'>'
+t_LE = r'<='
+t_GE = r'>='
+t_EQ = r'=='
+t_NE = r'!='
+
+# Assignment operators
+
+t_EQUALS = r'='
+t_TIMESEQUAL = r'\*='
+t_DIVEQUAL = r'/='
+t_MODEQUAL = r'%='
+t_PLUSEQUAL = r'\+='
+t_MINUSEQUAL = r'-='
+t_LSHIFTEQUAL = r'<<='
+t_RSHIFTEQUAL = r'>>='
+t_ANDEQUAL = r'&='
+t_OREQUAL = r'\|='
+t_XOREQUAL = r'\^='
+
+# Increment/decrement
+t_PLUSPLUS = r'\+\+'
+t_MINUSMINUS = r'--'
+
+# ->
+t_ARROW = r'->'
+
+# ?
+t_CONDOP = r'\?'
+
+# Delimiters
+t_LPAREN = r'\('
+t_RPAREN = r'\)'
+t_LBRACKET = r'\['
+t_RBRACKET = r'\]'
+t_LBRACE = r'\{'
+t_RBRACE = r'\}'
+t_COMMA = r','
+t_PERIOD = r'\.'
+t_SEMI = r';'
+t_COLON = r':'
+t_ELLIPSIS = r'\.\.\.'
+
+# Identifiers and reserved words
+
+reserved_map = {}
+for r in reserved:
+ reserved_map[r.lower()] = r
+
+
+def t_ID(t):
+ r'[A-Za-z_][\w_]*'
+ t.type = reserved_map.get(t.value, "ID")
+ return t
+
+# Integer literal
+t_ICONST = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'
+
+# Floating literal
+t_FCONST = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
+
+# String literal
+t_SCONST = r'\"([^\\\n]|(\\.))*?\"'
+
+# Character constant 'c' or L'c'
+t_CCONST = r'(L)?\'([^\\\n]|(\\.))*?\''
+
+# Comments
+
+
+def t_comment(t):
+ r'/\*(.|\n)*?\*/'
+ t.lexer.lineno += t.value.count('\n')
+
+# Preprocessor directive (ignored)
+
+
+def t_preprocessor(t):
+ r'\#(.)*?\n'
+ t.lexer.lineno += 1
+
+
+def t_error(t):
+ print("Illegal character %s" % repr(t.value[0]))
+ t.lexer.skip(1)
+
+lexer = lex.lex()
+if __name__ == "__main__":
+ lex.runmain(lexer)
diff --git a/third_party/python/ply/example/ansic/cparse.py b/third_party/python/ply/example/ansic/cparse.py
new file mode 100644
index 0000000000..5fe9bce042
--- /dev/null
+++ b/third_party/python/ply/example/ansic/cparse.py
@@ -0,0 +1,1048 @@
+# -----------------------------------------------------------------------------
+# cparse.py
+#
+# Simple parser for ANSI C. Based on the grammar in K&R, 2nd Ed.
+# -----------------------------------------------------------------------------
+
+import sys
+import clex
+import ply.yacc as yacc
+
+# Get the token map
+tokens = clex.tokens
+
+# translation-unit:
+
+
+def p_translation_unit_1(t):
+ 'translation_unit : external_declaration'
+ pass
+
+
+def p_translation_unit_2(t):
+ 'translation_unit : translation_unit external_declaration'
+ pass
+
+# external-declaration:
+
+
+def p_external_declaration_1(t):
+ 'external_declaration : function_definition'
+ pass
+
+
+def p_external_declaration_2(t):
+ 'external_declaration : declaration'
+ pass
+
+# function-definition:
+
+
+def p_function_definition_1(t):
+ 'function_definition : declaration_specifiers declarator declaration_list compound_statement'
+ pass
+
+
+def p_function_definition_2(t):
+ 'function_definition : declarator declaration_list compound_statement'
+ pass
+
+
+def p_function_definition_3(t):
+ 'function_definition : declarator compound_statement'
+ pass
+
+
+def p_function_definition_4(t):
+ 'function_definition : declaration_specifiers declarator compound_statement'
+ pass
+
+# declaration:
+
+
+def p_declaration_1(t):
+ 'declaration : declaration_specifiers init_declarator_list SEMI'
+ pass
+
+
+def p_declaration_2(t):
+ 'declaration : declaration_specifiers SEMI'
+ pass
+
+# declaration-list:
+
+
+def p_declaration_list_1(t):
+ 'declaration_list : declaration'
+ pass
+
+
+def p_declaration_list_2(t):
+ 'declaration_list : declaration_list declaration '
+ pass
+
+# declaration-specifiers
+
+
+def p_declaration_specifiers_1(t):
+ 'declaration_specifiers : storage_class_specifier declaration_specifiers'
+ pass
+
+
+def p_declaration_specifiers_2(t):
+ 'declaration_specifiers : type_specifier declaration_specifiers'
+ pass
+
+
+def p_declaration_specifiers_3(t):
+ 'declaration_specifiers : type_qualifier declaration_specifiers'
+ pass
+
+
+def p_declaration_specifiers_4(t):
+ 'declaration_specifiers : storage_class_specifier'
+ pass
+
+
+def p_declaration_specifiers_5(t):
+ 'declaration_specifiers : type_specifier'
+ pass
+
+
+def p_declaration_specifiers_6(t):
+ 'declaration_specifiers : type_qualifier'
+ pass
+
+# storage-class-specifier
+
+
+def p_storage_class_specifier(t):
+ '''storage_class_specifier : AUTO
+ | REGISTER
+ | STATIC
+ | EXTERN
+ | TYPEDEF
+ '''
+ pass
+
+# type-specifier:
+
+
+def p_type_specifier(t):
+ '''type_specifier : VOID
+ | CHAR
+ | SHORT
+ | INT
+ | LONG
+ | FLOAT
+ | DOUBLE
+ | SIGNED
+ | UNSIGNED
+ | struct_or_union_specifier
+ | enum_specifier
+ | TYPEID
+ '''
+ pass
+
+# type-qualifier:
+
+
+def p_type_qualifier(t):
+ '''type_qualifier : CONST
+ | VOLATILE'''
+ pass
+
+# struct-or-union-specifier
+
+
+def p_struct_or_union_specifier_1(t):
+ 'struct_or_union_specifier : struct_or_union ID LBRACE struct_declaration_list RBRACE'
+ pass
+
+
+def p_struct_or_union_specifier_2(t):
+ 'struct_or_union_specifier : struct_or_union LBRACE struct_declaration_list RBRACE'
+ pass
+
+
+def p_struct_or_union_specifier_3(t):
+ 'struct_or_union_specifier : struct_or_union ID'
+ pass
+
+# struct-or-union:
+
+
+def p_struct_or_union(t):
+ '''struct_or_union : STRUCT
+ | UNION
+ '''
+ pass
+
+# struct-declaration-list:
+
+
+def p_struct_declaration_list_1(t):
+ 'struct_declaration_list : struct_declaration'
+ pass
+
+
+def p_struct_declaration_list_2(t):
+ 'struct_declaration_list : struct_declaration_list struct_declaration'
+ pass
+
+# init-declarator-list:
+
+
+def p_init_declarator_list_1(t):
+ 'init_declarator_list : init_declarator'
+ pass
+
+
+def p_init_declarator_list_2(t):
+ 'init_declarator_list : init_declarator_list COMMA init_declarator'
+ pass
+
+# init-declarator
+
+
+def p_init_declarator_1(t):
+ 'init_declarator : declarator'
+ pass
+
+
+def p_init_declarator_2(t):
+ 'init_declarator : declarator EQUALS initializer'
+ pass
+
+# struct-declaration:
+
+
+def p_struct_declaration(t):
+ 'struct_declaration : specifier_qualifier_list struct_declarator_list SEMI'
+ pass
+
+# specifier-qualifier-list:
+
+
+def p_specifier_qualifier_list_1(t):
+ 'specifier_qualifier_list : type_specifier specifier_qualifier_list'
+ pass
+
+
+def p_specifier_qualifier_list_2(t):
+ 'specifier_qualifier_list : type_specifier'
+ pass
+
+
+def p_specifier_qualifier_list_3(t):
+ 'specifier_qualifier_list : type_qualifier specifier_qualifier_list'
+ pass
+
+
+def p_specifier_qualifier_list_4(t):
+ 'specifier_qualifier_list : type_qualifier'
+ pass
+
+# struct-declarator-list:
+
+
+def p_struct_declarator_list_1(t):
+ 'struct_declarator_list : struct_declarator'
+ pass
+
+
+def p_struct_declarator_list_2(t):
+ 'struct_declarator_list : struct_declarator_list COMMA struct_declarator'
+ pass
+
+# struct-declarator:
+
+
+def p_struct_declarator_1(t):
+ 'struct_declarator : declarator'
+ pass
+
+
+def p_struct_declarator_2(t):
+ 'struct_declarator : declarator COLON constant_expression'
+ pass
+
+
+def p_struct_declarator_3(t):
+ 'struct_declarator : COLON constant_expression'
+ pass
+
+# enum-specifier:
+
+
+def p_enum_specifier_1(t):
+ 'enum_specifier : ENUM ID LBRACE enumerator_list RBRACE'
+ pass
+
+
+def p_enum_specifier_2(t):
+ 'enum_specifier : ENUM LBRACE enumerator_list RBRACE'
+ pass
+
+
+def p_enum_specifier_3(t):
+ 'enum_specifier : ENUM ID'
+ pass
+
+# enumerator_list:
+
+
+def p_enumerator_list_1(t):
+ 'enumerator_list : enumerator'
+ pass
+
+
+def p_enumerator_list_2(t):
+ 'enumerator_list : enumerator_list COMMA enumerator'
+ pass
+
+# enumerator:
+
+
+def p_enumerator_1(t):
+ 'enumerator : ID'
+ pass
+
+
+def p_enumerator_2(t):
+ 'enumerator : ID EQUALS constant_expression'
+ pass
+
+# declarator:
+
+
+def p_declarator_1(t):
+ 'declarator : pointer direct_declarator'
+ pass
+
+
+def p_declarator_2(t):
+ 'declarator : direct_declarator'
+ pass
+
+# direct-declarator:
+
+
+def p_direct_declarator_1(t):
+ 'direct_declarator : ID'
+ pass
+
+
+def p_direct_declarator_2(t):
+ 'direct_declarator : LPAREN declarator RPAREN'
+ pass
+
+
+def p_direct_declarator_3(t):
+ 'direct_declarator : direct_declarator LBRACKET constant_expression_opt RBRACKET'
+ pass
+
+
+def p_direct_declarator_4(t):
+ 'direct_declarator : direct_declarator LPAREN parameter_type_list RPAREN '
+ pass
+
+
+def p_direct_declarator_5(t):
+ 'direct_declarator : direct_declarator LPAREN identifier_list RPAREN '
+ pass
+
+
+def p_direct_declarator_6(t):
+ 'direct_declarator : direct_declarator LPAREN RPAREN '
+ pass
+
+# pointer:
+
+
+def p_pointer_1(t):
+ 'pointer : TIMES type_qualifier_list'
+ pass
+
+
+def p_pointer_2(t):
+ 'pointer : TIMES'
+ pass
+
+
+def p_pointer_3(t):
+ 'pointer : TIMES type_qualifier_list pointer'
+ pass
+
+
+def p_pointer_4(t):
+ 'pointer : TIMES pointer'
+ pass
+
+# type-qualifier-list:
+
+
+def p_type_qualifier_list_1(t):
+ 'type_qualifier_list : type_qualifier'
+ pass
+
+
+def p_type_qualifier_list_2(t):
+ 'type_qualifier_list : type_qualifier_list type_qualifier'
+ pass
+
+# parameter-type-list:
+
+
+def p_parameter_type_list_1(t):
+ 'parameter_type_list : parameter_list'
+ pass
+
+
+def p_parameter_type_list_2(t):
+ 'parameter_type_list : parameter_list COMMA ELLIPSIS'
+ pass
+
+# parameter-list:
+
+
+def p_parameter_list_1(t):
+ 'parameter_list : parameter_declaration'
+ pass
+
+
+def p_parameter_list_2(t):
+ 'parameter_list : parameter_list COMMA parameter_declaration'
+ pass
+
+# parameter-declaration:
+
+
+def p_parameter_declaration_1(t):
+ 'parameter_declaration : declaration_specifiers declarator'
+ pass
+
+
+def p_parameter_declaration_2(t):
+ 'parameter_declaration : declaration_specifiers abstract_declarator_opt'
+ pass
+
+# identifier-list:
+
+
+def p_identifier_list_1(t):
+ 'identifier_list : ID'
+ pass
+
+
+def p_identifier_list_2(t):
+ 'identifier_list : identifier_list COMMA ID'
+ pass
+
+# initializer:
+
+
+def p_initializer_1(t):
+ 'initializer : assignment_expression'
+ pass
+
+
+def p_initializer_2(t):
+ '''initializer : LBRACE initializer_list RBRACE
+ | LBRACE initializer_list COMMA RBRACE'''
+ pass
+
+# initializer-list:
+
+
+def p_initializer_list_1(t):
+ 'initializer_list : initializer'
+ pass
+
+
+def p_initializer_list_2(t):
+ 'initializer_list : initializer_list COMMA initializer'
+ pass
+
+# type-name:
+
+
+def p_type_name(t):
+ 'type_name : specifier_qualifier_list abstract_declarator_opt'
+ pass
+
+
+def p_abstract_declarator_opt_1(t):
+ 'abstract_declarator_opt : empty'
+ pass
+
+
+def p_abstract_declarator_opt_2(t):
+ 'abstract_declarator_opt : abstract_declarator'
+ pass
+
+# abstract-declarator:
+
+
+def p_abstract_declarator_1(t):
+ 'abstract_declarator : pointer '
+ pass
+
+
+def p_abstract_declarator_2(t):
+ 'abstract_declarator : pointer direct_abstract_declarator'
+ pass
+
+
+def p_abstract_declarator_3(t):
+ 'abstract_declarator : direct_abstract_declarator'
+ pass
+
+# direct-abstract-declarator:
+
+
+def p_direct_abstract_declarator_1(t):
+ 'direct_abstract_declarator : LPAREN abstract_declarator RPAREN'
+ pass
+
+
+def p_direct_abstract_declarator_2(t):
+ 'direct_abstract_declarator : direct_abstract_declarator LBRACKET constant_expression_opt RBRACKET'
+ pass
+
+
+def p_direct_abstract_declarator_3(t):
+ 'direct_abstract_declarator : LBRACKET constant_expression_opt RBRACKET'
+ pass
+
+
+def p_direct_abstract_declarator_4(t):
+ 'direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN'
+ pass
+
+
+def p_direct_abstract_declarator_5(t):
+ 'direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN'
+ pass
+
+# Optional fields in abstract declarators
+
+
+def p_constant_expression_opt_1(t):
+ 'constant_expression_opt : empty'
+ pass
+
+
+def p_constant_expression_opt_2(t):
+ 'constant_expression_opt : constant_expression'
+ pass
+
+
+def p_parameter_type_list_opt_1(t):
+ 'parameter_type_list_opt : empty'
+ pass
+
+
+def p_parameter_type_list_opt_2(t):
+ 'parameter_type_list_opt : parameter_type_list'
+ pass
+
+# statement:
+
+
+def p_statement(t):
+ '''
+ statement : labeled_statement
+ | expression_statement
+ | compound_statement
+ | selection_statement
+ | iteration_statement
+ | jump_statement
+ '''
+ pass
+
+# labeled-statement:
+
+
+def p_labeled_statement_1(t):
+ 'labeled_statement : ID COLON statement'
+ pass
+
+
+def p_labeled_statement_2(t):
+ 'labeled_statement : CASE constant_expression COLON statement'
+ pass
+
+
+def p_labeled_statement_3(t):
+ 'labeled_statement : DEFAULT COLON statement'
+ pass
+
+# expression-statement:
+
+
+def p_expression_statement(t):
+ 'expression_statement : expression_opt SEMI'
+ pass
+
+# compound-statement:
+
+
+def p_compound_statement_1(t):
+ 'compound_statement : LBRACE declaration_list statement_list RBRACE'
+ pass
+
+
+def p_compound_statement_2(t):
+ 'compound_statement : LBRACE statement_list RBRACE'
+ pass
+
+
+def p_compound_statement_3(t):
+ 'compound_statement : LBRACE declaration_list RBRACE'
+ pass
+
+
+def p_compound_statement_4(t):
+ 'compound_statement : LBRACE RBRACE'
+ pass
+
+# statement-list:
+
+
+def p_statement_list_1(t):
+ 'statement_list : statement'
+ pass
+
+
+def p_statement_list_2(t):
+ 'statement_list : statement_list statement'
+ pass
+
+# selection-statement
+
+
+def p_selection_statement_1(t):
+ 'selection_statement : IF LPAREN expression RPAREN statement'
+ pass
+
+
+def p_selection_statement_2(t):
+ 'selection_statement : IF LPAREN expression RPAREN statement ELSE statement '
+ pass
+
+
+def p_selection_statement_3(t):
+ 'selection_statement : SWITCH LPAREN expression RPAREN statement '
+ pass
+
+# iteration_statement:
+
+
+def p_iteration_statement_1(t):
+ 'iteration_statement : WHILE LPAREN expression RPAREN statement'
+ pass
+
+
+def p_iteration_statement_2(t):
+ 'iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement '
+ pass
+
+
+def p_iteration_statement_3(t):
+ 'iteration_statement : DO statement WHILE LPAREN expression RPAREN SEMI'
+ pass
+
+# jump_statement:
+
+
+def p_jump_statement_1(t):
+ 'jump_statement : GOTO ID SEMI'
+ pass
+
+
+def p_jump_statement_2(t):
+ 'jump_statement : CONTINUE SEMI'
+ pass
+
+
+def p_jump_statement_3(t):
+ 'jump_statement : BREAK SEMI'
+ pass
+
+
+def p_jump_statement_4(t):
+ 'jump_statement : RETURN expression_opt SEMI'
+ pass
+
+
+def p_expression_opt_1(t):
+ 'expression_opt : empty'
+ pass
+
+
+def p_expression_opt_2(t):
+ 'expression_opt : expression'
+ pass
+
+# expression:
+
+
+def p_expression_1(t):
+ 'expression : assignment_expression'
+ pass
+
+
+def p_expression_2(t):
+ 'expression : expression COMMA assignment_expression'
+ pass
+
+# assignment_expression:
+
+
+def p_assignment_expression_1(t):
+ 'assignment_expression : conditional_expression'
+ pass
+
+
+def p_assignment_expression_2(t):
+ 'assignment_expression : unary_expression assignment_operator assignment_expression'
+ pass
+
+# assignment_operator:
+
+
+def p_assignment_operator(t):
+ '''
+ assignment_operator : EQUALS
+ | TIMESEQUAL
+ | DIVEQUAL
+ | MODEQUAL
+ | PLUSEQUAL
+ | MINUSEQUAL
+ | LSHIFTEQUAL
+ | RSHIFTEQUAL
+ | ANDEQUAL
+ | OREQUAL
+ | XOREQUAL
+ '''
+ pass
+
+# conditional-expression
+
+
+def p_conditional_expression_1(t):
+ 'conditional_expression : logical_or_expression'
+ pass
+
+
+def p_conditional_expression_2(t):
+ 'conditional_expression : logical_or_expression CONDOP expression COLON conditional_expression '
+ pass
+
+# constant-expression
+
+
+def p_constant_expression(t):
+ 'constant_expression : conditional_expression'
+ pass
+
+# logical-or-expression
+
+
+def p_logical_or_expression_1(t):
+ 'logical_or_expression : logical_and_expression'
+ pass
+
+
+def p_logical_or_expression_2(t):
+ 'logical_or_expression : logical_or_expression LOR logical_and_expression'
+ pass
+
+# logical-and-expression
+
+
+def p_logical_and_expression_1(t):
+ 'logical_and_expression : inclusive_or_expression'
+ pass
+
+
+def p_logical_and_expression_2(t):
+ 'logical_and_expression : logical_and_expression LAND inclusive_or_expression'
+ pass
+
+# inclusive-or-expression:
+
+
+def p_inclusive_or_expression_1(t):
+ 'inclusive_or_expression : exclusive_or_expression'
+ pass
+
+
+def p_inclusive_or_expression_2(t):
+ 'inclusive_or_expression : inclusive_or_expression OR exclusive_or_expression'
+ pass
+
+# exclusive-or-expression:
+
+
+def p_exclusive_or_expression_1(t):
+ 'exclusive_or_expression : and_expression'
+ pass
+
+
+def p_exclusive_or_expression_2(t):
+ 'exclusive_or_expression : exclusive_or_expression XOR and_expression'
+ pass
+
+# AND-expression
+
+
+def p_and_expression_1(t):
+ 'and_expression : equality_expression'
+ pass
+
+
+def p_and_expression_2(t):
+ 'and_expression : and_expression AND equality_expression'
+ pass
+
+
+# equality-expression:
+def p_equality_expression_1(t):
+ 'equality_expression : relational_expression'
+ pass
+
+
+def p_equality_expression_2(t):
+ 'equality_expression : equality_expression EQ relational_expression'
+ pass
+
+
+def p_equality_expression_3(t):
+ 'equality_expression : equality_expression NE relational_expression'
+ pass
+
+
+# relational-expression:
+def p_relational_expression_1(t):
+ 'relational_expression : shift_expression'
+ pass
+
+
+def p_relational_expression_2(t):
+ 'relational_expression : relational_expression LT shift_expression'
+ pass
+
+
+def p_relational_expression_3(t):
+ 'relational_expression : relational_expression GT shift_expression'
+ pass
+
+
+def p_relational_expression_4(t):
+ 'relational_expression : relational_expression LE shift_expression'
+ pass
+
+
+def p_relational_expression_5(t):
+ 'relational_expression : relational_expression GE shift_expression'
+ pass
+
+# shift-expression
+
+
+def p_shift_expression_1(t):
+ 'shift_expression : additive_expression'
+ pass
+
+
+def p_shift_expression_2(t):
+ 'shift_expression : shift_expression LSHIFT additive_expression'
+ pass
+
+
+def p_shift_expression_3(t):
+ 'shift_expression : shift_expression RSHIFT additive_expression'
+ pass
+
+# additive-expression
+
+
+def p_additive_expression_1(t):
+ 'additive_expression : multiplicative_expression'
+ pass
+
+
+def p_additive_expression_2(t):
+ 'additive_expression : additive_expression PLUS multiplicative_expression'
+ pass
+
+
+def p_additive_expression_3(t):
+ 'additive_expression : additive_expression MINUS multiplicative_expression'
+ pass
+
+# multiplicative-expression
+
+
+def p_multiplicative_expression_1(t):
+ 'multiplicative_expression : cast_expression'
+ pass
+
+
+def p_multiplicative_expression_2(t):
+ 'multiplicative_expression : multiplicative_expression TIMES cast_expression'
+ pass
+
+
+def p_multiplicative_expression_3(t):
+ 'multiplicative_expression : multiplicative_expression DIVIDE cast_expression'
+ pass
+
+
+def p_multiplicative_expression_4(t):
+ 'multiplicative_expression : multiplicative_expression MOD cast_expression'
+ pass
+
+# cast-expression:
+
+
+def p_cast_expression_1(t):
+ 'cast_expression : unary_expression'
+ pass
+
+
+def p_cast_expression_2(t):
+ 'cast_expression : LPAREN type_name RPAREN cast_expression'
+ pass
+
+# unary-expression:
+
+
+def p_unary_expression_1(t):
+ 'unary_expression : postfix_expression'
+ pass
+
+
+def p_unary_expression_2(t):
+ 'unary_expression : PLUSPLUS unary_expression'
+ pass
+
+
+def p_unary_expression_3(t):
+ 'unary_expression : MINUSMINUS unary_expression'
+ pass
+
+
+def p_unary_expression_4(t):
+ 'unary_expression : unary_operator cast_expression'
+ pass
+
+
+def p_unary_expression_5(t):
+ 'unary_expression : SIZEOF unary_expression'
+ pass
+
+
+def p_unary_expression_6(t):
+ 'unary_expression : SIZEOF LPAREN type_name RPAREN'
+ pass
+
+# unary-operator
+
+
+def p_unary_operator(t):
+ '''unary_operator : AND
+ | TIMES
+ | PLUS
+ | MINUS
+ | NOT
+ | LNOT '''
+ pass
+
+# postfix-expression:
+
+
+def p_postfix_expression_1(t):
+ 'postfix_expression : primary_expression'
+ pass
+
+
+def p_postfix_expression_2(t):
+ 'postfix_expression : postfix_expression LBRACKET expression RBRACKET'
+ pass
+
+
+def p_postfix_expression_3(t):
+ 'postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN'
+ pass
+
+
+def p_postfix_expression_4(t):
+ 'postfix_expression : postfix_expression LPAREN RPAREN'
+ pass
+
+
+def p_postfix_expression_5(t):
+ 'postfix_expression : postfix_expression PERIOD ID'
+ pass
+
+
+def p_postfix_expression_6(t):
+ 'postfix_expression : postfix_expression ARROW ID'
+ pass
+
+
+def p_postfix_expression_7(t):
+ 'postfix_expression : postfix_expression PLUSPLUS'
+ pass
+
+
+def p_postfix_expression_8(t):
+ 'postfix_expression : postfix_expression MINUSMINUS'
+ pass
+
+# primary-expression:
+
+
+def p_primary_expression(t):
+ '''primary_expression : ID
+ | constant
+ | SCONST
+ | LPAREN expression RPAREN'''
+ pass
+
+# argument-expression-list:
+
+
+def p_argument_expression_list(t):
+ '''argument_expression_list : assignment_expression
+ | argument_expression_list COMMA assignment_expression'''
+ pass
+
+# constant:
+
+
+def p_constant(t):
+ '''constant : ICONST
+ | FCONST
+ | CCONST'''
+ pass
+
+
+def p_empty(t):
+ 'empty : '
+ pass
+
+
+def p_error(t):
+ print("Whoa. We're hosed")
+
+import profile
+# Build the grammar
+
+yacc.yacc()
+#yacc.yacc(method='LALR',write_tables=False,debug=False)
+
+#profile.run("yacc.yacc(method='LALR')")
diff --git a/third_party/python/ply/example/calc/calc.py b/third_party/python/ply/example/calc/calc.py
new file mode 100644
index 0000000000..824c3d7d0a
--- /dev/null
+++ b/third_party/python/ply/example/calc/calc.py
@@ -0,0 +1,123 @@
+# -----------------------------------------------------------------------------
+# calc.py
+#
+# A simple calculator with variables. This is from O'Reilly's
+# "Lex and Yacc", p. 63.
+# -----------------------------------------------------------------------------
+
+import sys
+sys.path.insert(0, "../..")
+
+if sys.version_info[0] >= 3:
+ raw_input = input
+
+tokens = (
+ 'NAME', 'NUMBER',
+)
+
+literals = ['=', '+', '-', '*', '/', '(', ')']
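+
+# The single-character tokens listed in `literals` can be used directly as
+# quoted literals in the grammar rules below, e.g. 'statement : NAME "=" expression'.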
+
+# Tokens
+
+t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+
+def t_NUMBER(t):
+ r'\d+'
+ t.value = int(t.value)
+ return t
+
+t_ignore = " \t"
+
+
+def t_newline(t):
+ r'\n+'
+ t.lexer.lineno += t.value.count("\n")
+
+
+def t_error(t):
+ print("Illegal character '%s'" % t.value[0])
+ t.lexer.skip(1)
+
+# Build the lexer
+import ply.lex as lex
+lex.lex()
+
+# Parsing rules
+
+precedence = (
+ ('left', '+', '-'),
+ ('left', '*', '/'),
+ ('right', 'UMINUS'),
+)
+
+# dictionary of names
+names = {}
+
+
+def p_statement_assign(p):
+ 'statement : NAME "=" expression'
+ names[p[1]] = p[3]
+
+
+def p_statement_expr(p):
+ 'statement : expression'
+ print(p[1])
+
+
+def p_expression_binop(p):
+ '''expression : expression '+' expression
+ | expression '-' expression
+ | expression '*' expression
+ | expression '/' expression'''
+ if p[2] == '+':
+ p[0] = p[1] + p[3]
+ elif p[2] == '-':
+ p[0] = p[1] - p[3]
+ elif p[2] == '*':
+ p[0] = p[1] * p[3]
+ elif p[2] == '/':
+ p[0] = p[1] / p[3]
+
+
+def p_expression_uminus(p):
+ "expression : '-' expression %prec UMINUS"
+ p[0] = -p[2]
+
+
+def p_expression_group(p):
+ "expression : '(' expression ')'"
+ p[0] = p[2]
+
+
+def p_expression_number(p):
+ "expression : NUMBER"
+ p[0] = p[1]
+
+
+def p_expression_name(p):
+ "expression : NAME"
+ try:
+ p[0] = names[p[1]]
+ except LookupError:
+ print("Undefined name '%s'" % p[1])
+ p[0] = 0
+
+
+def p_error(p):
+ if p:
+ print("Syntax error at '%s'" % p.value)
+ else:
+ print("Syntax error at EOF")
+
+import ply.yacc as yacc
+yacc.yacc()
+
+while 1:
+ try:
+ s = raw_input('calc > ')
+ except EOFError:
+ break
+ if not s:
+ continue
+ yacc.parse(s)
diff --git a/third_party/python/ply/example/calcdebug/calc.py b/third_party/python/ply/example/calcdebug/calc.py
new file mode 100644
index 0000000000..06831e2ca5
--- /dev/null
+++ b/third_party/python/ply/example/calcdebug/calc.py
@@ -0,0 +1,129 @@
+# -----------------------------------------------------------------------------
+# calc.py
+#
+# This example shows how to run the parser in a debugging mode
+# with output routed to a logging object.
+# -----------------------------------------------------------------------------
+
+import sys
+sys.path.insert(0, "../..")
+
+if sys.version_info[0] >= 3:
+ raw_input = input
+
+tokens = (
+ 'NAME', 'NUMBER',
+)
+
+literals = ['=', '+', '-', '*', '/', '(', ')']
+
+# Tokens
+
+t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+
+def t_NUMBER(t):
+ r'\d+'
+ t.value = int(t.value)
+ return t
+
+t_ignore = " \t"
+
+
+def t_newline(t):
+ r'\n+'
+ t.lexer.lineno += t.value.count("\n")
+
+
+def t_error(t):
+ print("Illegal character '%s'" % t.value[0])
+ t.lexer.skip(1)
+
+# Build the lexer
+import ply.lex as lex
+lex.lex()
+
+# Parsing rules
+
+precedence = (
+ ('left', '+', '-'),
+ ('left', '*', '/'),
+ ('right', 'UMINUS'),
+)
+
+# dictionary of names
+names = {}
+
+
+def p_statement_assign(p):
+ 'statement : NAME "=" expression'
+ names[p[1]] = p[3]
+
+
+def p_statement_expr(p):
+ 'statement : expression'
+ print(p[1])
+
+
+def p_expression_binop(p):
+ '''expression : expression '+' expression
+ | expression '-' expression
+ | expression '*' expression
+ | expression '/' expression'''
+ if p[2] == '+':
+ p[0] = p[1] + p[3]
+ elif p[2] == '-':
+ p[0] = p[1] - p[3]
+ elif p[2] == '*':
+ p[0] = p[1] * p[3]
+ elif p[2] == '/':
+ p[0] = p[1] / p[3]
+
+
+def p_expression_uminus(p):
+ "expression : '-' expression %prec UMINUS"
+ p[0] = -p[2]
+
+
+def p_expression_group(p):
+ "expression : '(' expression ')'"
+ p[0] = p[2]
+
+
+def p_expression_number(p):
+ "expression : NUMBER"
+ p[0] = p[1]
+
+
+def p_expression_name(p):
+ "expression : NAME"
+ try:
+ p[0] = names[p[1]]
+ except LookupError:
+ print("Undefined name '%s'" % p[1])
+ p[0] = 0
+
+
+def p_error(p):
+ if p:
+ print("Syntax error at '%s'" % p.value)
+ else:
+ print("Syntax error at EOF")
+
+import ply.yacc as yacc
+yacc.yacc()
+
+import logging
+logging.basicConfig(
+ level=logging.INFO,
+ filename="parselog.txt"
+)
+
+while 1:
+ try:
+ s = raw_input('calc > ')
+ except EOFError:
+ break
+ if not s:
+ continue
+ yacc.parse(s, debug=logging.getLogger())
diff --git a/third_party/python/ply/example/calceof/calc.py b/third_party/python/ply/example/calceof/calc.py
new file mode 100644
index 0000000000..22b39a41a8
--- /dev/null
+++ b/third_party/python/ply/example/calceof/calc.py
@@ -0,0 +1,132 @@
+# -----------------------------------------------------------------------------
+# calc.py
+#
+# A simple calculator with variables. Asks the user for more input and
+# demonstrates the use of the t_eof() rule.
+# -----------------------------------------------------------------------------
+
+import sys
+sys.path.insert(0, "../..")
+
+if sys.version_info[0] >= 3:
+ raw_input = input
+
+tokens = (
+ 'NAME', 'NUMBER',
+)
+
+literals = ['=', '+', '-', '*', '/', '(', ')']
+
+# Tokens
+
+t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+
+def t_NUMBER(t):
+ r'\d+'
+ t.value = int(t.value)
+ return t
+
+t_ignore = " \t"
+
+
+def t_newline(t):
+ r'\n+'
+ t.lexer.lineno += t.value.count("\n")
+
+
+def t_eof(t):
+ more = raw_input('... ')
+ if more:
+ t.lexer.input(more + '\n')
+ return t.lexer.token()
+ else:
+ return None
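+
+# Note: if t_eof() returns a token (as above, after feeding the lexer more
+# input), scanning simply continues; returning None reports a real end-of-file
+# to the parser.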
+
+
+def t_error(t):
+ print("Illegal character '%s'" % t.value[0])
+ t.lexer.skip(1)
+
+# Build the lexer
+import ply.lex as lex
+lex.lex()
+
+# Parsing rules
+
+precedence = (
+ ('left', '+', '-'),
+ ('left', '*', '/'),
+ ('right', 'UMINUS'),
+)
+
+# dictionary of names
+names = {}
+
+
+def p_statement_assign(p):
+ 'statement : NAME "=" expression'
+ names[p[1]] = p[3]
+
+
+def p_statement_expr(p):
+ 'statement : expression'
+ print(p[1])
+
+
+def p_expression_binop(p):
+ '''expression : expression '+' expression
+ | expression '-' expression
+ | expression '*' expression
+ | expression '/' expression'''
+ if p[2] == '+':
+ p[0] = p[1] + p[3]
+ elif p[2] == '-':
+ p[0] = p[1] - p[3]
+ elif p[2] == '*':
+ p[0] = p[1] * p[3]
+ elif p[2] == '/':
+ p[0] = p[1] / p[3]
+
+
+def p_expression_uminus(p):
+ "expression : '-' expression %prec UMINUS"
+ p[0] = -p[2]
+
+
+def p_expression_group(p):
+ "expression : '(' expression ')'"
+ p[0] = p[2]
+
+
+def p_expression_number(p):
+ "expression : NUMBER"
+ p[0] = p[1]
+
+
+def p_expression_name(p):
+ "expression : NAME"
+ try:
+ p[0] = names[p[1]]
+ except LookupError:
+ print("Undefined name '%s'" % p[1])
+ p[0] = 0
+
+
+def p_error(p):
+ if p:
+ print("Syntax error at '%s'" % p.value)
+ else:
+ print("Syntax error at EOF")
+
+import ply.yacc as yacc
+yacc.yacc()
+
+while 1:
+ try:
+ s = raw_input('calc > ')
+ except EOFError:
+ break
+ if not s:
+ continue
+ yacc.parse(s + '\n')
diff --git a/third_party/python/ply/example/classcalc/calc.py b/third_party/python/ply/example/classcalc/calc.py
new file mode 100755
index 0000000000..ada4afd426
--- /dev/null
+++ b/third_party/python/ply/example/classcalc/calc.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python
+
+# -----------------------------------------------------------------------------
+# calc.py
+#
+# A simple calculator with variables. This is from O'Reilly's
+# "Lex and Yacc", p. 63.
+#
+# Class-based example contributed to PLY by David McNab
+# -----------------------------------------------------------------------------
+
+import sys
+sys.path.insert(0, "../..")
+
+if sys.version_info[0] >= 3:
+ raw_input = input
+
+import ply.lex as lex
+import ply.yacc as yacc
+import os
+
+
+class Parser:
+ """
+ Base class for a lexer/parser that has the rules defined as methods
+ """
+ tokens = ()
+ precedence = ()
+
+ def __init__(self, **kw):
+ self.debug = kw.get('debug', 0)
+ self.names = {}
+ try:
+ modname = os.path.split(os.path.splitext(__file__)[0])[
+ 1] + "_" + self.__class__.__name__
+ except:
+ modname = "parser" + "_" + self.__class__.__name__
+ self.debugfile = modname + ".dbg"
+ self.tabmodule = modname + "_" + "parsetab"
+ # print self.debugfile, self.tabmodule
+
+ # Build the lexer and parser
+ lex.lex(module=self, debug=self.debug)
+ yacc.yacc(module=self,
+ debug=self.debug,
+ debugfile=self.debugfile,
+ tabmodule=self.tabmodule)
+
+ def run(self):
+ while 1:
+ try:
+ s = raw_input('calc > ')
+ except EOFError:
+ break
+ if not s:
+ continue
+ yacc.parse(s)
+
+
+class Calc(Parser):
+
+ tokens = (
+ 'NAME', 'NUMBER',
+ 'PLUS', 'MINUS', 'EXP', 'TIMES', 'DIVIDE', 'EQUALS',
+ 'LPAREN', 'RPAREN',
+ )
+
+ # Tokens
+
+ t_PLUS = r'\+'
+ t_MINUS = r'-'
+ t_EXP = r'\*\*'
+ t_TIMES = r'\*'
+ t_DIVIDE = r'/'
+ t_EQUALS = r'='
+ t_LPAREN = r'\('
+ t_RPAREN = r'\)'
+ t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+ def t_NUMBER(self, t):
+ r'\d+'
+ try:
+ t.value = int(t.value)
+ except ValueError:
+ print("Integer value too large %s" % t.value)
+ t.value = 0
+ # print "parsed number %s" % repr(t.value)
+ return t
+
+ t_ignore = " \t"
+
+ def t_newline(self, t):
+ r'\n+'
+ t.lexer.lineno += t.value.count("\n")
+
+ def t_error(self, t):
+ print("Illegal character '%s'" % t.value[0])
+ t.lexer.skip(1)
+
+ # Parsing rules
+
+ precedence = (
+ ('left', 'PLUS', 'MINUS'),
+ ('left', 'TIMES', 'DIVIDE'),
+ ('left', 'EXP'),
+ ('right', 'UMINUS'),
+ )
+
+ def p_statement_assign(self, p):
+ 'statement : NAME EQUALS expression'
+ self.names[p[1]] = p[3]
+
+ def p_statement_expr(self, p):
+ 'statement : expression'
+ print(p[1])
+
+ def p_expression_binop(self, p):
+ """
+ expression : expression PLUS expression
+ | expression MINUS expression
+ | expression TIMES expression
+ | expression DIVIDE expression
+ | expression EXP expression
+ """
+ # print [repr(p[i]) for i in range(0,4)]
+ if p[2] == '+':
+ p[0] = p[1] + p[3]
+ elif p[2] == '-':
+ p[0] = p[1] - p[3]
+ elif p[2] == '*':
+ p[0] = p[1] * p[3]
+ elif p[2] == '/':
+ p[0] = p[1] / p[3]
+ elif p[2] == '**':
+ p[0] = p[1] ** p[3]
+
+ def p_expression_uminus(self, p):
+ 'expression : MINUS expression %prec UMINUS'
+ p[0] = -p[2]
+
+ def p_expression_group(self, p):
+ 'expression : LPAREN expression RPAREN'
+ p[0] = p[2]
+
+ def p_expression_number(self, p):
+ 'expression : NUMBER'
+ p[0] = p[1]
+
+ def p_expression_name(self, p):
+ 'expression : NAME'
+ try:
+ p[0] = self.names[p[1]]
+ except LookupError:
+ print("Undefined name '%s'" % p[1])
+ p[0] = 0
+
+ def p_error(self, p):
+ if p:
+ print("Syntax error at '%s'" % p.value)
+ else:
+ print("Syntax error at EOF")
+
+if __name__ == '__main__':
+ calc = Calc()
+ calc.run()
diff --git a/third_party/python/ply/example/cleanup.sh b/third_party/python/ply/example/cleanup.sh
new file mode 100755
index 0000000000..3e115f41c4
--- /dev/null
+++ b/third_party/python/ply/example/cleanup.sh
@@ -0,0 +1,2 @@
+#!/bin/sh
+rm -f */*.pyc */parsetab.py */parser.out */*~ */*.class
diff --git a/third_party/python/ply/example/closurecalc/calc.py b/third_party/python/ply/example/closurecalc/calc.py
new file mode 100644
index 0000000000..6031b05813
--- /dev/null
+++ b/third_party/python/ply/example/closurecalc/calc.py
@@ -0,0 +1,132 @@
+# -----------------------------------------------------------------------------
+# calc.py
+#
+# A calculator parser that makes use of closures. The function make_calculator()
+# returns a function that accepts an input string and returns a result. All
+# lexing rules, parsing rules, and internal state are held inside the function.
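+#
+# For instance (an illustrative sketch of what the code below provides):
+#
+#     calc = make_calculator()
+#     calc("x = 3")      # assignment; returns None
+#     calc("x * 2")      # returns 6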
+# -----------------------------------------------------------------------------
+
+import sys
+sys.path.insert(0, "../..")
+
+if sys.version_info[0] >= 3:
+ raw_input = input
+
+# Make a calculator function
+
+
+def make_calculator():
+ import ply.lex as lex
+ import ply.yacc as yacc
+
+ # ------- Internal calculator state
+
+ variables = {} # Dictionary of stored variables
+
+ # ------- Calculator tokenizing rules
+
+ tokens = (
+ 'NAME', 'NUMBER',
+ )
+
+ literals = ['=', '+', '-', '*', '/', '(', ')']
+
+ t_ignore = " \t"
+
+ t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+ def t_NUMBER(t):
+ r'\d+'
+ t.value = int(t.value)
+ return t
+
+ def t_newline(t):
+ r'\n+'
+ t.lexer.lineno += t.value.count("\n")
+
+ def t_error(t):
+ print("Illegal character '%s'" % t.value[0])
+ t.lexer.skip(1)
+
+ # Build the lexer
+ lexer = lex.lex()
+
+ # ------- Calculator parsing rules
+
+ precedence = (
+ ('left', '+', '-'),
+ ('left', '*', '/'),
+ ('right', 'UMINUS'),
+ )
+
+ def p_statement_assign(p):
+ 'statement : NAME "=" expression'
+ variables[p[1]] = p[3]
+ p[0] = None
+
+ def p_statement_expr(p):
+ 'statement : expression'
+ p[0] = p[1]
+
+ def p_expression_binop(p):
+ '''expression : expression '+' expression
+ | expression '-' expression
+ | expression '*' expression
+ | expression '/' expression'''
+ if p[2] == '+':
+ p[0] = p[1] + p[3]
+ elif p[2] == '-':
+ p[0] = p[1] - p[3]
+ elif p[2] == '*':
+ p[0] = p[1] * p[3]
+ elif p[2] == '/':
+ p[0] = p[1] / p[3]
+
+ def p_expression_uminus(p):
+ "expression : '-' expression %prec UMINUS"
+ p[0] = -p[2]
+
+ def p_expression_group(p):
+ "expression : '(' expression ')'"
+ p[0] = p[2]
+
+ def p_expression_number(p):
+ "expression : NUMBER"
+ p[0] = p[1]
+
+ def p_expression_name(p):
+ "expression : NAME"
+ try:
+ p[0] = variables[p[1]]
+ except LookupError:
+ print("Undefined name '%s'" % p[1])
+ p[0] = 0
+
+ def p_error(p):
+ if p:
+ print("Syntax error at '%s'" % p.value)
+ else:
+ print("Syntax error at EOF")
+
+ # Build the parser
+ parser = yacc.yacc()
+
+ # ------- Input function
+
+ def input(text):
+ result = parser.parse(text, lexer=lexer)
+ return result
+
+ return input
+
+# Make a calculator object and use it
+calc = make_calculator()
+
+while True:
+ try:
+ s = raw_input("calc > ")
+ except EOFError:
+ break
+ r = calc(s)
+ if r:
+ print(r)
diff --git a/third_party/python/ply/example/hedit/hedit.py b/third_party/python/ply/example/hedit/hedit.py
new file mode 100644
index 0000000000..32da745677
--- /dev/null
+++ b/third_party/python/ply/example/hedit/hedit.py
@@ -0,0 +1,48 @@
+# -----------------------------------------------------------------------------
+# hedit.py
+#
+# Parsing of Fortran H Edit descriptors (Contributed by Pearu Peterson)
+#
+# These descriptors can't be tokenized by a regular expression alone because
+# they have the following form:
+#
+# nHc1...cn
+#
+# where n is a positive integer and c1 ... cn are characters.
+#
+# This example shows how to modify the state of the lexer to parse
+# such tokens
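+#
+# For instance (illustrative): given the input "3Habc", the rule below should
+# produce a single H_EDIT_DESCRIPTOR token whose value is "abc", with scanning
+# resuming immediately after those three characters.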
+# -----------------------------------------------------------------------------
+
+import sys
+sys.path.insert(0, "../..")
+
+
+tokens = (
+ 'H_EDIT_DESCRIPTOR',
+)
+
+# Tokens
+t_ignore = " \t\n"
+
+
+def t_H_EDIT_DESCRIPTOR(t):
+    r"\d+H.*"  # This grabs the rest of the current line
+ i = t.value.index('H')
+ n = eval(t.value[:i])
+
+ # Adjust the tokenizing position
+ t.lexer.lexpos -= len(t.value) - (i + 1 + n)
+
+ t.value = t.value[i + 1:i + 1 + n]
+ return t
+
+
+def t_error(t):
+ print("Illegal character '%s'" % t.value[0])
+ t.lexer.skip(1)
+
+# Build the lexer
+import ply.lex as lex
+lex.lex()
+lex.runmain()
diff --git a/third_party/python/ply/example/newclasscalc/calc.py b/third_party/python/ply/example/newclasscalc/calc.py
new file mode 100755
index 0000000000..43c9506a8a
--- /dev/null
+++ b/third_party/python/ply/example/newclasscalc/calc.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python
+
+# -----------------------------------------------------------------------------
+# calc.py
+#
+# A simple calculator with variables. This is from O'Reilly's
+# "Lex and Yacc", p. 63.
+#
+# Class-based example contributed to PLY by David McNab.
+#
+# Modified to use new-style classes. Test case.
+# -----------------------------------------------------------------------------
+
+import sys
+sys.path.insert(0, "../..")
+
+if sys.version_info[0] >= 3:
+ raw_input = input
+
+import ply.lex as lex
+import ply.yacc as yacc
+import os
+
+
+class Parser(object):
+ """
+ Base class for a lexer/parser that has the rules defined as methods
+ """
+ tokens = ()
+ precedence = ()
+
+ def __init__(self, **kw):
+ self.debug = kw.get('debug', 0)
+ self.names = {}
+ try:
+ modname = os.path.split(os.path.splitext(__file__)[0])[
+ 1] + "_" + self.__class__.__name__
+ except:
+ modname = "parser" + "_" + self.__class__.__name__
+ self.debugfile = modname + ".dbg"
+ self.tabmodule = modname + "_" + "parsetab"
+ # print self.debugfile, self.tabmodule
+
+ # Build the lexer and parser
+ lex.lex(module=self, debug=self.debug)
+ yacc.yacc(module=self,
+ debug=self.debug,
+ debugfile=self.debugfile,
+ tabmodule=self.tabmodule)
+
+ def run(self):
+ while 1:
+ try:
+ s = raw_input('calc > ')
+ except EOFError:
+ break
+ if not s:
+ continue
+ yacc.parse(s)
+
+
+class Calc(Parser):
+
+ tokens = (
+ 'NAME', 'NUMBER',
+ 'PLUS', 'MINUS', 'EXP', 'TIMES', 'DIVIDE', 'EQUALS',
+ 'LPAREN', 'RPAREN',
+ )
+
+ # Tokens
+
+ t_PLUS = r'\+'
+ t_MINUS = r'-'
+ t_EXP = r'\*\*'
+ t_TIMES = r'\*'
+ t_DIVIDE = r'/'
+ t_EQUALS = r'='
+ t_LPAREN = r'\('
+ t_RPAREN = r'\)'
+ t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+ def t_NUMBER(self, t):
+ r'\d+'
+ try:
+ t.value = int(t.value)
+ except ValueError:
+ print("Integer value too large %s" % t.value)
+ t.value = 0
+ # print "parsed number %s" % repr(t.value)
+ return t
+
+ t_ignore = " \t"
+
+ def t_newline(self, t):
+ r'\n+'
+ t.lexer.lineno += t.value.count("\n")
+
+ def t_error(self, t):
+ print("Illegal character '%s'" % t.value[0])
+ t.lexer.skip(1)
+
+ # Parsing rules
+
+ precedence = (
+ ('left', 'PLUS', 'MINUS'),
+ ('left', 'TIMES', 'DIVIDE'),
+ ('left', 'EXP'),
+ ('right', 'UMINUS'),
+ )
+
+ def p_statement_assign(self, p):
+ 'statement : NAME EQUALS expression'
+ self.names[p[1]] = p[3]
+
+ def p_statement_expr(self, p):
+ 'statement : expression'
+ print(p[1])
+
+ def p_expression_binop(self, p):
+ """
+ expression : expression PLUS expression
+ | expression MINUS expression
+ | expression TIMES expression
+ | expression DIVIDE expression
+ | expression EXP expression
+ """
+ # print [repr(p[i]) for i in range(0,4)]
+ if p[2] == '+':
+ p[0] = p[1] + p[3]
+ elif p[2] == '-':
+ p[0] = p[1] - p[3]
+ elif p[2] == '*':
+ p[0] = p[1] * p[3]
+ elif p[2] == '/':
+ p[0] = p[1] / p[3]
+ elif p[2] == '**':
+ p[0] = p[1] ** p[3]
+
+ def p_expression_uminus(self, p):
+ 'expression : MINUS expression %prec UMINUS'
+ p[0] = -p[2]
+
+ def p_expression_group(self, p):
+ 'expression : LPAREN expression RPAREN'
+ p[0] = p[2]
+
+ def p_expression_number(self, p):
+ 'expression : NUMBER'
+ p[0] = p[1]
+
+ def p_expression_name(self, p):
+ 'expression : NAME'
+ try:
+ p[0] = self.names[p[1]]
+ except LookupError:
+ print("Undefined name '%s'" % p[1])
+ p[0] = 0
+
+ def p_error(self, p):
+ if p:
+ print("Syntax error at '%s'" % p.value)
+ else:
+ print("Syntax error at EOF")
+
+if __name__ == '__main__':
+ calc = Calc()
+ calc.run()
diff --git a/third_party/python/ply/example/optcalc/README b/third_party/python/ply/example/optcalc/README
new file mode 100644
index 0000000000..53dd5fcd55
--- /dev/null
+++ b/third_party/python/ply/example/optcalc/README
@@ -0,0 +1,9 @@
+An example showing how to use Python optimized mode.
+To run:
+
+ - First run 'python calc.py'
+
+ - Then run 'python -OO calc.py'
+
+If everything is working correctly, the second version should run the
+same way as the first.
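+
+(Background note: the first, normal run is what should generate the cached
+lextab.py and parsetab.py table files; the optimized -OO run then reuses them,
+which matters because -OO strips the docstrings that hold the lexing and
+parsing rules.)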
diff --git a/third_party/python/ply/example/optcalc/calc.py b/third_party/python/ply/example/optcalc/calc.py
new file mode 100644
index 0000000000..0c223e5994
--- /dev/null
+++ b/third_party/python/ply/example/optcalc/calc.py
@@ -0,0 +1,134 @@
+# -----------------------------------------------------------------------------
+# calc.py
+#
+# A simple calculator with variables. This is from O'Reilly's
+# "Lex and Yacc", p. 63.
+# -----------------------------------------------------------------------------
+
+import sys
+sys.path.insert(0, "../..")
+
+if sys.version_info[0] >= 3:
+ raw_input = input
+
+tokens = (
+ 'NAME', 'NUMBER',
+ 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'EQUALS',
+ 'LPAREN', 'RPAREN',
+)
+
+# Tokens
+
+t_PLUS = r'\+'
+t_MINUS = r'-'
+t_TIMES = r'\*'
+t_DIVIDE = r'/'
+t_EQUALS = r'='
+t_LPAREN = r'\('
+t_RPAREN = r'\)'
+t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+
+def t_NUMBER(t):
+ r'\d+'
+ try:
+ t.value = int(t.value)
+ except ValueError:
+ print("Integer value too large %s" % t.value)
+ t.value = 0
+ return t
+
+t_ignore = " \t"
+
+
+def t_newline(t):
+ r'\n+'
+ t.lexer.lineno += t.value.count("\n")
+
+
+def t_error(t):
+ print("Illegal character '%s'" % t.value[0])
+ t.lexer.skip(1)
+
+# Build the lexer
+import ply.lex as lex
+lex.lex(optimize=1)
+
+# Parsing rules
+
+precedence = (
+ ('left', 'PLUS', 'MINUS'),
+ ('left', 'TIMES', 'DIVIDE'),
+ ('right', 'UMINUS'),
+)
+
+# dictionary of names
+names = {}
+
+
+def p_statement_assign(t):
+ 'statement : NAME EQUALS expression'
+ names[t[1]] = t[3]
+
+
+def p_statement_expr(t):
+ 'statement : expression'
+ print(t[1])
+
+
+def p_expression_binop(t):
+ '''expression : expression PLUS expression
+ | expression MINUS expression
+ | expression TIMES expression
+ | expression DIVIDE expression'''
+ if t[2] == '+':
+ t[0] = t[1] + t[3]
+ elif t[2] == '-':
+ t[0] = t[1] - t[3]
+ elif t[2] == '*':
+ t[0] = t[1] * t[3]
+ elif t[2] == '/':
+ t[0] = t[1] / t[3]
+ elif t[2] == '<':
+ t[0] = t[1] < t[3]
+
+
+def p_expression_uminus(t):
+ 'expression : MINUS expression %prec UMINUS'
+ t[0] = -t[2]
+
+
+def p_expression_group(t):
+ 'expression : LPAREN expression RPAREN'
+ t[0] = t[2]
+
+
+def p_expression_number(t):
+ 'expression : NUMBER'
+ t[0] = t[1]
+
+
+def p_expression_name(t):
+ 'expression : NAME'
+ try:
+ t[0] = names[t[1]]
+ except LookupError:
+ print("Undefined name '%s'" % t[1])
+ t[0] = 0
+
+
+def p_error(t):
+ if t:
+ print("Syntax error at '%s'" % t.value)
+ else:
+ print("Syntax error at EOF")
+
+import ply.yacc as yacc
+yacc.yacc(optimize=1)
+
+while 1:
+ try:
+ s = raw_input('calc > ')
+ except EOFError:
+ break
+ yacc.parse(s)
diff --git a/third_party/python/ply/example/unicalc/calc.py b/third_party/python/ply/example/unicalc/calc.py
new file mode 100644
index 0000000000..901c4b9d76
--- /dev/null
+++ b/third_party/python/ply/example/unicalc/calc.py
@@ -0,0 +1,133 @@
+# -----------------------------------------------------------------------------
+# calc.py
+#
+# A simple calculator with variables. This is from O'Reilly's
+# "Lex and Yacc", p. 63.
+#
+# This example uses unicode strings for tokens, docstrings, and input.
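+#
+# (Note: this particular example is Python 2 only -- it relies on ur'...'
+# string literals and print statements, neither of which is valid Python 3.)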
+# -----------------------------------------------------------------------------
+
+import sys
+sys.path.insert(0, "../..")
+
+tokens = (
+ 'NAME', 'NUMBER',
+ 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'EQUALS',
+ 'LPAREN', 'RPAREN',
+)
+
+# Tokens
+
+t_PLUS = ur'\+'
+t_MINUS = ur'-'
+t_TIMES = ur'\*'
+t_DIVIDE = ur'/'
+t_EQUALS = ur'='
+t_LPAREN = ur'\('
+t_RPAREN = ur'\)'
+t_NAME = ur'[a-zA-Z_][a-zA-Z0-9_]*'
+
+
+def t_NUMBER(t):
+ ur'\d+'
+ try:
+ t.value = int(t.value)
+ except ValueError:
+ print "Integer value too large", t.value
+ t.value = 0
+ return t
+
+t_ignore = u" \t"
+
+
+def t_newline(t):
+ ur'\n+'
+ t.lexer.lineno += t.value.count("\n")
+
+
+def t_error(t):
+ print "Illegal character '%s'" % t.value[0]
+ t.lexer.skip(1)
+
+# Build the lexer
+import ply.lex as lex
+lex.lex()
+
+# Parsing rules
+
+precedence = (
+ ('left', 'PLUS', 'MINUS'),
+ ('left', 'TIMES', 'DIVIDE'),
+ ('right', 'UMINUS'),
+)
+
+# dictionary of names
+names = {}
+
+
+def p_statement_assign(p):
+ 'statement : NAME EQUALS expression'
+ names[p[1]] = p[3]
+
+
+def p_statement_expr(p):
+ 'statement : expression'
+ print p[1]
+
+
+def p_expression_binop(p):
+ '''expression : expression PLUS expression
+ | expression MINUS expression
+ | expression TIMES expression
+ | expression DIVIDE expression'''
+ if p[2] == u'+':
+ p[0] = p[1] + p[3]
+ elif p[2] == u'-':
+ p[0] = p[1] - p[3]
+ elif p[2] == u'*':
+ p[0] = p[1] * p[3]
+ elif p[2] == u'/':
+ p[0] = p[1] / p[3]
+
+
+def p_expression_uminus(p):
+ 'expression : MINUS expression %prec UMINUS'
+ p[0] = -p[2]
+
+
+def p_expression_group(p):
+ 'expression : LPAREN expression RPAREN'
+ p[0] = p[2]
+
+
+def p_expression_number(p):
+ 'expression : NUMBER'
+ p[0] = p[1]
+
+
+def p_expression_name(p):
+ 'expression : NAME'
+ try:
+ p[0] = names[p[1]]
+ except LookupError:
+ print "Undefined name '%s'" % p[1]
+ p[0] = 0
+
+
+def p_error(p):
+ if p:
+ print "Syntax error at '%s'" % p.value
+ else:
+ print "Syntax error at EOF"
+
+import ply.yacc as yacc
+yacc.yacc()
+
+while 1:
+ try:
+ s = raw_input('calc > ')
+ except EOFError:
+ break
+ if not s:
+ continue
+ yacc.parse(unicode(s))
diff --git a/third_party/python/ply/example/yply/README b/third_party/python/ply/example/yply/README
new file mode 100644
index 0000000000..bfadf36436
--- /dev/null
+++ b/third_party/python/ply/example/yply/README
@@ -0,0 +1,41 @@
+yply.py
+
+This example implements a program yply.py that converts a UNIX-yacc
+specification file into a PLY-compatible program. To use, simply
+run it like this:
+
+ % python yply.py [-nocode] inputfile.y >myparser.py
+
+The output of this program is Python code. In the output,
+any C code in the original file is included, but is commented out.
+If you use the -nocode option, then all of the C code in the
+original file is just discarded.
+
+To use the resulting grammar with PLY, you'll need to edit the
+myparser.py file. Within this file, some stub code is included that
+can be used to test the construction of the parsing tables. However,
+you'll need to do more editing to make a workable parser.
+
+Disclaimer: This is just an example I threw together in an afternoon.
+It might have some bugs. However, it worked when I tried it on
+a yacc-specified C++ parser containing 442 rules and 855 parsing
+states.
+
+Comments:
+
+1. This example does not parse specification files meant for lex/flex.
+ You'll need to specify the tokenizer on your own.
+
+2. This example shows a number of interesting PLY features including
+
+ - Parsing of literal text delimited by nested parentheses
+ - Some interaction between the parser and the lexer.
+ - Use of literals in the grammar specification
+ - One pass compilation. The program just emits the result,
+ there is no intermediate parse tree.
+
+3. This program could probably be cleaned up and enhanced a lot.
+ It would be great if someone wanted to work on this (hint).
+
+-Dave
+
diff --git a/third_party/python/ply/example/yply/ylex.py b/third_party/python/ply/example/yply/ylex.py
new file mode 100644
index 0000000000..16410e250e
--- /dev/null
+++ b/third_party/python/ply/example/yply/ylex.py
@@ -0,0 +1,119 @@
+# lexer for yacc-grammars
+#
+# Author: David Beazley (dave@dabeaz.com)
+# Date : October 2, 2006
+
+import sys
+sys.path.append("../..")
+
+from ply import *
+
+tokens = (
+ 'LITERAL', 'SECTION', 'TOKEN', 'LEFT', 'RIGHT', 'PREC', 'START', 'TYPE', 'NONASSOC', 'UNION', 'CODE',
+ 'ID', 'QLITERAL', 'NUMBER',
+)
+
+states = (('code', 'exclusive'),)
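+
+# The exclusive 'code' state is used to capture brace-delimited C action blocks:
+# t_code() enters the state on '{', t_code_lbrace()/t_code_rbrace() track the
+# nesting level, and the whole block is returned as a single CODE token once
+# the braces balance.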
+
+literals = [';', ',', '<', '>', '|', ':']
+t_ignore = ' \t'
+
+t_TOKEN = r'%token'
+t_LEFT = r'%left'
+t_RIGHT = r'%right'
+t_NONASSOC = r'%nonassoc'
+t_PREC = r'%prec'
+t_START = r'%start'
+t_TYPE = r'%type'
+t_UNION = r'%union'
+t_ID = r'[a-zA-Z_][a-zA-Z_0-9]*'
+t_QLITERAL = r'''(?P<quote>['"]).*?(?P=quote)'''
+t_NUMBER = r'\d+'
+
+
+def t_SECTION(t):
+ r'%%'
+ if getattr(t.lexer, "lastsection", 0):
+ t.value = t.lexer.lexdata[t.lexpos + 2:]
+ t.lexer.lexpos = len(t.lexer.lexdata)
+ else:
+ t.lexer.lastsection = 0
+ return t
+
+# Comments
+
+
+def t_ccomment(t):
+ r'/\*(.|\n)*?\*/'
+ t.lexer.lineno += t.value.count('\n')
+
+t_ignore_cppcomment = r'//.*'
+
+
+def t_LITERAL(t):
+ r'%\{(.|\n)*?%\}'
+ t.lexer.lineno += t.value.count("\n")
+ return t
+
+
+def t_NEWLINE(t):
+ r'\n'
+ t.lexer.lineno += 1
+
+
+def t_code(t):
+ r'\{'
+ t.lexer.codestart = t.lexpos
+ t.lexer.level = 1
+ t.lexer.begin('code')
+
+
+def t_code_ignore_string(t):
+ r'\"([^\\\n]|(\\.))*?\"'
+
+
+def t_code_ignore_char(t):
+ r'\'([^\\\n]|(\\.))*?\''
+
+
+def t_code_ignore_comment(t):
+ r'/\*(.|\n)*?\*/'
+
+
+def t_code_ignore_cppcom(t):
+ r'//.*'
+
+
+def t_code_lbrace(t):
+ r'\{'
+ t.lexer.level += 1
+
+
+def t_code_rbrace(t):
+ r'\}'
+ t.lexer.level -= 1
+ if t.lexer.level == 0:
+ t.type = 'CODE'
+ t.value = t.lexer.lexdata[t.lexer.codestart:t.lexpos + 1]
+ t.lexer.begin('INITIAL')
+ t.lexer.lineno += t.value.count('\n')
+ return t
+
+t_code_ignore_nonspace = r'[^\s\}\'\"\{]+'
+t_code_ignore_whitespace = r'\s+'
+t_code_ignore = ""
+
+
+def t_code_error(t):
+ raise RuntimeError
+
+
+def t_error(t):
+ print("%d: Illegal character '%s'" % (t.lexer.lineno, t.value[0]))
+ print(t.value)
+ t.lexer.skip(1)
+
+lex.lex()
+
+if __name__ == '__main__':
+ lex.runmain()
diff --git a/third_party/python/ply/example/yply/yparse.py b/third_party/python/ply/example/yply/yparse.py
new file mode 100644
index 0000000000..1f2e8d0922
--- /dev/null
+++ b/third_party/python/ply/example/yply/yparse.py
@@ -0,0 +1,244 @@
+# parser for Unix yacc-based grammars
+#
+# Author: David Beazley (dave@dabeaz.com)
+# Date : October 2, 2006
+
+import ylex
+tokens = ylex.tokens
+
+from ply import *
+
+tokenlist = []
+preclist = []
+
+emit_code = 1
+
+
+def p_yacc(p):
+ '''yacc : defsection rulesection'''
+
+
+def p_defsection(p):
+ '''defsection : definitions SECTION
+ | SECTION'''
+ p.lexer.lastsection = 1
+ print("tokens = ", repr(tokenlist))
+ print()
+ print("precedence = ", repr(preclist))
+ print()
+ print("# -------------- RULES ----------------")
+ print()
+
+
+def p_rulesection(p):
+ '''rulesection : rules SECTION'''
+
+ print("# -------------- RULES END ----------------")
+ print_code(p[2], 0)
+
+
+def p_definitions(p):
+ '''definitions : definitions definition
+ | definition'''
+
+
+def p_definition_literal(p):
+ '''definition : LITERAL'''
+ print_code(p[1], 0)
+
+
+def p_definition_start(p):
+ '''definition : START ID'''
+ print("start = '%s'" % p[2])
+
+
+def p_definition_token(p):
+ '''definition : toktype opttype idlist optsemi '''
+ for i in p[3]:
+ if i[0] not in "'\"":
+ tokenlist.append(i)
+ if p[1] == '%left':
+ preclist.append(('left',) + tuple(p[3]))
+ elif p[1] == '%right':
+ preclist.append(('right',) + tuple(p[3]))
+ elif p[1] == '%nonassoc':
+ preclist.append(('nonassoc',) + tuple(p[3]))
+
+
+def p_toktype(p):
+ '''toktype : TOKEN
+ | LEFT
+ | RIGHT
+ | NONASSOC'''
+ p[0] = p[1]
+
+
+def p_opttype(p):
+ '''opttype : '<' ID '>'
+ | empty'''
+
+
+def p_idlist(p):
+ '''idlist : idlist optcomma tokenid
+ | tokenid'''
+ if len(p) == 2:
+ p[0] = [p[1]]
+ else:
+ p[0] = p[1]
+ p[1].append(p[3])
+
+
+def p_tokenid(p):
+ '''tokenid : ID
+ | ID NUMBER
+ | QLITERAL
+ | QLITERAL NUMBER'''
+ p[0] = p[1]
+
+
+def p_optsemi(p):
+ '''optsemi : ';'
+ | empty'''
+
+
+def p_optcomma(p):
+ '''optcomma : ','
+ | empty'''
+
+
+def p_definition_type(p):
+ '''definition : TYPE '<' ID '>' namelist optsemi'''
+ # type declarations are ignored
+
+
+def p_namelist(p):
+ '''namelist : namelist optcomma ID
+ | ID'''
+
+
+def p_definition_union(p):
+ '''definition : UNION CODE optsemi'''
+ # Union declarations are ignored
+
+
+def p_rules(p):
+ '''rules : rules rule
+ | rule'''
+ if len(p) == 2:
+ rule = p[1]
+ else:
+ rule = p[2]
+
+ # Print out a Python equivalent of this rule
+
+ embedded = [] # Embedded actions (a mess)
+ embed_count = 0
+
+ rulename = rule[0]
+ rulecount = 1
+ for r in rule[1]:
+ # r contains one of the rule possibilities
+ print("def p_%s_%d(p):" % (rulename, rulecount))
+ prod = []
+ prodcode = ""
+ for i in range(len(r)):
+ item = r[i]
+ if item[0] == '{': # A code block
+ if i == len(r) - 1:
+ prodcode = item
+ break
+ else:
+ # an embedded action
+ embed_name = "_embed%d_%s" % (embed_count, rulename)
+ prod.append(embed_name)
+ embedded.append((embed_name, item))
+ embed_count += 1
+ else:
+ prod.append(item)
+ print(" '''%s : %s'''" % (rulename, " ".join(prod)))
+ # Emit code
+ print_code(prodcode, 4)
+ print()
+ rulecount += 1
+
+ for e, code in embedded:
+ print("def p_%s(p):" % e)
+ print(" '''%s : '''" % e)
+ print_code(code, 4)
+ print()
+
+
+def p_rule(p):
+ '''rule : ID ':' rulelist ';' '''
+ p[0] = (p[1], [p[3]])
+
+
+def p_rule2(p):
+ '''rule : ID ':' rulelist morerules ';' '''
+ p[4].insert(0, p[3])
+ p[0] = (p[1], p[4])
+
+
+def p_rule_empty(p):
+ '''rule : ID ':' ';' '''
+ p[0] = (p[1], [[]])
+
+
+def p_rule_empty2(p):
+ '''rule : ID ':' morerules ';' '''
+
+ p[3].insert(0, [])
+ p[0] = (p[1], p[3])
+
+
+def p_morerules(p):
+ '''morerules : morerules '|' rulelist
+ | '|' rulelist
+ | '|' '''
+
+ if len(p) == 2:
+ p[0] = [[]]
+ elif len(p) == 3:
+ p[0] = [p[2]]
+ else:
+ p[0] = p[1]
+ p[0].append(p[3])
+
+# print("morerules", len(p), p[0])
+
+
+def p_rulelist(p):
+ '''rulelist : rulelist ruleitem
+ | ruleitem'''
+
+ if len(p) == 2:
+ p[0] = [p[1]]
+ else:
+ p[0] = p[1]
+ p[1].append(p[2])
+
+
+def p_ruleitem(p):
+ '''ruleitem : ID
+ | QLITERAL
+ | CODE
+ | PREC'''
+ p[0] = p[1]
+
+
+def p_empty(p):
+ '''empty : '''
+
+
+def p_error(p):
+ pass
+
+yacc.yacc(debug=0)
+
+
+def print_code(code, indent):
+ if not emit_code:
+ return
+ codelines = code.splitlines()
+ for c in codelines:
+ print("%s# %s" % (" " * indent, c))
diff --git a/third_party/python/ply/example/yply/yply.py b/third_party/python/ply/example/yply/yply.py
new file mode 100755
index 0000000000..e24616c831
--- /dev/null
+++ b/third_party/python/ply/example/yply/yply.py
@@ -0,0 +1,51 @@
+#!/usr/local/bin/python
+# yply.py
+#
+# Author: David Beazley (dave@dabeaz.com)
+# Date : October 2, 2006
+#
+# Converts a UNIX-yacc specification file into a PLY-compatible
+# specification. To use, simply do this:
+#
+# % python yply.py [-nocode] inputfile.y >myparser.py
+#
+# The output of this program is Python code. In the output,
+# any C code in the original file is included, but is commented out.
+# If you use the -nocode option, then all of the C code in the
+# original file is discarded.
+#
+# Disclaimer: This is just an example I threw together in an afternoon.
+# It might have some bugs. However, it worked when I tried it on
+# a yacc-specified C++ parser containing 442 rules and 855 parsing
+# states.
+#
+
+import sys
+sys.path.insert(0, "../..")
+
+import ylex
+import yparse
+
+from ply import *
+
+if len(sys.argv) == 1:
+ print("usage : yply.py [-nocode] inputfile")
+ raise SystemExit
+
+if len(sys.argv) == 3:
+ if sys.argv[1] == '-nocode':
+ yparse.emit_code = 0
+ else:
+ print("Unknown option '%s'" % sys.argv[1])
+ raise SystemExit
+ filename = sys.argv[2]
+else:
+ filename = sys.argv[1]
+
+yacc.parse(open(filename).read())
+
+print("""
+if __name__ == '__main__':
+ from ply import *
+ yacc.yacc()
+""")
diff --git a/third_party/python/ply/ply/__init__.py b/third_party/python/ply/ply/__init__.py
new file mode 100644
index 0000000000..6e53cddcf6
--- /dev/null
+++ b/third_party/python/ply/ply/__init__.py
@@ -0,0 +1,5 @@
+# PLY package
+# Author: David Beazley (dave@dabeaz.com)
+
+__version__ = '3.9'
+__all__ = ['lex','yacc']
diff --git a/third_party/python/ply/ply/cpp.py b/third_party/python/ply/ply/cpp.py
new file mode 100644
index 0000000000..b6bfc69614
--- /dev/null
+++ b/third_party/python/ply/ply/cpp.py
@@ -0,0 +1,918 @@
+# -----------------------------------------------------------------------------
+# cpp.py
+#
+# Author: David Beazley (http://www.dabeaz.com)
+# Copyright (C) 2007
+# All rights reserved
+#
+# This module implements an ANSI-C style lexical preprocessor for PLY.
+# -----------------------------------------------------------------------------
+from __future__ import generators
+
+import sys
+
+# Some Python 3 compatibility shims
+if sys.version_info.major < 3:
+ STRING_TYPES = (str, unicode)
+else:
+ STRING_TYPES = str
+ xrange = range
+
+# -----------------------------------------------------------------------------
+# Default preprocessor lexer definitions. These tokens are enough to get
+# a basic preprocessor working. Other modules may import these if they want
+# -----------------------------------------------------------------------------
+
+tokens = (
+ 'CPP_ID','CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT1', 'CPP_COMMENT2', 'CPP_POUND','CPP_DPOUND'
+)
+
+literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\""
+
+# Whitespace
+def t_CPP_WS(t):
+ r'\s+'
+ t.lexer.lineno += t.value.count("\n")
+ return t
+
+t_CPP_POUND = r'\#'
+t_CPP_DPOUND = r'\#\#'
+
+# Identifier
+t_CPP_ID = r'[A-Za-z_][\w_]*'
+
+# Integer literal
+def CPP_INTEGER(t):
+ r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU][lL]|[lL][uU]|[uU]|[lL])?)'
+ return t
+
+t_CPP_INTEGER = CPP_INTEGER
+
+# Floating literal
+t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
+
+# String literal
+def t_CPP_STRING(t):
+ r'\"([^\\\n]|(\\(.|\n)))*?\"'
+ t.lexer.lineno += t.value.count("\n")
+ return t
+
+# Character constant 'c' or L'c'
+def t_CPP_CHAR(t):
+ r'(L)?\'([^\\\n]|(\\(.|\n)))*?\''
+ t.lexer.lineno += t.value.count("\n")
+ return t
+
+# Comment
+def t_CPP_COMMENT1(t):
+ r'(/\*(.|\n)*?\*/)'
+ ncr = t.value.count("\n")
+ t.lexer.lineno += ncr
+ # replace with one space or a number of '\n'
+ t.type = 'CPP_WS'; t.value = '\n' * ncr if ncr else ' '
+ return t
+
+# Line comment
+def t_CPP_COMMENT2(t):
+ r'(//.*?(\n|$))'
+    # replace with '\n'
+ t.type = 'CPP_WS'; t.value = '\n'
+ return t
+
+def t_error(t):
+ t.type = t.value[0]
+ t.value = t.value[0]
+ t.lexer.skip(1)
+ return t
+
+import re
+import copy
+import time
+import os.path
+
+# -----------------------------------------------------------------------------
+# trigraph()
+#
+# Given an input string, this function replaces all trigraph sequences.
+# The following mapping is used:
+#
+# ??= #
+# ??/ \
+# ??' ^
+# ??( [
+# ??) ]
+# ??! |
+# ??< {
+# ??> }
+# ??- ~
+# -----------------------------------------------------------------------------
+
+_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''')
+_trigraph_rep = {
+ '=':'#',
+ '/':'\\',
+ "'":'^',
+ '(':'[',
+ ')':']',
+ '!':'|',
+ '<':'{',
+ '>':'}',
+ '-':'~'
+}
+
+def trigraph(input):
+ return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]],input)
+
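+# A small illustration of the rewriting, shown as a hypothetical interpreter
+# session:
+#
+#     >>> trigraph("??=define INDEX(a) a??(0??)")
+#     '#define INDEX(a) a[0]'
+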
+# ------------------------------------------------------------------
+# Macro object
+#
+# This object holds information about preprocessor macros
+#
+# .name - Macro name (string)
+# .value - Macro value (a list of tokens)
+# .arglist - List of argument names
+# .variadic - Boolean indicating whether or not variadic macro
+# .vararg - Name of the variadic parameter
+#
+# When a macro is created, the macro replacement token sequence is
+# pre-scanned and used to create patch lists that are later used
+# during macro expansion
+# ------------------------------------------------------------------
+
+class Macro(object):
+ def __init__(self,name,value,arglist=None,variadic=False):
+ self.name = name
+ self.value = value
+ self.arglist = arglist
+ self.variadic = variadic
+ if variadic:
+ self.vararg = arglist[-1]
+ self.source = None
+
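+# For example, a definition such as
+#
+#     #define MAX(a,b) ((a) > (b) ? (a) : (b))
+#
+# is stored (roughly) as a Macro whose .name is 'MAX', whose .arglist is
+# ['a', 'b'], whose .variadic flag is False, and whose .value is the token
+# sequence for '((a) > (b) ? (a) : (b))'.
+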
+# ------------------------------------------------------------------
+# Preprocessor object
+#
+# Object representing a preprocessor. Contains macro definitions,
+# include directories, and other information
+# ------------------------------------------------------------------
+
+class Preprocessor(object):
+ def __init__(self,lexer=None):
+ if lexer is None:
+ lexer = lex.lexer
+ self.lexer = lexer
+ self.macros = { }
+ self.path = []
+ self.temp_path = []
+
+ # Probe the lexer for selected tokens
+ self.lexprobe()
+
+ tm = time.localtime()
+ self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm))
+ self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm))
+ self.parser = None
+
+ # -----------------------------------------------------------------------------
+ # tokenize()
+ #
+ # Utility function. Given a string of text, tokenize into a list of tokens
+ # -----------------------------------------------------------------------------
+
+ def tokenize(self,text):
+ tokens = []
+ self.lexer.input(text)
+ while True:
+ tok = self.lexer.token()
+ if not tok: break
+ tokens.append(tok)
+ return tokens
+
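+ # For instance, assuming p is a Preprocessor built on the CPP lexer rules
+ # defined at the top of this module, a hypothetical session looks like:
+ #
+ #     >>> [tok.value for tok in p.tokenize("a + 1")]
+ #     ['a', ' ', '+', ' ', '1']
+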
+ # ---------------------------------------------------------------------
+ # error()
+ #
+ # Report a preprocessor error/warning of some kind
+ # ----------------------------------------------------------------------
+
+ def error(self,file,line,msg):
+ print("%s:%d %s" % (file,line,msg))
+
+ # ----------------------------------------------------------------------
+ # lexprobe()
+ #
+ # This method probes the preprocessor lexer object to discover
+ # the token types of symbols that are important to the preprocessor.
+ # If this works right, the preprocessor will simply "work"
+ # with any suitable lexer regardless of how tokens have been named.
+ # ----------------------------------------------------------------------
+
+ def lexprobe(self):
+
+ # Determine the token type for identifiers
+ self.lexer.input("identifier")
+ tok = self.lexer.token()
+ if not tok or tok.value != "identifier":
+ print("Couldn't determine identifier type")
+ else:
+ self.t_ID = tok.type
+
+ # Determine the token type for integers
+ self.lexer.input("12345")
+ tok = self.lexer.token()
+ if not tok or int(tok.value) != 12345:
+ print("Couldn't determine integer type")
+ else:
+ self.t_INTEGER = tok.type
+ self.t_INTEGER_TYPE = type(tok.value)
+
+ # Determine the token type for strings enclosed in double quotes
+ self.lexer.input("\"filename\"")
+ tok = self.lexer.token()
+ if not tok or tok.value != "\"filename\"":
+ print("Couldn't determine string type")
+ else:
+ self.t_STRING = tok.type
+
+ # Determine the token type for whitespace--if any
+ self.lexer.input(" ")
+ tok = self.lexer.token()
+ if not tok or tok.value != " ":
+ self.t_SPACE = None
+ else:
+ self.t_SPACE = tok.type
+
+ # Determine the token type for newlines
+ self.lexer.input("\n")
+ tok = self.lexer.token()
+ if not tok or tok.value != "\n":
+ self.t_NEWLINE = None
+ print("Couldn't determine token for newlines")
+ else:
+ self.t_NEWLINE = tok.type
+
+ self.t_WS = (self.t_SPACE, self.t_NEWLINE)
+
+ # Check for other characters used by the preprocessor
+ chars = [ '<','>','#','##','\\','(',')',',','.']
+ for c in chars:
+ self.lexer.input(c)
+ tok = self.lexer.token()
+ if not tok or tok.value != c:
+ print("Unable to lex '%s' required for preprocessor" % c)
+
+ # ----------------------------------------------------------------------
+ # add_path()
+ #
+ # Adds a search path to the preprocessor.
+ # ----------------------------------------------------------------------
+
+ def add_path(self,path):
+ self.path.append(path)
+
+ # ----------------------------------------------------------------------
+ # group_lines()
+ #
+ # Given an input string, this function splits it into lines. Trailing whitespace
+ # is removed. Any line ending with \ is grouped with the next line. This
+ # function forms the lowest level of the preprocessor---grouping text into
+ # a line-by-line format.
+ # ----------------------------------------------------------------------
+
+ def group_lines(self,input):
+ lex = self.lexer.clone()
+ lines = [x.rstrip() for x in input.splitlines()]
+ for i in xrange(len(lines)):
+ j = i+1
+ while lines[i].endswith('\\') and (j < len(lines)):
+ lines[i] = lines[i][:-1]+lines[j]
+ lines[j] = ""
+ j += 1
+
+ input = "\n".join(lines)
+ lex.input(input)
+ lex.lineno = 1
+
+ current_line = []
+ while True:
+ tok = lex.token()
+ if not tok:
+ break
+ current_line.append(tok)
+ if tok.type in self.t_WS and '\n' in tok.value:
+ yield current_line
+ current_line = []
+
+ if current_line:
+ yield current_line
+
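+ # For example, the two physical lines
+ #
+ #     #define ONE \
+ #         1
+ #
+ # are merged (the backslash is dropped and the continuation appended), so
+ # the whole directive is yielded as a single list of tokens.
+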
+ # ----------------------------------------------------------------------
+ # tokenstrip()
+ #
+ # Remove leading/trailing whitespace tokens from a token list
+ # ----------------------------------------------------------------------
+
+ def tokenstrip(self,tokens):
+ i = 0
+ while i < len(tokens) and tokens[i].type in self.t_WS:
+ i += 1
+ del tokens[:i]
+ i = len(tokens)-1
+ while i >= 0 and tokens[i].type in self.t_WS:
+ i -= 1
+ del tokens[i+1:]
+ return tokens
+
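+ # For example, stripping the tokens for '   x + 1  ' removes the leading and
+ # trailing whitespace tokens but keeps the interior ones, leaving the tokens
+ # for 'x + 1'.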
+
+ # ----------------------------------------------------------------------
+ # collect_args()
+ #
+ # Collects comma separated arguments from a list of tokens. The arguments
+ # must be enclosed in parentheses. Returns a tuple (tokencount,args,positions)
+ # where tokencount is the number of tokens consumed, args is a list of arguments,
+ # and positions is a list of integers containing the starting index of each
+ # argument. Each argument is represented by a list of tokens.
+ #
+ # When collecting arguments, leading and trailing whitespace is removed
+ # from each argument.
+ #
+ # This function properly handles nested parentheses and commas---these do not
+ # define new arguments.
+ # ----------------------------------------------------------------------
+
+ def collect_args(self,tokenlist):
+ args = []
+ positions = []
+ current_arg = []
+ nesting = 1
+ tokenlen = len(tokenlist)
+
+ # Search for the opening '('.
+ i = 0
+ while (i < tokenlen) and (tokenlist[i].type in self.t_WS):
+ i += 1
+
+ if (i < tokenlen) and (tokenlist[i].value == '('):
+ positions.append(i+1)
+ else:
+ self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments")
+ return 0, [], []
+
+ i += 1
+
+ while i < tokenlen:
+ t = tokenlist[i]
+ if t.value == '(':
+ current_arg.append(t)
+ nesting += 1
+ elif t.value == ')':
+ nesting -= 1
+ if nesting == 0:
+ if current_arg:
+ args.append(self.tokenstrip(current_arg))
+ positions.append(i)
+ return i+1,args,positions
+ current_arg.append(t)
+ elif t.value == ',' and nesting == 1:
+ args.append(self.tokenstrip(current_arg))
+ positions.append(i+1)
+ current_arg = []
+ else:
+ current_arg.append(t)
+ i += 1
+
+ # Missing end argument
+ self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments")
+ return 0, [],[]
+
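+ # As an illustration, collecting arguments from the token list for
+ #
+ #     (x, f(y, z), w) trailing text
+ #
+ # yields the three arguments 'x', 'f(y, z)' and 'w', together with a token
+ # count that covers everything up to and including the closing ')'.
+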
+ # ----------------------------------------------------------------------
+ # macro_prescan()
+ #
+ # Examine the macro value (token sequence) and identify patch points
+ # This is used to speed up macro expansion later on---we'll know
+ # right away where to apply patches to the value to form the expansion
+ # ----------------------------------------------------------------------
+
+ def macro_prescan(self,macro):
+ macro.patch = [] # Standard macro arguments
+ macro.str_patch = [] # String conversion expansion
+ macro.var_comma_patch = [] # Variadic macro comma patch
+ i = 0
+ while i < len(macro.value):
+ if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist:
+ argnum = macro.arglist.index(macro.value[i].value)
+ # Conversion of argument to a string
+ if i > 0 and macro.value[i-1].value == '#':
+ macro.value[i] = copy.copy(macro.value[i])
+ macro.value[i].type = self.t_STRING
+ del macro.value[i-1]
+ macro.str_patch.append((argnum,i-1))
+ continue
+ # Concatenation
+ elif (i > 0 and macro.value[i-1].value == '##'):
+ macro.patch.append(('c',argnum,i-1))
+ del macro.value[i-1]
+ continue
+ elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'):
+ macro.patch.append(('c',argnum,i))
+ i += 1
+ continue
+ # Standard expansion
+ else:
+ macro.patch.append(('e',argnum,i))
+ elif macro.value[i].value == '##':
+ if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \
+ ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \
+ (macro.value[i+1].value == macro.vararg):
+ macro.var_comma_patch.append(i-1)
+ i += 1
+ macro.patch.sort(key=lambda x: x[2],reverse=True)
+
+ # ----------------------------------------------------------------------
+ # macro_expand_args()
+ #
+ # Given a Macro and list of arguments (each a token list), this method
+ # returns an expanded version of a macro. The return value is a token sequence
+ # representing the replacement macro tokens
+ # ----------------------------------------------------------------------
+
+ def macro_expand_args(self,macro,args):
+ # Make a copy of the macro token sequence
+ rep = [copy.copy(_x) for _x in macro.value]
+
+ # Make string expansion patches. These do not alter the length of the replacement sequence
+
+ str_expansion = {}
+ for argnum, i in macro.str_patch:
+ if argnum not in str_expansion:
+ str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\")
+ rep[i] = copy.copy(rep[i])
+ rep[i].value = str_expansion[argnum]
+
+ # Make the variadic macro comma patch. If the variadic macro argument is empty, we get rid of the preceding comma.
+ comma_patch = False
+ if macro.variadic and not args[-1]:
+ for i in macro.var_comma_patch:
+ rep[i] = None
+ comma_patch = True
+
+ # Make all other patches. The order of these matters. It is assumed that the patch list
+ # has been sorted in reverse order of patch location since replacements will cause the
+ # size of the replacement sequence to expand from the patch point.
+
+ expanded = { }
+ for ptype, argnum, i in macro.patch:
+ # Concatenation. Argument is left unexpanded
+ if ptype == 'c':
+ rep[i:i+1] = args[argnum]
+ # Normal expansion. Argument is macro expanded first
+ elif ptype == 'e':
+ if argnum not in expanded:
+ expanded[argnum] = self.expand_macros(args[argnum])
+ rep[i:i+1] = expanded[argnum]
+
+ # Get rid of removed comma if necessary
+ if comma_patch:
+ rep = [_i for _i in rep if _i]
+
+ return rep
+
+
+ # ----------------------------------------------------------------------
+ # expand_macros()
+ #
+ # Given a list of tokens, this function performs macro expansion.
+ # The expanded argument is a dictionary that contains macros already
+ # expanded. This is used to prevent infinite recursion.
+ # ----------------------------------------------------------------------
+
+ def expand_macros(self,tokens,expanded=None):
+ if expanded is None:
+ expanded = {}
+ i = 0
+ while i < len(tokens):
+ t = tokens[i]
+ if t.type == self.t_ID:
+ if t.value in self.macros and t.value not in expanded:
+ # Yes, we found a macro match
+ expanded[t.value] = True
+
+ m = self.macros[t.value]
+ if not m.arglist:
+ # A simple macro
+ ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded)
+ for e in ex:
+ e.lineno = t.lineno
+ tokens[i:i+1] = ex
+ i += len(ex)
+ else:
+ # A macro with arguments
+ j = i + 1
+ while j < len(tokens) and tokens[j].type in self.t_WS:
+ j += 1
+ if tokens[j].value == '(':
+ tokcount,args,positions = self.collect_args(tokens[j:])
+ if not m.variadic and len(args) != len(m.arglist):
+ self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist)))
+ i = j + tokcount
+ elif m.variadic and len(args) < len(m.arglist)-1:
+ if len(m.arglist) > 2:
+ self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1))
+ else:
+ self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1))
+ i = j + tokcount
+ else:
+ if m.variadic:
+ if len(args) == len(m.arglist)-1:
+ args.append([])
+ else:
+ args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1]
+ del args[len(m.arglist):]
+
+ # Get macro replacement text
+ rep = self.macro_expand_args(m,args)
+ rep = self.expand_macros(rep,expanded)
+ for r in rep:
+ r.lineno = t.lineno
+ tokens[i:j+tokcount] = rep
+ i += len(rep)
+ del expanded[t.value]
+ continue
+ elif t.value == '__LINE__':
+ t.type = self.t_INTEGER
+ t.value = self.t_INTEGER_TYPE(t.lineno)
+
+ i += 1
+ return tokens
+
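+ # A sketch of the effect, assuming p is a Preprocessor in which SQUARE has
+ # been defined with p.define("SQUARE(x) ((x)*(x))"):
+ #
+ #     >>> "".join(t.value for t in p.expand_macros(p.tokenize("SQUARE(n+1)")))
+ #     '((n+1)*(n+1))'
+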
+ # ----------------------------------------------------------------------
+ # evalexpr()
+ #
+ # Evaluate an expression token sequence for the purposes of evaluating
+ # integral expressions.
+ # ----------------------------------------------------------------------
+
+ def evalexpr(self,tokens):
+ # tokens = tokenize(line)
+ # Search for defined macros
+ i = 0
+ while i < len(tokens):
+ if tokens[i].type == self.t_ID and tokens[i].value == 'defined':
+ j = i + 1
+ needparen = False
+ result = "0L"
+ while j < len(tokens):
+ if tokens[j].type in self.t_WS:
+ j += 1
+ continue
+ elif tokens[j].type == self.t_ID:
+ if tokens[j].value in self.macros:
+ result = "1L"
+ else:
+ result = "0L"
+ if not needparen: break
+ elif tokens[j].value == '(':
+ needparen = True
+ elif tokens[j].value == ')':
+ break
+ else:
+ self.error(self.source,tokens[i].lineno,"Malformed defined()")
+ j += 1
+ tokens[i].type = self.t_INTEGER
+ tokens[i].value = self.t_INTEGER_TYPE(result)
+ del tokens[i+1:j+1]
+ i += 1
+ tokens = self.expand_macros(tokens)
+ for i,t in enumerate(tokens):
+ if t.type == self.t_ID:
+ tokens[i] = copy.copy(t)
+ tokens[i].type = self.t_INTEGER
+ tokens[i].value = self.t_INTEGER_TYPE("0L")
+ elif t.type == self.t_INTEGER:
+ tokens[i] = copy.copy(t)
+ # Strip off any trailing suffixes
+ tokens[i].value = str(tokens[i].value)
+ while tokens[i].value[-1] not in "0123456789abcdefABCDEF":
+ tokens[i].value = tokens[i].value[:-1]
+
+ expr = "".join([str(x.value) for x in tokens])
+ expr = expr.replace("&&"," and ")
+ expr = expr.replace("||"," or ")
+ expr = expr.replace("!"," not ")
+ try:
+ result = eval(expr)
+ except Exception:
+ self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression")
+ result = 0
+ return result
+
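+ # For instance, with FOO defined and BAR undefined, the token sequence for
+ #
+ #     defined(FOO) && !defined(BAR)
+ #
+ # evaluates to a true (nonzero) result, while any identifiers still left in
+ # an expression after macro expansion are treated as 0.
+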
+ # ----------------------------------------------------------------------
+ # parsegen()
+ #
+ # Parse an input string.
+ # ----------------------------------------------------------------------
+ def parsegen(self,input,source=None):
+
+ # Replace trigraph sequences
+ t = trigraph(input)
+ lines = self.group_lines(t)
+
+ if not source:
+ source = ""
+
+ self.define("__FILE__ \"%s\"" % source)
+
+ self.source = source
+ chunk = []
+ enable = True
+ iftrigger = False
+ ifstack = []
+
+ for x in lines:
+ for i,tok in enumerate(x):
+ if tok.type not in self.t_WS: break
+ if tok.value == '#':
+ # Preprocessor directive
+
+ # insert necessary whitespace instead of eaten tokens
+ for tok in x:
+ if tok.type in self.t_WS and '\n' in tok.value:
+ chunk.append(tok)
+
+ dirtokens = self.tokenstrip(x[i+1:])
+ if dirtokens:
+ name = dirtokens[0].value
+ args = self.tokenstrip(dirtokens[1:])
+ else:
+ name = ""
+ args = []
+
+ if name == 'define':
+ if enable:
+ for tok in self.expand_macros(chunk):
+ yield tok
+ chunk = []
+ self.define(args)
+ elif name == 'include':
+ if enable:
+ for tok in self.expand_macros(chunk):
+ yield tok
+ chunk = []
+ oldfile = self.macros['__FILE__']
+ for tok in self.include(args):
+ yield tok
+ self.macros['__FILE__'] = oldfile
+ self.source = source
+ elif name == 'undef':
+ if enable:
+ for tok in self.expand_macros(chunk):
+ yield tok
+ chunk = []
+ self.undef(args)
+ elif name == 'ifdef':
+ ifstack.append((enable,iftrigger))
+ if enable:
+ if not args[0].value in self.macros:
+ enable = False
+ iftrigger = False
+ else:
+ iftrigger = True
+ elif name == 'ifndef':
+ ifstack.append((enable,iftrigger))
+ if enable:
+ if args[0].value in self.macros:
+ enable = False
+ iftrigger = False
+ else:
+ iftrigger = True
+ elif name == 'if':
+ ifstack.append((enable,iftrigger))
+ if enable:
+ result = self.evalexpr(args)
+ if not result:
+ enable = False
+ iftrigger = False
+ else:
+ iftrigger = True
+ elif name == 'elif':
+ if ifstack:
+ if ifstack[-1][0]: # We only pay attention if outer "if" allows this
+ if enable: # If already true, we flip enable False
+ enable = False
+ elif not iftrigger: # If False, but not triggered yet, we'll check expression
+ result = self.evalexpr(args)
+ if result:
+ enable = True
+ iftrigger = True
+ else:
+ self.error(self.source,dirtokens[0].lineno,"Misplaced #elif")
+
+ elif name == 'else':
+ if ifstack:
+ if ifstack[-1][0]:
+ if enable:
+ enable = False
+ elif not iftrigger:
+ enable = True
+ iftrigger = True
+ else:
+ self.error(self.source,dirtokens[0].lineno,"Misplaced #else")
+
+ elif name == 'endif':
+ if ifstack:
+ enable,iftrigger = ifstack.pop()
+ else:
+ self.error(self.source,dirtokens[0].lineno,"Misplaced #endif")
+ else:
+ # Unknown preprocessor directive
+ pass
+
+ else:
+ # Normal text
+ if enable:
+ chunk.extend(x)
+
+ for tok in self.expand_macros(chunk):
+ yield tok
+ chunk = []
+
+ # ----------------------------------------------------------------------
+ # include()
+ #
+ # Implementation of file-inclusion
+ # ----------------------------------------------------------------------
+
+ def include(self,tokens):
+ # Try to extract the filename and then process an include file
+ if not tokens:
+ return
+ if tokens:
+ if tokens[0].value != '<' and tokens[0].type != self.t_STRING:
+ tokens = self.expand_macros(tokens)
+
+ if tokens[0].value == '<':
+ # Include <...>
+ i = 1
+ while i < len(tokens):
+ if tokens[i].value == '>':
+ break
+ i += 1
+ else:
+ print("Malformed #include <...>")
+ return
+ filename = "".join([x.value for x in tokens[1:i]])
+ path = self.path + [""] + self.temp_path
+ elif tokens[0].type == self.t_STRING:
+ filename = tokens[0].value[1:-1]
+ path = self.temp_path + [""] + self.path
+ else:
+ print("Malformed #include statement")
+ return
+ for p in path:
+ iname = os.path.join(p,filename)
+ try:
+ data = open(iname,"r").read()
+ dname = os.path.dirname(iname)
+ if dname:
+ self.temp_path.insert(0,dname)
+ for tok in self.parsegen(data,filename):
+ yield tok
+ if dname:
+ del self.temp_path[0]
+ break
+ except IOError:
+ pass
+ else:
+ print("Couldn't find '%s'" % filename)
+
+ # ----------------------------------------------------------------------
+ # define()
+ #
+ # Define a new macro
+ # ----------------------------------------------------------------------
+
+ def define(self,tokens):
+ if isinstance(tokens,STRING_TYPES):
+ tokens = self.tokenize(tokens)
+
+ linetok = tokens
+ try:
+ name = linetok[0]
+ if len(linetok) > 1:
+ mtype = linetok[1]
+ else:
+ mtype = None
+ if not mtype:
+ m = Macro(name.value,[])
+ self.macros[name.value] = m
+ elif mtype.type in self.t_WS:
+ # A normal macro
+ m = Macro(name.value,self.tokenstrip(linetok[2:]))
+ self.macros[name.value] = m
+ elif mtype.value == '(':
+ # A macro with arguments
+ tokcount, args, positions = self.collect_args(linetok[1:])
+ variadic = False
+ for a in args:
+ if variadic:
+ print("No more arguments may follow a variadic argument")
+ break
+ astr = "".join([str(_i.value) for _i in a])
+ if astr == "...":
+ variadic = True
+ a[0].type = self.t_ID
+ a[0].value = '__VA_ARGS__'
+ variadic = True
+ del a[1:]
+ continue
+ elif astr[-3:] == "..." and a[0].type == self.t_ID:
+ variadic = True
+ del a[1:]
+ # If, for some reason, "." is part of the identifier, strip off the name for the purposes
+ # of macro expansion
+ if a[0].value[-3:] == '...':
+ a[0].value = a[0].value[:-3]
+ continue
+ if len(a) > 1 or a[0].type != self.t_ID:
+ print("Invalid macro argument")
+ break
+ else:
+ mvalue = self.tokenstrip(linetok[1+tokcount:])
+ i = 0
+ while i < len(mvalue):
+ if i+1 < len(mvalue):
+ if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##':
+ del mvalue[i]
+ continue
+ elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS:
+ del mvalue[i+1]
+ i += 1
+ m = Macro(name.value,mvalue,[x[0].value for x in args],variadic)
+ self.macro_prescan(m)
+ self.macros[name.value] = m
+ else:
+ print("Bad macro definition")
+ except LookupError:
+ print("Bad macro definition")
+
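+ # Typical calls, assuming p is a Preprocessor instance:
+ #
+ #     p.define("VERSION 3")                               # object-like macro
+ #     p.define("SQUARE(x) ((x)*(x))")                     # macro with arguments
+ #     p.define("LOG(fmt, ...) printf(fmt, __VA_ARGS__)")  # variadic macro
+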
+ # ----------------------------------------------------------------------
+ # undef()
+ #
+ # Undefine a macro
+ # ----------------------------------------------------------------------
+
+ def undef(self,tokens):
+ id = tokens[0].value
+ try:
+ del self.macros[id]
+ except LookupError:
+ pass
+
+ # ----------------------------------------------------------------------
+ # parse()
+ #
+ # Parse input text.
+ # ----------------------------------------------------------------------
+ def parse(self,input,source=None,ignore={}):
+ self.ignore = ignore
+ self.parser = self.parsegen(input,source)
+
+ # ----------------------------------------------------------------------
+ # token()
+ #
+ # Method to return individual tokens
+ # ----------------------------------------------------------------------
+ def token(self):
+ try:
+ while True:
+ tok = next(self.parser)
+ if tok.type not in self.ignore: return tok
+ except StopIteration:
+ self.parser = None
+ return None
+
+if __name__ == '__main__':
+ import ply.lex as lex
+ lexer = lex.lex()
+
+ # Run a preprocessor
+ import sys
+ f = open(sys.argv[1])
+ input = f.read()
+
+ p = Preprocessor(lexer)
+ p.parse(input,sys.argv[1])
+ while True:
+ tok = p.token()
+ if not tok: break
+ print(p.source, tok)
+
+
+
+
+
+
+
+
+
+
+
diff --git a/third_party/python/ply/ply/ctokens.py b/third_party/python/ply/ply/ctokens.py
new file mode 100644
index 0000000000..f6f6952d60
--- /dev/null
+++ b/third_party/python/ply/ply/ctokens.py
@@ -0,0 +1,133 @@
+# ----------------------------------------------------------------------
+# ctokens.py
+#
+# Token specifications for symbols in ANSI C and C++. This file is
+# meant to be used as a library in other tokenizers.
+# ----------------------------------------------------------------------
+
+# Reserved words
+
+tokens = [
+ # Literals (identifier, integer constant, float constant, string constant, char const)
+ 'ID', 'TYPEID', 'INTEGER', 'FLOAT', 'STRING', 'CHARACTER',
+
+ # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
+ 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MODULO',
+ 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
+ 'LOR', 'LAND', 'LNOT',
+ 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
+
+ # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
+ 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
+ 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',
+
+ # Increment/decrement (++,--)
+ 'INCREMENT', 'DECREMENT',
+
+ # Structure dereference (->)
+ 'ARROW',
+
+ # Ternary operator (?)
+ 'TERNARY',
+
+ # Delimiters ( ) [ ] { } , . ; :
+ 'LPAREN', 'RPAREN',
+ 'LBRACKET', 'RBRACKET',
+ 'LBRACE', 'RBRACE',
+ 'COMMA', 'PERIOD', 'SEMI', 'COLON',
+
+ # Ellipsis (...)
+ 'ELLIPSIS',
+]
+
+# Operators
+t_PLUS = r'\+'
+t_MINUS = r'-'
+t_TIMES = r'\*'
+t_DIVIDE = r'/'
+t_MODULO = r'%'
+t_OR = r'\|'
+t_AND = r'&'
+t_NOT = r'~'
+t_XOR = r'\^'
+t_LSHIFT = r'<<'
+t_RSHIFT = r'>>'
+t_LOR = r'\|\|'
+t_LAND = r'&&'
+t_LNOT = r'!'
+t_LT = r'<'
+t_GT = r'>'
+t_LE = r'<='
+t_GE = r'>='
+t_EQ = r'=='
+t_NE = r'!='
+
+# Assignment operators
+
+t_EQUALS = r'='
+t_TIMESEQUAL = r'\*='
+t_DIVEQUAL = r'/='
+t_MODEQUAL = r'%='
+t_PLUSEQUAL = r'\+='
+t_MINUSEQUAL = r'-='
+t_LSHIFTEQUAL = r'<<='
+t_RSHIFTEQUAL = r'>>='
+t_ANDEQUAL = r'&='
+t_OREQUAL = r'\|='
+t_XOREQUAL = r'\^='
+
+# Increment/decrement
+t_INCREMENT = r'\+\+'
+t_DECREMENT = r'--'
+
+# ->
+t_ARROW = r'->'
+
+# ?
+t_TERNARY = r'\?'
+
+# Delimiters
+t_LPAREN = r'\('
+t_RPAREN = r'\)'
+t_LBRACKET = r'\['
+t_RBRACKET = r'\]'
+t_LBRACE = r'\{'
+t_RBRACE = r'\}'
+t_COMMA = r','
+t_PERIOD = r'\.'
+t_SEMI = r';'
+t_COLON = r':'
+t_ELLIPSIS = r'\.\.\.'
+
+# Identifiers
+t_ID = r'[A-Za-z_][A-Za-z0-9_]*'
+
+# Integer literal
+t_INTEGER = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'
+
+# Floating literal
+t_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
+
+# String literal
+t_STRING = r'\"([^\\\n]|(\\.))*?\"'
+
+# Character constant 'c' or L'c'
+t_CHARACTER = r'(L)?\'([^\\\n]|(\\.))*?\''
+
+# Comment (C-Style)
+def t_COMMENT(t):
+ r'/\*(.|\n)*?\*/'
+ t.lexer.lineno += t.value.count('\n')
+ return t
+
+# Comment (C++-Style)
+def t_CPPCOMMENT(t):
+ r'//.*\n'
+ t.lexer.lineno += 1
+ return t
+
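+# A sketch of how another tokenizer module might reuse these specifications
+# (the extra rules shown here are illustrative, not part of this file):
+#
+#     from ply.ctokens import *
+#     import ply.lex as lex
+#
+#     tokens = tokens + ['COMMENT', 'CPPCOMMENT']  # cover the comment rules above
+#     t_ignore = ' \t'
+#
+#     def t_error(t):
+#         t.lexer.skip(1)
+#
+#     lexer = lex.lex()
+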
+
+
+
+
+
diff --git a/third_party/python/ply/ply/lex.py b/third_party/python/ply/ply/lex.py
new file mode 100644
index 0000000000..3e240d1aa2
--- /dev/null
+++ b/third_party/python/ply/ply/lex.py
@@ -0,0 +1,1100 @@
+# -----------------------------------------------------------------------------
+# ply: lex.py
+#
+# Copyright (C) 2001-2017
+# David M. Beazley (Dabeaz LLC)
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+# * Neither the name of the David Beazley or Dabeaz LLC may be used to
+# endorse or promote products derived from this software without
+# specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# -----------------------------------------------------------------------------
+
+__version__ = '3.10'
+__tabversion__ = '3.10'
+
+import re
+import sys
+import types
+import copy
+import os
+import inspect
+
+# This tuple contains known string types
+try:
+ # Python 2.6
+ StringTypes = (types.StringType, types.UnicodeType)
+except AttributeError:
+ # Python 3.0
+ StringTypes = (str, bytes)
+
+# This regular expression is used to match valid token names
+_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$')
+
+# Exception thrown when invalid token encountered and no default error
+# handler is defined.
+class LexError(Exception):
+ def __init__(self, message, s):
+ self.args = (message,)
+ self.text = s
+
+
+# Token class. This class is used to represent the tokens produced.
+class LexToken(object):
+ def __str__(self):
+ return 'LexToken(%s,%r,%d,%d)' % (self.type, self.value, self.lineno, self.lexpos)
+
+ def __repr__(self):
+ return str(self)
+
+
+# This object is a stand-in for a logging object created by the
+# logging module.
+
+class PlyLogger(object):
+ def __init__(self, f):
+ self.f = f
+
+ def critical(self, msg, *args, **kwargs):
+ self.f.write((msg % args) + '\n')
+
+ def warning(self, msg, *args, **kwargs):
+ self.f.write('WARNING: ' + (msg % args) + '\n')
+
+ def error(self, msg, *args, **kwargs):
+ self.f.write('ERROR: ' + (msg % args) + '\n')
+
+ info = critical
+ debug = critical
+
+
+# Null logger is used when no output is generated. Does nothing.
+class NullLogger(object):
+ def __getattribute__(self, name):
+ return self
+
+ def __call__(self, *args, **kwargs):
+ return self
+
+
+# -----------------------------------------------------------------------------
+# === Lexing Engine ===
+#
+# The following Lexer class implements the lexer runtime. There are only
+# a few public methods and attributes:
+#
+# input() - Store a new string in the lexer
+# token() - Get the next token
+# clone() - Clone the lexer
+#
+# lineno - Current line number
+# lexpos - Current position in the input string
+# -----------------------------------------------------------------------------
+
+class Lexer:
+ def __init__(self):
+ self.lexre = None # Master regular expression. This is a list of
+ # tuples (re, findex) where re is a compiled
+ # regular expression and findex is a list
+ # mapping regex group numbers to rules
+ self.lexretext = None # Current regular expression strings
+ self.lexstatere = {} # Dictionary mapping lexer states to master regexs
+ self.lexstateretext = {} # Dictionary mapping lexer states to regex strings
+ self.lexstaterenames = {} # Dictionary mapping lexer states to symbol names
+ self.lexstate = 'INITIAL' # Current lexer state
+ self.lexstatestack = [] # Stack of lexer states
+ self.lexstateinfo = None # State information
+ self.lexstateignore = {} # Dictionary of ignored characters for each state
+ self.lexstateerrorf = {} # Dictionary of error functions for each state
+ self.lexstateeoff = {} # Dictionary of eof functions for each state
+ self.lexreflags = 0 # Optional re compile flags
+ self.lexdata = None # Actual input data (as a string)
+ self.lexpos = 0 # Current position in input text
+ self.lexlen = 0 # Length of the input text
+ self.lexerrorf = None # Error rule (if any)
+ self.lexeoff = None # EOF rule (if any)
+ self.lextokens = None # List of valid tokens
+ self.lexignore = '' # Ignored characters
+ self.lexliterals = '' # Literal characters that can be passed through
+ self.lexmodule = None # Module
+ self.lineno = 1 # Current line number
+ self.lexoptimize = False # Optimized mode
+
+ def clone(self, object=None):
+ c = copy.copy(self)
+
+ # If the object parameter has been supplied, it means we are attaching the
+ # lexer to a new object. In this case, we have to rebind all methods in
+ # the lexstatere and lexstateerrorf tables.
+
+ if object:
+ newtab = {}
+ for key, ritem in self.lexstatere.items():
+ newre = []
+ for cre, findex in ritem:
+ newfindex = []
+ for f in findex:
+ if not f or not f[0]:
+ newfindex.append(f)
+ continue
+ newfindex.append((getattr(object, f[0].__name__), f[1]))
+ newre.append((cre, newfindex))
+ newtab[key] = newre
+ c.lexstatere = newtab
+ c.lexstateerrorf = {}
+ for key, ef in self.lexstateerrorf.items():
+ c.lexstateerrorf[key] = getattr(object, ef.__name__)
+ c.lexmodule = object
+ return c
+
+ # ------------------------------------------------------------
+ # writetab() - Write lexer information to a table file
+ # ------------------------------------------------------------
+ def writetab(self, lextab, outputdir=''):
+ if isinstance(lextab, types.ModuleType):
+ raise IOError("Won't overwrite existing lextab module")
+ basetabmodule = lextab.split('.')[-1]
+ filename = os.path.join(outputdir, basetabmodule) + '.py'
+ with open(filename, 'w') as tf:
+ tf.write('# %s.py. This file automatically created by PLY (version %s). Don\'t edit!\n' % (basetabmodule, __version__))
+ tf.write('_tabversion = %s\n' % repr(__tabversion__))
+ tf.write('_lextokens = set(%s)\n' % repr(tuple(self.lextokens)))
+ tf.write('_lexreflags = %s\n' % repr(self.lexreflags))
+ tf.write('_lexliterals = %s\n' % repr(self.lexliterals))
+ tf.write('_lexstateinfo = %s\n' % repr(self.lexstateinfo))
+
+ # Rewrite the lexstatere table, replacing function objects with function names
+ tabre = {}
+ for statename, lre in self.lexstatere.items():
+ titem = []
+ for (pat, func), retext, renames in zip(lre, self.lexstateretext[statename], self.lexstaterenames[statename]):
+ titem.append((retext, _funcs_to_names(func, renames)))
+ tabre[statename] = titem
+
+ tf.write('_lexstatere = %s\n' % repr(tabre))
+ tf.write('_lexstateignore = %s\n' % repr(self.lexstateignore))
+
+ taberr = {}
+ for statename, ef in self.lexstateerrorf.items():
+ taberr[statename] = ef.__name__ if ef else None
+ tf.write('_lexstateerrorf = %s\n' % repr(taberr))
+
+ tabeof = {}
+ for statename, ef in self.lexstateeoff.items():
+ tabeof[statename] = ef.__name__ if ef else None
+ tf.write('_lexstateeoff = %s\n' % repr(tabeof))
+
+ # ------------------------------------------------------------
+ # readtab() - Read lexer information from a tab file
+ # ------------------------------------------------------------
+ def readtab(self, tabfile, fdict):
+ if isinstance(tabfile, types.ModuleType):
+ lextab = tabfile
+ else:
+ exec('import %s' % tabfile)
+ lextab = sys.modules[tabfile]
+
+ if getattr(lextab, '_tabversion', '0.0') != __tabversion__:
+ raise ImportError('Inconsistent PLY version')
+
+ self.lextokens = lextab._lextokens
+ self.lexreflags = lextab._lexreflags
+ self.lexliterals = lextab._lexliterals
+ self.lextokens_all = self.lextokens | set(self.lexliterals)
+ self.lexstateinfo = lextab._lexstateinfo
+ self.lexstateignore = lextab._lexstateignore
+ self.lexstatere = {}
+ self.lexstateretext = {}
+ for statename, lre in lextab._lexstatere.items():
+ titem = []
+ txtitem = []
+ for pat, func_name in lre:
+ titem.append((re.compile(pat, lextab._lexreflags), _names_to_funcs(func_name, fdict)))
+
+ self.lexstatere[statename] = titem
+ self.lexstateretext[statename] = txtitem
+
+ self.lexstateerrorf = {}
+ for statename, ef in lextab._lexstateerrorf.items():
+ self.lexstateerrorf[statename] = fdict[ef]
+
+ self.lexstateeoff = {}
+ for statename, ef in lextab._lexstateeoff.items():
+ self.lexstateeoff[statename] = fdict[ef]
+
+ self.begin('INITIAL')
+
+ # ------------------------------------------------------------
+ # input() - Push a new string into the lexer
+ # ------------------------------------------------------------
+ def input(self, s):
+ # Pull off the first character to see if s looks like a string
+ c = s[:1]
+ if not isinstance(c, StringTypes):
+ raise ValueError('Expected a string')
+ self.lexdata = s
+ self.lexpos = 0
+ self.lexlen = len(s)
+
+ # ------------------------------------------------------------
+ # begin() - Changes the lexing state
+ # ------------------------------------------------------------
+ def begin(self, state):
+ if state not in self.lexstatere:
+ raise ValueError('Undefined state')
+ self.lexre = self.lexstatere[state]
+ self.lexretext = self.lexstateretext[state]
+ self.lexignore = self.lexstateignore.get(state, '')
+ self.lexerrorf = self.lexstateerrorf.get(state, None)
+ self.lexeoff = self.lexstateeoff.get(state, None)
+ self.lexstate = state
+
+ # ------------------------------------------------------------
+ # push_state() - Changes the lexing state and saves old on stack
+ # ------------------------------------------------------------
+ def push_state(self, state):
+ self.lexstatestack.append(self.lexstate)
+ self.begin(state)
+
+ # ------------------------------------------------------------
+ # pop_state() - Restores the previous state
+ # ------------------------------------------------------------
+ def pop_state(self):
+ self.begin(self.lexstatestack.pop())
+
+ # ------------------------------------------------------------
+ # current_state() - Returns the current lexing state
+ # ------------------------------------------------------------
+ def current_state(self):
+ return self.lexstate
+
+ # ------------------------------------------------------------
+ # skip() - Skip ahead n characters
+ # ------------------------------------------------------------
+ def skip(self, n):
+ self.lexpos += n
+
+ # ------------------------------------------------------------
+ # token() - Return the next token from the Lexer
+ #
+ # Note: This function has been carefully implemented to be as fast
+ # as possible. Don't make changes unless you really know what
+ # you are doing
+ # ------------------------------------------------------------
+ def token(self):
+ # Make local copies of frequently referenced attributes
+ lexpos = self.lexpos
+ lexlen = self.lexlen
+ lexignore = self.lexignore
+ lexdata = self.lexdata
+
+ while lexpos < lexlen:
+ # This provides a short-circuit for whitespace, tabs, and other ignored characters
+ if lexdata[lexpos] in lexignore:
+ lexpos += 1
+ continue
+
+ # Look for a regular expression match
+ for lexre, lexindexfunc in self.lexre:
+ m = lexre.match(lexdata, lexpos)
+ if not m:
+ continue
+
+ # Create a token for return
+ tok = LexToken()
+ tok.value = m.group()
+ tok.lineno = self.lineno
+ tok.lexpos = lexpos
+
+ i = m.lastindex
+ func, tok.type = lexindexfunc[i]
+
+ if not func:
+ # If no token type was set, it's an ignored token
+ if tok.type:
+ self.lexpos = m.end()
+ return tok
+ else:
+ lexpos = m.end()
+ break
+
+ lexpos = m.end()
+
+ # If token is processed by a function, call it
+
+ tok.lexer = self # Set additional attributes useful in token rules
+ self.lexmatch = m
+ self.lexpos = lexpos
+
+ newtok = func(tok)
+
+ # Every function must return a token. If it returns nothing, we just move on to the next token
+ if not newtok:
+ lexpos = self.lexpos # This is here in case user has updated lexpos.
+ lexignore = self.lexignore # This is here in case there was a state change
+ break
+
+ # Verify type of the token. If not in the token map, raise an error
+ if not self.lexoptimize:
+ if newtok.type not in self.lextokens_all:
+ raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % (
+ func.__code__.co_filename, func.__code__.co_firstlineno,
+ func.__name__, newtok.type), lexdata[lexpos:])
+
+ return newtok
+ else:
+ # No match, see if in literals
+ if lexdata[lexpos] in self.lexliterals:
+ tok = LexToken()
+ tok.value = lexdata[lexpos]
+ tok.lineno = self.lineno
+ tok.type = tok.value
+ tok.lexpos = lexpos
+ self.lexpos = lexpos + 1
+ return tok
+
+ # No match. Call t_error() if defined.
+ if self.lexerrorf:
+ tok = LexToken()
+ tok.value = self.lexdata[lexpos:]
+ tok.lineno = self.lineno
+ tok.type = 'error'
+ tok.lexer = self
+ tok.lexpos = lexpos
+ self.lexpos = lexpos
+ newtok = self.lexerrorf(tok)
+ if lexpos == self.lexpos:
+ # Error method didn't change text position at all. This is an error.
+ raise LexError("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:])
+ lexpos = self.lexpos
+ if not newtok:
+ continue
+ return newtok
+
+ self.lexpos = lexpos
+ raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos], lexpos), lexdata[lexpos:])
+
+ if self.lexeoff:
+ tok = LexToken()
+ tok.type = 'eof'
+ tok.value = ''
+ tok.lineno = self.lineno
+ tok.lexpos = lexpos
+ tok.lexer = self
+ self.lexpos = lexpos
+ newtok = self.lexeoff(tok)
+ return newtok
+
+ self.lexpos = lexpos + 1
+ if self.lexdata is None:
+ raise RuntimeError('No input string given with input()')
+ return None
+
+ # Iterator interface
+ def __iter__(self):
+ return self
+
+ def next(self):
+ t = self.token()
+ if t is None:
+ raise StopIteration
+ return t
+
+ __next__ = next
+
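+# Once built (see lex() below), a Lexer is driven either by calling token()
+# in a loop or by iterating over it directly, e.g.:
+#
+#     lexer.input(data)
+#     while True:
+#         tok = lexer.token()
+#         if not tok:
+#             break
+#         print(tok.type, tok.value, tok.lineno, tok.lexpos)
+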
+# -----------------------------------------------------------------------------
+# === Lex Builder ===
+#
+# The functions and classes below are used to collect lexing information
+# and build a Lexer object from it.
+# -----------------------------------------------------------------------------
+
+# -----------------------------------------------------------------------------
+# _get_regex(func)
+#
+# Returns the regular expression assigned to a function either as a doc string
+# or as a .regex attribute attached by the @TOKEN decorator.
+# -----------------------------------------------------------------------------
+def _get_regex(func):
+ return getattr(func, 'regex', func.__doc__)
+
+# -----------------------------------------------------------------------------
+# get_caller_module_dict()
+#
+# This function returns a dictionary containing all of the symbols defined within
+# a caller further down the call stack. This is used to get the environment
+# associated with the yacc() call if none was provided.
+# -----------------------------------------------------------------------------
+def get_caller_module_dict(levels):
+ f = sys._getframe(levels)
+ ldict = f.f_globals.copy()
+ if f.f_globals != f.f_locals:
+ ldict.update(f.f_locals)
+ return ldict
+
+# -----------------------------------------------------------------------------
+# _funcs_to_names()
+#
+# Given a list of regular expression functions, this converts it to a list
+# suitable for output to a table file
+# -----------------------------------------------------------------------------
+def _funcs_to_names(funclist, namelist):
+ result = []
+ for f, name in zip(funclist, namelist):
+ if f and f[0]:
+ result.append((name, f[1]))
+ else:
+ result.append(f)
+ return result
+
+# -----------------------------------------------------------------------------
+# _names_to_funcs()
+#
+# Given a list of regular expression function names, this converts it back to
+# functions.
+# -----------------------------------------------------------------------------
+def _names_to_funcs(namelist, fdict):
+ result = []
+ for n in namelist:
+ if n and n[0]:
+ result.append((fdict[n[0]], n[1]))
+ else:
+ result.append(n)
+ return result
+
+# -----------------------------------------------------------------------------
+# _form_master_re()
+#
+# This function takes a list of all of the regex components and attempts to
+# form the master regular expression. Given limitations in the Python re
+# module, it may be necessary to break the master regex into separate expressions.
+# -----------------------------------------------------------------------------
+def _form_master_re(relist, reflags, ldict, toknames):
+ if not relist:
+ return []
+ regex = '|'.join(relist)
+ try:
+ lexre = re.compile(regex, reflags)
+
+ # Build the index to function map for the matching engine
+ lexindexfunc = [None] * (max(lexre.groupindex.values()) + 1)
+ lexindexnames = lexindexfunc[:]
+
+ for f, i in lexre.groupindex.items():
+ handle = ldict.get(f, None)
+ if type(handle) in (types.FunctionType, types.MethodType):
+ lexindexfunc[i] = (handle, toknames[f])
+ lexindexnames[i] = f
+ elif handle is not None:
+ lexindexnames[i] = f
+ if f.find('ignore_') > 0:
+ lexindexfunc[i] = (None, None)
+ else:
+ lexindexfunc[i] = (None, toknames[f])
+
+ return [(lexre, lexindexfunc)], [regex], [lexindexnames]
+ except Exception:
+ m = int(len(relist)/2)
+ if m == 0:
+ m = 1
+ llist, lre, lnames = _form_master_re(relist[:m], reflags, ldict, toknames)
+ rlist, rre, rnames = _form_master_re(relist[m:], reflags, ldict, toknames)
+ return (llist+rlist), (lre+rre), (lnames+rnames)
+
+# -----------------------------------------------------------------------------
+# def _statetoken(s,names)
+#
+# Given a declaration name s of the form "t_" and a dictionary whose keys are
+# state names, this function returns a tuple (states,tokenname) where states
+# is a tuple of state names and tokenname is the name of the token. For example,
+# calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM')
+# -----------------------------------------------------------------------------
+def _statetoken(s, names):
+ nonstate = 1
+ parts = s.split('_')
+ for i, part in enumerate(parts[1:], 1):
+ if part not in names and part != 'ANY':
+ break
+
+ if i > 1:
+ states = tuple(parts[1:i])
+ else:
+ states = ('INITIAL',)
+
+ if 'ANY' in states:
+ states = tuple(names)
+
+ tokenname = '_'.join(parts[i:])
+ return (states, tokenname)
+
+
+# -----------------------------------------------------------------------------
+# LexerReflect()
+#
+# This class represents information needed to build a lexer as extracted from a
+# user's input file.
+# -----------------------------------------------------------------------------
+class LexerReflect(object):
+ def __init__(self, ldict, log=None, reflags=0):
+ self.ldict = ldict
+ self.error_func = None
+ self.tokens = []
+ self.reflags = reflags
+ self.stateinfo = {'INITIAL': 'inclusive'}
+ self.modules = set()
+ self.error = False
+ self.log = PlyLogger(sys.stderr) if log is None else log
+
+ # Get all of the basic information
+ def get_all(self):
+ self.get_tokens()
+ self.get_literals()
+ self.get_states()
+ self.get_rules()
+
+ # Validate all of the information
+ def validate_all(self):
+ self.validate_tokens()
+ self.validate_literals()
+ self.validate_rules()
+ return self.error
+
+ # Get the tokens map
+ def get_tokens(self):
+ tokens = self.ldict.get('tokens', None)
+ if not tokens:
+ self.log.error('No token list is defined')
+ self.error = True
+ return
+
+ if not isinstance(tokens, (list, tuple)):
+ self.log.error('tokens must be a list or tuple')
+ self.error = True
+ return
+
+ if not tokens:
+ self.log.error('tokens is empty')
+ self.error = True
+ return
+
+ self.tokens = tokens
+
+ # Validate the tokens
+ def validate_tokens(self):
+ terminals = {}
+ for n in self.tokens:
+ if not _is_identifier.match(n):
+ self.log.error("Bad token name '%s'", n)
+ self.error = True
+ if n in terminals:
+ self.log.warning("Token '%s' multiply defined", n)
+ terminals[n] = 1
+
+ # Get the literals specifier
+ def get_literals(self):
+ self.literals = self.ldict.get('literals', '')
+ if not self.literals:
+ self.literals = ''
+
+ # Validate literals
+ def validate_literals(self):
+ try:
+ for c in self.literals:
+ if not isinstance(c, StringTypes) or len(c) > 1:
+ self.log.error('Invalid literal %s. Must be a single character', repr(c))
+ self.error = True
+
+ except TypeError:
+ self.log.error('Invalid literals specification. literals must be a sequence of characters')
+ self.error = True
+
+ def get_states(self):
+ self.states = self.ldict.get('states', None)
+ # Build statemap
+ if self.states:
+ if not isinstance(self.states, (tuple, list)):
+ self.log.error('states must be defined as a tuple or list')
+ self.error = True
+ else:
+ for s in self.states:
+ if not isinstance(s, tuple) or len(s) != 2:
+ self.log.error("Invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')", repr(s))
+ self.error = True
+ continue
+ name, statetype = s
+ if not isinstance(name, StringTypes):
+ self.log.error('State name %s must be a string', repr(name))
+ self.error = True
+ continue
+ if not (statetype == 'inclusive' or statetype == 'exclusive'):
+ self.log.error("State type for state %s must be 'inclusive' or 'exclusive'", name)
+ self.error = True
+ continue
+ if name in self.stateinfo:
+ self.log.error("State '%s' already defined", name)
+ self.error = True
+ continue
+ self.stateinfo[name] = statetype
+
+ # Get all of the symbols with a t_ prefix and sort them into various
+ # categories (functions, strings, error functions, and ignore characters)
+
+ def get_rules(self):
+ tsymbols = [f for f in self.ldict if f[:2] == 't_']
+
+ # Now build up a list of functions and a list of strings
+ self.toknames = {} # Mapping of symbols to token names
+ self.funcsym = {} # Symbols defined as functions
+ self.strsym = {} # Symbols defined as strings
+ self.ignore = {} # Ignore strings by state
+ self.errorf = {} # Error functions by state
+ self.eoff = {} # EOF functions by state
+
+ for s in self.stateinfo:
+ self.funcsym[s] = []
+ self.strsym[s] = []
+
+ if len(tsymbols) == 0:
+ self.log.error('No rules of the form t_rulename are defined')
+ self.error = True
+ return
+
+ for f in tsymbols:
+ t = self.ldict[f]
+ states, tokname = _statetoken(f, self.stateinfo)
+ self.toknames[f] = tokname
+
+ if hasattr(t, '__call__'):
+ if tokname == 'error':
+ for s in states:
+ self.errorf[s] = t
+ elif tokname == 'eof':
+ for s in states:
+ self.eoff[s] = t
+ elif tokname == 'ignore':
+ line = t.__code__.co_firstlineno
+ file = t.__code__.co_filename
+ self.log.error("%s:%d: Rule '%s' must be defined as a string", file, line, t.__name__)
+ self.error = True
+ else:
+ for s in states:
+ self.funcsym[s].append((f, t))
+ elif isinstance(t, StringTypes):
+ if tokname == 'ignore':
+ for s in states:
+ self.ignore[s] = t
+ if '\\' in t:
+ self.log.warning("%s contains a literal backslash '\\'", f)
+
+ elif tokname == 'error':
+ self.log.error("Rule '%s' must be defined as a function", f)
+ self.error = True
+ else:
+ for s in states:
+ self.strsym[s].append((f, t))
+ else:
+ self.log.error('%s not defined as a function or string', f)
+ self.error = True
+
+ # Sort the functions by line number
+ for f in self.funcsym.values():
+ f.sort(key=lambda x: x[1].__code__.co_firstlineno)
+
+ # Sort the strings by regular expression length
+ for s in self.strsym.values():
+ s.sort(key=lambda x: len(x[1]), reverse=True)
+
+ # Validate all of the t_rules collected
+ def validate_rules(self):
+ for state in self.stateinfo:
+ # Validate all rules defined by functions
+
+ for fname, f in self.funcsym[state]:
+ line = f.__code__.co_firstlineno
+ file = f.__code__.co_filename
+ module = inspect.getmodule(f)
+ self.modules.add(module)
+
+ tokname = self.toknames[fname]
+ if isinstance(f, types.MethodType):
+ reqargs = 2
+ else:
+ reqargs = 1
+ nargs = f.__code__.co_argcount
+ if nargs > reqargs:
+ self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
+ self.error = True
+ continue
+
+ if nargs < reqargs:
+ self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
+ self.error = True
+ continue
+
+ if not _get_regex(f):
+ self.log.error("%s:%d: No regular expression defined for rule '%s'", file, line, f.__name__)
+ self.error = True
+ continue
+
+ try:
+ c = re.compile('(?P<%s>%s)' % (fname, _get_regex(f)), self.reflags)
+ if c.match(''):
+ self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file, line, f.__name__)
+ self.error = True
+ except re.error as e:
+ self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file, line, f.__name__, e)
+ if '#' in _get_regex(f):
+ self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'", file, line, f.__name__)
+ self.error = True
+
+ # Validate all rules defined by strings
+ for name, r in self.strsym[state]:
+ tokname = self.toknames[name]
+ if tokname == 'error':
+ self.log.error("Rule '%s' must be defined as a function", name)
+ self.error = True
+ continue
+
+ if tokname not in self.tokens and tokname.find('ignore_') < 0:
+ self.log.error("Rule '%s' defined for an unspecified token %s", name, tokname)
+ self.error = True
+ continue
+
+ try:
+ c = re.compile('(?P<%s>%s)' % (name, r), self.reflags)
+ if (c.match('')):
+ self.log.error("Regular expression for rule '%s' matches empty string", name)
+ self.error = True
+ except re.error as e:
+ self.log.error("Invalid regular expression for rule '%s'. %s", name, e)
+ if '#' in r:
+ self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'", name)
+ self.error = True
+
+ if not self.funcsym[state] and not self.strsym[state]:
+ self.log.error("No rules defined for state '%s'", state)
+ self.error = True
+
+ # Validate the error function
+ efunc = self.errorf.get(state, None)
+ if efunc:
+ f = efunc
+ line = f.__code__.co_firstlineno
+ file = f.__code__.co_filename
+ module = inspect.getmodule(f)
+ self.modules.add(module)
+
+ if isinstance(f, types.MethodType):
+ reqargs = 2
+ else:
+ reqargs = 1
+ nargs = f.__code__.co_argcount
+ if nargs > reqargs:
+ self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
+ self.error = True
+
+ if nargs < reqargs:
+ self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
+ self.error = True
+
+ for module in self.modules:
+ self.validate_module(module)
+
+ # -----------------------------------------------------------------------------
+ # validate_module()
+ #
+ # This checks to see if there are duplicated t_rulename() functions or strings
+ # in the parser input file. This is done using a simple regular expression
+ # match on each line in the source code of the given module.
+ # -----------------------------------------------------------------------------
+
+ def validate_module(self, module):
+ try:
+ lines, linen = inspect.getsourcelines(module)
+ except IOError:
+ return
+
+ fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(')
+ sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=')
+
+ counthash = {}
+ linen += 1
+ for line in lines:
+ m = fre.match(line)
+ if not m:
+ m = sre.match(line)
+ if m:
+ name = m.group(1)
+ prev = counthash.get(name)
+ if not prev:
+ counthash[name] = linen
+ else:
+ filename = inspect.getsourcefile(module)
+ self.log.error('%s:%d: Rule %s redefined. Previously defined on line %d', filename, linen, name, prev)
+ self.error = True
+ linen += 1
+
+# -----------------------------------------------------------------------------
+# lex(module)
+#
+# Build all of the regular expression rules from definitions in the supplied module
+# -----------------------------------------------------------------------------
+def lex(module=None, object=None, debug=False, optimize=False, lextab='lextab',
+ reflags=int(re.VERBOSE), nowarn=False, outputdir=None, debuglog=None, errorlog=None):
+
+ if lextab is None:
+ lextab = 'lextab'
+
+ global lexer
+
+ ldict = None
+ stateinfo = {'INITIAL': 'inclusive'}
+ lexobj = Lexer()
+ lexobj.lexoptimize = optimize
+ global token, input
+
+ if errorlog is None:
+ errorlog = PlyLogger(sys.stderr)
+
+ if debug:
+ if debuglog is None:
+ debuglog = PlyLogger(sys.stderr)
+
+ # Get the module dictionary used for the lexer
+ if object:
+ module = object
+
+ # Get the module dictionary used for the parser
+ if module:
+ _items = [(k, getattr(module, k)) for k in dir(module)]
+ ldict = dict(_items)
+ # If no __file__ attribute is available, try to obtain it from the __module__ instead
+ if '__file__' not in ldict:
+ ldict['__file__'] = sys.modules[ldict['__module__']].__file__
+ else:
+ ldict = get_caller_module_dict(2)
+
+ # Determine if the module is part of a package or not.
+ # If so, fix the tabmodule setting so that tables load correctly
+ pkg = ldict.get('__package__')
+ if pkg and isinstance(lextab, str):
+ if '.' not in lextab:
+ lextab = pkg + '.' + lextab
+
+ # Collect parser information from the dictionary
+ linfo = LexerReflect(ldict, log=errorlog, reflags=reflags)
+ linfo.get_all()
+ if not optimize:
+ if linfo.validate_all():
+ raise SyntaxError("Can't build lexer")
+
+ if optimize and lextab:
+ try:
+ lexobj.readtab(lextab, ldict)
+ token = lexobj.token
+ input = lexobj.input
+ lexer = lexobj
+ return lexobj
+
+ except ImportError:
+ pass
+
+ # Dump some basic debugging information
+ if debug:
+ debuglog.info('lex: tokens = %r', linfo.tokens)
+ debuglog.info('lex: literals = %r', linfo.literals)
+ debuglog.info('lex: states = %r', linfo.stateinfo)
+
+ # Build a dictionary of valid token names
+ lexobj.lextokens = set()
+ for n in linfo.tokens:
+ lexobj.lextokens.add(n)
+
+ # Get literals specification
+ if isinstance(linfo.literals, (list, tuple)):
+ lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals)
+ else:
+ lexobj.lexliterals = linfo.literals
+
+ lexobj.lextokens_all = lexobj.lextokens | set(lexobj.lexliterals)
+
+ # Get the stateinfo dictionary
+ stateinfo = linfo.stateinfo
+
+ regexs = {}
+ # Build the master regular expressions
+ for state in stateinfo:
+ regex_list = []
+
+ # Add rules defined by functions first
+ for fname, f in linfo.funcsym[state]:
+ line = f.__code__.co_firstlineno
+ file = f.__code__.co_filename
+ regex_list.append('(?P<%s>%s)' % (fname, _get_regex(f)))
+ if debug:
+ debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", fname, _get_regex(f), state)
+
+ # Now add all of the simple rules
+ for name, r in linfo.strsym[state]:
+ regex_list.append('(?P<%s>%s)' % (name, r))
+ if debug:
+ debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", name, r, state)
+
+ regexs[state] = regex_list
+
+ # Build the master regular expressions
+
+ if debug:
+ debuglog.info('lex: ==== MASTER REGEXS FOLLOW ====')
+
+ for state in regexs:
+ lexre, re_text, re_names = _form_master_re(regexs[state], reflags, ldict, linfo.toknames)
+ lexobj.lexstatere[state] = lexre
+ lexobj.lexstateretext[state] = re_text
+ lexobj.lexstaterenames[state] = re_names
+ if debug:
+ for i, text in enumerate(re_text):
+ debuglog.info("lex: state '%s' : regex[%d] = '%s'", state, i, text)
+
+ # For inclusive states, we need to add the regular expressions from the INITIAL state
+ for state, stype in stateinfo.items():
+ if state != 'INITIAL' and stype == 'inclusive':
+ lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL'])
+ lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL'])
+ lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL'])
+
+ lexobj.lexstateinfo = stateinfo
+ lexobj.lexre = lexobj.lexstatere['INITIAL']
+ lexobj.lexretext = lexobj.lexstateretext['INITIAL']
+ lexobj.lexreflags = reflags
+
+ # Set up ignore variables
+ lexobj.lexstateignore = linfo.ignore
+ lexobj.lexignore = lexobj.lexstateignore.get('INITIAL', '')
+
+ # Set up error functions
+ lexobj.lexstateerrorf = linfo.errorf
+ lexobj.lexerrorf = linfo.errorf.get('INITIAL', None)
+ if not lexobj.lexerrorf:
+ errorlog.warning('No t_error rule is defined')
+
+ # Set up eof functions
+ lexobj.lexstateeoff = linfo.eoff
+ lexobj.lexeoff = linfo.eoff.get('INITIAL', None)
+
+ # Check state information for ignore and error rules
+ for s, stype in stateinfo.items():
+ if stype == 'exclusive':
+ if s not in linfo.errorf:
+ errorlog.warning("No error rule is defined for exclusive state '%s'", s)
+ if s not in linfo.ignore and lexobj.lexignore:
+ errorlog.warning("No ignore rule is defined for exclusive state '%s'", s)
+ elif stype == 'inclusive':
+ if s not in linfo.errorf:
+ linfo.errorf[s] = linfo.errorf.get('INITIAL', None)
+ if s not in linfo.ignore:
+ linfo.ignore[s] = linfo.ignore.get('INITIAL', '')
+
+ # Create global versions of the token() and input() functions
+ token = lexobj.token
+ input = lexobj.input
+ lexer = lexobj
+
+ # If in optimize mode, we write the lextab
+ if lextab and optimize:
+ if outputdir is None:
+ # If no output directory is set, the location of the output files
+ # is determined according to the following rules:
+ # - If lextab specifies a package, files go into that package directory
+ # - Otherwise, files go in the same directory as the specifying module
+ if isinstance(lextab, types.ModuleType):
+ srcfile = lextab.__file__
+ else:
+ if '.' not in lextab:
+ srcfile = ldict['__file__']
+ else:
+ parts = lextab.split('.')
+ pkgname = '.'.join(parts[:-1])
+ exec('import %s' % pkgname)
+ srcfile = getattr(sys.modules[pkgname], '__file__', '')
+ outputdir = os.path.dirname(srcfile)
+ try:
+ lexobj.writetab(lextab, outputdir)
+ except IOError as e:
+ errorlog.warning("Couldn't write lextab module %r. %s" % (lextab, e))
+
+ return lexobj
+
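+# A minimal usage sketch (illustrative only; it assumes the calling module does
+# "import ply.lex as lex", and the token names PLUS/NUMBER and rules below are
+# hypothetical, not defined here). It shows the kind of definitions lex()
+# collects from the caller:
+#
+#     tokens = ('PLUS', 'NUMBER')
+#     t_PLUS   = r'\+'
+#     t_ignore = ' \t'
+#
+#     def t_NUMBER(t):
+#         r'\d+'
+#         t.value = int(t.value)
+#         return t
+#
+#     def t_error(t):
+#         t.lexer.skip(1)
+#
+#     lexer = lex.lex()            # build the lexer from the rules above
+#     lexer.input('1 + 2')
+#     while True:
+#         tok = lexer.token()
+#         if not tok:
+#             break
+#         print(tok.type, tok.value)
+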
+# -----------------------------------------------------------------------------
+# runmain()
+#
+# This runs the lexer as a main program
+# -----------------------------------------------------------------------------
+
+def runmain(lexer=None, data=None):
+ if not data:
+ try:
+ filename = sys.argv[1]
+ f = open(filename)
+ data = f.read()
+ f.close()
+ except IndexError:
+ sys.stdout.write('Reading from standard input (type EOF to end):\n')
+ data = sys.stdin.read()
+
+ if lexer:
+ _input = lexer.input
+ else:
+ _input = input
+ _input(data)
+ if lexer:
+ _token = lexer.token
+ else:
+ _token = token
+
+ while True:
+ tok = _token()
+ if not tok:
+ break
+ sys.stdout.write('(%s,%r,%d,%d)\n' % (tok.type, tok.value, tok.lineno, tok.lexpos))
+
+# -----------------------------------------------------------------------------
+# @TOKEN(regex)
+#
+# This decorator function can be used to set the regex expression on a function
+# when its docstring might need to be set in an alternative way
+# -----------------------------------------------------------------------------
+
+def TOKEN(r):
+ def set_regex(f):
+ if hasattr(r, '__call__'):
+ f.regex = _get_regex(r)
+ else:
+ f.regex = r
+ return f
+ return set_regex
+
+# Alternative spelling of the TOKEN decorator
+Token = TOKEN
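+
+# For example (hypothetical rule name t_ID, following the pattern in the PLY
+# documentation), @TOKEN attaches a computed regex where writing it as a
+# literal docstring would be inconvenient:
+#
+#     identifier = r'[a-zA-Z_][a-zA-Z0-9_]*'
+#
+#     @TOKEN(identifier)
+#     def t_ID(t):
+#         return t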
+
diff --git a/third_party/python/ply/ply/yacc.py b/third_party/python/ply/ply/yacc.py
new file mode 100644
index 0000000000..03bd86ee07
--- /dev/null
+++ b/third_party/python/ply/ply/yacc.py
@@ -0,0 +1,3494 @@
+# -----------------------------------------------------------------------------
+# ply: yacc.py
+#
+# Copyright (C) 2001-2017
+# David M. Beazley (Dabeaz LLC)
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+# * Neither the name of the David Beazley or Dabeaz LLC may be used to
+# endorse or promote products derived from this software without
+# specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# -----------------------------------------------------------------------------
+#
+# This implements an LR parser that is constructed from grammar rules defined
+# as Python functions. The grammar is specified by supplying the BNF inside
+# Python documentation strings. The inspiration for this technique was borrowed
+# from John Aycock's Spark parsing system. PLY might be viewed as a cross between
+# Spark and the GNU bison utility.
+#
+# The current implementation is only somewhat object-oriented. The
+# LR parser itself is defined in terms of an object (which allows multiple
+# parsers to co-exist). However, most of the variables used during table
+# construction are defined in terms of global variables. Users shouldn't
+# notice unless they are trying to define multiple parsers at the same
+# time using threads (in which case they should have their head examined).
+#
+# This implementation supports both SLR and LALR(1) parsing. LALR(1)
+# support was originally implemented by Elias Ioup (ezioup@alumni.uchicago.edu),
+# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles,
+# Techniques, and Tools" (The Dragon Book). LALR(1) has since been replaced
+# by the more efficient DeRemer and Pennello algorithm.
+#
+# :::::::: WARNING :::::::
+#
+# Construction of LR parsing tables is fairly complicated and expensive.
+# To make this module run fast, a *LOT* of work has been put into
+# optimization---often at the expense of readability and what some might
+# consider to be good Python "coding style." Modify the code at your
+# own risk!
+# ----------------------------------------------------------------------------
+
+import re
+import types
+import sys
+import os.path
+import inspect
+import base64
+import warnings
+
+__version__ = '3.10'
+__tabversion__ = '3.10'
+
+#-----------------------------------------------------------------------------
+# === User configurable parameters ===
+#
+# Change these to modify the default behavior of yacc (if you wish)
+#-----------------------------------------------------------------------------
+
+yaccdebug = True # Debugging mode. If set, yacc generates a
+ # 'parser.out' file in the current directory
+
+debug_file = 'parser.out' # Default name of the debugging file
+tab_module = 'parsetab' # Default name of the table module
+default_lr = 'LALR' # Default LR table generation method
+
+error_count = 3 # Number of symbols that must be shifted to leave recovery mode
+
+yaccdevel = False # Set to True if developing yacc. This turns off optimized
+ # implementations of certain functions.
+
+resultlimit = 40 # Size limit of results when running in debug mode.
+
+pickle_protocol = 0 # Protocol to use when writing pickle files
+
+# String type-checking compatibility
+if sys.version_info[0] < 3:
+ string_types = basestring
+else:
+ string_types = str
+
+MAXINT = sys.maxsize
+
+# This object is a stand-in for a logging object created by the
+# logging module. PLY will use this by default to create things
+# such as the parser.out file. If a user wants more detailed
+# information, they can create their own logging object and pass
+# it into PLY.
+
+class PlyLogger(object):
+ def __init__(self, f):
+ self.f = f
+
+ def debug(self, msg, *args, **kwargs):
+ self.f.write((msg % args) + '\n')
+
+ info = debug
+
+ def warning(self, msg, *args, **kwargs):
+ self.f.write('WARNING: ' + (msg % args) + '\n')
+
+ def error(self, msg, *args, **kwargs):
+ self.f.write('ERROR: ' + (msg % args) + '\n')
+
+ critical = debug
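+
+# A usage sketch (the file name and keyword arguments are illustrative, and it
+# assumes the usual "import ply.yacc as yacc"): a caller can route PLY's
+# diagnostics to its own PlyLogger instead of stderr:
+#
+#     log = PlyLogger(open('parser_diag.txt', 'w'))
+#     parser = yacc.yacc(debug=True, debuglog=log, errorlog=log)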
+
+# Null logger is used when no output is generated. Does nothing.
+class NullLogger(object):
+ def __getattribute__(self, name):
+ return self
+
+ def __call__(self, *args, **kwargs):
+ return self
+
+# Exception raised for yacc-related errors
+class YaccError(Exception):
+ pass
+
+# Format the result message that the parser produces when running in debug mode.
+def format_result(r):
+ repr_str = repr(r)
+ if '\n' in repr_str:
+ repr_str = repr(repr_str)
+ if len(repr_str) > resultlimit:
+ repr_str = repr_str[:resultlimit] + ' ...'
+ result = '<%s @ 0x%x> (%s)' % (type(r).__name__, id(r), repr_str)
+ return result
+
+# Format stack entries when the parser is running in debug mode
+def format_stack_entry(r):
+ repr_str = repr(r)
+ if '\n' in repr_str:
+ repr_str = repr(repr_str)
+ if len(repr_str) < 16:
+ return repr_str
+ else:
+ return '<%s @ 0x%x>' % (type(r).__name__, id(r))
+
+# Panic mode error recovery support. This feature is being reworked--much of the
+# code here is to offer a deprecation/backwards compatible transition
+
+_errok = None
+_token = None
+_restart = None
+_warnmsg = '''PLY: Don't use global functions errok(), token(), and restart() in p_error().
+Instead, invoke the methods on the associated parser instance:
+
+ def p_error(p):
+ ...
+ # Use parser.errok(), parser.token(), parser.restart()
+ ...
+
+ parser = yacc.yacc()
+'''
+
+def errok():
+ warnings.warn(_warnmsg)
+ return _errok()
+
+def restart():
+ warnings.warn(_warnmsg)
+ return _restart()
+
+def token():
+ warnings.warn(_warnmsg)
+ return _token()
+
+# Utility function to call the p_error() function with some deprecation hacks
+def call_errorfunc(errorfunc, token, parser):
+ global _errok, _token, _restart
+ _errok = parser.errok
+ _token = parser.token
+ _restart = parser.restart
+ r = errorfunc(token)
+ try:
+ del _errok, _token, _restart
+ except NameError:
+ pass
+ return r
+
+#-----------------------------------------------------------------------------
+# === LR Parsing Engine ===
+#
+# The following classes are used for the LR parser itself. These are not
+# used during table construction and are independent of the actual LR
+# table generation algorithm
+#-----------------------------------------------------------------------------
+
+# This class is used to hold non-terminal grammar symbols during parsing.
+# It normally has the following attributes set:
+# .type = Grammar symbol type
+# .value = Symbol value
+# .lineno = Starting line number
+# .endlineno = Ending line number (optional, set automatically)
+# .lexpos = Starting lex position
+# .endlexpos = Ending lex position (optional, set automatically)
+
+class YaccSymbol:
+ def __str__(self):
+ return self.type
+
+ def __repr__(self):
+ return str(self)
+
+# This class is a wrapper around the objects actually passed to each
+# grammar rule. Index lookup and assignment actually operate on the
+# .value attribute of the underlying YaccSymbol object.
+# The lineno() method returns the line number of a given
+# item (or 0 if not defined). The linespan() method returns
+# a tuple of (startline,endline) representing the range of lines
+# for a symbol. The lexspan() method returns a tuple (lexpos,endlexpos)
+# representing the range of positional information for a symbol.
+
+class YaccProduction:
+ def __init__(self, s, stack=None):
+ self.slice = s
+ self.stack = stack
+ self.lexer = None
+ self.parser = None
+
+ def __getitem__(self, n):
+ if isinstance(n, slice):
+ return [s.value for s in self.slice[n]]
+ elif n >= 0:
+ return self.slice[n].value
+ else:
+ return self.stack[n].value
+
+ def __setitem__(self, n, v):
+ self.slice[n].value = v
+
+ def __getslice__(self, i, j):
+ return [s.value for s in self.slice[i:j]]
+
+ def __len__(self):
+ return len(self.slice)
+
+ def lineno(self, n):
+ return getattr(self.slice[n], 'lineno', 0)
+
+ def set_lineno(self, n, lineno):
+ self.slice[n].lineno = lineno
+
+ def linespan(self, n):
+ startline = getattr(self.slice[n], 'lineno', 0)
+ endline = getattr(self.slice[n], 'endlineno', startline)
+ return startline, endline
+
+ def lexpos(self, n):
+ return getattr(self.slice[n], 'lexpos', 0)
+
+ def lexspan(self, n):
+ startpos = getattr(self.slice[n], 'lexpos', 0)
+ endpos = getattr(self.slice[n], 'endlexpos', startpos)
+ return startpos, endpos
+
+ def error(self):
+ raise SyntaxError
+
+# -----------------------------------------------------------------------------
+# == LRParser ==
+#
+# The LR Parsing engine.
+# -----------------------------------------------------------------------------
+
+class LRParser:
+ def __init__(self, lrtab, errorf):
+ self.productions = lrtab.lr_productions
+ self.action = lrtab.lr_action
+ self.goto = lrtab.lr_goto
+ self.errorfunc = errorf
+ self.set_defaulted_states()
+ self.errorok = True
+
+ def errok(self):
+ self.errorok = True
+
+ def restart(self):
+ del self.statestack[:]
+ del self.symstack[:]
+ sym = YaccSymbol()
+ sym.type = '$end'
+ self.symstack.append(sym)
+ self.statestack.append(0)
+
+ # Defaulted state support.
+ # This method identifies parser states where there is only one possible reduction action.
+ # For such states, the parser can choose to make a rule reduction without consuming
+ # the next look-ahead token. This delayed invocation of the tokenizer can be useful in
+ # certain kinds of advanced parsing situations where the lexer and parser interact with
+ # each other or change states (i.e., manipulation of scope, lexer states, etc.).
+ #
+ # See: http://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions
+ def set_defaulted_states(self):
+ self.defaulted_states = {}
+ for state, actions in self.action.items():
+ rules = list(actions.values())
+ if len(rules) == 1 and rules[0] < 0:
+ self.defaulted_states[state] = rules[0]
+
+ def disable_defaulted_states(self):
+ self.defaulted_states = {}
+
+ def parse(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
+ if debug or yaccdevel:
+ if isinstance(debug, int):
+ debug = PlyLogger(sys.stderr)
+ return self.parsedebug(input, lexer, debug, tracking, tokenfunc)
+ elif tracking:
+ return self.parseopt(input, lexer, debug, tracking, tokenfunc)
+ else:
+ return self.parseopt_notrack(input, lexer, debug, tracking, tokenfunc)
+
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # parsedebug().
+ #
+ # This is the debugging enabled version of parse(). All changes made to the
+ # parsing engine should be made here. Optimized versions of this function
+ # are automatically created by the ply/ygen.py script. This script cuts out
+ # sections enclosed in markers such as this:
+ #
+ # #--! DEBUG
+ # statements
+ # #--! DEBUG
+ #
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ def parsedebug(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
+ #--! parsedebug-start
+ lookahead = None # Current lookahead symbol
+ lookaheadstack = [] # Stack of lookahead symbols
+ actions = self.action # Local reference to action table (to avoid lookup on self.)
+ goto = self.goto # Local reference to goto table (to avoid lookup on self.)
+ prod = self.productions # Local reference to production list (to avoid lookup on self.)
+ defaulted_states = self.defaulted_states # Local reference to defaulted states
+ pslice = YaccProduction(None) # Production object passed to grammar rules
+ errorcount = 0 # Used during error recovery
+
+ #--! DEBUG
+ debug.info('PLY: PARSE DEBUG START')
+ #--! DEBUG
+
+ # If no lexer was given, we will try to use the lex module
+ if not lexer:
+ from . import lex
+ lexer = lex.lexer
+
+ # Set up the lexer and parser objects on pslice
+ pslice.lexer = lexer
+ pslice.parser = self
+
+ # If input was supplied, pass to lexer
+ if input is not None:
+ lexer.input(input)
+
+ if tokenfunc is None:
+ # Tokenize function
+ get_token = lexer.token
+ else:
+ get_token = tokenfunc
+
+ # Set the parser() token method (sometimes used in error recovery)
+ self.token = get_token
+
+ # Set up the state and symbol stacks
+
+ statestack = [] # Stack of parsing states
+ self.statestack = statestack
+ symstack = [] # Stack of grammar symbols
+ self.symstack = symstack
+
+ pslice.stack = symstack # Put in the production
+ errtoken = None # Err token
+
+ # The start state is assumed to be (0,$end)
+
+ statestack.append(0)
+ sym = YaccSymbol()
+ sym.type = '$end'
+ symstack.append(sym)
+ state = 0
+ while True:
+ # Get the next symbol on the input. If a lookahead symbol
+ # is already set, we just use that. Otherwise, we'll pull
+ # the next token off of the lookaheadstack or from the lexer
+
+ #--! DEBUG
+ debug.debug('')
+ debug.debug('State : %s', state)
+ #--! DEBUG
+
+ if state not in defaulted_states:
+ if not lookahead:
+ if not lookaheadstack:
+ lookahead = get_token() # Get the next token
+ else:
+ lookahead = lookaheadstack.pop()
+ if not lookahead:
+ lookahead = YaccSymbol()
+ lookahead.type = '$end'
+
+ # Check the action table
+ ltype = lookahead.type
+ t = actions[state].get(ltype)
+ else:
+ t = defaulted_states[state]
+ #--! DEBUG
+ debug.debug('Defaulted state %s: Reduce using %d', state, -t)
+ #--! DEBUG
+
+ #--! DEBUG
+ debug.debug('Stack : %s',
+ ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
+ #--! DEBUG
+
+ if t is not None:
+ if t > 0:
+ # shift a symbol on the stack
+ statestack.append(t)
+ state = t
+
+ #--! DEBUG
+ debug.debug('Action : Shift and goto state %s', t)
+ #--! DEBUG
+
+ symstack.append(lookahead)
+ lookahead = None
+
+ # Decrease error count on successful shift
+ if errorcount:
+ errorcount -= 1
+ continue
+
+ if t < 0:
+ # reduce a symbol on the stack, emit a production
+ p = prod[-t]
+ pname = p.name
+ plen = p.len
+
+ # Get production function
+ sym = YaccSymbol()
+ sym.type = pname # Production name
+ sym.value = None
+
+ #--! DEBUG
+ if plen:
+ debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str,
+ '['+','.join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+']',
+ goto[statestack[-1-plen]][pname])
+ else:
+ debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, [],
+ goto[statestack[-1]][pname])
+
+ #--! DEBUG
+
+ if plen:
+ targ = symstack[-plen-1:]
+ targ[0] = sym
+
+ #--! TRACKING
+ if tracking:
+ t1 = targ[1]
+ sym.lineno = t1.lineno
+ sym.lexpos = t1.lexpos
+ t1 = targ[-1]
+ sym.endlineno = getattr(t1, 'endlineno', t1.lineno)
+ sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos)
+ #--! TRACKING
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # The code enclosed in this section is duplicated
+ # below as a performance optimization. Make sure
+ # changes get made in both locations.
+
+ pslice.slice = targ
+
+ try:
+ # Call the grammar rule with our special slice object
+ del symstack[-plen:]
+ self.state = state
+ p.callable(pslice)
+ del statestack[-plen:]
+ #--! DEBUG
+ debug.info('Result : %s', format_result(pslice[0]))
+ #--! DEBUG
+ symstack.append(sym)
+ state = goto[statestack[-1]][pname]
+ statestack.append(state)
+ except SyntaxError:
+ # If an error was set. Enter error recovery state
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
+ state = statestack[-1]
+ sym.type = 'error'
+ sym.value = 'error'
+ lookahead = sym
+ errorcount = error_count
+ self.errorok = False
+
+ continue
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ else:
+
+ #--! TRACKING
+ if tracking:
+ sym.lineno = lexer.lineno
+ sym.lexpos = lexer.lexpos
+ #--! TRACKING
+
+ targ = [sym]
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # The code enclosed in this section is duplicated
+ # above as a performance optimization. Make sure
+ # changes get made in both locations.
+
+ pslice.slice = targ
+
+ try:
+ # Call the grammar rule with our special slice object
+ self.state = state
+ p.callable(pslice)
+ #--! DEBUG
+ debug.info('Result : %s', format_result(pslice[0]))
+ #--! DEBUG
+ symstack.append(sym)
+ state = goto[statestack[-1]][pname]
+ statestack.append(state)
+ except SyntaxError:
+ # If an error was set. Enter error recovery state
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
+ state = statestack[-1]
+ sym.type = 'error'
+ sym.value = 'error'
+ lookahead = sym
+ errorcount = error_count
+ self.errorok = False
+
+ continue
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ if t == 0:
+ n = symstack[-1]
+ result = getattr(n, 'value', None)
+ #--! DEBUG
+ debug.info('Done : Returning %s', format_result(result))
+ debug.info('PLY: PARSE DEBUG END')
+ #--! DEBUG
+ return result
+
+ if t is None:
+
+ #--! DEBUG
+ debug.error('Error : %s',
+ ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
+ #--! DEBUG
+
+ # We have some kind of parsing error here. To handle
+ # this, we are going to push the current token onto
+ # the tokenstack and replace it with an 'error' token.
+ # If there are any synchronization rules, they may
+ # catch it.
+ #
+ # In addition to pushing the error token, we call
+ # the user-defined p_error() function if this is the
+ # first syntax error. This function is only called if
+ # errorcount == 0.
+ if errorcount == 0 or self.errorok:
+ errorcount = error_count
+ self.errorok = False
+ errtoken = lookahead
+ if errtoken.type == '$end':
+ errtoken = None # End of file!
+ if self.errorfunc:
+ if errtoken and not hasattr(errtoken, 'lexer'):
+ errtoken.lexer = lexer
+ self.state = state
+ tok = call_errorfunc(self.errorfunc, errtoken, self)
+ if self.errorok:
+ # User must have done some kind of panic
+ # mode recovery on their own. The
+ # returned token is the next lookahead
+ lookahead = tok
+ errtoken = None
+ continue
+ else:
+ if errtoken:
+ if hasattr(errtoken, 'lineno'):
+ lineno = lookahead.lineno
+ else:
+ lineno = 0
+ if lineno:
+ sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
+ else:
+ sys.stderr.write('yacc: Syntax error, token=%s\n' % errtoken.type)
+ else:
+ sys.stderr.write('yacc: Parse error in input. EOF\n')
+ return
+
+ else:
+ errorcount = error_count
+
+ # case 1: the statestack only has 1 entry on it. If we're in this state, the
+ # entire parse has been rolled back and we're completely hosed. The token is
+ # discarded and we just keep going.
+
+ if len(statestack) <= 1 and lookahead.type != '$end':
+ lookahead = None
+ errtoken = None
+ state = 0
+ # Nuke the pushback stack
+ del lookaheadstack[:]
+ continue
+
+ # case 2: the statestack has a couple of entries on it, but we're
+ # at the end of the file. nuke the top entry and generate an error token
+
+ # Start nuking entries on the stack
+ if lookahead.type == '$end':
+ # Whoa. We're really hosed here. Bail out
+ return
+
+ if lookahead.type != 'error':
+ sym = symstack[-1]
+ if sym.type == 'error':
+ # Hmmm. Error is on top of stack, we'll just nuke input
+ # symbol and continue
+ #--! TRACKING
+ if tracking:
+ sym.endlineno = getattr(lookahead, 'lineno', sym.lineno)
+ sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos)
+ #--! TRACKING
+ lookahead = None
+ continue
+
+ # Create the error symbol for the first time and make it the new lookahead symbol
+ t = YaccSymbol()
+ t.type = 'error'
+
+ if hasattr(lookahead, 'lineno'):
+ t.lineno = t.endlineno = lookahead.lineno
+ if hasattr(lookahead, 'lexpos'):
+ t.lexpos = t.endlexpos = lookahead.lexpos
+ t.value = lookahead
+ lookaheadstack.append(lookahead)
+ lookahead = t
+ else:
+ sym = symstack.pop()
+ #--! TRACKING
+ if tracking:
+ lookahead.lineno = sym.lineno
+ lookahead.lexpos = sym.lexpos
+ #--! TRACKING
+ statestack.pop()
+ state = statestack[-1]
+
+ continue
+
+ # Call an error function here
+ raise RuntimeError('yacc: internal parser error!!!\n')
+
+ #--! parsedebug-end
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # parseopt().
+ #
+ # Optimized version of parse() method. DO NOT EDIT THIS CODE DIRECTLY!
+ # This code is automatically generated by the ply/ygen.py script. Make
+ # changes to the parsedebug() method instead.
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ def parseopt(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
+ #--! parseopt-start
+ lookahead = None # Current lookahead symbol
+ lookaheadstack = [] # Stack of lookahead symbols
+ actions = self.action # Local reference to action table (to avoid lookup on self.)
+ goto = self.goto # Local reference to goto table (to avoid lookup on self.)
+ prod = self.productions # Local reference to production list (to avoid lookup on self.)
+ defaulted_states = self.defaulted_states # Local reference to defaulted states
+ pslice = YaccProduction(None) # Production object passed to grammar rules
+ errorcount = 0 # Used during error recovery
+
+
+ # If no lexer was given, we will try to use the lex module
+ if not lexer:
+ from . import lex
+ lexer = lex.lexer
+
+ # Set up the lexer and parser objects on pslice
+ pslice.lexer = lexer
+ pslice.parser = self
+
+ # If input was supplied, pass to lexer
+ if input is not None:
+ lexer.input(input)
+
+ if tokenfunc is None:
+ # Tokenize function
+ get_token = lexer.token
+ else:
+ get_token = tokenfunc
+
+ # Set the parser() token method (sometimes used in error recovery)
+ self.token = get_token
+
+ # Set up the state and symbol stacks
+
+ statestack = [] # Stack of parsing states
+ self.statestack = statestack
+ symstack = [] # Stack of grammar symbols
+ self.symstack = symstack
+
+ pslice.stack = symstack # Put in the production
+ errtoken = None # Err token
+
+ # The start state is assumed to be (0,$end)
+
+ statestack.append(0)
+ sym = YaccSymbol()
+ sym.type = '$end'
+ symstack.append(sym)
+ state = 0
+ while True:
+ # Get the next symbol on the input. If a lookahead symbol
+ # is already set, we just use that. Otherwise, we'll pull
+ # the next token off of the lookaheadstack or from the lexer
+
+
+ if state not in defaulted_states:
+ if not lookahead:
+ if not lookaheadstack:
+ lookahead = get_token() # Get the next token
+ else:
+ lookahead = lookaheadstack.pop()
+ if not lookahead:
+ lookahead = YaccSymbol()
+ lookahead.type = '$end'
+
+ # Check the action table
+ ltype = lookahead.type
+ t = actions[state].get(ltype)
+ else:
+ t = defaulted_states[state]
+
+
+ if t is not None:
+ if t > 0:
+ # shift a symbol on the stack
+ statestack.append(t)
+ state = t
+
+
+ symstack.append(lookahead)
+ lookahead = None
+
+ # Decrease error count on successful shift
+ if errorcount:
+ errorcount -= 1
+ continue
+
+ if t < 0:
+ # reduce a symbol on the stack, emit a production
+ p = prod[-t]
+ pname = p.name
+ plen = p.len
+
+ # Get production function
+ sym = YaccSymbol()
+ sym.type = pname # Production name
+ sym.value = None
+
+
+ if plen:
+ targ = symstack[-plen-1:]
+ targ[0] = sym
+
+ #--! TRACKING
+ if tracking:
+ t1 = targ[1]
+ sym.lineno = t1.lineno
+ sym.lexpos = t1.lexpos
+ t1 = targ[-1]
+ sym.endlineno = getattr(t1, 'endlineno', t1.lineno)
+ sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos)
+ #--! TRACKING
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # The code enclosed in this section is duplicated
+ # below as a performance optimization. Make sure
+ # changes get made in both locations.
+
+ pslice.slice = targ
+
+ try:
+ # Call the grammar rule with our special slice object
+ del symstack[-plen:]
+ self.state = state
+ p.callable(pslice)
+ del statestack[-plen:]
+ symstack.append(sym)
+ state = goto[statestack[-1]][pname]
+ statestack.append(state)
+ except SyntaxError:
+ # If an error was set. Enter error recovery state
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
+ state = statestack[-1]
+ sym.type = 'error'
+ sym.value = 'error'
+ lookahead = sym
+ errorcount = error_count
+ self.errorok = False
+
+ continue
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ else:
+
+ #--! TRACKING
+ if tracking:
+ sym.lineno = lexer.lineno
+ sym.lexpos = lexer.lexpos
+ #--! TRACKING
+
+ targ = [sym]
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # The code enclosed in this section is duplicated
+ # above as a performance optimization. Make sure
+ # changes get made in both locations.
+
+ pslice.slice = targ
+
+ try:
+ # Call the grammar rule with our special slice object
+ self.state = state
+ p.callable(pslice)
+ symstack.append(sym)
+ state = goto[statestack[-1]][pname]
+ statestack.append(state)
+ except SyntaxError:
+ # If an error was set. Enter error recovery state
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
+ state = statestack[-1]
+ sym.type = 'error'
+ sym.value = 'error'
+ lookahead = sym
+ errorcount = error_count
+ self.errorok = False
+
+ continue
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ if t == 0:
+ n = symstack[-1]
+ result = getattr(n, 'value', None)
+ return result
+
+ if t is None:
+
+
+ # We have some kind of parsing error here. To handle
+ # this, we are going to push the current token onto
+ # the tokenstack and replace it with an 'error' token.
+ # If there are any synchronization rules, they may
+ # catch it.
+ #
+ # In addition to pushing the error token, we call
+ # the user-defined p_error() function if this is the
+ # first syntax error. This function is only called if
+ # errorcount == 0.
+ if errorcount == 0 or self.errorok:
+ errorcount = error_count
+ self.errorok = False
+ errtoken = lookahead
+ if errtoken.type == '$end':
+ errtoken = None # End of file!
+ if self.errorfunc:
+ if errtoken and not hasattr(errtoken, 'lexer'):
+ errtoken.lexer = lexer
+ self.state = state
+ tok = call_errorfunc(self.errorfunc, errtoken, self)
+ if self.errorok:
+ # User must have done some kind of panic
+ # mode recovery on their own. The
+ # returned token is the next lookahead
+ lookahead = tok
+ errtoken = None
+ continue
+ else:
+ if errtoken:
+ if hasattr(errtoken, 'lineno'):
+ lineno = lookahead.lineno
+ else:
+ lineno = 0
+ if lineno:
+ sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
+ else:
+ sys.stderr.write('yacc: Syntax error, token=%s\n' % errtoken.type)
+ else:
+ sys.stderr.write('yacc: Parse error in input. EOF\n')
+ return
+
+ else:
+ errorcount = error_count
+
+ # case 1: the statestack only has 1 entry on it. If we're in this state, the
+ # entire parse has been rolled back and we're completely hosed. The token is
+ # discarded and we just keep going.
+
+ if len(statestack) <= 1 and lookahead.type != '$end':
+ lookahead = None
+ errtoken = None
+ state = 0
+ # Nuke the pushback stack
+ del lookaheadstack[:]
+ continue
+
+ # case 2: the statestack has a couple of entries on it, but we're
+ # at the end of the file. nuke the top entry and generate an error token
+
+ # Start nuking entries on the stack
+ if lookahead.type == '$end':
+ # Whoa. We're really hosed here. Bail out
+ return
+
+ if lookahead.type != 'error':
+ sym = symstack[-1]
+ if sym.type == 'error':
+ # Hmmm. Error is on top of stack, we'll just nuke input
+ # symbol and continue
+ #--! TRACKING
+ if tracking:
+ sym.endlineno = getattr(lookahead, 'lineno', sym.lineno)
+ sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos)
+ #--! TRACKING
+ lookahead = None
+ continue
+
+ # Create the error symbol for the first time and make it the new lookahead symbol
+ t = YaccSymbol()
+ t.type = 'error'
+
+ if hasattr(lookahead, 'lineno'):
+ t.lineno = t.endlineno = lookahead.lineno
+ if hasattr(lookahead, 'lexpos'):
+ t.lexpos = t.endlexpos = lookahead.lexpos
+ t.value = lookahead
+ lookaheadstack.append(lookahead)
+ lookahead = t
+ else:
+ sym = symstack.pop()
+ #--! TRACKING
+ if tracking:
+ lookahead.lineno = sym.lineno
+ lookahead.lexpos = sym.lexpos
+ #--! TRACKING
+ statestack.pop()
+ state = statestack[-1]
+
+ continue
+
+ # Call an error function here
+ raise RuntimeError('yacc: internal parser error!!!\n')
+
+ #--! parseopt-end
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # parseopt_notrack().
+ #
+ # Optimized version of parseopt() with line number tracking removed.
+ # DO NOT EDIT THIS CODE DIRECTLY. This code is automatically generated
+ # by the ply/ygen.py script. Make changes to the parsedebug() method instead.
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ def parseopt_notrack(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
+ #--! parseopt-notrack-start
+ lookahead = None # Current lookahead symbol
+ lookaheadstack = [] # Stack of lookahead symbols
+ actions = self.action # Local reference to action table (to avoid lookup on self.)
+ goto = self.goto # Local reference to goto table (to avoid lookup on self.)
+ prod = self.productions # Local reference to production list (to avoid lookup on self.)
+ defaulted_states = self.defaulted_states # Local reference to defaulted states
+ pslice = YaccProduction(None) # Production object passed to grammar rules
+ errorcount = 0 # Used during error recovery
+
+
+ # If no lexer was given, we will try to use the lex module
+ if not lexer:
+ from . import lex
+ lexer = lex.lexer
+
+ # Set up the lexer and parser objects on pslice
+ pslice.lexer = lexer
+ pslice.parser = self
+
+ # If input was supplied, pass to lexer
+ if input is not None:
+ lexer.input(input)
+
+ if tokenfunc is None:
+ # Tokenize function
+ get_token = lexer.token
+ else:
+ get_token = tokenfunc
+
+ # Set the parser() token method (sometimes used in error recovery)
+ self.token = get_token
+
+ # Set up the state and symbol stacks
+
+ statestack = [] # Stack of parsing states
+ self.statestack = statestack
+ symstack = [] # Stack of grammar symbols
+ self.symstack = symstack
+
+ pslice.stack = symstack # Put in the production
+ errtoken = None # Err token
+
+ # The start state is assumed to be (0,$end)
+
+ statestack.append(0)
+ sym = YaccSymbol()
+ sym.type = '$end'
+ symstack.append(sym)
+ state = 0
+ while True:
+ # Get the next symbol on the input. If a lookahead symbol
+ # is already set, we just use that. Otherwise, we'll pull
+ # the next token off of the lookaheadstack or from the lexer
+
+
+ if state not in defaulted_states:
+ if not lookahead:
+ if not lookaheadstack:
+ lookahead = get_token() # Get the next token
+ else:
+ lookahead = lookaheadstack.pop()
+ if not lookahead:
+ lookahead = YaccSymbol()
+ lookahead.type = '$end'
+
+ # Check the action table
+ ltype = lookahead.type
+ t = actions[state].get(ltype)
+ else:
+ t = defaulted_states[state]
+
+
+ if t is not None:
+ if t > 0:
+ # shift a symbol on the stack
+ statestack.append(t)
+ state = t
+
+
+ symstack.append(lookahead)
+ lookahead = None
+
+ # Decrease error count on successful shift
+ if errorcount:
+ errorcount -= 1
+ continue
+
+ if t < 0:
+ # reduce a symbol on the stack, emit a production
+ p = prod[-t]
+ pname = p.name
+ plen = p.len
+
+ # Get production function
+ sym = YaccSymbol()
+ sym.type = pname # Production name
+ sym.value = None
+
+
+ if plen:
+ targ = symstack[-plen-1:]
+ targ[0] = sym
+
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # The code enclosed in this section is duplicated
+ # below as a performance optimization. Make sure
+ # changes get made in both locations.
+
+ pslice.slice = targ
+
+ try:
+ # Call the grammar rule with our special slice object
+ del symstack[-plen:]
+ self.state = state
+ p.callable(pslice)
+ del statestack[-plen:]
+ symstack.append(sym)
+ state = goto[statestack[-1]][pname]
+ statestack.append(state)
+ except SyntaxError:
+ # If an error was set. Enter error recovery state
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
+ state = statestack[-1]
+ sym.type = 'error'
+ sym.value = 'error'
+ lookahead = sym
+ errorcount = error_count
+ self.errorok = False
+
+ continue
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ else:
+
+
+ targ = [sym]
+
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ # The code enclosed in this section is duplicated
+ # above as a performance optimization. Make sure
+ # changes get made in both locations.
+
+ pslice.slice = targ
+
+ try:
+ # Call the grammar rule with our special slice object
+ self.state = state
+ p.callable(pslice)
+ symstack.append(sym)
+ state = goto[statestack[-1]][pname]
+ statestack.append(state)
+ except SyntaxError:
+ # If an error was set. Enter error recovery state
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
+ state = statestack[-1]
+ sym.type = 'error'
+ sym.value = 'error'
+ lookahead = sym
+ errorcount = error_count
+ self.errorok = False
+
+ continue
+ # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ if t == 0:
+ n = symstack[-1]
+ result = getattr(n, 'value', None)
+ return result
+
+ if t is None:
+
+
+ # We have some kind of parsing error here. To handle
+ # this, we are going to push the current token onto
+ # the tokenstack and replace it with an 'error' token.
+ # If there are any synchronization rules, they may
+ # catch it.
+ #
+ # In addition to pushing the error token, we call
+ # the user-defined p_error() function if this is the
+ # first syntax error. This function is only called if
+ # errorcount == 0.
+ if errorcount == 0 or self.errorok:
+ errorcount = error_count
+ self.errorok = False
+ errtoken = lookahead
+ if errtoken.type == '$end':
+ errtoken = None # End of file!
+ if self.errorfunc:
+ if errtoken and not hasattr(errtoken, 'lexer'):
+ errtoken.lexer = lexer
+ self.state = state
+ tok = call_errorfunc(self.errorfunc, errtoken, self)
+ if self.errorok:
+ # User must have done some kind of panic
+ # mode recovery on their own. The
+ # returned token is the next lookahead
+ lookahead = tok
+ errtoken = None
+ continue
+ else:
+ if errtoken:
+ if hasattr(errtoken, 'lineno'):
+ lineno = lookahead.lineno
+ else:
+ lineno = 0
+ if lineno:
+ sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
+ else:
+ sys.stderr.write('yacc: Syntax error, token=%s\n' % errtoken.type)
+ else:
+ sys.stderr.write('yacc: Parse error in input. EOF\n')
+ return
+
+ else:
+ errorcount = error_count
+
+ # case 1: the statestack only has 1 entry on it. If we're in this state, the
+ # entire parse has been rolled back and we're completely hosed. The token is
+ # discarded and we just keep going.
+
+ if len(statestack) <= 1 and lookahead.type != '$end':
+ lookahead = None
+ errtoken = None
+ state = 0
+ # Nuke the pushback stack
+ del lookaheadstack[:]
+ continue
+
+ # case 2: the statestack has a couple of entries on it, but we're
+ # at the end of the file. nuke the top entry and generate an error token
+
+ # Start nuking entries on the stack
+ if lookahead.type == '$end':
+ # Whoa. We're really hosed here. Bail out
+ return
+
+ if lookahead.type != 'error':
+ sym = symstack[-1]
+ if sym.type == 'error':
+ # Hmmm. Error is on top of stack, we'll just nuke input
+ # symbol and continue
+ lookahead = None
+ continue
+
+ # Create the error symbol for the first time and make it the new lookahead symbol
+ t = YaccSymbol()
+ t.type = 'error'
+
+ if hasattr(lookahead, 'lineno'):
+ t.lineno = t.endlineno = lookahead.lineno
+ if hasattr(lookahead, 'lexpos'):
+ t.lexpos = t.endlexpos = lookahead.lexpos
+ t.value = lookahead
+ lookaheadstack.append(lookahead)
+ lookahead = t
+ else:
+ sym = symstack.pop()
+ statestack.pop()
+ state = statestack[-1]
+
+ continue
+
+ # Call an error function here
+ raise RuntimeError('yacc: internal parser error!!!\n')
+
+ #--! parseopt-notrack-end
+
+# -----------------------------------------------------------------------------
+# === Grammar Representation ===
+#
+# The following functions, classes, and variables are used to represent and
+# manipulate the rules that make up a grammar.
+# -----------------------------------------------------------------------------
+
+# regex matching identifiers
+_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$')
+
+# -----------------------------------------------------------------------------
+# class Production:
+#
+# This class stores the raw information about a single production or grammar rule.
+# A grammar rule refers to a specification such as this:
+#
+# expr : expr PLUS term
+#
+# Here are the basic attributes defined on all productions
+#
+# name - Name of the production. For example 'expr'
+# prod - A list of symbols on the right side ['expr','PLUS','term']
+# prec - Production precedence level
+# number - Production number.
+# func - Function that executes on reduce
+# file - File where production function is defined
+# lineno - Line number where production function is defined
+#
+# The following additional attributes are also defined:
+#
+# len - Length of the production (number of symbols on right hand side)
+# usyms - Set of unique symbols found in the production
+# -----------------------------------------------------------------------------
+
+class Production(object):
+ reduced = 0
+ def __init__(self, number, name, prod, precedence=('right', 0), func=None, file='', line=0):
+ self.name = name
+ self.prod = tuple(prod)
+ self.number = number
+ self.func = func
+ self.callable = None
+ self.file = file
+ self.line = line
+ self.prec = precedence
+
+ # Internal settings used during table construction
+
+ self.len = len(self.prod) # Length of the production
+
+ # Create a list of unique production symbols used in the production
+ self.usyms = []
+ for s in self.prod:
+ if s not in self.usyms:
+ self.usyms.append(s)
+
+ # List of all LR items for the production
+ self.lr_items = []
+ self.lr_next = None
+
+ # Create a string representation
+ if self.prod:
+ self.str = '%s -> %s' % (self.name, ' '.join(self.prod))
+ else:
+ self.str = '%s -> <empty>' % self.name
+
+ def __str__(self):
+ return self.str
+
+ def __repr__(self):
+ return 'Production(' + str(self) + ')'
+
+ def __len__(self):
+ return len(self.prod)
+
+ def __nonzero__(self):
+ return 1
+
+ def __getitem__(self, index):
+ return self.prod[index]
+
+ # Return the nth lr_item from the production (or None if at the end)
+ def lr_item(self, n):
+ if n > len(self.prod):
+ return None
+ p = LRItem(self, n)
+ # Precompute the list of productions immediately following.
+ try:
+ p.lr_after = Prodnames[p.prod[n+1]]
+ except (IndexError, KeyError):
+ p.lr_after = []
+ try:
+ p.lr_before = p.prod[n-1]
+ except IndexError:
+ p.lr_before = None
+ return p
+
+ # Bind the production function name to a callable
+ def bind(self, pdict):
+ if self.func:
+ self.callable = pdict[self.func]
+
+# This class serves as a minimal standin for Production objects when
+# reading table data from files. It only contains information
+# actually used by the LR parsing engine, plus some additional
+# debugging information.
+class MiniProduction(object):
+ def __init__(self, str, name, len, func, file, line):
+ self.name = name
+ self.len = len
+ self.func = func
+ self.callable = None
+ self.file = file
+ self.line = line
+ self.str = str
+
+ def __str__(self):
+ return self.str
+
+ def __repr__(self):
+ return 'MiniProduction(%s)' % self.str
+
+ # Bind the production function name to a callable
+ def bind(self, pdict):
+ if self.func:
+ self.callable = pdict[self.func]
+
+
+# -----------------------------------------------------------------------------
+# class LRItem
+#
+# This class represents a specific stage of parsing a production rule. For
+# example:
+#
+# expr : expr . PLUS term
+#
+# In the above, the "." represents the current location of the parse. Here
+# are its basic attributes:
+#
+# name - Name of the production. For example 'expr'
+# prod - A list of symbols on the right side ['expr','.', 'PLUS','term']
+# number - Production number.
+#
+# lr_next - Next LR item. For example, if we are 'expr -> expr . PLUS term'
+# then lr_next refers to 'expr -> expr PLUS . term'
+# lr_index - LR item index (location of the ".") in the prod list.
+# lookaheads - LALR lookahead symbols for this item
+# len - Length of the production (number of symbols on right hand side)
+# lr_after - List of all productions that immediately follow
+# lr_before - Grammar symbol immediately before
+# -----------------------------------------------------------------------------
+
+class LRItem(object):
+ def __init__(self, p, n):
+ self.name = p.name
+ self.prod = list(p.prod)
+ self.number = p.number
+ self.lr_index = n
+ self.lookaheads = {}
+ self.prod.insert(n, '.')
+ self.prod = tuple(self.prod)
+ self.len = len(self.prod)
+ self.usyms = p.usyms
+
+ def __str__(self):
+ if self.prod:
+ s = '%s -> %s' % (self.name, ' '.join(self.prod))
+ else:
+ s = '%s -> <empty>' % self.name
+ return s
+
+ def __repr__(self):
+ return 'LRItem(' + str(self) + ')'
+
+# -----------------------------------------------------------------------------
+# rightmost_terminal()
+#
+# Return the rightmost terminal from a list of symbols. Used in add_production()
+# -----------------------------------------------------------------------------
+def rightmost_terminal(symbols, terminals):
+ i = len(symbols) - 1
+ while i >= 0:
+ if symbols[i] in terminals:
+ return symbols[i]
+ i -= 1
+ return None
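+
+# For example (symbol names are hypothetical): if terminals contains 'PLUS'
+# but not 'expr' or 'term', then rightmost_terminal(['expr', 'PLUS', 'term'],
+# terminals) returns 'PLUS', the last symbol in the list that is a terminal.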
+
+# -----------------------------------------------------------------------------
+# === GRAMMAR CLASS ===
+#
+# The following class represents the contents of the specified grammar along
+# with various computed properties such as first sets, follow sets, LR items, etc.
+# This data is used for critical parts of the table generation process later.
+# -----------------------------------------------------------------------------
+
+class GrammarError(YaccError):
+ pass
+
+class Grammar(object):
+ def __init__(self, terminals):
+ self.Productions = [None] # A list of all of the productions. The first
+ # entry is always reserved for the purpose of
+ # building an augmented grammar
+
+ self.Prodnames = {} # A dictionary mapping the names of nonterminals to a list of all
+ # productions of that nonterminal.
+
+ self.Prodmap = {} # A dictionary that is only used to detect duplicate
+ # productions.
+
+ self.Terminals = {} # A dictionary mapping the names of terminal symbols to a
+ # list of the rules where they are used.
+
+ for term in terminals:
+ self.Terminals[term] = []
+
+ self.Terminals['error'] = []
+
+ self.Nonterminals = {} # A dictionary mapping names of nonterminals to a list
+ # of rule numbers where they are used.
+
+ self.First = {} # A dictionary of precomputed FIRST(x) symbols
+
+ self.Follow = {} # A dictionary of precomputed FOLLOW(x) symbols
+
+ self.Precedence = {} # Precedence rules for each terminal. Contains tuples of the
+ # form ('right',level) or ('nonassoc', level) or ('left',level)
+
+ self.UsedPrecedence = set() # Precedence rules that were actually used by the grammar.
+ # This is only used to provide error checking and to generate
+ # a warning about unused precedence rules.
+
+ self.Start = None # Starting symbol for the grammar
+
+
+ def __len__(self):
+ return len(self.Productions)
+
+ def __getitem__(self, index):
+ return self.Productions[index]
+
+ # -----------------------------------------------------------------------------
+ # set_precedence()
+ #
+ # Sets the precedence for a given terminal. assoc is the associativity such as
+ # 'left','right', or 'nonassoc'. level is a numeric level.
+ #
+ # -----------------------------------------------------------------------------
+
+ def set_precedence(self, term, assoc, level):
+ assert self.Productions == [None], 'Must call set_precedence() before add_production()'
+ if term in self.Precedence:
+ raise GrammarError('Precedence already specified for terminal %r' % term)
+ if assoc not in ['left', 'right', 'nonassoc']:
+ raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'")
+ self.Precedence[term] = (assoc, level)
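+
+ # A usage sketch (terminal names are hypothetical); these calls mirror a
+ # user-level precedence table such as
+ # (('left', 'PLUS', 'MINUS'), ('left', 'TIMES', 'DIVIDE')):
+ #
+ #     g = Grammar(['PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'NUMBER'])
+ #     g.set_precedence('PLUS',   'left', 1)
+ #     g.set_precedence('MINUS',  'left', 1)
+ #     g.set_precedence('TIMES',  'left', 2)
+ #     g.set_precedence('DIVIDE', 'left', 2)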
+
+ # -----------------------------------------------------------------------------
+ # add_production()
+ #
+ # Given an action function, this function assembles a production rule and
+ # computes its precedence level.
+ #
+ # The production rule is supplied as a list of symbols. For example,
+ # a rule such as 'expr : expr PLUS term' has a production name of 'expr' and
+ # symbols ['expr','PLUS','term'].
+ #
+ # Precedence is determined by the precedence of the right-most terminal
+ # or the precedence of a terminal specified by %prec.
+ #
+ # A variety of error checks are performed to make sure production symbols
+ # are valid and that %prec is used correctly.
+ # -----------------------------------------------------------------------------
+
+ def add_production(self, prodname, syms, func=None, file='', line=0):
+
+ if prodname in self.Terminals:
+ raise GrammarError('%s:%d: Illegal rule name %r. Already defined as a token' % (file, line, prodname))
+ if prodname == 'error':
+ raise GrammarError('%s:%d: Illegal rule name %r. error is a reserved word' % (file, line, prodname))
+ if not _is_identifier.match(prodname):
+ raise GrammarError('%s:%d: Illegal rule name %r' % (file, line, prodname))
+
+ # Look for literal tokens
+ for n, s in enumerate(syms):
+ if s[0] in "'\"":
+ try:
+ c = eval(s)
+ if (len(c) > 1):
+ raise GrammarError('%s:%d: Literal token %s in rule %r may only be a single character' %
+ (file, line, s, prodname))
+ if c not in self.Terminals:
+ self.Terminals[c] = []
+ syms[n] = c
+ continue
+ except SyntaxError:
+ pass
+ if not _is_identifier.match(s) and s != '%prec':
+ raise GrammarError('%s:%d: Illegal name %r in rule %r' % (file, line, s, prodname))
+
+ # Determine the precedence level
+ if '%prec' in syms:
+ if syms[-1] == '%prec':
+ raise GrammarError('%s:%d: Syntax error. Nothing follows %%prec' % (file, line))
+ if syms[-2] != '%prec':
+ raise GrammarError('%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule' %
+ (file, line))
+ precname = syms[-1]
+ prodprec = self.Precedence.get(precname)
+ if not prodprec:
+ raise GrammarError('%s:%d: Nothing known about the precedence of %r' % (file, line, precname))
+ else:
+ self.UsedPrecedence.add(precname)
+ del syms[-2:] # Drop %prec from the rule
+ else:
+ # If no %prec, precedence is determined by the rightmost terminal symbol
+ precname = rightmost_terminal(syms, self.Terminals)
+ prodprec = self.Precedence.get(precname, ('right', 0))
+
+ # See if the rule is already in the rulemap
+ map = '%s -> %s' % (prodname, syms)
+ if map in self.Prodmap:
+ m = self.Prodmap[map]
+ raise GrammarError('%s:%d: Duplicate rule %s. ' % (file, line, m) +
+ 'Previous definition at %s:%d' % (m.file, m.line))
+
+ # From this point on, everything is valid. Create a new Production instance
+ pnumber = len(self.Productions)
+ if prodname not in self.Nonterminals:
+ self.Nonterminals[prodname] = []
+
+ # Add the production number to Terminals and Nonterminals
+ for t in syms:
+ if t in self.Terminals:
+ self.Terminals[t].append(pnumber)
+ else:
+ if t not in self.Nonterminals:
+ self.Nonterminals[t] = []
+ self.Nonterminals[t].append(pnumber)
+
+ # Create a production and add it to the list of productions
+ p = Production(pnumber, prodname, syms, prodprec, func, file, line)
+ self.Productions.append(p)
+ self.Prodmap[map] = p
+
+ # Add to the global productions list
+ try:
+ self.Prodnames[prodname].append(p)
+ except KeyError:
+ self.Prodnames[prodname] = [p]
+
+ # -----------------------------------------------------------------------------
+ # set_start()
+ #
+ # Sets the starting symbol and creates the augmented grammar. Production
+ # rule 0 is S' -> start where start is the start symbol.
+ # -----------------------------------------------------------------------------
+
+ def set_start(self, start=None):
+ if not start:
+ start = self.Productions[1].name
+ if start not in self.Nonterminals:
+ raise GrammarError('start symbol %s undefined' % start)
+ self.Productions[0] = Production(0, "S'", [start])
+ self.Nonterminals[start].append(0)
+ self.Start = start
+
+ # -----------------------------------------------------------------------------
+ # find_unreachable()
+ #
+ # Find all of the nonterminal symbols that can't be reached from the starting
+ # symbol. Returns a list of nonterminals that can't be reached.
+ # -----------------------------------------------------------------------------
+
+ def find_unreachable(self):
+
+ # Mark all symbols that are reachable from a symbol s
+ def mark_reachable_from(s):
+ if s in reachable:
+ return
+ reachable.add(s)
+ for p in self.Prodnames.get(s, []):
+ for r in p.prod:
+ mark_reachable_from(r)
+
+ reachable = set()
+ mark_reachable_from(self.Productions[0].prod[0])
+ return [s for s in self.Nonterminals if s not in reachable]
+
+ # -----------------------------------------------------------------------------
+ # infinite_cycles()
+ #
+ # This function looks at the various parsing rules and tries to detect
+ # infinite recursion cycles (grammar rules where there is no possible way
+ # to derive a string of only terminals).
+ # -----------------------------------------------------------------------------
+
+ def infinite_cycles(self):
+ terminates = {}
+
+ # Terminals:
+ for t in self.Terminals:
+ terminates[t] = True
+
+ terminates['$end'] = True
+
+ # Nonterminals:
+
+ # Initialize to false:
+ for n in self.Nonterminals:
+ terminates[n] = False
+
+ # Then propagate termination until no change:
+ while True:
+ some_change = False
+ for (n, pl) in self.Prodnames.items():
+ # Nonterminal n terminates iff any of its productions terminates.
+ for p in pl:
+ # Production p terminates iff all of its rhs symbols terminate.
+ for s in p.prod:
+ if not terminates[s]:
+ # The symbol s does not terminate,
+ # so production p does not terminate.
+ p_terminates = False
+ break
+ else:
+ # didn't break from the loop,
+ # so every symbol s terminates
+ # so production p terminates.
+ p_terminates = True
+
+ if p_terminates:
+ # symbol n terminates!
+ if not terminates[n]:
+ terminates[n] = True
+ some_change = True
+ # Don't need to consider any more productions for this n.
+ break
+
+ if not some_change:
+ break
+
+ infinite = []
+ for (s, term) in terminates.items():
+ if not term:
+ if s not in self.Prodnames and s not in self.Terminals and s != 'error':
+ # s is used-but-not-defined, and we've already warned of that,
+ # so it would be overkill to say that it's also non-terminating.
+ pass
+ else:
+ infinite.append(s)
+
+ return infinite
+
+ # -----------------------------------------------------------------------------
+ # undefined_symbols()
+ #
+ # Find all symbols that were used in the grammar, but not defined as tokens or
+ # grammar rules. Returns a list of tuples (sym, prod) where sym is the symbol
+ # and prod is the production where the symbol was used.
+ # -----------------------------------------------------------------------------
+ def undefined_symbols(self):
+ result = []
+ for p in self.Productions:
+ if not p:
+ continue
+
+ for s in p.prod:
+ if s not in self.Prodnames and s not in self.Terminals and s != 'error':
+ result.append((s, p))
+ return result
+
+ # -----------------------------------------------------------------------------
+ # unused_terminals()
+ #
 + # Find all terminals that were defined, but not used by the grammar.
 + # Returns a list of the unused terminal symbols.
+ # -----------------------------------------------------------------------------
+ def unused_terminals(self):
+ unused_tok = []
+ for s, v in self.Terminals.items():
+ if s != 'error' and not v:
+ unused_tok.append(s)
+
+ return unused_tok
+
+ # ------------------------------------------------------------------------------
+ # unused_rules()
+ #
 + # Find all grammar rules that were defined, but not used (for example because
 + # they are unreachable). Returns a list of productions.
+ # ------------------------------------------------------------------------------
+
+ def unused_rules(self):
+ unused_prod = []
+ for s, v in self.Nonterminals.items():
+ if not v:
+ p = self.Prodnames[s][0]
+ unused_prod.append(p)
+ return unused_prod
+
+ # -----------------------------------------------------------------------------
+ # unused_precedence()
+ #
+ # Returns a list of tuples (term,precedence) corresponding to precedence
+ # rules that were never used by the grammar. term is the name of the terminal
+ # on which precedence was applied and precedence is a string such as 'left' or
+ # 'right' corresponding to the type of precedence.
+ # -----------------------------------------------------------------------------
+
+ def unused_precedence(self):
+ unused = []
+ for termname in self.Precedence:
+ if not (termname in self.Terminals or termname in self.UsedPrecedence):
+ unused.append((termname, self.Precedence[termname][0]))
+
+ return unused
+
+ # -------------------------------------------------------------------------
+ # _first()
+ #
+ # Compute the value of FIRST1(beta) where beta is a tuple of symbols.
+ #
 + # During execution of compute_first(), the result may be incomplete.
+ # Afterward (e.g., when called from compute_follow()), it will be complete.
+ # -------------------------------------------------------------------------
+ def _first(self, beta):
+
+ # We are computing First(x1,x2,x3,...,xn)
+ result = []
+ for x in beta:
+ x_produces_empty = False
+
+ # Add all the non-<empty> symbols of First[x] to the result.
+ for f in self.First[x]:
+ if f == '<empty>':
+ x_produces_empty = True
+ else:
+ if f not in result:
+ result.append(f)
+
+ if x_produces_empty:
+ # We have to consider the next x in beta,
+ # i.e. stay in the loop.
+ pass
+ else:
+ # We don't have to consider any further symbols in beta.
+ break
+ else:
+ # There was no 'break' from the loop,
+ # so x_produces_empty was true for all x in beta,
+ # so beta produces empty as well.
+ result.append('<empty>')
+
+ return result
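 +
 + # For example, if First['T'] == ['NUMBER'] and 'PLUS' is a terminal, then
 + # _first(('PLUS', 'T')) returns ['PLUS'] (the scan stops at the first
 + # non-nullable symbol), while _first(()) returns ['<empty>'].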
+
+ # -------------------------------------------------------------------------
+ # compute_first()
+ #
+ # Compute the value of FIRST1(X) for all symbols
+ # -------------------------------------------------------------------------
+ def compute_first(self):
+ if self.First:
+ return self.First
+
+ # Terminals:
+ for t in self.Terminals:
+ self.First[t] = [t]
+
+ self.First['$end'] = ['$end']
+
+ # Nonterminals:
+
+ # Initialize to the empty set:
+ for n in self.Nonterminals:
+ self.First[n] = []
+
+ # Then propagate symbols until no change:
+ while True:
+ some_change = False
+ for n in self.Nonterminals:
+ for p in self.Prodnames[n]:
+ for f in self._first(p.prod):
+ if f not in self.First[n]:
+ self.First[n].append(f)
+ some_change = True
+ if not some_change:
+ break
+
+ return self.First
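 +
 + # For example, for the productions
 + #
 + # E : E PLUS T
 + # | T
 + # T : NUMBER
 + #
 + # compute_first() yields First['T'] == ['NUMBER'] and First['E'] == ['NUMBER'].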
+
+ # ---------------------------------------------------------------------
+ # compute_follow()
+ #
 + # Computes the follow set for every non-terminal symbol. The
+ # follow set is the set of all symbols that might follow a given
+ # non-terminal. See the Dragon book, 2nd Ed. p. 189.
+ # ---------------------------------------------------------------------
+ def compute_follow(self, start=None):
+ # If already computed, return the result
+ if self.Follow:
+ return self.Follow
+
+ # If first sets not computed yet, do that first.
+ if not self.First:
+ self.compute_first()
+
+ # Add '$end' to the follow list of the start symbol
+ for k in self.Nonterminals:
+ self.Follow[k] = []
+
+ if not start:
+ start = self.Productions[1].name
+
+ self.Follow[start] = ['$end']
+
+ while True:
+ didadd = False
+ for p in self.Productions[1:]:
+ # Here is the production set
+ for i, B in enumerate(p.prod):
+ if B in self.Nonterminals:
+ # Okay. We got a non-terminal in a production
+ fst = self._first(p.prod[i+1:])
+ hasempty = False
+ for f in fst:
+ if f != '<empty>' and f not in self.Follow[B]:
+ self.Follow[B].append(f)
+ didadd = True
+ if f == '<empty>':
+ hasempty = True
+ if hasempty or i == (len(p.prod)-1):
+ # Add elements of follow(a) to follow(b)
+ for f in self.Follow[p.name]:
+ if f not in self.Follow[B]:
+ self.Follow[B].append(f)
+ didadd = True
+ if not didadd:
+ break
+ return self.Follow
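 +
 + # Continuing the small example used for compute_first() (start symbol E):
 + # Follow['E'] == ['$end', 'PLUS'] because E is the start symbol and is
 + # followed by PLUS in "E : E PLUS T", and Follow['T'] == ['$end', 'PLUS']
 + # because T ends both E-productions, so whatever follows E also follows T.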
+
+
+ # -----------------------------------------------------------------------------
+ # build_lritems()
+ #
+ # This function walks the list of productions and builds a complete set of the
+ # LR items. The LR items are stored in two ways: First, they are uniquely
+ # numbered and placed in the list _lritems. Second, a linked list of LR items
+ # is built for each production. For example:
+ #
+ # E -> E PLUS E
+ #
+ # Creates the list
+ #
+ # [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . ]
+ # -----------------------------------------------------------------------------
+
+ def build_lritems(self):
+ for p in self.Productions:
+ lastlri = p
+ i = 0
+ lr_items = []
+ while True:
+ if i > len(p):
+ lri = None
+ else:
+ lri = LRItem(p, i)
+ # Precompute the list of productions immediately following
+ try:
+ lri.lr_after = self.Prodnames[lri.prod[i+1]]
+ except (IndexError, KeyError):
+ lri.lr_after = []
+ try:
+ lri.lr_before = lri.prod[i-1]
+ except IndexError:
+ lri.lr_before = None
+
+ lastlri.lr_next = lri
+ if not lri:
+ break
+ lr_items.append(lri)
+ lastlri = lri
+ i += 1
+ p.lr_items = lr_items
+
+# -----------------------------------------------------------------------------
+# == Class LRTable ==
+#
+# This class represents a basic table of LR parsing information.
+# Methods for generating the tables are not defined here. They are defined
+# in the derived class LRGeneratedTable.
+# -----------------------------------------------------------------------------
+
+class VersionError(YaccError):
+ pass
+
+class LRTable(object):
+ def __init__(self):
+ self.lr_action = None
+ self.lr_goto = None
+ self.lr_productions = None
+ self.lr_method = None
+
+ def read_table(self, module):
+ if isinstance(module, types.ModuleType):
+ parsetab = module
+ else:
+ exec('import %s' % module)
+ parsetab = sys.modules[module]
+
+ if parsetab._tabversion != __tabversion__:
+ raise VersionError('yacc table file version is out of date')
+
+ self.lr_action = parsetab._lr_action
+ self.lr_goto = parsetab._lr_goto
+
+ self.lr_productions = []
+ for p in parsetab._lr_productions:
+ self.lr_productions.append(MiniProduction(*p))
+
+ self.lr_method = parsetab._lr_method
+ return parsetab._lr_signature
+
+ def read_pickle(self, filename):
+ try:
+ import cPickle as pickle
+ except ImportError:
+ import pickle
+
+ if not os.path.exists(filename):
+ raise ImportError
+
+ in_f = open(filename, 'rb')
+
+ tabversion = pickle.load(in_f)
+ if tabversion != __tabversion__:
+ raise VersionError('yacc table file version is out of date')
+ self.lr_method = pickle.load(in_f)
+ signature = pickle.load(in_f)
+ self.lr_action = pickle.load(in_f)
+ self.lr_goto = pickle.load(in_f)
+ productions = pickle.load(in_f)
+
+ self.lr_productions = []
+ for p in productions:
+ self.lr_productions.append(MiniProduction(*p))
+
+ in_f.close()
+ return signature
+
+ # Bind all production function names to callable objects in pdict
+ def bind_callables(self, pdict):
+ for p in self.lr_productions:
+ p.bind(pdict)
+
+
+# -----------------------------------------------------------------------------
+# === LR Generator ===
+#
+# The following classes and functions are used to generate LR parsing tables on
+# a grammar.
+# -----------------------------------------------------------------------------
+
+# -----------------------------------------------------------------------------
+# digraph()
+# traverse()
+#
+# The following two functions are used to compute set valued functions
+# of the form:
+#
+# F(x) = F'(x) U U{F(y) | x R y}
+#
+# This is used to compute the values of Read() sets as well as FOLLOW sets
+# in LALR(1) generation.
+#
+# Inputs: X - An input set
+# R - A relation
+# FP - Set-valued function
+# ------------------------------------------------------------------------------
+
+def digraph(X, R, FP):
+ N = {}
+ for x in X:
+ N[x] = 0
+ stack = []
+ F = {}
+ for x in X:
+ if N[x] == 0:
+ traverse(x, N, stack, F, X, R, FP)
+ return F
+
+def traverse(x, N, stack, F, X, R, FP):
+ stack.append(x)
+ d = len(stack)
+ N[x] = d
+ F[x] = FP(x) # F(X) <- F'(x)
+
+ rel = R(x) # Get y's related to x
+ for y in rel:
+ if N[y] == 0:
+ traverse(y, N, stack, F, X, R, FP)
+ N[x] = min(N[x], N[y])
+ for a in F.get(y, []):
+ if a not in F[x]:
+ F[x].append(a)
+ if N[x] == d:
+ N[stack[-1]] = MAXINT
+ F[stack[-1]] = F[x]
+ element = stack.pop()
+ while element != x:
+ N[stack[-1]] = MAXINT
+ F[stack[-1]] = F[x]
+ element = stack.pop()
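+
+# A tiny worked example: with X = ['a', 'b'], R('a') == ['b'], R('b') == [],
+# FP('a') == ['1'] and FP('b') == ['2'], digraph(X, R, FP) returns
+# {'a': ['1', '2'], 'b': ['2']}: each F(x) is FP(x) unioned with F(y) for
+# every y related to x.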
+
+class LALRError(YaccError):
+ pass
+
+# -----------------------------------------------------------------------------
+# == LRGeneratedTable ==
+#
+# This class implements the LR table generation algorithm. There are no
+# public methods except for write()
+# -----------------------------------------------------------------------------
+
+class LRGeneratedTable(LRTable):
+ def __init__(self, grammar, method='LALR', log=None):
+ if method not in ['SLR', 'LALR']:
+ raise LALRError('Unsupported method %s' % method)
+
+ self.grammar = grammar
+ self.lr_method = method
+
+ # Set up the logger
+ if not log:
+ log = NullLogger()
+ self.log = log
+
+ # Internal attributes
+ self.lr_action = {} # Action table
+ self.lr_goto = {} # Goto table
+ self.lr_productions = grammar.Productions # Copy of grammar Production array
+ self.lr_goto_cache = {} # Cache of computed gotos
+ self.lr0_cidhash = {} # Cache of closures
+
+ self._add_count = 0 # Internal counter used to detect cycles
+
 + # Diagnostic information filled in by the table generator
+ self.sr_conflict = 0
+ self.rr_conflict = 0
+ self.conflicts = [] # List of conflicts
+
+ self.sr_conflicts = []
+ self.rr_conflicts = []
+
+ # Build the tables
+ self.grammar.build_lritems()
+ self.grammar.compute_first()
+ self.grammar.compute_follow()
+ self.lr_parse_table()
+
+ # Compute the LR(0) closure operation on I, where I is a set of LR(0) items.
+
+ def lr0_closure(self, I):
+ self._add_count += 1
+
+ # Add everything in I to J
+ J = I[:]
+ didadd = True
+ while didadd:
+ didadd = False
+ for j in J:
+ for x in j.lr_after:
+ if getattr(x, 'lr0_added', 0) == self._add_count:
+ continue
+ # Add B --> .G to J
+ J.append(x.lr_next)
+ x.lr0_added = self._add_count
+ didadd = True
+
+ return J
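 +
 + # For example, starting from the single item S' -> . E, the closure also
 + # pulls in an item X -> . <rhs> for every production of each nonterminal X
 + # that appears immediately after a dot, applied repeatedly until no new
 + # items can be added.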
+
+ # Compute the LR(0) goto function goto(I,X) where I is a set
+ # of LR(0) items and X is a grammar symbol. This function is written
+ # in a way that guarantees uniqueness of the generated goto sets
+ # (i.e. the same goto set will never be returned as two different Python
+ # objects). With uniqueness, we can later do fast set comparisons using
+ # id(obj) instead of element-wise comparison.
+
+ def lr0_goto(self, I, x):
+ # First we look for a previously cached entry
+ g = self.lr_goto_cache.get((id(I), x))
+ if g:
+ return g
+
+ # Now we generate the goto set in a way that guarantees uniqueness
+ # of the result
+
+ s = self.lr_goto_cache.get(x)
+ if not s:
+ s = {}
+ self.lr_goto_cache[x] = s
+
+ gs = []
+ for p in I:
+ n = p.lr_next
+ if n and n.lr_before == x:
+ s1 = s.get(id(n))
+ if not s1:
+ s1 = {}
+ s[id(n)] = s1
+ gs.append(n)
+ s = s1
+ g = s.get('$end')
+ if not g:
+ if gs:
+ g = self.lr0_closure(gs)
+ s['$end'] = g
+ else:
+ s['$end'] = gs
+ self.lr_goto_cache[(id(I), x)] = g
+ return g
+
+ # Compute the LR(0) sets of item function
+ def lr0_items(self):
+ C = [self.lr0_closure([self.grammar.Productions[0].lr_next])]
+ i = 0
+ for I in C:
+ self.lr0_cidhash[id(I)] = i
+ i += 1
+
 + # Loop over the items in C and each grammar symbol
+ i = 0
+ while i < len(C):
+ I = C[i]
+ i += 1
+
+ # Collect all of the symbols that could possibly be in the goto(I,X) sets
+ asyms = {}
+ for ii in I:
+ for s in ii.usyms:
+ asyms[s] = None
+
+ for x in asyms:
+ g = self.lr0_goto(I, x)
+ if not g or id(g) in self.lr0_cidhash:
+ continue
+ self.lr0_cidhash[id(g)] = len(C)
+ C.append(g)
+
+ return C
+
+ # -----------------------------------------------------------------------------
+ # ==== LALR(1) Parsing ====
+ #
+ # LALR(1) parsing is almost exactly the same as SLR except that instead of
+ # relying upon Follow() sets when performing reductions, a more selective
+ # lookahead set that incorporates the state of the LR(0) machine is utilized.
+ # Thus, we mainly just have to focus on calculating the lookahead sets.
+ #
 + # The method used here is due to DeRemer and Pennello (1982).
 + #
 + # DeRemer, F. L., and T. J. Pennello: "Efficient Computation of LALR(1)
 + # Lookahead Sets", ACM Transactions on Programming Languages and Systems,
 + # Vol. 4, No. 4, Oct. 1982, pp. 615-649
+ #
+ # Further details can also be found in:
+ #
+ # J. Tremblay and P. Sorenson, "The Theory and Practice of Compiler Writing",
+ # McGraw-Hill Book Company, (1985).
+ #
+ # -----------------------------------------------------------------------------
+
+ # -----------------------------------------------------------------------------
+ # compute_nullable_nonterminals()
+ #
 + # Creates a set containing all of the non-terminals that might produce
 + # an empty production.
+ # -----------------------------------------------------------------------------
+
+ def compute_nullable_nonterminals(self):
+ nullable = set()
+ num_nullable = 0
+ while True:
+ for p in self.grammar.Productions[1:]:
+ if p.len == 0:
+ nullable.add(p.name)
+ continue
+ for t in p.prod:
+ if t not in nullable:
+ break
+ else:
+ nullable.add(p.name)
+ if len(nullable) == num_nullable:
+ break
+ num_nullable = len(nullable)
+ return nullable
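 +
 + # For example, with the rules
 + #
 + # empty :
 + # optexpr : empty
 + # | expr
 + #
 + # both 'empty' and 'optexpr' end up in the returned nullable set.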
+
+ # -----------------------------------------------------------------------------
 + # find_nonterminal_transitions(C)
+ #
 + # Given a set of LR(0) items, this function finds all of the non-terminal
+ # transitions. These are transitions in which a dot appears immediately before
+ # a non-terminal. Returns a list of tuples of the form (state,N) where state
+ # is the state number and N is the nonterminal symbol.
+ #
+ # The input C is the set of LR(0) items.
+ # -----------------------------------------------------------------------------
+
+ def find_nonterminal_transitions(self, C):
+ trans = []
+ for stateno, state in enumerate(C):
+ for p in state:
+ if p.lr_index < p.len - 1:
+ t = (stateno, p.prod[p.lr_index+1])
+ if t[1] in self.grammar.Nonterminals:
+ if t not in trans:
+ trans.append(t)
+ return trans
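 +
 + # For example, a returned entry (0, 'expr') would mean that some item in
 + # state 0 has its dot immediately before the nonterminal 'expr'.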
+
+ # -----------------------------------------------------------------------------
+ # dr_relation()
+ #
+ # Computes the DR(p,A) relationships for non-terminal transitions. The input
+ # is a tuple (state,N) where state is a number and N is a nonterminal symbol.
+ #
+ # Returns a list of terminals.
+ # -----------------------------------------------------------------------------
+
+ def dr_relation(self, C, trans, nullable):
+ dr_set = {}
+ state, N = trans
+ terms = []
+
+ g = self.lr0_goto(C[state], N)
+ for p in g:
+ if p.lr_index < p.len - 1:
+ a = p.prod[p.lr_index+1]
+ if a in self.grammar.Terminals:
+ if a not in terms:
+ terms.append(a)
+
+ # This extra bit is to handle the start state
+ if state == 0 and N == self.grammar.Productions[0].prod[0]:
+ terms.append('$end')
+
+ return terms
+
+ # -----------------------------------------------------------------------------
+ # reads_relation()
+ #
+ # Computes the READS() relation (p,A) READS (t,C).
+ # -----------------------------------------------------------------------------
+
+ def reads_relation(self, C, trans, empty):
+ # Look for empty transitions
+ rel = []
+ state, N = trans
+
+ g = self.lr0_goto(C[state], N)
+ j = self.lr0_cidhash.get(id(g), -1)
+ for p in g:
+ if p.lr_index < p.len - 1:
+ a = p.prod[p.lr_index + 1]
+ if a in empty:
+ rel.append((j, a))
+
+ return rel
+
+ # -----------------------------------------------------------------------------
+ # compute_lookback_includes()
+ #
+ # Determines the lookback and includes relations
+ #
+ # LOOKBACK:
+ #
+ # This relation is determined by running the LR(0) state machine forward.
+ # For example, starting with a production "N : . A B C", we run it forward
+ # to obtain "N : A B C ." We then build a relationship between this final
+ # state and the starting state. These relationships are stored in a dictionary
+ # lookdict.
+ #
+ # INCLUDES:
+ #
+ # Computes the INCLUDE() relation (p,A) INCLUDES (p',B).
+ #
+ # This relation is used to determine non-terminal transitions that occur
+ # inside of other non-terminal transition states. (p,A) INCLUDES (p', B)
+ # if the following holds:
+ #
+ # B -> LAT, where T -> epsilon and p' -L-> p
+ #
+ # L is essentially a prefix (which may be empty), T is a suffix that must be
+ # able to derive an empty string. State p' must lead to state p with the string L.
+ #
+ # -----------------------------------------------------------------------------
+
+ def compute_lookback_includes(self, C, trans, nullable):
+ lookdict = {} # Dictionary of lookback relations
+ includedict = {} # Dictionary of include relations
+
+ # Make a dictionary of non-terminal transitions
+ dtrans = {}
+ for t in trans:
+ dtrans[t] = 1
+
+ # Loop over all transitions and compute lookbacks and includes
+ for state, N in trans:
+ lookb = []
+ includes = []
+ for p in C[state]:
+ if p.name != N:
+ continue
+
+ # Okay, we have a name match. We now follow the production all the way
+ # through the state machine until we get the . on the right hand side
+
+ lr_index = p.lr_index
+ j = state
+ while lr_index < p.len - 1:
+ lr_index = lr_index + 1
+ t = p.prod[lr_index]
+
+ # Check to see if this symbol and state are a non-terminal transition
+ if (j, t) in dtrans:
 + # Yes. Okay, there is some chance that this is an includes relation;
 + # the only way to know for certain is whether the rest of the
 + # production derives empty.
+
+ li = lr_index + 1
+ while li < p.len:
+ if p.prod[li] in self.grammar.Terminals:
+ break # No forget it
+ if p.prod[li] not in nullable:
+ break
+ li = li + 1
+ else:
+ # Appears to be a relation between (j,t) and (state,N)
+ includes.append((j, t))
+
+ g = self.lr0_goto(C[j], t) # Go to next set
+ j = self.lr0_cidhash.get(id(g), -1) # Go to next state
+
+ # When we get here, j is the final state, now we have to locate the production
+ for r in C[j]:
+ if r.name != p.name:
+ continue
+ if r.len != p.len:
+ continue
+ i = 0
 + # This loop is comparing a production ". A B C" with "A B C ."
+ while i < r.lr_index:
+ if r.prod[i] != p.prod[i+1]:
+ break
+ i = i + 1
+ else:
+ lookb.append((j, r))
+ for i in includes:
+ if i not in includedict:
+ includedict[i] = []
+ includedict[i].append((state, N))
+ lookdict[(state, N)] = lookb
+
+ return lookdict, includedict
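 +
 + # In short: lookdict maps each nonterminal transition (state, N) to the
 + # (final_state, production) pairs obtained by running an N-production
 + # forward to its end (the LOOKBACK relation), while includedict encodes
 + # the INCLUDES relation used to propagate follow information between
 + # transitions.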
+
+ # -----------------------------------------------------------------------------
+ # compute_read_sets()
+ #
+ # Given a set of LR(0) items, this function computes the read sets.
+ #
+ # Inputs: C = Set of LR(0) items
+ # ntrans = Set of nonterminal transitions
+ # nullable = Set of empty transitions
+ #
+ # Returns a set containing the read sets
+ # -----------------------------------------------------------------------------
+
+ def compute_read_sets(self, C, ntrans, nullable):
+ FP = lambda x: self.dr_relation(C, x, nullable)
+ R = lambda x: self.reads_relation(C, x, nullable)
+ F = digraph(ntrans, R, FP)
+ return F
+
+ # -----------------------------------------------------------------------------
+ # compute_follow_sets()
+ #
+ # Given a set of LR(0) items, a set of non-terminal transitions, a readset,
+ # and an include set, this function computes the follow sets
+ #
+ # Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)}
+ #
+ # Inputs:
+ # ntrans = Set of nonterminal transitions
+ # readsets = Readset (previously computed)
+ # inclsets = Include sets (previously computed)
+ #
+ # Returns a set containing the follow sets
+ # -----------------------------------------------------------------------------
+
+ def compute_follow_sets(self, ntrans, readsets, inclsets):
+ FP = lambda x: readsets[x]
+ R = lambda x: inclsets.get(x, [])
+ F = digraph(ntrans, R, FP)
+ return F
+
+ # -----------------------------------------------------------------------------
+ # add_lookaheads()
+ #
+ # Attaches the lookahead symbols to grammar rules.
+ #
+ # Inputs: lookbacks - Set of lookback relations
+ # followset - Computed follow set
+ #
+ # This function directly attaches the lookaheads to productions contained
+ # in the lookbacks set
+ # -----------------------------------------------------------------------------
+
+ def add_lookaheads(self, lookbacks, followset):
+ for trans, lb in lookbacks.items():
+ # Loop over productions in lookback
+ for state, p in lb:
+ if state not in p.lookaheads:
+ p.lookaheads[state] = []
+ f = followset.get(trans, [])
+ for a in f:
+ if a not in p.lookaheads[state]:
+ p.lookaheads[state].append(a)
+
+ # -----------------------------------------------------------------------------
+ # add_lalr_lookaheads()
+ #
+ # This function does all of the work of adding lookahead information for use
+ # with LALR parsing
+ # -----------------------------------------------------------------------------
+
+ def add_lalr_lookaheads(self, C):
+ # Determine all of the nullable nonterminals
+ nullable = self.compute_nullable_nonterminals()
+
+ # Find all non-terminal transitions
+ trans = self.find_nonterminal_transitions(C)
+
+ # Compute read sets
+ readsets = self.compute_read_sets(C, trans, nullable)
+
+ # Compute lookback/includes relations
+ lookd, included = self.compute_lookback_includes(C, trans, nullable)
+
+ # Compute LALR FOLLOW sets
+ followsets = self.compute_follow_sets(trans, readsets, included)
+
+ # Add all of the lookaheads
+ self.add_lookaheads(lookd, followsets)
+
+ # -----------------------------------------------------------------------------
+ # lr_parse_table()
+ #
+ # This function constructs the parse tables for SLR or LALR
+ # -----------------------------------------------------------------------------
+ def lr_parse_table(self):
+ Productions = self.grammar.Productions
+ Precedence = self.grammar.Precedence
+ goto = self.lr_goto # Goto array
+ action = self.lr_action # Action array
+ log = self.log # Logger for output
+
+ actionp = {} # Action production array (temporary)
+
+ log.info('Parsing method: %s', self.lr_method)
+
+ # Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items
+ # This determines the number of states
+
+ C = self.lr0_items()
+
+ if self.lr_method == 'LALR':
+ self.add_lalr_lookaheads(C)
+
+ # Build the parser table, state by state
+ st = 0
+ for I in C:
+ # Loop over each production in I
+ actlist = [] # List of actions
+ st_action = {}
+ st_actionp = {}
+ st_goto = {}
+ log.info('')
+ log.info('state %d', st)
+ log.info('')
+ for p in I:
+ log.info(' (%d) %s', p.number, p)
+ log.info('')
+
+ for p in I:
+ if p.len == p.lr_index + 1:
+ if p.name == "S'":
+ # Start symbol. Accept!
+ st_action['$end'] = 0
+ st_actionp['$end'] = p
+ else:
+ # We are at the end of a production. Reduce!
+ if self.lr_method == 'LALR':
+ laheads = p.lookaheads[st]
+ else:
+ laheads = self.grammar.Follow[p.name]
+ for a in laheads:
+ actlist.append((a, p, 'reduce using rule %d (%s)' % (p.number, p)))
+ r = st_action.get(a)
+ if r is not None:
+ # Whoa. Have a shift/reduce or reduce/reduce conflict
+ if r > 0:
+ # Need to decide on shift or reduce here
+ # By default we favor shifting. Need to add
+ # some precedence rules here.
+
+ # Shift precedence comes from the token
+ sprec, slevel = Precedence.get(a, ('right', 0))
+
+ # Reduce precedence comes from rule being reduced (p)
+ rprec, rlevel = Productions[p.number].prec
+
+ if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')):
+ # We really need to reduce here.
+ st_action[a] = -p.number
+ st_actionp[a] = p
+ if not slevel and not rlevel:
+ log.info(' ! shift/reduce conflict for %s resolved as reduce', a)
+ self.sr_conflicts.append((st, a, 'reduce'))
+ Productions[p.number].reduced += 1
+ elif (slevel == rlevel) and (rprec == 'nonassoc'):
+ st_action[a] = None
+ else:
+ # Hmmm. Guess we'll keep the shift
+ if not rlevel:
+ log.info(' ! shift/reduce conflict for %s resolved as shift', a)
+ self.sr_conflicts.append((st, a, 'shift'))
+ elif r < 0:
+ # Reduce/reduce conflict. In this case, we favor the rule
+ # that was defined first in the grammar file
+ oldp = Productions[-r]
+ pp = Productions[p.number]
+ if oldp.line > pp.line:
+ st_action[a] = -p.number
+ st_actionp[a] = p
+ chosenp, rejectp = pp, oldp
+ Productions[p.number].reduced += 1
+ Productions[oldp.number].reduced -= 1
+ else:
+ chosenp, rejectp = oldp, pp
+ self.rr_conflicts.append((st, chosenp, rejectp))
+ log.info(' ! reduce/reduce conflict for %s resolved using rule %d (%s)',
+ a, st_actionp[a].number, st_actionp[a])
+ else:
+ raise LALRError('Unknown conflict in state %d' % st)
+ else:
+ st_action[a] = -p.number
+ st_actionp[a] = p
+ Productions[p.number].reduced += 1
+ else:
+ i = p.lr_index
+ a = p.prod[i+1] # Get symbol right after the "."
+ if a in self.grammar.Terminals:
+ g = self.lr0_goto(I, a)
+ j = self.lr0_cidhash.get(id(g), -1)
+ if j >= 0:
+ # We are in a shift state
+ actlist.append((a, p, 'shift and go to state %d' % j))
+ r = st_action.get(a)
+ if r is not None:
+ # Whoa have a shift/reduce or shift/shift conflict
+ if r > 0:
+ if r != j:
+ raise LALRError('Shift/shift conflict in state %d' % st)
+ elif r < 0:
+ # Do a precedence check.
+ # - if precedence of reduce rule is higher, we reduce.
+ # - if precedence of reduce is same and left assoc, we reduce.
+ # - otherwise we shift
+
+ # Shift precedence comes from the token
+ sprec, slevel = Precedence.get(a, ('right', 0))
+
+ # Reduce precedence comes from the rule that could have been reduced
+ rprec, rlevel = Productions[st_actionp[a].number].prec
+
+ if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')):
+ # We decide to shift here... highest precedence to shift
+ Productions[st_actionp[a].number].reduced -= 1
+ st_action[a] = j
+ st_actionp[a] = p
+ if not rlevel:
+ log.info(' ! shift/reduce conflict for %s resolved as shift', a)
+ self.sr_conflicts.append((st, a, 'shift'))
+ elif (slevel == rlevel) and (rprec == 'nonassoc'):
+ st_action[a] = None
+ else:
+ # Hmmm. Guess we'll keep the reduce
+ if not slevel and not rlevel:
+ log.info(' ! shift/reduce conflict for %s resolved as reduce', a)
+ self.sr_conflicts.append((st, a, 'reduce'))
+
+ else:
+ raise LALRError('Unknown conflict in state %d' % st)
+ else:
+ st_action[a] = j
+ st_actionp[a] = p
+
+ # Print the actions associated with each terminal
+ _actprint = {}
+ for a, p, m in actlist:
+ if a in st_action:
+ if p is st_actionp[a]:
+ log.info(' %-15s %s', a, m)
+ _actprint[(a, m)] = 1
+ log.info('')
+ # Print the actions that were not used. (debugging)
+ not_used = 0
+ for a, p, m in actlist:
+ if a in st_action:
+ if p is not st_actionp[a]:
+ if not (a, m) in _actprint:
+ log.debug(' ! %-15s [ %s ]', a, m)
+ not_used = 1
+ _actprint[(a, m)] = 1
+ if not_used:
+ log.debug('')
+
+ # Construct the goto table for this state
+
+ nkeys = {}
+ for ii in I:
+ for s in ii.usyms:
+ if s in self.grammar.Nonterminals:
+ nkeys[s] = None
+ for n in nkeys:
+ g = self.lr0_goto(I, n)
+ j = self.lr0_cidhash.get(id(g), -1)
+ if j >= 0:
+ st_goto[n] = j
+ log.info(' %-30s shift and go to state %d', n, j)
+
+ action[st] = st_action
+ actionp[st] = st_actionp
+ goto[st] = st_goto
+ st += 1
+
+ # -----------------------------------------------------------------------------
+ # write()
+ #
+ # This function writes the LR parsing tables to a file
+ # -----------------------------------------------------------------------------
+
+ def write_table(self, tabmodule, outputdir='', signature=''):
+ if isinstance(tabmodule, types.ModuleType):
+ raise IOError("Won't overwrite existing tabmodule")
+
+ basemodulename = tabmodule.split('.')[-1]
+ filename = os.path.join(outputdir, basemodulename) + '.py'
+ try:
+ f = open(filename, 'w')
+
+ f.write('''
+# %s
+# This file is automatically generated. Do not edit.
+_tabversion = %r
+
+_lr_method = %r
+
+_lr_signature = %r
+ ''' % (os.path.basename(filename), __tabversion__, self.lr_method, signature))
+
+ # Change smaller to 0 to go back to original tables
+ smaller = 1
+
+ # Factor out names to try and make smaller
+ if smaller:
+ items = {}
+
+ for s, nd in self.lr_action.items():
+ for name, v in nd.items():
+ i = items.get(name)
+ if not i:
+ i = ([], [])
+ items[name] = i
+ i[0].append(s)
+ i[1].append(v)
+
+ f.write('\n_lr_action_items = {')
+ for k, v in items.items():
+ f.write('%r:([' % k)
+ for i in v[0]:
+ f.write('%r,' % i)
+ f.write('],[')
+ for i in v[1]:
+ f.write('%r,' % i)
+
+ f.write(']),')
+ f.write('}\n')
+
+ f.write('''
+_lr_action = {}
+for _k, _v in _lr_action_items.items():
+ for _x,_y in zip(_v[0],_v[1]):
+ if not _x in _lr_action: _lr_action[_x] = {}
+ _lr_action[_x][_k] = _y
+del _lr_action_items
+''')
+
+ else:
+ f.write('\n_lr_action = { ')
+ for k, v in self.lr_action.items():
+ f.write('(%r,%r):%r,' % (k[0], k[1], v))
+ f.write('}\n')
+
+ if smaller:
+ # Factor out names to try and make smaller
+ items = {}
+
+ for s, nd in self.lr_goto.items():
+ for name, v in nd.items():
+ i = items.get(name)
+ if not i:
+ i = ([], [])
+ items[name] = i
+ i[0].append(s)
+ i[1].append(v)
+
+ f.write('\n_lr_goto_items = {')
+ for k, v in items.items():
+ f.write('%r:([' % k)
+ for i in v[0]:
+ f.write('%r,' % i)
+ f.write('],[')
+ for i in v[1]:
+ f.write('%r,' % i)
+
+ f.write(']),')
+ f.write('}\n')
+
+ f.write('''
+_lr_goto = {}
+for _k, _v in _lr_goto_items.items():
+ for _x, _y in zip(_v[0], _v[1]):
+ if not _x in _lr_goto: _lr_goto[_x] = {}
+ _lr_goto[_x][_k] = _y
+del _lr_goto_items
+''')
+ else:
+ f.write('\n_lr_goto = { ')
+ for k, v in self.lr_goto.items():
+ f.write('(%r,%r):%r,' % (k[0], k[1], v))
+ f.write('}\n')
+
+ # Write production table
+ f.write('_lr_productions = [\n')
+ for p in self.lr_productions:
+ if p.func:
+ f.write(' (%r,%r,%d,%r,%r,%d),\n' % (p.str, p.name, p.len,
+ p.func, os.path.basename(p.file), p.line))
+ else:
+ f.write(' (%r,%r,%d,None,None,None),\n' % (str(p), p.name, p.len))
+ f.write(']\n')
+ f.close()
+
+ except IOError as e:
+ raise
+
+
+ # -----------------------------------------------------------------------------
+ # pickle_table()
+ #
+ # This function pickles the LR parsing tables to a supplied file object
+ # -----------------------------------------------------------------------------
+
+ def pickle_table(self, filename, signature=''):
+ try:
+ import cPickle as pickle
+ except ImportError:
+ import pickle
+ with open(filename, 'wb') as outf:
+ pickle.dump(__tabversion__, outf, pickle_protocol)
+ pickle.dump(self.lr_method, outf, pickle_protocol)
+ pickle.dump(signature, outf, pickle_protocol)
+ pickle.dump(self.lr_action, outf, pickle_protocol)
+ pickle.dump(self.lr_goto, outf, pickle_protocol)
+
+ outp = []
+ for p in self.lr_productions:
+ if p.func:
+ outp.append((p.str, p.name, p.len, p.func, os.path.basename(p.file), p.line))
+ else:
+ outp.append((str(p), p.name, p.len, None, None, None))
+ pickle.dump(outp, outf, pickle_protocol)
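 +
 + # Note that the dump order above (table version, method, signature, action
 + # table, goto table, productions) must match the load order used by
 + # LRTable.read_pickle() earlier in this file.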
+
+# -----------------------------------------------------------------------------
+# === INTROSPECTION ===
+#
+# The following functions and classes are used to implement the PLY
+# introspection features followed by the yacc() function itself.
+# -----------------------------------------------------------------------------
+
+# -----------------------------------------------------------------------------
+# get_caller_module_dict()
+#
+# This function returns a dictionary containing all of the symbols defined within
+# a caller further down the call stack. This is used to get the environment
+# associated with the yacc() call if none was provided.
+# -----------------------------------------------------------------------------
+
+def get_caller_module_dict(levels):
+ f = sys._getframe(levels)
+ ldict = f.f_globals.copy()
+ if f.f_globals != f.f_locals:
+ ldict.update(f.f_locals)
+ return ldict
+
+# -----------------------------------------------------------------------------
+# parse_grammar()
+#
+# This takes a raw grammar rule string and parses it into production data
+# -----------------------------------------------------------------------------
+def parse_grammar(doc, file, line):
+ grammar = []
+ # Split the doc string into lines
+ pstrings = doc.splitlines()
+ lastp = None
+ dline = line
+ for ps in pstrings:
+ dline += 1
+ p = ps.split()
+ if not p:
+ continue
+ try:
+ if p[0] == '|':
+ # This is a continuation of a previous rule
+ if not lastp:
+ raise SyntaxError("%s:%d: Misplaced '|'" % (file, dline))
+ prodname = lastp
+ syms = p[1:]
+ else:
+ prodname = p[0]
+ lastp = prodname
+ syms = p[2:]
+ assign = p[1]
+ if assign != ':' and assign != '::=':
+ raise SyntaxError("%s:%d: Syntax error. Expected ':'" % (file, dline))
+
+ grammar.append((file, dline, prodname, syms))
+ except SyntaxError:
+ raise
+ except Exception:
+ raise SyntaxError('%s:%d: Syntax error in rule %r' % (file, dline, ps.strip()))
+
+ return grammar
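+
+# For example, a rule docstring such as
+#
+# expr : expr PLUS term
+# | term
+#
+# starting at (hypothetically) line 10 of calc.py is parsed into
+# [('calc.py', 11, 'expr', ['expr', 'PLUS', 'term']),
+# ('calc.py', 12, 'expr', ['term'])]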
+
+# -----------------------------------------------------------------------------
+# ParserReflect()
+#
+# This class represents information extracted for building a parser including
+# start symbol, error function, tokens, precedence list, action functions,
+# etc.
+# -----------------------------------------------------------------------------
+class ParserReflect(object):
+ def __init__(self, pdict, log=None):
+ self.pdict = pdict
+ self.start = None
+ self.error_func = None
+ self.tokens = None
+ self.modules = set()
+ self.grammar = []
+ self.error = False
+
+ if log is None:
+ self.log = PlyLogger(sys.stderr)
+ else:
+ self.log = log
+
+ # Get all of the basic information
+ def get_all(self):
+ self.get_start()
+ self.get_error_func()
+ self.get_tokens()
+ self.get_precedence()
+ self.get_pfunctions()
+
+ # Validate all of the information
+ def validate_all(self):
+ self.validate_start()
+ self.validate_error_func()
+ self.validate_tokens()
+ self.validate_precedence()
+ self.validate_pfunctions()
+ self.validate_modules()
+ return self.error
+
+ # Compute a signature over the grammar
+ def signature(self):
+ parts = []
+ try:
+ if self.start:
+ parts.append(self.start)
+ if self.prec:
+ parts.append(''.join([''.join(p) for p in self.prec]))
+ if self.tokens:
+ parts.append(' '.join(self.tokens))
+ for f in self.pfuncs:
+ if f[3]:
+ parts.append(f[3])
+ except (TypeError, ValueError):
+ pass
+ return ''.join(parts)
+
+ # -----------------------------------------------------------------------------
+ # validate_modules()
+ #
+ # This method checks to see if there are duplicated p_rulename() functions
+ # in the parser module file. Without this function, it is really easy for
+ # users to make mistakes by cutting and pasting code fragments (and it's a real
+ # bugger to try and figure out why the resulting parser doesn't work). Therefore,
+ # we just do a little regular expression pattern matching of def statements
+ # to try and detect duplicates.
+ # -----------------------------------------------------------------------------
+
+ def validate_modules(self):
+ # Match def p_funcname(
+ fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(')
+
+ for module in self.modules:
+ try:
+ lines, linen = inspect.getsourcelines(module)
+ except IOError:
+ continue
+
+ counthash = {}
+ for linen, line in enumerate(lines):
+ linen += 1
+ m = fre.match(line)
+ if m:
+ name = m.group(1)
+ prev = counthash.get(name)
+ if not prev:
+ counthash[name] = linen
+ else:
+ filename = inspect.getsourcefile(module)
+ self.log.warning('%s:%d: Function %s redefined. Previously defined on line %d',
+ filename, linen, name, prev)
+
+ # Get the start symbol
+ def get_start(self):
+ self.start = self.pdict.get('start')
+
+ # Validate the start symbol
+ def validate_start(self):
+ if self.start is not None:
+ if not isinstance(self.start, string_types):
+ self.log.error("'start' must be a string")
+
+ # Look for error handler
+ def get_error_func(self):
+ self.error_func = self.pdict.get('p_error')
+
+ # Validate the error function
+ def validate_error_func(self):
+ if self.error_func:
+ if isinstance(self.error_func, types.FunctionType):
+ ismethod = 0
+ elif isinstance(self.error_func, types.MethodType):
+ ismethod = 1
+ else:
+ self.log.error("'p_error' defined, but is not a function or method")
+ self.error = True
+ return
+
+ eline = self.error_func.__code__.co_firstlineno
+ efile = self.error_func.__code__.co_filename
+ module = inspect.getmodule(self.error_func)
+ self.modules.add(module)
+
+ argcount = self.error_func.__code__.co_argcount - ismethod
+ if argcount != 1:
+ self.log.error('%s:%d: p_error() requires 1 argument', efile, eline)
+ self.error = True
+
+ # Get the tokens map
+ def get_tokens(self):
+ tokens = self.pdict.get('tokens')
+ if not tokens:
+ self.log.error('No token list is defined')
+ self.error = True
+ return
+
+ if not isinstance(tokens, (list, tuple)):
+ self.log.error('tokens must be a list or tuple')
+ self.error = True
+ return
+
+ if not tokens:
+ self.log.error('tokens is empty')
+ self.error = True
+ return
+
+ self.tokens = tokens
+
+ # Validate the tokens
+ def validate_tokens(self):
+ # Validate the tokens.
+ if 'error' in self.tokens:
+ self.log.error("Illegal token name 'error'. Is a reserved word")
+ self.error = True
+ return
+
+ terminals = set()
+ for n in self.tokens:
+ if n in terminals:
+ self.log.warning('Token %r multiply defined', n)
+ terminals.add(n)
+
+ # Get the precedence map (if any)
+ def get_precedence(self):
+ self.prec = self.pdict.get('precedence')
+
+ # Validate and parse the precedence map
+ def validate_precedence(self):
+ preclist = []
+ if self.prec:
+ if not isinstance(self.prec, (list, tuple)):
+ self.log.error('precedence must be a list or tuple')
+ self.error = True
+ return
+ for level, p in enumerate(self.prec):
+ if not isinstance(p, (list, tuple)):
+ self.log.error('Bad precedence table')
+ self.error = True
+ return
+
+ if len(p) < 2:
+ self.log.error('Malformed precedence entry %s. Must be (assoc, term, ..., term)', p)
+ self.error = True
+ return
+ assoc = p[0]
+ if not isinstance(assoc, string_types):
+ self.log.error('precedence associativity must be a string')
+ self.error = True
+ return
+ for term in p[1:]:
+ if not isinstance(term, string_types):
+ self.log.error('precedence items must be strings')
+ self.error = True
+ return
+ preclist.append((term, assoc, level+1))
+ self.preclist = preclist
+
+ # Get all p_functions from the grammar
+ def get_pfunctions(self):
+ p_functions = []
+ for name, item in self.pdict.items():
+ if not name.startswith('p_') or name == 'p_error':
+ continue
+ if isinstance(item, (types.FunctionType, types.MethodType)):
+ line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno)
+ module = inspect.getmodule(item)
+ p_functions.append((line, module, name, item.__doc__))
+
+ # Sort all of the actions by line number; make sure to stringify
+ # modules to make them sortable, since `line` may not uniquely sort all
+ # p functions
+ p_functions.sort(key=lambda p_function: (
+ p_function[0],
+ str(p_function[1]),
+ p_function[2],
+ p_function[3]))
+ self.pfuncs = p_functions
+
+ # Validate all of the p_functions
+ def validate_pfunctions(self):
+ grammar = []
+ # Check for non-empty symbols
+ if len(self.pfuncs) == 0:
+ self.log.error('no rules of the form p_rulename are defined')
+ self.error = True
+ return
+
+ for line, module, name, doc in self.pfuncs:
+ file = inspect.getsourcefile(module)
+ func = self.pdict[name]
+ if isinstance(func, types.MethodType):
+ reqargs = 2
+ else:
+ reqargs = 1
+ if func.__code__.co_argcount > reqargs:
+ self.log.error('%s:%d: Rule %r has too many arguments', file, line, func.__name__)
+ self.error = True
+ elif func.__code__.co_argcount < reqargs:
+ self.log.error('%s:%d: Rule %r requires an argument', file, line, func.__name__)
+ self.error = True
+ elif not func.__doc__:
+ self.log.warning('%s:%d: No documentation string specified in function %r (ignored)',
+ file, line, func.__name__)
+ else:
+ try:
+ parsed_g = parse_grammar(doc, file, line)
+ for g in parsed_g:
+ grammar.append((name, g))
+ except SyntaxError as e:
+ self.log.error(str(e))
+ self.error = True
+
+ # Looks like a valid grammar rule
+ # Mark the file in which defined.
+ self.modules.add(module)
+
+ # Secondary validation step that looks for p_ definitions that are not functions
+ # or functions that look like they might be grammar rules.
+
+ for n, v in self.pdict.items():
+ if n.startswith('p_') and isinstance(v, (types.FunctionType, types.MethodType)):
+ continue
+ if n.startswith('t_'):
+ continue
+ if n.startswith('p_') and n != 'p_error':
+ self.log.warning('%r not defined as a function', n)
+ if ((isinstance(v, types.FunctionType) and v.__code__.co_argcount == 1) or
+ (isinstance(v, types.MethodType) and v.__func__.__code__.co_argcount == 2)):
+ if v.__doc__:
+ try:
+ doc = v.__doc__.split(' ')
+ if doc[1] == ':':
+ self.log.warning('%s:%d: Possible grammar rule %r defined without p_ prefix',
+ v.__code__.co_filename, v.__code__.co_firstlineno, n)
+ except IndexError:
+ pass
+
+ self.grammar = grammar
+
+# -----------------------------------------------------------------------------
+# yacc(module)
+#
+# Build a parser
+# -----------------------------------------------------------------------------
+
+def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None,
+ check_recursion=True, optimize=False, write_tables=True, debugfile=debug_file,
+ outputdir=None, debuglog=None, errorlog=None, picklefile=None):
+
+ if tabmodule is None:
+ tabmodule = tab_module
+
+ # Reference to the parsing method of the last built parser
+ global parse
+
+ # If pickling is enabled, table files are not created
+ if picklefile:
+ write_tables = 0
+
+ if errorlog is None:
+ errorlog = PlyLogger(sys.stderr)
+
+ # Get the module dictionary used for the parser
+ if module:
+ _items = [(k, getattr(module, k)) for k in dir(module)]
+ pdict = dict(_items)
+ # If no __file__ attribute is available, try to obtain it from the __module__ instead
+ if '__file__' not in pdict:
+ pdict['__file__'] = sys.modules[pdict['__module__']].__file__
+ else:
+ pdict = get_caller_module_dict(2)
+
+ if outputdir is None:
+ # If no output directory is set, the location of the output files
+ # is determined according to the following rules:
+ # - If tabmodule specifies a package, files go into that package directory
+ # - Otherwise, files go in the same directory as the specifying module
+ if isinstance(tabmodule, types.ModuleType):
+ srcfile = tabmodule.__file__
+ else:
+ if '.' not in tabmodule:
+ srcfile = pdict['__file__']
+ else:
+ parts = tabmodule.split('.')
+ pkgname = '.'.join(parts[:-1])
+ exec('import %s' % pkgname)
+ srcfile = getattr(sys.modules[pkgname], '__file__', '')
+ outputdir = os.path.dirname(srcfile)
+
 + # Determine if the module defining the grammar is part of a package or not.
+ # If so, fix the tabmodule setting so that tables load correctly
+ pkg = pdict.get('__package__')
+ if pkg and isinstance(tabmodule, str):
+ if '.' not in tabmodule:
+ tabmodule = pkg + '.' + tabmodule
+
+
+
+ # Set start symbol if it's specified directly using an argument
+ if start is not None:
+ pdict['start'] = start
+
+ # Collect parser information from the dictionary
+ pinfo = ParserReflect(pdict, log=errorlog)
+ pinfo.get_all()
+
+ if pinfo.error:
+ raise YaccError('Unable to build parser')
+
+ # Check signature against table files (if any)
+ signature = pinfo.signature()
+
+ # Read the tables
+ try:
+ lr = LRTable()
+ if picklefile:
+ read_signature = lr.read_pickle(picklefile)
+ else:
+ read_signature = lr.read_table(tabmodule)
+ if optimize or (read_signature == signature):
+ try:
+ lr.bind_callables(pinfo.pdict)
+ parser = LRParser(lr, pinfo.error_func)
+ parse = parser.parse
+ return parser
+ except Exception as e:
+ errorlog.warning('There was a problem loading the table file: %r', e)
+ except VersionError as e:
+ errorlog.warning(str(e))
+ except ImportError:
+ pass
+
+ if debuglog is None:
+ if debug:
+ try:
+ debuglog = PlyLogger(open(os.path.join(outputdir, debugfile), 'w'))
+ except IOError as e:
+ errorlog.warning("Couldn't open %r. %s" % (debugfile, e))
+ debuglog = NullLogger()
+ else:
+ debuglog = NullLogger()
+
+ debuglog.info('Created by PLY version %s (http://www.dabeaz.com/ply)', __version__)
+
+ errors = False
+
+ # Validate the parser information
+ if pinfo.validate_all():
+ raise YaccError('Unable to build parser')
+
+ if not pinfo.error_func:
+ errorlog.warning('no p_error() function is defined')
+
+ # Create a grammar object
+ grammar = Grammar(pinfo.tokens)
+
+ # Set precedence level for terminals
+ for term, assoc, level in pinfo.preclist:
+ try:
+ grammar.set_precedence(term, assoc, level)
+ except GrammarError as e:
+ errorlog.warning('%s', e)
+
+ # Add productions to the grammar
+ for funcname, gram in pinfo.grammar:
+ file, line, prodname, syms = gram
+ try:
+ grammar.add_production(prodname, syms, funcname, file, line)
+ except GrammarError as e:
+ errorlog.error('%s', e)
+ errors = True
+
+ # Set the grammar start symbols
+ try:
+ if start is None:
+ grammar.set_start(pinfo.start)
+ else:
+ grammar.set_start(start)
+ except GrammarError as e:
+ errorlog.error(str(e))
+ errors = True
+
+ if errors:
+ raise YaccError('Unable to build parser')
+
+ # Verify the grammar structure
+ undefined_symbols = grammar.undefined_symbols()
+ for sym, prod in undefined_symbols:
+ errorlog.error('%s:%d: Symbol %r used, but not defined as a token or a rule', prod.file, prod.line, sym)
+ errors = True
+
+ unused_terminals = grammar.unused_terminals()
+ if unused_terminals:
+ debuglog.info('')
+ debuglog.info('Unused terminals:')
+ debuglog.info('')
+ for term in unused_terminals:
+ errorlog.warning('Token %r defined, but not used', term)
+ debuglog.info(' %s', term)
+
+ # Print out all productions to the debug log
+ if debug:
+ debuglog.info('')
+ debuglog.info('Grammar')
+ debuglog.info('')
+ for n, p in enumerate(grammar.Productions):
+ debuglog.info('Rule %-5d %s', n, p)
+
+ # Find unused non-terminals
+ unused_rules = grammar.unused_rules()
+ for prod in unused_rules:
+ errorlog.warning('%s:%d: Rule %r defined, but not used', prod.file, prod.line, prod.name)
+
+ if len(unused_terminals) == 1:
+ errorlog.warning('There is 1 unused token')
+ if len(unused_terminals) > 1:
+ errorlog.warning('There are %d unused tokens', len(unused_terminals))
+
+ if len(unused_rules) == 1:
+ errorlog.warning('There is 1 unused rule')
+ if len(unused_rules) > 1:
+ errorlog.warning('There are %d unused rules', len(unused_rules))
+
+ if debug:
+ debuglog.info('')
+ debuglog.info('Terminals, with rules where they appear')
+ debuglog.info('')
+ terms = list(grammar.Terminals)
+ terms.sort()
+ for term in terms:
+ debuglog.info('%-20s : %s', term, ' '.join([str(s) for s in grammar.Terminals[term]]))
+
+ debuglog.info('')
+ debuglog.info('Nonterminals, with rules where they appear')
+ debuglog.info('')
+ nonterms = list(grammar.Nonterminals)
+ nonterms.sort()
+ for nonterm in nonterms:
+ debuglog.info('%-20s : %s', nonterm, ' '.join([str(s) for s in grammar.Nonterminals[nonterm]]))
+ debuglog.info('')
+
+ if check_recursion:
+ unreachable = grammar.find_unreachable()
+ for u in unreachable:
+ errorlog.warning('Symbol %r is unreachable', u)
+
+ infinite = grammar.infinite_cycles()
+ for inf in infinite:
+ errorlog.error('Infinite recursion detected for symbol %r', inf)
+ errors = True
+
+ unused_prec = grammar.unused_precedence()
+ for term, assoc in unused_prec:
+ errorlog.error('Precedence rule %r defined for unknown symbol %r', assoc, term)
+ errors = True
+
+ if errors:
+ raise YaccError('Unable to build parser')
+
+ # Run the LRGeneratedTable on the grammar
+ if debug:
+ errorlog.debug('Generating %s tables', method)
+
+ lr = LRGeneratedTable(grammar, method, debuglog)
+
+ if debug:
+ num_sr = len(lr.sr_conflicts)
+
+ # Report shift/reduce and reduce/reduce conflicts
+ if num_sr == 1:
+ errorlog.warning('1 shift/reduce conflict')
+ elif num_sr > 1:
+ errorlog.warning('%d shift/reduce conflicts', num_sr)
+
+ num_rr = len(lr.rr_conflicts)
+ if num_rr == 1:
+ errorlog.warning('1 reduce/reduce conflict')
+ elif num_rr > 1:
+ errorlog.warning('%d reduce/reduce conflicts', num_rr)
+
+ # Write out conflicts to the output file
+ if debug and (lr.sr_conflicts or lr.rr_conflicts):
+ debuglog.warning('')
+ debuglog.warning('Conflicts:')
+ debuglog.warning('')
+
+ for state, tok, resolution in lr.sr_conflicts:
+ debuglog.warning('shift/reduce conflict for %s in state %d resolved as %s', tok, state, resolution)
+
+ already_reported = set()
+ for state, rule, rejected in lr.rr_conflicts:
+ if (state, id(rule), id(rejected)) in already_reported:
+ continue
+ debuglog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)
+ debuglog.warning('rejected rule (%s) in state %d', rejected, state)
+ errorlog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)
+ errorlog.warning('rejected rule (%s) in state %d', rejected, state)
+ already_reported.add((state, id(rule), id(rejected)))
+
+ warned_never = []
+ for state, rule, rejected in lr.rr_conflicts:
+ if not rejected.reduced and (rejected not in warned_never):
+ debuglog.warning('Rule (%s) is never reduced', rejected)
+ errorlog.warning('Rule (%s) is never reduced', rejected)
+ warned_never.append(rejected)
+
+ # Write the table file if requested
+ if write_tables:
+ try:
+ lr.write_table(tabmodule, outputdir, signature)
+ except IOError as e:
+ errorlog.warning("Couldn't create %r. %s" % (tabmodule, e))
+
+ # Write a pickled version of the tables
+ if picklefile:
+ try:
+ lr.pickle_table(picklefile, signature)
+ except IOError as e:
+ errorlog.warning("Couldn't create %r. %s" % (picklefile, e))
+
+ # Build the parser
+ lr.bind_callables(pinfo.pdict)
+ parser = LRParser(lr, pinfo.error_func)
+
+ parse = parser.parse
+ return parser
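+
+# Typical usage (a sketch; it assumes the calling module defines 'tokens',
+# p_* rule functions and, optionally, 'start' and 'precedence'):
+#
+# import ply.yacc as yacc
+# parser = yacc.yacc() # build (or reload) the parsing tables
+# result = parser.parse(data) # 'data' is the caller's input string
+#
+# An explicit lexer can also be supplied via parser.parse(data, lexer=lexer).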
diff --git a/third_party/python/ply/ply/ygen.py b/third_party/python/ply/ply/ygen.py
new file mode 100644
index 0000000000..acf5ca1a37
--- /dev/null
+++ b/third_party/python/ply/ply/ygen.py
@@ -0,0 +1,74 @@
+# ply: ygen.py
+#
+# This is a support program that auto-generates different versions of the YACC
+# parsing function with different features removed, for performance reasons.
+#
+# Users should edit the method LRParser.parsedebug() in yacc.py. The source code
+# for that method is then used to create the other methods. See the comments in
+# yacc.py for further details.
+
+import os.path
+import shutil
+
+def get_source_range(lines, tag):
+ srclines = enumerate(lines)
+ start_tag = '#--! %s-start' % tag
+ end_tag = '#--! %s-end' % tag
+
+ for start_index, line in srclines:
+ if line.strip().startswith(start_tag):
+ break
+
+ for end_index, line in srclines:
+ if line.strip().endswith(end_tag):
+ break
+
+ return (start_index + 1, end_index)
+
+def filter_section(lines, tag):
+ filtered_lines = []
+ include = True
+ tag_text = '#--! %s' % tag
+ for line in lines:
+ if line.strip().startswith(tag_text):
+ include = not include
+ elif include:
+ filtered_lines.append(line)
+ return filtered_lines
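+
+# For example, filtering the tag 'DEBUG' over the lines
+#
+# ['a\n', ' #--! DEBUG\n', 'b\n', ' #--! DEBUG\n', 'c\n']
+#
+# drops both marker lines and everything between them, leaving ['a\n', 'c\n'].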
+
+def main():
+ dirname = os.path.dirname(__file__)
+ shutil.copy2(os.path.join(dirname, 'yacc.py'), os.path.join(dirname, 'yacc.py.bak'))
+ with open(os.path.join(dirname, 'yacc.py'), 'r') as f:
+ lines = f.readlines()
+
+ parse_start, parse_end = get_source_range(lines, 'parsedebug')
+ parseopt_start, parseopt_end = get_source_range(lines, 'parseopt')
+ parseopt_notrack_start, parseopt_notrack_end = get_source_range(lines, 'parseopt-notrack')
+
+ # Get the original source
+ orig_lines = lines[parse_start:parse_end]
+
+ # Filter the DEBUG sections out
+ parseopt_lines = filter_section(orig_lines, 'DEBUG')
+
+ # Filter the TRACKING sections out
+ parseopt_notrack_lines = filter_section(parseopt_lines, 'TRACKING')
+
+ # Replace the parser source sections with updated versions
+ lines[parseopt_notrack_start:parseopt_notrack_end] = parseopt_notrack_lines
+ lines[parseopt_start:parseopt_end] = parseopt_lines
+
+ lines = [line.rstrip()+'\n' for line in lines]
+ with open(os.path.join(dirname, 'yacc.py'), 'w') as f:
+ f.writelines(lines)
+
+ print('Updated yacc.py')
+
+if __name__ == '__main__':
+ main()
+
+
+
+
+
diff --git a/third_party/python/ply/setup.cfg b/third_party/python/ply/setup.cfg
new file mode 100644
index 0000000000..4ec8a167da
--- /dev/null
+++ b/third_party/python/ply/setup.cfg
@@ -0,0 +1,11 @@
+[bdist_wheel]
+universal = 1
+
+[metadata]
+description-file = README.md
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/third_party/python/ply/setup.py b/third_party/python/ply/setup.py
new file mode 100644
index 0000000000..ee8ccd0ccf
--- /dev/null
+++ b/third_party/python/ply/setup.py
@@ -0,0 +1,31 @@
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+setup(name = "ply",
+ description="Python Lex & Yacc",
+ long_description = """
+PLY is yet another implementation of lex and yacc for Python. Some notable
+features include the fact that it's implemented entirely in Python and it
+uses LALR(1) parsing, which is efficient and well suited for larger grammars.
+
+PLY provides most of the standard lex/yacc features including support for empty
+productions, precedence rules, error recovery, and support for ambiguous grammars.
+
+PLY is extremely easy to use and provides very extensive error checking.
+It is compatible with both Python 2 and Python 3.
+""",
+ license="""BSD""",
+ version = "3.10",
+ author = "David Beazley",
+ author_email = "dave@dabeaz.com",
+ maintainer = "David Beazley",
+ maintainer_email = "dave@dabeaz.com",
+ url = "http://www.dabeaz.com/ply/",
+ packages = ['ply'],
+ classifiers = [
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 2',
+ ]
+ )
diff --git a/third_party/python/py/.gitignore b/third_party/python/py/.gitignore
new file mode 100644
index 0000000000..375476fd3c
--- /dev/null
+++ b/third_party/python/py/.gitignore
@@ -0,0 +1,14 @@
+
+.cache/
+.tox/
+__pycache__/
+
+*.pyc
+*.pyo
+
+*.egg-info
+.eggs/
+
+dist/*
+/py/_version.py
+.pytest_cache/
diff --git a/third_party/python/py/.travis.yml b/third_party/python/py/.travis.yml
new file mode 100644
index 0000000000..ea75028de5
--- /dev/null
+++ b/third_party/python/py/.travis.yml
@@ -0,0 +1,44 @@
+sudo: false
+language: python
+python:
+- '2.7'
+- '3.4'
+- '3.5'
+- '3.6'
+- 'pypy-5.4'
+env:
+- DEPS="pytest~=2.9.0"
+- DEPS="pytest~=3.0.0"
+#- DEPS="pytest~=3.1.0"
+
+matrix:
+
+ include:
+ - python: '2.7'
+ # using a different option due to pytest-addopts pytester issues
+ env: PYTEST_XADDOPTS="-n 3 --runslowtests" DEPS="pytest~=3.0.0 pytest-xdist"
+
+ - stage: deploy
+ python: '3.6'
+ env:
+ install: pip install -U setuptools setuptools_scm
+ script: skip
+ deploy:
+ provider: pypi
+ user: nicoddemus
+ distributions: sdist bdist_wheel
+ skip_upload_docs: true
+ password:
+ secure: VNYW/sZoD+9DzKCe6vANNXXJR7jP7rwySafQ33N1jAnCrdylQjEN/p6tSfUe8jDi3wDpLPL9h8pwfxuUT7CRxglHov3Qe7zSeywixvHan5aFahQiQ8+gucYIM7wITHH3oQs7jN35pnhdnF+QlW2+eDCL6qOLU5XwuRhsDKXjQ/hUWR5hlX5EniD1gzyKEf6j1YCpST87tKpeLwVEYEmsucdkUZuXhxDtyaWQHWiPsLWwh/slQtUJEHeLF26r8UxFy0RiGne9jR+CzRfH5ktcA9/pArvp4VuwOii+1TDxVSYP7+I8Z+eUKN9JBg12QLaHwoIN/8J+MvHCkuf+OGSLM3sEyNRJGDev372xg3K7ylIkeeK4WXirKEp2ojgN8tniloDjnwdu/gPWBnrXuooA60tNoByHFa8KbMZAr2B2sQeMxD4VZGr1N8l0rX4gRTrwvdk3i3ulLKVSwkXaGn+GrfZTTboa7dEnpuma8tv1niNCSpStYIy7atS8129+5ijV3OC8DzOMh/rVbO9WsDb/RPG3yjFiDvEJPIPeE0l/m5u42QBqtdZSS2ia7UWTJBiEY09uFMTRmH5hhE/1aiYBbvAztf5CReUbeKdSQz3L8TTSZqewtFZmXTkX97/xQnrEpsnGezIM2DNuMEuQG3MxGkNCxwbQKpx/bkHdrD75yMk=
+ on:
+ tags: true
+ repo: pytest-dev/py
+
+ allow_failures:
+ - python: 'pypy-5.4'
+install:
+- pip install -U setuptools setuptools_scm
+- pip install $DEPS
+- pip install -U . --force-reinstall
+script:
+- py.test --lsof $PYTEST_XADDOPTS
diff --git a/third_party/python/py/AUTHORS b/third_party/python/py/AUTHORS
new file mode 100644
index 0000000000..8c0cf9b71b
--- /dev/null
+++ b/third_party/python/py/AUTHORS
@@ -0,0 +1,24 @@
+Holger Krekel, holger at merlinux eu
+Benjamin Peterson, benjamin at python org
+Ronny Pfannschmidt, Ronny.Pfannschmidt at gmx de
+Guido Wesdorp, johnny at johnnydebris net
+Samuele Pedroni, pedronis at openend se
+Carl Friedrich Bolz, cfbolz at gmx de
+Armin Rigo, arigo at tunes org
+Maciek Fijalkowski, fijal at genesilico pl
+Brian Dorsey, briandorsey at gmail com
+Floris Bruynooghe, flub at devork be
+merlinux GmbH, Germany, office at merlinux eu
+
+Contributors include::
+
+Ross Lawley
+Ralf Schmitt
+Chris Lamb
+Harald Armin Massa
+Martijn Faassen
+Ian Bicking
+Jan Balster
+Grig Gheorghiu
+Bob Ippolito
+Christian Tismer
diff --git a/third_party/python/py/CHANGELOG b/third_party/python/py/CHANGELOG
new file mode 100644
index 0000000000..a17cdb5992
--- /dev/null
+++ b/third_party/python/py/CHANGELOG
@@ -0,0 +1,1160 @@
+1.5.4 (2018-06-27)
+==================
+
+- fix pytest-dev/pytest#3451: don't make assumptions about fs case sensitivity
+ in ``make_numbered_dir``.
+
+1.5.3
+=====
+
+- fix #179: ensure we can support 'from py.error import ...'
+
+1.5.2
+=====
+
+- fix #169, #170: error importing py.log on Windows: no module named ``syslog``.
+
+1.5.1
+=====
+
+- fix #167 - prevent pip from installing py in unsupported Python versions.
+
+1.5.0
+=====
+
+NOTE: **this release has been removed from PyPI** due to missing package
+metadata which caused a number of problems to py26 and py33 users.
+This issue was fixed in the 1.5.1 release.
+
+- python 2.6 and 3.3 are no longer supported
+- deprecate py.std and remove all internal uses
+- fix #73 turn py.error into an actual module
+- path join to / no longer produces leading double slashes
+- fix #82 - remove unsupportable aliases
+- fix python37 compatibility of path.sysfind on windows by correctly replacing vars
+- turn iniconfig and apipkg into vendored packages and ease de-vendoring for distributions
+- fix #68 remove invalid py.test.ensuretemp references
+- fix #25 - deprecate path.listdir(sort=callable)
+- add ``TerminalWriter.chars_on_current_line`` read-only property that tracks how many characters
+ have been written to the current line.
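A minimal sketch of the new property (assuming a default ``py.io.TerminalWriter()`` writing to stdout)::

    import py

    tw = py.io.TerminalWriter()
    tw.write("collecting ")
    tw.write("42%")
    # counts the characters emitted since the last newline
    print(tw.chars_on_current_line)   # -> 14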
+
+1.4.34
+====================================================================
+
+- fix issue119 / pytest issue708 where tmpdir may fail to make numbered directories
+ when the filesystem is case-insensitive.
+
+1.4.33
+====================================================================
+
+- avoid imports in calls to py.path.local().fnmatch(). Thanks Andreas Pelme for
+ the PR.
+
+- fix issue106: Naive unicode encoding when calling fspath() in python2. Thanks Tiago Nobrega for the PR.
+
+- fix issue110: unittest.TestCase.assertWarns fails with py imported.
+
+1.4.32
+====================================================================
+
+- fix issue70: added ability to copy all stat info in py.path.local.copy.
+
+- make TerminalWriter.fullwidth a property. This results in the correct
+ value when the terminal gets resized.
+
+- update supported html tags to include recent additions.
+ Thanks Denis Afonso for the PR.
+
+- Remove internal code in ``Source.compile`` meant to support earlier Python 3 versions that produced the side effect
+ of leaving ``None`` in ``sys.modules`` when called (see pytest-dev/pytest#2103).
+ Thanks Bruno Oliveira for the PR.
+
+1.4.31
+==================================================
+
+- fix local().copy(dest, mode=True) to also work
+ with unicode.
+
+- pass better error message with svn EEXIST paths
+
+1.4.30
+==================================================
+
+- fix issue68 an assert with a multiline list comprehension
+ was not reported correctly. Thanks Henrik Heibuerger.
+
+
+1.4.29
+==================================================
+
+- fix issue55: revert a change to the statement finding algorithm
+ which is used by pytest for generating tracebacks.
+ Thanks Daniel Hahler for initial analysis.
+
+- fix pytest issue254 for when traceback rendering can't
+ find valid source code. Thanks Ionel Cristian Maries.
+
+
+1.4.28
+==================================================
+
+- fix issue64 -- dirpath regression when "abs=True" is passed.
+ Thanks Gilles Dartiguelongue.
+
+1.4.27
+==================================================
+
+- fix issue59: point to new repo site
+
+- allow a new ensuresyspath="append" mode for py.path.local.pyimport()
+ so that a necessary import path is appended instead of prepended to
+ sys.path
+
+- strike undocumented, untested argument to py.path.local.pypkgpath
+
+- speed up py.path.local.dirpath by a factor of 10
+
+1.4.26
+==================================================
+
+- avoid calling normpath twice in py.path.local
+
+- py.builtin._reraise properly reraises under Python3 now.
+
+- fix issue53 - remove module index, thanks jenisys.
+
+- allow posix path separators when "fnmatch" is called.
+ Thanks Christian Long for the complete PR.
+
+1.4.25
+==================================================
+
+- fix issue52: vaguely fix py25 compat of py.path.local (it's not
+ officially supported), also fix docs
+
+- fix pytest issue 589: when checking if we have a recursion error
+ check for the specific "maximum recursion depth" text of the exception.
+
+1.4.24
+==================================================
+
+- Fix retrieving source when an else: line has another statement on
+ the same line.
+
+- add localpath read_text/write_text/read_bytes/write_bytes methods
+ as shortcuts and clearer bytes/text interfaces for read/write.
+ Adapted from a PR from Paul Moore.
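A short sketch of these shortcuts (the file name is made up; ``write_text`` needs an explicit encoding)::

    import py

    p = py.path.local("notes.txt")
    p.write_text(u"h\xe9llo", encoding="utf-8")      # text in, encoded on write
    assert p.read_text(encoding="utf-8") == u"h\xe9llo"
    p.write_bytes(b"\x00\x01")                       # raw bytes interface
    assert p.read_bytes() == b"\x00\x01"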
+
+
+1.4.23
+==================================================
+
+- use newer apipkg version which makes attribute access on
+ alias modules resolve to None rather than an ImportError.
+ This helps with code that uses inspect.getframeinfo()
+ on py34 which causes a complete walk on sys.modules
+ thus triggering the alias module to resolve and blowing
+ up with ImportError. The negative side is that something
+ like "py.test.X" will now result in None instead of "importerror: pytest"
+ if pytest is not installed. But you shouldn't import "py.test"
+ anyway anymore.
+
+- adapt one svn test to only check for any exception instead
+ of specific ones because different svn versions cause different
+ errors and we don't care.
+
+
+1.4.22
+==================================================
+
+- refactor class-level registry on ForkedFunc child start/finish
+ event to become instance based (i.e. passed into the constructor)
+
+1.4.21
+==================================================
+
+- ForkedFunc now has class-level register_on_start/on_exit()
+ methods to allow adding information in the boxed process.
+ Thanks Marc Schlaich.
+
+- ForkedFunc in the child opens in "auto-flush" mode for
+ stdout/stderr so that when a subprocess dies you can see
+ its output even if it didn't flush itself.
+
+- refactor traceback generation in light of pytest issue 364
+ (shortening tracebacks). you can now set a new traceback style
+ on a per-entry basis such that a caller can force entries to be
+ displayed as short or long entries.
+
+- win32: py.path.local.sysfind(name) will preferably return files with
+ extensions so that if "X" and "X.bat" or "X.exe" is on the PATH,
+ one of the latter two will be returned.
+
+1.4.20
+==================================================
+
+- ignore unicode decode errors in xmlescape. Thanks Anatoly Bubenkoff.
+
+- on python2 modify traceback.format_exception_only to match python3
+ behaviour, namely trying to print unicode for Exception instances
+
+- use a safer way for serializing exception reports (helps to fix
+ pytest issue413)
+
+Changes between 1.4.18 and 1.4.19
+==================================================
+
+- merge in apipkg fixes
+
+- some micro-optimizations in py/_code/code.py for speeding
+ up pytest runs. Thanks Alex Gaynor for initiative.
+
+- check PY_COLORS=1 or PY_COLORS=0 to force coloring/not-coloring
+ for py.io.TerminalWriter() independently from capabilities
+ of the output file. Thanks Marc Abramowitz for the PR.
+
+- some fixes to unicode handling in assertion handling.
+ Thanks for the PR to Floris Bruynooghe. (This helps
+ to fix pytest issue 319).
+
+- depend on setuptools presence, remove distribute_setup
+
+Changes between 1.4.17 and 1.4.18
+==================================================
+
+- introduce path.ensure_dir() as a synonym for ensure(..., dir=1)
+
+- some unicode/python3 related fixes wrt to path manipulations
+ (if you start passing unicode particular in py2 you might
+ still get problems, though)
+
+Changes between 1.4.16 and 1.4.17
+==================================================
+
+- make py.io.TerminalWriter() prefer colorama if it is available
+ and avoid empty lines when separator-lines are printed by
+ being defensive and reducing the working terminalwidth by 1
+
+- introduce optional "expanduser" argument to py.path.local
+ so that local("~", expanduser=True) gives the home
+ directory of the current user.
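For example (a sketch; this mirrors ``os.path.expanduser`` behaviour)::

    import py

    home = py.path.local("~", expanduser=True)   # the calling user's home directory
    print(home)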
+
+Changes between 1.4.15 and 1.4.16
+==================================================
+
+- fix issue35 - define __gt__ ordering between a local path
+ and strings
+
+- fix issue36 - make chdir() work even if os.getcwd() fails.
+
+- add path.exists/isdir/isfile/islink shortcuts
+
+- introduce local path.as_cwd() context manager.
+
+- introduce p.write(ensure=1) and p.open(ensure=1)
+ where ensure triggers creation of necessary parent
+ dirs.
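A small sketch of the ``ensure`` and ``as_cwd()`` additions above (directory and file names are made up)::

    import os
    import py

    tmp = py.path.local.mkdtemp()
    target = tmp.join("sub", "data.txt")
    target.write("content", ensure=True)    # creates the missing "sub" parent dir
    assert target.isfile()

    with tmp.as_cwd():                      # temporarily chdir into the tmp dir
        print(os.getcwd())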
+
+
+Changes between 1.4.14 and 1.4.15
+==================================================
+
+- majorly speed up some common calling patterns with
+ LocalPath.listdir()/join/check/stat functions considerably.
+
+- fix an edge case with fnmatch where a glob style pattern appeared
+ in an absolute path.
+
+Changes between 1.4.13 and 1.4.14
+==================================================
+
+- fix dupfile to work with files that don't
+ carry a mode. Thanks Jason R. Coombs.
+
+Changes between 1.4.12 and 1.4.13
+==================================================
+
+- fix getting statementrange/compiling a file ending
+ in a comment line without newline (on python2.5)
+- for local paths you can pass "mode=True" to a copy()
+ in order to copy permission bits (underlying mechanism
+ is using shutil.copymode)
+- add paths arguments to py.path.local.sysfind to restrict
+ search to the directories in the path.
+- add isdir/isfile/islink to path.stat() objects allowing to perform
+ multiple checks without calling out multiple times
+- drop py.path.local.__new__ in favour of a simpler __init__
+- iniconfig: allow "name:value" settings in config files, no space after
+ "name" required
+- fix issue 27 - NameError in unlikely untested case of saferepr
+
+
+Changes between 1.4.11 and 1.4.12
+==================================================
+
+- fix python2.4 support - for pre-AST interpreters re-introduce
+ old way to find statements in exceptions (closes pytest issue 209)
+- add tox.ini to distribution
+- fix issue23 - print *,** args information in tracebacks,
+ thanks Manuel Jacob
+
+
+Changes between 1.4.10 and 1.4.11
+==================================================
+
+- use _ast to determine statement ranges when printing tracebacks -
+ avoiding multi-second delays on some large test modules
+- fix an internal test to not use class-denoted pytest_funcarg__
+- fix a doc link to bug tracker
+- try to make terminal.write() printing more robust against
+ unicodeencode/decode problems, amend the corresponding test
+- introduce py.builtin.text and py.builtin.bytes
+ to point to respective str/unicode (py2) and bytes/str (py3) types
+- fix error handling on win32/py33 for ENODIR
+
+Changes between 1.4.9 and 1.4.10
+==================================================
+
+- terminalwriter: default to encode to UTF8 if no encoding is defined
+ on the output stream
+- issue22: improve heuristic for finding the statementrange in exceptions
+
+Changes between 1.4.8 and 1.4.9
+==================================================
+
+- fix bug of path.visit() which would not recognize glob-style patterns
+ for the "rec" recursion argument
+- changed iniconfig parsing to better conform, now the chars ";"
+ and "#" only mark a comment at the stripped start of a line
+- include recent apipkg-1.2
+- change internal terminalwriter.line/reline logic to more nicely
+ support file spinners
+
+Changes between 1.4.7 and 1.4.8
+==================================================
+
+- fix issue 13 - correct handling of the tag name object in xmlgen
+- fix issue 14 - support raw attribute values in xmlgen
+- fix windows terminalwriter printing/re-line problem
+- update distribute_setup.py to 0.6.27
+
+Changes between 1.4.6 and 1.4.7
+==================================================
+
+- fix issue11 - own test failure with python3.3 / Thanks Benjamin Peterson
+- help fix pytest issue 102
+
+Changes between 1.4.5 and 1.4.6
+==================================================
+
+- help to fix pytest issue99: unify output of
+ ExceptionInfo.getrepr(style="native") with ...(style="long")
+- fix issue7: source.getstatementrange() now raises proper error
+ if no valid statement can be found
+- fix issue8: fix code and tests of svnurl/svnwc to work on subversion 1.7 -
+ note that path.status(updates=1) will not work properly as svn 1.7's status
+ --xml output is broken.
+- make source.getstatementrange() more resilient about non-python code frames
+ (as seen from jinja2)
+- make traceback recursion detection more resilient
+ about the eval magic of a decorator library
+- iniconfig: add support for ; as comment starter
+- properly handle lists in xmlgen on python3
+- normalize py.code.getfslineno(obj) to always return a (string, int) tuple
+ defaulting to ("", -1) respectively if no source code can be found for obj.
+
+Changes between 1.4.4 and 1.4.5
+==================================================
+
+- improve some unicode handling in terminalwriter and capturing
+ (used by pytest)
+
+Changes between 1.4.3 and 1.4.4
+==================================================
+
+- a few fixes and assertion related refinements for pytest-2.1
+- guard py.code.Code and getfslineno against bogus input
+ and make py.code.Code objects for object instances
+ by looking up their __call__ function.
+- make exception presentation robust against invalid current cwd
+
+Changes between 1.4.2 and 1.4.3
+==================================================
+
+- fix terminal coloring issue for skipped tests (thanks Amaury)
+- fix issue4 - large calls to ansi_print (thanks Amaury)
+
+Changes between 1.4.1 and 1.4.2
+==================================================
+
+- fix (pytest) issue23 - tmpdir argument now works on Python3.2 and WindowsXP
+ (which apparently starts to offer os.symlink now)
+
+- better error message for syntax errors from compiled code
+
+- small fix to better deal with (un-)colored terminal output on windows
+
+Changes between 1.4.0 and 1.4.1
+==================================================
+
+- fix issue1 - py.error.* classes to be pickleable
+
+- fix issue2 - on windows32 use PATHEXT as the list of potential
+ extensions to find binaries with py.path.local.sysfind(commandname)
+
+- fix (pytest-) issue10 and refine assertion reinterpretation
+ to avoid breaking if the __nonzero__ of an object fails
+
+- fix (pytest-) issue17 where python3 does not like "import *"
+ leading to misrepresentation of import-errors in test modules
+
+- fix py.error.* attribute pypy access issue
+
+- allow path.samefile(arg) to succeed when arg is a relative filename
+
+- fix (pytest-) issue20 path.samefile(relpath) works as expected now
+
+- fix (pytest-) issue8 len(long_list) now shows the length of the list
+
+Changes between 1.3.4 and 1.4.0
+==================================================
+
+- py.test was moved to a separate "pytest" package. What remains is
+ a stub hook which will proxy ``import py.test`` to ``pytest``.
+- all command line tools ("py.cleanup/lookup/countloc/..." moved
+ to "pycmd" package)
+- removed the old and deprecated "py.magic" namespace
+- use apipkg-1.1 and make py.apipkg.initpkg|ApiModule available
+- add py.iniconfig module for brain-dead easy ini-config file parsing
+- introduce py.builtin.any()
+- path objects have a .dirname attribute now (equivalent to
+ os.path.dirname(path))
+- path.visit() accepts breadthfirst (bf) and sort options
+- remove deprecated py.compat namespace
+
+Changes between 1.3.3 and 1.3.4
+==================================================
+
+- fix issue111: improve install documentation for windows
+- fix issue119: fix custom collectability of __init__.py as a module
+- fix issue116: --doctestmodules work with __init__.py files as well
+- fix issue115: unify internal exception passthrough/catching/GeneratorExit
+- fix issue118: new --tb=native for presenting cpython-standard exceptions
+
+Changes between 1.3.2 and 1.3.3
+==================================================
+
+- fix issue113: assertion representation problem with triple-quoted strings
+ (and possibly other cases)
+- make conftest loading detect that a conftest file with the same
+ content was already loaded, avoids surprises in nested directory structures
+ which can be produced e.g. by Hudson. It probably removes the need to use
+ --confcutdir in most cases.
+- fix terminal coloring for win32
+ (thanks Michael Foord for reporting)
+- fix weirdness: make terminal width detection work on stdout instead of stdin
+ (thanks Armin Ronacher for reporting)
+- remove trailing whitespace in all py/text distribution files
+
+Changes between 1.3.1 and 1.3.2
+==================================================
+
+New features
+++++++++++++++++++
+
+- fix issue103: introduce py.test.raises as context manager, examples::
+
+ with py.test.raises(ZeroDivisionError):
+ x = 0
+ 1 / x
+
+ with py.test.raises(RuntimeError) as excinfo:
+ call_something()
+
+ # you may do extra checks on excinfo.value|type|traceback here
+
+ (thanks Ronny Pfannschmidt)
+
+- Funcarg factories can now dynamically apply a marker to a
+ test invocation. This is for example useful if a factory
+ provides parameters to a test which are expected-to-fail::
+
+ def pytest_funcarg__arg(request):
+ request.applymarker(py.test.mark.xfail(reason="flaky config"))
+ ...
+
+ def test_function(arg):
+ ...
+
+- improved error reporting on collection and import errors. This makes
+ use of a more general mechanism, namely that for custom test item/collect
+ nodes ``node.repr_failure(excinfo)`` is now uniformly called so that you can
+ override it to return a string error representation of your choice
+ which is going to be reported as a (red) string.
+
+- introduce '--junitprefix=STR' option to prepend a prefix
+ to all reports in the junitxml file.
+
+Bug fixes / Maintenance
+++++++++++++++++++++++++++
+
+- make tests and the ``pytest_recwarn`` plugin in particular fully compatible
+ to Python2.7 (if you use the ``recwarn`` funcarg warnings will be enabled so that
+ you can properly check for their existence in a cross-python manner).
+- refine --pdb: ignore xfailed tests, unify its TB-reporting and
+ don't display failures again at the end.
+- fix assertion interpretation with the ** operator (thanks Benjamin Peterson)
+- fix issue105 assignment on the same line as a failing assertion (thanks Benjamin Peterson)
+- fix issue104 proper escaping for test names in junitxml plugin (thanks anonymous)
+- fix issue57 -f|--looponfail to work with xpassing tests (thanks Ronny)
+- fix issue92 collectonly reporter and --pastebin (thanks Benjamin Peterson)
+- fix py.code.compile(source) to generate unique filenames
+- fix assertion re-interp problems on PyPy, by deferring code
+ compilation to the (overridable) Frame.eval class. (thanks Amaury Forgeot)
+- fix py.path.local.pyimport() to work with directories
+- streamline py.path.local.mkdtemp implementation and usage
+- don't print empty lines when showing junitxml-filename
+- add optional boolean ignore_errors parameter to py.path.local.remove
+- fix terminal writing on win32/python2.4
+- py.process.cmdexec() now tries harder to return properly encoded unicode objects
+ on all python versions
+- install plain py.test/py.which scripts also for Jython, this helps to
+ get canonical script paths in virtualenv situations
+- make path.bestrelpath(path) return ".", note that when calling
+ X.bestrelpath the assumption is that X is a directory.
+- make initial conftest discovery ignore "--" prefixed arguments
+- fix resultlog plugin when used in an multicpu/multihost xdist situation
+ (thanks Jakub Gustak)
+- perform distributed testing related reporting in the xdist-plugin
+ rather than having dist-related code in the generic py.test
+ distribution
+- fix homedir detection on Windows
+- ship distribute_setup.py version 0.6.13
+
+Changes between 1.3.0 and 1.3.1
+==================================================
+
+New features
+++++++++++++++++++
+
+- issue91: introduce new py.test.xfail(reason) helper
+ to imperatively mark a test as expected to fail. Can
+ be used from within setup and test functions. This is
+ useful especially for parametrized tests when certain
+ configurations are expected-to-fail. In this case the
+ declarative approach with the @py.test.mark.xfail cannot
+ be used as it would mark all configurations as xfail.
+
+- issue102: introduce new --maxfail=NUM option to stop
+ test runs after NUM failures. This is a generalization
+ of the '-x' or '--exitfirst' option which is now equivalent
+ to '--maxfail=1'. Both '-x' and '--maxfail' will
+ now also print a line near the end indicating the Interruption.
+
+- issue89: allow py.test.mark decorators to be used on classes
+ (class decorators were introduced with python2.6) and
+ also allow to have multiple markers applied at class/module level
+ by specifying a list.
+
+- improve and refine letter reporting in the progress bar:
+ . pass
+ f failed test
+ s skipped tests (reminder: use for dependency/platform mismatch only)
+ x xfailed test (test that was expected to fail)
+ X xpassed test (test that was expected to fail but passed)
+
+ You can use any combination of 'fsxX' with the '-r' extended
+ reporting option. The xfail/xpass results will show up as
+ skipped tests in the junitxml output - which also fixes
+ issue99.
+
+- make py.test.cmdline.main() return the exitstatus instead of raising
+ SystemExit and also allow it to be called multiple times. This of
+ course requires that your application and tests are properly torn
+ down and don't have global state.
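A sketch of the programmatic call (the test path is a placeholder; with the vendored py, ``py.test`` is an alias that needs pytest installed)::

    import py

    # returns the exit status instead of raising SystemExit
    exitstatus = py.test.cmdline.main(["-q", "tests/"])
    print("test run finished with exit status %d" % exitstatus)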
+
+Fixes / Maintenance
+++++++++++++++++++++++
+
+- improved traceback presentation:
+ - improved and unified reporting for "--tb=short" option
+ - Errors during test module imports are much shorter (using --tb=short style)
+ - raises shows shorter more relevant tracebacks
+ - --fulltrace now more systematically makes traces longer / inhibits cutting
+
+- improve support for raises and other dynamically compiled code by
+ manipulating python's linecache.cache instead of the previous
+ rather hacky way of creating custom code objects. This makes
+ it work seamlessly on Jython and PyPy where it previously didn't.
+
+- fix issue96: make capturing more resilient against Control-C
+ interruptions (involved somewhat substantial refactoring
+ to the underlying capturing functionality to avoid race
+ conditions).
+
+- fix chaining of conditional skipif/xfail decorators - so it works now
+ as expected to use multiple @py.test.mark.skipif(condition) decorators,
+ including specific reporting which of the conditions led to skipping.
+
+- fix issue95: late-import zlib so that it's not required
+ for general py.test startup.
+
+- fix issue94: make reporting more robust against bogus source code
+ (and internally be more careful when presenting unexpected byte sequences)
+
+
+Changes between 1.2.1 and 1.3.0
+==================================================
+
+- deprecate --report option in favour of a new shorter and easier to
+ remember -r option: it takes a string argument consisting of any
+ combination of 'xfsX' characters. They relate to the single chars
+ you see during the dotted progress printing and will print an extra line
+ per test at the end of the test run. This extra line indicates the exact
+ position or test ID that you directly paste to the py.test cmdline in order
+ to re-run a particular test.
+
+- allow external plugins to register new hooks via the new
+ pytest_addhooks(pluginmanager) hook. The new release of
+ the pytest-xdist plugin for distributed and looponfailing
+ testing requires this feature.
+
+- add a new pytest_ignore_collect(path, config) hook to allow projects and
+ plugins to define exclusion behaviour for their directory structure -
+ for example you may define in a conftest.py this method::
+
+ def pytest_ignore_collect(path):
+ return path.check(link=1)
+
+ to prevent even a collection try of any tests in symlinked dirs.
+
+- new pytest_pycollect_makemodule(path, parent) hook for
+ allowing customization of the Module collection object for a
+ matching test module.
+
+- extend and refine xfail mechanism:
+ ``@py.test.mark.xfail(run=False)`` do not run the decorated test
+ ``@py.test.mark.xfail(reason="...")`` prints the reason string in xfail summaries
+ specifying ``--runxfail`` on command line virtually ignores xfail markers
+
+- expose (previously internal) commonly useful methods:
+ py.io.get_terminal_width() -> return terminal width
+ py.io.ansi_print(...) -> print colored/bold text on linux/win32
+ py.io.saferepr(obj) -> return limited representation string
+
+- expose test outcome related exceptions as py.test.skip.Exception,
+ py.test.raises.Exception etc., useful mostly for plugins
+ doing special outcome interpretation/tweaking
+
+- (issue85) fix junitxml plugin to handle tests with non-ascii output
+
+- fix/refine python3 compatibility (thanks Benjamin Peterson)
+
+- fixes for making the jython/win32 combination work, note however:
+ jython2.5.1/win32 does not provide a command line launcher, see
+ http://bugs.jython.org/issue1491 . See pylib install documentation
+ for how to work around.
+
+- fixes for handling of unicode exception values and unprintable objects
+
+- (issue87) fix unboundlocal error in assertionold code
+
+- (issue86) improve documentation for looponfailing
+
+- refine IO capturing: stdin-redirect pseudo-file now has a NOP close() method
+
+- ship distribute_setup.py version 0.6.10
+
+- added links to the new capturelog and coverage plugins
+
+
+Changes between 1.2.1 and 1.2.0
+=====================================
+
+- refined usage and options for "py.cleanup"::
+
+ py.cleanup # remove "*.pyc" and "*$py.class" (jython) files
+ py.cleanup -e .swp -e .cache # also remove files with these extensions
+ py.cleanup -s # remove "build" and "dist" directory next to setup.py files
+ py.cleanup -d # also remove empty directories
+ py.cleanup -a # synonym for "-s -d -e 'pip-log.txt'"
+ py.cleanup -n # dry run, only show what would be removed
+
+- add a new option "py.test --funcargs" which shows available funcargs
+ and their help strings (docstrings on their respective factory function)
+ for a given test path
+
+- display a short and concise traceback if a funcarg lookup fails
+
+- early-load "conftest.py" files in non-dot first-level sub directories.
+ allows to conveniently keep and access test-related options in a ``test``
+ subdir and still add command line options.
+
+- fix issue67: new super-short traceback-printing option: "--tb=line" will print a single line for each failing (python) test indicating its filename, lineno and the failure value
+
+- fix issue78: always call python-level teardown functions even if the
+ according setup failed. This includes refinements for calling setup_module/class functions
+ which will now only be called once instead of the previous behaviour where they'd be called
+ multiple times if they raise an exception (including a Skipped exception). Any exception
+ will be recorded and associated with all tests in the corresponding module/class scope.
+
+- fix issue63: assume <40 columns to be a bogus terminal width, default to 80
+
+- fix pdb debugging to be in the correct frame on raises-related errors
+
+- update apipkg.py to fix an issue where recursive imports might
+ unnecessarily break importing
+
+- fix plugin links
+
+Changes between 1.2 and 1.1.1
+=====================================
+
+- moved dist/looponfailing from py.test core into a new
+ separately released pytest-xdist plugin.
+
+- new junitxml plugin: --junitxml=path will generate a junit style xml file
+ which is processable e.g. by the Hudson CI system.
+
+- new option: --genscript=path will generate a standalone py.test script
+ which will not need any libraries installed. thanks to Ralf Schmitt.
+
+- new option: --ignore will prevent specified path from collection.
+ Can be specified multiple times.
+
+- new option: --confcutdir=dir will make py.test only consider conftest
+ files that are relative to the specified dir.
+
+- new funcarg: "pytestconfig" is the pytest config object for access
+ to command line args and can now be easily used in a test.
+
+- install 'py.test' and `py.which` with a ``-$VERSION`` suffix to
+ disambiguate between Python3, python2.X, Jython and PyPy installed versions.
+
+- new "pytestconfig" funcarg allows access to test config object
+
+- new "pytest_report_header" hook can return additional lines
+ to be displayed at the header of a test run.
+
+- (experimental) allow "py.test path::name1::name2::..." for pointing
+ to a test within a test collection directly. This might eventually
+ evolve as a full substitute to "-k" specifications.
+
+- streamlined plugin loading: order is now as documented in
+ customize.html: setuptools, ENV, commandline, conftest.
+ also setuptools entry point names are turned to canonical names ("pytest_*")
+
+- automatically skip tests that need 'capfd' but have no os.dup
+
+- allow pytest_generate_tests to be defined in classes as well
+
+- deprecate usage of 'disabled' attribute in favour of pytestmark
+- deprecate definition of Directory, Module, Class and Function nodes
+ in conftest.py files. Use pytest collect hooks instead.
+
+- collection/item node specific runtest/collect hooks are only called exactly
+ on matching conftest.py files, i.e. ones which are exactly below
+ the filesystem path of an item
+
+- change: the first pytest_collect_directory hook to return something
+ will now prevent further hooks from being called.
+
+- change: figleaf plugin now requires --figleaf to run. Also
+ change its long command line options to be a bit shorter (see py.test -h).
+
+- change: pytest doctest plugin is now enabled by default and has a
+ new option --doctest-glob to set a pattern for file matches.
+
+- change: remove internal py._* helper vars, only keep py._pydir
+
+- robustify capturing to survive if custom pytest_runtest_setup
+ code failed and prevented the capturing setup code from running.
+
+- make py.test.* helpers provided by default plugins visible early -
+ works transparently both for pydoc and for interactive sessions
+ which will regularly see e.g. py.test.mark and py.test.importorskip.
+
+- simplify internal plugin manager machinery
+- simplify internal collection tree by introducing a RootCollector node
+
+- fix assert reinterpretation that sees a call containing "keyword=..."
+
+- fix issue66: invoke pytest_sessionstart and pytest_sessionfinish
+ hooks on slaves during dist-testing, report module/session teardown
+ hooks correctly.
+
+- fix issue65: properly handle dist-testing if no
+ execnet/py lib installed remotely.
+
+- skip some install-tests if no execnet is available
+
+- fix docs, fix internal bin/ script generation
+
+
+Changes between 1.1.1 and 1.1.0
+=====================================
+
+- introduce automatic plugin registration via 'pytest11'
+ entrypoints via setuptools' pkg_resources.iter_entry_points
+
+- fix py.test dist-testing to work with execnet >= 1.0.0b4
+
+- re-introduce py.test.cmdline.main() for better backward compatibility
+
+- svn paths: fix a bug with path.check(versioned=True) for svn paths,
+ allow '%' in svn paths, make svnwc.update() default to interactive mode
+ like in 1.0.x and add svnwc.update(interactive=False) to inhibit interaction.
+
+- refine distributed tarball to contain test and no pyc files
+
+- try harder to have deprecation warnings for py.compat.* accesses
+ report a correct location
+
+Changes between 1.1.0 and 1.0.2
+=====================================
+
+* adjust and improve docs
+
+* remove py.rest tool and internal namespace - it was
+ never really advertised and can still be used with
+ the old release if needed. If there is interest
+ it could be revived into its own tool i guess.
+
+* fix issue48 and issue59: raise an Error if the module
+ from an imported test file does not seem to come from
+ the filepath - avoids "same-name" confusion that has
+ been reported repeatedly
+
+* merged Ronny's nose-compatibility hacks: now
+ nose-style setup_module() and setup() functions are
+ supported
+
+* introduce generalized py.test.mark function marking
+
+* reshuffle / refine command line grouping
+
+* deprecate parser.addgroup in favour of getgroup which creates option group
+
+* add --report command line option that allows to control showing of skipped/xfailed sections
+
+* generalized skipping: a new way to mark python functions with skipif or xfail
+ at function, class and modules level based on platform or sys-module attributes.
+
+* extend py.test.mark decorator to allow for positional args
+
+* introduce and test "py.cleanup -d" to remove empty directories
+
+* fix issue #59 - robustify unittest test collection
+
+* make bpython/help interaction work by adding an __all__ attribute
+ to ApiModule, cleanup initpkg
+
+* use MIT license for pylib, add some contributors
+
+* remove py.execnet code and substitute all usages with 'execnet' proper
+
+* fix issue50 - cached_setup now caches more to expectations
+ for test functions with multiple arguments.
+
+* merge Jarko's fixes, issue #45 and #46
+
+* add the ability to specify a path for py.lookup to search in
+
+* fix a funcarg cached_setup bug probably only occurring
+ in distributed testing and "module" scope with teardown.
+
+* many fixes and changes for making the code base python3 compatible,
+ many thanks to Benjamin Peterson for helping with this.
+
+* consolidate builtins implementation to be compatible with >=2.3,
+ add helpers to ease keeping 2 and 3k compatible code
+
+* deprecate py.compat.doctest|subprocess|textwrap|optparse
+
+* deprecate py.magic.autopath, remove py/magic directory
+
+* move pytest assertion handling to py/code and a pytest_assertion
+ plugin, add "--no-assert" option, deprecate py.magic namespaces
+ in favour of (less) py.code ones.
+
+* consolidate and cleanup py/code classes and files
+
+* cleanup py/misc, move tests to bin-for-dist
+
+* introduce delattr/delitem/delenv methods to py.test's monkeypatch funcarg
+
+* consolidate py.log implementation, remove old approach.
+
+* introduce py.io.TextIO and py.io.BytesIO for distinguishing between
+ text/unicode and byte-streams (uses underlying standard lib io.*
+ if available)
+
+* make py.unittest_convert helper script available which converts "unittest.py"
+ style files into the simpler assert/direct-test-classes py.test/nosetests
+ style. The script was written by Laura Creighton.
+
+* simplified internal localpath implementation
+
+Changes between 1.0.1 and 1.0.2
+=====================================
+
+* fixing packaging issues, triggered by fedora redhat packaging,
+ also added doc, examples and contrib dirs to the tarball.
+
+* added a documentation link to the new django plugin.
+
+Changes between 1.0.0 and 1.0.1
+=====================================
+
+* added a 'pytest_nose' plugin which handles nose.SkipTest,
+ nose-style function/method/generator setup/teardown and
+ tries to report functions correctly.
+
+* capturing of unicode writes or encoded strings to sys.stdout/err
+ work better, also terminalwriting was adapted and somewhat
+ unified between windows and linux.
+
+* improved documentation layout and content a lot
+
+* added a "--help-config" option to show conftest.py / ENV-var names for
+ all longopt cmdline options, and some special conftest.py variables.
+ renamed 'conf_capture' conftest setting to 'option_capture' accordingly.
+
+* fix issue #27: better reporting on non-collectable items given on commandline
+ (e.g. pyc files)
+
+* fix issue #33: added --version flag (thanks Benjamin Peterson)
+
+* fix issue #32: adding support for "incomplete" paths to wcpath.status()
+
+* "Test" prefixed classes are *not* collected by default anymore if they
+ have an __init__ method
+
+* monkeypatch setenv() now accepts a "prepend" parameter
+
+* improved reporting of collection error tracebacks
+
+* simplified multicall mechanism and plugin architecture,
+ renamed some internal methods and argnames
+
+Changes between 1.0.0b9 and 1.0.0
+=====================================
+
+* more terse reporting: try to show filesystem paths relative to the current dir
+* improve xfail output a bit
+
+Changes between 1.0.0b8 and 1.0.0b9
+=====================================
+
+* cleanly handle and report final teardown of test setup
+
+* fix svn-1.6 compat issue with py.path.svnwc().versioned()
+ (thanks Wouter Vanden Hove)
+
+* setup/teardown or collection problems now show as ERRORs
+ or with big "E"'s in the progress lines. they are reported
+ and counted separately.
+
+* dist-testing: properly handle test items that get locally
+ collected but cannot be collected on the remote side - often
+ due to platform/dependency reasons
+
+* simplified py.test.mark API - see keyword plugin documentation
+
+* integrate better with logging: capturing now by default captures
+ test functions and their immediate setup/teardown in a single stream
+
+* capsys and capfd funcargs now have a readouterr() and a close() method
+ (underlyingly py.io.StdCapture/FD objects are used which grew a
+ readouterr() method as well to return snapshots of captured out/err)
+
+* make assert-reinterpretation work better with comparisons not
+ returning bools (reported with numpy, thanks Maciej Fijalkowski)
+
+* reworked per-test output capturing into the pytest_iocapture.py plugin
+ and thus removed capturing code from config object
+
+* item.repr_failure(excinfo) instead of item.repr_failure(excinfo, outerr)
+
+
+Changes between 1.0.0b7 and 1.0.0b8
+=====================================
+
+* pytest_unittest-plugin is now enabled by default
+
+* introduced pytest_keyboardinterrupt hook and
+ refined the pytest_sessionfinish hook, added tests.
+
+* workaround a buggy logging module interaction ("closing already closed
+ files"). Thanks to Sridhar Ratnakumar for triggering.
+
+* if plugins use "py.test.importorskip" for importing
+ a dependency only a warning will be issued instead
+ of exiting the testing process.
+
+* many improvements to docs:
+ - refined funcargs doc, use the term "factory" instead of "provider"
+ - added a new talk/tutorial doc page
+ - better download page
+ - better plugin docstrings
+ - added new plugins page and automatic doc generation script
+
+* fixed teardown problem related to partially failing funcarg setups
+ (thanks MrTopf for reporting), "pytest_runtest_teardown" is now
+ always invoked even if the "pytest_runtest_setup" failed.
+
+* tweaked doctest output for docstrings in py modules,
+ thanks Radomir.
+
+Changes between 1.0.0b3 and 1.0.0b7
+=============================================
+
+* renamed py.test.xfail back to py.test.mark.xfail to avoid
+ two ways to decorate for xfail
+
+* re-added py.test.mark decorator for setting keywords on functions
+ (it was actually documented so removing it was not nice)
+
+* remove scope-argument from request.addfinalizer() because
+ request.cached_setup has the scope arg. TOOWTDI.
+
+* perform setup finalization before reporting failures
+
+* apply modified patches from Andreas Kloeckner to allow
+ test functions to have no func_code (#22) and to make
+ "-k" and function keywords work (#20)
+
+* apply patch from Daniel Peolzleithner (issue #23)
+
+* resolve issue #18, multiprocessing.Manager() and
+ redirection clash
+
+* make __name__ == "__channelexec__" for remote_exec code
+
+Changes between 1.0.0b1 and 1.0.0b3
+=============================================
+
+* plugin classes are removed: one now defines
+ hooks directly in conftest.py or global pytest_*.py
+ files.
+
+* added new pytest_namespace(config) hook that allows
+ to inject helpers directly to the py.test.* namespace.
+
+* documented and refined many hooks
+
+* added new style of generative tests via
+ pytest_generate_tests hook that integrates
+ well with function arguments.
+
+
+Changes between 0.9.2 and 1.0.0b1
+=============================================
+
+* introduced new "funcarg" setup method,
+ see doc/test/funcarg.txt
+
+* introduced plugin architecture and many
+ new py.test plugins, see
+ doc/test/plugins.txt
+
+* teardown_method is now guaranteed to get
+ called after a test method has run.
+
+* new method: py.test.importorskip(mod,minversion)
+ will either import or call py.test.skip()
+
+* completely revised internal py.test architecture
+
+* new py.process.ForkedFunc object allowing to
+ fork execution of a function to a sub process
+ and getting a result back.
+
+XXX lots of things missing here XXX
+
+Changes between 0.9.1 and 0.9.2
+===============================
+
+* refined installation and metadata, created new setup.py,
+ now based on setuptools/ez_setup (thanks to Ralf Schmitt
+ for his support).
+
+* improved the way of making py.* scripts available in
+ windows environments, they are now added to the
+ Scripts directory as ".cmd" files.
+
+* py.path.svnwc.status() now is more complete and
+ uses xml output from the 'svn' command if available
+ (Guido Wesdorp)
+
+* fix for py.path.svn* to work with svn 1.5
+ (Chris Lamb)
+
+* fix path.relto(otherpath) method on windows to
+ use normcase for checking if a path is relative.
+
+* py.test's traceback is better parseable from editors
+ (follows the filenames:LINENO: MSG convention)
+ (thanks to Osmo Salomaa)
+
+* fix to javascript-generation, "py.test --runbrowser"
+ should work more reliably now
+
+* removed previously accidentally added
+ py.test.broken and py.test.notimplemented helpers.
+
+* there now is a py.__version__ attribute
+
+Changes between 0.9.0 and 0.9.1
+===============================
+
+This is a fairly complete list of changes between 0.9 and 0.9.1, which can
+serve as a reference for developers.
+
+* allowing + signs in py.path.svn urls [39106]
+* fixed support for Failed exceptions without excinfo in py.test [39340]
+* added support for killing processes for Windows (as well as platforms that
+ support os.kill) in py.misc.killproc [39655]
+* added setup/teardown for generative tests to py.test [40702]
+* added detection of FAILED TO LOAD MODULE to py.test [40703, 40738, 40739]
+* fixed problem with calling .remove() on wcpaths of non-versioned files in
+ py.path [44248]
+* fixed some import and inheritance issues in py.test [41480, 44648, 44655]
+* fail to run greenlet tests when pypy is available, but without stackless
+ [45294]
+* small fixes in rsession tests [45295]
+* fixed issue with 2.5 type representations in py.test [45483, 45484]
+* made that internal reporting issues displaying is done atomically in py.test
+ [45518]
+* made that non-existing files are ignored by the py.lookup script [45519]
+* improved exception name creation in py.test [45535]
+* made that less threads are used in execnet [merge in 45539]
+* removed lock required for atomical reporting issue displaying in py.test
+ [45545]
+* removed globals from execnet [45541, 45547]
+* refactored cleanup mechanics, made that setDaemon is set to 1 to make atexit
+ get called in 2.5 (py.execnet) [45548]
+* fixed bug in joining threads in py.execnet's servemain [45549]
+* refactored py.test.rsession tests to not rely on exact output format anymore
+ [45646]
+* using repr() on test outcome [45647]
+* added 'Reason' classes for py.test.skip() [45648, 45649]
+* killed some unnecessary sanity check in py.test.collect [45655]
+* avoid using os.tmpfile() in py.io.fdcapture because on Windows it's only
+ usable by Administrators [45901]
+* added support for locking and non-recursive commits to py.path.svnwc [45994]
+* locking files in py.execnet to prevent CPython from segfaulting [46010]
+* added export() method to py.path.svnurl
+* fixed -d -x in py.test [47277]
+* fixed argument concatenation problem in py.path.svnwc [49423]
+* restore py.test behaviour that it exits with code 1 when there are failures
+ [49974]
+* don't fail on html files that don't have an accompanying .txt file [50606]
+* fixed 'utestconvert.py < input' [50645]
+* small fix for code indentation in py.code.source [50755]
+* fix _docgen.py documentation building [51285]
+* improved checks for source representation of code blocks in py.test [51292]
+* added support for passing authentication to py.path.svn* objects [52000,
+ 52001]
+* removed sorted() call for py.apigen tests in favour of [].sort() to support
+ Python 2.3 [52481]
diff --git a/third_party/python/py/HOWTORELEASE.rst b/third_party/python/py/HOWTORELEASE.rst
new file mode 100644
index 0000000000..8d0231639d
--- /dev/null
+++ b/third_party/python/py/HOWTORELEASE.rst
@@ -0,0 +1,17 @@
+Release Procedure
+-----------------
+
+#. Create a branch ``release-X.Y.Z`` from the latest ``master``.
+
+#. Manually update the ``CHANGELOG`` and commit.
+
+#. Open a PR for this branch targeting ``master``.
+
+#. After all tests pass and the PR has been approved by at least another maintainer, publish to PyPI by creating and pushing a tag::
+
+ git tag X.Y.Z
+ git push git@github.com:pytest-dev/py X.Y.Z
+
+ Wait for the deploy to complete, then make sure it is `available on PyPI <https://pypi.org/project/py>`_.
+
+#. Merge your PR to ``master``.
diff --git a/third_party/python/py/LICENSE b/third_party/python/py/LICENSE
new file mode 100644
index 0000000000..31ecdfb1db
--- /dev/null
+++ b/third_party/python/py/LICENSE
@@ -0,0 +1,19 @@
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+
diff --git a/third_party/python/py/MANIFEST.in b/third_party/python/py/MANIFEST.in
new file mode 100644
index 0000000000..239ad2283e
--- /dev/null
+++ b/third_party/python/py/MANIFEST.in
@@ -0,0 +1,10 @@
+include CHANGELOG
+include AUTHORS
+include README.rst
+include setup.py
+include LICENSE
+include conftest.py
+include tox.ini
+graft doc
+graft testing
+global-exclude *.pyc
diff --git a/third_party/python/py/PKG-INFO b/third_party/python/py/PKG-INFO
new file mode 100644
index 0000000000..359dcef421
--- /dev/null
+++ b/third_party/python/py/PKG-INFO
@@ -0,0 +1,67 @@
+Metadata-Version: 1.2
+Name: py
+Version: 1.5.4
+Summary: library with cross-python path, ini-parsing, io, code, log facilities
+Home-page: http://py.readthedocs.io/
+Author: holger krekel, Ronny Pfannschmidt, Benjamin Peterson and others
+Author-email: pytest-dev@python.org
+License: MIT license
+Description: .. image:: https://img.shields.io/pypi/v/py.svg
+ :target: https://pypi.org/project/py
+
+ .. image:: https://anaconda.org/conda-forge/py/badges/version.svg
+ :target: https://anaconda.org/conda-forge/py
+
+ .. image:: https://img.shields.io/pypi/pyversions/pytest.svg
+ :target: https://pypi.org/project/py
+
+ .. image:: https://img.shields.io/travis/pytest-dev/py.svg
+ :target: https://travis-ci.org/pytest-dev/py
+
+ .. image:: https://ci.appveyor.com/api/projects/status/10keglan6uqwj5al/branch/master?svg=true
+ :target: https://ci.appveyor.com/project/pytestbot/py
+
+
+ **NOTE**: this library is in **maintenance mode** and should not be used in new code.
+
+ The py lib is a Python development support library featuring
+ the following tools and modules:
+
+ * ``py.path``: uniform local and svn path objects -> please use pathlib/pathlib2 instead
+ * ``py.apipkg``: explicit API control and lazy-importing -> please use the standalone package instead
+ * ``py.iniconfig``: easy parsing of .ini files -> please use the standalone package instead
+ * ``py.code``: dynamic code generation and introspection (deprecated, moved to ``pytest`` as an implementation detail).
+
+ **NOTE**: prior to the 1.4 release this distribution used to
+ contain py.test which is now its own package, see http://pytest.org
+
+ For questions and more information please visit http://py.readthedocs.org
+
+ Bugs and issues: https://github.com/pytest-dev/py
+
+ Authors: Holger Krekel and others, 2004-2017
+
+Platform: unix
+Platform: linux
+Platform: osx
+Platform: cygwin
+Platform: win32
+Classifier: Development Status :: 6 - Mature
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Topic :: Software Development :: Testing
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
diff --git a/third_party/python/py/README.rst b/third_party/python/py/README.rst
new file mode 100644
index 0000000000..3d9ec0faeb
--- /dev/null
+++ b/third_party/python/py/README.rst
@@ -0,0 +1,34 @@
+.. image:: https://img.shields.io/pypi/v/py.svg
+ :target: https://pypi.org/project/py
+
+.. image:: https://anaconda.org/conda-forge/py/badges/version.svg
+ :target: https://anaconda.org/conda-forge/py
+
+.. image:: https://img.shields.io/pypi/pyversions/pytest.svg
+ :target: https://pypi.org/project/py
+
+.. image:: https://img.shields.io/travis/pytest-dev/py.svg
+ :target: https://travis-ci.org/pytest-dev/py
+
+.. image:: https://ci.appveyor.com/api/projects/status/10keglan6uqwj5al/branch/master?svg=true
+ :target: https://ci.appveyor.com/project/pytestbot/py
+
+
+**NOTE**: this library is in **maintenance mode** and should not be used in new code.
+
+The py lib is a Python development support library featuring
+the following tools and modules:
+
+* ``py.path``: uniform local and svn path objects -> please use pathlib/pathlib2 instead
+* ``py.apipkg``: explicit API control and lazy-importing -> please use the standalone package instead
+* ``py.iniconfig``: easy parsing of .ini files -> please use the standalone package instead
+* ``py.code``: dynamic code generation and introspection (deprecated, moved to ``pytest`` as an implementation detail).
+
+**NOTE**: prior to the 1.4 release this distribution used to
+contain py.test which is now its own package, see http://pytest.org
+
+For questions and more information please visit http://py.readthedocs.org
+
+Bugs and issues: https://github.com/pytest-dev/py
+
+Authors: Holger Krekel and others, 2004-2017
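As a small illustration of the ``py.iniconfig`` line above, a sketch parsing inline data (section and option names are made up)::

    import py

    ini = py.iniconfig.IniConfig(
        "example.ini",                                    # path is only used in error messages here
        data="[server]\nhost = localhost\nport: 8080\n",  # both "=" and ":" separators parse
    )
    assert ini["server"]["host"] == "localhost"
    assert ini["server"].get("port") == "8080"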
diff --git a/third_party/python/py/appveyor.yml b/third_party/python/py/appveyor.yml
new file mode 100644
index 0000000000..5fbeca9ab6
--- /dev/null
+++ b/third_party/python/py/appveyor.yml
@@ -0,0 +1,26 @@
+environment:
+ matrix:
+ # note: please use "tox --listenvs" to populate the build matrix below
+ - TOXENV: "py27-pytest29"
+ - TOXENV: "py27-pytest30"
+ - TOXENV: "py27-pytest31"
+ - TOXENV: "py34-pytest29"
+ - TOXENV: "py34-pytest30"
+ - TOXENV: "py34-pytest31"
+ - TOXENV: "py35-pytest29"
+ - TOXENV: "py35-pytest30"
+ - TOXENV: "py35-pytest31"
+ - TOXENV: "py36-pytest29"
+ - TOXENV: "py36-pytest30"
+ - TOXENV: "py36-pytest31"
+
+install:
+ - echo Installed Pythons
+ - dir c:\Python*
+
+ - C:\Python36\python -m pip install --upgrade --pre tox
+
+build: false # Not a C# project, build stuff at the test step instead.
+
+test_script:
+ - C:\Python36\python -m tox
diff --git a/third_party/python/py/bench/localpath.py b/third_party/python/py/bench/localpath.py
new file mode 100644
index 0000000000..ad4fbd8e2b
--- /dev/null
+++ b/third_party/python/py/bench/localpath.py
@@ -0,0 +1,75 @@
+
+import py
+import timeit
+
+class Listdir:
+ numiter = 100000
+ numentries = 100
+
+ def setup(self):
+ tmpdir = py.path.local.make_numbered_dir(self.__class__.__name__)
+ for i in range(self.numentries):
+ tmpdir.join(str(i))
+ self.tmpdir = tmpdir
+
+ def run(self):
+ return self.tmpdir.listdir()
+
+class Listdir_arg(Listdir):
+ numiter = 100000
+ numentries = 100
+
+ def run(self):
+ return self.tmpdir.listdir("47")
+
+class Join_onearg(Listdir):
+ def run(self):
+ self.tmpdir.join("17")
+ self.tmpdir.join("18")
+ self.tmpdir.join("19")
+
+class Join_multi(Listdir):
+ def run(self):
+ self.tmpdir.join("a", "b")
+ self.tmpdir.join("a", "b", "c")
+ self.tmpdir.join("a", "b", "c", "d")
+
+class Check(Listdir):
+ def run(self):
+ self.tmpdir.check()
+ self.tmpdir.check()
+ self.tmpdir.check()
+
+class CheckDir(Listdir):
+ def run(self):
+ self.tmpdir.check(dir=1)
+ self.tmpdir.check(dir=1)
+ assert not self.tmpdir.check(dir=0)
+
+class CheckDir2(Listdir):
+ def run(self):
+ self.tmpdir.stat().isdir()
+ self.tmpdir.stat().isdir()
+ assert self.tmpdir.stat().isdir()
+
+class CheckFile(Listdir):
+ def run(self):
+ self.tmpdir.check(file=1)
+ assert not self.tmpdir.check(file=1)
+ assert self.tmpdir.check(file=0)
+
+if __name__ == "__main__":
+ import time
+ for cls in [Listdir, Listdir_arg,
+ Join_onearg, Join_multi,
+ Check, CheckDir, CheckDir2, CheckFile,]:
+
+ inst = cls()
+ inst.setup()
+ now = time.time()
+ for i in xrange(cls.numiter):
+ inst.run()
+ elapsed = time.time() - now
+ print "%s: %d loops took %.2f seconds, per call %.6f" %(
+ cls.__name__,
+ cls.numiter, elapsed, elapsed / cls.numiter)
diff --git a/third_party/python/py/conftest.py b/third_party/python/py/conftest.py
new file mode 100644
index 0000000000..5bff3fe022
--- /dev/null
+++ b/third_party/python/py/conftest.py
@@ -0,0 +1,60 @@
+import py
+import pytest
+import sys
+
+pytest_plugins = 'doctest', 'pytester'
+
+collect_ignore = ['build', 'doc/_build']
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("pylib", "py lib testing options")
+ group.addoption('--runslowtests',
+ action="store_true", dest="runslowtests", default=False,
+ help=("run slow tests"))
+
+@pytest.fixture
+def sshhost(request):
+ val = request.config.getvalue("sshhost")
+ if val:
+ return val
+ py.test.skip("need --sshhost option")
+
+
+# XXX copied from execnet's conftest.py - needs to be merged
+winpymap = {
+ 'python2.7': r'C:\Python27\python.exe',
+}
+
+
+def getexecutable(name, cache={}):
+ try:
+ return cache[name]
+ except KeyError:
+ executable = py.path.local.sysfind(name)
+ if executable:
+ if name == "jython":
+ import subprocess
+ popen = subprocess.Popen(
+ [str(executable), "--version"],
+ universal_newlines=True, stderr=subprocess.PIPE)
+ out, err = popen.communicate()
+ if not err or "2.5" not in err:
+ executable = None
+ cache[name] = executable
+ return executable
+
+
+@pytest.fixture(params=('python2.7', 'pypy-c', 'jython'))
+def anypython(request):
+ name = request.param
+ executable = getexecutable(name)
+ if executable is None:
+ if sys.platform == "win32":
+ executable = winpymap.get(name, None)
+ if executable:
+ executable = py.path.local(executable)
+ if executable.check():
+ return executable
+ py.test.skip("no %s found" % (name,))
+ return executable
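
For illustration only, a hypothetical test that consumes the "anypython" fixture defined above; the fixture either returns a py.path.local pointing at an interpreter or skips the test when none is found:

    import subprocess

    def test_reports_version(anypython):
        # anypython points at python2.7, pypy-c or jython, per the fixture params
        proc = subprocess.run([str(anypython), "--version"],
                              capture_output=True, text=True)
        assert proc.returncode == 0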
diff --git a/third_party/python/py/py/__init__.py b/third_party/python/py/py/__init__.py
new file mode 100644
index 0000000000..b892ce1a2a
--- /dev/null
+++ b/third_party/python/py/py/__init__.py
@@ -0,0 +1,156 @@
+"""
+pylib: rapid testing and development utils
+
+this module uses apipkg.py for lazy-loading sub modules
+and classes. The initpkg-dictionary below specifies
+name->value mappings where value can be another namespace
+dictionary or an import path.
+
+(c) Holger Krekel and others, 2004-2014
+"""
+from py._error import error
+
+try:
+ from py._vendored_packages import apipkg
+ lib_not_mangled_by_packagers = True
+ vendor_prefix = '._vendored_packages.'
+except ImportError:
+ import apipkg
+ lib_not_mangled_by_packagers = False
+ vendor_prefix = ''
+
+try:
+ from ._version import version as __version__
+except ImportError:
+ # broken installation, we don't even try
+ __version__ = "unknown"
+
+
+apipkg.initpkg(__name__, attr={'_apipkg': apipkg, 'error': error}, exportdefs={
+ # access to all standard lib modules
+ 'std': '._std:std',
+
+ '_pydir' : '.__metainfo:pydir',
+ 'version': 'py:__version__', # backward compatibility
+
+ # pytest-2.0 has a flat namespace, we use alias modules
+ # to keep old references compatible
+ 'test' : 'pytest',
+
+ # hook into the top-level standard library
+ 'process' : {
+ '__doc__' : '._process:__doc__',
+ 'cmdexec' : '._process.cmdexec:cmdexec',
+ 'kill' : '._process.killproc:kill',
+ 'ForkedFunc' : '._process.forkedfunc:ForkedFunc',
+ },
+
+ 'apipkg' : {
+ 'initpkg' : vendor_prefix + 'apipkg:initpkg',
+ 'ApiModule' : vendor_prefix + 'apipkg:ApiModule',
+ },
+
+ 'iniconfig' : {
+ 'IniConfig' : vendor_prefix + 'iniconfig:IniConfig',
+ 'ParseError' : vendor_prefix + 'iniconfig:ParseError',
+ },
+
+ 'path' : {
+ '__doc__' : '._path:__doc__',
+ 'svnwc' : '._path.svnwc:SvnWCCommandPath',
+ 'svnurl' : '._path.svnurl:SvnCommandPath',
+ 'local' : '._path.local:LocalPath',
+ 'SvnAuth' : '._path.svnwc:SvnAuth',
+ },
+
+ # python inspection/code-generation API
+ 'code' : {
+ '__doc__' : '._code:__doc__',
+ 'compile' : '._code.source:compile_',
+ 'Source' : '._code.source:Source',
+ 'Code' : '._code.code:Code',
+ 'Frame' : '._code.code:Frame',
+ 'ExceptionInfo' : '._code.code:ExceptionInfo',
+ 'Traceback' : '._code.code:Traceback',
+ 'getfslineno' : '._code.source:getfslineno',
+ 'getrawcode' : '._code.code:getrawcode',
+ 'patch_builtins' : '._code.code:patch_builtins',
+ 'unpatch_builtins' : '._code.code:unpatch_builtins',
+ '_AssertionError' : '._code.assertion:AssertionError',
+ '_reinterpret_old' : '._code.assertion:reinterpret_old',
+ '_reinterpret' : '._code.assertion:reinterpret',
+ '_reprcompare' : '._code.assertion:_reprcompare',
+ '_format_explanation' : '._code.assertion:_format_explanation',
+ },
+
+ # backports and additions of builtins
+ 'builtin' : {
+ '__doc__' : '._builtin:__doc__',
+ 'enumerate' : '._builtin:enumerate',
+ 'reversed' : '._builtin:reversed',
+ 'sorted' : '._builtin:sorted',
+ 'any' : '._builtin:any',
+ 'all' : '._builtin:all',
+ 'set' : '._builtin:set',
+ 'frozenset' : '._builtin:frozenset',
+ 'BaseException' : '._builtin:BaseException',
+ 'GeneratorExit' : '._builtin:GeneratorExit',
+ '_sysex' : '._builtin:_sysex',
+ 'print_' : '._builtin:print_',
+ '_reraise' : '._builtin:_reraise',
+ '_tryimport' : '._builtin:_tryimport',
+ 'exec_' : '._builtin:exec_',
+ '_basestring' : '._builtin:_basestring',
+ '_totext' : '._builtin:_totext',
+ '_isbytes' : '._builtin:_isbytes',
+ '_istext' : '._builtin:_istext',
+ '_getimself' : '._builtin:_getimself',
+ '_getfuncdict' : '._builtin:_getfuncdict',
+ '_getcode' : '._builtin:_getcode',
+ 'builtins' : '._builtin:builtins',
+ 'execfile' : '._builtin:execfile',
+ 'callable' : '._builtin:callable',
+ 'bytes' : '._builtin:bytes',
+ 'text' : '._builtin:text',
+ },
+
+ # input-output helping
+ 'io' : {
+ '__doc__' : '._io:__doc__',
+ 'dupfile' : '._io.capture:dupfile',
+ 'TextIO' : '._io.capture:TextIO',
+ 'BytesIO' : '._io.capture:BytesIO',
+ 'FDCapture' : '._io.capture:FDCapture',
+ 'StdCapture' : '._io.capture:StdCapture',
+ 'StdCaptureFD' : '._io.capture:StdCaptureFD',
+ 'TerminalWriter' : '._io.terminalwriter:TerminalWriter',
+ 'ansi_print' : '._io.terminalwriter:ansi_print',
+ 'get_terminal_width' : '._io.terminalwriter:get_terminal_width',
+ 'saferepr' : '._io.saferepr:saferepr',
+ },
+
+ # small and mean xml/html generation
+ 'xml' : {
+ '__doc__' : '._xmlgen:__doc__',
+ 'html' : '._xmlgen:html',
+ 'Tag' : '._xmlgen:Tag',
+ 'raw' : '._xmlgen:raw',
+ 'Namespace' : '._xmlgen:Namespace',
+ 'escape' : '._xmlgen:escape',
+ },
+
+ 'log' : {
+ # logging API ('producers' and 'consumers' connected via keywords)
+ '__doc__' : '._log:__doc__',
+ '_apiwarn' : '._log.warning:_apiwarn',
+ 'Producer' : '._log.log:Producer',
+ 'setconsumer' : '._log.log:setconsumer',
+ '_setstate' : '._log.log:setstate',
+ '_getstate' : '._log.log:getstate',
+ 'Path' : '._log.log:Path',
+ 'STDOUT' : '._log.log:STDOUT',
+ 'STDERR' : '._log.log:STDERR',
+ 'Syslog' : '._log.log:Syslog',
+ },
+
+})
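
A small sketch of what the apipkg export table above means in practice: attributes of the "py" namespace are resolved lazily on first access, so py.path.local maps to py._path.local:LocalPath and py.io.saferepr to py._io.saferepr:saferepr (assumes the package is importable):

    import py

    p = py.path.local(".")            # resolved through apipkg on first access
    print(type(p))                    # <class 'py._path.local.LocalPath'>
    print(py.io.saferepr([1, 2, 3]))  # '[1, 2, 3]'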
diff --git a/third_party/python/py/py/__metainfo.py b/third_party/python/py/py/__metainfo.py
new file mode 100644
index 0000000000..12581eb7af
--- /dev/null
+++ b/third_party/python/py/py/__metainfo.py
@@ -0,0 +1,2 @@
+import py
+pydir = py.path.local(py.__file__).dirpath()
diff --git a/third_party/python/py/py/_builtin.py b/third_party/python/py/py/_builtin.py
new file mode 100644
index 0000000000..52ee9d79ca
--- /dev/null
+++ b/third_party/python/py/py/_builtin.py
@@ -0,0 +1,248 @@
+import sys
+
+try:
+ reversed = reversed
+except NameError:
+ def reversed(sequence):
+ """reversed(sequence) -> reverse iterator over values of the sequence
+
+ Return a reverse iterator
+ """
+ if hasattr(sequence, '__reversed__'):
+ return sequence.__reversed__()
+ if not hasattr(sequence, '__getitem__'):
+ raise TypeError("argument to reversed() must be a sequence")
+ return reversed_iterator(sequence)
+
+ class reversed_iterator(object):
+
+ def __init__(self, seq):
+ self.seq = seq
+ self.remaining = len(seq)
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ i = self.remaining
+ if i > 0:
+ i -= 1
+ item = self.seq[i]
+ self.remaining = i
+ return item
+ raise StopIteration
+
+ def __length_hint__(self):
+ return self.remaining
+
+try:
+ any = any
+except NameError:
+ def any(iterable):
+ for x in iterable:
+ if x:
+ return True
+ return False
+
+try:
+ all = all
+except NameError:
+ def all(iterable):
+ for x in iterable:
+ if not x:
+ return False
+ return True
+
+try:
+ sorted = sorted
+except NameError:
+ builtin_cmp = cmp # need to use cmp as keyword arg
+
+ def sorted(iterable, cmp=None, key=None, reverse=0):
+ use_cmp = None
+ if key is not None:
+ if cmp is None:
+ def use_cmp(x, y):
+ return builtin_cmp(x[0], y[0])
+ else:
+ def use_cmp(x, y):
+ return cmp(x[0], y[0])
+ l = [(key(element), element) for element in iterable]
+ else:
+ if cmp is not None:
+ use_cmp = cmp
+ l = list(iterable)
+ if use_cmp is not None:
+ l.sort(use_cmp)
+ else:
+ l.sort()
+ if reverse:
+ l.reverse()
+ if key is not None:
+ return [element for (_, element) in l]
+ return l
+
+try:
+ set, frozenset = set, frozenset
+except NameError:
+ from sets import set, frozenset
+
+# pass through
+enumerate = enumerate
+
+try:
+ BaseException = BaseException
+except NameError:
+ BaseException = Exception
+
+try:
+ GeneratorExit = GeneratorExit
+except NameError:
+ class GeneratorExit(Exception):
+ """ This exception is never raised, it is there to make it possible to
+ write code compatible with CPython 2.5 even in lower CPython
+ versions."""
+ pass
+ GeneratorExit.__module__ = 'exceptions'
+
+_sysex = (KeyboardInterrupt, SystemExit, MemoryError, GeneratorExit)
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return hasattr(obj, "__call__")
+
+if sys.version_info >= (3, 0):
+ exec ("print_ = print ; exec_=exec")
+ import builtins
+
+ # some backward compatibility helpers
+ _basestring = str
+ def _totext(obj, encoding=None, errors=None):
+ if isinstance(obj, bytes):
+ if errors is None:
+ obj = obj.decode(encoding)
+ else:
+ obj = obj.decode(encoding, errors)
+ elif not isinstance(obj, str):
+ obj = str(obj)
+ return obj
+
+ def _isbytes(x):
+ return isinstance(x, bytes)
+ def _istext(x):
+ return isinstance(x, str)
+
+ text = str
+ bytes = bytes
+
+
+ def _getimself(function):
+ return getattr(function, '__self__', None)
+
+ def _getfuncdict(function):
+ return getattr(function, "__dict__", None)
+
+ def _getcode(function):
+ return getattr(function, "__code__", None)
+
+ def execfile(fn, globs=None, locs=None):
+ if globs is None:
+ back = sys._getframe(1)
+ globs = back.f_globals
+ locs = back.f_locals
+ del back
+ elif locs is None:
+ locs = globs
+ fp = open(fn, "r")
+ try:
+ source = fp.read()
+ finally:
+ fp.close()
+ co = compile(source, fn, "exec", dont_inherit=True)
+ exec_(co, globs, locs)
+
+else:
+ import __builtin__ as builtins
+ _totext = unicode
+ _basestring = basestring
+ text = unicode
+ bytes = str
+ execfile = execfile
+ callable = callable
+ def _isbytes(x):
+ return isinstance(x, str)
+ def _istext(x):
+ return isinstance(x, unicode)
+
+ def _getimself(function):
+ return getattr(function, 'im_self', None)
+
+ def _getfuncdict(function):
+ return getattr(function, "__dict__", None)
+
+ def _getcode(function):
+ try:
+ return getattr(function, "__code__")
+ except AttributeError:
+ return getattr(function, "func_code", None)
+
+ def print_(*args, **kwargs):
+ """ minimal backport of py3k print statement. """
+ sep = ' '
+ if 'sep' in kwargs:
+ sep = kwargs.pop('sep')
+ end = '\n'
+ if 'end' in kwargs:
+ end = kwargs.pop('end')
+ file = 'file' in kwargs and kwargs.pop('file') or sys.stdout
+ if kwargs:
+ args = ", ".join([str(x) for x in kwargs])
+ raise TypeError("invalid keyword arguments: %s" % args)
+ at_start = True
+ for x in args:
+ if not at_start:
+ file.write(sep)
+ file.write(str(x))
+ at_start = False
+ file.write(end)
+
+ def exec_(obj, globals=None, locals=None):
+ """ minimal backport of py3k exec statement. """
+ __tracebackhide__ = True
+ if globals is None:
+ frame = sys._getframe(1)
+ globals = frame.f_globals
+ if locals is None:
+ locals = frame.f_locals
+ elif locals is None:
+ locals = globals
+ exec2(obj, globals, locals)
+
+if sys.version_info >= (3, 0):
+ def _reraise(cls, val, tb):
+ __tracebackhide__ = True
+ assert hasattr(val, '__traceback__')
+ raise cls.with_traceback(val, tb)
+else:
+ exec ("""
+def _reraise(cls, val, tb):
+ __tracebackhide__ = True
+ raise cls, val, tb
+def exec2(obj, globals, locals):
+ __tracebackhide__ = True
+ exec obj in globals, locals
+""")
+
+def _tryimport(*names):
+ """ return the first successfully imported module. """
+ assert names
+ for name in names:
+ try:
+ __import__(name)
+ except ImportError:
+ excinfo = sys.exc_info()
+ else:
+ return sys.modules[name]
+ _reraise(*excinfo)
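
A minimal sketch of the compatibility helpers above in use; on Python 3 most of them are thin aliases for the real builtins (assumes the py package is importable):

    import py

    py.builtin.print_("hello", "world", sep=", ")    # backported print
    text = py.builtin._totext(b"abc", "ascii")       # bytes -> text
    assert py.builtin._istext(text) and not py.builtin._isbytes(text)

    ns = {}
    py.builtin.exec_("x = 40 + 2", ns)               # backported exec
    assert ns["x"] == 42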
diff --git a/third_party/python/py/py/_code/__init__.py b/third_party/python/py/py/_code/__init__.py
new file mode 100644
index 0000000000..f15acf8513
--- /dev/null
+++ b/third_party/python/py/py/_code/__init__.py
@@ -0,0 +1 @@
+""" python inspection/code generation API """
diff --git a/third_party/python/py/py/_code/_assertionnew.py b/third_party/python/py/py/_code/_assertionnew.py
new file mode 100644
index 0000000000..d03f29d870
--- /dev/null
+++ b/third_party/python/py/py/_code/_assertionnew.py
@@ -0,0 +1,322 @@
+"""
+Find intermediate evaluation results in assert statements through builtin AST.
+This should replace _assertionold.py eventually.
+"""
+
+import sys
+import ast
+
+import py
+from py._code.assertion import _format_explanation, BuiltinAssertionError
+
+
+def _is_ast_expr(node):
+ return isinstance(node, ast.expr)
+def _is_ast_stmt(node):
+ return isinstance(node, ast.stmt)
+
+
+class Failure(Exception):
+ """Error found while interpreting AST."""
+
+ def __init__(self, explanation=""):
+ self.cause = sys.exc_info()
+ self.explanation = explanation
+
+
+def interpret(source, frame, should_fail=False):
+ mod = ast.parse(source)
+ visitor = DebugInterpreter(frame)
+ try:
+ visitor.visit(mod)
+ except Failure:
+ failure = sys.exc_info()[1]
+ return getfailure(failure)
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --no-assert)")
+
+def run(offending_line, frame=None):
+ if frame is None:
+ frame = py.code.Frame(sys._getframe(1))
+ return interpret(offending_line, frame)
+
+def getfailure(failure):
+ explanation = _format_explanation(failure.explanation)
+ value = failure.cause[1]
+ if str(value):
+ lines = explanation.splitlines()
+ if not lines:
+ lines.append("")
+ lines[0] += " << %s" % (value,)
+ explanation = "\n".join(lines)
+ text = "%s: %s" % (failure.cause[0].__name__, explanation)
+ if text.startswith("AssertionError: assert "):
+ text = text[16:]
+ return text
+
+
+operator_map = {
+ ast.BitOr : "|",
+ ast.BitXor : "^",
+ ast.BitAnd : "&",
+ ast.LShift : "<<",
+ ast.RShift : ">>",
+ ast.Add : "+",
+ ast.Sub : "-",
+ ast.Mult : "*",
+ ast.Div : "/",
+ ast.FloorDiv : "//",
+ ast.Mod : "%",
+ ast.Eq : "==",
+ ast.NotEq : "!=",
+ ast.Lt : "<",
+ ast.LtE : "<=",
+ ast.Gt : ">",
+ ast.GtE : ">=",
+ ast.Pow : "**",
+ ast.Is : "is",
+ ast.IsNot : "is not",
+ ast.In : "in",
+ ast.NotIn : "not in"
+}
+
+unary_map = {
+ ast.Not : "not %s",
+ ast.Invert : "~%s",
+ ast.USub : "-%s",
+ ast.UAdd : "+%s"
+}
+
+
+class DebugInterpreter(ast.NodeVisitor):
+ """Interpret AST nodes to gleam useful debugging information. """
+
+ def __init__(self, frame):
+ self.frame = frame
+
+ def generic_visit(self, node):
+ # Fallback when we don't have a special implementation.
+ if _is_ast_expr(node):
+ mod = ast.Expression(node)
+ co = self._compile(mod)
+ try:
+ result = self.frame.eval(co)
+ except Exception:
+ raise Failure()
+ explanation = self.frame.repr(result)
+ return explanation, result
+ elif _is_ast_stmt(node):
+ mod = ast.Module([node])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co)
+ except Exception:
+ raise Failure()
+ return None, None
+ else:
+ raise AssertionError("can't handle %s" %(node,))
+
+ def _compile(self, source, mode="eval"):
+ return compile(source, "<assertion interpretation>", mode)
+
+ def visit_Expr(self, expr):
+ return self.visit(expr.value)
+
+ def visit_Module(self, mod):
+ for stmt in mod.body:
+ self.visit(stmt)
+
+ def visit_Name(self, name):
+ explanation, result = self.generic_visit(name)
+ # See if the name is local.
+ source = "%r in locals() is not globals()" % (name.id,)
+ co = self._compile(source)
+ try:
+ local = self.frame.eval(co)
+ except Exception:
+ # have to assume it isn't
+ local = False
+ if not local:
+ return name.id, result
+ return explanation, result
+
+ def visit_Compare(self, comp):
+ left = comp.left
+ left_explanation, left_result = self.visit(left)
+ for op, next_op in zip(comp.ops, comp.comparators):
+ next_explanation, next_result = self.visit(next_op)
+ op_symbol = operator_map[op.__class__]
+ explanation = "%s %s %s" % (left_explanation, op_symbol,
+ next_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=next_result)
+ except Exception:
+ raise Failure(explanation)
+ try:
+ if not result:
+ break
+ except KeyboardInterrupt:
+ raise
+ except:
+ break
+ left_explanation, left_result = next_explanation, next_result
+
+ rcomp = py.code._reprcompare
+ if rcomp:
+ res = rcomp(op_symbol, left_result, next_result)
+ if res:
+ explanation = res
+ return explanation, result
+
+ def visit_BoolOp(self, boolop):
+ is_or = isinstance(boolop.op, ast.Or)
+ explanations = []
+ for operand in boolop.values:
+ explanation, result = self.visit(operand)
+ explanations.append(explanation)
+ if result == is_or:
+ break
+ name = is_or and " or " or " and "
+ explanation = "(" + name.join(explanations) + ")"
+ return explanation, result
+
+ def visit_UnaryOp(self, unary):
+ pattern = unary_map[unary.op.__class__]
+ operand_explanation, operand_result = self.visit(unary.operand)
+ explanation = pattern % (operand_explanation,)
+ co = self._compile(pattern % ("__exprinfo_expr",))
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=operand_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_BinOp(self, binop):
+ left_explanation, left_result = self.visit(binop.left)
+ right_explanation, right_result = self.visit(binop.right)
+ symbol = operator_map[binop.op.__class__]
+ explanation = "(%s %s %s)" % (left_explanation, symbol,
+ right_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=right_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_Call(self, call):
+ func_explanation, func = self.visit(call.func)
+ arg_explanations = []
+ ns = {"__exprinfo_func" : func}
+ arguments = []
+ for arg in call.args:
+ arg_explanation, arg_result = self.visit(arg)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ arguments.append(arg_name)
+ arg_explanations.append(arg_explanation)
+ for keyword in call.keywords:
+ arg_explanation, arg_result = self.visit(keyword.value)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ keyword_source = "%s=%%s" % (keyword.arg)
+ arguments.append(keyword_source % (arg_name,))
+ arg_explanations.append(keyword_source % (arg_explanation,))
+ if call.starargs:
+ arg_explanation, arg_result = self.visit(call.starargs)
+ arg_name = "__exprinfo_star"
+ ns[arg_name] = arg_result
+ arguments.append("*%s" % (arg_name,))
+ arg_explanations.append("*%s" % (arg_explanation,))
+ if call.kwargs:
+ arg_explanation, arg_result = self.visit(call.kwargs)
+ arg_name = "__exprinfo_kwds"
+ ns[arg_name] = arg_result
+ arguments.append("**%s" % (arg_name,))
+ arg_explanations.append("**%s" % (arg_explanation,))
+ args_explained = ", ".join(arg_explanations)
+ explanation = "%s(%s)" % (func_explanation, args_explained)
+ args = ", ".join(arguments)
+ source = "__exprinfo_func(%s)" % (args,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, **ns)
+ except Exception:
+ raise Failure(explanation)
+ pattern = "%s\n{%s = %s\n}"
+ rep = self.frame.repr(result)
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def _is_builtin_name(self, name):
+ pattern = "%r not in globals() and %r not in locals()"
+ source = pattern % (name.id, name.id)
+ co = self._compile(source)
+ try:
+ return self.frame.eval(co)
+ except Exception:
+ return False
+
+ def visit_Attribute(self, attr):
+ if not isinstance(attr.ctx, ast.Load):
+ return self.generic_visit(attr)
+ source_explanation, source_result = self.visit(attr.value)
+ explanation = "%s.%s" % (source_explanation, attr.attr)
+ source = "__exprinfo_expr.%s" % (attr.attr,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ raise Failure(explanation)
+ explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
+ self.frame.repr(result),
+ source_explanation, attr.attr)
+ # Check if the attr is from an instance.
+ source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
+ source = source % (attr.attr,)
+ co = self._compile(source)
+ try:
+ from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ from_instance = True
+ if from_instance:
+ rep = self.frame.repr(result)
+ pattern = "%s\n{%s = %s\n}"
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def visit_Assert(self, assrt):
+ test_explanation, test_result = self.visit(assrt.test)
+ if test_explanation.startswith("False\n{False =") and \
+ test_explanation.endswith("\n"):
+ test_explanation = test_explanation[15:-2]
+ explanation = "assert %s" % (test_explanation,)
+ if not test_result:
+ try:
+ raise BuiltinAssertionError
+ except Exception:
+ raise Failure(explanation)
+ return explanation, test_result
+
+ def visit_Assign(self, assign):
+ value_explanation, value_result = self.visit(assign.value)
+ explanation = "... = %s" % (value_explanation,)
+ name = ast.Name("__exprinfo_expr", ast.Load(),
+ lineno=assign.value.lineno,
+ col_offset=assign.value.col_offset)
+ new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
+ col_offset=assign.col_offset)
+ mod = ast.Module([new_assign])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co, __exprinfo_expr=value_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, value_result
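
A hedged sketch of the reinterpretation entry point above: given the source of a failing assert and the frame it ran in, interpret() rebuilds a human-readable explanation. Hypothetical usage; callers normally reach it via py.code._reinterpret, and this vendored copy targets the older Python versions the surrounding code supports:

    import sys
    import py

    def explain():
        x, y = 3, 4
        try:
            assert x == y
        except AssertionError:
            frame = py.code.Frame(sys._getframe())
            return py.code._reinterpret("assert x == y", frame, should_fail=True)

    print(explain())   # e.g. "assert 3 == 4"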
diff --git a/third_party/python/py/py/_code/_assertionold.py b/third_party/python/py/py/_code/_assertionold.py
new file mode 100644
index 0000000000..1bb70a875d
--- /dev/null
+++ b/third_party/python/py/py/_code/_assertionold.py
@@ -0,0 +1,556 @@
+import py
+import sys, inspect
+from compiler import parse, ast, pycodegen
+from py._code.assertion import BuiltinAssertionError, _format_explanation
+import types
+
+passthroughex = py.builtin._sysex
+
+class Failure:
+ def __init__(self, node):
+ self.exc, self.value, self.tb = sys.exc_info()
+ self.node = node
+
+class View(object):
+ """View base class.
+
+ If C is a subclass of View, then C(x) creates a proxy object around
+ the object x. The actual class of the proxy is not C in general,
+ but a *subclass* of C determined by the rules below. To avoid confusion
+ we call view class the class of the proxy (a subclass of C, so of View)
+ and object class the class of x.
+
+ Attributes and methods not found in the proxy are automatically read on x.
+ Other operations like setting attributes are performed on the proxy, as
+ determined by its view class. The object x is available from the proxy
+ as its __obj__ attribute.
+
+ The view class selection is determined by the __view__ tuples and the
+ optional __viewkey__ method. By default, the selected view class is the
+ most specific subclass of C whose __view__ mentions the class of x.
+ If no such subclass is found, the search proceeds with the parent
+ object classes. For example, C(True) will first look for a subclass
+ of C with __view__ = (..., bool, ...) and only if it doesn't find any
+ look for one with __view__ = (..., int, ...), and then ..., object,...
+ If everything fails the class C itself is considered to be the default.
+
+ Alternatively, the view class selection can be driven by another aspect
+ of the object x, instead of the class of x, by overriding __viewkey__.
+ See last example at the end of this module.
+ """
+
+ _viewcache = {}
+ __view__ = ()
+
+ def __new__(rootclass, obj, *args, **kwds):
+ self = object.__new__(rootclass)
+ self.__obj__ = obj
+ self.__rootclass__ = rootclass
+ key = self.__viewkey__()
+ try:
+ self.__class__ = self._viewcache[key]
+ except KeyError:
+ self.__class__ = self._selectsubclass(key)
+ return self
+
+ def __getattr__(self, attr):
+ # attributes not found in the normal hierarchy rooted on View
+ # are looked up in the object's real class
+ return getattr(self.__obj__, attr)
+
+ def __viewkey__(self):
+ return self.__obj__.__class__
+
+ def __matchkey__(self, key, subclasses):
+ if inspect.isclass(key):
+ keys = inspect.getmro(key)
+ else:
+ keys = [key]
+ for key in keys:
+ result = [C for C in subclasses if key in C.__view__]
+ if result:
+ return result
+ return []
+
+ def _selectsubclass(self, key):
+ subclasses = list(enumsubclasses(self.__rootclass__))
+ for C in subclasses:
+ if not isinstance(C.__view__, tuple):
+ C.__view__ = (C.__view__,)
+ choices = self.__matchkey__(key, subclasses)
+ if not choices:
+ return self.__rootclass__
+ elif len(choices) == 1:
+ return choices[0]
+ else:
+ # combine the multiple choices
+ return type('?', tuple(choices), {})
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
+
+
+def enumsubclasses(cls):
+ for subcls in cls.__subclasses__():
+ for subsubclass in enumsubclasses(subcls):
+ yield subsubclass
+ yield cls
+
+
+class Interpretable(View):
+ """A parse tree node with a few extra methods."""
+ explanation = None
+
+ def is_builtin(self, frame):
+ return False
+
+ def eval(self, frame):
+ # fall-back for unknown expression nodes
+ try:
+ expr = ast.Expression(self.__obj__)
+ expr.filename = '<eval>'
+ self.__obj__.filename = '<eval>'
+ co = pycodegen.ExpressionCodeGenerator(expr).getCode()
+ result = frame.eval(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.result = result
+ self.explanation = self.explanation or frame.repr(self.result)
+
+ def run(self, frame):
+ # fall-back for unknown statement nodes
+ try:
+ expr = ast.Module(None, ast.Stmt([self.__obj__]))
+ expr.filename = '<run>'
+ co = pycodegen.ModuleCodeGenerator(expr).getCode()
+ frame.exec_(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ def nice_explanation(self):
+ return _format_explanation(self.explanation)
+
+
+class Name(Interpretable):
+ __view__ = ast.Name
+
+ def is_local(self, frame):
+ source = '%r in locals() is not globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_global(self, frame):
+ source = '%r in globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_builtin(self, frame):
+ source = '%r not in locals() and %r not in globals()' % (
+ self.name, self.name)
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ super(Name, self).eval(frame)
+ if not self.is_local(frame):
+ self.explanation = self.name
+
+class Compare(Interpretable):
+ __view__ = ast.Compare
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ for operation, expr2 in self.ops:
+ if hasattr(self, 'result'):
+ # shortcutting in chained expressions
+ if not frame.is_true(self.result):
+ break
+ expr2 = Interpretable(expr2)
+ expr2.eval(frame)
+ self.explanation = "%s %s %s" % (
+ expr.explanation, operation, expr2.explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % operation
+ try:
+ self.result = frame.eval(source,
+ __exprinfo_left=expr.result,
+ __exprinfo_right=expr2.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ expr = expr2
+
+class And(Interpretable):
+ __view__ = ast.And
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if not frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' and '.join(explanations) + ')'
+
+class Or(Interpretable):
+ __view__ = ast.Or
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' or '.join(explanations) + ')'
+
+
+# == Unary operations ==
+keepalive = []
+for astclass, astpattern in {
+ ast.Not : 'not __exprinfo_expr',
+ ast.Invert : '(~__exprinfo_expr)',
+ }.items():
+
+ class UnaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.explanation = astpattern.replace('__exprinfo_expr',
+ expr.explanation)
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(UnaryArith)
+
+# == Binary operations ==
+for astclass, astpattern in {
+ ast.Add : '(__exprinfo_left + __exprinfo_right)',
+ ast.Sub : '(__exprinfo_left - __exprinfo_right)',
+ ast.Mul : '(__exprinfo_left * __exprinfo_right)',
+ ast.Div : '(__exprinfo_left / __exprinfo_right)',
+ ast.Mod : '(__exprinfo_left % __exprinfo_right)',
+ ast.Power : '(__exprinfo_left ** __exprinfo_right)',
+ }.items():
+
+ class BinaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ left = Interpretable(self.left)
+ left.eval(frame)
+ right = Interpretable(self.right)
+ right.eval(frame)
+ self.explanation = (astpattern
+ .replace('__exprinfo_left', left .explanation)
+ .replace('__exprinfo_right', right.explanation))
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_left=left.result,
+ __exprinfo_right=right.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(BinaryArith)
+
+
+class CallFunc(Interpretable):
+ __view__ = ast.CallFunc
+
+ def is_bool(self, frame):
+ source = 'isinstance(__exprinfo_value, bool)'
+ try:
+ return frame.is_true(frame.eval(source,
+ __exprinfo_value=self.result))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ node = Interpretable(self.node)
+ node.eval(frame)
+ explanations = []
+ vars = {'__exprinfo_fn': node.result}
+ source = '__exprinfo_fn('
+ for a in self.args:
+ if isinstance(a, ast.Keyword):
+ keyword = a.name
+ a = a.expr
+ else:
+ keyword = None
+ a = Interpretable(a)
+ a.eval(frame)
+ argname = '__exprinfo_%d' % len(vars)
+ vars[argname] = a.result
+ if keyword is None:
+ source += argname + ','
+ explanations.append(a.explanation)
+ else:
+ source += '%s=%s,' % (keyword, argname)
+ explanations.append('%s=%s' % (keyword, a.explanation))
+ if self.star_args:
+ star_args = Interpretable(self.star_args)
+ star_args.eval(frame)
+ argname = '__exprinfo_star'
+ vars[argname] = star_args.result
+ source += '*' + argname + ','
+ explanations.append('*' + star_args.explanation)
+ if self.dstar_args:
+ dstar_args = Interpretable(self.dstar_args)
+ dstar_args.eval(frame)
+ argname = '__exprinfo_kwds'
+ vars[argname] = dstar_args.result
+ source += '**' + argname + ','
+ explanations.append('**' + dstar_args.explanation)
+ self.explanation = "%s(%s)" % (
+ node.explanation, ', '.join(explanations))
+ if source.endswith(','):
+ source = source[:-1]
+ source += ')'
+ try:
+ self.result = frame.eval(source, **vars)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ if not node.is_builtin(frame) or not self.is_bool(frame):
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+class Getattr(Interpretable):
+ __view__ = ast.Getattr
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ source = '__exprinfo_expr.%s' % self.attrname
+ try:
+ self.result = frame.eval(source, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.explanation = '%s.%s' % (expr.explanation, self.attrname)
+ # if the attribute comes from the instance, its value is interesting
+ source = ('hasattr(__exprinfo_expr, "__dict__") and '
+ '%r in __exprinfo_expr.__dict__' % self.attrname)
+ try:
+ from_instance = frame.is_true(
+ frame.eval(source, __exprinfo_expr=expr.result))
+ except passthroughex:
+ raise
+ except:
+ from_instance = True
+ if from_instance:
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+# == Re-interpretation of full statements ==
+
+class Assert(Interpretable):
+ __view__ = ast.Assert
+
+ def run(self, frame):
+ test = Interpretable(self.test)
+ test.eval(frame)
+ # simplify 'assert False where False = ...'
+ if (test.explanation.startswith('False\n{False = ') and
+ test.explanation.endswith('\n}')):
+ test.explanation = test.explanation[15:-2]
+ # print the result as 'assert <explanation>'
+ self.result = test.result
+ self.explanation = 'assert ' + test.explanation
+ if not frame.is_true(test.result):
+ try:
+ raise BuiltinAssertionError
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Assign(Interpretable):
+ __view__ = ast.Assign
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = '... = ' + expr.explanation
+ # fall-back-run the rest of the assignment
+ ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
+ mod = ast.Module(None, ast.Stmt([ass]))
+ mod.filename = '<run>'
+ co = pycodegen.ModuleCodeGenerator(mod).getCode()
+ try:
+ frame.exec_(co, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Discard(Interpretable):
+ __view__ = ast.Discard
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = expr.explanation
+
+class Stmt(Interpretable):
+ __view__ = ast.Stmt
+
+ def run(self, frame):
+ for stmt in self.nodes:
+ stmt = Interpretable(stmt)
+ stmt.run(frame)
+
+
+def report_failure(e):
+ explanation = e.node.nice_explanation()
+ if explanation:
+ explanation = ", in: " + explanation
+ else:
+ explanation = ""
+ sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
+
+def check(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ expr = parse(s, 'eval')
+ assert isinstance(expr, ast.Expression)
+ node = Interpretable(expr.node)
+ try:
+ node.eval(frame)
+ except passthroughex:
+ raise
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+ else:
+ if not frame.is_true(node.result):
+ sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
+
+
+###########################################################
+# API / Entry points
+# #########################################################
+
+def interpret(source, frame, should_fail=False):
+ module = Interpretable(parse(source, 'exec').node)
+ #print "got module", module
+ if isinstance(frame, types.FrameType):
+ frame = py.code.Frame(frame)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ return getfailure(e)
+ except passthroughex:
+ raise
+ except:
+ import traceback
+ traceback.print_exc()
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --nomagic)")
+ else:
+ return None
+
+def getmsg(excinfo):
+ if isinstance(excinfo, tuple):
+ excinfo = py.code.ExceptionInfo(excinfo)
+ #frame, line = gettbline(tb)
+ #frame = py.code.Frame(frame)
+ #return interpret(line, frame)
+
+ tb = excinfo.traceback[-1]
+ source = str(tb.statement).strip()
+ x = interpret(source, tb.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ return x
+
+def getfailure(e):
+ explanation = e.node.nice_explanation()
+ if str(e.value):
+ lines = explanation.split('\n')
+ lines[0] += " << %s" % (e.value,)
+ explanation = '\n'.join(lines)
+ text = "%s: %s" % (e.exc.__name__, explanation)
+ if text.startswith('AssertionError: assert '):
+ text = text[16:]
+ return text
+
+def run(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ module = Interpretable(parse(s, 'exec').node)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+
+
+if __name__ == '__main__':
+ # example:
+ def f():
+ return 5
+ def g():
+ return 3
+ def h(x):
+ return 'never'
+ check("f() * g() == 5")
+ check("not f()")
+ check("not (f() and g() or 0)")
+ check("f() == g()")
+ i = 4
+ check("i == f()")
+ check("len(f()) == 0")
+ check("isinstance(2+3+4, float)")
+
+ run("x = i")
+ check("x == 5")
+
+ run("assert not f(), 'oops'")
+ run("a, b, c = 1, 2")
+ run("a, b, c = f()")
+
+ check("max([f(),g()]) == 4")
+ check("'hello'[g()] == 'h'")
+ run("'guk%d' % h(f())")
diff --git a/third_party/python/py/py/_code/_py2traceback.py b/third_party/python/py/py/_code/_py2traceback.py
new file mode 100644
index 0000000000..d65e27cb73
--- /dev/null
+++ b/third_party/python/py/py/_code/_py2traceback.py
@@ -0,0 +1,79 @@
+# copied from python-2.7.3's traceback.py
+# CHANGES:
+# - some_str is replaced, trying to create unicode strings
+#
+import types
+
+def format_exception_only(etype, value):
+ """Format the exception part of a traceback.
+
+ The arguments are the exception type and value such as given by
+ sys.last_type and sys.last_value. The return value is a list of
+ strings, each ending in a newline.
+
+ Normally, the list contains a single string; however, for
+ SyntaxError exceptions, it contains several lines that (when
+ printed) display detailed information about where the syntax
+ error occurred.
+
+ The message indicating which exception occurred is always the last
+ string in the list.
+
+ """
+
+ # An instance should not have a meaningful value parameter, but
+ # sometimes does, particularly for string exceptions, such as
+ # >>> raise string1, string2 # deprecated
+ #
+ # Clear these out first because issubtype(string1, SyntaxError)
+ # would throw another exception and mask the original problem.
+ if (isinstance(etype, BaseException) or
+ isinstance(etype, types.InstanceType) or
+ etype is None or type(etype) is str):
+ return [_format_final_exc_line(etype, value)]
+
+ stype = etype.__name__
+
+ if not issubclass(etype, SyntaxError):
+ return [_format_final_exc_line(stype, value)]
+
+ # It was a syntax error; show exactly where the problem was found.
+ lines = []
+ try:
+ msg, (filename, lineno, offset, badline) = value.args
+ except Exception:
+ pass
+ else:
+ filename = filename or "<string>"
+ lines.append(' File "%s", line %d\n' % (filename, lineno))
+ if badline is not None:
+ lines.append(' %s\n' % badline.strip())
+ if offset is not None:
+ caretspace = badline.rstrip('\n')[:offset].lstrip()
+                # non-space whitespace (like tabs) must be kept for alignment
+ caretspace = ((c.isspace() and c or ' ') for c in caretspace)
+ # only three spaces to account for offset1 == pos 0
+ lines.append(' %s^\n' % ''.join(caretspace))
+ value = msg
+
+ lines.append(_format_final_exc_line(stype, value))
+ return lines
+
+def _format_final_exc_line(etype, value):
+ """Return a list of a single line -- normal case for format_exception_only"""
+ valuestr = _some_str(value)
+ if value is None or not valuestr:
+ line = "%s\n" % etype
+ else:
+ line = "%s: %s\n" % (etype, valuestr)
+ return line
+
+def _some_str(value):
+ try:
+ return unicode(value)
+ except Exception:
+ try:
+ return str(value)
+ except Exception:
+ pass
+ return '<unprintable %s object>' % type(value).__name__
diff --git a/third_party/python/py/py/_code/assertion.py b/third_party/python/py/py/_code/assertion.py
new file mode 100644
index 0000000000..ff1643799c
--- /dev/null
+++ b/third_party/python/py/py/_code/assertion.py
@@ -0,0 +1,90 @@
+import sys
+import py
+
+BuiltinAssertionError = py.builtin.builtins.AssertionError
+
+_reprcompare = None # if set, will be called by assert reinterp for comparison ops
+
+def _format_explanation(explanation):
+ """This formats an explanation
+
+    Normally all embedded newlines are escaped; however, there are
+    three exceptions: \n{, \n} and \n~. The first two are intended to
+    cover nested explanations, see function and attribute explanations
+ for examples (.visit_Call(), visit_Attribute()). The last one is
+ for when one explanation needs to span multiple lines, e.g. when
+ displaying diffs.
+ """
+ raw_lines = (explanation or '').split('\n')
+ # escape newlines not followed by {, } and ~
+ lines = [raw_lines[0]]
+ for l in raw_lines[1:]:
+ if l.startswith('{') or l.startswith('}') or l.startswith('~'):
+ lines.append(l)
+ else:
+ lines[-1] += '\\n' + l
+
+ result = lines[:1]
+ stack = [0]
+ stackcnt = [0]
+ for line in lines[1:]:
+ if line.startswith('{'):
+ if stackcnt[-1]:
+ s = 'and '
+ else:
+ s = 'where '
+ stack.append(len(result))
+ stackcnt[-1] += 1
+ stackcnt.append(0)
+ result.append(' +' + ' '*(len(stack)-1) + s + line[1:])
+ elif line.startswith('}'):
+ assert line.startswith('}')
+ stack.pop()
+ stackcnt.pop()
+ result[stack[-1]] += line[1:]
+ else:
+ assert line.startswith('~')
+ result.append(' '*len(stack) + line[1:])
+ assert len(stack) == 1
+ return '\n'.join(result)
+
+
+class AssertionError(BuiltinAssertionError):
+ def __init__(self, *args):
+ BuiltinAssertionError.__init__(self, *args)
+ if args:
+ try:
+ self.msg = str(args[0])
+ except py.builtin._sysex:
+ raise
+ except:
+ self.msg = "<[broken __repr__] %s at %0xd>" %(
+ args[0].__class__, id(args[0]))
+ else:
+ f = py.code.Frame(sys._getframe(1))
+ try:
+ source = f.code.fullsource
+ if source is not None:
+ try:
+ source = source.getstatement(f.lineno, assertion=True)
+ except IndexError:
+ source = None
+ else:
+ source = str(source.deindent()).strip()
+ except py.error.ENOENT:
+ source = None
+ # this can also occur during reinterpretation, when the
+ # co_filename is set to "<run>".
+ if source:
+ self.msg = reinterpret(source, f, should_fail=True)
+ else:
+ self.msg = "<could not determine information>"
+ if not self.args:
+ self.args = (self.msg,)
+
+if sys.version_info > (3, 0):
+ AssertionError.__module__ = "builtins"
+ reinterpret_old = "old reinterpretation not available for py3"
+else:
+ from py._code._assertionold import interpret as reinterpret_old
+from py._code._assertionnew import interpret as reinterpret
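
A short sketch of the escaping convention that _format_explanation() above handles: "\n{" opens a nested sub-explanation and "\n}" closes it, and nested parts are rendered as indented "where ..." lines (assumes the py package is importable):

    import py

    raw = "False\n{False = isdigit('abc')\n}"
    print(py.code._format_explanation(raw))
    # False
    #  + where False = isdigit('abc')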
diff --git a/third_party/python/py/py/_code/code.py b/third_party/python/py/py/_code/code.py
new file mode 100644
index 0000000000..dad796283f
--- /dev/null
+++ b/third_party/python/py/py/_code/code.py
@@ -0,0 +1,796 @@
+import py
+import sys
+from inspect import CO_VARARGS, CO_VARKEYWORDS, isclass
+
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+if sys.version_info[0] >= 3:
+ from traceback import format_exception_only
+else:
+ from py._code._py2traceback import format_exception_only
+
+import traceback
+
+
+class Code(object):
+ """ wrapper around Python code objects """
+ def __init__(self, rawcode):
+ if not hasattr(rawcode, "co_filename"):
+ rawcode = py.code.getrawcode(rawcode)
+ try:
+ self.filename = rawcode.co_filename
+ self.firstlineno = rawcode.co_firstlineno - 1
+ self.name = rawcode.co_name
+ except AttributeError:
+ raise TypeError("not a code object: %r" % (rawcode,))
+ self.raw = rawcode
+
+ def __eq__(self, other):
+ return self.raw == other.raw
+
+ def __ne__(self, other):
+ return not self == other
+
+ @property
+ def path(self):
+ """ return a path object pointing to source code (note that it
+ might not point to an actually existing file). """
+ p = py.path.local(self.raw.co_filename)
+ # maybe don't try this checking
+ if not p.check():
+ # XXX maybe try harder like the weird logic
+ # in the standard lib [linecache.updatecache] does?
+ p = self.raw.co_filename
+ return p
+
+ @property
+ def fullsource(self):
+ """ return a py.code.Source object for the full source file of the code
+ """
+ from py._code import source
+ full, _ = source.findsource(self.raw)
+ return full
+
+ def source(self):
+ """ return a py.code.Source object for the code object's source only
+ """
+ # return source only for that part of code
+ return py.code.Source(self.raw)
+
+ def getargs(self, var=False):
+ """ return a tuple with the argument names for the code object
+
+ if 'var' is set True also return the names of the variable and
+ keyword arguments when present
+ """
+        # handy shortcut for getting args
+ raw = self.raw
+ argcount = raw.co_argcount
+ if var:
+ argcount += raw.co_flags & CO_VARARGS
+ argcount += raw.co_flags & CO_VARKEYWORDS
+ return raw.co_varnames[:argcount]
+
+class Frame(object):
+ """Wrapper around a Python frame holding f_locals and f_globals
+ in which expressions can be evaluated."""
+
+ def __init__(self, frame):
+ self.lineno = frame.f_lineno - 1
+ self.f_globals = frame.f_globals
+ self.f_locals = frame.f_locals
+ self.raw = frame
+ self.code = py.code.Code(frame.f_code)
+
+ @property
+ def statement(self):
+ """ statement this frame is at """
+ if self.code.fullsource is None:
+ return py.code.Source("")
+ return self.code.fullsource.getstatement(self.lineno)
+
+ def eval(self, code, **vars):
+ """ evaluate 'code' in the frame
+
+ 'vars' are optional additional local variables
+
+ returns the result of the evaluation
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ return eval(code, self.f_globals, f_locals)
+
+ def exec_(self, code, **vars):
+ """ exec 'code' in the frame
+
+        'vars' are optional additional local variables
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ py.builtin.exec_(code, self.f_globals, f_locals)
+
+ def repr(self, object):
+ """ return a 'safe' (non-recursive, one-line) string repr for 'object'
+ """
+ return py.io.saferepr(object)
+
+ def is_true(self, object):
+ return object
+
+ def getargs(self, var=False):
+ """ return a list of tuples (name, value) for all arguments
+
+ if 'var' is set True also include the variable and keyword
+ arguments when present
+ """
+ retval = []
+ for arg in self.code.getargs(var):
+ try:
+ retval.append((arg, self.f_locals[arg]))
+ except KeyError:
+ pass # this can occur when using Psyco
+ return retval
+
+
+class TracebackEntry(object):
+ """ a single entry in a traceback """
+
+ _repr_style = None
+ exprinfo = None
+
+ def __init__(self, rawentry):
+ self._rawentry = rawentry
+ self.lineno = rawentry.tb_lineno - 1
+
+ def set_repr_style(self, mode):
+ assert mode in ("short", "long")
+ self._repr_style = mode
+
+ @property
+ def frame(self):
+ return py.code.Frame(self._rawentry.tb_frame)
+
+ @property
+ def relline(self):
+ return self.lineno - self.frame.code.firstlineno
+
+ def __repr__(self):
+ return "<TracebackEntry %s:%d>" % (self.frame.code.path, self.lineno+1)
+
+ @property
+ def statement(self):
+ """ py.code.Source object for the current statement """
+ source = self.frame.code.fullsource
+ return source.getstatement(self.lineno)
+
+ @property
+ def path(self):
+ """ path to the source code """
+ return self.frame.code.path
+
+ def getlocals(self):
+ return self.frame.f_locals
+    locals = property(getlocals, None, None, "locals of underlying frame")
+
+ def reinterpret(self):
+ """Reinterpret the failing statement and returns a detailed information
+ about what operations are performed."""
+ if self.exprinfo is None:
+ source = str(self.statement).strip()
+ x = py.code._reinterpret(source, self.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ self.exprinfo = x
+ return self.exprinfo
+
+ def getfirstlinesource(self):
+ # on Jython this firstlineno can be -1 apparently
+ return max(self.frame.code.firstlineno, 0)
+
+ def getsource(self, astcache=None):
+ """ return failing source code. """
+        # we use the passed-in astcache to avoid reparsing AST trees
+ # within exception info printing
+ from py._code.source import getstatementrange_ast
+ source = self.frame.code.fullsource
+ if source is None:
+ return None
+ key = astnode = None
+ if astcache is not None:
+ key = self.frame.code.path
+ if key is not None:
+ astnode = astcache.get(key, None)
+ start = self.getfirstlinesource()
+ try:
+ astnode, _, end = getstatementrange_ast(self.lineno, source,
+ astnode=astnode)
+ except SyntaxError:
+ end = self.lineno + 1
+ else:
+ if key is not None:
+ astcache[key] = astnode
+ return source[start:end]
+
+ source = property(getsource)
+
+ def ishidden(self):
+ """ return True if the current frame has a var __tracebackhide__
+ resolving to True
+
+ mostly for internal use
+ """
+ try:
+ return self.frame.f_locals['__tracebackhide__']
+ except KeyError:
+ try:
+ return self.frame.f_globals['__tracebackhide__']
+ except KeyError:
+ return False
+
+ def __str__(self):
+ try:
+ fn = str(self.path)
+ except py.error.Error:
+ fn = '???'
+ name = self.frame.code.name
+ try:
+ line = str(self.statement).lstrip()
+ except KeyboardInterrupt:
+ raise
+ except:
+ line = "???"
+ return " File %r:%d in %s\n %s\n" % (fn, self.lineno+1, name, line)
+
+ def name(self):
+ return self.frame.code.raw.co_name
+    name = property(name, None, None, "co_name of underlying code")
+
+
+class Traceback(list):
+ """ Traceback objects encapsulate and offer higher level
+ access to Traceback entries.
+ """
+ Entry = TracebackEntry
+
+ def __init__(self, tb):
+ """ initialize from given python traceback object. """
+ if hasattr(tb, 'tb_next'):
+ def f(cur):
+ while cur is not None:
+ yield self.Entry(cur)
+ cur = cur.tb_next
+ list.__init__(self, f(tb))
+ else:
+ list.__init__(self, tb)
+
+ def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
+ """ return a Traceback instance wrapping part of this Traceback
+
+        by providing any combination of path, lineno and firstlineno, the
+ first frame to start the to-be-returned traceback is determined
+
+ this allows cutting the first part of a Traceback instance e.g.
+ for formatting reasons (removing some uninteresting bits that deal
+ with handling of the exception/traceback)
+ """
+ for x in self:
+ code = x.frame.code
+ codepath = code.path
+ if ((path is None or codepath == path) and
+ (excludepath is None or not hasattr(codepath, 'relto') or
+ not codepath.relto(excludepath)) and
+ (lineno is None or x.lineno == lineno) and
+ (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
+ return Traceback(x._rawentry)
+ return self
+
+ def __getitem__(self, key):
+ val = super(Traceback, self).__getitem__(key)
+ if isinstance(key, type(slice(0))):
+ val = self.__class__(val)
+ return val
+
+ def filter(self, fn=lambda x: not x.ishidden()):
+ """ return a Traceback instance with certain items removed
+
+ fn is a function that gets a single argument, a TracebackItem
+ instance, and should return True when the item should be added
+ to the Traceback, False when not
+
+ by default this removes all the TracebackItems which are hidden
+ (see ishidden() above)
+ """
+ return Traceback(filter(fn, self))
+
+ def getcrashentry(self):
+ """ return last non-hidden traceback entry that lead
+ to the exception of a traceback.
+ """
+ for i in range(-1, -len(self)-1, -1):
+ entry = self[i]
+ if not entry.ishidden():
+ return entry
+ return self[-1]
+
+ def recursionindex(self):
+ """ return the index of the frame/TracebackItem where recursion
+ originates if appropriate, None if no recursion occurred
+ """
+ cache = {}
+ for i, entry in enumerate(self):
+ # id for the code.raw is needed to work around
+ # the strange metaprogramming in the decorator lib from pypi
+ # which generates code objects that have hash/value equality
+ #XXX needs a test
+ key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
+ #print "checking for recursion at", key
+ l = cache.setdefault(key, [])
+ if l:
+ f = entry.frame
+ loc = f.f_locals
+ for otherloc in l:
+ if f.is_true(f.eval(co_equal,
+ __recursioncache_locals_1=loc,
+ __recursioncache_locals_2=otherloc)):
+ return i
+ l.append(entry.frame.f_locals)
+ return None
+
+co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
+ '?', 'eval')
+
+class ExceptionInfo(object):
+ """ wraps sys.exc_info() objects and offers
+ help for navigating the traceback.
+ """
+ _striptext = ''
+ def __init__(self, tup=None, exprinfo=None):
+ if tup is None:
+ tup = sys.exc_info()
+ if exprinfo is None and isinstance(tup[1], AssertionError):
+ exprinfo = getattr(tup[1], 'msg', None)
+ if exprinfo is None:
+ exprinfo = str(tup[1])
+ if exprinfo and exprinfo.startswith('assert '):
+ self._striptext = 'AssertionError: '
+ self._excinfo = tup
+ #: the exception class
+ self.type = tup[0]
+ #: the exception instance
+ self.value = tup[1]
+ #: the exception raw traceback
+ self.tb = tup[2]
+ #: the exception type name
+ self.typename = self.type.__name__
+ #: the exception traceback (py.code.Traceback instance)
+ self.traceback = py.code.Traceback(self.tb)
+
+ def __repr__(self):
+ return "<ExceptionInfo %s tblen=%d>" % (
+ self.typename, len(self.traceback))
+
+ def exconly(self, tryshort=False):
+ """ return the exception as a string
+
+ when 'tryshort' resolves to True, and the exception is a
+ py.code._AssertionError, only the actual exception part of
+ the exception representation is returned (so 'AssertionError: ' is
+ removed from the beginning)
+ """
+ lines = format_exception_only(self.type, self.value)
+ text = ''.join(lines)
+ text = text.rstrip()
+ if tryshort:
+ if text.startswith(self._striptext):
+ text = text[len(self._striptext):]
+ return text
+
+ def errisinstance(self, exc):
+ """ return True if the exception is an instance of exc """
+ return isinstance(self.value, exc)
+
+ def _getreprcrash(self):
+ exconly = self.exconly(tryshort=True)
+ entry = self.traceback.getcrashentry()
+ path, lineno = entry.frame.code.raw.co_filename, entry.lineno
+ return ReprFileLocation(path, lineno+1, exconly)
+
+ def getrepr(self, showlocals=False, style="long",
+ abspath=False, tbfilter=True, funcargs=False):
+ """ return str()able representation of this exception info.
+ showlocals: show locals per traceback entry
+ style: long|short|no|native traceback style
+ tbfilter: hide entries (where __tracebackhide__ is true)
+
+        in case of style==native, tbfilter and showlocals are ignored.
+ """
+ if style == 'native':
+ return ReprExceptionInfo(ReprTracebackNative(
+ traceback.format_exception(
+ self.type,
+ self.value,
+ self.traceback[0]._rawentry,
+ )), self._getreprcrash())
+
+ fmt = FormattedExcinfo(
+ showlocals=showlocals, style=style,
+ abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
+ return fmt.repr_excinfo(self)
+
+ def __str__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return str(loc)
+
+ def __unicode__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return loc.__unicode__()
+
+
+class FormattedExcinfo(object):
+ """ presenting information about failing Functions and Generators. """
+ # for traceback entries
+ flow_marker = ">"
+ fail_marker = "E"
+
+ def __init__(self, showlocals=False, style="long",
+ abspath=True, tbfilter=True, funcargs=False):
+ self.showlocals = showlocals
+ self.style = style
+ self.tbfilter = tbfilter
+ self.funcargs = funcargs
+ self.abspath = abspath
+ self.astcache = {}
+
+ def _getindent(self, source):
+ # figure out indent for given source
+ try:
+ s = str(source.getstatement(len(source)-1))
+ except KeyboardInterrupt:
+ raise
+ except:
+ try:
+ s = str(source[-1])
+ except KeyboardInterrupt:
+ raise
+ except:
+ return 0
+ return 4 + (len(s) - len(s.lstrip()))
+
+ def _getentrysource(self, entry):
+ source = entry.getsource(self.astcache)
+ if source is not None:
+ source = source.deindent()
+ return source
+
+ def _saferepr(self, obj):
+ return py.io.saferepr(obj)
+
+ def repr_args(self, entry):
+ if self.funcargs:
+ args = []
+ for argname, argvalue in entry.frame.getargs(var=True):
+ args.append((argname, self._saferepr(argvalue)))
+ return ReprFuncArgs(args)
+
+ def get_source(self, source, line_index=-1, excinfo=None, short=False):
+ """ return formatted and marked up source lines. """
+ lines = []
+ if source is None or line_index >= len(source.lines):
+ source = py.code.Source("???")
+ line_index = 0
+ if line_index < 0:
+ line_index += len(source)
+ space_prefix = " "
+ if short:
+ lines.append(space_prefix + source.lines[line_index].strip())
+ else:
+ for line in source.lines[:line_index]:
+ lines.append(space_prefix + line)
+ lines.append(self.flow_marker + " " + source.lines[line_index])
+ for line in source.lines[line_index+1:]:
+ lines.append(space_prefix + line)
+ if excinfo is not None:
+ indent = 4 if short else self._getindent(source)
+ lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
+ return lines
+
+ def get_exconly(self, excinfo, indent=4, markall=False):
+ lines = []
+ indent = " " * indent
+ # get the real exception information out
+ exlines = excinfo.exconly(tryshort=True).split('\n')
+ failindent = self.fail_marker + indent[1:]
+ for line in exlines:
+ lines.append(failindent + line)
+ if not markall:
+ failindent = indent
+ return lines
+
+ def repr_locals(self, locals):
+ if self.showlocals:
+ lines = []
+ keys = [loc for loc in locals if loc[0] != "@"]
+ keys.sort()
+ for name in keys:
+ value = locals[name]
+ if name == '__builtins__':
+ lines.append("__builtins__ = <builtins>")
+ else:
+ # This formatting could all be handled by the
+ # _repr() function, which is only reprlib.Repr in
+ # disguise, so is very configurable.
+ str_repr = self._saferepr(value)
+ #if len(str_repr) < 70 or not isinstance(value,
+ # (list, tuple, dict)):
+ lines.append("%-10s = %s" %(name, str_repr))
+ #else:
+ # self._line("%-10s =\\" % (name,))
+ # # XXX
+ # pprint.pprint(value, stream=self.excinfowriter)
+ return ReprLocals(lines)
+
+ def repr_traceback_entry(self, entry, excinfo=None):
+ source = self._getentrysource(entry)
+ if source is None:
+ source = py.code.Source("???")
+ line_index = 0
+ else:
+ # entry.getfirstlinesource() can be -1, should be 0 on jython
+ line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
+
+ lines = []
+ style = entry._repr_style
+ if style is None:
+ style = self.style
+ if style in ("short", "long"):
+ short = style == "short"
+ reprargs = self.repr_args(entry) if not short else None
+ s = self.get_source(source, line_index, excinfo, short=short)
+ lines.extend(s)
+ if short:
+ message = "in %s" %(entry.name)
+ else:
+ message = excinfo and excinfo.typename or ""
+ path = self._makepath(entry.path)
+ filelocrepr = ReprFileLocation(path, entry.lineno+1, message)
+ localsrepr = None
+ if not short:
+ localsrepr = self.repr_locals(entry.locals)
+ return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
+ if excinfo:
+ lines.extend(self.get_exconly(excinfo, indent=4))
+ return ReprEntry(lines, None, None, None, style)
+
+ def _makepath(self, path):
+ if not self.abspath:
+ try:
+ np = py.path.local().bestrelpath(path)
+ except OSError:
+ return path
+ if len(np) < len(str(path)):
+ path = np
+ return path
+
+ def repr_traceback(self, excinfo):
+ traceback = excinfo.traceback
+ if self.tbfilter:
+ traceback = traceback.filter()
+ recursionindex = None
+ if excinfo.errisinstance(RuntimeError):
+ if "maximum recursion depth exceeded" in str(excinfo.value):
+ recursionindex = traceback.recursionindex()
+ last = traceback[-1]
+ entries = []
+ extraline = None
+ for index, entry in enumerate(traceback):
+ einfo = (last == entry) and excinfo or None
+ reprentry = self.repr_traceback_entry(entry, einfo)
+ entries.append(reprentry)
+ if index == recursionindex:
+ extraline = "!!! Recursion detected (same locals & position)"
+ break
+ return ReprTraceback(entries, extraline, style=self.style)
+
+ def repr_excinfo(self, excinfo):
+ reprtraceback = self.repr_traceback(excinfo)
+ reprcrash = excinfo._getreprcrash()
+ return ReprExceptionInfo(reprtraceback, reprcrash)
+
+class TerminalRepr:
+ def __str__(self):
+ s = self.__unicode__()
+ if sys.version_info[0] < 3:
+ s = s.encode('utf-8')
+ return s
+
+ def __unicode__(self):
+ # FYI this is called from pytest-xdist's serialization of exception
+ # information.
+ io = py.io.TextIO()
+ tw = py.io.TerminalWriter(file=io)
+ self.toterminal(tw)
+ return io.getvalue().strip()
+
+ def __repr__(self):
+ return "<%s instance at %0x>" %(self.__class__, id(self))
+
+
+class ReprExceptionInfo(TerminalRepr):
+ def __init__(self, reprtraceback, reprcrash):
+ self.reprtraceback = reprtraceback
+ self.reprcrash = reprcrash
+ self.sections = []
+
+ def addsection(self, name, content, sep="-"):
+ self.sections.append((name, content, sep))
+
+ def toterminal(self, tw):
+ self.reprtraceback.toterminal(tw)
+ for name, content, sep in self.sections:
+ tw.sep(sep, name)
+ tw.line(content)
+
+class ReprTraceback(TerminalRepr):
+ entrysep = "_ "
+
+ def __init__(self, reprentries, extraline, style):
+ self.reprentries = reprentries
+ self.extraline = extraline
+ self.style = style
+
+ def toterminal(self, tw):
+ # the entries might have different styles
+ last_style = None
+ for i, entry in enumerate(self.reprentries):
+ if entry.style == "long":
+ tw.line("")
+ entry.toterminal(tw)
+ if i < len(self.reprentries) - 1:
+ next_entry = self.reprentries[i+1]
+ if entry.style == "long" or \
+ entry.style == "short" and next_entry.style == "long":
+ tw.sep(self.entrysep)
+
+ if self.extraline:
+ tw.line(self.extraline)
+
+class ReprTracebackNative(ReprTraceback):
+ def __init__(self, tblines):
+ self.style = "native"
+ self.reprentries = [ReprEntryNative(tblines)]
+ self.extraline = None
+
+class ReprEntryNative(TerminalRepr):
+ style = "native"
+
+ def __init__(self, tblines):
+ self.lines = tblines
+
+ def toterminal(self, tw):
+ tw.write("".join(self.lines))
+
+class ReprEntry(TerminalRepr):
+ localssep = "_ "
+
+ def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
+ self.lines = lines
+ self.reprfuncargs = reprfuncargs
+ self.reprlocals = reprlocals
+ self.reprfileloc = filelocrepr
+ self.style = style
+
+ def toterminal(self, tw):
+ if self.style == "short":
+ self.reprfileloc.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ #tw.line("")
+ return
+ if self.reprfuncargs:
+ self.reprfuncargs.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ if self.reprlocals:
+ #tw.sep(self.localssep, "Locals")
+ tw.line("")
+ self.reprlocals.toterminal(tw)
+ if self.reprfileloc:
+ if self.lines:
+ tw.line("")
+ self.reprfileloc.toterminal(tw)
+
+ def __str__(self):
+ return "%s\n%s\n%s" % ("\n".join(self.lines),
+ self.reprlocals,
+ self.reprfileloc)
+
+class ReprFileLocation(TerminalRepr):
+ def __init__(self, path, lineno, message):
+ self.path = str(path)
+ self.lineno = lineno
+ self.message = message
+
+ def toterminal(self, tw):
+ # filename and lineno output for each entry,
+ # using an output format that most editors understand
+ msg = self.message
+ i = msg.find("\n")
+ if i != -1:
+ msg = msg[:i]
+ tw.line("%s:%s: %s" %(self.path, self.lineno, msg))
+
+class ReprLocals(TerminalRepr):
+ def __init__(self, lines):
+ self.lines = lines
+
+ def toterminal(self, tw):
+ for line in self.lines:
+ tw.line(line)
+
+class ReprFuncArgs(TerminalRepr):
+ def __init__(self, args):
+ self.args = args
+
+ def toterminal(self, tw):
+ if self.args:
+ linesofar = ""
+ for name, value in self.args:
+ ns = "%s = %s" %(name, value)
+ if len(ns) + len(linesofar) + 2 > tw.fullwidth:
+ if linesofar:
+ tw.line(linesofar)
+ linesofar = ns
+ else:
+ if linesofar:
+ linesofar += ", " + ns
+ else:
+ linesofar = ns
+ if linesofar:
+ tw.line(linesofar)
+ tw.line("")
+
+
+
+oldbuiltins = {}
+
+def patch_builtins(assertion=True, compile=True):
+ """ put compile and AssertionError builtins to Python's builtins. """
+ if assertion:
+ from py._code import assertion
+ l = oldbuiltins.setdefault('AssertionError', [])
+ l.append(py.builtin.builtins.AssertionError)
+ py.builtin.builtins.AssertionError = assertion.AssertionError
+ if compile:
+ l = oldbuiltins.setdefault('compile', [])
+ l.append(py.builtin.builtins.compile)
+ py.builtin.builtins.compile = py.code.compile
+
+def unpatch_builtins(assertion=True, compile=True):
+ """ remove compile and AssertionError builtins from Python builtins. """
+ if assertion:
+ py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
+ if compile:
+ py.builtin.builtins.compile = oldbuiltins['compile'].pop()
+
+def getrawcode(obj, trycall=True):
+ """ return code object for given function. """
+ try:
+ return obj.__code__
+ except AttributeError:
+ obj = getattr(obj, 'im_func', obj)
+ obj = getattr(obj, 'func_code', obj)
+ obj = getattr(obj, 'f_code', obj)
+ obj = getattr(obj, '__code__', obj)
+ if trycall and not hasattr(obj, 'co_firstlineno'):
+ if hasattr(obj, '__call__') and not isclass(obj):
+ x = getrawcode(obj.__call__, trycall=False)
+ if hasattr(x, 'co_firstlineno'):
+ return x
+ return obj
+
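The getrepr() machinery above is what turns a caught exception into the ReprExceptionInfo/ReprEntry tree rendered further down in this file. A minimal usage sketch, assuming the vendored package is importable as `py`; the failing function and the "short" style choice are illustrative only:

    import py

    def boom():
        assert 1 + 1 == 3                       # deliberately failing

    try:
        boom()
    except AssertionError:
        excinfo = py.code.ExceptionInfo()       # wraps the current sys.exc_info()
        rep = excinfo.getrepr(style="short")    # built by FormattedExcinfo above
        rep.toterminal(py.io.TerminalWriter())  # prints "file.py:N: AssertionError"-style lines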
diff --git a/third_party/python/py/py/_code/source.py b/third_party/python/py/py/_code/source.py
new file mode 100644
index 0000000000..7fc7b23a96
--- /dev/null
+++ b/third_party/python/py/py/_code/source.py
@@ -0,0 +1,410 @@
+from __future__ import generators
+
+from bisect import bisect_right
+import sys
+import inspect, tokenize
+import py
+from types import ModuleType
+cpy_compile = compile
+
+try:
+ import _ast
+ from _ast import PyCF_ONLY_AST as _AST_FLAG
+except ImportError:
+ _AST_FLAG = 0
+ _ast = None
+
+
+class Source(object):
+ """ an immutable object holding a source code fragment,
+ possibly deindenting it.
+ """
+ _compilecounter = 0
+ def __init__(self, *parts, **kwargs):
+ self.lines = lines = []
+ de = kwargs.get('deindent', True)
+ rstrip = kwargs.get('rstrip', True)
+ for part in parts:
+ if not part:
+ partlines = []
+ if isinstance(part, Source):
+ partlines = part.lines
+ elif isinstance(part, (tuple, list)):
+ partlines = [x.rstrip("\n") for x in part]
+ elif isinstance(part, py.builtin._basestring):
+ partlines = part.split('\n')
+ if rstrip:
+ while partlines:
+ if partlines[-1].strip():
+ break
+ partlines.pop()
+ else:
+ partlines = getsource(part, deindent=de).lines
+ if de:
+ partlines = deindent(partlines)
+ lines.extend(partlines)
+
+ def __eq__(self, other):
+ try:
+ return self.lines == other.lines
+ except AttributeError:
+ if isinstance(other, str):
+ return str(self) == other
+ return False
+
+ def __getitem__(self, key):
+ if isinstance(key, int):
+ return self.lines[key]
+ else:
+ if key.step not in (None, 1):
+ raise IndexError("cannot slice a Source with a step")
+ return self.__getslice__(key.start, key.stop)
+
+ def __len__(self):
+ return len(self.lines)
+
+ def __getslice__(self, start, end):
+ newsource = Source()
+ newsource.lines = self.lines[start:end]
+ return newsource
+
+ def strip(self):
+ """ return new source object with trailing
+ and leading blank lines removed.
+ """
+ start, end = 0, len(self)
+ while start < end and not self.lines[start].strip():
+ start += 1
+ while end > start and not self.lines[end-1].strip():
+ end -= 1
+ source = Source()
+ source.lines[:] = self.lines[start:end]
+ return source
+
+ def putaround(self, before='', after='', indent=' ' * 4):
+ """ return a copy of the source object with
+ 'before' and 'after' wrapped around it.
+ """
+ before = Source(before)
+ after = Source(after)
+ newsource = Source()
+ lines = [ (indent + line) for line in self.lines]
+ newsource.lines = before.lines + lines + after.lines
+ return newsource
+
+ def indent(self, indent=' ' * 4):
+ """ return a copy of the source object with
+ all lines indented by the given indent-string.
+ """
+ newsource = Source()
+ newsource.lines = [(indent+line) for line in self.lines]
+ return newsource
+
+ def getstatement(self, lineno, assertion=False):
+ """ return Source statement which contains the
+ given linenumber (counted from 0).
+ """
+ start, end = self.getstatementrange(lineno, assertion)
+ return self[start:end]
+
+ def getstatementrange(self, lineno, assertion=False):
+ """ return (start, end) tuple which spans the minimal
+ statement region containing the given lineno.
+ """
+ if not (0 <= lineno < len(self)):
+ raise IndexError("lineno out of range")
+ ast, start, end = getstatementrange_ast(lineno, self)
+ return start, end
+
+ def deindent(self, offset=None):
+ """ return a new source object deindented by offset.
+ If offset is None then guess an indentation offset from
+ the first non-blank line. Subsequent lines which have a
+ lower indentation offset will be copied verbatim as
+ they are assumed to be part of multiline strings.
+ """
+ # XXX maybe use the tokenizer to properly handle multiline
+ # strings etc.pp?
+ newsource = Source()
+ newsource.lines[:] = deindent(self.lines, offset)
+ return newsource
+
+ def isparseable(self, deindent=True):
+ """ return True if source is parseable, heuristically
+ deindenting it by default.
+ """
+ try:
+ import parser
+ except ImportError:
+ syntax_checker = lambda x: compile(x, 'asd', 'exec')
+ else:
+ syntax_checker = parser.suite
+
+ if deindent:
+ source = str(self.deindent())
+ else:
+ source = str(self)
+ try:
+ #compile(source+'\n', "x", "exec")
+ syntax_checker(source+'\n')
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ return False
+ else:
+ return True
+
+ def __str__(self):
+ return "\n".join(self.lines)
+
+ def compile(self, filename=None, mode='exec',
+ flag=generators.compiler_flag,
+ dont_inherit=0, _genframe=None):
+ """ return compiled code object. if filename is None
+ invent an artificial filename which displays
+ the source/line position of the caller frame.
+ """
+ if not filename or py.path.local(filename).check(file=0):
+ if _genframe is None:
+ _genframe = sys._getframe(1) # the caller
+ fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
+ base = "<%d-codegen " % self._compilecounter
+ self.__class__._compilecounter += 1
+ if not filename:
+ filename = base + '%s:%d>' % (fn, lineno)
+ else:
+ filename = base + '%r %s:%d>' % (filename, fn, lineno)
+ source = "\n".join(self.lines) + '\n'
+ try:
+ co = cpy_compile(source, filename, mode, flag)
+ except SyntaxError:
+ ex = sys.exc_info()[1]
+ # re-represent syntax errors from parsing python strings
+ msglines = self.lines[:ex.lineno]
+ if ex.offset:
+ msglines.append(" "*ex.offset + '^')
+ msglines.append("(code was compiled probably from here: %s)" % filename)
+ newex = SyntaxError('\n'.join(msglines))
+ newex.offset = ex.offset
+ newex.lineno = ex.lineno
+ newex.text = ex.text
+ raise newex
+ else:
+ if flag & _AST_FLAG:
+ return co
+ lines = [(x + "\n") for x in self.lines]
+ import linecache
+ linecache.cache[filename] = (1, None, lines, filename)
+ return co
+
+#
+# public API shortcut functions
+#
+
+def compile_(source, filename=None, mode='exec', flags=
+ generators.compiler_flag, dont_inherit=0):
+ """ compile the given source to a raw code object,
+ and maintain an internal cache which allows later
+ retrieval of the source code for the code object
+ and any recursively created code objects.
+ """
+ if _ast is not None and isinstance(source, _ast.AST):
+ # XXX should Source support having AST?
+ return cpy_compile(source, filename, mode, flags, dont_inherit)
+ _genframe = sys._getframe(1) # the caller
+ s = Source(source)
+ co = s.compile(filename, mode, flags, _genframe=_genframe)
+ return co
+
+
+def getfslineno(obj):
+ """ Return source location (path, lineno) for the given object.
+ If the source cannot be determined return ("", -1)
+ """
+ try:
+ code = py.code.Code(obj)
+ except TypeError:
+ try:
+ fn = (inspect.getsourcefile(obj) or
+ inspect.getfile(obj))
+ except TypeError:
+ return "", -1
+
+ fspath = fn and py.path.local(fn) or None
+ lineno = -1
+ if fspath:
+ try:
+ _, lineno = findsource(obj)
+ except IOError:
+ pass
+ else:
+ fspath = code.path
+ lineno = code.firstlineno
+ assert isinstance(lineno, int)
+ return fspath, lineno
+
+#
+# helper functions
+#
+
+def findsource(obj):
+ try:
+ sourcelines, lineno = inspect.findsource(obj)
+ except py.builtin._sysex:
+ raise
+ except:
+ return None, -1
+ source = Source()
+ source.lines = [line.rstrip() for line in sourcelines]
+ return source, lineno
+
+def getsource(obj, **kwargs):
+ obj = py.code.getrawcode(obj)
+ try:
+ strsrc = inspect.getsource(obj)
+ except IndentationError:
+ strsrc = "\"Buggy python version consider upgrading, cannot get source\""
+ assert isinstance(strsrc, str)
+ return Source(strsrc, **kwargs)
+
+def deindent(lines, offset=None):
+ if offset is None:
+ for line in lines:
+ line = line.expandtabs()
+ s = line.lstrip()
+ if s:
+ offset = len(line)-len(s)
+ break
+ else:
+ offset = 0
+ if offset == 0:
+ return list(lines)
+ newlines = []
+ def readline_generator(lines):
+ for line in lines:
+ yield line + '\n'
+ while True:
+ yield ''
+
+ it = readline_generator(lines)
+
+ try:
+ for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
+ if sline > len(lines):
+ break # End of input reached
+ if sline > len(newlines):
+ line = lines[sline - 1].expandtabs()
+ if line.lstrip() and line[:offset].isspace():
+ line = line[offset:] # Deindent
+ newlines.append(line)
+
+ for i in range(sline, eline):
+ # Don't deindent continuing lines of
+ # multiline tokens (i.e. multiline strings)
+ newlines.append(lines[i])
+ except (IndentationError, tokenize.TokenError):
+ pass
+ # Add any lines we didn't see. E.g. if an exception was raised.
+ newlines.extend(lines[len(newlines):])
+ return newlines
+
+
+def get_statement_startend2(lineno, node):
+ import ast
+ # flatten all statements and except handlers into one lineno-list
+ # AST's line numbers start indexing at 1
+ l = []
+ for x in ast.walk(node):
+ if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
+ l.append(x.lineno - 1)
+ for name in "finalbody", "orelse":
+ val = getattr(x, name, None)
+ if val:
+ # treat the finally/orelse part as its own statement
+ l.append(val[0].lineno - 1 - 1)
+ l.sort()
+ insert_index = bisect_right(l, lineno)
+ start = l[insert_index - 1]
+ if insert_index >= len(l):
+ end = None
+ else:
+ end = l[insert_index]
+ return start, end
+
+
+def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
+ if astnode is None:
+ content = str(source)
+ try:
+ astnode = compile(content, "source", "exec", 1024) # 1024 for AST
+ except ValueError:
+ start, end = getstatementrange_old(lineno, source, assertion)
+ return None, start, end
+ start, end = get_statement_startend2(lineno, astnode)
+ # we need to correct the end:
+ # - ast-parsing strips comments
+ # - there might be empty lines
+ # - we might have lesser indented code blocks at the end
+ if end is None:
+ end = len(source.lines)
+
+ if end > start + 1:
+ # make sure we don't span differently indented code blocks
+ # by using the BlockFinder helper used which inspect.getsource() uses itself
+ block_finder = inspect.BlockFinder()
+ # if we start with an indented line, put blockfinder to "started" mode
+ block_finder.started = source.lines[start][0].isspace()
+ it = ((x + "\n") for x in source.lines[start:end])
+ try:
+ for tok in tokenize.generate_tokens(lambda: next(it)):
+ block_finder.tokeneater(*tok)
+ except (inspect.EndOfBlock, IndentationError):
+ end = block_finder.last + start
+ except Exception:
+ pass
+
+ # the end might still point to a comment or empty line, correct it
+ while end:
+ line = source.lines[end - 1].lstrip()
+ if line.startswith("#") or not line:
+ end -= 1
+ else:
+ break
+ return astnode, start, end
+
+
+def getstatementrange_old(lineno, source, assertion=False):
+ """ return (start, end) tuple which spans the minimal
+ statement region containing the given lineno.
+ raise an IndexError if no such statementrange can be found.
+ """
+ # XXX this logic is only used on python2.4 and below
+ # 1. find the start of the statement
+ from codeop import compile_command
+ for start in range(lineno, -1, -1):
+ if assertion:
+ line = source.lines[start]
+ # the following lines are not fully tested, change with care
+ if 'super' in line and 'self' in line and '__init__' in line:
+ raise IndexError("likely a subclass")
+ if "assert" not in line and "raise" not in line:
+ continue
+ trylines = source.lines[start:lineno+1]
+ # quick hack to prepare parsing an indented line with
+ # compile_command() (which errors on "return" outside defs)
+ trylines.insert(0, 'def xxx():')
+ trysource = '\n '.join(trylines)
+ # ^ space here
+ try:
+ compile_command(trysource)
+ except (SyntaxError, OverflowError, ValueError):
+ continue
+
+ # 2. find the end of the statement
+ for end in range(lineno+1, len(source)+1):
+ trysource = source[start:end]
+ if trysource.isparseable():
+ return start, end
+ raise SyntaxError("no valid source range around line %d " % (lineno,))
+
+
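py/_code/source.py adds the Source helper used throughout the traceback code above. A small illustrative sketch, assuming the vendored package is importable as `py` (the function body is made up):

    import py

    src = py.code.Source("def f(x):\n    return x + 1")
    co = src.compile()                # compiled with a synthetic "<N-codegen ...>" filename
    ns = {}
    exec(co, ns)
    assert ns["f"](2) == 3
    print(src.getstatement(1))        # statement containing line index 1: "    return x + 1"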
diff --git a/third_party/python/py/py/_error.py b/third_party/python/py/py/_error.py
new file mode 100644
index 0000000000..a6375de9fa
--- /dev/null
+++ b/third_party/python/py/py/_error.py
@@ -0,0 +1,91 @@
+"""
+create errno-specific classes for IO or os calls.
+
+"""
+from types import ModuleType
+import sys, os, errno
+
+class Error(EnvironmentError):
+ def __repr__(self):
+ return "%s.%s %r: %s " %(self.__class__.__module__,
+ self.__class__.__name__,
+ self.__class__.__doc__,
+ " ".join(map(str, self.args)),
+ #repr(self.args)
+ )
+
+ def __str__(self):
+ s = "[%s]: %s" %(self.__class__.__doc__,
+ " ".join(map(str, self.args)),
+ )
+ return s
+
+_winerrnomap = {
+ 2: errno.ENOENT,
+ 3: errno.ENOENT,
+ 17: errno.EEXIST,
+ 18: errno.EXDEV,
+ 13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailable
+ 22: errno.ENOTDIR,
+ 20: errno.ENOTDIR,
+ 267: errno.ENOTDIR,
+ 5: errno.EACCES, # anything better?
+}
+
+class ErrorMaker(ModuleType):
+ """ lazily provides Exception classes for each possible POSIX errno
+ (as defined per the 'errno' module). All such instances
+ subclass EnvironmentError.
+ """
+ Error = Error
+ _errno2class = {}
+
+ def __getattr__(self, name):
+ if name[0] == "_":
+ raise AttributeError(name)
+ eno = getattr(errno, name)
+ cls = self._geterrnoclass(eno)
+ setattr(self, name, cls)
+ return cls
+
+ def _geterrnoclass(self, eno):
+ try:
+ return self._errno2class[eno]
+ except KeyError:
+ clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
+ errorcls = type(Error)(clsname, (Error,),
+ {'__module__':'py.error',
+ '__doc__': os.strerror(eno)})
+ self._errno2class[eno] = errorcls
+ return errorcls
+
+ def checked_call(self, func, *args, **kwargs):
+ """ call a function and raise an errno-exception if applicable. """
+ __tracebackhide__ = True
+ try:
+ return func(*args, **kwargs)
+ except self.Error:
+ raise
+ except (OSError, EnvironmentError):
+ cls, value, tb = sys.exc_info()
+ if not hasattr(value, 'errno'):
+ raise
+ __tracebackhide__ = False
+ errno = value.errno
+ try:
+ if not isinstance(value, WindowsError):
+ raise NameError
+ except NameError:
+ # we are not on Windows, or we got a proper OSError
+ cls = self._geterrnoclass(errno)
+ else:
+ try:
+ cls = self._geterrnoclass(_winerrnomap[errno])
+ except KeyError:
+ raise value
+ raise cls("%s%r" % (func.__name__, args))
+ __tracebackhide__ = True
+
+
+error = ErrorMaker('py.error')
+sys.modules[error.__name__] = error \ No newline at end of file
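py/_error.py maps OS-level failures onto errno-specific exception classes created on demand. A hedged sketch of checked_call(), assuming the vendored package is importable as `py` (the path is made up):

    import os
    import py

    try:
        py.error.checked_call(os.stat, "/no/such/path")
    except py.error.ENOENT:
        print("os.stat failed with an ENOENT-specific exception class")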
diff --git a/third_party/python/py/py/_io/__init__.py b/third_party/python/py/py/_io/__init__.py
new file mode 100644
index 0000000000..835f01f3ab
--- /dev/null
+++ b/third_party/python/py/py/_io/__init__.py
@@ -0,0 +1 @@
+""" input/output helping """
diff --git a/third_party/python/py/py/_io/capture.py b/third_party/python/py/py/_io/capture.py
new file mode 100644
index 0000000000..bc157ed978
--- /dev/null
+++ b/third_party/python/py/py/_io/capture.py
@@ -0,0 +1,371 @@
+import os
+import sys
+import py
+import tempfile
+
+try:
+ from io import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+if sys.version_info < (3,0):
+ class TextIO(StringIO):
+ def write(self, data):
+ if not isinstance(data, unicode):
+ data = unicode(data, getattr(self, '_encoding', 'UTF-8'), 'replace')
+ StringIO.write(self, data)
+else:
+ TextIO = StringIO
+
+try:
+ from io import BytesIO
+except ImportError:
+ class BytesIO(StringIO):
+ def write(self, data):
+ if isinstance(data, unicode):
+ raise TypeError("not a byte value: %r" %(data,))
+ StringIO.write(self, data)
+
+patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
+
+class FDCapture:
+ """ Capture IO to/from a given os-level filedescriptor. """
+
+ def __init__(self, targetfd, tmpfile=None, now=True, patchsys=False):
+ """ save targetfd descriptor, and open a new
+ temporary file there. If no tmpfile is
+ specified, a tempfile.TemporaryFile() will be opened
+ in text mode.
+ """
+ self.targetfd = targetfd
+ if tmpfile is None and targetfd != 0:
+ f = tempfile.TemporaryFile('wb+')
+ tmpfile = dupfile(f, encoding="UTF-8")
+ f.close()
+ self.tmpfile = tmpfile
+ self._savefd = os.dup(self.targetfd)
+ if patchsys:
+ self._oldsys = getattr(sys, patchsysdict[targetfd])
+ if now:
+ self.start()
+
+ def start(self):
+ try:
+ os.fstat(self._savefd)
+ except OSError:
+ raise ValueError("saved filedescriptor not valid, "
+ "did you call start() twice?")
+ if self.targetfd == 0 and not self.tmpfile:
+ fd = os.open(devnullpath, os.O_RDONLY)
+ os.dup2(fd, 0)
+ os.close(fd)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], DontReadFromInput())
+ else:
+ os.dup2(self.tmpfile.fileno(), self.targetfd)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], self.tmpfile)
+
+ def done(self):
+ """ unpatch and clean up, returns the self.tmpfile (file object)
+ """
+ os.dup2(self._savefd, self.targetfd)
+ os.close(self._savefd)
+ if self.targetfd != 0:
+ self.tmpfile.seek(0)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], self._oldsys)
+ return self.tmpfile
+
+ def writeorg(self, data):
+ """ write a string to the original file descriptor
+ """
+ tempfp = tempfile.TemporaryFile()
+ try:
+ os.dup2(self._savefd, tempfp.fileno())
+ tempfp.write(data)
+ finally:
+ tempfp.close()
+
+
+def dupfile(f, mode=None, buffering=0, raising=False, encoding=None):
+ """ return a new open file object that's a duplicate of f
+
+ mode is duplicated if not given, 'buffering' controls
+ buffer size (defaulting to no buffering) and 'raising'
+ defines whether an exception is raised when an incompatible
+ file object is passed in (if raising is False, the file
+ object itself will be returned)
+ """
+ try:
+ fd = f.fileno()
+ mode = mode or f.mode
+ except AttributeError:
+ if raising:
+ raise
+ return f
+ newfd = os.dup(fd)
+ if sys.version_info >= (3,0):
+ if encoding is not None:
+ mode = mode.replace("b", "")
+ buffering = True
+ return os.fdopen(newfd, mode, buffering, encoding, closefd=True)
+ else:
+ f = os.fdopen(newfd, mode, buffering)
+ if encoding is not None:
+ return EncodedFile(f, encoding)
+ return f
+
+class EncodedFile(object):
+ def __init__(self, _stream, encoding):
+ self._stream = _stream
+ self.encoding = encoding
+
+ def write(self, obj):
+ if isinstance(obj, unicode):
+ obj = obj.encode(self.encoding)
+ elif isinstance(obj, str):
+ pass
+ else:
+ obj = str(obj)
+ self._stream.write(obj)
+
+ def writelines(self, linelist):
+ data = ''.join(linelist)
+ self.write(data)
+
+ def __getattr__(self, name):
+ return getattr(self._stream, name)
+
+class Capture(object):
+ def call(cls, func, *args, **kwargs):
+ """ return a (res, out, err) tuple where
+ out and err represent the output/error output
+ during function execution.
+ call the given function with args/kwargs
+ and capture output/error during its execution.
+ """
+ so = cls()
+ try:
+ res = func(*args, **kwargs)
+ finally:
+ out, err = so.reset()
+ return res, out, err
+ call = classmethod(call)
+
+ def reset(self):
+ """ reset sys.stdout/stderr and return captured output as strings. """
+ if hasattr(self, '_reset'):
+ raise ValueError("was already reset")
+ self._reset = True
+ outfile, errfile = self.done(save=False)
+ out, err = "", ""
+ if outfile and not outfile.closed:
+ out = outfile.read()
+ outfile.close()
+ if errfile and errfile != outfile and not errfile.closed:
+ err = errfile.read()
+ errfile.close()
+ return out, err
+
+ def suspend(self):
+ """ return current snapshot captures, memorize tempfiles. """
+ outerr = self.readouterr()
+ outfile, errfile = self.done()
+ return outerr
+
+
+class StdCaptureFD(Capture):
+ """ This class allows capturing writes to FD1 and FD2
+ and may connect a NULL file to FD0 (and prevent
+ reads from sys.stdin). If any of the 0,1,2 file descriptors
+ is invalid it will not be captured.
+ """
+ def __init__(self, out=True, err=True, mixed=False,
+ in_=True, patchsys=True, now=True):
+ self._options = {
+ "out": out,
+ "err": err,
+ "mixed": mixed,
+ "in_": in_,
+ "patchsys": patchsys,
+ "now": now,
+ }
+ self._save()
+ if now:
+ self.startall()
+
+ def _save(self):
+ in_ = self._options['in_']
+ out = self._options['out']
+ err = self._options['err']
+ mixed = self._options['mixed']
+ patchsys = self._options['patchsys']
+ if in_:
+ try:
+ self.in_ = FDCapture(0, tmpfile=None, now=False,
+ patchsys=patchsys)
+ except OSError:
+ pass
+ if out:
+ tmpfile = None
+ if hasattr(out, 'write'):
+ tmpfile = out
+ try:
+ self.out = FDCapture(1, tmpfile=tmpfile,
+ now=False, patchsys=patchsys)
+ self._options['out'] = self.out.tmpfile
+ except OSError:
+ pass
+ if err:
+ if out and mixed:
+ tmpfile = self.out.tmpfile
+ elif hasattr(err, 'write'):
+ tmpfile = err
+ else:
+ tmpfile = None
+ try:
+ self.err = FDCapture(2, tmpfile=tmpfile,
+ now=False, patchsys=patchsys)
+ self._options['err'] = self.err.tmpfile
+ except OSError:
+ pass
+
+ def startall(self):
+ if hasattr(self, 'in_'):
+ self.in_.start()
+ if hasattr(self, 'out'):
+ self.out.start()
+ if hasattr(self, 'err'):
+ self.err.start()
+
+ def resume(self):
+ """ resume capturing with original temp files. """
+ self.startall()
+
+ def done(self, save=True):
+ """ return (outfile, errfile) and stop capturing. """
+ outfile = errfile = None
+ if hasattr(self, 'out') and not self.out.tmpfile.closed:
+ outfile = self.out.done()
+ if hasattr(self, 'err') and not self.err.tmpfile.closed:
+ errfile = self.err.done()
+ if hasattr(self, 'in_'):
+ tmpfile = self.in_.done()
+ if save:
+ self._save()
+ return outfile, errfile
+
+ def readouterr(self):
+ """ return snapshot value of stdout/stderr capturings. """
+ if hasattr(self, "out"):
+ out = self._readsnapshot(self.out.tmpfile)
+ else:
+ out = ""
+ if hasattr(self, "err"):
+ err = self._readsnapshot(self.err.tmpfile)
+ else:
+ err = ""
+ return [out, err]
+
+ def _readsnapshot(self, f):
+ f.seek(0)
+ res = f.read()
+ enc = getattr(f, "encoding", None)
+ if enc:
+ res = py.builtin._totext(res, enc, "replace")
+ f.truncate(0)
+ f.seek(0)
+ return res
+
+
+class StdCapture(Capture):
+ """ This class allows capturing writes to sys.stdout|stderr "in-memory"
+ and will raise errors on attempts to read from sys.stdin. It only
+ modifies sys.stdout|stderr|stdin attributes and does not
+ touch underlying File Descriptors (use StdCaptureFD for that).
+ """
+ def __init__(self, out=True, err=True, in_=True, mixed=False, now=True):
+ self._oldout = sys.stdout
+ self._olderr = sys.stderr
+ self._oldin = sys.stdin
+ if out and not hasattr(out, 'file'):
+ out = TextIO()
+ self.out = out
+ if err:
+ if mixed:
+ err = out
+ elif not hasattr(err, 'write'):
+ err = TextIO()
+ self.err = err
+ self.in_ = in_
+ if now:
+ self.startall()
+
+ def startall(self):
+ if self.out:
+ sys.stdout = self.out
+ if self.err:
+ sys.stderr = self.err
+ if self.in_:
+ sys.stdin = self.in_ = DontReadFromInput()
+
+ def done(self, save=True):
+ """ return (outfile, errfile) and stop capturing. """
+ outfile = errfile = None
+ if self.out and not self.out.closed:
+ sys.stdout = self._oldout
+ outfile = self.out
+ outfile.seek(0)
+ if self.err and not self.err.closed:
+ sys.stderr = self._olderr
+ errfile = self.err
+ errfile.seek(0)
+ if self.in_:
+ sys.stdin = self._oldin
+ return outfile, errfile
+
+ def resume(self):
+ """ resume capturing with original temp files. """
+ self.startall()
+
+ def readouterr(self):
+ """ return snapshot value of stdout/stderr capturings. """
+ out = err = ""
+ if self.out:
+ out = self.out.getvalue()
+ self.out.truncate(0)
+ self.out.seek(0)
+ if self.err:
+ err = self.err.getvalue()
+ self.err.truncate(0)
+ self.err.seek(0)
+ return out, err
+
+class DontReadFromInput:
+ """Temporary stub class. Ideally when stdin is accessed, the
+ capturing should be turned off, with possibly all data captured
+ so far sent to the screen. This should be configurable, though,
+ because in automated test runs it is better to crash than
+ hang indefinitely.
+ """
+ def read(self, *args):
+ raise IOError("reading from stdin while output is captured")
+ readline = read
+ readlines = read
+ __iter__ = read
+
+ def fileno(self):
+ raise ValueError("redirected Stdin is pseudofile, has no fileno()")
+ def isatty(self):
+ return False
+ def close(self):
+ pass
+
+try:
+ devnullpath = os.devnull
+except AttributeError:
+ if os.name == 'nt':
+ devnullpath = 'NUL'
+ else:
+ devnullpath = '/dev/null'
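A tiny sketch of the in-memory capture class above; it imports straight from the vendored module rather than assuming a particular py.io export table:

    from py._io.capture import StdCapture

    cap = StdCapture()              # swaps sys.stdout/stderr (and stdin) for in-memory buffers
    print("hello")
    out, err = cap.reset()          # restores the real streams and returns the captured text
    assert out == "hello\n"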
diff --git a/third_party/python/py/py/_io/saferepr.py b/third_party/python/py/py/_io/saferepr.py
new file mode 100644
index 0000000000..8518290efd
--- /dev/null
+++ b/third_party/python/py/py/_io/saferepr.py
@@ -0,0 +1,71 @@
+import py
+import sys
+
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+class SafeRepr(reprlib.Repr):
+ """ subclass of repr.Repr that limits the resulting size of repr()
+ and includes information on exceptions raised during the call.
+ """
+ def repr(self, x):
+ return self._callhelper(reprlib.Repr.repr, self, x)
+
+ def repr_unicode(self, x, level):
+ # Strictly speaking wrong on narrow builds
+ def repr(u):
+ if "'" not in u:
+ return py.builtin._totext("'%s'") % u
+ elif '"' not in u:
+ return py.builtin._totext('"%s"') % u
+ else:
+ return py.builtin._totext("'%s'") % u.replace("'", r"\'")
+ s = repr(x[:self.maxstring])
+ if len(s) > self.maxstring:
+ i = max(0, (self.maxstring-3)//2)
+ j = max(0, self.maxstring-3-i)
+ s = repr(x[:i] + x[len(x)-j:])
+ s = s[:i] + '...' + s[len(s)-j:]
+ return s
+
+ def repr_instance(self, x, level):
+ return self._callhelper(builtin_repr, x)
+
+ def _callhelper(self, call, x, *args):
+ try:
+ # Try the vanilla repr and make sure that the result is a string
+ s = call(x, *args)
+ except py.builtin._sysex:
+ raise
+ except:
+ cls, e, tb = sys.exc_info()
+ exc_name = getattr(cls, '__name__', 'unknown')
+ try:
+ exc_info = str(e)
+ except py.builtin._sysex:
+ raise
+ except:
+ exc_info = 'unknown'
+ return '<[%s("%s") raised in repr()] %s object at 0x%x>' % (
+ exc_name, exc_info, x.__class__.__name__, id(x))
+ else:
+ if len(s) > self.maxsize:
+ i = max(0, (self.maxsize-3)//2)
+ j = max(0, self.maxsize-3-i)
+ s = s[:i] + '...' + s[len(s)-j:]
+ return s
+
+def saferepr(obj, maxsize=240):
+ """ return a size-limited safe repr-string for the given object.
+ Failing __repr__ functions of user instances will be represented
+ with a short exception info and 'saferepr' generally takes
+ care to never raise exceptions itself. This function is a wrapper
+ around the Repr/reprlib functionality of the standard 2.6 lib.
+ """
+ # review exception handling
+ srepr = SafeRepr()
+ srepr.maxstring = maxsize
+ srepr.maxsize = maxsize
+ srepr.maxother = 160
+ return srepr.repr(obj)
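saferepr() is what the traceback formatter above uses to show local variables without risking a second exception. An illustrative sketch, importing from the vendored module directly (the class name is made up):

    from py._io.saferepr import saferepr

    class Broken(object):
        def __repr__(self):
            raise RuntimeError("repr exploded")

    print(saferepr(Broken()))      # <[RuntimeError("repr exploded") raised in repr()] Broken object at 0x...>
    print(saferepr("x" * 10000))   # long values are truncated to roughly maxsize (240) characters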
diff --git a/third_party/python/py/py/_io/terminalwriter.py b/third_party/python/py/py/_io/terminalwriter.py
new file mode 100644
index 0000000000..74d31259da
--- /dev/null
+++ b/third_party/python/py/py/_io/terminalwriter.py
@@ -0,0 +1,384 @@
+"""
+
+Helper functions for writing to terminals and files.
+
+"""
+
+
+import sys, os
+import py
+py3k = sys.version_info[0] >= 3
+from py.builtin import text, bytes
+
+win32_and_ctypes = False
+colorama = None
+if sys.platform == "win32":
+ try:
+ import colorama
+ except ImportError:
+ try:
+ import ctypes
+ win32_and_ctypes = True
+ except ImportError:
+ pass
+
+
+def _getdimensions():
+ import termios,fcntl,struct
+ call = fcntl.ioctl(1,termios.TIOCGWINSZ,"\000"*8)
+ height,width = struct.unpack( "hhhh", call ) [:2]
+ return height, width
+
+
+def get_terminal_width():
+ width = 0
+ try:
+ _, width = _getdimensions()
+ except py.builtin._sysex:
+ raise
+ except:
+ # pass to fallback below
+ pass
+
+ if width == 0:
+ # FALLBACK:
+ # * some exception happened
+ # * or this is emacs terminal which reports (0,0)
+ width = int(os.environ.get('COLUMNS', 80))
+
+ # XXX the windows getdimensions may be bogus, let's sanify a bit
+ if width < 40:
+ width = 80
+ return width
+
+terminal_width = get_terminal_width()
+
+# XXX unify with _escaped func below
+def ansi_print(text, esc, file=None, newline=True, flush=False):
+ if file is None:
+ file = sys.stderr
+ text = text.rstrip()
+ if esc and not isinstance(esc, tuple):
+ esc = (esc,)
+ if esc and sys.platform != "win32" and file.isatty():
+ text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
+ text +
+ '\x1b[0m') # ANSI color code "reset"
+ if newline:
+ text += '\n'
+
+ if esc and win32_and_ctypes and file.isatty():
+ if 1 in esc:
+ bold = True
+ esc = tuple([x for x in esc if x != 1])
+ else:
+ bold = False
+ esctable = {() : FOREGROUND_WHITE, # normal
+ (31,): FOREGROUND_RED, # red
+ (32,): FOREGROUND_GREEN, # green
+ (33,): FOREGROUND_GREEN|FOREGROUND_RED, # yellow
+ (34,): FOREGROUND_BLUE, # blue
+ (35,): FOREGROUND_BLUE|FOREGROUND_RED, # purple
+ (36,): FOREGROUND_BLUE|FOREGROUND_GREEN, # cyan
+ (37,): FOREGROUND_WHITE, # white
+ (39,): FOREGROUND_WHITE, # reset
+ }
+ attr = esctable.get(esc, FOREGROUND_WHITE)
+ if bold:
+ attr |= FOREGROUND_INTENSITY
+ STD_OUTPUT_HANDLE = -11
+ STD_ERROR_HANDLE = -12
+ if file is sys.stderr:
+ handle = GetStdHandle(STD_ERROR_HANDLE)
+ else:
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ oldcolors = GetConsoleInfo(handle).wAttributes
+ attr |= (oldcolors & 0x0f0)
+ SetConsoleTextAttribute(handle, attr)
+ while len(text) > 32768:
+ file.write(text[:32768])
+ text = text[32768:]
+ if text:
+ file.write(text)
+ SetConsoleTextAttribute(handle, oldcolors)
+ else:
+ file.write(text)
+
+ if flush:
+ file.flush()
+
+def should_do_markup(file):
+ if os.environ.get('PY_COLORS') == '1':
+ return True
+ if os.environ.get('PY_COLORS') == '0':
+ return False
+ return hasattr(file, 'isatty') and file.isatty() \
+ and os.environ.get('TERM') != 'dumb' \
+ and not (sys.platform.startswith('java') and os._name == 'nt')
+
+class TerminalWriter(object):
+ _esctable = dict(black=30, red=31, green=32, yellow=33,
+ blue=34, purple=35, cyan=36, white=37,
+ Black=40, Red=41, Green=42, Yellow=43,
+ Blue=44, Purple=45, Cyan=46, White=47,
+ bold=1, light=2, blink=5, invert=7)
+
+ # XXX deprecate stringio argument
+ def __init__(self, file=None, stringio=False, encoding=None):
+ if file is None:
+ if stringio:
+ self.stringio = file = py.io.TextIO()
+ else:
+ from sys import stdout as file
+ elif py.builtin.callable(file) and not (
+ hasattr(file, "write") and hasattr(file, "flush")):
+ file = WriteFile(file, encoding=encoding)
+ if hasattr(file, "isatty") and file.isatty() and colorama:
+ file = colorama.AnsiToWin32(file).stream
+ self.encoding = encoding or getattr(file, 'encoding', "utf-8")
+ self._file = file
+ self.hasmarkup = should_do_markup(file)
+ self._lastlen = 0
+ self._chars_on_current_line = 0
+
+ @property
+ def fullwidth(self):
+ if hasattr(self, '_terminal_width'):
+ return self._terminal_width
+ return get_terminal_width()
+
+ @fullwidth.setter
+ def fullwidth(self, value):
+ self._terminal_width = value
+
+ @property
+ def chars_on_current_line(self):
+ """Return the number of characters written so far in the current line.
+
+ Please note that this count does not produce correct results after a reline() call,
+ see #164.
+
+ .. versionadded:: 1.5.0
+
+ :rtype: int
+ """
+ return self._chars_on_current_line
+
+ def _escaped(self, text, esc):
+ if esc and self.hasmarkup:
+ text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
+ text +'\x1b[0m')
+ return text
+
+ def markup(self, text, **kw):
+ esc = []
+ for name in kw:
+ if name not in self._esctable:
+ raise ValueError("unknown markup: %r" %(name,))
+ if kw[name]:
+ esc.append(self._esctable[name])
+ return self._escaped(text, tuple(esc))
+
+ def sep(self, sepchar, title=None, fullwidth=None, **kw):
+ if fullwidth is None:
+ fullwidth = self.fullwidth
+ # the goal is to have the line be as long as possible
+ # under the condition that len(line) <= fullwidth
+ if sys.platform == "win32":
+ # if we print in the last column on windows we are on a
+ # new line but there is no way to verify/neutralize this
+ # (we may not know the exact line width)
+ # so let's be defensive to avoid empty lines in the output
+ fullwidth -= 1
+ if title is not None:
+ # we want 2 + 2*len(fill) + len(title) <= fullwidth
+ # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth
+ # 2*len(sepchar)*N <= fullwidth - len(title) - 2
+ # N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
+ N = (fullwidth - len(title) - 2) // (2*len(sepchar))
+ fill = sepchar * N
+ line = "%s %s %s" % (fill, title, fill)
+ else:
+ # we want len(sepchar)*N <= fullwidth
+ # i.e. N <= fullwidth // len(sepchar)
+ line = sepchar * (fullwidth // len(sepchar))
+ # in some situations there is room for an extra sepchar at the right,
+ # in particular if we consider that with a sepchar like "_ " the
+ # trailing space is not important at the end of the line
+ if len(line) + len(sepchar.rstrip()) <= fullwidth:
+ line += sepchar.rstrip()
+
+ self.line(line, **kw)
+
+ def write(self, msg, **kw):
+ if msg:
+ if not isinstance(msg, (bytes, text)):
+ msg = text(msg)
+
+ self._update_chars_on_current_line(msg)
+
+ if self.hasmarkup and kw:
+ markupmsg = self.markup(msg, **kw)
+ else:
+ markupmsg = msg
+ write_out(self._file, markupmsg)
+
+ def _update_chars_on_current_line(self, text):
+ fields = text.rsplit('\n', 1)
+ if '\n' in text:
+ self._chars_on_current_line = len(fields[-1])
+ else:
+ self._chars_on_current_line += len(fields[-1])
+
+ def line(self, s='', **kw):
+ self.write(s, **kw)
+ self._checkfill(s)
+ self.write('\n')
+
+ def reline(self, line, **kw):
+ if not self.hasmarkup:
+ raise ValueError("cannot use rewrite-line without terminal")
+ self.write(line, **kw)
+ self._checkfill(line)
+ self.write('\r')
+ self._lastlen = len(line)
+
+ def _checkfill(self, line):
+ diff2last = self._lastlen - len(line)
+ if diff2last > 0:
+ self.write(" " * diff2last)
+
+class Win32ConsoleWriter(TerminalWriter):
+ def write(self, msg, **kw):
+ if msg:
+ if not isinstance(msg, (bytes, text)):
+ msg = text(msg)
+
+ self._update_chars_on_current_line(msg)
+
+ oldcolors = None
+ if self.hasmarkup and kw:
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ oldcolors = GetConsoleInfo(handle).wAttributes
+ default_bg = oldcolors & 0x00F0
+ attr = default_bg
+ if kw.pop('bold', False):
+ attr |= FOREGROUND_INTENSITY
+
+ if kw.pop('red', False):
+ attr |= FOREGROUND_RED
+ elif kw.pop('blue', False):
+ attr |= FOREGROUND_BLUE
+ elif kw.pop('green', False):
+ attr |= FOREGROUND_GREEN
+ elif kw.pop('yellow', False):
+ attr |= FOREGROUND_GREEN|FOREGROUND_RED
+ else:
+ attr |= oldcolors & 0x0007
+
+ SetConsoleTextAttribute(handle, attr)
+ write_out(self._file, msg)
+ if oldcolors:
+ SetConsoleTextAttribute(handle, oldcolors)
+
+class WriteFile(object):
+ def __init__(self, writemethod, encoding=None):
+ self.encoding = encoding
+ self._writemethod = writemethod
+
+ def write(self, data):
+ if self.encoding:
+ data = data.encode(self.encoding, "replace")
+ self._writemethod(data)
+
+ def flush(self):
+ return
+
+
+if win32_and_ctypes:
+ TerminalWriter = Win32ConsoleWriter
+ import ctypes
+ from ctypes import wintypes
+
+ # ctypes access to the Windows console
+ STD_OUTPUT_HANDLE = -11
+ STD_ERROR_HANDLE = -12
+ FOREGROUND_BLACK = 0x0000 # black text
+ FOREGROUND_BLUE = 0x0001 # text color contains blue.
+ FOREGROUND_GREEN = 0x0002 # text color contains green.
+ FOREGROUND_RED = 0x0004 # text color contains red.
+ FOREGROUND_WHITE = 0x0007
+ FOREGROUND_INTENSITY = 0x0008 # text color is intensified.
+ BACKGROUND_BLACK = 0x0000 # background color black
+ BACKGROUND_BLUE = 0x0010 # background color contains blue.
+ BACKGROUND_GREEN = 0x0020 # background color contains green.
+ BACKGROUND_RED = 0x0040 # background color contains red.
+ BACKGROUND_WHITE = 0x0070
+ BACKGROUND_INTENSITY = 0x0080 # background color is intensified.
+
+ SHORT = ctypes.c_short
+ class COORD(ctypes.Structure):
+ _fields_ = [('X', SHORT),
+ ('Y', SHORT)]
+ class SMALL_RECT(ctypes.Structure):
+ _fields_ = [('Left', SHORT),
+ ('Top', SHORT),
+ ('Right', SHORT),
+ ('Bottom', SHORT)]
+ class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
+ _fields_ = [('dwSize', COORD),
+ ('dwCursorPosition', COORD),
+ ('wAttributes', wintypes.WORD),
+ ('srWindow', SMALL_RECT),
+ ('dwMaximumWindowSize', COORD)]
+
+ _GetStdHandle = ctypes.windll.kernel32.GetStdHandle
+ _GetStdHandle.argtypes = [wintypes.DWORD]
+ _GetStdHandle.restype = wintypes.HANDLE
+ def GetStdHandle(kind):
+ return _GetStdHandle(kind)
+
+ SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute
+ SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
+ SetConsoleTextAttribute.restype = wintypes.BOOL
+
+ _GetConsoleScreenBufferInfo = \
+ ctypes.windll.kernel32.GetConsoleScreenBufferInfo
+ _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
+ ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
+ _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
+ def GetConsoleInfo(handle):
+ info = CONSOLE_SCREEN_BUFFER_INFO()
+ _GetConsoleScreenBufferInfo(handle, ctypes.byref(info))
+ return info
+
+ def _getdimensions():
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ info = GetConsoleInfo(handle)
+ # Subtract one from the width, otherwise the cursor wraps
+ # and the ending \n causes an empty line to display.
+ return info.dwSize.Y, info.dwSize.X - 1
+
+def write_out(fil, msg):
+ # XXX sometimes "msg" is of type bytes, sometimes text which
+ # complicates the situation. Should we try to enforce unicode?
+ try:
+ # on py27 and above writing out to sys.stdout with an encoding
+ # should usually work for unicode messages (if the encoding is
+ # capable of it)
+ fil.write(msg)
+ except UnicodeEncodeError:
+ # on py26 it might not work because stdout expects bytes
+ if fil.encoding:
+ try:
+ fil.write(msg.encode(fil.encoding))
+ except UnicodeEncodeError:
+ # it might still fail if the encoding is not capable
+ pass
+ else:
+ fil.flush()
+ return
+ # fallback: escape all unicode characters
+ msg = msg.encode("unicode-escape").decode("ascii")
+ fil.write(msg)
+ fil.flush()
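TerminalWriter is the low-level output object that the Repr* classes earlier in this import write to. A short sketch, importing from the vendored module directly; markup is only applied when the target stream is a tty:

    from py._io.terminalwriter import TerminalWriter

    tw = TerminalWriter()                          # defaults to sys.stdout
    tw.sep("=", "test session starts", bold=True)  # full-width separator with a centered title
    tw.line("collected 3 items", green=True)
    tw.write("done ")
    tw.line("in 0.01s")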
diff --git a/third_party/python/py/py/_log/__init__.py b/third_party/python/py/py/_log/__init__.py
new file mode 100644
index 0000000000..fad62e960d
--- /dev/null
+++ b/third_party/python/py/py/_log/__init__.py
@@ -0,0 +1,2 @@
+""" logging API ('producers' and 'consumers' connected via keywords) """
+
diff --git a/third_party/python/py/py/_log/log.py b/third_party/python/py/py/_log/log.py
new file mode 100644
index 0000000000..56969bcb58
--- /dev/null
+++ b/third_party/python/py/py/_log/log.py
@@ -0,0 +1,206 @@
+"""
+basic logging functionality based on a producer/consumer scheme.
+
+XXX implement this API: (maybe put it into slogger.py?)
+
+ log = Logger(
+ info=py.log.STDOUT,
+ debug=py.log.STDOUT,
+ command=None)
+ log.info("hello", "world")
+ log.command("hello", "world")
+
+ log = Logger(info=Logger(something=...),
+ debug=py.log.STDOUT,
+ command=None)
+"""
+import py
+import sys
+
+
+class Message(object):
+ def __init__(self, keywords, args):
+ self.keywords = keywords
+ self.args = args
+
+ def content(self):
+ return " ".join(map(str, self.args))
+
+ def prefix(self):
+ return "[%s] " % (":".join(self.keywords))
+
+ def __str__(self):
+ return self.prefix() + self.content()
+
+
+class Producer(object):
+ """ (deprecated) Log producer API which sends messages to be logged
+ to a 'consumer' object, which then prints them to stdout,
+ stderr, files, etc. Used extensively by PyPy-1.1.
+ """
+
+ Message = Message # to allow later customization
+ keywords2consumer = {}
+
+ def __init__(self, keywords, keywordmapper=None, **kw):
+ if hasattr(keywords, 'split'):
+ keywords = tuple(keywords.split())
+ self._keywords = keywords
+ if keywordmapper is None:
+ keywordmapper = default_keywordmapper
+ self._keywordmapper = keywordmapper
+
+ def __repr__(self):
+ return "<py.log.Producer %s>" % ":".join(self._keywords)
+
+ def __getattr__(self, name):
+ if '_' in name:
+ raise AttributeError(name)
+ producer = self.__class__(self._keywords + (name,))
+ setattr(self, name, producer)
+ return producer
+
+ def __call__(self, *args):
+ """ write a message to the appropriate consumer(s) """
+ func = self._keywordmapper.getconsumer(self._keywords)
+ if func is not None:
+ func(self.Message(self._keywords, args))
+
+class KeywordMapper:
+ def __init__(self):
+ self.keywords2consumer = {}
+
+ def getstate(self):
+ return self.keywords2consumer.copy()
+
+ def setstate(self, state):
+ self.keywords2consumer.clear()
+ self.keywords2consumer.update(state)
+
+ def getconsumer(self, keywords):
+ """ return a consumer matching the given keywords.
+
+ tries to find the most suitable consumer by walking the list of
+ keywords from the back; the first consumer matching a keyword is
+ returned (falling back to py.log.default)
+ """
+ for i in range(len(keywords), 0, -1):
+ try:
+ return self.keywords2consumer[keywords[:i]]
+ except KeyError:
+ continue
+ return self.keywords2consumer.get('default', default_consumer)
+
+ def setconsumer(self, keywords, consumer):
+ """ set a consumer for a set of keywords. """
+ # normalize to tuples
+ if isinstance(keywords, str):
+ keywords = tuple(filter(None, keywords.split()))
+ elif hasattr(keywords, '_keywords'):
+ keywords = keywords._keywords
+ elif not isinstance(keywords, tuple):
+ raise TypeError("key %r is not a string or tuple" % (keywords,))
+ if consumer is not None and not py.builtin.callable(consumer):
+ if not hasattr(consumer, 'write'):
+ raise TypeError(
+ "%r should be None, callable or file-like" % (consumer,))
+ consumer = File(consumer)
+ self.keywords2consumer[keywords] = consumer
+
+
+def default_consumer(msg):
+ """ the default consumer, writes the message to stderr """
+ sys.stderr.write(str(msg)+"\n")
+
+default_keywordmapper = KeywordMapper()
+
+
+def setconsumer(keywords, consumer):
+ default_keywordmapper.setconsumer(keywords, consumer)
+
+
+def setstate(state):
+ default_keywordmapper.setstate(state)
+
+
+def getstate():
+ return default_keywordmapper.getstate()
+
+#
+# Consumers
+#
+
+
+class File(object):
+ """ log consumer wrapping a file(-like) object """
+ def __init__(self, f):
+ assert hasattr(f, 'write')
+ # assert isinstance(f, file) or not hasattr(f, 'open')
+ self._file = f
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ self._file.write(str(msg) + "\n")
+ if hasattr(self._file, 'flush'):
+ self._file.flush()
+
+
+class Path(object):
+ """ log consumer that opens and writes to a Path """
+ def __init__(self, filename, append=False,
+ delayed_create=False, buffering=False):
+ self._append = append
+ self._filename = str(filename)
+ self._buffering = buffering
+ if not delayed_create:
+ self._openfile()
+
+ def _openfile(self):
+ mode = self._append and 'a' or 'w'
+ f = open(self._filename, mode)
+ self._file = f
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ if not hasattr(self, "_file"):
+ self._openfile()
+ self._file.write(str(msg) + "\n")
+ if not self._buffering:
+ self._file.flush()
+
+
+def STDOUT(msg):
+ """ consumer that writes to sys.stdout """
+ sys.stdout.write(str(msg)+"\n")
+
+
+def STDERR(msg):
+ """ consumer that writes to sys.stderr """
+ sys.stderr.write(str(msg)+"\n")
+
+
+class Syslog:
+ """ consumer that writes to the syslog daemon """
+
+ def __init__(self, priority=None):
+ if priority is None:
+ priority = self.LOG_INFO
+ self.priority = priority
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ import syslog
+ syslog.syslog(self.priority, str(msg))
+
+
+try:
+ import syslog
+except ImportError:
+ pass
+else:
+ for _prio in "EMERG ALERT CRIT ERR WARNING NOTICE INFO DEBUG".split():
+ _prio = "LOG_" + _prio
+ try:
+ setattr(Syslog, _prio, getattr(syslog, _prio))
+ except AttributeError:
+ pass
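The producer/consumer scheme above routes messages by whitespace-separated keywords. An illustrative sketch, assuming the vendored package is importable as `py` (the channel names are made up):

    import py

    log = py.log.Producer("myapp")
    py.log.setconsumer("myapp", py.log.STDOUT)   # send "myapp ..." messages to stdout
    py.log.setconsumer("myapp debug", None)      # silence the debug sub-channel
    log("starting", "up")                        # -> [myapp] starting up
    log.debug("not shown")                       # consumer is None, so nothing is emitted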
diff --git a/third_party/python/py/py/_log/warning.py b/third_party/python/py/py/_log/warning.py
new file mode 100644
index 0000000000..6ef20d98a2
--- /dev/null
+++ b/third_party/python/py/py/_log/warning.py
@@ -0,0 +1,79 @@
+import py, sys
+
+class DeprecationWarning(DeprecationWarning):
+ def __init__(self, msg, path, lineno):
+ self.msg = msg
+ self.path = path
+ self.lineno = lineno
+ def __repr__(self):
+ return "%s:%d: %s" %(self.path, self.lineno+1, self.msg)
+ def __str__(self):
+ return self.msg
+
+def _apiwarn(startversion, msg, stacklevel=2, function=None):
+ # below is mostly COPIED from python2.4/warnings.py's def warn()
+ # Get context information
+ if isinstance(stacklevel, str):
+ frame = sys._getframe(1)
+ level = 1
+ found = frame.f_code.co_filename.find(stacklevel) != -1
+ while frame:
+ co = frame.f_code
+ if co.co_filename.find(stacklevel) == -1:
+ if found:
+ stacklevel = level
+ break
+ else:
+ found = True
+ level += 1
+ frame = frame.f_back
+ else:
+ stacklevel = 1
+ msg = "%s (since version %s)" %(msg, startversion)
+ warn(msg, stacklevel=stacklevel+1, function=function)
+
+
+def warn(msg, stacklevel=1, function=None):
+ if function is not None:
+ import inspect
+ filename = inspect.getfile(function)
+ lineno = py.code.getrawcode(function).co_firstlineno
+ else:
+ try:
+ caller = sys._getframe(stacklevel)
+ except ValueError:
+ globals = sys.__dict__
+ lineno = 1
+ else:
+ globals = caller.f_globals
+ lineno = caller.f_lineno
+ if '__name__' in globals:
+ module = globals['__name__']
+ else:
+ module = "<string>"
+ filename = globals.get('__file__')
+ if filename:
+ fnl = filename.lower()
+ if fnl.endswith(".pyc") or fnl.endswith(".pyo"):
+ filename = filename[:-1]
+ elif fnl.endswith("$py.class"):
+ filename = filename.replace('$py.class', '.py')
+ else:
+ if module == "__main__":
+ try:
+ filename = sys.argv[0]
+ except AttributeError:
+ # embedded interpreters don't have sys.argv, see bug #839151
+ filename = '__main__'
+ if not filename:
+ filename = module
+ path = py.path.local(filename)
+ warning = DeprecationWarning(msg, path, lineno)
+ import warnings
+ warnings.warn_explicit(warning, category=Warning,
+ filename=str(warning.path),
+ lineno=warning.lineno,
+ registry=warnings.__dict__.setdefault(
+ "__warningsregistry__", {})
+ )
+
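_apiwarn() above walks the stack so that deprecation warnings point at the caller rather than at library internals. A hedged sketch, importing from the vendored module directly (old_helper/new_helper are made-up names):

    from py._log.warning import _apiwarn

    def old_helper():
        _apiwarn("1.0", "old_helper() is deprecated, use new_helper() instead")
        return 42

    old_helper()   # routes a DeprecationWarning naming the caller's file and line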
diff --git a/third_party/python/py/py/_path/__init__.py b/third_party/python/py/py/_path/__init__.py
new file mode 100644
index 0000000000..51f3246f80
--- /dev/null
+++ b/third_party/python/py/py/_path/__init__.py
@@ -0,0 +1 @@
+""" unified file system api """
diff --git a/third_party/python/py/py/_path/cacheutil.py b/third_party/python/py/py/_path/cacheutil.py
new file mode 100644
index 0000000000..9922504750
--- /dev/null
+++ b/third_party/python/py/py/_path/cacheutil.py
@@ -0,0 +1,114 @@
+"""
+This module contains multithread-safe cache implementations.
+
+All Caches have
+
+ getorbuild(key, builder)
+ delentry(key)
+
+methods and allow configuration when instantiating the cache class.
+"""
+from time import time as gettime
+
+class BasicCache(object):
+ def __init__(self, maxentries=128):
+ self.maxentries = maxentries
+ self.prunenum = int(maxentries - maxentries/8)
+ self._dict = {}
+
+ def clear(self):
+ self._dict.clear()
+
+ def _getentry(self, key):
+ return self._dict[key]
+
+ def _putentry(self, key, entry):
+ self._prunelowestweight()
+ self._dict[key] = entry
+
+ def delentry(self, key, raising=False):
+ try:
+ del self._dict[key]
+ except KeyError:
+ if raising:
+ raise
+
+ def getorbuild(self, key, builder):
+ try:
+ entry = self._getentry(key)
+ except KeyError:
+ entry = self._build(key, builder)
+ self._putentry(key, entry)
+ return entry.value
+
+ def _prunelowestweight(self):
+ """ prune out entries with lowest weight. """
+ numentries = len(self._dict)
+ if numentries >= self.maxentries:
+ # evict according to entry's weight
+ items = [(entry.weight, key)
+ for key, entry in self._dict.items()]
+ items.sort()
+ index = numentries - self.prunenum
+ if index > 0:
+ for weight, key in items[:index]:
+ # in MT situations the element might be gone
+ self.delentry(key, raising=False)
+
+class BuildcostAccessCache(BasicCache):
+ """ A BuildTime/Access-counting cache implementation.
+ the weight of a value is computed as the product of
+
+ num-accesses-of-a-value * time-to-build-the-value
+
+ The values with the least such weights are evicted
+ if the cache maxentries threshold is exceeded.
+ For implementation flexibility more than one object
+ might be evicted at a time.
+ """
+ # time function to use for measuring build-times
+
+ def _build(self, key, builder):
+ start = gettime()
+ val = builder()
+ end = gettime()
+ return WeightedCountingEntry(val, end-start)
+
+
+class WeightedCountingEntry(object):
+ def __init__(self, value, oneweight):
+ self._value = value
+ self.weight = self._oneweight = oneweight
+
+ def value(self):
+ self.weight += self._oneweight
+ return self._value
+ value = property(value)
+
+class AgingCache(BasicCache):
+ """ This cache prunes out cache entries that are too old.
+ """
+ def __init__(self, maxentries=128, maxseconds=10.0):
+ super(AgingCache, self).__init__(maxentries)
+ self.maxseconds = maxseconds
+
+ def _getentry(self, key):
+ entry = self._dict[key]
+ if entry.isexpired():
+ self.delentry(key)
+ raise KeyError(key)
+ return entry
+
+ def _build(self, key, builder):
+ val = builder()
+ entry = AgingEntry(val, gettime() + self.maxseconds)
+ return entry
+
+class AgingEntry(object):
+ def __init__(self, value, expirationtime):
+ self.value = value
+ self.weight = expirationtime
+
+ def isexpired(self):
+ t = gettime()
+ return t >= self.weight
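Both cache flavours share the getorbuild()/delentry() interface described in the module docstring. A small sketch, importing from the vendored module directly (keys and builders are made up):

    from py._path.cacheutil import BuildcostAccessCache, AgingCache

    cache = BuildcostAccessCache(maxentries=128)
    value = cache.getorbuild("answer", lambda: 6 * 7)   # built on first access...
    again = cache.getorbuild("answer", lambda: 0)       # ...then served from the cache
    assert value == again == 42

    aging = AgingCache(maxentries=16, maxseconds=0.5)
    aging.getorbuild("token", lambda: "fresh")          # expires about 0.5s after being built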
diff --git a/third_party/python/py/py/_path/common.py b/third_party/python/py/py/_path/common.py
new file mode 100644
index 0000000000..2d490b56a8
--- /dev/null
+++ b/third_party/python/py/py/_path/common.py
@@ -0,0 +1,453 @@
+"""
+"""
+import warnings
+import os
+import sys
+import posixpath
+import fnmatch
+import py
+
+# Moved from local.py.
+iswin32 = sys.platform == "win32" or (getattr(os, '_name', False) == 'nt')
+
+try:
+ from os import fspath
+except ImportError:
+ def fspath(path):
+ """
+ Return the string representation of the path.
+ If str or bytes is passed in, it is returned unchanged.
+ This code comes from PEP 519, modified to support earlier versions of
+ python.
+
+ This is required for python < 3.6.
+ """
+ if isinstance(path, (py.builtin.text, py.builtin.bytes)):
+ return path
+
+ # Work from the object's type to match method resolution of other magic
+ # methods.
+ path_type = type(path)
+ try:
+ return path_type.__fspath__(path)
+ except AttributeError:
+ if hasattr(path_type, '__fspath__'):
+ raise
+ try:
+ import pathlib
+ except ImportError:
+ pass
+ else:
+ if isinstance(path, pathlib.PurePath):
+ return py.builtin.text(path)
+
+ raise TypeError("expected str, bytes or os.PathLike object, not "
+ + path_type.__name__)
+
+class Checkers:
+ _depend_on_existence = 'exists', 'link', 'dir', 'file'
+
+ def __init__(self, path):
+ self.path = path
+
+ def dir(self):
+ raise NotImplementedError
+
+ def file(self):
+ raise NotImplementedError
+
+ def dotfile(self):
+ return self.path.basename.startswith('.')
+
+ def ext(self, arg):
+ if not arg.startswith('.'):
+ arg = '.' + arg
+ return self.path.ext == arg
+
+ def exists(self):
+ raise NotImplementedError
+
+ def basename(self, arg):
+ return self.path.basename == arg
+
+ def basestarts(self, arg):
+ return self.path.basename.startswith(arg)
+
+ def relto(self, arg):
+ return self.path.relto(arg)
+
+ def fnmatch(self, arg):
+ return self.path.fnmatch(arg)
+
+ def endswith(self, arg):
+ return str(self.path).endswith(arg)
+
+ def _evaluate(self, kw):
+ for name, value in kw.items():
+ invert = False
+ meth = None
+ try:
+ meth = getattr(self, name)
+ except AttributeError:
+ if name[:3] == 'not':
+ invert = True
+ try:
+ meth = getattr(self, name[3:])
+ except AttributeError:
+ pass
+ if meth is None:
+ raise TypeError(
+ "no %r checker available for %r" % (name, self.path))
+ try:
+ if py.code.getrawcode(meth).co_argcount > 1:
+ if (not meth(value)) ^ invert:
+ return False
+ else:
+ if bool(value) ^ bool(meth()) ^ invert:
+ return False
+ except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY):
+ # EBUSY feels not entirely correct,
+                # but it's kind of necessary since ENOMEDIUM
+ # is not accessible in python
+ for name in self._depend_on_existence:
+ if name in kw:
+ if kw.get(name):
+ return False
+ name = 'not' + name
+ if name in kw:
+ if not kw.get(name):
+ return False
+ return True
+
+class NeverRaised(Exception):
+ pass
+
+class PathBase(object):
+ """ shared implementation for filesystem path objects."""
+ Checkers = Checkers
+
+ def __div__(self, other):
+ return self.join(fspath(other))
+ __truediv__ = __div__ # py3k
+
+ def basename(self):
+ """ basename part of path. """
+ return self._getbyspec('basename')[0]
+ basename = property(basename, None, None, basename.__doc__)
+
+ def dirname(self):
+ """ dirname part of path. """
+ return self._getbyspec('dirname')[0]
+ dirname = property(dirname, None, None, dirname.__doc__)
+
+ def purebasename(self):
+ """ pure base name of the path."""
+ return self._getbyspec('purebasename')[0]
+ purebasename = property(purebasename, None, None, purebasename.__doc__)
+
+ def ext(self):
+ """ extension of the path (including the '.')."""
+ return self._getbyspec('ext')[0]
+ ext = property(ext, None, None, ext.__doc__)
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path joined with any given path arguments. """
+ return self.new(basename='').join(*args, **kwargs)
+
+ def read_binary(self):
+ """ read and return a bytestring from reading the path. """
+ with self.open('rb') as f:
+ return f.read()
+
+ def read_text(self, encoding):
+ """ read and return a Unicode string from reading the path. """
+ with self.open("r", encoding=encoding) as f:
+ return f.read()
+
+
+ def read(self, mode='r'):
+ """ read and return a bytestring from reading the path. """
+ with self.open(mode) as f:
+ return f.read()
+
+ def readlines(self, cr=1):
+ """ read and return a list of lines from the path. if cr is False, the
+newline will be removed from the end of each line. """
+ if sys.version_info < (3, ):
+ mode = 'rU'
+ else: # python 3 deprecates mode "U" in favor of "newline" option
+ mode = 'r'
+
+ if not cr:
+ content = self.read(mode)
+ return content.split('\n')
+ else:
+ f = self.open(mode)
+ try:
+ return f.readlines()
+ finally:
+ f.close()
+
+ def load(self):
+ """ (deprecated) return object unpickled from self.read() """
+ f = self.open('rb')
+ try:
+ import pickle
+ return py.error.checked_call(pickle.load, f)
+ finally:
+ f.close()
+
+ def move(self, target):
+ """ move this path to target. """
+ if target.relto(self):
+ raise py.error.EINVAL(
+ target,
+ "cannot move path into a subdirectory of itself")
+ try:
+ self.rename(target)
+ except py.error.EXDEV: # invalid cross-device link
+ self.copy(target)
+ self.remove()
+
+ def __repr__(self):
+ """ return a string representation of this path. """
+ return repr(str(self))
+
+ def check(self, **kw):
+ """ check a path for existence and properties.
+
+ Without arguments, return True if the path exists, otherwise False.
+
+ valid checkers::
+
+ file=1 # is a file
+ file=0 # is not a file (may not even exist)
+ dir=1 # is a dir
+ link=1 # is a link
+ exists=1 # exists
+
+ You can specify multiple checker definitions, for example::
+
+ path.check(file=1, link=1) # a link pointing to a file
+ """
+ if not kw:
+ kw = {'exists': 1}
+ return self.Checkers(self)._evaluate(kw)
+
+ def fnmatch(self, pattern):
+ """return true if the basename/fullname matches the glob-'pattern'.
+
+ valid pattern characters::
+
+ * matches everything
+ ? matches any single character
+ [seq] matches any character in seq
+ [!seq] matches any char not in seq
+
+ If the pattern contains a path-separator then the full path
+ is used for pattern matching and a '*' is prepended to the
+ pattern.
+
+ if the pattern doesn't contain a path-separator the pattern
+ is only matched against the basename.
+ """
+ return FNMatcher(pattern)(self)
+
+ def relto(self, relpath):
+ """ return a string which is the relative part of the path
+ to the given 'relpath'.
+ """
+ if not isinstance(relpath, (str, PathBase)):
+ raise TypeError("%r: not a string or path object" %(relpath,))
+ strrelpath = str(relpath)
+ if strrelpath and strrelpath[-1] != self.sep:
+ strrelpath += self.sep
+ #assert strrelpath[-1] == self.sep
+ #assert strrelpath[-2] != self.sep
+ strself = self.strpath
+ if sys.platform == "win32" or getattr(os, '_name', None) == 'nt':
+ if os.path.normcase(strself).startswith(
+ os.path.normcase(strrelpath)):
+ return strself[len(strrelpath):]
+ elif strself.startswith(strrelpath):
+ return strself[len(strrelpath):]
+ return ""
+
+ def ensure_dir(self, *args):
+ """ ensure the path joined with args is a directory. """
+ return self.ensure(*args, **{"dir": True})
+
+ def bestrelpath(self, dest):
+ """ return a string which is a relative path from self
+ (assumed to be a directory) to dest such that
+        self.join(bestrelpath) == dest.  If no such
+        path can be determined, return dest.
+ """
+ try:
+ if self == dest:
+ return os.curdir
+ base = self.common(dest)
+ if not base: # can be the case on windows
+ return str(dest)
+ self2base = self.relto(base)
+ reldest = dest.relto(base)
+ if self2base:
+ n = self2base.count(self.sep) + 1
+ else:
+ n = 0
+ l = [os.pardir] * n
+ if reldest:
+ l.append(reldest)
+ target = dest.sep.join(l)
+ return target
+ except AttributeError:
+ return str(dest)
+
+ def exists(self):
+ return self.check()
+
+ def isdir(self):
+ return self.check(dir=1)
+
+ def isfile(self):
+ return self.check(file=1)
+
+ def parts(self, reverse=False):
+ """ return a root-first list of all ancestor directories
+ plus the path itself.
+ """
+ current = self
+ l = [self]
+ while 1:
+ last = current
+ current = current.dirpath()
+ if last == current:
+ break
+ l.append(current)
+ if not reverse:
+ l.reverse()
+ return l
+
+ def common(self, other):
+ """ return the common part shared with the other path
+ or None if there is no common part.
+ """
+ last = None
+ for x, y in zip(self.parts(), other.parts()):
+ if x != y:
+ return last
+ last = x
+ return last
+
+ def __add__(self, other):
+ """ return new path object with 'other' added to the basename"""
+ return self.new(basename=self.basename+str(other))
+
+ def __cmp__(self, other):
+ """ return sort value (-1, 0, +1). """
+ try:
+ return cmp(self.strpath, other.strpath)
+ except AttributeError:
+ return cmp(str(self), str(other)) # self.path, other.path)
+
+ def __lt__(self, other):
+ try:
+ return self.strpath < other.strpath
+ except AttributeError:
+ return str(self) < str(other)
+
+ def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False):
+ """ yields all paths below the current one
+
+ fil is a filter (glob pattern or callable), if not matching the
+ path will not be yielded, defaulting to None (everything is
+ returned)
+
+ rec is a filter (glob pattern or callable) that controls whether
+ a node is descended, defaulting to None
+
+        ignore is an Exception class that is ignored when calling listdir()
+        on any of the paths (by default, all exceptions are reported)
+
+ bf if True will cause a breadthfirst search instead of the
+ default depthfirst. Default: False
+
+ sort if True will sort entries within each directory level.
+ """
+ for x in Visitor(fil, rec, ignore, bf, sort).gen(self):
+ yield x
+
+ def _sortlist(self, res, sort):
+ if sort:
+ if hasattr(sort, '__call__'):
+ warnings.warn(DeprecationWarning(
+ "listdir(sort=callable) is deprecated and breaks on python3"
+ ), stacklevel=3)
+ res.sort(sort)
+ else:
+ res.sort()
+
+ def samefile(self, other):
+ """ return True if other refers to the same stat object as self. """
+ return self.strpath == str(other)
+
+ def __fspath__(self):
+ return self.strpath
+
+class Visitor:
+ def __init__(self, fil, rec, ignore, bf, sort):
+ if isinstance(fil, py.builtin._basestring):
+ fil = FNMatcher(fil)
+ if isinstance(rec, py.builtin._basestring):
+ self.rec = FNMatcher(rec)
+ elif not hasattr(rec, '__call__') and rec:
+ self.rec = lambda path: True
+ else:
+ self.rec = rec
+ self.fil = fil
+ self.ignore = ignore
+ self.breadthfirst = bf
+ self.optsort = sort and sorted or (lambda x: x)
+
+ def gen(self, path):
+ try:
+ entries = path.listdir()
+ except self.ignore:
+ return
+ rec = self.rec
+ dirs = self.optsort([p for p in entries
+ if p.check(dir=1) and (rec is None or rec(p))])
+ if not self.breadthfirst:
+ for subdir in dirs:
+ for p in self.gen(subdir):
+ yield p
+ for p in self.optsort(entries):
+ if self.fil is None or self.fil(p):
+ yield p
+ if self.breadthfirst:
+ for subdir in dirs:
+ for p in self.gen(subdir):
+ yield p
+
+class FNMatcher:
+ def __init__(self, pattern):
+ self.pattern = pattern
+
+ def __call__(self, path):
+ pattern = self.pattern
+
+ if (pattern.find(path.sep) == -1 and
+ iswin32 and
+ pattern.find(posixpath.sep) != -1):
+ # Running on Windows, the pattern has no Windows path separators,
+ # and the pattern has one or more Posix path separators. Replace
+ # the Posix path separators with the Windows path separator.
+ pattern = pattern.replace(posixpath.sep, path.sep)
+
+ if pattern.find(path.sep) == -1:
+ name = path.basename
+ else:
+ name = str(path) # path.strpath # XXX svn?
+ if not os.path.isabs(pattern):
+ pattern = '*' + path.sep + pattern
+ return fnmatch.fnmatch(name, pattern)
diff --git a/third_party/python/py/py/_path/local.py b/third_party/python/py/py/_path/local.py
new file mode 100644
index 0000000000..79dc6284c3
--- /dev/null
+++ b/third_party/python/py/py/_path/local.py
@@ -0,0 +1,992 @@
+"""
+local path implementation.
+"""
+from __future__ import with_statement
+
+from contextlib import contextmanager
+import sys, os, atexit, io, uuid
+import py
+from py._path import common
+from py._path.common import iswin32, fspath
+from stat import S_ISLNK, S_ISDIR, S_ISREG
+
+from os.path import abspath, normpath, isabs, exists, isdir, isfile, islink, dirname
+
+if sys.version_info > (3,0):
+ def map_as_list(func, iter):
+ return list(map(func, iter))
+else:
+ map_as_list = map
+
+class Stat(object):
+ def __getattr__(self, name):
+ return getattr(self._osstatresult, "st_" + name)
+
+ def __init__(self, path, osstatresult):
+ self.path = path
+ self._osstatresult = osstatresult
+
+ @property
+ def owner(self):
+ if iswin32:
+ raise NotImplementedError("XXX win32")
+ import pwd
+ entry = py.error.checked_call(pwd.getpwuid, self.uid)
+ return entry[0]
+
+ @property
+ def group(self):
+ """ return group name of file. """
+ if iswin32:
+ raise NotImplementedError("XXX win32")
+ import grp
+ entry = py.error.checked_call(grp.getgrgid, self.gid)
+ return entry[0]
+
+ def isdir(self):
+ return S_ISDIR(self._osstatresult.st_mode)
+
+ def isfile(self):
+ return S_ISREG(self._osstatresult.st_mode)
+
+ def islink(self):
+ st = self.path.lstat()
+ return S_ISLNK(self._osstatresult.st_mode)
+
+class PosixPath(common.PathBase):
+ def chown(self, user, group, rec=0):
+ """ change ownership to the given user and group.
+ user and group may be specified by a number or
+ by a name. if rec is True change ownership
+ recursively.
+ """
+ uid = getuserid(user)
+ gid = getgroupid(group)
+ if rec:
+ for x in self.visit(rec=lambda x: x.check(link=0)):
+ if x.check(link=0):
+ py.error.checked_call(os.chown, str(x), uid, gid)
+ py.error.checked_call(os.chown, str(self), uid, gid)
+
+ def readlink(self):
+ """ return value of a symbolic link. """
+ return py.error.checked_call(os.readlink, self.strpath)
+
+ def mklinkto(self, oldname):
+ """ posix style hard link to another name. """
+ py.error.checked_call(os.link, str(oldname), str(self))
+
+ def mksymlinkto(self, value, absolute=1):
+ """ create a symbolic link with the given value (pointing to another name). """
+ if absolute:
+ py.error.checked_call(os.symlink, str(value), self.strpath)
+ else:
+ base = self.common(value)
+ # with posix local paths '/' is always a common base
+ relsource = self.__class__(value).relto(base)
+ reldest = self.relto(base)
+ n = reldest.count(self.sep)
+ target = self.sep.join(('..', )*n + (relsource, ))
+ py.error.checked_call(os.symlink, target, self.strpath)
+
+def getuserid(user):
+ import pwd
+ if not isinstance(user, int):
+ user = pwd.getpwnam(user)[2]
+ return user
+
+def getgroupid(group):
+ import grp
+ if not isinstance(group, int):
+ group = grp.getgrnam(group)[2]
+ return group
+
+FSBase = not iswin32 and PosixPath or common.PathBase
+
+class LocalPath(FSBase):
+ """ object oriented interface to os.path and other local filesystem
+ related information.
+ """
+ class ImportMismatchError(ImportError):
+ """ raised on pyimport() if there is a mismatch of __file__'s"""
+
+ sep = os.sep
+ class Checkers(common.Checkers):
+ def _stat(self):
+ try:
+ return self._statcache
+ except AttributeError:
+ try:
+ self._statcache = self.path.stat()
+ except py.error.ELOOP:
+ self._statcache = self.path.lstat()
+ return self._statcache
+
+ def dir(self):
+ return S_ISDIR(self._stat().mode)
+
+ def file(self):
+ return S_ISREG(self._stat().mode)
+
+ def exists(self):
+ return self._stat()
+
+ def link(self):
+ st = self.path.lstat()
+ return S_ISLNK(st.mode)
+
+ def __init__(self, path=None, expanduser=False):
+ """ Initialize and return a local Path instance.
+
+ Path can be relative to the current directory.
+ If path is None it defaults to the current working directory.
+ If expanduser is True, tilde-expansion is performed.
+ Note that Path instances always carry an absolute path.
+ Note also that passing in a local path object will simply return
+ the exact same path object. Use new() to get a new copy.
+ """
+ if path is None:
+ self.strpath = py.error.checked_call(os.getcwd)
+ else:
+ try:
+ path = fspath(path)
+ except TypeError:
+ raise ValueError("can only pass None, Path instances "
+ "or non-empty strings to LocalPath")
+ if expanduser:
+ path = os.path.expanduser(path)
+ self.strpath = abspath(path)
+
+ def __hash__(self):
+ return hash(self.strpath)
+
+ def __eq__(self, other):
+ s1 = fspath(self)
+ try:
+ s2 = fspath(other)
+ except TypeError:
+ return False
+ if iswin32:
+ s1 = s1.lower()
+ try:
+ s2 = s2.lower()
+ except AttributeError:
+ return False
+ return s1 == s2
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __lt__(self, other):
+ return fspath(self) < fspath(other)
+
+ def __gt__(self, other):
+ return fspath(self) > fspath(other)
+
+ def samefile(self, other):
+ """ return True if 'other' references the same file as 'self'.
+ """
+ other = fspath(other)
+ if not isabs(other):
+ other = abspath(other)
+ if self == other:
+ return True
+ if iswin32:
+ return False # there is no samefile
+ return py.error.checked_call(
+ os.path.samefile, self.strpath, other)
+
+ def remove(self, rec=1, ignore_errors=False):
+ """ remove a file or directory (or a directory tree if rec=1).
+ if ignore_errors is True, errors while removing directories will
+ be ignored.
+ """
+ if self.check(dir=1, link=0):
+ if rec:
+ # force remove of readonly files on windows
+ if iswin32:
+ self.chmod(0o700, rec=1)
+ import shutil
+ py.error.checked_call(
+ shutil.rmtree, self.strpath,
+ ignore_errors=ignore_errors)
+ else:
+ py.error.checked_call(os.rmdir, self.strpath)
+ else:
+ if iswin32:
+ self.chmod(0o700)
+ py.error.checked_call(os.remove, self.strpath)
+
+ def computehash(self, hashtype="md5", chunksize=524288):
+ """ return hexdigest of hashvalue for this file. """
+ try:
+ try:
+ import hashlib as mod
+ except ImportError:
+ if hashtype == "sha1":
+ hashtype = "sha"
+ mod = __import__(hashtype)
+ hash = getattr(mod, hashtype)()
+ except (AttributeError, ImportError):
+ raise ValueError("Don't know how to compute %r hash" %(hashtype,))
+ f = self.open('rb')
+ try:
+ while 1:
+ buf = f.read(chunksize)
+ if not buf:
+ return hash.hexdigest()
+ hash.update(buf)
+ finally:
+ f.close()
+
+ def new(self, **kw):
+ """ create a modified version of this path.
+ the following keyword arguments modify various path parts::
+
+ a:/some/path/to/a/file.ext
+ xx drive
+ xxxxxxxxxxxxxxxxx dirname
+ xxxxxxxx basename
+ xxxx purebasename
+ xxx ext
+ """
+ obj = object.__new__(self.__class__)
+ if not kw:
+ obj.strpath = self.strpath
+ return obj
+ drive, dirname, basename, purebasename,ext = self._getbyspec(
+ "drive,dirname,basename,purebasename,ext")
+ if 'basename' in kw:
+ if 'purebasename' in kw or 'ext' in kw:
+ raise ValueError("invalid specification %r" % kw)
+ else:
+ pb = kw.setdefault('purebasename', purebasename)
+ try:
+ ext = kw['ext']
+ except KeyError:
+ pass
+ else:
+ if ext and not ext.startswith('.'):
+ ext = '.' + ext
+ kw['basename'] = pb + ext
+
+ if ('dirname' in kw and not kw['dirname']):
+ kw['dirname'] = drive
+ else:
+ kw.setdefault('dirname', dirname)
+ kw.setdefault('sep', self.sep)
+ obj.strpath = normpath(
+ "%(dirname)s%(sep)s%(basename)s" % kw)
+ return obj
+
+ def _getbyspec(self, spec):
+ """ see new for what 'spec' can be. """
+ res = []
+ parts = self.strpath.split(self.sep)
+
+ args = filter(None, spec.split(',') )
+ append = res.append
+ for name in args:
+ if name == 'drive':
+ append(parts[0])
+ elif name == 'dirname':
+ append(self.sep.join(parts[:-1]))
+ else:
+ basename = parts[-1]
+ if name == 'basename':
+ append(basename)
+ else:
+ i = basename.rfind('.')
+ if i == -1:
+ purebasename, ext = basename, ''
+ else:
+ purebasename, ext = basename[:i], basename[i:]
+ if name == 'purebasename':
+ append(purebasename)
+ elif name == 'ext':
+ append(ext)
+ else:
+ raise ValueError("invalid part specification %r" % name)
+ return res
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path joined with any given path arguments. """
+ if not kwargs:
+ path = object.__new__(self.__class__)
+ path.strpath = dirname(self.strpath)
+ if args:
+ path = path.join(*args)
+ return path
+ return super(LocalPath, self).dirpath(*args, **kwargs)
+
+ def join(self, *args, **kwargs):
+ """ return a new path by appending all 'args' as path
+ components. if abs=1 is used restart from root if any
+ of the args is an absolute path.
+ """
+ sep = self.sep
+ strargs = [fspath(arg) for arg in args]
+ strpath = self.strpath
+ if kwargs.get('abs'):
+ newargs = []
+ for arg in reversed(strargs):
+ if isabs(arg):
+ strpath = arg
+ strargs = newargs
+ break
+ newargs.insert(0, arg)
+ # special case for when we have e.g. strpath == "/"
+ actual_sep = "" if strpath.endswith(sep) else sep
+ for arg in strargs:
+ arg = arg.strip(sep)
+ if iswin32:
+ # allow unix style paths even on windows.
+ arg = arg.strip('/')
+ arg = arg.replace('/', sep)
+ strpath = strpath + actual_sep + arg
+ actual_sep = sep
+ obj = object.__new__(self.__class__)
+ obj.strpath = normpath(strpath)
+ return obj
+
+ def open(self, mode='r', ensure=False, encoding=None):
+ """ return an opened file with the given mode.
+
+ If ensure is True, create parent directories if needed.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ if encoding:
+ return py.error.checked_call(io.open, self.strpath, mode, encoding=encoding)
+ return py.error.checked_call(open, self.strpath, mode)
+
+ def _fastjoin(self, name):
+ child = object.__new__(self.__class__)
+ child.strpath = self.strpath + self.sep + name
+ return child
+
+ def islink(self):
+ return islink(self.strpath)
+
+ def check(self, **kw):
+ if not kw:
+ return exists(self.strpath)
+ if len(kw) == 1:
+ if "dir" in kw:
+ return not kw["dir"] ^ isdir(self.strpath)
+ if "file" in kw:
+ return not kw["file"] ^ isfile(self.strpath)
+ return super(LocalPath, self).check(**kw)
+
+ _patternchars = set("*?[" + os.path.sep)
+ def listdir(self, fil=None, sort=None):
+ """ list directory contents, possibly filter by the given fil func
+ and possibly sorted.
+ """
+ if fil is None and sort is None:
+ names = py.error.checked_call(os.listdir, self.strpath)
+ return map_as_list(self._fastjoin, names)
+ if isinstance(fil, py.builtin._basestring):
+ if not self._patternchars.intersection(fil):
+ child = self._fastjoin(fil)
+ if exists(child.strpath):
+ return [child]
+ return []
+ fil = common.FNMatcher(fil)
+ names = py.error.checked_call(os.listdir, self.strpath)
+ res = []
+ for name in names:
+ child = self._fastjoin(name)
+ if fil is None or fil(child):
+ res.append(child)
+ self._sortlist(res, sort)
+ return res
+
+ def size(self):
+ """ return size of the underlying file object """
+ return self.stat().size
+
+ def mtime(self):
+ """ return last modification time of the path. """
+ return self.stat().mtime
+
+ def copy(self, target, mode=False, stat=False):
+ """ copy path to target.
+
+        If mode is True, will copy permission from path to target.
+ If stat is True, copy permission, last modification
+ time, last access time, and flags from path to target.
+ """
+ if self.check(file=1):
+ if target.check(dir=1):
+ target = target.join(self.basename)
+ assert self!=target
+ copychunked(self, target)
+ if mode:
+ copymode(self.strpath, target.strpath)
+ if stat:
+ copystat(self, target)
+ else:
+ def rec(p):
+ return p.check(link=0)
+ for x in self.visit(rec=rec):
+ relpath = x.relto(self)
+ newx = target.join(relpath)
+ newx.dirpath().ensure(dir=1)
+ if x.check(link=1):
+ newx.mksymlinkto(x.readlink())
+ continue
+ elif x.check(file=1):
+ copychunked(x, newx)
+ elif x.check(dir=1):
+ newx.ensure(dir=1)
+ if mode:
+ copymode(x.strpath, newx.strpath)
+ if stat:
+ copystat(x, newx)
+
+ def rename(self, target):
+ """ rename this path to target. """
+ target = fspath(target)
+ return py.error.checked_call(os.rename, self.strpath, target)
+
+ def dump(self, obj, bin=1):
+ """ pickle object into path location"""
+ f = self.open('wb')
+ import pickle
+ try:
+ py.error.checked_call(pickle.dump, obj, f, bin)
+ finally:
+ f.close()
+
+ def mkdir(self, *args):
+ """ create & return the directory joined with args. """
+ p = self.join(*args)
+ py.error.checked_call(os.mkdir, fspath(p))
+ return p
+
+ def write_binary(self, data, ensure=False):
+ """ write binary data into path. If ensure is True create
+ missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ with self.open('wb') as f:
+ f.write(data)
+
+ def write_text(self, data, encoding, ensure=False):
+ """ write text data into path using the specified encoding.
+ If ensure is True create missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ with self.open('w', encoding=encoding) as f:
+ f.write(data)
+
+ def write(self, data, mode='w', ensure=False):
+ """ write data into path. If ensure is True create
+ missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ if 'b' in mode:
+ if not py.builtin._isbytes(data):
+ raise ValueError("can only process bytes")
+ else:
+ if not py.builtin._istext(data):
+ if not py.builtin._isbytes(data):
+ data = str(data)
+ else:
+ data = py.builtin._totext(data, sys.getdefaultencoding())
+ f = self.open(mode)
+ try:
+ f.write(data)
+ finally:
+ f.close()
+
+ def _ensuredirs(self):
+ parent = self.dirpath()
+ if parent == self:
+ return self
+ if parent.check(dir=0):
+ parent._ensuredirs()
+ if self.check(dir=0):
+ try:
+ self.mkdir()
+ except py.error.EEXIST:
+ # race condition: file/dir created by another thread/process.
+ # complain if it is not a dir
+ if self.check(dir=0):
+ raise
+ return self
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+ a file). if you specify a keyword argument 'dir=True'
+ then the path is forced to be a directory path.
+ """
+ p = self.join(*args)
+ if kwargs.get('dir', 0):
+ return p._ensuredirs()
+ else:
+ p.dirpath()._ensuredirs()
+ if not p.check(file=1):
+ p.open('w').close()
+ return p
+
+ def stat(self, raising=True):
+ """ Return an os.stat() tuple. """
+ if raising == True:
+ return Stat(self, py.error.checked_call(os.stat, self.strpath))
+ try:
+ return Stat(self, os.stat(self.strpath))
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ return None
+
+ def lstat(self):
+ """ Return an os.lstat() tuple. """
+ return Stat(self, py.error.checked_call(os.lstat, self.strpath))
+
+ def setmtime(self, mtime=None):
+ """ set modification time for the given path. if 'mtime' is None
+ (the default) then the file's mtime is set to current time.
+
+ Note that the resolution for 'mtime' is platform dependent.
+ """
+ if mtime is None:
+ return py.error.checked_call(os.utime, self.strpath, mtime)
+ try:
+ return py.error.checked_call(os.utime, self.strpath, (-1, mtime))
+ except py.error.EINVAL:
+ return py.error.checked_call(os.utime, self.strpath, (self.atime(), mtime))
+
+ def chdir(self):
+ """ change directory to self and return old current directory """
+ try:
+ old = self.__class__()
+ except py.error.ENOENT:
+ old = None
+ py.error.checked_call(os.chdir, self.strpath)
+ return old
+
+
+ @contextmanager
+ def as_cwd(self):
+ """ return context manager which changes to current dir during the
+ managed "with" context. On __enter__ it returns the old dir.
+ """
+ old = self.chdir()
+ try:
+ yield old
+ finally:
+ old.chdir()
+
+ def realpath(self):
+ """ return a new path which contains no symbolic links."""
+ return self.__class__(os.path.realpath(self.strpath))
+
+ def atime(self):
+ """ return last access time of the path. """
+ return self.stat().atime
+
+ def __repr__(self):
+ return 'local(%r)' % self.strpath
+
+ def __str__(self):
+ """ return string representation of the Path. """
+ return self.strpath
+
+ def chmod(self, mode, rec=0):
+ """ change permissions to the given mode. If mode is an
+ integer it directly encodes the os-specific modes.
+ if rec is True perform recursively.
+ """
+ if not isinstance(mode, int):
+ raise TypeError("mode %r must be an integer" % (mode,))
+ if rec:
+ for x in self.visit(rec=rec):
+ py.error.checked_call(os.chmod, str(x), mode)
+ py.error.checked_call(os.chmod, self.strpath, mode)
+
+ def pypkgpath(self):
+ """ return the Python package path by looking for the last
+ directory upwards which still contains an __init__.py.
+ Return None if a pkgpath can not be determined.
+ """
+ pkgpath = None
+ for parent in self.parts(reverse=True):
+ if parent.isdir():
+ if not parent.join('__init__.py').exists():
+ break
+ if not isimportable(parent.basename):
+ break
+ pkgpath = parent
+ return pkgpath
+
+ def _ensuresyspath(self, ensuremode, path):
+ if ensuremode:
+ s = str(path)
+ if ensuremode == "append":
+ if s not in sys.path:
+ sys.path.append(s)
+ else:
+ if s != sys.path[0]:
+ sys.path.insert(0, s)
+
+ def pyimport(self, modname=None, ensuresyspath=True):
+ """ return path as an imported python module.
+
+ If modname is None, look for the containing package
+ and construct an according module name.
+ The module will be put/looked up in sys.modules.
+ if ensuresyspath is True then the root dir for importing
+ the file (taking __init__.py files into account) will
+ be prepended to sys.path if it isn't there already.
+ If ensuresyspath=="append" the root dir will be appended
+ if it isn't already contained in sys.path.
+ if ensuresyspath is False no modification of syspath happens.
+ """
+ if not self.check():
+ raise py.error.ENOENT(self)
+
+ pkgpath = None
+ if modname is None:
+ pkgpath = self.pypkgpath()
+ if pkgpath is not None:
+ pkgroot = pkgpath.dirpath()
+ names = self.new(ext="").relto(pkgroot).split(self.sep)
+ if names[-1] == "__init__":
+ names.pop()
+ modname = ".".join(names)
+ else:
+ pkgroot = self.dirpath()
+ modname = self.purebasename
+
+ self._ensuresyspath(ensuresyspath, pkgroot)
+ __import__(modname)
+ mod = sys.modules[modname]
+ if self.basename == "__init__.py":
+ return mod # we don't check anything as we might
+ # be in a namespace package ... too icky to check
+ modfile = mod.__file__
+ if modfile[-4:] in ('.pyc', '.pyo'):
+ modfile = modfile[:-1]
+ elif modfile.endswith('$py.class'):
+ modfile = modfile[:-9] + '.py'
+ if modfile.endswith(os.path.sep + "__init__.py"):
+ if self.basename != "__init__.py":
+ modfile = modfile[:-12]
+ try:
+ issame = self.samefile(modfile)
+ except py.error.ENOENT:
+ issame = False
+ if not issame:
+ raise self.ImportMismatchError(modname, modfile, self)
+ return mod
+ else:
+ try:
+ return sys.modules[modname]
+ except KeyError:
+ # we have a custom modname, do a pseudo-import
+ import types
+ mod = types.ModuleType(modname)
+ mod.__file__ = str(self)
+ sys.modules[modname] = mod
+ try:
+ py.builtin.execfile(str(self), mod.__dict__)
+ except:
+ del sys.modules[modname]
+ raise
+ return mod
+
+ def sysexec(self, *argv, **popen_opts):
+ """ return stdout text from executing a system child process,
+        where the 'self' path points to an executable.
+ The process is directly invoked and not through a system shell.
+ """
+ from subprocess import Popen, PIPE
+ argv = map_as_list(str, argv)
+ popen_opts['stdout'] = popen_opts['stderr'] = PIPE
+ proc = Popen([str(self)] + argv, **popen_opts)
+ stdout, stderr = proc.communicate()
+ ret = proc.wait()
+ if py.builtin._isbytes(stdout):
+ stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
+ if ret != 0:
+ if py.builtin._isbytes(stderr):
+ stderr = py.builtin._totext(stderr, sys.getdefaultencoding())
+ raise py.process.cmdexec.Error(ret, ret, str(self),
+ stdout, stderr,)
+ return stdout
+
+ def sysfind(cls, name, checker=None, paths=None):
+ """ return a path object found by looking at the systems
+ underlying PATH specification. If the checker is not None
+ it will be invoked to filter matching paths. If a binary
+ cannot be found, None is returned
+ Note: This is probably not working on plain win32 systems
+ but may work on cygwin.
+ """
+ if isabs(name):
+ p = py.path.local(name)
+ if p.check(file=1):
+ return p
+ else:
+ if paths is None:
+ if iswin32:
+ paths = os.environ['Path'].split(';')
+ if '' not in paths and '.' not in paths:
+ paths.append('.')
+ try:
+ systemroot = os.environ['SYSTEMROOT']
+ except KeyError:
+ pass
+ else:
+ paths = [path.replace('%SystemRoot%', systemroot)
+ for path in paths]
+ else:
+ paths = os.environ['PATH'].split(':')
+ tryadd = []
+ if iswin32:
+ tryadd += os.environ['PATHEXT'].split(os.pathsep)
+ tryadd.append("")
+
+ for x in paths:
+ for addext in tryadd:
+ p = py.path.local(x).join(name, abs=True) + addext
+ try:
+ if p.check(file=1):
+ if checker:
+ if not checker(p):
+ continue
+ return p
+ except py.error.EACCES:
+ pass
+ return None
+ sysfind = classmethod(sysfind)
+
+ def _gethomedir(cls):
+ try:
+ x = os.environ['HOME']
+ except KeyError:
+ try:
+ x = os.environ["HOMEDRIVE"] + os.environ['HOMEPATH']
+ except KeyError:
+ return None
+ return cls(x)
+ _gethomedir = classmethod(_gethomedir)
+
+ # """
+ # special class constructors for local filesystem paths
+ # """
+ @classmethod
+ def get_temproot(cls):
+ """ return the system's temporary directory
+ (where tempfiles are usually created in)
+ """
+ import tempfile
+ return py.path.local(tempfile.gettempdir())
+
+ @classmethod
+ def mkdtemp(cls, rootdir=None):
+ """ return a Path object pointing to a fresh new temporary directory
+        (which we created ourselves).
+ """
+ import tempfile
+ if rootdir is None:
+ rootdir = cls.get_temproot()
+ return cls(py.error.checked_call(tempfile.mkdtemp, dir=str(rootdir)))
+
+ def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3,
+ lock_timeout=172800): # two days
+ """ return unique directory with a number greater than the current
+ maximum one. The number is assumed to start directly after prefix.
+ if keep is true directories with a number less than (maxnum-keep)
+ will be removed. If .lock files are used (lock_timeout non-zero),
+ algorithm is multi-process safe.
+ """
+ if rootdir is None:
+ rootdir = cls.get_temproot()
+
+ nprefix = prefix.lower()
+ def parse_num(path):
+ """ parse the number out of a path (if it matches the prefix) """
+ nbasename = path.basename.lower()
+ if nbasename.startswith(nprefix):
+ try:
+ return int(nbasename[len(nprefix):])
+ except ValueError:
+ pass
+
+ def create_lockfile(path):
+ """ exclusively create lockfile. Throws when failed """
+ mypid = os.getpid()
+ lockfile = path.join('.lock')
+ if hasattr(lockfile, 'mksymlinkto'):
+ lockfile.mksymlinkto(str(mypid))
+ else:
+ fd = py.error.checked_call(os.open, str(lockfile), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644)
+ with os.fdopen(fd, 'w') as f:
+ f.write(str(mypid))
+ return lockfile
+
+ def atexit_remove_lockfile(lockfile):
+ """ ensure lockfile is removed at process exit """
+ mypid = os.getpid()
+ def try_remove_lockfile():
+ # in a fork() situation, only the last process should
+ # remove the .lock, otherwise the other processes run the
+ # risk of seeing their temporary dir disappear. For now
+ # we remove the .lock in the parent only (i.e. we assume
+ # that the children finish before the parent).
+ if os.getpid() != mypid:
+ return
+ try:
+ lockfile.remove()
+ except py.error.Error:
+ pass
+ atexit.register(try_remove_lockfile)
+
+ # compute the maximum number currently in use with the prefix
+ lastmax = None
+ while True:
+ maxnum = -1
+ for path in rootdir.listdir():
+ num = parse_num(path)
+ if num is not None:
+ maxnum = max(maxnum, num)
+
+ # make the new directory
+ try:
+ udir = rootdir.mkdir(prefix + str(maxnum+1))
+ if lock_timeout:
+ lockfile = create_lockfile(udir)
+ atexit_remove_lockfile(lockfile)
+ except (py.error.EEXIST, py.error.ENOENT, py.error.EBUSY):
+ # race condition (1): another thread/process created the dir
+ # in the meantime - try again
+ # race condition (2): another thread/process spuriously acquired
+ # lock treating empty directory as candidate
+ # for removal - try again
+ # race condition (3): another thread/process tried to create the lock at
+ # the same time (happened in Python 3.3 on Windows)
+ # https://ci.appveyor.com/project/pytestbot/py/build/1.0.21/job/ffi85j4c0lqwsfwa
+ if lastmax == maxnum:
+ raise
+ lastmax = maxnum
+ continue
+ break
+
+ def get_mtime(path):
+ """ read file modification time """
+ try:
+ return path.lstat().mtime
+ except py.error.Error:
+ pass
+
+ garbage_prefix = prefix + 'garbage-'
+
+ def is_garbage(path):
+ """ check if path denotes directory scheduled for removal """
+ bn = path.basename
+ return bn.startswith(garbage_prefix)
+
+ # prune old directories
+ udir_time = get_mtime(udir)
+ if keep and udir_time:
+ for path in rootdir.listdir():
+ num = parse_num(path)
+ if num is not None and num <= (maxnum - keep):
+ try:
+ # try acquiring lock to remove directory as exclusive user
+ if lock_timeout:
+ create_lockfile(path)
+ except (py.error.EEXIST, py.error.ENOENT, py.error.EBUSY):
+ path_time = get_mtime(path)
+ if not path_time:
+ # assume directory doesn't exist now
+ continue
+ if abs(udir_time - path_time) < lock_timeout:
+ # assume directory with lockfile exists
+ # and lock timeout hasn't expired yet
+ continue
+
+ # path dir locked for exclusive use
+ # and scheduled for removal to avoid another thread/process
+ # treating it as a new directory or removal candidate
+ garbage_path = rootdir.join(garbage_prefix + str(uuid.uuid4()))
+ try:
+ path.rename(garbage_path)
+ garbage_path.remove(rec=1)
+ except KeyboardInterrupt:
+ raise
+ except: # this might be py.error.Error, WindowsError ...
+ pass
+ if is_garbage(path):
+ try:
+ path.remove(rec=1)
+ except KeyboardInterrupt:
+ raise
+ except: # this might be py.error.Error, WindowsError ...
+ pass
+
+ # make link...
+ try:
+ username = os.environ['USER'] #linux, et al
+ except KeyError:
+ try:
+ username = os.environ['USERNAME'] #windows
+ except KeyError:
+ username = 'current'
+
+ src = str(udir)
+ dest = src[:src.rfind('-')] + '-' + username
+ try:
+ os.unlink(dest)
+ except OSError:
+ pass
+ try:
+ os.symlink(src, dest)
+ except (OSError, AttributeError, NotImplementedError):
+ pass
+
+ return udir
+ make_numbered_dir = classmethod(make_numbered_dir)
+
+
+def copymode(src, dest):
+ """ copy permission from src to dst. """
+ import shutil
+ shutil.copymode(src, dest)
+
+
+def copystat(src, dest):
+ """ copy permission, last modification time,
+ last access time, and flags from src to dst."""
+ import shutil
+ shutil.copystat(str(src), str(dest))
+
+
+def copychunked(src, dest):
+ chunksize = 524288 # half a meg of bytes
+ fsrc = src.open('rb')
+ try:
+ fdest = dest.open('wb')
+ try:
+ while 1:
+ buf = fsrc.read(chunksize)
+ if not buf:
+ break
+ fdest.write(buf)
+ finally:
+ fdest.close()
+ finally:
+ fsrc.close()
+
+
+def isimportable(name):
+ if name and (name[0].isalpha() or name[0] == '_'):
+ name = name.replace("_", '')
+ return not name or name.isalnum()
diff --git a/third_party/python/py/py/_path/svnurl.py b/third_party/python/py/py/_path/svnurl.py
new file mode 100644
index 0000000000..6589a71d09
--- /dev/null
+++ b/third_party/python/py/py/_path/svnurl.py
@@ -0,0 +1,380 @@
+"""
+module defining a subversion path object based on the external
+command 'svn'. This modules aims to work with svn 1.3 and higher
+but might also interact well with earlier versions.
+"""
+
+import os, sys, time, re
+import py
+from py import path, process
+from py._path import common
+from py._path import svnwc as svncommon
+from py._path.cacheutil import BuildcostAccessCache, AgingCache
+
+DEBUG=False
+
+class SvnCommandPath(svncommon.SvnPathBase):
+ """ path implementation that offers access to (possibly remote) subversion
+ repositories. """
+
+ _lsrevcache = BuildcostAccessCache(maxentries=128)
+ _lsnorevcache = AgingCache(maxentries=1000, maxseconds=60.0)
+
+ def __new__(cls, path, rev=None, auth=None):
+ self = object.__new__(cls)
+ if isinstance(path, cls):
+ rev = path.rev
+ auth = path.auth
+ path = path.strpath
+ svncommon.checkbadchars(path)
+ path = path.rstrip('/')
+ self.strpath = path
+ self.rev = rev
+ self.auth = auth
+ return self
+
+ def __repr__(self):
+ if self.rev == -1:
+ return 'svnurl(%r)' % self.strpath
+ else:
+ return 'svnurl(%r, %r)' % (self.strpath, self.rev)
+
+ def _svnwithrev(self, cmd, *args):
+ """ execute an svn command, append our own url and revision """
+ if self.rev is None:
+ return self._svnwrite(cmd, *args)
+ else:
+ args = ['-r', self.rev] + list(args)
+ return self._svnwrite(cmd, *args)
+
+ def _svnwrite(self, cmd, *args):
+ """ execute an svn command, append our own url """
+ l = ['svn %s' % cmd]
+ args = ['"%s"' % self._escape(item) for item in args]
+ l.extend(args)
+ l.append('"%s"' % self._encodedurl())
+ # fixing the locale because we can't otherwise parse
+ string = " ".join(l)
+ if DEBUG:
+ print("execing %s" % string)
+ out = self._svncmdexecauth(string)
+ return out
+
+ def _svncmdexecauth(self, cmd):
+ """ execute an svn command 'as is' """
+ cmd = svncommon.fixlocale() + cmd
+ if self.auth is not None:
+ cmd += ' ' + self.auth.makecmdoptions()
+ return self._cmdexec(cmd)
+
+ def _cmdexec(self, cmd):
+ try:
+ out = process.cmdexec(cmd)
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if (e.err.find('File Exists') != -1 or
+ e.err.find('File already exists') != -1):
+ raise py.error.EEXIST(self)
+ raise
+ return out
+
+ def _svnpopenauth(self, cmd):
+ """ execute an svn command, return a pipe for reading stdin """
+ cmd = svncommon.fixlocale() + cmd
+ if self.auth is not None:
+ cmd += ' ' + self.auth.makecmdoptions()
+ return self._popen(cmd)
+
+ def _popen(self, cmd):
+ return os.popen(cmd)
+
+ def _encodedurl(self):
+ return self._escape(self.strpath)
+
+ def _norev_delentry(self, path):
+ auth = self.auth and self.auth.makecmdoptions() or None
+ self._lsnorevcache.delentry((str(path), auth))
+
+ def open(self, mode='r'):
+ """ return an opened file with the given mode. """
+ if mode not in ("r", "rU",):
+ raise ValueError("mode %r not supported" % (mode,))
+ assert self.check(file=1) # svn cat returns an empty file otherwise
+ if self.rev is None:
+ return self._svnpopenauth('svn cat "%s"' % (
+ self._escape(self.strpath), ))
+ else:
+ return self._svnpopenauth('svn cat -r %s "%s"' % (
+ self.rev, self._escape(self.strpath)))
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path of the current path joined
+ with any given path arguments.
+ """
+ l = self.strpath.split(self.sep)
+ if len(l) < 4:
+ raise py.error.EINVAL(self, "base is not valid")
+ elif len(l) == 4:
+ return self.join(*args, **kwargs)
+ else:
+ return self.new(basename='').join(*args, **kwargs)
+
+ # modifying methods (cache must be invalidated)
+ def mkdir(self, *args, **kwargs):
+ """ create & return the directory joined with args.
+ pass a 'msg' keyword argument to set the commit message.
+ """
+ commit_msg = kwargs.get('msg', "mkdir by py lib invocation")
+ createpath = self.join(*args)
+ createpath._svnwrite('mkdir', '-m', commit_msg)
+ self._norev_delentry(createpath.dirpath())
+ return createpath
+
+ def copy(self, target, msg='copied by py lib invocation'):
+ """ copy path to target with checkin message msg."""
+ if getattr(target, 'rev', None) is not None:
+ raise py.error.EINVAL(target, "revisions are immutable")
+ self._svncmdexecauth('svn copy -m "%s" "%s" "%s"' %(msg,
+ self._escape(self), self._escape(target)))
+ self._norev_delentry(target.dirpath())
+
+ def rename(self, target, msg="renamed by py lib invocation"):
+ """ rename this path to target with checkin message msg. """
+ if getattr(self, 'rev', None) is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ self._svncmdexecauth('svn move -m "%s" --force "%s" "%s"' %(
+ msg, self._escape(self), self._escape(target)))
+ self._norev_delentry(self.dirpath())
+ self._norev_delentry(self)
+
+ def remove(self, rec=1, msg='removed by py lib invocation'):
+ """ remove a file or directory (or a directory tree if rec=1) with
+checkin message msg."""
+ if self.rev is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ self._svncmdexecauth('svn rm -m "%s" "%s"' %(msg, self._escape(self)))
+ self._norev_delentry(self.dirpath())
+
+ def export(self, topath):
+ """ export to a local path
+
+        topath should not exist prior to calling this; returns a
+        py.path.local instance
+ """
+ topath = py.path.local(topath)
+ args = ['"%s"' % (self._escape(self),),
+ '"%s"' % (self._escape(topath),)]
+ if self.rev is not None:
+ args = ['-r', str(self.rev)] + args
+ self._svncmdexecauth('svn export %s' % (' '.join(args),))
+ return topath
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+ a file). If you specify a keyword argument 'dir=True'
+ then the path is forced to be a directory path.
+ """
+ if getattr(self, 'rev', None) is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ target = self.join(*args)
+ dir = kwargs.get('dir', 0)
+ for x in target.parts(reverse=True):
+ if x.check():
+ break
+ else:
+ raise py.error.ENOENT(target, "has not any valid base!")
+ if x == target:
+ if not x.check(dir=dir):
+ raise dir and py.error.ENOTDIR(x) or py.error.EISDIR(x)
+ return x
+ tocreate = target.relto(x)
+ basename = tocreate.split(self.sep, 1)[0]
+ tempdir = py.path.local.mkdtemp()
+ try:
+ tempdir.ensure(tocreate, dir=dir)
+ cmd = 'svn import -m "%s" "%s" "%s"' % (
+ "ensure %s" % self._escape(tocreate),
+ self._escape(tempdir.join(basename)),
+ x.join(basename)._encodedurl())
+ self._svncmdexecauth(cmd)
+ self._norev_delentry(x)
+ finally:
+ tempdir.remove()
+ return target
+
+ # end of modifying methods
+ def _propget(self, name):
+ res = self._svnwithrev('propget', name)
+ return res[:-1] # strip trailing newline
+
+ def _proplist(self):
+ res = self._svnwithrev('proplist')
+ lines = res.split('\n')
+ lines = [x.strip() for x in lines[1:]]
+ return svncommon.PropListDict(self, lines)
+
+ def info(self):
+ """ return an Info structure with svn-provided information. """
+ parent = self.dirpath()
+ nameinfo_seq = parent._listdir_nameinfo()
+ bn = self.basename
+ for name, info in nameinfo_seq:
+ if name == bn:
+ return info
+ raise py.error.ENOENT(self)
+
+
+ def _listdir_nameinfo(self):
+ """ return sequence of name-info directory entries of self """
+ def builder():
+ try:
+ res = self._svnwithrev('ls', '-v')
+ except process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('non-existent in that revision') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find("E200009:") != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('File not found') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('not part of a repository')!=-1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('Unable to open')!=-1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.lower().find('method not allowed')!=-1:
+ raise py.error.EACCES(self, e.err)
+ raise py.error.Error(e.err)
+ lines = res.split('\n')
+ nameinfo_seq = []
+ for lsline in lines:
+ if lsline:
+ info = InfoSvnCommand(lsline)
+ if info._name != '.': # svn 1.5 produces '.' dirs,
+ nameinfo_seq.append((info._name, info))
+ nameinfo_seq.sort()
+ return nameinfo_seq
+ auth = self.auth and self.auth.makecmdoptions() or None
+ if self.rev is not None:
+ return self._lsrevcache.getorbuild((self.strpath, self.rev, auth),
+ builder)
+ else:
+ return self._lsnorevcache.getorbuild((self.strpath, auth),
+ builder)
+
+ def listdir(self, fil=None, sort=None):
+ """ list directory contents, possibly filter by the given fil func
+ and possibly sorted.
+ """
+ if isinstance(fil, str):
+ fil = common.FNMatcher(fil)
+ nameinfo_seq = self._listdir_nameinfo()
+ if len(nameinfo_seq) == 1:
+ name, info = nameinfo_seq[0]
+ if name == self.basename and info.kind == 'file':
+ #if not self.check(dir=1):
+ raise py.error.ENOTDIR(self)
+ paths = [self.join(name) for (name, info) in nameinfo_seq]
+ if fil:
+ paths = [x for x in paths if fil(x)]
+ self._sortlist(paths, sort)
+ return paths
+
+
+ def log(self, rev_start=None, rev_end=1, verbose=False):
+ """ return a list of LogEntry instances for this path.
+rev_start is the starting revision (defaulting to HEAD, the newest one).
+rev_end is the last revision (defaulting to 1, the first one).
+if verbose is True, then the LogEntry instances also know which files changed.
+"""
+ assert self.check() #make it simpler for the pipe
+ rev_start = rev_start is None and "HEAD" or rev_start
+ rev_end = rev_end is None and "HEAD" or rev_end
+
+ if rev_start == "HEAD" and rev_end == 1:
+ rev_opt = ""
+ else:
+ rev_opt = "-r %s:%s" % (rev_start, rev_end)
+ verbose_opt = verbose and "-v" or ""
+ xmlpipe = self._svnpopenauth('svn log --xml %s %s "%s"' %
+ (rev_opt, verbose_opt, self.strpath))
+ from xml.dom import minidom
+ tree = minidom.parse(xmlpipe)
+ result = []
+ for logentry in filter(None, tree.firstChild.childNodes):
+ if logentry.nodeType == logentry.ELEMENT_NODE:
+ result.append(svncommon.LogEntry(logentry))
+ return result
+
+#01234567890123456789012345678901234567890123467
+# 2256 hpk 165 Nov 24 17:55 __init__.py
+# XXX spotted by Guido, SVN 1.3.0 has different aligning, breaks the code!!!
+# 1312 johnny 1627 May 05 14:32 test_decorators.py
+#
+class InfoSvnCommand:
+ # the '0?' part in the middle is an indication of whether the resource is
+ # locked, see 'svn help ls'
+ lspattern = re.compile(
+ r'^ *(?P<rev>\d+) +(?P<author>.+?) +(0? *(?P<size>\d+))? '
+ r'*(?P<date>\w+ +\d{2} +[\d:]+) +(?P<file>.*)$')
+ def __init__(self, line):
+ # this is a typical line from 'svn ls http://...'
+ #_ 1127 jum 0 Jul 13 15:28 branch/
+ match = self.lspattern.match(line)
+ data = match.groupdict()
+ self._name = data['file']
+ if self._name[-1] == '/':
+ self._name = self._name[:-1]
+ self.kind = 'dir'
+ else:
+ self.kind = 'file'
+ #self.has_props = l.pop(0) == 'P'
+ self.created_rev = int(data['rev'])
+ self.last_author = data['author']
+ self.size = data['size'] and int(data['size']) or 0
+ self.mtime = parse_time_with_missing_year(data['date'])
+ self.time = self.mtime * 1000000
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+
+#____________________________________________________
+#
+# helper functions
+#____________________________________________________
+def parse_time_with_missing_year(timestr):
+ """ analyze the time part from a single line of "svn ls -v"
+ the svn output doesn't show the year makes the 'timestr'
+ ambigous.
+ """
+ import calendar
+ t_now = time.gmtime()
+
+ tparts = timestr.split()
+ month = time.strptime(tparts.pop(0), '%b')[1]
+ day = time.strptime(tparts.pop(0), '%d')[2]
+ last = tparts.pop(0) # year or hour:minute
+ try:
+ if ":" in last:
+ raise ValueError()
+ year = time.strptime(last, '%Y')[0]
+ hour = minute = 0
+ except ValueError:
+ hour, minute = time.strptime(last, '%H:%M')[3:5]
+ year = t_now[0]
+
+ t_result = (year, month, day, hour, minute, 0,0,0,0)
+ if t_result > t_now:
+ year -= 1
+ t_result = (year, month, day, hour, minute, 0,0,0,0)
+ return calendar.timegm(t_result)
+
+class PathEntry:
+ def __init__(self, ppart):
+ self.strpath = ppart.firstChild.nodeValue.encode('UTF-8')
+ self.action = ppart.getAttribute('action').encode('UTF-8')
+ if self.action == 'A':
+ self.copyfrom_path = ppart.getAttribute('copyfrom-path').encode('UTF-8')
+ if self.copyfrom_path:
+ self.copyfrom_rev = int(ppart.getAttribute('copyfrom-rev'))
+
diff --git a/third_party/python/py/py/_path/svnwc.py b/third_party/python/py/py/_path/svnwc.py
new file mode 100644
index 0000000000..3138dd85da
--- /dev/null
+++ b/third_party/python/py/py/_path/svnwc.py
@@ -0,0 +1,1240 @@
+"""
+svn-Command based Implementation of a Subversion WorkingCopy Path.
+
+ SvnWCCommandPath is the main class.
+
+"""
+
+import os, sys, time, re, calendar
+import py
+import subprocess
+from py._path import common
+
+#-----------------------------------------------------------
+# Caching latest repository revision and repo-paths
+# (getting them is slow with the current implementations)
+#
+# XXX make mt-safe
+#-----------------------------------------------------------
+
+class cache:
+ proplist = {}
+ info = {}
+ entries = {}
+ prop = {}
+
+class RepoEntry:
+ def __init__(self, url, rev, timestamp):
+ self.url = url
+ self.rev = rev
+ self.timestamp = timestamp
+
+ def __str__(self):
+ return "repo: %s;%s %s" %(self.url, self.rev, self.timestamp)
+
+class RepoCache:
+ """ The Repocache manages discovered repository paths
+ and their revisions. If inside a timeout the cache
+ will even return the revision of the root.
+ """
+ timeout = 20 # seconds after which we forget that we know the last revision
+
+ def __init__(self):
+ self.repos = []
+
+ def clear(self):
+ self.repos = []
+
+ def put(self, url, rev, timestamp=None):
+ if rev is None:
+ return
+ if timestamp is None:
+ timestamp = time.time()
+
+ for entry in self.repos:
+ if url == entry.url:
+ entry.timestamp = timestamp
+ entry.rev = rev
+ #print "set repo", entry
+ break
+ else:
+ entry = RepoEntry(url, rev, timestamp)
+ self.repos.append(entry)
+ #print "appended repo", entry
+
+ def get(self, url):
+ now = time.time()
+ for entry in self.repos:
+ if url.startswith(entry.url):
+ if now < entry.timestamp + self.timeout:
+ #print "returning immediate Etrny", entry
+ return entry.url, entry.rev
+ return entry.url, -1
+ return url, -1
+
+repositories = RepoCache()
+
+
+# svn support code
+
+ALLOWED_CHARS = "_ -/\\=$.~+%" #add characters as necessary when tested
+if sys.platform == "win32":
+ ALLOWED_CHARS += ":"
+ALLOWED_CHARS_HOST = ALLOWED_CHARS + '@:'
+
+def _getsvnversion(ver=[]):
+ try:
+ return ver[0]
+ except IndexError:
+ v = py.process.cmdexec("svn -q --version")
+ v.strip()
+ v = '.'.join(v.split('.')[:2])
+ ver.append(v)
+ return v
+
+def _escape_helper(text):
+ text = str(text)
+ if sys.platform != 'win32':
+ text = str(text).replace('$', '\\$')
+ return text
+
+def _check_for_bad_chars(text, allowed_chars=ALLOWED_CHARS):
+ for c in str(text):
+ if c.isalnum():
+ continue
+ if c in allowed_chars:
+ continue
+ return True
+ return False
+
+def checkbadchars(url):
+ # (hpk) not quite sure about the exact purpose, guido w.?
+ proto, uri = url.split("://", 1)
+ if proto != "file":
+ host, uripath = uri.split('/', 1)
+ # only check for bad chars in the non-protocol parts
+ if (_check_for_bad_chars(host, ALLOWED_CHARS_HOST) \
+ or _check_for_bad_chars(uripath, ALLOWED_CHARS)):
+ raise ValueError("bad char in %r" % (url, ))
+
+
+#_______________________________________________________________
+
+class SvnPathBase(common.PathBase):
+ """ Base implementation for SvnPath implementations. """
+ sep = '/'
+
+ def _geturl(self):
+ return self.strpath
+ url = property(_geturl, None, None, "url of this svn-path.")
+
+ def __str__(self):
+ """ return a string representation (including rev-number) """
+ return self.strpath
+
+ def __hash__(self):
+ return hash(self.strpath)
+
+ def new(self, **kw):
+ """ create a modified version of this path. A 'rev' argument
+ indicates a new revision.
+ the following keyword arguments modify various path parts::
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ obj = object.__new__(self.__class__)
+ obj.rev = kw.get('rev', self.rev)
+ obj.auth = kw.get('auth', self.auth)
+ dirname, basename, purebasename, ext = self._getbyspec(
+ "dirname,basename,purebasename,ext")
+ if 'basename' in kw:
+ if 'purebasename' in kw or 'ext' in kw:
+ raise ValueError("invalid specification %r" % kw)
+ else:
+ pb = kw.setdefault('purebasename', purebasename)
+ ext = kw.setdefault('ext', ext)
+ if ext and not ext.startswith('.'):
+ ext = '.' + ext
+ kw['basename'] = pb + ext
+
+ kw.setdefault('dirname', dirname)
+ kw.setdefault('sep', self.sep)
+ if kw['basename']:
+ obj.strpath = "%(dirname)s%(sep)s%(basename)s" % kw
+ else:
+ obj.strpath = "%(dirname)s" % kw
+ return obj
+
+ def _getbyspec(self, spec):
+ """ get specified parts of the path. 'arg' is a string
+ with comma separated path parts. The parts are returned
+ in exactly the order of the specification.
+
+ you may specify the following parts:
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ res = []
+ parts = self.strpath.split(self.sep)
+ for name in spec.split(','):
+ name = name.strip()
+ if name == 'dirname':
+ res.append(self.sep.join(parts[:-1]))
+ elif name == 'basename':
+ res.append(parts[-1])
+ else:
+ basename = parts[-1]
+ i = basename.rfind('.')
+ if i == -1:
+ purebasename, ext = basename, ''
+ else:
+ purebasename, ext = basename[:i], basename[i:]
+ if name == 'purebasename':
+ res.append(purebasename)
+ elif name == 'ext':
+ res.append(ext)
+ else:
+ raise NameError("Don't know part %r" % name)
+ return res
+
+ def __eq__(self, other):
+ """ return true if path and rev attributes each match """
+ return (str(self) == str(other) and
+ self.rev == other.rev)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def join(self, *args):
+ """ return a new Path (with the same revision) which is composed
+ of the self Path followed by 'args' path components.
+ """
+ if not args:
+ return self
+
+ args = tuple([arg.strip(self.sep) for arg in args])
+ parts = (self.strpath, ) + args
+ newpath = self.__class__(self.sep.join(parts), self.rev, self.auth)
+ return newpath
+
+ def propget(self, name):
+ """ return the content of the given property. """
+ value = self._propget(name)
+ return value
+
+ def proplist(self):
+ """ list all property names. """
+ content = self._proplist()
+ return content
+
+ def size(self):
+ """ Return the size of the file content of the Path. """
+ return self.info().size
+
+ def mtime(self):
+ """ Return the last modification time of the file. """
+ return self.info().mtime
+
+ # shared help methods
+
+ def _escape(self, cmd):
+ return _escape_helper(cmd)
+
+
+ #def _childmaxrev(self):
+ # """ return maximum revision number of childs (or self.rev if no childs) """
+ # rev = self.rev
+ # for name, info in self._listdir_nameinfo():
+ # rev = max(rev, info.created_rev)
+ # return rev
+
+ #def _getlatestrevision(self):
+ # """ return latest repo-revision for this path. """
+ # url = self.strpath
+ # path = self.__class__(url, None)
+ #
+ # # we need a long walk to find the root-repo and revision
+ # while 1:
+ # try:
+ # rev = max(rev, path._childmaxrev())
+ # previous = path
+ # path = path.dirpath()
+ # except (IOError, process.cmdexec.Error):
+ # break
+ # if rev is None:
+ # raise IOError, "could not determine newest repo revision for %s" % self
+ # return rev
+
+ class Checkers(common.Checkers):
+ def dir(self):
+ try:
+ return self.path.info().kind == 'dir'
+ except py.error.Error:
+ return self._listdirworks()
+
+ def _listdirworks(self):
+ try:
+ self.path.listdir()
+ except py.error.ENOENT:
+ return False
+ else:
+ return True
+
+ def file(self):
+ try:
+ return self.path.info().kind == 'file'
+ except py.error.ENOENT:
+ return False
+
+ def exists(self):
+ try:
+ return self.path.info()
+ except py.error.ENOENT:
+ return self._listdirworks()
+
+def parse_apr_time(timestr):
+ i = timestr.rfind('.')
+ if i == -1:
+ raise ValueError("could not parse %s" % timestr)
+ timestr = timestr[:i]
+ parsedtime = time.strptime(timestr, "%Y-%m-%dT%H:%M:%S")
+ return time.mktime(parsedtime)
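+
+# For illustration (sketch): parse_apr_time("2003-07-29T20:05:11.598637Z")
+# drops the fractional part and returns epoch seconds via time.mktime.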
+
+class PropListDict(dict):
+ """ a Dictionary which lazily fetches property values (via propget)"""
+ def __init__(self, path, keynames):
+ dict.__init__(self, [(x, None) for x in keynames])
+ self.path = path
+
+ def __getitem__(self, key):
+ value = dict.__getitem__(self, key)
+ if value is None:
+ value = self.path.propget(key)
+ dict.__setitem__(self, key, value)
+ return value
+
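+# fixlocale returns a 'LC_ALL=C ' command prefix on POSIX so that svn output
+# is emitted in plain, parseable English; on Windows it returns ''.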
+def fixlocale():
+ if sys.platform != 'win32':
+ return 'LC_ALL=C '
+ return ''
+
+# some nasty chunk of code to solve path and url conversion and quoting issues
+ILLEGAL_CHARS = '* | \\ / : < > ? \t \n \x0b \x0c \r'.split(' ')
+if os.sep in ILLEGAL_CHARS:
+ ILLEGAL_CHARS.remove(os.sep)
+ISWINDOWS = sys.platform == 'win32'
+_reg_allow_disk = re.compile(r'^([a-z]\:\\)?[^:]+$', re.I)
+def _check_path(path):
+ illegal = ILLEGAL_CHARS[:]
+ sp = path.strpath
+ if ISWINDOWS:
+ illegal.remove(':')
+ if not _reg_allow_disk.match(sp):
+ raise ValueError('path may not contain a colon (:)')
+ for char in sp:
+ if char not in string.printable or char in illegal:
+ raise ValueError('illegal character %r in path' % (char,))
+
+def path_to_fspath(path, addat=True):
+ _check_path(path)
+ sp = path.strpath
+ if addat and path.rev != -1:
+ sp = '%s@%s' % (sp, path.rev)
+ elif addat:
+ sp = '%s@HEAD' % (sp,)
+ return sp
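+
+# For illustration (sketch): path_to_fspath on a path with strpath '/repo/trunk'
+# yields '/repo/trunk@10' at rev 10 and '/repo/trunk@HEAD' at rev -1.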
+
+def url_from_path(path):
+ fspath = path_to_fspath(path, False)
+ from urllib import quote
+ if ISWINDOWS:
+ match = _reg_allow_disk.match(fspath)
+ fspath = fspath.replace('\\', '/')
+ if match.group(1):
+ fspath = '/%s%s' % (match.group(1).replace('\\', '/'),
+ quote(fspath[len(match.group(1)):]))
+ else:
+ fspath = quote(fspath)
+ else:
+ fspath = quote(fspath)
+ if path.rev != -1:
+ fspath = '%s@%s' % (fspath, path.rev)
+ else:
+ fspath = '%s@HEAD' % (fspath,)
+ return 'file://%s' % (fspath,)
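+
+# For illustration (sketch): on POSIX, url_from_path for '/tmp/repo' at rev 5
+# returns 'file:///tmp/repo@5'.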
+
+class SvnAuth(object):
+ """ container for auth information for Subversion """
+ def __init__(self, username, password, cache_auth=True, interactive=True):
+ self.username = username
+ self.password = password
+ self.cache_auth = cache_auth
+ self.interactive = interactive
+
+ def makecmdoptions(self):
+ uname = self.username.replace('"', '\\"')
+ passwd = self.password.replace('"', '\\"')
+ ret = []
+ if uname:
+ ret.append('--username="%s"' % (uname,))
+ if passwd:
+ ret.append('--password="%s"' % (passwd,))
+ if not self.cache_auth:
+ ret.append('--no-auth-cache')
+ if not self.interactive:
+ ret.append('--non-interactive')
+ return ' '.join(ret)
+
+ def __str__(self):
+ return "<SvnAuth username=%s ...>" %(self.username,)
+
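+# Example (illustrative sketch; the username, password and URL below are
+# placeholders, not part of the library):
+#
+#   auth = SvnAuth('someuser', 'secret', cache_auth=False, interactive=False)
+#   wc = py.path.svnwc('/tmp/wc', auth=auth)
+#   wc.checkout('http://svn.example.com/repo/trunk')
+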
+rex_blame = re.compile(r'\s*(\d+)\s*(\S+) (.*)')
+
+class SvnWCCommandPath(common.PathBase):
+ """ path implementation offering access/modification to svn working copies.
+ It has methods similar to the functions in os.path and similar to the
+ commands of the svn client.
+ """
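+ # Typical use (illustrative sketch):
+ #   wc = py.path.svnwc('some-checkout')
+ #   wc.update()
+ #   wc.join('file.txt').ensure().write('content')
+ #   wc.commit('update file.txt')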
+ sep = os.sep
+
+ def __new__(cls, wcpath=None, auth=None):
+ self = object.__new__(cls)
+ if isinstance(wcpath, cls):
+ if wcpath.__class__ == cls:
+ return wcpath
+ wcpath = wcpath.localpath
+ if _check_for_bad_chars(str(wcpath),
+ ALLOWED_CHARS):
+ raise ValueError("bad char in wcpath %s" % (wcpath, ))
+ self.localpath = py.path.local(wcpath)
+ self.auth = auth
+ return self
+
+ strpath = property(lambda x: str(x.localpath), None, None, "string path")
+ rev = property(lambda x: x.info(usecache=0).rev, None, None, "revision")
+
+ def __eq__(self, other):
+ return self.localpath == getattr(other, 'localpath', None)
+
+ def _geturl(self):
+ if getattr(self, '_url', None) is None:
+ info = self.info()
+ self._url = info.url #SvnPath(info.url, info.rev)
+ assert isinstance(self._url, py.builtin._basestring)
+ return self._url
+
+ url = property(_geturl, None, None, "url of this WC item")
+
+ def _escape(self, cmd):
+ return _escape_helper(cmd)
+
+ def dump(self, obj):
+ """ pickle object into path location"""
+ return self.localpath.dump(obj)
+
+ def svnurl(self):
+ """ return current SvnPath for this WC-item. """
+ info = self.info()
+ return py.path.svnurl(info.url)
+
+ def __repr__(self):
+ return "svnwc(%r)" % (self.strpath) # , self._url)
+
+ def __str__(self):
+ return str(self.localpath)
+
+ def _makeauthoptions(self):
+ if self.auth is None:
+ return ''
+ return self.auth.makecmdoptions()
+
+ def _authsvn(self, cmd, args=None):
+ args = args and list(args) or []
+ args.append(self._makeauthoptions())
+ return self._svn(cmd, *args)
+
+ def _svn(self, cmd, *args):
+ l = ['svn %s' % cmd]
+ args = [self._escape(item) for item in args]
+ l.extend(args)
+ l.append('"%s"' % self._escape(self.strpath))
+ # try fixing the locale because we can't otherwise parse
+ string = fixlocale() + " ".join(l)
+ try:
+ try:
+ key = 'LC_MESSAGES'
+ hold = os.environ.get(key)
+ os.environ[key] = 'C'
+ out = py.process.cmdexec(string)
+ finally:
+ if hold:
+ os.environ[key] = hold
+ else:
+ del os.environ[key]
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ strerr = e.err.lower()
+ if strerr.find('not found') != -1:
+ raise py.error.ENOENT(self)
+ elif strerr.find("E200009:") != -1:
+ raise py.error.ENOENT(self)
+ if (strerr.find('file exists') != -1 or
+ strerr.find('file already exists') != -1 or
+ strerr.find('w150002:') != -1 or
+ strerr.find("can't create directory") != -1):
+ raise py.error.EEXIST(strerr) #self)
+ raise
+ return out
+
+ def switch(self, url):
+ """ switch to given URL. """
+ self._authsvn('switch', [url])
+
+ def checkout(self, url=None, rev=None):
+ """ checkout from url to local wcpath. """
+ args = []
+ if url is None:
+ url = self.url
+ if rev is None or rev == -1:
+ if (sys.platform != 'win32' and
+ _getsvnversion() == '1.3'):
+ url += "@HEAD"
+ else:
+ if _getsvnversion() == '1.3':
+ url += "@%d" % rev
+ else:
+ args.append('-r' + str(rev))
+ args.append(url)
+ self._authsvn('co', args)
+
+ def update(self, rev='HEAD', interactive=True):
+ """ update working copy item to given revision. (None -> HEAD). """
+ opts = ['-r', rev]
+ if not interactive:
+ opts.append("--non-interactive")
+ self._authsvn('up', opts)
+
+ def write(self, content, mode='w'):
+ """ write content into local filesystem wc. """
+ self.localpath.write(content, mode)
+
+ def dirpath(self, *args):
+ """ return the directory Path of the current Path. """
+ return self.__class__(self.localpath.dirpath(*args), auth=self.auth)
+
+ def _ensuredirs(self):
+ parent = self.dirpath()
+ if parent.check(dir=0):
+ parent._ensuredirs()
+ if self.check(dir=0):
+ self.mkdir()
+ return self
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+ a file). if you specify a keyword argument 'directory=True'
+ then the path is forced to be a directory path.
+ """
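+ # e.g. (sketch) wc.ensure('sub', 'file.txt') creates any missing parent
+ # directory and an empty file, svn-adds them, and returns the new path.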
+ p = self.join(*args)
+ if p.check():
+ if p.check(versioned=False):
+ p.add()
+ return p
+ if kwargs.get('dir', 0):
+ return p._ensuredirs()
+ parent = p.dirpath()
+ parent._ensuredirs()
+ p.write("")
+ p.add()
+ return p
+
+ def mkdir(self, *args):
+ """ create & return the directory joined with args. """
+ if args:
+ return self.join(*args).mkdir()
+ else:
+ self._svn('mkdir')
+ return self
+
+ def add(self):
+ """ add ourself to svn """
+ self._svn('add')
+
+ def remove(self, rec=1, force=1):
+ """ remove a file or a directory tree. 'rec'ursive is
+ ignored and considered always true (because of
+ underlying svn semantics).
+ """
+ assert rec, "svn cannot remove non-recursively"
+ if not self.check(versioned=True):
+ # not added to svn (anymore?), just remove
+ py.path.local(self).remove()
+ return
+ flags = []
+ if force:
+ flags.append('--force')
+ self._svn('remove', *flags)
+
+ def copy(self, target):
+ """ copy path to target."""
+ py.process.cmdexec("svn copy %s %s" %(str(self), str(target)))
+
+ def rename(self, target):
+ """ rename this path to target. """
+ py.process.cmdexec("svn move --force %s %s" %(str(self), str(target)))
+
+ def lock(self):
+ """ set a lock (exclusive) on the resource """
+ out = self._authsvn('lock').strip()
+ if not out:
+ # warning or error, raise exception
+ raise ValueError("unknown error in svn lock command")
+
+ def unlock(self):
+ """ unset a previously set lock """
+ out = self._authsvn('unlock').strip()
+ if out.startswith('svn:'):
+ # warning or error, raise exception
+ raise Exception(out[4:])
+
+ def cleanup(self):
+ """ remove any locks from the resource """
+ # XXX should be fixed properly!!!
+ try:
+ self.unlock()
+ except:
+ pass
+
+ def status(self, updates=0, rec=0, externals=0):
+ """ return (collective) Status object for this file. """
+ # http://svnbook.red-bean.com/book.html#svn-ch-3-sect-4.3.1
+ # 2201 2192 jum test
+ # XXX
+ if externals:
+ raise ValueError("XXX cannot perform status() "
+ "on external items yet")
+ else:
+ #1.2 supports: externals = '--ignore-externals'
+ externals = ''
+ if rec:
+ rec= ''
+ else:
+ rec = '--non-recursive'
+
+ # XXX does not work on all subversion versions
+ #if not externals:
+ # externals = '--ignore-externals'
+
+ if updates:
+ updates = '-u'
+ else:
+ updates = ''
+
+ try:
+ cmd = 'status -v --xml --no-ignore %s %s %s' % (
+ updates, rec, externals)
+ out = self._authsvn(cmd)
+ except py.process.cmdexec.Error:
+ cmd = 'status -v --no-ignore %s %s %s' % (
+ updates, rec, externals)
+ out = self._authsvn(cmd)
+ rootstatus = WCStatus(self).fromstring(out, self)
+ else:
+ rootstatus = XMLWCStatus(self).fromstring(out, self)
+ return rootstatus
+
+ def diff(self, rev=None):
+ """ return a diff of the current path against revision rev (defaulting
+ to the last one).
+ """
+ args = []
+ if rev is not None:
+ args.append("-r %d" % rev)
+ out = self._authsvn('diff', args)
+ return out
+
+ def blame(self):
+ """ return a list of tuples of three elements:
+ (revision, committer, line)
+ """
+ out = self._svn('blame')
+ result = []
+ blamelines = out.splitlines()
+ reallines = py.path.svnurl(self.url).readlines()
+ for i, (blameline, line) in enumerate(
+ zip(blamelines, reallines)):
+ m = rex_blame.match(blameline)
+ if not m:
+ raise ValueError("output line %r of svn blame does not match "
+ "expected format" % (line, ))
+ rev, name, _ = m.groups()
+ result.append((int(rev), name, line))
+ return result
+
+ _rex_commit = re.compile(r'.*Committed revision (\d+)\.$', re.DOTALL)
+ def commit(self, msg='', rec=1):
+ """ commit with support for non-recursive commits """
+ # XXX i guess escaping should be done better here?!?
+ cmd = 'commit -m "%s" --force-log' % (msg.replace('"', '\\"'),)
+ if not rec:
+ cmd += ' -N'
+ out = self._authsvn(cmd)
+ try:
+ del cache.info[self]
+ except KeyError:
+ pass
+ if out:
+ m = self._rex_commit.match(out)
+ return int(m.group(1))
+
+ def propset(self, name, value, *args):
+ """ set property name to value on this path. """
+ d = py.path.local.mkdtemp()
+ try:
+ p = d.join('value')
+ p.write(value)
+ self._svn('propset', name, '--file', str(p), *args)
+ finally:
+ d.remove()
+
+ def propget(self, name):
+ """ get property name on this path. """
+ res = self._svn('propget', name)
+ return res[:-1] # strip trailing newline
+
+ def propdel(self, name):
+ """ delete property name on this path. """
+ res = self._svn('propdel', name)
+ return res[:-1] # strip trailing newline
+
+ def proplist(self, rec=0):
+ """ return a mapping of property names to property values.
+If rec is True, then return a dictionary mapping sub-paths to such mappings.
+"""
+ if rec:
+ res = self._svn('proplist -R')
+ return make_recursive_propdict(self, res)
+ else:
+ res = self._svn('proplist')
+ lines = res.split('\n')
+ lines = [x.strip() for x in lines[1:]]
+ return PropListDict(self, lines)
+
+ def revert(self, rec=0):
+ """ revert the local changes of this path. if rec is True, do so
+recursively. """
+ if rec:
+ result = self._svn('revert -R')
+ else:
+ result = self._svn('revert')
+ return result
+
+ def new(self, **kw):
+ """ create a modified version of this path. A 'rev' argument
+ indicates a new revision.
+ the following keyword arguments modify various path parts:
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ if kw:
+ localpath = self.localpath.new(**kw)
+ else:
+ localpath = self.localpath
+ return self.__class__(localpath, auth=self.auth)
+
+ def join(self, *args, **kwargs):
+ """ return a new Path (with the same revision) which is composed
+ of the self Path followed by 'args' path components.
+ """
+ if not args:
+ return self
+ localpath = self.localpath.join(*args, **kwargs)
+ return self.__class__(localpath, auth=self.auth)
+
+ def info(self, usecache=1):
+ """ return an Info structure with svn-provided information. """
+ info = usecache and cache.info.get(self)
+ if not info:
+ try:
+ output = self._svn('info')
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('Path is not a working copy directory') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find("is not under version control") != -1:
+ raise py.error.ENOENT(self, e.err)
+ raise
+ # XXX SVN 1.3 has output on stderr instead of stdout (while it does
+ # return 0!), so a bit nasty, but we assume nothing is written
+ # to stderr...
+ if (output.strip() == '' or
+ output.lower().find('not a versioned resource') != -1):
+ raise py.error.ENOENT(self, output)
+ info = InfoSvnWCCommand(output)
+
+ # Can't reliably compare on Windows without access to win32api
+ if sys.platform != 'win32':
+ if info.path != self.localpath:
+ raise py.error.ENOENT(self, "not a versioned resource:" +
+ " %s != %s" % (info.path, self.localpath))
+ cache.info[self] = info
+ return info
+
+ def listdir(self, fil=None, sort=None):
+ """ return a sequence of Paths.
+
+ listdir will return either a tuple or a list of paths
+ depending on implementation choices.
+ """
+ if isinstance(fil, str):
+ fil = common.FNMatcher(fil)
+ # XXX unify argument naming with LocalPath.listdir
+ def notsvn(path):
+ return path.basename != '.svn'
+
+ paths = []
+ for localpath in self.localpath.listdir(notsvn):
+ p = self.__class__(localpath, auth=self.auth)
+ if notsvn(p) and (not fil or fil(p)):
+ paths.append(p)
+ self._sortlist(paths, sort)
+ return paths
+
+ def open(self, mode='r'):
+ """ return an opened file with the given mode. """
+ return open(self.strpath, mode)
+
+ def _getbyspec(self, spec):
+ return self.localpath._getbyspec(spec)
+
+ class Checkers(py.path.local.Checkers):
+ def __init__(self, path):
+ self.svnwcpath = path
+ self.path = path.localpath
+ def versioned(self):
+ try:
+ s = self.svnwcpath.info()
+ except (py.error.ENOENT, py.error.EEXIST):
+ return False
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('is not a working copy')!=-1:
+ return False
+ if e.err.lower().find('not a versioned resource') != -1:
+ return False
+ raise
+ else:
+ return True
+
+ def log(self, rev_start=None, rev_end=1, verbose=False):
+ """ return a list of LogEntry instances for this path.
+rev_start is the starting revision (defaulting to the first one).
+rev_end is the last revision (defaulting to HEAD).
+if verbose is True, then the LogEntry instances also know which files changed.
+"""
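+ # e.g. (sketch) wc.log(verbose=True) returns LogEntry objects exposing
+ # .rev, .author, .date, .msg and (with verbose) the changed .strpaths.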
+ assert self.check() # make it simpler for the pipe
+ rev_start = rev_start is None and "HEAD" or rev_start
+ rev_end = rev_end is None and "HEAD" or rev_end
+ if rev_start == "HEAD" and rev_end == 1:
+ rev_opt = ""
+ else:
+ rev_opt = "-r %s:%s" % (rev_start, rev_end)
+ verbose_opt = verbose and "-v" or ""
+ locale_env = fixlocale()
+ # some blather on stderr
+ auth_opt = self._makeauthoptions()
+ #stdin, stdout, stderr = os.popen3(locale_env +
+ # 'svn log --xml %s %s %s "%s"' % (
+ # rev_opt, verbose_opt, auth_opt,
+ # self.strpath))
+ cmd = locale_env + 'svn log --xml %s %s %s "%s"' % (
+ rev_opt, verbose_opt, auth_opt, self.strpath)
+
+ popen = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ shell=True,
+ )
+ stdout, stderr = popen.communicate()
+ stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
+ minidom,ExpatError = importxml()
+ try:
+ tree = minidom.parseString(stdout)
+ except ExpatError:
+ raise ValueError('no such revision')
+ result = []
+ for logentry in filter(None, tree.firstChild.childNodes):
+ if logentry.nodeType == logentry.ELEMENT_NODE:
+ result.append(LogEntry(logentry))
+ return result
+
+ def size(self):
+ """ Return the size of the file content of the Path. """
+ return self.info().size
+
+ def mtime(self):
+ """ Return the last modification time of the file. """
+ return self.info().mtime
+
+ def __hash__(self):
+ return hash((self.strpath, self.__class__, self.auth))
+
+
+class WCStatus:
+ attrnames = ('modified','added', 'conflict', 'unchanged', 'external',
+ 'deleted', 'prop_modified', 'unknown', 'update_available',
+ 'incomplete', 'kindmismatch', 'ignored', 'locked', 'replaced'
+ )
+
+ def __init__(self, wcpath, rev=None, modrev=None, author=None):
+ self.wcpath = wcpath
+ self.rev = rev
+ self.modrev = modrev
+ self.author = author
+
+ for name in self.attrnames:
+ setattr(self, name, [])
+
+ def allpath(self, sort=True, **kw):
+ d = {}
+ for name in self.attrnames:
+ if name not in kw or kw[name]:
+ for path in getattr(self, name):
+ d[path] = 1
+ l = list(d.keys())
+ if sort:
+ l.sort()
+ return l
+
+ # XXX a bit scary to assume there's always 2 spaces between username and
+ # path, however with win32 allowing spaces in user names there doesn't
+ # seem to be a more solid approach :(
+ _rex_status = re.compile(r'\s+(\d+|-)\s+(\S+)\s+(.+?)\s{2,}(.*)')
+
+ def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
+ """ return a new WCStatus object from data 's'
+ """
+ rootstatus = WCStatus(rootwcpath, rev, modrev, author)
+ update_rev = None
+ for line in data.split('\n'):
+ if not line.strip():
+ continue
+ #print "processing %r" % line
+ flags, rest = line[:8], line[8:]
+ # first column
+ c0,c1,c2,c3,c4,c5,x6,c7 = flags
+ #if '*' in line:
+ # print "flags", repr(flags), "rest", repr(rest)
+
+ if c0 in '?XI':
+ fn = line.split(None, 1)[1]
+ if c0 == '?':
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.unknown.append(wcpath)
+ elif c0 == 'X':
+ wcpath = rootwcpath.__class__(
+ rootwcpath.localpath.join(fn, abs=1),
+ auth=rootwcpath.auth)
+ rootstatus.external.append(wcpath)
+ elif c0 == 'I':
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.ignored.append(wcpath)
+
+ continue
+
+ #elif c0 in '~!' or c4 == 'S':
+ # raise NotImplementedError("received flag %r" % c0)
+
+ m = WCStatus._rex_status.match(rest)
+ if not m:
+ if c7 == '*':
+ fn = rest.strip()
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.update_available.append(wcpath)
+ continue
+ if line.lower().find('against revision:')!=-1:
+ update_rev = int(rest.split(':')[1].strip())
+ continue
+ if line.lower().find('status on external') > -1:
+ # XXX not sure what to do here... perhaps we want to
+ # store some state instead of just continuing, as right
+ # now it makes the top-level external get added twice
+ # (once as external, once as 'normal' unchanged item)
+ # because of the way SVN presents external items
+ continue
+ # keep trying
+ raise ValueError("could not parse line %r" % line)
+ else:
+ rev, modrev, author, fn = m.groups()
+ wcpath = rootwcpath.join(fn, abs=1)
+ #assert wcpath.check()
+ if c0 == 'M':
+ assert wcpath.check(file=1), "didn't expect a directory with changed content here"
+ rootstatus.modified.append(wcpath)
+ elif c0 == 'A' or c3 == '+' :
+ rootstatus.added.append(wcpath)
+ elif c0 == 'D':
+ rootstatus.deleted.append(wcpath)
+ elif c0 == 'C':
+ rootstatus.conflict.append(wcpath)
+ elif c0 == '~':
+ rootstatus.kindmismatch.append(wcpath)
+ elif c0 == '!':
+ rootstatus.incomplete.append(wcpath)
+ elif c0 == 'R':
+ rootstatus.replaced.append(wcpath)
+ elif not c0.strip():
+ rootstatus.unchanged.append(wcpath)
+ else:
+ raise NotImplementedError("received flag %r" % c0)
+
+ if c1 == 'M':
+ rootstatus.prop_modified.append(wcpath)
+ # XXX do we cover all client versions here?
+ if c2 == 'L' or c5 == 'K':
+ rootstatus.locked.append(wcpath)
+ if c7 == '*':
+ rootstatus.update_available.append(wcpath)
+
+ if wcpath == rootwcpath:
+ rootstatus.rev = rev
+ rootstatus.modrev = modrev
+ rootstatus.author = author
+ if update_rev:
+ rootstatus.update_rev = update_rev
+ continue
+ return rootstatus
+ fromstring = staticmethod(fromstring)
+
+class XMLWCStatus(WCStatus):
+ def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
+ """ parse 'data' (XML string as output by 'svn st') into a status obj
+ """
+ # XXX for externals, the path is shown twice: once
+ # with external information, and once with full info as if
+ # the item was a normal non-external... the current way of
+ # dealing with this issue is by ignoring it - this does make
+ # externals appear as external items as well as 'normal',
+ # unchanged ones in the status object so this is far from ideal
+ rootstatus = WCStatus(rootwcpath, rev, modrev, author)
+ update_rev = None
+ minidom, ExpatError = importxml()
+ try:
+ doc = minidom.parseString(data)
+ except ExpatError:
+ e = sys.exc_info()[1]
+ raise ValueError(str(e))
+ urevels = doc.getElementsByTagName('against')
+ if urevels:
+ rootstatus.update_rev = urevels[-1].getAttribute('revision')
+ for entryel in doc.getElementsByTagName('entry'):
+ path = entryel.getAttribute('path')
+ statusel = entryel.getElementsByTagName('wc-status')[0]
+ itemstatus = statusel.getAttribute('item')
+
+ if itemstatus == 'unversioned':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.unknown.append(wcpath)
+ continue
+ elif itemstatus == 'external':
+ wcpath = rootwcpath.__class__(
+ rootwcpath.localpath.join(path, abs=1),
+ auth=rootwcpath.auth)
+ rootstatus.external.append(wcpath)
+ continue
+ elif itemstatus == 'ignored':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.ignored.append(wcpath)
+ continue
+ elif itemstatus == 'incomplete':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.incomplete.append(wcpath)
+ continue
+
+ rev = statusel.getAttribute('revision')
+ if itemstatus == 'added' or itemstatus == 'none':
+ rev = '0'
+ modrev = '?'
+ author = '?'
+ date = ''
+ elif itemstatus == "replaced":
+ pass
+ else:
+ #print entryel.toxml()
+ commitel = entryel.getElementsByTagName('commit')[0]
+ if commitel:
+ modrev = commitel.getAttribute('revision')
+ author = ''
+ author_els = commitel.getElementsByTagName('author')
+ if author_els:
+ for c in author_els[0].childNodes:
+ author += c.nodeValue
+ date = ''
+ for c in commitel.getElementsByTagName('date')[0]\
+ .childNodes:
+ date += c.nodeValue
+
+ wcpath = rootwcpath.join(path, abs=1)
+
+ assert itemstatus != 'modified' or wcpath.check(file=1), (
+ 'didn\'t expect a directory with changed content here')
+
+ itemattrname = {
+ 'normal': 'unchanged',
+ 'unversioned': 'unknown',
+ 'conflicted': 'conflict',
+ 'none': 'added',
+ }.get(itemstatus, itemstatus)
+
+ attr = getattr(rootstatus, itemattrname)
+ attr.append(wcpath)
+
+ propsstatus = statusel.getAttribute('props')
+ if propsstatus not in ('none', 'normal'):
+ rootstatus.prop_modified.append(wcpath)
+
+ if wcpath == rootwcpath:
+ rootstatus.rev = rev
+ rootstatus.modrev = modrev
+ rootstatus.author = author
+ rootstatus.date = date
+
+ # handle repos-status element (remote info)
+ rstatusels = entryel.getElementsByTagName('repos-status')
+ if rstatusels:
+ rstatusel = rstatusels[0]
+ ritemstatus = rstatusel.getAttribute('item')
+ if ritemstatus in ('added', 'modified'):
+ rootstatus.update_available.append(wcpath)
+
+ lockels = entryel.getElementsByTagName('lock')
+ if len(lockels):
+ rootstatus.locked.append(wcpath)
+
+ return rootstatus
+ fromstring = staticmethod(fromstring)
+
+class InfoSvnWCCommand:
+ def __init__(self, output):
+ # Path: test
+ # URL: http://codespeak.net/svn/std.path/trunk/dist/std.path/test
+ # Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
+ # Revision: 2151
+ # Node Kind: directory
+ # Schedule: normal
+ # Last Changed Author: hpk
+ # Last Changed Rev: 2100
+ # Last Changed Date: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
+ # Properties Last Updated: 2003-11-03 14:47:48 +0100 (Mon, 03 Nov 2003)
+
+ d = {}
+ for line in output.split('\n'):
+ if not line.strip():
+ continue
+ key, value = line.split(':', 1)
+ key = key.lower().replace(' ', '')
+ value = value.strip()
+ d[key] = value
+ try:
+ self.url = d['url']
+ except KeyError:
+ raise ValueError("Not a versioned resource")
+ #raise ValueError, "Not a versioned resource %r" % path
+ self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
+ try:
+ self.rev = int(d['revision'])
+ except KeyError:
+ self.rev = None
+
+ self.path = py.path.local(d['path'])
+ self.size = self.path.size()
+ if 'lastchangedrev' in d:
+ self.created_rev = int(d['lastchangedrev'])
+ if 'lastchangedauthor' in d:
+ self.last_author = d['lastchangedauthor']
+ if 'lastchangeddate' in d:
+ self.mtime = parse_wcinfotime(d['lastchangeddate'])
+ self.time = self.mtime * 1000000
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+def parse_wcinfotime(timestr):
+ """ Returns seconds since epoch, UTC. """
+ # example: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
+ m = re.match(r'(\d+-\d+-\d+ \d+:\d+:\d+) ([+-]\d+) .*', timestr)
+ if not m:
+ raise ValueError("timestring %r does not match" % timestr)
+ timestr, timezone = m.groups()
+ # do not handle timezone specially, return value should be UTC
+ parsedtime = time.strptime(timestr, "%Y-%m-%d %H:%M:%S")
+ return calendar.timegm(parsedtime)
+
+def make_recursive_propdict(wcroot,
+ output,
+ rex = re.compile("Properties on '(.*)':")):
+ """ Return a dictionary of path->PropListDict mappings. """
+ lines = [x for x in output.split('\n') if x]
+ pdict = {}
+ while lines:
+ line = lines.pop(0)
+ m = rex.match(line)
+ if not m:
+ raise ValueError("could not parse propget-line: %r" % line)
+ path = m.groups()[0]
+ wcpath = wcroot.join(path, abs=1)
+ propnames = []
+ while lines and lines[0].startswith(' '):
+ propname = lines.pop(0).strip()
+ propnames.append(propname)
+ assert propnames, "must have found properties!"
+ pdict[wcpath] = PropListDict(wcpath, propnames)
+ return pdict
+
+
+def importxml(cache=[]):
+ if cache:
+ return cache
+ from xml.dom import minidom
+ from xml.parsers.expat import ExpatError
+ cache.extend([minidom, ExpatError])
+ return cache
+
+class LogEntry:
+ def __init__(self, logentry):
+ self.rev = int(logentry.getAttribute('revision'))
+ for lpart in filter(None, logentry.childNodes):
+ if lpart.nodeType == lpart.ELEMENT_NODE:
+ if lpart.nodeName == 'author':
+ self.author = lpart.firstChild.nodeValue
+ elif lpart.nodeName == 'msg':
+ if lpart.firstChild:
+ self.msg = lpart.firstChild.nodeValue
+ else:
+ self.msg = ''
+ elif lpart.nodeName == 'date':
+ #2003-07-29T20:05:11.598637Z
+ timestr = lpart.firstChild.nodeValue
+ self.date = parse_apr_time(timestr)
+ elif lpart.nodeName == 'paths':
+ self.strpaths = []
+ for ppart in filter(None, lpart.childNodes):
+ if ppart.nodeType == ppart.ELEMENT_NODE:
+ self.strpaths.append(PathEntry(ppart))
+ def __repr__(self):
+ return '<Logentry rev=%d author=%s date=%s>' % (
+ self.rev, self.author, self.date)
+
+
diff --git a/third_party/python/py/py/_process/__init__.py b/third_party/python/py/py/_process/__init__.py
new file mode 100644
index 0000000000..86c714ad1a
--- /dev/null
+++ b/third_party/python/py/py/_process/__init__.py
@@ -0,0 +1 @@
+""" high-level sub-process handling """
diff --git a/third_party/python/py/py/_process/cmdexec.py b/third_party/python/py/py/_process/cmdexec.py
new file mode 100644
index 0000000000..f83a249402
--- /dev/null
+++ b/third_party/python/py/py/_process/cmdexec.py
@@ -0,0 +1,49 @@
+import sys
+import subprocess
+import py
+from subprocess import Popen, PIPE
+
+def cmdexec(cmd):
+ """ return unicode output of executing 'cmd' in a separate process.
+
+ raise cmdexec.Error exception if the command failed.
+ the exception will provide an 'err' attribute containing
+ the error-output from the command.
+ if the subprocess module does not return proper unicode strings,
+ sys.getdefaultencoding() will be used; if that does not exist, 'UTF-8'.
+ """
+ process = subprocess.Popen(cmd, shell=True,
+ universal_newlines=True,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = process.communicate()
+ if sys.version_info[0] < 3: # on py3 we get unicode strings, on py2 not
+ try:
+ default_encoding = sys.getdefaultencoding() # jython may not have it
+ except AttributeError:
+ default_encoding = sys.stdout.encoding or 'UTF-8'
+ out = unicode(out, process.stdout.encoding or default_encoding)
+ err = unicode(err, process.stderr.encoding or default_encoding)
+ status = process.poll()
+ if status:
+ raise ExecutionFailed(status, status, cmd, out, err)
+ return out
+
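+# Example usage (illustrative sketch):
+#
+#   out = cmdexec("echo hello")   # unicode stdout of the command
+#   try:
+#       cmdexec("exit 1")
+#   except cmdexec.Error:         # alias for ExecutionFailed, defined below
+#       pass                      # .status, .out and .err describe the failure
+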
+class ExecutionFailed(py.error.Error):
+ def __init__(self, status, systemstatus, cmd, out, err):
+ Exception.__init__(self)
+ self.status = status
+ self.systemstatus = systemstatus
+ self.cmd = cmd
+ self.err = err
+ self.out = out
+
+ def __str__(self):
+ return "ExecutionFailed: %d %s\n%s" %(self.status, self.cmd, self.err)
+
+# export the exception under the name 'py.process.cmdexec.Error'
+cmdexec.Error = ExecutionFailed
+try:
+ ExecutionFailed.__module__ = 'py.process.cmdexec'
+ ExecutionFailed.__name__ = 'Error'
+except (AttributeError, TypeError):
+ pass
diff --git a/third_party/python/py/py/_process/forkedfunc.py b/third_party/python/py/py/_process/forkedfunc.py
new file mode 100644
index 0000000000..1c28530688
--- /dev/null
+++ b/third_party/python/py/py/_process/forkedfunc.py
@@ -0,0 +1,120 @@
+
+"""
+ ForkedFunc provides a way to run a function in a forked process
+ and get at its return value, stdout and stderr output as well
+ as signals and exit statuses.
+"""
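+
+# Example usage (illustrative sketch, POSIX-only since it relies on os.fork):
+#
+#   def answer():
+#       return 42
+#
+#   result = ForkedFunc(answer).waitfinish()
+#   assert result.retval == 42 and result.exitstatus == 0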
+
+import py
+import os
+import sys
+import marshal
+
+
+def get_unbuffered_io(fd, filename):
+ f = open(str(filename), "w")
+ if fd != f.fileno():
+ os.dup2(f.fileno(), fd)
+ class AutoFlush:
+ def write(self, data):
+ f.write(data)
+ f.flush()
+ def __getattr__(self, name):
+ return getattr(f, name)
+ return AutoFlush()
+
+
+class ForkedFunc:
+ EXITSTATUS_EXCEPTION = 3
+
+
+ def __init__(self, fun, args=None, kwargs=None, nice_level=0,
+ child_on_start=None, child_on_exit=None):
+ if args is None:
+ args = []
+ if kwargs is None:
+ kwargs = {}
+ self.fun = fun
+ self.args = args
+ self.kwargs = kwargs
+ self.tempdir = tempdir = py.path.local.mkdtemp()
+ self.RETVAL = tempdir.ensure('retval')
+ self.STDOUT = tempdir.ensure('stdout')
+ self.STDERR = tempdir.ensure('stderr')
+
+ pid = os.fork()
+ if pid: # in parent process
+ self.pid = pid
+ else: # in child process
+ self.pid = None
+ self._child(nice_level, child_on_start, child_on_exit)
+
+ def _child(self, nice_level, child_on_start, child_on_exit):
+ # right now we need to call a function, but first we need to
+ # map all IO that might happen
+ sys.stdout = stdout = get_unbuffered_io(1, self.STDOUT)
+ sys.stderr = stderr = get_unbuffered_io(2, self.STDERR)
+ retvalf = self.RETVAL.open("wb")
+ EXITSTATUS = 0
+ try:
+ if nice_level:
+ os.nice(nice_level)
+ try:
+ if child_on_start is not None:
+ child_on_start()
+ retval = self.fun(*self.args, **self.kwargs)
+ retvalf.write(marshal.dumps(retval))
+ if child_on_exit is not None:
+ child_on_exit()
+ except:
+ excinfo = py.code.ExceptionInfo()
+ stderr.write(str(excinfo._getreprcrash()))
+ EXITSTATUS = self.EXITSTATUS_EXCEPTION
+ finally:
+ stdout.close()
+ stderr.close()
+ retvalf.close()
+ os.close(1)
+ os.close(2)
+ os._exit(EXITSTATUS)
+
+ def waitfinish(self, waiter=os.waitpid):
+ pid, systemstatus = waiter(self.pid, 0)
+ if systemstatus:
+ if os.WIFSIGNALED(systemstatus):
+ exitstatus = os.WTERMSIG(systemstatus) + 128
+ else:
+ exitstatus = os.WEXITSTATUS(systemstatus)
+ else:
+ exitstatus = 0
+ signal = systemstatus & 0x7f
+ if not exitstatus and not signal:
+ retval = self.RETVAL.open('rb')
+ try:
+ retval_data = retval.read()
+ finally:
+ retval.close()
+ retval = marshal.loads(retval_data)
+ else:
+ retval = None
+ stdout = self.STDOUT.read()
+ stderr = self.STDERR.read()
+ self._removetemp()
+ return Result(exitstatus, signal, retval, stdout, stderr)
+
+ def _removetemp(self):
+ if self.tempdir.check():
+ self.tempdir.remove()
+
+ def __del__(self):
+ if self.pid is not None: # only clean up in main process
+ self._removetemp()
+
+
+class Result(object):
+ def __init__(self, exitstatus, signal, retval, stdout, stderr):
+ self.exitstatus = exitstatus
+ self.signal = signal
+ self.retval = retval
+ self.out = stdout
+ self.err = stderr
diff --git a/third_party/python/py/py/_process/killproc.py b/third_party/python/py/py/_process/killproc.py
new file mode 100644
index 0000000000..18e8310b5f
--- /dev/null
+++ b/third_party/python/py/py/_process/killproc.py
@@ -0,0 +1,23 @@
+import py
+import os, sys
+
+if sys.platform == "win32" or getattr(os, '_name', '') == 'nt':
+ try:
+ import ctypes
+ except ImportError:
+ def dokill(pid):
+ py.process.cmdexec("taskkill /F /PID %d" %(pid,))
+ else:
+ def dokill(pid):
+ PROCESS_TERMINATE = 1
+ handle = ctypes.windll.kernel32.OpenProcess(
+ PROCESS_TERMINATE, False, pid)
+ ctypes.windll.kernel32.TerminateProcess(handle, -1)
+ ctypes.windll.kernel32.CloseHandle(handle)
+else:
+ def dokill(pid):
+ os.kill(pid, 15)
+
+def kill(pid):
+ """ kill process by id. """
+ dokill(pid)
diff --git a/third_party/python/py/py/_std.py b/third_party/python/py/py/_std.py
new file mode 100644
index 0000000000..74d4367265
--- /dev/null
+++ b/third_party/python/py/py/_std.py
@@ -0,0 +1,26 @@
+import sys
+import warnings
+
+
+class PyStdIsDeprecatedWarning(DeprecationWarning):
+ pass
+
+
+class Std(object):
+ """ makes top-level python modules available as an attribute,
+ importing them on first access.
+ """
+
+ def __init__(self):
+ self.__dict__ = sys.modules
+
+ def __getattr__(self, name):
+ warnings.warn("py.std is deprecated, please import %s directly" % name,
+ category=PyStdIsDeprecatedWarning)
+ try:
+ m = __import__(name)
+ except ImportError:
+ raise AttributeError("py.std: could not import %s" % name)
+ return m
+
+std = Std()
diff --git a/third_party/python/py/py/_vendored_packages/__init__.py b/third_party/python/py/py/_vendored_packages/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/__init__.py
diff --git a/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/DESCRIPTION.rst b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/DESCRIPTION.rst
new file mode 100644
index 0000000000..548222007f
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/DESCRIPTION.rst
@@ -0,0 +1,87 @@
+Welcome to apipkg!
+------------------------
+
+With apipkg you can control the exported namespace of a
+python package and greatly reduce the number of imports for your users.
+It is a `small pure python module`_ that works on virtually all Python
+versions, including CPython2.3 to Python3.1, Jython and PyPy. It co-operates
+well with Python's ``help()`` system, custom importers (PEP302) and common
+command line completion tools.
+
+Usage is very simple: you can require 'apipkg' as a dependency or you
+can copy paste the <200 Lines of code into your project.
+
+
+Tutorial example
+-------------------
+
+Here is a simple ``mypkg`` package that specifies one namespace
+and exports two objects imported from different modules::
+
+ # mypkg/__init__.py
+ import apipkg
+ apipkg.initpkg(__name__, {
+ 'path': {
+ 'Class1': "_mypkg.somemodule:Class1",
+ 'clsattr': "_mypkg.othermodule:Class2.attr",
+ }
+ })
+
+The package is initialized with a dictionary as namespace.
+
+You need to create a ``_mypkg`` package with a ``somemodule.py``
+and ``othermodule.py`` containing the respective classes.
+The ``_mypkg`` is not special - it's a completely
+regular python package.
+
+Namespace dictionaries contain ``name: value`` mappings
+where the value may be another namespace dictionary or
+a string specifying an import location. On accessing
+a namespace attribute an import will be performed::
+
+ >>> import mypkg
+ >>> mypkg.path
+ <ApiModule 'mypkg.path'>
+ >>> mypkg.path.Class1 # '_mypkg.somemodule' gets imported now
+ <class _mypkg.somemodule.Class1 at 0xb7d428fc>
+ >>> mypkg.path.clsattr # '_mypkg.othermodule' gets imported now
+ 4 # the value of _mypkg.othermodule.Class2.attr
+
+The ``mypkg.path`` namespace and its two entries are
+loaded when they are accessed. This means:
+
+* lazy loading - only what is actually needed is ever loaded
+
+* only the root "mypkg" ever needs to be imported to get
+ access to the complete functionality.
+
+* the underlying modules are also accessible, for example::
+
+ from mypkg.sub import Class1
+
+
+Including apipkg in your package
+--------------------------------------
+
+If you don't want to add an ``apipkg`` dependency to your package you
+can copy the `apipkg.py`_ file somewhere to your own package,
+for example ``_mypkg/apipkg.py`` in the above example. You
+then import the ``initpkg`` function from that new place and
+are good to go.
+
+.. _`small pure python module`:
+.. _`apipkg.py`: http://bitbucket.org/hpk42/apipkg/src/tip/apipkg.py
+
+Feedback?
+-----------------------
+
+If you have questions you are welcome to
+
+* join the #pylib channel on irc.freenode.net
+* subscribe to the http://codespeak.net/mailman/listinfo/py-dev list.
+* create an issue on http://bitbucket.org/hpk42/apipkg/issues
+
+have fun,
+holger krekel
+
+
diff --git a/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/INSTALLER b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/METADATA b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/METADATA
new file mode 100644
index 0000000000..eb7e60acff
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/METADATA
@@ -0,0 +1,109 @@
+Metadata-Version: 2.0
+Name: apipkg
+Version: 1.4
+Summary: apipkg: namespace control and lazy-import mechanism
+Home-page: http://bitbucket.org/hpk42/apipkg
+Author: holger krekel
+Author-email: holger at merlinux.eu
+License: MIT License
+Platform: unix
+Platform: linux
+Platform: osx
+Platform: cygwin
+Platform: win32
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Programming Language :: Python
+
+Welcome to apipkg!
+------------------------
+
+With apipkg you can control the exported namespace of a
+python package and greatly reduce the number of imports for your users.
+It is a `small pure python module`_ that works on virtually all Python
+versions, including CPython2.3 to Python3.1, Jython and PyPy. It co-operates
+well with Python's ``help()`` system, custom importers (PEP302) and common
+command line completion tools.
+
+Usage is very simple: you can require 'apipkg' as a dependency or you
+can copy paste the <200 Lines of code into your project.
+
+
+Tutorial example
+-------------------
+
+Here is a simple ``mypkg`` package that specifies one namespace
+and exports two objects imported from different modules::
+
+ # mypkg/__init__.py
+ import apipkg
+ apipkg.initpkg(__name__, {
+ 'path': {
+ 'Class1': "_mypkg.somemodule:Class1",
+ 'clsattr': "_mypkg.othermodule:Class2.attr",
+ }
+ })
+
+The package is initialized with a dictionary as namespace.
+
+You need to create a ``_mypkg`` package with a ``somemodule.py``
+and ``othermodule.py`` containing the respective classes.
+The ``_mypkg`` is not special - it's a completely
+regular python package.
+
+Namespace dictionaries contain ``name: value`` mappings
+where the value may be another namespace dictionary or
+a string specifying an import location. On accessing
+a namespace attribute an import will be performed::
+
+ >>> import mypkg
+ >>> mypkg.path
+ <ApiModule 'mypkg.path'>
+ >>> mypkg.path.Class1 # '_mypkg.somemodule' gets imported now
+ <class _mypkg.somemodule.Class1 at 0xb7d428fc>
+ >>> mypkg.path.clsattr # '_mypkg.othermodule' gets imported now
+ 4 # the value of _mypkg.othermodule.Class2.attr
+
+The ``mypkg.path`` namespace and its two entries are
+loaded when they are accessed. This means:
+
+* lazy loading - only what is actually needed is ever loaded
+
+* only the root "mypkg" ever needs to be imported to get
+ access to the complete functionality.
+
+* the underlying modules are also accessible, for example::
+
+ from mypkg.sub import Class1
+
+
+Including apipkg in your package
+--------------------------------------
+
+If you don't want to add an ``apipkg`` dependency to your package you
+can copy the `apipkg.py`_ file somewhere to your own package,
+for example ``_mypkg/apipkg.py`` in the above example. You
+then import the ``initpkg`` function from that new place and
+are good to go.
+
+.. _`small pure python module`:
+.. _`apipkg.py`: http://bitbucket.org/hpk42/apipkg/src/tip/apipkg.py
+
+Feedback?
+-----------------------
+
+If you have questions you are welcome to
+
+* join the #pylib channel on irc.freenode.net
+* subscribe to the http://codespeak.net/mailman/listinfo/py-dev list.
+* create an issue on http://bitbucket.org/hpk42/apipkg/issues
+
+have fun,
+holger krekel
+
+
diff --git a/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/RECORD b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/RECORD
new file mode 100644
index 0000000000..dc72959dfe
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/RECORD
@@ -0,0 +1,9 @@
+apipkg.py,sha256=BNnv_qvq8zZvku-uudoqgp3XTNFbwsNUmtzOKrVI7X0,6420
+apipkg-1.4.dist-info/top_level.txt,sha256=3TGS6nmN7kjxhUK4LpPCB3QkQI34QYGrT0ZQGWajoZ8,7
+apipkg-1.4.dist-info/METADATA,sha256=Fk_8BrHyXE--kvB3_ZBKgwvPaKusAZUjchH-kpB63Hs,3491
+apipkg-1.4.dist-info/DESCRIPTION.rst,sha256=RkMQqk5ljhGy0DiZkR_nbpjqvwCIhuIEHsyvkn3O96k,2803
+apipkg-1.4.dist-info/metadata.json,sha256=GdshYrA_7gAII3E3EQMH-31BHzU-klTZ6bPQzlDmuy4,779
+apipkg-1.4.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110
+apipkg-1.4.dist-info/RECORD,,
+apipkg-1.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+__pycache__/apipkg.cpython-35.pyc,,
diff --git a/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/WHEEL b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/WHEEL
new file mode 100644
index 0000000000..9dff69d861
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.24.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/metadata.json b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/metadata.json
new file mode 100644
index 0000000000..05609b9937
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/metadata.json
@@ -0,0 +1 @@
+{"license": "MIT License", "name": "apipkg", "metadata_version": "2.0", "generator": "bdist_wheel (0.24.0)", "summary": "apipkg: namespace control and lazy-import mechanism", "platform": "unix", "version": "1.4", "extensions": {"python.details": {"project_urls": {"Home": "http://bitbucket.org/hpk42/apipkg"}, "document_names": {"description": "DESCRIPTION.rst"}, "contacts": [{"role": "author", "email": "holger at merlinux.eu", "name": "holger krekel"}]}}, "classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Operating System :: Microsoft :: Windows", "Operating System :: MacOS :: MacOS X", "Topic :: Software Development :: Libraries", "Programming Language :: Python"]} \ No newline at end of file
diff --git a/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/top_level.txt b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/top_level.txt
new file mode 100644
index 0000000000..e2221c8f9e
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/apipkg-1.4.dist-info/top_level.txt
@@ -0,0 +1 @@
+apipkg
diff --git a/third_party/python/py/py/_vendored_packages/apipkg.py b/third_party/python/py/py/_vendored_packages/apipkg.py
new file mode 100644
index 0000000000..9d56e0bcba
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/apipkg.py
@@ -0,0 +1,205 @@
+"""
+apipkg: control the exported namespace of a python package.
+
+see http://pypi.python.org/pypi/apipkg
+
+(c) holger krekel, 2009 - MIT license
+"""
+import os
+import sys
+from types import ModuleType
+
+
+__version__ = '1.4'
+
+
+def _py_abspath(path):
+ """
+ special version of abspath
+ that will leave paths from jython jars alone
+ """
+ if path.startswith('__pyclasspath__'):
+
+ return path
+ else:
+ return os.path.abspath(path)
+
+
+def distribution_version(name):
+ """try to get the version of the named distribution,
+ returns None on failure"""
+ from pkg_resources import get_distribution, DistributionNotFound
+ try:
+ dist = get_distribution(name)
+ except DistributionNotFound:
+ pass
+ else:
+ return dist.version
+
+
+def initpkg(pkgname, exportdefs, attr=dict(), eager=False):
+ """ initialize given package from the export definitions. """
+ oldmod = sys.modules.get(pkgname)
+ d = {}
+ f = getattr(oldmod, '__file__', None)
+ if f:
+ f = _py_abspath(f)
+ d['__file__'] = f
+ if hasattr(oldmod, '__version__'):
+ d['__version__'] = oldmod.__version__
+ if hasattr(oldmod, '__loader__'):
+ d['__loader__'] = oldmod.__loader__
+ if hasattr(oldmod, '__path__'):
+ d['__path__'] = [_py_abspath(p) for p in oldmod.__path__]
+ if '__doc__' not in exportdefs and getattr(oldmod, '__doc__', None):
+ d['__doc__'] = oldmod.__doc__
+ d.update(attr)
+ if hasattr(oldmod, "__dict__"):
+ oldmod.__dict__.update(d)
+ mod = ApiModule(pkgname, exportdefs, implprefix=pkgname, attr=d)
+ sys.modules[pkgname] = mod
+ # eagerly load in bpython to avoid its monkeypatching breaking packages
+ if 'bpython' in sys.modules or eager:
+ for module in sys.modules.values():
+ if isinstance(module, ApiModule):
+ module.__dict__
+
+
+def importobj(modpath, attrname):
+ module = __import__(modpath, None, None, ['__doc__'])
+ if not attrname:
+ return module
+
+ retval = module
+ names = attrname.split(".")
+ for x in names:
+ retval = getattr(retval, x)
+ return retval
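+
+# For illustration (sketch): importobj("os.path", "join") imports os.path and
+# returns the os.path.join function; an empty attrname returns the module.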
+
+
+class ApiModule(ModuleType):
+ def __docget(self):
+ try:
+ return self.__doc
+ except AttributeError:
+ if '__doc__' in self.__map__:
+ return self.__makeattr('__doc__')
+
+ def __docset(self, value):
+ self.__doc = value
+ __doc__ = property(__docget, __docset)
+
+ def __init__(self, name, importspec, implprefix=None, attr=None):
+ self.__name__ = name
+ self.__all__ = [x for x in importspec if x != '__onfirstaccess__']
+ self.__map__ = {}
+ self.__implprefix__ = implprefix or name
+ if attr:
+ for name, val in attr.items():
+ # print "setting", self.__name__, name, val
+ setattr(self, name, val)
+ for name, importspec in importspec.items():
+ if isinstance(importspec, dict):
+ subname = '%s.%s' % (self.__name__, name)
+ apimod = ApiModule(subname, importspec, implprefix)
+ sys.modules[subname] = apimod
+ setattr(self, name, apimod)
+ else:
+ parts = importspec.split(':')
+ modpath = parts.pop(0)
+ attrname = parts and parts[0] or ""
+ if modpath[0] == '.':
+ modpath = implprefix + modpath
+
+ if not attrname:
+ subname = '%s.%s' % (self.__name__, name)
+ apimod = AliasModule(subname, modpath)
+ sys.modules[subname] = apimod
+ if '.' not in name:
+ setattr(self, name, apimod)
+ else:
+ self.__map__[name] = (modpath, attrname)
+
+ def __repr__(self):
+ l = []
+ if hasattr(self, '__version__'):
+ l.append("version=" + repr(self.__version__))
+ if hasattr(self, '__file__'):
+ l.append('from ' + repr(self.__file__))
+ if l:
+ return '<ApiModule %r %s>' % (self.__name__, " ".join(l))
+ return '<ApiModule %r>' % (self.__name__,)
+
+ def __makeattr(self, name):
+ """lazily compute value for name or raise AttributeError if unknown."""
+ # print "makeattr", self.__name__, name
+ target = None
+ if '__onfirstaccess__' in self.__map__:
+ target = self.__map__.pop('__onfirstaccess__')
+ importobj(*target)()
+ try:
+ modpath, attrname = self.__map__[name]
+ except KeyError:
+ if target is not None and name != '__onfirstaccess__':
+ # retry, onfirstaccess might have set attrs
+ return getattr(self, name)
+ raise AttributeError(name)
+ else:
+ result = importobj(modpath, attrname)
+ setattr(self, name, result)
+ try:
+ del self.__map__[name]
+ except KeyError:
+ pass # in a recursive-import situation a double-del can happen
+ return result
+
+ __getattr__ = __makeattr
+
+ @property
+ def __dict__(self):
+ # force all the content of the module
+ # to be loaded when __dict__ is read
+ dictdescr = ModuleType.__dict__['__dict__']
+ dict = dictdescr.__get__(self)
+ if dict is not None:
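+ # a lookup of a missing attribute goes through __makeattr and thereby
+ # triggers any '__onfirstaccess__' hook before the names are filled in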
+ hasattr(self, 'some')
+ for name in self.__all__:
+ try:
+ self.__makeattr(name)
+ except AttributeError:
+ pass
+ return dict
+
+
+def AliasModule(modname, modpath, attrname=None):
+ mod = []
+
+ def getmod():
+ if not mod:
+ x = importobj(modpath, None)
+ if attrname is not None:
+ x = getattr(x, attrname)
+ mod.append(x)
+ return mod[0]
+
+ class AliasModule(ModuleType):
+
+ def __repr__(self):
+ x = modpath
+ if attrname:
+ x += "." + attrname
+ return '<AliasModule %r for %r>' % (modname, x)
+
+ def __getattribute__(self, name):
+ try:
+ return getattr(getmod(), name)
+ except ImportError:
+ return None
+
+ def __setattr__(self, name, value):
+ setattr(getmod(), name, value)
+
+ def __delattr__(self, name):
+ delattr(getmod(), name)
+
+ return AliasModule(str(modname))
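+
+# For illustration (sketch): AliasModule("mypkg.sub", "os.path") behaves like
+# os.path but defers the actual import until an attribute is first accessed.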
diff --git a/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/DESCRIPTION.rst b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/DESCRIPTION.rst
new file mode 100644
index 0000000000..6d59bc222c
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/DESCRIPTION.rst
@@ -0,0 +1,53 @@
+iniconfig: brain-dead simple parsing of ini files
+=======================================================
+
+iniconfig is a small and simple INI-file parser module
+having a unique set of features:
+
+* tested against Python2.4 through Python3.2, Jython, PyPy
+* maintains order of sections and entries
+* supports multi-line values with or without line-continuations
+* supports "#" comments everywhere
+* raises errors with proper line-numbers
+* no bells and whistles like automatic substitutions
+* iniconfig raises an Error if two sections have the same name.
+
+If you encounter issues or have feature wishes please report them to:
+
+ http://github.org/RonnyPfannschmidt/iniconfig/issues
+
+Basic Example
+===================================
+
+If you have an ini file like this::
+
+ # content of example.ini
+ [section1] # comment
+ name1=value1 # comment
+ name1b=value1,value2 # comment
+
+ [section2]
+ name2=
+ line1
+ line2
+
+then you can do::
+
+ >>> import iniconfig
+ >>> ini = iniconfig.IniConfig("example.ini")
+ >>> ini['section1']['name1'] # raises KeyError if not exists
+ 'value1'
+ >>> ini.get('section1', 'name1b', [], lambda x: x.split(","))
+ ['value1', 'value2']
+ >>> ini.get('section1', 'notexist', [], lambda x: x.split(","))
+ []
+ >>> [x.name for x in list(ini)]
+ ['section1', 'section2']
+ >>> list(list(ini)[0].items())
+ [('name1', 'value1'), ('name1b', 'value1,value2')]
+ >>> 'section1' in ini
+ True
+ >>> 'inexistendsection' in ini
+ False
+
+
diff --git a/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/INSTALLER b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/METADATA b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/METADATA
new file mode 100644
index 0000000000..79ea62dc34
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/METADATA
@@ -0,0 +1,78 @@
+Metadata-Version: 2.0
+Name: iniconfig
+Version: 1.0.0
+Summary: iniconfig: brain-dead simple config-ini parsing
+Home-page: http://github.com/RonnyPfannschmidt/iniconfig
+Author: Ronny Pfannschmidt, Holger Krekel
+Author-email: opensource@ronnypfannschmidt.de, holger.krekel@gmail.com
+License: MIT License
+Platform: unix
+Platform: linux
+Platform: osx
+Platform: cygwin
+Platform: win32
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+
+iniconfig: brain-dead simple parsing of ini files
+=======================================================
+
+iniconfig is a small and simple INI-file parser module
+having a unique set of features:
+
+* tested against Python2.4 through Python3.2, Jython, PyPy
+* maintains order of sections and entries
+* supports multi-line values with or without line-continuations
+* supports "#" comments everywhere
+* raises errors with proper line-numbers
+* no bells and whistles like automatic substitutions
+* iniconfig raises an Error if two sections have the same name.
+
+If you encounter issues or have feature wishes please report them to:
+
+ http://github.org/RonnyPfannschmidt/iniconfig/issues
+
+Basic Example
+===================================
+
+If you have an ini file like this::
+
+ # content of example.ini
+ [section1] # comment
+ name1=value1 # comment
+ name1b=value1,value2 # comment
+
+ [section2]
+ name2=
+ line1
+ line2
+
+then you can do::
+
+ >>> import iniconfig
+ >>> ini = iniconfig.IniConfig("example.ini")
+ >>> ini['section1']['name1'] # raises KeyError if not exists
+ 'value1'
+ >>> ini.get('section1', 'name1b', [], lambda x: x.split(","))
+ ['value1', 'value2']
+ >>> ini.get('section1', 'notexist', [], lambda x: x.split(","))
+ []
+ >>> [x.name for x in list(ini)]
+ ['section1', 'section2']
+ >>> list(list(ini)[0].items())
+ [('name1', 'value1'), ('name1b', 'value1,value2')]
+ >>> 'section1' in ini
+ True
+ >>> 'nonexistent_section' in ini
+ False
+
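+A further minimal sketch (not from the upstream documentation): the
+multi-line value from ``[section2]`` above is joined with newlines, and
+``lineof`` reports 1-based line numbers::
+
+ >>> ini['section2']['name2']
+ 'line1\nline2'
+ >>> ini.lineof('section2')
+ 6
+ >>> ini.lineof('section2', 'name2')
+ 7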
+
diff --git a/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/RECORD b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/RECORD
new file mode 100644
index 0000000000..ec2f5e1748
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/RECORD
@@ -0,0 +1,9 @@
+iniconfig.py,sha256=-pBe5AF_6aAwo1CxJQ8i_zJq6ejc6IxHta7qk2tNJhY,5208
+iniconfig-1.0.0.dist-info/DESCRIPTION.rst,sha256=BDLMwWqfjpwZ5yqXRvz1x6bf8Dnt_pZhElekAwtL19o,1522
+iniconfig-1.0.0.dist-info/METADATA,sha256=bb2T8WUSDXXiUVxZ4WXhbffq6stikMTlB1jyrPbLfyU,2405
+iniconfig-1.0.0.dist-info/RECORD,,
+iniconfig-1.0.0.dist-info/WHEEL,sha256=3XK1Z4AI42GuJXciCpiHMOkbehxRV8QDBW8IU41k3ZU,96
+iniconfig-1.0.0.dist-info/metadata.json,sha256=UYYwW0p815nU4qz8Iq1gGqIYaAcsCyGju3jXvTOyXSI,950
+iniconfig-1.0.0.dist-info/top_level.txt,sha256=7KfM0fugdlToj9UW7enKXk2HYALQD8qHiyKtjhSzgN8,10
+iniconfig-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+__pycache__/iniconfig.cpython-35.pyc,,
diff --git a/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/WHEEL b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/WHEEL
new file mode 100644
index 0000000000..15b96c99ca
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.30.0.a0)
+Root-Is-Purelib: true
+Tag: cp35-none-any
+
diff --git a/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/metadata.json b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/metadata.json
new file mode 100644
index 0000000000..084daa6c06
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/metadata.json
@@ -0,0 +1 @@
+{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Operating System :: Microsoft :: Windows", "Operating System :: MacOS :: MacOS X", "Topic :: Software Development :: Libraries", "Topic :: Utilities", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3"], "extensions": {"python.details": {"contacts": [{"email": "opensource@ronnypfannschmidt.de, holger.krekel@gmail.com", "name": "Ronny Pfannschmidt, Holger Krekel", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://github.com/RonnyPfannschmidt/iniconfig"}}}, "generator": "bdist_wheel (0.30.0.a0)", "license": "MIT License", "metadata_version": "2.0", "name": "iniconfig", "platform": "unix", "summary": "iniconfig: brain-dead simple config-ini parsing", "version": "1.0.0"} \ No newline at end of file
diff --git a/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/top_level.txt b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..9dda53692d
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/iniconfig-1.0.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+iniconfig
diff --git a/third_party/python/py/py/_vendored_packages/iniconfig.py b/third_party/python/py/py/_vendored_packages/iniconfig.py
new file mode 100644
index 0000000000..6ad9eaf868
--- /dev/null
+++ b/third_party/python/py/py/_vendored_packages/iniconfig.py
@@ -0,0 +1,165 @@
+""" brain-dead simple parser for ini-style files.
+(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed
+"""
+__all__ = ['IniConfig', 'ParseError']
+
+COMMENTCHARS = "#;"
+
+
+class ParseError(Exception):
+ def __init__(self, path, lineno, msg):
+ Exception.__init__(self, path, lineno, msg)
+ self.path = path
+ self.lineno = lineno
+ self.msg = msg
+
+ def __str__(self):
+ return "%s:%s: %s" % (self.path, self.lineno+1, self.msg)
+
+
+class SectionWrapper(object):
+ def __init__(self, config, name):
+ self.config = config
+ self.name = name
+
+ def lineof(self, name):
+ return self.config.lineof(self.name, name)
+
+ def get(self, key, default=None, convert=str):
+ return self.config.get(self.name, key,
+ convert=convert, default=default)
+
+ def __getitem__(self, key):
+ return self.config.sections[self.name][key]
+
+ def __iter__(self):
+ section = self.config.sections.get(self.name, [])
+
+ def lineof(key):
+ return self.config.lineof(self.name, key)
+ for name in sorted(section, key=lineof):
+ yield name
+
+ def items(self):
+ for name in self:
+ yield name, self[name]
+
+
+class IniConfig(object):
+ def __init__(self, path, data=None):
+ self.path = str(path) # convenience
+ if data is None:
+ f = open(self.path)
+ try:
+ tokens = self._parse(iter(f))
+ finally:
+ f.close()
+ else:
+ tokens = self._parse(data.splitlines(True))
+
+ self._sources = {}
+ self.sections = {}
+
+ for lineno, section, name, value in tokens:
+ if section is None:
+ self._raise(lineno, 'no section header defined')
+ self._sources[section, name] = lineno
+ if name is None:
+ if section in self.sections:
+ self._raise(lineno, 'duplicate section %r' % (section, ))
+ self.sections[section] = {}
+ else:
+ if name in self.sections[section]:
+ self._raise(lineno, 'duplicate name %r' % (name, ))
+ self.sections[section][name] = value
+
+ def _raise(self, lineno, msg):
+ raise ParseError(self.path, lineno, msg)
+
+ def _parse(self, line_iter):
+ result = []
+ section = None
+ for lineno, line in enumerate(line_iter):
+ name, data = self._parseline(line, lineno)
+ # new value
+ if name is not None and data is not None:
+ result.append((lineno, section, name, data))
+ # new section
+ elif name is not None and data is None:
+ if not name:
+ self._raise(lineno, 'empty section name')
+ section = name
+ result.append((lineno, section, None, None))
+ # continuation
+ elif name is None and data is not None:
+ if not result:
+ self._raise(lineno, 'unexpected value continuation')
+ last = result.pop()
+ last_name, last_data = last[-2:]
+ if last_name is None:
+ self._raise(lineno, 'unexpected value continuation')
+
+ if last_data:
+ data = '%s\n%s' % (last_data, data)
+ result.append(last[:-1] + (data,))
+ return result
+
+ def _parseline(self, line, lineno):
+ # blank lines
+ if iscommentline(line):
+ line = ""
+ else:
+ line = line.rstrip()
+ if not line:
+ return None, None
+ # section
+ if line[0] == '[':
+ realline = line
+ for c in COMMENTCHARS:
+ line = line.split(c)[0].rstrip()
+ if line[-1] == "]":
+ return line[1:-1], None
+ return None, realline.strip()
+ # value
+ elif not line[0].isspace():
+ try:
+ name, value = line.split('=', 1)
+ if ":" in name:
+ raise ValueError()
+ except ValueError:
+ try:
+ name, value = line.split(":", 1)
+ except ValueError:
+ self._raise(lineno, 'unexpected line: %r' % line)
+ return name.strip(), value.strip()
+ # continuation
+ else:
+ return None, line.strip()
+
+ def lineof(self, section, name=None):
+ lineno = self._sources.get((section, name))
+ if lineno is not None:
+ return lineno + 1
+
+ def get(self, section, name, default=None, convert=str):
+ try:
+ return convert(self.sections[section][name])
+ except KeyError:
+ return default
+
+ def __getitem__(self, name):
+ if name not in self.sections:
+ raise KeyError(name)
+ return SectionWrapper(self, name)
+
+ def __iter__(self):
+ for name in sorted(self.sections, key=self.lineof):
+ yield SectionWrapper(self, name)
+
+ def __contains__(self, arg):
+ return arg in self.sections
+
+
+def iscommentline(line):
+ c = line.lstrip()[:1]
+ return c in COMMENTCHARS
diff --git a/third_party/python/py/py/_version.py b/third_party/python/py/py/_version.py
new file mode 100644
index 0000000000..e7188f937f
--- /dev/null
+++ b/third_party/python/py/py/_version.py
@@ -0,0 +1,4 @@
+# coding: utf-8
+# file generated by setuptools_scm
+# don't change, don't track in version control
+version = '1.5.4'
diff --git a/third_party/python/py/py/_xmlgen.py b/third_party/python/py/py/_xmlgen.py
new file mode 100644
index 0000000000..1c83545884
--- /dev/null
+++ b/third_party/python/py/py/_xmlgen.py
@@ -0,0 +1,255 @@
+"""
+module for generating and serializing xml and html structures
+by using simple python objects.
+
+(c) holger krekel, holger at merlinux eu. 2009
+"""
+import sys, re
+
+if sys.version_info >= (3,0):
+ def u(s):
+ return s
+ def unicode(x, errors=None):
+ if hasattr(x, '__unicode__'):
+ return x.__unicode__()
+ return str(x)
+else:
+ def u(s):
+ return unicode(s)
+ unicode = unicode
+
+
+class NamespaceMetaclass(type):
+ def __getattr__(self, name):
+ if name[:1] == '_':
+ raise AttributeError(name)
+ if self == Namespace:
+ raise ValueError("Namespace class is abstract")
+ tagspec = self.__tagspec__
+ if tagspec is not None and name not in tagspec:
+ raise AttributeError(name)
+ classattr = {}
+ if self.__stickyname__:
+ classattr['xmlname'] = name
+ cls = type(name, (self.__tagclass__,), classattr)
+ setattr(self, name, cls)
+ return cls
+
+class Tag(list):
+ class Attr(object):
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+ def __init__(self, *args, **kwargs):
+ super(Tag, self).__init__(args)
+ self.attr = self.Attr(**kwargs)
+
+ def __unicode__(self):
+ return self.unicode(indent=0)
+ __str__ = __unicode__
+
+ def unicode(self, indent=2):
+ l = []
+ SimpleUnicodeVisitor(l.append, indent).visit(self)
+ return u("").join(l)
+
+ def __repr__(self):
+ name = self.__class__.__name__
+ return "<%r tag object %d>" % (name, id(self))
+
+Namespace = NamespaceMetaclass('Namespace', (object, ), {
+ '__tagspec__': None,
+ '__tagclass__': Tag,
+ '__stickyname__': False,
+})
+
+class HtmlTag(Tag):
+ def unicode(self, indent=2):
+ l = []
+ HtmlVisitor(l.append, indent, shortempty=False).visit(self)
+ return u("").join(l)
+
+# exported plain html namespace
+class html(Namespace):
+ __tagclass__ = HtmlTag
+ __stickyname__ = True
+ __tagspec__ = dict([(x,1) for x in (
+ 'a,abbr,acronym,address,applet,area,article,aside,audio,b,'
+ 'base,basefont,bdi,bdo,big,blink,blockquote,body,br,button,'
+ 'canvas,caption,center,cite,code,col,colgroup,command,comment,'
+ 'datalist,dd,del,details,dfn,dir,div,dl,dt,em,embed,'
+ 'fieldset,figcaption,figure,footer,font,form,frame,frameset,h1,'
+ 'h2,h3,h4,h5,h6,head,header,hgroup,hr,html,i,iframe,img,input,'
+ 'ins,isindex,kbd,keygen,label,legend,li,link,listing,map,mark,'
+ 'marquee,menu,meta,meter,multicol,nav,nobr,noembed,noframes,'
+ 'noscript,object,ol,optgroup,option,output,p,param,pre,progress,'
+ 'q,rp,rt,ruby,s,samp,script,section,select,small,source,span,'
+ 'strike,strong,style,sub,summary,sup,table,tbody,td,textarea,'
+ 'tfoot,th,thead,time,title,tr,track,tt,u,ul,xmp,var,video,wbr'
+ ).split(',') if x])
+
+ class Style(object):
+ def __init__(self, **kw):
+ for x, y in kw.items():
+ x = x.replace('_', '-')
+ setattr(self, x, y)
+
+
+class raw(object):
+ """just a box that can contain a unicode string that will be
+ included directly in the output"""
+ def __init__(self, uniobj):
+ self.uniobj = uniobj
+
+class SimpleUnicodeVisitor(object):
+ """ recursive visitor to write unicode. """
+ def __init__(self, write, indent=0, curindent=0, shortempty=True):
+ self.write = write
+ self.cache = {}
+ self.visited = {} # for detection of recursion
+ self.indent = indent
+ self.curindent = curindent
+ self.parents = []
+ self.shortempty = shortempty # short empty tags or not
+
+ def visit(self, node):
+ """ dispatcher on node's class/bases name. """
+ cls = node.__class__
+ try:
+ visitmethod = self.cache[cls]
+ except KeyError:
+ for subclass in cls.__mro__:
+ visitmethod = getattr(self, subclass.__name__, None)
+ if visitmethod is not None:
+ break
+ else:
+ visitmethod = self.__object
+ self.cache[cls] = visitmethod
+ visitmethod(node)
+
+ # the default fallback handler is marked private
+ # to avoid clashes with the tag name object
+ def __object(self, obj):
+ #self.write(obj)
+ self.write(escape(unicode(obj)))
+
+ def raw(self, obj):
+ self.write(obj.uniobj)
+
+ def list(self, obj):
+ assert id(obj) not in self.visited
+ self.visited[id(obj)] = 1
+ for elem in obj:
+ self.visit(elem)
+
+ def Tag(self, tag):
+ assert id(tag) not in self.visited
+ try:
+ tag.parent = self.parents[-1]
+ except IndexError:
+ tag.parent = None
+ self.visited[id(tag)] = 1
+ tagname = getattr(tag, 'xmlname', tag.__class__.__name__)
+ if self.curindent and not self._isinline(tagname):
+ self.write("\n" + u(' ') * self.curindent)
+ if tag:
+ self.curindent += self.indent
+ self.write(u('<%s%s>') % (tagname, self.attributes(tag)))
+ self.parents.append(tag)
+ for x in tag:
+ self.visit(x)
+ self.parents.pop()
+ self.write(u('</%s>') % tagname)
+ self.curindent -= self.indent
+ else:
+ nameattr = tagname+self.attributes(tag)
+ if self._issingleton(tagname):
+ self.write(u('<%s/>') % (nameattr,))
+ else:
+ self.write(u('<%s></%s>') % (nameattr, tagname))
+
+ def attributes(self, tag):
+ # serialize attributes
+ attrlist = dir(tag.attr)
+ attrlist.sort()
+ l = []
+ for name in attrlist:
+ res = self.repr_attribute(tag.attr, name)
+ if res is not None:
+ l.append(res)
+ l.extend(self.getstyle(tag))
+ return u("").join(l)
+
+ def repr_attribute(self, attrs, name):
+ if name[:2] != '__':
+ value = getattr(attrs, name)
+ if name.endswith('_'):
+ name = name[:-1]
+ if isinstance(value, raw):
+ insert = value.uniobj
+ else:
+ insert = escape(unicode(value))
+ return ' %s="%s"' % (name, insert)
+
+ def getstyle(self, tag):
+ """ return attribute list suitable for styling. """
+ try:
+ styledict = tag.style.__dict__
+ except AttributeError:
+ return []
+ else:
+ stylelist = [x+': ' + y for x,y in styledict.items()]
+ return [u(' style="%s"') % u('; ').join(stylelist)]
+
+ def _issingleton(self, tagname):
+ """can (and will) be overridden in subclasses"""
+ return self.shortempty
+
+ def _isinline(self, tagname):
+ """can (and will) be overridden in subclasses"""
+ return False
+
+class HtmlVisitor(SimpleUnicodeVisitor):
+
+ single = dict([(x, 1) for x in
+ ('br,img,area,param,col,hr,meta,link,base,'
+ 'input,frame').split(',')])
+ inline = dict([(x, 1) for x in
+ ('a abbr acronym b basefont bdo big br cite code dfn em font '
+ 'i img input kbd label q s samp select small span strike '
+ 'strong sub sup textarea tt u var'.split(' '))])
+
+ def repr_attribute(self, attrs, name):
+ if name == 'class_':
+ value = getattr(attrs, name)
+ if value is None:
+ return
+ return super(HtmlVisitor, self).repr_attribute(attrs, name)
+
+ def _issingleton(self, tagname):
+ return tagname in self.single
+
+ def _isinline(self, tagname):
+ return tagname in self.inline
+
+
+class _escape:
+ def __init__(self):
+ self.escape = {
+ u('"') : u('&quot;'), u('<') : u('&lt;'), u('>') : u('&gt;'),
+ u('&') : u('&amp;'), u("'") : u('&apos;'),
+ }
+ self.charef_rex = re.compile(u("|").join(self.escape.keys()))
+
+ def _replacer(self, match):
+ return self.escape[match.group(0)]
+
+ def __call__(self, ustring):
+ """ xml-escape the given unicode string. """
+ try:
+ ustring = unicode(ustring)
+ except UnicodeDecodeError:
+ ustring = unicode(ustring, 'utf-8', errors='replace')
+ return self.charef_rex.sub(self._replacer, ustring)
+
+escape = _escape()
diff --git a/third_party/python/py/py/test.py b/third_party/python/py/py/test.py
new file mode 100644
index 0000000000..aa5beb1789
--- /dev/null
+++ b/third_party/python/py/py/test.py
@@ -0,0 +1,10 @@
+import sys
+if __name__ == '__main__':
+ import pytest
+ sys.exit(pytest.main())
+else:
+ import sys, pytest
+ sys.modules['py.test'] = pytest
+
+# for more API entry points see the 'tests' definition
+# in __init__.py
diff --git a/third_party/python/py/setup.cfg b/third_party/python/py/setup.cfg
new file mode 100644
index 0000000000..602dccab83
--- /dev/null
+++ b/third_party/python/py/setup.cfg
@@ -0,0 +1,13 @@
+[wheel]
+universal = 1
+
+[metadata]
+license_file = LICENSE
+
+[devpi:upload]
+formats = sdist.tgz,bdist_wheel
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/py/setup.py b/third_party/python/py/setup.py
new file mode 100644
index 0000000000..a0d723c307
--- /dev/null
+++ b/third_party/python/py/setup.py
@@ -0,0 +1,41 @@
+from setuptools import setup, find_packages
+
+
+def main():
+ setup(
+ name='py',
+ description='library with cross-python path, ini-parsing, io, code, log facilities',
+ long_description=open('README.rst').read(),
+ use_scm_version={"write_to": "py/_version.py"},
+ setup_requires=["setuptools-scm"],
+ url='http://py.readthedocs.io/',
+ license='MIT license',
+ platforms=['unix', 'linux', 'osx', 'cygwin', 'win32'],
+ python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
+ author='holger krekel, Ronny Pfannschmidt, Benjamin Peterson and others',
+ author_email='pytest-dev@python.org',
+ classifiers=['Development Status :: 6 - Mature',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: POSIX',
+ 'Operating System :: Microsoft :: Windows',
+ 'Operating System :: MacOS :: MacOS X',
+ 'Topic :: Software Development :: Testing',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: Utilities',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+ ],
+ packages=find_packages(exclude=['tasks', 'testing']),
+ zip_safe=False,
+ )
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/python/py/tasks/__init__.py b/third_party/python/py/tasks/__init__.py
new file mode 100644
index 0000000000..5d74b649e1
--- /dev/null
+++ b/third_party/python/py/tasks/__init__.py
@@ -0,0 +1,12 @@
+"""
+Invoke tasks to help with pytest development and release process.
+"""
+
+import invoke
+
+from . import vendoring
+
+
+ns = invoke.Collection(
+ vendoring
+)
diff --git a/third_party/python/py/tasks/vendoring.py b/third_party/python/py/tasks/vendoring.py
new file mode 100644
index 0000000000..fbc171bc3e
--- /dev/null
+++ b/third_party/python/py/tasks/vendoring.py
@@ -0,0 +1,23 @@
+from __future__ import absolute_import, print_function
+import py
+import invoke
+
+VENDOR_TARGET = py.path.local("py/_vendored_packages")
+GOOD_FILES = 'README.md', '__init__.py'
+
+@invoke.task()
+def remove_libs(ctx):
+ print("removing vendored libs")
+ for path in VENDOR_TARGET.listdir():
+ if path.basename not in GOOD_FILES:
+ print(" ", path)
+ path.remove()
+
+@invoke.task(pre=[remove_libs])
+def update_libs(ctx):
+ print("installing libs")
+ ctx.run("pip install -t {target} apipkg iniconfig".format(target=VENDOR_TARGET))
+ ctx.run("git add {target}".format(target=VENDOR_TARGET))
+ print("Please commit to finish the update after running the tests:")
+ print()
+ print(' git commit -am "Updated vendored libs"')
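+
+# Note (an assumption, not an upstream-documented command): with the
+# Collection defined in tasks/__init__.py these tasks are namespaced under
+# "vendoring", so the refresh would typically be run as something like
+# `invoke vendoring.update-libs`, which pulls in remove_libs via its
+# `pre=` hook; exact task names depend on invoke's underscore-to-dash
+# mapping.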
diff --git a/third_party/python/py/testing/code/test_assertion.py b/third_party/python/py/testing/code/test_assertion.py
new file mode 100644
index 0000000000..e2a7f90399
--- /dev/null
+++ b/third_party/python/py/testing/code/test_assertion.py
@@ -0,0 +1,305 @@
+import pytest, py
+import re
+
+def exvalue():
+ import sys
+ return sys.exc_info()[1]
+
+def f():
+ return 2
+
+def test_assert():
+ try:
+ assert f() == 3
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith('assert 2 == 3\n')
+
+
+def test_assert_within_finally():
+ excinfo = py.test.raises(ZeroDivisionError, """
+ try:
+ 1/0
+ finally:
+ i = 42
+ """)
+ s = excinfo.exconly()
+ assert re.search("ZeroDivisionError:.*division", s) is not None
+
+
+def test_assert_multiline_1():
+ try:
+ assert (f() ==
+ 3)
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith('assert 2 == 3\n')
+
+def test_assert_multiline_2():
+ try:
+ assert (f() == (4,
+ 3)[-1])
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith('assert 2 ==')
+
+def test_in():
+ try:
+ assert "hi" in [1, 2]
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith("assert 'hi' in")
+
+def test_is():
+ try:
+ assert 1 is 2
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith("assert 1 is 2")
+
+
+def test_attrib():
+ class Foo(object):
+ b = 1
+ i = Foo()
+ try:
+ assert i.b == 2
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith("assert 1 == 2")
+
+def test_attrib_inst():
+ class Foo(object):
+ b = 1
+ try:
+ assert Foo().b == 2
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith("assert 1 == 2")
+
+def test_len():
+ l = list(range(42))
+ try:
+ assert len(l) == 100
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith("assert 42 == 100")
+ assert "where 42 = len([" in s
+
+
+def test_assert_keyword_arg():
+ def f(x=3):
+ return False
+ try:
+ assert f(x=5)
+ except AssertionError:
+ e = exvalue()
+ assert "x=5" in str(e)
+
+# These tests should both fail, but should fail nicely...
+class WeirdRepr:
+ def __repr__(self):
+ return '<WeirdRepr\nsecond line>'
+
+def bug_test_assert_repr():
+ v = WeirdRepr()
+ try:
+ assert v == 1
+ except AssertionError:
+ e = exvalue()
+ assert str(e).find('WeirdRepr') != -1
+ assert str(e).find('second line') != -1
+ assert 0
+
+def test_assert_non_string():
+ try:
+ assert 0, ['list']
+ except AssertionError:
+ e = exvalue()
+ assert str(e).find("list") != -1
+
+def test_assert_implicit_multiline():
+ try:
+ x = [1,2,3]
+ assert x != [1,
+ 2, 3]
+ except AssertionError:
+ e = exvalue()
+ assert str(e).find('assert [1, 2, 3] !=') != -1
+
+@py.test.mark.xfail(py.test.__version__[0] != "2",
+ reason="broken on modern pytest",
+ run=False
+)
+def test_assert_with_brokenrepr_arg():
+ class BrokenRepr:
+ def __repr__(self): 0 / 0
+ e = AssertionError(BrokenRepr())
+ if e.msg.find("broken __repr__") == -1:
+ py.test.fail("broken __repr__ not handled correctly")
+
+def test_multiple_statements_per_line():
+ try:
+ a = 1; assert a == 2
+ except AssertionError:
+ e = exvalue()
+ assert "assert 1 == 2" in str(e)
+
+def test_power():
+ try:
+ assert 2**3 == 7
+ except AssertionError:
+ e = exvalue()
+ assert "assert (2 ** 3) == 7" in str(e)
+
+
+class TestView:
+
+ def setup_class(cls):
+ cls.View = py.test.importorskip("py._code._assertionold").View
+
+ def test_class_dispatch(self):
+ ### Use a custom class hierarchy with existing instances
+
+ class Picklable(self.View):
+ pass
+
+ class Simple(Picklable):
+ __view__ = object
+ def pickle(self):
+ return repr(self.__obj__)
+
+ class Seq(Picklable):
+ __view__ = list, tuple, dict
+ def pickle(self):
+ return ';'.join(
+ [Picklable(item).pickle() for item in self.__obj__])
+
+ class Dict(Seq):
+ __view__ = dict
+ def pickle(self):
+ return Seq.pickle(self) + '!' + Seq(self.values()).pickle()
+
+ assert Picklable(123).pickle() == '123'
+ assert Picklable([1,[2,3],4]).pickle() == '1;2;3;4'
+ assert Picklable({1:2}).pickle() == '1!2'
+
+ def test_viewtype_class_hierarchy(self):
+ # Use a custom class hierarchy based on attributes of existing instances
+ class Operation:
+ "Existing class that I don't want to change."
+ def __init__(self, opname, *args):
+ self.opname = opname
+ self.args = args
+
+ existing = [Operation('+', 4, 5),
+ Operation('getitem', '', 'join'),
+ Operation('setattr', 'x', 'y', 3),
+ Operation('-', 12, 1)]
+
+ class PyOp(self.View):
+ def __viewkey__(self):
+ return self.opname
+ def generate(self):
+ return '%s(%s)' % (self.opname, ', '.join(map(repr, self.args)))
+
+ class PyBinaryOp(PyOp):
+ __view__ = ('+', '-', '*', '/')
+ def generate(self):
+ return '%s %s %s' % (self.args[0], self.opname, self.args[1])
+
+ codelines = [PyOp(op).generate() for op in existing]
+ assert codelines == ["4 + 5", "getitem('', 'join')",
+ "setattr('x', 'y', 3)", "12 - 1"]
+
+def test_underscore_api():
+ py.code._AssertionError
+ py.code._reinterpret_old # used by pypy
+ py.code._reinterpret
+
+def test_assert_customizable_reprcompare(monkeypatch):
+ util = pytest.importorskip("_pytest.assertion.util")
+ monkeypatch.setattr(util, '_reprcompare', lambda *args: 'hello')
+ try:
+ assert 3 == 4
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert "hello" in s
+
+def test_assert_long_source_1():
+ try:
+ assert len == [
+ (None, ['somet text', 'more text']),
+ ]
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert 're-run' not in s
+ assert 'somet text' in s
+
+def test_assert_long_source_2():
+ try:
+ assert(len == [
+ (None, ['somet text', 'more text']),
+ ])
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert 're-run' not in s
+ assert 'somet text' in s
+
+def test_assert_raise_alias(testdir):
+ testdir.makepyfile("""
+ import sys
+ EX = AssertionError
+ def test_hello():
+ raise EX("hello"
+ "multi"
+ "line")
+ """)
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines([
+ "*def test_hello*",
+ "*raise EX*",
+ "*1 failed*",
+ ])
+
+@py.test.mark.xfail(py.test.__version__[0] != "2",
+ reason="broken on modern pytest",
+ run=False)
+def test_assert_raise_subclass():
+ class SomeEx(AssertionError):
+ def __init__(self, *args):
+ super(SomeEx, self).__init__()
+ try:
+ raise SomeEx("hello")
+ except AssertionError as e:
+ s = str(e)
+ assert 're-run' not in s
+ assert 'could not determine' in s
+
+def test_assert_raises_in_nonzero_of_object_pytest_issue10():
+ class A(object):
+ def __nonzero__(self):
+ raise ValueError(42)
+ def __lt__(self, other):
+ return A()
+ def __repr__(self):
+ return "<MY42 object>"
+ def myany(x):
+ return True
+ try:
+ assert not(myany(A() < 0))
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert "<MY42 object> < 0" in s
diff --git a/third_party/python/py/testing/code/test_code.py b/third_party/python/py/testing/code/test_code.py
new file mode 100644
index 0000000000..28ec628b00
--- /dev/null
+++ b/third_party/python/py/testing/code/test_code.py
@@ -0,0 +1,159 @@
+import py
+import sys
+
+def test_ne():
+ code1 = py.code.Code(compile('foo = "bar"', '', 'exec'))
+ assert code1 == code1
+ code2 = py.code.Code(compile('foo = "baz"', '', 'exec'))
+ assert code2 != code1
+
+def test_code_gives_back_name_for_not_existing_file():
+ name = 'abc-123'
+ co_code = compile("pass\n", name, 'exec')
+ assert co_code.co_filename == name
+ code = py.code.Code(co_code)
+ assert str(code.path) == name
+ assert code.fullsource is None
+
+def test_code_with_class():
+ class A:
+ pass
+ py.test.raises(TypeError, "py.code.Code(A)")
+
+if True:
+ def x():
+ pass
+
+def test_code_fullsource():
+ code = py.code.Code(x)
+ full = code.fullsource
+ assert 'test_code_fullsource()' in str(full)
+
+def test_code_source():
+ code = py.code.Code(x)
+ src = code.source()
+ expected = """def x():
+ pass"""
+ assert str(src) == expected
+
+def test_frame_getsourcelineno_myself():
+ def func():
+ return sys._getframe(0)
+ f = func()
+ f = py.code.Frame(f)
+ source, lineno = f.code.fullsource, f.lineno
+ assert source[lineno].startswith(" return sys._getframe(0)")
+
+def test_getstatement_empty_fullsource():
+ def func():
+ return sys._getframe(0)
+ f = func()
+ f = py.code.Frame(f)
+ prop = f.code.__class__.fullsource
+ try:
+ f.code.__class__.fullsource = None
+ assert f.statement == py.code.Source("")
+ finally:
+ f.code.__class__.fullsource = prop
+
+def test_code_from_func():
+ co = py.code.Code(test_frame_getsourcelineno_myself)
+ assert co.firstlineno
+ assert co.path
+
+
+
+def test_builtin_patch_unpatch(monkeypatch):
+ cpy_builtin = py.builtin.builtins
+ comp = cpy_builtin.compile
+ def mycompile(*args, **kwargs):
+ return comp(*args, **kwargs)
+ class Sub(AssertionError):
+ pass
+ monkeypatch.setattr(cpy_builtin, 'AssertionError', Sub)
+ monkeypatch.setattr(cpy_builtin, 'compile', mycompile)
+ py.code.patch_builtins()
+ assert cpy_builtin.AssertionError != Sub
+ assert cpy_builtin.compile != mycompile
+ py.code.unpatch_builtins()
+ assert cpy_builtin.AssertionError is Sub
+ assert cpy_builtin.compile == mycompile
+
+
+def test_unicode_handling():
+ value = py.builtin._totext('\xc4\x85\xc4\x87\n', 'utf-8').encode('utf8')
+ def f():
+ raise Exception(value)
+ excinfo = py.test.raises(Exception, f)
+ s = str(excinfo)
+ if sys.version_info[0] < 3:
+ u = unicode(excinfo)
+
+def test_code_getargs():
+ def f1(x):
+ pass
+ c1 = py.code.Code(f1)
+ assert c1.getargs(var=True) == ('x',)
+
+ def f2(x, *y):
+ pass
+ c2 = py.code.Code(f2)
+ assert c2.getargs(var=True) == ('x', 'y')
+
+ def f3(x, **z):
+ pass
+ c3 = py.code.Code(f3)
+ assert c3.getargs(var=True) == ('x', 'z')
+
+ def f4(x, *y, **z):
+ pass
+ c4 = py.code.Code(f4)
+ assert c4.getargs(var=True) == ('x', 'y', 'z')
+
+
+def test_frame_getargs():
+ def f1(x):
+ return sys._getframe(0)
+ fr1 = py.code.Frame(f1('a'))
+ assert fr1.getargs(var=True) == [('x', 'a')]
+
+ def f2(x, *y):
+ return sys._getframe(0)
+ fr2 = py.code.Frame(f2('a', 'b', 'c'))
+ assert fr2.getargs(var=True) == [('x', 'a'), ('y', ('b', 'c'))]
+
+ def f3(x, **z):
+ return sys._getframe(0)
+ fr3 = py.code.Frame(f3('a', b='c'))
+ assert fr3.getargs(var=True) == [('x', 'a'), ('z', {'b': 'c'})]
+
+ def f4(x, *y, **z):
+ return sys._getframe(0)
+ fr4 = py.code.Frame(f4('a', 'b', c='d'))
+ assert fr4.getargs(var=True) == [('x', 'a'), ('y', ('b',)),
+ ('z', {'c': 'd'})]
+
+
+class TestExceptionInfo:
+
+ def test_bad_getsource(self):
+ try:
+ if False: pass
+ else: assert False
+ except AssertionError:
+ exci = py.code.ExceptionInfo()
+ assert exci.getrepr()
+
+
+class TestTracebackEntry:
+
+ def test_getsource(self):
+ try:
+ if False: pass
+ else: assert False
+ except AssertionError:
+ exci = py.code.ExceptionInfo()
+ entry = exci.traceback[0]
+ source = entry.getsource()
+ assert len(source) == 4
+ assert 'else: assert False' in source[3]
diff --git a/third_party/python/py/testing/code/test_excinfo.py b/third_party/python/py/testing/code/test_excinfo.py
new file mode 100644
index 0000000000..c148ab8cfb
--- /dev/null
+++ b/third_party/python/py/testing/code/test_excinfo.py
@@ -0,0 +1,956 @@
+# -*- coding: utf-8 -*-
+
+import py
+import pytest
+import sys
+from test_source import astonly
+
+from py._code.code import FormattedExcinfo, ReprExceptionInfo
+queue = py.builtin._tryimport('queue', 'Queue')
+
+failsonjython = py.test.mark.xfail("sys.platform.startswith('java')")
+
+try:
+ import importlib
+except ImportError:
+ invalidate_import_caches = None
+else:
+ invalidate_import_caches = getattr(importlib, "invalidate_caches", None)
+
+
+pytest_version_info = tuple(map(int, pytest.__version__.split(".")[:3]))
+
+broken_on_modern_pytest = pytest.mark.xfail(
+ pytest_version_info[0] != 2,
+ reason="this test hasn't been fixed after moving py.code into pytest",
+ run=False
+ )
+
+
+class TWMock:
+ def __init__(self):
+ self.lines = []
+
+ def sep(self, sep, line=None):
+ self.lines.append((sep, line))
+
+ def line(self, line, **kw):
+ self.lines.append(line)
+
+ def markup(self, text, **kw):
+ return text
+
+ fullwidth = 80
+
+
+def test_excinfo_simple():
+ try:
+ raise ValueError
+ except ValueError:
+ info = py.code.ExceptionInfo()
+ assert info.type == ValueError
+
+
+def test_excinfo_getstatement():
+ def g():
+ raise ValueError
+
+ def f():
+ g()
+ try:
+ f()
+ except ValueError:
+ excinfo = py.code.ExceptionInfo()
+ linenumbers = [
+ py.code.getrawcode(f).co_firstlineno-1+3,
+ py.code.getrawcode(f).co_firstlineno-1+1,
+ py.code.getrawcode(g).co_firstlineno-1+1,
+ ]
+ l = list(excinfo.traceback)
+ foundlinenumbers = [x.lineno for x in l]
+ assert foundlinenumbers == linenumbers
+ #for x in info:
+ # print "%s:%d %s" %(x.path.relto(root), x.lineno, x.statement)
+ #xxx
+
+# testchain for getentries test below
+def f():
+ #
+ raise ValueError
+ #
+def g():
+ #
+ __tracebackhide__ = True
+ f()
+ #
+def h():
+ #
+ g()
+ #
+
+class TestTraceback_f_g_h:
+ def setup_method(self, method):
+ try:
+ h()
+ except ValueError:
+ self.excinfo = py.code.ExceptionInfo()
+
+ def test_traceback_entries(self):
+ tb = self.excinfo.traceback
+ entries = list(tb)
+ assert len(tb) == 4 # maybe fragile test
+ assert len(entries) == 4 # maybe fragile test
+ names = ['f', 'g', 'h']
+ for entry in entries:
+ try:
+ names.remove(entry.frame.code.name)
+ except ValueError:
+ pass
+ assert not names
+
+ def test_traceback_entry_getsource(self):
+ tb = self.excinfo.traceback
+ s = str(tb[-1].getsource())
+ assert s.startswith("def f():")
+ assert s.endswith("raise ValueError")
+
+ @astonly
+ @failsonjython
+ def test_traceback_entry_getsource_in_construct(self):
+ source = py.code.Source("""\
+ def xyz():
+ try:
+ raise ValueError
+ except somenoname:
+ pass
+ xyz()
+ """)
+ try:
+ exec (source.compile())
+ except NameError:
+ tb = py.code.ExceptionInfo().traceback
+ print (tb[-1].getsource())
+ s = str(tb[-1].getsource())
+ assert s.startswith("def xyz():\n try:")
+ assert s.strip().endswith("except somenoname:")
+
+ def test_traceback_cut(self):
+ co = py.code.Code(f)
+ path, firstlineno = co.path, co.firstlineno
+ traceback = self.excinfo.traceback
+ newtraceback = traceback.cut(path=path, firstlineno=firstlineno)
+ assert len(newtraceback) == 1
+ newtraceback = traceback.cut(path=path, lineno=firstlineno+2)
+ assert len(newtraceback) == 1
+
+ def test_traceback_cut_excludepath(self, testdir):
+ p = testdir.makepyfile("def f(): raise ValueError")
+ excinfo = py.test.raises(ValueError, "p.pyimport().f()")
+ basedir = py.path.local(py.test.__file__).dirpath()
+ newtraceback = excinfo.traceback.cut(excludepath=basedir)
+ for x in newtraceback:
+ if hasattr(x, 'path'):
+ assert not py.path.local(x.path).relto(basedir)
+ assert newtraceback[-1].frame.code.path == p
+
+ def test_traceback_filter(self):
+ traceback = self.excinfo.traceback
+ ntraceback = traceback.filter()
+ assert len(ntraceback) == len(traceback) - 1
+
+ def test_traceback_recursion_index(self):
+ def f(n):
+ if n < 10:
+ n += 1
+ f(n)
+ excinfo = py.test.raises(RuntimeError, f, 8)
+ traceback = excinfo.traceback
+ recindex = traceback.recursionindex()
+ assert recindex == 3
+
+ def test_traceback_only_specific_recursion_errors(self, monkeypatch):
+ def f(n):
+ if n == 0:
+ raise RuntimeError("hello")
+ f(n-1)
+
+ excinfo = pytest.raises(RuntimeError, f, 100)
+ monkeypatch.delattr(excinfo.traceback.__class__, "recursionindex")
+ repr = excinfo.getrepr()
+ assert "RuntimeError: hello" in str(repr.reprcrash)
+
+ def test_traceback_no_recursion_index(self):
+ def do_stuff():
+ raise RuntimeError
+
+ def reraise_me():
+ import sys
+ exc, val, tb = sys.exc_info()
+ py.builtin._reraise(exc, val, tb)
+
+ def f(n):
+ try:
+ do_stuff()
+ except:
+ reraise_me()
+ excinfo = py.test.raises(RuntimeError, f, 8)
+ traceback = excinfo.traceback
+ recindex = traceback.recursionindex()
+ assert recindex is None
+
+ def test_traceback_messy_recursion(self):
+ # XXX: simplified locally testable version
+ decorator = py.test.importorskip('decorator').decorator
+
+ def log(f, *k, **kw):
+ print('%s %s' % (k, kw))
+ f(*k, **kw)
+ log = decorator(log)
+
+ def fail():
+ raise ValueError('')
+
+ fail = log(log(fail))
+
+ excinfo = py.test.raises(ValueError, fail)
+ assert excinfo.traceback.recursionindex() is None
+
+ def test_traceback_getcrashentry(self):
+ def i():
+ __tracebackhide__ = True
+ raise ValueError
+
+ def h():
+ i()
+
+ def g():
+ __tracebackhide__ = True
+ h()
+
+ def f():
+ g()
+
+ excinfo = py.test.raises(ValueError, f)
+ tb = excinfo.traceback
+ entry = tb.getcrashentry()
+ co = py.code.Code(h)
+ assert entry.frame.code.path == co.path
+ assert entry.lineno == co.firstlineno + 1
+ assert entry.frame.code.name == 'h'
+
+ def test_traceback_getcrashentry_empty(self):
+ def g():
+ __tracebackhide__ = True
+ raise ValueError
+
+ def f():
+ __tracebackhide__ = True
+ g()
+
+ excinfo = py.test.raises(ValueError, f)
+ tb = excinfo.traceback
+ entry = tb.getcrashentry()
+ co = py.code.Code(g)
+ assert entry.frame.code.path == co.path
+ assert entry.lineno == co.firstlineno + 2
+ assert entry.frame.code.name == 'g'
+
+
+def hello(x):
+ x + 5
+
+
+def test_tbentry_reinterpret():
+ try:
+ hello("hello")
+ except TypeError:
+ excinfo = py.code.ExceptionInfo()
+ tbentry = excinfo.traceback[-1]
+ msg = tbentry.reinterpret()
+ assert msg.startswith("TypeError: ('hello' + 5)")
+
+
+def test_excinfo_exconly():
+ excinfo = py.test.raises(ValueError, h)
+ assert excinfo.exconly().startswith('ValueError')
+ excinfo = py.test.raises(ValueError,
+ "raise ValueError('hello\\nworld')")
+ msg = excinfo.exconly(tryshort=True)
+ assert msg.startswith('ValueError')
+ assert msg.endswith("world")
+
+
+def test_excinfo_repr():
+ excinfo = py.test.raises(ValueError, h)
+ s = repr(excinfo)
+ assert s == "<ExceptionInfo ValueError tblen=4>"
+
+
+def test_excinfo_str():
+ excinfo = py.test.raises(ValueError, h)
+ s = str(excinfo)
+ assert s.startswith(__file__[:-9]) # pyc file and $py.class
+ assert s.endswith("ValueError")
+ assert len(s.split(":")) >= 3 # on windows it's 4
+
+
+def test_excinfo_errisinstance():
+ excinfo = py.test.raises(ValueError, h)
+ assert excinfo.errisinstance(ValueError)
+
+
+def test_excinfo_no_sourcecode():
+ try:
+ exec ("raise ValueError()")
+ except ValueError:
+ excinfo = py.code.ExceptionInfo()
+ s = str(excinfo.traceback[-1])
+ assert s == " File '<string>':1 in <module>\n ???\n"
+
+
+def test_excinfo_no_python_sourcecode(tmpdir):
+ #XXX: simplified locally testable version
+ tmpdir.join('test.txt').write("{{ h()}}:")
+
+ jinja2 = py.test.importorskip('jinja2')
+ loader = jinja2.FileSystemLoader(str(tmpdir))
+ env = jinja2.Environment(loader=loader)
+ template = env.get_template('test.txt')
+ excinfo = py.test.raises(ValueError,
+ template.render, h=h)
+ for item in excinfo.traceback:
+ print(item) # XXX: for some reason jinja.Template.render is printed in full
+ item.source # shouldn't fail
+ if item.path.basename == 'test.txt':
+ assert str(item.source) == '{{ h()}}:'
+
+
+def test_entrysource_Queue_example():
+ try:
+ queue.Queue().get(timeout=0.001)
+ except queue.Empty:
+ excinfo = py.code.ExceptionInfo()
+ entry = excinfo.traceback[-1]
+ source = entry.getsource()
+ assert source is not None
+ s = str(source).strip()
+ assert s.startswith("def get")
+
+
+def test_codepath_Queue_example():
+ try:
+ queue.Queue().get(timeout=0.001)
+ except queue.Empty:
+ excinfo = py.code.ExceptionInfo()
+ entry = excinfo.traceback[-1]
+ path = entry.path
+ assert isinstance(path, py.path.local)
+ assert path.basename.lower() == "queue.py"
+ assert path.check()
+
+
+class TestFormattedExcinfo:
+ def pytest_funcarg__importasmod(self, request):
+ def importasmod(source):
+ source = py.code.Source(source)
+ tmpdir = request.getfuncargvalue("tmpdir")
+ modpath = tmpdir.join("mod.py")
+ tmpdir.ensure("__init__.py")
+ modpath.write(source)
+ if invalidate_import_caches is not None:
+ invalidate_import_caches()
+ return modpath.pyimport()
+ return importasmod
+
+ def excinfo_from_exec(self, source):
+ source = py.code.Source(source).strip()
+ try:
+ exec (source.compile())
+ except KeyboardInterrupt:
+ raise
+ except:
+ return py.code.ExceptionInfo()
+ assert 0, "did not raise"
+
+ def test_repr_source(self):
+ pr = FormattedExcinfo()
+ source = py.code.Source("""
+ def f(x):
+ pass
+ """).strip()
+ pr.flow_marker = "|"
+ lines = pr.get_source(source, 0)
+ assert len(lines) == 2
+ assert lines[0] == "| def f(x):"
+ assert lines[1] == " pass"
+
+ @broken_on_modern_pytest
+ def test_repr_source_excinfo(self):
+ """ check if indentation is right """
+ pr = FormattedExcinfo()
+ excinfo = self.excinfo_from_exec("""
+ def f():
+ assert 0
+ f()
+ """)
+ pr = FormattedExcinfo()
+ source = pr._getentrysource(excinfo.traceback[-1])
+ lines = pr.get_source(source, 1, excinfo)
+ assert lines == [
+ ' def f():',
+ '> assert 0',
+ 'E assert 0'
+ ]
+
+ def test_repr_source_not_existing(self):
+ pr = FormattedExcinfo()
+ co = compile("raise ValueError()", "", "exec")
+ try:
+ exec (co)
+ except ValueError:
+ excinfo = py.code.ExceptionInfo()
+ repr = pr.repr_excinfo(excinfo)
+ assert repr.reprtraceback.reprentries[1].lines[0] == "> ???"
+
+ def test_repr_many_line_source_not_existing(self):
+ pr = FormattedExcinfo()
+ co = compile("""
+a = 1
+raise ValueError()
+""", "", "exec")
+ try:
+ exec (co)
+ except ValueError:
+ excinfo = py.code.ExceptionInfo()
+ repr = pr.repr_excinfo(excinfo)
+ assert repr.reprtraceback.reprentries[1].lines[0] == "> ???"
+
+ def test_repr_source_failing_fullsource(self):
+ pr = FormattedExcinfo()
+
+ class FakeCode(object):
+ class raw:
+ co_filename = '?'
+ path = '?'
+ firstlineno = 5
+
+ def fullsource(self):
+ return None
+ fullsource = property(fullsource)
+
+ class FakeFrame(object):
+ code = FakeCode()
+ f_locals = {}
+ f_globals = {}
+
+ class FakeTracebackEntry(py.code.Traceback.Entry):
+ def __init__(self, tb):
+ self.lineno = 5+3
+
+ @property
+ def frame(self):
+ return FakeFrame()
+
+ class Traceback(py.code.Traceback):
+ Entry = FakeTracebackEntry
+
+ class FakeExcinfo(py.code.ExceptionInfo):
+ typename = "Foo"
+ def __init__(self):
+ pass
+
+ def exconly(self, tryshort):
+ return "EXC"
+ def errisinstance(self, cls):
+ return False
+
+ excinfo = FakeExcinfo()
+ class FakeRawTB(object):
+ tb_next = None
+ tb = FakeRawTB()
+ excinfo.traceback = Traceback(tb)
+
+ fail = IOError()
+ repr = pr.repr_excinfo(excinfo)
+ assert repr.reprtraceback.reprentries[0].lines[0] == "> ???"
+
+ fail = py.error.ENOENT
+ repr = pr.repr_excinfo(excinfo)
+ assert repr.reprtraceback.reprentries[0].lines[0] == "> ???"
+
+
+ def test_repr_local(self):
+ p = FormattedExcinfo(showlocals=True)
+ loc = {'y': 5, 'z': 7, 'x': 3, '@x': 2, '__builtins__': {}}
+ reprlocals = p.repr_locals(loc)
+ assert reprlocals.lines
+ assert reprlocals.lines[0] == '__builtins__ = <builtins>'
+ assert reprlocals.lines[1] == 'x = 3'
+ assert reprlocals.lines[2] == 'y = 5'
+ assert reprlocals.lines[3] == 'z = 7'
+
+ def test_repr_tracebackentry_lines(self, importasmod):
+ mod = importasmod("""
+ def func1():
+ raise ValueError("hello\\nworld")
+ """)
+ excinfo = py.test.raises(ValueError, mod.func1)
+ excinfo.traceback = excinfo.traceback.filter()
+ p = FormattedExcinfo()
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-1])
+
+ # test as intermittent entry
+ lines = reprtb.lines
+ assert lines[0] == ' def func1():'
+ assert lines[1] == '> raise ValueError("hello\\nworld")'
+
+ # test as last entry
+ p = FormattedExcinfo(showlocals=True)
+ repr_entry = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = repr_entry.lines
+ assert lines[0] == ' def func1():'
+ assert lines[1] == '> raise ValueError("hello\\nworld")'
+ assert lines[2] == 'E ValueError: hello'
+ assert lines[3] == 'E world'
+ assert not lines[4:]
+
+ loc = repr_entry.reprlocals is not None
+ loc = repr_entry.reprfileloc
+ assert loc.path == mod.__file__
+ assert loc.lineno == 3
+ #assert loc.message == "ValueError: hello"
+
+ def test_repr_tracebackentry_lines2(self, importasmod):
+ mod = importasmod("""
+ def func1(m, x, y, z):
+ raise ValueError("hello\\nworld")
+ """)
+ excinfo = py.test.raises(ValueError, mod.func1, "m"*90, 5, 13, "z"*120)
+ excinfo.traceback = excinfo.traceback.filter()
+ entry = excinfo.traceback[-1]
+ p = FormattedExcinfo(funcargs=True)
+ reprfuncargs = p.repr_args(entry)
+ assert reprfuncargs.args[0] == ('m', repr("m"*90))
+ assert reprfuncargs.args[1] == ('x', '5')
+ assert reprfuncargs.args[2] == ('y', '13')
+ assert reprfuncargs.args[3] == ('z', repr("z" * 120))
+
+ p = FormattedExcinfo(funcargs=True)
+ repr_entry = p.repr_traceback_entry(entry)
+ assert repr_entry.reprfuncargs.args == reprfuncargs.args
+ tw = TWMock()
+ repr_entry.toterminal(tw)
+ assert tw.lines[0] == "m = " + repr('m' * 90)
+ assert tw.lines[1] == "x = 5, y = 13"
+ assert tw.lines[2] == "z = " + repr('z' * 120)
+
+ def test_repr_tracebackentry_lines_var_kw_args(self, importasmod):
+ mod = importasmod("""
+ def func1(x, *y, **z):
+ raise ValueError("hello\\nworld")
+ """)
+ excinfo = py.test.raises(ValueError, mod.func1, 'a', 'b', c='d')
+ excinfo.traceback = excinfo.traceback.filter()
+ entry = excinfo.traceback[-1]
+ p = FormattedExcinfo(funcargs=True)
+ reprfuncargs = p.repr_args(entry)
+ assert reprfuncargs.args[0] == ('x', repr('a'))
+ assert reprfuncargs.args[1] == ('y', repr(('b',)))
+ assert reprfuncargs.args[2] == ('z', repr({'c': 'd'}))
+
+ p = FormattedExcinfo(funcargs=True)
+ repr_entry = p.repr_traceback_entry(entry)
+ assert repr_entry.reprfuncargs.args == reprfuncargs.args
+ tw = TWMock()
+ repr_entry.toterminal(tw)
+ assert tw.lines[0] == "x = 'a', y = ('b',), z = {'c': 'd'}"
+
+ def test_repr_tracebackentry_short(self, importasmod):
+ mod = importasmod("""
+ def func1():
+ raise ValueError("hello")
+ def entry():
+ func1()
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-2])
+ lines = reprtb.lines
+ basename = py.path.local(mod.__file__).basename
+ assert lines[0] == ' func1()'
+ assert basename in str(reprtb.reprfileloc.path)
+ assert reprtb.reprfileloc.lineno == 5
+
+ # test last entry
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = reprtb.lines
+ assert lines[0] == ' raise ValueError("hello")'
+ assert lines[1] == 'E ValueError: hello'
+ assert basename in str(reprtb.reprfileloc.path)
+ assert reprtb.reprfileloc.lineno == 3
+
+ def test_repr_tracebackentry_no(self, importasmod):
+ mod = importasmod("""
+ def func1():
+ raise ValueError("hello")
+ def entry():
+ func1()
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+ p = FormattedExcinfo(style="no")
+ p.repr_traceback_entry(excinfo.traceback[-2])
+
+ p = FormattedExcinfo(style="no")
+ reprentry = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = reprentry.lines
+ assert lines[0] == 'E ValueError: hello'
+ assert not lines[1:]
+
+ def test_repr_traceback_tbfilter(self, importasmod):
+ mod = importasmod("""
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+ p = FormattedExcinfo(tbfilter=True)
+ reprtb = p.repr_traceback(excinfo)
+ assert len(reprtb.reprentries) == 2
+ p = FormattedExcinfo(tbfilter=False)
+ reprtb = p.repr_traceback(excinfo)
+ assert len(reprtb.reprentries) == 3
+
+ def test_traceback_short_no_source(self, importasmod, monkeypatch):
+ mod = importasmod("""
+ def func1():
+ raise ValueError("hello")
+ def entry():
+ func1()
+ """)
+ try:
+ mod.entry()
+ except ValueError:
+ excinfo = py.code.ExceptionInfo()
+ from py._code.code import Code
+ monkeypatch.setattr(Code, 'path', 'bogus')
+ excinfo.traceback[0].frame.code.path = "bogus"
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-2])
+ lines = reprtb.lines
+ last_p = FormattedExcinfo(style="short")
+ last_reprtb = last_p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ last_lines = last_reprtb.lines
+ monkeypatch.undo()
+ basename = py.path.local(mod.__file__).basename
+ assert lines[0] == ' func1()'
+
+ assert last_lines[0] == ' raise ValueError("hello")'
+ assert last_lines[1] == 'E ValueError: hello'
+
+ def test_repr_traceback_and_excinfo(self, importasmod):
+ mod = importasmod("""
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+
+ for style in ("long", "short"):
+ p = FormattedExcinfo(style=style)
+ reprtb = p.repr_traceback(excinfo)
+ assert len(reprtb.reprentries) == 2
+ assert reprtb.style == style
+ assert not reprtb.extraline
+ repr = p.repr_excinfo(excinfo)
+ assert repr.reprtraceback
+ assert len(repr.reprtraceback.reprentries) == len(reprtb.reprentries)
+ assert repr.reprcrash.path.endswith("mod.py")
+ assert repr.reprcrash.message == "ValueError: 0"
+
+ def test_repr_traceback_with_invalid_cwd(self, importasmod, monkeypatch):
+ mod = importasmod("""
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+
+ p = FormattedExcinfo()
+ def raiseos():
+ raise OSError(2)
+ monkeypatch.setattr('os.getcwd', raiseos)
+ assert p._makepath(__file__) == __file__
+ reprtb = p.repr_traceback(excinfo)
+
+ @broken_on_modern_pytest
+ def test_repr_excinfo_addouterr(self, importasmod):
+ mod = importasmod("""
+ def entry():
+ raise ValueError()
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+ repr = excinfo.getrepr()
+ repr.addsection("title", "content")
+ twmock = TWMock()
+ repr.toterminal(twmock)
+ assert twmock.lines[-1] == "content"
+ assert twmock.lines[-2] == ("-", "title")
+
+ def test_repr_excinfo_reprcrash(self, importasmod):
+ mod = importasmod("""
+ def entry():
+ raise ValueError()
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+ repr = excinfo.getrepr()
+ assert repr.reprcrash.path.endswith("mod.py")
+ assert repr.reprcrash.lineno == 3
+ assert repr.reprcrash.message == "ValueError"
+ assert str(repr.reprcrash).endswith("mod.py:3: ValueError")
+
+ def test_repr_traceback_recursion(self, importasmod):
+ mod = importasmod("""
+ def rec2(x):
+ return rec1(x+1)
+ def rec1(x):
+ return rec2(x-1)
+ def entry():
+ rec1(42)
+ """)
+ excinfo = py.test.raises(RuntimeError, mod.entry)
+
+ for style in ("short", "long", "no"):
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback(excinfo)
+ assert reprtb.extraline == "!!! Recursion detected (same locals & position)"
+ assert str(reprtb)
+
+ @broken_on_modern_pytest
+ def test_tb_entry_AssertionError(self, importasmod):
+ # probably this test is a bit redundant
+ # as py/magic/testing/test_assertion.py
+ # already tests correctness of
+ # assertion-reinterpretation logic
+ mod = importasmod("""
+ def somefunc():
+ x = 1
+ assert x == 2
+ """)
+ excinfo = py.test.raises(AssertionError, mod.somefunc)
+
+ p = FormattedExcinfo()
+ reprentry = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = reprentry.lines
+ assert lines[-1] == "E assert 1 == 2"
+
+ def test_reprexcinfo_getrepr(self, importasmod):
+ mod = importasmod("""
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """)
+ try:
+ mod.entry()
+ except ValueError:
+ excinfo = py.code.ExceptionInfo()
+
+ for style in ("short", "long", "no"):
+ for showlocals in (True, False):
+ repr = excinfo.getrepr(style=style, showlocals=showlocals)
+ assert isinstance(repr, ReprExceptionInfo)
+ assert repr.reprtraceback.style == style
+
+ def test_reprexcinfo_unicode(self):
+ from py._code.code import TerminalRepr
+ class MyRepr(TerminalRepr):
+ def toterminal(self, tw):
+ tw.line(py.builtin._totext("я", "utf-8"))
+ x = py.builtin._totext(MyRepr())
+ assert x == py.builtin._totext("я", "utf-8")
+
+ @broken_on_modern_pytest
+ def test_toterminal_long(self, importasmod):
+ mod = importasmod("""
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """)
+ excinfo = py.test.raises(ValueError, mod.f)
+ excinfo.traceback = excinfo.traceback.filter()
+ repr = excinfo.getrepr()
+ tw = TWMock()
+ repr.toterminal(tw)
+ assert tw.lines[0] == ""
+ tw.lines.pop(0)
+ assert tw.lines[0] == " def f():"
+ assert tw.lines[1] == "> g(3)"
+ assert tw.lines[2] == ""
+ assert tw.lines[3].endswith("mod.py:5: ")
+ assert tw.lines[4] == ("_ ", None)
+ assert tw.lines[5] == ""
+ assert tw.lines[6] == " def g(x):"
+ assert tw.lines[7] == "> raise ValueError(x)"
+ assert tw.lines[8] == "E ValueError: 3"
+ assert tw.lines[9] == ""
+ assert tw.lines[10].endswith("mod.py:3: ValueError")
+
+ @broken_on_modern_pytest
+ def test_toterminal_long_missing_source(self, importasmod, tmpdir):
+ mod = importasmod("""
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """)
+ excinfo = py.test.raises(ValueError, mod.f)
+ tmpdir.join('mod.py').remove()
+ excinfo.traceback = excinfo.traceback.filter()
+ repr = excinfo.getrepr()
+ tw = TWMock()
+ repr.toterminal(tw)
+ assert tw.lines[0] == ""
+ tw.lines.pop(0)
+ assert tw.lines[0] == "> ???"
+ assert tw.lines[1] == ""
+ assert tw.lines[2].endswith("mod.py:5: ")
+ assert tw.lines[3] == ("_ ", None)
+ assert tw.lines[4] == ""
+ assert tw.lines[5] == "> ???"
+ assert tw.lines[6] == "E ValueError: 3"
+ assert tw.lines[7] == ""
+ assert tw.lines[8].endswith("mod.py:3: ValueError")
+
+ @broken_on_modern_pytest
+ def test_toterminal_long_incomplete_source(self, importasmod, tmpdir):
+ mod = importasmod("""
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """)
+ excinfo = py.test.raises(ValueError, mod.f)
+ tmpdir.join('mod.py').write('asdf')
+ excinfo.traceback = excinfo.traceback.filter()
+ repr = excinfo.getrepr()
+ tw = TWMock()
+ repr.toterminal(tw)
+ assert tw.lines[0] == ""
+ tw.lines.pop(0)
+ assert tw.lines[0] == "> ???"
+ assert tw.lines[1] == ""
+ assert tw.lines[2].endswith("mod.py:5: ")
+ assert tw.lines[3] == ("_ ", None)
+ assert tw.lines[4] == ""
+ assert tw.lines[5] == "> ???"
+ assert tw.lines[6] == "E ValueError: 3"
+ assert tw.lines[7] == ""
+ assert tw.lines[8].endswith("mod.py:3: ValueError")
+
+ @broken_on_modern_pytest
+ def test_toterminal_long_filenames(self, importasmod):
+ mod = importasmod("""
+ def f():
+ raise ValueError()
+ """)
+ excinfo = py.test.raises(ValueError, mod.f)
+ tw = TWMock()
+ path = py.path.local(mod.__file__)
+ old = path.dirpath().chdir()
+ try:
+ repr = excinfo.getrepr(abspath=False)
+ repr.toterminal(tw)
+ line = tw.lines[-1]
+ x = py.path.local().bestrelpath(path)
+ if len(x) < len(str(path)):
+ assert line == "mod.py:3: ValueError"
+
+ repr = excinfo.getrepr(abspath=True)
+ repr.toterminal(tw)
+ line = tw.lines[-1]
+ assert line == "%s:3: ValueError" %(path,)
+ finally:
+ old.chdir()
+
+ @pytest.mark.parametrize('style', ("long", "short", "no"))
+ @pytest.mark.parametrize('showlocals', (True, False),
+ ids=['locals', 'nolocals'])
+ @pytest.mark.parametrize('tbfilter', (True, False),
+ ids=['tbfilter', 'nofilter'])
+ @pytest.mark.parametrize('funcargs', (True, False),
+ ids=['funcargs', 'nofuncargs'])
+ def test_format_excinfo(self, importasmod,
+ style, showlocals, tbfilter, funcargs):
+
+ mod = importasmod("""
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """)
+ excinfo = py.test.raises(ValueError, mod.f)
+ tw = py.io.TerminalWriter(stringio=True)
+ repr = excinfo.getrepr(
+ style=style,
+ showlocals=showlocals,
+ funcargs=funcargs,
+ tbfilter=tbfilter
+ )
+ repr.toterminal(tw)
+ assert tw.stringio.getvalue()
+
+ @broken_on_modern_pytest
+ def test_native_style(self):
+ excinfo = self.excinfo_from_exec("""
+ assert 0
+ """)
+ repr = excinfo.getrepr(style='native')
+ assert "assert 0" in str(repr.reprcrash)
+ s = str(repr)
+ assert s.startswith('Traceback (most recent call last):\n File')
+ assert s.endswith('\nAssertionError: assert 0')
+ assert 'exec (source.compile())' in s
+ assert s.count('assert 0') == 2
+
+ @broken_on_modern_pytest
+ def test_traceback_repr_style(self, importasmod):
+ mod = importasmod("""
+ def f():
+ g()
+ def g():
+ h()
+ def h():
+ i()
+ def i():
+ raise ValueError()
+ """)
+ excinfo = py.test.raises(ValueError, mod.f)
+ excinfo.traceback = excinfo.traceback.filter()
+ excinfo.traceback[1].set_repr_style("short")
+ excinfo.traceback[2].set_repr_style("short")
+ r = excinfo.getrepr(style="long")
+ tw = TWMock()
+ r.toterminal(tw)
+ for line in tw.lines: print (line)
+ assert tw.lines[0] == ""
+ assert tw.lines[1] == " def f():"
+ assert tw.lines[2] == "> g()"
+ assert tw.lines[3] == ""
+ assert tw.lines[4].endswith("mod.py:3: ")
+ assert tw.lines[5] == ("_ ", None)
+ assert tw.lines[6].endswith("in g")
+ assert tw.lines[7] == " h()"
+ assert tw.lines[8].endswith("in h")
+ assert tw.lines[9] == " i()"
+ assert tw.lines[10] == ("_ ", None)
+ assert tw.lines[11] == ""
+ assert tw.lines[12] == " def i():"
+ assert tw.lines[13] == "> raise ValueError()"
+ assert tw.lines[14] == "E ValueError"
+ assert tw.lines[15] == ""
+ assert tw.lines[16].endswith("mod.py:9: ValueError")
diff --git a/third_party/python/py/testing/code/test_source.py b/third_party/python/py/testing/code/test_source.py
new file mode 100644
index 0000000000..3492761a4e
--- /dev/null
+++ b/third_party/python/py/testing/code/test_source.py
@@ -0,0 +1,648 @@
+from py.code import Source
+import py
+import sys
+import inspect
+
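+# tests marked ``astonly`` rely on AST-based statement extraction: they run
+# normally when py's _ast support is available and are expected to fail
+# otherwise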
+from py._code.source import _ast
+if _ast is not None:
+ astonly = py.test.mark.nothing
+else:
+ astonly = py.test.mark.xfail("True", reason="only works with AST-compile")
+
+failsonjython = py.test.mark.xfail("sys.platform.startswith('java')")
+
+def test_source_str_function():
+ x = Source("3")
+ assert str(x) == "3"
+
+ x = Source(" 3")
+ assert str(x) == "3"
+
+ x = Source("""
+ 3
+ """, rstrip=False)
+ assert str(x) == "\n3\n "
+
+ x = Source("""
+ 3
+ """, rstrip=True)
+ assert str(x) == "\n3"
+
+def test_unicode():
+ try:
+ unicode
+ except NameError:
+ return
+ x = Source(unicode("4"))
+ assert str(x) == "4"
+ co = py.code.compile(unicode('u"\xc3\xa5"', 'utf8'), mode='eval')
+ val = eval(co)
+ assert isinstance(val, unicode)
+
+def test_source_from_function():
+ source = py.code.Source(test_source_str_function)
+ assert str(source).startswith('def test_source_str_function():')
+
+def test_source_from_method():
+ class TestClass:
+ def test_method(self):
+ pass
+ source = py.code.Source(TestClass().test_method)
+ assert source.lines == ["def test_method(self):",
+ " pass"]
+
+def test_source_from_lines():
+ lines = ["a \n", "b\n", "c"]
+ source = py.code.Source(lines)
+ assert source.lines == ['a ', 'b', 'c']
+
+def test_source_from_inner_function():
+ def f():
+ pass
+ source = py.code.Source(f, deindent=False)
+ assert str(source).startswith(' def f():')
+ source = py.code.Source(f)
+ assert str(source).startswith('def f():')
+
+def test_source_putaround_simple():
+ source = Source("raise ValueError")
+ source = source.putaround(
+ "try:", """\
+ except ValueError:
+ x = 42
+ else:
+ x = 23""")
+ assert str(source)=="""\
+try:
+ raise ValueError
+except ValueError:
+ x = 42
+else:
+ x = 23"""
+
+def test_source_putaround():
+ source = Source()
+ source = source.putaround("""
+ if 1:
+ x=1
+ """)
+ assert str(source).strip() == "if 1:\n x=1"
+
+def test_source_strips():
+ source = Source("")
+ assert source == Source()
+ assert str(source) == ''
+ assert source.strip() == source
+
+def test_source_strip_multiline():
+ source = Source()
+ source.lines = ["", " hello", " "]
+ source2 = source.strip()
+ assert source2.lines == [" hello"]
+
+def test_syntaxerror_rerepresentation():
+ ex = py.test.raises(SyntaxError, py.code.compile, 'xyz xyz')
+ assert ex.value.lineno == 1
+ assert ex.value.offset in (4,7) # XXX pypy/jython versus cpython?
+ assert ex.value.text.strip(), 'x x'
+
+def test_isparseable():
+ assert Source("hello").isparseable()
+ assert Source("if 1:\n pass").isparseable()
+ assert Source(" \nif 1:\n pass").isparseable()
+ assert not Source("if 1:\n").isparseable()
+ assert not Source(" \nif 1:\npass").isparseable()
+ assert not Source(chr(0)).isparseable()
+
+class TestAccesses:
+ source = Source("""\
+ def f(x):
+ pass
+ def g(x):
+ pass
+ """)
+ def test_getrange(self):
+ x = self.source[0:2]
+ assert x.isparseable()
+ assert len(x.lines) == 2
+ assert str(x) == "def f(x):\n pass"
+
+ def test_getline(self):
+ x = self.source[0]
+ assert x == "def f(x):"
+
+ def test_len(self):
+ assert len(self.source) == 4
+
+ def test_iter(self):
+ l = [x for x in self.source]
+ assert len(l) == 4
+
+class TestSourceParsingAndCompiling:
+ source = Source("""\
+ def f(x):
+ assert (x ==
+ 3 +
+ 4)
+ """).strip()
+
+ def test_compile(self):
+ co = py.code.compile("x=3")
+ d = {}
+ exec (co, d)
+ assert d['x'] == 3
+
+ def test_compile_and_getsource_simple(self):
+ co = py.code.compile("x=3")
+ exec (co)
+ source = py.code.Source(co)
+ assert str(source) == "x=3"
+
+ def test_compile_and_getsource_through_same_function(self):
+ def gensource(source):
+ return py.code.compile(source)
+ co1 = gensource("""
+ def f():
+ raise KeyError()
+ """)
+ co2 = gensource("""
+ def f():
+ raise ValueError()
+ """)
+ source1 = inspect.getsource(co1)
+ assert 'KeyError' in source1
+ source2 = inspect.getsource(co2)
+ assert 'ValueError' in source2
+
+ def test_getstatement(self):
+ #print str(self.source)
+ ass = str(self.source[1:])
+ for i in range(1, 4):
+ #print "trying start in line %r" % self.source[i]
+ s = self.source.getstatement(i)
+ #x = s.deindent()
+ assert str(s) == ass
+
+ def test_getstatementrange_triple_quoted(self):
+ #print str(self.source)
+ source = Source("""hello('''
+ ''')""")
+ s = source.getstatement(0)
+ assert s == str(source)
+ s = source.getstatement(1)
+ assert s == str(source)
+
+ @astonly
+ def test_getstatementrange_within_constructs(self):
+ source = Source("""\
+ try:
+ try:
+ raise ValueError
+ except SomeThing:
+ pass
+ finally:
+ 42
+ """)
+ assert len(source) == 7
+ # check all lineno's that could occur in a traceback
+ #assert source.getstatementrange(0) == (0, 7)
+ #assert source.getstatementrange(1) == (1, 5)
+ assert source.getstatementrange(2) == (2, 3)
+ assert source.getstatementrange(3) == (3, 4)
+ assert source.getstatementrange(4) == (4, 5)
+ #assert source.getstatementrange(5) == (0, 7)
+ assert source.getstatementrange(6) == (6, 7)
+
+ def test_getstatementrange_bug(self):
+ source = Source("""\
+ try:
+ x = (
+ y +
+ z)
+ except:
+ pass
+ """)
+ assert len(source) == 6
+ assert source.getstatementrange(2) == (1, 4)
+
+ def test_getstatementrange_bug2(self):
+ source = Source("""\
+ assert (
+ 33
+ ==
+ [
+ X(3,
+ b=1, c=2
+ ),
+ ]
+ )
+ """)
+ assert len(source) == 9
+ assert source.getstatementrange(5) == (0, 9)
+
+ def test_getstatementrange_ast_issue58(self):
+ source = Source("""\
+
+ def test_some():
+ for a in [a for a in
+ CAUSE_ERROR]: pass
+
+ x = 3
+ """)
+ assert getstatement(2, source).lines == source.lines[2:3]
+ assert getstatement(3, source).lines == source.lines[3:4]
+
+ def test_getstatementrange_out_of_bounds_py3(self):
+ source = Source("if xxx:\n from .collections import something")
+ r = source.getstatementrange(1)
+ assert r == (1,2)
+
+ def test_getstatementrange_with_syntaxerror_issue7(self):
+ source = Source(":")
+ py.test.raises(SyntaxError, lambda: source.getstatementrange(0))
+
+ def test_compile_to_ast(self):
+ import ast
+ source = Source("x = 4")
+ mod = source.compile(flag=ast.PyCF_ONLY_AST)
+ assert isinstance(mod, ast.Module)
+ compile(mod, "<filename>", "exec")
+
+ def test_compile_and_getsource(self):
+ co = self.source.compile()
+ py.builtin.exec_(co, globals())
+ f(7)
+ excinfo = py.test.raises(AssertionError, "f(6)")
+ frame = excinfo.traceback[-1].frame
+ stmt = frame.code.fullsource.getstatement(frame.lineno)
+ #print "block", str(block)
+ assert str(stmt).strip().startswith('assert')
+
+ def test_compilefuncs_and_path_sanity(self):
+ def check(comp, name):
+ co = comp(self.source, name)
+ if not name:
+ expected = "codegen %s:%d>" %(mypath, mylineno+2+1)
+ else:
+ expected = "codegen %r %s:%d>" % (name, mypath, mylineno+2+1)
+ fn = co.co_filename
+ assert fn.endswith(expected)
+
+ mycode = py.code.Code(self.test_compilefuncs_and_path_sanity)
+ mylineno = mycode.firstlineno
+ mypath = mycode.path
+
+ for comp in py.code.compile, py.code.Source.compile:
+ for name in '', None, 'my':
+ yield check, comp, name
+
+ def test_offsetless_synerr(self):
+ py.test.raises(SyntaxError, py.code.compile, "lambda a,a: 0", mode='eval')
+
+def test_getstartingblock_singleline():
+ class A:
+ def __init__(self, *args):
+ frame = sys._getframe(1)
+ self.source = py.code.Frame(frame).statement
+
+ x = A('x', 'y')
+
+ l = [i for i in x.source.lines if i.strip()]
+ assert len(l) == 1
+
+def test_getstartingblock_multiline():
+ class A:
+ def __init__(self, *args):
+ frame = sys._getframe(1)
+ self.source = py.code.Frame(frame).statement
+
+ x = A('x',
+ 'y' \
+ ,
+ 'z')
+
+ l = [i for i in x.source.lines if i.strip()]
+ assert len(l) == 4
+
+def test_getline_finally():
+ def c(): pass
+ excinfo = py.test.raises(TypeError, """
+ teardown = None
+ try:
+ c(1)
+ finally:
+ if teardown:
+ teardown()
+ """)
+ source = excinfo.traceback[-1].statement
+ assert str(source).strip() == 'c(1)'
+
+def test_getfuncsource_dynamic():
+ source = """
+ def f():
+ raise ValueError
+
+ def g(): pass
+ """
+ co = py.code.compile(source)
+ py.builtin.exec_(co, globals())
+ assert str(py.code.Source(f)).strip() == 'def f():\n raise ValueError'
+ assert str(py.code.Source(g)).strip() == 'def g(): pass'
+
+
+def test_getfuncsource_with_multiline_string():
+ def f():
+ c = '''while True:
+ pass
+'''
+ assert str(py.code.Source(f)).strip() == "def f():\n c = '''while True:\n pass\n'''"
+
+
+def test_deindent():
+ from py._code.source import deindent as deindent
+ assert deindent(['\tfoo', '\tbar', ]) == ['foo', 'bar']
+
+ def f():
+ c = '''while True:
+ pass
+'''
+ import inspect
+ lines = deindent(inspect.getsource(f).splitlines())
+ assert lines == ["def f():", " c = '''while True:", " pass", "'''"]
+
+ source = """
+ def f():
+ def g():
+ pass
+ """
+ lines = deindent(source.splitlines())
+ assert lines == ['', 'def f():', ' def g():', ' pass', ' ']
+
+def test_source_of_class_at_eof_without_newline(tmpdir):
+ # this test fails because the implicit inspect.getsource(A) below
+ # does not return the "x = 1" last line.
+ source = py.code.Source('''
+ class A(object):
+ def method(self):
+ x = 1
+ ''')
+ path = tmpdir.join("a.py")
+ path.write(source)
+ s2 = py.code.Source(tmpdir.join("a.py").pyimport().A)
+ assert str(source).strip() == str(s2).strip()
+
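+# module-level helper nested under ``if True:`` so that the raw file source
+# keeps ``def x():`` indented; the findsource fallback test below asserts on
+# that indented line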
+if True:
+ def x():
+ pass
+
+def test_getsource_fallback():
+ from py._code.source import getsource
+ expected = """def x():
+ pass"""
+ src = getsource(x)
+ assert src == expected
+
+def test_idem_compile_and_getsource():
+ from py._code.source import getsource
+ expected = "def x(): pass"
+ co = py.code.compile(expected)
+ src = getsource(co)
+ assert src == expected
+
+def test_findsource_fallback():
+ from py._code.source import findsource
+ src, lineno = findsource(x)
+ assert 'test_findsource_simple' in str(src)
+ assert src[lineno] == ' def x():'
+
+def test_findsource():
+ from py._code.source import findsource
+ co = py.code.compile("""if 1:
+ def x():
+ pass
+""")
+
+ src, lineno = findsource(co)
+ assert 'if 1:' in str(src)
+
+ d = {}
+ eval(co, d)
+ src, lineno = findsource(d['x'])
+ assert 'if 1:' in str(src)
+ assert src[lineno] == " def x():"
+
+
+def test_getfslineno():
+ from py.code import getfslineno
+
+ def f(x):
+ pass
+
+ fspath, lineno = getfslineno(f)
+
+ assert fspath.basename == "test_source.py"
+ assert lineno == py.code.getrawcode(f).co_firstlineno-1 # see findsource
+
+ class A(object):
+ pass
+
+ fspath, lineno = getfslineno(A)
+
+ _, A_lineno = inspect.findsource(A)
+ assert fspath.basename == "test_source.py"
+ assert lineno == A_lineno
+
+ assert getfslineno(3) == ("", -1)
+ class B:
+ pass
+ B.__name__ = "B2"
+ assert getfslineno(B)[1] == -1
+
+def test_code_of_object_instance_with_call():
+ class A:
+ pass
+ py.test.raises(TypeError, lambda: py.code.Source(A()))
+ class WithCall:
+ def __call__(self):
+ pass
+
+ code = py.code.Code(WithCall())
+ assert 'pass' in str(code.source())
+
+ class Hello(object):
+ def __call__(self):
+ pass
+ py.test.raises(TypeError, lambda: py.code.Code(Hello))
+
+
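+# test helper: resolve the full statement containing ``lineno`` via the
+# AST-based range lookup and return it as a Source slice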
+def getstatement(lineno, source):
+ from py._code.source import getstatementrange_ast
+ source = py.code.Source(source, deindent=False)
+ ast, start, end = getstatementrange_ast(lineno, source)
+ return source[start:end]
+
+def test_oneline():
+ source = getstatement(0, "raise ValueError")
+ assert str(source) == "raise ValueError"
+
+def test_comment_and_no_newline_at_end():
+ from py._code.source import getstatementrange_ast
+ source = Source(['def test_basic_complex():',
+ ' assert 1 == 2',
+ '# vim: filetype=pyopencl:fdm=marker'])
+ ast, start, end = getstatementrange_ast(1, source)
+ assert end == 2
+
+def test_oneline_and_comment():
+ source = getstatement(0, "raise ValueError\n#hello")
+ assert str(source) == "raise ValueError"
+
+def test_comments():
+ source = '''def test():
+ "comment 1"
+ x = 1
+ # comment 2
+ # comment 3
+
+ assert False
+
+"""
+comment 4
+"""
+'''
+ for line in range(2,6):
+ assert str(getstatement(line, source)) == ' x = 1'
+ for line in range(6,10):
+ assert str(getstatement(line, source)) == ' assert False'
+ assert str(getstatement(10, source)) == '"""'
+
+def test_comment_in_statement():
+ source = '''test(foo=1,
+ # comment 1
+ bar=2)
+'''
+ for line in range(1,3):
+ assert str(getstatement(line, source)) == \
+ 'test(foo=1,\n # comment 1\n bar=2)'
+
+def test_single_line_else():
+ source = getstatement(1, "if False: 2\nelse: 3")
+ assert str(source) == "else: 3"
+
+def test_single_line_finally():
+ source = getstatement(1, "try: 1\nfinally: 3")
+ assert str(source) == "finally: 3"
+
+def test_issue55():
+ source = ('def round_trip(dinp):\n assert 1 == dinp\n'
+ 'def test_rt():\n round_trip("""\n""")\n')
+ s = getstatement(3, source)
+ assert str(s) == ' round_trip("""\n""")'
+
+
+def XXXtest_multiline():
+ source = getstatement(0, """\
+raise ValueError(
+ 23
+)
+x = 3
+""")
+ assert str(source) == "raise ValueError(\n 23\n)"
+
+class TestTry:
+ pytestmark = astonly
+ source = """\
+try:
+ raise ValueError
+except Something:
+ raise IndexError(1)
+else:
+ raise KeyError()
+"""
+
+ def test_body(self):
+ source = getstatement(1, self.source)
+ assert str(source) == " raise ValueError"
+
+ def test_except_line(self):
+ source = getstatement(2, self.source)
+ assert str(source) == "except Something:"
+
+ def test_except_body(self):
+ source = getstatement(3, self.source)
+ assert str(source) == " raise IndexError(1)"
+
+ def test_else(self):
+ source = getstatement(5, self.source)
+ assert str(source) == " raise KeyError()"
+
+class TestTryFinally:
+ source = """\
+try:
+ raise ValueError
+finally:
+ raise IndexError(1)
+"""
+
+ def test_body(self):
+ source = getstatement(1, self.source)
+ assert str(source) == " raise ValueError"
+
+ def test_finally(self):
+ source = getstatement(3, self.source)
+ assert str(source) == " raise IndexError(1)"
+
+
+
+class TestIf:
+ pytestmark = astonly
+ source = """\
+if 1:
+ y = 3
+elif False:
+ y = 5
+else:
+ y = 7
+"""
+
+ def test_body(self):
+ source = getstatement(1, self.source)
+ assert str(source) == " y = 3"
+
+ def test_elif_clause(self):
+ source = getstatement(2, self.source)
+ assert str(source) == "elif False:"
+
+ def test_elif(self):
+ source = getstatement(3, self.source)
+ assert str(source) == " y = 5"
+
+ def test_else(self):
+ source = getstatement(5, self.source)
+ assert str(source) == " y = 7"
+
+def test_semicolon():
+ s = """\
+hello ; pytest.skip()
+"""
+ source = getstatement(0, s)
+ assert str(source) == s.strip()
+
+def test_def_oneline():
+ s = """\
+def func(): raise ValueError(42)
+
+def something():
+ pass
+"""
+ source = getstatement(0, s)
+ assert str(source) == "def func(): raise ValueError(42)"
+
+def XXX_test_expression_multiline():
+ source = """\
+something
+'''
+'''"""
+ result = getstatement(1, source)
+ assert str(result) == "'''\n'''"
+
diff --git a/third_party/python/py/testing/conftest.py b/third_party/python/py/testing/conftest.py
new file mode 100644
index 0000000000..0f956b3dd2
--- /dev/null
+++ b/third_party/python/py/testing/conftest.py
@@ -0,0 +1,3 @@
+
+pytest_plugins = "pytester",
+
diff --git a/third_party/python/py/testing/io_/__init__.py b/third_party/python/py/testing/io_/__init__.py
new file mode 100644
index 0000000000..792d600548
--- /dev/null
+++ b/third_party/python/py/testing/io_/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/third_party/python/py/testing/io_/test_capture.py b/third_party/python/py/testing/io_/test_capture.py
new file mode 100644
index 0000000000..b5fedd0abc
--- /dev/null
+++ b/third_party/python/py/testing/io_/test_capture.py
@@ -0,0 +1,501 @@
+from __future__ import with_statement
+
+import os, sys
+import py
+
+needsdup = py.test.mark.skipif("not hasattr(os, 'dup')")
+
+from py.builtin import print_
+
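+# bytes/text coercion helpers so the capture tests below work unchanged on
+# Python 2 and Python 3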
+if sys.version_info >= (3,0):
+ def tobytes(obj):
+ if isinstance(obj, str):
+ obj = obj.encode('UTF-8')
+ assert isinstance(obj, bytes)
+ return obj
+ def totext(obj):
+ if isinstance(obj, bytes):
+ obj = str(obj, 'UTF-8')
+ assert isinstance(obj, str)
+ return obj
+else:
+ def tobytes(obj):
+ if isinstance(obj, unicode):
+ obj = obj.encode('UTF-8')
+ assert isinstance(obj, str)
+ return obj
+ def totext(obj):
+ if isinstance(obj, str):
+ obj = unicode(obj, 'UTF-8')
+ assert isinstance(obj, unicode)
+ return obj
+
+def oswritebytes(fd, obj):
+ os.write(fd, tobytes(obj))
+
+class TestTextIO:
+ def test_text(self):
+ f = py.io.TextIO()
+ f.write("hello")
+ s = f.getvalue()
+ assert s == "hello"
+ f.close()
+
+ def test_unicode_and_str_mixture(self):
+ f = py.io.TextIO()
+ if sys.version_info >= (3,0):
+ f.write("\u00f6")
+ py.test.raises(TypeError, "f.write(bytes('hello', 'UTF-8'))")
+ else:
+ f.write(unicode("\u00f6", 'UTF-8'))
+ f.write("hello") # bytes
+ s = f.getvalue()
+ f.close()
+ assert isinstance(s, unicode)
+
+def test_bytes_io():
+ f = py.io.BytesIO()
+ f.write(tobytes("hello"))
+ py.test.raises(TypeError, "f.write(totext('hello'))")
+ s = f.getvalue()
+ assert s == tobytes("hello")
+
+def test_dontreadfrominput():
+ from py._io.capture import DontReadFromInput
+ f = DontReadFromInput()
+ assert not f.isatty()
+ py.test.raises(IOError, f.read)
+ py.test.raises(IOError, f.readlines)
+ py.test.raises(IOError, iter, f)
+ py.test.raises(ValueError, f.fileno)
+ f.close() # just for completeness
+
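+# legacy pytest funcarg factory (the pre-fixture spelling): hands each test a
+# fresh writable binary temp file for fd-level capturing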
+def pytest_funcarg__tmpfile(request):
+ testdir = request.getfuncargvalue("testdir")
+ f = testdir.makepyfile("").open('wb+')
+ request.addfinalizer(f.close)
+ return f
+
+@needsdup
+def test_dupfile(tmpfile):
+ flist = []
+ for i in range(5):
+ nf = py.io.dupfile(tmpfile, encoding="utf-8")
+ assert nf != tmpfile
+ assert nf.fileno() != tmpfile.fileno()
+ assert nf not in flist
+ print_(i, end="", file=nf)
+ flist.append(nf)
+ for i in range(5):
+ f = flist[i]
+ f.close()
+ tmpfile.seek(0)
+ s = tmpfile.read()
+ assert "01234" in repr(s)
+ tmpfile.close()
+
+def test_dupfile_no_mode():
+ """
+ dupfile should trap an AttributeError and return f if no mode is supplied.
+ """
+ class SomeFileWrapper(object):
+ "An object with a fileno method but no mode attribute"
+ def fileno(self):
+ return 1
+ tmpfile = SomeFileWrapper()
+ assert py.io.dupfile(tmpfile) is tmpfile
+ with py.test.raises(AttributeError):
+ py.io.dupfile(tmpfile, raising=True)
+
+def lsof_check(func):
+ pid = os.getpid()
+ try:
+ out = py.process.cmdexec("lsof -p %d" % pid)
+ except py.process.cmdexec.Error:
+ py.test.skip("could not run 'lsof'")
+ func()
+ out2 = py.process.cmdexec("lsof -p %d" % pid)
+ len1 = len([x for x in out.split("\n") if "REG" in x])
+ len2 = len([x for x in out2.split("\n") if "REG" in x])
+ assert len2 < len1 + 3, out2
+
+class TestFDCapture:
+ pytestmark = needsdup
+
+ def test_not_now(self, tmpfile):
+ fd = tmpfile.fileno()
+ cap = py.io.FDCapture(fd, now=False)
+ data = tobytes("hello")
+ os.write(fd, data)
+ f = cap.done()
+ s = f.read()
+ assert not s
+ cap = py.io.FDCapture(fd, now=False)
+ cap.start()
+ os.write(fd, data)
+ f = cap.done()
+ s = f.read()
+ assert s == "hello"
+
+ def test_simple(self, tmpfile):
+ fd = tmpfile.fileno()
+ cap = py.io.FDCapture(fd)
+ data = tobytes("hello")
+ os.write(fd, data)
+ f = cap.done()
+ s = f.read()
+ assert s == "hello"
+ f.close()
+
+ def test_simple_many(self, tmpfile):
+ for i in range(10):
+ self.test_simple(tmpfile)
+
+ def test_simple_many_check_open_files(self, tmpfile):
+ lsof_check(lambda: self.test_simple_many(tmpfile))
+
+ def test_simple_fail_second_start(self, tmpfile):
+ fd = tmpfile.fileno()
+ cap = py.io.FDCapture(fd)
+ f = cap.done()
+ py.test.raises(ValueError, cap.start)
+ f.close()
+
+ def test_stderr(self):
+ cap = py.io.FDCapture(2, patchsys=True)
+ print_("hello", file=sys.stderr)
+ f = cap.done()
+ s = f.read()
+ assert s == "hello\n"
+
+ def test_stdin(self, tmpfile):
+ tmpfile.write(tobytes("3"))
+ tmpfile.seek(0)
+ cap = py.io.FDCapture(0, tmpfile=tmpfile)
+ # check with os.read() directly instead of raw_input(), because
+ # sys.stdin itself may be redirected (as py.test now does by default)
+ x = os.read(0, 100).strip()
+ f = cap.done()
+ assert x == tobytes("3")
+
+ def test_writeorg(self, tmpfile):
+ data1, data2 = tobytes("foo"), tobytes("bar")
+ try:
+ cap = py.io.FDCapture(tmpfile.fileno())
+ tmpfile.write(data1)
+ cap.writeorg(data2)
+ finally:
+ tmpfile.close()
+ f = cap.done()
+ scap = f.read()
+ assert scap == totext(data1)
+ stmp = open(tmpfile.name, 'rb').read()
+ assert stmp == data2
+
+
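+# TestStdCapture exercises sys-level (monkeypatched sys.stdout/stderr)
+# capturing; the subclasses further down rerun the same tests against
+# deferred-start and fd-level variants via an overridden getcapture()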
+class TestStdCapture:
+ def getcapture(self, **kw):
+ return py.io.StdCapture(**kw)
+
+ def test_capturing_done_simple(self):
+ cap = self.getcapture()
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ outfile, errfile = cap.done()
+ s = outfile.read()
+ assert s == "hello"
+ s = errfile.read()
+ assert s == "world"
+
+ def test_capturing_reset_simple(self):
+ cap = self.getcapture()
+ print("hello world")
+ sys.stderr.write("hello error\n")
+ out, err = cap.reset()
+ assert out == "hello world\n"
+ assert err == "hello error\n"
+
+ def test_capturing_readouterr(self):
+ cap = self.getcapture()
+ try:
+ print ("hello world")
+ sys.stderr.write("hello error\n")
+ out, err = cap.readouterr()
+ assert out == "hello world\n"
+ assert err == "hello error\n"
+ sys.stderr.write("error2")
+ finally:
+ out, err = cap.reset()
+ assert err == "error2"
+
+ def test_capturing_readouterr_unicode(self):
+ cap = self.getcapture()
+ print ("hx\xc4\x85\xc4\x87")
+ out, err = cap.readouterr()
+ assert out == py.builtin._totext("hx\xc4\x85\xc4\x87\n", "utf8")
+
+ @py.test.mark.skipif('sys.version_info >= (3,)',
+ reason='text output different for bytes on python3')
+ def test_capturing_readouterr_decode_error_handling(self):
+ cap = self.getcapture()
+ # triggered an internal error in pytest
+ print('\xa6')
+ out, err = cap.readouterr()
+ assert out == py.builtin._totext('\ufffd\n', 'unicode-escape')
+
+ def test_capturing_mixed(self):
+ cap = self.getcapture(mixed=True)
+ sys.stdout.write("hello ")
+ sys.stderr.write("world")
+ sys.stdout.write(".")
+ out, err = cap.reset()
+ assert out.strip() == "hello world."
+ assert not err
+
+ def test_reset_twice_error(self):
+ cap = self.getcapture()
+ print ("hello")
+ out, err = cap.reset()
+ py.test.raises(ValueError, cap.reset)
+ assert out == "hello\n"
+ assert not err
+
+ def test_capturing_modify_sysouterr_in_between(self):
+ oldout = sys.stdout
+ olderr = sys.stderr
+ cap = self.getcapture()
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ sys.stdout = py.io.TextIO()
+ sys.stderr = py.io.TextIO()
+ print ("not seen")
+ sys.stderr.write("not seen\n")
+ out, err = cap.reset()
+ assert out == "hello"
+ assert err == "world"
+ assert sys.stdout == oldout
+ assert sys.stderr == olderr
+
+ def test_capturing_error_recursive(self):
+ cap1 = self.getcapture()
+ print ("cap1")
+ cap2 = self.getcapture()
+ print ("cap2")
+ out2, err2 = cap2.reset()
+ out1, err1 = cap1.reset()
+ assert out1 == "cap1\n"
+ assert out2 == "cap2\n"
+
+ def test_just_out_capture(self):
+ cap = self.getcapture(out=True, err=False)
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ out, err = cap.reset()
+ assert out == "hello"
+ assert not err
+
+ def test_just_err_capture(self):
+ cap = self.getcapture(out=False, err=True)
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ out, err = cap.reset()
+ assert err == "world"
+ assert not out
+
+ def test_stdin_restored(self):
+ old = sys.stdin
+ cap = self.getcapture(in_=True)
+ newstdin = sys.stdin
+ out, err = cap.reset()
+ assert newstdin != sys.stdin
+ assert sys.stdin is old
+
+ def test_stdin_nulled_by_default(self):
+ print ("XXX this test may well hang instead of crashing")
+ print ("XXX which indicates an error in the underlying capturing")
+ print ("XXX mechanisms")
+ cap = self.getcapture()
+ py.test.raises(IOError, "sys.stdin.read()")
+ out, err = cap.reset()
+
+ def test_suspend_resume(self):
+ cap = self.getcapture(out=True, err=False, in_=False)
+ try:
+ print ("hello")
+ sys.stderr.write("error\n")
+ out, err = cap.suspend()
+ assert out == "hello\n"
+ assert not err
+ print ("in between")
+ sys.stderr.write("in between\n")
+ cap.resume()
+ print ("after")
+ sys.stderr.write("error_after\n")
+ finally:
+ out, err = cap.reset()
+ assert out == "after\n"
+ assert not err
+
+class TestStdCaptureNotNow(TestStdCapture):
+ def getcapture(self, **kw):
+ kw['now'] = False
+ cap = py.io.StdCapture(**kw)
+ cap.startall()
+ return cap
+
+class TestStdCaptureFD(TestStdCapture):
+ pytestmark = needsdup
+
+ def getcapture(self, **kw):
+ return py.io.StdCaptureFD(**kw)
+
+ def test_intermingling(self):
+ cap = self.getcapture()
+ oswritebytes(1, "1")
+ sys.stdout.write(str(2))
+ sys.stdout.flush()
+ oswritebytes(1, "3")
+ oswritebytes(2, "a")
+ sys.stderr.write("b")
+ sys.stderr.flush()
+ oswritebytes(2, "c")
+ out, err = cap.reset()
+ assert out == "123"
+ assert err == "abc"
+
+ def test_callcapture(self):
+ def func(x, y):
+ print (x)
+ sys.stderr.write(str(y))
+ return 42
+
+ res, out, err = py.io.StdCaptureFD.call(func, 3, y=4)
+ assert res == 42
+ assert out.startswith("3")
+ assert err.startswith("4")
+
+ def test_many(self, capfd):
+ def f():
+ for i in range(10):
+ cap = py.io.StdCaptureFD()
+ cap.reset()
+ lsof_check(f)
+
+class TestStdCaptureFDNotNow(TestStdCaptureFD):
+ pytestmark = needsdup
+
+ def getcapture(self, **kw):
+ kw['now'] = False
+ cap = py.io.StdCaptureFD(**kw)
+ cap.startall()
+ return cap
+
+@needsdup
+def test_stdcapture_fd_tmpfile(tmpfile):
+ capfd = py.io.StdCaptureFD(out=tmpfile)
+ os.write(1, "hello".encode("ascii"))
+ os.write(2, "world".encode("ascii"))
+ outf, errf = capfd.done()
+ assert outf == tmpfile
+
+class TestStdCaptureFDinvalidFD:
+ pytestmark = needsdup
+ def test_stdcapture_fd_invalid_fd(self, testdir):
+ testdir.makepyfile("""
+ import py, os
+ def test_stdout():
+ os.close(1)
+ cap = py.io.StdCaptureFD(out=True, err=False, in_=False)
+ cap.done()
+ def test_stderr():
+ os.close(2)
+ cap = py.io.StdCaptureFD(out=False, err=True, in_=False)
+ cap.done()
+ def test_stdin():
+ os.close(0)
+ cap = py.io.StdCaptureFD(out=False, err=False, in_=True)
+ cap.done()
+ """)
+ result = testdir.runpytest("--capture=fd")
+ assert result.ret == 0
+ assert result.parseoutcomes()['passed'] == 3
+
+def test_capture_not_started_but_reset():
+ capsys = py.io.StdCapture(now=False)
+ capsys.done()
+ capsys.done()
+ capsys.reset()
+
+@needsdup
+def test_capture_no_sys():
+ capsys = py.io.StdCapture()
+ try:
+ cap = py.io.StdCaptureFD(patchsys=False)
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ oswritebytes(1, "1")
+ oswritebytes(2, "2")
+ out, err = cap.reset()
+ assert out == "1"
+ assert err == "2"
+ finally:
+ capsys.reset()
+
+@needsdup
+def test_callcapture_nofd():
+ def func(x, y):
+ oswritebytes(1, "hello")
+ oswritebytes(2, "hello")
+ print (x)
+ sys.stderr.write(str(y))
+ return 42
+
+ capfd = py.io.StdCaptureFD(patchsys=False)
+ try:
+ res, out, err = py.io.StdCapture.call(func, 3, y=4)
+ finally:
+ capfd.reset()
+ assert res == 42
+ assert out.startswith("3")
+ assert err.startswith("4")
+
+@needsdup
+@py.test.mark.parametrize('use', [True, False])
+def test_fdcapture_tmpfile_remains_the_same(tmpfile, use):
+ if not use:
+ tmpfile = True
+ cap = py.io.StdCaptureFD(out=False, err=tmpfile, now=False)
+ cap.startall()
+ capfile = cap.err.tmpfile
+ cap.suspend()
+ cap.resume()
+ capfile2 = cap.err.tmpfile
+ assert capfile2 == capfile
+
+@py.test.mark.parametrize('method', ['StdCapture', 'StdCaptureFD'])
+def test_capturing_and_logging_fundamentals(testdir, method):
+ if method == "StdCaptureFD" and not hasattr(os, 'dup'):
+ py.test.skip("need os.dup")
+ # here we check a fundamental feature: captured and logged output stay
+ # consistent across capture suspend/resume cycles
+ p = testdir.makepyfile("""
+ import sys, os
+ import py, logging
+ cap = py.io.%s(out=False, in_=False)
+
+ logging.warn("hello1")
+ outerr = cap.suspend()
+ print ("suspend, captured %%s" %%(outerr,))
+ logging.warn("hello2")
+
+ cap.resume()
+ logging.warn("hello3")
+
+ outerr = cap.suspend()
+ print ("suspend2, captured %%s" %% (outerr,))
+ """ % (method,))
+ result = testdir.runpython(p)
+ result.stdout.fnmatch_lines([
+ "suspend, captured*hello1*",
+ "suspend2, captured*hello2*WARNING:root:hello3*",
+ ])
+ assert "atexit" not in result.stderr.str()
diff --git a/third_party/python/py/testing/io_/test_saferepr.py b/third_party/python/py/testing/io_/test_saferepr.py
new file mode 100644
index 0000000000..97be1416fe
--- /dev/null
+++ b/third_party/python/py/testing/io_/test_saferepr.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import generators
+import py
+import sys
+
+saferepr = py.io.saferepr
+
+class TestSafeRepr:
+ def test_simple_repr(self):
+ assert saferepr(1) == '1'
+ assert saferepr(None) == 'None'
+
+ def test_maxsize(self):
+ s = saferepr('x'*50, maxsize=25)
+ assert len(s) == 25
+ expected = repr('x'*10 + '...' + 'x'*10)
+ assert s == expected
+
+ def test_maxsize_error_on_instance(self):
+ class A:
+ def __repr__(self):
+ raise ValueError('...')
+
+ s = saferepr(('*'*50, A()), maxsize=25)
+ assert len(s) == 25
+ assert s[0] == '(' and s[-1] == ')'
+
+ def test_exceptions(self):
+ class BrokenRepr:
+ def __init__(self, ex):
+ self.ex = ex
+ foo = 0
+ def __repr__(self):
+ raise self.ex
+ class BrokenReprException(Exception):
+ __str__ = None
+ __repr__ = None
+ assert 'Exception' in saferepr(BrokenRepr(Exception("broken")))
+ s = saferepr(BrokenReprException("really broken"))
+ assert 'TypeError' in s
+ assert 'TypeError' in saferepr(BrokenRepr("string"))
+
+ s2 = saferepr(BrokenRepr(BrokenReprException('omg even worse')))
+ assert 'NameError' not in s2
+ assert 'unknown' in s2
+
+ def test_big_repr(self):
+ from py._io.saferepr import SafeRepr
+ assert len(saferepr(range(1000))) <= \
+ len('[' + SafeRepr().maxlist * "1000" + ']')
+
+ def test_repr_on_newstyle(self):
+ class Function(object):
+ def __repr__(self):
+ return "<%s>" %(self.name)
+ try:
+ s = saferepr(Function())
+ except Exception:
+ py.test.fail("saferepr failed for newstyle class")
+
+ def test_unicode(self):
+ val = py.builtin._totext('£€', 'utf-8')
+ reprval = py.builtin._totext("'£€'", 'utf-8')
+ assert saferepr(val) == reprval
+
+def test_unicode_handling():
+ value = py.builtin._totext('\xc4\x85\xc4\x87\n', 'utf-8').encode('utf8')
+ def f():
+ raise Exception(value)
+ excinfo = py.test.raises(Exception, f)
+ s = str(excinfo)
+ if sys.version_info[0] < 3:
+ u = unicode(excinfo)
+
diff --git a/third_party/python/py/testing/io_/test_terminalwriter.py b/third_party/python/py/testing/io_/test_terminalwriter.py
new file mode 100644
index 0000000000..7e9ebf409e
--- /dev/null
+++ b/third_party/python/py/testing/io_/test_terminalwriter.py
@@ -0,0 +1,292 @@
+
+import py
+import os, sys
+from py._io import terminalwriter
+import codecs
+import pytest
+
+def test_get_terminal_width():
+ x = py.io.get_terminal_width
+ assert x == terminalwriter.get_terminal_width
+
+def test_getdimensions(monkeypatch):
+ fcntl = py.test.importorskip("fcntl")
+ import struct
+ l = []
+ monkeypatch.setattr(fcntl, 'ioctl', lambda *args: l.append(args))
+ try:
+ terminalwriter._getdimensions()
+ except (TypeError, struct.error):
+ pass
+ assert len(l) == 1
+ assert l[0][0] == 1
+
+def test_terminal_width_COLUMNS(monkeypatch):
+ """ Dummy test for get_terminal_width
+ """
+ fcntl = py.test.importorskip("fcntl")
+ monkeypatch.setattr(fcntl, 'ioctl', lambda *args: int('x'))
+ monkeypatch.setenv('COLUMNS', '42')
+ assert terminalwriter.get_terminal_width() == 42
+ monkeypatch.delenv('COLUMNS', raising=False)
+
+def test_terminalwriter_defaultwidth_80(monkeypatch):
+ monkeypatch.setattr(terminalwriter, '_getdimensions', lambda: 0/0)
+ monkeypatch.delenv('COLUMNS', raising=False)
+ tw = py.io.TerminalWriter()
+ assert tw.fullwidth == 80
+
+def test_terminalwriter_getdimensions_bogus(monkeypatch):
+ monkeypatch.setattr(terminalwriter, '_getdimensions', lambda: (10,10))
+ monkeypatch.delenv('COLUMNS', raising=False)
+ tw = py.io.TerminalWriter()
+ assert tw.fullwidth == 80
+
+def test_terminalwriter_getdimensions_emacs(monkeypatch):
+ # emacs terminal returns (0,0) but sets COLUMNS properly
+ monkeypatch.setattr(terminalwriter, '_getdimensions', lambda: (0,0))
+ monkeypatch.setenv('COLUMNS', '42')
+ tw = py.io.TerminalWriter()
+ assert tw.fullwidth == 42
+
+def test_terminalwriter_computes_width(monkeypatch):
+ monkeypatch.setattr(terminalwriter, 'get_terminal_width', lambda: 42)
+ tw = py.io.TerminalWriter()
+ assert tw.fullwidth == 42
+
+def test_terminalwriter_default_instantiation():
+ tw = py.io.TerminalWriter(stringio=True)
+ assert hasattr(tw, 'stringio')
+
+def test_terminalwriter_dumb_term_no_markup(monkeypatch):
+ monkeypatch.setattr(os, 'environ', {'TERM': 'dumb', 'PATH': ''})
+ class MyFile:
+ closed = False
+ def isatty(self):
+ return True
+ monkeypatch.setattr(sys, 'stdout', MyFile())
+ try:
+ assert sys.stdout.isatty()
+ tw = py.io.TerminalWriter()
+ assert not tw.hasmarkup
+ finally:
+ monkeypatch.undo()
+
+def test_terminalwriter_file_unicode(tmpdir):
+ f = codecs.open(str(tmpdir.join("xyz")), "wb", "utf8")
+ tw = py.io.TerminalWriter(file=f)
+ assert tw.encoding == "utf8"
+
+def test_unicode_encoding():
+ msg = py.builtin._totext('b\u00f6y', 'utf8')
+ for encoding in 'utf8', 'latin1':
+ l = []
+ tw = py.io.TerminalWriter(l.append, encoding=encoding)
+ tw.line(msg)
+ assert l[0].strip() == msg.encode(encoding)
+
+@pytest.mark.parametrize("encoding", ["ascii"])
+def test_unicode_on_file_with_ascii_encoding(tmpdir, monkeypatch, encoding):
+ msg = py.builtin._totext('hell\xf6', "latin1")
+ #pytest.raises(UnicodeEncodeError, lambda: bytes(msg))
+ f = codecs.open(str(tmpdir.join("x")), "w", encoding)
+ tw = py.io.TerminalWriter(f)
+ tw.line(msg)
+ f.close()
+ s = tmpdir.join("x").open("rb").read().strip()
+ assert encoding == "ascii"
+ assert s == msg.encode("unicode-escape")
+
+
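+# on win32 the writer emits one character less per full-width separator line
+# (presumably to avoid the console auto-wrapping); the sep tests below
+# subtract this flag from the expected width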
+win32 = int(sys.platform == "win32")
+class TestTerminalWriter:
+ def pytest_generate_tests(self, metafunc):
+ if "tw" in metafunc.funcargnames:
+ metafunc.addcall(id="path", param="path")
+ metafunc.addcall(id="stringio", param="stringio")
+ metafunc.addcall(id="callable", param="callable")
+ def pytest_funcarg__tw(self, request):
+ if request.param == "path":
+ tmpdir = request.getfuncargvalue("tmpdir")
+ p = tmpdir.join("tmpfile")
+ f = codecs.open(str(p), 'w+', encoding='utf8')
+ tw = py.io.TerminalWriter(f)
+ def getlines():
+ tw._file.flush()
+ return codecs.open(str(p), 'r',
+ encoding='utf8').readlines()
+ elif request.param == "stringio":
+ tw = py.io.TerminalWriter(stringio=True)
+ def getlines():
+ tw.stringio.seek(0)
+ return tw.stringio.readlines()
+ elif request.param == "callable":
+ writes = []
+ tw = py.io.TerminalWriter(writes.append)
+ def getlines():
+ io = py.io.TextIO()
+ io.write("".join(writes))
+ io.seek(0)
+ return io.readlines()
+ tw.getlines = getlines
+ tw.getvalue = lambda: "".join(getlines())
+ return tw
+
+ def test_line(self, tw):
+ tw.line("hello")
+ l = tw.getlines()
+ assert len(l) == 1
+ assert l[0] == "hello\n"
+
+ def test_line_unicode(self, tw):
+ for encoding in 'utf8', 'latin1':
+ tw._encoding = encoding
+ msg = py.builtin._totext('b\u00f6y', 'utf8')
+ tw.line(msg)
+ l = tw.getlines()
+ assert l[0] == msg + "\n"
+
+ def test_sep_no_title(self, tw):
+ tw.sep("-", fullwidth=60)
+ l = tw.getlines()
+ assert len(l) == 1
+ assert l[0] == "-" * (60-win32) + "\n"
+
+ def test_sep_with_title(self, tw):
+ tw.sep("-", "hello", fullwidth=60)
+ l = tw.getlines()
+ assert len(l) == 1
+ assert l[0] == "-" * 26 + " hello " + "-" * (27-win32) + "\n"
+
+ @py.test.mark.skipif("sys.platform == 'win32'")
+ def test__escaped(self, tw):
+ text2 = tw._escaped("hello", (31))
+ assert text2.find("hello") != -1
+
+ @py.test.mark.skipif("sys.platform == 'win32'")
+ def test_markup(self, tw):
+ for bold in (True, False):
+ for color in ("red", "green"):
+ text2 = tw.markup("hello", **{color: True, 'bold': bold})
+ assert text2.find("hello") != -1
+ py.test.raises(ValueError, "tw.markup('x', wronkw=3)")
+ py.test.raises(ValueError, "tw.markup('x', wronkw=0)")
+
+ def test_line_write_markup(self, tw):
+ tw.hasmarkup = True
+ tw.line("x", bold=True)
+ tw.write("x\n", red=True)
+ l = tw.getlines()
+ if sys.platform != "win32":
+ assert len(l[0]) >= 2, l
+ assert len(l[1]) >= 2, l
+
+ def test_attr_fullwidth(self, tw):
+ tw.sep("-", "hello", fullwidth=70)
+ tw.fullwidth = 70
+ tw.sep("-", "hello")
+ l = tw.getlines()
+ assert len(l[0]) == len(l[1])
+
+ def test_reline(self, tw):
+ tw.line("hello")
+ tw.hasmarkup = False
+ pytest.raises(ValueError, lambda: tw.reline("x"))
+ tw.hasmarkup = True
+ tw.reline("0 1 2")
+ tw.getlines()
+ l = tw.getvalue().split("\n")
+ assert len(l) == 2
+ tw.reline("0 1 3")
+ l = tw.getvalue().split("\n")
+ assert len(l) == 2
+ assert l[1].endswith("0 1 3\r")
+ tw.line("so")
+ l = tw.getvalue().split("\n")
+ assert len(l) == 3
+ assert l[-1] == ""
+ assert l[1] == ("0 1 2\r0 1 3\rso ")
+ assert l[0] == "hello"
+
+
+def test_terminal_with_callable_write_and_flush():
+ l = set()
+ class fil:
+ flush = lambda self: l.add("1")
+ write = lambda self, x: l.add("1")
+ __call__ = lambda self, x: l.add("2")
+
+ tw = py.io.TerminalWriter(fil())
+ tw.line("hello")
+ assert l == set(["1"])
+ del fil.flush
+ l.clear()
+ tw = py.io.TerminalWriter(fil())
+ tw.line("hello")
+ assert l == set(["2"])
+
+
+def test_chars_on_current_line():
+ tw = py.io.TerminalWriter(stringio=True)
+
+ written = []
+
+ def write_and_check(s, expected):
+ tw.write(s, bold=True)
+ written.append(s)
+ assert tw.chars_on_current_line == expected
+ assert tw.stringio.getvalue() == ''.join(written)
+
+ write_and_check('foo', 3)
+ write_and_check('bar', 6)
+ write_and_check('\n', 0)
+ write_and_check('\n', 0)
+ write_and_check('\n\n\n', 0)
+ write_and_check('\nfoo', 3)
+ write_and_check('\nfbar\nhello', 5)
+ write_and_check('10', 7)
+
+
+@pytest.mark.skipif(sys.platform == "win32", reason="win32 has no native ansi")
+def test_attr_hasmarkup():
+ tw = py.io.TerminalWriter(stringio=True)
+ assert not tw.hasmarkup
+ tw.hasmarkup = True
+ tw.line("hello", bold=True)
+ s = tw.stringio.getvalue()
+ assert len(s) > len("hello\n")
+ assert '\x1b[1m' in s
+ assert '\x1b[0m' in s
+
+@pytest.mark.skipif(sys.platform == "win32", reason="win32 has no native ansi")
+def test_ansi_print():
+ # we have no easy way to construct a file that
+ # represents a terminal
+ f = py.io.TextIO()
+ f.isatty = lambda: True
+ py.io.ansi_print("hello", 0x32, file=f)
+ text2 = f.getvalue()
+ assert text2.find("hello") != -1
+ assert len(text2) >= len("hello\n")
+ assert '\x1b[50m' in text2
+ assert '\x1b[0m' in text2
+
+def test_should_do_markup_PY_COLORS_eq_1(monkeypatch):
+ monkeypatch.setitem(os.environ, 'PY_COLORS', '1')
+ tw = py.io.TerminalWriter(stringio=True)
+ assert tw.hasmarkup
+ tw.line("hello", bold=True)
+ s = tw.stringio.getvalue()
+ assert len(s) > len("hello\n")
+ assert '\x1b[1m' in s
+ assert '\x1b[0m' in s
+
+def test_should_do_markup_PY_COLORS_eq_0(monkeypatch):
+ monkeypatch.setitem(os.environ, 'PY_COLORS', '0')
+ f = py.io.TextIO()
+ f.isatty = lambda: True
+ tw = py.io.TerminalWriter(file=f)
+ assert not tw.hasmarkup
+ tw.line("hello", bold=True)
+ s = f.getvalue()
+ assert s == "hello\n"
diff --git a/third_party/python/py/testing/log/__init__.py b/third_party/python/py/testing/log/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/py/testing/log/__init__.py
diff --git a/third_party/python/py/testing/log/test_log.py b/third_party/python/py/testing/log/test_log.py
new file mode 100644
index 0000000000..5c706d9b6a
--- /dev/null
+++ b/third_party/python/py/testing/log/test_log.py
@@ -0,0 +1,191 @@
+import py
+
+from py._log.log import default_keywordmapper
+
+callcapture = py.io.StdCapture.call
+
+
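+# snapshot the global keyword-mapper state so that consumers registered by
+# these tests do not leak into other test modules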
+def setup_module(mod):
+ mod._oldstate = default_keywordmapper.getstate()
+
+def teardown_module(mod):
+ default_keywordmapper.setstate(mod._oldstate)
+
+class TestLogProducer:
+ def setup_method(self, meth):
+ from py._log.log import default_keywordmapper
+ default_keywordmapper.setstate(_oldstate)
+
+ def test_getstate_setstate(self):
+ state = py.log._getstate()
+ py.log.setconsumer("hello", [].append)
+ state2 = py.log._getstate()
+ assert state2 != state
+ py.log._setstate(state)
+ state3 = py.log._getstate()
+ assert state3 == state
+
+ def test_producer_repr(self):
+ d = py.log.Producer("default")
+ assert repr(d).find('default') != -1
+
+ def test_produce_one_keyword(self):
+ l = []
+ py.log.setconsumer('s1', l.append)
+ py.log.Producer('s1')("hello world")
+ assert len(l) == 1
+ msg = l[0]
+ assert msg.content().startswith('hello world')
+ assert msg.prefix() == '[s1] '
+ assert str(msg) == "[s1] hello world"
+
+ def test_producer_class(self):
+ p = py.log.Producer('x1')
+ l = []
+ py.log.setconsumer(p._keywords, l.append)
+ p("hello")
+ assert len(l) == 1
+ assert len(l[0].keywords) == 1
+ assert 'x1' == l[0].keywords[0]
+
+ def test_producer_caching(self):
+ p = py.log.Producer('x1')
+ x2 = p.x2
+ assert x2 is p.x2
+
+class TestLogConsumer:
+ def setup_method(self, meth):
+ default_keywordmapper.setstate(_oldstate)
+ def test_log_none(self):
+ log = py.log.Producer("XXX")
+ l = []
+ py.log.setconsumer('XXX', l.append)
+ log("1")
+ assert l
+ l[:] = []
+ py.log.setconsumer('XXX', None)
+ log("2")
+ assert not l
+
+ def test_log_default_stderr(self):
+ res, out, err = callcapture(py.log.Producer("default"), "hello")
+ assert err.strip() == "[default] hello"
+
+ def test_simple_consumer_match(self):
+ l = []
+ py.log.setconsumer("x1", l.append)
+ p = py.log.Producer("x1 x2")
+ p("hello")
+ assert l
+ assert l[0].content() == "hello"
+
+ def test_simple_consumer_match_2(self):
+ l = []
+ p = py.log.Producer("x1 x2")
+ py.log.setconsumer(p._keywords, l.append)
+ p("42")
+ assert l
+ assert l[0].content() == "42"
+
+ def test_no_auto_producer(self):
+ p = py.log.Producer('x')
+ py.test.raises(AttributeError, "p._x")
+ py.test.raises(AttributeError, "p.x_y")
+
+ def test_setconsumer_with_producer(self):
+ l = []
+ p = py.log.Producer("hello")
+ py.log.setconsumer(p, l.append)
+ p("world")
+ assert str(l[0]) == "[hello] world"
+
+ def test_multi_consumer(self):
+ l = []
+ py.log.setconsumer("x1", l.append)
+ py.log.setconsumer("x1 x2", None)
+ p = py.log.Producer("x1 x2")
+ p("hello")
+ assert not l
+ py.log.Producer("x1")("hello")
+ assert l
+ assert l[0].content() == "hello"
+
+ def test_log_stderr(self):
+ py.log.setconsumer("xyz", py.log.STDOUT)
+ res, out, err = callcapture(py.log.Producer("xyz"), "hello")
+ assert not err
+ assert out.strip() == '[xyz] hello'
+
+ def test_log_file(self, tmpdir):
+ customlog = tmpdir.join('log.out')
+ py.log.setconsumer("default", open(str(customlog), 'w', 1))
+ py.log.Producer("default")("hello world #1")
+ assert customlog.readlines() == ['[default] hello world #1\n']
+
+ py.log.setconsumer("default", py.log.Path(customlog, buffering=False))
+ py.log.Producer("default")("hello world #2")
+ res = customlog.readlines()
+ assert res == ['[default] hello world #2\n'] # no append by default!
+
+ def test_log_file_append_mode(self, tmpdir):
+ logfilefn = tmpdir.join('log_append.out')
+
+ # append=True keeps the existing contents when a fresh Path consumer is attached
+ py.log.setconsumer("default", py.log.Path(logfilefn, append=True,
+ buffering=0))
+ assert logfilefn.check()
+ py.log.Producer("default")("hello world #1")
+ lines = logfilefn.readlines()
+ assert lines == ['[default] hello world #1\n']
+ py.log.setconsumer("default", py.log.Path(logfilefn, append=True,
+ buffering=0))
+ py.log.Producer("default")("hello world #1")
+ lines = logfilefn.readlines()
+ assert lines == ['[default] hello world #1\n',
+ '[default] hello world #1\n']
+
+ def test_log_file_delayed_create(self, tmpdir):
+ logfilefn = tmpdir.join('log_create.out')
+
+ py.log.setconsumer("default", py.log.Path(logfilefn,
+ delayed_create=True, buffering=0))
+ assert not logfilefn.check()
+ py.log.Producer("default")("hello world #1")
+ lines = logfilefn.readlines()
+ assert lines == ['[default] hello world #1\n']
+
+ def test_keyword_based_log_files(self, tmpdir):
+ logfiles = []
+ keywords = 'k1 k2 k3'.split()
+ for key in keywords:
+ path = tmpdir.join(key)
+ py.log.setconsumer(key, py.log.Path(path, buffering=0))
+
+ py.log.Producer('k1')('1')
+ py.log.Producer('k2')('2')
+ py.log.Producer('k3')('3')
+
+ for key in keywords:
+ path = tmpdir.join(key)
+ assert path.read().strip() == '[%s] %s' % (key, key[-1])
+
+ # disabled for now; the syslog log file can usually be read only by root
+ # I manually inspected /var/log/messages and the entries were there
+ def no_test_log_syslog(self):
+ py.log.setconsumer("default", py.log.Syslog())
+ py.log.default("hello world #1")
+
+ # disabled for now until I figure out how to read entries in the
+ # Event Logs on Windows
+ # I manually inspected the Application Log and the entries were there
+ def no_test_log_winevent(self):
+ py.log.setconsumer("default", py.log.WinEvent())
+ py.log.default("hello world #1")
+
+ # disabled for now until I figure out how to properly pass the parameters
+ def no_test_log_email(self):
+ py.log.setconsumer("default", py.log.Email(mailhost="gheorghiu.net",
+ fromaddr="grig",
+ toaddrs="grig",
+ subject = "py.log email"))
+ py.log.default("hello world #1")
diff --git a/third_party/python/py/testing/log/test_warning.py b/third_party/python/py/testing/log/test_warning.py
new file mode 100644
index 0000000000..a460c319e8
--- /dev/null
+++ b/third_party/python/py/testing/log/test_warning.py
@@ -0,0 +1,86 @@
+import sys
+from distutils.version import LooseVersion
+
+import pytest
+
+import py
+
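+# path of this test file, normalized to the .py extension in case __file__
+# points at a compiled .pyc; used below to check reported warning locations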
+mypath = py.path.local(__file__).new(ext=".py")
+
+
+win = sys.platform.startswith('win')
+pytestmark = pytest.mark.skipif(win and LooseVersion(pytest.__version__) >= LooseVersion('3.1'),
+ reason='apiwarn is not compatible with pytest >= 3.1 (#162)')
+
+
+@pytest.mark.xfail
+def test_forwarding_to_warnings_module():
+ pytest.deprecated_call(py.log._apiwarn, "1.3", "..")
+
+def test_apiwarn_functional(recwarn):
+ capture = py.io.StdCapture()
+ py.log._apiwarn("x.y.z", "something", stacklevel=1)
+ out, err = capture.reset()
+ py.builtin.print_("out", out)
+ py.builtin.print_("err", err)
+ assert err.find("x.y.z") != -1
+ lno = py.code.getrawcode(test_apiwarn_functional).co_firstlineno + 2
+ exp = "%s:%s" % (mypath, lno)
+ assert err.find(exp) != -1
+
+def test_stacklevel(recwarn):
+ def f():
+ py.log._apiwarn("x", "some", stacklevel=2)
+ # 3
+ # 4
+ capture = py.io.StdCapture()
+ f()
+ out, err = capture.reset()
+ lno = py.code.getrawcode(test_stacklevel).co_firstlineno + 6
+ warning = str(err)
+ assert warning.find(":%s" % lno) != -1
+
+def test_stacklevel_initpkg_with_resolve(testdir, recwarn):
+ testdir.makepyfile(modabc="""
+ import py
+ def f():
+ py.log._apiwarn("x", "some", stacklevel="apipkg123")
+ """)
+ testdir.makepyfile(apipkg123="""
+ def __getattr__():
+ import modabc
+ modabc.f()
+ """)
+ p = testdir.makepyfile("""
+ import apipkg123
+ apipkg123.__getattr__()
+ """)
+ capture = py.io.StdCapture()
+ p.pyimport()
+ out, err = capture.reset()
+ warning = str(err)
+ loc = 'test_stacklevel_initpkg_with_resolve.py:2'
+ assert warning.find(loc) != -1
+
+def test_stacklevel_initpkg_no_resolve(recwarn):
+ def f():
+ py.log._apiwarn("x", "some", stacklevel="apipkg")
+ capture = py.io.StdCapture()
+ f()
+ out, err = capture.reset()
+ lno = py.code.getrawcode(test_stacklevel_initpkg_no_resolve).co_firstlineno + 2
+ warning = str(err)
+ assert warning.find(":%s" % lno) != -1
+
+
+def test_function(recwarn):
+ capture = py.io.StdCapture()
+ py.log._apiwarn("x.y.z", "something", function=test_function)
+ out, err = capture.reset()
+ py.builtin.print_("out", out)
+ py.builtin.print_("err", err)
+ assert err.find("x.y.z") != -1
+ lno = py.code.getrawcode(test_function).co_firstlineno
+ exp = "%s:%s" % (mypath, lno)
+ assert err.find(exp) != -1
+
diff --git a/third_party/python/py/testing/path/common.py b/third_party/python/py/testing/path/common.py
new file mode 100644
index 0000000000..d69a1c39d0
--- /dev/null
+++ b/third_party/python/py/testing/path/common.py
@@ -0,0 +1,492 @@
+import py
+import sys
+
+import pytest
+
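+# shared path-API test mixin: concrete test modules supply a ``path1``
+# fixture pointing at a pre-populated sample directory tree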
+class CommonFSTests(object):
+ def test_constructor_equality(self, path1):
+ p = path1.__class__(path1)
+ assert p == path1
+
+ def test_eq_nonstring(self, path1):
+ p1 = path1.join('sampledir')
+ p2 = path1.join('sampledir')
+ assert p1 == p2
+
+ def test_new_identical(self, path1):
+ assert path1 == path1.new()
+
+ def test_join(self, path1):
+ p = path1.join('sampledir')
+ strp = str(p)
+ assert strp.endswith('sampledir')
+ assert strp.startswith(str(path1))
+
+ def test_join_normalized(self, path1):
+ newpath = path1.join(path1.sep+'sampledir')
+ strp = str(newpath)
+ assert strp.endswith('sampledir')
+ assert strp.startswith(str(path1))
+ newpath = path1.join((path1.sep*2) + 'sampledir')
+ strp = str(newpath)
+ assert strp.endswith('sampledir')
+ assert strp.startswith(str(path1))
+
+ def test_join_noargs(self, path1):
+ newpath = path1.join()
+ assert path1 == newpath
+
+ def test_add_something(self, path1):
+ p = path1.join('sample')
+ p = p + 'dir'
+ assert p.check()
+ assert p.exists()
+ assert p.isdir()
+ assert not p.isfile()
+
+ def test_parts(self, path1):
+ newpath = path1.join('sampledir', 'otherfile')
+ par = newpath.parts()[-3:]
+ assert par == [path1, path1.join('sampledir'), newpath]
+
+ revpar = newpath.parts(reverse=True)[:3]
+ assert revpar == [newpath, path1.join('sampledir'), path1]
+
+ def test_common(self, path1):
+ other = path1.join('sampledir')
+ x = other.common(path1)
+ assert x == path1
+
+ #def test_parents_nonexisting_file(self, path1):
+ # newpath = path1 / 'dirnoexist' / 'nonexisting file'
+ # par = list(newpath.parents())
+ # assert par[:2] == [path1 / 'dirnoexist', path1]
+
+ def test_basename_checks(self, path1):
+ newpath = path1.join('sampledir')
+ assert newpath.check(basename='sampledir')
+ assert newpath.check(notbasename='xyz')
+ assert newpath.basename == 'sampledir'
+
+ def test_basename(self, path1):
+ newpath = path1.join('sampledir')
+ assert newpath.check(basename='sampledir')
+ assert newpath.basename, 'sampledir'
+
+ def test_dirname(self, path1):
+ newpath = path1.join('sampledir')
+ assert newpath.dirname == str(path1)
+
+ def test_dirpath(self, path1):
+ newpath = path1.join('sampledir')
+ assert newpath.dirpath() == path1
+
+ def test_dirpath_with_args(self, path1):
+ newpath = path1.join('sampledir')
+ assert newpath.dirpath('x') == path1.join('x')
+
+ def test_newbasename(self, path1):
+ newpath = path1.join('samplefile')
+ newbase = newpath.new(basename="samplefile2")
+ assert newbase.basename == "samplefile2"
+ assert newbase.dirpath() == newpath.dirpath()
+
+ def test_not_exists(self, path1):
+ assert not path1.join('does_not_exist').check()
+ assert path1.join('does_not_exist').check(exists=0)
+
+ def test_exists(self, path1):
+ assert path1.join("samplefile").check()
+ assert path1.join("samplefile").check(exists=1)
+ assert path1.join("samplefile").exists()
+ assert path1.join("samplefile").isfile()
+ assert not path1.join("samplefile").isdir()
+
+ def test_dir(self, path1):
+ #print repr(path1.join("sampledir"))
+ assert path1.join("sampledir").check(dir=1)
+ assert path1.join('samplefile').check(notdir=1)
+ assert not path1.join("samplefile").check(dir=1)
+ assert path1.join("samplefile").exists()
+ assert not path1.join("samplefile").isdir()
+ assert path1.join("samplefile").isfile()
+
+ def test_fnmatch_file(self, path1):
+ assert path1.join("samplefile").check(fnmatch='s*e')
+ assert path1.join("samplefile").fnmatch('s*e')
+ assert not path1.join("samplefile").fnmatch('s*x')
+ assert not path1.join("samplefile").check(fnmatch='s*x')
+
+ #def test_fnmatch_dir(self, path1):
+
+ # pattern = path1.sep.join(['s*file'])
+ # sfile = path1.join("samplefile")
+ # assert sfile.check(fnmatch=pattern)
+
+ def test_relto(self, path1):
+ l=path1.join("sampledir", "otherfile")
+ assert l.relto(path1) == l.sep.join(["sampledir", "otherfile"])
+ assert l.check(relto=path1)
+ assert path1.check(notrelto=l)
+ assert not path1.check(relto=l)
+
+ def test_bestrelpath(self, path1):
+ curdir = path1
+ sep = curdir.sep
+ s = curdir.bestrelpath(curdir)
+ assert s == "."
+ s = curdir.bestrelpath(curdir.join("hello", "world"))
+ assert s == "hello" + sep + "world"
+
+ s = curdir.bestrelpath(curdir.dirpath().join("sister"))
+ assert s == ".." + sep + "sister"
+ assert curdir.bestrelpath(curdir.dirpath()) == ".."
+
+ assert curdir.bestrelpath("hello") == "hello"
+
+ def test_relto_not_relative(self, path1):
+ l1=path1.join("bcde")
+ l2=path1.join("b")
+ assert not l1.relto(l2)
+ assert not l2.relto(l1)
+
+ @py.test.mark.xfail("sys.platform.startswith('java')")
+ def test_listdir(self, path1):
+ l = path1.listdir()
+ assert path1.join('sampledir') in l
+ assert path1.join('samplefile') in l
+ py.test.raises(py.error.ENOTDIR,
+ "path1.join('samplefile').listdir()")
+
+ def test_listdir_fnmatchstring(self, path1):
+ l = path1.listdir('s*dir')
+ assert len(l)
+ assert l[0], path1.join('sampledir')
+
+ def test_listdir_filter(self, path1):
+ l = path1.listdir(lambda x: x.check(dir=1))
+ assert path1.join('sampledir') in l
+ assert not path1.join('samplefile') in l
+
+ def test_listdir_sorted(self, path1):
+ l = path1.listdir(lambda x: x.check(basestarts="sample"), sort=True)
+ assert path1.join('sampledir') == l[0]
+ assert path1.join('samplefile') == l[1]
+ assert path1.join('samplepickle') == l[2]
+
+ def test_visit_nofilter(self, path1):
+ l = []
+ for i in path1.visit():
+ l.append(i.relto(path1))
+ assert "sampledir" in l
+ assert path1.sep.join(["sampledir", "otherfile"]) in l
+
+ def test_visit_norecurse(self, path1):
+ l = []
+ for i in path1.visit(None, lambda x: x.basename != "sampledir"):
+ l.append(i.relto(path1))
+ assert "sampledir" in l
+ assert not path1.sep.join(["sampledir", "otherfile"]) in l
+
+ @pytest.mark.parametrize('fil', ['*dir', u'*dir',
+ pytest.mark.skip("sys.version_info <"
+ " (3,6)")(b'*dir')])
+ def test_visit_filterfunc_is_string(self, path1, fil):
+ l = []
+ for i in path1.visit(fil):
+ l.append(i.relto(path1))
+ assert len(l), 2
+ assert "sampledir" in l
+ assert "otherdir" in l
+
+ @py.test.mark.xfail("sys.platform.startswith('java')")
+ def test_visit_ignore(self, path1):
+ p = path1.join('nonexisting')
+ assert list(p.visit(ignore=py.error.ENOENT)) == []
+
+ def test_visit_endswith(self, path1):
+ l = []
+ for i in path1.visit(lambda x: x.check(endswith="file")):
+ l.append(i.relto(path1))
+ assert path1.sep.join(["sampledir", "otherfile"]) in l
+ assert "samplefile" in l
+
+ def test_endswith(self, path1):
+ assert path1.check(notendswith='.py')
+ x = path1.join('samplefile')
+ assert x.check(endswith='file')
+
+ def test_cmp(self, path1):
+ path1 = path1.join('samplefile')
+ path2 = path1.join('samplefile2')
+ assert (path1 < path2) == ('samplefile' < 'samplefile2')
+ assert not (path1 < path1)
+
+ def test_simple_read(self, path1):
+ x = path1.join('samplefile').read('r')
+ assert x == 'samplefile\n'
+
+ def test_join_div_operator(self, path1):
+ newpath = path1 / '/sampledir' / '/test//'
+ newpath2 = path1.join('sampledir', 'test')
+ assert newpath == newpath2
+
+ def test_ext(self, path1):
+ newpath = path1.join('sampledir.ext')
+ assert newpath.ext == '.ext'
+ newpath = path1.join('sampledir')
+ assert not newpath.ext
+
+ def test_purebasename(self, path1):
+ newpath = path1.join('samplefile.py')
+ assert newpath.purebasename == 'samplefile'
+
+ def test_multiple_parts(self, path1):
+ newpath = path1.join('samplefile.py')
+ dirname, purebasename, basename, ext = newpath._getbyspec(
+ 'dirname,purebasename,basename,ext')
+ assert str(path1).endswith(dirname) # be careful with win32 'drive'
+ assert purebasename == 'samplefile'
+ assert basename == 'samplefile.py'
+ assert ext == '.py'
+
+ def test_dotted_name_ext(self, path1):
+ newpath = path1.join('a.b.c')
+ ext = newpath.ext
+ assert ext == '.c'
+ assert newpath.ext == '.c'
+
+ def test_newext(self, path1):
+ newpath = path1.join('samplefile.py')
+ newext = newpath.new(ext='.txt')
+ assert newext.basename == "samplefile.txt"
+ assert newext.purebasename == "samplefile"
+
+ def test_readlines(self, path1):
+ fn = path1.join('samplefile')
+ contents = fn.readlines()
+ assert contents == ['samplefile\n']
+
+ def test_readlines_nocr(self, path1):
+ fn = path1.join('samplefile')
+ contents = fn.readlines(cr=0)
+ assert contents == ['samplefile', '']
+
+ def test_file(self, path1):
+ assert path1.join('samplefile').check(file=1)
+
+ def test_not_file(self, path1):
+ assert not path1.join("sampledir").check(file=1)
+ assert path1.join("sampledir").check(file=0)
+
+ def test_non_existent(self, path1):
+ assert path1.join("sampledir.nothere").check(dir=0)
+ assert path1.join("sampledir.nothere").check(file=0)
+ assert path1.join("sampledir.nothere").check(notfile=1)
+ assert path1.join("sampledir.nothere").check(notdir=1)
+ assert path1.join("sampledir.nothere").check(notexists=1)
+ assert not path1.join("sampledir.nothere").check(notfile=0)
+
+ # pattern = path1.sep.join(['s*file'])
+ # sfile = path1.join("samplefile")
+ # assert sfile.check(fnmatch=pattern)
+
+ def test_size(self, path1):
+ url = path1.join("samplefile")
+ assert url.size() > len("samplefile")
+
+ def test_mtime(self, path1):
+ url = path1.join("samplefile")
+ assert url.mtime() > 0
+
+ def test_relto_wrong_type(self, path1):
+ py.test.raises(TypeError, "path1.relto(42)")
+
+ def test_load(self, path1):
+ p = path1.join('samplepickle')
+ obj = p.load()
+ assert type(obj) is dict
+ assert obj.get('answer',None) == 42
+
+ def test_visit_filesonly(self, path1):
+ l = []
+ for i in path1.visit(lambda x: x.check(file=1)):
+ l.append(i.relto(path1))
+ assert not "sampledir" in l
+ assert path1.sep.join(["sampledir", "otherfile"]) in l
+
+ def test_visit_nodotfiles(self, path1):
+ l = []
+ for i in path1.visit(lambda x: x.check(dotfile=0)):
+ l.append(i.relto(path1))
+ assert "sampledir" in l
+ assert path1.sep.join(["sampledir", "otherfile"]) in l
+ assert not ".dotfile" in l
+
+ def test_visit_breadthfirst(self, path1):
+ l = []
+ for i in path1.visit(bf=True):
+ l.append(i.relto(path1))
+ for i, p in enumerate(l):
+ if path1.sep in p:
+ for j in range(i, len(l)):
+ assert path1.sep in l[j]
+ break
+ else:
+ py.test.fail("huh")
+
+ def test_visit_sort(self, path1):
+ l = []
+ for i in path1.visit(bf=True, sort=True):
+ l.append(i.relto(path1))
+ for i, p in enumerate(l):
+ if path1.sep in p:
+ break
+ assert l[:i] == sorted(l[:i])
+ assert l[i:] == sorted(l[i:])
+
+ def test_endswith(self, path1):
+ def chk(p):
+ return p.check(endswith="pickle")
+ assert not chk(path1)
+ assert not chk(path1.join('samplefile'))
+ assert chk(path1.join('somepickle'))
+
+ def test_copy_file(self, path1):
+ otherdir = path1.join('otherdir')
+ initpy = otherdir.join('__init__.py')
+ copied = otherdir.join('copied')
+ initpy.copy(copied)
+ try:
+ assert copied.check()
+ s1 = initpy.read()
+ s2 = copied.read()
+ assert s1 == s2
+ finally:
+ if copied.check():
+ copied.remove()
+
+ def test_copy_dir(self, path1):
+ otherdir = path1.join('otherdir')
+ copied = path1.join('newdir')
+ try:
+ otherdir.copy(copied)
+ assert copied.check(dir=1)
+ assert copied.join('__init__.py').check(file=1)
+ s1 = otherdir.join('__init__.py').read()
+ s2 = copied.join('__init__.py').read()
+ assert s1 == s2
+ finally:
+ if copied.check(dir=1):
+ copied.remove(rec=1)
+
+ def test_remove_file(self, path1):
+ d = path1.ensure('todeleted')
+ assert d.check()
+ d.remove()
+ assert not d.check()
+
+ def test_remove_dir_recursive_by_default(self, path1):
+ d = path1.ensure('to', 'be', 'deleted')
+ assert d.check()
+ p = path1.join('to')
+ p.remove()
+ assert not p.check()
+
+ def test_ensure_dir(self, path1):
+ b = path1.ensure_dir("001", "002")
+ assert b.basename == "002"
+ assert b.isdir()
+
+ def test_mkdir_and_remove(self, path1):
+ tmpdir = path1
+ py.test.raises(py.error.EEXIST, tmpdir.mkdir, 'sampledir')
+ new = tmpdir.join('mktest1')
+ new.mkdir()
+ assert new.check(dir=1)
+ new.remove()
+
+ new = tmpdir.mkdir('mktest')
+ assert new.check(dir=1)
+ new.remove()
+ assert tmpdir.join('mktest') == new
+
+ def test_move_file(self, path1):
+ p = path1.join('samplefile')
+ newp = p.dirpath('moved_samplefile')
+ p.move(newp)
+ try:
+ assert newp.check(file=1)
+ assert not p.check()
+ finally:
+ dp = newp.dirpath()
+ if hasattr(dp, 'revert'):
+ dp.revert()
+ else:
+ newp.move(p)
+ assert p.check()
+
+ def test_move_dir(self, path1):
+ source = path1.join('sampledir')
+ dest = path1.join('moveddir')
+ source.move(dest)
+ assert dest.check(dir=1)
+ assert dest.join('otherfile').check(file=1)
+ assert not source.join('sampledir').check()
+
+ def test_fspath_protocol_match_strpath(self, path1):
+ assert path1.__fspath__() == path1.strpath
+
+ def test_fspath_func_match_strpath(self, path1):
+ try:
+ from os import fspath
+ except ImportError:
+ from py._path.common import fspath
+ assert fspath(path1) == path1.strpath
+
+    @py.test.mark.skipif("sys.version_info < (3,6)")
+ def test_fspath_open(self, path1):
+ f = path1.join('opentestfile')
+ open(f)
+
+    @py.test.mark.skipif("sys.version_info < (3,6)")
+ def test_fspath_fsencode(self, path1):
+ from os import fsencode
+ assert fsencode(path1) == fsencode(path1.strpath)
+
+def setuptestfs(path):
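+    # Builds the sample tree the shared path tests above expect:
+    #   samplefile, execfile, execfile.py, samplepickle (a pickled dict),
+    #   sampledir/otherfile, and otherdir/ as a small importable package
+    #   (__init__.py, a.py, b.py, c.py, d.py).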
+ if path.join('samplefile').check():
+ return
+ #print "setting up test fs for", repr(path)
+ samplefile = path.ensure('samplefile')
+ samplefile.write('samplefile\n')
+
+ execfile = path.ensure('execfile')
+ execfile.write('x=42')
+
+ execfilepy = path.ensure('execfile.py')
+ execfilepy.write('x=42')
+
+ d = {1:2, 'hello': 'world', 'answer': 42}
+ path.ensure('samplepickle').dump(d)
+
+ sampledir = path.ensure('sampledir', dir=1)
+ sampledir.ensure('otherfile')
+
+ otherdir = path.ensure('otherdir', dir=1)
+ otherdir.ensure('__init__.py')
+
+ module_a = otherdir.ensure('a.py')
+ module_a.write('from .b import stuff as result\n')
+ module_b = otherdir.ensure('b.py')
+ module_b.write('stuff="got it"\n')
+ module_c = otherdir.ensure('c.py')
+ module_c.write('''import py;
+import otherdir.a
+value = otherdir.a.result
+''')
+ module_d = otherdir.ensure('d.py')
+ module_d.write('''import py;
+from otherdir import a
+value2 = a.result
+''')
diff --git a/third_party/python/py/testing/path/conftest.py b/third_party/python/py/testing/path/conftest.py
new file mode 100644
index 0000000000..84fb5c8269
--- /dev/null
+++ b/third_party/python/py/testing/path/conftest.py
@@ -0,0 +1,80 @@
+import py
+import sys
+from py._path import svnwc as svncommon
+
+svnbin = py.path.local.sysfind('svn')
+repodump = py.path.local(__file__).dirpath('repotest.dump')
+from py.builtin import print_
+
+def pytest_funcarg__repowc1(request):
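+    # Old-style funcarg factory: returns (repo, repourl, wc) backed by a
+    # module-scoped checkout; destructive tests get a saved copy restored
+    # afterwards via save_repowc()/restore_repowc().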
+ if svnbin is None:
+ py.test.skip("svn binary not found")
+
+ tmpdir = request.getfuncargvalue("tmpdir")
+ repo, repourl, wc = request.cached_setup(
+ setup=lambda: getrepowc(tmpdir, "path1repo", "path1wc"),
+ scope="module",
+ )
+ for x in ('test_remove', 'test_move', 'test_status_deleted'):
+ if request.function.__name__.startswith(x):
+ #print >>sys.stderr, ("saving repo", repo, "for", request.function)
+ _savedrepowc = save_repowc(repo, wc)
+ request.addfinalizer(lambda: restore_repowc(_savedrepowc))
+ return repo, repourl, wc
+
+def pytest_funcarg__repowc2(request):
+ tmpdir = request.getfuncargvalue("tmpdir")
+ name = request.function.__name__
+ repo, url, wc = getrepowc(tmpdir, "%s-repo-2" % name, "%s-wc-2" % name)
+ return repo, url, wc
+
+def getsvnbin():
+ if svnbin is None:
+ py.test.skip("svn binary not found")
+ return svnbin
+
+# make a wc directory out of a given root url
+# cache previously obtained wcs!
+#
+def getrepowc(tmpdir, reponame='basetestrepo', wcname='wc'):
+ repo = tmpdir.mkdir(reponame)
+ wcdir = tmpdir.mkdir(wcname)
+ repo.ensure(dir=1)
+ py.process.cmdexec('svnadmin create "%s"' %
+ svncommon._escape_helper(repo))
+ py.process.cmdexec('svnadmin load -q "%s" <"%s"' %
+ (svncommon._escape_helper(repo), repodump))
+ print_("created svn repository", repo)
+ wcdir.ensure(dir=1)
+ wc = py.path.svnwc(wcdir)
+ if sys.platform == 'win32':
+ repourl = "file://" + '/' + str(repo).replace('\\', '/')
+ else:
+ repourl = "file://%s" % repo
+ wc.checkout(repourl)
+ print_("checked out new repo into", wc)
+ return (repo, repourl, wc)
+
+
+def save_repowc(repo, wc):
+ assert not str(repo).startswith("file://"), repo
+ assert repo.check()
+ savedrepo = repo.dirpath(repo.basename+".1")
+ savedwc = wc.dirpath(wc.basename+".1")
+ repo.copy(savedrepo)
+ wc.localpath.copy(savedwc.localpath)
+ return savedrepo, savedwc
+
+def restore_repowc(obj):
+ savedrepo, savedwc = obj
+ #print >>sys.stderr, ("restoring", savedrepo)
+ repo = savedrepo.new(basename=savedrepo.basename[:-2])
+ assert repo.check()
+ wc = savedwc.new(basename=savedwc.basename[:-2])
+ assert wc.check()
+ wc.localpath.remove()
+ repo.remove()
+ savedrepo.move(repo)
+ savedwc.localpath.move(wc.localpath)
+ py.path.svnurl._lsnorevcache.clear()
+ py.path.svnurl._lsrevcache.clear()
diff --git a/third_party/python/py/testing/path/repotest.dump b/third_party/python/py/testing/path/repotest.dump
new file mode 100644
index 0000000000..c7819cad7a
--- /dev/null
+++ b/third_party/python/py/testing/path/repotest.dump
@@ -0,0 +1,228 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 876a30f4-1eed-0310-aeb7-ae314d1e5934
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-01-07T23:55:31.755989Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 118
+Content-length: 118
+
+K 7
+svn:log
+V 20
+testrepo setup rev 1
+K 10
+svn:author
+V 3
+hpk
+K 8
+svn:date
+V 27
+2005-01-07T23:55:37.815386Z
+PROPS-END
+
+Node-path: execfile
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 4
+Text-content-md5: d4b5bc61e16310f08c5d11866eba0a22
+Content-length: 14
+
+PROPS-END
+x=42
+
+Node-path: otherdir
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: otherdir/__init__.py
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: otherdir/a.py
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 247c7daeb2ee5dcab0aba7bd12bad665
+Content-length: 40
+
+PROPS-END
+from b import stuff as result
+
+
+Node-path: otherdir/b.py
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: c1b13503469a7711306d03a4b0721bc6
+Content-length: 25
+
+PROPS-END
+stuff="got it"
+
+
+Node-path: otherdir/c.py
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 75
+Text-content-md5: 250cdb6b5df68536152c681f48297569
+Content-length: 85
+
+PROPS-END
+import py; py.magic.autopath()
+import otherdir.a
+value = otherdir.a.result
+
+
+Node-path: otherdir/d.py
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 72
+Text-content-md5: 940c9c621e7b198e081459642c37f5a7
+Content-length: 82
+
+PROPS-END
+import py; py.magic.autopath()
+from otherdir import a
+value2 = a.result
+
+
+Node-path: sampledir
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: sampledir/otherfile
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: samplefile
+Node-kind: file
+Node-action: add
+Prop-content-length: 40
+Text-content-length: 11
+Text-content-md5: 9225ac28b32156979ab6482b8bb5fb8c
+Content-length: 51
+
+K 13
+svn:eol-style
+V 6
+native
+PROPS-END
+samplefile
+
+
+Node-path: samplepickle
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 56
+Text-content-md5: 719d85c1329a33134bb98f56b756c545
+Content-length: 66
+
+PROPS-END
+(dp1
+S'answer'
+p2
+I42
+sI1
+I2
+sS'hello'
+p3
+S'world'
+p4
+s.
+
+Revision-number: 2
+Prop-content-length: 108
+Content-length: 108
+
+K 7
+svn:log
+V 10
+second rev
+K 10
+svn:author
+V 3
+hpk
+K 8
+svn:date
+V 27
+2005-01-07T23:55:39.223202Z
+PROPS-END
+
+Node-path: anotherfile
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 5
+Text-content-md5: 5d41402abc4b2a76b9719d911017c592
+Content-length: 15
+
+PROPS-END
+hello
+
+Revision-number: 3
+Prop-content-length: 106
+Content-length: 106
+
+K 7
+svn:log
+V 9
+third rev
+K 10
+svn:author
+V 3
+hpk
+K 8
+svn:date
+V 27
+2005-01-07T23:55:41.556642Z
+PROPS-END
+
+Node-path: anotherfile
+Node-kind: file
+Node-action: change
+Text-content-length: 5
+Text-content-md5: 7d793037a0760186574b0282f2f435e7
+Content-length: 5
+
+world
+
diff --git a/third_party/python/py/testing/path/svntestbase.py b/third_party/python/py/testing/path/svntestbase.py
new file mode 100644
index 0000000000..8d94a9ca64
--- /dev/null
+++ b/third_party/python/py/testing/path/svntestbase.py
@@ -0,0 +1,31 @@
+import sys
+import py
+from py._path import svnwc as svncommon
+from common import CommonFSTests
+
+class CommonSvnTests(CommonFSTests):
+
+ def test_propget(self, path1):
+ url = path1.join("samplefile")
+ value = url.propget('svn:eol-style')
+ assert value == 'native'
+
+ def test_proplist(self, path1):
+ url = path1.join("samplefile")
+ res = url.proplist()
+ assert res['svn:eol-style'] == 'native'
+
+ def test_info(self, path1):
+ url = path1.join("samplefile")
+ res = url.info()
+ assert res.size > len("samplefile") and res.created_rev >= 0
+
+ def test_log_simple(self, path1):
+ url = path1.join("samplefile")
+ logentries = url.log()
+ for logentry in logentries:
+ assert logentry.rev == 1
+ assert hasattr(logentry, 'author')
+ assert hasattr(logentry, 'date')
+
+#cache.repositories.put(svnrepourl, 1200, 0)
diff --git a/third_party/python/py/testing/path/test_cacheutil.py b/third_party/python/py/testing/path/test_cacheutil.py
new file mode 100644
index 0000000000..c9fc07463a
--- /dev/null
+++ b/third_party/python/py/testing/path/test_cacheutil.py
@@ -0,0 +1,89 @@
+import pytest
+from py._path import cacheutil
+
+import time
+
+class BasicCacheAPITest:
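+    # Shared cache-API checks; subclasses provide a concrete `cache`
+    # instance (BuildcostAccessCache or AgingCache below).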
+ cache = None
+ def test_getorbuild(self):
+ val = self.cache.getorbuild(-42, lambda: 42)
+ assert val == 42
+ val = self.cache.getorbuild(-42, lambda: 23)
+ assert val == 42
+
+ def test_cache_get_key_error(self):
+ pytest.raises(KeyError, "self.cache._getentry(-23)")
+
+ def test_delentry_non_raising(self):
+ self.cache.getorbuild(100, lambda: 100)
+ self.cache.delentry(100)
+ pytest.raises(KeyError, "self.cache._getentry(100)")
+
+ def test_delentry_raising(self):
+ self.cache.getorbuild(100, lambda: 100)
+ self.cache.delentry(100)
+ pytest.raises(KeyError, self.cache.delentry, 100, raising=True)
+
+ def test_clear(self):
+ self.cache.clear()
+
+
+class TestBuildcostAccess(BasicCacheAPITest):
+ cache = cacheutil.BuildcostAccessCache(maxentries=128)
+
+ def test_cache_works_somewhat_simple(self, monkeypatch):
+ cache = cacheutil.BuildcostAccessCache()
+        # With the default gettime, BuildcostAccessCache.build can
+        # result in time()-time() == 0, which makes the test below
+        # fail randomly.  Let's rather use incrementing numbers instead.
+ l = [0]
+
+ def counter():
+ l[0] = l[0] + 1
+ return l[0]
+ monkeypatch.setattr(cacheutil, 'gettime', counter)
+ for x in range(cache.maxentries):
+ y = cache.getorbuild(x, lambda: x)
+ assert x == y
+ for x in range(cache.maxentries):
+ assert cache.getorbuild(x, None) == x
+ halfentries = int(cache.maxentries / 2)
+ for x in range(halfentries):
+ assert cache.getorbuild(x, None) == x
+ assert cache.getorbuild(x, None) == x
+ # evict one entry
+ val = cache.getorbuild(-1, lambda: 42)
+ assert val == 42
+ # check that recently used ones are still there
+ # and are not build again
+ for x in range(halfentries):
+ assert cache.getorbuild(x, None) == x
+ assert cache.getorbuild(-1, None) == 42
+
+
+class TestAging(BasicCacheAPITest):
+ maxsecs = 0.10
+ cache = cacheutil.AgingCache(maxentries=128, maxseconds=maxsecs)
+
+ def test_cache_eviction(self):
+ self.cache.getorbuild(17, lambda: 17)
+ endtime = time.time() + self.maxsecs * 10
+ while time.time() < endtime:
+ try:
+ self.cache._getentry(17)
+ except KeyError:
+ break
+ time.sleep(self.maxsecs*0.3)
+ else:
+ pytest.fail("waiting for cache eviction failed")
+
+
+def test_prune_lowestweight():
+ maxsecs = 0.05
+ cache = cacheutil.AgingCache(maxentries=10, maxseconds=maxsecs)
+ for x in range(cache.maxentries):
+ cache.getorbuild(x, lambda: x)
+ time.sleep(maxsecs*1.1)
+ cache.getorbuild(cache.maxentries+1, lambda: 42)
diff --git a/third_party/python/py/testing/path/test_local.py b/third_party/python/py/testing/path/test_local.py
new file mode 100644
index 0000000000..ee4b9bde9c
--- /dev/null
+++ b/third_party/python/py/testing/path/test_local.py
@@ -0,0 +1,976 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+import time
+import py
+import pytest
+import os
+import sys
+import multiprocessing
+from py.path import local
+import common
+
+failsonjython = py.test.mark.xfail("sys.platform.startswith('java')")
+failsonjywin32 = py.test.mark.xfail(
+ "sys.platform.startswith('java') "
+ "and getattr(os, '_name', None) == 'nt'")
+win32only = py.test.mark.skipif(
+ "not (sys.platform == 'win32' or getattr(os, '_name', None) == 'nt')")
+skiponwin32 = py.test.mark.skipif(
+ "sys.platform == 'win32' or getattr(os, '_name', None) == 'nt'")
+
+ATIME_RESOLUTION = 0.01
+
+
+@pytest.yield_fixture(scope="session")
+def path1(tmpdir_factory):
+ path = tmpdir_factory.mktemp('path')
+ common.setuptestfs(path)
+ yield path
+ assert path.join("samplefile").check()
+
+
+@pytest.fixture
+def fake_fspath_obj(request):
+ class FakeFSPathClass(object):
+ def __init__(self, path):
+ self._path = path
+
+ def __fspath__(self):
+ return self._path
+
+ return FakeFSPathClass(os.path.join("this", "is", "a", "fake", "path"))
+
+
+def batch_make_numbered_dirs(rootdir, repeats):
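+    # Worker for test_make_numbered_dir_multiprocess_safe below: each process
+    # repeatedly creates numbered dirs and verifies the content it wrote back.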
+ try:
+ for i in range(repeats):
+ dir_ = py.path.local.make_numbered_dir(prefix='repro-', rootdir=rootdir)
+ file_ = dir_.join('foo')
+ file_.write('%s' % i)
+ actual = int(file_.read())
+ assert actual == i, 'int(file_.read()) is %s instead of %s' % (actual, i)
+ dir_.join('.lock').remove(ignore_errors=True)
+ return True
+ except KeyboardInterrupt:
+ # makes sure that interrupting test session won't hang it
+        os._exit(2)
+
+
+class TestLocalPath(common.CommonFSTests):
+ def test_join_normpath(self, tmpdir):
+ assert tmpdir.join(".") == tmpdir
+ p = tmpdir.join("../%s" % tmpdir.basename)
+ assert p == tmpdir
+ p = tmpdir.join("..//%s/" % tmpdir.basename)
+ assert p == tmpdir
+
+ @skiponwin32
+ def test_dirpath_abs_no_abs(self, tmpdir):
+ p = tmpdir.join('foo')
+ assert p.dirpath('/bar') == tmpdir.join('bar')
+ assert tmpdir.dirpath('/bar', abs=True) == local('/bar')
+
+ def test_gethash(self, tmpdir):
+ md5 = py.builtin._tryimport('md5', 'hashlib').md5
+ lib = py.builtin._tryimport('sha', 'hashlib')
+ sha = getattr(lib, 'sha1', getattr(lib, 'sha', None))
+ fn = tmpdir.join("testhashfile")
+ data = 'hello'.encode('ascii')
+ fn.write(data, mode="wb")
+ assert fn.computehash("md5") == md5(data).hexdigest()
+ assert fn.computehash("sha1") == sha(data).hexdigest()
+ py.test.raises(ValueError, fn.computehash, "asdasd")
+
+ def test_remove_removes_readonly_file(self, tmpdir):
+ readonly_file = tmpdir.join('readonly').ensure()
+ readonly_file.chmod(0)
+ readonly_file.remove()
+ assert not readonly_file.check(exists=1)
+
+ def test_remove_removes_readonly_dir(self, tmpdir):
+ readonly_dir = tmpdir.join('readonlydir').ensure(dir=1)
+ readonly_dir.chmod(int("500", 8))
+ readonly_dir.remove()
+ assert not readonly_dir.check(exists=1)
+
+ def test_remove_removes_dir_and_readonly_file(self, tmpdir):
+ readonly_dir = tmpdir.join('readonlydir').ensure(dir=1)
+ readonly_file = readonly_dir.join('readonlyfile').ensure()
+ readonly_file.chmod(0)
+ readonly_dir.remove()
+ assert not readonly_dir.check(exists=1)
+
+ def test_remove_routes_ignore_errors(self, tmpdir, monkeypatch):
+ l = []
+ monkeypatch.setattr(
+ 'shutil.rmtree',
+ lambda *args, **kwargs: l.append(kwargs))
+ tmpdir.remove()
+ assert not l[0]['ignore_errors']
+ for val in (True, False):
+ l[:] = []
+ tmpdir.remove(ignore_errors=val)
+ assert l[0]['ignore_errors'] == val
+
+ def test_initialize_curdir(self):
+ assert str(local()) == os.getcwd()
+
+ @skiponwin32
+ def test_chdir_gone(self, path1):
+ p = path1.ensure("dir_to_be_removed", dir=1)
+ p.chdir()
+ p.remove()
+ pytest.raises(py.error.ENOENT, py.path.local)
+ assert path1.chdir() is None
+ assert os.getcwd() == str(path1)
+
+ def test_as_cwd(self, path1):
+ dir = path1.ensure("subdir", dir=1)
+ old = py.path.local()
+ with dir.as_cwd() as x:
+ assert x == old
+ assert py.path.local() == dir
+ assert os.getcwd() == str(old)
+
+ def test_as_cwd_exception(self, path1):
+ old = py.path.local()
+ dir = path1.ensure("subdir", dir=1)
+ with pytest.raises(ValueError):
+ with dir.as_cwd():
+ raise ValueError()
+ assert old == py.path.local()
+
+ def test_initialize_reldir(self, path1):
+ with path1.as_cwd():
+ p = local('samplefile')
+ assert p.check()
+
+ def test_tilde_expansion(self, monkeypatch, tmpdir):
+ monkeypatch.setenv("HOME", str(tmpdir))
+ p = py.path.local("~", expanduser=True)
+ assert p == os.path.expanduser("~")
+
+ def test_eq_with_strings(self, path1):
+ path1 = path1.join('sampledir')
+ path2 = str(path1)
+ assert path1 == path2
+ assert path2 == path1
+ path3 = path1.join('samplefile')
+ assert path3 != path2
+ assert path2 != path3
+
+ def test_eq_with_none(self, path1):
+ assert path1 != None # noqa
+
+ def test_eq_non_ascii_unicode(self, path1):
+ path2 = path1.join(u'temp')
+ path3 = path1.join(u'ação')
+ path4 = path1.join(u'ディレクトリ')
+
+ assert path2 != path3
+ assert path2 != path4
+ assert path4 != path3
+
+ def test_gt_with_strings(self, path1):
+ path2 = path1.join('sampledir')
+ path3 = str(path1.join("ttt"))
+ assert path3 > path2
+ assert path2 < path3
+ assert path2 < "ttt"
+ assert "ttt" > path2
+ path4 = path1.join("aaa")
+ l = [path2, path4, path3]
+ assert sorted(l) == [path4, path2, path3]
+
+ def test_open_and_ensure(self, path1):
+ p = path1.join("sub1", "sub2", "file")
+ with p.open("w", ensure=1) as f:
+ f.write("hello")
+ assert p.read() == "hello"
+
+ def test_write_and_ensure(self, path1):
+ p = path1.join("sub1", "sub2", "file")
+ p.write("hello", ensure=1)
+ assert p.read() == "hello"
+
+ @py.test.mark.parametrize('bin', (False, True))
+ def test_dump(self, tmpdir, bin):
+ path = tmpdir.join("dumpfile%s" % int(bin))
+ try:
+ d = {'answer': 42}
+ path.dump(d, bin=bin)
+ f = path.open('rb+')
+ import pickle
+ dnew = pickle.load(f)
+ assert d == dnew
+ finally:
+ f.close()
+
+ @failsonjywin32
+ def test_setmtime(self):
+ import tempfile
+ import time
+ try:
+ fd, name = tempfile.mkstemp()
+ os.close(fd)
+ except AttributeError:
+ name = tempfile.mktemp()
+ open(name, 'w').close()
+ try:
+ mtime = int(time.time())-100
+ path = local(name)
+ assert path.mtime() != mtime
+ path.setmtime(mtime)
+ assert path.mtime() == mtime
+ path.setmtime()
+ assert path.mtime() != mtime
+ finally:
+ os.remove(name)
+
+ def test_normpath(self, path1):
+ new1 = path1.join("/otherdir")
+ new2 = path1.join("otherdir")
+ assert str(new1) == str(new2)
+
+ def test_mkdtemp_creation(self):
+ d = local.mkdtemp()
+ try:
+ assert d.check(dir=1)
+ finally:
+ d.remove(rec=1)
+
+ def test_tmproot(self):
+ d = local.mkdtemp()
+ tmproot = local.get_temproot()
+ try:
+ assert d.check(dir=1)
+ assert d.dirpath() == tmproot
+ finally:
+ d.remove(rec=1)
+
+ def test_chdir(self, tmpdir):
+ old = local()
+ try:
+ res = tmpdir.chdir()
+ assert str(res) == str(old)
+ assert os.getcwd() == str(tmpdir)
+ finally:
+ old.chdir()
+
+ def test_ensure_filepath_withdir(self, tmpdir):
+ newfile = tmpdir.join('test1', 'test')
+ newfile.ensure()
+ assert newfile.check(file=1)
+ newfile.write("42")
+ newfile.ensure()
+ s = newfile.read()
+ assert s == "42"
+
+ def test_ensure_filepath_withoutdir(self, tmpdir):
+ newfile = tmpdir.join('test1file')
+ t = newfile.ensure()
+ assert t == newfile
+ assert newfile.check(file=1)
+
+ def test_ensure_dirpath(self, tmpdir):
+ newfile = tmpdir.join('test1', 'testfile')
+ t = newfile.ensure(dir=1)
+ assert t == newfile
+ assert newfile.check(dir=1)
+
+ def test_ensure_non_ascii_unicode(self, tmpdir):
+ newfile = tmpdir.join(u'ação',u'ディレクトリ')
+ t = newfile.ensure(dir=1)
+ assert t == newfile
+ assert newfile.check(dir=1)
+
+ def test_init_from_path(self, tmpdir):
+ l = local()
+ l2 = local(l)
+ assert l2 == l
+
+ wc = py.path.svnwc('.')
+ l3 = local(wc)
+ assert l3 is not wc
+ assert l3.strpath == wc.strpath
+ assert not hasattr(l3, 'commit')
+
+    @py.test.mark.xfail(run=False, reason="unreliable test for long filenames")
+ def test_long_filenames(self, tmpdir):
+ if sys.platform == "win32":
+ py.test.skip("win32: work around needed for path length limit")
+ # see http://codespeak.net/pipermail/py-dev/2008q2/000922.html
+
+        # testing paths > 260 chars (which is Windows' limitation, but
+        # depending on how the paths are used), yet < 4096 chars (Linux'
+        # limitation) - the behaviour of paths with names > 4096 chars
+        # is undetermined
+ newfilename = '/test' * 60
+ l = tmpdir.join(newfilename)
+ l.ensure(file=True)
+ l.write('foo')
+ l2 = tmpdir.join(newfilename)
+ assert l2.read() == 'foo'
+
+ def test_visit_depth_first(self, tmpdir):
+ tmpdir.ensure("a", "1")
+ tmpdir.ensure("b", "2")
+ p3 = tmpdir.ensure("breadth")
+ l = list(tmpdir.visit(lambda x: x.check(file=1)))
+ assert len(l) == 3
+ # check that breadth comes last
+ assert l[2] == p3
+
+ def test_visit_rec_fnmatch(self, tmpdir):
+ p1 = tmpdir.ensure("a", "123")
+ tmpdir.ensure(".b", "345")
+ l = list(tmpdir.visit("???", rec="[!.]*"))
+ assert len(l) == 1
+ # check that breadth comes last
+ assert l[0] == p1
+
+ def test_fnmatch_file_abspath(self, tmpdir):
+ b = tmpdir.join("a", "b")
+ assert b.fnmatch(os.sep.join("ab"))
+ pattern = os.sep.join([str(tmpdir), "*", "b"])
+ assert b.fnmatch(pattern)
+
+ def test_sysfind(self):
+ name = sys.platform == "win32" and "cmd" or "test"
+ x = py.path.local.sysfind(name)
+ assert x.check(file=1)
+ assert py.path.local.sysfind('jaksdkasldqwe') is None
+ assert py.path.local.sysfind(name, paths=[]) is None
+ x2 = py.path.local.sysfind(name, paths=[x.dirpath()])
+ assert x2 == x
+
+ def test_fspath_protocol_other_class(self, fake_fspath_obj):
+ # py.path is always absolute
+ py_path = py.path.local(fake_fspath_obj)
+ str_path = fake_fspath_obj.__fspath__()
+ assert py_path.check(endswith=str_path)
+ assert py_path.join(fake_fspath_obj).strpath == os.path.join(
+ py_path.strpath, str_path)
+
+ def test_make_numbered_dir_multiprocess_safe(self, tmpdir):
+ # https://github.com/pytest-dev/py/issues/30
+ pool = multiprocessing.Pool()
+ results = [pool.apply_async(batch_make_numbered_dirs, [tmpdir, 100]) for _ in range(20)]
+ for r in results:
+ assert r.get()
+
+
+class TestExecutionOnWindows:
+ pytestmark = win32only
+
+ def test_sysfind_bat_exe_before(self, tmpdir, monkeypatch):
+ monkeypatch.setenv("PATH", str(tmpdir), prepend=os.pathsep)
+ tmpdir.ensure("hello")
+ h = tmpdir.ensure("hello.bat")
+ x = py.path.local.sysfind("hello")
+ assert x == h
+
+
+class TestExecution:
+ pytestmark = skiponwin32
+
+ def test_sysfind_no_permisson_ignored(self, monkeypatch, tmpdir):
+ noperm = tmpdir.ensure('noperm', dir=True)
+ monkeypatch.setenv("PATH", noperm, prepend=":")
+ noperm.chmod(0)
+ assert py.path.local.sysfind('jaksdkasldqwe') is None
+
+ def test_sysfind_absolute(self):
+ x = py.path.local.sysfind('test')
+ assert x.check(file=1)
+ y = py.path.local.sysfind(str(x))
+ assert y.check(file=1)
+ assert y == x
+
+ def test_sysfind_multiple(self, tmpdir, monkeypatch):
+ monkeypatch.setenv('PATH', "%s:%s" % (
+ tmpdir.ensure('a'),
+ tmpdir.join('b')),
+ prepend=":")
+ tmpdir.ensure('b', 'a')
+ x = py.path.local.sysfind(
+ 'a', checker=lambda x: x.dirpath().basename == 'b')
+ assert x.basename == 'a'
+ assert x.dirpath().basename == 'b'
+ assert py.path.local.sysfind('a', checker=lambda x: None) is None
+
+ def test_sysexec(self):
+ x = py.path.local.sysfind('ls')
+ out = x.sysexec('-a')
+ for x in py.path.local().listdir():
+ assert out.find(x.basename) != -1
+
+ def test_sysexec_failing(self):
+ x = py.path.local.sysfind('false')
+ with pytest.raises(py.process.cmdexec.Error):
+ x.sysexec('aksjdkasjd')
+
+ def test_make_numbered_dir(self, tmpdir):
+ tmpdir.ensure('base.not_an_int', dir=1)
+ for i in range(10):
+ numdir = local.make_numbered_dir(prefix='base.', rootdir=tmpdir,
+ keep=2, lock_timeout=0)
+ assert numdir.check()
+ assert numdir.basename == 'base.%d' % i
+ if i >= 1:
+ assert numdir.new(ext=str(i-1)).check()
+ if i >= 2:
+ assert numdir.new(ext=str(i-2)).check()
+ if i >= 3:
+ assert not numdir.new(ext=str(i-3)).check()
+
+ def test_make_numbered_dir_case(self, tmpdir):
+        """make_numbered_dir makes no platform-based assumptions about the
+        underlying filesystem and assumes it _could_ be case insensitive.
+
+ See issues:
+ - https://github.com/pytest-dev/pytest/issues/708
+ - https://github.com/pytest-dev/pytest/issues/3451
+ """
+ d1 = local.make_numbered_dir(
+ prefix='CAse.', rootdir=tmpdir, keep=2, lock_timeout=0,
+ )
+ d2 = local.make_numbered_dir(
+ prefix='caSE.', rootdir=tmpdir, keep=2, lock_timeout=0,
+ )
+ assert str(d1).lower() != str(d2).lower()
+ assert str(d2).endswith('.1')
+
+ def test_make_numbered_dir_NotImplemented_Error(self, tmpdir, monkeypatch):
+ def notimpl(x, y):
+ raise NotImplementedError(42)
+ monkeypatch.setattr(os, 'symlink', notimpl)
+ x = tmpdir.make_numbered_dir(rootdir=tmpdir, lock_timeout=0)
+ assert x.relto(tmpdir)
+ assert x.check()
+
+ def test_locked_make_numbered_dir(self, tmpdir):
+ for i in range(10):
+ numdir = local.make_numbered_dir(prefix='base2.', rootdir=tmpdir,
+ keep=2)
+ assert numdir.check()
+ assert numdir.basename == 'base2.%d' % i
+ for j in range(i):
+ assert numdir.new(ext=str(j)).check()
+
+ def test_error_preservation(self, path1):
+ py.test.raises(EnvironmentError, path1.join('qwoeqiwe').mtime)
+ py.test.raises(EnvironmentError, path1.join('qwoeqiwe').read)
+
+ # def test_parentdirmatch(self):
+ # local.parentdirmatch('std', startmodule=__name__)
+ #
+
+
+class TestImport:
+ def test_pyimport(self, path1):
+ obj = path1.join('execfile.py').pyimport()
+ assert obj.x == 42
+ assert obj.__name__ == 'execfile'
+
+ def test_pyimport_renamed_dir_creates_mismatch(self, tmpdir):
+ p = tmpdir.ensure("a", "test_x123.py")
+ p.pyimport()
+ tmpdir.join("a").move(tmpdir.join("b"))
+ with pytest.raises(tmpdir.ImportMismatchError):
+ tmpdir.join("b", "test_x123.py").pyimport()
+
+ def test_pyimport_messy_name(self, tmpdir):
+ # http://bitbucket.org/hpk42/py-trunk/issue/129
+ path = tmpdir.ensure('foo__init__.py')
+ path.pyimport()
+
+ def test_pyimport_dir(self, tmpdir):
+ p = tmpdir.join("hello_123")
+ p_init = p.ensure("__init__.py")
+ m = p.pyimport()
+ assert m.__name__ == "hello_123"
+ m = p_init.pyimport()
+ assert m.__name__ == "hello_123"
+
+ def test_pyimport_execfile_different_name(self, path1):
+ obj = path1.join('execfile.py').pyimport(modname="0x.y.z")
+ assert obj.x == 42
+ assert obj.__name__ == '0x.y.z'
+
+ def test_pyimport_a(self, path1):
+ otherdir = path1.join('otherdir')
+ mod = otherdir.join('a.py').pyimport()
+ assert mod.result == "got it"
+ assert mod.__name__ == 'otherdir.a'
+
+ def test_pyimport_b(self, path1):
+ otherdir = path1.join('otherdir')
+ mod = otherdir.join('b.py').pyimport()
+ assert mod.stuff == "got it"
+ assert mod.__name__ == 'otherdir.b'
+
+ def test_pyimport_c(self, path1):
+ otherdir = path1.join('otherdir')
+ mod = otherdir.join('c.py').pyimport()
+ assert mod.value == "got it"
+
+ def test_pyimport_d(self, path1):
+ otherdir = path1.join('otherdir')
+ mod = otherdir.join('d.py').pyimport()
+ assert mod.value2 == "got it"
+
+ def test_pyimport_and_import(self, tmpdir):
+ tmpdir.ensure('xxxpackage', '__init__.py')
+ mod1path = tmpdir.ensure('xxxpackage', 'module1.py')
+ mod1 = mod1path.pyimport()
+ assert mod1.__name__ == 'xxxpackage.module1'
+ from xxxpackage import module1
+ assert module1 is mod1
+
+ def test_pyimport_check_filepath_consistency(self, monkeypatch, tmpdir):
+ name = 'pointsback123'
+ ModuleType = type(os)
+ p = tmpdir.ensure(name + '.py')
+ for ending in ('.pyc', '$py.class', '.pyo'):
+ mod = ModuleType(name)
+ pseudopath = tmpdir.ensure(name+ending)
+ mod.__file__ = str(pseudopath)
+ monkeypatch.setitem(sys.modules, name, mod)
+ newmod = p.pyimport()
+ assert mod == newmod
+ monkeypatch.undo()
+ mod = ModuleType(name)
+ pseudopath = tmpdir.ensure(name+"123.py")
+ mod.__file__ = str(pseudopath)
+ monkeypatch.setitem(sys.modules, name, mod)
+ excinfo = py.test.raises(pseudopath.ImportMismatchError, p.pyimport)
+ modname, modfile, orig = excinfo.value.args
+ assert modname == name
+ assert modfile == pseudopath
+ assert orig == p
+ assert issubclass(pseudopath.ImportMismatchError, ImportError)
+
+ def test_issue131_pyimport_on__init__(self, tmpdir):
+ # __init__.py files may be namespace packages, and thus the
+ # __file__ of an imported module may not be ourselves
+ # see issue
+ p1 = tmpdir.ensure("proja", "__init__.py")
+ p2 = tmpdir.ensure("sub", "proja", "__init__.py")
+ m1 = p1.pyimport()
+ m2 = p2.pyimport()
+ assert m1 == m2
+
+ def test_ensuresyspath_append(self, tmpdir):
+ root1 = tmpdir.mkdir("root1")
+ file1 = root1.ensure("x123.py")
+ assert str(root1) not in sys.path
+ file1.pyimport(ensuresyspath="append")
+ assert str(root1) == sys.path[-1]
+ assert str(root1) not in sys.path[:-1]
+
+
+def test_pypkgdir(tmpdir):
+ pkg = tmpdir.ensure('pkg1', dir=1)
+ pkg.ensure("__init__.py")
+ pkg.ensure("subdir/__init__.py")
+ assert pkg.pypkgpath() == pkg
+ assert pkg.join('subdir', '__init__.py').pypkgpath() == pkg
+
+
+def test_pypkgdir_unimportable(tmpdir):
+ pkg = tmpdir.ensure('pkg1-1', dir=1) # unimportable
+ pkg.ensure("__init__.py")
+ subdir = pkg.ensure("subdir/__init__.py").dirpath()
+ assert subdir.pypkgpath() == subdir
+ assert subdir.ensure("xyz.py").pypkgpath() == subdir
+ assert not pkg.pypkgpath()
+
+
+def test_isimportable():
+ from py._path.local import isimportable
+ assert not isimportable("")
+ assert isimportable("x")
+ assert isimportable("x1")
+ assert isimportable("x_1")
+ assert isimportable("_")
+ assert isimportable("_1")
+ assert not isimportable("x-1")
+ assert not isimportable("x:1")
+
+
+def test_homedir_from_HOME(monkeypatch):
+ path = os.getcwd()
+ monkeypatch.setenv("HOME", path)
+ assert py.path.local._gethomedir() == py.path.local(path)
+
+
+def test_homedir_not_exists(monkeypatch):
+ monkeypatch.delenv("HOME", raising=False)
+ monkeypatch.delenv("HOMEDRIVE", raising=False)
+ homedir = py.path.local._gethomedir()
+ assert homedir is None
+
+
+def test_samefile(tmpdir):
+ assert tmpdir.samefile(tmpdir)
+ p = tmpdir.ensure("hello")
+ assert p.samefile(p)
+ with p.dirpath().as_cwd():
+ assert p.samefile(p.basename)
+ if sys.platform == "win32":
+ p1 = p.__class__(str(p).lower())
+ p2 = p.__class__(str(p).upper())
+ assert p1.samefile(p2)
+
+
+def test_listdir_single_arg(tmpdir):
+ tmpdir.ensure("hello")
+ assert tmpdir.listdir("hello")[0].basename == "hello"
+
+
+def test_mkdtemp_rootdir(tmpdir):
+ dtmp = local.mkdtemp(rootdir=tmpdir)
+ assert tmpdir.listdir() == [dtmp]
+
+
+class TestWINLocalPath:
+ pytestmark = win32only
+
+ def test_owner_group_not_implemented(self, path1):
+ py.test.raises(NotImplementedError, "path1.stat().owner")
+ py.test.raises(NotImplementedError, "path1.stat().group")
+
+ def test_chmod_simple_int(self, path1):
+ py.builtin.print_("path1 is", path1)
+ mode = path1.stat().mode
+ # Ensure that we actually change the mode to something different.
+ path1.chmod(mode == 0 and 1 or 0)
+ try:
+ print(path1.stat().mode)
+ print(mode)
+ assert path1.stat().mode != mode
+ finally:
+ path1.chmod(mode)
+ assert path1.stat().mode == mode
+
+ def test_path_comparison_lowercase_mixed(self, path1):
+ t1 = path1.join("a_path")
+ t2 = path1.join("A_path")
+ assert t1 == t1
+ assert t1 == t2
+
+ def test_relto_with_mixed_case(self, path1):
+ t1 = path1.join("a_path", "fiLe")
+ t2 = path1.join("A_path")
+ assert t1.relto(t2) == "fiLe"
+
+ def test_allow_unix_style_paths(self, path1):
+ t1 = path1.join('a_path')
+ assert t1 == str(path1) + '\\a_path'
+ t1 = path1.join('a_path/')
+ assert t1 == str(path1) + '\\a_path'
+ t1 = path1.join('dir/a_path')
+ assert t1 == str(path1) + '\\dir\\a_path'
+
+ def test_sysfind_in_currentdir(self, path1):
+ cmd = py.path.local.sysfind('cmd')
+ root = cmd.new(dirname='', basename='') # c:\ in most installations
+ with root.as_cwd():
+ x = py.path.local.sysfind(cmd.relto(root))
+ assert x.check(file=1)
+
+ def test_fnmatch_file_abspath_posix_pattern_on_win32(self, tmpdir):
+ # path-matching patterns might contain a posix path separator '/'
+ # Test that we can match that pattern on windows.
+ import posixpath
+ b = tmpdir.join("a", "b")
+ assert b.fnmatch(posixpath.sep.join("ab"))
+ pattern = posixpath.sep.join([str(tmpdir), "*", "b"])
+ assert b.fnmatch(pattern)
+
+
+class TestPOSIXLocalPath:
+ pytestmark = skiponwin32
+
+ def test_hardlink(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ filepath = tmpdir.join('file')
+ filepath.write("Hello")
+ nlink = filepath.stat().nlink
+ linkpath.mklinkto(filepath)
+ assert filepath.stat().nlink == nlink + 1
+
+ def test_symlink_are_identical(self, tmpdir):
+ filepath = tmpdir.join('file')
+ filepath.write("Hello")
+ linkpath = tmpdir.join('test')
+ linkpath.mksymlinkto(filepath)
+ assert linkpath.readlink() == str(filepath)
+
+ def test_symlink_isfile(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ filepath = tmpdir.join('file')
+ filepath.write("")
+ linkpath.mksymlinkto(filepath)
+ assert linkpath.check(file=1)
+ assert not linkpath.check(link=0, file=1)
+ assert linkpath.islink()
+
+ def test_symlink_relative(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ filepath = tmpdir.join('file')
+ filepath.write("Hello")
+ linkpath.mksymlinkto(filepath, absolute=False)
+ assert linkpath.readlink() == "file"
+ assert filepath.read() == linkpath.read()
+
+ def test_symlink_not_existing(self, tmpdir):
+ linkpath = tmpdir.join('testnotexisting')
+ assert not linkpath.check(link=1)
+ assert linkpath.check(link=0)
+
+ def test_relto_with_root(self, path1, tmpdir):
+ y = path1.join('x').relto(py.path.local('/'))
+ assert y[0] == str(path1)[1]
+
+ def test_visit_recursive_symlink(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ linkpath.mksymlinkto(tmpdir)
+ visitor = tmpdir.visit(None, lambda x: x.check(link=0))
+ assert list(visitor) == [linkpath]
+
+ def test_symlink_isdir(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ linkpath.mksymlinkto(tmpdir)
+ assert linkpath.check(dir=1)
+ assert not linkpath.check(link=0, dir=1)
+
+ def test_symlink_remove(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ linkpath.mksymlinkto(linkpath) # point to itself
+ assert linkpath.check(link=1)
+ linkpath.remove()
+ assert not linkpath.check()
+
+ def test_realpath_file(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ filepath = tmpdir.join('file')
+ filepath.write("")
+ linkpath.mksymlinkto(filepath)
+ realpath = linkpath.realpath()
+ assert realpath.basename == 'file'
+
+ def test_owner(self, path1, tmpdir):
+ from pwd import getpwuid
+ from grp import getgrgid
+ stat = path1.stat()
+ assert stat.path == path1
+
+ uid = stat.uid
+ gid = stat.gid
+ owner = getpwuid(uid)[0]
+ group = getgrgid(gid)[0]
+
+ assert uid == stat.uid
+ assert owner == stat.owner
+ assert gid == stat.gid
+ assert group == stat.group
+
+ def test_stat_helpers(self, tmpdir, monkeypatch):
+ path1 = tmpdir.ensure("file")
+ stat1 = path1.stat()
+ stat2 = tmpdir.stat()
+ assert stat1.isfile()
+ assert stat2.isdir()
+ assert not stat1.islink()
+ assert not stat2.islink()
+
+ def test_stat_non_raising(self, tmpdir):
+ path1 = tmpdir.join("file")
+ pytest.raises(py.error.ENOENT, lambda: path1.stat())
+ res = path1.stat(raising=False)
+ assert res is None
+
+ def test_atime(self, tmpdir):
+ import time
+ path = tmpdir.ensure('samplefile')
+ now = time.time()
+ atime1 = path.atime()
+ # we could wait here but timer resolution is very
+ # system dependent
+ path.read()
+ time.sleep(ATIME_RESOLUTION)
+ atime2 = path.atime()
+ time.sleep(ATIME_RESOLUTION)
+ duration = time.time() - now
+ assert (atime2-atime1) <= duration
+
+ def test_commondir(self, path1):
+ # XXX This is here in local until we find a way to implement this
+ # using the subversion command line api.
+ p1 = path1.join('something')
+ p2 = path1.join('otherthing')
+ assert p1.common(p2) == path1
+ assert p2.common(p1) == path1
+
+ def test_commondir_nocommon(self, path1):
+ # XXX This is here in local until we find a way to implement this
+ # using the subversion command line api.
+ p1 = path1.join('something')
+ p2 = py.path.local(path1.sep+'blabla')
+ assert p1.common(p2) == '/'
+
+ def test_join_to_root(self, path1):
+ root = path1.parts()[0]
+ assert len(str(root)) == 1
+ assert str(root.join('a')) == '/a'
+
+ def test_join_root_to_root_with_no_abs(self, path1):
+ nroot = path1.join('/')
+ assert str(path1) == str(nroot)
+ assert path1 == nroot
+
+ def test_chmod_simple_int(self, path1):
+ mode = path1.stat().mode
+ path1.chmod(int(mode/2))
+ try:
+ assert path1.stat().mode != mode
+ finally:
+ path1.chmod(mode)
+ assert path1.stat().mode == mode
+
+ def test_chmod_rec_int(self, path1):
+ # XXX fragile test
+ def recfilter(x): return x.check(dotfile=0, link=0)
+ oldmodes = {}
+ for x in path1.visit(rec=recfilter):
+ oldmodes[x] = x.stat().mode
+ path1.chmod(int("772", 8), rec=recfilter)
+ try:
+ for x in path1.visit(rec=recfilter):
+ assert x.stat().mode & int("777", 8) == int("772", 8)
+ finally:
+ for x, y in oldmodes.items():
+ x.chmod(y)
+
+ def test_copy_archiving(self, tmpdir):
+ unicode_fn = u"something-\342\200\223.txt"
+ f = tmpdir.ensure("a", unicode_fn)
+ a = f.dirpath()
+ oldmode = f.stat().mode
+ newmode = oldmode ^ 1
+ f.chmod(newmode)
+ b = tmpdir.join("b")
+ a.copy(b, mode=True)
+ assert b.join(f.basename).stat().mode == newmode
+
+ def test_copy_stat_file(self, tmpdir):
+ src = tmpdir.ensure('src')
+ dst = tmpdir.join('dst')
+ # a small delay before the copy
+ time.sleep(ATIME_RESOLUTION)
+ src.copy(dst, stat=True)
+ oldstat = src.stat()
+ newstat = dst.stat()
+ assert oldstat.mode == newstat.mode
+ assert (dst.atime() - src.atime()) < ATIME_RESOLUTION
+ assert (dst.mtime() - src.mtime()) < ATIME_RESOLUTION
+
+ def test_copy_stat_dir(self, tmpdir):
+ test_files = ['a', 'b', 'c']
+ src = tmpdir.join('src')
+ for f in test_files:
+ src.join(f).write(f, ensure=True)
+ dst = tmpdir.join('dst')
+ # a small delay before the copy
+ time.sleep(ATIME_RESOLUTION)
+ src.copy(dst, stat=True)
+ for f in test_files:
+ oldstat = src.join(f).stat()
+ newstat = dst.join(f).stat()
+ assert (newstat.atime - oldstat.atime) < ATIME_RESOLUTION
+ assert (newstat.mtime - oldstat.mtime) < ATIME_RESOLUTION
+ assert oldstat.mode == newstat.mode
+
+ @failsonjython
+ def test_chown_identity(self, path1):
+ owner = path1.stat().owner
+ group = path1.stat().group
+ path1.chown(owner, group)
+
+ @failsonjython
+ def test_chown_dangling_link(self, path1):
+ owner = path1.stat().owner
+ group = path1.stat().group
+ x = path1.join('hello')
+ x.mksymlinkto('qlwkejqwlek')
+ try:
+ path1.chown(owner, group, rec=1)
+ finally:
+ x.remove(rec=0)
+
+ @failsonjython
+ def test_chown_identity_rec_mayfail(self, path1):
+ owner = path1.stat().owner
+ group = path1.stat().group
+ path1.chown(owner, group)
+
+
+class TestUnicodePy2Py3:
+ def test_join_ensure(self, tmpdir, monkeypatch):
+ if sys.version_info >= (3, 0) and "LANG" not in os.environ:
+ pytest.skip("cannot run test without locale")
+ x = py.path.local(tmpdir.strpath)
+ part = "hällo"
+ y = x.ensure(part)
+ assert x.join(part) == y
+
+ def test_listdir(self, tmpdir):
+ if sys.version_info >= (3, 0) and "LANG" not in os.environ:
+ pytest.skip("cannot run test without locale")
+ x = py.path.local(tmpdir.strpath)
+ part = "hällo"
+ y = x.ensure(part)
+ assert x.listdir(part)[0] == y
+
+ @pytest.mark.xfail(
+ reason="changing read/write might break existing usages")
+ def test_read_write(self, tmpdir):
+ x = tmpdir.join("hello")
+ part = py.builtin._totext("hällo", "utf8")
+ x.write(part)
+ assert x.read() == part
+ x.write(part.encode(sys.getdefaultencoding()))
+ assert x.read() == part.encode(sys.getdefaultencoding())
+
+
+class TestBinaryAndTextMethods:
+ def test_read_binwrite(self, tmpdir):
+ x = tmpdir.join("hello")
+ part = py.builtin._totext("hällo", "utf8")
+ part_utf8 = part.encode("utf8")
+ x.write_binary(part_utf8)
+ assert x.read_binary() == part_utf8
+ s = x.read_text(encoding="utf8")
+ assert s == part
+ assert py.builtin._istext(s)
+
+ def test_read_textwrite(self, tmpdir):
+ x = tmpdir.join("hello")
+ part = py.builtin._totext("hällo", "utf8")
+ part_utf8 = part.encode("utf8")
+ x.write_text(part, encoding="utf8")
+ assert x.read_binary() == part_utf8
+ assert x.read_text(encoding="utf8") == part
+
+ def test_default_encoding(self, tmpdir):
+ x = tmpdir.join("hello")
+        # Can't use UTF-8 here, as the default encoding (ASCII) doesn't support it
+ part = py.builtin._totext("hello", "ascii")
+ x.write_text(part, "ascii")
+ s = x.read_text("ascii")
+ assert s == part
+ assert type(s) == type(part)
diff --git a/third_party/python/py/testing/path/test_svnauth.py b/third_party/python/py/testing/path/test_svnauth.py
new file mode 100644
index 0000000000..654f033224
--- /dev/null
+++ b/third_party/python/py/testing/path/test_svnauth.py
@@ -0,0 +1,460 @@
+import py
+from py.path import SvnAuth
+import time
+import sys
+
+svnbin = py.path.local.sysfind('svn')
+
+
+def make_repo_auth(repo, userdata):
+ """ write config to repo
+
+ user information in userdata is used for auth
+ userdata has user names as keys, and a tuple (password, readwrite) as
+ values, where 'readwrite' is either 'r' or 'rw'
+ """
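+    # e.g. make_repo_auth(repo, {'johnny': ('foo', 'rw')}) gives user
+    # 'johnny' password 'foo' and read/write access (as used by Setup below).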
+ confdir = py.path.local(repo).join('conf')
+ confdir.join('svnserve.conf').write('''\
+[general]
+anon-access = none
+password-db = passwd
+authz-db = authz
+realm = TestRepo
+''')
+ authzdata = '[/]\n'
+ passwddata = '[users]\n'
+ for user in userdata:
+ authzdata += '%s = %s\n' % (user, userdata[user][1])
+ passwddata += '%s = %s\n' % (user, userdata[user][0])
+ confdir.join('authz').write(authzdata)
+ confdir.join('passwd').write(passwddata)
+
+def serve_bg(repopath):
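+    # Start 'svnserve -d' on the first free port in 10000-10009 and wait for
+    # its pid file to appear; returns (port, pid) or raises IOError on failure.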
+ pidfile = py.path.local(repopath).join('pid')
+ port = 10000
+ e = None
+ while port < 10010:
+ cmd = 'svnserve -d -T --listen-port=%d --pid-file=%s -r %s' % (
+ port, pidfile, repopath)
+ print(cmd)
+ try:
+ py.process.cmdexec(cmd)
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ else:
+ # XXX we assume here that the pid file gets written somewhere, I
+ # guess this should be relatively safe... (I hope, at least?)
+ counter = pid = 0
+ while counter < 10:
+ counter += 1
+ try:
+ pid = pidfile.read()
+ except py.error.ENOENT:
+ pass
+ if pid:
+ break
+ time.sleep(0.2)
+ return port, int(pid)
+ port += 1
+ raise IOError('could not start svnserve: %s' % (e,))
+
+class TestSvnAuth(object):
+ def test_basic(self):
+ auth = SvnAuth('foo', 'bar')
+ assert auth.username == 'foo'
+ assert auth.password == 'bar'
+ assert str(auth)
+
+ def test_makecmdoptions_uname_pw_makestr(self):
+ auth = SvnAuth('foo', 'bar')
+ assert auth.makecmdoptions() == '--username="foo" --password="bar"'
+
+ def test_makecmdoptions_quote_escape(self):
+ auth = SvnAuth('fo"o', '"ba\'r"')
+ assert auth.makecmdoptions() == '--username="fo\\"o" --password="\\"ba\'r\\""'
+
+ def test_makecmdoptions_no_cache_auth(self):
+ auth = SvnAuth('foo', 'bar', cache_auth=False)
+ assert auth.makecmdoptions() == ('--username="foo" --password="bar" '
+ '--no-auth-cache')
+
+ def test_makecmdoptions_no_interactive(self):
+ auth = SvnAuth('foo', 'bar', interactive=False)
+ assert auth.makecmdoptions() == ('--username="foo" --password="bar" '
+ '--non-interactive')
+
+ def test_makecmdoptions_no_interactive_no_cache_auth(self):
+ auth = SvnAuth('foo', 'bar', cache_auth=False,
+ interactive=False)
+ assert auth.makecmdoptions() == ('--username="foo" --password="bar" '
+ '--no-auth-cache --non-interactive')
+
+class svnwc_no_svn(py.path.svnwc):
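+    # Test double: records the svn command-line arguments instead of
+    # actually invoking svn.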
+ def __new__(cls, *args, **kwargs):
+ self = super(svnwc_no_svn, cls).__new__(cls, *args, **kwargs)
+ self.commands = []
+ return self
+
+ def _svn(self, *args):
+ self.commands.append(args)
+
+class TestSvnWCAuth(object):
+ def setup_method(self, meth):
+ if not svnbin:
+ py.test.skip("svn binary required")
+ self.auth = SvnAuth('user', 'pass', cache_auth=False)
+
+ def test_checkout(self):
+ wc = svnwc_no_svn('foo', auth=self.auth)
+ wc.checkout('url')
+ assert wc.commands[0][-1] == ('--username="user" --password="pass" '
+ '--no-auth-cache')
+
+ def test_commit(self):
+ wc = svnwc_no_svn('foo', auth=self.auth)
+ wc.commit('msg')
+ assert wc.commands[0][-1] == ('--username="user" --password="pass" '
+ '--no-auth-cache')
+
+ def test_checkout_no_cache_auth(self):
+ wc = svnwc_no_svn('foo', auth=self.auth)
+ wc.checkout('url')
+ assert wc.commands[0][-1] == ('--username="user" --password="pass" '
+ '--no-auth-cache')
+
+ def test_checkout_auth_from_constructor(self):
+ wc = svnwc_no_svn('foo', auth=self.auth)
+ wc.checkout('url')
+ assert wc.commands[0][-1] == ('--username="user" --password="pass" '
+ '--no-auth-cache')
+
+class svnurl_no_svn(py.path.svnurl):
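+    # Test double: records issued commands and returns canned output
+    # instead of actually invoking svn.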
+ cmdexec_output = 'test'
+ popen_output = 'test'
+ def __new__(cls, *args, **kwargs):
+ self = super(svnurl_no_svn, cls).__new__(cls, *args, **kwargs)
+ self.commands = []
+ return self
+
+ def _cmdexec(self, cmd):
+ self.commands.append(cmd)
+ return self.cmdexec_output
+
+ def _popen(self, cmd):
+ self.commands.append(cmd)
+ return self.popen_output
+
+class TestSvnURLAuth(object):
+ def setup_method(self, meth):
+ self.auth = SvnAuth('foo', 'bar')
+
+ def test_init(self):
+ u = svnurl_no_svn('http://foo.bar/svn')
+ assert u.auth is None
+
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ assert u.auth is self.auth
+
+ def test_new(self):
+ u = svnurl_no_svn('http://foo.bar/svn/foo', auth=self.auth)
+ new = u.new(basename='bar')
+ assert new.auth is self.auth
+ assert new.url == 'http://foo.bar/svn/bar'
+
+ def test_join(self):
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ new = u.join('foo')
+ assert new.auth is self.auth
+ assert new.url == 'http://foo.bar/svn/foo'
+
+ def test_listdir(self):
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ u.cmdexec_output = '''\
+ 1717 johnny 1529 Nov 04 14:32 LICENSE.txt
+ 1716 johnny 5352 Nov 04 14:28 README.txt
+'''
+ paths = u.listdir()
+ assert paths[0].auth is self.auth
+ assert paths[1].auth is self.auth
+ assert paths[0].basename == 'LICENSE.txt'
+
+ def test_info(self):
+ u = svnurl_no_svn('http://foo.bar/svn/LICENSE.txt', auth=self.auth)
+ def dirpath(self):
+ return self
+ u.cmdexec_output = '''\
+ 1717 johnny 1529 Nov 04 14:32 LICENSE.txt
+ 1716 johnny 5352 Nov 04 14:28 README.txt
+'''
+ org_dp = u.__class__.dirpath
+ u.__class__.dirpath = dirpath
+ try:
+ info = u.info()
+ finally:
+ u.dirpath = org_dp
+ assert info.size == 1529
+
+ def test_open(self):
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ foo = u.join('foo')
+ foo.check = lambda *args, **kwargs: True
+ ret = foo.open()
+ assert ret == 'test'
+ assert '--username="foo" --password="bar"' in foo.commands[0]
+
+ def test_dirpath(self):
+ u = svnurl_no_svn('http://foo.bar/svn/foo', auth=self.auth)
+ parent = u.dirpath()
+ assert parent.auth is self.auth
+
+ def test_mkdir(self):
+ u = svnurl_no_svn('http://foo.bar/svn/qweqwe', auth=self.auth)
+ assert not u.commands
+ u.mkdir(msg='created dir foo')
+ assert u.commands
+ assert '--username="foo" --password="bar"' in u.commands[0]
+
+ def test_copy(self):
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ u2 = svnurl_no_svn('http://foo.bar/svn2')
+ u.copy(u2, 'copied dir')
+ assert '--username="foo" --password="bar"' in u.commands[0]
+
+ def test_rename(self):
+ u = svnurl_no_svn('http://foo.bar/svn/foo', auth=self.auth)
+ u.rename('http://foo.bar/svn/bar', 'moved foo to bar')
+ assert '--username="foo" --password="bar"' in u.commands[0]
+
+ def test_remove(self):
+ u = svnurl_no_svn('http://foo.bar/svn/foo', auth=self.auth)
+ u.remove(msg='removing foo')
+ assert '--username="foo" --password="bar"' in u.commands[0]
+
+ def test_export(self):
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ target = py.path.local('/foo')
+ u.export(target)
+ assert '--username="foo" --password="bar"' in u.commands[0]
+
+ def test_log(self):
+ u = svnurl_no_svn('http://foo.bar/svn/foo', auth=self.auth)
+ u.popen_output = py.io.TextIO(py.builtin._totext('''\
+<?xml version="1.0"?>
+<log>
+<logentry revision="51381">
+<author>guido</author>
+<date>2008-02-11T12:12:18.476481Z</date>
+<msg>Creating branch to work on auth support for py.path.svn*.
+</msg>
+</logentry>
+</log>
+''', 'ascii'))
+ u.check = lambda *args, **kwargs: True
+ ret = u.log(10, 20, verbose=True)
+ assert '--username="foo" --password="bar"' in u.commands[0]
+ assert len(ret) == 1
+ assert int(ret[0].rev) == 51381
+ assert ret[0].author == 'guido'
+
+ def test_propget(self):
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ u.propget('foo')
+ assert '--username="foo" --password="bar"' in u.commands[0]
+
+def pytest_funcarg__setup(request):
+ return Setup(request)
+
+class Setup:
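+    # Creates a password-protected svn repository, serves it in the
+    # background via serve_bg() and kills the server when the test ends;
+    # requires the svn binary and the --runslowtests option.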
+ def __init__(self, request):
+ if not svnbin:
+ py.test.skip("svn binary required")
+ if not request.config.option.runslowtests:
+ py.test.skip('use --runslowtests to run these tests')
+
+ tmpdir = request.getfuncargvalue("tmpdir")
+ repodir = tmpdir.join("repo")
+ py.process.cmdexec('svnadmin create %s' % repodir)
+ if sys.platform == 'win32':
+ repodir = '/' + str(repodir).replace('\\', '/')
+ self.repo = py.path.svnurl("file://%s" % repodir)
+ if sys.platform == 'win32':
+ # remove trailing slash...
+ repodir = repodir[1:]
+ self.repopath = py.path.local(repodir)
+ self.temppath = tmpdir.mkdir("temppath")
+ self.auth = SvnAuth('johnny', 'foo', cache_auth=False,
+ interactive=False)
+ make_repo_auth(self.repopath, {'johnny': ('foo', 'rw')})
+ self.port, self.pid = serve_bg(self.repopath.dirpath())
+ # XXX caching is too global
+ py.path.svnurl._lsnorevcache._dict.clear()
+ request.addfinalizer(lambda: py.process.kill(self.pid))
+
+class TestSvnWCAuthFunctional:
+ def test_checkout_constructor_arg(self, setup):
+ wc = py.path.svnwc(setup.temppath, auth=setup.auth)
+ wc.checkout(
+ 'svn://localhost:%s/%s' % (setup.port, setup.repopath.basename))
+ assert wc.join('.svn').check()
+
+ def test_checkout_function_arg(self, setup):
+ wc = py.path.svnwc(setup.temppath, auth=setup.auth)
+ wc.checkout(
+ 'svn://localhost:%s/%s' % (setup.port, setup.repopath.basename))
+ assert wc.join('.svn').check()
+
+ def test_checkout_failing_non_interactive(self, setup):
+ auth = SvnAuth('johnny', 'bar', cache_auth=False,
+ interactive=False)
+ wc = py.path.svnwc(setup.temppath, auth)
+ py.test.raises(Exception,
+ ("wc.checkout('svn://localhost:%(port)s/%(repopath)s')" %
+ setup.__dict__))
+
+ def test_log(self, setup):
+ wc = py.path.svnwc(setup.temppath, setup.auth)
+ wc.checkout(
+ 'svn://localhost:%s/%s' % (setup.port, setup.repopath.basename))
+ foo = wc.ensure('foo.txt')
+ wc.commit('added foo.txt')
+ log = foo.log()
+ assert len(log) == 1
+ assert log[0].msg == 'added foo.txt'
+
+ def test_switch(self, setup):
+ import pytest
+ try:
+ import xdist
+ pytest.skip('#160: fails under xdist')
+ except ImportError:
+ pass
+ wc = py.path.svnwc(setup.temppath, auth=setup.auth)
+ svnurl = 'svn://localhost:%s/%s' % (setup.port, setup.repopath.basename)
+ wc.checkout(svnurl)
+ wc.ensure('foo', dir=True).ensure('foo.txt').write('foo')
+ wc.commit('added foo dir with foo.txt file')
+ wc.ensure('bar', dir=True)
+ wc.commit('added bar dir')
+ bar = wc.join('bar')
+ bar.switch(svnurl + '/foo')
+ assert bar.join('foo.txt')
+
+ def test_update(self, setup):
+ wc1 = py.path.svnwc(setup.temppath.ensure('wc1', dir=True),
+ auth=setup.auth)
+ wc2 = py.path.svnwc(setup.temppath.ensure('wc2', dir=True),
+ auth=setup.auth)
+ wc1.checkout(
+ 'svn://localhost:%s/%s' % (setup.port, setup.repopath.basename))
+ wc2.checkout(
+ 'svn://localhost:%s/%s' % (setup.port, setup.repopath.basename))
+ wc1.ensure('foo', dir=True)
+ wc1.commit('added foo dir')
+ wc2.update()
+ assert wc2.join('foo').check()
+
+ auth = SvnAuth('unknown', 'unknown', interactive=False)
+ wc2.auth = auth
+ py.test.raises(Exception, 'wc2.update()')
+
+ def test_lock_unlock_status(self, setup):
+ port = setup.port
+ wc = py.path.svnwc(setup.temppath, auth=setup.auth)
+ wc.checkout(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename,))
+ wc.ensure('foo', file=True)
+ wc.commit('added foo file')
+ foo = wc.join('foo')
+ foo.lock()
+ status = foo.status()
+ assert status.locked
+ foo.unlock()
+ status = foo.status()
+ assert not status.locked
+
+ auth = SvnAuth('unknown', 'unknown', interactive=False)
+ foo.auth = auth
+ py.test.raises(Exception, 'foo.lock()')
+ py.test.raises(Exception, 'foo.unlock()')
+
+ def test_diff(self, setup):
+ port = setup.port
+ wc = py.path.svnwc(setup.temppath, auth=setup.auth)
+ wc.checkout(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename,))
+ wc.ensure('foo', file=True)
+ wc.commit('added foo file')
+ wc.update()
+ rev = int(wc.status().rev)
+ foo = wc.join('foo')
+ foo.write('bar')
+ diff = foo.diff()
+ assert '\n+bar\n' in diff
+ foo.commit('added some content')
+ diff = foo.diff()
+ assert not diff
+ diff = foo.diff(rev=rev)
+ assert '\n+bar\n' in diff
+
+ auth = SvnAuth('unknown', 'unknown', interactive=False)
+ foo.auth = auth
+ py.test.raises(Exception, 'foo.diff(rev=rev)')
+
+class TestSvnURLAuthFunctional:
+ def test_listdir(self, setup):
+ port = setup.port
+ u = py.path.svnurl(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename),
+ auth=setup.auth)
+ u.ensure('foo')
+ paths = u.listdir()
+ assert len(paths) == 1
+ assert paths[0].auth is setup.auth
+
+ auth = SvnAuth('foo', 'bar', interactive=False)
+ u = py.path.svnurl(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename),
+ auth=auth)
+ py.test.raises(Exception, 'u.listdir()')
+
+ def test_copy(self, setup):
+ port = setup.port
+ u = py.path.svnurl(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename),
+ auth=setup.auth)
+ foo = u.mkdir('foo')
+ assert foo.check()
+ bar = u.join('bar')
+ foo.copy(bar)
+ assert bar.check()
+ assert bar.auth is setup.auth
+
+ auth = SvnAuth('foo', 'bar', interactive=False)
+ u = py.path.svnurl(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename),
+ auth=auth)
+ foo = u.join('foo')
+ bar = u.join('bar')
+ py.test.raises(Exception, 'foo.copy(bar)')
+
+ def test_write_read(self, setup):
+ port = setup.port
+ u = py.path.svnurl(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename),
+ auth=setup.auth)
+ foo = u.ensure('foo')
+ fp = foo.open()
+ try:
+ data = fp.read()
+ finally:
+ fp.close()
+ assert data == ''
+
+ auth = SvnAuth('foo', 'bar', interactive=False)
+ u = py.path.svnurl(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename),
+ auth=auth)
+ foo = u.join('foo')
+ py.test.raises(Exception, 'foo.open()')
+
+ # XXX rinse, repeat... :|
diff --git a/third_party/python/py/testing/path/test_svnurl.py b/third_party/python/py/testing/path/test_svnurl.py
new file mode 100644
index 0000000000..15fbea5047
--- /dev/null
+++ b/third_party/python/py/testing/path/test_svnurl.py
@@ -0,0 +1,95 @@
+import py
+from py._path.svnurl import InfoSvnCommand
+import datetime
+import time
+from svntestbase import CommonSvnTests
+
+def pytest_funcarg__path1(request):
+ repo, repourl, wc = request.getfuncargvalue("repowc1")
+ return py.path.svnurl(repourl)
+
+class TestSvnURLCommandPath(CommonSvnTests):
+ @py.test.mark.xfail
+ def test_load(self, path1):
+ super(TestSvnURLCommandPath, self).test_load(path1)
+
+ # the following two work on jython but not in local/svnwc
+ def test_listdir(self, path1):
+ super(TestSvnURLCommandPath, self).test_listdir(path1)
+ def test_visit_ignore(self, path1):
+ super(TestSvnURLCommandPath, self).test_visit_ignore(path1)
+
+ def test_svnurl_needs_arg(self, path1):
+ py.test.raises(TypeError, "py.path.svnurl()")
+
+ def test_svnurl_does_not_accept_None_either(self, path1):
+ py.test.raises(Exception, "py.path.svnurl(None)")
+
+ def test_svnurl_characters_simple(self, path1):
+ py.path.svnurl("svn+ssh://hello/world")
+
+ def test_svnurl_characters_at_user(self, path1):
+ py.path.svnurl("http://user@host.com/some/dir")
+
+ def test_svnurl_characters_at_path(self, path1):
+ py.test.raises(ValueError, 'py.path.svnurl("http://host.com/foo@bar")')
+
+ def test_svnurl_characters_colon_port(self, path1):
+ py.path.svnurl("http://host.com:8080/some/dir")
+
+ def test_svnurl_characters_tilde_end(self, path1):
+ py.path.svnurl("http://host.com/some/file~")
+
+ @py.test.mark.xfail("sys.platform == 'win32'")
+ def test_svnurl_characters_colon_path(self, path1):
+ # colons are allowed on win32, because they're part of the drive
+ # part of an absolute path... however, they shouldn't be allowed in
+ # other parts, I think
+ py.test.raises(ValueError, 'py.path.svnurl("http://host.com/foo:bar")')
+
+ def test_export(self, path1, tmpdir):
+ tmpdir = tmpdir.join("empty")
+ p = path1.export(tmpdir)
+ assert p == tmpdir # XXX should return None
+ n1 = [x.basename for x in tmpdir.listdir()]
+ n2 = [x.basename for x in path1.listdir()]
+ n1.sort()
+ n2.sort()
+ assert n1 == n2
+ assert not p.join('.svn').check()
+ rev = path1.mkdir("newdir")
+ tmpdir.remove()
+ assert not tmpdir.check()
+ path1.new(rev=1).export(tmpdir)
+ for p in tmpdir.listdir():
+ assert p.basename in n2
+
+class TestSvnInfoCommand:
+
+ def test_svn_1_2(self):
+ line = " 2256 hpk 165 Nov 24 17:55 __init__.py"
+ info = InfoSvnCommand(line)
+ now = datetime.datetime.now()
+ assert info.last_author == 'hpk'
+ assert info.created_rev == 2256
+ assert info.kind == 'file'
+ # we don't check for the year (2006), because that depends
+ # on the clock correctly being setup
+ assert time.gmtime(info.mtime)[1:6] == (11, 24, 17, 55, 0)
+ assert info.size == 165
+ assert info.time == info.mtime * 1000000
+
+ def test_svn_1_3(self):
+ line =" 4784 hpk 2 Jun 01 2004 __init__.py"
+ info = InfoSvnCommand(line)
+ assert info.last_author == 'hpk'
+ assert info.kind == 'file'
+
+ def test_svn_1_3_b(self):
+ line =" 74 autoadmi Oct 06 23:59 plonesolutions.com/"
+ info = InfoSvnCommand(line)
+ assert info.last_author == 'autoadmi'
+ assert info.kind == 'dir'
+
+def test_badchars():
+ py.test.raises(ValueError, "py.path.svnurl('http://host/tmp/@@@:')")
diff --git a/third_party/python/py/testing/path/test_svnwc.py b/third_party/python/py/testing/path/test_svnwc.py
new file mode 100644
index 0000000000..c643d9983f
--- /dev/null
+++ b/third_party/python/py/testing/path/test_svnwc.py
@@ -0,0 +1,557 @@
+import py
+import os, sys
+import pytest
+from py._path.svnwc import InfoSvnWCCommand, XMLWCStatus, parse_wcinfotime
+from py._path import svnwc as svncommon
+from svntestbase import CommonSvnTests
+
+
+pytestmark = pytest.mark.xfail(sys.platform.startswith('win'),
+ reason='#161 all tests in this file are failing on Windows',
+ run=False)
+
+
+def test_make_repo(path1, tmpdir):
+ repo = tmpdir.join("repo")
+ py.process.cmdexec('svnadmin create %s' % repo)
+ if sys.platform == 'win32':
+ repo = '/' + str(repo).replace('\\', '/')
+ repo = py.path.svnurl("file://%s" % repo)
+ wc = py.path.svnwc(tmpdir.join("wc"))
+ wc.checkout(repo)
+ assert wc.rev == 0
+ assert len(wc.listdir()) == 0
+ p = wc.join("a_file")
+ p.write("test file")
+ p.add()
+ rev = wc.commit("some test")
+ assert p.info().rev == 1
+ assert rev == 1
+ rev = wc.commit()
+ assert rev is None
+
+def pytest_funcarg__path1(request):
+ repo, repourl, wc = request.getfuncargvalue("repowc1")
+ return wc
+
+class TestWCSvnCommandPath(CommonSvnTests):
+ def test_status_attributes_simple(self, path1):
+ def assert_nochange(p):
+ s = p.status()
+ assert not s.modified
+ assert not s.prop_modified
+ assert not s.added
+ assert not s.deleted
+ assert not s.replaced
+
+ dpath = path1.join('sampledir')
+ assert_nochange(path1.join('sampledir'))
+ assert_nochange(path1.join('samplefile'))
+
+ def test_status_added(self, path1):
+ nf = path1.join('newfile')
+ nf.write('hello')
+ nf.add()
+ try:
+ s = nf.status()
+ assert s.added
+ assert not s.modified
+ assert not s.prop_modified
+ assert not s.replaced
+ finally:
+ nf.revert()
+
+ def test_status_change(self, path1):
+ nf = path1.join('samplefile')
+ try:
+ nf.write(nf.read() + 'change')
+ s = nf.status()
+ assert not s.added
+ assert s.modified
+ assert not s.prop_modified
+ assert not s.replaced
+ finally:
+ nf.revert()
+
+ def test_status_added_ondirectory(self, path1):
+ sampledir = path1.join('sampledir')
+ try:
+ t2 = sampledir.mkdir('t2')
+ t1 = t2.join('t1')
+ t1.write('test')
+ t1.add()
+ s = sampledir.status(rec=1)
+ # Comparing just the file names, because paths are unpredictable
+ # on Windows. (long vs. 8.3 paths)
+ assert t1.basename in [item.basename for item in s.added]
+ assert t2.basename in [item.basename for item in s.added]
+ finally:
+ t2.revert(rec=1)
+ t2.localpath.remove(rec=1)
+
+ def test_status_unknown(self, path1):
+ t1 = path1.join('un1')
+ try:
+ t1.write('test')
+ s = path1.status()
+ # Comparing just the file names, because paths are unpredictable
+ # on Windows. (long vs. 8.3 paths)
+ assert t1.basename in [item.basename for item in s.unknown]
+ finally:
+ t1.localpath.remove()
+
+ def test_status_unchanged(self, path1):
+ r = path1
+ s = path1.status(rec=1)
+ # Comparing just the file names, because paths are unpredictable
+ # on Windows. (long vs. 8.3 paths)
+ assert r.join('samplefile').basename in [item.basename
+ for item in s.unchanged]
+ assert r.join('sampledir').basename in [item.basename
+ for item in s.unchanged]
+ assert r.join('sampledir/otherfile').basename in [item.basename
+ for item in s.unchanged]
+
+ def test_status_update(self, path1):
+ # not a mark because the global "pytestmark" will end up overwriting a mark here
+ pytest.xfail("svn-1.7 has buggy 'status --xml' output")
+ r = path1
+ try:
+ r.update(rev=1)
+ s = r.status(updates=1, rec=1)
+ # Comparing just the file names, because paths are unpredictable
+ # on Windows. (long vs. 8.3 paths)
+ import pprint
+ pprint.pprint(s.allpath())
+ assert r.join('anotherfile').basename in [item.basename for
+ item in s.update_available]
+ #assert len(s.update_available) == 1
+ finally:
+ r.update()
+
+ def test_status_replaced(self, path1):
+ p = path1.join("samplefile")
+ p.remove()
+ p.ensure(dir=0)
+ try:
+ s = path1.status()
+ assert p.basename in [item.basename for item in s.replaced]
+ finally:
+ path1.revert(rec=1)
+
+ def test_status_ignored(self, path1):
+ try:
+ d = path1.join('sampledir')
+ p = py.path.local(d).join('ignoredfile')
+ p.ensure(file=True)
+ s = d.status()
+ assert [x.basename for x in s.unknown] == ['ignoredfile']
+ assert [x.basename for x in s.ignored] == []
+ d.propset('svn:ignore', 'ignoredfile')
+ s = d.status()
+ assert [x.basename for x in s.unknown] == []
+ assert [x.basename for x in s.ignored] == ['ignoredfile']
+ finally:
+ path1.revert(rec=1)
+
+ def test_status_conflict(self, path1, tmpdir):
+ wc = path1
+ wccopy = py.path.svnwc(tmpdir.join("conflict_copy"))
+ wccopy.checkout(wc.url)
+ p = wc.ensure('conflictsamplefile', file=1)
+ p.write('foo')
+ wc.commit('added conflictsamplefile')
+ wccopy.update()
+ assert wccopy.join('conflictsamplefile').check()
+ p.write('bar')
+ wc.commit('wrote some data')
+ wccopy.join('conflictsamplefile').write('baz')
+ wccopy.update(interactive=False)
+ s = wccopy.status()
+ assert [x.basename for x in s.conflict] == ['conflictsamplefile']
+
+ def test_status_external(self, path1, repowc2):
+ otherrepo, otherrepourl, otherwc = repowc2
+ d = path1.ensure('sampledir', dir=1)
+ try:
+ d.update()
+ d.propset('svn:externals', 'otherwc %s' % (otherwc.url,))
+ d.update()
+ s = d.status()
+ assert [x.basename for x in s.external] == ['otherwc']
+ assert 'otherwc' not in [x.basename for x in s.unchanged]
+ s = d.status(rec=1)
+ assert [x.basename for x in s.external] == ['otherwc']
+ assert 'otherwc' in [x.basename for x in s.unchanged]
+ finally:
+ path1.revert(rec=1)
+
+ def test_status_deleted(self, path1):
+ d = path1.ensure('sampledir', dir=1)
+ d.remove()
+ d.ensure(dir=1)
+ path1.commit()
+ d.ensure('deletefile', dir=0)
+ d.commit()
+ s = d.status()
+ assert 'deletefile' in [x.basename for x in s.unchanged]
+ assert not s.deleted
+ p = d.join('deletefile')
+ p.remove()
+ s = d.status()
+ assert 'deletefile' not in s.unchanged
+ assert [x.basename for x in s.deleted] == ['deletefile']
+
+ def test_status_noauthor(self, path1):
+ # testing for XML without author - this used to raise an exception
+ xml = '''\
+ <entry path="/tmp/pytest-23/wc">
+ <wc-status item="normal" props="none" revision="0">
+ <commit revision="0">
+ <date>2008-08-19T16:50:53.400198Z</date>
+ </commit>
+ </wc-status>
+ </entry>
+ '''
+ XMLWCStatus.fromstring(xml, path1)
+
+ def test_status_wrong_xml(self, path1):
+ # testing for XML without author - this used to raise an exception
+ xml = '<entry path="/home/jean/zope/venv/projectdb/parts/development-products/DataGridField">\n<wc-status item="incomplete" props="none" revision="784">\n</wc-status>\n</entry>'
+ st = XMLWCStatus.fromstring(xml, path1)
+ assert len(st.incomplete) == 1
+
+ def test_diff(self, path1):
+ p = path1 / 'anotherfile'
+ out = p.diff(rev=2)
+ assert out.find('hello') != -1
+
+ def test_blame(self, path1):
+ p = path1.join('samplepickle')
+ lines = p.blame()
+ assert sum([l[0] for l in lines]) == len(lines)
+ for l1, l2 in zip(p.readlines(), [l[2] for l in lines]):
+ assert l1 == l2
+ assert [l[1] for l in lines] == ['hpk'] * len(lines)
+ p = path1.join('samplefile')
+ lines = p.blame()
+ assert sum([l[0] for l in lines]) == len(lines)
+ for l1, l2 in zip(p.readlines(), [l[2] for l in lines]):
+ assert l1 == l2
+ assert [l[1] for l in lines] == ['hpk'] * len(lines)
+
+ def test_join_abs(self, path1):
+ s = str(path1.localpath)
+ n = path1.join(s, abs=1)
+ assert path1 == n
+
+ def test_join_abs2(self, path1):
+ assert path1.join('samplefile', abs=1) == path1.join('samplefile')
+
+ def test_str_gives_localpath(self, path1):
+ assert str(path1) == str(path1.localpath)
+
+ def test_versioned(self, path1):
+ assert path1.check(versioned=1)
+ # TODO: Why does my copy of svn think .svn is versioned?
+ #assert path1.join('.svn').check(versioned=0)
+ assert path1.join('samplefile').check(versioned=1)
+ assert not path1.join('notexisting').check(versioned=1)
+ notexisting = path1.join('hello').localpath
+ try:
+ notexisting.write("")
+ assert path1.join('hello').check(versioned=0)
+ finally:
+ notexisting.remove()
+
+ def test_listdir_versioned(self, path1):
+ assert path1.check(versioned=1)
+ p = path1.localpath.ensure("not_a_versioned_file")
+ l = [x.localpath
+ for x in path1.listdir(lambda x: x.check(versioned=True))]
+ assert p not in l
+
+ def test_nonversioned_remove(self, path1):
+ assert path1.check(versioned=1)
+ somefile = path1.join('nonversioned/somefile')
+ nonwc = py.path.local(somefile)
+ nonwc.ensure()
+ assert somefile.check()
+ assert not somefile.check(versioned=True)
+ somefile.remove() # this used to fail because it tried to 'svn rm'
+
+ def test_properties(self, path1):
+ try:
+ path1.propset('gaga', 'this')
+ assert path1.propget('gaga') == 'this'
+ # Comparing just the file names, because paths are unpredictable
+ # on Windows. (long vs. 8.3 paths)
+ assert path1.basename in [item.basename for item in
+ path1.status().prop_modified]
+ assert 'gaga' in path1.proplist()
+ assert path1.proplist()['gaga'] == 'this'
+
+ finally:
+ path1.propdel('gaga')
+
+ def test_proplist_recursive(self, path1):
+ s = path1.join('samplefile')
+ s.propset('gugu', 'that')
+ try:
+ p = path1.proplist(rec=1)
+ # Comparing just the file names, because paths are unpredictable
+ # on Windows. (long vs. 8.3 paths)
+ assert (path1 / 'samplefile').basename in [item.basename
+ for item in p]
+ finally:
+ s.propdel('gugu')
+
+ def test_long_properties(self, path1):
+ value = """
+ vadm:posix : root root 0100755
+ Properties on 'chroot/dns/var/bind/db.net.xots':
+ """
+ try:
+ path1.propset('gaga', value)
+ backvalue = path1.propget('gaga')
+ assert backvalue == value
+ #assert len(backvalue.split('\n')) == 1
+ finally:
+ path1.propdel('gaga')
+
+
+ def test_ensure(self, path1):
+ newpath = path1.ensure('a', 'b', 'c')
+ try:
+ assert newpath.check(exists=1, versioned=1)
+ newpath.write("hello")
+ newpath.ensure()
+ assert newpath.read() == "hello"
+ finally:
+ path1.join('a').remove(force=1)
+
+ def test_not_versioned(self, path1):
+ p = path1.localpath.mkdir('whatever')
+ f = path1.localpath.ensure('testcreatedfile')
+ try:
+ assert path1.join('whatever').check(versioned=0)
+ assert path1.join('testcreatedfile').check(versioned=0)
+ assert not path1.join('testcreatedfile').check(versioned=1)
+ finally:
+ p.remove(rec=1)
+ f.remove()
+
+ def test_lock_unlock(self, path1):
+ root = path1
+ somefile = root.join('somefile')
+ somefile.ensure(file=True)
+ # not yet added to repo
+ py.test.raises(Exception, 'somefile.lock()')
+ somefile.write('foo')
+ somefile.commit('test')
+ assert somefile.check(versioned=True)
+ somefile.lock()
+ try:
+ locked = root.status().locked
+ assert len(locked) == 1
+ assert locked[0].basename == somefile.basename
+ assert locked[0].dirpath().basename == somefile.dirpath().basename
+ #assert somefile.locked()
+ py.test.raises(Exception, 'somefile.lock()')
+ finally:
+ somefile.unlock()
+ #assert not somefile.locked()
+ locked = root.status().locked
+ assert locked == []
+ py.test.raises(Exception, 'somefile.unlock()')
+ somefile.remove()
+
+ def test_commit_nonrecursive(self, path1):
+ somedir = path1.join('sampledir')
+ somedir.mkdir("subsubdir")
+ somedir.propset('foo', 'bar')
+ status = somedir.status()
+ assert len(status.prop_modified) == 1
+ assert len(status.added) == 1
+
+ somedir.commit('non-recursive commit', rec=0)
+ status = somedir.status()
+ assert len(status.prop_modified) == 0
+ assert len(status.added) == 1
+
+ somedir.commit('recursive commit')
+ status = somedir.status()
+ assert len(status.prop_modified) == 0
+ assert len(status.added) == 0
+
+ def test_commit_return_value(self, path1):
+ testfile = path1.join('test.txt').ensure(file=True)
+ testfile.write('test')
+ rev = path1.commit('testing')
+ assert type(rev) == int
+
+ anotherfile = path1.join('another.txt').ensure(file=True)
+ anotherfile.write('test')
+ rev2 = path1.commit('testing more')
+ assert type(rev2) == int
+ assert rev2 == rev + 1
+
+ #def test_log(self, path1):
+ # l = path1.log()
+ # assert len(l) == 3 # might need to be upped if more tests are added
+
+class XTestWCSvnCommandPathSpecial:
+
+ rooturl = 'http://codespeak.net/svn/py.path/trunk/dist/py.path/test/data'
+ #def test_update_none_rev(self, path1):
+ # path = tmpdir.join('checkouttest')
+ # wcpath = newpath(xsvnwc=str(path), url=path1url)
+ # try:
+ # wcpath.checkout(rev=2100)
+ # wcpath.update()
+ # assert wcpath.info().rev > 2100
+ # finally:
+ # wcpath.localpath.remove(rec=1)
+
+def test_parse_wcinfotime():
+ assert (parse_wcinfotime('2006-05-30 20:45:26 +0200 (Tue, 30 May 2006)') ==
+ 1149021926)
+ assert (parse_wcinfotime('2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)') ==
+ 1067287394)
+
+class TestInfoSvnWCCommand:
+
+ def test_svn_1_2(self, path1):
+ output = """
+ Path: test_svnwc.py
+ Name: test_svnwc.py
+ URL: http://codespeak.net/svn/py/dist/py/path/svn/wccommand.py
+ Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
+ Revision: 28137
+ Node Kind: file
+ Schedule: normal
+ Last Changed Author: jan
+ Last Changed Rev: 27939
+ Last Changed Date: 2006-05-30 20:45:26 +0200 (Tue, 30 May 2006)
+ Text Last Updated: 2006-06-01 00:42:53 +0200 (Thu, 01 Jun 2006)
+ Properties Last Updated: 2006-05-23 11:54:59 +0200 (Tue, 23 May 2006)
+ Checksum: 357e44880e5d80157cc5fbc3ce9822e3
+ """
+ path = py.path.local(__file__).dirpath().chdir()
+ try:
+ info = InfoSvnWCCommand(output)
+ finally:
+ path.chdir()
+ assert info.last_author == 'jan'
+ assert info.kind == 'file'
+ assert info.mtime == 1149021926.0
+ assert info.url == 'http://codespeak.net/svn/py/dist/py/path/svn/wccommand.py'
+ assert info.time == 1149021926000000.0
+ assert info.rev == 28137
+
+
+ def test_svn_1_3(self, path1):
+ output = """
+ Path: test_svnwc.py
+ Name: test_svnwc.py
+ URL: http://codespeak.net/svn/py/dist/py/path/svn/wccommand.py
+ Repository Root: http://codespeak.net/svn
+ Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
+ Revision: 28124
+ Node Kind: file
+ Schedule: normal
+ Last Changed Author: jan
+ Last Changed Rev: 27939
+ Last Changed Date: 2006-05-30 20:45:26 +0200 (Tue, 30 May 2006)
+ Text Last Updated: 2006-06-02 23:46:11 +0200 (Fri, 02 Jun 2006)
+ Properties Last Updated: 2006-06-02 23:45:28 +0200 (Fri, 02 Jun 2006)
+ Checksum: 357e44880e5d80157cc5fbc3ce9822e3
+ """
+ path = py.path.local(__file__).dirpath().chdir()
+ try:
+ info = InfoSvnWCCommand(output)
+ finally:
+ path.chdir()
+ assert info.last_author == 'jan'
+ assert info.kind == 'file'
+ assert info.mtime == 1149021926.0
+ assert info.url == 'http://codespeak.net/svn/py/dist/py/path/svn/wccommand.py'
+ assert info.rev == 28124
+ assert info.time == 1149021926000000.0
+
+
+def test_characters_at():
+ py.test.raises(ValueError, "py.path.svnwc('/tmp/@@@:')")
+
+def test_characters_tilde():
+ py.path.svnwc('/tmp/test~')
+
+
+class TestRepo:
+ def test_trailing_slash_is_stripped(self, path1):
+ # XXX we need to test more normalizing properties
+ url = path1.join("/")
+ assert path1 == url
+
+ #def test_different_revs_compare_unequal(self, path1):
+ # newpath = path1.new(rev=1199)
+ # assert newpath != path1
+
+ def test_exists_svn_root(self, path1):
+ assert path1.check()
+
+ #def test_not_exists_rev(self, path1):
+ # url = path1.__class__(path1url, rev=500)
+ # assert url.check(exists=0)
+
+ #def test_nonexisting_listdir_rev(self, path1):
+ # url = path1.__class__(path1url, rev=500)
+ # raises(py.error.ENOENT, url.listdir)
+
+ #def test_newrev(self, path1):
+ # url = path1.new(rev=None)
+ # assert url.rev == None
+ # assert url.strpath == path1.strpath
+ # url = path1.new(rev=10)
+ # assert url.rev == 10
+
+ #def test_info_rev(self, path1):
+ # url = path1.__class__(path1url, rev=1155)
+ # url = url.join("samplefile")
+ # res = url.info()
+ # assert res.size > len("samplefile") and res.created_rev == 1155
+
+ # the following tests are easier if we have a path class
+ def test_repocache_simple(self, path1):
+ repocache = svncommon.RepoCache()
+ repocache.put(path1.strpath, 42)
+ url, rev = repocache.get(path1.join('test').strpath)
+ assert rev == 42
+ assert url == path1.strpath
+
+ def test_repocache_notimeout(self, path1):
+ repocache = svncommon.RepoCache()
+ repocache.timeout = 0
+ repocache.put(path1.strpath, path1.rev)
+ url, rev = repocache.get(path1.strpath)
+ assert rev == -1
+ assert url == path1.strpath
+
+ def test_repocache_outdated(self, path1):
+ repocache = svncommon.RepoCache()
+ repocache.put(path1.strpath, 42, timestamp=0)
+ url, rev = repocache.get(path1.join('test').strpath)
+ assert rev == -1
+ assert url == path1.strpath
+
+ def _test_getreporev(self):
+ """ this test runs so slow it's usually disabled """
+ old = svncommon.repositories.repos
+ try:
+ _repocache.clear()
+ root = path1.new(rev=-1)
+ url, rev = cache.repocache.get(root.strpath)
+ assert rev>=0
+ assert url == svnrepourl
+ finally:
+ repositories.repos = old
diff --git a/third_party/python/py/testing/process/__init__.py b/third_party/python/py/testing/process/__init__.py
new file mode 100644
index 0000000000..792d600548
--- /dev/null
+++ b/third_party/python/py/testing/process/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/third_party/python/py/testing/process/test_cmdexec.py b/third_party/python/py/testing/process/test_cmdexec.py
new file mode 100644
index 0000000000..98463d906d
--- /dev/null
+++ b/third_party/python/py/testing/process/test_cmdexec.py
@@ -0,0 +1,41 @@
+import py
+from py.process import cmdexec
+
+def exvalue():
+ import sys
+ return sys.exc_info()[1]
+
+
+class Test_exec_cmd:
+ def test_simple(self):
+ out = cmdexec('echo hallo')
+ assert out.strip() == 'hallo'
+ assert py.builtin._istext(out)
+
+ def test_simple_newline(self):
+ import sys
+ out = cmdexec(r"""%s -c "print ('hello')" """ % sys.executable)
+ assert out == 'hello\n'
+ assert py.builtin._istext(out)
+
+ def test_simple_error(self):
+ py.test.raises(cmdexec.Error, cmdexec, 'exit 1')
+
+ def test_simple_error_exact_status(self):
+ try:
+ cmdexec('exit 1')
+ except cmdexec.Error:
+ e = exvalue()
+ assert e.status == 1
+ assert py.builtin._istext(e.out)
+ assert py.builtin._istext(e.err)
+
+ def test_err(self):
+ try:
+ cmdexec('echoqweqwe123 hallo')
+ raise AssertionError("command succeeded but shouldn't")
+ except cmdexec.Error:
+ e = exvalue()
+ assert hasattr(e, 'err')
+ assert hasattr(e, 'out')
+ assert e.err or e.out
diff --git a/third_party/python/py/testing/process/test_forkedfunc.py b/third_party/python/py/testing/process/test_forkedfunc.py
new file mode 100644
index 0000000000..ae0d9ab7e6
--- /dev/null
+++ b/third_party/python/py/testing/process/test_forkedfunc.py
@@ -0,0 +1,173 @@
+import pytest
+import py, sys, os
+
+pytestmark = py.test.mark.skipif("not hasattr(os, 'fork')")
+
+
+def test_waitfinish_removes_tempdir():
+ ff = py.process.ForkedFunc(boxf1)
+ assert ff.tempdir.check()
+ ff.waitfinish()
+ assert not ff.tempdir.check()
+
+def test_tempdir_gets_gc_collected(monkeypatch):
+ monkeypatch.setattr(os, 'fork', lambda: os.getpid())
+ ff = py.process.ForkedFunc(boxf1)
+ assert ff.tempdir.check()
+ ff.__del__()
+ assert not ff.tempdir.check()
+
+def test_basic_forkedfunc():
+ result = py.process.ForkedFunc(boxf1).waitfinish()
+ assert result.out == "some out\n"
+ assert result.err == "some err\n"
+ assert result.exitstatus == 0
+ assert result.signal == 0
+ assert result.retval == 1
+
+def test_exitstatus():
+ def func():
+ os._exit(4)
+ result = py.process.ForkedFunc(func).waitfinish()
+ assert result.exitstatus == 4
+ assert result.signal == 0
+ assert not result.out
+ assert not result.err
+
+def test_exception_in_func():
+ def fun():
+ raise ValueError(42)
+ ff = py.process.ForkedFunc(fun)
+ result = ff.waitfinish()
+ assert result.exitstatus == ff.EXITSTATUS_EXCEPTION
+ assert result.err.find("ValueError: 42") != -1
+ assert result.signal == 0
+ assert not result.retval
+
+def test_forkedfunc_on_fds():
+ result = py.process.ForkedFunc(boxf2).waitfinish()
+ assert result.out == "someout"
+ assert result.err == "someerr"
+ assert result.exitstatus == 0
+ assert result.signal == 0
+ assert result.retval == 2
+
+def test_forkedfunc_on_fds_output():
+ result = py.process.ForkedFunc(boxf3).waitfinish()
+ assert result.signal == 11
+ assert result.out == "s"
+
+
+def test_forkedfunc_on_stdout():
+ def boxf3():
+ import sys
+ sys.stdout.write("hello\n")
+ os.kill(os.getpid(), 11)
+ result = py.process.ForkedFunc(boxf3).waitfinish()
+ assert result.signal == 11
+ assert result.out == "hello\n"
+
+def test_forkedfunc_signal():
+ result = py.process.ForkedFunc(boxseg).waitfinish()
+ assert result.retval is None
+ assert result.signal == 11
+
+def test_forkedfunc_huge_data():
+ result = py.process.ForkedFunc(boxhuge).waitfinish()
+ assert result.out
+ assert result.exitstatus == 0
+ assert result.signal == 0
+ assert result.retval == 3
+
+def test_box_seq():
+ # we run many boxes with huge data, just one after another
+ for i in range(50):
+ result = py.process.ForkedFunc(boxhuge).waitfinish()
+ assert result.out
+ assert result.exitstatus == 0
+ assert result.signal == 0
+ assert result.retval == 3
+
+def test_box_in_a_box():
+ def boxfun():
+ result = py.process.ForkedFunc(boxf2).waitfinish()
+ print (result.out)
+ sys.stderr.write(result.err + "\n")
+ return result.retval
+
+ result = py.process.ForkedFunc(boxfun).waitfinish()
+ assert result.out == "someout\n"
+ assert result.err == "someerr\n"
+ assert result.exitstatus == 0
+ assert result.signal == 0
+ assert result.retval == 2
+
+def test_kill_func_forked():
+ class A:
+ pass
+ info = A()
+ import time
+
+ def box_fun():
+ time.sleep(10) # we don't want to last forever here
+
+ ff = py.process.ForkedFunc(box_fun)
+ os.kill(ff.pid, 15)
+ result = ff.waitfinish()
+ assert result.signal == 15
+
+
+def test_hooks(monkeypatch):
+ def _boxed():
+ return 1
+
+ def _on_start():
+ sys.stdout.write("some out\n")
+ sys.stdout.flush()
+
+ def _on_exit():
+ sys.stderr.write("some err\n")
+ sys.stderr.flush()
+
+ result = py.process.ForkedFunc(_boxed, child_on_start=_on_start,
+ child_on_exit=_on_exit).waitfinish()
+ assert result.out == "some out\n"
+ assert result.err == "some err\n"
+ assert result.exitstatus == 0
+ assert result.signal == 0
+ assert result.retval == 1
+
+
+# ======================================================================
+# examples
+# ======================================================================
+#
+
+def boxf1():
+ sys.stdout.write("some out\n")
+ sys.stderr.write("some err\n")
+ return 1
+
+def boxf2():
+ os.write(1, "someout".encode('ascii'))
+ os.write(2, "someerr".encode('ascii'))
+ return 2
+
+def boxf3():
+ os.write(1, "s".encode('ascii'))
+ os.kill(os.getpid(), 11)
+
+def boxseg():
+ os.kill(os.getpid(), 11)
+
+def boxhuge():
+ s = " ".encode('ascii')
+ os.write(1, s * 10000)
+ os.write(2, s * 10000)
+ os.write(1, s * 10000)
+
+ os.write(1, s * 10000)
+ os.write(2, s * 10000)
+ os.write(2, s * 10000)
+ os.write(1, s * 10000)
+ return 3
diff --git a/third_party/python/py/testing/process/test_killproc.py b/third_party/python/py/testing/process/test_killproc.py
new file mode 100644
index 0000000000..b0d6e2f515
--- /dev/null
+++ b/third_party/python/py/testing/process/test_killproc.py
@@ -0,0 +1,18 @@
+import pytest
+import sys
+import py
+
+
+@pytest.mark.skipif("sys.platform.startswith('java')")
+def test_kill(tmpdir):
+ subprocess = pytest.importorskip("subprocess")
+ t = tmpdir.join("t.py")
+ t.write("import time ; time.sleep(100)")
+ proc = subprocess.Popen([sys.executable, str(t)])
+ assert proc.poll() is None # no return value yet
+ py.process.kill(proc.pid)
+ ret = proc.wait()
+ if sys.platform == "win32" and ret == 0:
+ pytest.skip("XXX on win32, subprocess.Popen().wait() on a killed "
+ "process does not yield return value != 0")
+ assert ret != 0
diff --git a/third_party/python/py/testing/root/__init__.py b/third_party/python/py/testing/root/__init__.py
new file mode 100644
index 0000000000..792d600548
--- /dev/null
+++ b/third_party/python/py/testing/root/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/third_party/python/py/testing/root/test_builtin.py b/third_party/python/py/testing/root/test_builtin.py
new file mode 100644
index 0000000000..a6f1a3c739
--- /dev/null
+++ b/third_party/python/py/testing/root/test_builtin.py
@@ -0,0 +1,179 @@
+import sys
+import types
+import py
+from py.builtin import set, frozenset, reversed, sorted
+
+def test_enumerate():
+ l = [0,1,2]
+ for i,x in enumerate(l):
+ assert i == x
+
+def test_any():
+ assert not py.builtin.any([0,False, None])
+ assert py.builtin.any([0,False, None,1])
+
+def test_all():
+ assert not py.builtin.all([True, 1, False])
+ assert py.builtin.all([True, 1, object])
+
+def test_BaseException():
+ assert issubclass(IndexError, py.builtin.BaseException)
+ assert issubclass(Exception, py.builtin.BaseException)
+ assert issubclass(KeyboardInterrupt, py.builtin.BaseException)
+
+ class MyRandomClass(object):
+ pass
+ assert not issubclass(MyRandomClass, py.builtin.BaseException)
+
+ assert py.builtin.BaseException.__module__ in ('exceptions', 'builtins')
+ assert Exception.__name__ == 'Exception'
+
+
+def test_GeneratorExit():
+ assert py.builtin.GeneratorExit.__module__ in ('exceptions', 'builtins')
+ assert issubclass(py.builtin.GeneratorExit, py.builtin.BaseException)
+
+def test_reversed():
+ reversed = py.builtin.reversed
+ r = reversed("hello")
+ assert iter(r) is r
+ s = "".join(list(r))
+ assert s == "olleh"
+ assert list(reversed(list(reversed("hello")))) == ['h','e','l','l','o']
+ py.test.raises(TypeError, reversed, reversed("hello"))
+
+def test_simple():
+ s = set([1, 2, 3, 4])
+ assert s == set([3, 4, 2, 1])
+ s1 = s.union(set([5, 6]))
+ assert 5 in s1
+ assert 1 in s1
+
+def test_frozenset():
+ s = set([frozenset([0, 1]), frozenset([1, 0])])
+ assert len(s) == 1
+
+def test_sorted():
+ if sorted == py.builtin.sorted:
+ return # don't test a real builtin
+ for s in [py.builtin.sorted]:
+ def test():
+ assert s([3, 2, 1]) == [1, 2, 3]
+ assert s([1, 2, 3], reverse=True) == [3, 2, 1]
+ l = s([1, 2, 3, 4, 5, 6], key=lambda x: x % 2)
+ assert l == [2, 4, 6, 1, 3, 5]
+ l = s([1, 2, 3, 4], cmp=lambda x, y: -cmp(x, y))
+ assert l == [4, 3, 2, 1]
+ l = s([1, 2, 3, 4], cmp=lambda x, y: -cmp(x, y),
+ key=lambda x: x % 2)
+ assert l == [1, 3, 2, 4]
+
+ def compare(x, y):
+ assert type(x) == str
+ assert type(y) == str
+ return cmp(x, y)
+ data = 'The quick Brown fox Jumped over The lazy Dog'.split()
+ s(data, cmp=compare, key=str.lower)
+ yield test
+
+
+def test_print_simple():
+ from py.builtin import print_
+ py.test.raises(TypeError, "print_(hello=3)")
+ f = py.io.TextIO()
+ print_("hello", "world", file=f)
+ s = f.getvalue()
+ assert s == "hello world\n"
+
+ f = py.io.TextIO()
+ print_("hello", end="", file=f)
+ s = f.getvalue()
+ assert s == "hello"
+
+ f = py.io.TextIO()
+ print_("xyz", "abc", sep="", end="", file=f)
+ s = f.getvalue()
+ assert s == "xyzabc"
+
+ class X:
+ def __repr__(self): return "rep"
+ f = py.io.TextIO()
+ print_(X(), file=f)
+ assert f.getvalue() == "rep\n"
+
+def test_execfile(tmpdir):
+ test_file = tmpdir.join("test.py")
+ test_file.write("x = y\ndef f(): pass")
+ ns = {"y" : 42}
+ py.builtin.execfile(str(test_file), ns)
+ assert ns["x"] == 42
+ assert py.code.getrawcode(ns["f"]).co_filename == str(test_file)
+ class A:
+ y = 3
+ x = 4
+ py.builtin.execfile(str(test_file))
+ assert A.x == 3
+
+def test_getfuncdict():
+ def f():
+ pass
+ f.x = 4
+ assert py.builtin._getfuncdict(f)["x"] == 4
+ assert py.builtin._getfuncdict(2) is None
+
+def test_callable():
+ class A: pass
+ assert py.builtin.callable(test_callable)
+ assert py.builtin.callable(A)
+ assert py.builtin.callable(list)
+ assert py.builtin.callable(id)
+ assert not py.builtin.callable(4)
+ assert not py.builtin.callable("hi")
+
+def test_totext():
+ py.builtin._totext("hello", "UTF-8")
+
+def test_bytes_text():
+ if sys.version_info[0] < 3:
+ assert py.builtin.text == unicode
+ assert py.builtin.bytes == str
+ else:
+ assert py.builtin.text == str
+ assert py.builtin.bytes == bytes
+
+def test_totext_badutf8():
+ # this was in printouts within the pytest testsuite
+ # totext would fail
+ if sys.version_info >= (3,):
+ errors = 'surrogateescape'
+ else: # old python has crappy error handlers
+ errors = 'replace'
+ py.builtin._totext("\xa6", "UTF-8", errors)
+
+def test_reraise():
+ from py.builtin import _reraise
+ try:
+ raise Exception()
+ except Exception:
+ cls, val, tb = sys.exc_info()
+ excinfo = py.test.raises(Exception, "_reraise(cls, val, tb)")
+
+def test_exec():
+ l = []
+ py.builtin.exec_("l.append(1)")
+ assert l == [1]
+ d = {}
+ py.builtin.exec_("x=4", d)
+ assert d['x'] == 4
+
+def test_tryimport():
+ py.test.raises(ImportError, py.builtin._tryimport, 'xqwe123')
+ x = py.builtin._tryimport('asldkajsdl', 'py')
+ assert x == py
+ x = py.builtin._tryimport('asldkajsdl', 'py.path')
+ assert x == py.path
+
+def test_getcode():
+ code = py.builtin._getcode(test_getcode)
+ assert isinstance(code, types.CodeType)
+ assert py.builtin._getcode(4) is None
diff --git a/third_party/python/py/testing/root/test_error.py b/third_party/python/py/testing/root/test_error.py
new file mode 100644
index 0000000000..7bfbef3bd4
--- /dev/null
+++ b/third_party/python/py/testing/root/test_error.py
@@ -0,0 +1,76 @@
+
+import py
+
+import errno
+import sys
+import subprocess
+
+
+def test_error_classes():
+ for name in errno.errorcode.values():
+ x = getattr(py.error, name)
+ assert issubclass(x, py.error.Error)
+ assert issubclass(x, EnvironmentError)
+
+
+def test_has_name():
+ assert py.error.__name__ == 'py.error'
+
+
+def test_picklability_issue1():
+ import pickle
+ e1 = py.error.ENOENT()
+ s = pickle.dumps(e1)
+ e2 = pickle.loads(s)
+ assert isinstance(e2, py.error.ENOENT)
+
+
+def test_unknown_error():
+ num = 3999
+ cls = py.error._geterrnoclass(num)
+ assert cls.__name__ == 'UnknownErrno%d' % (num,)
+ assert issubclass(cls, py.error.Error)
+ assert issubclass(cls, EnvironmentError)
+ cls2 = py.error._geterrnoclass(num)
+ assert cls is cls2
+
+
+def test_error_conversion_enotdir(testdir):
+ p = testdir.makepyfile("")
+ excinfo = py.test.raises(py.error.Error, py.error.checked_call, p.listdir)
+ assert isinstance(excinfo.value, EnvironmentError)
+ assert isinstance(excinfo.value, py.error.Error)
+ assert "ENOTDIR" in repr(excinfo.value)
+
+
+def test_checked_call_supports_kwargs(tmpdir):
+ import tempfile
+ py.error.checked_call(tempfile.mkdtemp, dir=str(tmpdir))
+
+
+def test_error_importable():
+ """Regression test for #179"""
+ subprocess.check_call(
+ [sys.executable, '-c', 'from py.error import ENOENT'])
+
+
+try:
+ import unittest
+ unittest.TestCase.assertWarns
+except (ImportError, AttributeError):
+ pass # required interface not available
+else:
+ import sys
+ import warnings
+
+ class Case(unittest.TestCase):
+ def test_assert_warns(self):
+ # Clear everything "py.*" from sys.modules and re-import py
+ # as a fresh start
+ for mod in tuple(sys.modules.keys()):
+ if mod and (mod == 'py' or mod.startswith('py.')):
+ del sys.modules[mod]
+ __import__('py')
+
+ with self.assertWarns(UserWarning):
+ warnings.warn('this should work')
diff --git a/third_party/python/py/testing/root/test_py_imports.py b/third_party/python/py/testing/root/test_py_imports.py
new file mode 100644
index 0000000000..31fe6ead81
--- /dev/null
+++ b/third_party/python/py/testing/root/test_py_imports.py
@@ -0,0 +1,71 @@
+import py
+import sys
+
+
+@py.test.mark.parametrize('name', [x for x in dir(py) if x[0] != '_'])
+def test_dir(name):
+ obj = getattr(py, name)
+ if hasattr(obj, '__map__'): # isinstance(obj, Module):
+ keys = dir(obj)
+ assert len(keys) > 0
+ print (obj.__map__)
+ for name in list(obj.__map__):
+ assert hasattr(obj, name), (obj, name)
+
+
+def test_virtual_module_identity():
+ from py import path as path1
+ from py import path as path2
+ assert path1 is path2
+ from py.path import local as local1
+ from py.path import local as local2
+ assert local1 is local2
+
+
+def test_importall():
+ base = py._pydir
+ nodirs = [
+ ]
+ if sys.version_info >= (3, 0):
+ nodirs.append(base.join('_code', '_assertionold.py'))
+ else:
+ nodirs.append(base.join('_code', '_assertionnew.py'))
+
+ def recurse(p):
+ return p.check(dotfile=0) and p.basename != "attic"
+
+ for p in base.visit('*.py', recurse):
+ if p.basename == '__init__.py':
+ continue
+ relpath = p.new(ext='').relto(base)
+ if base.sep in relpath: # not py/*.py itself
+ for x in nodirs:
+ if p == x or p.relto(x):
+ break
+ else:
+ relpath = relpath.replace(base.sep, '.')
+ modpath = 'py.%s' % relpath
+ try:
+ check_import(modpath)
+ except py.test.skip.Exception:
+ pass
+
+
+def check_import(modpath):
+ py.builtin.print_("checking import", modpath)
+ assert __import__(modpath)
+
+
+def test_star_import():
+ exec("from py import *")
+
+
+def test_all_resolves():
+ seen = py.builtin.set([py])
+ lastlength = None
+ while len(seen) != lastlength:
+ lastlength = len(seen)
+ for item in py.builtin.frozenset(seen):
+ for value in item.__dict__.values():
+ if isinstance(value, type(py.test)):
+ seen.add(value)
diff --git a/third_party/python/py/testing/root/test_std.py b/third_party/python/py/testing/root/test_std.py
new file mode 100644
index 0000000000..143556a055
--- /dev/null
+++ b/third_party/python/py/testing/root/test_std.py
@@ -0,0 +1,13 @@
+
+import py
+
+def test_os():
+ import os
+ assert py.std.os is os
+
+def test_import_error_converts_to_attributeerror():
+ py.test.raises(AttributeError, "py.std.xyzalskdj")
+
+def test_std_gets_it():
+ for x in py.std.sys.modules:
+ assert x in py.std.__dict__
diff --git a/third_party/python/py/testing/root/test_xmlgen.py b/third_party/python/py/testing/root/test_xmlgen.py
new file mode 100644
index 0000000000..fc0e82665f
--- /dev/null
+++ b/third_party/python/py/testing/root/test_xmlgen.py
@@ -0,0 +1,146 @@
+
+import py
+from py._xmlgen import unicode, html, raw
+import sys
+
+class ns(py.xml.Namespace):
+ pass
+
+def test_escape():
+ uvalue = py.builtin._totext('\xc4\x85\xc4\x87\n\xe2\x82\xac\n', 'utf-8')
+ class A:
+ def __unicode__(self):
+ return uvalue
+ def __str__(self):
+ x = self.__unicode__()
+ if sys.version_info[0] < 3:
+ return x.encode('utf-8')
+ return x
+ y = py.xml.escape(uvalue)
+ assert y == uvalue
+ x = py.xml.escape(A())
+ assert x == uvalue
+ if sys.version_info[0] < 3:
+ assert isinstance(x, unicode)
+ assert isinstance(y, unicode)
+ y = py.xml.escape(uvalue.encode('utf-8'))
+ assert y == uvalue
+
+
+def test_tag_with_text():
+ x = ns.hello("world")
+ u = unicode(x)
+ assert u == "<hello>world</hello>"
+
+def test_class_identity():
+ assert ns.hello is ns.hello
+
+def test_tag_with_text_and_attributes():
+ x = ns.some(name="hello", value="world")
+ assert x.attr.name == 'hello'
+ assert x.attr.value == 'world'
+ u = unicode(x)
+ assert u == '<some name="hello" value="world"/>'
+
+def test_tag_with_subclassed_attr_simple():
+ class my(ns.hello):
+ class Attr(ns.hello.Attr):
+ hello="world"
+ x = my()
+ assert x.attr.hello == 'world'
+ assert unicode(x) == '<my hello="world"/>'
+
+def test_tag_with_raw_attr():
+ x = html.object(data=raw('&'))
+ assert unicode(x) == '<object data="&"></object>'
+
+def test_tag_nested():
+ x = ns.hello(ns.world())
+ unicode(x) # triggers parentifying
+ assert x[0].parent is x
+ u = unicode(x)
+ assert u == '<hello><world/></hello>'
+
+def test_list_nested():
+ x = ns.hello([ns.world()]) #pass in a list here
+ u = unicode(x)
+ assert u == '<hello><world/></hello>'
+
+def test_tag_xmlname():
+ class my(ns.hello):
+ xmlname = 'world'
+ u = unicode(my())
+ assert u == '<world/>'
+
+def test_tag_with_text_entity():
+ x = ns.hello('world & rest')
+ u = unicode(x)
+ assert u == "<hello>world &amp; rest</hello>"
+
+def test_tag_with_text_and_attributes_entity():
+ x = ns.some(name="hello & world")
+ assert x.attr.name == "hello & world"
+ u = unicode(x)
+ assert u == '<some name="hello &amp; world"/>'
+
+def test_raw():
+ x = ns.some(py.xml.raw("<p>literal</p>"))
+ u = unicode(x)
+ assert u == "<some><p>literal</p></some>"
+
+
+def test_html_name_stickyness():
+ class my(html.p):
+ pass
+ x = my("hello")
+ assert unicode(x) == '<p>hello</p>'
+
+def test_stylenames():
+ class my:
+ class body(html.body):
+ style = html.Style(font_size = "12pt")
+ u = unicode(my.body())
+ assert u == '<body style="font-size: 12pt"></body>'
+
+def test_class_None():
+ t = html.body(class_=None)
+ u = unicode(t)
+ assert u == '<body></body>'
+
+def test_alternating_style():
+ alternating = (
+ html.Style(background="white"),
+ html.Style(background="grey"),
+ )
+ class my(html):
+ class li(html.li):
+ def style(self):
+ i = self.parent.index(self)
+ return alternating[i%2]
+ style = property(style)
+
+ x = my.ul(
+ my.li("hello"),
+ my.li("world"),
+ my.li("42"))
+ u = unicode(x)
+ assert u == ('<ul><li style="background: white">hello</li>'
+ '<li style="background: grey">world</li>'
+ '<li style="background: white">42</li>'
+ '</ul>')
+
+def test_singleton():
+ h = html.head(html.link(href="foo"))
+ assert unicode(h) == '<head><link href="foo"/></head>'
+
+ h = html.head(html.script(src="foo"))
+ assert unicode(h) == '<head><script src="foo"></script></head>'
+
+def test_inline():
+ h = html.div(html.span('foo'), html.span('bar'))
+ assert (h.unicode(indent=2) ==
+ '<div><span>foo</span><span>bar</span></div>')
+
+def test_object_tags():
+ o = html.object(html.object())
+ assert o.unicode(indent=0) == '<object><object></object></object>'
diff --git a/third_party/python/py/tox.ini b/third_party/python/py/tox.ini
new file mode 100644
index 0000000000..d5f362ae7c
--- /dev/null
+++ b/third_party/python/py/tox.ini
@@ -0,0 +1,33 @@
+[tox]
+envlist=py{27,34,35,36}-pytest{29,30,31}
+
+[testenv]
+changedir=testing
+commands=
+ pip install -U .. # handle the install order fallout since pytest depends on pip
+ py.test --confcutdir=.. -rfsxX --junitxml={envlogdir}/junit-{envname}.xml []
+deps=
+ attrs
+ pytest29: pytest~=2.9.0
+ pytest30: pytest~=3.0.0
+ pytest31: pytest~=3.1.0
+
+[testenv:py27-xdist]
+basepython=python2.7
+deps=
+ pytest~=2.9.0
+ pytest-xdist<=1.16.0
+commands=
+ pip install -U .. # handle the install order fallout since pytest depends on pip
+ py.test -n3 -rfsxX --confcutdir=.. --runslowtests \
+ --junitxml={envlogdir}/junit-{envname}.xml []
+
+[testenv:jython]
+changedir=testing
+commands=
+ {envpython} -m pip install -U .. # handle the install order fallout since pytest depends on pip
+ {envpython} -m pytest --confcutdir=.. -rfsxX --junitxml={envlogdir}/junit-{envname}0.xml {posargs:io_ code}
+
+[pytest]
+rsyncdirs = conftest.py py doc testing
+addopts = -ra
diff --git a/third_party/python/pyasn1-modules/CHANGES.txt b/third_party/python/pyasn1-modules/CHANGES.txt
new file mode 100644
index 0000000000..1cc789f7dc
--- /dev/null
+++ b/third_party/python/pyasn1-modules/CHANGES.txt
@@ -0,0 +1,124 @@
+
+Revision 0.1.5, released 10-10-2017
+-----------------------------------
+
+- OCSP response blob fixed in test
+- Fixed wrong OCSP ResponderID components tagging
+
+Revision 0.1.4, released 07-09-2017
+-----------------------------------
+
+- Typo fixed in the dependency spec
+
+Revision 0.1.3, released 07-09-2017
+-----------------------------------
+
+- Apparently, pip>=1.5.6 is still widely used and it is not PEP440
+ compliant. Had to replace the `~=` version dependency spec with a
+ sequence of simple comparisons to remain compatible with the aging pip.
+
+Revision 0.1.2, released 07-09-2017
+-----------------------------------
+
+- Pinned to pyasn1 ~0.3.4
+
+Revision 0.1.1, released 27-08-2017
+-----------------------------------
+
+- Tests refactored into proper unit tests
+- pem.readBase64fromText() convenience function added
+- Pinned to pyasn1 0.3.3
+
+Revision 0.0.11, released 04-08-2017
+------------------------------------
+
+- Fixed typo in ASN.1 definitions at rfc2315.py
+
+Revision 0.0.10, released 27-07-2017
+------------------------------------
+
+* Fixed SequenceOf initializer to pass now-mandatory componentType
+ keyword argument (since pyasn1 0.3.1)
+* Temporarily fixed recursive ASN.1 type definition to work with
+ pyasn1 0.3.1+. This is going to be fixed properly shortly.
+
+Revision 0.0.9, released 01-06-2017
+-----------------------------------
+
+* More CRL data structures added (RFC3279)
+* Added X.509 certificate extensions map
+* Added X.509 attribute type map
+* Fix to __doc__ use in setup.py to make -O0 installation mode working
+* Copyright added to source files
+* More PEP-8'ing done on the code
+* Author's e-mail changed
+
+Revision 0.0.8, released 28-09-2015
+-----------------------------------
+
+- Wheel distribution format now supported
+- Fix to misspelled rfc2459.id_at_sutname variable
+- Fix to misspelled rfc2459.NameConstraints component tag ID
+- Fix to misspelled rfc2459.GeneralSubtree component default status
+
+Revision 0.0.7, released 01-08-2015
+-----------------------------------
+
+- Extensions added to text files, CVS attic flushed.
+- Fix to rfc2459.BasicConstraints syntax.
+
+Revision 0.0.6, released 21-06-2015
+-----------------------------------
+
+- Typo fix to id_kp_serverAuth object value
+- A test case for indefinite length encoding eliminated as it's
+ forbidden in DER.
+
+Revision 0.0.5
+--------------
+
+- License updated to vanilla BSD 2-Clause to ease package use
+ (http://opensource.org/licenses/BSD-2-Clause).
+- Missing components added to rfc4210.PKIBody.
+- Fix to rfc2459.CRLDistPointsSyntax typo.
+- Fix to rfc2511.CertReqMsg typo.
+
+Revision 0.0.4
+--------------
+
+- CMP structures (RFC4210), cmpdump.py tool and test case added.
+- SNMPv2c Message syntax (RFC1901) properly defined.
+- Package version established in form of __init__.__version__
+ which is in-sync with distutils.
+- Package meta information and classifiers updated.
+
+Revision 0.0.3
+--------------
+
+- Text cases implemented
+- X.509 CRMF structures (RFC2511) and crmfdump.py tool added
+- X.509 CRL structures and crldump.py tool added
+- PKCS#10 structures and pkcs10dump.py tool added
+- PKCS#8 structures and pkcs8dump.py tool added
+- PKCS#1 (rfc3447) structures added
+- OCSP request & response dumping tool added
+- SNMPv2c & SNMPv3/USM structures added
+- keydump.py moved into pkcs1dump.py
+- PEM files read function generalized to be used more universally.
+- complete PKIX1 '88 code implemented at rfc2459.py
+
+
+Revision 0.0.2
+--------------
+
+- Require pyasn1 >= 0.1.1
+- Fixes towards Py3K compatibility
+ + use either of existing urllib module
+ + adopt to the new bytes type
+ + print operator is now a function
+ + new exception syntax
+
+Revision 0.0.1a
+---------------
+
+- Initial revision, most code carried from pyasn1 examples.
diff --git a/third_party/python/pyasn1-modules/LICENSE.txt b/third_party/python/pyasn1-modules/LICENSE.txt
new file mode 100644
index 0000000000..02b45c430c
--- /dev/null
+++ b/third_party/python/pyasn1-modules/LICENSE.txt
@@ -0,0 +1,24 @@
+Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/pyasn1-modules/MANIFEST.in b/third_party/python/pyasn1-modules/MANIFEST.in
new file mode 100644
index 0000000000..57135d7cec
--- /dev/null
+++ b/third_party/python/pyasn1-modules/MANIFEST.in
@@ -0,0 +1,4 @@
+include *.txt *.md
+recursive-include tools *.py
+recursive-include tests *.py
+prune doc/build
diff --git a/third_party/python/pyasn1-modules/PKG-INFO b/third_party/python/pyasn1-modules/PKG-INFO
new file mode 100644
index 0000000000..5b88f04408
--- /dev/null
+++ b/third_party/python/pyasn1-modules/PKG-INFO
@@ -0,0 +1,35 @@
+Metadata-Version: 1.1
+Name: pyasn1-modules
+Version: 0.1.5
+Summary: A collection of ASN.1-based protocols modules.
+Home-page: https://github.com/etingof/pyasn1-modules
+Author: Ilya Etingof <etingof@gmail.com>
+Author-email: etingof@gmail.com
+License: BSD
+Description: A collection of ASN.1 modules expressed in form of pyasn1 classes. Includes protocols PDUs definition (SNMP, LDAP etc.) and various data structures (X.509, PKCS etc.).
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Information Technology
+Classifier: Intended Audience :: System Administrators
+Classifier: Intended Audience :: Telecommunications Industry
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Topic :: Communications
+Classifier: Topic :: System :: Monitoring
+Classifier: Topic :: System :: Networking :: Monitoring
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff --git a/third_party/python/pyasn1-modules/README.md b/third_party/python/pyasn1-modules/README.md
new file mode 100644
index 0000000000..7fd77d6d1c
--- /dev/null
+++ b/third_party/python/pyasn1-modules/README.md
@@ -0,0 +1,30 @@
+
+ASN.1 modules for Python
+------------------------
+[![PyPI](https://img.shields.io/pypi/v/pyasn1-modules.svg?maxAge=2592000)](https://pypi.python.org/pypi/pyasn1-modules)
+[![Python Versions](https://img.shields.io/pypi/pyversions/pyasn1-modules.svg)](https://pypi.python.org/pypi/pyasn1-modules/)
+[![Build status](https://travis-ci.org/etingof/pyasn1-modules.svg?branch=master)](https://secure.travis-ci.org/etingof/pyasn1-modules)
+[![Coverage Status](https://img.shields.io/codecov/c/github/etingof/pyasn1-modules.svg)](https://codecov.io/github/etingof/pyasn1-modules/)
+[![GitHub license](https://img.shields.io/badge/license-BSD-blue.svg)](https://raw.githubusercontent.com/etingof/pyasn1-modules/master/LICENSE.txt)
+
+This is a small but growing collection of
+[ASN.1](https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-X.208-198811-W!!PDF-E&type=items)
+data structures expressed in Python terms using [pyasn1](https://github.com/etingof/pyasn1) data model.
+
+If the ASN.1 module you need is not present in this collection, try the
+[Asn1ate](https://github.com/kimgr/asn1ate) tool, which compiles ASN.1 documents
+into pyasn1 code.
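+
+As a minimal usage sketch (the `cert.pem` file name is only a placeholder), a
+module from this collection is typically paired with the pyasn1 codecs along
+these lines:
+
+```python
+from pyasn1.codec.der import decoder
+from pyasn1_modules import pem, rfc2459
+
+# read the first PEM-armoured certificate from a file and decode it into
+# the X.509 Certificate structure defined in rfc2459
+with open('cert.pem') as pem_file:
+    idx, substrate = pem.readPemBlocksFromFile(
+        pem_file,
+        ('-----BEGIN CERTIFICATE-----', '-----END CERTIFICATE-----'))
+
+certificate, rest = decoder.decode(substrate, asn1Spec=rfc2459.Certificate())
+print(certificate.prettyPrint())
+```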
+
+Feedback
+--------
+
+If something does not work as expected, try browsing pyasn1
+[mailing list archives](https://sourceforge.net/p/pyasn1/mailman/pyasn1-users/)
+or post your question
+[to Stack Overflow](http://stackoverflow.com/questions/ask).
+If you want to contribute ASN.1 modules you have converted into pyasn1,
+please send me a pull request.
+
+Copyright (c) 2005-2017, [Ilya Etingof](mailto:etingof@gmail.com).
+All rights reserved.
+
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/PKG-INFO b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/PKG-INFO
new file mode 100644
index 0000000000..5b88f04408
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/PKG-INFO
@@ -0,0 +1,35 @@
+Metadata-Version: 1.1
+Name: pyasn1-modules
+Version: 0.1.5
+Summary: A collection of ASN.1-based protocols modules.
+Home-page: https://github.com/etingof/pyasn1-modules
+Author: Ilya Etingof <etingof@gmail.com>
+Author-email: etingof@gmail.com
+License: BSD
+Description: A collection of ASN.1 modules expressed in form of pyasn1 classes. Includes protocols PDUs definition (SNMP, LDAP etc.) and various data structures (X.509, PKCS etc.).
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Information Technology
+Classifier: Intended Audience :: System Administrators
+Classifier: Intended Audience :: Telecommunications Industry
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Topic :: Communications
+Classifier: Topic :: System :: Monitoring
+Classifier: Topic :: System :: Networking :: Monitoring
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/SOURCES.txt b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..15c3110f09
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/SOURCES.txt
@@ -0,0 +1,66 @@
+CHANGES.txt
+LICENSE.txt
+MANIFEST.in
+README.md
+requirements.txt
+setup.cfg
+setup.py
+pyasn1_modules/__init__.py
+pyasn1_modules/pem.py
+pyasn1_modules/rfc1155.py
+pyasn1_modules/rfc1157.py
+pyasn1_modules/rfc1901.py
+pyasn1_modules/rfc1902.py
+pyasn1_modules/rfc1905.py
+pyasn1_modules/rfc2251.py
+pyasn1_modules/rfc2314.py
+pyasn1_modules/rfc2315.py
+pyasn1_modules/rfc2437.py
+pyasn1_modules/rfc2459.py
+pyasn1_modules/rfc2511.py
+pyasn1_modules/rfc2560.py
+pyasn1_modules/rfc3279.py
+pyasn1_modules/rfc3280.py
+pyasn1_modules/rfc3281.py
+pyasn1_modules/rfc3412.py
+pyasn1_modules/rfc3414.py
+pyasn1_modules/rfc3447.py
+pyasn1_modules/rfc3852.py
+pyasn1_modules/rfc4210.py
+pyasn1_modules/rfc4211.py
+pyasn1_modules/rfc5208.py
+pyasn1_modules/rfc5280.py
+pyasn1_modules/rfc5652.py
+pyasn1_modules/rfc6402.py
+pyasn1_modules.egg-info/PKG-INFO
+pyasn1_modules.egg-info/SOURCES.txt
+pyasn1_modules.egg-info/dependency_links.txt
+pyasn1_modules.egg-info/requires.txt
+pyasn1_modules.egg-info/top_level.txt
+pyasn1_modules.egg-info/zip-safe
+tests/__init__.py
+tests/__main__.py
+tests/test_rfc2314.py
+tests/test_rfc2315.py
+tests/test_rfc2437.py
+tests/test_rfc2459.py
+tests/test_rfc2511.py
+tests/test_rfc2560.py
+tests/test_rfc4210.py
+tests/test_rfc5208.py
+tests/test_rfc5280.py
+tests/test_rfc5652.py
+tools/cmcdump.py
+tools/cmpdump.py
+tools/crldump.py
+tools/crmfdump.py
+tools/ocspclient.py
+tools/ocspreqdump.py
+tools/ocsprspdump.py
+tools/pkcs10dump.py
+tools/pkcs1dump.py
+tools/pkcs7dump.py
+tools/pkcs8dump.py
+tools/snmpget.py
+tools/x509dump-rfc5280.py
+tools/x509dump.py \ No newline at end of file
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/dependency_links.txt b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/requires.txt b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/requires.txt
new file mode 100644
index 0000000000..01d237c82f
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/requires.txt
@@ -0,0 +1 @@
+pyasn1>=0.3.4,<0.4.0
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/top_level.txt b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/top_level.txt
new file mode 100644
index 0000000000..9dad8496ee
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/top_level.txt
@@ -0,0 +1 @@
+pyasn1_modules
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/zip-safe b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/zip-safe
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules.egg-info/zip-safe
@@ -0,0 +1 @@
+
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/__init__.py b/third_party/python/pyasn1-modules/pyasn1_modules/__init__.py
new file mode 100644
index 0000000000..ba164a2c95
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/__init__.py
@@ -0,0 +1,2 @@
+# http://www.python.org/dev/peps/pep-0396/
+__version__ = '0.1.5'
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/pem.py b/third_party/python/pyasn1-modules/pyasn1_modules/pem.py
new file mode 100644
index 0000000000..9f16308a1c
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/pem.py
@@ -0,0 +1,65 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+import base64
+import sys
+
+stSpam, stHam, stDump = 0, 1, 2
+
+
+# The markers parameter is a sequence of ('start1', 'stop1'), ('start2', 'stop2'), ... pairs.
+# Returns a (marker-index, substrate) tuple.
+def readPemBlocksFromFile(fileObj, *markers):
+ startMarkers = dict(map(lambda x: (x[1], x[0]),
+ enumerate(map(lambda y: y[0], markers))))
+ stopMarkers = dict(map(lambda x: (x[1], x[0]),
+ enumerate(map(lambda y: y[1], markers))))
+ idx = -1
+ substrate = ''
+ certLines = []
+ state = stSpam
+ while True:
+ certLine = fileObj.readline()
+ if not certLine:
+ break
+ certLine = certLine.strip()
+ if state == stSpam:
+ if certLine in startMarkers:
+ certLines = []
+ idx = startMarkers[certLine]
+ state = stHam
+ continue
+ if state == stHam:
+ if certLine in stopMarkers and stopMarkers[certLine] == idx:
+ state = stDump
+ else:
+ certLines.append(certLine)
+ if state == stDump:
+ if sys.version_info[0] <= 2:
+ substrate = ''.join([base64.b64decode(x) for x in certLines])
+ else:
+ substrate = ''.encode().join([base64.b64decode(x.encode()) for x in certLines])
+ break
+ return idx, substrate
+
+
+# Backward compatibility routine
+def readPemFromFile(fileObj,
+ startMarker='-----BEGIN CERTIFICATE-----',
+ endMarker='-----END CERTIFICATE-----'):
+ idx, substrate = readPemBlocksFromFile(fileObj, (startMarker, endMarker))
+ return substrate
+
+
+def readBase64fromText(text):
+ if sys.version_info[0] <= 2:
+ return base64.b64decode(text)
+ else:
+ return base64.b64decode(text.encode())
+
+
+def readBase64FromFile(fileObj):
+ return readBase64fromText(fileObj.read())
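+
+
+# Usage sketch (illustrative only): read the first certificate from a PEM file and
+# decode it with the pyasn1 DER decoder; 'cert.pem' is a hypothetical input file.
+if __name__ == '__main__':
+    from pyasn1.codec.der import decoder
+    from pyasn1_modules import rfc2459
+
+    with open('cert.pem') as pemFile:
+        derSubstrate = readPemFromFile(pemFile)
+
+    certificate, rest = decoder.decode(derSubstrate, asn1Spec=rfc2459.Certificate())
+    print(certificate.prettyPrint())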
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc1155.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1155.py
new file mode 100644
index 0000000000..4980a38edb
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1155.py
@@ -0,0 +1,93 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# SNMPv1 message syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc1155.txt
+#
+# Sample captures from:
+# http://wiki.wireshark.org/SampleCaptures/
+#
+from pyasn1.type import univ, namedtype, tag, constraint
+
+
+class ObjectName(univ.ObjectIdentifier):
+ pass
+
+
+class SimpleSyntax(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('number', univ.Integer()),
+ namedtype.NamedType('string', univ.OctetString()),
+ namedtype.NamedType('object', univ.ObjectIdentifier()),
+ namedtype.NamedType('empty', univ.Null())
+ )
+
+
+class IpAddress(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0)
+ )
+    subtypeSpec = univ.OctetString.subtypeSpec + constraint.ValueSizeConstraint(
+ 4, 4
+ )
+
+
+class NetworkAddress(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('internet', IpAddress())
+ )
+
+
+class Counter(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 1)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+
+
+class Gauge(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 2)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+
+
+class TimeTicks(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 3)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+
+
+class Opaque(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 4)
+ )
+
+
+class ApplicationSyntax(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('address', NetworkAddress()),
+ namedtype.NamedType('counter', Counter()),
+ namedtype.NamedType('gauge', Gauge()),
+ namedtype.NamedType('ticks', TimeTicks()),
+ namedtype.NamedType('arbitrary', Opaque())
+ )
+
+
+class ObjectSyntax(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('simple', SimpleSyntax()),
+ namedtype.NamedType('application-wide', ApplicationSyntax())
+ )
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc1157.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1157.py
new file mode 100644
index 0000000000..1ad1d271a9
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1157.py
@@ -0,0 +1,122 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# SNMPv1 message syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc1157.txt
+#
+# Sample captures from:
+# http://wiki.wireshark.org/SampleCaptures/
+#
+from pyasn1.type import univ, namedtype, namedval, tag
+from pyasn1_modules import rfc1155
+
+
+class Version(univ.Integer):
+ namedValues = namedval.NamedValues(
+ ('version-1', 0)
+ )
+ defaultValue = 0
+
+
+class Community(univ.OctetString):
+ pass
+
+
+class RequestID(univ.Integer):
+ pass
+
+
+class ErrorStatus(univ.Integer):
+ namedValues = namedval.NamedValues(
+ ('noError', 0),
+ ('tooBig', 1),
+ ('noSuchName', 2),
+ ('badValue', 3),
+ ('readOnly', 4),
+ ('genErr', 5)
+ )
+
+
+class ErrorIndex(univ.Integer):
+ pass
+
+
+class VarBind(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('name', rfc1155.ObjectName()),
+ namedtype.NamedType('value', rfc1155.ObjectSyntax())
+ )
+
+
+class VarBindList(univ.SequenceOf):
+ componentType = VarBind()
+
+
+class _RequestBase(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('request-id', RequestID()),
+ namedtype.NamedType('error-status', ErrorStatus()),
+ namedtype.NamedType('error-index', ErrorIndex()),
+ namedtype.NamedType('variable-bindings', VarBindList())
+ )
+
+
+class GetRequestPDU(_RequestBase):
+ tagSet = _RequestBase.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)
+ )
+
+
+class GetNextRequestPDU(_RequestBase):
+ tagSet = _RequestBase.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)
+ )
+
+
+class GetResponsePDU(_RequestBase):
+ tagSet = _RequestBase.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)
+ )
+
+
+class SetRequestPDU(_RequestBase):
+ tagSet = _RequestBase.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3)
+ )
+
+
+class TrapPDU(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('enterprise', univ.ObjectIdentifier()),
+ namedtype.NamedType('agent-addr', rfc1155.NetworkAddress()),
+ namedtype.NamedType('generic-trap', univ.Integer().clone(
+ namedValues=namedval.NamedValues(('coldStart', 0), ('warmStart', 1), ('linkDown', 2), ('linkUp', 3),
+ ('authenticationFailure', 4), ('egpNeighborLoss', 5),
+ ('enterpriseSpecific', 6)))),
+ namedtype.NamedType('specific-trap', univ.Integer()),
+ namedtype.NamedType('time-stamp', rfc1155.TimeTicks()),
+ namedtype.NamedType('variable-bindings', VarBindList())
+ )
+
+
+class Pdus(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('get-request', GetRequestPDU()),
+ namedtype.NamedType('get-next-request', GetNextRequestPDU()),
+ namedtype.NamedType('get-response', GetResponsePDU()),
+ namedtype.NamedType('set-request', SetRequestPDU()),
+ namedtype.NamedType('trap', TrapPDU())
+ )
+
+
+class Message(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('community', Community()),
+ namedtype.NamedType('data', Pdus())
+ )
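+
+
+# Usage sketch (illustrative only): decode a captured SNMPv1 datagram into the
+# Message structure defined above; 'packet.bin' is a hypothetical file holding the
+# raw BER bytes of one UDP payload, e.g. taken from a Wireshark sample capture.
+if __name__ == '__main__':
+    from pyasn1.codec.ber import decoder
+
+    with open('packet.bin', 'rb') as packetFile:
+        wholeMsg = packetFile.read()
+
+    message, rest = decoder.decode(wholeMsg, asn1Spec=Message())
+    print(message.prettyPrint())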
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc1901.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1901.py
new file mode 100644
index 0000000000..eadf9aa395
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1901.py
@@ -0,0 +1,20 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# SNMPv2c message syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc1901.txt
+#
+from pyasn1.type import univ, namedtype, namedval
+
+
+class Message(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer(namedValues=namedval.NamedValues(('version-2c', 1)))),
+ namedtype.NamedType('community', univ.OctetString()),
+ namedtype.NamedType('data', univ.Any())
+ )
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc1902.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1902.py
new file mode 100644
index 0000000000..5e9307e528
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1902.py
@@ -0,0 +1,126 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# SNMPv2c message syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc1902.txt
+#
+from pyasn1.type import univ, namedtype, tag, constraint
+
+
+class Integer(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ -2147483648, 2147483647
+ )
+
+
+class Integer32(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ -2147483648, 2147483647
+ )
+
+
+class OctetString(univ.OctetString):
+    subtypeSpec = univ.OctetString.subtypeSpec + constraint.ValueSizeConstraint(
+ 0, 65535
+ )
+
+
+class IpAddress(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x00)
+ )
+ subtypeSpec = univ.OctetString.subtypeSpec + constraint.ValueSizeConstraint(
+ 4, 4
+ )
+
+
+class Counter32(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x01)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+
+
+class Gauge32(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x02)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+
+
+class Unsigned32(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x02)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+
+
+class TimeTicks(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x03)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+
+
+class Opaque(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x04)
+ )
+
+
+class Counter64(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x06)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ 0, 18446744073709551615
+ )
+
+
+class Bits(univ.OctetString):
+ pass
+
+
+class ObjectName(univ.ObjectIdentifier):
+ pass
+
+
+class SimpleSyntax(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('integer-value', Integer()),
+ namedtype.NamedType('string-value', OctetString()),
+ namedtype.NamedType('objectID-value', univ.ObjectIdentifier())
+ )
+
+
+class ApplicationSyntax(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('ipAddress-value', IpAddress()),
+ namedtype.NamedType('counter-value', Counter32()),
+ namedtype.NamedType('timeticks-value', TimeTicks()),
+ namedtype.NamedType('arbitrary-value', Opaque()),
+ namedtype.NamedType('big-counter-value', Counter64()),
+ # This conflicts with Counter32
+ # namedtype.NamedType('unsigned-integer-value', Unsigned32()),
+ namedtype.NamedType('gauge32-value', Gauge32())
+ ) # BITS misplaced?
+
+
+class ObjectSyntax(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('simple', SimpleSyntax()),
+ namedtype.NamedType('application-wide', ApplicationSyntax())
+ )
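+
+
+# Usage sketch (illustrative only): the SMI types above can be instantiated and
+# BER-encoded/decoded directly, e.g. when assembling SNMPv2c variable bindings by hand.
+if __name__ == '__main__':
+    from pyasn1.codec.ber import encoder, decoder
+
+    substrate = encoder.encode(Counter64(1234567890))
+    value, rest = decoder.decode(substrate, asn1Spec=Counter64())
+    assert int(value) == 1234567890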
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc1905.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1905.py
new file mode 100644
index 0000000000..de5bb031d4
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc1905.py
@@ -0,0 +1,130 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# SNMPv2c PDU syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc1905.txt
+#
+from pyasn1.type import univ, namedtype, namedval, tag, constraint
+from pyasn1_modules import rfc1902
+
+max_bindings = rfc1902.Integer(2147483647)
+
+
+class _BindValue(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('value', rfc1902.ObjectSyntax()),
+ namedtype.NamedType('unSpecified', univ.Null()),
+ namedtype.NamedType('noSuchObject',
+ univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('noSuchInstance',
+ univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('endOfMibView',
+ univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+
+class VarBind(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('name', rfc1902.ObjectName()),
+ namedtype.NamedType('', _BindValue())
+ )
+
+
+class VarBindList(univ.SequenceOf):
+ componentType = VarBind()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(
+ 0, max_bindings
+ )
+
+
+class PDU(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('request-id', rfc1902.Integer32()),
+ namedtype.NamedType('error-status', univ.Integer(
+ namedValues=namedval.NamedValues(('noError', 0), ('tooBig', 1), ('noSuchName', 2), ('badValue', 3),
+ ('readOnly', 4), ('genErr', 5), ('noAccess', 6), ('wrongType', 7),
+ ('wrongLength', 8), ('wrongEncoding', 9), ('wrongValue', 10),
+ ('noCreation', 11), ('inconsistentValue', 12), ('resourceUnavailable', 13),
+ ('commitFailed', 14), ('undoFailed', 15), ('authorizationError', 16),
+ ('notWritable', 17), ('inconsistentName', 18)))),
+ namedtype.NamedType('error-index',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, max_bindings))),
+ namedtype.NamedType('variable-bindings', VarBindList())
+ )
+
+
+class BulkPDU(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('request-id', rfc1902.Integer32()),
+ namedtype.NamedType('non-repeaters',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, max_bindings))),
+ namedtype.NamedType('max-repetitions',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, max_bindings))),
+ namedtype.NamedType('variable-bindings', VarBindList())
+ )
+
+
+class GetRequestPDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)
+ )
+
+
+class GetNextRequestPDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)
+ )
+
+
+class ResponsePDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)
+ )
+
+
+class SetRequestPDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3)
+ )
+
+
+class GetBulkRequestPDU(BulkPDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5)
+ )
+
+
+class InformRequestPDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6)
+ )
+
+
+class SNMPv2TrapPDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 7)
+ )
+
+
+class ReportPDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8)
+ )
+
+
+class PDUs(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('get-request', GetRequestPDU()),
+ namedtype.NamedType('get-next-request', GetNextRequestPDU()),
+ namedtype.NamedType('get-bulk-request', GetBulkRequestPDU()),
+ namedtype.NamedType('response', ResponsePDU()),
+ namedtype.NamedType('set-request', SetRequestPDU()),
+ namedtype.NamedType('inform-request', InformRequestPDU()),
+ namedtype.NamedType('snmpV2-trap', SNMPv2TrapPDU()),
+ namedtype.NamedType('report', ReportPDU())
+ )
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc2251.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2251.py
new file mode 100644
index 0000000000..94ba5891e8
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2251.py
@@ -0,0 +1,559 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# LDAP message syntax
+#
+# ASN.1 source from:
+# http://www.trl.ibm.com/projects/xml/xss4j/data/asn1/grammars/ldap.asn
+#
+# Sample captures from:
+# http://wiki.wireshark.org/SampleCaptures/
+#
+from pyasn1.type import tag, namedtype, namedval, univ, constraint
+
+maxInt = univ.Integer(2147483647)
+
+
+class LDAPString(univ.OctetString):
+ pass
+
+
+class LDAPOID(univ.OctetString):
+ pass
+
+
+class LDAPDN(LDAPString):
+ pass
+
+
+class RelativeLDAPDN(LDAPString):
+ pass
+
+
+class AttributeType(LDAPString):
+ pass
+
+
+class AttributeDescription(LDAPString):
+ pass
+
+
+class AttributeDescriptionList(univ.SequenceOf):
+ componentType = AttributeDescription()
+
+
+class AttributeValue(univ.OctetString):
+ pass
+
+
+class AssertionValue(univ.OctetString):
+ pass
+
+
+class AttributeValueAssertion(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('attributeDesc', AttributeDescription()),
+ namedtype.NamedType('assertionValue', AssertionValue())
+ )
+
+
+class Attribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeDescription()),
+ namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
+ )
+
+
+class MatchingRuleId(LDAPString):
+ pass
+
+
+class Control(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('controlType', LDAPOID()),
+ namedtype.DefaultedNamedType('criticality', univ.Boolean('False')),
+ namedtype.OptionalNamedType('controlValue', univ.OctetString())
+ )
+
+
+class Controls(univ.SequenceOf):
+ componentType = Control()
+
+
+class LDAPURL(LDAPString):
+ pass
+
+
+class Referral(univ.SequenceOf):
+ componentType = LDAPURL()
+
+
+class SaslCredentials(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('mechanism', LDAPString()),
+ namedtype.OptionalNamedType('credentials', univ.OctetString())
+ )
+
+
+class AuthenticationChoice(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('simple', univ.OctetString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('reserved-1', univ.OctetString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('reserved-2', univ.OctetString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('sasl',
+ SaslCredentials().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+ )
+
+
+class BindRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 0)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, 127))),
+ namedtype.NamedType('name', LDAPDN()),
+ namedtype.NamedType('authentication', AuthenticationChoice())
+ )
+
+
+class PartialAttributeList(univ.SequenceOf):
+ componentType = univ.Sequence(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeDescription()),
+ namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
+ )
+ )
+
+
+class SearchResultEntry(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 4)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('objectName', LDAPDN()),
+ namedtype.NamedType('attributes', PartialAttributeList())
+ )
+
+
+class MatchingRuleAssertion(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('matchingRule', MatchingRuleId().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('type', AttributeDescription().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('matchValue',
+ AssertionValue().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.DefaultedNamedType('dnAttributes', univ.Boolean('False').subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)))
+ )
+
+
+class SubstringFilter(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeDescription()),
+ namedtype.NamedType('substrings',
+ univ.SequenceOf(
+ componentType=univ.Choice(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType(
+ 'initial', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
+ ),
+ namedtype.NamedType(
+ 'any', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))
+ ),
+ namedtype.NamedType(
+ 'final', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))
+ )
+ )
+ )
+ )
+ )
+ )
+
+
+# Ugly hack to handle recursive Filter reference (up to 3 levels deep).
+
+class Filter3(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.NamedType('substrings', SubstringFilter().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
+ namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
+ namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))),
+ namedtype.NamedType('present', AttributeDescription().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.NamedType('approxMatch', AttributeValueAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))),
+ namedtype.NamedType('extensibleMatch', MatchingRuleAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9)))
+ )
+
+
+class Filter2(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('and', univ.SetOf(componentType=Filter3()).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('or', univ.SetOf(componentType=Filter3()).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('not',
+ Filter3().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.NamedType('substrings', SubstringFilter().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
+ namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
+ namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))),
+ namedtype.NamedType('present', AttributeDescription().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.NamedType('approxMatch', AttributeValueAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))),
+ namedtype.NamedType('extensibleMatch', MatchingRuleAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9)))
+ )
+
+
+class Filter(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('and', univ.SetOf(componentType=Filter2()).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('or', univ.SetOf(componentType=Filter2()).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('not',
+ Filter2().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.NamedType('substrings', SubstringFilter().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
+ namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
+ namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))),
+ namedtype.NamedType('present', AttributeDescription().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.NamedType('approxMatch', AttributeValueAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))),
+ namedtype.NamedType('extensibleMatch', MatchingRuleAssertion().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9)))
+ )
+
+
+# End of Filter hack
+
+class SearchRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 3)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('baseObject', LDAPDN()),
+ namedtype.NamedType('scope', univ.Enumerated(
+ namedValues=namedval.NamedValues(('baseObject', 0), ('singleLevel', 1), ('wholeSubtree', 2)))),
+ namedtype.NamedType('derefAliases', univ.Enumerated(
+ namedValues=namedval.NamedValues(('neverDerefAliases', 0), ('derefInSearching', 1),
+ ('derefFindingBaseObj', 2), ('derefAlways', 3)))),
+ namedtype.NamedType('sizeLimit',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, maxInt))),
+ namedtype.NamedType('timeLimit',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, maxInt))),
+ namedtype.NamedType('typesOnly', univ.Boolean()),
+ namedtype.NamedType('filter', Filter()),
+ namedtype.NamedType('attributes', AttributeDescriptionList())
+ )
+
+
+class UnbindRequest(univ.Null):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 2)
+ )
+
+
+class BindResponse(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 1)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('resultCode', univ.Enumerated(
+ namedValues=namedval.NamedValues(('success', 0), ('operationsError', 1), ('protocolError', 2),
+ ('timeLimitExceeded', 3), ('sizeLimitExceeded', 4), ('compareFalse', 5),
+ ('compareTrue', 6), ('authMethodNotSupported', 7),
+ ('strongAuthRequired', 8), ('reserved-9', 9), ('referral', 10),
+ ('adminLimitExceeded', 11), ('unavailableCriticalExtension', 12),
+ ('confidentialityRequired', 13), ('saslBindInProgress', 14),
+ ('noSuchAttribute', 16), ('undefinedAttributeType', 17),
+ ('inappropriateMatching', 18), ('constraintViolation', 19),
+ ('attributeOrValueExists', 20), ('invalidAttributeSyntax', 21),
+ ('noSuchObject', 32), ('aliasProblem', 33), ('invalidDNSyntax', 34),
+ ('reserved-35', 35), ('aliasDereferencingProblem', 36),
+ ('inappropriateAuthentication', 48), ('invalidCredentials', 49),
+ ('insufficientAccessRights', 50), ('busy', 51), ('unavailable', 52),
+ ('unwillingToPerform', 53), ('loopDetect', 54), ('namingViolation', 64),
+ ('objectClassViolation', 65), ('notAllowedOnNonLeaf', 66),
+ ('notAllowedOnRDN', 67), ('entryAlreadyExists', 68),
+ ('objectClassModsProhibited', 69), ('reserved-70', 70),
+ ('affectsMultipleDSAs', 71), ('other', 80), ('reserved-81', 81),
+ ('reserved-82', 82), ('reserved-83', 83), ('reserved-84', 84),
+ ('reserved-85', 85), ('reserved-86', 86), ('reserved-87', 87),
+ ('reserved-88', 88), ('reserved-89', 89), ('reserved-90', 90)))),
+ namedtype.NamedType('matchedDN', LDAPDN()),
+ namedtype.NamedType('errorMessage', LDAPString()),
+ namedtype.OptionalNamedType('referral', Referral().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.OptionalNamedType('serverSaslCreds', univ.OctetString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 7)))
+ )
+
+
+class LDAPResult(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('resultCode', univ.Enumerated(
+ namedValues=namedval.NamedValues(('success', 0), ('operationsError', 1), ('protocolError', 2),
+ ('timeLimitExceeded', 3), ('sizeLimitExceeded', 4), ('compareFalse', 5),
+ ('compareTrue', 6), ('authMethodNotSupported', 7),
+ ('strongAuthRequired', 8), ('reserved-9', 9), ('referral', 10),
+ ('adminLimitExceeded', 11), ('unavailableCriticalExtension', 12),
+ ('confidentialityRequired', 13), ('saslBindInProgress', 14),
+ ('noSuchAttribute', 16), ('undefinedAttributeType', 17),
+ ('inappropriateMatching', 18), ('constraintViolation', 19),
+ ('attributeOrValueExists', 20), ('invalidAttributeSyntax', 21),
+ ('noSuchObject', 32), ('aliasProblem', 33), ('invalidDNSyntax', 34),
+ ('reserved-35', 35), ('aliasDereferencingProblem', 36),
+ ('inappropriateAuthentication', 48), ('invalidCredentials', 49),
+ ('insufficientAccessRights', 50), ('busy', 51), ('unavailable', 52),
+ ('unwillingToPerform', 53), ('loopDetect', 54), ('namingViolation', 64),
+ ('objectClassViolation', 65), ('notAllowedOnNonLeaf', 66),
+ ('notAllowedOnRDN', 67), ('entryAlreadyExists', 68),
+ ('objectClassModsProhibited', 69), ('reserved-70', 70),
+ ('affectsMultipleDSAs', 71), ('other', 80), ('reserved-81', 81),
+ ('reserved-82', 82), ('reserved-83', 83), ('reserved-84', 84),
+ ('reserved-85', 85), ('reserved-86', 86), ('reserved-87', 87),
+ ('reserved-88', 88), ('reserved-89', 89), ('reserved-90', 90)))),
+ namedtype.NamedType('matchedDN', LDAPDN()),
+ namedtype.NamedType('errorMessage', LDAPString()),
+ namedtype.OptionalNamedType('referral', Referral().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3)))
+ )
+
+
+class SearchResultReference(univ.SequenceOf):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 19)
+ )
+ componentType = LDAPURL()
+
+
+class SearchResultDone(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 5)
+ )
+
+
+class AttributeTypeAndValues(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeDescription()),
+ namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
+ )
+
+
+class ModifyRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 6)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('object', LDAPDN()),
+ namedtype.NamedType('modification',
+ univ.SequenceOf(
+ componentType=univ.Sequence(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType(
+ 'operation', univ.Enumerated(namedValues=namedval.NamedValues(('add', 0), ('delete', 1), ('replace', 2)))
+ ),
+ namedtype.NamedType('modification', AttributeTypeAndValues())))
+ )
+ )
+ )
+
+
+class ModifyResponse(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 7)
+ )
+
+
+class AttributeList(univ.SequenceOf):
+ componentType = univ.Sequence(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeDescription()),
+ namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
+ )
+ )
+
+
+class AddRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 8)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('entry', LDAPDN()),
+ namedtype.NamedType('attributes', AttributeList())
+ )
+
+
+class AddResponse(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 9)
+ )
+
+
+class DelRequest(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 10)
+ )
+
+
+class DelResponse(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 11)
+ )
+
+
+class ModifyDNRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 12)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('entry', LDAPDN()),
+ namedtype.NamedType('newrdn', RelativeLDAPDN()),
+ namedtype.NamedType('deleteoldrdn', univ.Boolean()),
+ namedtype.OptionalNamedType('newSuperior',
+ LDAPDN().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+
+ )
+
+
+class ModifyDNResponse(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 13)
+ )
+
+
+class CompareRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 14)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('entry', LDAPDN()),
+ namedtype.NamedType('ava', AttributeValueAssertion())
+ )
+
+
+class CompareResponse(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 15)
+ )
+
+
+class AbandonRequest(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 16)
+ )
+
+
+class ExtendedRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 23)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('requestName',
+ LDAPOID().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('requestValue', univ.OctetString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+
+class ExtendedResponse(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 24)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('resultCode', univ.Enumerated(
+ namedValues=namedval.NamedValues(('success', 0), ('operationsError', 1), ('protocolError', 2),
+ ('timeLimitExceeded', 3), ('sizeLimitExceeded', 4), ('compareFalse', 5),
+ ('compareTrue', 6), ('authMethodNotSupported', 7),
+ ('strongAuthRequired', 8), ('reserved-9', 9), ('referral', 10),
+ ('adminLimitExceeded', 11), ('unavailableCriticalExtension', 12),
+ ('confidentialityRequired', 13), ('saslBindInProgress', 14),
+ ('noSuchAttribute', 16), ('undefinedAttributeType', 17),
+ ('inappropriateMatching', 18), ('constraintViolation', 19),
+ ('attributeOrValueExists', 20), ('invalidAttributeSyntax', 21),
+ ('noSuchObject', 32), ('aliasProblem', 33), ('invalidDNSyntax', 34),
+ ('reserved-35', 35), ('aliasDereferencingProblem', 36),
+ ('inappropriateAuthentication', 48), ('invalidCredentials', 49),
+ ('insufficientAccessRights', 50), ('busy', 51), ('unavailable', 52),
+ ('unwillingToPerform', 53), ('loopDetect', 54), ('namingViolation', 64),
+ ('objectClassViolation', 65), ('notAllowedOnNonLeaf', 66),
+ ('notAllowedOnRDN', 67), ('entryAlreadyExists', 68),
+ ('objectClassModsProhibited', 69), ('reserved-70', 70),
+ ('affectsMultipleDSAs', 71), ('other', 80), ('reserved-81', 81),
+ ('reserved-82', 82), ('reserved-83', 83), ('reserved-84', 84),
+ ('reserved-85', 85), ('reserved-86', 86), ('reserved-87', 87),
+ ('reserved-88', 88), ('reserved-89', 89), ('reserved-90', 90)))),
+ namedtype.NamedType('matchedDN', LDAPDN()),
+ namedtype.NamedType('errorMessage', LDAPString()),
+ namedtype.OptionalNamedType('referral', Referral().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+
+ namedtype.OptionalNamedType('responseName', LDAPOID().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10))),
+ namedtype.OptionalNamedType('response', univ.OctetString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 11)))
+ )
+
+
+class MessageID(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ 0, maxInt
+ )
+
+
+class LDAPMessage(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('messageID', MessageID()),
+ namedtype.NamedType(
+ 'protocolOp', univ.Choice(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('bindRequest', BindRequest()),
+ namedtype.NamedType('bindResponse', BindResponse()),
+ namedtype.NamedType('unbindRequest', UnbindRequest()),
+ namedtype.NamedType('searchRequest', SearchRequest()),
+ namedtype.NamedType('searchResEntry', SearchResultEntry()),
+ namedtype.NamedType('searchResDone', SearchResultDone()),
+ namedtype.NamedType('searchResRef', SearchResultReference()),
+ namedtype.NamedType('modifyRequest', ModifyRequest()),
+ namedtype.NamedType('modifyResponse', ModifyResponse()),
+ namedtype.NamedType('addRequest', AddRequest()),
+ namedtype.NamedType('addResponse', AddResponse()),
+ namedtype.NamedType('delRequest', DelRequest()),
+ namedtype.NamedType('delResponse', DelResponse()),
+ namedtype.NamedType('modDNRequest', ModifyDNRequest()),
+ namedtype.NamedType('modDNResponse', ModifyDNResponse()),
+ namedtype.NamedType('compareRequest', CompareRequest()),
+ namedtype.NamedType('compareResponse', CompareResponse()),
+ namedtype.NamedType('abandonRequest', AbandonRequest()),
+ namedtype.NamedType('extendedReq', ExtendedRequest()),
+ namedtype.NamedType('extendedResp', ExtendedResponse())
+ )
+ )
+ ),
+ namedtype.OptionalNamedType('controls', Controls().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc2314.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2314.py
new file mode 100644
index 0000000000..ef6a65bbf8
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2314.py
@@ -0,0 +1,48 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# PKCS#10 syntax
+#
+# ASN.1 source from:
+# http://tools.ietf.org/html/rfc2314
+#
+# Sample captures could be obtained with "openssl req" command
+#
+from pyasn1_modules.rfc2459 import *
+
+
+class Attributes(univ.SetOf):
+ componentType = Attribute()
+
+
+class Version(univ.Integer):
+ pass
+
+
+class CertificationRequestInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('subject', Name()),
+ namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()),
+ namedtype.NamedType('attributes',
+ Attributes().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+
+class Signature(univ.BitString):
+ pass
+
+
+class SignatureAlgorithmIdentifier(AlgorithmIdentifier):
+ pass
+
+
+class CertificationRequest(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certificationRequestInfo', CertificationRequestInfo()),
+ namedtype.NamedType('signatureAlgorithm', SignatureAlgorithmIdentifier()),
+ namedtype.NamedType('signature', Signature())
+ )
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc2315.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2315.py
new file mode 100644
index 0000000000..cf732b0550
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2315.py
@@ -0,0 +1,272 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# PKCS#7 message syntax
+#
+# ASN.1 source from:
+# https://opensource.apple.com/source/Security/Security-55179.1/libsecurity_asn1/asn1/pkcs7.asn.auto.html
+#
+# Sample captures from:
+# openssl crl2pkcs7 -nocrl -certfile cert1.cer -out outfile.p7b
+#
+from pyasn1_modules.rfc2459 import *
+
+
+class Attribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeType()),
+ namedtype.NamedType('values', univ.SetOf(componentType=AttributeValue()))
+ )
+
+
+class AttributeValueAssertion(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('attributeType', AttributeType()),
+ namedtype.NamedType('attributeValue', AttributeValue())
+ )
+
+
+pkcs_7 = univ.ObjectIdentifier('1.2.840.113549.1.7')
+data = univ.ObjectIdentifier('1.2.840.113549.1.7.1')
+signedData = univ.ObjectIdentifier('1.2.840.113549.1.7.2')
+envelopedData = univ.ObjectIdentifier('1.2.840.113549.1.7.3')
+signedAndEnvelopedData = univ.ObjectIdentifier('1.2.840.113549.1.7.4')
+digestedData = univ.ObjectIdentifier('1.2.840.113549.1.7.5')
+encryptedData = univ.ObjectIdentifier('1.2.840.113549.1.7.6')
+
+
+class ContentType(univ.ObjectIdentifier):
+ pass
+
+
+class ContentEncryptionAlgorithmIdentifier(AlgorithmIdentifier):
+ pass
+
+
+class EncryptedContent(univ.OctetString):
+ pass
+
+
+class EncryptedContentInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('contentType', ContentType()),
+ namedtype.NamedType('contentEncryptionAlgorithm', ContentEncryptionAlgorithmIdentifier()),
+ namedtype.OptionalNamedType('encryptedContent', EncryptedContent().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+
+class Version(univ.Integer): # overrides x509.Version
+ pass
+
+
+class EncryptedData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo())
+ )
+
+
+class DigestAlgorithmIdentifier(AlgorithmIdentifier):
+ pass
+
+
+class DigestAlgorithmIdentifiers(univ.SetOf):
+ componentType = DigestAlgorithmIdentifier()
+
+
+class Digest(univ.OctetString):
+ pass
+
+
+class ContentInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('contentType', ContentType()),
+ namedtype.OptionalNamedType('content', univ.Any().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+
+class DigestedData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()),
+ namedtype.NamedType('contentInfo', ContentInfo()),
+ namedtype.NamedType('digest', Digest())
+ )
+
+
+class IssuerAndSerialNumber(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('serialNumber', CertificateSerialNumber())
+ )
+
+
+class KeyEncryptionAlgorithmIdentifier(AlgorithmIdentifier):
+ pass
+
+
+class EncryptedKey(univ.OctetString):
+ pass
+
+
+class RecipientInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
+ namedtype.NamedType('keyEncryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()),
+ namedtype.NamedType('encryptedKey', EncryptedKey())
+ )
+
+
+class RecipientInfos(univ.SetOf):
+ componentType = RecipientInfo()
+
+
+class Attributes(univ.SetOf):
+ componentType = Attribute()
+
+
+class ExtendedCertificateInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('certificate', Certificate()),
+ namedtype.NamedType('attributes', Attributes())
+ )
+
+
+class SignatureAlgorithmIdentifier(AlgorithmIdentifier):
+ pass
+
+
+class Signature(univ.BitString):
+ pass
+
+
+class ExtendedCertificate(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extendedCertificateInfo', ExtendedCertificateInfo()),
+ namedtype.NamedType('signatureAlgorithm', SignatureAlgorithmIdentifier()),
+ namedtype.NamedType('signature', Signature())
+ )
+
+
+class ExtendedCertificateOrCertificate(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certificate', Certificate()),
+ namedtype.NamedType('extendedCertificate', ExtendedCertificate().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+
+class ExtendedCertificatesAndCertificates(univ.SetOf):
+ componentType = ExtendedCertificateOrCertificate()
+
+
+class SerialNumber(univ.Integer):
+ pass
+
+
+class CRLEntry(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('userCertificate', SerialNumber()),
+ namedtype.NamedType('revocationDate', useful.UTCTime())
+ )
+
+
+class TBSCertificateRevocationList(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('signature', AlgorithmIdentifier()),
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('lastUpdate', useful.UTCTime()),
+ namedtype.NamedType('nextUpdate', useful.UTCTime()),
+ namedtype.OptionalNamedType('revokedCertificates', univ.SequenceOf(componentType=CRLEntry()))
+ )
+
+
+class CertificateRevocationList(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsCertificateRevocationList', TBSCertificateRevocationList()),
+ namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+ )
+
+
+class CertificateRevocationLists(univ.SetOf):
+ componentType = CertificateRevocationList()
+
+
+class DigestEncryptionAlgorithmIdentifier(AlgorithmIdentifier):
+ pass
+
+
+class EncryptedDigest(univ.OctetString):
+ pass
+
+
+class SignerInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
+ namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()),
+ namedtype.OptionalNamedType('authenticatedAttributes', Attributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('digestEncryptionAlgorithm', DigestEncryptionAlgorithmIdentifier()),
+ namedtype.NamedType('encryptedDigest', EncryptedDigest()),
+ namedtype.OptionalNamedType('unauthenticatedAttributes', Attributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+
+class SignerInfos(univ.SetOf):
+ componentType = SignerInfo()
+
+
+class SignedAndEnvelopedData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('recipientInfos', RecipientInfos()),
+ namedtype.NamedType('digestAlgorithms', DigestAlgorithmIdentifiers()),
+ namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo()),
+ namedtype.OptionalNamedType('certificates', ExtendedCertificatesAndCertificates().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('crls', CertificateRevocationLists().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('signerInfos', SignerInfos())
+ )
+
+
+class EnvelopedData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('recipientInfos', RecipientInfos()),
+ namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo())
+ )
+
+
+class DigestInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()),
+ namedtype.NamedType('digest', Digest())
+ )
+
+
+class SignedData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('digestAlgorithms', DigestAlgorithmIdentifiers()),
+ namedtype.NamedType('contentInfo', ContentInfo()),
+ namedtype.OptionalNamedType('certificates', ExtendedCertificatesAndCertificates().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('crls', CertificateRevocationLists().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('signerInfos', SignerInfos())
+ )
+
+
+class Data(univ.OctetString):
+ pass
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc2437.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2437.py
new file mode 100644
index 0000000000..678d92d5a7
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2437.py
@@ -0,0 +1,66 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# PKCS#1 syntax
+#
+# ASN.1 source from:
+# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2.asn
+#
+# Sample captures could be obtained with "openssl genrsa" command
+#
+from pyasn1.type import tag, namedtype, univ
+from pyasn1_modules.rfc2459 import AlgorithmIdentifier
+
+pkcs_1 = univ.ObjectIdentifier('1.2.840.113549.1.1')
+rsaEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.1')
+md2WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.2')
+md4WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.3')
+md5WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.4')
+sha1WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.5')
+rsaOAEPEncryptionSET = univ.ObjectIdentifier('1.2.840.113549.1.1.6')
+id_RSAES_OAEP = univ.ObjectIdentifier('1.2.840.113549.1.1.7')
+id_mgf1 = univ.ObjectIdentifier('1.2.840.113549.1.1.8')
+id_pSpecified = univ.ObjectIdentifier('1.2.840.113549.1.1.9')
+id_sha1 = univ.ObjectIdentifier('1.3.14.3.2.26')
+
+MAX = float('inf')
+
+
+class Version(univ.Integer):
+ pass
+
+
+class RSAPrivateKey(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('modulus', univ.Integer()),
+ namedtype.NamedType('publicExponent', univ.Integer()),
+ namedtype.NamedType('privateExponent', univ.Integer()),
+ namedtype.NamedType('prime1', univ.Integer()),
+ namedtype.NamedType('prime2', univ.Integer()),
+ namedtype.NamedType('exponent1', univ.Integer()),
+ namedtype.NamedType('exponent2', univ.Integer()),
+ namedtype.NamedType('coefficient', univ.Integer())
+ )
+
+
+class RSAPublicKey(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('modulus', univ.Integer()),
+ namedtype.NamedType('publicExponent', univ.Integer())
+ )
+
+
+# XXX defaults not set
+class RSAES_OAEP_params(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('hashFunc', AlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('maskGenFunc', AlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('pSourceFunc', AlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)))
+ )
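+
+
+# Usage sketch (illustrative only): decode a PKCS#1 RSA private key, as produced by
+# "openssl genrsa", into the RSAPrivateKey structure; 'rsakey.pem' is a hypothetical file.
+if __name__ == '__main__':
+    from pyasn1.codec.der import decoder
+    from pyasn1_modules import pem
+
+    with open('rsakey.pem') as keyFile:
+        substrate = pem.readPemFromFile(
+            keyFile,
+            startMarker='-----BEGIN RSA PRIVATE KEY-----',
+            endMarker='-----END RSA PRIVATE KEY-----'
+        )
+
+    key, rest = decoder.decode(substrate, asn1Spec=RSAPrivateKey())
+    modulus = int(key.getComponentByName('modulus'))
+    print('modulus is %d bits long' % modulus.bit_length())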
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc2459.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2459.py
new file mode 100644
index 0000000000..c988c4f2c3
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2459.py
@@ -0,0 +1,1311 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# X.509 message syntax
+#
+# ASN.1 source from:
+# http://www.trl.ibm.com/projects/xml/xss4j/data/asn1/grammars/x509.asn
+# http://www.ietf.org/rfc/rfc2459.txt
+#
+# Sample captures from:
+# http://wiki.wireshark.org/SampleCaptures/
+#
+from pyasn1.type import tag, namedtype, namedval, univ, constraint, char, useful
+
+MAX = float('inf')
+
+#
+# PKIX1Explicit88
+#
+
+# Upper Bounds
+ub_name = univ.Integer(32768)
+ub_common_name = univ.Integer(64)
+ub_locality_name = univ.Integer(128)
+ub_state_name = univ.Integer(128)
+ub_organization_name = univ.Integer(64)
+ub_organizational_unit_name = univ.Integer(64)
+ub_title = univ.Integer(64)
+ub_match = univ.Integer(128)
+ub_emailaddress_length = univ.Integer(128)
+ub_common_name_length = univ.Integer(64)
+ub_country_name_alpha_length = univ.Integer(2)
+ub_country_name_numeric_length = univ.Integer(3)
+ub_domain_defined_attributes = univ.Integer(4)
+ub_domain_defined_attribute_type_length = univ.Integer(8)
+ub_domain_defined_attribute_value_length = univ.Integer(128)
+ub_domain_name_length = univ.Integer(16)
+ub_extension_attributes = univ.Integer(256)
+ub_e163_4_number_length = univ.Integer(15)
+ub_e163_4_sub_address_length = univ.Integer(40)
+ub_generation_qualifier_length = univ.Integer(3)
+ub_given_name_length = univ.Integer(16)
+ub_initials_length = univ.Integer(5)
+ub_integer_options = univ.Integer(256)
+ub_numeric_user_id_length = univ.Integer(32)
+ub_organization_name_length = univ.Integer(64)
+ub_organizational_unit_name_length = univ.Integer(32)
+ub_organizational_units = univ.Integer(4)
+ub_pds_name_length = univ.Integer(16)
+ub_pds_parameter_length = univ.Integer(30)
+ub_pds_physical_address_lines = univ.Integer(6)
+ub_postal_code_length = univ.Integer(16)
+ub_surname_length = univ.Integer(40)
+ub_terminal_id_length = univ.Integer(24)
+ub_unformatted_address_length = univ.Integer(180)
+ub_x121_address_length = univ.Integer(16)
+
+
+class UniversalString(char.UniversalString):
+ pass
+
+
+class BMPString(char.BMPString):
+ pass
+
+
+class UTF8String(char.UTF8String):
+ pass
+
+
+id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7')
+id_pe = univ.ObjectIdentifier('1.3.6.1.5.5.7.1')
+id_qt = univ.ObjectIdentifier('1.3.6.1.5.5.7.2')
+id_kp = univ.ObjectIdentifier('1.3.6.1.5.5.7.3')
+id_ad = univ.ObjectIdentifier('1.3.6.1.5.5.7.48')
+
+id_qt_cps = univ.ObjectIdentifier('1.3.6.1.5.5.7.2.1')
+id_qt_unotice = univ.ObjectIdentifier('1.3.6.1.5.5.7.2.2')
+
+id_ad_ocsp = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.1')
+id_ad_caIssuers = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.2')
+
+
+class AttributeValue(univ.Any):
+ pass
+
+
+class AttributeType(univ.ObjectIdentifier):
+ pass
+
+
+class AttributeTypeAndValue(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeType()),
+ namedtype.NamedType('value', AttributeValue())
+ )
+
+
+class Attribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeType()),
+ namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
+ )
+
+
+id_at = univ.ObjectIdentifier('2.5.4')
+id_at_name = univ.ObjectIdentifier('2.5.4.41')
+# preserve misspelled variable for compatibility
+id_at_sutname = id_at_surname = univ.ObjectIdentifier('2.5.4.4')
+id_at_givenName = univ.ObjectIdentifier('2.5.4.42')
+id_at_initials = univ.ObjectIdentifier('2.5.4.43')
+id_at_generationQualifier = univ.ObjectIdentifier('2.5.4.44')
+
+
+class X520name(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name)))
+ )
+
+
+id_at_commonName = univ.ObjectIdentifier('2.5.4.3')
+
+
+class X520CommonName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name)))
+ )
+
+
+id_at_localityName = univ.ObjectIdentifier('2.5.4.7')
+
+
+class X520LocalityName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name)))
+ )
+
+
+id_at_stateOrProvinceName = univ.ObjectIdentifier('2.5.4.8')
+
+
+class X520StateOrProvinceName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name)))
+ )
+
+
+id_at_organizationName = univ.ObjectIdentifier('2.5.4.10')
+
+
+class X520OrganizationName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name)))
+ )
+
+
+id_at_organizationalUnitName = univ.ObjectIdentifier('2.5.4.11')
+
+
+class X520OrganizationalUnitName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name)))
+ )
+
+
+id_at_title = univ.ObjectIdentifier('2.5.4.12')
+
+
+class X520Title(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title)))
+ )
+
+
+id_at_dnQualifier = univ.ObjectIdentifier('2.5.4.46')
+
+
+class X520dnQualifier(char.PrintableString):
+ pass
+
+
+id_at_countryName = univ.ObjectIdentifier('2.5.4.6')
+
+
+class X520countryName(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(2, 2)
+
+
+pkcs_9 = univ.ObjectIdentifier('1.2.840.113549.1.9')
+
+emailAddress = univ.ObjectIdentifier('1.2.840.113549.1.9.1')
+
+
+class Pkcs9email(char.IA5String):
+ subtypeSpec = char.IA5String.subtypeSpec + constraint.ValueSizeConstraint(1, ub_emailaddress_length)
+
+
+# ----
+
+class DSAPrivateKey(univ.Sequence):
+ """PKIX compliant DSA private key structure"""
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer(namedValues=namedval.NamedValues(('v1', 0)))),
+ namedtype.NamedType('p', univ.Integer()),
+ namedtype.NamedType('q', univ.Integer()),
+ namedtype.NamedType('g', univ.Integer()),
+ namedtype.NamedType('public', univ.Integer()),
+ namedtype.NamedType('private', univ.Integer())
+ )
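+
+
+# Rough sketch of decoding a traditional OpenSSL DSA key against the structure
+# above; `key_der` is assumed to hold the base64-decoded body of a
+# "BEGIN DSA PRIVATE KEY" PEM block:
+#
+#   from pyasn1.codec.der import decoder
+#   key, _ = decoder.decode(key_der, asn1Spec=DSAPrivateKey())
+#   p, q, g = key['p'], key['q'], key['g']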
+
+
+# ----
+
+class RelativeDistinguishedName(univ.SetOf):
+ componentType = AttributeTypeAndValue()
+
+
+class RDNSequence(univ.SequenceOf):
+ componentType = RelativeDistinguishedName()
+
+
+class Name(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('', RDNSequence())
+ )
+
+
+class DirectoryString(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('ia5String', char.IA5String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
+ # XXX: ia5String is not part of the RFC 2459 DirectoryString definition
+ )
+
+
+# certificate and CRL specific structures begin here
+
+class AlgorithmIdentifier(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algorithm', univ.ObjectIdentifier()),
+ namedtype.OptionalNamedType('parameters', univ.Any())
+ )
+
+
+class Extension(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extnID', univ.ObjectIdentifier()),
+ namedtype.DefaultedNamedType('critical', univ.Boolean('False')),
+ namedtype.NamedType('extnValue', univ.Any())
+ )
+
+
+class Extensions(univ.SequenceOf):
+ componentType = Extension()
+ sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+class SubjectPublicKeyInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('subjectPublicKey', univ.BitString())
+ )
+
+
+class UniqueIdentifier(univ.BitString):
+ pass
+
+
+class Time(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('utcTime', useful.UTCTime()),
+ namedtype.NamedType('generalTime', useful.GeneralizedTime())
+ )
+
+
+class Validity(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('notBefore', Time()),
+ namedtype.NamedType('notAfter', Time())
+ )
+
+
+class CertificateSerialNumber(univ.Integer):
+ pass
+
+
+class Version(univ.Integer):
+ namedValues = namedval.NamedValues(
+ ('v1', 0), ('v2', 1), ('v3', 2)
+ )
+
+
+class TBSCertificate(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('version', Version('v1').subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('serialNumber', CertificateSerialNumber()),
+ namedtype.NamedType('signature', AlgorithmIdentifier()),
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('validity', Validity()),
+ namedtype.NamedType('subject', Name()),
+ namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()),
+ namedtype.OptionalNamedType('issuerUniqueID', UniqueIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('subjectUniqueID', UniqueIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('extensions', Extensions().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+ )
+
+
+class Certificate(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsCertificate', TBSCertificate()),
+ namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('signatureValue', univ.BitString())
+ )
+
+
+# CRL structures
+
+class RevokedCertificate(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('userCertificate', CertificateSerialNumber()),
+ namedtype.NamedType('revocationDate', Time()),
+ namedtype.OptionalNamedType('crlEntryExtensions', Extensions())
+ )
+
+
+class TBSCertList(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('version', Version()),
+ namedtype.NamedType('signature', AlgorithmIdentifier()),
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('thisUpdate', Time()),
+ namedtype.OptionalNamedType('nextUpdate', Time()),
+ namedtype.OptionalNamedType('revokedCertificates', univ.SequenceOf(componentType=RevokedCertificate())),
+ namedtype.OptionalNamedType('crlExtensions', Extensions().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+
+class CertificateList(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsCertList', TBSCertList()),
+ namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+ )
+
+
+# Algorithm OIDs and parameter structures
+
+pkcs_1 = univ.ObjectIdentifier('1.2.840.113549.1.1')
+rsaEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.1')
+md2WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.2')
+md5WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.4')
+sha1WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.5')
+id_dsa_with_sha1 = univ.ObjectIdentifier('1.2.840.10040.4.3')
+
+
+class Dss_Sig_Value(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('r', univ.Integer()),
+ namedtype.NamedType('s', univ.Integer())
+ )
+
+
+dhpublicnumber = univ.ObjectIdentifier('1.2.840.10046.2.1')
+
+
+class ValidationParms(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('seed', univ.BitString()),
+ namedtype.NamedType('pgenCounter', univ.Integer())
+ )
+
+
+class DomainParameters(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('p', univ.Integer()),
+ namedtype.NamedType('g', univ.Integer()),
+ namedtype.NamedType('q', univ.Integer()),
+ namedtype.NamedType('j', univ.Integer()),
+ namedtype.OptionalNamedType('validationParms', ValidationParms())
+ )
+
+
+id_dsa = univ.ObjectIdentifier('1.2.840.10040.4.1')
+
+
+class Dss_Parms(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('p', univ.Integer()),
+ namedtype.NamedType('q', univ.Integer()),
+ namedtype.NamedType('g', univ.Integer())
+ )
+
+
+# x400 address syntax starts here
+
+teletex_domain_defined_attributes = univ.Integer(6)
+
+
+class TeletexDomainDefinedAttribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
+ namedtype.NamedType('value', char.TeletexString())
+ )
+
+
+class TeletexDomainDefinedAttributes(univ.SequenceOf):
+ componentType = TeletexDomainDefinedAttribute()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)
+
+
+terminal_type = univ.Integer(23)
+
+
+class TerminalType(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueSizeConstraint(0, ub_integer_options)
+ namedValues = namedval.NamedValues(
+ ('telex', 3),
+ ('teletex', 4),
+ ('g3-facsimile', 5),
+ ('g4-facsimile', 6),
+ ('ia5-terminal', 7),
+ ('videotex', 8)
+ )
+
+
+class PresentationAddress(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('pSelector', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('sSelector', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('tSelector', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('nAddresses', univ.SetOf(componentType=univ.OctetString()).subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3),
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ )
+
+
+extended_network_address = univ.Integer(22)
+
+
+class E163_4_address(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('number', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_number_length),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('sub-address', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_sub_address_length),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+
+class ExtendedNetworkAddress(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('e163-4-address', E163_4_address()),
+ namedtype.NamedType('psap-address', PresentationAddress().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+
+class PDSParameter(univ.Set):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('printable-string', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length))),
+ namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)))
+ )
+
+
+local_postal_attributes = univ.Integer(21)
+
+
+class LocalPostalAttributes(PDSParameter):
+ pass
+
+
+class UniquePostalName(PDSParameter):
+ pass
+
+
+unique_postal_name = univ.Integer(20)
+
+poste_restante_address = univ.Integer(19)
+
+
+class PosteRestanteAddress(PDSParameter):
+ pass
+
+
+post_office_box_address = univ.Integer(18)
+
+
+class PostOfficeBoxAddress(PDSParameter):
+ pass
+
+
+street_address = univ.Integer(17)
+
+
+class StreetAddress(PDSParameter):
+ pass
+
+
+class UnformattedPostalAddress(univ.Set):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('printable-address', univ.SequenceOf(componentType=char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)).subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_physical_address_lines)))),
+ namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_unformatted_address_length)))
+ )
+
+
+physical_delivery_office_name = univ.Integer(10)
+
+
+class PhysicalDeliveryOfficeName(PDSParameter):
+ pass
+
+
+physical_delivery_office_number = univ.Integer(11)
+
+
+class PhysicalDeliveryOfficeNumber(PDSParameter):
+ pass
+
+
+extension_OR_address_components = univ.Integer(12)
+
+
+class ExtensionORAddressComponents(PDSParameter):
+ pass
+
+
+physical_delivery_personal_name = univ.Integer(13)
+
+
+class PhysicalDeliveryPersonalName(PDSParameter):
+ pass
+
+
+physical_delivery_organization_name = univ.Integer(14)
+
+
+class PhysicalDeliveryOrganizationName(PDSParameter):
+ pass
+
+
+extension_physical_delivery_address_components = univ.Integer(15)
+
+
+class ExtensionPhysicalDeliveryAddressComponents(PDSParameter):
+ pass
+
+
+unformatted_postal_address = univ.Integer(16)
+
+postal_code = univ.Integer(9)
+
+
+class PostalCode(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric-code', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length))),
+ namedtype.NamedType('printable-code', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length)))
+ )
+
+
+class PhysicalDeliveryCountryName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length,
+ ub_country_name_numeric_length))),
+ namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length)))
+ )
+
+
+class PDSName(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_pds_name_length)
+
+
+physical_delivery_country_name = univ.Integer(8)
+
+
+class TeletexOrganizationalUnitName(char.TeletexString):
+ subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)
+
+
+pds_name = univ.Integer(7)
+
+teletex_organizational_unit_names = univ.Integer(5)
+
+
+class TeletexOrganizationalUnitNames(univ.SequenceOf):
+ componentType = TeletexOrganizationalUnitName()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units)
+
+
+teletex_personal_name = univ.Integer(4)
+
+
+class TeletexPersonalName(univ.Set):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('surname', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('given-name', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('initials', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('generation-qualifier', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+ )
+
+
+teletex_organization_name = univ.Integer(3)
+
+
+class TeletexOrganizationName(char.TeletexString):
+ subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organization_name_length)
+
+
+teletex_common_name = univ.Integer(2)
+
+
+class TeletexCommonName(char.TeletexString):
+ subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_common_name_length)
+
+
+class CommonName(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_common_name_length)
+
+
+common_name = univ.Integer(1)
+
+
+class ExtensionAttribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extension-attribute-type', univ.Integer().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(0, ub_extension_attributes),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('extension-attribute-value',
+ univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+
+class ExtensionAttributes(univ.SetOf):
+ componentType = ExtensionAttribute()
+ subtypeSpec = univ.SetOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_extension_attributes)
+
+
+class BuiltInDomainDefinedAttribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
+ namedtype.NamedType('value', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_value_length)))
+ )
+
+
+class BuiltInDomainDefinedAttributes(univ.SequenceOf):
+ componentType = BuiltInDomainDefinedAttribute()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)
+
+
+class OrganizationalUnitName(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)
+
+
+class OrganizationalUnitNames(univ.SequenceOf):
+ componentType = OrganizationalUnitName()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units)
+
+
+class PersonalName(univ.Set):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('surname', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('given-name', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('initials', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('generation-qualifier', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+ )
+
+
+class NumericUserIdentifier(char.NumericString):
+ subtypeSpec = char.NumericString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_numeric_user_id_length)
+
+
+class OrganizationName(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organization_name_length)
+
+
+class PrivateDomainName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length))),
+ namedtype.NamedType('printable', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length)))
+ )
+
+
+class TerminalIdentifier(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_terminal_id_length)
+
+
+class X121Address(char.NumericString):
+ subtypeSpec = char.NumericString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_x121_address_length)
+
+
+class NetworkAddress(X121Address):
+ pass
+
+
+class AdministrationDomainName(univ.Choice):
+ tagSet = univ.Choice.tagSet.tagExplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 2)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length))),
+ namedtype.NamedType('printable', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length)))
+ )
+
+
+class CountryName(univ.Choice):
+ tagSet = univ.Choice.tagSet.tagExplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 1)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length,
+ ub_country_name_numeric_length))),
+ namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length)))
+ )
+
+
+class BuiltInStandardAttributes(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('country-name', CountryName()),
+ namedtype.OptionalNamedType('administration-domain-name', AdministrationDomainName()),
+ namedtype.OptionalNamedType('network-address', NetworkAddress().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('terminal-identifier', TerminalIdentifier().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('private-domain-name', PrivateDomainName().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('organization-name', OrganizationName().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.OptionalNamedType('numeric-user-identifier', NumericUserIdentifier().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
+ namedtype.OptionalNamedType('personal-name', PersonalName().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))),
+ namedtype.OptionalNamedType('organizational-unit-names', OrganizationalUnitNames().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6)))
+ )
+
+
+class ORAddress(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('built-in-standard-attributes', BuiltInStandardAttributes()),
+ namedtype.OptionalNamedType('built-in-domain-defined-attributes', BuiltInDomainDefinedAttributes()),
+ namedtype.OptionalNamedType('extension-attributes', ExtensionAttributes())
+ )
+
+
+#
+# PKIX1Implicit88
+#
+
+id_ce_invalidityDate = univ.ObjectIdentifier('2.5.29.24')
+
+
+class InvalidityDate(useful.GeneralizedTime):
+ pass
+
+
+id_holdinstruction_none = univ.ObjectIdentifier('2.2.840.10040.2.1')
+id_holdinstruction_callissuer = univ.ObjectIdentifier('2.2.840.10040.2.2')
+id_holdinstruction_reject = univ.ObjectIdentifier('2.2.840.10040.2.3')
+
+holdInstruction = univ.ObjectIdentifier('2.2.840.10040.2')
+
+id_ce_holdInstructionCode = univ.ObjectIdentifier('2.5.29.23')
+
+
+class HoldInstructionCode(univ.ObjectIdentifier):
+ pass
+
+
+id_ce_cRLReasons = univ.ObjectIdentifier('2.5.29.21')
+
+
+class CRLReason(univ.Enumerated):
+ namedValues = namedval.NamedValues(
+ ('unspecified', 0),
+ ('keyCompromise', 1),
+ ('cACompromise', 2),
+ ('affiliationChanged', 3),
+ ('superseded', 4),
+ ('cessationOfOperation', 5),
+ ('certificateHold', 6),
+ ('removeFromCRL', 8)
+ )
+
+
+id_ce_cRLNumber = univ.ObjectIdentifier('2.5.29.20')
+
+
+class CRLNumber(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueSizeConstraint(0, MAX)
+
+
+class BaseCRLNumber(CRLNumber):
+ pass
+
+
+id_kp_serverAuth = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.1')
+id_kp_clientAuth = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.2')
+id_kp_codeSigning = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.3')
+id_kp_emailProtection = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.4')
+id_kp_ipsecEndSystem = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.5')
+id_kp_ipsecTunnel = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.6')
+id_kp_ipsecUser = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.7')
+id_kp_timeStamping = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.8')
+id_pe_authorityInfoAccess = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.1')
+id_ce_extKeyUsage = univ.ObjectIdentifier('2.5.29.37')
+
+
+class KeyPurposeId(univ.ObjectIdentifier):
+ pass
+
+
+class ExtKeyUsageSyntax(univ.SequenceOf):
+ componentType = KeyPurposeId()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+class ReasonFlags(univ.BitString):
+ namedValues = namedval.NamedValues(
+ ('unused', 0),
+ ('keyCompromise', 1),
+ ('cACompromise', 2),
+ ('affiliationChanged', 3),
+ ('superseded', 4),
+ ('cessationOfOperation', 5),
+ ('certificateHold', 6)
+ )
+
+
+class SkipCerts(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueSizeConstraint(0, MAX)
+
+
+id_ce_policyConstraints = univ.ObjectIdentifier('2.5.29.36')
+
+
+class PolicyConstraints(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('requireExplicitPolicy', SkipCerts().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('inhibitPolicyMapping', SkipCerts().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+
+id_ce_basicConstraints = univ.ObjectIdentifier('2.5.29.19')
+
+
+class BasicConstraints(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('cA', univ.Boolean(False)),
+ namedtype.OptionalNamedType('pathLenConstraint',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX)))
+ )
+
+
+id_ce_subjectDirectoryAttributes = univ.ObjectIdentifier('2.5.29.9')
+
+
+class SubjectDirectoryAttributes(univ.SequenceOf):
+ componentType = Attribute()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+class EDIPartyName(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('nameAssigner', DirectoryString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('partyName',
+ DirectoryString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+
+class AnotherName(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type-id', univ.ObjectIdentifier()),
+ namedtype.NamedType('value',
+ univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+
+class GeneralName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('otherName',
+ AnotherName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('rfc822Name',
+ char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('dNSName',
+ char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('x400Address',
+ ORAddress().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.NamedType('directoryName',
+ Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
+ namedtype.NamedType('ediPartyName',
+ EDIPartyName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))),
+ namedtype.NamedType('uniformResourceIdentifier',
+ char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))),
+ namedtype.NamedType('iPAddress', univ.OctetString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8)))
+ )
+
+
+class GeneralNames(univ.SequenceOf):
+ componentType = GeneralName()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+class AccessDescription(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('accessMethod', univ.ObjectIdentifier()),
+ namedtype.NamedType('accessLocation', GeneralName())
+ )
+
+
+class AuthorityInfoAccessSyntax(univ.SequenceOf):
+ componentType = AccessDescription()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+id_ce_deltaCRLIndicator = univ.ObjectIdentifier('2.5.29.27')
+
+
+class DistributionPointName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('fullName', GeneralNames().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('nameRelativeToCRLIssuer', RelativeDistinguishedName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+
+class DistributionPoint(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('reasons', ReasonFlags().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('cRLIssuer', GeneralNames().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)))
+ )
+
+
+class BaseDistance(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(0, MAX)
+
+
+id_ce_cRLDistributionPoints = univ.ObjectIdentifier('2.5.29.31')
+
+
+class CRLDistPointsSyntax(univ.SequenceOf):
+ componentType = DistributionPoint()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+id_ce_issuingDistributionPoint = univ.ObjectIdentifier('2.5.29.28')
+
+
+class IssuingDistributionPoint(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('onlyContainsUserCerts', univ.Boolean(False).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('onlyContainsCACerts', univ.Boolean(False).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('onlySomeReasons', ReasonFlags().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.NamedType('indirectCRL', univ.Boolean(False).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)))
+ )
+
+
+class GeneralSubtree(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('base', GeneralName()),
+ namedtype.DefaultedNamedType('minimum', BaseDistance(0).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('maximum', BaseDistance().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+
+class GeneralSubtrees(univ.SequenceOf):
+ componentType = GeneralSubtree()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+id_ce_nameConstraints = univ.ObjectIdentifier('2.5.29.30')
+
+
+class NameConstraints(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('permittedSubtrees', GeneralSubtrees().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('excludedSubtrees', GeneralSubtrees().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+
+class DisplayText(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('visibleString',
+ char.VisibleString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200)))
+ )
+
+
+class NoticeReference(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('organization', DisplayText()),
+ namedtype.NamedType('noticeNumbers', univ.SequenceOf(componentType=univ.Integer()))
+ )
+
+
+class UserNotice(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('noticeRef', NoticeReference()),
+ namedtype.OptionalNamedType('explicitText', DisplayText())
+ )
+
+
+class CPSuri(char.IA5String):
+ pass
+
+
+class PolicyQualifierId(univ.ObjectIdentifier):
+ subtypeSpec = univ.ObjectIdentifier.subtypeSpec + constraint.SingleValueConstraint(id_qt_cps, id_qt_unotice)
+
+
+class CertPolicyId(univ.ObjectIdentifier):
+ pass
+
+
+class PolicyQualifierInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('policyQualifierId', PolicyQualifierId()),
+ namedtype.NamedType('qualifier', univ.Any())
+ )
+
+
+id_ce_certificatePolicies = univ.ObjectIdentifier('2.5.29.32')
+
+
+class PolicyInformation(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('policyIdentifier', CertPolicyId()),
+ namedtype.OptionalNamedType('policyQualifiers', univ.SequenceOf(componentType=PolicyQualifierInfo()).subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
+ )
+
+
+class CertificatePolicies(univ.SequenceOf):
+ componentType = PolicyInformation()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+id_ce_policyMappings = univ.ObjectIdentifier('2.5.29.33')
+
+
+class PolicyMapping(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerDomainPolicy', CertPolicyId()),
+ namedtype.NamedType('subjectDomainPolicy', CertPolicyId())
+ )
+
+
+class PolicyMappings(univ.SequenceOf):
+ componentType = PolicyMapping()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+id_ce_privateKeyUsagePeriod = univ.ObjectIdentifier('2.5.29.16')
+
+
+class PrivateKeyUsagePeriod(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('notBefore', useful.GeneralizedTime().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('notAfter', useful.GeneralizedTime().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+
+id_ce_keyUsage = univ.ObjectIdentifier('2.5.29.15')
+
+
+class KeyUsage(univ.BitString):
+ namedValues = namedval.NamedValues(
+ ('digitalSignature', 0),
+ ('nonRepudiation', 1),
+ ('keyEncipherment', 2),
+ ('dataEncipherment', 3),
+ ('keyAgreement', 4),
+ ('keyCertSign', 5),
+ ('cRLSign', 6),
+ ('encipherOnly', 7),
+ ('decipherOnly', 8)
+ )
+
+
+id_ce = univ.ObjectIdentifier('2.5.29')
+
+id_ce_authorityKeyIdentifier = univ.ObjectIdentifier('2.5.29.35')
+
+
+class KeyIdentifier(univ.OctetString):
+ pass
+
+
+id_ce_subjectKeyIdentifier = univ.ObjectIdentifier('2.5.29.14')
+
+
+class SubjectKeyIdentifier(KeyIdentifier):
+ pass
+
+
+class AuthorityKeyIdentifier(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('keyIdentifier', KeyIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('authorityCertIssuer', GeneralNames().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('authorityCertSerialNumber', CertificateSerialNumber().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+
+id_ce_certificateIssuer = univ.ObjectIdentifier('2.5.29.29')
+
+
+class CertificateIssuer(GeneralNames):
+ pass
+
+
+id_ce_subjectAltName = univ.ObjectIdentifier('2.5.29.17')
+
+
+class SubjectAltName(GeneralNames):
+ pass
+
+
+id_ce_issuerAltName = univ.ObjectIdentifier('2.5.29.18')
+
+
+class IssuerAltName(GeneralNames):
+ pass
+
+
+# map of AttributeType -> AttributeValue
+
+certificateAttributesMap = {
+ id_at_name: X520name(),
+ id_at_surname: X520name(),
+ id_at_givenName: X520name(),
+ id_at_initials: X520name(),
+ id_at_generationQualifier: X520name(),
+ id_at_commonName: X520CommonName(),
+ id_at_localityName: X520LocalityName(),
+ id_at_stateOrProvinceName: X520StateOrProvinceName(),
+ id_at_organizationName: X520OrganizationName(),
+ id_at_organizationalUnitName: X520OrganizationalUnitName(),
+ id_at_title: X520Title(),
+ id_at_dnQualifier: X520dnQualifier(),
+ id_at_countryName: X520countryName(),
+ emailAddress: Pkcs9email(),
+}
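+
+# Illustrative sketch: the map above supplies the decode spec for the
+# ANY-typed AttributeValue, keyed by AttributeType.  `atv` is assumed to be a
+# decoded AttributeTypeAndValue taken from an RDNSequence:
+#
+#   from pyasn1.codec.der import decoder
+#   spec = certificateAttributesMap.get(atv['type'])
+#   if spec is not None:
+#       value, _ = decoder.decode(atv['value'].asOctets(), asn1Spec=spec)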
+
+# map of Certificate Extension OIDs to Extensions
+
+certificateExtensionsMap = {
+ id_ce_authorityKeyIdentifier: AuthorityKeyIdentifier(),
+ id_ce_subjectKeyIdentifier: SubjectKeyIdentifier(),
+ id_ce_keyUsage: KeyUsage(),
+ id_ce_privateKeyUsagePeriod: PrivateKeyUsagePeriod(),
+ id_ce_certificatePolicies: PolicyInformation(), # RFC 2459 defines the extension as CertificatePolicies (a SEQUENCE OF PolicyInformation)
+ id_ce_policyMappings: PolicyMappings(),
+ id_ce_subjectAltName: SubjectAltName(),
+ id_ce_issuerAltName: IssuerAltName(),
+ id_ce_subjectDirectoryAttributes: SubjectDirectoryAttributes(),
+ id_ce_basicConstraints: BasicConstraints(),
+ id_ce_nameConstraints: NameConstraints(),
+ id_ce_policyConstraints: PolicyConstraints(),
+ id_ce_extKeyUsage: ExtKeyUsageSyntax(),
+ id_ce_cRLDistributionPoints: CRLDistPointsSyntax(),
+ id_pe_authorityInfoAccess: AuthorityInfoAccessSyntax(),
+ id_ce_cRLNumber: univ.Integer(),
+ id_ce_deltaCRLIndicator: BaseCRLNumber(),
+ id_ce_issuingDistributionPoint: IssuingDistributionPoint(),
+ id_ce_cRLReasons: CRLReason(),
+ id_ce_holdInstructionCode: univ.ObjectIdentifier(),
+ id_ce_invalidityDate: useful.GeneralizedTime(),
+ id_ce_certificateIssuer: GeneralNames(),
+}
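+
+# Illustrative sketch: each decoded Extension carries the DER of an OCTET
+# STRING in its ANY-typed 'extnValue'; the map above supplies the spec for the
+# inner value.  `cert` is assumed to be a Certificate decoded with the pyasn1
+# DER decoder and to carry extensions:
+#
+#   from pyasn1.codec.der import decoder
+#   for extn in cert['tbsCertificate']['extensions']:
+#       spec = certificateExtensionsMap.get(extn['extnID'])
+#       if spec is None:
+#           continue
+#       inner, _ = decoder.decode(extn['extnValue'].asOctets(), asn1Spec=univ.OctetString())
+#       value, _ = decoder.decode(inner.asOctets(), asn1Spec=spec)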
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc2511.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2511.py
new file mode 100644
index 0000000000..4ae7db5502
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2511.py
@@ -0,0 +1,258 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# X.509 Certificate Request Message Format (CRMF) syntax
+#
+# ASN.1 source from:
+# http://tools.ietf.org/html/rfc2511
+#
+# Sample captures could be obtained with OpenSSL
+#
+from pyasn1_modules.rfc2459 import *
+from pyasn1_modules import rfc2315
+
+MAX = float('inf')
+
+id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7')
+id_pkip = univ.ObjectIdentifier('1.3.6.1.5.5.7.5')
+id_regCtrl = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1')
+id_regCtrl_regToken = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.1')
+id_regCtrl_authenticator = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.2')
+id_regCtrl_pkiPublicationInfo = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.3')
+id_regCtrl_pkiArchiveOptions = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.4')
+id_regCtrl_oldCertID = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.5')
+id_regCtrl_protocolEncrKey = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.6')
+id_regInfo = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.2')
+id_regInfo_utf8Pairs = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.2.1')
+id_regInfo_certReq = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.2.2')
+
+
+# This should be in PKIX Certificate Extensions module
+
+class GeneralName(univ.OctetString):
+ pass
+
+
+# end of PKIX Certificate Extensions module
+
+class UTF8Pairs(char.UTF8String):
+ pass
+
+
+class ProtocolEncrKey(SubjectPublicKeyInfo):
+ pass
+
+
+class CertId(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuer', GeneralName()),
+ namedtype.NamedType('serialNumber', univ.Integer())
+ )
+
+
+class OldCertId(CertId):
+ pass
+
+
+class KeyGenParameters(univ.OctetString):
+ pass
+
+
+class EncryptedValue(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('intendedAlg', AlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('symmAlg', AlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.OptionalNamedType('encSymmKey', univ.BitString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.OptionalNamedType('keyAlg', AlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.OptionalNamedType('valueHint', univ.OctetString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
+ namedtype.NamedType('encValue', univ.BitString())
+ )
+
+
+class EncryptedKey(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('encryptedValue', EncryptedValue()),
+ namedtype.NamedType('envelopedData', rfc2315.EnvelopedData().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+
+class PKIArchiveOptions(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('encryptedPrivKey', EncryptedKey().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('keyGenParameters', KeyGenParameters().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('archiveRemGenPrivKey',
+ univ.Boolean().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+
+class SinglePubInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('pubMethod', univ.Integer(
+ namedValues=namedval.NamedValues(('dontCare', 0), ('x500', 1), ('web', 2), ('ldap', 3)))),
+ namedtype.OptionalNamedType('pubLocation', GeneralName())
+ )
+
+
+class PKIPublicationInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('action',
+ univ.Integer(namedValues=namedval.NamedValues(('dontPublish', 0), ('pleasePublish', 1)))),
+ namedtype.OptionalNamedType('pubInfos', univ.SequenceOf(componentType=SinglePubInfo()).subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
+ )
+
+
+class Authenticator(char.UTF8String):
+ pass
+
+
+class RegToken(char.UTF8String):
+ pass
+
+
+class SubsequentMessage(univ.Integer):
+ namedValues = namedval.NamedValues(
+ ('encrCert', 0),
+ ('challengeResp', 1)
+ )
+
+
+class POPOPrivKey(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('thisMessage',
+ univ.BitString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('subsequentMessage', SubsequentMessage().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('dhMAC',
+ univ.BitString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+
+class PBMParameter(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('salt', univ.OctetString()),
+ namedtype.NamedType('owf', AlgorithmIdentifier()),
+ namedtype.NamedType('iterationCount', univ.Integer()),
+ namedtype.NamedType('mac', AlgorithmIdentifier())
+ )
+
+
+class PKMACValue(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algId', AlgorithmIdentifier()),
+ namedtype.NamedType('value', univ.BitString())
+ )
+
+
+class POPOSigningKeyInput(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType(
+ 'authInfo', univ.Choice(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType(
+ 'sender', GeneralName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
+ ),
+ namedtype.NamedType('publicKeyMAC', PKMACValue())
+ )
+ )
+ ),
+ namedtype.NamedType('publicKey', SubjectPublicKeyInfo())
+ )
+
+
+class POPOSigningKey(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('poposkInput', POPOSigningKeyInput().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('algorithmIdentifier', AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+ )
+
+
+class ProofOfPossession(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('raVerified',
+ univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('signature', POPOSigningKey().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('keyEncipherment', POPOPrivKey().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.NamedType('keyAgreement', POPOPrivKey().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3)))
+ )
+
+
+class Controls(univ.SequenceOf):
+ componentType = AttributeTypeAndValue()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+class OptionalValidity(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('notBefore',
+ Time().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('notAfter',
+ Time().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+
+class CertTemplate(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('version', Version().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('serialNumber', univ.Integer().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('signingAlg', AlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.OptionalNamedType('issuer', Name().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.OptionalNamedType('validity', OptionalValidity().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
+ namedtype.OptionalNamedType('subject', Name().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
+ namedtype.OptionalNamedType('publicKey', SubjectPublicKeyInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))),
+ namedtype.OptionalNamedType('issuerUID', UniqueIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.OptionalNamedType('subjectUID', UniqueIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8))),
+ namedtype.OptionalNamedType('extensions', Extensions().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9)))
+ )
+
+
+class CertRequest(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certReqId', univ.Integer()),
+ namedtype.NamedType('certTemplate', CertTemplate()),
+ namedtype.OptionalNamedType('controls', Controls())
+ )
+
+
+class CertReq(CertRequest):
+ pass
+
+
+class CertReqMsg(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certReq', CertRequest()),
+ namedtype.OptionalNamedType('pop', ProofOfPossession()),
+ namedtype.OptionalNamedType('regInfo', univ.SequenceOf(componentType=AttributeTypeAndValue()).subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
+ )
+
+
+class CertReqMessages(univ.SequenceOf):
+ componentType = CertReqMsg()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
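+
+
+# Illustrative sketch: CertReqMessages is the top-level CRMF structure, so a
+# DER-encoded request body would be decoded against it.  `crmf_der` is assumed
+# to hold such an encoding:
+#
+#   from pyasn1.codec.der import decoder
+#   reqs, _ = decoder.decode(crmf_der, asn1Spec=CertReqMessages())
+#   template = reqs[0]['certReq']['certTemplate']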
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc2560.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2560.py
new file mode 100644
index 0000000000..472099e2df
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc2560.py
@@ -0,0 +1,220 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# OCSP request/response syntax
+#
+# Derived from a minimal OCSP library (RFC 2560) written by
+# Bud P. Bruegger <bud@ancitel.it>
+# Copyright: Ancitel, S.p.a, Rome, Italy
+# License: BSD
+#
+
+#
+# current limitations:
+# * request and response works only for a single certificate
+# * only some values are parsed out of the response
+# * the request doesn't set a nonce or a signature
+# * there is no signature validation of the response
+# * dates are left as strings in GeneralizedTime format -- datetime.datetime
+# would be nicer
+#
+from pyasn1.type import tag, namedtype, namedval, univ, useful
+from pyasn1_modules import rfc2459
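+
+
+# Illustrative sketch: per RFC 2560, when ResponseBytes carries the
+# id_pkix_ocsp_basic response type, the accompanying OCTET STRING holds the
+# DER encoding of a BasicOCSPResponse (both defined below).  `basic_der` is
+# assumed to hold those octets:
+#
+#   from pyasn1.codec.der import decoder
+#   basic, _ = decoder.decode(basic_der, asn1Spec=BasicOCSPResponse())
+#   for single in basic['tbsResponseData']['responses']:
+#       print(single['certID']['serialNumber'], single['certStatus'].getName())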
+
+
+# Start of OCSP module definitions
+
+# This should be in directory Authentication Framework (X.509) module
+
+class CRLReason(univ.Enumerated):
+ namedValues = namedval.NamedValues(
+ ('unspecified', 0),
+ ('keyCompromise', 1),
+ ('cACompromise', 2),
+ ('affiliationChanged', 3),
+ ('superseded', 4),
+ ('cessationOfOperation', 5),
+ ('certificateHold', 6),
+ ('removeFromCRL', 8),
+ ('privilegeWithdrawn', 9),
+ ('aACompromise', 10)
+ )
+
+
+# end of directory Authentication Framework (X.509) module
+
+# This should be in PKIX Certificate Extensions module
+
+class GeneralName(univ.OctetString):
+ pass
+
+
+# end of PKIX Certificate Extensions module
+
+id_kp_OCSPSigning = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 3, 9))
+id_pkix_ocsp = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1))
+id_pkix_ocsp_basic = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 1))
+id_pkix_ocsp_nonce = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 2))
+id_pkix_ocsp_crl = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 3))
+id_pkix_ocsp_response = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 4))
+id_pkix_ocsp_nocheck = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 5))
+id_pkix_ocsp_archive_cutoff = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 6))
+id_pkix_ocsp_service_locator = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 7))
+
+
+class AcceptableResponses(univ.SequenceOf):
+ componentType = univ.ObjectIdentifier()
+
+
+class ArchiveCutoff(useful.GeneralizedTime):
+ pass
+
+
+class UnknownInfo(univ.Null):
+ pass
+
+
+class RevokedInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('revocationTime', useful.GeneralizedTime()),
+ namedtype.OptionalNamedType('revocationReason', CRLReason().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+
+class CertID(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('hashAlgorithm', rfc2459.AlgorithmIdentifier()),
+ namedtype.NamedType('issuerNameHash', univ.OctetString()),
+ namedtype.NamedType('issuerKeyHash', univ.OctetString()),
+ namedtype.NamedType('serialNumber', rfc2459.CertificateSerialNumber())
+ )
+
+
+class CertStatus(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('good',
+ univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('revoked',
+ RevokedInfo().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('unknown',
+ UnknownInfo().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+
+class SingleResponse(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certID', CertID()),
+ namedtype.NamedType('certStatus', CertStatus()),
+ namedtype.NamedType('thisUpdate', useful.GeneralizedTime()),
+ namedtype.OptionalNamedType('nextUpdate', useful.GeneralizedTime().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('singleExtensions', rfc2459.Extensions().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+
+class KeyHash(univ.OctetString):
+ pass
+
+
+class ResponderID(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('byName',
+ rfc2459.Name().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('byKey',
+ KeyHash().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+
+class Version(univ.Integer):
+ namedValues = namedval.NamedValues(('v1', 0))
+
+
+class ResponseData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('version', Version('v1').subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('responderID', ResponderID()),
+ namedtype.NamedType('producedAt', useful.GeneralizedTime()),
+ namedtype.NamedType('responses', univ.SequenceOf(componentType=SingleResponse())),
+ namedtype.OptionalNamedType('responseExtensions', rfc2459.Extensions().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+
+class BasicOCSPResponse(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsResponseData', ResponseData()),
+ namedtype.NamedType('signatureAlgorithm', rfc2459.AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString()),
+ namedtype.OptionalNamedType('certs', univ.SequenceOf(componentType=rfc2459.Certificate()).subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+
+class ResponseBytes(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('responseType', univ.ObjectIdentifier()),
+ namedtype.NamedType('response', univ.OctetString())
+ )
+
+
+class OCSPResponseStatus(univ.Enumerated):
+ namedValues = namedval.NamedValues(
+ ('successful', 0),
+ ('malformedRequest', 1),
+ ('internalError', 2),
+ ('tryLater', 3),
+ ('undefinedStatus', 4), # should never occur
+ ('sigRequired', 5),
+ ('unauthorized', 6)
+ )
+
+
+class OCSPResponse(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('responseStatus', OCSPResponseStatus()),
+ namedtype.OptionalNamedType('responseBytes', ResponseBytes().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+
+class Request(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('reqCert', CertID()),
+ namedtype.OptionalNamedType('singleRequestExtensions', rfc2459.Extensions().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+
+class Signature(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('signatureAlgorithm', rfc2459.AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString()),
+ namedtype.OptionalNamedType('certs', univ.SequenceOf(componentType=rfc2459.Certificate()).subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+
+class TBSRequest(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('version', Version('v1').subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('requestorName', GeneralName().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('requestList', univ.SequenceOf(componentType=Request())),
+ namedtype.OptionalNamedType('requestExtensions', rfc2459.Extensions().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+
+class OCSPRequest(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsRequest', TBSRequest()),
+ namedtype.OptionalNamedType('optionalSignature', Signature().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
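[Illustrative note, not part of the upstream file] A quick sanity check of the OCSP definitions above: a round trip through pyasn1's DER codec using the simplest structure, an OCSPResponse that carries only the mandatory responseStatus and leaves the optional responseBytes absent.

from pyasn1.codec.der import decoder, encoder
from pyasn1_modules import rfc2560

response = rfc2560.OCSPResponse()
response.setComponentByName('responseStatus',
                            rfc2560.OCSPResponseStatus('tryLater'))

substrate = encoder.encode(response)                        # DER bytes
decoded, rest = decoder.decode(substrate, asn1Spec=rfc2560.OCSPResponse())
assert not rest and decoded['responseStatus'] == 3          # 'tryLater'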
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc3279.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3279.py
new file mode 100644
index 0000000000..f69ff085e6
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3279.py
@@ -0,0 +1,231 @@
+#
+# This file is part of pyasn1-modules.
+#
+# Copyright (c) 2017, Danielle Madeley <danielle@madeley.id.au>
+# License: http://pyasn1.sf.net/license.html
+#
+# Derived from RFC 3279
+#
+from pyasn1.type import univ, char, namedtype, namedval, tag, constraint, useful
+
+
+def _OID(*components):
+ output = []
+ for x in tuple(components):
+ if isinstance(x, univ.ObjectIdentifier):
+ output.extend(list(x))
+ else:
+ output.append(int(x))
+
+ return univ.ObjectIdentifier(output)
+
+
+md2 = _OID(1, 2, 840, 113549, 2, 2)
+md5 = _OID(1, 2, 840, 113549, 2, 5)
+id_sha1 = _OID(1, 3, 14, 3, 2, 26)
+id_dsa = _OID(1, 2, 840, 10040, 4, 1)
+
+
+class DSAPublicKey(univ.Integer):
+ pass
+
+
+class Dss_Parms(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('p', univ.Integer()),
+ namedtype.NamedType('q', univ.Integer()),
+ namedtype.NamedType('g', univ.Integer())
+ )
+
+
+id_dsa_with_sha1 = _OID(1, 2, 840, 10040, 4, 3)
+
+
+class Dss_Sig_Value(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('r', univ.Integer()),
+ namedtype.NamedType('s', univ.Integer())
+ )
+
+
+pkcs_1 = _OID(1, 2, 840, 113549, 1, 1)
+rsaEncryption = _OID(pkcs_1, 1)
+md2WithRSAEncryption = _OID(pkcs_1, 2)
+md5WithRSAEncryption = _OID(pkcs_1, 4)
+sha1WithRSAEncryption = _OID(pkcs_1, 5)
+
+
+class RSAPublicKey(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('modulus', univ.Integer()),
+ namedtype.NamedType('publicExponent', univ.Integer())
+ )
+
+
+dhpublicnumber = _OID(1, 2, 840, 10046, 2, 1)
+
+
+class DHPublicKey(univ.Integer):
+ pass
+
+
+class ValidationParms(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('seed', univ.BitString()),
+ namedtype.NamedType('pgenCounter', univ.Integer())
+ )
+
+
+class DomainParameters(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('p', univ.Integer()),
+ namedtype.NamedType('g', univ.Integer()),
+ namedtype.NamedType('q', univ.Integer()),
+ namedtype.OptionalNamedType('j', univ.Integer()),
+ namedtype.OptionalNamedType('validationParms', ValidationParms())
+ )
+
+
+id_keyExchangeAlgorithm = _OID(2, 16, 840, 1, 101, 2, 1, 1, 22)
+
+
+class KEA_Parms_Id(univ.OctetString):
+ pass
+
+
+ansi_X9_62 = _OID(1, 2, 840, 10045)
+
+
+class FieldID(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('fieldType', univ.ObjectIdentifier()),
+ namedtype.NamedType('parameters', univ.Any())
+ )
+
+
+id_ecSigType = _OID(ansi_X9_62, 4)
+ecdsa_with_SHA1 = _OID(id_ecSigType, 1)
+
+
+class ECDSA_Sig_Value(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('r', univ.Integer()),
+ namedtype.NamedType('s', univ.Integer())
+ )
+
+
+id_fieldType = _OID(ansi_X9_62, 1)
+prime_field = _OID(id_fieldType, 1)
+
+
+class Prime_p(univ.Integer):
+ pass
+
+
+characteristic_two_field = _OID(id_fieldType, 2)
+
+
+class Characteristic_two(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('m', univ.Integer()),
+ namedtype.NamedType('basis', univ.ObjectIdentifier()),
+ namedtype.NamedType('parameters', univ.Any())
+ )
+
+
+id_characteristic_two_basis = _OID(characteristic_two_field, 3)
+gnBasis = _OID(id_characteristic_two_basis, 1)
+tpBasis = _OID(id_characteristic_two_basis, 2)
+
+
+class Trinomial(univ.Integer):
+ pass
+
+
+ppBasis = _OID(id_characteristic_two_basis, 3)
+
+
+class Pentanomial(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('k1', univ.Integer()),
+ namedtype.NamedType('k2', univ.Integer()),
+ namedtype.NamedType('k3', univ.Integer())
+ )
+
+
+class FieldElement(univ.OctetString):
+ pass
+
+
+class ECPoint(univ.OctetString):
+ pass
+
+
+class Curve(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('a', FieldElement()),
+ namedtype.NamedType('b', FieldElement()),
+ namedtype.OptionalNamedType('seed', univ.BitString())
+ )
+
+
+class ECPVer(univ.Integer):
+ namedValues = namedval.NamedValues(
+ ('ecpVer1', 1)
+ )
+
+
+class ECParameters(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', ECPVer()),
+ namedtype.NamedType('fieldID', FieldID()),
+ namedtype.NamedType('curve', Curve()),
+ namedtype.NamedType('base', ECPoint()),
+ namedtype.NamedType('order', univ.Integer()),
+ namedtype.OptionalNamedType('cofactor', univ.Integer())
+ )
+
+
+class EcpkParameters(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('ecParameters', ECParameters()),
+ namedtype.NamedType('namedCurve', univ.ObjectIdentifier()),
+ namedtype.NamedType('implicitlyCA', univ.Null())
+ )
+
+
+id_publicKeyType = _OID(ansi_X9_62, 2)
+id_ecPublicKey = _OID(id_publicKeyType, 1)
+
+ellipticCurve = _OID(ansi_X9_62, 3)
+
+c_TwoCurve = _OID(ellipticCurve, 0)
+c2pnb163v1 = _OID(c_TwoCurve, 1)
+c2pnb163v2 = _OID(c_TwoCurve, 2)
+c2pnb163v3 = _OID(c_TwoCurve, 3)
+c2pnb176w1 = _OID(c_TwoCurve, 4)
+c2tnb191v1 = _OID(c_TwoCurve, 5)
+c2tnb191v2 = _OID(c_TwoCurve, 6)
+c2tnb191v3 = _OID(c_TwoCurve, 7)
+c2onb191v4 = _OID(c_TwoCurve, 8)
+c2onb191v5 = _OID(c_TwoCurve, 9)
+c2pnb208w1 = _OID(c_TwoCurve, 10)
+c2tnb239v1 = _OID(c_TwoCurve, 11)
+c2tnb239v2 = _OID(c_TwoCurve, 12)
+c2tnb239v3 = _OID(c_TwoCurve, 13)
+c2onb239v4 = _OID(c_TwoCurve, 14)
+c2onb239v5 = _OID(c_TwoCurve, 15)
+c2pnb272w1 = _OID(c_TwoCurve, 16)
+c2pnb304w1 = _OID(c_TwoCurve, 17)
+c2tnb359v1 = _OID(c_TwoCurve, 18)
+c2pnb368w1 = _OID(c_TwoCurve, 19)
+c2tnb431r1 = _OID(c_TwoCurve, 20)
+
+primeCurve = _OID(ellipticCurve, 1)
+prime192v1 = _OID(primeCurve, 1)
+prime192v2 = _OID(primeCurve, 2)
+prime192v3 = _OID(primeCurve, 3)
+prime239v1 = _OID(primeCurve, 4)
+prime239v2 = _OID(primeCurve, 5)
+prime239v3 = _OID(primeCurve, 6)
+prime256v1 = _OID(primeCurve, 7)
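[Illustrative note, not part of the upstream file] A short sketch of how the _OID helper above composes arcs, and how one of the signature-value sequences can be DER-encoded; the r and s values are placeholders, not a real signature.

from pyasn1.codec.der import encoder
from pyasn1_modules import rfc3279

# ecdsa_with_SHA1 is ansi_X9_62 (1.2.840.10045) extended by the arcs 4 and 1.
assert rfc3279.ecdsa_with_SHA1 == (1, 2, 840, 10045, 4, 1)

# An ECDSA signature value is a SEQUENCE of the two integers r and s.
signature = rfc3279.ECDSA_Sig_Value()
signature.setComponentByName('r', 12345)
signature.setComponentByName('s', 67890)
der_signature = encoder.encode(signature)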
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc3280.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3280.py
new file mode 100644
index 0000000000..3614e6ce91
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3280.py
@@ -0,0 +1,1537 @@
+# coding: utf-8
+#
+# This file is part of pyasn1-modules software.
+#
+# Created by Stanisław Pitucha with asn1ate tool.
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Internet X.509 Public Key Infrastructure Certificate and Certificate
+# Revocation List (CRL) Profile
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc3280.txt
+#
+from pyasn1.type import univ, char, namedtype, namedval, tag, constraint, useful
+
+MAX = float('inf')
+
+
+def _OID(*components):
+ output = []
+ for x in tuple(components):
+ if isinstance(x, univ.ObjectIdentifier):
+ output.extend(list(x))
+ else:
+ output.append(int(x))
+
+ return univ.ObjectIdentifier(output)
+
+
+unformatted_postal_address = univ.Integer(16)
+
+ub_organizational_units = univ.Integer(4)
+
+ub_organizational_unit_name_length = univ.Integer(32)
+
+
+class OrganizationalUnitName(char.PrintableString):
+ pass
+
+
+OrganizationalUnitName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)
+
+
+class OrganizationalUnitNames(univ.SequenceOf):
+ pass
+
+
+OrganizationalUnitNames.componentType = OrganizationalUnitName()
+OrganizationalUnitNames.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units)
+
+
+class AttributeType(univ.ObjectIdentifier):
+ pass
+
+
+id_at = _OID(2, 5, 4)
+
+id_at_name = _OID(id_at, 41)
+
+ub_pds_parameter_length = univ.Integer(30)
+
+
+class PDSParameter(univ.Set):
+ pass
+
+
+PDSParameter.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('printable-string', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length))),
+ namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)))
+)
+
+
+class PhysicalDeliveryOrganizationName(PDSParameter):
+ pass
+
+
+ub_organization_name_length = univ.Integer(64)
+
+ub_domain_defined_attribute_type_length = univ.Integer(8)
+
+ub_domain_defined_attribute_value_length = univ.Integer(128)
+
+
+class TeletexDomainDefinedAttribute(univ.Sequence):
+ pass
+
+
+TeletexDomainDefinedAttribute.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
+ namedtype.NamedType('value', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_value_length)))
+)
+
+id_pkix = _OID(1, 3, 6, 1, 5, 5, 7)
+
+id_qt = _OID(id_pkix, 2)
+
+
+class PresentationAddress(univ.Sequence):
+ pass
+
+
+PresentationAddress.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('pSelector', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('sSelector', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('tSelector', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('nAddresses', univ.SetOf(componentType=univ.OctetString()).subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+)
+
+
+class AlgorithmIdentifier(univ.Sequence):
+ pass
+
+
+AlgorithmIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algorithm', univ.ObjectIdentifier()),
+ namedtype.OptionalNamedType('parameters', univ.Any())
+)
+
+
+class UniqueIdentifier(univ.BitString):
+ pass
+
+
+class Extension(univ.Sequence):
+ pass
+
+
+Extension.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extnID', univ.ObjectIdentifier()),
+ namedtype.DefaultedNamedType('critical', univ.Boolean().subtype(value=0)),
+ namedtype.NamedType('extnValue', univ.OctetString())
+)
+
+
+class Extensions(univ.SequenceOf):
+ pass
+
+
+Extensions.componentType = Extension()
+Extensions.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class CertificateSerialNumber(univ.Integer):
+ pass
+
+
+class SubjectPublicKeyInfo(univ.Sequence):
+ pass
+
+
+SubjectPublicKeyInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('subjectPublicKey', univ.BitString())
+)
+
+
+class Time(univ.Choice):
+ pass
+
+
+Time.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('utcTime', useful.UTCTime()),
+ namedtype.NamedType('generalTime', useful.GeneralizedTime())
+)
+
+
+class Validity(univ.Sequence):
+ pass
+
+
+Validity.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('notBefore', Time()),
+ namedtype.NamedType('notAfter', Time())
+)
+
+
+class Version(univ.Integer):
+ pass
+
+
+Version.namedValues = namedval.NamedValues(
+ ('v1', 0),
+ ('v2', 1),
+ ('v3', 2)
+)
+
+
+class AttributeValue(univ.Any):
+ pass
+
+
+class AttributeTypeAndValue(univ.Sequence):
+ pass
+
+
+AttributeTypeAndValue.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeType()),
+ namedtype.NamedType('value', AttributeValue())
+)
+
+
+class RelativeDistinguishedName(univ.SetOf):
+ pass
+
+
+RelativeDistinguishedName.componentType = AttributeTypeAndValue()
+RelativeDistinguishedName.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class RDNSequence(univ.SequenceOf):
+ pass
+
+
+RDNSequence.componentType = RelativeDistinguishedName()
+
+
+class Name(univ.Choice):
+ pass
+
+
+Name.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('rdnSequence', RDNSequence())
+)
+
+
+class TBSCertificate(univ.Sequence):
+ pass
+
+
+TBSCertificate.componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('version',
+ Version().subtype(explicitTag=tag.Tag(tag.tagClassContext,
+ tag.tagFormatSimple, 0)).subtype(value="v1")),
+ namedtype.NamedType('serialNumber', CertificateSerialNumber()),
+ namedtype.NamedType('signature', AlgorithmIdentifier()),
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('validity', Validity()),
+ namedtype.NamedType('subject', Name()),
+ namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()),
+ namedtype.OptionalNamedType('issuerUniqueID', UniqueIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('subjectUniqueID', UniqueIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('extensions',
+ Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+)
+
+
+class Certificate(univ.Sequence):
+ pass
+
+
+Certificate.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsCertificate', TBSCertificate()),
+ namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+)
+
+ub_surname_length = univ.Integer(40)
+
+
+class TeletexOrganizationName(char.TeletexString):
+ pass
+
+
+TeletexOrganizationName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organization_name_length)
+
+ub_e163_4_sub_address_length = univ.Integer(40)
+
+teletex_common_name = univ.Integer(2)
+
+ub_country_name_alpha_length = univ.Integer(2)
+
+ub_country_name_numeric_length = univ.Integer(3)
+
+
+class CountryName(univ.Choice):
+ pass
+
+
+CountryName.tagSet = univ.Choice.tagSet.tagExplicitly(tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 1))
+CountryName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))),
+ namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length)))
+)
+
+extension_OR_address_components = univ.Integer(12)
+
+id_at_dnQualifier = _OID(id_at, 46)
+
+ub_e163_4_number_length = univ.Integer(15)
+
+
+class ExtendedNetworkAddress(univ.Choice):
+ pass
+
+
+ExtendedNetworkAddress.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('e163-4-address', univ.Sequence(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('number', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_number_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('sub-address', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_sub_address_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ ))
+ ),
+ namedtype.NamedType('psap-address', PresentationAddress().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+)
+
+terminal_type = univ.Integer(23)
+
+id_domainComponent = _OID(0, 9, 2342, 19200300, 100, 1, 25)
+
+ub_state_name = univ.Integer(128)
+
+
+class X520StateOrProvinceName(univ.Choice):
+ pass
+
+
+X520StateOrProvinceName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name)))
+)
+
+ub_organization_name = univ.Integer(64)
+
+
+class X520OrganizationName(univ.Choice):
+ pass
+
+
+X520OrganizationName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name)))
+)
+
+ub_emailaddress_length = univ.Integer(128)
+
+
+class ExtensionPhysicalDeliveryAddressComponents(PDSParameter):
+ pass
+
+
+id_at_surname = _OID(id_at, 4)
+
+ub_common_name_length = univ.Integer(64)
+
+id_ad = _OID(id_pkix, 48)
+
+ub_numeric_user_id_length = univ.Integer(32)
+
+
+class NumericUserIdentifier(char.NumericString):
+ pass
+
+
+NumericUserIdentifier.subtypeSpec = constraint.ValueSizeConstraint(1, ub_numeric_user_id_length)
+
+
+class OrganizationName(char.PrintableString):
+ pass
+
+
+OrganizationName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organization_name_length)
+
+ub_domain_name_length = univ.Integer(16)
+
+
+class AdministrationDomainName(univ.Choice):
+ pass
+
+
+AdministrationDomainName.tagSet = univ.Choice.tagSet.tagExplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 2))
+AdministrationDomainName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length))),
+ namedtype.NamedType('printable', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length)))
+)
+
+
+class PrivateDomainName(univ.Choice):
+ pass
+
+
+PrivateDomainName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length))),
+ namedtype.NamedType('printable', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length)))
+)
+
+ub_generation_qualifier_length = univ.Integer(3)
+
+ub_given_name_length = univ.Integer(16)
+
+ub_initials_length = univ.Integer(5)
+
+
+class PersonalName(univ.Set):
+ pass
+
+
+PersonalName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('surname', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('given-name', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('initials', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('generation-qualifier', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+)
+
+ub_terminal_id_length = univ.Integer(24)
+
+
+class TerminalIdentifier(char.PrintableString):
+ pass
+
+
+TerminalIdentifier.subtypeSpec = constraint.ValueSizeConstraint(1, ub_terminal_id_length)
+
+ub_x121_address_length = univ.Integer(16)
+
+
+class X121Address(char.NumericString):
+ pass
+
+
+X121Address.subtypeSpec = constraint.ValueSizeConstraint(1, ub_x121_address_length)
+
+
+class NetworkAddress(X121Address):
+ pass
+
+
+class BuiltInStandardAttributes(univ.Sequence):
+ pass
+
+
+BuiltInStandardAttributes.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('country-name', CountryName()),
+ namedtype.OptionalNamedType('administration-domain-name', AdministrationDomainName()),
+ namedtype.OptionalNamedType('network-address', NetworkAddress().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('terminal-identifier', TerminalIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('private-domain-name', PrivateDomainName().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.OptionalNamedType('organization-name', OrganizationName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.OptionalNamedType('numeric-user-identifier', NumericUserIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
+ namedtype.OptionalNamedType('personal-name', PersonalName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
+ namedtype.OptionalNamedType('organizational-unit-names', OrganizationalUnitNames().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6)))
+)
+
+ub_domain_defined_attributes = univ.Integer(4)
+
+
+class BuiltInDomainDefinedAttribute(univ.Sequence):
+ pass
+
+
+BuiltInDomainDefinedAttribute.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
+ namedtype.NamedType('value', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_value_length)))
+)
+
+
+class BuiltInDomainDefinedAttributes(univ.SequenceOf):
+ pass
+
+
+BuiltInDomainDefinedAttributes.componentType = BuiltInDomainDefinedAttribute()
+BuiltInDomainDefinedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)
+
+ub_extension_attributes = univ.Integer(256)
+
+
+class ExtensionAttribute(univ.Sequence):
+ pass
+
+
+ExtensionAttribute.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extension-attribute-type', univ.Integer().subtype(
+ subtypeSpec=constraint.ValueRangeConstraint(0, ub_extension_attributes)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('extension-attribute-value',
+ univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class ExtensionAttributes(univ.SetOf):
+ pass
+
+
+ExtensionAttributes.componentType = ExtensionAttribute()
+ExtensionAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, ub_extension_attributes)
+
+
+class ORAddress(univ.Sequence):
+ pass
+
+
+ORAddress.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('built-in-standard-attributes', BuiltInStandardAttributes()),
+ namedtype.OptionalNamedType('built-in-domain-defined-attributes', BuiltInDomainDefinedAttributes()),
+ namedtype.OptionalNamedType('extension-attributes', ExtensionAttributes())
+)
+
+id_pe = _OID(id_pkix, 1)
+
+ub_title = univ.Integer(64)
+
+
+class X520Title(univ.Choice):
+ pass
+
+
+X520Title.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title)))
+)
+
+id_at_organizationalUnitName = _OID(id_at, 11)
+
+
+class EmailAddress(char.IA5String):
+ pass
+
+
+EmailAddress.subtypeSpec = constraint.ValueSizeConstraint(1, ub_emailaddress_length)
+
+physical_delivery_country_name = univ.Integer(8)
+
+id_at_givenName = _OID(id_at, 42)
+
+
+class TeletexCommonName(char.TeletexString):
+ pass
+
+
+TeletexCommonName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_common_name_length)
+
+id_qt_cps = _OID(id_qt, 1)
+
+
+class LocalPostalAttributes(PDSParameter):
+ pass
+
+
+class StreetAddress(PDSParameter):
+ pass
+
+
+id_kp = _OID(id_pkix, 3)
+
+
+class DirectoryString(univ.Choice):
+ pass
+
+
+DirectoryString.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
+)
+
+
+class DomainComponent(char.IA5String):
+ pass
+
+
+id_at_initials = _OID(id_at, 43)
+
+id_qt_unotice = _OID(id_qt, 2)
+
+ub_pds_name_length = univ.Integer(16)
+
+
+class PDSName(char.PrintableString):
+ pass
+
+
+PDSName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_pds_name_length)
+
+
+class PosteRestanteAddress(PDSParameter):
+ pass
+
+
+class DistinguishedName(RDNSequence):
+ pass
+
+
+class CommonName(char.PrintableString):
+ pass
+
+
+CommonName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_common_name_length)
+
+ub_serial_number = univ.Integer(64)
+
+
+class X520SerialNumber(char.PrintableString):
+ pass
+
+
+X520SerialNumber.subtypeSpec = constraint.ValueSizeConstraint(1, ub_serial_number)
+
+id_at_generationQualifier = _OID(id_at, 44)
+
+ub_organizational_unit_name = univ.Integer(64)
+
+id_ad_ocsp = _OID(id_ad, 1)
+
+
+class TeletexOrganizationalUnitName(char.TeletexString):
+ pass
+
+
+TeletexOrganizationalUnitName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)
+
+
+class TeletexPersonalName(univ.Set):
+ pass
+
+
+TeletexPersonalName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('surname', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('given-name', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('initials', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('generation-qualifier', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+)
+
+
+class TeletexDomainDefinedAttributes(univ.SequenceOf):
+ pass
+
+
+TeletexDomainDefinedAttributes.componentType = TeletexDomainDefinedAttribute()
+TeletexDomainDefinedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)
+
+
+class TBSCertList(univ.Sequence):
+ pass
+
+
+TBSCertList.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('version', Version()),
+ namedtype.NamedType('signature', AlgorithmIdentifier()),
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('thisUpdate', Time()),
+ namedtype.OptionalNamedType('nextUpdate', Time()),
+ namedtype.OptionalNamedType('revokedCertificates',
+ univ.SequenceOf(componentType=univ.Sequence(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('userCertificate', CertificateSerialNumber()),
+ namedtype.NamedType('revocationDate', Time()),
+ namedtype.OptionalNamedType('crlEntryExtensions', Extensions())
+ ))
+ )),
+ namedtype.OptionalNamedType('crlExtensions',
+ Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+local_postal_attributes = univ.Integer(21)
+
+pkcs_9 = _OID(1, 2, 840, 113549, 1, 9)
+
+
+class PhysicalDeliveryCountryName(univ.Choice):
+ pass
+
+
+PhysicalDeliveryCountryName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))),
+ namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length)))
+)
+
+ub_name = univ.Integer(32768)
+
+
+class X520name(univ.Choice):
+ pass
+
+
+X520name.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name)))
+)
+
+id_emailAddress = _OID(pkcs_9, 1)
+
+
+class TerminalType(univ.Integer):
+ pass
+
+
+TerminalType.namedValues = namedval.NamedValues(
+ ('telex', 3),
+ ('teletex', 4),
+ ('g3-facsimile', 5),
+ ('g4-facsimile', 6),
+ ('ia5-terminal', 7),
+ ('videotex', 8)
+)
+
+
+class X520OrganizationalUnitName(univ.Choice):
+ pass
+
+
+X520OrganizationalUnitName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name)))
+)
+
+id_at_commonName = _OID(id_at, 3)
+
+pds_name = univ.Integer(7)
+
+post_office_box_address = univ.Integer(18)
+
+ub_locality_name = univ.Integer(128)
+
+
+class X520LocalityName(univ.Choice):
+ pass
+
+
+X520LocalityName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name)))
+)
+
+id_ad_timeStamping = _OID(id_ad, 3)
+
+id_at_countryName = _OID(id_at, 6)
+
+physical_delivery_personal_name = univ.Integer(13)
+
+teletex_personal_name = univ.Integer(4)
+
+teletex_organizational_unit_names = univ.Integer(5)
+
+
+class PhysicalDeliveryPersonalName(PDSParameter):
+ pass
+
+
+ub_postal_code_length = univ.Integer(16)
+
+
+class PostalCode(univ.Choice):
+ pass
+
+
+PostalCode.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric-code', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length))),
+ namedtype.NamedType('printable-code', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length)))
+)
+
+
+class X520countryName(char.PrintableString):
+ pass
+
+
+X520countryName.subtypeSpec = constraint.ValueSizeConstraint(2, 2)
+
+postal_code = univ.Integer(9)
+
+id_ad_caRepository = _OID(id_ad, 5)
+
+extension_physical_delivery_address_components = univ.Integer(15)
+
+
+class PostOfficeBoxAddress(PDSParameter):
+ pass
+
+
+class PhysicalDeliveryOfficeName(PDSParameter):
+ pass
+
+
+id_at_title = _OID(id_at, 12)
+
+id_at_serialNumber = _OID(id_at, 5)
+
+id_ad_caIssuers = _OID(id_ad, 2)
+
+ub_integer_options = univ.Integer(256)
+
+
+class CertificateList(univ.Sequence):
+ pass
+
+
+CertificateList.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsCertList', TBSCertList()),
+ namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+)
+
+
+class PhysicalDeliveryOfficeNumber(PDSParameter):
+ pass
+
+
+class TeletexOrganizationalUnitNames(univ.SequenceOf):
+ pass
+
+
+TeletexOrganizationalUnitNames.componentType = TeletexOrganizationalUnitName()
+TeletexOrganizationalUnitNames.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units)
+
+physical_delivery_office_name = univ.Integer(10)
+
+ub_common_name = univ.Integer(64)
+
+
+class ExtensionORAddressComponents(PDSParameter):
+ pass
+
+
+ub_pseudonym = univ.Integer(128)
+
+poste_restante_address = univ.Integer(19)
+
+id_at_organizationName = _OID(id_at, 10)
+
+physical_delivery_office_number = univ.Integer(11)
+
+id_at_pseudonym = _OID(id_at, 65)
+
+
+class X520CommonName(univ.Choice):
+ pass
+
+
+X520CommonName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name)))
+)
+
+physical_delivery_organization_name = univ.Integer(14)
+
+
+class X520dnQualifier(char.PrintableString):
+ pass
+
+
+id_at_stateOrProvinceName = _OID(id_at, 8)
+
+common_name = univ.Integer(1)
+
+id_at_localityName = _OID(id_at, 7)
+
+ub_match = univ.Integer(128)
+
+ub_unformatted_address_length = univ.Integer(180)
+
+
+class Attribute(univ.Sequence):
+ pass
+
+
+Attribute.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeType()),
+ namedtype.NamedType('values', univ.SetOf(componentType=AttributeValue()))
+)
+
+extended_network_address = univ.Integer(22)
+
+unique_postal_name = univ.Integer(20)
+
+ub_pds_physical_address_lines = univ.Integer(6)
+
+
+class UnformattedPostalAddress(univ.Set):
+ pass
+
+
+UnformattedPostalAddress.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('printable-address', univ.SequenceOf(componentType=char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)))),
+ namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_unformatted_address_length)))
+)
+
+
+class UniquePostalName(PDSParameter):
+ pass
+
+
+class X520Pseudonym(univ.Choice):
+ pass
+
+
+X520Pseudonym.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym)))
+)
+
+teletex_organization_name = univ.Integer(3)
+
+teletex_domain_defined_attributes = univ.Integer(6)
+
+street_address = univ.Integer(17)
+
+id_kp_OCSPSigning = _OID(id_kp, 9)
+
+id_ce = _OID(2, 5, 29)
+
+id_ce_certificatePolicies = _OID(id_ce, 32)
+
+
+class EDIPartyName(univ.Sequence):
+ pass
+
+
+EDIPartyName.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('nameAssigner', DirectoryString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('partyName',
+ DirectoryString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class AnotherName(univ.Sequence):
+ pass
+
+
+AnotherName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type-id', univ.ObjectIdentifier()),
+ namedtype.NamedType('value', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class GeneralName(univ.Choice):
+ pass
+
+
+GeneralName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('otherName',
+ AnotherName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('rfc822Name',
+ char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('dNSName',
+ char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('x400Address',
+ ORAddress().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.NamedType('directoryName',
+ Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
+ namedtype.NamedType('ediPartyName',
+ EDIPartyName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
+ namedtype.NamedType('uniformResourceIdentifier',
+ char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))),
+ namedtype.NamedType('iPAddress',
+ univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8)))
+)
+
+
+class GeneralNames(univ.SequenceOf):
+ pass
+
+
+GeneralNames.componentType = GeneralName()
+GeneralNames.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class IssuerAltName(GeneralNames):
+ pass
+
+
+id_ce_cRLDistributionPoints = _OID(id_ce, 31)
+
+
+class CertPolicyId(univ.ObjectIdentifier):
+ pass
+
+
+class PolicyMappings(univ.SequenceOf):
+ pass
+
+
+PolicyMappings.componentType = univ.Sequence(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('issuerDomainPolicy', CertPolicyId()),
+ namedtype.NamedType('subjectDomainPolicy', CertPolicyId())
+))
+
+PolicyMappings.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class PolicyQualifierId(univ.ObjectIdentifier):
+ pass
+
+
+holdInstruction = _OID(2, 2, 840, 10040, 2)
+
+id_ce_subjectDirectoryAttributes = _OID(id_ce, 9)
+
+id_holdinstruction_callissuer = _OID(holdInstruction, 2)
+
+
+class SubjectDirectoryAttributes(univ.SequenceOf):
+ pass
+
+
+SubjectDirectoryAttributes.componentType = Attribute()
+SubjectDirectoryAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+anyPolicy = _OID(id_ce_certificatePolicies, 0)
+
+id_ce_subjectAltName = _OID(id_ce, 17)
+
+id_kp_emailProtection = _OID(id_kp, 4)
+
+
+class ReasonFlags(univ.BitString):
+ pass
+
+
+ReasonFlags.namedValues = namedval.NamedValues(
+ ('unused', 0),
+ ('keyCompromise', 1),
+ ('cACompromise', 2),
+ ('affiliationChanged', 3),
+ ('superseded', 4),
+ ('cessationOfOperation', 5),
+ ('certificateHold', 6),
+ ('privilegeWithdrawn', 7),
+ ('aACompromise', 8)
+)
+
+
+class DistributionPointName(univ.Choice):
+ pass
+
+
+DistributionPointName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('fullName',
+ GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('nameRelativeToCRLIssuer', RelativeDistinguishedName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class DistributionPoint(univ.Sequence):
+ pass
+
+
+DistributionPoint.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('reasons', ReasonFlags().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('cRLIssuer', GeneralNames().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+)
+
+id_ce_keyUsage = _OID(id_ce, 15)
+
+
+class PolicyQualifierInfo(univ.Sequence):
+ pass
+
+
+PolicyQualifierInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('policyQualifierId', PolicyQualifierId()),
+ namedtype.NamedType('qualifier', univ.Any())
+)
+
+
+class PolicyInformation(univ.Sequence):
+ pass
+
+
+PolicyInformation.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('policyIdentifier', CertPolicyId()),
+ namedtype.OptionalNamedType('policyQualifiers', univ.SequenceOf(componentType=PolicyQualifierInfo()))
+)
+
+
+class CertificatePolicies(univ.SequenceOf):
+ pass
+
+
+CertificatePolicies.componentType = PolicyInformation()
+CertificatePolicies.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+id_ce_basicConstraints = _OID(id_ce, 19)
+
+
+class HoldInstructionCode(univ.ObjectIdentifier):
+ pass
+
+
+class KeyPurposeId(univ.ObjectIdentifier):
+ pass
+
+
+class ExtKeyUsageSyntax(univ.SequenceOf):
+ pass
+
+
+ExtKeyUsageSyntax.componentType = KeyPurposeId()
+ExtKeyUsageSyntax.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class SubjectAltName(GeneralNames):
+ pass
+
+
+class BasicConstraints(univ.Sequence):
+ pass
+
+
+BasicConstraints.componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('cA', univ.Boolean().subtype(value=0)),
+ namedtype.OptionalNamedType('pathLenConstraint',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX)))
+)
+
+
+class SkipCerts(univ.Integer):
+ pass
+
+
+SkipCerts.subtypeSpec = constraint.ValueRangeConstraint(0, MAX)
+
+
+class InhibitAnyPolicy(SkipCerts):
+ pass
+
+
+class CRLNumber(univ.Integer):
+ pass
+
+
+CRLNumber.subtypeSpec = constraint.ValueRangeConstraint(0, MAX)
+
+
+class BaseCRLNumber(CRLNumber):
+ pass
+
+
+class KeyIdentifier(univ.OctetString):
+ pass
+
+
+class AuthorityKeyIdentifier(univ.Sequence):
+ pass
+
+
+AuthorityKeyIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('keyIdentifier', KeyIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('authorityCertIssuer', GeneralNames().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('authorityCertSerialNumber', CertificateSerialNumber().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+)
+
+id_ce_nameConstraints = _OID(id_ce, 30)
+
+id_kp_serverAuth = _OID(id_kp, 1)
+
+id_ce_freshestCRL = _OID(id_ce, 46)
+
+id_ce_cRLReasons = _OID(id_ce, 21)
+
+
+class CRLDistributionPoints(univ.SequenceOf):
+ pass
+
+
+CRLDistributionPoints.componentType = DistributionPoint()
+CRLDistributionPoints.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class FreshestCRL(CRLDistributionPoints):
+ pass
+
+
+id_ce_inhibitAnyPolicy = _OID(id_ce, 54)
+
+
+class CRLReason(univ.Enumerated):
+ pass
+
+
+CRLReason.namedValues = namedval.NamedValues(
+ ('unspecified', 0),
+ ('keyCompromise', 1),
+ ('cACompromise', 2),
+ ('affiliationChanged', 3),
+ ('superseded', 4),
+ ('cessationOfOperation', 5),
+ ('certificateHold', 6),
+ ('removeFromCRL', 8),
+ ('privilegeWithdrawn', 9),
+ ('aACompromise', 10)
+)
+
+
+class BaseDistance(univ.Integer):
+ pass
+
+
+BaseDistance.subtypeSpec = constraint.ValueRangeConstraint(0, MAX)
+
+
+class GeneralSubtree(univ.Sequence):
+ pass
+
+
+GeneralSubtree.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('base', GeneralName()),
+ namedtype.DefaultedNamedType('minimum', BaseDistance().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)).subtype(value=0)),
+ namedtype.OptionalNamedType('maximum', BaseDistance().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class GeneralSubtrees(univ.SequenceOf):
+ pass
+
+
+GeneralSubtrees.componentType = GeneralSubtree()
+GeneralSubtrees.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class NameConstraints(univ.Sequence):
+ pass
+
+
+NameConstraints.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('permittedSubtrees', GeneralSubtrees().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('excludedSubtrees', GeneralSubtrees().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+id_pe_authorityInfoAccess = _OID(id_pe, 1)
+
+id_pe_subjectInfoAccess = _OID(id_pe, 11)
+
+id_ce_certificateIssuer = _OID(id_ce, 29)
+
+id_ce_invalidityDate = _OID(id_ce, 24)
+
+
+class DirectoryString(univ.Choice):
+ pass
+
+
+DirectoryString.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('any', univ.Any())
+)
+
+id_ce_authorityKeyIdentifier = _OID(id_ce, 35)
+
+
+class AccessDescription(univ.Sequence):
+ pass
+
+
+AccessDescription.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('accessMethod', univ.ObjectIdentifier()),
+ namedtype.NamedType('accessLocation', GeneralName())
+)
+
+
+class AuthorityInfoAccessSyntax(univ.SequenceOf):
+ pass
+
+
+AuthorityInfoAccessSyntax.componentType = AccessDescription()
+AuthorityInfoAccessSyntax.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+id_ce_issuingDistributionPoint = _OID(id_ce, 28)
+
+
+class CPSuri(char.IA5String):
+ pass
+
+
+class DisplayText(univ.Choice):
+ pass
+
+
+DisplayText.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('ia5String', char.IA5String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
+ namedtype.NamedType('visibleString',
+ char.VisibleString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200)))
+)
+
+
+class NoticeReference(univ.Sequence):
+ pass
+
+
+NoticeReference.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('organization', DisplayText()),
+ namedtype.NamedType('noticeNumbers', univ.SequenceOf(componentType=univ.Integer()))
+)
+
+
+class UserNotice(univ.Sequence):
+ pass
+
+
+UserNotice.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('noticeRef', NoticeReference()),
+ namedtype.OptionalNamedType('explicitText', DisplayText())
+)
+
+
+class PrivateKeyUsagePeriod(univ.Sequence):
+ pass
+
+
+PrivateKeyUsagePeriod.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('notBefore', useful.GeneralizedTime().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('notAfter', useful.GeneralizedTime().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+id_ce_subjectKeyIdentifier = _OID(id_ce, 14)
+
+
+class CertificateIssuer(GeneralNames):
+ pass
+
+
+class InvalidityDate(useful.GeneralizedTime):
+ pass
+
+
+class SubjectInfoAccessSyntax(univ.SequenceOf):
+ pass
+
+
+SubjectInfoAccessSyntax.componentType = AccessDescription()
+SubjectInfoAccessSyntax.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class KeyUsage(univ.BitString):
+ pass
+
+
+KeyUsage.namedValues = namedval.NamedValues(
+ ('digitalSignature', 0),
+ ('nonRepudiation', 1),
+ ('keyEncipherment', 2),
+ ('dataEncipherment', 3),
+ ('keyAgreement', 4),
+ ('keyCertSign', 5),
+ ('cRLSign', 6),
+ ('encipherOnly', 7),
+ ('decipherOnly', 8)
+)
+
+id_ce_extKeyUsage = _OID(id_ce, 37)
+
+anyExtendedKeyUsage = _OID(id_ce_extKeyUsage, 0)
+
+id_ce_privateKeyUsagePeriod = _OID(id_ce, 16)
+
+id_ce_policyMappings = _OID(id_ce, 33)
+
+id_ce_cRLNumber = _OID(id_ce, 20)
+
+id_ce_policyConstraints = _OID(id_ce, 36)
+
+id_holdinstruction_none = _OID(holdInstruction, 1)
+
+id_holdinstruction_reject = _OID(holdInstruction, 3)
+
+id_kp_timeStamping = _OID(id_kp, 8)
+
+
+class PolicyConstraints(univ.Sequence):
+ pass
+
+
+PolicyConstraints.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('requireExplicitPolicy',
+ SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('inhibitPolicyMapping',
+ SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class SubjectKeyIdentifier(KeyIdentifier):
+ pass
+
+
+id_kp_clientAuth = _OID(id_kp, 2)
+
+id_ce_deltaCRLIndicator = _OID(id_ce, 27)
+
+id_ce_issuerAltName = _OID(id_ce, 18)
+
+id_kp_codeSigning = _OID(id_kp, 3)
+
+id_ce_holdInstructionCode = _OID(id_ce, 23)
+
+
+class IssuingDistributionPoint(univ.Sequence):
+ pass
+
+
+IssuingDistributionPoint.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.DefaultedNamedType('onlyContainsUserCerts', univ.Boolean().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)).subtype(value=0)),
+ namedtype.DefaultedNamedType('onlyContainsCACerts', univ.Boolean().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)).subtype(value=0)),
+ namedtype.OptionalNamedType('onlySomeReasons', ReasonFlags().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.DefaultedNamedType('indirectCRL', univ.Boolean().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)).subtype(value=0)),
+ namedtype.DefaultedNamedType('onlyContainsAttributeCerts', univ.Boolean().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5)).subtype(value=0))
+)
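[Illustrative note, not part of the upstream file] The rfc3280 module mirrors the X.509 certificate and CRL profile, so its most common use is as a decoding spec. A hedged sketch; example-cert.der is a hypothetical path to a DER-encoded certificate, not a file shipped with this change.

from pyasn1.codec.der import decoder
from pyasn1_modules import rfc3280

# Read a DER-encoded X.509 certificate from disk (hypothetical file name).
with open('example-cert.der', 'rb') as handle:
    der_bytes = handle.read()

certificate, rest = decoder.decode(der_bytes, asn1Spec=rfc3280.Certificate())
assert not rest

tbs = certificate['tbsCertificate']
print('serial number:', tbs['serialNumber'])
print('not before   :', tbs['validity']['notBefore'].getComponent())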
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc3281.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3281.py
new file mode 100644
index 0000000000..8aa99d39fb
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3281.py
@@ -0,0 +1,331 @@
+# coding: utf-8
+#
+# This file is part of pyasn1-modules software.
+#
+# Created by Stanisław Pitucha with asn1ate tool.
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# An Internet Attribute Certificate Profile for Authorization
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc3281.txt
+#
+from pyasn1.type import univ
+from pyasn1.type import char
+from pyasn1.type import namedtype
+from pyasn1.type import namedval
+from pyasn1.type import tag
+from pyasn1.type import constraint
+from pyasn1.type import useful
+
+from pyasn1_modules import rfc3280
+
+MAX = float('inf')
+
+
+def _buildOid(*components):
+ output = []
+ for x in tuple(components):
+ if isinstance(x, univ.ObjectIdentifier):
+ output.extend(list(x))
+ else:
+ output.append(int(x))
+
+ return univ.ObjectIdentifier(output)
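+
+# For example, _buildOid(rfc3280.id_pkix, 10) flattens the id-pkix prefix and the
+# trailing arc into univ.ObjectIdentifier('1.3.6.1.5.5.7.10'); the id_aca arc
+# defined further below is derived exactly this way.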
+
+
+class ObjectDigestInfo(univ.Sequence):
+ pass
+
+
+ObjectDigestInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('digestedObjectType', univ.Enumerated(
+ namedValues=namedval.NamedValues(('publicKey', 0), ('publicKeyCert', 1), ('otherObjectTypes', 2)))),
+ namedtype.OptionalNamedType('otherObjectTypeID', univ.ObjectIdentifier()),
+ namedtype.NamedType('digestAlgorithm', rfc3280.AlgorithmIdentifier()),
+ namedtype.NamedType('objectDigest', univ.BitString())
+)
+
+
+class IssuerSerial(univ.Sequence):
+ pass
+
+
+IssuerSerial.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuer', rfc3280.GeneralNames()),
+ namedtype.NamedType('serial', rfc3280.CertificateSerialNumber()),
+ namedtype.OptionalNamedType('issuerUID', rfc3280.UniqueIdentifier())
+)
+
+
+class TargetCert(univ.Sequence):
+ pass
+
+
+TargetCert.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('targetCertificate', IssuerSerial()),
+ namedtype.OptionalNamedType('targetName', rfc3280.GeneralName()),
+ namedtype.OptionalNamedType('certDigestInfo', ObjectDigestInfo())
+)
+
+
+class Target(univ.Choice):
+ pass
+
+
+Target.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('targetName', rfc3280.GeneralName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('targetGroup', rfc3280.GeneralName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('targetCert',
+ TargetCert().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)))
+)
+
+
+class Targets(univ.SequenceOf):
+ pass
+
+
+Targets.componentType = Target()
+
+
+class ProxyInfo(univ.SequenceOf):
+ pass
+
+
+ProxyInfo.componentType = Targets()
+
+id_at_role = _buildOid(rfc3280.id_at, 72)
+
+id_pe_aaControls = _buildOid(rfc3280.id_pe, 6)
+
+id_ce_targetInformation = _buildOid(rfc3280.id_ce, 55)
+
+id_pe_ac_auditIdentity = _buildOid(rfc3280.id_pe, 4)
+
+
+class ClassList(univ.BitString):
+ pass
+
+
+ClassList.namedValues = namedval.NamedValues(
+ ('unmarked', 0),
+ ('unclassified', 1),
+ ('restricted', 2),
+ ('confidential', 3),
+ ('secret', 4),
+ ('topSecret', 5)
+)
+
+
+class SecurityCategory(univ.Sequence):
+ pass
+
+
+SecurityCategory.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', univ.ObjectIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('value', univ.Any().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class Clearance(univ.Sequence):
+ pass
+
+
+Clearance.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('policyId', univ.ObjectIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.DefaultedNamedType('classList',
+ ClassList().subtype(implicitTag=tag.Tag(tag.tagClassContext,
+ tag.tagFormatSimple, 1)).subtype(
+ value="unclassified")),
+ namedtype.OptionalNamedType('securityCategories', univ.SetOf(componentType=SecurityCategory()).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+)
+
+
+class AttCertVersion(univ.Integer):
+ pass
+
+
+AttCertVersion.namedValues = namedval.NamedValues(
+ ('v2', 1)
+)
+
+id_aca = _buildOid(rfc3280.id_pkix, 10)
+
+id_at_clearance = _buildOid(2, 5, 1, 5, 55)
+
+
+class AttrSpec(univ.SequenceOf):
+ pass
+
+
+AttrSpec.componentType = univ.ObjectIdentifier()
+
+
+class AAControls(univ.Sequence):
+ pass
+
+
+AAControls.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('pathLenConstraint',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX))),
+ namedtype.OptionalNamedType('permittedAttrs',
+ AttrSpec().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('excludedAttrs',
+ AttrSpec().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.DefaultedNamedType('permitUnSpecified', univ.Boolean().subtype(value=1))
+)
+
+
+class AttCertValidityPeriod(univ.Sequence):
+ pass
+
+
+AttCertValidityPeriod.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('notBeforeTime', useful.GeneralizedTime()),
+ namedtype.NamedType('notAfterTime', useful.GeneralizedTime())
+)
+
+
+id_aca_authenticationInfo = _buildOid(id_aca, 1)
+
+
+class V2Form(univ.Sequence):
+ pass
+
+
+V2Form.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('issuerName', rfc3280.GeneralNames()),
+ namedtype.OptionalNamedType('baseCertificateID', IssuerSerial().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('objectDigestInfo', ObjectDigestInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+)
+
+
+class AttCertIssuer(univ.Choice):
+ pass
+
+
+AttCertIssuer.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('v1Form', rfc3280.GeneralNames()),
+ namedtype.NamedType('v2Form',
+ V2Form().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+)
+
+
+class Holder(univ.Sequence):
+ pass
+
+
+Holder.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('baseCertificateID', IssuerSerial().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('entityName', rfc3280.GeneralNames().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('objectDigestInfo', ObjectDigestInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)))
+)
+
+
+class AttributeCertificateInfo(univ.Sequence):
+ pass
+
+
+AttributeCertificateInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', AttCertVersion()),
+ namedtype.NamedType('holder', Holder()),
+ namedtype.NamedType('issuer', AttCertIssuer()),
+ namedtype.NamedType('signature', rfc3280.AlgorithmIdentifier()),
+ namedtype.NamedType('serialNumber', rfc3280.CertificateSerialNumber()),
+ namedtype.NamedType('attrCertValidityPeriod', AttCertValidityPeriod()),
+ namedtype.NamedType('attributes', univ.SequenceOf(componentType=rfc3280.Attribute())),
+ namedtype.OptionalNamedType('issuerUniqueID', rfc3280.UniqueIdentifier()),
+ namedtype.OptionalNamedType('extensions', rfc3280.Extensions())
+)
+
+
+class AttributeCertificate(univ.Sequence):
+ pass
+
+
+AttributeCertificate.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('acinfo', AttributeCertificateInfo()),
+ namedtype.NamedType('signatureAlgorithm', rfc3280.AlgorithmIdentifier()),
+ namedtype.NamedType('signatureValue', univ.BitString())
+)
+
+id_mod = _buildOid(rfc3280.id_pkix, 0)
+
+id_mod_attribute_cert = _buildOid(id_mod, 12)
+
+id_aca_accessIdentity = _buildOid(id_aca, 2)
+
+
+class RoleSyntax(univ.Sequence):
+ pass
+
+
+RoleSyntax.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('roleAuthority', rfc3280.GeneralNames().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('roleName',
+ rfc3280.GeneralName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+id_aca_chargingIdentity = _buildOid(id_aca, 3)
+
+
+class ACClearAttrs(univ.Sequence):
+ pass
+
+
+ACClearAttrs.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('acIssuer', rfc3280.GeneralName()),
+ namedtype.NamedType('acSerial', univ.Integer()),
+ namedtype.NamedType('attrs', univ.SequenceOf(componentType=rfc3280.Attribute()))
+)
+
+id_aca_group = _buildOid(id_aca, 4)
+
+id_pe_ac_proxying = _buildOid(rfc3280.id_pe, 10)
+
+
+class SvceAuthInfo(univ.Sequence):
+ pass
+
+
+SvceAuthInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('service', rfc3280.GeneralName()),
+ namedtype.NamedType('ident', rfc3280.GeneralName()),
+ namedtype.OptionalNamedType('authInfo', univ.OctetString())
+)
+
+
+class IetfAttrSyntax(univ.Sequence):
+ pass
+
+
+IetfAttrSyntax.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType(
+ 'policyAuthority', rfc3280.GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
+ ),
+ namedtype.NamedType(
+ 'values', univ.SequenceOf(
+ componentType=univ.Choice(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('octets', univ.OctetString()),
+ namedtype.NamedType('oid', univ.ObjectIdentifier()),
+ namedtype.NamedType('string', char.UTF8String())
+ )
+ )
+ )
+ )
+)
+
+id_aca_encAttrs = _buildOid(id_aca, 6)
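+
+
+# A minimal usage sketch, not part of the upstream module: decode a DER-encoded
+# attribute certificate against the AttributeCertificate spec defined above.
+# The path "ac.der" is a hypothetical example input.
+if __name__ == '__main__':
+    from pyasn1.codec.der import decoder as der_decoder
+
+    with open('ac.der', 'rb') as ac_file:
+        ac, _ = der_decoder.decode(ac_file.read(), asn1Spec=AttributeCertificate())
+
+    # Serial number and validity live inside the acinfo component.
+    print(ac['acinfo']['serialNumber'])
+    print(ac['acinfo']['attrCertValidityPeriod']['notAfterTime'])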
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc3412.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3412.py
new file mode 100644
index 0000000000..b3f5a929c8
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3412.py
@@ -0,0 +1,50 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# SNMPv3 message syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc3412.txt
+#
+from pyasn1.type import univ, namedtype, constraint
+from pyasn1_modules import rfc1905
+
+
+class ScopedPDU(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('contextEngineId', univ.OctetString()),
+ namedtype.NamedType('contextName', univ.OctetString()),
+ namedtype.NamedType('data', rfc1905.PDUs())
+ )
+
+
+class ScopedPduData(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('plaintext', ScopedPDU()),
+ namedtype.NamedType('encryptedPDU', univ.OctetString()),
+ )
+
+
+class HeaderData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('msgID',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))),
+ namedtype.NamedType('msgMaxSize',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(484, 2147483647))),
+ namedtype.NamedType('msgFlags', univ.OctetString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 1))),
+ namedtype.NamedType('msgSecurityModel',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, 2147483647)))
+ )
+
+
+class SNMPv3Message(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('msgVersion',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))),
+ namedtype.NamedType('msgGlobalData', HeaderData()),
+ namedtype.NamedType('msgSecurityParameters', univ.OctetString()),
+ namedtype.NamedType('msgData', ScopedPduData())
+ )
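+
+
+# A minimal usage sketch, not part of the upstream module: build the header of
+# an SNMPv3 message and serialise it with the BER encoder (SNMP uses BER on the
+# wire). The field values below are arbitrary example data.
+if __name__ == '__main__':
+    from pyasn1.codec.ber import encoder as ber_encoder
+
+    header = HeaderData()
+    header['msgID'] = 12345
+    header['msgMaxSize'] = 65507
+    header['msgFlags'] = b'\x04'    # reportable flag set, no auth, no priv
+    header['msgSecurityModel'] = 3  # 3 = User-based Security Model (USM)
+
+    print(len(ber_encoder.encode(header)))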
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc3414.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3414.py
new file mode 100644
index 0000000000..aeb82aa26b
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3414.py
@@ -0,0 +1,26 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# SNMPv3 message syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc3414.txt
+#
+from pyasn1.type import univ, namedtype, constraint
+
+
+class UsmSecurityParameters(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('msgAuthoritativeEngineID', univ.OctetString()),
+ namedtype.NamedType('msgAuthoritativeEngineBoots',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))),
+ namedtype.NamedType('msgAuthoritativeEngineTime',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))),
+ namedtype.NamedType('msgUserName',
+ univ.OctetString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, 32))),
+ namedtype.NamedType('msgAuthenticationParameters', univ.OctetString()),
+ namedtype.NamedType('msgPrivacyParameters', univ.OctetString())
+ )
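+
+
+# A minimal usage sketch, not part of the upstream module: USM parameters travel
+# inside SNMPv3Message.msgSecurityParameters as an opaque OCTET STRING, so a
+# captured message is decoded in two passes ("wire_bytes" below is a
+# hypothetical placeholder for such a capture):
+#
+#   from pyasn1.codec.ber import decoder
+#   from pyasn1_modules import rfc3412, rfc3414
+#
+#   msg, _ = decoder.decode(wire_bytes, asn1Spec=rfc3412.SNMPv3Message())
+#   usm, _ = decoder.decode(msg['msgSecurityParameters'],
+#                           asn1Spec=rfc3414.UsmSecurityParameters())
+#   print(usm['msgUserName'])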
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc3447.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3447.py
new file mode 100644
index 0000000000..57c99faa4a
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3447.py
@@ -0,0 +1,43 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# PKCS#1 syntax
+#
+# ASN.1 source from:
+# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1.asn
+#
+# Sample captures could be obtained with "openssl genrsa" command
+#
+from pyasn1.type import constraint, namedval
+from pyasn1_modules.rfc2437 import *
+
+
+class OtherPrimeInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('prime', univ.Integer()),
+ namedtype.NamedType('exponent', univ.Integer()),
+ namedtype.NamedType('coefficient', univ.Integer())
+ )
+
+
+class OtherPrimeInfos(univ.SequenceOf):
+ componentType = OtherPrimeInfo()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+class RSAPrivateKey(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer(namedValues=namedval.NamedValues(('two-prime', 0), ('multi', 1)))),
+ namedtype.NamedType('modulus', univ.Integer()),
+ namedtype.NamedType('publicExponent', univ.Integer()),
+ namedtype.NamedType('privateExponent', univ.Integer()),
+ namedtype.NamedType('prime1', univ.Integer()),
+ namedtype.NamedType('prime2', univ.Integer()),
+ namedtype.NamedType('exponent1', univ.Integer()),
+ namedtype.NamedType('exponent2', univ.Integer()),
+ namedtype.NamedType('coefficient', univ.Integer()),
+ namedtype.OptionalNamedType('otherPrimeInfos', OtherPrimeInfos())
+ )
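+
+
+# A minimal usage sketch, not part of the upstream module: parse a traditional
+# PKCS#1 RSA private key such as one produced by "openssl genrsa". The path
+# "key.pem" is a hypothetical example input.
+if __name__ == '__main__':
+    from pyasn1.codec.der import decoder as der_decoder
+    from pyasn1_modules import pem
+
+    with open('key.pem') as key_file:
+        substrate = pem.readPemFromFile(
+            key_file,
+            '-----BEGIN RSA PRIVATE KEY-----', '-----END RSA PRIVATE KEY-----')
+
+    key, _ = der_decoder.decode(substrate, asn1Spec=RSAPrivateKey())
+    print(int(key['modulus']).bit_length())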
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc3852.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3852.py
new file mode 100644
index 0000000000..872eb88cec
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc3852.py
@@ -0,0 +1,701 @@
+# coding: utf-8
+#
+# This file is part of pyasn1-modules software.
+#
+# Created by Stanisław Pitucha with asn1ate tool.
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Cryptographic Message Syntax (CMS)
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc3852.txt
+#
+from pyasn1.type import univ, namedtype, namedval, tag, constraint, useful
+
+from pyasn1_modules import rfc3280
+from pyasn1_modules import rfc3281
+
+MAX = float('inf')
+
+
+def _buildOid(*components):
+ output = []
+ for x in tuple(components):
+ if isinstance(x, univ.ObjectIdentifier):
+ output.extend(list(x))
+ else:
+ output.append(int(x))
+
+ return univ.ObjectIdentifier(output)
+
+
+class AttributeValue(univ.Any):
+ pass
+
+
+class Attribute(univ.Sequence):
+ pass
+
+
+Attribute.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('attrType', univ.ObjectIdentifier()),
+ namedtype.NamedType('attrValues', univ.SetOf(componentType=AttributeValue()))
+)
+
+
+class SignedAttributes(univ.SetOf):
+ pass
+
+
+SignedAttributes.componentType = Attribute()
+SignedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class OtherRevocationInfoFormat(univ.Sequence):
+ pass
+
+
+OtherRevocationInfoFormat.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('otherRevInfoFormat', univ.ObjectIdentifier()),
+ namedtype.NamedType('otherRevInfo', univ.Any())
+)
+
+
+class RevocationInfoChoice(univ.Choice):
+ pass
+
+
+RevocationInfoChoice.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('crl', rfc3280.CertificateList()),
+ namedtype.NamedType('other', OtherRevocationInfoFormat().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+)
+
+
+class RevocationInfoChoices(univ.SetOf):
+ pass
+
+
+RevocationInfoChoices.componentType = RevocationInfoChoice()
+
+
+class OtherKeyAttribute(univ.Sequence):
+ pass
+
+
+OtherKeyAttribute.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('keyAttrId', univ.ObjectIdentifier()),
+ namedtype.OptionalNamedType('keyAttr', univ.Any())
+)
+
+id_signedData = _buildOid(1, 2, 840, 113549, 1, 7, 2)
+
+
+class KeyEncryptionAlgorithmIdentifier(rfc3280.AlgorithmIdentifier):
+ pass
+
+
+class EncryptedKey(univ.OctetString):
+ pass
+
+
+class CMSVersion(univ.Integer):
+ pass
+
+
+CMSVersion.namedValues = namedval.NamedValues(
+ ('v0', 0),
+ ('v1', 1),
+ ('v2', 2),
+ ('v3', 3),
+ ('v4', 4),
+ ('v5', 5)
+)
+
+
+class KEKIdentifier(univ.Sequence):
+ pass
+
+
+KEKIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('keyIdentifier', univ.OctetString()),
+ namedtype.OptionalNamedType('date', useful.GeneralizedTime()),
+ namedtype.OptionalNamedType('other', OtherKeyAttribute())
+)
+
+
+class KEKRecipientInfo(univ.Sequence):
+ pass
+
+
+KEKRecipientInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('kekid', KEKIdentifier()),
+ namedtype.NamedType('keyEncryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()),
+ namedtype.NamedType('encryptedKey', EncryptedKey())
+)
+
+
+class KeyDerivationAlgorithmIdentifier(rfc3280.AlgorithmIdentifier):
+ pass
+
+
+class PasswordRecipientInfo(univ.Sequence):
+ pass
+
+
+PasswordRecipientInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.OptionalNamedType('keyDerivationAlgorithm', KeyDerivationAlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('keyEncryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()),
+ namedtype.NamedType('encryptedKey', EncryptedKey())
+)
+
+
+class OtherRecipientInfo(univ.Sequence):
+ pass
+
+
+OtherRecipientInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('oriType', univ.ObjectIdentifier()),
+ namedtype.NamedType('oriValue', univ.Any())
+)
+
+
+class IssuerAndSerialNumber(univ.Sequence):
+ pass
+
+
+IssuerAndSerialNumber.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuer', rfc3280.Name()),
+ namedtype.NamedType('serialNumber', rfc3280.CertificateSerialNumber())
+)
+
+
+class SubjectKeyIdentifier(univ.OctetString):
+ pass
+
+
+class RecipientKeyIdentifier(univ.Sequence):
+ pass
+
+
+RecipientKeyIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('subjectKeyIdentifier', SubjectKeyIdentifier()),
+ namedtype.OptionalNamedType('date', useful.GeneralizedTime()),
+ namedtype.OptionalNamedType('other', OtherKeyAttribute())
+)
+
+
+class KeyAgreeRecipientIdentifier(univ.Choice):
+ pass
+
+
+KeyAgreeRecipientIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
+ namedtype.NamedType('rKeyId', RecipientKeyIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+)
+
+
+class RecipientEncryptedKey(univ.Sequence):
+ pass
+
+
+RecipientEncryptedKey.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('rid', KeyAgreeRecipientIdentifier()),
+ namedtype.NamedType('encryptedKey', EncryptedKey())
+)
+
+
+class RecipientEncryptedKeys(univ.SequenceOf):
+ pass
+
+
+RecipientEncryptedKeys.componentType = RecipientEncryptedKey()
+
+
+class UserKeyingMaterial(univ.OctetString):
+ pass
+
+
+class OriginatorPublicKey(univ.Sequence):
+ pass
+
+
+OriginatorPublicKey.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algorithm', rfc3280.AlgorithmIdentifier()),
+ namedtype.NamedType('publicKey', univ.BitString())
+)
+
+
+class OriginatorIdentifierOrKey(univ.Choice):
+ pass
+
+
+OriginatorIdentifierOrKey.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
+ namedtype.NamedType('subjectKeyIdentifier', SubjectKeyIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('originatorKey', OriginatorPublicKey().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+)
+
+
+class KeyAgreeRecipientInfo(univ.Sequence):
+ pass
+
+
+KeyAgreeRecipientInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('originator', OriginatorIdentifierOrKey().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('ukm', UserKeyingMaterial().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('keyEncryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()),
+ namedtype.NamedType('recipientEncryptedKeys', RecipientEncryptedKeys())
+)
+
+
+class RecipientIdentifier(univ.Choice):
+ pass
+
+
+RecipientIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
+ namedtype.NamedType('subjectKeyIdentifier', SubjectKeyIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class KeyTransRecipientInfo(univ.Sequence):
+ pass
+
+
+KeyTransRecipientInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('rid', RecipientIdentifier()),
+ namedtype.NamedType('keyEncryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()),
+ namedtype.NamedType('encryptedKey', EncryptedKey())
+)
+
+
+class RecipientInfo(univ.Choice):
+ pass
+
+
+RecipientInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('ktri', KeyTransRecipientInfo()),
+ namedtype.NamedType('kari', KeyAgreeRecipientInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('kekri', KEKRecipientInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.NamedType('pwri', PasswordRecipientInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.NamedType('ori', OtherRecipientInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4)))
+)
+
+
+class RecipientInfos(univ.SetOf):
+ pass
+
+
+RecipientInfos.componentType = RecipientInfo()
+RecipientInfos.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class DigestAlgorithmIdentifier(rfc3280.AlgorithmIdentifier):
+ pass
+
+
+class Signature(univ.BitString):
+ pass
+
+
+class SignerIdentifier(univ.Choice):
+ pass
+
+
+SignerIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
+ namedtype.NamedType('subjectKeyIdentifier', SubjectKeyIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class UnprotectedAttributes(univ.SetOf):
+ pass
+
+
+UnprotectedAttributes.componentType = Attribute()
+UnprotectedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class ContentType(univ.ObjectIdentifier):
+ pass
+
+
+class EncryptedContent(univ.OctetString):
+ pass
+
+
+class ContentEncryptionAlgorithmIdentifier(rfc3280.AlgorithmIdentifier):
+ pass
+
+
+class EncryptedContentInfo(univ.Sequence):
+ pass
+
+
+EncryptedContentInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('contentType', ContentType()),
+ namedtype.NamedType('contentEncryptionAlgorithm', ContentEncryptionAlgorithmIdentifier()),
+ namedtype.OptionalNamedType('encryptedContent', EncryptedContent().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class EncryptedData(univ.Sequence):
+ pass
+
+
+EncryptedData.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo()),
+ namedtype.OptionalNamedType('unprotectedAttrs', UnprotectedAttributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+id_contentType = _buildOid(1, 2, 840, 113549, 1, 9, 3)
+
+id_data = _buildOid(1, 2, 840, 113549, 1, 7, 1)
+
+id_messageDigest = _buildOid(1, 2, 840, 113549, 1, 9, 4)
+
+
+class DigestAlgorithmIdentifiers(univ.SetOf):
+ pass
+
+
+DigestAlgorithmIdentifiers.componentType = DigestAlgorithmIdentifier()
+
+
+class EncapsulatedContentInfo(univ.Sequence):
+ pass
+
+
+EncapsulatedContentInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('eContentType', ContentType()),
+ namedtype.OptionalNamedType('eContent', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class Digest(univ.OctetString):
+ pass
+
+
+class DigestedData(univ.Sequence):
+ pass
+
+
+DigestedData.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()),
+ namedtype.NamedType('encapContentInfo', EncapsulatedContentInfo()),
+ namedtype.NamedType('digest', Digest())
+)
+
+
+class ContentInfo(univ.Sequence):
+ pass
+
+
+ContentInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('contentType', ContentType()),
+ namedtype.NamedType('content', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class UnauthAttributes(univ.SetOf):
+ pass
+
+
+UnauthAttributes.componentType = Attribute()
+UnauthAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class ExtendedCertificateInfo(univ.Sequence):
+ pass
+
+
+ExtendedCertificateInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('certificate', rfc3280.Certificate()),
+ namedtype.NamedType('attributes', UnauthAttributes())
+)
+
+
+class SignatureAlgorithmIdentifier(rfc3280.AlgorithmIdentifier):
+ pass
+
+
+class ExtendedCertificate(univ.Sequence):
+ pass
+
+
+ExtendedCertificate.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extendedCertificateInfo', ExtendedCertificateInfo()),
+ namedtype.NamedType('signatureAlgorithm', SignatureAlgorithmIdentifier()),
+ namedtype.NamedType('signature', Signature())
+)
+
+
+class OtherCertificateFormat(univ.Sequence):
+ pass
+
+
+OtherCertificateFormat.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('otherCertFormat', univ.ObjectIdentifier()),
+ namedtype.NamedType('otherCert', univ.Any())
+)
+
+
+class AttributeCertificateV2(rfc3281.AttributeCertificate):
+ pass
+
+
+class AttCertVersionV1(univ.Integer):
+ pass
+
+
+AttCertVersionV1.namedValues = namedval.NamedValues(
+ ('v1', 0)
+)
+
+
+class AttributeCertificateInfoV1(univ.Sequence):
+ pass
+
+
+AttributeCertificateInfoV1.componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('version', AttCertVersionV1().subtype(value="v1")),
+ namedtype.NamedType(
+ 'subject', univ.Choice(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('baseCertificateID', rfc3281.IssuerSerial().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('subjectName', rfc3280.GeneralNames().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+ )
+ ),
+ namedtype.NamedType('issuer', rfc3280.GeneralNames()),
+ namedtype.NamedType('signature', rfc3280.AlgorithmIdentifier()),
+ namedtype.NamedType('serialNumber', rfc3280.CertificateSerialNumber()),
+ namedtype.NamedType('attCertValidityPeriod', rfc3281.AttCertValidityPeriod()),
+ namedtype.NamedType('attributes', univ.SequenceOf(componentType=rfc3280.Attribute())),
+ namedtype.OptionalNamedType('issuerUniqueID', rfc3280.UniqueIdentifier()),
+ namedtype.OptionalNamedType('extensions', rfc3280.Extensions())
+)
+
+
+class AttributeCertificateV1(univ.Sequence):
+ pass
+
+
+AttributeCertificateV1.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('acInfo', AttributeCertificateInfoV1()),
+ namedtype.NamedType('signatureAlgorithm', rfc3280.AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+)
+
+
+class CertificateChoices(univ.Choice):
+ pass
+
+
+CertificateChoices.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certificate', rfc3280.Certificate()),
+ namedtype.NamedType('extendedCertificate', ExtendedCertificate().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('v1AttrCert', AttributeCertificateV1().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('v2AttrCert', AttributeCertificateV2().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('other', OtherCertificateFormat().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3)))
+)
+
+
+class CertificateSet(univ.SetOf):
+ pass
+
+
+CertificateSet.componentType = CertificateChoices()
+
+
+class MessageAuthenticationCode(univ.OctetString):
+ pass
+
+
+class UnsignedAttributes(univ.SetOf):
+ pass
+
+
+UnsignedAttributes.componentType = Attribute()
+UnsignedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class SignatureValue(univ.OctetString):
+ pass
+
+
+class SignerInfo(univ.Sequence):
+ pass
+
+
+SignerInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('sid', SignerIdentifier()),
+ namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()),
+ namedtype.OptionalNamedType('signedAttrs', SignedAttributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('signatureAlgorithm', SignatureAlgorithmIdentifier()),
+ namedtype.NamedType('signature', SignatureValue()),
+ namedtype.OptionalNamedType('unsignedAttrs', UnsignedAttributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class SignerInfos(univ.SetOf):
+ pass
+
+
+SignerInfos.componentType = SignerInfo()
+
+
+class SignedData(univ.Sequence):
+ pass
+
+
+SignedData.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('digestAlgorithms', DigestAlgorithmIdentifiers()),
+ namedtype.NamedType('encapContentInfo', EncapsulatedContentInfo()),
+ namedtype.OptionalNamedType('certificates', CertificateSet().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('crls', RevocationInfoChoices().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('signerInfos', SignerInfos())
+)
+
+
+class MessageAuthenticationCodeAlgorithm(rfc3280.AlgorithmIdentifier):
+ pass
+
+
+class MessageDigest(univ.OctetString):
+ pass
+
+
+class Time(univ.Choice):
+ pass
+
+
+Time.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('utcTime', useful.UTCTime()),
+ namedtype.NamedType('generalTime', useful.GeneralizedTime())
+)
+
+
+class OriginatorInfo(univ.Sequence):
+ pass
+
+
+OriginatorInfo.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('certs', CertificateSet().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('crls', RevocationInfoChoices().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class AuthAttributes(univ.SetOf):
+ pass
+
+
+AuthAttributes.componentType = Attribute()
+AuthAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class AuthenticatedData(univ.Sequence):
+ pass
+
+
+AuthenticatedData.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.OptionalNamedType('originatorInfo', OriginatorInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('recipientInfos', RecipientInfos()),
+ namedtype.NamedType('macAlgorithm', MessageAuthenticationCodeAlgorithm()),
+ namedtype.OptionalNamedType('digestAlgorithm', DigestAlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('encapContentInfo', EncapsulatedContentInfo()),
+ namedtype.OptionalNamedType('authAttrs', AuthAttributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('mac', MessageAuthenticationCode()),
+ namedtype.OptionalNamedType('unauthAttrs', UnauthAttributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+)
+
+id_ct_contentInfo = _buildOid(1, 2, 840, 113549, 1, 9, 16, 1, 6)
+
+id_envelopedData = _buildOid(1, 2, 840, 113549, 1, 7, 3)
+
+
+class EnvelopedData(univ.Sequence):
+ pass
+
+
+EnvelopedData.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.OptionalNamedType('originatorInfo', OriginatorInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('recipientInfos', RecipientInfos()),
+ namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo()),
+ namedtype.OptionalNamedType('unprotectedAttrs', UnprotectedAttributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class Countersignature(SignerInfo):
+ pass
+
+
+id_digestedData = _buildOid(1, 2, 840, 113549, 1, 7, 5)
+
+id_signingTime = _buildOid(1, 2, 840, 113549, 1, 9, 5)
+
+
+class ExtendedCertificateOrCertificate(univ.Choice):
+ pass
+
+
+ExtendedCertificateOrCertificate.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certificate', rfc3280.Certificate()),
+ namedtype.NamedType('extendedCertificate', ExtendedCertificate().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+)
+
+id_encryptedData = _buildOid(1, 2, 840, 113549, 1, 7, 6)
+
+id_ct_authData = _buildOid(1, 2, 840, 113549, 1, 9, 16, 1, 2)
+
+
+class SigningTime(Time):
+ pass
+
+
+id_countersignature = _buildOid(1, 2, 840, 113549, 1, 9, 6)
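+
+
+# A minimal usage sketch, not part of the upstream module: CMS is a two-stage
+# structure, so the outer ContentInfo is decoded first and its ANY payload is
+# decoded again with the spec selected by contentType. The path "message.der"
+# is a hypothetical DER-encoded SignedData input.
+if __name__ == '__main__':
+    from pyasn1.codec.der import decoder as der_decoder
+
+    with open('message.der', 'rb') as cms_file:
+        content_info, _ = der_decoder.decode(cms_file.read(), asn1Spec=ContentInfo())
+
+    if content_info['contentType'] == id_signedData:
+        signed_data, _ = der_decoder.decode(content_info['content'],
+                                            asn1Spec=SignedData())
+        print(len(signed_data['signerInfos']))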
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc4210.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc4210.py
new file mode 100644
index 0000000000..d7e6db09be
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc4210.py
@@ -0,0 +1,788 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Certificate Management Protocol structures as per RFC4210
+#
+# Based on Alex Railean's work
+#
+from pyasn1.type import tag, namedtype, namedval, univ, constraint, char, useful
+from pyasn1_modules import rfc2459, rfc2511, rfc2314
+
+MAX = float('inf')
+
+
+class KeyIdentifier(univ.OctetString):
+ pass
+
+
+class CMPCertificate(rfc2459.Certificate):
+ pass
+
+
+class OOBCert(CMPCertificate):
+ pass
+
+
+class CertAnnContent(CMPCertificate):
+ pass
+
+
+class PKIFreeText(univ.SequenceOf):
+ """
+ PKIFreeText ::= SEQUENCE SIZE (1..MAX) OF UTF8String
+ """
+ componentType = char.UTF8String()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+class PollRepContent(univ.SequenceOf):
+ """
+ PollRepContent ::= SEQUENCE OF SEQUENCE {
+ certReqId INTEGER,
+ checkAfter INTEGER, -- time in seconds
+ reason PKIFreeText OPTIONAL
+ }
+ """
+
+ class CertReq(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certReqId', univ.Integer()),
+ namedtype.NamedType('checkAfter', univ.Integer()),
+ namedtype.OptionalNamedType('reason', PKIFreeText())
+ )
+
+ componentType = CertReq()
+
+
+class PollReqContent(univ.SequenceOf):
+ """
+ PollReqContent ::= SEQUENCE OF SEQUENCE {
+ certReqId INTEGER
+ }
+
+ """
+
+ class CertReq(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certReqId', univ.Integer())
+ )
+
+ componentType = CertReq()
+
+
+class InfoTypeAndValue(univ.Sequence):
+ """
+ InfoTypeAndValue ::= SEQUENCE {
+ infoType OBJECT IDENTIFIER,
+ infoValue ANY DEFINED BY infoType OPTIONAL
+ }"""
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('infoType', univ.ObjectIdentifier()),
+ namedtype.OptionalNamedType('infoValue', univ.Any())
+ )
+
+
+class GenRepContent(univ.SequenceOf):
+ componentType = InfoTypeAndValue()
+
+
+class GenMsgContent(univ.SequenceOf):
+ componentType = InfoTypeAndValue()
+
+
+class PKIConfirmContent(univ.Null):
+ pass
+
+
+class CRLAnnContent(univ.SequenceOf):
+ componentType = rfc2459.CertificateList()
+
+
+class CAKeyUpdAnnContent(univ.Sequence):
+ """
+ CAKeyUpdAnnContent ::= SEQUENCE {
+ oldWithNew CMPCertificate,
+ newWithOld CMPCertificate,
+ newWithNew CMPCertificate
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('oldWithNew', CMPCertificate()),
+ namedtype.NamedType('newWithOld', CMPCertificate()),
+ namedtype.NamedType('newWithNew', CMPCertificate())
+ )
+
+
+class RevDetails(univ.Sequence):
+ """
+ RevDetails ::= SEQUENCE {
+ certDetails CertTemplate,
+ crlEntryDetails Extensions OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certDetails', rfc2511.CertTemplate()),
+ namedtype.OptionalNamedType('crlEntryDetails', rfc2459.Extensions())
+ )
+
+
+class RevReqContent(univ.SequenceOf):
+ componentType = RevDetails()
+
+
+class CertOrEncCert(univ.Choice):
+ """
+ CertOrEncCert ::= CHOICE {
+ certificate [0] CMPCertificate,
+ encryptedCert [1] EncryptedValue
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certificate', CMPCertificate().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('encryptedCert', rfc2511.EncryptedValue().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+
+class CertifiedKeyPair(univ.Sequence):
+ """
+ CertifiedKeyPair ::= SEQUENCE {
+ certOrEncCert CertOrEncCert,
+ privateKey [0] EncryptedValue OPTIONAL,
+ publicationInfo [1] PKIPublicationInfo OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certOrEncCert', CertOrEncCert()),
+ namedtype.OptionalNamedType('privateKey', rfc2511.EncryptedValue().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('publicationInfo', rfc2511.PKIPublicationInfo().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+
+class POPODecKeyRespContent(univ.SequenceOf):
+ componentType = univ.Integer()
+
+
+class Challenge(univ.Sequence):
+ """
+ Challenge ::= SEQUENCE {
+ owf AlgorithmIdentifier OPTIONAL,
+ witness OCTET STRING,
+ challenge OCTET STRING
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('owf', rfc2459.AlgorithmIdentifier()),
+ namedtype.NamedType('witness', univ.OctetString()),
+ namedtype.NamedType('challenge', univ.OctetString())
+ )
+
+
+class PKIStatus(univ.Integer):
+ """
+ PKIStatus ::= INTEGER {
+ accepted (0),
+ grantedWithMods (1),
+ rejection (2),
+ waiting (3),
+ revocationWarning (4),
+ revocationNotification (5),
+ keyUpdateWarning (6)
+ }
+ """
+ namedValues = namedval.NamedValues(
+ ('accepted', 0),
+ ('grantedWithMods', 1),
+ ('rejection', 2),
+ ('waiting', 3),
+ ('revocationWarning', 4),
+ ('revocationNotification', 5),
+ ('keyUpdateWarning', 6)
+ )
+
+
+class PKIFailureInfo(univ.BitString):
+ """
+ PKIFailureInfo ::= BIT STRING {
+ badAlg (0),
+ badMessageCheck (1),
+ badRequest (2),
+ badTime (3),
+ badCertId (4),
+ badDataFormat (5),
+ wrongAuthority (6),
+ incorrectData (7),
+ missingTimeStamp (8),
+ badPOP (9),
+ certRevoked (10),
+ certConfirmed (11),
+ wrongIntegrity (12),
+ badRecipientNonce (13),
+ timeNotAvailable (14),
+ unacceptedPolicy (15),
+ unacceptedExtension (16),
+ addInfoNotAvailable (17),
+ badSenderNonce (18),
+ badCertTemplate (19),
+ signerNotTrusted (20),
+ transactionIdInUse (21),
+ unsupportedVersion (22),
+ notAuthorized (23),
+ systemUnavail (24),
+ systemFailure (25),
+ duplicateCertReq (26)
+    }
+    """
+ namedValues = namedval.NamedValues(
+ ('badAlg', 0),
+ ('badMessageCheck', 1),
+ ('badRequest', 2),
+ ('badTime', 3),
+ ('badCertId', 4),
+ ('badDataFormat', 5),
+ ('wrongAuthority', 6),
+ ('incorrectData', 7),
+ ('missingTimeStamp', 8),
+ ('badPOP', 9),
+ ('certRevoked', 10),
+ ('certConfirmed', 11),
+ ('wrongIntegrity', 12),
+ ('badRecipientNonce', 13),
+ ('timeNotAvailable', 14),
+ ('unacceptedPolicy', 15),
+ ('unacceptedExtension', 16),
+ ('addInfoNotAvailable', 17),
+ ('badSenderNonce', 18),
+ ('badCertTemplate', 19),
+ ('signerNotTrusted', 20),
+ ('transactionIdInUse', 21),
+ ('unsupportedVersion', 22),
+ ('notAuthorized', 23),
+ ('systemUnavail', 24),
+ ('systemFailure', 25),
+ ('duplicateCertReq', 26)
+ )
+
+
+class PKIStatusInfo(univ.Sequence):
+ """
+ PKIStatusInfo ::= SEQUENCE {
+ status PKIStatus,
+ statusString PKIFreeText OPTIONAL,
+ failInfo PKIFailureInfo OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('status', PKIStatus()),
+ namedtype.OptionalNamedType('statusString', PKIFreeText()),
+ namedtype.OptionalNamedType('failInfo', PKIFailureInfo())
+ )
+
+
+class ErrorMsgContent(univ.Sequence):
+ """
+ ErrorMsgContent ::= SEQUENCE {
+ pKIStatusInfo PKIStatusInfo,
+ errorCode INTEGER OPTIONAL,
+ -- implementation-specific error codes
+ errorDetails PKIFreeText OPTIONAL
+ -- implementation-specific error details
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('pKIStatusInfo', PKIStatusInfo()),
+ namedtype.OptionalNamedType('errorCode', univ.Integer()),
+ namedtype.OptionalNamedType('errorDetails', PKIFreeText())
+ )
+
+
+class CertStatus(univ.Sequence):
+ """
+ CertStatus ::= SEQUENCE {
+ certHash OCTET STRING,
+ certReqId INTEGER,
+ statusInfo PKIStatusInfo OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certHash', univ.OctetString()),
+ namedtype.NamedType('certReqId', univ.Integer()),
+ namedtype.OptionalNamedType('statusInfo', PKIStatusInfo())
+ )
+
+
+class CertConfirmContent(univ.SequenceOf):
+ componentType = CertStatus()
+
+
+class RevAnnContent(univ.Sequence):
+ """
+ RevAnnContent ::= SEQUENCE {
+ status PKIStatus,
+ certId CertId,
+ willBeRevokedAt GeneralizedTime,
+ badSinceDate GeneralizedTime,
+ crlDetails Extensions OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('status', PKIStatus()),
+ namedtype.NamedType('certId', rfc2511.CertId()),
+ namedtype.NamedType('willBeRevokedAt', useful.GeneralizedTime()),
+ namedtype.NamedType('badSinceDate', useful.GeneralizedTime()),
+ namedtype.OptionalNamedType('crlDetails', rfc2459.Extensions())
+ )
+
+
+class RevRepContent(univ.Sequence):
+ """
+ RevRepContent ::= SEQUENCE {
+ status SEQUENCE SIZE (1..MAX) OF PKIStatusInfo,
+ revCerts [0] SEQUENCE SIZE (1..MAX) OF CertId
+ OPTIONAL,
+ crls [1] SEQUENCE SIZE (1..MAX) OF CertificateList
+ OPTIONAL
+    }
+    """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('status', PKIStatusInfo()),
+ namedtype.OptionalNamedType(
+ 'revCerts', univ.SequenceOf(componentType=rfc2511.CertId()).subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)
+ )
+ ),
+ namedtype.OptionalNamedType(
+ 'crls', univ.SequenceOf(componentType=rfc2459.CertificateList()).subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)
+ )
+ )
+ )
+
+
+class KeyRecRepContent(univ.Sequence):
+ """
+ KeyRecRepContent ::= SEQUENCE {
+ status PKIStatusInfo,
+ newSigCert [0] CMPCertificate OPTIONAL,
+ caCerts [1] SEQUENCE SIZE (1..MAX) OF
+ CMPCertificate OPTIONAL,
+ keyPairHist [2] SEQUENCE SIZE (1..MAX) OF
+ CertifiedKeyPair OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('status', PKIStatusInfo()),
+ namedtype.OptionalNamedType(
+ 'newSigCert', CMPCertificate().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)
+ )
+ ),
+ namedtype.OptionalNamedType(
+ 'caCerts', univ.SequenceOf(componentType=CMPCertificate()).subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1),
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX)
+ )
+ ),
+ namedtype.OptionalNamedType('keyPairHist', univ.SequenceOf(componentType=CertifiedKeyPair()).subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2),
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX))
+ )
+ )
+
+
+class CertResponse(univ.Sequence):
+ """
+ CertResponse ::= SEQUENCE {
+ certReqId INTEGER,
+ status PKIStatusInfo,
+ certifiedKeyPair CertifiedKeyPair OPTIONAL,
+ rspInfo OCTET STRING OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certReqId', univ.Integer()),
+ namedtype.NamedType('status', PKIStatusInfo()),
+ namedtype.OptionalNamedType('certifiedKeyPair', CertifiedKeyPair()),
+ namedtype.OptionalNamedType('rspInfo', univ.OctetString())
+ )
+
+
+class CertRepMessage(univ.Sequence):
+ """
+ CertRepMessage ::= SEQUENCE {
+ caPubs [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate
+ OPTIONAL,
+ response SEQUENCE OF CertResponse
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType(
+ 'caPubs', univ.SequenceOf(
+ componentType=CMPCertificate()
+ ).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))
+ ),
+ namedtype.NamedType('response', univ.SequenceOf(componentType=CertResponse()))
+ )
+
+
+class POPODecKeyChallContent(univ.SequenceOf):
+ componentType = Challenge()
+
+
+class OOBCertHash(univ.Sequence):
+ """
+ OOBCertHash ::= SEQUENCE {
+ hashAlg [0] AlgorithmIdentifier OPTIONAL,
+ certId [1] CertId OPTIONAL,
+ hashVal BIT STRING
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType(
+ 'hashAlg', rfc2459.AlgorithmIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))
+ ),
+ namedtype.OptionalNamedType(
+ 'certId', rfc2511.CertId().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))
+ ),
+ namedtype.NamedType('hashVal', univ.BitString())
+ )
+
+
+# pyasn1 does not naturally handle recursive definitions, thus this hack:
+# NestedMessageContent ::= PKIMessages
+class NestedMessageContent(univ.SequenceOf):
+ """
+ NestedMessageContent ::= PKIMessages
+ """
+ componentType = univ.Any()
+
+
+class DHBMParameter(univ.Sequence):
+ """
+ DHBMParameter ::= SEQUENCE {
+ owf AlgorithmIdentifier,
+ -- AlgId for a One-Way Function (SHA-1 recommended)
+ mac AlgorithmIdentifier
+ -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC [PKCS11],
+ } -- or HMAC [RFC2104, RFC2202])
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('owf', rfc2459.AlgorithmIdentifier()),
+ namedtype.NamedType('mac', rfc2459.AlgorithmIdentifier())
+ )
+
+
+id_DHBasedMac = univ.ObjectIdentifier('1.2.840.113533.7.66.30')
+
+
+class PBMParameter(univ.Sequence):
+ """
+ PBMParameter ::= SEQUENCE {
+ salt OCTET STRING,
+ owf AlgorithmIdentifier,
+ iterationCount INTEGER,
+ mac AlgorithmIdentifier
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType(
+ 'salt', univ.OctetString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, 128))
+ ),
+ namedtype.NamedType('owf', rfc2459.AlgorithmIdentifier()),
+ namedtype.NamedType('iterationCount', univ.Integer()),
+ namedtype.NamedType('mac', rfc2459.AlgorithmIdentifier())
+ )
+
+
+id_PasswordBasedMac = univ.ObjectIdentifier('1.2.840.113533.7.66.13')
+
+
+class PKIProtection(univ.BitString):
+ pass
+
+
+# pyasn1 does not naturally handle recursive definitions, thus this hack:
+# NestedMessageContent ::= PKIMessages
+nestedMessageContent = NestedMessageContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 20))
+
+
+class PKIBody(univ.Choice):
+ """
+ PKIBody ::= CHOICE { -- message-specific body elements
+ ir [0] CertReqMessages, --Initialization Request
+ ip [1] CertRepMessage, --Initialization Response
+ cr [2] CertReqMessages, --Certification Request
+ cp [3] CertRepMessage, --Certification Response
+ p10cr [4] CertificationRequest, --imported from [PKCS10]
+ popdecc [5] POPODecKeyChallContent, --pop Challenge
+ popdecr [6] POPODecKeyRespContent, --pop Response
+ kur [7] CertReqMessages, --Key Update Request
+ kup [8] CertRepMessage, --Key Update Response
+ krr [9] CertReqMessages, --Key Recovery Request
+ krp [10] KeyRecRepContent, --Key Recovery Response
+ rr [11] RevReqContent, --Revocation Request
+ rp [12] RevRepContent, --Revocation Response
+ ccr [13] CertReqMessages, --Cross-Cert. Request
+ ccp [14] CertRepMessage, --Cross-Cert. Response
+ ckuann [15] CAKeyUpdAnnContent, --CA Key Update Ann.
+ cann [16] CertAnnContent, --Certificate Ann.
+ rann [17] RevAnnContent, --Revocation Ann.
+ crlann [18] CRLAnnContent, --CRL Announcement
+ pkiconf [19] PKIConfirmContent, --Confirmation
+ nested [20] NestedMessageContent, --Nested Message
+ genm [21] GenMsgContent, --General Message
+ genp [22] GenRepContent, --General Response
+ error [23] ErrorMsgContent, --Error Message
+ certConf [24] CertConfirmContent, --Certificate confirm
+ pollReq [25] PollReqContent, --Polling request
+ pollRep [26] PollRepContent --Polling response
+
+    }
+    """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType(
+ 'ir', rfc2511.CertReqMessages().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)
+ )
+ ),
+ namedtype.NamedType(
+ 'ip', CertRepMessage().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)
+ )
+ ),
+ namedtype.NamedType(
+ 'cr', rfc2511.CertReqMessages().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)
+ )
+ ),
+ namedtype.NamedType(
+ 'cp', CertRepMessage().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3)
+ )
+ ),
+ namedtype.NamedType(
+ 'p10cr', rfc2314.CertificationRequest().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4)
+ )
+ ),
+ namedtype.NamedType(
+ 'popdecc', POPODecKeyChallContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5)
+ )
+ ),
+ namedtype.NamedType(
+ 'popdecr', POPODecKeyRespContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6)
+ )
+ ),
+ namedtype.NamedType(
+ 'kur', rfc2511.CertReqMessages().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 7)
+ )
+ ),
+ namedtype.NamedType(
+ 'kup', CertRepMessage().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8)
+ )
+ ),
+ namedtype.NamedType(
+ 'krr', rfc2511.CertReqMessages().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9)
+ )
+ ),
+ namedtype.NamedType(
+ 'krp', KeyRecRepContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 10)
+ )
+ ),
+ namedtype.NamedType(
+ 'rr', RevReqContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 11)
+ )
+ ),
+ namedtype.NamedType(
+ 'rp', RevRepContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 12)
+ )
+ ),
+ namedtype.NamedType(
+ 'ccr', rfc2511.CertReqMessages().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 13)
+ )
+ ),
+ namedtype.NamedType(
+ 'ccp', CertRepMessage().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 14)
+ )
+ ),
+ namedtype.NamedType(
+ 'ckuann', CAKeyUpdAnnContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 15)
+ )
+ ),
+ namedtype.NamedType(
+ 'cann', CertAnnContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 16)
+ )
+ ),
+ namedtype.NamedType(
+ 'rann', RevAnnContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 17)
+ )
+ ),
+ namedtype.NamedType(
+ 'crlann', CRLAnnContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 18)
+ )
+ ),
+ namedtype.NamedType(
+ 'pkiconf', PKIConfirmContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 19)
+ )
+ ),
+ namedtype.NamedType(
+ 'nested', nestedMessageContent
+ ),
+ # namedtype.NamedType('nested', NestedMessageContent().subtype(
+ # explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,20)
+ # )
+ # ),
+ namedtype.NamedType(
+ 'genm', GenMsgContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 21)
+ )
+ ),
+ namedtype.NamedType(
+ 'gen', GenRepContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 22)
+ )
+ ),
+ namedtype.NamedType(
+ 'error', ErrorMsgContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 23)
+ )
+ ),
+ namedtype.NamedType(
+ 'certConf', CertConfirmContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 24)
+ )
+ ),
+ namedtype.NamedType(
+ 'pollReq', PollReqContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 25)
+ )
+ ),
+ namedtype.NamedType(
+ 'pollRep', PollRepContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 26)
+ )
+ )
+ )
+
+
+class PKIHeader(univ.Sequence):
+ """
+ PKIHeader ::= SEQUENCE {
+ pvno INTEGER { cmp1999(1), cmp2000(2) },
+ sender GeneralName,
+ recipient GeneralName,
+ messageTime [0] GeneralizedTime OPTIONAL,
+ protectionAlg [1] AlgorithmIdentifier OPTIONAL,
+ senderKID [2] KeyIdentifier OPTIONAL,
+ recipKID [3] KeyIdentifier OPTIONAL,
+ transactionID [4] OCTET STRING OPTIONAL,
+ senderNonce [5] OCTET STRING OPTIONAL,
+ recipNonce [6] OCTET STRING OPTIONAL,
+ freeText [7] PKIFreeText OPTIONAL,
+ generalInfo [8] SEQUENCE SIZE (1..MAX) OF
+ InfoTypeAndValue OPTIONAL
+ }
+
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType(
+ 'pvno', univ.Integer(
+ namedValues=namedval.NamedValues(('cmp1999', 1), ('cmp2000', 2))
+ )
+ ),
+ namedtype.NamedType('sender', rfc2459.GeneralName()),
+ namedtype.NamedType('recipient', rfc2459.GeneralName()),
+ namedtype.OptionalNamedType('messageTime', useful.GeneralizedTime().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('protectionAlg', rfc2459.AlgorithmIdentifier().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.OptionalNamedType('senderKID', rfc2459.KeyIdentifier().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('recipKID', rfc2459.KeyIdentifier().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.OptionalNamedType('transactionID', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
+ namedtype.OptionalNamedType('senderNonce', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))),
+ namedtype.OptionalNamedType('recipNonce', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))),
+ namedtype.OptionalNamedType('freeText', PKIFreeText().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 7))),
+ namedtype.OptionalNamedType('generalInfo',
+ univ.SequenceOf(
+ componentType=InfoTypeAndValue().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8)
+ )
+ )
+ )
+ )
+
+
+class ProtectedPart(univ.Sequence):
+ """
+ ProtectedPart ::= SEQUENCE {
+ header PKIHeader,
+ body PKIBody
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('header', PKIHeader()),
+ namedtype.NamedType('infoValue', PKIBody())
+ )
+
+
+class PKIMessage(univ.Sequence):
+ """
+ PKIMessage ::= SEQUENCE {
+ header PKIHeader,
+ body PKIBody,
+ protection [0] PKIProtection OPTIONAL,
+ extraCerts [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate
+ OPTIONAL
+ }"""
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('header', PKIHeader()),
+ namedtype.NamedType('body', PKIBody()),
+ namedtype.OptionalNamedType('protection', PKIProtection().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('extraCerts',
+ univ.SequenceOf(
+ componentType=CMPCertificate()
+ ).subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)
+ )
+ )
+ )
+
+
+class PKIMessages(univ.SequenceOf):
+ """
+ PKIMessages ::= SEQUENCE SIZE (1..MAX) OF PKIMessage
+ """
+ componentType = PKIMessage()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+# pyasn1 does not naturally handle recursive definitions, thus this hack:
+# NestedMessageContent ::= PKIMessages
+NestedMessageContent._componentType = PKIMessages()
+nestedMessageContent._componentType = PKIMessages()
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc4211.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc4211.py
new file mode 100644
index 0000000000..d20da7872a
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc4211.py
@@ -0,0 +1,391 @@
+# coding: utf-8
+#
+# This file is part of pyasn1-modules software.
+#
+# Created by Stanisław Pitucha with asn1ate tool.
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Internet X.509 Public Key Infrastructure Certificate Request
+# Message Format (CRMF)
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc4211.txt
+#
+from pyasn1.type import univ, char, namedtype, namedval, tag, constraint
+
+from pyasn1_modules import rfc3280
+from pyasn1_modules import rfc3852
+
+MAX = float('inf')
+
+
+def _buildOid(*components):
+ output = []
+ for x in tuple(components):
+ if isinstance(x, univ.ObjectIdentifier):
+ output.extend(list(x))
+ else:
+ output.append(int(x))
+
+ return univ.ObjectIdentifier(output)
+
+
+id_pkix = _buildOid(1, 3, 6, 1, 5, 5, 7)
+
+id_pkip = _buildOid(id_pkix, 5)
+
+id_regCtrl = _buildOid(id_pkip, 1)
+
+
+class SinglePubInfo(univ.Sequence):
+ pass
+
+
+SinglePubInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('pubMethod', univ.Integer(
+ namedValues=namedval.NamedValues(('dontCare', 0), ('x500', 1), ('web', 2), ('ldap', 3)))),
+ namedtype.OptionalNamedType('pubLocation', rfc3280.GeneralName())
+)
+
+
+class UTF8Pairs(char.UTF8String):
+ pass
+
+
+class PKMACValue(univ.Sequence):
+ pass
+
+
+PKMACValue.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algId', rfc3280.AlgorithmIdentifier()),
+ namedtype.NamedType('value', univ.BitString())
+)
+
+
+class POPOSigningKeyInput(univ.Sequence):
+ pass
+
+
+POPOSigningKeyInput.componentType = namedtype.NamedTypes(
+ namedtype.NamedType(
+ 'authInfo', univ.Choice(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType(
+ 'sender', rfc3280.GeneralName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))
+ ),
+ namedtype.NamedType(
+ 'publicKeyMAC', PKMACValue()
+ )
+ )
+ )
+ ),
+ namedtype.NamedType('publicKey', rfc3280.SubjectPublicKeyInfo())
+)
+
+
+class POPOSigningKey(univ.Sequence):
+ pass
+
+
+POPOSigningKey.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('poposkInput', POPOSigningKeyInput().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('algorithmIdentifier', rfc3280.AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+)
+
+
+class Attributes(univ.SetOf):
+ pass
+
+
+Attributes.componentType = rfc3280.Attribute()
+
+
+class PrivateKeyInfo(univ.Sequence):
+ pass
+
+
+PrivateKeyInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer()),
+ namedtype.NamedType('privateKeyAlgorithm', rfc3280.AlgorithmIdentifier()),
+ namedtype.NamedType('privateKey', univ.OctetString()),
+ namedtype.OptionalNamedType('attributes',
+ Attributes().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class EncryptedValue(univ.Sequence):
+ pass
+
+
+EncryptedValue.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('intendedAlg', rfc3280.AlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('symmAlg', rfc3280.AlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('encSymmKey', univ.BitString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('keyAlg', rfc3280.AlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.OptionalNamedType('valueHint', univ.OctetString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
+ namedtype.NamedType('encValue', univ.BitString())
+)
+
+
+class EncryptedKey(univ.Choice):
+ pass
+
+
+EncryptedKey.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('encryptedValue', EncryptedValue()),
+ namedtype.NamedType('envelopedData', rfc3852.EnvelopedData().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class KeyGenParameters(univ.OctetString):
+ pass
+
+
+class PKIArchiveOptions(univ.Choice):
+ pass
+
+
+PKIArchiveOptions.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('encryptedPrivKey',
+ EncryptedKey().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('keyGenParameters',
+ KeyGenParameters().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('archiveRemGenPrivKey',
+ univ.Boolean().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+)
+
+id_regCtrl_authenticator = _buildOid(id_regCtrl, 2)
+
+id_regInfo = _buildOid(id_pkip, 2)
+
+id_regInfo_certReq = _buildOid(id_regInfo, 2)
+
+
+class ProtocolEncrKey(rfc3280.SubjectPublicKeyInfo):
+ pass
+
+
+class Authenticator(char.UTF8String):
+ pass
+
+
+class SubsequentMessage(univ.Integer):
+ pass
+
+
+SubsequentMessage.namedValues = namedval.NamedValues(
+ ('encrCert', 0),
+ ('challengeResp', 1)
+)
+
+
+class AttributeTypeAndValue(univ.Sequence):
+ pass
+
+
+AttributeTypeAndValue.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', univ.ObjectIdentifier()),
+ namedtype.NamedType('value', univ.Any())
+)
+
+
+class POPOPrivKey(univ.Choice):
+ pass
+
+
+POPOPrivKey.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('thisMessage',
+ univ.BitString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('subsequentMessage',
+ SubsequentMessage().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('dhMAC',
+ univ.BitString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('agreeMAC',
+ PKMACValue().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.NamedType('encryptedKey', rfc3852.EnvelopedData().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)))
+)
+
+
+class ProofOfPossession(univ.Choice):
+ pass
+
+
+ProofOfPossession.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('raVerified',
+ univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('signature', POPOSigningKey().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('keyEncipherment',
+ POPOPrivKey().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.NamedType('keyAgreement',
+ POPOPrivKey().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3)))
+)
+
+
+class OptionalValidity(univ.Sequence):
+ pass
+
+
+OptionalValidity.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('notBefore', rfc3280.Time().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('notAfter', rfc3280.Time().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+)
+
+
+class CertTemplate(univ.Sequence):
+ pass
+
+
+CertTemplate.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('version', rfc3280.Version().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('serialNumber', univ.Integer().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('signingAlg', rfc3280.AlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('issuer', rfc3280.Name().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.OptionalNamedType('validity', OptionalValidity().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
+ namedtype.OptionalNamedType('subject', rfc3280.Name().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
+ namedtype.OptionalNamedType('publicKey', rfc3280.SubjectPublicKeyInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))),
+ namedtype.OptionalNamedType('issuerUID', rfc3280.UniqueIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.OptionalNamedType('subjectUID', rfc3280.UniqueIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8))),
+ namedtype.OptionalNamedType('extensions', rfc3280.Extensions().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 9)))
+)
+
+
+class Controls(univ.SequenceOf):
+ pass
+
+
+Controls.componentType = AttributeTypeAndValue()
+Controls.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class CertRequest(univ.Sequence):
+ pass
+
+
+CertRequest.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certReqId', univ.Integer()),
+ namedtype.NamedType('certTemplate', CertTemplate()),
+ namedtype.OptionalNamedType('controls', Controls())
+)
+
+
+class CertReqMsg(univ.Sequence):
+ pass
+
+
+CertReqMsg.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certReq', CertRequest()),
+ namedtype.OptionalNamedType('popo', ProofOfPossession()),
+ namedtype.OptionalNamedType('regInfo', univ.SequenceOf(componentType=AttributeTypeAndValue()))
+)
+
+
+class CertReqMessages(univ.SequenceOf):
+ pass
+
+
+CertReqMessages.componentType = CertReqMsg()
+CertReqMessages.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class CertReq(CertRequest):
+ pass
+
+
+id_regCtrl_pkiPublicationInfo = _buildOid(id_regCtrl, 3)
+
+
+class CertId(univ.Sequence):
+ pass
+
+
+CertId.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuer', rfc3280.GeneralName()),
+ namedtype.NamedType('serialNumber', univ.Integer())
+)
+
+
+class OldCertId(CertId):
+ pass
+
+
+class PKIPublicationInfo(univ.Sequence):
+ pass
+
+
+PKIPublicationInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('action',
+ univ.Integer(namedValues=namedval.NamedValues(('dontPublish', 0), ('pleasePublish', 1)))),
+ namedtype.OptionalNamedType('pubInfos', univ.SequenceOf(componentType=SinglePubInfo()))
+)
+
+
+class EncKeyWithID(univ.Sequence):
+ pass
+
+
+EncKeyWithID.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('privateKey', PrivateKeyInfo()),
+ namedtype.OptionalNamedType(
+ 'identifier', univ.Choice(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('string', char.UTF8String()),
+ namedtype.NamedType('generalName', rfc3280.GeneralName())
+ )
+ )
+ )
+)
+
+id_regCtrl_protocolEncrKey = _buildOid(id_regCtrl, 6)
+
+id_regCtrl_oldCertID = _buildOid(id_regCtrl, 5)
+
+id_smime = _buildOid(1, 2, 840, 113549, 1, 9, 16)
+
+
+class PBMParameter(univ.Sequence):
+ pass
+
+
+PBMParameter.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('salt', univ.OctetString()),
+ namedtype.NamedType('owf', rfc3280.AlgorithmIdentifier()),
+ namedtype.NamedType('iterationCount', univ.Integer()),
+ namedtype.NamedType('mac', rfc3280.AlgorithmIdentifier())
+)
+
+id_regCtrl_regToken = _buildOid(id_regCtrl, 1)
+
+id_regCtrl_pkiArchiveOptions = _buildOid(id_regCtrl, 4)
+
+id_regInfo_utf8Pairs = _buildOid(id_regInfo, 1)
+
+id_ct = _buildOid(id_smime, 1)
+
+id_ct_encKeyWithID = _buildOid(id_ct, 21)
+
+
+class RegToken(char.UTF8String):
+ pass
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc5208.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc5208.py
new file mode 100644
index 0000000000..6b6487d83a
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc5208.py
@@ -0,0 +1,56 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# PKCS#8 syntax
+#
+# ASN.1 source from:
+# http://tools.ietf.org/html/rfc5208
+#
+# Sample captures could be obtained with "openssl pkcs8 -topk8" command
+#
+from pyasn1_modules.rfc2459 import *
+from pyasn1_modules import rfc2251
+
+
+class KeyEncryptionAlgorithms(AlgorithmIdentifier):
+ pass
+
+
+class PrivateKeyAlgorithms(AlgorithmIdentifier):
+ pass
+
+
+class EncryptedData(univ.OctetString):
+ pass
+
+
+class EncryptedPrivateKeyInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('encryptionAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('encryptedData', EncryptedData())
+ )
+
+
+class PrivateKey(univ.OctetString):
+ pass
+
+
+class Attributes(univ.SetOf):
+ componentType = rfc2251.Attribute()
+
+
+class Version(univ.Integer):
+ namedValues = namedval.NamedValues(('v1', 0), ('v2', 1))
+
+
+class PrivateKeyInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('privateKeyAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('privateKey', PrivateKey()),
+ namedtype.OptionalNamedType('attributes', Attributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc5280.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc5280.py
new file mode 100644
index 0000000000..7d3aa695aa
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc5280.py
@@ -0,0 +1,1597 @@
+# coding: utf-8
+#
+# This file is part of pyasn1-modules software.
+#
+# Created by Stanisław Pitucha with asn1ate tool.
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Internet X.509 Public Key Infrastructure Certificate and Certificate
+# Revocation List (CRL) Profile
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc5280.txt
+#
+from pyasn1.type import univ
+from pyasn1.type import char
+from pyasn1.type import namedtype
+from pyasn1.type import namedval
+from pyasn1.type import tag
+from pyasn1.type import constraint
+from pyasn1.type import useful
+
+MAX = float('inf')
+
+def _buildOid(*components):
+ output = []
+ for x in tuple(components):
+ if isinstance(x, univ.ObjectIdentifier):
+ output.extend(list(x))
+ else:
+ output.append(int(x))
+
+ return univ.ObjectIdentifier(output)
+
+
+ub_e163_4_sub_address_length = univ.Integer(40)
+
+ub_e163_4_number_length = univ.Integer(15)
+
+unformatted_postal_address = univ.Integer(16)
+
+
+class TerminalType(univ.Integer):
+ pass
+
+
+TerminalType.namedValues = namedval.NamedValues(
+ ('telex', 3),
+ ('teletex', 4),
+ ('g3-facsimile', 5),
+ ('g4-facsimile', 6),
+ ('ia5-terminal', 7),
+ ('videotex', 8)
+)
+
+
+class Extension(univ.Sequence):
+ pass
+
+
+Extension.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extnID', univ.ObjectIdentifier()),
+ namedtype.DefaultedNamedType('critical', univ.Boolean().subtype(value=0)),
+ namedtype.NamedType('extnValue', univ.OctetString())
+)
+
+
+class Extensions(univ.SequenceOf):
+ pass
+
+
+Extensions.componentType = Extension()
+Extensions.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+physical_delivery_personal_name = univ.Integer(13)
+
+ub_unformatted_address_length = univ.Integer(180)
+
+ub_pds_parameter_length = univ.Integer(30)
+
+ub_pds_physical_address_lines = univ.Integer(6)
+
+
+class UnformattedPostalAddress(univ.Set):
+ pass
+
+
+UnformattedPostalAddress.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('printable-address', univ.SequenceOf(componentType=char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)))),
+ namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_unformatted_address_length)))
+)
+
+ub_organization_name = univ.Integer(64)
+
+
+class X520OrganizationName(univ.Choice):
+ pass
+
+
+X520OrganizationName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name)))
+)
+
+ub_x121_address_length = univ.Integer(16)
+
+pds_name = univ.Integer(7)
+
+id_pkix = _buildOid(1, 3, 6, 1, 5, 5, 7)
+
+id_kp = _buildOid(id_pkix, 3)
+
+ub_postal_code_length = univ.Integer(16)
+
+
+class PostalCode(univ.Choice):
+ pass
+
+
+PostalCode.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric-code', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length))),
+ namedtype.NamedType('printable-code', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length)))
+)
+
+ub_generation_qualifier_length = univ.Integer(3)
+
+unique_postal_name = univ.Integer(20)
+
+
+class DomainComponent(char.IA5String):
+ pass
+
+
+ub_domain_defined_attribute_value_length = univ.Integer(128)
+
+ub_match = univ.Integer(128)
+
+id_at = _buildOid(2, 5, 4)
+
+
+class AttributeType(univ.ObjectIdentifier):
+ pass
+
+
+id_at_organizationalUnitName = _buildOid(id_at, 11)
+
+terminal_type = univ.Integer(23)
+
+
+class PDSParameter(univ.Set):
+ pass
+
+
+PDSParameter.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('printable-string', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length))),
+ namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)))
+)
+
+
+class PhysicalDeliveryPersonalName(PDSParameter):
+ pass
+
+
+ub_surname_length = univ.Integer(40)
+
+id_ad = _buildOid(id_pkix, 48)
+
+ub_domain_defined_attribute_type_length = univ.Integer(8)
+
+
+class TeletexDomainDefinedAttribute(univ.Sequence):
+ pass
+
+
+TeletexDomainDefinedAttribute.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
+ namedtype.NamedType('value', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_value_length)))
+)
+
+ub_domain_defined_attributes = univ.Integer(4)
+
+
+class TeletexDomainDefinedAttributes(univ.SequenceOf):
+ pass
+
+
+TeletexDomainDefinedAttributes.componentType = TeletexDomainDefinedAttribute()
+TeletexDomainDefinedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)
+
+extended_network_address = univ.Integer(22)
+
+ub_locality_name = univ.Integer(128)
+
+
+class X520LocalityName(univ.Choice):
+ pass
+
+
+X520LocalityName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name)))
+)
+
+teletex_organization_name = univ.Integer(3)
+
+ub_given_name_length = univ.Integer(16)
+
+ub_initials_length = univ.Integer(5)
+
+
+class PersonalName(univ.Set):
+ pass
+
+
+PersonalName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('surname', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('given-name', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('initials', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('generation-qualifier', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+)
+
+ub_organizational_unit_name_length = univ.Integer(32)
+
+
+class OrganizationalUnitName(char.PrintableString):
+ pass
+
+
+OrganizationalUnitName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)
+
+id_at_generationQualifier = _buildOid(id_at, 44)
+
+
+class Version(univ.Integer):
+ pass
+
+
+Version.namedValues = namedval.NamedValues(
+ ('v1', 0),
+ ('v2', 1),
+ ('v3', 2)
+)
+
+
+class CertificateSerialNumber(univ.Integer):
+ pass
+
+
+class AlgorithmIdentifier(univ.Sequence):
+ pass
+
+
+AlgorithmIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algorithm', univ.ObjectIdentifier()),
+ namedtype.OptionalNamedType('parameters', univ.Any())
+)
+
+
+class Time(univ.Choice):
+ pass
+
+
+Time.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('utcTime', useful.UTCTime()),
+ namedtype.NamedType('generalTime', useful.GeneralizedTime())
+)
+
+
+class AttributeValue(univ.Any):
+ pass
+
+
+class AttributeTypeAndValue(univ.Sequence):
+ pass
+
+
+AttributeTypeAndValue.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeType()),
+ namedtype.NamedType('value', AttributeValue())
+)
+
+
+class RelativeDistinguishedName(univ.SetOf):
+ pass
+
+
+RelativeDistinguishedName.componentType = AttributeTypeAndValue()
+RelativeDistinguishedName.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class RDNSequence(univ.SequenceOf):
+ pass
+
+
+RDNSequence.componentType = RelativeDistinguishedName()
+
+
+class Name(univ.Choice):
+ pass
+
+
+Name.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('rdnSequence', RDNSequence())
+)
+
+
+class TBSCertList(univ.Sequence):
+ pass
+
+
+TBSCertList.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('version', Version()),
+ namedtype.NamedType('signature', AlgorithmIdentifier()),
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('thisUpdate', Time()),
+ namedtype.OptionalNamedType('nextUpdate', Time()),
+ namedtype.OptionalNamedType(
+ 'revokedCertificates', univ.SequenceOf(
+ componentType=univ.Sequence(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('userCertificate', CertificateSerialNumber()),
+ namedtype.NamedType('revocationDate', Time()),
+ namedtype.OptionalNamedType('crlEntryExtensions', Extensions())
+ )
+ )
+ )
+ ),
+ namedtype.OptionalNamedType(
+ 'crlExtensions', Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class CertificateList(univ.Sequence):
+ pass
+
+
+CertificateList.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsCertList', TBSCertList()),
+ namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+)
+
+
+class PhysicalDeliveryOfficeName(PDSParameter):
+ pass
+
+
+ub_extension_attributes = univ.Integer(256)
+
+
+class ExtensionAttribute(univ.Sequence):
+ pass
+
+
+ExtensionAttribute.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extension-attribute-type', univ.Integer().subtype(
+ subtypeSpec=constraint.ValueRangeConstraint(0, ub_extension_attributes)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('extension-attribute-value',
+ univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+id_qt = _buildOid(id_pkix, 2)
+
+id_qt_cps = _buildOid(id_qt, 1)
+
+id_at_stateOrProvinceName = _buildOid(id_at, 8)
+
+id_at_title = _buildOid(id_at, 12)
+
+id_at_serialNumber = _buildOid(id_at, 5)
+
+
+class X520dnQualifier(char.PrintableString):
+ pass
+
+
+class PosteRestanteAddress(PDSParameter):
+ pass
+
+
+poste_restante_address = univ.Integer(19)
+
+
+class UniqueIdentifier(univ.BitString):
+ pass
+
+
+class Validity(univ.Sequence):
+ pass
+
+
+Validity.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('notBefore', Time()),
+ namedtype.NamedType('notAfter', Time())
+)
+
+
+class SubjectPublicKeyInfo(univ.Sequence):
+ pass
+
+
+SubjectPublicKeyInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('subjectPublicKey', univ.BitString())
+)
+
+
+class TBSCertificate(univ.Sequence):
+ pass
+
+
+TBSCertificate.componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('version',
+ Version().subtype(explicitTag=tag.Tag(tag.tagClassContext,
+ tag.tagFormatSimple, 0)).subtype(value="v1")),
+ namedtype.NamedType('serialNumber', CertificateSerialNumber()),
+ namedtype.NamedType('signature', AlgorithmIdentifier()),
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('validity', Validity()),
+ namedtype.NamedType('subject', Name()),
+ namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()),
+ namedtype.OptionalNamedType('issuerUniqueID', UniqueIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('subjectUniqueID', UniqueIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('extensions',
+ Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+)
+
+physical_delivery_office_name = univ.Integer(10)
+
+ub_name = univ.Integer(32768)
+
+
+class X520name(univ.Choice):
+ pass
+
+
+X520name.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name)))
+)
+
+id_at_dnQualifier = _buildOid(id_at, 46)
+
+ub_serial_number = univ.Integer(64)
+
+ub_pseudonym = univ.Integer(128)
+
+pkcs_9 = _buildOid(1, 2, 840, 113549, 1, 9)
+
+
+class X121Address(char.NumericString):
+ pass
+
+
+X121Address.subtypeSpec = constraint.ValueSizeConstraint(1, ub_x121_address_length)
+
+
+class NetworkAddress(X121Address):
+ pass
+
+
+ub_integer_options = univ.Integer(256)
+
+id_at_commonName = _buildOid(id_at, 3)
+
+ub_organization_name_length = univ.Integer(64)
+
+id_ad_ocsp = _buildOid(id_ad, 1)
+
+ub_country_name_numeric_length = univ.Integer(3)
+
+ub_country_name_alpha_length = univ.Integer(2)
+
+
+class PhysicalDeliveryCountryName(univ.Choice):
+ pass
+
+
+PhysicalDeliveryCountryName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))),
+ namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length)))
+)
+
+id_emailAddress = _buildOid(pkcs_9, 1)
+
+common_name = univ.Integer(1)
+
+
+class X520Pseudonym(univ.Choice):
+ pass
+
+
+X520Pseudonym.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pseudonym)))
+)
+
+ub_domain_name_length = univ.Integer(16)
+
+
+class AdministrationDomainName(univ.Choice):
+ pass
+
+
+AdministrationDomainName.tagSet = univ.Choice.tagSet.tagExplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 2))
+AdministrationDomainName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length))),
+ namedtype.NamedType('printable', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length)))
+)
+
+
+class PresentationAddress(univ.Sequence):
+ pass
+
+
+PresentationAddress.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('pSelector', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('sSelector', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('tSelector', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('nAddresses', univ.SetOf(componentType=univ.OctetString()).subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+)
+
+
+class ExtendedNetworkAddress(univ.Choice):
+ pass
+
+
+ExtendedNetworkAddress.componentType = namedtype.NamedTypes(
+ namedtype.NamedType(
+ 'e163-4-address', univ.Sequence(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('number', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_number_length)).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('sub-address', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_sub_address_length)).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+ )
+ ),
+ namedtype.NamedType('psap-address', PresentationAddress().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+)
+
+
+class TeletexOrganizationName(char.TeletexString):
+ pass
+
+
+TeletexOrganizationName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organization_name_length)
+
+ub_terminal_id_length = univ.Integer(24)
+
+
+class TerminalIdentifier(char.PrintableString):
+ pass
+
+
+TerminalIdentifier.subtypeSpec = constraint.ValueSizeConstraint(1, ub_terminal_id_length)
+
+id_ad_caIssuers = _buildOid(id_ad, 2)
+
+id_at_countryName = _buildOid(id_at, 6)
+
+
+class StreetAddress(PDSParameter):
+ pass
+
+
+postal_code = univ.Integer(9)
+
+id_at_givenName = _buildOid(id_at, 42)
+
+ub_title = univ.Integer(64)
+
+
+class ExtensionAttributes(univ.SetOf):
+ pass
+
+
+ExtensionAttributes.componentType = ExtensionAttribute()
+ExtensionAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, ub_extension_attributes)
+
+ub_emailaddress_length = univ.Integer(255)
+
+id_ad_caRepository = _buildOid(id_ad, 5)
+
+
+class ExtensionORAddressComponents(PDSParameter):
+ pass
+
+
+ub_organizational_unit_name = univ.Integer(64)
+
+
+class X520OrganizationalUnitName(univ.Choice):
+ pass
+
+
+X520OrganizationalUnitName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name)))
+)
+
+
+class LocalPostalAttributes(PDSParameter):
+ pass
+
+
+teletex_organizational_unit_names = univ.Integer(5)
+
+
+class X520Title(univ.Choice):
+ pass
+
+
+X520Title.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title)))
+)
+
+id_at_localityName = _buildOid(id_at, 7)
+
+id_at_initials = _buildOid(id_at, 43)
+
+ub_state_name = univ.Integer(128)
+
+
+class X520StateOrProvinceName(univ.Choice):
+ pass
+
+
+X520StateOrProvinceName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name)))
+)
+
+physical_delivery_organization_name = univ.Integer(14)
+
+id_at_surname = _buildOid(id_at, 4)
+
+
+class X520countryName(char.PrintableString):
+ pass
+
+
+X520countryName.subtypeSpec = constraint.ValueSizeConstraint(2, 2)
+
+physical_delivery_office_number = univ.Integer(11)
+
+id_qt_unotice = _buildOid(id_qt, 2)
+
+
+class X520SerialNumber(char.PrintableString):
+ pass
+
+
+X520SerialNumber.subtypeSpec = constraint.ValueSizeConstraint(1, ub_serial_number)
+
+
+class Attribute(univ.Sequence):
+ pass
+
+
+Attribute.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeType()),
+ namedtype.NamedType('values', univ.SetOf(componentType=AttributeValue()))
+)
+
+ub_common_name = univ.Integer(64)
+
+id_pe = _buildOid(id_pkix, 1)
+
+
+class ExtensionPhysicalDeliveryAddressComponents(PDSParameter):
+ pass
+
+
+class EmailAddress(char.IA5String):
+ pass
+
+
+EmailAddress.subtypeSpec = constraint.ValueSizeConstraint(1, ub_emailaddress_length)
+
+id_at_organizationName = _buildOid(id_at, 10)
+
+post_office_box_address = univ.Integer(18)
+
+
+class BuiltInDomainDefinedAttribute(univ.Sequence):
+ pass
+
+
+BuiltInDomainDefinedAttribute.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
+ namedtype.NamedType('value', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_value_length)))
+)
+
+
+class BuiltInDomainDefinedAttributes(univ.SequenceOf):
+ pass
+
+
+BuiltInDomainDefinedAttributes.componentType = BuiltInDomainDefinedAttribute()
+BuiltInDomainDefinedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)
+
+id_at_pseudonym = _buildOid(id_at, 65)
+
+id_domainComponent = _buildOid(0, 9, 2342, 19200300, 100, 1, 25)
+
+
+class X520CommonName(univ.Choice):
+ pass
+
+
+X520CommonName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('utf8String',
+ char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('bmpString',
+ char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name)))
+)
+
+extension_OR_address_components = univ.Integer(12)
+
+ub_organizational_units = univ.Integer(4)
+
+teletex_personal_name = univ.Integer(4)
+
+ub_numeric_user_id_length = univ.Integer(32)
+
+ub_common_name_length = univ.Integer(64)
+
+
+class TeletexCommonName(char.TeletexString):
+ pass
+
+
+TeletexCommonName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_common_name_length)
+
+
+class PhysicalDeliveryOrganizationName(PDSParameter):
+ pass
+
+
+extension_physical_delivery_address_components = univ.Integer(15)
+
+
+class NumericUserIdentifier(char.NumericString):
+ pass
+
+
+NumericUserIdentifier.subtypeSpec = constraint.ValueSizeConstraint(1, ub_numeric_user_id_length)
+
+
+class CountryName(univ.Choice):
+ pass
+
+
+CountryName.tagSet = univ.Choice.tagSet.tagExplicitly(tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 1))
+CountryName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))),
+ namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length)))
+)
+
+
+class OrganizationName(char.PrintableString):
+ pass
+
+
+OrganizationName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organization_name_length)
+
+
+class OrganizationalUnitNames(univ.SequenceOf):
+ pass
+
+
+OrganizationalUnitNames.componentType = OrganizationalUnitName()
+OrganizationalUnitNames.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units)
+
+
+class PrivateDomainName(univ.Choice):
+ pass
+
+
+PrivateDomainName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric', char.NumericString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length))),
+ namedtype.NamedType('printable', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length)))
+)
+
+
+class BuiltInStandardAttributes(univ.Sequence):
+ pass
+
+
+BuiltInStandardAttributes.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('country-name', CountryName()),
+ namedtype.OptionalNamedType('administration-domain-name', AdministrationDomainName()),
+ namedtype.OptionalNamedType('network-address', NetworkAddress().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('terminal-identifier', TerminalIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('private-domain-name', PrivateDomainName().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.OptionalNamedType('organization-name', OrganizationName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.OptionalNamedType('numeric-user-identifier', NumericUserIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
+ namedtype.OptionalNamedType('personal-name', PersonalName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
+ namedtype.OptionalNamedType('organizational-unit-names', OrganizationalUnitNames().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6)))
+)
+
+
+class ORAddress(univ.Sequence):
+ pass
+
+
+ORAddress.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('built-in-standard-attributes', BuiltInStandardAttributes()),
+ namedtype.OptionalNamedType('built-in-domain-defined-attributes', BuiltInDomainDefinedAttributes()),
+ namedtype.OptionalNamedType('extension-attributes', ExtensionAttributes())
+)
+
+
+class DistinguishedName(RDNSequence):
+ pass
+
+
+id_ad_timeStamping = _buildOid(id_ad, 3)
+
+
+class PhysicalDeliveryOfficeNumber(PDSParameter):
+ pass
+
+
+teletex_domain_defined_attributes = univ.Integer(6)
+
+
+class UniquePostalName(PDSParameter):
+ pass
+
+
+physical_delivery_country_name = univ.Integer(8)
+
+ub_pds_name_length = univ.Integer(16)
+
+
+class PDSName(char.PrintableString):
+ pass
+
+
+PDSName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_pds_name_length)
+
+
+class TeletexPersonalName(univ.Set):
+ pass
+
+
+TeletexPersonalName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('surname', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('given-name', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('initials', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('generation-qualifier', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length)).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+)
+
+street_address = univ.Integer(17)
+
+
+class PostOfficeBoxAddress(PDSParameter):
+ pass
+
+
+local_postal_attributes = univ.Integer(21)
+
+
+class DirectoryString(univ.Choice):
+ pass
+
+
+DirectoryString.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString',
+ char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('printableString',
+ char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('universalString',
+ char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
+)
+
+teletex_common_name = univ.Integer(2)
+
+
+class CommonName(char.PrintableString):
+ pass
+
+
+CommonName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_common_name_length)
+
+
+class Certificate(univ.Sequence):
+ pass
+
+
+Certificate.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsCertificate', TBSCertificate()),
+ namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+)
+
+
+class TeletexOrganizationalUnitName(char.TeletexString):
+ pass
+
+
+TeletexOrganizationalUnitName.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)
+
+id_at_name = _buildOid(id_at, 41)
+
+
+class TeletexOrganizationalUnitNames(univ.SequenceOf):
+ pass
+
+
+TeletexOrganizationalUnitNames.componentType = TeletexOrganizationalUnitName()
+TeletexOrganizationalUnitNames.subtypeSpec = constraint.ValueSizeConstraint(1, ub_organizational_units)
+
+id_ce = _buildOid(2, 5, 29)
+
+id_ce_issuerAltName = _buildOid(id_ce, 18)
+
+
+class SkipCerts(univ.Integer):
+ pass
+
+
+SkipCerts.subtypeSpec = constraint.ValueRangeConstraint(0, MAX)
+
+
+class CRLReason(univ.Enumerated):
+ pass
+
+
+CRLReason.namedValues = namedval.NamedValues(
+ ('unspecified', 0),
+ ('keyCompromise', 1),
+ ('cACompromise', 2),
+ ('affiliationChanged', 3),
+ ('superseded', 4),
+ ('cessationOfOperation', 5),
+ ('certificateHold', 6),
+ ('removeFromCRL', 8),
+ ('privilegeWithdrawn', 9),
+ ('aACompromise', 10)
+)
+
+
+class PrivateKeyUsagePeriod(univ.Sequence):
+ pass
+
+
+PrivateKeyUsagePeriod.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('notBefore', useful.GeneralizedTime().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('notAfter', useful.GeneralizedTime().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class AnotherName(univ.Sequence):
+ pass
+
+
+AnotherName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type-id', univ.ObjectIdentifier()),
+ namedtype.NamedType('value', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class EDIPartyName(univ.Sequence):
+ pass
+
+
+EDIPartyName.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('nameAssigner', DirectoryString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('partyName', DirectoryString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+)
+
+
+class GeneralName(univ.Choice):
+ pass
+
+
+GeneralName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('otherName',
+ AnotherName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('rfc822Name',
+ char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('dNSName',
+ char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('x400Address',
+ ORAddress().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.NamedType('directoryName',
+ Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
+ namedtype.NamedType('ediPartyName',
+ EDIPartyName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
+ namedtype.NamedType('uniformResourceIdentifier',
+ char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))),
+ namedtype.NamedType('iPAddress',
+ univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8)))
+)
+
+
+class BaseDistance(univ.Integer):
+ pass
+
+
+BaseDistance.subtypeSpec = constraint.ValueRangeConstraint(0, MAX)
+
+
+class GeneralSubtree(univ.Sequence):
+ pass
+
+
+GeneralSubtree.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('base', GeneralName()),
+ namedtype.DefaultedNamedType('minimum', BaseDistance().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)).subtype(value=0)),
+ namedtype.OptionalNamedType('maximum', BaseDistance().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class GeneralNames(univ.SequenceOf):
+ pass
+
+
+GeneralNames.componentType = GeneralName()
+GeneralNames.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class DistributionPointName(univ.Choice):
+ pass
+
+
+DistributionPointName.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('fullName',
+ GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('nameRelativeToCRLIssuer', RelativeDistinguishedName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class ReasonFlags(univ.BitString):
+ pass
+
+
+ReasonFlags.namedValues = namedval.NamedValues(
+ ('unused', 0),
+ ('keyCompromise', 1),
+ ('cACompromise', 2),
+ ('affiliationChanged', 3),
+ ('superseded', 4),
+ ('cessationOfOperation', 5),
+ ('certificateHold', 6),
+ ('privilegeWithdrawn', 7),
+ ('aACompromise', 8)
+)
+
+
+class IssuingDistributionPoint(univ.Sequence):
+ pass
+
+
+IssuingDistributionPoint.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.DefaultedNamedType('onlyContainsUserCerts', univ.Boolean().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)).subtype(value=0)),
+ namedtype.DefaultedNamedType('onlyContainsCACerts', univ.Boolean().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)).subtype(value=0)),
+ namedtype.OptionalNamedType('onlySomeReasons', ReasonFlags().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.DefaultedNamedType('indirectCRL', univ.Boolean().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)).subtype(value=0)),
+ namedtype.DefaultedNamedType('onlyContainsAttributeCerts', univ.Boolean().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5)).subtype(value=0))
+)
+
+id_ce_certificatePolicies = _buildOid(id_ce, 32)
+
+id_kp_emailProtection = _buildOid(id_kp, 4)
+
+
+class AccessDescription(univ.Sequence):
+ pass
+
+
+AccessDescription.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('accessMethod', univ.ObjectIdentifier()),
+ namedtype.NamedType('accessLocation', GeneralName())
+)
+
+
+class IssuerAltName(GeneralNames):
+ pass
+
+
+id_ce_cRLDistributionPoints = _buildOid(id_ce, 31)
+
+holdInstruction = _buildOid(2, 2, 840, 10040, 2)
+
+id_holdinstruction_callissuer = _buildOid(holdInstruction, 2)
+
+id_ce_subjectDirectoryAttributes = _buildOid(id_ce, 9)
+
+id_ce_issuingDistributionPoint = _buildOid(id_ce, 28)
+
+
+class DistributionPoint(univ.Sequence):
+ pass
+
+
+DistributionPoint.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('reasons', ReasonFlags().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('cRLIssuer', GeneralNames().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+)
+
+
+class CRLDistributionPoints(univ.SequenceOf):
+ pass
+
+
+CRLDistributionPoints.componentType = DistributionPoint()
+CRLDistributionPoints.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class GeneralSubtrees(univ.SequenceOf):
+ pass
+
+
+GeneralSubtrees.componentType = GeneralSubtree()
+GeneralSubtrees.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class NameConstraints(univ.Sequence):
+ pass
+
+
+NameConstraints.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('permittedSubtrees', GeneralSubtrees().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('excludedSubtrees', GeneralSubtrees().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class SubjectDirectoryAttributes(univ.SequenceOf):
+ pass
+
+
+SubjectDirectoryAttributes.componentType = Attribute()
+SubjectDirectoryAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+id_kp_OCSPSigning = _buildOid(id_kp, 9)
+
+id_kp_timeStamping = _buildOid(id_kp, 8)
+
+
+class DisplayText(univ.Choice):
+ pass
+
+
+DisplayText.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('ia5String', char.IA5String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
+ namedtype.NamedType('visibleString',
+ char.VisibleString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200)))
+)
+
+
+class NoticeReference(univ.Sequence):
+ pass
+
+
+NoticeReference.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('organization', DisplayText()),
+ namedtype.NamedType('noticeNumbers', univ.SequenceOf(componentType=univ.Integer()))
+)
+
+
+class UserNotice(univ.Sequence):
+ pass
+
+
+UserNotice.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('noticeRef', NoticeReference()),
+ namedtype.OptionalNamedType('explicitText', DisplayText())
+)
+
+
+class PolicyQualifierId(univ.ObjectIdentifier):
+ pass
+
+
+class PolicyQualifierInfo(univ.Sequence):
+ pass
+
+
+PolicyQualifierInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('policyQualifierId', PolicyQualifierId()),
+ namedtype.NamedType('qualifier', univ.Any())
+)
+
+
+class CertPolicyId(univ.ObjectIdentifier):
+ pass
+
+
+class PolicyInformation(univ.Sequence):
+ pass
+
+
+PolicyInformation.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('policyIdentifier', CertPolicyId()),
+ namedtype.OptionalNamedType('policyQualifiers', univ.SequenceOf(componentType=PolicyQualifierInfo()))
+)
+
+
+class CertificatePolicies(univ.SequenceOf):
+ pass
+
+
+CertificatePolicies.componentType = PolicyInformation()
+CertificatePolicies.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class SubjectAltName(GeneralNames):
+ pass
+
+
+id_ce_basicConstraints = _buildOid(id_ce, 19)
+
+id_ce_authorityKeyIdentifier = _buildOid(id_ce, 35)
+
+id_kp_codeSigning = _buildOid(id_kp, 3)
+
+
+class BasicConstraints(univ.Sequence):
+ pass
+
+
+BasicConstraints.componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('cA', univ.Boolean().subtype(value=0)),
+ namedtype.OptionalNamedType('pathLenConstraint',
+ univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX)))
+)
+
+id_ce_certificateIssuer = _buildOid(id_ce, 29)
+
+
+class PolicyMappings(univ.SequenceOf):
+ pass
+
+
+PolicyMappings.componentType = univ.Sequence(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('issuerDomainPolicy', CertPolicyId()),
+ namedtype.NamedType('subjectDomainPolicy', CertPolicyId())
+ )
+)
+
+PolicyMappings.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class InhibitAnyPolicy(SkipCerts):
+ pass
+
+
+anyPolicy = _buildOid(id_ce_certificatePolicies, 0)
+
+
+class CRLNumber(univ.Integer):
+ pass
+
+
+CRLNumber.subtypeSpec = constraint.ValueRangeConstraint(0, MAX)
+
+
+class BaseCRLNumber(CRLNumber):
+ pass
+
+
+id_ce_nameConstraints = _buildOid(id_ce, 30)
+
+id_kp_serverAuth = _buildOid(id_kp, 1)
+
+id_ce_freshestCRL = _buildOid(id_ce, 46)
+
+id_ce_cRLReasons = _buildOid(id_ce, 21)
+
+id_ce_extKeyUsage = _buildOid(id_ce, 37)
+
+
+class KeyIdentifier(univ.OctetString):
+ pass
+
+
+class AuthorityKeyIdentifier(univ.Sequence):
+ pass
+
+
+AuthorityKeyIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('keyIdentifier', KeyIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('authorityCertIssuer', GeneralNames().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('authorityCertSerialNumber', CertificateSerialNumber().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+)
+
+
+class FreshestCRL(CRLDistributionPoints):
+ pass
+
+
+id_ce_policyConstraints = _buildOid(id_ce, 36)
+
+id_pe_authorityInfoAccess = _buildOid(id_pe, 1)
+
+
+class AuthorityInfoAccessSyntax(univ.SequenceOf):
+ pass
+
+
+AuthorityInfoAccessSyntax.componentType = AccessDescription()
+AuthorityInfoAccessSyntax.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+id_holdinstruction_none = _buildOid(holdInstruction, 1)
+
+
+class CPSuri(char.IA5String):
+ pass
+
+
+id_pe_subjectInfoAccess = _buildOid(id_pe, 11)
+
+
+class SubjectKeyIdentifier(KeyIdentifier):
+ pass
+
+
+id_ce_subjectAltName = _buildOid(id_ce, 17)
+
+
+class KeyPurposeId(univ.ObjectIdentifier):
+ pass
+
+
+class ExtKeyUsageSyntax(univ.SequenceOf):
+ pass
+
+
+ExtKeyUsageSyntax.componentType = KeyPurposeId()
+ExtKeyUsageSyntax.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class HoldInstructionCode(univ.ObjectIdentifier):
+ pass
+
+
+id_ce_deltaCRLIndicator = _buildOid(id_ce, 27)
+
+id_ce_keyUsage = _buildOid(id_ce, 15)
+
+id_ce_holdInstructionCode = _buildOid(id_ce, 23)
+
+
+class SubjectInfoAccessSyntax(univ.SequenceOf):
+ pass
+
+
+SubjectInfoAccessSyntax.componentType = AccessDescription()
+SubjectInfoAccessSyntax.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class InvalidityDate(useful.GeneralizedTime):
+ pass
+
+
+class KeyUsage(univ.BitString):
+ pass
+
+
+KeyUsage.namedValues = namedval.NamedValues(
+ ('digitalSignature', 0),
+ ('nonRepudiation', 1),
+ ('keyEncipherment', 2),
+ ('dataEncipherment', 3),
+ ('keyAgreement', 4),
+ ('keyCertSign', 5),
+ ('cRLSign', 6),
+ ('encipherOnly', 7),
+ ('decipherOnly', 8)
+)
+
+id_ce_invalidityDate = _buildOid(id_ce, 24)
+
+id_ce_policyMappings = _buildOid(id_ce, 33)
+
+anyExtendedKeyUsage = _buildOid(id_ce_extKeyUsage, 0)
+
+id_ce_privateKeyUsagePeriod = _buildOid(id_ce, 16)
+
+id_ce_cRLNumber = _buildOid(id_ce, 20)
+
+
+class CertificateIssuer(GeneralNames):
+ pass
+
+
+id_holdinstruction_reject = _buildOid(holdInstruction, 3)
+
+
+class PolicyConstraints(univ.Sequence):
+ pass
+
+
+PolicyConstraints.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('requireExplicitPolicy',
+ SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('inhibitPolicyMapping',
+ SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+id_kp_clientAuth = _buildOid(id_kp, 2)
+
+id_ce_subjectKeyIdentifier = _buildOid(id_ce, 14)
+
+id_ce_inhibitAnyPolicy = _buildOid(id_ce, 54)
+
+# map of AttributeType -> AttributeValue
+
+certificateAttributesMap = {
+ id_at_name: X520name(),
+ id_at_surname: X520name(),
+ id_at_givenName: X520name(),
+ id_at_initials: X520name(),
+ id_at_generationQualifier: X520name(),
+ id_at_commonName: X520CommonName(),
+ id_at_localityName: X520LocalityName(),
+ id_at_stateOrProvinceName: X520StateOrProvinceName(),
+ id_at_organizationName: X520OrganizationName(),
+ id_at_organizationalUnitName: X520OrganizationalUnitName(),
+ id_at_title: X520Title(),
+ id_at_dnQualifier: X520dnQualifier(),
+ id_at_countryName: X520countryName(),
+ id_at_serialNumber: X520SerialNumber(),
+ id_at_pseudonym: X520Pseudonym(),
+ id_domainComponent: DomainComponent(),
+ id_emailAddress: EmailAddress(),
+}
+
+# map of Certificate Extension OIDs to Extensions
+
+certificateExtensionsMap = {
+ id_ce_authorityKeyIdentifier: AuthorityKeyIdentifier(),
+ id_ce_subjectKeyIdentifier: SubjectKeyIdentifier(),
+ id_ce_keyUsage: KeyUsage(),
+ id_ce_privateKeyUsagePeriod: PrivateKeyUsagePeriod(),
+    id_ce_certificatePolicies: PolicyInformation(), # RFC 5280 defines this extension as CertificatePolicies, a SEQUENCE OF PolicyInformation
+ id_ce_policyMappings: PolicyMappings(),
+ id_ce_subjectAltName: SubjectAltName(),
+ id_ce_issuerAltName: IssuerAltName(),
+ id_ce_subjectDirectoryAttributes: SubjectDirectoryAttributes(),
+ id_ce_basicConstraints: BasicConstraints(),
+ id_ce_nameConstraints: NameConstraints(),
+ id_ce_policyConstraints: PolicyConstraints(),
+ id_ce_extKeyUsage: ExtKeyUsageSyntax(),
+ id_ce_cRLDistributionPoints: CRLDistributionPoints(),
+ id_pe_authorityInfoAccess: AuthorityInfoAccessSyntax(),
+ id_ce_cRLNumber: univ.Integer(),
+ id_ce_deltaCRLIndicator: BaseCRLNumber(),
+ id_ce_issuingDistributionPoint: IssuingDistributionPoint(),
+ id_ce_cRLReasons: CRLReason(),
+ id_ce_holdInstructionCode: univ.ObjectIdentifier(),
+ id_ce_invalidityDate: useful.GeneralizedTime(),
+ id_ce_certificateIssuer: GeneralNames(),
+}
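
The certificateAttributesMap and certificateExtensionsMap tables above are the intended entry points for decoding RDN attribute values and extension payloads. A minimal usage sketch follows; it is an editorial illustration rather than part of the upstream file, it assumes a certificate already decoded with rfc5280.Certificate(), and the helper name decode_extensions is purely illustrative.

    from pyasn1.codec.der import decoder as der_decoder
    from pyasn1_modules import rfc5280

    def decode_extensions(cert):
        """Decode every known extension payload of a decoded rfc5280.Certificate."""
        decoded = {}
        extensions = cert['tbsCertificate']['extensions']
        if not extensions.isValue:  # extensions are OPTIONAL (absent in v1 certificates)
            return decoded
        for extension in extensions:
            spec = rfc5280.certificateExtensionsMap.get(extension['extnID'])
            if spec is None:
                continue  # OID not in the map: leave the raw OCTET STRING alone
            # extnValue is an OCTET STRING wrapping the DER-encoded extension value
            value, rest = der_decoder.decode(extension['extnValue'].asOctets(), asn1Spec=spec)
            assert not rest
            decoded[extension['extnID']] = value
        return decoded
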
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc5652.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc5652.py
new file mode 100644
index 0000000000..5fd5b79a93
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc5652.py
@@ -0,0 +1,706 @@
+# coding: utf-8
+#
+# This file is part of pyasn1-modules software.
+#
+# Created by Stanisław Pitucha with the asn1ate tool.
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Cryptographic Message Syntax (CMS)
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc5652.txt
+#
+from pyasn1.type import constraint
+from pyasn1.type import namedtype
+from pyasn1.type import namedval
+from pyasn1.type import tag
+from pyasn1.type import univ
+from pyasn1.type import useful
+
+from pyasn1_modules import rfc3281
+from pyasn1_modules import rfc5280
+
+MAX = float('inf')
+
+
+def _buildOid(*components):
+ output = []
+ for x in tuple(components):
+ if isinstance(x, univ.ObjectIdentifier):
+ output.extend(list(x))
+ else:
+ output.append(int(x))
+
+ return univ.ObjectIdentifier(output)
+
+
+class AttCertVersionV1(univ.Integer):
+ pass
+
+
+AttCertVersionV1.namedValues = namedval.NamedValues(
+ ('v1', 0)
+)
+
+
+class AttributeCertificateInfoV1(univ.Sequence):
+ pass
+
+
+AttributeCertificateInfoV1.componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('version', AttCertVersionV1().subtype(value="v1")),
+ namedtype.NamedType(
+ 'subject', univ.Choice(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('baseCertificateID', rfc3281.IssuerSerial().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('subjectName', rfc5280.GeneralNames().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+ )
+ ),
+ namedtype.NamedType('issuer', rfc5280.GeneralNames()),
+ namedtype.NamedType('signature', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('serialNumber', rfc5280.CertificateSerialNumber()),
+ namedtype.NamedType('attCertValidityPeriod', rfc3281.AttCertValidityPeriod()),
+ namedtype.NamedType('attributes', univ.SequenceOf(componentType=rfc5280.Attribute())),
+ namedtype.OptionalNamedType('issuerUniqueID', rfc5280.UniqueIdentifier()),
+ namedtype.OptionalNamedType('extensions', rfc5280.Extensions())
+)
+
+
+class AttributeCertificateV1(univ.Sequence):
+ pass
+
+
+AttributeCertificateV1.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('acInfo', AttributeCertificateInfoV1()),
+ namedtype.NamedType('signatureAlgorithm', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+)
+
+
+class AttributeValue(univ.Any):
+ pass
+
+
+class Attribute(univ.Sequence):
+ pass
+
+
+Attribute.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('attrType', univ.ObjectIdentifier()),
+ namedtype.NamedType('attrValues', univ.SetOf(componentType=AttributeValue()))
+)
+
+
+class SignedAttributes(univ.SetOf):
+ pass
+
+
+SignedAttributes.componentType = Attribute()
+SignedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class AttributeCertificateV2(rfc3281.AttributeCertificate):
+ pass
+
+
+class OtherKeyAttribute(univ.Sequence):
+ pass
+
+
+OtherKeyAttribute.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('keyAttrId', univ.ObjectIdentifier()),
+ namedtype.OptionalNamedType('keyAttr', univ.Any())
+)
+
+
+class UnauthAttributes(univ.SetOf):
+ pass
+
+
+UnauthAttributes.componentType = Attribute()
+UnauthAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+id_encryptedData = _buildOid(1, 2, 840, 113549, 1, 7, 6)
+
+
+class SignatureValue(univ.OctetString):
+ pass
+
+
+class IssuerAndSerialNumber(univ.Sequence):
+ pass
+
+
+IssuerAndSerialNumber.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuer', rfc5280.Name()),
+ namedtype.NamedType('serialNumber', rfc5280.CertificateSerialNumber())
+)
+
+
+class SubjectKeyIdentifier(univ.OctetString):
+ pass
+
+
+class RecipientKeyIdentifier(univ.Sequence):
+ pass
+
+
+RecipientKeyIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('subjectKeyIdentifier', SubjectKeyIdentifier()),
+ namedtype.OptionalNamedType('date', useful.GeneralizedTime()),
+ namedtype.OptionalNamedType('other', OtherKeyAttribute())
+)
+
+
+class KeyAgreeRecipientIdentifier(univ.Choice):
+ pass
+
+
+KeyAgreeRecipientIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
+ namedtype.NamedType('rKeyId', RecipientKeyIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+)
+
+
+class EncryptedKey(univ.OctetString):
+ pass
+
+
+class RecipientEncryptedKey(univ.Sequence):
+ pass
+
+
+RecipientEncryptedKey.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('rid', KeyAgreeRecipientIdentifier()),
+ namedtype.NamedType('encryptedKey', EncryptedKey())
+)
+
+
+class RecipientEncryptedKeys(univ.SequenceOf):
+ pass
+
+
+RecipientEncryptedKeys.componentType = RecipientEncryptedKey()
+
+
+class MessageAuthenticationCode(univ.OctetString):
+ pass
+
+
+class CMSVersion(univ.Integer):
+ pass
+
+
+CMSVersion.namedValues = namedval.NamedValues(
+ ('v0', 0),
+ ('v1', 1),
+ ('v2', 2),
+ ('v3', 3),
+ ('v4', 4),
+ ('v5', 5)
+)
+
+
+class OtherCertificateFormat(univ.Sequence):
+ pass
+
+
+OtherCertificateFormat.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('otherCertFormat', univ.ObjectIdentifier()),
+ namedtype.NamedType('otherCert', univ.Any())
+)
+
+
+class ExtendedCertificateInfo(univ.Sequence):
+ pass
+
+
+ExtendedCertificateInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('certificate', rfc5280.Certificate()),
+ namedtype.NamedType('attributes', UnauthAttributes())
+)
+
+
+class Signature(univ.BitString):
+ pass
+
+
+class SignatureAlgorithmIdentifier(rfc5280.AlgorithmIdentifier):
+ pass
+
+
+class ExtendedCertificate(univ.Sequence):
+ pass
+
+
+ExtendedCertificate.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extendedCertificateInfo', ExtendedCertificateInfo()),
+ namedtype.NamedType('signatureAlgorithm', SignatureAlgorithmIdentifier()),
+ namedtype.NamedType('signature', Signature())
+)
+
+
+class CertificateChoices(univ.Choice):
+ pass
+
+
+CertificateChoices.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certificate', rfc5280.Certificate()),
+ namedtype.NamedType('extendedCertificate', ExtendedCertificate().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('v1AttrCert', AttributeCertificateV1().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('v2AttrCert', AttributeCertificateV2().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('other', OtherCertificateFormat().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3)))
+)
+
+
+class CertificateSet(univ.SetOf):
+ pass
+
+
+CertificateSet.componentType = CertificateChoices()
+
+
+class OtherRevocationInfoFormat(univ.Sequence):
+ pass
+
+
+OtherRevocationInfoFormat.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('otherRevInfoFormat', univ.ObjectIdentifier()),
+ namedtype.NamedType('otherRevInfo', univ.Any())
+)
+
+
+class RevocationInfoChoice(univ.Choice):
+ pass
+
+
+RevocationInfoChoice.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('crl', rfc5280.CertificateList()),
+ namedtype.NamedType('other', OtherRevocationInfoFormat().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+)
+
+
+class RevocationInfoChoices(univ.SetOf):
+ pass
+
+
+RevocationInfoChoices.componentType = RevocationInfoChoice()
+
+
+class OriginatorInfo(univ.Sequence):
+ pass
+
+
+OriginatorInfo.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('certs', CertificateSet().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('crls', RevocationInfoChoices().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class ContentType(univ.ObjectIdentifier):
+ pass
+
+
+class EncryptedContent(univ.OctetString):
+ pass
+
+
+class ContentEncryptionAlgorithmIdentifier(rfc5280.AlgorithmIdentifier):
+ pass
+
+
+class EncryptedContentInfo(univ.Sequence):
+ pass
+
+
+EncryptedContentInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('contentType', ContentType()),
+ namedtype.NamedType('contentEncryptionAlgorithm', ContentEncryptionAlgorithmIdentifier()),
+ namedtype.OptionalNamedType('encryptedContent', EncryptedContent().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class UnprotectedAttributes(univ.SetOf):
+ pass
+
+
+UnprotectedAttributes.componentType = Attribute()
+UnprotectedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class KeyEncryptionAlgorithmIdentifier(rfc5280.AlgorithmIdentifier):
+ pass
+
+
+class KEKIdentifier(univ.Sequence):
+ pass
+
+
+KEKIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('keyIdentifier', univ.OctetString()),
+ namedtype.OptionalNamedType('date', useful.GeneralizedTime()),
+ namedtype.OptionalNamedType('other', OtherKeyAttribute())
+)
+
+
+class KEKRecipientInfo(univ.Sequence):
+ pass
+
+
+KEKRecipientInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('kekid', KEKIdentifier()),
+ namedtype.NamedType('keyEncryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()),
+ namedtype.NamedType('encryptedKey', EncryptedKey())
+)
+
+
+class KeyDerivationAlgorithmIdentifier(rfc5280.AlgorithmIdentifier):
+ pass
+
+
+class PasswordRecipientInfo(univ.Sequence):
+ pass
+
+
+PasswordRecipientInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.OptionalNamedType('keyDerivationAlgorithm', KeyDerivationAlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('keyEncryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()),
+ namedtype.NamedType('encryptedKey', EncryptedKey())
+)
+
+
+class RecipientIdentifier(univ.Choice):
+ pass
+
+
+RecipientIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
+ namedtype.NamedType('subjectKeyIdentifier', SubjectKeyIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class KeyTransRecipientInfo(univ.Sequence):
+ pass
+
+
+KeyTransRecipientInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('rid', RecipientIdentifier()),
+ namedtype.NamedType('keyEncryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()),
+ namedtype.NamedType('encryptedKey', EncryptedKey())
+)
+
+
+class UserKeyingMaterial(univ.OctetString):
+ pass
+
+
+class OriginatorPublicKey(univ.Sequence):
+ pass
+
+
+OriginatorPublicKey.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algorithm', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('publicKey', univ.BitString())
+)
+
+
+class OriginatorIdentifierOrKey(univ.Choice):
+ pass
+
+
+OriginatorIdentifierOrKey.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
+ namedtype.NamedType('subjectKeyIdentifier', SubjectKeyIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('originatorKey', OriginatorPublicKey().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+)
+
+
+class KeyAgreeRecipientInfo(univ.Sequence):
+ pass
+
+
+KeyAgreeRecipientInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('originator', OriginatorIdentifierOrKey().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('ukm', UserKeyingMaterial().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('keyEncryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()),
+ namedtype.NamedType('recipientEncryptedKeys', RecipientEncryptedKeys())
+)
+
+
+class OtherRecipientInfo(univ.Sequence):
+ pass
+
+
+OtherRecipientInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('oriType', univ.ObjectIdentifier()),
+ namedtype.NamedType('oriValue', univ.Any())
+)
+
+
+class RecipientInfo(univ.Choice):
+ pass
+
+
+RecipientInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('ktri', KeyTransRecipientInfo()),
+ namedtype.NamedType('kari', KeyAgreeRecipientInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('kekri', KEKRecipientInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.NamedType('pwri', PasswordRecipientInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.NamedType('ori', OtherRecipientInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4)))
+)
+
+
+class RecipientInfos(univ.SetOf):
+ pass
+
+
+RecipientInfos.componentType = RecipientInfo()
+RecipientInfos.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class EnvelopedData(univ.Sequence):
+ pass
+
+
+EnvelopedData.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.OptionalNamedType('originatorInfo', OriginatorInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('recipientInfos', RecipientInfos()),
+ namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo()),
+ namedtype.OptionalNamedType('unprotectedAttrs', UnprotectedAttributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class DigestAlgorithmIdentifier(rfc5280.AlgorithmIdentifier):
+ pass
+
+
+id_ct_contentInfo = _buildOid(1, 2, 840, 113549, 1, 9, 16, 1, 6)
+
+id_digestedData = _buildOid(1, 2, 840, 113549, 1, 7, 5)
+
+
+class EncryptedData(univ.Sequence):
+ pass
+
+
+EncryptedData.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo()),
+ namedtype.OptionalNamedType('unprotectedAttrs', UnprotectedAttributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+id_messageDigest = _buildOid(1, 2, 840, 113549, 1, 9, 4)
+
+id_signedData = _buildOid(1, 2, 840, 113549, 1, 7, 2)
+
+
+class MessageAuthenticationCodeAlgorithm(rfc5280.AlgorithmIdentifier):
+ pass
+
+
+class UnsignedAttributes(univ.SetOf):
+ pass
+
+
+UnsignedAttributes.componentType = Attribute()
+UnsignedAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class SignerIdentifier(univ.Choice):
+ pass
+
+
+SignerIdentifier.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
+ namedtype.NamedType('subjectKeyIdentifier', SubjectKeyIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class SignerInfo(univ.Sequence):
+ pass
+
+
+SignerInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('sid', SignerIdentifier()),
+ namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()),
+ namedtype.OptionalNamedType('signedAttrs', SignedAttributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('signatureAlgorithm', SignatureAlgorithmIdentifier()),
+ namedtype.NamedType('signature', SignatureValue()),
+ namedtype.OptionalNamedType('unsignedAttrs', UnsignedAttributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+)
+
+
+class SignerInfos(univ.SetOf):
+ pass
+
+
+SignerInfos.componentType = SignerInfo()
+
+
+class Countersignature(SignerInfo):
+ pass
+
+
+class ContentInfo(univ.Sequence):
+ pass
+
+
+ContentInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('contentType', ContentType()),
+ namedtype.NamedType('content', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+
+class EncapsulatedContentInfo(univ.Sequence):
+ pass
+
+
+EncapsulatedContentInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('eContentType', ContentType()),
+ namedtype.OptionalNamedType('eContent', univ.OctetString().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+)
+
+id_countersignature = _buildOid(1, 2, 840, 113549, 1, 9, 6)
+
+id_data = _buildOid(1, 2, 840, 113549, 1, 7, 1)
+
+
+class MessageDigest(univ.OctetString):
+ pass
+
+
+class AuthAttributes(univ.SetOf):
+ pass
+
+
+AuthAttributes.componentType = Attribute()
+AuthAttributes.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class Time(univ.Choice):
+ pass
+
+
+Time.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('utcTime', useful.UTCTime()),
+ namedtype.NamedType('generalTime', useful.GeneralizedTime())
+)
+
+
+class AuthenticatedData(univ.Sequence):
+ pass
+
+
+AuthenticatedData.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.OptionalNamedType('originatorInfo', OriginatorInfo().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('recipientInfos', RecipientInfos()),
+ namedtype.NamedType('macAlgorithm', MessageAuthenticationCodeAlgorithm()),
+ namedtype.OptionalNamedType('digestAlgorithm', DigestAlgorithmIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('encapContentInfo', EncapsulatedContentInfo()),
+ namedtype.OptionalNamedType('authAttrs', AuthAttributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('mac', MessageAuthenticationCode()),
+ namedtype.OptionalNamedType('unauthAttrs', UnauthAttributes().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+)
+
+id_contentType = _buildOid(1, 2, 840, 113549, 1, 9, 3)
+
+
+class ExtendedCertificateOrCertificate(univ.Choice):
+ pass
+
+
+ExtendedCertificateOrCertificate.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certificate', rfc5280.Certificate()),
+ namedtype.NamedType('extendedCertificate', ExtendedCertificate().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+)
+
+
+class Digest(univ.OctetString):
+ pass
+
+
+class DigestedData(univ.Sequence):
+ pass
+
+
+DigestedData.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()),
+ namedtype.NamedType('encapContentInfo', EncapsulatedContentInfo()),
+ namedtype.NamedType('digest', Digest())
+)
+
+id_envelopedData = _buildOid(1, 2, 840, 113549, 1, 7, 3)
+
+
+class DigestAlgorithmIdentifiers(univ.SetOf):
+ pass
+
+
+DigestAlgorithmIdentifiers.componentType = DigestAlgorithmIdentifier()
+
+
+class SignedData(univ.Sequence):
+ pass
+
+
+SignedData.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', CMSVersion()),
+ namedtype.NamedType('digestAlgorithms', DigestAlgorithmIdentifiers()),
+ namedtype.NamedType('encapContentInfo', EncapsulatedContentInfo()),
+ namedtype.OptionalNamedType('certificates', CertificateSet().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('crls', RevocationInfoChoices().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('signerInfos', SignerInfos())
+)
+
+id_signingTime = _buildOid(1, 2, 840, 113549, 1, 9, 5)
+
+
+class SigningTime(Time):
+ pass
+
+
+id_ct_authData = _buildOid(1, 2, 840, 113549, 1, 9, 16, 1, 2)
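
As a sketch of how the CMS structures above fit together (an editorial illustration, not part of the upstream file, mirroring the pattern used in the test cases later in this patch): decode the outer ContentInfo first, then decode its content field against the spec that matches contentType. The helper name parse_signed_data is illustrative.

    from pyasn1.codec.der import decoder as der_decoder
    from pyasn1_modules import rfc5652

    def parse_signed_data(der_bytes):
        """Decode a DER-encoded CMS blob and return its SignedData payload."""
        content_info, rest = der_decoder.decode(der_bytes, asn1Spec=rfc5652.ContentInfo())
        assert not rest
        if content_info['contentType'] != rfc5652.id_signedData:
            raise ValueError('not a SignedData ContentInfo')
        # The ANY in 'content' carries the DER encoding of the inner structure.
        signed_data, rest = der_decoder.decode(content_info['content'], asn1Spec=rfc5652.SignedData())
        assert not rest
        return signed_data
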
diff --git a/third_party/python/pyasn1-modules/pyasn1_modules/rfc6402.py b/third_party/python/pyasn1-modules/pyasn1_modules/rfc6402.py
new file mode 100644
index 0000000000..c35f855f00
--- /dev/null
+++ b/third_party/python/pyasn1-modules/pyasn1_modules/rfc6402.py
@@ -0,0 +1,561 @@
+# coding: utf-8
+#
+# This file is part of pyasn1-modules software.
+#
+# Created by Stanisław Pitucha with the asn1ate tool.
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Certificate Management over CMS (CMC) Updates
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc6402.txt
+#
+from pyasn1.type import univ, char, namedtype, namedval, tag, constraint, useful
+
+from pyasn1_modules import rfc4211
+from pyasn1_modules import rfc5280
+from pyasn1_modules import rfc5652
+
+MAX = float('inf')
+
+
+def _buildOid(*components):
+ output = []
+ for x in tuple(components):
+ if isinstance(x, univ.ObjectIdentifier):
+ output.extend(list(x))
+ else:
+ output.append(int(x))
+
+ return univ.ObjectIdentifier(output)
+
+
+class ChangeSubjectName(univ.Sequence):
+ pass
+
+
+ChangeSubjectName.componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('subject', rfc5280.Name()),
+ namedtype.OptionalNamedType('subjectAlt', rfc5280.GeneralNames())
+)
+
+
+class AttributeValue(univ.Any):
+ pass
+
+
+class CMCStatus(univ.Integer):
+ pass
+
+
+CMCStatus.namedValues = namedval.NamedValues(
+ ('success', 0),
+ ('failed', 2),
+ ('pending', 3),
+ ('noSupport', 4),
+ ('confirmRequired', 5),
+ ('popRequired', 6),
+ ('partial', 7)
+)
+
+
+class PendInfo(univ.Sequence):
+ pass
+
+
+PendInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('pendToken', univ.OctetString()),
+ namedtype.NamedType('pendTime', useful.GeneralizedTime())
+)
+
+bodyIdMax = univ.Integer(4294967295)
+
+
+class BodyPartID(univ.Integer):
+ pass
+
+
+BodyPartID.subtypeSpec = constraint.ValueRangeConstraint(0, bodyIdMax)
+
+
+class BodyPartPath(univ.SequenceOf):
+ pass
+
+
+BodyPartPath.componentType = BodyPartID()
+BodyPartPath.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+
+class BodyPartReference(univ.Choice):
+ pass
+
+
+BodyPartReference.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('bodyPartID', BodyPartID()),
+ namedtype.NamedType('bodyPartPath', BodyPartPath())
+)
+
+
+class CMCFailInfo(univ.Integer):
+ pass
+
+
+CMCFailInfo.namedValues = namedval.NamedValues(
+ ('badAlg', 0),
+ ('badMessageCheck', 1),
+ ('badRequest', 2),
+ ('badTime', 3),
+ ('badCertId', 4),
+ ('unsupportedExt', 5),
+ ('mustArchiveKeys', 6),
+ ('badIdentity', 7),
+ ('popRequired', 8),
+ ('popFailed', 9),
+ ('noKeyReuse', 10),
+ ('internalCAError', 11),
+ ('tryLater', 12),
+ ('authDataFail', 13)
+)
+
+
+class CMCStatusInfoV2(univ.Sequence):
+ pass
+
+
+CMCStatusInfoV2.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('cMCStatus', CMCStatus()),
+ namedtype.NamedType('bodyList', univ.SequenceOf(componentType=BodyPartReference())),
+ namedtype.OptionalNamedType('statusString', char.UTF8String()),
+ namedtype.OptionalNamedType(
+ 'otherInfo', univ.Choice(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('failInfo', CMCFailInfo()),
+ namedtype.NamedType('pendInfo', PendInfo()),
+ namedtype.NamedType(
+ 'extendedFailInfo', univ.Sequence(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('failInfoOID', univ.ObjectIdentifier()),
+ namedtype.NamedType('failInfoValue', AttributeValue()))
+ )
+ )
+ )
+ )
+ )
+)
+
+
+class GetCRL(univ.Sequence):
+ pass
+
+
+GetCRL.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerName', rfc5280.Name()),
+ namedtype.OptionalNamedType('cRLName', rfc5280.GeneralName()),
+ namedtype.OptionalNamedType('time', useful.GeneralizedTime()),
+ namedtype.OptionalNamedType('reasons', rfc5280.ReasonFlags())
+)
+
+id_pkix = _buildOid(1, 3, 6, 1, 5, 5, 7)
+
+id_cmc = _buildOid(id_pkix, 7)
+
+id_cmc_batchResponses = _buildOid(id_cmc, 29)
+
+id_cmc_popLinkWitness = _buildOid(id_cmc, 23)
+
+
+class PopLinkWitnessV2(univ.Sequence):
+ pass
+
+
+PopLinkWitnessV2.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('keyGenAlgorithm', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('macAlgorithm', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('witness', univ.OctetString())
+)
+
+id_cmc_popLinkWitnessV2 = _buildOid(id_cmc, 33)
+
+id_cmc_identityProofV2 = _buildOid(id_cmc, 34)
+
+id_cmc_revokeRequest = _buildOid(id_cmc, 17)
+
+id_cmc_recipientNonce = _buildOid(id_cmc, 7)
+
+
+class ControlsProcessed(univ.Sequence):
+ pass
+
+
+ControlsProcessed.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('bodyList', univ.SequenceOf(componentType=BodyPartReference()))
+)
+
+
+class CertificationRequest(univ.Sequence):
+ pass
+
+
+CertificationRequest.componentType = namedtype.NamedTypes(
+ namedtype.NamedType(
+ 'certificationRequestInfo', univ.Sequence(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer()),
+ namedtype.NamedType('subject', rfc5280.Name()),
+ namedtype.NamedType(
+ 'subjectPublicKeyInfo', univ.Sequence(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('algorithm', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('subjectPublicKey', univ.BitString())
+ )
+ )
+ ),
+ namedtype.NamedType(
+ 'attributes', univ.SetOf(
+ componentType=rfc5652.Attribute()).subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
+ )
+ )
+ )
+ ),
+ namedtype.NamedType('signatureAlgorithm', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+)
+
+
+class TaggedCertificationRequest(univ.Sequence):
+ pass
+
+
+TaggedCertificationRequest.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('bodyPartID', BodyPartID()),
+ namedtype.NamedType('certificationRequest', CertificationRequest())
+)
+
+
+class TaggedRequest(univ.Choice):
+ pass
+
+
+TaggedRequest.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tcr', TaggedCertificationRequest().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('crm',
+ rfc4211.CertReqMsg().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('orm', univ.Sequence(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('bodyPartID', BodyPartID()),
+ namedtype.NamedType('requestMessageType', univ.ObjectIdentifier()),
+ namedtype.NamedType('requestMessageValue', univ.Any())
+ ))
+ .subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)))
+)
+
+id_cmc_popLinkRandom = _buildOid(id_cmc, 22)
+
+id_cmc_statusInfo = _buildOid(id_cmc, 1)
+
+id_cmc_trustedAnchors = _buildOid(id_cmc, 26)
+
+id_cmc_transactionId = _buildOid(id_cmc, 5)
+
+id_cmc_encryptedPOP = _buildOid(id_cmc, 9)
+
+
+class PublishTrustAnchors(univ.Sequence):
+ pass
+
+
+PublishTrustAnchors.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('seqNumber', univ.Integer()),
+ namedtype.NamedType('hashAlgorithm', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('anchorHashes', univ.SequenceOf(componentType=univ.OctetString()))
+)
+
+
+class RevokeRequest(univ.Sequence):
+ pass
+
+
+RevokeRequest.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerName', rfc5280.Name()),
+ namedtype.NamedType('serialNumber', univ.Integer()),
+ namedtype.NamedType('reason', rfc5280.CRLReason()),
+ namedtype.OptionalNamedType('invalidityDate', useful.GeneralizedTime()),
+ namedtype.OptionalNamedType('passphrase', univ.OctetString()),
+ namedtype.OptionalNamedType('comment', char.UTF8String())
+)
+
+id_cmc_senderNonce = _buildOid(id_cmc, 6)
+
+id_cmc_authData = _buildOid(id_cmc, 27)
+
+
+class TaggedContentInfo(univ.Sequence):
+ pass
+
+
+TaggedContentInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('bodyPartID', BodyPartID()),
+ namedtype.NamedType('contentInfo', rfc5652.ContentInfo())
+)
+
+
+class IdentifyProofV2(univ.Sequence):
+ pass
+
+
+IdentifyProofV2.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('proofAlgID', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('macAlgId', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('witness', univ.OctetString())
+)
+
+
+class CMCPublicationInfo(univ.Sequence):
+ pass
+
+
+CMCPublicationInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('hashAlg', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('certHashes', univ.SequenceOf(componentType=univ.OctetString())),
+ namedtype.NamedType('pubInfo', rfc4211.PKIPublicationInfo())
+)
+
+id_kp_cmcCA = _buildOid(rfc5280.id_kp, 27)
+
+id_cmc_confirmCertAcceptance = _buildOid(id_cmc, 24)
+
+id_cmc_raIdentityWitness = _buildOid(id_cmc, 35)
+
+id_ExtensionReq = _buildOid(1, 2, 840, 113549, 1, 9, 14)
+
+id_cct = _buildOid(id_pkix, 12)
+
+id_cct_PKIData = _buildOid(id_cct, 2)
+
+id_kp_cmcRA = _buildOid(rfc5280.id_kp, 28)
+
+
+class CMCStatusInfo(univ.Sequence):
+ pass
+
+
+CMCStatusInfo.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('cMCStatus', CMCStatus()),
+ namedtype.NamedType('bodyList', univ.SequenceOf(componentType=BodyPartID())),
+ namedtype.OptionalNamedType('statusString', char.UTF8String()),
+ namedtype.OptionalNamedType(
+ 'otherInfo', univ.Choice(
+ componentType=namedtype.NamedTypes(
+ namedtype.NamedType('failInfo', CMCFailInfo()),
+ namedtype.NamedType('pendInfo', PendInfo())
+ )
+ )
+ )
+)
+
+
+class DecryptedPOP(univ.Sequence):
+ pass
+
+
+DecryptedPOP.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('bodyPartID', BodyPartID()),
+ namedtype.NamedType('thePOPAlgID', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('thePOP', univ.OctetString())
+)
+
+id_cmc_addExtensions = _buildOid(id_cmc, 8)
+
+id_cmc_modCertTemplate = _buildOid(id_cmc, 31)
+
+
+class TaggedAttribute(univ.Sequence):
+ pass
+
+
+TaggedAttribute.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('bodyPartID', BodyPartID()),
+ namedtype.NamedType('attrType', univ.ObjectIdentifier()),
+ namedtype.NamedType('attrValues', univ.SetOf(componentType=AttributeValue()))
+)
+
+
+class OtherMsg(univ.Sequence):
+ pass
+
+
+OtherMsg.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('bodyPartID', BodyPartID()),
+ namedtype.NamedType('otherMsgType', univ.ObjectIdentifier()),
+ namedtype.NamedType('otherMsgValue', univ.Any())
+)
+
+
+class PKIData(univ.Sequence):
+ pass
+
+
+PKIData.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('controlSequence', univ.SequenceOf(componentType=TaggedAttribute())),
+ namedtype.NamedType('reqSequence', univ.SequenceOf(componentType=TaggedRequest())),
+ namedtype.NamedType('cmsSequence', univ.SequenceOf(componentType=TaggedContentInfo())),
+ namedtype.NamedType('otherMsgSequence', univ.SequenceOf(componentType=OtherMsg()))
+)
+
+
+class BodyPartList(univ.SequenceOf):
+ pass
+
+
+BodyPartList.componentType = BodyPartID()
+BodyPartList.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+id_cmc_responseBody = _buildOid(id_cmc, 37)
+
+
+class AuthPublish(BodyPartID):
+ pass
+
+
+class CMCUnsignedData(univ.Sequence):
+ pass
+
+
+CMCUnsignedData.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('bodyPartPath', BodyPartPath()),
+ namedtype.NamedType('identifier', univ.ObjectIdentifier()),
+ namedtype.NamedType('content', univ.Any())
+)
+
+
+class CMCCertId(rfc5652.IssuerAndSerialNumber):
+ pass
+
+
+class PKIResponse(univ.Sequence):
+ pass
+
+
+PKIResponse.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('controlSequence', univ.SequenceOf(componentType=TaggedAttribute())),
+ namedtype.NamedType('cmsSequence', univ.SequenceOf(componentType=TaggedContentInfo())),
+ namedtype.NamedType('otherMsgSequence', univ.SequenceOf(componentType=OtherMsg()))
+)
+
+
+class ResponseBody(PKIResponse):
+ pass
+
+
+id_cmc_statusInfoV2 = _buildOid(id_cmc, 25)
+
+id_cmc_lraPOPWitness = _buildOid(id_cmc, 11)
+
+
+class ModCertTemplate(univ.Sequence):
+ pass
+
+
+ModCertTemplate.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('pkiDataReference', BodyPartPath()),
+ namedtype.NamedType('certReferences', BodyPartList()),
+ namedtype.DefaultedNamedType('replace', univ.Boolean().subtype(value=1)),
+ namedtype.NamedType('certTemplate', rfc4211.CertTemplate())
+)
+
+id_cmc_regInfo = _buildOid(id_cmc, 18)
+
+id_cmc_identityProof = _buildOid(id_cmc, 3)
+
+
+class ExtensionReq(univ.SequenceOf):
+ pass
+
+
+ExtensionReq.componentType = rfc5280.Extension()
+ExtensionReq.subtypeSpec = constraint.ValueSizeConstraint(1, MAX)
+
+id_kp_cmcArchive = _buildOid(rfc5280.id_kp, 28)
+
+id_cmc_publishCert = _buildOid(id_cmc, 30)
+
+id_cmc_dataReturn = _buildOid(id_cmc, 4)
+
+
+class LraPopWitness(univ.Sequence):
+ pass
+
+
+LraPopWitness.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('pkiDataBodyid', BodyPartID()),
+ namedtype.NamedType('bodyIds', univ.SequenceOf(componentType=BodyPartID()))
+)
+
+id_aa = _buildOid(1, 2, 840, 113549, 1, 9, 16, 2)
+
+id_aa_cmc_unsignedData = _buildOid(id_aa, 34)
+
+id_cmc_getCert = _buildOid(id_cmc, 15)
+
+id_cmc_batchRequests = _buildOid(id_cmc, 28)
+
+id_cmc_decryptedPOP = _buildOid(id_cmc, 10)
+
+id_cmc_responseInfo = _buildOid(id_cmc, 19)
+
+id_cmc_changeSubjectName = _buildOid(id_cmc, 36)
+
+
+class GetCert(univ.Sequence):
+ pass
+
+
+GetCert.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerName', rfc5280.GeneralName()),
+ namedtype.NamedType('serialNumber', univ.Integer())
+)
+
+id_cmc_identification = _buildOid(id_cmc, 2)
+
+id_cmc_queryPending = _buildOid(id_cmc, 21)
+
+
+class AddExtensions(univ.Sequence):
+ pass
+
+
+AddExtensions.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('pkiDataReference', BodyPartID()),
+ namedtype.NamedType('certReferences', univ.SequenceOf(componentType=BodyPartID())),
+ namedtype.NamedType('extensions', univ.SequenceOf(componentType=rfc5280.Extension()))
+)
+
+
+class EncryptedPOP(univ.Sequence):
+ pass
+
+
+EncryptedPOP.componentType = namedtype.NamedTypes(
+ namedtype.NamedType('request', TaggedRequest()),
+ namedtype.NamedType('cms', rfc5652.ContentInfo()),
+ namedtype.NamedType('thePOPAlgID', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('witnessAlgID', rfc5280.AlgorithmIdentifier()),
+ namedtype.NamedType('witness', univ.OctetString())
+)
+
+id_cmc_getCRL = _buildOid(id_cmc, 16)
+
+id_cct_PKIResponse = _buildOid(id_cct, 3)
+
+id_cmc_controlProcessed = _buildOid(id_cmc, 32)
+
+
+class NoSignatureValue(univ.OctetString):
+ pass
+
+
+id_ad_cmc = _buildOid(rfc5280.id_ad, 12)
+
+id_alg_noSignature = _buildOid(id_pkix, 6, 2)
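
In CMC, a full PKI request is a PKIData structure, typically carried as the eContent of a CMS SignedData whose eContentType is id-cct-PKIData. The sketch below is an editorial illustration (not part of the upstream file) of how the types and OIDs defined above combine with rfc5652; the helper name extract_pki_data is illustrative.

    from pyasn1.codec.der import decoder as der_decoder
    from pyasn1_modules import rfc6402

    def extract_pki_data(signed_data):
        """Pull the CMC PKIData out of an already decoded rfc5652.SignedData."""
        encap = signed_data['encapContentInfo']
        if encap['eContentType'] != rfc6402.id_cct_PKIData:
            raise ValueError('encapContentInfo does not carry PKIData')
        if not encap['eContent'].isValue:  # eContent is OPTIONAL (detached content)
            raise ValueError('eContent is absent')
        # eContent is an OCTET STRING holding the DER-encoded PKIData
        pki_data, rest = der_decoder.decode(encap['eContent'].asOctets(), asn1Spec=rfc6402.PKIData())
        assert not rest
        return pki_data
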
diff --git a/third_party/python/pyasn1-modules/requirements.txt b/third_party/python/pyasn1-modules/requirements.txt
new file mode 100644
index 0000000000..01d237c82f
--- /dev/null
+++ b/third_party/python/pyasn1-modules/requirements.txt
@@ -0,0 +1 @@
+pyasn1>=0.3.4,<0.4.0
diff --git a/third_party/python/pyasn1-modules/setup.cfg b/third_party/python/pyasn1-modules/setup.cfg
new file mode 100644
index 0000000000..6f08d0e3e7
--- /dev/null
+++ b/third_party/python/pyasn1-modules/setup.cfg
@@ -0,0 +1,8 @@
+[bdist_wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/third_party/python/pyasn1-modules/setup.py b/third_party/python/pyasn1-modules/setup.py
new file mode 100644
index 0000000000..4d1a6d3d34
--- /dev/null
+++ b/third_party/python/pyasn1-modules/setup.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+import sys
+
+doclines = """A collection of ASN.1-based protocols modules.
+
+ A collection of ASN.1 modules expressed in form of pyasn1 classes.
+ Includes protocols PDUs definition (SNMP, LDAP etc.) and various
+ data structures (X.509, PKCS etc.).
+"""
+
+doclines = [x.strip() for x in doclines.split('\n') if x]
+
+
+classifiers = """\
+Development Status :: 5 - Production/Stable
+Environment :: Console
+Intended Audience :: Developers
+Intended Audience :: Education
+Intended Audience :: Information Technology
+Intended Audience :: System Administrators
+Intended Audience :: Telecommunications Industry
+License :: OSI Approved :: BSD License
+Natural Language :: English
+Operating System :: OS Independent
+Programming Language :: Python :: 2
+Programming Language :: Python :: 2.4
+Programming Language :: Python :: 2.5
+Programming Language :: Python :: 2.6
+Programming Language :: Python :: 2.7
+Programming Language :: Python :: 3
+Programming Language :: Python :: 3.2
+Programming Language :: Python :: 3.3
+Programming Language :: Python :: 3.4
+Programming Language :: Python :: 3.5
+Programming Language :: Python :: 3.6
+Topic :: Communications
+Topic :: System :: Monitoring
+Topic :: System :: Networking :: Monitoring
+Topic :: Software Development :: Libraries :: Python Modules
+"""
+
+
+def howto_install_setuptools():
+ print("""
+    Error: You need the setuptools Python package!
+
+    It's very easy to install; just type (as root on Linux):
+
+    wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py
+    python ez_setup.py
+
+    Then you can make eggs from this package.
+""")
+
+
+if sys.version_info[:2] < (2, 4):
+ print("ERROR: this package requires Python 2.4 or later!")
+ sys.exit(1)
+
+try:
+ from setuptools import setup, Command
+
+ params = {
+ 'zip_safe': True,
+ 'install_requires': ['pyasn1>=0.3.4,<0.4.0']
+ }
+
+except ImportError:
+ for arg in sys.argv:
+ if 'egg' in arg:
+ howto_install_setuptools()
+ sys.exit(1)
+
+ from distutils.core import setup, Command
+
+ if sys.version_info[:2] > (2, 4):
+ params = {
+ 'requires': ['pyasn1(>=0.3.4,<0.4.0)']
+ }
+ else:
+ params = {
+ 'requires': ['pyasn1']
+ }
+
+params.update(
+ {'name': 'pyasn1-modules',
+ 'version': open('pyasn1_modules/__init__.py').read().split('\'')[1],
+ 'description': doclines[0],
+ 'long_description': ' '.join(doclines[1:]),
+ 'maintainer': 'Ilya Etingof <etingof@gmail.com>',
+ 'author': 'Ilya Etingof',
+ 'author_email': 'etingof@gmail.com',
+ 'url': 'https://github.com/etingof/pyasn1-modules',
+ 'platforms': ['any'],
+ 'classifiers': [x for x in classifiers.split('\n') if x],
+ 'license': 'BSD',
+ 'packages': ['pyasn1_modules']}
+)
+
+
+# handle unittest discovery feature
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+class PyTest(Command):
+ user_options = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ suite = unittest.TestLoader().loadTestsFromNames(
+ ['tests.__main__.suite']
+ )
+
+ unittest.TextTestRunner(verbosity=2).run(suite)
+
+params['cmdclass'] = {
+ 'test': PyTest,
+ 'tests': PyTest
+}
+
+setup(**params)
diff --git a/third_party/python/pyasn1-modules/tests/__init__.py b/third_party/python/pyasn1-modules/tests/__init__.py
new file mode 100644
index 0000000000..8c3066b2e6
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tests/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/third_party/python/pyasn1-modules/tests/__main__.py b/third_party/python/pyasn1-modules/tests/__main__.py
new file mode 100644
index 0000000000..c6377a6273
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tests/__main__.py
@@ -0,0 +1,28 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+try:
+ import unittest2 as unittest
+
+except ImportError:
+ import unittest
+
+suite = unittest.TestLoader().loadTestsFromNames(
+ ['tests.test_rfc2314.suite',
+ 'tests.test_rfc2315.suite',
+ 'tests.test_rfc2437.suite',
+ 'tests.test_rfc2459.suite',
+ 'tests.test_rfc2511.suite',
+ 'tests.test_rfc2560.suite',
+ 'tests.test_rfc4210.suite',
+ 'tests.test_rfc5208.suite',
+ 'tests.test_rfc5280.suite',
+ 'tests.test_rfc5652.suite',]
+)
+
+
+if __name__ == '__main__':
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/third_party/python/pyasn1-modules/tests/test_rfc2314.py b/third_party/python/pyasn1-modules/tests/test_rfc2314.py
new file mode 100644
index 0000000000..6dd5c47f33
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tests/test_rfc2314.py
@@ -0,0 +1,57 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+import sys
+from pyasn1.codec.der import decoder as der_decoder
+from pyasn1.codec.der import encoder as der_encoder
+
+from pyasn1_modules import rfc2314, pem
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+class CertificationRequestTestCase(unittest.TestCase):
+ pem_text = """\
+MIIDATCCAekCAQAwgZkxCzAJBgNVBAYTAlJVMRYwFAYDVQQIEw1Nb3Njb3cgUmVn
+aW9uMQ8wDQYDVQQHEwZNb3Njb3cxGjAYBgNVBAoTEVNOTVAgTGFib3JhdG9yaWVz
+MQwwCgYDVQQLFANSJkQxFTATBgNVBAMTDHNubXBsYWJzLmNvbTEgMB4GCSqGSIb3
+DQEJARYRaW5mb0Bzbm1wbGFicy5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
+ggEKAoIBAQC9n2NfGS98JDBmAXQn+vNUyPB3QPYC1cwpX8UMYh9MdAmBZJCnvXrQ
+Pp14gNAv6AQKxefmGES1b+Yd+1we9HB8AKm1/8xvRDUjAvy4iO0sqFCPvIfSujUy
+pBcfnR7QE2itvyrMxCDSEVnMhKdCNb23L2TptUmpvLcb8wfAMLFsSu2yaOtJysep
+oH/mvGqlRv2ti2+E2YA0M7Pf83wyV1XmuEsc9tQ225rprDk2uyshUglkDD2235rf
+0QyONq3Aw3BMrO9ss1qj7vdDhVHVsxHnTVbEgrxEWkq2GkVKh9QReMZ2AKxe40j4
+og+OjKXguOCggCZHJyXKxccwqCaeCztbAgMBAAGgIjAgBgkqhkiG9w0BCQIxExMR
+U05NUCBMYWJvcmF0b3JpZXMwDQYJKoZIhvcNAQEFBQADggEBAAihbwmN9M2bsNNm
+9KfxqiGMqqcGCtzIlpDz/2NVwY93cEZsbz3Qscc0QpknRmyTSoDwIG+1nUH0vzkT
+Nv8sBmp9I1GdhGg52DIaWwL4t9O5WUHgfHSJpPxZ/zMP2qIsdPJ+8o19BbXRlufc
+73c03H1piGeb9VcePIaulSHI622xukI6f4Sis49vkDaoi+jadbEEb6TYkJQ3AMRD
+WdApGGm0BePdLqboW1Yv70WRRFFD8sxeT7Yw4qrJojdnq0xMHPGfKpf6dJsqWkHk
+b5DRbjil1Zt9pJuF680S9wtBzSi0hsMHXR9TzS7HpMjykL2nmCVY6A78MZapsCzn
+GGbx7DI=
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc2314.CertificationRequest()
+
+ def testDerCodec(self):
+
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
+
+if __name__ == '__main__':
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/third_party/python/pyasn1-modules/tests/test_rfc2315.py b/third_party/python/pyasn1-modules/tests/test_rfc2315.py
new file mode 100644
index 0000000000..b451ed1096
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tests/test_rfc2315.py
@@ -0,0 +1,179 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+import sys
+from pyasn1.codec.der import decoder as der_decoder
+from pyasn1.codec.der import encoder as der_encoder
+
+from pyasn1_modules import rfc2315, pem
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+class Pkcs7TestCase(unittest.TestCase):
+ pem_text_unordered = """\
+MIIKdQYJKoZIhvcNAQcCoIIKZjCCCmICAQExADALBgkqhkiG9w0BBwGgggpIMIIC
+XjCCAcegAwIBAgIBADANBgkqhkiG9w0BAQQFADB1MQswCQYDVQQGEwJSVTEPMA0G
+A1UEBxMGTW9zY293MRcwFQYDVQQKEw5Tb3ZhbSBUZWxlcG9ydDEMMAoGA1UECxMD
+TklTMQ8wDQYDVQQDEwZBQlMgQ0ExHTAbBgkqhkiG9w0BCQEWDmNlcnRAb25saW5l
+LnJ1MB4XDTk5MDgxNTE5MDI1OFoXDTAwMDExMjE5MDI1OFowdTELMAkGA1UEBhMC
+UlUxDzANBgNVBAcTBk1vc2NvdzEXMBUGA1UEChMOU292YW0gVGVsZXBvcnQxDDAK
+BgNVBAsTA05JUzEPMA0GA1UEAxMGQUJTIENBMR0wGwYJKoZIhvcNAQkBFg5jZXJ0
+QG9ubGluZS5ydTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAw0g1P0yQAZIi
+ml2XOCOxnCcuhHmAgj4Ei9M2ebrrGwUMONPzr1a8W7JcpnR3FeOjxEIxrzkHr6UA
+oj4l/oC7Rv28uIig+Okf+82ekhH6VgAQNr5LAzfN8J6dZLx2OXAmmLleAqHuisT7
+I40vEFRoRmC5hiMlILE2rIlIKJn6cUkCAwEAATANBgkqhkiG9w0BAQQFAAOBgQBZ
+7ELDfGUNb+fbpHl5W3d9JMXsdOgd96+HG+X1SPgeiRAMjkla8WFCSaQPIR4vCy0m
+tm5a2bWSji6+vP5FGbjOz5iMlHMrCtu0He7Eim2zpaGI06ZIY75Cn1h2r3+KS0/R
+h01TJUbmsfV1tZm6Wk3bayJ+/K8A4mBHv8P6rhYacDCCAowwggH1oAMCAQICAQAw
+DQYJKoZIhvcNAQEEBQAwgYsxCzAJBgNVBAYTAlJVMQ8wDQYDVQQHEwZNb3Njb3cx
+FzAVBgNVBAoTDkdvbGRlbiBUZWxlY29tMQwwCgYDVQQLEwNST0wxHjAcBgNVBAMT
+FUdvbGRlbiBUZWxlY29tIEFCUyBDQTEkMCIGCSqGSIb3DQEJARYVY2VydEBnb2xk
+ZW50ZWxlY29tLnJ1MB4XDTAwMDEwNTE1MDY1MVoXDTEwMDExNTE1MDY1MVowgYsx
+CzAJBgNVBAYTAlJVMQ8wDQYDVQQHEwZNb3Njb3cxFzAVBgNVBAoTDkdvbGRlbiBU
+ZWxlY29tMQwwCgYDVQQLEwNST0wxHjAcBgNVBAMTFUdvbGRlbiBUZWxlY29tIEFC
+UyBDQTEkMCIGCSqGSIb3DQEJARYVY2VydEBnb2xkZW50ZWxlY29tLnJ1MIGfMA0G
+CSqGSIb3DQEBAQUAA4GNADCBiQKBgQDPFel/Svli6ogoUEb6eLtEvNSjyalETSMP
+MIZXdmWIkWijvEUhDnNJVAE3knAt6dVYqxWq0vc6CbAGFZNqEyioGU48IECLzV0G
+toiYejF/c9PuyIKDejeV9/YZnNFaZAUOXhOjREdZURLISKhX4tAbQyvK0Qka9AAR
+MEy9DoqV8QIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAHQzgqFkoSMQr077UCr5C0l1
+rxLA17TrocCmUC1/PLmN0LmUHD0d7TjjTQKJaJBHxcKIg6+FOY6LSSY4nAN79eXi
+nBz+jEUG7+NTU/jcEArI35yP7fi4Mwb96EYDmUkUGtcLNq3JBe/d1Zhmy9HnNBL1
+Dn9thM2Q8RPYAJIU3JnGMIICqTCCAhICAQAwDQYJKoZIhvcNAQEEBQAwgZwxCzAJ
+BgNVBAYTAlJVMQ8wDQYDVQQIEwZNb3Njb3cxDzANBgNVBAcTBk1vc2NvdzEXMBUG
+A1UEChMOR29sZGVuIFRlbGVjb20xDDAKBgNVBAsTA1JPTDEeMBwGA1UEAxMVR29s
+ZGVuIFRlbGVjb20gQUJTIENBMSQwIgYJKoZIhvcNAQkBFhVjZXJ0QGdvbGRlbnRl
+bGVjb20ucnUwHhcNMTAwMTE1MTU0MDI2WhcNMjAwMjIyMTU0MDI2WjCBnDELMAkG
+A1UEBhMCUlUxDzANBgNVBAgTBk1vc2NvdzEPMA0GA1UEBxMGTW9zY293MRcwFQYD
+VQQKEw5Hb2xkZW4gVGVsZWNvbTEMMAoGA1UECxMDUk9MMR4wHAYDVQQDExVHb2xk
+ZW4gVGVsZWNvbSBBQlMgQ0ExJDAiBgkqhkiG9w0BCQEWFWNlcnRAZ29sZGVudGVs
+ZWNvbS5ydTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAzxXpf0r5YuqIKFBG
++ni7RLzUo8mpRE0jDzCGV3ZliJFoo7xFIQ5zSVQBN5JwLenVWKsVqtL3OgmwBhWT
+ahMoqBlOPCBAi81dBraImHoxf3PT7siCg3o3lff2GZzRWmQFDl4To0RHWVESyEio
+V+LQG0MrytEJGvQAETBMvQ6KlfECAwEAATANBgkqhkiG9w0BAQQFAAOBgQCMrS4T
+LIzxcpu8nwOq/xMcxW4Ctz/wjIoePWkmSLe+Tkb4zo7aTsvzn+ETaWb7qztUpyl0
+QvlXn4vC2iCJloPpofPqSzF1UV3g5Zb93ReZu7E6kEyW0ag8R5XZKv0xuR3b3Le+
+ZqolT8wQELd5Mmw5JPofZ+O2cGNvet8tYwOKFjCCAqUwggIOoAMCAQICAgboMA0G
+CSqGSIb3DQEBBAUAMIGcMQswCQYDVQQGEwJSVTEPMA0GA1UECBMGTW9zY293MQ8w
+DQYDVQQHEwZNb3Njb3cxFzAVBgNVBAoTDkdvbGRlbiBUZWxlY29tMQwwCgYDVQQL
+EwNST0wxHjAcBgNVBAMTFUdvbGRlbiBUZWxlY29tIEFCUyBDQTEkMCIGCSqGSIb3
+DQEJARYVY2VydEBnb2xkZW50ZWxlY29tLnJ1MB4XDTExMDEyODEyMTcwOVoXDTEy
+MDIwMTAwMDAwMFowdjELMAkGA1UEBhMCUlUxDDAKBgNVBAgTA04vQTEXMBUGA1UE
+ChMOR29sZGVuIFRlbGVjb20xDDAKBgNVBAsTA0lTUDEWMBQGA1UEAxMNY3JheS5n
+bGFzLm5ldDEaMBgGCSqGSIb3DQEJARYLZWxpZUByb2wucnUwgZ8wDQYJKoZIhvcN
+AQEBBQADgY0AMIGJAoGBAPJAm8KG3ZCoJSvoGmLMPlGaMIpadu/EGSEYu+M/ybLp
+Cs8XmwB3876JVKKCbtGI6eqxOqvjedYXb+nKcyhz4Ztmm8RgAD7Z1WUItIpatejT
+79EYOUWrDN713SLZsImMyP4B4EySl4LZfHFRU2iOwLB6WozGCYuULLqYS9MDPrnT
+AgMBAAGjGzAZMBcGCWCGSAGG+EIBDQQKFghDPS07Uz0tOzANBgkqhkiG9w0BAQQF
+AAOBgQDEttS70qYCA+MGBA3hOR88XiBcTmuBarJDwn/rj31vRjYZUgp9bbFwscRI
+Ic4lDnlyvunwNitl+341bDg7u6Ebu9hCMbciyu4EtrsDh77DlLzbmNcXbnhlvbFL
+K9GiPz3dNyvQMfmaA0twd62zJDOVJ1SmO04lLmu/pAx8GhBZkqEAMQA=
+"""
+
+ pem_text_reordered = """\
+MIIKcwYJKoZIhvcNAQcCoIIKZDCCCmACAQExADALBgkqhkiG9w0BBwGgggpIMIIC
+XjCCAcegAwIBAgIBADANBgkqhkiG9w0BAQQFADB1MQswCQYDVQQGEwJSVTEPMA0G
+A1UEBxMGTW9zY293MRcwFQYDVQQKEw5Tb3ZhbSBUZWxlcG9ydDEMMAoGA1UECxMD
+TklTMQ8wDQYDVQQDEwZBQlMgQ0ExHTAbBgkqhkiG9w0BCQEWDmNlcnRAb25saW5l
+LnJ1MB4XDTk5MDgxNTE5MDI1OFoXDTAwMDExMjE5MDI1OFowdTELMAkGA1UEBhMC
+UlUxDzANBgNVBAcTBk1vc2NvdzEXMBUGA1UEChMOU292YW0gVGVsZXBvcnQxDDAK
+BgNVBAsTA05JUzEPMA0GA1UEAxMGQUJTIENBMR0wGwYJKoZIhvcNAQkBFg5jZXJ0
+QG9ubGluZS5ydTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAw0g1P0yQAZIi
+ml2XOCOxnCcuhHmAgj4Ei9M2ebrrGwUMONPzr1a8W7JcpnR3FeOjxEIxrzkHr6UA
+oj4l/oC7Rv28uIig+Okf+82ekhH6VgAQNr5LAzfN8J6dZLx2OXAmmLleAqHuisT7
+I40vEFRoRmC5hiMlILE2rIlIKJn6cUkCAwEAATANBgkqhkiG9w0BAQQFAAOBgQBZ
+7ELDfGUNb+fbpHl5W3d9JMXsdOgd96+HG+X1SPgeiRAMjkla8WFCSaQPIR4vCy0m
+tm5a2bWSji6+vP5FGbjOz5iMlHMrCtu0He7Eim2zpaGI06ZIY75Cn1h2r3+KS0/R
+h01TJUbmsfV1tZm6Wk3bayJ+/K8A4mBHv8P6rhYacDCCAowwggH1oAMCAQICAQAw
+DQYJKoZIhvcNAQEEBQAwgYsxCzAJBgNVBAYTAlJVMQ8wDQYDVQQHEwZNb3Njb3cx
+FzAVBgNVBAoTDkdvbGRlbiBUZWxlY29tMQwwCgYDVQQLEwNST0wxHjAcBgNVBAMT
+FUdvbGRlbiBUZWxlY29tIEFCUyBDQTEkMCIGCSqGSIb3DQEJARYVY2VydEBnb2xk
+ZW50ZWxlY29tLnJ1MB4XDTAwMDEwNTE1MDY1MVoXDTEwMDExNTE1MDY1MVowgYsx
+CzAJBgNVBAYTAlJVMQ8wDQYDVQQHEwZNb3Njb3cxFzAVBgNVBAoTDkdvbGRlbiBU
+ZWxlY29tMQwwCgYDVQQLEwNST0wxHjAcBgNVBAMTFUdvbGRlbiBUZWxlY29tIEFC
+UyBDQTEkMCIGCSqGSIb3DQEJARYVY2VydEBnb2xkZW50ZWxlY29tLnJ1MIGfMA0G
+CSqGSIb3DQEBAQUAA4GNADCBiQKBgQDPFel/Svli6ogoUEb6eLtEvNSjyalETSMP
+MIZXdmWIkWijvEUhDnNJVAE3knAt6dVYqxWq0vc6CbAGFZNqEyioGU48IECLzV0G
+toiYejF/c9PuyIKDejeV9/YZnNFaZAUOXhOjREdZURLISKhX4tAbQyvK0Qka9AAR
+MEy9DoqV8QIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAHQzgqFkoSMQr077UCr5C0l1
+rxLA17TrocCmUC1/PLmN0LmUHD0d7TjjTQKJaJBHxcKIg6+FOY6LSSY4nAN79eXi
+nBz+jEUG7+NTU/jcEArI35yP7fi4Mwb96EYDmUkUGtcLNq3JBe/d1Zhmy9HnNBL1
+Dn9thM2Q8RPYAJIU3JnGMIICpTCCAg6gAwIBAgICBugwDQYJKoZIhvcNAQEEBQAw
+gZwxCzAJBgNVBAYTAlJVMQ8wDQYDVQQIEwZNb3Njb3cxDzANBgNVBAcTBk1vc2Nv
+dzEXMBUGA1UEChMOR29sZGVuIFRlbGVjb20xDDAKBgNVBAsTA1JPTDEeMBwGA1UE
+AxMVR29sZGVuIFRlbGVjb20gQUJTIENBMSQwIgYJKoZIhvcNAQkBFhVjZXJ0QGdv
+bGRlbnRlbGVjb20ucnUwHhcNMTEwMTI4MTIxNzA5WhcNMTIwMjAxMDAwMDAwWjB2
+MQswCQYDVQQGEwJSVTEMMAoGA1UECBMDTi9BMRcwFQYDVQQKEw5Hb2xkZW4gVGVs
+ZWNvbTEMMAoGA1UECxMDSVNQMRYwFAYDVQQDEw1jcmF5LmdsYXMubmV0MRowGAYJ
+KoZIhvcNAQkBFgtlbGllQHJvbC5ydTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkC
+gYEA8kCbwobdkKglK+gaYsw+UZowilp278QZIRi74z/JsukKzxebAHfzvolUooJu
+0Yjp6rE6q+N51hdv6cpzKHPhm2abxGAAPtnVZQi0ilq16NPv0Rg5RasM3vXdItmw
+iYzI/gHgTJKXgtl8cVFTaI7AsHpajMYJi5QsuphL0wM+udMCAwEAAaMbMBkwFwYJ
+YIZIAYb4QgENBAoWCEM9LTtTPS07MA0GCSqGSIb3DQEBBAUAA4GBAMS21LvSpgID
+4wYEDeE5HzxeIFxOa4FqskPCf+uPfW9GNhlSCn1tsXCxxEghziUOeXK+6fA2K2X7
+fjVsODu7oRu72EIxtyLK7gS2uwOHvsOUvNuY1xdueGW9sUsr0aI/Pd03K9Ax+ZoD
+S3B3rbMkM5UnVKY7TiUua7+kDHwaEFmSMIICqTCCAhICAQAwDQYJKoZIhvcNAQEE
+BQAwgZwxCzAJBgNVBAYTAlJVMQ8wDQYDVQQIEwZNb3Njb3cxDzANBgNVBAcTBk1v
+c2NvdzEXMBUGA1UEChMOR29sZGVuIFRlbGVjb20xDDAKBgNVBAsTA1JPTDEeMBwG
+A1UEAxMVR29sZGVuIFRlbGVjb20gQUJTIENBMSQwIgYJKoZIhvcNAQkBFhVjZXJ0
+QGdvbGRlbnRlbGVjb20ucnUwHhcNMTAwMTE1MTU0MDI2WhcNMjAwMjIyMTU0MDI2
+WjCBnDELMAkGA1UEBhMCUlUxDzANBgNVBAgTBk1vc2NvdzEPMA0GA1UEBxMGTW9z
+Y293MRcwFQYDVQQKEw5Hb2xkZW4gVGVsZWNvbTEMMAoGA1UECxMDUk9MMR4wHAYD
+VQQDExVHb2xkZW4gVGVsZWNvbSBBQlMgQ0ExJDAiBgkqhkiG9w0BCQEWFWNlcnRA
+Z29sZGVudGVsZWNvbS5ydTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAzxXp
+f0r5YuqIKFBG+ni7RLzUo8mpRE0jDzCGV3ZliJFoo7xFIQ5zSVQBN5JwLenVWKsV
+qtL3OgmwBhWTahMoqBlOPCBAi81dBraImHoxf3PT7siCg3o3lff2GZzRWmQFDl4T
+o0RHWVESyEioV+LQG0MrytEJGvQAETBMvQ6KlfECAwEAATANBgkqhkiG9w0BAQQF
+AAOBgQCMrS4TLIzxcpu8nwOq/xMcxW4Ctz/wjIoePWkmSLe+Tkb4zo7aTsvzn+ET
+aWb7qztUpyl0QvlXn4vC2iCJloPpofPqSzF1UV3g5Zb93ReZu7E6kEyW0ag8R5XZ
+Kv0xuR3b3Le+ZqolT8wQELd5Mmw5JPofZ+O2cGNvet8tYwOKFjEA
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc2315.ContentInfo()
+
+ def testDerCodec(self):
+
+ substrate = pem.readBase64fromText(self.pem_text_unordered)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+ contentType = asn1Object['contentType']
+ substrate = asn1Object['content']
+
+ contentInfoMap = {
+ (1, 2, 840, 113549, 1, 7, 1): rfc2315.Data(),
+ (1, 2, 840, 113549, 1, 7, 2): rfc2315.SignedData(),
+ (1, 2, 840, 113549, 1, 7, 3): rfc2315.EnvelopedData(),
+ (1, 2, 840, 113549, 1, 7, 4): rfc2315.SignedAndEnvelopedData(),
+ (1, 2, 840, 113549, 1, 7, 5): rfc2315.DigestedData(),
+ (1, 2, 840, 113549, 1, 7, 6): rfc2315.EncryptedData()
+ }
+
+ innerAsn1Object, rest = der_decoder.decode(
+ substrate, asn1Spec=contentInfoMap[contentType]
+ )
+
+ asn1Object['content'] = der_encoder.encode(innerAsn1Object)
+
+ substrate = pem.readBase64fromText(self.pem_text_reordered)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
+
+if __name__ == '__main__':
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/third_party/python/pyasn1-modules/tests/test_rfc2437.py b/third_party/python/pyasn1-modules/tests/test_rfc2437.py
new file mode 100644
index 0000000000..8d3539aa13
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tests/test_rfc2437.py
@@ -0,0 +1,47 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+import sys
+from pyasn1.codec.der import decoder as der_decoder
+from pyasn1.codec.der import encoder as der_encoder
+
+from pyasn1_modules import rfc2437, pem
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+class RSAPrivateKeyTestCase(unittest.TestCase):
+ pem_text = """\
+MIIBPAIBAAJBAMfAjvBNDDYBCl1w3yNcagZkPhqd0q5KqeOTgKSLuJWfe5+VSeR5
+Y1PcF3DyH8dvS3t8PIQjxJLoKS7HVRlsfhECAwEAAQJBAIr93/gxhIenXbD7MykF
+yvi7k8MtgkWoymICZwcX+c6RudFyuPPfQJ/sf6RmFZlRA9X9CQm5NwVG7+x1Yi6t
+KoECIQDmJUCWkPCiQYow6YxetpXFa0K6hTzOPmax7MNHVWNgmQIhAN4xOZ4JFT34
+xVhK+8EudBCYRomJUHmOJfoQAxiIXVw5AiEAyB7ecc5on/5zhqKef4Eu7LKfHIdc
+304diFuDVpTmTAkCIC2ZmKOQZaWkSowGR4isCfHl7oQHhFaOD8k0RA5i3hYxAiEA
+n8lDw3JT6NjvMnD6aM8KBsLyhazWSVVkaUSqmJzgCF0=
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc2437.RSAPrivateKey()
+
+ def testDerCodec(self):
+
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
+
+if __name__ == '__main__':
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/third_party/python/pyasn1-modules/tests/test_rfc2459.py b/third_party/python/pyasn1-modules/tests/test_rfc2459.py
new file mode 100644
index 0000000000..1fa9d07ed7
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tests/test_rfc2459.py
@@ -0,0 +1,111 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+import sys
+from pyasn1.codec.der import decoder as der_decoder
+from pyasn1.codec.der import encoder as der_encoder
+
+from pyasn1_modules import rfc2459, pem
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+class CertificateTestCase(unittest.TestCase):
+ pem_text = """\
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
+NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
+cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
+2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
+JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
+Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
+n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
+PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc2459.Certificate()
+
+ def testDerCodec(self):
+
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+class CertificateListTestCase(unittest.TestCase):
+ pem_text = """\
+MIIBVjCBwAIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJBVTETMBEGA1UE
+CBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRk
+MRUwEwYDVQQDEwxzbm1wbGFicy5jb20xIDAeBgkqhkiG9w0BCQEWEWluZm9Ac25t
+cGxhYnMuY29tFw0xMjA0MTExMzQwNTlaFw0xMjA1MTExMzQwNTlaoA4wDDAKBgNV
+HRQEAwIBATANBgkqhkiG9w0BAQUFAAOBgQC1D/wwnrcY/uFBHGc6SyoYss2kn+nY
+RTwzXmmldbNTCQ03x5vkWGGIaRJdN8QeCzbEi7gpgxgpxAx6Y5WkxkMQ1UPjNM5n
+DGVDOtR0dskFrrbHuNpWqWrDaBN0/ryZiWKjr9JRbrpkHgVY29I1gLooQ6IHuKHY
+vjnIhxTFoCb5vA==
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc2459.CertificateList()
+
+ def testDerCodec(self):
+
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+class DSAPrivateKeyTestCase(unittest.TestCase):
+ pem_text = """\
+MIIBugIBAAKBgQCN91+Cma8UPw09gjwP9WOJCdpv3mv3/qFqzgiODGZx0Q002iTl
+1dq36m5TsWYFEcMCEyC3tFuoQ0mGq5zUUOmJvHCIPufs0g8Av0fhY77uFqneHHUi
+VQMCPCHX9vTCWskmDE21LJppU27bR4H2q+ysE30d6u3+84qrItsn4bjpcQIVAPR5
+QrmooOXDn7fHJzshmxImGC4VAoGAXxKyEnlvzq93d4V6KLWX3H5Jk2JP771Ss1bT
+6D/mSbLlvjjo7qsj6diul1axu6Wny31oPertzA2FeGEzkqvjSNmSxyYYMDB3kEcx
+ahntt37I1FgSlgdZHuhdtl1h1DBKXqCCneOZuNj+kW5ib14u5HDfFIbec2HJbvVs
+lJ/k83kCgYB4TD8vgHetXHxqsiZDoy5wOnQ3mmFAfl8ZdQsIfov6kEgArwPYUOVB
+JsX84f+MFjIOKXUV8dHZ8VRrGCLAbXcxKqLNWKlKHUnEsvt63pkaTy/RKHyQS+pn
+wontdTt9EtbF+CqIWnm2wpn3O+SbdtawzPOL1CcGB0jYABwbeQ81RwIUFKdyRYaa
+INow2I3/ks+0MxDabTY=
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc2459.DSAPrivateKey()
+
+ def testDerCodec(self):
+
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+
+suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
+
+if __name__ == '__main__':
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/third_party/python/pyasn1-modules/tests/test_rfc2511.py b/third_party/python/pyasn1-modules/tests/test_rfc2511.py
new file mode 100644
index 0000000000..ef4cc000c2
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tests/test_rfc2511.py
@@ -0,0 +1,49 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+import sys
+from pyasn1.codec.der import decoder as der_decoder
+from pyasn1.codec.der import encoder as der_encoder
+
+from pyasn1_modules import rfc2511, pem
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+class CertificateReqTestCase(unittest.TestCase):
+ pem_text = """\
+MIIBozCCAZ8wggEFAgUAwTnj2jCByoABAqURMA8xDTALBgNVBAMTBHVzZXKmgZ8w
+DQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAJ6ZQ2cYbn/lFsmBOlRltbRbFQUvvE0Q
+nbopOu1kC7Bmaaz7QTx8nxeiHi4m7uxCbGGxHNoGCt7EmdG8eZUBNAcHyGlXrJdm
+0z3/uNEGiBHq+xB8FnFJCA5EIJ3RWFnlbu9otSITLxWK7c5+/NHmWM+yaeHD/f/h
+rp01c/8qXZfZAgMBAAGpEDAOBgNVHQ8BAf8EBAMCBeAwLzASBgkrBgEFBQcFAQEM
+BTExMTExMBkGCSsGAQUFBwUBAgwMc2VydmVyX21hZ2ljoYGTMA0GCSqGSIb3DQEB
+BQUAA4GBAEI3KNEvTq/n1kNVhNhPkovk1AZxyJrN1u1+7Gkc4PLjWwjLOjcEVWt4
+AajUk/gkIJ6bbeO+fZlMjHfPSDKcD6AV2hN+n72QZwfzcw3icNvBG1el9EU4XfIm
+xfu5YVWi81/fw8QQ6X6YGHFQkomLd7jxakVyjxSng9BhO6GpjJNF
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc2511.CertReqMessages()
+
+ def testDerCodec(self):
+
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
+
+if __name__ == '__main__':
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/third_party/python/pyasn1-modules/tests/test_rfc2560.py b/third_party/python/pyasn1-modules/tests/test_rfc2560.py
new file mode 100644
index 0000000000..142284bc30
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tests/test_rfc2560.py
@@ -0,0 +1,81 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+import sys
+from pyasn1.codec.der import decoder as der_decoder
+from pyasn1.codec.der import encoder as der_encoder
+
+from pyasn1_modules import rfc2560, pem
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+class OCSPRequestTestCase(unittest.TestCase):
+ pem_text = """\
+MGowaDBBMD8wPTAJBgUrDgMCGgUABBS3ZrMV9C5Dko03aH13cEZeppg3wgQUkqR1LKSevoFE63n8
+isWVpesQdXMCBDXe9M+iIzAhMB8GCSsGAQUFBzABAgQSBBBjdJOiIW9EKJGELNNf/rdA
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc2560.OCSPRequest()
+
+ def testDerCodec(self):
+
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+class OCSPResponseTestCase(unittest.TestCase):
+ pem_text = """\
+MIIEvQoBAKCCBLYwggSyBgkrBgEFBQcwAQEEggSjMIIEnzCCAQ+hgYAwfjELMAkGA1UEBhMCQVUx
+EzARBgNVBAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEV
+MBMGA1UEAxMMc25tcGxhYnMuY29tMSAwHgYJKoZIhvcNAQkBFhFpbmZvQHNubXBsYWJzLmNvbRgP
+MjAxMjA0MTExNDA5MjJaMFQwUjA9MAkGBSsOAwIaBQAEFLdmsxX0LkOSjTdofXdwRl6mmDfCBBSS
+pHUspJ6+gUTrefyKxZWl6xB1cwIENd70z4IAGA8yMDEyMDQxMTE0MDkyMlqhIzAhMB8GCSsGAQUF
+BzABAgQSBBBjdJOiIW9EKJGELNNf/rdAMA0GCSqGSIb3DQEBBQUAA4GBADk7oRiCy4ew1u0N52QL
+RFpW+tdb0NfkV2Xyu+HChKiTThZPr9ZXalIgkJ1w3BAnzhbB0JX/zq7Pf8yEz/OrQ4GGH7HyD3Vg
+PkMu+J6I3A2An+bUQo99AmCbZ5/tSHtDYQMQt3iNbv1fk0yvDmh7UdKuXUNSyJdHeg27dMNy4k8A
+oIIC9TCCAvEwggLtMIICVqADAgECAgEBMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAkFVMRMw
+EQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQxFTAT
+BgNVBAMTDHNubXBsYWJzLmNvbTEgMB4GCSqGSIb3DQEJARYRaW5mb0Bzbm1wbGFicy5jb20wHhcN
+MTIwNDExMTMyNTM1WhcNMTMwNDExMTMyNTM1WjB+MQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29t
+ZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMRUwEwYDVQQDEwxzbm1w
+bGFicy5jb20xIDAeBgkqhkiG9w0BCQEWEWluZm9Ac25tcGxhYnMuY29tMIGfMA0GCSqGSIb3DQEB
+AQUAA4GNADCBiQKBgQDDDU5HOnNV8I2CojxB8ilIWRHYQuaAjnjrETMOprouDHFXnwWqQo/I3m0b
+XYmocrh9kDefb+cgc7+eJKvAvBqrqXRnU38DmQU/zhypCftGGfP8xjuBZ1n23lR3hplN1yYA0J2X
+SgBaAg6e8OsKf1vcX8Es09rDo8mQpt4G2zR56wIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG
++EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQU8Ys2dpJFLMHl
+yY57D4BNmlqnEcYwHwYDVR0jBBgwFoAU8Ys2dpJFLMHlyY57D4BNmlqnEcYwDQYJKoZIhvcNAQEF
+BQADgYEAWR0uFJVlQId6hVpUbgXFTpywtNitNXFiYYkRRv77McSJqLCa/c1wnuLmqcFcuRUK0oN6
+8ZJDP2HDDKe8MCZ8+sx+CF54eM8VCgN9uQ9XyE7x9XrXDd3Uw9RJVaWSIezkNKNeBE0lDM2jUjC4
+HAESdf7nebz1wtqAOXE1jWF/y8g=
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc2560.OCSPResponse()
+
+ def testDerCodec(self):
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
+
+if __name__ == '__main__':
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/third_party/python/pyasn1-modules/tests/test_rfc4210.py b/third_party/python/pyasn1-modules/tests/test_rfc4210.py
new file mode 100644
index 0000000000..35451c8a45
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tests/test_rfc4210.py
@@ -0,0 +1,129 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+import sys
+from pyasn1.codec.der import decoder as der_decoder
+from pyasn1.codec.der import encoder as der_encoder
+
+from pyasn1_modules import rfc4210, pem
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+class PKIMessageTestCase(unittest.TestCase):
+ pem_text = """\
+MIITuTCCARECAQKkWTBXMQswCQYDVQQGEwJUUjEQMA4GA1UEChMHRS1HdXZlbjEUMBIGA1UECxML
+VHJ1c3RDZW50ZXIxIDAeBgNVBAMTF1JTQSBTZWN1cml0eSBDTVAgU2VydmVypC0wKzELMAkGA1UE
+BhMCVFIxHDAaBgNVBAMME1ZhbGltby1WZXR0b3ItMTdEZWOgERgPMjAxMjA1MDMxMTE2MTdaoQ8w
+DQYJKoZIhvcNAQEFBQCiIgQgZWVhMjg5MGU2ZGY5N2IyNzk5NWY2MWE0MzE2MzI1OWGkEgQQQ01Q
+VjJUMTIyMzM0NjI3MKUSBBCAAAABgAAAAYAAAAGAAAABphIEEDEzNjY0NDMwMjlSYW5kb22jghIZ
+MIISFaGCC84wggvKMIIFwDCCBKigAwIBAgIQfOVE05R616R6Nqgu3drXHzANBgkqhkiG9w0BAQUF
+ADBxMQswCQYDVQQGEwJUUjEoMCYGA1UEChMfRWxla3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5T
+LjE4MDYGA1UEAxMvZS1HdXZlbiBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2FnbGF5aWNp
+c2kwHhcNMDgxMTI0MTAwMzI0WhcNMTYxMjE0MTExNzI0WjBdMQswCQYDVQQGEwJUUjEoMCYGA1UE
+CgwfRWxla3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5TLjEkMCIGA1UEAwwbZS1HdXZlbiBNb2Jp
+bCBUZXN0VVRGLTgtU09OMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzqaymRo5chRK
+EKrhjWQky1HOm6b/Jy4tSUuo4vq3O9U3G2osOU/hHb6fyMmznLpc6CaZ3qKYiuDMFRW8g1kNjEjV
+sFSvH0Yd4qgwP1+qqzhBSe+nCAnEbRUrz+nXJ4fKhmGaQ+ZSic+MeyoqDsf/zENKqdV7ea9l3Ilu
+Rj93bmTxas9aWPWQ/U/fpwkwRXaqaONlM5e4GWdgA7T1aq106NvH1z6LDNXcMYw4lSZkj/UjmM/0
+NhVz+57Ib4a0bogTaBmm8a1E5NtzkcA7pgnZT8576T0UoiOpEo+NAELA1B0mRh1/82HK1/0xn1zt
+1ym4XZRtn2r2l/wTeEwU79ALVQIDAQABo4ICZjCCAmIwfAYIKwYBBQUHAQEEcDBuMDIGCCsGAQUF
+BzABhiZodHRwOi8vdGVzdG9jc3AyLmUtZ3V2ZW4uY29tL29jc3AueHVkYTA4BggrBgEFBQcwAoYs
+aHR0cDovL3d3dy5lLWd1dmVuLmNvbS9kb2N1bWVudHMvVGVzdEtvay5jcnQwDgYDVR0PAQH/BAQD
+AgEGMA8GA1UdEwEB/wQFMAMBAf8wggElBgNVHSAEggEcMIIBGDCCARQGCWCGGAMAAQECATCCAQUw
+NgYIKwYBBQUHAgEWKmh0dHA6Ly93d3cuZS1ndXZlbi5jb20vZG9jdW1lbnRzL05FU1VFLnBkZjCB
+ygYIKwYBBQUHAgIwgb0egboAQgB1ACAAcwBlAHIAdABpAGYAaQBrAGEAIABpAGwAZQAgAGkAbABn
+AGkAbABpACAAcwBlAHIAdABpAGYAaQBrAGEAIAB1AHkAZwB1AGwAYQBtAGEAIABlAHMAYQBzAGwA
+YQByATEAbgExACAAbwBrAHUAbQBhAGsAIABpAOcAaQBuACAAYgBlAGwAaQByAHQAaQBsAGUAbgAg
+AGQAbwBrAPwAbQBhAG4BMQAgAGEA5wExAG4BMQB6AC4wWAYDVR0fBFEwTzBNoEugSYZHaHR0cDov
+L3Rlc3RzaWwuZS1ndXZlbi5jb20vRWxla3Ryb25pa0JpbGdpR3V2ZW5saWdpQVNSb290L0xhdGVz
+dENSTC5jcmwwHQYDVR0OBBYEFLMoTImEKeXbqNjbYZkKshQi2vwzMB8GA1UdIwQYMBaAFGCI4dY9
+qCIkag0hwBgz5haCSNl0MA0GCSqGSIb3DQEBBQUAA4IBAQAWOsmvpoFB9sX2aq1/LjPDJ+A5Fpxm
+0XkOGM9yD/FsLfWgyv2HqBY1cVM7mjJfJ1ezkS0ODdlU6TyN5ouvAi21V9CIk69I3eUYSDjPpGia
+qcCCvJoMF0QD7B70kj2zW7IJ7pF11cbvPLaatdzojsH9fVfKtxtn/ZLrXtKsyUW5vKHOeniU6BBB
+Gl/ZZkFNXNN4mrB+B+wDV9OmdMw+Mc8KPq463hJQRat5a9lrXMdNtMAJOkvsUUzOemAsITjXWlyg
+BULijBhi8ZmMp0W7p6oKENX3vH2HCPCGQU29WIrK4iUoscjz93fB6oa4FQpxY0k3JRnWvD5FqkRD
+FKJdq/q9MIIDzzCCAregAwIBAgIQa34pJYdDFNXx90OkMkKzIjANBgkqhkiG9w0BAQUFADBxMQsw
+CQYDVQQGEwJUUjEoMCYGA1UEChMfRWxla3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5TLjE4MDYG
+A1UEAxMvZS1HdXZlbiBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2FnbGF5aWNpc2kwHhcN
+MDYxMjE1MTUxMzU0WhcNMTYxMjE1MTExMzU0WjBxMQswCQYDVQQGEwJUUjEoMCYGA1UEChMfRWxl
+a3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5TLjE4MDYGA1UEAxMvZS1HdXZlbiBFbGVrdHJvbmlr
+IFNlcnRpZmlrYSBIaXptZXQgU2FnbGF5aWNpc2kwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQCU/PTxSkcWPJMx4UO8L8ep9/JqRgAZ79EqYWgR4K2bNLgENpc5j0hO+QydgovFODzkEIBP
+RIBavMz9Cw2PONpSBmxd4K1A/5hGqoGEz8UCA2tIx4+Z2A9AQ2O3BYi9FWM+0D1brJDO+6yvX4m5
+Rf3mLlso52NIVV705fIkmOExHjdAj/xB0/LICZMfwKn8F19Jae/SQv9cFnptbNRCq8hU5zLRngpR
+eT1PYrZVV0XLbzbDPwgzLXCzDxG1atdGd5JRTnD58qM1foC3+hGafuyissMQVGnBQFlsx7V6OdlD
+bsxUXegCl2li0RpRJXLqyqMdtEplaznKp8NnbddylfrPAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB
+hjAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFGCI4dY9qCIkag0hwBgz5haCSNl0MB0GA1Ud
+DgQWBBRgiOHWPagiJGoNIcAYM+YWgkjZdDANBgkqhkiG9w0BAQUFAAOCAQEAKftTVjgltZJxXwDs
+MumguOSlljOQjotVVpES1QYwo3a5RQVpKuS4KYDEdWLD4ITtDNOA/iGKYWCNyKsE1BCL66irknZw
+iR6p6P+q2Wf7fGYSwUBcSBwWBTA+0EgpvPL3/vRuVVCVgC8XHBr72jKKTg9Nwcj+1FwXGZTDpjX8
+dzPhTXEWceQcDn2FRdNt6BQad9Hdq08lMHiyozsWniYZYuWpud91i8Pl698H9t0KqiJg6rPKc9kd
+z9QyC8E/cLIJgYhvfzXMxvmSjeSSFSqTHioqfpU3k8AWXuxqJUxbdQ8QrVaTXRByzEr1Ze0TYpDs
+oel1PjC9ouO8bC7cGrbCWzCCAi8wggGYAhBlEjJUo9asY2ISG4oHjcpzMA0GCSqGSIb3DQEBBQUA
+MFoxCzAJBgNVBAYTAlRSMRAwDgYDVQQKEwdFLUd1dmVuMRQwEgYDVQQLEwtUcnVzdENlbnRlcjEj
+MCEGA1UEAxMaRS1HdXZlblRFU1RDQUhTTSBTeXN0ZW0gQ0EwHhcNMDkxMTMwMjIxMzEzWhcNMTYx
+MTMwMTkxMTUxWjBXMQswCQYDVQQGEwJUUjEQMA4GA1UEChMHRS1HdXZlbjEUMBIGA1UECxMLVHJ1
+c3RDZW50ZXIxIDAeBgNVBAMTF1JTQSBTZWN1cml0eSBDTVAgU2VydmVyMIGfMA0GCSqGSIb3DQEB
+AQUAA4GNADCBiQKBgQDCaZeJerGULW+1UPSu9T0voPNgzPcihXX6G5Q45nS4RNCe+pOc226EtD51
+wu6Eq2oARpZmCrKPn63EFmHEE04dRDr8MS2LHuZK8xslIx/AvPnV568795EPoAyhGIX9Na9ZHhnI
+zSPWmWfBd9bsQiLVF7C9dOvfW125mtywWXELewIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAAiIse/x
+aWwRWUM0CIzfnoXfrgyLdKVykK7dTPgoMJgAx229uN6VTPyk+E+lTKq9PhK+e/VJNNg9PjSFjKFd
+lfSDOi9ne1xOrb7cNTjw+sGf1mfNWyzizLXa7su7ISFN+GaClmAstH9vXsRxg1oh3pFMJv47I6iw
+gUQlwwg8WsY/MIIGPzCCBjsCAQAwAwIBADCCBi+gggYrMIIGJzCCBQ+gAwIBAgIRALGVtVAeoM1x
+gjgOX3alZ5MwDQYJKoZIhvcNAQEFBQAwXTELMAkGA1UEBhMCVFIxKDAmBgNVBAoMH0VsZWt0cm9u
+aWsgQmlsZ2kgR3V2ZW5saWdpIEEuUy4xJDAiBgNVBAMMG2UtR3V2ZW4gTW9iaWwgVGVzdFVURi04
+LVNPTjAeFw0xMjA1MDMxMTE2MTdaFw0xMzA1MDMxMTE2MTdaMGoxCzAJBgNVBAYTAlRSMREwDwYD
+VQQKDAhGaXJlIExMVDEbMBkGA1UECwwScG9wQ29kZSAtIDEyMzQ1Njc4MRQwEgYDVQQFEws3NjU0
+MzQ1Njc2NTEVMBMGA1UEAwwMQnVyYWsgWW9uZGVtMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB
+gQCpfSB7xcsHZR4E27yGHkzUJx1y2iknzX4gRM2acyPljRw/V5Lm7POrfWIX9UF2sxfYfRqxYmD0
++nw72nx8R/5AFQK0BfjHxIc5W1YekMHF8PSORo9rJqcX+qn+NBYwqcJl4EdObTcOtMWC6ws6n0uA
+oDvYYN0ujkua496sp+INiQIDAQABo4IDVzCCA1MwQgYIKwYBBQUHAQEENjA0MDIGCCsGAQUFBzAB
+hiZodHRwOi8vdGVzdG9jc3AyLmUtZ3V2ZW4uY29tL29jc3AueHVkYTAfBgNVHSMEGDAWgBSzKEyJ
+hCnl26jY22GZCrIUItr8MzCCAXIGA1UdIASCAWkwggFlMIGxBgZghhgDAAEwgaYwNgYIKwYBBQUH
+AgEWKmh0dHA6Ly93d3cuZS1ndXZlbi5jb20vZG9jdW1lbnRzL05FU1VFLnBkZjBsBggrBgEFBQcC
+AjBgGl5CdSBzZXJ0aWZpa2EsIDUwNzAgc2F5xLFsxLEgRWxla3Ryb25payDEsG16YSBLYW51bnVu
+YSBnw7ZyZSBuaXRlbGlrbGkgZWxla3Ryb25payBzZXJ0aWZpa2FkxLFyMIGuBglghhgDAAEBAQMw
+gaAwNwYIKwYBBQUHAgEWK2h0dHA6Ly93d3cuZS1ndXZlbi5jb20vZG9jdW1lbnRzL01LTkVTSS5w
+ZGYwZQYIKwYBBQUHAgIwWRpXQnUgc2VydGlmaWthLCBNS05FU0kga2Fwc2FtxLFuZGEgeWF5xLFu
+bGFubcSxxZ8gYmlyIG5pdGVsaWtsaSBlbGVrdHJvbmlrIHNlcnRpZmlrYWTEsXIuMA4GA1UdDwEB
+/wQEAwIGwDCBgwYIKwYBBQUHAQMEdzB1MAgGBgQAjkYBATBpBgtghhgBPQABp04BAQxaQnUgc2Vy
+dGlmaWthLCA1MDcwIHNheWlsaSBFbGVrdHJvbmlrIEltemEgS2FudW51bmEgZ8O2cmUgbml0ZWxp
+a2xpIGVsZWt0cm9uaWsgc2VydGlmaWthZGlyMEUGA1UdCQQ+MDwwFAYIKwYBBQUHCQIxCAQGQW5r
+YXJhMBIGCCsGAQUFBwkBMQYEBDE5NzkwEAYIKwYBBQUHCQQxBAQCVFIwGAYDVR0RBBEwD4ENZmly
+ZUBmaXJlLmNvbTBgBgNVHR8EWTBXMFWgU6BRhk9odHRwOi8vdGVzdHNpbC5lLWd1dmVuLmNvbS9F
+bGVrdHJvbmlrQmlsZ2lHdXZlbmxpZ2lBU01LTkVTSS1VVEYtOC9MYXRlc3RDUkwuY3JsMB0GA1Ud
+DgQWBBSLG9aIb1k2emFLCpM93kXJkWhzuTANBgkqhkiG9w0BAQUFAAOCAQEACoGCn4bzDWLzs799
+rndpB971UD2wbwt8Hkw1MGZkkJVQeVF4IS8FacAyYk5vY8ONuTA/Wsh4x23v9WTCtO89HMTz81eU
+BclqZ2Gc2UeMq7Y4FQWR8PNCMdCsxVVhpRRE6jQAyyR9YEBHQYVLfy34e3+9G/h/BR73VGHZJdZI
+DDJYd+VWXmUD9kGk/mI35qYdzN3O28KI8sokqX0z2hvkpDKuP4jNXSCHcVkK23tX2x5m6m0LdqVn
+vnCx2LfBn1wf1u7q30p/GgMVX+mR3QHs7feGewEjlkxuEyLVVD+uBwWCT6zcad17oaAyXV5RV28L
+vH0WNg6pFUpwOP0l+nIOqqCBhAOBgQBAtTB5Qd18sTxEKhSzRiN2OycFPrqoqlZZTHBohe8bE2D4
+Xc1ejkFWUEvQivkqJxCD6C7I37xgDaq8DZnaczIBxbPkY0QMdeL4MiEqlw/tlrJGrWoC5Twb0t/m
+JA5RSwQoMDYTj2WrwtM/nsP12T39or4JRZhlLSM43IaTwEBtQw==
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc4210.PKIMessage()
+
+ def testDerCodec(self):
+
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
+
+if __name__ == '__main__':
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/third_party/python/pyasn1-modules/tests/test_rfc5208.py b/third_party/python/pyasn1-modules/tests/test_rfc5208.py
new file mode 100644
index 0000000000..94d1d0e0a5
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tests/test_rfc5208.py
@@ -0,0 +1,74 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+import sys
+from pyasn1.codec.der import decoder as der_decoder
+from pyasn1.codec.der import encoder as der_encoder
+
+from pyasn1_modules import rfc5208, pem
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+class PrivateKeyInfoTestCase(unittest.TestCase):
+ pem_text = """\
+MIIBVgIBADANBgkqhkiG9w0BAQEFAASCAUAwggE8AgEAAkEAx8CO8E0MNgEKXXDf
+I1xqBmQ+Gp3Srkqp45OApIu4lZ97n5VJ5HljU9wXcPIfx29Le3w8hCPEkugpLsdV
+GWx+EQIDAQABAkEAiv3f+DGEh6ddsPszKQXK+LuTwy2CRajKYgJnBxf5zpG50XK4
+899An+x/pGYVmVED1f0JCbk3BUbv7HViLq0qgQIhAOYlQJaQ8KJBijDpjF62lcVr
+QrqFPM4+ZrHsw0dVY2CZAiEA3jE5ngkVPfjFWEr7wS50EJhGiYlQeY4l+hADGIhd
+XDkCIQDIHt5xzmif/nOGop5/gS7ssp8ch1zfTh2IW4NWlOZMCQIgLZmYo5BlpaRK
+jAZHiKwJ8eXuhAeEVo4PyTREDmLeFjECIQCfyUPDclPo2O8ycPpozwoGwvKFrNZJ
+VWRpRKqYnOAIXQ==
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc5208.PrivateKeyInfo()
+
+ def testDerCodec(self):
+
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+class EncryptedPrivateKeyInfoTestCase(unittest.TestCase):
+ pem_text = """\
+MIIBgTAbBgkqhkiG9w0BBQMwDgQIdtFgDWnipT8CAggABIIBYN0hkm2xqkTCt8dJ
+iZS8+HNiyHxy8g+rmWSXv/i+bTHFUReZA2GINtTRUkWpXqWcSHxNslgf7QdfgbVJ
+xQiUM+lLhwOFh85iAHR3xmPU1wfN9NvY9DiLSpM0DMhF3OvAMZD75zIhA0GSKu7w
+dUu7ey7H4fv7bez6RhEyLdKw9/Lf2KNStNOs4ow9CAtCoxeoMSniTt6CNhbvCkve
+9vNHKiGavX1tS/YTog4wiiGzh2YxuW1RiQpTdhWiKyECgD8qQVg2tY5t3QRcXrzi
+OkStpkiAPAbiwS/gyHpsqiLo0al63SCxRefugbn1ucZyc5Ya59e3xNFQXCNhYl+Z
+Hl3hIl3cssdWZkJ455Z/bBE29ks1HtsL+bTfFi+kw/4yuMzoaB8C7rXScpGNI/8E
+pvTU2+wtuoOFcttJregtR94ZHu5wgdYqRydmFNG8PnvZT1mRMmQgUe/vp88FMmsZ
+dLsZjNQ=
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc5208.EncryptedPrivateKeyInfo()
+
+ def testDerCodec(self):
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
+
+if __name__ == '__main__':
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/third_party/python/pyasn1-modules/tests/test_rfc5280.py b/third_party/python/pyasn1-modules/tests/test_rfc5280.py
new file mode 100644
index 0000000000..49983ef4b5
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tests/test_rfc5280.py
@@ -0,0 +1,82 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+import sys
+from pyasn1.codec.der import decoder as der_decoder
+from pyasn1.codec.der import encoder as der_encoder
+
+from pyasn1_modules import rfc5280, pem
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+class CertificateTestCase(unittest.TestCase):
+ pem_text = """\
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
+NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
+cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
+2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
+JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
+Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
+n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
+PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc5280.Certificate()
+
+ def testDerCodec(self):
+
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+class CertificateListTestCase(unittest.TestCase):
+ pem_text = """\
+MIIBVjCBwAIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJBVTETMBEGA1UE
+CBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRk
+MRUwEwYDVQQDEwxzbm1wbGFicy5jb20xIDAeBgkqhkiG9w0BCQEWEWluZm9Ac25t
+cGxhYnMuY29tFw0xMjA0MTExMzQwNTlaFw0xMjA1MTExMzQwNTlaoA4wDDAKBgNV
+HRQEAwIBATANBgkqhkiG9w0BAQUFAAOBgQC1D/wwnrcY/uFBHGc6SyoYss2kn+nY
+RTwzXmmldbNTCQ03x5vkWGGIaRJdN8QeCzbEi7gpgxgpxAx6Y5WkxkMQ1UPjNM5n
+DGVDOtR0dskFrrbHuNpWqWrDaBN0/ryZiWKjr9JRbrpkHgVY29I1gLooQ6IHuKHY
+vjnIhxTFoCb5vA==
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc5280.CertificateList()
+
+ def testDerCodec(self):
+
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+
+suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
+
+if __name__ == '__main__':
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/third_party/python/pyasn1-modules/tests/test_rfc5652.py b/third_party/python/pyasn1-modules/tests/test_rfc5652.py
new file mode 100644
index 0000000000..5fa4296b9f
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tests/test_rfc5652.py
@@ -0,0 +1,87 @@
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+import sys
+from pyasn1.codec.der import decoder as der_decoder
+from pyasn1.codec.der import encoder as der_encoder
+
+from pyasn1_modules import rfc5652, rfc6402, pem
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+class ContentInfoTestCase(unittest.TestCase):
+ pem_text = """\
+MIIEJQYJKoZIhvcNAQcCoIIEFjCCBBICAQMxCzAJBgUrDgMCGgUAMIIDAgYIKwYBBQUHDAKgggL0
+BIIC8DCCAuwweDB2AgECBgorBgEEAYI3CgoBMWUwYwIBADADAgEBMVkwVwYJKwYBBAGCNxUUMUow
+SAIBBQwZcGl0dWNoYTEuZW1lYS5ocHFjb3JwLm5ldAwMRU1FQVxwaXR1Y2hhDBpDTUNSZXFHZW5l
+cmF0b3IudnNob3N0LmV4ZTCCAmqgggJmAgEBMIICXzCCAcgCAQAwADCBnzANBgkqhkiG9w0BAQEF
+AAOBjQAwgYkCgYEA0jm7SSSm2wyEAzuNKtFZFJKo91SrJq9wQwEhEKHDavZwMQOm1rZ2PF8NWCEb
+PqrhToQ7rtiGLSZa4dF4bzgmBqQ9aoSfEX4jISt31Vy+skHidXjHHpbsjT24NPhrZgANivL7CxD6
+Ft+s7qS1gL4HRm2twQkqSwOLrE/q2QeXl2UCAwEAAaCCAR0wGgYKKwYBBAGCNw0CAzEMFgo2LjIu
+OTIwMC4yMD4GCSqGSIb3DQEJDjExMC8wHQYDVR0OBBYEFMW2skn88gxhONWZQA4sWGBDb68yMA4G
+A1UdDwEB/wQEAwIHgDBXBgkrBgEEAYI3FRQxSjBIAgEFDBlwaXR1Y2hhMS5lbWVhLmhwcWNvcnAu
+bmV0DAxFTUVBXHBpdHVjaGEMGkNNQ1JlcUdlbmVyYXRvci52c2hvc3QuZXhlMGYGCisGAQQBgjcN
+AgIxWDBWAgECHk4ATQBpAGMAcgBvAHMAbwBmAHQAIABTAHQAcgBvAG4AZwAgAEMAcgB5AHAAdABv
+AGcAcgBhAHAAaABpAGMAIABQAHIAbwB2AGkAZABlAHIDAQAwDQYJKoZIhvcNAQEFBQADgYEAJZlu
+mxjtCxSOQi27jsVdd3y8NSIlzNv0b3LqmzvAly6L+CstXcnuG2MPQqPH9R7tbJonGUniBQO9sQ7C
+KhYWj2gfhiEkSID82lV5chINVUFKoUlSiEhWr0tPGgvOaqdsKQcrHfzrsBbFkhDqrFSVy7Yivbnh
+qYszKrOjJKiiCPMwADAAMYH5MIH2AgEDgBTFtrJJ/PIMYTjVmUAOLFhgQ2+vMjAJBgUrDgMCGgUA
+oD4wFwYJKoZIhvcNAQkDMQoGCCsGAQUFBwwCMCMGCSqGSIb3DQEJBDEWBBTFTkK/OifaFjwqHiJu
+xM7qXcg/VzANBgkqhkiG9w0BAQEFAASBgKfC6jOi1Wgy4xxDCQVK9+e5tktL8wE/j2cb9JSqq+aU
+5UxEgXEw7q7BoYZCAzcxMRriGzakXr8aXHcgkRJ7XcFvLPUjpmGg9SOZ2sGW4zQdWAwImN/i8loc
+xicQmJP+VoMHo/ZpjFY9fYCjNZUArgKsEwK/s+p9yrVVeB1Nf8Mn
+"""
+
+ def setUp(self):
+ self.asn1Spec = rfc5652.ContentInfo()
+
+ def testDerCodec(self):
+
+ substrate = pem.readBase64fromText(self.pem_text)
+
+ layers = {
+ rfc5652.id_ct_contentInfo: rfc5652.ContentInfo(),
+ rfc5652.id_signedData: rfc5652.SignedData(),
+ rfc6402.id_cct_PKIData: rfc6402.PKIData()
+ }
+
+ getNextLayer = {
+ rfc5652.id_ct_contentInfo: lambda x: x['contentType'],
+ rfc5652.id_signedData: lambda x: x['encapContentInfo']['eContentType'],
+ rfc6402.id_cct_PKIData: lambda x: None
+ }
+
+ getNextSubstrate = {
+ rfc5652.id_ct_contentInfo: lambda x: x['content'],
+ rfc5652.id_signedData: lambda x: x['encapContentInfo']['eContent'],
+ rfc6402.id_cct_PKIData: lambda x: None
+ }
+
+
+ next_layer = rfc5652.id_ct_contentInfo
+
+ while next_layer:
+
+ asn1Object, rest = der_decoder.decode(
+ substrate, asn1Spec=layers[next_layer]
+ )
+
+ assert not rest
+ assert asn1Object.prettyPrint()
+ assert der_encoder.encode(asn1Object) == substrate
+
+ substrate = getNextSubstrate[next_layer](asn1Object)
+ next_layer = getNextLayer[next_layer](asn1Object)
+
+
+suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
+
+if __name__ == '__main__':
+ unittest.TextTestRunner(verbosity=2).run(suite)
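
The layered decode in the test above is driven by three dictionaries keyed on content-type OIDs. The same idea can be folded into a small generator; the name unwrap_cms_layers is invented for illustration, and the sketch only handles the three layers exercised by the test.

# Sketch of the layered CMS decode from the test above as a generator;
# unwrap_cms_layers is a hypothetical helper, not an upstream API.
from pyasn1.codec.der import decoder as der_decoder
from pyasn1_modules import rfc5652, rfc6402


def unwrap_cms_layers(substrate):
    # Yield (content-type OID, decoded object) for each nested layer,
    # outermost first, following the same dispatch tables as the test.
    specs = {
        rfc5652.id_ct_contentInfo: rfc5652.ContentInfo(),
        rfc5652.id_signedData: rfc5652.SignedData(),
        rfc6402.id_cct_PKIData: rfc6402.PKIData(),
    }
    nextSubstrate = {
        rfc5652.id_ct_contentInfo: lambda obj: obj['content'],
        rfc5652.id_signedData: lambda obj: obj['encapContentInfo']['eContent'],
        rfc6402.id_cct_PKIData: lambda obj: None,
    }
    nextLayer = {
        rfc5652.id_ct_contentInfo: lambda obj: obj['contentType'],
        rfc5652.id_signedData: lambda obj: obj['encapContentInfo']['eContentType'],
        rfc6402.id_cct_PKIData: lambda obj: None,
    }

    layer = rfc5652.id_ct_contentInfo
    while layer:
        asn1Object, _ = der_decoder.decode(substrate, asn1Spec=specs[layer])
        yield layer, asn1Object
        substrate = nextSubstrate[layer](asn1Object)
        layer = nextLayer[layer](asn1Object)
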
diff --git a/third_party/python/pyasn1-modules/tools/cmcdump.py b/third_party/python/pyasn1-modules/tools/cmcdump.py
new file mode 100755
index 0000000000..bce48b1990
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/cmcdump.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+#
+# Read CMC certificate request with wrappers on stdin, parse each into
+# plain text, then build substrate from it
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc5652, rfc6402, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat cmc_request.pem | %s""" % (sys.argv[0],))
+ sys.exit(-1)
+
+reqCnt = 0
+
+substrate = pem.readBase64FromFile(sys.stdin)
+
+_, rest = decoder.decode(substrate, asn1Spec=rfc5652.ContentInfo())
+assert not rest
+
+next_layer = rfc5652.id_ct_contentInfo
+data = substrate
+while next_layer:
+ if next_layer == rfc5652.id_ct_contentInfo:
+ layer, rest = decoder.decode(data, asn1Spec=rfc5652.ContentInfo())
+ assert encoder.encode(layer) == data, 'wrapper recode fails'
+ assert not rest
+
+ print(" * New layer (wrapper):")
+ print(layer.prettyPrint())
+
+ next_layer = layer['contentType']
+ data = layer['content']
+
+ elif next_layer == rfc5652.id_signedData:
+ layer, rest = decoder.decode(data, asn1Spec=rfc5652.SignedData())
+ assert encoder.encode(layer) == data, 'wrapper recode fails'
+ assert not rest
+
+ print(" * New layer (wrapper):")
+ print(layer.prettyPrint())
+
+ next_layer = layer['encapContentInfo']['eContentType']
+ data = layer['encapContentInfo']['eContent']
+
+ elif next_layer == rfc6402.id_cct_PKIData:
+ layer, rest = decoder.decode(data, asn1Spec=rfc6402.PKIData())
+ assert encoder.encode(layer) == data, 'pkidata recode fails'
+ assert not rest
+
+ print(" * New layer (pkidata):")
+ print(layer.prettyPrint())
+
+ next_layer = None
+ data = None
diff --git a/third_party/python/pyasn1-modules/tools/cmpdump.py b/third_party/python/pyasn1-modules/tools/cmpdump.py
new file mode 100755
index 0000000000..c89951ac9f
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/cmpdump.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Read ASN.1/PEM CMP message on stdin, parse into
+# plain text, then build substrate from it
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc4210, pem
+from pyasn1 import debug
+import sys
+
+if len(sys.argv) == 2 and sys.argv[1] == '-d':
+ debug.setLogger(debug.Debug('all'))
+elif len(sys.argv) != 1:
+ print("""Usage:
+$ cat cmp.pem | %s [-d]""" % sys.argv[0])
+ sys.exit(-1)
+
+pkiMessage = rfc4210.PKIMessage()
+
+substrate = pem.readBase64FromFile(sys.stdin)
+if not substrate:
+ sys.exit(0)
+
+pkiMsg, rest = decoder.decode(substrate, asn1Spec=pkiMessage)
+
+print(pkiMsg.prettyPrint())
+
+assert encoder.encode(pkiMsg) == substrate, 'CMP message recode fails'
diff --git a/third_party/python/pyasn1-modules/tools/crldump.py b/third_party/python/pyasn1-modules/tools/crldump.py
new file mode 100755
index 0000000000..b871ddd962
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/crldump.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Read X.509 CRLs on stdin, print them pretty and encode back into
+# original wire format.
+# CRLs can be generated with the "openssl ca -gencrl ..." command.
+#
+from pyasn1_modules import rfc2459, pem
+from pyasn1.codec.der import encoder, decoder
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat crl.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+asn1Spec = rfc2459.CertificateList()
+
+cnt = 0
+
+while True:
+ idx, substrate = pem.readPemBlocksFromFile(sys.stdin, ('-----BEGIN X509 CRL-----', '-----END X509 CRL-----'))
+ if not substrate:
+ break
+
+ key, rest = decoder.decode(substrate, asn1Spec=asn1Spec)
+
+ if rest:
+ substrate = substrate[:-len(rest)]
+
+ print(key.prettyPrint())
+
+    assert encoder.encode(key) == substrate, 'crl recode fails'
+
+ cnt += 1
+
+print('*** %s CRL(s) re/serialized' % cnt)
diff --git a/third_party/python/pyasn1-modules/tools/crmfdump.py b/third_party/python/pyasn1-modules/tools/crmfdump.py
new file mode 100755
index 0000000000..efb0ffc0e4
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/crmfdump.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Read ASN.1/PEM X.509 CRMF request on stdin, parse into
+# plain text, then build substrate from it
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc2511, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat crmf.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+certReq = rfc2511.CertReqMessages()
+
+substrate = pem.readBase64FromFile(sys.stdin)
+if not substrate:
+ sys.exit(0)
+
+cr, rest = decoder.decode(substrate, asn1Spec=certReq)
+
+print(cr.prettyPrint())
+
+assert encoder.encode(cr) == substrate, 'crmf recode fails'
diff --git a/third_party/python/pyasn1-modules/tools/ocspclient.py b/third_party/python/pyasn1-modules/tools/ocspclient.py
new file mode 100755
index 0000000000..07ff5ada68
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/ocspclient.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+import hashlib
+import sys
+
+try:
+ import urllib2
+except ImportError:
+ import urllib.request as urllib2
+
+from pyasn1.codec.der import decoder, encoder
+from pyasn1.type import univ
+
+from pyasn1_modules import rfc2560, rfc2459, pem
+
+sha1oid = univ.ObjectIdentifier((1, 3, 14, 3, 2, 26))
+
+
+# noinspection PyClassHasNoInit
+class ValueOnlyBitStringEncoder(encoder.encoder.BitStringEncoder):
+ # These methods just do not encode tag and length fields of TLV
+ def encodeTag(self, *args):
+ return ''
+
+ def encodeLength(self, *args):
+ return ''
+
+ def encodeValue(*args):
+ substrate, isConstructed = encoder.encoder.BitStringEncoder.encodeValue(*args)
+ # OCSP-specific hack follows: cut off the "unused bit count"
+ # encoded bit-string value.
+ return substrate[1:], isConstructed
+
+ def __call__(self, bitStringValue):
+ return self.encode(None, bitStringValue, defMode=True, maxChunkSize=0)
+
+
+valueOnlyBitStringEncoder = ValueOnlyBitStringEncoder()
+
+
+# noinspection PyShadowingNames
+def mkOcspRequest(issuerCert, userCert):
+ issuerTbsCertificate = issuerCert.getComponentByName('tbsCertificate')
+ issuerSubject = issuerTbsCertificate.getComponentByName('subject')
+
+ userTbsCertificate = userCert.getComponentByName('tbsCertificate')
+ userIssuer = userTbsCertificate.getComponentByName('issuer')
+
+ assert issuerSubject == userIssuer, '%s\n%s' % (
+ issuerSubject.prettyPrint(), userIssuer.prettyPrint()
+ )
+
+ userIssuerHash = hashlib.sha1(
+ encoder.encode(userIssuer)
+ ).digest()
+
+ issuerSubjectPublicKey = issuerTbsCertificate.getComponentByName('subjectPublicKeyInfo').getComponentByName(
+ 'subjectPublicKey')
+
+ issuerKeyHash = hashlib.sha1(
+ valueOnlyBitStringEncoder(issuerSubjectPublicKey)
+ ).digest()
+
+ userSerialNumber = userTbsCertificate.getComponentByName('serialNumber')
+
+ # Build request object
+
+ request = rfc2560.Request()
+
+ reqCert = request.setComponentByName('reqCert').getComponentByName('reqCert')
+
+ hashAlgorithm = reqCert.setComponentByName('hashAlgorithm').getComponentByName('hashAlgorithm')
+ hashAlgorithm.setComponentByName('algorithm', sha1oid)
+
+ reqCert.setComponentByName('issuerNameHash', userIssuerHash)
+ reqCert.setComponentByName('issuerKeyHash', issuerKeyHash)
+ reqCert.setComponentByName('serialNumber', userSerialNumber)
+
+ ocspRequest = rfc2560.OCSPRequest()
+
+ tbsRequest = ocspRequest.setComponentByName('tbsRequest').getComponentByName('tbsRequest')
+ tbsRequest.setComponentByName('version', 'v1')
+
+ requestList = tbsRequest.setComponentByName('requestList').getComponentByName('requestList')
+ requestList.setComponentByPosition(0, request)
+
+ return ocspRequest
+
+
+def parseOcspResponse(ocspResponse):
+ responseStatus = ocspResponse.getComponentByName('responseStatus')
+ assert responseStatus == rfc2560.OCSPResponseStatus('successful'), responseStatus.prettyPrint()
+ responseBytes = ocspResponse.getComponentByName('responseBytes')
+ responseType = responseBytes.getComponentByName('responseType')
+ assert responseType == rfc2560.id_pkix_ocsp_basic, responseType.prettyPrint()
+
+ response = responseBytes.getComponentByName('response')
+
+ basicOCSPResponse, _ = decoder.decode(
+ response, asn1Spec=rfc2560.BasicOCSPResponse()
+ )
+
+ tbsResponseData = basicOCSPResponse.getComponentByName('tbsResponseData')
+
+ response0 = tbsResponseData.getComponentByName('responses').getComponentByPosition(0)
+
+ return (
+ tbsResponseData.getComponentByName('producedAt'),
+ response0.getComponentByName('certID'),
+ response0.getComponentByName('certStatus').getName(),
+ response0.getComponentByName('thisUpdate')
+ )
+
+
+if len(sys.argv) != 2:
+ print("""Usage:
+$ cat CACertificate.pem userCertificate.pem | %s <ocsp-responder-url>""" % sys.argv[0])
+ sys.exit(-1)
+else:
+ ocspUrl = sys.argv[1]
+
+# Parse CA and user certificates
+
+issuerCert, _ = decoder.decode(
+ pem.readPemBlocksFromFile(
+ sys.stdin, ('-----BEGIN CERTIFICATE-----', '-----END CERTIFICATE-----')
+ )[1],
+ asn1Spec=rfc2459.Certificate()
+)
+# noinspection PyRedeclaration
+userCert, _ = decoder.decode(
+ pem.readPemBlocksFromFile(
+ sys.stdin, ('-----BEGIN CERTIFICATE-----', '-----END CERTIFICATE-----')
+ )[1],
+ asn1Spec=rfc2459.Certificate()
+)
+
+# Build OCSP request
+
+ocspReq = mkOcspRequest(issuerCert, userCert)
+
+# Use HTTP POST to get response (see Appendix A of RFC 2560)
+# In case you need proxies, set the http_proxy env variable
+
+httpReq = urllib2.Request(
+ ocspUrl,
+ encoder.encode(ocspReq),
+ {'Content-Type': 'application/ocsp-request'}
+)
+httpRsp = urllib2.urlopen(httpReq).read()
+
+# Process OCSP response
+
+# noinspection PyRedeclaration
+ocspRsp, _ = decoder.decode(httpRsp, asn1Spec=rfc2560.OCSPResponse())
+
+producedAt, certId, certStatus, thisUpdate = parseOcspResponse(ocspRsp)
+
+print('Certificate ID %s is %s at %s till %s\n' % (certId.getComponentByName('serialNumber'),
+ certStatus, producedAt, thisUpdate))
diff --git a/third_party/python/pyasn1-modules/tools/ocspreqdump.py b/third_party/python/pyasn1-modules/tools/ocspreqdump.py
new file mode 100755
index 0000000000..40c088a130
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/ocspreqdump.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Read ASN.1/PEM OCSP request on stdin, parse into
+# plain text, then build substrate from it
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc2560, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat ocsp-request.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+ocspReq = rfc2560.OCSPRequest()
+
+substrate = pem.readBase64FromFile(sys.stdin)
+if not substrate:
+ sys.exit(0)
+
+cr, rest = decoder.decode(substrate, asn1Spec=ocspReq)
+
+print(cr.prettyPrint())
+
+assert encoder.encode(cr) == substrate, 'OCSP request recode fails'
diff --git a/third_party/python/pyasn1-modules/tools/ocsprspdump.py b/third_party/python/pyasn1-modules/tools/ocsprspdump.py
new file mode 100755
index 0000000000..ca52f64bd7
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/ocsprspdump.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Read ASN.1/PEM OCSP response on stdin, parse into
+# plain text, then build substrate from it
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc2560, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat ocsp-response.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+ocspReq = rfc2560.OCSPResponse()
+
+substrate = pem.readBase64FromFile(sys.stdin)
+if not substrate:
+ sys.exit(0)
+
+cr, rest = decoder.decode(substrate, asn1Spec=ocspReq)
+
+print(cr.prettyPrint())
+
+assert encoder.encode(cr) == substrate, 'OCSP response recode fails'
diff --git a/third_party/python/pyasn1-modules/tools/pkcs10dump.py b/third_party/python/pyasn1-modules/tools/pkcs10dump.py
new file mode 100755
index 0000000000..56417ae483
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/pkcs10dump.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Read ASN.1/PEM X.509 certificate requests (PKCS#10 format) on stdin,
+# parse each into plain text, then build substrate from it
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc2314, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat certificateRequest.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+certType = rfc2314.CertificationRequest()
+
+certCnt = 0
+
+while True:
+ idx, substrate = pem.readPemBlocksFromFile(
+ sys.stdin, ('-----BEGIN CERTIFICATE REQUEST-----',
+ '-----END CERTIFICATE REQUEST-----')
+ )
+ if not substrate:
+ break
+
+ cert, rest = decoder.decode(substrate, asn1Spec=certType)
+
+ if rest:
+ substrate = substrate[:-len(rest)]
+
+ print(cert.prettyPrint())
+
+ assert encoder.encode(cert) == substrate, 'cert recode fails'
+
+ certCnt += 1
+
+print('*** %s PEM certificate request(s) de/serialized' % certCnt)
diff --git a/third_party/python/pyasn1-modules/tools/pkcs1dump.py b/third_party/python/pyasn1-modules/tools/pkcs1dump.py
new file mode 100755
index 0000000000..f205d779cc
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/pkcs1dump.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Read unencrypted PKCS#1/PKIX-compliant, PEM&DER encoded private keys on
+# stdin, print them pretty and encode back into original wire format.
+# Private keys can be generated with "openssl genrsa|gendsa" commands.
+#
+from pyasn1_modules import rfc2459, rfc2437, pem
+from pyasn1.codec.der import encoder, decoder
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat rsakey.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+cnt = 0
+
+while True:
+ idx, substrate = pem.readPemBlocksFromFile(
+ sys.stdin,
+ ('-----BEGIN RSA PRIVATE KEY-----', '-----END RSA PRIVATE KEY-----'),
+ ('-----BEGIN DSA PRIVATE KEY-----', '-----END DSA PRIVATE KEY-----')
+ )
+ if not substrate:
+ break
+
+ if idx == 0:
+ asn1Spec = rfc2437.RSAPrivateKey()
+ elif idx == 1:
+ asn1Spec = rfc2459.DSAPrivateKey()
+ else:
+ break
+
+ key, rest = decoder.decode(substrate, asn1Spec=asn1Spec)
+
+ if rest:
+ substrate = substrate[:-len(rest)]
+
+ print(key.prettyPrint())
+
+    assert encoder.encode(key) == substrate, 'private key recode fails'
+
+ cnt += 1
+
+print('*** %s key(s) re/serialized' % cnt)
diff --git a/third_party/python/pyasn1-modules/tools/pkcs7dump.py b/third_party/python/pyasn1-modules/tools/pkcs7dump.py
new file mode 100755
index 0000000000..72fe70d7b3
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/pkcs7dump.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Read ASN.1/PEM PKCS#7 on stdin, parse it into plain text,
+# then build substrate from it
+#
+from pyasn1_modules import rfc2315, pem
+from pyasn1.codec.der import encoder, decoder
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat pkcs7Certificate.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+idx, substrate = pem.readPemBlocksFromFile(
+ sys.stdin, ('-----BEGIN PKCS7-----', '-----END PKCS7-----')
+)
+
+assert substrate, 'bad PKCS7 data on input'
+
+contentInfo, rest = decoder.decode(substrate, asn1Spec=rfc2315.ContentInfo())
+
+if rest:
+ substrate = substrate[:-len(rest)]
+
+print(contentInfo.prettyPrint())
+
+assert encoder.encode(contentInfo) == substrate, 're-encode fails'
+
+contentType = contentInfo.getComponentByName('contentType')
+
+contentInfoMap = {
+ (1, 2, 840, 113549, 1, 7, 1): rfc2315.Data(),
+ (1, 2, 840, 113549, 1, 7, 2): rfc2315.SignedData(),
+ (1, 2, 840, 113549, 1, 7, 3): rfc2315.EnvelopedData(),
+ (1, 2, 840, 113549, 1, 7, 4): rfc2315.SignedAndEnvelopedData(),
+ (1, 2, 840, 113549, 1, 7, 5): rfc2315.DigestedData(),
+ (1, 2, 840, 113549, 1, 7, 6): rfc2315.EncryptedData()
+}
+
+content, _ = decoder.decode(
+ contentInfo.getComponentByName('content'),
+ asn1Spec=contentInfoMap[contentType]
+)
+
+print(content.prettyPrint())
diff --git a/third_party/python/pyasn1-modules/tools/pkcs8dump.py b/third_party/python/pyasn1-modules/tools/pkcs8dump.py
new file mode 100755
index 0000000000..2bb83884a8
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/pkcs8dump.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Read a bunch of ASN.1/PEM plain/encrypted private keys in PKCS#8
+# format on stdin, parse each into plain text, then build substrate from it
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc5208, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat pkcs8key.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+cnt = 0
+
+while True:
+ idx, substrate = pem.readPemBlocksFromFile(
+ sys.stdin,
+ ('-----BEGIN PRIVATE KEY-----', '-----END PRIVATE KEY-----'),
+ ('-----BEGIN ENCRYPTED PRIVATE KEY-----', '-----END ENCRYPTED PRIVATE KEY-----')
+ )
+ if not substrate:
+ break
+
+ if idx == 0:
+ asn1Spec = rfc5208.PrivateKeyInfo()
+ elif idx == 1:
+ asn1Spec = rfc5208.EncryptedPrivateKeyInfo()
+ else:
+ break
+
+ key, rest = decoder.decode(substrate, asn1Spec=asn1Spec)
+
+ if rest:
+ substrate = substrate[:-len(rest)]
+
+ print(key.prettyPrint())
+
+ assert encoder.encode(key) == substrate, 'pkcs8 recode fails'
+
+ cnt += 1
+
+print('*** %s PKCS#8 key(s) de/serialized' % cnt)
diff --git a/third_party/python/pyasn1-modules/tools/snmpget.py b/third_party/python/pyasn1-modules/tools/snmpget.py
new file mode 100755
index 0000000000..cd9fec21a2
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/snmpget.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Generate SNMPGET request, parse response
+#
+from pyasn1.codec.ber import encoder, decoder
+from pyasn1_modules import rfc1157
+import sys
+import socket
+
+if len(sys.argv) != 4:
+ print("""Usage:
+$ %s <community> <host> <OID>""" % sys.argv[0])
+ sys.exit(-1)
+
+msg = rfc1157.Message()
+msg.setComponentByPosition(0)
+msg.setComponentByPosition(1, sys.argv[1])
+# pdu
+pdus = msg.setComponentByPosition(2).getComponentByPosition(2)
+pdu = pdus.setComponentByPosition(0).getComponentByPosition(0)
+pdu.setComponentByPosition(0, 123)
+pdu.setComponentByPosition(1, 0)
+pdu.setComponentByPosition(2, 0)
+vbl = pdu.setComponentByPosition(3).getComponentByPosition(3)
+vb = vbl.setComponentByPosition(0).getComponentByPosition(0)
+vb.setComponentByPosition(0, sys.argv[3])
+v = vb.setComponentByPosition(1).getComponentByPosition(1).setComponentByPosition(0).getComponentByPosition(0).setComponentByPosition(3).getComponentByPosition(3)
+
+print('sending: %s' % msg.prettyPrint())
+
+sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+sock.sendto(encoder.encode(msg), (sys.argv[2], 161))
+
+substrate, _ = sock.recvfrom(2048)
+
+# noinspection PyRedeclaration
+rMsg, _ = decoder.decode(substrate, asn1Spec=msg)
+
+print('received: %s' % rMsg.prettyPrint())
diff --git a/third_party/python/pyasn1-modules/tools/x509dump-rfc5280.py b/third_party/python/pyasn1-modules/tools/x509dump-rfc5280.py
new file mode 100755
index 0000000000..482df7e5bf
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/x509dump-rfc5280.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+# coding: utf-8
+#
+# This file is part of pyasn1-modules software.
+#
+# Created by Stanisław Pitucha with asn1ate tool.
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Read ASN.1/PEM X.509 certificates on stdin, parse each into plain text,
+# then build substrate from it (using RFC5280)
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc5280, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat CACertificate.pem | %s
+$ cat userCertificate.pem | %s""" % (sys.argv[0], sys.argv[0]))
+ sys.exit(-1)
+
+certType = rfc5280.Certificate()
+
+certCnt = 0
+
+while 1:
+ idx, substrate = pem.readPemBlocksFromFile(
+ sys.stdin, ('-----BEGIN CERTIFICATE-----',
+ '-----END CERTIFICATE-----')
+ )
+ if not substrate:
+ break
+
+ cert, rest = decoder.decode(substrate, asn1Spec=certType)
+
+ if rest:
+ substrate = substrate[:-len(rest)]
+
+ print(cert.prettyPrint())
+
+ assert encoder.encode(cert) == substrate, 'cert recode fails'
+
+ certCnt += 1
+
+print('*** %s PEM cert(s) de/serialized' % certCnt)
diff --git a/third_party/python/pyasn1-modules/tools/x509dump.py b/third_party/python/pyasn1-modules/tools/x509dump.py
new file mode 100755
index 0000000000..2c51c6a516
--- /dev/null
+++ b/third_party/python/pyasn1-modules/tools/x509dump.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1-modules software.
+#
+# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
+# License: http://pyasn1.sf.net/license.html
+#
+# Read ASN.1/PEM X.509 certificates on stdin, parse each into plain text,
+# then build substrate from it
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc2459, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat CACertificate.pem | %s
+$ cat userCertificate.pem | %s""" % (sys.argv[0], sys.argv[0]))
+ sys.exit(-1)
+
+certType = rfc2459.Certificate()
+
+certCnt = 0
+
+while True:
+ idx, substrate = pem.readPemBlocksFromFile(
+ sys.stdin, ('-----BEGIN CERTIFICATE-----',
+ '-----END CERTIFICATE-----')
+ )
+ if not substrate:
+ break
+
+ cert, rest = decoder.decode(substrate, asn1Spec=certType)
+
+ if rest:
+ substrate = substrate[:-len(rest)]
+
+ print(cert.prettyPrint())
+
+ assert encoder.encode(cert) == substrate, 'cert recode fails'
+
+ certCnt += 1
+
+print('*** %s PEM cert(s) de/serialized' % certCnt)
diff --git a/third_party/python/pyasn1/CHANGES.rst b/third_party/python/pyasn1/CHANGES.rst
new file mode 100644
index 0000000000..3ba82c06e3
--- /dev/null
+++ b/third_party/python/pyasn1/CHANGES.rst
@@ -0,0 +1,716 @@
+
+Revision 0.4.8, released 16-11-2019
+-----------------------------------
+
+- Added the ability to combine `SingleValueConstraint` and
+  `PermittedAlphabetConstraint` objects into one to properly model the
+  `FROM ... EXCEPT ...` ASN.1 clause.
+
+Revision 0.4.7, released 01-09-2019
+-----------------------------------
+
+- Added the `isInconsistent` property to all constructed types. This property
+  conceptually replaces the `verifySizeSpec` method to serve a more general
+  purpose, e.g. ensuring all required fields are in good shape. By default
+  this check invokes subtype constraints verification and is run by codecs
+  on value de/serialisation.
+- Deprecated the `sizeSpec` attribute and keyword argument. It is now
+  recommended to pass `ValueSizeConstraint`, as well as all other constraints,
+  to `subtypeSpec`.
+- Fixed a design bug in the way items assigned to constructed
+  types are verified. Now, if an `Asn1Type`-based object is assigned, its
+  compatibility is verified based on it having all the tags and constraint
+  objects of the type in the field definition. When a bare Python value is
+  assigned, the field type object is cloned and initialized with the
+  bare value (constraints verification runs at that moment).
+- Added `WithComponentsConstraint` along with related
+ `ComponentPresentConstraint` and `ComponentAbsentConstraint` classes
+ to be used with `Sequence`/`Set` types representing
+ `SET ... WITH COMPONENTS ...` like ASN.1 constructs.
+
+Revision 0.4.6, released 31-07-2019
+-----------------------------------
+
+- Added previously missing `SET OF ANY` construct encoding/decoding support.
+- Added `omitEmptyOptionals` option which is respected by `Sequence`
+ and `Set` encoders. When `omitEmptyOptionals` is set to `True`, empty
+ initialized optional components are not encoded. Default is `False`.
+- New elements to `SequenceOf`/`SetOf` objects can now be added at any
+ position - the requirement for the new elements to reside at the end
+ of the existing ones (i.e. s[len(s)] = 123) is removed.
+- List-like slicing support added to `SequenceOf`/`SetOf` objects.
+- Removed default initializer from `SequenceOf`/`SetOf` types to ensure
+ consistent behaviour with the rest of ASN.1 types. Before this change,
+ `SequenceOf`/`SetOf` instances immediately become value objects behaving
+ like an empty list. With this change, `SequenceOf`/`SetOf` objects
+ remain schema objects unless a component is added or `.clear()` is
+ called.
+ This change can potentially cause incompatibilities with existing
+ pyasn1 objects which assume `SequenceOf`/`SetOf` instances are value
+ objects right upon instantiation.
+  The behaviour of `Sequence`/`Set` types depends on the `componentType`
+  initializer: if no `componentType` is given, the behaviour is the same
+  as that of `SequenceOf`/`SetOf`. If `componentType` is given, but
+  neither optional nor defaulted components are present, the created
+  instance remains a schema object. If, however, an optional or
+  defaulted component is present, the created instance immediately
+  becomes a value object.
+- Added `.reset()` method to all constructed types to turn value object
+ into a schema object.
+- Added `PyAsn1UnicodeDecodeError`/`PyAsn1UnicodeEncodeError` exceptions
+  to help the caller handle unicode errors happening internally
+  to pyasn1 at the upper layers.
+- Added support for subseconds CER/DER encoding edge cases in
+ `GeneralizedTime` codec.
+- Fixed 3-digit fractional seconds value CER/DER encoding of
+ `GeneralizedTime`.
+- Fixed `AnyDecoder` to accept a possible `TagMap` as `asn1Spec`
+  to make dumping the raw value operational
+
+Revision 0.4.5, released 29-12-2018
+-----------------------------------
+
+- Debug logging refactored for more efficiency when disabled and
+ for more functionality when in use. Specifically, the global
+ LOG object can easily be used from any function/method, not just
+ from codec main loop as it used to be.
+- More debug logging added to BER family of codecs to ease encoding
+ problems troubleshooting.
+- Copyright notice extended to the year 2019
+- Fixed defaulted constructed SEQUENCE component initialization.
+
+Revision 0.4.4, released 26-07-2018
+-----------------------------------
+
+- Fixed native encoder type map to include all ASN.1 types
+ rather than just ambiguous ones
+- Fixed crash in `.prettyPrint` of `Sequence` and `Set` occurring
+ at OPTIONAL components
+
+Revision 0.4.3, released 23-05-2018
+-----------------------------------
+
+- Copyright notice extended to the year 2018
+- Fixed GeneralizedTime.asDateTime to perform milliseconds conversion
+ correctly
+
+Revision 0.4.2, released 23-11-2017
+-----------------------------------
+
+- Fixed explicit tag splitting in chunked encoding mode at
+ OctetString and BitString encoders
+
+Revision 0.4.1, released 23-11-2017
+-----------------------------------
+
+- ANY DEFINED BY clause support implemented
+- Encoders refactored to take either a value (as ASN.1 object)
+ or a Python value plus ASN.1 schema
+- BitString decoder optimised for better performance when running on
+ constructed encoding
+- Constructed types' .getComponentBy*() methods accept the `default`
+  parameter whose value is returned when a schema object would otherwise
+  be returned
+- Constructed types' .getComponentBy*() methods accept the `instantiate`
+ parameter to disable automatic inner component instantiation
+- The ASN.1 types' `__repr__` implementation reworked for better readability
+ at the cost of not being `eval`-compliant
+- Most ASN.1 types' `__str__` magic methods (except for OctetString and
+ character types) reworked to call `.prettyPrint()` rather than having
+ `.prettyPrint()` call `__str__` as it was before. The intention is
+ to eventually deprecate `.prettyPrint()` in favor of `str()`.
+ Another related change is that `str()` of enumerations and boolean
+ types now returns the string label instead of a number.
+- Fixed Choice.clear() to fully reset internal state of the object
+- Sphinx documentation rearranged, simplified and reworded
+- The `isValue` singleton is now the only way to indicate ASN.1 schema
+ as opposed to ASN.1 schema instance. The legacy `None` initializer
+ support has been removed.
+- Changed `Null` object initialization behaviour: the previous default
+ value (`''`) is not set anymore. Thus a `Null()` call produces an
+ ASN.1 schema object, while `Null('')` produces a value object.
+- Migrated all docs and references from SourceForge
+- Imports PEP8'ed
+- Fixed ASN.1 encoder not to omit empty substrate produced for inner
+ component if the inner component belongs to the simple class (as
+ opposed to constructed class)
+- Fixed CER/DER encoders to respect tagged CHOICE when ordering
+ SET components
+- Fixed ASN.1 types not to interfere with the Pickle protocol
+- Fixed Sequence/SequenceOf types decoding heuristics in schema-less
+ decoding mode
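+
+A minimal sketch of the new `.getComponentBy*()` keyword arguments and the
+`Null` initialization behaviour described above (assuming the 0.4.x API;
+the `Rec` type is purely illustrative)::
+
+    from pyasn1.type.namedtype import NamedTypes, OptionalNamedType
+    from pyasn1.type.univ import Integer, Null, Sequence
+
+    class Rec(Sequence):
+        componentType = NamedTypes(OptionalNamedType('room', Integer()))
+
+    rec = Rec()
+
+    # the supplied default is returned instead of a schema object
+    # because 'room' is not set
+    assert rec.getComponentByName('room', default=None) is None
+
+    # look up the component without instantiating it as a side effect;
+    # this returns the noValue sentinel here since 'room' is unset
+    rec.getComponentByName('room', instantiate=False)
+
+    assert not Null().isValue    # Null() now produces a schema object
+    assert Null('').isValue      # Null('') produces a value object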
+
+Revision 0.3.7, released 04-10-2017
+-----------------------------------
+
+- Fixed ASN.1 time types pickling/deepcopy'ing
+
+Revision 0.3.6, released 21-09-2017
+-----------------------------------
+
+- End-of-octets encoding optimized at ASN.1 encoders
+- The __getitem__/__setitem__ behavior of Set/Sequence and SetOf/SequenceOf
+ objects aligned with the canonical Mapping and Sequence protocols in part
+- Fixed crash in ASN.1 encoder when encoding an explicitly tagged
+ component of a Sequence
+
+Revision 0.3.5, released 16-09-2017
+-----------------------------------
+
+- Codecs signatures unified and pass the options kwargs through the
+ call chain
+- Explicit tag encoding optimized to avoid unnecessary copying
+- End-of-octets sentinel encoding optimized
+- Refactored ASN.1 codecs properties to silently enforce proper
+ length and chunk size encoding modes
+- Fixed DER encoder to always produce primitive encoding
+- Fixed crash at SequenceOf native decoder
+- Fixed Real.prettyPrint() to fail gracefully on overflow
+- Fixed a couple of crashes when debug mode is enabled
+
+Revision 0.3.4, released 07-09-2017
+-----------------------------------
+
+- Fixed Native encoder to handle SEQUENCE/SET objects without
+ the componentType property
+- Added missing component-less SEQUENCE/SET objects dict duck-typing support
+- Fixed unnecessary duplicate tags detection at NamesType.tagMap
+- Fixed crash at SEQUENCE and SEQUENCE OF CER encoder when running
+ in schemaless mode
+- Fixed Character types instantiation from OctetString type -- double
+ unicode decoding may have scrambled the data
+
+Revision 0.3.3, released 27-08-2017
+-----------------------------------
+
+- Improved ASN.1 types instantiation performance
+- Improved BER/CER/DER decoder performance by not unconditionally casting
+ substrate into str/bytes.
+- Fixed exponential index size growth bug when building ambiguous
+ NamedTypes tree
+- Fixed constructed types decoding failure at BER codec if running
+ in schema-less mode
+- Fixed crash on prettyPrint'ing a SEQUENCE with no defined components
+- Fixed SetOf ordering at CER/DER encoder
+- Fixed crash on conditional binascii module import
+- Fix to TagSet hash value build
+
+Revision 0.3.2, released 04-08-2017
+-----------------------------------
+
+- Fixed SequenceOf/SetOf types initialization syntax to remain
+ backward compatible with pyasn1 0.2.*
+- Rectified thread safety issues by moving lazy, run-time computation
+ into object initializer.
+- Fixed .isValue property to return True for empty SetOf/SequenceOf
+ objects
+- Fixed GeneralizedTime/UTCTime CER/DER codecs to actually get invoked
+- Fixed DER/CER encoders handling optional SEQUENCE/SET fields containing
+ nested SEQUENCE/SET with optional fields.
+- Fixed crash in SequenceOf/SetOf pretty printing and decoding (in some
+ cases)
+- Fixed documentation markup issues.
+
+Revision 0.3.1, released 26-07-2017
+-----------------------------------
+
+- ASN.1 types __init__(), .clone() and .subtype() signatures
+ refactored into keyword arguments to simplify their signatures.
+- ASN.1 types initialization refactored to minimize the use of
+ relatively expensive isNoValue() call
+- Lazily pre-populate list of values of Sequence/Set/Choice types
+- NamedTypes comparison made more efficient
+- More efficient constraints computation and code clean up
+- The __getitem__() implementation of some ASN.1 types & tag object
+ refactored for better performance
+- BER/CER/DER value encoders refactored to produce either tuple of
+ bytes or octet-stream depending on what is more optimal
+- Reduced the frequency of expensive isinstance() calls
+- Tag-related classes optimized, refactored into properties and
+ documented.
+- The NamedValues implementation refactored to mimic Python dict, its use
+ in ASN.1 types refactored into properties and better documented.
+ WARNING: this change introduces a deviation from original API.
+- NamedType family of classes overhauled, optimized and documented.
+- The `componentType` attribute of constructed ASN.1 types turned
+ read-only on instances.
+- Sequence/Set DER/CER/DER decoder optimized to skip the case of
+ reordered components handling when not necessary.
+- Tags and constraints-related getter methods refactored into read-only
+ instance attributes.
+- The .hasValue() method refactored into .isValue property. All ASN.1
+ objects now support them, not just scalars.
+- The Real.{isInfinity, isPlusInfinity, isMinusInfinity} methods
+ refactored into properties and renamed to isInf, isPlusInf and isMinusInf
+- The end-of-octets type refactored to ensure it is a singleton. Codecs
+ changed to rely on that for better performance.
+- Codecs lookup made more efficient at BER/CER/DER decoder main loop by
+ assigning `typeId` to every ASN.1 type, not just ambiguous ones.
+- The .getComponent*() methods of constructed ASN.1 types changed
+ to lazily instantiate underlying type rather than return `None`.
+ This should simplify its API as initialization like `X[0][1] = 2` becomes
+ possible.
+ WARNING: this change introduces a deviation from the original API.
+- The .setComponent*() methods of SetOf/SequenceOf types changed not
+ to allow uninitialized "holes" inside the sequences of their components.
+ They now behave similarly to Python lists.
+ WARNING: this change introduces a deviation from the original API.
+- Default and optional components en/decoding of Constructed type
+ refactored towards better efficiency and more control.
+- OctetString and Any decoders optimized to avoid creating ASN.1
+ objects for chunks of substrate. Instead they now join substrate
+ chunks together and create ASN.1 object from it just once.
+- The GeneralizedTime and UTCTime types now support to/from Python
+ datetime object conversion.
+- Unit tests added for the `compat` sub-package.
+- Fixed BitString named bits initialization bug.
+- Fixed non-functional tag cache (when running Python 2) at DER decoder.
+- Fixed chunked encoding restriction on DER encoder.
+- Fixed SET components ordering at DER encoder.
+- Fixed BIT STRING & OCTET STRING encoding to be always non-chunked (i.e.
+ primitive) at DER encoder
+- Fixed `compat.integer.from_bytes()` behaviour on empty input.
+
+Revision 0.2.3, released 25-02-2017
+-----------------------------------
+
+- Improved SEQUENCE/SET/CHOICE decoding performance by maintaining a single shared
+ NamedType object for all instances of SEQUENCE/SET object.
+- Improved INTEGER encoding/decoding by switching to Python's built-in
+ integer serialisation functions.
+- Improved BitString performance by rebasing it onto Python int type and leveraging
+ fast Integer serialisation functions.
+- BitString type usability improved in many ways: for example bitshifting and
+ numeric operations on BitString are now possible.
+- Minor ObjectIdentifier type performance optimization.
+- ASN.1 character types refactored to keep unicode contents internally
+ (rather than serialised octet stream) and duck-type it directly.
+- ASN.1 OctetString initialized from a Python object performs bytes()
+ on it when running on Python 3 (used to do str() which is probably
+ less logical).
+- Missing support for NoValue.__sizeof__ added.
+- Added checks to make sure SEQUENCE/SET components being assigned
+ match the prototypes.
+- Setter methods for constructed types consistently accept matchTags
+ and matchConstraints flags to control the strictness of inner
+ components compatibility verification. Previously, these checks
+ were tied to verifyConstraints flag, now they are all independent.
+- General documentation improvements here and there.
+- Fix to __reversed__() magic to make it return an iterator.
+- Test suite simplified and unified.
+- The __all__ variable added to most of the Python modules.
+- The "test" directory renamed to "tests" so as not to collide with
+ the standard "test" module.
+
+Revision 0.2.2, released 07-02-2017
+-----------------------------------
+
+- FIX TO A SECURITY WEAKNESS: definite-length-only decoders could have successfully
+ processed indefinite length serialisation.
+- FIX TO A SECURITY WEAKNESS: canonical decoders (CER/DER) may have successfully
+ consumed non-canonical variations of (otherwise valid) serialisation.
+- Broken Enumerated subtyping fixed.
+
+Revision 0.2.1, released 05-02-2017
+-----------------------------------
+
+- FIX TO A SECURITY WEAKNESS: BER decoder improperly cached long tags.
+- New "native" codec implemented to transform pyasn1 types to Python built-in types and back.
+- Switched to new-style classes.
+- Sphinx documentation added.
+- BitString improvements:
+
+ * simple string of binary digits is now supported as initializer
+ * default str() yields a string of binary digits (used to yield str(tuple()))
+ * binValue and hexValue initializers added
+ * .asNumbers(), .asOctets() and .asInteger() representations added
+
+- Components of constructed ASN.1 types can now be populated with
+ uninitialized ASN.1 objects by assigning either the noValue sentinel or
+ the setupComponent() function return value, in addition to the now-legacy None sentinel.
+ This should improve code readability.
+- NoValue class improved to become a singleton and catch more kinds
+ of access to it.
+- Compatibility wrappers str2octs() and oct2strs() fixed to run over
+ iso-8859-1 encoding.
+- Integer changed to emit Real instance if division produces a float.
+- True division operation now supported by Integer type.
+- The __contains__(), __reversed__() methods implemented for container types
+- Iterator protocol support implemented for all container types.
+ Warning, warning, warning: this change may potentially affect backward
+ compatibility when:
+
+ * user class overrides __getitem__() without overriding __iter__()
+ * when user code iterates over SEQUENCE object to get its components (now keys will be yielded)
+
+- Almost complete Python list and dict protocols added to SequenceOf/SetOf and
+ Sequence/Set respectively
+- Fix to divmod operation implementation in Integer type.
+- Fix to IntegerDecoder's precomputed value map on Python 3.
+- Fix to base ASN.1 types to run in "unicode_literals" mode.
+- Fix to composite constraints "+" operands ordering (AbstractConstraintSet.__radd__)
+- Fix to constraints merge in .subtype() -- on merge existing constraints are
+ expanded to accommodate new constraints, not the other way round. When existing
+ constraints are wrapped in ConstraintsIntersection composite, additional
+ constraints being added on subtyping effectively further narrow the set of
+ allowed values, which aligns well with the notion of subtyping (see the
+ sketch after this list).
+- Fix to NamedTypes methods to handle .getTagMap() returning None
+- Fix to Set/Sequence.setDefaultComponents() to return self
+- Copyright notice added to non-trivial source code files.
+- Author's email changed, copyright extended to 2017
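+
+A minimal sketch of the constraints-narrowing behaviour of .subtype()
+described above (assuming the pyasn1 `univ` and `constraint` modules; the
+`Port` names are purely illustrative)::
+
+    from pyasn1.type import constraint, univ
+
+    Port = univ.Integer().subtype(
+        subtypeSpec=constraint.ValueRangeConstraint(0, 65535))
+
+    # further subtyping adds to, rather than replaces, the existing
+    # constraints, so the set of allowed values can only get narrower
+    PrivilegedPort = Port.subtype(
+        subtypeSpec=constraint.ValueRangeConstraint(0, 1023))
+
+    PrivilegedPort.clone(22)       # acceptable
+    # PrivilegedPort.clone(8080) would raise a constraint violation error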
+
+Revision 0.1.9, released 28-09-2015
+-----------------------------------
+
+- Wheel distribution format now supported.
+- Extensions added to text files, CVS attic flushed.
+- Fix to make uninitialized pyasn1 objects fail properly on hash().
+- Fix to ObjectIdentifier initialization from unicode string.
+- Fix to CER/DER Boolean decoder - fail on non single-octet payload.
+
+Revision 0.1.8, released 22-06-2015
+-----------------------------------
+
+- ObjectIdentifier codec fixed to work properly with arc 0 and arc 2 values.
+- Explicit limit on ObjectIdentifier arc value size removed.
+- Unicode initializer support added to OctetString type and derivatives.
+- New prettyPrintType() abstract method implemented for base pyasn1 types
+ to facilitate encoding errors analysis.
+- The __str__() method implemented for the Tag, TagSet and TagMap classes to
+ ease encoding errors troubleshooting.
+- Fix to SEQUENCE and SET types to give them their private componentTypes
+ collection (which is a NamedTypes object) so that they won't collide in
+ a MT execution environment.
+- Missing T61String, ISO646String character types and the ObjectDescriptor
+ useful type added.
+- Distribute is gone, switched to setuptools completely.
+- Missing NamedValues.__repr__() added.
+- The base.NoValue() class, that indicates uninitialized ASN.1 object,
+ made public.
+- The base.NoValue() class instances now support __repr__(), which makes it
+ possible to perform repr() on uninitialized pyasn1 type objects.
+- When comparing ASN.1 types, by-tag and/or by-constraints matching
+ can now be performed with the isSuperTypeOf()/isSameTypeWith() optional
+ flags.
+- Constructed types now verify their consistency by invoking
+ isSameTypeWith(matchTags=True, matchConstraints=False) and
+ isSuperTypeOf(matchTags=False, matchConstraints=True) for each of their
+ components rather than isSuperTypeOf() as it used to be. The constraints
+ check can be enforced for isSameTypeWith() via the strictConstraints=True
+ attribute of constructed classes.
+- Constructed types can now be initialized with new .setComponents() method
+ which accepts both var-args and keyword-args. Default repr() modified to
+ reflect this change.
+- NamedTypes() and NamedValues() made comparable.
+- Test coverage extended to cover pyasn1 types __repr__() function.
+- The abs(Integer()) & abs(Real()) operation now returns respective pyasn1
+ type, not a Python type.
+- More Python magic methods implementations added to Integer & Real classes
+ (e.g. __pos__, __neg__, __round__, __floor__, __ceil__, __trunc__)
+- The Integer.__invert__ Python magic method implemented.
+- The OctetString.__int__() and .__float__() magic methods implemented.
+- Handle the case of null writer at Debug printer.
+- BitString encoder/decoder performance improved.
+- Built-in debugging is now based on Python logging module.
+- Fix to NamedType.__repr__() to work properly.
+- Fixes to __repr__() implementation of many built-in ASN.1 types to take into
+ account all of their initializers such as tagSet, subtypeSpec etc.
+- String typed float initializer to REAL type now supported.
+- Float typed mantissa initializer to REAL type for base 2 added.
+- Encoding bases 8 and 16 support for REAL type binary encoder added.
+- More strict CER/DER encoders added for GeneralizedTime and UTCTime types.
+- Asn1Item.hasValue() added to easily distinguish initialized ASN.1 objects
+ from uninitialized ones (e.g. pure types).
+- Fix to REAL type binary decoder to handle different bases and scale factor.
+- Fix to TagSet.repr() to include [obsolete] baseTag information.
+- Fix to broken REAL type decoding handling.
+- Fix to BitString and OctetString decoders dealing with constructed
+ encoding -- it used to be possible to embed other types in substrate.
+- DER codec hardened not to tolerate indefinite length encoding/decoding.
+- Fix to end-of-octets sentinel handling:
+
+ + require strict two-zeros sentinel encoding
+ + recognize EOO sentinel only when explicitly requested by caller
+ of the decoder via allowEoo=True parameter (warning: API change)
+
+Revision 0.1.7
+--------------
+
+- License updated to vanilla BSD 2-Clause to ease package use
+ (https://opensource.org/licenses/BSD-2-Clause).
+- Test suite made discoverable by unittest/unittest2 discovery feature.
+- Fix to decoder working on indefinite length substrate -- end-of-octets
+ marker is now detected by both tag and value. Otherwise zero values may
+ interfere with end-of-octets marker.
+- Fix to decoder to fail in cases where tagFormat indicates inappropriate
+ format for the type (e.g. BOOLEAN is always PRIMITIVE, SET is always
+ CONSTRUCTED and OCTET STRING is either of the two)
+- Fix to REAL type encoder to force primitive encoding form encoding.
+- Fix to CHOICE decoder to handle explicitly tagged, indefinite length
+ mode encoding
+- Fix to REAL type decoder to handle negative REAL values correctly. Test
+ case added.
+
+Revision 0.1.6
+--------------
+
+- The compact (valueless) way of encoding zero INTEGERs introduced in
+ 0.1.5 seems to fail miserably as the world is filled with broken
+ BER decoders. So we had to back off the *encoder* for a while.
+ There's still the IntegerEncoder.supportCompactZero flag which
+ enables compact encoding form whenever it evaluates to True.
+- Report package version on debugging code initialization.
+
+Revision 0.1.5
+--------------
+
+- Documentation updated and split into chapters to better match
+ web-site contents.
+- Make prettyPrint() work for non-initialized pyasn1 data objects. It
+ used to throw an exception.
+- Fix to encoder to produce empty-payload INTEGER values for zeros
+- Fix to decoder to support empty-payload INTEGER and REAL values
+- Fix to unit test suites imports to be able to run each from
+ their current directory
+
+Revision 0.1.4
+--------------
+
+- Built-in codec debugging facility added
+- Added some more checks to ObjectIdentifier BER encoder catching
+ possible 2^8 overflow condition by two leading sub-OIDs
+- Implementations overriding the AbstractDecoder.valueDecoder method
+ changed to return the rest of substrate behind the item being processed
+ rather than the unprocessed substrate within the item (which is usually
+ empty).
+- Decoder's recursiveFlag feature generalized as a user callback function
+ which is passed an uninitialized object recovered from substrate and
+ its uninterpreted payload.
+- Catch inappropriate substrate type passed to decoder.
+- Expose tagMap/typeMap/Decoder objects at DER decoder to uniform API.
+- Obsolete __init__.MajorVersionId replaced with __init__.__version__
+ which is now in-sync with distutils.
+- Package classifiers updated.
+- The __init__.py's made non-empty (rumors are that they may be optimized
+ out by package managers).
+- Bail out gracefully whenever Python version is older than 2.4.
+- Fix to Real codec exponent encoding (should be in 2's complement form),
+ some more test cases added.
+- Fix in Boolean truth testing built-in methods
+- Fix to substrate underrun error handling at ObjectIdentifier BER decoder
+- Fix to BER Boolean decoder that allows other pre-computed
+ values besides 0 and 1
+- Fix to leading 0x80 octet handling in BER/CER/DER ObjectIdentifier decoder.
+ See https://www.esat.kuleuven.be/cosic/publications/article-1432.pdf
+
+Revision 0.1.3
+--------------
+
+- Include class name into asn1 value constraint violation exception.
+- Fix to OctetString.prettyOut() method that loses the leading zero when
+ building a hex string.
+
+Revision 0.1.2
+--------------
+
+- Fix to __long__() to actually return longs on py2k
+- Fix to OctetString.__str__() behaviour on a non-initialized object.
+- Fix to quote initializer of OctetString.__repr__()
+- Minor fix towards ObjectIdentifier.prettyIn() reliability
+- ObjectIdentifier.__str__() is aliased to prettyPrint()
+- Explicit repr() calls replaced with '%r'
+
+Revision 0.1.1
+--------------
+
+- Hex/bin string initializer to OctetString object reworked
+ (in a backward-incompatible manner)
+- Fixed float() infinity compatibility issue (affects 2.5 and earlier)
+- Fixed a bug/typo at Boolean CER encoder.
+- Major overhaul for Python 2.4 -- 3.2 compatibility:
+ + get rid of old-style types
+ + drop string module usage
+ + switch to rich comparison
+ + drop explicit long integer type use
+ + map()/filter() replaced with list comprehension
+ + apply() replaced with \*/\*\*args
+ + switched to use 'key' sort() callback function
+ + support both __nonzero__() and __bool__() methods
+ + modified not to use py3k-incompatible exception syntax
+ + getslice() operator fully replaced with getitem()
+ + dictionary operations made 2K/3K compatible
+ + base type for encoding substrate and OctetString-based types
+ is now 'bytes' when running py3k and 'str' otherwise
+ + OctetString and derivatives now unicode compliant.
+ + OctetString now supports two python-neutral getters: asOcts() & asInts()
+ + print OctetString content in hex whenever it is not printable otherwise
+ + in test suite, implicit relative import replaced with the absolute one
+ + in test suite, string constants replaced with numerics
+
+Revision 0.0.13
+---------------
+
+- Fix to base10 normalization function that loops on univ.Real(0)
+
+Revision 0.0.13b
+----------------
+
+- ASN.1 Real type is now supported properly.
+- Objects of Constructed types now support __setitem__()
+- Set/Sequence objects can now be addressed by their field names (string index)
+ and position (integer index).
+- Typo fix to ber.SetDecoder code that prevented decoding with a schema
+ from working.
+- Fix to explicitly tagged items decoding support.
+- Fix to OctetString.prettyPrint() to better handle non-printable content.
+- Fix to repr() workings of Choice objects.
+
+Revision 0.0.13a
+----------------
+
+- Major codec re-design.
+- Documentation significantly improved.
+- ASN.1 Any type is now supported.
+- All example ASN.1 modules moved to separate pyasn1-modules package.
+- Fix to initial sub-OID overflow condition detection at encoder.
+- BitString initialization value verification improved.
+- The Set/Sequence.getNameByPosition() method implemented.
+- Fix to proper behaviour of PermittedAlphabetConstraint object.
+- Fix to improper Boolean substrate handling at CER/DER decoders.
+- Changes towards performance improvement:
+
+ + all dict.has_key() & dict.get() invocations replaced with modern syntax
+ (this breaks compatibility with Python 2.1 and older).
+ + tag and tagset caches introduced to decoder
+ + decoder code improved to prevent unnecessary pyasn1 objects creation
+ + allow disabling components verification when setting components to
+ structured types; this is used by the decoder whilst running in schema
+ mode.
+ + BER decoder for integer values now looks up a small set of pre-computed
+ substrate values to save on decoding.
+ + a few pre-computed values configured to ObjectIdentifier BER encoder.
+ + ChoiceDecoder split-off SequenceOf one to save on unnecessary checks.
+ + replace slow hasattr()/getattr() calls with isinstance() introspection.
+ + track the number of initialized components of Constructed types to save
+ on default/optional components initialization.
+ + added a shortcut ObjectIdentifier.asTuple() to be used instead of
+ __getitem__() in hotspots.
+ + use Tag.asTuple() and pure integers at tag encoder.
+ + introduce and use in decoder the baseTagSet attribute of the built-in
+ ASN.1 types.
+
+Revision 0.0.12a
+----------------
+
+- The individual tag/length/value processing methods of
+ encoder.AbstractItemEncoder renamed (leading underscore stripped)
+ to promote overloading in cases where partial substrate processing
+ is required.
+- The ocsp.py, ldap.py example scripts added.
+- Fix to univ.ObjectIdentifier input value handler to disallow negative
+ sub-IDs.
+
+Revision 0.0.11a
+----------------
+
+- Decoder can now treat values of unknown types as opaque OctetString.
+- Fix to Set/SetOf type decoder to handle uninitialized scalar SetOf
+ components correctly.
+
+Revision 0.0.10a
+----------------
+
+- API versioning mechanics retired (pyasn1.v1 -> pyasn1), which makes
+ it possible to zip-import pyasn1 sources (used by egg and py2exe).
+
+Revision 0.0.9a
+---------------
+
+- Allow any non-zero values in Boolean type BER decoder, as it's in
+ accordance with the standard.
+
+Revision 0.0.8a
+---------------
+
+- Integer.__index__() now supported (for Python 2.5+).
+- Fix to empty value encoding in BitString encoder, test case added.
+- Fix to SequenceOf decoder that prevents it skipping possible Choice
+ typed inner component.
+- Choice.getName() method added for getting currently set component
+ name.
+- OctetString.prettyPrint() does a single str() against its value,
+ eliminating extra quotes.
+
+Revision 0.0.7a
+---------------
+
+- Large tags (>31) now supported by codecs.
+- Fix to encoder to properly handle explicitly tagged untagged items.
+- All possible value lengths (up to 256^126) now supported by encoders.
+- Fix to Tag class constructor to prevent negative IDs.
+
+Revision 0.0.6a
+---------------
+
+- Make use of setuptools.
+- Constraints derivation verification (isSuperTypeOf()/isSubTypeOf()) fixed.
+- Fix to constraints comparison logic -- can't cmp() hash values as it
+ may cause false positives due to hash conflicts.
+
+Revision 0.0.5a
+---------------
+
+- Integer BER codec reworked fixing negative values encoding bug.
+- clone() and subtype() methods of Constructed ASN.1 classes now
+ accept optional cloneValueFlag flag which controls original value
+ inheritance. The default is *not* to inherit original value for
+ performance reasons (this may affect backward compatibility).
+ Performance penalty may be huge on deeply nested Constructed objects
+ re-creation.
+- Base ASN.1 types (pyasn1.type.univ.*) do not have default values
+ anymore. They remain uninitialized acting as ASN.1 types. In
+ this model, initialized ASN.1 types represent either types with
+ default value installed or a type instance.
+- Decoders' prototypes are now class instances rather than classes.
+ This is to simplify initial value installation to decoder's
+ prototype value.
+- Bugfix to BitString BER decoder (trailing bits not regarded).
+- Bugfix to Constraints use as mapping keys.
+- Bugfix to Integer & BitString clone() methods
+- Bugfix to the way to distinguish Set from SetOf at CER/DER SetOfEncoder
+- Adjustments to make it running on Python 1.5.
+- In tests, substrate constants converted from hex escaped literals into
+ octals to overcome the indefinite hex width issue occurring in early Pythons.
+- Minor performance optimization of TagSet.isSuperTagSetOf() method
+- examples/sshkey.py added
+
+Revision 0.0.4a
+---------------
+
+* Asn1Type.prettyPrinter() -> \*.prettyPrint()
+
+Revision 0.0.3a
+---------------
+
+* Simple ASN1 objects now hash to their Python value and don't
+ depend upon tag/constraints/etc.
+* prettyIn & prettyOut methods of SimpleAsn1Object become public
+* many syntax fixes
+
+Revision 0.0.2a
+---------------
+
+* ConstraintsIntersection.isSuperTypeOf() and
+ ConstraintsIntersection.hasConstraint() implemented
+* Bugfix to NamedValues initialization code
+* +/- operators added to NamedValues objects
+* Integer.__abs__() & Integer.subtype() added
+* ObjectIdentifier.prettyOut() fixes
+* Allow subclass components at SequenceAndSetBase
+* AbstractConstraint.__cmp__() dropped
+* error.Asn1Error replaced with error.PyAsn1Error
+
+Revision 0.0.1a
+---------------
+
+* Initial public alpha release
diff --git a/third_party/python/pyasn1/LICENSE.rst b/third_party/python/pyasn1/LICENSE.rst
new file mode 100644
index 0000000000..ac630e821c
--- /dev/null
+++ b/third_party/python/pyasn1/LICENSE.rst
@@ -0,0 +1,24 @@
+Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/pyasn1/MANIFEST.in b/third_party/python/pyasn1/MANIFEST.in
new file mode 100644
index 0000000000..c605b0eef0
--- /dev/null
+++ b/third_party/python/pyasn1/MANIFEST.in
@@ -0,0 +1,5 @@
+include *.rst *.md
+recursive-include tests *.py
+recursive-include docs Makefile *.rst *.svg conf.py
+prune docs/build
+prune docs/source/.templates
diff --git a/third_party/python/pyasn1/PKG-INFO b/third_party/python/pyasn1/PKG-INFO
new file mode 100644
index 0000000000..45d958b171
--- /dev/null
+++ b/third_party/python/pyasn1/PKG-INFO
@@ -0,0 +1,35 @@
+Metadata-Version: 1.2
+Name: pyasn1
+Version: 0.4.8
+Summary: ASN.1 types and codecs
+Home-page: https://github.com/etingof/pyasn1
+Author: Ilya Etingof
+Author-email: etingof@gmail.com
+Maintainer: Ilya Etingof <etingof@gmail.com>
+License: BSD
+Description: Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Information Technology
+Classifier: Intended Audience :: System Administrators
+Classifier: Intended Audience :: Telecommunications Industry
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Communications
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff --git a/third_party/python/pyasn1/README.md b/third_party/python/pyasn1/README.md
new file mode 100644
index 0000000000..e36324b0de
--- /dev/null
+++ b/third_party/python/pyasn1/README.md
@@ -0,0 +1,184 @@
+
+ASN.1 library for Python
+------------------------
+[![PyPI](https://img.shields.io/pypi/v/pyasn1.svg?maxAge=2592000)](https://pypi.org/project/pyasn1)
+[![Python Versions](https://img.shields.io/pypi/pyversions/pyasn1.svg)](https://pypi.org/project/pyasn1/)
+[![Build status](https://travis-ci.org/etingof/pyasn1.svg?branch=master)](https://secure.travis-ci.org/etingof/pyasn1)
+[![Coverage Status](https://img.shields.io/codecov/c/github/etingof/pyasn1.svg)](https://codecov.io/github/etingof/pyasn1)
+[![GitHub license](https://img.shields.io/badge/license-BSD-blue.svg)](https://raw.githubusercontent.com/etingof/pyasn1/master/LICENSE.txt)
+
+This is a free and open source implementation of ASN.1 types and codecs
+as a Python package. It was first written to support a particular
+protocol (SNMP), but was then generalized to suit a wide range
+of protocols based on the
+[ASN.1 specification](https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-X.208-198811-W!!PDF-E&type=items).
+
+Features
+--------
+
+* Generic implementation of ASN.1 types (X.208)
+* Standards compliant BER/CER/DER codecs
+* Dumps/loads ASN.1 structures from Python types
+* 100% Python, works with Python 2.4 up to Python 3.7
+* MT-safe
+* Contributed ASN.1 compiler [Asn1ate](https://github.com/kimgr/asn1ate)
+
+Why use pyasn1
+--------------
+
+ASN.1 solves the data serialisation problem. This solution was
+designed long ago by the wise Ancients. Back then, they did not
+have the luxury of wasting bits. That is why ASN.1 is designed
+to serialise data structures of unbounded complexity into
+something compact and efficient when it comes to processing
+the data.
+
+That probably explains why many network protocols and file formats
+still rely on this 30+ year old technology, including a number of
+high-profile Internet protocols and file formats.
+
+Quite a number of books cover the topic of ASN.1.
+[Communication between heterogeneous systems](http://www.oss.com/asn1/dubuisson.html)
+by Olivier Dubuisson is one of those high quality books freely
+available on the Internet.
+
+The pyasn1 package is designed to help Python programmers tackle
+network protocols and file formats from the comfort of their Python
+prompt. The library strives to capture all aspects of a rather
+complicated ASN.1 system and to represent it in Python terms.
+
+How to use pyasn1
+-----------------
+
+With pyasn1 you can build Python objects from ASN.1 data structures.
+For example, the following ASN.1 data structure:
+
+```asn1
+Record ::= SEQUENCE {
+ id INTEGER,
+ room [0] INTEGER OPTIONAL,
+ house [1] INTEGER DEFAULT 0
+}
+```
+
+Could be expressed in pyasn1 like this:
+
+```python
+from pyasn1.type.namedtype import (
+    NamedTypes, NamedType, OptionalNamedType, DefaultedNamedType)
+from pyasn1.type.tag import Tag, tagClassContext, tagFormatSimple
+from pyasn1.type.univ import Integer, Sequence
+
+
+class Record(Sequence):
+ componentType = NamedTypes(
+ NamedType('id', Integer()),
+ OptionalNamedType(
+ 'room', Integer().subtype(
+ implicitTag=Tag(tagClassContext, tagFormatSimple, 0)
+ )
+ ),
+ DefaultedNamedType(
+ 'house', Integer(0).subtype(
+ implicitTag=Tag(tagClassContext, tagFormatSimple, 1)
+ )
+ )
+ )
+```
+
+It is in the spirit of ASN.1 to take an abstract data description
+and turn it into a programming-language-specific form.
+Once you have your ASN.1 data structure expressed in Python, you
+can use it along the lines of a similar Python type (e.g. ASN.1
+`SET` is similar to Python `dict`, `SET OF` to `list`):
+
+```python
+>>> record = Record()
+>>> record['id'] = 123
+>>> record['room'] = 321
+>>> str(record)
+Record:
+ id=123
+ room=321
+>>>
+```
+
+Part of the power of ASN.1 comes from its serialisation features. You
+can serialise your data structure and send it over the network.
+
+```python
+>>> from pyasn1.codec.der.encoder import encode
+>>> substrate = encode(record)
+>>> hexdump(substrate)
+00000: 30 07 02 01 7B 80 02 01 41
+```
+
+Conversely, you can turn serialised ASN.1 content, as received from
+the network or read from a file, into a Python object which you can
+introspect, modify, encode and send back.
+
+```python
+>>> from pyasn1.codec.der.decoder import decode
+>>> received_record, rest_of_substrate = decode(substrate, asn1Spec=Record())
+>>>
+>>> for field in received_record:
+...     print('{} is {}'.format(field, received_record[field]))
+id is 123
+room is 321
+house is 0
+>>>
+>>> record == received_record
+True
+>>> received_record.update(room=123)
+>>> substrate = encode(received_record)
+>>> hexdump(substrate)
+00000: 30 06 02 01 7B 80 01 7B
+```
+
+The pyasn1 classes strive to emulate their Python prototypes (e.g. int,
+list, dict etc.), but ASN.1 types exhibit more complicated behaviour.
+To make life easier for a Pythonista, pyasn1 objects can be turned
+into plain Python built-ins:
+
+```python
+>>> from pyasn1.codec.native.encoder import encode
+>>> encode(record)
+{'id': 123, 'room': 321, 'house': 0}
+```
+
+Or vice-versa -- you can initialize an ASN.1 structure from a tree of
+Python objects:
+
+```python
+>>> from pyasn1.codec.native.decoder import decode
+>>> record = decode({'id': 123, 'room': 321, 'house': 0}, asn1Spec=Record())
+>>> str(record)
+Record:
+ id=123
+ room=321
+>>>
+```
+
+With the ASN.1 design, serialisation codecs are decoupled from data objects,
+so you can turn any single ASN.1 object into many different
+serialised forms. At the moment, pyasn1 supports the BER, DER, CER and
+native Python built-in codecs. The extremely compact PER encoding is expected
+to be introduced in the upcoming pyasn1 release.
+
+More information on pyasn1 APIs can be found in the
+[documentation](http://snmplabs.com/pyasn1/), while
+compiled ASN.1 modules for different protocols and file formats
+can be found in the pyasn1-modules
+[repo](https://github.com/etingof/pyasn1-modules).
+
+How to get pyasn1
+-----------------
+
+The pyasn1 package is distributed under the terms and conditions of the 2-clause
+BSD [license](http://snmplabs.com/pyasn1/license.html). Source code is freely
+available as a GitHub [repo](https://github.com/etingof/pyasn1).
+
+You can `pip install pyasn1` or download it from [PyPI](https://pypi.org/project/pyasn1).
+
+If something does not work as expected,
+[open an issue](https://github.com/etingof/pyasn1/issues) at GitHub or
+post your question [on Stack Overflow](https://stackoverflow.com/questions/ask)
+or try browsing pyasn1
+[mailing list archives](https://sourceforge.net/p/pyasn1/mailman/pyasn1-users/).
+
+Copyright (c) 2005-2019, [Ilya Etingof](mailto:etingof@gmail.com).
+All rights reserved.
diff --git a/third_party/python/pyasn1/TODO.rst b/third_party/python/pyasn1/TODO.rst
new file mode 100644
index 0000000000..5c79ee7cdf
--- /dev/null
+++ b/third_party/python/pyasn1/TODO.rst
@@ -0,0 +1,92 @@
+
+Things to be done
+=================
+
+Big things to tackle; anyone interested is welcome to fork pyasn1, work on
+it and come up with a PR!
+
+New codecs
+----------
+
+* PER
+* OER
+* XER
+* LWER
+* JSON (aligned with existing experimental schemas)
+
+Lazy codecs
+-----------
+
+Implement a thin layer over base types to cache pieces
+of substrate being decoded till the very moment of ASN.1
+object access in the parse tree.
+
+Codecs generator interface
+--------------------------
+
+For indefinite length or chunked encoding mode, make codecs
+iterable producing/consuming substrate/objects.
+
+ASN.1 schema compiler
+---------------------
+
+Ideally, the compiler should parse modern schema files and be
+designed to emit code for arbitrary languages (including SQL).
+
+Base types
+----------
+
+Implement X.680 constructs, including information schema.
+
+Examples
+--------
+
+Add examples, including advanced/obscure use cases.
+
+Documentation
+-------------
+
+Document more API, add notes and example snippets.
+
+More fresh modules
+------------------
+
+Compile and ship more Pythonized ASN.1 modules for
+various ASN.1-based protocols (e.g. Kerberos).
+Refresh outdated modules in pyasn1-modules.
+
+Minor, housekeeping things
+--------------------------
+
+* more PEP8'ing at places
+* consider simplifying repr(), otherwise it tends to be too hard to grasp
+* Specialize ASN.1 character and useful types
+
+* ber.decoder:
+
+ * suspend codec on underrun error ?
+ * present subtypes ?
+ * component presence check won't work at innertypeconst
+ * type vs value, defaultValue
+
+* ber.encoder:
+
+ * Asn1Item.clone() / shallowcopy issue
+ * large length encoder?
+ * lookup type by tag first to allow custom codecs for non-base types
+
+* type.useful:
+
+ * may need to implement prettyIn/Out
+
+* type.char:
+
+ * may need to implement constraints
+
+* type.namedtypes
+
+ * type vs tagset name convention
+
+* how untagged TagSet should be initialized?
+
+* type and codecs for Real needs refactoring
diff --git a/third_party/python/pyasn1/pyasn1.egg-info/PKG-INFO b/third_party/python/pyasn1/pyasn1.egg-info/PKG-INFO
new file mode 100644
index 0000000000..45d958b171
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1.egg-info/PKG-INFO
@@ -0,0 +1,35 @@
+Metadata-Version: 1.2
+Name: pyasn1
+Version: 0.4.8
+Summary: ASN.1 types and codecs
+Home-page: https://github.com/etingof/pyasn1
+Author: Ilya Etingof
+Author-email: etingof@gmail.com
+Maintainer: Ilya Etingof <etingof@gmail.com>
+License: BSD
+Description: Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Information Technology
+Classifier: Intended Audience :: System Administrators
+Classifier: Intended Audience :: Telecommunications Industry
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Communications
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff --git a/third_party/python/pyasn1/pyasn1.egg-info/SOURCES.txt b/third_party/python/pyasn1/pyasn1.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..4877900b83
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1.egg-info/SOURCES.txt
@@ -0,0 +1,161 @@
+CHANGES.rst
+LICENSE.rst
+MANIFEST.in
+README.md
+TODO.rst
+setup.cfg
+setup.py
+docs/Makefile
+docs/tutorial.rst
+docs/source/changelog.rst
+docs/source/conf.py
+docs/source/contents.rst
+docs/source/download.rst
+docs/source/example-use-case.rst
+docs/source/license.rst
+docs/source/.static/logo.svg
+docs/source/pyasn1/contents.rst
+docs/source/pyasn1/codec/ber/contents.rst
+docs/source/pyasn1/codec/cer/contents.rst
+docs/source/pyasn1/codec/der/contents.rst
+docs/source/pyasn1/codec/native/contents.rst
+docs/source/pyasn1/error/contents.rst
+docs/source/pyasn1/type/base/asn1type.rst
+docs/source/pyasn1/type/base/constructedasn1type.rst
+docs/source/pyasn1/type/base/contents.rst
+docs/source/pyasn1/type/base/novalue.rst
+docs/source/pyasn1/type/base/simpleasn1type.rst
+docs/source/pyasn1/type/char/bmpstring.rst
+docs/source/pyasn1/type/char/contents.rst
+docs/source/pyasn1/type/char/generalstring.rst
+docs/source/pyasn1/type/char/graphicstring.rst
+docs/source/pyasn1/type/char/ia5string.rst
+docs/source/pyasn1/type/char/iso646string.rst
+docs/source/pyasn1/type/char/numericstring.rst
+docs/source/pyasn1/type/char/printablestring.rst
+docs/source/pyasn1/type/char/t61string.rst
+docs/source/pyasn1/type/char/teletexstring.rst
+docs/source/pyasn1/type/char/universalstring.rst
+docs/source/pyasn1/type/char/utf8string.rst
+docs/source/pyasn1/type/char/videotexstring.rst
+docs/source/pyasn1/type/char/visiblestring.rst
+docs/source/pyasn1/type/constraint/constraintsexclusion.rst
+docs/source/pyasn1/type/constraint/constraintsintersection.rst
+docs/source/pyasn1/type/constraint/constraintsunion.rst
+docs/source/pyasn1/type/constraint/containedsubtype.rst
+docs/source/pyasn1/type/constraint/contents.rst
+docs/source/pyasn1/type/constraint/permittedalphabet.rst
+docs/source/pyasn1/type/constraint/singlevalue.rst
+docs/source/pyasn1/type/constraint/valuerange.rst
+docs/source/pyasn1/type/constraint/valuesize.rst
+docs/source/pyasn1/type/constraint/withcomponents.rst
+docs/source/pyasn1/type/namedtype/contents.rst
+docs/source/pyasn1/type/namedtype/defaultednamedtype.rst
+docs/source/pyasn1/type/namedtype/namedtype.rst
+docs/source/pyasn1/type/namedtype/namedtypes.rst
+docs/source/pyasn1/type/namedtype/optionalnamedtype.rst
+docs/source/pyasn1/type/namedval/contents.rst
+docs/source/pyasn1/type/namedval/namedval.rst
+docs/source/pyasn1/type/opentype/contents.rst
+docs/source/pyasn1/type/opentype/opentype.rst
+docs/source/pyasn1/type/tag/contents.rst
+docs/source/pyasn1/type/tag/tag.rst
+docs/source/pyasn1/type/tag/tagmap.rst
+docs/source/pyasn1/type/tag/tagset.rst
+docs/source/pyasn1/type/univ/any.rst
+docs/source/pyasn1/type/univ/bitstring.rst
+docs/source/pyasn1/type/univ/boolean.rst
+docs/source/pyasn1/type/univ/choice.rst
+docs/source/pyasn1/type/univ/contents.rst
+docs/source/pyasn1/type/univ/enumerated.rst
+docs/source/pyasn1/type/univ/integer.rst
+docs/source/pyasn1/type/univ/null.rst
+docs/source/pyasn1/type/univ/objectidentifier.rst
+docs/source/pyasn1/type/univ/octetstring.rst
+docs/source/pyasn1/type/univ/real.rst
+docs/source/pyasn1/type/univ/sequence.rst
+docs/source/pyasn1/type/univ/sequenceof.rst
+docs/source/pyasn1/type/univ/set.rst
+docs/source/pyasn1/type/univ/setof.rst
+docs/source/pyasn1/type/useful/contents.rst
+docs/source/pyasn1/type/useful/generalizedtime.rst
+docs/source/pyasn1/type/useful/objectdescriptor.rst
+docs/source/pyasn1/type/useful/utctime.rst
+pyasn1/__init__.py
+pyasn1/debug.py
+pyasn1/error.py
+pyasn1.egg-info/PKG-INFO
+pyasn1.egg-info/SOURCES.txt
+pyasn1.egg-info/dependency_links.txt
+pyasn1.egg-info/top_level.txt
+pyasn1.egg-info/zip-safe
+pyasn1/codec/__init__.py
+pyasn1/codec/ber/__init__.py
+pyasn1/codec/ber/decoder.py
+pyasn1/codec/ber/encoder.py
+pyasn1/codec/ber/eoo.py
+pyasn1/codec/cer/__init__.py
+pyasn1/codec/cer/decoder.py
+pyasn1/codec/cer/encoder.py
+pyasn1/codec/der/__init__.py
+pyasn1/codec/der/decoder.py
+pyasn1/codec/der/encoder.py
+pyasn1/codec/native/__init__.py
+pyasn1/codec/native/decoder.py
+pyasn1/codec/native/encoder.py
+pyasn1/compat/__init__.py
+pyasn1/compat/binary.py
+pyasn1/compat/calling.py
+pyasn1/compat/dateandtime.py
+pyasn1/compat/integer.py
+pyasn1/compat/octets.py
+pyasn1/compat/string.py
+pyasn1/type/__init__.py
+pyasn1/type/base.py
+pyasn1/type/char.py
+pyasn1/type/constraint.py
+pyasn1/type/error.py
+pyasn1/type/namedtype.py
+pyasn1/type/namedval.py
+pyasn1/type/opentype.py
+pyasn1/type/tag.py
+pyasn1/type/tagmap.py
+pyasn1/type/univ.py
+pyasn1/type/useful.py
+tests/__init__.py
+tests/__main__.py
+tests/base.py
+tests/test_debug.py
+tests/codec/__init__.py
+tests/codec/__main__.py
+tests/codec/ber/__init__.py
+tests/codec/ber/__main__.py
+tests/codec/ber/test_decoder.py
+tests/codec/ber/test_encoder.py
+tests/codec/cer/__init__.py
+tests/codec/cer/__main__.py
+tests/codec/cer/test_decoder.py
+tests/codec/cer/test_encoder.py
+tests/codec/der/__init__.py
+tests/codec/der/__main__.py
+tests/codec/der/test_decoder.py
+tests/codec/der/test_encoder.py
+tests/codec/native/__init__.py
+tests/codec/native/__main__.py
+tests/codec/native/test_decoder.py
+tests/codec/native/test_encoder.py
+tests/compat/__init__.py
+tests/compat/__main__.py
+tests/compat/test_binary.py
+tests/compat/test_integer.py
+tests/compat/test_octets.py
+tests/type/__init__.py
+tests/type/__main__.py
+tests/type/test_char.py
+tests/type/test_constraint.py
+tests/type/test_namedtype.py
+tests/type/test_namedval.py
+tests/type/test_opentype.py
+tests/type/test_tag.py
+tests/type/test_univ.py
+tests/type/test_useful.py \ No newline at end of file
diff --git a/third_party/python/pyasn1/pyasn1.egg-info/dependency_links.txt b/third_party/python/pyasn1/pyasn1.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/third_party/python/pyasn1/pyasn1.egg-info/top_level.txt b/third_party/python/pyasn1/pyasn1.egg-info/top_level.txt
new file mode 100644
index 0000000000..38fe414575
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1.egg-info/top_level.txt
@@ -0,0 +1 @@
+pyasn1
diff --git a/third_party/python/pyasn1/pyasn1.egg-info/zip-safe b/third_party/python/pyasn1/pyasn1.egg-info/zip-safe
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1.egg-info/zip-safe
@@ -0,0 +1 @@
+
diff --git a/third_party/python/pyasn1/pyasn1/__init__.py b/third_party/python/pyasn1/pyasn1/__init__.py
new file mode 100644
index 0000000000..5a56a707c8
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/__init__.py
@@ -0,0 +1,7 @@
+import sys
+
+# https://www.python.org/dev/peps/pep-0396/
+__version__ = '0.4.8'
+
+if sys.version_info[:2] < (2, 4):
+ raise RuntimeError('PyASN1 requires Python 2.4 or later')
diff --git a/third_party/python/pyasn1/pyasn1/codec/__init__.py b/third_party/python/pyasn1/pyasn1/codec/__init__.py
new file mode 100644
index 0000000000..8c3066b2e6
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/third_party/python/pyasn1/pyasn1/codec/ber/__init__.py b/third_party/python/pyasn1/pyasn1/codec/ber/__init__.py
new file mode 100644
index 0000000000..8c3066b2e6
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/ber/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/third_party/python/pyasn1/pyasn1/codec/ber/decoder.py b/third_party/python/pyasn1/pyasn1/codec/ber/decoder.py
new file mode 100644
index 0000000000..5ff485fbeb
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/ber/decoder.py
@@ -0,0 +1,1682 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from pyasn1 import debug
+from pyasn1 import error
+from pyasn1.codec.ber import eoo
+from pyasn1.compat.integer import from_bytes
+from pyasn1.compat.octets import oct2int, octs2ints, ints2octs, null
+from pyasn1.type import base
+from pyasn1.type import char
+from pyasn1.type import tag
+from pyasn1.type import tagmap
+from pyasn1.type import univ
+from pyasn1.type import useful
+
+__all__ = ['decode']
+
+LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_DECODER)
+
+noValue = base.noValue
+
+
+class AbstractDecoder(object):
+ protoComponent = None
+
+ def valueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,))
+
+ def indefLenValueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,))
+
+
+class AbstractSimpleDecoder(AbstractDecoder):
+ @staticmethod
+ def substrateCollector(asn1Object, substrate, length):
+ return substrate[:length], substrate[length:]
+
+ def _createComponent(self, asn1Spec, tagSet, value, **options):
+ if options.get('native'):
+ return value
+ elif asn1Spec is None:
+ return self.protoComponent.clone(value, tagSet=tagSet)
+ elif value is noValue:
+ return asn1Spec
+ else:
+ return asn1Spec.clone(value)
+
+
+class ExplicitTagDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.Any('')
+
+ def valueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ if substrateFun:
+ return substrateFun(
+ self._createComponent(asn1Spec, tagSet, '', **options),
+ substrate, length
+ )
+
+ head, tail = substrate[:length], substrate[length:]
+
+ value, _ = decodeFun(head, asn1Spec, tagSet, length, **options)
+
+ if LOG:
+ LOG('explicit tag container carries %d octets of trailing payload '
+ '(will be lost!): %s' % (len(_), debug.hexdump(_)))
+
+ return value, tail
+
+ def indefLenValueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ if substrateFun:
+ return substrateFun(
+ self._createComponent(asn1Spec, tagSet, '', **options),
+ substrate, length
+ )
+
+ value, substrate = decodeFun(substrate, asn1Spec, tagSet, length, **options)
+
+ eooMarker, substrate = decodeFun(substrate, allowEoo=True, **options)
+
+ if eooMarker is eoo.endOfOctets:
+ return value, substrate
+ else:
+ raise error.PyAsn1Error('Missing end-of-octets terminator')
+
+
+explicitTagDecoder = ExplicitTagDecoder()
+
+
+class IntegerDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.Integer(0)
+
+ def valueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+
+ if tagSet[0].tagFormat != tag.tagFormatSimple:
+ raise error.PyAsn1Error('Simple tag format expected')
+
+ head, tail = substrate[:length], substrate[length:]
+
+ if not head:
+ return self._createComponent(asn1Spec, tagSet, 0, **options), tail
+
+ value = from_bytes(head, signed=True)
+
+ return self._createComponent(asn1Spec, tagSet, value, **options), tail
+
+
+class BooleanDecoder(IntegerDecoder):
+ protoComponent = univ.Boolean(0)
+
+ def _createComponent(self, asn1Spec, tagSet, value, **options):
+ return IntegerDecoder._createComponent(
+ self, asn1Spec, tagSet, value and 1 or 0, **options)
+
+
+class BitStringDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.BitString(())
+ supportConstructedForm = True
+
+ def valueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ head, tail = substrate[:length], substrate[length:]
+
+ if substrateFun:
+ return substrateFun(self._createComponent(
+ asn1Spec, tagSet, noValue, **options), substrate, length)
+
+ if not head:
+ raise error.PyAsn1Error('Empty BIT STRING substrate')
+
+ if tagSet[0].tagFormat == tag.tagFormatSimple: # XXX what tag to check?
+
+ trailingBits = oct2int(head[0])
+ if trailingBits > 7:
+ raise error.PyAsn1Error(
+ 'Trailing bits overflow %s' % trailingBits
+ )
+
+ value = self.protoComponent.fromOctetString(
+ head[1:], internalFormat=True, padding=trailingBits)
+
+ return self._createComponent(asn1Spec, tagSet, value, **options), tail
+
+ if not self.supportConstructedForm:
+ raise error.PyAsn1Error('Constructed encoding form prohibited '
+ 'at %s' % self.__class__.__name__)
+
+ if LOG:
+ LOG('assembling constructed serialization')
+
+ # All inner fragments are of the same type, treat them as octet string
+ substrateFun = self.substrateCollector
+
+ bitString = self.protoComponent.fromOctetString(null, internalFormat=True)
+
+ while head:
+ component, head = decodeFun(head, self.protoComponent,
+ substrateFun=substrateFun, **options)
+
+ trailingBits = oct2int(component[0])
+ if trailingBits > 7:
+ raise error.PyAsn1Error(
+ 'Trailing bits overflow %s' % trailingBits
+ )
+
+ bitString = self.protoComponent.fromOctetString(
+ component[1:], internalFormat=True,
+ prepend=bitString, padding=trailingBits
+ )
+
+ return self._createComponent(asn1Spec, tagSet, bitString, **options), tail
+
+ def indefLenValueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+
+ if substrateFun:
+ return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options), substrate, length)
+
+ # All inner fragments are of the same type, treat them as octet string
+ substrateFun = self.substrateCollector
+
+ bitString = self.protoComponent.fromOctetString(null, internalFormat=True)
+
+ while substrate:
+ component, substrate = decodeFun(substrate, self.protoComponent,
+ substrateFun=substrateFun,
+ allowEoo=True, **options)
+ if component is eoo.endOfOctets:
+ break
+
+ trailingBits = oct2int(component[0])
+ if trailingBits > 7:
+ raise error.PyAsn1Error(
+ 'Trailing bits overflow %s' % trailingBits
+ )
+
+ bitString = self.protoComponent.fromOctetString(
+ component[1:], internalFormat=True,
+ prepend=bitString, padding=trailingBits
+ )
+
+ else:
+ raise error.SubstrateUnderrunError('No EOO seen before substrate ends')
+
+ return self._createComponent(asn1Spec, tagSet, bitString, **options), substrate
+
+
+class OctetStringDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.OctetString('')
+ supportConstructedForm = True
+
+ def valueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ head, tail = substrate[:length], substrate[length:]
+
+ if substrateFun:
+ return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options),
+ substrate, length)
+
+ if tagSet[0].tagFormat == tag.tagFormatSimple: # XXX what tag to check?
+ return self._createComponent(asn1Spec, tagSet, head, **options), tail
+
+ if not self.supportConstructedForm:
+ raise error.PyAsn1Error('Constructed encoding form prohibited at %s' % self.__class__.__name__)
+
+ if LOG:
+ LOG('assembling constructed serialization')
+
+ # All inner fragments are of the same type, treat them as octet string
+ substrateFun = self.substrateCollector
+
+ header = null
+
+ while head:
+ component, head = decodeFun(head, self.protoComponent,
+ substrateFun=substrateFun,
+ **options)
+ header += component
+
+ return self._createComponent(asn1Spec, tagSet, header, **options), tail
+
+ def indefLenValueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ if substrateFun and substrateFun is not self.substrateCollector:
+ asn1Object = self._createComponent(asn1Spec, tagSet, noValue, **options)
+ return substrateFun(asn1Object, substrate, length)
+
+ # All inner fragments are of the same type, treat them as octet string
+ substrateFun = self.substrateCollector
+
+ header = null
+
+ while substrate:
+ component, substrate = decodeFun(substrate,
+ self.protoComponent,
+ substrateFun=substrateFun,
+ allowEoo=True, **options)
+ if component is eoo.endOfOctets:
+ break
+
+ header += component
+
+ else:
+ raise error.SubstrateUnderrunError(
+ 'No EOO seen before substrate ends'
+ )
+
+ return self._createComponent(asn1Spec, tagSet, header, **options), substrate
+
+
+class NullDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.Null('')
+
+ def valueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+
+ if tagSet[0].tagFormat != tag.tagFormatSimple:
+ raise error.PyAsn1Error('Simple tag format expected')
+
+ head, tail = substrate[:length], substrate[length:]
+
+ component = self._createComponent(asn1Spec, tagSet, '', **options)
+
+ if head:
+ raise error.PyAsn1Error('Unexpected %d-octet substrate for Null' % length)
+
+ return component, tail
+
+
+class ObjectIdentifierDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.ObjectIdentifier(())
+
+ def valueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ if tagSet[0].tagFormat != tag.tagFormatSimple:
+ raise error.PyAsn1Error('Simple tag format expected')
+
+ head, tail = substrate[:length], substrate[length:]
+ if not head:
+ raise error.PyAsn1Error('Empty substrate')
+
+ head = octs2ints(head)
+
+ oid = ()
+ index = 0
+ substrateLen = len(head)
+ while index < substrateLen:
+ subId = head[index]
+ index += 1
+ if subId < 128:
+ oid += (subId,)
+ elif subId > 128:
+ # Construct subid from a number of octets
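+                # Multi-octet sub-identifiers are base-128 encoded with the
+                # high bit set on every octet but the last. Illustrative
+                # example: sub-identifier 311 arrives as 82 37 (2 * 128 + 55).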
+ nextSubId = subId
+ subId = 0
+ while nextSubId >= 128:
+ subId = (subId << 7) + (nextSubId & 0x7F)
+ if index >= substrateLen:
+ raise error.SubstrateUnderrunError(
+ 'Short substrate for sub-OID past %s' % (oid,)
+ )
+ nextSubId = head[index]
+ index += 1
+ oid += ((subId << 7) + nextSubId,)
+ elif subId == 128:
+ # ASN.1 spec forbids leading zeros (0x80) in OID
+                # encoding; tolerating it would open a vulnerability. See
+ # https://www.esat.kuleuven.be/cosic/publications/article-1432.pdf
+ # page 7
+ raise error.PyAsn1Error('Invalid octet 0x80 in OID encoding')
+
+ # Decode two leading arcs
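+        # The first encoded sub-identifier packs the two leading arcs X.Y
+        # as X * 40 + Y. Illustrative example: OID 1.3.6.1 is carried as
+        # the content octets 2B 06 01, where 0x2B (43) unpacks to (1, 3).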
+ if 0 <= oid[0] <= 39:
+ oid = (0,) + oid
+ elif 40 <= oid[0] <= 79:
+ oid = (1, oid[0] - 40) + oid[1:]
+ elif oid[0] >= 80:
+ oid = (2, oid[0] - 80) + oid[1:]
+ else:
+ raise error.PyAsn1Error('Malformed first OID octet: %s' % head[0])
+
+ return self._createComponent(asn1Spec, tagSet, oid, **options), tail
+
+
+class RealDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.Real()
+
+ def valueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ if tagSet[0].tagFormat != tag.tagFormatSimple:
+ raise error.PyAsn1Error('Simple tag format expected')
+
+ head, tail = substrate[:length], substrate[length:]
+
+ if not head:
+ return self._createComponent(asn1Spec, tagSet, 0.0, **options), tail
+
+ fo = oct2int(head[0])
+ head = head[1:]
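+        # The first content octet selects the REAL encoding: bit 8 set means
+        # binary encoding (base 2/8/16 with sign, scale factor and exponent
+        # length in the remaining bits), 0x40/0x41 denote plus/minus infinity
+        # and a clear top bit pair selects the character (NR1/NR2/NR3) forms.
+        # Illustrative example: content octets 80 FF 03 decode to mantissa 3,
+        # base 2, exponent -1, i.e. 1.5.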
+ if fo & 0x80: # binary encoding
+ if not head:
+ raise error.PyAsn1Error("Incomplete floating-point value")
+
+ if LOG:
+ LOG('decoding binary encoded REAL')
+
+ n = (fo & 0x03) + 1
+
+ if n == 4:
+ n = oct2int(head[0])
+ head = head[1:]
+
+ eo, head = head[:n], head[n:]
+
+ if not eo or not head:
+                raise error.PyAsn1Error('Incomplete REAL exponent or mantissa')
+
+ e = oct2int(eo[0]) & 0x80 and -1 or 0
+
+ while eo: # exponent
+ e <<= 8
+ e |= oct2int(eo[0])
+ eo = eo[1:]
+
+ b = fo >> 4 & 0x03 # base bits
+
+ if b > 2:
+ raise error.PyAsn1Error('Illegal Real base')
+
+ if b == 1: # encbase = 8
+ e *= 3
+
+ elif b == 2: # encbase = 16
+ e *= 4
+ p = 0
+
+ while head: # value
+ p <<= 8
+ p |= oct2int(head[0])
+ head = head[1:]
+
+ if fo & 0x40: # sign bit
+ p = -p
+
+ sf = fo >> 2 & 0x03 # scale bits
+ p *= 2 ** sf
+ value = (p, 2, e)
+
+ elif fo & 0x40: # infinite value
+ if LOG:
+ LOG('decoding infinite REAL')
+
+ value = fo & 0x01 and '-inf' or 'inf'
+
+ elif fo & 0xc0 == 0: # character encoding
+ if not head:
+ raise error.PyAsn1Error("Incomplete floating-point value")
+
+ if LOG:
+ LOG('decoding character encoded REAL')
+
+ try:
+ if fo & 0x3 == 0x1: # NR1
+ value = (int(head), 10, 0)
+
+ elif fo & 0x3 == 0x2: # NR2
+ value = float(head)
+
+ elif fo & 0x3 == 0x3: # NR3
+ value = float(head)
+
+ else:
+ raise error.SubstrateUnderrunError(
+ 'Unknown NR (tag %s)' % fo
+ )
+
+ except ValueError:
+ raise error.SubstrateUnderrunError(
+ 'Bad character Real syntax'
+ )
+
+ else:
+ raise error.SubstrateUnderrunError(
+ 'Unknown encoding (tag %s)' % fo
+ )
+
+ return self._createComponent(asn1Spec, tagSet, value, **options), tail
+
+
+class AbstractConstructedDecoder(AbstractDecoder):
+ protoComponent = None
+
+
+class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
+ protoRecordComponent = None
+ protoSequenceComponent = None
+
+ def _getComponentTagMap(self, asn1Object, idx):
+ raise NotImplementedError()
+
+ def _getComponentPositionByType(self, asn1Object, tagSet, idx):
+ raise NotImplementedError()
+
+ def _decodeComponents(self, substrate, tagSet=None, decodeFun=None, **options):
+ components = []
+ componentTypes = set()
+
+ while substrate:
+ component, substrate = decodeFun(substrate, **options)
+ if component is eoo.endOfOctets:
+ break
+
+ components.append(component)
+ componentTypes.add(component.tagSet)
+
+        # Now we have to guess whether it is SEQUENCE/SET or SEQUENCE OF/SET OF
+        # The heuristic is:
+        # * components of more than one distinct type -> likely SEQUENCE/SET
+        # * otherwise -> likely SEQUENCE OF/SET OF
+ if len(componentTypes) > 1:
+ protoComponent = self.protoRecordComponent
+
+ else:
+ protoComponent = self.protoSequenceComponent
+
+ asn1Object = protoComponent.clone(
+ # construct tagSet from base tag from prototype ASN.1 object
+ # and additional tags recovered from the substrate
+ tagSet=tag.TagSet(protoComponent.tagSet.baseTag, *tagSet.superTags)
+ )
+
+ if LOG:
+ LOG('guessed %r container type (pass `asn1Spec` to guide the '
+ 'decoder)' % asn1Object)
+
+ for idx, component in enumerate(components):
+ asn1Object.setComponentByPosition(
+ idx, component,
+ verifyConstraints=False,
+ matchTags=False, matchConstraints=False
+ )
+
+ return asn1Object, substrate
+
+ def valueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ if tagSet[0].tagFormat != tag.tagFormatConstructed:
+ raise error.PyAsn1Error('Constructed tag format expected')
+
+ head, tail = substrate[:length], substrate[length:]
+
+ if substrateFun is not None:
+ if asn1Spec is not None:
+ asn1Object = asn1Spec.clone()
+
+ elif self.protoComponent is not None:
+ asn1Object = self.protoComponent.clone(tagSet=tagSet)
+
+ else:
+ asn1Object = self.protoRecordComponent, self.protoSequenceComponent
+
+ return substrateFun(asn1Object, substrate, length)
+
+ if asn1Spec is None:
+ asn1Object, trailing = self._decodeComponents(
+ head, tagSet=tagSet, decodeFun=decodeFun, **options
+ )
+
+ if trailing:
+ if LOG:
+ LOG('Unused trailing %d octets encountered: %s' % (
+ len(trailing), debug.hexdump(trailing)))
+
+ return asn1Object, tail
+
+ asn1Object = asn1Spec.clone()
+ asn1Object.clear()
+
+ if asn1Spec.typeId in (univ.Sequence.typeId, univ.Set.typeId):
+
+ namedTypes = asn1Spec.componentType
+
+ isSetType = asn1Spec.typeId == univ.Set.typeId
+ isDeterministic = not isSetType and not namedTypes.hasOptionalOrDefault
+
+ if LOG:
+ LOG('decoding %sdeterministic %s type %r chosen by type ID' % (
+ not isDeterministic and 'non-' or '', isSetType and 'SET' or '',
+ asn1Spec))
+
+ seenIndices = set()
+ idx = 0
+ while head:
+ if not namedTypes:
+ componentType = None
+
+ elif isSetType:
+ componentType = namedTypes.tagMapUnique
+
+ else:
+ try:
+ if isDeterministic:
+ componentType = namedTypes[idx].asn1Object
+
+ elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
+ componentType = namedTypes.getTagMapNearPosition(idx)
+
+ else:
+ componentType = namedTypes[idx].asn1Object
+
+ except IndexError:
+ raise error.PyAsn1Error(
+ 'Excessive components decoded at %r' % (asn1Spec,)
+ )
+
+ component, head = decodeFun(head, componentType, **options)
+
+ if not isDeterministic and namedTypes:
+ if isSetType:
+ idx = namedTypes.getPositionByType(component.effectiveTagSet)
+
+ elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
+ idx = namedTypes.getPositionNearType(component.effectiveTagSet, idx)
+
+ asn1Object.setComponentByPosition(
+ idx, component,
+ verifyConstraints=False,
+ matchTags=False, matchConstraints=False
+ )
+
+ seenIndices.add(idx)
+ idx += 1
+
+ if LOG:
+ LOG('seen component indices %s' % seenIndices)
+
+ if namedTypes:
+ if not namedTypes.requiredComponents.issubset(seenIndices):
+ raise error.PyAsn1Error(
+ 'ASN.1 object %s has uninitialized '
+ 'components' % asn1Object.__class__.__name__)
+
+ if namedTypes.hasOpenTypes:
+
+ openTypes = options.get('openTypes', {})
+
+ if LOG:
+ LOG('user-specified open types map:')
+
+ for k, v in openTypes.items():
+ LOG('%s -> %r' % (k, v))
+
+ if openTypes or options.get('decodeOpenTypes', False):
+
+ for idx, namedType in enumerate(namedTypes.namedTypes):
+ if not namedType.openType:
+ continue
+
+ if namedType.isOptional and not asn1Object.getComponentByPosition(idx).isValue:
+ continue
+
+ governingValue = asn1Object.getComponentByName(
+ namedType.openType.name
+ )
+
+ try:
+ openType = openTypes[governingValue]
+
+ except KeyError:
+
+ if LOG:
+ LOG('default open types map of component '
+ '"%s.%s" governed by component "%s.%s"'
+ ':' % (asn1Object.__class__.__name__,
+ namedType.name,
+ asn1Object.__class__.__name__,
+ namedType.openType.name))
+
+ for k, v in namedType.openType.items():
+ LOG('%s -> %r' % (k, v))
+
+ try:
+ openType = namedType.openType[governingValue]
+
+ except KeyError:
+ if LOG:
+ LOG('failed to resolve open type by governing '
+ 'value %r' % (governingValue,))
+ continue
+
+ if LOG:
+ LOG('resolved open type %r by governing '
+ 'value %r' % (openType, governingValue))
+
+ containerValue = asn1Object.getComponentByPosition(idx)
+
+ if containerValue.typeId in (
+ univ.SetOf.typeId, univ.SequenceOf.typeId):
+
+ for pos, containerElement in enumerate(
+ containerValue):
+
+ component, rest = decodeFun(
+ containerValue[pos].asOctets(),
+ asn1Spec=openType, **options
+ )
+
+ containerValue[pos] = component
+
+ else:
+ component, rest = decodeFun(
+ asn1Object.getComponentByPosition(idx).asOctets(),
+ asn1Spec=openType, **options
+ )
+
+ asn1Object.setComponentByPosition(idx, component)
+
+ else:
+ inconsistency = asn1Object.isInconsistent
+ if inconsistency:
+ raise inconsistency
+
+ else:
+ asn1Object = asn1Spec.clone()
+ asn1Object.clear()
+
+ componentType = asn1Spec.componentType
+
+ if LOG:
+ LOG('decoding type %r chosen by given `asn1Spec`' % componentType)
+
+ idx = 0
+
+ while head:
+ component, head = decodeFun(head, componentType, **options)
+ asn1Object.setComponentByPosition(
+ idx, component,
+ verifyConstraints=False,
+ matchTags=False, matchConstraints=False
+ )
+
+ idx += 1
+
+ return asn1Object, tail
+
+ def indefLenValueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ if tagSet[0].tagFormat != tag.tagFormatConstructed:
+ raise error.PyAsn1Error('Constructed tag format expected')
+
+ if substrateFun is not None:
+ if asn1Spec is not None:
+ asn1Object = asn1Spec.clone()
+
+ elif self.protoComponent is not None:
+ asn1Object = self.protoComponent.clone(tagSet=tagSet)
+
+ else:
+ asn1Object = self.protoRecordComponent, self.protoSequenceComponent
+
+ return substrateFun(asn1Object, substrate, length)
+
+ if asn1Spec is None:
+ return self._decodeComponents(
+ substrate, tagSet=tagSet, decodeFun=decodeFun,
+ **dict(options, allowEoo=True)
+ )
+
+ asn1Object = asn1Spec.clone()
+ asn1Object.clear()
+
+ if asn1Spec.typeId in (univ.Sequence.typeId, univ.Set.typeId):
+
+ namedTypes = asn1Object.componentType
+
+ isSetType = asn1Object.typeId == univ.Set.typeId
+ isDeterministic = not isSetType and not namedTypes.hasOptionalOrDefault
+
+ if LOG:
+ LOG('decoding %sdeterministic %s type %r chosen by type ID' % (
+ not isDeterministic and 'non-' or '', isSetType and 'SET' or '',
+ asn1Spec))
+
+ seenIndices = set()
+ idx = 0
+ while substrate:
+ if len(namedTypes) <= idx:
+ asn1Spec = None
+
+ elif isSetType:
+ asn1Spec = namedTypes.tagMapUnique
+
+ else:
+ try:
+ if isDeterministic:
+ asn1Spec = namedTypes[idx].asn1Object
+
+ elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
+ asn1Spec = namedTypes.getTagMapNearPosition(idx)
+
+ else:
+ asn1Spec = namedTypes[idx].asn1Object
+
+ except IndexError:
+ raise error.PyAsn1Error(
+ 'Excessive components decoded at %r' % (asn1Object,)
+ )
+
+ component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True, **options)
+ if component is eoo.endOfOctets:
+ break
+
+ if not isDeterministic and namedTypes:
+ if isSetType:
+ idx = namedTypes.getPositionByType(component.effectiveTagSet)
+ elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
+ idx = namedTypes.getPositionNearType(component.effectiveTagSet, idx)
+
+ asn1Object.setComponentByPosition(
+ idx, component,
+ verifyConstraints=False,
+ matchTags=False, matchConstraints=False
+ )
+
+ seenIndices.add(idx)
+ idx += 1
+
+ else:
+ raise error.SubstrateUnderrunError(
+ 'No EOO seen before substrate ends'
+ )
+
+ if LOG:
+ LOG('seen component indices %s' % seenIndices)
+
+ if namedTypes:
+ if not namedTypes.requiredComponents.issubset(seenIndices):
+ raise error.PyAsn1Error('ASN.1 object %s has uninitialized components' % asn1Object.__class__.__name__)
+
+ if namedTypes.hasOpenTypes:
+
+ openTypes = options.get('openTypes', {})
+
+ if LOG:
+ LOG('user-specified open types map:')
+
+ for k, v in openTypes.items():
+ LOG('%s -> %r' % (k, v))
+
+ if openTypes or options.get('decodeOpenTypes', False):
+
+ for idx, namedType in enumerate(namedTypes.namedTypes):
+ if not namedType.openType:
+ continue
+
+ if namedType.isOptional and not asn1Object.getComponentByPosition(idx).isValue:
+ continue
+
+ governingValue = asn1Object.getComponentByName(
+ namedType.openType.name
+ )
+
+ try:
+ openType = openTypes[governingValue]
+
+ except KeyError:
+
+ if LOG:
+ LOG('default open types map of component '
+ '"%s.%s" governed by component "%s.%s"'
+ ':' % (asn1Object.__class__.__name__,
+ namedType.name,
+ asn1Object.__class__.__name__,
+ namedType.openType.name))
+
+ for k, v in namedType.openType.items():
+ LOG('%s -> %r' % (k, v))
+
+ try:
+ openType = namedType.openType[governingValue]
+
+ except KeyError:
+ if LOG:
+ LOG('failed to resolve open type by governing '
+ 'value %r' % (governingValue,))
+ continue
+
+ if LOG:
+ LOG('resolved open type %r by governing '
+ 'value %r' % (openType, governingValue))
+
+ containerValue = asn1Object.getComponentByPosition(idx)
+
+ if containerValue.typeId in (
+ univ.SetOf.typeId, univ.SequenceOf.typeId):
+
+ for pos, containerElement in enumerate(
+ containerValue):
+
+ component, rest = decodeFun(
+ containerValue[pos].asOctets(),
+ asn1Spec=openType, **dict(options, allowEoo=True)
+ )
+
+ containerValue[pos] = component
+
+ else:
+ component, rest = decodeFun(
+ asn1Object.getComponentByPosition(idx).asOctets(),
+ asn1Spec=openType, **dict(options, allowEoo=True)
+ )
+
+ if component is not eoo.endOfOctets:
+ asn1Object.setComponentByPosition(idx, component)
+
+ else:
+ inconsistency = asn1Object.isInconsistent
+ if inconsistency:
+ raise inconsistency
+
+ else:
+ asn1Object = asn1Spec.clone()
+ asn1Object.clear()
+
+ componentType = asn1Spec.componentType
+
+ if LOG:
+ LOG('decoding type %r chosen by given `asn1Spec`' % componentType)
+
+ idx = 0
+
+ while substrate:
+ component, substrate = decodeFun(substrate, componentType, allowEoo=True, **options)
+
+ if component is eoo.endOfOctets:
+ break
+
+ asn1Object.setComponentByPosition(
+ idx, component,
+ verifyConstraints=False,
+ matchTags=False, matchConstraints=False
+ )
+
+ idx += 1
+
+ else:
+ raise error.SubstrateUnderrunError(
+ 'No EOO seen before substrate ends'
+ )
+
+ return asn1Object, substrate
+
+
+class SequenceOrSequenceOfDecoder(UniversalConstructedTypeDecoder):
+ protoRecordComponent = univ.Sequence()
+ protoSequenceComponent = univ.SequenceOf()
+
+
+class SequenceDecoder(SequenceOrSequenceOfDecoder):
+ protoComponent = univ.Sequence()
+
+
+class SequenceOfDecoder(SequenceOrSequenceOfDecoder):
+ protoComponent = univ.SequenceOf()
+
+
+class SetOrSetOfDecoder(UniversalConstructedTypeDecoder):
+ protoRecordComponent = univ.Set()
+ protoSequenceComponent = univ.SetOf()
+
+
+class SetDecoder(SetOrSetOfDecoder):
+ protoComponent = univ.Set()
+
+
+class SetOfDecoder(SetOrSetOfDecoder):
+ protoComponent = univ.SetOf()
+
+
+class ChoiceDecoder(AbstractConstructedDecoder):
+ protoComponent = univ.Choice()
+
+ def valueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ head, tail = substrate[:length], substrate[length:]
+
+ if asn1Spec is None:
+ asn1Object = self.protoComponent.clone(tagSet=tagSet)
+
+ else:
+ asn1Object = asn1Spec.clone()
+
+ if substrateFun:
+ return substrateFun(asn1Object, substrate, length)
+
+ if asn1Object.tagSet == tagSet:
+ if LOG:
+ LOG('decoding %s as explicitly tagged CHOICE' % (tagSet,))
+
+ component, head = decodeFun(
+ head, asn1Object.componentTagMap, **options
+ )
+
+ else:
+ if LOG:
+ LOG('decoding %s as untagged CHOICE' % (tagSet,))
+
+ component, head = decodeFun(
+ head, asn1Object.componentTagMap,
+ tagSet, length, state, **options
+ )
+
+ effectiveTagSet = component.effectiveTagSet
+
+ if LOG:
+ LOG('decoded component %s, effective tag set %s' % (component, effectiveTagSet))
+
+ asn1Object.setComponentByType(
+ effectiveTagSet, component,
+ verifyConstraints=False,
+ matchTags=False, matchConstraints=False,
+ innerFlag=False
+ )
+
+ return asn1Object, tail
+
+ def indefLenValueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ if asn1Spec is None:
+ asn1Object = self.protoComponent.clone(tagSet=tagSet)
+ else:
+ asn1Object = asn1Spec.clone()
+
+ if substrateFun:
+ return substrateFun(asn1Object, substrate, length)
+
+ if asn1Object.tagSet == tagSet:
+ if LOG:
+ LOG('decoding %s as explicitly tagged CHOICE' % (tagSet,))
+
+ component, substrate = decodeFun(
+ substrate, asn1Object.componentType.tagMapUnique, **options
+ )
+
+ # eat up EOO marker
+ eooMarker, substrate = decodeFun(
+ substrate, allowEoo=True, **options
+ )
+
+ if eooMarker is not eoo.endOfOctets:
+ raise error.PyAsn1Error('No EOO seen before substrate ends')
+
+ else:
+ if LOG:
+ LOG('decoding %s as untagged CHOICE' % (tagSet,))
+
+ component, substrate = decodeFun(
+ substrate, asn1Object.componentType.tagMapUnique,
+ tagSet, length, state, **options
+ )
+
+ effectiveTagSet = component.effectiveTagSet
+
+ if LOG:
+ LOG('decoded component %s, effective tag set %s' % (component, effectiveTagSet))
+
+ asn1Object.setComponentByType(
+ effectiveTagSet, component,
+ verifyConstraints=False,
+ matchTags=False, matchConstraints=False,
+ innerFlag=False
+ )
+
+ return asn1Object, substrate
+
+
+class AnyDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.Any()
+
+ def valueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ if asn1Spec is None:
+ isUntagged = True
+
+ elif asn1Spec.__class__ is tagmap.TagMap:
+ isUntagged = tagSet not in asn1Spec.tagMap
+
+ else:
+ isUntagged = tagSet != asn1Spec.tagSet
+
+ if isUntagged:
+ fullSubstrate = options['fullSubstrate']
+
+ # untagged Any container, recover inner header substrate
+ length += len(fullSubstrate) - len(substrate)
+ substrate = fullSubstrate
+
+ if LOG:
+ LOG('decoding as untagged ANY, substrate %s' % debug.hexdump(substrate))
+
+ if substrateFun:
+ return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options),
+ substrate, length)
+
+ head, tail = substrate[:length], substrate[length:]
+
+ return self._createComponent(asn1Spec, tagSet, head, **options), tail
+
+ def indefLenValueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ if asn1Spec is None:
+ isTagged = False
+
+ elif asn1Spec.__class__ is tagmap.TagMap:
+ isTagged = tagSet in asn1Spec.tagMap
+
+ else:
+ isTagged = tagSet == asn1Spec.tagSet
+
+ if isTagged:
+ # tagged Any type -- consume header substrate
+ header = null
+
+ if LOG:
+ LOG('decoding as tagged ANY')
+
+ else:
+ fullSubstrate = options['fullSubstrate']
+
+ # untagged Any, recover header substrate
+ header = fullSubstrate[:-len(substrate)]
+
+ if LOG:
+ LOG('decoding as untagged ANY, header substrate %s' % debug.hexdump(header))
+
+ # Any components do not inherit initial tag
+ asn1Spec = self.protoComponent
+
+ if substrateFun and substrateFun is not self.substrateCollector:
+ asn1Object = self._createComponent(asn1Spec, tagSet, noValue, **options)
+ return substrateFun(asn1Object, header + substrate, length + len(header))
+
+ if LOG:
+ LOG('assembling constructed serialization')
+
+ # All inner fragments are of the same type, treat them as octet string
+ substrateFun = self.substrateCollector
+
+ while substrate:
+ component, substrate = decodeFun(substrate, asn1Spec,
+ substrateFun=substrateFun,
+ allowEoo=True, **options)
+ if component is eoo.endOfOctets:
+ break
+
+ header += component
+
+ else:
+ raise error.SubstrateUnderrunError(
+ 'No EOO seen before substrate ends'
+ )
+
+ if substrateFun:
+ return header, substrate
+
+ else:
+ return self._createComponent(asn1Spec, tagSet, header, **options), substrate
+
+
+# character string types
+class UTF8StringDecoder(OctetStringDecoder):
+ protoComponent = char.UTF8String()
+
+
+class NumericStringDecoder(OctetStringDecoder):
+ protoComponent = char.NumericString()
+
+
+class PrintableStringDecoder(OctetStringDecoder):
+ protoComponent = char.PrintableString()
+
+
+class TeletexStringDecoder(OctetStringDecoder):
+ protoComponent = char.TeletexString()
+
+
+class VideotexStringDecoder(OctetStringDecoder):
+ protoComponent = char.VideotexString()
+
+
+class IA5StringDecoder(OctetStringDecoder):
+ protoComponent = char.IA5String()
+
+
+class GraphicStringDecoder(OctetStringDecoder):
+ protoComponent = char.GraphicString()
+
+
+class VisibleStringDecoder(OctetStringDecoder):
+ protoComponent = char.VisibleString()
+
+
+class GeneralStringDecoder(OctetStringDecoder):
+ protoComponent = char.GeneralString()
+
+
+class UniversalStringDecoder(OctetStringDecoder):
+ protoComponent = char.UniversalString()
+
+
+class BMPStringDecoder(OctetStringDecoder):
+ protoComponent = char.BMPString()
+
+
+# "useful" types
+class ObjectDescriptorDecoder(OctetStringDecoder):
+ protoComponent = useful.ObjectDescriptor()
+
+
+class GeneralizedTimeDecoder(OctetStringDecoder):
+ protoComponent = useful.GeneralizedTime()
+
+
+class UTCTimeDecoder(OctetStringDecoder):
+ protoComponent = useful.UTCTime()
+
+
+tagMap = {
+ univ.Integer.tagSet: IntegerDecoder(),
+ univ.Boolean.tagSet: BooleanDecoder(),
+ univ.BitString.tagSet: BitStringDecoder(),
+ univ.OctetString.tagSet: OctetStringDecoder(),
+ univ.Null.tagSet: NullDecoder(),
+ univ.ObjectIdentifier.tagSet: ObjectIdentifierDecoder(),
+ univ.Enumerated.tagSet: IntegerDecoder(),
+ univ.Real.tagSet: RealDecoder(),
+ univ.Sequence.tagSet: SequenceOrSequenceOfDecoder(), # conflicts with SequenceOf
+ univ.Set.tagSet: SetOrSetOfDecoder(), # conflicts with SetOf
+ univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any
+ # character string types
+ char.UTF8String.tagSet: UTF8StringDecoder(),
+ char.NumericString.tagSet: NumericStringDecoder(),
+ char.PrintableString.tagSet: PrintableStringDecoder(),
+ char.TeletexString.tagSet: TeletexStringDecoder(),
+ char.VideotexString.tagSet: VideotexStringDecoder(),
+ char.IA5String.tagSet: IA5StringDecoder(),
+ char.GraphicString.tagSet: GraphicStringDecoder(),
+ char.VisibleString.tagSet: VisibleStringDecoder(),
+ char.GeneralString.tagSet: GeneralStringDecoder(),
+ char.UniversalString.tagSet: UniversalStringDecoder(),
+ char.BMPString.tagSet: BMPStringDecoder(),
+ # useful types
+ useful.ObjectDescriptor.tagSet: ObjectDescriptorDecoder(),
+ useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(),
+ useful.UTCTime.tagSet: UTCTimeDecoder()
+}
+
+# Type-to-codec map for ambiguous ASN.1 types
+typeMap = {
+ univ.Set.typeId: SetDecoder(),
+ univ.SetOf.typeId: SetOfDecoder(),
+ univ.Sequence.typeId: SequenceDecoder(),
+ univ.SequenceOf.typeId: SequenceOfDecoder(),
+ univ.Choice.typeId: ChoiceDecoder(),
+ univ.Any.typeId: AnyDecoder()
+}
+
+# Put in non-ambiguous types for faster codec lookup
+for typeDecoder in tagMap.values():
+ if typeDecoder.protoComponent is not None:
+ typeId = typeDecoder.protoComponent.__class__.typeId
+ if typeId is not None and typeId not in typeMap:
+ typeMap[typeId] = typeDecoder
+
+
+(stDecodeTag,
+ stDecodeLength,
+ stGetValueDecoder,
+ stGetValueDecoderByAsn1Spec,
+ stGetValueDecoderByTag,
+ stTryAsExplicitTag,
+ stDecodeValue,
+ stDumpRawValue,
+ stErrorCondition,
+ stStop) = [x for x in range(10)]
+
+
+class Decoder(object):
+ defaultErrorState = stErrorCondition
+ #defaultErrorState = stDumpRawValue
+ defaultRawDecoder = AnyDecoder()
+ supportIndefLength = True
+
+ # noinspection PyDefaultArgument
+ def __init__(self, tagMap, typeMap={}):
+ self.__tagMap = tagMap
+ self.__typeMap = typeMap
+ # Tag & TagSet objects caches
+ self.__tagCache = {}
+ self.__tagSetCache = {}
+ self.__eooSentinel = ints2octs((0, 0))
+
+ def __call__(self, substrate, asn1Spec=None,
+ tagSet=None, length=None, state=stDecodeTag,
+ decodeFun=None, substrateFun=None,
+ **options):
+
+ if LOG:
+ LOG('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate)))
+
+ allowEoo = options.pop('allowEoo', False)
+
+ # Look for end-of-octets sentinel
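+        # The end-of-octets marker is the two zero octets 00 00 terminating
+        # an indefinite-length value; it is only recognized when the caller
+        # passes allowEoo.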
+ if allowEoo and self.supportIndefLength:
+ if substrate[:2] == self.__eooSentinel:
+ if LOG:
+ LOG('end-of-octets sentinel found')
+ return eoo.endOfOctets, substrate[2:]
+
+ value = noValue
+
+ tagMap = self.__tagMap
+ typeMap = self.__typeMap
+ tagCache = self.__tagCache
+ tagSetCache = self.__tagSetCache
+
+ fullSubstrate = substrate
+
+ while state is not stStop:
+
+ if state is stDecodeTag:
+ if not substrate:
+ raise error.SubstrateUnderrunError(
+ 'Short octet stream on tag decoding'
+ )
+
+ # Decode tag
+ isShortTag = True
+ firstOctet = substrate[0]
+ substrate = substrate[1:]
+
+ try:
+ lastTag = tagCache[firstOctet]
+
+ except KeyError:
+ integerTag = oct2int(firstOctet)
+ tagClass = integerTag & 0xC0
+ tagFormat = integerTag & 0x20
+ tagId = integerTag & 0x1F
+
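+                    # A tag number of 0x1F selects the high-tag-number form:
+                    # the actual tag number follows in base-128 octets with
+                    # the high bit set on all but the last. Illustrative
+                    # example: a context-class primitive tag 40 arrives
+                    # as 9F 28.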
+ if tagId == 0x1F:
+ isShortTag = False
+ lengthOctetIdx = 0
+ tagId = 0
+
+ try:
+ while True:
+ integerTag = oct2int(substrate[lengthOctetIdx])
+ lengthOctetIdx += 1
+ tagId <<= 7
+ tagId |= (integerTag & 0x7F)
+ if not integerTag & 0x80:
+ break
+
+ substrate = substrate[lengthOctetIdx:]
+
+ except IndexError:
+ raise error.SubstrateUnderrunError(
+ 'Short octet stream on long tag decoding'
+ )
+
+ lastTag = tag.Tag(
+ tagClass=tagClass, tagFormat=tagFormat, tagId=tagId
+ )
+
+ if isShortTag:
+ # cache short tags
+ tagCache[firstOctet] = lastTag
+
+ if tagSet is None:
+ if isShortTag:
+ try:
+ tagSet = tagSetCache[firstOctet]
+
+ except KeyError:
+ # base tag not recovered
+ tagSet = tag.TagSet((), lastTag)
+ tagSetCache[firstOctet] = tagSet
+ else:
+ tagSet = tag.TagSet((), lastTag)
+
+ else:
+ tagSet = lastTag + tagSet
+
+ state = stDecodeLength
+
+ if LOG:
+ LOG('tag decoded into %s, decoding length' % tagSet)
+
+ if state is stDecodeLength:
+ # Decode length
+ if not substrate:
+ raise error.SubstrateUnderrunError(
+ 'Short octet stream on length decoding'
+ )
+
+ firstOctet = oct2int(substrate[0])
+
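+                # BER length forms: a first octet below 0x80 is the length
+                # itself (short form); an octet with the high bit set (other
+                # than a bare 0x80) gives, in its low 7 bits, the number of
+                # following octets holding the length big-endian (long form,
+                # e.g. 82 01 80 means length 384); a bare 0x80 signals
+                # indefinite length terminated by an end-of-octets marker.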
+ if firstOctet < 128:
+ size = 1
+ length = firstOctet
+
+ elif firstOctet > 128:
+ size = firstOctet & 0x7F
+ # encoded in size bytes
+ encodedLength = octs2ints(substrate[1:size + 1])
+                    # no check on the maximum number of length octets here,
+                    # which is not a problem: Python integers can hold any
+                    # length value the encoding can express
+ if len(encodedLength) != size:
+ raise error.SubstrateUnderrunError(
+ '%s<%s at %s' % (size, len(encodedLength), tagSet)
+ )
+
+ length = 0
+ for lengthOctet in encodedLength:
+ length <<= 8
+ length |= lengthOctet
+ size += 1
+
+ else:
+ size = 1
+ length = -1
+
+ substrate = substrate[size:]
+
+ if length == -1:
+ if not self.supportIndefLength:
+ raise error.PyAsn1Error('Indefinite length encoding not supported by this codec')
+
+ else:
+ if len(substrate) < length:
+ raise error.SubstrateUnderrunError('%d-octet short' % (length - len(substrate)))
+
+ state = stGetValueDecoder
+
+ if LOG:
+ LOG('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length])))
+
+ if state is stGetValueDecoder:
+ if asn1Spec is None:
+ state = stGetValueDecoderByTag
+
+ else:
+ state = stGetValueDecoderByAsn1Spec
+ #
+            # There are two ways of creating subtypes in ASN.1, and they
+            # affect decoder operation differently:
+            # 1) The base type's tags are kept (no IMPLICIT tagging is
+            #    applied on subtyping).
+            # 2) The subtype drops base type information (by means of
+            #    IMPLICIT tagging).
+            # The first case allows for complete tag recovery from the
+            # substrate, while the second one requires the original ASN.1
+            # type spec for decoding.
+            #
+            # In either case a set of tags (tagSet) comes from the substrate
+            # in an incremental, tag-by-tag fashion (this is how the most
+            # basic, EXPLICIT, tagging works). The outermost tag comes first
+            # from the wire.
+ #
+ if state is stGetValueDecoderByTag:
+ try:
+ concreteDecoder = tagMap[tagSet]
+
+ except KeyError:
+ concreteDecoder = None
+
+ if concreteDecoder:
+ state = stDecodeValue
+
+ else:
+ try:
+ concreteDecoder = tagMap[tagSet[:1]]
+
+ except KeyError:
+ concreteDecoder = None
+
+ if concreteDecoder:
+ state = stDecodeValue
+ else:
+ state = stTryAsExplicitTag
+
+ if LOG:
+ LOG('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state is stDecodeValue and 'value' or 'as explicit tag'))
+ debug.scope.push(concreteDecoder is None and '?' or concreteDecoder.protoComponent.__class__.__name__)
+
+ if state is stGetValueDecoderByAsn1Spec:
+
+ if asn1Spec.__class__ is tagmap.TagMap:
+ try:
+ chosenSpec = asn1Spec[tagSet]
+
+ except KeyError:
+ chosenSpec = None
+
+ if LOG:
+ LOG('candidate ASN.1 spec is a map of:')
+
+ for firstOctet, v in asn1Spec.presentTypes.items():
+ LOG(' %s -> %s' % (firstOctet, v.__class__.__name__))
+
+ if asn1Spec.skipTypes:
+ LOG('but neither of: ')
+ for firstOctet, v in asn1Spec.skipTypes.items():
+ LOG(' %s -> %s' % (firstOctet, v.__class__.__name__))
+ LOG('new candidate ASN.1 spec is %s, chosen by %s' % (chosenSpec is None and '<none>' or chosenSpec.prettyPrintType(), tagSet))
+
+ elif tagSet == asn1Spec.tagSet or tagSet in asn1Spec.tagMap:
+ chosenSpec = asn1Spec
+ if LOG:
+ LOG('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__)
+
+ else:
+ chosenSpec = None
+
+ if chosenSpec is not None:
+ try:
+ # ambiguous type or just faster codec lookup
+ concreteDecoder = typeMap[chosenSpec.typeId]
+
+ if LOG:
+ LOG('value decoder chosen for an ambiguous type by type ID %s' % (chosenSpec.typeId,))
+
+ except KeyError:
+ # use base type for codec lookup to recover untagged types
+ baseTagSet = tag.TagSet(chosenSpec.tagSet.baseTag, chosenSpec.tagSet.baseTag)
+ try:
+ # base type or tagged subtype
+ concreteDecoder = tagMap[baseTagSet]
+
+ if LOG:
+ LOG('value decoder chosen by base %s' % (baseTagSet,))
+
+ except KeyError:
+ concreteDecoder = None
+
+ if concreteDecoder:
+ asn1Spec = chosenSpec
+ state = stDecodeValue
+
+ else:
+ state = stTryAsExplicitTag
+
+ else:
+ concreteDecoder = None
+ state = stTryAsExplicitTag
+
+ if LOG:
+ LOG('codec %s chosen by ASN.1 spec, decoding %s' % (state is stDecodeValue and concreteDecoder.__class__.__name__ or "<none>", state is stDecodeValue and 'value' or 'as explicit tag'))
+ debug.scope.push(chosenSpec is None and '?' or chosenSpec.__class__.__name__)
+
+ if state is stDecodeValue:
+ if not options.get('recursiveFlag', True) and not substrateFun: # deprecate this
+ substrateFun = lambda a, b, c: (a, b[:c])
+
+ options.update(fullSubstrate=fullSubstrate)
+
+ if length == -1: # indef length
+ value, substrate = concreteDecoder.indefLenValueDecoder(
+ substrate, asn1Spec,
+ tagSet, length, stGetValueDecoder,
+ self, substrateFun,
+ **options
+ )
+
+ else:
+ value, substrate = concreteDecoder.valueDecoder(
+ substrate, asn1Spec,
+ tagSet, length, stGetValueDecoder,
+ self, substrateFun,
+ **options
+ )
+
+ if LOG:
+ LOG('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, isinstance(value, base.Asn1Item) and value.prettyPrint() or value, substrate and debug.hexdump(substrate) or '<none>'))
+
+ state = stStop
+ break
+
+ if state is stTryAsExplicitTag:
+ if (tagSet and
+ tagSet[0].tagFormat == tag.tagFormatConstructed and
+ tagSet[0].tagClass != tag.tagClassUniversal):
+ # Assume explicit tagging
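+                # i.e. an unrecognized constructed, non-universal tag is
+                # taken to be an EXPLICIT wrapper and its content is decoded
+                # recursively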
+ concreteDecoder = explicitTagDecoder
+ state = stDecodeValue
+
+ else:
+ concreteDecoder = None
+ state = self.defaultErrorState
+
+ if LOG:
+ LOG('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state is stDecodeValue and 'value' or 'as failure'))
+
+ if state is stDumpRawValue:
+ concreteDecoder = self.defaultRawDecoder
+
+ if LOG:
+ LOG('codec %s chosen, decoding value' % concreteDecoder.__class__.__name__)
+
+ state = stDecodeValue
+
+ if state is stErrorCondition:
+ raise error.PyAsn1Error(
+ '%s not in asn1Spec: %r' % (tagSet, asn1Spec)
+ )
+
+ if LOG:
+ debug.scope.pop()
+ LOG('decoder left scope %s, call completed' % debug.scope)
+
+ return value, substrate
+
+
+#: Turns BER octet stream into an ASN.1 object.
+#:
+#: Takes a BER octet stream and decodes it into an ASN.1 object
+#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
+#: may be a scalar or an arbitrary nested structure.
+#:
+#: Parameters
+#: ----------
+#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
+#: BER octet-stream
+#:
+#: Keyword Args
+#: ------------
+#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure
+#:     being decoded, *asn1Spec* may or may not be required. The most common
+#:     reason for it to be required is that the ASN.1 structure is encoded with
+#:     *IMPLICIT* tagging.
+#:
+#: Returns
+#: -------
+#: : :py:class:`tuple`
+#: A tuple of pyasn1 object recovered from BER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
+#: and the unprocessed trailing portion of the *substrate* (may be empty)
+#:
+#: Raises
+#: ------
+#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError
+#: On decoding errors
+#:
+#: Examples
+#: --------
+#: Decode BER serialisation without ASN.1 schema
+#:
+#: .. code-block:: pycon
+#:
+#: >>> s, _ = decode(b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03')
+#: >>> str(s)
+#: SequenceOf:
+#: 1 2 3
+#:
+#: Decode BER serialisation with ASN.1 schema
+#:
+#: .. code-block:: pycon
+#:
+#: >>> seq = SequenceOf(componentType=Integer())
+#: >>> s, _ = decode(b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03', asn1Spec=seq)
+#: >>> str(s)
+#: SequenceOf:
+#: 1 2 3
+#:
+decode = Decoder(tagMap, typeMap)
+
+# XXX
+# non-recursive decoding; return position rather than substrate
diff --git a/third_party/python/pyasn1/pyasn1/codec/ber/encoder.py b/third_party/python/pyasn1/pyasn1/codec/ber/encoder.py
new file mode 100644
index 0000000000..778aa86706
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/ber/encoder.py
@@ -0,0 +1,890 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+import sys
+
+from pyasn1 import debug
+from pyasn1 import error
+from pyasn1.codec.ber import eoo
+from pyasn1.compat.integer import to_bytes
+from pyasn1.compat.octets import (int2oct, oct2int, ints2octs, null,
+ str2octs, isOctetsType)
+from pyasn1.type import char
+from pyasn1.type import tag
+from pyasn1.type import univ
+from pyasn1.type import useful
+
+__all__ = ['encode']
+
+LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_ENCODER)
+
+
+class AbstractItemEncoder(object):
+ supportIndefLenMode = True
+
+    # The encoding produced by an otherwise legitimate call `encodeFun(eoo.endOfOctets)`
+ eooIntegerSubstrate = (0, 0)
+ eooOctetsSubstrate = ints2octs(eooIntegerSubstrate)
+
+ # noinspection PyMethodMayBeStatic
+ def encodeTag(self, singleTag, isConstructed):
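+        # Illustrative example: a constructed context-class tag number 2
+        # (tagClass 0x80) yields the single identifier octet 0xA2
+        # (0x80 | 0x20 | 0x02); tag numbers of 31 and above switch to the
+        # multi-octet high-tag-number form below.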
+ tagClass, tagFormat, tagId = singleTag
+ encodedTag = tagClass | tagFormat
+ if isConstructed:
+ encodedTag |= tag.tagFormatConstructed
+
+ if tagId < 31:
+ return encodedTag | tagId,
+
+ else:
+ substrate = tagId & 0x7f,
+
+ tagId >>= 7
+
+ while tagId:
+ substrate = (0x80 | (tagId & 0x7f),) + substrate
+ tagId >>= 7
+
+ return (encodedTag | 0x1F,) + substrate
+
+ def encodeLength(self, length, defMode):
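+        # Illustrative examples: length 5 encodes as the single octet 05
+        # (short form) and length 384 as 82 01 80 (long form); in indefinite
+        # mode a bare 80 is emitted and the end-of-octets octets are appended
+        # after the value by the caller.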
+ if not defMode and self.supportIndefLenMode:
+ return (0x80,)
+
+ if length < 0x80:
+ return length,
+
+ else:
+ substrate = ()
+ while length:
+ substrate = (length & 0xff,) + substrate
+ length >>= 8
+
+ substrateLen = len(substrate)
+
+ if substrateLen > 126:
+ raise error.PyAsn1Error('Length octets overflow (%d)' % substrateLen)
+
+ return (0x80 | substrateLen,) + substrate
+
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+ raise error.PyAsn1Error('Not implemented')
+
+ def encode(self, value, asn1Spec=None, encodeFun=None, **options):
+
+ if asn1Spec is None:
+ tagSet = value.tagSet
+ else:
+ tagSet = asn1Spec.tagSet
+
+ # untagged item?
+ if not tagSet:
+ substrate, isConstructed, isOctets = self.encodeValue(
+ value, asn1Spec, encodeFun, **options
+ )
+ return substrate
+
+ defMode = options.get('defMode', True)
+
+ substrate = null
+
+ for idx, singleTag in enumerate(tagSet.superTags):
+
+ defModeOverride = defMode
+
+ # base tag?
+ if not idx:
+ try:
+ substrate, isConstructed, isOctets = self.encodeValue(
+ value, asn1Spec, encodeFun, **options
+ )
+
+ except error.PyAsn1Error:
+ exc = sys.exc_info()
+ raise error.PyAsn1Error(
+ 'Error encoding %r: %s' % (value, exc[1]))
+
+ if LOG:
+ LOG('encoded %svalue %s into %s' % (
+ isConstructed and 'constructed ' or '', value, substrate
+ ))
+
+ if not substrate and isConstructed and options.get('ifNotEmpty', False):
+ return substrate
+
+ if not isConstructed:
+ defModeOverride = True
+
+ if LOG:
+                        LOG('overridden encoding mode to definite form for primitive type')
+
+ header = self.encodeTag(singleTag, isConstructed)
+
+ if LOG:
+ LOG('encoded %stag %s into %s' % (
+ isConstructed and 'constructed ' or '',
+ singleTag, debug.hexdump(ints2octs(header))))
+
+ header += self.encodeLength(len(substrate), defModeOverride)
+
+ if LOG:
+ LOG('encoded %s octets (tag + payload) into %s' % (
+ len(substrate), debug.hexdump(ints2octs(header))))
+
+ if isOctets:
+ substrate = ints2octs(header) + substrate
+
+ if not defModeOverride:
+ substrate += self.eooOctetsSubstrate
+
+ else:
+ substrate = header + substrate
+
+ if not defModeOverride:
+ substrate += self.eooIntegerSubstrate
+
+ if not isOctets:
+ substrate = ints2octs(substrate)
+
+ return substrate
+
+
+class EndOfOctetsEncoder(AbstractItemEncoder):
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+ return null, False, True
+
+
+class BooleanEncoder(AbstractItemEncoder):
+ supportIndefLenMode = False
+
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+ return value and (1,) or (0,), False, False
+
+
+class IntegerEncoder(AbstractItemEncoder):
+ supportIndefLenMode = False
+ supportCompactZero = False
+
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
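+        # INTEGER payloads are minimal-length, big-endian two's complement,
+        # e.g. 127 -> 7F, 128 -> 00 80 and -128 -> 80.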
+ if value == 0:
+ if LOG:
+ LOG('encoding %spayload for zero INTEGER' % (
+ self.supportCompactZero and 'no ' or ''
+ ))
+
+ # de-facto way to encode zero
+ if self.supportCompactZero:
+ return (), False, False
+ else:
+ return (0,), False, False
+
+ return to_bytes(int(value), signed=True), False, True
+
+
+class BitStringEncoder(AbstractItemEncoder):
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+ if asn1Spec is not None:
+ # TODO: try to avoid ASN.1 schema instantiation
+ value = asn1Spec.clone(value)
+
+ valueLength = len(value)
+ if valueLength % 8:
+ alignedValue = value << (8 - valueLength % 8)
+ else:
+ alignedValue = value
+
+ maxChunkSize = options.get('maxChunkSize', 0)
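+        # With a non-zero maxChunkSize the value is emitted as a constructed
+        # BIT STRING assembled from primitive fragments of at most
+        # maxChunkSize octets each.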
+ if not maxChunkSize or len(alignedValue) <= maxChunkSize * 8:
+ substrate = alignedValue.asOctets()
+ return int2oct(len(substrate) * 8 - valueLength) + substrate, False, True
+
+ if LOG:
+ LOG('encoding into up to %s-octet chunks' % maxChunkSize)
+
+ baseTag = value.tagSet.baseTag
+
+ # strip off explicit tags
+ if baseTag:
+ tagSet = tag.TagSet(baseTag, baseTag)
+
+ else:
+ tagSet = tag.TagSet()
+
+ alignedValue = alignedValue.clone(tagSet=tagSet)
+
+ stop = 0
+ substrate = null
+ while stop < valueLength:
+ start = stop
+ stop = min(start + maxChunkSize * 8, valueLength)
+ substrate += encodeFun(alignedValue[start:stop], asn1Spec, **options)
+
+ return substrate, True, True
+
+
+class OctetStringEncoder(AbstractItemEncoder):
+
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+
+ if asn1Spec is None:
+ substrate = value.asOctets()
+
+ elif not isOctetsType(value):
+ substrate = asn1Spec.clone(value).asOctets()
+
+ else:
+ substrate = value
+
+ maxChunkSize = options.get('maxChunkSize', 0)
+
+ if not maxChunkSize or len(substrate) <= maxChunkSize:
+ return substrate, False, True
+
+ if LOG:
+ LOG('encoding into up to %s-octet chunks' % maxChunkSize)
+
+ # strip off explicit tags for inner chunks
+
+ if asn1Spec is None:
+ baseTag = value.tagSet.baseTag
+
+ # strip off explicit tags
+ if baseTag:
+ tagSet = tag.TagSet(baseTag, baseTag)
+
+ else:
+ tagSet = tag.TagSet()
+
+ asn1Spec = value.clone(tagSet=tagSet)
+
+ elif not isOctetsType(value):
+ baseTag = asn1Spec.tagSet.baseTag
+
+ # strip off explicit tags
+ if baseTag:
+ tagSet = tag.TagSet(baseTag, baseTag)
+
+ else:
+ tagSet = tag.TagSet()
+
+ asn1Spec = asn1Spec.clone(tagSet=tagSet)
+
+ pos = 0
+ substrate = null
+
+ while True:
+ chunk = value[pos:pos + maxChunkSize]
+ if not chunk:
+ break
+
+ substrate += encodeFun(chunk, asn1Spec, **options)
+ pos += maxChunkSize
+
+ return substrate, True, True
+
+
+class NullEncoder(AbstractItemEncoder):
+ supportIndefLenMode = False
+
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+ return null, False, True
+
+
+class ObjectIdentifierEncoder(AbstractItemEncoder):
+ supportIndefLenMode = False
+
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+ if asn1Spec is not None:
+ value = asn1Spec.clone(value)
+
+ oid = value.asTuple()
+
+ # Build the first pair
+ try:
+ first = oid[0]
+ second = oid[1]
+
+ except IndexError:
+ raise error.PyAsn1Error('Short OID %s' % (value,))
+
+ if 0 <= second <= 39:
+ if first == 1:
+ oid = (second + 40,) + oid[2:]
+ elif first == 0:
+ oid = (second,) + oid[2:]
+ elif first == 2:
+ oid = (second + 80,) + oid[2:]
+ else:
+ raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,))
+
+ elif first == 2:
+ oid = (second + 80,) + oid[2:]
+
+ else:
+ raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,))
+
+ octets = ()
+
+ # Cycle through subIds
+ for subOid in oid:
+ if 0 <= subOid <= 127:
+ # Optimize for the common case
+ octets += (subOid,)
+
+ elif subOid > 127:
+ # Pack large Sub-Object IDs
+ res = (subOid & 0x7f,)
+ subOid >>= 7
+
+ while subOid:
+ res = (0x80 | (subOid & 0x7f),) + res
+ subOid >>= 7
+
+ # Add packed Sub-Object ID to resulted Object ID
+ octets += res
+
+ else:
+ raise error.PyAsn1Error('Negative OID arc %s at %s' % (subOid, value))
+
+ return octets, False, False
+
+
+class RealEncoder(AbstractItemEncoder):
+ supportIndefLenMode = 0
+ binEncBase = 2 # set to None to choose encoding base automatically
+
+ @staticmethod
+ def _dropFloatingPoint(m, encbase, e):
+ ms, es = 1, 1
+ if m < 0:
+ ms = -1 # mantissa sign
+
+ if e < 0:
+ es = -1 # exponent sign
+
+ m *= ms
+
+ if encbase == 8:
+ m *= 2 ** (abs(e) % 3 * es)
+ e = abs(e) // 3 * es
+
+ elif encbase == 16:
+ m *= 2 ** (abs(e) % 4 * es)
+ e = abs(e) // 4 * es
+
+ while True:
+ if int(m) != m:
+ m *= encbase
+ e -= 1
+ continue
+ break
+
+ return ms, int(m), encbase, e
+
+ def _chooseEncBase(self, value):
+ m, b, e = value
+ encBase = [2, 8, 16]
+ if value.binEncBase in encBase:
+ return self._dropFloatingPoint(m, value.binEncBase, e)
+
+ elif self.binEncBase in encBase:
+ return self._dropFloatingPoint(m, self.binEncBase, e)
+
+ # auto choosing base 2/8/16
+ mantissa = [m, m, m]
+ exponent = [e, e, e]
+ sign = 1
+ encbase = 2
+ e = float('inf')
+
+ for i in range(3):
+ (sign,
+ mantissa[i],
+ encBase[i],
+ exponent[i]) = self._dropFloatingPoint(mantissa[i], encBase[i], exponent[i])
+
+ if abs(exponent[i]) < abs(e) or (abs(exponent[i]) == abs(e) and mantissa[i] < m):
+ e = exponent[i]
+ m = int(mantissa[i])
+ encbase = encBase[i]
+
+ if LOG:
+ LOG('automatically chosen REAL encoding base %s, sign %s, mantissa %s, '
+ 'exponent %s' % (encbase, sign, m, e))
+
+ return sign, m, encbase, e
+
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+ if asn1Spec is not None:
+ value = asn1Spec.clone(value)
+
+ if value.isPlusInf:
+ return (0x40,), False, False
+
+ if value.isMinusInf:
+ return (0x41,), False, False
+
+ m, b, e = value
+
+ if not m:
+ return null, False, True
+
+ if b == 10:
+ if LOG:
+ LOG('encoding REAL into character form')
+
+ return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), False, True
+
+ elif b == 2:
+ fo = 0x80 # binary encoding
+ ms, m, encbase, e = self._chooseEncBase(value)
+
+ if ms < 0: # mantissa sign
+ fo |= 0x40 # sign bit
+
+ # exponent & mantissa normalization
+ if encbase == 2:
+ while m & 0x1 == 0:
+ m >>= 1
+ e += 1
+
+ elif encbase == 8:
+ while m & 0x7 == 0:
+ m >>= 3
+ e += 1
+ fo |= 0x10
+
+ else: # encbase = 16
+ while m & 0xf == 0:
+ m >>= 4
+ e += 1
+ fo |= 0x20
+
+ sf = 0 # scale factor
+
+ while m & 0x1 == 0:
+ m >>= 1
+ sf += 1
+
+ if sf > 3:
+ raise error.PyAsn1Error('Scale factor overflow') # bug if raised
+
+ fo |= sf << 2
+ eo = null
+ if e == 0 or e == -1:
+ eo = int2oct(e & 0xff)
+
+ else:
+ while e not in (0, -1):
+ eo = int2oct(e & 0xff) + eo
+ e >>= 8
+
+ if e == 0 and eo and oct2int(eo[0]) & 0x80:
+ eo = int2oct(0) + eo
+
+ if e == -1 and eo and not (oct2int(eo[0]) & 0x80):
+ eo = int2oct(0xff) + eo
+
+ n = len(eo)
+ if n > 0xff:
+ raise error.PyAsn1Error('Real exponent overflow')
+
+ if n == 1:
+ pass
+
+ elif n == 2:
+ fo |= 1
+
+ elif n == 3:
+ fo |= 2
+
+ else:
+ fo |= 3
+ eo = int2oct(n & 0xff) + eo
+
+ po = null
+
+ while m:
+ po = int2oct(m & 0xff) + po
+ m >>= 8
+
+ substrate = int2oct(fo) + eo + po
+
+ return substrate, False, True
+
+ else:
+ raise error.PyAsn1Error('Prohibited Real base %s' % b)
+
+
+class SequenceEncoder(AbstractItemEncoder):
+ omitEmptyOptionals = False
+
+ # TODO: handling three flavors of input is too much -- split over codecs
+
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+
+ substrate = null
+
+ omitEmptyOptionals = options.get(
+ 'omitEmptyOptionals', self.omitEmptyOptionals)
+
+ if LOG:
+ LOG('%sencoding empty OPTIONAL components' % (
+ omitEmptyOptionals and 'not ' or ''))
+
+ if asn1Spec is None:
+ # instance of ASN.1 schema
+ inconsistency = value.isInconsistent
+ if inconsistency:
+ raise inconsistency
+
+ namedTypes = value.componentType
+
+ for idx, component in enumerate(value.values()):
+ if namedTypes:
+ namedType = namedTypes[idx]
+
+ if namedType.isOptional and not component.isValue:
+ if LOG:
+ LOG('not encoding OPTIONAL component %r' % (namedType,))
+ continue
+
+ if namedType.isDefaulted and component == namedType.asn1Object:
+ if LOG:
+ LOG('not encoding DEFAULT component %r' % (namedType,))
+ continue
+
+ if omitEmptyOptionals:
+ options.update(ifNotEmpty=namedType.isOptional)
+
+ # wrap open type blob if needed
+ if namedTypes and namedType.openType:
+
+ wrapType = namedType.asn1Object
+
+ if wrapType.typeId in (
+ univ.SetOf.typeId, univ.SequenceOf.typeId):
+
+ substrate += encodeFun(
+ component, asn1Spec,
+ **dict(options, wrapType=wrapType.componentType))
+
+ else:
+ chunk = encodeFun(component, asn1Spec, **options)
+
+ if wrapType.isSameTypeWith(component):
+ substrate += chunk
+
+ else:
+ substrate += encodeFun(chunk, wrapType, **options)
+
+ if LOG:
+ LOG('wrapped with wrap type %r' % (wrapType,))
+
+ else:
+ substrate += encodeFun(component, asn1Spec, **options)
+
+ else:
+ # bare Python value + ASN.1 schema
+ for idx, namedType in enumerate(asn1Spec.componentType.namedTypes):
+
+ try:
+ component = value[namedType.name]
+
+ except KeyError:
+ raise error.PyAsn1Error('Component name "%s" not found in %r' % (
+ namedType.name, value))
+
+ if namedType.isOptional and namedType.name not in value:
+ if LOG:
+ LOG('not encoding OPTIONAL component %r' % (namedType,))
+ continue
+
+ if namedType.isDefaulted and component == namedType.asn1Object:
+ if LOG:
+ LOG('not encoding DEFAULT component %r' % (namedType,))
+ continue
+
+ if omitEmptyOptionals:
+ options.update(ifNotEmpty=namedType.isOptional)
+
+ componentSpec = namedType.asn1Object
+
+ # wrap open type blob if needed
+ if namedType.openType:
+
+ if componentSpec.typeId in (
+ univ.SetOf.typeId, univ.SequenceOf.typeId):
+
+ substrate += encodeFun(
+ component, componentSpec,
+ **dict(options, wrapType=componentSpec.componentType))
+
+ else:
+ chunk = encodeFun(component, componentSpec, **options)
+
+ if componentSpec.isSameTypeWith(component):
+ substrate += chunk
+
+ else:
+ substrate += encodeFun(chunk, componentSpec, **options)
+
+ if LOG:
+ LOG('wrapped with wrap type %r' % (componentSpec,))
+
+ else:
+ substrate += encodeFun(component, componentSpec, **options)
+
+ return substrate, True, True
+
+
+class SequenceOfEncoder(AbstractItemEncoder):
+ def _encodeComponents(self, value, asn1Spec, encodeFun, **options):
+
+ if asn1Spec is None:
+ inconsistency = value.isInconsistent
+ if inconsistency:
+ raise inconsistency
+
+ else:
+ asn1Spec = asn1Spec.componentType
+
+ chunks = []
+
+ wrapType = options.pop('wrapType', None)
+
+ for idx, component in enumerate(value):
+ chunk = encodeFun(component, asn1Spec, **options)
+
+ if (wrapType is not None and
+ not wrapType.isSameTypeWith(component)):
+ # wrap encoded value with wrapper container (e.g. ANY)
+ chunk = encodeFun(chunk, wrapType, **options)
+
+ if LOG:
+ LOG('wrapped with wrap type %r' % (wrapType,))
+
+ chunks.append(chunk)
+
+ return chunks
+
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+ chunks = self._encodeComponents(
+ value, asn1Spec, encodeFun, **options)
+
+ return null.join(chunks), True, True
+
+
+class ChoiceEncoder(AbstractItemEncoder):
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+ if asn1Spec is None:
+ component = value.getComponent()
+ else:
+ names = [namedType.name for namedType in asn1Spec.componentType.namedTypes
+ if namedType.name in value]
+ if len(names) != 1:
+                raise error.PyAsn1Error('%s components for Choice at %r' % (len(names) and 'Multiple' or 'None', value))
+
+ name = names[0]
+
+ component = value[name]
+ asn1Spec = asn1Spec[name]
+
+ return encodeFun(component, asn1Spec, **options), True, True
+
+
+class AnyEncoder(OctetStringEncoder):
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+ if asn1Spec is None:
+ value = value.asOctets()
+ elif not isOctetsType(value):
+ value = asn1Spec.clone(value).asOctets()
+
+ return value, not options.get('defMode', True), True
+
+
+tagMap = {
+ eoo.endOfOctets.tagSet: EndOfOctetsEncoder(),
+ univ.Boolean.tagSet: BooleanEncoder(),
+ univ.Integer.tagSet: IntegerEncoder(),
+ univ.BitString.tagSet: BitStringEncoder(),
+ univ.OctetString.tagSet: OctetStringEncoder(),
+ univ.Null.tagSet: NullEncoder(),
+ univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(),
+ univ.Enumerated.tagSet: IntegerEncoder(),
+ univ.Real.tagSet: RealEncoder(),
+ # Sequence & Set have same tags as SequenceOf & SetOf
+ univ.SequenceOf.tagSet: SequenceOfEncoder(),
+ univ.SetOf.tagSet: SequenceOfEncoder(),
+ univ.Choice.tagSet: ChoiceEncoder(),
+ # character string types
+ char.UTF8String.tagSet: OctetStringEncoder(),
+ char.NumericString.tagSet: OctetStringEncoder(),
+ char.PrintableString.tagSet: OctetStringEncoder(),
+ char.TeletexString.tagSet: OctetStringEncoder(),
+ char.VideotexString.tagSet: OctetStringEncoder(),
+ char.IA5String.tagSet: OctetStringEncoder(),
+ char.GraphicString.tagSet: OctetStringEncoder(),
+ char.VisibleString.tagSet: OctetStringEncoder(),
+ char.GeneralString.tagSet: OctetStringEncoder(),
+ char.UniversalString.tagSet: OctetStringEncoder(),
+ char.BMPString.tagSet: OctetStringEncoder(),
+ # useful types
+ useful.ObjectDescriptor.tagSet: OctetStringEncoder(),
+ useful.GeneralizedTime.tagSet: OctetStringEncoder(),
+ useful.UTCTime.tagSet: OctetStringEncoder()
+}
+
+# Put in ambiguous & non-ambiguous types for faster codec lookup
+typeMap = {
+ univ.Boolean.typeId: BooleanEncoder(),
+ univ.Integer.typeId: IntegerEncoder(),
+ univ.BitString.typeId: BitStringEncoder(),
+ univ.OctetString.typeId: OctetStringEncoder(),
+ univ.Null.typeId: NullEncoder(),
+ univ.ObjectIdentifier.typeId: ObjectIdentifierEncoder(),
+ univ.Enumerated.typeId: IntegerEncoder(),
+ univ.Real.typeId: RealEncoder(),
+ # Sequence & Set have same tags as SequenceOf & SetOf
+ univ.Set.typeId: SequenceEncoder(),
+ univ.SetOf.typeId: SequenceOfEncoder(),
+ univ.Sequence.typeId: SequenceEncoder(),
+ univ.SequenceOf.typeId: SequenceOfEncoder(),
+ univ.Choice.typeId: ChoiceEncoder(),
+ univ.Any.typeId: AnyEncoder(),
+ # character string types
+ char.UTF8String.typeId: OctetStringEncoder(),
+ char.NumericString.typeId: OctetStringEncoder(),
+ char.PrintableString.typeId: OctetStringEncoder(),
+ char.TeletexString.typeId: OctetStringEncoder(),
+ char.VideotexString.typeId: OctetStringEncoder(),
+ char.IA5String.typeId: OctetStringEncoder(),
+ char.GraphicString.typeId: OctetStringEncoder(),
+ char.VisibleString.typeId: OctetStringEncoder(),
+ char.GeneralString.typeId: OctetStringEncoder(),
+ char.UniversalString.typeId: OctetStringEncoder(),
+ char.BMPString.typeId: OctetStringEncoder(),
+ # useful types
+ useful.ObjectDescriptor.typeId: OctetStringEncoder(),
+ useful.GeneralizedTime.typeId: OctetStringEncoder(),
+ useful.UTCTime.typeId: OctetStringEncoder()
+}
+
+
+class Encoder(object):
+ fixedDefLengthMode = None
+ fixedChunkSize = None
+
+ # noinspection PyDefaultArgument
+ def __init__(self, tagMap, typeMap={}):
+ self.__tagMap = tagMap
+ self.__typeMap = typeMap
+
+ def __call__(self, value, asn1Spec=None, **options):
+ try:
+ if asn1Spec is None:
+ typeId = value.typeId
+ else:
+ typeId = asn1Spec.typeId
+
+ except AttributeError:
+ raise error.PyAsn1Error('Value %r is not ASN.1 type instance '
+ 'and "asn1Spec" not given' % (value,))
+
+ if LOG:
+ LOG('encoder called in %sdef mode, chunk size %s for '
+ 'type %s, value:\n%s' % (not options.get('defMode', True) and 'in' or '', options.get('maxChunkSize', 0), asn1Spec is None and value.prettyPrintType() or asn1Spec.prettyPrintType(), value))
+
+ if self.fixedDefLengthMode is not None:
+ options.update(defMode=self.fixedDefLengthMode)
+
+ if self.fixedChunkSize is not None:
+ options.update(maxChunkSize=self.fixedChunkSize)
+
+
+ try:
+ concreteEncoder = self.__typeMap[typeId]
+
+ if LOG:
+ LOG('using value codec %s chosen by type ID %s' % (concreteEncoder.__class__.__name__, typeId))
+
+ except KeyError:
+ if asn1Spec is None:
+ tagSet = value.tagSet
+ else:
+ tagSet = asn1Spec.tagSet
+
+ # use base type for codec lookup to recover untagged types
+ baseTagSet = tag.TagSet(tagSet.baseTag, tagSet.baseTag)
+
+ try:
+ concreteEncoder = self.__tagMap[baseTagSet]
+
+ except KeyError:
+ raise error.PyAsn1Error('No encoder for %r (%s)' % (value, tagSet))
+
+ if LOG:
+ LOG('using value codec %s chosen by tagSet %s' % (concreteEncoder.__class__.__name__, tagSet))
+
+ substrate = concreteEncoder.encode(value, asn1Spec, self, **options)
+
+ if LOG:
+ LOG('codec %s built %s octets of substrate: %s\nencoder completed' % (concreteEncoder, len(substrate), debug.hexdump(substrate)))
+
+ return substrate
+
+#: Turns ASN.1 object into BER octet stream.
+#:
+#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative),
+#: walks all its components recursively and produces a BER octet stream.
+#:
+#: Parameters
+#: ----------
+#: value: either a Python or pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
+#: A Python or pyasn1 object to encode. If a Python object is given, the `asn1Spec`
+#: parameter is required to guide the encoding process.
+#:
+#: Keyword Args
+#: ------------
+#: asn1Spec:
+#: Optional ASN.1 schema or value object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+#:
+#: defMode: :py:class:`bool`
+#: If :obj:`False`, produces indefinite length encoding
+#:
+#: maxChunkSize: :py:class:`int`
+#: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size)
+#:
+#: Returns
+#: -------
+#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
+#: Given ASN.1 object encoded into BER octet-stream
+#:
+#: Raises
+#: ------
+#: ~pyasn1.error.PyAsn1Error
+#: On encoding errors
+#:
+#: Examples
+#: --------
+#: Encode Python value into BER with ASN.1 schema
+#:
+#: .. code-block:: pycon
+#:
+#: >>> seq = SequenceOf(componentType=Integer())
+#: >>> encode([1, 2, 3], asn1Spec=seq)
+#: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03'
+#:
+#: Encode ASN.1 value object into BER
+#:
+#: .. code-block:: pycon
+#:
+#: >>> seq = SequenceOf(componentType=Integer())
+#: >>> seq.extend([1, 2, 3])
+#: >>> encode(seq)
+#: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03'
+#:
+encode = Encoder(tagMap, typeMap)
diff --git a/third_party/python/pyasn1/pyasn1/codec/ber/eoo.py b/third_party/python/pyasn1/pyasn1/codec/ber/eoo.py
new file mode 100644
index 0000000000..48eb859e97
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/ber/eoo.py
@@ -0,0 +1,28 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from pyasn1.type import base
+from pyasn1.type import tag
+
+__all__ = ['endOfOctets']
+
+
+class EndOfOctets(base.SimpleAsn1Type):
+ defaultValue = 0
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x00)
+ )
+
+ _instance = None
+
+ def __new__(cls, *args, **kwargs):
+ if cls._instance is None:
+ cls._instance = object.__new__(cls, *args, **kwargs)
+
+ return cls._instance
+
+
+endOfOctets = EndOfOctets()
diff --git a/third_party/python/pyasn1/pyasn1/codec/cer/__init__.py b/third_party/python/pyasn1/pyasn1/codec/cer/__init__.py
new file mode 100644
index 0000000000..8c3066b2e6
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/cer/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/third_party/python/pyasn1/pyasn1/codec/cer/decoder.py b/third_party/python/pyasn1/pyasn1/codec/cer/decoder.py
new file mode 100644
index 0000000000..3e86fd0bc1
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/cer/decoder.py
@@ -0,0 +1,114 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from pyasn1 import error
+from pyasn1.codec.ber import decoder
+from pyasn1.compat.octets import oct2int
+from pyasn1.type import univ
+
+__all__ = ['decode']
+
+
+class BooleanDecoder(decoder.AbstractSimpleDecoder):
+ protoComponent = univ.Boolean(0)
+
+ def valueDecoder(self, substrate, asn1Spec,
+ tagSet=None, length=None, state=None,
+ decodeFun=None, substrateFun=None,
+ **options):
+ head, tail = substrate[:length], substrate[length:]
+ if not head or length != 1:
+ raise error.PyAsn1Error('Not single-octet Boolean payload')
+ byte = oct2int(head[0])
+ # CER/DER specifies encoding of TRUE as 0xFF and FALSE as 0x0, while
+ # BER allows any non-zero value as TRUE; cf. sections 8.2.2. and 11.1
+ # in https://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf
+ if byte == 0xff:
+ value = 1
+ elif byte == 0x00:
+ value = 0
+ else:
+ raise error.PyAsn1Error('Unexpected Boolean payload: %s' % byte)
+ return self._createComponent(asn1Spec, tagSet, value, **options), tail
+
+# TODO: prohibit non-canonical encoding
+BitStringDecoder = decoder.BitStringDecoder
+OctetStringDecoder = decoder.OctetStringDecoder
+RealDecoder = decoder.RealDecoder
+
+tagMap = decoder.tagMap.copy()
+tagMap.update(
+ {univ.Boolean.tagSet: BooleanDecoder(),
+ univ.BitString.tagSet: BitStringDecoder(),
+ univ.OctetString.tagSet: OctetStringDecoder(),
+ univ.Real.tagSet: RealDecoder()}
+)
+
+typeMap = decoder.typeMap.copy()
+
+# Put in non-ambiguous types for faster codec lookup
+for typeDecoder in tagMap.values():
+ if typeDecoder.protoComponent is not None:
+ typeId = typeDecoder.protoComponent.__class__.typeId
+ if typeId is not None and typeId not in typeMap:
+ typeMap[typeId] = typeDecoder
+
+
+class Decoder(decoder.Decoder):
+ pass
+
+
+#: Turns CER octet stream into an ASN.1 object.
+#:
+#: Takes a CER octet stream and decodes it into an ASN.1 object
+#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
+#: may be a scalar or an arbitrary nested structure.
+#:
+#: Parameters
+#: ----------
+#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
+#: CER octet-stream
+#:
+#: Keyword Args
+#: ------------
+#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure
+#: being decoded, *asn1Spec* may or may not be required. The most common reason for
+#: it to be required is that the ASN.1 structure is encoded in *IMPLICIT* tagging mode.
+#:
+#: Returns
+#: -------
+#: : :py:class:`tuple`
+#: A tuple of pyasn1 object recovered from CER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
+#: and the unprocessed trailing portion of the *substrate* (may be empty)
+#:
+#: Raises
+#: ------
+#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError
+#: On decoding errors
+#:
+#: Examples
+#: --------
+#: Decode CER serialisation without ASN.1 schema
+#:
+#: .. code-block:: pycon
+#:
+#: >>> s, _ = decode(b'0\x80\x02\x01\x01\x02\x01\x02\x02\x01\x03\x00\x00')
+#: >>> str(s)
+#: SequenceOf:
+#: 1 2 3
+#:
+#: Decode CER serialisation with ASN.1 schema
+#:
+#: .. code-block:: pycon
+#:
+#: >>> seq = SequenceOf(componentType=Integer())
+#: >>> s, _ = decode(b'0\x80\x02\x01\x01\x02\x01\x02\x02\x01\x03\x00\x00', asn1Spec=seq)
+#: >>> str(s)
+#: SequenceOf:
+#: 1 2 3
+#:
+decode = Decoder(tagMap, decoder.typeMap)
diff --git a/third_party/python/pyasn1/pyasn1/codec/cer/encoder.py b/third_party/python/pyasn1/pyasn1/codec/cer/encoder.py
new file mode 100644
index 0000000000..935b696561
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/cer/encoder.py
@@ -0,0 +1,313 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from pyasn1 import error
+from pyasn1.codec.ber import encoder
+from pyasn1.compat.octets import str2octs, null
+from pyasn1.type import univ
+from pyasn1.type import useful
+
+__all__ = ['encode']
+
+
+class BooleanEncoder(encoder.IntegerEncoder):
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+ if value == 0:
+ substrate = (0,)
+ else:
+ substrate = (255,)
+ return substrate, False, False
+
+
+class RealEncoder(encoder.RealEncoder):
+ def _chooseEncBase(self, value):
+ m, b, e = value
+ return self._dropFloatingPoint(m, b, e)
+
+
+# specialized GeneralStringEncoder here
+
+class TimeEncoderMixIn(object):
+ Z_CHAR = ord('Z')
+ PLUS_CHAR = ord('+')
+ MINUS_CHAR = ord('-')
+ COMMA_CHAR = ord(',')
+ DOT_CHAR = ord('.')
+ ZERO_CHAR = ord('0')
+
+ MIN_LENGTH = 12
+ MAX_LENGTH = 19
+
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+ # CER encoding constraints:
+ # - minutes are mandatory, seconds are optional
+ # - sub-seconds must NOT be zero / no meaningless zeros
+ # - no hanging fraction dot
+ # - time in UTC (Z)
+ # - only dot is allowed for fractions
+
+ if asn1Spec is not None:
+ value = asn1Spec.clone(value)
+
+ numbers = value.asNumbers()
+
+ if self.PLUS_CHAR in numbers or self.MINUS_CHAR in numbers:
+ raise error.PyAsn1Error('Must be UTC time: %r' % value)
+
+ if numbers[-1] != self.Z_CHAR:
+ raise error.PyAsn1Error('Missing "Z" time zone specifier: %r' % value)
+
+ if self.COMMA_CHAR in numbers:
+ raise error.PyAsn1Error('Comma in fractions disallowed: %r' % value)
+
+ if self.DOT_CHAR in numbers:
+
+ isModified = False
+
+ numbers = list(numbers)
+
+ searchIndex = min(numbers.index(self.DOT_CHAR) + 4, len(numbers) - 1)
+
+ while numbers[searchIndex] != self.DOT_CHAR:
+ if numbers[searchIndex] == self.ZERO_CHAR:
+ del numbers[searchIndex]
+ isModified = True
+
+ searchIndex -= 1
+
+ searchIndex += 1
+
+ if searchIndex < len(numbers):
+ if numbers[searchIndex] == self.Z_CHAR:
+                    # drop hanging fraction dot
+ del numbers[searchIndex - 1]
+ isModified = True
+
+ if isModified:
+ value = value.clone(numbers)
+
+ if not self.MIN_LENGTH < len(numbers) < self.MAX_LENGTH:
+ raise error.PyAsn1Error('Length constraint violated: %r' % value)
+
+ options.update(maxChunkSize=1000)
+
+ return encoder.OctetStringEncoder.encodeValue(
+ self, value, asn1Spec, encodeFun, **options
+ )
+
+
+class GeneralizedTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder):
+ MIN_LENGTH = 12
+ MAX_LENGTH = 20
+
+
+class UTCTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder):
+ MIN_LENGTH = 10
+ MAX_LENGTH = 14
+
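+# Illustrative sketch (assumed usage, not upstream documentation): the time
+# encoders above enforce the CER constraints listed in
+# TimeEncoderMixIn.encodeValue(), i.e. UTC-only ("Z") timestamps, dot-only
+# fractions without meaningless zeros, and bounded length:
+#
+#   >>> from pyasn1.type import useful
+#   >>> from pyasn1.codec.cer import encoder
+#   >>> substrate = encoder.encode(useful.GeneralizedTime('20190101120000Z'))  # accepted
+#   >>> encoder.encode(useful.GeneralizedTime('20190101120000+0700'))
+#   Traceback (most recent call last):
+#   ...
+#   pyasn1.error.PyAsn1Error: Must be UTC time: ...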
+
+class SetOfEncoder(encoder.SequenceOfEncoder):
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+ chunks = self._encodeComponents(
+ value, asn1Spec, encodeFun, **options)
+
+ # sort by serialised and padded components
+ if len(chunks) > 1:
+ zero = str2octs('\x00')
+ maxLen = max(map(len, chunks))
+ paddedChunks = [
+ (x.ljust(maxLen, zero), x) for x in chunks
+ ]
+ paddedChunks.sort(key=lambda x: x[0])
+
+ chunks = [x[1] for x in paddedChunks]
+
+ return null.join(chunks), True, True
+
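+# Illustrative sketch (assumed usage): under CER the serialised components of
+# a SET OF are emitted in ascending order of their (zero-padded) encodings,
+# so the insertion order of the Python object does not survive encoding:
+#
+#   >>> from pyasn1.type import univ
+#   >>> from pyasn1.codec.cer import encoder
+#   >>> setOf = univ.SetOf(componentType=univ.Integer())
+#   >>> setOf.extend([3, 1, 2])
+#   >>> substrate = encoder.encode(setOf)  # components appear as 1, 2, 3 on the wire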
+
+class SequenceOfEncoder(encoder.SequenceOfEncoder):
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+
+ if options.get('ifNotEmpty', False) and not len(value):
+ return null, True, True
+
+ chunks = self._encodeComponents(
+ value, asn1Spec, encodeFun, **options)
+
+ return null.join(chunks), True, True
+
+
+class SetEncoder(encoder.SequenceEncoder):
+ @staticmethod
+ def _componentSortKey(componentAndType):
+ """Sort SET components by tag
+
+ Sort regardless of the Choice value (static sort)
+ """
+ component, asn1Spec = componentAndType
+
+ if asn1Spec is None:
+ asn1Spec = component
+
+        if asn1Spec.typeId == univ.Choice.typeId and not asn1Spec.tagSet:
+            # untagged CHOICE has no tags of its own; order by the smallest
+            # tag among its alternatives
+            return asn1Spec.componentType.minTagSet
+
+        else:
+            return asn1Spec.tagSet
+
+ def encodeValue(self, value, asn1Spec, encodeFun, **options):
+
+ substrate = null
+
+ comps = []
+ compsMap = {}
+
+ if asn1Spec is None:
+ # instance of ASN.1 schema
+ inconsistency = value.isInconsistent
+ if inconsistency:
+ raise inconsistency
+
+ namedTypes = value.componentType
+
+ for idx, component in enumerate(value.values()):
+ if namedTypes:
+ namedType = namedTypes[idx]
+
+ if namedType.isOptional and not component.isValue:
+ continue
+
+ if namedType.isDefaulted and component == namedType.asn1Object:
+ continue
+
+ compsMap[id(component)] = namedType
+
+ else:
+ compsMap[id(component)] = None
+
+ comps.append((component, asn1Spec))
+
+ else:
+ # bare Python value + ASN.1 schema
+ for idx, namedType in enumerate(asn1Spec.componentType.namedTypes):
+
+ try:
+ component = value[namedType.name]
+
+ except KeyError:
+ raise error.PyAsn1Error('Component name "%s" not found in %r' % (namedType.name, value))
+
+ if namedType.isOptional and namedType.name not in value:
+ continue
+
+ if namedType.isDefaulted and component == namedType.asn1Object:
+ continue
+
+ compsMap[id(component)] = namedType
+ comps.append((component, asn1Spec[idx]))
+
+ for comp, compType in sorted(comps, key=self._componentSortKey):
+ namedType = compsMap[id(comp)]
+
+ if namedType:
+ options.update(ifNotEmpty=namedType.isOptional)
+
+ chunk = encodeFun(comp, compType, **options)
+
+ # wrap open type blob if needed
+ if namedType and namedType.openType:
+ wrapType = namedType.asn1Object
+ if wrapType.tagSet and not wrapType.isSameTypeWith(comp):
+ chunk = encodeFun(chunk, wrapType, **options)
+
+ substrate += chunk
+
+ return substrate, True, True
+
+
+class SequenceEncoder(encoder.SequenceEncoder):
+ omitEmptyOptionals = True
+
+
+tagMap = encoder.tagMap.copy()
+tagMap.update({
+ univ.Boolean.tagSet: BooleanEncoder(),
+ univ.Real.tagSet: RealEncoder(),
+ useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(),
+ useful.UTCTime.tagSet: UTCTimeEncoder(),
+ # Sequence & Set have same tags as SequenceOf & SetOf
+ univ.SetOf.tagSet: SetOfEncoder(),
+ univ.Sequence.typeId: SequenceEncoder()
+})
+
+typeMap = encoder.typeMap.copy()
+typeMap.update({
+ univ.Boolean.typeId: BooleanEncoder(),
+ univ.Real.typeId: RealEncoder(),
+ useful.GeneralizedTime.typeId: GeneralizedTimeEncoder(),
+ useful.UTCTime.typeId: UTCTimeEncoder(),
+ # Sequence & Set have same tags as SequenceOf & SetOf
+ univ.Set.typeId: SetEncoder(),
+ univ.SetOf.typeId: SetOfEncoder(),
+ univ.Sequence.typeId: SequenceEncoder(),
+ univ.SequenceOf.typeId: SequenceOfEncoder()
+})
+
+
+class Encoder(encoder.Encoder):
+ fixedDefLengthMode = False
+ fixedChunkSize = 1000
+
+#: Turns ASN.1 object into CER octet stream.
+#:
+#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative),
+#: walks all its components recursively and produces a CER octet stream.
+#:
+#: Parameters
+#: ----------
+#: value: either a Python or pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
+#: A Python or pyasn1 object to encode. If a Python object is given, the `asn1Spec`
+#: parameter is required to guide the encoding process.
+#:
+#: Keyword Args
+#: ------------
+#: asn1Spec:
+#: Optional ASN.1 schema or value object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+#:
+#: Returns
+#: -------
+#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
+#: Given ASN.1 object encoded into CER octet-stream
+#:
+#: Raises
+#: ------
+#: ~pyasn1.error.PyAsn1Error
+#: On encoding errors
+#:
+#: Examples
+#: --------
+#: Encode Python value into CER with ASN.1 schema
+#:
+#: .. code-block:: pycon
+#:
+#: >>> seq = SequenceOf(componentType=Integer())
+#: >>> encode([1, 2, 3], asn1Spec=seq)
+#: b'0\x80\x02\x01\x01\x02\x01\x02\x02\x01\x03\x00\x00'
+#:
+#: Encode ASN.1 value object into CER
+#:
+#: .. code-block:: pycon
+#:
+#: >>> seq = SequenceOf(componentType=Integer())
+#: >>> seq.extend([1, 2, 3])
+#: >>> encode(seq)
+#: b'0\x80\x02\x01\x01\x02\x01\x02\x02\x01\x03\x00\x00'
+#:
+encode = Encoder(tagMap, typeMap)
+
+# EncoderFactory queries class instance and builds a map of tags -> encoders
diff --git a/third_party/python/pyasn1/pyasn1/codec/der/__init__.py b/third_party/python/pyasn1/pyasn1/codec/der/__init__.py
new file mode 100644
index 0000000000..8c3066b2e6
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/der/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/third_party/python/pyasn1/pyasn1/codec/der/decoder.py b/third_party/python/pyasn1/pyasn1/codec/der/decoder.py
new file mode 100644
index 0000000000..1a13fdb5be
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/der/decoder.py
@@ -0,0 +1,94 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from pyasn1.codec.cer import decoder
+from pyasn1.type import univ
+
+__all__ = ['decode']
+
+
+class BitStringDecoder(decoder.BitStringDecoder):
+ supportConstructedForm = False
+
+
+class OctetStringDecoder(decoder.OctetStringDecoder):
+ supportConstructedForm = False
+
+# TODO: prohibit non-canonical encoding
+RealDecoder = decoder.RealDecoder
+
+tagMap = decoder.tagMap.copy()
+tagMap.update(
+ {univ.BitString.tagSet: BitStringDecoder(),
+ univ.OctetString.tagSet: OctetStringDecoder(),
+ univ.Real.tagSet: RealDecoder()}
+)
+
+typeMap = decoder.typeMap.copy()
+
+# Put in non-ambiguous types for faster codec lookup
+for typeDecoder in tagMap.values():
+ if typeDecoder.protoComponent is not None:
+ typeId = typeDecoder.protoComponent.__class__.typeId
+ if typeId is not None and typeId not in typeMap:
+ typeMap[typeId] = typeDecoder
+
+
+class Decoder(decoder.Decoder):
+ supportIndefLength = False
+
+
+#: Turns DER octet stream into an ASN.1 object.
+#:
+#: Takes a DER octet stream and decodes it into an ASN.1 object
+#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
+#: may be a scalar or an arbitrary nested structure.
+#:
+#: Parameters
+#: ----------
+#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
+#: DER octet-stream
+#:
+#: Keyword Args
+#: ------------
+#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure
+#: being decoded, *asn1Spec* may or may not be required. The most common reason for
+#: it to be required is that the ASN.1 structure is encoded in *IMPLICIT* tagging mode.
+#:
+#: Returns
+#: -------
+#: : :py:class:`tuple`
+#: A tuple of pyasn1 object recovered from DER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
+#: and the unprocessed trailing portion of the *substrate* (may be empty)
+#:
+#: Raises
+#: ------
+#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError
+#: On decoding errors
+#:
+#: Examples
+#: --------
+#: Decode DER serialisation without ASN.1 schema
+#:
+#: .. code-block:: pycon
+#:
+#: >>> s, _ = decode(b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03')
+#: >>> str(s)
+#: SequenceOf:
+#: 1 2 3
+#:
+#: Decode DER serialisation with ASN.1 schema
+#:
+#: .. code-block:: pycon
+#:
+#: >>> seq = SequenceOf(componentType=Integer())
+#: >>> s, _ = decode(b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03', asn1Spec=seq)
+#: >>> str(s)
+#: SequenceOf:
+#: 1 2 3
+#:
+decode = Decoder(tagMap, typeMap)
diff --git a/third_party/python/pyasn1/pyasn1/codec/der/encoder.py b/third_party/python/pyasn1/pyasn1/codec/der/encoder.py
new file mode 100644
index 0000000000..90e982daa4
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/der/encoder.py
@@ -0,0 +1,107 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from pyasn1 import error
+from pyasn1.codec.cer import encoder
+from pyasn1.type import univ
+
+__all__ = ['encode']
+
+
+class SetEncoder(encoder.SetEncoder):
+ @staticmethod
+ def _componentSortKey(componentAndType):
+ """Sort SET components by tag
+
+ Sort depending on the actual Choice value (dynamic sort)
+ """
+ component, asn1Spec = componentAndType
+
+ if asn1Spec is None:
+ compType = component
+ else:
+ compType = asn1Spec
+
+ if compType.typeId == univ.Choice.typeId and not compType.tagSet:
+ if asn1Spec is None:
+ return component.getComponent().tagSet
+ else:
+ # TODO: move out of sorting key function
+ names = [namedType.name for namedType in asn1Spec.componentType.namedTypes
+ if namedType.name in component]
+ if len(names) != 1:
+ raise error.PyAsn1Error(
+ '%s components for Choice at %r' % (len(names) and 'Multiple ' or 'None ', component))
+
+ # TODO: support nested CHOICE ordering
+ return asn1Spec[names[0]].tagSet
+
+ else:
+ return compType.tagSet
+
+tagMap = encoder.tagMap.copy()
+tagMap.update({
+ # Set & SetOf have same tags
+ univ.Set.tagSet: SetEncoder()
+})
+
+typeMap = encoder.typeMap.copy()
+typeMap.update({
+ # Set & SetOf have same tags
+ univ.Set.typeId: SetEncoder()
+})
+
+
+class Encoder(encoder.Encoder):
+ fixedDefLengthMode = True
+ fixedChunkSize = 0
+
+#: Turns ASN.1 object into DER octet stream.
+#:
+#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative),
+#: walks all its components recursively and produces a DER octet stream.
+#:
+#: Parameters
+#: ----------
+#: value: either a Python or pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
+#: A Python or pyasn1 object to encode. If a Python object is given, the `asn1Spec`
+#: parameter is required to guide the encoding process.
+#:
+#: Keyword Args
+#: ------------
+#: asn1Spec:
+#: Optional ASN.1 schema or value object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+#:
+#: Returns
+#: -------
+#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
+#: Given ASN.1 object encoded into DER octet-stream
+#:
+#: Raises
+#: ------
+#: ~pyasn1.error.PyAsn1Error
+#: On encoding errors
+#:
+#: Examples
+#: --------
+#: Encode Python value into DER with ASN.1 schema
+#:
+#: .. code-block:: pycon
+#:
+#: >>> seq = SequenceOf(componentType=Integer())
+#: >>> encode([1, 2, 3], asn1Spec=seq)
+#: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03'
+#:
+#: Encode ASN.1 value object into DER
+#:
+#: .. code-block:: pycon
+#:
+#: >>> seq = SequenceOf(componentType=Integer())
+#: >>> seq.extend([1, 2, 3])
+#: >>> encode(seq)
+#: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03'
+#:
+encode = Encoder(tagMap, typeMap)
diff --git a/third_party/python/pyasn1/pyasn1/codec/native/__init__.py b/third_party/python/pyasn1/pyasn1/codec/native/__init__.py
new file mode 100644
index 0000000000..8c3066b2e6
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/native/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/third_party/python/pyasn1/pyasn1/codec/native/decoder.py b/third_party/python/pyasn1/pyasn1/codec/native/decoder.py
new file mode 100644
index 0000000000..104b92e6d3
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/native/decoder.py
@@ -0,0 +1,213 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from pyasn1 import debug
+from pyasn1 import error
+from pyasn1.type import base
+from pyasn1.type import char
+from pyasn1.type import tag
+from pyasn1.type import univ
+from pyasn1.type import useful
+
+__all__ = ['decode']
+
+LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_DECODER)
+
+
+class AbstractScalarDecoder(object):
+ def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
+ return asn1Spec.clone(pyObject)
+
+
+class BitStringDecoder(AbstractScalarDecoder):
+ def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
+ return asn1Spec.clone(univ.BitString.fromBinaryString(pyObject))
+
+
+class SequenceOrSetDecoder(object):
+ def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
+ asn1Value = asn1Spec.clone()
+
+ componentsTypes = asn1Spec.componentType
+
+ for field in asn1Value:
+ if field in pyObject:
+ asn1Value[field] = decodeFun(pyObject[field], componentsTypes[field].asn1Object, **options)
+
+ return asn1Value
+
+
+class SequenceOfOrSetOfDecoder(object):
+ def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
+ asn1Value = asn1Spec.clone()
+
+ for pyValue in pyObject:
+            asn1Value.append(decodeFun(pyValue, asn1Spec.componentType, **options))
+
+ return asn1Value
+
+
+class ChoiceDecoder(object):
+ def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
+ asn1Value = asn1Spec.clone()
+
+ componentsTypes = asn1Spec.componentType
+
+ for field in pyObject:
+ if field in componentsTypes:
+ asn1Value[field] = decodeFun(pyObject[field], componentsTypes[field].asn1Object, **options)
+ break
+
+ return asn1Value
+
+
+tagMap = {
+ univ.Integer.tagSet: AbstractScalarDecoder(),
+ univ.Boolean.tagSet: AbstractScalarDecoder(),
+ univ.BitString.tagSet: BitStringDecoder(),
+ univ.OctetString.tagSet: AbstractScalarDecoder(),
+ univ.Null.tagSet: AbstractScalarDecoder(),
+ univ.ObjectIdentifier.tagSet: AbstractScalarDecoder(),
+ univ.Enumerated.tagSet: AbstractScalarDecoder(),
+ univ.Real.tagSet: AbstractScalarDecoder(),
+ univ.Sequence.tagSet: SequenceOrSetDecoder(), # conflicts with SequenceOf
+ univ.Set.tagSet: SequenceOrSetDecoder(), # conflicts with SetOf
+ univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any
+ # character string types
+ char.UTF8String.tagSet: AbstractScalarDecoder(),
+ char.NumericString.tagSet: AbstractScalarDecoder(),
+ char.PrintableString.tagSet: AbstractScalarDecoder(),
+ char.TeletexString.tagSet: AbstractScalarDecoder(),
+ char.VideotexString.tagSet: AbstractScalarDecoder(),
+ char.IA5String.tagSet: AbstractScalarDecoder(),
+ char.GraphicString.tagSet: AbstractScalarDecoder(),
+ char.VisibleString.tagSet: AbstractScalarDecoder(),
+ char.GeneralString.tagSet: AbstractScalarDecoder(),
+ char.UniversalString.tagSet: AbstractScalarDecoder(),
+ char.BMPString.tagSet: AbstractScalarDecoder(),
+ # useful types
+ useful.ObjectDescriptor.tagSet: AbstractScalarDecoder(),
+ useful.GeneralizedTime.tagSet: AbstractScalarDecoder(),
+ useful.UTCTime.tagSet: AbstractScalarDecoder()
+}
+
+# Put in ambiguous & non-ambiguous types for faster codec lookup
+typeMap = {
+ univ.Integer.typeId: AbstractScalarDecoder(),
+ univ.Boolean.typeId: AbstractScalarDecoder(),
+ univ.BitString.typeId: BitStringDecoder(),
+ univ.OctetString.typeId: AbstractScalarDecoder(),
+ univ.Null.typeId: AbstractScalarDecoder(),
+ univ.ObjectIdentifier.typeId: AbstractScalarDecoder(),
+ univ.Enumerated.typeId: AbstractScalarDecoder(),
+ univ.Real.typeId: AbstractScalarDecoder(),
+ # ambiguous base types
+ univ.Set.typeId: SequenceOrSetDecoder(),
+ univ.SetOf.typeId: SequenceOfOrSetOfDecoder(),
+ univ.Sequence.typeId: SequenceOrSetDecoder(),
+ univ.SequenceOf.typeId: SequenceOfOrSetOfDecoder(),
+ univ.Choice.typeId: ChoiceDecoder(),
+ univ.Any.typeId: AbstractScalarDecoder(),
+ # character string types
+ char.UTF8String.typeId: AbstractScalarDecoder(),
+ char.NumericString.typeId: AbstractScalarDecoder(),
+ char.PrintableString.typeId: AbstractScalarDecoder(),
+ char.TeletexString.typeId: AbstractScalarDecoder(),
+ char.VideotexString.typeId: AbstractScalarDecoder(),
+ char.IA5String.typeId: AbstractScalarDecoder(),
+ char.GraphicString.typeId: AbstractScalarDecoder(),
+ char.VisibleString.typeId: AbstractScalarDecoder(),
+ char.GeneralString.typeId: AbstractScalarDecoder(),
+ char.UniversalString.typeId: AbstractScalarDecoder(),
+ char.BMPString.typeId: AbstractScalarDecoder(),
+ # useful types
+ useful.ObjectDescriptor.typeId: AbstractScalarDecoder(),
+ useful.GeneralizedTime.typeId: AbstractScalarDecoder(),
+ useful.UTCTime.typeId: AbstractScalarDecoder()
+}
+
+
+class Decoder(object):
+
+ # noinspection PyDefaultArgument
+ def __init__(self, tagMap, typeMap):
+ self.__tagMap = tagMap
+ self.__typeMap = typeMap
+
+ def __call__(self, pyObject, asn1Spec, **options):
+
+ if LOG:
+ debug.scope.push(type(pyObject).__name__)
+ LOG('decoder called at scope %s, working with type %s' % (debug.scope, type(pyObject).__name__))
+
+ if asn1Spec is None or not isinstance(asn1Spec, base.Asn1Item):
+ raise error.PyAsn1Error('asn1Spec is not valid (should be an instance of an ASN.1 Item, not %s)' % asn1Spec.__class__.__name__)
+
+ try:
+ valueDecoder = self.__typeMap[asn1Spec.typeId]
+
+ except KeyError:
+ # use base type for codec lookup to recover untagged types
+ baseTagSet = tag.TagSet(asn1Spec.tagSet.baseTag, asn1Spec.tagSet.baseTag)
+
+ try:
+ valueDecoder = self.__tagMap[baseTagSet]
+ except KeyError:
+ raise error.PyAsn1Error('Unknown ASN.1 tag %s' % asn1Spec.tagSet)
+
+ if LOG:
+ LOG('calling decoder %s on Python type %s <%s>' % (type(valueDecoder).__name__, type(pyObject).__name__, repr(pyObject)))
+
+ value = valueDecoder(pyObject, asn1Spec, self, **options)
+
+ if LOG:
+ LOG('decoder %s produced ASN.1 type %s <%s>' % (type(valueDecoder).__name__, type(value).__name__, repr(value)))
+ debug.scope.pop()
+
+ return value
+
+
+#: Turns Python objects of built-in types into ASN.1 objects.
+#:
+#: Takes Python objects of built-in types and turns them into a tree of
+#: ASN.1 objects (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
+#: may be a scalar or an arbitrary nested structure.
+#:
+#: Parameters
+#: ----------
+#: pyObject: :py:class:`object`
+#: A scalar or a nested Python object
+#:
+#: Keyword Args
+#: ------------
+#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+#: A pyasn1 type object to act as a template guiding the decoder. It is required
+#: for successfully mapping Python objects into their ASN.1
+#: representations.
+#:
+#: Returns
+#: -------
+#: : :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+#: A scalar or constructed pyasn1 object
+#:
+#: Raises
+#: ------
+#: ~pyasn1.error.PyAsn1Error
+#: On decoding errors
+#:
+#: Examples
+#: --------
+#: Decode native Python object into ASN.1 objects with ASN.1 schema
+#:
+#: .. code-block:: pycon
+#:
+#: >>> seq = SequenceOf(componentType=Integer())
+#: >>> s, _ = decode([1, 2, 3], asn1Spec=seq)
+#: >>> str(s)
+#: SequenceOf:
+#: 1 2 3
+#:
+decode = Decoder(tagMap, typeMap)
diff --git a/third_party/python/pyasn1/pyasn1/codec/native/encoder.py b/third_party/python/pyasn1/pyasn1/codec/native/encoder.py
new file mode 100644
index 0000000000..4318abde6f
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/codec/native/encoder.py
@@ -0,0 +1,256 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+try:
+ from collections import OrderedDict
+
+except ImportError:
+ OrderedDict = dict
+
+from pyasn1 import debug
+from pyasn1 import error
+from pyasn1.type import base
+from pyasn1.type import char
+from pyasn1.type import tag
+from pyasn1.type import univ
+from pyasn1.type import useful
+
+__all__ = ['encode']
+
+LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_ENCODER)
+
+
+class AbstractItemEncoder(object):
+ def encode(self, value, encodeFun, **options):
+ raise error.PyAsn1Error('Not implemented')
+
+
+class BooleanEncoder(AbstractItemEncoder):
+ def encode(self, value, encodeFun, **options):
+ return bool(value)
+
+
+class IntegerEncoder(AbstractItemEncoder):
+ def encode(self, value, encodeFun, **options):
+ return int(value)
+
+
+class BitStringEncoder(AbstractItemEncoder):
+ def encode(self, value, encodeFun, **options):
+ return str(value)
+
+
+class OctetStringEncoder(AbstractItemEncoder):
+ def encode(self, value, encodeFun, **options):
+ return value.asOctets()
+
+
+class TextStringEncoder(AbstractItemEncoder):
+ def encode(self, value, encodeFun, **options):
+ return str(value)
+
+
+class NullEncoder(AbstractItemEncoder):
+ def encode(self, value, encodeFun, **options):
+ return None
+
+
+class ObjectIdentifierEncoder(AbstractItemEncoder):
+ def encode(self, value, encodeFun, **options):
+ return str(value)
+
+
+class RealEncoder(AbstractItemEncoder):
+ def encode(self, value, encodeFun, **options):
+ return float(value)
+
+
+class SetEncoder(AbstractItemEncoder):
+ protoDict = dict
+
+ def encode(self, value, encodeFun, **options):
+ inconsistency = value.isInconsistent
+ if inconsistency:
+ raise inconsistency
+
+ namedTypes = value.componentType
+ substrate = self.protoDict()
+
+ for idx, (key, subValue) in enumerate(value.items()):
+ if namedTypes and namedTypes[idx].isOptional and not value[idx].isValue:
+ continue
+ substrate[key] = encodeFun(subValue, **options)
+ return substrate
+
+
+class SequenceEncoder(SetEncoder):
+ protoDict = OrderedDict
+
+
+class SequenceOfEncoder(AbstractItemEncoder):
+ def encode(self, value, encodeFun, **options):
+ inconsistency = value.isInconsistent
+ if inconsistency:
+ raise inconsistency
+ return [encodeFun(x, **options) for x in value]
+
+
+class ChoiceEncoder(SequenceEncoder):
+ pass
+
+
+class AnyEncoder(AbstractItemEncoder):
+ def encode(self, value, encodeFun, **options):
+ return value.asOctets()
+
+
+tagMap = {
+ univ.Boolean.tagSet: BooleanEncoder(),
+ univ.Integer.tagSet: IntegerEncoder(),
+ univ.BitString.tagSet: BitStringEncoder(),
+ univ.OctetString.tagSet: OctetStringEncoder(),
+ univ.Null.tagSet: NullEncoder(),
+ univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(),
+ univ.Enumerated.tagSet: IntegerEncoder(),
+ univ.Real.tagSet: RealEncoder(),
+ # Sequence & Set have same tags as SequenceOf & SetOf
+ univ.SequenceOf.tagSet: SequenceOfEncoder(),
+ univ.SetOf.tagSet: SequenceOfEncoder(),
+ univ.Choice.tagSet: ChoiceEncoder(),
+ # character string types
+ char.UTF8String.tagSet: TextStringEncoder(),
+ char.NumericString.tagSet: TextStringEncoder(),
+ char.PrintableString.tagSet: TextStringEncoder(),
+ char.TeletexString.tagSet: TextStringEncoder(),
+ char.VideotexString.tagSet: TextStringEncoder(),
+ char.IA5String.tagSet: TextStringEncoder(),
+ char.GraphicString.tagSet: TextStringEncoder(),
+ char.VisibleString.tagSet: TextStringEncoder(),
+ char.GeneralString.tagSet: TextStringEncoder(),
+ char.UniversalString.tagSet: TextStringEncoder(),
+ char.BMPString.tagSet: TextStringEncoder(),
+ # useful types
+ useful.ObjectDescriptor.tagSet: OctetStringEncoder(),
+ useful.GeneralizedTime.tagSet: OctetStringEncoder(),
+ useful.UTCTime.tagSet: OctetStringEncoder()
+}
+
+
+# Put in ambiguous & non-ambiguous types for faster codec lookup
+typeMap = {
+ univ.Boolean.typeId: BooleanEncoder(),
+ univ.Integer.typeId: IntegerEncoder(),
+ univ.BitString.typeId: BitStringEncoder(),
+ univ.OctetString.typeId: OctetStringEncoder(),
+ univ.Null.typeId: NullEncoder(),
+ univ.ObjectIdentifier.typeId: ObjectIdentifierEncoder(),
+ univ.Enumerated.typeId: IntegerEncoder(),
+ univ.Real.typeId: RealEncoder(),
+ # Sequence & Set have same tags as SequenceOf & SetOf
+ univ.Set.typeId: SetEncoder(),
+ univ.SetOf.typeId: SequenceOfEncoder(),
+ univ.Sequence.typeId: SequenceEncoder(),
+ univ.SequenceOf.typeId: SequenceOfEncoder(),
+ univ.Choice.typeId: ChoiceEncoder(),
+ univ.Any.typeId: AnyEncoder(),
+ # character string types
+ char.UTF8String.typeId: OctetStringEncoder(),
+ char.NumericString.typeId: OctetStringEncoder(),
+ char.PrintableString.typeId: OctetStringEncoder(),
+ char.TeletexString.typeId: OctetStringEncoder(),
+ char.VideotexString.typeId: OctetStringEncoder(),
+ char.IA5String.typeId: OctetStringEncoder(),
+ char.GraphicString.typeId: OctetStringEncoder(),
+ char.VisibleString.typeId: OctetStringEncoder(),
+ char.GeneralString.typeId: OctetStringEncoder(),
+ char.UniversalString.typeId: OctetStringEncoder(),
+ char.BMPString.typeId: OctetStringEncoder(),
+ # useful types
+ useful.ObjectDescriptor.typeId: OctetStringEncoder(),
+ useful.GeneralizedTime.typeId: OctetStringEncoder(),
+ useful.UTCTime.typeId: OctetStringEncoder()
+}
+
+
+class Encoder(object):
+
+ # noinspection PyDefaultArgument
+ def __init__(self, tagMap, typeMap={}):
+ self.__tagMap = tagMap
+ self.__typeMap = typeMap
+
+ def __call__(self, value, **options):
+ if not isinstance(value, base.Asn1Item):
+ raise error.PyAsn1Error('value is not valid (should be an instance of an ASN.1 Item)')
+
+ if LOG:
+ debug.scope.push(type(value).__name__)
+ LOG('encoder called for type %s <%s>' % (type(value).__name__, value.prettyPrint()))
+
+ tagSet = value.tagSet
+
+ try:
+ concreteEncoder = self.__typeMap[value.typeId]
+
+ except KeyError:
+ # use base type for codec lookup to recover untagged types
+ baseTagSet = tag.TagSet(value.tagSet.baseTag, value.tagSet.baseTag)
+
+ try:
+ concreteEncoder = self.__tagMap[baseTagSet]
+
+ except KeyError:
+ raise error.PyAsn1Error('No encoder for %s' % (value,))
+
+ if LOG:
+ LOG('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet))
+
+ pyObject = concreteEncoder.encode(value, self, **options)
+
+ if LOG:
+ LOG('encoder %s produced: %s' % (type(concreteEncoder).__name__, repr(pyObject)))
+ debug.scope.pop()
+
+ return pyObject
+
+
+#: Turns an ASN.1 object into Python built-in type object(s).
+#:
+#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative),
+#: walks all its components recursively and produces a Python built-in type or a tree
+#: of those.
+#:
+#: One exception is that instead of :py:class:`dict`, the :py:class:`OrderedDict`
+#: can be produced (whenever available) to preserve ordering of the components
+#: in ASN.1 SEQUENCE.
+#:
+#: Parameters
+#: ----------
+#: asn1Value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
+#: pyasn1 object to encode (or a tree of them)
+#:
+#: Returns
+#: -------
+#: : :py:class:`object`
+#: Python built-in type instance (or a tree of them)
+#:
+#: Raises
+#: ------
+#: ~pyasn1.error.PyAsn1Error
+#: On encoding errors
+#:
+#: Examples
+#: --------
+#: Encode ASN.1 value object into native Python types
+#:
+#: .. code-block:: pycon
+#:
+#: >>> seq = SequenceOf(componentType=Integer())
+#: >>> seq.extend([1, 2, 3])
+#: >>> encode(seq)
+#: [1, 2, 3]
+#:
+encode = Encoder(tagMap, typeMap)
diff --git a/third_party/python/pyasn1/pyasn1/compat/__init__.py b/third_party/python/pyasn1/pyasn1/compat/__init__.py
new file mode 100644
index 0000000000..8c3066b2e6
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/compat/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/third_party/python/pyasn1/pyasn1/compat/binary.py b/third_party/python/pyasn1/pyasn1/compat/binary.py
new file mode 100644
index 0000000000..addbdc9caa
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/compat/binary.py
@@ -0,0 +1,33 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from sys import version_info
+
+if version_info[0:2] < (2, 6):
+ def bin(value):
+ bitstring = []
+
+ if value > 0:
+ prefix = '0b'
+ elif value < 0:
+ prefix = '-0b'
+ value = abs(value)
+ else:
+ prefix = '0b0'
+
+ while value:
+ if value & 1 == 1:
+ bitstring.append('1')
+ else:
+ bitstring.append('0')
+
+ value >>= 1
+
+ bitstring.reverse()
+
+ return prefix + ''.join(bitstring)
+else:
+ bin = bin
diff --git a/third_party/python/pyasn1/pyasn1/compat/calling.py b/third_party/python/pyasn1/pyasn1/compat/calling.py
new file mode 100644
index 0000000000..778a3d15d0
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/compat/calling.py
@@ -0,0 +1,20 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from sys import version_info
+
+__all__ = ['callable']
+
+
+if (2, 7) < version_info[:2] < (3, 2):
+ import collections
+
+ def callable(x):
+ return isinstance(x, collections.Callable)
+
+else:
+
+ callable = callable
diff --git a/third_party/python/pyasn1/pyasn1/compat/dateandtime.py b/third_party/python/pyasn1/pyasn1/compat/dateandtime.py
new file mode 100644
index 0000000000..5e471bf761
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/compat/dateandtime.py
@@ -0,0 +1,22 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+import time
+from datetime import datetime
+from sys import version_info
+
+__all__ = ['strptime']
+
+
+if version_info[:2] <= (2, 4):
+
+ def strptime(text, dateFormat):
+ return datetime(*(time.strptime(text, dateFormat)[0:6]))
+
+else:
+
+ def strptime(text, dateFormat):
+ return datetime.strptime(text, dateFormat)
diff --git a/third_party/python/pyasn1/pyasn1/compat/integer.py b/third_party/python/pyasn1/pyasn1/compat/integer.py
new file mode 100644
index 0000000000..4b31791d5e
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/compat/integer.py
@@ -0,0 +1,110 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+import sys
+
+try:
+ import platform
+
+ implementation = platform.python_implementation()
+
+except (ImportError, AttributeError):
+ implementation = 'CPython'
+
+from pyasn1.compat.octets import oct2int, null, ensureString
+
+if sys.version_info[0:2] < (3, 2) or implementation != 'CPython':
+ from binascii import a2b_hex, b2a_hex
+
+ if sys.version_info[0] > 2:
+ long = int
+
+ def from_bytes(octets, signed=False):
+ if not octets:
+ return 0
+
+ value = long(b2a_hex(ensureString(octets)), 16)
+
+ if signed and oct2int(octets[0]) & 0x80:
+ return value - (1 << len(octets) * 8)
+
+ return value
+
+ def to_bytes(value, signed=False, length=0):
+ if value < 0:
+ if signed:
+ bits = bitLength(value)
+
+ # two's complement form
+ maxValue = 1 << bits
+ valueToEncode = (value + maxValue) % maxValue
+
+ else:
+ raise OverflowError('can\'t convert negative int to unsigned')
+ elif value == 0 and length == 0:
+ return null
+ else:
+ bits = 0
+ valueToEncode = value
+
+ hexValue = hex(valueToEncode)[2:]
+ if hexValue.endswith('L'):
+ hexValue = hexValue[:-1]
+
+ if len(hexValue) & 1:
+ hexValue = '0' + hexValue
+
+ # padding may be needed for two's complement encoding
+ if value != valueToEncode or length:
+ hexLength = len(hexValue) * 4
+
+ padLength = max(length, bits)
+
+ if padLength > hexLength:
+ hexValue = '00' * ((padLength - hexLength - 1) // 8 + 1) + hexValue
+ elif length and hexLength - length > 7:
+ raise OverflowError('int too big to convert')
+
+ firstOctet = int(hexValue[:2], 16)
+
+ if signed:
+ if firstOctet & 0x80:
+ if value >= 0:
+ hexValue = '00' + hexValue
+ elif value < 0:
+ hexValue = 'ff' + hexValue
+
+ octets_value = a2b_hex(hexValue)
+
+ return octets_value
+
+ def bitLength(number):
+ # bits in unsigned number
+ hexValue = hex(abs(number))
+ bits = len(hexValue) - 2
+ if hexValue.endswith('L'):
+ bits -= 1
+ if bits & 1:
+ bits += 1
+ bits *= 4
+ # TODO: strip lhs zeros
+ return bits
+
+else:
+
+ def from_bytes(octets, signed=False):
+ return int.from_bytes(bytes(octets), 'big', signed=signed)
+
+ def to_bytes(value, signed=False, length=0):
+ length = max(value.bit_length(), length)
+
+ if signed and length % 8 == 0:
+ length += 1
+
+ return value.to_bytes(length // 8 + (length % 8 and 1 or 0), 'big', signed=signed)
+
+ def bitLength(number):
+ return int(number).bit_length()
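+
+# Illustrative sketch (assumed usage): both branches above are meant to mirror
+# Python 3's int.to_bytes()/int.from_bytes() with big-endian, minimal-length,
+# optionally signed (two's complement) semantics:
+#
+#   >>> from pyasn1.compat.integer import to_bytes, from_bytes
+#   >>> to_bytes(-1, signed=True)
+#   b'\xff'
+#   >>> from_bytes(b'\xff', signed=True)
+#   -1
+#   >>> from_bytes(b'\xff')
+#   255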
diff --git a/third_party/python/pyasn1/pyasn1/compat/octets.py b/third_party/python/pyasn1/pyasn1/compat/octets.py
new file mode 100644
index 0000000000..99d23bb3f1
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/compat/octets.py
@@ -0,0 +1,46 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from sys import version_info
+
+if version_info[0] <= 2:
+ int2oct = chr
+ # noinspection PyPep8
+ ints2octs = lambda s: ''.join([int2oct(x) for x in s])
+ null = ''
+ oct2int = ord
+ # TODO: refactor to return a sequence of ints
+ # noinspection PyPep8
+ octs2ints = lambda s: [oct2int(x) for x in s]
+ # noinspection PyPep8
+ str2octs = lambda x: x
+ # noinspection PyPep8
+ octs2str = lambda x: x
+ # noinspection PyPep8
+ isOctetsType = lambda s: isinstance(s, str)
+ # noinspection PyPep8
+ isStringType = lambda s: isinstance(s, (str, unicode))
+ # noinspection PyPep8
+ ensureString = str
+else:
+ ints2octs = bytes
+ # noinspection PyPep8
+ int2oct = lambda x: ints2octs((x,))
+ null = ints2octs()
+ # noinspection PyPep8
+ oct2int = lambda x: x
+ # noinspection PyPep8
+ octs2ints = lambda x: x
+ # noinspection PyPep8
+ str2octs = lambda x: x.encode('iso-8859-1')
+ # noinspection PyPep8
+ octs2str = lambda x: x.decode('iso-8859-1')
+ # noinspection PyPep8
+ isOctetsType = lambda s: isinstance(s, bytes)
+ # noinspection PyPep8
+ isStringType = lambda s: isinstance(s, str)
+ # noinspection PyPep8
+ ensureString = bytes
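+
+# Illustrative sketch (assumed usage): these helpers give version-neutral
+# conversions between text and octet (byte) strings; on Python 3 they map
+# str <-> bytes via latin-1, on Python 2 they are identity functions:
+#
+#   >>> from pyasn1.compat.octets import str2octs, octs2str, int2oct
+#   >>> str2octs('abc'), octs2str(str2octs('abc'))
+#   (b'abc', 'abc')
+#   >>> int2oct(65)
+#   b'A'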
diff --git a/third_party/python/pyasn1/pyasn1/compat/string.py b/third_party/python/pyasn1/pyasn1/compat/string.py
new file mode 100644
index 0000000000..b9bc8c3802
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/compat/string.py
@@ -0,0 +1,26 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from sys import version_info
+
+if version_info[:2] <= (2, 5):
+
+ def partition(string, sep):
+ try:
+ a, c = string.split(sep, 1)
+
+ except ValueError:
+ a, b, c = string, '', ''
+
+ else:
+ b = sep
+
+ return a, b, c
+
+else:
+
+ def partition(string, sep):
+ return string.partition(sep)
diff --git a/third_party/python/pyasn1/pyasn1/debug.py b/third_party/python/pyasn1/pyasn1/debug.py
new file mode 100644
index 0000000000..8707aa887e
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/debug.py
@@ -0,0 +1,157 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+import logging
+import sys
+
+from pyasn1 import __version__
+from pyasn1 import error
+from pyasn1.compat.octets import octs2ints
+
+__all__ = ['Debug', 'setLogger', 'hexdump']
+
+DEBUG_NONE = 0x0000
+DEBUG_ENCODER = 0x0001
+DEBUG_DECODER = 0x0002
+DEBUG_ALL = 0xffff
+
+FLAG_MAP = {
+ 'none': DEBUG_NONE,
+ 'encoder': DEBUG_ENCODER,
+ 'decoder': DEBUG_DECODER,
+ 'all': DEBUG_ALL
+}
+
+LOGGEE_MAP = {}
+
+
+class Printer(object):
+ # noinspection PyShadowingNames
+ def __init__(self, logger=None, handler=None, formatter=None):
+ if logger is None:
+ logger = logging.getLogger('pyasn1')
+
+ logger.setLevel(logging.DEBUG)
+
+ if handler is None:
+ handler = logging.StreamHandler()
+
+ if formatter is None:
+ formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s')
+
+ handler.setFormatter(formatter)
+ handler.setLevel(logging.DEBUG)
+ logger.addHandler(handler)
+
+ self.__logger = logger
+
+ def __call__(self, msg):
+ self.__logger.debug(msg)
+
+ def __str__(self):
+ return '<python logging>'
+
+
+if hasattr(logging, 'NullHandler'):
+ NullHandler = logging.NullHandler
+
+else:
+ # Python 2.6 and older
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+
+class Debug(object):
+ defaultPrinter = Printer()
+
+ def __init__(self, *flags, **options):
+ self._flags = DEBUG_NONE
+
+ if 'loggerName' in options:
+ # route our logs to parent logger
+ self._printer = Printer(
+ logger=logging.getLogger(options['loggerName']),
+ handler=NullHandler()
+ )
+
+ elif 'printer' in options:
+ self._printer = options.get('printer')
+
+ else:
+ self._printer = self.defaultPrinter
+
+ self._printer('running pyasn1 %s, debug flags %s' % (__version__, ', '.join(flags)))
+
+ for flag in flags:
+ inverse = flag and flag[0] in ('!', '~')
+ if inverse:
+ flag = flag[1:]
+ try:
+ if inverse:
+ self._flags &= ~FLAG_MAP[flag]
+ else:
+ self._flags |= FLAG_MAP[flag]
+ except KeyError:
+ raise error.PyAsn1Error('bad debug flag %s' % flag)
+
+ self._printer("debug category '%s' %s" % (flag, inverse and 'disabled' or 'enabled'))
+
+ def __str__(self):
+ return 'logger %s, flags %x' % (self._printer, self._flags)
+
+ def __call__(self, msg):
+ self._printer(msg)
+
+ def __and__(self, flag):
+ return self._flags & flag
+
+ def __rand__(self, flag):
+ return flag & self._flags
+
+_LOG = DEBUG_NONE
+
+
+def setLogger(userLogger):
+ global _LOG
+
+ if userLogger:
+ _LOG = userLogger
+ else:
+ _LOG = DEBUG_NONE
+
+ # Update registered logging clients
+ for module, (name, flags) in LOGGEE_MAP.items():
+ setattr(module, name, _LOG & flags and _LOG or DEBUG_NONE)
+
+
+def registerLoggee(module, name='LOG', flags=DEBUG_NONE):
+ LOGGEE_MAP[sys.modules[module]] = name, flags
+ setLogger(_LOG)
+ return _LOG
+
+
+def hexdump(octets):
+ return ' '.join(
+ ['%s%.2X' % (n % 16 == 0 and ('\n%.5d: ' % n) or '', x)
+ for n, x in zip(range(len(octets)), octs2ints(octets))]
+ )
+
+
+class Scope(object):
+ def __init__(self):
+ self._list = []
+
+ def __str__(self): return '.'.join(self._list)
+
+ def push(self, token):
+ self._list.append(token)
+
+ def pop(self):
+ return self._list.pop()
+
+
+scope = Scope()
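+
+# Illustrative sketch (assumed usage): debugging is switched on by passing a
+# Debug instance to setLogger(); the flag names come from FLAG_MAP above and
+# passing a falsy value turns logging back off:
+#
+#   >>> from pyasn1 import debug
+#   >>> debug.setLogger(debug.Debug('decoder', 'encoder'))
+#   >>> debug.setLogger(0)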
diff --git a/third_party/python/pyasn1/pyasn1/error.py b/third_party/python/pyasn1/pyasn1/error.py
new file mode 100644
index 0000000000..4f48db2516
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/error.py
@@ -0,0 +1,75 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+
+
+class PyAsn1Error(Exception):
+ """Base pyasn1 exception
+
+ `PyAsn1Error` is the base exception class (based on
+ :class:`Exception`) that represents all possible ASN.1 related
+ errors.
+ """
+
+
+class ValueConstraintError(PyAsn1Error):
+ """ASN.1 type constraints violation exception
+
+ The `ValueConstraintError` exception indicates an ASN.1 value
+ constraint violation.
+
+ It might happen on value object instantiation (for scalar types) or on
+ serialization (for constructed types).
+ """
+
+
+class SubstrateUnderrunError(PyAsn1Error):
+ """ASN.1 data structure deserialization error
+
+ The `SubstrateUnderrunError` exception indicates insufficient serialised
+ data on input of a de-serialization codec.
+ """
+
+
+class PyAsn1UnicodeError(PyAsn1Error, UnicodeError):
+ """Unicode text processing error
+
+ The `PyAsn1UnicodeError` exception is a base class for errors relating to
+ unicode text de/serialization.
+
+ Apart from inheriting from :class:`PyAsn1Error`, it also inherits from
+    :class:`UnicodeError` to help the caller catch unicode-related errors.
+ """
+ def __init__(self, message, unicode_error=None):
+ if isinstance(unicode_error, UnicodeError):
+ UnicodeError.__init__(self, *unicode_error.args)
+ PyAsn1Error.__init__(self, message)
+
+
+class PyAsn1UnicodeDecodeError(PyAsn1UnicodeError, UnicodeDecodeError):
+ """Unicode text decoding error
+
+ The `PyAsn1UnicodeDecodeError` exception represents a failure to
+ deserialize unicode text.
+
+ Apart from inheriting from :class:`PyAsn1UnicodeError`, it also inherits
+    from :class:`UnicodeDecodeError` to help the caller catch unicode-related
+ errors.
+ """
+
+
+class PyAsn1UnicodeEncodeError(PyAsn1UnicodeError, UnicodeEncodeError):
+ """Unicode text encoding error
+
+ The `PyAsn1UnicodeEncodeError` exception represents a failure to
+ serialize unicode text.
+
+ Apart from inheriting from :class:`PyAsn1UnicodeError`, it also inherits
+    from :class:`UnicodeEncodeError` to help the caller catch
+ unicode-related errors.
+ """
+
+
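+# Illustrative sketch (assumed usage): every exception above derives from
+# PyAsn1Error, so one except clause is enough to catch any pyasn1 failure
+# while letting unrelated exceptions propagate:
+#
+#   >>> from pyasn1 import error
+#   >>> from pyasn1.codec.der import decoder
+#   >>> try:
+#   ...     decoder.decode(b'\x00')  # truncated substrate
+#   ... except error.PyAsn1Error as exc:
+#   ...     print('decoding failed: %s' % exc)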
diff --git a/third_party/python/pyasn1/pyasn1/type/__init__.py b/third_party/python/pyasn1/pyasn1/type/__init__.py
new file mode 100644
index 0000000000..8c3066b2e6
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/type/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/third_party/python/pyasn1/pyasn1/type/base.py b/third_party/python/pyasn1/pyasn1/type/base.py
new file mode 100644
index 0000000000..994f1c99b3
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/type/base.py
@@ -0,0 +1,707 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+import sys
+
+from pyasn1 import error
+from pyasn1.compat import calling
+from pyasn1.type import constraint
+from pyasn1.type import tag
+from pyasn1.type import tagmap
+
+__all__ = ['Asn1Item', 'Asn1Type', 'SimpleAsn1Type',
+ 'ConstructedAsn1Type']
+
+
+class Asn1Item(object):
+ @classmethod
+ def getTypeId(cls, increment=1):
+ try:
+ Asn1Item._typeCounter += increment
+ except AttributeError:
+ Asn1Item._typeCounter = increment
+ return Asn1Item._typeCounter
+
+
+class Asn1Type(Asn1Item):
+ """Base class for all classes representing ASN.1 types.
+
+ In the user code, |ASN.1| class is normally used only for telling
+ ASN.1 objects from others.
+
+ Note
+ ----
+    As far as ASN.1 is concerned, a way to compare ASN.1 types
+ is to use :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods.
+ """
+ #: Set or return a :py:class:`~pyasn1.type.tag.TagSet` object representing
+ #: ASN.1 tag(s) associated with |ASN.1| type.
+ tagSet = tag.TagSet()
+
+ #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ #: object imposing constraints on initialization values.
+ subtypeSpec = constraint.ConstraintsIntersection()
+
+ # Disambiguation ASN.1 types identification
+ typeId = None
+
+ def __init__(self, **kwargs):
+ readOnly = {
+ 'tagSet': self.tagSet,
+ 'subtypeSpec': self.subtypeSpec
+ }
+
+ readOnly.update(kwargs)
+
+ self.__dict__.update(readOnly)
+
+ self._readOnly = readOnly
+
+ def __setattr__(self, name, value):
+ if name[0] != '_' and name in self._readOnly:
+ raise error.PyAsn1Error('read-only instance attribute "%s"' % name)
+
+ self.__dict__[name] = value
+
+ def __str__(self):
+ return self.prettyPrint()
+
+ @property
+ def readOnly(self):
+ return self._readOnly
+
+ @property
+ def effectiveTagSet(self):
+        """For |ASN.1| type, this is equivalent to *tagSet*
+ """
+ return self.tagSet # used by untagged types
+
+ @property
+ def tagMap(self):
+ """Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping ASN.1 tags to ASN.1 objects within callee object.
+ """
+ return tagmap.TagMap({self.tagSet: self})
+
+ def isSameTypeWith(self, other, matchTags=True, matchConstraints=True):
+ """Examine |ASN.1| type for equality with other ASN.1 type.
+
+ ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints
+ (:py:mod:`~pyasn1.type.constraint`) are examined when carrying
+ out ASN.1 types comparison.
+
+ Python class inheritance relationship is NOT considered.
+
+ Parameters
+ ----------
+ other: a pyasn1 type object
+ Class instance representing ASN.1 type.
+
+ Returns
+ -------
+ : :class:`bool`
+ :obj:`True` if *other* is |ASN.1| type,
+ :obj:`False` otherwise.
+ """
+ return (self is other or
+ (not matchTags or self.tagSet == other.tagSet) and
+ (not matchConstraints or self.subtypeSpec == other.subtypeSpec))
+
+ def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True):
+ """Examine |ASN.1| type for subtype relationship with other ASN.1 type.
+
+ ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints
+ (:py:mod:`~pyasn1.type.constraint`) are examined when carrying
+ out ASN.1 types comparison.
+
+ Python class inheritance relationship is NOT considered.
+
+ Parameters
+ ----------
+ other: a pyasn1 type object
+ Class instance representing ASN.1 type.
+
+ Returns
+ -------
+ : :class:`bool`
+ :obj:`True` if *other* is a subtype of |ASN.1| type,
+ :obj:`False` otherwise.
+ """
+ return (not matchTags or
+ (self.tagSet.isSuperTagSetOf(other.tagSet)) and
+ (not matchConstraints or self.subtypeSpec.isSuperTypeOf(other.subtypeSpec)))
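+
+    # Illustrative sketch (assumed usage): type comparison looks at ASN.1 tags
+    # and constraints only, never at the Python class hierarchy:
+    #
+    #   >>> from pyasn1.type import univ
+    #   >>> univ.Integer().isSameTypeWith(univ.Integer(123))
+    #   True
+    #   >>> univ.Integer().isSameTypeWith(univ.OctetString())
+    #   False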
+
+ @staticmethod
+ def isNoValue(*values):
+ for value in values:
+ if value is not noValue:
+ return False
+ return True
+
+ def prettyPrint(self, scope=0):
+ raise NotImplementedError()
+
+ # backward compatibility
+
+ def getTagSet(self):
+ return self.tagSet
+
+ def getEffectiveTagSet(self):
+ return self.effectiveTagSet
+
+ def getTagMap(self):
+ return self.tagMap
+
+ def getSubtypeSpec(self):
+ return self.subtypeSpec
+
+ # backward compatibility
+ def hasValue(self):
+ return self.isValue
+
+# Backward compatibility
+Asn1ItemBase = Asn1Type
+
+
+class NoValue(object):
+ """Create a singleton instance of NoValue class.
+
+ The *NoValue* sentinel object represents an ASN.1 schema object as
+ opposed to an ASN.1 value object.
+
+ Only ASN.1 schema-related operations can be performed on ASN.1
+ schema objects.
+
+ Warning
+ -------
+ Any operation attempted on the *noValue* object will raise the
+ *PyAsn1Error* exception.
+ """
+ skipMethods = set(
+ ('__slots__',
+ # attributes
+ '__getattribute__',
+ '__getattr__',
+ '__setattr__',
+ '__delattr__',
+ # class instance
+ '__class__',
+ '__init__',
+ '__del__',
+ '__new__',
+ '__repr__',
+ '__qualname__',
+ '__objclass__',
+ 'im_class',
+ '__sizeof__',
+ # pickle protocol
+ '__reduce__',
+ '__reduce_ex__',
+ '__getnewargs__',
+ '__getinitargs__',
+ '__getstate__',
+ '__setstate__')
+ )
+
+ _instance = None
+
+ def __new__(cls):
+ if cls._instance is None:
+ def getPlug(name):
+ def plug(self, *args, **kw):
+ raise error.PyAsn1Error('Attempted "%s" operation on ASN.1 schema object' % name)
+ return plug
+
+ op_names = [name
+ for typ in (str, int, list, dict)
+ for name in dir(typ)
+ if (name not in cls.skipMethods and
+ name.startswith('__') and
+ name.endswith('__') and
+ calling.callable(getattr(typ, name)))]
+
+ for name in set(op_names):
+ setattr(cls, name, getPlug(name))
+
+ cls._instance = object.__new__(cls)
+
+ return cls._instance
+
+ def __getattr__(self, attr):
+ if attr in self.skipMethods:
+ raise AttributeError('Attribute %s not present' % attr)
+
+ raise error.PyAsn1Error('Attempted "%s" operation on ASN.1 schema object' % attr)
+
+ def __repr__(self):
+ return '<%s object>' % self.__class__.__name__
+
+
+noValue = NoValue()
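+
+# Rough usage sketch of the sentinel (hypothetical snippet, assuming the
+# ``univ.Integer`` type defined elsewhere in this package):
+#
+#   i = univ.Integer()      # schema object; holds noValue internally
+#   i.isValue               # False
+#   noValue + 1             # raises PyAsn1Error: operation on schema object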
+
+
+class SimpleAsn1Type(Asn1Type):
+ """Base class for all simple classes representing ASN.1 types.
+
+ ASN.1 distinguishes types by their ability to hold other objects.
+ Scalar types are known as *simple* in ASN.1.
+
+ In the user code, |ASN.1| class is normally used only for telling
+ ASN.1 objects from others.
+
+ Note
+ ----
+ As far as ASN.1 is concerned, the way to compare ASN.1 types
+ is to use the :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods.
+ """
+ #: Default payload value
+ defaultValue = noValue
+
+ def __init__(self, value=noValue, **kwargs):
+ Asn1Type.__init__(self, **kwargs)
+ if value is noValue:
+ value = self.defaultValue
+ else:
+ value = self.prettyIn(value)
+ try:
+ self.subtypeSpec(value)
+
+ except error.PyAsn1Error:
+ exType, exValue, exTb = sys.exc_info()
+ raise exType('%s at %s' % (exValue, self.__class__.__name__))
+
+ self._value = value
+
+ def __repr__(self):
+ representation = '%s %s object' % (
+ self.__class__.__name__, self.isValue and 'value' or 'schema')
+
+ for attr, value in self.readOnly.items():
+ if value:
+ representation += ', %s %s' % (attr, value)
+
+ if self.isValue:
+ value = self.prettyPrint()
+ if len(value) > 32:
+ value = value[:16] + '...' + value[-16:]
+ representation += ', payload [%s]' % value
+
+ return '<%s>' % representation
+
+ def __eq__(self, other):
+ return self is other and True or self._value == other
+
+ def __ne__(self, other):
+ return self._value != other
+
+ def __lt__(self, other):
+ return self._value < other
+
+ def __le__(self, other):
+ return self._value <= other
+
+ def __gt__(self, other):
+ return self._value > other
+
+ def __ge__(self, other):
+ return self._value >= other
+
+ if sys.version_info[0] <= 2:
+ def __nonzero__(self):
+ return self._value and True or False
+ else:
+ def __bool__(self):
+ return self._value and True or False
+
+ def __hash__(self):
+ return hash(self._value)
+
+ @property
+ def isValue(self):
+ """Indicate that |ASN.1| object represents ASN.1 value.
+
+ If *isValue* is :obj:`False` then this object represents just
+ ASN.1 schema.
+
+ If *isValue* is :obj:`True` then, in addition to its ASN.1 schema
+ features, this object can also be used like a Python built-in object
+ (e.g. :class:`int`, :class:`str`, :class:`dict` etc.).
+
+ Returns
+ -------
+ : :class:`bool`
+ :obj:`False` if object represents just ASN.1 schema.
+ :obj:`True` if object represents ASN.1 schema and can be used as a normal value.
+
+ Note
+ ----
+ There is an important distinction between PyASN1 schema and value objects.
+ The PyASN1 schema objects can only participate in ASN.1 schema-related
+ operations (e.g. defining or testing the structure of the data). Most
+ obvious uses of ASN.1 schema is to guide serialisation codecs whilst
+ encoding/decoding serialised ASN.1 contents.
+
+ The PyASN1 value objects can **additionally** participate in many operations
+ involving regular Python objects (e.g. arithmetic, comprehension etc).
+ """
+ return self._value is not noValue
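+
+ # A rough sketch of the distinction (assumes the ``univ.Integer`` type
+ # from this package; values are illustrative):
+ #
+ #   univ.Integer().isValue      # False -- schema object only
+ #   univ.Integer(12).isValue    # True  -- carries a payload
+ #   univ.Integer(12) + 1        # Integer(13); value objects mix with built-ins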
+
+ def clone(self, value=noValue, **kwargs):
+ """Create a modified version of |ASN.1| schema or value object.
+
+ The `clone()` method accepts the same set of arguments as the |ASN.1|
+ class takes on instantiation, except that all arguments
+ of the `clone()` method are optional.
+
+ Whatever arguments are supplied, they are used to create a copy
+ of `self` taking precedence over the ones used to instantiate `self`.
+
+ Note
+ ----
+ Due to the immutable nature of the |ASN.1| object, if no arguments
+ are supplied, no new |ASN.1| object will be created and `self` will
+ be returned instead.
+ """
+ if value is noValue:
+ if not kwargs:
+ return self
+
+ value = self._value
+
+ initializers = self.readOnly.copy()
+ initializers.update(kwargs)
+
+ return self.__class__(value, **initializers)
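+
+ # Rough usage sketch (hypothetical values, assuming ``univ.Integer``):
+ #
+ #   one = univ.Integer(1)
+ #   two = one.clone(2)      # new Integer value object carrying 2
+ #   same = one.clone()      # no arguments -- `one` itself is returned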
+
+ def subtype(self, value=noValue, **kwargs):
+ """Create a specialization of |ASN.1| schema or value object.
+
+ The subtype relationship between ASN.1 types has no correlation with
+ subtype relationship between Python types. ASN.1 type is mainly identified
+ by its tag(s) (:py:class:`~pyasn1.type.tag.TagSet`) and value range
+ constraints (:py:class:`~pyasn1.type.constraint.ConstraintsIntersection`).
+ These ASN.1 type properties are implemented as |ASN.1| attributes.
+
+ The `subtype()` method accepts the same set of arguments as the |ASN.1|
+ class takes on instantiation, except that all parameters
+ of the `subtype()` method are optional.
+
+ With the exception of the arguments described below, the rest of the
+ supplied arguments are used to create a copy of `self`, taking
+ precedence over the ones used to instantiate `self`.
+
+ The following arguments to `subtype()` create an ASN.1 subtype out of
+ |ASN.1| type:
+
+ Other Parameters
+ ----------------
+ implicitTag: :py:class:`~pyasn1.type.tag.Tag`
+ Implicitly apply given ASN.1 tag object to `self`'s
+ :py:class:`~pyasn1.type.tag.TagSet`, then use the result as
+ new object's ASN.1 tag(s).
+
+ explicitTag: :py:class:`~pyasn1.type.tag.Tag`
+ Explicitly apply given ASN.1 tag object to `self`'s
+ :py:class:`~pyasn1.type.tag.TagSet`, then use the result as
+ new object's ASN.1 tag(s).
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Add the given ASN.1 constraints object to `self`'s constraints, then
+ use the result as the new object's ASN.1 constraints.
+
+ Returns
+ -------
+ :
+ new instance of |ASN.1| schema or value object
+
+ Note
+ ----
+ Due to the immutable nature of the |ASN.1| object, if no arguments
+ are supplied, no new |ASN.1| object will be created and `self` will
+ be returned instead.
+ """
+ if value is noValue:
+ if not kwargs:
+ return self
+
+ value = self._value
+
+ initializers = self.readOnly.copy()
+
+ implicitTag = kwargs.pop('implicitTag', None)
+ if implicitTag is not None:
+ initializers['tagSet'] = self.tagSet.tagImplicitly(implicitTag)
+
+ explicitTag = kwargs.pop('explicitTag', None)
+ if explicitTag is not None:
+ initializers['tagSet'] = self.tagSet.tagExplicitly(explicitTag)
+
+ for arg, option in kwargs.items():
+ initializers[arg] += option
+
+ return self.__class__(value, **initializers)
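+
+ # Rough usage sketch (assumes the ``tag`` module imported above and a
+ # ``univ.Integer`` base type; the tag number is illustrative):
+ #
+ #   TaggedInt = univ.Integer().subtype(
+ #       implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
+ #
+ # The result keeps Integer semantics but carries the context-specific
+ # tag [0] in place of the universal INTEGER tag.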
+
+ def prettyIn(self, value):
+ return value
+
+ def prettyOut(self, value):
+ return str(value)
+
+ def prettyPrint(self, scope=0):
+ return self.prettyOut(self._value)
+
+ def prettyPrintType(self, scope=0):
+ return '%s -> %s' % (self.tagSet, self.__class__.__name__)
+
+# Backward compatibility
+AbstractSimpleAsn1Item = SimpleAsn1Type
+
+#
+# Constructed types:
+# * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice
+# * ASN1 types and values are represented by Python class instances
+# * Value initialization is made for defaulted components only
+# * Primary method of component addressing is by-position. Data model for base
+# type is Python sequence. Additional type-specific addressing methods
+# may be implemented for particular types.
+# * SequenceOf and SetOf types do not implement any additional methods
+# * Sequence, Set and Choice types also implement by-identifier addressing
+# * Sequence, Set and Choice types also implement by-asn1-type (tag) addressing
+# * Sequence and Set types may include optional and defaulted
+# components
+# * Constructed types hold a reference to component types used for value
+# verification and ordering.
+# * Component type is a scalar type for SequenceOf/SetOf types and a list
+# of types for Sequence/Set/Choice.
+#
+
+
+class ConstructedAsn1Type(Asn1Type):
+ """Base class for all constructed classes representing ASN.1 types.
+
+ ASN.1 distinguishes types by their ability to hold other objects.
+ Those "nesting" types are known as *constructed* in ASN.1.
+
+ In the user code, |ASN.1| class is normally used only for telling
+ ASN.1 objects from others.
+
+ Note
+ ----
+ As far as ASN.1 is concerned, the way to compare ASN.1 types
+ is to use the :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods.
+ """
+
+ #: If :obj:`True`, requires exact component type matching,
+ #: otherwise only the subtype relation is enforced
+ strictConstraints = False
+
+ componentType = None
+
+ # backward compatibility, unused
+ sizeSpec = constraint.ConstraintsIntersection()
+
+ def __init__(self, **kwargs):
+ readOnly = {
+ 'componentType': self.componentType,
+ # backward compatibility, unused
+ 'sizeSpec': self.sizeSpec
+ }
+
+ # backward compatibility: preserve legacy sizeSpec support
+ kwargs = self._moveSizeSpec(**kwargs)
+
+ readOnly.update(kwargs)
+
+ Asn1Type.__init__(self, **readOnly)
+
+ def _moveSizeSpec(self, **kwargs):
+ # backward compatibility, unused
+ sizeSpec = kwargs.pop('sizeSpec', self.sizeSpec)
+ if sizeSpec:
+ subtypeSpec = kwargs.pop('subtypeSpec', self.subtypeSpec)
+ if subtypeSpec:
+ subtypeSpec = sizeSpec
+
+ else:
+ subtypeSpec += sizeSpec
+
+ kwargs['subtypeSpec'] = subtypeSpec
+
+ return kwargs
+
+ def __repr__(self):
+ representation = '%s %s object' % (
+ self.__class__.__name__, self.isValue and 'value' or 'schema'
+ )
+
+ for attr, value in self.readOnly.items():
+ if value is not noValue:
+ representation += ', %s=%r' % (attr, value)
+
+ if self.isValue and self.components:
+ representation += ', payload [%s]' % ', '.join(
+ [repr(x) for x in self.components])
+
+ return '<%s>' % representation
+
+ def __eq__(self, other):
+ return self is other or self.components == other
+
+ def __ne__(self, other):
+ return self.components != other
+
+ def __lt__(self, other):
+ return self.components < other
+
+ def __le__(self, other):
+ return self.components <= other
+
+ def __gt__(self, other):
+ return self.components > other
+
+ def __ge__(self, other):
+ return self.components >= other
+
+ if sys.version_info[0] <= 2:
+ def __nonzero__(self):
+ return bool(self.components)
+ else:
+ def __bool__(self):
+ return bool(self.components)
+
+ @property
+ def components(self):
+ raise error.PyAsn1Error('Method not implemented')
+
+ def _cloneComponentValues(self, myClone, cloneValueFlag):
+ pass
+
+ def clone(self, **kwargs):
+ """Create a modified version of |ASN.1| schema object.
+
+ The `clone()` method accepts the same set of arguments as the |ASN.1|
+ class takes on instantiation, except that all arguments
+ of the `clone()` method are optional.
+
+ Whatever arguments are supplied, they are used to create a copy
+ of `self` taking precedence over the ones used to instantiate `self`.
+
+ Possible values of `self` are never copied over thus `clone()` can
+ only create a new schema object.
+
+ Returns
+ -------
+ :
+ new instance of |ASN.1| type/value
+
+ Note
+ ----
+ Due to the mutable nature of the |ASN.1| object, even if no arguments
+ are supplied, a new |ASN.1| object will be created and returned.
+ """
+ cloneValueFlag = kwargs.pop('cloneValueFlag', False)
+
+ initializers = self.readOnly.copy()
+ initializers.update(kwargs)
+
+ clone = self.__class__(**initializers)
+
+ if cloneValueFlag:
+ self._cloneComponentValues(clone, cloneValueFlag)
+
+ return clone
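+
+ # Rough usage sketch (``SomeSequenceOf`` is a hypothetical
+ # ``univ.SequenceOf`` subtype; component values are illustrative):
+ #
+ #   seq = SomeSequenceOf()
+ #   seq.extend([1, 2, 3])
+ #   empty = seq.clone()                     # schema copy, no components
+ #   full = seq.clone(cloneValueFlag=True)   # component values copied too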
+
+ def subtype(self, **kwargs):
+ """Create a specialization of |ASN.1| schema object.
+
+ The `subtype()` method accepts the same set of arguments as the |ASN.1|
+ class takes on instantiation, except that all parameters
+ of the `subtype()` method are optional.
+
+ With the exception of the arguments described below, the rest of the
+ supplied arguments are used to create a copy of `self`, taking
+ precedence over the ones used to instantiate `self`.
+
+ The following arguments to `subtype()` create an ASN.1 subtype out of
+ |ASN.1| type.
+
+ Other Parameters
+ ----------------
+ implicitTag: :py:class:`~pyasn1.type.tag.Tag`
+ Implicitly apply given ASN.1 tag object to `self`'s
+ :py:class:`~pyasn1.type.tag.TagSet`, then use the result as
+ new object's ASN.1 tag(s).
+
+ explicitTag: :py:class:`~pyasn1.type.tag.Tag`
+ Explicitly apply given ASN.1 tag object to `self`'s
+ :py:class:`~pyasn1.type.tag.TagSet`, then use the result as
+ new object's ASN.1 tag(s).
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Add the given ASN.1 constraints object to `self`'s constraints, then
+ use the result as the new object's ASN.1 constraints.
+
+
+ Returns
+ -------
+ :
+ new instance of |ASN.1| type/value
+
+ Note
+ ----
+ Due to the mutable nature of the |ASN.1| object, even if no arguments
+ are supplied, a new |ASN.1| object will be created and returned.
+ """
+
+ initializers = self.readOnly.copy()
+
+ cloneValueFlag = kwargs.pop('cloneValueFlag', False)
+
+ implicitTag = kwargs.pop('implicitTag', None)
+ if implicitTag is not None:
+ initializers['tagSet'] = self.tagSet.tagImplicitly(implicitTag)
+
+ explicitTag = kwargs.pop('explicitTag', None)
+ if explicitTag is not None:
+ initializers['tagSet'] = self.tagSet.tagExplicitly(explicitTag)
+
+ for arg, option in kwargs.items():
+ initializers[arg] += option
+
+ clone = self.__class__(**initializers)
+
+ if cloneValueFlag:
+ self._cloneComponentValues(clone, cloneValueFlag)
+
+ return clone
+
+ def getComponentByPosition(self, idx):
+ raise error.PyAsn1Error('Method not implemented')
+
+ def setComponentByPosition(self, idx, value, verifyConstraints=True):
+ raise error.PyAsn1Error('Method not implemented')
+
+ def setComponents(self, *args, **kwargs):
+ for idx, value in enumerate(args):
+ self[idx] = value
+ for k in kwargs:
+ self[k] = kwargs[k]
+ return self
+
+ # backward compatibility
+
+ def setDefaultComponents(self):
+ pass
+
+ def getComponentType(self):
+ return self.componentType
+
+ # backward compatibility, unused
+ def verifySizeSpec(self):
+ self.subtypeSpec(self)
+
+
+# Backward compatibility
+AbstractConstructedAsn1Item = ConstructedAsn1Type
diff --git a/third_party/python/pyasn1/pyasn1/type/char.py b/third_party/python/pyasn1/pyasn1/type/char.py
new file mode 100644
index 0000000000..06074da0f7
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/type/char.py
@@ -0,0 +1,335 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+import sys
+
+from pyasn1 import error
+from pyasn1.type import tag
+from pyasn1.type import univ
+
+__all__ = ['NumericString', 'PrintableString', 'TeletexString', 'T61String', 'VideotexString',
+ 'IA5String', 'GraphicString', 'VisibleString', 'ISO646String',
+ 'GeneralString', 'UniversalString', 'BMPString', 'UTF8String']
+
+NoValue = univ.NoValue
+noValue = univ.noValue
+
+
+class AbstractCharacterString(univ.OctetString):
+ """Creates |ASN.1| schema or value object.
+
+ |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`,
+ its objects are immutable and duck-type Python 2 :class:`str` or Python 3
+ :class:`bytes`. When used in octet-stream context, |ASN.1| type assumes
+ "|encoding|" encoding.
+
+ Keyword Args
+ ------------
+ value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
+ :class:`unicode` object (Python 2) or :class:`str` (Python 3),
+ alternatively :class:`str` (Python 2) or :class:`bytes` (Python 3)
+ representing octet-stream of serialised unicode string
+ (note `encoding` parameter) or |ASN.1| class instance.
+ If `value` is not given, schema object will be created.
+
+ tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing non-default ASN.1 tag(s)
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Object representing non-default ASN.1 subtype constraint(s). Constraints
+ verification for |ASN.1| type occurs automatically on object
+ instantiation.
+
+ encoding: :py:class:`str`
+ Unicode codec ID to encode/decode :class:`unicode` (Python 2) or
+ :class:`str` (Python 3) the payload when |ASN.1| object is used
+ in octet-stream context.
+
+ Raises
+ ------
+ ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
+ On constraint violation or bad initializer.
+ """
+
+ if sys.version_info[0] <= 2:
+ def __str__(self):
+ try:
+ # `str` is Py2 text representation
+ return self._value.encode(self.encoding)
+
+ except UnicodeEncodeError:
+ exc = sys.exc_info()[1]
+ raise error.PyAsn1UnicodeEncodeError(
+ "Can't encode string '%s' with codec "
+ "%s" % (self._value, self.encoding), exc
+ )
+
+ def __unicode__(self):
+ return unicode(self._value)
+
+ def prettyIn(self, value):
+ try:
+ if isinstance(value, unicode):
+ return value
+ elif isinstance(value, str):
+ return value.decode(self.encoding)
+ elif isinstance(value, (tuple, list)):
+ return self.prettyIn(''.join([chr(x) for x in value]))
+ elif isinstance(value, univ.OctetString):
+ return value.asOctets().decode(self.encoding)
+ else:
+ return unicode(value)
+
+ except (UnicodeDecodeError, LookupError):
+ exc = sys.exc_info()[1]
+ raise error.PyAsn1UnicodeDecodeError(
+ "Can't decode string '%s' with codec "
+ "%s" % (value, self.encoding), exc
+ )
+
+ def asOctets(self, padding=True):
+ return str(self)
+
+ def asNumbers(self, padding=True):
+ return tuple([ord(x) for x in str(self)])
+
+ else:
+ def __str__(self):
+ # `unicode` is Py3 text representation
+ return str(self._value)
+
+ def __bytes__(self):
+ try:
+ return self._value.encode(self.encoding)
+ except UnicodeEncodeError:
+ exc = sys.exc_info()[1]
+ raise error.PyAsn1UnicodeEncodeError(
+ "Can't encode string '%s' with codec "
+ "%s" % (self._value, self.encoding), exc
+ )
+
+ def prettyIn(self, value):
+ try:
+ if isinstance(value, str):
+ return value
+ elif isinstance(value, bytes):
+ return value.decode(self.encoding)
+ elif isinstance(value, (tuple, list)):
+ return self.prettyIn(bytes(value))
+ elif isinstance(value, univ.OctetString):
+ return value.asOctets().decode(self.encoding)
+ else:
+ return str(value)
+
+ except (UnicodeDecodeError, LookupError):
+ exc = sys.exc_info()[1]
+ raise error.PyAsn1UnicodeDecodeError(
+ "Can't decode string '%s' with codec "
+ "%s" % (value, self.encoding), exc
+ )
+
+ def asOctets(self, padding=True):
+ return bytes(self)
+
+ def asNumbers(self, padding=True):
+ return tuple(bytes(self))
+
+ #
+ # See OctetString.prettyPrint() for the explanation
+ #
+
+ def prettyOut(self, value):
+ return value
+
+ def prettyPrint(self, scope=0):
+ # first see if subclass has its own .prettyOut()
+ value = self.prettyOut(self._value)
+
+ if value is not self._value:
+ return value
+
+ return AbstractCharacterString.__str__(self)
+
+ def __reversed__(self):
+ return reversed(self._value)
+
+
+class NumericString(AbstractCharacterString):
+ __doc__ = AbstractCharacterString.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18)
+ )
+ encoding = 'us-ascii'
+
+ # Optimization for faster codec lookup
+ typeId = AbstractCharacterString.getTypeId()
+
+
+class PrintableString(AbstractCharacterString):
+ __doc__ = AbstractCharacterString.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 19)
+ )
+ encoding = 'us-ascii'
+
+ # Optimization for faster codec lookup
+ typeId = AbstractCharacterString.getTypeId()
+
+
+class TeletexString(AbstractCharacterString):
+ __doc__ = AbstractCharacterString.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20)
+ )
+ encoding = 'iso-8859-1'
+
+ # Optimization for faster codec lookup
+ typeId = AbstractCharacterString.getTypeId()
+
+
+class T61String(TeletexString):
+ __doc__ = TeletexString.__doc__
+
+ # Optimization for faster codec lookup
+ typeId = AbstractCharacterString.getTypeId()
+
+
+class VideotexString(AbstractCharacterString):
+ __doc__ = AbstractCharacterString.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 21)
+ )
+ encoding = 'iso-8859-1'
+
+ # Optimization for faster codec lookup
+ typeId = AbstractCharacterString.getTypeId()
+
+
+class IA5String(AbstractCharacterString):
+ __doc__ = AbstractCharacterString.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 22)
+ )
+ encoding = 'us-ascii'
+
+ # Optimization for faster codec lookup
+ typeId = AbstractCharacterString.getTypeId()
+
+
+class GraphicString(AbstractCharacterString):
+ __doc__ = AbstractCharacterString.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 25)
+ )
+ encoding = 'iso-8859-1'
+
+ # Optimization for faster codec lookup
+ typeId = AbstractCharacterString.getTypeId()
+
+
+class VisibleString(AbstractCharacterString):
+ __doc__ = AbstractCharacterString.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26)
+ )
+ encoding = 'us-ascii'
+
+ # Optimization for faster codec lookup
+ typeId = AbstractCharacterString.getTypeId()
+
+
+class ISO646String(VisibleString):
+ __doc__ = VisibleString.__doc__
+
+ # Optimization for faster codec lookup
+ typeId = AbstractCharacterString.getTypeId()
+
+
+class GeneralString(AbstractCharacterString):
+ __doc__ = AbstractCharacterString.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27)
+ )
+ encoding = 'iso-8859-1'
+
+ # Optimization for faster codec lookup
+ typeId = AbstractCharacterString.getTypeId()
+
+
+class UniversalString(AbstractCharacterString):
+ __doc__ = AbstractCharacterString.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 28)
+ )
+ encoding = "utf-32-be"
+
+ # Optimization for faster codec lookup
+ typeId = AbstractCharacterString.getTypeId()
+
+
+class BMPString(AbstractCharacterString):
+ __doc__ = AbstractCharacterString.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30)
+ )
+ encoding = "utf-16-be"
+
+ # Optimization for faster codec lookup
+ typeId = AbstractCharacterString.getTypeId()
+
+
+class UTF8String(AbstractCharacterString):
+ __doc__ = AbstractCharacterString.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
+ )
+ encoding = "utf-8"
+
+ # Optimization for faster codec lookup
+ typeId = AbstractCharacterString.getTypeId()
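+
+
+# Rough usage sketch for the character types above (hypothetical values):
+#
+#   s = UTF8String('weather')    # value object, behaves like text
+#   s.asOctets()                 # b'weather', serialised per ``encoding``
+#   BMPString('hi').asOctets()   # UTF-16-BE serialisation of the payload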
diff --git a/third_party/python/pyasn1/pyasn1/type/constraint.py b/third_party/python/pyasn1/pyasn1/type/constraint.py
new file mode 100644
index 0000000000..8f152e9e9c
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/type/constraint.py
@@ -0,0 +1,756 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+# Original concept and code by Mike C. Fletcher.
+#
+import sys
+
+from pyasn1.type import error
+
+__all__ = ['SingleValueConstraint', 'ContainedSubtypeConstraint',
+ 'ValueRangeConstraint', 'ValueSizeConstraint',
+ 'PermittedAlphabetConstraint', 'InnerTypeConstraint',
+ 'ConstraintsExclusion', 'ConstraintsIntersection',
+ 'ConstraintsUnion']
+
+
+class AbstractConstraint(object):
+
+ def __init__(self, *values):
+ self._valueMap = set()
+ self._setValues(values)
+ self.__hash = hash((self.__class__.__name__, self._values))
+
+ def __call__(self, value, idx=None):
+ if not self._values:
+ return
+
+ try:
+ self._testValue(value, idx)
+
+ except error.ValueConstraintError:
+ raise error.ValueConstraintError(
+ '%s failed at: %r' % (self, sys.exc_info()[1])
+ )
+
+ def __repr__(self):
+ representation = '%s object' % (self.__class__.__name__)
+
+ if self._values:
+ representation += ', consts %s' % ', '.join(
+ [repr(x) for x in self._values])
+
+ return '<%s>' % representation
+
+ def __eq__(self, other):
+ return self is other and True or self._values == other
+
+ def __ne__(self, other):
+ return self._values != other
+
+ def __lt__(self, other):
+ return self._values < other
+
+ def __le__(self, other):
+ return self._values <= other
+
+ def __gt__(self, other):
+ return self._values > other
+
+ def __ge__(self, other):
+ return self._values >= other
+
+ if sys.version_info[0] <= 2:
+ def __nonzero__(self):
+ return self._values and True or False
+ else:
+ def __bool__(self):
+ return self._values and True or False
+
+ def __hash__(self):
+ return self.__hash
+
+ def _setValues(self, values):
+ self._values = values
+
+ def _testValue(self, value, idx):
+ raise error.ValueConstraintError(value)
+
+ # Constraints derivation logic
+ def getValueMap(self):
+ return self._valueMap
+
+ def isSuperTypeOf(self, otherConstraint):
+ # TODO: fix possible comparison of set vs scalars here
+ return (otherConstraint is self or
+ not self._values or
+ otherConstraint == self or
+ self in otherConstraint.getValueMap())
+
+ def isSubTypeOf(self, otherConstraint):
+ return (otherConstraint is self or
+ not self or
+ otherConstraint == self or
+ otherConstraint in self._valueMap)
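+
+ # Sketch of the shared call protocol (illustrative; concrete constraint
+ # classes follow below):
+ #
+ #   spec = SingleValueConstraint(1, 2, 3)
+ #   spec(2)    # returns None -- value permitted
+ #   spec(5)    # raises ValueConstraintError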
+
+
+class SingleValueConstraint(AbstractConstraint):
+ """Create a SingleValueConstraint object.
+
+ The SingleValueConstraint satisfies any value that
+ is present in the set of permitted values.
+
+ Objects of this type are iterable (emitting constraint values) and
+ can act as operands for some arithmetic operations e.g. addition
+ and subtraction. The latter can be used for combining multiple
+ SingleValueConstraint objects into one.
+
+ The SingleValueConstraint object can be applied to
+ any ASN.1 type.
+
+ Parameters
+ ----------
+ *values: :class:`int`
+ Full set of values permitted by this constraint object.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class DivisorOfSix(Integer):
+ '''
+ ASN.1 specification:
+
+ Divisor-Of-6 ::= INTEGER (1 | 2 | 3 | 6)
+ '''
+ subtypeSpec = SingleValueConstraint(1, 2, 3, 6)
+
+ # this will succeed
+ divisor_of_six = DivisorOfSix(1)
+
+ # this will raise ValueConstraintError
+ divisor_of_six = DivisorOfSix(7)
+ """
+ def _setValues(self, values):
+ self._values = values
+ self._set = set(values)
+
+ def _testValue(self, value, idx):
+ if value not in self._set:
+ raise error.ValueConstraintError(value)
+
+ # Constrains can be merged or reduced
+
+ def __contains__(self, item):
+ return item in self._set
+
+ def __iter__(self):
+ return iter(self._set)
+
+ def __sub__(self, constraint):
+ return self.__class__(*(self._set.difference(constraint)))
+
+ def __add__(self, constraint):
+ return self.__class__(*(self._set.union(constraint)))
+
+ def __sub__(self, constraint):
+ return self.__class__(*(self._set.difference(constraint)))
+
+
+class ContainedSubtypeConstraint(AbstractConstraint):
+ """Create a ContainedSubtypeConstraint object.
+
+ The ContainedSubtypeConstraint satisfies any value that
+ is present in the set of permitted values and also
+ satisfies included constraints.
+
+ The ContainedSubtypeConstraint object can be applied to
+ any ASN.1 type.
+
+ Parameters
+ ----------
+ *values:
+ Full set of values and constraint objects permitted
+ by this constraint object.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class DivisorOfEighteen(Integer):
+ '''
+ ASN.1 specification:
+
+ Divisors-of-18 ::= INTEGER (INCLUDES Divisors-of-6 | 9 | 18)
+ '''
+ subtypeSpec = ContainedSubtypeConstraint(
+ SingleValueConstraint(1, 2, 3, 6), 9, 18
+ )
+
+ # this will succeed
+ divisor_of_eighteen = DivisorOfEighteen(9)
+
+ # this will raise ValueConstraintError
+ divisor_of_eighteen = DivisorOfEighteen(10)
+ """
+ def _testValue(self, value, idx):
+ for constraint in self._values:
+ if isinstance(constraint, AbstractConstraint):
+ constraint(value, idx)
+ elif value not in self._set:
+ raise error.ValueConstraintError(value)
+
+
+class ValueRangeConstraint(AbstractConstraint):
+ """Create a ValueRangeConstraint object.
+
+ The ValueRangeConstraint satisfies any value that
+ falls in the range of permitted values.
+
+ The ValueRangeConstraint object can only be applied
+ to :class:`~pyasn1.type.univ.Integer` and
+ :class:`~pyasn1.type.univ.Real` types.
+
+ Parameters
+ ----------
+ start: :class:`int`
+ Minimum permitted value in the range (inclusive)
+
+ end: :class:`int`
+ Maximum permitted value in the range (inclusive)
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class TeenAgeYears(Integer):
+ '''
+ ASN.1 specification:
+
+ TeenAgeYears ::= INTEGER (13 .. 19)
+ '''
+ subtypeSpec = ValueRangeConstraint(13, 19)
+
+ # this will succeed
+ teen_year = TeenAgeYears(18)
+
+ # this will raise ValueConstraintError
+ teen_year = TeenAgeYears(20)
+ """
+ def _testValue(self, value, idx):
+ if value < self.start or value > self.stop:
+ raise error.ValueConstraintError(value)
+
+ def _setValues(self, values):
+ if len(values) != 2:
+ raise error.PyAsn1Error(
+ '%s: bad constraint values' % (self.__class__.__name__,)
+ )
+ self.start, self.stop = values
+ if self.start > self.stop:
+ raise error.PyAsn1Error(
+ '%s: screwed constraint values (start > stop): %s > %s' % (
+ self.__class__.__name__,
+ self.start, self.stop
+ )
+ )
+ AbstractConstraint._setValues(self, values)
+
+
+class ValueSizeConstraint(ValueRangeConstraint):
+ """Create a ValueSizeConstraint object.
+
+ The ValueSizeConstraint satisfies any value for
+ as long as its size falls within the range of
+ permitted sizes.
+
+ The ValueSizeConstraint object can be applied
+ to :class:`~pyasn1.type.univ.BitString`,
+ :class:`~pyasn1.type.univ.OctetString` (including
+ all :ref:`character ASN.1 types <type.char>`),
+ :class:`~pyasn1.type.univ.SequenceOf`
+ and :class:`~pyasn1.type.univ.SetOf` types.
+
+ Parameters
+ ----------
+ minimum: :class:`int`
+ Minimum permitted size of the value (inclusive)
+
+ maximum: :class:`int`
+ Maximum permitted size of the value (inclusive)
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class BaseballTeamRoster(SetOf):
+ '''
+ ASN.1 specification:
+
+ BaseballTeamRoster ::= SET SIZE (1..25) OF PlayerNames
+ '''
+ componentType = PlayerNames()
+ subtypeSpec = ValueSizeConstraint(1, 25)
+
+ # this will succeed
+ team = BaseballTeamRoster()
+ team.extend(['Jan', 'Matej'])
+ encode(team)
+
+ # this will raise ValueConstraintError
+ team = BaseballTeamRoster()
+ team.extend(['Jan'] * 26)
+ encode(team)
+
+ Note
+ ----
+ Whenever ValueSizeConstraint is applied to mutable types
+ (e.g. :class:`~pyasn1.type.univ.SequenceOf`,
+ :class:`~pyasn1.type.univ.SetOf`), constraint
+ validation only happens at the serialisation phase rather
+ than schema instantiation phase (as it is with immutable
+ types).
+ """
+ def _testValue(self, value, idx):
+ valueSize = len(value)
+ if valueSize < self.start or valueSize > self.stop:
+ raise error.ValueConstraintError(value)
+
+
+class PermittedAlphabetConstraint(SingleValueConstraint):
+ """Create a PermittedAlphabetConstraint object.
+
+ The PermittedAlphabetConstraint satisfies any character
+ string for as long as all its characters are present in
+ the set of permitted characters.
+
+ Objects of this type are iterable (emitting constraint values) and
+ can act as operands for some arithmetic operations e.g. addition
+ and subtraction.
+
+ The PermittedAlphabetConstraint object can only be applied
+ to the :ref:`character ASN.1 types <type.char>` such as
+ :class:`~pyasn1.type.char.IA5String`.
+
+ Parameters
+ ----------
+ *alphabet: :class:`str`
+ Full set of characters permitted by this constraint object.
+
+ Example
+ -------
+ .. code-block:: python
+
+ class BooleanValue(IA5String):
+ '''
+ ASN.1 specification:
+
+ BooleanValue ::= IA5String (FROM ('T' | 'F'))
+ '''
+ subtypeSpec = PermittedAlphabetConstraint('T', 'F')
+
+ # this will succeed
+ truth = BooleanValue('T')
+ truth = BooleanValue('TF')
+
+ # this will raise ValueConstraintError
+ garbage = BooleanValue('TAF')
+
+ ASN.1 `FROM ... EXCEPT ...` clause can be modelled by combining multiple
+ PermittedAlphabetConstraint objects into one:
+
+ Example
+ -------
+ .. code-block:: python
+
+ class Lipogramme(IA5String):
+ '''
+ ASN.1 specification:
+
+ Lipogramme ::=
+ IA5String (FROM (ALL EXCEPT ("e"|"E")))
+ '''
+ subtypeSpec = (
+ PermittedAlphabetConstraint(*string.printable) -
+ PermittedAlphabetConstraint('e', 'E')
+ )
+
+ # this will succeed
+ lipogramme = Lipogramme('A work of fiction?')
+
+ # this will raise ValueConstraintError
+ lipogramme = Lipogramme('Eel')
+
+ Note
+ ----
+ Although `ConstraintsExclusion` object could seemingly be used for this
+ purpose, practically, for it to work, it needs to represent its operand
+ constraints as sets and intersect one with the other. That would require
+ the insight into the constraint values (and their types) that are otherwise
+ hidden inside the constraint object.
+
+ Therefore it's more practical to model `EXCEPT` clause at
+ `PermittedAlphabetConstraint` level instead.
+ """
+ def _setValues(self, values):
+ self._values = values
+ self._set = set(values)
+
+ def _testValue(self, value, idx):
+ if not self._set.issuperset(value):
+ raise error.ValueConstraintError(value)
+
+
+class ComponentPresentConstraint(AbstractConstraint):
+ """Create a ComponentPresentConstraint object.
+
+ The ComponentPresentConstraint is only satisfied when the value
+ is not `None`.
+
+ The ComponentPresentConstraint object is typically used with
+ `WithComponentsConstraint`.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ present = ComponentPresentConstraint()
+
+ # this will succeed
+ present('whatever')
+
+ # this will raise ValueConstraintError
+ present(None)
+ """
+ def _setValues(self, values):
+ self._values = ('<must be present>',)
+
+ if values:
+ raise error.PyAsn1Error('No arguments expected')
+
+ def _testValue(self, value, idx):
+ if value is None:
+ raise error.ValueConstraintError(
+ 'Component is not present:')
+
+
+class ComponentAbsentConstraint(AbstractConstraint):
+ """Create a ComponentAbsentConstraint object.
+
+ The ComponentAbsentConstraint is only satisfied when the value
+ is `None`.
+
+ The ComponentAbsentConstraint object is typically used with
+ `WithComponentsConstraint`.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ absent = ComponentAbsentConstraint()
+
+ # this will succeed
+ absent(None)
+
+ # this will raise ValueConstraintError
+ absent('whatever')
+ """
+ def _setValues(self, values):
+ self._values = ('<must be absent>',)
+
+ if values:
+ raise error.PyAsn1Error('No arguments expected')
+
+ def _testValue(self, value, idx):
+ if value is not None:
+ raise error.ValueConstraintError(
+ 'Component is not absent: %r' % value)
+
+
+class WithComponentsConstraint(AbstractConstraint):
+ """Create a WithComponentsConstraint object.
+
+ The `WithComponentsConstraint` satisfies any mapping object that has
+ constrained fields present or absent, as indicated by
+ `ComponentPresentConstraint` and `ComponentAbsentConstraint`
+ objects respectively.
+
+ The `WithComponentsConstraint` object is typically applied
+ to :class:`~pyasn1.type.univ.Set` or
+ :class:`~pyasn1.type.univ.Sequence` types.
+
+ Parameters
+ ----------
+ *fields: :class:`tuple`
+ Zero or more tuples of (`field`, `constraint`) indicating constrained
+ fields.
+
+ Notes
+ -----
+ On top of the primary use of `WithComponentsConstraint` (ensuring presence
+ or absence of particular components of a :class:`~pyasn1.type.univ.Set` or
+ :class:`~pyasn1.type.univ.Sequence`), it is also possible to pass any other
+ constraint objects or their combinations. In case of scalar fields, these
+ constraints will be verified in addition to the constraints belonging to
+ scalar components themselves. However, formally, these additional
+ constraints do not change the type of these ASN.1 objects.
+
+ Examples
+ --------
+
+ .. code-block:: python
+
+ class Item(Sequence): # Set is similar
+ '''
+ ASN.1 specification:
+
+ Item ::= SEQUENCE {
+ id INTEGER OPTIONAL,
+ name OCTET STRING OPTIONAL
+ } WITH COMPONENTS id PRESENT, name ABSENT | id ABSENT, name PRESENT
+ '''
+ componentType = NamedTypes(
+ OptionalNamedType('id', Integer()),
+ OptionalNamedType('name', OctetString())
+ )
+ withComponents = ConstraintsUnion(
+ WithComponentsConstraint(
+ ('id', ComponentPresentConstraint()),
+ ('name', ComponentAbsentConstraint())
+ ),
+ WithComponentsConstraint(
+ ('id', ComponentAbsentConstraint()),
+ ('name', ComponentPresentConstraint())
+ )
+ )
+
+ item = Item()
+
+ # This will succeed
+ item['id'] = 1
+
+ # This will succeed
+ item.reset()
+ item['name'] = 'John'
+
+ # This will fail (on encoding)
+ item.reset()
+ item['id'] = 1
+ item['name'] = 'John'
+ """
+ def _testValue(self, value, idx):
+ for field, constraint in self._values:
+ constraint(value.get(field))
+
+ def _setValues(self, values):
+ AbstractConstraint._setValues(self, values)
+
+
+# This is a bit kludgy, meaning two op modes within a single constraint
+class InnerTypeConstraint(AbstractConstraint):
+ """Value must satisfy the type and presence constraints"""
+
+ def _testValue(self, value, idx):
+ if self.__singleTypeConstraint:
+ self.__singleTypeConstraint(value)
+ elif self.__multipleTypeConstraint:
+ if idx not in self.__multipleTypeConstraint:
+ raise error.ValueConstraintError(value)
+ constraint, status = self.__multipleTypeConstraint[idx]
+ if status == 'ABSENT': # XXX presence is not checked!
+ raise error.ValueConstraintError(value)
+ constraint(value)
+
+ def _setValues(self, values):
+ self.__multipleTypeConstraint = {}
+ self.__singleTypeConstraint = None
+ for v in values:
+ if isinstance(v, tuple):
+ self.__multipleTypeConstraint[v[0]] = v[1], v[2]
+ else:
+ self.__singleTypeConstraint = v
+ AbstractConstraint._setValues(self, values)
+
+
+# Logic operations on constraints
+
+class ConstraintsExclusion(AbstractConstraint):
+ """Create a ConstraintsExclusion logic operator object.
+
+ The ConstraintsExclusion logic operator succeeds when the
+ value does *not* satisfy the operand constraint.
+
+ The ConstraintsExclusion object can be applied to
+ any constraint and logic operator object.
+
+ Parameters
+ ----------
+ *constraints:
+ Constraint or logic operator objects.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class LuckyNumber(Integer):
+ subtypeSpec = ConstraintsExclusion(
+ SingleValueConstraint(13)
+ )
+
+ # this will succeed
+ luckyNumber = LuckyNumber(12)
+
+ # this will raise ValueConstraintError
+ luckyNumber = LuckyNumber(13)
+
+ Note
+ ----
+ The `FROM ... EXCEPT ...` ASN.1 clause should be modeled by combining
+ constraint objects into one. See `PermittedAlphabetConstraint` for more
+ information.
+ """
+ def _testValue(self, value, idx):
+ for constraint in self._values:
+ try:
+ constraint(value, idx)
+
+ except error.ValueConstraintError:
+ continue
+
+ raise error.ValueConstraintError(value)
+
+ def _setValues(self, values):
+ AbstractConstraint._setValues(self, values)
+
+
+class AbstractConstraintSet(AbstractConstraint):
+
+ def __getitem__(self, idx):
+ return self._values[idx]
+
+ def __iter__(self):
+ return iter(self._values)
+
+ def __add__(self, value):
+ return self.__class__(*(self._values + (value,)))
+
+ def __radd__(self, value):
+ return self.__class__(*((value,) + self._values))
+
+ def __len__(self):
+ return len(self._values)
+
+ # Constraints inclusion in sets
+
+ def _setValues(self, values):
+ self._values = values
+ for constraint in values:
+ if constraint:
+ self._valueMap.add(constraint)
+ self._valueMap.update(constraint.getValueMap())
+
+
+class ConstraintsIntersection(AbstractConstraintSet):
+ """Create a ConstraintsIntersection logic operator object.
+
+ The ConstraintsIntersection logic operator only succeeds
+ if *all* its operands succeed.
+
+ The ConstraintsIntersection object can be applied to
+ any constraint and logic operator objects.
+
+ The ConstraintsIntersection object duck-types the immutable
+ container object like Python :py:class:`tuple`.
+
+ Parameters
+ ----------
+ *constraints:
+ Constraint or logic operator objects.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class CapitalAndSmall(IA5String):
+ '''
+ ASN.1 specification:
+
+ CapitalAndSmall ::=
+ IA5String (FROM ("A".."Z"|"a".."z"))
+ '''
+ subtypeSpec = ConstraintsIntersection(
+ PermittedAlphabetConstraint('A', 'Z'),
+ PermittedAlphabetConstraint('a', 'z')
+ )
+
+ # this will succeed
+ capital_and_small = CapitalAndSmall('Hello')
+
+ # this will raise ValueConstraintError
+ capital_and_small = CapitalAndSmall('hello')
+ """
+ def _testValue(self, value, idx):
+ for constraint in self._values:
+ constraint(value, idx)
+
+
+class ConstraintsUnion(AbstractConstraintSet):
+ """Create a ConstraintsUnion logic operator object.
+
+ The ConstraintsUnion logic operator succeeds if
+ *at least* a single operand succeeds.
+
+ The ConstraintsUnion object can be applied to
+ any constraint and logic operator objects.
+
+ The ConstraintsUnion object duck-types the immutable
+ container object like Python :py:class:`tuple`.
+
+ Parameters
+ ----------
+ *constraints:
+ Constraint or logic operator objects.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class CapitalOrSmall(IA5String):
+ '''
+ ASN.1 specification:
+
+ CapitalOrSmall ::=
+ IA5String (FROM ("A".."Z") | FROM ("a".."z"))
+ '''
+ subtypeSpec = ConstraintsUnion(
+ PermittedAlphabetConstraint('A', 'Z'),
+ PermittedAlphabetConstraint('a', 'z')
+ )
+
+ # this will succeed
+ capital_or_small = CapitalOrSmall('Hello')
+
+ # this will raise ValueConstraintError
+ capital_or_small = CapitalOrSmall('hello!')
+ """
+ def _testValue(self, value, idx):
+ for constraint in self._values:
+ try:
+ constraint(value, idx)
+ except error.ValueConstraintError:
+ pass
+ else:
+ return
+
+ raise error.ValueConstraintError(
+ 'all of %s failed for "%s"' % (self._values, value)
+ )
+
+# TODO:
+# refactor InnerTypeConstraint
+# add tests for type check
+# implement other constraint types
+# make constraint validation easy to skip
diff --git a/third_party/python/pyasn1/pyasn1/type/error.py b/third_party/python/pyasn1/pyasn1/type/error.py
new file mode 100644
index 0000000000..80fcf3bdcd
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/type/error.py
@@ -0,0 +1,11 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from pyasn1.error import PyAsn1Error
+
+
+class ValueConstraintError(PyAsn1Error):
+ pass
diff --git a/third_party/python/pyasn1/pyasn1/type/namedtype.py b/third_party/python/pyasn1/pyasn1/type/namedtype.py
new file mode 100644
index 0000000000..cbc14293e0
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/type/namedtype.py
@@ -0,0 +1,561 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+import sys
+
+from pyasn1 import error
+from pyasn1.type import tag
+from pyasn1.type import tagmap
+
+__all__ = ['NamedType', 'OptionalNamedType', 'DefaultedNamedType',
+ 'NamedTypes']
+
+try:
+ any
+
+except NameError:
+ any = lambda x: bool(filter(bool, x))
+
+
+class NamedType(object):
+ """Create named field object for a constructed ASN.1 type.
+
+ The |NamedType| object represents a single name and ASN.1 type of a constructed ASN.1 type.
+
+ |NamedType| objects are immutable and duck-type Python :class:`tuple` objects
+ holding *name* and *asn1Object* components.
+
+ Parameters
+ ----------
+ name: :py:class:`str`
+ Field name
+
+ asn1Object:
+ ASN.1 type object
+ """
+ isOptional = False
+ isDefaulted = False
+
+ def __init__(self, name, asn1Object, openType=None):
+ self.__name = name
+ self.__type = asn1Object
+ self.__nameAndType = name, asn1Object
+ self.__openType = openType
+
+ def __repr__(self):
+ representation = '%s=%r' % (self.name, self.asn1Object)
+
+ if self.openType:
+ representation += ', open type %r' % self.openType
+
+ return '<%s object, type %s>' % (
+ self.__class__.__name__, representation)
+
+ def __eq__(self, other):
+ return self.__nameAndType == other
+
+ def __ne__(self, other):
+ return self.__nameAndType != other
+
+ def __lt__(self, other):
+ return self.__nameAndType < other
+
+ def __le__(self, other):
+ return self.__nameAndType <= other
+
+ def __gt__(self, other):
+ return self.__nameAndType > other
+
+ def __ge__(self, other):
+ return self.__nameAndType >= other
+
+ def __hash__(self):
+ return hash(self.__nameAndType)
+
+ def __getitem__(self, idx):
+ return self.__nameAndType[idx]
+
+ def __iter__(self):
+ return iter(self.__nameAndType)
+
+ @property
+ def name(self):
+ return self.__name
+
+ @property
+ def asn1Object(self):
+ return self.__type
+
+ @property
+ def openType(self):
+ return self.__openType
+
+ # Backward compatibility
+
+ def getName(self):
+ return self.name
+
+ def getType(self):
+ return self.asn1Object
+
+
+class OptionalNamedType(NamedType):
+ __doc__ = NamedType.__doc__
+
+ isOptional = True
+
+
+class DefaultedNamedType(NamedType):
+ __doc__ = NamedType.__doc__
+
+ isDefaulted = True
+
+
+class NamedTypes(object):
+ """Create a collection of named fields for a constructed ASN.1 type.
+
+ The NamedTypes object represents a collection of named fields of a constructed ASN.1 type.
+
+ *NamedTypes* objects are immutable and duck-type Python :class:`dict` objects
+ holding *name* as keys and ASN.1 type object as values.
+
+ Parameters
+ ----------
+ *namedTypes: :class:`~pyasn1.type.namedtype.NamedType`
+
+ Examples
+ --------
+
+ .. code-block:: python
+
+ class Description(Sequence):
+ '''
+ ASN.1 specification:
+
+ Description ::= SEQUENCE {
+ surname IA5String,
+ first-name IA5String OPTIONAL,
+ age INTEGER DEFAULT 40
+ }
+ '''
+ componentType = NamedTypes(
+ NamedType('surname', IA5String()),
+ OptionalNamedType('first-name', IA5String()),
+ DefaultedNamedType('age', Integer(40))
+ )
+
+ descr = Description()
+ descr['surname'] = 'Smith'
+ descr['first-name'] = 'John'
+ """
+ def __init__(self, *namedTypes, **kwargs):
+ self.__namedTypes = namedTypes
+ self.__namedTypesLen = len(self.__namedTypes)
+ self.__minTagSet = self.__computeMinTagSet()
+ self.__nameToPosMap = self.__computeNameToPosMap()
+ self.__tagToPosMap = self.__computeTagToPosMap()
+ self.__ambiguousTypes = 'terminal' not in kwargs and self.__computeAmbiguousTypes() or {}
+ self.__uniqueTagMap = self.__computeTagMaps(unique=True)
+ self.__nonUniqueTagMap = self.__computeTagMaps(unique=False)
+ self.__hasOptionalOrDefault = any([True for namedType in self.__namedTypes
+ if namedType.isDefaulted or namedType.isOptional])
+ self.__hasOpenTypes = any([True for namedType in self.__namedTypes
+ if namedType.openType])
+
+ self.__requiredComponents = frozenset(
+ [idx for idx, nt in enumerate(self.__namedTypes) if not nt.isOptional and not nt.isDefaulted]
+ )
+ self.__keys = frozenset([namedType.name for namedType in self.__namedTypes])
+ self.__values = tuple([namedType.asn1Object for namedType in self.__namedTypes])
+ self.__items = tuple([(namedType.name, namedType.asn1Object) for namedType in self.__namedTypes])
+
+ def __repr__(self):
+ representation = ', '.join(['%r' % x for x in self.__namedTypes])
+ return '<%s object, types %s>' % (
+ self.__class__.__name__, representation)
+
+ def __eq__(self, other):
+ return self.__namedTypes == other
+
+ def __ne__(self, other):
+ return self.__namedTypes != other
+
+ def __lt__(self, other):
+ return self.__namedTypes < other
+
+ def __le__(self, other):
+ return self.__namedTypes <= other
+
+ def __gt__(self, other):
+ return self.__namedTypes > other
+
+ def __ge__(self, other):
+ return self.__namedTypes >= other
+
+ def __hash__(self):
+ return hash(self.__namedTypes)
+
+ def __getitem__(self, idx):
+ try:
+ return self.__namedTypes[idx]
+
+ except TypeError:
+ return self.__namedTypes[self.__nameToPosMap[idx]]
+
+ def __contains__(self, key):
+ return key in self.__nameToPosMap
+
+ def __iter__(self):
+ return (x[0] for x in self.__namedTypes)
+
+ if sys.version_info[0] <= 2:
+ def __nonzero__(self):
+ return self.__namedTypesLen > 0
+ else:
+ def __bool__(self):
+ return self.__namedTypesLen > 0
+
+ def __len__(self):
+ return self.__namedTypesLen
+
+ # Python dict protocol
+
+ def values(self):
+ return self.__values
+
+ def keys(self):
+ return self.__keys
+
+ def items(self):
+ return self.__items
+
+ def clone(self):
+ return self.__class__(*self.__namedTypes)
+
+ class PostponedError(object):
+ def __init__(self, errorMsg):
+ self.__errorMsg = errorMsg
+
+ def __getitem__(self, item):
+ raise error.PyAsn1Error(self.__errorMsg)
+
+ def __computeTagToPosMap(self):
+ tagToPosMap = {}
+ for idx, namedType in enumerate(self.__namedTypes):
+ tagMap = namedType.asn1Object.tagMap
+ if isinstance(tagMap, NamedTypes.PostponedError):
+ return tagMap
+ if not tagMap:
+ continue
+ for _tagSet in tagMap.presentTypes:
+ if _tagSet in tagToPosMap:
+ return NamedTypes.PostponedError('Duplicate component tag %s at %s' % (_tagSet, namedType))
+ tagToPosMap[_tagSet] = idx
+
+ return tagToPosMap
+
+ def __computeNameToPosMap(self):
+ nameToPosMap = {}
+ for idx, namedType in enumerate(self.__namedTypes):
+ if namedType.name in nameToPosMap:
+ return NamedTypes.PostponedError('Duplicate component name %s at %s' % (namedType.name, namedType))
+ nameToPosMap[namedType.name] = idx
+
+ return nameToPosMap
+
+ def __computeAmbiguousTypes(self):
+ ambiguousTypes = {}
+ partialAmbiguousTypes = ()
+ for idx, namedType in reversed(tuple(enumerate(self.__namedTypes))):
+ if namedType.isOptional or namedType.isDefaulted:
+ partialAmbiguousTypes = (namedType,) + partialAmbiguousTypes
+ else:
+ partialAmbiguousTypes = (namedType,)
+ if len(partialAmbiguousTypes) == len(self.__namedTypes):
+ ambiguousTypes[idx] = self
+ else:
+ ambiguousTypes[idx] = NamedTypes(*partialAmbiguousTypes, **dict(terminal=True))
+ return ambiguousTypes
+
+ def getTypeByPosition(self, idx):
+ """Return ASN.1 type object by its position in fields set.
+
+ Parameters
+ ----------
+ idx: :py:class:`int`
+ Field index
+
+ Returns
+ -------
+ :
+ ASN.1 type
+
+ Raises
+ ------
+ ~pyasn1.error.PyAsn1Error
+ If given position is out of fields range
+ """
+ try:
+ return self.__namedTypes[idx].asn1Object
+
+ except IndexError:
+ raise error.PyAsn1Error('Type position out of range')
+
+ def getPositionByType(self, tagSet):
+ """Return field position by its ASN.1 type.
+
+ Parameters
+ ----------
+ tagSet: :class:`~pyasn1.type.tag.TagSet`
+ ASN.1 tag set distinguishing one ASN.1 type from others.
+
+ Returns
+ -------
+ : :py:class:`int`
+ ASN.1 type position in fields set
+
+ Raises
+ ------
+ ~pyasn1.error.PyAsn1Error
+ If *tagSet* is not present or ASN.1 types are not unique within callee *NamedTypes*
+ """
+ try:
+ return self.__tagToPosMap[tagSet]
+
+ except KeyError:
+ raise error.PyAsn1Error('Type %s not found' % (tagSet,))
+
+ def getNameByPosition(self, idx):
+ """Return field name by its position in fields set.
+
+ Parameters
+ ----------
+ idx: :py:class:`int`
+ Field index
+
+ Returns
+ -------
+ : :py:class:`str`
+ Field name
+
+ Raises
+ ------
+ ~pyasn1.error.PyAsn1Error
+ If given position is out of fields range
+ """
+ try:
+ return self.__namedTypes[idx].name
+
+ except IndexError:
+ raise error.PyAsn1Error('Type position out of range')
+
+ def getPositionByName(self, name):
+ """Return field position by field name.
+
+ Parameters
+ ----------
+ name: :py:class:`str`
+ Field name
+
+ Returns
+ -------
+ : :py:class:`int`
+ Field position in fields set
+
+ Raises
+ ------
+ ~pyasn1.error.PyAsn1Error
+ If *name* is not present or not unique within callee *NamedTypes*
+ """
+ try:
+ return self.__nameToPosMap[name]
+
+ except KeyError:
+ raise error.PyAsn1Error('Name %s not found' % (name,))
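+
+ # Rough sketch against the Description example in the class docstring
+ # (field positions are illustrative):
+ #
+ #   Description().componentType.getPositionByName('age')   # 2
+ #   Description().componentType.getNameByPosition(0)       # 'surname'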
+
+ def getTagMapNearPosition(self, idx):
+ """Return ASN.1 types that are allowed at or past given field position.
+
+ Some ASN.1 serialisations allow for skipping optional and defaulted fields.
+ Some constructed ASN.1 types allow reordering of the fields. When recovering
+ such objects it may be important to know which types can possibly be
+ present at any given position in the field set.
+
+ Parameters
+ ----------
+ idx: :py:class:`int`
+ Field index
+
+ Returns
+ -------
+ : :class:`~pyasn1.type.tagmap.TagMap`
+ Map of ASN.1 types allowed at given field position
+
+ Raises
+ ------
+ ~pyasn1.error.PyAsn1Error
+ If given position is out of fields range
+ """
+ try:
+ return self.__ambiguousTypes[idx].tagMap
+
+ except KeyError:
+ raise error.PyAsn1Error('Type position out of range')
+
+ def getPositionNearType(self, tagSet, idx):
+ """Return the closest field position where given ASN.1 type is allowed.
+
+ Some ASN.1 serialisations allow for skipping optional and defaulted fields.
+ Some constructed ASN.1 types allow reordering of the fields. When recovering
+ such objects it may be important to know the first field position, at or
+ past *idx*, at which the given *tagSet* is allowed.
+
+ Parameters
+ ----------
+ tagSet: :class:`~pyasn1.type.tag.TagSet`
+ ASN.1 type which field position to look up
+
+ idx: :py:class:`int`
+ Field position at or past which to perform ASN.1 type look up
+
+ Returns
+ -------
+ : :py:class:`int`
+ Field position in fields set
+
+ Raises
+ ------
+ ~pyasn1.error.PyAsn1Error
+ If *tagSet* is not present or not unique within callee *NamedTypes*
+ or *idx* is out of fields range
+ """
+ try:
+ return idx + self.__ambiguousTypes[idx].getPositionByType(tagSet)
+
+ except KeyError:
+ raise error.PyAsn1Error('Type position out of range')
+
+ def __computeMinTagSet(self):
+ minTagSet = None
+ for namedType in self.__namedTypes:
+ asn1Object = namedType.asn1Object
+
+ try:
+ tagSet = asn1Object.minTagSet
+
+ except AttributeError:
+ tagSet = asn1Object.tagSet
+
+ if minTagSet is None or tagSet < minTagSet:
+ minTagSet = tagSet
+
+ return minTagSet or tag.TagSet()
+
+ @property
+ def minTagSet(self):
+ """Return the minimal TagSet among ASN.1 type in callee *NamedTypes*.
+
+ Some ASN.1 types/serialisation protocols require ASN.1 types to be
+ arranged based on their numerical tag value. The *minTagSet* property
+ returns the smallest tag set among the contained types.
+
+ Returns
+ -------
+ : :class:`~pyasn1.type.tag.TagSet`
+ Minimal TagSet among ASN.1 types in callee *NamedTypes*
+ """
+ return self.__minTagSet
+
+ def __computeTagMaps(self, unique):
+ presentTypes = {}
+ skipTypes = {}
+ defaultType = None
+ for namedType in self.__namedTypes:
+ tagMap = namedType.asn1Object.tagMap
+ if isinstance(tagMap, NamedTypes.PostponedError):
+ return tagMap
+ for tagSet in tagMap:
+ if unique and tagSet in presentTypes:
+ return NamedTypes.PostponedError('Non-unique tagSet %s of %s at %s' % (tagSet, namedType, self))
+ presentTypes[tagSet] = namedType.asn1Object
+ skipTypes.update(tagMap.skipTypes)
+
+ if defaultType is None:
+ defaultType = tagMap.defaultType
+ elif tagMap.defaultType is not None:
+ return NamedTypes.PostponedError('Duplicate default ASN.1 type at %s' % (self,))
+
+ return tagmap.TagMap(presentTypes, skipTypes, defaultType)
+
+ @property
+ def tagMap(self):
+ """Return a *TagMap* object from tags and types recursively.
+
+ Return a :class:`~pyasn1.type.tagmap.TagMap` object by
+ combining tags from *TagMap* objects of children types and
+ associating them with their immediate child type.
+
+ Example
+ -------
+ .. code-block:: python
+
+ OuterType ::= CHOICE {
+ innerType INTEGER
+ }
+
+ Calling *.tagMap* on *OuterType* will yield a map like this:
+
+ .. code-block:: python
+
+ Integer.tagSet -> Choice
+ """
+ return self.__nonUniqueTagMap
+
+ @property
+ def tagMapUnique(self):
+ """Return a *TagMap* object from unique tags and types recursively.
+
+ Return a :class:`~pyasn1.type.tagmap.TagMap` object by
+ combining tags from *TagMap* objects of children types and
+ associating them with their immediate child type.
+
+ Example
+ -------
+ .. code-block:: python
+
+ OuterType ::= CHOICE {
+ innerType INTEGER
+ }
+
+ Calling *.tagMapUnique* on *OuterType* will yield a map like this:
+
+ .. code-block:: python
+
+ Integer.tagSet -> Choice
+
+ Note
+ ----
+
+ Duplicate *TagSet* objects found in the tree of children
+ types would cause an error.
+ """
+ return self.__uniqueTagMap
+
+ @property
+ def hasOptionalOrDefault(self):
+ return self.__hasOptionalOrDefault
+
+ @property
+ def hasOpenTypes(self):
+ return self.__hasOpenTypes
+
+ @property
+ def namedTypes(self):
+ return tuple(self.__namedTypes)
+
+ @property
+ def requiredComponents(self):
+ return self.__requiredComponents
diff --git a/third_party/python/pyasn1/pyasn1/type/namedval.py b/third_party/python/pyasn1/pyasn1/type/namedval.py
new file mode 100644
index 0000000000..424759784b
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/type/namedval.py
@@ -0,0 +1,192 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+# ASN.1 named integers
+#
+from pyasn1 import error
+
+__all__ = ['NamedValues']
+
+
+class NamedValues(object):
+ """Create named values object.
+
+ The |NamedValues| object represents a collection of string names
+ associated with numeric IDs. These objects are used for giving
+ names to otherwise numerical values.
+
+ |NamedValues| objects are immutable and duck-type Python
+ :class:`dict` objects mapping ID to name and vice-versa.
+
+ Parameters
+ ----------
+ *args: variable number of two-element :py:class:`tuple`
+
+ name: :py:class:`str`
+ Value label
+
+ value: :py:class:`int`
+ Numeric value
+
+ Keyword Args
+ ------------
+ name: :py:class:`str`
+ Value label
+
+ value: :py:class:`int`
+ Numeric value
+
+ Examples
+ --------
+
+ .. code-block:: pycon
+
+ >>> nv = NamedValues('a', 'b', ('c', 0), d=1)
+ >>> dict(nv)
+ {'c': 0, 'd': 1, 'a': 2, 'b': 3}
+ >>> nv[0]
+ 'c'
+ >>> nv['a']
+ 2
+ """
+ def __init__(self, *args, **kwargs):
+ self.__names = {}
+ self.__numbers = {}
+
+ anonymousNames = []
+
+ for namedValue in args:
+ if isinstance(namedValue, (tuple, list)):
+ try:
+ name, number = namedValue
+
+ except ValueError:
+ raise error.PyAsn1Error('Not a proper attribute-value pair %r' % (namedValue,))
+
+ else:
+ anonymousNames.append(namedValue)
+ continue
+
+ if name in self.__names:
+ raise error.PyAsn1Error('Duplicate name %s' % (name,))
+
+ if number in self.__numbers:
+ raise error.PyAsn1Error('Duplicate number %s=%s' % (name, number))
+
+ self.__names[name] = number
+ self.__numbers[number] = name
+
+ for name, number in kwargs.items():
+ if name in self.__names:
+ raise error.PyAsn1Error('Duplicate name %s' % (name,))
+
+ if number in self.__numbers:
+ raise error.PyAsn1Error('Duplicate number %s=%s' % (name, number))
+
+ self.__names[name] = number
+ self.__numbers[number] = name
+
+ if anonymousNames:
+
+ number = self.__numbers and max(self.__numbers) + 1 or 0
+
+ for name in anonymousNames:
+
+ if name in self.__names:
+ raise error.PyAsn1Error('Duplicate name %s' % (name,))
+
+ self.__names[name] = number
+ self.__numbers[number] = name
+
+ number += 1
+
+ def __repr__(self):
+ representation = ', '.join(['%s=%d' % x for x in self.items()])
+
+ if len(representation) > 64:
+ representation = representation[:32] + '...' + representation[-32:]
+
+ return '<%s object, enums %s>' % (
+ self.__class__.__name__, representation)
+
+ def __eq__(self, other):
+ return dict(self) == other
+
+ def __ne__(self, other):
+ return dict(self) != other
+
+ def __lt__(self, other):
+ return dict(self) < other
+
+ def __le__(self, other):
+ return dict(self) <= other
+
+ def __gt__(self, other):
+ return dict(self) > other
+
+ def __ge__(self, other):
+ return dict(self) >= other
+
+ def __hash__(self):
+ return hash(self.items())
+
+ # Python dict protocol (read-only)
+
+ def __getitem__(self, key):
+ try:
+ return self.__numbers[key]
+
+ except KeyError:
+ return self.__names[key]
+
+ def __len__(self):
+ return len(self.__names)
+
+ def __contains__(self, key):
+ return key in self.__names or key in self.__numbers
+
+ def __iter__(self):
+ return iter(self.__names)
+
+ def values(self):
+ return iter(self.__numbers)
+
+ def keys(self):
+ return iter(self.__names)
+
+ def items(self):
+ for name in self.__names:
+ yield name, self.__names[name]
+
+ # support merging
+
+ def __add__(self, namedValues):
+ return self.__class__(*tuple(self.items()) + tuple(namedValues.items()))
+
+ # XXX clone/subtype?
+
+ def clone(self, *args, **kwargs):
+ new = self.__class__(*args, **kwargs)
+ return self + new
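+
+ # Illustrative sketch only (not part of the upstream module): NamedValues
+ # objects merge with `+`, and clone() layers extra pairs on top of a copy.
+ #
+ #   base = NamedValues(('off', 0), ('on', 1))
+ #   extended = base.clone(('auto', 2))   # same as base + NamedValues(('auto', 2))
+ #   assert extended['auto'] == 2 and extended[0] == 'off'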
+
+ # legacy protocol
+
+ def getName(self, value):
+ if value in self.__numbers:
+ return self.__numbers[value]
+
+ def getValue(self, name):
+ if name in self.__names:
+ return self.__names[name]
+
+ def getValues(self, *names):
+ try:
+ return [self.__names[name] for name in names]
+
+ except KeyError:
+ raise error.PyAsn1Error(
+ 'Unknown bit identifier(s): %s' % (set(names).difference(self.__names),)
+ )
diff --git a/third_party/python/pyasn1/pyasn1/type/opentype.py b/third_party/python/pyasn1/pyasn1/type/opentype.py
new file mode 100644
index 0000000000..29645f0f8d
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/type/opentype.py
@@ -0,0 +1,104 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+
+__all__ = ['OpenType']
+
+
+class OpenType(object):
+ """Create ASN.1 type map indexed by a value
+
+ The *OpenType* object models an untyped field of a constructed ASN.1
+ type. In ASN.1 syntax it is usually represented by the
+ `ANY DEFINED BY` clause for scalars, or by `SET OF ANY DEFINED BY` /
+ `SEQUENCE OF ANY DEFINED BY` for container types. Typically
+ used together with the :class:`~pyasn1.type.univ.Any` type.
+
+ OpenType objects duck-type read-only Python :class:`dict` objects;
+ however, the passed `typeMap` is not copied, but stored by reference.
+ That means the user can manipulate `typeMap` at run time and have the
+ changes reflected in *OpenType* object behavior.
+
+ Parameters
+ ----------
+ name: :py:class:`str`
+ Field name
+
+ typeMap: :py:class:`dict`
+ A map of value->ASN.1 type. It's stored by reference and can be
+ mutated later to register new mappings.
+
+ Examples
+ --------
+
+ For untyped scalars:
+
+ .. code-block:: python
+
+ openType = OpenType(
+ 'id', {1: Integer(),
+ 2: OctetString()}
+ )
+ Sequence(
+ componentType=NamedTypes(
+ NamedType('id', Integer()),
+ NamedType('blob', Any(), openType=openType)
+ )
+ )
+
+ For untyped `SET OF` or `SEQUENCE OF` vectors:
+
+ .. code-block:: python
+
+ openType = OpenType(
+ 'id', {1: Integer(),
+ 2: OctetString()}
+ )
+ Sequence(
+ componentType=NamedTypes(
+ NamedType('id', Integer()),
+ NamedType('blob', SetOf(componentType=Any()),
+ openType=openType)
+ )
+ )
+ """
+
+ def __init__(self, name, typeMap=None):
+ self.__name = name
+ if typeMap is None:
+ self.__typeMap = {}
+ else:
+ self.__typeMap = typeMap
+
+ @property
+ def name(self):
+ return self.__name
+
+ # Python dict protocol
+
+ def values(self):
+ return self.__typeMap.values()
+
+ def keys(self):
+ return self.__typeMap.keys()
+
+ def items(self):
+ return self.__typeMap.items()
+
+ def __contains__(self, key):
+ return key in self.__typeMap
+
+ def __getitem__(self, key):
+ return self.__typeMap[key]
+
+ def __iter__(self):
+ return iter(self.__typeMap)
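+
+ # Illustrative sketch only (not part of the upstream module): because the
+ # typeMap is held by reference, mappings registered after construction are
+ # still honoured (Integer/OctetString are assumed from pyasn1.type.univ).
+ #
+ #   typeMap = {1: Integer()}
+ #   openType = OpenType('id', typeMap)
+ #   typeMap[2] = OctetString()          # registered later
+ #   assert 2 in openType and isinstance(openType[2], OctetString)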
diff --git a/third_party/python/pyasn1/pyasn1/type/tag.py b/third_party/python/pyasn1/pyasn1/type/tag.py
new file mode 100644
index 0000000000..b88a73417a
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/type/tag.py
@@ -0,0 +1,335 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from pyasn1 import error
+
+__all__ = ['tagClassUniversal', 'tagClassApplication', 'tagClassContext',
+ 'tagClassPrivate', 'tagFormatSimple', 'tagFormatConstructed',
+ 'tagCategoryImplicit', 'tagCategoryExplicit',
+ 'tagCategoryUntagged', 'Tag', 'TagSet']
+
+#: Identifier for ASN.1 class UNIVERSAL
+tagClassUniversal = 0x00
+
+#: Identifier for ASN.1 class APPLICATION
+tagClassApplication = 0x40
+
+#: Identifier for ASN.1 class context-specific
+tagClassContext = 0x80
+
+#: Identifier for ASN.1 class private
+tagClassPrivate = 0xC0
+
+#: Identifier for "simple" ASN.1 structure (e.g. scalar)
+tagFormatSimple = 0x00
+
+#: Identifier for "constructed" ASN.1 structure (e.g. may have inner components)
+tagFormatConstructed = 0x20
+
+tagCategoryImplicit = 0x01
+tagCategoryExplicit = 0x02
+tagCategoryUntagged = 0x04
+
+
+class Tag(object):
+ """Create ASN.1 tag
+
+ Represents an ASN.1 tag that can be attached to an ASN.1 type to make
+ types distinguishable from each other.
+
+ *Tag* objects are immutable and duck-type Python :class:`tuple` objects
+ holding three integer components of a tag.
+
+ Parameters
+ ----------
+ tagClass: :py:class:`int`
+ Tag *class* value
+
+ tagFormat: :py:class:`int`
+ Tag *format* value
+
+ tagId: :py:class:`int`
+ Tag ID value
+ """
+ def __init__(self, tagClass, tagFormat, tagId):
+ if tagId < 0:
+ raise error.PyAsn1Error('Negative tag ID (%s) not allowed' % tagId)
+ self.__tagClass = tagClass
+ self.__tagFormat = tagFormat
+ self.__tagId = tagId
+ self.__tagClassId = tagClass, tagId
+ self.__hash = hash(self.__tagClassId)
+
+ def __repr__(self):
+ representation = '[%s:%s:%s]' % (
+ self.__tagClass, self.__tagFormat, self.__tagId)
+ return '<%s object, tag %s>' % (
+ self.__class__.__name__, representation)
+
+ def __eq__(self, other):
+ return self.__tagClassId == other
+
+ def __ne__(self, other):
+ return self.__tagClassId != other
+
+ def __lt__(self, other):
+ return self.__tagClassId < other
+
+ def __le__(self, other):
+ return self.__tagClassId <= other
+
+ def __gt__(self, other):
+ return self.__tagClassId > other
+
+ def __ge__(self, other):
+ return self.__tagClassId >= other
+
+ def __hash__(self):
+ return self.__hash
+
+ def __getitem__(self, idx):
+ if idx == 0:
+ return self.__tagClass
+ elif idx == 1:
+ return self.__tagFormat
+ elif idx == 2:
+ return self.__tagId
+ else:
+ raise IndexError()
+
+ def __iter__(self):
+ yield self.__tagClass
+ yield self.__tagFormat
+ yield self.__tagId
+
+ def __and__(self, otherTag):
+ return self.__class__(self.__tagClass & otherTag.tagClass,
+ self.__tagFormat & otherTag.tagFormat,
+ self.__tagId & otherTag.tagId)
+
+ def __or__(self, otherTag):
+ return self.__class__(self.__tagClass | otherTag.tagClass,
+ self.__tagFormat | otherTag.tagFormat,
+ self.__tagId | otherTag.tagId)
+
+ @property
+ def tagClass(self):
+ """ASN.1 tag class
+
+ Returns
+ -------
+ : :py:class:`int`
+ Tag class
+ """
+ return self.__tagClass
+
+ @property
+ def tagFormat(self):
+ """ASN.1 tag format
+
+ Returns
+ -------
+ : :py:class:`int`
+ Tag format
+ """
+ return self.__tagFormat
+
+ @property
+ def tagId(self):
+ """ASN.1 tag ID
+
+ Returns
+ -------
+ : :py:class:`int`
+ Tag ID
+ """
+ return self.__tagId
+
+
+class TagSet(object):
+ """Create a collection of ASN.1 tags
+
+ Represents a combination of :class:`~pyasn1.type.tag.Tag` objects
+ that can be attached to an ASN.1 type to make types distinguishable
+ from each other.
+
+ *TagSet* objects are immutable and duck-type Python :class:`tuple` objects
+ holding an arbitrary number of :class:`~pyasn1.type.tag.Tag` objects.
+
+ Parameters
+ ----------
+ baseTag: :class:`~pyasn1.type.tag.Tag`
+ Base *Tag* object. This tag survives IMPLICIT tagging.
+
+ *superTags: :class:`~pyasn1.type.tag.Tag`
+ Additional *Tag* objects taking part in subtyping.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class OrderNumber(NumericString):
+ '''
+ ASN.1 specification
+
+ Order-number ::=
+ [APPLICATION 5] IMPLICIT NumericString
+ '''
+ tagSet = NumericString.tagSet.tagImplicitly(
+ Tag(tagClassApplication, tagFormatSimple, 5)
+ )
+
+ orderNumber = OrderNumber('1234')
+ """
+ def __init__(self, baseTag=(), *superTags):
+ self.__baseTag = baseTag
+ self.__superTags = superTags
+ self.__superTagsClassId = tuple(
+ [(superTag.tagClass, superTag.tagId) for superTag in superTags]
+ )
+ self.__lenOfSuperTags = len(superTags)
+ self.__hash = hash(self.__superTagsClassId)
+
+ def __repr__(self):
+ representation = '-'.join(['%s:%s:%s' % (x.tagClass, x.tagFormat, x.tagId)
+ for x in self.__superTags])
+ if representation:
+ representation = 'tags ' + representation
+ else:
+ representation = 'untagged'
+
+ return '<%s object, %s>' % (self.__class__.__name__, representation)
+
+ def __add__(self, superTag):
+ return self.__class__(self.__baseTag, *self.__superTags + (superTag,))
+
+ def __radd__(self, superTag):
+ return self.__class__(self.__baseTag, *(superTag,) + self.__superTags)
+
+ def __getitem__(self, i):
+ if i.__class__ is slice:
+ return self.__class__(self.__baseTag, *self.__superTags[i])
+ else:
+ return self.__superTags[i]
+
+ def __eq__(self, other):
+ return self.__superTagsClassId == other
+
+ def __ne__(self, other):
+ return self.__superTagsClassId != other
+
+ def __lt__(self, other):
+ return self.__superTagsClassId < other
+
+ def __le__(self, other):
+ return self.__superTagsClassId <= other
+
+ def __gt__(self, other):
+ return self.__superTagsClassId > other
+
+ def __ge__(self, other):
+ return self.__superTagsClassId >= other
+
+ def __hash__(self):
+ return self.__hash
+
+ def __len__(self):
+ return self.__lenOfSuperTags
+
+ @property
+ def baseTag(self):
+ """Return base ASN.1 tag
+
+ Returns
+ -------
+ : :class:`~pyasn1.type.tag.Tag`
+ Base tag of this *TagSet*
+ """
+ return self.__baseTag
+
+ @property
+ def superTags(self):
+ """Return ASN.1 tags
+
+ Returns
+ -------
+ : :py:class:`tuple`
+ Tuple of :class:`~pyasn1.type.tag.Tag` objects that this *TagSet* contains
+ """
+ return self.__superTags
+
+ def tagExplicitly(self, superTag):
+ """Return explicitly tagged *TagSet*
+
+ Create a new *TagSet* representing callee *TagSet* explicitly tagged
+ with passed tag(s). With explicit tagging mode, new tags are appended
+ to existing tag(s).
+
+ Parameters
+ ----------
+ superTag: :class:`~pyasn1.type.tag.Tag`
+ *Tag* object to tag this *TagSet*
+
+ Returns
+ -------
+ : :class:`~pyasn1.type.tag.TagSet`
+ New *TagSet* object
+ """
+ if superTag.tagClass == tagClassUniversal:
+ raise error.PyAsn1Error("Can't tag with UNIVERSAL class tag")
+ if superTag.tagFormat != tagFormatConstructed:
+ superTag = Tag(superTag.tagClass, tagFormatConstructed, superTag.tagId)
+ return self + superTag
+
+ def tagImplicitly(self, superTag):
+ """Return implicitly tagged *TagSet*
+
+ Create a new *TagSet* representing callee *TagSet* implicitly tagged
+ with passed tag(s). With implicit tagging mode, new tag(s) replace the
+ last existing tag.
+
+ Parameters
+ ----------
+ superTag: :class:`~pyasn1.type.tag.Tag`
+ *Tag* object to tag this *TagSet*
+
+ Returns
+ -------
+ : :class:`~pyasn1.type.tag.TagSet`
+ New *TagSet* object
+ """
+ if self.__superTags:
+ superTag = Tag(superTag.tagClass, self.__superTags[-1].tagFormat, superTag.tagId)
+ return self[:-1] + superTag
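+
+ # Illustrative sketch only (not part of the upstream module): explicit
+ # tagging appends a tag while implicit tagging replaces the outermost one.
+ #
+ #   base = initTagSet(Tag(tagClassUniversal, tagFormatSimple, 0x02))  # INTEGER
+ #   explicit = base.tagExplicitly(Tag(tagClassContext, tagFormatConstructed, 0))
+ #   implicit = base.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 0))
+ #   assert len(explicit) == len(base) + 1
+ #   assert len(implicit) == len(base)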
+
+ def isSuperTagSetOf(self, tagSet):
+ """Test type relationship against given *TagSet*
+
+ The callee is considered to be a supertype of the given *TagSet*
+ tag-wise if all of the callee's tags are present at the beginning of
+ the given *TagSet*, in the same order.
+
+ Parameters
+ ----------
+ tagSet: :class:`~pyasn1.type.tag.TagSet`
+ *TagSet* object to evaluate against the callee
+
+ Returns
+ -------
+ : :py:class:`bool`
+ :obj:`True` if callee is a supertype of *tagSet*
+ """
+ if len(tagSet) < self.__lenOfSuperTags:
+ return False
+ return self.__superTags == tagSet[:self.__lenOfSuperTags]
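+
+ # Illustrative sketch only (not part of the upstream module): a TagSet is a
+ # super tag set of another when its own tags form the other's leading tags.
+ #
+ #   base = initTagSet(Tag(tagClassContext, tagFormatSimple, 1))
+ #   derived = base + Tag(tagClassContext, tagFormatConstructed, 2)
+ #   assert base.isSuperTagSetOf(derived)
+ #   assert not derived.isSuperTagSetOf(base)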
+
+ # Backward compatibility
+
+ def getBaseTag(self):
+ return self.__baseTag
+
+def initTagSet(tag):
+ return TagSet(tag, tag)
diff --git a/third_party/python/pyasn1/pyasn1/type/tagmap.py b/third_party/python/pyasn1/pyasn1/type/tagmap.py
new file mode 100644
index 0000000000..6f5163b4e8
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/type/tagmap.py
@@ -0,0 +1,96 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+from pyasn1 import error
+
+__all__ = ['TagMap']
+
+
+class TagMap(object):
+ """Map *TagSet* objects to ASN.1 types
+
+ Create an object mapping *TagSet* object to ASN.1 type.
+
+ *TagMap* objects are immutable and duck-type read-only Python
+ :class:`dict` objects holding *TagSet* objects as keys and ASN.1
+ type objects as values.
+
+ Parameters
+ ----------
+ presentTypes: :py:class:`dict`
+ Map of :class:`~pyasn1.type.tag.TagSet` to ASN.1 objects considered
+ as being unconditionally present in the *TagMap*.
+
+ skipTypes: :py:class:`dict`
+ A collection of :class:`~pyasn1.type.tag.TagSet` objects considered
+ as absent in the *TagMap* even when *defaultType* is present.
+
+ defaultType: ASN.1 type object
+ An ASN.1 type object that the callee *TagMap* returns for any *TagSet* key not
+ present in *presentTypes* (unless the given key is present in *skipTypes*).
+ """
+ def __init__(self, presentTypes=None, skipTypes=None, defaultType=None):
+ self.__presentTypes = presentTypes or {}
+ self.__skipTypes = skipTypes or {}
+ self.__defaultType = defaultType
+
+ def __contains__(self, tagSet):
+ return (tagSet in self.__presentTypes or
+ self.__defaultType is not None and tagSet not in self.__skipTypes)
+
+ def __getitem__(self, tagSet):
+ try:
+ return self.__presentTypes[tagSet]
+ except KeyError:
+ if self.__defaultType is None:
+ raise KeyError()
+ elif tagSet in self.__skipTypes:
+ raise error.PyAsn1Error('Key in negative map')
+ else:
+ return self.__defaultType
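+
+ # Illustrative lookup sketch only (not part of the upstream module), assuming
+ # Integer/Boolean/Null/OctetString from pyasn1.type.univ: keys hit
+ # presentTypes first, then fall back to defaultType unless skipped.
+ #
+ #   tagMap = TagMap(
+ #       presentTypes={Integer.tagSet: Integer()},
+ #       skipTypes={Boolean.tagSet: Boolean()},
+ #       defaultType=OctetString()
+ #   )
+ #   tagMap[Integer.tagSet]    # -> the Integer() instance
+ #   tagMap[Null.tagSet]       # -> the OctetString() default
+ #   tagMap[Boolean.tagSet]    # raises PyAsn1Error('Key in negative map')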
+
+ def __iter__(self):
+ return iter(self.__presentTypes)
+
+ def __repr__(self):
+ representation = '%s object' % self.__class__.__name__
+
+ if self.__presentTypes:
+ representation += ', present %s' % repr(self.__presentTypes)
+
+ if self.__skipTypes:
+ representation += ', skip %s' % repr(self.__skipTypes)
+
+ if self.__defaultType is not None:
+ representation += ', default %s' % repr(self.__defaultType)
+
+ return '<%s>' % representation
+
+ @property
+ def presentTypes(self):
+ """Return *TagSet* to ASN.1 type map present in callee *TagMap*"""
+ return self.__presentTypes
+
+ @property
+ def skipTypes(self):
+ """Return *TagSet* collection unconditionally absent in callee *TagMap*"""
+ return self.__skipTypes
+
+ @property
+ def defaultType(self):
+ """Return default ASN.1 type being returned for any missing *TagSet*"""
+ return self.__defaultType
+
+ # Backward compatibility
+
+ def getPosMap(self):
+ return self.presentTypes
+
+ def getNegMap(self):
+ return self.skipTypes
+
+ def getDef(self):
+ return self.defaultType
diff --git a/third_party/python/pyasn1/pyasn1/type/univ.py b/third_party/python/pyasn1/pyasn1/type/univ.py
new file mode 100644
index 0000000000..aa688b22af
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/type/univ.py
@@ -0,0 +1,3321 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+import math
+import sys
+
+from pyasn1 import error
+from pyasn1.codec.ber import eoo
+from pyasn1.compat import binary
+from pyasn1.compat import integer
+from pyasn1.compat import octets
+from pyasn1.type import base
+from pyasn1.type import constraint
+from pyasn1.type import namedtype
+from pyasn1.type import namedval
+from pyasn1.type import tag
+from pyasn1.type import tagmap
+
+NoValue = base.NoValue
+noValue = NoValue()
+
+__all__ = ['Integer', 'Boolean', 'BitString', 'OctetString', 'Null',
+ 'ObjectIdentifier', 'Real', 'Enumerated',
+ 'SequenceOfAndSetOfBase', 'SequenceOf', 'SetOf',
+ 'SequenceAndSetBase', 'Sequence', 'Set', 'Choice', 'Any',
+ 'NoValue', 'noValue']
+
+# "Simple" ASN.1 types (yet incomplete)
+
+
+class Integer(base.SimpleAsn1Type):
+ """Create |ASN.1| schema or value object.
+
+ |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
+ objects are immutable and duck-type Python :class:`int` objects.
+
+ Keyword Args
+ ------------
+ value: :class:`int`, :class:`str` or |ASN.1| object
+ Python :class:`int` or :class:`str` literal or |ASN.1| class
+ instance. If `value` is not given, schema object will be created.
+
+ tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing non-default ASN.1 tag(s)
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Object representing non-default ASN.1 subtype constraint(s). Constraints
+ verification for |ASN.1| type occurs automatically on object
+ instantiation.
+
+ namedValues: :py:class:`~pyasn1.type.namedval.NamedValues`
+ Object representing non-default symbolic aliases for numbers
+
+ Raises
+ ------
+ ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
+ On constraint violation or bad initializer.
+
+ Examples
+ --------
+
+ .. code-block:: python
+
+ class ErrorCode(Integer):
+ '''
+ ASN.1 specification:
+
+ ErrorCode ::=
+ INTEGER { disk-full(1), no-disk(-1),
+ disk-not-formatted(2) }
+
+ error ErrorCode ::= disk-full
+ '''
+ namedValues = NamedValues(
+ ('disk-full', 1), ('no-disk', -1),
+ ('disk-not-formatted', 2)
+ )
+
+ error = ErrorCode('disk-full')
+ """
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x02)
+ )
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
+ #: imposing constraints on |ASN.1| type initialization values.
+ subtypeSpec = constraint.ConstraintsIntersection()
+
+ #: Default :py:class:`~pyasn1.type.namedval.NamedValues` object
+ #: representing symbolic aliases for numbers
+ namedValues = namedval.NamedValues()
+
+ # Optimization for faster codec lookup
+ typeId = base.SimpleAsn1Type.getTypeId()
+
+ def __init__(self, value=noValue, **kwargs):
+ if 'namedValues' not in kwargs:
+ kwargs['namedValues'] = self.namedValues
+
+ base.SimpleAsn1Type.__init__(self, value, **kwargs)
+
+ def __and__(self, value):
+ return self.clone(self._value & value)
+
+ def __rand__(self, value):
+ return self.clone(value & self._value)
+
+ def __or__(self, value):
+ return self.clone(self._value | value)
+
+ def __ror__(self, value):
+ return self.clone(value | self._value)
+
+ def __xor__(self, value):
+ return self.clone(self._value ^ value)
+
+ def __rxor__(self, value):
+ return self.clone(value ^ self._value)
+
+ def __lshift__(self, value):
+ return self.clone(self._value << value)
+
+ def __rshift__(self, value):
+ return self.clone(self._value >> value)
+
+ def __add__(self, value):
+ return self.clone(self._value + value)
+
+ def __radd__(self, value):
+ return self.clone(value + self._value)
+
+ def __sub__(self, value):
+ return self.clone(self._value - value)
+
+ def __rsub__(self, value):
+ return self.clone(value - self._value)
+
+ def __mul__(self, value):
+ return self.clone(self._value * value)
+
+ def __rmul__(self, value):
+ return self.clone(value * self._value)
+
+ def __mod__(self, value):
+ return self.clone(self._value % value)
+
+ def __rmod__(self, value):
+ return self.clone(value % self._value)
+
+ def __pow__(self, value, modulo=None):
+ return self.clone(pow(self._value, value, modulo))
+
+ def __rpow__(self, value):
+ return self.clone(pow(value, self._value))
+
+ def __floordiv__(self, value):
+ return self.clone(self._value // value)
+
+ def __rfloordiv__(self, value):
+ return self.clone(value // self._value)
+
+ if sys.version_info[0] <= 2:
+ def __div__(self, value):
+ if isinstance(value, float):
+ return Real(self._value / value)
+ else:
+ return self.clone(self._value / value)
+
+ def __rdiv__(self, value):
+ if isinstance(value, float):
+ return Real(value / self._value)
+ else:
+ return self.clone(value / self._value)
+ else:
+ def __truediv__(self, value):
+ return Real(self._value / value)
+
+ def __rtruediv__(self, value):
+ return Real(value / self._value)
+
+ def __divmod__(self, value):
+ return self.clone(divmod(self._value, value))
+
+ def __rdivmod__(self, value):
+ return self.clone(divmod(value, self._value))
+
+ __hash__ = base.SimpleAsn1Type.__hash__
+
+ def __int__(self):
+ return int(self._value)
+
+ if sys.version_info[0] <= 2:
+ def __long__(self):
+ return long(self._value)
+
+ def __float__(self):
+ return float(self._value)
+
+ def __abs__(self):
+ return self.clone(abs(self._value))
+
+ def __index__(self):
+ return int(self._value)
+
+ def __pos__(self):
+ return self.clone(+self._value)
+
+ def __neg__(self):
+ return self.clone(-self._value)
+
+ def __invert__(self):
+ return self.clone(~self._value)
+
+ def __round__(self, n=0):
+ r = round(self._value, n)
+ if n:
+ return self.clone(r)
+ else:
+ return r
+
+ def __floor__(self):
+ return math.floor(self._value)
+
+ def __ceil__(self):
+ return math.ceil(self._value)
+
+ if sys.version_info[0:2] > (2, 5):
+ def __trunc__(self):
+ return self.clone(math.trunc(self._value))
+
+ def __lt__(self, value):
+ return self._value < value
+
+ def __le__(self, value):
+ return self._value <= value
+
+ def __eq__(self, value):
+ return self._value == value
+
+ def __ne__(self, value):
+ return self._value != value
+
+ def __gt__(self, value):
+ return self._value > value
+
+ def __ge__(self, value):
+ return self._value >= value
+
+ def prettyIn(self, value):
+ try:
+ return int(value)
+
+ except ValueError:
+ try:
+ return self.namedValues[value]
+
+ except KeyError:
+ raise error.PyAsn1Error(
+ 'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1])
+ )
+
+ def prettyOut(self, value):
+ try:
+ return str(self.namedValues[value])
+
+ except KeyError:
+ return str(value)
+
+ # backward compatibility
+
+ def getNamedValues(self):
+ return self.namedValues
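+
+ # Illustrative sketch only (not part of the upstream module): arithmetic on
+ # Integer values yields new Integer objects and named values round-trip
+ # through prettyIn()/prettyOut() (NamedValues assumed from pyasn1.type.namedval).
+ #
+ #   errorCode = Integer(1, namedValues=NamedValues(('disk-full', 1)))
+ #   assert errorCode.prettyPrint() == 'disk-full'
+ #   assert int(errorCode + 1) == 2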
+
+
+class Boolean(Integer):
+ """Create |ASN.1| schema or value object.
+
+ |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
+ objects are immutable and duck-type Python :class:`int` objects.
+
+ Keyword Args
+ ------------
+ value: :class:`int`, :class:`str` or |ASN.1| object
+ Python :class:`int` or :class:`str` literal or |ASN.1| class
+ instance. If `value` is not given, schema object will be created.
+
+ tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing non-default ASN.1 tag(s)
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Object representing non-default ASN.1 subtype constraint(s). Constraints
+ verification for |ASN.1| type occurs automatically on object
+ instantiation.
+
+ namedValues: :py:class:`~pyasn1.type.namedval.NamedValues`
+ Object representing non-default symbolic aliases for numbers
+
+ Raises
+ ------
+ ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
+ On constraint violation or bad initializer.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class RoundResult(Boolean):
+ '''
+ ASN.1 specification:
+
+ RoundResult ::= BOOLEAN
+
+ ok RoundResult ::= TRUE
+ ko RoundResult ::= FALSE
+ '''
+ ok = RoundResult(True)
+ ko = RoundResult(False)
+ """
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x01),
+ )
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
+ #: imposing constraints on |ASN.1| type initialization values.
+ subtypeSpec = Integer.subtypeSpec + constraint.SingleValueConstraint(0, 1)
+
+ #: Default :py:class:`~pyasn1.type.namedval.NamedValues` object
+ #: representing symbolic aliases for numbers
+ namedValues = namedval.NamedValues(('False', 0), ('True', 1))
+
+ # Optimization for faster codec lookup
+ typeId = Integer.getTypeId()
+
+if sys.version_info[0] < 3:
+ SizedIntegerBase = long
+else:
+ SizedIntegerBase = int
+
+
+class SizedInteger(SizedIntegerBase):
+ bitLength = leadingZeroBits = None
+
+ def setBitLength(self, bitLength):
+ self.bitLength = bitLength
+ self.leadingZeroBits = max(bitLength - integer.bitLength(self), 0)
+ return self
+
+ def __len__(self):
+ if self.bitLength is None:
+ self.setBitLength(integer.bitLength(self))
+
+ return self.bitLength
+
+
+class BitString(base.SimpleAsn1Type):
+ """Create |ASN.1| schema or value object.
+
+ |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
+ objects are immutable and duck-type both Python :class:`tuple` (as a tuple
+ of bits) and :class:`int` objects.
+
+ Keyword Args
+ ------------
+ value: :class:`int`, :class:`str` or |ASN.1| object
+ Python :class:`int` or :class:`str` literal representing binary
+ or hexadecimal number or sequence of integer bits or |ASN.1| object.
+ If `value` is not given, schema object will be created.
+
+ tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing non-default ASN.1 tag(s)
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Object representing non-default ASN.1 subtype constraint(s). Constraints
+ verification for |ASN.1| type occurs automatically on object
+ instantiation.
+
+ namedValues: :py:class:`~pyasn1.type.namedval.NamedValues`
+ Object representing non-default symbolic aliases for numbers
+
+ binValue: :py:class:`str`
+ Binary string initializer to use instead of the *value*.
+ Example: '10110011'.
+
+ hexValue: :py:class:`str`
+ Hexadecimal string initializer to use instead of the *value*.
+ Example: 'DEADBEEF'.
+
+ Raises
+ ------
+ ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
+ On constraint violation or bad initializer.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class Rights(BitString):
+ '''
+ ASN.1 specification:
+
+ Rights ::= BIT STRING { user-read(0), user-write(1),
+ group-read(2), group-write(3),
+ other-read(4), other-write(5) }
+
+ group1 Rights ::= { group-read, group-write }
+ group2 Rights ::= '0011'B
+ group3 Rights ::= '3'H
+ '''
+ namedValues = NamedValues(
+ ('user-read', 0), ('user-write', 1),
+ ('group-read', 2), ('group-write', 3),
+ ('other-read', 4), ('other-write', 5)
+ )
+
+ group1 = Rights(('group-read', 'group-write'))
+ group2 = Rights('0011')
+ group3 = Rights(0x3)
+ """
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x03)
+ )
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
+ #: imposing constraints on |ASN.1| type initialization values.
+ subtypeSpec = constraint.ConstraintsIntersection()
+
+ #: Default :py:class:`~pyasn1.type.namedval.NamedValues` object
+ #: representing symbolic aliases for numbers
+ namedValues = namedval.NamedValues()
+
+ # Optimization for faster codec lookup
+ typeId = base.SimpleAsn1Type.getTypeId()
+
+ defaultBinValue = defaultHexValue = noValue
+
+ def __init__(self, value=noValue, **kwargs):
+ if value is noValue:
+ if kwargs:
+ try:
+ value = self.fromBinaryString(kwargs.pop('binValue'), internalFormat=True)
+
+ except KeyError:
+ pass
+
+ try:
+ value = self.fromHexString(kwargs.pop('hexValue'), internalFormat=True)
+
+ except KeyError:
+ pass
+
+ if value is noValue:
+ if self.defaultBinValue is not noValue:
+ value = self.fromBinaryString(self.defaultBinValue, internalFormat=True)
+
+ elif self.defaultHexValue is not noValue:
+ value = self.fromHexString(self.defaultHexValue, internalFormat=True)
+
+ if 'namedValues' not in kwargs:
+ kwargs['namedValues'] = self.namedValues
+
+ base.SimpleAsn1Type.__init__(self, value, **kwargs)
+
+ def __str__(self):
+ return self.asBinary()
+
+ def __eq__(self, other):
+ other = self.prettyIn(other)
+ return self is other or self._value == other and len(self._value) == len(other)
+
+ def __ne__(self, other):
+ other = self.prettyIn(other)
+ return self._value != other or len(self._value) != len(other)
+
+ def __lt__(self, other):
+ other = self.prettyIn(other)
+ return len(self._value) < len(other) or len(self._value) == len(other) and self._value < other
+
+ def __le__(self, other):
+ other = self.prettyIn(other)
+ return len(self._value) <= len(other) or len(self._value) == len(other) and self._value <= other
+
+ def __gt__(self, other):
+ other = self.prettyIn(other)
+ return len(self._value) > len(other) or len(self._value) == len(other) and self._value > other
+
+ def __ge__(self, other):
+ other = self.prettyIn(other)
+ return len(self._value) >= len(other) or len(self._value) == len(other) and self._value >= other
+
+ # Immutable sequence object protocol
+
+ def __len__(self):
+ return len(self._value)
+
+ def __getitem__(self, i):
+ if i.__class__ is slice:
+ return self.clone([self[x] for x in range(*i.indices(len(self)))])
+ else:
+ length = len(self._value) - 1
+ if i > length or i < 0:
+ raise IndexError('bit index out of range')
+ return (self._value >> (length - i)) & 1
+
+ def __iter__(self):
+ length = len(self._value)
+ while length:
+ length -= 1
+ yield (self._value >> length) & 1
+
+ def __reversed__(self):
+ return reversed(tuple(self))
+
+ # arithmetic operators
+
+ def __add__(self, value):
+ value = self.prettyIn(value)
+ return self.clone(SizedInteger(self._value << len(value) | value).setBitLength(len(self._value) + len(value)))
+
+ def __radd__(self, value):
+ value = self.prettyIn(value)
+ return self.clone(SizedInteger(value << len(self._value) | self._value).setBitLength(len(self._value) + len(value)))
+
+ def __mul__(self, value):
+ bitString = self._value
+ while value > 1:
+ bitString <<= len(self._value)
+ bitString |= self._value
+ value -= 1
+ return self.clone(bitString)
+
+ def __rmul__(self, value):
+ return self * value
+
+ def __lshift__(self, count):
+ return self.clone(SizedInteger(self._value << count).setBitLength(len(self._value) + count))
+
+ def __rshift__(self, count):
+ return self.clone(SizedInteger(self._value >> count).setBitLength(max(0, len(self._value) - count)))
+
+ def __int__(self):
+ return self._value
+
+ def __float__(self):
+ return float(self._value)
+
+ if sys.version_info[0] < 3:
+ def __long__(self):
+ return self._value
+
+ def asNumbers(self):
+ """Get |ASN.1| value as a sequence of 8-bit integers.
+
+ If |ASN.1| object length is not a multiple of 8, result
+ will be left-padded with zeros.
+ """
+ return tuple(octets.octs2ints(self.asOctets()))
+
+ def asOctets(self):
+ """Get |ASN.1| value as a sequence of octets.
+
+ If |ASN.1| object length is not a multiple of 8, result
+ will be left-padded with zeros.
+ """
+ return integer.to_bytes(self._value, length=len(self))
+
+ def asInteger(self):
+ """Get |ASN.1| value as a single integer value.
+ """
+ return self._value
+
+ def asBinary(self):
+ """Get |ASN.1| value as a text string of bits.
+ """
+ binString = binary.bin(self._value)[2:]
+ return '0' * (len(self._value) - len(binString)) + binString
+
+ @classmethod
+ def fromHexString(cls, value, internalFormat=False, prepend=None):
+ """Create a |ASN.1| object initialized from the hex string.
+
+ Parameters
+ ----------
+ value: :class:`str`
+ Text string like 'DEADBEEF'
+ """
+ try:
+ value = SizedInteger(value, 16).setBitLength(len(value) * 4)
+
+ except ValueError:
+ raise error.PyAsn1Error('%s.fromHexString() error: %s' % (cls.__name__, sys.exc_info()[1]))
+
+ if prepend is not None:
+ value = SizedInteger(
+ (SizedInteger(prepend) << len(value)) | value
+ ).setBitLength(len(prepend) + len(value))
+
+ if not internalFormat:
+ value = cls(value)
+
+ return value
+
+ @classmethod
+ def fromBinaryString(cls, value, internalFormat=False, prepend=None):
+ """Create a |ASN.1| object initialized from a string of '0' and '1'.
+
+ Parameters
+ ----------
+ value: :class:`str`
+ Text string like '1010111'
+ """
+ try:
+ value = SizedInteger(value or '0', 2).setBitLength(len(value))
+
+ except ValueError:
+ raise error.PyAsn1Error('%s.fromBinaryString() error: %s' % (cls.__name__, sys.exc_info()[1]))
+
+ if prepend is not None:
+ value = SizedInteger(
+ (SizedInteger(prepend) << len(value)) | value
+ ).setBitLength(len(prepend) + len(value))
+
+ if not internalFormat:
+ value = cls(value)
+
+ return value
+
+ @classmethod
+ def fromOctetString(cls, value, internalFormat=False, prepend=None, padding=0):
+ """Create a |ASN.1| object initialized from a string.
+
+ Parameters
+ ----------
+ value: :class:`str` (Py2) or :class:`bytes` (Py3)
+ Text string like '\\\\x01\\\\xff' (Py2) or b'\\\\x01\\\\xff' (Py3)
+ """
+ value = SizedInteger(integer.from_bytes(value) >> padding).setBitLength(len(value) * 8 - padding)
+
+ if prepend is not None:
+ value = SizedInteger(
+ (SizedInteger(prepend) << len(value)) | value
+ ).setBitLength(len(prepend) + len(value))
+
+ if not internalFormat:
+ value = cls(value)
+
+ return value
+
+ def prettyIn(self, value):
+ if isinstance(value, SizedInteger):
+ return value
+ elif octets.isStringType(value):
+ if not value:
+ return SizedInteger(0).setBitLength(0)
+
+ elif value[0] == '\'': # "'1011'B" -- ASN.1 schema representation (deprecated)
+ if value[-2:] == '\'B':
+ return self.fromBinaryString(value[1:-2], internalFormat=True)
+ elif value[-2:] == '\'H':
+ return self.fromHexString(value[1:-2], internalFormat=True)
+ else:
+ raise error.PyAsn1Error(
+ 'Bad BIT STRING value notation %s' % (value,)
+ )
+
+ elif self.namedValues and not value.isdigit(): # named bits like 'Urgent, Active'
+ names = [x.strip() for x in value.split(',')]
+
+ try:
+
+ bitPositions = [self.namedValues[name] for name in names]
+
+ except KeyError:
+ raise error.PyAsn1Error('unknown bit name(s) in %r' % (names,))
+
+ rightmostPosition = max(bitPositions)
+
+ number = 0
+ for bitPosition in bitPositions:
+ number |= 1 << (rightmostPosition - bitPosition)
+
+ return SizedInteger(number).setBitLength(rightmostPosition + 1)
+
+ elif value.startswith('0x'):
+ return self.fromHexString(value[2:], internalFormat=True)
+
+ elif value.startswith('0b'):
+ return self.fromBinaryString(value[2:], internalFormat=True)
+
+ else: # assume plain binary string like '1011'
+ return self.fromBinaryString(value, internalFormat=True)
+
+ elif isinstance(value, (tuple, list)):
+ return self.fromBinaryString(''.join([b and '1' or '0' for b in value]), internalFormat=True)
+
+ elif isinstance(value, BitString):
+ return SizedInteger(value).setBitLength(len(value))
+
+ elif isinstance(value, intTypes):
+ return SizedInteger(value)
+
+ else:
+ raise error.PyAsn1Error(
+ 'Bad BitString initializer type \'%s\'' % (value,)
+ )
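+
+ # Illustrative sketch only (not part of the upstream module): the initializer
+ # notations accepted above normalize to the same internal bit view; named
+ # bits need a subclass with namedValues, like the Rights example above.
+ #
+ #   assert BitString('0b1010') == BitString('1010') == BitString((1, 0, 1, 0))
+ #   assert BitString(hexValue='A') == BitString('1010')
+ #   assert Rights('group-read, group-write') == Rights(binValue='0011')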
+
+
+try:
+ # noinspection PyStatementEffect
+ all
+
+except NameError: # Python 2.4
+ # noinspection PyShadowingBuiltins
+ def all(iterable):
+ for element in iterable:
+ if not element:
+ return False
+ return True
+
+
+class OctetString(base.SimpleAsn1Type):
+ """Create |ASN.1| schema or value object.
+
+ |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
+ objects are immutable and duck-type Python 2 :class:`str` or
+ Python 3 :class:`bytes`. When used in Unicode context, |ASN.1| type
+ assumes "|encoding|" serialisation.
+
+ Keyword Args
+ ------------
+ value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
+ :class:`str` (Python 2) or :class:`bytes` (Python 3), alternatively
+ :class:`unicode` object (Python 2) or :class:`str` (Python 3)
+ representing character string to be serialised into octets
+ (note `encoding` parameter) or |ASN.1| object.
+ If `value` is not given, schema object will be created.
+
+ tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing non-default ASN.1 tag(s)
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Object representing non-default ASN.1 subtype constraint(s). Constraints
+ verification for |ASN.1| type occurs automatically on object
+ instantiation.
+
+ encoding: :py:class:`str`
+ Unicode codec ID to encode/decode :class:`unicode` (Python 2) or
+ :class:`str` (Python 3) the payload when |ASN.1| object is used
+ in text string context.
+
+ binValue: :py:class:`str`
+ Binary string initializer to use instead of the *value*.
+ Example: '10110011'.
+
+ hexValue: :py:class:`str`
+ Hexadecimal string initializer to use instead of the *value*.
+ Example: 'DEADBEEF'.
+
+ Raises
+ ------
+ ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
+ On constraint violation or bad initializer.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class Icon(OctetString):
+ '''
+ ASN.1 specification:
+
+ Icon ::= OCTET STRING
+
+ icon1 Icon ::= '001100010011001000110011'B
+ icon2 Icon ::= '313233'H
+ '''
+ icon1 = Icon.fromBinaryString('001100010011001000110011')
+ icon2 = Icon.fromHexString('313233')
+ """
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x04)
+ )
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
+ #: imposing constraints on |ASN.1| type initialization values.
+ subtypeSpec = constraint.ConstraintsIntersection()
+
+ # Optimization for faster codec lookup
+ typeId = base.SimpleAsn1Type.getTypeId()
+
+ defaultBinValue = defaultHexValue = noValue
+ encoding = 'iso-8859-1'
+
+ def __init__(self, value=noValue, **kwargs):
+ if kwargs:
+ if value is noValue:
+ try:
+ value = self.fromBinaryString(kwargs.pop('binValue'))
+
+ except KeyError:
+ pass
+
+ try:
+ value = self.fromHexString(kwargs.pop('hexValue'))
+
+ except KeyError:
+ pass
+
+ if value is noValue:
+ if self.defaultBinValue is not noValue:
+ value = self.fromBinaryString(self.defaultBinValue)
+
+ elif self.defaultHexValue is not noValue:
+ value = self.fromHexString(self.defaultHexValue)
+
+ if 'encoding' not in kwargs:
+ kwargs['encoding'] = self.encoding
+
+ base.SimpleAsn1Type.__init__(self, value, **kwargs)
+
+ if sys.version_info[0] <= 2:
+ def prettyIn(self, value):
+ if isinstance(value, str):
+ return value
+
+ elif isinstance(value, unicode):
+ try:
+ return value.encode(self.encoding)
+
+ except (LookupError, UnicodeEncodeError):
+ exc = sys.exc_info()[1]
+ raise error.PyAsn1UnicodeEncodeError(
+ "Can't encode string '%s' with codec "
+ "%s" % (value, self.encoding), exc
+ )
+
+ elif isinstance(value, (tuple, list)):
+ try:
+ return ''.join([chr(x) for x in value])
+
+ except ValueError:
+ raise error.PyAsn1Error(
+ "Bad %s initializer '%s'" % (self.__class__.__name__, value)
+ )
+
+ else:
+ return str(value)
+
+ def __str__(self):
+ return str(self._value)
+
+ def __unicode__(self):
+ try:
+ return self._value.decode(self.encoding)
+
+ except UnicodeDecodeError:
+ exc = sys.exc_info()[1]
+ raise error.PyAsn1UnicodeDecodeError(
+ "Can't decode string '%s' with codec "
+ "%s" % (self._value, self.encoding), exc
+ )
+
+ def asOctets(self):
+ return str(self._value)
+
+ def asNumbers(self):
+ return tuple([ord(x) for x in self._value])
+
+ else:
+ def prettyIn(self, value):
+ if isinstance(value, bytes):
+ return value
+
+ elif isinstance(value, str):
+ try:
+ return value.encode(self.encoding)
+
+ except UnicodeEncodeError:
+ exc = sys.exc_info()[1]
+ raise error.PyAsn1UnicodeEncodeError(
+ "Can't encode string '%s' with '%s' "
+ "codec" % (value, self.encoding), exc
+ )
+ elif isinstance(value, OctetString): # a shortcut, bytes() would work the same way
+ return value.asOctets()
+
+ elif isinstance(value, base.SimpleAsn1Type): # this mostly targets Integer objects
+ return self.prettyIn(str(value))
+
+ elif isinstance(value, (tuple, list)):
+ return self.prettyIn(bytes(value))
+
+ else:
+ return bytes(value)
+
+ def __str__(self):
+ try:
+ return self._value.decode(self.encoding)
+
+ except UnicodeDecodeError:
+ exc = sys.exc_info()[1]
+ raise error.PyAsn1UnicodeDecodeError(
+ "Can't decode string '%s' with '%s' codec at "
+ "'%s'" % (self._value, self.encoding,
+ self.__class__.__name__), exc
+ )
+
+ def __bytes__(self):
+ return bytes(self._value)
+
+ def asOctets(self):
+ return bytes(self._value)
+
+ def asNumbers(self):
+ return tuple(self._value)
+
+ #
+ # Normally, `.prettyPrint()` is called from `__str__()`. Historically,
+ # OctetString.prettyPrint() used to return hexified payload
+ # representation in cases when non-printable content is present. At the
+ # same time `str()` used to produce either octet-stream (Py2) or
+ # text (Py3) representations.
+ #
+ # Therefore `OctetString.__str__()` -> `.prettyPrint()` call chain is
+ # reversed to preserve the original behaviour.
+ #
+ # Eventually we should deprecate `.prettyPrint()` / `.prettyOut()` harness
+ # and end up with just `__str__()` producing hexified representation while
+ # both text and octet-stream representation should only be requested via
+ # the `.asOctets()` method.
+ #
+ # Note: ASN.1 OCTET STRING is never meant to contain text!
+ #
+
+ def prettyOut(self, value):
+ return value
+
+ def prettyPrint(self, scope=0):
+ # first see if subclass has its own .prettyOut()
+ value = self.prettyOut(self._value)
+
+ if value is not self._value:
+ return value
+
+ numbers = self.asNumbers()
+
+ for x in numbers:
+ # hexify if needed
+ if x < 32 or x > 126:
+ return '0x' + ''.join(('%.2x' % x for x in numbers))
+ else:
+ # this prevents infinite recursion
+ return OctetString.__str__(self)
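+
+ # Illustrative sketch only (not part of the upstream module): printable
+ # payloads render as text while any non-printable octet switches
+ # prettyPrint() to the hexified form.
+ #
+ #   assert OctetString('abc').prettyPrint() == 'abc'
+ #   assert OctetString(hexValue='01ff').prettyPrint() == '0x01ff'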
+
+ @staticmethod
+ def fromBinaryString(value):
+ """Create a |ASN.1| object initialized from a string of '0' and '1'.
+
+ Parameters
+ ----------
+ value: :class:`str`
+ Text string like '1010111'
+ """
+ bitNo = 8
+ byte = 0
+ r = []
+ for v in value:
+ if bitNo:
+ bitNo -= 1
+ else:
+ bitNo = 7
+ r.append(byte)
+ byte = 0
+ if v in ('0', '1'):
+ v = int(v)
+ else:
+ raise error.PyAsn1Error(
+ 'Non-binary OCTET STRING initializer %s' % (v,)
+ )
+ byte |= v << bitNo
+
+ r.append(byte)
+
+ return octets.ints2octs(r)
+
+ @staticmethod
+ def fromHexString(value):
+ """Create a |ASN.1| object initialized from the hex string.
+
+ Parameters
+ ----------
+ value: :class:`str`
+ Text string like 'DEADBEEF'
+ """
+ r = []
+ p = []
+ for v in value:
+ if p:
+ r.append(int(p + v, 16))
+ p = None
+ else:
+ p = v
+ if p:
+ r.append(int(p + '0', 16))
+
+ return octets.ints2octs(r)
+
+ # Immutable sequence object protocol
+
+ def __len__(self):
+ return len(self._value)
+
+ def __getitem__(self, i):
+ if i.__class__ is slice:
+ return self.clone(self._value[i])
+ else:
+ return self._value[i]
+
+ def __iter__(self):
+ return iter(self._value)
+
+ def __contains__(self, value):
+ return value in self._value
+
+ def __add__(self, value):
+ return self.clone(self._value + self.prettyIn(value))
+
+ def __radd__(self, value):
+ return self.clone(self.prettyIn(value) + self._value)
+
+ def __mul__(self, value):
+ return self.clone(self._value * value)
+
+ def __rmul__(self, value):
+ return self * value
+
+ def __int__(self):
+ return int(self._value)
+
+ def __float__(self):
+ return float(self._value)
+
+ def __reversed__(self):
+ return reversed(self._value)
+
+
+class Null(OctetString):
+ """Create |ASN.1| schema or value object.
+
+ |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
+ objects are immutable and duck-type Python :class:`str` objects
+ (always empty).
+
+ Keyword Args
+ ------------
+ value: :class:`str` or |ASN.1| object
+ Python empty :class:`str` literal or any object that evaluates to :obj:`False`.
+ If `value` is not given, schema object will be created.
+
+ tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing non-default ASN.1 tag(s)
+
+ Raises
+ ------
+ ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
+ On constraint violation or bad initializer.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class Ack(Null):
+ '''
+ ASN.1 specification:
+
+ Ack ::= NULL
+ '''
+ ack = Ack('')
+ """
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x05)
+ )
+ subtypeSpec = OctetString.subtypeSpec + constraint.SingleValueConstraint(octets.str2octs(''))
+
+ # Optimization for faster codec lookup
+ typeId = OctetString.getTypeId()
+
+ def prettyIn(self, value):
+ if value:
+ return value
+
+ return octets.str2octs('')
+
+if sys.version_info[0] <= 2:
+ intTypes = (int, long)
+else:
+ intTypes = (int,)
+
+numericTypes = intTypes + (float,)
+
+
+class ObjectIdentifier(base.SimpleAsn1Type):
+ """Create |ASN.1| schema or value object.
+
+ |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
+ objects are immutable and duck-type Python :class:`tuple` objects
+ (tuple of non-negative integers).
+
+ Keyword Args
+ ------------
+ value: :class:`tuple`, :class:`str` or |ASN.1| object
+ Python sequence of :class:`int` or :class:`str` literal or |ASN.1| object.
+ If `value` is not given, schema object will be created.
+
+ tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing non-default ASN.1 tag(s)
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Object representing non-default ASN.1 subtype constraint(s). Constraints
+ verification for |ASN.1| type occurs automatically on object
+ instantiation.
+
+ Raises
+ ------
+ ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
+ On constraint violation or bad initializer.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class ID(ObjectIdentifier):
+ '''
+ ASN.1 specification:
+
+ ID ::= OBJECT IDENTIFIER
+
+ id-edims ID ::= { joint-iso-itu-t mhs-motif(6) edims(7) }
+ id-bp ID ::= { id-edims 11 }
+ '''
+ id_edims = ID('2.6.7')
+ id_bp = id_edims + (11,)
+ """
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x06)
+ )
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
+ #: imposing constraints on |ASN.1| type initialization values.
+ subtypeSpec = constraint.ConstraintsIntersection()
+
+ # Optimization for faster codec lookup
+ typeId = base.SimpleAsn1Type.getTypeId()
+
+ def __add__(self, other):
+ return self.clone(self._value + other)
+
+ def __radd__(self, other):
+ return self.clone(other + self._value)
+
+ def asTuple(self):
+ return self._value
+
+ # Sequence object protocol
+
+ def __len__(self):
+ return len(self._value)
+
+ def __getitem__(self, i):
+ if i.__class__ is slice:
+ return self.clone(self._value[i])
+ else:
+ return self._value[i]
+
+ def __iter__(self):
+ return iter(self._value)
+
+ def __contains__(self, value):
+ return value in self._value
+
+ def index(self, suboid):
+ return self._value.index(suboid)
+
+ def isPrefixOf(self, other):
+ """Indicate if this |ASN.1| object is a prefix of other |ASN.1| object.
+
+ Parameters
+ ----------
+ other: |ASN.1| object
+ |ASN.1| object
+
+ Returns
+ -------
+ : :class:`bool`
+ :obj:`True` if this |ASN.1| object is a parent (i.e. prefix) of the other |ASN.1| object
+ or :obj:`False` otherwise.
+ """
+ l = len(self)
+ if l <= len(other):
+ if self._value[:l] == other[:l]:
+ return True
+ return False
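+
+ # Illustrative sketch only (not part of the upstream module): prefix testing
+ # combines naturally with OID concatenation.
+ #
+ #   internet = ObjectIdentifier('1.3.6.1')
+ #   assert internet.isPrefixOf(internet + (2, 1))       # 1.3.6.1.2.1
+ #   assert not internet.isPrefixOf(ObjectIdentifier('1.3'))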
+
+ def prettyIn(self, value):
+ if isinstance(value, ObjectIdentifier):
+ return tuple(value)
+ elif octets.isStringType(value):
+ if '-' in value:
+ raise error.PyAsn1Error(
+ 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, sys.exc_info()[1])
+ )
+ try:
+ return tuple([int(subOid) for subOid in value.split('.') if subOid])
+ except ValueError:
+ raise error.PyAsn1Error(
+ 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, sys.exc_info()[1])
+ )
+
+ try:
+ tupleOfInts = tuple([int(subOid) for subOid in value if subOid >= 0])
+
+ except (ValueError, TypeError):
+ raise error.PyAsn1Error(
+ 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, sys.exc_info()[1])
+ )
+
+ if len(tupleOfInts) == len(value):
+ return tupleOfInts
+
+ raise error.PyAsn1Error('Malformed Object ID %s at %s' % (value, self.__class__.__name__))
+
+ def prettyOut(self, value):
+ return '.'.join([str(x) for x in value])
+
+
+class Real(base.SimpleAsn1Type):
+ """Create |ASN.1| schema or value object.
+
+ |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
+ objects are immutable and duck-type Python :class:`float` objects.
+ Additionally, |ASN.1| objects behave like a :class:`tuple` in which case its
+ elements are mantissa, base and exponent.
+
+ Keyword Args
+ ------------
+ value: :class:`tuple`, :class:`float` or |ASN.1| object
+ Python sequence of :class:`int` (representing mantissa, base and
+ exponent) or :class:`float` instance or |ASN.1| object.
+ If `value` is not given, schema object will be created.
+
+ tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing non-default ASN.1 tag(s)
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Object representing non-default ASN.1 subtype constraint(s). Constraints
+ verification for |ASN.1| type occurs automatically on object
+ instantiation.
+
+ Raises
+ ------
+ ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
+ On constraint violation or bad initializer.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class Pi(Real):
+ '''
+ ASN.1 specification:
+
+ Pi ::= REAL
+
+ pi Pi ::= { mantissa 314159, base 10, exponent -5 }
+
+ '''
+ pi = Pi((314159, 10, -5))
+ """
+ binEncBase = None # binEncBase = 16 is recommended for large numbers
+
+ try:
+ _plusInf = float('inf')
+ _minusInf = float('-inf')
+ _inf = _plusInf, _minusInf
+
+ except ValueError:
+ # Infinity support is platform and Python dependent
+ _plusInf = _minusInf = None
+ _inf = ()
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x09)
+ )
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
+ #: imposing constraints on |ASN.1| type initialization values.
+ subtypeSpec = constraint.ConstraintsIntersection()
+
+ # Optimization for faster codec lookup
+ typeId = base.SimpleAsn1Type.getTypeId()
+
+ @staticmethod
+ def __normalizeBase10(value):
+ m, b, e = value
+ while m and m % 10 == 0:
+ m /= 10
+ e += 1
+ return m, b, e
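+
+ # Editor's note (illustrative, not upstream): base-10 values get normalised by
+ # stripping trailing zeros from the mantissa, e.g.
+ #
+ #   Real((314159000, 10, -8)) == Real((314159, 10, -5))   # same numeric value
+ #
+ # (on Python 3 the '/' above can leave the mantissa as a float).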
+
+ def prettyIn(self, value):
+ if isinstance(value, tuple) and len(value) == 3:
+ if (not isinstance(value[0], numericTypes) or
+ not isinstance(value[1], intTypes) or
+ not isinstance(value[2], intTypes)):
+ raise error.PyAsn1Error('Bad Real value syntax: %s' % (value,))
+ if (isinstance(value[0], float) and
+ self._inf and value[0] in self._inf):
+ return value[0]
+ if value[1] not in (2, 10):
+ raise error.PyAsn1Error(
+ 'Prohibited base for Real value: %s' % (value[1],)
+ )
+ if value[1] == 10:
+ value = self.__normalizeBase10(value)
+ return value
+ elif isinstance(value, intTypes):
+ return self.__normalizeBase10((value, 10, 0))
+ elif isinstance(value, float) or octets.isStringType(value):
+ if octets.isStringType(value):
+ try:
+ value = float(value)
+ except ValueError:
+ raise error.PyAsn1Error(
+ 'Bad real value syntax: %s' % (value,)
+ )
+ if self._inf and value in self._inf:
+ return value
+ else:
+ e = 0
+ while int(value) != value:
+ value *= 10
+ e -= 1
+ return self.__normalizeBase10((int(value), 10, e))
+ elif isinstance(value, Real):
+ return tuple(value)
+ raise error.PyAsn1Error(
+ 'Bad real value syntax: %s' % (value,)
+ )
+
+ def prettyPrint(self, scope=0):
+ try:
+ return self.prettyOut(float(self))
+
+ except OverflowError:
+ return '<overflow>'
+
+ @property
+ def isPlusInf(self):
+ """Indicate PLUS-INFINITY object value
+
+ Returns
+ -------
+ : :class:`bool`
+ :obj:`True` if calling object represents plus infinity
+ or :obj:`False` otherwise.
+
+ """
+ return self._value == self._plusInf
+
+ @property
+ def isMinusInf(self):
+ """Indicate MINUS-INFINITY object value
+
+ Returns
+ -------
+ : :class:`bool`
+ :obj:`True` if calling object represents minus infinity
+ or :obj:`False` otherwise.
+ """
+ return self._value == self._minusInf
+
+ @property
+ def isInf(self):
+ return self._value in self._inf
+
+ def __add__(self, value):
+ return self.clone(float(self) + value)
+
+ def __radd__(self, value):
+ return self + value
+
+ def __mul__(self, value):
+ return self.clone(float(self) * value)
+
+ def __rmul__(self, value):
+ return self * value
+
+ def __sub__(self, value):
+ return self.clone(float(self) - value)
+
+ def __rsub__(self, value):
+ return self.clone(value - float(self))
+
+ def __mod__(self, value):
+ return self.clone(float(self) % value)
+
+ def __rmod__(self, value):
+ return self.clone(value % float(self))
+
+ def __pow__(self, value, modulo=None):
+ return self.clone(pow(float(self), value, modulo))
+
+ def __rpow__(self, value):
+ return self.clone(pow(value, float(self)))
+
+ if sys.version_info[0] <= 2:
+ def __div__(self, value):
+ return self.clone(float(self) / value)
+
+ def __rdiv__(self, value):
+ return self.clone(value / float(self))
+ else:
+ def __truediv__(self, value):
+ return self.clone(float(self) / value)
+
+ def __rtruediv__(self, value):
+ return self.clone(value / float(self))
+
+ def __divmod__(self, value):
+ return self.clone(float(self) // value)
+
+ def __rdivmod__(self, value):
+ return self.clone(value // float(self))
+
+ def __int__(self):
+ return int(float(self))
+
+ if sys.version_info[0] <= 2:
+ def __long__(self):
+ return long(float(self))
+
+ def __float__(self):
+ if self._value in self._inf:
+ return self._value
+ else:
+ return float(
+ self._value[0] * pow(self._value[1], self._value[2])
+ )
+
+ def __abs__(self):
+ return self.clone(abs(float(self)))
+
+ def __pos__(self):
+ return self.clone(+float(self))
+
+ def __neg__(self):
+ return self.clone(-float(self))
+
+ def __round__(self, n=0):
+ r = round(float(self), n)
+ if n:
+ return self.clone(r)
+ else:
+ return r
+
+ def __floor__(self):
+ return self.clone(math.floor(float(self)))
+
+ def __ceil__(self):
+ return self.clone(math.ceil(float(self)))
+
+ if sys.version_info[0:2] > (2, 5):
+ def __trunc__(self):
+ return self.clone(math.trunc(float(self)))
+
+ def __lt__(self, value):
+ return float(self) < value
+
+ def __le__(self, value):
+ return float(self) <= value
+
+ def __eq__(self, value):
+ return float(self) == value
+
+ def __ne__(self, value):
+ return float(self) != value
+
+ def __gt__(self, value):
+ return float(self) > value
+
+ def __ge__(self, value):
+ return float(self) >= value
+
+ if sys.version_info[0] <= 2:
+ def __nonzero__(self):
+ return bool(float(self))
+ else:
+ def __bool__(self):
+ return bool(float(self))
+
+ __hash__ = base.SimpleAsn1Type.__hash__
+
+ def __getitem__(self, idx):
+ if self._value in self._inf:
+ raise error.PyAsn1Error('Invalid infinite value operation')
+ else:
+ return self._value[idx]
+
+ # compatibility stubs
+
+ def isPlusInfinity(self):
+ return self.isPlusInf
+
+ def isMinusInfinity(self):
+ return self.isMinusInf
+
+ def isInfinity(self):
+ return self.isInf
+
+
+class Enumerated(Integer):
+ """Create |ASN.1| schema or value object.
+
+ |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its
+ objects are immutable and duck-type Python :class:`int` objects.
+
+ Keyword Args
+ ------------
+ value: :class:`int`, :class:`str` or |ASN.1| object
+ Python :class:`int` or :class:`str` literal or |ASN.1| object.
+ If `value` is not given, schema object will be created.
+
+ tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing non-default ASN.1 tag(s)
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Object representing non-default ASN.1 subtype constraint(s). Constraints
+ verification for |ASN.1| type occurs automatically on object
+ instantiation.
+
+ namedValues: :py:class:`~pyasn1.type.namedval.NamedValues`
+ Object representing non-default symbolic aliases for numbers
+
+ Raises
+ ------
+ ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
+ On constraint violation or bad initializer.
+
+ Examples
+ --------
+
+ .. code-block:: python
+
+ class RadioButton(Enumerated):
+ '''
+ ASN.1 specification:
+
+ RadioButton ::= ENUMERATED { button1(0), button2(1),
+ button3(2) }
+
+ selected-by-default RadioButton ::= button1
+ '''
+ namedValues = NamedValues(
+ ('button1', 0), ('button2', 1),
+ ('button3', 2)
+ )
+
+ selected_by_default = RadioButton('button1')
+ """
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x0A)
+ )
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
+ #: imposing constraints on |ASN.1| type initialization values.
+ subtypeSpec = constraint.ConstraintsIntersection()
+
+ # Optimization for faster codec lookup
+ typeId = Integer.getTypeId()
+
+ #: Default :py:class:`~pyasn1.type.namedval.NamedValues` object
+ #: representing symbolic aliases for numbers
+ namedValues = namedval.NamedValues()
+
+
+# "Structured" ASN.1 types
+
+class SequenceOfAndSetOfBase(base.ConstructedAsn1Type):
+ """Create |ASN.1| schema or value object.
+
+ |ASN.1| class is based on :class:`~pyasn1.type.base.ConstructedAsn1Type`,
+ its objects are mutable and duck-type Python :class:`list` objects.
+
+ Keyword Args
+ ------------
+ componentType : :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+ A pyasn1 object representing ASN.1 type allowed within |ASN.1| type
+
+ tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing non-default ASN.1 tag(s)
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Object representing non-default ASN.1 subtype constraint(s). Constraints
+ verification for |ASN.1| type can only occur on explicit
+ `.isInconsistent` call.
+
+ Examples
+ --------
+
+ .. code-block:: python
+
+ class LotteryDraw(SequenceOf): # SetOf is similar
+ '''
+ ASN.1 specification:
+
+ LotteryDraw ::= SEQUENCE OF INTEGER
+ '''
+ componentType = Integer()
+
+ lotteryDraw = LotteryDraw()
+ lotteryDraw.extend([123, 456, 789])
+ """
+ def __init__(self, *args, **kwargs):
+ # support positional params for backward compatibility
+ if args:
+ for key, value in zip(('componentType', 'tagSet',
+ 'subtypeSpec'), args):
+ if key in kwargs:
+ raise error.PyAsn1Error('Conflicting positional and keyword params!')
+ kwargs[key] = value
+
+ self._componentValues = noValue
+
+ base.ConstructedAsn1Type.__init__(self, **kwargs)
+
+ # Python list protocol
+
+ def __getitem__(self, idx):
+ try:
+ return self.getComponentByPosition(idx)
+
+ except error.PyAsn1Error:
+ raise IndexError(sys.exc_info()[1])
+
+ def __setitem__(self, idx, value):
+ try:
+ self.setComponentByPosition(idx, value)
+
+ except error.PyAsn1Error:
+ raise IndexError(sys.exc_info()[1])
+
+ def append(self, value):
+ if self._componentValues is noValue:
+ pos = 0
+
+ else:
+ pos = len(self._componentValues)
+
+ self[pos] = value
+
+ def count(self, value):
+ return list(self._componentValues.values()).count(value)
+
+ def extend(self, values):
+ for value in values:
+ self.append(value)
+
+ if self._componentValues is noValue:
+ self._componentValues = {}
+
+ def index(self, value, start=0, stop=None):
+ if stop is None:
+ stop = len(self)
+
+ indices, values = zip(*self._componentValues.items())
+
+ # TODO: remove when Py2.5 support is gone
+ values = list(values)
+
+ try:
+ return indices[values.index(value, start, stop)]
+
+ except error.PyAsn1Error:
+ raise ValueError(sys.exc_info()[1])
+
+ def reverse(self):
+ # dict-backed storage: rebuild the index->value mapping in reverse order
+ self._componentValues = dict(
+ enumerate(reversed([self._componentValues[idx]
+ for idx in sorted(self._componentValues)])))
+
+ def sort(self, key=None, reverse=False):
+ self._componentValues = dict(
+ enumerate(sorted(self._componentValues.values(),
+ key=key, reverse=reverse)))
+
+ def __len__(self):
+ if self._componentValues is noValue or not self._componentValues:
+ return 0
+
+ return max(self._componentValues) + 1
+
+ def __iter__(self):
+ for idx in range(0, len(self)):
+ yield self.getComponentByPosition(idx)
+
+ def _cloneComponentValues(self, myClone, cloneValueFlag):
+ for idx, componentValue in self._componentValues.items():
+ if componentValue is not noValue:
+ if isinstance(componentValue, base.ConstructedAsn1Type):
+ myClone.setComponentByPosition(
+ idx, componentValue.clone(cloneValueFlag=cloneValueFlag)
+ )
+ else:
+ myClone.setComponentByPosition(idx, componentValue.clone())
+
+ def getComponentByPosition(self, idx, default=noValue, instantiate=True):
+ """Return |ASN.1| type component value by position.
+
+ Equivalent to Python sequence subscription operation (e.g. `[]`).
+
+ Parameters
+ ----------
+ idx : :class:`int`
+ Component index (zero-based). Must either refer to an existing
+ component or to N+1 component (if *componentType* is set). In the latter
+ case a new component type gets instantiated and appended to the |ASN.1|
+ sequence.
+
+ Keyword Args
+ ------------
+ default: :class:`object`
+ If set and requested component is a schema object, return the `default`
+ object instead of the requested component.
+
+ instantiate: :class:`bool`
+ If :obj:`True` (default), inner component will be automatically instantiated.
+ If :obj:`False` either existing component or the :class:`NoValue` object will be
+ returned.
+
+ Returns
+ -------
+ : :py:class:`~pyasn1.type.base.PyAsn1Item`
+ Instantiated |ASN.1| component type or existing component value
+
+ Examples
+ --------
+
+ .. code-block:: python
+
+ # can also be SetOf
+ class MySequenceOf(SequenceOf):
+ componentType = OctetString()
+
+ s = MySequenceOf()
+
+ # returns component #0 with `.isValue` property False
+ s.getComponentByPosition(0)
+
+ # returns None
+ s.getComponentByPosition(0, default=None)
+
+ s.clear()
+
+ # returns noValue
+ s.getComponentByPosition(0, instantiate=False)
+
+ # sets component #0 to OctetString() ASN.1 schema
+ # object and returns it
+ s.getComponentByPosition(0, instantiate=True)
+
+ # sets component #0 to ASN.1 value object
+ s.setComponentByPosition(0, 'ABCD')
+
+ # returns OctetString('ABCD') value object
+ s.getComponentByPosition(0, instantiate=False)
+
+ s.clear()
+
+ # returns noValue
+ s.getComponentByPosition(0, instantiate=False)
+ """
+ if isinstance(idx, slice):
+ indices = tuple(range(len(self)))
+ return [self.getComponentByPosition(subidx, default, instantiate)
+ for subidx in indices[idx]]
+
+ if idx < 0:
+ idx = len(self) + idx
+ if idx < 0:
+ raise error.PyAsn1Error(
+ 'SequenceOf/SetOf index is out of range')
+
+ try:
+ componentValue = self._componentValues[idx]
+
+ except (KeyError, error.PyAsn1Error):
+ if not instantiate:
+ return default
+
+ self.setComponentByPosition(idx)
+
+ componentValue = self._componentValues[idx]
+
+ if default is noValue or componentValue.isValue:
+ return componentValue
+ else:
+ return default
+
+ def setComponentByPosition(self, idx, value=noValue,
+ verifyConstraints=True,
+ matchTags=True,
+ matchConstraints=True):
+ """Assign |ASN.1| type component by position.
+
+ Equivalent to Python sequence item assignment operation (e.g. `[]`)
+ or list.append() (when idx == len(self)).
+
+ Parameters
+ ----------
+ idx: :class:`int`
+ Component index (zero-based). Must either refer to existing
+ component or to N+1 component. In the latter case a new component
+ type gets instantiated (if *componentType* is set, or given ASN.1
+ object is taken otherwise) and appended to the |ASN.1| sequence.
+
+ Keyword Args
+ ------------
+ value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+ A Python value to initialize |ASN.1| component with (if *componentType* is set)
+ or ASN.1 value object to assign to |ASN.1| component.
+ If `value` is not given, schema object will be set as a component.
+
+ verifyConstraints: :class:`bool`
+ If :obj:`False`, skip constraints validation
+
+ matchTags: :class:`bool`
+ If :obj:`False`, skip component tags matching
+
+ matchConstraints: :class:`bool`
+ If :obj:`False`, skip component constraints matching
+
+ Returns
+ -------
+ self
+
+ Raises
+ ------
+ ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
+ On constraint violation or bad initializer
+ IndexError
+ When idx > len(self)
+ """
+ if isinstance(idx, slice):
+ indices = tuple(range(len(self)))
+ startIdx = indices and indices[idx][0] or 0
+ for subIdx, subValue in enumerate(value):
+ self.setComponentByPosition(
+ startIdx + subIdx, subValue, verifyConstraints,
+ matchTags, matchConstraints)
+ return self
+
+ if idx < 0:
+ idx = len(self) + idx
+ if idx < 0:
+ raise error.PyAsn1Error(
+ 'SequenceOf/SetOf index is out of range')
+
+ componentType = self.componentType
+
+ if self._componentValues is noValue:
+ componentValues = {}
+
+ else:
+ componentValues = self._componentValues
+
+ currentValue = componentValues.get(idx, noValue)
+
+ if value is noValue:
+ if componentType is not None:
+ value = componentType.clone()
+
+ elif currentValue is noValue:
+ raise error.PyAsn1Error('Component type not defined')
+
+ elif not isinstance(value, base.Asn1Item):
+ if (componentType is not None and
+ isinstance(componentType, base.SimpleAsn1Type)):
+ value = componentType.clone(value=value)
+
+ elif (currentValue is not noValue and
+ isinstance(currentValue, base.SimpleAsn1Type)):
+ value = currentValue.clone(value=value)
+
+ else:
+ raise error.PyAsn1Error(
+ 'Non-ASN.1 value %r and undefined component'
+ ' type at %r' % (value, self))
+
+ elif componentType is not None and (matchTags or matchConstraints):
+ subtypeChecker = (
+ self.strictConstraints and
+ componentType.isSameTypeWith or
+ componentType.isSuperTypeOf)
+
+ if not subtypeChecker(value, verifyConstraints and matchTags,
+ verifyConstraints and matchConstraints):
+ # TODO: we should wrap componentType with UnnamedType to carry
+ # additional properties associated with componentType
+ if componentType.typeId != Any.typeId:
+ raise error.PyAsn1Error(
+ 'Component value is tag-incompatible: %r vs '
+ '%r' % (value, componentType))
+
+ componentValues[idx] = value
+
+ self._componentValues = componentValues
+
+ return self
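+
+ # Illustrative usage sketch (editor's note, not part of upstream pyasn1):
+ #
+ #   seq = SequenceOf(componentType=Integer())
+ #   seq.setComponentByPosition(0, 1)   # same as seq[0] = 1
+ #   seq[1] = 2                         # assigning at len(seq) appends
+ #   seq[0:2] = [10, 20]                # slice assignment sets per position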
+
+ @property
+ def componentTagMap(self):
+ if self.componentType is not None:
+ return self.componentType.tagMap
+
+ @property
+ def components(self):
+ return [self._componentValues[idx]
+ for idx in sorted(self._componentValues)]
+
+ def clear(self):
+ """Remove all components and become an empty |ASN.1| value object.
+
+ Has the same effect on |ASN.1| object as it does on :class:`list`
+ built-in.
+ """
+ self._componentValues = {}
+ return self
+
+ def reset(self):
+ """Remove all components and become a |ASN.1| schema object.
+
+ See :meth:`isValue` property for more information on the
+ distinction between value and schema objects.
+ """
+ self._componentValues = noValue
+ return self
+
+ def prettyPrint(self, scope=0):
+ scope += 1
+ representation = self.__class__.__name__ + ':\n'
+
+ if not self.isValue:
+ return representation
+
+ for idx, componentValue in enumerate(self):
+ representation += ' ' * scope
+ if (componentValue is noValue and
+ self.componentType is not None):
+ representation += '<empty>'
+ else:
+ representation += componentValue.prettyPrint(scope)
+
+ return representation
+
+ def prettyPrintType(self, scope=0):
+ scope += 1
+ representation = '%s -> %s {\n' % (self.tagSet, self.__class__.__name__)
+ if self.componentType is not None:
+ representation += ' ' * scope
+ representation += self.componentType.prettyPrintType(scope)
+ return representation + '\n' + ' ' * (scope - 1) + '}'
+
+
+ @property
+ def isValue(self):
+ """Indicate that |ASN.1| object represents ASN.1 value.
+
+ If *isValue* is :obj:`False` then this object represents just ASN.1 schema.
+
+ If *isValue* is :obj:`True` then, in addition to its ASN.1 schema features,
+ this object can also be used like a Python built-in object
+ (e.g. :class:`int`, :class:`str`, :class:`dict` etc.).
+
+ Returns
+ -------
+ : :class:`bool`
+ :obj:`False` if object represents just ASN.1 schema.
+ :obj:`True` if object represents ASN.1 schema and can be used as a normal value.
+
+ Note
+ ----
+ There is an important distinction between PyASN1 schema and value objects.
+ The PyASN1 schema objects can only participate in ASN.1 schema-related
+ operations (e.g. defining or testing the structure of the data). The most
+ obvious use of ASN.1 schema is to guide serialisation codecs whilst
+ encoding/decoding serialised ASN.1 contents.
+
+ The PyASN1 value objects can **additionally** participate in many operations
+ involving regular Python objects (e.g. arithmetic, comprehension etc).
+ """
+ if self._componentValues is noValue:
+ return False
+
+ if len(self._componentValues) != len(self):
+ return False
+
+ for componentValue in self._componentValues.values():
+ if componentValue is noValue or not componentValue.isValue:
+ return False
+
+ return True
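+
+ # Editor's note (illustrative, not upstream): schema vs. value objects
+ #
+ #   SequenceOf(componentType=Integer()).isValue          # False - schema only
+ #   SequenceOf(componentType=Integer()).clear().isValue  # True - empty value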
+
+ @property
+ def isInconsistent(self):
+ """Run necessary checks to ensure |ASN.1| object consistency.
+
+ Default action is to verify |ASN.1| object against constraints imposed
+ by `subtypeSpec`.
+
+ Raises
+ ------
+ :py:class:`~pyasn1.error.PyAsn1Error` on any inconsistencies found
+ """
+ if self.componentType is noValue or not self.subtypeSpec:
+ return False
+
+ if self._componentValues is noValue:
+ return True
+
+ mapping = {}
+
+ for idx, value in self._componentValues.items():
+ # Absent fields are not in the mapping
+ if value is noValue:
+ continue
+
+ mapping[idx] = value
+
+ try:
+ # Represent SequenceOf/SetOf as a bare dict to constraints chain
+ self.subtypeSpec(mapping)
+
+ except error.PyAsn1Error:
+ exc = sys.exc_info()[1]
+ return exc
+
+ return False
+
+
+class SequenceOf(SequenceOfAndSetOfBase):
+ __doc__ = SequenceOfAndSetOfBase.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
+ )
+
+ #: Default :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+ #: object representing ASN.1 type allowed within |ASN.1| type
+ componentType = None
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
+ #: imposing constraints on |ASN.1| type initialization values.
+ subtypeSpec = constraint.ConstraintsIntersection()
+
+ # Disambiguation ASN.1 types identification
+ typeId = SequenceOfAndSetOfBase.getTypeId()
+
+
+class SetOf(SequenceOfAndSetOfBase):
+ __doc__ = SequenceOfAndSetOfBase.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
+ )
+
+ #: Default :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+ #: object representing ASN.1 type allowed within |ASN.1| type
+ componentType = None
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
+ #: imposing constraints on |ASN.1| type initialization values.
+ subtypeSpec = constraint.ConstraintsIntersection()
+
+ # Disambiguation ASN.1 types identification
+ typeId = SequenceOfAndSetOfBase.getTypeId()
+
+
+class SequenceAndSetBase(base.ConstructedAsn1Type):
+ """Create |ASN.1| schema or value object.
+
+ |ASN.1| class is based on :class:`~pyasn1.type.base.ConstructedAsn1Type`,
+ its objects are mutable and duck-type Python :class:`dict` objects.
+
+ Keyword Args
+ ------------
+ componentType: :py:class:`~pyasn1.type.namedtype.NamedType`
+ Object holding named ASN.1 types allowed within this collection
+
+ tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing non-default ASN.1 tag(s)
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Object representing non-default ASN.1 subtype constraint(s). Constraints
+ verification for |ASN.1| type can only occur on explicit
+ `.isInconsistent` call.
+
+ Examples
+ --------
+
+ .. code-block:: python
+
+ class Description(Sequence): # Set is similar
+ '''
+ ASN.1 specification:
+
+ Description ::= SEQUENCE {
+ surname IA5String,
+ first-name IA5String OPTIONAL,
+ age INTEGER DEFAULT 40
+ }
+ '''
+ componentType = NamedTypes(
+ NamedType('surname', IA5String()),
+ OptionalNamedType('first-name', IA5String()),
+ DefaultedNamedType('age', Integer(40))
+ )
+
+ descr = Description()
+ descr['surname'] = 'Smith'
+ descr['first-name'] = 'John'
+ """
+ #: Default :py:class:`~pyasn1.type.namedtype.NamedTypes`
+ #: object representing named ASN.1 types allowed within |ASN.1| type
+ componentType = namedtype.NamedTypes()
+
+
+ class DynamicNames(object):
+ """Fields names/positions mapping for component-less objects"""
+ def __init__(self):
+ self._keyToIdxMap = {}
+ self._idxToKeyMap = {}
+
+ def __len__(self):
+ return len(self._keyToIdxMap)
+
+ def __contains__(self, item):
+ return item in self._keyToIdxMap or item in self._idxToKeyMap
+
+ def __iter__(self):
+ return (self._idxToKeyMap[idx] for idx in range(len(self._idxToKeyMap)))
+
+ def __getitem__(self, item):
+ try:
+ return self._keyToIdxMap[item]
+
+ except KeyError:
+ return self._idxToKeyMap[item]
+
+ def getNameByPosition(self, idx):
+ try:
+ return self._idxToKeyMap[idx]
+
+ except KeyError:
+ raise error.PyAsn1Error('Type position out of range')
+
+ def getPositionByName(self, name):
+ try:
+ return self._keyToIdxMap[name]
+
+ except KeyError:
+ raise error.PyAsn1Error('Name %s not found' % (name,))
+
+ def addField(self, idx):
+ self._keyToIdxMap['field-%d' % idx] = idx
+ self._idxToKeyMap[idx] = 'field-%d' % idx
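+
+ # Editor's note (illustrative, not upstream): when a Sequence/Set is created
+ # without a componentType, components assigned by position are reachable
+ # under synthetic names:
+ #
+ #   s = Sequence()
+ #   s.setComponentByPosition(0, OctetString('abc'))
+ #   list(s.keys())   # -> ['field-0']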
+
+
+ def __init__(self, **kwargs):
+ base.ConstructedAsn1Type.__init__(self, **kwargs)
+ self._componentTypeLen = len(self.componentType)
+ if self._componentTypeLen:
+ self._componentValues = []
+ else:
+ self._componentValues = noValue
+ self._dynamicNames = self._componentTypeLen or self.DynamicNames()
+
+ def __getitem__(self, idx):
+ if octets.isStringType(idx):
+ try:
+ return self.getComponentByName(idx)
+
+ except error.PyAsn1Error:
+ # duck-typing dict
+ raise KeyError(sys.exc_info()[1])
+
+ else:
+ try:
+ return self.getComponentByPosition(idx)
+
+ except error.PyAsn1Error:
+ # duck-typing list
+ raise IndexError(sys.exc_info()[1])
+
+ def __setitem__(self, idx, value):
+ if octets.isStringType(idx):
+ try:
+ self.setComponentByName(idx, value)
+
+ except error.PyAsn1Error:
+ # duck-typing dict
+ raise KeyError(sys.exc_info()[1])
+
+ else:
+ try:
+ self.setComponentByPosition(idx, value)
+
+ except error.PyAsn1Error:
+ # duck-typing list
+ raise IndexError(sys.exc_info()[1])
+
+ def __contains__(self, key):
+ if self._componentTypeLen:
+ return key in self.componentType
+ else:
+ return key in self._dynamicNames
+
+ def __len__(self):
+ return len(self._componentValues)
+
+ def __iter__(self):
+ return iter(self.componentType or self._dynamicNames)
+
+ # Python dict protocol
+
+ def values(self):
+ for idx in range(self._componentTypeLen or len(self._dynamicNames)):
+ yield self[idx]
+
+ def keys(self):
+ return iter(self)
+
+ def items(self):
+ for idx in range(self._componentTypeLen or len(self._dynamicNames)):
+ if self._componentTypeLen:
+ yield self.componentType[idx].name, self[idx]
+ else:
+ yield self._dynamicNames[idx], self[idx]
+
+ def update(self, *iterValue, **mappingValue):
+ for k, v in iterValue:
+ self[k] = v
+ for k in mappingValue:
+ self[k] = mappingValue[k]
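+
+ # Illustrative usage sketch (editor's note, not part of upstream pyasn1),
+ # reusing the Description type from the class docstring above:
+ #
+ #   descr = Description()
+ #   descr.update(surname='Smith', **{'first-name': 'John'})
+ #   [name for name, value in descr.items()]
+ #   # -> ['surname', 'first-name', 'age']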
+
+ def clear(self):
+ """Remove all components and become an empty |ASN.1| value object.
+
+ Has the same effect on |ASN.1| object as it does on :class:`dict`
+ built-in.
+ """
+ self._componentValues = []
+ self._dynamicNames = self.DynamicNames()
+ return self
+
+ def reset(self):
+ """Remove all components and become a |ASN.1| schema object.
+
+ See :meth:`isValue` property for more information on the
+ distinction between value and schema objects.
+ """
+ self._componentValues = noValue
+ self._dynamicNames = self.DynamicNames()
+ return self
+
+ @property
+ def components(self):
+ return self._componentValues
+
+ def _cloneComponentValues(self, myClone, cloneValueFlag):
+ if self._componentValues is noValue:
+ return
+
+ for idx, componentValue in enumerate(self._componentValues):
+ if componentValue is not noValue:
+ if isinstance(componentValue, base.ConstructedAsn1Type):
+ myClone.setComponentByPosition(
+ idx, componentValue.clone(cloneValueFlag=cloneValueFlag)
+ )
+ else:
+ myClone.setComponentByPosition(idx, componentValue.clone())
+
+ def getComponentByName(self, name, default=noValue, instantiate=True):
+ """Returns |ASN.1| type component by name.
+
+ Equivalent to Python :class:`dict` subscription operation (e.g. `[]`).
+
+ Parameters
+ ----------
+ name: :class:`str`
+ |ASN.1| type component name
+
+ Keyword Args
+ ------------
+ default: :class:`object`
+ If set and requested component is a schema object, return the `default`
+ object instead of the requested component.
+
+ instantiate: :class:`bool`
+ If :obj:`True` (default), inner component will be automatically
+ instantiated.
+ If :obj:`False` either existing component or the :class:`NoValue`
+ object will be returned.
+
+ Returns
+ -------
+ : :py:class:`~pyasn1.type.base.PyAsn1Item`
+ Instantiated |ASN.1| component type or existing
+ component value
+ """
+ if self._componentTypeLen:
+ idx = self.componentType.getPositionByName(name)
+ else:
+ try:
+ idx = self._dynamicNames.getPositionByName(name)
+
+ except KeyError:
+ raise error.PyAsn1Error('Name %s not found' % (name,))
+
+ return self.getComponentByPosition(idx, default=default, instantiate=instantiate)
+
+ def setComponentByName(self, name, value=noValue,
+ verifyConstraints=True,
+ matchTags=True,
+ matchConstraints=True):
+ """Assign |ASN.1| type component by name.
+
+ Equivalent to Python :class:`dict` item assignment operation (e.g. `[]`).
+
+ Parameters
+ ----------
+ name: :class:`str`
+ |ASN.1| type component name
+
+ Keyword Args
+ ------------
+ value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+ A Python value to initialize |ASN.1| component with (if *componentType* is set)
+ or ASN.1 value object to assign to |ASN.1| component.
+ If `value` is not given, schema object will be set as a component.
+
+ verifyConstraints: :class:`bool`
+ If :obj:`False`, skip constraints validation
+
+ matchTags: :class:`bool`
+ If :obj:`False`, skip component tags matching
+
+ matchConstraints: :class:`bool`
+ If :obj:`False`, skip component constraints matching
+
+ Returns
+ -------
+ self
+ """
+ if self._componentTypeLen:
+ idx = self.componentType.getPositionByName(name)
+ else:
+ try:
+ idx = self._dynamicNames.getPositionByName(name)
+
+ except KeyError:
+ raise error.PyAsn1Error('Name %s not found' % (name,))
+
+ return self.setComponentByPosition(
+ idx, value, verifyConstraints, matchTags, matchConstraints
+ )
+
+ def getComponentByPosition(self, idx, default=noValue, instantiate=True):
+ """Returns |ASN.1| type component by index.
+
+ Equivalent to Python sequence subscription operation (e.g. `[]`).
+
+ Parameters
+ ----------
+ idx: :class:`int`
+ Component index (zero-based). Must refer to an existing component;
+ if *componentType* is set, a missing component is instantiated from
+ the corresponding ASN.1 schema object.
+
+ Keyword Args
+ ------------
+ default: :class:`object`
+ If set and requested component is a schema object, return the `default`
+ object instead of the requested component.
+
+ instantiate: :class:`bool`
+ If :obj:`True` (default), inner component will be automatically
+ instantiated.
+ If :obj:`False` either existing component or the :class:`NoValue`
+ object will be returned.
+
+ Returns
+ -------
+ : :py:class:`~pyasn1.type.base.PyAsn1Item`
+ a PyASN1 object
+
+ Examples
+ --------
+
+ .. code-block:: python
+
+ # can also be Set
+ class MySequence(Sequence):
+ componentType = NamedTypes(
+ NamedType('id', OctetString())
+ )
+
+ s = MySequence()
+
+ # returns component #0 with `.isValue` property False
+ s.getComponentByPosition(0)
+
+ # returns None
+ s.getComponentByPosition(0, default=None)
+
+ s.clear()
+
+ # returns noValue
+ s.getComponentByPosition(0, instantiate=False)
+
+ # sets component #0 to OctetString() ASN.1 schema
+ # object and returns it
+ s.getComponentByPosition(0, instantiate=True)
+
+ # sets component #0 to ASN.1 value object
+ s.setComponentByPosition(0, 'ABCD')
+
+ # returns OctetString('ABCD') value object
+ s.getComponentByPosition(0, instantiate=False)
+
+ s.clear()
+
+ # returns noValue
+ s.getComponentByPosition(0, instantiate=False)
+ """
+ try:
+ if self._componentValues is noValue:
+ componentValue = noValue
+
+ else:
+ componentValue = self._componentValues[idx]
+
+ except IndexError:
+ componentValue = noValue
+
+ if not instantiate:
+ if componentValue is noValue or not componentValue.isValue:
+ return default
+ else:
+ return componentValue
+
+ if componentValue is noValue:
+ self.setComponentByPosition(idx)
+
+ componentValue = self._componentValues[idx]
+
+ if default is noValue or componentValue.isValue:
+ return componentValue
+ else:
+ return default
+
+ def setComponentByPosition(self, idx, value=noValue,
+ verifyConstraints=True,
+ matchTags=True,
+ matchConstraints=True):
+ """Assign |ASN.1| type component by position.
+
+ Equivalent to Python sequence item assignment operation (e.g. `[]`).
+
+ Parameters
+ ----------
+ idx : :class:`int`
+ Component index (zero-based). Must either refer to existing
+ component (if *componentType* is set) or to N+1 component
+ otherwise. In the latter case a new component of given ASN.1
+ type gets instantiated and appended to |ASN.1| sequence.
+
+ Keyword Args
+ ------------
+ value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+ A Python value to initialize |ASN.1| component with (if *componentType* is set)
+ or ASN.1 value object to assign to |ASN.1| component.
+ If `value` is not given, schema object will be set as a component.
+
+ verifyConstraints : :class:`bool`
+ If :obj:`False`, skip constraints validation
+
+ matchTags: :class:`bool`
+ If :obj:`False`, skip component tags matching
+
+ matchConstraints: :class:`bool`
+ If :obj:`False`, skip component constraints matching
+
+ Returns
+ -------
+ self
+ """
+ componentType = self.componentType
+ componentTypeLen = self._componentTypeLen
+
+ if self._componentValues is noValue:
+ componentValues = []
+
+ else:
+ componentValues = self._componentValues
+
+ try:
+ currentValue = componentValues[idx]
+
+ except IndexError:
+ currentValue = noValue
+ if componentTypeLen:
+ if componentTypeLen < idx:
+ raise error.PyAsn1Error('component index out of range')
+
+ componentValues = [noValue] * componentTypeLen
+
+ if value is noValue:
+ if componentTypeLen:
+ value = componentType.getTypeByPosition(idx)
+ if isinstance(value, base.ConstructedAsn1Type):
+ value = value.clone(cloneValueFlag=componentType[idx].isDefaulted)
+
+ elif currentValue is noValue:
+ raise error.PyAsn1Error('Component type not defined')
+
+ elif not isinstance(value, base.Asn1Item):
+ if componentTypeLen:
+ subComponentType = componentType.getTypeByPosition(idx)
+ if isinstance(subComponentType, base.SimpleAsn1Type):
+ value = subComponentType.clone(value=value)
+
+ else:
+ raise error.PyAsn1Error('%s can cast only scalar values' % componentType.__class__.__name__)
+
+ elif currentValue is not noValue and isinstance(currentValue, base.SimpleAsn1Type):
+ value = currentValue.clone(value=value)
+
+ else:
+ raise error.PyAsn1Error('%s undefined component type' % componentType.__class__.__name__)
+
+ elif ((verifyConstraints or matchTags or matchConstraints) and
+ componentTypeLen):
+ subComponentType = componentType.getTypeByPosition(idx)
+ if subComponentType is not noValue:
+ subtypeChecker = (self.strictConstraints and
+ subComponentType.isSameTypeWith or
+ subComponentType.isSuperTypeOf)
+
+ if not subtypeChecker(value, verifyConstraints and matchTags,
+ verifyConstraints and matchConstraints):
+ if not componentType[idx].openType:
+ raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType))
+
+ if componentTypeLen or idx in self._dynamicNames:
+ componentValues[idx] = value
+
+ elif len(componentValues) == idx:
+ componentValues.append(value)
+ self._dynamicNames.addField(idx)
+
+ else:
+ raise error.PyAsn1Error('Component index out of range')
+
+ self._componentValues = componentValues
+
+ return self
+
+ @property
+ def isValue(self):
+ """Indicate that |ASN.1| object represents ASN.1 value.
+
+ If *isValue* is :obj:`False` then this object represents just ASN.1 schema.
+
+ If *isValue* is :obj:`True` then, in addition to its ASN.1 schema features,
+ this object can also be used like a Python built-in object (e.g.
+ :class:`int`, :class:`str`, :class:`dict` etc.).
+
+ Returns
+ -------
+ : :class:`bool`
+ :obj:`False` if object represents just ASN.1 schema.
+ :obj:`True` if object represents ASN.1 schema and can be used as a
+ normal value.
+
+ Note
+ ----
+ There is an important distinction between PyASN1 schema and value objects.
+ The PyASN1 schema objects can only participate in ASN.1 schema-related
+ operations (e.g. defining or testing the structure of the data). The most
+ obvious use of ASN.1 schema is to guide serialisation codecs whilst
+ encoding/decoding serialised ASN.1 contents.
+
+ The PyASN1 value objects can **additionally** participate in many operations
+ involving regular Python objects (e.g. arithmetic, comprehension etc).
+
+ It is sufficient for an |ASN.1| object to have all of its non-optional and
+ non-defaulted components be value objects for the object to be considered
+ a value object as a whole. In other words, even with one or more optional
+ components not turned into value objects, the |ASN.1| object is still
+ considered a value object. Defaulted components are normally value objects
+ by default.
+ """
+ if self._componentValues is noValue:
+ return False
+
+ componentType = self.componentType
+
+ if componentType:
+ for idx, subComponentType in enumerate(componentType.namedTypes):
+ if subComponentType.isDefaulted or subComponentType.isOptional:
+ continue
+
+ if not self._componentValues:
+ return False
+
+ componentValue = self._componentValues[idx]
+ if componentValue is noValue or not componentValue.isValue:
+ return False
+
+ else:
+ for componentValue in self._componentValues:
+ if componentValue is noValue or not componentValue.isValue:
+ return False
+
+ return True
+
+ @property
+ def isInconsistent(self):
+ """Run necessary checks to ensure |ASN.1| object consistency.
+
+ Default action is to verify |ASN.1| object against constraints imposed
+ by `subtypeSpec`.
+
+ Raises
+ ------
+ :py:class:`~pyasn1.error.PyAsn1Error` on any inconsistencies found
+ """
+ if self.componentType is noValue or not self.subtypeSpec:
+ return False
+
+ if self._componentValues is noValue:
+ return True
+
+ mapping = {}
+
+ for idx, value in enumerate(self._componentValues):
+ # Absent fields are not in the mapping
+ if value is noValue:
+ continue
+
+ name = self.componentType.getNameByPosition(idx)
+
+ mapping[name] = value
+
+ try:
+ # Represent Sequence/Set as a bare dict to constraints chain
+ self.subtypeSpec(mapping)
+
+ except error.PyAsn1Error:
+ exc = sys.exc_info()[1]
+ return exc
+
+ return False
+
+ def prettyPrint(self, scope=0):
+ """Return an object representation string.
+
+ Returns
+ -------
+ : :class:`str`
+ Human-friendly object representation.
+ """
+ scope += 1
+ representation = self.__class__.__name__ + ':\n'
+ for idx, componentValue in enumerate(self._componentValues):
+ if componentValue is not noValue and componentValue.isValue:
+ representation += ' ' * scope
+ if self.componentType:
+ representation += self.componentType.getNameByPosition(idx)
+ else:
+ representation += self._dynamicNames.getNameByPosition(idx)
+ representation = '%s=%s\n' % (
+ representation, componentValue.prettyPrint(scope)
+ )
+ return representation
+
+ def prettyPrintType(self, scope=0):
+ scope += 1
+ representation = '%s -> %s {\n' % (self.tagSet, self.__class__.__name__)
+ for idx, componentType in enumerate(self.componentType.values() or self._componentValues):
+ representation += ' ' * scope
+ if self.componentType:
+ representation += '"%s"' % self.componentType.getNameByPosition(idx)
+ else:
+ representation += '"%s"' % self._dynamicNames.getNameByPosition(idx)
+ representation = '%s = %s\n' % (
+ representation, componentType.prettyPrintType(scope)
+ )
+ return representation + '\n' + ' ' * (scope - 1) + '}'
+
+ # backward compatibility
+
+ def setDefaultComponents(self):
+ return self
+
+ def getComponentType(self):
+ if self._componentTypeLen:
+ return self.componentType
+
+ def getNameByPosition(self, idx):
+ if self._componentTypeLen:
+ return self.componentType[idx].name
+
+
+class Sequence(SequenceAndSetBase):
+ __doc__ = SequenceAndSetBase.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
+ )
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
+ #: imposing constraints on |ASN.1| type initialization values.
+ subtypeSpec = constraint.ConstraintsIntersection()
+
+ #: Default collection of ASN.1 types of component (e.g. :py:class:`~pyasn1.type.namedtype.NamedType`)
+ #: object representing named ASN.1 types allowed within |ASN.1| type
+ componentType = namedtype.NamedTypes()
+
+ # Disambiguation ASN.1 types identification
+ typeId = SequenceAndSetBase.getTypeId()
+
+ # backward compatibility
+
+ def getComponentTagMapNearPosition(self, idx):
+ if self.componentType:
+ return self.componentType.getTagMapNearPosition(idx)
+
+ def getComponentPositionNearType(self, tagSet, idx):
+ if self.componentType:
+ return self.componentType.getPositionNearType(tagSet, idx)
+ else:
+ return idx
+
+
+class Set(SequenceAndSetBase):
+ __doc__ = SequenceAndSetBase.__doc__
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
+ )
+
+ #: Default collection of ASN.1 types of component (e.g. :py:class:`~pyasn1.type.namedtype.NamedType`)
+ #: object representing ASN.1 type allowed within |ASN.1| type
+ componentType = namedtype.NamedTypes()
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
+ #: imposing constraints on |ASN.1| type initialization values.
+ subtypeSpec = constraint.ConstraintsIntersection()
+
+ # Disambiguation ASN.1 types identification
+ typeId = SequenceAndSetBase.getTypeId()
+
+ def getComponent(self, innerFlag=False):
+ return self
+
+ def getComponentByType(self, tagSet, default=noValue,
+ instantiate=True, innerFlag=False):
+ """Returns |ASN.1| type component by ASN.1 tag.
+
+ Parameters
+ ----------
+ tagSet : :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing ASN.1 tags to identify one of
+ |ASN.1| object component
+
+ Keyword Args
+ ------------
+ default: :class:`object`
+ If set and requested component is a schema object, return the `default`
+ object instead of the requested component.
+
+ instantiate: :class:`bool`
+ If :obj:`True` (default), inner component will be automatically
+ instantiated.
+ If :obj:`False` either existing component or the :class:`noValue`
+ object will be returned.
+
+ Returns
+ -------
+ : :py:class:`~pyasn1.type.base.PyAsn1Item`
+ a pyasn1 object
+ """
+ componentValue = self.getComponentByPosition(
+ self.componentType.getPositionByType(tagSet),
+ default=default, instantiate=instantiate
+ )
+ if innerFlag and isinstance(componentValue, Set):
+ # get inner component by inner tagSet
+ return componentValue.getComponent(innerFlag=True)
+ else:
+ # get outer component by inner tagSet
+ return componentValue
+
+ def setComponentByType(self, tagSet, value=noValue,
+ verifyConstraints=True,
+ matchTags=True,
+ matchConstraints=True,
+ innerFlag=False):
+ """Assign |ASN.1| type component by ASN.1 tag.
+
+ Parameters
+ ----------
+ tagSet : :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing ASN.1 tags to identify one of
+ |ASN.1| object component
+
+ Keyword Args
+ ------------
+ value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+ A Python value to initialize |ASN.1| component with (if *componentType* is set)
+ or ASN.1 value object to assign to |ASN.1| component.
+ If `value` is not given, schema object will be set as a component.
+
+ verifyConstraints : :class:`bool`
+ If :obj:`False`, skip constraints validation
+
+ matchTags: :class:`bool`
+ If :obj:`False`, skip component tags matching
+
+ matchConstraints: :class:`bool`
+ If :obj:`False`, skip component constraints matching
+
+ innerFlag: :class:`bool`
+ If :obj:`True`, search for matching *tagSet* recursively.
+
+ Returns
+ -------
+ self
+ """
+ idx = self.componentType.getPositionByType(tagSet)
+
+ if innerFlag: # set inner component by inner tagSet
+ componentType = self.componentType.getTypeByPosition(idx)
+
+ if componentType.tagSet:
+ return self.setComponentByPosition(
+ idx, value, verifyConstraints, matchTags, matchConstraints
+ )
+ else:
+ componentType = self.getComponentByPosition(idx)
+ return componentType.setComponentByType(
+ tagSet, value, verifyConstraints, matchTags, matchConstraints, innerFlag=innerFlag
+ )
+ else: # set outer component by inner tagSet
+ return self.setComponentByPosition(
+ idx, value, verifyConstraints, matchTags, matchConstraints
+ )
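+
+ # Editor's note (illustrative, not upstream): components can also be addressed
+ # by ASN.1 tag, assuming `s` is a Set whose componentType holds exactly one
+ # INTEGER field:
+ #
+ #   s.setComponentByType(Integer.tagSet, 123)
+ #   s.getComponentByType(Integer.tagSet)   # -> Integer(123)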
+
+ @property
+ def componentTagMap(self):
+ if self.componentType:
+ return self.componentType.tagMapUnique
+
+
+class Choice(Set):
+ """Create |ASN.1| schema or value object.
+
+ |ASN.1| class is based on :class:`~pyasn1.type.base.ConstructedAsn1Type`,
+ its objects are mutable and duck-type Python :class:`list` objects.
+
+ Keyword Args
+ ------------
+ componentType: :py:class:`~pyasn1.type.namedtype.NamedType`
+ Object holding named ASN.1 types allowed within this collection
+
+ tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing non-default ASN.1 tag(s)
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Object representing non-default ASN.1 subtype constraint(s). Constraints
+ verification for |ASN.1| type can only occur on explicit
+ `.isInconsistent` call.
+
+ Examples
+ --------
+
+ .. code-block:: python
+
+ class Afters(Choice):
+ '''
+ ASN.1 specification:
+
+ Afters ::= CHOICE {
+ cheese [0] IA5String,
+ dessert [1] IA5String
+ }
+ '''
+ componentType = NamedTypes(
+ NamedType('cheese', IA5String().subtype(
+ implicitTag=Tag(tagClassContext, tagFormatSimple, 0)
+ )),
+ NamedType('dessert', IA5String().subtype(
+ implicitTag=Tag(tagClassContext, tagFormatSimple, 1)
+ ))
+ )
+
+ afters = Afters()
+ afters['cheese'] = 'Mascarpone'
+ """
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.TagSet() # untagged
+
+ #: Default collection of ASN.1 types of component (e.g. :py:class:`~pyasn1.type.namedtype.NamedType`)
+ #: object representing ASN.1 type allowed within |ASN.1| type
+ componentType = namedtype.NamedTypes()
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
+ #: imposing constraints on |ASN.1| type initialization values.
+ subtypeSpec = constraint.ConstraintsIntersection(
+ constraint.ValueSizeConstraint(1, 1)
+ )
+
+ # Disambiguation ASN.1 types identification
+ typeId = Set.getTypeId()
+
+ _currentIdx = None
+
+ def __eq__(self, other):
+ if self._componentValues:
+ return self._componentValues[self._currentIdx] == other
+ return NotImplemented
+
+ def __ne__(self, other):
+ if self._componentValues:
+ return self._componentValues[self._currentIdx] != other
+ return NotImplemented
+
+ def __lt__(self, other):
+ if self._componentValues:
+ return self._componentValues[self._currentIdx] < other
+ return NotImplemented
+
+ def __le__(self, other):
+ if self._componentValues:
+ return self._componentValues[self._currentIdx] <= other
+ return NotImplemented
+
+ def __gt__(self, other):
+ if self._componentValues:
+ return self._componentValues[self._currentIdx] > other
+ return NotImplemented
+
+ def __ge__(self, other):
+ if self._componentValues:
+ return self._componentValues[self._currentIdx] >= other
+ return NotImplemented
+
+ if sys.version_info[0] <= 2:
+ def __nonzero__(self):
+ return self._componentValues and True or False
+ else:
+ def __bool__(self):
+ return self._componentValues and True or False
+
+ def __len__(self):
+ return self._currentIdx is not None and 1 or 0
+
+ def __contains__(self, key):
+ if self._currentIdx is None:
+ return False
+ return key == self.componentType[self._currentIdx].getName()
+
+ def __iter__(self):
+ if self._currentIdx is None:
+ # PEP 479: raising StopIteration inside a generator becomes a
+ # RuntimeError on Python 3.7+, so just end the generator
+ return
+ yield self.componentType[self._currentIdx].getName()
+
+ # Python dict protocol
+
+ def values(self):
+ if self._currentIdx is not None:
+ yield self._componentValues[self._currentIdx]
+
+ def keys(self):
+ if self._currentIdx is not None:
+ yield self.componentType[self._currentIdx].getName()
+
+ def items(self):
+ if self._currentIdx is not None:
+ yield self.componentType[self._currentIdx].getName(), self[self._currentIdx]
+
+ def checkConsistency(self):
+ if self._currentIdx is None:
+ raise error.PyAsn1Error('Component not chosen')
+
+ def _cloneComponentValues(self, myClone, cloneValueFlag):
+ try:
+ component = self.getComponent()
+ except error.PyAsn1Error:
+ pass
+ else:
+ if isinstance(component, Choice):
+ tagSet = component.effectiveTagSet
+ else:
+ tagSet = component.tagSet
+ if isinstance(component, base.ConstructedAsn1Type):
+ myClone.setComponentByType(
+ tagSet, component.clone(cloneValueFlag=cloneValueFlag)
+ )
+ else:
+ myClone.setComponentByType(tagSet, component.clone())
+
+ def getComponentByPosition(self, idx, default=noValue, instantiate=True):
+ __doc__ = Set.__doc__
+
+ if self._currentIdx is None or self._currentIdx != idx:
+ return Set.getComponentByPosition(self, idx, default=default,
+ instantiate=instantiate)
+
+ return self._componentValues[idx]
+
+ def setComponentByPosition(self, idx, value=noValue,
+ verifyConstraints=True,
+ matchTags=True,
+ matchConstraints=True):
+ """Assign |ASN.1| type component by position.
+
+ Equivalent to Python sequence item assignment operation (e.g. `[]`).
+
+ Parameters
+ ----------
+ idx: :class:`int`
+ Component index (zero-based). Must either refer to existing
+ component or to N+1 component. In the latter case a new component
+ type gets instantiated (if *componentType* is set, or given ASN.1
+ object is taken otherwise) and appended to the |ASN.1| sequence.
+
+ Keyword Args
+ ------------
+ value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+ A Python value to initialize |ASN.1| component with (if *componentType* is set)
+ or ASN.1 value object to assign to |ASN.1| component. Once a new value is
+ set to *idx* component, previous value is dropped.
+ If `value` is not given, schema object will be set as a component.
+
+ verifyConstraints : :class:`bool`
+ If :obj:`False`, skip constraints validation
+
+ matchTags: :class:`bool`
+ If :obj:`False`, skip component tags matching
+
+ matchConstraints: :class:`bool`
+ If :obj:`False`, skip component constraints matching
+
+ Returns
+ -------
+ self
+ """
+ oldIdx = self._currentIdx
+ Set.setComponentByPosition(self, idx, value, verifyConstraints, matchTags, matchConstraints)
+ self._currentIdx = idx
+ if oldIdx is not None and oldIdx != idx:
+ self._componentValues[oldIdx] = noValue
+ return self
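+
+ # Editor's note (illustrative, not upstream): a Choice holds at most one
+ # alternative; assigning a different one drops the previous value. Using the
+ # Afters type from the class docstring above:
+ #
+ #   afters = Afters()
+ #   afters['cheese'] = 'Brie'
+ #   afters['dessert'] = 'Cake'
+ #   afters.getName()      # -> 'dessert'
+ #   'cheese' in afters    # -> False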
+
+ @property
+ def effectiveTagSet(self):
+ """Return a :class:`~pyasn1.type.tag.TagSet` object of the currently initialized component or self (if |ASN.1| is tagged)."""
+ if self.tagSet:
+ return self.tagSet
+ else:
+ component = self.getComponent()
+ return component.effectiveTagSet
+
+ @property
+ def tagMap(self):
+ """"Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping
+ ASN.1 tags to ASN.1 objects contained within callee.
+ """
+ if self.tagSet:
+ return Set.tagMap.fget(self)
+ else:
+ return self.componentType.tagMapUnique
+
+ def getComponent(self, innerFlag=False):
+ """Return currently assigned component of the |ASN.1| object.
+
+ Returns
+ -------
+ : :py:class:`~pyasn1.type.base.PyAsn1Item`
+ a PyASN1 object
+ """
+ if self._currentIdx is None:
+ raise error.PyAsn1Error('Component not chosen')
+ else:
+ c = self._componentValues[self._currentIdx]
+ if innerFlag and isinstance(c, Choice):
+ return c.getComponent(innerFlag)
+ else:
+ return c
+
+ def getName(self, innerFlag=False):
+ """Return the name of currently assigned component of the |ASN.1| object.
+
+ Returns
+ -------
+ : :py:class:`str`
+ |ASN.1| component name
+ """
+ if self._currentIdx is None:
+ raise error.PyAsn1Error('Component not chosen')
+ else:
+ if innerFlag:
+ c = self._componentValues[self._currentIdx]
+ if isinstance(c, Choice):
+ return c.getName(innerFlag)
+ return self.componentType.getNameByPosition(self._currentIdx)
+
+ @property
+ def isValue(self):
+ """Indicate that |ASN.1| object represents ASN.1 value.
+
+ If *isValue* is :obj:`False` then this object represents just ASN.1 schema.
+
+ If *isValue* is :obj:`True` then, in addition to its ASN.1 schema features,
+ this object can also be used like a Python built-in object (e.g.
+ :class:`int`, :class:`str`, :class:`dict` etc.).
+
+ Returns
+ -------
+ : :class:`bool`
+ :obj:`False` if object represents just ASN.1 schema.
+ :obj:`True` if object represents ASN.1 schema and can be used as a normal
+ value.
+
+ Note
+ ----
+ There is an important distinction between PyASN1 schema and value objects.
+ The PyASN1 schema objects can only participate in ASN.1 schema-related
+ operations (e.g. defining or testing the structure of the data). The most
+ obvious use of ASN.1 schema is to guide serialisation codecs whilst
+ encoding/decoding serialised ASN.1 contents.
+
+ The PyASN1 value objects can **additionally** participate in many operations
+ involving regular Python objects (e.g. arithmetic, comprehension etc).
+ """
+ if self._currentIdx is None:
+ return False
+
+ componentValue = self._componentValues[self._currentIdx]
+
+ return componentValue is not noValue and componentValue.isValue
+
+ def clear(self):
+ self._currentIdx = None
+ return Set.clear(self)
+
+ # compatibility stubs
+
+ def getMinTagSet(self):
+ return self.minTagSet
+
+
+class Any(OctetString):
+ """Create |ASN.1| schema or value object.
+
+ |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`,
+ its objects are immutable and duck-type Python 2 :class:`str` or Python 3
+ :class:`bytes`. When used in Unicode context, |ASN.1| type assumes
+ "|encoding|" serialisation.
+
+ Keyword Args
+ ------------
+ value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
+ :class:`str` (Python 2) or :class:`bytes` (Python 3), alternatively
+ :class:`unicode` object (Python 2) or :class:`str` (Python 3)
+ representing character string to be serialised into octets (note
+ `encoding` parameter) or |ASN.1| object.
+ If `value` is not given, schema object will be created.
+
+ tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+ Object representing non-default ASN.1 tag(s)
+
+ subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+ Object representing non-default ASN.1 subtype constraint(s). Constraints
+ verification for |ASN.1| type occurs automatically on object
+ instantiation.
+
+ encoding: :py:class:`str`
+ Unicode codec ID to encode/decode :class:`unicode` (Python 2) or
+ :class:`str` (Python 3) the payload when |ASN.1| object is used
+ in text string context.
+
+ binValue: :py:class:`str`
+ Binary string initializer to use instead of the *value*.
+ Example: '10110011'.
+
+ hexValue: :py:class:`str`
+ Hexadecimal string initializer to use instead of the *value*.
+ Example: 'DEADBEEF'.
+
+ Raises
+ ------
+ ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
+ On constraint violation or bad initializer.
+
+ Examples
+ --------
+ .. code-block:: python
+
+ class Error(Sequence):
+ '''
+ ASN.1 specification:
+
+ Error ::= SEQUENCE {
+ code INTEGER,
+ parameter ANY DEFINED BY code -- Either INTEGER or REAL
+ }
+ '''
+ componentType=NamedTypes(
+ NamedType('code', Integer()),
+ NamedType('parameter', Any(),
+ openType=OpenType('code', {1: Integer(),
+ 2: Real()}))
+ )
+
+ error = Error()
+ error['code'] = 1
+ error['parameter'] = Integer(1234)
+ """
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
+ #: associated with |ASN.1| type.
+ tagSet = tag.TagSet() # untagged
+
+ #: Set (on class, not on instance) or return a
+ #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object
+ #: imposing constraints on |ASN.1| type initialization values.
+ subtypeSpec = constraint.ConstraintsIntersection()
+
+ # Disambiguation ASN.1 types identification
+ typeId = OctetString.getTypeId()
+
+ @property
+ def tagMap(self):
+ """"Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping
+ ASN.1 tags to ASN.1 objects contained within callee.
+ """
+ try:
+ return self._tagMap
+
+ except AttributeError:
+ self._tagMap = tagmap.TagMap(
+ {self.tagSet: self},
+ {eoo.endOfOctets.tagSet: eoo.endOfOctets},
+ self
+ )
+
+ return self._tagMap
+
+# XXX
+# coercion rules?
diff --git a/third_party/python/pyasn1/pyasn1/type/useful.py b/third_party/python/pyasn1/pyasn1/type/useful.py
new file mode 100644
index 0000000000..7536b95cee
--- /dev/null
+++ b/third_party/python/pyasn1/pyasn1/type/useful.py
@@ -0,0 +1,191 @@
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+import datetime
+
+from pyasn1 import error
+from pyasn1.compat import dateandtime
+from pyasn1.compat import string
+from pyasn1.type import char
+from pyasn1.type import tag
+from pyasn1.type import univ
+
+__all__ = ['ObjectDescriptor', 'GeneralizedTime', 'UTCTime']
+
+NoValue = univ.NoValue
+noValue = univ.noValue
+
+
+class ObjectDescriptor(char.GraphicString):
+ __doc__ = char.GraphicString.__doc__
+
+ #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
+ tagSet = char.GraphicString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7)
+ )
+
+ # Optimization for faster codec lookup
+ typeId = char.GraphicString.getTypeId()
+
+
+class TimeMixIn(object):
+
+ _yearsDigits = 4
+ _hasSubsecond = False
+ _optionalMinutes = False
+ _shortTZ = False
+
+ class FixedOffset(datetime.tzinfo):
+ """Fixed offset in minutes east from UTC."""
+
+ # defaulted arguments required
+ # https://docs.python.org/2.3/lib/datetime-tzinfo.html
+ def __init__(self, offset=0, name='UTC'):
+ self.__offset = datetime.timedelta(minutes=offset)
+ self.__name = name
+
+ def utcoffset(self, dt):
+ return self.__offset
+
+ def tzname(self, dt):
+ return self.__name
+
+ def dst(self, dt):
+ return datetime.timedelta(0)
+
+ UTC = FixedOffset()
+
+ @property
+ def asDateTime(self):
+ """Create :py:class:`datetime.datetime` object from a |ASN.1| object.
+
+ Returns
+ -------
+ :
+ new instance of :py:class:`datetime.datetime` object
+ """
+ text = str(self)
+ if text.endswith('Z'):
+ tzinfo = TimeMixIn.UTC
+ text = text[:-1]
+
+ elif '-' in text or '+' in text:
+ if '+' in text:
+ text, plusminus, tz = string.partition(text, '+')
+ else:
+ text, plusminus, tz = string.partition(text, '-')
+
+ if self._shortTZ and len(tz) == 2:
+ tz += '00'
+
+ if len(tz) != 4:
+ raise error.PyAsn1Error('malformed time zone offset %s' % tz)
+
+ try:
+ minutes = int(tz[:2]) * 60 + int(tz[2:])
+ if plusminus == '-':
+ minutes *= -1
+
+ except ValueError:
+ raise error.PyAsn1Error('unknown time specification %s' % self)
+
+ tzinfo = TimeMixIn.FixedOffset(minutes, '?')
+
+ else:
+ tzinfo = None
+
+ if '.' in text or ',' in text:
+ if '.' in text:
+ text, _, ms = string.partition(text, '.')
+ else:
+ text, _, ms = string.partition(text, ',')
+
+ try:
+ ms = int(ms) * 1000
+
+ except ValueError:
+ raise error.PyAsn1Error('bad sub-second time specification %s' % self)
+
+ else:
+ ms = 0
+
+ if self._optionalMinutes and len(text) - self._yearsDigits == 6:
+ text += '0000'
+ elif len(text) - self._yearsDigits == 8:
+ text += '00'
+
+ try:
+ dt = dateandtime.strptime(text, self._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S')
+
+ except ValueError:
+ raise error.PyAsn1Error('malformed datetime format %s' % self)
+
+ return dt.replace(microsecond=ms, tzinfo=tzinfo)
+
+ @classmethod
+ def fromDateTime(cls, dt):
+ """Create |ASN.1| object from a :py:class:`datetime.datetime` object.
+
+ Parameters
+ ----------
+ dt: :py:class:`datetime.datetime` object
+ The `datetime.datetime` object to initialize the |ASN.1| object
+ from
+
+ Returns
+ -------
+ :
+ new instance of |ASN.1| value
+ """
+ text = dt.strftime(cls._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S')
+ if cls._hasSubsecond:
+ text += '.%d' % (dt.microsecond // 1000)
+
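+ # NOTE: timedelta.seconds is never negative and the remainder below is
+ # expressed in seconds, so only non-negative, whole-hour UTC offsets are
+ # rendered faithfully here; other offsets come out skewed.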
+ if dt.utcoffset():
+ seconds = dt.utcoffset().seconds
+ if seconds < 0:
+ text += '-'
+ else:
+ text += '+'
+ text += '%.2d%.2d' % (seconds // 3600, seconds % 3600)
+ else:
+ text += 'Z'
+
+ return cls(text)
+
+
+class GeneralizedTime(char.VisibleString, TimeMixIn):
+ __doc__ = char.VisibleString.__doc__
+
+ #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
+ tagSet = char.VisibleString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24)
+ )
+
+ # Optimization for faster codec lookup
+ typeId = char.VideotexString.getTypeId()
+
+ _yearsDigits = 4
+ _hasSubsecond = True
+ _optionalMinutes = True
+ _shortTZ = True
+
+
+class UTCTime(char.VisibleString, TimeMixIn):
+ __doc__ = char.VisibleString.__doc__
+
+ #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
+ tagSet = char.VisibleString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 23)
+ )
+
+ # Optimization for faster codec lookup
+ typeId = char.VideotexString.getTypeId()
+
+ _yearsDigits = 2
+ _hasSubsecond = False
+ _optionalMinutes = False
+ _shortTZ = False
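A minimal round-trip sketch for the conversion helpers above, assuming only
``fromDateTime``/``asDateTime`` and the ``TimeMixIn.UTC`` tzinfo defined in this file:

.. code-block:: python

    import datetime
    from pyasn1.type import useful

    dt = datetime.datetime(2020, 4, 28, 14, 29, 10, tzinfo=useful.TimeMixIn.UTC)

    gt = useful.GeneralizedTime.fromDateTime(dt)
    # GeneralizedTime keeps sub-seconds and a 'Z' suffix, e.g. '20200428142910.0Z'
    assert gt.asDateTime == dt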
diff --git a/third_party/python/pyasn1/setup.cfg b/third_party/python/pyasn1/setup.cfg
new file mode 100644
index 0000000000..b998b2a06f
--- /dev/null
+++ b/third_party/python/pyasn1/setup.cfg
@@ -0,0 +1,10 @@
+[bdist_wheel]
+universal = 1
+
+[metadata]
+license_file = LICENSE.rst
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/pyasn1/setup.py b/third_party/python/pyasn1/setup.py
new file mode 100644
index 0000000000..fa0a876e78
--- /dev/null
+++ b/third_party/python/pyasn1/setup.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+#
+# This file is part of pyasn1 software.
+#
+# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
+# License: http://snmplabs.com/pyasn1/license.html
+#
+import os
+import sys
+
+classifiers = """\
+Development Status :: 5 - Production/Stable
+Environment :: Console
+Intended Audience :: Developers
+Intended Audience :: Education
+Intended Audience :: Information Technology
+Intended Audience :: System Administrators
+Intended Audience :: Telecommunications Industry
+License :: OSI Approved :: BSD License
+Natural Language :: English
+Operating System :: OS Independent
+Programming Language :: Python :: 2
+Programming Language :: Python :: 2.4
+Programming Language :: Python :: 2.5
+Programming Language :: Python :: 2.6
+Programming Language :: Python :: 2.7
+Programming Language :: Python :: 3
+Programming Language :: Python :: 3.2
+Programming Language :: Python :: 3.3
+Programming Language :: Python :: 3.4
+Programming Language :: Python :: 3.5
+Programming Language :: Python :: 3.6
+Programming Language :: Python :: 3.7
+Topic :: Communications
+Topic :: Software Development :: Libraries :: Python Modules
+"""
+
+
+def howto_install_setuptools():
+ print("""
+ Error: You need setuptools Python package!
+
+ It's very easy to install it, just type:
+
+ wget https://bootstrap.pypa.io/ez_setup.py
+ python ez_setup.py
+
+ Then you could make eggs from this package.
+""")
+
+
+if sys.version_info[:2] < (2, 4):
+ print("ERROR: this package requires Python 2.4 or later!")
+ sys.exit(1)
+
+try:
+ from setuptools import setup, Command
+
+ params = {
+ 'zip_safe': True
+ }
+
+except ImportError:
+ for arg in sys.argv:
+ if 'egg' in arg:
+ howto_install_setuptools()
+ sys.exit(1)
+ from distutils.core import setup, Command
+
+ params = {}
+
+params.update({
+ 'name': 'pyasn1',
+ 'version': open(os.path.join('pyasn1', '__init__.py')).read().split('\'')[1],
+ 'description': 'ASN.1 types and codecs',
+ 'long_description': 'Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)',
+ 'maintainer': 'Ilya Etingof <etingof@gmail.com>',
+ 'author': 'Ilya Etingof',
+ 'author_email': 'etingof@gmail.com',
+ 'url': 'https://github.com/etingof/pyasn1',
+ 'platforms': ['any'],
+ 'classifiers': [x for x in classifiers.split('\n') if x],
+ 'license': 'BSD',
+ 'packages': ['pyasn1',
+ 'pyasn1.type',
+ 'pyasn1.compat',
+ 'pyasn1.codec',
+ 'pyasn1.codec.ber',
+ 'pyasn1.codec.cer',
+ 'pyasn1.codec.der',
+ 'pyasn1.codec.native']})
+
+# handle unittest discovery feature
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+class PyTest(Command):
+ user_options = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ suite = unittest.TestLoader().loadTestsFromNames(
+ ['tests.__main__.suite']
+ )
+
+ unittest.TextTestRunner(verbosity=2).run(suite)
+
+params['cmdclass'] = {
+ 'test': PyTest,
+ 'tests': PyTest,
+}
+
+setup(**params)
diff --git a/third_party/python/pyflakes/AUTHORS b/third_party/python/pyflakes/AUTHORS
new file mode 100644
index 0000000000..85b1e617e1
--- /dev/null
+++ b/third_party/python/pyflakes/AUTHORS
@@ -0,0 +1,22 @@
+Contributors
+------------
+
+* Phil Frost - Former Divmod Team
+* Moe Aboulkheir - Former Divmod Team
+* Jean-Paul Calderone - Former Divmod Team
+* Glyph Lefkowitz - Former Divmod Team
+* Tristan Seligmann
+* Jonathan Lange
+* Georg Brandl
+* Ronny Pfannschmidt
+* Virgil Dupras
+* Kevin Watters
+* Ian Cordasco
+* Florent Xicluna
+* Domen Kožar
+* Marcin Cieślak
+* Steven Myint
+* Ignas Mikalajūnas
+
+See also the contributors list on GitHub:
+https://github.com/PyCQA/pyflakes/graphs/contributors
diff --git a/third_party/python/pyflakes/LICENSE b/third_party/python/pyflakes/LICENSE
new file mode 100644
index 0000000000..e4d553ac98
--- /dev/null
+++ b/third_party/python/pyflakes/LICENSE
@@ -0,0 +1,21 @@
+Copyright 2005-2011 Divmod, Inc.
+Copyright 2013-2014 Florent Xicluna
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/third_party/python/pyflakes/MANIFEST.in b/third_party/python/pyflakes/MANIFEST.in
new file mode 100644
index 0000000000..6bc4f26782
--- /dev/null
+++ b/third_party/python/pyflakes/MANIFEST.in
@@ -0,0 +1,3 @@
+include README.rst NEWS.rst
+include AUTHORS LICENSE
+include bin/pyflakes
diff --git a/third_party/python/pyflakes/NEWS.rst b/third_party/python/pyflakes/NEWS.rst
new file mode 100644
index 0000000000..50a39623ee
--- /dev/null
+++ b/third_party/python/pyflakes/NEWS.rst
@@ -0,0 +1,266 @@
+2.2.0 (2020-04-08)
+
+- Include column information in error messages
+- Fix ``@overload`` detection with other decorators and in non-global scopes
+- Fix return-type annotation being a class member
+- Fix assignment to ``_`` in doctests with existing ``_`` name
+- Namespace attributes which are attached to ast nodes with ``_pyflakes_`` to
+ avoid conflicts with other libraries (notably bandit)
+- Add check for f-strings without placeholders
+- Add check for unused/extra/invalid ``'string literal'.format(...)``
+- Add check for unused/extra/invalid ``'string literal' % ...``
+- Improve python shebang detection
+- Allow type ignore to be followed by a code ``# type: ignore[attr-defined]``
+- Add support for assignment expressions (PEP 572)
+- Support ``@overload`` detection from ``typing_extensions`` as well
+- Fix ``@overload`` detection for async functions
+- Allow ``continue`` inside ``finally`` in python 3.8+
+- Fix handling of annotations in positional-only arguments
+- Make pyflakes more resistant to future syntax additions
+- Fix false positives in partially quoted type annotations
+- Warn about ``is`` comparison to tuples
+- Fix ``Checker`` usage with async function subtrees
+- Add check for ``if`` of non-empty tuple
+- Switch from ``optparse`` to ``argparse``
+- Fix false positives in partially quoted type annotations in unusual contexts
+- Be more cautious when identifying ``Literal`` type expressions
+
+2.1.1 (2019-02-28)
+
+- Fix reported line number for type comment errors
+- Fix typing.overload check to only check imported names
+
+2.1.0 (2019-01-23)
+
+- Allow intentional assignment to variables named ``_``
+- Recognize ``__module__`` as a valid name in class scope
+- ``pyflakes.checker.Checker`` supports checking of partial ``ast`` trees
+- Detect assign-before-use for local variables which shadow builtin names
+- Detect invalid ``print`` syntax using ``>>`` operator
+- Treat ``async for`` the same as a ``for`` loop for introducing variables
+- Add detection for list concatenation in ``__all__``
+- Exempt ``@typing.overload`` from duplicate function declaration
+- Importing a submodule of an ``as``-aliased ``import``-import is marked as
+ used
+- Report undefined names from ``__all__`` as possibly coming from a ``*``
+ import
+- Add support for changes in Python 3.8-dev
+- Add support for PEP 563 (``from __future__ import annotations``)
+- Include Python version and platform information in ``pyflakes --version``
+- Recognize ``__annotations__`` as a valid magic global in Python 3.6+
+- Mark names used in PEP 484 ``# type: ...`` comments as used
+- Add check for use of ``is`` operator with ``str``, ``bytes``, and ``int``
+ literals
+
+2.0.0 (2018-05-20)
+
+- Drop support for EOL Python <2.7 and 3.2-3.3
+- Check for unused exception binding in ``except:`` block
+- Handle string literal type annotations
+- Ignore redefinitions of ``_``, unless originally defined by import
+- Support ``__class__`` without ``self`` in Python 3
+- Issue an error for ``raise NotImplemented(...)``
+
+1.6.0 (2017-08-03)
+
+- Process function scope variable annotations for used names
+- Find Python files without extensions by their shebang
+
+1.5.0 (2017-01-09)
+
+- Enable support for PEP 526 annotated assignments
+
+1.4.0 (2016-12-30):
+
+- Change formatting of ImportStarMessage to be consistent with other errors
+- Support PEP 498 "f-strings"
+
+1.3.0 (2016-09-01):
+
+- Fix PyPy2 Windows IntegrationTests
+- Check for duplicate dictionary keys
+- Fix TestMain tests on Windows
+- Fix "continue" and "break" checks ignoring py3.5's "async for" loop
+
+1.2.3 (2016-05-12):
+
+- Fix TypeError when processing relative imports
+
+1.2.2 (2016-05-06):
+
+- Avoid traceback when exception is del-ed in except
+
+1.2.1 (2016-05-05):
+
+- Fix false RedefinedWhileUnused for submodule imports
+
+1.2.0 (2016-05-03):
+
+- Warn against reusing exception names after the except: block on Python 3
+- Improve the error messages for imports
+
+1.1.0 (2016-03-01):
+
+- Allow main() to accept arguments.
+- Support @ matrix-multiplication operator
+- Validate ``__future__`` imports
+- Fix doctest scope testing
+- Warn for tuple assertions which are always true
+- Warn for "import \*" not at module level on Python 3
+- Catch many more kinds of SyntaxErrors
+- Check PEP 498 f-strings
+- (and a few more sundry bugfixes)
+
+1.0.0 (2015-09-20):
+
+- Python 3.5 support. async/await statements in particular.
+- Fix test_api.py on Windows.
+- Eliminate a false UnusedImport warning when the name has been
+ declared "global"
+
+0.9.2 (2015-06-17):
+
+- Fix a traceback when a global is defined in one scope, and used in another.
+
+0.9.1 (2015-06-09):
+
+- Update NEWS.txt to include 0.9.0, which had been forgotten.
+
+0.9.0 (2015-05-31):
+
+- Exit gracefully, not with a traceback, on SIGINT and SIGPIPE.
+- Fix incorrect report of undefined name when using lambda expressions in
+ generator expressions.
+- Don't crash on DOS line endings on Windows and Python 2.6.
+- Don't report an undefined name if the 'del' which caused a name to become
+ undefined is only conditionally executed.
+- Properly handle differences in list comprehension scope in Python 3.
+- Improve handling of edge cases around 'global' defined variables.
+- Report an error for 'return' outside a function.
+
+0.8.1 (2014-03-30):
+
+- Detect the declared encoding in Python 3.
+- Do not report redefinition of import in a local scope, if the
+ global name is used elsewhere in the module.
+- Catch undefined variable in loop generator when it is also used as
+ loop variable.
+- Report undefined name for ``(a, b) = (1, 2)`` but not for the general
+ unpacking ``(a, b) = func()``.
+- Correctly detect when an imported module is used in default arguments
+ of a method, when the method and the module use the same name.
+- Distribute a universal wheel file.
+
+0.8.0 (2014-03-22):
+
+- Adapt for the AST in Python 3.4.
+- Fix caret position on SyntaxError.
+- Fix crash on Python 2.x with some doctest SyntaxError.
+- Add tox.ini.
+- The ``PYFLAKES_NODOCTEST`` environment variable has been replaced with the
+ ``PYFLAKES_DOCTEST`` environment variable (with the opposite meaning).
+ Doctest checking is now disabled by default; set the environment variable
+ to enable it.
+- Correctly parse incremental ``__all__ += [...]``.
+- Catch return with arguments inside a generator (Python <= 3.2).
+- Do not complain about ``_`` in doctests.
+- Drop deprecated methods ``pushFunctionScope`` and ``pushClassScope``.
+
+0.7.3 (2013-07-02):
+
+- Do not report undefined name for generator expression and dict or
+ set comprehension at class level.
+- Deprecate ``Checker.pushFunctionScope`` and ``Checker.pushClassScope``:
+ use ``Checker.pushScope`` instead.
+- Remove dependency on Unittest2 for the tests.
+
+0.7.2 (2013-04-24):
+
+- Fix computation of ``DoctestSyntaxError.lineno`` and ``col``.
+- Add boolean attribute ``Checker.withDoctest`` to ignore doctests.
+- If environment variable ``PYFLAKES_NODOCTEST`` is set, skip doctests.
+- Environment variable ``PYFLAKES_BUILTINS`` accepts a comma-separated
+ list of additional built-in names.
+
+0.7.1 (2013-04-23):
+
+- File ``bin/pyflakes`` was missing in tarball generated with distribute.
+- Fix reporting errors in non-ASCII filenames (Python 2.x).
+
+0.7.0 (2013-04-17):
+
+- Add --version and --help options.
+- Support ``python -m pyflakes`` (Python 2.7 and Python 3.x).
+- Add attribute ``Message.col`` to report column offset.
+- Do not report redefinition of variable for a variable used in a list
+ comprehension in a conditional.
+- Do not report redefinition of variable for generator expressions and
+ set or dict comprehensions.
+- Do not report undefined name when the code is protected with a
+ ``NameError`` exception handler.
+- Do not report redefinition of variable when unassigning a module imported
+ for its side-effect.
+- Support special locals like ``__tracebackhide__`` for py.test.
+- Support checking doctests.
+- Fix issue with Turkish locale where ``'i'.upper() == 'i'`` in Python 2.
+
+0.6.1 (2013-01-29):
+
+- Fix detection of variables in augmented assignments.
+
+0.6.0 (2013-01-29):
+
+- Support Python 3 up to 3.3, based on the pyflakes3k project.
+- Preserve compatibility with Python 2.5 and all recent versions of Python.
+- Support custom reporters in addition to the default Reporter.
+- Allow function redefinition for modern property construction via
+ property.setter/deleter.
+- Fix spurious redefinition warnings in conditionals.
+- Do not report undefined name in ``__all__`` if import * is used.
+- Add WindowsError as a known built-in name on all platforms.
+- Support specifying additional built-ins in the ``Checker`` constructor.
+- Don't issue Unused Variable warning when using locals() in current scope.
+- Handle problems with the encoding of source files.
+- Remove dependency on Twisted for the tests.
+- Support ``python setup.py test`` and ``python setup.py develop``.
+- Create script using setuptools ``entry_points`` to support all platforms,
+ including Windows.
+
+0.5.0 (2011-09-02):
+
+- Convert pyflakes to use newer _ast infrastructure rather than compiler.
+- Support for new syntax in 2.7 (including set literals, set comprehensions,
+ and dictionary comprehensions).
+- Make sure class names don't get bound until after class definition.
+
+0.4.0 (2009-11-25):
+
+- Fix reporting for certain SyntaxErrors which lack line number
+ information.
+- Check for syntax errors more rigorously.
+- Support checking names used with the class decorator syntax in versions
+ of Python which have it.
+- Detect local variables which are bound but never used.
+- Handle permission errors when trying to read source files.
+- Handle problems with the encoding of source files.
+- Support importing dotted names so as not to incorrectly report them as
+ redefined unused names.
+- Support all forms of the with statement.
+- Consider static ``__all__`` definitions and avoid reporting unused names
+ if the names are listed there.
+- Fix incorrect checking of class names with respect to the names of their
+ bases in the class statement.
+- Support the ``__path__`` global in ``__init__.py``.
+
+0.3.0 (2009-01-30):
+
+- Display more informative SyntaxError messages.
+- Don't hang flymake with unmatched triple quotes (only report a single
+ line of source for a multiline syntax error).
+- Recognize ``__builtins__`` as a defined name.
+- Improve pyflakes support for python versions 2.3-2.5
+- Support for if-else expressions and with statements.
+- Warn instead of error on non-existent file paths.
+- Check for ``__future__`` imports after other statements.
+- Add reporting for some types of import shadowing.
+- Improve reporting of unbound locals
diff --git a/third_party/python/pyflakes/PKG-INFO b/third_party/python/pyflakes/PKG-INFO
new file mode 100644
index 0000000000..b25f353948
--- /dev/null
+++ b/third_party/python/pyflakes/PKG-INFO
@@ -0,0 +1,116 @@
+Metadata-Version: 1.2
+Name: pyflakes
+Version: 2.2.0
+Summary: passive checker of Python programs
+Home-page: https://github.com/PyCQA/pyflakes
+Author: A lot of people
+Author-email: code-quality@python.org
+License: MIT
+Description: ========
+ Pyflakes
+ ========
+
+ A simple program which checks Python source files for errors.
+
+ Pyflakes analyzes programs and detects various errors. It works by
+ parsing the source file, not importing it, so it is safe to use on
+ modules with side effects. It's also much faster.
+
+ It is `available on PyPI <https://pypi.org/project/pyflakes/>`_
+ and it supports all active versions of Python: 2.7 and 3.4 to 3.7.
+
+
+
+ Installation
+ ------------
+
+ It can be installed with::
+
+ $ pip install --upgrade pyflakes
+
+
+ Useful tips:
+
+ * Be sure to install it for a version of Python which is compatible
+ with your codebase: for Python 2, ``pip2 install pyflakes`` and for
+ Python 3, ``pip3 install pyflakes``.
+
+ * You can also invoke Pyflakes with ``python3 -m pyflakes .`` or
+ ``python2 -m pyflakes .`` if you have it installed for both versions.
+
+ * If you require more options and more flexibility, you may want to
+ take a look at Flake8_ too.
+
+
+ Design Principles
+ -----------------
+ Pyflakes makes a simple promise: it will never complain about style,
+ and it will try very, very hard to never emit false positives.
+
+ Pyflakes is also faster than Pylint_
+ or Pychecker_. This is
+ largely because Pyflakes only examines the syntax tree of each file
+ individually. As a consequence, Pyflakes is more limited in the
+ types of things it can check.
+
+ If you like Pyflakes but also want stylistic checks, you want
+ flake8_, which combines
+ Pyflakes with style checks against
+ `PEP 8`_ and adds
+ per-project configuration ability.
+
+
+ Mailing-list
+ ------------
+
+ Share your feedback and ideas: `subscribe to the mailing-list
+ <https://mail.python.org/mailman/listinfo/code-quality>`_
+
+ Contributing
+ ------------
+
+ Issues are tracked on `GitHub <https://github.com/PyCQA/pyflakes/issues>`_.
+
+ Patches may be submitted via a `GitHub pull request`_ or via the mailing list
+ if you prefer. If you are comfortable doing so, please `rebase your changes`_
+ so they may be applied to master with a fast-forward merge, and each commit is
+ a coherent unit of work with a well-written log message. If you are not
+ comfortable with this rebase workflow, the project maintainers will be happy to
+ rebase your commits for you.
+
+ All changes should include tests and pass flake8_.
+
+ .. image:: https://api.travis-ci.org/PyCQA/pyflakes.svg?branch=master
+ :target: https://travis-ci.org/PyCQA/pyflakes
+ :alt: Build status
+
+ .. _Pylint: http://www.pylint.org/
+ .. _flake8: https://pypi.org/project/flake8/
+ .. _`PEP 8`: http://legacy.python.org/dev/peps/pep-0008/
+ .. _Pychecker: http://pychecker.sourceforge.net/
+ .. _`rebase your changes`: https://git-scm.com/book/en/v2/Git-Branching-Rebasing
+ .. _`GitHub pull request`: https://github.com/PyCQA/pyflakes/pulls
+
+ Changelog
+ ---------
+
+ Please see `NEWS.rst <https://github.com/PyCQA/pyflakes/blob/master/NEWS.rst>`_.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 6 - Mature
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development
+Classifier: Topic :: Utilities
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
diff --git a/third_party/python/pyflakes/README.rst b/third_party/python/pyflakes/README.rst
new file mode 100644
index 0000000000..ca802a8ac6
--- /dev/null
+++ b/third_party/python/pyflakes/README.rst
@@ -0,0 +1,89 @@
+========
+Pyflakes
+========
+
+A simple program which checks Python source files for errors.
+
+Pyflakes analyzes programs and detects various errors. It works by
+parsing the source file, not importing it, so it is safe to use on
+modules with side effects. It's also much faster.
+
+It is `available on PyPI <https://pypi.org/project/pyflakes/>`_
+and it supports all active versions of Python: 2.7 and 3.4 to 3.7.
+
+
+
+Installation
+------------
+
+It can be installed with::
+
+ $ pip install --upgrade pyflakes
+
+
+Useful tips:
+
+* Be sure to install it for a version of Python which is compatible
+ with your codebase: for Python 2, ``pip2 install pyflakes`` and for
+ Python 3, ``pip3 install pyflakes``.
+
+* You can also invoke Pyflakes with ``python3 -m pyflakes .`` or
+ ``python2 -m pyflakes .`` if you have it installed for both versions.
+
+* If you require more options and more flexibility, you may want to
+ take a look at Flake8_ too.
+
+
+Design Principles
+-----------------
+Pyflakes makes a simple promise: it will never complain about style,
+and it will try very, very hard to never emit false positives.
+
+Pyflakes is also faster than Pylint_
+or Pychecker_. This is
+largely because Pyflakes only examines the syntax tree of each file
+individually. As a consequence, Pyflakes is more limited in the
+types of things it can check.
+
+If you like Pyflakes but also want stylistic checks, you want
+flake8_, which combines
+Pyflakes with style checks against
+`PEP 8`_ and adds
+per-project configuration ability.
+
+
+Mailing-list
+------------
+
+Share your feedback and ideas: `subscribe to the mailing-list
+<https://mail.python.org/mailman/listinfo/code-quality>`_
+
+Contributing
+------------
+
+Issues are tracked on `GitHub <https://github.com/PyCQA/pyflakes/issues>`_.
+
+Patches may be submitted via a `GitHub pull request`_ or via the mailing list
+if you prefer. If you are comfortable doing so, please `rebase your changes`_
+so they may be applied to master with a fast-forward merge, and each commit is
+a coherent unit of work with a well-written log message. If you are not
+comfortable with this rebase workflow, the project maintainers will be happy to
+rebase your commits for you.
+
+All changes should include tests and pass flake8_.
+
+.. image:: https://api.travis-ci.org/PyCQA/pyflakes.svg?branch=master
+ :target: https://travis-ci.org/PyCQA/pyflakes
+ :alt: Build status
+
+.. _Pylint: http://www.pylint.org/
+.. _flake8: https://pypi.org/project/flake8/
+.. _`PEP 8`: http://legacy.python.org/dev/peps/pep-0008/
+.. _Pychecker: http://pychecker.sourceforge.net/
+.. _`rebase your changes`: https://git-scm.com/book/en/v2/Git-Branching-Rebasing
+.. _`GitHub pull request`: https://github.com/PyCQA/pyflakes/pulls
+
+Changelog
+---------
+
+Please see `NEWS.rst <https://github.com/PyCQA/pyflakes/blob/master/NEWS.rst>`_.
diff --git a/third_party/python/pyflakes/bin/pyflakes b/third_party/python/pyflakes/bin/pyflakes
new file mode 100755
index 0000000000..54a3814344
--- /dev/null
+++ b/third_party/python/pyflakes/bin/pyflakes
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+from pyflakes.scripts.pyflakes import main
+main()
diff --git a/third_party/python/pyflakes/pyflakes/__init__.py b/third_party/python/pyflakes/pyflakes/__init__.py
new file mode 100644
index 0000000000..04188a16d9
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/__init__.py
@@ -0,0 +1 @@
+__version__ = '2.2.0'
diff --git a/third_party/python/pyflakes/pyflakes/__main__.py b/third_party/python/pyflakes/pyflakes/__main__.py
new file mode 100644
index 0000000000..68cd9efbc5
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/__main__.py
@@ -0,0 +1,5 @@
+from pyflakes.api import main
+
+# python -m pyflakes
+if __name__ == '__main__':
+ main(prog='pyflakes')
diff --git a/third_party/python/pyflakes/pyflakes/api.py b/third_party/python/pyflakes/pyflakes/api.py
new file mode 100644
index 0000000000..ec3ef5a736
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/api.py
@@ -0,0 +1,213 @@
+"""
+API for the command-line I{pyflakes} tool.
+"""
+from __future__ import with_statement
+
+import ast
+import os
+import platform
+import re
+import sys
+
+from pyflakes import checker, __version__
+from pyflakes import reporter as modReporter
+
+__all__ = ['check', 'checkPath', 'checkRecursive', 'iterSourceCode', 'main']
+
+PYTHON_SHEBANG_REGEX = re.compile(br'^#!.*\bpython([23](\.\d+)?|w)?[dmu]?\s')
+
+
+def check(codeString, filename, reporter=None):
+ """
+ Check the Python source given by C{codeString} for flakes.
+
+ @param codeString: The Python source to check.
+ @type codeString: C{str}
+
+ @param filename: The name of the file the source came from, used to report
+ errors.
+ @type filename: C{str}
+
+ @param reporter: A L{Reporter} instance, where errors and warnings will be
+ reported.
+
+ @return: The number of warnings emitted.
+ @rtype: C{int}
+ """
+ if reporter is None:
+ reporter = modReporter._makeDefaultReporter()
+ # First, compile into an AST and handle syntax errors.
+ try:
+ tree = ast.parse(codeString, filename=filename)
+ except SyntaxError:
+ value = sys.exc_info()[1]
+ msg = value.args[0]
+
+ (lineno, offset, text) = value.lineno, value.offset, value.text
+
+ if checker.PYPY:
+ if text is None:
+ lines = codeString.splitlines()
+ if len(lines) >= lineno:
+ text = lines[lineno - 1]
+ if sys.version_info >= (3, ) and isinstance(text, bytes):
+ try:
+ text = text.decode('ascii')
+ except UnicodeDecodeError:
+ text = None
+ offset -= 1
+
+ # If there's an encoding problem with the file, the text is None.
+ if text is None:
+ # Avoid using msg, since for the only known case, it contains a
+ # bogus message that claims the encoding the file declared was
+ # unknown.
+ reporter.unexpectedError(filename, 'problem decoding source')
+ else:
+ reporter.syntaxError(filename, msg, lineno, offset, text)
+ return 1
+ except Exception:
+ reporter.unexpectedError(filename, 'problem decoding source')
+ return 1
+ # Okay, it's syntactically valid. Now check it.
+ file_tokens = checker.make_tokens(codeString)
+ w = checker.Checker(tree, file_tokens=file_tokens, filename=filename)
+ w.messages.sort(key=lambda m: m.lineno)
+ for warning in w.messages:
+ reporter.flake(warning)
+ return len(w.messages)
+
+
+def checkPath(filename, reporter=None):
+ """
+ Check the given path, printing out any warnings detected.
+
+ @param reporter: A L{Reporter} instance, where errors and warnings will be
+ reported.
+
+ @return: the number of warnings printed
+ """
+ if reporter is None:
+ reporter = modReporter._makeDefaultReporter()
+ try:
+ with open(filename, 'rb') as f:
+ codestr = f.read()
+ except IOError:
+ msg = sys.exc_info()[1]
+ reporter.unexpectedError(filename, msg.args[1])
+ return 1
+ return check(codestr, filename, reporter)
+
+
+def isPythonFile(filename):
+ """Return True if filename points to a Python file."""
+ if filename.endswith('.py'):
+ return True
+
+ # Avoid obvious Emacs backup files
+ if filename.endswith("~"):
+ return False
+
+ max_bytes = 128
+
+ try:
+ with open(filename, 'rb') as f:
+ text = f.read(max_bytes)
+ if not text:
+ return False
+ except IOError:
+ return False
+
+ return PYTHON_SHEBANG_REGEX.match(text)
+
+
+def iterSourceCode(paths):
+ """
+ Iterate over all Python source files in C{paths}.
+
+ @param paths: A list of paths. Directories will be recursed into and
+ any .py files found will be yielded. Any non-directories will be
+ yielded as-is.
+ """
+ for path in paths:
+ if os.path.isdir(path):
+ for dirpath, dirnames, filenames in os.walk(path):
+ for filename in filenames:
+ full_path = os.path.join(dirpath, filename)
+ if isPythonFile(full_path):
+ yield full_path
+ else:
+ yield path
+
+
+def checkRecursive(paths, reporter):
+ """
+ Recursively check all source files in C{paths}.
+
+ @param paths: A list of paths to Python source files and directories
+ containing Python source files.
+ @param reporter: A L{Reporter} where all of the warnings and errors
+ will be reported to.
+ @return: The number of warnings found.
+ """
+ warnings = 0
+ for sourcePath in iterSourceCode(paths):
+ warnings += checkPath(sourcePath, reporter)
+ return warnings
+
+
+def _exitOnSignal(sigName, message):
+ """Handles a signal with sys.exit.
+
+ Some of these signals (SIGPIPE, for example) don't exist or are invalid on
+ Windows. So, ignore errors that might arise.
+ """
+ import signal
+
+ try:
+ sigNumber = getattr(signal, sigName)
+ except AttributeError:
+ # the signal constants defined in the signal module are defined by
+ # whether the C library supports them or not. So, SIGPIPE might not
+ # even be defined.
+ return
+
+ def handler(sig, f):
+ sys.exit(message)
+
+ try:
+ signal.signal(sigNumber, handler)
+ except ValueError:
+ # It's also possible the signal is defined, but then it's invalid. In
+ # this case, signal.signal raises ValueError.
+ pass
+
+
+def _get_version():
+ """
+ Retrieve and format package version along with python version & OS used
+ """
+ return ('%s Python %s on %s' %
+ (__version__, platform.python_version(), platform.system()))
+
+
+def main(prog=None, args=None):
+ """Entry point for the script "pyflakes"."""
+ import argparse
+
+ # Handle "Keyboard Interrupt" and "Broken pipe" gracefully
+ _exitOnSignal('SIGINT', '... stopped')
+ _exitOnSignal('SIGPIPE', 1)
+
+ parser = argparse.ArgumentParser(prog=prog,
+ description='Check Python source files for errors')
+ parser.add_argument('-V', '--version', action='version', version=_get_version())
+ parser.add_argument('path', nargs='*',
+ help='Path(s) of Python file(s) to check. STDIN if not given.')
+ args = parser.parse_args(args=args).path
+ reporter = modReporter._makeDefaultReporter()
+ if args:
+ warnings = checkRecursive(args, reporter)
+ else:
+ warnings = check(sys.stdin.read(), '<stdin>', reporter)
+ raise SystemExit(warnings > 0)
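Beyond the ``pyflakes`` command line, the ``check`` helper above can be driven as a
library; a minimal sketch, assuming the ``Reporter(warningStream, errorStream)``
constructor from ``pyflakes.reporter``:

.. code-block:: python

    import sys

    from pyflakes.api import check
    from pyflakes.reporter import Reporter

    source = "import os\n"  # 'os' is imported but never used
    reporter = Reporter(sys.stdout, sys.stderr)

    # check() reports each warning through the reporter and returns the count.
    num_warnings = check(source, '<example>', reporter)
    assert num_warnings == 1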
diff --git a/third_party/python/pyflakes/pyflakes/checker.py b/third_party/python/pyflakes/pyflakes/checker.py
new file mode 100644
index 0000000000..5af956cce2
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/checker.py
@@ -0,0 +1,2249 @@
+"""
+Main module.
+
+Implements the central Checker class and models the Bindings and Scopes.
+"""
+import __future__
+import ast
+import bisect
+import collections
+import contextlib
+import doctest
+import functools
+import os
+import re
+import string
+import sys
+import tokenize
+
+from pyflakes import messages
+
+PY2 = sys.version_info < (3, 0)
+PY35_PLUS = sys.version_info >= (3, 5) # Python 3.5 and above
+PY36_PLUS = sys.version_info >= (3, 6) # Python 3.6 and above
+PY38_PLUS = sys.version_info >= (3, 8)
+try:
+ sys.pypy_version_info
+ PYPY = True
+except AttributeError:
+ PYPY = False
+
+builtin_vars = dir(__import__('__builtin__' if PY2 else 'builtins'))
+
+parse_format_string = string.Formatter().parse
+
+if PY2:
+ tokenize_tokenize = tokenize.generate_tokens
+else:
+ tokenize_tokenize = tokenize.tokenize
+
+if PY2:
+ def getNodeType(node_class):
+ # workaround str.upper() which is locale-dependent
+ return str(unicode(node_class.__name__).upper())
+
+ def get_raise_argument(node):
+ return node.type
+
+else:
+ def getNodeType(node_class):
+ return node_class.__name__.upper()
+
+ def get_raise_argument(node):
+ return node.exc
+
+ # Silence `pyflakes` from reporting `undefined name 'unicode'` in Python 3.
+ unicode = str
+
+# Python >= 3.3 uses ast.Try instead of (ast.TryExcept + ast.TryFinally)
+if PY2:
+ def getAlternatives(n):
+ if isinstance(n, (ast.If, ast.TryFinally)):
+ return [n.body]
+ if isinstance(n, ast.TryExcept):
+ return [n.body + n.orelse] + [[hdl] for hdl in n.handlers]
+else:
+ def getAlternatives(n):
+ if isinstance(n, ast.If):
+ return [n.body]
+ if isinstance(n, ast.Try):
+ return [n.body + n.orelse] + [[hdl] for hdl in n.handlers]
+
+if PY35_PLUS:
+ FOR_TYPES = (ast.For, ast.AsyncFor)
+ LOOP_TYPES = (ast.While, ast.For, ast.AsyncFor)
+ FUNCTION_TYPES = (ast.FunctionDef, ast.AsyncFunctionDef)
+else:
+ FOR_TYPES = (ast.For,)
+ LOOP_TYPES = (ast.While, ast.For)
+ FUNCTION_TYPES = (ast.FunctionDef,)
+
+
+if PY38_PLUS:
+ def _is_singleton(node): # type: (ast.AST) -> bool
+ return (
+ isinstance(node, ast.Constant) and
+ isinstance(node.value, (bool, type(Ellipsis), type(None)))
+ )
+elif not PY2:
+ def _is_singleton(node): # type: (ast.AST) -> bool
+ return isinstance(node, (ast.NameConstant, ast.Ellipsis))
+else:
+ def _is_singleton(node): # type: (ast.AST) -> bool
+ return (
+ isinstance(node, ast.Name) and
+ node.id in {'True', 'False', 'Ellipsis', 'None'}
+ )
+
+
+def _is_tuple_constant(node): # type: (ast.AST) -> bool
+ return (
+ isinstance(node, ast.Tuple) and
+ all(_is_constant(elt) for elt in node.elts)
+ )
+
+
+if PY38_PLUS:
+ def _is_constant(node):
+ return isinstance(node, ast.Constant) or _is_tuple_constant(node)
+else:
+ _const_tps = (ast.Str, ast.Num)
+ if not PY2:
+ _const_tps += (ast.Bytes,)
+
+ def _is_constant(node):
+ return (
+ isinstance(node, _const_tps) or
+ _is_singleton(node) or
+ _is_tuple_constant(node)
+ )
+
+
+def _is_const_non_singleton(node): # type: (ast.AST) -> bool
+ return _is_constant(node) and not _is_singleton(node)
+
+
+# https://github.com/python/typed_ast/blob/1.4.0/ast27/Parser/tokenizer.c#L102-L104
+TYPE_COMMENT_RE = re.compile(r'^#\s*type:\s*')
+# https://github.com/python/typed_ast/blob/1.4.0/ast27/Parser/tokenizer.c#L1408-L1413
+ASCII_NON_ALNUM = ''.join([chr(i) for i in range(128) if not chr(i).isalnum()])
+TYPE_IGNORE_RE = re.compile(
+ TYPE_COMMENT_RE.pattern + r'ignore([{}]|$)'.format(ASCII_NON_ALNUM))
+# https://github.com/python/typed_ast/blob/1.4.0/ast27/Grammar/Grammar#L147
+TYPE_FUNC_RE = re.compile(r'^(\(.*?\))\s*->\s*(.*)$')
+
+
+MAPPING_KEY_RE = re.compile(r'\(([^()]*)\)')
+CONVERSION_FLAG_RE = re.compile('[#0+ -]*')
+WIDTH_RE = re.compile(r'(?:\*|\d*)')
+PRECISION_RE = re.compile(r'(?:\.(?:\*|\d*))?')
+LENGTH_RE = re.compile('[hlL]?')
+# https://docs.python.org/3/library/stdtypes.html#old-string-formatting
+VALID_CONVERSIONS = frozenset('diouxXeEfFgGcrsa%')
+
+
+def _must_match(regex, string, pos):
+ # type: (Pattern[str], str, int) -> Match[str]
+ match = regex.match(string, pos)
+ assert match is not None
+ return match
+
+
+def parse_percent_format(s): # type: (str) -> Tuple[PercentFormat, ...]
+ """Parses the string component of a `'...' % ...` format call
+
+ Copied from https://github.com/asottile/pyupgrade at v1.20.1
+ """
+
+ def _parse_inner():
+ # type: () -> Generator[PercentFormat, None, None]
+ string_start = 0
+ string_end = 0
+ in_fmt = False
+
+ i = 0
+ while i < len(s):
+ if not in_fmt:
+ try:
+ i = s.index('%', i)
+ except ValueError: # no more % fields!
+ yield s[string_start:], None
+ return
+ else:
+ string_end = i
+ i += 1
+ in_fmt = True
+ else:
+ key_match = MAPPING_KEY_RE.match(s, i)
+ if key_match:
+ key = key_match.group(1) # type: Optional[str]
+ i = key_match.end()
+ else:
+ key = None
+
+ conversion_flag_match = _must_match(CONVERSION_FLAG_RE, s, i)
+ conversion_flag = conversion_flag_match.group() or None
+ i = conversion_flag_match.end()
+
+ width_match = _must_match(WIDTH_RE, s, i)
+ width = width_match.group() or None
+ i = width_match.end()
+
+ precision_match = _must_match(PRECISION_RE, s, i)
+ precision = precision_match.group() or None
+ i = precision_match.end()
+
+ # length modifier is ignored
+ i = _must_match(LENGTH_RE, s, i).end()
+
+ try:
+ conversion = s[i]
+ except IndexError:
+ raise ValueError('end-of-string while parsing format')
+ i += 1
+
+ fmt = (key, conversion_flag, width, precision, conversion)
+ yield s[string_start:string_end], fmt
+
+ in_fmt = False
+ string_start = i
+
+ if in_fmt:
+ raise ValueError('end-of-string while parsing format')
+
+ return tuple(_parse_inner())
+
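+# For example, parse_percent_format('a %s b') yields
+# (('a ', (None, None, None, None, 's')), (' b', None)), i.e. pairs of
+# (leading literal text, (key, flags, width, precision, conversion)),
+# with None in place of the format tuple for a trailing literal.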
+
+class _FieldsOrder(dict):
+ """Fix order of AST node fields."""
+
+ def _get_fields(self, node_class):
+ # handle iter before target, and generators before element
+ fields = node_class._fields
+ if 'iter' in fields:
+ key_first = 'iter'.find
+ elif 'generators' in fields:
+ key_first = 'generators'.find
+ else:
+ key_first = 'value'.find
+ return tuple(sorted(fields, key=key_first, reverse=True))
+
+ def __missing__(self, node_class):
+ self[node_class] = fields = self._get_fields(node_class)
+ return fields
+
+
+def counter(items):
+ """
+ Minimal stand-in for collections.Counter, needed because Python 2.6 does
+ not provide Counter in the collections module.
+ """
+ results = {}
+ for item in items:
+ results[item] = results.get(item, 0) + 1
+ return results
+
+
+def iter_child_nodes(node, omit=None, _fields_order=_FieldsOrder()):
+ """
+ Yield all direct child nodes of *node*, that is, all fields that
+ are nodes and all items of fields that are lists of nodes.
+
+ :param node: AST node to be iterated upon
+ :param omit: String or tuple of strings denoting the
+ attributes of the node to be omitted from
+ further parsing
+ :param _fields_order: Order of AST node fields
+ """
+ for name in _fields_order[node.__class__]:
+ if omit and name in omit:
+ continue
+ field = getattr(node, name, None)
+ if isinstance(field, ast.AST):
+ yield field
+ elif isinstance(field, list):
+ for item in field:
+ yield item
+
+
+def convert_to_value(item):
+ if isinstance(item, ast.Str):
+ return item.s
+ elif hasattr(ast, 'Bytes') and isinstance(item, ast.Bytes):
+ return item.s
+ elif isinstance(item, ast.Tuple):
+ return tuple(convert_to_value(i) for i in item.elts)
+ elif isinstance(item, ast.Num):
+ return item.n
+ elif isinstance(item, ast.Name):
+ result = VariableKey(item=item)
+ constants_lookup = {
+ 'True': True,
+ 'False': False,
+ 'None': None,
+ }
+ return constants_lookup.get(
+ result.name,
+ result,
+ )
+ elif (not PY2) and isinstance(item, ast.NameConstant):
+ # None, True, False are nameconstants in python3, but names in 2
+ return item.value
+ else:
+ return UnhandledKeyType()
+
+
+def is_notimplemented_name_node(node):
+ return isinstance(node, ast.Name) and getNodeName(node) == 'NotImplemented'
+
+
+class Binding(object):
+ """
+ Represents the binding of a value to a name.
+
+ The checker uses this to keep track of which names have been bound and
+ which names have not. See L{Assignment} for a special type of binding that
+ is checked with stricter rules.
+
+ @ivar used: pair of (L{Scope}, node) indicating the scope and
+ the node that this binding was last used.
+ """
+
+ def __init__(self, name, source):
+ self.name = name
+ self.source = source
+ self.used = False
+
+ def __str__(self):
+ return self.name
+
+ def __repr__(self):
+ return '<%s object %r from line %r at 0x%x>' % (self.__class__.__name__,
+ self.name,
+ self.source.lineno,
+ id(self))
+
+ def redefines(self, other):
+ return isinstance(other, Definition) and self.name == other.name
+
+
+class Definition(Binding):
+ """
+ A binding that defines a function or a class.
+ """
+
+
+class Builtin(Definition):
+ """A definition created for all Python builtins."""
+
+ def __init__(self, name):
+ super(Builtin, self).__init__(name, None)
+
+ def __repr__(self):
+ return '<%s object %r at 0x%x>' % (self.__class__.__name__,
+ self.name,
+ id(self))
+
+
+class UnhandledKeyType(object):
+ """
+ A dictionary key of a type that we cannot or do not check for duplicates.
+ """
+
+
+class VariableKey(object):
+ """
+ A dictionary key which is a variable.
+
+ @ivar item: The variable AST object.
+ """
+ def __init__(self, item):
+ self.name = item.id
+
+ def __eq__(self, compare):
+ return (
+ compare.__class__ == self.__class__ and
+ compare.name == self.name
+ )
+
+ def __hash__(self):
+ return hash(self.name)
+
+
+class Importation(Definition):
+ """
+ A binding created by an import statement.
+
+ @ivar fullName: The complete name given to the import statement,
+ possibly including multiple dotted components.
+ @type fullName: C{str}
+ """
+
+ def __init__(self, name, source, full_name=None):
+ self.fullName = full_name or name
+ self.redefined = []
+ super(Importation, self).__init__(name, source)
+
+ def redefines(self, other):
+ if isinstance(other, SubmoduleImportation):
+ # See note in SubmoduleImportation about RedefinedWhileUnused
+ return self.fullName == other.fullName
+ return isinstance(other, Definition) and self.name == other.name
+
+ def _has_alias(self):
+ """Return whether importation needs an as clause."""
+ return not self.fullName.split('.')[-1] == self.name
+
+ @property
+ def source_statement(self):
+ """Generate a source statement equivalent to the import."""
+ if self._has_alias():
+ return 'import %s as %s' % (self.fullName, self.name)
+ else:
+ return 'import %s' % self.fullName
+
+ def __str__(self):
+ """Return import full name with alias."""
+ if self._has_alias():
+ return self.fullName + ' as ' + self.name
+ else:
+ return self.fullName
+
+
+class SubmoduleImportation(Importation):
+ """
+ A binding created by a submodule import statement.
+
+ A submodule import is a special case where the root module is implicitly
+ imported, without an 'as' clause, and the submodule is also imported.
+ Python does not restrict which attributes of the root module may be used.
+
+ This class is only used when the submodule import is without an 'as' clause.
+
+ pyflakes handles this case by registering the root module name in the scope,
+ allowing any attribute of the root module to be accessed.
+
+ RedefinedWhileUnused is suppressed in `redefines` unless the submodule
+ name is also the same, to avoid false positives.
+ """
+
+ def __init__(self, name, source):
+ # A dot should only appear in the name when it is a submodule import
+ assert '.' in name and (not source or isinstance(source, ast.Import))
+ package_name = name.split('.')[0]
+ super(SubmoduleImportation, self).__init__(package_name, source)
+ self.fullName = name
+
+ def redefines(self, other):
+ if isinstance(other, Importation):
+ return self.fullName == other.fullName
+ return super(SubmoduleImportation, self).redefines(other)
+
+ def __str__(self):
+ return self.fullName
+
+ @property
+ def source_statement(self):
+ return 'import ' + self.fullName
+
+
+class ImportationFrom(Importation):
+
+ def __init__(self, name, source, module, real_name=None):
+ self.module = module
+ self.real_name = real_name or name
+
+ if module.endswith('.'):
+ full_name = module + self.real_name
+ else:
+ full_name = module + '.' + self.real_name
+
+ super(ImportationFrom, self).__init__(name, source, full_name)
+
+ def __str__(self):
+ """Return import full name with alias."""
+ if self.real_name != self.name:
+ return self.fullName + ' as ' + self.name
+ else:
+ return self.fullName
+
+ @property
+ def source_statement(self):
+ if self.real_name != self.name:
+ return 'from %s import %s as %s' % (self.module,
+ self.real_name,
+ self.name)
+ else:
+ return 'from %s import %s' % (self.module, self.name)
+
+
+class StarImportation(Importation):
+ """A binding created by a 'from x import *' statement."""
+
+ def __init__(self, name, source):
+ super(StarImportation, self).__init__('*', source)
+ # Each star importation needs a unique name, and
+ # may not be the module name otherwise it will be deemed imported
+ self.name = name + '.*'
+ self.fullName = name
+
+ @property
+ def source_statement(self):
+ return 'from ' + self.fullName + ' import *'
+
+ def __str__(self):
+ # When the module ends with a ., avoid the ambiguous '..*'
+ if self.fullName.endswith('.'):
+ return self.source_statement
+ else:
+ return self.name
+
+
+class FutureImportation(ImportationFrom):
+ """
+ A binding created by a from `__future__` import statement.
+
+ `__future__` imports are implicitly used.
+ """
+
+ def __init__(self, name, source, scope):
+ super(FutureImportation, self).__init__(name, source, '__future__')
+ self.used = (scope, source)
+
+
+class Argument(Binding):
+ """
+ Represents binding a name as an argument.
+ """
+
+
+class Assignment(Binding):
+ """
+ Represents binding a name with an explicit assignment.
+
+ The checker will raise warnings for any Assignment that isn't used. Also,
+ the checker does not consider assignments in tuple/list unpacking to be
+ Assignments, rather it treats them as simple Bindings.
+ """
+
+
+class FunctionDefinition(Definition):
+ pass
+
+
+class ClassDefinition(Definition):
+ pass
+
+
+class ExportBinding(Binding):
+ """
+ A binding created by an C{__all__} assignment. If the names in the list
+ can be determined statically, they will be treated as names for export and
+ additional checking applied to them.
+
+ The only recognized C{__all__} assignment via list concatenation is in the
+ following format:
+
+ __all__ = ['a'] + ['b'] + ['c']
+
+ Names which are imported and not otherwise used but appear in the value of
+ C{__all__} will not have an unused import warning reported for them.
+ """
+
+ def __init__(self, name, source, scope):
+ if '__all__' in scope and isinstance(source, ast.AugAssign):
+ self.names = list(scope['__all__'].names)
+ else:
+ self.names = []
+
+ def _add_to_names(container):
+ for node in container.elts:
+ if isinstance(node, ast.Str):
+ self.names.append(node.s)
+
+ if isinstance(source.value, (ast.List, ast.Tuple)):
+ _add_to_names(source.value)
+ # If concatenating lists
+ elif isinstance(source.value, ast.BinOp):
+ currentValue = source.value
+ while isinstance(currentValue.right, ast.List):
+ left = currentValue.left
+ right = currentValue.right
+ _add_to_names(right)
+ # If more lists are being added
+ if isinstance(left, ast.BinOp):
+ currentValue = left
+ # If just two lists are being added
+ elif isinstance(left, ast.List):
+ _add_to_names(left)
+ # All lists accounted for - done
+ break
+ # If not list concatenation
+ else:
+ break
+ super(ExportBinding, self).__init__(name, source)
+
+
+class Scope(dict):
+ importStarred = False # set to True when import * is found
+
+ def __repr__(self):
+ scope_cls = self.__class__.__name__
+ return '<%s at 0x%x %s>' % (scope_cls, id(self), dict.__repr__(self))
+
+
+class ClassScope(Scope):
+ pass
+
+
+class FunctionScope(Scope):
+ """
+ I represent a name scope for a function.
+
+ @ivar globals: Names declared 'global' in this function.
+ """
+ usesLocals = False
+ alwaysUsed = {'__tracebackhide__', '__traceback_info__',
+ '__traceback_supplement__'}
+
+ def __init__(self):
+ super(FunctionScope, self).__init__()
+ # Simplify: manage the special locals as globals
+ self.globals = self.alwaysUsed.copy()
+ self.returnValue = None # First non-empty return
+ self.isGenerator = False # Detect a generator
+
+ def unusedAssignments(self):
+ """
+ Return a generator for the assignments which have not been used.
+ """
+ for name, binding in self.items():
+ if (not binding.used and
+ name != '_' and # see issue #202
+ name not in self.globals and
+ not self.usesLocals and
+ isinstance(binding, Assignment)):
+ yield name, binding
+
+
+class GeneratorScope(Scope):
+ pass
+
+
+class ModuleScope(Scope):
+ """Scope for a module."""
+ _futures_allowed = True
+ _annotations_future_enabled = False
+
+
+class DoctestScope(ModuleScope):
+ """Scope for a doctest."""
+
+
+class DummyNode(object):
+ """Used in place of an `ast.AST` to set error message positions"""
+ def __init__(self, lineno, col_offset):
+ self.lineno = lineno
+ self.col_offset = col_offset
+
+
+# Globally defined names which are not attributes of the builtins module, or
+# are only present on some platforms.
+_MAGIC_GLOBALS = ['__file__', '__builtins__', 'WindowsError']
+# module scope annotation will store in `__annotations__`, see also PEP 526.
+if PY36_PLUS:
+ _MAGIC_GLOBALS.append('__annotations__')
+
+
+def getNodeName(node):
+ # Returns node.id, or node.name, or None
+ if hasattr(node, 'id'): # One of the many nodes with an id
+ return node.id
+ if hasattr(node, 'name'): # an ExceptHandler node
+ return node.name
+
+
+TYPING_MODULES = frozenset(('typing', 'typing_extensions'))
+
+
+def _is_typing_helper(node, is_name_match_fn, scope_stack):
+ """
+ Internal helper to determine whether or not something is a member of a
+ typing module. This is used as part of working out whether we are within a
+ type annotation context.
+
+ Note: you probably don't want to use this function directly. Instead see the
+ utils below which wrap it (`_is_typing` and `_is_any_typing_member`).
+ """
+
+ def _bare_name_is_attr(name):
+ for scope in reversed(scope_stack):
+ if name in scope:
+ return (
+ isinstance(scope[name], ImportationFrom) and
+ scope[name].module in TYPING_MODULES and
+ is_name_match_fn(scope[name].real_name)
+ )
+
+ return False
+
+ return (
+ (
+ isinstance(node, ast.Name) and
+ _bare_name_is_attr(node.id)
+ ) or (
+ isinstance(node, ast.Attribute) and
+ isinstance(node.value, ast.Name) and
+ node.value.id in TYPING_MODULES and
+ is_name_match_fn(node.attr)
+ )
+ )
+
+
+def _is_typing(node, typing_attr, scope_stack):
+ """
+ Determine whether `node` represents the member of a typing module specified
+ by `typing_attr`.
+
+ This is used as part of working out whether we are within a type annotation
+ context.
+ """
+ return _is_typing_helper(node, lambda x: x == typing_attr, scope_stack)
+
+
+def _is_any_typing_member(node, scope_stack):
+ """
+ Determine whether `node` represents any member of a typing module.
+
+ This is used as part of working out whether we are within a type annotation
+ context.
+ """
+ return _is_typing_helper(node, lambda x: True, scope_stack)
+
+
+def is_typing_overload(value, scope_stack):
+ return (
+ isinstance(value.source, FUNCTION_TYPES) and
+ any(
+ _is_typing(dec, 'overload', scope_stack)
+ for dec in value.source.decorator_list
+ )
+ )
+
+
+def in_annotation(func):
+ @functools.wraps(func)
+ def in_annotation_func(self, *args, **kwargs):
+ with self._enter_annotation():
+ return func(self, *args, **kwargs)
+ return in_annotation_func
+
+
+def make_tokens(code):
+ # PY3: tokenize.tokenize requires readline of bytes
+ if not isinstance(code, bytes):
+ code = code.encode('UTF-8')
+ lines = iter(code.splitlines(True))
+ # next(lines, b'') is to prevent an error in pypy3
+ return tuple(tokenize_tokenize(lambda: next(lines, b'')))
+
+
+class _TypeableVisitor(ast.NodeVisitor):
+ """Collect the line number and nodes which are deemed typeable by
+ PEP 484
+
+ https://www.python.org/dev/peps/pep-0484/#type-comments
+ """
+ def __init__(self):
+ self.typeable_lines = [] # type: List[int]
+ self.typeable_nodes = {} # type: Dict[int, ast.AST]
+
+ def _typeable(self, node):
+ # if there is more than one typeable thing on a line last one wins
+ self.typeable_lines.append(node.lineno)
+ self.typeable_nodes[node.lineno] = node
+
+ self.generic_visit(node)
+
+ visit_Assign = visit_For = visit_FunctionDef = visit_With = _typeable
+ visit_AsyncFor = visit_AsyncFunctionDef = visit_AsyncWith = _typeable
+
+
+def _collect_type_comments(tree, tokens):
+ visitor = _TypeableVisitor()
+ visitor.visit(tree)
+
+ type_comments = collections.defaultdict(list)
+ for tp, text, start, _, _ in tokens:
+ if (
+ tp != tokenize.COMMENT or # skip non comments
+ not TYPE_COMMENT_RE.match(text) or # skip non-type comments
+ TYPE_IGNORE_RE.match(text) # skip ignores
+ ):
+ continue
+
+ # search for the typeable node at or before the line number of the
+ # type comment.
+ # if the bisection insertion point is before any nodes this is an
+ # invalid type comment which is ignored.
+ lineno, _ = start
+ idx = bisect.bisect_right(visitor.typeable_lines, lineno)
+ if idx == 0:
+ continue
+ node = visitor.typeable_nodes[visitor.typeable_lines[idx - 1]]
+ type_comments[node].append((start, text))
+
+ return type_comments
+
+
+class Checker(object):
+ """
+ I check the cleanliness and sanity of Python code.
+
+ @ivar _deferredFunctions: Tracking list used by L{deferFunction}. Elements
+ of the list are two-tuples. The first element is the callable passed
+ to L{deferFunction}. The second element is a copy of the scope stack
+ at the time L{deferFunction} was called.
+
+ @ivar _deferredAssignments: Similar to C{_deferredFunctions}, but for
+ callables which are deferred assignment checks.
+ """
+
+ _ast_node_scope = {
+ ast.Module: ModuleScope,
+ ast.ClassDef: ClassScope,
+ ast.FunctionDef: FunctionScope,
+ ast.Lambda: FunctionScope,
+ ast.ListComp: GeneratorScope,
+ ast.SetComp: GeneratorScope,
+ ast.GeneratorExp: GeneratorScope,
+ ast.DictComp: GeneratorScope,
+ }
+ if PY35_PLUS:
+ _ast_node_scope[ast.AsyncFunctionDef] = FunctionScope
+
+ nodeDepth = 0
+ offset = None
+ traceTree = False
+ _in_annotation = False
+ _in_typing_literal = False
+ _in_deferred = False
+
+ builtIns = set(builtin_vars).union(_MAGIC_GLOBALS)
+ _customBuiltIns = os.environ.get('PYFLAKES_BUILTINS')
+ if _customBuiltIns:
+ builtIns.update(_customBuiltIns.split(','))
+ del _customBuiltIns
+
+ # TODO: file_tokens= is required to perform checks on type comments,
+ # eventually make this a required positional argument. For now it
+ # is defaulted to `()` for api compatibility.
+ def __init__(self, tree, filename='(none)', builtins=None,
+ withDoctest='PYFLAKES_DOCTEST' in os.environ, file_tokens=()):
+ self._nodeHandlers = {}
+ self._deferredFunctions = []
+ self._deferredAssignments = []
+ self.deadScopes = []
+ self.messages = []
+ self.filename = filename
+ if builtins:
+ self.builtIns = self.builtIns.union(builtins)
+ self.withDoctest = withDoctest
+ try:
+ self.scopeStack = [Checker._ast_node_scope[type(tree)]()]
+ except KeyError:
+ raise RuntimeError('No scope implemented for the node %r' % tree)
+ self.exceptHandlers = [()]
+ self.root = tree
+ self._type_comments = _collect_type_comments(tree, file_tokens)
+ for builtin in self.builtIns:
+ self.addBinding(None, Builtin(builtin))
+ self.handleChildren(tree)
+ self._in_deferred = True
+ self.runDeferred(self._deferredFunctions)
+ # Set _deferredFunctions to None so that deferFunction will fail
+ # noisily if called after we've run through the deferred functions.
+ self._deferredFunctions = None
+ self.runDeferred(self._deferredAssignments)
+ # Set _deferredAssignments to None so that deferAssignment will fail
+ # noisily if called after we've run through the deferred assignments.
+ self._deferredAssignments = None
+ del self.scopeStack[1:]
+ self.popScope()
+ self.checkDeadScopes()
+
+ def deferFunction(self, callable):
+ """
+ Schedule a function handler to be called just before completion.
+
+ This is used for handling function bodies, which must be deferred
+ because code later in the file might modify the global scope. When
+ `callable` is called, the scope at the time this is called will be
+        restored; however, it will contain any new bindings added to it.
+ """
+ self._deferredFunctions.append((callable, self.scopeStack[:], self.offset))
+
+ def deferAssignment(self, callable):
+ """
+ Schedule an assignment handler to be called just after deferred
+ function handlers.
+ """
+ self._deferredAssignments.append((callable, self.scopeStack[:], self.offset))
+
+ def runDeferred(self, deferred):
+ """
+ Run the callables in C{deferred} using their associated scope stack.
+ """
+ for handler, scope, offset in deferred:
+ self.scopeStack = scope
+ self.offset = offset
+ handler()
+
+ def _in_doctest(self):
+ return (len(self.scopeStack) >= 2 and
+ isinstance(self.scopeStack[1], DoctestScope))
+
+ @property
+ def futuresAllowed(self):
+ if not all(isinstance(scope, ModuleScope)
+ for scope in self.scopeStack):
+ return False
+
+ return self.scope._futures_allowed
+
+ @futuresAllowed.setter
+ def futuresAllowed(self, value):
+ assert value is False
+ if isinstance(self.scope, ModuleScope):
+ self.scope._futures_allowed = False
+
+ @property
+ def annotationsFutureEnabled(self):
+ scope = self.scopeStack[0]
+ if not isinstance(scope, ModuleScope):
+ return False
+ return scope._annotations_future_enabled
+
+ @annotationsFutureEnabled.setter
+ def annotationsFutureEnabled(self, value):
+ assert value is True
+ assert isinstance(self.scope, ModuleScope)
+ self.scope._annotations_future_enabled = True
+
+ @property
+ def scope(self):
+ return self.scopeStack[-1]
+
+ def popScope(self):
+ self.deadScopes.append(self.scopeStack.pop())
+
+ def checkDeadScopes(self):
+ """
+ Look at scopes which have been fully examined and report names in them
+ which were imported but unused.
+ """
+ for scope in self.deadScopes:
+ # imports in classes are public members
+ if isinstance(scope, ClassScope):
+ continue
+
+ all_binding = scope.get('__all__')
+ if all_binding and not isinstance(all_binding, ExportBinding):
+ all_binding = None
+
+ if all_binding:
+ all_names = set(all_binding.names)
+ undefined = all_names.difference(scope)
+ else:
+ all_names = undefined = []
+
+ if undefined:
+ if not scope.importStarred and \
+ os.path.basename(self.filename) != '__init__.py':
+ # Look for possible mistakes in the export list
+ for name in undefined:
+ self.report(messages.UndefinedExport,
+ scope['__all__'].source, name)
+
+ # mark all import '*' as used by the undefined in __all__
+ if scope.importStarred:
+ from_list = []
+ for binding in scope.values():
+ if isinstance(binding, StarImportation):
+ binding.used = all_binding
+ from_list.append(binding.fullName)
+ # report * usage, with a list of possible sources
+ from_list = ', '.join(sorted(from_list))
+ for name in undefined:
+ self.report(messages.ImportStarUsage,
+ scope['__all__'].source, name, from_list)
+
+ # Look for imported names that aren't used.
+ for value in scope.values():
+ if isinstance(value, Importation):
+ used = value.used or value.name in all_names
+ if not used:
+ messg = messages.UnusedImport
+ self.report(messg, value.source, str(value))
+ for node in value.redefined:
+ if isinstance(self.getParent(node), FOR_TYPES):
+ messg = messages.ImportShadowedByLoopVar
+ elif used:
+ continue
+ else:
+ messg = messages.RedefinedWhileUnused
+ self.report(messg, node, value.name, value.source)
+
+ def pushScope(self, scopeClass=FunctionScope):
+ self.scopeStack.append(scopeClass())
+
+ def report(self, messageClass, *args, **kwargs):
+ self.messages.append(messageClass(self.filename, *args, **kwargs))
+
+ def getParent(self, node):
+ # Lookup the first parent which is not Tuple, List or Starred
+ while True:
+ node = node._pyflakes_parent
+ if not hasattr(node, 'elts') and not hasattr(node, 'ctx'):
+ return node
+
+ def getCommonAncestor(self, lnode, rnode, stop):
+ if (
+ stop in (lnode, rnode) or
+ not (
+ hasattr(lnode, '_pyflakes_parent') and
+ hasattr(rnode, '_pyflakes_parent')
+ )
+ ):
+ return None
+ if lnode is rnode:
+ return lnode
+
+ if (lnode._pyflakes_depth > rnode._pyflakes_depth):
+ return self.getCommonAncestor(lnode._pyflakes_parent, rnode, stop)
+ if (lnode._pyflakes_depth < rnode._pyflakes_depth):
+ return self.getCommonAncestor(lnode, rnode._pyflakes_parent, stop)
+ return self.getCommonAncestor(
+ lnode._pyflakes_parent,
+ rnode._pyflakes_parent,
+ stop,
+ )
+
+ def descendantOf(self, node, ancestors, stop):
+ for a in ancestors:
+ if self.getCommonAncestor(node, a, stop):
+ return True
+ return False
+
+ def _getAncestor(self, node, ancestor_type):
+ parent = node
+ while True:
+ if parent is self.root:
+ return None
+ parent = self.getParent(parent)
+ if isinstance(parent, ancestor_type):
+ return parent
+
+ def getScopeNode(self, node):
+ return self._getAncestor(node, tuple(Checker._ast_node_scope.keys()))
+
+ def differentForks(self, lnode, rnode):
+ """True, if lnode and rnode are located on different forks of IF/TRY"""
+ ancestor = self.getCommonAncestor(lnode, rnode, self.root)
+ parts = getAlternatives(ancestor)
+ if parts:
+ for items in parts:
+ if self.descendantOf(lnode, items, ancestor) ^ \
+ self.descendantOf(rnode, items, ancestor):
+ return True
+ return False
+
+ def addBinding(self, node, value):
+ """
+ Called when a binding is altered.
+
+ - `node` is the statement responsible for the change
+ - `value` is the new value, a Binding instance
+ """
+ # assert value.source in (node, node._pyflakes_parent):
+ for scope in self.scopeStack[::-1]:
+ if value.name in scope:
+ break
+ existing = scope.get(value.name)
+
+ if (existing and not isinstance(existing, Builtin) and
+ not self.differentForks(node, existing.source)):
+
+ parent_stmt = self.getParent(value.source)
+ if isinstance(existing, Importation) and isinstance(parent_stmt, FOR_TYPES):
+ self.report(messages.ImportShadowedByLoopVar,
+ node, value.name, existing.source)
+
+ elif scope is self.scope:
+ if (isinstance(parent_stmt, ast.comprehension) and
+ not isinstance(self.getParent(existing.source),
+ (FOR_TYPES, ast.comprehension))):
+ self.report(messages.RedefinedInListComp,
+ node, value.name, existing.source)
+ elif not existing.used and value.redefines(existing):
+ if value.name != '_' or isinstance(existing, Importation):
+ if not is_typing_overload(existing, self.scopeStack):
+ self.report(messages.RedefinedWhileUnused,
+ node, value.name, existing.source)
+
+ elif isinstance(existing, Importation) and value.redefines(existing):
+ existing.redefined.append(node)
+
+ if value.name in self.scope:
+ # then assume the rebound name is used as a global or within a loop
+ value.used = self.scope[value.name].used
+
+ self.scope[value.name] = value
+
+ def _unknown_handler(self, node):
+ # this environment variable configures whether to error on unknown
+ # ast types.
+ #
+ # this is silent by default but the error is enabled for the pyflakes
+ # testsuite.
+ #
+ # this allows new syntax to be added to python without *requiring*
+        # changes from the pyflakes side, but will still produce an error
+ # in the pyflakes testsuite (so more specific handling can be added if
+ # needed).
+ if os.environ.get('PYFLAKES_ERROR_UNKNOWN'):
+ raise NotImplementedError('Unexpected type: {}'.format(type(node)))
+ else:
+ self.handleChildren(node)
+
+ def getNodeHandler(self, node_class):
+ try:
+ return self._nodeHandlers[node_class]
+ except KeyError:
+ nodeType = getNodeType(node_class)
+ self._nodeHandlers[node_class] = handler = getattr(
+ self, nodeType, self._unknown_handler,
+ )
+ return handler
+
+ def handleNodeLoad(self, node):
+ name = getNodeName(node)
+ if not name:
+ return
+
+ in_generators = None
+ importStarred = None
+
+ # try enclosing function scopes and global scope
+ for scope in self.scopeStack[-1::-1]:
+ if isinstance(scope, ClassScope):
+ if not PY2 and name == '__class__':
+ return
+ elif in_generators is False:
+ # only generators used in a class scope can access the
+ # names of the class. this is skipped during the first
+ # iteration
+ continue
+
+ if (name == 'print' and
+ isinstance(scope.get(name, None), Builtin)):
+ parent = self.getParent(node)
+ if (isinstance(parent, ast.BinOp) and
+ isinstance(parent.op, ast.RShift)):
+ self.report(messages.InvalidPrintSyntax, node)
+
+ try:
+ scope[name].used = (self.scope, node)
+
+                # if the name of a SubImportation is the same as the
+                # alias of another Importation and that alias is used,
+                # the SubImportation should also be marked as used.
+ n = scope[name]
+ if isinstance(n, Importation) and n._has_alias():
+ try:
+ scope[n.fullName].used = (self.scope, node)
+ except KeyError:
+ pass
+ except KeyError:
+ pass
+ else:
+ return
+
+ importStarred = importStarred or scope.importStarred
+
+ if in_generators is not False:
+ in_generators = isinstance(scope, GeneratorScope)
+
+ if importStarred:
+ from_list = []
+
+ for scope in self.scopeStack[-1::-1]:
+ for binding in scope.values():
+ if isinstance(binding, StarImportation):
+ # mark '*' imports as used for each scope
+ binding.used = (self.scope, node)
+ from_list.append(binding.fullName)
+
+ # report * usage, with a list of possible sources
+ from_list = ', '.join(sorted(from_list))
+ self.report(messages.ImportStarUsage, node, name, from_list)
+ return
+
+ if name == '__path__' and os.path.basename(self.filename) == '__init__.py':
+ # the special name __path__ is valid only in packages
+ return
+
+ if name == '__module__' and isinstance(self.scope, ClassScope):
+ return
+
+ # protected with a NameError handler?
+ if 'NameError' not in self.exceptHandlers[-1]:
+ self.report(messages.UndefinedName, node, name)
+
+ def handleNodeStore(self, node):
+ name = getNodeName(node)
+ if not name:
+ return
+ # if the name hasn't already been defined in the current scope
+ if isinstance(self.scope, FunctionScope) and name not in self.scope:
+ # for each function or module scope above us
+ for scope in self.scopeStack[:-1]:
+ if not isinstance(scope, (FunctionScope, ModuleScope)):
+ continue
+ # if the name was defined in that scope, and the name has
+ # been accessed already in the current scope, and hasn't
+ # been declared global
+ used = name in scope and scope[name].used
+ if used and used[0] is self.scope and name not in self.scope.globals:
+ # then it's probably a mistake
+ self.report(messages.UndefinedLocal,
+ scope[name].used[1], name, scope[name].source)
+ break
+
+ parent_stmt = self.getParent(node)
+ if isinstance(parent_stmt, (FOR_TYPES, ast.comprehension)) or (
+ parent_stmt != node._pyflakes_parent and
+ not self.isLiteralTupleUnpacking(parent_stmt)):
+ binding = Binding(name, node)
+ elif name == '__all__' and isinstance(self.scope, ModuleScope):
+ binding = ExportBinding(name, node._pyflakes_parent, self.scope)
+ elif PY2 and isinstance(getattr(node, 'ctx', None), ast.Param):
+ binding = Argument(name, self.getScopeNode(node))
+ else:
+ binding = Assignment(name, node)
+ self.addBinding(node, binding)
+
+ def handleNodeDelete(self, node):
+
+ def on_conditional_branch():
+ """
+ Return `True` if node is part of a conditional body.
+ """
+ current = getattr(node, '_pyflakes_parent', None)
+ while current:
+ if isinstance(current, (ast.If, ast.While, ast.IfExp)):
+ return True
+ current = getattr(current, '_pyflakes_parent', None)
+ return False
+
+ name = getNodeName(node)
+ if not name:
+ return
+
+ if on_conditional_branch():
+ # We cannot predict if this conditional branch is going to
+ # be executed.
+ return
+
+ if isinstance(self.scope, FunctionScope) and name in self.scope.globals:
+ self.scope.globals.remove(name)
+ else:
+ try:
+ del self.scope[name]
+ except KeyError:
+ self.report(messages.UndefinedName, node, name)
+
+ @contextlib.contextmanager
+ def _enter_annotation(self):
+ orig, self._in_annotation = self._in_annotation, True
+ try:
+ yield
+ finally:
+ self._in_annotation = orig
+
+ def _handle_type_comments(self, node):
+ for (lineno, col_offset), comment in self._type_comments.get(node, ()):
+ comment = comment.split(':', 1)[1].strip()
+ func_match = TYPE_FUNC_RE.match(comment)
+ if func_match:
+ parts = (
+ func_match.group(1).replace('*', ''),
+ func_match.group(2).strip(),
+ )
+ else:
+ parts = (comment,)
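+            # e.g. a '# type: (int, str) -> bool' function comment is split
+            # into the argument part '(int, str)' and the return part
+            # 'bool'; other type comments are handled as a single part.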
+
+ for part in parts:
+ if PY2:
+ part = part.replace('...', 'Ellipsis')
+ self.deferFunction(functools.partial(
+ self.handleStringAnnotation,
+ part, DummyNode(lineno, col_offset), lineno, col_offset,
+ messages.CommentAnnotationSyntaxError,
+ ))
+
+ def handleChildren(self, tree, omit=None):
+ self._handle_type_comments(tree)
+ for node in iter_child_nodes(tree, omit=omit):
+ self.handleNode(node, tree)
+
+ def isLiteralTupleUnpacking(self, node):
+ if isinstance(node, ast.Assign):
+ for child in node.targets + [node.value]:
+ if not hasattr(child, 'elts'):
+ return False
+ return True
+
+ def isDocstring(self, node):
+ """
+ Determine if the given node is a docstring, as long as it is at the
+ correct place in the node tree.
+ """
+ return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and
+ isinstance(node.value, ast.Str))
+
+ def getDocstring(self, node):
+ if isinstance(node, ast.Expr):
+ node = node.value
+ if not isinstance(node, ast.Str):
+ return (None, None)
+
+ if PYPY or PY38_PLUS:
+ doctest_lineno = node.lineno - 1
+ else:
+ # Computed incorrectly if the docstring has backslash
+ doctest_lineno = node.lineno - node.s.count('\n') - 1
+
+ return (node.s, doctest_lineno)
+
+ def handleNode(self, node, parent):
+ if node is None:
+ return
+ if self.offset and getattr(node, 'lineno', None) is not None:
+ node.lineno += self.offset[0]
+ node.col_offset += self.offset[1]
+ if self.traceTree:
+ print(' ' * self.nodeDepth + node.__class__.__name__)
+ if self.futuresAllowed and not (isinstance(node, ast.ImportFrom) or
+ self.isDocstring(node)):
+ self.futuresAllowed = False
+ self.nodeDepth += 1
+ node._pyflakes_depth = self.nodeDepth
+ node._pyflakes_parent = parent
+ try:
+ handler = self.getNodeHandler(node.__class__)
+ handler(node)
+ finally:
+ self.nodeDepth -= 1
+ if self.traceTree:
+ print(' ' * self.nodeDepth + 'end ' + node.__class__.__name__)
+
+ _getDoctestExamples = doctest.DocTestParser().get_examples
+
+ def handleDoctests(self, node):
+ try:
+ if hasattr(node, 'docstring'):
+ docstring = node.docstring
+
+ # This is just a reasonable guess. In Python 3.7, docstrings no
+ # longer have line numbers associated with them. This will be
+ # incorrect if there are empty lines between the beginning
+ # of the function and the docstring.
+ node_lineno = node.lineno
+ if hasattr(node, 'args'):
+ node_lineno = max([node_lineno] +
+ [arg.lineno for arg in node.args.args])
+ else:
+ (docstring, node_lineno) = self.getDocstring(node.body[0])
+ examples = docstring and self._getDoctestExamples(docstring)
+ except (ValueError, IndexError):
+ # e.g. line 6 of the docstring for <string> has inconsistent
+ # leading whitespace: ...
+ return
+ if not examples:
+ return
+
+ # Place doctest in module scope
+ saved_stack = self.scopeStack
+ self.scopeStack = [self.scopeStack[0]]
+ node_offset = self.offset or (0, 0)
+ self.pushScope(DoctestScope)
+ if '_' not in self.scopeStack[0]:
+ self.addBinding(None, Builtin('_'))
+ for example in examples:
+ try:
+ tree = ast.parse(example.source, "<doctest>")
+ except SyntaxError:
+ e = sys.exc_info()[1]
+ if PYPY:
+ e.offset += 1
+ position = (node_lineno + example.lineno + e.lineno,
+ example.indent + 4 + (e.offset or 0))
+ self.report(messages.DoctestSyntaxError, node, position)
+ else:
+ self.offset = (node_offset[0] + node_lineno + example.lineno,
+ node_offset[1] + example.indent + 4)
+ self.handleChildren(tree)
+ self.offset = node_offset
+ self.popScope()
+ self.scopeStack = saved_stack
+
+ @in_annotation
+ def handleStringAnnotation(self, s, node, ref_lineno, ref_col_offset, err):
+ try:
+ tree = ast.parse(s)
+ except SyntaxError:
+ self.report(err, node, s)
+ return
+
+ body = tree.body
+ if len(body) != 1 or not isinstance(body[0], ast.Expr):
+ self.report(err, node, s)
+ return
+
+ parsed_annotation = tree.body[0].value
+ for descendant in ast.walk(parsed_annotation):
+ if (
+ 'lineno' in descendant._attributes and
+ 'col_offset' in descendant._attributes
+ ):
+ descendant.lineno = ref_lineno
+ descendant.col_offset = ref_col_offset
+
+ self.handleNode(parsed_annotation, node)
+
+ @in_annotation
+ def handleAnnotation(self, annotation, node):
+ if isinstance(annotation, ast.Str):
+ # Defer handling forward annotation.
+ self.deferFunction(functools.partial(
+ self.handleStringAnnotation,
+ annotation.s,
+ node,
+ annotation.lineno,
+ annotation.col_offset,
+ messages.ForwardAnnotationSyntaxError,
+ ))
+ elif self.annotationsFutureEnabled:
+ fn = in_annotation(Checker.handleNode)
+ self.deferFunction(lambda: fn(self, annotation, node))
+ else:
+ self.handleNode(annotation, node)
+
+ def ignore(self, node):
+ pass
+
+ # "stmt" type nodes
+ DELETE = PRINT = FOR = ASYNCFOR = WHILE = WITH = WITHITEM = \
+ ASYNCWITH = ASYNCWITHITEM = TRYFINALLY = EXEC = \
+ EXPR = ASSIGN = handleChildren
+
+ PASS = ignore
+
+ # "expr" type nodes
+ BOOLOP = UNARYOP = SET = \
+ REPR = ATTRIBUTE = \
+ STARRED = NAMECONSTANT = NAMEDEXPR = handleChildren
+
+ def SUBSCRIPT(self, node):
+ if (
+ (
+ isinstance(node.value, ast.Name) and
+ node.value.id == 'Literal'
+ ) or (
+ isinstance(node.value, ast.Attribute) and
+ node.value.attr == 'Literal'
+ )
+ ):
+ orig, self._in_typing_literal = self._in_typing_literal, True
+ try:
+ self.handleChildren(node)
+ finally:
+ self._in_typing_literal = orig
+ else:
+ if _is_any_typing_member(node.value, self.scopeStack):
+ with self._enter_annotation():
+ self.handleChildren(node)
+ else:
+ self.handleChildren(node)
+
+ def _handle_string_dot_format(self, node):
+ try:
+ placeholders = tuple(parse_format_string(node.func.value.s))
+ except ValueError as e:
+ self.report(messages.StringDotFormatInvalidFormat, node, e)
+ return
+
+ class state: # py2-compatible `nonlocal`
+ auto = None
+ next_auto = 0
+
+ placeholder_positional = set()
+ placeholder_named = set()
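+        # e.g. for '{0} {name}'.format(1, name='x'), {0} is collected as a
+        # positional placeholder and {name} as a named one; both sets are
+        # later compared against the call's arguments and keywords.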
+
+ def _add_key(fmtkey):
+ """Returns True if there is an error which should early-exit"""
+ if fmtkey is None: # end of string or `{` / `}` escapes
+ return False
+
+ # attributes / indices are allowed in `.format(...)`
+ fmtkey, _, _ = fmtkey.partition('.')
+ fmtkey, _, _ = fmtkey.partition('[')
+
+ try:
+ fmtkey = int(fmtkey)
+ except ValueError:
+ pass
+ else: # fmtkey was an integer
+ if state.auto is True:
+ self.report(messages.StringDotFormatMixingAutomatic, node)
+ return True
+ else:
+ state.auto = False
+
+ if fmtkey == '':
+ if state.auto is False:
+ self.report(messages.StringDotFormatMixingAutomatic, node)
+ return True
+ else:
+ state.auto = True
+
+ fmtkey = state.next_auto
+ state.next_auto += 1
+
+ if isinstance(fmtkey, int):
+ placeholder_positional.add(fmtkey)
+ else:
+ placeholder_named.add(fmtkey)
+
+ return False
+
+ for _, fmtkey, spec, _ in placeholders:
+ if _add_key(fmtkey):
+ return
+
+ # spec can also contain format specifiers
+ if spec is not None:
+ try:
+ spec_placeholders = tuple(parse_format_string(spec))
+ except ValueError as e:
+ self.report(messages.StringDotFormatInvalidFormat, node, e)
+ return
+
+ for _, spec_fmtkey, spec_spec, _ in spec_placeholders:
+ # can't recurse again
+ if spec_spec is not None and '{' in spec_spec:
+ self.report(
+ messages.StringDotFormatInvalidFormat,
+ node,
+ 'Max string recursion exceeded',
+ )
+ return
+ if _add_key(spec_fmtkey):
+ return
+
+ # bail early if there is *args or **kwargs
+ if (
+ # python 2.x *args / **kwargs
+ getattr(node, 'starargs', None) or
+ getattr(node, 'kwargs', None) or
+ # python 3.x *args
+ any(
+ isinstance(arg, getattr(ast, 'Starred', ()))
+ for arg in node.args
+ ) or
+ # python 3.x **kwargs
+ any(kwd.arg is None for kwd in node.keywords)
+ ):
+ return
+
+ substitution_positional = set(range(len(node.args)))
+ substitution_named = {kwd.arg for kwd in node.keywords}
+
+ extra_positional = substitution_positional - placeholder_positional
+ extra_named = substitution_named - placeholder_named
+
+ missing_arguments = (
+ (placeholder_positional | placeholder_named) -
+ (substitution_positional | substitution_named)
+ )
+
+ if extra_positional:
+ self.report(
+ messages.StringDotFormatExtraPositionalArguments,
+ node,
+ ', '.join(sorted(str(x) for x in extra_positional)),
+ )
+ if extra_named:
+ self.report(
+ messages.StringDotFormatExtraNamedArguments,
+ node,
+ ', '.join(sorted(extra_named)),
+ )
+ if missing_arguments:
+ self.report(
+ messages.StringDotFormatMissingArgument,
+ node,
+ ', '.join(sorted(str(x) for x in missing_arguments)),
+ )
+
+ def CALL(self, node):
+ if (
+ isinstance(node.func, ast.Attribute) and
+ isinstance(node.func.value, ast.Str) and
+ node.func.attr == 'format'
+ ):
+ self._handle_string_dot_format(node)
+
+ if (
+ _is_typing(node.func, 'cast', self.scopeStack) and
+ len(node.args) >= 1 and
+ isinstance(node.args[0], ast.Str)
+ ):
+ with self._enter_annotation():
+ self.handleNode(node.args[0], node)
+
+ self.handleChildren(node)
+
+ def _handle_percent_format(self, node):
+ try:
+ placeholders = parse_percent_format(node.left.s)
+ except ValueError:
+ self.report(
+ messages.PercentFormatInvalidFormat,
+ node,
+ 'incomplete format',
+ )
+ return
+
+ named = set()
+ positional_count = 0
+ positional = None
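+        # e.g. '%s %s' % (...) uses positional placeholders while
+        # '%(key)s' % {...} uses named ones; mixing the two styles in one
+        # format string is reported below.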
+ for _, placeholder in placeholders:
+ if placeholder is None:
+ continue
+ name, _, width, precision, conversion = placeholder
+
+ if conversion == '%':
+ continue
+
+ if conversion not in VALID_CONVERSIONS:
+ self.report(
+ messages.PercentFormatUnsupportedFormatCharacter,
+ node,
+ conversion,
+ )
+
+ if positional is None and conversion:
+ positional = name is None
+
+ for part in (width, precision):
+ if part is not None and '*' in part:
+ if not positional:
+ self.report(
+ messages.PercentFormatStarRequiresSequence,
+ node,
+ )
+ else:
+ positional_count += 1
+
+ if positional and name is not None:
+ self.report(
+ messages.PercentFormatMixedPositionalAndNamed,
+ node,
+ )
+ return
+ elif not positional and name is None:
+ self.report(
+ messages.PercentFormatMixedPositionalAndNamed,
+ node,
+ )
+ return
+
+ if positional:
+ positional_count += 1
+ else:
+ named.add(name)
+
+ if (
+ isinstance(node.right, (ast.List, ast.Tuple)) and
+ # does not have any *splats (py35+ feature)
+ not any(
+ isinstance(elt, getattr(ast, 'Starred', ()))
+ for elt in node.right.elts
+ )
+ ):
+ substitution_count = len(node.right.elts)
+ if positional and positional_count != substitution_count:
+ self.report(
+ messages.PercentFormatPositionalCountMismatch,
+ node,
+ positional_count,
+ substitution_count,
+ )
+ elif not positional:
+ self.report(messages.PercentFormatExpectedMapping, node)
+
+ if (
+ isinstance(node.right, ast.Dict) and
+ all(isinstance(k, ast.Str) for k in node.right.keys)
+ ):
+ if positional and positional_count > 1:
+ self.report(messages.PercentFormatExpectedSequence, node)
+ return
+
+ substitution_keys = {k.s for k in node.right.keys}
+ extra_keys = substitution_keys - named
+ missing_keys = named - substitution_keys
+ if not positional and extra_keys:
+ self.report(
+ messages.PercentFormatExtraNamedArguments,
+ node,
+ ', '.join(sorted(extra_keys)),
+ )
+ if not positional and missing_keys:
+ self.report(
+ messages.PercentFormatMissingArgument,
+ node,
+ ', '.join(sorted(missing_keys)),
+ )
+
+ def BINOP(self, node):
+ if (
+ isinstance(node.op, ast.Mod) and
+ isinstance(node.left, ast.Str)
+ ):
+ self._handle_percent_format(node)
+ self.handleChildren(node)
+
+ def STR(self, node):
+ if self._in_annotation and not self._in_typing_literal:
+ fn = functools.partial(
+ self.handleStringAnnotation,
+ node.s,
+ node,
+ node.lineno,
+ node.col_offset,
+ messages.ForwardAnnotationSyntaxError,
+ )
+ if self._in_deferred:
+ fn()
+ else:
+ self.deferFunction(fn)
+
+ if PY38_PLUS:
+ def CONSTANT(self, node):
+ if isinstance(node.value, str):
+ return self.STR(node)
+ else:
+ NUM = BYTES = ELLIPSIS = CONSTANT = ignore
+
+ # "slice" type nodes
+ SLICE = EXTSLICE = INDEX = handleChildren
+
+    # expression contexts are node instances too, though they are constants
+ LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore
+
+ # same for operators
+ AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = \
+ BITOR = BITXOR = BITAND = FLOORDIV = INVERT = NOT = UADD = USUB = \
+ EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = \
+ MATMULT = ignore
+
+ def RAISE(self, node):
+ self.handleChildren(node)
+
+ arg = get_raise_argument(node)
+
+ if isinstance(arg, ast.Call):
+ if is_notimplemented_name_node(arg.func):
+ # Handle "raise NotImplemented(...)"
+ self.report(messages.RaiseNotImplemented, node)
+ elif is_notimplemented_name_node(arg):
+ # Handle "raise NotImplemented"
+ self.report(messages.RaiseNotImplemented, node)
+
+ # additional node types
+ COMPREHENSION = KEYWORD = FORMATTEDVALUE = handleChildren
+
+ _in_fstring = False
+
+ def JOINEDSTR(self, node):
+ if (
+ # the conversion / etc. flags are parsed as f-strings without
+ # placeholders
+ not self._in_fstring and
+ not any(isinstance(x, ast.FormattedValue) for x in node.values)
+ ):
+ self.report(messages.FStringMissingPlaceholders, node)
+
+ self._in_fstring, orig = True, self._in_fstring
+ try:
+ self.handleChildren(node)
+ finally:
+ self._in_fstring = orig
+
+ def DICT(self, node):
+        # Complain if there are duplicate keys with different values.
+        # If they have the same value it's not going to cause potentially
+        # unexpected behaviour, so we don't complain.
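+        # e.g. {'a': 1, 'a': 2} is reported, while {'a': 1, 'a': 1} is not.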
+ keys = [
+ convert_to_value(key) for key in node.keys
+ ]
+
+ key_counts = counter(keys)
+ duplicate_keys = [
+ key for key, count in key_counts.items()
+ if count > 1
+ ]
+
+ for key in duplicate_keys:
+ key_indices = [i for i, i_key in enumerate(keys) if i_key == key]
+
+ values = counter(
+ convert_to_value(node.values[index])
+ for index in key_indices
+ )
+ if any(count == 1 for value, count in values.items()):
+ for key_index in key_indices:
+ key_node = node.keys[key_index]
+ if isinstance(key, VariableKey):
+ self.report(messages.MultiValueRepeatedKeyVariable,
+ key_node,
+ key.name)
+ else:
+ self.report(
+ messages.MultiValueRepeatedKeyLiteral,
+ key_node,
+ key,
+ )
+ self.handleChildren(node)
+
+ def IF(self, node):
+ if isinstance(node.test, ast.Tuple) and node.test.elts != []:
+ self.report(messages.IfTuple, node)
+ self.handleChildren(node)
+
+ IFEXP = IF
+
+ def ASSERT(self, node):
+ if isinstance(node.test, ast.Tuple) and node.test.elts != []:
+ self.report(messages.AssertTuple, node)
+ self.handleChildren(node)
+
+ def GLOBAL(self, node):
+ """
+ Keep track of globals declarations.
+ """
+ global_scope_index = 1 if self._in_doctest() else 0
+ global_scope = self.scopeStack[global_scope_index]
+
+ # Ignore 'global' statement in global scope.
+ if self.scope is not global_scope:
+
+ # One 'global' statement can bind multiple (comma-delimited) names.
+ for node_name in node.names:
+ node_value = Assignment(node_name, node)
+
+ # Remove UndefinedName messages already reported for this name.
+ # TODO: if the global is not used in this scope, it does not
+ # become a globally defined name. See test_unused_global.
+ self.messages = [
+ m for m in self.messages if not
+ isinstance(m, messages.UndefinedName) or
+ m.message_args[0] != node_name]
+
+ # Bind name to global scope if it doesn't exist already.
+ global_scope.setdefault(node_name, node_value)
+
+ # Bind name to non-global scopes, but as already "used".
+ node_value.used = (global_scope, node)
+ for scope in self.scopeStack[global_scope_index + 1:]:
+ scope[node_name] = node_value
+
+ NONLOCAL = GLOBAL
+
+ def GENERATOREXP(self, node):
+ self.pushScope(GeneratorScope)
+ self.handleChildren(node)
+ self.popScope()
+
+ LISTCOMP = handleChildren if PY2 else GENERATOREXP
+
+ DICTCOMP = SETCOMP = GENERATOREXP
+
+ def NAME(self, node):
+ """
+ Handle occurrence of Name (which can be a load/store/delete access.)
+ """
+ # Locate the name in locals / function / globals scopes.
+ if isinstance(node.ctx, ast.Load):
+ self.handleNodeLoad(node)
+ if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and
+ isinstance(node._pyflakes_parent, ast.Call)):
+ # we are doing locals() call in current scope
+ self.scope.usesLocals = True
+ elif isinstance(node.ctx, ast.Store):
+ self.handleNodeStore(node)
+ elif PY2 and isinstance(node.ctx, ast.Param):
+ self.handleNodeStore(node)
+ elif isinstance(node.ctx, ast.Del):
+ self.handleNodeDelete(node)
+ else:
+ # Unknown context
+ raise RuntimeError("Got impossible expression context: %r" % (node.ctx,))
+
+ def CONTINUE(self, node):
+ # Walk the tree up until we see a loop (OK), a function or class
+        # definition (not OK), a finally block (not OK, for 'continue'
+        # only), or the top module scope (not OK).
+ n = node
+ while hasattr(n, '_pyflakes_parent'):
+ n, n_child = n._pyflakes_parent, n
+ if isinstance(n, LOOP_TYPES):
+ # Doesn't apply unless it's in the loop itself
+ if n_child not in n.orelse:
+ return
+ if isinstance(n, (ast.FunctionDef, ast.ClassDef)):
+ break
+ # Handle Try/TryFinally difference in Python < and >= 3.3
+ if hasattr(n, 'finalbody') and isinstance(node, ast.Continue):
+ if n_child in n.finalbody and not PY38_PLUS:
+ self.report(messages.ContinueInFinally, node)
+ return
+ if isinstance(node, ast.Continue):
+ self.report(messages.ContinueOutsideLoop, node)
+ else: # ast.Break
+ self.report(messages.BreakOutsideLoop, node)
+
+ BREAK = CONTINUE
+
+ def RETURN(self, node):
+ if isinstance(self.scope, (ClassScope, ModuleScope)):
+ self.report(messages.ReturnOutsideFunction, node)
+ return
+
+ if (
+ node.value and
+ hasattr(self.scope, 'returnValue') and
+ not self.scope.returnValue
+ ):
+ self.scope.returnValue = node.value
+ self.handleNode(node.value, node)
+
+ def YIELD(self, node):
+ if isinstance(self.scope, (ClassScope, ModuleScope)):
+ self.report(messages.YieldOutsideFunction, node)
+ return
+
+ self.scope.isGenerator = True
+ self.handleNode(node.value, node)
+
+ AWAIT = YIELDFROM = YIELD
+
+ def FUNCTIONDEF(self, node):
+ for deco in node.decorator_list:
+ self.handleNode(deco, node)
+ self.LAMBDA(node)
+ self.addBinding(node, FunctionDefinition(node.name, node))
+ # doctest does not process doctest within a doctest,
+ # or in nested functions.
+ if (self.withDoctest and
+ not self._in_doctest() and
+ not isinstance(self.scope, FunctionScope)):
+ self.deferFunction(lambda: self.handleDoctests(node))
+
+ ASYNCFUNCTIONDEF = FUNCTIONDEF
+
+ def LAMBDA(self, node):
+ args = []
+ annotations = []
+
+ if PY2:
+ def addArgs(arglist):
+ for arg in arglist:
+ if isinstance(arg, ast.Tuple):
+ addArgs(arg.elts)
+ else:
+ args.append(arg.id)
+ addArgs(node.args.args)
+ defaults = node.args.defaults
+ else:
+ if PY38_PLUS:
+ for arg in node.args.posonlyargs:
+ args.append(arg.arg)
+ annotations.append(arg.annotation)
+ for arg in node.args.args + node.args.kwonlyargs:
+ args.append(arg.arg)
+ annotations.append(arg.annotation)
+ defaults = node.args.defaults + node.args.kw_defaults
+
+ # Only for Python3 FunctionDefs
+ is_py3_func = hasattr(node, 'returns')
+
+ for arg_name in ('vararg', 'kwarg'):
+ wildcard = getattr(node.args, arg_name)
+ if not wildcard:
+ continue
+ args.append(wildcard if PY2 else wildcard.arg)
+ if is_py3_func:
+ if PY2: # Python 2.7
+ argannotation = arg_name + 'annotation'
+ annotations.append(getattr(node.args, argannotation))
+ else: # Python >= 3.4
+ annotations.append(wildcard.annotation)
+
+ if is_py3_func:
+ annotations.append(node.returns)
+
+ if len(set(args)) < len(args):
+ for (idx, arg) in enumerate(args):
+ if arg in args[:idx]:
+ self.report(messages.DuplicateArgument, node, arg)
+
+ for annotation in annotations:
+ self.handleAnnotation(annotation, node)
+
+ for default in defaults:
+ self.handleNode(default, node)
+
+ def runFunction():
+
+ self.pushScope()
+
+ self.handleChildren(node, omit=['decorator_list', 'returns'])
+
+ def checkUnusedAssignments():
+ """
+ Check to see if any assignments have not been used.
+ """
+ for name, binding in self.scope.unusedAssignments():
+ self.report(messages.UnusedVariable, binding.source, name)
+ self.deferAssignment(checkUnusedAssignments)
+
+ if PY2:
+ def checkReturnWithArgumentInsideGenerator():
+ """
+ Check to see if there is any return statement with
+ arguments but the function is a generator.
+ """
+ if self.scope.isGenerator and self.scope.returnValue:
+ self.report(messages.ReturnWithArgsInsideGenerator,
+ self.scope.returnValue)
+ self.deferAssignment(checkReturnWithArgumentInsideGenerator)
+ self.popScope()
+
+ self.deferFunction(runFunction)
+
+ def ARGUMENTS(self, node):
+ self.handleChildren(node, omit=('defaults', 'kw_defaults'))
+ if PY2:
+ scope_node = self.getScopeNode(node)
+ if node.vararg:
+ self.addBinding(node, Argument(node.vararg, scope_node))
+ if node.kwarg:
+ self.addBinding(node, Argument(node.kwarg, scope_node))
+
+ def ARG(self, node):
+ self.addBinding(node, Argument(node.arg, self.getScopeNode(node)))
+
+ def CLASSDEF(self, node):
+ """
+ Check names used in a class definition, including its decorators, base
+ classes, and the body of its definition. Additionally, add its name to
+ the current scope.
+ """
+ for deco in node.decorator_list:
+ self.handleNode(deco, node)
+ for baseNode in node.bases:
+ self.handleNode(baseNode, node)
+ if not PY2:
+ for keywordNode in node.keywords:
+ self.handleNode(keywordNode, node)
+ self.pushScope(ClassScope)
+        # doctest does not process doctests within a doctest;
+        # classes within classes are processed.
+ if (self.withDoctest and
+ not self._in_doctest() and
+ not isinstance(self.scope, FunctionScope)):
+ self.deferFunction(lambda: self.handleDoctests(node))
+ for stmt in node.body:
+ self.handleNode(stmt, node)
+ self.popScope()
+ self.addBinding(node, ClassDefinition(node.name, node))
+
+ def AUGASSIGN(self, node):
+ self.handleNodeLoad(node.target)
+ self.handleNode(node.value, node)
+ self.handleNode(node.target, node)
+
+ def TUPLE(self, node):
+ if not PY2 and isinstance(node.ctx, ast.Store):
+ # Python 3 advanced tuple unpacking: a, *b, c = d.
+ # Only one starred expression is allowed, and no more than 1<<8
+            # assignments are allowed before a starred expression. There is
+ # also a limit of 1<<24 expressions after the starred expression,
+ # which is impossible to test due to memory restrictions, but we
+ # add it here anyway
+ has_starred = False
+ star_loc = -1
+ for i, n in enumerate(node.elts):
+ if isinstance(n, ast.Starred):
+ if has_starred:
+ self.report(messages.TwoStarredExpressions, node)
+ # The SyntaxError doesn't distinguish two from more
+ # than two.
+ break
+ has_starred = True
+ star_loc = i
+ if star_loc >= 1 << 8 or len(node.elts) - star_loc - 1 >= 1 << 24:
+ self.report(messages.TooManyExpressionsInStarredAssignment, node)
+ self.handleChildren(node)
+
+ LIST = TUPLE
+
+ def IMPORT(self, node):
+ for alias in node.names:
+ if '.' in alias.name and not alias.asname:
+ importation = SubmoduleImportation(alias.name, node)
+ else:
+ name = alias.asname or alias.name
+ importation = Importation(name, node, alias.name)
+ self.addBinding(node, importation)
+
+ def IMPORTFROM(self, node):
+ if node.module == '__future__':
+ if not self.futuresAllowed:
+ self.report(messages.LateFutureImport,
+ node, [n.name for n in node.names])
+ else:
+ self.futuresAllowed = False
+
+ module = ('.' * node.level) + (node.module or '')
+
+ for alias in node.names:
+ name = alias.asname or alias.name
+ if node.module == '__future__':
+ importation = FutureImportation(name, node, self.scope)
+ if alias.name not in __future__.all_feature_names:
+ self.report(messages.FutureFeatureNotDefined,
+ node, alias.name)
+ if alias.name == 'annotations':
+ self.annotationsFutureEnabled = True
+ elif alias.name == '*':
+ # Only Python 2, local import * is a SyntaxWarning
+ if not PY2 and not isinstance(self.scope, ModuleScope):
+ self.report(messages.ImportStarNotPermitted,
+ node, module)
+ continue
+
+ self.scope.importStarred = True
+ self.report(messages.ImportStarUsed, node, module)
+ importation = StarImportation(module, node)
+ else:
+ importation = ImportationFrom(name, node,
+ module, alias.name)
+ self.addBinding(node, importation)
+
+ def TRY(self, node):
+ handler_names = []
+ # List the exception handlers
+ for i, handler in enumerate(node.handlers):
+ if isinstance(handler.type, ast.Tuple):
+ for exc_type in handler.type.elts:
+ handler_names.append(getNodeName(exc_type))
+ elif handler.type:
+ handler_names.append(getNodeName(handler.type))
+
+ if handler.type is None and i < len(node.handlers) - 1:
+ self.report(messages.DefaultExceptNotLast, handler)
+ # Memorize the except handlers and process the body
+ self.exceptHandlers.append(handler_names)
+ for child in node.body:
+ self.handleNode(child, node)
+ self.exceptHandlers.pop()
+ # Process the other nodes: "except:", "else:", "finally:"
+ self.handleChildren(node, omit='body')
+
+ TRYEXCEPT = TRY
+
+ def EXCEPTHANDLER(self, node):
+ if PY2 or node.name is None:
+ self.handleChildren(node)
+ return
+
+ # If the name already exists in the scope, modify state of existing
+ # binding.
+ if node.name in self.scope:
+ self.handleNodeStore(node)
+
+ # 3.x: the name of the exception, which is not a Name node, but a
+ # simple string, creates a local that is only bound within the scope of
+ # the except: block. As such, temporarily remove the existing binding
+ # to more accurately determine if the name is used in the except:
+ # block.
+
+ try:
+ prev_definition = self.scope.pop(node.name)
+ except KeyError:
+ prev_definition = None
+
+ self.handleNodeStore(node)
+ self.handleChildren(node)
+
+ # See discussion on https://github.com/PyCQA/pyflakes/pull/59
+
+ # We're removing the local name since it's being unbound after leaving
+ # the except: block and it's always unbound if the except: block is
+ # never entered. This will cause an "undefined name" error raised if
+ # the checked code tries to use the name afterwards.
+ #
+ # Unless it's been removed already. Then do nothing.
+
+ try:
+ binding = self.scope.pop(node.name)
+ except KeyError:
+ pass
+ else:
+ if not binding.used:
+ self.report(messages.UnusedVariable, node, node.name)
+
+ # Restore.
+ if prev_definition:
+ self.scope[node.name] = prev_definition
+
+ def ANNASSIGN(self, node):
+ if node.value:
+ # Only bind the *targets* if the assignment has a value.
+ # Otherwise it's not really ast.Store and shouldn't silence
+ # UndefinedLocal warnings.
+ self.handleNode(node.target, node)
+ self.handleAnnotation(node.annotation, node)
+ if node.value:
+ # If the assignment has value, handle the *value* now.
+ self.handleNode(node.value, node)
+
+ def COMPARE(self, node):
+ left = node.left
+ for op, right in zip(node.ops, node.comparators):
+ if (
+ isinstance(op, (ast.Is, ast.IsNot)) and (
+ _is_const_non_singleton(left) or
+ _is_const_non_singleton(right)
+ )
+ ):
+ self.report(messages.IsLiteral, node)
+ left = right
+
+ self.handleChildren(node)
diff --git a/third_party/python/pyflakes/pyflakes/messages.py b/third_party/python/pyflakes/pyflakes/messages.py
new file mode 100644
index 0000000000..1bb4ab0f16
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/messages.py
@@ -0,0 +1,371 @@
+"""
+Provide the class Message and its subclasses.
+"""
+
+
+class Message(object):
+ message = ''
+ message_args = ()
+
+ def __init__(self, filename, loc):
+ self.filename = filename
+ self.lineno = loc.lineno
+ self.col = getattr(loc, 'col_offset', 0)
+
+ def __str__(self):
+ return '%s:%s:%s %s' % (self.filename, self.lineno, self.col+1,
+ self.message % self.message_args)
+
+
+class UnusedImport(Message):
+ message = '%r imported but unused'
+
+ def __init__(self, filename, loc, name):
+ Message.__init__(self, filename, loc)
+ self.message_args = (name,)
+
+
+class RedefinedWhileUnused(Message):
+ message = 'redefinition of unused %r from line %r'
+
+ def __init__(self, filename, loc, name, orig_loc):
+ Message.__init__(self, filename, loc)
+ self.message_args = (name, orig_loc.lineno)
+
+
+class RedefinedInListComp(Message):
+ message = 'list comprehension redefines %r from line %r'
+
+ def __init__(self, filename, loc, name, orig_loc):
+ Message.__init__(self, filename, loc)
+ self.message_args = (name, orig_loc.lineno)
+
+
+class ImportShadowedByLoopVar(Message):
+ message = 'import %r from line %r shadowed by loop variable'
+
+ def __init__(self, filename, loc, name, orig_loc):
+ Message.__init__(self, filename, loc)
+ self.message_args = (name, orig_loc.lineno)
+
+
+class ImportStarNotPermitted(Message):
+ message = "'from %s import *' only allowed at module level"
+
+ def __init__(self, filename, loc, modname):
+ Message.__init__(self, filename, loc)
+ self.message_args = (modname,)
+
+
+class ImportStarUsed(Message):
+ message = "'from %s import *' used; unable to detect undefined names"
+
+ def __init__(self, filename, loc, modname):
+ Message.__init__(self, filename, loc)
+ self.message_args = (modname,)
+
+
+class ImportStarUsage(Message):
+ message = "%r may be undefined, or defined from star imports: %s"
+
+ def __init__(self, filename, loc, name, from_list):
+ Message.__init__(self, filename, loc)
+ self.message_args = (name, from_list)
+
+
+class UndefinedName(Message):
+ message = 'undefined name %r'
+
+ def __init__(self, filename, loc, name):
+ Message.__init__(self, filename, loc)
+ self.message_args = (name,)
+
+
+class DoctestSyntaxError(Message):
+ message = 'syntax error in doctest'
+
+ def __init__(self, filename, loc, position=None):
+ Message.__init__(self, filename, loc)
+ if position:
+ (self.lineno, self.col) = position
+ self.message_args = ()
+
+
+class UndefinedExport(Message):
+ message = 'undefined name %r in __all__'
+
+ def __init__(self, filename, loc, name):
+ Message.__init__(self, filename, loc)
+ self.message_args = (name,)
+
+
+class UndefinedLocal(Message):
+ message = 'local variable %r {0} referenced before assignment'
+
+ default = 'defined in enclosing scope on line %r'
+ builtin = 'defined as a builtin'
+
+ def __init__(self, filename, loc, name, orig_loc):
+ Message.__init__(self, filename, loc)
+ if orig_loc is None:
+ self.message = self.message.format(self.builtin)
+ self.message_args = name
+ else:
+ self.message = self.message.format(self.default)
+ self.message_args = (name, orig_loc.lineno)
+
+
+class DuplicateArgument(Message):
+ message = 'duplicate argument %r in function definition'
+
+ def __init__(self, filename, loc, name):
+ Message.__init__(self, filename, loc)
+ self.message_args = (name,)
+
+
+class MultiValueRepeatedKeyLiteral(Message):
+ message = 'dictionary key %r repeated with different values'
+
+ def __init__(self, filename, loc, key):
+ Message.__init__(self, filename, loc)
+ self.message_args = (key,)
+
+
+class MultiValueRepeatedKeyVariable(Message):
+ message = 'dictionary key variable %s repeated with different values'
+
+ def __init__(self, filename, loc, key):
+ Message.__init__(self, filename, loc)
+ self.message_args = (key,)
+
+
+class LateFutureImport(Message):
+ message = 'from __future__ imports must occur at the beginning of the file'
+
+ def __init__(self, filename, loc, names):
+ Message.__init__(self, filename, loc)
+ self.message_args = ()
+
+
+class FutureFeatureNotDefined(Message):
+ """An undefined __future__ feature name was imported."""
+ message = 'future feature %s is not defined'
+
+ def __init__(self, filename, loc, name):
+ Message.__init__(self, filename, loc)
+ self.message_args = (name,)
+
+
+class UnusedVariable(Message):
+ """
+ Indicates that a variable has been explicitly assigned to but not actually
+ used.
+ """
+ message = 'local variable %r is assigned to but never used'
+
+ def __init__(self, filename, loc, names):
+ Message.__init__(self, filename, loc)
+ self.message_args = (names,)
+
+
+class ReturnWithArgsInsideGenerator(Message):
+ """
+ Indicates a return statement with arguments inside a generator.
+ """
+ message = '\'return\' with argument inside generator'
+
+
+class ReturnOutsideFunction(Message):
+ """
+ Indicates a return statement outside of a function/method.
+ """
+ message = '\'return\' outside function'
+
+
+class YieldOutsideFunction(Message):
+ """
+ Indicates a yield or yield from statement outside of a function/method.
+ """
+ message = '\'yield\' outside function'
+
+
+# For whatever reason, Python gives different error messages for these two. We
+# match the Python error message exactly.
+class ContinueOutsideLoop(Message):
+ """
+ Indicates a continue statement outside of a while or for loop.
+ """
+ message = '\'continue\' not properly in loop'
+
+
+class BreakOutsideLoop(Message):
+ """
+ Indicates a break statement outside of a while or for loop.
+ """
+ message = '\'break\' outside loop'
+
+
+class ContinueInFinally(Message):
+ """
+ Indicates a continue statement in a finally block in a while or for loop.
+ """
+ message = '\'continue\' not supported inside \'finally\' clause'
+
+
+class DefaultExceptNotLast(Message):
+ """
+    Indicates an except: block that is not the last exception handler.
+ """
+ message = 'default \'except:\' must be last'
+
+
+class TwoStarredExpressions(Message):
+ """
+ Two or more starred expressions in an assignment (a, *b, *c = d).
+ """
+ message = 'two starred expressions in assignment'
+
+
+class TooManyExpressionsInStarredAssignment(Message):
+ """
+ Too many expressions in an assignment with star-unpacking
+ """
+ message = 'too many expressions in star-unpacking assignment'
+
+
+class IfTuple(Message):
+ """
+    Conditional test is a non-empty tuple literal, which is always True.
+ """
+ message = '\'if tuple literal\' is always true, perhaps remove accidental comma?'
+
+
+class AssertTuple(Message):
+ """
+    Assertion test is a non-empty tuple literal, which is always True.
+ """
+ message = 'assertion is always true, perhaps remove parentheses?'
+
+
+class ForwardAnnotationSyntaxError(Message):
+ message = 'syntax error in forward annotation %r'
+
+ def __init__(self, filename, loc, annotation):
+ Message.__init__(self, filename, loc)
+ self.message_args = (annotation,)
+
+
+class CommentAnnotationSyntaxError(Message):
+ message = 'syntax error in type comment %r'
+
+ def __init__(self, filename, loc, annotation):
+ Message.__init__(self, filename, loc)
+ self.message_args = (annotation,)
+
+
+class RaiseNotImplemented(Message):
+ message = "'raise NotImplemented' should be 'raise NotImplementedError'"
+
+
+class InvalidPrintSyntax(Message):
+ message = 'use of >> is invalid with print function'
+
+
+class IsLiteral(Message):
+ message = 'use ==/!= to compare constant literals (str, bytes, int, float, tuple)'
+
+
+class FStringMissingPlaceholders(Message):
+ message = 'f-string is missing placeholders'
+
+
+class StringDotFormatExtraPositionalArguments(Message):
+ message = "'...'.format(...) has unused arguments at position(s): %s"
+
+ def __init__(self, filename, loc, extra_positions):
+ Message.__init__(self, filename, loc)
+ self.message_args = (extra_positions,)
+
+
+class StringDotFormatExtraNamedArguments(Message):
+ message = "'...'.format(...) has unused named argument(s): %s"
+
+ def __init__(self, filename, loc, extra_keywords):
+ Message.__init__(self, filename, loc)
+ self.message_args = (extra_keywords,)
+
+
+class StringDotFormatMissingArgument(Message):
+ message = "'...'.format(...) is missing argument(s) for placeholder(s): %s"
+
+ def __init__(self, filename, loc, missing_arguments):
+ Message.__init__(self, filename, loc)
+ self.message_args = (missing_arguments,)
+
+
+class StringDotFormatMixingAutomatic(Message):
+ message = "'...'.format(...) mixes automatic and manual numbering"
+
+
+class StringDotFormatInvalidFormat(Message):
+ message = "'...'.format(...) has invalid format string: %s"
+
+ def __init__(self, filename, loc, error):
+ Message.__init__(self, filename, loc)
+ self.message_args = (error,)
+
+
+class PercentFormatInvalidFormat(Message):
+ message = "'...' %% ... has invalid format string: %s"
+
+ def __init__(self, filename, loc, error):
+ Message.__init__(self, filename, loc)
+ self.message_args = (error,)
+
+
+class PercentFormatMixedPositionalAndNamed(Message):
+ message = "'...' %% ... has mixed positional and named placeholders"
+
+
+class PercentFormatUnsupportedFormatCharacter(Message):
+ message = "'...' %% ... has unsupported format character %r"
+
+ def __init__(self, filename, loc, c):
+ Message.__init__(self, filename, loc)
+ self.message_args = (c,)
+
+
+class PercentFormatPositionalCountMismatch(Message):
+ message = "'...' %% ... has %d placeholder(s) but %d substitution(s)"
+
+ def __init__(self, filename, loc, n_placeholders, n_substitutions):
+ Message.__init__(self, filename, loc)
+ self.message_args = (n_placeholders, n_substitutions)
+
+
+class PercentFormatExtraNamedArguments(Message):
+ message = "'...' %% ... has unused named argument(s): %s"
+
+ def __init__(self, filename, loc, extra_keywords):
+ Message.__init__(self, filename, loc)
+ self.message_args = (extra_keywords,)
+
+
+class PercentFormatMissingArgument(Message):
+ message = "'...' %% ... is missing argument(s) for placeholder(s): %s"
+
+ def __init__(self, filename, loc, missing_arguments):
+ Message.__init__(self, filename, loc)
+ self.message_args = (missing_arguments,)
+
+
+class PercentFormatExpectedMapping(Message):
+ message = "'...' %% ... expected mapping but got sequence"
+
+
+class PercentFormatExpectedSequence(Message):
+ message = "'...' %% ... expected sequence but got mapping"
+
+
+class PercentFormatStarRequiresSequence(Message):
+ message = "'...' %% ... `*` specifier requires sequence"
diff --git a/third_party/python/pyflakes/pyflakes/reporter.py b/third_party/python/pyflakes/pyflakes/reporter.py
new file mode 100644
index 0000000000..0faef656bf
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/reporter.py
@@ -0,0 +1,82 @@
+"""
+Provide the Reporter class.
+"""
+
+import re
+import sys
+
+
+class Reporter(object):
+ """
+ Formats the results of pyflakes checks to users.
+ """
+
+ def __init__(self, warningStream, errorStream):
+ """
+ Construct a L{Reporter}.
+
+ @param warningStream: A file-like object where warnings will be
+ written to. The stream's C{write} method must accept unicode.
+ C{sys.stdout} is a good value.
+ @param errorStream: A file-like object where error output will be
+ written to. The stream's C{write} method must accept unicode.
+ C{sys.stderr} is a good value.
+ """
+ self._stdout = warningStream
+ self._stderr = errorStream
+
+ def unexpectedError(self, filename, msg):
+ """
+ An unexpected error occurred trying to process C{filename}.
+
+        @param filename: The path to a file that we could not process.
+        @type filename: C{unicode}
+        @param msg: A message explaining the problem.
+        @type msg: C{unicode}
+ """
+ self._stderr.write("%s: %s\n" % (filename, msg))
+
+ def syntaxError(self, filename, msg, lineno, offset, text):
+ """
+ There was a syntax error in C{filename}.
+
+        @param filename: The path to the file with the syntax error.
+        @type filename: C{unicode}
+        @param msg: An explanation of the syntax error.
+        @type msg: C{unicode}
+        @param lineno: The line number where the syntax error occurred.
+        @type lineno: C{int}
+        @param offset: The column on which the syntax error occurred, or None.
+        @type offset: C{int}
+        @param text: The source code containing the syntax error.
+        @type text: C{unicode}
+ """
+ line = text.splitlines()[-1]
+ if offset is not None:
+ if sys.version_info < (3, 8):
+ offset = offset - (len(text) - len(line)) + 1
+ self._stderr.write('%s:%d:%d: %s\n' %
+ (filename, lineno, offset, msg))
+ else:
+ self._stderr.write('%s:%d: %s\n' % (filename, lineno, msg))
+ self._stderr.write(line)
+ self._stderr.write('\n')
+ if offset is not None:
+ self._stderr.write(re.sub(r'\S', ' ', line[:offset - 1]) +
+ "^\n")
+
+ def flake(self, message):
+ """
+ pyflakes found something wrong with the code.
+
+        @param message: A L{pyflakes.messages.Message}.
+ """
+ self._stdout.write(str(message))
+ self._stdout.write('\n')
+
+
+def _makeDefaultReporter():
+ """
+ Make a reporter that can be used when no reporter is specified.
+ """
+ return Reporter(sys.stdout, sys.stderr)
diff --git a/third_party/python/pyflakes/pyflakes/scripts/__init__.py b/third_party/python/pyflakes/pyflakes/scripts/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/scripts/__init__.py
diff --git a/third_party/python/pyflakes/pyflakes/scripts/pyflakes.py b/third_party/python/pyflakes/pyflakes/scripts/pyflakes.py
new file mode 100644
index 0000000000..4a18e79664
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/scripts/pyflakes.py
@@ -0,0 +1,8 @@
+"""
+Implementation of the command-line I{pyflakes} tool.
+"""
+from __future__ import absolute_import
+
+# For backward compatibility
+__all__ = ['check', 'checkPath', 'checkRecursive', 'iterSourceCode', 'main']
+from pyflakes.api import check, checkPath, checkRecursive, iterSourceCode, main
diff --git a/third_party/python/pyflakes/pyflakes/test/__init__.py b/third_party/python/pyflakes/pyflakes/test/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/__init__.py
diff --git a/third_party/python/pyflakes/pyflakes/test/harness.py b/third_party/python/pyflakes/pyflakes/test/harness.py
new file mode 100644
index 0000000000..b20ac79d73
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/harness.py
@@ -0,0 +1,72 @@
+import ast
+import textwrap
+import unittest
+
+from pyflakes import checker
+
+__all__ = ['TestCase', 'skip', 'skipIf']
+
+skip = unittest.skip
+skipIf = unittest.skipIf
+
+
+class TestCase(unittest.TestCase):
+
+ withDoctest = False
+
+ def flakes(self, input, *expectedOutputs, **kw):
+ tree = ast.parse(textwrap.dedent(input))
+ file_tokens = checker.make_tokens(textwrap.dedent(input))
+ if kw.get('is_segment'):
+ tree = tree.body[0]
+ kw.pop('is_segment')
+ w = checker.Checker(
+ tree, file_tokens=file_tokens, withDoctest=self.withDoctest, **kw
+ )
+ outputs = [type(o) for o in w.messages]
+ expectedOutputs = list(expectedOutputs)
+ outputs.sort(key=lambda t: t.__name__)
+ expectedOutputs.sort(key=lambda t: t.__name__)
+ self.assertEqual(outputs, expectedOutputs, '''\
+for input:
+%s
+expected outputs:
+%r
+but got:
+%s''' % (input, expectedOutputs, '\n'.join([str(o) for o in w.messages])))
+ return w
+
+ if not hasattr(unittest.TestCase, 'assertIs'):
+
+ def assertIs(self, expr1, expr2, msg=None):
+ if expr1 is not expr2:
+ self.fail(msg or '%r is not %r' % (expr1, expr2))
+
+ if not hasattr(unittest.TestCase, 'assertIsInstance'):
+
+ def assertIsInstance(self, obj, cls, msg=None):
+ """Same as self.assertTrue(isinstance(obj, cls))."""
+ if not isinstance(obj, cls):
+ self.fail(msg or '%r is not an instance of %r' % (obj, cls))
+
+ if not hasattr(unittest.TestCase, 'assertNotIsInstance'):
+
+ def assertNotIsInstance(self, obj, cls, msg=None):
+ """Same as self.assertFalse(isinstance(obj, cls))."""
+ if isinstance(obj, cls):
+ self.fail(msg or '%r is an instance of %r' % (obj, cls))
+
+ if not hasattr(unittest.TestCase, 'assertIn'):
+
+ def assertIn(self, member, container, msg=None):
+ """Just like self.assertTrue(a in b)."""
+ if member not in container:
+ self.fail(msg or '%r not found in %r' % (member, container))
+
+ if not hasattr(unittest.TestCase, 'assertNotIn'):
+
+ def assertNotIn(self, member, container, msg=None):
+ """Just like self.assertTrue(a not in b)."""
+ if member in container:
+ self.fail(msg or
+ '%r unexpectedly found in %r' % (member, container))
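
The harness above is what the test modules below build on; a minimal sketch of
a test written against it (the class name is illustrative):

    from pyflakes import messages as m
    from pyflakes.test.harness import TestCase

    class ExampleTest(TestCase):
        def test_unused_import(self):
            # flakes() parses the snippet and asserts exactly these messages
            self.flakes('import os', m.UnusedImport)
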
diff --git a/third_party/python/pyflakes/pyflakes/test/test_api.py b/third_party/python/pyflakes/pyflakes/test/test_api.py
new file mode 100644
index 0000000000..128aa69ded
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/test_api.py
@@ -0,0 +1,835 @@
+"""
+Tests for L{pyflakes.scripts.pyflakes}.
+"""
+
+import contextlib
+import os
+import sys
+import shutil
+import subprocess
+import tempfile
+
+from pyflakes.messages import UnusedImport
+from pyflakes.reporter import Reporter
+from pyflakes.api import (
+ main,
+ checkPath,
+ checkRecursive,
+ iterSourceCode,
+)
+from pyflakes.test.harness import TestCase, skipIf
+
+if sys.version_info < (3,):
+ from cStringIO import StringIO
+else:
+ from io import StringIO
+ unichr = chr
+
+try:
+ sys.pypy_version_info
+ PYPY = True
+except AttributeError:
+ PYPY = False
+
+try:
+ WindowsError
+ WIN = True
+except NameError:
+ WIN = False
+
+ERROR_HAS_COL_NUM = ERROR_HAS_LAST_LINE = sys.version_info >= (3, 2) or PYPY
+
+
+def withStderrTo(stderr, f, *args, **kwargs):
+ """
+ Call C{f} with C{sys.stderr} redirected to C{stderr}.
+ """
+ (outer, sys.stderr) = (sys.stderr, stderr)
+ try:
+ return f(*args, **kwargs)
+ finally:
+ sys.stderr = outer
+
+
+class Node(object):
+ """
+ Mock an AST node.
+ """
+ def __init__(self, lineno, col_offset=0):
+ self.lineno = lineno
+ self.col_offset = col_offset
+
+
+class SysStreamCapturing(object):
+
+ """
+ Context manager capturing sys.stdin, sys.stdout and sys.stderr.
+
+    The file handles are replaced with StringIO objects.
+ On environments that support it, the StringIO object uses newlines
+ set to os.linesep. Otherwise newlines are converted from \\n to
+ os.linesep during __exit__.
+ """
+
+ def _create_StringIO(self, buffer=None):
+ # Python 3 has a newline argument
+ try:
+ return StringIO(buffer, newline=os.linesep)
+ except TypeError:
+ self._newline = True
+ # Python 2 creates an input only stream when buffer is not None
+ if buffer is None:
+ return StringIO()
+ else:
+ return StringIO(buffer)
+
+ def __init__(self, stdin):
+ self._newline = False
+ self._stdin = self._create_StringIO(stdin or '')
+
+ def __enter__(self):
+ self._orig_stdin = sys.stdin
+ self._orig_stdout = sys.stdout
+ self._orig_stderr = sys.stderr
+
+ sys.stdin = self._stdin
+ sys.stdout = self._stdout_stringio = self._create_StringIO()
+ sys.stderr = self._stderr_stringio = self._create_StringIO()
+
+ return self
+
+ def __exit__(self, *args):
+ self.output = self._stdout_stringio.getvalue()
+ self.error = self._stderr_stringio.getvalue()
+
+ if self._newline and os.linesep != '\n':
+ self.output = self.output.replace('\n', os.linesep)
+ self.error = self.error.replace('\n', os.linesep)
+
+ sys.stdin = self._orig_stdin
+ sys.stdout = self._orig_stdout
+ sys.stderr = self._orig_stderr
+
+
+class LoggingReporter(object):
+ """
+ Implementation of Reporter that just appends any error to a list.
+ """
+
+ def __init__(self, log):
+ """
+ Construct a C{LoggingReporter}.
+
+ @param log: A list to append log messages to.
+ """
+ self.log = log
+
+ def flake(self, message):
+ self.log.append(('flake', str(message)))
+
+ def unexpectedError(self, filename, message):
+ self.log.append(('unexpectedError', filename, message))
+
+ def syntaxError(self, filename, msg, lineno, offset, line):
+ self.log.append(('syntaxError', filename, msg, lineno, offset, line))
+
+
+class TestIterSourceCode(TestCase):
+ """
+ Tests for L{iterSourceCode}.
+ """
+
+ def setUp(self):
+ self.tempdir = tempfile.mkdtemp()
+
+ def tearDown(self):
+ shutil.rmtree(self.tempdir)
+
+ def makeEmptyFile(self, *parts):
+ assert parts
+ fpath = os.path.join(self.tempdir, *parts)
+ open(fpath, 'a').close()
+ return fpath
+
+ def test_emptyDirectory(self):
+ """
+ There are no Python files in an empty directory.
+ """
+ self.assertEqual(list(iterSourceCode([self.tempdir])), [])
+
+ def test_singleFile(self):
+ """
+ If the directory contains one Python file, C{iterSourceCode} will find
+ it.
+ """
+ childpath = self.makeEmptyFile('foo.py')
+ self.assertEqual(list(iterSourceCode([self.tempdir])), [childpath])
+
+ def test_onlyPythonSource(self):
+ """
+ Files that are not Python source files are not included.
+ """
+ self.makeEmptyFile('foo.pyc')
+ self.assertEqual(list(iterSourceCode([self.tempdir])), [])
+
+ def test_recurses(self):
+ """
+ If the Python files are hidden deep down in child directories, we will
+ find them.
+ """
+ os.mkdir(os.path.join(self.tempdir, 'foo'))
+ apath = self.makeEmptyFile('foo', 'a.py')
+ self.makeEmptyFile('foo', 'a.py~')
+ os.mkdir(os.path.join(self.tempdir, 'bar'))
+ bpath = self.makeEmptyFile('bar', 'b.py')
+ cpath = self.makeEmptyFile('c.py')
+ self.assertEqual(
+ sorted(iterSourceCode([self.tempdir])),
+ sorted([apath, bpath, cpath]))
+
+ def test_shebang(self):
+ """
+ Find Python files that don't end with `.py`, but contain a Python
+ shebang.
+ """
+ python = os.path.join(self.tempdir, 'a')
+ with open(python, 'w') as fd:
+ fd.write('#!/usr/bin/env python\n')
+
+ self.makeEmptyFile('b')
+
+ with open(os.path.join(self.tempdir, 'c'), 'w') as fd:
+ fd.write('hello\nworld\n')
+
+ python2 = os.path.join(self.tempdir, 'd')
+ with open(python2, 'w') as fd:
+ fd.write('#!/usr/bin/env python2\n')
+
+ python3 = os.path.join(self.tempdir, 'e')
+ with open(python3, 'w') as fd:
+ fd.write('#!/usr/bin/env python3\n')
+
+ pythonw = os.path.join(self.tempdir, 'f')
+ with open(pythonw, 'w') as fd:
+ fd.write('#!/usr/bin/env pythonw\n')
+
+ python3args = os.path.join(self.tempdir, 'g')
+ with open(python3args, 'w') as fd:
+ fd.write('#!/usr/bin/python3 -u\n')
+
+ python2u = os.path.join(self.tempdir, 'h')
+ with open(python2u, 'w') as fd:
+ fd.write('#!/usr/bin/python2u\n')
+
+ python3d = os.path.join(self.tempdir, 'i')
+ with open(python3d, 'w') as fd:
+ fd.write('#!/usr/local/bin/python3d\n')
+
+ python38m = os.path.join(self.tempdir, 'j')
+ with open(python38m, 'w') as fd:
+ fd.write('#! /usr/bin/env python3.8m\n')
+
+ python27 = os.path.join(self.tempdir, 'k')
+ with open(python27, 'w') as fd:
+ fd.write('#!/usr/bin/python2.7 \n')
+
+ # Should NOT be treated as Python source
+ notfirst = os.path.join(self.tempdir, 'l')
+ with open(notfirst, 'w') as fd:
+ fd.write('#!/bin/sh\n#!/usr/bin/python\n')
+
+ self.assertEqual(
+ sorted(iterSourceCode([self.tempdir])),
+ sorted([python, python2, python3, pythonw, python3args, python2u,
+ python3d, python38m, python27]))
+
+ def test_multipleDirectories(self):
+ """
+ L{iterSourceCode} can be given multiple directories. It will recurse
+ into each of them.
+ """
+ foopath = os.path.join(self.tempdir, 'foo')
+ barpath = os.path.join(self.tempdir, 'bar')
+ os.mkdir(foopath)
+ apath = self.makeEmptyFile('foo', 'a.py')
+ os.mkdir(barpath)
+ bpath = self.makeEmptyFile('bar', 'b.py')
+ self.assertEqual(
+ sorted(iterSourceCode([foopath, barpath])),
+ sorted([apath, bpath]))
+
+ def test_explicitFiles(self):
+ """
+ If one of the paths given to L{iterSourceCode} is not a directory but
+ a file, it will include that in its output.
+ """
+ epath = self.makeEmptyFile('e.py')
+ self.assertEqual(list(iterSourceCode([epath])),
+ [epath])
+
+
+class TestReporter(TestCase):
+ """
+ Tests for L{Reporter}.
+ """
+
+ def test_syntaxError(self):
+ """
+ C{syntaxError} reports that there was a syntax error in the source
+ file. It reports to the error stream and includes the filename, line
+ number, error message, actual line of source and a caret pointing to
+ where the error is.
+ """
+ err = StringIO()
+ reporter = Reporter(None, err)
+ reporter.syntaxError('foo.py', 'a problem', 3,
+ 8 if sys.version_info >= (3, 8) else 7,
+ 'bad line of source')
+ self.assertEqual(
+ ("foo.py:3:8: a problem\n"
+ "bad line of source\n"
+ " ^\n"),
+ err.getvalue())
+
+ def test_syntaxErrorNoOffset(self):
+ """
+ C{syntaxError} doesn't include a caret pointing to the error if
+ C{offset} is passed as C{None}.
+ """
+ err = StringIO()
+ reporter = Reporter(None, err)
+ reporter.syntaxError('foo.py', 'a problem', 3, None,
+ 'bad line of source')
+ self.assertEqual(
+ ("foo.py:3: a problem\n"
+ "bad line of source\n"),
+ err.getvalue())
+
+ def test_multiLineSyntaxError(self):
+ """
+ If there's a multi-line syntax error, then we only report the last
+ line. The offset is adjusted so that it is relative to the start of
+ the last line.
+ """
+ err = StringIO()
+ lines = [
+ 'bad line of source',
+ 'more bad lines of source',
+ ]
+ reporter = Reporter(None, err)
+ reporter.syntaxError('foo.py', 'a problem', 3, len(lines[0]) + 7,
+ '\n'.join(lines))
+ column = 25 if sys.version_info >= (3, 8) else 7
+ self.assertEqual(
+ ("foo.py:3:%d: a problem\n" % column +
+ lines[-1] + "\n" +
+ " " * (column - 1) + "^\n"),
+ err.getvalue())
+
+ def test_unexpectedError(self):
+ """
+ C{unexpectedError} reports an error processing a source file.
+ """
+ err = StringIO()
+ reporter = Reporter(None, err)
+ reporter.unexpectedError('source.py', 'error message')
+ self.assertEqual('source.py: error message\n', err.getvalue())
+
+ def test_flake(self):
+ """
+ C{flake} reports a code warning from Pyflakes. It is exactly the
+ str() of a L{pyflakes.messages.Message}.
+ """
+ out = StringIO()
+ reporter = Reporter(out, None)
+ message = UnusedImport('foo.py', Node(42), 'bar')
+ reporter.flake(message)
+ self.assertEqual(out.getvalue(), "%s\n" % (message,))
+
+
+class CheckTests(TestCase):
+ """
+ Tests for L{check} and L{checkPath} which check a file for flakes.
+ """
+
+ @contextlib.contextmanager
+ def makeTempFile(self, content):
+ """
+ Make a temporary file containing C{content} and return a path to it.
+ """
+ fd, name = tempfile.mkstemp()
+ try:
+ with os.fdopen(fd, 'wb') as f:
+ if not hasattr(content, 'decode'):
+ content = content.encode('ascii')
+ f.write(content)
+ yield name
+ finally:
+ os.remove(name)
+
+ def assertHasErrors(self, path, errorList):
+ """
+ Assert that C{path} causes errors.
+
+ @param path: A path to a file to check.
+ @param errorList: A list of errors expected to be printed to stderr.
+ """
+ err = StringIO()
+ count = withStderrTo(err, checkPath, path)
+ self.assertEqual(
+ (count, err.getvalue()), (len(errorList), ''.join(errorList)))
+
+ def getErrors(self, path):
+ """
+ Get any warnings or errors reported by pyflakes for the file at C{path}.
+
+ @param path: The path to a Python file on disk that pyflakes will check.
+ @return: C{(count, log)}, where C{count} is the number of warnings or
+ errors generated, and log is a list of those warnings, presented
+ as structured data. See L{LoggingReporter} for more details.
+ """
+ log = []
+ reporter = LoggingReporter(log)
+ count = checkPath(path, reporter)
+ return count, log
+
+ def test_legacyScript(self):
+ from pyflakes.scripts import pyflakes as script_pyflakes
+ self.assertIs(script_pyflakes.checkPath, checkPath)
+
+ def test_missingTrailingNewline(self):
+ """
+ Source which doesn't end with a newline shouldn't cause any
+ exception to be raised nor an error indicator to be returned by
+ L{check}.
+ """
+ with self.makeTempFile("def foo():\n\tpass\n\t") as fName:
+ self.assertHasErrors(fName, [])
+
+ def test_checkPathNonExisting(self):
+ """
+ L{checkPath} handles non-existing files.
+ """
+ count, errors = self.getErrors('extremo')
+ self.assertEqual(count, 1)
+ self.assertEqual(
+ errors,
+ [('unexpectedError', 'extremo', 'No such file or directory')])
+
+ def test_multilineSyntaxError(self):
+ """
+        Source that includes a syntax error whose raised L{SyntaxError.text}
+        contains multiple lines of source is reported with only the last line
+        of that source.
+ """
+ source = """\
+def foo():
+ '''
+
+def bar():
+ pass
+
+def baz():
+ '''quux'''
+"""
+
+        # Sanity check - SyntaxError.text should be multiple lines; if it
+ # isn't, something this test was unprepared for has happened.
+ def evaluate(source):
+ exec(source)
+ try:
+ evaluate(source)
+ except SyntaxError:
+ e = sys.exc_info()[1]
+ if not PYPY:
+ self.assertTrue(e.text.count('\n') > 1)
+ else:
+ self.fail()
+
+ with self.makeTempFile(source) as sourcePath:
+ if PYPY:
+ message = 'end of file (EOF) while scanning triple-quoted string literal'
+ else:
+ message = 'invalid syntax'
+
+ column = 8 if sys.version_info >= (3, 8) else 11
+ self.assertHasErrors(
+ sourcePath,
+ ["""\
+%s:8:%d: %s
+ '''quux'''
+%s^
+""" % (sourcePath, column, message, ' ' * (column - 1))])
+
+ def test_eofSyntaxError(self):
+ """
+ The error reported for source files which end prematurely causing a
+ syntax error reflects the cause for the syntax error.
+ """
+ with self.makeTempFile("def foo(") as sourcePath:
+ if PYPY:
+ result = """\
+%s:1:7: parenthesis is never closed
+def foo(
+ ^
+""" % (sourcePath,)
+ else:
+ result = """\
+%s:1:9: unexpected EOF while parsing
+def foo(
+ ^
+""" % (sourcePath,)
+
+ self.assertHasErrors(
+ sourcePath,
+ [result])
+
+ def test_eofSyntaxErrorWithTab(self):
+ """
+ The error reported for source files which end prematurely causing a
+ syntax error reflects the cause for the syntax error.
+ """
+ with self.makeTempFile("if True:\n\tfoo =") as sourcePath:
+ column = 6 if PYPY else 7
+ last_line = '\t ^' if PYPY else '\t ^'
+
+ self.assertHasErrors(
+ sourcePath,
+ ["""\
+%s:2:%s: invalid syntax
+\tfoo =
+%s
+""" % (sourcePath, column, last_line)])
+
+ def test_nonDefaultFollowsDefaultSyntaxError(self):
+ """
+ Source which has a non-default argument following a default argument
+ should include the line number of the syntax error. However these
+ exceptions do not include an offset.
+ """
+ source = """\
+def foo(bar=baz, bax):
+ pass
+"""
+ with self.makeTempFile(source) as sourcePath:
+ if ERROR_HAS_LAST_LINE:
+ if PYPY and sys.version_info >= (3,):
+ column = 7
+ elif sys.version_info >= (3, 8):
+ column = 9
+ else:
+ column = 8
+ last_line = ' ' * (column - 1) + '^\n'
+ columnstr = '%d:' % column
+ else:
+ last_line = columnstr = ''
+ self.assertHasErrors(
+ sourcePath,
+ ["""\
+%s:1:%s non-default argument follows default argument
+def foo(bar=baz, bax):
+%s""" % (sourcePath, columnstr, last_line)])
+
+ def test_nonKeywordAfterKeywordSyntaxError(self):
+ """
+ Source which has a non-keyword argument after a keyword argument should
+ include the line number of the syntax error. However these exceptions
+ do not include an offset.
+ """
+ source = """\
+foo(bar=baz, bax)
+"""
+ with self.makeTempFile(source) as sourcePath:
+ if ERROR_HAS_LAST_LINE:
+ if PYPY and sys.version_info >= (3,):
+ column = 12
+ elif sys.version_info >= (3, 8):
+ column = 14
+ else:
+ column = 13
+ last_line = ' ' * (column - 1) + '^\n'
+ columnstr = '%d:' % column
+ else:
+ last_line = columnstr = ''
+
+ if sys.version_info >= (3, 5):
+ message = 'positional argument follows keyword argument'
+ else:
+ message = 'non-keyword arg after keyword arg'
+
+ self.assertHasErrors(
+ sourcePath,
+ ["""\
+%s:1:%s %s
+foo(bar=baz, bax)
+%s""" % (sourcePath, columnstr, message, last_line)])
+
+ def test_invalidEscape(self):
+ """
+        An invalid escape sequence raises ValueError in Python 2.
+ """
+ ver = sys.version_info
+ # ValueError: invalid \x escape
+ with self.makeTempFile(r"foo = '\xyz'") as sourcePath:
+ if ver < (3,):
+ decoding_error = "%s: problem decoding source\n" % (sourcePath,)
+ else:
+ position_end = 1
+ if PYPY:
+ column = 6
+ else:
+ column = 7
+ # Column has been "fixed" since 3.2.4 and 3.3.1
+ if ver < (3, 2, 4) or ver[:3] == (3, 3, 0):
+ position_end = 2
+
+ if ERROR_HAS_LAST_LINE:
+ last_line = '%s^\n' % (' ' * (column - 1))
+ else:
+ last_line = ''
+
+ decoding_error = """\
+%s:1:%d: (unicode error) 'unicodeescape' codec can't decode bytes \
+in position 0-%d: truncated \\xXX escape
+foo = '\\xyz'
+%s""" % (sourcePath, column, position_end, last_line)
+
+ self.assertHasErrors(
+ sourcePath, [decoding_error])
+
+ @skipIf(sys.platform == 'win32', 'unsupported on Windows')
+ def test_permissionDenied(self):
+ """
+ If the source file is not readable, this is reported on standard
+ error.
+ """
+ if os.getuid() == 0:
+ self.skipTest('root user can access all files regardless of '
+ 'permissions')
+ with self.makeTempFile('') as sourcePath:
+ os.chmod(sourcePath, 0)
+ count, errors = self.getErrors(sourcePath)
+ self.assertEqual(count, 1)
+ self.assertEqual(
+ errors,
+ [('unexpectedError', sourcePath, "Permission denied")])
+
+ def test_pyflakesWarning(self):
+ """
+ If the source file has a pyflakes warning, this is reported as a
+ 'flake'.
+ """
+ with self.makeTempFile("import foo") as sourcePath:
+ count, errors = self.getErrors(sourcePath)
+ self.assertEqual(count, 1)
+ self.assertEqual(
+ errors, [('flake', str(UnusedImport(sourcePath, Node(1), 'foo')))])
+
+ def test_encodedFileUTF8(self):
+ """
+        If the source file declares the correct encoding, no error is reported.
+ """
+ SNOWMAN = unichr(0x2603)
+ source = ("""\
+# coding: utf-8
+x = "%s"
+""" % SNOWMAN).encode('utf-8')
+ with self.makeTempFile(source) as sourcePath:
+ self.assertHasErrors(sourcePath, [])
+
+ def test_CRLFLineEndings(self):
+ """
+ Source files with Windows CR LF line endings are parsed successfully.
+ """
+ with self.makeTempFile("x = 42\r\n") as sourcePath:
+ self.assertHasErrors(sourcePath, [])
+
+ def test_misencodedFileUTF8(self):
+ """
+ If a source file contains bytes which cannot be decoded, this is
+ reported on stderr.
+ """
+ SNOWMAN = unichr(0x2603)
+ source = ("""\
+# coding: ascii
+x = "%s"
+""" % SNOWMAN).encode('utf-8')
+ with self.makeTempFile(source) as sourcePath:
+ if PYPY and sys.version_info < (3, ):
+ message = ('\'ascii\' codec can\'t decode byte 0xe2 '
+ 'in position 21: ordinal not in range(128)')
+ result = """\
+%s:0:0: %s
+x = "\xe2\x98\x83"
+ ^\n""" % (sourcePath, message)
+
+ else:
+ message = 'problem decoding source'
+ result = "%s: problem decoding source\n" % (sourcePath,)
+
+ self.assertHasErrors(
+ sourcePath, [result])
+
+ def test_misencodedFileUTF16(self):
+ """
+ If a source file contains bytes which cannot be decoded, this is
+ reported on stderr.
+ """
+ SNOWMAN = unichr(0x2603)
+ source = ("""\
+# coding: ascii
+x = "%s"
+""" % SNOWMAN).encode('utf-16')
+ with self.makeTempFile(source) as sourcePath:
+ self.assertHasErrors(
+ sourcePath, ["%s: problem decoding source\n" % (sourcePath,)])
+
+ def test_checkRecursive(self):
+ """
+ L{checkRecursive} descends into each directory, finding Python files
+ and reporting problems.
+ """
+ tempdir = tempfile.mkdtemp()
+ try:
+ os.mkdir(os.path.join(tempdir, 'foo'))
+ file1 = os.path.join(tempdir, 'foo', 'bar.py')
+ with open(file1, 'wb') as fd:
+ fd.write("import baz\n".encode('ascii'))
+ file2 = os.path.join(tempdir, 'baz.py')
+ with open(file2, 'wb') as fd:
+ fd.write("import contraband".encode('ascii'))
+ log = []
+ reporter = LoggingReporter(log)
+ warnings = checkRecursive([tempdir], reporter)
+ self.assertEqual(warnings, 2)
+ self.assertEqual(
+ sorted(log),
+ sorted([('flake', str(UnusedImport(file1, Node(1), 'baz'))),
+ ('flake',
+ str(UnusedImport(file2, Node(1), 'contraband')))]))
+ finally:
+ shutil.rmtree(tempdir)
+
+
+class IntegrationTests(TestCase):
+ """
+ Tests of the pyflakes script that actually spawn the script.
+ """
+
+ # https://bitbucket.org/pypy/pypy/issues/3069/pypy36-on-windows-incorrect-line-separator
+ if PYPY and sys.version_info >= (3,) and WIN:
+ LINESEP = '\n'
+ else:
+ LINESEP = os.linesep
+
+ def setUp(self):
+ self.tempdir = tempfile.mkdtemp()
+ self.tempfilepath = os.path.join(self.tempdir, 'temp')
+
+ def tearDown(self):
+ shutil.rmtree(self.tempdir)
+
+ def getPyflakesBinary(self):
+ """
+ Return the path to the pyflakes binary.
+ """
+ import pyflakes
+ package_dir = os.path.dirname(pyflakes.__file__)
+ return os.path.join(package_dir, '..', 'bin', 'pyflakes')
+
+ def runPyflakes(self, paths, stdin=None):
+ """
+ Launch a subprocess running C{pyflakes}.
+
+ @param paths: Command-line arguments to pass to pyflakes.
+ @param stdin: Text to use as stdin.
+ @return: C{(returncode, stdout, stderr)} of the completed pyflakes
+ process.
+ """
+ env = dict(os.environ)
+ env['PYTHONPATH'] = os.pathsep.join(sys.path)
+ command = [sys.executable, self.getPyflakesBinary()]
+ command.extend(paths)
+ if stdin:
+ p = subprocess.Popen(command, env=env, stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (stdout, stderr) = p.communicate(stdin.encode('ascii'))
+ else:
+ p = subprocess.Popen(command, env=env,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (stdout, stderr) = p.communicate()
+ rv = p.wait()
+ if sys.version_info >= (3,):
+ stdout = stdout.decode('utf-8')
+ stderr = stderr.decode('utf-8')
+ return (stdout, stderr, rv)
+
+ def test_goodFile(self):
+ """
+ When a Python source file is all good, the return code is zero and no
+ messages are printed to either stdout or stderr.
+ """
+ open(self.tempfilepath, 'a').close()
+ d = self.runPyflakes([self.tempfilepath])
+ self.assertEqual(d, ('', '', 0))
+
+ def test_fileWithFlakes(self):
+ """
+ When a Python source file has warnings, the return code is non-zero
+ and the warnings are printed to stdout.
+ """
+ with open(self.tempfilepath, 'wb') as fd:
+ fd.write("import contraband\n".encode('ascii'))
+ d = self.runPyflakes([self.tempfilepath])
+ expected = UnusedImport(self.tempfilepath, Node(1), 'contraband')
+ self.assertEqual(d, ("%s%s" % (expected, self.LINESEP), '', 1))
+
+ def test_errors_io(self):
+ """
+        When pyflakes finds errors with the files it's given (if they don't
+        exist, say), the return code is non-zero and the errors are
+        printed to stderr.
+ """
+ d = self.runPyflakes([self.tempfilepath])
+ error_msg = '%s: No such file or directory%s' % (self.tempfilepath,
+ self.LINESEP)
+ self.assertEqual(d, ('', error_msg, 1))
+
+ def test_errors_syntax(self):
+ """
+        When a file passed to pyflakes contains a syntax error, the return
+        code is non-zero and the error is printed to stderr.
+ """
+ with open(self.tempfilepath, 'wb') as fd:
+ fd.write("import".encode('ascii'))
+ d = self.runPyflakes([self.tempfilepath])
+ error_msg = '{0}:1:{2}: invalid syntax{1}import{1} {3}^{1}'.format(
+ self.tempfilepath, self.LINESEP, 6 if PYPY else 7, '' if PYPY else ' ')
+ self.assertEqual(d, ('', error_msg, 1))
+
+ def test_readFromStdin(self):
+ """
+ If no arguments are passed to C{pyflakes} then it reads from stdin.
+ """
+ d = self.runPyflakes([], stdin='import contraband')
+ expected = UnusedImport('<stdin>', Node(1), 'contraband')
+ self.assertEqual(d, ("%s%s" % (expected, self.LINESEP), '', 1))
+
+
+class TestMain(IntegrationTests):
+ """
+ Tests of the pyflakes main function.
+ """
+ LINESEP = os.linesep
+
+ def runPyflakes(self, paths, stdin=None):
+ try:
+ with SysStreamCapturing(stdin) as capture:
+ main(args=paths)
+ except SystemExit as e:
+ self.assertIsInstance(e.code, bool)
+ rv = int(e.code)
+ return (capture.output, capture.error, rv)
+ else:
+ raise RuntimeError('SystemExit not raised')
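
As exercised by TestMain above, main() exits via SystemExit carrying a boolean
code; a sketch of driving it programmatically (the path is hypothetical):

    from pyflakes.api import main

    try:
        main(args=['some_file.py'])
    except SystemExit as e:
        print('exit status:', int(e.code))   # 0 when clean, 1 on warnings/errors
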
diff --git a/third_party/python/pyflakes/pyflakes/test/test_builtin.py b/third_party/python/pyflakes/pyflakes/test/test_builtin.py
new file mode 100644
index 0000000000..7150ddb1e4
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/test_builtin.py
@@ -0,0 +1,41 @@
+"""
+Tests for detecting redefinition of builtins.
+"""
+from sys import version_info
+
+from pyflakes import messages as m
+from pyflakes.test.harness import TestCase, skipIf
+
+
+class TestBuiltins(TestCase):
+
+ def test_builtin_unbound_local(self):
+ self.flakes('''
+ def foo():
+ a = range(1, 10)
+ range = a
+ return range
+
+ foo()
+
+ print(range)
+ ''', m.UndefinedLocal)
+
+ def test_global_shadowing_builtin(self):
+ self.flakes('''
+ def f():
+ global range
+ range = None
+ print(range)
+
+ f()
+ ''')
+
+ @skipIf(version_info >= (3,), 'not an UnboundLocalError in Python 3')
+ def test_builtin_in_comprehension(self):
+ self.flakes('''
+ def f():
+ [range for range in range(1, 10)]
+
+ f()
+ ''', m.UndefinedLocal)
diff --git a/third_party/python/pyflakes/pyflakes/test/test_checker.py b/third_party/python/pyflakes/pyflakes/test/test_checker.py
new file mode 100644
index 0000000000..b52757260a
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/test_checker.py
@@ -0,0 +1,186 @@
+import ast
+import sys
+
+from pyflakes import checker
+from pyflakes.test.harness import TestCase, skipIf
+
+
+class TypeableVisitorTests(TestCase):
+ """
+ Tests of L{_TypeableVisitor}
+ """
+
+ @staticmethod
+ def _run_visitor(s):
+ """
+ Run L{_TypeableVisitor} on the parsed source and return the visitor.
+ """
+ tree = ast.parse(s)
+ visitor = checker._TypeableVisitor()
+ visitor.visit(tree)
+ return visitor
+
+ def test_node_types(self):
+ """
+ Test that the typeable node types are collected
+ """
+ visitor = self._run_visitor(
+ """\
+x = 1 # assignment
+for x in range(1): pass # for loop
+def f(): pass # function definition
+with a as b: pass # with statement
+"""
+ )
+ self.assertEqual(visitor.typeable_lines, [1, 2, 3, 4])
+ self.assertIsInstance(visitor.typeable_nodes[1], ast.Assign)
+ self.assertIsInstance(visitor.typeable_nodes[2], ast.For)
+ self.assertIsInstance(visitor.typeable_nodes[3], ast.FunctionDef)
+ self.assertIsInstance(visitor.typeable_nodes[4], ast.With)
+
+ def test_visitor_recurses(self):
+ """
+ Test the common pitfall of missing `generic_visit` in visitors by
+ ensuring that nested nodes are reported
+ """
+ visitor = self._run_visitor(
+ """\
+def f():
+ x = 1
+"""
+ )
+ self.assertEqual(visitor.typeable_lines, [1, 2])
+ self.assertIsInstance(visitor.typeable_nodes[1], ast.FunctionDef)
+ self.assertIsInstance(visitor.typeable_nodes[2], ast.Assign)
+
+ @skipIf(sys.version_info < (3, 5), 'async syntax introduced in py35')
+ def test_py35_node_types(self):
+ """
+ Test that the PEP 492 node types are collected
+ """
+ visitor = self._run_visitor(
+ """\
+async def f(): # async def
+ async for x in y: pass # async for
+ async with a as b: pass # async with
+"""
+ )
+ self.assertEqual(visitor.typeable_lines, [1, 2, 3])
+ self.assertIsInstance(visitor.typeable_nodes[1], ast.AsyncFunctionDef)
+ self.assertIsInstance(visitor.typeable_nodes[2], ast.AsyncFor)
+ self.assertIsInstance(visitor.typeable_nodes[3], ast.AsyncWith)
+
+ def test_last_node_wins(self):
+ """
+ Test that when two typeable nodes are present on a line, the last
+ typeable one wins.
+ """
+ visitor = self._run_visitor('x = 1; y = 1')
+ # detected both assignable nodes
+ self.assertEqual(visitor.typeable_lines, [1, 1])
+ # but the assignment to `y` wins
+ self.assertEqual(visitor.typeable_nodes[1].targets[0].id, 'y')
+
+
+class CollectTypeCommentsTests(TestCase):
+ """
+ Tests of L{_collect_type_comments}
+ """
+
+ @staticmethod
+ def _collect(s):
+ """
+ Run L{_collect_type_comments} on the parsed source and return the
+ mapping from nodes to comments. The return value is converted to
+ a set: {(node_type, tuple of comments), ...}
+ """
+ tree = ast.parse(s)
+ tokens = checker.make_tokens(s)
+ ret = checker._collect_type_comments(tree, tokens)
+ return {(type(k), tuple(s for _, s in v)) for k, v in ret.items()}
+
+ def test_bytes(self):
+ """
+ Test that the function works for binary source
+ """
+ ret = self._collect(b'x = 1 # type: int')
+ self.assertSetEqual(ret, {(ast.Assign, ('# type: int',))})
+
+ def test_text(self):
+ """
+ Test that the function works for text source
+ """
+ ret = self._collect(u'x = 1 # type: int')
+ self.assertEqual(ret, {(ast.Assign, ('# type: int',))})
+
+ def test_non_type_comment_ignored(self):
+ """
+ Test that a non-type comment is ignored
+ """
+ ret = self._collect('x = 1 # noqa')
+ self.assertSetEqual(ret, set())
+
+ def test_type_comment_before_typeable(self):
+ """
+ Test that a type comment before something typeable is ignored.
+ """
+ ret = self._collect('# type: int\nx = 1')
+ self.assertSetEqual(ret, set())
+
+ def test_type_ignore_comment_ignored(self):
+ """
+ Test that `# type: ignore` comments are not collected.
+ """
+ ret = self._collect('x = 1 # type: ignore')
+ self.assertSetEqual(ret, set())
+
+ def test_type_ignore_with_other_things_ignored(self):
+ """
+ Test that `# type: ignore` comments with more content are also not
+ collected.
+ """
+ ret = self._collect('x = 1 # type: ignore # noqa')
+ self.assertSetEqual(ret, set())
+ ret = self._collect('x = 1 #type:ignore#noqa')
+ self.assertSetEqual(ret, set())
+
+ def test_type_comment_with_extra_still_collected(self):
+ ret = self._collect('x = 1 # type: int # noqa')
+ self.assertSetEqual(ret, {(ast.Assign, ('# type: int # noqa',))})
+
+ def test_type_comment_without_whitespace(self):
+ ret = self._collect('x = 1 #type:int')
+ self.assertSetEqual(ret, {(ast.Assign, ('#type:int',))})
+
+ def test_type_comment_starts_with_word_ignore(self):
+ ret = self._collect('x = 1 # type: ignore[T]')
+ self.assertSetEqual(ret, set())
+
+ def test_last_node_wins(self):
+ """
+ Test that when two typeable nodes are present on a line, the last
+ typeable one wins.
+ """
+ ret = self._collect('def f(): x = 1 # type: int')
+ self.assertSetEqual(ret, {(ast.Assign, ('# type: int',))})
+
+ def test_function_def_assigned_comments(self):
+ """
+ Test that type comments for function arguments are all attributed to
+ the function definition.
+ """
+ ret = self._collect(
+ """\
+def f(
+ a, # type: int
+ b, # type: str
+):
+ # type: (...) -> None
+ pass
+"""
+ )
+ expected = {(
+ ast.FunctionDef,
+ ('# type: int', '# type: str', '# type: (...) -> None'),
+ )}
+ self.assertSetEqual(ret, expected)
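
A sketch of the private type-comment collection helper exercised above; since
it is an internal API, its exact shape may change between releases:

    import ast
    from pyflakes import checker

    src = 'x = 1  # type: int\n'
    tree = ast.parse(src)
    tokens = checker.make_tokens(src)
    # maps typeable nodes (e.g. ast.Assign) to the type-comment tokens on their line
    comments = checker._collect_type_comments(tree, tokens)
    print({type(node).__name__: [text for _, text in toks]
           for node, toks in comments.items()})
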
diff --git a/third_party/python/pyflakes/pyflakes/test/test_code_segment.py b/third_party/python/pyflakes/pyflakes/test/test_code_segment.py
new file mode 100644
index 0000000000..131a74dc1b
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/test_code_segment.py
@@ -0,0 +1,132 @@
+from sys import version_info
+
+from pyflakes import messages as m
+from pyflakes.checker import (FunctionScope, ClassScope, ModuleScope,
+ Argument, FunctionDefinition, Assignment)
+from pyflakes.test.harness import TestCase, skipIf
+
+
+class TestCodeSegments(TestCase):
+ """
+ Tests for segments of a module
+ """
+
+ def test_function_segment(self):
+ self.flakes('''
+ def foo():
+ def bar():
+ pass
+ ''', is_segment=True)
+
+ self.flakes('''
+ def foo():
+ def bar():
+ x = 0
+ ''', m.UnusedVariable, is_segment=True)
+
+ def test_class_segment(self):
+ self.flakes('''
+ class Foo:
+ class Bar:
+ pass
+ ''', is_segment=True)
+
+ self.flakes('''
+ class Foo:
+ def bar():
+ x = 0
+ ''', m.UnusedVariable, is_segment=True)
+
+ def test_scope_class(self):
+ checker = self.flakes('''
+ class Foo:
+ x = 0
+ def bar(a, b=1, *d, **e):
+ pass
+ ''', is_segment=True)
+
+ scopes = checker.deadScopes
+ module_scopes = [
+ scope for scope in scopes if scope.__class__ is ModuleScope]
+ class_scopes = [
+ scope for scope in scopes if scope.__class__ is ClassScope]
+ function_scopes = [
+ scope for scope in scopes if scope.__class__ is FunctionScope]
+
+ # Ensure module scope is not present because we are analysing
+ # the inner contents of Foo
+ self.assertEqual(len(module_scopes), 0)
+ self.assertEqual(len(class_scopes), 1)
+ self.assertEqual(len(function_scopes), 1)
+
+ class_scope = class_scopes[0]
+ function_scope = function_scopes[0]
+
+ self.assertIsInstance(class_scope, ClassScope)
+ self.assertIsInstance(function_scope, FunctionScope)
+
+ self.assertIn('x', class_scope)
+ self.assertIn('bar', class_scope)
+
+ self.assertIn('a', function_scope)
+ self.assertIn('b', function_scope)
+ self.assertIn('d', function_scope)
+ self.assertIn('e', function_scope)
+
+ self.assertIsInstance(class_scope['bar'], FunctionDefinition)
+ self.assertIsInstance(class_scope['x'], Assignment)
+
+ self.assertIsInstance(function_scope['a'], Argument)
+ self.assertIsInstance(function_scope['b'], Argument)
+ self.assertIsInstance(function_scope['d'], Argument)
+ self.assertIsInstance(function_scope['e'], Argument)
+
+ def test_scope_function(self):
+ checker = self.flakes('''
+ def foo(a, b=1, *d, **e):
+ def bar(f, g=1, *h, **i):
+ pass
+ ''', is_segment=True)
+
+ scopes = checker.deadScopes
+ module_scopes = [
+ scope for scope in scopes if scope.__class__ is ModuleScope]
+ function_scopes = [
+ scope for scope in scopes if scope.__class__ is FunctionScope]
+
+ # Ensure module scope is not present because we are analysing
+ # the inner contents of foo
+ self.assertEqual(len(module_scopes), 0)
+ self.assertEqual(len(function_scopes), 2)
+
+ function_scope_foo = function_scopes[1]
+ function_scope_bar = function_scopes[0]
+
+ self.assertIsInstance(function_scope_foo, FunctionScope)
+ self.assertIsInstance(function_scope_bar, FunctionScope)
+
+ self.assertIn('a', function_scope_foo)
+ self.assertIn('b', function_scope_foo)
+ self.assertIn('d', function_scope_foo)
+ self.assertIn('e', function_scope_foo)
+ self.assertIn('bar', function_scope_foo)
+
+ self.assertIn('f', function_scope_bar)
+ self.assertIn('g', function_scope_bar)
+ self.assertIn('h', function_scope_bar)
+ self.assertIn('i', function_scope_bar)
+
+ self.assertIsInstance(function_scope_foo['bar'], FunctionDefinition)
+ self.assertIsInstance(function_scope_foo['a'], Argument)
+ self.assertIsInstance(function_scope_foo['b'], Argument)
+ self.assertIsInstance(function_scope_foo['d'], Argument)
+ self.assertIsInstance(function_scope_foo['e'], Argument)
+
+ self.assertIsInstance(function_scope_bar['f'], Argument)
+ self.assertIsInstance(function_scope_bar['g'], Argument)
+ self.assertIsInstance(function_scope_bar['h'], Argument)
+ self.assertIsInstance(function_scope_bar['i'], Argument)
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_scope_async_function(self):
+ self.flakes('async def foo(): pass', is_segment=True)
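
The is_segment path above corresponds to handing the Checker a single node
rather than a whole module; a rough sketch using the same internals as the
harness, mirroring test_function_segment:

    import ast
    from pyflakes import checker

    src = 'def foo():\n    def bar():\n        x = 0\n'
    segment = ast.parse(src).body[0]          # just the outer function definition
    tokens = checker.make_tokens(src)
    w = checker.Checker(segment, file_tokens=tokens)
    print([type(msg).__name__ for msg in w.messages])   # expect ['UnusedVariable']
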
diff --git a/third_party/python/pyflakes/pyflakes/test/test_dict.py b/third_party/python/pyflakes/pyflakes/test/test_dict.py
new file mode 100644
index 0000000000..b9059c22b3
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/test_dict.py
@@ -0,0 +1,213 @@
+"""
+Tests for dict duplicate keys Pyflakes behavior.
+"""
+
+from sys import version_info
+
+from pyflakes import messages as m
+from pyflakes.test.harness import TestCase, skipIf
+
+
+class Test(TestCase):
+
+ def test_duplicate_keys(self):
+ self.flakes(
+ "{'yes': 1, 'yes': 2}",
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ @skipIf(version_info < (3,),
+ "bytes and strings with same 'value' are not equal in python3")
+ def test_duplicate_keys_bytes_vs_unicode_py3(self):
+ self.flakes("{b'a': 1, u'a': 2}")
+
+ @skipIf(version_info < (3,),
+ "bytes and strings with same 'value' are not equal in python3")
+ def test_duplicate_values_bytes_vs_unicode_py3(self):
+ self.flakes(
+ "{1: b'a', 1: u'a'}",
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ @skipIf(version_info >= (3,),
+ "bytes and strings with same 'value' are equal in python2")
+ def test_duplicate_keys_bytes_vs_unicode_py2(self):
+ self.flakes(
+ "{b'a': 1, u'a': 2}",
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ @skipIf(version_info >= (3,),
+ "bytes and strings with same 'value' are equal in python2")
+ def test_duplicate_values_bytes_vs_unicode_py2(self):
+ self.flakes("{1: b'a', 1: u'a'}")
+
+ def test_multiple_duplicate_keys(self):
+ self.flakes(
+ "{'yes': 1, 'yes': 2, 'no': 2, 'no': 3}",
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ def test_duplicate_keys_in_function(self):
+ self.flakes(
+ '''
+ def f(thing):
+ pass
+ f({'yes': 1, 'yes': 2})
+ ''',
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ def test_duplicate_keys_in_lambda(self):
+ self.flakes(
+ "lambda x: {(0,1): 1, (0,1): 2}",
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ def test_duplicate_keys_tuples(self):
+ self.flakes(
+ "{(0,1): 1, (0,1): 2}",
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ def test_duplicate_keys_tuples_int_and_float(self):
+ self.flakes(
+ "{(0,1): 1, (0,1.0): 2}",
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ def test_duplicate_keys_ints(self):
+ self.flakes(
+ "{1: 1, 1: 2}",
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ def test_duplicate_keys_bools(self):
+ self.flakes(
+ "{True: 1, True: 2}",
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ def test_duplicate_keys_bools_false(self):
+ # Needed to ensure 2.x correctly coerces these from variables
+ self.flakes(
+ "{False: 1, False: 2}",
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ def test_duplicate_keys_none(self):
+ self.flakes(
+ "{None: 1, None: 2}",
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ def test_duplicate_variable_keys(self):
+ self.flakes(
+ '''
+ a = 1
+ {a: 1, a: 2}
+ ''',
+ m.MultiValueRepeatedKeyVariable,
+ m.MultiValueRepeatedKeyVariable,
+ )
+
+ def test_duplicate_variable_values(self):
+ self.flakes(
+ '''
+ a = 1
+ b = 2
+ {1: a, 1: b}
+ ''',
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ def test_duplicate_variable_values_same_value(self):
+ # Current behaviour is not to look up variable values. This is to
+ # confirm that.
+ self.flakes(
+ '''
+ a = 1
+ b = 1
+ {1: a, 1: b}
+ ''',
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ def test_duplicate_key_float_and_int(self):
+ """
+ These do look like different values, but when it comes to their use as
+ keys, they compare as equal and so are actually duplicates.
+        The literal dict {1: 1, 1.0: 2} actually evaluates to {1: 2}.
+ """
+ self.flakes(
+ '''
+ {1: 1, 1.0: 2}
+ ''',
+ m.MultiValueRepeatedKeyLiteral,
+ m.MultiValueRepeatedKeyLiteral,
+ )
+
+ def test_no_duplicate_key_error_same_value(self):
+ self.flakes('''
+ {'yes': 1, 'yes': 1}
+ ''')
+
+ def test_no_duplicate_key_errors(self):
+ self.flakes('''
+ {'yes': 1, 'no': 2}
+ ''')
+
+ def test_no_duplicate_keys_tuples_same_first_element(self):
+ self.flakes("{(0,1): 1, (0,2): 1}")
+
+ def test_no_duplicate_key_errors_func_call(self):
+ self.flakes('''
+ def test(thing):
+ pass
+ test({True: 1, None: 2, False: 1})
+ ''')
+
+ def test_no_duplicate_key_errors_bool_or_none(self):
+ self.flakes("{True: 1, None: 2, False: 1}")
+
+ def test_no_duplicate_key_errors_ints(self):
+ self.flakes('''
+ {1: 1, 2: 1}
+ ''')
+
+ def test_no_duplicate_key_errors_vars(self):
+ self.flakes('''
+ test = 'yes'
+ rest = 'yes'
+ {test: 1, rest: 2}
+ ''')
+
+ def test_no_duplicate_key_errors_tuples(self):
+ self.flakes('''
+ {(0,1): 1, (0,2): 1}
+ ''')
+
+ def test_no_duplicate_key_errors_instance_attributes(self):
+ self.flakes('''
+ class Test():
+ pass
+ f = Test()
+ f.a = 1
+ {f.a: 1, f.a: 1}
+ ''')
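
A quick way to see the duplicate-key warnings covered by these tests outside
the suite; sketch only, with an arbitrary filename label:

    import sys
    from pyflakes.api import check
    from pyflakes.reporter import Reporter

    # two MultiValueRepeatedKeyLiteral warnings: one per occurrence of 'yes'
    check("{'yes': 1, 'yes': 2}\n", '<dict-example>',
          Reporter(sys.stdout, sys.stderr))
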
diff --git a/third_party/python/pyflakes/pyflakes/test/test_doctests.py b/third_party/python/pyflakes/pyflakes/test/test_doctests.py
new file mode 100644
index 0000000000..836b248906
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/test_doctests.py
@@ -0,0 +1,465 @@
+import sys
+import textwrap
+
+from pyflakes import messages as m
+from pyflakes.checker import (
+ DoctestScope,
+ FunctionScope,
+ ModuleScope,
+)
+from pyflakes.test.test_other import Test as TestOther
+from pyflakes.test.test_imports import Test as TestImports
+from pyflakes.test.test_undefined_names import Test as TestUndefinedNames
+from pyflakes.test.harness import TestCase, skip
+
+try:
+ sys.pypy_version_info
+ PYPY = True
+except AttributeError:
+ PYPY = False
+
+
+class _DoctestMixin(object):
+
+ withDoctest = True
+
+ def doctestify(self, input):
+ lines = []
+ for line in textwrap.dedent(input).splitlines():
+ if line.strip() == '':
+ pass
+ elif (line.startswith(' ') or
+ line.startswith('except:') or
+ line.startswith('except ') or
+ line.startswith('finally:') or
+ line.startswith('else:') or
+ line.startswith('elif ') or
+ (lines and lines[-1].startswith(('>>> @', '... @')))):
+ line = "... %s" % line
+ else:
+ line = ">>> %s" % line
+ lines.append(line)
+ doctestificator = textwrap.dedent('''\
+ def doctest_something():
+ """
+ %s
+ """
+ ''')
+ return doctestificator % "\n ".join(lines)
+
+ def flakes(self, input, *args, **kw):
+ return super(_DoctestMixin, self).flakes(self.doctestify(input), *args, **kw)
+
+
+class Test(TestCase):
+
+ withDoctest = True
+
+ def test_scope_class(self):
+ """Check that a doctest is given a DoctestScope."""
+ checker = self.flakes("""
+ m = None
+
+ def doctest_stuff():
+ '''
+ >>> d = doctest_stuff()
+ '''
+ f = m
+ return f
+ """)
+
+ scopes = checker.deadScopes
+ module_scopes = [
+ scope for scope in scopes if scope.__class__ is ModuleScope]
+ doctest_scopes = [
+ scope for scope in scopes if scope.__class__ is DoctestScope]
+ function_scopes = [
+ scope for scope in scopes if scope.__class__ is FunctionScope]
+
+ self.assertEqual(len(module_scopes), 1)
+ self.assertEqual(len(doctest_scopes), 1)
+
+ module_scope = module_scopes[0]
+ doctest_scope = doctest_scopes[0]
+
+ self.assertIsInstance(doctest_scope, DoctestScope)
+ self.assertIsInstance(doctest_scope, ModuleScope)
+ self.assertNotIsInstance(doctest_scope, FunctionScope)
+ self.assertNotIsInstance(module_scope, DoctestScope)
+
+ self.assertIn('m', module_scope)
+ self.assertIn('doctest_stuff', module_scope)
+
+ self.assertIn('d', doctest_scope)
+
+ self.assertEqual(len(function_scopes), 1)
+ self.assertIn('f', function_scopes[0])
+
+ def test_nested_doctest_ignored(self):
+ """Check that nested doctests are ignored."""
+ checker = self.flakes("""
+ m = None
+
+ def doctest_stuff():
+ '''
+ >>> def function_in_doctest():
+ ... \"\"\"
+ ... >>> ignored_undefined_name
+ ... \"\"\"
+ ... df = m
+ ... return df
+ ...
+ >>> function_in_doctest()
+ '''
+ f = m
+ return f
+ """)
+
+ scopes = checker.deadScopes
+ module_scopes = [
+ scope for scope in scopes if scope.__class__ is ModuleScope]
+ doctest_scopes = [
+ scope for scope in scopes if scope.__class__ is DoctestScope]
+ function_scopes = [
+ scope for scope in scopes if scope.__class__ is FunctionScope]
+
+ self.assertEqual(len(module_scopes), 1)
+ self.assertEqual(len(doctest_scopes), 1)
+
+ module_scope = module_scopes[0]
+ doctest_scope = doctest_scopes[0]
+
+ self.assertIn('m', module_scope)
+ self.assertIn('doctest_stuff', module_scope)
+ self.assertIn('function_in_doctest', doctest_scope)
+
+ self.assertEqual(len(function_scopes), 2)
+
+ self.assertIn('f', function_scopes[0])
+ self.assertIn('df', function_scopes[1])
+
+ def test_global_module_scope_pollution(self):
+ """Check that global in doctest does not pollute module scope."""
+ checker = self.flakes("""
+ def doctest_stuff():
+ '''
+ >>> def function_in_doctest():
+ ... global m
+ ... m = 50
+ ... df = 10
+ ... m = df
+ ...
+ >>> function_in_doctest()
+ '''
+ f = 10
+ return f
+
+ """)
+
+ scopes = checker.deadScopes
+ module_scopes = [
+ scope for scope in scopes if scope.__class__ is ModuleScope]
+ doctest_scopes = [
+ scope for scope in scopes if scope.__class__ is DoctestScope]
+ function_scopes = [
+ scope for scope in scopes if scope.__class__ is FunctionScope]
+
+ self.assertEqual(len(module_scopes), 1)
+ self.assertEqual(len(doctest_scopes), 1)
+
+ module_scope = module_scopes[0]
+ doctest_scope = doctest_scopes[0]
+
+ self.assertIn('doctest_stuff', module_scope)
+ self.assertIn('function_in_doctest', doctest_scope)
+
+ self.assertEqual(len(function_scopes), 2)
+
+ self.assertIn('f', function_scopes[0])
+ self.assertIn('df', function_scopes[1])
+ self.assertIn('m', function_scopes[1])
+
+ self.assertNotIn('m', module_scope)
+
+ def test_global_undefined(self):
+ self.flakes("""
+ global m
+
+ def doctest_stuff():
+ '''
+ >>> m
+ '''
+ """, m.UndefinedName)
+
+ def test_nested_class(self):
+        """Doctests within nested classes are processed."""
+ self.flakes("""
+ class C:
+ class D:
+ '''
+ >>> m
+ '''
+ def doctest_stuff(self):
+ '''
+ >>> m
+ '''
+ return 1
+ """, m.UndefinedName, m.UndefinedName)
+
+ def test_ignore_nested_function(self):
+        """The doctest module does not process doctests in nested functions."""
+ # 'syntax error' would cause a SyntaxError if the doctest was processed.
+ # However doctest does not find doctest in nested functions
+ # (https://bugs.python.org/issue1650090). If nested functions were
+ # processed, this use of m should cause UndefinedName, and the
+ # name inner_function should probably exist in the doctest scope.
+ self.flakes("""
+ def doctest_stuff():
+ def inner_function():
+ '''
+ >>> syntax error
+ >>> inner_function()
+ 1
+ >>> m
+ '''
+ return 1
+ m = inner_function()
+ return m
+ """)
+
+ def test_inaccessible_scope_class(self):
+ """Doctest may not access class scope."""
+ self.flakes("""
+ class C:
+ def doctest_stuff(self):
+ '''
+ >>> m
+ '''
+ return 1
+ m = 1
+ """, m.UndefinedName)
+
+ def test_importBeforeDoctest(self):
+ self.flakes("""
+ import foo
+
+ def doctest_stuff():
+ '''
+ >>> foo
+ '''
+ """)
+
+ @skip("todo")
+ def test_importBeforeAndInDoctest(self):
+ self.flakes('''
+ import foo
+
+ def doctest_stuff():
+ """
+ >>> import foo
+ >>> foo
+ """
+
+ foo
+ ''', m.RedefinedWhileUnused)
+
+ def test_importInDoctestAndAfter(self):
+ self.flakes('''
+ def doctest_stuff():
+ """
+ >>> import foo
+ >>> foo
+ """
+
+ import foo
+ foo()
+ ''')
+
+ def test_offsetInDoctests(self):
+ exc = self.flakes('''
+
+ def doctest_stuff():
+ """
+ >>> x # line 5
+ """
+
+ ''', m.UndefinedName).messages[0]
+ self.assertEqual(exc.lineno, 5)
+ self.assertEqual(exc.col, 12)
+
+ def test_offsetInLambdasInDoctests(self):
+ exc = self.flakes('''
+
+ def doctest_stuff():
+ """
+ >>> lambda: x # line 5
+ """
+
+ ''', m.UndefinedName).messages[0]
+ self.assertEqual(exc.lineno, 5)
+ self.assertEqual(exc.col, 20)
+
+ def test_offsetAfterDoctests(self):
+ exc = self.flakes('''
+
+ def doctest_stuff():
+ """
+ >>> x = 5
+ """
+
+ x
+
+ ''', m.UndefinedName).messages[0]
+ self.assertEqual(exc.lineno, 8)
+ self.assertEqual(exc.col, 0)
+
+ def test_syntaxErrorInDoctest(self):
+ exceptions = self.flakes(
+ '''
+ def doctest_stuff():
+ """
+ >>> from # line 4
+ >>> fortytwo = 42
+ >>> except Exception:
+ """
+ ''',
+ m.DoctestSyntaxError,
+ m.DoctestSyntaxError,
+ m.DoctestSyntaxError).messages
+ exc = exceptions[0]
+ self.assertEqual(exc.lineno, 4)
+ if PYPY:
+ self.assertEqual(exc.col, 27)
+ elif sys.version_info >= (3, 8):
+ self.assertEqual(exc.col, 18)
+ else:
+ self.assertEqual(exc.col, 26)
+
+ # PyPy error column offset is 0,
+ # for the second and third line of the doctest
+ # i.e. at the beginning of the line
+ exc = exceptions[1]
+ self.assertEqual(exc.lineno, 5)
+ if PYPY:
+ self.assertEqual(exc.col, 14)
+ else:
+ self.assertEqual(exc.col, 16)
+ exc = exceptions[2]
+ self.assertEqual(exc.lineno, 6)
+ if PYPY:
+ self.assertEqual(exc.col, 14)
+ elif sys.version_info >= (3, 8):
+ self.assertEqual(exc.col, 13)
+ else:
+ self.assertEqual(exc.col, 18)
+
+ def test_indentationErrorInDoctest(self):
+ exc = self.flakes('''
+ def doctest_stuff():
+ """
+ >>> if True:
+ ... pass
+ """
+ ''', m.DoctestSyntaxError).messages[0]
+ self.assertEqual(exc.lineno, 5)
+ if PYPY:
+ self.assertEqual(exc.col, 14)
+ elif sys.version_info >= (3, 8):
+ self.assertEqual(exc.col, 13)
+ else:
+ self.assertEqual(exc.col, 16)
+
+ def test_offsetWithMultiLineArgs(self):
+ (exc1, exc2) = self.flakes(
+ '''
+ def doctest_stuff(arg1,
+ arg2,
+ arg3):
+ """
+ >>> assert
+ >>> this
+ """
+ ''',
+ m.DoctestSyntaxError,
+ m.UndefinedName).messages
+ self.assertEqual(exc1.lineno, 6)
+ if PYPY:
+ self.assertEqual(exc1.col, 20)
+ else:
+ self.assertEqual(exc1.col, 19)
+ self.assertEqual(exc2.lineno, 7)
+ self.assertEqual(exc2.col, 12)
+
+ def test_doctestCanReferToFunction(self):
+ self.flakes("""
+ def foo():
+ '''
+ >>> foo
+ '''
+ """)
+
+ def test_doctestCanReferToClass(self):
+ self.flakes("""
+ class Foo():
+ '''
+ >>> Foo
+ '''
+ def bar(self):
+ '''
+ >>> Foo
+ '''
+ """)
+
+ def test_noOffsetSyntaxErrorInDoctest(self):
+ exceptions = self.flakes(
+ '''
+ def buildurl(base, *args, **kwargs):
+ """
+ >>> buildurl('/blah.php', ('a', '&'), ('b', '=')
+ '/blah.php?a=%26&b=%3D'
+ >>> buildurl('/blah.php', a='&', 'b'='=')
+ '/blah.php?b=%3D&a=%26'
+ """
+ pass
+ ''',
+ m.DoctestSyntaxError,
+ m.DoctestSyntaxError).messages
+ exc = exceptions[0]
+ self.assertEqual(exc.lineno, 4)
+ exc = exceptions[1]
+ self.assertEqual(exc.lineno, 6)
+
+ def test_singleUnderscoreInDoctest(self):
+ self.flakes('''
+ def func():
+ """A docstring
+
+ >>> func()
+ 1
+ >>> _
+ 1
+ """
+ return 1
+ ''')
+
+ def test_globalUnderscoreInDoctest(self):
+ self.flakes("""
+ from gettext import ugettext as _
+
+ def doctest_stuff():
+ '''
+ >>> pass
+ '''
+ """, m.UnusedImport)
+
+
+class TestOther(_DoctestMixin, TestOther):
+ """Run TestOther with each test wrapped in a doctest."""
+
+
+class TestImports(_DoctestMixin, TestImports):
+ """Run TestImports with each test wrapped in a doctest."""
+
+
+class TestUndefinedNames(_DoctestMixin, TestUndefinedNames):
+ """Run TestUndefinedNames with each test wrapped in a doctest."""
diff --git a/third_party/python/pyflakes/pyflakes/test/test_imports.py b/third_party/python/pyflakes/pyflakes/test/test_imports.py
new file mode 100644
index 0000000000..13e7beff8a
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/test_imports.py
@@ -0,0 +1,1221 @@
+from sys import version_info
+
+from pyflakes import messages as m
+from pyflakes.checker import (
+ FutureImportation,
+ Importation,
+ ImportationFrom,
+ StarImportation,
+ SubmoduleImportation,
+)
+from pyflakes.test.harness import TestCase, skip, skipIf
+
+
+class TestImportationObject(TestCase):
+
+ def test_import_basic(self):
+ binding = Importation('a', None, 'a')
+ assert binding.source_statement == 'import a'
+ assert str(binding) == 'a'
+
+ def test_import_as(self):
+ binding = Importation('c', None, 'a')
+ assert binding.source_statement == 'import a as c'
+ assert str(binding) == 'a as c'
+
+ def test_import_submodule(self):
+ binding = SubmoduleImportation('a.b', None)
+ assert binding.source_statement == 'import a.b'
+ assert str(binding) == 'a.b'
+
+ def test_import_submodule_as(self):
+ # A submodule import with an as clause is not a SubmoduleImportation
+ binding = Importation('c', None, 'a.b')
+ assert binding.source_statement == 'import a.b as c'
+ assert str(binding) == 'a.b as c'
+
+ def test_import_submodule_as_source_name(self):
+ binding = Importation('a', None, 'a.b')
+ assert binding.source_statement == 'import a.b as a'
+ assert str(binding) == 'a.b as a'
+
+ def test_importfrom_relative(self):
+ binding = ImportationFrom('a', None, '.', 'a')
+ assert binding.source_statement == 'from . import a'
+ assert str(binding) == '.a'
+
+ def test_importfrom_relative_parent(self):
+ binding = ImportationFrom('a', None, '..', 'a')
+ assert binding.source_statement == 'from .. import a'
+ assert str(binding) == '..a'
+
+ def test_importfrom_relative_with_module(self):
+ binding = ImportationFrom('b', None, '..a', 'b')
+ assert binding.source_statement == 'from ..a import b'
+ assert str(binding) == '..a.b'
+
+ def test_importfrom_relative_with_module_as(self):
+ binding = ImportationFrom('c', None, '..a', 'b')
+ assert binding.source_statement == 'from ..a import b as c'
+ assert str(binding) == '..a.b as c'
+
+ def test_importfrom_member(self):
+ binding = ImportationFrom('b', None, 'a', 'b')
+ assert binding.source_statement == 'from a import b'
+ assert str(binding) == 'a.b'
+
+ def test_importfrom_submodule_member(self):
+ binding = ImportationFrom('c', None, 'a.b', 'c')
+ assert binding.source_statement == 'from a.b import c'
+ assert str(binding) == 'a.b.c'
+
+ def test_importfrom_member_as(self):
+ binding = ImportationFrom('c', None, 'a', 'b')
+ assert binding.source_statement == 'from a import b as c'
+ assert str(binding) == 'a.b as c'
+
+ def test_importfrom_submodule_member_as(self):
+ binding = ImportationFrom('d', None, 'a.b', 'c')
+ assert binding.source_statement == 'from a.b import c as d'
+ assert str(binding) == 'a.b.c as d'
+
+ def test_importfrom_star(self):
+ binding = StarImportation('a.b', None)
+ assert binding.source_statement == 'from a.b import *'
+ assert str(binding) == 'a.b.*'
+
+ def test_importfrom_star_relative(self):
+ binding = StarImportation('.b', None)
+ assert binding.source_statement == 'from .b import *'
+ assert str(binding) == '.b.*'
+
+ def test_importfrom_future(self):
+ binding = FutureImportation('print_function', None, None)
+ assert binding.source_statement == 'from __future__ import print_function'
+ assert str(binding) == '__future__.print_function'
+
+ def test_unusedImport_underscore(self):
+ """
+ The magic underscore var should be reported as unused when used as an
+ import alias.
+ """
+ self.flakes('import fu as _', m.UnusedImport)
+
+
+class Test(TestCase):
+
+ def test_unusedImport(self):
+ self.flakes('import fu, bar', m.UnusedImport, m.UnusedImport)
+ self.flakes('from baz import fu, bar', m.UnusedImport, m.UnusedImport)
+
+ def test_unusedImport_relative(self):
+ self.flakes('from . import fu', m.UnusedImport)
+ self.flakes('from . import fu as baz', m.UnusedImport)
+ self.flakes('from .. import fu', m.UnusedImport)
+ self.flakes('from ... import fu', m.UnusedImport)
+ self.flakes('from .. import fu as baz', m.UnusedImport)
+ self.flakes('from .bar import fu', m.UnusedImport)
+ self.flakes('from ..bar import fu', m.UnusedImport)
+ self.flakes('from ...bar import fu', m.UnusedImport)
+ self.flakes('from ...bar import fu as baz', m.UnusedImport)
+
+ checker = self.flakes('from . import fu', m.UnusedImport)
+
+ error = checker.messages[0]
+ assert error.message == '%r imported but unused'
+ assert error.message_args == ('.fu', )
+
+ checker = self.flakes('from . import fu as baz', m.UnusedImport)
+
+ error = checker.messages[0]
+ assert error.message == '%r imported but unused'
+ assert error.message_args == ('.fu as baz', )
+
+ def test_aliasedImport(self):
+ self.flakes('import fu as FU, bar as FU',
+ m.RedefinedWhileUnused, m.UnusedImport)
+ self.flakes('from moo import fu as FU, bar as FU',
+ m.RedefinedWhileUnused, m.UnusedImport)
+
+ def test_aliasedImportShadowModule(self):
+ """Imported aliases can shadow the source of the import."""
+ self.flakes('from moo import fu as moo; moo')
+ self.flakes('import fu as fu; fu')
+ self.flakes('import fu.bar as fu; fu')
+
+ def test_usedImport(self):
+ self.flakes('import fu; print(fu)')
+ self.flakes('from baz import fu; print(fu)')
+ self.flakes('import fu; del fu')
+
+ def test_usedImport_relative(self):
+ self.flakes('from . import fu; assert fu')
+ self.flakes('from .bar import fu; assert fu')
+ self.flakes('from .. import fu; assert fu')
+ self.flakes('from ..bar import fu as baz; assert baz')
+
+ def test_redefinedWhileUnused(self):
+ self.flakes('import fu; fu = 3', m.RedefinedWhileUnused)
+ self.flakes('import fu; fu, bar = 3', m.RedefinedWhileUnused)
+ self.flakes('import fu; [fu, bar] = 3', m.RedefinedWhileUnused)
+
+ def test_redefinedIf(self):
+ """
+ Test that importing a module twice within an if
+ block does raise a warning.
+ """
+ self.flakes('''
+ i = 2
+ if i==1:
+ import os
+ import os
+ os.path''', m.RedefinedWhileUnused)
+
+ def test_redefinedIfElse(self):
+ """
+ Test that importing a module twice in if
+ and else blocks does not raise a warning.
+ """
+ self.flakes('''
+ i = 2
+ if i==1:
+ import os
+ else:
+ import os
+ os.path''')
+
+ def test_redefinedTry(self):
+ """
+ Test that importing a module twice in a try block
+ does raise a warning.
+ """
+ self.flakes('''
+ try:
+ import os
+ import os
+ except:
+ pass
+ os.path''', m.RedefinedWhileUnused)
+
+ def test_redefinedTryExcept(self):
+ """
+ Test that importing a module twice in a try
+ and except block does not raise a warning.
+ """
+ self.flakes('''
+ try:
+ import os
+ except:
+ import os
+ os.path''')
+
+ def test_redefinedTryNested(self):
+ """
+        Test that importing a module twice in nested
+        try/except and if blocks does not issue a warning.
+ """
+ self.flakes('''
+ try:
+ if True:
+ if True:
+ import os
+ except:
+ import os
+ os.path''')
+
+ def test_redefinedTryExceptMulti(self):
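+        # Rebinding the same name in alternative except branches is not flagged as a redefinition.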
+ self.flakes("""
+ try:
+ from aa import mixer
+ except AttributeError:
+ from bb import mixer
+ except RuntimeError:
+ from cc import mixer
+ except:
+ from dd import mixer
+ mixer(123)
+ """)
+
+ def test_redefinedTryElse(self):
+ self.flakes("""
+ try:
+ from aa import mixer
+ except ImportError:
+ pass
+ else:
+ from bb import mixer
+ mixer(123)
+ """, m.RedefinedWhileUnused)
+
+ def test_redefinedTryExceptElse(self):
+ self.flakes("""
+ try:
+ import funca
+ except ImportError:
+ from bb import funca
+ from bb import funcb
+ else:
+ from bbb import funcb
+ print(funca, funcb)
+ """)
+
+ def test_redefinedTryExceptFinally(self):
+ self.flakes("""
+ try:
+ from aa import a
+ except ImportError:
+ from bb import a
+ finally:
+ a = 42
+ print(a)
+ """)
+
+ def test_redefinedTryExceptElseFinally(self):
+ self.flakes("""
+ try:
+ import b
+ except ImportError:
+ b = Ellipsis
+ from bb import a
+ else:
+ from aa import a
+ finally:
+ a = 42
+ print(a, b)
+ """)
+
+ def test_redefinedByFunction(self):
+ self.flakes('''
+ import fu
+ def fu():
+ pass
+ ''', m.RedefinedWhileUnused)
+
+ def test_redefinedInNestedFunction(self):
+ """
+ Test that shadowing a global name with a nested function definition
+ generates a warning.
+ """
+ self.flakes('''
+ import fu
+ def bar():
+ def baz():
+ def fu():
+ pass
+ ''', m.RedefinedWhileUnused, m.UnusedImport)
+
+ def test_redefinedInNestedFunctionTwice(self):
+ """
+ Test that shadowing a global name with a nested function definition
+ generates a warning.
+ """
+ self.flakes('''
+ import fu
+ def bar():
+ import fu
+ def baz():
+ def fu():
+ pass
+ ''',
+ m.RedefinedWhileUnused, m.RedefinedWhileUnused,
+ m.UnusedImport, m.UnusedImport)
+
+ def test_redefinedButUsedLater(self):
+ """
+ Test that a global import which is redefined locally,
+ but used later in another scope does not generate a warning.
+ """
+ self.flakes('''
+ import unittest, transport
+
+ class GetTransportTestCase(unittest.TestCase):
+ def test_get_transport(self):
+ transport = 'transport'
+ self.assertIsNotNone(transport)
+
+ class TestTransportMethodArgs(unittest.TestCase):
+ def test_send_defaults(self):
+ transport.Transport()
+ ''')
+
+ def test_redefinedByClass(self):
+ self.flakes('''
+ import fu
+ class fu:
+ pass
+ ''', m.RedefinedWhileUnused)
+
+ def test_redefinedBySubclass(self):
+ """
+ If an imported name is redefined by a class statement which also uses
+ that name in the bases list, no warning is emitted.
+ """
+ self.flakes('''
+ from fu import bar
+ class bar(bar):
+ pass
+ ''')
+
+ def test_redefinedInClass(self):
+ """
+ Test that shadowing a global with a class attribute does not produce a
+ warning.
+ """
+ self.flakes('''
+ import fu
+ class bar:
+ fu = 1
+ print(fu)
+ ''')
+
+ def test_importInClass(self):
+ """
+ Test that import within class is a locally scoped attribute.
+ """
+ self.flakes('''
+ class bar:
+ import fu
+ ''')
+
+ self.flakes('''
+ class bar:
+ import fu
+
+ fu
+ ''', m.UndefinedName)
+
+ def test_usedInFunction(self):
+ self.flakes('''
+ import fu
+ def fun():
+ print(fu)
+ ''')
+
+ def test_shadowedByParameter(self):
+ self.flakes('''
+ import fu
+ def fun(fu):
+ print(fu)
+ ''', m.UnusedImport, m.RedefinedWhileUnused)
+
+ self.flakes('''
+ import fu
+ def fun(fu):
+ print(fu)
+ print(fu)
+ ''')
+
+ def test_newAssignment(self):
+ self.flakes('fu = None')
+
+ def test_usedInGetattr(self):
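+        # The second case only accesses attributes on a string literal, so the import stays unused.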
+ self.flakes('import fu; fu.bar.baz')
+ self.flakes('import fu; "bar".fu.baz', m.UnusedImport)
+
+ def test_usedInSlice(self):
+ self.flakes('import fu; print(fu.bar[1:])')
+
+ def test_usedInIfBody(self):
+ self.flakes('''
+ import fu
+ if True: print(fu)
+ ''')
+
+ def test_usedInIfConditional(self):
+ self.flakes('''
+ import fu
+ if fu: pass
+ ''')
+
+ def test_usedInElifConditional(self):
+ self.flakes('''
+ import fu
+ if False: pass
+ elif fu: pass
+ ''')
+
+ def test_usedInElse(self):
+ self.flakes('''
+ import fu
+ if False: pass
+ else: print(fu)
+ ''')
+
+ def test_usedInCall(self):
+ self.flakes('import fu; fu.bar()')
+
+ def test_usedInClass(self):
+ self.flakes('''
+ import fu
+ class bar:
+ bar = fu
+ ''')
+
+ def test_usedInClassBase(self):
+ self.flakes('''
+ import fu
+ class bar(object, fu.baz):
+ pass
+ ''')
+
+ def test_notUsedInNestedScope(self):
+ self.flakes('''
+ import fu
+ def bleh():
+ pass
+ print(fu)
+ ''')
+
+ def test_usedInFor(self):
+ self.flakes('''
+ import fu
+ for bar in range(9):
+ print(fu)
+ ''')
+
+ def test_usedInForElse(self):
+ self.flakes('''
+ import fu
+ for bar in range(10):
+ pass
+ else:
+ print(fu)
+ ''')
+
+ def test_redefinedByFor(self):
+ self.flakes('''
+ import fu
+ for fu in range(2):
+ pass
+ ''', m.ImportShadowedByLoopVar)
+
+ def test_shadowedByFor(self):
+ """
+ Test that shadowing a global name with a for loop variable generates a
+ warning.
+ """
+ self.flakes('''
+ import fu
+ fu.bar()
+ for fu in ():
+ pass
+ ''', m.ImportShadowedByLoopVar)
+
+ def test_shadowedByForDeep(self):
+ """
+ Test that shadowing a global name with a for loop variable nested in a
+ tuple unpack generates a warning.
+ """
+ self.flakes('''
+ import fu
+ fu.bar()
+ for (x, y, z, (a, b, c, (fu,))) in ():
+ pass
+ ''', m.ImportShadowedByLoopVar)
+ # Same with a list instead of a tuple
+ self.flakes('''
+ import fu
+ fu.bar()
+ for [x, y, z, (a, b, c, (fu,))] in ():
+ pass
+ ''', m.ImportShadowedByLoopVar)
+
+ def test_usedInReturn(self):
+ self.flakes('''
+ import fu
+ def fun():
+ return fu
+ ''')
+
+ def test_usedInOperators(self):
+ self.flakes('import fu; 3 + fu.bar')
+ self.flakes('import fu; 3 % fu.bar')
+ self.flakes('import fu; 3 - fu.bar')
+ self.flakes('import fu; 3 * fu.bar')
+ self.flakes('import fu; 3 ** fu.bar')
+ self.flakes('import fu; 3 / fu.bar')
+ self.flakes('import fu; 3 // fu.bar')
+ self.flakes('import fu; -fu.bar')
+ self.flakes('import fu; ~fu.bar')
+ self.flakes('import fu; 1 == fu.bar')
+ self.flakes('import fu; 1 | fu.bar')
+ self.flakes('import fu; 1 & fu.bar')
+ self.flakes('import fu; 1 ^ fu.bar')
+ self.flakes('import fu; 1 >> fu.bar')
+ self.flakes('import fu; 1 << fu.bar')
+
+ def test_usedInAssert(self):
+ self.flakes('import fu; assert fu.bar')
+
+ def test_usedInSubscript(self):
+ self.flakes('import fu; fu.bar[1]')
+
+ def test_usedInLogic(self):
+ self.flakes('import fu; fu and False')
+ self.flakes('import fu; fu or False')
+ self.flakes('import fu; not fu.bar')
+
+ def test_usedInList(self):
+ self.flakes('import fu; [fu]')
+
+ def test_usedInTuple(self):
+ self.flakes('import fu; (fu,)')
+
+ def test_usedInTry(self):
+ self.flakes('''
+ import fu
+ try: fu
+ except: pass
+ ''')
+
+ def test_usedInExcept(self):
+ self.flakes('''
+ import fu
+ try: fu
+ except: pass
+ ''')
+
+ def test_redefinedByExcept(self):
+ expected = [m.RedefinedWhileUnused]
+ if version_info >= (3,):
+ # The exc variable is unused inside the exception handler.
+ expected.append(m.UnusedVariable)
+ self.flakes('''
+ import fu
+ try: pass
+ except Exception as fu: pass
+ ''', *expected)
+
+ def test_usedInRaise(self):
+ self.flakes('''
+ import fu
+ raise fu.bar
+ ''')
+
+ def test_usedInYield(self):
+ self.flakes('''
+ import fu
+ def gen():
+ yield fu
+ ''')
+
+ def test_usedInDict(self):
+ self.flakes('import fu; {fu:None}')
+ self.flakes('import fu; {1:fu}')
+
+ def test_usedInParameterDefault(self):
+ self.flakes('''
+ import fu
+ def f(bar=fu):
+ pass
+ ''')
+
+ def test_usedInAttributeAssign(self):
+ self.flakes('import fu; fu.bar = 1')
+
+ def test_usedInKeywordArg(self):
+ self.flakes('import fu; fu.bar(stuff=fu)')
+
+ def test_usedInAssignment(self):
+ self.flakes('import fu; bar=fu')
+ self.flakes('import fu; n=0; n+=fu')
+
+ def test_usedInListComp(self):
+ self.flakes('import fu; [fu for _ in range(1)]')
+ self.flakes('import fu; [1 for _ in range(1) if fu]')
+
+ @skipIf(version_info >= (3,),
+ 'in Python 3 list comprehensions execute in a separate scope')
+ def test_redefinedByListComp(self):
+ self.flakes('import fu; [1 for fu in range(1)]',
+ m.RedefinedInListComp)
+
+ def test_usedInTryFinally(self):
+ self.flakes('''
+ import fu
+ try: pass
+ finally: fu
+ ''')
+
+ self.flakes('''
+ import fu
+ try: fu
+ finally: pass
+ ''')
+
+ def test_usedInWhile(self):
+ self.flakes('''
+ import fu
+ while 0:
+ fu
+ ''')
+
+ self.flakes('''
+ import fu
+ while fu: pass
+ ''')
+
+ def test_usedInGlobal(self):
+ """
+ A 'global' statement shadowing an unused import should not prevent it
+ from being reported.
+ """
+ self.flakes('''
+ import fu
+ def f(): global fu
+ ''', m.UnusedImport)
+
+ def test_usedAndGlobal(self):
+ """
+ A 'global' statement shadowing a used import should not cause it to be
+ reported as unused.
+ """
+ self.flakes('''
+ import foo
+ def f(): global foo
+ def g(): foo.is_used()
+ ''')
+
+ def test_assignedToGlobal(self):
+ """
+ Binding an import to a declared global should not cause it to be
+ reported as unused.
+ """
+ self.flakes('''
+ def f(): global foo; import foo
+ def g(): foo.is_used()
+ ''')
+
+ @skipIf(version_info >= (3,), 'deprecated syntax')
+ def test_usedInBackquote(self):
+ self.flakes('import fu; `fu`')
+
+ def test_usedInExec(self):
+ if version_info < (3,):
+ exec_stmt = 'exec "print 1" in fu.bar'
+ else:
+ exec_stmt = 'exec("print(1)", fu.bar)'
+ self.flakes('import fu; %s' % exec_stmt)
+
+ def test_usedInLambda(self):
+ self.flakes('import fu; lambda: fu')
+
+ def test_shadowedByLambda(self):
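+        # The lambda parameter shadows the import; only the second case uses the import via fu().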
+ self.flakes('import fu; lambda fu: fu',
+ m.UnusedImport, m.RedefinedWhileUnused)
+ self.flakes('import fu; lambda fu: fu\nfu()')
+
+ def test_usedInSliceObj(self):
+ self.flakes('import fu; "meow"[::fu]')
+
+ def test_unusedInNestedScope(self):
+ self.flakes('''
+ def bar():
+ import fu
+ fu
+ ''', m.UnusedImport, m.UndefinedName)
+
+ def test_methodsDontUseClassScope(self):
+ self.flakes('''
+ class bar:
+ import fu
+ def fun(self):
+ fu
+ ''', m.UndefinedName)
+
+ def test_nestedFunctionsNestScope(self):
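+        # The nested function resolves 'fu' from the enclosing scope, where it is imported later.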
+ self.flakes('''
+ def a():
+ def b():
+ fu
+ import fu
+ ''')
+
+ def test_nestedClassAndFunctionScope(self):
+ self.flakes('''
+ def a():
+ import fu
+ class b:
+ def c(self):
+ print(fu)
+ ''')
+
+ def test_importStar(self):
+ """Use of import * at module level is reported."""
+ self.flakes('from fu import *', m.ImportStarUsed, m.UnusedImport)
+ self.flakes('''
+ try:
+ from fu import *
+ except:
+ pass
+ ''', m.ImportStarUsed, m.UnusedImport)
+
+ checker = self.flakes('from fu import *',
+ m.ImportStarUsed, m.UnusedImport)
+
+ error = checker.messages[0]
+ assert error.message.startswith("'from %s import *' used; unable ")
+ assert error.message_args == ('fu', )
+
+ error = checker.messages[1]
+ assert error.message == '%r imported but unused'
+ assert error.message_args == ('fu.*', )
+
+ def test_importStar_relative(self):
+ """Use of import * from a relative import is reported."""
+ self.flakes('from .fu import *', m.ImportStarUsed, m.UnusedImport)
+ self.flakes('''
+ try:
+ from .fu import *
+ except:
+ pass
+ ''', m.ImportStarUsed, m.UnusedImport)
+
+ checker = self.flakes('from .fu import *',
+ m.ImportStarUsed, m.UnusedImport)
+
+ error = checker.messages[0]
+ assert error.message.startswith("'from %s import *' used; unable ")
+ assert error.message_args == ('.fu', )
+
+ error = checker.messages[1]
+ assert error.message == '%r imported but unused'
+ assert error.message_args == ('.fu.*', )
+
+ checker = self.flakes('from .. import *',
+ m.ImportStarUsed, m.UnusedImport)
+
+ error = checker.messages[0]
+ assert error.message.startswith("'from %s import *' used; unable ")
+ assert error.message_args == ('..', )
+
+ error = checker.messages[1]
+ assert error.message == '%r imported but unused'
+ assert error.message_args == ('from .. import *', )
+
+ @skipIf(version_info < (3,),
+ 'import * below module level is a warning on Python 2')
+ def test_localImportStar(self):
+ """import * is only allowed at module level."""
+ self.flakes('''
+ def a():
+ from fu import *
+ ''', m.ImportStarNotPermitted)
+ self.flakes('''
+ class a:
+ from fu import *
+ ''', m.ImportStarNotPermitted)
+
+ checker = self.flakes('''
+ class a:
+ from .. import *
+ ''', m.ImportStarNotPermitted)
+ error = checker.messages[0]
+ assert error.message == "'from %s import *' only allowed at module level"
+ assert error.message_args == ('..', )
+
+ @skipIf(version_info > (3,),
+ 'import * below module level is an error on Python 3')
+ def test_importStarNested(self):
+ """All star imports are marked as used by an undefined variable."""
+ self.flakes('''
+ from fu import *
+ def f():
+ from bar import *
+ x
+ ''', m.ImportStarUsed, m.ImportStarUsed, m.ImportStarUsage)
+
+ def test_packageImport(self):
+ """
+ If a dotted name is imported and used, no warning is reported.
+ """
+ self.flakes('''
+ import fu.bar
+ fu.bar
+ ''')
+
+ def test_unusedPackageImport(self):
+ """
+ If a dotted name is imported and not used, an unused import warning is
+ reported.
+ """
+ self.flakes('import fu.bar', m.UnusedImport)
+
+ def test_duplicateSubmoduleImport(self):
+ """
+        If a submodule of a package is imported twice, a redefined while
+        unused warning is reported for the second import.
+ """
+ self.flakes('''
+ import fu.bar, fu.bar
+ fu.bar
+ ''', m.RedefinedWhileUnused)
+ self.flakes('''
+ import fu.bar
+ import fu.bar
+ fu.bar
+ ''', m.RedefinedWhileUnused)
+
+ def test_differentSubmoduleImport(self):
+ """
+ If two different submodules of a package are imported, no duplicate
+ import warning is reported for the package.
+ """
+ self.flakes('''
+ import fu.bar, fu.baz
+ fu.bar, fu.baz
+ ''')
+ self.flakes('''
+ import fu.bar
+ import fu.baz
+ fu.bar, fu.baz
+ ''')
+
+ def test_used_package_with_submodule_import(self):
+ """
+ Usage of package marks submodule imports as used.
+ """
+ self.flakes('''
+ import fu
+ import fu.bar
+ fu.x
+ ''')
+
+ self.flakes('''
+ import fu.bar
+ import fu
+ fu.x
+ ''')
+
+ def test_used_package_with_submodule_import_of_alias(self):
+ """
+ Usage of package by alias marks submodule imports as used.
+ """
+ self.flakes('''
+ import foo as f
+ import foo.bar
+ f.bar.do_something()
+ ''')
+
+ self.flakes('''
+ import foo as f
+ import foo.bar.blah
+ f.bar.blah.do_something()
+ ''')
+
+ def test_unused_package_with_submodule_import(self):
+ """
+ When a package and its submodule are imported, only report once.
+ """
+ checker = self.flakes('''
+ import fu
+ import fu.bar
+ ''', m.UnusedImport)
+ error = checker.messages[0]
+ assert error.message == '%r imported but unused'
+ assert error.message_args == ('fu.bar', )
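+        # Under the doctest harness the source is wrapped, which shifts the reported line number by two.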
+        assert error.lineno == (5 if self.withDoctest else 3)
+
+ def test_assignRHSFirst(self):
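+        # The right-hand side is evaluated before the name is rebound, so the import counts as used.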
+ self.flakes('import fu; fu = fu')
+ self.flakes('import fu; fu, bar = fu')
+ self.flakes('import fu; [fu, bar] = fu')
+ self.flakes('import fu; fu += fu')
+
+ def test_tryingMultipleImports(self):
+ self.flakes('''
+ try:
+ import fu
+ except ImportError:
+ import bar as fu
+ fu
+ ''')
+
+ def test_nonGlobalDoesNotRedefine(self):
+ self.flakes('''
+ import fu
+ def a():
+ fu = 3
+ return fu
+ fu
+ ''')
+
+ def test_functionsRunLater(self):
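+        # The function body only runs after the module-level import has executed, so 'fu' is defined.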
+ self.flakes('''
+ def a():
+ fu
+ import fu
+ ''')
+
+ def test_functionNamesAreBoundNow(self):
+ self.flakes('''
+ import fu
+ def fu():
+ fu
+ fu
+ ''', m.RedefinedWhileUnused)
+
+ def test_ignoreNonImportRedefinitions(self):
+ self.flakes('a = 1; a = 2')
+
+ @skip("todo")
+ def test_importingForImportError(self):
+ self.flakes('''
+ try:
+ import fu
+ except ImportError:
+ pass
+ ''')
+
+ def test_importedInClass(self):
+ """Imports in class scope can be used through self."""
+ self.flakes('''
+ class c:
+ import i
+ def __init__(self):
+ self.i
+ ''')
+
+ def test_importUsedInMethodDefinition(self):
+ """
+ Method named 'foo' with default args referring to module named 'foo'.
+ """
+ self.flakes('''
+ import foo
+
+ class Thing(object):
+ def foo(self, parser=foo.parse_foo):
+ pass
+ ''')
+
+ def test_futureImport(self):
+ """__future__ is special."""
+ self.flakes('from __future__ import division')
+ self.flakes('''
+ "docstring is allowed before future import"
+ from __future__ import division
+ ''')
+
+ def test_futureImportFirst(self):
+ """
+ __future__ imports must come before anything else.
+ """
+ self.flakes('''
+ x = 5
+ from __future__ import division
+ ''', m.LateFutureImport)
+ self.flakes('''
+ from foo import bar
+ from __future__ import division
+ bar
+ ''', m.LateFutureImport)
+
+ def test_futureImportUsed(self):
+ """__future__ is special, but names are injected in the namespace."""
+ self.flakes('''
+ from __future__ import division
+ from __future__ import print_function
+
+ assert print_function is not division
+ ''')
+
+ def test_futureImportUndefined(self):
+ """Importing undefined names from __future__ fails."""
+ self.flakes('''
+ from __future__ import print_statement
+ ''', m.FutureFeatureNotDefined)
+
+ def test_futureImportStar(self):
+ """Importing '*' from __future__ fails."""
+ self.flakes('''
+ from __future__ import *
+ ''', m.FutureFeatureNotDefined)
+
+
+class TestSpecialAll(TestCase):
+ """
+ Tests for suppression of unused import warnings by C{__all__}.
+ """
+ def test_ignoredInFunction(self):
+ """
+ An C{__all__} definition does not suppress unused import warnings in a
+ function scope.
+ """
+ self.flakes('''
+ def foo():
+ import bar
+ __all__ = ["bar"]
+ ''', m.UnusedImport, m.UnusedVariable)
+
+ def test_ignoredInClass(self):
+ """
+ An C{__all__} definition in a class does not suppress unused import warnings.
+ """
+ self.flakes('''
+ import bar
+ class foo:
+ __all__ = ["bar"]
+ ''', m.UnusedImport)
+
+ def test_warningSuppressed(self):
+ """
+ If a name is imported and unused but is named in C{__all__}, no warning
+ is reported.
+ """
+ self.flakes('''
+ import foo
+ __all__ = ["foo"]
+ ''')
+ self.flakes('''
+ import foo
+ __all__ = ("foo",)
+ ''')
+
+ def test_augmentedAssignment(self):
+ """
+ The C{__all__} variable is defined incrementally.
+ """
+ self.flakes('''
+ import a
+ import c
+ __all__ = ['a']
+ __all__ += ['b']
+ if 1 < 3:
+ __all__ += ['c', 'd']
+ ''', m.UndefinedExport, m.UndefinedExport)
+
+ def test_concatenationAssignment(self):
+ """
+ The C{__all__} variable is defined through list concatenation.
+ """
+ self.flakes('''
+ import sys
+ __all__ = ['a'] + ['b'] + ['c']
+ ''', m.UndefinedExport, m.UndefinedExport, m.UndefinedExport, m.UnusedImport)
+
+ def test_all_with_attributes(self):
+ self.flakes('''
+ from foo import bar
+ __all__ = [bar.__name__]
+ ''')
+
+ def test_all_with_names(self):
+ # not actually valid, but shouldn't produce a crash
+ self.flakes('''
+ from foo import bar
+ __all__ = [bar]
+ ''')
+
+ def test_all_with_attributes_added(self):
+ self.flakes('''
+ from foo import bar
+ from bar import baz
+ __all__ = [bar.__name__] + [baz.__name__]
+ ''')
+
+ def test_all_mixed_attributes_and_strings(self):
+ self.flakes('''
+ from foo import bar
+ from foo import baz
+ __all__ = ['bar', baz.__name__]
+ ''')
+
+ def test_unboundExported(self):
+ """
+ If C{__all__} includes a name which is not bound, a warning is emitted.
+ """
+ self.flakes('''
+ __all__ = ["foo"]
+ ''', m.UndefinedExport)
+
+ # Skip this in __init__.py though, since the rules there are a little
+ # different.
+ for filename in ["foo/__init__.py", "__init__.py"]:
+ self.flakes('''
+ __all__ = ["foo"]
+ ''', filename=filename)
+
+ def test_importStarExported(self):
+ """
+        Report star-import usage for names that may come from C{import *}.
+ """
+ self.flakes('''
+ from math import *
+ __all__ = ['sin', 'cos']
+ csc(1)
+ ''', m.ImportStarUsed, m.ImportStarUsage, m.ImportStarUsage, m.ImportStarUsage)
+
+ def test_importStarNotExported(self):
+ """Report unused import when not needed to satisfy __all__."""
+ self.flakes('''
+ from foolib import *
+ a = 1
+ __all__ = ['a']
+ ''', m.ImportStarUsed, m.UnusedImport)
+
+ def test_usedInGenExp(self):
+ """
+ Using a global in a generator expression results in no warnings.
+ """
+ self.flakes('import fu; (fu for _ in range(1))')
+ self.flakes('import fu; (1 for _ in range(1) if fu)')
+
+ def test_redefinedByGenExp(self):
+ """
+ Re-using a global name as the loop variable for a generator
+ expression results in a redefinition warning.
+ """
+ self.flakes('import fu; (1 for fu in range(1))',
+ m.RedefinedWhileUnused, m.UnusedImport)
+
+ def test_usedAsDecorator(self):
+ """
+ Using a global name in a decorator statement results in no warnings,
+ but using an undefined name in a decorator statement results in an
+ undefined name warning.
+ """
+ self.flakes('''
+ from interior import decorate
+ @decorate
+ def f():
+ return "hello"
+ ''')
+
+ self.flakes('''
+ from interior import decorate
+ @decorate('value')
+ def f():
+ return "hello"
+ ''')
+
+ self.flakes('''
+ @decorate
+ def f():
+ return "hello"
+ ''', m.UndefinedName)
+
+ def test_usedAsClassDecorator(self):
+ """
+ Using an imported name as a class decorator results in no warnings,
+ but using an undefined name as a class decorator results in an
+ undefined name warning.
+ """
+ self.flakes('''
+ from interior import decorate
+ @decorate
+ class foo:
+ pass
+ ''')
+
+ self.flakes('''
+ from interior import decorate
+ @decorate("foo")
+ class bar:
+ pass
+ ''')
+
+ self.flakes('''
+ @decorate
+ class foo:
+ pass
+ ''', m.UndefinedName)
diff --git a/third_party/python/pyflakes/pyflakes/test/test_is_literal.py b/third_party/python/pyflakes/pyflakes/test/test_is_literal.py
new file mode 100644
index 0000000000..fbbb20536d
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/test_is_literal.py
@@ -0,0 +1,222 @@
+from pyflakes.messages import IsLiteral
+from pyflakes.test.harness import TestCase
+
+
+class Test(TestCase):
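+    # Comparisons with str/bytes/num/tuple literals trigger IsLiteral; True and False do not.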
+ def test_is_str(self):
+ self.flakes("""
+ x = 'foo'
+ if x is 'foo':
+ pass
+ """, IsLiteral)
+
+ def test_is_bytes(self):
+ self.flakes("""
+ x = b'foo'
+ if x is b'foo':
+ pass
+ """, IsLiteral)
+
+ def test_is_unicode(self):
+ self.flakes("""
+ x = u'foo'
+ if x is u'foo':
+ pass
+ """, IsLiteral)
+
+ def test_is_int(self):
+ self.flakes("""
+ x = 10
+ if x is 10:
+ pass
+ """, IsLiteral)
+
+ def test_is_true(self):
+ self.flakes("""
+ x = True
+ if x is True:
+ pass
+ """)
+
+ def test_is_false(self):
+ self.flakes("""
+ x = False
+ if x is False:
+ pass
+ """)
+
+ def test_is_not_str(self):
+ self.flakes("""
+ x = 'foo'
+ if x is not 'foo':
+ pass
+ """, IsLiteral)
+
+ def test_is_not_bytes(self):
+ self.flakes("""
+ x = b'foo'
+ if x is not b'foo':
+ pass
+ """, IsLiteral)
+
+ def test_is_not_unicode(self):
+ self.flakes("""
+ x = u'foo'
+ if x is not u'foo':
+ pass
+ """, IsLiteral)
+
+ def test_is_not_int(self):
+ self.flakes("""
+ x = 10
+ if x is not 10:
+ pass
+ """, IsLiteral)
+
+ def test_is_not_true(self):
+ self.flakes("""
+ x = True
+ if x is not True:
+ pass
+ """)
+
+ def test_is_not_false(self):
+ self.flakes("""
+ x = False
+ if x is not False:
+ pass
+ """)
+
+ def test_left_is_str(self):
+ self.flakes("""
+ x = 'foo'
+ if 'foo' is x:
+ pass
+ """, IsLiteral)
+
+ def test_left_is_bytes(self):
+ self.flakes("""
+ x = b'foo'
+ if b'foo' is x:
+ pass
+ """, IsLiteral)
+
+ def test_left_is_unicode(self):
+ self.flakes("""
+ x = u'foo'
+ if u'foo' is x:
+ pass
+ """, IsLiteral)
+
+ def test_left_is_int(self):
+ self.flakes("""
+ x = 10
+ if 10 is x:
+ pass
+ """, IsLiteral)
+
+ def test_left_is_true(self):
+ self.flakes("""
+ x = True
+ if True is x:
+ pass
+ """)
+
+ def test_left_is_false(self):
+ self.flakes("""
+ x = False
+ if False is x:
+ pass
+ """)
+
+ def test_left_is_not_str(self):
+ self.flakes("""
+ x = 'foo'
+ if 'foo' is not x:
+ pass
+ """, IsLiteral)
+
+ def test_left_is_not_bytes(self):
+ self.flakes("""
+ x = b'foo'
+ if b'foo' is not x:
+ pass
+ """, IsLiteral)
+
+ def test_left_is_not_unicode(self):
+ self.flakes("""
+ x = u'foo'
+ if u'foo' is not x:
+ pass
+ """, IsLiteral)
+
+ def test_left_is_not_int(self):
+ self.flakes("""
+ x = 10
+ if 10 is not x:
+ pass
+ """, IsLiteral)
+
+ def test_left_is_not_true(self):
+ self.flakes("""
+ x = True
+ if True is not x:
+ pass
+ """)
+
+ def test_left_is_not_false(self):
+ self.flakes("""
+ x = False
+ if False is not x:
+ pass
+ """)
+
+ def test_chained_operators_is_true(self):
+ self.flakes("""
+ x = 5
+ if x is True < 4:
+ pass
+ """)
+
+ def test_chained_operators_is_str(self):
+ self.flakes("""
+ x = 5
+ if x is 'foo' < 4:
+ pass
+ """, IsLiteral)
+
+ def test_chained_operators_is_true_end(self):
+ self.flakes("""
+ x = 5
+ if 4 < x is True:
+ pass
+ """)
+
+ def test_chained_operators_is_str_end(self):
+ self.flakes("""
+ x = 5
+ if 4 < x is 'foo':
+ pass
+ """, IsLiteral)
+
+ def test_is_tuple_constant(self):
+ self.flakes('''\
+ x = 5
+ if x is ():
+ pass
+ ''', IsLiteral)
+
+ def test_is_tuple_constant_containing_constants(self):
+ self.flakes('''\
+ x = 5
+ if x is (1, '2', True, (1.5, ())):
+ pass
+ ''', IsLiteral)
+
+ def test_is_tuple_containing_variables_ok(self):
+ # a bit nonsensical, but does not trigger a SyntaxWarning
+ self.flakes('''\
+ x = 5
+ if x is (x,):
+ pass
+ ''')
diff --git a/third_party/python/pyflakes/pyflakes/test/test_other.py b/third_party/python/pyflakes/pyflakes/test/test_other.py
new file mode 100644
index 0000000000..7a024680e2
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/test_other.py
@@ -0,0 +1,2142 @@
+"""
+Tests for various Pyflakes behavior.
+"""
+
+from sys import version_info
+
+from pyflakes import messages as m
+from pyflakes.test.harness import TestCase, skip, skipIf
+
+
+class Test(TestCase):
+
+ def test_duplicateArgs(self):
+ self.flakes('def fu(bar, bar): pass', m.DuplicateArgument)
+
+ def test_localReferencedBeforeAssignment(self):
+ self.flakes('''
+ a = 1
+ def f():
+ a; a=1
+ f()
+ ''', m.UndefinedLocal, m.UnusedVariable)
+
+ @skipIf(version_info >= (3,),
+ 'in Python 3 list comprehensions execute in a separate scope')
+ def test_redefinedInListComp(self):
+ """
+ Test that shadowing a variable in a list comprehension raises
+ a warning.
+ """
+ self.flakes('''
+ a = 1
+ [1 for a, b in [(1, 2)]]
+ ''', m.RedefinedInListComp)
+ self.flakes('''
+ class A:
+ a = 1
+ [1 for a, b in [(1, 2)]]
+ ''', m.RedefinedInListComp)
+ self.flakes('''
+ def f():
+ a = 1
+ [1 for a, b in [(1, 2)]]
+ ''', m.RedefinedInListComp)
+ self.flakes('''
+ [1 for a, b in [(1, 2)]]
+ [1 for a, b in [(1, 2)]]
+ ''')
+ self.flakes('''
+ for a, b in [(1, 2)]:
+ pass
+ [1 for a, b in [(1, 2)]]
+ ''')
+
+ def test_redefinedInGenerator(self):
+ """
+ Test that reusing a variable in a generator does not raise
+ a warning.
+ """
+ self.flakes('''
+ a = 1
+ (1 for a, b in [(1, 2)])
+ ''')
+ self.flakes('''
+ class A:
+ a = 1
+ list(1 for a, b in [(1, 2)])
+ ''')
+ self.flakes('''
+ def f():
+ a = 1
+ (1 for a, b in [(1, 2)])
+ ''', m.UnusedVariable)
+ self.flakes('''
+ (1 for a, b in [(1, 2)])
+ (1 for a, b in [(1, 2)])
+ ''')
+ self.flakes('''
+ for a, b in [(1, 2)]:
+ pass
+ (1 for a, b in [(1, 2)])
+ ''')
+
+ def test_redefinedInSetComprehension(self):
+ """
+ Test that reusing a variable in a set comprehension does not raise
+ a warning.
+ """
+ self.flakes('''
+ a = 1
+ {1 for a, b in [(1, 2)]}
+ ''')
+ self.flakes('''
+ class A:
+ a = 1
+ {1 for a, b in [(1, 2)]}
+ ''')
+ self.flakes('''
+ def f():
+ a = 1
+ {1 for a, b in [(1, 2)]}
+ ''', m.UnusedVariable)
+ self.flakes('''
+ {1 for a, b in [(1, 2)]}
+ {1 for a, b in [(1, 2)]}
+ ''')
+ self.flakes('''
+ for a, b in [(1, 2)]:
+ pass
+ {1 for a, b in [(1, 2)]}
+ ''')
+
+ def test_redefinedInDictComprehension(self):
+ """
+ Test that reusing a variable in a dict comprehension does not raise
+ a warning.
+ """
+ self.flakes('''
+ a = 1
+ {1: 42 for a, b in [(1, 2)]}
+ ''')
+ self.flakes('''
+ class A:
+ a = 1
+ {1: 42 for a, b in [(1, 2)]}
+ ''')
+ self.flakes('''
+ def f():
+ a = 1
+ {1: 42 for a, b in [(1, 2)]}
+ ''', m.UnusedVariable)
+ self.flakes('''
+ {1: 42 for a, b in [(1, 2)]}
+ {1: 42 for a, b in [(1, 2)]}
+ ''')
+ self.flakes('''
+ for a, b in [(1, 2)]:
+ pass
+ {1: 42 for a, b in [(1, 2)]}
+ ''')
+
+ def test_redefinedFunction(self):
+ """
+ Test that shadowing a function definition with another one raises a
+ warning.
+ """
+ self.flakes('''
+ def a(): pass
+ def a(): pass
+ ''', m.RedefinedWhileUnused)
+
+ def test_redefinedUnderscoreFunction(self):
+ """
+        Test that shadowing a function definition named with an underscore
+        doesn't raise a warning.
+ """
+ self.flakes('''
+ def _(): pass
+ def _(): pass
+ ''')
+
+ def test_redefinedUnderscoreImportation(self):
+ """
+ Test that shadowing an underscore importation raises a warning.
+ """
+ self.flakes('''
+ from .i18n import _
+ def _(): pass
+ ''', m.RedefinedWhileUnused)
+
+ def test_redefinedClassFunction(self):
+ """
+ Test that shadowing a function definition in a class suite with another
+ one raises a warning.
+ """
+ self.flakes('''
+ class A:
+ def a(): pass
+ def a(): pass
+ ''', m.RedefinedWhileUnused)
+
+ def test_redefinedIfElseFunction(self):
+ """
+ Test that shadowing a function definition twice in an if
+ and else block does not raise a warning.
+ """
+ self.flakes('''
+ if True:
+ def a(): pass
+ else:
+ def a(): pass
+ ''')
+
+ def test_redefinedIfFunction(self):
+ """
+ Test that shadowing a function definition within an if block
+ raises a warning.
+ """
+ self.flakes('''
+ if True:
+ def a(): pass
+ def a(): pass
+ ''', m.RedefinedWhileUnused)
+
+ def test_redefinedTryExceptFunction(self):
+ """
+ Test that shadowing a function definition twice in try
+ and except block does not raise a warning.
+ """
+ self.flakes('''
+ try:
+ def a(): pass
+ except:
+ def a(): pass
+ ''')
+
+ def test_redefinedTryFunction(self):
+ """
+ Test that shadowing a function definition within a try block
+ raises a warning.
+ """
+ self.flakes('''
+ try:
+ def a(): pass
+ def a(): pass
+ except:
+ pass
+ ''', m.RedefinedWhileUnused)
+
+ def test_redefinedIfElseInListComp(self):
+ """
+ Test that shadowing a variable in a list comprehension in
+ an if and else block does not raise a warning.
+ """
+ self.flakes('''
+ if False:
+ a = 1
+ else:
+ [a for a in '12']
+ ''')
+
+ @skipIf(version_info >= (3,),
+ 'in Python 3 list comprehensions execute in a separate scope')
+ def test_redefinedElseInListComp(self):
+ """
+ Test that shadowing a variable in a list comprehension in
+ an else (or if) block raises a warning.
+ """
+ self.flakes('''
+ if False:
+ pass
+ else:
+ a = 1
+ [a for a in '12']
+ ''', m.RedefinedInListComp)
+
+ def test_functionDecorator(self):
+ """
+ Test that shadowing a function definition with a decorated version of
+ that function does not raise a warning.
+ """
+ self.flakes('''
+ from somewhere import somedecorator
+
+ def a(): pass
+ a = somedecorator(a)
+ ''')
+
+ def test_classFunctionDecorator(self):
+ """
+ Test that shadowing a function definition in a class suite with a
+ decorated version of that function does not raise a warning.
+ """
+ self.flakes('''
+ class A:
+ def a(): pass
+ a = classmethod(a)
+ ''')
+
+ def test_modernProperty(self):
+ self.flakes("""
+ class A:
+ @property
+ def t(self):
+ pass
+ @t.setter
+ def t(self, value):
+ pass
+ @t.deleter
+ def t(self):
+ pass
+ """)
+
+ def test_unaryPlus(self):
+ """Don't die on unary +."""
+ self.flakes('+1')
+
+ def test_undefinedBaseClass(self):
+ """
+ If a name in the base list of a class definition is undefined, a
+ warning is emitted.
+ """
+ self.flakes('''
+ class foo(foo):
+ pass
+ ''', m.UndefinedName)
+
+ def test_classNameUndefinedInClassBody(self):
+ """
+ If a class name is used in the body of that class's definition and
+ the name is not already defined, a warning is emitted.
+ """
+ self.flakes('''
+ class foo:
+ foo
+ ''', m.UndefinedName)
+
+ def test_classNameDefinedPreviously(self):
+ """
+ If a class name is used in the body of that class's definition and
+ the name was previously defined in some other way, no warning is
+ emitted.
+ """
+ self.flakes('''
+ foo = None
+ class foo:
+ foo
+ ''')
+
+ def test_classRedefinition(self):
+ """
+ If a class is defined twice in the same module, a warning is emitted.
+ """
+ self.flakes('''
+ class Foo:
+ pass
+ class Foo:
+ pass
+ ''', m.RedefinedWhileUnused)
+
+ def test_functionRedefinedAsClass(self):
+ """
+ If a function is redefined as a class, a warning is emitted.
+ """
+ self.flakes('''
+ def Foo():
+ pass
+ class Foo:
+ pass
+ ''', m.RedefinedWhileUnused)
+
+ def test_classRedefinedAsFunction(self):
+ """
+ If a class is redefined as a function, a warning is emitted.
+ """
+ self.flakes('''
+ class Foo:
+ pass
+ def Foo():
+ pass
+ ''', m.RedefinedWhileUnused)
+
+ def test_classWithReturn(self):
+ """
+ If a return is used inside a class, a warning is emitted.
+ """
+ self.flakes('''
+ class Foo(object):
+ return
+ ''', m.ReturnOutsideFunction)
+
+ def test_moduleWithReturn(self):
+ """
+ If a return is used at the module level, a warning is emitted.
+ """
+ self.flakes('''
+ return
+ ''', m.ReturnOutsideFunction)
+
+ def test_classWithYield(self):
+ """
+ If a yield is used inside a class, a warning is emitted.
+ """
+ self.flakes('''
+ class Foo(object):
+ yield
+ ''', m.YieldOutsideFunction)
+
+ def test_moduleWithYield(self):
+ """
+ If a yield is used at the module level, a warning is emitted.
+ """
+ self.flakes('''
+ yield
+ ''', m.YieldOutsideFunction)
+
+ @skipIf(version_info < (3, 3), "Python >= 3.3 only")
+ def test_classWithYieldFrom(self):
+ """
+ If a yield from is used inside a class, a warning is emitted.
+ """
+ self.flakes('''
+ class Foo(object):
+ yield from range(10)
+ ''', m.YieldOutsideFunction)
+
+ @skipIf(version_info < (3, 3), "Python >= 3.3 only")
+ def test_moduleWithYieldFrom(self):
+ """
+ If a yield from is used at the module level, a warning is emitted.
+ """
+ self.flakes('''
+ yield from range(10)
+ ''', m.YieldOutsideFunction)
+
+ def test_continueOutsideLoop(self):
+ self.flakes('''
+ continue
+ ''', m.ContinueOutsideLoop)
+
+ self.flakes('''
+ def f():
+ continue
+ ''', m.ContinueOutsideLoop)
+
+ self.flakes('''
+ while True:
+ pass
+ else:
+ continue
+ ''', m.ContinueOutsideLoop)
+
+ self.flakes('''
+ while True:
+ pass
+ else:
+ if 1:
+ if 2:
+ continue
+ ''', m.ContinueOutsideLoop)
+
+ self.flakes('''
+ while True:
+ def f():
+ continue
+ ''', m.ContinueOutsideLoop)
+
+ self.flakes('''
+ while True:
+ class A:
+ continue
+ ''', m.ContinueOutsideLoop)
+
+ def test_continueInsideLoop(self):
+ self.flakes('''
+ while True:
+ continue
+ ''')
+
+ self.flakes('''
+ for i in range(10):
+ continue
+ ''')
+
+ self.flakes('''
+ while True:
+ if 1:
+ continue
+ ''')
+
+ self.flakes('''
+ for i in range(10):
+ if 1:
+ continue
+ ''')
+
+ self.flakes('''
+ while True:
+ while True:
+ pass
+ else:
+ continue
+ else:
+ pass
+ ''')
+
+ self.flakes('''
+ while True:
+ try:
+ pass
+ finally:
+ while True:
+ continue
+ ''')
+
+ @skipIf(version_info > (3, 8), "Python <= 3.8 only")
+ def test_continueInFinally(self):
+ # 'continue' inside 'finally' is a special syntax error
+        # that was removed in 3.8
+ self.flakes('''
+ while True:
+ try:
+ pass
+ finally:
+ continue
+ ''', m.ContinueInFinally)
+
+ self.flakes('''
+ while True:
+ try:
+ pass
+ finally:
+ if 1:
+ if 2:
+ continue
+ ''', m.ContinueInFinally)
+
+ # Even when not in a loop, this is the error Python gives
+ self.flakes('''
+ try:
+ pass
+ finally:
+ continue
+ ''', m.ContinueInFinally)
+
+ def test_breakOutsideLoop(self):
+ self.flakes('''
+ break
+ ''', m.BreakOutsideLoop)
+
+ self.flakes('''
+ def f():
+ break
+ ''', m.BreakOutsideLoop)
+
+ self.flakes('''
+ while True:
+ pass
+ else:
+ break
+ ''', m.BreakOutsideLoop)
+
+ self.flakes('''
+ while True:
+ pass
+ else:
+ if 1:
+ if 2:
+ break
+ ''', m.BreakOutsideLoop)
+
+ self.flakes('''
+ while True:
+ def f():
+ break
+ ''', m.BreakOutsideLoop)
+
+ self.flakes('''
+ while True:
+ class A:
+ break
+ ''', m.BreakOutsideLoop)
+
+ self.flakes('''
+ try:
+ pass
+ finally:
+ break
+ ''', m.BreakOutsideLoop)
+
+ def test_breakInsideLoop(self):
+ self.flakes('''
+ while True:
+ break
+ ''')
+
+ self.flakes('''
+ for i in range(10):
+ break
+ ''')
+
+ self.flakes('''
+ while True:
+ if 1:
+ break
+ ''')
+
+ self.flakes('''
+ for i in range(10):
+ if 1:
+ break
+ ''')
+
+ self.flakes('''
+ while True:
+ while True:
+ pass
+ else:
+ break
+ else:
+ pass
+ ''')
+
+ self.flakes('''
+ while True:
+ try:
+ pass
+ finally:
+ while True:
+ break
+ ''')
+
+ self.flakes('''
+ while True:
+ try:
+ pass
+ finally:
+ break
+ ''')
+
+ self.flakes('''
+ while True:
+ try:
+ pass
+ finally:
+ if 1:
+ if 2:
+ break
+ ''')
+
+ def test_defaultExceptLast(self):
+ """
+ A default except block should be last.
+
+ YES:
+
+ try:
+ ...
+ except Exception:
+ ...
+ except:
+ ...
+
+ NO:
+
+ try:
+ ...
+ except:
+ ...
+ except Exception:
+ ...
+ """
+ self.flakes('''
+ try:
+ pass
+ except ValueError:
+ pass
+ ''')
+
+ self.flakes('''
+ try:
+ pass
+ except ValueError:
+ pass
+ except:
+ pass
+ ''')
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ ''')
+
+ self.flakes('''
+ try:
+ pass
+ except ValueError:
+ pass
+ else:
+ pass
+ ''')
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ else:
+ pass
+ ''')
+
+ self.flakes('''
+ try:
+ pass
+ except ValueError:
+ pass
+ except:
+ pass
+ else:
+ pass
+ ''')
+
+ def test_defaultExceptNotLast(self):
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ ''', m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except:
+ pass
+ ''', m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ except:
+ pass
+ ''', m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ ''', m.DefaultExceptNotLast, m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ else:
+ pass
+ ''', m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except:
+ pass
+ else:
+ pass
+ ''', m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ except:
+ pass
+ else:
+ pass
+ ''', m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ else:
+ pass
+ ''', m.DefaultExceptNotLast, m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ finally:
+ pass
+ ''', m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except:
+ pass
+ finally:
+ pass
+ ''', m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ except:
+ pass
+ finally:
+ pass
+ ''', m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ finally:
+ pass
+ ''', m.DefaultExceptNotLast, m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ else:
+ pass
+ finally:
+ pass
+ ''', m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except:
+ pass
+ else:
+ pass
+ finally:
+ pass
+ ''', m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ except:
+ pass
+ else:
+ pass
+ finally:
+ pass
+ ''', m.DefaultExceptNotLast)
+
+ self.flakes('''
+ try:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ except:
+ pass
+ except ValueError:
+ pass
+ else:
+ pass
+ finally:
+ pass
+ ''', m.DefaultExceptNotLast, m.DefaultExceptNotLast)
+
+ @skipIf(version_info < (3,), "Python 3 only")
+ def test_starredAssignmentNoError(self):
+ """
+ Python 3 extended iterable unpacking
+ """
+ self.flakes('''
+ a, *b = range(10)
+ ''')
+
+ self.flakes('''
+ *a, b = range(10)
+ ''')
+
+ self.flakes('''
+ a, *b, c = range(10)
+ ''')
+
+ self.flakes('''
+ (a, *b) = range(10)
+ ''')
+
+ self.flakes('''
+ (*a, b) = range(10)
+ ''')
+
+ self.flakes('''
+ (a, *b, c) = range(10)
+ ''')
+
+ self.flakes('''
+ [a, *b] = range(10)
+ ''')
+
+ self.flakes('''
+ [*a, b] = range(10)
+ ''')
+
+ self.flakes('''
+ [a, *b, c] = range(10)
+ ''')
+
+ # Taken from test_unpack_ex.py in the cPython source
+ s = ", ".join("a%d" % i for i in range(1 << 8 - 1)) + \
+ ", *rest = range(1<<8)"
+ self.flakes(s)
+
+ s = "(" + ", ".join("a%d" % i for i in range(1 << 8 - 1)) + \
+ ", *rest) = range(1<<8)"
+ self.flakes(s)
+
+ s = "[" + ", ".join("a%d" % i for i in range(1 << 8 - 1)) + \
+ ", *rest] = range(1<<8)"
+ self.flakes(s)
+
+ @skipIf(version_info < (3, ), "Python 3 only")
+ def test_starredAssignmentErrors(self):
+ """
+ SyntaxErrors (not encoded in the ast) surrounding Python 3 extended
+ iterable unpacking
+ """
+ # Taken from test_unpack_ex.py in the cPython source
+ s = ", ".join("a%d" % i for i in range(1 << 8)) + \
+ ", *rest = range(1<<8 + 1)"
+ self.flakes(s, m.TooManyExpressionsInStarredAssignment)
+
+ s = "(" + ", ".join("a%d" % i for i in range(1 << 8)) + \
+ ", *rest) = range(1<<8 + 1)"
+ self.flakes(s, m.TooManyExpressionsInStarredAssignment)
+
+ s = "[" + ", ".join("a%d" % i for i in range(1 << 8)) + \
+ ", *rest] = range(1<<8 + 1)"
+ self.flakes(s, m.TooManyExpressionsInStarredAssignment)
+
+ s = ", ".join("a%d" % i for i in range(1 << 8 + 1)) + \
+ ", *rest = range(1<<8 + 2)"
+ self.flakes(s, m.TooManyExpressionsInStarredAssignment)
+
+ s = "(" + ", ".join("a%d" % i for i in range(1 << 8 + 1)) + \
+ ", *rest) = range(1<<8 + 2)"
+ self.flakes(s, m.TooManyExpressionsInStarredAssignment)
+
+ s = "[" + ", ".join("a%d" % i for i in range(1 << 8 + 1)) + \
+ ", *rest] = range(1<<8 + 2)"
+ self.flakes(s, m.TooManyExpressionsInStarredAssignment)
+
+ # No way we can actually test this!
+ # s = "*rest, " + ", ".join("a%d" % i for i in range(1<<24)) + \
+ # ", *rest = range(1<<24 + 1)"
+ # self.flakes(s, m.TooManyExpressionsInStarredAssignment)
+
+ self.flakes('''
+ a, *b, *c = range(10)
+ ''', m.TwoStarredExpressions)
+
+ self.flakes('''
+ a, *b, c, *d = range(10)
+ ''', m.TwoStarredExpressions)
+
+ self.flakes('''
+ *a, *b, *c = range(10)
+ ''', m.TwoStarredExpressions)
+
+ self.flakes('''
+ (a, *b, *c) = range(10)
+ ''', m.TwoStarredExpressions)
+
+ self.flakes('''
+ (a, *b, c, *d) = range(10)
+ ''', m.TwoStarredExpressions)
+
+ self.flakes('''
+ (*a, *b, *c) = range(10)
+ ''', m.TwoStarredExpressions)
+
+ self.flakes('''
+ [a, *b, *c] = range(10)
+ ''', m.TwoStarredExpressions)
+
+ self.flakes('''
+ [a, *b, c, *d] = range(10)
+ ''', m.TwoStarredExpressions)
+
+ self.flakes('''
+ [*a, *b, *c] = range(10)
+ ''', m.TwoStarredExpressions)
+
+ @skip("todo: Too hard to make this warn but other cases stay silent")
+ def test_doubleAssignment(self):
+ """
+ If a variable is re-assigned to without being used, no warning is
+ emitted.
+ """
+ self.flakes('''
+ x = 10
+ x = 20
+ ''', m.RedefinedWhileUnused)
+
+ def test_doubleAssignmentConditionally(self):
+ """
+ If a variable is re-assigned within a conditional, no warning is
+ emitted.
+ """
+ self.flakes('''
+ x = 10
+ if True:
+ x = 20
+ ''')
+
+ def test_doubleAssignmentWithUse(self):
+ """
+ If a variable is re-assigned to after being used, no warning is
+ emitted.
+ """
+ self.flakes('''
+ x = 10
+ y = x * 2
+ x = 20
+ ''')
+
+ def test_comparison(self):
+ """
+ If a defined name is used on either side of any of the six comparison
+ operators, no warning is emitted.
+ """
+ self.flakes('''
+ x = 10
+ y = 20
+ x < y
+ x <= y
+ x == y
+ x != y
+ x >= y
+ x > y
+ ''')
+
+ def test_identity(self):
+ """
+ If a defined name is used on either side of an identity test, no
+ warning is emitted.
+ """
+ self.flakes('''
+ x = 10
+ y = 20
+ x is y
+ x is not y
+ ''')
+
+ def test_containment(self):
+ """
+ If a defined name is used on either side of a containment test, no
+ warning is emitted.
+ """
+ self.flakes('''
+ x = 10
+ y = 20
+ x in y
+ x not in y
+ ''')
+
+ def test_loopControl(self):
+ """
+ break and continue statements are supported.
+ """
+ self.flakes('''
+ for x in [1, 2]:
+ break
+ ''')
+ self.flakes('''
+ for x in [1, 2]:
+ continue
+ ''')
+
+ def test_ellipsis(self):
+ """
+ Ellipsis in a slice is supported.
+ """
+ self.flakes('''
+ [1, 2][...]
+ ''')
+
+ def test_extendedSlice(self):
+ """
+ Extended slices are supported.
+ """
+ self.flakes('''
+ x = 3
+ [1, 2][x,:]
+ ''')
+
+ def test_varAugmentedAssignment(self):
+ """
+ Augmented assignment of a variable is supported.
+ We don't care about var refs.
+ """
+ self.flakes('''
+ foo = 0
+ foo += 1
+ ''')
+
+ def test_attrAugmentedAssignment(self):
+ """
+ Augmented assignment of attributes is supported.
+ We don't care about attr refs.
+ """
+ self.flakes('''
+ foo = None
+ foo.bar += foo.baz
+ ''')
+
+ def test_globalDeclaredInDifferentScope(self):
+ """
+ A 'global' can be declared in one scope and reused in another.
+ """
+ self.flakes('''
+ def f(): global foo
+ def g(): foo = 'anything'; foo.is_used()
+ ''')
+
+ def test_function_arguments(self):
+ """
+        Test to traverse the ARG and ARGUMENT handlers.
+ """
+ self.flakes('''
+ def foo(a, b):
+ pass
+ ''')
+
+ self.flakes('''
+ def foo(a, b, c=0):
+ pass
+ ''')
+
+ self.flakes('''
+ def foo(a, b, c=0, *args):
+ pass
+ ''')
+
+ self.flakes('''
+ def foo(a, b, c=0, *args, **kwargs):
+ pass
+ ''')
+
+ @skipIf(version_info < (3, 3), "Python >= 3.3 only")
+ def test_function_arguments_python3(self):
+ self.flakes('''
+ def foo(a, b, c=0, *args, d=0, **kwargs):
+ pass
+ ''')
+
+
+class TestUnusedAssignment(TestCase):
+ """
+ Tests for warning about unused assignments.
+ """
+
+ def test_unusedVariable(self):
+ """
+ Warn when a variable in a function is assigned a value that's never
+ used.
+ """
+ self.flakes('''
+ def a():
+ b = 1
+ ''', m.UnusedVariable)
+
+ def test_unusedUnderscoreVariable(self):
+ """
+ Don't warn when the magic "_" (underscore) variable is unused.
+ See issue #202.
+ """
+ self.flakes('''
+ def a(unused_param):
+ _ = unused_param
+ ''')
+
+ def test_unusedVariableAsLocals(self):
+ """
+        When locals() is used, it is perfectly valid to have unused variables.
+ """
+ self.flakes('''
+ def a():
+ b = 1
+ return locals()
+ ''')
+
+ def test_unusedVariableNoLocals(self):
+ """
+        Using locals() in the wrong scope should not matter.
+ """
+ self.flakes('''
+ def a():
+ locals()
+ def a():
+ b = 1
+ return
+ ''', m.UnusedVariable)
+
+ @skip("todo: Difficult because it doesn't apply in the context of a loop")
+ def test_unusedReassignedVariable(self):
+ """
+ Shadowing a used variable can still raise an UnusedVariable warning.
+ """
+ self.flakes('''
+ def a():
+ b = 1
+ b.foo()
+ b = 2
+ ''', m.UnusedVariable)
+
+ def test_variableUsedInLoop(self):
+ """
+ Shadowing a used variable cannot raise an UnusedVariable warning in the
+ context of a loop.
+ """
+ self.flakes('''
+ def a():
+ b = True
+ while b:
+ b = False
+ ''')
+
+ def test_assignToGlobal(self):
+ """
+ Assigning to a global and then not using that global is perfectly
+ acceptable. Do not mistake it for an unused local variable.
+ """
+ self.flakes('''
+ b = 0
+ def a():
+ global b
+ b = 1
+ ''')
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_assignToNonlocal(self):
+ """
+ Assigning to a nonlocal and then not using that binding is perfectly
+ acceptable. Do not mistake it for an unused local variable.
+ """
+ self.flakes('''
+ b = b'0'
+ def a():
+ nonlocal b
+ b = b'1'
+ ''')
+
+ def test_assignToMember(self):
+ """
+ Assigning to a member of another object and then not using that member
+ variable is perfectly acceptable. Do not mistake it for an unused
+ local variable.
+ """
+ # XXX: Adding this test didn't generate a failure. Maybe not
+ # necessary?
+ self.flakes('''
+ class b:
+ pass
+ def a():
+ b.foo = 1
+ ''')
+
+ def test_assignInForLoop(self):
+ """
+ Don't warn when a variable in a for loop is assigned to but not used.
+ """
+ self.flakes('''
+ def f():
+ for i in range(10):
+ pass
+ ''')
+
+ def test_assignInListComprehension(self):
+ """
+ Don't warn when a variable in a list comprehension is
+ assigned to but not used.
+ """
+ self.flakes('''
+ def f():
+ [None for i in range(10)]
+ ''')
+
+ def test_generatorExpression(self):
+ """
+ Don't warn when a variable in a generator expression is
+ assigned to but not used.
+ """
+ self.flakes('''
+ def f():
+ (None for i in range(10))
+ ''')
+
+ def test_assignmentInsideLoop(self):
+ """
+ Don't warn when a variable assignment occurs lexically after its use.
+ """
+ self.flakes('''
+ def f():
+ x = None
+ for i in range(10):
+ if i > 2:
+ return x
+ x = i * 2
+ ''')
+
+ def test_tupleUnpacking(self):
+ """
+ Don't warn when a variable included in tuple unpacking is unused. It's
+ very common for variables in a tuple unpacking assignment to be unused
+        in good Python code, so warning would only create false positives.
+ """
+ self.flakes('''
+ def f(tup):
+ (x, y) = tup
+ ''')
+ self.flakes('''
+ def f():
+ (x, y) = 1, 2
+ ''', m.UnusedVariable, m.UnusedVariable)
+ self.flakes('''
+ def f():
+ (x, y) = coords = 1, 2
+ if x > 1:
+ print(coords)
+ ''')
+ self.flakes('''
+ def f():
+ (x, y) = coords = 1, 2
+ ''', m.UnusedVariable)
+ self.flakes('''
+ def f():
+ coords = (x, y) = 1, 2
+ ''', m.UnusedVariable)
+
+ def test_listUnpacking(self):
+ """
+ Don't warn when a variable included in list unpacking is unused.
+ """
+ self.flakes('''
+ def f(tup):
+ [x, y] = tup
+ ''')
+ self.flakes('''
+ def f():
+ [x, y] = [1, 2]
+ ''', m.UnusedVariable, m.UnusedVariable)
+
+ def test_closedOver(self):
+ """
+ Don't warn when the assignment is used in an inner function.
+ """
+ self.flakes('''
+ def barMaker():
+ foo = 5
+ def bar():
+ return foo
+ return bar
+ ''')
+
+ def test_doubleClosedOver(self):
+ """
+ Don't warn when the assignment is used in an inner function, even if
+ that inner function itself is in an inner function.
+ """
+ self.flakes('''
+ def barMaker():
+ foo = 5
+ def bar():
+ def baz():
+ return foo
+ return bar
+ ''')
+
+ def test_tracebackhideSpecialVariable(self):
+ """
+ Do not warn about unused local variable __tracebackhide__, which is
+ a special variable for py.test.
+ """
+ self.flakes("""
+ def helper():
+ __tracebackhide__ = True
+ """)
+
+ def test_ifexp(self):
+ """
+ Test C{foo if bar else baz} statements.
+ """
+ self.flakes("a = 'moo' if True else 'oink'")
+ self.flakes("a = foo if True else 'oink'", m.UndefinedName)
+ self.flakes("a = 'moo' if True else bar", m.UndefinedName)
+
+ def test_if_tuple(self):
+ """
+ Test C{if (foo,)} conditions.
+ """
+ self.flakes("""if (): pass""")
+ self.flakes("""
+ if (
+ True
+ ):
+ pass
+ """)
+ self.flakes("""
+ if (
+ True,
+ ):
+ pass
+ """, m.IfTuple)
+ self.flakes("""
+ x = 1 if (
+ True,
+ ) else 2
+ """, m.IfTuple)
+
+ def test_withStatementNoNames(self):
+ """
+        No warnings are emitted when a name defined beforehand is used inside
+        or after a nameless C{with} statement.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ bar = None
+ with open("foo"):
+ bar
+ bar
+ ''')
+
+ def test_withStatementSingleName(self):
+ """
+ No warnings are emitted for using a name defined by a C{with} statement
+ within the suite or afterwards.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ with open('foo') as bar:
+ bar
+ bar
+ ''')
+
+ def test_withStatementAttributeName(self):
+ """
+ No warnings are emitted for using an attribute as the target of a
+ C{with} statement.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ import foo
+ with open('foo') as foo.bar:
+ pass
+ ''')
+
+ def test_withStatementSubscript(self):
+ """
+ No warnings are emitted for using a subscript as the target of a
+ C{with} statement.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ import foo
+ with open('foo') as foo[0]:
+ pass
+ ''')
+
+ def test_withStatementSubscriptUndefined(self):
+ """
+ An undefined name warning is emitted if the subscript used as the
+ target of a C{with} statement is not defined.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ import foo
+ with open('foo') as foo[bar]:
+ pass
+ ''', m.UndefinedName)
+
+ def test_withStatementTupleNames(self):
+ """
+ No warnings are emitted for using any of the tuple of names defined by
+ a C{with} statement within the suite or afterwards.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ with open('foo') as (bar, baz):
+ bar, baz
+ bar, baz
+ ''')
+
+ def test_withStatementListNames(self):
+ """
+ No warnings are emitted for using any of the list of names defined by a
+ C{with} statement within the suite or afterwards.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ with open('foo') as [bar, baz]:
+ bar, baz
+ bar, baz
+ ''')
+
+ def test_withStatementComplicatedTarget(self):
+ """
+ If the target of a C{with} statement uses any or all of the valid forms
+ for that part of the grammar (See
+ U{http://docs.python.org/reference/compound_stmts.html#the-with-statement}),
+ the names involved are checked both for definedness and any bindings
+ created are respected in the suite of the statement and afterwards.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ c = d = e = g = h = i = None
+ with open('foo') as [(a, b), c[d], e.f, g[h:i]]:
+ a, b, c, d, e, g, h, i
+ a, b, c, d, e, g, h, i
+ ''')
+
+ def test_withStatementSingleNameUndefined(self):
+ """
+ An undefined name warning is emitted if the name first defined by a
+ C{with} statement is used before the C{with} statement.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ bar
+ with open('foo') as bar:
+ pass
+ ''', m.UndefinedName)
+
+ def test_withStatementTupleNamesUndefined(self):
+ """
+ An undefined name warning is emitted if a name first defined by the
+ tuple-unpacking form of the C{with} statement is used before the
+ C{with} statement.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ baz
+ with open('foo') as (bar, baz):
+ pass
+ ''', m.UndefinedName)
+
+ def test_withStatementSingleNameRedefined(self):
+ """
+ A redefined name warning is emitted if a name bound by an import is
+ rebound by the name defined by a C{with} statement.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ import bar
+ with open('foo') as bar:
+ pass
+ ''', m.RedefinedWhileUnused)
+
+ def test_withStatementTupleNamesRedefined(self):
+ """
+ A redefined name warning is emitted if a name bound by an import is
+ rebound by one of the names defined by the tuple-unpacking form of a
+ C{with} statement.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ import bar
+ with open('foo') as (bar, baz):
+ pass
+ ''', m.RedefinedWhileUnused)
+
+ def test_withStatementUndefinedInside(self):
+ """
+ An undefined name warning is emitted if a name is used inside the
+ body of a C{with} statement without first being bound.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ with open('foo') as bar:
+ baz
+ ''', m.UndefinedName)
+
+ def test_withStatementNameDefinedInBody(self):
+ """
+ A name defined in the body of a C{with} statement can be used after
+ the body ends without warning.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ with open('foo') as bar:
+ baz = 10
+ baz
+ ''')
+
+ def test_withStatementUndefinedInExpression(self):
+ """
+ An undefined name warning is emitted if a name in the I{test}
+ expression of a C{with} statement is undefined.
+ """
+ self.flakes('''
+ from __future__ import with_statement
+ with bar as baz:
+ pass
+ ''', m.UndefinedName)
+
+ self.flakes('''
+ from __future__ import with_statement
+ with bar as bar:
+ pass
+ ''', m.UndefinedName)
+
+ def test_dictComprehension(self):
+ """
+ Dict comprehensions are properly handled.
+ """
+ self.flakes('''
+ a = {1: x for x in range(10)}
+ ''')
+
+ def test_setComprehensionAndLiteral(self):
+ """
+ Set comprehensions are properly handled.
+ """
+ self.flakes('''
+ a = {1, 2, 3}
+ b = {x for x in range(10)}
+ ''')
+
+ def test_exceptionUsedInExcept(self):
+ self.flakes('''
+ try: pass
+ except Exception as e: e
+ ''')
+
+ self.flakes('''
+ def download_review():
+ try: pass
+ except Exception as e: e
+ ''')
+
+ @skipIf(version_info < (3,),
+ "In Python 2 exception names stay bound after the exception handler")
+ def test_exceptionUnusedInExcept(self):
+ self.flakes('''
+ try: pass
+ except Exception as e: pass
+ ''', m.UnusedVariable)
+
+ def test_exceptionUnusedInExceptInFunction(self):
+ self.flakes('''
+ def download_review():
+ try: pass
+ except Exception as e: pass
+ ''', m.UnusedVariable)
+
+ def test_exceptWithoutNameInFunction(self):
+ """
+ Don't issue a false warning when an unnamed exception is used.
+ Previously, a false warning would be issued, but only when the
+ try..except was inside a function.
+ """
+ self.flakes('''
+ import tokenize
+ def foo():
+ try: pass
+ except tokenize.TokenError: pass
+ ''')
+
+ def test_exceptWithoutNameInFunctionTuple(self):
+ """
+ Don't issue a false warning when an unnamed exception is used.
+ This example catches a tuple of exception types.
+ """
+ self.flakes('''
+ import tokenize
+ def foo():
+ try: pass
+ except (tokenize.TokenError, IndentationError): pass
+ ''')
+
+ def test_augmentedAssignmentImportedFunctionCall(self):
+ """
+ Consider a function that is called on the right-hand side of an
+ augmented assignment (augassign) operation to be used.
+ """
+ self.flakes('''
+ from foo import bar
+ baz = 0
+ baz += bar()
+ ''')
+
+ def test_assert_without_message(self):
+ """An assert without a message is not an error."""
+ self.flakes('''
+ a = 1
+ assert a
+ ''')
+
+ def test_assert_with_message(self):
+ """An assert with a message is not an error."""
+ self.flakes('''
+ a = 1
+ assert a, 'x'
+ ''')
+
+ def test_assert_tuple(self):
+ """An assert of a non-empty tuple is always True."""
+ self.flakes('''
+ assert (False, 'x')
+ assert (False, )
+ ''', m.AssertTuple, m.AssertTuple)
+
+ def test_assert_tuple_empty(self):
+ """An assert of an empty tuple is always False."""
+ self.flakes('''
+ assert ()
+ ''')
+
+ def test_assert_static(self):
+ """An assert of a static value is not an error."""
+ self.flakes('''
+ assert True
+ assert 1
+ ''')
+
+ @skipIf(version_info < (3, 3), 'new in Python 3.3')
+ def test_yieldFromUndefined(self):
+ """
+ Test C{yield from} statement
+ """
+ self.flakes('''
+ def bar():
+ yield from foo()
+ ''', m.UndefinedName)
+
+ @skipIf(version_info < (3, 6), 'new in Python 3.6')
+ def test_f_string(self):
+ """Test PEP 498 f-strings are treated as a usage."""
+ self.flakes('''
+ baz = 0
+ print(f'\x7b4*baz\N{RIGHT CURLY BRACKET}')
+ ''')
+
+ @skipIf(version_info < (3, 8), 'new in Python 3.8')
+ def test_assign_expr(self):
+ """Test PEP 572 assignment expressions are treated as usage / write."""
+ self.flakes('''
+ from foo import y
+ print(x := y)
+ print(x)
+ ''')
+
+
+class TestStringFormatting(TestCase):
+
+ @skipIf(version_info < (3, 6), 'new in Python 3.6')
+ def test_f_string_without_placeholders(self):
+ self.flakes("f'foo'", m.FStringMissingPlaceholders)
+ self.flakes('''
+ f"""foo
+ bar
+ """
+ ''', m.FStringMissingPlaceholders)
+ self.flakes('''
+ print(
+ f'foo'
+ f'bar'
+ )
+ ''', m.FStringMissingPlaceholders)
+ # this is an "escaped placeholder" but not a placeholder
+ self.flakes("f'{{}}'", m.FStringMissingPlaceholders)
+ # ok: f-string with placeholders
+ self.flakes('''
+ x = 5
+ print(f'{x}')
+ ''')
+ # ok: f-string with format specifiers
+ self.flakes('''
+ x = 'a' * 90
+ print(f'{x:.8}')
+ ''')
+ # ok: f-string with multiple format specifiers
+ self.flakes('''
+ x = y = 5
+ print(f'{x:>2} {y:>2}')
+ ''')
+
+ def test_invalid_dot_format_calls(self):
+ self.flakes('''
+ '{'.format(1)
+ ''', m.StringDotFormatInvalidFormat)
+ self.flakes('''
+ '{} {1}'.format(1, 2)
+ ''', m.StringDotFormatMixingAutomatic)
+ self.flakes('''
+ '{0} {}'.format(1, 2)
+ ''', m.StringDotFormatMixingAutomatic)
+ self.flakes('''
+ '{}'.format(1, 2)
+ ''', m.StringDotFormatExtraPositionalArguments)
+ self.flakes('''
+ '{}'.format(1, bar=2)
+ ''', m.StringDotFormatExtraNamedArguments)
+ self.flakes('''
+ '{} {}'.format(1)
+ ''', m.StringDotFormatMissingArgument)
+ self.flakes('''
+ '{2}'.format()
+ ''', m.StringDotFormatMissingArgument)
+ self.flakes('''
+ '{bar}'.format()
+ ''', m.StringDotFormatMissingArgument)
+ # too much string recursion (placeholder-in-placeholder)
+ self.flakes('''
+ '{:{:{}}}'.format(1, 2, 3)
+ ''', m.StringDotFormatInvalidFormat)
+ # ok: dotted / bracketed names need to handle the param differently
+ self.flakes("'{.__class__}'.format('')")
+ self.flakes("'{foo[bar]}'.format(foo={'bar': 'barv'})")
+ # ok: placeholder-placeholders
+ self.flakes('''
+ print('{:{}} {}'.format(1, 15, 2))
+ ''')
+ # ok: not a placeholder-placeholder
+ self.flakes('''
+ print('{:2}'.format(1))
+ ''')
+ # ok: not mixed automatic
+ self.flakes('''
+ '{foo}-{}'.format(1, foo=2)
+ ''')
+ # ok: we can't determine statically the format args
+ self.flakes('''
+ a = ()
+ "{}".format(*a)
+ ''')
+ self.flakes('''
+ k = {}
+ "{foo}".format(**k)
+ ''')
+
+ def test_invalid_percent_format_calls(self):
+ self.flakes('''
+ '%(foo)' % {'foo': 'bar'}
+ ''', m.PercentFormatInvalidFormat)
+ self.flakes('''
+ '%s %(foo)s' % {'foo': 'bar'}
+ ''', m.PercentFormatMixedPositionalAndNamed)
+ self.flakes('''
+ '%(foo)s %s' % {'foo': 'bar'}
+ ''', m.PercentFormatMixedPositionalAndNamed)
+ self.flakes('''
+ '%j' % (1,)
+ ''', m.PercentFormatUnsupportedFormatCharacter)
+ self.flakes('''
+ '%s %s' % (1,)
+ ''', m.PercentFormatPositionalCountMismatch)
+ self.flakes('''
+ '%s %s' % (1, 2, 3)
+ ''', m.PercentFormatPositionalCountMismatch)
+ self.flakes('''
+ '%(bar)s' % {}
+ ''', m.PercentFormatMissingArgument,)
+ self.flakes('''
+ '%(bar)s' % {'bar': 1, 'baz': 2}
+ ''', m.PercentFormatExtraNamedArguments)
+ self.flakes('''
+ '%(bar)s' % (1, 2, 3)
+ ''', m.PercentFormatExpectedMapping)
+ self.flakes('''
+ '%s %s' % {'k': 'v'}
+ ''', m.PercentFormatExpectedSequence)
+ self.flakes('''
+ '%(bar)*s' % {'bar': 'baz'}
+ ''', m.PercentFormatStarRequiresSequence)
+ # ok: single %s with mapping
+ self.flakes('''
+ '%s' % {'foo': 'bar', 'baz': 'womp'}
+ ''')
+ # ok: does not cause a MemoryError (the strings aren't evaluated)
+ self.flakes('''
+ "%1000000000000f" % 1
+ ''')
+ # ok: %% should not count towards placeholder count
+ self.flakes('''
+ '%% %s %% %s' % (1, 2)
+ ''')
+ # ok: * consumes one positional argument
+ self.flakes('''
+ '%.*f' % (2, 1.1234)
+ '%*.*f' % (5, 2, 3.1234)
+ ''')
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_ok_percent_format_cannot_determine_element_count(self):
+ self.flakes('''
+ a = []
+ '%s %s' % [*a]
+ '%s %s' % (*a,)
+ ''')
+ self.flakes('''
+ k = {}
+ '%(k)s' % {**k}
+ ''')
+
+
+class TestAsyncStatements(TestCase):
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_asyncDef(self):
+ self.flakes('''
+ async def bar():
+ return 42
+ ''')
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_asyncDefAwait(self):
+ self.flakes('''
+ async def read_data(db):
+ await db.fetch('SELECT ...')
+ ''')
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_asyncDefUndefined(self):
+ self.flakes('''
+ async def bar():
+ return foo()
+ ''', m.UndefinedName)
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_asyncFor(self):
+ self.flakes('''
+ async def read_data(db):
+ output = []
+ async for row in db.cursor():
+ output.append(row)
+ return output
+ ''')
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_asyncForUnderscoreLoopVar(self):
+ self.flakes('''
+ async def coro(it):
+ async for _ in it:
+ pass
+ ''')
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_loopControlInAsyncFor(self):
+ self.flakes('''
+ async def read_data(db):
+ output = []
+ async for row in db.cursor():
+ if row[0] == 'skip':
+ continue
+ output.append(row)
+ return output
+ ''')
+
+ self.flakes('''
+ async def read_data(db):
+ output = []
+ async for row in db.cursor():
+ if row[0] == 'stop':
+ break
+ output.append(row)
+ return output
+ ''')
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_loopControlInAsyncForElse(self):
+ self.flakes('''
+ async def read_data(db):
+ output = []
+ async for row in db.cursor():
+ output.append(row)
+ else:
+ continue
+ return output
+ ''', m.ContinueOutsideLoop)
+
+ self.flakes('''
+ async def read_data(db):
+ output = []
+ async for row in db.cursor():
+ output.append(row)
+ else:
+ break
+ return output
+ ''', m.BreakOutsideLoop)
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ @skipIf(version_info > (3, 8), "Python <= 3.8 only")
+ def test_continueInAsyncForFinally(self):
+ self.flakes('''
+ async def read_data(db):
+ output = []
+ async for row in db.cursor():
+ try:
+ output.append(row)
+ finally:
+ continue
+ return output
+ ''', m.ContinueInFinally)
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_asyncWith(self):
+ self.flakes('''
+ async def commit(session, data):
+ async with session.transaction():
+ await session.update(data)
+ ''')
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_asyncWithItem(self):
+ self.flakes('''
+ async def commit(session, data):
+ async with session.transaction() as trans:
+ await trans.begin()
+ ...
+ await trans.end()
+ ''')
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_matmul(self):
+ self.flakes('''
+ def foo(a, b):
+ return a @ b
+ ''')
+
+ @skipIf(version_info < (3, 6), 'new in Python 3.6')
+ def test_formatstring(self):
+ self.flakes('''
+ hi = 'hi'
+ mom = 'mom'
+ f'{hi} {mom}'
+ ''')
+
+ def test_raise_notimplemented(self):
+ self.flakes('''
+ raise NotImplementedError("This is fine")
+ ''')
+
+ self.flakes('''
+ raise NotImplementedError
+ ''')
+
+ self.flakes('''
+ raise NotImplemented("This isn't gonna work")
+ ''', m.RaiseNotImplemented)
+
+ self.flakes('''
+ raise NotImplemented
+ ''', m.RaiseNotImplemented)
+
+
+class TestIncompatiblePrintOperator(TestCase):
+ """
+ Tests for warning about invalid use of the print function.
+ """
+
+ def test_valid_print(self):
+ self.flakes('''
+ print("Hello")
+ ''')
+
+ def test_invalid_print_when_imported_from_future(self):
+ exc = self.flakes('''
+ from __future__ import print_function
+ import sys
+ print >>sys.stderr, "Hello"
+ ''', m.InvalidPrintSyntax).messages[0]
+
+ self.assertEqual(exc.lineno, 4)
+ self.assertEqual(exc.col, 0)
+
+ def test_print_function_assignment(self):
+ """
+ A valid assignment, tested for catching false positives.
+ """
+ self.flakes('''
+ from __future__ import print_function
+ log = print
+ log("Hello")
+ ''')
+
+ def test_print_in_lambda(self):
+ self.flakes('''
+ from __future__ import print_function
+ a = lambda: print
+ ''')
+
+ def test_print_returned_in_function(self):
+ self.flakes('''
+ from __future__ import print_function
+ def a():
+ return print
+ ''')
+
+ def test_print_as_condition_test(self):
+ self.flakes('''
+ from __future__ import print_function
+ if print: pass
+ ''')
diff --git a/third_party/python/pyflakes/pyflakes/test/test_return_with_arguments_inside_generator.py b/third_party/python/pyflakes/pyflakes/test/test_return_with_arguments_inside_generator.py
new file mode 100644
index 0000000000..fc1272a936
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/test_return_with_arguments_inside_generator.py
@@ -0,0 +1,34 @@
+
+from sys import version_info
+
+from pyflakes import messages as m
+from pyflakes.test.harness import TestCase, skipIf
+
+
+class Test(TestCase):
+ @skipIf(version_info >= (3, 3), 'new in Python 3.3')
+ def test_return(self):
+ self.flakes('''
+ class a:
+ def b():
+ for x in a.c:
+ if x:
+ yield x
+ return a
+ ''', m.ReturnWithArgsInsideGenerator)
+
+ @skipIf(version_info >= (3, 3), 'new in Python 3.3')
+ def test_returnNone(self):
+ self.flakes('''
+ def a():
+ yield 12
+ return None
+ ''', m.ReturnWithArgsInsideGenerator)
+
+ @skipIf(version_info >= (3, 3), 'new in Python 3.3')
+ def test_returnYieldExpression(self):
+ self.flakes('''
+ def a():
+ b = yield a
+ return b
+ ''', m.ReturnWithArgsInsideGenerator)
diff --git a/third_party/python/pyflakes/pyflakes/test/test_type_annotations.py b/third_party/python/pyflakes/pyflakes/test/test_type_annotations.py
new file mode 100644
index 0000000000..ed28127ed7
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/test_type_annotations.py
@@ -0,0 +1,554 @@
+"""
+Tests for behaviour related to type annotations.
+"""
+
+from sys import version_info
+
+from pyflakes import messages as m
+from pyflakes.test.harness import TestCase, skipIf
+
+
+class TestTypeAnnotations(TestCase):
+
+ def test_typingOverload(self):
+ """Allow intentional redefinitions via @typing.overload"""
+ self.flakes("""
+ import typing
+ from typing import overload
+
+ @overload
+ def f(s): # type: (None) -> None
+ pass
+
+ @overload
+ def f(s): # type: (int) -> int
+ pass
+
+ def f(s):
+ return s
+
+ @typing.overload
+ def g(s): # type: (None) -> None
+ pass
+
+ @typing.overload
+ def g(s): # type: (int) -> int
+ pass
+
+ def g(s):
+ return s
+ """)
+
+ def test_typingExtensionsOverload(self):
+ """Allow intentional redefinitions via @typing_extensions.overload"""
+ self.flakes("""
+ import typing_extensions
+ from typing_extensions import overload
+
+ @overload
+ def f(s): # type: (None) -> None
+ pass
+
+ @overload
+ def f(s): # type: (int) -> int
+ pass
+
+ def f(s):
+ return s
+
+ @typing_extensions.overload
+ def g(s): # type: (None) -> None
+ pass
+
+ @typing_extensions.overload
+ def g(s): # type: (int) -> int
+ pass
+
+ def g(s):
+ return s
+ """)
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_typingOverloadAsync(self):
+ """Allow intentional redefinitions via @typing.overload (async)"""
+ self.flakes("""
+ from typing import overload
+
+ @overload
+ async def f(s): # type: (None) -> None
+ pass
+
+ @overload
+ async def f(s): # type: (int) -> int
+ pass
+
+ async def f(s):
+ return s
+ """)
+
+ def test_overload_with_multiple_decorators(self):
+ self.flakes("""
+ from typing import overload
+ dec = lambda f: f
+
+ @dec
+ @overload
+ def f(x): # type: (int) -> int
+ pass
+
+ @dec
+ @overload
+ def f(x): # type: (str) -> str
+ pass
+
+ @dec
+ def f(x): return x
+ """)
+
+ def test_overload_in_class(self):
+ self.flakes("""
+ from typing import overload
+
+ class C:
+ @overload
+ def f(self, x): # type: (int) -> int
+ pass
+
+ @overload
+ def f(self, x): # type: (str) -> str
+ pass
+
+ def f(self, x): return x
+ """)
+
+ def test_not_a_typing_overload(self):
+ """regression test for @typing.overload detection bug in 2.1.0"""
+ self.flakes("""
+ def foo(x):
+ return x
+
+ @foo
+ def bar():
+ pass
+
+ def bar():
+ pass
+ """, m.RedefinedWhileUnused)
+
+ @skipIf(version_info < (3, 6), 'new in Python 3.6')
+ def test_variable_annotations(self):
+ self.flakes('''
+ name: str
+ age: int
+ ''')
+ self.flakes('''
+ name: str = 'Bob'
+ age: int = 18
+ ''')
+ self.flakes('''
+ class C:
+ name: str
+ age: int
+ ''')
+ self.flakes('''
+ class C:
+ name: str = 'Bob'
+ age: int = 18
+ ''')
+ self.flakes('''
+ def f():
+ name: str
+ age: int
+ ''')
+ self.flakes('''
+ def f():
+ name: str = 'Bob'
+ age: int = 18
+ foo: not_a_real_type = None
+ ''', m.UnusedVariable, m.UnusedVariable, m.UnusedVariable, m.UndefinedName)
+ self.flakes('''
+ def f():
+ name: str
+ print(name)
+ ''', m.UndefinedName)
+ self.flakes('''
+ from typing import Any
+ def f():
+ a: Any
+ ''')
+ self.flakes('''
+ foo: not_a_real_type
+ ''', m.UndefinedName)
+ self.flakes('''
+ foo: not_a_real_type = None
+ ''', m.UndefinedName)
+ self.flakes('''
+ class C:
+ foo: not_a_real_type
+ ''', m.UndefinedName)
+ self.flakes('''
+ class C:
+ foo: not_a_real_type = None
+ ''', m.UndefinedName)
+ self.flakes('''
+ def f():
+ class C:
+ foo: not_a_real_type
+ ''', m.UndefinedName)
+ self.flakes('''
+ def f():
+ class C:
+ foo: not_a_real_type = None
+ ''', m.UndefinedName)
+ self.flakes('''
+ from foo import Bar
+ bar: Bar
+ ''')
+ self.flakes('''
+ from foo import Bar
+ bar: 'Bar'
+ ''')
+ self.flakes('''
+ import foo
+ bar: foo.Bar
+ ''')
+ self.flakes('''
+ import foo
+ bar: 'foo.Bar'
+ ''')
+ self.flakes('''
+ from foo import Bar
+ def f(bar: Bar): pass
+ ''')
+ self.flakes('''
+ from foo import Bar
+ def f(bar: 'Bar'): pass
+ ''')
+ self.flakes('''
+ from foo import Bar
+ def f(bar) -> Bar: return bar
+ ''')
+ self.flakes('''
+ from foo import Bar
+ def f(bar) -> 'Bar': return bar
+ ''')
+ self.flakes('''
+ bar: 'Bar'
+ ''', m.UndefinedName)
+ self.flakes('''
+ bar: 'foo.Bar'
+ ''', m.UndefinedName)
+ self.flakes('''
+ from foo import Bar
+ bar: str
+ ''', m.UnusedImport)
+ self.flakes('''
+ from foo import Bar
+ def f(bar: str): pass
+ ''', m.UnusedImport)
+ self.flakes('''
+ def f(a: A) -> A: pass
+ class A: pass
+ ''', m.UndefinedName, m.UndefinedName)
+ self.flakes('''
+ def f(a: 'A') -> 'A': return a
+ class A: pass
+ ''')
+ self.flakes('''
+ a: A
+ class A: pass
+ ''', m.UndefinedName)
+ self.flakes('''
+ a: 'A'
+ class A: pass
+ ''')
+ self.flakes('''
+ a: 'A B'
+ ''', m.ForwardAnnotationSyntaxError)
+ self.flakes('''
+ a: 'A; B'
+ ''', m.ForwardAnnotationSyntaxError)
+ self.flakes('''
+ a: '1 + 2'
+ ''')
+ self.flakes('''
+ a: 'a: "A"'
+ ''', m.ForwardAnnotationSyntaxError)
+
+ @skipIf(version_info < (3, 5), 'new in Python 3.5')
+ def test_annotated_async_def(self):
+ self.flakes('''
+ class c: pass
+ async def func(c: c) -> None: pass
+ ''')
+
+ @skipIf(version_info < (3, 7), 'new in Python 3.7')
+ def test_postponed_annotations(self):
+ self.flakes('''
+ from __future__ import annotations
+ def f(a: A) -> A: pass
+ class A:
+ b: B
+ class B: pass
+ ''')
+
+ self.flakes('''
+ from __future__ import annotations
+ def f(a: A) -> A: pass
+ class A:
+ b: Undefined
+ class B: pass
+ ''', m.UndefinedName)
+
+ def test_typeCommentsMarkImportsAsUsed(self):
+ self.flakes("""
+ from mod import A, B, C, D, E, F, G
+
+
+ def f(
+ a, # type: A
+ ):
+ # type: (...) -> B
+ for b in a: # type: C
+ with b as c: # type: D
+ d = c.x # type: E
+ return d
+
+
+ def g(x): # type: (F) -> G
+ return x.y
+ """)
+
+ def test_typeCommentsFullSignature(self):
+ self.flakes("""
+ from mod import A, B, C, D
+ def f(a, b):
+ # type: (A, B[C]) -> D
+ return a + b
+ """)
+
+ def test_typeCommentsStarArgs(self):
+ self.flakes("""
+ from mod import A, B, C, D
+ def f(a, *b, **c):
+ # type: (A, *B, **C) -> D
+ return a + b
+ """)
+
+ def test_typeCommentsFullSignatureWithDocstring(self):
+ self.flakes('''
+ from mod import A, B, C, D
+ def f(a, b):
+ # type: (A, B[C]) -> D
+ """do the thing!"""
+ return a + b
+ ''')
+
+ def test_typeCommentsAdditionalComment(self):
+ self.flakes("""
+ from mod import F
+
+ x = 1 # type: F # noqa
+ """)
+
+ def test_typeCommentsNoWhitespaceAnnotation(self):
+ self.flakes("""
+ from mod import F
+
+ x = 1 #type:F
+ """)
+
+ def test_typeCommentsInvalidDoesNotMarkAsUsed(self):
+ self.flakes("""
+ from mod import F
+
+ # type: F
+ """, m.UnusedImport)
+
+ def test_typeCommentsSyntaxError(self):
+ self.flakes("""
+ def f(x): # type: (F[) -> None
+ pass
+ """, m.CommentAnnotationSyntaxError)
+
+ def test_typeCommentsSyntaxErrorCorrectLine(self):
+ checker = self.flakes("""\
+ x = 1
+ # type: definitely not a PEP 484 comment
+ """, m.CommentAnnotationSyntaxError)
+ self.assertEqual(checker.messages[0].lineno, 2)
+
+ def test_typeCommentsAssignedToPreviousNode(self):
+ # This test demonstrates an issue in the implementation which
+ # associates the type comment with a node above it; however, the type
+ # comment isn't valid according to mypy. If an improved approach
+ # which can detect these "invalid" type comments is implemented, this
+ # test should be removed / improved to assert that new check.
+ self.flakes("""
+ from mod import F
+ x = 1
+ # type: F
+ """)
+
+ def test_typeIgnore(self):
+ self.flakes("""
+ a = 0 # type: ignore
+ b = 0 # type: ignore[excuse]
+ c = 0 # type: ignore=excuse
+ d = 0 # type: ignore [excuse]
+ e = 0 # type: ignore whatever
+ """)
+
+ def test_typeIgnoreBogus(self):
+ self.flakes("""
+ x = 1 # type: ignored
+ """, m.UndefinedName)
+
+ def test_typeIgnoreBogusUnicode(self):
+ error = (m.CommentAnnotationSyntaxError if version_info < (3,)
+ else m.UndefinedName)
+ self.flakes("""
+ x = 2 # type: ignore\xc3
+ """, error)
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_return_annotation_is_class_scope_variable(self):
+ self.flakes("""
+ from typing import TypeVar
+ class Test:
+ Y = TypeVar('Y')
+
+ def t(self, x: Y) -> Y:
+ return x
+ """)
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_return_annotation_is_function_body_variable(self):
+ self.flakes("""
+ class Test:
+ def t(self) -> Y:
+ Y = 2
+ return Y
+ """, m.UndefinedName)
+
+ @skipIf(version_info < (3, 8), 'new in Python 3.8')
+ def test_positional_only_argument_annotations(self):
+ self.flakes("""
+ from x import C
+
+ def f(c: C, /): ...
+ """)
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_partially_quoted_type_annotation(self):
+ self.flakes("""
+ from queue import Queue
+ from typing import Optional
+
+ def f() -> Optional['Queue[str]']:
+ return None
+ """)
+
+ def test_partially_quoted_type_assignment(self):
+ self.flakes("""
+ from queue import Queue
+ from typing import Optional
+
+ MaybeQueue = Optional['Queue[str]']
+ """)
+
+ def test_nested_partially_quoted_type_assignment(self):
+ self.flakes("""
+ from queue import Queue
+ from typing import Callable
+
+ Func = Callable[['Queue[str]'], None]
+ """)
+
+ def test_quoted_type_cast(self):
+ self.flakes("""
+ from typing import cast, Optional
+
+ maybe_int = cast('Optional[int]', 42)
+ """)
+
+ def test_type_cast_literal_str_to_str(self):
+ # Checks that our handling of quoted type annotations in the first
+ # argument to `cast` doesn't cause issues when (only) the _second_
+ # argument is a literal str which looks a bit like a type annotation.
+ self.flakes("""
+ from typing import cast
+
+ a_string = cast(str, 'Optional[int]')
+ """)
+
+ def test_quoted_type_cast_renamed_import(self):
+ self.flakes("""
+ from typing import cast as tsac, Optional as Maybe
+
+ maybe_int = tsac('Maybe[int]', 42)
+ """)
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_literal_type_typing(self):
+ self.flakes("""
+ from typing import Literal
+
+ def f(x: Literal['some string']) -> None:
+ return None
+ """)
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_literal_type_typing_extensions(self):
+ self.flakes("""
+ from typing_extensions import Literal
+
+ def f(x: Literal['some string']) -> None:
+ return None
+ """)
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_literal_type_some_other_module(self):
+ """err on the side of false-negatives for types named Literal"""
+ self.flakes("""
+ from my_module import compat
+ from my_module.compat import Literal
+
+ def f(x: compat.Literal['some string']) -> None:
+ return None
+ def g(x: Literal['some string']) -> None:
+ return None
+ """)
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_literal_union_type_typing(self):
+ self.flakes("""
+ from typing import Literal
+
+ def f(x: Literal['some string', 'foo bar']) -> None:
+ return None
+ """)
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_deferred_twice_annotation(self):
+ self.flakes("""
+ from queue import Queue
+ from typing import Optional
+
+
+ def f() -> "Optional['Queue[str]']":
+ return None
+ """)
+
+ @skipIf(version_info < (3, 7), 'new in Python 3.7')
+ def test_partial_string_annotations_with_future_annotations(self):
+ self.flakes("""
+ from __future__ import annotations
+
+ from queue import Queue
+ from typing import Optional
+
+
+ def f() -> Optional['Queue[str]']:
+ return None
+ """)
diff --git a/third_party/python/pyflakes/pyflakes/test/test_undefined_names.py b/third_party/python/pyflakes/pyflakes/test/test_undefined_names.py
new file mode 100644
index 0000000000..c952cbb66e
--- /dev/null
+++ b/third_party/python/pyflakes/pyflakes/test/test_undefined_names.py
@@ -0,0 +1,854 @@
+import ast
+from sys import version_info
+
+from pyflakes import messages as m, checker
+from pyflakes.test.harness import TestCase, skipIf, skip
+
+
+class Test(TestCase):
+ def test_undefined(self):
+ self.flakes('bar', m.UndefinedName)
+
+ def test_definedInListComp(self):
+ self.flakes('[a for a in range(10) if a]')
+
+ @skipIf(version_info < (3,),
+ 'in Python 2 list comprehensions execute in the same scope')
+ def test_undefinedInListComp(self):
+ self.flakes('''
+ [a for a in range(10)]
+ a
+ ''',
+ m.UndefinedName)
+
+ @skipIf(version_info < (3,),
+ 'in Python 2 exception names stay bound after the except: block')
+ def test_undefinedExceptionName(self):
+ """Exception names can't be used after the except: block.
+
+ The exc variable is unused inside the exception handler."""
+ self.flakes('''
+ try:
+ raise ValueError('ve')
+ except ValueError as exc:
+ pass
+ exc
+ ''', m.UndefinedName, m.UnusedVariable)
+
+ def test_namesDeclaredInExceptBlocks(self):
+ """Locals declared in except: blocks can be used after the block.
+
+ This shows the example in test_undefinedExceptionName is
+ different."""
+ self.flakes('''
+ try:
+ raise ValueError('ve')
+ except ValueError as exc:
+ e = exc
+ e
+ ''')
+
+ @skip('error reporting disabled due to false positives below')
+ def test_undefinedExceptionNameObscuringLocalVariable(self):
+ """Exception names obscure locals, can't be used after.
+
+ Last line will raise UnboundLocalError on Python 3 after exiting
+ the except: block. Note next two examples for false positives to
+ watch out for."""
+ self.flakes('''
+ exc = 'Original value'
+ try:
+ raise ValueError('ve')
+ except ValueError as exc:
+ pass
+ exc
+ ''',
+ m.UndefinedName)
+
+ @skipIf(version_info < (3,),
+ 'in Python 2 exception names stay bound after the except: block')
+ def test_undefinedExceptionNameObscuringLocalVariable2(self):
+ """Exception names are unbound after the `except:` block.
+
+ Last line will raise UnboundLocalError on Python 3 but would print out
+ 've' on Python 2. The exc variable is unused inside the exception
+ handler."""
+ self.flakes('''
+ try:
+ raise ValueError('ve')
+ except ValueError as exc:
+ pass
+ print(exc)
+ exc = 'Original value'
+ ''', m.UndefinedName, m.UnusedVariable)
+
+ def test_undefinedExceptionNameObscuringLocalVariableFalsePositive1(self):
+ """Exception names obscure locals, can't be used after. Unless.
+
+ Last line will never raise UnboundLocalError because it's only
+ entered if no exception was raised."""
+ # The exc variable is unused inside the exception handler.
+ expected = [] if version_info < (3,) else [m.UnusedVariable]
+ self.flakes('''
+ exc = 'Original value'
+ try:
+ raise ValueError('ve')
+ except ValueError as exc:
+ print('exception logged')
+ raise
+ exc
+ ''', *expected)
+
+ def test_delExceptionInExcept(self):
+ """The exception name can be deleted in the except: block."""
+ self.flakes('''
+ try:
+ pass
+ except Exception as exc:
+ del exc
+ ''')
+
+ def test_undefinedExceptionNameObscuringLocalVariableFalsePositive2(self):
+ """Exception names obscure locals, can't be used after. Unless.
+
+ Last line will never raise UnboundLocalError because `error` is
+ only falsy if the `except:` block has not been entered."""
+ # The exc variable is unused inside the exception handler.
+ expected = [] if version_info < (3,) else [m.UnusedVariable]
+ self.flakes('''
+ exc = 'Original value'
+ error = None
+ try:
+ raise ValueError('ve')
+ except ValueError as exc:
+ error = 'exception logged'
+ if error:
+ print(error)
+ else:
+ exc
+ ''', *expected)
+
+ @skip('error reporting disabled due to false positives below')
+ def test_undefinedExceptionNameObscuringGlobalVariable(self):
+ """Exception names obscure globals, can't be used after.
+
+ Last line will raise UnboundLocalError on both Python 2 and
+ Python 3 because the existence of that exception name creates
+ a local scope placeholder for it, obscuring any globals, etc."""
+ self.flakes('''
+ exc = 'Original value'
+ def func():
+ try:
+ pass # nothing is raised
+ except ValueError as exc:
+ pass # block never entered, exc stays unbound
+ exc
+ ''',
+ m.UndefinedLocal)
+
+ @skip('error reporting disabled due to false positives below')
+ def test_undefinedExceptionNameObscuringGlobalVariable2(self):
+ """Exception names obscure globals, can't be used after.
+
+ Last line will raise NameError on Python 3 because the name is
+ locally unbound after the `except:` block, even if it's
+ nonlocal. We should issue an error in this case because code that
+ only works correctly if an exception isn't raised is invalid,
+ unless it's explicitly silenced; see the false positives below."""
+ self.flakes('''
+ exc = 'Original value'
+ def func():
+ global exc
+ try:
+ raise ValueError('ve')
+ except ValueError as exc:
+ pass # block never entered, exc stays unbound
+ exc
+ ''',
+ m.UndefinedLocal)
+
+ def test_undefinedExceptionNameObscuringGlobalVariableFalsePositive1(self):
+ """Exception names obscure globals, can't be used after. Unless.
+
+ Last line will never raise NameError because it's only entered
+ if no exception was raised."""
+ # The exc variable is unused inside the exception handler.
+ expected = [] if version_info < (3,) else [m.UnusedVariable]
+ self.flakes('''
+ exc = 'Original value'
+ def func():
+ global exc
+ try:
+ raise ValueError('ve')
+ except ValueError as exc:
+ print('exception logged')
+ raise
+ exc
+ ''', *expected)
+
+ def test_undefinedExceptionNameObscuringGlobalVariableFalsePositive2(self):
+ """Exception names obscure globals, can't be used after. Unless.
+
+ Last line will never raise NameError because `error` is only
+ falsy if the `except:` block has not been entered."""
+ # The exc variable is unused inside the exception handler.
+ expected = [] if version_info < (3,) else [m.UnusedVariable]
+ self.flakes('''
+ exc = 'Original value'
+ def func():
+ global exc
+ error = None
+ try:
+ raise ValueError('ve')
+ except ValueError as exc:
+ error = 'exception logged'
+ if error:
+ print(error)
+ else:
+ exc
+ ''', *expected)
+
+ def test_functionsNeedGlobalScope(self):
+ self.flakes('''
+ class a:
+ def b():
+ fu
+ fu = 1
+ ''')
+
+ def test_builtins(self):
+ self.flakes('range(10)')
+
+ def test_builtinWindowsError(self):
+ """
+ C{WindowsError} is sometimes a builtin name, so no warning is emitted
+ for using it.
+ """
+ self.flakes('WindowsError')
+
+ @skipIf(version_info < (3, 6), 'new feature in 3.6')
+ def test_moduleAnnotations(self):
+ """
+ Use of C{__annotations__} in module scope should not emit an
+ undefined name warning when the Python version is 3.6 or later.
+ """
+ self.flakes('__annotations__')
+
+ def test_magicGlobalsFile(self):
+ """
+ Use of the C{__file__} magic global should not emit an undefined name
+ warning.
+ """
+ self.flakes('__file__')
+
+ def test_magicGlobalsBuiltins(self):
+ """
+ Use of the C{__builtins__} magic global should not emit an undefined
+ name warning.
+ """
+ self.flakes('__builtins__')
+
+ def test_magicGlobalsName(self):
+ """
+ Use of the C{__name__} magic global should not emit an undefined name
+ warning.
+ """
+ self.flakes('__name__')
+
+ def test_magicGlobalsPath(self):
+ """
+ Use of the C{__path__} magic global should not emit an undefined name
+ warning, if you refer to it from a file called __init__.py.
+ """
+ self.flakes('__path__', m.UndefinedName)
+ self.flakes('__path__', filename='package/__init__.py')
+
+ def test_magicModuleInClassScope(self):
+ """
+ Use of the C{__module__} magic builtin should not emit an undefined
+ name warning if used in class scope.
+ """
+ self.flakes('__module__', m.UndefinedName)
+ self.flakes('''
+ class Foo:
+ __module__
+ ''')
+ self.flakes('''
+ class Foo:
+ def bar(self):
+ __module__
+ ''', m.UndefinedName)
+
+ def test_globalImportStar(self):
+ """Can't find undefined names with import *."""
+ self.flakes('from fu import *; bar',
+ m.ImportStarUsed, m.ImportStarUsage)
+
+ @skipIf(version_info >= (3,), 'obsolete syntax')
+ def test_localImportStar(self):
+ """
+ A local import * still allows undefined names to be found
+ in upper scopes.
+ """
+ self.flakes('''
+ def a():
+ from fu import *
+ bar
+ ''', m.ImportStarUsed, m.UndefinedName, m.UnusedImport)
+
+ @skipIf(version_info >= (3,), 'obsolete syntax')
+ def test_unpackedParameter(self):
+ """Unpacked function parameters create bindings."""
+ self.flakes('''
+ def a((bar, baz)):
+ bar; baz
+ ''')
+
+ def test_definedByGlobal(self):
+ """
+ "global" can make an otherwise undefined name in another function
+ defined.
+ """
+ self.flakes('''
+ def a(): global fu; fu = 1
+ def b(): fu
+ ''')
+ self.flakes('''
+ def c(): bar
+ def b(): global bar; bar = 1
+ ''')
+
+ def test_definedByGlobalMultipleNames(self):
+ """
+ "global" can accept multiple names.
+ """
+ self.flakes('''
+ def a(): global fu, bar; fu = 1; bar = 2
+ def b(): fu; bar
+ ''')
+
+ def test_globalInGlobalScope(self):
+ """
+ A global statement in the global scope is ignored.
+ """
+ self.flakes('''
+ global x
+ def foo():
+ print(x)
+ ''', m.UndefinedName)
+
+ def test_global_reset_name_only(self):
+ """A global statement does not prevent other names being undefined."""
+ # Only different undefined names are reported.
+ # See following test that fails where the same name is used.
+ self.flakes('''
+ def f1():
+ s
+
+ def f2():
+ global m
+ ''', m.UndefinedName)
+
+ @skip("todo")
+ def test_unused_global(self):
+ """An unused global statement does not define the name."""
+ self.flakes('''
+ def f1():
+ m
+
+ def f2():
+ global m
+ ''', m.UndefinedName)
+
+ def test_del(self):
+ """Del deletes bindings."""
+ self.flakes('a = 1; del a; a', m.UndefinedName)
+
+ def test_delGlobal(self):
+ """Del a global binding from a function."""
+ self.flakes('''
+ a = 1
+ def f():
+ global a
+ del a
+ a
+ ''')
+
+ def test_delUndefined(self):
+ """Del an undefined name."""
+ self.flakes('del a', m.UndefinedName)
+
+ def test_delConditional(self):
+ """
+ Ignore conditional binding deletions.
+ """
+ self.flakes('''
+ context = None
+ test = True
+ if False:
+ del(test)
+ assert(test)
+ ''')
+
+ def test_delConditionalNested(self):
+ """
+ Ignore conditional binding deletions even if they are nested in other
+ blocks.
+ """
+ self.flakes('''
+ context = None
+ test = True
+ if False:
+ with context():
+ del(test)
+ assert(test)
+ ''')
+
+ def test_delWhile(self):
+ """
+ Ignore bindings deletion if called inside the body of a while
+ statement.
+ """
+ self.flakes('''
+ def test():
+ foo = 'bar'
+ while False:
+ del foo
+ assert(foo)
+ ''')
+
+ def test_delWhileTestUsage(self):
+ """
+ Ignore bindings deletion if called inside the body of a while
+ statement and name is used inside while's test part.
+ """
+ self.flakes('''
+ def _worker():
+ o = True
+ while o is not True:
+ del o
+ o = False
+ ''')
+
+ def test_delWhileNested(self):
+ """
+ Ignore bindings deletions if node is part of while's test, even when
+ del is in a nested block.
+ """
+ self.flakes('''
+ context = None
+ def _worker():
+ o = True
+ while o is not True:
+ while True:
+ with context():
+ del o
+ o = False
+ ''')
+
+ def test_globalFromNestedScope(self):
+ """Global names are available from nested scopes."""
+ self.flakes('''
+ a = 1
+ def b():
+ def c():
+ a
+ ''')
+
+ def test_laterRedefinedGlobalFromNestedScope(self):
+ """
+ Test that referencing a local name that shadows a global, before it is
+ defined, generates a warning.
+ """
+ self.flakes('''
+ a = 1
+ def fun():
+ a
+ a = 2
+ return a
+ ''', m.UndefinedLocal)
+
+ def test_laterRedefinedGlobalFromNestedScope2(self):
+ """
+ Test that referencing a local name in a nested scope that shadows a
+ global declared in an enclosing scope, before it is defined, generates
+ a warning.
+ """
+ self.flakes('''
+ a = 1
+ def fun():
+ global a
+ def fun2():
+ a
+ a = 2
+ return a
+ ''', m.UndefinedLocal)
+
+ def test_intermediateClassScopeIgnored(self):
+ """
+ If a name defined in an enclosing scope is shadowed by a local variable
+ and the name is used locally before it is bound, an unbound local
+ warning is emitted, even if there is a class scope between the enclosing
+ scope and the local scope.
+ """
+ self.flakes('''
+ def f():
+ x = 1
+ class g:
+ def h(self):
+ a = x
+ x = None
+ print(x, a)
+ print(x)
+ ''', m.UndefinedLocal)
+
+ def test_doubleNestingReportsClosestName(self):
+ """
+ Test that referencing a local name in a nested scope that shadows a
+ variable declared in two different outer scopes before it is defined
+ in the innermost scope generates an UnboundLocal warning which
+ refers to the nearest shadowed name.
+ """
+ exc = self.flakes('''
+ def a():
+ x = 1
+ def b():
+ x = 2 # line 5
+ def c():
+ x
+ x = 3
+ return x
+ return x
+ return x
+ ''', m.UndefinedLocal).messages[0]
+
+ # _DoctestMixin.flakes adds two lines preceding the code above.
+ expected_line_num = 7 if self.withDoctest else 5
+
+ self.assertEqual(exc.message_args, ('x', expected_line_num))
+
+ def test_laterRedefinedGlobalFromNestedScope3(self):
+ """
+ Test that referencing a local name in a nested scope that shadows a
+ global, before it is defined, generates a warning.
+ """
+ self.flakes('''
+ def fun():
+ a = 1
+ def fun2():
+ a
+ a = 1
+ return a
+ return a
+ ''', m.UndefinedLocal)
+
+ def test_undefinedAugmentedAssignment(self):
+ self.flakes(
+ '''
+ def f(seq):
+ a = 0
+ seq[a] += 1
+ seq[b] /= 2
+ c[0] *= 2
+ a -= 3
+ d += 4
+ e[any] = 5
+ ''',
+ m.UndefinedName, # b
+ m.UndefinedName, # c
+ m.UndefinedName, m.UnusedVariable, # d
+ m.UndefinedName, # e
+ )
+
+ def test_nestedClass(self):
+ """Nested classes can access enclosing scope."""
+ self.flakes('''
+ def f(foo):
+ class C:
+ bar = foo
+ def f(self):
+ return foo
+ return C()
+
+ f(123).f()
+ ''')
+
+ def test_badNestedClass(self):
+ """Free variables in nested classes must bind at class creation."""
+ self.flakes('''
+ def f():
+ class C:
+ bar = foo
+ foo = 456
+ return foo
+ f()
+ ''', m.UndefinedName)
+
+ def test_definedAsStarArgs(self):
+ """Star and double-star arg names are defined."""
+ self.flakes('''
+ def f(a, *b, **c):
+ print(a, b, c)
+ ''')
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_definedAsStarUnpack(self):
+ """Star names in unpack are defined."""
+ self.flakes('''
+ a, *b = range(10)
+ print(a, b)
+ ''')
+ self.flakes('''
+ *a, b = range(10)
+ print(a, b)
+ ''')
+ self.flakes('''
+ a, *b, c = range(10)
+ print(a, b, c)
+ ''')
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_usedAsStarUnpack(self):
+ """
+ Star names in unpack are used if RHS is not a tuple/list literal.
+ """
+ self.flakes('''
+ def f():
+ a, *b = range(10)
+ ''')
+ self.flakes('''
+ def f():
+ (*a, b) = range(10)
+ ''')
+ self.flakes('''
+ def f():
+ [a, *b, c] = range(10)
+ ''')
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_unusedAsStarUnpack(self):
+ """
+ Star names in unpack are unused if RHS is a tuple/list literal.
+ """
+ self.flakes('''
+ def f():
+ a, *b = any, all, 4, 2, 'un'
+ ''', m.UnusedVariable, m.UnusedVariable)
+ self.flakes('''
+ def f():
+ (*a, b) = [bool, int, float, complex]
+ ''', m.UnusedVariable, m.UnusedVariable)
+ self.flakes('''
+ def f():
+ [a, *b, c] = 9, 8, 7, 6, 5, 4
+ ''', m.UnusedVariable, m.UnusedVariable, m.UnusedVariable)
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_keywordOnlyArgs(self):
+ """Keyword-only arg names are defined."""
+ self.flakes('''
+ def f(*, a, b=None):
+ print(a, b)
+ ''')
+
+ self.flakes('''
+ import default_b
+ def f(*, a, b=default_b):
+ print(a, b)
+ ''')
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_keywordOnlyArgsUndefined(self):
+ """Typo in kwonly name."""
+ self.flakes('''
+ def f(*, a, b=default_c):
+ print(a, b)
+ ''', m.UndefinedName)
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_annotationUndefined(self):
+ """Undefined annotations."""
+ self.flakes('''
+ from abc import note1, note2, note3, note4, note5
+ def func(a: note1, *args: note2,
+ b: note3=12, **kw: note4) -> note5: pass
+ ''')
+
+ self.flakes('''
+ def func():
+ d = e = 42
+ def func(a: {1, d}) -> (lambda c: e): pass
+ ''')
+
+ @skipIf(version_info < (3,), 'new in Python 3')
+ def test_metaClassUndefined(self):
+ self.flakes('''
+ from abc import ABCMeta
+ class A(metaclass=ABCMeta): pass
+ ''')
+
+ def test_definedInGenExp(self):
+ """
+ Using the loop variable of a generator expression results in no
+ warnings.
+ """
+ self.flakes('(a for a in [1, 2, 3] if a)')
+
+ self.flakes('(b for b in (a for a in [1, 2, 3] if a) if b)')
+
+ def test_undefinedInGenExpNested(self):
+ """
+ The loop variables of generator expressions nested together are
+ not defined in the other generator.
+ """
+ self.flakes('(b for b in (a for a in [1, 2, 3] if b) if b)',
+ m.UndefinedName)
+
+ self.flakes('(b for b in (a for a in [1, 2, 3] if a) if a)',
+ m.UndefinedName)
+
+ def test_undefinedWithErrorHandler(self):
+ """
+ Some compatibility code checks explicitly for NameError.
+ It should not trigger warnings.
+ """
+ self.flakes('''
+ try:
+ socket_map
+ except NameError:
+ socket_map = {}
+ ''')
+ self.flakes('''
+ try:
+ _memoryview.contiguous
+ except (NameError, AttributeError):
+ raise RuntimeError("Python >= 3.3 is required")
+ ''')
+ # If NameError is not explicitly handled, generate a warning
+ self.flakes('''
+ try:
+ socket_map
+ except:
+ socket_map = {}
+ ''', m.UndefinedName)
+ self.flakes('''
+ try:
+ socket_map
+ except Exception:
+ socket_map = {}
+ ''', m.UndefinedName)
+
+ def test_definedInClass(self):
+ """
+ Defined name for generator expressions and dict/set comprehension.
+ """
+ self.flakes('''
+ class A:
+ T = range(10)
+
+ Z = (x for x in T)
+ L = [x for x in T]
+ B = dict((i, str(i)) for i in T)
+ ''')
+
+ self.flakes('''
+ class A:
+ T = range(10)
+
+ X = {x for x in T}
+ Y = {x:x for x in T}
+ ''')
+
+ def test_definedInClassNested(self):
+ """Defined name for nested generator expressions in a class."""
+ self.flakes('''
+ class A:
+ T = range(10)
+
+ Z = (x for x in (a for a in T))
+ ''')
+
+ def test_undefinedInLoop(self):
+ """
+ The loop variable is defined after the expression is computed.
+ """
+ self.flakes('''
+ for i in range(i):
+ print(i)
+ ''', m.UndefinedName)
+ self.flakes('''
+ [42 for i in range(i)]
+ ''', m.UndefinedName)
+ self.flakes('''
+ (42 for i in range(i))
+ ''', m.UndefinedName)
+
+ def test_definedFromLambdaInDictionaryComprehension(self):
+ """
+ Defined name referenced from a lambda function within a dict/set
+ comprehension.
+ """
+ self.flakes('''
+ {lambda: id(x) for x in range(10)}
+ ''')
+
+ def test_definedFromLambdaInGenerator(self):
+ """
+ Defined name referenced from a lambda function within a generator
+ expression.
+ """
+ self.flakes('''
+ any(lambda: id(x) for x in range(10))
+ ''')
+
+ def test_undefinedFromLambdaInDictionaryComprehension(self):
+ """
+ Undefined name referenced from a lambda function within a dict/set
+ comprehension.
+ """
+ self.flakes('''
+ {lambda: id(y) for x in range(10)}
+ ''', m.UndefinedName)
+
+ def test_undefinedFromLambdaInComprehension(self):
+ """
+ Undefined name referenced from a lambda function within a generator
+ expression.
+ """
+ self.flakes('''
+ any(lambda: id(y) for x in range(10))
+ ''', m.UndefinedName)
+
+ def test_dunderClass(self):
+ """
+ `__class__` is defined in class scope under Python 3, but is not
+ in Python 2.
+ """
+ code = '''
+ class Test(object):
+ def __init__(self):
+ print(__class__.__name__)
+ self.x = 1
+
+ t = Test()
+ '''
+ if version_info < (3,):
+ self.flakes(code, m.UndefinedName)
+ else:
+ self.flakes(code)
+
+
+class NameTests(TestCase):
+ """
+ Tests for some extra cases of name handling.
+ """
+ def test_impossibleContext(self):
+ """
+ A Name node with an unrecognized context results in a RuntimeError being
+ raised.
+ """
+ tree = ast.parse("x = 10")
+ file_tokens = checker.make_tokens("x = 10")
+ # Make it into something unrecognizable.
+ tree.body[0].targets[0].ctx = object()
+ self.assertRaises(RuntimeError, checker.Checker, tree, file_tokens=file_tokens)
diff --git a/third_party/python/pyflakes/setup.cfg b/third_party/python/pyflakes/setup.cfg
new file mode 100644
index 0000000000..51b5f83b3d
--- /dev/null
+++ b/third_party/python/pyflakes/setup.cfg
@@ -0,0 +1,10 @@
+[bdist_wheel]
+universal = 1
+
+[metadata]
+license_file = LICENSE
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/pyflakes/setup.py b/third_party/python/pyflakes/setup.py
new file mode 100755
index 0000000000..5e2088ee6a
--- /dev/null
+++ b/third_party/python/pyflakes/setup.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+# Copyright 2005-2011 Divmod, Inc.
+# Copyright 2013 Florent Xicluna. See LICENSE file for details
+from __future__ import with_statement
+
+import os.path
+
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+ extra = {'scripts': ["bin/pyflakes"]}
+else:
+ extra = {
+ 'test_suite': 'pyflakes.test',
+ 'entry_points': {
+ 'console_scripts': ['pyflakes = pyflakes.api:main'],
+ },
+ }
+
+
+def get_version(fname=os.path.join('pyflakes', '__init__.py')):
+ with open(fname) as f:
+ for line in f:
+ if line.startswith('__version__'):
+ return eval(line.split('=')[-1])
+
+
+def get_long_description():
+ descr = []
+ for fname in ('README.rst',):
+ with open(fname) as f:
+ descr.append(f.read())
+ return '\n\n'.join(descr)
+
+
+setup(
+ name="pyflakes",
+ license="MIT",
+ version=get_version(),
+ description="passive checker of Python programs",
+ long_description=get_long_description(),
+ author="A lot of people",
+ author_email="code-quality@python.org",
+ url="https://github.com/PyCQA/pyflakes",
+ packages=["pyflakes", "pyflakes.scripts", "pyflakes.test"],
+ python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
+ classifiers=[
+ "Development Status :: 6 - Mature",
+ "Environment :: Console",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: MIT License",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ "Topic :: Software Development",
+ "Topic :: Utilities",
+ ],
+ **extra)
diff --git a/third_party/python/pylru/pylru.py b/third_party/python/pylru/pylru.py
new file mode 100644
index 0000000000..e69cadb76c
--- /dev/null
+++ b/third_party/python/pylru/pylru.py
@@ -0,0 +1,556 @@
+
+# Cache implementation with a Least Recently Used (LRU) replacement policy and
+# a basic dictionary interface.
+
+# Copyright (C) 2006, 2009, 2010, 2011 Jay Hutchinson
+
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; either version 2 of the License, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+# more details.
+
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc., 51
+# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+
+
+# The cache is implemented using a combination of a python dictionary (hash
+# table) and a circular doubly linked list. Items in the cache are stored in
+# nodes. These nodes make up the linked list. The list is used to efficiently
+# maintain the order that the items have been used in. The front or head of
+# the list contains the most recently used item, the tail of the list
+# contains the least recently used item. When an item is used it can easily
+# (in a constant amount of time) be moved to the front of the list, thus
+# updating its position in the ordering. These nodes are also placed in the
+# hash table under their associated key. The hash table allows efficient
+# lookup of values by key.
+
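+# A minimal usage sketch (illustrative only, not part of the upstream module)
+# of how the dictionary plus circular list behaves:
+#
+#     cache = lrucache(3)
+#     cache['a'] = 1; cache['b'] = 2; cache['c'] = 3
+#     cache['a']        # touching 'a' moves it to the head of the list
+#     cache['d'] = 4    # cache is full, so 'b' (least recently used) is evicted
+#     'b' in cache      # -> False
+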
+# Class for the node objects.
+class _dlnode(object):
+ def __init__(self):
+ self.empty = True
+
+
+class lrucache(object):
+
+ def __init__(self, size, callback=None):
+
+ self.callback = callback
+
+ # Create an empty hash table.
+ self.table = {}
+
+ # Initialize the doubly linked list with one empty node. This is an
+ # invariant. The cache size must always be greater than zero. Each
+ # node has a 'prev' and 'next' variable to hold the node that comes
+ # before it and after it respectively. Initially the two variables
+ # each point to the head node itself, creating a circular doubly
+ # linked list of size one. Then the size() method is used to adjust
+ # the list to the desired size.
+
+ self.head = _dlnode()
+ self.head.next = self.head
+ self.head.prev = self.head
+
+ self.listSize = 1
+
+ # Adjust the size
+ self.size(size)
+
+
+ def __len__(self):
+ return len(self.table)
+
+ def clear(self):
+ for node in self.dli():
+ node.empty = True
+ node.key = None
+ node.value = None
+
+ self.table.clear()
+
+
+ def __contains__(self, key):
+ return key in self.table
+
+ # Looks up a value in the cache without affecting cache order.
+ def peek(self, key):
+ # Look up the node
+ node = self.table[key]
+ return node.value
+
+
+ def __getitem__(self, key):
+ # Look up the node
+ node = self.table[key]
+
+ # Update the list ordering. Move this node so that it directly
+ # precedes the head node. Then set the 'head' variable to it. This
+ # makes it the new head of the list.
+ self.mtf(node)
+ self.head = node
+
+ # Return the value.
+ return node.value
+
+ def get(self, key, default=None):
+ """Get an item - return default (None) if not present"""
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def __setitem__(self, key, value):
+ # First, see if any value is stored under 'key' in the cache already.
+ # If so we are going to replace that value with the new one.
+ if key in self.table:
+
+ # Lookup the node
+ node = self.table[key]
+
+ # Replace the value.
+ node.value = value
+
+ # Update the list ordering.
+ self.mtf(node)
+ self.head = node
+
+ return
+
+ # Ok, no value is currently stored under 'key' in the cache. We need
+ # to choose a node to place the new item in. There are two cases. If
+ # the cache is full some item will have to be pushed out of the
+ # cache. We want to choose the node with the least recently used
+ # item. This is the node at the tail of the list. If the cache is not
+ # full we want to choose a node that is empty. Because of the way the
+ # list is managed, the empty nodes are always together at the tail
+ # end of the list. Thus, in either case, by choosing the node at the
+ # tail of the list our conditions are satisfied.
+
+ # Since the list is circular, the tail node directly precedes the
+ # 'head' node.
+ node = self.head.prev
+
+ # If the node already contains something we need to remove the old
+ # key from the dictionary.
+ if not node.empty:
+ if self.callback is not None:
+ self.callback(node.key, node.value)
+ del self.table[node.key]
+
+ # Place the new key and value in the node
+ node.empty = False
+ node.key = key
+ node.value = value
+
+ # Add the node to the dictionary under the new key.
+ self.table[key] = node
+
+ # We need to move the node to the head of the list. The node is the
+ # tail node, so it directly precedes the head node due to the list
+ # being circular. Therefore, the ordering is already correct, we just
+ # need to adjust the 'head' variable.
+ self.head = node
+
+
+ def __delitem__(self, key):
+
+ # Lookup the node, then remove it from the hash table.
+ node = self.table[key]
+ del self.table[key]
+
+ node.empty = True
+
+ # Not strictly necessary.
+ node.key = None
+ node.value = None
+
+ # Because this node is now empty we want to reuse it before any
+ # non-empty node. To do that we want to move it to the tail of the
+ # list. We move it so that it directly precedes the 'head' node. This
+ # makes it the tail node. The 'head' is then adjusted. This
+ # adjustment ensures correctness even for the case where the 'node'
+ # is the 'head' node.
+ self.mtf(node)
+ self.head = node.next
+
+ def __iter__(self):
+
+ # Return an iterator that returns the keys in the cache in order from
+ # the most recently to least recently used. Does not modify the cache
+ # order.
+ for node in self.dli():
+ yield node.key
+
+ def items(self):
+
+ # Return an iterator that returns the (key, value) pairs in the cache
+ # in order from the most recently to least recently used. Does not
+ # modify the cache order.
+ for node in self.dli():
+ yield (node.key, node.value)
+
+ def keys(self):
+
+ # Return an iterator that returns the keys in the cache in order from
+ # the most recently to least recently used. Does not modify the cache
+ # order.
+ for node in self.dli():
+ yield node.key
+
+ def values(self):
+
+ # Return an iterator that returns the values in the cache in order
+ # from the most recently to least recently used. Does not modify the
+ # cache order.
+ for node in self.dli():
+ yield node.value
+
+ def size(self, size=None):
+
+ if size is not None:
+ assert size > 0
+ if size > self.listSize:
+ self.addTailNode(size - self.listSize)
+ elif size < self.listSize:
+ self.removeTailNode(self.listSize - size)
+
+ return self.listSize
+
+ # Increases the size of the cache by inserting n empty nodes at the tail
+ # of the list.
+ def addTailNode(self, n):
+ for i in range(n):
+ node = _dlnode()
+ node.next = self.head
+ node.prev = self.head.prev
+
+ self.head.prev.next = node
+ self.head.prev = node
+
+ self.listSize += n
+
+ # Decreases the size of the list by removing n nodes from the tail of the
+ # list.
+ def removeTailNode(self, n):
+ assert self.listSize > n
+ for i in range(n):
+ node = self.head.prev
+ if not node.empty:
+ if self.callback is not None:
+ self.callback(node.key, node.value)
+ del self.table[node.key]
+
+ # Splice the tail node out of the list
+ self.head.prev = node.prev
+ node.prev.next = self.head
+
+ # The next four lines are not strictly necessary.
+ node.prev = None
+ node.next = None
+
+ node.key = None
+ node.value = None
+
+ self.listSize -= n
+
+
+ # This method adjusts the ordering of the doubly linked list so that
+ # 'node' directly precedes the 'head' node. Because of the order of
+ # operations, if 'node' already directly precedes the 'head' node or if
+ # 'node' is the 'head' node the order of the list will be unchanged.
+ def mtf(self, node):
+ node.prev.next = node.next
+ node.next.prev = node.prev
+
+ node.prev = self.head.prev
+ node.next = self.head.prev.next
+
+ node.next.prev = node
+ node.prev.next = node
+
+ # This method returns an iterator that iterates over the non-empty nodes
+ # in the doubly linked list in order from the most recently to the least
+ # recently used.
+ def dli(self):
+ node = self.head
+ for i in range(len(self.table)):
+ yield node
+ node = node.next
+
+
+
+
+class WriteThroughCacheManager(object):
+ def __init__(self, store, size):
+ self.store = store
+ self.cache = lrucache(size)
+
+ def __len__(self):
+ return len(self.store)
+
+ # Returns/sets the size of the managed cache.
+ def size(self, size=None):
+ return self.cache.size(size)
+
+ def clear(self):
+ self.cache.clear()
+ self.store.clear()
+
+ def __contains__(self, key):
+ # Check the cache first. If it is there we can return quickly.
+ if key in self.cache:
+ return True
+
+ # Not in the cache. Might be in the underlying store.
+ if key in self.store:
+ return True
+
+ return False
+
+ def __getitem__(self, key):
+ # First we try the cache. If successful we just return the value. If
+ # not we catch KeyError and ignore it since that just means the key
+ # was not in the cache.
+ try:
+ return self.cache[key]
+ except KeyError:
+ pass
+
+ # It wasn't in the cache. Look it up in the store, add the entry to
+ # the cache, and return the value.
+ value = self.store[key]
+ self.cache[key] = value
+ return value
+
+ def get(self, key, default=None):
+ """Get an item - return default (None) if not present"""
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def __setitem__(self, key, value):
+ # Add the key/value pair to the cache and store.
+ self.cache[key] = value
+ self.store[key] = value
+
+ def __delitem__(self, key):
+ # Write-through behavior: the cache and store should be consistent. Delete
+ # it from the store.
+ del self.store[key]
+ try:
+ # Ok, delete from the store was successful. It might also be in
+ # the cache, try and delete it. If not we catch the KeyError and
+ # ignore it.
+ del self.cache[key]
+ except KeyError:
+ pass
+
+ def __iter__(self):
+ return self.keys()
+
+ def keys(self):
+ return self.store.keys()
+
+ def values(self):
+ return self.store.values()
+
+ def items(self):
+ return self.store.items()
+
+
+
+class WriteBackCacheManager(object):
+ def __init__(self, store, size):
+ self.store = store
+
+ # Create a set to hold the dirty keys.
+ self.dirty = set()
+
+ # Define a callback function to be called by the cache when a
+ # key/value pair is about to be ejected. This callback will check to
+ # see if the key is in the dirty set. If so, then it will update the
+ # store object and remove the key from the dirty set.
+ def callback(key, value):
+ if key in self.dirty:
+ self.store[key] = value
+ self.dirty.remove(key)
+
+ # Create a cache and give it the callback function.
+ self.cache = lrucache(size, callback)
+
+ # Returns/sets the size of the managed cache.
+ def size(self, size=None):
+ return self.cache.size(size)
+
+ def clear(self):
+ self.cache.clear()
+ self.dirty.clear()
+ self.store.clear()
+
+ def __contains__(self, key):
+ # Check the cache first, since if it is there we can return quickly.
+ if key in self.cache:
+ return True
+
+ # Not in the cache. Might be in the underlying store.
+ if key in self.store:
+ return True
+
+ return False
+
+ def __getitem__(self, key):
+ # First we try the cache. If successful we just return the value. If
+ # not we catch KeyError and ignore it since that just means the key
+ # was not in the cache.
+ try:
+ return self.cache[key]
+ except KeyError:
+ pass
+
+ # It wasn't in the cache. Look it up in the store, add the entry to
+ # the cache, and return the value.
+ value = self.store[key]
+ self.cache[key] = value
+ return value
+
+ def get(self, key, default=None):
+ """Get an item - return default (None) if not present"""
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def __setitem__(self, key, value):
+ # Add the key/value pair to the cache.
+ self.cache[key] = value
+ self.dirty.add(key)
+
+ def __delitem__(self, key):
+
+ found = False
+ try:
+ del self.cache[key]
+ found = True
+ self.dirty.remove(key)
+ except KeyError:
+ pass
+
+ try:
+ del self.store[key]
+ found = True
+ except KeyError:
+ pass
+
+ if not found: # If not found in cache or store, raise error.
+ raise KeyError
+
+
+ def __iter__(self):
+ return self.keys()
+
+ def keys(self):
+ for key in self.store.keys():
+ if key not in self.dirty:
+ yield key
+
+ for key in self.dirty:
+ yield key
+
+
+ def values(self):
+ for key, value in self.items():
+ yield value
+
+
+ def items(self):
+ for key, value in self.store.items():
+ if key not in self.dirty:
+ yield (key, value)
+
+ for key in self.dirty:
+ value = self.cache.peek(key)
+ yield (key, value)
+
+
+
+ def sync(self):
+ # For each dirty key, peek at its value in the cache and update the
+ # store. Doesn't change the cache's order.
+ for key in self.dirty:
+ self.store[key] = self.cache.peek(key)
+ # There are no dirty keys now.
+ self.dirty.clear()
+
+ def flush(self):
+ self.sync()
+ self.cache.clear()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.sync()
+ return False
+
+
+class FunctionCacheManager(object):
+ def __init__(self, func, size):
+ self.func = func
+ self.cache = lrucache(size)
+
+ def size(self, size=None):
+ return self.cache.size(size)
+
+ def clear(self):
+ self.cache.clear()
+
+ def __call__(self, *args, **kwargs):
+ kwtuple = tuple((key, kwargs[key]) for key in sorted(kwargs.keys()))
+ key = (args, kwtuple)
+ try:
+ return self.cache[key]
+ except KeyError:
+ pass
+
+ value = self.func(*args, **kwargs)
+ self.cache[key] = value
+ return value
+
+
+def lruwrap(store, size, writeback=False):
+ if writeback:
+ return WriteBackCacheManager(store, size)
+ else:
+ return WriteThroughCacheManager(store, size)
+
+import functools
+
+class lrudecorator(object):
+ def __init__(self, size):
+ self.cache = lrucache(size)
+
+ def __call__(self, func):
+ def wrapper(*args, **kwargs):
+ kwtuple = tuple((key, kwargs[key]) for key in sorted(kwargs.keys()))
+ key = (args, kwtuple)
+ try:
+ return self.cache[key]
+ except KeyError:
+ pass
+
+ value = func(*args, **kwargs)
+ self.cache[key] = value
+ return value
+
+ wrapper.cache = self.cache
+ wrapper.size = self.cache.size
+ wrapper.clear = self.cache.clear
+ return functools.update_wrapper(wrapper, func)
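
The cache managers and decorator above all build on the same lrucache primitive. As an editorial aside (not part of the vendored pylru.py), the following minimal sketch shows how lruwrap and lrudecorator are typically used; it assumes the module is importable as pylru, the same way test.py below imports it, and the backing dict and keys are invented for illustration:

    from pylru import lruwrap, lrudecorator

    backing = {}  # any mutable mapping can act as the slow backing store

    # Write-through: every assignment reaches both the cache and the store.
    cached = lruwrap(backing, 64)
    cached["answer"] = 42
    assert backing["answer"] == 42

    # Write-back: updates stay in the cache until sync()/flush(); the context
    # manager form calls sync() automatically on exit.
    with lruwrap(backing, 64, writeback=True) as wb:
        wb["question"] = "6 x 7"
    assert backing["question"] == "6 x 7"

    # Memoize a function with an LRU-bounded cache of recent results.
    @lrudecorator(128)
    def square(x):
        return x * x

    assert square(12) == 144
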
diff --git a/third_party/python/pylru/test.py b/third_party/python/pylru/test.py
new file mode 100644
index 0000000000..7a4842fb52
--- /dev/null
+++ b/third_party/python/pylru/test.py
@@ -0,0 +1,238 @@
+
+from pylru import *
+import random
+
+# This tests PyLRU by fuzzing it with random operations, then checking the
+# results against another, simpler, LRU cache implementation.
+
+class simplelrucache:
+
+ def __init__(self, size):
+
+ # Initialize the cache as empty.
+ self.cache = []
+ self.size = size
+
+ def __contains__(self, key):
+
+ for x in self.cache:
+ if x[0] == key:
+ return True
+
+ return False
+
+
+ def __getitem__(self, key):
+
+ for i in range(len(self.cache)):
+ x = self.cache[i]
+ if x[0] == key:
+ del self.cache[i]
+ self.cache.append(x)
+ return x[1]
+
+ raise KeyError
+
+
+ def __setitem__(self, key, value):
+
+ for i in range(len(self.cache)):
+ x = self.cache[i]
+ if x[0] == key:
+ x[1] = value
+ del self.cache[i]
+ self.cache.append(x)
+ return
+
+ if len(self.cache) == self.size:
+ self.cache = self.cache[1:]
+
+ self.cache.append([key, value])
+
+
+ def __delitem__(self, key):
+
+ for i in range(len(self.cache)):
+ if self.cache[i][0] == key:
+ del self.cache[i]
+ return
+
+ raise KeyError
+
+ def resize(self, x=None):
+ assert x > 0
+ self.size = x
+ if x < len(self.cache):
+ del self.cache[:len(self.cache) - x]
+
+
+def test(a, b, c, d, verify):
+
+ for i in range(1000):
+ x = random.randint(0, 512)
+ y = random.randint(0, 512)
+
+ a[x] = y
+ b[x] = y
+ verify(c, d)
+
+ for i in range(1000):
+ x = random.randint(0, 512)
+ if x in a:
+ assert x in b
+ z = a[x]
+ z += b[x]
+ else:
+ assert x not in b
+ verify(c, d)
+
+ for i in range(256):
+ x = random.randint(0, 512)
+ if x in a:
+ assert x in b
+ del a[x]
+ del b[x]
+ else:
+ assert x not in b
+ verify(c, d)
+
+
+def testcache():
+ def verify(a, b):
+ q = []
+ z = a.head
+ for j in range(len(a.table)):
+ q.append([z.key, z.value])
+ z = z.next
+
+ assert q == b.cache[::-1]
+
+ q2 = []
+ for x, y in q:
+ q2.append((x, y))
+
+ assert list(a.items()) == q2
+ assert list(zip(a.keys(), a.values())) == q2
+ assert list(a.keys()) == list(a)
+
+
+ a = lrucache(128)
+ b = simplelrucache(128)
+ verify(a, b)
+ test(a, b, a, b, verify)
+
+ a.size(71)
+ b.resize(71)
+ verify(a, b)
+ test(a, b, a, b, verify)
+
+ a.size(341)
+ b.resize(341)
+ verify(a, b)
+ test(a, b, a, b, verify)
+
+ a.size(127)
+ b.resize(127)
+ verify(a, b)
+ test(a, b, a, b, verify)
+
+
+def wraptest():
+
+ def verify(p, x):
+ assert p == x.store
+ for key, value in x.cache.items():
+ assert x.store[key] == value
+
+ tmp = list(x.items())
+ tmp.sort()
+
+ tmp2 = list(p.items())
+ tmp2.sort()
+
+ assert tmp == tmp2
+
+ p = dict()
+ q = dict()
+ x = lruwrap(q, 128)
+
+ test(p, x, p, x, verify)
+
+
+
+def wraptest2():
+
+ def verify(p, x):
+ for key, value in x.store.items():
+ if key not in x.dirty:
+ assert p[key] == value
+
+ for key in x.dirty:
+ assert x.cache.peek(key) == p[key]
+
+ for key, value in x.cache.items():
+ if key not in x.dirty:
+ assert x.store[key] == p[key] == value
+
+ tmp = list(x.items())
+ tmp.sort()
+
+ tmp2 = list(p.items())
+ tmp2.sort()
+
+ assert tmp == tmp2
+
+ p = dict()
+ q = dict()
+ x = lruwrap(q, 128, True)
+
+ test(p, x, p, x, verify)
+
+ x.sync()
+ assert p == q
+
+def wraptest3():
+
+ def verify(p, x):
+ for key, value in x.store.items():
+ if key not in x.dirty:
+ assert p[key] == value
+
+ for key in x.dirty:
+ assert x.cache.peek(key) == p[key]
+
+ for key, value in x.cache.items():
+ if key not in x.dirty:
+ assert x.store[key] == p[key] == value
+
+ p = dict()
+ q = dict()
+ with lruwrap(q, 128, True) as x:
+ test(p, x, p, x, verify)
+
+ assert p == q
+
+
+@lrudecorator(100)
+def square(x):
+ return x*x
+
+def testDecorator():
+ for i in range(1000):
+ x = random.randint(0, 200)
+ assert square(x) == x*x
+
+
+if __name__ == '__main__':
+
+ random.seed()
+
+
+ for i in range(20):
+ testcache()
+ wraptest()
+ wraptest2()
+ wraptest3()
+ testDecorator()
+
+
diff --git a/third_party/python/pyrsistent/CHANGES.txt b/third_party/python/pyrsistent/CHANGES.txt
new file mode 100644
index 0000000000..603b3f2048
--- /dev/null
+++ b/third_party/python/pyrsistent/CHANGES.txt
@@ -0,0 +1,333 @@
+Revision history
+----------------
+0.16.0, 2020-03-24
+ * No major updates but Python 2 support no longer guaranteed.
+ * Fix #192, 'ignore_extra' for 'pvector_field'. Thanks @ss18 for this!
+ * Fix #191, include LICENCE in distribution. Thanks @johnthagen for this!
+ * Fix #190, minor MyPy errors. Thanks @Qhesz for this!
+
+0.15.7, 2020-01-07
+ * NOTE! This is the last version of Pyrsistent that officially supports Python 2.X!
+ * Fix #186, type errors with more recent versions of MyPy. Thanks @qhesz for this!
+ * Build and test on ARM during CI. Thanks @ossdev07 for this!
+ * Set absolute imports for python2 compatibility. Thanks @michalvi for this!
+
+0.15.6, 2019-11-23
+ * Fix #182 moduleinit name clash.
+
+0.15.5, 2019-10-27
+ * Fix #179 Fixed 'ignore_extra' factory parameter for pvector. Thanks @ss18 for this!
+
+0.15.4, 2019-07-27
+ * Fix #174, fix a GC traversal bug in pvector evolver C extension. Thanks @till-varoquaux for finding and fixing this!
+ * Fix #175, pytest 5 compatibility, this is a quick fix, some more work is needed to get coverage working etc.
+
+0.15.3, 2019-07-07
+ * Fix #172, catch all exceptions during extension build to reduce the chance of corner cases that prevent installation.
+ * Fix #171, in PVector equality comparison don't assume that the other object has a length, check before calling len.
+ * Fix #168, write warning about failing build of C extension directly to stderr so that pip does not silence it.
+ * Fix #155, update PMapEvolver type stub to better reflect implementation.
+
+0.15.2, 2019-05-12
+ * Fix #166, Propagate 'ignore_extra' param in hierarchy. Thanks @ss18 for this!
+ * Fix #167, thaw typing. Thanks @nattofriends for this!
+ * Fix #154, not possible to insert empty pmap as leaf node with transform.
+
+0.15.1, 2019-04-26
+ * Fix #163 installation broken on Python 2 because of fix of #161, thanks @vphilippon for this! Sorry for the
+ inconvenience.
+
+0.15.0, 2019-04-25
+ * Python 3.4 is no longer officially supported since it reached EOL on 2019-03-18.
+ * Fix #157, major improvements to type hints. Thanks @je-l for working on this and @nattofriend for reviewing the PR!
+ * Fix #161, installation fails on some Windows platforms because fallback to Python pvector does not work.
+ Thanks @MaxTaggart for fixing and verifying this!
+
+0.14.11, 2019-02-21
+ * Fix #152 Don't use __builtin_popcount, this hopefully fixes #147 Error in pvectorc.cp37-win_amd64.pyd file, as well.
+ Thanks @benrg for this!
+ * Fix #151 Fix compatibility for hypothesis 4. Thanks @felixonmars for this!
+
+0.14.10, 2019-02-09
+ * Fix #148, only require pytest-runner if running tests. Thanks @ccorbacho for this!
+
+0.14.9, 2019-01-06
+ * Fix #144, Compile pvectormodule.c on windows. Thanks @ganwell for this!
+
+0.14.8, 2018-12-19
+ * Fix #142, Improve type stubs. Thanks @arxanas for this!
+
+0.14.7, 2018-11-20
+ * Fix #102, add PEP 561 type annotation stubs for most pyrsistent types. Thanks @nattofriends for this!
+
+0.14.6, 2018-11-17
+ * Fix #135, Type classes for Python 3 type annotations of pyrsistent types. Thanks @nattofriends for this!
+ * Fix #128, Allow PClass and PRecord to ignore input parameters to constructor that are not part of the spec
+ instead of blowing up with a type error. Thanks @agberk for this!
+
+0.14.5, 2018-10-14
+ * Fix #137, deprecation warnings in Python 3.7. Thanks @thombashi for this!
+ * Fix #129, building via setuptools and setup.py. Thanks @galuszkak for this!
+
+0.14.4, 2018-07-08
+ * Fix #133, minor Python 3.7 compatibility issue. Pyrsistent is now officially Python 3.7 compliant!
+
+v0.14.3, 2018-06-11
+ * Fix #123 regression where type names break sequence fields. Thanks @doozr for this!
+ * Fix #124 using the class name to make AttributeError on __getattr__ more informative for PRecords.
+ Thanks @neilvyas for this!
+ * Fix #125 how fields handle type arguments. Thanks @neilvyas for this!
+
+v0.14.2, 2017-12-06
+ * Fix #121, regression in PClass.set() introduced in 0.14.1.
+
+v0.14.1, 2017-11-27
+ * Equality check performance improvements for pvectors and pmaps. Thanks @dtomas for this!
+ * Avoid calling factories multiple times for fields that do not change, see PR #120 for
+ details. Thanks @teepark for this!
+
+v0.14.0, 2017-10-08
+ * Fix #117, pmap now accepts iterators as input to constructor. Thanks @Julian for this!
+ * Drop support for Python 2.6. Nothing has been done in this release that will explicitly
+ break pyrsistent for 2.6 but it will not be considered moving forward. Dropping 2.6
+ support is the reason for stepping the second decimal instead of the third.
+
+v0.13.0, 2017-09-01
+ * Fix #113, Skip field factories when loading pickled objects. There is a
+ minor backwards incompatibility in the behaviour because of this. Thanks
+ @teepark for fixing this!
+ * Fix #116, negative indexing for pdeques. Thanks @Julian for this!
+
+v0.12.3, 2017-06-04
+ * Fix #83, make it possible to use Python 3 enums as field type without having to wrap it in
+ a list or tuple. Thanks @douglas-treadwell for this!
+
+v0.12.2, 2017-05-30
+ * Fix #108, now possible to use the values in predicates to transform. Thanks @exarkus for this!
+ * Fix #107, support multiple level of __invariant__ inheritance. Thanks @exarkus for this!
+
+v0.12.1, 2017-02-26
+ * Fix #97, initialize CheckedPVector from iterator.
+ * Fix #97, cache hash value on PMap. Thanks @sarum90 for this!
+
+v0.12.0, 2017-01-06
+ * Fix #87, add function get_in() for access to elements in deeply nested structures.
+ * Fix #91, add method update() to pset and pbag.
+ * Fix #92, incorrect discard of elements in transform on pvector
+ * This is a release candidate for 1.0 as I now consider pyrsistent fairly stable.
+
+v0.11.13, 2016-04-03
+ * Fix #84, pvector segfault in CPython 3 when repr of contained object raises Exception.
+ * Update README to cover for issue described in #83.
+
+v0.11.12, 2016-02-06
+ * Minor modifications of tests to allow testing as requested in #79 and #80.
+ * Also run CI tests under python 3.5
+
+v0.11.11, 2016-01-31
+ * #78, include tests in pypi dist.
+
+v0.11.10, 2015-12-27, NOTE! This release contains a backwards incompatible change
+ despite only stepping the patch version number. See below.
+ * Implement #74, attribute access on PClass evolver
+ * Implement #75, lazily evaluated invariant messages by providing a
+ callable with no arguments.
+ * Initial values on fields can now be evaluated on object creation
+ by providing a callable with no arguments.
+
+ NOTE! If you previously had callables as initial values this change means that those
+ will be called upon object creation which may not be what you want. As
+ a temporary workaround a callable returning a callable can be used. This
+ feature and the concept of initial values will likely change slightly in the future.
+ See #77 and #76 for more information.
+
+v0.11.9, 2015-11-01
+ * Added PVector.remove(), thanks @radix for initiating this!
+
+v0.11.8, 2015-10-18
+ * Fix #66, UnicodeDecodeError when doing pip install in environments with ascii encoding as default.
+ Thanks @foolswood!
+ * Implement support for multiple types in pmap_field(), pvector_field() and pset_field(). Thanks @itamarst!
+
+v0.11.7, 2015-10-03
+ * Fix #52, occasional SEGFAULTs due to misplaced call to PyObject_GC_Track. Thanks @jkbjh for this!
+ * Fix #42, complete support for delete. Now also on the C-implementation of the PVectorEvolver.
+ Thanks @itamarst for contributing a whole bunch of Hypothesis test cases covering the evolver operations!
+
+v0.11.6, 2015-09-30
+ * Add +, -, & and | operations to PBag. Thanks @Futrell for this!
+
+v0.11.5, 2015-09-29
+ * Fix bug introduced in 0.11.4 that prevented multi level inheritance from PClass.
+ * Make PClassMeta public for friendlier subclassing
+
+v0.11.4, 2015-09-28
+ * Fix #59, make it possible to create weakrefs to all collection types.
+ Thanks @itamarst for reporting it.
+ * Fix #58, add __str__ to InvariantException. Thanks @tomprince for reporting it.
+
+v0.11.3, 2015-09-15
+ * Fix #57, support pickling of PClasses and PRecords using pmap_field, pvector_field, and pset_field.
+ Thanks @radix for reporting this and submitting a fix for it!
+
+v0.11.2, 2015-09-09
+ * Fix bug causing potential element loss when reallocating PMap. Thanks to @jml for finding
+ this and submitting a PR with a fix!
+ * Removed python 3.2 test build from Travis. There is nothing breaking 3.2 compatibility in this
+ release but there will be no effort moving forward to keep the 3.2 compatibility.
+
+v0.11.1, 2015-08-24
+ * Fix #51, PClass.set() broken when used with string+value argument.
+ * #50, make it possible to specify more than one assertion in an invariant
+ * #48, make it possible to make recursive type references by using a string
+ as type specification.
+
+v0.11.0, 2015-07-11
+ * #42, delete() function added to PVector to allow deletion of elements by index
+ and range. Will perform a full copy of the vector, no structural sharing.
+ Thanks @radix for helping out with this one!
+ * Fix #39, explicitly disallow ordering for PMap and PBag, Python 3 style
+ * Fix #37, PMap.values()/keys()/items() now returns PVectors instead of lists
+
+v0.10.3, 2015-06-13
+ * Fix #40, make it possible to disable the C extension by setting the
+ PYRSISTENT_NO_C_EXTENSION environment variable.
+
+v0.10.2, 2015-06-07
+ * Fix #38, construction from serialized object for pvector/pset/pmap fields.
+
+v0.10.1, 2015-04-27
+ * Fix broken README.rst
+
+v0.10.0, 2015-04-27
+ * New type PClass, a persistent version of a Python object. Related to issues #30 and #32.
+ Thanks @exarkun and @radix for input on this one!
+ * Rename PRecordTypeError -> PTypeError, it is now also raised by PClass
+ * New convenience functions, pvector_field, pmap_field and pset_field to create PRecord/PClass
+ fields for checked collections. Issues #26 and #36. Thanks to @itamarst for this!
+ * Removed deprecated function set_in() on PMap and PVector.
+ * Removed deprecated factory function pclass.
+ * Major internal restructuring breaking pyrsistent.py into multiple files. This should
+ not affect those only using the public interface but if you experience problems please
+ let me know.
+
+v0.9.4, 2015-04-20
+ * Fix #34, PVector now compares against built in list type
+
+v0.9.3, 2015-04-06
+ * Rename pclass back to immutable and deprecate the usage of the pclass function. PClass will be used by
+ a new, different type in upcoming releases.
+ * Documentation strings for the exceptions introduced in 0.9.2.
+
+v0.9.2, 2015-04-03
+ * More informative type errors from checked types, issue #30
+ * Support multiple optional types, issue #28
+
+v0.9.1, 2015-02-25
+ * Multi level serialization for checked types
+
+v0.9.0, 2015-02-25, Lots of new stuff in this release!
+ * Checked types, checked versions of PVector, PMap, PSet that support type and invariant specification.
+ Currently lacking proper documentation but I'm working on it.
+ * set_in() on PVector and PMap are now deprecated and will be removed in the next release.
+ Use transform() instead. set_in() has been updated to use transform() for this release;
+ this means that some corner error cases behave slightly differently than before.
+ * Refactoring of the PVector to unify the type. Should not have any user impact as long as
+ only the public interface of pyrsistent has been used. PVector is now an abstract base class
+ with which the different implementations are registered.
+ * Evolvers have been updated to return themselves for evolving operations to allow function chaining.
+ * Richer exception messages for KeyErrors and IndexErrors specifying the key/index that caused the failure.
+ Thanks @radix for this.
+ * Missing attribute on PMaps when accessing with dot-notation now raises an AttributeError instead of a
+ KeyError. Issue #21.
+ * New function decorator @mutant that freezes all input arguments to a function and the return value.
+ * Add __version__ to pyrsistent.py. Issue #23.
+ * Fix pickling for pset. Issue #24.
+
+v0.8.0, 2015-01-21
+ * New type PRecord. Subtype of PMap that allows explicit, declarative field specification. Thanks @boxed
+ for inspiration!
+ * Efficient transformations of arbitrary complexity on PMap and PVector. Thanks @boxed for inspiration!
+ * Breaking change to the evolver interface. What used to be .pvector(), .pmap() and .pset()
+ on the different evolvers has now been unified so that all evolvers have one method .persistent()
+ to produce the persistent counterpart. Sorry for any inconvenience.
+ * Removed the tests directory from the package.
+ * PMap and PSet now contains a copy-function to closer mimic the interface of the dict and set. These
+ functions will simply return a reference to self.
+ * Removed deprecated alias 'immutable' from pclass.
+
+v0.7.1, 2015-01-17
+ * Fixes #14 where a file executed (unexpectedly) during installation was not python 3 compatible.
+
+v0.7.0, 2015-01-04, No 1.0, instead a bunch of new stuff and one API breaking change to PMap.remove().
+ * Evolvers for pvector, pmap and pset to allow simple and efficient updates of multiple elements
+ in the collection. See the documentation for a closer description.
+ * New method mset on pvector to update multiple values in one operation
+ * Remove deprecated methods merge and merge_with on PMap
+ * Change behavior of PMap.remove, it will now raise a KeyError if the element is not present.
+ New method PMap.discard will instead return the original pmap if the element is not present.
+ This aligns the PMap with how things are done in the PSet and is closer to the behavior of the
+ built in counterparts.
+
+v0.6.3, 2014-11-27
+ * Python 2.6 support, thanks @wrmsr!
+ * PMap.merge/merge_with renamed to update/update_with. merge/merge_with remains but will be
+ removed for 1.0.
+ * This is a release candidate for 1.0! Please be aware that PMap.merge/merge_with and immutable()
+ will be removed for 1.0.
+
+v0.6.2, 2014-11-03
+ * Fix typo causing the pure python vector to be used even if the C implementation was
+ available. Thanks @zerc for finding it!
+
+v0.6.1, 2014-10-31
+ * Renamed 'immutable' to 'pclass' for consistency but left immutable for compatibility.
+
+v0.6.0, 2014-10-25
+ * New data structure, persistent linked list
+ * New data structure, persistent double ended queue
+
+v0.5.0, 2014-09-24
+ * New data structure, persistent bag / multiset
+ * New functions freeze and thaw to recursively convert between python
+ built in data types and corresponding pyrsistent data types.
+ * All data structures can now be pickled
+ * New function merge_in on persistent map which allows a user
+ supplied function to implement the merge strategy.
+
+v0.4.0, 2014-09-20
+ * Full Python 3 support.
+ * Immutable object implemented.
+ * Bug fixes in PVector.__repr__() and PMap.__hash__() and index check of PVector.
+ * Repr changed to be fully cut and paste compatible
+ * Changed assoc() -> set(), assoc_in() -> set_in(), massoc() -> mset().
+ Sorry for the API breaking change but I think those names are more pythonic.
+ * Improved documentation.
+
+v0.3.1, 2014-06-29
+ * assoc() on PSet renamed back to add()
+
+v0.3.0, 2014-06-28
+ * Full Sequence protocol support for PVector
+ * Full Mapping protocol support for PMap
+ * Full Set protocol support for PSet
+ * assoc_in() support for both PMap and PVector
+ * merge() support for PMap
+ * Performance improvements to the PVector C extension speed up allocation
+
+v0.2.1, 2014-06-21
+ * Supply the tests with the distribution
+
+v0.2.0, 2014-06-21
+ * New C extension with an optimized version of the persistent vector
+ * Updated API slightly
+
+v0.1.0, 2013-11-10
+ * Initial release.
+
+
+TODO (in no particular order)
+-----------------------------
+- Versioned data structure where the different versions can be accessed by index?
+- Ordered sets and maps
+- A good performance measurement suite
diff --git a/third_party/python/pyrsistent/LICENCE.mit b/third_party/python/pyrsistent/LICENCE.mit
new file mode 100644
index 0000000000..6609e4c05a
--- /dev/null
+++ b/third_party/python/pyrsistent/LICENCE.mit
@@ -0,0 +1,22 @@
+Copyright (c) 2019 Tobias Gustafsson
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file
diff --git a/third_party/python/pyrsistent/MANIFEST.in b/third_party/python/pyrsistent/MANIFEST.in
new file mode 100644
index 0000000000..155c6351bb
--- /dev/null
+++ b/third_party/python/pyrsistent/MANIFEST.in
@@ -0,0 +1,5 @@
+include *.rst
+include tests/*_test.py
+include tests/memory_profiling.py
+include CHANGES.txt
+include LICENCE.mit \ No newline at end of file
diff --git a/third_party/python/pyrsistent/PKG-INFO b/third_party/python/pyrsistent/PKG-INFO
new file mode 100644
index 0000000000..1d1c159034
--- /dev/null
+++ b/third_party/python/pyrsistent/PKG-INFO
@@ -0,0 +1,742 @@
+Metadata-Version: 1.1
+Name: pyrsistent
+Version: 0.16.0
+Summary: Persistent/Functional/Immutable data structures
+Home-page: http://github.com/tobgu/pyrsistent/
+Author: Tobias Gustafsson
+Author-email: tobias.l.gustafsson@gmail.com
+License: MIT
+Description: Pyrsistent
+ ==========
+ .. image:: https://travis-ci.org/tobgu/pyrsistent.png?branch=master
+ :target: https://travis-ci.org/tobgu/pyrsistent
+
+ .. image:: https://badge.fury.io/py/pyrsistent.svg
+ :target: https://badge.fury.io/py/pyrsistent
+
+ .. image:: https://coveralls.io/repos/tobgu/pyrsistent/badge.svg?branch=master&service=github
+ :target: https://coveralls.io/github/tobgu/pyrsistent?branch=master
+
+
+ .. _Pyrthon: https://www.github.com/tobgu/pyrthon/
+
+ Pyrsistent is a number of persistent collections (by some referred to as functional data structures). Persistent in
+ the sense that they are immutable.
+
+ All methods on a data structure that would normally mutate it instead return a new copy of the structure containing the
+ requested updates. The original structure is left untouched.
+
+ This will simplify the reasoning about what a program does since no hidden side effects ever can take place to these
+ data structures. You can rest assured that the object you hold a reference to will remain the same throughout its
+ lifetime and need not worry that somewhere five stack levels below you in the darkest corner of your application
+ someone has decided to remove that element that you expected to be there.
+
+ Pyrsistent is influenced by persistent data structures such as those found in the standard library of Clojure. The
+ data structures are designed to share common elements through path copying.
+ It aims at taking these concepts and making them as pythonic as possible so that they can be easily integrated into any python
+ program without hassle.
+
+ If you want to go all in on persistent data structures and use literal syntax to define them in your code rather
+ than function calls check out Pyrthon_.
+
+ Examples
+ --------
+ .. _Sequence: collections_
+ .. _Hashable: collections_
+ .. _Mapping: collections_
+ .. _Mappings: collections_
+ .. _Set: collections_
+ .. _collections: https://docs.python.org/3/library/collections.abc.html
+ .. _documentation: http://pyrsistent.readthedocs.org/
+
+ The collection types and key features currently implemented are:
+
+ * PVector_, similar to a python list
+ * PMap_, similar to dict
+ * PSet_, similar to set
+ * PRecord_, a PMap on steroids with fixed fields, optional type and invariant checking and much more
+ * PClass_, a Python class with fixed fields, optional type and invariant checking and much more
+ * `Checked collections`_, PVector, PMap and PSet with optional type and invariance checks and more
+ * PBag, similar to collections.Counter
+ * PList, a classic singly linked list
+ * PDeque, similar to collections.deque
+ * Immutable object type (immutable) built on the named tuple
+ * freeze_ and thaw_ functions to convert between pythons standard collections and pyrsistent collections.
+ * Flexible transformations_ of arbitrarily complex structures built from PMaps and PVectors.
+
+ Below are examples of common usage patterns for some of the structures and features. More information and
+ full documentation for all data structures is available in the documentation_.
+
+ .. _PVector:
+
+ PVector
+ ~~~~~~~
+ With full support for the Sequence_ protocol PVector is meant as a drop in replacement to the built in list from a readers
+ point of view. Write operations of course differ since no in place mutation is done but naming should be in line
+ with corresponding operations on the built in list.
+
+ Support for the Hashable_ protocol also means that it can be used as key in Mappings_.
+
+ Appends are amortized O(1). Random access and insert is log32(n) where n is the size of the vector.
+
+ .. code:: python
+
+ >>> from pyrsistent import v, pvector
+
+ # No mutation of vectors once created, instead they
+ # are "evolved" leaving the original untouched
+ >>> v1 = v(1, 2, 3)
+ >>> v2 = v1.append(4)
+ >>> v3 = v2.set(1, 5)
+ >>> v1
+ pvector([1, 2, 3])
+ >>> v2
+ pvector([1, 2, 3, 4])
+ >>> v3
+ pvector([1, 5, 3, 4])
+
+ # Random access and slicing
+ >>> v3[1]
+ 5
+ >>> v3[1:3]
+ pvector([5, 3])
+
+ # Iteration
+ >>> list(x + 1 for x in v3)
+ [2, 6, 4, 5]
+ >>> pvector(2 * x for x in range(3))
+ pvector([0, 2, 4])
+
+ .. _PMap:
+
+ PMap
+ ~~~~
+ With full support for the Mapping_ protocol PMap is meant as a drop in replacement to the built in dict from a readers point
+ of view. Support for the Hashable_ protocol also means that it can be used as key in other Mappings_.
+
+ Random access and insert is log32(n) where n is the size of the map.
+
+ .. code:: python
+
+ >>> from pyrsistent import m, pmap, v
+
+ # No mutation of maps once created, instead they are
+ # "evolved" leaving the original untouched
+ >>> m1 = m(a=1, b=2)
+ >>> m2 = m1.set('c', 3)
+ >>> m3 = m2.set('a', 5)
+ >>> m1
+ pmap({'a': 1, 'b': 2})
+ >>> m2
+ pmap({'a': 1, 'c': 3, 'b': 2})
+ >>> m3
+ pmap({'a': 5, 'c': 3, 'b': 2})
+ >>> m3['a']
+ 5
+
+ # Evolution of nested persistent structures
+ >>> m4 = m(a=5, b=6, c=v(1, 2))
+ >>> m4.transform(('c', 1), 17)
+ pmap({'a': 5, 'c': pvector([1, 17]), 'b': 6})
+ >>> m5 = m(a=1, b=2)
+
+ # Evolve by merging with other mappings
+ >>> m5.update(m(a=2, c=3), {'a': 17, 'd': 35})
+ pmap({'a': 17, 'c': 3, 'b': 2, 'd': 35})
+ >>> pmap({'x': 1, 'y': 2}) + pmap({'y': 3, 'z': 4})
+ pmap({'y': 3, 'x': 1, 'z': 4})
+
+ # Dict-like methods to convert to list and iterate
+ >>> m3.items()
+ pvector([('a', 5), ('c', 3), ('b', 2)])
+ >>> list(m3)
+ ['a', 'c', 'b']
+
+ .. _PSet:
+
+ PSet
+ ~~~~
+ With full support for the Set_ protocol PSet is meant as a drop in replacement to the built in set from a readers point
+ of view. Support for the Hashable_ protocol also means that it can be used as key in Mappings_.
+
+ Random access and insert is log32(n) where n is the size of the set.
+
+ .. code:: python
+
+ >>> from pyrsistent import s
+
+ # No mutation of sets once created, you know the story...
+ >>> s1 = s(1, 2, 3, 2)
+ >>> s2 = s1.add(4)
+ >>> s3 = s1.remove(1)
+ >>> s1
+ pset([1, 2, 3])
+ >>> s2
+ pset([1, 2, 3, 4])
+ >>> s3
+ pset([2, 3])
+
+ # Full support for set operations
+ >>> s1 | s(3, 4, 5)
+ pset([1, 2, 3, 4, 5])
+ >>> s1 & s(3, 4, 5)
+ pset([3])
+ >>> s1 < s2
+ True
+ >>> s1 < s(3, 4, 5)
+ False
+
+ .. _PRecord:
+
+ PRecord
+ ~~~~~~~
+ A PRecord is a PMap with a fixed set of specified fields. Records are declared as python classes inheriting
+ from PRecord. Because it is a PMap it has full support for all Mapping methods such as iteration and element
+ access using subscript notation.
+
+ .. code:: python
+
+ >>> from pyrsistent import PRecord, field
+ >>> class ARecord(PRecord):
+ ... x = field()
+ ...
+ >>> r = ARecord(x=3)
+ >>> r
+ ARecord(x=3)
+ >>> r.x
+ 3
+ >>> r.set(x=2)
+ ARecord(x=2)
+ >>> r.set(y=2)
+ Traceback (most recent call last):
+ AttributeError: 'y' is not among the specified fields for ARecord
+
+ Type information
+ ****************
+ It is possible to add type information to the record to enforce type checks. Multiple allowed types can be specified
+ by providing an iterable of types.
+
+ .. code:: python
+
+ >>> class BRecord(PRecord):
+ ... x = field(type=int)
+ ... y = field(type=(int, type(None)))
+ ...
+ >>> BRecord(x=3, y=None)
+ BRecord(y=None, x=3)
+ >>> BRecord(x=3.0)
+ Traceback (most recent call last):
+ PTypeError: Invalid type for field BRecord.x, was float
+
+
+ Custom types (classes) that are iterable should be wrapped in a tuple to prevent their
+ members being added to the set of valid types. Although Enums in particular are now
+ supported without wrapping, see #83 for more information.
+
+ Mandatory fields
+ ****************
+ Fields are not mandatory by default but can be specified as such. If fields are missing an
+ *InvariantException* will be thrown which contains information about the missing fields.
+
+ .. code:: python
+
+ >>> from pyrsistent import InvariantException
+ >>> class CRecord(PRecord):
+ ... x = field(mandatory=True)
+ ...
+ >>> r = CRecord(x=3)
+ >>> try:
+ ... r.discard('x')
+ ... except InvariantException as e:
+ ... print(e.missing_fields)
+ ...
+ ('CRecord.x',)
+
+ Invariants
+ **********
+ It is possible to add invariants that must hold when evolving the record. Invariants can be
+ specified on both field and record level. If invariants fail an *InvariantException* will be
+ thrown which contains information about the failing invariants. An invariant function should
+ return a tuple consisting of a boolean that tells if the invariant holds or not and an object
+ describing the invariant. This object can later be used to identify which invariant that failed.
+
+ The global invariant function is only executed if all field invariants hold.
+
+ Global invariants are inherited by subclasses.
+
+ .. code:: python
+
+ >>> class RestrictedVector(PRecord):
+ ... __invariant__ = lambda r: (r.y >= r.x, 'x larger than y')
+ ... x = field(invariant=lambda x: (x > 0, 'x negative'))
+ ... y = field(invariant=lambda y: (y > 0, 'y negative'))
+ ...
+ >>> r = RestrictedVector(y=3, x=2)
+ >>> try:
+ ... r.set(x=-1, y=-2)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ ('y negative', 'x negative')
+ >>> try:
+ ... r.set(x=2, y=1)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ ('x larger than y',)
+
+ Invariants may also contain multiple assertions. For those cases the invariant function should
+ return a tuple of invariant tuples as described above. This structure is reflected in the
+ invariant_errors attribute of the exception which will contain tuples with data from all failed
+ invariants. Eg:
+
+ .. code:: python
+
+ >>> class EvenX(PRecord):
+ ... x = field(invariant=lambda x: ((x > 0, 'x negative'), (x % 2 == 0, 'x odd')))
+ ...
+ >>> try:
+ ... EvenX(x=-1)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ (('x negative', 'x odd'),)
+
+
+ Factories
+ *********
+ It's possible to specify factory functions for fields. The factory function receives whatever
+ is supplied as field value and the actual value returned by the factory is assigned to the field
+ given that any type and invariant checks hold.
+ PRecords have a default factory specified as a static function on the class, create(). It takes
+ a *Mapping* as argument and returns an instance of the specific record.
+ If a record has fields of type PRecord the create() method of that record will
+ be called to create the "sub record" if no factory has explicitly been specified to override
+ this behaviour.
+
+ .. code:: python
+
+ >>> class DRecord(PRecord):
+ ... x = field(factory=int)
+ ...
+ >>> class ERecord(PRecord):
+ ... d = field(type=DRecord)
+ ...
+ >>> ERecord.create({'d': {'x': '1'}})
+ ERecord(d=DRecord(x=1))
+
+ Collection fields
+ *****************
+ It is also possible to have fields with ``pyrsistent`` collections.
+
+ .. code:: python
+
+ >>> from pyrsistent import pset_field, pmap_field, pvector_field
+ >>> class MultiRecord(PRecord):
+ ... set_of_ints = pset_field(int)
+ ... map_int_to_str = pmap_field(int, str)
+ ... vector_of_strs = pvector_field(str)
+ ...
+
+ Serialization
+ *************
+ PRecords support serialization back to dicts. Default serialization will take keys and values
+ "as is" and output them into a dict. It is possible to specify custom serialization functions
+ to take care of fields that require special treatment.
+
+ .. code:: python
+
+ >>> from datetime import date
+ >>> class Person(PRecord):
+ ... name = field(type=unicode)
+ ... birth_date = field(type=date,
+ ... serializer=lambda format, d: d.strftime(format['date']))
+ ...
+ >>> john = Person(name=u'John', birth_date=date(1985, 10, 21))
+ >>> john.serialize({'date': '%Y-%m-%d'})
+ {'birth_date': '1985-10-21', 'name': u'John'}
+
+
+ .. _instar: https://github.com/boxed/instar/
+
+ .. _PClass:
+
+ PClass
+ ~~~~~~
+ A PClass is a python class with a fixed set of specified fields. PClasses are declared as python classes inheriting
+ from PClass. It is defined the same way that PRecords are and behaves like a PRecord in all aspects except that it
+ is not a PMap and hence not a collection but rather a plain Python object.
+
+ .. code:: python
+
+ >>> from pyrsistent import PClass, field
+ >>> class AClass(PClass):
+ ... x = field()
+ ...
+ >>> a = AClass(x=3)
+ >>> a
+ AClass(x=3)
+ >>> a.x
+ 3
+
+
+ Checked collections
+ ~~~~~~~~~~~~~~~~~~~
+ Checked collections currently come in three flavors: CheckedPVector, CheckedPMap and CheckedPSet.
+
+ .. code:: python
+
+ >>> from pyrsistent import CheckedPVector, CheckedPMap, CheckedPSet, thaw
+ >>> class Positives(CheckedPSet):
+ ... __type__ = (long, int)
+ ... __invariant__ = lambda n: (n >= 0, 'Negative')
+ ...
+ >>> class Lottery(PRecord):
+ ... name = field(type=str)
+ ... numbers = field(type=Positives, invariant=lambda p: (len(p) > 0, 'No numbers'))
+ ...
+ >>> class Lotteries(CheckedPVector):
+ ... __type__ = Lottery
+ ...
+ >>> class LotteriesByDate(CheckedPMap):
+ ... __key_type__ = date
+ ... __value_type__ = Lotteries
+ ...
+ >>> lotteries = LotteriesByDate.create({date(2015, 2, 15): [{'name': 'SuperLotto', 'numbers': {1, 2, 3}},
+ ... {'name': 'MegaLotto', 'numbers': {4, 5, 6}}],
+ ... date(2015, 2, 16): [{'name': 'SuperLotto', 'numbers': {3, 2, 1}},
+ ... {'name': 'MegaLotto', 'numbers': {6, 5, 4}}]})
+ >>> lotteries
+ LotteriesByDate({datetime.date(2015, 2, 15): Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')]), datetime.date(2015, 2, 16): Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')])})
+
+ # The checked versions support all operations that the corresponding
+ # unchecked types do
+ >>> lottery_0215 = lotteries[date(2015, 2, 15)]
+ >>> lottery_0215.transform([0, 'name'], 'SuperDuperLotto')
+ Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperDuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')])
+
+ # But also makes asserts that types and invariants hold
+ >>> lottery_0215.transform([0, 'name'], 999)
+ Traceback (most recent call last):
+ PTypeError: Invalid type for field Lottery.name, was int
+
+ >>> lottery_0215.transform([0, 'numbers'], set())
+ Traceback (most recent call last):
+ InvariantException: Field invariant failed
+
+ # They can be converted back to python built ins with either thaw()
+ # or serialize() (which provides possibilities to customize serialization)
+ >>> thaw(lottery_0215)
+ [{'numbers': set([1, 2, 3]), 'name': 'SuperLotto'}, {'numbers': set([4, 5, 6]), 'name': 'MegaLotto'}]
+ >>> lottery_0215.serialize()
+ [{'numbers': set([1, 2, 3]), 'name': 'SuperLotto'}, {'numbers': set([4, 5, 6]), 'name': 'MegaLotto'}]
+
+ .. _transformations:
+
+ Transformations
+ ~~~~~~~~~~~~~~~
+ Transformations are inspired by the cool library instar_ for Clojure. They let you evolve PMaps and PVectors
+ with arbitrarily deep/complex nesting using simple syntax and flexible matching syntax.
+
+ The first argument to transformation is the path that points out the value to transform. The
+ second is the transformation to perform. If the transformation is callable it will be applied
+ to the value(s) matching the path. The path may also contain callables. In that case they are
+ treated as matchers. If the matcher returns True for a specific key it is considered for transformation.
+
+ .. code:: python
+
+ # Basic examples
+ >>> from pyrsistent import inc, freeze, thaw, rex, ny, discard
+ >>> v1 = freeze([1, 2, 3, 4, 5])
+ >>> v1.transform([2], inc)
+ pvector([1, 2, 4, 4, 5])
+ >>> v1.transform([lambda ix: 0 < ix < 4], 8)
+ pvector([1, 8, 8, 8, 5])
+ >>> v1.transform([lambda ix, v: ix == 0 or v == 5], 0)
+ pvector([0, 2, 3, 4, 0])
+
+ # The (a)ny matcher can be used to match anything
+ >>> v1.transform([ny], 8)
+ pvector([8, 8, 8, 8, 8])
+
+ # Regular expressions can be used for matching
+ >>> scores = freeze({'John': 12, 'Joseph': 34, 'Sara': 23})
+ >>> scores.transform([rex('^Jo')], 0)
+ pmap({'Joseph': 0, 'Sara': 23, 'John': 0})
+
+ # Transformations can be done on arbitrarily deep structures
+ >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
+ ... {'author': 'Steve', 'content': 'A slightly longer article'}],
+ ... 'weather': {'temperature': '11C', 'wind': '5m/s'}})
+ >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
+ >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
+ >>> very_short_news.articles[0].content
+ 'A short article'
+ >>> very_short_news.articles[1].content
+ 'A slightly long...'
+
+ # When nothing has been transformed the original data structure is kept
+ >>> short_news is news_paper
+ True
+ >>> very_short_news is news_paper
+ False
+ >>> very_short_news.articles[0] is news_paper.articles[0]
+ True
+
+ # There is a special transformation that can be used to discard elements. Also
+ # multiple transformations can be applied in one call
+ >>> thaw(news_paper.transform(['weather'], discard, ['articles', ny, 'content'], discard))
+ {'articles': [{'author': 'Sara'}, {'author': 'Steve'}]}
+
+ Evolvers
+ ~~~~~~~~
+ PVector, PMap and PSet all have support for a concept dubbed *evolvers*. An evolver acts like a mutable
+ view of the underlying persistent data structure with "transaction like" semantics. No updates of the original
+ data structure is ever performed, it is still fully immutable.
+
+ The evolvers have a very limited API by design to discourage excessive, and inappropriate, usage as that would
+ take us down the mutable road. In principle only basic mutation and element access functions are supported.
+ Check out the documentation_ of each data structure for specific examples.
+
+ Examples of when you may want to use an evolver instead of working directly with the data structure include:
+
+ * Multiple updates are done to the same data structure and the intermediate results are of no
+ interest. In this case using an evolver may be more efficient and easier to work with.
+ * You need to pass a vector into a legacy function or a function that you have no control
+ over which performs in place mutations. In this case pass an evolver instance
+ instead and then create a new pvector from the evolver once the function returns.
+
+ .. code:: python
+
+ >>> from pyrsistent import v
+
+ # In place mutation as when working with the built in counterpart
+ >>> v1 = v(1, 2, 3)
+ >>> e = v1.evolver()
+ >>> e[1] = 22
+ >>> e = e.append(4)
+ >>> e = e.extend([5, 6])
+ >>> e[5] += 1
+ >>> len(e)
+ 6
+
+ # The evolver is considered *dirty* when it contains changes compared to the underlying vector
+ >>> e.is_dirty()
+ True
+
+ # But the underlying pvector still remains untouched
+ >>> v1
+ pvector([1, 2, 3])
+
+ # Once satisfied with the updates you can produce a new pvector containing the updates.
+ # The new pvector will share data with the original pvector in the same way that would have
+ # been done if only using operations on the pvector.
+ >>> v2 = e.persistent()
+ >>> v2
+ pvector([1, 22, 3, 4, 5, 7])
+
+ # The evolver is now no longer considered *dirty* as it contains no differences compared to the
+ # pvector just produced.
+ >>> e.is_dirty()
+ False
+
+ # You may continue to work with the same evolver without affecting the content of v2
+ >>> e[0] = 11
+
+ # Or create a new evolver from v2. The two evolvers can be updated independently but will both
+ # share data with v2 where possible.
+ >>> e2 = v2.evolver()
+ >>> e2[0] = 1111
+ >>> e.persistent()
+ pvector([11, 22, 3, 4, 5, 7])
+ >>> e2.persistent()
+ pvector([1111, 22, 3, 4, 5, 7])
+
+ .. _freeze:
+ .. _thaw:
+
+ freeze and thaw
+ ~~~~~~~~~~~~~~~
+ These functions are great when your cozy immutable world has to interact with the evil mutable world outside.
+
+ .. code:: python
+
+ >>> from pyrsistent import freeze, thaw, v, m
+ >>> freeze([1, {'a': 3}])
+ pvector([1, pmap({'a': 3})])
+ >>> thaw(v(1, m(a=3)))
+ [1, {'a': 3}]
+
+ Compatibility
+ -------------
+
+ Pyrsistent is developed and tested on Python 2.7, 3.5, 3.6, 3.7 and PyPy (Python 2 and 3 compatible). It will most
+ likely work on all other versions >= 3.4 but no guarantees are given. :)
+
+ Compatibility issues
+ ~~~~~~~~~~~~~~~~~~~~
+
+ .. _27: https://github.com/tobgu/pyrsistent/issues/27
+
+ There is currently one known compatibility issue when comparing built in sets and frozensets to PSets as discussed in 27_.
+ It affects python 2 versions < 2.7.8 and python 3 versions < 3.4.0 and is due to a bug described in
+ http://bugs.python.org/issue8743.
+
+ Comparisons will fail or be incorrect when using the set/frozenset as left hand side of the comparison. As a workaround
+ you need to either upgrade Python to a more recent version, avoid comparing sets/frozensets with PSets or always make
+ sure to convert both sides of the comparison to the same type before performing the comparison.
+
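
(Editorial illustration, not part of the upstream PKG-INFO: the "convert both sides to the same type" workaround described above might look like the sketch below; the values are arbitrary.)

    from pyrsistent import s

    ps = s(1, 2, 3)
    fs = frozenset([1, 2, 3])

    # On affected interpreters, normalise both sides before comparing.
    assert set(ps) == fs    # compare as built-in sets
    assert ps == s(*fs)     # or compare as psets
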
+ Performance
+ -----------
+
+ Pyrsistent is developed with performance in mind. Still, while some operations are nearly on par with their built in,
+ mutable, counterparts in terms of speed, other operations are slower. In the cases where attempts at
+ optimizations have been done, speed has generally been valued over space.
+
+ Pyrsistent comes with two API compatible flavors of PVector (on which PMap and PSet are based), one pure Python
+ implementation and one implemented as a C extension. The latter generally being 2 - 20 times faster than the former.
+ The C extension will be used automatically when possible.
+
+ The pure python implementation is fully PyPy compatible. Running it under PyPy speeds operations up considerably if
+ the structures are used heavily (if JITed); in some cases the performance is almost on par with the built in counterparts.
+
+ Type hints
+ ----------
+
+ PEP 561 style type hints for use with mypy and various editors are available for most types and functions in pyrsistent.
+
+ Type classes for annotating your own code with pyrsistent types are also available under pyrsistent.typing.
+
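
(Editorial illustration, not part of the upstream PKG-INFO: a small sketch of how the pyrsistent.typing classes mentioned above can be used in annotations; the function and variable names are invented for the example.)

    from pyrsistent import pmap, pvector
    from pyrsistent.typing import PMap, PVector

    def index_by_name(names: PVector[str]) -> PMap[str, int]:
        # The annotations come from pyrsistent.typing; the runtime values
        # are ordinary pyrsistent collections.
        return pmap({name: i for i, name in enumerate(names)})

    names: PVector[str] = pvector(["ada", "grace"])
    print(index_by_name(names))  # a pmap mapping each name to its index
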
+ Installation
+ ------------
+
+ pip install pyrsistent
+
+ Documentation
+ -------------
+
+ Available at http://pyrsistent.readthedocs.org/
+
+ Brief presentation available at http://slides.com/tobiasgustafsson/immutability-and-python/
+
+ Contributors
+ ------------
+
+ Tobias Gustafsson https://github.com/tobgu
+
+ Christopher Armstrong https://github.com/radix
+
+ Anders Hovmöller https://github.com/boxed
+
+ Itamar Turner-Trauring https://github.com/itamarst
+
+ Jonathan Lange https://github.com/jml
+
+ Richard Futrell https://github.com/Futrell
+
+ Jakob Hollenstein https://github.com/jkbjh
+
+ David Honour https://github.com/foolswood
+
+ David R. MacIver https://github.com/DRMacIver
+
+ Marcus Ewert https://github.com/sarum90
+
+ Jean-Paul Calderone https://github.com/exarkun
+
+ Douglas Treadwell https://github.com/douglas-treadwell
+
+ Travis Parker https://github.com/teepark
+
+ Julian Berman https://github.com/Julian
+
+ Dennis Tomas https://github.com/dtomas
+
+ Neil Vyas https://github.com/neilvyas
+
+ doozr https://github.com/doozr
+
+ Kamil Galuszka https://github.com/galuszkak
+
+ Tsuyoshi Hombashi https://github.com/thombashi
+
+ nattofriends https://github.com/nattofriends
+
+ agberk https://github.com/agberk
+
+ Waleed Khan https://github.com/arxanas
+
+ Jean-Louis Fuchs https://github.com/ganwell
+
+ Carlos Corbacho https://github.com/ccorbacho
+
+ Felix Yan https://github.com/felixonmars
+
+ benrg https://github.com/benrg
+
+ Jere Lahelma https://github.com/je-l
+
+ Max Taggart https://github.com/MaxTaggart
+
+ Vincent Philippon https://github.com/vphilippon
+
+ Semen Zhydenko https://github.com/ss18
+
+ Till Varoquaux https://github.com/till-varoquaux
+
+ Michal Kowalik https://github.com/michalvi
+
+ ossdev07 https://github.com/ossdev07
+
+ Kerry Olesen https://github.com/qhesz
+
+ johnthagen https://github.com/johnthagen
+
+ Contributing
+ ------------
+
+ Want to contribute? That's great! If you experience problems please log them on GitHub. If you want to contribute code,
+ please fork the repository and submit a pull request.
+
+ Run tests
+ ~~~~~~~~~
+ .. _tox: https://tox.readthedocs.io/en/latest/
+
+ Tests can be executed using tox_.
+
+ Install tox: ``pip install tox``
+
+ Run test for Python 2.7: ``tox -epy27``
+
+ Release
+ ~~~~~~~
+ * Update CHANGES.txt
+ * Update README with any new contributors and potential info needed.
+ * Update _pyrsistent_version.py
+ * python setup.py sdist upload
+ * Commit and tag with new version: git add -u . && git commit -m 'Prepare version vX.Y.Z' && git tag -a vX.Y.Z -m 'vX.Y.Z'
+ * Push commit and tags: git push && git push --tags
+
+ Project status
+ --------------
+ Pyrsistent can be considered stable and mature (who knows, there may even be a 1.0 some day :-)). The project is
+ maintained, bugs fixed, PRs reviewed and merged and new releases made. I currently do not have time for development
+ of new features or functionality which I don't have use for myself. I'm more than happy to take PRs for new
+ functionality though!
+
+ There are a bunch of issues marked with ``enhancement`` and ``help wanted`` that contain requests for new functionality
+ that would be nice to include. The level of difficulty and extent of the issues varies; please reach out to me if you're
+ interested in working on any of them.
+
+ If you feel that you have a grand master plan for where you would like Pyrsistent to go and have the time to put into
+ it please don't hesitate to discuss this with me and submit PRs for it. If all goes well I'd be more than happy to add
+ additional maintainers to the project!
+
+Platform: UNKNOWN
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: Implementation :: PyPy
diff --git a/third_party/python/pyrsistent/README b/third_party/python/pyrsistent/README
new file mode 100644
index 0000000000..a4c24e49bd
--- /dev/null
+++ b/third_party/python/pyrsistent/README
@@ -0,0 +1,725 @@
+Pyrsistent
+==========
+.. image:: https://travis-ci.org/tobgu/pyrsistent.png?branch=master
+ :target: https://travis-ci.org/tobgu/pyrsistent
+
+.. image:: https://badge.fury.io/py/pyrsistent.svg
+ :target: https://badge.fury.io/py/pyrsistent
+
+.. image:: https://coveralls.io/repos/tobgu/pyrsistent/badge.svg?branch=master&service=github
+ :target: https://coveralls.io/github/tobgu/pyrsistent?branch=master
+
+
+.. _Pyrthon: https://www.github.com/tobgu/pyrthon/
+
+Pyrsistent is a number of persistent collections (by some referred to as functional data structures). Persistent in
+the sense that they are immutable.
+
+All methods on a data structure that would normally mutate it instead return a new copy of the structure containing the
+requested updates. The original structure is left untouched.
+
+This will simplify the reasoning about what a program does since no hidden side effects ever can take place to these
+data structures. You can rest assured that the object you hold a reference to will remain the same throughout its
+lifetime and need not worry that somewhere five stack levels below you in the darkest corner of your application
+someone has decided to remove that element that you expected to be there.
+
+Pyrsistent is influenced by persistent data structures such as those found in the standard library of Clojure. The
+data structures are designed to share common elements through path copying.
+It aims at taking these concepts and making them as pythonic as possible so that they can be easily integrated into any python
+program without hassle.
+
+If you want to go all in on persistent data structures and use literal syntax to define them in your code rather
+than function calls check out Pyrthon_.
+
+Examples
+--------
+.. _Sequence: collections_
+.. _Hashable: collections_
+.. _Mapping: collections_
+.. _Mappings: collections_
+.. _Set: collections_
+.. _collections: https://docs.python.org/3/library/collections.abc.html
+.. _documentation: http://pyrsistent.readthedocs.org/
+
+The collection types and key features currently implemented are:
+
+* PVector_, similar to a python list
+* PMap_, similar to dict
+* PSet_, similar to set
+* PRecord_, a PMap on steroids with fixed fields, optional type and invariant checking and much more
+* PClass_, a Python class with fixed fields, optional type and invariant checking and much more
+* `Checked collections`_, PVector, PMap and PSet with optional type and invariance checks and more
+* PBag, similar to collections.Counter
+* PList, a classic singly linked list
+* PDeque, similar to collections.deque
+* Immutable object type (immutable) built on the named tuple
+* freeze_ and thaw_ functions to convert between pythons standard collections and pyrsistent collections.
+* Flexible transformations_ of arbitrarily complex structures built from PMaps and PVectors.
+
+Below are examples of common usage patterns for some of the structures and features. More information and
+full documentation for all data structures is available in the documentation_.
+
+.. _PVector:
+
+PVector
+~~~~~~~
+With full support for the Sequence_ protocol PVector is meant as a drop in replacement to the built in list from a readers
+point of view. Write operations of course differ since no in place mutation is done but naming should be in line
+with corresponding operations on the built in list.
+
+Support for the Hashable_ protocol also means that it can be used as key in Mappings_.
+
+Appends are amortized O(1). Random access and insert is log32(n) where n is the size of the vector.
+
+.. code:: python
+
+ >>> from pyrsistent import v, pvector
+
+ # No mutation of vectors once created, instead they
+ # are "evolved" leaving the original untouched
+ >>> v1 = v(1, 2, 3)
+ >>> v2 = v1.append(4)
+ >>> v3 = v2.set(1, 5)
+ >>> v1
+ pvector([1, 2, 3])
+ >>> v2
+ pvector([1, 2, 3, 4])
+ >>> v3
+ pvector([1, 5, 3, 4])
+
+ # Random access and slicing
+ >>> v3[1]
+ 5
+ >>> v3[1:3]
+ pvector([5, 3])
+
+ # Iteration
+ >>> list(x + 1 for x in v3)
+ [2, 6, 4, 5]
+ >>> pvector(2 * x for x in range(3))
+ pvector([0, 2, 4])
+
+.. _PMap:
+
+PMap
+~~~~
+With full support for the Mapping_ protocol, PMap is meant as a drop-in replacement for the built-in dict from a reader's point
+of view. Support for the Hashable_ protocol also means that it can be used as a key in other Mappings_.
+
+Random access and insert are log32(n), where n is the size of the map.
+
+.. code:: python
+
+ >>> from pyrsistent import m, pmap, v
+
+ # No mutation of maps once created, instead they are
+ # "evolved" leaving the original untouched
+ >>> m1 = m(a=1, b=2)
+ >>> m2 = m1.set('c', 3)
+ >>> m3 = m2.set('a', 5)
+ >>> m1
+ pmap({'a': 1, 'b': 2})
+ >>> m2
+ pmap({'a': 1, 'c': 3, 'b': 2})
+ >>> m3
+ pmap({'a': 5, 'c': 3, 'b': 2})
+ >>> m3['a']
+ 5
+
+ # Evolution of nested persistent structures
+ >>> m4 = m(a=5, b=6, c=v(1, 2))
+ >>> m4.transform(('c', 1), 17)
+ pmap({'a': 5, 'c': pvector([1, 17]), 'b': 6})
+ >>> m5 = m(a=1, b=2)
+
+ # Evolve by merging with other mappings
+ >>> m5.update(m(a=2, c=3), {'a': 17, 'd': 35})
+ pmap({'a': 17, 'c': 3, 'b': 2, 'd': 35})
+ >>> pmap({'x': 1, 'y': 2}) + pmap({'y': 3, 'z': 4})
+ pmap({'y': 3, 'x': 1, 'z': 4})
+
+ # Dict-like methods to convert to list and iterate
+ >>> m3.items()
+ pvector([('a', 5), ('c', 3), ('b', 2)])
+ >>> list(m3)
+ ['a', 'c', 'b']
+
+.. _PSet:
+
+PSet
+~~~~
+With full support for the Set_ protocol, PSet is meant as a drop-in replacement for the built-in set from a reader's point
+of view. Support for the Hashable_ protocol also means that it can be used as a key in Mappings_.
+
+Random access and insert are log32(n), where n is the size of the set.
+
+.. code:: python
+
+ >>> from pyrsistent import s
+
+ # No mutation of sets once created, you know the story...
+ >>> s1 = s(1, 2, 3, 2)
+ >>> s2 = s1.add(4)
+ >>> s3 = s1.remove(1)
+ >>> s1
+ pset([1, 2, 3])
+ >>> s2
+ pset([1, 2, 3, 4])
+ >>> s3
+ pset([2, 3])
+
+ # Full support for set operations
+ >>> s1 | s(3, 4, 5)
+ pset([1, 2, 3, 4, 5])
+ >>> s1 & s(3, 4, 5)
+ pset([3])
+ >>> s1 < s2
+ True
+ >>> s1 < s(3, 4, 5)
+ False
+
+.. _PRecord:
+
+PRecord
+~~~~~~~
+A PRecord is a PMap with a fixed set of specified fields. Records are declared as Python classes inheriting
+from PRecord. Because it is a PMap, it has full support for all Mapping methods such as iteration and element
+access using subscript notation.
+
+.. code:: python
+
+ >>> from pyrsistent import PRecord, field
+ >>> class ARecord(PRecord):
+ ... x = field()
+ ...
+ >>> r = ARecord(x=3)
+ >>> r
+ ARecord(x=3)
+ >>> r.x
+ 3
+ >>> r.set(x=2)
+ ARecord(x=2)
+ >>> r.set(y=2)
+ Traceback (most recent call last):
+ AttributeError: 'y' is not among the specified fields for ARecord
+
+Type information
+****************
+It is possible to add type information to the record to enforce type checks. Multiple allowed types can be specified
+by providing an iterable of types.
+
+.. code:: python
+
+ >>> class BRecord(PRecord):
+ ... x = field(type=int)
+ ... y = field(type=(int, type(None)))
+ ...
+ >>> BRecord(x=3, y=None)
+ BRecord(y=None, x=3)
+ >>> BRecord(x=3.0)
+ Traceback (most recent call last):
+ PTypeError: Invalid type for field BRecord.x, was float
+
+
+Custom types (classes) that are iterable should be wrapped in a tuple to prevent their
+members from being added to the set of valid types. Enums in particular are now
+supported without wrapping; see #83 for more information.
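+
+As a minimal, hypothetical sketch of why the wrapping matters (``IterableMeta``, ``Number`` and ``HRecord`` are
+made-up names, not part of pyrsistent):
+
+.. code:: python
+
+ >>> from pyrsistent import PRecord, field
+
+ # Number is itself iterable via its metaclass, so without the wrapping tuple
+ # field() would pick up int and float as the allowed types instead of Number.
+ >>> class IterableMeta(type):
+ ...     def __iter__(cls):
+ ...         return iter((int, float))
+ ...
+ >>> class Number(object, metaclass=IterableMeta):
+ ...     pass
+ ...
+ >>> class HRecord(PRecord):
+ ...     num = field(type=(Number,))   # the one-element tuple keeps Number itself as the type
+ ...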
+
+Mandatory fields
+****************
+Fields are not mandatory by default but can be specified as such. If fields are missing, an
+*InvariantException* will be thrown which contains information about the missing fields.
+
+.. code:: python
+
+ >>> from pyrsistent import InvariantException
+ >>> class CRecord(PRecord):
+ ... x = field(mandatory=True)
+ ...
+ >>> r = CRecord(x=3)
+ >>> try:
+ ... r.discard('x')
+ ... except InvariantException as e:
+ ... print(e.missing_fields)
+ ...
+ ('CRecord.x',)
+
+Invariants
+**********
+It is possible to add invariants that must hold when evolving the record. Invariants can be
+specified on both field and record level. If invariants fail, an *InvariantException* will be
+thrown which contains information about the failing invariants. An invariant function should
+return a tuple consisting of a boolean that tells whether the invariant holds and an object
+describing the invariant. This object can later be used to identify which invariant failed.
+
+The global invariant function is only executed if all field invariants hold.
+
+Global invariants are inherited by subclasses.
+
+.. code:: python
+
+ >>> class RestrictedVector(PRecord):
+ ... __invariant__ = lambda r: (r.y >= r.x, 'x larger than y')
+ ... x = field(invariant=lambda x: (x > 0, 'x negative'))
+ ... y = field(invariant=lambda y: (y > 0, 'y negative'))
+ ...
+ >>> r = RestrictedVector(y=3, x=2)
+ >>> try:
+ ... r.set(x=-1, y=-2)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ ('y negative', 'x negative')
+ >>> try:
+ ... r.set(x=2, y=1)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ ('x larger than y',)
+
+Invariants may also contain multiple assertions. In those cases the invariant function should
+return a tuple of invariant tuples as described above. This structure is reflected in the
+invariant_errors attribute of the exception, which will contain tuples with data from all failed
+invariants. E.g.:
+
+.. code:: python
+
+ >>> class EvenX(PRecord):
+ ... x = field(invariant=lambda x: ((x > 0, 'x negative'), (x % 2 == 0, 'x odd')))
+ ...
+ >>> try:
+ ... EvenX(x=-1)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ (('x negative', 'x odd'),)
+
+
+Factories
+*********
+It's possible to specify factory functions for fields. The factory function receives whatever
+is supplied as the field value, and the value returned by the factory is assigned to the field,
+given that any type and invariant checks hold.
+PRecords have a default factory specified as a static function on the class, create(). It takes
+a *Mapping* as argument and returns an instance of the specific record.
+If a record has fields of type PRecord, the create() method of that record will
+be called to create the "sub record" if no factory has explicitly been specified to override
+this behaviour.
+
+.. code:: python
+
+ >>> class DRecord(PRecord):
+ ... x = field(factory=int)
+ ...
+ >>> class ERecord(PRecord):
+ ... d = field(type=DRecord)
+ ...
+ >>> ERecord.create({'d': {'x': '1'}})
+ ERecord(d=DRecord(x=1))
+
+Collection fields
+*****************
+It is also possible to have fields with ``pyrsistent`` collections.
+
+.. code:: python
+
+ >>> from pyrsistent import pset_field, pmap_field, pvector_field
+ >>> class MultiRecord(PRecord):
+ ... set_of_ints = pset_field(int)
+ ... map_int_to_str = pmap_field(int, str)
+ ... vector_of_strs = pvector_field(str)
+ ...
+
+Serialization
+*************
+PRecords support serialization back to dicts. Default serialization will take keys and values
+"as is" and output them into a dict. It is possible to specify custom serialization functions
+to take care of fields that require special treatment.
+
+.. code:: python
+
+ >>> from datetime import date
+ >>> class Person(PRecord):
+ ... name = field(type=unicode)
+ ... birth_date = field(type=date,
+ ... serializer=lambda format, d: d.strftime(format['date']))
+ ...
+ >>> john = Person(name=u'John', birth_date=date(1985, 10, 21))
+ >>> john.serialize({'date': '%Y-%m-%d'})
+ {'birth_date': '1985-10-21', 'name': u'John'}
+
+
+.. _instar: https://github.com/boxed/instar/
+
+.. _PClass:
+
+PClass
+~~~~~~
+A PClass is a Python class with a fixed set of specified fields. PClasses are declared as Python classes inheriting
+from PClass. It is defined the same way that a PRecord is and behaves like a PRecord in all aspects except that it
+is not a PMap and hence not a collection but rather a plain Python object.
+
+.. code:: python
+
+ >>> from pyrsistent import PClass, field
+ >>> class AClass(PClass):
+ ... x = field()
+ ...
+ >>> a = AClass(x=3)
+ >>> a
+ AClass(x=3)
+ >>> a.x
+ 3
+
+
+Checked collections
+~~~~~~~~~~~~~~~~~~~
+Checked collections currently come in three flavors: CheckedPVector, CheckedPMap and CheckedPSet.
+
+.. code:: python
+
+ >>> from pyrsistent import CheckedPVector, CheckedPMap, CheckedPSet, thaw
+ >>> class Positives(CheckedPSet):
+ ... __type__ = (long, int)
+ ... __invariant__ = lambda n: (n >= 0, 'Negative')
+ ...
+ >>> class Lottery(PRecord):
+ ... name = field(type=str)
+ ... numbers = field(type=Positives, invariant=lambda p: (len(p) > 0, 'No numbers'))
+ ...
+ >>> class Lotteries(CheckedPVector):
+ ... __type__ = Lottery
+ ...
+ >>> class LotteriesByDate(CheckedPMap):
+ ... __key_type__ = date
+ ... __value_type__ = Lotteries
+ ...
+ >>> lotteries = LotteriesByDate.create({date(2015, 2, 15): [{'name': 'SuperLotto', 'numbers': {1, 2, 3}},
+ ... {'name': 'MegaLotto', 'numbers': {4, 5, 6}}],
+ ... date(2015, 2, 16): [{'name': 'SuperLotto', 'numbers': {3, 2, 1}},
+ ... {'name': 'MegaLotto', 'numbers': {6, 5, 4}}]})
+ >>> lotteries
+ LotteriesByDate({datetime.date(2015, 2, 15): Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')]), datetime.date(2015, 2, 16): Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')])})
+
+ # The checked versions support all operations that the corresponding
+ # unchecked types do
+ >>> lottery_0215 = lotteries[date(2015, 2, 15)]
+ >>> lottery_0215.transform([0, 'name'], 'SuperDuperLotto')
+ Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperDuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')])
+
+ # But also makes asserts that types and invariants hold
+ >>> lottery_0215.transform([0, 'name'], 999)
+ Traceback (most recent call last):
+ PTypeError: Invalid type for field Lottery.name, was int
+
+ >>> lottery_0215.transform([0, 'numbers'], set())
+ Traceback (most recent call last):
+ InvariantException: Field invariant failed
+
+ # They can be converted back to python built ins with either thaw()
+ # or serialize() (which provides possibilities to customize serialization)
+ >>> thaw(lottery_0215)
+ [{'numbers': set([1, 2, 3]), 'name': 'SuperLotto'}, {'numbers': set([4, 5, 6]), 'name': 'MegaLotto'}]
+ >>> lottery_0215.serialize()
+ [{'numbers': set([1, 2, 3]), 'name': 'SuperLotto'}, {'numbers': set([4, 5, 6]), 'name': 'MegaLotto'}]
+
+.. _transformations:
+
+Transformations
+~~~~~~~~~~~~~~~
+Transformations are inspired by the cool library instar_ for Clojure. They let you evolve PMaps and PVectors
+with arbitrarily deep/complex nesting using a simple and flexible matching syntax.
+
+The first argument to a transformation is the path that points out the value to transform. The
+second is the transformation to perform. If the transformation is callable it will be applied
+to the value(s) matching the path. The path may also contain callables. In that case they are
+treated as matchers. If a matcher returns True for a specific key, that key is considered for transformation.
+
+.. code:: python
+
+ # Basic examples
+ >>> from pyrsistent import inc, freeze, thaw, rex, ny, discard
+ >>> v1 = freeze([1, 2, 3, 4, 5])
+ >>> v1.transform([2], inc)
+ pvector([1, 2, 4, 4, 5])
+ >>> v1.transform([lambda ix: 0 < ix < 4], 8)
+ pvector([1, 8, 8, 8, 5])
+ >>> v1.transform([lambda ix, v: ix == 0 or v == 5], 0)
+ pvector([0, 2, 3, 4, 0])
+
+ # The (a)ny matcher can be used to match anything
+ >>> v1.transform([ny], 8)
+ pvector([8, 8, 8, 8, 8])
+
+ # Regular expressions can be used for matching
+ >>> scores = freeze({'John': 12, 'Joseph': 34, 'Sara': 23})
+ >>> scores.transform([rex('^Jo')], 0)
+ pmap({'Joseph': 0, 'Sara': 23, 'John': 0})
+
+ # Transformations can be done on arbitrarily deep structures
+ >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
+ ... {'author': 'Steve', 'content': 'A slightly longer article'}],
+ ... 'weather': {'temperature': '11C', 'wind': '5m/s'}})
+ >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
+ >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
+ >>> very_short_news.articles[0].content
+ 'A short article'
+ >>> very_short_news.articles[1].content
+ 'A slightly long...'
+
+ # When nothing has been transformed the original data structure is kept
+ >>> short_news is news_paper
+ True
+ >>> very_short_news is news_paper
+ False
+ >>> very_short_news.articles[0] is news_paper.articles[0]
+ True
+
+ # There is a special transformation that can be used to discard elements. Also
+ # multiple transformations can be applied in one call
+ >>> thaw(news_paper.transform(['weather'], discard, ['articles', ny, 'content'], discard))
+ {'articles': [{'author': 'Sara'}, {'author': 'Steve'}]}
+
+Evolvers
+~~~~~~~~
+PVector, PMap and PSet all have support for a concept dubbed *evolvers*. An evolver acts like a mutable
+view of the underlying persistent data structure with "transaction like" semantics. No updates of the original
+data structure are ever performed; it remains fully immutable.
+
+The evolvers have a very limited API by design to discourage excessive, and inappropriate, usage as that would
+take us down the mutable road. In principle only basic mutation and element access functions are supported.
+Check out the documentation_ of each data structure for specific examples.
+
+Examples of when you may want to use an evolver instead of working directly with the data structure include:
+
+* Multiple updates are done to the same data structure and the intermediate results are of no
+ interest. In this case using an evolver may be more efficient and easier to work with.
+* You need to pass a vector into a legacy function, or a function that you have no control
+ over, that performs in-place mutations. In this case pass an evolver instance
+ instead and then create a new pvector from the evolver once the function returns.
+
+.. code:: python
+
+ >>> from pyrsistent import v
+
+ # In place mutation as when working with the built in counterpart
+ >>> v1 = v(1, 2, 3)
+ >>> e = v1.evolver()
+ >>> e[1] = 22
+ >>> e = e.append(4)
+ >>> e = e.extend([5, 6])
+ >>> e[5] += 1
+ >>> len(e)
+ 6
+
+ # The evolver is considered *dirty* when it contains changes compared to the underlying vector
+ >>> e.is_dirty()
+ True
+
+ # But the underlying pvector still remains untouched
+ >>> v1
+ pvector([1, 2, 3])
+
+ # Once satisfied with the updates you can produce a new pvector containing the updates.
+ # The new pvector will share data with the original pvector in the same way that would have
+ # been done if only using operations on the pvector.
+ >>> v2 = e.persistent()
+ >>> v2
+ pvector([1, 22, 3, 4, 5, 7])
+
+ # The evolver is now no longer considered *dirty* as it contains no differences compared to the
+ # pvector just produced.
+ >>> e.is_dirty()
+ False
+
+ # You may continue to work with the same evolver without affecting the content of v2
+ >>> e[0] = 11
+
+ # Or create a new evolver from v2. The two evolvers can be updated independently but will both
+ # share data with v2 where possible.
+ >>> e2 = v2.evolver()
+ >>> e2[0] = 1111
+ >>> e.persistent()
+ pvector([11, 22, 3, 4, 5, 7])
+ >>> e2.persistent()
+ pvector([1111, 22, 3, 4, 5, 7])
+
+.. _freeze:
+.. _thaw:
+
+freeze and thaw
+~~~~~~~~~~~~~~~
+These functions are great when your cozy immutable world has to interact with the evil mutable world outside.
+
+.. code:: python
+
+ >>> from pyrsistent import freeze, thaw, v, m
+ >>> freeze([1, {'a': 3}])
+ pvector([1, pmap({'a': 3})])
+ >>> thaw(v(1, m(a=3)))
+ [1, {'a': 3}]
+
+Compatibility
+-------------
+
+Pyrsistent is developed and tested on Python 2.7, 3.5, 3.6, 3.7 and PyPy (Python 2 and 3 compatible). It will most
+likely work on all other versions >= 3.4 but no guarantees are given. :)
+
+Compatibility issues
+~~~~~~~~~~~~~~~~~~~~
+
+.. _27: https://github.com/tobgu/pyrsistent/issues/27
+
+There is currently one known compatibility issue when comparing built-in sets and frozensets to PSets, as discussed in 27_.
+It affects Python 2 versions < 2.7.8 and Python 3 versions < 3.4.0 and is due to a bug described in
+http://bugs.python.org/issue8743.
+
+Comparisons will fail or be incorrect when using the set/frozenset as the left hand side of the comparison. As a workaround
+you need to either upgrade Python to a more recent version, avoid comparing sets/frozensets with PSets, or always make
+sure to convert both sides of the comparison to the same type before performing the comparison.
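+
+As a sketch of the last workaround, converting the built-in side before the comparison sidesteps the issue on
+affected interpreter versions:
+
+.. code:: python
+
+ # Compare PSet to PSet instead of frozenset to PSet
+ >>> from pyrsistent import pset, s
+ >>> pset(frozenset([1, 2])) == s(1, 2)
+ True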
+
+Performance
+-----------
+
+Pyrsistent is developed with performance in mind. Still, while some operations are nearly on par with their built-in,
+mutable counterparts in terms of speed, other operations are slower. In the cases where optimizations
+have been attempted, speed has generally been valued over space.
+
+Pyrsistent comes with two API compatible flavors of PVector (on which PMap and PSet are based), one pure Python
+implementation and one implemented as a C extension. The latter is generally 2 - 20 times faster than the former.
+The C extension will be used automatically when possible.
+
+The pure Python implementation is fully PyPy compatible. Running it under PyPy speeds operations up considerably if
+the structures are used heavily (if JITed); for some cases the performance is almost on par with the built-in counterparts.
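+
+If you want to check which flavor you got, one rough sketch (relying on the C extension module being named
+``pvectorc``, as in this source tree) is to look at the module of the concrete vector type:
+
+.. code:: python
+
+ # 'pvectorc' means the C extension is in use; the pure Python fallback reports a different module name
+ >>> from pyrsistent import pvector
+ >>> impl_module = type(pvector()).__module__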
+
+Type hints
+----------
+
+PEP 561 style type hints for use with mypy and various editors are available for most types and functions in pyrsistent.
+
+Type classes for annotating your own code with pyrsistent types are also available under pyrsistent.typing.
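+
+A brief sketch of how such annotations might look (the function and variable names here are made up):
+
+.. code:: python
+
+ # Annotating with the generic classes from pyrsistent.typing
+ from pyrsistent import pvector
+ from pyrsistent.typing import PVector
+
+ def drop_first(xs: PVector[int]) -> PVector[int]:
+     # Slicing a PVector returns a new PVector, leaving xs untouched
+     return xs[1:]
+
+ numbers: PVector[int] = pvector([1, 2, 3])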
+
+Installation
+------------
+
+``pip install pyrsistent``
+
+Documentation
+-------------
+
+Available at http://pyrsistent.readthedocs.org/
+
+Brief presentation available at http://slides.com/tobiasgustafsson/immutability-and-python/
+
+Contributors
+------------
+
+Tobias Gustafsson https://github.com/tobgu
+
+Christopher Armstrong https://github.com/radix
+
+Anders Hovmöller https://github.com/boxed
+
+Itamar Turner-Trauring https://github.com/itamarst
+
+Jonathan Lange https://github.com/jml
+
+Richard Futrell https://github.com/Futrell
+
+Jakob Hollenstein https://github.com/jkbjh
+
+David Honour https://github.com/foolswood
+
+David R. MacIver https://github.com/DRMacIver
+
+Marcus Ewert https://github.com/sarum90
+
+Jean-Paul Calderone https://github.com/exarkun
+
+Douglas Treadwell https://github.com/douglas-treadwell
+
+Travis Parker https://github.com/teepark
+
+Julian Berman https://github.com/Julian
+
+Dennis Tomas https://github.com/dtomas
+
+Neil Vyas https://github.com/neilvyas
+
+doozr https://github.com/doozr
+
+Kamil Galuszka https://github.com/galuszkak
+
+Tsuyoshi Hombashi https://github.com/thombashi
+
+nattofriends https://github.com/nattofriends
+
+agberk https://github.com/agberk
+
+Waleed Khan https://github.com/arxanas
+
+Jean-Louis Fuchs https://github.com/ganwell
+
+Carlos Corbacho https://github.com/ccorbacho
+
+Felix Yan https://github.com/felixonmars
+
+benrg https://github.com/benrg
+
+Jere Lahelma https://github.com/je-l
+
+Max Taggart https://github.com/MaxTaggart
+
+Vincent Philippon https://github.com/vphilippon
+
+Semen Zhydenko https://github.com/ss18
+
+Till Varoquaux https://github.com/till-varoquaux
+
+Michal Kowalik https://github.com/michalvi
+
+ossdev07 https://github.com/ossdev07
+
+Kerry Olesen https://github.com/qhesz
+
+johnthagen https://github.com/johnthagen
+
+Contributing
+------------
+
+Want to contribute? That's great! If you experience problems please log them on GitHub. If you want to contribute code,
+please fork the repository and submit a pull request.
+
+Run tests
+~~~~~~~~~
+.. _tox: https://tox.readthedocs.io/en/latest/
+
+Tests can be executed using tox_.
+
+Install tox: ``pip install tox``
+
+Run test for Python 2.7: ``tox -epy27``
+
+Release
+~~~~~~~
+* Update CHANGES.txt
+* Update README with any new contributors and potential info needed.
+* Update _pyrsistent_version.py
+* python setup.py sdist upload
+* Commit and tag with new version: git add -u . && git commit -m 'Prepare version vX.Y.Z' && git tag -a vX.Y.Z -m 'vX.Y.Z'
+* Push commit and tags: git push && git push --tags
+
+Project status
+--------------
+Pyrsistent can be considered stable and mature (who knows, there may even be a 1.0 some day :-)). The project is
+maintained: bugs are fixed, PRs are reviewed and merged, and new releases are made. I currently do not have time for
+development of new features or functionality that I don't have use for myself. I'm more than happy to take PRs for new
+functionality though!
+
+There are a bunch of issues marked with ``enhancement`` and ``help wanted`` that contain requests for new functionality
+that would be nice to include. The level of difficulty and extent of the issues varies; please reach out to me if you're
+interested in working on any of them.
+
+If you feel that you have a grand master plan for where you would like Pyrsistent to go and have the time to put into
+it, please don't hesitate to discuss this with me and submit PRs for it. If all goes well I'd be more than happy to add
+additional maintainers to the project!
diff --git a/third_party/python/pyrsistent/README.rst b/third_party/python/pyrsistent/README.rst
new file mode 100644
index 0000000000..a4c24e49bd
--- /dev/null
+++ b/third_party/python/pyrsistent/README.rst
@@ -0,0 +1,725 @@
+Pyrsistent
+==========
+.. image:: https://travis-ci.org/tobgu/pyrsistent.png?branch=master
+ :target: https://travis-ci.org/tobgu/pyrsistent
+
+.. image:: https://badge.fury.io/py/pyrsistent.svg
+ :target: https://badge.fury.io/py/pyrsistent
+
+.. image:: https://coveralls.io/repos/tobgu/pyrsistent/badge.svg?branch=master&service=github
+ :target: https://coveralls.io/github/tobgu/pyrsistent?branch=master
+
+
+.. _Pyrthon: https://www.github.com/tobgu/pyrthon/
+
+Pyrsistent is a collection of persistent data structures (sometimes referred to as functional data structures). Persistent in
+the sense that they are immutable.
+
+All methods on a data structure that would normally mutate it instead return a new copy of the structure containing the
+requested updates. The original structure is left untouched.
+
+This simplifies reasoning about what a program does, since no hidden side effects can ever take place on these
+data structures. You can rest assured that the object you hold a reference to will remain the same throughout its
+lifetime, and you need not worry that somewhere five stack levels below you, in the darkest corner of your application,
+someone has decided to remove the element that you expected to be there.
+
+Pyrsistent is influenced by persistent data structures such as those found in the standard library of Clojure. The
+data structures are designed to share common elements through path copying.
+It aims to take these concepts and make them as Pythonic as possible so that they can be easily integrated into any Python
+program without hassle.
+
+If you want to go all in on persistent data structures and use literal syntax to define them in your code rather
+than function calls, check out Pyrthon_.
+
+Examples
+--------
+.. _Sequence: collections_
+.. _Hashable: collections_
+.. _Mapping: collections_
+.. _Mappings: collections_
+.. _Set: collections_
+.. _collections: https://docs.python.org/3/library/collections.abc.html
+.. _documentation: http://pyrsistent.readthedocs.org/
+
+The collection types and key features currently implemented are:
+
+* PVector_, similar to a python list
+* PMap_, similar to dict
+* PSet_, similar to set
+* PRecord_, a PMap on steroids with fixed fields, optional type and invariant checking and much more
+* PClass_, a Python class with fixed fields, optional type and invariant checking and much more
+* `Checked collections`_, PVector, PMap and PSet with optional type and invariant checks and more
+* PBag, similar to collections.Counter
+* PList, a classic singly linked list
+* PDeque, similar to collections.deque
+* Immutable object type (immutable) built on the named tuple
+* freeze_ and thaw_ functions to convert between Python's standard collections and pyrsistent collections.
+* Flexible transformations_ of arbitrarily complex structures built from PMaps and PVectors.
+
+Below are examples of common usage patterns for some of the structures and features. More information and
+full documentation for all data structures is available in the documentation_.
+
+.. _PVector:
+
+PVector
+~~~~~~~
+With full support for the Sequence_ protocol, PVector is meant as a drop-in replacement for the built-in list from a reader's
+point of view. Write operations of course differ since no in-place mutation is done, but naming should be in line
+with the corresponding operations on the built-in list.
+
+Support for the Hashable_ protocol also means that it can be used as a key in Mappings_.
+
+Appends are amortized O(1). Random access and insert are log32(n), where n is the size of the vector.
+
+.. code:: python
+
+ >>> from pyrsistent import v, pvector
+
+ # No mutation of vectors once created, instead they
+ # are "evolved" leaving the original untouched
+ >>> v1 = v(1, 2, 3)
+ >>> v2 = v1.append(4)
+ >>> v3 = v2.set(1, 5)
+ >>> v1
+ pvector([1, 2, 3])
+ >>> v2
+ pvector([1, 2, 3, 4])
+ >>> v3
+ pvector([1, 5, 3, 4])
+
+ # Random access and slicing
+ >>> v3[1]
+ 5
+ >>> v3[1:3]
+ pvector([5, 3])
+
+ # Iteration
+ >>> list(x + 1 for x in v3)
+ [2, 6, 4, 5]
+ >>> pvector(2 * x for x in range(3))
+ pvector([0, 2, 4])
+
+.. _PMap:
+
+PMap
+~~~~
+With full support for the Mapping_ protocol, PMap is meant as a drop-in replacement for the built-in dict from a reader's point
+of view. Support for the Hashable_ protocol also means that it can be used as a key in other Mappings_.
+
+Random access and insert are log32(n), where n is the size of the map.
+
+.. code:: python
+
+ >>> from pyrsistent import m, pmap, v
+
+ # No mutation of maps once created, instead they are
+ # "evolved" leaving the original untouched
+ >>> m1 = m(a=1, b=2)
+ >>> m2 = m1.set('c', 3)
+ >>> m3 = m2.set('a', 5)
+ >>> m1
+ pmap({'a': 1, 'b': 2})
+ >>> m2
+ pmap({'a': 1, 'c': 3, 'b': 2})
+ >>> m3
+ pmap({'a': 5, 'c': 3, 'b': 2})
+ >>> m3['a']
+ 5
+
+ # Evolution of nested persistent structures
+ >>> m4 = m(a=5, b=6, c=v(1, 2))
+ >>> m4.transform(('c', 1), 17)
+ pmap({'a': 5, 'c': pvector([1, 17]), 'b': 6})
+ >>> m5 = m(a=1, b=2)
+
+ # Evolve by merging with other mappings
+ >>> m5.update(m(a=2, c=3), {'a': 17, 'd': 35})
+ pmap({'a': 17, 'c': 3, 'b': 2, 'd': 35})
+ >>> pmap({'x': 1, 'y': 2}) + pmap({'y': 3, 'z': 4})
+ pmap({'y': 3, 'x': 1, 'z': 4})
+
+ # Dict-like methods to convert to list and iterate
+ >>> m3.items()
+ pvector([('a', 5), ('c', 3), ('b', 2)])
+ >>> list(m3)
+ ['a', 'c', 'b']
+
+.. _PSet:
+
+PSet
+~~~~
+With full support for the Set_ protocol, PSet is meant as a drop-in replacement for the built-in set from a reader's point
+of view. Support for the Hashable_ protocol also means that it can be used as a key in Mappings_.
+
+Random access and insert are log32(n), where n is the size of the set.
+
+.. code:: python
+
+ >>> from pyrsistent import s
+
+ # No mutation of sets once created, you know the story...
+ >>> s1 = s(1, 2, 3, 2)
+ >>> s2 = s1.add(4)
+ >>> s3 = s1.remove(1)
+ >>> s1
+ pset([1, 2, 3])
+ >>> s2
+ pset([1, 2, 3, 4])
+ >>> s3
+ pset([2, 3])
+
+ # Full support for set operations
+ >>> s1 | s(3, 4, 5)
+ pset([1, 2, 3, 4, 5])
+ >>> s1 & s(3, 4, 5)
+ pset([3])
+ >>> s1 < s2
+ True
+ >>> s1 < s(3, 4, 5)
+ False
+
+.. _PRecord:
+
+PRecord
+~~~~~~~
+A PRecord is a PMap with a fixed set of specified fields. Records are declared as Python classes inheriting
+from PRecord. Because it is a PMap, it has full support for all Mapping methods such as iteration and element
+access using subscript notation.
+
+.. code:: python
+
+ >>> from pyrsistent import PRecord, field
+ >>> class ARecord(PRecord):
+ ... x = field()
+ ...
+ >>> r = ARecord(x=3)
+ >>> r
+ ARecord(x=3)
+ >>> r.x
+ 3
+ >>> r.set(x=2)
+ ARecord(x=2)
+ >>> r.set(y=2)
+ Traceback (most recent call last):
+ AttributeError: 'y' is not among the specified fields for ARecord
+
+Type information
+****************
+It is possible to add type information to the record to enforce type checks. Multiple allowed types can be specified
+by providing an iterable of types.
+
+.. code:: python
+
+ >>> class BRecord(PRecord):
+ ... x = field(type=int)
+ ... y = field(type=(int, type(None)))
+ ...
+ >>> BRecord(x=3, y=None)
+ BRecord(y=None, x=3)
+ >>> BRecord(x=3.0)
+ Traceback (most recent call last):
+ PTypeError: Invalid type for field BRecord.x, was float
+
+
+Custom types (classes) that are iterable should be wrapped in a tuple to prevent their
+members from being added to the set of valid types. Enums in particular are now
+supported without wrapping; see #83 for more information.
+
+Mandatory fields
+****************
+Fields are not mandatory by default but can be specified as such. If fields are missing, an
+*InvariantException* will be thrown which contains information about the missing fields.
+
+.. code:: python
+
+ >>> from pyrsistent import InvariantException
+ >>> class CRecord(PRecord):
+ ... x = field(mandatory=True)
+ ...
+ >>> r = CRecord(x=3)
+ >>> try:
+ ... r.discard('x')
+ ... except InvariantException as e:
+ ... print(e.missing_fields)
+ ...
+ ('CRecord.x',)
+
+Invariants
+**********
+It is possible to add invariants that must hold when evolving the record. Invariants can be
+specified on both field and record level. If invariants fail, an *InvariantException* will be
+thrown which contains information about the failing invariants. An invariant function should
+return a tuple consisting of a boolean that tells whether the invariant holds and an object
+describing the invariant. This object can later be used to identify which invariant failed.
+
+The global invariant function is only executed if all field invariants hold.
+
+Global invariants are inherited by subclasses.
+
+.. code:: python
+
+ >>> class RestrictedVector(PRecord):
+ ... __invariant__ = lambda r: (r.y >= r.x, 'x larger than y')
+ ... x = field(invariant=lambda x: (x > 0, 'x negative'))
+ ... y = field(invariant=lambda y: (y > 0, 'y negative'))
+ ...
+ >>> r = RestrictedVector(y=3, x=2)
+ >>> try:
+ ... r.set(x=-1, y=-2)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ ('y negative', 'x negative')
+ >>> try:
+ ... r.set(x=2, y=1)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ ('x larger than y',)
+
+Invariants may also contain multiple assertions. In those cases the invariant function should
+return a tuple of invariant tuples as described above. This structure is reflected in the
+invariant_errors attribute of the exception, which will contain tuples with data from all failed
+invariants. E.g.:
+
+.. code:: python
+
+ >>> class EvenX(PRecord):
+ ... x = field(invariant=lambda x: ((x > 0, 'x negative'), (x % 2 == 0, 'x odd')))
+ ...
+ >>> try:
+ ... EvenX(x=-1)
+ ... except InvariantException as e:
+ ... print(e.invariant_errors)
+ ...
+ (('x negative', 'x odd'),)
+
+
+Factories
+*********
+It's possible to specify factory functions for fields. The factory function receives whatever
+is supplied as the field value, and the value returned by the factory is assigned to the field,
+given that any type and invariant checks hold.
+PRecords have a default factory specified as a static function on the class, create(). It takes
+a *Mapping* as argument and returns an instance of the specific record.
+If a record has fields of type PRecord, the create() method of that record will
+be called to create the "sub record" if no factory has explicitly been specified to override
+this behaviour.
+
+.. code:: python
+
+ >>> class DRecord(PRecord):
+ ... x = field(factory=int)
+ ...
+ >>> class ERecord(PRecord):
+ ... d = field(type=DRecord)
+ ...
+ >>> ERecord.create({'d': {'x': '1'}})
+ ERecord(d=DRecord(x=1))
+
+Collection fields
+*****************
+It is also possible to have fields with ``pyrsistent`` collections.
+
+.. code:: python
+
+ >>> from pyrsistent import pset_field, pmap_field, pvector_field
+ >>> class MultiRecord(PRecord):
+ ... set_of_ints = pset_field(int)
+ ... map_int_to_str = pmap_field(int, str)
+ ... vector_of_strs = pvector_field(str)
+ ...
+
+Serialization
+*************
+PRecords support serialization back to dicts. Default serialization will take keys and values
+"as is" and output them into a dict. It is possible to specify custom serialization functions
+to take care of fields that require special treatment.
+
+.. code:: python
+
+ >>> from datetime import date
+ >>> class Person(PRecord):
+ ... name = field(type=unicode)
+ ... birth_date = field(type=date,
+ ... serializer=lambda format, d: d.strftime(format['date']))
+ ...
+ >>> john = Person(name=u'John', birth_date=date(1985, 10, 21))
+ >>> john.serialize({'date': '%Y-%m-%d'})
+ {'birth_date': '1985-10-21', 'name': u'John'}
+
+
+.. _instar: https://github.com/boxed/instar/
+
+.. _PClass:
+
+PClass
+~~~~~~
+A PClass is a Python class with a fixed set of specified fields. PClasses are declared as Python classes inheriting
+from PClass. It is defined the same way that a PRecord is and behaves like a PRecord in all aspects except that it
+is not a PMap and hence not a collection but rather a plain Python object.
+
+.. code:: python
+
+ >>> from pyrsistent import PClass, field
+ >>> class AClass(PClass):
+ ... x = field()
+ ...
+ >>> a = AClass(x=3)
+ >>> a
+ AClass(x=3)
+ >>> a.x
+ 3
+
+
+Checked collections
+~~~~~~~~~~~~~~~~~~~
+Checked collections currently come in three flavors: CheckedPVector, CheckedPMap and CheckedPSet.
+
+.. code:: python
+
+ >>> from pyrsistent import CheckedPVector, CheckedPMap, CheckedPSet, thaw
+ >>> class Positives(CheckedPSet):
+ ... __type__ = (long, int)
+ ... __invariant__ = lambda n: (n >= 0, 'Negative')
+ ...
+ >>> class Lottery(PRecord):
+ ... name = field(type=str)
+ ... numbers = field(type=Positives, invariant=lambda p: (len(p) > 0, 'No numbers'))
+ ...
+ >>> class Lotteries(CheckedPVector):
+ ... __type__ = Lottery
+ ...
+ >>> class LotteriesByDate(CheckedPMap):
+ ... __key_type__ = date
+ ... __value_type__ = Lotteries
+ ...
+ >>> lotteries = LotteriesByDate.create({date(2015, 2, 15): [{'name': 'SuperLotto', 'numbers': {1, 2, 3}},
+ ... {'name': 'MegaLotto', 'numbers': {4, 5, 6}}],
+ ... date(2015, 2, 16): [{'name': 'SuperLotto', 'numbers': {3, 2, 1}},
+ ... {'name': 'MegaLotto', 'numbers': {6, 5, 4}}]})
+ >>> lotteries
+ LotteriesByDate({datetime.date(2015, 2, 15): Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')]), datetime.date(2015, 2, 16): Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')])})
+
+ # The checked versions support all operations that the corresponding
+ # unchecked types do
+ >>> lottery_0215 = lotteries[date(2015, 2, 15)]
+ >>> lottery_0215.transform([0, 'name'], 'SuperDuperLotto')
+ Lotteries([Lottery(numbers=Positives([1, 2, 3]), name='SuperDuperLotto'), Lottery(numbers=Positives([4, 5, 6]), name='MegaLotto')])
+
+ # But also makes asserts that types and invariants hold
+ >>> lottery_0215.transform([0, 'name'], 999)
+ Traceback (most recent call last):
+ PTypeError: Invalid type for field Lottery.name, was int
+
+ >>> lottery_0215.transform([0, 'numbers'], set())
+ Traceback (most recent call last):
+ InvariantException: Field invariant failed
+
+ # They can be converted back to python built ins with either thaw()
+ # or serialize() (which provides possibilities to customize serialization)
+ >>> thaw(lottery_0215)
+ [{'numbers': set([1, 2, 3]), 'name': 'SuperLotto'}, {'numbers': set([4, 5, 6]), 'name': 'MegaLotto'}]
+ >>> lottery_0215.serialize()
+ [{'numbers': set([1, 2, 3]), 'name': 'SuperLotto'}, {'numbers': set([4, 5, 6]), 'name': 'MegaLotto'}]
+
+.. _transformations:
+
+Transformations
+~~~~~~~~~~~~~~~
+Transformations are inspired by the cool library instar_ for Clojure. They let you evolve PMaps and PVectors
+with arbitrarily deep/complex nesting using a simple and flexible matching syntax.
+
+The first argument to a transformation is the path that points out the value to transform. The
+second is the transformation to perform. If the transformation is callable it will be applied
+to the value(s) matching the path. The path may also contain callables. In that case they are
+treated as matchers. If a matcher returns True for a specific key, that key is considered for transformation.
+
+.. code:: python
+
+ # Basic examples
+ >>> from pyrsistent import inc, freeze, thaw, rex, ny, discard
+ >>> v1 = freeze([1, 2, 3, 4, 5])
+ >>> v1.transform([2], inc)
+ pvector([1, 2, 4, 4, 5])
+ >>> v1.transform([lambda ix: 0 < ix < 4], 8)
+ pvector([1, 8, 8, 8, 5])
+ >>> v1.transform([lambda ix, v: ix == 0 or v == 5], 0)
+ pvector([0, 2, 3, 4, 0])
+
+ # The (a)ny matcher can be used to match anything
+ >>> v1.transform([ny], 8)
+ pvector([8, 8, 8, 8, 8])
+
+ # Regular expressions can be used for matching
+ >>> scores = freeze({'John': 12, 'Joseph': 34, 'Sara': 23})
+ >>> scores.transform([rex('^Jo')], 0)
+ pmap({'Joseph': 0, 'Sara': 23, 'John': 0})
+
+ # Transformations can be done on arbitrarily deep structures
+ >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
+ ... {'author': 'Steve', 'content': 'A slightly longer article'}],
+ ... 'weather': {'temperature': '11C', 'wind': '5m/s'}})
+ >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
+ >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
+ >>> very_short_news.articles[0].content
+ 'A short article'
+ >>> very_short_news.articles[1].content
+ 'A slightly long...'
+
+ # When nothing has been transformed the original data structure is kept
+ >>> short_news is news_paper
+ True
+ >>> very_short_news is news_paper
+ False
+ >>> very_short_news.articles[0] is news_paper.articles[0]
+ True
+
+ # There is a special transformation that can be used to discard elements. Also
+ # multiple transformations can be applied in one call
+ >>> thaw(news_paper.transform(['weather'], discard, ['articles', ny, 'content'], discard))
+ {'articles': [{'author': 'Sara'}, {'author': 'Steve'}]}
+
+Evolvers
+~~~~~~~~
+PVector, PMap and PSet all have support for a concept dubbed *evolvers*. An evolver acts like a mutable
+view of the underlying persistent data structure with "transaction like" semantics. No updates of the original
+data structure are ever performed; it remains fully immutable.
+
+The evolvers have a very limited API by design to discourage excessive, and inappropriate, usage as that would
+take us down the mutable road. In principle only basic mutation and element access functions are supported.
+Check out the documentation_ of each data structure for specific examples.
+
+Examples of when you may want to use an evolver instead of working directly with the data structure include:
+
+* Multiple updates are done to the same data structure and the intermediate results are of no
+ interest. In this case using an evolver may be more efficient and easier to work with.
+* You need to pass a vector into a legacy function, or a function that you have no control
+ over, that performs in-place mutations. In this case pass an evolver instance
+ instead and then create a new pvector from the evolver once the function returns.
+
+.. code:: python
+
+ >>> from pyrsistent import v
+
+ # In place mutation as when working with the built in counterpart
+ >>> v1 = v(1, 2, 3)
+ >>> e = v1.evolver()
+ >>> e[1] = 22
+ >>> e = e.append(4)
+ >>> e = e.extend([5, 6])
+ >>> e[5] += 1
+ >>> len(e)
+ 6
+
+ # The evolver is considered *dirty* when it contains changes compared to the underlying vector
+ >>> e.is_dirty()
+ True
+
+ # But the underlying pvector still remains untouched
+ >>> v1
+ pvector([1, 2, 3])
+
+ # Once satisfied with the updates you can produce a new pvector containing the updates.
+ # The new pvector will share data with the original pvector in the same way that would have
+ # been done if only using operations on the pvector.
+ >>> v2 = e.persistent()
+ >>> v2
+ pvector([1, 22, 3, 4, 5, 7])
+
+ # The evolver is now no longer considered *dirty* as it contains no differences compared to the
+ # pvector just produced.
+ >>> e.is_dirty()
+ False
+
+ # You may continue to work with the same evolver without affecting the content of v2
+ >>> e[0] = 11
+
+ # Or create a new evolver from v2. The two evolvers can be updated independently but will both
+ # share data with v2 where possible.
+ >>> e2 = v2.evolver()
+ >>> e2[0] = 1111
+ >>> e.persistent()
+ pvector([11, 22, 3, 4, 5, 7])
+ >>> e2.persistent()
+ pvector([1111, 22, 3, 4, 5, 7])
+
+.. _freeze:
+.. _thaw:
+
+freeze and thaw
+~~~~~~~~~~~~~~~
+These functions are great when your cozy immutable world has to interact with the evil mutable world outside.
+
+.. code:: python
+
+ >>> from pyrsistent import freeze, thaw, v, m
+ >>> freeze([1, {'a': 3}])
+ pvector([1, pmap({'a': 3})])
+ >>> thaw(v(1, m(a=3)))
+ [1, {'a': 3}]
+
+Compatibility
+-------------
+
+Pyrsistent is developed and tested on Python 2.7, 3.5, 3.6, 3.7 and PyPy (Python 2 and 3 compatible). It will most
+likely work on all other versions >= 3.4 but no guarantees are given. :)
+
+Compatibility issues
+~~~~~~~~~~~~~~~~~~~~
+
+.. _27: https://github.com/tobgu/pyrsistent/issues/27
+
+There is currently one known compatibility issue when comparing built-in sets and frozensets to PSets, as discussed in 27_.
+It affects Python 2 versions < 2.7.8 and Python 3 versions < 3.4.0 and is due to a bug described in
+http://bugs.python.org/issue8743.
+
+Comparisons will fail or be incorrect when using the set/frozenset as the left hand side of the comparison. As a workaround
+you need to either upgrade Python to a more recent version, avoid comparing sets/frozensets with PSets, or always make
+sure to convert both sides of the comparison to the same type before performing the comparison.
+
+Performance
+-----------
+
+Pyrsistent is developed with performance in mind. Still, while some operations are nearly on par with their built-in,
+mutable counterparts in terms of speed, other operations are slower. In the cases where optimizations
+have been attempted, speed has generally been valued over space.
+
+Pyrsistent comes with two API compatible flavors of PVector (on which PMap and PSet are based), one pure Python
+implementation and one implemented as a C extension. The latter is generally 2 - 20 times faster than the former.
+The C extension will be used automatically when possible.
+
+The pure Python implementation is fully PyPy compatible. Running it under PyPy speeds operations up considerably if
+the structures are used heavily (if JITed); for some cases the performance is almost on par with the built-in counterparts.
+
+Type hints
+----------
+
+PEP 561 style type hints for use with mypy and various editors are available for most types and functions in pyrsistent.
+
+Type classes for annotating your own code with pyrsistent types are also available under pyrsistent.typing.
+
+Installation
+------------
+
+``pip install pyrsistent``
+
+Documentation
+-------------
+
+Available at http://pyrsistent.readthedocs.org/
+
+Brief presentation available at http://slides.com/tobiasgustafsson/immutability-and-python/
+
+Contributors
+------------
+
+Tobias Gustafsson https://github.com/tobgu
+
+Christopher Armstrong https://github.com/radix
+
+Anders Hovmöller https://github.com/boxed
+
+Itamar Turner-Trauring https://github.com/itamarst
+
+Jonathan Lange https://github.com/jml
+
+Richard Futrell https://github.com/Futrell
+
+Jakob Hollenstein https://github.com/jkbjh
+
+David Honour https://github.com/foolswood
+
+David R. MacIver https://github.com/DRMacIver
+
+Marcus Ewert https://github.com/sarum90
+
+Jean-Paul Calderone https://github.com/exarkun
+
+Douglas Treadwell https://github.com/douglas-treadwell
+
+Travis Parker https://github.com/teepark
+
+Julian Berman https://github.com/Julian
+
+Dennis Tomas https://github.com/dtomas
+
+Neil Vyas https://github.com/neilvyas
+
+doozr https://github.com/doozr
+
+Kamil Galuszka https://github.com/galuszkak
+
+Tsuyoshi Hombashi https://github.com/thombashi
+
+nattofriends https://github.com/nattofriends
+
+agberk https://github.com/agberk
+
+Waleed Khan https://github.com/arxanas
+
+Jean-Louis Fuchs https://github.com/ganwell
+
+Carlos Corbacho https://github.com/ccorbacho
+
+Felix Yan https://github.com/felixonmars
+
+benrg https://github.com/benrg
+
+Jere Lahelma https://github.com/je-l
+
+Max Taggart https://github.com/MaxTaggart
+
+Vincent Philippon https://github.com/vphilippon
+
+Semen Zhydenko https://github.com/ss18
+
+Till Varoquaux https://github.com/till-varoquaux
+
+Michal Kowalik https://github.com/michalvi
+
+ossdev07 https://github.com/ossdev07
+
+Kerry Olesen https://github.com/qhesz
+
+johnthagen https://github.com/johnthagen
+
+Contributing
+------------
+
+Want to contribute? That's great! If you experience problems please log them on GitHub. If you want to contribute code,
+please fork the repository and submit a pull request.
+
+Run tests
+~~~~~~~~~
+.. _tox: https://tox.readthedocs.io/en/latest/
+
+Tests can be executed using tox_.
+
+Install tox: ``pip install tox``
+
+Run test for Python 2.7: ``tox -epy27``
+
+Release
+~~~~~~~
+* Update CHANGES.txt
+* Update README with any new contributors and potential info needed.
+* Update _pyrsistent_version.py
+* python setup.py sdist upload
+* Commit and tag with new version: git add -u . && git commit -m 'Prepare version vX.Y.Z' && git tag -a vX.Y.Z -m 'vX.Y.Z'
+* Push commit and tags: git push && git push --tags
+
+Project status
+--------------
+Pyrsistent can be considered stable and mature (who knows, there may even be a 1.0 some day :-)). The project is
+maintained: bugs are fixed, PRs are reviewed and merged, and new releases are made. I currently do not have time for
+development of new features or functionality that I don't have use for myself. I'm more than happy to take PRs for new
+functionality though!
+
+There are a bunch of issues marked with ``enhancement`` and ``help wanted`` that contain requests for new functionality
+that would be nice to include. The level of difficulty and extent of the issues varies; please reach out to me if you're
+interested in working on any of them.
+
+If you feel that you have a grand master plan for where you would like Pyrsistent to go and have the time to put into
+it, please don't hesitate to discuss this with me and submit PRs for it. If all goes well I'd be more than happy to add
+additional maintainers to the project!
diff --git a/third_party/python/pyrsistent/_pyrsistent_version.py b/third_party/python/pyrsistent/_pyrsistent_version.py
new file mode 100644
index 0000000000..8911e95ca7
--- /dev/null
+++ b/third_party/python/pyrsistent/_pyrsistent_version.py
@@ -0,0 +1 @@
+__version__ = '0.16.0'
diff --git a/third_party/python/pyrsistent/pvectorcmodule.c b/third_party/python/pyrsistent/pvectorcmodule.c
new file mode 100644
index 0000000000..11a5bd6411
--- /dev/null
+++ b/third_party/python/pyrsistent/pvectorcmodule.c
@@ -0,0 +1,1642 @@
+#include <Python.h>
+#include <structmember.h>
+
+/*
+Persistent/Immutable/Functional vector and helper types.
+
+Please note that they are anything but immutable at this level since
+there is a whole lot of reference counting going on. That's the way
+CPython works though and the GIL makes them appear immutable.
+
+To the programmer using them from Python they appear immutable and
+behave immutably at least.
+
+Naming conventions
+------------------
+initpyrsistentc - This is the method that initializes the whole module
+pyrsistent_* - Methods part of the interface
+<typename>_* - Instance methods of types. For example PVector_append(...)
+
+All other methods are camel cased without prefix. All methods are static, none should
+need to be exposed outside of this module.
+*/
+
+#define SHIFT 5
+#define BRANCH_FACTOR (1 << SHIFT)
+#define BIT_MASK (BRANCH_FACTOR - 1)
+
+static PyTypeObject PVectorType;
+static PyTypeObject PVectorEvolverType;
+
+typedef struct {
+ void *items[BRANCH_FACTOR];
+ unsigned int refCount;
+} VNode;
+
+#define NODE_CACHE_MAX_SIZE 1024
+
+typedef struct {
+ unsigned int size;
+ VNode* nodes[NODE_CACHE_MAX_SIZE];
+} vNodeCache;
+
+static vNodeCache nodeCache;
+
+typedef struct {
+ PyObject_HEAD
+ unsigned int count; // Perhaps ditch this one in favor of ob_size/Py_SIZE()
+ unsigned int shift;
+ VNode *root;
+ VNode *tail;
+ PyObject *in_weakreflist; /* List of weak references */
+} PVector;
+
+typedef struct {
+ PyObject_HEAD
+ PVector* originalVector;
+ PVector* newVector;
+ PyObject* appendList;
+} PVectorEvolver;
+
+
+static PVector* EMPTY_VECTOR = NULL;
+static PyObject* transform_fn = NULL;
+
+static PyObject* transform(PVector* self, PyObject* args) {
+ if(transform_fn == NULL) {
+ // Lazy lookup of the transform function here to avoid circular import problems
+ transform_fn = PyObject_GetAttrString(PyImport_ImportModule("pyrsistent._transformations"), "transform");
+ }
+
+ return PyObject_CallFunctionObjArgs(transform_fn, self, args, NULL);
+}
+
+
+// No access to internal members
+static PyMemberDef PVector_members[] = {
+ {NULL} /* Sentinel */
+};
+
+#define debug(...)
+// #define debug printf
+
+#define NODE_REF_COUNT(n) ((n)->refCount)
+#define SET_NODE_REF_COUNT(n, c) (NODE_REF_COUNT(n) = (c))
+#define INC_NODE_REF_COUNT(n) (NODE_REF_COUNT(n)++)
+#define DEC_NODE_REF_COUNT(n) (NODE_REF_COUNT(n)--)
+
+static VNode* allocNode(void) {
+ if(nodeCache.size > 0) {
+ nodeCache.size--;
+ return nodeCache.nodes[nodeCache.size];
+ }
+
+ return PyMem_Malloc(sizeof(VNode));
+}
+
+static void freeNode(VNode *node) {
+ if(nodeCache.size < NODE_CACHE_MAX_SIZE) {
+ nodeCache.nodes[nodeCache.size] = node;
+ nodeCache.size++;
+ } else {
+ PyMem_Free(node);
+ }
+}
+
+static VNode* newNode(void) {
+ VNode* result = allocNode();
+ memset(result, 0x0, sizeof(VNode));
+ SET_NODE_REF_COUNT(result, 1);
+ debug("newNode() %p\n", result);
+ return result;
+}
+
+static VNode* copyNode(VNode* source) {
+ /* NB: Only to be used for internal nodes, eg. nodes that do not
+ hold direct references to python objects but only to other nodes. */
+ int i;
+ VNode* result = allocNode();
+ debug("copyNode() %p\n", result);
+ memcpy(result->items, source->items, sizeof(source->items));
+
+ for(i = 0; i < BRANCH_FACTOR; i++) {
+ // TODO-OPT: Any need to go on when the first NULL has been found?
+ if(result->items[i] != NULL) {
+ INC_NODE_REF_COUNT((VNode*)result->items[i]);
+ }
+ }
+
+ SET_NODE_REF_COUNT(result, 1);
+ return result;
+}
+
+static PVector* emptyNewPvec(void);
+static PVector* copyPVector(PVector *original);
+static void extendWithItem(PVector *newVec, PyObject *item);
+
+static PyObject *PVectorEvolver_persistent(PVectorEvolver *);
+static int PVectorEvolver_set_item(PVectorEvolver *, PyObject*, PyObject*);
+
+static Py_ssize_t PVector_len(PVector *self) {
+ return self->count;
+}
+
+/* Convenience macros */
+#define ROOT_NODE_FULL(vec) ((vec->count >> SHIFT) > (1 << vec->shift))
+#define TAIL_OFF(vec) ((vec->count < BRANCH_FACTOR) ? 0 : (((vec->count - 1) >> SHIFT) << SHIFT))
+#define TAIL_SIZE(vec) (vec->count - TAIL_OFF(vec))
+#define PVector_CheckExact(op) (Py_TYPE(op) == &PVectorType)
+
+static VNode* nodeFor(PVector *self, int i){
+ int level;
+ if((i >= 0) && (i < self->count)) {
+ if(i >= TAIL_OFF(self)) {
+ return self->tail;
+ }
+
+ VNode* node = self->root;
+ for(level = self->shift; level > 0; level -= SHIFT) {
+ node = (VNode*) node->items[(i >> level) & BIT_MASK];
+ }
+
+ return node;
+ }
+
+ PyErr_Format(PyExc_IndexError, "Index out of range: %i", i);
+ return NULL;
+}
+
+static PyObject* _get_item(PVector *self, Py_ssize_t pos) {
+ VNode* node = nodeFor((PVector*)self, pos);
+ PyObject *result = NULL;
+ if(node != NULL) {
+ result = node->items[pos & BIT_MASK];
+ }
+ return result;
+}
+
+/*
+ Returns a new reference as specified by the PySequence_GetItem function.
+*/
+static PyObject* PVector_get_item(PVector *self, Py_ssize_t pos) {
+ if (pos < 0) {
+ pos += self->count;
+ }
+
+ PyObject* obj = _get_item(self, pos);
+ Py_XINCREF(obj);
+ return obj;
+}
+
+static void releaseNode(int level, VNode *node) {
+ if(node == NULL) {
+ return;
+ }
+
+ debug("releaseNode(): node=%p, level=%i, refCount=%i\n", node, level, NODE_REF_COUNT(node));
+
+ int i;
+
+ DEC_NODE_REF_COUNT(node);
+ debug("Refcount when trying to release: %u\n", NODE_REF_COUNT(node));
+ if(NODE_REF_COUNT(node) == 0) {
+ if(level > 0) {
+ for(i = 0; i < BRANCH_FACTOR; i++) {
+ if(node->items[i] != NULL) {
+ releaseNode(level - SHIFT, node->items[i]);
+ }
+ }
+ freeNode(node);
+ } else {
+ for(i = 0; i < BRANCH_FACTOR; i++) {
+ Py_XDECREF(node->items[i]);
+ }
+ freeNode(node);
+ }
+ }
+
+ debug("releaseNode(): Done! node=%p!\n", node);
+}
+
+/*
+ Returns all references to PyObjects that have been stolen. Also decrements
+ the internal reference counts used for shared memory structures and deallocates
+ those if needed.
+*/
+static void PVector_dealloc(PVector *self) {
+ debug("Dealloc(): self=%p, self->count=%u, tail->refCount=%u, root->refCount=%u, self->shift=%u, self->tail=%p, self->root=%p\n",
+ self, self->count, NODE_REF_COUNT(self->tail), NODE_REF_COUNT(self->root), self->shift, self->tail, self->root);
+
+ if (self->in_weakreflist != NULL) {
+ PyObject_ClearWeakRefs((PyObject *) self);
+ }
+
+ PyObject_GC_UnTrack((PyObject*)self);
+ Py_TRASHCAN_SAFE_BEGIN(self);
+
+ releaseNode(0, self->tail);
+ releaseNode(self->shift, self->root);
+
+ PyObject_GC_Del(self);
+ Py_TRASHCAN_SAFE_END(self);
+}
+
+static PyObject *PVector_toList(PVector *self) {
+ Py_ssize_t i;
+ PyObject *list = PyList_New(self->count);
+ for (i = 0; i < self->count; ++i) {
+ PyObject *o = _get_item(self, i);
+ Py_INCREF(o);
+ PyList_SET_ITEM(list, i, o);
+ }
+
+ return list;
+}
+
+
+static PyObject *PVector_repr(PVector *self) {
+ // Reuse the list repr code, a bit less efficient but saves some code
+ PyObject *list = PVector_toList(self);
+ PyObject *list_repr = PyObject_Repr(list);
+ Py_DECREF(list);
+
+ if(list_repr == NULL) {
+ // Exception raised during call to repr
+ return NULL;
+ }
+
+ // Repr for list implemented differently in python 2 and 3. Need to
+ // handle this or core dump will occur.
+#if PY_MAJOR_VERSION >= 3
+ PyObject *s = PyUnicode_FromFormat("%s%U%s", "pvector(", list_repr, ")");
+ Py_DECREF(list_repr);
+#else
+ PyObject *s = PyString_FromString("pvector(");
+ PyString_ConcatAndDel(&s, list_repr);
+ PyString_ConcatAndDel(&s, PyString_FromString(")"));
+#endif
+
+ return s;
+}
+
+
+static long PVector_hash(PVector *self) {
+ // Follows the pattern of the tuple hash
+ long x, y;
+ Py_ssize_t i;
+ long mult = 1000003L;
+ x = 0x456789L;
+ for(i=0; i<self->count; i++) {
+ y = PyObject_Hash(_get_item(self, i));
+ if (y == -1) {
+ return -1;
+ }
+ x = (x ^ y) * mult;
+ mult += (long)(82520L + i + i);
+ }
+
+ x += 97531L;
+ if(x == -1) {
+ x = -2;
+ }
+
+ return x;
+}
+
+static PyObject* compareSizes(long vlen, long wlen, int op) {
+ int cmp;
+ PyObject *res;
+ switch (op) {
+ case Py_LT: cmp = vlen < wlen; break;
+ case Py_LE: cmp = vlen <= wlen; break;
+ case Py_EQ: cmp = vlen == wlen; break;
+ case Py_NE: cmp = vlen != wlen; break;
+ case Py_GT: cmp = vlen > wlen; break;
+ case Py_GE: cmp = vlen >= wlen; break;
+ default: return NULL; /* cannot happen */
+ }
+
+ if (cmp) {
+ res = Py_True;
+ } else {
+ res = Py_False;
+ }
+
+ Py_INCREF(res);
+ return res;
+}
+
+static PyObject* PVector_richcompare(PyObject *v, PyObject *w, int op) {
+ // Follows the principles of the tuple comparison
+ PVector *vt, *wt;
+ Py_ssize_t i;
+ Py_ssize_t vlen, wlen;
+ PyObject *list;
+ PyObject *result;
+
+ if(!PVector_CheckExact(v) || !PVector_CheckExact(w)) {
+ if(PVector_CheckExact(v)) {
+ list = PVector_toList((PVector*)v);
+ result = PyObject_RichCompare(list , w, op);
+ Py_DECREF(list);
+ return result;
+ }
+
+ if(PVector_CheckExact(w)) {
+ list = PVector_toList((PVector*)w);
+ result = PyObject_RichCompare(v, list, op);
+ Py_DECREF(list);
+ return result;
+ }
+
+ Py_INCREF(Py_NotImplemented);
+ return Py_NotImplemented;
+ }
+
+ if((op == Py_EQ) && (v == w)) {
+ Py_INCREF(Py_True);
+ return Py_True;
+ }
+
+ vt = (PVector *)v;
+ wt = (PVector *)w;
+
+ vlen = vt->count;
+ wlen = wt->count;
+
+ if (vlen != wlen) {
+ if (op == Py_EQ) {
+ Py_INCREF(Py_False);
+ return Py_False;
+ } else if (op == Py_NE) {
+ Py_INCREF(Py_True);
+ return Py_True;
+ }
+ }
+
+ /* Search for the first index where items are different. */
+ PyObject *left = NULL;
+ PyObject *right = NULL;
+ for (i = 0; i < vlen && i < wlen; i++) {
+ left = _get_item(vt, i);
+ right = _get_item(wt, i);
+ int k = PyObject_RichCompareBool(left, right, Py_EQ);
+ if (k < 0) {
+ return NULL;
+ }
+ if (!k) {
+ break;
+ }
+ }
+
+ if (i >= vlen || i >= wlen) {
+ /* No more items to compare -- compare sizes */
+ return compareSizes(vlen, wlen, op);
+ }
+
+ /* We have an item that differs -- shortcuts for EQ/NE */
+ if (op == Py_EQ) {
+ Py_INCREF(Py_False);
+ return Py_False;
+ } else if (op == Py_NE) {
+ Py_INCREF(Py_True);
+ return Py_True;
+ } else {
+ /* Compare the final item again using the proper operator */
+ return PyObject_RichCompare(left, right, op);
+ }
+}
+
+
+static PyObject* PVector_repeat(PVector *self, Py_ssize_t n) {
+ if (n < 0) {
+ n = 0;
+ }
+
+ if ((n == 0) || (self->count == 0)) {
+ Py_INCREF(EMPTY_VECTOR);
+ return (PyObject *)EMPTY_VECTOR;
+ } else if (n == 1) {
+ Py_INCREF(self);
+ return (PyObject *)self;
+ } else if ((self->count * n)/self->count != n) {
+ return PyErr_NoMemory();
+ } else {
+ int i, j;
+ PVector *newVec = copyPVector(self);
+ for(i=0; i<(n-1); i++) {
+ for(j=0; j<self->count; j++) {
+ extendWithItem(newVec, PVector_get_item(self, j));
+ }
+ }
+ return (PyObject*)newVec;
+ }
+}
+
+static int PVector_traverse(PVector *o, visitproc visit, void *arg) {
+ // Naive traverse
+ Py_ssize_t i;
+ for (i = o->count; --i >= 0; ) {
+ Py_VISIT(_get_item(o, i));
+ }
+
+ return 0;
+}
+
+
+static PyObject* PVector_index(PVector *self, PyObject *args) {
+ // A direct rip-off of the tuple version
+ Py_ssize_t i, start=0, stop=self->count;
+ PyObject *value;
+
+ if (!PyArg_ParseTuple(args, "O|O&O&:index", &value,
+ _PyEval_SliceIndex, &start,
+ _PyEval_SliceIndex, &stop)) {
+ return NULL;
+ }
+
+ if (start < 0) {
+ start += self->count;
+ if (start < 0) {
+ start = 0;
+ }
+ }
+
+ if (stop < 0) {
+ stop += self->count;
+ if (stop < 0) {
+ stop = 0;
+ }
+ }
+
+ for (i = start; i < stop && i < self->count; i++) {
+ int cmp = PyObject_RichCompareBool(_get_item(self, i), value, Py_EQ);
+ if (cmp > 0) {
+#if PY_MAJOR_VERSION >= 3
+ return PyLong_FromSsize_t(i);
+#else
+ return PyInt_FromSsize_t(i);
+#endif
+ } else if (cmp < 0) {
+ return NULL;
+ }
+ }
+
+ PyErr_SetString(PyExc_ValueError, "PVector.index(x): x not in vector");
+ return NULL;
+}
+
+static PyObject* PVector_count(PVector *self, PyObject *value) {
+ Py_ssize_t count = 0;
+ Py_ssize_t i;
+
+ for (i = 0; i < self->count; i++) {
+ int cmp = PyObject_RichCompareBool(_get_item(self, i), value, Py_EQ);
+ if (cmp > 0) {
+ count++;
+ } else if (cmp < 0) {
+ return NULL;
+ }
+ }
+
+#if PY_MAJOR_VERSION >= 3
+ return PyLong_FromSsize_t(count);
+#else
+ return PyInt_FromSsize_t(count);
+#endif
+}
+
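+/*
+  Pickle support (exposed as __reduce__ in the method table below). The vector
+  is reduced to the module level pvector factory applied to a plain list of
+  its contents, roughly equivalent to pvector(list(self)).
+*/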
+static PyObject* PVector_pickle_reduce(PVector *self) {
+
+ PyObject* module = PyImport_ImportModule("pvectorc");
+ PyObject* pvector_fn = PyObject_GetAttrString(module, "pvector");
+ Py_DECREF(module);
+
+ PyObject *list = PVector_toList(self);
+ PyObject *arg_tuple = PyTuple_New(1);
+ PyTuple_SET_ITEM(arg_tuple, 0, list);
+
+ PyObject *result_tuple = PyTuple_New(2);
+ PyTuple_SET_ITEM(result_tuple, 0, pvector_fn);
+ PyTuple_SET_ITEM(result_tuple, 1, arg_tuple);
+
+ return result_tuple;
+}
+
+static PVector* rawCopyPVector(PVector* vector) {
+ PVector* newVector = PyObject_GC_New(PVector, &PVectorType);
+ newVector->count = vector->count;
+ newVector->shift = vector->shift;
+ newVector->root = vector->root;
+ newVector->tail = vector->tail;
+ newVector->in_weakreflist = NULL;
+ PyObject_GC_Track((PyObject*)newVector);
+ return newVector;
+}
+
+static void initializeEvolver(PVectorEvolver* evolver, PVector* vector, PyObject* appendList) {
+ // Need to hold a reference to the underlying vector to manage
+ // the ref counting properly.
+ evolver->originalVector = vector;
+ evolver->newVector = vector;
+
+ if(appendList == NULL) {
+ evolver->appendList = PyList_New(0);
+ } else {
+ evolver->appendList = appendList;
+ }
+}
+
+static PyObject * PVector_evolver(PVector *self) {
+ PVectorEvolver *evolver = PyObject_GC_New(PVectorEvolver, &PVectorEvolverType);
+ if (evolver == NULL) {
+ return NULL;
+ }
+ initializeEvolver(evolver, self, NULL);
+ PyObject_GC_Track(evolver);
+ Py_INCREF(self);
+ return (PyObject *)evolver;
+}
+
+
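+/*
+  Copies a full node's worth of item pointers from src to dest and overwrites
+  the slot at pos with obj. No reference counts are touched here, that is the
+  responsibility of the callers (see incRefs()).
+*/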
+static void copyInsert(void** dest, void** src, Py_ssize_t pos, void *obj) {
+ memcpy(dest, src, BRANCH_FACTOR * sizeof(void*));
+ dest[pos] = obj;
+}
+
+static PyObject* PVector_append(PVector *self, PyObject *obj);
+
+static PyObject* PVector_transform(PVector *self, PyObject *obj);
+
+static PyObject* PVector_set(PVector *self, PyObject *obj);
+
+static PyObject* PVector_mset(PVector *self, PyObject *args);
+
+static PyObject* PVector_subscript(PVector* self, PyObject* item);
+
+static PyObject* PVector_extend(PVector *self, PyObject *args);
+
+static PyObject* PVector_delete(PVector *self, PyObject *args);
+
+static PyObject* PVector_remove(PVector *self, PyObject *args);
+
+static PySequenceMethods PVector_sequence_methods = {
+ (lenfunc)PVector_len, /* sq_length */
+ (binaryfunc)PVector_extend, /* sq_concat */
+ (ssizeargfunc)PVector_repeat, /* sq_repeat */
+ (ssizeargfunc)PVector_get_item, /* sq_item */
+ // TODO might want to move the slice function to here
+ NULL, /* sq_slice */
+ NULL, /* sq_ass_item */
+ NULL, /* sq_ass_slice */
+ NULL, /* sq_contains */
+ NULL, /* sq_inplace_concat */
+ NULL, /* sq_inplace_repeat */
+};
+
+static PyMappingMethods PVector_mapping_methods = {
+ (lenfunc)PVector_len,
+ (binaryfunc)PVector_subscript,
+ NULL
+};
+
+
+static PyMethodDef PVector_methods[] = {
+ {"append", (PyCFunction)PVector_append, METH_O, "Appends an element"},
+ {"set", (PyCFunction)PVector_set, METH_VARARGS, "Inserts an element at the specified position"},
+ {"extend", (PyCFunction)PVector_extend, METH_O|METH_COEXIST, "Extend"},
+ {"transform", (PyCFunction)PVector_transform, METH_VARARGS, "Apply one or more transformations"},
+ {"index", (PyCFunction)PVector_index, METH_VARARGS, "Return first index of value"},
+ {"count", (PyCFunction)PVector_count, METH_O, "Return number of occurrences of value"},
+ {"__reduce__", (PyCFunction)PVector_pickle_reduce, METH_NOARGS, "Pickle support method"},
+ {"evolver", (PyCFunction)PVector_evolver, METH_NOARGS, "Return new evolver for pvector"},
+ {"mset", (PyCFunction)PVector_mset, METH_VARARGS, "Inserts multiple elements at the specified positions"},
+ {"tolist", (PyCFunction)PVector_toList, METH_NOARGS, "Convert to list"},
+ {"delete", (PyCFunction)PVector_delete, METH_VARARGS, "Delete element(s) by index"},
+ {"remove", (PyCFunction)PVector_remove, METH_VARARGS, "Remove element(s) by equality"},
+ {NULL}
+};
+
+static PyObject * PVectorIter_iter(PyObject *seq);
+
+static PyTypeObject PVectorType = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "pvectorc.PVector", /* tp_name */
+ sizeof(PVector), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ (destructor)PVector_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+ 0, /* tp_compare */
+ (reprfunc)PVector_repr, /* tp_repr */
+ 0, /* tp_as_number */
+ &PVector_sequence_methods, /* tp_as_sequence */
+ &PVector_mapping_methods, /* tp_as_mapping */
+ (hashfunc)PVector_hash, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ 0, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
+ "Persistent vector", /* tp_doc */
+ (traverseproc)PVector_traverse, /* tp_traverse */
+ 0, /* tp_clear */
+ PVector_richcompare, /* tp_richcompare */
+ offsetof(PVector, in_weakreflist), /* tp_weaklistoffset */
+ PVectorIter_iter, /* tp_iter */
+ 0, /* tp_iternext */
+ PVector_methods, /* tp_methods */
+ PVector_members, /* tp_members */
+ 0, /* tp_getset */
+ 0, /* tp_base */
+ 0, /* tp_dict */
+ 0, /* tp_descr_get */
+ 0, /* tp_descr_set */
+ 0, /* tp_dictoffset */
+};
+
+static PyObject* pyrsistent_pvec(PyObject *self, PyObject *args) {
+ debug("pyrsistent_pvec(): %x\n", args);
+
+ PyObject *argObj = NULL; /* list of arguments */
+
+ if(!PyArg_ParseTuple(args, "|O", &argObj)) {
+ return NULL;
+ }
+
+ if(argObj == NULL) {
+ Py_INCREF(EMPTY_VECTOR);
+ return (PyObject*)EMPTY_VECTOR;
+ }
+
+ return PVector_extend(EMPTY_VECTOR, argObj);
+}
+
+static PVector* emptyNewPvec(void) {
+ PVector *pvec = PyObject_GC_New(PVector, &PVectorType);
+ debug("pymem alloc_new %x, ref cnt: %u\n", pvec, pvec->ob_refcnt);
+ pvec->count = (Py_ssize_t)0;
+ pvec->shift = SHIFT;
+ pvec->root = newNode();
+ pvec->tail = newNode();
+ pvec->in_weakreflist = NULL;
+ PyObject_GC_Track((PyObject*)pvec);
+ return pvec;
+}
+
+static void incRefs(PyObject **obj) {
+  // TODO-OPT: Would it be OK to exit on the first NULL? There should not be
+  // any non-NULL entries beyond a NULL.
+ int i;
+ for(i = 0; i < BRANCH_FACTOR; i++) {
+ Py_XINCREF(obj[i]);
+ }
+}
+
+
+static PVector* newPvec(unsigned int count, unsigned int shift, VNode *root) {
+ // TODO-OPT: Introduce object cache
+ PVector *pvec = PyObject_GC_New(PVector, &PVectorType);
+ debug("pymem alloc_copy %x, ref cnt: %u\n", pvec, pvec->ob_refcnt);
+ pvec->count = count;
+ pvec->shift = shift;
+ pvec->root = root;
+ pvec->tail = newNode();
+ pvec->in_weakreflist = NULL;
+ PyObject_GC_Track((PyObject*)pvec);
+ return pvec;
+}
+
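+/*
+  Builds a new left-most path of internal nodes, one node per level, with the
+  given node attached at the bottom. Used when a full tail has to be hung in
+  under a new subtree of the trie.
+*/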
+static VNode* newPath(unsigned int level, VNode* node){
+ if(level == 0) {
+ INC_NODE_REF_COUNT(node);
+ return node;
+ }
+
+ VNode* result = newNode();
+ result->items[0] = newPath(level - SHIFT, node);
+ return result;
+}
+
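+/*
+  Pushes a full tail into the trie using path copying. The parent node is
+  copied and the tail (or a new path leading down to it) is inserted at the
+  slot selected by ((count - 1) >> level) & BIT_MASK. Nodes that are not on
+  the copied path are shared with the original tree.
+*/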
+static VNode* pushTail(unsigned int level, unsigned int count, VNode* parent, VNode* tail) {
+ int subIndex = ((count - 1) >> level) & BIT_MASK;
+ VNode* result = copyNode(parent);
+ VNode* nodeToInsert;
+ VNode* child;
+ debug("pushTail(): count = %i, subIndex = %i\n", count, subIndex);
+
+ if(level == SHIFT) {
+ // We're at the bottom
+ INC_NODE_REF_COUNT(tail);
+ nodeToInsert = tail;
+ } else {
+ // More levels available in the tree
+ child = parent->items[subIndex];
+
+ if(child != NULL) {
+ nodeToInsert = pushTail(level - SHIFT, count, child, tail);
+
+      // Need to adjust the ref count of the child node here since it was
+      // incremented in an earlier stage when the node was copied. The child node
+      // will now become part of the path copy, so the number of references to
+      // the original child does not increase at all.
+ DEC_NODE_REF_COUNT(child);
+ } else {
+ nodeToInsert = newPath(level - SHIFT, tail);
+ }
+ }
+
+ result->items[subIndex] = nodeToInsert;
+ return result;
+}
+
+static PVector* copyPVector(PVector *original) {
+ PVector *newVec = newPvec(original->count, original->shift, original->root);
+ INC_NODE_REF_COUNT(original->root);
+ memcpy(newVec->tail->items, original->tail->items, TAIL_SIZE(original) * sizeof(void*));
+ incRefs((PyObject**)newVec->tail->items);
+ return newVec;
+}
+
+/* Does not steal a reference, this must be managed outside of this function */
+static void extendWithItem(PVector *newVec, PyObject *item) {
+ unsigned int tail_size = TAIL_SIZE(newVec);
+
+ if(tail_size >= BRANCH_FACTOR) {
+ VNode* new_root;
+ if(ROOT_NODE_FULL(newVec)) {
+ new_root = newNode();
+ new_root->items[0] = newVec->root;
+ new_root->items[1] = newPath(newVec->shift, newVec->tail);
+ newVec->shift += SHIFT;
+ } else {
+ new_root = pushTail(newVec->shift, newVec->count, newVec->root, newVec->tail);
+ releaseNode(newVec->shift, newVec->root);
+ }
+
+ newVec->root = new_root;
+
+ // Need to adjust the ref count of the old tail here since no new references were
+ // actually created, we just moved the tail.
+ DEC_NODE_REF_COUNT(newVec->tail);
+ newVec->tail = newNode();
+ tail_size = 0;
+ }
+
+ newVec->tail->items[tail_size] = item;
+ newVec->count++;
+}
+
+
+#if PY_MAJOR_VERSION >= 3
+// This was changed in Python 3.2 but we do not claim compatibility with any older version of Python 3.
+#define SLICE_CAST
+#else
+#define SLICE_CAST (PySliceObject *)
+#endif
+
+static PyObject *PVector_subscript(PVector* self, PyObject* item) {
+ if (PyIndex_Check(item)) {
+ Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
+ if (i == -1 && PyErr_Occurred()) {
+ return NULL;
+ }
+
+ return PVector_get_item(self, i);
+ } else if (PySlice_Check(item)) {
+ Py_ssize_t start, stop, step, slicelength, cur, i;
+ if (PySlice_GetIndicesEx(SLICE_CAST item, self->count,
+ &start, &stop, &step, &slicelength) < 0) {
+ return NULL;
+ }
+
+ debug("start=%i, stop=%i, step=%i\n", start, stop, step);
+
+ if (slicelength <= 0) {
+ Py_INCREF(EMPTY_VECTOR);
+ return (PyObject*)EMPTY_VECTOR;
+ } else if((slicelength == self->count) && (step > 0)) {
+ Py_INCREF(self);
+ return (PyObject*)self;
+ } else {
+ PVector *newVec = copyPVector(EMPTY_VECTOR);
+ for (cur=start, i=0; i<slicelength; cur += (size_t)step, i++) {
+ extendWithItem(newVec, PVector_get_item(self, cur));
+ }
+
+ return (PyObject*)newVec;
+ }
+ } else {
+ PyErr_Format(PyExc_TypeError, "pvector indices must be integers, not %.200s", Py_TYPE(item)->tp_name);
+ return NULL;
+ }
+}
+
+/* A hack to get some of the error handling code away from the function
+ doing the actual work */
+#define HANDLE_ITERATION_ERROR() \
+ if (PyErr_Occurred()) { \
+ if (PyErr_ExceptionMatches(PyExc_StopIteration)) { \
+ PyErr_Clear(); \
+ } else { \
+ return NULL; \
+ } \
+ }
+
+
+/* Returns a new vector that is extended with the given iterable.
+ Takes a copy of the original vector and performs the extension in place on this
+ one for efficiency.
+
+   Some optimizations that could be applied to this function but are not yet
+   considered important enough:
+   - Use the PySequence_Fast ops if the iterable is a list or a tuple (which it
+     would probably often be)
+   - Only copy the original tail if it is not full
+   - No need to try to increment the ref counts for the whole tail
+*/
+static PyObject* PVector_extend(PVector *self, PyObject *iterable) {
+ PyObject *it;
+ PyObject *(*iternext)(PyObject *);
+
+ it = PyObject_GetIter(iterable);
+ if (it == NULL) {
+ return NULL;
+ }
+
+ // TODO-OPT: Use special fast iterator if available
+ iternext = *Py_TYPE(it)->tp_iternext;
+ PyObject *item = iternext(it);
+ if (item == NULL) {
+ Py_DECREF(it);
+ HANDLE_ITERATION_ERROR()
+ Py_INCREF(self);
+ return (PyObject *)self;
+ } else {
+ PVector *newVec = copyPVector(self);
+ // TODO-OPT test using special case code here for extension to
+ // avoid recalculating tail length all the time.
+ while(item != NULL) {
+ extendWithItem(newVec, item);
+ item = iternext(it);
+ }
+
+ Py_DECREF(it);
+ HANDLE_ITERATION_ERROR()
+ return (PyObject*)newVec;
+ }
+}
+
+/*
+ Steals a reference to the object that is appended to the list.
+*/
+static PyObject* PVector_append(PVector *self, PyObject *obj) {
+ assert (obj != NULL);
+
+ unsigned int tail_size = TAIL_SIZE(self);
+ debug("append(): count = %u, tail_size = %u\n", self->count, tail_size);
+
+ // Does the new object fit in the tail? If so, take a copy of the tail and
+ // insert the new element in that.
+ if(tail_size < BRANCH_FACTOR) {
+ INC_NODE_REF_COUNT(self->root);
+ PVector *new_pvec = newPvec(self->count + 1, self->shift, self->root);
+ // TODO-OPT No need to copy more than the current tail length
+ // TODO-OPT No need to incRefs for all elements all the time
+ copyInsert(new_pvec->tail->items, self->tail->items, tail_size, obj);
+ incRefs((PyObject**)new_pvec->tail->items);
+ debug("append(): new_pvec=%p, new_pvec->tail=%p, new_pvec->root=%p\n",
+ new_pvec, new_pvec->tail, new_pvec->root);
+
+ return (PyObject*)new_pvec;
+ }
+
+ // Tail is full, need to push it into the tree
+ VNode* new_root;
+ unsigned int new_shift;
+ if(ROOT_NODE_FULL(self)) {
+ new_root = newNode();
+ new_root->items[0] = self->root;
+ INC_NODE_REF_COUNT(self->root);
+ new_root->items[1] = newPath(self->shift, self->tail);
+ new_shift = self->shift + SHIFT;
+ } else {
+ new_root = pushTail(self->shift, self->count, self->root, self->tail);
+ new_shift = self->shift;
+ }
+
+ PVector* pvec = newPvec(self->count + 1, new_shift, new_root);
+ pvec->tail->items[0] = obj;
+ Py_XINCREF(obj);
+ debug("append_push(): pvec=%p, pvec->tail=%p, pvec->root=%p\n", pvec, pvec->tail, pvec->root);
+ return (PyObject*)pvec;
+}
+
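+/*
+  Returns a new version of the subtree rooted at node where the element at
+  position has been replaced by value. Only the nodes on the path from the
+  root down to the affected leaf are copied, all other nodes are shared with
+  the original vector.
+*/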
+static VNode* doSet(VNode* node, unsigned int level, unsigned int position, PyObject* value) {
+ debug("doSet(): level == %i\n", level);
+ if(level == 0) {
+ // TODO-OPT: Perhaps an alloc followed by a reset of reference
+ // count is enough here since we overwrite all subnodes below.
+ VNode* theNewNode = newNode();
+ copyInsert(theNewNode->items, node->items, position & BIT_MASK, value);
+ incRefs((PyObject**)theNewNode->items);
+ return theNewNode;
+ } else {
+ VNode* theNewNode = copyNode(node);
+ Py_ssize_t index = (position >> level) & BIT_MASK;
+
+ // Drop reference to this node since we're about to replace it
+ DEC_NODE_REF_COUNT((VNode*)theNewNode->items[index]);
+ theNewNode->items[index] = doSet(node->items[index], level - SHIFT, position, value);
+ return theNewNode;
+ }
+}
+
+
+static PyObject* internalSet(PVector *self, Py_ssize_t position, PyObject *argObj) {
+ if(position < 0) {
+ position += self->count;
+ }
+
+ if((0 <= position) && (position < self->count)) {
+ if(position >= TAIL_OFF(self)) {
+ // Reuse the root, replace the tail
+ INC_NODE_REF_COUNT(self->root);
+ PVector *new_pvec = newPvec(self->count, self->shift, self->root);
+ copyInsert(new_pvec->tail->items, self->tail->items, position & BIT_MASK, argObj);
+ incRefs((PyObject**)new_pvec->tail->items);
+ return (PyObject*)new_pvec;
+ } else {
+ // Keep the tail, replace the root
+ VNode *newRoot = doSet(self->root, self->shift, position, argObj);
+ PVector *new_pvec = newPvec(self->count, self->shift, newRoot);
+
+ // Free the tail and replace it with a reference to the tail of the original vector
+ freeNode(new_pvec->tail);
+ new_pvec->tail = self->tail;
+ INC_NODE_REF_COUNT(self->tail);
+ return (PyObject*)new_pvec;
+ }
+ } else if (position == self->count) {
+ // TODO Remove this case?
+ return PVector_append(self, argObj);
+ } else {
+ PyErr_Format(PyExc_IndexError, "Index out of range: %zd", position);
+ return NULL;
+ }
+}
+
+static PyObject* PVector_transform(PVector *self, PyObject *obj) {
+ return transform(self, obj);
+}
+
+/*
+ Steals a reference to the object that is inserted in the vector.
+*/
+static PyObject* PVector_set(PVector *self, PyObject *args) {
+ PyObject *argObj = NULL; /* argument to insert */
+ Py_ssize_t position;
+
+ /* The n parses for size, the O parses for a Python object */
+ if(!PyArg_ParseTuple(args, "nO", &position, &argObj)) {
+ return NULL;
+ }
+
+ return internalSet(self, position, argObj);
+}
+
+
+static PyObject* PVector_mset(PVector *self, PyObject *args) {
+ Py_ssize_t size = PyTuple_Size(args);
+ if(size % 2) {
+ PyErr_SetString(PyExc_TypeError, "mset expected an even number of arguments");
+ return NULL;
+ }
+
+ PVectorEvolver* evolver = (PVectorEvolver*)PVector_evolver(self);
+ Py_ssize_t i;
+ for(i=0; i<size; i+=2) {
+ if(PVectorEvolver_set_item(evolver, PyTuple_GetItem(args, i), PyTuple_GetItem(args, i + 1)) < 0) {
+ Py_DECREF(evolver);
+ return NULL;
+ }
+ }
+
+ PyObject* vector = PVectorEvolver_persistent(evolver);
+ Py_DECREF(evolver);
+ return vector;
+}
+
+
+static PyObject* internalDelete(PVector *self, Py_ssize_t index, PyObject *stop_obj) {
+ Py_ssize_t stop;
+ PyObject *list;
+ PyObject *result;
+
+ if (index < 0) {
+ index += self->count;
+ }
+
+ if (stop_obj != NULL) {
+ if (PyIndex_Check(stop_obj)) {
+ stop = PyNumber_AsSsize_t(stop_obj, PyExc_IndexError);
+ if (stop == -1 && PyErr_Occurred()) {
+ return NULL;
+ }
+ } else {
+ PyErr_Format(PyExc_TypeError, "Stop index must be integer, not %.200s", Py_TYPE(stop_obj)->tp_name);
+ return NULL;
+ }
+
+ if (stop < 0) {
+ stop += self->count;
+ }
+ } else {
+ if (index < 0 || index >= self->count) {
+ PyErr_SetString(PyExc_IndexError, "delete index out of range");
+ return NULL;
+ }
+
+ stop = index + 1;
+ }
+
+ list = PVector_toList(self);
+ if(PyList_SetSlice(list, index, stop, NULL) < 0) {
+ return NULL;
+ }
+
+ result = PVector_extend(EMPTY_VECTOR, list);
+ Py_DECREF(list);
+ return result;
+}
+
+static PyObject* PVector_delete(PVector *self, PyObject *args) {
+ Py_ssize_t index;
+ PyObject *stop_obj = NULL;
+
+ if(!PyArg_ParseTuple(args, "n|O:delete", &index, &stop_obj)) {
+ return NULL;
+ }
+
+ return internalDelete(self, index, stop_obj);
+}
+
+static PyObject* PVector_remove(PVector *self, PyObject *args) {
+ Py_ssize_t index;
+ PyObject* py_index = PVector_index(self, args);
+
+ if(py_index != NULL) {
+#if PY_MAJOR_VERSION >= 3
+ index = PyLong_AsSsize_t(py_index);
+#else
+ index = PyInt_AsSsize_t(py_index);
+#endif
+ Py_DECREF(py_index);
+ return internalDelete(self, index, NULL);
+ }
+
+ PyErr_SetString(PyExc_ValueError, "PVector.remove(x): x not in vector");
+ return NULL;
+}
+
+
+/*********************** PVector Iterator **************************/
+
+/*
+The Sequence class provides us with a default iterator, but the runtime
+overhead of using it compared to the dedicated iterator below is huge.
+*/
+
+typedef struct {
+ PyObject_HEAD
+ Py_ssize_t it_index;
+ PVector *it_seq; /* Set to NULL when iterator is exhausted */
+} PVectorIter;
+
+static void PVectorIter_dealloc(PVectorIter *);
+static int PVectorIter_traverse(PVectorIter *, visitproc, void *);
+static PyObject *PVectorIter_next(PVectorIter *);
+
+static PyMethodDef PVectorIter_methods[] = {
+ {NULL, NULL} /* sentinel */
+};
+
+static PyTypeObject PVectorIterType = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "pvector_iterator", /* tp_name */
+ sizeof(PVectorIter), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ /* methods */
+ (destructor)PVectorIter_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+ 0, /* tp_compare */
+ 0, /* tp_repr */
+ 0, /* tp_as_number */
+ 0, /* tp_as_sequence */
+ 0, /* tp_as_mapping */
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ PyObject_GenericGetAttr, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
+ 0, /* tp_doc */
+ (traverseproc)PVectorIter_traverse, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ PyObject_SelfIter, /* tp_iter */
+ (iternextfunc)PVectorIter_next, /* tp_iternext */
+ PVectorIter_methods, /* tp_methods */
+ 0, /* tp_members */
+};
+
+static PyObject *PVectorIter_iter(PyObject *seq) {
+ PVectorIter *it = PyObject_GC_New(PVectorIter, &PVectorIterType);
+ if (it == NULL) {
+ return NULL;
+ }
+
+ it->it_index = 0;
+ Py_INCREF(seq);
+ it->it_seq = (PVector *)seq;
+ PyObject_GC_Track(it);
+ return (PyObject *)it;
+}
+
+static void PVectorIter_dealloc(PVectorIter *it) {
+ PyObject_GC_UnTrack(it);
+ Py_XDECREF(it->it_seq);
+ PyObject_GC_Del(it);
+}
+
+static int PVectorIter_traverse(PVectorIter *it, visitproc visit, void *arg) {
+ Py_VISIT(it->it_seq);
+ return 0;
+}
+
+static PyObject *PVectorIter_next(PVectorIter *it) {
+ assert(it != NULL);
+ PVector *seq = it->it_seq;
+ if (seq == NULL) {
+ return NULL;
+ }
+
+ if (it->it_index < seq->count) {
+ PyObject *item = _get_item(seq, it->it_index);
+ ++it->it_index;
+ Py_INCREF(item);
+ return item;
+ }
+
+ Py_DECREF(seq);
+ it->it_seq = NULL;
+ return NULL;
+}
+
+
+/*********************** PVector Evolver **************************/
+
+/*
+Evolver to make multiple updates easier to work with and more efficient.
+*/
+
+static void PVectorEvolver_dealloc(PVectorEvolver *);
+static PyObject *PVectorEvolver_append(PVectorEvolver *, PyObject *);
+static PyObject *PVectorEvolver_extend(PVectorEvolver *, PyObject *);
+static PyObject *PVectorEvolver_set(PVectorEvolver *, PyObject *);
+static PyObject *PVectorEvolver_delete(PVectorEvolver *self, PyObject *args);
+static PyObject *PVectorEvolver_subscript(PVectorEvolver *, PyObject *);
+static PyObject *PVectorEvolver_persistent(PVectorEvolver *);
+static Py_ssize_t PVectorEvolver_len(PVectorEvolver *);
+static PyObject *PVectorEvolver_is_dirty(PVectorEvolver *);
+static int PVectorEvolver_traverse(PVectorEvolver *self, visitproc visit, void *arg);
+
+static PyMappingMethods PVectorEvolver_mapping_methods = {
+ (lenfunc)PVectorEvolver_len,
+ (binaryfunc)PVectorEvolver_subscript,
+ (objobjargproc)PVectorEvolver_set_item,
+};
+
+
+static PyMethodDef PVectorEvolver_methods[] = {
+ {"append", (PyCFunction)PVectorEvolver_append, METH_O, "Appends an element"},
+ {"extend", (PyCFunction)PVectorEvolver_extend, METH_O|METH_COEXIST, "Extend"},
+ {"set", (PyCFunction)PVectorEvolver_set, METH_VARARGS, "Set item"},
+ {"delete", (PyCFunction)PVectorEvolver_delete, METH_VARARGS, "Delete item"},
+ {"persistent", (PyCFunction)PVectorEvolver_persistent, METH_NOARGS, "Create PVector from evolver"},
+ {"is_dirty", (PyCFunction)PVectorEvolver_is_dirty, METH_NOARGS, "Check if evolver contains modifications"},
+ {NULL, NULL} /* sentinel */
+};
+
+static PyTypeObject PVectorEvolverType = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "pvector_evolver", /* tp_name */
+ sizeof(PVectorEvolver), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ /* methods */
+ (destructor)PVectorEvolver_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+ 0, /* tp_compare */
+ 0, /* tp_repr */
+ 0, /* tp_as_number */
+ 0, /* tp_as_sequence */
+ &PVectorEvolver_mapping_methods, /* tp_as_mapping */
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ PyObject_GenericGetAttr, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
+ 0, /* tp_doc */
+ (traverseproc)PVectorEvolver_traverse, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
+ PVectorEvolver_methods, /* tp_methods */
+ 0, /* tp_members */
+};
+
+
+// Indicate that a node is "dirty" (has been updated by the evolver)
+// by setting the MSB of the refCount. This will be cleared when
+// creating a pvector from the evolver (cleaning it).
+#define DIRTY_BIT 0x80000000
+#define REF_COUNT_MASK (~DIRTY_BIT)
+#define IS_DIRTY(node) ((node)->refCount & DIRTY_BIT)
+#define SET_DIRTY(node) ((node)->refCount |= DIRTY_BIT)
+#define CLEAR_DIRTY(node) ((node)->refCount &= REF_COUNT_MASK)
+
+
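+/*
+  Recursively clears the dirty bit of a node and all of its dirty children and
+  resets their ref counts to 1, since at this point each freshly built node is
+  referenced only by the vector that is being created.
+*/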
+static void cleanNodeRecursively(VNode *node, int level) {
+ debug("Cleaning recursively node=%p, level=%u\n", node, level);
+
+ int i;
+ CLEAR_DIRTY(node);
+ SET_NODE_REF_COUNT(node, 1);
+ if(level > 0) {
+ for(i = 0; i < BRANCH_FACTOR; i++) {
+ VNode *nextNode = (VNode*)node->items[i];
+ if((nextNode != NULL) && IS_DIRTY(nextNode)) {
+ cleanNodeRecursively(nextNode, level - SHIFT);
+ }
+ }
+ }
+}
+
+static void cleanVector(PVector *vector) {
+ // Cleaning the vector means that all dirty indications are cleared
+ // and that the nodes that were dirty get a ref count of 1 since
+ // they are brand new. Once cleaned the vector can be released into
+ // the wild.
+ if(IS_DIRTY(vector->tail)) {
+ cleanNodeRecursively(vector->tail, 0);
+ } else {
+ INC_NODE_REF_COUNT(vector->tail);
+ }
+
+ if(IS_DIRTY(vector->root)) {
+ cleanNodeRecursively(vector->root, vector->shift);
+ } else {
+ INC_NODE_REF_COUNT(vector->root);
+ }
+}
+
+static void PVectorEvolver_dealloc(PVectorEvolver *self) {
+ PyObject_GC_UnTrack(self);
+ Py_TRASHCAN_SAFE_BEGIN(self);
+
+ if(self->originalVector != self->newVector) {
+ cleanVector(self->newVector);
+ Py_DECREF(self->newVector);
+ }
+
+ Py_DECREF(self->originalVector);
+ Py_DECREF(self->appendList);
+
+ PyObject_GC_Del(self);
+ Py_TRASHCAN_SAFE_END(self);
+}
+
+static PyObject *PVectorEvolver_append(PVectorEvolver *self, PyObject *args) {
+ if (PyList_Append(self->appendList, args) == 0) {
+ Py_INCREF(self);
+ return (PyObject*)self;
+ }
+
+ return NULL;
+}
+
+static PyObject *PVectorEvolver_extend(PVectorEvolver *self, PyObject *args) {
+ PyObject *retVal = _PyList_Extend((PyListObject *)self->appendList, args);
+ if (retVal == NULL) {
+ return NULL;
+ }
+
+ Py_DECREF(retVal);
+ Py_INCREF(self);
+ return (PyObject*)self;
+}
+
+static PyObject *PVectorEvolver_subscript(PVectorEvolver *self, PyObject *item) {
+ if (PyIndex_Check(item)) {
+ Py_ssize_t position = PyNumber_AsSsize_t(item, PyExc_IndexError);
+ if (position == -1 && PyErr_Occurred()) {
+ return NULL;
+ }
+
+ if (position < 0) {
+ position += self->newVector->count + PyList_GET_SIZE(self->appendList);
+ }
+
+ if(0 <= position && position < self->newVector->count) {
+ PyObject *result = _get_item(self->newVector, position);
+ Py_XINCREF(result);
+ return result;
+ } else if (0 <= position && position < (self->newVector->count + PyList_GET_SIZE(self->appendList))) {
+ PyObject *result = PyList_GetItem(self->appendList, position - self->newVector->count);
+ Py_INCREF(result);
+ return result;
+ } else {
+ PyErr_SetString(PyExc_IndexError, "Index out of range");
+ }
+ } else {
+ PyErr_Format(PyExc_TypeError, "Indices must be integers, not %.200s", item->ob_type->tp_name);
+ }
+
+ return NULL;
+}
+
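+/*
+  Evolver variant of doSet(). Nodes that are already marked dirty belong
+  exclusively to this evolver and are updated in place, while clean nodes are
+  copied and marked dirty. The dirty marks are cleared again by cleanVector()
+  when the evolver is turned back into a persistent vector.
+*/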
+static VNode* doSetWithDirty(VNode* node, unsigned int level, unsigned int position, PyObject* value) {
+ VNode* resultNode;
+ debug("doSetWithDirty(): level == %i\n", level);
+ if(level == 0) {
+ if(!IS_DIRTY(node)) {
+ resultNode = allocNode();
+ copyInsert(resultNode->items, node->items, position & BIT_MASK, value);
+ incRefs((PyObject**)resultNode->items);
+ SET_DIRTY(resultNode);
+ } else {
+ resultNode = node;
+ Py_INCREF(value);
+ Py_DECREF(resultNode->items[position & BIT_MASK]);
+ resultNode->items[position & BIT_MASK] = value;
+ }
+ } else {
+ if(!IS_DIRTY(node)) {
+ resultNode = copyNode(node);
+ SET_DIRTY(resultNode);
+ } else {
+ resultNode = node;
+ }
+
+ Py_ssize_t index = (position >> level) & BIT_MASK;
+ VNode* oldNode = (VNode*)resultNode->items[index];
+ resultNode->items[index] = doSetWithDirty(resultNode->items[index], level - SHIFT, position, value);
+
+ if(resultNode->items[index] != oldNode) {
+ // Node replaced, drop references to old node
+ DEC_NODE_REF_COUNT(oldNode);
+ }
+ }
+
+ return resultNode;
+}
+
+/*
+ Steals a reference to the object that is inserted in the vector.
+*/
+static PyObject *PVectorEvolver_set(PVectorEvolver *self, PyObject *args) {
+ PyObject *argObj = NULL; /* argument to insert */
+ PyObject *position = NULL;
+
+  /* Both O's parse for Python objects */
+ if(!PyArg_ParseTuple(args, "OO", &position, &argObj)) {
+ return NULL;
+ }
+
+ if(PVectorEvolver_set_item(self, position, argObj) < 0) {
+ return NULL;
+ }
+
+ Py_INCREF(self);
+ return (PyObject*)self;
+}
+
+static PyObject *PVectorEvolver_delete(PVectorEvolver *self, PyObject *args) {
+ PyObject *position = NULL;
+
+  /* The O parses for a Python object */
+ if(!PyArg_ParseTuple(args, "O", &position)) {
+ return NULL;
+ }
+
+ if(PVectorEvolver_set_item(self, position, NULL) < 0) {
+ return NULL;
+ }
+
+ Py_INCREF(self);
+ return (PyObject*)self;
+}
+
+
+static int internalPVectorDelete(PVectorEvolver *self, Py_ssize_t position) {
+  // Delete an element. Should be unusual. A simple but expensive operation that
+  // reuses the delete code of the vector: realize the vector, delete on it and
+  // then reset the evolver to work on the new vector.
+ PVector *temp = (PVector*)PVectorEvolver_persistent(self);
+ PVector *temp2 = (PVector*)internalDelete(temp, position, NULL);
+ Py_DECREF(temp);
+
+ if(temp2 == NULL) {
+ return -1;
+ }
+
+ Py_DECREF(self->originalVector);
+ self->originalVector = temp2;
+ self->newVector = self->originalVector;
+ return 0;
+}
+
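+/*
+  Mapping protocol item assignment for the evolver. An index within the
+  realized vector updates the (possibly copied) trie through doSetWithDirty(),
+  an index within the pending append list updates that list, and an index
+  exactly one past the current end appends the value. A NULL value means that
+  the item should be deleted.
+*/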
+static int PVectorEvolver_set_item(PVectorEvolver *self, PyObject* item, PyObject* value) {
+ if (PyIndex_Check(item)) {
+ Py_ssize_t position = PyNumber_AsSsize_t(item, PyExc_IndexError);
+ if (position == -1 && PyErr_Occurred()) {
+ return -1;
+ }
+
+ if (position < 0) {
+ position += self->newVector->count + PyList_GET_SIZE(self->appendList);
+ }
+
+ if((0 <= position) && (position < self->newVector->count)) {
+ if(self->originalVector == self->newVector) {
+ // Create new vector since we're about to modify the original
+ self->newVector = rawCopyPVector(self->originalVector);
+ }
+
+ if(value != NULL) {
+ if(position < TAIL_OFF(self->newVector)) {
+ self->newVector->root = doSetWithDirty(self->newVector->root, self->newVector->shift, position, value);
+ } else {
+ self->newVector->tail = doSetWithDirty(self->newVector->tail, 0, position, value);
+ }
+
+ return 0;
+ }
+
+ return internalPVectorDelete(self, position);
+ } else if((0 <= position) && (position < (self->newVector->count + PyList_GET_SIZE(self->appendList)))) {
+ if (value != NULL) {
+ int result = PyList_SetItem(self->appendList, position - self->newVector->count, value);
+ if(result == 0) {
+ Py_INCREF(value);
+ }
+ return result;
+ }
+
+ return internalPVectorDelete(self, position);
+ } else if((0 <= position)
+ && (position < (self->newVector->count + PyList_GET_SIZE(self->appendList) + 1))
+ && (value != NULL)) {
+ return PyList_Append(self->appendList, value);
+ } else {
+ PyErr_Format(PyExc_IndexError, "Index out of range: %zd", position);
+ }
+ } else {
+ PyErr_Format(PyExc_TypeError, "Indices must be integers, not %.200s", item->ob_type->tp_name);
+ }
+ return -1;
+}
+
+static PyObject *PVectorEvolver_persistent(PVectorEvolver *self) {
+ PVector *resultVector;
+ if(self->newVector != self->originalVector) {
+ cleanVector(self->newVector);
+ Py_DECREF(self->originalVector);
+ }
+
+ resultVector = self->newVector;
+
+ if(PyList_GET_SIZE(self->appendList)) {
+ PVector *oldVector = resultVector;
+ resultVector = (PVector*)PVector_extend(resultVector, self->appendList);
+ Py_DECREF(oldVector);
+ Py_DECREF(self->appendList);
+ self->appendList = NULL;
+ }
+
+ initializeEvolver(self, resultVector, self->appendList);
+ Py_INCREF(resultVector);
+ return (PyObject*)resultVector;
+}
+
+static Py_ssize_t PVectorEvolver_len(PVectorEvolver *self) {
+ return self->newVector->count + PyList_GET_SIZE(self->appendList);
+}
+
+static PyObject* PVectorEvolver_is_dirty(PVectorEvolver *self) {
+ if((self->newVector != self->originalVector) || (PyList_GET_SIZE(self->appendList) > 0)) {
+ Py_INCREF(Py_True);
+ return Py_True;
+ }
+
+ Py_INCREF(Py_False);
+ return Py_False;
+}
+
+static int PVectorEvolver_traverse(PVectorEvolver *self, visitproc visit, void *arg) {
+ Py_VISIT(self->newVector);
+ if (self->newVector != self->originalVector) {
+ Py_VISIT(self->originalVector);
+ }
+ Py_VISIT(self->appendList);
+ return 0;
+}
+
+static PyMethodDef PyrsistentMethods[] = {
+ {"pvector", pyrsistent_pvec, METH_VARARGS,
+ "pvector([iterable])\n"
+ "Create a new persistent vector containing the elements in iterable.\n\n"
+ ">>> v1 = pvector([1, 2, 3])\n"
+ ">>> v1\n"
+ "pvector([1, 2, 3])"},
+ {NULL, NULL, 0, NULL}
+};
+
+
+/********************* Python module initialization ************************/
+
+#if PY_MAJOR_VERSION >= 3
+ static struct PyModuleDef moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "pvectorc", /* m_name */
+ "Persistent vector", /* m_doc */
+ -1, /* m_size */
+ PyrsistentMethods, /* m_methods */
+ NULL, /* m_reload */
+ NULL, /* m_traverse */
+ NULL, /* m_clear */
+ NULL, /* m_free */
+ };
+#endif
+
+static PyObject* pyrsistent_pvectorc_moduleinit(void) {
+ PyObject* m;
+
+ // Only allow creation/initialization through factory method pvec
+ PVectorType.tp_init = NULL;
+ PVectorType.tp_new = NULL;
+
+ if (PyType_Ready(&PVectorType) < 0) {
+ return NULL;
+ }
+ if (PyType_Ready(&PVectorIterType) < 0) {
+ return NULL;
+ }
+ if (PyType_Ready(&PVectorEvolverType) < 0) {
+ return NULL;
+ }
+
+
+#if PY_MAJOR_VERSION >= 3
+ m = PyModule_Create(&moduledef);
+#else
+ m = Py_InitModule3("pvectorc", PyrsistentMethods, "Persistent vector");
+#endif
+
+ if (m == NULL) {
+ return NULL;
+ }
+
+ if(EMPTY_VECTOR == NULL) {
+ EMPTY_VECTOR = emptyNewPvec();
+ }
+
+ nodeCache.size = 0;
+
+ Py_INCREF(&PVectorType);
+ PyModule_AddObject(m, "PVector", (PyObject *)&PVectorType);
+
+ return m;
+}
+
+#if PY_MAJOR_VERSION >= 3
+PyMODINIT_FUNC PyInit_pvectorc(void) {
+ return pyrsistent_pvectorc_moduleinit();
+}
+#else
+PyMODINIT_FUNC initpvectorc(void) {
+ pyrsistent_pvectorc_moduleinit();
+}
+#endif
diff --git a/third_party/python/pyrsistent/pyrsistent/__init__.py b/third_party/python/pyrsistent/pyrsistent/__init__.py
new file mode 100644
index 0000000000..be299658f3
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/__init__.py
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+
+from pyrsistent._pmap import pmap, m, PMap
+
+from pyrsistent._pvector import pvector, v, PVector
+
+from pyrsistent._pset import pset, s, PSet
+
+from pyrsistent._pbag import pbag, b, PBag
+
+from pyrsistent._plist import plist, l, PList
+
+from pyrsistent._pdeque import pdeque, dq, PDeque
+
+from pyrsistent._checked_types import (
+ CheckedPMap, CheckedPVector, CheckedPSet, InvariantException, CheckedKeyTypeError,
+ CheckedValueTypeError, CheckedType, optional)
+
+from pyrsistent._field_common import (
+ field, PTypeError, pset_field, pmap_field, pvector_field)
+
+from pyrsistent._precord import PRecord
+
+from pyrsistent._pclass import PClass, PClassMeta
+
+from pyrsistent._immutable import immutable
+
+from pyrsistent._helpers import freeze, thaw, mutant
+
+from pyrsistent._transformations import inc, discard, rex, ny
+
+from pyrsistent._toolz import get_in
+
+
+__all__ = ('pmap', 'm', 'PMap',
+ 'pvector', 'v', 'PVector',
+ 'pset', 's', 'PSet',
+ 'pbag', 'b', 'PBag',
+ 'plist', 'l', 'PList',
+ 'pdeque', 'dq', 'PDeque',
+ 'CheckedPMap', 'CheckedPVector', 'CheckedPSet', 'InvariantException', 'CheckedKeyTypeError', 'CheckedValueTypeError', 'CheckedType', 'optional',
+ 'PRecord', 'field', 'pset_field', 'pmap_field', 'pvector_field',
+ 'PClass', 'PClassMeta',
+ 'immutable',
+ 'freeze', 'thaw', 'mutant',
+ 'get_in',
+ 'inc', 'discard', 'rex', 'ny')
diff --git a/third_party/python/pyrsistent/pyrsistent/__init__.pyi b/third_party/python/pyrsistent/pyrsistent/__init__.pyi
new file mode 100644
index 0000000000..5909f7991a
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/__init__.pyi
@@ -0,0 +1,213 @@
+# flake8: noqa: E704
+# from https://gist.github.com/WuTheFWasThat/091a17d4b5cab597dfd5d4c2d96faf09
+# Stubs for pyrsistent (Python 3.6)
+
+from typing import Any
+from typing import AnyStr
+from typing import Callable
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import Mapping
+from typing import MutableMapping
+from typing import Sequence
+from typing import Set
+from typing import Union
+from typing import Tuple
+from typing import Type
+from typing import TypeVar
+from typing import overload
+
+# see commit 08519aa for explanation of the re-export
+from pyrsistent.typing import CheckedKeyTypeError as CheckedKeyTypeError
+from pyrsistent.typing import CheckedPMap as CheckedPMap
+from pyrsistent.typing import CheckedPSet as CheckedPSet
+from pyrsistent.typing import CheckedPVector as CheckedPVector
+from pyrsistent.typing import CheckedType as CheckedType
+from pyrsistent.typing import CheckedValueTypeError as CheckedValueTypeError
+from pyrsistent.typing import InvariantException as InvariantException
+from pyrsistent.typing import PClass as PClass
+from pyrsistent.typing import PBag as PBag
+from pyrsistent.typing import PDeque as PDeque
+from pyrsistent.typing import PList as PList
+from pyrsistent.typing import PMap as PMap
+from pyrsistent.typing import PMapEvolver as PMapEvolver
+from pyrsistent.typing import PSet as PSet
+from pyrsistent.typing import PSetEvolver as PSetEvolver
+from pyrsistent.typing import PTypeError as PTypeError
+from pyrsistent.typing import PVector as PVector
+from pyrsistent.typing import PVectorEvolver as PVectorEvolver
+
+T = TypeVar('T')
+KT = TypeVar('KT')
+VT = TypeVar('VT')
+
+def pmap(initial: Union[Mapping[KT, VT], Iterable[Tuple[KT, VT]]] = {}, pre_size: int = 0) -> PMap[KT, VT]: ...
+def m(**kwargs: VT) -> PMap[str, VT]: ...
+
+def pvector(iterable: Iterable[T] = ...) -> PVector[T]: ...
+def v(*iterable: T) -> PVector[T]: ...
+
+def pset(iterable: Iterable[T] = (), pre_size: int = 8) -> PSet[T]: ...
+def s(*iterable: T) -> PSet[T]: ...
+
+# see class_test.py for use cases
+Invariant = Tuple[bool, Optional[Union[str, Callable[[], str]]]]
+
+@overload
+def field(
+ type: Union[Type[T], Sequence[Type[T]]] = ...,
+ invariant: Callable[[Any], Union[Invariant, Iterable[Invariant]]] = lambda _: (True, None),
+ initial: Any = object(),
+ mandatory: bool = False,
+ factory: Callable[[Any], T] = lambda x: x,
+ serializer: Callable[[Any, T], Any] = lambda _, value: value,
+) -> T: ...
+# The actual return value (_PField) is irrelevant after a PRecord has been instantiated,
+# see https://github.com/tobgu/pyrsistent/blob/master/pyrsistent/_precord.py#L10
+@overload
+def field(
+ type: Any = ...,
+ invariant: Callable[[Any], Union[Invariant, Iterable[Invariant]]] = lambda _: (True, None),
+ initial: Any = object(),
+ mandatory: bool = False,
+ factory: Callable[[Any], Any] = lambda x: x,
+ serializer: Callable[[Any, Any], Any] = lambda _, value: value,
+) -> Any: ...
+
+# Use precise types for the simplest use cases, but fall back to Any for
+# everything else. See record_test.py for the wide range of possible types for
+# item_type
+@overload
+def pset_field(
+ item_type: Type[T],
+ optional: bool = False,
+ initial: Iterable[T] = ...,
+) -> PSet[T]: ...
+@overload
+def pset_field(
+ item_type: Any,
+ optional: bool = False,
+ initial: Any = (),
+) -> PSet[Any]: ...
+
+@overload
+def pmap_field(
+ key_type: Type[KT],
+ value_type: Type[VT],
+ optional: bool = False,
+ invariant: Callable[[Any], Tuple[bool, Optional[str]]] = lambda _: (True, None),
+) -> PMap[KT, VT]: ...
+@overload
+def pmap_field(
+ key_type: Any,
+ value_type: Any,
+ optional: bool = False,
+ invariant: Callable[[Any], Tuple[bool, Optional[str]]] = lambda _: (True, None),
+) -> PMap[Any, Any]: ...
+
+@overload
+def pvector_field(
+ item_type: Type[T],
+ optional: bool = False,
+ initial: Iterable[T] = ...,
+) -> PVector[T]: ...
+@overload
+def pvector_field(
+ item_type: Any,
+ optional: bool = False,
+ initial: Any = (),
+) -> PVector[Any]: ...
+
+def pbag(elements: Iterable[T]) -> PBag[T]: ...
+def b(*elements: T) -> PBag[T]: ...
+
+def plist(iterable: Iterable[T] = (), reverse: bool = False) -> PList[T]: ...
+def l(*elements: T) -> PList[T]: ...
+
+def pdeque(iterable: Optional[Iterable[T]] = None, maxlen: Optional[int] = None) -> PDeque[T]: ...
+def dq(*iterable: T) -> PDeque[T]: ...
+
+@overload
+def optional(type: T) -> Tuple[T, Type[None]]: ...
+@overload
+def optional(*typs: Any) -> Tuple[Any, ...]: ...
+
+T_PRecord = TypeVar('T_PRecord', bound='PRecord')
+class PRecord(PMap[AnyStr, Any]):
+ _precord_fields: Mapping
+ _precord_initial_values: Mapping
+
+ def __hash__(self) -> int: ...
+ def __init__(self, **kwargs: Any) -> None: ...
+ def __iter__(self) -> Iterator[Any]: ...
+ def __len__(self) -> int: ...
+ @classmethod
+ def create(
+ cls: Type[T_PRecord],
+ kwargs: Mapping,
+ _factory_fields: Optional[Iterable] = None,
+ ignore_extra: bool = False,
+ ) -> T_PRecord: ...
+ # This is OK because T_PRecord is a concrete type
+ def discard(self: T_PRecord, key: KT) -> T_PRecord: ...
+ def remove(self: T_PRecord, key: KT) -> T_PRecord: ...
+
+ def serialize(self, format: Optional[Any] = ...) -> MutableMapping: ...
+
+ # From pyrsistent documentation:
+ # This set function differs slightly from that in the PMap
+ # class. First of all it accepts key-value pairs. Second it accepts multiple key-value
+ # pairs to perform one, atomic, update of multiple fields.
+ @overload
+ def set(self, key: KT, val: VT) -> Any: ...
+ @overload
+ def set(self, **kwargs: VT) -> Any: ...
+
+def immutable(
+ members: Union[str, Iterable[str]] = '',
+ name: str = 'Immutable',
+ verbose: bool = False,
+) -> Tuple: ... # actually a namedtuple
+
+# ignore mypy warning "Overloaded function signatures 1 and 5 overlap with
+# incompatible return types"
+@overload
+def freeze(o: Mapping[KT, VT]) -> PMap[KT, VT]: ... # type: ignore
+@overload
+def freeze(o: List[T]) -> PVector[T]: ... # type: ignore
+@overload
+def freeze(o: Tuple[T, ...]) -> Tuple[T, ...]: ...
+@overload
+def freeze(o: Set[T]) -> PSet[T]: ... # type: ignore
+@overload
+def freeze(o: T) -> T: ...
+
+
+@overload
+def thaw(o: PMap[KT, VT]) -> MutableMapping[KT, VT]: ... # type: ignore
+@overload
+def thaw(o: PVector[T]) -> List[T]: ... # type: ignore
+@overload
+def thaw(o: Tuple[T, ...]) -> Tuple[T, ...]: ...
+# collections.abc.MutableSet is kind of garbage:
+# https://stackoverflow.com/questions/24977898/why-does-collections-mutableset-not-bestow-an-update-method
+@overload
+def thaw(o: PSet[T]) -> Set[T]: ... # type: ignore
+@overload
+def thaw(o: T) -> T: ...
+
+def mutant(fn: Callable) -> Callable: ...
+
+def inc(x: int) -> int: ...
+@overload
+def discard(evolver: PMapEvolver[KT, VT], key: KT) -> None: ...
+@overload
+def discard(evolver: PVectorEvolver[T], key: int) -> None: ...
+@overload
+def discard(evolver: PSetEvolver[T], key: T) -> None: ...
+def rex(expr: str) -> Callable[[Any], bool]: ...
+def ny(_: Any) -> bool: ...
+
+def get_in(keys: Iterable, coll: Mapping, default: Optional[Any] = None, no_default: bool = False) -> Any: ...
diff --git a/third_party/python/pyrsistent/pyrsistent/_checked_types.py b/third_party/python/pyrsistent/pyrsistent/_checked_types.py
new file mode 100644
index 0000000000..293d989f13
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_checked_types.py
@@ -0,0 +1,542 @@
+from ._compat import Iterable
+import six
+
+from pyrsistent._compat import Enum, string_types
+from pyrsistent._pmap import PMap, pmap
+from pyrsistent._pset import PSet, pset
+from pyrsistent._pvector import PythonPVector, python_pvector
+
+
+class CheckedType(object):
+ """
+ Marker class to enable creation and serialization of checked object graphs.
+ """
+ __slots__ = ()
+
+ @classmethod
+ def create(cls, source_data, _factory_fields=None):
+ raise NotImplementedError()
+
+ def serialize(self, format=None):
+ raise NotImplementedError()
+
+
+def _restore_pickle(cls, data):
+ return cls.create(data, _factory_fields=set())
+
+
+class InvariantException(Exception):
+ """
+ Exception raised from a :py:class:`CheckedType` when invariant tests fail or when a mandatory
+ field is missing.
+
+ Contains two fields of interest:
+ invariant_errors, a tuple of error data for the failing invariants
+ missing_fields, a tuple of strings specifying the missing names
+ """
+
+ def __init__(self, error_codes=(), missing_fields=(), *args, **kwargs):
+ self.invariant_errors = tuple(e() if callable(e) else e for e in error_codes)
+ self.missing_fields = missing_fields
+ super(InvariantException, self).__init__(*args, **kwargs)
+
+ def __str__(self):
+ return super(InvariantException, self).__str__() + \
+ ", invariant_errors=[{invariant_errors}], missing_fields=[{missing_fields}]".format(
+ invariant_errors=', '.join(str(e) for e in self.invariant_errors),
+ missing_fields=', '.join(self.missing_fields))
+
+
+_preserved_iterable_types = (
+ Enum,
+)
+"""Some types are themselves iterable, but we want to use the type itself and
+not its members for the type specification. This defines a set of such types
+that we explicitly preserve.
+
+Note that strings are not such types because the string inputs we pass in are
+values, not types.
+"""
+
+
+def maybe_parse_user_type(t):
+ """Try to coerce a user-supplied type directive into a list of types.
+
+ This function should be used in all places where a user specifies a type,
+ for consistency.
+
+ The policy for what defines valid user input should be clear from the implementation.
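+
+    A few illustrative examples:
+      maybe_parse_user_type(int)          -> [int]
+      maybe_parse_user_type('mymod.Foo')  -> ['mymod.Foo']
+      maybe_parse_user_type((int, float)) -> (int, float)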
+ """
+ is_type = isinstance(t, type)
+ is_preserved = isinstance(t, type) and issubclass(t, _preserved_iterable_types)
+ is_string = isinstance(t, string_types)
+ is_iterable = isinstance(t, Iterable)
+
+ if is_preserved:
+ return [t]
+ elif is_string:
+ return [t]
+ elif is_type and not is_iterable:
+ return [t]
+ elif is_iterable:
+ # Recur to validate contained types as well.
+ ts = t
+ return tuple(e for t in ts for e in maybe_parse_user_type(t))
+ else:
+ # If this raises because `t` cannot be formatted, so be it.
+ raise TypeError(
+ 'Type specifications must be types or strings. Input: {}'.format(t)
+ )
+
+
+def maybe_parse_many_user_types(ts):
+ # Just a different name to communicate that you're parsing multiple user
+ # inputs. `maybe_parse_user_type` handles the iterable case anyway.
+ return maybe_parse_user_type(ts)
+
+
+def _store_types(dct, bases, destination_name, source_name):
+ maybe_types = maybe_parse_many_user_types([
+ d[source_name]
+ for d in ([dct] + [b.__dict__ for b in bases]) if source_name in d
+ ])
+
+ dct[destination_name] = maybe_types
+
+
+def _merge_invariant_results(result):
+ verdict = True
+ data = []
+ for verd, dat in result:
+ if not verd:
+ verdict = False
+ data.append(dat)
+
+ return verdict, tuple(data)
+
+
+def wrap_invariant(invariant):
+ # Invariant functions may return the outcome of several tests
+ # In those cases the results have to be merged before being passed
+ # back to the client.
+ def f(*args, **kwargs):
+ result = invariant(*args, **kwargs)
+ if isinstance(result[0], bool):
+ return result
+
+ return _merge_invariant_results(result)
+
+ return f
+
+
+def _all_dicts(bases, seen=None):
+ """
+ Yield each class in ``bases`` and each of their base classes.
+ """
+ if seen is None:
+ seen = set()
+ for cls in bases:
+ if cls in seen:
+ continue
+ seen.add(cls)
+ yield cls.__dict__
+ for b in _all_dicts(cls.__bases__, seen):
+ yield b
+
+
+def store_invariants(dct, bases, destination_name, source_name):
+ # Invariants are inherited
+ invariants = []
+ for ns in [dct] + list(_all_dicts(bases)):
+ try:
+ invariant = ns[source_name]
+ except KeyError:
+ continue
+ invariants.append(invariant)
+
+ if not all(callable(invariant) for invariant in invariants):
+ raise TypeError('Invariants must be callable')
+ dct[destination_name] = tuple(wrap_invariant(inv) for inv in invariants)
+
+
+class _CheckedTypeMeta(type):
+ def __new__(mcs, name, bases, dct):
+ _store_types(dct, bases, '_checked_types', '__type__')
+ store_invariants(dct, bases, '_checked_invariants', '__invariant__')
+
+ def default_serializer(self, _, value):
+ if isinstance(value, CheckedType):
+ return value.serialize()
+ return value
+
+ dct.setdefault('__serializer__', default_serializer)
+
+ dct['__slots__'] = ()
+
+ return super(_CheckedTypeMeta, mcs).__new__(mcs, name, bases, dct)
+
+
+class CheckedTypeError(TypeError):
+ def __init__(self, source_class, expected_types, actual_type, actual_value, *args, **kwargs):
+ super(CheckedTypeError, self).__init__(*args, **kwargs)
+ self.source_class = source_class
+ self.expected_types = expected_types
+ self.actual_type = actual_type
+ self.actual_value = actual_value
+
+
+class CheckedKeyTypeError(CheckedTypeError):
+ """
+ Raised when trying to set a value using a key with a type that doesn't match the declared type.
+
+ Attributes:
+ source_class -- The class of the collection
+ expected_types -- Allowed types
+ actual_type -- The non matching type
+ actual_value -- Value of the variable with the non matching type
+ """
+ pass
+
+
+class CheckedValueTypeError(CheckedTypeError):
+ """
+ Raised when trying to set a value using a key with a type that doesn't match the declared type.
+
+ Attributes:
+ source_class -- The class of the collection
+ expected_types -- Allowed types
+ actual_type -- The non matching type
+ actual_value -- Value of the variable with the non matching type
+ """
+ pass
+
+
+def _get_class(type_name):
+ module_name, class_name = type_name.rsplit('.', 1)
+ module = __import__(module_name, fromlist=[class_name])
+ return getattr(module, class_name)
+
+
+def get_type(typ):
+ if isinstance(typ, type):
+ return typ
+
+ return _get_class(typ)
+
+
+def get_types(typs):
+ return [get_type(typ) for typ in typs]
+
+
+def _check_types(it, expected_types, source_class, exception_type=CheckedValueTypeError):
+ if expected_types:
+ for e in it:
+ if not any(isinstance(e, get_type(t)) for t in expected_types):
+ actual_type = type(e)
+ msg = "Type {source_class} can only be used with {expected_types}, not {actual_type}".format(
+ source_class=source_class.__name__,
+ expected_types=tuple(get_type(et).__name__ for et in expected_types),
+ actual_type=actual_type.__name__)
+ raise exception_type(source_class, expected_types, actual_type, e, msg)
+
+
+def _invariant_errors(elem, invariants):
+ return [data for valid, data in (invariant(elem) for invariant in invariants) if not valid]
+
+
+def _invariant_errors_iterable(it, invariants):
+ return sum([_invariant_errors(elem, invariants) for elem in it], [])
+
+
+def optional(*typs):
+ """ Convenience function to specify that a value may be of any of the types in type 'typs' or None """
+ return tuple(typs) + (type(None),)
+
+
+def _checked_type_create(cls, source_data, _factory_fields=None, ignore_extra=False):
+ if isinstance(source_data, cls):
+ return source_data
+
+    # Recursively apply the create methods of checked types if the types of the
+    # supplied data do not match any of the valid types.
+ types = get_types(cls._checked_types)
+ checked_type = next((t for t in types if issubclass(t, CheckedType)), None)
+ if checked_type:
+ return cls([checked_type.create(data, ignore_extra=ignore_extra)
+ if not any(isinstance(data, t) for t in types) else data
+ for data in source_data])
+
+ return cls(source_data)
+
+@six.add_metaclass(_CheckedTypeMeta)
+class CheckedPVector(PythonPVector, CheckedType):
+ """
+ A CheckedPVector is a PVector which allows specifying type and invariant checks.
+
+ >>> class Positives(CheckedPVector):
+ ... __type__ = (int, float)
+ ... __invariant__ = lambda n: (n >= 0, 'Negative')
+ ...
+ >>> Positives([1, 2, 3])
+ Positives([1, 2, 3])
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, initial=()):
+ if type(initial) == PythonPVector:
+ return super(CheckedPVector, cls).__new__(cls, initial._count, initial._shift, initial._root, initial._tail)
+
+ return CheckedPVector.Evolver(cls, python_pvector()).extend(initial).persistent()
+
+ def set(self, key, value):
+ return self.evolver().set(key, value).persistent()
+
+ def append(self, val):
+ return self.evolver().append(val).persistent()
+
+ def extend(self, it):
+ return self.evolver().extend(it).persistent()
+
+ create = classmethod(_checked_type_create)
+
+ def serialize(self, format=None):
+ serializer = self.__serializer__
+ return list(serializer(format, v) for v in self)
+
+ def __reduce__(self):
+ # Pickling support
+ return _restore_pickle, (self.__class__, list(self),)
+
+ class Evolver(PythonPVector.Evolver):
+ __slots__ = ('_destination_class', '_invariant_errors')
+
+ def __init__(self, destination_class, vector):
+ super(CheckedPVector.Evolver, self).__init__(vector)
+ self._destination_class = destination_class
+ self._invariant_errors = []
+
+ def _check(self, it):
+ _check_types(it, self._destination_class._checked_types, self._destination_class)
+ error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants)
+ self._invariant_errors.extend(error_data)
+
+ def __setitem__(self, key, value):
+ self._check([value])
+ return super(CheckedPVector.Evolver, self).__setitem__(key, value)
+
+ def append(self, elem):
+ self._check([elem])
+ return super(CheckedPVector.Evolver, self).append(elem)
+
+ def extend(self, it):
+ it = list(it)
+ self._check(it)
+ return super(CheckedPVector.Evolver, self).extend(it)
+
+ def persistent(self):
+ if self._invariant_errors:
+ raise InvariantException(error_codes=self._invariant_errors)
+
+ result = self._orig_pvector
+ if self.is_dirty() or (self._destination_class != type(self._orig_pvector)):
+ pv = super(CheckedPVector.Evolver, self).persistent().extend(self._extra_tail)
+ result = self._destination_class(pv)
+ self._reset(result)
+
+ return result
+
+ def __repr__(self):
+ return self.__class__.__name__ + "({0})".format(self.tolist())
+
+ __str__ = __repr__
+
+ def evolver(self):
+ return CheckedPVector.Evolver(self.__class__, self)
+
+
+@six.add_metaclass(_CheckedTypeMeta)
+class CheckedPSet(PSet, CheckedType):
+ """
+ A CheckedPSet is a PSet which allows specifying type and invariant checks.
+
+ >>> class Positives(CheckedPSet):
+ ... __type__ = (int, float)
+ ... __invariant__ = lambda n: (n >= 0, 'Negative')
+ ...
+ >>> Positives([1, 2, 3])
+ Positives([1, 2, 3])
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, initial=()):
+ if type(initial) is PMap:
+ return super(CheckedPSet, cls).__new__(cls, initial)
+
+ evolver = CheckedPSet.Evolver(cls, pset())
+ for e in initial:
+ evolver.add(e)
+
+ return evolver.persistent()
+
+ def __repr__(self):
+ return self.__class__.__name__ + super(CheckedPSet, self).__repr__()[4:]
+
+ def __str__(self):
+ return self.__repr__()
+
+ def serialize(self, format=None):
+ serializer = self.__serializer__
+ return set(serializer(format, v) for v in self)
+
+ create = classmethod(_checked_type_create)
+
+ def __reduce__(self):
+ # Pickling support
+ return _restore_pickle, (self.__class__, list(self),)
+
+ def evolver(self):
+ return CheckedPSet.Evolver(self.__class__, self)
+
+ class Evolver(PSet._Evolver):
+ __slots__ = ('_destination_class', '_invariant_errors')
+
+ def __init__(self, destination_class, original_set):
+ super(CheckedPSet.Evolver, self).__init__(original_set)
+ self._destination_class = destination_class
+ self._invariant_errors = []
+
+ def _check(self, it):
+ _check_types(it, self._destination_class._checked_types, self._destination_class)
+ error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants)
+ self._invariant_errors.extend(error_data)
+
+ def add(self, element):
+ self._check([element])
+ self._pmap_evolver[element] = True
+ return self
+
+ def persistent(self):
+ if self._invariant_errors:
+ raise InvariantException(error_codes=self._invariant_errors)
+
+ if self.is_dirty() or self._destination_class != type(self._original_pset):
+ return self._destination_class(self._pmap_evolver.persistent())
+
+ return self._original_pset
+
+
+class _CheckedMapTypeMeta(type):
+ def __new__(mcs, name, bases, dct):
+ _store_types(dct, bases, '_checked_key_types', '__key_type__')
+ _store_types(dct, bases, '_checked_value_types', '__value_type__')
+ store_invariants(dct, bases, '_checked_invariants', '__invariant__')
+
+ def default_serializer(self, _, key, value):
+ sk = key
+ if isinstance(key, CheckedType):
+ sk = key.serialize()
+
+ sv = value
+ if isinstance(value, CheckedType):
+ sv = value.serialize()
+
+ return sk, sv
+
+ dct.setdefault('__serializer__', default_serializer)
+
+ dct['__slots__'] = ()
+
+ return super(_CheckedMapTypeMeta, mcs).__new__(mcs, name, bases, dct)
+
+# Marker object
+_UNDEFINED_CHECKED_PMAP_SIZE = object()
+
+
+@six.add_metaclass(_CheckedMapTypeMeta)
+class CheckedPMap(PMap, CheckedType):
+ """
+ A CheckedPMap is a PMap which allows specifying type and invariant checks.
+
+ >>> class IntToFloatMap(CheckedPMap):
+ ... __key_type__ = int
+ ... __value_type__ = float
+ ... __invariant__ = lambda k, v: (int(v) == k, 'Invalid mapping')
+ ...
+ >>> IntToFloatMap({1: 1.5, 2: 2.25})
+ IntToFloatMap({1: 1.5, 2: 2.25})
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, initial={}, size=_UNDEFINED_CHECKED_PMAP_SIZE):
+ if size is not _UNDEFINED_CHECKED_PMAP_SIZE:
+ return super(CheckedPMap, cls).__new__(cls, size, initial)
+
+ evolver = CheckedPMap.Evolver(cls, pmap())
+ for k, v in initial.items():
+ evolver.set(k, v)
+
+ return evolver.persistent()
+
+ def evolver(self):
+ return CheckedPMap.Evolver(self.__class__, self)
+
+ def __repr__(self):
+ return self.__class__.__name__ + "({0})".format(str(dict(self)))
+
+ __str__ = __repr__
+
+ def serialize(self, format=None):
+ serializer = self.__serializer__
+ return dict(serializer(format, k, v) for k, v in self.items())
+
+ @classmethod
+ def create(cls, source_data, _factory_fields=None):
+ if isinstance(source_data, cls):
+ return source_data
+
+ # Recursively apply create methods of checked types if the types of the supplied data
+ # do not match any of the valid types.
+ key_types = get_types(cls._checked_key_types)
+ checked_key_type = next((t for t in key_types if issubclass(t, CheckedType)), None)
+ value_types = get_types(cls._checked_value_types)
+ checked_value_type = next((t for t in value_types if issubclass(t, CheckedType)), None)
+
+ if checked_key_type or checked_value_type:
+ return cls(dict((checked_key_type.create(key) if checked_key_type and not any(isinstance(key, t) for t in key_types) else key,
+ checked_value_type.create(value) if checked_value_type and not any(isinstance(value, t) for t in value_types) else value)
+ for key, value in source_data.items()))
+
+ return cls(source_data)
+
+ def __reduce__(self):
+ # Pickling support
+ return _restore_pickle, (self.__class__, dict(self),)
+
+ class Evolver(PMap._Evolver):
+ __slots__ = ('_destination_class', '_invariant_errors')
+
+ def __init__(self, destination_class, original_map):
+ super(CheckedPMap.Evolver, self).__init__(original_map)
+ self._destination_class = destination_class
+ self._invariant_errors = []
+
+ def set(self, key, value):
+ _check_types([key], self._destination_class._checked_key_types, self._destination_class, CheckedKeyTypeError)
+ _check_types([value], self._destination_class._checked_value_types, self._destination_class)
+ self._invariant_errors.extend(data for valid, data in (invariant(key, value)
+ for invariant in self._destination_class._checked_invariants)
+ if not valid)
+
+ return super(CheckedPMap.Evolver, self).set(key, value)
+
+ def persistent(self):
+ if self._invariant_errors:
+ raise InvariantException(error_codes=self._invariant_errors)
+
+ if self.is_dirty() or type(self._original_pmap) != self._destination_class:
+ return self._destination_class(self._buckets_evolver.persistent(), self._size)
+
+ return self._original_pmap
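A sketch of the recursive create() behaviour described above; Name and NamesByTeam are hypothetical example classes:

    class Name(CheckedPVector):
        __type__ = str

    class NamesByTeam(CheckedPMap):
        __key_type__ = str
        __value_type__ = Name

    # Plain nested data is coerced through the value type's own create()
    NamesByTeam.create({'core': ['alice', 'bob']})
    # -> NamesByTeam({'core': Name(['alice', 'bob'])})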
diff --git a/third_party/python/pyrsistent/pyrsistent/_compat.py b/third_party/python/pyrsistent/pyrsistent/_compat.py
new file mode 100644
index 0000000000..e728586afe
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_compat.py
@@ -0,0 +1,31 @@
+from six import string_types
+
+
+# enum compat
+try:
+ from enum import Enum
+except ImportError:
+ class Enum(object): pass
+ # no objects will be instances of this class
+
+# collections compat
+try:
+ from collections.abc import (
+ Container,
+ Hashable,
+ Iterable,
+ Mapping,
+ Sequence,
+ Set,
+ Sized,
+ )
+except ImportError:
+ from collections import (
+ Container,
+ Hashable,
+ Iterable,
+ Mapping,
+ Sequence,
+ Set,
+ Sized,
+ )
diff --git a/third_party/python/pyrsistent/pyrsistent/_field_common.py b/third_party/python/pyrsistent/pyrsistent/_field_common.py
new file mode 100644
index 0000000000..ca1cccd43c
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_field_common.py
@@ -0,0 +1,330 @@
+import six
+import sys
+
+from pyrsistent._checked_types import (
+ CheckedPMap,
+ CheckedPSet,
+ CheckedPVector,
+ CheckedType,
+ InvariantException,
+ _restore_pickle,
+ get_type,
+ maybe_parse_user_type,
+ maybe_parse_many_user_types,
+)
+from pyrsistent._checked_types import optional as optional_type
+from pyrsistent._checked_types import wrap_invariant
+import inspect
+
+PY2 = sys.version_info[0] < 3
+
+
+def set_fields(dct, bases, name):
+ dct[name] = dict(sum([list(b.__dict__.get(name, {}).items()) for b in bases], []))
+
+ for k, v in list(dct.items()):
+ if isinstance(v, _PField):
+ dct[name][k] = v
+ del dct[k]
+
+
+def check_global_invariants(subject, invariants):
+ error_codes = tuple(error_code for is_ok, error_code in
+ (invariant(subject) for invariant in invariants) if not is_ok)
+ if error_codes:
+ raise InvariantException(error_codes, (), 'Global invariant failed')
+
+
+def serialize(serializer, format, value):
+ if isinstance(value, CheckedType) and serializer is PFIELD_NO_SERIALIZER:
+ return value.serialize(format)
+
+ return serializer(format, value)
+
+
+def check_type(destination_cls, field, name, value):
+ if field.type and not any(isinstance(value, get_type(t)) for t in field.type):
+ actual_type = type(value)
+ message = "Invalid type for field {0}.{1}, was {2}".format(destination_cls.__name__, name, actual_type.__name__)
+ raise PTypeError(destination_cls, name, field.type, actual_type, message)
+
+
+def is_type_cls(type_cls, field_type):
+ if type(field_type) is set:
+ return True
+ types = tuple(field_type)
+ if len(types) == 0:
+ return False
+ return issubclass(get_type(types[0]), type_cls)
+
+
+def is_field_ignore_extra_complaint(type_cls, field, ignore_extra):
+ # ignore_extra defaults to False; for performance there is no need to propagate a False value
+ if not ignore_extra:
+ return False
+
+ if not is_type_cls(type_cls, field.type):
+ return False
+
+ if PY2:
+ return 'ignore_extra' in inspect.getargspec(field.factory).args
+ else:
+ return 'ignore_extra' in inspect.signature(field.factory).parameters
+
+
+
+class _PField(object):
+ __slots__ = ('type', 'invariant', 'initial', 'mandatory', '_factory', 'serializer')
+
+ def __init__(self, type, invariant, initial, mandatory, factory, serializer):
+ self.type = type
+ self.invariant = invariant
+ self.initial = initial
+ self.mandatory = mandatory
+ self._factory = factory
+ self.serializer = serializer
+
+ @property
+ def factory(self):
+ # If no factory is specified and the type is another CheckedType, use the factory method of that CheckedType
+ if self._factory is PFIELD_NO_FACTORY and len(self.type) == 1:
+ typ = get_type(tuple(self.type)[0])
+ if issubclass(typ, CheckedType):
+ return typ.create
+
+ return self._factory
+
+PFIELD_NO_TYPE = ()
+PFIELD_NO_INVARIANT = lambda _: (True, None)
+PFIELD_NO_FACTORY = lambda x: x
+PFIELD_NO_INITIAL = object()
+PFIELD_NO_SERIALIZER = lambda _, value: value
+
+
+def field(type=PFIELD_NO_TYPE, invariant=PFIELD_NO_INVARIANT, initial=PFIELD_NO_INITIAL,
+ mandatory=False, factory=PFIELD_NO_FACTORY, serializer=PFIELD_NO_SERIALIZER):
+ """
+ Field specification factory for :py:class:`PRecord`.
+
+ :param type: a type or iterable with types that are allowed for this field
+ :param invariant: a function specifying an invariant that must hold for the field
+ :param initial: value of field if not specified when instantiating the record
+ :param mandatory: boolean specifying if the field is mandatory or not
+ :param factory: function called when field is set.
+ :param serializer: function that returns a serialized version of the field
+ """
+
+ # NB: We have to check this predicate separately from the predicates in
+ # `maybe_parse_user_type` et al. because this one is related to supporting
+ # the argspec for `field`, while those are related to supporting the valid
+ # ways to specify types.
+
+ # Multiple types must be passed in one of the following containers. Note
+ # that a type that is a subclass of one of these containers, like a
+ # `collections.namedtuple`, will work as expected, since we check
+ # `isinstance` and not `issubclass`.
+ if isinstance(type, (list, set, tuple)):
+ types = set(maybe_parse_many_user_types(type))
+ else:
+ types = set(maybe_parse_user_type(type))
+
+ invariant_function = wrap_invariant(invariant) if invariant != PFIELD_NO_INVARIANT and callable(invariant) else invariant
+ field = _PField(type=types, invariant=invariant_function, initial=initial,
+ mandatory=mandatory, factory=factory, serializer=serializer)
+
+ _check_field_parameters(field)
+
+ return field
+
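A minimal sketch of field() in use; the Person record is hypothetical and assumes PRecord from this package:

    from pyrsistent import PRecord, field

    class Person(PRecord):
        name = field(type=str, mandatory=True)
        age = field(type=int, initial=0,
                    invariant=lambda a: (a >= 0, 'age must be non-negative'))

    Person(name='Ada')             # age falls back to the initial value 0
    Person(name='Ada', age=-1)     # raises InvariantException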
+
+def _check_field_parameters(field):
+ for t in field.type:
+ if not isinstance(t, type) and not isinstance(t, six.string_types):
+ raise TypeError('Type parameter expected, not {0}'.format(type(t)))
+
+ if field.initial is not PFIELD_NO_INITIAL and \
+ not callable(field.initial) and \
+ field.type and not any(isinstance(field.initial, t) for t in field.type):
+ raise TypeError('Initial has invalid type {0}'.format(type(field.initial)))
+
+ if not callable(field.invariant):
+ raise TypeError('Invariant must be callable')
+
+ if not callable(field.factory):
+ raise TypeError('Factory must be callable')
+
+ if not callable(field.serializer):
+ raise TypeError('Serializer must be callable')
+
+
+class PTypeError(TypeError):
+ """
+ Raised when trying to assign a value with a type that doesn't match the declared type.
+
+ Attributes:
+ source_class -- The class of the record
+ field -- Field name
+ expected_types -- Types allowed for the field
+ actual_type -- The non-matching type
+ """
+ def __init__(self, source_class, field, expected_types, actual_type, *args, **kwargs):
+ super(PTypeError, self).__init__(*args, **kwargs)
+ self.source_class = source_class
+ self.field = field
+ self.expected_types = expected_types
+ self.actual_type = actual_type
+
+
+SEQ_FIELD_TYPE_SUFFIXES = {
+ CheckedPVector: "PVector",
+ CheckedPSet: "PSet",
+}
+
+# Global dictionary to hold auto-generated field types: used for unpickling
+_seq_field_types = {}
+
+def _restore_seq_field_pickle(checked_class, item_type, data):
+ """Unpickling function for auto-generated PVec/PSet field types."""
+ type_ = _seq_field_types[checked_class, item_type]
+ return _restore_pickle(type_, data)
+
+def _types_to_names(types):
+ """Convert a tuple of types to a human-readable string."""
+ return "".join(get_type(typ).__name__.capitalize() for typ in types)
+
+def _make_seq_field_type(checked_class, item_type):
+ """Create a subclass of the given checked class with the given item type."""
+ type_ = _seq_field_types.get((checked_class, item_type))
+ if type_ is not None:
+ return type_
+
+ class TheType(checked_class):
+ __type__ = item_type
+
+ def __reduce__(self):
+ return (_restore_seq_field_pickle,
+ (checked_class, item_type, list(self)))
+
+ suffix = SEQ_FIELD_TYPE_SUFFIXES[checked_class]
+ TheType.__name__ = _types_to_names(TheType._checked_types) + suffix
+ _seq_field_types[checked_class, item_type] = TheType
+ return TheType
+
+def _sequence_field(checked_class, item_type, optional, initial):
+ """
+ Create checked field for either ``PSet`` or ``PVector``.
+
+ :param checked_class: ``CheckedPSet`` or ``CheckedPVector``.
+ :param item_type: The required type for the items in the set.
+ :param optional: If true, ``None`` can be used as a value for
+ this field.
+ :param initial: Initial value to pass to factory.
+
+ :return: A ``field`` containing a checked class.
+ """
+ TheType = _make_seq_field_type(checked_class, item_type)
+
+ if optional:
+ def factory(argument, _factory_fields=None, ignore_extra=False):
+ if argument is None:
+ return None
+ else:
+ return TheType.create(argument, _factory_fields=_factory_fields, ignore_extra=ignore_extra)
+ else:
+ factory = TheType.create
+
+ return field(type=optional_type(TheType) if optional else TheType,
+ factory=factory, mandatory=True,
+ initial=factory(initial))
+
+
+def pset_field(item_type, optional=False, initial=()):
+ """
+ Create checked ``PSet`` field.
+
+ :param item_type: The required type for the items in the set.
+ :param optional: If true, ``None`` can be used as a value for
+ this field.
+ :param initial: Initial value to pass to factory if no value is given
+ for the field.
+
+ :return: A ``field`` containing a ``CheckedPSet`` of the given type.
+ """
+ return _sequence_field(CheckedPSet, item_type, optional,
+ initial)
+
+
+def pvector_field(item_type, optional=False, initial=()):
+ """
+ Create checked ``PVector`` field.
+
+ :param item_type: The required type for the items in the vector.
+ :param optional: If true, ``None`` can be used as a value for
+ this field.
+ :param initial: Initial value to pass to factory if no value is given
+ for the field.
+
+ :return: A ``field`` containing a ``CheckedPVector`` of the given type.
+ """
+ return _sequence_field(CheckedPVector, item_type, optional,
+ initial)
+
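A sketch of the two helpers on a hypothetical record; the generated value types follow the naming scheme above:

    class Team(PRecord):
        tags = pset_field(str)        # values held in an auto-generated StrPSet
        scores = pvector_field(int)   # values held in an auto-generated IntPVector

    t = Team()                        # both fields start out empty
    t.set(scores=[1, 2, 3])           # the plain list is coerced via the generated type's create()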
+
+_valid = lambda item: (True, "")
+
+
+# Global dictionary to hold auto-generated field types: used for unpickling
+_pmap_field_types = {}
+
+def _restore_pmap_field_pickle(key_type, value_type, data):
+ """Unpickling function for auto-generated PMap field types."""
+ type_ = _pmap_field_types[key_type, value_type]
+ return _restore_pickle(type_, data)
+
+def _make_pmap_field_type(key_type, value_type):
+ """Create a subclass of CheckedPMap with the given key and value types."""
+ type_ = _pmap_field_types.get((key_type, value_type))
+ if type_ is not None:
+ return type_
+
+ class TheMap(CheckedPMap):
+ __key_type__ = key_type
+ __value_type__ = value_type
+
+ def __reduce__(self):
+ return (_restore_pmap_field_pickle,
+ (self.__key_type__, self.__value_type__, dict(self)))
+
+ TheMap.__name__ = "{0}To{1}PMap".format(
+ _types_to_names(TheMap._checked_key_types),
+ _types_to_names(TheMap._checked_value_types))
+ _pmap_field_types[key_type, value_type] = TheMap
+ return TheMap
+
+
+def pmap_field(key_type, value_type, optional=False, invariant=PFIELD_NO_INVARIANT):
+ """
+ Create a checked ``PMap`` field.
+
+ :param key_type: The required type for the keys of the map.
+ :param value_type: The required type for the values of the map.
+ :param optional: If true, ``None`` can be used as a value for
+ this field.
+ :param invariant: Pass-through to ``field``.
+
+ :return: A ``field`` containing a ``CheckedPMap``.
+ """
+ TheMap = _make_pmap_field_type(key_type, value_type)
+
+ if optional:
+ def factory(argument):
+ if argument is None:
+ return None
+ else:
+ return TheMap.create(argument)
+ else:
+ factory = TheMap.create
+
+ return field(mandatory=True, initial=TheMap(),
+ type=optional_type(TheMap) if optional else TheMap,
+ factory=factory, invariant=invariant)
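And the corresponding sketch for maps; the Inventory record is hypothetical:

    class Inventory(PRecord):
        counts = pmap_field(str, int)   # values held in an auto-generated StrToIntPMap

    inv = Inventory(counts={'apples': 3})
    inv.counts['apples']                # 3; keys and values are type checked on the way in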
diff --git a/third_party/python/pyrsistent/pyrsistent/_helpers.py b/third_party/python/pyrsistent/pyrsistent/_helpers.py
new file mode 100644
index 0000000000..c9c58feac5
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_helpers.py
@@ -0,0 +1,82 @@
+from functools import wraps
+import six
+from pyrsistent._pmap import PMap, pmap
+from pyrsistent._pset import PSet, pset
+from pyrsistent._pvector import PVector, pvector
+
+
+def freeze(o):
+ """
+ Recursively convert simple Python containers into pyrsistent versions
+ of those containers.
+
+ - list is converted to pvector, recursively
+ - dict is converted to pmap, recursively on values (but not keys)
+ - set is converted to pset, but not recursively
+ - tuple is converted to tuple, recursively.
+
+ Sets and dict keys are not recursively frozen because they do not contain
+ mutable data by convention. The main exception to this rule is that
+ dict keys and set elements are often instances of mutable objects that
+ support hash-by-id, which this function can't convert anyway.
+
+ >>> freeze(set([1, 2]))
+ pset([1, 2])
+ >>> freeze([1, {'a': 3}])
+ pvector([1, pmap({'a': 3})])
+ >>> freeze((1, []))
+ (1, pvector([]))
+ """
+ typ = type(o)
+ if typ is dict:
+ return pmap(dict((k, freeze(v)) for k, v in six.iteritems(o)))
+ if typ is list:
+ return pvector(map(freeze, o))
+ if typ is tuple:
+ return tuple(map(freeze, o))
+ if typ is set:
+ return pset(o)
+ return o
+
+
+def thaw(o):
+ """
+ Recursively convert pyrsistent containers into simple Python containers.
+
+ - pvector is converted to list, recursively
+ - pmap is converted to dict, recursively on values (but not keys)
+ - pset is converted to set, but not recursively
+ - tuple is converted to tuple, recursively.
+
+ >>> from pyrsistent import s, m, v
+ >>> thaw(s(1, 2))
+ {1, 2}
+ >>> thaw(v(1, m(a=3)))
+ [1, {'a': 3}]
+ >>> thaw((1, v()))
+ (1, [])
+ """
+ if isinstance(o, PVector):
+ return list(map(thaw, o))
+ if isinstance(o, PMap):
+ return dict((k, thaw(v)) for k, v in o.iteritems())
+ if isinstance(o, PSet):
+ return set(o)
+ if type(o) is tuple:
+ return tuple(map(thaw, o))
+ return o
+
+
+def mutant(fn):
+ """
+ Convenience decorator to isolate mutation to within the decorated function (with respect
+ to the input arguments).
+
+ All arguments to the decorated function will be frozen so that they are guaranteed not to change.
+ The return value is also frozen.
+ """
+ @wraps(fn)
+ def inner_f(*args, **kwargs):
+ return freeze(fn(*[freeze(e) for e in args], **dict(freeze(item) for item in kwargs.items())))
+
+ return inner_f
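A small sketch of @mutant in use; the function and keys are illustrative only:

    @mutant
    def with_default_port(config):
        # config arrives frozen (a pmap), so updates go through set()
        return config.set('port', config.get('port', 8080))

    result = with_default_port({'host': 'example.org'})
    # result is a pmap; the original dict passed in is left untouched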
diff --git a/third_party/python/pyrsistent/pyrsistent/_immutable.py b/third_party/python/pyrsistent/pyrsistent/_immutable.py
new file mode 100644
index 0000000000..a89bd7552f
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_immutable.py
@@ -0,0 +1,105 @@
+import sys
+
+import six
+
+
+def immutable(members='', name='Immutable', verbose=False):
+ """
+ Produces a class that can be used either standalone or as a base class for persistent classes.
+
+ This is a thin wrapper around a named tuple.
+
+ Constructing a type and using it to instantiate objects:
+
+ >>> Point = immutable('x, y', name='Point')
+ >>> p = Point(1, 2)
+ >>> p2 = p.set(x=3)
+ >>> p
+ Point(x=1, y=2)
+ >>> p2
+ Point(x=3, y=2)
+
+ Inheriting from a constructed type. In this case no type name needs to be supplied:
+
+ >>> class PositivePoint(immutable('x, y')):
+ ... __slots__ = tuple()
+ ... def __new__(cls, x, y):
+ ... if x > 0 and y > 0:
+ ... return super(PositivePoint, cls).__new__(cls, x, y)
+ ... raise Exception('Coordinates must be positive!')
+ ...
+ >>> p = PositivePoint(1, 2)
+ >>> p.set(x=3)
+ PositivePoint(x=3, y=2)
+ >>> p.set(y=-3)
+ Traceback (most recent call last):
+ Exception: Coordinates must be positive!
+
+ The persistent class also supports the notion of frozen members. The value of a frozen member
+ cannot be updated. For example it could be used to implement an ID that should remain the same
+ over time. A frozen member is denoted by a trailing underscore.
+
+ >>> Point = immutable('x, y, id_', name='Point')
+ >>> p = Point(1, 2, id_=17)
+ >>> p.set(x=3)
+ Point(x=3, y=2, id_=17)
+ >>> p.set(id_=18)
+ Traceback (most recent call last):
+ AttributeError: Cannot set frozen members id_
+ """
+
+ if isinstance(members, six.string_types):
+ members = members.replace(',', ' ').split()
+
+ def frozen_member_test():
+ frozen_members = ["'%s'" % f for f in members if f.endswith('_')]
+ if frozen_members:
+ return """
+ frozen_fields = fields_to_modify & set([{frozen_members}])
+ if frozen_fields:
+ raise AttributeError('Cannot set frozen members %s' % ', '.join(frozen_fields))
+ """.format(frozen_members=', '.join(frozen_members))
+
+ return ''
+
+ verbose_string = ""
+ if sys.version_info < (3, 7):
+ # The verbose argument to namedtuple is no longer supported from Python 3.7 onwards
+ verbose_string = ", verbose={verbose}".format(verbose=verbose)
+
+ quoted_members = ', '.join("'%s'" % m for m in members)
+ template = """
+class {class_name}(namedtuple('ImmutableBase', [{quoted_members}]{verbose_string})):
+ __slots__ = tuple()
+
+ def __repr__(self):
+ return super({class_name}, self).__repr__().replace('ImmutableBase', self.__class__.__name__)
+
+ def set(self, **kwargs):
+ if not kwargs:
+ return self
+
+ fields_to_modify = set(kwargs.keys())
+ if not fields_to_modify <= {member_set}:
+ raise AttributeError("'%s' is not a member" % ', '.join(fields_to_modify - {member_set}))
+
+ {frozen_member_test}
+
+ return self.__class__.__new__(self.__class__, *map(kwargs.pop, [{quoted_members}], self))
+""".format(quoted_members=quoted_members,
+ member_set="set([%s])" % quoted_members if quoted_members else 'set()',
+ frozen_member_test=frozen_member_test(),
+ verbose_string=verbose_string,
+ class_name=name)
+
+ if verbose:
+ print(template)
+
+ from collections import namedtuple
+ namespace = dict(namedtuple=namedtuple, __name__='pyrsistent_immutable')
+ try:
+ six.exec_(template, namespace)
+ except SyntaxError as e:
+ raise SyntaxError(e.message + ':\n' + template)
+
+ return namespace[name] \ No newline at end of file
diff --git a/third_party/python/pyrsistent/pyrsistent/_pbag.py b/third_party/python/pyrsistent/pyrsistent/_pbag.py
new file mode 100644
index 0000000000..9905e9a6e3
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_pbag.py
@@ -0,0 +1,267 @@
+from ._compat import Container, Iterable, Sized, Hashable
+from functools import reduce
+from pyrsistent._pmap import pmap
+
+
+def _add_to_counters(counters, element):
+ return counters.set(element, counters.get(element, 0) + 1)
+
+
+class PBag(object):
+ """
+ A persistent bag/multiset type.
+
+ Requires elements to be hashable, and allows duplicates, but has no
+ ordering. Bags are hashable.
+
+ Do not instantiate directly, instead use the factory functions :py:func:`b`
+ or :py:func:`pbag` to create an instance.
+
+ Some examples:
+
+ >>> s = pbag([1, 2, 3, 1])
+ >>> s2 = s.add(4)
+ >>> s3 = s2.remove(1)
+ >>> s
+ pbag([1, 1, 2, 3])
+ >>> s2
+ pbag([1, 1, 2, 3, 4])
+ >>> s3
+ pbag([1, 2, 3, 4])
+ """
+
+ __slots__ = ('_counts', '__weakref__')
+
+ def __init__(self, counts):
+ self._counts = counts
+
+ def add(self, element):
+ """
+ Add an element to the bag.
+
+ >>> s = pbag([1])
+ >>> s2 = s.add(1)
+ >>> s3 = s.add(2)
+ >>> s2
+ pbag([1, 1])
+ >>> s3
+ pbag([1, 2])
+ """
+ return PBag(_add_to_counters(self._counts, element))
+
+ def update(self, iterable):
+ """
+ Update bag with all elements in iterable.
+
+ >>> s = pbag([1])
+ >>> s.update([1, 2])
+ pbag([1, 1, 2])
+ """
+ if iterable:
+ return PBag(reduce(_add_to_counters, iterable, self._counts))
+
+ return self
+
+ def remove(self, element):
+ """
+ Remove an element from the bag.
+
+ >>> s = pbag([1, 1, 2])
+ >>> s2 = s.remove(1)
+ >>> s3 = s.remove(2)
+ >>> s2
+ pbag([1, 2])
+ >>> s3
+ pbag([1, 1])
+ """
+ if element not in self._counts:
+ raise KeyError(element)
+ elif self._counts[element] == 1:
+ newc = self._counts.remove(element)
+ else:
+ newc = self._counts.set(element, self._counts[element] - 1)
+ return PBag(newc)
+
+ def count(self, element):
+ """
+ Return the number of times an element appears.
+
+
+ >>> pbag([]).count('non-existent')
+ 0
+ >>> pbag([1, 1, 2]).count(1)
+ 2
+ """
+ return self._counts.get(element, 0)
+
+ def __len__(self):
+ """
+ Return the length including duplicates.
+
+ >>> len(pbag([1, 1, 2]))
+ 3
+ """
+ return sum(self._counts.itervalues())
+
+ def __iter__(self):
+ """
+ Return an iterator of all elements, including duplicates.
+
+ >>> list(pbag([1, 1, 2]))
+ [1, 1, 2]
+ >>> list(pbag([1, 2]))
+ [1, 2]
+ """
+ for elt, count in self._counts.iteritems():
+ for i in range(count):
+ yield elt
+
+ def __contains__(self, elt):
+ """
+ Check if an element is in the bag.
+
+ >>> 1 in pbag([1, 1, 2])
+ True
+ >>> 0 in pbag([1, 2])
+ False
+ """
+ return elt in self._counts
+
+ def __repr__(self):
+ return "pbag({0})".format(list(self))
+
+ def __eq__(self, other):
+ """
+ Check if two bags are equivalent, honoring the number of duplicates,
+ and ignoring insertion order.
+
+ >>> pbag([1, 1, 2]) == pbag([1, 2])
+ False
+ >>> pbag([2, 1, 0]) == pbag([0, 1, 2])
+ True
+ """
+ if type(other) is not PBag:
+ raise TypeError("Can only compare PBag with PBags")
+ return self._counts == other._counts
+
+ def __lt__(self, other):
+ raise TypeError('PBags are not orderable')
+
+ __le__ = __lt__
+ __gt__ = __lt__
+ __ge__ = __lt__
+
+ # Multiset-style operations similar to collections.Counter
+
+ def __add__(self, other):
+ """
+ Combine elements from two PBags.
+
+ >>> pbag([1, 2, 2]) + pbag([2, 3, 3])
+ pbag([1, 2, 2, 2, 3, 3])
+ """
+ if not isinstance(other, PBag):
+ return NotImplemented
+ result = self._counts.evolver()
+ for elem, other_count in other._counts.iteritems():
+ result[elem] = self.count(elem) + other_count
+ return PBag(result.persistent())
+
+ def __sub__(self, other):
+ """
+ Remove elements from one PBag that are present in another.
+
+ >>> pbag([1, 2, 2, 2, 3]) - pbag([2, 3, 3, 4])
+ pbag([1, 2, 2])
+ """
+ if not isinstance(other, PBag):
+ return NotImplemented
+ result = self._counts.evolver()
+ for elem, other_count in other._counts.iteritems():
+ newcount = self.count(elem) - other_count
+ if newcount > 0:
+ result[elem] = newcount
+ elif elem in self:
+ result.remove(elem)
+ return PBag(result.persistent())
+
+ def __or__(self, other):
+ """
+ Union: Keep elements that are present in either of two PBags.
+
+ >>> pbag([1, 2, 2, 2]) | pbag([2, 3, 3])
+ pbag([1, 2, 2, 2, 3, 3])
+ """
+ if not isinstance(other, PBag):
+ return NotImplemented
+ result = self._counts.evolver()
+ for elem, other_count in other._counts.iteritems():
+ count = self.count(elem)
+ newcount = max(count, other_count)
+ result[elem] = newcount
+ return PBag(result.persistent())
+
+ def __and__(self, other):
+ """
+ Intersection: Only keep elements that are present in both PBags.
+
+ >>> pbag([1, 2, 2, 2]) & pbag([2, 3, 3])
+ pbag([2])
+ """
+ if not isinstance(other, PBag):
+ return NotImplemented
+ result = pmap().evolver()
+ for elem, count in self._counts.iteritems():
+ newcount = min(count, other.count(elem))
+ if newcount > 0:
+ result[elem] = newcount
+ return PBag(result.persistent())
+
+ def __hash__(self):
+ """
+ Hash based on value of elements.
+
+ >>> m = pmap({pbag([1, 2]): "it's here!"})
+ >>> m[pbag([2, 1])]
+ "it's here!"
+ >>> pbag([1, 1, 2]) in m
+ False
+ """
+ return hash(self._counts)
+
+
+Container.register(PBag)
+Iterable.register(PBag)
+Sized.register(PBag)
+Hashable.register(PBag)
+
+
+def b(*elements):
+ """
+ Construct a persistent bag.
+
+ Takes an arbitrary number of arguments to insert into the new persistent
+ bag.
+
+ >>> b(1, 2, 3, 2)
+ pbag([1, 2, 2, 3])
+ """
+ return pbag(elements)
+
+
+def pbag(elements):
+ """
+ Convert an iterable to a persistent bag.
+
+ Takes an iterable with elements to insert.
+
+ >>> pbag([1, 2, 3, 2])
+ pbag([1, 2, 2, 3])
+ """
+ if not elements:
+ return _EMPTY_PBAG
+ return PBag(reduce(_add_to_counters, elements, pmap()))
+
+
+_EMPTY_PBAG = PBag(pmap())
+
diff --git a/third_party/python/pyrsistent/pyrsistent/_pclass.py b/third_party/python/pyrsistent/pyrsistent/_pclass.py
new file mode 100644
index 0000000000..a437f71648
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_pclass.py
@@ -0,0 +1,264 @@
+import six
+from pyrsistent._checked_types import (InvariantException, CheckedType, _restore_pickle, store_invariants)
+from pyrsistent._field_common import (
+ set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants
+)
+from pyrsistent._transformations import transform
+
+
+def _is_pclass(bases):
+ return len(bases) == 1 and bases[0] == CheckedType
+
+
+class PClassMeta(type):
+ def __new__(mcs, name, bases, dct):
+ set_fields(dct, bases, name='_pclass_fields')
+ store_invariants(dct, bases, '_pclass_invariants', '__invariant__')
+ dct['__slots__'] = ('_pclass_frozen',) + tuple(key for key in dct['_pclass_fields'])
+
+ # There must only be one __weakref__ entry in the inheritance hierarchy,
+ # let's put it on the top level class.
+ if _is_pclass(bases):
+ dct['__slots__'] += ('__weakref__',)
+
+ return super(PClassMeta, mcs).__new__(mcs, name, bases, dct)
+
+_MISSING_VALUE = object()
+
+
+def _check_and_set_attr(cls, field, name, value, result, invariant_errors):
+ check_type(cls, field, name, value)
+ is_ok, error_code = field.invariant(value)
+ if not is_ok:
+ invariant_errors.append(error_code)
+ else:
+ setattr(result, name, value)
+
+
+@six.add_metaclass(PClassMeta)
+class PClass(CheckedType):
+ """
+ A PClass is a python class with a fixed set of specified fields. PClasses are declared as python classes inheriting
+ from PClass. It is defined the same way that PRecords are and behaves like a PRecord in all aspects except that it
+ is not a PMap and hence not a collection but rather a plain Python object.
+
+
+ More documentation and examples of PClass usage are available at https://github.com/tobgu/pyrsistent
+ """
+ def __new__(cls, **kwargs): # Support *args?
+ result = super(PClass, cls).__new__(cls)
+ factory_fields = kwargs.pop('_factory_fields', None)
+ ignore_extra = kwargs.pop('ignore_extra', None)
+ missing_fields = []
+ invariant_errors = []
+ for name, field in cls._pclass_fields.items():
+ if name in kwargs:
+ if factory_fields is None or name in factory_fields:
+ if is_field_ignore_extra_complaint(PClass, field, ignore_extra):
+ value = field.factory(kwargs[name], ignore_extra=ignore_extra)
+ else:
+ value = field.factory(kwargs[name])
+ else:
+ value = kwargs[name]
+ _check_and_set_attr(cls, field, name, value, result, invariant_errors)
+ del kwargs[name]
+ elif field.initial is not PFIELD_NO_INITIAL:
+ initial = field.initial() if callable(field.initial) else field.initial
+ _check_and_set_attr(
+ cls, field, name, initial, result, invariant_errors)
+ elif field.mandatory:
+ missing_fields.append('{0}.{1}'.format(cls.__name__, name))
+
+ if invariant_errors or missing_fields:
+ raise InvariantException(tuple(invariant_errors), tuple(missing_fields), 'Field invariant failed')
+
+ if kwargs:
+ raise AttributeError("'{0}' are not among the specified fields for {1}".format(
+ ', '.join(kwargs), cls.__name__))
+
+ check_global_invariants(result, cls._pclass_invariants)
+
+ result._pclass_frozen = True
+ return result
+
+ def set(self, *args, **kwargs):
+ """
+ Set a field in the instance. Returns a new instance with the updated value. The original instance remains
+ unmodified. Accepts keyword arguments, or a single string naming the field followed by its value.
+
+ >>> from pyrsistent import PClass, field
+ >>> class AClass(PClass):
+ ... x = field()
+ ...
+ >>> a = AClass(x=1)
+ >>> a2 = a.set(x=2)
+ >>> a3 = a.set('x', 3)
+ >>> a
+ AClass(x=1)
+ >>> a2
+ AClass(x=2)
+ >>> a3
+ AClass(x=3)
+ """
+ if args:
+ kwargs[args[0]] = args[1]
+
+ factory_fields = set(kwargs)
+
+ for key in self._pclass_fields:
+ if key not in kwargs:
+ value = getattr(self, key, _MISSING_VALUE)
+ if value is not _MISSING_VALUE:
+ kwargs[key] = value
+
+ return self.__class__(_factory_fields=factory_fields, **kwargs)
+
+ @classmethod
+ def create(cls, kwargs, _factory_fields=None, ignore_extra=False):
+ """
+ Factory method. Will create a new PClass of the current type and assign the values
+ specified in kwargs.
+
+ :param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not
+ in the set of fields on the PClass.
+ """
+ if isinstance(kwargs, cls):
+ return kwargs
+
+ if ignore_extra:
+ kwargs = {k: kwargs[k] for k in cls._pclass_fields if k in kwargs}
+
+ return cls(_factory_fields=_factory_fields, ignore_extra=ignore_extra, **kwargs)
+
+ def serialize(self, format=None):
+ """
+ Serialize the current PClass using custom serializer functions for fields where
+ such have been supplied.
+ """
+ result = {}
+ for name in self._pclass_fields:
+ value = getattr(self, name, _MISSING_VALUE)
+ if value is not _MISSING_VALUE:
+ result[name] = serialize(self._pclass_fields[name].serializer, format, value)
+
+ return result
+
+ def transform(self, *transformations):
+ """
+ Apply transformations to the current PClass. For more details on transformations see
+ the documentation for PMap. Transformations on PClasses do not support key matching
+ since the PClass is not a collection. Apart from that the transformations available
+ for other persistent types work as expected.
+ """
+ return transform(self, transformations)
+
+ def __eq__(self, other):
+ if isinstance(other, self.__class__):
+ for name in self._pclass_fields:
+ if getattr(self, name, _MISSING_VALUE) != getattr(other, name, _MISSING_VALUE):
+ return False
+
+ return True
+
+ return NotImplemented
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ # May want to optimize this by caching the hash somehow
+ return hash(tuple((key, getattr(self, key, _MISSING_VALUE)) for key in self._pclass_fields))
+
+ def __setattr__(self, key, value):
+ if getattr(self, '_pclass_frozen', False):
+ raise AttributeError("Can't set attribute, key={0}, value={1}".format(key, value))
+
+ super(PClass, self).__setattr__(key, value)
+
+ def __delattr__(self, key):
+ raise AttributeError("Can't delete attribute, key={0}, use remove()".format(key))
+
+ def _to_dict(self):
+ result = {}
+ for key in self._pclass_fields:
+ value = getattr(self, key, _MISSING_VALUE)
+ if value is not _MISSING_VALUE:
+ result[key] = value
+
+ return result
+
+ def __repr__(self):
+ return "{0}({1})".format(self.__class__.__name__,
+ ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self._to_dict().items()))
+
+ def __reduce__(self):
+ # Pickling support
+ data = dict((key, getattr(self, key)) for key in self._pclass_fields if hasattr(self, key))
+ return _restore_pickle, (self.__class__, data,)
+
+ def evolver(self):
+ """
+ Returns an evolver for this object.
+ """
+ return _PClassEvolver(self, self._to_dict())
+
+ def remove(self, name):
+ """
+ Remove attribute given by name from the current instance. Raises AttributeError if the
+ attribute doesn't exist.
+ """
+ evolver = self.evolver()
+ del evolver[name]
+ return evolver.persistent()
+
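A short sketch tying set(), evolver() and persistent() together; Point is a hypothetical example class:

    from pyrsistent import PClass, field

    class Point(PClass):
        x = field(type=int, mandatory=True)
        y = field(type=int, initial=0)

    p = Point(x=1)          # Point(x=1, y=0)
    e = p.evolver()
    e.y = 5                 # attribute assignment routes through the evolver's set()
    p2 = e.persistent()     # Point(x=1, y=5); p itself is unchanged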
+
+class _PClassEvolver(object):
+ __slots__ = ('_pclass_evolver_original', '_pclass_evolver_data', '_pclass_evolver_data_is_dirty', '_factory_fields')
+
+ def __init__(self, original, initial_dict):
+ self._pclass_evolver_original = original
+ self._pclass_evolver_data = initial_dict
+ self._pclass_evolver_data_is_dirty = False
+ self._factory_fields = set()
+
+ def __getitem__(self, item):
+ return self._pclass_evolver_data[item]
+
+ def set(self, key, value):
+ if self._pclass_evolver_data.get(key, _MISSING_VALUE) is not value:
+ self._pclass_evolver_data[key] = value
+ self._factory_fields.add(key)
+ self._pclass_evolver_data_is_dirty = True
+
+ return self
+
+ def __setitem__(self, key, value):
+ self.set(key, value)
+
+ def remove(self, item):
+ if item in self._pclass_evolver_data:
+ del self._pclass_evolver_data[item]
+ self._factory_fields.discard(item)
+ self._pclass_evolver_data_is_dirty = True
+ return self
+
+ raise AttributeError(item)
+
+ def __delitem__(self, item):
+ self.remove(item)
+
+ def persistent(self):
+ if self._pclass_evolver_data_is_dirty:
+ return self._pclass_evolver_original.__class__(_factory_fields=self._factory_fields,
+ **self._pclass_evolver_data)
+
+ return self._pclass_evolver_original
+
+ def __setattr__(self, key, value):
+ if key not in self.__slots__:
+ self.set(key, value)
+ else:
+ super(_PClassEvolver, self).__setattr__(key, value)
+
+ def __getattr__(self, item):
+ return self[item]
diff --git a/third_party/python/pyrsistent/pyrsistent/_pdeque.py b/third_party/python/pyrsistent/pyrsistent/_pdeque.py
new file mode 100644
index 0000000000..5147b3fa6a
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_pdeque.py
@@ -0,0 +1,376 @@
+from ._compat import Sequence, Hashable
+from itertools import islice, chain
+from numbers import Integral
+from pyrsistent._plist import plist
+
+
+class PDeque(object):
+ """
+ Persistent double ended queue (deque). Allows quick appends and pops in both ends. Implemented
+ using two persistent lists.
+
+ A maximum length can be specified to create a bounded queue.
+
+ Fully supports the Sequence and Hashable protocols including indexing and slicing but
+ if you need fast random access, go for the PVector instead.
+
+ Do not instantiate directly, instead use the factory functions :py:func:`dq` or :py:func:`pdeque` to
+ create an instance.
+
+ Some examples:
+
+ >>> x = pdeque([1, 2, 3])
+ >>> x.left
+ 1
+ >>> x.right
+ 3
+ >>> x[0] == x.left
+ True
+ >>> x[-1] == x.right
+ True
+ >>> x.pop()
+ pdeque([1, 2])
+ >>> x.pop() == x[:-1]
+ True
+ >>> x.popleft()
+ pdeque([2, 3])
+ >>> x.append(4)
+ pdeque([1, 2, 3, 4])
+ >>> x.appendleft(4)
+ pdeque([4, 1, 2, 3])
+
+ >>> y = pdeque([1, 2, 3], maxlen=3)
+ >>> y.append(4)
+ pdeque([2, 3, 4], maxlen=3)
+ >>> y.appendleft(4)
+ pdeque([4, 1, 2], maxlen=3)
+ """
+ __slots__ = ('_left_list', '_right_list', '_length', '_maxlen', '__weakref__')
+
+ def __new__(cls, left_list, right_list, length, maxlen=None):
+ instance = super(PDeque, cls).__new__(cls)
+ instance._left_list = left_list
+ instance._right_list = right_list
+ instance._length = length
+
+ if maxlen is not None:
+ if not isinstance(maxlen, Integral):
+ raise TypeError('An integer is required as maxlen')
+
+ if maxlen < 0:
+ raise ValueError("maxlen must be non-negative")
+
+ instance._maxlen = maxlen
+ return instance
+
+ @property
+ def right(self):
+ """
+ Rightmost element in the deque.
+ """
+ return PDeque._tip_from_lists(self._right_list, self._left_list)
+
+ @property
+ def left(self):
+ """
+ Leftmost element in dqueue.
+ """
+ return PDeque._tip_from_lists(self._left_list, self._right_list)
+
+ @staticmethod
+ def _tip_from_lists(primary_list, secondary_list):
+ if primary_list:
+ return primary_list.first
+
+ if secondary_list:
+ return secondary_list[-1]
+
+ raise IndexError('No elements in empty deque')
+
+ def __iter__(self):
+ return chain(self._left_list, self._right_list.reverse())
+
+ def __repr__(self):
+ return "pdeque({0}{1})".format(list(self),
+ ', maxlen={0}'.format(self._maxlen) if self._maxlen is not None else '')
+ __str__ = __repr__
+
+ @property
+ def maxlen(self):
+ """
+ Maximum length of the queue.
+ """
+ return self._maxlen
+
+ def pop(self, count=1):
+ """
+ Return new deque with rightmost element removed. Popping the empty queue
+ will return the empty queue. An optional count can be given to indicate the
+ number of elements to pop. Popping with a negative index is the same as
+ popleft. Executes in amortized O(k) where k is the number of elements to pop.
+
+ >>> pdeque([1, 2]).pop()
+ pdeque([1])
+ >>> pdeque([1, 2]).pop(2)
+ pdeque([])
+ >>> pdeque([1, 2]).pop(-1)
+ pdeque([2])
+ """
+ if count < 0:
+ return self.popleft(-count)
+
+ new_right_list, new_left_list = PDeque._pop_lists(self._right_list, self._left_list, count)
+ return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen)
+
+ def popleft(self, count=1):
+ """
+ Return new deque with leftmost element removed. Otherwise functionally
+ equivalent to pop().
+
+ >>> pdeque([1, 2]).popleft()
+ pdeque([2])
+ """
+ if count < 0:
+ return self.pop(-count)
+
+ new_left_list, new_right_list = PDeque._pop_lists(self._left_list, self._right_list, count)
+ return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen)
+
+ @staticmethod
+ def _pop_lists(primary_list, secondary_list, count):
+ new_primary_list = primary_list
+ new_secondary_list = secondary_list
+
+ while count > 0 and (new_primary_list or new_secondary_list):
+ count -= 1
+ if new_primary_list.rest:
+ new_primary_list = new_primary_list.rest
+ elif new_primary_list:
+ new_primary_list = new_secondary_list.reverse()
+ new_secondary_list = plist()
+ else:
+ new_primary_list = new_secondary_list.reverse().rest
+ new_secondary_list = plist()
+
+ return new_primary_list, new_secondary_list
+
+ def _is_empty(self):
+ return not self._left_list and not self._right_list
+
+ def __lt__(self, other):
+ if not isinstance(other, PDeque):
+ return NotImplemented
+
+ return tuple(self) < tuple(other)
+
+ def __eq__(self, other):
+ if not isinstance(other, PDeque):
+ return NotImplemented
+
+ if tuple(self) == tuple(other):
+ # Sanity check of the length value since it is redundant (there for performance)
+ assert len(self) == len(other)
+ return True
+
+ return False
+
+ def __hash__(self):
+ return hash(tuple(self))
+
+ def __len__(self):
+ return self._length
+
+ def append(self, elem):
+ """
+ Return new deque with elem as the rightmost element.
+
+ >>> pdeque([1, 2]).append(3)
+ pdeque([1, 2, 3])
+ """
+ new_left_list, new_right_list, new_length = self._append(self._left_list, self._right_list, elem)
+ return PDeque(new_left_list, new_right_list, new_length, self._maxlen)
+
+ def appendleft(self, elem):
+ """
+ Return new deque with elem as the leftmost element.
+
+ >>> pdeque([1, 2]).appendleft(3)
+ pdeque([3, 1, 2])
+ """
+ new_right_list, new_left_list, new_length = self._append(self._right_list, self._left_list, elem)
+ return PDeque(new_left_list, new_right_list, new_length, self._maxlen)
+
+ def _append(self, primary_list, secondary_list, elem):
+ if self._maxlen is not None and self._length == self._maxlen:
+ if self._maxlen == 0:
+ return primary_list, secondary_list, 0
+ new_primary_list, new_secondary_list = PDeque._pop_lists(primary_list, secondary_list, 1)
+ return new_primary_list, new_secondary_list.cons(elem), self._length
+
+ return primary_list, secondary_list.cons(elem), self._length + 1
+
+ @staticmethod
+ def _extend_list(the_list, iterable):
+ count = 0
+ for elem in iterable:
+ the_list = the_list.cons(elem)
+ count += 1
+
+ return the_list, count
+
+ def _extend(self, primary_list, secondary_list, iterable):
+ new_primary_list, extend_count = PDeque._extend_list(primary_list, iterable)
+ new_secondary_list = secondary_list
+ current_len = self._length + extend_count
+ if self._maxlen is not None and current_len > self._maxlen:
+ pop_len = current_len - self._maxlen
+ new_secondary_list, new_primary_list = PDeque._pop_lists(new_secondary_list, new_primary_list, pop_len)
+ extend_count -= pop_len
+
+ return new_primary_list, new_secondary_list, extend_count
+
+ def extend(self, iterable):
+ """
+ Return new deque with all elements of iterable appended to the right.
+
+ >>> pdeque([1, 2]).extend([3, 4])
+ pdeque([1, 2, 3, 4])
+ """
+ new_right_list, new_left_list, extend_count = self._extend(self._right_list, self._left_list, iterable)
+ return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen)
+
+ def extendleft(self, iterable):
+ """
+ Return new deque with all elements of iterable appended to the left.
+
+ NB! The elements will be inserted in reverse order compared to the order in the iterable.
+
+ >>> pdeque([1, 2]).extendleft([3, 4])
+ pdeque([4, 3, 1, 2])
+ """
+ new_left_list, new_right_list, extend_count = self._extend(self._left_list, self._right_list, iterable)
+ return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen)
+
+ def count(self, elem):
+ """
+ Return the number of elements equal to elem present in the queue.
+
+ >>> pdeque([1, 2, 1]).count(1)
+ 2
+ """
+ return self._left_list.count(elem) + self._right_list.count(elem)
+
+ def remove(self, elem):
+ """
+ Return new deque with first element from left equal to elem removed. If no such element is found
+ a ValueError is raised.
+
+ >>> pdeque([2, 1, 2]).remove(2)
+ pdeque([1, 2])
+ """
+ try:
+ return PDeque(self._left_list.remove(elem), self._right_list, self._length - 1)
+ except ValueError:
+ # Value not found in left list, try the right list
+ try:
+ # This is severely inefficient with a double reverse, should perhaps implement a remove_last()?
+ return PDeque(self._left_list,
+ self._right_list.reverse().remove(elem).reverse(), self._length - 1)
+ except ValueError:
+ raise ValueError('{0} not found in PDeque'.format(elem))
+
+ def reverse(self):
+ """
+ Return reversed deque.
+
+ >>> pdeque([1, 2, 3]).reverse()
+ pdeque([3, 2, 1])
+
+ Also supports the standard python reverse function.
+
+ >>> reversed(pdeque([1, 2, 3]))
+ pdeque([3, 2, 1])
+ """
+ return PDeque(self._right_list, self._left_list, self._length)
+ __reversed__ = reverse
+
+ def rotate(self, steps):
+ """
+ Return deque with elements rotated steps steps.
+
+ >>> x = pdeque([1, 2, 3])
+ >>> x.rotate(1)
+ pdeque([3, 1, 2])
+ >>> x.rotate(-2)
+ pdeque([3, 1, 2])
+ """
+ popped_deque = self.pop(steps)
+ if steps >= 0:
+ return popped_deque.extendleft(islice(self.reverse(), steps))
+
+ return popped_deque.extend(islice(self, -steps))
+
+ def __reduce__(self):
+ # Pickling support
+ return pdeque, (list(self), self._maxlen)
+
+ def __getitem__(self, index):
+ if isinstance(index, slice):
+ if index.step is not None and index.step != 1:
+ # Too difficult, no structural sharing possible
+ return pdeque(tuple(self)[index], maxlen=self._maxlen)
+
+ result = self
+ if index.start is not None:
+ result = result.popleft(index.start % self._length)
+ if index.stop is not None:
+ result = result.pop(self._length - (index.stop % self._length))
+
+ return result
+
+ if not isinstance(index, Integral):
+ raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
+
+ if index >= 0:
+ return self.popleft(index).left
+
+ shifted = len(self) + index
+ if shifted < 0:
+ raise IndexError(
+ "pdeque index {0} out of range {1}".format(index, len(self)),
+ )
+ return self.popleft(shifted).left
+
+ index = Sequence.index
+
+Sequence.register(PDeque)
+Hashable.register(PDeque)
+
+
+def pdeque(iterable=(), maxlen=None):
+ """
+ Return deque containing the elements of iterable. If maxlen is specified then
+ len(iterable) - maxlen elements are discarded from the left if len(iterable) > maxlen.
+
+ >>> pdeque([1, 2, 3])
+ pdeque([1, 2, 3])
+ >>> pdeque([1, 2, 3, 4], maxlen=2)
+ pdeque([3, 4], maxlen=2)
+ """
+ t = tuple(iterable)
+ if maxlen is not None:
+ t = t[-maxlen:]
+ length = len(t)
+ pivot = int(length / 2)
+ left = plist(t[:pivot])
+ right = plist(t[pivot:], reverse=True)
+ return PDeque(left, right, length, maxlen)
+
+def dq(*elements):
+ """
+ Return deque containing all arguments.
+
+ >>> dq(1, 2, 3)
+ pdeque([1, 2, 3])
+ """
+ return pdeque(elements)
diff --git a/third_party/python/pyrsistent/pyrsistent/_plist.py b/third_party/python/pyrsistent/pyrsistent/_plist.py
new file mode 100644
index 0000000000..8b4267f5e3
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_plist.py
@@ -0,0 +1,313 @@
+from ._compat import Sequence, Hashable
+from numbers import Integral
+from functools import reduce
+
+
+class _PListBuilder(object):
+ """
+ Helper class to allow construction of a list without
+ having to reverse it in the end.
+ """
+ __slots__ = ('_head', '_tail')
+
+ def __init__(self):
+ self._head = _EMPTY_PLIST
+ self._tail = _EMPTY_PLIST
+
+ def _append(self, elem, constructor):
+ if not self._tail:
+ self._head = constructor(elem)
+ self._tail = self._head
+ else:
+ self._tail.rest = constructor(elem)
+ self._tail = self._tail.rest
+
+ return self._head
+
+ def append_elem(self, elem):
+ return self._append(elem, lambda e: PList(e, _EMPTY_PLIST))
+
+ def append_plist(self, pl):
+ return self._append(pl, lambda l: l)
+
+ def build(self):
+ return self._head
+
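For illustration, how the builder keeps insertion order without a final reverse (internal helper, shown only as a sketch):

    builder = _PListBuilder()
    builder.append_elem(1)
    builder.append_elem(2)
    builder.append_elem(3)
    builder.build()          # plist([1, 2, 3]); elements come out in insertion order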
+
+class _PListBase(object):
+ __slots__ = ('__weakref__',)
+
+ # Selected implementations can be taken straight from the Sequence
+ # class, other are less suitable. Especially those that work with
+ # index lookups.
+ count = Sequence.count
+ index = Sequence.index
+
+ def __reduce__(self):
+ # Pickling support
+ return plist, (list(self),)
+
+ def __len__(self):
+ """
+ Return the length of the list, computed by traversing it.
+
+ This is obviously O(n) but with the current implementation
+ where a list is also a node the overhead of storing the length
+ in every node would be quite significant.
+ """
+ return sum(1 for _ in self)
+
+ def __repr__(self):
+ return "plist({0})".format(list(self))
+ __str__ = __repr__
+
+ def cons(self, elem):
+ """
+ Return a new list with elem inserted as new head.
+
+ >>> plist([1, 2]).cons(3)
+ plist([3, 1, 2])
+ """
+ return PList(elem, self)
+
+ def mcons(self, iterable):
+ """
+ Return a new list with all elements of iterable repeatedly cons:ed to the current list.
+ NB! The elements will be inserted in the reverse order of the iterable.
+ Runs in O(len(iterable)).
+
+ >>> plist([1, 2]).mcons([3, 4])
+ plist([4, 3, 1, 2])
+ """
+ head = self
+ for elem in iterable:
+ head = head.cons(elem)
+
+ return head
+
+ def reverse(self):
+ """
+ Return a reversed version of list. Runs in O(n) where n is the length of the list.
+
+ >>> plist([1, 2, 3]).reverse()
+ plist([3, 2, 1])
+
+ Also supports the standard reversed function.
+
+ >>> reversed(plist([1, 2, 3]))
+ plist([3, 2, 1])
+ """
+ result = plist()
+ head = self
+ while head:
+ result = result.cons(head.first)
+ head = head.rest
+
+ return result
+ __reversed__ = reverse
+
+ def split(self, index):
+ """
+ Split the list at the position specified by index. Returns a tuple containing the
+ list up until index and the list after the index. Runs in O(index).
+
+ >>> plist([1, 2, 3, 4]).split(2)
+ (plist([1, 2]), plist([3, 4]))
+ """
+ lb = _PListBuilder()
+ right_list = self
+ i = 0
+ while right_list and i < index:
+ lb.append_elem(right_list.first)
+ right_list = right_list.rest
+ i += 1
+
+ if not right_list:
+ # Just a small optimization in the cases where no split occurred
+ return self, _EMPTY_PLIST
+
+ return lb.build(), right_list
+
+ def __iter__(self):
+ li = self
+ while li:
+ yield li.first
+ li = li.rest
+
+ def __lt__(self, other):
+ if not isinstance(other, _PListBase):
+ return NotImplemented
+
+ return tuple(self) < tuple(other)
+
+ def __eq__(self, other):
+ """
+ Traverses the lists, checking equality of elements.
+
+ This is an O(n) operation, but preserves the standard semantics of list equality.
+ """
+ if not isinstance(other, _PListBase):
+ return NotImplemented
+
+ self_head = self
+ other_head = other
+ while self_head and other_head:
+ if not self_head.first == other_head.first:
+ return False
+ self_head = self_head.rest
+ other_head = other_head.rest
+
+ return not self_head and not other_head
+
+ def __getitem__(self, index):
+ # Don't use this data structure if you plan to do a lot of indexing; it is
+ # very inefficient! Use a PVector instead!
+
+ if isinstance(index, slice):
+ if index.start is not None and index.stop is None and (index.step is None or index.step == 1):
+ return self._drop(index.start)
+
+ # Take the easy way out for all other slicing cases, not much structural reuse possible anyway
+ return plist(tuple(self)[index])
+
+ if not isinstance(index, Integral):
+ raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
+
+ if index < 0:
+ # NB: O(n)!
+ index += len(self)
+
+ try:
+ return self._drop(index).first
+ except AttributeError:
+ raise IndexError("PList index out of range")
+
+ def _drop(self, count):
+ if count < 0:
+ raise IndexError("PList index out of range")
+
+ head = self
+ while count > 0:
+ head = head.rest
+ count -= 1
+
+ return head
+
+ def __hash__(self):
+ return hash(tuple(self))
+
+ def remove(self, elem):
+ """
+ Return new list with first element equal to elem removed. O(k) where k is the position
+ of the element that is removed.
+
+ Raises ValueError if no matching element is found.
+
+ >>> plist([1, 2, 1]).remove(1)
+ plist([2, 1])
+ """
+
+ builder = _PListBuilder()
+ head = self
+ while head:
+ if head.first == elem:
+ return builder.append_plist(head.rest)
+
+ builder.append_elem(head.first)
+ head = head.rest
+
+ raise ValueError('{0} not found in PList'.format(elem))
+
+
+class PList(_PListBase):
+ """
+ Classical Lisp style singly linked list. Adding elements to the head using cons is O(1).
+ Element access is O(k) where k is the position of the element in the list. Taking the
+ length of the list is O(n).
+
+ Fully supports the Sequence and Hashable protocols including indexing and slicing but
+ if you need fast random access, go for the PVector instead.
+
+ Do not instantiate directly, instead use the factory functions :py:func:`l` or :py:func:`plist` to
+ create an instance.
+
+ Some examples:
+
+ >>> x = plist([1, 2])
+ >>> y = x.cons(3)
+ >>> x
+ plist([1, 2])
+ >>> y
+ plist([3, 1, 2])
+ >>> y.first
+ 3
+ >>> y.rest == x
+ True
+ >>> y[:2]
+ plist([3, 1])
+ """
+ __slots__ = ('first', 'rest')
+
+ def __new__(cls, first, rest):
+ instance = super(PList, cls).__new__(cls)
+ instance.first = first
+ instance.rest = rest
+ return instance
+
+ def __bool__(self):
+ return True
+ __nonzero__ = __bool__
+
+
+Sequence.register(PList)
+Hashable.register(PList)
+
+
+class _EmptyPList(_PListBase):
+ __slots__ = ()
+
+ def __bool__(self):
+ return False
+ __nonzero__ = __bool__
+
+ @property
+ def first(self):
+ raise AttributeError("Empty PList has no first")
+
+ @property
+ def rest(self):
+ return self
+
+
+Sequence.register(_EmptyPList)
+Hashable.register(_EmptyPList)
+
+_EMPTY_PLIST = _EmptyPList()
+
+
+def plist(iterable=(), reverse=False):
+ """
+ Creates a new persistent list containing all elements of iterable.
+ Optional parameter reverse specifies if the elements should be inserted in
+ reverse order or not.
+
+ >>> plist([1, 2, 3])
+ plist([1, 2, 3])
+ >>> plist([1, 2, 3], reverse=True)
+ plist([3, 2, 1])
+ """
+ if not reverse:
+ iterable = list(iterable)
+ iterable.reverse()
+
+ return reduce(lambda pl, elem: pl.cons(elem), iterable, _EMPTY_PLIST)
+
+
+def l(*elements):
+ """
+ Creates a new persistent list containing all arguments.
+
+ >>> l(1, 2, 3)
+ plist([1, 2, 3])
+ """
+ return plist(elements)
diff --git a/third_party/python/pyrsistent/pyrsistent/_pmap.py b/third_party/python/pyrsistent/pyrsistent/_pmap.py
new file mode 100644
index 0000000000..e8a0ec53f8
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_pmap.py
@@ -0,0 +1,460 @@
+from ._compat import Mapping, Hashable
+from itertools import chain
+import six
+from pyrsistent._pvector import pvector
+from pyrsistent._transformations import transform
+
+
+class PMap(object):
+ """
+ Persistent map/dict. Tries to follow the same naming conventions as the built in dict where feasible.
+
+ Do not instantiate directly, instead use the factory functions :py:func:`m` or :py:func:`pmap` to
+ create an instance.
+
+    Was originally written as a very close copy of the Clojure equivalent but was later rewritten to more
+    closely resemble the python dict. This means that a sparse vector (a PVector) of buckets is used. The keys are
+    hashed and the elements inserted at position hash % len(bucket_vector). Whenever the map size exceeds 2/3 of
+    the containing vector's size the map is reallocated to a vector of double the size. This is done to avoid
+ excessive hash collisions.
+
+    This structure corresponds most closely to the built in dict type and is intended as a replacement. Where the
+    semantics are (more or less) the same, the same function names have been used, but in some cases that is not possible,
+    for example for assignment and deletion of values.
+
+ PMap implements the Mapping protocol and is Hashable. It also supports dot-notation for
+ element access.
+
+ Random access and insert is log32(n) where n is the size of the map.
+
+ The following are examples of some common operations on persistent maps
+
+ >>> m1 = m(a=1, b=3)
+ >>> m2 = m1.set('c', 3)
+ >>> m3 = m2.remove('a')
+ >>> m1
+ pmap({'b': 3, 'a': 1})
+ >>> m2
+ pmap({'c': 3, 'b': 3, 'a': 1})
+ >>> m3
+ pmap({'c': 3, 'b': 3})
+ >>> m3['c']
+ 3
+ >>> m3.c
+ 3
+ """
+ __slots__ = ('_size', '_buckets', '__weakref__', '_cached_hash')
+
+ def __new__(cls, size, buckets):
+ self = super(PMap, cls).__new__(cls)
+ self._size = size
+ self._buckets = buckets
+ return self
+
+ @staticmethod
+ def _get_bucket(buckets, key):
+ index = hash(key) % len(buckets)
+ bucket = buckets[index]
+ return index, bucket
+
+ @staticmethod
+ def _getitem(buckets, key):
+ _, bucket = PMap._get_bucket(buckets, key)
+ if bucket:
+ for k, v in bucket:
+ if k == key:
+ return v
+
+ raise KeyError(key)
+
+ def __getitem__(self, key):
+ return PMap._getitem(self._buckets, key)
+
+ @staticmethod
+ def _contains(buckets, key):
+ _, bucket = PMap._get_bucket(buckets, key)
+ if bucket:
+ for k, _ in bucket:
+ if k == key:
+ return True
+
+ return False
+
+ return False
+
+ def __contains__(self, key):
+ return self._contains(self._buckets, key)
+
+ get = Mapping.get
+
+ def __iter__(self):
+ return self.iterkeys()
+
+ def __getattr__(self, key):
+ try:
+ return self[key]
+ except KeyError:
+ raise AttributeError(
+ "{0} has no attribute '{1}'".format(type(self).__name__, key)
+ )
+
+ def iterkeys(self):
+ for k, _ in self.iteritems():
+ yield k
+
+ # These are more efficient implementations compared to the original
+    # methods that are based on the keys iterator and then call the
+ # accessor functions to access the value for the corresponding key
+ def itervalues(self):
+ for _, v in self.iteritems():
+ yield v
+
+ def iteritems(self):
+ for bucket in self._buckets:
+ if bucket:
+ for k, v in bucket:
+ yield k, v
+
+ def values(self):
+ return pvector(self.itervalues())
+
+ def keys(self):
+ return pvector(self.iterkeys())
+
+ def items(self):
+ return pvector(self.iteritems())
+
+ def __len__(self):
+ return self._size
+
+ def __repr__(self):
+ return 'pmap({0})'.format(str(dict(self)))
+
+ def __eq__(self, other):
+ if self is other:
+ return True
+ if not isinstance(other, Mapping):
+ return NotImplemented
+ if len(self) != len(other):
+ return False
+ if isinstance(other, PMap):
+ if (hasattr(self, '_cached_hash') and hasattr(other, '_cached_hash')
+ and self._cached_hash != other._cached_hash):
+ return False
+ if self._buckets == other._buckets:
+ return True
+ return dict(self.iteritems()) == dict(other.iteritems())
+ elif isinstance(other, dict):
+ return dict(self.iteritems()) == other
+ return dict(self.iteritems()) == dict(six.iteritems(other))
+
+ __ne__ = Mapping.__ne__
+
+ def __lt__(self, other):
+ raise TypeError('PMaps are not orderable')
+
+ __le__ = __lt__
+ __gt__ = __lt__
+ __ge__ = __lt__
+
+ def __str__(self):
+ return self.__repr__()
+
+ def __hash__(self):
+ if not hasattr(self, '_cached_hash'):
+ self._cached_hash = hash(frozenset(self.iteritems()))
+ return self._cached_hash
+
+ def set(self, key, val):
+ """
+ Return a new PMap with key and val inserted.
+
+ >>> m1 = m(a=1, b=2)
+ >>> m2 = m1.set('a', 3)
+        >>> m3 = m1.set('c', 4)
+ >>> m1
+ pmap({'b': 2, 'a': 1})
+ >>> m2
+ pmap({'b': 2, 'a': 3})
+ >>> m3
+ pmap({'c': 4, 'b': 2, 'a': 1})
+ """
+ return self.evolver().set(key, val).persistent()
+
+ def remove(self, key):
+ """
+ Return a new PMap without the element specified by key. Raises KeyError if the element
+ is not present.
+
+ >>> m1 = m(a=1, b=2)
+ >>> m1.remove('a')
+ pmap({'b': 2})
+ """
+ return self.evolver().remove(key).persistent()
+
+ def discard(self, key):
+ """
+ Return a new PMap without the element specified by key. Returns reference to itself
+ if element is not present.
+
+ >>> m1 = m(a=1, b=2)
+ >>> m1.discard('a')
+ pmap({'b': 2})
+ >>> m1 is m1.discard('c')
+ True
+ """
+ try:
+ return self.remove(key)
+ except KeyError:
+ return self
+
+ def update(self, *maps):
+ """
+ Return a new PMap with the items in Mappings inserted. If the same key is present in multiple
+ maps the rightmost (last) value is inserted.
+
+ >>> m1 = m(a=1, b=2)
+ >>> m1.update(m(a=2, c=3), {'a': 17, 'd': 35})
+ pmap({'c': 3, 'b': 2, 'a': 17, 'd': 35})
+ """
+ return self.update_with(lambda l, r: r, *maps)
+
+ def update_with(self, update_fn, *maps):
+ """
+        Return a new PMap with the items in the given Mappings inserted. If the same key is present in multiple
+        maps the values will be merged using update_fn going from left to right.
+
+ >>> from operator import add
+ >>> m1 = m(a=1, b=2)
+ >>> m1.update_with(add, m(a=2))
+ pmap({'b': 2, 'a': 3})
+
+ The reverse behaviour of the regular merge. Keep the leftmost element instead of the rightmost.
+
+ >>> m1 = m(a=1)
+ >>> m1.update_with(lambda l, r: l, m(a=2), {'a':3})
+ pmap({'a': 1})
+ """
+ evolver = self.evolver()
+ for map in maps:
+ for key, value in map.items():
+ evolver.set(key, update_fn(evolver[key], value) if key in evolver else value)
+
+ return evolver.persistent()
+
+ def __add__(self, other):
+ return self.update(other)
+
+ def __reduce__(self):
+ # Pickling support
+ return pmap, (dict(self),)
+
+ def transform(self, *transformations):
+ """
+ Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
+ consists of two parts. One match expression that specifies which elements to transform
+ and one transformation function that performs the actual transformation.
+
+ >>> from pyrsistent import freeze, ny
+ >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
+ ... {'author': 'Steve', 'content': 'A slightly longer article'}],
+ ... 'weather': {'temperature': '11C', 'wind': '5m/s'}})
+ >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
+ >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
+ >>> very_short_news.articles[0].content
+ 'A short article'
+ >>> very_short_news.articles[1].content
+ 'A slightly long...'
+
+ When nothing has been transformed the original data structure is kept
+
+ >>> short_news is news_paper
+ True
+ >>> very_short_news is news_paper
+ False
+ >>> very_short_news.articles[0] is news_paper.articles[0]
+ True
+ """
+ return transform(self, transformations)
+
+ def copy(self):
+ return self
+
+ class _Evolver(object):
+ __slots__ = ('_buckets_evolver', '_size', '_original_pmap')
+
+ def __init__(self, original_pmap):
+ self._original_pmap = original_pmap
+ self._buckets_evolver = original_pmap._buckets.evolver()
+ self._size = original_pmap._size
+
+ def __getitem__(self, key):
+ return PMap._getitem(self._buckets_evolver, key)
+
+ def __setitem__(self, key, val):
+ self.set(key, val)
+
+ def set(self, key, val):
+ if len(self._buckets_evolver) < 0.67 * self._size:
+ self._reallocate(2 * len(self._buckets_evolver))
+
+ kv = (key, val)
+ index, bucket = PMap._get_bucket(self._buckets_evolver, key)
+ if bucket:
+ for k, v in bucket:
+ if k == key:
+ if v is not val:
+ new_bucket = [(k2, v2) if k2 != k else (k2, val) for k2, v2 in bucket]
+ self._buckets_evolver[index] = new_bucket
+
+ return self
+
+ new_bucket = [kv]
+ new_bucket.extend(bucket)
+ self._buckets_evolver[index] = new_bucket
+ self._size += 1
+ else:
+ self._buckets_evolver[index] = [kv]
+ self._size += 1
+
+ return self
+
+ def _reallocate(self, new_size):
+ new_list = new_size * [None]
+ buckets = self._buckets_evolver.persistent()
+ for k, v in chain.from_iterable(x for x in buckets if x):
+ index = hash(k) % new_size
+ if new_list[index]:
+ new_list[index].append((k, v))
+ else:
+ new_list[index] = [(k, v)]
+
+ # A reallocation should always result in a dirty buckets evolver to avoid
+ # possible loss of elements when doing the reallocation.
+ self._buckets_evolver = pvector().evolver()
+ self._buckets_evolver.extend(new_list)
+
+ def is_dirty(self):
+ return self._buckets_evolver.is_dirty()
+
+ def persistent(self):
+ if self.is_dirty():
+ self._original_pmap = PMap(self._size, self._buckets_evolver.persistent())
+
+ return self._original_pmap
+
+ def __len__(self):
+ return self._size
+
+ def __contains__(self, key):
+ return PMap._contains(self._buckets_evolver, key)
+
+ def __delitem__(self, key):
+ self.remove(key)
+
+ def remove(self, key):
+ index, bucket = PMap._get_bucket(self._buckets_evolver, key)
+
+ if bucket:
+ new_bucket = [(k, v) for (k, v) in bucket if k != key]
+ if len(bucket) > len(new_bucket):
+ self._buckets_evolver[index] = new_bucket if new_bucket else None
+ self._size -= 1
+ return self
+
+ raise KeyError('{0}'.format(key))
+
+ def evolver(self):
+ """
+ Create a new evolver for this pmap. For a discussion on evolvers in general see the
+ documentation for the pvector evolver.
+
+ Create the evolver and perform various mutating updates to it:
+
+ >>> m1 = m(a=1, b=2)
+ >>> e = m1.evolver()
+ >>> e['c'] = 3
+ >>> len(e)
+ 3
+ >>> del e['a']
+
+ The underlying pmap remains the same:
+
+ >>> m1
+ pmap({'b': 2, 'a': 1})
+
+ The changes are kept in the evolver. An updated pmap can be created using the
+ persistent() function on the evolver.
+
+ >>> m2 = e.persistent()
+ >>> m2
+ pmap({'c': 3, 'b': 2})
+
+ The new pmap will share data with the original pmap in the same way that would have
+ been done if only using operations on the pmap.
+ """
+ return self._Evolver(self)
+
+Mapping.register(PMap)
+Hashable.register(PMap)
+
+
+def _turbo_mapping(initial, pre_size):
+ if pre_size:
+ size = pre_size
+ else:
+ try:
+ size = 2 * len(initial) or 8
+ except Exception:
+ # Guess we can't figure out the length. Give up on length hinting,
+ # we can always reallocate later.
+ size = 8
+
+ buckets = size * [None]
+
+ if not isinstance(initial, Mapping):
+ # Make a dictionary of the initial data if it isn't already,
+        # that will save us some work further down since we can assume no
+ # key collisions
+ initial = dict(initial)
+
+ for k, v in six.iteritems(initial):
+ h = hash(k)
+ index = h % size
+ bucket = buckets[index]
+
+ if bucket:
+ bucket.append((k, v))
+ else:
+ buckets[index] = [(k, v)]
+
+ return PMap(len(initial), pvector().extend(buckets))
+
+
+_EMPTY_PMAP = _turbo_mapping({}, 0)
+
+
+def pmap(initial={}, pre_size=0):
+ """
+    Creates a new persistent map and inserts all elements from initial into the newly created map.
+ The optional argument pre_size may be used to specify an initial size of the underlying bucket vector. This
+ may have a positive performance impact in the cases where you know beforehand that a large number of elements
+ will be inserted into the map eventually since it will reduce the number of reallocations required.
+
+ >>> pmap({'a': 13, 'b': 14})
+ pmap({'b': 14, 'a': 13})
+ """
+ if not initial:
+ return _EMPTY_PMAP
+
+ return _turbo_mapping(initial, pre_size)
+
+
+def m(**kwargs):
+ """
+    Creates a new persistent map. Inserts all keyword arguments into the newly created map.
+
+ >>> m(a=13, b=14)
+ pmap({'b': 14, 'a': 13})
+ """
+ return pmap(kwargs)
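For orientation, a small sketch of the PMap API defined above: every update returns a new map, and an evolver batches several updates into a single new pmap (illustrative only; the names colours, more and e are placeholders):

from pyrsistent import m, pmap

colours = m(red=1, green=2)
more = colours.set('blue', 3)            # returns a new map
assert 'blue' not in colours and more['blue'] == 3

e = colours.evolver()                    # mutable view with transaction-like semantics
e['yellow'] = 4
e.remove('red')
assert colours == m(red=1, green=2)      # the original map is untouched
assert e.persistent() == pmap({'green': 2, 'yellow': 4})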
diff --git a/third_party/python/pyrsistent/pyrsistent/_precord.py b/third_party/python/pyrsistent/pyrsistent/_precord.py
new file mode 100644
index 0000000000..ec8d32c3da
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_precord.py
@@ -0,0 +1,169 @@
+import six
+from pyrsistent._checked_types import CheckedType, _restore_pickle, InvariantException, store_invariants
+from pyrsistent._field_common import (
+ set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants
+)
+from pyrsistent._pmap import PMap, pmap
+
+
+class _PRecordMeta(type):
+ def __new__(mcs, name, bases, dct):
+ set_fields(dct, bases, name='_precord_fields')
+ store_invariants(dct, bases, '_precord_invariants', '__invariant__')
+
+ dct['_precord_mandatory_fields'] = \
+ set(name for name, field in dct['_precord_fields'].items() if field.mandatory)
+
+ dct['_precord_initial_values'] = \
+ dict((k, field.initial) for k, field in dct['_precord_fields'].items() if field.initial is not PFIELD_NO_INITIAL)
+
+
+ dct['__slots__'] = ()
+
+ return super(_PRecordMeta, mcs).__new__(mcs, name, bases, dct)
+
+
+@six.add_metaclass(_PRecordMeta)
+class PRecord(PMap, CheckedType):
+ """
+ A PRecord is a PMap with a fixed set of specified fields. Records are declared as python classes inheriting
+ from PRecord. Because it is a PMap it has full support for all Mapping methods such as iteration and element
+ access using subscript notation.
+
+    More documentation and examples of PRecord usage are available at https://github.com/tobgu/pyrsistent
+ """
+ def __new__(cls, **kwargs):
+ # Hack total! If these two special attributes exist that means we can create
+ # ourselves. Otherwise we need to go through the Evolver to create the structures
+ # for us.
+ if '_precord_size' in kwargs and '_precord_buckets' in kwargs:
+ return super(PRecord, cls).__new__(cls, kwargs['_precord_size'], kwargs['_precord_buckets'])
+
+ factory_fields = kwargs.pop('_factory_fields', None)
+ ignore_extra = kwargs.pop('_ignore_extra', False)
+
+ initial_values = kwargs
+ if cls._precord_initial_values:
+ initial_values = dict((k, v() if callable(v) else v)
+ for k, v in cls._precord_initial_values.items())
+ initial_values.update(kwargs)
+
+ e = _PRecordEvolver(cls, pmap(), _factory_fields=factory_fields, _ignore_extra=ignore_extra)
+ for k, v in initial_values.items():
+ e[k] = v
+
+ return e.persistent()
+
+ def set(self, *args, **kwargs):
+ """
+        Set a field in the record. This set function differs slightly from that in the PMap
+        class. First of all it accepts keyword arguments. Second it accepts multiple key-value
+        pairs to perform one atomic update of multiple fields.
+ """
+
+ # The PRecord set() can accept kwargs since all fields that have been declared are
+ # valid python identifiers. Also allow multiple fields to be set in one operation.
+ if args:
+ return super(PRecord, self).set(args[0], args[1])
+
+ return self.update(kwargs)
+
+ def evolver(self):
+ """
+ Returns an evolver of this object.
+ """
+ return _PRecordEvolver(self.__class__, self)
+
+ def __repr__(self):
+ return "{0}({1})".format(self.__class__.__name__,
+ ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self.items()))
+
+ @classmethod
+ def create(cls, kwargs, _factory_fields=None, ignore_extra=False):
+ """
+ Factory method. Will create a new PRecord of the current type and assign the values
+ specified in kwargs.
+
+ :param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not
+ in the set of fields on the PRecord.
+ """
+ if isinstance(kwargs, cls):
+ return kwargs
+
+ if ignore_extra:
+ kwargs = {k: kwargs[k] for k in cls._precord_fields if k in kwargs}
+
+ return cls(_factory_fields=_factory_fields, _ignore_extra=ignore_extra, **kwargs)
+
+ def __reduce__(self):
+ # Pickling support
+ return _restore_pickle, (self.__class__, dict(self),)
+
+ def serialize(self, format=None):
+ """
+ Serialize the current PRecord using custom serializer functions for fields where
+ such have been supplied.
+ """
+ return dict((k, serialize(self._precord_fields[k].serializer, format, v)) for k, v in self.items())
+
+
+class _PRecordEvolver(PMap._Evolver):
+ __slots__ = ('_destination_cls', '_invariant_error_codes', '_missing_fields', '_factory_fields', '_ignore_extra')
+
+ def __init__(self, cls, original_pmap, _factory_fields=None, _ignore_extra=False):
+ super(_PRecordEvolver, self).__init__(original_pmap)
+ self._destination_cls = cls
+ self._invariant_error_codes = []
+ self._missing_fields = []
+ self._factory_fields = _factory_fields
+ self._ignore_extra = _ignore_extra
+
+ def __setitem__(self, key, original_value):
+ self.set(key, original_value)
+
+ def set(self, key, original_value):
+ field = self._destination_cls._precord_fields.get(key)
+ if field:
+ if self._factory_fields is None or field in self._factory_fields:
+ try:
+ if is_field_ignore_extra_complaint(PRecord, field, self._ignore_extra):
+ value = field.factory(original_value, ignore_extra=self._ignore_extra)
+ else:
+ value = field.factory(original_value)
+ except InvariantException as e:
+ self._invariant_error_codes += e.invariant_errors
+ self._missing_fields += e.missing_fields
+ return self
+ else:
+ value = original_value
+
+ check_type(self._destination_cls, field, key, value)
+
+ is_ok, error_code = field.invariant(value)
+ if not is_ok:
+ self._invariant_error_codes.append(error_code)
+
+ return super(_PRecordEvolver, self).set(key, value)
+ else:
+ raise AttributeError("'{0}' is not among the specified fields for {1}".format(key, self._destination_cls.__name__))
+
+ def persistent(self):
+ cls = self._destination_cls
+ is_dirty = self.is_dirty()
+ pm = super(_PRecordEvolver, self).persistent()
+ if is_dirty or not isinstance(pm, cls):
+ result = cls(_precord_buckets=pm._buckets, _precord_size=pm._size)
+ else:
+ result = pm
+
+ if cls._precord_mandatory_fields:
+ self._missing_fields += tuple('{0}.{1}'.format(cls.__name__, f) for f
+ in (cls._precord_mandatory_fields - set(result.keys())))
+
+ if self._invariant_error_codes or self._missing_fields:
+ raise InvariantException(tuple(self._invariant_error_codes), tuple(self._missing_fields),
+ 'Field invariant failed')
+
+ check_global_invariants(result, cls._precord_invariants)
+
+ return result
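For orientation, a small sketch of declaring and using a PRecord as described above; it assumes pyrsistent's public field() helper (defined in _field_common, which is not part of this hunk), and the Point class is a placeholder:

from pyrsistent import PRecord, field

class Point(PRecord):
    x = field(type=int, mandatory=True)   # must be supplied at construction
    y = field(type=int, initial=0)        # defaults to 0

p = Point(x=1)
p2 = p.set(x=2, y=5)                      # one atomic multi-field update
assert (p.x, p.y) == (1, 0)
assert (p2.x, p2.y) == (2, 5)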
diff --git a/third_party/python/pyrsistent/pyrsistent/_pset.py b/third_party/python/pyrsistent/pyrsistent/_pset.py
new file mode 100644
index 0000000000..a972ec533b
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_pset.py
@@ -0,0 +1,229 @@
+from ._compat import Set, Hashable
+import sys
+from pyrsistent._pmap import pmap
+
+PY2 = sys.version_info[0] < 3
+
+
+class PSet(object):
+ """
+ Persistent set implementation. Built on top of the persistent map. The set supports all operations
+ in the Set protocol and is Hashable.
+
+ Do not instantiate directly, instead use the factory functions :py:func:`s` or :py:func:`pset`
+ to create an instance.
+
+ Random access and insert is log32(n) where n is the size of the set.
+
+ Some examples:
+
+ >>> s = pset([1, 2, 3, 1])
+ >>> s2 = s.add(4)
+ >>> s3 = s2.remove(2)
+ >>> s
+ pset([1, 2, 3])
+ >>> s2
+ pset([1, 2, 3, 4])
+ >>> s3
+ pset([1, 3, 4])
+ """
+ __slots__ = ('_map', '__weakref__')
+
+ def __new__(cls, m):
+ self = super(PSet, cls).__new__(cls)
+ self._map = m
+ return self
+
+ def __contains__(self, element):
+ return element in self._map
+
+ def __iter__(self):
+ return iter(self._map)
+
+ def __len__(self):
+ return len(self._map)
+
+ def __repr__(self):
+ if PY2 or not self:
+ return 'p' + str(set(self))
+
+ return 'pset([{0}])'.format(str(set(self))[1:-1])
+
+ def __str__(self):
+ return self.__repr__()
+
+ def __hash__(self):
+ return hash(self._map)
+
+ def __reduce__(self):
+ # Pickling support
+ return pset, (list(self),)
+
+ @classmethod
+ def _from_iterable(cls, it, pre_size=8):
+ return PSet(pmap(dict((k, True) for k in it), pre_size=pre_size))
+
+ def add(self, element):
+ """
+ Return a new PSet with element added
+
+ >>> s1 = s(1, 2)
+ >>> s1.add(3)
+ pset([1, 2, 3])
+ """
+ return self.evolver().add(element).persistent()
+
+ def update(self, iterable):
+ """
+ Return a new PSet with elements in iterable added
+
+ >>> s1 = s(1, 2)
+ >>> s1.update([3, 4, 4])
+ pset([1, 2, 3, 4])
+ """
+ e = self.evolver()
+ for element in iterable:
+ e.add(element)
+
+ return e.persistent()
+
+ def remove(self, element):
+ """
+ Return a new PSet with element removed. Raises KeyError if element is not present.
+
+ >>> s1 = s(1, 2)
+ >>> s1.remove(2)
+ pset([1])
+ """
+ if element in self._map:
+ return self.evolver().remove(element).persistent()
+
+ raise KeyError("Element '%s' not present in PSet" % element)
+
+ def discard(self, element):
+ """
+ Return a new PSet with element removed. Returns itself if element is not present.
+ """
+ if element in self._map:
+ return self.evolver().remove(element).persistent()
+
+ return self
+
+ class _Evolver(object):
+ __slots__ = ('_original_pset', '_pmap_evolver')
+
+ def __init__(self, original_pset):
+ self._original_pset = original_pset
+ self._pmap_evolver = original_pset._map.evolver()
+
+ def add(self, element):
+ self._pmap_evolver[element] = True
+ return self
+
+ def remove(self, element):
+ del self._pmap_evolver[element]
+ return self
+
+ def is_dirty(self):
+ return self._pmap_evolver.is_dirty()
+
+ def persistent(self):
+ if not self.is_dirty():
+ return self._original_pset
+
+ return PSet(self._pmap_evolver.persistent())
+
+ def __len__(self):
+ return len(self._pmap_evolver)
+
+ def copy(self):
+ return self
+
+ def evolver(self):
+ """
+ Create a new evolver for this pset. For a discussion on evolvers in general see the
+ documentation for the pvector evolver.
+
+ Create the evolver and perform various mutating updates to it:
+
+ >>> s1 = s(1, 2, 3)
+ >>> e = s1.evolver()
+ >>> _ = e.add(4)
+ >>> len(e)
+ 4
+ >>> _ = e.remove(1)
+
+ The underlying pset remains the same:
+
+ >>> s1
+ pset([1, 2, 3])
+
+        The changes are kept in the evolver. An updated pset can be created using the
+ persistent() function on the evolver.
+
+ >>> s2 = e.persistent()
+ >>> s2
+ pset([2, 3, 4])
+
+ The new pset will share data with the original pset in the same way that would have
+ been done if only using operations on the pset.
+ """
+ return PSet._Evolver(self)
+
+ # All the operations and comparisons you would expect on a set.
+ #
+ # This is not very beautiful. If we avoid inheriting from PSet we can use the
+ # __slots__ concepts (which requires a new style class) and hopefully save some memory.
+ __le__ = Set.__le__
+ __lt__ = Set.__lt__
+ __gt__ = Set.__gt__
+ __ge__ = Set.__ge__
+ __eq__ = Set.__eq__
+ __ne__ = Set.__ne__
+
+ __and__ = Set.__and__
+ __or__ = Set.__or__
+ __sub__ = Set.__sub__
+ __xor__ = Set.__xor__
+
+ issubset = __le__
+ issuperset = __ge__
+ union = __or__
+ intersection = __and__
+ difference = __sub__
+ symmetric_difference = __xor__
+
+ isdisjoint = Set.isdisjoint
+
+Set.register(PSet)
+Hashable.register(PSet)
+
+_EMPTY_PSET = PSet(pmap())
+
+
+def pset(iterable=(), pre_size=8):
+ """
+ Creates a persistent set from iterable. Optionally takes a sizing parameter equivalent to that
+ used for :py:func:`pmap`.
+
+ >>> s1 = pset([1, 2, 3, 2])
+ >>> s1
+ pset([1, 2, 3])
+ """
+ if not iterable:
+ return _EMPTY_PSET
+
+ return PSet._from_iterable(iterable, pre_size=pre_size)
+
+
+def s(*elements):
+ """
+ Create a persistent set.
+
+ Takes an arbitrary number of arguments to insert into the new set.
+
+ >>> s1 = s(1, 2, 3, 2)
+ >>> s1
+ pset([1, 2, 3])
+ """
+ return pset(elements)
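For orientation, a small sketch of the PSet API defined above, including the set operators borrowed from the Set ABC (illustrative only; the names low and more are placeholders):

from pyrsistent import s, pset

low = s(1, 2, 3)
more = low.add(4)                         # returns a new set
assert low == pset([1, 2, 3])             # the original set is untouched
assert more | s(5) == s(1, 2, 3, 4, 5)    # union via Set.__or__
assert more - s(1, 4) == s(2, 3)          # difference via Set.__sub__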
diff --git a/third_party/python/pyrsistent/pyrsistent/_pvector.py b/third_party/python/pyrsistent/pyrsistent/_pvector.py
new file mode 100644
index 0000000000..82232782b7
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_pvector.py
@@ -0,0 +1,713 @@
+from abc import abstractmethod, ABCMeta
+from ._compat import Sequence, Hashable
+from numbers import Integral
+import operator
+import six
+from pyrsistent._transformations import transform
+
+
+def _bitcount(val):
+ return bin(val).count("1")
+
+BRANCH_FACTOR = 32
+BIT_MASK = BRANCH_FACTOR - 1
+SHIFT = _bitcount(BIT_MASK)
+
+
+def compare_pvector(v, other, operator):
+ return operator(v.tolist(), other.tolist() if isinstance(other, PVector) else other)
+
+
+def _index_or_slice(index, stop):
+ if stop is None:
+ return index
+
+ return slice(index, stop)
+
+
+class PythonPVector(object):
+ """
+ Support structure for PVector that implements structural sharing for vectors using a trie.
+ """
+ __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '__weakref__')
+
+ def __new__(cls, count, shift, root, tail):
+ self = super(PythonPVector, cls).__new__(cls)
+ self._count = count
+ self._shift = shift
+ self._root = root
+ self._tail = tail
+
+ # Derived attribute stored for performance
+ self._tail_offset = self._count - len(self._tail)
+ return self
+
+ def __len__(self):
+ return self._count
+
+ def __getitem__(self, index):
+ if isinstance(index, slice):
+ # There are more conditions than the below where it would be OK to
+ # return ourselves, implement those...
+ if index.start is None and index.stop is None and index.step is None:
+ return self
+
+ # This is a bit nasty realizing the whole structure as a list before
+ # slicing it but it is the fastest way I've found to date, and it's easy :-)
+ return _EMPTY_PVECTOR.extend(self.tolist()[index])
+
+ if index < 0:
+ index += self._count
+
+ return PythonPVector._node_for(self, index)[index & BIT_MASK]
+
+ def __add__(self, other):
+ return self.extend(other)
+
+ def __repr__(self):
+ return 'pvector({0})'.format(str(self.tolist()))
+
+ def __str__(self):
+ return self.__repr__()
+
+ def __iter__(self):
+        # This is kind of lazy and will produce some memory overhead but it is the fastest method
+ # by far of those tried since it uses the speed of the built in python list directly.
+ return iter(self.tolist())
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __eq__(self, other):
+ return self is other or (hasattr(other, '__len__') and self._count == len(other)) and compare_pvector(self, other, operator.eq)
+
+ def __gt__(self, other):
+ return compare_pvector(self, other, operator.gt)
+
+ def __lt__(self, other):
+ return compare_pvector(self, other, operator.lt)
+
+ def __ge__(self, other):
+ return compare_pvector(self, other, operator.ge)
+
+ def __le__(self, other):
+ return compare_pvector(self, other, operator.le)
+
+ def __mul__(self, times):
+ if times <= 0 or self is _EMPTY_PVECTOR:
+ return _EMPTY_PVECTOR
+
+ if times == 1:
+ return self
+
+ return _EMPTY_PVECTOR.extend(times * self.tolist())
+
+ __rmul__ = __mul__
+
+ def _fill_list(self, node, shift, the_list):
+ if shift:
+ shift -= SHIFT
+ for n in node:
+ self._fill_list(n, shift, the_list)
+ else:
+ the_list.extend(node)
+
+ def tolist(self):
+ """
+ The fastest way to convert the vector into a python list.
+ """
+ the_list = []
+ self._fill_list(self._root, self._shift, the_list)
+ the_list.extend(self._tail)
+ return the_list
+
+ def _totuple(self):
+ """
+ Returns the content as a python tuple.
+ """
+ return tuple(self.tolist())
+
+ def __hash__(self):
+ # Taking the easy way out again...
+ return hash(self._totuple())
+
+ def transform(self, *transformations):
+ return transform(self, transformations)
+
+ def __reduce__(self):
+ # Pickling support
+ return pvector, (self.tolist(),)
+
+ def mset(self, *args):
+ if len(args) % 2:
+ raise TypeError("mset expected an even number of arguments")
+
+ evolver = self.evolver()
+ for i in range(0, len(args), 2):
+ evolver[args[i]] = args[i+1]
+
+ return evolver.persistent()
+
+ class Evolver(object):
+ __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '_dirty_nodes',
+ '_extra_tail', '_cached_leafs', '_orig_pvector')
+
+ def __init__(self, v):
+ self._reset(v)
+
+ def __getitem__(self, index):
+ if not isinstance(index, Integral):
+ raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
+
+ if index < 0:
+ index += self._count + len(self._extra_tail)
+
+ if self._count <= index < self._count + len(self._extra_tail):
+ return self._extra_tail[index - self._count]
+
+ return PythonPVector._node_for(self, index)[index & BIT_MASK]
+
+ def _reset(self, v):
+ self._count = v._count
+ self._shift = v._shift
+ self._root = v._root
+ self._tail = v._tail
+ self._tail_offset = v._tail_offset
+ self._dirty_nodes = {}
+ self._cached_leafs = {}
+ self._extra_tail = []
+ self._orig_pvector = v
+
+ def append(self, element):
+ self._extra_tail.append(element)
+ return self
+
+ def extend(self, iterable):
+ self._extra_tail.extend(iterable)
+ return self
+
+ def set(self, index, val):
+ self[index] = val
+ return self
+
+ def __setitem__(self, index, val):
+ if not isinstance(index, Integral):
+ raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
+
+ if index < 0:
+ index += self._count + len(self._extra_tail)
+
+ if 0 <= index < self._count:
+ node = self._cached_leafs.get(index >> SHIFT)
+ if node:
+ node[index & BIT_MASK] = val
+ elif index >= self._tail_offset:
+ if id(self._tail) not in self._dirty_nodes:
+ self._tail = list(self._tail)
+ self._dirty_nodes[id(self._tail)] = True
+ self._cached_leafs[index >> SHIFT] = self._tail
+ self._tail[index & BIT_MASK] = val
+ else:
+ self._root = self._do_set(self._shift, self._root, index, val)
+ elif self._count <= index < self._count + len(self._extra_tail):
+ self._extra_tail[index - self._count] = val
+ elif index == self._count + len(self._extra_tail):
+ self._extra_tail.append(val)
+ else:
+ raise IndexError("Index out of range: %s" % (index,))
+
+ def _do_set(self, level, node, i, val):
+ if id(node) in self._dirty_nodes:
+ ret = node
+ else:
+ ret = list(node)
+ self._dirty_nodes[id(ret)] = True
+
+ if level == 0:
+ ret[i & BIT_MASK] = val
+ self._cached_leafs[i >> SHIFT] = ret
+ else:
+ sub_index = (i >> level) & BIT_MASK # >>>
+ ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)
+
+ return ret
+
+ def delete(self, index):
+ del self[index]
+ return self
+
+ def __delitem__(self, key):
+ if self._orig_pvector:
+ # All structural sharing bets are off, base evolver on _extra_tail only
+ l = PythonPVector(self._count, self._shift, self._root, self._tail).tolist()
+ l.extend(self._extra_tail)
+ self._reset(_EMPTY_PVECTOR)
+ self._extra_tail = l
+
+ del self._extra_tail[key]
+
+ def persistent(self):
+ result = self._orig_pvector
+ if self.is_dirty():
+ result = PythonPVector(self._count, self._shift, self._root, self._tail).extend(self._extra_tail)
+ self._reset(result)
+
+ return result
+
+ def __len__(self):
+ return self._count + len(self._extra_tail)
+
+ def is_dirty(self):
+ return bool(self._dirty_nodes or self._extra_tail)
+
+ def evolver(self):
+ return PythonPVector.Evolver(self)
+
+ def set(self, i, val):
+ # This method could be implemented by a call to mset() but doing so would cause
+ # a ~5 X performance penalty on PyPy (considered the primary platform for this implementation
+ # of PVector) so we're keeping this implementation for now.
+
+ if not isinstance(i, Integral):
+ raise TypeError("'%s' object cannot be interpreted as an index" % type(i).__name__)
+
+ if i < 0:
+ i += self._count
+
+ if 0 <= i < self._count:
+ if i >= self._tail_offset:
+ new_tail = list(self._tail)
+ new_tail[i & BIT_MASK] = val
+ return PythonPVector(self._count, self._shift, self._root, new_tail)
+
+ return PythonPVector(self._count, self._shift, self._do_set(self._shift, self._root, i, val), self._tail)
+
+ if i == self._count:
+ return self.append(val)
+
+ raise IndexError("Index out of range: %s" % (i,))
+
+ def _do_set(self, level, node, i, val):
+ ret = list(node)
+ if level == 0:
+ ret[i & BIT_MASK] = val
+ else:
+ sub_index = (i >> level) & BIT_MASK # >>>
+ ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)
+
+ return ret
+
+ @staticmethod
+ def _node_for(pvector_like, i):
+ if 0 <= i < pvector_like._count:
+ if i >= pvector_like._tail_offset:
+ return pvector_like._tail
+
+ node = pvector_like._root
+ for level in range(pvector_like._shift, 0, -SHIFT):
+ node = node[(i >> level) & BIT_MASK] # >>>
+
+ return node
+
+ raise IndexError("Index out of range: %s" % (i,))
+
+ def _create_new_root(self):
+ new_shift = self._shift
+
+ # Overflow root?
+ if (self._count >> SHIFT) > (1 << self._shift): # >>>
+ new_root = [self._root, self._new_path(self._shift, self._tail)]
+ new_shift += SHIFT
+ else:
+ new_root = self._push_tail(self._shift, self._root, self._tail)
+
+ return new_root, new_shift
+
+ def append(self, val):
+ if len(self._tail) < BRANCH_FACTOR:
+ new_tail = list(self._tail)
+ new_tail.append(val)
+ return PythonPVector(self._count + 1, self._shift, self._root, new_tail)
+
+ # Full tail, push into tree
+ new_root, new_shift = self._create_new_root()
+ return PythonPVector(self._count + 1, new_shift, new_root, [val])
+
+ def _new_path(self, level, node):
+ if level == 0:
+ return node
+
+ return [self._new_path(level - SHIFT, node)]
+
+ def _mutating_insert_tail(self):
+ self._root, self._shift = self._create_new_root()
+ self._tail = []
+
+ def _mutating_fill_tail(self, offset, sequence):
+ max_delta_len = BRANCH_FACTOR - len(self._tail)
+ delta = sequence[offset:offset + max_delta_len]
+ self._tail.extend(delta)
+ delta_len = len(delta)
+ self._count += delta_len
+ return offset + delta_len
+
+ def _mutating_extend(self, sequence):
+ offset = 0
+ sequence_len = len(sequence)
+ while offset < sequence_len:
+ offset = self._mutating_fill_tail(offset, sequence)
+ if len(self._tail) == BRANCH_FACTOR:
+ self._mutating_insert_tail()
+
+ self._tail_offset = self._count - len(self._tail)
+
+ def extend(self, obj):
+ # Mutates the new vector directly for efficiency but that's only an
+ # implementation detail, once it is returned it should be considered immutable
+ l = obj.tolist() if isinstance(obj, PythonPVector) else list(obj)
+ if l:
+ new_vector = self.append(l[0])
+ new_vector._mutating_extend(l[1:])
+ return new_vector
+
+ return self
+
+ def _push_tail(self, level, parent, tail_node):
+ """
+ if parent is leaf, insert node,
+ else does it map to an existing child? ->
+ node_to_insert = push node one more level
+ else alloc new path
+
+ return node_to_insert placed in copy of parent
+ """
+ ret = list(parent)
+
+ if level == SHIFT:
+ ret.append(tail_node)
+ return ret
+
+ sub_index = ((self._count - 1) >> level) & BIT_MASK # >>>
+ if len(parent) > sub_index:
+ ret[sub_index] = self._push_tail(level - SHIFT, parent[sub_index], tail_node)
+ return ret
+
+ ret.append(self._new_path(level - SHIFT, tail_node))
+ return ret
+
+ def index(self, value, *args, **kwargs):
+ return self.tolist().index(value, *args, **kwargs)
+
+ def count(self, value):
+ return self.tolist().count(value)
+
+ def delete(self, index, stop=None):
+ l = self.tolist()
+ del l[_index_or_slice(index, stop)]
+ return _EMPTY_PVECTOR.extend(l)
+
+ def remove(self, value):
+ l = self.tolist()
+ l.remove(value)
+ return _EMPTY_PVECTOR.extend(l)
+
+@six.add_metaclass(ABCMeta)
+class PVector(object):
+ """
+ Persistent vector implementation. Meant as a replacement for the cases where you would normally
+ use a Python list.
+
+ Do not instantiate directly, instead use the factory functions :py:func:`v` and :py:func:`pvector` to
+ create an instance.
+
+ Heavily influenced by the persistent vector available in Clojure. Initially this was more or
+ less just a port of the Java code for the Clojure vector. It has since been modified and to
+ some extent optimized for usage in Python.
+
+ The vector is organized as a trie, any mutating method will return a new vector that contains the changes. No
+    updates are done to the original vector. Structural sharing between vectors is applied where possible to save
+ space and to avoid making complete copies.
+
+    This structure corresponds most closely to the built in list type and is intended as a replacement. Where the
+    semantics are (more or less) the same, the same function names have been used, but in some cases that is not possible,
+    for example for assignment.
+
+ The PVector implements the Sequence protocol and is Hashable.
+
+ Inserts are amortized O(1). Random access is log32(n) where n is the size of the vector.
+
+ The following are examples of some common operations on persistent vectors:
+
+ >>> p = v(1, 2, 3)
+ >>> p2 = p.append(4)
+ >>> p3 = p2.extend([5, 6, 7])
+ >>> p
+ pvector([1, 2, 3])
+ >>> p2
+ pvector([1, 2, 3, 4])
+ >>> p3
+ pvector([1, 2, 3, 4, 5, 6, 7])
+ >>> p3[5]
+ 6
+ >>> p.set(1, 99)
+ pvector([1, 99, 3])
+ >>>
+ """
+
+ @abstractmethod
+ def __len__(self):
+ """
+ >>> len(v(1, 2, 3))
+ 3
+ """
+
+ @abstractmethod
+ def __getitem__(self, index):
+ """
+ Get value at index. Full slicing support.
+
+ >>> v1 = v(5, 6, 7, 8)
+ >>> v1[2]
+ 7
+ >>> v1[1:3]
+ pvector([6, 7])
+ """
+
+ @abstractmethod
+ def __add__(self, other):
+ """
+ >>> v1 = v(1, 2)
+ >>> v2 = v(3, 4)
+ >>> v1 + v2
+ pvector([1, 2, 3, 4])
+ """
+
+ @abstractmethod
+ def __mul__(self, times):
+ """
+ >>> v1 = v(1, 2)
+ >>> 3 * v1
+ pvector([1, 2, 1, 2, 1, 2])
+ """
+
+ @abstractmethod
+ def __hash__(self):
+ """
+ >>> v1 = v(1, 2, 3)
+ >>> v2 = v(1, 2, 3)
+ >>> hash(v1) == hash(v2)
+ True
+ """
+
+ @abstractmethod
+ def evolver(self):
+ """
+ Create a new evolver for this pvector. The evolver acts as a mutable view of the vector
+ with "transaction like" semantics. No part of the underlying vector i updated, it is still
+ fully immutable. Furthermore multiple evolvers created from the same pvector do not
+ interfere with each other.
+
+ You may want to use an evolver instead of working directly with the pvector in the
+ following cases:
+
+        * Multiple updates are done to the same vector and the intermediate results are of no
+          interest. In this case using an evolver may be more efficient and easier to work with.
+ * You need to pass a vector into a legacy function or a function that you have no control
+ over which performs in place mutations of lists. In this case pass an evolver instance
+ instead and then create a new pvector from the evolver once the function returns.
+
+ The following example illustrates a typical workflow when working with evolvers. It also
+        displays most of the API (which is kept small by design, you should not be tempted to
+ use evolvers in excess ;-)).
+
+ Create the evolver and perform various mutating updates to it:
+
+ >>> v1 = v(1, 2, 3, 4, 5)
+ >>> e = v1.evolver()
+ >>> e[1] = 22
+ >>> _ = e.append(6)
+ >>> _ = e.extend([7, 8, 9])
+ >>> e[8] += 1
+ >>> len(e)
+ 9
+
+ The underlying pvector remains the same:
+
+ >>> v1
+ pvector([1, 2, 3, 4, 5])
+
+ The changes are kept in the evolver. An updated pvector can be created using the
+ persistent() function on the evolver.
+
+ >>> v2 = e.persistent()
+ >>> v2
+ pvector([1, 22, 3, 4, 5, 6, 7, 8, 10])
+
+ The new pvector will share data with the original pvector in the same way that would have
+ been done if only using operations on the pvector.
+ """
+
+ @abstractmethod
+ def mset(self, *args):
+ """
+ Return a new vector with elements in specified positions replaced by values (multi set).
+
+ Elements on even positions in the argument list are interpreted as indexes while
+ elements on odd positions are considered values.
+
+ >>> v1 = v(1, 2, 3)
+ >>> v1.mset(0, 11, 2, 33)
+ pvector([11, 2, 33])
+ """
+
+ @abstractmethod
+ def set(self, i, val):
+ """
+ Return a new vector with element at position i replaced with val. The original vector remains unchanged.
+
+ Setting a value one step beyond the end of the vector is equal to appending. Setting beyond that will
+ result in an IndexError.
+
+ >>> v1 = v(1, 2, 3)
+ >>> v1.set(1, 4)
+ pvector([1, 4, 3])
+ >>> v1.set(3, 4)
+ pvector([1, 2, 3, 4])
+ >>> v1.set(-1, 4)
+ pvector([1, 2, 4])
+ """
+
+ @abstractmethod
+ def append(self, val):
+ """
+ Return a new vector with val appended.
+
+ >>> v1 = v(1, 2)
+ >>> v1.append(3)
+ pvector([1, 2, 3])
+ """
+
+ @abstractmethod
+ def extend(self, obj):
+ """
+ Return a new vector with all values in obj appended to it. Obj may be another
+ PVector or any other Iterable.
+
+ >>> v1 = v(1, 2, 3)
+ >>> v1.extend([4, 5])
+ pvector([1, 2, 3, 4, 5])
+ """
+
+ @abstractmethod
+ def index(self, value, *args, **kwargs):
+ """
+ Return first index of value. Additional indexes may be supplied to limit the search to a
+ sub range of the vector.
+
+ >>> v1 = v(1, 2, 3, 4, 3)
+ >>> v1.index(3)
+ 2
+ >>> v1.index(3, 3, 5)
+ 4
+ """
+
+ @abstractmethod
+ def count(self, value):
+ """
+ Return the number of times that value appears in the vector.
+
+ >>> v1 = v(1, 4, 3, 4)
+ >>> v1.count(4)
+ 2
+ """
+
+ @abstractmethod
+ def transform(self, *transformations):
+ """
+ Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
+ consists of two parts. One match expression that specifies which elements to transform
+ and one transformation function that performs the actual transformation.
+
+ >>> from pyrsistent import freeze, ny
+ >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
+ ... {'author': 'Steve', 'content': 'A slightly longer article'}],
+ ... 'weather': {'temperature': '11C', 'wind': '5m/s'}})
+ >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
+ >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
+ >>> very_short_news.articles[0].content
+ 'A short article'
+ >>> very_short_news.articles[1].content
+ 'A slightly long...'
+
+ When nothing has been transformed the original data structure is kept
+
+ >>> short_news is news_paper
+ True
+ >>> very_short_news is news_paper
+ False
+ >>> very_short_news.articles[0] is news_paper.articles[0]
+ True
+ """
+
+ @abstractmethod
+ def delete(self, index, stop=None):
+ """
+ Delete a portion of the vector by index or range.
+
+ >>> v1 = v(1, 2, 3, 4, 5)
+ >>> v1.delete(1)
+ pvector([1, 3, 4, 5])
+ >>> v1.delete(1, 3)
+ pvector([1, 4, 5])
+ """
+
+ @abstractmethod
+ def remove(self, value):
+ """
+ Remove the first occurrence of a value from the vector.
+
+ >>> v1 = v(1, 2, 3, 2, 1)
+ >>> v2 = v1.remove(1)
+ >>> v2
+ pvector([2, 3, 2, 1])
+ >>> v2.remove(1)
+ pvector([2, 3, 2])
+ """
+
+
+_EMPTY_PVECTOR = PythonPVector(0, SHIFT, [], [])
+PVector.register(PythonPVector)
+Sequence.register(PVector)
+Hashable.register(PVector)
+
+def python_pvector(iterable=()):
+ """
+ Create a new persistent vector containing the elements in iterable.
+
+ >>> v1 = pvector([1, 2, 3])
+ >>> v1
+ pvector([1, 2, 3])
+ """
+ return _EMPTY_PVECTOR.extend(iterable)
+
+try:
+ # Use the C extension as underlying trie implementation if it is available
+ import os
+ if os.environ.get('PYRSISTENT_NO_C_EXTENSION'):
+ pvector = python_pvector
+ else:
+ from pvectorc import pvector
+ PVector.register(type(pvector()))
+except ImportError:
+ pvector = python_pvector
+
+
+def v(*elements):
+ """
+ Create a new persistent vector containing all parameters to this function.
+
+ >>> v1 = v(1, 2, 3)
+ >>> v1
+ pvector([1, 2, 3])
+ """
+ return pvector(elements)
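To make the trie arithmetic above concrete, a standalone sketch of how an index is split into 5-bit digits when descending from the root, mirroring the lookup loop in _node_for (the helper path_for and the example values are illustrative only and ignore the tail optimization):

BRANCH_FACTOR = 32
BIT_MASK = BRANCH_FACTOR - 1        # 0b11111
SHIFT = 5                           # bits consumed per trie level

def path_for(index, shift):
    # Child slot chosen at each level, root level first.
    return [(index >> level) & BIT_MASK for level in range(shift, -1, -SHIFT)]

# With the root at shift 10 (three levels), index 1000 walks slots 0, 31 and 8,
# since 1000 == 0 * 32 * 32 + 31 * 32 + 8.
assert path_for(1000, 10) == [0, 31, 8]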
diff --git a/third_party/python/pyrsistent/pyrsistent/_toolz.py b/third_party/python/pyrsistent/pyrsistent/_toolz.py
new file mode 100644
index 0000000000..6643ee860d
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_toolz.py
@@ -0,0 +1,83 @@
+"""
+Functionality copied from the toolz package to avoid having
+to add toolz as a dependency.
+
+See https://github.com/pytoolz/toolz/.
+
+toolz is released under BSD licence. Below is the licence text
+from toolz as it appeared when copying the code.
+
+--------------------------------------------------------------
+
+Copyright (c) 2013 Matthew Rocklin
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ a. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ b. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ c. Neither the name of toolz nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGE.
+"""
+import operator
+from six.moves import reduce
+
+
+def get_in(keys, coll, default=None, no_default=False):
+ """
+ NB: This is a straight copy of the get_in implementation found in
+ the toolz library (https://github.com/pytoolz/toolz/). It works
+ with persistent data structures as well as the corresponding
+ datastructures from the stdlib.
+
+ Returns coll[i0][i1]...[iX] where [i0, i1, ..., iX]==keys.
+
+ If coll[i0][i1]...[iX] cannot be found, returns ``default``, unless
+ ``no_default`` is specified, then it raises KeyError or IndexError.
+
+ ``get_in`` is a generalization of ``operator.getitem`` for nested data
+ structures such as dictionaries and lists.
+ >>> from pyrsistent import freeze
+ >>> transaction = freeze({'name': 'Alice',
+ ... 'purchase': {'items': ['Apple', 'Orange'],
+ ... 'costs': [0.50, 1.25]},
+ ... 'credit card': '5555-1234-1234-1234'})
+ >>> get_in(['purchase', 'items', 0], transaction)
+ 'Apple'
+ >>> get_in(['name'], transaction)
+ 'Alice'
+ >>> get_in(['purchase', 'total'], transaction)
+ >>> get_in(['purchase', 'items', 'apple'], transaction)
+ >>> get_in(['purchase', 'items', 10], transaction)
+ >>> get_in(['purchase', 'total'], transaction, 0)
+ 0
+ >>> get_in(['y'], {}, no_default=True)
+ Traceback (most recent call last):
+ ...
+ KeyError: 'y'
+ """
+ try:
+ return reduce(operator.getitem, keys, coll)
+ except (KeyError, IndexError, TypeError):
+ if no_default:
+ raise
+ return default \ No newline at end of file
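For orientation, a quick sketch of get_in (re-exported from the package root) on a plain stdlib structure, showing the default fallback when a path cannot be resolved; the config dict is an illustrative placeholder:

from pyrsistent import get_in

config = {'servers': [{'host': 'a.example', 'port': 80}]}

assert get_in(['servers', 0, 'port'], config) == 80
# The IndexError raised for the missing element is swallowed and the default returned.
assert get_in(['servers', 1, 'port'], config, default=8080) == 8080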
diff --git a/third_party/python/pyrsistent/pyrsistent/_transformations.py b/third_party/python/pyrsistent/pyrsistent/_transformations.py
new file mode 100644
index 0000000000..612098969b
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/_transformations.py
@@ -0,0 +1,143 @@
+import re
+import six
+try:
+ from inspect import Parameter, signature
+except ImportError:
+ signature = None
+ try:
+ from inspect import getfullargspec as getargspec
+ except ImportError:
+ from inspect import getargspec
+
+
+_EMPTY_SENTINEL = object()
+
+
+def inc(x):
+ """ Add one to the current value """
+ return x + 1
+
+
+def dec(x):
+ """ Subtract one from the current value """
+ return x - 1
+
+
+def discard(evolver, key):
+ """ Discard the element and returns a structure without the discarded elements """
+ try:
+ del evolver[key]
+ except KeyError:
+ pass
+
+
+# Matchers
+def rex(expr):
+ """ Regular expression matcher to use together with transform functions """
+ r = re.compile(expr)
+ return lambda key: isinstance(key, six.string_types) and r.match(key)
+
+
+def ny(_):
+ """ Matcher that matches any value """
+ return True
+
+
+# Support functions
+def _chunks(l, n):
+ for i in range(0, len(l), n):
+ yield l[i:i + n]
+
+
+def transform(structure, transformations):
+ r = structure
+ for path, command in _chunks(transformations, 2):
+ r = _do_to_path(r, path, command)
+ return r
+
+
+def _do_to_path(structure, path, command):
+ if not path:
+ return command(structure) if callable(command) else command
+
+ kvs = _get_keys_and_values(structure, path[0])
+ return _update_structure(structure, kvs, path[1:], command)
+
+
+def _items(structure):
+ try:
+ return structure.items()
+ except AttributeError:
+ # Support wider range of structures by adding a transform_items() or similar?
+ return list(enumerate(structure))
+
+
+def _get(structure, key, default):
+ try:
+ if hasattr(structure, '__getitem__'):
+ return structure[key]
+
+ return getattr(structure, key)
+
+ except (IndexError, KeyError):
+ return default
+
+
+def _get_keys_and_values(structure, key_spec):
+ if callable(key_spec):
+ # Support predicates as callable objects in the path
+ arity = _get_arity(key_spec)
+ if arity == 1:
+ # Unary predicates are called with the "key" of the path
+ # - eg a key in a mapping, an index in a sequence.
+ return [(k, v) for k, v in _items(structure) if key_spec(k)]
+ elif arity == 2:
+ # Binary predicates are called with the key and the corresponding
+ # value.
+ return [(k, v) for k, v in _items(structure) if key_spec(k, v)]
+ else:
+ # Other arities are an error.
+ raise ValueError(
+ "callable in transform path must take 1 or 2 arguments"
+ )
+
+ # Non-callables are used as-is as a key.
+ return [(key_spec, _get(structure, key_spec, _EMPTY_SENTINEL))]
+
+
+if signature is None:
+ def _get_arity(f):
+ argspec = getargspec(f)
+ return len(argspec.args) - len(argspec.defaults or ())
+else:
+ def _get_arity(f):
+ return sum(
+ 1
+ for p
+ in signature(f).parameters.values()
+ if p.default is Parameter.empty
+ and p.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)
+ )
+
+
+def _update_structure(structure, kvs, path, command):
+ from pyrsistent._pmap import pmap
+ e = structure.evolver()
+ if not path and command is discard:
+ # Do this in reverse to avoid index problems with vectors. See #92.
+ for k, v in reversed(kvs):
+ discard(e, k)
+ else:
+ for k, v in kvs:
+ is_empty = False
+ if v is _EMPTY_SENTINEL:
+ # Allow expansion of structure but make sure to cover the case
+ # when an empty pmap is added as leaf node. See #154.
+ is_empty = True
+ v = pmap()
+
+ result = _do_to_path(v, path, command)
+ if result is not v or is_empty:
+ e[k] = result
+
+ return e.persistent()
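For orientation, a short sketch of the transform machinery above used through the public helpers, combining a plain path with a leaf callable and a rex matcher with the discard command (the data and key names are illustrative only):

from pyrsistent import freeze, rex, discard

data = freeze({'counters': {'hits': 1, 'misses': 0},
               'debug_tmp': 'scratch'})

bumped = data.transform(['counters', 'hits'], lambda n: n + 1)
cleaned = bumped.transform([rex('^debug_')], discard)

assert bumped.counters.hits == 2
assert 'debug_tmp' not in cleaned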
diff --git a/third_party/python/pyrsistent/pyrsistent/py.typed b/third_party/python/pyrsistent/pyrsistent/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/py.typed
diff --git a/third_party/python/pyrsistent/pyrsistent/typing.py b/third_party/python/pyrsistent/pyrsistent/typing.py
new file mode 100644
index 0000000000..6a86c831ba
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/typing.py
@@ -0,0 +1,80 @@
+"""Helpers for use with type annotation.
+
+Use the empty classes in this module when annotating the types of Pyrsistent
+objects, instead of using the actual collection class.
+
+For example,
+
+ from pyrsistent import pvector
+ from pyrsistent.typing import PVector
+
+ myvector: PVector[str] = pvector(['a', 'b', 'c'])
+
+"""
+from __future__ import absolute_import
+
+try:
+ from typing import Container
+ from typing import Hashable
+ from typing import Generic
+ from typing import Iterable
+ from typing import Mapping
+ from typing import Sequence
+ from typing import Sized
+ from typing import TypeVar
+
+ __all__ = [
+ 'CheckedPMap',
+ 'CheckedPSet',
+ 'CheckedPVector',
+ 'PBag',
+ 'PDeque',
+ 'PList',
+ 'PMap',
+ 'PSet',
+ 'PVector',
+ ]
+
+ T = TypeVar('T')
+ KT = TypeVar('KT')
+ VT = TypeVar('VT')
+
+ class CheckedPMap(Mapping[KT, VT], Hashable):
+ pass
+
+ # PSet.add and PSet.discard have different type signatures than that of Set.
+ class CheckedPSet(Generic[T], Hashable):
+ pass
+
+ class CheckedPVector(Sequence[T], Hashable):
+ pass
+
+ class PBag(Container[T], Iterable[T], Sized, Hashable):
+ pass
+
+ class PDeque(Sequence[T], Hashable):
+ pass
+
+ class PList(Sequence[T], Hashable):
+ pass
+
+ class PMap(Mapping[KT, VT], Hashable):
+ pass
+
+ # PSet.add and PSet.discard have different type signatures than that of Set.
+ class PSet(Generic[T], Hashable):
+ pass
+
+ class PVector(Sequence[T], Hashable):
+ pass
+
+ class PVectorEvolver(Generic[T]):
+ pass
+
+ class PMapEvolver(Generic[KT, VT]):
+ pass
+
+ class PSetEvolver(Generic[T]):
+ pass
+except ImportError:
+ pass
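For orientation, a brief annotation sketch in the spirit of the module docstring, pairing the typing helpers with the runtime factories (checked statically by a tool such as mypy; the variable names are placeholders):

from pyrsistent import pmap, pvector
from pyrsistent.typing import PMap, PVector

scores: PMap[str, int] = pmap({'alice': 3, 'bob': 5})
names: PVector[str] = pvector(sorted(scores))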
diff --git a/third_party/python/pyrsistent/pyrsistent/typing.pyi b/third_party/python/pyrsistent/pyrsistent/typing.pyi
new file mode 100644
index 0000000000..0221c48cc9
--- /dev/null
+++ b/third_party/python/pyrsistent/pyrsistent/typing.pyi
@@ -0,0 +1,292 @@
+# flake8: noqa: E704
+# from https://gist.github.com/WuTheFWasThat/091a17d4b5cab597dfd5d4c2d96faf09
+# Stubs for pyrsistent (Python 3.6)
+#
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Generic
+from typing import Hashable
+from typing import Iterator
+from typing import Iterable
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Sequence
+from typing import AbstractSet
+from typing import Sized
+from typing import Set
+from typing import Tuple
+from typing import TypeVar
+from typing import Type
+from typing import Union
+from typing import overload
+
+T = TypeVar('T')
+KT = TypeVar('KT')
+VT = TypeVar('VT')
+
+
+class PMap(Mapping[KT, VT], Hashable):
+ def __add__(self, other: PMap[KT, VT]) -> PMap[KT, VT]: ...
+ def __getitem__(self, key: KT) -> VT: ...
+ def __getattr__(self, key: str) -> VT: ...
+ def __hash__(self) -> int: ...
+ def __iter__(self) -> Iterator[KT]: ...
+ def __len__(self) -> int: ...
+ def copy(self) -> PMap[KT, VT]: ...
+ def discard(self, key: KT) -> PMap[KT, VT]: ...
+ def evolver(self) -> PMapEvolver[KT, VT]: ...
+ def iteritems(self) -> Iterable[Tuple[KT, VT]]: ...
+ def iterkeys(self) -> Iterable[KT]: ...
+ def itervalues(self) -> Iterable[VT]: ...
+ def remove(self, key: KT) -> PMap[KT, VT]: ...
+ def set(self, key: KT, val: VT) -> PMap[KT, VT]: ...
+ def transform(self, *transformations: Any) -> PMap[KT, VT]: ...
+ def update(self, *args: Mapping): ...
+ def update_with(self, update_fn: Callable[[VT, VT], VT], *args: Mapping) -> Any: ...
+
+
+class PMapEvolver(Generic[KT, VT]):
+ def __delitem__(self, key: KT) -> None: ...
+ def __getitem__(self, key: KT) -> VT: ...
+ def __len__(self) -> int: ...
+ def __setitem__(self, key: KT, val: VT) -> None: ...
+ def is_dirty(self) -> bool: ...
+ def persistent(self) -> PMap[KT, VT]: ...
+ def remove(self, key: KT) -> PMapEvolver[KT, VT]: ...
+ def set(self, key: KT, val: VT) -> PMapEvolver[KT, VT]: ...
+
+
+class PVector(Sequence[T], Hashable):
+ def __add__(self, other: PVector[T]) -> PVector[T]: ...
+ @overload
+ def __getitem__(self, index: int) -> T: ...
+ @overload
+ def __getitem__(self, index: slice) -> PVector[T]: ...
+ def __hash__(self) -> int: ...
+ def __len__(self) -> int: ...
+ def __mul__(self, other: PVector[T]) -> PVector[T]: ...
+ def append(self, val: T) -> PVector[T]: ...
+ def delete(self, index: int, stop: Optional[int]) -> PVector[T]: ...
+ def evolver(self) -> PVectorEvolver[T]: ...
+ def extend(self, obj: Iterable[T]) -> PVector[T]: ...
+ def tolist(self) -> List[T]: ...
+ def mset(self, *args: Iterable[Union[T, int]]) -> PVector[T]: ...
+ def remove(self, value: T) -> PVector[T]: ...
+ # Not compatible with MutableSequence
+ def set(self, i: int, val: T) -> PVector[T]: ...
+ def transform(self, *transformations: Any) -> PVector[T]: ...
+
+
+class PVectorEvolver(Sequence[T], Sized):
+ def __delitem__(self, i: Union[int, slice]) -> None: ...
+ @overload
+ def __getitem__(self, index: int) -> T: ...
+ # Not actually supported
+ @overload
+ def __getitem__(self, index: slice) -> PVectorEvolver[T]: ...
+ def __len__(self) -> int: ...
+ def __setitem__(self, index: int, val: T) -> None: ...
+ def append(self, val: T) -> PVectorEvolver[T]: ...
+ def delete(self, value: T) -> PVectorEvolver[T]: ...
+ def extend(self, obj: Iterable[T]) -> PVectorEvolver[T]: ...
+ def is_dirty(self) -> bool: ...
+ def persistent(self) -> PVector[T]: ...
+ def set(self, i: int, val: T) -> PVectorEvolver[T]: ...
+
+
+class PSet(AbstractSet[T], Hashable):
+ def __contains__(self, element: object) -> bool: ...
+ def __hash__(self) -> int: ...
+ def __iter__(self) -> Iterator[T]: ...
+ def __len__(self) -> int: ...
+ def add(self, element: T) -> PSet[T]: ...
+ def copy(self) -> PSet[T]: ...
+ def difference(self, iterable: Iterable) -> PSet[T]: ...
+ def discard(self, element: T) -> PSet[T]: ...
+ def evolver(self) -> PSetEvolver[T]: ...
+ def intersection(self, iterable: Iterable) -> PSet[T]: ...
+ def issubset(self, iterable: Iterable) -> bool: ...
+ def issuperset(self, iterable: Iterable) -> bool: ...
+ def remove(self, element: T) -> PSet[T]: ...
+ def symmetric_difference(self, iterable: Iterable[T]) -> PSet[T]: ...
+ def union(self, iterable: Iterable[T]) -> PSet[T]: ...
+ def update(self, iterable: Iterable[T]) -> PSet[T]: ...
+
+
+class PSetEvolver(Generic[T], Sized):
+ def __len__(self) -> int: ...
+ def add(self, element: T) -> PSetEvolver[T]: ...
+ def is_dirty(self) -> bool: ...
+ def persistent(self) -> PSet[T]: ...
+ def remove(self, element: T) -> PSetEvolver[T]: ...
+
+
+class PBag(Generic[T], Sized, Hashable):
+ def __add__(self, other: PBag[T]) -> PBag[T]: ...
+ def __and__(self, other: PBag[T]) -> PBag[T]: ...
+ def __contains__(self, elem: object) -> bool: ...
+ def __hash__(self) -> int: ...
+ def __iter__(self) -> Iterator[T]: ...
+ def __len__(self) -> int: ...
+ def __or__(self, other: PBag[T]) -> PBag[T]: ...
+ def __sub__(self, other: PBag[T]) -> PBag[T]: ...
+ def add(self, elem: T) -> PBag[T]: ...
+ def count(self, elem: T) -> int: ...
+ def remove(self, elem: T) -> PBag[T]: ...
+ def update(self, iterable: Iterable[T]) -> PBag[T]: ...
+
+
+class PDeque(Sequence[T], Hashable):
+ @overload
+ def __getitem__(self, index: int) -> T: ...
+ @overload
+ def __getitem__(self, index: slice) -> PDeque[T]: ...
+ def __hash__(self) -> int: ...
+ def __len__(self) -> int: ...
+ def __lt__(self, other: PDeque[T]) -> bool: ...
+ def append(self, elem: T) -> PDeque[T]: ...
+ def appendleft(self, elem: T) -> PDeque[T]: ...
+ def extend(self, iterable: Iterable[T]) -> PDeque[T]: ...
+ def extendleft(self, iterable: Iterable[T]) -> PDeque[T]: ...
+ @property
+ def left(self) -> T: ...
+ # The real return type is Integral according to what pyrsistent
+ # checks at runtime but mypy doesn't deal in numeric.*:
+ # https://github.com/python/mypy/issues/2636
+ @property
+ def maxlen(self) -> int: ...
+ def pop(self, count: int = 1) -> PDeque[T]: ...
+ def popleft(self, count: int = 1) -> PDeque[T]: ...
+ def remove(self, elem: T) -> PDeque[T]: ...
+ def reverse(self) -> PDeque[T]: ...
+ @property
+ def right(self) -> T: ...
+ def rotate(self, steps: int) -> PDeque[T]: ...
+
+
+class PList(Sequence[T], Hashable):
+ @overload
+ def __getitem__(self, index: int) -> T: ...
+ @overload
+ def __getitem__(self, index: slice) -> PList[T]: ...
+ def __hash__(self) -> int: ...
+ def __len__(self) -> int: ...
+ def __lt__(self, other: PList[T]) -> bool: ...
+ def __gt__(self, other: PList[T]) -> bool: ...
+ def cons(self, elem: T) -> PList[T]: ...
+ @property
+ def first(self) -> T: ...
+ def mcons(self, iterable: Iterable[T]) -> PList[T]: ...
+ def remove(self, elem: T) -> PList[T]: ...
+ @property
+ def rest(self) -> PList[T]: ...
+ def reverse(self) -> PList[T]: ...
+ def split(self, index: int) -> Tuple[PList[T], PList[T]]: ...
+
+T_PClass = TypeVar('T_PClass', bound='PClass')
+
+class PClass(Hashable):
+ def __new__(cls, **kwargs: Any): ...
+ def set(self: T_PClass, *args: Any, **kwargs: Any) -> T_PClass: ...
+ @classmethod
+ def create(
+ cls: Type[T_PClass],
+ kwargs: Any,
+ _factory_fields: Optional[Any] = ...,
+ ignore_extra: bool = ...,
+ ) -> T_PClass: ...
+ def serialize(self, format: Optional[Any] = ...): ...
+ def transform(self, *transformations: Any): ...
+ def __eq__(self, other: object): ...
+ def __ne__(self, other: object): ...
+ def __hash__(self): ...
+ def __reduce__(self): ...
+ def evolver(self) -> PClassEvolver: ...
+ def remove(self: T_PClass, name: Any) -> T_PClass: ...
+
+class PClassEvolver:
+ def __init__(self, original: Any, initial_dict: Any) -> None: ...
+ def __getitem__(self, item: Any): ...
+ def set(self, key: Any, value: Any): ...
+ def __setitem__(self, key: Any, value: Any) -> None: ...
+ def remove(self, item: Any): ...
+ def __delitem__(self, item: Any) -> None: ...
+ def persistent(self) -> PClass: ...
+ def __getattr__(self, item: Any): ...
+
+
+
+class CheckedPMap(PMap[KT, VT]):
+ __key_type__: Type[KT]
+ __value_type__: Type[VT]
+ def __new__(cls, source: Mapping[KT, VT] = ..., size: int = ...) -> CheckedPMap: ...
+ @classmethod
+ def create(cls, source_data: Mapping[KT, VT], _factory_fields: Any = ...) -> CheckedPMap[KT, VT]: ...
+ def serialize(self, format: Optional[Any] = ...) -> Dict[KT, VT]: ...
+
+
+class CheckedPVector(PVector[T]):
+ __type__: Type[T]
+ def __new__(self, initial: Iterable[T] = ...) -> CheckedPVector: ...
+ @classmethod
+ def create(cls, source_data: Iterable[T], _factory_fields: Any = ...) -> CheckedPVector[T]: ...
+ def serialize(self, format: Optional[Any] = ...) -> List[T]: ...
+
+
+class CheckedPSet(PSet[T]):
+ __type__: Type[T]
+ def __new__(cls, initial: Iterable[T] = ...) -> CheckedPSet: ...
+ @classmethod
+ def create(cls, source_data: Iterable[T], _factory_fields: Any = ...) -> CheckedPSet[T]: ...
+ def serialize(self, format: Optional[Any] = ...) -> Set[T]: ...
+
+
+class InvariantException(Exception):
+ invariant_errors: Tuple[Any, ...] = ... # possibly nested tuple
+ missing_fields: Tuple[str, ...] = ...
+ def __init__(
+ self,
+ error_codes: Any = ...,
+ missing_fields: Any = ...,
+ *args: Any,
+ **kwargs: Any
+ ) -> None: ...
+
+
+class CheckedTypeError(TypeError):
+ source_class: Type[Any]
+ expected_types: Tuple[Any, ...]
+ actual_type: Type[Any]
+ actual_value: Any
+ def __init__(
+ self,
+ source_class: Any,
+ expected_types: Any,
+ actual_type: Any,
+ actual_value: Any,
+ *args: Any,
+ **kwargs: Any
+ ) -> None: ...
+
+
+class CheckedKeyTypeError(CheckedTypeError): ...
+class CheckedValueTypeError(CheckedTypeError): ...
+class CheckedType: ...
+
+
+class PTypeError(TypeError):
+ source_class: Type[Any] = ...
+ field: str = ...
+ expected_types: Tuple[Any, ...] = ...
+ actual_type: Type[Any] = ...
+ def __init__(
+ self,
+ source_class: Any,
+ field: Any,
+ expected_types: Any,
+ actual_type: Any,
+ *args: Any,
+ **kwargs: Any
+ ) -> None: ...
diff --git a/third_party/python/pyrsistent/setup.cfg b/third_party/python/pyrsistent/setup.cfg
new file mode 100644
index 0000000000..e4eba0b6c3
--- /dev/null
+++ b/third_party/python/pyrsistent/setup.cfg
@@ -0,0 +1,7 @@
+[aliases]
+test = pytest
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/pyrsistent/setup.py b/third_party/python/pyrsistent/setup.py
new file mode 100644
index 0000000000..931800ff15
--- /dev/null
+++ b/third_party/python/pyrsistent/setup.py
@@ -0,0 +1,81 @@
+import os
+from setuptools import setup, Extension
+import sys
+import platform
+import warnings
+import codecs
+from distutils.command.build_ext import build_ext
+from distutils.errors import CCompilerError
+from distutils.errors import DistutilsPlatformError, DistutilsExecError
+from _pyrsistent_version import __version__
+
+readme_path = os.path.join(os.path.dirname(__file__), 'README.rst')
+with codecs.open(readme_path, encoding='utf8') as f:
+ readme = f.read()
+
+extensions = []
+if platform.python_implementation() == 'CPython':
+ extensions = [Extension('pvectorc', sources=['pvectorcmodule.c'])]
+
+needs_pytest = {'pytest', 'test', 'ptr'}.intersection(sys.argv)
+pytest_runner = ['pytest-runner'] if needs_pytest else []
+
+
+class custom_build_ext(build_ext):
+ """Allow C extension building to fail."""
+
+ warning_message = """
+********************************************************************************
+WARNING: Could not build the %s.
+ Pyrsistent will still work but performance may be degraded.
+ %s
+********************************************************************************
+"""
+
+ def run(self):
+ try:
+ build_ext.run(self)
+ except Exception:
+ e = sys.exc_info()[1]
+ sys.stderr.write('%s\n' % str(e))
+ sys.stderr.write(self.warning_message % ("extension modules", "There was an issue with your platform configuration - see above."))
+
+ def build_extension(self, ext):
+ name = ext.name
+ try:
+ build_ext.build_extension(self, ext)
+ except Exception:
+ e = sys.exc_info()[1]
+ sys.stderr.write('%s\n' % str(e))
+ sys.stderr.write(self.warning_message % ("%s extension module" % name, "The output above this warning shows how the compilation failed."))
+
+setup(
+ name='pyrsistent',
+ version=__version__,
+ description='Persistent/Functional/Immutable data structures',
+ long_description=readme,
+ author='Tobias Gustafsson',
+ author_email='tobias.l.gustafsson@gmail.com',
+ url='http://github.com/tobgu/pyrsistent/',
+ license='MIT',
+ license_files=['LICENCE.mit'],
+ py_modules=['_pyrsistent_version'],
+ classifiers=[
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+ ],
+ test_suite='tests',
+ tests_require=['pytest<5', 'hypothesis<5'],
+ scripts=[],
+ setup_requires=pytest_runner,
+ ext_modules=extensions,
+ cmdclass={'build_ext': custom_build_ext},
+ install_requires=['six'],
+ packages=['pyrsistent'],
+ package_data={'pyrsistent': ['py.typed', '__init__.pyi', 'typing.pyi']},
+)
diff --git a/third_party/python/pystache/.gitignore b/third_party/python/pystache/.gitignore
new file mode 100644
index 0000000000..758d62df92
--- /dev/null
+++ b/third_party/python/pystache/.gitignore
@@ -0,0 +1,17 @@
+*.pyc
+.DS_Store
+# Tox support. See: http://pypi.python.org/pypi/tox
+.tox
+# Our tox runs convert the doctests in *.rst files to Python 3 prior to
+# running tests. Ignore these temporary files.
+*.temp2to3.rst
+# The setup.py "prep" command converts *.md to *.temp.rst (via *.temp.md).
+*.temp.md
+*.temp.rst
+# TextMate project file
+*.tmproj
+# Distribution-related folders and files.
+build
+dist
+MANIFEST
+pystache.egg-info
diff --git a/third_party/python/pystache/.travis.yml b/third_party/python/pystache/.travis.yml
new file mode 100644
index 0000000000..00227053aa
--- /dev/null
+++ b/third_party/python/pystache/.travis.yml
@@ -0,0 +1,14 @@
+language: python
+
+# Travis CI has no plans to support Jython and no longer supports Python 2.5.
+python:
+ - 2.6
+ - 2.7
+ - 3.2
+ - pypy
+
+script:
+ - python setup.py install
+ # Include the spec tests directory for Mustache spec tests and the
+ # project directory for doctests.
+ - pystache-test . ext/spec/specs
diff --git a/third_party/python/pystache/HISTORY.md b/third_party/python/pystache/HISTORY.md
new file mode 100644
index 0000000000..e5b7638aee
--- /dev/null
+++ b/third_party/python/pystache/HISTORY.md
@@ -0,0 +1,169 @@
+History
+=======
+
+**Note:** Official support for Python 2.4 will end with Pystache version 0.6.0.
+
+0.5.4 (2014-07-11)
+------------------
+
+- Bugfix: made test with filenames OS agnostic (issue \#162).
+
+0.5.3 (2012-11-03)
+------------------
+
+- Added ability to customize string coercion (e.g. to have None render as
+ `''`) (issue \#130).
+- Added Renderer.render_name() to render a template by name (issue \#122).
+- Added TemplateSpec.template_path to specify an absolute path to a
+ template (issue \#41).
+- Added option of raising errors on missing tags/partials:
+ `Renderer(missing_tags='strict')` (issue \#110).
+- Added support for finding and loading templates by file name in
+ addition to by template name (issue \#127). [xgecko]
+- Added a `parse()` function that yields a printable, pre-compiled
+ parse tree.
+- Added support for rendering pre-compiled templates.
+- Added Python 3.3 to the list of supported versions.
+- Added support for [PyPy](http://pypy.org/) (issue \#125).
+- Added support for [Travis CI](http://travis-ci.org) (issue \#124).
+ [msabramo]
+- Bugfix: `defaults.DELIMITERS` can now be changed at runtime (issue \#135).
+ [bennoleslie]
+- Bugfix: exceptions raised from a property are no longer swallowed
+ when getting a key from a context stack (issue \#110).
+- Bugfix: lambda section values can now return non-ascii, non-unicode
+ strings (issue \#118).
+- Bugfix: allow `test_pystache.py` and `tox` to pass when run from a
+ downloaded sdist (i.e. without the spec test directory).
+- Convert HISTORY and README files from reST to Markdown.
+- More robust handling of byte strings in Python 3.
+- Added Creative Commons license for David Phillips's logo.
+
+0.5.2 (2012-05-03)
+------------------
+
+- Added support for dot notation and version 1.1.2 of the spec (issue
+ \#99). [rbp]
+- Missing partials now render as empty string per latest version of
+ spec (issue \#115).
+- Bugfix: falsey values now coerced to strings using str().
+- Bugfix: lambda return values for sections no longer pushed onto
+ context stack (issue \#113).
+- Bugfix: lists of lambdas for sections were not rendered (issue
+ \#114).
+
+0.5.1 (2012-04-24)
+------------------
+
+- Added support for Python 3.1 and 3.2.
+- Added tox support to test multiple Python versions.
+- Added test script entry point: pystache-test.
+- Added \_\_version\_\_ package attribute.
+- Test harness now supports both YAML and JSON forms of Mustache spec.
+- Test harness no longer requires nose.
+
+0.5.0 (2012-04-03)
+------------------
+
+This version represents a major rewrite and refactoring of the code base
+that also adds features and fixes many bugs. All functionality and
+nearly all unit tests have been preserved. However, some backwards
+incompatible changes to the API have been made.
+
+Below is a selection of some of the changes (not exhaustive).
+
+Highlights:
+
+- Pystache now passes all tests in version 1.0.3 of the [Mustache
+ spec](https://github.com/mustache/spec). [pvande]
+- Removed View class: it is no longer necessary to subclass from View
+ or from any other class to create a view.
+- Replaced Template with Renderer class: template rendering behavior
+ can be modified via the Renderer constructor or by setting
+ attributes on a Renderer instance.
+- Added TemplateSpec class: template rendering can be specified on a
+ per-view basis by subclassing from TemplateSpec.
+- Introduced separation of concerns and removed circular dependencies
+ (e.g. between Template and View classes, cf. [issue
+ \#13](https://github.com/defunkt/pystache/issues/13)).
+- Unicode now used consistently throughout the rendering process.
+- Expanded test coverage: nosetests now runs doctests and \~105 test
+ cases from the Mustache spec (increasing the number of tests from 56
+ to \~315).
+- Added a rudimentary benchmarking script to gauge performance while
+ refactoring.
+- Extensive documentation added (e.g. docstrings).
+
+Other changes:
+
+- Added a command-line interface. [vrde]
+- The main rendering class now accepts a custom partial loader (e.g. a
+ dictionary) and a custom escape function.
+- Non-ascii characters in str strings are now supported while
+ rendering.
+- Added string encoding, file encoding, and errors options for
+ decoding to unicode.
+- Removed the output encoding option.
+- Removed the use of markupsafe.
+
+Bug fixes:
+
+- Context values no longer processed as template strings.
+ [jakearchibald]
+- Whitespace surrounding sections is no longer altered, per the spec.
+ [heliodor]
+- Zeroes now render correctly when using PyPy. [alex]
+- Multiline comments now permitted. [fczuardi]
+- Extensionless template files are now supported.
+- Passing `**kwargs` to `Template()` no longer modifies the context.
+- Passing `**kwargs` to `Template()` with no context no longer raises
+ an exception.
+
+0.4.1 (2012-03-25)
+------------------
+
+- Added support for Python 2.4. [wangtz, jvantuyl]
+
+0.4.0 (2011-01-12)
+------------------
+
+- Add support for nested contexts (within template and view)
+- Add support for inverted lists
+- Decoupled template loading
+
+0.3.1 (2010-05-07)
+------------------
+
+- Fix package
+
+0.3.0 (2010-05-03)
+------------------
+
+- View.template\_path can now hold a list of paths
+- Add {{& blah}} as an alias for {{{ blah }}}
+- Higher Order Sections
+- Inverted sections
+
+0.2.0 (2010-02-15)
+------------------
+
+- Bugfix: Methods returning False or None are not rendered
+- Bugfix: Don't render an empty string when a tag's value is 0.
+ [enaeseth]
+- Add support for using non-callables as View attributes.
+ [joshthecoder]
+- Allow using View instances as attributes. [joshthecoder]
+- Support for Unicode and non-ASCII-encoded bytestring output.
+ [enaeseth]
+- Template file encoding awareness. [enaeseth]
+
+0.1.1 (2009-11-13)
+------------------
+
+- Ensure we're dealing with strings, always
+- Tests can be run by executing the test file directly
+
+0.1.0 (2009-11-12)
+------------------
+
+- First release
diff --git a/third_party/python/pystache/LICENSE b/third_party/python/pystache/LICENSE
new file mode 100644
index 0000000000..42be9d6460
--- /dev/null
+++ b/third_party/python/pystache/LICENSE
@@ -0,0 +1,22 @@
+Copyright (C) 2012 Chris Jerdonek. All rights reserved.
+
+Copyright (c) 2009 Chris Wanstrath
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/third_party/python/pystache/MANIFEST.in b/third_party/python/pystache/MANIFEST.in
new file mode 100644
index 0000000000..bdc64bf718
--- /dev/null
+++ b/third_party/python/pystache/MANIFEST.in
@@ -0,0 +1,13 @@
+include README.md
+include HISTORY.md
+include LICENSE
+include TODO.md
+include setup_description.rst
+include tox.ini
+include test_pystache.py
+# You cannot use package_data, for example, to include data files in a
+# source distribution when using Distribute.
+recursive-include pystache/tests *.mustache *.txt
+# We deliberately exclude the gh/ directory because it contains copies
+# of resources needed only for the web page hosted on GitHub (via the
+# gh-pages branch).
diff --git a/third_party/python/pystache/README.md b/third_party/python/pystache/README.md
new file mode 100644
index 0000000000..54a96088b4
--- /dev/null
+++ b/third_party/python/pystache/README.md
@@ -0,0 +1,276 @@
+Pystache
+========
+
+<!-- Since PyPI rejects reST long descriptions that contain HTML, -->
+<!-- HTML comments must be removed when converting this file to reST. -->
+<!-- For more information on PyPI's behavior in this regard, see: -->
+<!-- http://docs.python.org/distutils/uploading.html#pypi-package-display -->
+<!-- The Pystache setup script strips 1-line HTML comments prior -->
+<!-- to converting to reST, so all HTML comments should be one line. -->
+<!-- -->
+<!-- We leave the leading brackets empty here. Otherwise, unwanted -->
+<!-- caption text shows up in the reST version converted by pandoc. -->
+![](http://defunkt.github.com/pystache/images/logo_phillips.png "mustachioed, monocled snake by David Phillips")
+
+![](https://secure.travis-ci.org/defunkt/pystache.png "Travis CI current build status")
+
+[Pystache](http://defunkt.github.com/pystache) is a Python
+implementation of [Mustache](http://mustache.github.com/). Mustache is a
+framework-agnostic, logic-free templating system inspired by
+[ctemplate](http://code.google.com/p/google-ctemplate/) and
+[et](http://www.ivan.fomichev.name/2008/05/erlang-template-engine-prototype.html).
+Like ctemplate, Mustache "emphasizes separating logic from presentation:
+it is impossible to embed application logic in this template language."
+
+The [mustache(5)](http://mustache.github.com/mustache.5.html) man page
+provides a good introduction to Mustache's syntax. For a more complete
+(and more current) description of Mustache's behavior, see the official
+[Mustache spec](https://github.com/mustache/spec).
+
+Pystache is [semantically versioned](http://semver.org) and can be found
+on [PyPI](http://pypi.python.org/pypi/pystache). This version of
+Pystache passes all tests in [version
+1.1.2](https://github.com/mustache/spec/tree/v1.1.2) of the spec.
+
+
+Requirements
+------------
+
+Pystache is tested with--
+
+- Python 2.4 (requires simplejson [version
+ 2.0.9](http://pypi.python.org/pypi/simplejson/2.0.9) or earlier)
+- Python 2.5 (requires
+ [simplejson](http://pypi.python.org/pypi/simplejson/))
+- Python 2.6
+- Python 2.7
+- Python 3.1
+- Python 3.2
+- Python 3.3
+- [PyPy](http://pypy.org/)
+
+[Distribute](http://packages.python.org/distribute/) (the setuptools fork)
+is recommended over [setuptools](http://pypi.python.org/pypi/setuptools),
+and is required in some cases (e.g. for Python 3 support).
+If you use [pip](http://www.pip-installer.org/), you probably already satisfy
+this requirement.
+
+JSON support is needed only for the command-line interface and to run
+the spec tests. We require simplejson for earlier versions of Python
+since Python's [json](http://docs.python.org/library/json.html) module
+was added in Python 2.6.
+
+For Python 2.4 we require an earlier version of simplejson since
+simplejson stopped officially supporting Python 2.4 in simplejson
+version 2.1.0. Earlier versions of simplejson can be installed manually,
+as follows:
+
+ pip install 'simplejson<2.1.0'
+
+Official support for Python 2.4 will end with Pystache version 0.6.0.
+
+Install It
+----------
+
+ pip install pystache
+
+And test it--
+
+ pystache-test
+
+To install and test from source (e.g. from GitHub), see the Develop
+section.
+
+Use It
+------
+
+ >>> import pystache
+ >>> print pystache.render('Hi {{person}}!', {'person': 'Mom'})
+ Hi Mom!
+
+You can also create dedicated view classes to hold your view logic.
+
+Here's your view class (in .../examples/readme.py):
+
+ class SayHello(object):
+ def to(self):
+ return "Pizza"
+
+Instantiating like so:
+
+ >>> from pystache.tests.examples.readme import SayHello
+ >>> hello = SayHello()
+
+Then your template, say\_hello.mustache (by default in the same
+directory as your class definition):
+
+ Hello, {{to}}!
+
+Pull it together:
+
+ >>> renderer = pystache.Renderer()
+ >>> print renderer.render(hello)
+ Hello, Pizza!
+
+For greater control over rendering (e.g. to specify a custom template
+directory), use the `Renderer` class like above. One can pass attributes
+to the Renderer class constructor or set them on a Renderer instance. To
+customize template loading on a per-view basis, subclass `TemplateSpec`.
+See the docstrings of the
+[Renderer](https://github.com/defunkt/pystache/blob/master/pystache/renderer.py)
+class and
+[TemplateSpec](https://github.com/defunkt/pystache/blob/master/pystache/template_spec.py)
+class for more information.
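+
+For example, to enable strict handling of missing tags and then relax it
+again (an illustrative sketch added here; it relies only on the
+`missing_tags` option noted in the changelog and on setting attributes on
+an instance, as described above):
+
+ >>> renderer = pystache.Renderer(missing_tags='strict')
+ >>> renderer.missing_tags = 'ignore'  # attributes can also be changed afterwards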
+
+You can also pre-parse a template:
+
+ >>> parsed = pystache.parse(u"Hey {{#who}}{{.}}!{{/who}}")
+ >>> print parsed
+ [u'Hey ', _SectionNode(key=u'who', index_begin=12, index_end=18, parsed=[_EscapeNode(key=u'.'), u'!'])]
+
+And then:
+
+ >>> print renderer.render(parsed, {'who': 'Pops'})
+ Hey Pops!
+ >>> print renderer.render(parsed, {'who': 'you'})
+ Hey you!
+
+Python 3
+--------
+
+Pystache has supported Python 3 since version 0.5.1. Pystache behaves
+slightly differently between Python 2 and 3, as follows:
+
+- In Python 2, the default html-escape function `cgi.escape()` does
+ not escape single quotes. In Python 3, the default escape function
+ `html.escape()` does escape single quotes.
+- In both Python 2 and 3, the string and file encodings default to
+ `sys.getdefaultencoding()`. However, this function can return
+ different values under Python 2 and 3, even when run from the same
+ system. Check the behavior on your own system, or do not rely on the
+ defaults by passing in the encodings explicitly
+ (e.g. to the `Renderer` class).
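+
+As a rough illustration of the first point (a sketch added here, not part
+of the upstream README), compare the two default escape functions on a
+string containing a single quote:
+
+ import cgi, html
+ cgi.escape("it's", quote=True)   # Python 2 default -> "it's" (single quote untouched)
+ html.escape("it's", quote=True)  # Python 3 default -> "it&#x27;s"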
+
+Unicode
+-------
+
+This section describes how Pystache handles unicode, strings, and
+encodings.
+
+Internally, Pystache uses [only unicode
+strings](http://docs.python.org/howto/unicode.html#tips-for-writing-unicode-aware-programs)
+(`str` in Python 3 and `unicode` in Python 2). For input, Pystache
+accepts both unicode strings and byte strings (`bytes` in Python 3 and
+`str` in Python 2). For output, Pystache's template rendering methods
+return only unicode.
+
+Pystache's `Renderer` class supports a number of attributes to control
+how Pystache converts byte strings to unicode on input. These include
+the `file_encoding`, `string_encoding`, and `decode_errors` attributes.
+
+The `file_encoding` attribute is the encoding the renderer uses to
+convert to unicode any files read from the file system. Similarly,
+`string_encoding` is the encoding the renderer uses to convert any other
+byte strings encountered during the rendering process into unicode (e.g.
+context values that are encoded byte strings).
+
+The `decode_errors` attribute is what the renderer passes as the
+`errors` argument to Python's built-in unicode-decoding function
+(`str()` in Python 3 and `unicode()` in Python 2). The valid values for
+this argument are `strict`, `ignore`, and `replace`.
+
+Each of these attributes can be set via the `Renderer` class's
+constructor using a keyword argument of the same name. See the Renderer
+class's docstrings for further details. In addition, the `file_encoding`
+attribute can be controlled on a per-view basis by subclassing the
+`TemplateSpec` class. When not specified explicitly, these attributes
+default to values set in Pystache's `defaults` module.
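+
+For example, a renderer that decodes template files and other byte strings
+as UTF-8 and replaces any undecodable bytes can be constructed with keyword
+arguments of the same names (an illustrative sketch, not an excerpt from
+the upstream documentation):
+
+ >>> renderer = pystache.Renderer(file_encoding='utf-8', string_encoding='utf-8', decode_errors='replace')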
+
+Develop
+-------
+
+To test from a source distribution (without installing)--
+
+ python test_pystache.py
+
+To test Pystache with multiple versions of Python (with a single
+command!), you can use [tox](http://pypi.python.org/pypi/tox):
+
+ pip install 'virtualenv<1.8' # Version 1.8 dropped support for Python 2.4.
+ pip install 'tox<1.4' # Version 1.4 dropped support for Python 2.4.
+ tox
+
+If you do not have all Python versions listed in `tox.ini`--
+
+ tox -e py26,py32 # for example
+
+The source distribution tests also include doctests and tests from the
+Mustache spec. To include tests from the Mustache spec in your test
+runs:
+
+ git submodule init
+ git submodule update
+
+The test harness parses the spec's (more human-readable) yaml files if
+[PyYAML](http://pypi.python.org/pypi/PyYAML) is present. Otherwise, it
+parses the json files. To install PyYAML--
+
+ pip install pyyaml
+
+To run a subset of the tests, you can use
+[nose](http://somethingaboutorange.com/mrl/projects/nose/0.11.1/testing.html):
+
+ pip install nose
+ nosetests --tests pystache/tests/test_context.py:GetValueTests.test_dictionary__key_present
+
+### Using Python 3 with Pystache from source
+
+Pystache is written in Python 2 and must be converted to Python 3 prior to
+using it with Python 3. The installation process (and tox) does this
+automatically.
+
+To convert the code to Python 3 manually (while using Python 3)--
+
+ python setup.py build
+
+This writes the converted code to a subdirectory called `build`.
+By design, Python 3 builds
+[cannot](https://bitbucket.org/tarek/distribute/issue/292/allow-use_2to3-with-python-2)
+be created from Python 2.
+
+To convert the code without using setup.py, you can use
+[2to3](http://docs.python.org/library/2to3.html) as follows (two steps)--
+
+ 2to3 --write --nobackups --no-diffs --doctests_only pystache
+ 2to3 --write --nobackups --no-diffs pystache
+
+This converts the code (and doctests) in place.
+
+To `import pystache` from a source distribution while using Python 3, be
+sure that you are importing from a directory containing a converted
+version of the code (e.g. from the `build` directory after converting),
+and not from the original (unconverted) source directory. Otherwise, you will
+get a syntax error. You can help prevent this by not running the Python
+IDE from the project directory when importing Pystache while using Python 3.
+
+
+Mailing List
+------------
+
+There is a [mailing list](http://librelist.com/browser/pystache/). Note
+that there is a bit of a delay between posting a message and seeing it
+appear in the mailing list archive.
+
+Credits
+-------
+
+ >>> context = { 'author': 'Chris Wanstrath', 'maintainer': 'Chris Jerdonek' }
+ >>> print pystache.render("Author: {{author}}\nMaintainer: {{maintainer}}", context)
+ Author: Chris Wanstrath
+ Maintainer: Chris Jerdonek
+
+Pystache logo by [David Phillips](http://davidphillips.us/) is licensed
+under a [Creative Commons Attribution-ShareAlike 3.0 Unported
+License](http://creativecommons.org/licenses/by-sa/3.0/deed.en_US).
+![](http://i.creativecommons.org/l/by-sa/3.0/88x31.png "Creative
+Commons Attribution-ShareAlike 3.0 Unported License")
diff --git a/third_party/python/pystache/TODO.md b/third_party/python/pystache/TODO.md
new file mode 100644
index 0000000000..cd82417657
--- /dev/null
+++ b/third_party/python/pystache/TODO.md
@@ -0,0 +1,16 @@
+TODO
+====
+
+In development branch:
+
+* Figure out a way to suppress center alignment of images in reST output.
+* Add a unit test for the change made in 7ea8e7180c41. This is with regard
+ to not requiring spec tests when running tests from a downloaded sdist.
+* End support for Python 2.4.
+* Add Python 3.3 to tox file (after deprecating 2.4).
+* Turn the benchmarking script at pystache/tests/benchmark.py into a command
+ in pystache/commands, or make it a subcommand of one of the existing
+ commands (i.e. using a command argument).
+* Provide support for logging in at least one of the commands.
+* Make sure command parsing to pystache-test doesn't break with Python 2.4 and earlier.
+* Combine pystache-test with the main command.
diff --git a/third_party/python/pystache/gh/images/logo_phillips.png b/third_party/python/pystache/gh/images/logo_phillips.png
new file mode 100644
index 0000000000..7491901366
--- /dev/null
+++ b/third_party/python/pystache/gh/images/logo_phillips.png
Binary files differ
diff --git a/third_party/python/pystache/pystache/__init__.py b/third_party/python/pystache/pystache/__init__.py
new file mode 100644
index 0000000000..4cf24344e5
--- /dev/null
+++ b/third_party/python/pystache/pystache/__init__.py
@@ -0,0 +1,13 @@
+
+"""
+TODO: add a docstring.
+
+"""
+
+# We keep all initialization code in a separate module.
+
+from pystache.init import parse, render, Renderer, TemplateSpec
+
+__all__ = ['parse', 'render', 'Renderer', 'TemplateSpec']
+
+__version__ = '0.5.4' # Also change in setup.py.
diff --git a/third_party/python/pystache/pystache/commands/__init__.py b/third_party/python/pystache/pystache/commands/__init__.py
new file mode 100644
index 0000000000..a0d386a38c
--- /dev/null
+++ b/third_party/python/pystache/pystache/commands/__init__.py
@@ -0,0 +1,4 @@
+"""
+TODO: add a docstring.
+
+"""
diff --git a/third_party/python/pystache/pystache/commands/render.py b/third_party/python/pystache/pystache/commands/render.py
new file mode 100644
index 0000000000..1a9c309d52
--- /dev/null
+++ b/third_party/python/pystache/pystache/commands/render.py
@@ -0,0 +1,95 @@
+# coding: utf-8
+
+"""
+This module provides command-line access to pystache.
+
+Run this script using the -h option for command-line help.
+
+"""
+
+
+try:
+ import json
+except:
+ # The json module is new in Python 2.6, whereas simplejson is
+ # compatible with earlier versions.
+ try:
+ import simplejson as json
+ except ImportError:
+ # Raise an error with a type different from ImportError as a hack around
+ # this issue:
+ # http://bugs.python.org/issue7559
+ from sys import exc_info
+ ex_type, ex_value, tb = exc_info()
+ new_ex = Exception("%s: %s" % (ex_type.__name__, ex_value))
+ raise new_ex.__class__, new_ex, tb
+
+# The optparse module is deprecated in Python 2.7 in favor of argparse.
+# However, argparse is not available in Python 2.6 and earlier.
+from optparse import OptionParser
+import sys
+
+# We use absolute imports here to allow use of this script from its
+# location in source control (e.g. for development purposes).
+# Otherwise, the following error occurs:
+#
+# ValueError: Attempted relative import in non-package
+#
+from pystache.common import TemplateNotFoundError
+from pystache.renderer import Renderer
+
+
+USAGE = """\
+%prog [-h] template context
+
+Render a mustache template with the given context.
+
+positional arguments:
+ template A filename or template string.
+ context A filename or JSON string."""
+
+
+def parse_args(sys_argv, usage):
+ """
+ Parse the command-line arguments and return a (template, context) pair.
+
+ """
+ args = sys_argv[1:]
+
+ parser = OptionParser(usage=usage)
+ options, args = parser.parse_args(args)
+
+ template, context = args
+
+ return template, context
+
+
+# TODO: verify whether the setup() method's entry_points argument
+# supports passing arguments to main:
+#
+# http://packages.python.org/distribute/setuptools.html#automatic-script-creation
+#
+def main(sys_argv=sys.argv):
+ template, context = parse_args(sys_argv, USAGE)
+
+ if template.endswith('.mustache'):
+ template = template[:-9]
+
+ renderer = Renderer()
+
+ try:
+ template = renderer.load_template(template)
+ except TemplateNotFoundError:
+ pass
+
+ try:
+ context = json.load(open(context))
+ except IOError:
+ context = json.loads(context)
+
+ rendered = renderer.render(template, context)
+ print rendered
+
+
+if __name__=='__main__':
+ main()
diff --git a/third_party/python/pystache/pystache/commands/test.py b/third_party/python/pystache/pystache/commands/test.py
new file mode 100644
index 0000000000..0872453388
--- /dev/null
+++ b/third_party/python/pystache/pystache/commands/test.py
@@ -0,0 +1,18 @@
+# coding: utf-8
+
+"""
+This module provides a command to test pystache (unit tests, doctests, etc).
+
+"""
+
+import sys
+
+from pystache.tests.main import main as run_tests
+
+
+def main(sys_argv=sys.argv):
+ run_tests(sys_argv=sys_argv)
+
+
+if __name__=='__main__':
+ main()
diff --git a/third_party/python/pystache/pystache/common.py b/third_party/python/pystache/pystache/common.py
new file mode 100644
index 0000000000..fb266dd8b5
--- /dev/null
+++ b/third_party/python/pystache/pystache/common.py
@@ -0,0 +1,71 @@
+# coding: utf-8
+
+"""
+Exposes functionality needed throughout the project.
+
+"""
+
+from sys import version_info
+
+def _get_string_types():
+ # TODO: come up with a better solution for this. One of the issues here
+ # is that in Python 3 there is no common base class for unicode strings
+ # and byte strings, and 2to3 seems to convert all of "str", "unicode",
+ # and "basestring" to Python 3's "str".
+ if version_info < (3, ):
+ return basestring
+ # The latter evaluates to "bytes" in Python 3 -- even after conversion by 2to3.
+ return (unicode, type(u"a".encode('utf-8')))
+
+
+_STRING_TYPES = _get_string_types()
+
+
+def is_string(obj):
+ """
+ Return whether the given object is a byte string or unicode string.
+
+ This function is provided for compatibility with both Python 2 and 3
+ when using 2to3.
+
+ """
+ return isinstance(obj, _STRING_TYPES)
+
+
+# This function was designed to be portable across Python versions -- both
+# with older versions and with Python 3 after applying 2to3.
+def read(path):
+ """
+ Return the contents of a text file as a byte string.
+
+ """
+ # Opening in binary mode is necessary for compatibility across Python
+ # 2 and 3. In both Python 2 and 3, open() defaults to opening files in
+ # text mode. However, in Python 2, open() returns file objects whose
+ # read() method returns byte strings (strings of type `str` in Python 2),
+ # whereas in Python 3, the file object returns unicode strings (strings
+ # of type `str` in Python 3).
+ f = open(path, 'rb')
+ # We avoid use of the with keyword for Python 2.4 support.
+ try:
+ return f.read()
+ finally:
+ f.close()
+
+
+class MissingTags(object):
+
+ """Contains the valid values for Renderer.missing_tags."""
+
+ ignore = 'ignore'
+ strict = 'strict'
+
+
+class PystacheError(Exception):
+ """Base class for Pystache exceptions."""
+ pass
+
+
+class TemplateNotFoundError(PystacheError):
+ """An exception raised when a template is not found."""
+ pass
diff --git a/third_party/python/pystache/pystache/context.py b/third_party/python/pystache/pystache/context.py
new file mode 100644
index 0000000000..6715916092
--- /dev/null
+++ b/third_party/python/pystache/pystache/context.py
@@ -0,0 +1,342 @@
+# coding: utf-8
+
+"""
+Exposes a ContextStack class.
+
+The Mustache spec makes a special distinction between two types of context
+stack elements: hashes and objects. For the purposes of interpreting the
+spec, we define these categories mutually exclusively as follows:
+
+ (1) Hash: an item whose type is a subclass of dict.
+
+ (2) Object: an item that is neither a hash nor an instance of a
+ built-in type.
+
+"""
+
+from pystache.common import PystacheError
+
+
+# This equals '__builtin__' in Python 2 and 'builtins' in Python 3.
+_BUILTIN_MODULE = type(0).__module__
+
+
+# We use this private global variable as a return value to represent a key
+# not being found on lookup. This lets us distinguish between the case
+# of a key's value being None with the case of a key not being found --
+# without having to rely on exceptions (e.g. KeyError) for flow control.
+#
+# TODO: eliminate the need for a private global variable, e.g. by using the
+# preferred Python approach of "easier to ask for forgiveness than permission":
+# http://docs.python.org/glossary.html#term-eafp
+class NotFound(object):
+ pass
+_NOT_FOUND = NotFound()
+
+
+def _get_value(context, key):
+ """
+ Retrieve a key's value from a context item.
+
+ Returns _NOT_FOUND if the key does not exist.
+
+ The ContextStack.get() docstring documents this function's intended behavior.
+
+ """
+ if isinstance(context, dict):
+ # Then we consider the argument a "hash" for the purposes of the spec.
+ #
+ # We do a membership test to avoid using exceptions for flow control
+ # (e.g. catching KeyError).
+ if key in context:
+ return context[key]
+ elif type(context).__module__ != _BUILTIN_MODULE:
+ # Then we consider the argument an "object" for the purposes of
+ # the spec.
+ #
+ # The elif test above lets us avoid treating instances of built-in
+ # types like integers and strings as objects (cf. issue #81).
+ # Instances of user-defined classes on the other hand, for example,
+ # are considered objects by the test above.
+ try:
+ attr = getattr(context, key)
+ except AttributeError:
+ # TODO: distinguish the case of the attribute not existing from
+ # an AttributeError being raised by the call to the attribute.
+ # See the following issue for implementation ideas:
+ # http://bugs.python.org/issue7559
+ pass
+ else:
+ # TODO: consider using EAFP here instead.
+ # http://docs.python.org/glossary.html#term-eafp
+ if callable(attr):
+ return attr()
+ return attr
+
+ return _NOT_FOUND
+
+
+class KeyNotFoundError(PystacheError):
+
+ """
+ An exception raised when a key is not found in a context stack.
+
+ """
+
+ def __init__(self, key, details):
+ self.key = key
+ self.details = details
+
+ def __str__(self):
+ return "Key %s not found: %s" % (repr(self.key), self.details)
+
+
+class ContextStack(object):
+
+ """
+ Provides dictionary-like access to a stack of zero or more items.
+
+ Instances of this class are meant to act as the rendering context
+ when rendering Mustache templates in accordance with mustache(5)
+ and the Mustache spec.
+
+ Instances encapsulate a private stack of hashes, objects, and built-in
+ type instances. Querying the stack for the value of a key queries
+ the items in the stack in order from last-added objects to first
+ (last in, first out).
+
+ Caution: this class does not currently support recursive nesting in
+ that items in the stack cannot themselves be ContextStack instances.
+
+ See the docstrings of the methods of this class for more details.
+
+ """
+
+ # We reserve keyword arguments for future options (e.g. a "strict=True"
+ # option for enabling a strict mode).
+ def __init__(self, *items):
+ """
+ Construct an instance, and initialize the private stack.
+
+ The *items arguments are the items with which to populate the
+ initial stack. Items in the argument list are added to the
+ stack in order so that, in particular, items at the end of
+ the argument list are queried first when querying the stack.
+
+ Caution: items should not themselves be ContextStack instances, as
+ recursive nesting does not behave as one might expect.
+
+ """
+ self._stack = list(items)
+
+ def __repr__(self):
+ """
+ Return a string representation of the instance.
+
+ For example--
+
+ >>> context = ContextStack({'alpha': 'abc'}, {'numeric': 123})
+ >>> repr(context)
+ "ContextStack({'alpha': 'abc'}, {'numeric': 123})"
+
+ """
+ return "%s%s" % (self.__class__.__name__, tuple(self._stack))
+
+ @staticmethod
+ def create(*context, **kwargs):
+ """
+ Build a ContextStack instance from a sequence of context-like items.
+
+ This factory-style method is more general than the ContextStack class's
+ constructor in that, unlike the constructor, the argument list
+ can itself contain ContextStack instances.
+
+ Here is an example illustrating various aspects of this method:
+
+ >>> obj1 = {'animal': 'cat', 'vegetable': 'carrot', 'mineral': 'copper'}
+ >>> obj2 = ContextStack({'vegetable': 'spinach', 'mineral': 'silver'})
+ >>>
+ >>> context = ContextStack.create(obj1, None, obj2, mineral='gold')
+ >>>
+ >>> context.get('animal')
+ 'cat'
+ >>> context.get('vegetable')
+ 'spinach'
+ >>> context.get('mineral')
+ 'gold'
+
+ Arguments:
+
+ *context: zero or more dictionaries, ContextStack instances, or objects
+ with which to populate the initial context stack. None
+ arguments will be skipped. Items in the *context list are
+ added to the stack in order so that later items in the argument
+ list take precedence over earlier items. This behavior is the
+ same as the constructor's.
+
+ **kwargs: additional key-value data to add to the context stack.
+ As these arguments appear after all items in the *context list,
+ in the case of key conflicts these values take precedence over
+ all items in the *context list. This behavior is the same as
+ the constructor's.
+
+ """
+ items = context
+
+ context = ContextStack()
+
+ for item in items:
+ if item is None:
+ continue
+ if isinstance(item, ContextStack):
+ context._stack.extend(item._stack)
+ else:
+ context.push(item)
+
+ if kwargs:
+ context.push(kwargs)
+
+ return context
+
+ # TODO: add more unit tests for this.
+ # TODO: update the docstring for dotted names.
+ def get(self, name):
+ """
+ Resolve a dotted name against the current context stack.
+
+ This function follows the rules outlined in the section of the
+ spec regarding tag interpolation. This function returns the value
+ as is and does not coerce the return value to a string.
+
+ Arguments:
+
+ name: a dotted or non-dotted name.
+
+ default: the value to return if name resolution fails at any point.
+ Defaults to the empty string per the Mustache spec.
+
+ This method queries items in the stack in order from last-added
+ objects to first (last in, first out). The value returned is
+ the value of the key in the first item that contains the key.
+ If the key is not found in any item in the stack, then the default
+ value is returned. The default value defaults to None.
+
+ In accordance with the spec, this method queries items in the
+ stack for a key differently depending on whether the item is a
+ hash, object, or neither (as defined in the module docstring):
+
+ (1) Hash: if the item is a hash, then the key's value is the
+ dictionary value of the key. If the dictionary doesn't contain
+ the key, then the key is considered not found.
+
+ (2) Object: if the item is an object, then the method looks for
+ an attribute with the same name as the key. If an attribute
+ with that name exists, the value of the attribute is returned.
+ If the attribute is callable, however (i.e. if the attribute
+ is a method), then the attribute is called with no arguments
+ and that value is returned. If there is no attribute with
+ the same name as the key, then the key is considered not found.
+
+ (3) Neither: if the item is neither a hash nor an object, then
+ the key is considered not found.
+
+ *Caution*:
+
+ Callables are handled differently depending on whether they are
+ dictionary values, as in (1) above, or attributes, as in (2).
+ The former are returned as-is, while the latter are first
+ called and that value returned.
+
+ Here is an example to illustrate:
+
+ >>> def greet():
+ ... return "Hi Bob!"
+ >>>
+ >>> class Greeter(object):
+ ... greet = None
+ >>>
+ >>> dct = {'greet': greet}
+ >>> obj = Greeter()
+ >>> obj.greet = greet
+ >>>
+ >>> dct['greet'] is obj.greet
+ True
+ >>> ContextStack(dct).get('greet') #doctest: +ELLIPSIS
+ <function greet at 0x...>
+ >>> ContextStack(obj).get('greet')
+ 'Hi Bob!'
+
+ TODO: explain the rationale for this difference in treatment.
+
+ """
+ if name == '.':
+ try:
+ return self.top()
+ except IndexError:
+ raise KeyNotFoundError(".", "empty context stack")
+
+ parts = name.split('.')
+
+ try:
+ result = self._get_simple(parts[0])
+ except KeyNotFoundError:
+ raise KeyNotFoundError(name, "first part")
+
+ for part in parts[1:]:
+ # The full context stack is not used to resolve the remaining parts.
+ # From the spec--
+ #
+ # 5) If any name parts were retained in step 1, each should be
+ # resolved against a context stack containing only the result
+ # from the former resolution. If any part fails resolution, the
+ # result should be considered falsey, and should interpolate as
+ # the empty string.
+ #
+ # TODO: make sure we have a test case for the above point.
+ result = _get_value(result, part)
+ # TODO: consider using EAFP here instead.
+ # http://docs.python.org/glossary.html#term-eafp
+ if result is _NOT_FOUND:
+ raise KeyNotFoundError(name, "missing %s" % repr(part))
+
+ return result
+
+ def _get_simple(self, name):
+ """
+ Query the stack for a non-dotted name.
+
+ """
+ for item in reversed(self._stack):
+ result = _get_value(item, name)
+ if result is not _NOT_FOUND:
+ return result
+
+ raise KeyNotFoundError(name, "part missing")
+
+ def push(self, item):
+ """
+ Push an item onto the stack.
+
+ """
+ self._stack.append(item)
+
+ def pop(self):
+ """
+ Pop an item off of the stack, and return it.
+
+ """
+ return self._stack.pop()
+
+ def top(self):
+ """
+ Return the item last added to the stack.
+
+ """
+ return self._stack[-1]
+
+ def copy(self):
+ """
+ Return a copy of this instance.
+
+ """
+ return ContextStack(*self._stack)
diff --git a/third_party/python/pystache/pystache/defaults.py b/third_party/python/pystache/pystache/defaults.py
new file mode 100644
index 0000000000..bcfdf4cd3a
--- /dev/null
+++ b/third_party/python/pystache/pystache/defaults.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+
+"""
+This module provides a central location for defining default behavior.
+
+Throughout the package, these defaults take effect only when the user
+does not otherwise specify a value.
+
+"""
+
+try:
+ # Python 3.2 adds html.escape() and deprecates cgi.escape().
+ from html import escape
+except ImportError:
+ from cgi import escape
+
+import os
+import sys
+
+from pystache.common import MissingTags
+
+
+# How to handle encoding errors when decoding strings from str to unicode.
+#
+# This value is passed as the "errors" argument to Python's built-in
+# unicode() function:
+#
+# http://docs.python.org/library/functions.html#unicode
+#
+DECODE_ERRORS = 'strict'
+
+# The name of the encoding to use when converting to unicode any strings of
+# type str encountered during the rendering process.
+STRING_ENCODING = sys.getdefaultencoding()
+
+# The name of the encoding to use when converting file contents to unicode.
+# This default takes precedence over the STRING_ENCODING default for
+# strings that arise from files.
+FILE_ENCODING = sys.getdefaultencoding()
+
+# The delimiters to start with when parsing.
+DELIMITERS = (u'{{', u'}}')
+
+# How to handle missing tags when rendering a template.
+MISSING_TAGS = MissingTags.ignore
+
+# The starting list of directories in which to search for templates when
+# loading a template by file name.
+SEARCH_DIRS = [os.curdir] # i.e. ['.']
+
+# The escape function to apply to strings that require escaping when
+# rendering templates (e.g. for tags enclosed in double braces).
+# Only unicode strings will be passed to this function.
+#
+# The quote=True argument causes double but not single quotes to be escaped
+# in Python 3.1 and earlier, and both double and single quotes to be
+# escaped in Python 3.2 and later:
+#
+# http://docs.python.org/library/cgi.html#cgi.escape
+# http://docs.python.org/dev/library/html.html#html.escape
+#
+TAG_ESCAPE = lambda u: escape(u, quote=True)
+
+# The default template extension, without the leading dot.
+TEMPLATE_EXTENSION = 'mustache'
diff --git a/third_party/python/pystache/pystache/init.py b/third_party/python/pystache/pystache/init.py
new file mode 100644
index 0000000000..38bb1f5a0e
--- /dev/null
+++ b/third_party/python/pystache/pystache/init.py
@@ -0,0 +1,19 @@
+# encoding: utf-8
+
+"""
+This module contains the initialization logic called by __init__.py.
+
+"""
+
+from pystache.parser import parse
+from pystache.renderer import Renderer
+from pystache.template_spec import TemplateSpec
+
+
+def render(template, context=None, **kwargs):
+ """
+ Return the given template string rendered using the given context.
+
+ """
+ renderer = Renderer()
+ return renderer.render(template, context, **kwargs)
diff --git a/third_party/python/pystache/pystache/loader.py b/third_party/python/pystache/pystache/loader.py
new file mode 100644
index 0000000000..d4a7e5310f
--- /dev/null
+++ b/third_party/python/pystache/pystache/loader.py
@@ -0,0 +1,170 @@
+# coding: utf-8
+
+"""
+This module provides a Loader class for locating and reading templates.
+
+"""
+
+import os
+import sys
+
+from pystache import common
+from pystache import defaults
+from pystache.locator import Locator
+
+
+# We make a function so that the current defaults take effect.
+# TODO: revisit whether this is necessary.
+
+def _make_to_unicode():
+ def to_unicode(s, encoding=None):
+ """
+ Raises a TypeError exception if the given string is already unicode.
+
+ """
+ if encoding is None:
+ encoding = defaults.STRING_ENCODING
+ return unicode(s, encoding, defaults.DECODE_ERRORS)
+ return to_unicode
+
+
+class Loader(object):
+
+ """
+ Loads the template associated to a name or user-defined object.
+
+ All load_*() methods return the template as a unicode string.
+
+ """
+
+ def __init__(self, file_encoding=None, extension=None, to_unicode=None,
+ search_dirs=None):
+ """
+ Construct a template loader instance.
+
+ Arguments:
+
+ extension: the template file extension, without the leading dot.
+ Pass False for no extension (e.g. to use extensionless template
+ files). Defaults to the package default.
+
+ file_encoding: the name of the encoding to use when converting file
+ contents to unicode. Defaults to the package default.
+
+ search_dirs: the list of directories in which to search when loading
+ a template by name or file name. Defaults to the package default.
+
+ to_unicode: the function to use when converting strings of type
+ str to unicode. The function should have the signature:
+
+ to_unicode(s, encoding=None)
+
+ It should accept a string of type str and an optional encoding
+ name and return a string of type unicode. Defaults to calling
+ Python's built-in function unicode() using the package string
+ encoding and decode errors defaults.
+
+ """
+ if extension is None:
+ extension = defaults.TEMPLATE_EXTENSION
+
+ if file_encoding is None:
+ file_encoding = defaults.FILE_ENCODING
+
+ if search_dirs is None:
+ search_dirs = defaults.SEARCH_DIRS
+
+ if to_unicode is None:
+ to_unicode = _make_to_unicode()
+
+ self.extension = extension
+ self.file_encoding = file_encoding
+ # TODO: unit test setting this attribute.
+ self.search_dirs = search_dirs
+ self.to_unicode = to_unicode
+
+ def _make_locator(self):
+ return Locator(extension=self.extension)
+
+ def unicode(self, s, encoding=None):
+ """
+ Convert a string to unicode using the given encoding, and return it.
+
+ This function uses the underlying to_unicode attribute.
+
+ Arguments:
+
+ s: a basestring instance to convert to unicode. Unlike Python's
+ built-in unicode() function, it is okay to pass unicode strings
+ to this function. (Passing a unicode string to Python's unicode()
+ with the encoding argument throws the error, "TypeError: decoding
+ Unicode is not supported.")
+
+ encoding: the encoding to pass to the to_unicode attribute.
+ Defaults to None.
+
+ """
+ if isinstance(s, unicode):
+ return unicode(s)
+
+ return self.to_unicode(s, encoding)
+
+ def read(self, path, encoding=None):
+ """
+ Read the template at the given path, and return it as a unicode string.
+
+ """
+ b = common.read(path)
+
+ if encoding is None:
+ encoding = self.file_encoding
+
+ return self.unicode(b, encoding)
+
+ def load_file(self, file_name):
+ """
+ Find and return the template with the given file name.
+
+ Arguments:
+
+ file_name: the file name of the template.
+
+ """
+ locator = self._make_locator()
+
+ path = locator.find_file(file_name, self.search_dirs)
+
+ return self.read(path)
+
+ def load_name(self, name):
+ """
+ Find and return the template with the given template name.
+
+ Arguments:
+
+ name: the name of the template.
+
+ """
+ locator = self._make_locator()
+
+ path = locator.find_name(name, self.search_dirs)
+
+ return self.read(path)
+
+ # TODO: unit-test this method.
+ def load_object(self, obj):
+ """
+ Find and return the template associated to the given object.
+
+ Arguments:
+
+ obj: an instance of a user-defined class.
+
+ """
+ locator = self._make_locator()
+
+ path = locator.find_object(obj, self.search_dirs)
+
+ return self.read(path)
diff --git a/third_party/python/pystache/pystache/locator.py b/third_party/python/pystache/pystache/locator.py
new file mode 100644
index 0000000000..30c5b01e01
--- /dev/null
+++ b/third_party/python/pystache/pystache/locator.py
@@ -0,0 +1,171 @@
+# coding: utf-8
+
+"""
+This module provides a Locator class for finding template files.
+
+"""
+
+import os
+import re
+import sys
+
+from pystache.common import TemplateNotFoundError
+from pystache import defaults
+
+
+class Locator(object):
+
+ def __init__(self, extension=None):
+ """
+ Construct a template locator.
+
+ Arguments:
+
+ extension: the template file extension, without the leading dot.
+ Pass False for no extension (e.g. to use extensionless template
+ files). Defaults to the package default.
+
+ """
+ if extension is None:
+ extension = defaults.TEMPLATE_EXTENSION
+
+ self.template_extension = extension
+
+ def get_object_directory(self, obj):
+ """
+ Return the directory containing an object's defining class.
+
+ Returns None if there is no such directory, for example if the
+ class was defined in an interactive Python session, or in a
+ doctest that appears in a text file (rather than a Python file).
+
+ """
+ if not hasattr(obj, '__module__'):
+ return None
+
+ module = sys.modules[obj.__module__]
+
+ if not hasattr(module, '__file__'):
+ # TODO: add a unit test for this case.
+ return None
+
+ path = module.__file__
+
+ return os.path.dirname(path)
+
+ def make_template_name(self, obj):
+ """
+ Return the canonical template name for an object instance.
+
+ This method converts Python-style class names (PEP 8's recommended
+ CamelCase, aka CapWords) to lower_case_with_underscores. Here
+ is an example with code:
+
+ >>> class HelloWorld(object):
+ ... pass
+ >>> hi = HelloWorld()
+ >>>
+ >>> locator = Locator()
+ >>> locator.make_template_name(hi)
+ 'hello_world'
+
+ """
+ template_name = obj.__class__.__name__
+
+ def repl(match):
+ return '_' + match.group(0).lower()
+
+ return re.sub('[A-Z]', repl, template_name)[1:]
+
+ def make_file_name(self, template_name, template_extension=None):
+ """
+ Generate and return the file name for the given template name.
+
+ Arguments:
+
+ template_extension: defaults to the instance's extension.
+
+ """
+ file_name = template_name
+
+ if template_extension is None:
+ template_extension = self.template_extension
+
+ if template_extension is not False:
+ file_name += os.path.extsep + template_extension
+
+ return file_name
+
+ def _find_path(self, search_dirs, file_name):
+ """
+ Search for the given file, and return the path.
+
+ Returns None if the file is not found.
+
+ """
+ for dir_path in search_dirs:
+ file_path = os.path.join(dir_path, file_name)
+ if os.path.exists(file_path):
+ return file_path
+
+ return None
+
+ def _find_path_required(self, search_dirs, file_name):
+ """
+ Return the path to a template with the given file name.
+
+ """
+ path = self._find_path(search_dirs, file_name)
+
+ if path is None:
+ raise TemplateNotFoundError('File %s not found in dirs: %s' %
+ (repr(file_name), repr(search_dirs)))
+
+ return path
+
+ def find_file(self, file_name, search_dirs):
+ """
+ Return the path to a template with the given file name.
+
+ Arguments:
+
+ file_name: the file name of the template.
+
+ search_dirs: the list of directories in which to search.
+
+ """
+ return self._find_path_required(search_dirs, file_name)
+
+ def find_name(self, template_name, search_dirs):
+ """
+ Return the path to a template with the given name.
+
+ Arguments:
+
+ template_name: the name of the template.
+
+ search_dirs: the list of directories in which to search.
+
+ """
+ file_name = self.make_file_name(template_name)
+
+ return self._find_path_required(search_dirs, file_name)
+
+ def find_object(self, obj, search_dirs, file_name=None):
+ """
+ Return the path to a template associated with the given object.
+
+ """
+ if file_name is None:
+ # TODO: should we define a make_file_name() method?
+ template_name = self.make_template_name(obj)
+ file_name = self.make_file_name(template_name)
+
+ dir_path = self.get_object_directory(obj)
+
+ if dir_path is not None:
+ search_dirs = [dir_path] + search_dirs
+
+ path = self._find_path_required(search_dirs, file_name)
+
+ return path
diff --git a/third_party/python/pystache/pystache/parsed.py b/third_party/python/pystache/pystache/parsed.py
new file mode 100644
index 0000000000..372d96c666
--- /dev/null
+++ b/third_party/python/pystache/pystache/parsed.py
@@ -0,0 +1,50 @@
+# coding: utf-8
+
+"""
+Exposes a class that represents a parsed (or compiled) template.
+
+"""
+
+
+class ParsedTemplate(object):
+
+ """
+ Represents a parsed or compiled template.
+
+ An instance wraps a list of unicode strings and node objects. A node
+ object must have a `render(engine, stack)` method that accepts a
+ RenderEngine instance and a ContextStack instance and returns a unicode
+ string.
+
+ """
+
+ def __init__(self):
+ self._parse_tree = []
+
+ def __repr__(self):
+ return repr(self._parse_tree)
+
+ def add(self, node):
+ """
+ Arguments:
+
+ node: a unicode string or node object instance. See the class
+ docstring for information.
+
+ """
+ self._parse_tree.append(node)
+
+ def render(self, engine, context):
+ """
+ Returns: a string of type unicode.
+
+ """
+ # We avoid use of the ternary operator for Python 2.4 support.
+ def get_unicode(node):
+ if type(node) is unicode:
+ return node
+ return node.render(engine, context)
+ parts = map(get_unicode, self._parse_tree)
+ s = ''.join(parts)
+
+ return unicode(s)
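+
+# Example (a sketch): any object with a render(engine, context) method can be
+# added alongside unicode strings. StaticNode below is a hypothetical
+# illustration, not part of the pystache API:
+#
+#     class StaticNode(object):
+#         def render(self, engine, context):
+#             return u'static text'
+#
+#     parsed = ParsedTemplate()
+#     parsed.add(u'prefix: ')
+#     parsed.add(StaticNode())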
diff --git a/third_party/python/pystache/pystache/parser.py b/third_party/python/pystache/pystache/parser.py
new file mode 100644
index 0000000000..9a4fba235b
--- /dev/null
+++ b/third_party/python/pystache/pystache/parser.py
@@ -0,0 +1,378 @@
+# coding: utf-8
+
+"""
+Exposes a parse() function to parse template strings.
+
+"""
+
+import re
+
+from pystache import defaults
+from pystache.parsed import ParsedTemplate
+
+
+END_OF_LINE_CHARACTERS = [u'\r', u'\n']
+NON_BLANK_RE = re.compile(ur'^(.)', re.M)
+
+
+# TODO: add some unit tests for this.
+# TODO: add a test case that checks for spurious spaces.
+# TODO: add test cases for delimiters.
+def parse(template, delimiters=None):
+ """
+ Parse a unicode template string and return a ParsedTemplate instance.
+
+ Arguments:
+
+ template: a unicode template string.
+
+ delimiters: a 2-tuple of delimiters. Defaults to the package default.
+
+ Examples:
+
+ >>> parsed = parse(u"Hey {{#who}}{{name}}!{{/who}}")
+ >>> print str(parsed).replace('u', '') # This is a hack to get the test to pass both in Python 2 and 3.
+ ['Hey ', _SectionNode(key='who', index_begin=12, index_end=21, parsed=[_EscapeNode(key='name'), '!'])]
+
+ """
+ if type(template) is not unicode:
+ raise Exception("Template is not unicode: %s" % type(template))
+ parser = _Parser(delimiters)
+ return parser.parse(template)
+
+
+def _compile_template_re(delimiters):
+ """
+ Return a regular expression object (re.RegexObject) instance.
+
+ """
+ # The possible tag type characters following the opening tag,
+ # excluding "=" and "{".
+ tag_types = "!>&/#^"
+
+ # TODO: are we following this in the spec?
+ #
+ # The tag's content MUST be a non-whitespace character sequence
+ # NOT containing the current closing delimiter.
+ #
+ tag = r"""
+ (?P<whitespace>[\ \t]*)
+ %(otag)s \s*
+ (?:
+ (?P<change>=) \s* (?P<delims>.+?) \s* = |
+ (?P<raw>{) \s* (?P<raw_name>.+?) \s* } |
+ (?P<tag>[%(tag_types)s]?) \s* (?P<tag_key>[\s\S]+?)
+ )
+ \s* %(ctag)s
+ """ % {'tag_types': tag_types, 'otag': re.escape(delimiters[0]), 'ctag': re.escape(delimiters[1])}
+
+ return re.compile(tag, re.VERBOSE)
+
+
+class ParsingError(Exception):
+
+ pass
+
+
+## Node types
+
+def _format(obj, exclude=None):
+ if exclude is None:
+ exclude = []
+ exclude.append('key')
+ attrs = obj.__dict__
+ names = list(set(attrs.keys()) - set(exclude))
+ names.sort()
+ names.insert(0, 'key')
+ args = ["%s=%s" % (name, repr(attrs[name])) for name in names]
+ return "%s(%s)" % (obj.__class__.__name__, ", ".join(args))
+
+
+class _CommentNode(object):
+
+ def __repr__(self):
+ return _format(self)
+
+ def render(self, engine, context):
+ return u''
+
+
+class _ChangeNode(object):
+
+ def __init__(self, delimiters):
+ self.delimiters = delimiters
+
+ def __repr__(self):
+ return _format(self)
+
+ def render(self, engine, context):
+ return u''
+
+
+class _EscapeNode(object):
+
+ def __init__(self, key):
+ self.key = key
+
+ def __repr__(self):
+ return _format(self)
+
+ def render(self, engine, context):
+ s = engine.fetch_string(context, self.key)
+ return engine.escape(s)
+
+
+class _LiteralNode(object):
+
+ def __init__(self, key):
+ self.key = key
+
+ def __repr__(self):
+ return _format(self)
+
+ def render(self, engine, context):
+ s = engine.fetch_string(context, self.key)
+ return engine.literal(s)
+
+
+class _PartialNode(object):
+
+ def __init__(self, key, indent):
+ self.key = key
+ self.indent = indent
+
+ def __repr__(self):
+ return _format(self)
+
+ def render(self, engine, context):
+ template = engine.resolve_partial(self.key)
+ # Indent before rendering.
+ template = re.sub(NON_BLANK_RE, self.indent + ur'\1', template)
+
+ return engine.render(template, context)
+
+
+class _InvertedNode(object):
+
+ def __init__(self, key, parsed_section):
+ self.key = key
+ self.parsed_section = parsed_section
+
+ def __repr__(self):
+ return _format(self)
+
+ def render(self, engine, context):
+ # TODO: is there a bug because we are not using the same
+ # logic as in fetch_string()?
+ data = engine.resolve_context(context, self.key)
+ # Note that lambdas are considered truthy for inverted sections
+ # per the spec.
+ if data:
+ return u''
+ return self.parsed_section.render(engine, context)
+
+
+class _SectionNode(object):
+
+ # TODO: the template_ and parsed_template_ arguments don't both seem
+ # to be necessary. Can we remove one of them? For example, if
+ # callable(data) is True, then the initial parsed_template isn't used.
+ def __init__(self, key, parsed, delimiters, template, index_begin, index_end):
+ self.delimiters = delimiters
+ self.key = key
+ self.parsed = parsed
+ self.template = template
+ self.index_begin = index_begin
+ self.index_end = index_end
+
+ def __repr__(self):
+ return _format(self, exclude=['delimiters', 'template'])
+
+ def render(self, engine, context):
+ values = engine.fetch_section_data(context, self.key)
+
+ parts = []
+ for val in values:
+ if callable(val):
+ # Lambdas special case section rendering and bypass pushing
+ # the data value onto the context stack. From the spec--
+ #
+ # When used as the data value for a Section tag, the
+ # lambda MUST be treatable as an arity 1 function, and
+ # invoked as such (passing a String containing the
+ # unprocessed section contents). The returned value
+ # MUST be rendered against the current delimiters, then
+ # interpolated in place of the section.
+ #
+ # Also see--
+ #
+ # https://github.com/defunkt/pystache/issues/113
+ #
+ # TODO: should we check the arity?
+ val = val(self.template[self.index_begin:self.index_end])
+ val = engine._render_value(val, context, delimiters=self.delimiters)
+ parts.append(val)
+ continue
+
+ context.push(val)
+ parts.append(self.parsed.render(engine, context))
+ context.pop()
+
+ return unicode(''.join(parts))
+
+
+class _Parser(object):
+
+ _delimiters = None
+ _template_re = None
+
+ def __init__(self, delimiters=None):
+ if delimiters is None:
+ delimiters = defaults.DELIMITERS
+
+ self._delimiters = delimiters
+
+ def _compile_delimiters(self):
+ self._template_re = _compile_template_re(self._delimiters)
+
+ def _change_delimiters(self, delimiters):
+ self._delimiters = delimiters
+ self._compile_delimiters()
+
+ def parse(self, template):
+ """
+ Parse a template string.
+
+ This method uses the current tag delimiters.
+
+ Arguments:
+
+ template: a unicode string that is the template to parse.
+
+ Returns:
+
+ a ParsedTemplate instance.
+
+ """
+ self._compile_delimiters()
+
+ start_index = 0
+ content_end_index, parsed_section, section_key = None, None, None
+ parsed_template = ParsedTemplate()
+
+ states = []
+
+ while True:
+ match = self._template_re.search(template, start_index)
+
+ if match is None:
+ break
+
+ match_index = match.start()
+ end_index = match.end()
+
+ matches = match.groupdict()
+
+ # Normalize the matches dictionary.
+ if matches['change'] is not None:
+ matches.update(tag='=', tag_key=matches['delims'])
+ elif matches['raw'] is not None:
+ matches.update(tag='&', tag_key=matches['raw_name'])
+
+ tag_type = matches['tag']
+ tag_key = matches['tag_key']
+ leading_whitespace = matches['whitespace']
+
+ # Standalone (non-interpolation) tags consume the entire line,
+ # both leading whitespace and trailing newline.
+ did_tag_begin_line = match_index == 0 or template[match_index - 1] in END_OF_LINE_CHARACTERS
+ did_tag_end_line = end_index == len(template) or template[end_index] in END_OF_LINE_CHARACTERS
+ is_tag_interpolating = tag_type in ['', '&']
+
+ if did_tag_begin_line and did_tag_end_line and not is_tag_interpolating:
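+ # Consume the tag's trailing newline ('\r\n' or '\n') as well; the
+ # "x and 1 or 0" form is the pre-Python-2.5 spelling of a conditional
+ # expression, used here (as elsewhere in pystache) for Python 2.4 support.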
+ if end_index < len(template):
+ end_index += template[end_index] == '\r' and 1 or 0
+ if end_index < len(template):
+ end_index += template[end_index] == '\n' and 1 or 0
+ elif leading_whitespace:
+ match_index += len(leading_whitespace)
+ leading_whitespace = ''
+
+ # Avoid adding spurious empty strings to the parse tree.
+ if start_index != match_index:
+ parsed_template.add(template[start_index:match_index])
+
+ start_index = end_index
+
+ if tag_type in ('#', '^'):
+ # Cache current state.
+ state = (tag_type, end_index, section_key, parsed_template)
+ states.append(state)
+
+ # Initialize new state
+ section_key, parsed_template = tag_key, ParsedTemplate()
+ continue
+
+ if tag_type == '/':
+ if tag_key != section_key:
+ raise ParsingError("Section end tag mismatch: %s != %s" % (tag_key, section_key))
+
+ # Restore previous state with newly found section data.
+ parsed_section = parsed_template
+
+ (tag_type, section_start_index, section_key, parsed_template) = states.pop()
+ node = self._make_section_node(template, tag_type, tag_key, parsed_section,
+ section_start_index, match_index)
+
+ else:
+ node = self._make_interpolation_node(tag_type, tag_key, leading_whitespace)
+
+ parsed_template.add(node)
+
+ # Avoid adding spurious empty strings to the parse tree.
+ if start_index != len(template):
+ parsed_template.add(template[start_index:])
+
+ return parsed_template
+
+ def _make_interpolation_node(self, tag_type, tag_key, leading_whitespace):
+ """
+ Create and return a non-section node for the parse tree.
+
+ """
+ # TODO: switch to using a dictionary instead of a bunch of ifs and elifs.
+ if tag_type == '!':
+ return _CommentNode()
+
+ if tag_type == '=':
+ delimiters = tag_key.split()
+ self._change_delimiters(delimiters)
+ return _ChangeNode(delimiters)
+
+ if tag_type == '':
+ return _EscapeNode(tag_key)
+
+ if tag_type == '&':
+ return _LiteralNode(tag_key)
+
+ if tag_type == '>':
+ return _PartialNode(tag_key, leading_whitespace)
+
+ raise Exception("Invalid symbol for interpolation tag: %s" % repr(tag_type))
+
+ def _make_section_node(self, template, tag_type, tag_key, parsed_section,
+ section_start_index, section_end_index):
+ """
+ Create and return a section node for the parse tree.
+
+ """
+ if tag_type == '#':
+ return _SectionNode(tag_key, parsed_section, self._delimiters,
+ template, section_start_index, section_end_index)
+
+ if tag_type == '^':
+ return _InvertedNode(tag_key, parsed_section)
+
+ raise Exception("Invalid symbol for section tag: %s" % repr(tag_type))
diff --git a/third_party/python/pystache/pystache/renderengine.py b/third_party/python/pystache/pystache/renderengine.py
new file mode 100644
index 0000000000..c797b1765a
--- /dev/null
+++ b/third_party/python/pystache/pystache/renderengine.py
@@ -0,0 +1,181 @@
+# coding: utf-8
+
+"""
+Defines a class responsible for rendering logic.
+
+"""
+
+import re
+
+from pystache.common import is_string
+from pystache.parser import parse
+
+
+def context_get(stack, name):
+ """
+ Find and return a name from a ContextStack instance.
+
+ """
+ return stack.get(name)
+
+
+class RenderEngine(object):
+
+ """
+ Provides a render() method.
+
+ This class is meant only for internal use.
+
+ As a rule, the code in this class operates on unicode strings where
+ possible rather than, say, strings of type str or markupsafe.Markup.
+ This means that strings obtained from "external" sources like partials
+ and variable tag values are immediately converted to unicode (or
+ escaped and converted to unicode) before being operated on further.
+ This makes maintaining, reasoning about, and testing the correctness
+ of the code much simpler. In particular, it keeps the implementation
+ of this class independent of the API details of one (or possibly more)
+ unicode subclasses (e.g. markupsafe.Markup).
+
+ """
+
+ # TODO: it would probably be better for the constructor to accept
+ # and set as an attribute a single RenderResolver instance
+ # that encapsulates the customizable aspects of converting
+ # strings and resolving partials and names from context.
+ def __init__(self, literal=None, escape=None, resolve_context=None,
+ resolve_partial=None, to_str=None):
+ """
+ Arguments:
+
+ literal: the function used to convert unescaped variable tag
+ values to unicode, e.g. the value corresponding to a tag
+ "{{{name}}}". The function should accept a string of type
+ str or unicode (or a subclass) and return a string of type
+ unicode (but not a proper subclass of unicode).
+ This class will only pass basestring instances to this
+ function. For example, it will call str() on integer variable
+ values prior to passing them to this function.
+
+ escape: the function used to escape and convert variable tag
+ values to unicode, e.g. the value corresponding to a tag
+ "{{name}}". The function should obey the same properties
+ described above for the "literal" function argument.
+ This function should take care to convert any str
+ arguments to unicode just as the literal function should, as
+ this class will not pass tag values to literal prior to passing
+ them to this function. This allows for more flexibility,
+ for example using a custom escape function that handles
+ incoming strings of type markupsafe.Markup differently
+ from plain unicode strings.
+
+ resolve_context: the function to call to resolve a name against
+ a context stack. The function should accept two positional
+ arguments: a ContextStack instance and a name to resolve.
+
+ resolve_partial: the function to call when loading a partial.
+ The function should accept a template name string and return a
+ template string of type unicode (not a subclass).
+
+ to_str: a function that accepts an object and returns a string (e.g.
+ the built-in function str). This function is used for string
+ coercion whenever a string is required (e.g. for converting None
+ or 0 to a string).
+
+ """
+ self.escape = escape
+ self.literal = literal
+ self.resolve_context = resolve_context
+ self.resolve_partial = resolve_partial
+ self.to_str = to_str
+
+ # TODO: Rename context to stack throughout this module.
+
+ # From the spec:
+ #
+ # When used as the data value for an Interpolation tag, the lambda
+ # MUST be treatable as an arity 0 function, and invoked as such.
+ # The returned value MUST be rendered against the default delimiters,
+ # then interpolated in place of the lambda.
+ #
+ def fetch_string(self, context, name):
+ """
+ Get a value from the given context as a basestring instance.
+
+ """
+ val = self.resolve_context(context, name)
+
+ if callable(val):
+ # Return here because _render_value() already returns a string.
+ return self._render_value(val(), context)
+
+ if not is_string(val):
+ return self.to_str(val)
+
+ return val
+
+ def fetch_section_data(self, context, name):
+ """
+ Fetch the value of a section as a list.
+
+ """
+ data = self.resolve_context(context, name)
+
+ # From the spec:
+ #
+ # If the data is not of a list type, it is coerced into a list
+ # as follows: if the data is truthy (e.g. `!!data == true`),
+ # use a single-element list containing the data, otherwise use
+ # an empty list.
+ #
+ if not data:
+ data = []
+ else:
+ # The least brittle way to determine whether something
+ # supports iteration is by trying to call iter() on it:
+ #
+ # http://docs.python.org/library/functions.html#iter
+ #
+ # It is not sufficient, for example, to check whether the item
+ # implements __iter__() (the iteration protocol). There is
+ # also __getitem__() (the sequence protocol). In Python 2,
+ # strings do not implement __iter__(), but in Python 3 they do.
+ try:
+ iter(data)
+ except TypeError:
+ # Then the value does not support iteration.
+ data = [data]
+ else:
+ if is_string(data) or isinstance(data, dict):
+ # Do not treat strings and dicts (which are iterable) as lists.
+ data = [data]
+ # Otherwise, treat the value as a list.
+
+ return data
+
+ def _render_value(self, val, context, delimiters=None):
+ """
+ Render an arbitrary value.
+
+ """
+ if not is_string(val):
+ # In case the template is an integer, for example.
+ val = self.to_str(val)
+ if type(val) is not unicode:
+ val = self.literal(val)
+ return self.render(val, context, delimiters)
+
+ def render(self, template, context_stack, delimiters=None):
+ """
+ Render a unicode template string, and return as unicode.
+
+ Arguments:
+
+ template: a template string of type unicode (but not a proper
+ subclass of unicode).
+
+ context_stack: a ContextStack instance.
+
+ """
+ parsed_template = parse(template, delimiters)
+
+ return parsed_template.render(self, context_stack)
diff --git a/third_party/python/pystache/pystache/renderer.py b/third_party/python/pystache/pystache/renderer.py
new file mode 100644
index 0000000000..ff6a90c64b
--- /dev/null
+++ b/third_party/python/pystache/pystache/renderer.py
@@ -0,0 +1,460 @@
+# coding: utf-8
+
+"""
+This module provides a Renderer class to render templates.
+
+"""
+
+import sys
+
+from pystache import defaults
+from pystache.common import TemplateNotFoundError, MissingTags, is_string
+from pystache.context import ContextStack, KeyNotFoundError
+from pystache.loader import Loader
+from pystache.parsed import ParsedTemplate
+from pystache.renderengine import context_get, RenderEngine
+from pystache.specloader import SpecLoader
+from pystache.template_spec import TemplateSpec
+
+
+class Renderer(object):
+
+ """
+ A class for rendering mustache templates.
+
+ This class supports several rendering options which are described in
+ the constructor's docstring. Other behavior can be customized by
+ subclassing this class.
+
+ For example, one can pass a string-string dictionary to the constructor
+ to bypass loading partials from the file system:
+
+ >>> partials = {'partial': 'Hello, {{thing}}!'}
+ >>> renderer = Renderer(partials=partials)
+ >>> # We apply print to make the test work in Python 3 after 2to3.
+ >>> print renderer.render('{{>partial}}', {'thing': 'world'})
+ Hello, world!
+
+ To customize string coercion (e.g. to render False values as ''), one can
+ subclass this class. For example:
+
+ class MyRenderer(Renderer):
+ def str_coerce(self, val):
+ if not val:
+ return ''
+ else:
+ return str(val)
+
+ """
+
+ def __init__(self, file_encoding=None, string_encoding=None,
+ decode_errors=None, search_dirs=None, file_extension=None,
+ escape=None, partials=None, missing_tags=None):
+ """
+ Construct an instance.
+
+ Arguments:
+
+ file_encoding: the name of the encoding to use by default when
+ reading template files. All templates are converted to unicode
+ prior to parsing. Defaults to the package default.
+
+ string_encoding: the name of the encoding to use when converting
+ to unicode any byte strings (type str in Python 2) encountered
+ during the rendering process. This name will be passed as the
+ encoding argument to the built-in function unicode().
+ Defaults to the package default.
+
+ decode_errors: the string to pass as the errors argument to the
+ built-in function unicode() when converting byte strings to
+ unicode. Defaults to the package default.
+
+ search_dirs: the list of directories in which to search when
+ loading a template by name or file name. If given a string,
+ the method interprets the string as a single directory.
+ Defaults to the package default.
+
+ file_extension: the template file extension. Pass False for no
+ extension (i.e. to use extensionless template files).
+ Defaults to the package default.
+
+ partials: an object (e.g. a dictionary) for custom partial loading
+ during the rendering process.
+ The object should have a get() method that accepts a string
+ and returns the corresponding template as a string, preferably
+ as a unicode string. If there is no template with that name,
+ the get() method should either return None (as dict.get() does)
+ or raise an exception.
+ If this argument is None, the rendering process will use
+ the normal procedure of locating and reading templates from
+ the file system -- using relevant instance attributes like
+ search_dirs, file_encoding, etc.
+
+ escape: the function used to escape variable tag values when
+ rendering a template. The function should accept a unicode
+ string (or subclass of unicode) and return an escaped string
+ that is again unicode (or a subclass of unicode).
+ This function need not handle strings of type `str` because
+ this class will only pass it unicode strings. The constructor
+ assigns this function to the constructed instance's escape()
+ method.
+ To disable escaping entirely, one can pass `lambda u: u`
+ as the escape function, for example. One may also wish to
+ consider using markupsafe's escape function: markupsafe.escape().
+ This argument defaults to the package default.
+
+ missing_tags: a string specifying how to handle missing tags.
+ If 'strict', an error is raised on a missing tag. If 'ignore',
+ the value of the tag is the empty string. Defaults to the
+ package default.
+
+ """
+ if decode_errors is None:
+ decode_errors = defaults.DECODE_ERRORS
+
+ if escape is None:
+ escape = defaults.TAG_ESCAPE
+
+ if file_encoding is None:
+ file_encoding = defaults.FILE_ENCODING
+
+ if file_extension is None:
+ file_extension = defaults.TEMPLATE_EXTENSION
+
+ if missing_tags is None:
+ missing_tags = defaults.MISSING_TAGS
+
+ if search_dirs is None:
+ search_dirs = defaults.SEARCH_DIRS
+
+ if string_encoding is None:
+ string_encoding = defaults.STRING_ENCODING
+
+ if isinstance(search_dirs, basestring):
+ search_dirs = [search_dirs]
+
+ self._context = None
+ self.decode_errors = decode_errors
+ self.escape = escape
+ self.file_encoding = file_encoding
+ self.file_extension = file_extension
+ self.missing_tags = missing_tags
+ self.partials = partials
+ self.search_dirs = search_dirs
+ self.string_encoding = string_encoding
+
+ # This is an experimental way of giving views access to the current context.
+ # TODO: consider another approach of not giving access via a property,
+ # but instead letting the caller pass the initial context to the
+ # main render() method by reference. This approach would probably
+ # be less likely to be misused.
+ @property
+ def context(self):
+ """
+ Return the current rendering context [experimental].
+
+ """
+ return self._context
+
+ # We could not choose str() as the name because 2to3 renames the unicode()
+ # method of this class to str().
+ def str_coerce(self, val):
+ """
+ Coerce a non-string value to a string.
+
+ This method is called whenever a non-string is encountered during the
+ rendering process when a string is needed (e.g. if a context value
+ for string interpolation is not a string). To customize string
+ coercion, you can override this method.
+
+ """
+ return str(val)
+
+ def _to_unicode_soft(self, s):
+ """
+ Convert a basestring to unicode, preserving any unicode subclass.
+
+ """
+ # We type-check to avoid "TypeError: decoding Unicode is not supported".
+ # We avoid the Python ternary operator for Python 2.4 support.
+ if isinstance(s, unicode):
+ return s
+ return self.unicode(s)
+
+ def _to_unicode_hard(self, s):
+ """
+ Convert a basestring to a string with type unicode (not subclass).
+
+ """
+ return unicode(self._to_unicode_soft(s))
+
+ def _escape_to_unicode(self, s):
+ """
+ Convert a basestring to unicode (preserving any unicode subclass), and escape it.
+
+ Returns a unicode string (not subclass).
+
+ """
+ return unicode(self.escape(self._to_unicode_soft(s)))
+
+ def unicode(self, b, encoding=None):
+ """
+ Convert a byte string to unicode, using string_encoding and decode_errors.
+
+ Arguments:
+
+ b: a byte string.
+
+ encoding: the name of an encoding. Defaults to the string_encoding
+ attribute for this instance.
+
+ Raises:
+
+ TypeError: Because this method calls Python's built-in unicode()
+ function, this method raises the following exception if the
+ given string is already unicode:
+
+ TypeError: decoding Unicode is not supported
+
+ """
+ if encoding is None:
+ encoding = self.string_encoding
+
+ # TODO: Wrap UnicodeDecodeErrors with a message about setting
+ # the string_encoding and decode_errors attributes.
+ return unicode(b, encoding, self.decode_errors)
+
+ def _make_loader(self):
+ """
+ Create a Loader instance using current attributes.
+
+ """
+ return Loader(file_encoding=self.file_encoding, extension=self.file_extension,
+ to_unicode=self.unicode, search_dirs=self.search_dirs)
+
+ def _make_load_template(self):
+ """
+ Return a function that loads a template by name.
+
+ """
+ loader = self._make_loader()
+
+ def load_template(template_name):
+ return loader.load_name(template_name)
+
+ return load_template
+
+ def _make_load_partial(self):
+ """
+ Return a function that loads a partial by name.
+
+ """
+ if self.partials is None:
+ return self._make_load_template()
+
+ # Otherwise, create a function from the custom partial loader.
+ partials = self.partials
+
+ def load_partial(name):
+ # TODO: consider using EAFP here instead.
+ # http://docs.python.org/glossary.html#term-eafp
+ # This would mean requiring that the custom partial loader
+ # raise a KeyError on name not found.
+ template = partials.get(name)
+ if template is None:
+ raise TemplateNotFoundError("Name %s not found in partials: %s" %
+ (repr(name), type(partials)))
+
+ # RenderEngine requires that the return value be unicode.
+ return self._to_unicode_hard(template)
+
+ return load_partial
+
+ def _is_missing_tags_strict(self):
+ """
+ Return whether missing_tags is set to strict.
+
+ """
+ val = self.missing_tags
+
+ if val == MissingTags.strict:
+ return True
+ elif val == MissingTags.ignore:
+ return False
+
+ raise Exception("Unsupported 'missing_tags' value: %s" % repr(val))
+
+ def _make_resolve_partial(self):
+ """
+ Return the resolve_partial function to pass to RenderEngine.__init__().
+
+ """
+ load_partial = self._make_load_partial()
+
+ if self._is_missing_tags_strict():
+ return load_partial
+ # Otherwise, ignore missing tags.
+
+ def resolve_partial(name):
+ try:
+ return load_partial(name)
+ except TemplateNotFoundError:
+ return u''
+
+ return resolve_partial
+
+ def _make_resolve_context(self):
+ """
+ Return the resolve_context function to pass to RenderEngine.__init__().
+
+ """
+ if self._is_missing_tags_strict():
+ return context_get
+ # Otherwise, ignore missing tags.
+
+ def resolve_context(stack, name):
+ try:
+ return context_get(stack, name)
+ except KeyNotFoundError:
+ return u''
+
+ return resolve_context
+
+ def _make_render_engine(self):
+ """
+ Return a RenderEngine instance for rendering.
+
+ """
+ resolve_context = self._make_resolve_context()
+ resolve_partial = self._make_resolve_partial()
+
+ engine = RenderEngine(literal=self._to_unicode_hard,
+ escape=self._escape_to_unicode,
+ resolve_context=resolve_context,
+ resolve_partial=resolve_partial,
+ to_str=self.str_coerce)
+ return engine
+
+ # TODO: add unit tests for this method.
+ def load_template(self, template_name):
+ """
+ Load a template by name from the file system.
+
+ """
+ load_template = self._make_load_template()
+ return load_template(template_name)
+
+ def _render_object(self, obj, *context, **kwargs):
+ """
+ Render the template associated with the given object.
+
+ """
+ loader = self._make_loader()
+
+ # TODO: consider an approach that does not require using an if
+ # block here. For example, perhaps this class's loader can be
+ # a SpecLoader in all cases, and the SpecLoader instance can
+ # check the object's type. Or perhaps Loader and SpecLoader
+ # can be refactored to implement the same interface.
+ if isinstance(obj, TemplateSpec):
+ loader = SpecLoader(loader)
+ template = loader.load(obj)
+ else:
+ template = loader.load_object(obj)
+
+ context = [obj] + list(context)
+
+ return self._render_string(template, *context, **kwargs)
+
+ def render_name(self, template_name, *context, **kwargs):
+ """
+ Render the template with the given name using the given context.
+
+ See the render() docstring for more information.
+
+ """
+ loader = self._make_loader()
+ template = loader.load_name(template_name)
+ return self._render_string(template, *context, **kwargs)
+
+ def render_path(self, template_path, *context, **kwargs):
+ """
+ Render the template at the given path using the given context.
+
+ Read the render() docstring for more information.
+
+ """
+ loader = self._make_loader()
+ template = loader.read(template_path)
+
+ return self._render_string(template, *context, **kwargs)
+
+ def _render_string(self, template, *context, **kwargs):
+ """
+ Render the given template string using the given context.
+
+ """
+ # RenderEngine.render() requires that the template string be unicode.
+ template = self._to_unicode_hard(template)
+
+ render_func = lambda engine, stack: engine.render(template, stack)
+
+ return self._render_final(render_func, *context, **kwargs)
+
+ # All calls to render() should end here because it prepares the
+ # context stack correctly.
+ def _render_final(self, render_func, *context, **kwargs):
+ """
+ Arguments:
+
+ render_func: a function that accepts a RenderEngine and ContextStack
+ instance and returns a template rendering as a unicode string.
+
+ """
+ stack = ContextStack.create(*context, **kwargs)
+ self._context = stack
+
+ engine = self._make_render_engine()
+
+ return render_func(engine, stack)
+
+ def render(self, template, *context, **kwargs):
+ """
+ Render the given template string, view template, or parsed template.
+
+ Returns a unicode string.
+
+ Prior to rendering, this method will convert a template that is a
+ byte string (type str in Python 2) to unicode using the string_encoding
+ and decode_errors attributes. See the constructor docstring for
+ more information.
+
+ Arguments:
+
+ template: a template string that is unicode or a byte string,
+ a ParsedTemplate instance, or another object instance. In the
+ final case, the function first loads the template associated
+ with the object (for example, from the file system, using the
+ object's class name to locate the template). The rendering
+ process also uses the passed object as the first element of
+ the context stack when rendering.
+
+ *context: zero or more dictionaries, ContextStack instances, or objects
+ with which to populate the initial context stack. None
+ arguments are skipped. Items in the *context list are added to
+ the context stack in order so that later items in the argument
+ list take precedence over earlier items.
+
+ **kwargs: additional key-value data to add to the context stack.
+ As these arguments appear after all items in the *context list,
+ in the case of key conflicts these values take precedence over
+ all items in the *context list.
+
+ """
+ if is_string(template):
+ return self._render_string(template, *context, **kwargs)
+ if isinstance(template, ParsedTemplate):
+ render_func = lambda engine, stack: template.render(engine, stack)
+ return self._render_final(render_func, *context, **kwargs)
+ # Otherwise, we assume the template is an object.
+
+ return self._render_object(template, *context, **kwargs)
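+
+# Example (a sketch): keyword arguments take precedence over earlier items in
+# the *context list, so the following call renders to u'from kwarg':
+#
+#     renderer = Renderer()
+#     renderer.render(u'{{name}}', {'name': 'from dict'}, name='from kwarg')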
diff --git a/third_party/python/pystache/pystache/specloader.py b/third_party/python/pystache/pystache/specloader.py
new file mode 100644
index 0000000000..3a77d4c528
--- /dev/null
+++ b/third_party/python/pystache/pystache/specloader.py
@@ -0,0 +1,90 @@
+# coding: utf-8
+
+"""
+This module supports customized (aka special or specified) template loading.
+
+"""
+
+import os.path
+
+from pystache.loader import Loader
+
+
+# TODO: add test cases for this class.
+class SpecLoader(object):
+
+ """
+ Supports loading custom-specified templates (from TemplateSpec instances).
+
+ """
+
+ def __init__(self, loader=None):
+ if loader is None:
+ loader = Loader()
+
+ self.loader = loader
+
+ def _find_relative(self, spec):
+ """
+ Return the path to the template as a relative (dir, file_name) pair.
+
+ The directory returned is relative to the directory containing the
+ class definition of the given object. The method returns None for
+ the directory if it cannot be determined without first searching
+ the search directories.
+
+ """
+ if spec.template_rel_path is not None:
+ return os.path.split(spec.template_rel_path)
+ # Otherwise, determine the file name separately.
+
+ locator = self.loader._make_locator()
+
+ # We do not use the ternary operator for Python 2.4 support.
+ if spec.template_name is not None:
+ template_name = spec.template_name
+ else:
+ template_name = locator.make_template_name(spec)
+
+ file_name = locator.make_file_name(template_name, spec.template_extension)
+
+ return (spec.template_rel_directory, file_name)
+
+ def _find(self, spec):
+ """
+ Find and return the path to the template associated to the instance.
+
+ """
+ if spec.template_path is not None:
+ return spec.template_path
+
+ dir_path, file_name = self._find_relative(spec)
+
+ locator = self.loader._make_locator()
+
+ if dir_path is None:
+ # Then we need to search for the path.
+ path = locator.find_object(spec, self.loader.search_dirs, file_name=file_name)
+ else:
+ obj_dir = locator.get_object_directory(spec)
+ path = os.path.join(obj_dir, dir_path, file_name)
+
+ return path
+
+ def load(self, spec):
+ """
+ Find and return the template associated to a TemplateSpec instance.
+
+ Returns the template as a unicode string.
+
+ Arguments:
+
+ spec: a TemplateSpec instance.
+
+ """
+ if spec.template is not None:
+ return self.loader.unicode(spec.template, spec.template_encoding)
+
+ path = self._find(spec)
+
+ return self.loader.read(path, spec.template_encoding)
diff --git a/third_party/python/pystache/pystache/template_spec.py b/third_party/python/pystache/pystache/template_spec.py
new file mode 100644
index 0000000000..9e9f454c19
--- /dev/null
+++ b/third_party/python/pystache/pystache/template_spec.py
@@ -0,0 +1,53 @@
+# coding: utf-8
+
+"""
+Provides a class to customize template information on a per-view basis.
+
+To customize template properties for a particular view, create that view
+from a class that subclasses TemplateSpec. The "spec" in TemplateSpec
+stands for "special" or "specified" template information.
+
+"""
+
+class TemplateSpec(object):
+
+ """
+ A mixin or interface for specifying custom template information.
+
+ The "spec" in TemplateSpec can be taken to mean that the template
+ information is either "specified" or "special."
+
+ A view should subclass this class only if customized template loading
+ is needed. The following attributes allow one to customize/override
+ template information on a per view basis. A None value means to use
+ default behavior for that value and perform no customization. All
+ attributes are initialized to None.
+
+ Attributes:
+
+ template: the template as a string.
+
+ template_encoding: the encoding used by the template.
+
+ template_extension: the template file extension. Defaults to "mustache".
+ Pass False for no extension (i.e. extensionless template files).
+
+ template_name: the name of the template.
+
+ template_path: absolute path to the template.
+
+ template_rel_directory: the directory containing the template file,
+ relative to the directory containing the module defining the class.
+
+ template_rel_path: the path to the template file, relative to the
+ directory containing the module defining the class.
+
+ """
+
+ template = None
+ template_encoding = None
+ template_extension = None
+ template_name = None
+ template_path = None
+ template_rel_directory = None
+ template_rel_path = None
diff --git a/third_party/python/pystache/setup.py b/third_party/python/pystache/setup.py
new file mode 100644
index 0000000000..0d99aae8fb
--- /dev/null
+++ b/third_party/python/pystache/setup.py
@@ -0,0 +1,413 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+"""
+This script supports publishing Pystache to PyPI.
+
+This docstring contains instructions to Pystache maintainers on how
+to release a new version of Pystache.
+
+(1) Prepare the release.
+
+Make sure the code is finalized and merged to master. Bump the version
+number in setup.py, update the release date in the HISTORY file, etc.
+
+Generate the reStructuredText long_description using--
+
+ $ python setup.py prep
+
+and be sure this new version is checked in. You must have pandoc installed
+to do this step:
+
+ http://johnmacfarlane.net/pandoc/
+
+It helps to review this auto-generated file on GitHub prior to uploading
+because the long description will be sent to PyPI and appear there after
+publishing. PyPI attempts to convert this string to HTML before displaying
+it on the PyPI project page. If PyPI finds any issues, it will render it
+instead as plain-text, which we do not want.
+
+To check in advance that PyPI will accept and parse the reST file as HTML,
+you can use the rst2html program installed by the docutils package
+(http://docutils.sourceforge.net/). To install docutils:
+
+ $ pip install docutils
+
+To check the file, run the following command and confirm that it reports
+no warnings:
+
+ $ python setup.py --long-description | rst2html.py -v --no-raw > out.html
+
+See here for more information:
+
+ http://docs.python.org/distutils/uploading.html#pypi-package-display
+
+(2) Push to PyPI. To release a new version of Pystache to PyPI--
+
+ http://pypi.python.org/pypi/pystache
+
+create a PyPI user account if you do not already have one. The user account
+will need permissions to push to PyPI. A current "Package Index Owner" of
+Pystache can grant you those permissions.
+
+When you have permissions, run the following:
+
+ python setup.py publish
+
+If you get an error like the following--
+
+ Upload failed (401): You must be identified to edit package information
+
+then add a file called .pypirc to your home directory with the following
+contents:
+
+ [server-login]
+ username: <PyPI username>
+ password: <PyPI password>
+
+as described here, for example:
+
+ http://docs.python.org/release/2.5.2/dist/pypirc.html
+
+(3) Tag the release on GitHub. Here are some commands for tagging.
+
+List current tags:
+
+ git tag -l -n3
+
+Create an annotated tag:
+
+ git tag -a -m "Version 0.5.1" "v0.5.1"
+
+Push a tag to GitHub:
+
+ git push --tags defunkt v0.5.1
+
+"""
+
+import os
+import shutil
+import sys
+
+
+py_version = sys.version_info
+
+# distutils does not seem to support the following setup() arguments.
+# It displays a UserWarning when setup() is passed those options:
+#
+# * entry_points
+# * install_requires
+#
+# distribute works with Python 2.3.5 and above:
+#
+# http://packages.python.org/distribute/setuptools.html#building-and-distributing-packages-with-distribute
+#
+if py_version < (2, 3, 5):
+ # TODO: this might not work yet.
+ import distutils as dist
+ from distutils import core
+ setup = core.setup
+else:
+ import setuptools as dist
+ setup = dist.setup
+
+
+VERSION = '0.5.4' # Also change in pystache/__init__.py.
+
+FILE_ENCODING = 'utf-8'
+
+README_PATH = 'README.md'
+HISTORY_PATH = 'HISTORY.md'
+LICENSE_PATH = 'LICENSE'
+
+RST_DESCRIPTION_PATH = 'setup_description.rst'
+
+TEMP_EXTENSION = '.temp'
+
+PREP_COMMAND = 'prep'
+
+CLASSIFIERS = (
+ 'Development Status :: 4 - Beta',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.4',
+ 'Programming Language :: Python :: 2.5',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.1',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+)
+
+# Comments in reST begin with two dots.
+RST_LONG_DESCRIPTION_INTRO = """\
+.. Do not edit this file. This file is auto-generated for PyPI by setup.py
+.. using pandoc, so edits should go in the source files rather than here.
+"""
+
+
+def read(path):
+ """
+ Read and return the contents of a text file as a unicode string.
+
+ """
+ # This function implementation was chosen to be compatible across Python 2/3.
+ f = open(path, 'rb')
+ # We avoid use of the with keyword for Python 2.4 support.
+ try:
+ b = f.read()
+ finally:
+ f.close()
+
+ return b.decode(FILE_ENCODING)
+
+
+def write(u, path):
+ """
+ Write a unicode string to a file (as utf-8).
+
+ """
+ print("writing to: %s" % path)
+ # This function implementation was chosen to be compatible across Python 2/3.
+ f = open(path, "wb")
+ try:
+ b = u.encode(FILE_ENCODING)
+ f.write(b)
+ finally:
+ f.close()
+
+
+def make_temp_path(path, new_ext=None):
+ """
+ Arguments:
+
+ new_ext: the new file extension, including the leading dot.
+ Defaults to preserving the existing file extension.
+
+ """
+ root, ext = os.path.splitext(path)
+ if new_ext is None:
+ new_ext = ext
+ temp_path = root + TEMP_EXTENSION + new_ext
+ return temp_path
+
+
+def strip_html_comments(text):
+ """Strip HTML comments from a unicode string."""
+ lines = text.splitlines(True) # preserve line endings.
+
+ # Remove HTML comments (which we only allow to take a special form).
+ new_lines = filter(lambda line: not line.startswith("<!--"), lines)
+
+ return "".join(new_lines)
+
+
+# We write the converted file to a temp file to simplify debugging and
+# to avoid removing a valid pre-existing file on failure.
+def convert_md_to_rst(md_path, rst_temp_path):
+ """
+ Convert the contents of a file from Markdown to reStructuredText.
+
+ Returns the converted text as a Unicode string.
+
+ Arguments:
+
+ md_path: a path to a UTF-8 encoded Markdown file to convert.
+
+ rst_temp_path: a temporary path to which to write the converted contents.
+
+ """
+ # Pandoc uses the UTF-8 character encoding for both input and output.
+ command = "pandoc --write=rst --output=%s %s" % (rst_temp_path, md_path)
+ print("converting with pandoc: %s to %s\n-->%s" % (md_path, rst_temp_path,
+ command))
+
+ if os.path.exists(rst_temp_path):
+ os.remove(rst_temp_path)
+
+ os.system(command)
+
+ if not os.path.exists(rst_temp_path):
+ s = ("Error running: %s\n"
+ " Did you install pandoc per the %s docstring?" % (command,
+ __file__))
+ sys.exit(s)
+
+ return read(rst_temp_path)
+
+
+# The long_description needs to be formatted as reStructuredText.
+# See the following for more information:
+#
+# http://docs.python.org/distutils/setupscript.html#additional-meta-data
+# http://docs.python.org/distutils/uploading.html#pypi-package-display
+#
+def make_long_description():
+ """
+ Generate the reST long_description for setup() from source files.
+
+ Returns the generated long_description as a unicode string.
+
+ """
+ readme_path = README_PATH
+
+ # Remove our HTML comments because PyPI does not allow it.
+ # See the setup.py docstring for more info on this.
+ readme_md = strip_html_comments(read(readme_path))
+ history_md = strip_html_comments(read(HISTORY_PATH))
+ license_md = """\
+License
+=======
+
+""" + read(LICENSE_PATH)
+
+ sections = [readme_md, history_md, license_md]
+ md_description = '\n\n'.join(sections)
+
+ # Write the combined Markdown file to a temp path.
+ md_ext = os.path.splitext(readme_path)[1]
+ md_description_path = make_temp_path(RST_DESCRIPTION_PATH, new_ext=md_ext)
+ write(md_description, md_description_path)
+
+ rst_temp_path = make_temp_path(RST_DESCRIPTION_PATH)
+ long_description = convert_md_to_rst(md_path=md_description_path,
+ rst_temp_path=rst_temp_path)
+
+ return "\n".join([RST_LONG_DESCRIPTION_INTRO, long_description])
+
+
+def prep():
+ """Update the reST long_description file."""
+ long_description = make_long_description()
+ write(long_description, RST_DESCRIPTION_PATH)
+
+
+def publish():
+ """Publish this package to PyPI (aka "the Cheeseshop")."""
+ long_description = make_long_description()
+
+ if long_description != read(RST_DESCRIPTION_PATH):
+ print("""\
+Description file not up-to-date: %s
+Run the following command and commit the changes--
+
+ python setup.py %s
+""" % (RST_DESCRIPTION_PATH, PREP_COMMAND))
+ sys.exit()
+
+ print("Description up-to-date: %s" % RST_DESCRIPTION_PATH)
+
+ answer = raw_input("Are you sure you want to publish to PyPI (yes/no)?")
+
+ if answer != "yes":
+ exit("Aborted: nothing published")
+
+ os.system('python setup.py sdist upload')
+
+
+# We use the package simplejson for older Python versions since Python
+# does not contain the module json before 2.6:
+#
+# http://docs.python.org/library/json.html
+#
+# Moreover, simplejson stopped officially supporting Python 2.4 in version 2.1.0:
+#
+# https://github.com/simplejson/simplejson/blob/master/CHANGES.txt
+#
+requires = []
+if py_version < (2, 5):
+ requires.append('simplejson<2.1')
+elif py_version < (2, 6):
+ requires.append('simplejson')
+
+INSTALL_REQUIRES = requires
+
+# TODO: decide whether to use find_packages() instead. I'm not sure that
+# find_packages() is available with distutils, for example.
+PACKAGES = [
+ 'pystache',
+ 'pystache.commands',
+ # The following packages are only for testing.
+ 'pystache.tests',
+ 'pystache.tests.data',
+ 'pystache.tests.data.locator',
+ 'pystache.tests.examples',
+]
+
+
+# The purpose of this function is to follow the guidance suggested here:
+#
+# http://packages.python.org/distribute/python3.html#note-on-compatibility-with-setuptools
+#
+# The guidance is for better compatibility when using setuptools (e.g. with
+# earlier versions of Python 2) instead of Distribute, because of new
+# keyword arguments to setup() that setuptools may not recognize.
+def get_extra_args():
+ """
+ Return a dictionary of extra args to pass to setup().
+
+ """
+ extra = {}
+ # TODO: it might be more correct to check whether we are using
+ # Distribute instead of setuptools, since use_2to3 doesn't take
+ # effect when using Python 2, even when using Distribute.
+ if py_version >= (3, ):
+ # Causes 2to3 to be run during the build step.
+ extra['use_2to3'] = True
+
+ return extra
+
+
+def main(sys_argv):
+
+ # TODO: use the logging module instead of printing.
+ # TODO: include the following in a verbose mode.
+ sys.stderr.write("pystache: using: version %s of %s\n" % (repr(dist.__version__), repr(dist)))
+
+ command = sys_argv[-1]
+
+ if command == 'publish':
+ publish()
+ sys.exit()
+ elif command == PREP_COMMAND:
+ prep()
+ sys.exit()
+
+ long_description = read(RST_DESCRIPTION_PATH)
+ template_files = ['*.mustache', '*.txt']
+ extra_args = get_extra_args()
+
+ setup(name='pystache',
+ version=VERSION,
+ license='MIT',
+ description='Mustache for Python',
+ long_description=long_description,
+ author='Chris Wanstrath',
+ author_email='chris@ozmm.org',
+ maintainer='Chris Jerdonek',
+ maintainer_email='chris.jerdonek@gmail.com',
+ url='http://github.com/defunkt/pystache',
+ install_requires=INSTALL_REQUIRES,
+ packages=PACKAGES,
+ package_data = {
+ # Include template files so tests can be run.
+ 'pystache.tests.data': template_files,
+ 'pystache.tests.data.locator': template_files,
+ 'pystache.tests.examples': template_files,
+ },
+ entry_points = {
+ 'console_scripts': [
+ 'pystache=pystache.commands.render:main',
+ 'pystache-test=pystache.commands.test:main',
+ ],
+ },
+ classifiers = CLASSIFIERS,
+ **extra_args
+ )
+
+
+if __name__=='__main__':
+ main(sys.argv)
diff --git a/third_party/python/pystache/setup_description.rst b/third_party/python/pystache/setup_description.rst
new file mode 100644
index 0000000000..724c457233
--- /dev/null
+++ b/third_party/python/pystache/setup_description.rst
@@ -0,0 +1,513 @@
+.. Do not edit this file. This file is auto-generated for PyPI by setup.py
+.. using pandoc, so edits should go in the source files rather than here.
+
+Pystache
+========
+
+.. figure:: http://defunkt.github.com/pystache/images/logo_phillips.png
+ :alt: mustachioed, monocled snake by David Phillips
+
+.. figure:: https://secure.travis-ci.org/defunkt/pystache.png
+ :alt: Travis CI current build status
+
+`Pystache <http://defunkt.github.com/pystache>`__ is a Python
+implementation of `Mustache <http://mustache.github.com/>`__. Mustache
+is a framework-agnostic, logic-free templating system inspired by
+`ctemplate <http://code.google.com/p/google-ctemplate/>`__ and
+`et <http://www.ivan.fomichev.name/2008/05/erlang-template-engine-prototype.html>`__.
+Like ctemplate, Mustache "emphasizes separating logic from presentation:
+it is impossible to embed application logic in this template language."
+
+The `mustache(5) <http://mustache.github.com/mustache.5.html>`__ man
+page provides a good introduction to Mustache's syntax. For a more
+complete (and more current) description of Mustache's behavior, see the
+official `Mustache spec <https://github.com/mustache/spec>`__.
+
+Pystache is `semantically versioned <http://semver.org>`__ and can be
+found on `PyPI <http://pypi.python.org/pypi/pystache>`__. This version
+of Pystache passes all tests in `version
+1.1.2 <https://github.com/mustache/spec/tree/v1.1.2>`__ of the spec.
+
+Requirements
+------------
+
+Pystache is tested with--
+
+- Python 2.4 (requires simplejson `version
+ 2.0.9 <http://pypi.python.org/pypi/simplejson/2.0.9>`__ or earlier)
+- Python 2.5 (requires
+ `simplejson <http://pypi.python.org/pypi/simplejson/>`__)
+- Python 2.6
+- Python 2.7
+- Python 3.1
+- Python 3.2
+- Python 3.3
+- `PyPy <http://pypy.org/>`__
+
+`Distribute <http://packages.python.org/distribute/>`__ (the setuptools
+fork) is recommended over
+`setuptools <http://pypi.python.org/pypi/setuptools>`__, and is required
+in some cases (e.g. for Python 3 support). If you use
+`pip <http://www.pip-installer.org/>`__, you probably already satisfy
+this requirement.
+
+JSON support is needed only for the command-line interface and to run
+the spec tests. We require simplejson for earlier versions of Python
+since Python's `json <http://docs.python.org/library/json.html>`__
+module was added in Python 2.6.
+
+For Python 2.4 we require an earlier version of simplejson since
+simplejson stopped officially supporting Python 2.4 in simplejson
+version 2.1.0. Earlier versions of simplejson can be installed manually,
+as follows:
+
+::
+
+ pip install 'simplejson<2.1.0'
+
+Official support for Python 2.4 will end with Pystache version 0.6.0.
+
+Install It
+----------
+
+::
+
+ pip install pystache
+
+And test it--
+
+::
+
+ pystache-test
+
+To install and test from source (e.g. from GitHub), see the Develop
+section.
+
+Use It
+------
+
+::
+
+ >>> import pystache
+ >>> print pystache.render('Hi {{person}}!', {'person': 'Mom'})
+ Hi Mom!
+
+You can also create dedicated view classes to hold your view logic.
+
+Here's your view class (in .../examples/readme.py):
+
+::
+
+ class SayHello(object):
+ def to(self):
+ return "Pizza"
+
+Instantiating like so:
+
+::
+
+ >>> from pystache.tests.examples.readme import SayHello
+ >>> hello = SayHello()
+
+Then your template, say\_hello.mustache (by default in the same
+directory as your class definition):
+
+::
+
+ Hello, {{to}}!
+
+Pull it together:
+
+::
+
+ >>> renderer = pystache.Renderer()
+ >>> print renderer.render(hello)
+ Hello, Pizza!
+
+For greater control over rendering (e.g. to specify a custom template
+directory), use the ``Renderer`` class like above. One can pass
+attributes to the Renderer class constructor or set them on a Renderer
+instance. To customize template loading on a per-view basis, subclass
+``TemplateSpec``. See the docstrings of the
+`Renderer <https://github.com/defunkt/pystache/blob/master/pystache/renderer.py>`__
+class and
+`TemplateSpec <https://github.com/defunkt/pystache/blob/master/pystache/template_spec.py>`__
+class for more information.
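+
+For example, a custom template directory can be given to the constructor
+(a sketch; ``views`` is a hypothetical directory assumed to contain the
+``say_hello.mustache`` template from above):
+
+::
+
+ >>> renderer = pystache.Renderer(search_dirs=['views'])
+ >>> print renderer.render_name('say_hello', {'to': 'Pizza'})
+ Hello, Pizza!
+
+And a view class can carry its template inline by subclassing
+``TemplateSpec`` (``Goodbye`` is likewise a made-up class):
+
+::
+
+ class Goodbye(pystache.TemplateSpec):
+ template = u'Goodbye, {{to}}!'
+
+Rendering an instance then uses the inline template:
+
+::
+
+ >>> print renderer.render(Goodbye(), {'to': 'Pizza'})
+ Goodbye, Pizza!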
+
+You can also pre-parse a template:
+
+::
+
+ >>> parsed = pystache.parse(u"Hey {{#who}}{{.}}!{{/who}}")
+ >>> print parsed
+ [u'Hey ', _SectionNode(key=u'who', index_begin=12, index_end=18, parsed=[_EscapeNode(key=u'.'), u'!'])]
+
+And then:
+
+::
+
+ >>> print renderer.render(parsed, {'who': 'Pops'})
+ Hey Pops!
+ >>> print renderer.render(parsed, {'who': 'you'})
+ Hey you!
+
+Python 3
+--------
+
+Pystache has supported Python 3 since version 0.5.1. Pystache behaves
+slightly differently between Python 2 and 3, as follows:
+
+- In Python 2, the default html-escape function ``cgi.escape()`` does
+ not escape single quotes. In Python 3, the default escape function
+ ``html.escape()`` does escape single quotes.
+- In both Python 2 and 3, the string and file encodings default to
+ ``sys.getdefaultencoding()``. However, this function can return
+ different values under Python 2 and 3, even when run from the same
+ system. Check your own system for the behavior on your system, or do
+ not rely on the defaults by passing in the encodings explicitly (e.g.
+ to the ``Renderer`` class).
+
+Unicode
+-------
+
+This section describes how Pystache handles unicode, strings, and
+encodings.
+
+Internally, Pystache uses `only unicode
+strings <http://docs.python.org/howto/unicode.html#tips-for-writing-unicode-aware-programs>`__
+(``str`` in Python 3 and ``unicode`` in Python 2). For input, Pystache
+accepts both unicode strings and byte strings (``bytes`` in Python 3 and
+``str`` in Python 2). For output, Pystache's template rendering methods
+return only unicode.
+
+Pystache's ``Renderer`` class supports a number of attributes to control
+how Pystache converts byte strings to unicode on input. These include
+the ``file_encoding``, ``string_encoding``, and ``decode_errors``
+attributes.
+
+The ``file_encoding`` attribute is the encoding the renderer uses to
+convert to unicode any files read from the file system. Similarly,
+``string_encoding`` is the encoding the renderer uses to convert any
+other byte strings encountered during the rendering process into unicode
+(e.g. context values that are encoded byte strings).
+
+The ``decode_errors`` attribute is what the renderer passes as the
+``errors`` argument to Python's built-in unicode-decoding function
+(``str()`` in Python 3 and ``unicode()`` in Python 2). The valid values
+for this argument are ``strict``, ``ignore``, and ``replace``.
+
+Each of these attributes can be set via the ``Renderer`` class's
+constructor using a keyword argument of the same name. See the Renderer
+class's docstrings for further details. In addition, the
+``file_encoding`` attribute can be controlled on a per-view basis by
+subclassing the ``TemplateSpec`` class. When not specified explicitly,
+these attributes default to values set in Pystache's ``defaults``
+module.
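+
+For example, a sketch that sets all three keyword arguments described
+above (the values are illustrative; in Python 2, the plain context value
+below is a byte string that gets decoded using ``string_encoding``)--
+
+::
+
+    >>> renderer = pystache.Renderer(file_encoding='utf-8',
+    ...                              string_encoding='utf-8',
+    ...                              decode_errors='replace')
+    >>> print renderer.render('Hi {{person}}!', {'person': 'Mom'})
+    Hi Mom!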
+
+Develop
+-------
+
+To test from a source distribution (without installing)--
+
+::
+
+ python test_pystache.py
+
+To test Pystache with multiple versions of Python (with a single
+command!), you can use `tox <http://pypi.python.org/pypi/tox>`__:
+
+::
+
+ pip install 'virtualenv<1.8' # Version 1.8 dropped support for Python 2.4.
+ pip install 'tox<1.4' # Version 1.4 dropped support for Python 2.4.
+ tox
+
+If you do not have all Python versions listed in ``tox.ini``--
+
+::
+
+ tox -e py26,py32 # for example
+
+The source distribution tests also include doctests and tests from the
+Mustache spec. To include tests from the Mustache spec in your test
+runs:
+
+::
+
+ git submodule init
+ git submodule update
+
+The test harness parses the spec's (more human-readable) yaml files if
+`PyYAML <http://pypi.python.org/pypi/PyYAML>`__ is present. Otherwise,
+it parses the json files. To install PyYAML--
+
+::
+
+ pip install pyyaml
+
+To run a subset of the tests, you can use
+`nose <http://somethingaboutorange.com/mrl/projects/nose/0.11.1/testing.html>`__:
+
+::
+
+ pip install nose
+ nosetests --tests pystache/tests/test_context.py:GetValueTests.test_dictionary__key_present
+
+Using Python 3 with Pystache from source
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Pystache is written in Python 2 and must be converted to Python 3 prior
+to using it with Python 3. The installation process (and tox) do this
+automatically.
+
+To convert the code to Python 3 manually (while using Python 3)--
+
+::
+
+ python setup.py build
+
+This writes the converted code to a subdirectory called ``build``. By
+design, Python 3 builds
+`cannot <https://bitbucket.org/tarek/distribute/issue/292/allow-use_2to3-with-python-2>`__
+be created from Python 2.
+
+To convert the code without using setup.py, you can use
+`2to3 <http://docs.python.org/library/2to3.html>`__ as follows (two
+steps)--
+
+::
+
+ 2to3 --write --nobackups --no-diffs --doctests_only pystache
+ 2to3 --write --nobackups --no-diffs pystache
+
+This converts the code (and doctests) in place.
+
+To ``import pystache`` from a source distribution while using Python 3,
+be sure that you are importing from a directory containing a converted
+version of the code (e.g. from the ``build`` directory after
+converting), and not from the original (unconverted) source directory.
+Otherwise, you will get a syntax error. You can help prevent this by not
+running the Python interpreter from the project directory when importing
+Pystache while using Python 3.
+
+Mailing List
+------------
+
+There is a `mailing list <http://librelist.com/browser/pystache/>`__.
+Note that there is a bit of a delay between posting a message and seeing
+it appear in the mailing list archive.
+
+Credits
+-------
+
+::
+
+ >>> context = { 'author': 'Chris Wanstrath', 'maintainer': 'Chris Jerdonek' }
+ >>> print pystache.render("Author: {{author}}\nMaintainer: {{maintainer}}", context)
+ Author: Chris Wanstrath
+ Maintainer: Chris Jerdonek
+
+Pystache logo by `David Phillips <http://davidphillips.us/>`__ is
+licensed under a `Creative Commons Attribution-ShareAlike 3.0 Unported
+License <http://creativecommons.org/licenses/by-sa/3.0/deed.en_US>`__.
+|image0|
+
+History
+=======
+
+**Note:** Official support for Python 2.4 will end with Pystache version
+0.6.0.
+
+0.5.4 (2014-07-11)
+------------------
+
+- Bugfix: made test with filenames OS agnostic (issue #162).
+
+0.5.3 (2012-11-03)
+------------------
+
+- Added ability to customize string coercion (e.g. to have None render
+ as ``''``) (issue #130).
+- Added Renderer.render\_name() to render a template by name (issue
+ #122).
+- Added TemplateSpec.template\_path to specify an absolute path to a
+ template (issue #41).
+- Added option of raising errors on missing tags/partials:
+ ``Renderer(missing_tags='strict')`` (issue #110).
+- Added support for finding and loading templates by file name in
+ addition to by template name (issue #127). [xgecko]
+- Added a ``parse()`` function that yields a printable, pre-compiled
+ parse tree.
+- Added support for rendering pre-compiled templates.
+- Added Python 3.3 to the list of supported versions.
+- Added support for `PyPy <http://pypy.org/>`__ (issue #125).
+- Added support for `Travis CI <http://travis-ci.org>`__ (issue #124).
+ [msabramo]
+- Bugfix: ``defaults.DELIMITERS`` can now be changed at runtime (issue
+ #135). [bennoleslie]
+- Bugfix: exceptions raised from a property are no longer swallowed
+ when getting a key from a context stack (issue #110).
+- Bugfix: lambda section values can now return non-ascii, non-unicode
+ strings (issue #118).
+- Bugfix: allow ``test_pystache.py`` and ``tox`` to pass when run from
+ a downloaded sdist (i.e. without the spec test directory).
+- Convert HISTORY and README files from reST to Markdown.
+- More robust handling of byte strings in Python 3.
+- Added Creative Commons license for David Phillips's logo.
+
+0.5.2 (2012-05-03)
+------------------
+
+- Added support for dot notation and version 1.1.2 of the spec (issue
+ #99). [rbp]
+- Missing partials now render as empty string per latest version of
+ spec (issue #115).
+- Bugfix: falsey values now coerced to strings using str().
+- Bugfix: lambda return values for sections no longer pushed onto
+ context stack (issue #113).
+- Bugfix: lists of lambdas for sections were not rendered (issue #114).
+
+0.5.1 (2012-04-24)
+------------------
+
+- Added support for Python 3.1 and 3.2.
+- Added tox support to test multiple Python versions.
+- Added test script entry point: pystache-test.
+- Added \_\_version\_\_ package attribute.
+- Test harness now supports both YAML and JSON forms of Mustache spec.
+- Test harness no longer requires nose.
+
+0.5.0 (2012-04-03)
+------------------
+
+This version represents a major rewrite and refactoring of the code base
+that also adds features and fixes many bugs. All functionality and
+nearly all unit tests have been preserved. However, some backwards
+incompatible changes to the API have been made.
+
+Below is a selection of some of the changes (not exhaustive).
+
+Highlights:
+
+- Pystache now passes all tests in version 1.0.3 of the `Mustache
+ spec <https://github.com/mustache/spec>`__. [pvande]
+- Removed View class: it is no longer necessary to subclass from View
+ or from any other class to create a view.
+- Replaced Template with Renderer class: template rendering behavior
+ can be modified via the Renderer constructor or by setting attributes
+ on a Renderer instance.
+- Added TemplateSpec class: template rendering can be specified on a
+ per-view basis by subclassing from TemplateSpec.
+- Introduced separation of concerns and removed circular dependencies
+ (e.g. between Template and View classes, cf. `issue
+ #13 <https://github.com/defunkt/pystache/issues/13>`__).
+- Unicode now used consistently throughout the rendering process.
+- Expanded test coverage: nosetests now runs doctests and ~105 test
+ cases from the Mustache spec (increasing the number of tests from 56
+ to ~315).
+- Added a rudimentary benchmarking script to gauge performance while
+ refactoring.
+- Extensive documentation added (e.g. docstrings).
+
+Other changes:
+
+- Added a command-line interface. [vrde]
+- The main rendering class now accepts a custom partial loader (e.g. a
+ dictionary) and a custom escape function.
+- Non-ascii characters in str strings are now supported while
+ rendering.
+- Added string encoding, file encoding, and errors options for decoding
+ to unicode.
+- Removed the output encoding option.
+- Removed the use of markupsafe.
+
+Bug fixes:
+
+- Context values no longer processed as template strings.
+ [jakearchibald]
+- Whitespace surrounding sections is no longer altered, per the spec.
+ [heliodor]
+- Zeroes now render correctly when using PyPy. [alex]
+- Multiline comments now permitted. [fczuardi]
+- Extensionless template files are now supported.
+- Passing ``**kwargs`` to ``Template()`` no longer modifies the
+ context.
+- Passing ``**kwargs`` to ``Template()`` with no context no longer
+ raises an exception.
+
+0.4.1 (2012-03-25)
+------------------
+
+- Added support for Python 2.4. [wangtz, jvantuyl]
+
+0.4.0 (2011-01-12)
+------------------
+
+- Add support for nested contexts (within template and view)
+- Add support for inverted lists
+- Decoupled template loading
+
+0.3.1 (2010-05-07)
+------------------
+
+- Fix package
+
+0.3.0 (2010-05-03)
+------------------
+
+- View.template\_path can now hold a list of paths
+- Add {{& blah}} as an alias for {{{ blah }}}
+- Higher Order Sections
+- Inverted sections
+
+0.2.0 (2010-02-15)
+------------------
+
+- Bugfix: Methods returning False or None are not rendered
+- Bugfix: Don't render an empty string when a tag's value is 0.
+ [enaeseth]
+- Add support for using non-callables as View attributes.
+ [joshthecoder]
+- Allow using View instances as attributes. [joshthecoder]
+- Support for Unicode and non-ASCII-encoded bytestring output.
+ [enaeseth]
+- Template file encoding awareness. [enaeseth]
+
+0.1.1 (2009-11-13)
+------------------
+
+- Ensure we're dealing with strings, always
+- Tests can be run by executing the test file directly
+
+0.1.0 (2009-11-12)
+------------------
+
+- First release
+
+License
+=======
+
+Copyright (C) 2012 Chris Jerdonek. All rights reserved.
+
+Copyright (c) 2009 Chris Wanstrath
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+.. |image0| image:: http://i.creativecommons.org/l/by-sa/3.0/88x31.png
diff --git a/third_party/python/pystache/test_pystache.py b/third_party/python/pystache/test_pystache.py
new file mode 100644
index 0000000000..9a1a3ca26d
--- /dev/null
+++ b/third_party/python/pystache/test_pystache.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+"""
+Runs project tests.
+
+This script is a substitute for running--
+
+ python -m pystache.commands.test
+
+It is useful in Python 2.4 because the -m flag does not accept subpackages
+in Python 2.4:
+
+ http://docs.python.org/using/cmdline.html#cmdoption-m
+
+"""
+
+import sys
+
+from pystache.commands import test
+from pystache.tests.main import FROM_SOURCE_OPTION
+
+
+def main(sys_argv=sys.argv):
+ sys.argv.insert(1, FROM_SOURCE_OPTION)
+ test.main()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/python/pystache/tox.ini b/third_party/python/pystache/tox.ini
new file mode 100644
index 0000000000..d1eaebfbfc
--- /dev/null
+++ b/third_party/python/pystache/tox.ini
@@ -0,0 +1,36 @@
+# A tox configuration file to test across multiple Python versions.
+#
+# http://pypi.python.org/pypi/tox
+#
+[tox]
+# Tox 1.4 drops py24 and adds py33. In the current version, we want to
+# support 2.4, so we can't simultaneously support 3.3.
+envlist = py24,py25,py26,py27,py27-yaml,py27-noargs,py31,py32,pypy
+
+[testenv]
+# Change the working directory so that we don't import the pystache located
+# in the original location.
+changedir =
+ {envbindir}
+commands =
+ pystache-test {toxinidir}
+
+# Check that the spec tests work with PyYAML.
+[testenv:py27-yaml]
+basepython =
+ python2.7
+deps =
+ PyYAML
+changedir =
+ {envbindir}
+commands =
+ pystache-test {toxinidir}
+
+# Check that pystache-test works from an install with no arguments.
+[testenv:py27-noargs]
+basepython =
+ python2.7
+changedir =
+ {envbindir}
+commands =
+ pystache-test
diff --git a/third_party/python/pytest/.coveragerc b/third_party/python/pytest/.coveragerc
new file mode 100644
index 0000000000..61ff66749d
--- /dev/null
+++ b/third_party/python/pytest/.coveragerc
@@ -0,0 +1,4 @@
+[run]
+omit =
+    # standalonetemplate is read dynamically and tested by test_genscript
+ *standalonetemplate.py
diff --git a/third_party/python/pytest/.github/ISSUE_TEMPLATE.md b/third_party/python/pytest/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000000..fbcbb16fc3
--- /dev/null
+++ b/third_party/python/pytest/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,8 @@
+Thanks for submitting an issue!
+
+Here's a quick checklist of what to include:
+
+- [ ] Include a detailed description of the bug or suggestion
+- [ ] `pip list` of the virtual environment you are using
+- [ ] pytest and operating system versions
+- [ ] Minimal example if possible
diff --git a/third_party/python/pytest/.github/PULL_REQUEST_TEMPLATE.md b/third_party/python/pytest/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000..23a9f8c568
--- /dev/null
+++ b/third_party/python/pytest/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,14 @@
+Thanks for submitting a PR, your contribution is really appreciated!
+
+Here's a quick checklist that should be present in PRs (you can delete this text from the final description, this is
+just a guideline):
+
+- [ ] Create a new changelog file in the `changelog` folder, with a name like `<ISSUE NUMBER>.<TYPE>.rst`. See [changelog/README.rst](/changelog/README.rst) for details.
+- [ ] Target the `master` branch for bug fixes, documentation updates and trivial changes.
+- [ ] Target the `features` branch for new features and removals/deprecations.
+- [ ] Include documentation when adding new features.
+- [ ] Include new tests or update existing tests when applicable.
+
+Unless your change is trivial or a small documentation fix (e.g., a typo or reword of a small section) please:
+
+- [ ] Add yourself to `AUTHORS` in alphabetical order;
diff --git a/third_party/python/pytest/.gitignore b/third_party/python/pytest/.gitignore
new file mode 100644
index 0000000000..afb6bf9fd3
--- /dev/null
+++ b/third_party/python/pytest/.gitignore
@@ -0,0 +1,40 @@
+# Automatically generated by `hgimportsvn`
+.svn
+.hgsvn
+
+# Ignore local virtualenvs
+lib/
+bin/
+include/
+.Python/
+
+# These lines are suggested according to the svn:ignore property
+# Feel free to enable them by uncommenting them
+*.pyc
+*.pyo
+*.swp
+*.class
+*.orig
+*~
+.hypothesis/
+
+# autogenerated
+src/_pytest/_version.py
+# setuptools
+.eggs/
+
+doc/*/_build
+build/
+dist/
+*.egg-info
+issue/
+env/
+.env/
+3rdparty/
+.tox
+.cache
+.pytest_cache
+.coverage
+.ropeproject
+.idea
+.hypothesis
diff --git a/third_party/python/pytest/.pre-commit-config.yaml b/third_party/python/pytest/.pre-commit-config.yaml
new file mode 100644
index 0000000000..e50891bbc1
--- /dev/null
+++ b/third_party/python/pytest/.pre-commit-config.yaml
@@ -0,0 +1,36 @@
+exclude: doc/en/example/py2py3/test_py2.py
+repos:
+- repo: https://github.com/ambv/black
+ rev: 18.4a4
+ hooks:
+ - id: black
+ args: [--safe, --quiet]
+ language_version: python3.6
+- repo: https://github.com/asottile/blacken-docs
+ rev: v0.1.1
+ hooks:
+ - id: blacken-docs
+ additional_dependencies: [black==18.5b1]
+ language_version: python3.6
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v1.2.3
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+ - id: debug-statements
+ exclude: _pytest/debugging.py
+ - id: flake8
+- repo: https://github.com/asottile/pyupgrade
+ rev: v1.2.0
+ hooks:
+ - id: pyupgrade
+- repo: local
+ hooks:
+ - id: rst
+ name: rst
+ entry: rst-lint --encoding utf-8
+ files: ^(CHANGELOG.rst|HOWTORELEASE.rst|README.rst|changelog/.*)$
+ language: python
+ additional_dependencies: [pygments, restructuredtext_lint]
+ python_version: python3.6
diff --git a/third_party/python/pytest/.travis.yml b/third_party/python/pytest/.travis.yml
new file mode 100644
index 0000000000..1d092149b5
--- /dev/null
+++ b/third_party/python/pytest/.travis.yml
@@ -0,0 +1,83 @@
+sudo: false
+language: python
+stages:
+- linting
+- test
+- deploy
+python:
+ - '3.6'
+install:
+ - pip install --upgrade --pre tox
+env:
+ matrix:
+ # coveralls is not listed in tox's envlist, but should run in travis
+ - TOXENV=coveralls
+ # note: please use "tox --listenvs" to populate the build matrix below
+ # please remove the linting env in all cases
+ - TOXENV=py27
+ - TOXENV=py34
+ - TOXENV=py36
+ - TOXENV=py27-pexpect
+ - TOXENV=py27-xdist
+ - TOXENV=py27-trial
+ - TOXENV=py27-numpy
+ - TOXENV=py27-pluggymaster
+ - TOXENV=py36-pexpect
+ - TOXENV=py36-xdist
+ - TOXENV=py36-trial
+ - TOXENV=py36-numpy
+ - TOXENV=py36-pluggymaster
+ - TOXENV=py27-nobyte
+ - TOXENV=doctesting
+ - TOXENV=docs
+
+jobs:
+ include:
+ - env: TOXENV=pypy
+ python: 'pypy-5.4'
+ - env: TOXENV=py35
+ python: '3.5'
+ - env: TOXENV=py35-freeze
+ python: '3.5'
+ - env: TOXENV=py37
+ python: 'nightly'
+
+ - stage: deploy
+ python: '3.6'
+ env:
+ install: pip install -U setuptools setuptools_scm
+ script: skip
+ deploy:
+ provider: pypi
+ user: nicoddemus
+ distributions: sdist bdist_wheel
+ skip_upload_docs: true
+ password:
+ secure: xanTgTUu6XDQVqB/0bwJQXoDMnU5tkwZc5koz6mBkkqZhKdNOi2CLoC1XhiSZ+ah24l4V1E0GAqY5kBBcy9d7NVe4WNg4tD095LsHw+CRU6/HCVIFfyk2IZ+FPAlguesCcUiJSXOrlBF+Wj68wEvLoK7EoRFbJeiZ/f91Ww1sbtDlqXABWGHrmhPJL5Wva7o7+wG7JwJowqdZg1pbQExsCc7b53w4v2RBu3D6TJaTAzHiVsW+nUSI67vKI/uf+cR/OixsTfy37wlHgSwihYmrYLFls3V0bSpahCim3bCgMaFZx8S8xrdgJ++PzBCof2HeflFKvW+VCkoYzGEG4NrTWJoNz6ni4red9GdvfjGH3YCjAKS56h9x58zp2E5rpsb/kVq5/45xzV+dq6JRuhQ1nJWjBC6fSKAc/bfwnuFK3EBxNLkvBssLHvsNjj5XG++cB8DdS9wVGUqjpoK4puaXUWFqy4q3S9F86HEsKNgExtieA9qNx+pCIZVs6JCXZNjr0I5eVNzqJIyggNgJG6RyravsU35t9Zd9doL5g4Y7UKmAGTn1Sz24HQ4sMQgXdm2SyD8gEK5je4tlhUvfGtDvMSlstq71kIn9nRpFnqB6MFlbYSEAZmo8dGbCquoUc++6Rum208wcVbrzzVtGlXB/Ow9AbFMYeAGA0+N/K1e59c=
+ on:
+ tags: true
+ repo: pytest-dev/pytest
+ - stage: linting
+ python: '3.6'
+ env:
+ install:
+ - pip install pre-commit
+ - pre-commit install-hooks
+ script:
+ - pre-commit run --all-files
+
+script: tox --recreate
+
+notifications:
+ irc:
+ channels:
+ - "chat.freenode.net#pytest"
+ on_success: change
+ on_failure: change
+ skip_join: true
+ email:
+ - pytest-commit@python.org
+cache:
+ directories:
+ - $HOME/.cache/pip
+ - $HOME/.cache/pre-commit
diff --git a/third_party/python/pytest/AUTHORS b/third_party/python/pytest/AUTHORS
new file mode 100644
index 0000000000..3edfdcf85e
--- /dev/null
+++ b/third_party/python/pytest/AUTHORS
@@ -0,0 +1,213 @@
+Holger Krekel, holger at merlinux eu
+merlinux GmbH, Germany, office at merlinux eu
+
+Contributors include::
+
+Aaron Coleman
+Abdeali JK
+Abhijeet Kasurde
+Ahn Ki-Wook
+Alan Velasco
+Alexander Johnson
+Alexei Kozlenok
+Anatoly Bubenkoff
+Anders Hovmöller
+Andras Tim
+Andreas Zeidler
+Andrzej Ostrowski
+Andy Freeland
+Anthon van der Neut
+Anthony Shaw
+Anthony Sottile
+Antony Lee
+Armin Rigo
+Aron Coyle
+Aron Curzon
+Aviral Verma
+Aviv Palivoda
+Barney Gale
+Ben Webb
+Benjamin Peterson
+Bernard Pratz
+Bob Ippolito
+Brian Dorsey
+Brian Maissy
+Brian Okken
+Brianna Laugher
+Bruno Oliveira
+Cal Leeming
+Carl Friedrich Bolz
+Carlos Jenkins
+Ceridwen
+Charles Cloud
+Charnjit SiNGH (CCSJ)
+Chris Lamb
+Christian Boelsen
+Christian Theunert
+Christian Tismer
+Christopher Gilling
+Cyrus Maden
+Daniel Grana
+Daniel Hahler
+Daniel Nuri
+Daniel Wandschneider
+Danielle Jenkins
+Dave Hunt
+David Díaz-Barquero
+David Mohr
+David Vierra
+Daw-Ran Liou
+Denis Kirisov
+Diego Russo
+Dmitry Dygalo
+Dmitry Pribysh
+Duncan Betts
+Edison Gustavo Muenz
+Edoardo Batini
+Eduardo Schettino
+Eli Boyarski
+Elizaveta Shashkova
+Endre Galaczi
+Eric Hunsberger
+Eric Siegerman
+Erik M. Bray
+Feng Ma
+Florian Bruhin
+Floris Bruynooghe
+Gabriel Reis
+George Kussumoto
+Georgy Dyuldin
+Graham Horler
+Greg Price
+Grig Gheorghiu
+Grigorii Eremeev (budulianin)
+Guido Wesdorp
+Guoqiang Zhang
+Harald Armin Massa
+Henk-Jaap Wagenaar
+Hugo van Kemenade
+Hui Wang (coldnight)
+Ian Bicking
+Ian Lesperance
+Jaap Broekhuizen
+Jan Balster
+Janne Vanhala
+Jason R. Coombs
+Javier Domingo Cansino
+Javier Romero
+Jeff Rackauckas
+Jeff Widman
+John Eddie Ayson
+John Towler
+Jon Sonesen
+Jonas Obrist
+Jordan Guymon
+Jordan Moldow
+Jordan Speicher
+Joshua Bronson
+Jurko Gospodnetić
+Justyna Janczyszyn
+Kale Kundert
+Katarzyna Jachim
+Katerina Koukiou
+Kevin Cox
+Kodi B. Arfer
+Kostis Anagnostopoulos
+Lawrence Mitchell
+Lee Kamentsky
+Lev Maximov
+Llandy Riveron Del Risco
+Loic Esteve
+Lukas Bednar
+Luke Murphy
+Maciek Fijalkowski
+Maho
+Maik Figura
+Mandeep Bhutani
+Manuel Krebber
+Marc Schlaich
+Marcin Bachry
+Mark Abramowitz
+Markus Unterwaditzer
+Martijn Faassen
+Martin Altmayer
+Martin K. Scherer
+Martin Prusse
+Mathieu Clabaut
+Matt Bachmann
+Matt Duck
+Matt Williams
+Matthias Hafner
+Maxim Filipenko
+mbyt
+Michael Aquilina
+Michael Birtwell
+Michael Droettboom
+Michael Seifert
+Michal Wajszczuk
+Mihai Capotă
+Mike Lundy
+Miro Hrončok
+Nathaniel Waisbrot
+Ned Batchelder
+Neven Mundar
+Nicolas Delaby
+Oleg Pidsadnyi
+Oleg Sushchenko
+Oliver Bestwalter
+Omar Kohl
+Omer Hadari
+Patrick Hayes
+Paweł Adamczak
+Pedro Algarvio
+Pieter Mulder
+Piotr Banaszkiewicz
+Punyashloka Biswal
+Quentin Pradet
+Ralf Schmitt
+Ran Benita
+Raphael Castaneda
+Raphael Pierzina
+Raquel Alegre
+Ravi Chandra
+Roberto Polli
+Romain Dorgueil
+Roman Bolshakov
+Ronny Pfannschmidt
+Ross Lawley
+Russel Winder
+Ryan Wooden
+Samuel Dion-Girardeau
+Samuele Pedroni
+Segev Finer
+Simon Gomizelj
+Skylar Downes
+Srinivas Reddy Thatiparthy
+Stefan Farmbauer
+Stefan Zimmermann
+Stefano Taschini
+Steffen Allner
+Stephan Obermann
+Tarcisio Fischer
+Tareq Alayan
+Ted Xiao
+Thomas Grainger
+Thomas Hisch
+Tim Strazny
+Tom Dalton
+Tom Viner
+Trevor Bekolay
+Tyler Goodlet
+Tzu-ping Chung
+Vasily Kuznetsov
+Victor Uriarte
+Vidar T. Fauske
+Vitaly Lashmanov
+Vlad Dragos
+William Lee
+Wouter van Ackooy
+Xuan Luong
+Xuecong Liao
+Zoltán Máté
+Roland Puntaier
+Allan Feldman
diff --git a/third_party/python/pytest/CHANGELOG.rst b/third_party/python/pytest/CHANGELOG.rst
new file mode 100644
index 0000000000..21a090414a
--- /dev/null
+++ b/third_party/python/pytest/CHANGELOG.rst
@@ -0,0 +1,4883 @@
+..
+    You should *NOT* be adding new change log entries to this file; this
+    file is managed by towncrier. You *may* edit previous change logs to
+    fix problems like typo corrections or such.
+    To add a new change log entry, please see
+    https://pip.pypa.io/en/latest/development/#adding-a-news-entry
+    (we named the news folder ``changelog``).
+
+.. towncrier release notes start
+
+Pytest 3.6.2 (2018-06-20)
+=========================
+
+Bug Fixes
+---------
+
+- Fix regression in ``Node.add_marker`` by extracting the mark object of a
+ ``MarkDecorator``. (`#3555
+ <https://github.com/pytest-dev/pytest/issues/3555>`_)
+
+- Warnings without ``location`` were reported as ``None``. This is corrected to
+ now report ``<undetermined location>``. (`#3563
+ <https://github.com/pytest-dev/pytest/issues/3563>`_)
+
+- Continue to call finalizers in the stack when a finalizer in a former scope
+ raises an exception. (`#3569
+ <https://github.com/pytest-dev/pytest/issues/3569>`_)
+
+- Fix encoding error with `print` statements in doctests (`#3583
+ <https://github.com/pytest-dev/pytest/issues/3583>`_)
+
+
+Improved Documentation
+----------------------
+
+- Add documentation for the ``--strict`` flag. (`#3549
+ <https://github.com/pytest-dev/pytest/issues/3549>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Update old quotation style to parens in fixture.rst documentation. (`#3525
+ <https://github.com/pytest-dev/pytest/issues/3525>`_)
+
+- Improve display of hint about ``--fulltrace`` with ``KeyboardInterrupt``.
+ (`#3545 <https://github.com/pytest-dev/pytest/issues/3545>`_)
+
+- pytest's testsuite is no longer runnable through ``python setup.py test`` --
+ instead invoke ``pytest`` or ``tox`` directly. (`#3552
+ <https://github.com/pytest-dev/pytest/issues/3552>`_)
+
+- Fix typo in documentation (`#3567
+ <https://github.com/pytest-dev/pytest/issues/3567>`_)
+
+
+Pytest 3.6.1 (2018-06-05)
+=========================
+
+Bug Fixes
+---------
+
+- Fixed a bug where stdout and stderr were logged twice by junitxml when a test
+ was marked xfail. (`#3491
+ <https://github.com/pytest-dev/pytest/issues/3491>`_)
+
+- Fix ``usefixtures`` mark applied to unittest tests by correctly instantiating
+ ``FixtureInfo``. (`#3498
+ <https://github.com/pytest-dev/pytest/issues/3498>`_)
+
+- Fix assertion rewriter compatibility with libraries that monkey patch
+ ``file`` objects. (`#3503
+ <https://github.com/pytest-dev/pytest/issues/3503>`_)
+
+
+Improved Documentation
+----------------------
+
+- Added a section on how to use fixtures as factories to the fixture
+ documentation. (`#3461 <https://github.com/pytest-dev/pytest/issues/3461>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Enable caching for pip/pre-commit in order to reduce build time on
+ travis/appveyor. (`#3502
+ <https://github.com/pytest-dev/pytest/issues/3502>`_)
+
+- Switch pytest to the src/ layout as we already suggested it for good practice
+ - now we implement it as well. (`#3513
+ <https://github.com/pytest-dev/pytest/issues/3513>`_)
+
+- Fix an ``if`` in the tests to support 3.7.0b5, where a docstring-handling
+  change in the AST was reverted. (`#3530 <https://github.com/pytest-dev/pytest/issues/3530>`_)
+
+- Remove some python2.5 compatibility code. (`#3529
+ <https://github.com/pytest-dev/pytest/issues/3529>`_)
+
+
+Pytest 3.6.0 (2018-05-23)
+=========================
+
+Features
+--------
+
+- Revamp the internals of the ``pytest.mark`` implementation with correct per
+ node handling which fixes a number of long standing bugs caused by the old
+ design. This introduces new ``Node.iter_markers(name)`` and
+ ``Node.get_closest_mark(name)`` APIs. Users are **strongly encouraged** to
+ read the `reasons for the revamp in the docs
+ <https://docs.pytest.org/en/latest/mark.html#marker-revamp-and-iteration>`_,
+ or jump over to details about `updating existing code to use the new APIs
+ <https://docs.pytest.org/en/latest/mark.html#updating-code>`_. (`#3317
+ <https://github.com/pytest-dev/pytest/issues/3317>`_)
+
+- Now when ``@pytest.fixture`` is applied more than once to the same function, a
+  ``ValueError`` is raised. This buggy behavior would cause surprising problems,
+  and if it was working for a test suite it was mostly by accident. (`#2334
+ <https://github.com/pytest-dev/pytest/issues/2334>`_)
+
+- Support for Python 3.7's builtin ``breakpoint()`` method, see `Using the
+ builtin breakpoint function
+ <https://docs.pytest.org/en/latest/usage.html#breakpoint-builtin>`_ for
+ details. (`#3180 <https://github.com/pytest-dev/pytest/issues/3180>`_)
+
+- ``monkeypatch`` now supports a ``context()`` function which acts as a context
+  manager that undoes all patching done within the ``with`` block; see the
+  sketch after this list. (`#3290
+  <https://github.com/pytest-dev/pytest/issues/3290>`_)
+
+- The ``--pdb`` option now causes KeyboardInterrupt to enter the debugger,
+ instead of stopping the test session. On python 2.7, hitting CTRL+C again
+ exits the debugger. On python 3.2 and higher, use CTRL+D. (`#3299
+ <https://github.com/pytest-dev/pytest/issues/3299>`_)
+
+- pytest no longer changes the log level of the root logger when the
+ ``log-level`` parameter has greater numeric value than that of the level of
+ the root logger, which makes it play better with custom logging configuration
+ in user code. (`#3307 <https://github.com/pytest-dev/pytest/issues/3307>`_)
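+
+A minimal sketch of the ``monkeypatch.context()`` usage mentioned above
+(``os.getcwd`` is just an illustrative patch target)::
+
+    import os
+
+    def test_monkeypatch_context(monkeypatch):
+        original = os.getcwd()
+        with monkeypatch.context() as m:
+            m.setattr(os, "getcwd", lambda: "/fake/dir")
+            assert os.getcwd() == "/fake/dir"
+        # everything patched inside the block is undone on exit
+        assert os.getcwd() == original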
+
+
+Bug Fixes
+---------
+
+- A rare race-condition which might result in corrupted ``.pyc`` files on
+ Windows has been hopefully solved. (`#3008
+ <https://github.com/pytest-dev/pytest/issues/3008>`_)
+
+- Also use ``iter_marker`` for discovering the marks that apply to marker
+  expressions from the CLI, to avoid bad data from the legacy mark storage.
+ (`#3441 <https://github.com/pytest-dev/pytest/issues/3441>`_)
+
+- When showing diffs of failed assertions where the contents contain only
+ whitespace, escape them using ``repr()`` first to make it easy to spot the
+ differences. (`#3443 <https://github.com/pytest-dev/pytest/issues/3443>`_)
+
+
+Improved Documentation
+----------------------
+
+- Change documentation copyright year to a range which auto-updates itself each
+ time it is published. (`#3303
+ <https://github.com/pytest-dev/pytest/issues/3303>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- ``pytest`` now depends on the `python-atomicwrites
+ <https://github.com/untitaker/python-atomicwrites>`_ library. (`#3008
+ <https://github.com/pytest-dev/pytest/issues/3008>`_)
+
+- Update all pypi.python.org URLs to pypi.org. (`#3431
+ <https://github.com/pytest-dev/pytest/issues/3431>`_)
+
+- Detect `pytest_` prefixed hooks using the internal plugin manager since
+ ``pluggy`` is deprecating the ``implprefix`` argument to ``PluginManager``.
+ (`#3487 <https://github.com/pytest-dev/pytest/issues/3487>`_)
+
+- Import ``Mapping`` and ``Sequence`` from ``_pytest.compat`` instead of
+ directly from ``collections`` in ``python_api.py::approx``. Add ``Mapping``
+ to ``_pytest.compat``, import it from ``collections`` on python 2, but from
+ ``collections.abc`` on Python 3 to avoid a ``DeprecationWarning`` on Python
+ 3.7 or newer. (`#3497 <https://github.com/pytest-dev/pytest/issues/3497>`_)
+
+
+Pytest 3.5.1 (2018-04-23)
+=========================
+
+
+Bug Fixes
+---------
+
+- Reset ``sys.last_type``, ``sys.last_value`` and ``sys.last_traceback`` before
+ each test executes. Those attributes are added by pytest during the test run
+ to aid debugging, but were never reset so they would create a leaking
+ reference to the last failing test's frame which in turn could never be
+ reclaimed by the garbage collector. (`#2798
+ <https://github.com/pytest-dev/pytest/issues/2798>`_)
+
+- ``pytest.raises`` now raises ``TypeError`` when receiving an unknown keyword
+ argument. (`#3348 <https://github.com/pytest-dev/pytest/issues/3348>`_)
+
+- ``pytest.raises`` now works with exception classes that look like iterables.
+ (`#3372 <https://github.com/pytest-dev/pytest/issues/3372>`_)
+
+
+Improved Documentation
+----------------------
+
+- Fix typo in ``caplog`` fixture documentation, which incorrectly identified
+ certain attributes as methods. (`#3406
+ <https://github.com/pytest-dev/pytest/issues/3406>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Added a more indicative error message when parametrizing a function whose
+ argument takes a default value. (`#3221
+ <https://github.com/pytest-dev/pytest/issues/3221>`_)
+
+- Remove internal ``_pytest.terminal.flatten`` function in favor of
+ ``more_itertools.collapse``. (`#3330
+ <https://github.com/pytest-dev/pytest/issues/3330>`_)
+
+- Import some modules from ``collections.abc`` instead of ``collections`` as
+ the former modules trigger ``DeprecationWarning`` in Python 3.7. (`#3339
+ <https://github.com/pytest-dev/pytest/issues/3339>`_)
+
+- ``record_property`` is no longer experimental; removing the warnings had
+  simply been forgotten. (`#3360 <https://github.com/pytest-dev/pytest/issues/3360>`_)
+
+- Mention in documentation and CLI help that fixtures with leading ``_`` are
+ printed by ``pytest --fixtures`` only if the ``-v`` option is added. (`#3398
+ <https://github.com/pytest-dev/pytest/issues/3398>`_)
+
+
+Pytest 3.5.0 (2018-03-21)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- ``record_xml_property`` fixture is now deprecated in favor of the more
+ generic ``record_property``. (`#2770
+ <https://github.com/pytest-dev/pytest/issues/2770>`_)
+
+- Defining ``pytest_plugins`` is now deprecated in non-top-level conftest.py
+ files, because they "leak" to the entire directory tree. (`#3084
+ <https://github.com/pytest-dev/pytest/issues/3084>`_)
+
+
+Features
+--------
+
+- New ``--show-capture`` command-line option that allows specifying how to
+ display captured output when tests fail: ``no``, ``stdout``, ``stderr``,
+ ``log`` or ``all`` (the default). (`#1478
+ <https://github.com/pytest-dev/pytest/issues/1478>`_)
+
+- New ``--rootdir`` command-line option to override the rules for discovering
+ the root directory. See `customize
+ <https://docs.pytest.org/en/latest/customize.html>`_ in the documentation for
+ details. (`#1642 <https://github.com/pytest-dev/pytest/issues/1642>`_)
+
+- Fixtures are now instantiated based on their scopes, with higher-scoped
+  fixtures (such as ``session``) being instantiated before lower-scoped
+  fixtures (such as ``function``). The relative order of fixtures of the same
+  scope is kept unchanged, based on their declaration order and their
+ dependencies. (`#2405 <https://github.com/pytest-dev/pytest/issues/2405>`_)
+
+- ``record_xml_property`` renamed to ``record_property`` and is now compatible
+ with xdist, markers and any reporter. ``record_xml_property`` name is now
+ deprecated. (`#2770 <https://github.com/pytest-dev/pytest/issues/2770>`_)
+
+- New ``--nf``, ``--new-first`` options: run new tests first followed by the
+ rest of the tests, in both cases tests are also sorted by the file modified
+ time, with more recent files coming first. (`#3034
+ <https://github.com/pytest-dev/pytest/issues/3034>`_)
+
+- New ``--last-failed-no-failures`` command-line option that allows specifying
+  the behavior of the cache plugin's ``--last-failed`` feature when no tests
+ failed in the last run (or no cache was found): ``none`` or ``all`` (the
+ default). (`#3139 <https://github.com/pytest-dev/pytest/issues/3139>`_)
+
+- New ``--doctest-continue-on-failure`` command-line option to enable doctests
+ to show multiple failures for each snippet, instead of stopping at the first
+ failure. (`#3149 <https://github.com/pytest-dev/pytest/issues/3149>`_)
+
+- Captured log messages are added to the ``<system-out>`` tag in the generated
+ junit xml file if the ``junit_logging`` ini option is set to ``system-out``.
+ If the value of this ini option is ``system-err``, the logs are written to
+ ``<system-err>``. The default value for ``junit_logging`` is ``no``, meaning
+ captured logs are not written to the output file. (`#3156
+ <https://github.com/pytest-dev/pytest/issues/3156>`_)
+
+- Allow the logging plugin to handle ``pytest_runtest_logstart`` and
+ ``pytest_runtest_logfinish`` hooks when live logs are enabled. (`#3189
+ <https://github.com/pytest-dev/pytest/issues/3189>`_)
+
+- Passing `--log-cli-level` in the command-line now automatically activates
+ live logging. (`#3190 <https://github.com/pytest-dev/pytest/issues/3190>`_)
+
+- Add command line option ``--deselect`` to allow deselection of individual
+ tests at collection time. (`#3198
+ <https://github.com/pytest-dev/pytest/issues/3198>`_)
+
+- Captured logs are printed before entering pdb. (`#3204
+ <https://github.com/pytest-dev/pytest/issues/3204>`_)
+
+- Deselected item count is now shown before tests are run, e.g. ``collected X
+ items / Y deselected``. (`#3213
+ <https://github.com/pytest-dev/pytest/issues/3213>`_)
+
+- The builtin module ``platform`` is now available for use in expressions in
+ ``pytest.mark``. (`#3236
+ <https://github.com/pytest-dev/pytest/issues/3236>`_)
+
+- The *short test summary info* section now is displayed after tracebacks and
+ warnings in the terminal. (`#3255
+ <https://github.com/pytest-dev/pytest/issues/3255>`_)
+
+- New ``--verbosity`` flag to set verbosity level explicitly. (`#3296
+ <https://github.com/pytest-dev/pytest/issues/3296>`_)
+
+- ``pytest.approx`` now accepts comparing a numpy array with a scalar. (`#3312
+ <https://github.com/pytest-dev/pytest/issues/3312>`_)
+
+
+Bug Fixes
+---------
+
+- Suppress ``IOError`` when closing the temporary file used for capturing
+ streams in Python 2.7. (`#2370
+ <https://github.com/pytest-dev/pytest/issues/2370>`_)
+
+- Fixed ``clear()`` method on ``caplog`` fixture which cleared ``records``, but
+ not the ``text`` property. (`#3297
+ <https://github.com/pytest-dev/pytest/issues/3297>`_)
+
+- During test collection, when stdin is not allowed to be read, the
+  ``DontReadFromStdin`` object still allows itself to be iterable and resolved
+  to an iterator without crashing. (`#3314
+ <https://github.com/pytest-dev/pytest/issues/3314>`_)
+
+
+Improved Documentation
+----------------------
+
+- Added a `reference <https://docs.pytest.org/en/latest/reference.html>`_ page
+ to the docs. (`#1713 <https://github.com/pytest-dev/pytest/issues/1713>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Change minimum requirement of ``attrs`` to ``17.4.0``. (`#3228
+ <https://github.com/pytest-dev/pytest/issues/3228>`_)
+
+- Renamed example directories so all tests pass when run from the base
+ directory. (`#3245 <https://github.com/pytest-dev/pytest/issues/3245>`_)
+
+- Internal ``mark.py`` module has been turned into a package. (`#3250
+ <https://github.com/pytest-dev/pytest/issues/3250>`_)
+
+- ``pytest`` now depends on the `more-itertools
+ <https://github.com/erikrose/more-itertools>`_ package. (`#3265
+ <https://github.com/pytest-dev/pytest/issues/3265>`_)
+
+- Added warning when ``[pytest]`` section is used in a ``.cfg`` file passed
+ with ``-c`` (`#3268 <https://github.com/pytest-dev/pytest/issues/3268>`_)
+
+- ``nodeids`` can now be passed explicitly to ``FSCollector`` and ``Node``
+ constructors. (`#3291 <https://github.com/pytest-dev/pytest/issues/3291>`_)
+
+- Internal refactoring of ``FormattedExcinfo`` to use ``attrs`` facilities and
+ remove old support code for legacy Python versions. (`#3292
+ <https://github.com/pytest-dev/pytest/issues/3292>`_)
+
+- Refactoring to unify how verbosity is handled internally. (`#3296
+ <https://github.com/pytest-dev/pytest/issues/3296>`_)
+
+- Internal refactoring to better integrate with argparse. (`#3304
+ <https://github.com/pytest-dev/pytest/issues/3304>`_)
+
+- Fix a python example when calling a fixture in doc/en/usage.rst (`#3308
+ <https://github.com/pytest-dev/pytest/issues/3308>`_)
+
+
+Pytest 3.4.2 (2018-03-04)
+=========================
+
+Bug Fixes
+---------
+
+- Removed progress information when capture option is ``no``. (`#3203
+ <https://github.com/pytest-dev/pytest/issues/3203>`_)
+
+- Refactor check of bindir from ``exists`` to ``isdir``. (`#3241
+ <https://github.com/pytest-dev/pytest/issues/3241>`_)
+
+- Fix ``TypeError`` issue when using ``approx`` with a ``Decimal`` value.
+ (`#3247 <https://github.com/pytest-dev/pytest/issues/3247>`_)
+
+- Fix reference cycle generated when using the ``request`` fixture. (`#3249
+ <https://github.com/pytest-dev/pytest/issues/3249>`_)
+
+- ``[tool:pytest]`` sections in ``*.cfg`` files passed by the ``-c`` option are
+ now properly recognized. (`#3260
+ <https://github.com/pytest-dev/pytest/issues/3260>`_)
+
+
+Improved Documentation
+----------------------
+
+- Add logging plugin to plugins list. (`#3209
+ <https://github.com/pytest-dev/pytest/issues/3209>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Fix minor typo in fixture.rst (`#3259
+ <https://github.com/pytest-dev/pytest/issues/3259>`_)
+
+
+Pytest 3.4.1 (2018-02-20)
+=========================
+
+Bug Fixes
+---------
+
+- Move import of ``doctest.UnexpectedException`` to top-level to avoid possible
+ errors when using ``--pdb``. (`#1810
+ <https://github.com/pytest-dev/pytest/issues/1810>`_)
+
+- Added printing of captured stdout/stderr before entering pdb, and improved a
+ test which was giving false negatives about output capturing. (`#3052
+ <https://github.com/pytest-dev/pytest/issues/3052>`_)
+
+- Fix ordering of tests using parametrized fixtures which can lead to fixtures
+ being created more than necessary. (`#3161
+ <https://github.com/pytest-dev/pytest/issues/3161>`_)
+
+- Fix bug where logging happening at hooks outside of "test run" hooks would
+ cause an internal error. (`#3184
+ <https://github.com/pytest-dev/pytest/issues/3184>`_)
+
+- Detect arguments injected by ``unittest.mock.patch`` decorator correctly when
+ pypi ``mock.patch`` is installed and imported. (`#3206
+ <https://github.com/pytest-dev/pytest/issues/3206>`_)
+
+- Errors shown when a ``pytest.raises()`` with ``match=`` fails are now cleaner
+ on what happened: When no exception was raised, the "matching '...'" part got
+ removed as it falsely implies that an exception was raised but it didn't
+ match. When a wrong exception was raised, it's now thrown (like
+  ``pytest.raises()`` without ``match=`` would) instead of complaining about
+ the unmatched text. (`#3222
+ <https://github.com/pytest-dev/pytest/issues/3222>`_)
+
+- Fixed output capture handling in doctests on macOS. (`#985
+ <https://github.com/pytest-dev/pytest/issues/985>`_)
+
+
+Improved Documentation
+----------------------
+
+- Add Sphinx parameter docs for ``match`` and ``message`` args to
+ ``pytest.raises``. (`#3202
+ <https://github.com/pytest-dev/pytest/issues/3202>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- pytest has changed the publication procedure and is now being published to
+ PyPI directly from Travis. (`#3060
+ <https://github.com/pytest-dev/pytest/issues/3060>`_)
+
+- Rename ``ParameterSet._for_parameterize()`` to ``_for_parametrize()`` in
+ order to comply with the naming convention. (`#3166
+ <https://github.com/pytest-dev/pytest/issues/3166>`_)
+
+- Skip failing pdb/doctest test on mac. (`#985
+ <https://github.com/pytest-dev/pytest/issues/985>`_)
+
+
+Pytest 3.4.0 (2018-01-30)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- All pytest classes now subclass ``object`` for better Python 2/3 compatibility.
+ This should not affect user code except in very rare edge cases. (`#2147
+ <https://github.com/pytest-dev/pytest/issues/2147>`_)
+
+
+Features
+--------
+
+- Introduce ``empty_parameter_set_mark`` ini option to select which mark to
+ apply when ``@pytest.mark.parametrize`` is given an empty set of parameters.
+ Valid options are ``skip`` (default) and ``xfail``. Note that it is planned
+ to change the default to ``xfail`` in future releases as this is considered
+ less error prone. (`#2527
+ <https://github.com/pytest-dev/pytest/issues/2527>`_)
+
+- **Incompatible change**: after community feedback the `logging
+ <https://docs.pytest.org/en/latest/logging.html>`_ functionality has
+ undergone some changes. Please consult the `logging documentation
+ <https://docs.pytest.org/en/latest/logging.html#incompatible-changes-in-pytest-3-4>`_
+ for details. (`#3013 <https://github.com/pytest-dev/pytest/issues/3013>`_)
+
+- Console output falls back to "classic" mode when capturing is disabled (``-s``),
+ otherwise the output gets garbled to the point of being useless. (`#3038
+ <https://github.com/pytest-dev/pytest/issues/3038>`_)
+
+- New `pytest_runtest_logfinish
+ <https://docs.pytest.org/en/latest/writing_plugins.html#_pytest.hookspec.pytest_runtest_logfinish>`_
+ hook which is called when a test item has finished executing, analogous to
+ `pytest_runtest_logstart
+ <https://docs.pytest.org/en/latest/writing_plugins.html#_pytest.hookspec.pytest_runtest_start>`_.
+ (`#3101 <https://github.com/pytest-dev/pytest/issues/3101>`_)
+
+- Improve performance when collecting tests using many fixtures. (`#3107
+ <https://github.com/pytest-dev/pytest/issues/3107>`_)
+
+- New ``caplog.get_records(when)`` method which provides access to the captured
+ records for the ``"setup"``, ``"call"`` and ``"teardown"``
+ testing stages. (`#3117 <https://github.com/pytest-dev/pytest/issues/3117>`_)
+
+- New fixture ``record_xml_attribute`` that allows modifying and inserting
+ attributes on the ``<testcase>`` xml node in JUnit reports. (`#3130
+ <https://github.com/pytest-dev/pytest/issues/3130>`_)
+
+- The default cache directory has been renamed from ``.cache`` to
+ ``.pytest_cache`` after community feedback that the name ``.cache`` did not
+ make it clear that it was used by pytest. (`#3138
+ <https://github.com/pytest-dev/pytest/issues/3138>`_)
+
+- Colorize the levelname column in the live-log output. (`#3142
+ <https://github.com/pytest-dev/pytest/issues/3142>`_)
+
+
+Bug Fixes
+---------
+
+- Fix hanging pexpect test on MacOS by using flush() instead of wait().
+ (`#2022 <https://github.com/pytest-dev/pytest/issues/2022>`_)
+
+- Fix restoring Python state after in-process pytest runs with the
+ ``pytester`` plugin; this may break tests using multiple inprocess
+ pytest runs if later ones depend on earlier ones leaking global interpreter
+ changes. (`#3016 <https://github.com/pytest-dev/pytest/issues/3016>`_)
+
+- Fix skipping plugin reporting hook when test aborted before plugin setup
+ hook. (`#3074 <https://github.com/pytest-dev/pytest/issues/3074>`_)
+
+- Fix progress percentage reported when tests fail during teardown. (`#3088
+ <https://github.com/pytest-dev/pytest/issues/3088>`_)
+
+- **Incompatible change**: ``-o/--override`` option no longer eats all the
+ remaining options, which can lead to surprising behavior: for example,
+ ``pytest -o foo=1 /path/to/test.py`` would fail because ``/path/to/test.py``
+ would be considered as part of the ``-o`` command-line argument. One
+ consequence of this is that now multiple configuration overrides need
+ multiple ``-o`` flags: ``pytest -o foo=1 -o bar=2``. (`#3103
+ <https://github.com/pytest-dev/pytest/issues/3103>`_)
+
+
+Improved Documentation
+----------------------
+
+- Document hooks (defined with ``historic=True``) which cannot be used with
+ ``hookwrapper=True``. (`#2423
+ <https://github.com/pytest-dev/pytest/issues/2423>`_)
+
+- Clarify that warning capturing doesn't change the warning filter by default.
+ (`#2457 <https://github.com/pytest-dev/pytest/issues/2457>`_)
+
+- Clarify a possible confusion when using pytest_fixture_setup with fixture
+ functions that return None. (`#2698
+ <https://github.com/pytest-dev/pytest/issues/2698>`_)
+
+- Fix the wording of a sentence on doctest flags used in pytest. (`#3076
+ <https://github.com/pytest-dev/pytest/issues/3076>`_)
+
+- Prefer ``https://*.readthedocs.io`` over ``http://*.rtfd.org`` for links in
+ the documentation. (`#3092
+ <https://github.com/pytest-dev/pytest/issues/3092>`_)
+
+- Improve readability (wording, grammar) of Getting Started guide (`#3131
+ <https://github.com/pytest-dev/pytest/issues/3131>`_)
+
+- Added note that calling pytest.main multiple times from the same process is
+ not recommended because of import caching. (`#3143
+ <https://github.com/pytest-dev/pytest/issues/3143>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Show a simple and easy error when keyword expressions trigger a syntax error
+ (for example, ``"-k foo and import"`` will show an error that you can not use
+ the ``import`` keyword in expressions). (`#2953
+ <https://github.com/pytest-dev/pytest/issues/2953>`_)
+
+- Change parametrized automatic test id generation to use the ``__name__``
+ attribute of functions instead of the fallback argument name plus counter.
+ (`#2976 <https://github.com/pytest-dev/pytest/issues/2976>`_)
+
+- Replace py.std with stdlib imports. (`#3067
+ <https://github.com/pytest-dev/pytest/issues/3067>`_)
+
+- Corrected 'you' to 'your' in logging docs. (`#3129
+ <https://github.com/pytest-dev/pytest/issues/3129>`_)
+
+
+Pytest 3.3.2 (2017-12-25)
+=========================
+
+Bug Fixes
+---------
+
+- pytester: ignore files used to obtain current user metadata in the fd leak
+ detector. (`#2784 <https://github.com/pytest-dev/pytest/issues/2784>`_)
+
+- Fix **memory leak** where objects returned by fixtures were never destructed
+ by the garbage collector. (`#2981
+ <https://github.com/pytest-dev/pytest/issues/2981>`_)
+
+- Fix conversion of pyargs to filename to not convert symlinks on Python 2. (`#2985
+ <https://github.com/pytest-dev/pytest/issues/2985>`_)
+
+- ``PYTEST_DONT_REWRITE`` is now checked for plugins too rather than only for
+ test modules. (`#2995 <https://github.com/pytest-dev/pytest/issues/2995>`_)
+
+
+Improved Documentation
+----------------------
+
+- Add clarifying note about behavior of multiple parametrized arguments (`#3001
+ <https://github.com/pytest-dev/pytest/issues/3001>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Code cleanup. (`#3015 <https://github.com/pytest-dev/pytest/issues/3015>`_,
+ `#3021 <https://github.com/pytest-dev/pytest/issues/3021>`_)
+
+- Clean up code by replacing imports and references of `_ast` to `ast`. (`#3018
+ <https://github.com/pytest-dev/pytest/issues/3018>`_)
+
+
+Pytest 3.3.1 (2017-12-05)
+=========================
+
+Bug Fixes
+---------
+
+- Fix issue about ``-p no:<plugin>`` having no effect. (`#2920
+ <https://github.com/pytest-dev/pytest/issues/2920>`_)
+
+- Fix regression with warnings that contained non-strings in their arguments in
+ Python 2. (`#2956 <https://github.com/pytest-dev/pytest/issues/2956>`_)
+
+- Always escape null bytes when setting ``PYTEST_CURRENT_TEST``. (`#2957
+ <https://github.com/pytest-dev/pytest/issues/2957>`_)
+
+- Fix ``ZeroDivisionError`` when using the ``testmon`` plugin when no tests
+ were actually collected. (`#2971
+ <https://github.com/pytest-dev/pytest/issues/2971>`_)
+
+- Bring back ``TerminalReporter.writer`` as an alias to
+ ``TerminalReporter._tw``. This alias was removed by accident in the ``3.3.0``
+ release. (`#2984 <https://github.com/pytest-dev/pytest/issues/2984>`_)
+
+- The ``pytest-capturelog`` plugin is now also blacklisted, avoiding errors when
+ running pytest with it still installed. (`#3004
+ <https://github.com/pytest-dev/pytest/issues/3004>`_)
+
+
+Improved Documentation
+----------------------
+
+- Fix broken link to plugin ``pytest-localserver``. (`#2963
+ <https://github.com/pytest-dev/pytest/issues/2963>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Update github "bugs" link in ``CONTRIBUTING.rst`` (`#2949
+ <https://github.com/pytest-dev/pytest/issues/2949>`_)
+
+
+Pytest 3.3.0 (2017-11-23)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- Pytest no longer supports Python **2.6** and **3.3**. Those Python versions
+  have been EOL for some time now and incur maintenance and compatibility costs
+  on the pytest core team; following up with the rest of the community, we
+  decided that they will no longer be supported starting with this version.
+  Users who still require those versions should pin pytest to ``<3.3``. (`#2812
+ <https://github.com/pytest-dev/pytest/issues/2812>`_)
+
+- Remove internal ``_preloadplugins()`` function. This removal is part of the
+ ``pytest_namespace()`` hook deprecation. (`#2636
+ <https://github.com/pytest-dev/pytest/issues/2636>`_)
+
+- Internally change ``CallSpec2`` to have a list of marks instead of a broken
+ mapping of keywords. This removes the keywords attribute of the internal
+ ``CallSpec2`` class. (`#2672
+ <https://github.com/pytest-dev/pytest/issues/2672>`_)
+
+- Remove ``ParameterSet.deprecated_arg_dict`` - it's not a public API and the lack
+ of the underscore was a naming error. (`#2675
+ <https://github.com/pytest-dev/pytest/issues/2675>`_)
+
+- Remove the internal multi-typed attribute ``Node._evalskip`` and replace it
+ with the boolean ``Node._skipped_by_mark``. (`#2767
+ <https://github.com/pytest-dev/pytest/issues/2767>`_)
+
+- The ``params`` list passed to ``pytest.fixture`` is now for
+ all effects considered immutable and frozen at the moment of the ``pytest.fixture``
+ call. Previously the list could be changed before the first invocation of the fixture
+ allowing for a form of dynamic parametrization (for example, updated from command-line options),
+ but this was an unwanted implementation detail which complicated the internals and prevented
+ some internal cleanup. See issue `#2959 <https://github.com/pytest-dev/pytest/issues/2959>`_
+ for details and a recommended workaround.
+
+Features
+--------
+
+- ``pytest_fixture_post_finalizer`` hook can now receive a ``request``
+ argument. (`#2124 <https://github.com/pytest-dev/pytest/issues/2124>`_)
+
+- Replace the old introspection code in compat.py that determines the available
+ arguments of fixtures with inspect.signature on Python 3 and
+ funcsigs.signature on Python 2. This should respect ``__signature__``
+ declarations on functions. (`#2267
+ <https://github.com/pytest-dev/pytest/issues/2267>`_)
+
+- Report tests with global ``pytestmark`` variable only once. (`#2549
+ <https://github.com/pytest-dev/pytest/issues/2549>`_)
+
+- Now pytest displays the total progress percentage while running tests. The
+ previous output style can be set by configuring the ``console_output_style``
+ setting to ``classic``. (`#2657 <https://github.com/pytest-dev/pytest/issues/2657>`_)
+
+- Match ``warns`` signature to ``raises`` by adding ``match`` keyword. (`#2708
+ <https://github.com/pytest-dev/pytest/issues/2708>`_)
+
+- Pytest now captures and displays output from the standard ``logging`` module.
+ The user can control the logging level to be captured by specifying options
+ in ``pytest.ini``, the command line and also during individual tests using
+ markers. Also, a ``caplog`` fixture is available that enables users to test
+ the captured log during specific tests (similar to ``capsys`` for example).
+ For more information, please see the `logging docs
+ <https://docs.pytest.org/en/latest/logging.html>`_. This feature was
+ introduced by merging the popular `pytest-catchlog
+ <https://pypi.org/project/pytest-catchlog/>`_ plugin, thanks to `Thomas Hisch
+ <https://github.com/thisch>`_. Be advised that during the merging the
+ backward compatibility interface with the defunct ``pytest-capturelog`` has
+ been dropped. (`#2794 <https://github.com/pytest-dev/pytest/issues/2794>`_)
+
+- Add ``allow_module_level`` kwarg to ``pytest.skip()``, making it possible to
+  skip the whole module; see the sketch after this list. (`#2808
+  <https://github.com/pytest-dev/pytest/issues/2808>`_)
+
+- Allow setting ``file_or_dir``, ``-c``, and ``-o`` in PYTEST_ADDOPTS. (`#2824
+ <https://github.com/pytest-dev/pytest/issues/2824>`_)
+
+- Return stdout/stderr capture results as a ``namedtuple``, so ``out`` and
+ ``err`` can be accessed by attribute. (`#2879
+ <https://github.com/pytest-dev/pytest/issues/2879>`_)
+
+- Add ``capfdbinary``, a version of ``capfd`` which returns bytes from
+ ``readouterr()``. (`#2923
+ <https://github.com/pytest-dev/pytest/issues/2923>`_)
+
+- Add ``capsysbinary`` a version of ``capsys`` which returns bytes from
+ ``readouterr()``. (`#2934
+ <https://github.com/pytest-dev/pytest/issues/2934>`_)
+
+- Implement feature to skip ``setup.py`` files when run with
+ ``--doctest-modules``. (`#502
+ <https://github.com/pytest-dev/pytest/issues/502>`_)
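+
+A minimal sketch of module-level skipping with the new ``allow_module_level``
+keyword (the version check is only an illustrative condition)::
+
+    import sys
+
+    import pytest
+
+    if sys.version_info < (3, 6):
+        pytest.skip("requires Python 3.6+", allow_module_level=True)
+
+    def test_something_py36_only():
+        assert sys.version_info >= (3, 6)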
+
+
+Bug Fixes
+---------
+
+- Resume output capturing after ``capsys/capfd.disabled()`` context manager.
+ (`#1993 <https://github.com/pytest-dev/pytest/issues/1993>`_)
+
+- ``pytest_fixture_setup`` and ``pytest_fixture_post_finalizer`` hooks are now
+ called for all ``conftest.py`` files. (`#2124
+ <https://github.com/pytest-dev/pytest/issues/2124>`_)
+
+- If an exception happens while loading a plugin, pytest no longer hides the
+ original traceback. In Python 2 it will show the original traceback with a new
+  message that explains in which plugin the error occurred. In Python 3 it
+  will show two chained exceptions: the original exception raised while
+  loading the plugin, in addition to an exception that pytest raises about
+  loading a plugin. (`#2491
+ <https://github.com/pytest-dev/pytest/issues/2491>`_)
+
+- ``capsys`` and ``capfd`` can now be used by other fixtures. (`#2709
+ <https://github.com/pytest-dev/pytest/issues/2709>`_)
+
+- Internal ``pytester`` plugin properly encodes ``bytes`` arguments to
+ ``utf-8``. (`#2738 <https://github.com/pytest-dev/pytest/issues/2738>`_)
+
+- ``testdir`` now uses the same method used by ``tmpdir`` to create its
+ temporary directory. This changes the final structure of the ``testdir``
+ directory slightly, but should not affect usage in normal scenarios and
+ avoids a number of potential problems. (`#2751
+ <https://github.com/pytest-dev/pytest/issues/2751>`_)
+
+- Pytest no longer complains that warnings with unicode messages are not
+ ascii-compatible, even when the messages actually are. As a result of this
+ fix, warnings with unicode messages are now converted to an ascii
+ representation first, for safety. (`#2809 <https://github.com/pytest-dev/pytest/issues/2809>`_)
+
+- Change return value of pytest command when ``--maxfail`` is reached from
+ ``2`` (interrupted) to ``1`` (failed). (`#2845
+ <https://github.com/pytest-dev/pytest/issues/2845>`_)
+
+- Fix issue in assertion rewriting which could lead it to rewrite modules which
+ should not be rewritten. (`#2939
+ <https://github.com/pytest-dev/pytest/issues/2939>`_)
+
+- Handle marks without description in ``pytest.ini``. (`#2942
+ <https://github.com/pytest-dev/pytest/issues/2942>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- pytest now depends on `attrs <https://pypi.org/project/attrs/>`_ for internal
+ structures to ease code maintainability. (`#2641
+ <https://github.com/pytest-dev/pytest/issues/2641>`_)
+
+- Refactored internal Python 2/3 compatibility code to use ``six``. (`#2642
+ <https://github.com/pytest-dev/pytest/issues/2642>`_)
+
+- Stop vendoring ``pluggy``: we're missing out on its latest changes for not
+ much benefit. (`#2719 <https://github.com/pytest-dev/pytest/issues/2719>`_)
+
+- Internal refactor: simplify ascii string escaping by using the
+ backslashreplace error handler in newer Python 3 versions. (`#2734
+ <https://github.com/pytest-dev/pytest/issues/2734>`_)
+
+- Remove unnecessary mark evaluator in unittest plugin. (`#2767
+ <https://github.com/pytest-dev/pytest/issues/2767>`_)
+
+- Calls to ``Metafunc.addcall`` now emit a deprecation warning. This function
+ is scheduled to be removed in ``pytest-4.0``. (`#2876
+ <https://github.com/pytest-dev/pytest/issues/2876>`_)
+
+- Internal move of the parameterset extraction to a more maintainable place.
+ (`#2877 <https://github.com/pytest-dev/pytest/issues/2877>`_)
+
+- Internal refactoring to simplify scope node lookup. (`#2910
+ <https://github.com/pytest-dev/pytest/issues/2910>`_)
+
+- Configure ``pytest`` to prevent pip from installing pytest in unsupported
+ Python versions. (`#2922
+ <https://github.com/pytest-dev/pytest/issues/2922>`_)
+
+
+Pytest 3.2.5 (2017-11-15)
+=========================
+
+Bug Fixes
+---------
+
+- Remove ``py<1.5`` restriction from ``pytest`` as this can cause version
+ conflicts in some installations. (`#2926
+ <https://github.com/pytest-dev/pytest/issues/2926>`_)
+
+
+Pytest 3.2.4 (2017-11-13)
+=========================
+
+Bug Fixes
+---------
+
+- Fix the bug where running with ``--pyargs`` will result in items with
+ empty ``parent.nodeid`` if run from a different root directory. (`#2775
+ <https://github.com/pytest-dev/pytest/issues/2775>`_)
+
+- Fix issue with ``@pytest.parametrize`` if ``argnames`` was specified as a keyword argument.
+ (`#2819 <https://github.com/pytest-dev/pytest/issues/2819>`_)
+
+- Strip whitespace from marker names when reading them from INI config. (`#2856
+ <https://github.com/pytest-dev/pytest/issues/2856>`_)
+
+- Show full context of the doctest source in the pytest output, if the line
+ number of the failed example in the docstring is < 9. (`#2882
+ <https://github.com/pytest-dev/pytest/issues/2882>`_)
+
+- Match fixture paths against actual path segments in order to avoid matching folders which share a prefix.
+ (`#2836 <https://github.com/pytest-dev/pytest/issues/2836>`_)
+
+Improved Documentation
+----------------------
+
+- Introduce a dedicated section about conftest.py. (`#1505
+ <https://github.com/pytest-dev/pytest/issues/1505>`_)
+
+- Explicitly mention ``xpass`` in the documentation of ``xfail``. (`#1997
+ <https://github.com/pytest-dev/pytest/issues/1997>`_)
+
+- Append example for pytest.param in the example/parametrize document. (`#2658
+ <https://github.com/pytest-dev/pytest/issues/2658>`_)
+
+- Clarify language of proposal for fixtures parameters (`#2893
+ <https://github.com/pytest-dev/pytest/issues/2893>`_)
+
+- List python 3.6 in the documented supported versions in the getting started
+ document. (`#2903 <https://github.com/pytest-dev/pytest/issues/2903>`_)
+
+- Clarify the documentation of available fixture scopes. (`#538
+ <https://github.com/pytest-dev/pytest/issues/538>`_)
+
+- Add documentation about the ``python -m pytest`` invocation adding the
+ current directory to sys.path. (`#911
+ <https://github.com/pytest-dev/pytest/issues/911>`_)
+
+
+Pytest 3.2.3 (2017-10-03)
+=========================
+
+Bug Fixes
+---------
+
+- Fix crash in tab completion when no prefix is given. (`#2748
+ <https://github.com/pytest-dev/pytest/issues/2748>`_)
+
+- The equality checking function (``__eq__``) of ``MarkDecorator`` returns
+ ``False`` if one object is not an instance of ``MarkDecorator``. (`#2758
+ <https://github.com/pytest-dev/pytest/issues/2758>`_)
+
+- When running ``pytest --fixtures-per-test``: don't crash if an item has no
+ ``_fixtureinfo`` attribute (e.g. doctests). (`#2788
+ <https://github.com/pytest-dev/pytest/issues/2788>`_)
+
+
+Improved Documentation
+----------------------
+
+- In the help text of the ``-k`` option, add an example of using ``not`` to
+ deselect tests whose names match the provided expression. (`#1442
+ <https://github.com/pytest-dev/pytest/issues/1442>`_)
+
+- Add note in ``parametrize.rst`` about calling ``metafunc.parametrize``
+ multiple times. (`#1548 <https://github.com/pytest-dev/pytest/issues/1548>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Set ``xfail_strict=True`` in pytest's own test suite to catch expected
+ failures as soon as they start to pass. (`#2722
+ <https://github.com/pytest-dev/pytest/issues/2722>`_)
+
+- Fix typo in example of passing a callable to markers (in example/markers.rst)
+ (`#2765 <https://github.com/pytest-dev/pytest/issues/2765>`_)
+
+
+Pytest 3.2.2 (2017-09-06)
+=========================
+
+Bug Fixes
+---------
+
+- Calling the deprecated ``request.getfuncargvalue()`` now shows the source of
+ the call. (`#2681 <https://github.com/pytest-dev/pytest/issues/2681>`_)
+
+- Allow tests declared as ``@staticmethod`` to use fixtures. (`#2699
+ <https://github.com/pytest-dev/pytest/issues/2699>`_)
+
+- Fixed edge-case during collection: attributes which raised ``pytest.fail``
+ when accessed would abort the entire collection. (`#2707
+ <https://github.com/pytest-dev/pytest/issues/2707>`_)
+
+- Fix ``ReprFuncArgs`` with mixed unicode and UTF-8 args. (`#2731
+ <https://github.com/pytest-dev/pytest/issues/2731>`_)
+
+
+Improved Documentation
+----------------------
+
+- In examples on working with custom markers, add examples demonstrating the
+ usage of ``pytest.mark.MARKER_NAME.with_args`` in comparison with
+ ``pytest.mark.MARKER_NAME.__call__`` (`#2604
+ <https://github.com/pytest-dev/pytest/issues/2604>`_)
+
+- In one of the simple examples, use ``pytest_collection_modifyitems()`` to skip
+ tests based on a command-line option, allowing its sharing while preventing a
+ user error when accessing ``pytest.config`` before the argument parsing. (`#2653
+ <https://github.com/pytest-dev/pytest/issues/2653>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Fixed minor error in 'Good Practices/Manual Integration' code snippet.
+ (`#2691 <https://github.com/pytest-dev/pytest/issues/2691>`_)
+
+- Fixed typo in goodpractices.rst. (`#2721
+ <https://github.com/pytest-dev/pytest/issues/2721>`_)
+
+- Improve user guidance regarding ``--resultlog`` deprecation. (`#2739
+ <https://github.com/pytest-dev/pytest/issues/2739>`_)
+
+
+Pytest 3.2.1 (2017-08-08)
+=========================
+
+Bug Fixes
+---------
+
+- Fixed small terminal glitch when collecting a single test item. (`#2579
+ <https://github.com/pytest-dev/pytest/issues/2579>`_)
+
+- Correctly consider ``/`` as the file separator to automatically mark plugin
+ files for rewrite on Windows. (`#2591 <https://github.com/pytest-
+ dev/pytest/issues/2591>`_)
+
+- Properly escape test names when setting ``PYTEST_CURRENT_TEST`` environment
+ variable. (`#2644 <https://github.com/pytest-dev/pytest/issues/2644>`_)
+
+- Fix error on Windows and Python 3.6+ when ``sys.stdout`` has been replaced
+ with a stream-like object which does not implement the full ``io`` module
+ buffer protocol. In particular this affects ``pytest-xdist`` users on the
+ aforementioned platform. (`#2666 <https://github.com/pytest-
+ dev/pytest/issues/2666>`_)
+
+
+Improved Documentation
+----------------------
+
+- Explicitly document which pytest features work with ``unittest``. (`#2626
+ <https://github.com/pytest-dev/pytest/issues/2626>`_)
+
+
+Pytest 3.2.0 (2017-07-30)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- ``pytest.approx`` no longer supports ``>``, ``>=``, ``<`` and ``<=``
+ operators to avoid surprising/inconsistent behavior. See `the approx docs
+ <https://docs.pytest.org/en/latest/builtin.html#pytest.approx>`_ for more
+ information. (`#2003 <https://github.com/pytest-dev/pytest/issues/2003>`_)
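+
+ Equality comparison remains the supported style, e.g.:
+
+ .. code-block:: python
+
+     import pytest
+
+     def test_float_sum():
+         # Ordering comparisons against ``approx`` objects are no longer
+         # supported; compare with ``==`` and a tolerance instead.
+         assert 0.1 + 0.2 == pytest.approx(0.3)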
+
+- All old-style specific behavior in current classes in pytest's API is
+ considered deprecated at this point and will be removed in a future release.
+ This affects only Python 2 users, and only in rare situations. (`#2147
+ <https://github.com/pytest-dev/pytest/issues/2147>`_)
+
+- A deprecation warning is now raised when using marks for parameters
+ in ``pytest.mark.parametrize``. Use ``pytest.param`` to apply marks to
+ parameters instead. (`#2427 <https://github.com/pytest-dev/pytest/issues/2427>`_)
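+
+ A minimal sketch of the replacement (values and reason are illustrative):
+
+ .. code-block:: python
+
+     import pytest
+
+     @pytest.mark.parametrize(
+         "value",
+         [
+             1,
+             # Marks attached directly to a parameter are deprecated; wrap
+             # the parameter in ``pytest.param`` and use ``marks=`` instead.
+             pytest.param(2, marks=pytest.mark.xfail(reason="known issue")),
+         ],
+     )
+     def test_value_is_positive(value):
+         assert value > 0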
+
+
+Features
+--------
+
+- Add support for numpy arrays (and dicts) to approx. (`#1994
+ <https://github.com/pytest-dev/pytest/issues/1994>`_)
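+
+ For example, with a plain dict (keys and values are illustrative; numpy
+ arrays are compared element-wise in the same way):
+
+ .. code-block:: python
+
+     import pytest
+
+     def test_approx_mapping():
+         measured = {"x": 0.1 + 0.2, "y": 2.0 / 3.0}
+         # Each value is checked with the usual ``approx`` tolerances.
+         assert measured == pytest.approx({"x": 0.3, "y": 0.6666667})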
+
+- Now test function objects have a ``pytestmark`` attribute containing a list
+ of marks applied directly to the test function, as opposed to marks inherited
+ from parent classes or modules. (`#2516 <https://github.com/pytest-
+ dev/pytest/issues/2516>`_)
+
+- Collection ignores local virtualenvs by default; ``--collect-in-virtualenv``
+ overrides this behavior. (`#2518 <https://github.com/pytest-
+ dev/pytest/issues/2518>`_)
+
+- Allow class methods decorated as ``@staticmethod`` to be candidates for
+ collection as a test function. (Only for Python 2.7 and above. Python 2.6
+ will still ignore static methods.) (`#2528 <https://github.com/pytest-
+ dev/pytest/issues/2528>`_)
+
+- Introduce ``mark.with_args`` in order to allow passing functions/classes as
+ sole argument to marks. (`#2540 <https://github.com/pytest-
+ dev/pytest/issues/2540>`_)
+
+- New ``cache_dir`` ini option: sets the directory where the contents of the
+ cache plugin are stored. The directory may be a relative or an absolute path:
+ a relative path is created relative to ``rootdir``, while an absolute path is
+ used as-is. Additionally, the path may contain environment variables, which
+ are expanded at runtime. (`#2543 <https://github.com/pytest-dev/pytest/issues/2543>`_)
+
+- Introduce the ``PYTEST_CURRENT_TEST`` environment variable that is set with
+ the ``nodeid`` and stage (``setup``, ``call`` and ``teardown``) of the test
+ being currently executed. See the `documentation
+ <https://docs.pytest.org/en/latest/example/simple.html#pytest-current-test-
+ environment-variable>`_ for more info. (`#2583 <https://github.com/pytest-
+ dev/pytest/issues/2583>`_)
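+
+ A rough sketch of reading the variable from within a test (assumes the
+ documented ``"<nodeid> (<stage>)"`` format):
+
+ .. code-block:: python
+
+     import os
+
+     def test_current_test_is_exposed():
+         current = os.environ.get("PYTEST_CURRENT_TEST", "")
+         # During the test body the stage is ``call``.
+         assert current.endswith("(call)")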
+
+- Introduced ``@pytest.mark.filterwarnings`` mark which allows overwriting the
+ warnings filter on a per test, class or module level. See the `docs
+ <https://docs.pytest.org/en/latest/warnings.html#pytest-mark-
+ filterwarnings>`_ for more information. (`#2598 <https://github.com/pytest-
+ dev/pytest/issues/2598>`_)
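+
+ For example, on a single test (the warning text is illustrative):
+
+ .. code-block:: python
+
+     import warnings
+
+     import pytest
+
+     @pytest.mark.filterwarnings("ignore::DeprecationWarning")
+     def test_legacy_api():
+         # The per-test filter suppresses this warning without changing the
+         # global warnings configuration.
+         warnings.warn("old API", DeprecationWarning)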
+
+- ``--last-failed`` now remembers forever when a test has failed and only
+ forgets it if it passes again. This makes it easy to fix a test suite by
+ selectively running files and fixing tests incrementally. (`#2621
+ <https://github.com/pytest-dev/pytest/issues/2621>`_)
+
+- New ``pytest_report_collectionfinish`` hook which allows plugins to add
+ messages to the terminal reporting after collection has been finished
+ successfully. (`#2622 <https://github.com/pytest-dev/pytest/issues/2622>`_)
+
+- Added support for `PEP-415's <https://www.python.org/dev/peps/pep-0415/>`_
+ ``Exception.__suppress_context__``. Now if a ``raise exception from None`` is
+ caught by pytest, pytest will no longer chain the context in the test report.
+ The behavior now matches Python's traceback behavior. (`#2631
+ <https://github.com/pytest-dev/pytest/issues/2631>`_)
+
+- Exceptions raised by ``pytest.fail``, ``pytest.skip`` and ``pytest.xfail``
+ now subclass ``BaseException``, making them harder to catch unintentionally
+ in normal code. (`#580 <https://github.com/pytest-dev/pytest/issues/580>`_)
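+
+ A small sketch of the effect (helper and test names are illustrative):
+
+ .. code-block:: python
+
+     import pytest
+
+     def guarded_helper():
+         try:
+             pytest.skip("requires an external service")
+         except Exception:
+             # Broad ``except Exception`` blocks no longer swallow the skip,
+             # since the outcome exception derives from ``BaseException``.
+             pass
+
+     def test_outcome_not_swallowed():
+         guarded_helper()  # the test is still reported as skipped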
+
+
+Bug Fixes
+---------
+
+- Set ``stdin`` to a closed ``PIPE`` in ``pytester.py.Testdir.popen()`` to
+ avoid unwanted interactive ``pdb`` sessions. (`#2023
+ <https://github.com/pytest-dev/pytest/issues/2023>`_)
+
+- Add missing ``encoding`` attribute to ``sys.std*`` streams when using
+ ``capsys`` capture mode. (`#2375 <https://github.com/pytest-
+ dev/pytest/issues/2375>`_)
+
+- Fix terminal color changing to black on Windows if ``colorama`` is imported
+ in a ``conftest.py`` file. (`#2510 <https://github.com/pytest-
+ dev/pytest/issues/2510>`_)
+
+- Fix line number when reporting summary of skipped tests. (`#2548
+ <https://github.com/pytest-dev/pytest/issues/2548>`_)
+
+- capture: ensure that EncodedFile.name is a string. (`#2555
+ <https://github.com/pytest-dev/pytest/issues/2555>`_)
+
+- The options ``--fixtures`` and ``--fixtures-per-test`` will now keep
+ indentation within docstrings. (`#2574 <https://github.com/pytest-
+ dev/pytest/issues/2574>`_)
+
+- Doctest line numbers are now reported correctly, fixing `pytest-sugar#122
+ <https://github.com/Frozenball/pytest-sugar/issues/122>`_. (`#2610
+ <https://github.com/pytest-dev/pytest/issues/2610>`_)
+
+- Fix non-determinism in order of fixture collection. Adds new dependency
+ (ordereddict) for Python 2.6. (`#920 <https://github.com/pytest-
+ dev/pytest/issues/920>`_)
+
+
+Improved Documentation
+----------------------
+
+- Clarify ``pytest_configure`` hook call order. (`#2539
+ <https://github.com/pytest-dev/pytest/issues/2539>`_)
+
+- Extend documentation for testing plugin code with the ``pytester`` plugin.
+ (`#971 <https://github.com/pytest-dev/pytest/issues/971>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Update help message for ``--strict`` to make it clear it only deals with
+ unregistered markers, not warnings. (`#2444 <https://github.com/pytest-
+ dev/pytest/issues/2444>`_)
+
+- Internal code move: move code for pytest.approx/pytest.raises to own files in
+ order to cut down the size of python.py (`#2489 <https://github.com/pytest-
+ dev/pytest/issues/2489>`_)
+
+- Renamed the utility function ``_pytest.compat._escape_strings`` to
+ ``_ascii_escaped`` to better communicate the function's purpose. (`#2533
+ <https://github.com/pytest-dev/pytest/issues/2533>`_)
+
+- Improve error message for CollectError with skip/skipif. (`#2546
+ <https://github.com/pytest-dev/pytest/issues/2546>`_)
+
+- Emit warning about ``yield`` tests being deprecated only once per generator.
+ (`#2562 <https://github.com/pytest-dev/pytest/issues/2562>`_)
+
+- Ensure final collected line doesn't include artifacts of previous write.
+ (`#2571 <https://github.com/pytest-dev/pytest/issues/2571>`_)
+
+- Fixed all flake8 errors and warnings. (`#2581 <https://github.com/pytest-
+ dev/pytest/issues/2581>`_)
+
+- Added ``fix-lint`` tox environment to run automatic pep8 fixes on the code.
+ (`#2582 <https://github.com/pytest-dev/pytest/issues/2582>`_)
+
+- Turn warnings into errors in pytest's own test suite in order to catch
+ regressions due to deprecations more promptly. (`#2588
+ <https://github.com/pytest-dev/pytest/issues/2588>`_)
+
+- Show multiple issue links in CHANGELOG entries. (`#2620
+ <https://github.com/pytest-dev/pytest/issues/2620>`_)
+
+
+Pytest 3.1.3 (2017-07-03)
+=========================
+
+Bug Fixes
+---------
+
+- Fix decode error in Python 2 for doctests in docstrings. (`#2434
+ <https://github.com/pytest-dev/pytest/issues/2434>`_)
+
+- Exceptions raised during teardown by finalizers are now suppressed until all
+ finalizers are called, with the initial exception reraised. (`#2440
+ <https://github.com/pytest-dev/pytest/issues/2440>`_)
+
+- Fix incorrect "collected items" report when specifying tests on the command-
+ line. (`#2464 <https://github.com/pytest-dev/pytest/issues/2464>`_)
+
+- ``deprecated_call`` in context-manager form now captures deprecation warnings
+ even if the same warning has already been raised. Also, ``deprecated_call``
+ will always produce the same error message (previously it would produce
+ different messages in context-manager vs. function-call mode). (`#2469
+ <https://github.com/pytest-dev/pytest/issues/2469>`_)
+
+- Fix issue where paths collected by pytest could have triple leading ``/``
+ characters. (`#2475 <https://github.com/pytest-dev/pytest/issues/2475>`_)
+
+- Fix internal error when trying to detect the start of a recursive traceback.
+ (`#2486 <https://github.com/pytest-dev/pytest/issues/2486>`_)
+
+
+Improved Documentation
+----------------------
+
+- Explicitly state for which hooks the calls stop after the first non-None
+ result. (`#2493 <https://github.com/pytest-dev/pytest/issues/2493>`_)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Create invoke tasks for updating the vendored packages. (`#2474
+ <https://github.com/pytest-dev/pytest/issues/2474>`_)
+
+- Update copyright dates in LICENSE, README.rst and in the documentation.
+ (`#2499 <https://github.com/pytest-dev/pytest/issues/2499>`_)
+
+
+Pytest 3.1.2 (2017-06-08)
+=========================
+
+Bug Fixes
+---------
+
+- Required options added via ``pytest_addoption`` will no longer prevent using
+ --help without passing them. (#1999)
+
+- Respect ``python_files`` in assertion rewriting. (#2121)
+
+- Fix recursion error detection when frames in the traceback contain objects
+ that can't be compared (like ``numpy`` arrays). (#2459)
+
+- ``UnicodeWarning`` is issued from the internal pytest warnings plugin only
+ when the message contains non-ascii unicode (Python 2 only). (#2463)
+
+- Added a workaround for Python 3.6 ``WindowsConsoleIO`` breaking due to pytest's
+ ``FDCapture``. Other code using console handles might still be affected by the
+ very same issue and might require further workarounds/fixes, e.g. ``colorama``.
+ (#2467)
+
+
+Improved Documentation
+----------------------
+
+- Fix internal API links to ``pluggy`` objects. (#2331)
+
+- Make it clear that ``pytest.xfail`` stops test execution at the calling point
+ and improve overall flow of the ``skipping`` docs. (#810)
+
+
+Pytest 3.1.1 (2017-05-30)
+=========================
+
+Bug Fixes
+---------
+
+- pytest warning capture no longer overrides existing warning filters. The
+ previous behaviour would override all filters and caused regressions in test
+ suites which configure warning filters to match their needs. Note that a
+ side effect of this is that ``DeprecationWarning`` and
+ ``PendingDeprecationWarning`` are no longer shown by default. (#2430)
+
+- Fix issue with non-ascii contents in doctest text files. (#2434)
+
+- Fix encoding errors for unicode warnings in Python 2. (#2436)
+
+- ``pytest.deprecated_call`` now captures ``PendingDeprecationWarning`` in
+ context manager form. (#2441)
+
+
+Improved Documentation
+----------------------
+
+- Addition of towncrier for changelog management. (#2390)
+
+
+3.1.0 (2017-05-22)
+==================
+
+
+New Features
+------------
+
+* The ``pytest-warnings`` plugin has been integrated into the core and now ``pytest`` automatically
+ captures and displays warnings at the end of the test session.
+
+ .. warning::
+
+ This feature may disrupt test suites which apply and treat warnings themselves, and can be
+ disabled in your ``pytest.ini``:
+
+ .. code-block:: ini
+
+ [pytest]
+ addopts = -p no:warnings
+
+ See the `warnings documentation page <https://docs.pytest.org/en/latest/warnings.html>`_ for more
+ information.
+
+ Thanks `@nicoddemus`_ for the PR.
+
+* Added ``junit_suite_name`` ini option to specify root ``<testsuite>`` name for JUnit XML reports (`#533`_).
+
+* Added an ini option ``doctest_encoding`` to specify which encoding to use for doctest files.
+ Thanks `@wheerd`_ for the PR (`#2101`_).
+
+* ``pytest.warns`` now checks for subclass relationship rather than
+ class equality. Thanks `@lesteve`_ for the PR (`#2166`_)
+
+* ``pytest.raises`` now asserts that the error message matches a text or regex
+ with the ``match`` keyword argument. Thanks `@Kriechi`_ for the PR.
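+
+ A minimal sketch (the exception and pattern are illustrative):
+
+ .. code-block:: python
+
+     import pytest
+
+     def test_raises_with_match():
+         # ``match`` is searched as a regular expression in the exception
+         # message.
+         with pytest.raises(ValueError, match=r"invalid literal"):
+             int("not a number")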
+
+* ``pytest.param`` can be used to declare test parameter sets with marks and test ids.
+ Thanks `@RonnyPfannschmidt`_ for the PR.
+
+
+Changes
+-------
+
+* Remove all internal uses of the ``pytest_namespace`` hook; this prepares for
+ the removal of ``preloadconfig`` in pytest 4.0.
+ Thanks to `@RonnyPfannschmidt`_ for the PR.
+
+* pytest now warns when a callable ``ids`` function raises an exception in a parametrized test. Thanks `@fogo`_ for the PR.
+
+* It is now possible to skip test classes from being collected by setting a
+ ``__test__`` attribute to ``False`` in the class body (`#2007`_). Thanks
+ to `@syre`_ for the report and `@lwm`_ for the PR.
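+
+ For example (the class name is illustrative):
+
+ .. code-block:: python
+
+     class TestHelperMixin:
+         # Excluded from collection despite matching the ``Test*`` pattern.
+         __test__ = False
+
+         def test_shared_behaviour(self):
+             assert True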
+
+* Change junitxml.py to produce reports that comply with the JUnit XML schema.
+ If the same test fails in the call phase and then errors in teardown,
+ the testcase element is split into two: one containing the error and the other
+ the failure. (`#2228`_) Thanks to `@kkoukiou`_ for the PR.
+
+* Testcase reports with a ``url`` attribute will now properly write this to junitxml.
+ Thanks `@fushi`_ for the PR (`#1874`_).
+
+* Remove common items from dict comparison output when verbosity=1. Also update
+ the truncation message to make it clearer that pytest truncates all
+ assertion messages if verbosity < 2 (`#1512`_).
+ Thanks `@mattduck`_ for the PR.
+
+* ``--pdbcls`` no longer implies ``--pdb``. This makes it possible to use
+ ``addopts=--pdbcls=module.SomeClass`` on ``pytest.ini``. Thanks `@davidszotten`_ for
+ the PR (`#1952`_).
+
+* fix `#2013`_: turn RecordedWarning into ``namedtuple``,
+ to give it a comprehensible repr while preventing unwarranted modification.
+
+* fix `#2208`_: ensure an iteration limit for _pytest.compat.get_real_func.
+ Thanks `@RonnyPfannschmidt`_ for the report and PR.
+
+* Hooks are now verified after collection is complete, rather than right after loading installed plugins. This
+ makes it easy to write hooks for plugins which will be loaded during collection, for example using the
+ ``pytest_plugins`` special variable (`#1821`_).
+ Thanks `@nicoddemus`_ for the PR.
+
+* Modify ``pytest_make_parametrize_id()`` hook to accept ``argname`` as an
+ additional parameter.
+ Thanks `@unsignedint`_ for the PR.
+
+* Add ``venv`` to the default ``norecursedirs`` setting.
+ Thanks `@The-Compiler`_ for the PR.
+
+* ``PluginManager.import_plugin`` now accepts unicode plugin names in Python 2.
+ Thanks `@reutsharabani`_ for the PR.
+
+* fix `#2308`_: When using both ``--lf`` and ``--ff``, only the last failed tests are run.
+ Thanks `@ojii`_ for the PR.
+
+* Replace minor/patch level version numbers in the documentation with placeholders.
+ This significantly reduces change-noise as different contributors regenerate
+ the documentation on different platforms.
+ Thanks `@RonnyPfannschmidt`_ for the PR.
+
+* fix `#2391`_: consider pytest_plugins on all plugin modules
+ Thanks `@RonnyPfannschmidt`_ for the PR.
+
+
+Bug Fixes
+---------
+
+* Fix ``AttributeError`` on ``sys.stdout.buffer`` / ``sys.stderr.buffer``
+ while using ``capsys`` fixture in python 3. (`#1407`_).
+ Thanks to `@asottile`_.
+
+* Change capture.py's ``DontReadFromInput`` class to throw ``io.UnsupportedOperation`` errors rather
+ than ``ValueError`` in the ``fileno`` method (`#2276`_).
+ Thanks `@metasyn`_ and `@vlad-dragos`_ for the PR.
+
+* Fix exception formatting while importing modules when the exception message
+ contains non-ascii characters (`#2336`_).
+ Thanks `@fabioz`_ for the report and `@nicoddemus`_ for the PR.
+
+* Added documentation related to issue (`#1937`_)
+ Thanks `@skylarjhdownes`_ for the PR.
+
+* Allow collecting files with any file extension as Python modules (`#2369`_).
+ Thanks `@Kodiologist`_ for the PR.
+
+* Show the correct error message when collecting a ``parametrize`` function with wrong args (`#2383`_).
+ Thanks `@The-Compiler`_ for the report and `@robin0371`_ for the PR.
+
+
+.. _@davidszotten: https://github.com/davidszotten
+.. _@fabioz: https://github.com/fabioz
+.. _@fogo: https://github.com/fogo
+.. _@fushi: https://github.com/fushi
+.. _@Kodiologist: https://github.com/Kodiologist
+.. _@Kriechi: https://github.com/Kriechi
+.. _@mandeep: https://github.com/mandeep
+.. _@mattduck: https://github.com/mattduck
+.. _@metasyn: https://github.com/metasyn
+.. _@MichalTHEDUDE: https://github.com/MichalTHEDUDE
+.. _@ojii: https://github.com/ojii
+.. _@reutsharabani: https://github.com/reutsharabani
+.. _@robin0371: https://github.com/robin0371
+.. _@skylarjhdownes: https://github.com/skylarjhdownes
+.. _@unsignedint: https://github.com/unsignedint
+.. _@wheerd: https://github.com/wheerd
+
+
+.. _#1407: https://github.com/pytest-dev/pytest/issues/1407
+.. _#1512: https://github.com/pytest-dev/pytest/issues/1512
+.. _#1821: https://github.com/pytest-dev/pytest/issues/1821
+.. _#1874: https://github.com/pytest-dev/pytest/pull/1874
+.. _#1937: https://github.com/pytest-dev/pytest/issues/1937
+.. _#1952: https://github.com/pytest-dev/pytest/pull/1952
+.. _#2007: https://github.com/pytest-dev/pytest/issues/2007
+.. _#2013: https://github.com/pytest-dev/pytest/issues/2013
+.. _#2101: https://github.com/pytest-dev/pytest/pull/2101
+.. _#2166: https://github.com/pytest-dev/pytest/pull/2166
+.. _#2208: https://github.com/pytest-dev/pytest/issues/2208
+.. _#2228: https://github.com/pytest-dev/pytest/issues/2228
+.. _#2276: https://github.com/pytest-dev/pytest/issues/2276
+.. _#2308: https://github.com/pytest-dev/pytest/issues/2308
+.. _#2336: https://github.com/pytest-dev/pytest/issues/2336
+.. _#2369: https://github.com/pytest-dev/pytest/issues/2369
+.. _#2383: https://github.com/pytest-dev/pytest/issues/2383
+.. _#2391: https://github.com/pytest-dev/pytest/issues/2391
+.. _#533: https://github.com/pytest-dev/pytest/issues/533
+
+
+
+3.0.7 (2017-03-14)
+==================
+
+
+* Fix issue in assertion rewriting breaking due to modules silently discarding
+ other modules when importing fails.
+ Notably, importing the ``anydbm`` module is fixed. (`#2248`_).
+ Thanks `@pfhayes`_ for the PR.
+
+* junitxml: Fix problematic case where system-out tag occurred twice per testcase
+ element in the XML report. Thanks `@kkoukiou`_ for the PR.
+
+* Fix regression, pytest now skips unittest correctly if run with ``--pdb``
+ (`#2137`_). Thanks to `@gst`_ for the report and `@mbyt`_ for the PR.
+
+* Ignore exceptions raised from descriptors (e.g. properties) during Python test collection (`#2234`_).
+ Thanks to `@bluetech`_.
+
+* ``--override-ini`` now correctly overrides some fundamental options like ``python_files`` (`#2238`_).
+ Thanks `@sirex`_ for the report and `@nicoddemus`_ for the PR.
+
+* Replace ``raise StopIteration`` usages in the code by simple ``returns`` to finish generators, in accordance with `PEP-479`_ (`#2160`_).
+ Thanks `@tgoodlet`_ for the report and `@nicoddemus`_ for the PR.
+
+* Fix internal errors when an unprintable ``AssertionError`` is raised inside a test.
+ Thanks `@omerhadari`_ for the PR.
+
+* Skipping plugin now also works with test items generated by custom collectors (`#2231`_).
+ Thanks to `@vidartf`_.
+
+* Fix trailing whitespace in console output if no ``.ini`` file is present (`#2281`_). Thanks `@fbjorn`_ for the PR.
+
+* Conditionless ``xfail`` markers no longer rely on the underlying test item
+ being an instance of ``PyobjMixin``, and can therefore apply to tests not
+ collected by the built-in python test collector. Thanks `@barneygale`_ for the
+ PR.
+
+
+.. _@pfhayes: https://github.com/pfhayes
+.. _@bluetech: https://github.com/bluetech
+.. _@gst: https://github.com/gst
+.. _@sirex: https://github.com/sirex
+.. _@vidartf: https://github.com/vidartf
+.. _@kkoukiou: https://github.com/KKoukiou
+.. _@omerhadari: https://github.com/omerhadari
+.. _@fbjorn: https://github.com/fbjorn
+
+.. _#2248: https://github.com/pytest-dev/pytest/issues/2248
+.. _#2137: https://github.com/pytest-dev/pytest/issues/2137
+.. _#2160: https://github.com/pytest-dev/pytest/issues/2160
+.. _#2231: https://github.com/pytest-dev/pytest/issues/2231
+.. _#2234: https://github.com/pytest-dev/pytest/issues/2234
+.. _#2238: https://github.com/pytest-dev/pytest/issues/2238
+.. _#2281: https://github.com/pytest-dev/pytest/issues/2281
+
+.. _PEP-479: https://www.python.org/dev/peps/pep-0479/
+
+
+3.0.6 (2017-01-22)
+==================
+
+* pytest no longer generates ``PendingDeprecationWarning`` from its own operations, which was introduced by mistake in version ``3.0.5`` (`#2118`_).
+ Thanks to `@nicoddemus`_ for the report and `@RonnyPfannschmidt`_ for the PR.
+
+
+* pytest no longer recognizes coroutine functions as yield tests (`#2129`_).
+ Thanks to `@malinoff`_ for the PR.
+
+* Plugins loaded by the ``PYTEST_PLUGINS`` environment variable are now automatically
+ considered for assertion rewriting (`#2185`_).
+ Thanks `@nicoddemus`_ for the PR.
+
+* Improve error message when ``pytest.warns`` fails (`#2150`_). The type(s) of the
+ expected warnings and the list of caught warnings are added to the
+ error message. Thanks `@lesteve`_ for the PR.
+
+* Fix ``pytester`` internal plugin to work correctly with latest versions of
+ ``zope.interface`` (`#1989`_). Thanks `@nicoddemus`_ for the PR.
+
+* Assert statements of the ``pytester`` plugin again benefit from assertion rewriting (`#1920`_).
+ Thanks `@RonnyPfannschmidt`_ for the report and `@nicoddemus`_ for the PR.
+
+* Specifying tests with colons like ``test_foo.py::test_bar`` for tests in
+ subdirectories with ini configuration files now uses the correct ini file
+ (`#2148`_). Thanks `@pelme`_.
+
+* Fail ``testdir.runpytest().assert_outcomes()`` explicitly if the pytest
+ terminal output it relies on is missing. Thanks to `@eli-b`_ for the PR.
+
+
+.. _@barneygale: https://github.com/barneygale
+.. _@lesteve: https://github.com/lesteve
+.. _@malinoff: https://github.com/malinoff
+.. _@pelme: https://github.com/pelme
+.. _@eli-b: https://github.com/eli-b
+
+.. _#2118: https://github.com/pytest-dev/pytest/issues/2118
+
+.. _#1989: https://github.com/pytest-dev/pytest/issues/1989
+.. _#1920: https://github.com/pytest-dev/pytest/issues/1920
+.. _#2129: https://github.com/pytest-dev/pytest/issues/2129
+.. _#2148: https://github.com/pytest-dev/pytest/issues/2148
+.. _#2150: https://github.com/pytest-dev/pytest/issues/2150
+.. _#2185: https://github.com/pytest-dev/pytest/issues/2185
+
+
+3.0.5 (2016-12-05)
+==================
+
+* Add warning when not passing ``option=value`` correctly to ``-o/--override-ini`` (`#2105`_).
+ Also improved the help documentation. Thanks to `@mbukatov`_ for the report and
+ `@lwm`_ for the PR.
+
+* Now ``--confcutdir`` and ``--junit-xml`` are properly validated if they are directories
+ and filenames, respectively (`#2089`_ and `#2078`_). Thanks to `@lwm`_ for the PR.
+
+* Add a hint to the error message about a possibly missing ``__init__.py`` (`#478`_). Thanks `@DuncanBetts`_.
+
+* More accurately describe when fixture finalization occurs in documentation (`#687`_). Thanks `@DuncanBetts`_.
+
+* Provide ``:ref:`` targets for ``recwarn.rst`` so we can use intersphinx referencing.
+ Thanks to `@dupuy`_ for the report and `@lwm`_ for the PR.
+
+* In Python 2, use a simple ``+-`` ASCII string in the string representation of ``pytest.approx`` (for example ``"4 +- 4.0e-06"``),
+ because handling the ``±`` character consistently across the different contexts and representations used internally in pytest
+ is brittle and can result in bugs such as `#2111`_. In Python 3, the representation still uses ``±`` (for example ``4 ± 4.0e-06``).
+ Thanks `@kerrick-lyft`_ for the report and `@nicoddemus`_ for the PR.
+
+* Using ``item.Function``, ``item.Module``, etc., now issues deprecation warnings; prefer
+ ``pytest.Function``, ``pytest.Module``, etc., instead (`#2034`_).
+ Thanks `@nmundar`_ for the PR.
+
+* Fix error message using ``approx`` with complex numbers (`#2082`_).
+ Thanks `@adler-j`_ for the report and `@nicoddemus`_ for the PR.
+
+* Fixed false-positives warnings from assertion rewrite hook for modules imported more than
+ once by the ``pytest_plugins`` mechanism.
+ Thanks `@nicoddemus`_ for the PR.
+
+* Remove an internal cache which could cause hooks from ``conftest.py`` files in
+ sub-directories to be called in other directories incorrectly (`#2016`_).
+ Thanks `@d-b-w`_ for the report and `@nicoddemus`_ for the PR.
+
+* Remove internal code meant to support earlier Python 3 versions that produced the side effect
+ of leaving ``None`` in ``sys.modules`` when expressions were evaluated by pytest (for example passing a condition
+ as a string to ``pytest.mark.skipif``)(`#2103`_).
+ Thanks `@jaraco`_ for the report and `@nicoddemus`_ for the PR.
+
+* Cope gracefully with a .pyc file with no matching .py file (`#2038`_). Thanks
+ `@nedbat`_.
+
+.. _@syre: https://github.com/syre
+.. _@adler-j: https://github.com/adler-j
+.. _@d-b-w: https://bitbucket.org/d-b-w/
+.. _@DuncanBetts: https://github.com/DuncanBetts
+.. _@dupuy: https://bitbucket.org/dupuy/
+.. _@kerrick-lyft: https://github.com/kerrick-lyft
+.. _@lwm: https://github.com/lwm
+.. _@mbukatov: https://github.com/mbukatov
+.. _@nedbat: https://github.com/nedbat
+.. _@nmundar: https://github.com/nmundar
+
+.. _#2016: https://github.com/pytest-dev/pytest/issues/2016
+.. _#2034: https://github.com/pytest-dev/pytest/issues/2034
+.. _#2038: https://github.com/pytest-dev/pytest/issues/2038
+.. _#2078: https://github.com/pytest-dev/pytest/issues/2078
+.. _#2082: https://github.com/pytest-dev/pytest/issues/2082
+.. _#2089: https://github.com/pytest-dev/pytest/issues/2089
+.. _#2103: https://github.com/pytest-dev/pytest/issues/2103
+.. _#2105: https://github.com/pytest-dev/pytest/issues/2105
+.. _#2111: https://github.com/pytest-dev/pytest/issues/2111
+.. _#478: https://github.com/pytest-dev/pytest/issues/478
+.. _#687: https://github.com/pytest-dev/pytest/issues/687
+
+
+3.0.4 (2016-11-09)
+==================
+
+* Import errors when collecting test modules now display the full traceback (`#1976`_).
+ Thanks `@cwitty`_ for the report and `@nicoddemus`_ for the PR.
+
+* Fix confusing command-line help message for custom options with two or more ``metavar`` properties (`#2004`_).
+ Thanks `@okulynyak`_ and `@davehunt`_ for the report and `@nicoddemus`_ for the PR.
+
+* When loading plugins, import errors which contain non-ascii messages are now properly handled in Python 2 (`#1998`_).
+ Thanks `@nicoddemus`_ for the PR.
+
+* Fixed cyclic reference when ``pytest.raises`` is used in context-manager form (`#1965`_). Also as a
+ result of this fix, ``sys.exc_info()`` is left empty in both context-manager and function call usages.
+ Previously, ``sys.exc_info`` would contain the exception caught by the context manager,
+ even when the expected exception occurred.
+ Thanks `@MSeifert04`_ for the report and the PR.
+
+* Fixed false-positives warnings from assertion rewrite hook for modules that were rewritten but
+ were later marked explicitly by ``pytest.register_assert_rewrite``
+ or implicitly as a plugin (`#2005`_).
+ Thanks `@RonnyPfannschmidt`_ for the report and `@nicoddemus`_ for the PR.
+
+* Report teardown output on test failure (`#442`_).
+ Thanks `@matclab`_ for the PR.
+
+* Fix teardown error message in generated xUnit XML.
+ Thanks `@gdyuldin`_ for the PR.
+
+* Properly handle exceptions in ``multiprocessing`` tasks (`#1984`_).
+ Thanks `@adborden`_ for the report and `@nicoddemus`_ for the PR.
+
+* Clean up unittest TestCase objects after tests are complete (`#1649`_).
+ Thanks `@d_b_w`_ for the report and PR.
+
+
+.. _@adborden: https://github.com/adborden
+.. _@cwitty: https://github.com/cwitty
+.. _@d_b_w: https://github.com/d_b_w
+.. _@gdyuldin: https://github.com/gdyuldin
+.. _@matclab: https://github.com/matclab
+.. _@MSeifert04: https://github.com/MSeifert04
+.. _@okulynyak: https://github.com/okulynyak
+
+.. _#442: https://github.com/pytest-dev/pytest/issues/442
+.. _#1965: https://github.com/pytest-dev/pytest/issues/1965
+.. _#1976: https://github.com/pytest-dev/pytest/issues/1976
+.. _#1984: https://github.com/pytest-dev/pytest/issues/1984
+.. _#1998: https://github.com/pytest-dev/pytest/issues/1998
+.. _#2004: https://github.com/pytest-dev/pytest/issues/2004
+.. _#2005: https://github.com/pytest-dev/pytest/issues/2005
+.. _#1649: https://github.com/pytest-dev/pytest/issues/1649
+
+
+3.0.3 (2016-09-28)
+==================
+
+* The ``ids`` argument to ``parametrize`` again accepts ``unicode`` strings
+ in Python 2 (`#1905`_).
+ Thanks `@philpep`_ for the report and `@nicoddemus`_ for the PR.
+
+* Assertions are now being rewritten for plugins in development mode
+ (``pip install -e``) (`#1934`_).
+ Thanks `@nicoddemus`_ for the PR.
+
+* Fix pkg_resources import error in Jython projects (`#1853`_).
+ Thanks `@raquel-ucl`_ for the PR.
+
+* Got rid of ``AttributeError: 'Module' object has no attribute '_obj'`` exception
+ in Python 3 (`#1944`_).
+ Thanks `@axil`_ for the PR.
+
+* Explain a bad scope value passed to ``@fixture`` declarations or
+ a ``MetaFunc.parametrize()`` call. Thanks `@tgoodlet`_ for the PR.
+
+* This version includes ``pluggy-0.4.0``, which correctly handles
+ ``VersionConflict`` errors in plugins (`#704`_).
+ Thanks `@nicoddemus`_ for the PR.
+
+
+.. _@philpep: https://github.com/philpep
+.. _@raquel-ucl: https://github.com/raquel-ucl
+.. _@axil: https://github.com/axil
+.. _@tgoodlet: https://github.com/tgoodlet
+.. _@vlad-dragos: https://github.com/vlad-dragos
+
+.. _#1853: https://github.com/pytest-dev/pytest/issues/1853
+.. _#1905: https://github.com/pytest-dev/pytest/issues/1905
+.. _#1934: https://github.com/pytest-dev/pytest/issues/1934
+.. _#1944: https://github.com/pytest-dev/pytest/issues/1944
+.. _#704: https://github.com/pytest-dev/pytest/issues/704
+
+
+
+
+3.0.2 (2016-09-01)
+==================
+
+* Improve error message when passing non-string ids to ``pytest.mark.parametrize`` (`#1857`_).
+ Thanks `@okken`_ for the report and `@nicoddemus`_ for the PR.
+
+* Add ``buffer`` attribute to the stdin stub class ``pytest.capture.DontReadFromInput``.
+ Thanks `@joguSD`_ for the PR.
+
+* Fix ``UnicodeEncodeError`` when string comparison with unicode has failed. (`#1864`_)
+ Thanks `@AiOO`_ for the PR.
+
+* ``pytest_plugins`` is now handled correctly if defined as a string (as opposed to
+ a sequence of strings) when modules are considered for assertion rewriting.
+ Due to this bug, many more modules were being rewritten than necessary
+ if a test suite uses ``pytest_plugins`` to load internal plugins (`#1888`_).
+ Thanks `@jaraco`_ for the report and `@nicoddemus`_ for the PR (`#1891`_).
+
+* Do not call tearDown and cleanups when running tests from
+ ``unittest.TestCase`` subclasses with ``--pdb``
+ enabled. This allows proper post mortem debugging for all applications
+ which have significant logic in their tearDown machinery (`#1890`_). Thanks
+ `@mbyt`_ for the PR.
+
+* Fix use of deprecated ``getfuncargvalue`` method in the internal doctest plugin.
+ Thanks `@ViviCoder`_ for the report (`#1898`_).
+
+.. _@joguSD: https://github.com/joguSD
+.. _@AiOO: https://github.com/AiOO
+.. _@mbyt: https://github.com/mbyt
+.. _@ViviCoder: https://github.com/ViviCoder
+
+.. _#1857: https://github.com/pytest-dev/pytest/issues/1857
+.. _#1864: https://github.com/pytest-dev/pytest/issues/1864
+.. _#1888: https://github.com/pytest-dev/pytest/issues/1888
+.. _#1891: https://github.com/pytest-dev/pytest/pull/1891
+.. _#1890: https://github.com/pytest-dev/pytest/issues/1890
+.. _#1898: https://github.com/pytest-dev/pytest/issues/1898
+
+
+3.0.1 (2016-08-23)
+==================
+
+* Fix regression when ``importorskip`` is used at module level (`#1822`_).
+ Thanks `@jaraco`_ and `@The-Compiler`_ for the report and `@nicoddemus`_ for the PR.
+
+* Fix parametrization scope when session fixtures are used in conjunction
+ with normal parameters in the same call (`#1832`_).
+ Thanks `@The-Compiler`_ for the report, `@Kingdread`_ and `@nicoddemus`_ for the PR.
+
+* Fix internal error when parametrizing tests or fixtures using an empty ``ids`` argument (`#1849`_).
+ Thanks `@OPpuolitaival`_ for the report and `@nicoddemus`_ for the PR.
+
+* Fix loader error when running ``pytest`` embedded in a zipfile.
+ Thanks `@mbachry`_ for the PR.
+
+
+.. _@Kingdread: https://github.com/Kingdread
+.. _@mbachry: https://github.com/mbachry
+.. _@OPpuolitaival: https://github.com/OPpuolitaival
+
+.. _#1822: https://github.com/pytest-dev/pytest/issues/1822
+.. _#1832: https://github.com/pytest-dev/pytest/issues/1832
+.. _#1849: https://github.com/pytest-dev/pytest/issues/1849
+
+
+3.0.0 (2016-08-18)
+==================
+
+**Incompatible changes**
+
+
+A number of incompatible changes were made in this release, with the intent of removing features deprecated for a long
+time or changing existing behaviors in order to make them less surprising/more useful.
+
+* Reinterpretation mode has now been removed. Only plain and rewrite
+ modes are available; consequently the ``--assert=reinterp`` option is
+ no longer available. This also means files imported from plugins or
+ ``conftest.py`` will not benefit from improved assertions by
+ default; you should use ``pytest.register_assert_rewrite()`` to
+ explicitly turn on assertion rewriting for those files. Thanks
+ `@flub`_ for the PR.
+
+* The following deprecated commandline options were removed:
+
+ * ``--genscript``: no longer supported;
+ * ``--no-assert``: use ``--assert=plain`` instead;
+ * ``--nomagic``: use ``--assert=plain`` instead;
+ * ``--report``: use ``-r`` instead;
+
+ Thanks to `@RedBeardCode`_ for the PR (`#1664`_).
+
+* ImportErrors in plugins now are a fatal error instead of issuing a
+ pytest warning (`#1479`_). Thanks to `@The-Compiler`_ for the PR.
+
+* Removed support code for Python 3 versions < 3.3 (`#1627`_).
+
+* Removed all ``py.test-X*`` entry points. The versioned, suffixed entry points
+ were never documented and a leftover from a pre-virtualenv era. These entry
+ points also created broken entry points in wheels, so removing them also
+ removes a source of confusion for users (`#1632`_).
+ Thanks `@obestwalter`_ for the PR.
+
+* ``pytest.skip()`` now raises an error when used to decorate a test function,
+ as opposed to its original intent (to imperatively skip a test inside a test function). Previously
+ this usage would cause the entire module to be skipped (`#607`_).
+ Thanks `@omarkohl`_ for the complete PR (`#1519`_).
+
+* Exit tests if a collection error occurs. A poll indicated most users will hit CTRL-C
+ anyway as soon as they see collection errors, so pytest might as well make that the default behavior (`#1421`_).
+ A ``--continue-on-collection-errors`` option has been added to restore the previous behaviour.
+ Thanks `@olegpidsadnyi`_ and `@omarkohl`_ for the complete PR (`#1628`_).
+
+* Renamed the pytest ``pdb`` module (plugin) into ``debugging`` to avoid clashes with the builtin ``pdb`` module.
+
+* Raise a helpful failure message when requesting a parametrized fixture at runtime,
+ e.g. with ``request.getfixturevalue``. Previously these parameters were simply
+ never defined, so a fixture decorated like ``@pytest.fixture(params=[0, 1, 2])``
+ only ran once (`#460`_).
+ Thanks to `@nikratio`_ for the bug report, `@RedBeardCode`_ and `@tomviner`_ for the PR.
+
+* ``_pytest.monkeypatch.monkeypatch`` class has been renamed to ``_pytest.monkeypatch.MonkeyPatch``
+ so it doesn't conflict with the ``monkeypatch`` fixture.
+
+* ``--exitfirst / -x`` can now be overridden by a following ``--maxfail=N``
+ and is just a synonym for ``--maxfail=1``.
+
+
+**New Features**
+
+* Support nose-style ``__test__`` attribute on methods of classes,
+ including unittest-style Classes. If set to ``False``, the test will not be
+ collected.
+
+* New ``doctest_namespace`` fixture for injecting names into the
+ namespace in which doctests run.
+ Thanks `@milliams`_ for the complete PR (`#1428`_).
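+
+ A minimal sketch of injecting a name from a ``conftest.py`` (the injected
+ module is illustrative):
+
+ .. code-block:: python
+
+     import math
+
+     import pytest
+
+     @pytest.fixture(autouse=True)
+     def add_math(doctest_namespace):
+         # Doctests can now refer to ``math`` without importing it themselves.
+         doctest_namespace["math"] = math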
+
+* New ``--doctest-report`` option available to change the output format of diffs
+ when running (failing) doctests (implements `#1749`_).
+ Thanks `@hartym`_ for the PR.
+
+* New ``name`` argument to ``pytest.fixture`` decorator which allows a custom name
+ for a fixture (to solve the funcarg-shadowing-fixture problem).
+ Thanks `@novas0x2a`_ for the complete PR (`#1444`_).
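+
+ For example (fixture and test names are illustrative):
+
+ .. code-block:: python
+
+     import pytest
+
+     @pytest.fixture(name="config")
+     def config_fixture():
+         # Requested as ``config``, so the fixture function's own name no
+         # longer shadows the argument name.
+         return {"verbose": True}
+
+     def test_uses_config(config):
+         assert config["verbose"]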
+
+* New ``approx()`` function for easily comparing floating-point numbers in
+ tests.
+ Thanks `@kalekundert`_ for the complete PR (`#1441`_).
+
+* Ability to add global properties in the final xunit output file by accessing
+ the internal ``junitxml`` plugin (experimental).
+ Thanks `@tareqalayan`_ for the complete PR (`#1454`_).
+
+* New ``ExceptionInfo.match()`` method to match a regular expression on the
+ string representation of an exception (`#372`_).
+ Thanks `@omarkohl`_ for the complete PR (`#1502`_).
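+
+ A small sketch (exception message and pattern are illustrative):
+
+ .. code-block:: python
+
+     import pytest
+
+     def test_excinfo_match():
+         with pytest.raises(ValueError) as excinfo:
+             raise ValueError("value must be positive, got -1")
+         # Searches the exception's string representation with ``re.search``.
+         excinfo.match(r"got -\d+")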
+
+* ``__tracebackhide__`` can now also be set to a callable which then can decide
+ whether to filter the traceback based on the ``ExceptionInfo`` object passed
+ to it. Thanks `@The-Compiler`_ for the complete PR (`#1526`_).
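+
+ One possible shape of the callable form (helper names are illustrative, and
+ the ``errisinstance`` helper on ``ExceptionInfo`` is assumed):
+
+ .. code-block:: python
+
+     def check_positive(value):
+         # Hide this frame only for assertion failures.
+         __tracebackhide__ = lambda excinfo: excinfo.errisinstance(AssertionError)
+         assert value > 0, "value must be positive"
+
+     def test_helper_frame_hidden():
+         check_positive(1)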
+
+* New ``pytest_make_parametrize_id(config, val)`` hook which can be used by plugins to provide
+ friendly strings for custom types.
+ Thanks `@palaviv`_ for the PR.
+
+* ``capsys`` and ``capfd`` now have a ``disabled()`` context-manager method, which
+ can be used to temporarily disable capture within a test.
+ Thanks `@nicoddemus`_ for the PR.
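+
+ For example (the messages are illustrative):
+
+ .. code-block:: python
+
+     def test_with_live_output(capsys):
+         print("captured as usual")
+         with capsys.disabled():
+             # Written directly to the real stdout, bypassing capture.
+             print("shown immediately in the terminal")
+         print("captured again")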
+
+* New cli flag ``--fixtures-per-test``: shows which fixtures are being used
+ for each selected test item. Features doc strings of fixtures by default.
+ Can also show where fixtures are defined if combined with ``-v``.
+ Thanks `@hackebrot`_ for the PR.
+
+* Introduce ``pytest`` command as recommended entry point. Note that ``py.test``
+ still works and is not scheduled for removal. Closes proposal
+ `#1629`_. Thanks `@obestwalter`_ and `@davehunt`_ for the complete PR
+ (`#1633`_).
+
+* New cli flags:
+
+ + ``--setup-plan``: performs normal collection and reports
+ the potential setup and teardown and does not execute any fixtures and tests;
+ + ``--setup-only``: performs normal collection, executes setup and teardown of
+ fixtures and reports them;
+ + ``--setup-show``: performs normal test execution and additionally shows
+ setup and teardown of fixtures;
+ + ``--keep-duplicates``: py.test now ignores duplicated paths given in the command
+ line. To retain the previous behavior where the same test could be run multiple
+ times by specifying it in the command-line multiple times, pass the ``--keep-duplicates``
+ argument (`#1609`_);
+
+ Thanks `@d6e`_, `@kvas-it`_, `@sallner`_, `@ioggstream`_ and `@omarkohl`_ for the PRs.
+
+* New CLI flag ``--override-ini``/``-o``: overrides values from the ini file.
+ For example: ``"-o xfail_strict=True"``.
+ Thanks `@blueyed`_ and `@fengxx`_ for the PR.
+
+* New hooks:
+
+ + ``pytest_fixture_setup(fixturedef, request)``: executes fixture setup;
+ + ``pytest_fixture_post_finalizer(fixturedef)``: called after the fixture's
+ finalizer and has access to the fixture's result cache.
+
+ Thanks `@d6e`_, `@sallner`_.
+
+* Issue warnings for asserts whose test is a tuple literal. Such asserts will
+ never fail because non-empty tuples are always truthy and are usually a mistake
+ (see `#1562`_). Thanks `@kvas-it`_ for the PR.
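+
+ A short sketch of the pitfall being warned about (values and messages are
+ illustrative):
+
+ .. code-block:: python
+
+     def test_tuple_assert_pitfall():
+         result = 2
+         # A non-empty tuple is always truthy, so an assert like
+         #     assert (result == 1, "result should be 1")
+         # can never fail and now triggers a warning.  The intended form
+         # passes the message as the second operand of ``assert``:
+         assert result == 2, "result should be 2"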
+
+* Allow passing a custom debugger class (e.g. ``--pdbcls=IPython.core.debugger:Pdb``).
+ Thanks to `@anntzer`_ for the PR.
+
+
+**Changes**
+
+* Plugins now benefit from assertion rewriting. Thanks
+ `@sober7`_, `@nicoddemus`_ and `@flub`_ for the PR.
+
+* Change ``report.outcome`` for ``xpassed`` tests to ``"passed"`` in non-strict
+ mode and ``"failed"`` in strict mode. Thanks to `@hackebrot`_ for the PR
+ (`#1795`_) and `@gprasad84`_ for report (`#1546`_).
+
+* Tests marked with ``xfail(strict=False)`` (the default) now appear in
+ JUnitXML reports as passing tests instead of skipped.
+ Thanks to `@hackebrot`_ for the PR (`#1795`_).
+
+* Highlight path of the file location in the error report to make it easier to copy/paste.
+ Thanks `@suzaku`_ for the PR (`#1778`_).
+
+* Fixtures marked with ``@pytest.fixture`` can now use ``yield`` statements exactly like
+ those marked with the ``@pytest.yield_fixture`` decorator. This change renders
+ ``@pytest.yield_fixture`` deprecated and makes ``@pytest.fixture`` with ``yield`` statements
+ the preferred way to write teardown code (`#1461`_).
+ Thanks `@csaftoiu`_ for bringing this to attention and `@nicoddemus`_ for the PR.
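+
+ A minimal sketch of the now-preferred style (the resource is a stand-in):
+
+ .. code-block:: python
+
+     import pytest
+
+     @pytest.fixture
+     def resource():
+         handle = {"open": True}   # stand-in for acquiring a real resource
+         yield handle              # value provided to the test
+         handle["open"] = False    # code after ``yield`` runs as teardown
+
+     def test_uses_resource(resource):
+         assert resource["open"]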
+
+* Explicitly passed parametrize ids do not get escaped to ascii (`#1351`_).
+ Thanks `@ceridwen`_ for the PR.
+
+* Fixtures are now sorted in the error message displayed when an unknown
+ fixture is declared in a test function.
+ Thanks `@nicoddemus`_ for the PR.
+
+* ``pytest_terminal_summary`` hook now receives the ``exitstatus``
+ of the test session as argument. Thanks `@blueyed`_ for the PR (`#1809`_).
+
+* Parametrize ids can accept ``None`` as specific test id, in which case the
+ automatically generated id for that argument will be used.
+ Thanks `@palaviv`_ for the complete PR (`#1468`_).
+
+* The parameter to xunit-style setup/teardown methods (``setup_method``,
+ ``setup_module``, etc.) is now optional and may be omitted.
+ Thanks `@okken`_ for bringing this to attention and `@nicoddemus`_ for the PR.
+
+* Improved automatic id generation selection in case of duplicate ids in
+ parametrize.
+ Thanks `@palaviv`_ for the complete PR (`#1474`_).
+
+* The pytest warnings summary is now shown by default. Added a new flag
+ ``--disable-pytest-warnings`` to explicitly disable the warnings summary (`#1668`_).
+
+* Make ImportError during collection more explicit by reminding
+ the user to check the name of the test module/package(s) (`#1426`_).
+ Thanks `@omarkohl`_ for the complete PR (`#1520`_).
+
+* Add ``build/`` and ``dist/`` to the default ``--norecursedirs`` list. Thanks
+ `@mikofski`_ for the report and `@tomviner`_ for the PR (`#1544`_).
+
+* ``pytest.raises`` in the context manager form accepts a custom
+ ``message`` to raise when no exception occurred.
+ Thanks `@palaviv`_ for the complete PR (`#1616`_).
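+
+ A small sketch of the custom message, which is shown only if no exception
+ is raised (the message text is illustrative):
+
+ .. code-block:: python
+
+     import pytest
+
+     def test_division_by_zero():
+         with pytest.raises(ZeroDivisionError, message="Expected ZeroDivisionError"):
+             1 / 0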
+
+* ``conftest.py`` files now benefit from assertion rewriting; previously it
+ was only available for test modules. Thanks `@flub`_, `@sober7`_ and
+ `@nicoddemus`_ for the PR (`#1619`_).
+
+* Text documents without any doctests no longer appear as "skipped".
+ Thanks `@graingert`_ for reporting and providing a full PR (`#1580`_).
+
+* Ensure that a module within a namespace package can be found when it
+ is specified on the command line together with the ``--pyargs``
+ option. Thanks to `@taschini`_ for the PR (`#1597`_).
+
+* Always include full assertion explanation during assertion rewriting. The previous behaviour was hiding
+ sub-expressions that happened to be ``False``, assuming this was redundant information.
+ Thanks `@bagerard`_ for reporting (`#1503`_). Thanks to `@davehunt`_ and
+ `@tomviner`_ for the PR.
+
+* ``OptionGroup.addoption()`` now checks if option names were already
+ added before, to make it easier to track down issues like `#1618`_.
+ Before, you only got exceptions later from the ``argparse`` library,
+ giving no clue about the actual reason for double-added options.
+
+* ``yield``-based tests are considered deprecated and will be removed in pytest-4.0.
+ Thanks `@nicoddemus`_ for the PR.
+
+* ``[pytest]`` sections in ``setup.cfg`` files should now be named ``[tool:pytest]``
+ to avoid conflicts with other distutils commands (see `#567`_). ``[pytest]`` sections in
+ ``pytest.ini`` or ``tox.ini`` files are supported and unchanged.
+ Thanks `@nicoddemus`_ for the PR.
+
+* Using ``pytest_funcarg__`` prefix to declare fixtures is considered deprecated and will be
+ removed in pytest-4.0 (`#1684`_).
+ Thanks `@nicoddemus`_ for the PR.
+
+* Passing a command-line string to ``pytest.main()`` is considered deprecated and scheduled
+ for removal in pytest-4.0. It is recommended to pass a list of arguments instead (`#1723`_).
+
+* Rename ``getfuncargvalue`` to ``getfixturevalue``. ``getfuncargvalue`` is
+ still present but is now considered deprecated. Thanks to `@RedBeardCode`_ and `@tomviner`_
+ for the PR (`#1626`_).
+
+* ``optparse`` type usage now triggers DeprecationWarnings (`#1740`_).
+
+
+* ``optparse`` backward compatibility supports float/complex types (`#457`_).
+
+* Refined logic for determining the ``rootdir``, considering only valid
+ paths which fixes a number of issues: `#1594`_, `#1435`_ and `#1471`_.
+ Updated the documentation according to current behavior. Thanks to
+ `@blueyed`_, `@davehunt`_ and `@matthiasha`_ for the PR.
+
+* Always include full assertion explanation. The previous behaviour was hiding
+ sub-expressions that happened to be False, assuming this was redundant information.
+ Thanks `@bagerard`_ for reporting (`#1503`_). Thanks to `@davehunt`_ and
+ `@tomviner`_ for PR.
+
+* Better message in case of not using parametrized variable (see `#1539`_).
+ Thanks to `@tramwaj29`_ for the PR.
+
+* Updated docstrings with a more uniform style.
+
+* Add stderr write for ``pytest.exit(msg)`` during startup. Previously the message was never shown.
+ Thanks `@BeyondEvil`_ for reporting `#1210`_. Thanks to `@JonathonSonesen`_ and
+ `@tomviner`_ for the PR.
+
+* No longer display the incorrect test deselection reason (`#1372`_).
+ Thanks `@ronnypfannschmidt`_ for the PR.
+
+* The ``--resultlog`` command line option has been deprecated: it is little used
+ and there are more modern and better alternatives (see `#830`_).
+ Thanks `@nicoddemus`_ for the PR.
+
+* Improve error message with fixture lookup errors: add an 'E' to the first
+ line and '>' to the rest. Fixes `#717`_. Thanks `@blueyed`_ for reporting and
+ a PR, `@eolo999`_ for the initial PR and `@tomviner`_ for his guidance during
+ EuroPython2016 sprint.
+
+
+**Bug Fixes**
+
+* Parametrize now correctly handles duplicated test ids.
+
+* Fix internal error issue when the ``method`` argument is missing for
+ ``teardown_method()`` (`#1605`_).
+
+* Fix exception visualization in case the current working directory (CWD) gets
+ deleted during testing (`#1235`_). Thanks `@bukzor`_ for reporting. PR by
+ `@marscher`_.
+
+* Improve test output for logical expression with brackets (`#925`_).
+ Thanks `@DRMacIver`_ for reporting and `@RedBeardCode`_ for the PR.
+
+* Create correct diff for strings ending with newlines (`#1553`_).
+ Thanks `@Vogtinator`_ for reporting and `@RedBeardCode`_ and
+ `@tomviner`_ for the PR.
+
+* ``ConftestImportFailure`` now shows the traceback making it easier to
+ identify bugs in ``conftest.py`` files (`#1516`_). Thanks `@txomon`_ for
+ the PR.
+
+* Fixed collection of classes with custom ``__new__`` method.
+ Fixes `#1579`_. Thanks to `@Stranger6667`_ for the PR.
+
+* Fixed scope overriding inside metafunc.parametrize (`#634`_).
+ Thanks to `@Stranger6667`_ for the PR.
+
+* Fixed the total tests tally in junit xml output (`#1798`_).
+ Thanks to `@cryporchild`_ for the PR.
+
+* Fixed off-by-one error with lines from ``request.node.warn``.
+ Thanks to `@blueyed`_ for the PR.
+
+
+.. _#1210: https://github.com/pytest-dev/pytest/issues/1210
+.. _#1235: https://github.com/pytest-dev/pytest/issues/1235
+.. _#1351: https://github.com/pytest-dev/pytest/issues/1351
+.. _#1372: https://github.com/pytest-dev/pytest/issues/1372
+.. _#1421: https://github.com/pytest-dev/pytest/issues/1421
+.. _#1426: https://github.com/pytest-dev/pytest/issues/1426
+.. _#1428: https://github.com/pytest-dev/pytest/pull/1428
+.. _#1435: https://github.com/pytest-dev/pytest/issues/1435
+.. _#1441: https://github.com/pytest-dev/pytest/pull/1441
+.. _#1444: https://github.com/pytest-dev/pytest/pull/1444
+.. _#1454: https://github.com/pytest-dev/pytest/pull/1454
+.. _#1461: https://github.com/pytest-dev/pytest/pull/1461
+.. _#1468: https://github.com/pytest-dev/pytest/pull/1468
+.. _#1471: https://github.com/pytest-dev/pytest/issues/1471
+.. _#1474: https://github.com/pytest-dev/pytest/pull/1474
+.. _#1479: https://github.com/pytest-dev/pytest/issues/1479
+.. _#1502: https://github.com/pytest-dev/pytest/pull/1502
+.. _#1503: https://github.com/pytest-dev/pytest/issues/1503
+.. _#1516: https://github.com/pytest-dev/pytest/pull/1516
+.. _#1519: https://github.com/pytest-dev/pytest/pull/1519
+.. _#1520: https://github.com/pytest-dev/pytest/pull/1520
+.. _#1526: https://github.com/pytest-dev/pytest/pull/1526
+.. _#1539: https://github.com/pytest-dev/pytest/issues/1539
+.. _#1544: https://github.com/pytest-dev/pytest/issues/1544
+.. _#1546: https://github.com/pytest-dev/pytest/issues/1546
+.. _#1553: https://github.com/pytest-dev/pytest/issues/1553
+.. _#1562: https://github.com/pytest-dev/pytest/issues/1562
+.. _#1579: https://github.com/pytest-dev/pytest/issues/1579
+.. _#1580: https://github.com/pytest-dev/pytest/pull/1580
+.. _#1594: https://github.com/pytest-dev/pytest/issues/1594
+.. _#1597: https://github.com/pytest-dev/pytest/pull/1597
+.. _#1605: https://github.com/pytest-dev/pytest/issues/1605
+.. _#1616: https://github.com/pytest-dev/pytest/pull/1616
+.. _#1618: https://github.com/pytest-dev/pytest/issues/1618
+.. _#1619: https://github.com/pytest-dev/pytest/issues/1619
+.. _#1626: https://github.com/pytest-dev/pytest/pull/1626
+.. _#1627: https://github.com/pytest-dev/pytest/pull/1627
+.. _#1628: https://github.com/pytest-dev/pytest/pull/1628
+.. _#1629: https://github.com/pytest-dev/pytest/issues/1629
+.. _#1632: https://github.com/pytest-dev/pytest/issues/1632
+.. _#1633: https://github.com/pytest-dev/pytest/pull/1633
+.. _#1664: https://github.com/pytest-dev/pytest/pull/1664
+.. _#1668: https://github.com/pytest-dev/pytest/issues/1668
+.. _#1684: https://github.com/pytest-dev/pytest/pull/1684
+.. _#1723: https://github.com/pytest-dev/pytest/pull/1723
+.. _#1740: https://github.com/pytest-dev/pytest/issues/1740
+.. _#1749: https://github.com/pytest-dev/pytest/issues/1749
+.. _#1778: https://github.com/pytest-dev/pytest/pull/1778
+.. _#1795: https://github.com/pytest-dev/pytest/pull/1795
+.. _#1798: https://github.com/pytest-dev/pytest/pull/1798
+.. _#1809: https://github.com/pytest-dev/pytest/pull/1809
+.. _#372: https://github.com/pytest-dev/pytest/issues/372
+.. _#457: https://github.com/pytest-dev/pytest/issues/457
+.. _#460: https://github.com/pytest-dev/pytest/pull/460
+.. _#567: https://github.com/pytest-dev/pytest/pull/567
+.. _#607: https://github.com/pytest-dev/pytest/issues/607
+.. _#634: https://github.com/pytest-dev/pytest/issues/634
+.. _#717: https://github.com/pytest-dev/pytest/issues/717
+.. _#830: https://github.com/pytest-dev/pytest/issues/830
+.. _#925: https://github.com/pytest-dev/pytest/issues/925
+
+
+.. _@anntzer: https://github.com/anntzer
+.. _@bagerard: https://github.com/bagerard
+.. _@BeyondEvil: https://github.com/BeyondEvil
+.. _@blueyed: https://github.com/blueyed
+.. _@ceridwen: https://github.com/ceridwen
+.. _@cryporchild: https://github.com/cryporchild
+.. _@csaftoiu: https://github.com/csaftoiu
+.. _@d6e: https://github.com/d6e
+.. _@davehunt: https://github.com/davehunt
+.. _@DRMacIver: https://github.com/DRMacIver
+.. _@eolo999: https://github.com/eolo999
+.. _@fengxx: https://github.com/fengxx
+.. _@flub: https://github.com/flub
+.. _@gprasad84: https://github.com/gprasad84
+.. _@graingert: https://github.com/graingert
+.. _@hartym: https://github.com/hartym
+.. _@JonathonSonesen: https://github.com/JonathonSonesen
+.. _@kalekundert: https://github.com/kalekundert
+.. _@kvas-it: https://github.com/kvas-it
+.. _@marscher: https://github.com/marscher
+.. _@mikofski: https://github.com/mikofski
+.. _@milliams: https://github.com/milliams
+.. _@nikratio: https://github.com/nikratio
+.. _@novas0x2a: https://github.com/novas0x2a
+.. _@obestwalter: https://github.com/obestwalter
+.. _@okken: https://github.com/okken
+.. _@olegpidsadnyi: https://github.com/olegpidsadnyi
+.. _@omarkohl: https://github.com/omarkohl
+.. _@palaviv: https://github.com/palaviv
+.. _@RedBeardCode: https://github.com/RedBeardCode
+.. _@sallner: https://github.com/sallner
+.. _@sober7: https://github.com/sober7
+.. _@Stranger6667: https://github.com/Stranger6667
+.. _@suzaku: https://github.com/suzaku
+.. _@tareqalayan: https://github.com/tareqalayan
+.. _@taschini: https://github.com/taschini
+.. _@tramwaj29: https://github.com/tramwaj29
+.. _@txomon: https://github.com/txomon
+.. _@Vogtinator: https://github.com/Vogtinator
+.. _@matthiasha: https://github.com/matthiasha
+
+
+2.9.2 (2016-05-31)
+==================
+
+**Bug Fixes**
+
+* Fix `#510`_: skip tests where one parametrize dimension was empty.
+  Thanks Alex Stapleton for the report and `@RonnyPfannschmidt`_ for the PR.
+
+* Fix xfail not working with the ``condition`` keyword argument.
+  Thanks `@astraw38`_ for reporting the issue (`#1496`_) and `@tomviner`_
+  for the PR (`#1524`_).
+
+* Fix win32 path issue when putting custom config file with absolute path
+ in ``pytest.main("-c your_absolute_path")``.
+
+* Fix maximum recursion depth detection when raised error class is not aware
+ of unicode/encoded bytes.
+ Thanks `@prusse-martin`_ for the PR (`#1506`_).
+
+* Fix ``pytest.mark.skip`` mark when used in strict mode.
+ Thanks `@pquentin`_ for the PR and `@RonnyPfannschmidt`_ for
+ showing how to fix the bug.
+
+* Minor improvements and fixes to the documentation.
+ Thanks `@omarkohl`_ for the PR.
+
+* Fix ``--fixtures`` to show all fixture definitions as opposed to just
+ one per fixture name.
+ Thanks to `@hackebrot`_ for the PR.
+
+.. _#510: https://github.com/pytest-dev/pytest/issues/510
+.. _#1506: https://github.com/pytest-dev/pytest/pull/1506
+.. _#1496: https://github.com/pytest-dev/pytest/issues/1496
+.. _#1524: https://github.com/pytest-dev/pytest/pull/1524
+
+.. _@prusse-martin: https://github.com/prusse-martin
+.. _@astraw38: https://github.com/astraw38
+
+
+2.9.1 (2016-03-17)
+==================
+
+**Bug Fixes**
+
+* Improve error message when a plugin fails to load.
+ Thanks `@nicoddemus`_ for the PR.
+
+* Fix (`#1178 <https://github.com/pytest-dev/pytest/issues/1178>`_):
+ ``pytest.fail`` with non-ascii characters raises an internal pytest error.
+ Thanks `@nicoddemus`_ for the PR.
+
+* Fix (`#469`_): junit parses report.nodeid incorrectly, when params IDs
+ contain ``::``. Thanks `@tomviner`_ for the PR (`#1431`_).
+
+* Fix (`#578 <https://github.com/pytest-dev/pytest/issues/578>`_): SyntaxErrors
+ containing non-ascii lines at the point of failure generated an internal
+ py.test error.
+ Thanks `@asottile`_ for the report and `@nicoddemus`_ for the PR.
+
+* Fix (`#1437`_): when a bytestring regex pattern is passed to parametrize,
+  attempt to decode it as utf-8, ignoring errors.
+
+* Fix (`#649`_): parametrized test nodes cannot be specified to run on the command line.
+
+* Fix (`#138`_): better reporting for Python 3.3+ chained exceptions.
+
+.. _#1437: https://github.com/pytest-dev/pytest/issues/1437
+.. _#469: https://github.com/pytest-dev/pytest/issues/469
+.. _#1431: https://github.com/pytest-dev/pytest/pull/1431
+.. _#649: https://github.com/pytest-dev/pytest/issues/649
+.. _#138: https://github.com/pytest-dev/pytest/issues/138
+
+.. _@asottile: https://github.com/asottile
+
+
+2.9.0 (2016-02-29)
+==================
+
+**New Features**
+
+* New ``pytest.mark.skip`` mark, which unconditionally skips marked tests.
+ Thanks `@MichaelAquilina`_ for the complete PR (`#1040`_).
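+
+  For illustration only, a minimal sketch of the new mark (the test name and
+  reason below are made up)::
+
+      import pytest
+
+      @pytest.mark.skip(reason="example: not implemented yet")
+      def test_future_feature():
+          assert False  # never executed; the test is skipped unconditionally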
+
+* ``--doctest-glob`` may now be passed multiple times in the command-line.
+ Thanks `@jab`_ and `@nicoddemus`_ for the PR.
+
+* New ``-rp`` and ``-rP`` reporting options give the summary and full output
+ of passing tests, respectively. Thanks to `@codewarrior0`_ for the PR.
+
+* ``pytest.mark.xfail`` now has a ``strict`` option, which makes ``XPASS``
+  tests fail the test suite (defaulting to ``False``). There's also an
+  ``xfail_strict`` ini option that can be used to configure it project-wide.
+ Thanks `@rabbbit`_ for the request and `@nicoddemus`_ for the PR (`#1355`_).
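+
+  A rough sketch of how the option might be used (test name and reason are
+  made up); the same default can be set project-wide with ``xfail_strict = true``
+  in the ini file::
+
+      import pytest
+
+      @pytest.mark.xfail(strict=True, reason="example: known bug")
+      def test_known_bug():
+          assert True  # an unexpected pass (XPASS) now fails the suite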
+
+* ``Parser.addini`` now supports options of type ``bool``.
+ Thanks `@nicoddemus`_ for the PR.
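+
+  For illustration, a minimal conftest.py sketch (the option name is made up)::
+
+      def pytest_addoption(parser):
+          # a hypothetical boolean ini option, defaulting to False
+          parser.addini("announce", "example boolean option",
+                        type="bool", default=False)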
+
+* New ``ALLOW_BYTES`` doctest option. This strips ``b`` prefixes from byte strings
+ in doctest output (similar to ``ALLOW_UNICODE``).
+ Thanks `@jaraco`_ for the request and `@nicoddemus`_ for the PR (`#1287`_).
+
+* Give a hint on ``KeyboardInterrupt`` to use the ``--fulltrace`` option to show the errors.
+ Fixes `#1366`_.
+ Thanks to `@hpk42`_ for the report and `@RonnyPfannschmidt`_ for the PR.
+
+* Catch ``IndexError`` exceptions when getting exception source location.
+ Fixes a pytest internal error for dynamically generated code (fixtures and tests)
+  where the source lines are intentionally fake.
+
+**Changes**
+
+* **Important**: `py.code <https://pylib.readthedocs.io/en/latest/code.html>`_ has been
+ merged into the ``pytest`` repository as ``pytest._code``. This decision
+ was made because ``py.code`` had very few uses outside ``pytest`` and the
+ fact that it was in a different repository made it difficult to fix bugs on
+  its code in a timely manner. The team hopes this will make it easier to
+  refactor and improve that code.
+  This change shouldn't affect users, but it is worth knowing about in case
+  you encounter any strange behavior.
+
+ Keep in mind that the code for ``pytest._code`` is **private** and
+ **experimental**, so you definitely should not import it explicitly!
+
+ Please note that the original ``py.code`` is still available in
+ `pylib <https://pylib.readthedocs.io>`_.
+
+* ``pytest_enter_pdb`` now optionally receives the pytest config object.
+ Thanks `@nicoddemus`_ for the PR.
+
+* Removed code and documentation for Python 2.5 or lower versions,
+ including removal of the obsolete ``_pytest.assertion.oldinterpret`` module.
+ Thanks `@nicoddemus`_ for the PR (`#1226`_).
+
+* Comparisons now always show up in full when ``CI`` or ``BUILD_NUMBER`` is
+ found in the environment, even when ``-vv`` isn't used.
+ Thanks `@The-Compiler`_ for the PR.
+
+* ``--lf`` and ``--ff`` now support long names: ``--last-failed`` and
+ ``--failed-first`` respectively.
+ Thanks `@MichaelAquilina`_ for the PR.
+
+* Added expected exceptions to ``pytest.raises`` fail message.
+
+* Collection only displays progress ("collecting X items") when in a terminal.
+ This avoids cluttering the output when using ``--color=yes`` to obtain
+  colors in CI integration systems (`#1397`_).
+
+**Bug Fixes**
+
+* The ``-s`` and ``-c`` options should now work under ``xdist``;
+ ``Config.fromdictargs`` now represents its input much more faithfully.
+ Thanks to `@bukzor`_ for the complete PR (`#680`_).
+
+* Fix (`#1290`_): support Python 3.5's ``@`` operator in assertion rewriting.
+ Thanks `@Shinkenjoe`_ for report with test case and `@tomviner`_ for the PR.
+
+* Fix formatting utf-8 explanation messages (`#1379`_).
+ Thanks `@biern`_ for the PR.
+
+* Fix `traceback style docs`_ to describe all of the available options
+ (auto/long/short/line/native/no), with ``auto`` being the default since v2.6.
+ Thanks `@hackebrot`_ for the PR.
+
+* Fix (`#1422`_): junit record_xml_property doesn't allow multiple records
+ with same name.
+
+.. _`traceback style docs`: https://pytest.org/latest/usage.html#modifying-python-traceback-printing
+
+.. _#1609: https://github.com/pytest-dev/pytest/issues/1609
+.. _#1422: https://github.com/pytest-dev/pytest/issues/1422
+.. _#1379: https://github.com/pytest-dev/pytest/issues/1379
+.. _#1366: https://github.com/pytest-dev/pytest/issues/1366
+.. _#1040: https://github.com/pytest-dev/pytest/pull/1040
+.. _#680: https://github.com/pytest-dev/pytest/issues/680
+.. _#1287: https://github.com/pytest-dev/pytest/pull/1287
+.. _#1226: https://github.com/pytest-dev/pytest/pull/1226
+.. _#1290: https://github.com/pytest-dev/pytest/pull/1290
+.. _#1355: https://github.com/pytest-dev/pytest/pull/1355
+.. _#1397: https://github.com/pytest-dev/pytest/issues/1397
+.. _@biern: https://github.com/biern
+.. _@MichaelAquilina: https://github.com/MichaelAquilina
+.. _@bukzor: https://github.com/bukzor
+.. _@hpk42: https://github.com/hpk42
+.. _@nicoddemus: https://github.com/nicoddemus
+.. _@jab: https://github.com/jab
+.. _@codewarrior0: https://github.com/codewarrior0
+.. _@jaraco: https://github.com/jaraco
+.. _@The-Compiler: https://github.com/The-Compiler
+.. _@Shinkenjoe: https://github.com/Shinkenjoe
+.. _@tomviner: https://github.com/tomviner
+.. _@RonnyPfannschmidt: https://github.com/RonnyPfannschmidt
+.. _@rabbbit: https://github.com/rabbbit
+.. _@hackebrot: https://github.com/hackebrot
+.. _@pquentin: https://github.com/pquentin
+.. _@ioggstream: https://github.com/ioggstream
+
+2.8.7 (2016-01-24)
+==================
+
+- fix #1338: use predictable object resolution for monkeypatch
+
+2.8.6 (2016-01-21)
+==================
+
+- fix #1259: allow for double nodeids in junitxml;
+  this was a regression that broke plugin combinations
+  like pytest-pep8 + pytest-flakes.
+
+- Workaround for exception that occurs in pyreadline when using
+ ``--pdb`` with standard I/O capture enabled.
+ Thanks Erik M. Bray for the PR.
+
+- fix #900: Better error message in case the target of a ``monkeypatch`` call
+ raises an ``ImportError``.
+
+- fix #1292: monkeypatch calls (setattr, setenv, etc.) are now O(1).
+ Thanks David R. MacIver for the report and Bruno Oliveira for the PR.
+
+- fix #1223: captured stdout and stderr are now properly displayed before
+ entering pdb when ``--pdb`` is used instead of being thrown away.
+ Thanks Cal Leeming for the PR.
+
+- fix #1305: pytest warnings emitted during ``pytest_terminal_summary`` are now
+ properly displayed.
+ Thanks Ionel Maries Cristian for the report and Bruno Oliveira for the PR.
+
+- fix #628: fixed internal UnicodeDecodeError when doctests contain unicode.
+ Thanks Jason R. Coombs for the report and Bruno Oliveira for the PR.
+
+- fix #1334: Add captured stdout to jUnit XML report on setup error.
+ Thanks Georgy Dyuldin for the PR.
+
+
+2.8.5 (2015-12-11)
+==================
+
+- fix #1243: fixed issue where class attributes injected during collection could break pytest.
+ PR by Alexei Kozlenok, thanks Ronny Pfannschmidt and Bruno Oliveira for the review and help.
+
+- fix #1074: precompute junitxml chunks instead of storing the whole tree in objects
+ Thanks Bruno Oliveira for the report and Ronny Pfannschmidt for the PR
+
+- fix #1238: fix ``pytest.deprecated_call()`` receiving multiple arguments
+ (Regression introduced in 2.8.4). Thanks Alex Gaynor for the report and
+ Bruno Oliveira for the PR.
+
+
+2.8.4 (2015-12-06)
+==================
+
+- fix #1190: ``deprecated_call()`` now works when the deprecated
+ function has been already called by another test in the same
+ module. Thanks Mikhail Chernykh for the report and Bruno Oliveira for the
+ PR.
+
+- fix #1198: ``--pastebin`` option now works on Python 3. Thanks
+ Mehdy Khoshnoody for the PR.
+
+- fix #1219: ``--pastebin`` now works correctly when captured output contains
+ non-ascii characters. Thanks Bruno Oliveira for the PR.
+
+- fix #1204: another error when collecting with a nasty __getattr__().
+ Thanks Florian Bruhin for the PR.
+
+- fix the summary printed when no tests were run.
+  Thanks Florian Bruhin for the PR.
+
+- fix #1185: ensure MANIFEST.in exactly matches what should go into an sdist
+
+- a number of documentation modernizations wrt good practices.
+ Thanks Bruno Oliveira for the PR.
+
+2.8.3 (2015-11-18)
+==================
+
+- fix #1169: add __name__ attribute to testcases in TestCaseFunction to
+ support the @unittest.skip decorator on functions and methods.
+ Thanks Lee Kamentsky for the PR.
+
+- fix #1035: collecting tests if test module level obj has __getattr__().
+ Thanks Suor for the report and Bruno Oliveira / Tom Viner for the PR.
+
+- fix #331: don't collect tests if their failure cannot be reported correctly
+ e.g. they are a callable instance of a class.
+
+- fix #1133: fixed internal error when filtering tracebacks where one entry
+ belongs to a file which is no longer available.
+ Thanks Bruno Oliveira for the PR.
+
+- enhancement made to highlight in red the name of the failing tests so
+ they stand out in the output.
+ Thanks Gabriel Reis for the PR.
+
+- add more talks to the documentation
+- extend documentation on the --ignore cli option
+- use pytest-runner for setuptools integration
+- minor fixes for interaction with OS X El Capitan
+ system integrity protection (thanks Florian)
+
+
+2.8.2 (2015-10-07)
+==================
+
+- fix #1085: proper handling of encoding errors when passing encoded byte
+ strings to pytest.parametrize in Python 2.
+ Thanks Themanwithoutaplan for the report and Bruno Oliveira for the PR.
+
+- fix #1087: handling SystemError when passing empty byte strings to
+ pytest.parametrize in Python 3.
+ Thanks Paul Kehrer for the report and Bruno Oliveira for the PR.
+
+- fix #995: fixed internal error when filtering tracebacks where one entry
+ was generated by an exec() statement.
+ Thanks Daniel Hahler, Ashley C Straw, Philippe Gauthier and Pavel Savchenko
+ for contributing and Bruno Oliveira for the PR.
+
+- fix #1100 and #1057: errors when using autouse fixtures and doctest modules.
+ Thanks Sergey B Kirpichev and Vital Kudzelka for contributing and Bruno
+ Oliveira for the PR.
+
+2.8.1 (2015-09-29)
+==================
+
+- fix #1034: Add missing nodeid on pytest_logwarning call in
+ addhook. Thanks Simon Gomizelj for the PR.
+
+- 'deprecated_call' is now only satisfied with a DeprecationWarning or
+ PendingDeprecationWarning. Before 2.8.0, it accepted any warning, and 2.8.0
+ made it accept only DeprecationWarning (but not PendingDeprecationWarning).
+ Thanks Alex Gaynor for the issue and Eric Hunsberger for the PR.
+
+- fix issue #1073: avoid calling __getattr__ on potential plugin objects.
+ This fixes an incompatibility with pytest-django. Thanks Andreas Pelme,
+ Bruno Oliveira and Ronny Pfannschmidt for contributing and Holger Krekel
+ for the fix.
+
+- Fix issue #704: handle versionconflict during plugin loading more
+ gracefully. Thanks Bruno Oliveira for the PR.
+
+- Fix issue #1064: "--junitxml" regression when used with the
+ "pytest-xdist" plugin, with test reports being assigned to the wrong tests.
+ Thanks Daniel Grunwald for the report and Bruno Oliveira for the PR.
+
+- (experimental) adopt a more SemVer-style versioning scheme and change the
+  meaning of the master branch in the git repo: the "master" branch now only
+  receives bugfixes and changes aimed at micro releases. The "features" branch
+  will only be released with minor or major pytest releases.
+
+- Fix issue #766 by removing documentation references to distutils.
+ Thanks Russel Winder.
+
+- Fix issue #1030: now byte-strings are escaped to produce item node ids
+ to make them always serializable.
+ Thanks Andy Freeland for the report and Bruno Oliveira for the PR.
+
+- Python 2: if unicode parametrized values are convertible to ascii, their
+ ascii representation is used for the node id.
+
+- Fix issue #411: Add __eq__ method to assertion comparison example.
+ Thanks Ben Webb.
+
+- Fix issue #653: deprecated_call can be used as context manager.
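+
+  For illustration, a minimal sketch of using it as a context manager (the
+  deprecated function here is made up)::
+
+      import warnings
+      import pytest
+
+      def old_api():
+          warnings.warn("use new_api() instead", DeprecationWarning)
+
+      def test_old_api_warns():
+          with pytest.deprecated_call():
+              old_api()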
+
+- fix issue 877: properly handle assertion explanations with non-ascii repr
+ Thanks Mathieu Agopian for the report and Ronny Pfannschmidt for the PR.
+
+- fix issue 1029: transform errors when writing cache values into pytest-warnings
+
+2.8.0 (2015-09-18)
+==================
+
+- new ``--lf`` and ``--ff`` options to run only the last failing tests or
+ "failing tests first" from the last run. This functionality is provided
+ through porting the formerly external pytest-cache plugin into pytest core.
+ BACKWARD INCOMPAT: if you used pytest-cache's functionality to persist
+ data between test runs be aware that we don't serialize sets anymore.
+ Thanks Ronny Pfannschmidt for most of the merging work.
+
+- "-r" option now accepts "a" to include all possible reports, similar
+ to passing "fEsxXw" explicitly (isse960).
+ Thanks Abhijeet Kasurde for the PR.
+
+- avoid python3.5 deprecation warnings by introducing version
+ specific inspection helpers, thanks Michael Droettboom.
+
+- fix issue562: @nose.tools.istest now fully respected.
+
+- fix issue934: when string comparison fails and a diff is too large to display
+ without passing -vv, still show a few lines of the diff.
+ Thanks Florian Bruhin for the report and Bruno Oliveira for the PR.
+
+- fix issue736: Fix a bug where fixture params would be discarded when combined
+ with parametrization markers.
+ Thanks to Markus Unterwaditzer for the PR.
+
+- fix issue710: introduce ALLOW_UNICODE doctest option: when enabled, the
+ ``u`` prefix is stripped from unicode strings in expected doctest output. This
+ allows doctests which use unicode to run in Python 2 and 3 unchanged.
+ Thanks Jason R. Coombs for the report and Bruno Oliveira for the PR.
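+
+  For illustration, a doctest sketch (function and values are made up)::
+
+      def upper(value):
+          """
+          >>> upper(u'hello')  # doctest: +ALLOW_UNICODE
+          u'HELLO'
+          """
+          return value.upper()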
+
+- parametrize now also generates meaningful test IDs for enum, regex and class
+ objects (as opposed to class instances).
+ Thanks to Florian Bruhin for the PR.
+
+- Add 'warns' to assert that warnings are thrown (like 'raises').
+ Thanks to Eric Hunsberger for the PR.
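+
+  A minimal illustrative sketch (the warning message is made up)::
+
+      import warnings
+      import pytest
+
+      def test_emits_warning():
+          with pytest.warns(UserWarning):
+              warnings.warn("example warning", UserWarning)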
+
+- Fix issue683: Do not apply an already applied mark. Thanks ojake for the PR.
+
+- Deal with capturing failures better so fewer exceptions get lost to
+ /dev/null. Thanks David Szotten for the PR.
+
+- fix issue730: deprecate and warn about the --genscript option.
+ Thanks Ronny Pfannschmidt for the report and Christian Pommranz for the PR.
+
+- fix issue751: bug with multiple parametrize calls using ids when applied to a
+  class with two or more test methods. Thanks Sergey Chipiga for reporting and
+  Jan Bednarik for the PR.
+
+- fix issue82: avoid loading conftest files from setup.cfg/pytest.ini/tox.ini
+ files and upwards by default (--confcutdir can still be set to override this).
+ Thanks Bruno Oliveira for the PR.
+
+- fix issue768: docstrings found in python modules were not setting up session
+ fixtures. Thanks Jason R. Coombs for reporting and Bruno Oliveira for the PR.
+
+- added ``tmpdir_factory``, a session-scoped fixture that can be used to create
+ directories under the base temporary directory. Previously this object was
+ installed as a ``_tmpdirhandler`` attribute of the ``config`` object, but now it
+ is part of the official API and using ``config._tmpdirhandler`` is
+ deprecated.
+ Thanks Bruno Oliveira for the PR.
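+
+  A rough sketch, assuming the factory's ``mktemp`` helper (fixture and
+  directory names are made up)::
+
+      import pytest
+
+      @pytest.fixture(scope="session")
+      def image_dir(tmpdir_factory):
+          # creates a numbered directory under the base temporary directory
+          return tmpdir_factory.mktemp("images")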
+
+- fix issue808: pytest's internal assertion rewrite hook now implements the
+ optional PEP302 get_data API so tests can access data files next to them.
+ Thanks xmo-odoo for request and example and Bruno Oliveira for
+ the PR.
+
+- rootdir and inifile are now displayed during usage errors to help
+ users diagnose problems such as unexpected ini files which add
+ unknown options being picked up by pytest. Thanks to Pavel Savchenko for
+ bringing the problem to attention in #821 and Bruno Oliveira for the PR.
+
+- The summary bar is now colored yellow for warning
+  situations such as: all tests either were skipped or xpass/xfailed,
+  or no tests were run at all (this is a partial fix for issue500).
+  Thanks Eric Siegerman.
+
+- fix issue812: pytest now exits with status code 5 in situations where no
+  tests were run at all, for example when the directory given on the command
+  line contains no tests or when a command line option filters out all tests
+  (``-k`` for example).
+  Thanks Eric Siegerman (issue812) and Bruno Oliveira for the PR.
+
+- New ``testpaths`` ini option: list of directories to search for tests
+ when executing pytest from the root directory. This can be used
+ to speed up test collection when a project has well specified directories
+ for tests, being usually more practical than configuring norecursedirs for
+ all directories that do not contain tests.
+ Thanks to Adrian for idea (#694) and Bruno Oliveira for the PR.
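+
+  For illustration, a hypothetical ini file (directory names are examples)::
+
+      # pytest.ini
+      [pytest]
+      testpaths = tests integration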
+
+- fix issue713: JUnit XML reports for doctest failures.
+ Thanks Punyashloka Biswal.
+
+- fix issue970: internal pytest warnings now appear as "pytest-warnings" in
+ the terminal instead of "warnings", so it is clear for users that those
+ warnings are from pytest and not from the builtin "warnings" module.
+ Thanks Bruno Oliveira.
+
+- Include setup and teardown in junitxml test durations.
+ Thanks Janne Vanhala.
+
+- fix issue735: assertion failures on debug versions of Python 3.4+
+
+- new option ``--import-mode`` to allow changing the test module import
+  behaviour to append to sys.path instead of prepending. This makes it easier
+  to run test modules against installed versions of a package even if the
+ package under test has the same import root. In this example::
+
+ testing/__init__.py
+ testing/test_pkg_under_test.py
+ pkg_under_test/
+
+ the tests will run against the installed version
+ of pkg_under_test when ``--import-mode=append`` is used whereas
+ by default they would always pick up the local version. Thanks Holger Krekel.
+
+- pytester: add method ``TmpTestdir.delete_loaded_modules()``, and call it
+ from ``inline_run()`` to allow temporary modules to be reloaded.
+ Thanks Eduardo Schettino.
+
+- internally refactor pluginmanager API and code so that there
+ is a clear distinction between a pytest-agnostic rather simple
+ pluginmanager and the PytestPluginManager which adds a lot of
+ behaviour, among it handling of the local conftest files.
+ In terms of documented methods this is a backward compatible
+ change but it might still break 3rd party plugins which relied on
+  internal details such as the pluginmanager.add_shutdown() API.
+ Thanks Holger Krekel.
+
+- pluginmanagement: introduce ``pytest.hookimpl`` and
+ ``pytest.hookspec`` decorators for setting impl/spec
+ specific parameters. This substitutes the previous
+ now deprecated use of ``pytest.mark`` which is meant to
+ contain markers for test functions only.
+
+- write/refine docs for "writing plugins" which now have their
+  own page and are separate from the "using/installing plugins" page.
+
+- fix issue732: properly unregister plugins from any hook calling
+ sites allowing to have temporary plugins during test execution.
+
+- deprecate and warn about ``__multicall__`` argument in hook
+ implementations. Use the ``hookwrapper`` mechanism instead already
+ introduced with pytest-2.7.
+
+- speed up pytest's own test suite considerably by using inprocess
+  tests by default (the test run can be modified with --runpytest=subprocess
+  to create subprocesses in many places instead). The main
+  API to run pytest in a test is "runpytest()", with "runpytest_subprocess"
+  and "runpytest_inprocess" available if you need a particular way of running
+  the test. In all cases you get back a RunResult, but the inprocess
+  one will also have a "reprec" attribute with the recorded events/reports.
+
+- fix monkeypatch.setattr("x.y", raising=False) to actually not raise
+ if "y" is not a pre-existing attribute. Thanks Florian Bruhin.
+
+- fix issue741: make running output from testdir.run copy/pasteable
+ Thanks Bruno Oliveira.
+
+- add a new ``--noconftest`` argument which ignores all ``conftest.py`` files.
+
+- add ``file`` and ``line`` attributes to JUnit-XML output.
+
+- fix issue890: changed extension of all documentation files from ``txt`` to
+ ``rst``. Thanks to Abhijeet for the PR.
+
+- fix issue714: add ability to apply indirect=True parameter on particular argnames.
+ Thanks Elizaveta239.
+
+- fix issue957: "# doctest: SKIP" option will now register doctests as SKIPPED
+ rather than PASSED.
+ Thanks Thomas Grainger for the report and Bruno Oliveira for the PR.
+
+- issue951: add new record_xml_property fixture, that supports logging
+ additional information on xml output. Thanks David Diaz for the PR.
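+
+  A minimal illustrative sketch (key and value are made up)::
+
+      def test_with_extra_metadata(record_xml_property):
+          record_xml_property("example_key", 42)  # ends up in the XML report
+          assert True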
+
+- issue949: paths after normal options (for example ``-s``, ``-v``, etc) are now
+ properly used to discover ``rootdir`` and ``ini`` files.
+ Thanks Peter Lauri for the report and Bruno Oliveira for the PR.
+
+2.7.3 (2015-09-15)
+==================
+
+- Allow 'dev', 'rc', or other non-integer version strings in ``importorskip``.
+ Thanks to Eric Hunsberger for the PR.
+
+- fix issue856: consider --color parameter in all outputs (for example
+ --fixtures). Thanks Barney Gale for the report and Bruno Oliveira for the PR.
+
+- fix issue855: passing str objects as ``plugins`` argument to pytest.main
+ is now interpreted as a module name to be imported and registered as a
+ plugin, instead of silently having no effect.
+ Thanks xmo-odoo for the report and Bruno Oliveira for the PR.
+
+- fix issue744: fix for ast.Call changes in Python 3.5+. Thanks
+ Guido van Rossum, Matthias Bussonnier, Stefan Zimmermann and
+ Thomas Kluyver.
+
+- fix issue842: applying markers in classes no longer propagates these markers
+ to superclasses which also have markers.
+ Thanks xmo-odoo for the report and Bruno Oliveira for the PR.
+
+- preserve warning functions after call to pytest.deprecated_call. Thanks
+ Pieter Mulder for PR.
+
+- fix issue854: autouse yield_fixtures defined as class members of
+ unittest.TestCase subclasses now work as expected.
+  Thanks xmo-odoo for the report and Bruno Oliveira for the PR.
+
+- fix issue833: --fixtures now shows all fixtures of collected test files, instead of just the
+ fixtures declared on the first one.
+ Thanks Florian Bruhin for reporting and Bruno Oliveira for the PR.
+
+- fix issue863: skipped tests now report the correct reason when a skip/xfail
+ condition is met when using multiple markers.
+ Thanks Raphael Pierzina for reporting and Bruno Oliveira for the PR.
+
+- optimized tmpdir fixture initialization, which should make test sessions
+  faster (especially when using pytest-xdist). The only visible effect
+ is that now pytest uses a subdirectory in the $TEMP directory for all
+ directories created by this fixture (defaults to $TEMP/pytest-$USER).
+ Thanks Bruno Oliveira for the PR.
+
+2.7.2 (2015-06-23)
+==================
+
+- fix issue767: pytest.raises value attribute does not contain the exception
+ instance on Python 2.6. Thanks Eric Siegerman for providing the test
+ case and Bruno Oliveira for PR.
+
+- Automatically create directory for junitxml and results log.
+ Thanks Aron Curzon.
+
+- fix issue713: JUnit XML reports for doctest failures.
+ Thanks Punyashloka Biswal.
+
+- fix issue735: assertion failures on debug versions of Python 3.4+
+ Thanks Benjamin Peterson.
+
+- fix issue114: skipif marker reports to internal skipping plugin;
+ Thanks Floris Bruynooghe for reporting and Bruno Oliveira for the PR.
+
+- fix issue748: unittest.SkipTest reports to internal pytest unittest plugin.
+ Thanks Thomas De Schampheleire for reporting and Bruno Oliveira for the PR.
+
+- fix issue718: failed to create representation of sets containing unsortable
+ elements in python 2. Thanks Edison Gustavo Muenz.
+
+- fix issue756, fix issue752 (and similar issues): depend on py-1.4.29
+ which has a refined algorithm for traceback generation.
+
+
+2.7.1 (2015-05-19)
+==================
+
+- fix issue731: do not get confused by the braces which may be present
+ and unbalanced in an object's repr while collapsing False
+ explanations. Thanks Carl Meyer for the report and test case.
+
+- fix issue553: properly handling inspect.getsourcelines failures in
+ FixtureLookupError which would lead to an internal error,
+ obfuscating the original problem. Thanks talljosh for initial
+ diagnose/patch and Bruno Oliveira for final patch.
+
+- fix issue660: properly report scope-mismatch-access errors
+ independently from ordering of fixture arguments. Also
+ avoid the pytest internal traceback which does not provide
+ information to the user. Thanks Holger Krekel.
+
+- streamlined and documented release process. Also all versions
+ (in setup.py and documentation generation) are now read
+ from _pytest/__init__.py. Thanks Holger Krekel.
+
+- fixed docs to remove the notion that yield-fixtures are experimental.
+ They are here to stay :) Thanks Bruno Oliveira.
+
+- Support building wheels by using environment markers for the
+ requirements. Thanks Ionel Maries Cristian.
+
+- fixed regression to 2.6.4 which surfaced e.g. in lost stdout capture printing
+ when tests raised SystemExit. Thanks Holger Krekel.
+
+- reintroduced _pytest fixture of the pytester plugin which is used
+ at least by pytest-xdist.
+
+2.7.0 (2015-03-26)
+==================
+
+- fix issue435: make reload() work when assert rewriting is active.
+ Thanks Daniel Hahler.
+
+- fix issue616: conftest.py files and their contained fixtures are now
+ properly considered for visibility, independently from the exact
+ current working directory and test arguments that are used.
+ Many thanks to Eric Siegerman and his PR235 which contains
+ systematic tests for conftest visibility and now passes.
+ This change also introduces the concept of a ``rootdir`` which
+ is printed as a new pytest header and documented in the pytest
+ customize web page.
+
+- change reporting of "diverted" tests, i.e. tests that are collected
+ in one file but actually come from another (e.g. when tests in a test class
+ come from a base class in a different file). We now show the nodeid
+ and indicate via a postfix the other file.
+
+- add ability to set command line options by environment variable PYTEST_ADDOPTS.
+
+- added documentation on the new pytest-dev teams on bitbucket and
+ github. See https://pytest.org/latest/contributing.html .
+ Thanks to Anatoly for pushing and initial work on this.
+
+- fix issue650: new option ``--doctest-ignore-import-errors`` which
+ will turn import errors in doctests into skips. Thanks Charles Cloud
+ for the complete PR.
+
+- fix issue655: work around different ways that cause python2/3
+ to leak sys.exc_info into fixtures/tests causing failures in 3rd party code
+
+- fix issue615: assertion rewriting did not correctly escape % signs
+ when formatting boolean operations, which tripped over mixing
+ booleans with modulo operators. Thanks to Tom Viner for the report,
+ triaging and fix.
+
+- implement issue351: add ability to specify parametrize ids as a callable
+ to generate custom test ids. Thanks Brianna Laugher for the idea and
+ implementation.
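+
+  For illustration, a small sketch (the id function and values are made up)::
+
+      import pytest
+
+      def idfn(val):
+          return "case-{0}".format(val)
+
+      @pytest.mark.parametrize("value", [1, 2, 3], ids=idfn)
+      def test_positive(value):
+          assert value > 0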
+
+- introduce and document new hookwrapper mechanism useful for plugins
+ which want to wrap the execution of certain hooks for their purposes.
+ This supersedes the undocumented ``__multicall__`` protocol which
+ pytest itself and some external plugins use. Note that pytest-2.8
+ is scheduled to drop supporting the old ``__multicall__``
+ and only support the hookwrapper protocol.
+
+- majorly speed up invocation of plugin hooks
+
+- use hookwrapper mechanism in builtin pytest plugins.
+
+- add a doctest ini option for doctest flags, thanks Holger Peters.
+
+- add note to docs that if you want to mark a parameter and the
+ parameter is a callable, you also need to pass in a reason to disambiguate
+ it from the "decorator" case. Thanks Tom Viner.
+
+- "python_classes" and "python_functions" options now support glob-patterns
+ for test discovery, as discussed in issue600. Thanks Ldiary Translations.
+
+- allow to override parametrized fixtures with non-parametrized ones and vice versa (bubenkoff).
+
+- fix issue463: raise specific error for 'parameterize' misspelling (pfctdayelise).
+
+- On failure, the ``sys.last_value``, ``sys.last_type`` and
+ ``sys.last_traceback`` are set, so that a user can inspect the error
+ via postmortem debugging (almarklein).
+
+2.6.4 (2014-10-24)
+==================
+
+- Improve assertion failure reporting on iterables, by using ndiff and
+ pprint.
+
+- removed outdated japanese docs from source tree.
+
+- docs for "pytest_addhooks" hook. Thanks Bruno Oliveira.
+
+- updated plugin index docs. Thanks Bruno Oliveira.
+
+- fix issue557: with "-k" we only allow the old style "-" for negation
+ at the beginning of strings and even that is deprecated. Use "not" instead.
+  This should allow picking parametrized tests where "-" appeared in the parameter.
+
+- fix issue604: Escape % character in the assertion message.
+
+- fix issue620: add explanation in the --genscript target about what
+ the binary blob means. Thanks Dinu Gherman.
+
+- fix issue614: fixed pastebin support.
+
+2.6.3 (2014-09-24)
+==================
+
+- fix issue575: xunit-xml was reporting collection errors as failures
+ instead of errors, thanks Oleg Sinyavskiy.
+
+- fix issue582: fix setuptools example, thanks Laszlo Papp and Ronny
+ Pfannschmidt.
+
+- Fix infinite recursion bug when pickling capture.EncodedFile, thanks
+ Uwe Schmitt.
+
+- fix issue589: fix bad interaction with numpy and others when showing
+ exceptions. Check for precise "maximum recursion depth exceed" exception
+ instead of presuming any RuntimeError is that one (implemented in py
+ dep). Thanks Charles Cloud for analysing the issue.
+
+- fix conftest related fixture visibility issue: when running with a
+ CWD outside of a test package pytest would get fixture discovery wrong.
+ Thanks to Wolfgang Schnerring for figuring out a reproducible example.
+
+- Introduce pytest_enter_pdb hook (needed e.g. by pytest_timeout to cancel the
+ timeout when interactively entering pdb). Thanks Wolfgang Schnerring.
+
+- check xfail/skip also with non-python function test items. Thanks
+ Floris Bruynooghe.
+
+2.6.2 (2014-09-05)
+==================
+
+- Added function pytest.freeze_includes(), which makes it easy to embed
+ pytest into executables using tools like cx_freeze.
+ See docs for examples and rationale. Thanks Bruno Oliveira.
+
+- Improve assertion rewriting cache invalidation precision.
+
+- fixed issue561: adapt autouse fixture example for python3.
+
+- fixed issue453: assertion rewriting issue with __repr__ containing
+ "\n{", "\n}" and "\n~".
+
+- fix issue560: correctly display code if an "else:" or "finally:" is
+ followed by statements on the same line.
+
+- Fix example in monkeypatch documentation, thanks t-8ch.
+
+- fix issue572: correct tmpdir doc example for python3.
+
+- Do not mark as universal wheel because Python 2.6 is different from
+ other builds due to the extra argparse dependency. Fixes issue566.
+ Thanks sontek.
+
+- Implement issue549: user-provided assertion messages now no longer
+ replace the py.test introspection message but are shown in addition
+  to it.
+
+2.6.1 (2014-08-07)
+==================
+
+- No longer show line numbers in the --verbose output, the output is now
+ purely the nodeid. The line number is still shown in failure reports.
+ Thanks Floris Bruynooghe.
+
+- fix issue437 where assertion rewriting could cause pytest-xdist slaves
+ to collect different tests. Thanks Bruno Oliveira.
+
+- fix issue555: add "errors" attribute to capture-streams to satisfy
+ some distutils and possibly other code accessing sys.stdout.errors.
+
+- fix issue547 capsys/capfd also work when output capturing ("-s") is disabled.
+
+- address issue170: allow pytest.mark.xfail(...) to specify expected exceptions via
+ an optional "raises=EXC" argument where EXC can be a single exception
+ or a tuple of exception classes. Thanks David Mohr for the complete
+ PR.
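+
+  A minimal illustrative sketch (the expected exception is an example)::
+
+      import pytest
+
+      @pytest.mark.xfail(raises=IndexError)
+      def test_known_index_error():
+          [][0]  # xfails only for IndexError; any other exception still fails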
+
+- fix integration of pytest with unittest.mock.patch decorator when
+ it uses the "new" argument. Thanks Nicolas Delaby for test and PR.
+
+- fix issue with detecting conftest files if the arguments contain
+ "::" node id specifications (copy pasted from "-v" output)
+
+- fix issue544 by only removing "@NUM" at the end of "::" separated parts
+ and if the part has a ".py" extension
+
+- don't use py.std import helper, rather import things directly.
+ Thanks Bruno Oliveira.
+
+2.6
+===
+
+- Cache exceptions from fixtures according to their scope (issue 467).
+
+- fix issue537: Avoid importing old assertion reinterpretation code by default.
+
+- fix issue364: shorten and enhance tracebacks representation by default.
+ The new "--tb=auto" option (default) will only display long tracebacks
+ for the first and last entry. You can get the old behaviour of printing
+ all entries as long entries with "--tb=long". Also short entries by
+ default are now printed very similarly to "--tb=native" ones.
+
+- fix issue514: teach assertion reinterpretation about private class attributes
+
+- change -v output to include full node IDs of tests. Users can copy
+ a node ID from a test run, including line number, and use it as a
+ positional argument in order to run only a single test.
+
+- fix issue 475: fail early and comprehensibly when calling
+  pytest.raises with a wrong exception type.
+
+- fix issue516: tell in getting-started about current dependencies.
+
+- cleanup setup.py a bit and specify supported versions. Thanks Jurko
+ Gospodnetic for the PR.
+
+- change XPASS colour to yellow rather than red when tests are run
+ with -v.
+
+- fix issue473: work around mock putting an unbound method into a class
+ dict when double-patching.
+
+- fix issue498: if a fixture finalizer fails, make sure that
+ the fixture is still invalidated.
+
+- fix issue453: the result of the pytest_assertrepr_compare hook now gets
+  its newlines escaped so that format_exception does not blow up.
+
+- internal new warning system: pytest will now produce warnings when
+ it detects oddities in your test collection or execution.
+ Warnings are ultimately sent to a new pytest_logwarning hook which is
+ currently only implemented by the terminal plugin which displays
+ warnings in the summary line and shows more details when -rw (report on
+ warnings) is specified.
+
+- change skips into warnings for test classes with an __init__ and
+ callables in test modules which look like a test but are not functions.
+
+- fix issue436: improved finding of initial conftest files from command
+ line arguments by using the result of parse_known_args rather than
+ the previous flaky heuristics. Thanks Marc Abramowitz for tests
+ and initial fixing approaches in this area.
+
+- fix issue #479: properly handle nose/unittest(2) SkipTest exceptions
+ during collection/loading of test modules. Thanks to Marc Schlaich
+ for the complete PR.
+
+- fix issue490: include pytest_load_initial_conftests in documentation
+ and improve docstring.
+
+- fix issue472: clarify that ``pytest.config.getvalue()`` cannot work
+ if it's triggered ahead of command line parsing.
+
+- merge PR123: improved integration with mock.patch decorator on tests.
+
+- fix issue412: messing with stdout/stderr FD-level streams is now
+ captured without crashes.
+
+- fix issue483: trial/py33 works now properly. Thanks Daniel Grana for PR.
+
+- improve example for pytest integration with "python setup.py test"
+ which now has a generic "-a" or "--pytest-args" option where you
+ can pass additional options as a quoted string. Thanks Trevor Bekolay.
+
+- simplified internal capturing mechanism and made it more robust
+ against tests or setups changing FD1/FD2, also better integrated
+ now with pytest.pdb() in single tests.
+
+- improvements to pytest's own test-suite leakage detection, courtesy of PRs
+ from Marc Abramowitz
+
+- fix issue492: avoid leak in test_writeorg. Thanks Marc Abramowitz.
+
+- fix issue493: don't run tests in doc directory with ``python setup.py test``
+ (use tox -e doctesting for that)
+
+- fix issue486: better reporting and handling of early conftest loading failures
+
+- some cleanup and simplification of internal conftest handling.
+
+- work a bit harder to break reference cycles when catching exceptions.
+ Thanks Jurko Gospodnetic.
+
+- fix issue443: fix skip examples to use proper comparison. Thanks Alex
+ Groenholm.
+
+- support nose-style ``__test__`` attribute on modules, classes and
+  functions, including unittest-style classes. If set to False, the
+ test will not be collected.
+
+- fix issue512: show "<notset>" for arguments which might not be set
+ in monkeypatch plugin. Improves output in documentation.
+
+
+2.5.2 (2014-01-29)
+==================
+
+- fix issue409 -- better interoperate with cx_freeze by not
+ trying to import from collections.abc which causes problems
+ for py27/cx_freeze. Thanks Wolfgang L. for reporting and tracking it down.
+
+- fixed docs and code to use "pytest" instead of "py.test" almost everywhere.
+ Thanks Jurko Gospodnetic for the complete PR.
+
+- fix issue425: mention at end of "py.test -h" that --markers
+ and --fixtures work according to specified test path (or current dir)
+
+- fix issue413: exceptions with unicode attributes are now printed
+ correctly also on python2 and with pytest-xdist runs. (the fix
+ requires py-1.4.20)
+
+- copy, cleanup and integrate py.io capture
+ from pylib 1.4.20.dev2 (rev 13d9af95547e)
+
+- address issue416: clarify docs as to conftest.py loading semantics
+
+- fix issue429: comparing byte strings with non-ascii chars in assert
+  expressions now works better. Thanks Floris Bruynooghe.
+
+- make capfd/capsys.capture private, it's unused and shouldn't be exposed
+
+
+2.5.1 (2013-12-17)
+==================
+
+- merge new documentation styling PR from Tobias Bieniek.
+
+- fix issue403: allow parametrize of multiple same-name functions within
+ a collection node. Thanks Andreas Kloeckner and Alex Gaynor for reporting
+ and analysis.
+
+- Allow parameterized fixtures to specify the ID of the parameters by
+ adding an ids argument to pytest.fixture() and pytest.yield_fixture().
+ Thanks Floris Bruynooghe.
+
+- fix issue404 by always using the binary xml escape in the junitxml
+ plugin. Thanks Ronny Pfannschmidt.
+
+- fix issue407: fix addoption docstring to point to argparse instead of
+ optparse. Thanks Daniel D. Wright.
+
+
+
+2.5.0 (2013-12-12)
+==================
+
+- dropped python2.5 from automated release testing of pytest itself
+ which means it's probably going to break soon (but still works
+ with this release we believe).
+
+- simplified and fixed implementation for calling finalizers when
+ parametrized fixtures or function arguments are involved. finalization
+ is now performed lazily at setup time instead of in the "teardown phase".
+ While this might sound odd at first, it helps to ensure that we are
+ correctly handling setup/teardown even in complex code. User-level code
+ should not be affected unless it's implementing the pytest_runtest_teardown
+ hook and expecting certain fixture instances are torn down within (very
+ unlikely and would have been unreliable anyway).
+
+- PR90: add --color=yes|no|auto option to force terminal coloring
+ mode ("auto" is default). Thanks Marc Abramowitz.
+
+- fix issue319 - correctly show unicode in assertion errors. Many
+ thanks to Floris Bruynooghe for the complete PR. Also means
+ we depend on py>=1.4.19 now.
+
+- fix issue396 - correctly sort and finalize class-scoped parametrized
+ tests independently from number of methods on the class.
+
+- refix issue323 in a better way -- parametrization should now never
+ cause Runtime Recursion errors because the underlying algorithm
+ for re-ordering tests per-scope/per-fixture is not recursive
+ anymore (it was tail-call recursive before which could lead
+ to problems for more than >966 non-function scoped parameters).
+
+- fix issue290 - there is preliminary support now for parametrizing
+ with repeated same values (sometimes useful to test if calling
+ a second time works as with the first time).
+
+- close issue240 - document precisely how pytest module importing
+ works, discuss the two common test directory layouts, and how it
+ interacts with PEP420-namespace packages.
+
+- fix issue246 fix finalizer order to be LIFO on independent fixtures
+ depending on a parametrized higher-than-function scoped fixture.
+ (was quite some effort so please bear with the complexity of this sentence :)
+ Thanks Ralph Schmitt for the precise failure example.
+
+- fix issue244 by implementing a special index for parameters to only use
+  indices for parametrized test ids
+
+- fix issue287 by running all finalizers but saving the exception
+ from the first failing finalizer and re-raising it so teardown will
+ still have failed. We reraise the first failing exception because
+ it might be the cause for other finalizers to fail.
+
+- fix ordering when mock.patch or other standard decorator-wrappings
+  are used with test methods. This fixes issue346 and should
+ help with random "xdist" collection failures. Thanks to
+ Ronny Pfannschmidt and Donald Stufft for helping to isolate it.
+
+- fix issue357 - special case "-k" expressions to allow for
+ filtering with simple strings that are not valid python expressions.
+ Examples: "-k 1.3" matches all tests parametrized with 1.3.
+ "-k None" filters all tests that have "None" in their name
+ and conversely "-k 'not None'".
+ Previously these examples would raise syntax errors.
+
+- fix issue384 by removing the trial support code
+ since the unittest compat enhancements allow
+ trial to handle it on its own
+
+- don't hide an ImportError when importing a plugin produces one.
+ fixes issue375.
+
+- fix issue275 - allow usefixtures and autouse fixtures
+ for running doctest text files.
+
+- fix issue380 by making --resultlog only rely on longrepr instead
+ of the "reprcrash" attribute which only exists sometimes.
+
+- address issue122: allow @pytest.fixture(params=iterator) by exploding
+ into a list early on.
+
+- fix pexpect-3.0 compatibility for pytest's own tests.
+ (fixes issue386)
+
+- allow nested parametrize-value markers, thanks James Lan for the PR.
+
+- fix unicode handling with new monkeypatch.setattr(import_path, value)
+ API. Thanks Rob Dennis. Fixes issue371.
+
+- fix unicode handling with junitxml, fixes issue368.
+
+- In assertion rewriting mode on Python 2, fix the detection of coding
+ cookies. See issue #330.
+
+- make "--runxfail" turn imperative pytest.xfail calls into no ops
+ (it already did neutralize pytest.mark.xfail markers)
+
+- refine pytest / pkg_resources interactions: The AssertionRewritingHook
+ PEP302 compliant loader now registers itself with setuptools/pkg_resources
+ properly so that the pkg_resources.resource_stream method works properly.
+ Fixes issue366. Thanks for the investigations and full PR to Jason R. Coombs.
+
+- pytestconfig fixture is now session-scoped as it is the same object during the
+ whole test run. Fixes issue370.
+
+- avoid one surprising case of marker malfunction/confusion::
+
+ @pytest.mark.some(lambda arg: ...)
+ def test_function():
+
+ would not work correctly because pytest assumes @pytest.mark.some
+ gets a function to be decorated already. We now at least detect if this
+ arg is a lambda and thus the example will work. Thanks Alex Gaynor
+ for bringing it up.
+
+- xfail a test on pypy that checks wrong encoding/ascii (pypy does
+ not error out). fixes issue385.
+
+- internally make varnames() deal with classes's __init__,
+ although it's not needed by pytest itself atm. Also
+ fix caching. Fixes issue376.
+
+- fix issue221 - handle importing of namespace-package with no
+ __init__.py properly.
+
+- refactor internal FixtureRequest handling to avoid monkeypatching.
+ One of the positive user-facing effects is that the "request" object
+ can now be used in closures.
+
+- fixed version comparison in pytest.importorskip(modname, minverstring)
+
+- fix issue377 by clarifying in the nose-compat docs that pytest
+ does not duplicate the unittest-API into the "plain" namespace.
+
+- fix verbose reporting for @mock'd test functions
+
+2.4.2 (2013-10-04)
+==================
+
+- on Windows require colorama and a newer py lib so that py.io.TerminalWriter()
+ now uses colorama instead of its own ctypes hacks. (fixes issue365)
+ thanks Paul Moore for bringing it up.
+
+- fix "-k" matching of tests where "repr" and "attr" and other names would
+ cause wrong matches because of an internal implementation quirk
+ (don't ask) which is now properly implemented. fixes issue345.
+
+- avoid tmpdir fixture to create too long filenames especially
+ when parametrization is used (issue354)
+
+- fix pytest-pep8 and pytest-flakes / pytest interactions
+ (collection names in mark plugin was assuming an item always
+ has a function which is not true for those plugins etc.)
+ Thanks Andi Zeidler.
+
+- introduce node.get_marker/node.add_marker API for plugins
+ like pytest-pep8 and pytest-flakes to avoid the messy
+ details of the node.keywords pseudo-dicts. Adapted
+ docs.
+
+- remove attempt to "dup" stdout at startup as it's icky.
+ the normal capturing should catch enough possibilities
+ of tests messing up standard FDs.
+
+- add pluginmanager.do_configure(config) as a link to
+ config.do_configure() for plugin-compatibility
+
+2.4.1 (2013-10-02)
+==================
+
+- When using parser.addoption() unicode arguments to the
+ "type" keyword should also be converted to the respective types.
+ thanks Floris Bruynooghe, @dnozay. (fixes issue360 and issue362)
+
+- fix dotted filename completion when using argcomplete
+  thanks Anthon van der Neut. (fixes issue361)
+
+- fix regression when a 1-tuple ("arg",) is used for specifying
+ parametrization (the values of the parametrization were passed
+ nested in a tuple). Thanks Donald Stufft.
+
+- merge doc typo fixes, thanks Andy Dirnberger
+
+2.4
+===
+
+known incompatibilities:
+
+- if calling --genscript from python2.7 or above, you only get a
+ standalone script which works on python2.7 or above. Use Python2.6
+ to also get a python2.5 compatible version.
+
+- all xunit-style teardown methods (nose-style, pytest-style,
+ unittest-style) will not be called if the corresponding setup method failed,
+ see issue322 below.
+
+- the pytest_plugin_unregister hook wasn't ever properly called
+ and there is no known implementation of the hook - so it got removed.
+
+- pytest.fixture-decorated functions cannot be generators (i.e. use
+ yield) anymore. This change might be reversed in 2.4.1 if it causes
+ unforeseen real-life issues. However, you can always write and return
+ an inner function/generator and change the fixture consumer to iterate
+ over the returned generator. This change was done in lieu of the new
+ ``pytest.yield_fixture`` decorator, see below.
+
+new features:
+
+- experimentally introduce a new ``pytest.yield_fixture`` decorator
+ which accepts exactly the same parameters as pytest.fixture but
+  mandates a ``yield`` statement instead of a ``return`` statement from
+ fixture functions. This allows direct integration with "with-style"
+ context managers in fixture functions and generally avoids registering
+  finalization callbacks in favour of treating the "after-yield" as
+ teardown code. Thanks Andreas Pelme, Vladimir Keleshev, Floris
+ Bruynooghe, Ronny Pfannschmidt and many others for discussions.
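+
+  For illustration, a minimal sketch of a yield fixture (the resource is just
+  a temporary file)::
+
+      import tempfile
+      import pytest
+
+      @pytest.yield_fixture
+      def scratch_file():
+          handle = tempfile.TemporaryFile()
+          yield handle            # provided to the test
+          handle.close()          # everything after the yield runs as teardown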
+
+- allow boolean expression directly with skipif/xfail
+ if a "reason" is also specified. Rework skipping documentation
+ to recommend "condition as booleans" because it prevents surprises
+ when importing markers between modules. Specifying conditions
+ as strings will remain fully supported.
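+
+  A small illustrative sketch (the version bound and test name are made up)::
+
+      import sys
+      import pytest
+
+      @pytest.mark.skipif(sys.version_info < (3, 3), reason="requires Python 3.3+")
+      def test_uses_py33_feature():
+          assert True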
+
+- reporting: color the last line red or green depending if
+ failures/errors occurred or everything passed. thanks Christian
+ Theunert.
+
+- make "import pdb ; pdb.set_trace()" work natively wrt capturing (no
+ "-s" needed anymore), making ``pytest.set_trace()`` a mere shortcut.
+
+- fix issue181: --pdb now also works on collect errors (and
+  on internal errors). This was implemented by a slight internal
+  refactoring and the introduction of a new
+  ``pytest_exception_interact`` hook (see next item).
+
+- fix issue341: introduce new experimental hook for IDEs/terminals to
+ intercept debugging: ``pytest_exception_interact(node, call, report)``.
+
+- new monkeypatch.setattr() variant to provide a shorter
+  invocation for patching out classes/functions from modules::
+
+ monkeypatch.setattr("requests.get", myfunc)
+
+ will replace the "get" function of the "requests" module with ``myfunc``.
+
+- fix issue322: tearDownClass is not run if setUpClass failed. Thanks
+ Mathieu Agopian for the initial fix. Also make all of pytest/nose
+ finalizer mimic the same generic behaviour: if a setupX exists and
+ fails, don't run teardownX. This internally introduces a new method
+ "node.addfinalizer()" helper which can only be called during the setup
+ phase of a node.
+
+- simplify pytest.mark.parametrize() signature: allow passing a
+  comma-separated string to specify argnames. For example:
+ ``pytest.mark.parametrize("input,expected", [(1,2), (2,3)])``
+ works as well as the previous:
+ ``pytest.mark.parametrize(("input", "expected"), ...)``.
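+
+  Used on a test function this would look roughly like (the test body
+  here is illustrative)::
+
+      import pytest
+
+      @pytest.mark.parametrize("input,expected", [(1, 2), (2, 3)])
+      def test_increment(input, expected):
+          assert input + 1 == expected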
+
+- add support for setUpModule/tearDownModule detection, thanks Brian Okken.
+
+- integrate tab-completion on options through use of "argcomplete".
+ Thanks Anthon van der Neut for the PR.
+
+- change option names to be hyphen-separated long options but keep the
+ old spelling backward compatible. py.test -h will only show the
+ hyphenated version, for example "--collect-only" but "--collectonly"
+ will remain valid as well (for backward-compat reasons). Many thanks to
+ Anthon van der Neut for the implementation and to Hynek Schlawack for
+ pushing us.
+
+- fix issue 308 - allow to mark/xfail/skip individual parameter sets
+ when parametrizing. Thanks Brianna Laugher.
+
+- call new experimental pytest_load_initial_conftests hook to allow
+ 3rd party plugins to do something before a conftest is loaded.
+
+Bug fixes:
+
+- fix issue358 - capturing options are now parsed more properly
+ by using a new parser.parse_known_args method.
+
+- pytest now uses argparse instead of optparse (thanks Anthon) which
+ means that "argparse" is added as a dependency if installing into python2.6
+ environments or below.
+
+- fix issue333: fix a case of bad unittest/pytest hook interaction.
+
+- PR27: correctly handle nose.SkipTest during collection. Thanks
+ Antonio Cuni, Ronny Pfannschmidt.
+
+- fix issue355: junitxml puts name="pytest" attribute to testsuite tag.
+
+- fix issue336: autouse fixture in plugins should work again.
+
+- fix issue279: improve object comparisons on assertion failure
+ for standard datatypes and recognise collections.abc. Thanks to
+ Brianna Laugher and Mathieu Agopian.
+
+- fix issue317: assertion rewriter support for the is_package method
+
+- fix issue335: document py.code.ExceptionInfo() object returned
+ from pytest.raises(), thanks Mathieu Agopian.
+
+- remove implicit distribute_setup support from setup.py.
+
+- fix issue305: ignore any problems when writing pyc files.
+
+- SO-17664702: call fixture finalizers even if the fixture function
+ partially failed (finalizers would not always be called before)
+
+- fix issue320 - fix class scope for fixtures when mixed with
+  module-level functions. Thanks Anatoly Bubenkoff.
+
+- you can specify "-q" or "-qq" to get different levels of "quieter"
+ reporting (thanks Katarzyna Jachim)
+
+- fix issue300 - Fix order of conftest loading when starting py.test
+ in a subdirectory.
+
+- fix issue323 - sorting of many module-scoped arg parametrizations
+
+- make sessionfinish hooks execute with the same cwd-context as at
+  session start (helps fix plugins such as pytest-cov which write
+  output files with relative paths)
+
+- fix issue316 - properly reference collection hooks in docs
+
+- fix issue 306 - cleanup of -k/-m options to only match markers/test
+ names/keywords respectively. Thanks Wouter van Ackooy.
+
+- improved doctest counting for doctests in python modules --
+ files without any doctest items will not show up anymore
+ and doctest examples are counted as separate test items.
+ thanks Danilo Bellini.
+
+- fix issue245 by depending on the released py-1.4.14
+ which fixes py.io.dupfile to work with files with no
+ mode. Thanks Jason R. Coombs.
+
+- fix junitxml generation when test output contains control characters,
+ addressing issue267, thanks Jaap Broekhuizen
+
+- fix issue338: honor --tb style for setup/teardown errors as well. Thanks Maho.
+
+- fix issue307 - use yaml.safe_load in example, thanks Mark Eichin.
+
+- better parametrize error messages, thanks Brianna Laugher
+
+- pytest_terminal_summary(terminalreporter) hooks can now use
+ ".section(title)" and ".line(msg)" methods to print extra
+ information at the end of a test run.
+
+2.3.5 (2013-04-30)
+==================
+
+- fix issue169: respect --tb=style with setup/teardown errors as well.
+
+- never consider a fixture function for test function collection
+
+- allow re-running of test items / helps to fix pytest-reruntests plugin
+ and also help to keep less fixture/resource references alive
+
+- put captured stdout/stderr into junitxml output even for passing tests
+ (thanks Adam Goucher)
+
+- Issue 265 - integrate nose setup/teardown with setupstate
+ so it doesn't try to teardown if it did not setup
+
+- issue 271 - don't write junitxml on slave nodes
+
+- Issue 274 - don't try to show full doctest example
+ when doctest does not know the example location
+
+- issue 280 - disable assertion rewriting on buggy CPython 2.6.0
+
+- inject "getfixture()" helper to retrieve fixtures from doctests,
+ thanks Andreas Zeidler
+
+- issue 259 - when assertion rewriting, be consistent with the default
+ source encoding of ASCII on Python 2
+
+- issue 251 - report a skip instead of ignoring classes with init
+
+- issue250 unicode/str mixes in parametrization names and values now works
+
+- issue257, assertion-triggered compilation of source ending in a
+ comment line doesn't blow up in python2.5 (fixed through py>=1.4.13.dev6)
+
+- fix --genscript option to generate standalone scripts that also
+ work with python3.3 (importer ordering)
+
+- issue171 - in assertion rewriting, show the repr of some
+ global variables
+
+- fix option help for "-k"
+
+- move long description of distribution into README.rst
+
+- improve docstring for metafunc.parametrize()
+
+- fix bug where using capsys with pytest.set_trace() in a test
+ function would break when looking at capsys.readouterr()
+
+- allow to specify prefixes starting with "_" when
+ customizing python_functions test discovery. (thanks Graham Horler)
+
+- improve PYTEST_DEBUG tracing output by putting
+  extra data on new lines with additional indent
+
+- ensure OutcomeExceptions like skip/fail have initialized exception attributes
+
+- issue 260 - don't use nose special setup on plain unittest cases
+
+- fix issue134 - print the collect errors that prevent running specified test items
+
+- fix issue266 - accept unicode in MarkEvaluator expressions
+
+2.3.4 (2012-11-20)
+==================
+
+- yielded test functions will now have autouse-fixtures active but
+  cannot accept fixtures as funcargs - it is recommended to use the
+  post-2.0 parametrize features instead of yield, see:
+ http://pytest.org/latest/example/parametrize.html
+- fix autouse-issue where autouse-fixtures would not be discovered
+ if defined in an a/conftest.py file and tests in a/tests/test_some.py
+- fix issue226 - LIFO ordering for fixture teardowns
+- fix issue224 - invocations with >256 char arguments now work
+- fix issue91 - add/discuss package/directory level setups in example
+- allow to dynamically define markers via
+ item.keywords[...]=assignment integrating with "-m" option
+- make "-k" accept an expressions the same as with "-m" so that one
+ can write: -k "name1 or name2" etc. This is a slight incompatibility
+ if you used special syntax like "TestClass.test_method" which you now
+ need to write as -k "TestClass and test_method" to match a certain
+ method in a certain test class.
+
+2.3.3 (2012-11-06)
+==================
+
+- fix issue214 - parse modules that contain special objects like e.g.
+ flask's request object which blows up on getattr access if no request
+ is active. thanks Thomas Waldmann.
+
+- fix issue213 - allow to parametrize with values like numpy arrays that
+ do not support an __eq__ operator
+
+- fix issue215 - split test_python.org into multiple files
+
+- fix issue148 - @unittest.skip on classes is now recognized and avoids
+ calling setUpClass/tearDownClass, thanks Pavel Repin
+
+- fix issue209 - reintroduce python2.4 support by depending on newer
+ pylib which re-introduced statement-finding for pre-AST interpreters
+
+- nose support: only call setup if it's a callable, thanks Andrew
+ Taumoefolau
+
+- fix issue219 - add py2.4-3.3 classifiers to TROVE list
+
+- in tracebacks *,** arg values are now shown next to normal arguments
+ (thanks Manuel Jacob)
+
+- fix issue217 - support mock.patch with pytest's fixtures - note that
+ you need either mock-1.0.1 or the python3.3 builtin unittest.mock.
+
+- fix issue127 - improve documentation for pytest_addoption() and
+ add a ``config.getoption(name)`` helper function for consistency.
+
+2.3.2 (2012-10-25)
+==================
+
+- fix issue208 and fix issue29 use new py version to avoid long pauses
+ when printing tracebacks in long modules
+
+- fix issue205 - conftests in subdirs customizing
+ pytest_pycollect_makemodule and pytest_pycollect_makeitem
+ now work properly
+
+- fix teardown-ordering for parametrized setups
+
+- fix issue127 - better documentation for pytest_addoption
+ and related objects.
+
+- fix unittest behaviour: TestCase.runtest only called if there are
+ test methods defined
+
+- improve trial support: don't collect its empty
+ unittest.TestCase.runTest() method
+
+- "python setup.py test" now works with pytest itself
+
+- fix/improve internal/packaging related bits:
+
+ - exception message check of test_nose.py now passes on python33 as well
+
+ - issue206 - fix test_assertrewrite.py to work when a global
+ PYTHONDONTWRITEBYTECODE=1 is present
+
+  - add tox.ini to pytest distribution so that ignore-dirs and other config
+ bits are properly distributed for maintainers who run pytest-own tests
+
+2.3.1 (2012-10-20)
+==================
+
+- fix issue202 - fix regression: using "self" from fixture functions now
+ works as expected (it's the same "self" instance that a test method
+ which uses the fixture sees)
+
+- skip pexpect using tests (test_pdb.py mostly) on freebsd* systems
+ due to pexpect not supporting it properly (hanging)
+
+- link to web pages from --markers output which provides help for
+ pytest.mark.* usage.
+
+2.3.0 (2012-10-19)
+==================
+
+- fix issue202 - better automatic names for parametrized test functions
+- fix issue139 - introduce @pytest.fixture which allows direct scoping
+ and parametrization of funcarg factories.
+- fix issue198 - conftest fixtures were not found on windows32 in some
+ circumstances with nested directory structures due to path manipulation issues
+- fix issue193 skip test functions that were parametrized with empty
+ parameter sets
+- fix python3.3 compat, mostly reporting bits that previously depended
+ on dict ordering
+- introduce re-ordering of tests by resource and parametrization setup
+ which takes precedence to the usual file-ordering
+- fix issue185 monkeypatching time.time does not cause pytest to fail
+- fix issue172 duplicate call of pytest.fixture decorated setup_module
+ functions
+- fix junitxml=path construction so that if tests change the
+ current working directory and the path is a relative path
+ it is constructed correctly from the original current working dir.
+- fix "python setup.py test" example to cause a proper "errno" return
+- fix issue165 - fix broken doc links and mention stackoverflow for FAQ
+- catch unicode-issues when writing failure representations
+ to terminal to prevent the whole session from crashing
+- fix xfail/skip confusion: a skip-mark or an imperative pytest.skip
+ will now take precedence before xfail-markers because we
+ can't determine xfail/xpass status in case of a skip. see also:
+ http://stackoverflow.com/questions/11105828/in-py-test-when-i-explicitly-skip-a-test-that-is-marked-as-xfail-how-can-i-get
+
+- always report installed 3rd party plugins in the header of a test run
+
+- fix issue160: a failing setup of an xfail-marked test should
+ be reported as xfail (not xpass)
+
+- fix issue128: show captured output when capsys/capfd are used
+
+- fix issue179: properly show the dependency chain of factories
+
+- pluginmanager.register(...) now raises ValueError if the
+ plugin has been already registered or the name is taken
+
+- fix issue159: improve http://pytest.org/latest/faq.html
+ especially with respect to the "magic" history, also mention
+ pytest-django, trial and unittest integration.
+
+- make request.keywords and node.keywords writable. All descendant
+ collection nodes will see keyword values. Keywords are dictionaries
+ containing markers and other info.
+
+- fix issue 178: xml binary escapes are now wrapped in py.xml.raw
+
+- fix issue 176: correctly catch the builtin AssertionError
+ even when we replaced AssertionError with a subclass on the
+ python level
+
+- factory discovery no longer fails with magic global callables
+ that provide no sane __code__ object (mock.call for example)
+
+- fix issue 182: testdir.inprocess_run now considers passed plugins
+
+- fix issue 188: ensure sys.exc_info is clear on python2
+ before calling into a test
+
+- fix issue 191: add unittest TestCase runTest method support
+- fix issue 156: monkeypatch correctly handles class level descriptors
+
+- reporting refinements:
+
+ - pytest_report_header now receives a "startdir" so that
+ you can use startdir.bestrelpath(yourpath) to show
+ nice relative path
+
+ - allow plugins to implement both pytest_report_header and
+ pytest_sessionstart (sessionstart is invoked first).
+
+ - don't show deselected reason line if there is none
+
+  - py.test -vv will show all assert comparisons instead of truncating
+
+2.2.4 (2012-05-22)
+==================
+
+- fix error message for rewritten assertions involving the % operator
+- fix issue 126: correctly match all invalid xml characters for junitxml
+ binary escape
+- fix issue with unittest: now @unittest.expectedFailure markers should
+ be processed correctly (you can also use @pytest.mark markers)
+- document integration with the extended distribute/setuptools test commands
+- fix issue 140: properly get the real functions
+ of bound classmethods for setup/teardown_class
+- fix issue #141: switch from the deceased paste.pocoo.org to bpaste.net
+- fix issue #143: call unconfigure/sessionfinish always when
+ configure/sessionstart where called
+- fix issue #144: better mangle test ids to junitxml classnames
+- upgrade distribute_setup.py to 0.6.27
+
+2.2.3 (2012-02-05)
+==================
+
+- fix uploaded package to only include necessary files
+
+2.2.2 (2012-02-05)
+==================
+
+- fix issue101: wrong args to unittest.TestCase test function now
+ produce better output
+- fix issue102: report more useful errors and hints for when a
+ test directory was renamed and some pyc/__pycache__ remain
+- fix issue106: allow parametrize to be applied multiple times
+ e.g. from module, class and at function level.
+- fix issue107: actually perform session scope finalization
+- don't check in parametrize if indirect parameters are funcarg names
+- add chdir method to monkeypatch funcarg
+- fix crash resulting from calling monkeypatch undo a second time
+- fix issue115: make --collectonly robust against early failure
+ (missing files/directories)
+- "-qq --collectonly" now shows only files and the number of tests in them
+- "-q --collectonly" now shows test ids
+- allow adding of attributes to test reports such that it also works
+ with distributed testing (no upgrade of pytest-xdist needed)
+
+2.2.1 (2011-12-16)
+==================
+
+- fix issue99 (in pytest and py) internal errors with resultlog now
+ produce better output - fixed by normalizing pytest_internalerror
+ input arguments.
+- fix issue97 / traceback issues (in pytest and py) improve traceback output
+ in conjunction with jinja2 and cython which hack tracebacks
+- fix issue93 (in pytest and pytest-xdist) avoid "delayed teardowns":
+ the final test in a test node will now run its teardown directly
+ instead of waiting for the end of the session. Thanks Dave Hunt for
+ the good reporting and feedback. The pytest_runtest_protocol as well
+ as the pytest_runtest_teardown hooks now have "nextitem" available
+ which will be None indicating the end of the test run.
+- fix collection crash due to unknown-source collected items, thanks
+ to Ralf Schmitt (fixed by depending on a more recent pylib)
+
+2.2.0 (2011-11-18)
+==================
+
+- fix issue90: introduce eager tearing down of test items so that
+  teardown functions are called earlier.
+- add an all-powerful metafunc.parametrize function which allows to
+ parametrize test function arguments in multiple steps and therefore
+ from independent plugins and places.
+- add a @pytest.mark.parametrize helper which allows to easily
+ call a test function with different argument values
+- Add examples to the "parametrize" example page, including a quick port
+ of Test scenarios and the new parametrize function and decorator.
+- introduce registration for "pytest.mark.*" helpers via ini-files
+ or through plugin hooks. Also introduce a "--strict" option which
+ will treat unregistered markers as errors
+ allowing to avoid typos and maintain a well described set of markers
+  for your test suite. See examples at http://pytest.org/latest/mark.html
+ and its links.
+- issue50: introduce "-m marker" option to select tests based on markers
+ (this is a stricter and more predictable version of '-k' in that "-m"
+ only matches complete markers and has more obvious rules for and/or
+  semantics).
+- new feature to help optimizing the speed of your tests:
+ --durations=N option for displaying N slowest test calls
+ and setup/teardown methods.
+- fix issue87: --pastebin now works with python3
+- fix issue89: --pdb with unexpected exceptions in doctest work more sensibly
+- fix and cleanup pytest's own test suite to not leak FDs
+- fix issue83: link to generated funcarg list
+- fix issue74: pyarg module names are now checked against imp.find_module false positives
+- fix compatibility with twisted/trial-11.1.0 use cases
+- simplify Node.listchain
+- simplify junitxml output code by relying on py.xml
+- add support for skip properties on unittest classes and functions
+
+2.1.3 (2011-10-18)
+==================
+
+- fix issue79: assertion rewriting failed on some comparisons in boolops
+- correctly handle zero length arguments (a la pytest '')
+- fix issue67 / junitxml now contains correct test durations, thanks ronny
+- fix issue75 / skipping test failure on jython
+- fix issue77 / Allow assertrepr_compare hook to apply to a subset of tests
+
+2.1.2 (2011-09-24)
+==================
+
+- fix assertion rewriting on files with windows newlines on some Python versions
+- refine test discovery by package/module name (--pyargs), thanks Florian Mayer
+- fix issue69 / assertion rewriting fixed on some boolean operations
+- fix issue68 / packages now work with assertion rewriting
+- fix issue66: use different assertion rewriting caches when the -O option is passed
+- don't try assertion rewriting on Jython, use reinterp
+
+2.1.1
+=====
+
+- fix issue64 / pytest.set_trace now works within pytest_generate_tests hooks
+- fix issue60 / fix error conditions involving the creation of __pycache__
+- fix issue63 / assertion rewriting on inserts involving strings containing '%'
+- fix assertion rewriting on calls with a ** arg
+- don't cache rewritten modules if bytecode generation is disabled
+- fix assertion rewriting in read-only directories
+- fix issue59: provide system-out/err tags for junitxml output
+- fix issue61: assertion rewriting on boolean operations with 3 or more operands
+- you can now build a man page with "cd doc ; make man"
+
+2.1.0 (2011-07-09)
+==================
+
+- fix issue53 call nose-style setup functions with correct ordering
+- fix issue58 and issue59: new assertion code fixes
+- merge Benjamin's assertionrewrite branch: now assertions
+ for test modules on python 2.6 and above are done by rewriting
+ the AST and saving the pyc file before the test module is imported.
+ see doc/assert.txt for more info.
+- fix issue43: improve doctests with better traceback reporting on
+ unexpected exceptions
+- fix issue47: timing output in junitxml for test cases is now correct
+- fix issue48: typo in MarkInfo repr leading to exception
+- fix issue49: avoid confusing error when initialization partially fails
+- fix issue44: env/username expansion for junitxml file path
+- show releaselevel information in test runs for pypy
+- reworked doc pages for better navigation and PDF generation
+- report KeyboardInterrupt even if interrupted during session startup
+- fix issue 35 - provide PDF doc version and download link from index page
+
+2.0.3 (2011-05-11)
+==================
+
+- fix issue38: nicer tracebacks on calls to hooks, particularly early
+ configure/sessionstart ones
+
+- fix missing skip reason/meta information in junitxml files, reported
+ via http://lists.idyll.org/pipermail/testing-in-python/2011-March/003928.html
+
+- fix issue34: avoid collection failure with "test" prefixed classes
+ deriving from object.
+
+- don't require zlib (and other libs) for genscript plugin without
+ --genscript actually being used.
+
+- speed up skips (by not doing a full traceback representation
+ internally)
+
+- fix issue37: avoid invalid characters in junitxml's output
+
+2.0.2 (2011-03-09)
+==================
+
+- tackle issue32 - speed up test runs of very quick test functions
+ by reducing the relative overhead
+
+- fix issue30 - extended xfail/skipif handling and improved reporting.
+ If you have a syntax error in your skip/xfail
+ expressions you now get nice error reports.
+
+ Also you can now access module globals from xfail/skipif
+ expressions so that this for example works now::
+
+ import pytest
+ import mymodule
+    @pytest.mark.skipif("mymodule.__version__[0] == '1'")
+ def test_function():
+ pass
+
+ This will not run the test function if the module's version string
+ does not start with a "1". Note that specifying a string instead
+ of a boolean expressions allows py.test to report meaningful information
+ when summarizing a test run as to what conditions lead to skipping
+ (or xfail-ing) tests.
+
+- fix issue28 - setup_method and pytest_generate_tests work together
+ The setup_method fixture method now gets called also for
+ test function invocations generated from the pytest_generate_tests
+ hook.
+
+- fix issue27 - collectonly and keyword-selection (-k) now work together
+ Also, if you do "py.test --collectonly -q" you now get a flat list
+ of test ids that you can use to paste to the py.test commandline
+ in order to execute a particular test.
+
+- fix issue25 avoid reported problems with --pdb and python3.2/encodings output
+
+- fix issue23 - tmpdir argument now works on Python3.2 and WindowsXP
+ Starting with Python3.2 os.symlink may be supported. By requiring
+ a newer py lib version the py.path.local() implementation acknowledges
+ this.
+
+- fixed typos in the docs (thanks Victor Garcia, Brianna Laugher) and particular
+ thanks to Laura Creighton who also reviewed parts of the documentation.
+
+- fix slightly wrong output of verbose progress reporting for classes
+ (thanks Amaury)
+
+- more precise (avoiding of) deprecation warnings for node.Class|Function accesses
+
+- avoid std unittest assertion helper code in tracebacks (thanks Ronny)
+
+2.0.1 (2011-02-07)
+==================
+
+- refine and unify initial capturing so that it works nicely
+ even if the logging module is used on an early-loaded conftest.py
+ file or plugin.
+- allow to omit "()" in test ids to allow for uniform test ids
+ as produced by Alfredo's nice pytest.vim plugin.
+- fix issue12 - show plugin versions with "--version" and
+ "--traceconfig" and also document how to add extra information
+ to reporting test header
+- fix issue17 (import-* reporting issue on python3) by
+ requiring py>1.4.0 (1.4.1 is going to include it)
+- fix issue10 (numpy arrays truth checking) by refining
+ assertion interpretation in py lib
+- fix issue15: make nose compatibility tests compatible
+ with python3 (now that nose-1.0 supports python3)
+- remove somewhat surprising "same-conftest" detection because
+  it ignores conftest.py files when they appear in several subdirs.
+- improve assertions ("not in"), thanks Floris Bruynooghe
+- improve behaviour/warnings when running on top of "python -OO"
+ (assertions and docstrings are turned off, leading to potential
+ false positives)
+- introduce a pytest_cmdline_processargs(args) hook
+ to allow dynamic computation of command line arguments.
+  This fixes a regression because py.test prior to 2.0
+  allowed setting command line options from conftest.py
+  files, which pytest-2.0 had so far only allowed from ini-files.
+- fix issue7: assert failures in doctest modules.
+  unexpected failures in doctests will now generally
+  show nicer, i.e. within the doctest failing context.
+- fix issue9: setup/teardown functions for an xfail-marked
+ test will report as xfail if they fail but report as normally
+ passing (not xpassing) if they succeed. This only is true
+ for "direct" setup/teardown invocations because teardown_class/
+ teardown_module cannot closely relate to a single test.
+- fix issue14: no logging errors at process exit
+- refinements to "collecting" output on non-ttys
+- refine internal plugin registration and --traceconfig output
+- introduce a mechanism to prevent/unregister plugins from the
+ command line, see http://pytest.org/plugins.html#cmdunregister
+- activate resultlog plugin by default
+- fix regression wrt yielded tests which due to the
+ collection-before-running semantics were not
+ setup as with pytest 1.3.4. Note, however, that
+ the recommended and much cleaner way to do test
+  parametrization remains the "pytest_generate_tests"
+ mechanism, see the docs.
+
+2.0.0 (2010-11-25)
+==================
+
+- pytest-2.0 is now its own package and depends on pylib-2.0
+- new ability: python -m pytest / python -m pytest.main invocation
+- new python invocation: pytest.main(args, plugins) to load
+ some custom plugins early.
+- try harder to run unittest test suites in a more compatible manner
+ by deferring setup/teardown semantics to the unittest package.
+ also work harder to run twisted/trial and Django tests which
+ should now basically work by default.
+- introduce a new way to set config options via ini-style files,
+ by default setup.cfg and tox.ini files are searched. The old
+  ways (certain environment variables, dynamic conftest.py reading)
+  are removed.
+- add a new "-q" option which decreases verbosity and prints a more
+ nose/unittest-style "dot" output.
+- fix issue135 - marks now work with unittest test cases as well
+- fix issue126 - introduce py.test.set_trace() to trace execution via
+ PDB during the running of tests even if capturing is ongoing.
+- fix issue123 - new "python -m py.test" invocation for py.test
+ (requires Python 2.5 or above)
+- fix issue124 - make reporting more resilient against tests opening
+ files on filedescriptor 1 (stdout).
+- fix issue109 - sibling conftest.py files will not be loaded.
+ (and Directory collectors cannot be customized anymore from a Directory's
+ conftest.py - this needs to happen at least one level up).
+- introduce (customizable) assertion failure representations and enhance
+ output on assertion failures for comparisons and other cases (Floris Bruynooghe)
+- nose-plugin: pass through type-signature failures in setup/teardown
+ functions instead of not calling them (Ed Singleton)
+- remove py.test.collect.Directory (follows from a major refactoring
+ and simplification of the collection process)
+- majorly reduce py.test core code, shift function/python testing to own plugin
+- fix issue88 (finding custom test nodes from command line arg)
+- refine 'tmpdir' creation, will now create basenames better associated
+ with test names (thanks Ronny)
+- "xpass" (unexpected pass) tests don't cause exitcode!=0
+- fix issue131 / issue60 - importing doctests in __init__ files used as namespace packages
+- fix issue93 stdout/stderr is captured while importing conftest.py
+- fix bug: unittest collected functions now also can have "pytestmark"
+ applied at class/module level
+- add ability to use "class" level for cached_setup helper
+- fix strangeness: mark.* objects are now immutable, create new instances
+
+1.3.4 (2010-09-14)
+==================
+
+- fix issue111: improve install documentation for windows
+- fix issue119: fix custom collectability of __init__.py as a module
+- fix issue116: --doctestmodules work with __init__.py files as well
+- fix issue115: unify internal exception passthrough/catching/GeneratorExit
+- fix issue118: new --tb=native for presenting cpython-standard exceptions
+
+1.3.3 (2010-07-30)
+==================
+
+- fix issue113: assertion representation problem with triple-quoted strings
+ (and possibly other cases)
+- make conftest loading detect that a conftest file with the same
+ content was already loaded, avoids surprises in nested directory structures
+ which can be produced e.g. by Hudson. It probably removes the need to use
+ --confcutdir in most cases.
+- fix terminal coloring for win32
+ (thanks Michael Foord for reporting)
+- fix weirdness: make terminal width detection work on stdout instead of stdin
+ (thanks Armin Ronacher for reporting)
+- remove trailing whitespace in all py/text distribution files
+
+1.3.2 (2010-07-08)
+==================
+
+**New features**
+
+- fix issue103: introduce py.test.raises as context manager, examples::
+
+ with py.test.raises(ZeroDivisionError):
+ x = 0
+ 1 / x
+
+ with py.test.raises(RuntimeError) as excinfo:
+ call_something()
+
+ # you may do extra checks on excinfo.value|type|traceback here
+
+ (thanks Ronny Pfannschmidt)
+
+- Funcarg factories can now dynamically apply a marker to a
+ test invocation. This is for example useful if a factory
+ provides parameters to a test which are expected-to-fail::
+
+ def pytest_funcarg__arg(request):
+ request.applymarker(py.test.mark.xfail(reason="flaky config"))
+ ...
+
+ def test_function(arg):
+ ...
+
+- improved error reporting on collection and import errors. This makes
+ use of a more general mechanism, namely that for custom test item/collect
+ nodes ``node.repr_failure(excinfo)`` is now uniformly called so that you can
+ override it to return a string error representation of your choice
+ which is going to be reported as a (red) string.
+
+- introduce '--junitprefix=STR' option to prepend a prefix
+ to all reports in the junitxml file.
+
+**Bug fixes**
+
+- make tests and the ``pytest_recwarn`` plugin in particular fully compatible
+  with Python2.7 (if you use the ``recwarn`` funcarg, warnings will be enabled so that
+ you can properly check for their existence in a cross-python manner).
+- refine --pdb: ignore xfailed tests, unify its TB-reporting and
+ don't display failures again at the end.
+- fix assertion interpretation with the ** operator (thanks Benjamin Peterson)
+- fix issue105 assignment on the same line as a failing assertion (thanks Benjamin Peterson)
+- fix issue104 proper escaping for test names in junitxml plugin (thanks anonymous)
+- fix issue57 -f|--looponfail to work with xpassing tests (thanks Ronny)
+- fix issue92 collectonly reporter and --pastebin (thanks Benjamin Peterson)
+- fix py.code.compile(source) to generate unique filenames
+- fix assertion re-interp problems on PyPy, by deferring code
+ compilation to the (overridable) Frame.eval class. (thanks Amaury Forgeot)
+- fix py.path.local.pyimport() to work with directories
+- streamline py.path.local.mkdtemp implementation and usage
+- don't print empty lines when showing junitxml-filename
+- add optional boolean ignore_errors parameter to py.path.local.remove
+- fix terminal writing on win32/python2.4
+- py.process.cmdexec() now tries harder to return properly encoded unicode objects
+ on all python versions
+- install plain py.test/py.which scripts also for Jython, this helps to
+ get canonical script paths in virtualenv situations
+- make path.bestrelpath(path) return ".", note that when calling
+ X.bestrelpath the assumption is that X is a directory.
+- make initial conftest discovery ignore "--" prefixed arguments
+- fix resultlog plugin when used in a multicpu/multihost xdist situation
+ (thanks Jakub Gustak)
+- perform distributed testing related reporting in the xdist-plugin
+ rather than having dist-related code in the generic py.test
+ distribution
+- fix homedir detection on Windows
+- ship distribute_setup.py version 0.6.13
+
+1.3.1 (2010-05-25)
+==================
+
+**New features**
+
+- issue91: introduce new py.test.xfail(reason) helper
+ to imperatively mark a test as expected to fail. Can
+ be used from within setup and test functions. This is
+ useful especially for parametrized tests when certain
+ configurations are expected-to-fail. In this case the
+ declarative approach with the @py.test.mark.xfail cannot
+ be used as it would mark all configurations as xfail.
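+
+  A small sketch of the imperative style (the platform check merely
+  stands in for a known-bad configuration)::
+
+      import sys
+
+      import py
+
+      def test_function():
+          if sys.platform == "win32":  # stand-in for a known-bad configuration
+              py.test.xfail("expected to fail on this configuration")
+          assert True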
+
+- issue102: introduce new --maxfail=NUM option to stop
+ test runs after NUM failures. This is a generalization
+ of the '-x' or '--exitfirst' option which is now equivalent
+ to '--maxfail=1'. Both '-x' and '--maxfail' will
+ now also print a line near the end indicating the Interruption.
+
+- issue89: allow py.test.mark decorators to be used on classes
+ (class decorators were introduced with python2.6) and
+ also allow to have multiple markers applied at class/module level
+ by specifying a list.
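+
+  At module or class level this could look like the following sketch
+  (the marker names are illustrative)::
+
+      import py
+
+      pytestmark = [py.test.mark.slow, py.test.mark.webtest]
+
+      @py.test.mark.smoke
+      class TestSomething:
+          def test_one(self):
+              assert True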
+
+- improve and refine letter reporting in the progress bar:
+ . pass
+ f failed test
+ s skipped tests (reminder: use for dependency/platform mismatch only)
+ x xfailed test (test that was expected to fail)
+ X xpassed test (test that was expected to fail but passed)
+
+ You can use any combination of 'fsxX' with the '-r' extended
+ reporting option. The xfail/xpass results will show up as
+ skipped tests in the junitxml output - which also fixes
+ issue99.
+
+- make py.test.cmdline.main() return the exitstatus instead of raising
+ SystemExit and also allow it to be called multiple times. This of
+  course requires that your application and tests are properly torn
+  down and don't have global state.
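+
+  For example (the arguments passed here are illustrative)::
+
+      import py
+
+      exitstatus = py.test.cmdline.main(["-q", "tests/"])
+      # exitstatus is 0 when all collected tests passed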
+
+**Bug Fixes**
+
+- improved traceback presentation:
+ - improved and unified reporting for "--tb=short" option
+ - Errors during test module imports are much shorter, (using --tb=short style)
+ - raises shows shorter more relevant tracebacks
+ - --fulltrace now more systematically makes traces longer / inhibits cutting
+
+- improve support for raises and other dynamically compiled code by
+ manipulating python's linecache.cache instead of the previous
+ rather hacky way of creating custom code objects. This makes
+  it work seamlessly on Jython and PyPy where it previously didn't.
+
+- fix issue96: make capturing more resilient against Control-C
+ interruptions (involved somewhat substantial refactoring
+ to the underlying capturing functionality to avoid race
+ conditions).
+
+- fix chaining of conditional skipif/xfail decorators - so it works now
+ as expected to use multiple @py.test.mark.skipif(condition) decorators,
+ including specific reporting which of the conditions lead to skipping.
+
+- fix issue95: late-import zlib so that it's not required
+ for general py.test startup.
+
+- fix issue94: make reporting more robust against bogus source code
+ (and internally be more careful when presenting unexpected byte sequences)
+
+
+1.3.0 (2010-05-05)
+==================
+
+- deprecate --report option in favour of a new shorter and easier to
+ remember -r option: it takes a string argument consisting of any
+ combination of 'xfsX' characters. They relate to the single chars
+ you see during the dotted progress printing and will print an extra line
+ per test at the end of the test run. This extra line indicates the exact
+  position or test ID that you can directly paste to the py.test cmdline in order
+ to re-run a particular test.
+
+- allow external plugins to register new hooks via the new
+ pytest_addhooks(pluginmanager) hook. The new release of
+ the pytest-xdist plugin for distributed and looponfailing
+ testing requires this feature.
+
+- add a new pytest_ignore_collect(path, config) hook to allow projects and
+ plugins to define exclusion behaviour for their directory structure -
+ for example you may define in a conftest.py this method::
+
+ def pytest_ignore_collect(path):
+ return path.check(link=1)
+
+ to prevent even a collection try of any tests in symlinked dirs.
+
+- new pytest_pycollect_makemodule(path, parent) hook for
+ allowing customization of the Module collection object for a
+ matching test module.
+
+- extend and refine xfail mechanism:
+ ``@py.test.mark.xfail(run=False)`` do not run the decorated test
+ ``@py.test.mark.xfail(reason="...")`` prints the reason string in xfail summaries
+ specifying ``--runxfail`` on command line virtually ignores xfail markers
+
+- expose (previously internal) commonly useful methods:
+  py.io.get_terminal_width() -> return terminal width
+ py.io.ansi_print(...) -> print colored/bold text on linux/win32
+ py.io.saferepr(obj) -> return limited representation string
+
+- expose test outcome related exceptions as py.test.skip.Exception,
+ py.test.raises.Exception etc., useful mostly for plugins
+ doing special outcome interpretation/tweaking
+
+- (issue85) fix junitxml plugin to handle tests with non-ascii output
+
+- fix/refine python3 compatibility (thanks Benjamin Peterson)
+
+- fixes for making the jython/win32 combination work, note however:
+ jython2.5.1/win32 does not provide a command line launcher, see
+ http://bugs.jython.org/issue1491 . See pylib install documentation
+ for how to work around.
+
+- fixes for handling of unicode exception values and unprintable objects
+
+- (issue87) fix unboundlocal error in assertionold code
+
+- (issue86) improve documentation for looponfailing
+
+- refine IO capturing: stdin-redirect pseudo-file now has a NOP close() method
+
+- ship distribute_setup.py version 0.6.10
+
+- added links to the new capturelog and coverage plugins
+
+
+1.2.0 (2010-01-18)
+==================
+
+- refined usage and options for "py.cleanup"::
+
+ py.cleanup # remove "*.pyc" and "*$py.class" (jython) files
+ py.cleanup -e .swp -e .cache # also remove files with these extensions
+ py.cleanup -s # remove "build" and "dist" directory next to setup.py files
+ py.cleanup -d # also remove empty directories
+ py.cleanup -a # synonym for "-s -d -e 'pip-log.txt'"
+ py.cleanup -n # dry run, only show what would be removed
+
+- add a new option "py.test --funcargs" which shows available funcargs
+ and their help strings (docstrings on their respective factory function)
+ for a given test path
+
+- display a short and concise traceback if a funcarg lookup fails
+
+- early-load "conftest.py" files in non-dot first-level sub directories.
+ allows to conveniently keep and access test-related options in a ``test``
+ subdir and still add command line options.
+
+- fix issue67: new super-short traceback-printing option: "--tb=line" will print a single line for each failing (python) test indicating its filename, lineno and the failure value
+
+- fix issue78: always call python-level teardown functions even if the
+  corresponding setup failed. This includes refinements for calling setup_module/class functions
+  which will now only be called once instead of the previous behaviour where they'd be called
+  multiple times if they raise an exception (including a Skipped exception). Any exception
+  will be recorded and associated with all tests in the corresponding module/class scope.
+
+- fix issue63: assume <40 columns to be a bogus terminal width, default to 80
+
+- fix pdb debugging to be in the correct frame on raises-related errors
+
+- update apipkg.py to fix an issue where recursive imports might
+ unnecessarily break importing
+
+- fix plugin links
+
+1.1.1 (2009-11-24)
+==================
+
+- moved dist/looponfailing from py.test core into a new
+ separately released pytest-xdist plugin.
+
+- new junitxml plugin: --junitxml=path will generate a junit style xml file
+ which is processable e.g. by the Hudson CI system.
+
+- new option: --genscript=path will generate a standalone py.test script
+ which will not need any libraries installed. thanks to Ralf Schmitt.
+
+- new option: --ignore will prevent specified path from collection.
+ Can be specified multiple times.
+
+- new option: --confcutdir=dir will make py.test only consider conftest
+ files that are relative to the specified dir.
+
+- new funcarg: "pytestconfig" is the pytest config object for access
+ to command line args and can now be easily used in a test.
+
+- install ``py.test`` and ``py.which`` with a ``-$VERSION`` suffix to
+ disambiguate between Python3, python2.X, Jython and PyPy installed versions.
+
+- new "pytestconfig" funcarg allows access to test config object
+
+- new "pytest_report_header" hook can return additional lines
+ to be displayed at the header of a test run.
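+
+  For example, a conftest.py might add a line like this (the text is
+  illustrative)::
+
+      def pytest_report_header(config):
+          return "example project, extra header line"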
+
+- (experimental) allow "py.test path::name1::name2::..." for pointing
+ to a test within a test collection directly. This might eventually
+ evolve as a full substitute to "-k" specifications.
+
+- streamlined plugin loading: order is now as documented in
+ customize.html: setuptools, ENV, commandline, conftest.
+  also setuptools entry point names are turned to canonical names ("pytest_*")
+
+- automatically skip tests that need 'capfd' but have no os.dup
+
+- allow pytest_generate_tests to be defined in classes as well
+
+- deprecate usage of 'disabled' attribute in favour of pytestmark
+- deprecate definition of Directory, Module, Class and Function nodes
+ in conftest.py files. Use pytest collect hooks instead.
+
+- collection/item node specific runtest/collect hooks are only called exactly
+ on matching conftest.py files, i.e. ones which are exactly below
+ the filesystem path of an item
+
+- change: the first pytest_collect_directory hook to return something
+  will now prevent further hooks from being called.
+
+- change: figleaf plugin now requires --figleaf to run. Also
+ change its long command line options to be a bit shorter (see py.test -h).
+
+- change: pytest doctest plugin is now enabled by default and has a
+ new option --doctest-glob to set a pattern for file matches.
+
+- change: remove internal py._* helper vars, only keep py._pydir
+
+- robustify capturing to survive if custom pytest_runtest_setup
+ code failed and prevented the capturing setup code from running.
+
+- make py.test.* helpers provided by default plugins visible early -
+ works transparently both for pydoc and for interactive sessions
+ which will regularly see e.g. py.test.mark and py.test.importorskip.
+
+- simplify internal plugin manager machinery
+- simplify internal collection tree by introducing a RootCollector node
+
+- fix assert reinterpretation that sees a call containing "keyword=..."
+
+- fix issue66: invoke pytest_sessionstart and pytest_sessionfinish
+ hooks on slaves during dist-testing, report module/session teardown
+ hooks correctly.
+
+- fix issue65: properly handle dist-testing if no
+ execnet/py lib installed remotely.
+
+- skip some install-tests if no execnet is available
+
+- fix docs, fix internal bin/ script generation
+
+
+1.1.0 (2009-11-05)
+==================
+
+- introduce automatic plugin registration via 'pytest11'
+ entrypoints via setuptools' pkg_resources.iter_entry_points
+
+- fix py.test dist-testing to work with execnet >= 1.0.0b4
+
+- re-introduce py.test.cmdline.main() for better backward compatibility
+
+- svn paths: fix a bug with path.check(versioned=True) for svn paths,
+ allow '%' in svn paths, make svnwc.update() default to interactive mode
+ like in 1.0.x and add svnwc.update(interactive=False) to inhibit interaction.
+
+- refine distributed tarball to contain test and no pyc files
+
+- try harder to have deprecation warnings for py.compat.* accesses
+ report a correct location
+
+1.0.3
+=====
+
+* adjust and improve docs
+
+* remove py.rest tool and internal namespace - it was
+ never really advertised and can still be used with
+ the old release if needed. If there is interest
+  it could be revived into its own tool, I guess.
+
+* fix issue48 and issue59: raise an Error if the module
+ from an imported test file does not seem to come from
+ the filepath - avoids "same-name" confusion that has
+ been reported repeatedly
+
+* merged Ronny's nose-compatibility hacks: now
+ nose-style setup_module() and setup() functions are
+ supported
+
+* introduce generalized py.test.mark function marking
+
+* reshuffle / refine command line grouping
+
+* deprecate parser.addgroup in favour of getgroup which creates option group
+
+* add --report command line option that allows to control showing of skipped/xfailed sections
+
+* generalized skipping: a new way to mark python functions with skipif or xfail
+  at function, class and module level based on platform or sys-module attributes.
+
+* extend py.test.mark decorator to allow for positional args
+
+* introduce and test "py.cleanup -d" to remove empty directories
+
+* fix issue #59 - robustify unittest test collection
+
+* make bpython/help interaction work by adding an __all__ attribute
+ to ApiModule, cleanup initpkg
+
+* use MIT license for pylib, add some contributors
+
+* remove py.execnet code and substitute all usages with 'execnet' proper
+
+* fix issue50 - cached_setup now caches more to expectations
+ for test functions with multiple arguments.
+
+* merge Jarko's fixes, issue #45 and #46
+
+* add the ability to specify a path for py.lookup to search in
+
+* fix a funcarg cached_setup bug probably only occurring
+ in distributed testing and "module" scope with teardown.
+
+* many fixes and changes for making the code base python3 compatible,
+ many thanks to Benjamin Peterson for helping with this.
+
+* consolidate builtins implementation to be compatible with >=2.3,
+ add helpers to ease keeping 2 and 3k compatible code
+
+* deprecate py.compat.doctest|subprocess|textwrap|optparse
+
+* deprecate py.magic.autopath, remove py/magic directory
+
+* move pytest assertion handling to py/code and a pytest_assertion
+ plugin, add "--no-assert" option, deprecate py.magic namespaces
+ in favour of (less) py.code ones.
+
+* consolidate and cleanup py/code classes and files
+
+* cleanup py/misc, move tests to bin-for-dist
+
+* introduce delattr/delitem/delenv methods to py.test's monkeypatch funcarg
+
+* consolidate py.log implementation, remove old approach.
+
+* introduce py.io.TextIO and py.io.BytesIO for distinguishing between
+ text/unicode and byte-streams (uses underlying standard lib io.*
+ if available)
+
+* make py.unittest_convert helper script available which converts "unittest.py"
+ style files into the simpler assert/direct-test-classes py.test/nosetests
+ style. The script was written by Laura Creighton.
+
+* simplified internal localpath implementation
+
+1.0.2 (2009-08-27)
+==================
+
+* fixing packaging issues, triggered by fedora redhat packaging,
+ also added doc, examples and contrib dirs to the tarball.
+
+* added a documentation link to the new django plugin.
+
+1.0.1 (2009-08-19)
+==================
+
+* added a 'pytest_nose' plugin which handles nose.SkipTest,
+ nose-style function/method/generator setup/teardown and
+ tries to report functions correctly.
+
+* capturing of unicode writes or encoded strings to sys.stdout/err
+ work better, also terminalwriting was adapted and somewhat
+ unified between windows and linux.
+
+* improved documentation layout and content a lot
+
+* added a "--help-config" option to show conftest.py / ENV-var names for
+ all longopt cmdline options, and some special conftest.py variables.
+ renamed 'conf_capture' conftest setting to 'option_capture' accordingly.
+
+* fix issue #27: better reporting on non-collectable items given on commandline
+ (e.g. pyc files)
+
+* fix issue #33: added --version flag (thanks Benjamin Peterson)
+
+* fix issue #32: adding support for "incomplete" paths to wcpath.status()
+
+* "Test" prefixed classes are *not* collected by default anymore if they
+ have an __init__ method
+
+* monkeypatch setenv() now accepts a "prepend" parameter
+
+* improved reporting of collection error tracebacks
+
+* simplified multicall mechanism and plugin architecture,
+ renamed some internal methods and argnames
+
+1.0.0 (2009-08-04)
+==================
+
+* more terse reporting: try to show filesystem paths relative to the current dir
+* improve xfail output a bit
+
+1.0.0b9 (2009-07-31)
+====================
+
+* cleanly handle and report final teardown of test setup
+
+* fix svn-1.6 compat issue with py.path.svnwc().versioned()
+ (thanks Wouter Vanden Hove)
+
+* setup/teardown or collection problems now show as ERRORs
+ or with big "E"'s in the progress lines. they are reported
+ and counted separately.
+
+* dist-testing: properly handle test items that get locally
+ collected but cannot be collected on the remote side - often
+ due to platform/dependency reasons
+
+* simplified py.test.mark API - see keyword plugin documentation
+
+* integrate better with logging: capturing now by default captures
+ test functions and their immediate setup/teardown in a single stream
+
+* capsys and capfd funcargs now have a readouterr() and a close() method
+ (underlyingly py.io.StdCapture/FD objects are used which grew a
+ readouterr() method as well to return snapshots of captured out/err)
+
+* make assert-reinterpretation work better with comparisons not
+  returning bools (reported with numpy, thanks Maciej Fijalkowski)
+
+* reworked per-test output capturing into the pytest_iocapture.py plugin
+ and thus removed capturing code from config object
+
+* item.repr_failure(excinfo) instead of item.repr_failure(excinfo, outerr)
+
+
+1.0.0b8 (2009-07-22)
+====================
+
+* pytest_unittest-plugin is now enabled by default
+
+* introduced pytest_keyboardinterrupt hook and
+  refined the pytest_sessionfinish hook, added tests.
+
+* workaround a buggy logging module interaction ("closing already closed
+ files"). Thanks to Sridhar Ratnakumar for triggering.
+
+* if plugins use "py.test.importorskip" for importing
+ a dependency only a warning will be issued instead
+ of exiting the testing process.
+
+* many improvements to docs:
+  - refined funcargs doc, use the term "factory" instead of "provider"
+ - added a new talk/tutorial doc page
+ - better download page
+ - better plugin docstrings
+ - added new plugins page and automatic doc generation script
+
+* fixed teardown problem related to partially failing funcarg setups
+ (thanks MrTopf for reporting), "pytest_runtest_teardown" is now
+ always invoked even if the "pytest_runtest_setup" failed.
+
+* tweaked doctest output for docstrings in py modules,
+ thanks Radomir.
+
+1.0.0b7
+=======
+
+* renamed py.test.xfail back to py.test.mark.xfail to avoid
+ two ways to decorate for xfail
+
+* re-added py.test.mark decorator for setting keywords on functions
+ (it was actually documented so removing it was not nice)
+
+* remove scope-argument from request.addfinalizer() because
+ request.cached_setup has the scope arg. TOOWTDI.
+
+* perform setup finalization before reporting failures
+
+* apply modified patches from Andreas Kloeckner to allow
+ test functions to have no func_code (#22) and to make
+ "-k" and function keywords work (#20)
+
+* apply patch from Daniel Peolzleithner (issue #23)
+
+* resolve issue #18, multiprocessing.Manager() and
+ redirection clash
+
+* make __name__ == "__channelexec__" for remote_exec code
+
+1.0.0b3 (2009-06-19)
+====================
+
+* plugin classes are removed: one now defines
+ hooks directly in conftest.py or global pytest_*.py
+ files.
+
+* added new pytest_namespace(config) hook that allows
+ to inject helpers directly to the py.test.* namespace.
+
+* documented and refined many hooks
+
+* added new style of generative tests via
+ pytest_generate_tests hook that integrates
+ well with function arguments.
+
+
+1.0.0b1
+=======
+
+* introduced new "funcarg" setup method,
+ see doc/test/funcarg.txt
+
+* introduced plugin architecture and many
+ new py.test plugins, see
+ doc/test/plugins.txt
+
+* teardown_method is now guaranteed to get
+ called after a test method has run.
+
+* new method: py.test.importorskip(mod,minversion)
+ will either import or call py.test.skip()
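+
+  For example (module name and minimum version are illustrative)::
+
+      import py
+
+      docutils = py.test.importorskip("docutils", minversion="0.3")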
+
+* completely revised internal py.test architecture
+
+* new py.process.ForkedFunc object allowing to
+ fork execution of a function to a sub process
+ and getting a result back.
+
+XXX lots of things missing here XXX
+
+0.9.2
+=====
+
+* refined installation and metadata, created new setup.py,
+ now based on setuptools/ez_setup (thanks to Ralf Schmitt
+ for his support).
+
+* improved the way of making py.* scripts available in
+ windows environments, they are now added to the
+ Scripts directory as ".cmd" files.
+
+* py.path.svnwc.status() now is more complete and
+ uses xml output from the 'svn' command if available
+ (Guido Wesdorp)
+
+* fix for py.path.svn* to work with svn 1.5
+ (Chris Lamb)
+
+* fix path.relto(otherpath) method on windows to
+ use normcase for checking if a path is relative.
+
+* py.test's traceback is better parseable from editors
+ (follows the filenames:LINENO: MSG convention)
+ (thanks to Osmo Salomaa)
+
+* fix to javascript-generation, "py.test --runbrowser"
+ should work more reliably now
+
+* removed previously accidentally added
+ py.test.broken and py.test.notimplemented helpers.
+
+* there now is a py.__version__ attribute
+
+0.9.1
+=====
+
+This is a fairly complete list of v0.9.1, which can
+serve as a reference for developers.
+
+* allowing + signs in py.path.svn urls [39106]
+* fixed support for Failed exceptions without excinfo in py.test [39340]
+* added support for killing processes for Windows (as well as platforms that
+ support os.kill) in py.misc.killproc [39655]
+* added setup/teardown for generative tests to py.test [40702]
+* added detection of FAILED TO LOAD MODULE to py.test [40703, 40738, 40739]
+* fixed problem with calling .remove() on wcpaths of non-versioned files in
+ py.path [44248]
+* fixed some import and inheritance issues in py.test [41480, 44648, 44655]
+* fail to run greenlet tests when pypy is available, but without stackless
+ [45294]
+* small fixes in rsession tests [45295]
+* fixed issue with 2.5 type representations in py.test [45483, 45484]
+* made that internal reporting issues displaying is done atomically in py.test
+ [45518]
+* made that non-existing files are ignored by the py.lookup script [45519]
+* improved exception name creation in py.test [45535]
+* made that less threads are used in execnet [merge in 45539]
+* removed lock required for atomic reporting issue displaying in py.test
+ [45545]
+* removed globals from execnet [45541, 45547]
+* refactored cleanup mechanics, made that setDaemon is set to 1 to make atexit
+ get called in 2.5 (py.execnet) [45548]
+* fixed bug in joining threads in py.execnet's servemain [45549]
+* refactored py.test.rsession tests to not rely on exact output format anymore
+ [45646]
+* using repr() on test outcome [45647]
+* added 'Reason' classes for py.test.skip() [45648, 45649]
+* killed some unnecessary sanity check in py.test.collect [45655]
+* avoid using os.tmpfile() in py.io.fdcapture because on Windows it's only
+ usable by Administrators [45901]
+* added support for locking and non-recursive commits to py.path.svnwc [45994]
+* locking files in py.execnet to prevent CPython from segfaulting [46010]
+* added export() method to py.path.svnurl
+* fixed -d -x in py.test [47277]
+* fixed argument concatenation problem in py.path.svnwc [49423]
+* restore py.test behaviour that it exits with code 1 when there are failures
+ [49974]
+* don't fail on html files that don't have an accompanying .txt file [50606]
+* fixed 'utestconvert.py < input' [50645]
+* small fix for code indentation in py.code.source [50755]
+* fix _docgen.py documentation building [51285]
+* improved checks for source representation of code blocks in py.test [51292]
+* added support for passing authentication to py.path.svn* objects [52000,
+ 52001]
+* removed sorted() call for py.apigen tests in favour of [].sort() to support
+ Python 2.3 [52481]
diff --git a/third_party/python/pytest/CONTRIBUTING.rst b/third_party/python/pytest/CONTRIBUTING.rst
new file mode 100644
index 0000000000..c005c2fb25
--- /dev/null
+++ b/third_party/python/pytest/CONTRIBUTING.rst
@@ -0,0 +1,294 @@
+============================
+Contribution getting started
+============================
+
+Contributions are highly welcomed and appreciated. Every little help counts,
+so do not hesitate!
+
+.. contents:: Contribution links
+ :depth: 2
+
+
+.. _submitfeedback:
+
+Feature requests and feedback
+-----------------------------
+
+Do you like pytest? Share some love on Twitter or in your blog posts!
+
+We'd also like to hear about your propositions and suggestions. Feel free to
+`submit them as issues <https://github.com/pytest-dev/pytest/issues>`_ and:
+
+* Explain in detail how they should work.
+* Keep the scope as narrow as possible. This will make it easier to implement.
+
+
+.. _reportbugs:
+
+Report bugs
+-----------
+
+Report bugs for pytest in the `issue tracker <https://github.com/pytest-dev/pytest/issues>`_.
+
+If you are reporting a bug, please include:
+
+* Your operating system name and version.
+* Any details about your local setup that might be helpful in troubleshooting,
+ specifically the Python interpreter version, installed libraries, and pytest
+ version.
+* Detailed steps to reproduce the bug.
+
+If you can write a demonstration test that currently fails but should pass
+(xfail), that is a very useful commit to make as well, even if you cannot
+fix the bug itself.
+
+
+.. _fixbugs:
+
+Fix bugs
+--------
+
+Look through the `GitHub issues for bugs <https://github.com/pytest-dev/pytest/labels/type:%20bug>`_.
+
+:ref:`Talk <contact>` to developers to find out how you can fix specific bugs.
+
+Don't forget to check the issue trackers of your favourite plugins, too!
+
+.. _writeplugins:
+
+Implement features
+------------------
+
+Look through the `GitHub issues for enhancements <https://github.com/pytest-dev/pytest/labels/type:%20enhancement>`_.
+
+:ref:`Talk <contact>` to developers to find out how you can implement specific
+features.
+
+Write documentation
+-------------------
+
+Pytest could always use more documentation. What exactly is needed?
+
+* More complementary documentation. Have you perhaps found something unclear?
+* Documentation translations. We currently have only English.
+* Docstrings. There can never be too many of them.
+* Blog posts, articles and such -- they're all very appreciated.
+
+You can also edit documentation files directly in the GitHub web interface,
+without using a local copy. This can be convenient for small fixes.
+
+.. note::
+ Build the documentation locally with the following command:
+
+ .. code:: bash
+
+ $ tox -e docs
+
+ The built documentation should be available in the ``doc/en/_build/``.
+
+ Where 'en' refers to the documentation language.
+
+.. _submitplugin:
+
+Submitting Plugins to pytest-dev
+--------------------------------
+
+Development of the pytest core, some plugins and support code happens
+in repositories living under the ``pytest-dev`` organisations:
+
+- `pytest-dev on GitHub <https://github.com/pytest-dev>`_
+
+- `pytest-dev on Bitbucket <https://bitbucket.org/pytest-dev>`_
+
+All pytest-dev Contributors team members have write access to all contained
+repositories. Pytest core and plugins are generally developed
+using `pull requests`_ to respective repositories.
+
+The objectives of the ``pytest-dev`` organisation are:
+
+* Having a central location for popular pytest plugins
+* Sharing some of the maintenance responsibility (in case a maintainer no
+ longer wishes to maintain a plugin)
+
+You can submit your plugin by subscribing to the `pytest-dev mailing list
+<https://mail.python.org/mailman/listinfo/pytest-dev>`_ and writing a
+mail pointing to your existing pytest plugin repository, which must have
+the following:
+
+- PyPI presence with a ``setup.py`` that contains a license, ``pytest-``
+ prefixed name, version number, authors, short and long description.
+
+- a ``tox.ini`` for running tests using `tox <https://tox.readthedocs.io>`_.
+
+- a ``README.txt`` describing how to use the plugin and on which
+ platforms it runs.
+
+- a ``LICENSE.txt`` file or equivalent containing the licensing
+ information, with matching info in ``setup.py``.
+
+- an issue tracker for bug reports and enhancement requests.
+
+- a `changelog <http://keepachangelog.com/>`_
+
+If no contributor strongly objects and two agree, the repository can then be
+transferred to the ``pytest-dev`` organisation.
+
+Here's a rundown of how a repository transfer usually proceeds
+(using a repository named ``joedoe/pytest-xyz`` as an example):
+
+* ``joedoe`` transfers repository ownership to ``pytest-dev`` administrator ``calvin``.
+* ``calvin`` creates ``pytest-xyz-admin`` and ``pytest-xyz-developers`` teams, inviting ``joedoe`` to both as **maintainer**.
+* ``calvin`` transfers repository to ``pytest-dev`` and configures team access:
+
+ - ``pytest-xyz-admin`` **admin** access;
+ - ``pytest-xyz-developers`` **write** access;
+
+The ``pytest-dev/Contributors`` team has write access to all projects, and
+every project administrator is in it. We recommend that each plugin has at least three
+people who have the right to release to PyPI.
+
+Repository owners can rest assured that no ``pytest-dev`` administrator will ever make
+releases of their repository or take ownership in any way, except in rare cases
+where someone becomes unresponsive after months of contact attempts.
+As stated, the objective is to share maintenance and avoid "plugin-abandon".
+
+
+.. _`pull requests`:
+.. _pull-requests:
+
+Preparing Pull Requests
+-----------------------
+
+Short version
+~~~~~~~~~~~~~
+
+#. Fork the repository.
+#. Enable and install `pre-commit <https://pre-commit.com>`_ to ensure style-guides and code checks are followed.
+#. Target ``master`` for bugfixes and doc changes.
+#. Target ``features`` for new features or functionality changes.
+#. Follow **PEP-8** for naming and `black <https://github.com/ambv/black>`_ for formatting.
+#. Tests are run using ``tox``::
+
+ tox -e linting,py27,py36
+
+ The test environments above are usually enough to cover most cases locally.
+
+#. Write a ``changelog`` entry: ``changelog/2574.bugfix``, use issue id number
+ and one of ``bugfix``, ``removal``, ``feature``, ``vendor``, ``doc`` or
+ ``trivial`` for the issue type.
+#. Unless your change is trivial or a documentation fix (e.g., a typo or reword of a small section), please
+   add yourself to the ``AUTHORS`` file, in alphabetical order.
+
+
+Long version
+~~~~~~~~~~~~
+
+What is a "pull request"? It informs the project's core developers about the
+changes you would like them to review and merge. Pull requests are stored on
+`GitHub servers <https://github.com/pytest-dev/pytest/pulls>`_.
+Once you send a pull request, we can discuss its potential modifications and
+even add more commits to it later on. There's an excellent tutorial on how Pull
+Requests work in the
+`GitHub Help Center <https://help.github.com/articles/using-pull-requests/>`_.
+
+Here is a simple overview, with pytest-specific bits:
+
+#. Fork the
+ `pytest GitHub repository <https://github.com/pytest-dev/pytest>`__. It's
+ fine to use ``pytest`` as your fork repository name because it will live
+ under your user.
+
+#. Clone your fork locally using `git <https://git-scm.com/>`_ and create a branch::
+
+ $ git clone git@github.com:YOUR_GITHUB_USERNAME/pytest.git
+ $ cd pytest
+ # now, to fix a bug create your own branch off "master":
+
+ $ git checkout -b your-bugfix-branch-name master
+
+ # or to instead add a feature create your own branch off "features":
+
+ $ git checkout -b your-feature-branch-name features
+
+ Given we have "major.minor.micro" version numbers, bugfixes will usually
+ be released in micro releases whereas features will be released in
+ minor releases and incompatible changes in major releases.
+
+ If you need some help with Git, follow this quick start
+ guide: https://git.wiki.kernel.org/index.php/QuickStart
+
+#. Install `pre-commit <https://pre-commit.com>`_ and its hook on the pytest repo::
+
+ $ pip install --user pre-commit
+ $ pre-commit install
+
+ Afterwards ``pre-commit`` will run whenever you commit.
+
+   https://pre-commit.com/ is a framework for managing and maintaining multi-language pre-commit hooks
+   to keep code style and formatting consistent.
+
+#. Install tox
+
+   Tox is used to run all the tests and will automatically set up virtualenvs
+   to run the tests in (it will implicitly use http://www.virtualenv.org/en/latest/)::
+
+ $ pip install tox
+
+#. Run all the tests
+
+ You need to have Python 2.7 and 3.6 available in your system. Now
+ running tests is as simple as issuing this command::
+
+ $ tox -e linting,py27,py36
+
+ This command will run tests via the "tox" tool against Python 2.7 and 3.6
+ and also perform "lint" coding-style checks.
+
+#. You can now edit your local working copy and run the tests again as necessary. Please follow PEP-8 for naming.
+
+   You can pass different options to ``tox``. For example, to run tests on Python 2.7 and pass options
+   (e.g. enter pdb on failure) to pytest you can do::
+
+ $ tox -e py27 -- --pdb
+
+ Or to only run tests in a particular test module on Python 3.6::
+
+ $ tox -e py36 -- testing/test_config.py
+
+
+ When committing, ``pre-commit`` will re-format the files if necessary.
+
+#. Commit and push once your tests pass and you are happy with your change(s)::
+
+ $ git commit -a -m "<commit message>"
+ $ git push -u
+
+#. Create a new changelog entry in ``changelog``. The file should be named ``<issueid>.<type>``,
+ where *issueid* is the number of the issue related to the change and *type* is one of
+ ``bugfix``, ``removal``, ``feature``, ``vendor``, ``doc`` or ``trivial``.
+
+#. Add yourself to ``AUTHORS`` file if not there yet, in alphabetical order.
+
+#. Finally, submit a pull request through the GitHub website using this data::
+
+ head-fork: YOUR_GITHUB_USERNAME/pytest
+ compare: your-branch-name
+
+ base-fork: pytest-dev/pytest
+ base: master # if it's a bugfix
+ base: features # if it's a feature
+
+
+Joining the Development Team
+----------------------------
+
+Anyone who has successfully seen through a pull request which did not
+require any extra work from the development team to merge will
+themselves gain commit access if they so wish (if we forget to ask please send a friendly
+reminder). This does not change your workflow for contributing:
+everyone goes through the same pull-request-and-review process, and
+no-one merges their own pull requests unless they have already been approved. It does however mean you can
+participate in the development process more fully since you can merge
+pull requests from other contributors yourself after having reviewed
+them.
diff --git a/third_party/python/pytest/HOWTORELEASE.rst b/third_party/python/pytest/HOWTORELEASE.rst
new file mode 100644
index 0000000000..97bddf7202
--- /dev/null
+++ b/third_party/python/pytest/HOWTORELEASE.rst
@@ -0,0 +1,49 @@
+Release Procedure
+-----------------
+
+Our current policy for releasing is to aim for a bugfix release every few weeks and a minor release every 2-3 months. The idea
+is to get fixes and new features out instead of trying to cram a ton of features into a release and, as a consequence,
+taking a lot of time to make a new one.
+
+.. important::
+
+   pytest releases must be prepared on **Linux** because the docs and examples expect
+   to be executed on that platform.
+
+#. Install development dependencies in a virtual environment with::
+
+ pip3 install -U -r tasks/requirements.txt
+
+#. Create a branch ``release-X.Y.Z`` with the version for the release.
+
+ * **patch releases**: from the latest ``master``;
+
+ * **minor releases**: from the latest ``features``; then merge with the latest ``master``;
+
+   Ensure you are in a clean work tree.
+
+#. Generate docs, changelog, announcements and a **local** tag::
+
+ invoke generate.pre-release <VERSION>
+
+#. Open a PR for this branch targeting ``master``.
+
+#. After all tests pass and the PR has been approved, publish to PyPI by pushing the tag::
+
+ git push git@github.com:pytest-dev/pytest.git <VERSION>
+
+ Wait for the deploy to complete, then make sure it is `available on PyPI <https://pypi.org/project/pytest>`_.
+
+#. Send an email announcement with the contents from::
+
+ doc/en/announce/release-<VERSION>.rst
+
+ To the following mailing lists:
+
+ * pytest-dev@python.org (all releases)
+ * python-announce-list@python.org (all releases)
+ * testing-in-python@lists.idyll.org (only major/minor releases)
+
+ And announce it on `Twitter <https://twitter.com/>`_ with the ``#pytest`` hashtag.
+
+#. After a minor/major release, merge ``release-X.Y.Z`` into ``master`` and push (or open a PR).
diff --git a/third_party/python/pytest/LICENSE b/third_party/python/pytest/LICENSE
new file mode 100644
index 0000000000..629df45ac4
--- /dev/null
+++ b/third_party/python/pytest/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2004-2017 Holger Krekel and others
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/python/pytest/PKG-INFO b/third_party/python/pytest/PKG-INFO
new file mode 100644
index 0000000000..1c50ee34df
--- /dev/null
+++ b/third_party/python/pytest/PKG-INFO
@@ -0,0 +1,149 @@
+Metadata-Version: 1.2
+Name: pytest
+Version: 3.6.2
+Summary: pytest: simple powerful testing with Python
+Home-page: http://pytest.org
+Author: Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, Floris Bruynooghe, Brianna Laugher, Florian Bruhin and others
+License: MIT license
+Project-URL: Source, https://github.com/pytest-dev/pytest
+Project-URL: Tracker, https://github.com/pytest-dev/pytest/issues
+Description: .. image:: http://docs.pytest.org/en/latest/_static/pytest1.png
+ :target: http://docs.pytest.org
+ :align: center
+ :alt: pytest
+
+ ------
+
+ .. image:: https://img.shields.io/pypi/v/pytest.svg
+ :target: https://pypi.org/project/pytest/
+
+ .. image:: https://img.shields.io/conda/vn/conda-forge/pytest.svg
+ :target: https://anaconda.org/conda-forge/pytest
+
+ .. image:: https://img.shields.io/pypi/pyversions/pytest.svg
+ :target: https://pypi.org/project/pytest/
+
+ .. image:: https://img.shields.io/coveralls/pytest-dev/pytest/master.svg
+ :target: https://coveralls.io/r/pytest-dev/pytest
+
+ .. image:: https://travis-ci.org/pytest-dev/pytest.svg?branch=master
+ :target: https://travis-ci.org/pytest-dev/pytest
+
+ .. image:: https://ci.appveyor.com/api/projects/status/mrgbjaua7t33pg6b?svg=true
+ :target: https://ci.appveyor.com/project/pytestbot/pytest
+
+ .. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+
+ .. image:: https://www.codetriage.com/pytest-dev/pytest/badges/users.svg
+ :target: https://www.codetriage.com/pytest-dev/pytest
+
+ The ``pytest`` framework makes it easy to write small tests, yet
+ scales to support complex functional testing for applications and libraries.
+
+ An example of a simple test:
+
+ .. code-block:: python
+
+ # content of test_sample.py
+ def inc(x):
+ return x + 1
+
+
+ def test_answer():
+ assert inc(3) == 5
+
+
+ To execute it::
+
+ $ pytest
+ ============================= test session starts =============================
+ collected 1 items
+
+ test_sample.py F
+
+ ================================== FAILURES ===================================
+ _________________________________ test_answer _________________________________
+
+ def test_answer():
+ > assert inc(3) == 5
+ E assert 4 == 5
+ E + where 4 = inc(3)
+
+ test_sample.py:5: AssertionError
+ ========================== 1 failed in 0.04 seconds ===========================
+
+
+ Due to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started <http://docs.pytest.org/en/latest/getting-started.html#our-first-test-run>`_ for more examples.
+
+
+ Features
+ --------
+
+ - Detailed info on failing `assert statements <http://docs.pytest.org/en/latest/assert.html>`_ (no need to remember ``self.assert*`` names);
+
+ - `Auto-discovery
+ <http://docs.pytest.org/en/latest/goodpractices.html#python-test-discovery>`_
+ of test modules and functions;
+
+ - `Modular fixtures <http://docs.pytest.org/en/latest/fixture.html>`_ for
+ managing small or parametrized long-lived test resources;
+
+ - Can run `unittest <http://docs.pytest.org/en/latest/unittest.html>`_ (or trial),
+ `nose <http://docs.pytest.org/en/latest/nose.html>`_ test suites out of the box;
+
+ - Python 2.7, Python 3.4+, PyPy 2.3, Jython 2.5 (untested);
+
+        - Rich plugin architecture, with 315+ `external plugins <http://plugincompat.herokuapp.com>`_ and a thriving community;
+
+
+ Documentation
+ -------------
+
+ For full documentation, including installation, tutorials and PDF documents, please see http://docs.pytest.org.
+
+
+ Bugs/Requests
+ -------------
+
+ Please use the `GitHub issue tracker <https://github.com/pytest-dev/pytest/issues>`_ to submit bugs or request features.
+
+
+ Changelog
+ ---------
+
+ Consult the `Changelog <http://docs.pytest.org/en/latest/changelog.html>`__ page for fixes and enhancements of each version.
+
+
+ License
+ -------
+
+ Copyright Holger Krekel and others, 2004-2017.
+
+ Distributed under the terms of the `MIT`_ license, pytest is free and open source software.
+
+ .. _`MIT`: https://github.com/pytest-dev/pytest/blob/master/LICENSE
+
+Keywords: test unittest
+Platform: unix
+Platform: linux
+Platform: osx
+Platform: cygwin
+Platform: win32
+Classifier: Development Status :: 6 - Mature
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Topic :: Software Development :: Testing
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
diff --git a/third_party/python/pytest/README.rst b/third_party/python/pytest/README.rst
new file mode 100644
index 0000000000..564ffff6c9
--- /dev/null
+++ b/third_party/python/pytest/README.rst
@@ -0,0 +1,116 @@
+.. image:: http://docs.pytest.org/en/latest/_static/pytest1.png
+ :target: http://docs.pytest.org
+ :align: center
+ :alt: pytest
+
+------
+
+.. image:: https://img.shields.io/pypi/v/pytest.svg
+ :target: https://pypi.org/project/pytest/
+
+.. image:: https://img.shields.io/conda/vn/conda-forge/pytest.svg
+ :target: https://anaconda.org/conda-forge/pytest
+
+.. image:: https://img.shields.io/pypi/pyversions/pytest.svg
+ :target: https://pypi.org/project/pytest/
+
+.. image:: https://img.shields.io/coveralls/pytest-dev/pytest/master.svg
+ :target: https://coveralls.io/r/pytest-dev/pytest
+
+.. image:: https://travis-ci.org/pytest-dev/pytest.svg?branch=master
+ :target: https://travis-ci.org/pytest-dev/pytest
+
+.. image:: https://ci.appveyor.com/api/projects/status/mrgbjaua7t33pg6b?svg=true
+ :target: https://ci.appveyor.com/project/pytestbot/pytest
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+
+.. image:: https://www.codetriage.com/pytest-dev/pytest/badges/users.svg
+ :target: https://www.codetriage.com/pytest-dev/pytest
+
+The ``pytest`` framework makes it easy to write small tests, yet
+scales to support complex functional testing for applications and libraries.
+
+An example of a simple test:
+
+.. code-block:: python
+
+ # content of test_sample.py
+ def inc(x):
+ return x + 1
+
+
+ def test_answer():
+ assert inc(3) == 5
+
+
+To execute it::
+
+ $ pytest
+ ============================= test session starts =============================
+ collected 1 items
+
+ test_sample.py F
+
+ ================================== FAILURES ===================================
+ _________________________________ test_answer _________________________________
+
+ def test_answer():
+ > assert inc(3) == 5
+ E assert 4 == 5
+ E + where 4 = inc(3)
+
+ test_sample.py:5: AssertionError
+ ========================== 1 failed in 0.04 seconds ===========================
+
+
+Due to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started <http://docs.pytest.org/en/latest/getting-started.html#our-first-test-run>`_ for more examples.
+
+
+Features
+--------
+
+- Detailed info on failing `assert statements <http://docs.pytest.org/en/latest/assert.html>`_ (no need to remember ``self.assert*`` names);
+
+- `Auto-discovery
+ <http://docs.pytest.org/en/latest/goodpractices.html#python-test-discovery>`_
+ of test modules and functions;
+
+- `Modular fixtures <http://docs.pytest.org/en/latest/fixture.html>`_ for
+ managing small or parametrized long-lived test resources;
+
+- Can run `unittest <http://docs.pytest.org/en/latest/unittest.html>`_ (or trial),
+ `nose <http://docs.pytest.org/en/latest/nose.html>`_ test suites out of the box;
+
+- Python 2.7, Python 3.4+, PyPy 2.3, Jython 2.5 (untested);
+
+- Rich plugin architecture, with 315+ `external plugins <http://plugincompat.herokuapp.com>`_ and a thriving community;
+
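+A minimal sketch of a fixture-based test (the fixture and test names below are
+illustrative only, not taken from the pytest documentation):
+
+.. code-block:: python
+
+    import pytest
+
+
+    @pytest.fixture
+    def numbers():
+        return [1, 2, 3]
+
+
+    def test_sum(numbers):
+        assert sum(numbers) == 6
+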
+
+Documentation
+-------------
+
+For full documentation, including installation, tutorials and PDF documents, please see http://docs.pytest.org.
+
+
+Bugs/Requests
+-------------
+
+Please use the `GitHub issue tracker <https://github.com/pytest-dev/pytest/issues>`_ to submit bugs or request features.
+
+
+Changelog
+---------
+
+Consult the `Changelog <http://docs.pytest.org/en/latest/changelog.html>`__ page for fixes and enhancements of each version.
+
+
+License
+-------
+
+Copyright Holger Krekel and others, 2004-2017.
+
+Distributed under the terms of the `MIT`_ license, pytest is free and open source software.
+
+.. _`MIT`: https://github.com/pytest-dev/pytest/blob/master/LICENSE
diff --git a/third_party/python/pytest/appveyor.yml b/third_party/python/pytest/appveyor.yml
new file mode 100644
index 0000000000..b808fa6d91
--- /dev/null
+++ b/third_party/python/pytest/appveyor.yml
@@ -0,0 +1,48 @@
+environment:
+ COVERALLS_REPO_TOKEN:
+ secure: 2NJ5Ct55cHJ9WEg3xbSqCuv0rdgzzb6pnzOIG5OkMbTndw3wOBrXntWFoQrXiMFi
+ # this is pytest's token in coveralls.io, encrypted
+ # using pytestbot account as detailed here:
+ # https://www.appveyor.com/docs/build-configuration#secure-variables
+
+ matrix:
+ # coveralls is not in the default env list
+ - TOXENV: "coveralls"
+ # note: please use "tox --listenvs" to populate the build matrix below
+ - TOXENV: "linting"
+ - TOXENV: "py27"
+ - TOXENV: "py34"
+ - TOXENV: "py35"
+ - TOXENV: "py36"
+ - TOXENV: "pypy"
+ - TOXENV: "py27-pexpect"
+ - TOXENV: "py27-xdist"
+ - TOXENV: "py27-trial"
+ - TOXENV: "py27-numpy"
+ - TOXENV: "py27-pluggymaster"
+ - TOXENV: "py36-pexpect"
+ - TOXENV: "py36-xdist"
+ - TOXENV: "py36-trial"
+ - TOXENV: "py36-numpy"
+ - TOXENV: "py36-pluggymaster"
+ - TOXENV: "py27-nobyte"
+ - TOXENV: "doctesting"
+ - TOXENV: "py35-freeze"
+ - TOXENV: "docs"
+
+install:
+ - echo Installed Pythons
+ - dir c:\Python*
+
+ - if "%TOXENV%" == "pypy" call scripts\install-pypy.bat
+
+ - C:\Python36\python -m pip install --upgrade --pre tox
+
+build: false # Not a C# project, build stuff at the test step instead.
+
+test_script:
+ - call scripts\call-tox.bat
+
+cache:
+ - '%LOCALAPPDATA%\pip\cache'
+ - '%USERPROFILE%\.cache\pre-commit'
diff --git a/third_party/python/pytest/bench/bench.py b/third_party/python/pytest/bench/bench.py
new file mode 100644
index 0000000000..4e72444e7d
--- /dev/null
+++ b/third_party/python/pytest/bench/bench.py
@@ -0,0 +1,13 @@
+import sys
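+
+# Usage sketch (inferred from the code below; not documented upstream):
+#   python bench/bench.py [arguments passed to pytest.cmdline.main]
+# Falls back to profiling a run against "empty.py" when no arguments are
+# given; prints the top 500 entries sorted by cumulative time.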
+
+if __name__ == "__main__":
+ import cProfile
+ import pytest # NOQA
+ import pstats
+
+ script = sys.argv[1:] if len(sys.argv) > 1 else "empty.py"
+ stats = cProfile.run("pytest.cmdline.main(%r)" % script, "prof")
+ p = pstats.Stats("prof")
+ p.strip_dirs()
+ p.sort_stats("cumulative")
+ print(p.print_stats(500))
diff --git a/third_party/python/pytest/bench/bench_argcomplete.py b/third_party/python/pytest/bench/bench_argcomplete.py
new file mode 100644
index 0000000000..495e2c4ed3
--- /dev/null
+++ b/third_party/python/pytest/bench/bench_argcomplete.py
@@ -0,0 +1,22 @@
+
+
+# 10000 iterations, just for relative comparison
+# 2.7.5 3.3.2
+# FilesCompleter 75.1109 69.2116
+# FastFilesCompleter 0.7383 1.0760
+
+import timeit
+
+imports = [
+ "from argcomplete.completers import FilesCompleter as completer",
+ "from _pytest._argcomplete import FastFilesCompleter as completer",
+]
+
+count = 1000 # only a few seconds
+setup = "%s\nfc = completer()"
+run = 'fc("/d")'
+
+
+if __name__ == "__main__":
+ print(timeit.timeit(run, setup=setup % imports[0], number=count))
+ print((timeit.timeit(run, setup=setup % imports[1], number=count)))
diff --git a/third_party/python/pytest/bench/empty.py b/third_party/python/pytest/bench/empty.py
new file mode 100644
index 0000000000..b90319936b
--- /dev/null
+++ b/third_party/python/pytest/bench/empty.py
@@ -0,0 +1,4 @@
+import py
+
+for i in range(1000):
+ py.builtin.exec_("def test_func_%d(): pass" % i)
diff --git a/third_party/python/pytest/bench/manyparam.py b/third_party/python/pytest/bench/manyparam.py
new file mode 100644
index 0000000000..a25b098de8
--- /dev/null
+++ b/third_party/python/pytest/bench/manyparam.py
@@ -0,0 +1,15 @@
+
+import pytest
+
+
+@pytest.fixture(scope="module", params=range(966))
+def foo(request):
+ return request.param
+
+
+def test_it(foo):
+ pass
+
+
+def test_it2(foo):
+ pass
diff --git a/third_party/python/pytest/bench/skip.py b/third_party/python/pytest/bench/skip.py
new file mode 100644
index 0000000000..b105e79f82
--- /dev/null
+++ b/third_party/python/pytest/bench/skip.py
@@ -0,0 +1,11 @@
+from six.moves import range
+import pytest
+
+
+SKIP = True
+
+
+@pytest.mark.parametrize("x", range(5000))
+def test_foo(x):
+ if SKIP:
+ pytest.skip("heh")
diff --git a/third_party/python/pytest/changelog/README.rst b/third_party/python/pytest/changelog/README.rst
new file mode 100644
index 0000000000..e34bd4da26
--- /dev/null
+++ b/third_party/python/pytest/changelog/README.rst
@@ -0,0 +1,32 @@
+This directory contains "newsfragments" which are short files that contain a small **ReST**-formatted
+text that will be added to the next ``CHANGELOG``.
+
+The ``CHANGELOG`` will be read by users, so this description should be aimed at pytest users
+instead of describing internal changes which are only relevant to the developers.
+
+Make sure to use full sentences with correct case and punctuation, for example::
+
+ Fix issue with non-ascii messages from the ``warnings`` module.
+
+Each file should be named like ``<ISSUE>.<TYPE>.rst``, where
+``<ISSUE>`` is an issue number, and ``<TYPE>`` is one of:
+
+* ``feature``: new user facing features, like new command-line options and new behavior.
+* ``bugfix``: fixes a reported bug.
+* ``doc``: documentation improvement, like rewording an entire section or adding missing docs.
+* ``removal``: feature deprecation or removal.
+* ``vendor``: changes in packages vendored in pytest.
+* ``trivial``: fixing a small typo or internal change that might be noteworthy.
+
+So for example: ``123.feature.rst``, ``456.bugfix.rst``.
+
+If your PR fixes an issue, use that number here. If there is no issue,
+then after you submit the PR and get the PR number you can add a
+newsfragment using that number instead.
+
+If you are not sure what issue type to use, don't hesitate to ask in your PR.
+
+Note that the ``towncrier`` tool will automatically
+reflow your text, so it will work best if you stick to a single paragraph, but multiple sentences and links are OK
+and encouraged. You can install ``towncrier`` and then run ``towncrier --draft``
+if you want to get a preview of how your change will look in the final release notes.
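+
+For illustration only (the issue number and wording below are invented), a
+bugfix newsfragment saved as ``changelog/1234.bugfix.rst`` could contain a
+single sentence::
+
+    Fix crash when collecting tests from unreadable directories.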
diff --git a/third_party/python/pytest/changelog/_template.rst b/third_party/python/pytest/changelog/_template.rst
new file mode 100644
index 0000000000..a898abc15a
--- /dev/null
+++ b/third_party/python/pytest/changelog/_template.rst
@@ -0,0 +1,40 @@
+{% for section in sections %}
+{% set underline = "-" %}
+{% if section %}
+{{section}}
+{{ underline * section|length }}{% set underline = "~" %}
+
+{% endif %}
+{% if sections[section] %}
+{% for category, val in definitions.items() if category in sections[section] %}
+
+{{ definitions[category]['name'] }}
+{{ underline * definitions[category]['name']|length }}
+
+{% if definitions[category]['showcontent'] %}
+{% for text, values in sections[section][category]|dictsort(by='value') %}
+{% set issue_joiner = joiner(', ') %}
+- {{ text }}{% if category != 'vendor' %} ({% for value in values|sort %}{{ issue_joiner() }}`{{ value }} <https://github.com/pytest-dev/pytest/issues/{{ value[1:] }}>`_{% endfor %}){% endif %}
+
+
+{% endfor %}
+{% else %}
+- {{ sections[section][category]['']|sort|join(', ') }}
+
+
+{% endif %}
+{% if sections[section][category]|length == 0 %}
+
+No significant changes.
+
+
+{% else %}
+{% endif %}
+{% endfor %}
+{% else %}
+
+No significant changes.
+
+
+{% endif %}
+{% endfor %}
diff --git a/third_party/python/pytest/extra/get_issues.py b/third_party/python/pytest/extra/get_issues.py
new file mode 100644
index 0000000000..c026972b14
--- /dev/null
+++ b/third_party/python/pytest/extra/get_issues.py
@@ -0,0 +1,84 @@
+import json
+import py
+import requests
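+
+# Usage sketch (inferred from the argparse options at the bottom of this file):
+#   python extra/get_issues.py [--refresh] [--cache issues.json]
+# Fetches all pytest issues from the GitHub API, caches them as JSON, and
+# prints a short report of the open ones.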
+
+issues_url = "https://api.github.com/repos/pytest-dev/pytest/issues"
+
+
+def get_issues():
+ issues = []
+ url = issues_url
+ while 1:
+ get_data = {"state": "all"}
+ r = requests.get(url, params=get_data)
+ data = r.json()
+ if r.status_code == 403:
+ # API request limit exceeded
+ print(data["message"])
+ exit(1)
+ issues.extend(data)
+
+ # Look for next page
+ links = requests.utils.parse_header_links(r.headers["Link"])
+ another_page = False
+ for link in links:
+ if link["rel"] == "next":
+ url = link["url"]
+ another_page = True
+ if not another_page:
+ return issues
+
+
+def main(args):
+ cachefile = py.path.local(args.cache)
+ if not cachefile.exists() or args.refresh:
+ issues = get_issues()
+ cachefile.write(json.dumps(issues))
+ else:
+ issues = json.loads(cachefile.read())
+
+ open_issues = [x for x in issues if x["state"] == "open"]
+
+ open_issues.sort(key=lambda x: x["number"])
+ report(open_issues)
+
+
+def _get_kind(issue):
+ labels = [l["name"] for l in issue["labels"]]
+ for key in ("bug", "enhancement", "proposal"):
+ if key in labels:
+ return key
+ return "issue"
+
+
+def report(issues):
+ for issue in issues:
+ title = issue["title"]
+ # body = issue["body"]
+ kind = _get_kind(issue)
+ status = issue["state"]
+ number = issue["number"]
+ link = "https://github.com/pytest-dev/pytest/issues/%s/" % number
+ print("----")
+ print(status, kind, link)
+ print(title)
+ # print()
+ # lines = body.split("\n")
+ # print ("\n".join(lines[:3]))
+ # if len(lines) > 3 or len(body) > 240:
+ # print ("...")
+ print("\n\nFound %s open issues" % len(issues))
+
+
+if __name__ == "__main__":
+ import argparse
+
+    parser = argparse.ArgumentParser("process github issues")
+ parser.add_argument(
+ "--refresh", action="store_true", help="invalidate cache, refresh issues"
+ )
+ parser.add_argument(
+ "--cache", action="store", default="issues.json", help="cache file"
+ )
+ args = parser.parse_args()
+ main(args)
diff --git a/third_party/python/pytest/extra/setup-py.test/setup.py b/third_party/python/pytest/extra/setup-py.test/setup.py
new file mode 100644
index 0000000000..d0560ce1f5
--- /dev/null
+++ b/third_party/python/pytest/extra/setup-py.test/setup.py
@@ -0,0 +1,11 @@
+import sys
+from distutils.core import setup
+
+if __name__ == "__main__":
+ if "sdist" not in sys.argv[1:]:
+ raise ValueError("please use 'pytest' pypi package instead of 'py.test'")
+ setup(
+ name="py.test",
+ version="0.0",
+ description="please use 'pytest' for installation",
+ )
diff --git a/third_party/python/pytest/pyproject.toml b/third_party/python/pytest/pyproject.toml
new file mode 100644
index 0000000000..65e6bf59bf
--- /dev/null
+++ b/third_party/python/pytest/pyproject.toml
@@ -0,0 +1,43 @@
+[build-system]
+requires = [
+ "setuptools",
+ "setuptools-scm",
+ "wheel",
+]
+
+[tool.towncrier]
+package = "pytest"
+package_dir = "src"
+filename = "CHANGELOG.rst"
+directory = "changelog/"
+template = "changelog/_template.rst"
+
+ [[tool.towncrier.type]]
+ directory = "removal"
+ name = "Deprecations and Removals"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "feature"
+ name = "Features"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "bugfix"
+ name = "Bug Fixes"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "vendor"
+ name = "Vendored Libraries"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "doc"
+ name = "Improved Documentation"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "trivial"
+ name = "Trivial/Internal Changes"
+ showcontent = true
diff --git a/third_party/python/pytest/scripts/call-tox.bat b/third_party/python/pytest/scripts/call-tox.bat
new file mode 100644
index 0000000000..86fb25c1df
--- /dev/null
+++ b/third_party/python/pytest/scripts/call-tox.bat
@@ -0,0 +1,8 @@
+REM skip "coveralls" run in PRs or forks
+if "%TOXENV%" == "coveralls" (
+ if not defined COVERALLS_REPO_TOKEN (
+ echo skipping coveralls run because COVERALLS_REPO_TOKEN is not defined
+ exit /b 0
+ )
+)
+C:\Python36\python -m tox
diff --git a/third_party/python/pytest/scripts/install-pypy.bat b/third_party/python/pytest/scripts/install-pypy.bat
new file mode 100644
index 0000000000..8012ea46ac
--- /dev/null
+++ b/third_party/python/pytest/scripts/install-pypy.bat
@@ -0,0 +1,6 @@
+REM install pypy using choco
+REM redirect to a file because choco install python.pypy is too noisy. If the command fails, write output to console
+choco install python.pypy > pypy-inst.log 2>&1 || (type pypy-inst.log & exit /b 1)
+REM prepend pypy to PATH so tox can find it
+set PATH=C:\tools\pypy\pypy;%PATH%
+echo PyPy installed
+pypy --version
diff --git a/third_party/python/pytest/setup.cfg b/third_party/python/pytest/setup.cfg
new file mode 100644
index 0000000000..e52a837592
--- /dev/null
+++ b/third_party/python/pytest/setup.cfg
@@ -0,0 +1,25 @@
+[build_sphinx]
+source-dir = doc/en/
+build-dir = doc/build
+all_files = 1
+
+[upload_sphinx]
+upload-dir = doc/en/build/html
+
+[bdist_wheel]
+universal = 1
+
+[check-manifest]
+ignore =
+ _pytest/_version.py
+
+[metadata]
+license_file = LICENSE
+
+[devpi:upload]
+formats = sdist.tgz,bdist_wheel
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/pytest/setup.py b/third_party/python/pytest/setup.py
new file mode 100644
index 0000000000..3d60d6becf
--- /dev/null
+++ b/third_party/python/pytest/setup.py
@@ -0,0 +1,124 @@
+import os
+import sys
+import setuptools
+import pkg_resources
+from setuptools import setup
+
+classifiers = [
+ "Development Status :: 6 - Mature",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: POSIX",
+ "Operating System :: Microsoft :: Windows",
+ "Operating System :: MacOS :: MacOS X",
+ "Topic :: Software Development :: Testing",
+ "Topic :: Software Development :: Libraries",
+ "Topic :: Utilities",
+] + [
+ ("Programming Language :: Python :: %s" % x)
+ for x in "2 2.7 3 3.4 3.5 3.6 3.7".split()
+]
+
+with open("README.rst") as fd:
+ long_description = fd.read()
+
+
+def get_environment_marker_support_level():
+ """
+ Tests how well setuptools supports PEP-426 environment marker.
+
+ The first known release to support it is 0.7 (and the earliest on PyPI seems to be 0.7.2
+ so we're using that), see: https://setuptools.readthedocs.io/en/latest/history.html#id350
+
+ The support is later enhanced to allow direct conditional inclusions inside install_requires,
+    which is now recommended by setuptools. It first appeared in 36.2.0, broke in 36.2.1, and
+    has worked again since 36.2.2, so we're using that. See:
+ https://setuptools.readthedocs.io/en/latest/history.html#v36-2-2
+ https://github.com/pypa/setuptools/issues/1099
+
+ References:
+
+ * https://wheel.readthedocs.io/en/latest/index.html#defining-conditional-dependencies
+ * https://www.python.org/dev/peps/pep-0426/#environment-markers
+ * https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies
+ """
+ try:
+ version = pkg_resources.parse_version(setuptools.__version__)
+ if version >= pkg_resources.parse_version("36.2.2"):
+ return 2
+ if version >= pkg_resources.parse_version("0.7.2"):
+ return 1
+ except Exception as exc:
+ sys.stderr.write("Could not test setuptool's version: %s\n" % exc)
+
+ # as of testing on 2018-05-26 fedora was on version 37* and debian was on version 33+
+ # we should consider erroring on those
+ return 0
+
+
+def main():
+ extras_require = {}
+ install_requires = [
+ "py>=1.5.0",
+ "six>=1.10.0",
+ "setuptools",
+ "attrs>=17.4.0",
+ "more-itertools>=4.0.0",
+ "atomicwrites>=1.0",
+ ]
+ # if _PYTEST_SETUP_SKIP_PLUGGY_DEP is set, skip installing pluggy;
+ # used by tox.ini to test with pluggy master
+ if "_PYTEST_SETUP_SKIP_PLUGGY_DEP" not in os.environ:
+ install_requires.append("pluggy>=0.5,<0.7")
+ environment_marker_support_level = get_environment_marker_support_level()
+ if environment_marker_support_level >= 2:
+ install_requires.append('funcsigs;python_version<"3.0"')
+ install_requires.append('colorama;sys_platform=="win32"')
+ elif environment_marker_support_level == 1:
+ extras_require[':python_version<"3.0"'] = ["funcsigs"]
+ extras_require[':sys_platform=="win32"'] = ["colorama"]
+ else:
+ if sys.platform == "win32":
+ install_requires.append("colorama")
+ if sys.version_info < (3, 0):
+ install_requires.append("funcsigs")
+
+ setup(
+ name="pytest",
+ description="pytest: simple powerful testing with Python",
+ long_description=long_description,
+ use_scm_version={"write_to": "src/_pytest/_version.py"},
+ url="http://pytest.org",
+ project_urls={
+ "Source": "https://github.com/pytest-dev/pytest",
+ "Tracker": "https://github.com/pytest-dev/pytest/issues",
+ },
+ license="MIT license",
+ platforms=["unix", "linux", "osx", "cygwin", "win32"],
+ author=(
+ "Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, "
+ "Floris Bruynooghe, Brianna Laugher, Florian Bruhin and others"
+ ),
+ entry_points={"console_scripts": ["pytest=pytest:main", "py.test=pytest:main"]},
+ classifiers=classifiers,
+ keywords="test unittest",
+ # the following should be enabled for release
+ setup_requires=["setuptools-scm"],
+ package_dir={"": "src"},
+ python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
+ install_requires=install_requires,
+ extras_require=extras_require,
+ packages=[
+ "_pytest",
+ "_pytest.assertion",
+ "_pytest._code",
+ "_pytest.mark",
+ "_pytest.config",
+ ],
+ py_modules=["pytest"],
+ zip_safe=False,
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/third_party/python/pytest/src/_pytest/__init__.py b/third_party/python/pytest/src/_pytest/__init__.py
new file mode 100644
index 0000000000..46c7827ed5
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/__init__.py
@@ -0,0 +1,8 @@
+__all__ = ["__version__"]
+
+try:
+ from ._version import version as __version__
+except ImportError:
+ # broken installation, we don't even try
+    # unknown only works because we do a poor man's version compare
+ __version__ = "unknown"
diff --git a/third_party/python/pytest/src/_pytest/_argcomplete.py b/third_party/python/pytest/src/_pytest/_argcomplete.py
new file mode 100644
index 0000000000..8f480d71d7
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/_argcomplete.py
@@ -0,0 +1,107 @@
+
+"""allow bash-completion for argparse with argcomplete if installed
+needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail
+to find the magic string, so _ARGCOMPLETE env. var is never set, and
+this does not need special code.
+
+Function try_argcomplete(parser) should be called directly before
+the call to ArgumentParser.parse_args().
+
+The filescompleter is what you normally would use on the positional
+arguments specification, in order to get "dirname/" after "dirn<TAB>"
+instead of the default "dirname ":
+
+ optparser.add_argument(Config._file_or_dir, nargs='*'
+ ).completer=filescompleter
+
+Other, application specific, completers should go in the file
+doing the add_argument calls as they need to be specified as .completer
+attributes as well. (If argcomplete is not installed, the function the
+attribute points to will not be used).
+
+SPEEDUP
+=======
+The generic argcomplete script for bash-completion
+(/etc/bash_completion.d/python-argcomplete.sh )
+uses a python program to check whether the startup script was generated by pip.
+You can speed up completion somewhat by changing this script to include
+ # PYTHON_ARGCOMPLETE_OK
+so the python-argcomplete-check-easy-install-script does not
+need to be called to find the entry point of the code and see if that is
+marked with PYTHON_ARGCOMPLETE_OK
+
+INSTALL/DEBUGGING
+=================
+To include this support in another application that has setup.py generated
+scripts:
+- add the line:
+ # PYTHON_ARGCOMPLETE_OK
+ near the top of the main python entry point
+- include in the file calling parse_args():
+ from _argcomplete import try_argcomplete, filescompleter
+ , call try_argcomplete just before parse_args(), and optionally add
+ filescompleter to the positional arguments' add_argument()
+If things do not work right away:
+- switch on argcomplete debugging with (also helpful when doing custom
+ completers):
+ export _ARC_DEBUG=1
+- run:
+ python-argcomplete-check-easy-install-script $(which appname)
+ echo $?
+ will echo 0 if the magic line has been found, 1 if not
+- sometimes it helps to find early on errors using:
+ _ARGCOMPLETE=1 _ARC_DEBUG=1 appname
+ which should throw a KeyError: 'COMPLINE' (which is properly set by the
+ global argcomplete script).
+"""
+from __future__ import absolute_import, division, print_function
+import sys
+import os
+from glob import glob
+
+
+class FastFilesCompleter(object):
+ "Fast file completer class"
+
+ def __init__(self, directories=True):
+ self.directories = directories
+
+ def __call__(self, prefix, **kwargs):
+ """only called on non option completions"""
+ if os.path.sep in prefix[1:]:
+ prefix_dir = len(os.path.dirname(prefix) + os.path.sep)
+ else:
+ prefix_dir = 0
+ completion = []
+ globbed = []
+ if "*" not in prefix and "?" not in prefix:
+ # we are on unix, otherwise no bash
+ if not prefix or prefix[-1] == os.path.sep:
+ globbed.extend(glob(prefix + ".*"))
+ prefix += "*"
+ globbed.extend(glob(prefix))
+ for x in sorted(globbed):
+ if os.path.isdir(x):
+ x += "/"
+ # append stripping the prefix (like bash, not like compgen)
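+            # Hypothetical example: for prefix "tests/te" with "tests/test_x.py"
+            # on disk, this appends "test_x.py" -- only the part after the last
+            # path separator, which is how bash displays completions.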
+ completion.append(x[prefix_dir:])
+ return completion
+
+
+if os.environ.get("_ARGCOMPLETE"):
+ try:
+ import argcomplete.completers
+ except ImportError:
+ sys.exit(-1)
+ filescompleter = FastFilesCompleter()
+
+ def try_argcomplete(parser):
+ argcomplete.autocomplete(parser, always_complete_options=False)
+
+
+else:
+
+ def try_argcomplete(parser):
+ pass
+
+ filescompleter = None
diff --git a/third_party/python/pytest/src/_pytest/_code/__init__.py b/third_party/python/pytest/src/_pytest/_code/__init__.py
new file mode 100644
index 0000000000..815c13b42c
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/_code/__init__.py
@@ -0,0 +1,10 @@
+""" python inspection/code generation API """
+from __future__ import absolute_import, division, print_function
+from .code import Code # noqa
+from .code import ExceptionInfo # noqa
+from .code import Frame # noqa
+from .code import Traceback # noqa
+from .code import getrawcode # noqa
+from .source import Source # noqa
+from .source import compile_ as compile # noqa
+from .source import getfslineno # noqa
diff --git a/third_party/python/pytest/src/_pytest/_code/_py2traceback.py b/third_party/python/pytest/src/_pytest/_code/_py2traceback.py
new file mode 100644
index 0000000000..2dd100c33b
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/_code/_py2traceback.py
@@ -0,0 +1,89 @@
+# copied from python-2.7.3's traceback.py
+# CHANGES:
+# - some_str is replaced, trying to create unicode strings
+#
+from __future__ import absolute_import, division, print_function
+import types
+from six import text_type
+
+
+def format_exception_only(etype, value):
+ """Format the exception part of a traceback.
+
+ The arguments are the exception type and value such as given by
+ sys.last_type and sys.last_value. The return value is a list of
+ strings, each ending in a newline.
+
+ Normally, the list contains a single string; however, for
+ SyntaxError exceptions, it contains several lines that (when
+ printed) display detailed information about where the syntax
+ error occurred.
+
+ The message indicating which exception occurred is always the last
+ string in the list.
+
+ """
+
+ # An instance should not have a meaningful value parameter, but
+ # sometimes does, particularly for string exceptions, such as
+ # >>> raise string1, string2 # deprecated
+ #
+ # Clear these out first because issubtype(string1, SyntaxError)
+ # would throw another exception and mask the original problem.
+ if (
+ isinstance(etype, BaseException)
+ or isinstance(etype, types.InstanceType)
+ or etype is None
+ or type(etype) is str
+ ):
+ return [_format_final_exc_line(etype, value)]
+
+ stype = etype.__name__
+
+ if not issubclass(etype, SyntaxError):
+ return [_format_final_exc_line(stype, value)]
+
+ # It was a syntax error; show exactly where the problem was found.
+ lines = []
+ try:
+ msg, (filename, lineno, offset, badline) = value.args
+ except Exception:
+ pass
+ else:
+ filename = filename or "<string>"
+ lines.append(' File "%s", line %d\n' % (filename, lineno))
+ if badline is not None:
+ if isinstance(badline, bytes): # python 2 only
+ badline = badline.decode("utf-8", "replace")
+ lines.append(u" %s\n" % badline.strip())
+ if offset is not None:
+ caretspace = badline.rstrip("\n")[:offset].lstrip()
+ # non-space whitespace (likes tabs) must be kept for alignment
+ caretspace = ((c.isspace() and c or " ") for c in caretspace)
+ # only three spaces to account for offset1 == pos 0
+ lines.append(" %s^\n" % "".join(caretspace))
+ value = msg
+
+ lines.append(_format_final_exc_line(stype, value))
+ return lines
+
+
+def _format_final_exc_line(etype, value):
+    """Return a single line -- normal case for format_exception_only"""
+ valuestr = _some_str(value)
+ if value is None or not valuestr:
+ line = "%s\n" % etype
+ else:
+ line = "%s: %s\n" % (etype, valuestr)
+ return line
+
+
+def _some_str(value):
+ try:
+ return text_type(value)
+ except Exception:
+ try:
+ return str(value)
+ except Exception:
+ pass
+ return "<unprintable %s object>" % type(value).__name__
diff --git a/third_party/python/pytest/src/_pytest/_code/code.py b/third_party/python/pytest/src/_pytest/_code/code.py
new file mode 100644
index 0000000000..cb788c17f0
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/_code/code.py
@@ -0,0 +1,970 @@
+from __future__ import absolute_import, division, print_function
+import inspect
+import sys
+import traceback
+from inspect import CO_VARARGS, CO_VARKEYWORDS
+
+import attr
+import re
+from weakref import ref
+from _pytest.compat import _PY2, _PY3, PY35, safe_str
+from six import text_type
+import py
+
+builtin_repr = repr
+
+if _PY3:
+ from traceback import format_exception_only
+else:
+ from ._py2traceback import format_exception_only
+
+
+class Code(object):
+ """ wrapper around Python code objects """
+
+ def __init__(self, rawcode):
+ if not hasattr(rawcode, "co_filename"):
+ rawcode = getrawcode(rawcode)
+ try:
+ self.filename = rawcode.co_filename
+ self.firstlineno = rawcode.co_firstlineno - 1
+ self.name = rawcode.co_name
+ except AttributeError:
+ raise TypeError("not a code object: %r" % (rawcode,))
+ self.raw = rawcode
+
+ def __eq__(self, other):
+ return self.raw == other.raw
+
+ __hash__ = None
+
+ def __ne__(self, other):
+ return not self == other
+
+ @property
+ def path(self):
+ """ return a path object pointing to source code (note that it
+ might not point to an actually existing file). """
+ try:
+ p = py.path.local(self.raw.co_filename)
+ # maybe don't try this checking
+ if not p.check():
+ raise OSError("py.path check failed.")
+ except OSError:
+ # XXX maybe try harder like the weird logic
+ # in the standard lib [linecache.updatecache] does?
+ p = self.raw.co_filename
+
+ return p
+
+ @property
+ def fullsource(self):
+ """ return a _pytest._code.Source object for the full source file of the code
+ """
+ from _pytest._code import source
+
+ full, _ = source.findsource(self.raw)
+ return full
+
+ def source(self):
+ """ return a _pytest._code.Source object for the code object's source only
+ """
+ # return source only for that part of code
+ import _pytest._code
+
+ return _pytest._code.Source(self.raw)
+
+ def getargs(self, var=False):
+ """ return a tuple with the argument names for the code object
+
+ if 'var' is set True also return the names of the variable and
+ keyword arguments when present
+ """
+        # handy shortcut for getting args
+ raw = self.raw
+ argcount = raw.co_argcount
+ if var:
+ argcount += raw.co_flags & CO_VARARGS
+ argcount += raw.co_flags & CO_VARKEYWORDS
+ return raw.co_varnames[:argcount]
+
+
+class Frame(object):
+ """Wrapper around a Python frame holding f_locals and f_globals
+ in which expressions can be evaluated."""
+
+ def __init__(self, frame):
+ self.lineno = frame.f_lineno - 1
+ self.f_globals = frame.f_globals
+ self.f_locals = frame.f_locals
+ self.raw = frame
+ self.code = Code(frame.f_code)
+
+ @property
+ def statement(self):
+ """ statement this frame is at """
+ import _pytest._code
+
+ if self.code.fullsource is None:
+ return _pytest._code.Source("")
+ return self.code.fullsource.getstatement(self.lineno)
+
+ def eval(self, code, **vars):
+ """ evaluate 'code' in the frame
+
+ 'vars' are optional additional local variables
+
+ returns the result of the evaluation
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ return eval(code, self.f_globals, f_locals)
+
+ def exec_(self, code, **vars):
+ """ exec 'code' in the frame
+
+        'vars' are optional additional local variables
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ py.builtin.exec_(code, self.f_globals, f_locals)
+
+ def repr(self, object):
+ """ return a 'safe' (non-recursive, one-line) string repr for 'object'
+ """
+ return py.io.saferepr(object)
+
+ def is_true(self, object):
+ return object
+
+ def getargs(self, var=False):
+ """ return a list of tuples (name, value) for all arguments
+
+ if 'var' is set True also include the variable and keyword
+ arguments when present
+ """
+ retval = []
+ for arg in self.code.getargs(var):
+ try:
+ retval.append((arg, self.f_locals[arg]))
+ except KeyError:
+ pass # this can occur when using Psyco
+ return retval
+
+
+class TracebackEntry(object):
+ """ a single entry in a traceback """
+
+ _repr_style = None
+ exprinfo = None
+
+ def __init__(self, rawentry, excinfo=None):
+ self._excinfo = excinfo
+ self._rawentry = rawentry
+ self.lineno = rawentry.tb_lineno - 1
+
+ def set_repr_style(self, mode):
+ assert mode in ("short", "long")
+ self._repr_style = mode
+
+ @property
+ def frame(self):
+ import _pytest._code
+
+ return _pytest._code.Frame(self._rawentry.tb_frame)
+
+ @property
+ def relline(self):
+ return self.lineno - self.frame.code.firstlineno
+
+ def __repr__(self):
+ return "<TracebackEntry %s:%d>" % (self.frame.code.path, self.lineno + 1)
+
+ @property
+ def statement(self):
+ """ _pytest._code.Source object for the current statement """
+ source = self.frame.code.fullsource
+ return source.getstatement(self.lineno)
+
+ @property
+ def path(self):
+ """ path to the source code """
+ return self.frame.code.path
+
+ def getlocals(self):
+ return self.frame.f_locals
+
+    locals = property(getlocals, None, None, "locals of underlying frame")
+
+ def getfirstlinesource(self):
+ # on Jython this firstlineno can be -1 apparently
+ return max(self.frame.code.firstlineno, 0)
+
+ def getsource(self, astcache=None):
+ """ return failing source code. """
+ # we use the passed in astcache to not reparse asttrees
+ # within exception info printing
+ from _pytest._code.source import getstatementrange_ast
+
+ source = self.frame.code.fullsource
+ if source is None:
+ return None
+ key = astnode = None
+ if astcache is not None:
+ key = self.frame.code.path
+ if key is not None:
+ astnode = astcache.get(key, None)
+ start = self.getfirstlinesource()
+ try:
+ astnode, _, end = getstatementrange_ast(
+ self.lineno, source, astnode=astnode
+ )
+ except SyntaxError:
+ end = self.lineno + 1
+ else:
+ if key is not None:
+ astcache[key] = astnode
+ return source[start:end]
+
+ source = property(getsource)
+
+ def ishidden(self):
+ """ return True if the current frame has a var __tracebackhide__
+ resolving to True
+
+ If __tracebackhide__ is a callable, it gets called with the
+ ExceptionInfo instance and can decide whether to hide the traceback.
+
+ mostly for internal use
+ """
+ try:
+ tbh = self.frame.f_locals["__tracebackhide__"]
+ except KeyError:
+ try:
+ tbh = self.frame.f_globals["__tracebackhide__"]
+ except KeyError:
+ return False
+
+ if callable(tbh):
+ return tbh(None if self._excinfo is None else self._excinfo())
+ else:
+ return tbh
+
+ def __str__(self):
+ try:
+ fn = str(self.path)
+ except py.error.Error:
+ fn = "???"
+ name = self.frame.code.name
+ try:
+ line = str(self.statement).lstrip()
+ except KeyboardInterrupt:
+ raise
+ except: # noqa
+ line = "???"
+ return " File %r:%d in %s\n %s\n" % (fn, self.lineno + 1, name, line)
+
+ def name(self):
+ return self.frame.code.raw.co_name
+
+    name = property(name, None, None, "co_name of underlying code")
+
+
+class Traceback(list):
+ """ Traceback objects encapsulate and offer higher level
+ access to Traceback entries.
+ """
+ Entry = TracebackEntry
+
+ def __init__(self, tb, excinfo=None):
+ """ initialize from given python traceback object and ExceptionInfo """
+ self._excinfo = excinfo
+ if hasattr(tb, "tb_next"):
+
+ def f(cur):
+ while cur is not None:
+ yield self.Entry(cur, excinfo=excinfo)
+ cur = cur.tb_next
+
+ list.__init__(self, f(tb))
+ else:
+ list.__init__(self, tb)
+
+ def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
+ """ return a Traceback instance wrapping part of this Traceback
+
+        by providing any combination of path, lineno and firstlineno, the
+ first frame to start the to-be-returned traceback is determined
+
+ this allows cutting the first part of a Traceback instance e.g.
+ for formatting reasons (removing some uninteresting bits that deal
+ with handling of the exception/traceback)
+ """
+ for x in self:
+ code = x.frame.code
+ codepath = code.path
+ if (
+ (path is None or codepath == path)
+ and (
+ excludepath is None
+ or not hasattr(codepath, "relto")
+ or not codepath.relto(excludepath)
+ )
+ and (lineno is None or x.lineno == lineno)
+ and (firstlineno is None or x.frame.code.firstlineno == firstlineno)
+ ):
+ return Traceback(x._rawentry, self._excinfo)
+ return self
+
+ def __getitem__(self, key):
+ val = super(Traceback, self).__getitem__(key)
+ if isinstance(key, type(slice(0))):
+ val = self.__class__(val)
+ return val
+
+ def filter(self, fn=lambda x: not x.ishidden()):
+ """ return a Traceback instance with certain items removed
+
+ fn is a function that gets a single argument, a TracebackEntry
+ instance, and should return True when the item should be added
+ to the Traceback, False when not
+
+ by default this removes all the TracebackEntries which are hidden
+ (see ishidden() above)
+ """
+ return Traceback(filter(fn, self), self._excinfo)
+
+ def getcrashentry(self):
+        """ return the last non-hidden traceback entry that led
+        to the exception of a traceback.
+ """
+ for i in range(-1, -len(self) - 1, -1):
+ entry = self[i]
+ if not entry.ishidden():
+ return entry
+ return self[-1]
+
+ def recursionindex(self):
+ """ return the index of the frame/TracebackEntry where recursion
+ originates if appropriate, None if no recursion occurred
+ """
+ cache = {}
+ for i, entry in enumerate(self):
+ # id for the code.raw is needed to work around
+ # the strange metaprogramming in the decorator lib from pypi
+ # which generates code objects that have hash/value equality
+ # XXX needs a test
+ key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
+ # print "checking for recursion at", key
+ values = cache.setdefault(key, [])
+ if values:
+ f = entry.frame
+ loc = f.f_locals
+ for otherloc in values:
+ if f.is_true(
+ f.eval(
+ co_equal,
+ __recursioncache_locals_1=loc,
+ __recursioncache_locals_2=otherloc,
+ )
+ ):
+ return i
+ values.append(entry.frame.f_locals)
+ return None
+
+
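+# --- Illustrative sketch, not part of upstream pytest: building a Traceback
+# from a freshly caught exception and filtering hidden entries, using only
+# the API defined above. ---
+def _example_filter_traceback():
+    try:
+        {}["missing"]
+    except KeyError:
+        tb = Traceback(sys.exc_info()[2])
+        visible = tb.filter()           # drops entries whose frames set __tracebackhide__
+        return visible.getcrashentry()  # last non-hidden entry that led to the error
+
+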
+co_equal = compile(
+ "__recursioncache_locals_1 == __recursioncache_locals_2", "?", "eval"
+)
+
+
+class ExceptionInfo(object):
+ """ wraps sys.exc_info() objects and offers
+ help for navigating the traceback.
+ """
+ _striptext = ""
+ _assert_start_repr = "AssertionError(u'assert " if _PY2 else "AssertionError('assert "
+
+ def __init__(self, tup=None, exprinfo=None):
+ import _pytest._code
+
+ if tup is None:
+ tup = sys.exc_info()
+ if exprinfo is None and isinstance(tup[1], AssertionError):
+ exprinfo = getattr(tup[1], "msg", None)
+ if exprinfo is None:
+ exprinfo = py.io.saferepr(tup[1])
+ if exprinfo and exprinfo.startswith(self._assert_start_repr):
+ self._striptext = "AssertionError: "
+ self._excinfo = tup
+ #: the exception class
+ self.type = tup[0]
+ #: the exception instance
+ self.value = tup[1]
+ #: the exception raw traceback
+ self.tb = tup[2]
+ #: the exception type name
+ self.typename = self.type.__name__
+ #: the exception traceback (_pytest._code.Traceback instance)
+ self.traceback = _pytest._code.Traceback(self.tb, excinfo=ref(self))
+
+ def __repr__(self):
+ return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback))
+
+ def exconly(self, tryshort=False):
+ """ return the exception as a string
+
+ when 'tryshort' resolves to True, and the exception is a
+ _pytest._code._AssertionError, only the actual exception part of
+ the exception representation is returned (so 'AssertionError: ' is
+ removed from the beginning)
+ """
+ lines = format_exception_only(self.type, self.value)
+ text = "".join(lines)
+ text = text.rstrip()
+ if tryshort:
+ if text.startswith(self._striptext):
+ text = text[len(self._striptext):]
+ return text
+
+ def errisinstance(self, exc):
+ """ return True if the exception is an instance of exc """
+ return isinstance(self.value, exc)
+
+ def _getreprcrash(self):
+ exconly = self.exconly(tryshort=True)
+ entry = self.traceback.getcrashentry()
+ path, lineno = entry.frame.code.raw.co_filename, entry.lineno
+ return ReprFileLocation(path, lineno + 1, exconly)
+
+ def getrepr(
+ self,
+ showlocals=False,
+ style="long",
+ abspath=False,
+ tbfilter=True,
+ funcargs=False,
+ ):
+ """ return str()able representation of this exception info.
+ showlocals: show locals per traceback entry
+ style: long|short|no|native traceback style
+ tbfilter: hide entries (where __tracebackhide__ is true)
+
+ in case of style==native, tbfilter and showlocals is ignored.
+ """
+ if style == "native":
+ return ReprExceptionInfo(
+ ReprTracebackNative(
+ traceback.format_exception(
+ self.type, self.value, self.traceback[0]._rawentry
+ )
+ ),
+ self._getreprcrash(),
+ )
+
+ fmt = FormattedExcinfo(
+ showlocals=showlocals,
+ style=style,
+ abspath=abspath,
+ tbfilter=tbfilter,
+ funcargs=funcargs,
+ )
+ return fmt.repr_excinfo(self)
+
+ def __str__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return str(loc)
+
+ def __unicode__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return text_type(loc)
+
+ def match(self, regexp):
+ """
+ Match the regular expression 'regexp' on the string representation of
+ the exception. If it matches then True is returned (so that it is
+ possible to write 'assert excinfo.match()'). If it doesn't match an
+ AssertionError is raised.
+ """
+ __tracebackhide__ = True
+ if not re.search(regexp, str(self.value)):
+ assert 0, "Pattern '{!s}' not found in '{!s}'".format(regexp, self.value)
+ return True
+
+
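+# --- Illustrative sketch, not part of upstream pytest: wrapping the current
+# exception in ExceptionInfo and querying it with the methods above. ---
+def _example_exceptioninfo():
+    try:
+        int("not a number")
+    except ValueError:
+        info = ExceptionInfo()            # defaults to sys.exc_info()
+        info.match(r"invalid literal")    # asserts that the message matches
+        return info.exconly(tryshort=True)
+
+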
+@attr.s
+class FormattedExcinfo(object):
+ """ presenting information about failing Functions and Generators. """
+ # for traceback entries
+ flow_marker = ">"
+ fail_marker = "E"
+
+ showlocals = attr.ib(default=False)
+ style = attr.ib(default="long")
+ abspath = attr.ib(default=True)
+ tbfilter = attr.ib(default=True)
+ funcargs = attr.ib(default=False)
+ astcache = attr.ib(default=attr.Factory(dict), init=False, repr=False)
+
+ def _getindent(self, source):
+ # figure out indent for given source
+ try:
+ s = str(source.getstatement(len(source) - 1))
+ except KeyboardInterrupt:
+ raise
+ except: # noqa
+ try:
+ s = str(source[-1])
+ except KeyboardInterrupt:
+ raise
+ except: # noqa
+ return 0
+ return 4 + (len(s) - len(s.lstrip()))
+
+ def _getentrysource(self, entry):
+ source = entry.getsource(self.astcache)
+ if source is not None:
+ source = source.deindent()
+ return source
+
+ def _saferepr(self, obj):
+ return py.io.saferepr(obj)
+
+ def repr_args(self, entry):
+ if self.funcargs:
+ args = []
+ for argname, argvalue in entry.frame.getargs(var=True):
+ args.append((argname, self._saferepr(argvalue)))
+ return ReprFuncArgs(args)
+
+ def get_source(self, source, line_index=-1, excinfo=None, short=False):
+ """ return formatted and marked up source lines. """
+ import _pytest._code
+
+ lines = []
+ if source is None or line_index >= len(source.lines):
+ source = _pytest._code.Source("???")
+ line_index = 0
+ if line_index < 0:
+ line_index += len(source)
+ space_prefix = " "
+ if short:
+ lines.append(space_prefix + source.lines[line_index].strip())
+ else:
+ for line in source.lines[:line_index]:
+ lines.append(space_prefix + line)
+ lines.append(self.flow_marker + " " + source.lines[line_index])
+ for line in source.lines[line_index + 1:]:
+ lines.append(space_prefix + line)
+ if excinfo is not None:
+ indent = 4 if short else self._getindent(source)
+ lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
+ return lines
+
+ def get_exconly(self, excinfo, indent=4, markall=False):
+ lines = []
+ indent = " " * indent
+ # get the real exception information out
+ exlines = excinfo.exconly(tryshort=True).split("\n")
+ failindent = self.fail_marker + indent[1:]
+ for line in exlines:
+ lines.append(failindent + line)
+ if not markall:
+ failindent = indent
+ return lines
+
+ def repr_locals(self, locals):
+ if self.showlocals:
+ lines = []
+ keys = [loc for loc in locals if loc[0] != "@"]
+ keys.sort()
+ for name in keys:
+ value = locals[name]
+ if name == "__builtins__":
+ lines.append("__builtins__ = <builtins>")
+ else:
+ # This formatting could all be handled by the
+ # _repr() function, which is only reprlib.Repr in
+ # disguise, so is very configurable.
+ str_repr = self._saferepr(value)
+ # if len(str_repr) < 70 or not isinstance(value,
+ # (list, tuple, dict)):
+ lines.append("%-10s = %s" % (name, str_repr))
+ # else:
+ # self._line("%-10s =\\" % (name,))
+ # # XXX
+ # pprint.pprint(value, stream=self.excinfowriter)
+ return ReprLocals(lines)
+
+ def repr_traceback_entry(self, entry, excinfo=None):
+ import _pytest._code
+
+ source = self._getentrysource(entry)
+ if source is None:
+ source = _pytest._code.Source("???")
+ line_index = 0
+ else:
+ # entry.getfirstlinesource() can be -1, should be 0 on jython
+ line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
+
+ lines = []
+ style = entry._repr_style
+ if style is None:
+ style = self.style
+ if style in ("short", "long"):
+ short = style == "short"
+ reprargs = self.repr_args(entry) if not short else None
+ s = self.get_source(source, line_index, excinfo, short=short)
+ lines.extend(s)
+ if short:
+ message = "in %s" % (entry.name)
+ else:
+ message = excinfo and excinfo.typename or ""
+ path = self._makepath(entry.path)
+ filelocrepr = ReprFileLocation(path, entry.lineno + 1, message)
+ localsrepr = None
+ if not short:
+ localsrepr = self.repr_locals(entry.locals)
+ return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
+ if excinfo:
+ lines.extend(self.get_exconly(excinfo, indent=4))
+ return ReprEntry(lines, None, None, None, style)
+
+ def _makepath(self, path):
+ if not self.abspath:
+ try:
+ np = py.path.local().bestrelpath(path)
+ except OSError:
+ return path
+ if len(np) < len(str(path)):
+ path = np
+ return path
+
+ def repr_traceback(self, excinfo):
+ traceback = excinfo.traceback
+ if self.tbfilter:
+ traceback = traceback.filter()
+
+ if is_recursion_error(excinfo):
+ traceback, extraline = self._truncate_recursive_traceback(traceback)
+ else:
+ extraline = None
+
+ last = traceback[-1]
+ entries = []
+ for index, entry in enumerate(traceback):
+ einfo = (last == entry) and excinfo or None
+ reprentry = self.repr_traceback_entry(entry, einfo)
+ entries.append(reprentry)
+ return ReprTraceback(entries, extraline, style=self.style)
+
+ def _truncate_recursive_traceback(self, traceback):
+ """
+ Truncate the given recursive traceback trying to find the starting point
+ of the recursion.
+
+ The detection is done by going through each traceback entry and finding the
+        point at which the locals of the frame are equal to the locals of a previous frame (see ``recursionindex()``).
+
+ Handle the situation where the recursion process might raise an exception (for example
+ comparing numpy arrays using equality raises a TypeError), in which case we do our best to
+ warn the user of the error and show a limited traceback.
+ """
+ try:
+ recursionindex = traceback.recursionindex()
+ except Exception as e:
+ max_frames = 10
+ extraline = (
+ "!!! Recursion error detected, but an error occurred locating the origin of recursion.\n"
+ " The following exception happened when comparing locals in the stack frame:\n"
+ " {exc_type}: {exc_msg}\n"
+ " Displaying first and last {max_frames} stack frames out of {total}."
+ ).format(
+ exc_type=type(e).__name__,
+ exc_msg=safe_str(e),
+ max_frames=max_frames,
+ total=len(traceback),
+ )
+ traceback = traceback[:max_frames] + traceback[-max_frames:]
+ else:
+ if recursionindex is not None:
+ extraline = "!!! Recursion detected (same locals & position)"
+ traceback = traceback[:recursionindex + 1]
+ else:
+ extraline = None
+
+ return traceback, extraline
+
+ def repr_excinfo(self, excinfo):
+ if _PY2:
+ reprtraceback = self.repr_traceback(excinfo)
+ reprcrash = excinfo._getreprcrash()
+
+ return ReprExceptionInfo(reprtraceback, reprcrash)
+ else:
+ repr_chain = []
+ e = excinfo.value
+ descr = None
+ while e is not None:
+ if excinfo:
+ reprtraceback = self.repr_traceback(excinfo)
+ reprcrash = excinfo._getreprcrash()
+ else:
+ # fallback to native repr if the exception doesn't have a traceback:
+ # ExceptionInfo objects require a full traceback to work
+ reprtraceback = ReprTracebackNative(
+ traceback.format_exception(type(e), e, None)
+ )
+ reprcrash = None
+
+ repr_chain += [(reprtraceback, reprcrash, descr)]
+ if e.__cause__ is not None:
+ e = e.__cause__
+ excinfo = ExceptionInfo(
+ (type(e), e, e.__traceback__)
+ ) if e.__traceback__ else None
+ descr = "The above exception was the direct cause of the following exception:"
+ elif (e.__context__ is not None and not e.__suppress_context__):
+ e = e.__context__
+ excinfo = ExceptionInfo(
+ (type(e), e, e.__traceback__)
+ ) if e.__traceback__ else None
+ descr = "During handling of the above exception, another exception occurred:"
+ else:
+ e = None
+ repr_chain.reverse()
+ return ExceptionChainRepr(repr_chain)
+
+
+class TerminalRepr(object):
+
+ def __str__(self):
+ s = self.__unicode__()
+ if _PY2:
+ s = s.encode("utf-8")
+ return s
+
+ def __unicode__(self):
+ # FYI this is called from pytest-xdist's serialization of exception
+ # information.
+ io = py.io.TextIO()
+ tw = py.io.TerminalWriter(file=io)
+ self.toterminal(tw)
+ return io.getvalue().strip()
+
+ def __repr__(self):
+ return "<%s instance at %0x>" % (self.__class__, id(self))
+
+
+class ExceptionRepr(TerminalRepr):
+
+ def __init__(self):
+ self.sections = []
+
+ def addsection(self, name, content, sep="-"):
+ self.sections.append((name, content, sep))
+
+ def toterminal(self, tw):
+ for name, content, sep in self.sections:
+ tw.sep(sep, name)
+ tw.line(content)
+
+
+class ExceptionChainRepr(ExceptionRepr):
+
+ def __init__(self, chain):
+ super(ExceptionChainRepr, self).__init__()
+ self.chain = chain
+ # reprcrash and reprtraceback of the outermost (the newest) exception
+ # in the chain
+ self.reprtraceback = chain[-1][0]
+ self.reprcrash = chain[-1][1]
+
+ def toterminal(self, tw):
+ for element in self.chain:
+ element[0].toterminal(tw)
+ if element[2] is not None:
+ tw.line("")
+ tw.line(element[2], yellow=True)
+ super(ExceptionChainRepr, self).toterminal(tw)
+
+
+class ReprExceptionInfo(ExceptionRepr):
+
+ def __init__(self, reprtraceback, reprcrash):
+ super(ReprExceptionInfo, self).__init__()
+ self.reprtraceback = reprtraceback
+ self.reprcrash = reprcrash
+
+ def toterminal(self, tw):
+ self.reprtraceback.toterminal(tw)
+ super(ReprExceptionInfo, self).toterminal(tw)
+
+
+class ReprTraceback(TerminalRepr):
+ entrysep = "_ "
+
+ def __init__(self, reprentries, extraline, style):
+ self.reprentries = reprentries
+ self.extraline = extraline
+ self.style = style
+
+ def toterminal(self, tw):
+ # the entries might have different styles
+ for i, entry in enumerate(self.reprentries):
+ if entry.style == "long":
+ tw.line("")
+ entry.toterminal(tw)
+ if i < len(self.reprentries) - 1:
+ next_entry = self.reprentries[i + 1]
+ if (
+ entry.style == "long"
+ or entry.style == "short"
+ and next_entry.style == "long"
+ ):
+ tw.sep(self.entrysep)
+
+ if self.extraline:
+ tw.line(self.extraline)
+
+
+class ReprTracebackNative(ReprTraceback):
+
+ def __init__(self, tblines):
+ self.style = "native"
+ self.reprentries = [ReprEntryNative(tblines)]
+ self.extraline = None
+
+
+class ReprEntryNative(TerminalRepr):
+ style = "native"
+
+ def __init__(self, tblines):
+ self.lines = tblines
+
+ def toterminal(self, tw):
+ tw.write("".join(self.lines))
+
+
+class ReprEntry(TerminalRepr):
+ localssep = "_ "
+
+ def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
+ self.lines = lines
+ self.reprfuncargs = reprfuncargs
+ self.reprlocals = reprlocals
+ self.reprfileloc = filelocrepr
+ self.style = style
+
+ def toterminal(self, tw):
+ if self.style == "short":
+ self.reprfileloc.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ # tw.line("")
+ return
+ if self.reprfuncargs:
+ self.reprfuncargs.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ if self.reprlocals:
+ # tw.sep(self.localssep, "Locals")
+ tw.line("")
+ self.reprlocals.toterminal(tw)
+ if self.reprfileloc:
+ if self.lines:
+ tw.line("")
+ self.reprfileloc.toterminal(tw)
+
+ def __str__(self):
+ return "%s\n%s\n%s" % ("\n".join(self.lines), self.reprlocals, self.reprfileloc)
+
+
+class ReprFileLocation(TerminalRepr):
+
+ def __init__(self, path, lineno, message):
+ self.path = str(path)
+ self.lineno = lineno
+ self.message = message
+
+ def toterminal(self, tw):
+ # filename and lineno output for each entry,
+        # using an output format that most editors understand
+ msg = self.message
+ i = msg.find("\n")
+ if i != -1:
+ msg = msg[:i]
+ tw.write(self.path, bold=True, red=True)
+ tw.line(":%s: %s" % (self.lineno, msg))
+
+
+class ReprLocals(TerminalRepr):
+
+ def __init__(self, lines):
+ self.lines = lines
+
+ def toterminal(self, tw):
+ for line in self.lines:
+ tw.line(line)
+
+
+class ReprFuncArgs(TerminalRepr):
+
+ def __init__(self, args):
+ self.args = args
+
+ def toterminal(self, tw):
+ if self.args:
+ linesofar = ""
+ for name, value in self.args:
+ ns = "%s = %s" % (safe_str(name), safe_str(value))
+ if len(ns) + len(linesofar) + 2 > tw.fullwidth:
+ if linesofar:
+ tw.line(linesofar)
+ linesofar = ns
+ else:
+ if linesofar:
+ linesofar += ", " + ns
+ else:
+ linesofar = ns
+ if linesofar:
+ tw.line(linesofar)
+ tw.line("")
+
+
+def getrawcode(obj, trycall=True):
+ """ return code object for given function. """
+ try:
+ return obj.__code__
+ except AttributeError:
+ obj = getattr(obj, "im_func", obj)
+ obj = getattr(obj, "func_code", obj)
+ obj = getattr(obj, "f_code", obj)
+ obj = getattr(obj, "__code__", obj)
+ if trycall and not hasattr(obj, "co_firstlineno"):
+ if hasattr(obj, "__call__") and not inspect.isclass(obj):
+ x = getrawcode(obj.__call__, trycall=False)
+ if hasattr(x, "co_firstlineno"):
+ return x
+ return obj
+
+
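+# --- Illustrative sketch, not part of upstream pytest: getrawcode() unwraps
+# plain functions, bound methods and callables down to their code object. ---
+def _example_getrawcode():
+    co = getrawcode(getrawcode)        # a function's own code object
+    return co.co_name, co.co_firstlineno
+
+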
+if PY35: # RecursionError introduced in 3.5
+
+ def is_recursion_error(excinfo):
+ return excinfo.errisinstance(RecursionError) # noqa
+
+
+else:
+
+ def is_recursion_error(excinfo):
+ if not excinfo.errisinstance(RuntimeError):
+ return False
+ try:
+ return "maximum recursion depth exceeded" in str(excinfo.value)
+ except UnicodeError:
+ return False
diff --git a/third_party/python/pytest/src/_pytest/_code/source.py b/third_party/python/pytest/src/_pytest/_code/source.py
new file mode 100644
index 0000000000..6b982a4cac
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/_code/source.py
@@ -0,0 +1,379 @@
+from __future__ import absolute_import, division, generators, print_function
+
+import ast
+from ast import PyCF_ONLY_AST as _AST_FLAG
+from bisect import bisect_right
+import linecache
+import sys
+import six
+import inspect
+import tokenize
+import py
+
+cpy_compile = compile
+
+
+class Source(object):
+ """ an immutable object holding a source code fragment,
+ possibly deindenting it.
+ """
+ _compilecounter = 0
+
+ def __init__(self, *parts, **kwargs):
+ self.lines = lines = []
+ de = kwargs.get("deindent", True)
+ rstrip = kwargs.get("rstrip", True)
+ for part in parts:
+ if not part:
+ partlines = []
+ elif isinstance(part, Source):
+ partlines = part.lines
+ elif isinstance(part, (tuple, list)):
+ partlines = [x.rstrip("\n") for x in part]
+ elif isinstance(part, six.string_types):
+ partlines = part.split("\n")
+ if rstrip:
+ while partlines:
+ if partlines[-1].strip():
+ break
+ partlines.pop()
+ else:
+ partlines = getsource(part, deindent=de).lines
+ if de:
+ partlines = deindent(partlines)
+ lines.extend(partlines)
+
+ def __eq__(self, other):
+ try:
+ return self.lines == other.lines
+ except AttributeError:
+ if isinstance(other, str):
+ return str(self) == other
+ return False
+
+ __hash__ = None
+
+ def __getitem__(self, key):
+ if isinstance(key, int):
+ return self.lines[key]
+ else:
+ if key.step not in (None, 1):
+ raise IndexError("cannot slice a Source with a step")
+ newsource = Source()
+ newsource.lines = self.lines[key.start:key.stop]
+ return newsource
+
+ def __len__(self):
+ return len(self.lines)
+
+ def strip(self):
+ """ return new source object with trailing
+ and leading blank lines removed.
+ """
+ start, end = 0, len(self)
+ while start < end and not self.lines[start].strip():
+ start += 1
+ while end > start and not self.lines[end - 1].strip():
+ end -= 1
+ source = Source()
+ source.lines[:] = self.lines[start:end]
+ return source
+
+ def putaround(self, before="", after="", indent=" " * 4):
+ """ return a copy of the source object with
+ 'before' and 'after' wrapped around it.
+ """
+ before = Source(before)
+ after = Source(after)
+ newsource = Source()
+ lines = [(indent + line) for line in self.lines]
+ newsource.lines = before.lines + lines + after.lines
+ return newsource
+
+ def indent(self, indent=" " * 4):
+ """ return a copy of the source object with
+ all lines indented by the given indent-string.
+ """
+ newsource = Source()
+ newsource.lines = [(indent + line) for line in self.lines]
+ return newsource
+
+ def getstatement(self, lineno):
+ """ return Source statement which contains the
+        given line number (counted from 0).
+ """
+ start, end = self.getstatementrange(lineno)
+ return self[start:end]
+
+ def getstatementrange(self, lineno):
+ """ return (start, end) tuple which spans the minimal
+        statement region which contains the given lineno.
+ """
+ if not (0 <= lineno < len(self)):
+ raise IndexError("lineno out of range")
+ ast, start, end = getstatementrange_ast(lineno, self)
+ return start, end
+
+ def deindent(self, offset=None):
+ """ return a new source object deindented by offset.
+ If offset is None then guess an indentation offset from
+ the first non-blank line. Subsequent lines which have a
+ lower indentation offset will be copied verbatim as
+ they are assumed to be part of multilines.
+ """
+ # XXX maybe use the tokenizer to properly handle multiline
+ # strings etc.pp?
+ newsource = Source()
+ newsource.lines[:] = deindent(self.lines, offset)
+ return newsource
+
+ def isparseable(self, deindent=True):
+ """ return True if source is parseable, heuristically
+ deindenting it by default.
+ """
+ from parser import suite as syntax_checker
+
+ if deindent:
+ source = str(self.deindent())
+ else:
+ source = str(self)
+ try:
+ # compile(source+'\n', "x", "exec")
+ syntax_checker(source + "\n")
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ return False
+ else:
+ return True
+
+ def __str__(self):
+ return "\n".join(self.lines)
+
+ def compile(
+ self,
+ filename=None,
+ mode="exec",
+ flag=generators.compiler_flag,
+ dont_inherit=0,
+ _genframe=None,
+ ):
+ """ return compiled code object. if filename is None
+ invent an artificial filename which displays
+ the source/line position of the caller frame.
+ """
+ if not filename or py.path.local(filename).check(file=0):
+ if _genframe is None:
+ _genframe = sys._getframe(1) # the caller
+ fn, lineno = _genframe.f_code.co_filename, _genframe.f_lineno
+ base = "<%d-codegen " % self._compilecounter
+ self.__class__._compilecounter += 1
+ if not filename:
+ filename = base + "%s:%d>" % (fn, lineno)
+ else:
+ filename = base + "%r %s:%d>" % (filename, fn, lineno)
+ source = "\n".join(self.lines) + "\n"
+ try:
+ co = cpy_compile(source, filename, mode, flag)
+ except SyntaxError:
+ ex = sys.exc_info()[1]
+ # re-represent syntax errors from parsing python strings
+ msglines = self.lines[:ex.lineno]
+ if ex.offset:
+ msglines.append(" " * ex.offset + "^")
+ msglines.append("(code was compiled probably from here: %s)" % filename)
+ newex = SyntaxError("\n".join(msglines))
+ newex.offset = ex.offset
+ newex.lineno = ex.lineno
+ newex.text = ex.text
+ raise newex
+ else:
+ if flag & _AST_FLAG:
+ return co
+ lines = [(x + "\n") for x in self.lines]
+ linecache.cache[filename] = (1, None, lines, filename)
+ return co
+
+
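+# --- Illustrative sketch, not part of upstream pytest: basic Source usage,
+# relying only on the methods defined above. ---
+def _example_source_usage():
+    src = Source("    if x:\n        pass")    # deindented on construction
+    assert str(src) == "if x:\n    pass"
+    stmt = src.getstatement(1)                 # statement containing line 1 (counted from 0)
+    return src.indent(), stmt
+
+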
+#
+# public API shortcut functions
+#
+
+
+def compile_(
+ source, filename=None, mode="exec", flags=generators.compiler_flag, dont_inherit=0
+):
+ """ compile the given source to a raw code object,
+ and maintain an internal cache which allows later
+ retrieval of the source code for the code object
+ and any recursively created code objects.
+ """
+ if isinstance(source, ast.AST):
+ # XXX should Source support having AST?
+ return cpy_compile(source, filename, mode, flags, dont_inherit)
+ _genframe = sys._getframe(1) # the caller
+ s = Source(source)
+ co = s.compile(filename, mode, flags, _genframe=_genframe)
+ return co
+
+
+def getfslineno(obj):
+ """ Return source location (path, lineno) for the given object.
+    If the source cannot be determined, return ("", -1).
+ """
+ from .code import Code
+
+ try:
+ code = Code(obj)
+ except TypeError:
+ try:
+ fn = inspect.getsourcefile(obj) or inspect.getfile(obj)
+ except TypeError:
+ return "", -1
+
+ fspath = fn and py.path.local(fn) or None
+ lineno = -1
+ if fspath:
+ try:
+ _, lineno = findsource(obj)
+ except IOError:
+ pass
+ else:
+ fspath = code.path
+ lineno = code.firstlineno
+ assert isinstance(lineno, int)
+ return fspath, lineno
+
+
+#
+# helper functions
+#
+
+
+def findsource(obj):
+ try:
+ sourcelines, lineno = inspect.findsource(obj)
+ except py.builtin._sysex:
+ raise
+ except: # noqa
+ return None, -1
+ source = Source()
+ source.lines = [line.rstrip() for line in sourcelines]
+ return source, lineno
+
+
+def getsource(obj, **kwargs):
+ from .code import getrawcode
+
+ obj = getrawcode(obj)
+ try:
+ strsrc = inspect.getsource(obj)
+ except IndentationError:
+ strsrc = '"Buggy python version consider upgrading, cannot get source"'
+ assert isinstance(strsrc, str)
+ return Source(strsrc, **kwargs)
+
+
+def deindent(lines, offset=None):
+ if offset is None:
+ for line in lines:
+ line = line.expandtabs()
+ s = line.lstrip()
+ if s:
+ offset = len(line) - len(s)
+ break
+ else:
+ offset = 0
+ if offset == 0:
+ return list(lines)
+ newlines = []
+
+ def readline_generator(lines):
+ for line in lines:
+ yield line + "\n"
+
+ it = readline_generator(lines)
+
+ try:
+ for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(
+ lambda: next(it)
+ ):
+ if sline > len(lines):
+ break # End of input reached
+ if sline > len(newlines):
+ line = lines[sline - 1].expandtabs()
+ if line.lstrip() and line[:offset].isspace():
+ line = line[offset:] # Deindent
+ newlines.append(line)
+
+ for i in range(sline, eline):
+ # Don't deindent continuing lines of
+ # multiline tokens (i.e. multiline strings)
+ newlines.append(lines[i])
+ except (IndentationError, tokenize.TokenError):
+ pass
+ # Add any lines we didn't see. E.g. if an exception was raised.
+ newlines.extend(lines[len(newlines):])
+ return newlines
+
+
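+# --- Illustrative sketch, not part of upstream pytest: deindent() strips the
+# common leading indentation guessed from the first non-blank line. ---
+def _example_deindent():
+    lines = ["    def f():", "        return 1"]
+    return deindent(lines)    # -> ["def f():", "    return 1"]
+
+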
+def get_statement_startend2(lineno, node):
+ import ast
+
+ # flatten all statements and except handlers into one lineno-list
+ # AST's line numbers start indexing at 1
+ values = []
+ for x in ast.walk(node):
+ if isinstance(x, (ast.stmt, ast.ExceptHandler)):
+ values.append(x.lineno - 1)
+ for name in ("finalbody", "orelse"):
+ val = getattr(x, name, None)
+ if val:
+ # treat the finally/orelse part as its own statement
+ values.append(val[0].lineno - 1 - 1)
+ values.sort()
+ insert_index = bisect_right(values, lineno)
+ start = values[insert_index - 1]
+ if insert_index >= len(values):
+ end = None
+ else:
+ end = values[insert_index]
+ return start, end
+
+
+def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
+ if astnode is None:
+ content = str(source)
+ astnode = compile(content, "source", "exec", 1024) # 1024 for AST
+
+ start, end = get_statement_startend2(lineno, astnode)
+ # we need to correct the end:
+ # - ast-parsing strips comments
+ # - there might be empty lines
+ # - we might have lesser indented code blocks at the end
+ if end is None:
+ end = len(source.lines)
+
+ if end > start + 1:
+ # make sure we don't span differently indented code blocks
+        # by using the BlockFinder helper that inspect.getsource() itself uses
+ block_finder = inspect.BlockFinder()
+ # if we start with an indented line, put blockfinder to "started" mode
+ block_finder.started = source.lines[start][0].isspace()
+ it = ((x + "\n") for x in source.lines[start:end])
+ try:
+ for tok in tokenize.generate_tokens(lambda: next(it)):
+ block_finder.tokeneater(*tok)
+ except (inspect.EndOfBlock, IndentationError):
+ end = block_finder.last + start
+ except Exception:
+ pass
+
+ # the end might still point to a comment or empty line, correct it
+ while end:
+ line = source.lines[end - 1].lstrip()
+ if line.startswith("#") or not line:
+ end -= 1
+ else:
+ break
+ return astnode, start, end
diff --git a/third_party/python/pytest/src/_pytest/_version.py b/third_party/python/pytest/src/_pytest/_version.py
new file mode 100644
index 0000000000..f54dbb0462
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/_version.py
@@ -0,0 +1,4 @@
+# coding: utf-8
+# file generated by setuptools_scm
+# don't change, don't track in version control
+version = '3.6.2'
diff --git a/third_party/python/pytest/src/_pytest/assertion/__init__.py b/third_party/python/pytest/src/_pytest/assertion/__init__.py
new file mode 100644
index 0000000000..2c9a8890c9
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/assertion/__init__.py
@@ -0,0 +1,151 @@
+"""
+support for presenting detailed information in failing assertions.
+"""
+from __future__ import absolute_import, division, print_function
+import sys
+import six
+
+from _pytest.assertion import util
+from _pytest.assertion import rewrite
+from _pytest.assertion import truncate
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("debugconfig")
+ group.addoption(
+ "--assert",
+ action="store",
+ dest="assertmode",
+ choices=("rewrite", "plain"),
+ default="rewrite",
+ metavar="MODE",
+ help="""Control assertion debugging tools. 'plain'
+ performs no assertion debugging. 'rewrite'
+ (the default) rewrites assert statements in
+ test modules on import to provide assert
+ expression information.""",
+ )
+
+
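+# --- Illustrative note, not part of upstream pytest: the option registered
+# above is used from the command line, e.g. ---
+#
+#     pytest --assert=plain      # disable assertion rewriting
+#     pytest --assert=rewrite    # the default: rewrite asserts on import
+
+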
+def register_assert_rewrite(*names):
+ """Register one or more module names to be rewritten on import.
+
+ This function will make sure that this module or all modules inside
+ the package will get their assert statements rewritten.
+ Thus you should make sure to call this before the module is
+ actually imported, usually in your __init__.py if you are a plugin
+ using a package.
+
+ :raise TypeError: if the given module names are not strings.
+ """
+ for name in names:
+ if not isinstance(name, str):
+ msg = "expected module names as *args, got {0} instead"
+ raise TypeError(msg.format(repr(names)))
+ for hook in sys.meta_path:
+ if isinstance(hook, rewrite.AssertionRewritingHook):
+ importhook = hook
+ break
+ else:
+ importhook = DummyRewriteHook()
+ importhook.mark_rewrite(*names)
+
+
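+# --- Illustrative sketch, not part of upstream pytest: a plugin package would
+# typically call this (re-exported as pytest.register_assert_rewrite) from its
+# __init__.py before the helper module is imported; "myplugin.helpers" is a
+# hypothetical module name. ---
+#
+#     import pytest
+#     pytest.register_assert_rewrite("myplugin.helpers")
+#     from myplugin import helpers
+
+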
+class DummyRewriteHook(object):
+ """A no-op import hook for when rewriting is disabled."""
+
+ def mark_rewrite(self, *names):
+ pass
+
+
+class AssertionState(object):
+ """State for the assertion plugin."""
+
+ def __init__(self, config, mode):
+ self.mode = mode
+ self.trace = config.trace.root.get("assertion")
+ self.hook = None
+
+
+def install_importhook(config):
+ """Try to install the rewrite hook, raise SystemError if it fails."""
+    # Jython has an AST bug that makes the assertion rewriting hook malfunction.
+ if sys.platform.startswith("java"):
+ raise SystemError("rewrite not supported")
+
+ config._assertstate = AssertionState(config, "rewrite")
+ config._assertstate.hook = hook = rewrite.AssertionRewritingHook(config)
+ sys.meta_path.insert(0, hook)
+ config._assertstate.trace("installed rewrite import hook")
+
+ def undo():
+ hook = config._assertstate.hook
+ if hook is not None and hook in sys.meta_path:
+ sys.meta_path.remove(hook)
+
+ config.add_cleanup(undo)
+ return hook
+
+
+def pytest_collection(session):
+ # this hook is only called when test modules are collected
+ # so for example not in the master process of pytest-xdist
+ # (which does not collect test modules)
+ assertstate = getattr(session.config, "_assertstate", None)
+ if assertstate:
+ if assertstate.hook is not None:
+ assertstate.hook.set_session(session)
+
+
+def pytest_runtest_setup(item):
+ """Setup the pytest_assertrepr_compare hook
+
+ The newinterpret and rewrite modules will use util._reprcompare if
+    it exists to provide custom reporting via the
+ pytest_assertrepr_compare hook. This sets up this custom
+ comparison for the test.
+ """
+
+ def callbinrepr(op, left, right):
+ """Call the pytest_assertrepr_compare hook and prepare the result
+
+ This uses the first result from the hook and then ensures the
+ following:
+ * Overly verbose explanations are truncated unless configured otherwise
+ (eg. if running in verbose mode).
+ * Embedded newlines are escaped to help util.format_explanation()
+ later.
+ * If the rewrite mode is used embedded %-characters are replaced
+ to protect later % formatting.
+
+ The result can be formatted by util.format_explanation() for
+ pretty printing.
+ """
+ hook_result = item.ihook.pytest_assertrepr_compare(
+ config=item.config, op=op, left=left, right=right
+ )
+ for new_expl in hook_result:
+ if new_expl:
+ new_expl = truncate.truncate_if_required(new_expl, item)
+ new_expl = [line.replace("\n", "\\n") for line in new_expl]
+ res = six.text_type("\n~").join(new_expl)
+ if item.config.getvalue("assertmode") == "rewrite":
+ res = res.replace("%", "%%")
+ return res
+
+ util._reprcompare = callbinrepr
+
+
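+# --- Illustrative sketch, not part of upstream pytest: a conftest.py can
+# implement the pytest_assertrepr_compare hook that callbinrepr() above calls;
+# the Money class is hypothetical. ---
+#
+#     # conftest.py
+#     def pytest_assertrepr_compare(config, op, left, right):
+#         if isinstance(left, Money) and isinstance(right, Money) and op == "==":
+#             return ["comparing Money instances:",
+#                     "   %s != %s" % (left, right)]
+
+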
+def pytest_runtest_teardown(item):
+ util._reprcompare = None
+
+
+def pytest_sessionfinish(session):
+ assertstate = getattr(session.config, "_assertstate", None)
+ if assertstate:
+ if assertstate.hook is not None:
+ assertstate.hook.set_session(None)
+
+
+# Expose this plugin's implementation for the pytest_assertrepr_compare hook
+pytest_assertrepr_compare = util.assertrepr_compare
diff --git a/third_party/python/pytest/src/_pytest/assertion/rewrite.py b/third_party/python/pytest/src/_pytest/assertion/rewrite.py
new file mode 100644
index 0000000000..eceed611ff
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/assertion/rewrite.py
@@ -0,0 +1,954 @@
+"""Rewrite assertion AST to produce nice error messages"""
+from __future__ import absolute_import, division, print_function
+import ast
+import errno
+import itertools
+import imp
+import marshal
+import os
+import re
+import six
+import struct
+import sys
+import types
+
+import atomicwrites
+import py
+
+from _pytest.assertion import util
+
+
+# pytest caches rewritten pycs in __pycache__.
+if hasattr(imp, "get_tag"):
+ PYTEST_TAG = imp.get_tag() + "-PYTEST"
+else:
+ if hasattr(sys, "pypy_version_info"):
+ impl = "pypy"
+ elif sys.platform == "java":
+ impl = "jython"
+ else:
+ impl = "cpython"
+ ver = sys.version_info
+ PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1])
+ del ver, impl
+
+PYC_EXT = ".py" + (__debug__ and "c" or "o")
+PYC_TAIL = "." + PYTEST_TAG + PYC_EXT
+
+ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3
+
+if sys.version_info >= (3, 5):
+ ast_Call = ast.Call
+else:
+
+ def ast_Call(a, b, c):
+ return ast.Call(a, b, c, None, None)
+
+
+class AssertionRewritingHook(object):
+ """PEP302 Import hook which rewrites asserts."""
+
+ def __init__(self, config):
+ self.config = config
+ self.fnpats = config.getini("python_files")
+ self.session = None
+ self.modules = {}
+ self._rewritten_names = set()
+ self._register_with_pkg_resources()
+ self._must_rewrite = set()
+
+ def set_session(self, session):
+ self.session = session
+
+ def find_module(self, name, path=None):
+ state = self.config._assertstate
+ state.trace("find_module called for: %s" % name)
+ names = name.rsplit(".", 1)
+ lastname = names[-1]
+ pth = None
+ if path is not None:
+ # Starting with Python 3.3, path is a _NamespacePath(), which
+ # causes problems if not converted to list.
+ path = list(path)
+ if len(path) == 1:
+ pth = path[0]
+ if pth is None:
+ try:
+ fd, fn, desc = imp.find_module(lastname, path)
+ except ImportError:
+ return None
+ if fd is not None:
+ fd.close()
+ tp = desc[2]
+ if tp == imp.PY_COMPILED:
+ if hasattr(imp, "source_from_cache"):
+ try:
+ fn = imp.source_from_cache(fn)
+ except ValueError:
+ # Python 3 doesn't like orphaned but still-importable
+ # .pyc files.
+ fn = fn[:-1]
+ else:
+ fn = fn[:-1]
+ elif tp != imp.PY_SOURCE:
+ # Don't know what this is.
+ return None
+ else:
+ fn = os.path.join(pth, name.rpartition(".")[2] + ".py")
+
+ fn_pypath = py.path.local(fn)
+ if not self._should_rewrite(name, fn_pypath, state):
+ return None
+
+ self._rewritten_names.add(name)
+
+ # The requested module looks like a test file, so rewrite it. This is
+ # the most magical part of the process: load the source, rewrite the
+ # asserts, and load the rewritten source. We also cache the rewritten
+ # module code in a special pyc. We must be aware of the possibility of
+ # concurrent pytest processes rewriting and loading pycs. To avoid
+ # tricky race conditions, we maintain the following invariant: The
+ # cached pyc is always a complete, valid pyc. Operations on it must be
+ # atomic. POSIX's atomic rename comes in handy.
+ write = not sys.dont_write_bytecode
+ cache_dir = os.path.join(fn_pypath.dirname, "__pycache__")
+ if write:
+ try:
+ os.mkdir(cache_dir)
+ except OSError:
+ e = sys.exc_info()[1].errno
+ if e == errno.EEXIST:
+ # Either the __pycache__ directory already exists (the
+ # common case) or it's blocked by a non-dir node. In the
+ # latter case, we'll ignore it in _write_pyc.
+ pass
+ elif e in [errno.ENOENT, errno.ENOTDIR]:
+ # One of the path components was not a directory, likely
+ # because we're in a zip file.
+ write = False
+ elif e in [errno.EACCES, errno.EROFS, errno.EPERM]:
+ state.trace("read only directory: %r" % fn_pypath.dirname)
+ write = False
+ else:
+ raise
+ cache_name = fn_pypath.basename[:-3] + PYC_TAIL
+ pyc = os.path.join(cache_dir, cache_name)
+ # Notice that even if we're in a read-only directory, I'm going
+ # to check for a cached pyc. This may not be optimal...
+ co = _read_pyc(fn_pypath, pyc, state.trace)
+ if co is None:
+ state.trace("rewriting %r" % (fn,))
+ source_stat, co = _rewrite_test(self.config, fn_pypath)
+ if co is None:
+ # Probably a SyntaxError in the test.
+ return None
+ if write:
+ _write_pyc(state, co, source_stat, pyc)
+ else:
+ state.trace("found cached rewritten pyc for %r" % (fn,))
+ self.modules[name] = co, pyc
+ return self
+
+ def _should_rewrite(self, name, fn_pypath, state):
+ # always rewrite conftest files
+ fn = str(fn_pypath)
+ if fn_pypath.basename == "conftest.py":
+ state.trace("rewriting conftest file: %r" % (fn,))
+ return True
+
+ if self.session is not None:
+ if self.session.isinitpath(fn):
+ state.trace("matched test file (was specified on cmdline): %r" % (fn,))
+ return True
+
+ # modules not passed explicitly on the command line are only
+ # rewritten if they match the naming convention for test files
+ for pat in self.fnpats:
+ if fn_pypath.fnmatch(pat):
+ state.trace("matched test file %r" % (fn,))
+ return True
+
+ for marked in self._must_rewrite:
+ if name == marked or name.startswith(marked + "."):
+ state.trace("matched marked file %r (from %r)" % (name, marked))
+ return True
+
+ return False
+
+ def mark_rewrite(self, *names):
+ """Mark import names as needing to be rewritten.
+
+ The named module or package as well as any nested modules will
+ be rewritten on import.
+ """
+ already_imported = (
+ set(names).intersection(sys.modules).difference(self._rewritten_names)
+ )
+ for name in already_imported:
+ if not AssertionRewriter.is_rewrite_disabled(
+ sys.modules[name].__doc__ or ""
+ ):
+ self._warn_already_imported(name)
+ self._must_rewrite.update(names)
+
+ def _warn_already_imported(self, name):
+ self.config.warn(
+ "P1", "Module already imported so cannot be rewritten: %s" % name
+ )
+
+ def load_module(self, name):
+ # If there is an existing module object named 'fullname' in
+ # sys.modules, the loader must use that existing module. (Otherwise,
+ # the reload() builtin will not work correctly.)
+ if name in sys.modules:
+ return sys.modules[name]
+
+ co, pyc = self.modules.pop(name)
+ # I wish I could just call imp.load_compiled here, but __file__ has to
+ # be set properly. In Python 3.2+, this all would be handled correctly
+ # by load_compiled.
+ mod = sys.modules[name] = imp.new_module(name)
+ try:
+ mod.__file__ = co.co_filename
+ # Normally, this attribute is 3.2+.
+ mod.__cached__ = pyc
+ mod.__loader__ = self
+ py.builtin.exec_(co, mod.__dict__)
+ except: # noqa
+ if name in sys.modules:
+ del sys.modules[name]
+ raise
+ return sys.modules[name]
+
+ def is_package(self, name):
+ try:
+ fd, fn, desc = imp.find_module(name)
+ except ImportError:
+ return False
+ if fd is not None:
+ fd.close()
+ tp = desc[2]
+ return tp == imp.PKG_DIRECTORY
+
+ @classmethod
+ def _register_with_pkg_resources(cls):
+ """
+ Ensure package resources can be loaded from this loader. May be called
+ multiple times, as the operation is idempotent.
+ """
+ try:
+ import pkg_resources
+
+ # access an attribute in case a deferred importer is present
+ pkg_resources.__name__
+ except ImportError:
+ return
+
+ # Since pytest tests are always located in the file system, the
+ # DefaultProvider is appropriate.
+ pkg_resources.register_loader_type(cls, pkg_resources.DefaultProvider)
+
+ def get_data(self, pathname):
+ """Optional PEP302 get_data API.
+ """
+ with open(pathname, "rb") as f:
+ return f.read()
+
+
+def _write_pyc(state, co, source_stat, pyc):
+ # Technically, we don't have to have the same pyc format as
+ # (C)Python, since these "pycs" should never be seen by builtin
+    # import. However, there's little reason to deviate, and I hope
+ # sometime to be able to use imp.load_compiled to load them. (See
+ # the comment in load_module above.)
+ try:
+ with atomicwrites.atomic_write(pyc, mode="wb", overwrite=True) as fp:
+ fp.write(imp.get_magic())
+ mtime = int(source_stat.mtime)
+ size = source_stat.size & 0xFFFFFFFF
+ fp.write(struct.pack("<ll", mtime, size))
+ fp.write(marshal.dumps(co))
+ except EnvironmentError as e:
+ state.trace("error writing pyc file at %s: errno=%s" % (pyc, e.errno))
+ # we ignore any failure to write the cache file
+ # there are many reasons, permission-denied, __pycache__ being a
+ # file etc.
+ return False
+ return True
+
+
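+# --- Illustrative sketch, not part of upstream pytest: the cached pyc written
+# above is imp.get_magic(), the "<ll"-packed (mtime, size) pair, then the
+# marshalled code object; _read_pyc() below checks exactly this header. ---
+def _example_read_header(pyc_path):
+    with open(pyc_path, "rb") as fp:
+        magic = fp.read(4)
+        mtime, size = struct.unpack("<ll", fp.read(8))
+    return magic == imp.get_magic(), mtime, size
+
+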
+RN = "\r\n".encode("utf-8")
+N = "\n".encode("utf-8")
+
+cookie_re = re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*[-\w.]+")
+BOM_UTF8 = "\xef\xbb\xbf"
+
+
+def _rewrite_test(config, fn):
+ """Try to read and rewrite *fn* and return the code object."""
+ state = config._assertstate
+ try:
+ stat = fn.stat()
+ source = fn.read("rb")
+ except EnvironmentError:
+ return None, None
+ if ASCII_IS_DEFAULT_ENCODING:
+ # ASCII is the default encoding in Python 2. Without a coding
+ # declaration, Python 2 will complain about any bytes in the file
+ # outside the ASCII range. Sadly, this behavior does not extend to
+ # compile() or ast.parse(), which prefer to interpret the bytes as
+ # latin-1. (At least they properly handle explicit coding cookies.) To
+ # preserve this error behavior, we could force ast.parse() to use ASCII
+ # as the encoding by inserting a coding cookie. Unfortunately, that
+ # messes up line numbers. Thus, we have to check ourselves if anything
+ # is outside the ASCII range in the case no encoding is explicitly
+ # declared. For more context, see issue #269. Yay for Python 3 which
+ # gets this right.
+ end1 = source.find("\n")
+ end2 = source.find("\n", end1 + 1)
+ if (
+ not source.startswith(BOM_UTF8)
+ and cookie_re.match(source[0:end1]) is None
+ and cookie_re.match(source[end1 + 1:end2]) is None
+ ):
+ if hasattr(state, "_indecode"):
+ # encodings imported us again, so don't rewrite.
+ return None, None
+ state._indecode = True
+ try:
+ try:
+ source.decode("ascii")
+ except UnicodeDecodeError:
+ # Let it fail in real import.
+ return None, None
+ finally:
+ del state._indecode
+ try:
+ tree = ast.parse(source)
+ except SyntaxError:
+ # Let this pop up again in the real import.
+ state.trace("failed to parse: %r" % (fn,))
+ return None, None
+ rewrite_asserts(tree, fn, config)
+ try:
+ co = compile(tree, fn.strpath, "exec", dont_inherit=True)
+ except SyntaxError:
+ # It's possible that this error is from some bug in the
+ # assertion rewriting, but I don't know of a fast way to tell.
+ state.trace("failed to compile: %r" % (fn,))
+ return None, None
+ return stat, co
+
+
+def _read_pyc(source, pyc, trace=lambda x: None):
+ """Possibly read a pytest pyc containing rewritten code.
+
+ Return rewritten code if successful or None if not.
+ """
+ try:
+ fp = open(pyc, "rb")
+ except IOError:
+ return None
+ with fp:
+ try:
+ mtime = int(source.mtime())
+ size = source.size()
+ data = fp.read(12)
+ except EnvironmentError as e:
+ trace("_read_pyc(%s): EnvironmentError %s" % (source, e))
+ return None
+ # Check for invalid or out of date pyc file.
+ if (
+ len(data) != 12
+ or data[:4] != imp.get_magic()
+ or struct.unpack("<ll", data[4:]) != (mtime, size)
+ ):
+ trace("_read_pyc(%s): invalid or out of date pyc" % source)
+ return None
+ try:
+ co = marshal.load(fp)
+ except Exception as e:
+ trace("_read_pyc(%s): marshal.load error %s" % (source, e))
+ return None
+ if not isinstance(co, types.CodeType):
+ trace("_read_pyc(%s): not a code object" % source)
+ return None
+ return co
+
+
+def rewrite_asserts(mod, module_path=None, config=None):
+ """Rewrite the assert statements in mod."""
+ AssertionRewriter(module_path, config).run(mod)
+
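+
+# --- Illustrative sketch, not part of upstream pytest: rewriting the asserts
+# of a small module AST in memory and executing the result, mirroring what
+# the import hook above does for test files. ---
+def _example_rewrite_asserts():
+    tree = ast.parse("def check():\n    assert 1 + 1 == 3\n")
+    rewrite_asserts(tree)                       # mutates the AST in place
+    ns = {}
+    py.builtin.exec_(compile(tree, "<example>", "exec"), ns)
+    return ns["check"]                          # raises a detailed AssertionError when called
+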
+
+def _saferepr(obj):
+ """Get a safe repr of an object for assertion error messages.
+
+ The assertion formatting (util.format_explanation()) requires
+ newlines to be escaped since they are a special character for it.
+    Normally assertion.util.format_explanation() does this, but a custom
+    repr may itself contain one of the special escape sequences; in
+    particular '\n{' and '\n}' are likely to be present in JSON reprs.
+
+ """
+ repr = py.io.saferepr(obj)
+ if isinstance(repr, six.text_type):
+ t = six.text_type
+ else:
+ t = six.binary_type
+ return repr.replace(t("\n"), t("\\n"))
+
+
+from _pytest.assertion.util import format_explanation as _format_explanation # noqa
+
+
+def _format_assertmsg(obj):
+ """Format the custom assertion message given.
+
+ For strings this simply replaces newlines with '\n~' so that
+ util.format_explanation() will preserve them instead of escaping
+ newlines. For other objects py.io.saferepr() is used first.
+
+ """
+ # reprlib appears to have a bug which means that if a string
+ # contains a newline it gets escaped, however if an object has a
+ # .__repr__() which contains newlines it does not get escaped.
+ # However in either case we want to preserve the newline.
+ if isinstance(obj, six.text_type) or isinstance(obj, six.binary_type):
+ s = obj
+ is_repr = False
+ else:
+ s = py.io.saferepr(obj)
+ is_repr = True
+ if isinstance(s, six.text_type):
+ t = six.text_type
+ else:
+ t = six.binary_type
+ s = s.replace(t("\n"), t("\n~")).replace(t("%"), t("%%"))
+ if is_repr:
+ s = s.replace(t("\\n"), t("\n~"))
+ return s
+
+
+def _should_repr_global_name(obj):
+ return not hasattr(obj, "__name__") and not callable(obj)
+
+
+def _format_boolop(explanations, is_or):
+ explanation = "(" + (is_or and " or " or " and ").join(explanations) + ")"
+ if isinstance(explanation, six.text_type):
+ t = six.text_type
+ else:
+ t = six.binary_type
+ return explanation.replace(t("%"), t("%%"))
+
+
+def _call_reprcompare(ops, results, expls, each_obj):
+ for i, res, expl in zip(range(len(ops)), results, expls):
+ try:
+ done = not res
+ except Exception:
+ done = True
+ if done:
+ break
+ if util._reprcompare is not None:
+ custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1])
+ if custom is not None:
+ return custom
+ return expl
+
+
+unary_map = {ast.Not: "not %s", ast.Invert: "~%s", ast.USub: "-%s", ast.UAdd: "+%s"}
+
+binop_map = {
+ ast.BitOr: "|",
+ ast.BitXor: "^",
+ ast.BitAnd: "&",
+ ast.LShift: "<<",
+ ast.RShift: ">>",
+ ast.Add: "+",
+ ast.Sub: "-",
+ ast.Mult: "*",
+ ast.Div: "/",
+ ast.FloorDiv: "//",
+ ast.Mod: "%%", # escaped for string formatting
+ ast.Eq: "==",
+ ast.NotEq: "!=",
+ ast.Lt: "<",
+ ast.LtE: "<=",
+ ast.Gt: ">",
+ ast.GtE: ">=",
+ ast.Pow: "**",
+ ast.Is: "is",
+ ast.IsNot: "is not",
+ ast.In: "in",
+ ast.NotIn: "not in",
+}
+# Python 3.5+ compatibility
+try:
+ binop_map[ast.MatMult] = "@"
+except AttributeError:
+ pass
+
+# Python 3.4+ compatibility
+if hasattr(ast, "NameConstant"):
+ _NameConstant = ast.NameConstant
+else:
+
+ def _NameConstant(c):
+ return ast.Name(str(c), ast.Load())
+
+
+def set_location(node, lineno, col_offset):
+ """Set node location information recursively."""
+
+ def _fix(node, lineno, col_offset):
+ if "lineno" in node._attributes:
+ node.lineno = lineno
+ if "col_offset" in node._attributes:
+ node.col_offset = col_offset
+ for child in ast.iter_child_nodes(node):
+ _fix(child, lineno, col_offset)
+
+ _fix(node, lineno, col_offset)
+ return node
+
+
+class AssertionRewriter(ast.NodeVisitor):
+ """Assertion rewriting implementation.
+
+ The main entrypoint is to call .run() with an ast.Module instance,
+ this will then find all the assert statements and rewrite them to
+ provide intermediate values and a detailed assertion error. See
+ http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html
+ for an overview of how this works.
+
+ The entry point here is .run() which will iterate over all the
+ statements in an ast.Module and for each ast.Assert statement it
+ finds call .visit() with it. Then .visit_Assert() takes over and
+ is responsible for creating new ast statements to replace the
+ original assert statement: it rewrites the test of an assertion
+ to provide intermediate values and replace it with an if statement
+ which raises an assertion error with a detailed explanation in
+ case the expression is false.
+
+ For this .visit_Assert() uses the visitor pattern to visit all the
+ AST nodes of the ast.Assert.test field, each visit call returning
+    an AST node and the corresponding explanation string. During this,
+ state is kept in several instance attributes:
+
+ :statements: All the AST statements which will replace the assert
+ statement.
+
+ :variables: This is populated by .variable() with each variable
+ used by the statements so that they can all be set to None at
+ the end of the statements.
+
+ :variable_counter: Counter to create new unique variables needed
+ by statements. Variables are created using .variable() and
+ have the form of "@py_assert0".
+
+ :on_failure: The AST statements which will be executed if the
+ assertion test fails. This is the code which will construct
+        the failure message and raise the AssertionError.
+
+ :explanation_specifiers: A dict filled by .explanation_param()
+ with %-formatting placeholders and their corresponding
+ expressions to use in the building of an assertion message.
+ This is used by .pop_format_context() to build a message.
+
+ :stack: A stack of the explanation_specifiers dicts maintained by
+ .push_format_context() and .pop_format_context() which allows
+       building another %-formatted string while already building one.
+
+ This state is reset on every new assert statement visited and used
+ by the other visitors.
+
+ """
+
+ def __init__(self, module_path, config):
+ super(AssertionRewriter, self).__init__()
+ self.module_path = module_path
+ self.config = config
+
+ def run(self, mod):
+ """Find all assert statements in *mod* and rewrite them."""
+ if not mod.body:
+ # Nothing to do.
+ return
+ # Insert some special imports at the top of the module but after any
+ # docstrings and __future__ imports.
+ aliases = [
+ ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
+ ast.alias("_pytest.assertion.rewrite", "@pytest_ar"),
+ ]
+ doc = getattr(mod, "docstring", None)
+ expect_docstring = doc is None
+ if doc is not None and self.is_rewrite_disabled(doc):
+ return
+ pos = 0
+ lineno = 1
+ for item in mod.body:
+ if (
+ expect_docstring
+ and isinstance(item, ast.Expr)
+ and isinstance(item.value, ast.Str)
+ ):
+ doc = item.value.s
+ if self.is_rewrite_disabled(doc):
+ return
+ expect_docstring = False
+ elif (
+ not isinstance(item, ast.ImportFrom)
+ or item.level > 0
+ or item.module != "__future__"
+ ):
+ lineno = item.lineno
+ break
+ pos += 1
+ else:
+ lineno = item.lineno
+ imports = [
+ ast.Import([alias], lineno=lineno, col_offset=0) for alias in aliases
+ ]
+ mod.body[pos:pos] = imports
+ # Collect asserts.
+ nodes = [mod]
+ while nodes:
+ node = nodes.pop()
+ for name, field in ast.iter_fields(node):
+ if isinstance(field, list):
+ new = []
+ for i, child in enumerate(field):
+ if isinstance(child, ast.Assert):
+ # Transform assert.
+ new.extend(self.visit(child))
+ else:
+ new.append(child)
+ if isinstance(child, ast.AST):
+ nodes.append(child)
+ setattr(node, name, new)
+ elif (
+ isinstance(field, ast.AST)
+ and
+ # Don't recurse into expressions as they can't contain
+ # asserts.
+ not isinstance(field, ast.expr)
+ ):
+ nodes.append(field)
+
+ @staticmethod
+ def is_rewrite_disabled(docstring):
+ return "PYTEST_DONT_REWRITE" in docstring
+
+ def variable(self):
+ """Get a new variable."""
+ # Use a character invalid in python identifiers to avoid clashing.
+ name = "@py_assert" + str(next(self.variable_counter))
+ self.variables.append(name)
+ return name
+
+ def assign(self, expr):
+ """Give *expr* a name."""
+ name = self.variable()
+ self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr))
+ return ast.Name(name, ast.Load())
+
+ def display(self, expr):
+ """Call py.io.saferepr on the expression."""
+ return self.helper("saferepr", expr)
+
+ def helper(self, name, *args):
+ """Call a helper in this module."""
+ py_name = ast.Name("@pytest_ar", ast.Load())
+ attr = ast.Attribute(py_name, "_" + name, ast.Load())
+ return ast_Call(attr, list(args), [])
+
+ def builtin(self, name):
+ """Return the builtin called *name*."""
+ builtin_name = ast.Name("@py_builtins", ast.Load())
+ return ast.Attribute(builtin_name, name, ast.Load())
+
+ def explanation_param(self, expr):
+ """Return a new named %-formatting placeholder for expr.
+
+ This creates a %-formatting placeholder for expr in the
+ current formatting context, e.g. ``%(py0)s``. The placeholder
+ and expr are placed in the current format context so that it
+ can be used on the next call to .pop_format_context().
+
+ """
+ specifier = "py" + str(next(self.variable_counter))
+ self.explanation_specifiers[specifier] = expr
+ return "%(" + specifier + ")s"
+
+ def push_format_context(self):
+ """Create a new formatting context.
+
+ The format context is used for when an explanation wants to
+ have a variable value formatted in the assertion message. In
+ this case the value required can be added using
+ .explanation_param(). Finally .pop_format_context() is used
+ to format a string of %-formatted values as added by
+ .explanation_param().
+
+ """
+ self.explanation_specifiers = {}
+ self.stack.append(self.explanation_specifiers)
+
+ def pop_format_context(self, expl_expr):
+ """Format the %-formatted string with current format context.
+
+ The expl_expr should be an ast.Str instance constructed from
+ the %-placeholders created by .explanation_param(). This will
+ add the required code to format said string to .on_failure and
+ return the ast.Name instance of the formatted string.
+
+ """
+ current = self.stack.pop()
+ if self.stack:
+ self.explanation_specifiers = self.stack[-1]
+ keys = [ast.Str(key) for key in current.keys()]
+ format_dict = ast.Dict(keys, list(current.values()))
+ form = ast.BinOp(expl_expr, ast.Mod(), format_dict)
+ name = "@py_format" + str(next(self.variable_counter))
+ self.on_failure.append(ast.Assign([ast.Name(name, ast.Store())], form))
+ return ast.Name(name, ast.Load())
+
+ def generic_visit(self, node):
+ """Handle expressions we don't have custom code for."""
+ assert isinstance(node, ast.expr)
+ res = self.assign(node)
+ return res, self.explanation_param(self.display(res))
+
+ def visit_Assert(self, assert_):
+ """Return the AST statements to replace the ast.Assert instance.
+
+ This rewrites the test of an assertion to provide
+ intermediate values and replace it with an if statement which
+ raises an assertion error with a detailed explanation in case
+ the expression is false.
+
+ """
+ if isinstance(assert_.test, ast.Tuple) and self.config is not None:
+ fslocation = (self.module_path, assert_.lineno)
+ self.config.warn(
+ "R1",
+ "assertion is always true, perhaps " "remove parentheses?",
+ fslocation=fslocation,
+ )
+ self.statements = []
+ self.variables = []
+ self.variable_counter = itertools.count()
+ self.stack = []
+ self.on_failure = []
+ self.push_format_context()
+ # Rewrite assert into a bunch of statements.
+ top_condition, explanation = self.visit(assert_.test)
+ # Create failure message.
+ body = self.on_failure
+ negation = ast.UnaryOp(ast.Not(), top_condition)
+ self.statements.append(ast.If(negation, body, []))
+ if assert_.msg:
+ assertmsg = self.helper("format_assertmsg", assert_.msg)
+ explanation = "\n>assert " + explanation
+ else:
+ assertmsg = ast.Str("")
+ explanation = "assert " + explanation
+ template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation))
+ msg = self.pop_format_context(template)
+ fmt = self.helper("format_explanation", msg)
+ err_name = ast.Name("AssertionError", ast.Load())
+ exc = ast_Call(err_name, [fmt], [])
+ if sys.version_info[0] >= 3:
+ raise_ = ast.Raise(exc, None)
+ else:
+ raise_ = ast.Raise(exc, None, None)
+ body.append(raise_)
+ # Clear temporary variables by setting them to None.
+ if self.variables:
+ variables = [ast.Name(name, ast.Store()) for name in self.variables]
+ clear = ast.Assign(variables, _NameConstant(None))
+ self.statements.append(clear)
+ # Fix line numbers.
+ for stmt in self.statements:
+ set_location(stmt, assert_.lineno, assert_.col_offset)
+ return self.statements
+
+ def visit_Name(self, name):
+ # Display the repr of the name if it's a local variable or
+ # _should_repr_global_name() thinks it's acceptable.
+ locs = ast_Call(self.builtin("locals"), [], [])
+ inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs])
+ dorepr = self.helper("should_repr_global_name", name)
+ test = ast.BoolOp(ast.Or(), [inlocs, dorepr])
+ expr = ast.IfExp(test, self.display(name), ast.Str(name.id))
+ return name, self.explanation_param(expr)
+
+ def visit_BoolOp(self, boolop):
+ res_var = self.variable()
+ expl_list = self.assign(ast.List([], ast.Load()))
+ app = ast.Attribute(expl_list, "append", ast.Load())
+ is_or = int(isinstance(boolop.op, ast.Or))
+ body = save = self.statements
+ fail_save = self.on_failure
+ levels = len(boolop.values) - 1
+ self.push_format_context()
+ # Process each operand, short-circuiting if needed.
+ for i, v in enumerate(boolop.values):
+ if i:
+ fail_inner = []
+ # cond is set in a prior loop iteration below
+ self.on_failure.append(ast.If(cond, fail_inner, [])) # noqa
+ self.on_failure = fail_inner
+ self.push_format_context()
+ res, expl = self.visit(v)
+ body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
+ expl_format = self.pop_format_context(ast.Str(expl))
+ call = ast_Call(app, [expl_format], [])
+ self.on_failure.append(ast.Expr(call))
+ if i < levels:
+ cond = res
+ if is_or:
+ cond = ast.UnaryOp(ast.Not(), cond)
+ inner = []
+ self.statements.append(ast.If(cond, inner, []))
+ self.statements = body = inner
+ self.statements = save
+ self.on_failure = fail_save
+ expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or))
+ expl = self.pop_format_context(expl_template)
+ return ast.Name(res_var, ast.Load()), self.explanation_param(expl)
+
+ def visit_UnaryOp(self, unary):
+ pattern = unary_map[unary.op.__class__]
+ operand_res, operand_expl = self.visit(unary.operand)
+ res = self.assign(ast.UnaryOp(unary.op, operand_res))
+ return res, pattern % (operand_expl,)
+
+ def visit_BinOp(self, binop):
+ symbol = binop_map[binop.op.__class__]
+ left_expr, left_expl = self.visit(binop.left)
+ right_expr, right_expl = self.visit(binop.right)
+ explanation = "(%s %s %s)" % (left_expl, symbol, right_expl)
+ res = self.assign(ast.BinOp(left_expr, binop.op, right_expr))
+ return res, explanation
+
+ def visit_Call_35(self, call):
+ """
+ visit `ast.Call` nodes on Python3.5 and after
+ """
+ new_func, func_expl = self.visit(call.func)
+ arg_expls = []
+ new_args = []
+ new_kwargs = []
+ for arg in call.args:
+ res, expl = self.visit(arg)
+ arg_expls.append(expl)
+ new_args.append(res)
+ for keyword in call.keywords:
+ res, expl = self.visit(keyword.value)
+ new_kwargs.append(ast.keyword(keyword.arg, res))
+ if keyword.arg:
+ arg_expls.append(keyword.arg + "=" + expl)
+ else: # **args have `arg` keywords with an .arg of None
+ arg_expls.append("**" + expl)
+
+ expl = "%s(%s)" % (func_expl, ", ".join(arg_expls))
+ new_call = ast.Call(new_func, new_args, new_kwargs)
+ res = self.assign(new_call)
+ res_expl = self.explanation_param(self.display(res))
+ outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
+ return res, outer_expl
+
+ def visit_Starred(self, starred):
+ # From Python 3.5, a Starred node can appear in a function call
+ res, expl = self.visit(starred.value)
+ return starred, "*" + expl
+
+ def visit_Call_legacy(self, call):
+ """
+ visit `ast.Call` nodes on Python 3.4 and below
+ """
+ new_func, func_expl = self.visit(call.func)
+ arg_expls = []
+ new_args = []
+ new_kwargs = []
+ new_star = new_kwarg = None
+ for arg in call.args:
+ res, expl = self.visit(arg)
+ new_args.append(res)
+ arg_expls.append(expl)
+ for keyword in call.keywords:
+ res, expl = self.visit(keyword.value)
+ new_kwargs.append(ast.keyword(keyword.arg, res))
+ arg_expls.append(keyword.arg + "=" + expl)
+ if call.starargs:
+ new_star, expl = self.visit(call.starargs)
+ arg_expls.append("*" + expl)
+ if call.kwargs:
+ new_kwarg, expl = self.visit(call.kwargs)
+ arg_expls.append("**" + expl)
+ expl = "%s(%s)" % (func_expl, ", ".join(arg_expls))
+ new_call = ast.Call(new_func, new_args, new_kwargs, new_star, new_kwarg)
+ res = self.assign(new_call)
+ res_expl = self.explanation_param(self.display(res))
+ outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
+ return res, outer_expl
+
+ # ast.Call signature changed on 3.5,
+ # conditionally change which method is named
+ # visit_Call depending on Python version
+ if sys.version_info >= (3, 5):
+ visit_Call = visit_Call_35
+ else:
+ visit_Call = visit_Call_legacy
+
+ def visit_Attribute(self, attr):
+ if not isinstance(attr.ctx, ast.Load):
+ return self.generic_visit(attr)
+ value, value_expl = self.visit(attr.value)
+ res = self.assign(ast.Attribute(value, attr.attr, ast.Load()))
+ res_expl = self.explanation_param(self.display(res))
+ pat = "%s\n{%s = %s.%s\n}"
+ expl = pat % (res_expl, res_expl, value_expl, attr.attr)
+ return res, expl
+
+ def visit_Compare(self, comp):
+ self.push_format_context()
+ left_res, left_expl = self.visit(comp.left)
+ if isinstance(comp.left, (ast.Compare, ast.BoolOp)):
+ left_expl = "({})".format(left_expl)
+ res_variables = [self.variable() for i in range(len(comp.ops))]
+ load_names = [ast.Name(v, ast.Load()) for v in res_variables]
+ store_names = [ast.Name(v, ast.Store()) for v in res_variables]
+ it = zip(range(len(comp.ops)), comp.ops, comp.comparators)
+ expls = []
+ syms = []
+ results = [left_res]
+ for i, op, next_operand in it:
+ next_res, next_expl = self.visit(next_operand)
+ if isinstance(next_operand, (ast.Compare, ast.BoolOp)):
+ next_expl = "({})".format(next_expl)
+ results.append(next_res)
+ sym = binop_map[op.__class__]
+ syms.append(ast.Str(sym))
+ expl = "%s %s %s" % (left_expl, sym, next_expl)
+ expls.append(ast.Str(expl))
+ res_expr = ast.Compare(left_res, [op], [next_res])
+ self.statements.append(ast.Assign([store_names[i]], res_expr))
+ left_res, left_expl = next_res, next_expl
+ # Use pytest.assertion.util._reprcompare if that's available.
+ expl_call = self.helper(
+ "call_reprcompare",
+ ast.Tuple(syms, ast.Load()),
+ ast.Tuple(load_names, ast.Load()),
+ ast.Tuple(expls, ast.Load()),
+ ast.Tuple(results, ast.Load()),
+ )
+ if len(comp.ops) > 1:
+ res = ast.BoolOp(ast.And(), load_names)
+ else:
+ res = load_names[0]
+ return res, self.explanation_param(self.pop_format_context(expl_call))
diff --git a/third_party/python/pytest/src/_pytest/assertion/truncate.py b/third_party/python/pytest/src/_pytest/assertion/truncate.py
new file mode 100644
index 0000000000..79adeca6a9
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/assertion/truncate.py
@@ -0,0 +1,99 @@
+"""
+Utilities for truncating assertion output.
+
+Current default behaviour is to truncate assertion explanations at
+~8 terminal lines, unless running in "-vv" mode or running on CI.
+"""
+from __future__ import absolute_import, division, print_function
+import os
+
+import six
+
+
+DEFAULT_MAX_LINES = 8
+DEFAULT_MAX_CHARS = 8 * 80
+USAGE_MSG = "use '-vv' to show"
+
+
+def truncate_if_required(explanation, item, max_length=None):
+ """
+ Truncate this assertion explanation if the given test item is eligible.
+ """
+ if _should_truncate_item(item):
+ return _truncate_explanation(explanation)
+ return explanation
+
+
+def _should_truncate_item(item):
+ """
+ Whether or not this test item is eligible for truncation.
+ """
+ verbose = item.config.option.verbose
+ return verbose < 2 and not _running_on_ci()
+
+
+def _running_on_ci():
+ """Check if we're currently running on a CI system."""
+ env_vars = ["CI", "BUILD_NUMBER"]
+ return any(var in os.environ for var in env_vars)
+
+
+def _truncate_explanation(input_lines, max_lines=None, max_chars=None):
+ """
+ Truncate given list of strings that makes up the assertion explanation.
+
+ Truncates to either 8 lines, or 640 characters - whichever the input reaches
+ first. The remaining lines will be replaced by a usage message.
+ """
+
+ if max_lines is None:
+ max_lines = DEFAULT_MAX_LINES
+ if max_chars is None:
+ max_chars = DEFAULT_MAX_CHARS
+
+ # Check if truncation required
+ input_char_count = len("".join(input_lines))
+ if len(input_lines) <= max_lines and input_char_count <= max_chars:
+ return input_lines
+
+ # Truncate first to max_lines, and then truncate to max_chars if max_chars
+ # is exceeded.
+ truncated_explanation = input_lines[:max_lines]
+ truncated_explanation = _truncate_by_char_count(truncated_explanation, max_chars)
+
+ # Add ellipsis to final line
+ truncated_explanation[-1] = truncated_explanation[-1] + "..."
+
+ # Append useful message to explanation
+ truncated_line_count = len(input_lines) - len(truncated_explanation)
+ truncated_line_count += 1 # Account for the part-truncated final line
+ msg = "...Full output truncated"
+ if truncated_line_count == 1:
+ msg += " ({} line hidden)".format(truncated_line_count)
+ else:
+ msg += " ({} lines hidden)".format(truncated_line_count)
+ msg += ", {}".format(USAGE_MSG)
+ truncated_explanation.extend([six.text_type(""), six.text_type(msg)])
+ return truncated_explanation
+
+
+def _truncate_by_char_count(input_lines, max_chars):
+ # Check if truncation required
+ if len("".join(input_lines)) <= max_chars:
+ return input_lines
+
+ # Find point at which input length exceeds total allowed length
+ iterated_char_count = 0
+ for iterated_index, input_line in enumerate(input_lines):
+ if iterated_char_count + len(input_line) > max_chars:
+ break
+ iterated_char_count += len(input_line)
+
+ # Create truncated explanation with modified final line
+ truncated_result = input_lines[:iterated_index]
+ final_line = input_lines[iterated_index]
+ if final_line:
+ final_line_truncate_point = max_chars - iterated_char_count
+ final_line = final_line[:final_line_truncate_point]
+ truncated_result.append(final_line)
+ return truncated_result
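# Illustrative sketch (not part of the vendored file): the truncation helper
# applied to an over-long explanation with a small line limit.
from _pytest.assertion.truncate import _truncate_explanation

lines = ["assert summary"] + ["detail line %d" % i for i in range(20)]
short = _truncate_explanation(lines, max_lines=3, max_chars=1000)
# ``short`` keeps the first three lines, appends "..." to the last kept line,
# and ends with "...Full output truncated (19 lines hidden), use '-vv' to show".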
diff --git a/third_party/python/pytest/src/_pytest/assertion/util.py b/third_party/python/pytest/src/_pytest/assertion/util.py
new file mode 100644
index 0000000000..bcb800a4a0
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/assertion/util.py
@@ -0,0 +1,338 @@
+"""Utilities for assertion debugging"""
+from __future__ import absolute_import, division, print_function
+import pprint
+
+import _pytest._code
+import py
+import six
+from ..compat import Sequence
+
+u = six.text_type
+
+# The _reprcompare attribute on the util module is used by the new assertion
+# interpretation code and assertion rewriter to detect this plugin was
+# loaded and in turn call the hooks defined here as part of the
+# DebugInterpreter.
+_reprcompare = None
+
+
+# the re-encoding is needed for python2 repr
+# with non-ascii characters (see issue 877 and 1379)
+def ecu(s):
+ try:
+ return u(s, "utf-8", "replace")
+ except TypeError:
+ return s
+
+
+def format_explanation(explanation):
+ """This formats an explanation
+
+ Normally all embedded newlines are escaped, however there are
+ three exceptions: \n{, \n} and \n~. The first two are intended to
+ cover nested explanations; see the function and attribute explanations
+ for examples (.visit_Call(), .visit_Attribute()). The last one is
+ for when one explanation needs to span multiple lines, e.g. when
+ displaying diffs.
+ """
+ explanation = ecu(explanation)
+ lines = _split_explanation(explanation)
+ result = _format_lines(lines)
+ return u("\n").join(result)
+
+
+def _split_explanation(explanation):
+ """Return a list of individual lines in the explanation
+
+ This will return a list of lines split on '\n{', '\n}' and '\n~'.
+ Any other newlines will be escaped and appear in the line as the
+ literal '\n' characters.
+ """
+ raw_lines = (explanation or u("")).split("\n")
+ lines = [raw_lines[0]]
+ for values in raw_lines[1:]:
+ if values and values[0] in ["{", "}", "~", ">"]:
+ lines.append(values)
+ else:
+ lines[-1] += "\\n" + values
+ return lines
+
+
+def _format_lines(lines):
+ """Format the individual lines
+
+ This will replace the '{', '}' and '~' characters of our mini
+ formatting language with the proper 'where ...', 'and ...' and ' +
+ ...' text, taking care of indentation along the way.
+
+ Return a list of formatted lines.
+ """
+ result = lines[:1]
+ stack = [0]
+ stackcnt = [0]
+ for line in lines[1:]:
+ if line.startswith("{"):
+ if stackcnt[-1]:
+ s = u("and ")
+ else:
+ s = u("where ")
+ stack.append(len(result))
+ stackcnt[-1] += 1
+ stackcnt.append(0)
+ result.append(u(" +") + u(" ") * (len(stack) - 1) + s + line[1:])
+ elif line.startswith("}"):
+ stack.pop()
+ stackcnt.pop()
+ result[stack[-1]] += line[1:]
+ else:
+ assert line[0] in ["~", ">"]
+ stack[-1] += 1
+ indent = len(stack) if line.startswith("~") else len(stack) - 1
+ result.append(u(" ") * indent + line[1:])
+ assert len(stack) == 1
+ return result
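# Illustrative sketch (not part of the vendored file): how the "\n{ ... \n}"
# markers of the explanation mini-language are rendered.  The explanation
# string itself is made up for the example.
from _pytest.assertion.util import format_explanation

print(format_explanation("assert a == b\n{a = f()\n}\n{b = g()\n}"))
# assert a == b
#  + where a = f()
#  + and b = g()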
+
+
+# Provide basestring in python3
+try:
+ basestring = basestring
+except NameError:
+ basestring = str
+
+
+def assertrepr_compare(config, op, left, right):
+ """Return specialised explanations for some operators/operands"""
+ width = 80 - 15 - len(op) - 2 # 15 chars indentation, 1 space around op
+ left_repr = py.io.saferepr(left, maxsize=int(width // 2))
+ right_repr = py.io.saferepr(right, maxsize=width - len(left_repr))
+
+ summary = u("%s %s %s") % (ecu(left_repr), op, ecu(right_repr))
+
+ def issequence(x):
+ return isinstance(x, Sequence) and not isinstance(x, basestring)
+
+ def istext(x):
+ return isinstance(x, basestring)
+
+ def isdict(x):
+ return isinstance(x, dict)
+
+ def isset(x):
+ return isinstance(x, (set, frozenset))
+
+ def isiterable(obj):
+ try:
+ iter(obj)
+ return not istext(obj)
+ except TypeError:
+ return False
+
+ verbose = config.getoption("verbose")
+ explanation = None
+ try:
+ if op == "==":
+ if istext(left) and istext(right):
+ explanation = _diff_text(left, right, verbose)
+ else:
+ if issequence(left) and issequence(right):
+ explanation = _compare_eq_sequence(left, right, verbose)
+ elif isset(left) and isset(right):
+ explanation = _compare_eq_set(left, right, verbose)
+ elif isdict(left) and isdict(right):
+ explanation = _compare_eq_dict(left, right, verbose)
+ if isiterable(left) and isiterable(right):
+ expl = _compare_eq_iterable(left, right, verbose)
+ if explanation is not None:
+ explanation.extend(expl)
+ else:
+ explanation = expl
+ elif op == "not in":
+ if istext(left) and istext(right):
+ explanation = _notin_text(left, right, verbose)
+ except Exception:
+ explanation = [
+ u(
+ "(pytest_assertion plugin: representation of details failed. "
+ "Probably an object has a faulty __repr__.)"
+ ),
+ u(_pytest._code.ExceptionInfo()),
+ ]
+
+ if not explanation:
+ return None
+
+ return [summary] + explanation
+
+
+def _diff_text(left, right, verbose=False):
+ """Return the explanation for the diff between text or bytes
+
+ Unless --verbose is used this will skip leading and trailing
+ characters which are identical to keep the diff minimal.
+
+ If the input are bytes they will be safely converted to text.
+ """
+ from difflib import ndiff
+
+ explanation = []
+
+ def escape_for_readable_diff(binary_text):
+ """
+ Ensures that the internal string is always valid unicode, converting any bytes safely to valid unicode.
+ This is done using repr() which then needs post-processing to fix the encompassing quotes and un-escape
+ newlines and carriage returns (#429).
+ """
+ r = six.text_type(repr(binary_text)[1:-1])
+ r = r.replace(r"\n", "\n")
+ r = r.replace(r"\r", "\r")
+ return r
+
+ if isinstance(left, six.binary_type):
+ left = escape_for_readable_diff(left)
+ if isinstance(right, six.binary_type):
+ right = escape_for_readable_diff(right)
+ if not verbose:
+ i = 0 # just in case left or right has zero length
+ for i in range(min(len(left), len(right))):
+ if left[i] != right[i]:
+ break
+ if i > 42:
+ i -= 10 # Provide some context
+ explanation = [
+ u("Skipping %s identical leading " "characters in diff, use -v to show")
+ % i
+ ]
+ left = left[i:]
+ right = right[i:]
+ if len(left) == len(right):
+ for i in range(len(left)):
+ if left[-i] != right[-i]:
+ break
+ if i > 42:
+ i -= 10 # Provide some context
+ explanation += [
+ u(
+ "Skipping %s identical trailing "
+ "characters in diff, use -v to show"
+ )
+ % i
+ ]
+ left = left[:-i]
+ right = right[:-i]
+ keepends = True
+ if left.isspace() or right.isspace():
+ left = repr(str(left))
+ right = repr(str(right))
+ explanation += [u"Strings contain only whitespace, escaping them using repr()"]
+ explanation += [
+ line.strip("\n")
+ for line in ndiff(left.splitlines(keepends), right.splitlines(keepends))
+ ]
+ return explanation
+
+
+def _compare_eq_iterable(left, right, verbose=False):
+ if not verbose:
+ return [u("Use -v to get the full diff")]
+ # dynamic import to speedup pytest
+ import difflib
+
+ try:
+ left_formatting = pprint.pformat(left).splitlines()
+ right_formatting = pprint.pformat(right).splitlines()
+ explanation = [u("Full diff:")]
+ except Exception:
+ # hack: PrettyPrinter.pformat() in python 2 fails when formatting items that can't be sorted(), ie, calling
+ # sorted() on a list would raise. See issue #718.
+ # As a workaround, the full diff is generated by using the repr() string of each item of each container.
+ left_formatting = sorted(repr(x) for x in left)
+ right_formatting = sorted(repr(x) for x in right)
+ explanation = [u("Full diff (fallback to calling repr on each item):")]
+ explanation.extend(
+ line.strip() for line in difflib.ndiff(left_formatting, right_formatting)
+ )
+ return explanation
+
+
+def _compare_eq_sequence(left, right, verbose=False):
+ explanation = []
+ for i in range(min(len(left), len(right))):
+ if left[i] != right[i]:
+ explanation += [u("At index %s diff: %r != %r") % (i, left[i], right[i])]
+ break
+ if len(left) > len(right):
+ explanation += [
+ u("Left contains more items, first extra item: %s")
+ % py.io.saferepr(left[len(right)])
+ ]
+ elif len(left) < len(right):
+ explanation += [
+ u("Right contains more items, first extra item: %s")
+ % py.io.saferepr(right[len(left)])
+ ]
+ return explanation
+
+
+def _compare_eq_set(left, right, verbose=False):
+ explanation = []
+ diff_left = left - right
+ diff_right = right - left
+ if diff_left:
+ explanation.append(u("Extra items in the left set:"))
+ for item in diff_left:
+ explanation.append(py.io.saferepr(item))
+ if diff_right:
+ explanation.append(u("Extra items in the right set:"))
+ for item in diff_right:
+ explanation.append(py.io.saferepr(item))
+ return explanation
+
+
+def _compare_eq_dict(left, right, verbose=False):
+ explanation = []
+ common = set(left).intersection(set(right))
+ same = {k: left[k] for k in common if left[k] == right[k]}
+ if same and verbose < 2:
+ explanation += [u("Omitting %s identical items, use -vv to show") % len(same)]
+ elif same:
+ explanation += [u("Common items:")]
+ explanation += pprint.pformat(same).splitlines()
+ diff = {k for k in common if left[k] != right[k]}
+ if diff:
+ explanation += [u("Differing items:")]
+ for k in diff:
+ explanation += [
+ py.io.saferepr({k: left[k]}) + " != " + py.io.saferepr({k: right[k]})
+ ]
+ extra_left = set(left) - set(right)
+ if extra_left:
+ explanation.append(u("Left contains more items:"))
+ explanation.extend(
+ pprint.pformat({k: left[k] for k in extra_left}).splitlines()
+ )
+ extra_right = set(right) - set(left)
+ if extra_right:
+ explanation.append(u("Right contains more items:"))
+ explanation.extend(
+ pprint.pformat({k: right[k] for k in extra_right}).splitlines()
+ )
+ return explanation
+
+
+def _notin_text(term, text, verbose=False):
+ index = text.find(term)
+ head = text[:index]
+ tail = text[index + len(term):]
+ correct_text = head + tail
+ diff = _diff_text(correct_text, text, verbose)
+ newdiff = [u("%s is contained here:") % py.io.saferepr(term, maxsize=42)]
+ for line in diff:
+ if line.startswith(u("Skipping")):
+ continue
+ if line.startswith(u("- ")):
+ continue
+ if line.startswith(u("+ ")):
+ newdiff.append(u(" ") + line[2:])
+ else:
+ newdiff.append(line)
+ return newdiff
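# Illustrative sketch (not part of the vendored file): the specialised
# comparison helpers can also be exercised directly.
from _pytest.assertion.util import _compare_eq_sequence, _compare_eq_set

print(_compare_eq_sequence([1, 2, 3], [1, 9, 3]))
# ['At index 1 diff: 2 != 9']
print(_compare_eq_set({1, 2, 3}, {2, 3, 4}))
# ['Extra items in the left set:', '1', 'Extra items in the right set:', '4']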
diff --git a/third_party/python/pytest/src/_pytest/cacheprovider.py b/third_party/python/pytest/src/_pytest/cacheprovider.py
new file mode 100755
index 0000000000..eb0fcc06f9
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/cacheprovider.py
@@ -0,0 +1,339 @@
+"""
+merged implementation of the cache provider
+
+the name cache was not chosen to ensure pluggy automatically
+ignores the external pytest-cache
+"""
+from __future__ import absolute_import, division, print_function
+
+from collections import OrderedDict
+
+import py
+import six
+
+import pytest
+import json
+import os
+from os.path import sep as _sep, altsep as _altsep
+
+
+class Cache(object):
+
+ def __init__(self, config):
+ self.config = config
+ self._cachedir = Cache.cache_dir_from_config(config)
+ self.trace = config.trace.root.get("cache")
+ if config.getoption("cacheclear"):
+ self.trace("clearing cachedir")
+ if self._cachedir.check():
+ self._cachedir.remove()
+ self._cachedir.mkdir()
+
+ @staticmethod
+ def cache_dir_from_config(config):
+ cache_dir = config.getini("cache_dir")
+ cache_dir = os.path.expanduser(cache_dir)
+ cache_dir = os.path.expandvars(cache_dir)
+ if os.path.isabs(cache_dir):
+ return py.path.local(cache_dir)
+ else:
+ return config.rootdir.join(cache_dir)
+
+ def makedir(self, name):
+ """ return a directory path object with the given name. If the
+ directory does not yet exist, it will be created. You can use it
+ to manage files, e.g. to store/retrieve database
+ dumps across test sessions.
+
+ :param name: must be a string not containing a ``/`` separator.
+ Make sure the name contains your plugin or application
+ identifiers to prevent clashes with other cache users.
+ """
+ if _sep in name or _altsep is not None and _altsep in name:
+ raise ValueError("name is not allowed to contain path separators")
+ return self._cachedir.ensure_dir("d", name)
+
+ def _getvaluepath(self, key):
+ return self._cachedir.join("v", *key.split("/"))
+
+ def get(self, key, default):
+ """ return cached value for the given key. If no value
+ was yet cached or the value cannot be read, the specified
+ default is returned.
+
+ :param key: must be a ``/`` separated value. Usually the first
+ name is the name of your plugin or your application.
+ :param default: must be provided in case of a cache-miss or
+ invalid cache values.
+
+ """
+ path = self._getvaluepath(key)
+ if path.check():
+ try:
+ with path.open("r") as f:
+ return json.load(f)
+ except ValueError:
+ self.trace("cache-invalid at %s" % (path,))
+ return default
+
+ def set(self, key, value):
+ """ save value for the given key.
+
+ :param key: must be a ``/`` separated value. Usually the first
+ name is the name of your plugin or your application.
+ :param value: must be of any combination of basic
+ python types, including nested types
+ like e. g. lists of dictionaries.
+ """
+ path = self._getvaluepath(key)
+ try:
+ path.dirpath().ensure_dir()
+ except (py.error.EEXIST, py.error.EACCES):
+ self.config.warn(
+ code="I9", message="could not create cache path %s" % (path,)
+ )
+ return
+ try:
+ f = path.open("w")
+ except py.error.ENOTDIR:
+ self.config.warn(
+ code="I9", message="cache could not write path %s" % (path,)
+ )
+ else:
+ with f:
+ self.trace("cache-write %s: %r" % (key, value))
+ json.dump(value, f, indent=2, sort_keys=True)
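# Illustrative sketch (not part of the vendored file): how a plugin hook might
# use the Cache API through ``config.cache`` (attached in pytest_configure
# further down).  The "myplugin/..." key and the directory name are made up.
def record_run_count(config):
    runs = config.cache.get("myplugin/run_count", 0)
    config.cache.set("myplugin/run_count", runs + 1)
    dumps_dir = config.cache.makedir("myplugin_dumps")  # py.path.local directory
    return runs, dumps_dir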
+
+
+class LFPlugin(object):
+ """ Plugin which implements the --lf (run last-failing) option """
+
+ def __init__(self, config):
+ self.config = config
+ active_keys = "lf", "failedfirst"
+ self.active = any(config.getoption(key) for key in active_keys)
+ self.lastfailed = config.cache.get("cache/lastfailed", {})
+ self._previously_failed_count = None
+ self._no_failures_behavior = self.config.getoption("last_failed_no_failures")
+
+ def pytest_report_collectionfinish(self):
+ if self.active:
+ if not self._previously_failed_count:
+ mode = "run {} (no recorded failures)".format(
+ self._no_failures_behavior
+ )
+ else:
+ noun = "failure" if self._previously_failed_count == 1 else "failures"
+ suffix = " first" if self.config.getoption("failedfirst") else ""
+ mode = "rerun previous {count} {noun}{suffix}".format(
+ count=self._previously_failed_count, suffix=suffix, noun=noun
+ )
+ return "run-last-failure: %s" % mode
+
+ def pytest_runtest_logreport(self, report):
+ if (report.when == "call" and report.passed) or report.skipped:
+ self.lastfailed.pop(report.nodeid, None)
+ elif report.failed:
+ self.lastfailed[report.nodeid] = True
+
+ def pytest_collectreport(self, report):
+ passed = report.outcome in ("passed", "skipped")
+ if passed:
+ if report.nodeid in self.lastfailed:
+ self.lastfailed.pop(report.nodeid)
+ self.lastfailed.update((item.nodeid, True) for item in report.result)
+ else:
+ self.lastfailed[report.nodeid] = True
+
+ def pytest_collection_modifyitems(self, session, config, items):
+ if self.active:
+ if self.lastfailed:
+ previously_failed = []
+ previously_passed = []
+ for item in items:
+ if item.nodeid in self.lastfailed:
+ previously_failed.append(item)
+ else:
+ previously_passed.append(item)
+ self._previously_failed_count = len(previously_failed)
+ if not previously_failed:
+ # running a subset of all tests with recorded failures outside
+ # of the set of tests currently executing
+ return
+ if self.config.getoption("lf"):
+ items[:] = previously_failed
+ config.hook.pytest_deselected(items=previously_passed)
+ else:
+ items[:] = previously_failed + previously_passed
+ elif self._no_failures_behavior == "none":
+ config.hook.pytest_deselected(items=items)
+ items[:] = []
+
+ def pytest_sessionfinish(self, session):
+ config = self.config
+ if config.getoption("cacheshow") or hasattr(config, "slaveinput"):
+ return
+
+ saved_lastfailed = config.cache.get("cache/lastfailed", {})
+ if saved_lastfailed != self.lastfailed:
+ config.cache.set("cache/lastfailed", self.lastfailed)
+
+
+class NFPlugin(object):
+ """ Plugin which implements the --nf (run new-first) option """
+
+ def __init__(self, config):
+ self.config = config
+ self.active = config.option.newfirst
+ self.cached_nodeids = config.cache.get("cache/nodeids", [])
+
+ def pytest_collection_modifyitems(self, session, config, items):
+ if self.active:
+ new_items = OrderedDict()
+ other_items = OrderedDict()
+ for item in items:
+ if item.nodeid not in self.cached_nodeids:
+ new_items[item.nodeid] = item
+ else:
+ other_items[item.nodeid] = item
+
+ items[:] = self._get_increasing_order(
+ six.itervalues(new_items)
+ ) + self._get_increasing_order(
+ six.itervalues(other_items)
+ )
+ self.cached_nodeids = [x.nodeid for x in items if isinstance(x, pytest.Item)]
+
+ def _get_increasing_order(self, items):
+ return sorted(items, key=lambda item: item.fspath.mtime(), reverse=True)
+
+ def pytest_sessionfinish(self, session):
+ config = self.config
+ if config.getoption("cacheshow") or hasattr(config, "slaveinput"):
+ return
+
+ config.cache.set("cache/nodeids", self.cached_nodeids)
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group.addoption(
+ "--lf",
+ "--last-failed",
+ action="store_true",
+ dest="lf",
+ help="rerun only the tests that failed "
+ "at the last run (or all if none failed)",
+ )
+ group.addoption(
+ "--ff",
+ "--failed-first",
+ action="store_true",
+ dest="failedfirst",
+ help="run all tests but run the last failures first. "
+ "This may re-order tests and thus lead to "
+ "repeated fixture setup/teardown",
+ )
+ group.addoption(
+ "--nf",
+ "--new-first",
+ action="store_true",
+ dest="newfirst",
+ help="run tests from new files first, then the rest of the tests "
+ "sorted by file mtime",
+ )
+ group.addoption(
+ "--cache-show",
+ action="store_true",
+ dest="cacheshow",
+ help="show cache contents, don't perform collection or tests",
+ )
+ group.addoption(
+ "--cache-clear",
+ action="store_true",
+ dest="cacheclear",
+ help="remove all cache contents at start of test run.",
+ )
+ parser.addini("cache_dir", default=".pytest_cache", help="cache directory path.")
+ group.addoption(
+ "--lfnf",
+ "--last-failed-no-failures",
+ action="store",
+ dest="last_failed_no_failures",
+ choices=("all", "none"),
+ default="all",
+ help="change the behavior when no test failed in the last run or no "
+ "information about the last failures was found in the cache",
+ )
+
+
+def pytest_cmdline_main(config):
+ if config.option.cacheshow:
+ from _pytest.main import wrap_session
+
+ return wrap_session(config, cacheshow)
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_configure(config):
+ config.cache = Cache(config)
+ config.pluginmanager.register(LFPlugin(config), "lfplugin")
+ config.pluginmanager.register(NFPlugin(config), "nfplugin")
+
+
+@pytest.fixture
+def cache(request):
+ """
+ Return a cache object that can persist state between testing sessions.
+
+ cache.get(key, default)
+ cache.set(key, value)
+
+ Keys must be a ``/`` separated value, where the first part is usually the
+ name of your plugin or application to avoid clashes with other cache users.
+
+ Values can be any object handled by the json stdlib module.
+ """
+ return request.config.cache
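# Illustrative sketch (not part of the vendored file): a test using the
# ``cache`` fixture above to carry a value across test sessions.  The
# "example/last_value" key is made up.
def test_remembers_last_value(cache):
    previous = cache.get("example/last_value", None)  # None on the very first run
    cache.set("example/last_value", 42)
    assert previous in (None, 42)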
+
+
+def pytest_report_header(config):
+ if config.option.verbose:
+ relpath = py.path.local().bestrelpath(config.cache._cachedir)
+ return "cachedir: %s" % relpath
+
+
+def cacheshow(config, session):
+ from pprint import pprint
+
+ tw = py.io.TerminalWriter()
+ tw.line("cachedir: " + str(config.cache._cachedir))
+ if not config.cache._cachedir.check():
+ tw.line("cache is empty")
+ return 0
+ dummy = object()
+ basedir = config.cache._cachedir
+ vdir = basedir.join("v")
+ tw.sep("-", "cache values")
+ for valpath in sorted(vdir.visit(lambda x: x.isfile())):
+ key = valpath.relto(vdir).replace(valpath.sep, "/")
+ val = config.cache.get(key, dummy)
+ if val is dummy:
+ tw.line("%s contains unreadable content, " "will be ignored" % key)
+ else:
+ tw.line("%s contains:" % key)
+ stream = py.io.TextIO()
+ pprint(val, stream=stream)
+ for line in stream.getvalue().splitlines():
+ tw.line(" " + line)
+
+ ddir = basedir.join("d")
+ if ddir.isdir() and ddir.listdir():
+ tw.sep("-", "cache directories")
+ for p in sorted(basedir.join("d").visit()):
+ # if p.check(dir=1):
+ # print("%s/" % p.relto(basedir))
+ if p.isfile():
+ key = p.relto(basedir)
+ tw.line("%s is a file of length %d" % (key, p.size()))
+ return 0
diff --git a/third_party/python/pytest/src/_pytest/capture.py b/third_party/python/pytest/src/_pytest/capture.py
new file mode 100644
index 0000000000..7a57adb752
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/capture.py
@@ -0,0 +1,724 @@
+"""
+per-test stdout/stderr capturing mechanism.
+
+"""
+from __future__ import absolute_import, division, print_function
+
+import collections
+import contextlib
+import sys
+import os
+import io
+from io import UnsupportedOperation
+from tempfile import TemporaryFile
+
+import six
+import pytest
+from _pytest.compat import CaptureIO
+
+
+patchsysdict = {0: "stdin", 1: "stdout", 2: "stderr"}
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group._addoption(
+ "--capture",
+ action="store",
+ default="fd" if hasattr(os, "dup") else "sys",
+ metavar="method",
+ choices=["fd", "sys", "no"],
+ help="per-test capturing method: one of fd|sys|no.",
+ )
+ group._addoption(
+ "-s",
+ action="store_const",
+ const="no",
+ dest="capture",
+ help="shortcut for --capture=no.",
+ )
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_load_initial_conftests(early_config, parser, args):
+ ns = early_config.known_args_namespace
+ if ns.capture == "fd":
+ _py36_windowsconsoleio_workaround(sys.stdout)
+ _colorama_workaround()
+ _readline_workaround()
+ pluginmanager = early_config.pluginmanager
+ capman = CaptureManager(ns.capture)
+ pluginmanager.register(capman, "capturemanager")
+
+ # make sure that capturemanager is properly reset at final shutdown
+ early_config.add_cleanup(capman.stop_global_capturing)
+
+ # make sure logging does not raise exceptions at the end
+ def silence_logging_at_shutdown():
+ if "logging" in sys.modules:
+ sys.modules["logging"].raiseExceptions = False
+
+ early_config.add_cleanup(silence_logging_at_shutdown)
+
+ # finally trigger conftest loading but while capturing (issue93)
+ capman.start_global_capturing()
+ outcome = yield
+ out, err = capman.suspend_global_capture()
+ if outcome.excinfo is not None:
+ sys.stdout.write(out)
+ sys.stderr.write(err)
+
+
+class CaptureManager(object):
+ """
+ Capture plugin which makes sure that the appropriate capture method is enabled/disabled during collection
+ and each test phase (setup, call, teardown). After each of those points, the captured output is obtained and
+ attached to the collection/runtest report.
+
+ There are two levels of capture:
+ * global: which is enabled by default and can be suppressed by the ``-s`` option. This is always enabled/disabled
+ during collection and each test phase.
+ * fixture: when a test function or one of its fixtures depends on the ``capsys`` or ``capfd`` fixtures. In this
+ case special handling is needed to ensure the fixtures take precedence over the global capture.
+ """
+
+ def __init__(self, method):
+ self._method = method
+ self._global_capturing = None
+
+ def _getcapture(self, method):
+ if method == "fd":
+ return MultiCapture(out=True, err=True, Capture=FDCapture)
+ elif method == "sys":
+ return MultiCapture(out=True, err=True, Capture=SysCapture)
+ elif method == "no":
+ return MultiCapture(out=False, err=False, in_=False)
+ else:
+ raise ValueError("unknown capturing method: %r" % method)
+
+ def start_global_capturing(self):
+ assert self._global_capturing is None
+ self._global_capturing = self._getcapture(self._method)
+ self._global_capturing.start_capturing()
+
+ def stop_global_capturing(self):
+ if self._global_capturing is not None:
+ self._global_capturing.pop_outerr_to_orig()
+ self._global_capturing.stop_capturing()
+ self._global_capturing = None
+
+ def resume_global_capture(self):
+ self._global_capturing.resume_capturing()
+
+ def suspend_global_capture(self, item=None, in_=False):
+ if item is not None:
+ self.deactivate_fixture(item)
+ cap = getattr(self, "_global_capturing", None)
+ if cap is not None:
+ try:
+ outerr = cap.readouterr()
+ finally:
+ cap.suspend_capturing(in_=in_)
+ return outerr
+
+ def activate_fixture(self, item):
+ """If the current item is using ``capsys`` or ``capfd``, activate them so they take precedence over
+ the global capture.
+ """
+ fixture = getattr(item, "_capture_fixture", None)
+ if fixture is not None:
+ fixture._start()
+
+ def deactivate_fixture(self, item):
+ """Deactivates the ``capsys`` or ``capfd`` fixture of this item, if any."""
+ fixture = getattr(item, "_capture_fixture", None)
+ if fixture is not None:
+ fixture.close()
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_make_collect_report(self, collector):
+ if isinstance(collector, pytest.File):
+ self.resume_global_capture()
+ outcome = yield
+ out, err = self.suspend_global_capture()
+ rep = outcome.get_result()
+ if out:
+ rep.sections.append(("Captured stdout", out))
+ if err:
+ rep.sections.append(("Captured stderr", err))
+ else:
+ yield
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_setup(self, item):
+ self.resume_global_capture()
+ # no need to activate a capture fixture because they activate themselves during creation; this
+ # only makes sense when a fixture uses a capture fixture, otherwise the capture fixture will
+ # be activated during pytest_runtest_call
+ yield
+ self.suspend_capture_item(item, "setup")
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_call(self, item):
+ self.resume_global_capture()
+ # it is important to activate this fixture during the call phase so it overwrites the "global"
+ # capture
+ self.activate_fixture(item)
+ yield
+ self.suspend_capture_item(item, "call")
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_teardown(self, item):
+ self.resume_global_capture()
+ self.activate_fixture(item)
+ yield
+ self.suspend_capture_item(item, "teardown")
+
+ @pytest.hookimpl(tryfirst=True)
+ def pytest_keyboard_interrupt(self, excinfo):
+ self.stop_global_capturing()
+
+ @pytest.hookimpl(tryfirst=True)
+ def pytest_internalerror(self, excinfo):
+ self.stop_global_capturing()
+
+ def suspend_capture_item(self, item, when, in_=False):
+ out, err = self.suspend_global_capture(item, in_=in_)
+ item.add_report_section(when, "stdout", out)
+ item.add_report_section(when, "stderr", err)
+
+
+capture_fixtures = {"capfd", "capfdbinary", "capsys", "capsysbinary"}
+
+
+def _ensure_only_one_capture_fixture(request, name):
+ fixtures = set(request.fixturenames) & capture_fixtures - {name}
+ if fixtures:
+ fixtures = sorted(fixtures)
+ fixtures = fixtures[0] if len(fixtures) == 1 else fixtures
+ raise request.raiseerror(
+ "cannot use {} and {} at the same time".format(fixtures, name)
+ )
+
+
+@pytest.fixture
+def capsys(request):
+ """Enable capturing of writes to ``sys.stdout`` and ``sys.stderr`` and make
+ captured output available via ``capsys.readouterr()`` method calls
+ which return a ``(out, err)`` namedtuple. ``out`` and ``err`` will be ``text``
+ objects.
+ """
+ _ensure_only_one_capture_fixture(request, "capsys")
+ with _install_capture_fixture_on_item(request, SysCapture) as fixture:
+ yield fixture
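# Illustrative sketch (not part of the vendored file): typical use of the
# ``capsys`` fixture defined above.
def test_prints_greeting(capsys):
    print("hello")
    out, err = capsys.readouterr()  # (out, err) namedtuple of text
    assert out == "hello\n"
    assert err == ""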
+
+
+@pytest.fixture
+def capsysbinary(request):
+ """Enable capturing of writes to ``sys.stdout`` and ``sys.stderr`` and make
+ captured output available via ``capsysbinary.readouterr()`` method calls
+ which return a ``(out, err)`` tuple. ``out`` and ``err`` will be ``bytes``
+ objects.
+ """
+ _ensure_only_one_capture_fixture(request, "capsysbinary")
+ # Currently, the implementation uses the python3 specific `.buffer`
+ # property of CaptureIO.
+ if sys.version_info < (3,):
+ raise request.raiseerror("capsysbinary is only supported on python 3")
+ with _install_capture_fixture_on_item(request, SysCaptureBinary) as fixture:
+ yield fixture
+
+
+@pytest.fixture
+def capfd(request):
+ """Enable capturing of writes to file descriptors ``1`` and ``2`` and make
+ captured output available via ``capfd.readouterr()`` method calls
+ which return a ``(out, err)`` tuple. ``out`` and ``err`` will be ``text``
+ objects.
+ """
+ _ensure_only_one_capture_fixture(request, "capfd")
+ if not hasattr(os, "dup"):
+ pytest.skip(
+ "capfd fixture needs os.dup function which is not available in this system"
+ )
+ with _install_capture_fixture_on_item(request, FDCapture) as fixture:
+ yield fixture
+
+
+@pytest.fixture
+def capfdbinary(request):
+ """Enable capturing of write to file descriptors 1 and 2 and make
+ captured output available via ``capfdbinary.readouterr`` method calls
+ which return a ``(out, err)`` tuple. ``out`` and ``err`` will be
+ ``bytes`` objects.
+ """
+ _ensure_only_one_capture_fixture(request, "capfdbinary")
+ if not hasattr(os, "dup"):
+ pytest.skip(
+ "capfdbinary fixture needs os.dup function which is not available in this system"
+ )
+ with _install_capture_fixture_on_item(request, FDCaptureBinary) as fixture:
+ yield fixture
+
+
+@contextlib.contextmanager
+def _install_capture_fixture_on_item(request, capture_class):
+ """
+ Context manager which creates a ``CaptureFixture`` instance and "installs" it on
+ the item/node of the given request. Used by ``capsys`` and ``capfd``.
+
+ The CaptureFixture is added as an attribute of the item because it needs to be accessed
+ by ``CaptureManager`` during its ``pytest_runtest_*`` hooks.
+ """
+ request.node._capture_fixture = fixture = CaptureFixture(capture_class, request)
+ capmanager = request.config.pluginmanager.getplugin("capturemanager")
+ # need to activate this fixture right away in case it is being used by another fixture (setup phase)
+ # if this fixture is being used only by a test function (call phase), then we wouldn't need this
+ # activation, but it doesn't hurt
+ capmanager.activate_fixture(request.node)
+ yield fixture
+ fixture.close()
+ del request.node._capture_fixture
+
+
+class CaptureFixture(object):
+ """
+ Object returned by :py:func:`capsys`, :py:func:`capsysbinary`, :py:func:`capfd` and :py:func:`capfdbinary`
+ fixtures.
+ """
+
+ def __init__(self, captureclass, request):
+ self.captureclass = captureclass
+ self.request = request
+
+ def _start(self):
+ self._capture = MultiCapture(
+ out=True, err=True, in_=False, Capture=self.captureclass
+ )
+ self._capture.start_capturing()
+
+ def close(self):
+ cap = self.__dict__.pop("_capture", None)
+ if cap is not None:
+ self._outerr = cap.pop_outerr_to_orig()
+ cap.stop_capturing()
+
+ def readouterr(self):
+ """Read and return the captured output so far, resetting the internal buffer.
+
+ :return: captured content as a namedtuple with ``out`` and ``err`` string attributes
+ """
+ try:
+ return self._capture.readouterr()
+ except AttributeError:
+ return self._outerr
+
+ @contextlib.contextmanager
+ def disabled(self):
+ """Temporarily disables capture while inside the 'with' block."""
+ self._capture.suspend_capturing()
+ capmanager = self.request.config.pluginmanager.getplugin("capturemanager")
+ capmanager.suspend_global_capture(item=None, in_=False)
+ try:
+ yield
+ finally:
+ capmanager.resume_global_capture()
+ self._capture.resume_capturing()
+
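# Illustrative sketch (not part of the vendored file): ``disabled()`` lets a
# captured test write straight to the real terminal for a moment.
def test_progress_message(capsys):
    with capsys.disabled():
        print("this line bypasses capturing and reaches the terminal")
    print("this line is captured")
    out, _ = capsys.readouterr()
    assert out == "this line is captured\n"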
+
+def safe_text_dupfile(f, mode, default_encoding="UTF8"):
+ """ return an open text file object that's a duplicate of f on the
+ FD-level if possible.
+ """
+ encoding = getattr(f, "encoding", None)
+ try:
+ fd = f.fileno()
+ except Exception:
+ if "b" not in getattr(f, "mode", "") and hasattr(f, "encoding"):
+ # we seem to have a text stream, let's just use it
+ return f
+ else:
+ newfd = os.dup(fd)
+ if "b" not in mode:
+ mode += "b"
+ f = os.fdopen(newfd, mode, 0) # no buffering
+ return EncodedFile(f, encoding or default_encoding)
+
+
+class EncodedFile(object):
+ errors = "strict" # possibly needed by py3 code (issue555)
+
+ def __init__(self, buffer, encoding):
+ self.buffer = buffer
+ self.encoding = encoding
+
+ def write(self, obj):
+ if isinstance(obj, six.text_type):
+ obj = obj.encode(self.encoding, "replace")
+ self.buffer.write(obj)
+
+ def writelines(self, linelist):
+ data = "".join(linelist)
+ self.write(data)
+
+ @property
+ def name(self):
+ """Ensure that file.name is a string."""
+ return repr(self.buffer)
+
+ def __getattr__(self, name):
+ return getattr(object.__getattribute__(self, "buffer"), name)
+
+
+CaptureResult = collections.namedtuple("CaptureResult", ["out", "err"])
+
+
+class MultiCapture(object):
+ out = err = in_ = None
+
+ def __init__(self, out=True, err=True, in_=True, Capture=None):
+ if in_:
+ self.in_ = Capture(0)
+ if out:
+ self.out = Capture(1)
+ if err:
+ self.err = Capture(2)
+
+ def start_capturing(self):
+ if self.in_:
+ self.in_.start()
+ if self.out:
+ self.out.start()
+ if self.err:
+ self.err.start()
+
+ def pop_outerr_to_orig(self):
+ """ pop current snapshot out/err capture and flush to orig streams. """
+ out, err = self.readouterr()
+ if out:
+ self.out.writeorg(out)
+ if err:
+ self.err.writeorg(err)
+ return out, err
+
+ def suspend_capturing(self, in_=False):
+ if self.out:
+ self.out.suspend()
+ if self.err:
+ self.err.suspend()
+ if in_ and self.in_:
+ self.in_.suspend()
+ self._in_suspended = True
+
+ def resume_capturing(self):
+ if self.out:
+ self.out.resume()
+ if self.err:
+ self.err.resume()
+ if hasattr(self, "_in_suspended"):
+ self.in_.resume()
+ del self._in_suspended
+
+ def stop_capturing(self):
+ """ stop capturing and reset capturing streams """
+ if hasattr(self, "_reset"):
+ raise ValueError("was already stopped")
+ self._reset = True
+ if self.out:
+ self.out.done()
+ if self.err:
+ self.err.done()
+ if self.in_:
+ self.in_.done()
+
+ def readouterr(self):
+ """ return snapshot unicode value of stdout/stderr capturings. """
+ return CaptureResult(
+ self.out.snap() if self.out is not None else "",
+ self.err.snap() if self.err is not None else "",
+ )
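# Illustrative sketch (not part of the vendored file): MultiCapture used
# directly with the in-process SysCapture backend (defined further down).
def demo_multicapture():
    cap = MultiCapture(out=True, err=True, in_=False, Capture=SysCapture)
    cap.start_capturing()
    print("captured line")
    out, err = cap.readouterr()  # CaptureResult(out='captured line\n', err='')
    cap.stop_capturing()
    return out, err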
+
+
+class NoCapture(object):
+ __init__ = start = done = suspend = resume = lambda *args: None
+
+
+class FDCaptureBinary(object):
+ """Capture IO to/from a given os-level filedescriptor.
+
+ snap() produces `bytes`
+ """
+
+ def __init__(self, targetfd, tmpfile=None):
+ self.targetfd = targetfd
+ try:
+ self.targetfd_save = os.dup(self.targetfd)
+ except OSError:
+ self.start = lambda: None
+ self.done = lambda: None
+ else:
+ if targetfd == 0:
+ assert not tmpfile, "cannot set tmpfile with stdin"
+ tmpfile = open(os.devnull, "r")
+ self.syscapture = SysCapture(targetfd)
+ else:
+ if tmpfile is None:
+ f = TemporaryFile()
+ with f:
+ tmpfile = safe_text_dupfile(f, mode="wb+")
+ if targetfd in patchsysdict:
+ self.syscapture = SysCapture(targetfd, tmpfile)
+ else:
+ self.syscapture = NoCapture()
+ self.tmpfile = tmpfile
+ self.tmpfile_fd = tmpfile.fileno()
+
+ def __repr__(self):
+ return "<FDCapture %s oldfd=%s>" % (self.targetfd, self.targetfd_save)
+
+ def start(self):
+ """ Start capturing on targetfd using memorized tmpfile. """
+ try:
+ os.fstat(self.targetfd_save)
+ except (AttributeError, OSError):
+ raise ValueError("saved filedescriptor not valid anymore")
+ os.dup2(self.tmpfile_fd, self.targetfd)
+ self.syscapture.start()
+
+ def snap(self):
+ self.tmpfile.seek(0)
+ res = self.tmpfile.read()
+ self.tmpfile.seek(0)
+ self.tmpfile.truncate()
+ return res
+
+ def done(self):
+ """ stop capturing, restore streams, return original capture file,
+ seeked to position zero. """
+ targetfd_save = self.__dict__.pop("targetfd_save")
+ os.dup2(targetfd_save, self.targetfd)
+ os.close(targetfd_save)
+ self.syscapture.done()
+ _attempt_to_close_capture_file(self.tmpfile)
+
+ def suspend(self):
+ self.syscapture.suspend()
+ os.dup2(self.targetfd_save, self.targetfd)
+
+ def resume(self):
+ self.syscapture.resume()
+ os.dup2(self.tmpfile_fd, self.targetfd)
+
+ def writeorg(self, data):
+ """ write to original file descriptor. """
+ if isinstance(data, six.text_type):
+ data = data.encode("utf8") # XXX use encoding of original stream
+ os.write(self.targetfd_save, data)
+
+
+class FDCapture(FDCaptureBinary):
+ """Capture IO to/from a given os-level filedescriptor.
+
+ snap() produces text
+ """
+
+ def snap(self):
+ res = FDCaptureBinary.snap(self)
+ enc = getattr(self.tmpfile, "encoding", None)
+ if enc and isinstance(res, bytes):
+ res = six.text_type(res, enc, "replace")
+ return res
+
+
+class SysCapture(object):
+
+ def __init__(self, fd, tmpfile=None):
+ name = patchsysdict[fd]
+ self._old = getattr(sys, name)
+ self.name = name
+ if tmpfile is None:
+ if name == "stdin":
+ tmpfile = DontReadFromInput()
+ else:
+ tmpfile = CaptureIO()
+ self.tmpfile = tmpfile
+
+ def start(self):
+ setattr(sys, self.name, self.tmpfile)
+
+ def snap(self):
+ res = self.tmpfile.getvalue()
+ self.tmpfile.seek(0)
+ self.tmpfile.truncate()
+ return res
+
+ def done(self):
+ setattr(sys, self.name, self._old)
+ del self._old
+ _attempt_to_close_capture_file(self.tmpfile)
+
+ def suspend(self):
+ setattr(sys, self.name, self._old)
+
+ def resume(self):
+ setattr(sys, self.name, self.tmpfile)
+
+ def writeorg(self, data):
+ self._old.write(data)
+ self._old.flush()
+
+
+class SysCaptureBinary(SysCapture):
+
+ def snap(self):
+ res = self.tmpfile.buffer.getvalue()
+ self.tmpfile.seek(0)
+ self.tmpfile.truncate()
+ return res
+
+
+class DontReadFromInput(six.Iterator):
+ """Temporary stub class. Ideally when stdin is accessed, the
+ capturing should be turned off, with possibly all data captured
+ so far sent to the screen. This should be configurable, though,
+ because in automated test runs it is better to crash than
+ hang indefinitely.
+ """
+
+ encoding = None
+
+ def read(self, *args):
+ raise IOError("reading from stdin while output is captured")
+
+ readline = read
+ readlines = read
+ __next__ = read
+
+ def __iter__(self):
+ return self
+
+ def fileno(self):
+ raise UnsupportedOperation("redirected stdin is pseudofile, " "has no fileno()")
+
+ def isatty(self):
+ return False
+
+ def close(self):
+ pass
+
+ @property
+ def buffer(self):
+ if sys.version_info >= (3, 0):
+ return self
+ else:
+ raise AttributeError("redirected stdin has no attribute buffer")
+
+
+def _colorama_workaround():
+ """
+ Ensure colorama is imported so that it attaches to the correct stdio
+ handles on Windows.
+
+ colorama uses the terminal at import time. So if something does the
+ first import of colorama while I/O capture is active, colorama will
+ fail in various ways.
+ """
+
+ if not sys.platform.startswith("win32"):
+ return
+ try:
+ import colorama # noqa
+ except ImportError:
+ pass
+
+
+def _readline_workaround():
+ """
+ Ensure readline is imported so that it attaches to the correct stdio
+ handles on Windows.
+
+ Pdb uses readline support where available--when not running from the Python
+ prompt, the readline module is not imported until running the pdb REPL. If
+ running pytest with the --pdb option this means the readline module is not
+ imported until after I/O capture has been started.
+
+ This is a problem for pyreadline, which is often used to implement readline
+ support on Windows, as it does not attach to the correct handles for stdout
+ and/or stdin if they have been redirected by the FDCapture mechanism. This
+ workaround ensures that readline is imported before I/O capture is setup so
+ that it can attach to the actual stdin/out for the console.
+
+ See https://github.com/pytest-dev/pytest/pull/1281
+ """
+
+ if not sys.platform.startswith("win32"):
+ return
+ try:
+ import readline # noqa
+ except ImportError:
+ pass
+
+
+def _py36_windowsconsoleio_workaround(stream):
+ """
+ Python 3.6 implemented unicode console handling for Windows. This works
+ by reading/writing to the raw console handle using
+ ``{Read,Write}ConsoleW``.
+
+ The problem is that we are going to ``dup2`` over the stdio file
+ descriptors when doing ``FDCapture`` and this will ``CloseHandle`` the
+ handles used by Python to write to the console. Though there is still some
+ weirdness and the console handle seems to only be closed randomly and not
+ on the first call to ``CloseHandle``, or maybe it gets reopened with the
+ same handle value when we suspend capturing.
+
+ The workaround in this case will reopen stdio with a different fd which
+ also means a different handle by replicating the logic in
+ "Py_lifecycle.c:initstdio/create_stdio".
+
+ :param stream: in practice ``sys.stdout`` or ``sys.stderr``, but given
+ here as parameter for unittesting purposes.
+
+ See https://github.com/pytest-dev/py/issues/103
+ """
+ if not sys.platform.startswith("win32") or sys.version_info[:2] < (3, 6):
+ return
+
+ # bail out if ``stream`` doesn't seem like a proper ``io`` stream (#2666)
+ if not hasattr(stream, "buffer"):
+ return
+
+ buffered = hasattr(stream.buffer, "raw")
+ raw_stdout = stream.buffer.raw if buffered else stream.buffer
+
+ if not isinstance(raw_stdout, io._WindowsConsoleIO):
+ return
+
+ def _reopen_stdio(f, mode):
+ if not buffered and mode[0] == "w":
+ buffering = 0
+ else:
+ buffering = -1
+
+ return io.TextIOWrapper(
+ open(os.dup(f.fileno()), mode, buffering),
+ f.encoding,
+ f.errors,
+ f.newlines,
+ f.line_buffering,
+ )
+
+ sys.__stdin__ = sys.stdin = _reopen_stdio(sys.stdin, "rb")
+ sys.__stdout__ = sys.stdout = _reopen_stdio(sys.stdout, "wb")
+ sys.__stderr__ = sys.stderr = _reopen_stdio(sys.stderr, "wb")
+
+
+def _attempt_to_close_capture_file(f):
+ """Suppress IOError when closing the temporary file used for capturing streams in py27 (#2370)"""
+ if six.PY2:
+ try:
+ f.close()
+ except IOError:
+ pass
+ else:
+ f.close()
diff --git a/third_party/python/pytest/src/_pytest/compat.py b/third_party/python/pytest/src/_pytest/compat.py
new file mode 100644
index 0000000000..7abd3d53ff
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/compat.py
@@ -0,0 +1,371 @@
+"""
+python version compatibility code
+"""
+from __future__ import absolute_import, division, print_function
+
+import codecs
+import functools
+import inspect
+import re
+import sys
+
+import py
+
+import _pytest
+from _pytest.outcomes import TEST_OUTCOME
+from six import text_type
+import six
+
+try:
+ import enum
+except ImportError: # pragma: no cover
+ # Only available in Python 3.4+ or as a backport
+ enum = None
+
+
+_PY3 = sys.version_info > (3, 0)
+_PY2 = not _PY3
+
+
+if _PY3:
+ from inspect import signature, Parameter as Parameter
+else:
+ from funcsigs import signature, Parameter as Parameter
+
+
+NoneType = type(None)
+NOTSET = object()
+
+PY35 = sys.version_info[:2] >= (3, 5)
+PY36 = sys.version_info[:2] >= (3, 6)
+MODULE_NOT_FOUND_ERROR = "ModuleNotFoundError" if PY36 else "ImportError"
+
+if _PY3:
+ from collections.abc import MutableMapping as MappingMixin # noqa
+ from collections.abc import Mapping, Sequence # noqa
+else:
+ # those raise DeprecationWarnings in Python >=3.7
+ from collections import MutableMapping as MappingMixin # noqa
+ from collections import Mapping, Sequence # noqa
+
+
+def _format_args(func):
+ return str(signature(func))
+
+
+isfunction = inspect.isfunction
+isclass = inspect.isclass
+# used to work around a python2 exception info leak
+exc_clear = getattr(sys, "exc_clear", lambda: None)
+# The type of re.compile objects is not exposed in Python.
+REGEX_TYPE = type(re.compile(""))
+
+
+def is_generator(func):
+ genfunc = inspect.isgeneratorfunction(func)
+ return genfunc and not iscoroutinefunction(func)
+
+
+def iscoroutinefunction(func):
+ """Return True if func is a decorated coroutine function.
+
+ Note: copied and modified from Python 3.5's builtin coroutines.py to avoid importing asyncio directly,
+ which in turn also initializes the "logging" module as a side-effect (see issue #8).
+ """
+ return (
+ getattr(func, "_is_coroutine", False)
+ or (
+ hasattr(inspect, "iscoroutinefunction")
+ and inspect.iscoroutinefunction(func)
+ )
+ )
+
+
+def getlocation(function, curdir):
+ fn = py.path.local(inspect.getfile(function))
+ lineno = function.__code__.co_firstlineno
+ if fn.relto(curdir):
+ fn = fn.relto(curdir)
+ return "%s:%d" % (fn, lineno + 1)
+
+
+def num_mock_patch_args(function):
+ """ return number of arguments used up by mock arguments (if any) """
+ patchings = getattr(function, "patchings", None)
+ if not patchings:
+ return 0
+ mock_modules = [sys.modules.get("mock"), sys.modules.get("unittest.mock")]
+ if any(mock_modules):
+ sentinels = [m.DEFAULT for m in mock_modules if m is not None]
+ return len(
+ [p for p in patchings if not p.attribute_name and p.new in sentinels]
+ )
+ return len(patchings)
+
+
+def getfuncargnames(function, is_method=False, cls=None):
+ """Returns the names of a function's mandatory arguments.
+
+ This should return the names of all function arguments that:
+ * Aren't bound to an instance or type as in instance or class methods.
+ * Don't have default values.
+ * Aren't bound with functools.partial.
+ * Aren't replaced with mocks.
+
+ The is_method and cls arguments indicate that the function should
+ be treated as a bound method even though it's not, unless, in the
+ case of cls, the function is a static method.
+
+ @RonnyPfannschmidt: This function should be refactored when we
+ revisit fixtures. The fixture mechanism should ask the node for
+ the fixture names, and not try to obtain directly from the
+ function object well after collection has occurred.
+
+ """
+ # The parameters attribute of a Signature object contains an
+ # ordered mapping of parameter names to Parameter instances. This
+ # creates a tuple of the names of the parameters that don't have
+ # defaults.
+ arg_names = tuple(
+ p.name
+ for p in signature(function).parameters.values()
+ if (
+ p.kind is Parameter.POSITIONAL_OR_KEYWORD
+ or p.kind is Parameter.KEYWORD_ONLY
+ )
+ and p.default is Parameter.empty
+ )
+ # If this function should be treated as a bound method even though
+ # it's passed as an unbound method or function, remove the first
+ # parameter name.
+ if (
+ is_method
+ or (
+ cls
+ and not isinstance(cls.__dict__.get(function.__name__, None), staticmethod)
+ )
+ ):
+ arg_names = arg_names[1:]
+ # Remove any names that will be replaced with mocks.
+ if hasattr(function, "__wrapped__"):
+ arg_names = arg_names[num_mock_patch_args(function):]
+ return arg_names
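# Illustrative sketch (not part of the vendored file): which names
# getfuncargnames() reports for a plain function (Python 3 syntax; the
# parameter names are made up).
def example(request, tmpdir, flag=False, *, marker=None, scope):
    pass

# parameters with defaults are dropped, positional-or-keyword and
# keyword-only parameters without defaults are kept, in order
assert getfuncargnames(example) == ("request", "tmpdir", "scope")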
+
+
+def get_default_arg_names(function):
+ # Note: this code intentionally mirrors the code at the beginning of getfuncargnames,
+ # to get the arguments which were excluded from its result because they had default values
+ return tuple(
+ p.name
+ for p in signature(function).parameters.values()
+ if p.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY)
+ and p.default is not Parameter.empty
+ )
+
+
+if _PY3:
+ STRING_TYPES = bytes, str
+ UNICODE_TYPES = six.text_type
+
+ if PY35:
+
+ def _bytes_to_ascii(val):
+ return val.decode("ascii", "backslashreplace")
+
+ else:
+
+ def _bytes_to_ascii(val):
+ if val:
+ # source: http://goo.gl/bGsnwC
+ encoded_bytes, _ = codecs.escape_encode(val)
+ return encoded_bytes.decode("ascii")
+ else:
+ # empty bytes crashes codecs.escape_encode (#1087)
+ return ""
+
+ def ascii_escaped(val):
+ """If val is pure ascii, returns it as a str(). Otherwise, escapes
+ bytes objects into a sequence of escaped bytes:
+
+ b'\xc3\xb4\xc5\xd6' -> u'\\xc3\\xb4\\xc5\\xd6'
+
+ and escapes unicode objects into a sequence of escaped unicode
+ ids, e.g.:
+
+ '4\\nV\\U00043efa\\x0eMXWB\\x1e\\u3028\\u15fd\\xcd\\U0007d944'
+
+ note:
+          the obvious "val.decode('unicode-escape')" will return
+          valid unicode for any utf-8 sequences it finds in the bytes,
+          but we want to return escaped bytes for every byte, even if
+          they happen to form a valid utf-8 string.
+
+ """
+ if isinstance(val, bytes):
+ return _bytes_to_ascii(val)
+ else:
+ return val.encode("unicode_escape").decode("ascii")
+
+
+else:
+ STRING_TYPES = six.string_types
+ UNICODE_TYPES = six.text_type
+
+ def ascii_escaped(val):
+ """In py2 bytes and str are the same type, so return if it's a bytes
+ object, return it unchanged if it is a full ascii string,
+ otherwise escape it into its binary form.
+
+ If it's a unicode string, change the unicode characters into
+ unicode escapes.
+
+ """
+ if isinstance(val, bytes):
+ try:
+ return val.encode("ascii")
+ except UnicodeDecodeError:
+ return val.encode("string-escape")
+ else:
+ return val.encode("unicode-escape")
+
+
+def get_real_func(obj):
+ """ gets the real function object of the (possibly) wrapped object by
+ functools.wraps or functools.partial.
+ """
+ start_obj = obj
+ for i in range(100):
+ new_obj = getattr(obj, "__wrapped__", None)
+ if new_obj is None:
+ break
+ obj = new_obj
+ else:
+ raise ValueError(
+ ("could not find real function of {start}" "\nstopped at {current}").format(
+ start=py.io.saferepr(start_obj), current=py.io.saferepr(obj)
+ )
+ )
+ if isinstance(obj, functools.partial):
+ obj = obj.func
+ return obj
+
+
+def getfslineno(obj):
+ # xxx let decorators etc specify a sane ordering
+ obj = get_real_func(obj)
+ if hasattr(obj, "place_as"):
+ obj = obj.place_as
+ fslineno = _pytest._code.getfslineno(obj)
+ assert isinstance(fslineno[1], int), obj
+ return fslineno
+
+
+def getimfunc(func):
+ try:
+ return func.__func__
+ except AttributeError:
+ return func
+
+
+def safe_getattr(object, name, default):
+ """ Like getattr but return default upon any Exception or any OutcomeException.
+
+ Attribute access can potentially fail for 'evil' Python objects.
+ See issue #214.
+    It catches OutcomeException because of #2490 (issue #580): new outcomes
+    are derived from BaseException instead of Exception (for more details check #2707).
+ """
+ try:
+ return getattr(object, name, default)
+ except TEST_OUTCOME:
+ return default
+
+
+def _is_unittest_unexpected_success_a_failure():
+ """Return if the test suite should fail if an @expectedFailure unittest test PASSES.
+
+ From https://docs.python.org/3/library/unittest.html?highlight=unittest#unittest.TestResult.wasSuccessful:
+ Changed in version 3.4: Returns False if there were any
+ unexpectedSuccesses from tests marked with the expectedFailure() decorator.
+ """
+ return sys.version_info >= (3, 4)
+
+
+if _PY3:
+
+ def safe_str(v):
+ """returns v as string"""
+ return str(v)
+
+
+else:
+
+ def safe_str(v):
+ """returns v as string, converting to ascii if necessary"""
+ try:
+ return str(v)
+ except UnicodeError:
+ if not isinstance(v, text_type):
+ v = text_type(v)
+ errors = "replace"
+ return v.encode("utf-8", errors)
+
+
+COLLECT_FAKEMODULE_ATTRIBUTES = (
+ "Collector",
+ "Module",
+ "Generator",
+ "Function",
+ "Instance",
+ "Session",
+ "Item",
+ "Class",
+ "File",
+ "_fillfuncargs",
+)
+
+
+def _setup_collect_fakemodule():
+ from types import ModuleType
+ import pytest
+
+ pytest.collect = ModuleType("pytest.collect")
+ pytest.collect.__all__ = [] # used for setns
+ for attr in COLLECT_FAKEMODULE_ATTRIBUTES:
+ setattr(pytest.collect, attr, getattr(pytest, attr))
+
+
+if _PY2:
+    # Without this, test_dupfile_on_textio will fail; otherwise CaptureIO could directly inherit from StringIO.
+ from py.io import TextIO
+
+ class CaptureIO(TextIO):
+
+ @property
+ def encoding(self):
+ return getattr(self, "_encoding", "UTF-8")
+
+
+else:
+ import io
+
+ class CaptureIO(io.TextIOWrapper):
+
+ def __init__(self):
+ super(CaptureIO, self).__init__(
+ io.BytesIO(), encoding="UTF-8", newline="", write_through=True
+ )
+
+ def getvalue(self):
+ return self.buffer.getvalue().decode("UTF-8")
+
+
+class FuncargnamesCompatAttr(object):
+ """ helper class so that Metafunc, Function and FixtureRequest
+ don't need to each define the "funcargnames" compatibility attribute.
+ """
+
+ @property
+ def funcargnames(self):
+ """ alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
+ return self.fixturenames
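The argument filtering implemented by getfuncargnames above is easiest to see on a concrete function. Below is a minimal standalone sketch of the same filtering using only the stdlib inspect module rather than the private _pytest.compat helpers; the sample function and its parameter names are invented for illustration:

    import inspect
    from inspect import Parameter

    def mandatory_arg_names(function):
        # keep positional-or-keyword and keyword-only parameters that have no
        # default value, mirroring the filtering done by getfuncargnames above
        return tuple(
            p.name
            for p in inspect.signature(function).parameters.values()
            if p.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY)
            and p.default is Parameter.empty
        )

    def test_copy(tmpdir, monkeypatch, verbose=False):
        pass

    print(mandatory_arg_names(test_copy))  # ('tmpdir', 'monkeypatch')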
diff --git a/third_party/python/pytest/src/_pytest/config/__init__.py b/third_party/python/pytest/src/_pytest/config/__init__.py
new file mode 100644
index 0000000000..11348b80d0
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/config/__init__.py
@@ -0,0 +1,979 @@
+""" command line options, ini-file and conftest.py processing. """
+from __future__ import absolute_import, division, print_function
+import argparse
+import shlex
+import traceback
+import types
+import warnings
+import copy
+import six
+import py
+
+# Don't import pytest here because it causes import cycle troubles
+import sys
+import os
+from _pytest.outcomes import Skipped
+
+import _pytest._code
+import _pytest.hookspec # the extension point definitions
+import _pytest.assertion
+from pluggy import PluginManager, HookimplMarker, HookspecMarker
+from _pytest.compat import safe_str
+from .exceptions import UsageError, PrintHelp
+from .findpaths import determine_setup, exists
+
+hookimpl = HookimplMarker("pytest")
+hookspec = HookspecMarker("pytest")
+
+# pytest startup
+#
+
+
+class ConftestImportFailure(Exception):
+
+ def __init__(self, path, excinfo):
+ Exception.__init__(self, path, excinfo)
+ self.path = path
+ self.excinfo = excinfo
+
+ def __str__(self):
+ etype, evalue, etb = self.excinfo
+ formatted = traceback.format_tb(etb)
+ # The level of the tracebacks we want to print is hand crafted :(
+ return repr(evalue) + "\n" + "".join(formatted[2:])
+
+
+def main(args=None, plugins=None):
+ """ return exit code, after performing an in-process test run.
+
+ :arg args: list of command line arguments.
+
+ :arg plugins: list of plugin objects to be auto-registered during
+ initialization.
+ """
+ try:
+ try:
+ config = _prepareconfig(args, plugins)
+ except ConftestImportFailure as e:
+ tw = py.io.TerminalWriter(sys.stderr)
+ for line in traceback.format_exception(*e.excinfo):
+ tw.line(line.rstrip(), red=True)
+ tw.line("ERROR: could not load %s\n" % (e.path,), red=True)
+ return 4
+ else:
+ try:
+ return config.hook.pytest_cmdline_main(config=config)
+ finally:
+ config._ensure_unconfigure()
+ except UsageError as e:
+ tw = py.io.TerminalWriter(sys.stderr)
+ for msg in e.args:
+ tw.line("ERROR: {}\n".format(msg), red=True)
+ return 4
+
+
+class cmdline(object): # NOQA compatibility namespace
+ main = staticmethod(main)
+
+
+def filename_arg(path, optname):
+ """ Argparse type validator for filename arguments.
+
+ :path: path of filename
+ :optname: name of the option
+ """
+ if os.path.isdir(path):
+ raise UsageError("{} must be a filename, given: {}".format(optname, path))
+ return path
+
+
+def directory_arg(path, optname):
+ """Argparse type validator for directory arguments.
+
+ :path: path of directory
+ :optname: name of the option
+ """
+ if not os.path.isdir(path):
+ raise UsageError("{} must be a directory, given: {}".format(optname, path))
+ return path
+
+
+default_plugins = (
+ "mark",
+ "main",
+ "terminal",
+ "runner",
+ "python",
+ "fixtures",
+ "debugging",
+ "unittest",
+ "capture",
+ "skipping",
+ "tmpdir",
+ "monkeypatch",
+ "recwarn",
+ "pastebin",
+ "helpconfig",
+ "nose",
+ "assertion",
+ "junitxml",
+ "resultlog",
+ "doctest",
+ "cacheprovider",
+ "freeze_support",
+ "setuponly",
+ "setupplan",
+ "warnings",
+ "logging",
+)
+
+
+builtin_plugins = set(default_plugins)
+builtin_plugins.add("pytester")
+
+
+def get_config():
+ # subsequent calls to main will create a fresh instance
+ pluginmanager = PytestPluginManager()
+ config = Config(pluginmanager)
+ for spec in default_plugins:
+ pluginmanager.import_plugin(spec)
+ return config
+
+
+def get_plugin_manager():
+ """
+ Obtain a new instance of the
+ :py:class:`_pytest.config.PytestPluginManager`, with default plugins
+ already loaded.
+
+    This function can be used by integrations with other tools, e.g. hooking
+    into pytest to run tests from within an IDE.
+ """
+ return get_config().pluginmanager
+
+
+def _prepareconfig(args=None, plugins=None):
+ warning = None
+ if args is None:
+ args = sys.argv[1:]
+ elif isinstance(args, py.path.local):
+ args = [str(args)]
+ elif not isinstance(args, (tuple, list)):
+ if not isinstance(args, str):
+ raise ValueError("not a string or argument list: %r" % (args,))
+ args = shlex.split(args, posix=sys.platform != "win32")
+ from _pytest import deprecated
+
+ warning = deprecated.MAIN_STR_ARGS
+ config = get_config()
+ pluginmanager = config.pluginmanager
+ try:
+ if plugins:
+ for plugin in plugins:
+ if isinstance(plugin, six.string_types):
+ pluginmanager.consider_pluginarg(plugin)
+ else:
+ pluginmanager.register(plugin)
+ if warning:
+ config.warn("C1", warning)
+ return pluginmanager.hook.pytest_cmdline_parse(
+ pluginmanager=pluginmanager, args=args
+ )
+ except BaseException:
+ config._ensure_unconfigure()
+ raise
+
+
+class PytestPluginManager(PluginManager):
+ """
+ Overwrites :py:class:`pluggy.PluginManager <pluggy.PluginManager>` to add pytest-specific
+ functionality:
+
+ * loading plugins from the command line, ``PYTEST_PLUGINS`` env variable and
+ ``pytest_plugins`` global variables found in plugins being loaded;
+ * ``conftest.py`` loading during start-up;
+ """
+
+ def __init__(self):
+ super(PytestPluginManager, self).__init__("pytest")
+ self._conftest_plugins = set()
+
+ # state related to local conftest plugins
+ self._path2confmods = {}
+ self._conftestpath2mod = {}
+ self._confcutdir = None
+ self._noconftest = False
+ self._duplicatepaths = set()
+
+ self.add_hookspecs(_pytest.hookspec)
+ self.register(self)
+ if os.environ.get("PYTEST_DEBUG"):
+ err = sys.stderr
+ encoding = getattr(err, "encoding", "utf8")
+ try:
+ err = py.io.dupfile(err, encoding=encoding)
+ except Exception:
+ pass
+ self.trace.root.setwriter(err.write)
+ self.enable_tracing()
+
+ # Config._consider_importhook will set a real object if required.
+ self.rewrite_hook = _pytest.assertion.DummyRewriteHook()
+ # Used to know when we are importing conftests after the pytest_configure stage
+ self._configured = False
+
+ def addhooks(self, module_or_class):
+ """
+ .. deprecated:: 2.8
+
+ Use :py:meth:`pluggy.PluginManager.add_hookspecs <PluginManager.add_hookspecs>`
+ instead.
+ """
+ warning = dict(
+ code="I2",
+ fslocation=_pytest._code.getfslineno(sys._getframe(1)),
+ nodeid=None,
+ message="use pluginmanager.add_hookspecs instead of "
+ "deprecated addhooks() method.",
+ )
+ self._warn(warning)
+ return self.add_hookspecs(module_or_class)
+
+ def parse_hookimpl_opts(self, plugin, name):
+ # pytest hooks are always prefixed with pytest_
+ # so we avoid accessing possibly non-readable attributes
+ # (see issue #1073)
+ if not name.startswith("pytest_"):
+ return
+ # ignore some historic special names which can not be hooks anyway
+ if name == "pytest_plugins" or name.startswith("pytest_funcarg__"):
+ return
+
+ method = getattr(plugin, name)
+ opts = super(PytestPluginManager, self).parse_hookimpl_opts(plugin, name)
+
+ # collect unmarked hooks as long as they have the `pytest_' prefix
+ if opts is None and name.startswith("pytest_"):
+ opts = {}
+
+ if opts is not None:
+ for name in ("tryfirst", "trylast", "optionalhook", "hookwrapper"):
+ opts.setdefault(name, hasattr(method, name))
+ return opts
+
+ def parse_hookspec_opts(self, module_or_class, name):
+ opts = super(PytestPluginManager, self).parse_hookspec_opts(
+ module_or_class, name
+ )
+ if opts is None:
+ method = getattr(module_or_class, name)
+ if name.startswith("pytest_"):
+ opts = {
+ "firstresult": hasattr(method, "firstresult"),
+ "historic": hasattr(method, "historic"),
+ }
+ return opts
+
+ def register(self, plugin, name=None):
+ if name in ["pytest_catchlog", "pytest_capturelog"]:
+ self._warn(
+ "{} plugin has been merged into the core, "
+ "please remove it from your requirements.".format(
+ name.replace("_", "-")
+ )
+ )
+ return
+ ret = super(PytestPluginManager, self).register(plugin, name)
+ if ret:
+ self.hook.pytest_plugin_registered.call_historic(
+ kwargs=dict(plugin=plugin, manager=self)
+ )
+
+ if isinstance(plugin, types.ModuleType):
+ self.consider_module(plugin)
+ return ret
+
+ def getplugin(self, name):
+ # support deprecated naming because plugins (xdist e.g.) use it
+ return self.get_plugin(name)
+
+ def hasplugin(self, name):
+ """Return True if the plugin with the given name is registered."""
+ return bool(self.get_plugin(name))
+
+ def pytest_configure(self, config):
+ # XXX now that the pluginmanager exposes hookimpl(tryfirst...)
+ # we should remove tryfirst/trylast as markers
+ config.addinivalue_line(
+ "markers",
+ "tryfirst: mark a hook implementation function such that the "
+ "plugin machinery will try to call it first/as early as possible.",
+ )
+ config.addinivalue_line(
+ "markers",
+ "trylast: mark a hook implementation function such that the "
+ "plugin machinery will try to call it last/as late as possible.",
+ )
+ self._configured = True
+
+ def _warn(self, message):
+ kwargs = message if isinstance(message, dict) else {
+ "code": "I1", "message": message, "fslocation": None, "nodeid": None
+ }
+ self.hook.pytest_logwarning.call_historic(kwargs=kwargs)
+
+ #
+ # internal API for local conftest plugin handling
+ #
+ def _set_initial_conftests(self, namespace):
+ """ load initial conftest files given a preparsed "namespace".
+ As conftest files may add their own command line options
+ which have arguments ('--my-opt somepath') we might get some
+ false positives. All builtin and 3rd party plugins will have
+ been loaded, however, so common options will not confuse our logic
+ here.
+ """
+ current = py.path.local()
+ self._confcutdir = current.join(
+ namespace.confcutdir, abs=True
+ ) if namespace.confcutdir else None
+ self._noconftest = namespace.noconftest
+ testpaths = namespace.file_or_dir
+ foundanchor = False
+ for path in testpaths:
+ path = str(path)
+ # remove node-id syntax
+ i = path.find("::")
+ if i != -1:
+ path = path[:i]
+ anchor = current.join(path, abs=1)
+ if exists(anchor): # we found some file object
+ self._try_load_conftest(anchor)
+ foundanchor = True
+ if not foundanchor:
+ self._try_load_conftest(current)
+
+ def _try_load_conftest(self, anchor):
+ self._getconftestmodules(anchor)
+ # let's also consider test* subdirs
+ if anchor.check(dir=1):
+ for x in anchor.listdir("test*"):
+ if x.check(dir=1):
+ self._getconftestmodules(x)
+
+ def _getconftestmodules(self, path):
+ if self._noconftest:
+ return []
+ try:
+ return self._path2confmods[path]
+ except KeyError:
+ if path.isfile():
+ clist = self._getconftestmodules(path.dirpath())
+ else:
+ # XXX these days we may rather want to use config.rootdir
+ # and allow users to opt into looking into the rootdir parent
+ # directories instead of requiring to specify confcutdir
+ clist = []
+ for parent in path.parts():
+ if self._confcutdir and self._confcutdir.relto(parent):
+ continue
+ conftestpath = parent.join("conftest.py")
+ if conftestpath.isfile():
+ mod = self._importconftest(conftestpath)
+ clist.append(mod)
+
+ self._path2confmods[path] = clist
+ return clist
+
+ def _rget_with_confmod(self, name, path):
+ modules = self._getconftestmodules(path)
+ for mod in reversed(modules):
+ try:
+ return mod, getattr(mod, name)
+ except AttributeError:
+ continue
+ raise KeyError(name)
+
+ def _importconftest(self, conftestpath):
+ try:
+ return self._conftestpath2mod[conftestpath]
+ except KeyError:
+ pkgpath = conftestpath.pypkgpath()
+ if pkgpath is None:
+ _ensure_removed_sysmodule(conftestpath.purebasename)
+ try:
+ mod = conftestpath.pyimport()
+ if hasattr(mod, "pytest_plugins") and self._configured:
+ from _pytest.deprecated import PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST
+
+ warnings.warn(PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST)
+ except Exception:
+ raise ConftestImportFailure(conftestpath, sys.exc_info())
+
+ self._conftest_plugins.add(mod)
+ self._conftestpath2mod[conftestpath] = mod
+ dirpath = conftestpath.dirpath()
+ if dirpath in self._path2confmods:
+ for path, mods in self._path2confmods.items():
+ if path and path.relto(dirpath) or path == dirpath:
+ assert mod not in mods
+ mods.append(mod)
+ self.trace("loaded conftestmodule %r" % (mod))
+ self.consider_conftest(mod)
+ return mod
+
+ #
+ # API for bootstrapping plugin loading
+ #
+ #
+
+ def consider_preparse(self, args):
+ for opt1, opt2 in zip(args, args[1:]):
+ if opt1 == "-p":
+ self.consider_pluginarg(opt2)
+
+ def consider_pluginarg(self, arg):
+ if arg.startswith("no:"):
+ name = arg[3:]
+ self.set_blocked(name)
+ if not name.startswith("pytest_"):
+ self.set_blocked("pytest_" + name)
+ else:
+ self.import_plugin(arg)
+
+ def consider_conftest(self, conftestmodule):
+ self.register(conftestmodule, name=conftestmodule.__file__)
+
+ def consider_env(self):
+ self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS"))
+
+ def consider_module(self, mod):
+ self._import_plugin_specs(getattr(mod, "pytest_plugins", []))
+
+ def _import_plugin_specs(self, spec):
+ plugins = _get_plugin_specs_as_list(spec)
+ for import_spec in plugins:
+ self.import_plugin(import_spec)
+
+ def import_plugin(self, modname):
+ # most often modname refers to builtin modules, e.g. "pytester",
+ # "terminal" or "capture". Those plugins are registered under their
+ # basename for historic purposes but must be imported with the
+ # _pytest prefix.
+ assert isinstance(modname, (six.text_type, str)), (
+ "module name as text required, got %r" % modname
+ )
+ modname = str(modname)
+ if self.is_blocked(modname) or self.get_plugin(modname) is not None:
+ return
+ if modname in builtin_plugins:
+ importspec = "_pytest." + modname
+ else:
+ importspec = modname
+ self.rewrite_hook.mark_rewrite(importspec)
+ try:
+ __import__(importspec)
+ except ImportError as e:
+ new_exc_type = ImportError
+ new_exc_message = 'Error importing plugin "%s": %s' % (
+ modname, safe_str(e.args[0])
+ )
+ new_exc = new_exc_type(new_exc_message)
+
+ six.reraise(new_exc_type, new_exc, sys.exc_info()[2])
+
+ except Skipped as e:
+ self._warn("skipped plugin %r: %s" % ((modname, e.msg)))
+ else:
+ mod = sys.modules[importspec]
+ self.register(mod, modname)
+
+
+def _get_plugin_specs_as_list(specs):
+ """
+ Parses a list of "plugin specs" and returns a list of plugin names.
+
+    Plugin specs can be given as a single string of names separated by "," or already as a
+    list/tuple, in which case it is returned as a list. Specs can also be `None`, in which
+    case an empty list is returned.
+ """
+ if specs is not None:
+ if isinstance(specs, str):
+ specs = specs.split(",") if specs else []
+ if not isinstance(specs, (list, tuple)):
+ raise UsageError(
+ "Plugin specs must be a ','-separated string or a "
+ "list/tuple of strings for plugin names. Given: %r" % specs
+ )
+ return list(specs)
+ return []
+
+
+def _ensure_removed_sysmodule(modname):
+ try:
+ del sys.modules[modname]
+ except KeyError:
+ pass
+
+
+class Notset(object):
+
+ def __repr__(self):
+ return "<NOTSET>"
+
+
+notset = Notset()
+
+
+def _iter_rewritable_modules(package_files):
+ for fn in package_files:
+ is_simple_module = "/" not in fn and fn.endswith(".py")
+ is_package = fn.count("/") == 1 and fn.endswith("__init__.py")
+ if is_simple_module:
+ module_name, _ = os.path.splitext(fn)
+ yield module_name
+ elif is_package:
+ package_name = os.path.dirname(fn)
+ yield package_name
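As a quick illustration of the module/package distinction drawn by _iter_rewritable_modules above, here is a standalone sketch with hypothetical plugin file names (stdlib only, not importing the private _pytest.config module):

    import os

    def iter_rewritable_modules(package_files):
        # top-level single-file modules yield their module name,
        # top-level packages yield the package directory name
        for fn in package_files:
            if "/" not in fn and fn.endswith(".py"):
                yield os.path.splitext(fn)[0]
            elif fn.count("/") == 1 and fn.endswith("__init__.py"):
                yield os.path.dirname(fn)

    files = ["pytest_demo.py", "demo_pkg/__init__.py", "demo_pkg/plugin.py"]
    print(list(iter_rewritable_modules(files)))  # ['pytest_demo', 'demo_pkg']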
+
+
+class Config(object):
+ """ access to configuration values, pluginmanager and plugin hooks. """
+
+ def __init__(self, pluginmanager):
+ #: access to command line option as attributes.
+ #: (deprecated), use :py:func:`getoption() <_pytest.config.Config.getoption>` instead
+ self.option = argparse.Namespace()
+ from .argparsing import Parser, FILE_OR_DIR
+
+ _a = FILE_OR_DIR
+ self._parser = Parser(
+ usage="%%(prog)s [options] [%s] [%s] [...]" % (_a, _a),
+ processopt=self._processopt,
+ )
+ #: a pluginmanager instance
+ self.pluginmanager = pluginmanager
+ self.trace = self.pluginmanager.trace.root.get("config")
+ self.hook = self.pluginmanager.hook
+ self._inicache = {}
+ self._override_ini = ()
+ self._opt2dest = {}
+ self._cleanup = []
+ self._warn = self.pluginmanager._warn
+ self.pluginmanager.register(self, "pytestconfig")
+ self._configured = False
+
+ def do_setns(dic):
+ import pytest
+
+ setns(pytest, dic)
+
+ self.hook.pytest_namespace.call_historic(do_setns, {})
+ self.hook.pytest_addoption.call_historic(kwargs=dict(parser=self._parser))
+
+ def add_cleanup(self, func):
+ """ Add a function to be called when the config object gets out of
+        use (usually coinciding with pytest_unconfigure)."""
+ self._cleanup.append(func)
+
+ def _do_configure(self):
+ assert not self._configured
+ self._configured = True
+ self.hook.pytest_configure.call_historic(kwargs=dict(config=self))
+
+ def _ensure_unconfigure(self):
+ if self._configured:
+ self._configured = False
+ self.hook.pytest_unconfigure(config=self)
+ self.hook.pytest_configure._call_history = []
+ while self._cleanup:
+ fin = self._cleanup.pop()
+ fin()
+
+ def warn(self, code, message, fslocation=None, nodeid=None):
+ """ generate a warning for this test session. """
+ self.hook.pytest_logwarning.call_historic(
+ kwargs=dict(
+ code=code, message=message, fslocation=fslocation, nodeid=nodeid
+ )
+ )
+
+ def get_terminal_writer(self):
+ return self.pluginmanager.get_plugin("terminalreporter")._tw
+
+ def pytest_cmdline_parse(self, pluginmanager, args):
+ # REF1 assert self == pluginmanager.config, (self, pluginmanager.config)
+ self.parse(args)
+ return self
+
+ def notify_exception(self, excinfo, option=None):
+ if option and option.fulltrace:
+ style = "long"
+ else:
+ style = "native"
+ excrepr = excinfo.getrepr(
+ funcargs=True, showlocals=getattr(option, "showlocals", False), style=style
+ )
+ res = self.hook.pytest_internalerror(excrepr=excrepr, excinfo=excinfo)
+ if not any(res):
+ for line in str(excrepr).split("\n"):
+ sys.stderr.write("INTERNALERROR> %s\n" % line)
+ sys.stderr.flush()
+
+ def cwd_relative_nodeid(self, nodeid):
+        # nodeids are relative to the rootdir; compute them relative to the cwd
+ if self.invocation_dir != self.rootdir:
+ fullpath = self.rootdir.join(nodeid)
+ nodeid = self.invocation_dir.bestrelpath(fullpath)
+ return nodeid
+
+ @classmethod
+ def fromdictargs(cls, option_dict, args):
+ """ constructor useable for subprocesses. """
+ config = get_config()
+ config.option.__dict__.update(option_dict)
+ config.parse(args, addopts=False)
+ for x in config.option.plugins:
+ config.pluginmanager.consider_pluginarg(x)
+ return config
+
+ def _processopt(self, opt):
+ for name in opt._short_opts + opt._long_opts:
+ self._opt2dest[name] = opt.dest
+
+ if hasattr(opt, "default") and opt.dest:
+ if not hasattr(self.option, opt.dest):
+ setattr(self.option, opt.dest, opt.default)
+
+ @hookimpl(trylast=True)
+ def pytest_load_initial_conftests(self, early_config):
+ self.pluginmanager._set_initial_conftests(early_config.known_args_namespace)
+
+ def _initini(self, args):
+ ns, unknown_args = self._parser.parse_known_and_unknown_args(
+ args, namespace=copy.copy(self.option)
+ )
+ r = determine_setup(
+ ns.inifilename,
+ ns.file_or_dir + unknown_args,
+ warnfunc=self.warn,
+ rootdir_cmd_arg=ns.rootdir or None,
+ )
+ self.rootdir, self.inifile, self.inicfg = r
+ self._parser.extra_info["rootdir"] = self.rootdir
+ self._parser.extra_info["inifile"] = self.inifile
+ self.invocation_dir = py.path.local()
+ self._parser.addini("addopts", "extra command line options", "args")
+ self._parser.addini("minversion", "minimally required pytest version")
+ self._override_ini = ns.override_ini or ()
+
+ def _consider_importhook(self, args):
+ """Install the PEP 302 import hook if using assertion rewriting.
+
+ Needs to parse the --assert=<mode> option from the commandline
+ and find all the installed plugins to mark them for rewriting
+ by the importhook.
+ """
+ ns, unknown_args = self._parser.parse_known_and_unknown_args(args)
+ mode = ns.assertmode
+ if mode == "rewrite":
+ try:
+ hook = _pytest.assertion.install_importhook(self)
+ except SystemError:
+ mode = "plain"
+ else:
+ self._mark_plugins_for_rewrite(hook)
+ _warn_about_missing_assertion(mode)
+
+ def _mark_plugins_for_rewrite(self, hook):
+ """
+ Given an importhook, mark for rewrite any top-level
+ modules or packages in the distribution package for
+ all pytest plugins.
+ """
+ import pkg_resources
+
+ self.pluginmanager.rewrite_hook = hook
+
+ # 'RECORD' available for plugins installed normally (pip install)
+ # 'SOURCES.txt' available for plugins installed in dev mode (pip install -e)
+ # for installed plugins 'SOURCES.txt' returns an empty list, and vice-versa
+ # so it shouldn't be an issue
+ metadata_files = "RECORD", "SOURCES.txt"
+
+ package_files = (
+ entry.split(",")[0]
+ for entrypoint in pkg_resources.iter_entry_points("pytest11")
+ for metadata in metadata_files
+ for entry in entrypoint.dist._get_metadata(metadata)
+ )
+
+ for name in _iter_rewritable_modules(package_files):
+ hook.mark_rewrite(name)
+
+ def _preparse(self, args, addopts=True):
+ if addopts:
+ args[:] = shlex.split(os.environ.get("PYTEST_ADDOPTS", "")) + args
+ self._initini(args)
+ if addopts:
+ args[:] = self.getini("addopts") + args
+ self._checkversion()
+ self._consider_importhook(args)
+ self.pluginmanager.consider_preparse(args)
+ self.pluginmanager.load_setuptools_entrypoints("pytest11")
+ self.pluginmanager.consider_env()
+ self.known_args_namespace = ns = self._parser.parse_known_args(
+ args, namespace=copy.copy(self.option)
+ )
+ if self.known_args_namespace.confcutdir is None and self.inifile:
+ confcutdir = py.path.local(self.inifile).dirname
+ self.known_args_namespace.confcutdir = confcutdir
+ try:
+ self.hook.pytest_load_initial_conftests(
+ early_config=self, args=args, parser=self._parser
+ )
+ except ConftestImportFailure:
+ e = sys.exc_info()[1]
+ if ns.help or ns.version:
+                # we don't want to prevent --help/--version from working,
+                # so just let it pass and print a warning at the end
+ self._warn("could not load initial conftests (%s)\n" % e.path)
+ else:
+ raise
+
+ def _checkversion(self):
+ import pytest
+
+ minver = self.inicfg.get("minversion", None)
+ if minver:
+ ver = minver.split(".")
+ myver = pytest.__version__.split(".")
+ if myver < ver:
+ raise pytest.UsageError(
+                    "%s:%d: requires pytest-%s, actual pytest-%s"
+ % (
+ self.inicfg.config.path,
+ self.inicfg.lineof("minversion"),
+ minver,
+ pytest.__version__,
+ )
+ )
+
+ def parse(self, args, addopts=True):
+ # parse given cmdline arguments into this config object.
+ assert not hasattr(
+ self, "args"
+ ), "can only parse cmdline args at most once per Config object"
+ self._origargs = args
+ self.hook.pytest_addhooks.call_historic(
+ kwargs=dict(pluginmanager=self.pluginmanager)
+ )
+ self._preparse(args, addopts=addopts)
+ # XXX deprecated hook:
+ self.hook.pytest_cmdline_preparse(config=self, args=args)
+ self._parser.after_preparse = True
+ try:
+ args = self._parser.parse_setoption(
+ args, self.option, namespace=self.option
+ )
+ if not args:
+ cwd = os.getcwd()
+ if cwd == self.rootdir:
+ args = self.getini("testpaths")
+ if not args:
+ args = [cwd]
+ self.args = args
+ except PrintHelp:
+ pass
+
+ def addinivalue_line(self, name, line):
+ """ add a line to an ini-file option. The option must have been
+        declared but might not yet be set, in which case the line becomes
+        the first line in its value. """
+ x = self.getini(name)
+ assert isinstance(x, list)
+ x.append(line) # modifies the cached list inline
+
+ def getini(self, name):
+ """ return configuration value from an :ref:`ini file <inifiles>`. If the
+ specified name hasn't been registered through a prior
+ :py:func:`parser.addini <_pytest.config.Parser.addini>`
+ call (usually from a plugin), a ValueError is raised. """
+ try:
+ return self._inicache[name]
+ except KeyError:
+ self._inicache[name] = val = self._getini(name)
+ return val
+
+ def _getini(self, name):
+ try:
+ description, type, default = self._parser._inidict[name]
+ except KeyError:
+ raise ValueError("unknown configuration value: %r" % (name,))
+ value = self._get_override_ini_value(name)
+ if value is None:
+ try:
+ value = self.inicfg[name]
+ except KeyError:
+ if default is not None:
+ return default
+ if type is None:
+ return ""
+ return []
+ if type == "pathlist":
+ dp = py.path.local(self.inicfg.config.path).dirpath()
+ values = []
+ for relpath in shlex.split(value):
+ values.append(dp.join(relpath, abs=True))
+ return values
+ elif type == "args":
+ return shlex.split(value)
+ elif type == "linelist":
+ return [t for t in map(lambda x: x.strip(), value.split("\n")) if t]
+ elif type == "bool":
+ return bool(_strtobool(value.strip()))
+ else:
+ assert type is None
+ return value
+
+ def _getconftest_pathlist(self, name, path):
+ try:
+ mod, relroots = self.pluginmanager._rget_with_confmod(name, path)
+ except KeyError:
+ return None
+ modpath = py.path.local(mod.__file__).dirpath()
+ values = []
+ for relroot in relroots:
+ if not isinstance(relroot, py.path.local):
+ relroot = relroot.replace("/", py.path.local.sep)
+ relroot = modpath.join(relroot, abs=True)
+ values.append(relroot)
+ return values
+
+ def _get_override_ini_value(self, name):
+ value = None
+ # override_ini is a list of "ini=value" options
+ # always use the last item if multiple values are set for same ini-name,
+ # e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2
+ for ini_config in self._override_ini:
+ try:
+ key, user_ini_value = ini_config.split("=", 1)
+ except ValueError:
+ raise UsageError("-o/--override-ini expects option=value style.")
+ else:
+ if key == name:
+ value = user_ini_value
+ return value
+
+ def getoption(self, name, default=notset, skip=False):
+ """ return command line option value.
+
+ :arg name: name of the option. You may also specify
+ the literal ``--OPT`` option instead of the "dest" option name.
+ :arg default: default value if no option of that name exists.
+        :arg skip: if True, raise pytest.skip if the option does not exist
+            or has a None value.
+ """
+ name = self._opt2dest.get(name, name)
+ try:
+ val = getattr(self.option, name)
+ if val is None and skip:
+ raise AttributeError(name)
+ return val
+ except AttributeError:
+ if default is not notset:
+ return default
+ if skip:
+ import pytest
+
+ pytest.skip("no %r option found" % (name,))
+ raise ValueError("no option named %r" % (name,))
+
+ def getvalue(self, name, path=None):
+ """ (deprecated, use getoption()) """
+ return self.getoption(name)
+
+ def getvalueorskip(self, name, path=None):
+ """ (deprecated, use getoption(skip=True)) """
+ return self.getoption(name, skip=True)
+
+
+def _assertion_supported():
+ try:
+ assert False
+ except AssertionError:
+ return True
+ else:
+ return False
+
+
+def _warn_about_missing_assertion(mode):
+ if not _assertion_supported():
+ if mode == "plain":
+ sys.stderr.write(
+ "WARNING: ASSERTIONS ARE NOT EXECUTED"
+ " and FAILING TESTS WILL PASS. Are you"
+ " using python -O?"
+ )
+ else:
+ sys.stderr.write(
+ "WARNING: assertions not in test modules or"
+ " plugins will be ignored"
+ " because assert statements are not executed "
+ "by the underlying Python interpreter "
+ "(are you using python -O?)\n"
+ )
+
+
+def setns(obj, dic):
+ import pytest
+
+ for name, value in dic.items():
+ if isinstance(value, dict):
+ mod = getattr(obj, name, None)
+ if mod is None:
+ modname = "pytest.%s" % name
+ mod = types.ModuleType(modname)
+ sys.modules[modname] = mod
+ mod.__all__ = []
+ setattr(obj, name, mod)
+ obj.__all__.append(name)
+ setns(mod, value)
+ else:
+ setattr(obj, name, value)
+ obj.__all__.append(name)
+ # if obj != pytest:
+ # pytest.__all__.append(name)
+ setattr(pytest, name, value)
+
+
+def create_terminal_writer(config, *args, **kwargs):
+ """Create a TerminalWriter instance configured according to the options
+    in the config object. Any code that requires a TerminalWriter object
+    and has access to a config object should use this function.
+ """
+ tw = py.io.TerminalWriter(*args, **kwargs)
+ if config.option.color == "yes":
+ tw.hasmarkup = True
+ if config.option.color == "no":
+ tw.hasmarkup = False
+ return tw
+
+
+def _strtobool(val):
+ """Convert a string representation of truth to true (1) or false (0).
+
+ True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
+ are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
+ 'val' is anything else.
+
+ .. note:: copied from distutils.util
+ """
+ val = val.lower()
+ if val in ("y", "yes", "t", "true", "on", "1"):
+ return 1
+ elif val in ("n", "no", "f", "false", "off", "0"):
+ return 0
+ else:
+ raise ValueError("invalid truth value %r" % (val,))
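For reference, the main() function near the top of this file is what the public pytest.main() entry point resolves to; a minimal in-process invocation might look like the sketch below. The test path and plugin class are hypothetical, while pytest.main() and the pytest_configure hook are part of the documented public API:

    import pytest

    class DemoPlugin(object):
        # plugin objects passed via `plugins` are auto-registered before parsing
        def pytest_configure(self, config):
            print("rootdir:", config.rootdir)

    # runs an in-process test session and returns the exit code (0 == all tests passed)
    exit_code = pytest.main(["-q", "tests/"], plugins=[DemoPlugin()])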
diff --git a/third_party/python/pytest/src/_pytest/config/argparsing.py b/third_party/python/pytest/src/_pytest/config/argparsing.py
new file mode 100644
index 0000000000..781d8e8c4e
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/config/argparsing.py
@@ -0,0 +1,394 @@
+import six
+import warnings
+import argparse
+
+FILE_OR_DIR = "file_or_dir"
+
+
+class Parser(object):
+ """ Parser for command line arguments and ini-file values.
+
+ :ivar extra_info: dict of generic param -> value to display in case
+ there's an error processing the command line arguments.
+ """
+
+ def __init__(self, usage=None, processopt=None):
+ self._anonymous = OptionGroup("custom options", parser=self)
+ self._groups = []
+ self._processopt = processopt
+ self._usage = usage
+ self._inidict = {}
+ self._ininames = []
+ self.extra_info = {}
+
+ def processoption(self, option):
+ if self._processopt:
+ if option.dest:
+ self._processopt(option)
+
+ def getgroup(self, name, description="", after=None):
+ """ get (or create) a named option Group.
+
+ :name: name of the option group.
+ :description: long description for --help output.
+ :after: name of other group, used for ordering --help output.
+
+ The returned group object has an ``addoption`` method with the same
+ signature as :py:func:`parser.addoption
+ <_pytest.config.Parser.addoption>` but will be shown in the
+        respective group in the output of ``pytest --help``.
+ """
+ for group in self._groups:
+ if group.name == name:
+ return group
+ group = OptionGroup(name, description, parser=self)
+ i = 0
+ for i, grp in enumerate(self._groups):
+ if grp.name == after:
+ break
+ self._groups.insert(i + 1, group)
+ return group
+
+ def addoption(self, *opts, **attrs):
+ """ register a command line option.
+
+ :opts: option names, can be short or long options.
+ :attrs: same attributes which the ``add_option()`` function of the
+ `argparse library
+ <http://docs.python.org/2/library/argparse.html>`_
+ accepts.
+
+ After command line parsing options are available on the pytest config
+ object via ``config.option.NAME`` where ``NAME`` is usually set
+ by passing a ``dest`` attribute, for example
+ ``addoption("--long", dest="NAME", ...)``.
+ """
+ self._anonymous.addoption(*opts, **attrs)
+
+ def parse(self, args, namespace=None):
+ from _pytest._argcomplete import try_argcomplete
+
+ self.optparser = self._getparser()
+ try_argcomplete(self.optparser)
+ return self.optparser.parse_args([str(x) for x in args], namespace=namespace)
+
+ def _getparser(self):
+ from _pytest._argcomplete import filescompleter
+
+ optparser = MyOptionParser(self, self.extra_info)
+ groups = self._groups + [self._anonymous]
+ for group in groups:
+ if group.options:
+ desc = group.description or group.name
+ arggroup = optparser.add_argument_group(desc)
+ for option in group.options:
+ n = option.names()
+ a = option.attrs()
+ arggroup.add_argument(*n, **a)
+ # bash like autocompletion for dirs (appending '/')
+ optparser.add_argument(FILE_OR_DIR, nargs="*").completer = filescompleter
+ return optparser
+
+ def parse_setoption(self, args, option, namespace=None):
+ parsedoption = self.parse(args, namespace=namespace)
+ for name, value in parsedoption.__dict__.items():
+ setattr(option, name, value)
+ return getattr(parsedoption, FILE_OR_DIR)
+
+ def parse_known_args(self, args, namespace=None):
+ """parses and returns a namespace object with known arguments at this
+ point.
+ """
+ return self.parse_known_and_unknown_args(args, namespace=namespace)[0]
+
+ def parse_known_and_unknown_args(self, args, namespace=None):
+ """parses and returns a namespace object with known arguments, and
+ the remaining arguments unknown at this point.
+ """
+ optparser = self._getparser()
+ args = [str(x) for x in args]
+ return optparser.parse_known_args(args, namespace=namespace)
+
+ def addini(self, name, help, type=None, default=None):
+ """ register an ini-file option.
+
+ :name: name of the ini-variable
+ :type: type of the variable, can be ``pathlist``, ``args``, ``linelist``
+ or ``bool``.
+ :default: default value if no ini-file option exists but is queried.
+
+ The value of ini-variables can be retrieved via a call to
+ :py:func:`config.getini(name) <_pytest.config.Config.getini>`.
+ """
+ assert type in (None, "pathlist", "args", "linelist", "bool")
+ self._inidict[name] = (help, type, default)
+ self._ininames.append(name)
+
+
+class ArgumentError(Exception):
+ """
+ Raised if an Argument instance is created with invalid or
+ inconsistent arguments.
+ """
+
+ def __init__(self, msg, option):
+ self.msg = msg
+ self.option_id = str(option)
+
+ def __str__(self):
+ if self.option_id:
+ return "option %s: %s" % (self.option_id, self.msg)
+ else:
+ return self.msg
+
+
+class Argument(object):
+ """class that mimics the necessary behaviour of optparse.Option
+
+    it's currently a least-effort implementation
+    that ignores choices and integer prefixes
+ https://docs.python.org/3/library/optparse.html#optparse-standard-option-types
+ """
+ _typ_map = {"int": int, "string": str, "float": float, "complex": complex}
+
+ def __init__(self, *names, **attrs):
+ """store parms in private vars for use in add_argument"""
+ self._attrs = attrs
+ self._short_opts = []
+ self._long_opts = []
+ self.dest = attrs.get("dest")
+ if "%default" in (attrs.get("help") or ""):
+ warnings.warn(
+ 'pytest now uses argparse. "%default" should be'
+ ' changed to "%(default)s" ',
+ DeprecationWarning,
+ stacklevel=3,
+ )
+ try:
+ typ = attrs["type"]
+ except KeyError:
+ pass
+ else:
+ # this might raise a keyerror as well, don't want to catch that
+ if isinstance(typ, six.string_types):
+ if typ == "choice":
+ warnings.warn(
+ "type argument to addoption() is a string %r."
+                        " For argparse this is optional and, when supplied,"
+ " should be a type."
+ " (options: %s)" % (typ, names),
+ DeprecationWarning,
+ stacklevel=3,
+ )
+ # argparse expects a type here take it from
+ # the type of the first element
+ attrs["type"] = type(attrs["choices"][0])
+ else:
+ warnings.warn(
+ "type argument to addoption() is a string %r."
+                        " For argparse this should be a type."
+ " (options: %s)" % (typ, names),
+ DeprecationWarning,
+ stacklevel=3,
+ )
+ attrs["type"] = Argument._typ_map[typ]
+ # used in test_parseopt -> test_parse_defaultgetter
+ self.type = attrs["type"]
+ else:
+ self.type = typ
+ try:
+ # attribute existence is tested in Config._processopt
+ self.default = attrs["default"]
+ except KeyError:
+ pass
+ self._set_opt_strings(names)
+ if not self.dest:
+ if self._long_opts:
+ self.dest = self._long_opts[0][2:].replace("-", "_")
+ else:
+ try:
+ self.dest = self._short_opts[0][1:]
+ except IndexError:
+ raise ArgumentError("need a long or short option", self)
+
+ def names(self):
+ return self._short_opts + self._long_opts
+
+ def attrs(self):
+ # update any attributes set by processopt
+ attrs = "default dest help".split()
+ if self.dest:
+ attrs.append(self.dest)
+ for attr in attrs:
+ try:
+ self._attrs[attr] = getattr(self, attr)
+ except AttributeError:
+ pass
+ if self._attrs.get("help"):
+ a = self._attrs["help"]
+ a = a.replace("%default", "%(default)s")
+ # a = a.replace('%prog', '%(prog)s')
+ self._attrs["help"] = a
+ return self._attrs
+
+ def _set_opt_strings(self, opts):
+ """directly from optparse
+
+ might not be necessary as this is passed to argparse later on"""
+ for opt in opts:
+ if len(opt) < 2:
+ raise ArgumentError(
+ "invalid option string %r: "
+ "must be at least two characters long" % opt,
+ self,
+ )
+ elif len(opt) == 2:
+ if not (opt[0] == "-" and opt[1] != "-"):
+ raise ArgumentError(
+ "invalid short option string %r: "
+ "must be of the form -x, (x any non-dash char)" % opt,
+ self,
+ )
+ self._short_opts.append(opt)
+ else:
+ if not (opt[0:2] == "--" and opt[2] != "-"):
+ raise ArgumentError(
+ "invalid long option string %r: "
+ "must start with --, followed by non-dash" % opt,
+ self,
+ )
+ self._long_opts.append(opt)
+
+ def __repr__(self):
+ args = []
+ if self._short_opts:
+ args += ["_short_opts: " + repr(self._short_opts)]
+ if self._long_opts:
+ args += ["_long_opts: " + repr(self._long_opts)]
+ args += ["dest: " + repr(self.dest)]
+ if hasattr(self, "type"):
+ args += ["type: " + repr(self.type)]
+ if hasattr(self, "default"):
+ args += ["default: " + repr(self.default)]
+ return "Argument({})".format(", ".join(args))
+
+
+class OptionGroup(object):
+
+ def __init__(self, name, description="", parser=None):
+ self.name = name
+ self.description = description
+ self.options = []
+ self.parser = parser
+
+ def addoption(self, *optnames, **attrs):
+ """ add an option to this group.
+
+ if a shortened version of a long option is specified it will
+ be suppressed in the help. addoption('--twowords', '--two-words')
+ results in help showing '--two-words' only, but --twowords gets
+ accepted **and** the automatic destination is in args.twowords
+ """
+ conflict = set(optnames).intersection(
+ name for opt in self.options for name in opt.names()
+ )
+ if conflict:
+ raise ValueError("option names %s already added" % conflict)
+ option = Argument(*optnames, **attrs)
+ self._addoption_instance(option, shortupper=False)
+
+ def _addoption(self, *optnames, **attrs):
+ option = Argument(*optnames, **attrs)
+ self._addoption_instance(option, shortupper=True)
+
+ def _addoption_instance(self, option, shortupper=False):
+ if not shortupper:
+ for opt in option._short_opts:
+ if opt[0] == "-" and opt[1].islower():
+ raise ValueError("lowercase shortoptions reserved")
+ if self.parser:
+ self.parser.processoption(option)
+ self.options.append(option)
+
+
+class MyOptionParser(argparse.ArgumentParser):
+
+ def __init__(self, parser, extra_info=None):
+ if not extra_info:
+ extra_info = {}
+ self._parser = parser
+ argparse.ArgumentParser.__init__(
+ self,
+ usage=parser._usage,
+ add_help=False,
+ formatter_class=DropShorterLongHelpFormatter,
+ )
+ # extra_info is a dict of (param -> value) to display if there's
+        # a usage error, to provide more contextual information to the user
+ self.extra_info = extra_info
+
+ def parse_args(self, args=None, namespace=None):
+ """allow splitting of positional arguments"""
+ args, argv = self.parse_known_args(args, namespace)
+ if argv:
+ for arg in argv:
+ if arg and arg[0] == "-":
+ lines = ["unrecognized arguments: %s" % (" ".join(argv))]
+ for k, v in sorted(self.extra_info.items()):
+ lines.append(" %s: %s" % (k, v))
+ self.error("\n".join(lines))
+ getattr(args, FILE_OR_DIR).extend(argv)
+ return args
+
+
+class DropShorterLongHelpFormatter(argparse.HelpFormatter):
+ """shorten help for long options that differ only in extra hyphens
+
+ - collapse **long** options that are the same except for extra hyphens
+    - special action attribute map_long_option allows suppressing additional
+ long options
+ - shortcut if there are only two options and one of them is a short one
+ - cache result on action object as this is called at least 2 times
+ """
+
+ def _format_action_invocation(self, action):
+ orgstr = argparse.HelpFormatter._format_action_invocation(self, action)
+ if orgstr and orgstr[0] != "-": # only optional arguments
+ return orgstr
+ res = getattr(action, "_formatted_action_invocation", None)
+ if res:
+ return res
+ options = orgstr.split(", ")
+ if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2):
+ # a shortcut for '-h, --help' or '--abc', '-a'
+ action._formatted_action_invocation = orgstr
+ return orgstr
+ return_list = []
+ option_map = getattr(action, "map_long_option", {})
+ if option_map is None:
+ option_map = {}
+ short_long = {}
+ for option in options:
+ if len(option) == 2 or option[2] == " ":
+ continue
+ if not option.startswith("--"):
+ raise ArgumentError(
+ 'long optional argument without "--": [%s]' % (option), self
+ )
+ xxoption = option[2:]
+ if xxoption.split()[0] not in option_map:
+ shortened = xxoption.replace("-", "")
+ if (
+ shortened not in short_long
+ or len(short_long[shortened]) < len(xxoption)
+ ):
+ short_long[shortened] = xxoption
+ # now short_long has been filled out to the longest with dashes
+ # **and** we keep the right option ordering from add_argument
+ for option in options:
+ if len(option) == 2 or option[2] == " ":
+ return_list.append(option)
+ if option[2:] == short_long.get(option.replace("-", "")):
+ return_list.append(option.replace(" ", "=", 1))
+ action._formatted_action_invocation = ", ".join(return_list)
+ return action._formatted_action_invocation
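The Parser defined above is the object pytest hands to the pytest_addoption hook, which is the usual way its addoption()/addini() methods are exercised. A sketch of a conftest.py using them follows; the option and ini names are invented for illustration:

    # conftest.py
    def pytest_addoption(parser):
        group = parser.getgroup("demo", "demo-specific options")
        group.addoption(
            "--run-slow",                # dest defaults to "run_slow"
            action="store_true",
            default=False,
            help="also run tests marked as slow",
        )
        parser.addini("demo_timeout", "per-test timeout in seconds", default="30")

    def pytest_configure(config):
        # values become available on the fully parsed Config object
        print(config.getoption("run_slow"), config.getini("demo_timeout"))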
diff --git a/third_party/python/pytest/src/_pytest/config/exceptions.py b/third_party/python/pytest/src/_pytest/config/exceptions.py
new file mode 100644
index 0000000000..64bae834d1
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/config/exceptions.py
@@ -0,0 +1,8 @@
+class UsageError(Exception):
+ """ error in pytest usage or invocation"""
+
+
+class PrintHelp(Exception):
+ """Raised when pytest should print it's help to skip the rest of the
+ argument parsing and validation."""
+ pass
diff --git a/third_party/python/pytest/src/_pytest/config/findpaths.py b/third_party/python/pytest/src/_pytest/config/findpaths.py
new file mode 100644
index 0000000000..fde7bddb93
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/config/findpaths.py
@@ -0,0 +1,140 @@
+import py
+import os
+from .exceptions import UsageError
+
+
+def exists(path, ignore=EnvironmentError):
+ try:
+ return path.check()
+ except ignore:
+ return False
+
+
+def getcfg(args, warnfunc=None):
+ """
+ Search the list of arguments for a valid ini-file for pytest,
+ and return a tuple of (rootdir, inifile, cfg-dict).
+
+ note: warnfunc is an optional function used to warn
+ about ini-files that use deprecated features.
+ This parameter should be removed when pytest
+ adopts standard deprecation warnings (#1804).
+ """
+ from _pytest.deprecated import CFG_PYTEST_SECTION
+
+ inibasenames = ["pytest.ini", "tox.ini", "setup.cfg"]
+ args = [x for x in args if not str(x).startswith("-")]
+ if not args:
+ args = [py.path.local()]
+ for arg in args:
+ arg = py.path.local(arg)
+ for base in arg.parts(reverse=True):
+ for inibasename in inibasenames:
+ p = base.join(inibasename)
+ if exists(p):
+ iniconfig = py.iniconfig.IniConfig(p)
+ if "pytest" in iniconfig.sections:
+ if inibasename == "setup.cfg" and warnfunc:
+ warnfunc(
+ "C1", CFG_PYTEST_SECTION.format(filename=inibasename)
+ )
+ return base, p, iniconfig["pytest"]
+ if (
+ inibasename == "setup.cfg"
+ and "tool:pytest" in iniconfig.sections
+ ):
+ return base, p, iniconfig["tool:pytest"]
+ elif inibasename == "pytest.ini":
+ # allowed to be empty
+ return base, p, {}
+ return None, None, None
+
+
+def get_common_ancestor(paths):
+ common_ancestor = None
+ for path in paths:
+ if not path.exists():
+ continue
+ if common_ancestor is None:
+ common_ancestor = path
+ else:
+ if path.relto(common_ancestor) or path == common_ancestor:
+ continue
+ elif common_ancestor.relto(path):
+ common_ancestor = path
+ else:
+ shared = path.common(common_ancestor)
+ if shared is not None:
+ common_ancestor = shared
+ if common_ancestor is None:
+ common_ancestor = py.path.local()
+ elif common_ancestor.isfile():
+ common_ancestor = common_ancestor.dirpath()
+ return common_ancestor
+
+
+def get_dirs_from_args(args):
+
+ def is_option(x):
+ return str(x).startswith("-")
+
+ def get_file_part_from_node_id(x):
+ return str(x).split("::")[0]
+
+ def get_dir_from_path(path):
+ if path.isdir():
+ return path
+ return py.path.local(path.dirname)
+
+ # These look like paths but may not exist
+ possible_paths = (
+ py.path.local(get_file_part_from_node_id(arg))
+ for arg in args
+ if not is_option(arg)
+ )
+
+ return [get_dir_from_path(path) for path in possible_paths if path.exists()]
+
+
+def determine_setup(inifile, args, warnfunc=None, rootdir_cmd_arg=None):
+ dirs = get_dirs_from_args(args)
+ if inifile:
+ iniconfig = py.iniconfig.IniConfig(inifile)
+ is_cfg_file = str(inifile).endswith(".cfg")
+        # TODO: [pytest] section in *.cfg files is deprecated. Needs refactoring.
+ sections = ["tool:pytest", "pytest"] if is_cfg_file else ["pytest"]
+ for section in sections:
+ try:
+ inicfg = iniconfig[section]
+ if is_cfg_file and section == "pytest" and warnfunc:
+ from _pytest.deprecated import CFG_PYTEST_SECTION
+
+ warnfunc("C1", CFG_PYTEST_SECTION.format(filename=str(inifile)))
+ break
+ except KeyError:
+ inicfg = None
+ rootdir = get_common_ancestor(dirs)
+ else:
+ ancestor = get_common_ancestor(dirs)
+ rootdir, inifile, inicfg = getcfg([ancestor], warnfunc=warnfunc)
+ if rootdir is None:
+ for rootdir in ancestor.parts(reverse=True):
+ if rootdir.join("setup.py").exists():
+ break
+ else:
+ rootdir, inifile, inicfg = getcfg(dirs, warnfunc=warnfunc)
+ if rootdir is None:
+ rootdir = get_common_ancestor([py.path.local(), ancestor])
+ is_fs_root = os.path.splitdrive(str(rootdir))[1] == "/"
+ if is_fs_root:
+ rootdir = ancestor
+ if rootdir_cmd_arg:
+ rootdir_abs_path = py.path.local(os.path.expandvars(rootdir_cmd_arg))
+ if not os.path.isdir(str(rootdir_abs_path)):
+ raise UsageError(
+ "Directory '{}' not found. Check your '--rootdir' option.".format(
+ rootdir_abs_path
+ )
+ )
+ rootdir = rootdir_abs_path
+ return rootdir, inifile, inicfg or {}
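Rootdir discovery above starts from the directories implied by the command line arguments. The following standalone sketch mirrors the filtering done by get_dirs_from_args, using stdlib paths instead of py.path; the argument values are invented for illustration:

    import os

    def dirs_from_args(args):
        # drop option-looking arguments, strip "::" node-id suffixes, map files
        # to their parent directory, and keep only paths that actually exist
        dirs = []
        for arg in args:
            if str(arg).startswith("-"):
                continue
            path = str(arg).split("::")[0]
            if not os.path.exists(path):
                continue
            dirs.append(path if os.path.isdir(path) else os.path.dirname(path) or ".")
        return dirs

    # with an existing tests/test_io.py this would yield ['tests']
    print(dirs_from_args(["-q", "tests/test_io.py::TestRead::test_empty"]))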
diff --git a/third_party/python/pytest/src/_pytest/debugging.py b/third_party/python/pytest/src/_pytest/debugging.py
new file mode 100644
index 0000000000..2e253aaa2a
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/debugging.py
@@ -0,0 +1,162 @@
+""" interactive debugging with PDB, the Python Debugger. """
+from __future__ import absolute_import, division, print_function
+import pdb
+import sys
+import os
+from doctest import UnexpectedException
+
+try:
+ from builtins import breakpoint # noqa
+
+ SUPPORTS_BREAKPOINT_BUILTIN = True
+except ImportError:
+ SUPPORTS_BREAKPOINT_BUILTIN = False
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group._addoption(
+ "--pdb",
+ dest="usepdb",
+ action="store_true",
+ help="start the interactive Python debugger on errors or KeyboardInterrupt.",
+ )
+ group._addoption(
+ "--pdbcls",
+ dest="usepdb_cls",
+ metavar="modulename:classname",
+ help="start a custom interactive Python debugger on errors. "
+ "For example: --pdbcls=IPython.terminal.debugger:TerminalPdb",
+ )
+
+
+def pytest_configure(config):
+ if config.getvalue("usepdb_cls"):
+ modname, classname = config.getvalue("usepdb_cls").split(":")
+ __import__(modname)
+ pdb_cls = getattr(sys.modules[modname], classname)
+ else:
+ pdb_cls = pdb.Pdb
+
+ if config.getvalue("usepdb"):
+ config.pluginmanager.register(PdbInvoke(), "pdbinvoke")
+
+ # Use custom Pdb class set_trace instead of default Pdb on breakpoint() call
+ if SUPPORTS_BREAKPOINT_BUILTIN:
+ _environ_pythonbreakpoint = os.environ.get("PYTHONBREAKPOINT", "")
+ if _environ_pythonbreakpoint == "":
+ sys.breakpointhook = pytestPDB.set_trace
+
+ old = (pdb.set_trace, pytestPDB._pluginmanager)
+
+ def fin():
+ pdb.set_trace, pytestPDB._pluginmanager = old
+ pytestPDB._config = None
+ pytestPDB._pdb_cls = pdb.Pdb
+ if SUPPORTS_BREAKPOINT_BUILTIN:
+ sys.breakpointhook = sys.__breakpointhook__
+
+ pdb.set_trace = pytestPDB.set_trace
+ pytestPDB._pluginmanager = config.pluginmanager
+ pytestPDB._config = config
+ pytestPDB._pdb_cls = pdb_cls
+ config._cleanup.append(fin)
+
+
+class pytestPDB(object):
+ """ Pseudo PDB that defers to the real pdb. """
+ _pluginmanager = None
+ _config = None
+ _pdb_cls = pdb.Pdb
+
+ @classmethod
+ def set_trace(cls):
+ """ invoke PDB set_trace debugging, dropping any IO capturing. """
+ import _pytest.config
+
+ frame = sys._getframe().f_back
+ if cls._pluginmanager is not None:
+ capman = cls._pluginmanager.getplugin("capturemanager")
+ if capman:
+ capman.suspend_global_capture(in_=True)
+ tw = _pytest.config.create_terminal_writer(cls._config)
+ tw.line()
+ tw.sep(">", "PDB set_trace (IO-capturing turned off)")
+ cls._pluginmanager.hook.pytest_enter_pdb(config=cls._config)
+ cls._pdb_cls().set_trace(frame)
+
+
+class PdbInvoke(object):
+
+ def pytest_exception_interact(self, node, call, report):
+ capman = node.config.pluginmanager.getplugin("capturemanager")
+ if capman:
+ out, err = capman.suspend_global_capture(in_=True)
+ sys.stdout.write(out)
+ sys.stdout.write(err)
+ _enter_pdb(node, call.excinfo, report)
+
+ def pytest_internalerror(self, excrepr, excinfo):
+ for line in str(excrepr).split("\n"):
+ sys.stderr.write("INTERNALERROR> %s\n" % line)
+ sys.stderr.flush()
+ tb = _postmortem_traceback(excinfo)
+ post_mortem(tb)
+
+
+def _enter_pdb(node, excinfo, rep):
+ # XXX we re-use the TerminalReporter's terminalwriter
+    # because this seems to avoid some encoding-related troubles,
+    # for reasons that are not completely clear.
+ tw = node.config.pluginmanager.getplugin("terminalreporter")._tw
+ tw.line()
+
+ showcapture = node.config.option.showcapture
+
+ for sectionname, content in (
+ ("stdout", rep.capstdout), ("stderr", rep.capstderr), ("log", rep.caplog)
+ ):
+ if showcapture in (sectionname, "all") and content:
+ tw.sep(">", "captured " + sectionname)
+ if content[-1:] == "\n":
+ content = content[:-1]
+ tw.line(content)
+
+ tw.sep(">", "traceback")
+ rep.toterminal(tw)
+ tw.sep(">", "entering PDB")
+ tb = _postmortem_traceback(excinfo)
+ post_mortem(tb)
+ rep._pdbshown = True
+ return rep
+
+
+def _postmortem_traceback(excinfo):
+ if isinstance(excinfo.value, UnexpectedException):
+ # A doctest.UnexpectedException is not useful for post_mortem.
+ # Use the underlying exception instead:
+ return excinfo.value.exc_info[2]
+ else:
+ return excinfo._excinfo[2]
+
+
+def _find_last_non_hidden_frame(stack):
+ i = max(0, len(stack) - 1)
+ while i and stack[i][0].f_locals.get("__tracebackhide__", False):
+ i -= 1
+ return i
+
+
+def post_mortem(t):
+
+ class Pdb(pytestPDB._pdb_cls):
+
+ def get_stack(self, f, t):
+ stack, i = pdb.Pdb.get_stack(self, f, t)
+ if f is None:
+ i = _find_last_non_hidden_frame(stack)
+ return stack, i
+
+ p = Pdb()
+ p.reset()
+ p.interaction(None, t)
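_find_last_non_hidden_frame above walks past stack frames whose locals set __tracebackhide__, so the post-mortem debugger starts in the user's own frame rather than inside a helper. A small sketch of a test helper that relies on this flag (the helper and test names are invented):

    import pytest

    def check_positive(value):
        __tracebackhide__ = True  # hide this helper frame from tracebacks and pdb
        if value <= 0:
            pytest.fail("expected a positive value, got %r" % (value,))

    def test_example():
        check_positive(-1)  # the reported failure points at this line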
diff --git a/third_party/python/pytest/src/_pytest/deprecated.py b/third_party/python/pytest/src/_pytest/deprecated.py
new file mode 100644
index 0000000000..7ebdcf9997
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/deprecated.py
@@ -0,0 +1,65 @@
+"""
+This module contains deprecation messages and bits of code used elsewhere in the codebase
+that are planned to be removed in the next pytest release.
+
+Keeping it in a central location makes it easy to track what is deprecated and should
+be removed when the time comes.
+"""
+from __future__ import absolute_import, division, print_function
+
+
+class RemovedInPytest4Warning(DeprecationWarning):
+ """warning class for features removed in pytest 4.0"""
+
+
+MAIN_STR_ARGS = "passing a string to pytest.main() is deprecated, " "pass a list of arguments instead."
+
+YIELD_TESTS = "yield tests are deprecated, and scheduled to be removed in pytest 4.0"
+
+FUNCARG_PREFIX = (
+ '{name}: declaring fixtures using "pytest_funcarg__" prefix is deprecated '
+ "and scheduled to be removed in pytest 4.0. "
+ "Please remove the prefix and use the @pytest.fixture decorator instead."
+)
+
+CFG_PYTEST_SECTION = "[pytest] section in {filename} files is deprecated, use [tool:pytest] instead."
+
+GETFUNCARGVALUE = "use of getfuncargvalue is deprecated, use getfixturevalue"
+
+RESULT_LOG = (
+ "--result-log is deprecated and scheduled for removal in pytest 4.0.\n"
+ "See https://docs.pytest.org/en/latest/usage.html#creating-resultlog-format-files for more information."
+)
+
+MARK_INFO_ATTRIBUTE = RemovedInPytest4Warning(
+ "MarkInfo objects are deprecated as they contain merged marks which are hard to deal with correctly.\n"
+ "Please use node.get_closest_marker(name) or node.iter_markers(name).\n"
+ "Docs: https://docs.pytest.org/en/latest/mark.html#updating-code"
+)
+
+MARK_PARAMETERSET_UNPACKING = RemovedInPytest4Warning(
+ "Applying marks directly to parameters is deprecated,"
+ " please use pytest.param(..., marks=...) instead.\n"
+ "For more details, see: https://docs.pytest.org/en/latest/parametrize.html"
+)
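+# Illustrative sketch of the migration this warning asks for (names assumed):
+#     # deprecated: marks applied directly to a parameter value
+#     @pytest.mark.parametrize("n", [1, pytest.mark.xfail(2)])
+#     # replacement: wrap the value with pytest.param
+#     @pytest.mark.parametrize("n", [1, pytest.param(2, marks=pytest.mark.xfail)])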
+
+RECORD_XML_PROPERTY = (
+ 'Fixture renamed from "record_xml_property" to "record_property" as user '
+ "properties are now available to all reporters.\n"
+ '"record_xml_property" is now deprecated.'
+)
+
+COLLECTOR_MAKEITEM = RemovedInPytest4Warning(
+ "pycollector makeitem was removed " "as it is an accidentially leaked internal api"
+)
+
+METAFUNC_ADD_CALL = (
+ "Metafunc.addcall is deprecated and scheduled to be removed in pytest 4.0.\n"
+ "Please use Metafunc.parametrize instead."
+)
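+# Illustrative sketch of the suggested replacement (hypothetical hook):
+#     def pytest_generate_tests(metafunc):
+#         # deprecated: metafunc.addcall(funcargs=dict(x=1))
+#         metafunc.parametrize("x", [1, 2])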
+
+PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST = RemovedInPytest4Warning(
+ "Defining pytest_plugins in a non-top-level conftest is deprecated, "
+ "because it affects the entire directory tree in a non-explicit way.\n"
+ "Please move it to the top level conftest file instead."
+)
diff --git a/third_party/python/pytest/src/_pytest/doctest.py b/third_party/python/pytest/src/_pytest/doctest.py
new file mode 100644
index 0000000000..b0a3ad08df
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/doctest.py
@@ -0,0 +1,520 @@
+""" discover and run doctests in modules and test files."""
+from __future__ import absolute_import, division, print_function
+
+import traceback
+import sys
+import platform
+
+import pytest
+from _pytest._code.code import ExceptionInfo, ReprFileLocation, TerminalRepr
+from _pytest.fixtures import FixtureRequest
+
+
+DOCTEST_REPORT_CHOICE_NONE = "none"
+DOCTEST_REPORT_CHOICE_CDIFF = "cdiff"
+DOCTEST_REPORT_CHOICE_NDIFF = "ndiff"
+DOCTEST_REPORT_CHOICE_UDIFF = "udiff"
+DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = "only_first_failure"
+
+DOCTEST_REPORT_CHOICES = (
+ DOCTEST_REPORT_CHOICE_NONE,
+ DOCTEST_REPORT_CHOICE_CDIFF,
+ DOCTEST_REPORT_CHOICE_NDIFF,
+ DOCTEST_REPORT_CHOICE_UDIFF,
+ DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE,
+)
+
+# Lazy definition of runner class
+RUNNER_CLASS = None
+
+
+def pytest_addoption(parser):
+ parser.addini(
+ "doctest_optionflags",
+ "option flags for doctests",
+ type="args",
+ default=["ELLIPSIS"],
+ )
+ parser.addini(
+ "doctest_encoding", "encoding used for doctest files", default="utf-8"
+ )
+ group = parser.getgroup("collect")
+ group.addoption(
+ "--doctest-modules",
+ action="store_true",
+ default=False,
+ help="run doctests in all .py modules",
+ dest="doctestmodules",
+ )
+ group.addoption(
+ "--doctest-report",
+ type=str.lower,
+ default="udiff",
+ help="choose another output format for diffs on doctest failure",
+ choices=DOCTEST_REPORT_CHOICES,
+ dest="doctestreport",
+ )
+ group.addoption(
+ "--doctest-glob",
+ action="append",
+ default=[],
+ metavar="pat",
+ help="doctests file matching pattern, default: test*.txt",
+ dest="doctestglob",
+ )
+ group.addoption(
+ "--doctest-ignore-import-errors",
+ action="store_true",
+ default=False,
+ help="ignore doctest ImportErrors",
+ dest="doctest_ignore_import_errors",
+ )
+ group.addoption(
+ "--doctest-continue-on-failure",
+ action="store_true",
+ default=False,
+ help="for a given doctest, continue to run after the first failure",
+ dest="doctest_continue_on_failure",
+ )
+
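+# Example configuration for the options registered above (a hedged sketch,
+# not shipped with pytest); in a project's pytest.ini one might write:
+#     [pytest]
+#     doctest_optionflags = ELLIPSIS NORMALIZE_WHITESPACE
+#     doctest_encoding = utf-8
+# and on the command line, e.g.:
+#     pytest --doctest-modules --doctest-glob="*.rst"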
+
+def pytest_collect_file(path, parent):
+ config = parent.config
+ if path.ext == ".py":
+ if config.option.doctestmodules and not _is_setup_py(config, path, parent):
+ return DoctestModule(path, parent)
+ elif _is_doctest(config, path, parent):
+ return DoctestTextfile(path, parent)
+
+
+def _is_setup_py(config, path, parent):
+ if path.basename != "setup.py":
+ return False
+ contents = path.read()
+ return "setuptools" in contents or "distutils" in contents
+
+
+def _is_doctest(config, path, parent):
+ if path.ext in (".txt", ".rst") and parent.session.isinitpath(path):
+ return True
+ globs = config.getoption("doctestglob") or ["test*.txt"]
+ for glob in globs:
+ if path.check(fnmatch=glob):
+ return True
+ return False
+
+
+class ReprFailDoctest(TerminalRepr):
+
+ def __init__(self, reprlocation_lines):
+ # List of (reprlocation, lines) tuples
+ self.reprlocation_lines = reprlocation_lines
+
+ def toterminal(self, tw):
+ for reprlocation, lines in self.reprlocation_lines:
+ for line in lines:
+ tw.line(line)
+ reprlocation.toterminal(tw)
+
+
+class MultipleDoctestFailures(Exception):
+
+ def __init__(self, failures):
+ super(MultipleDoctestFailures, self).__init__()
+ self.failures = failures
+
+
+def _init_runner_class():
+ import doctest
+
+ class PytestDoctestRunner(doctest.DebugRunner):
+ """
+ Runner to collect failures. Note that the out variable in this case is
+        a list instead of a stdout-like object.
+ """
+
+ def __init__(
+ self, checker=None, verbose=None, optionflags=0, continue_on_failure=True
+ ):
+ doctest.DebugRunner.__init__(
+ self, checker=checker, verbose=verbose, optionflags=optionflags
+ )
+ self.continue_on_failure = continue_on_failure
+
+ def report_failure(self, out, test, example, got):
+ failure = doctest.DocTestFailure(test, example, got)
+ if self.continue_on_failure:
+ out.append(failure)
+ else:
+ raise failure
+
+ def report_unexpected_exception(self, out, test, example, exc_info):
+ failure = doctest.UnexpectedException(test, example, exc_info)
+ if self.continue_on_failure:
+ out.append(failure)
+ else:
+ raise failure
+
+ return PytestDoctestRunner
+
+
+def _get_runner(checker=None, verbose=None, optionflags=0, continue_on_failure=True):
+ # We need this in order to do a lazy import on doctest
+ global RUNNER_CLASS
+ if RUNNER_CLASS is None:
+ RUNNER_CLASS = _init_runner_class()
+ return RUNNER_CLASS(
+ checker=checker,
+ verbose=verbose,
+ optionflags=optionflags,
+ continue_on_failure=continue_on_failure,
+ )
+
+
+class DoctestItem(pytest.Item):
+
+ def __init__(self, name, parent, runner=None, dtest=None):
+ super(DoctestItem, self).__init__(name, parent)
+ self.runner = runner
+ self.dtest = dtest
+ self.obj = None
+ self.fixture_request = None
+
+ def setup(self):
+ if self.dtest is not None:
+ self.fixture_request = _setup_fixtures(self)
+ globs = dict(getfixture=self.fixture_request.getfixturevalue)
+ for name, value in self.fixture_request.getfixturevalue(
+ "doctest_namespace"
+ ).items():
+ globs[name] = value
+ self.dtest.globs.update(globs)
+
+ def runtest(self):
+ _check_all_skipped(self.dtest)
+ self._disable_output_capturing_for_darwin()
+ failures = []
+ self.runner.run(self.dtest, out=failures)
+ if failures:
+ raise MultipleDoctestFailures(failures)
+
+ def _disable_output_capturing_for_darwin(self):
+ """
+ Disable output capturing. Otherwise, stdout is lost to doctest (#985)
+ """
+ if platform.system() != "Darwin":
+ return
+ capman = self.config.pluginmanager.getplugin("capturemanager")
+ if capman:
+ out, err = capman.suspend_global_capture(in_=True)
+ sys.stdout.write(out)
+ sys.stderr.write(err)
+
+ def repr_failure(self, excinfo):
+ import doctest
+
+ failures = None
+ if excinfo.errisinstance((doctest.DocTestFailure, doctest.UnexpectedException)):
+ failures = [excinfo.value]
+ elif excinfo.errisinstance(MultipleDoctestFailures):
+ failures = excinfo.value.failures
+
+ if failures is not None:
+ reprlocation_lines = []
+ for failure in failures:
+ example = failure.example
+ test = failure.test
+ filename = test.filename
+ if test.lineno is None:
+ lineno = None
+ else:
+ lineno = test.lineno + example.lineno + 1
+ message = type(failure).__name__
+ reprlocation = ReprFileLocation(filename, lineno, message)
+ checker = _get_checker()
+ report_choice = _get_report_choice(
+ self.config.getoption("doctestreport")
+ )
+ if lineno is not None:
+ lines = failure.test.docstring.splitlines(False)
+ # add line numbers to the left of the error message
+ lines = [
+ "%03d %s" % (i + test.lineno + 1, x)
+ for (i, x) in enumerate(lines)
+ ]
+ # trim docstring error lines to 10
+ lines = lines[max(example.lineno - 9, 0):example.lineno + 1]
+ else:
+ lines = [
+ "EXAMPLE LOCATION UNKNOWN, not showing all tests of that example"
+ ]
+ indent = ">>>"
+ for line in example.source.splitlines():
+ lines.append("??? %s %s" % (indent, line))
+ indent = "..."
+ if isinstance(failure, doctest.DocTestFailure):
+ lines += checker.output_difference(
+ example, failure.got, report_choice
+ ).split(
+ "\n"
+ )
+ else:
+ inner_excinfo = ExceptionInfo(failure.exc_info)
+ lines += ["UNEXPECTED EXCEPTION: %s" % repr(inner_excinfo.value)]
+ lines += traceback.format_exception(*failure.exc_info)
+ reprlocation_lines.append((reprlocation, lines))
+ return ReprFailDoctest(reprlocation_lines)
+ else:
+ return super(DoctestItem, self).repr_failure(excinfo)
+
+ def reportinfo(self):
+ return self.fspath, self.dtest.lineno, "[doctest] %s" % self.name
+
+
+def _get_flag_lookup():
+ import doctest
+
+ return dict(
+ DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1,
+ DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE,
+ NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE,
+ ELLIPSIS=doctest.ELLIPSIS,
+ IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL,
+ COMPARISON_FLAGS=doctest.COMPARISON_FLAGS,
+ ALLOW_UNICODE=_get_allow_unicode_flag(),
+ ALLOW_BYTES=_get_allow_bytes_flag(),
+ )
+
+
+def get_optionflags(parent):
+ optionflags_str = parent.config.getini("doctest_optionflags")
+ flag_lookup_table = _get_flag_lookup()
+ flag_acc = 0
+ for flag in optionflags_str:
+ flag_acc |= flag_lookup_table[flag]
+ return flag_acc
+
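+# For example (hedged illustration): with doctest_optionflags set to
+# "ELLIPSIS NORMALIZE_WHITESPACE" in the ini file, get_optionflags() returns
+# doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE.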
+
+def _get_continue_on_failure(config):
+ continue_on_failure = config.getvalue("doctest_continue_on_failure")
+ if continue_on_failure:
+ # We need to turn off this if we use pdb since we should stop at
+ # the first failure
+ if config.getvalue("usepdb"):
+ continue_on_failure = False
+ return continue_on_failure
+
+
+class DoctestTextfile(pytest.Module):
+ obj = None
+
+ def collect(self):
+ import doctest
+
+ # inspired by doctest.testfile; ideally we would use it directly,
+ # but it doesn't support passing a custom checker
+ encoding = self.config.getini("doctest_encoding")
+ text = self.fspath.read_text(encoding)
+ filename = str(self.fspath)
+ name = self.fspath.basename
+ globs = {"__name__": "__main__"}
+
+ optionflags = get_optionflags(self)
+
+ runner = _get_runner(
+ verbose=0,
+ optionflags=optionflags,
+ checker=_get_checker(),
+ continue_on_failure=_get_continue_on_failure(self.config),
+ )
+ _fix_spoof_python2(runner, encoding)
+
+ parser = doctest.DocTestParser()
+ test = parser.get_doctest(text, globs, name, filename, 0)
+ if test.examples:
+ yield DoctestItem(test.name, self, runner, test)
+
+
+def _check_all_skipped(test):
+ """raises pytest.skip() if all examples in the given DocTest have the SKIP
+ option set.
+ """
+ import doctest
+
+ all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples)
+ if all_skipped:
+ pytest.skip("all tests skipped by +SKIP option")
+
+
+class DoctestModule(pytest.Module):
+
+ def collect(self):
+ import doctest
+
+ if self.fspath.basename == "conftest.py":
+ module = self.config.pluginmanager._importconftest(self.fspath)
+ else:
+ try:
+ module = self.fspath.pyimport()
+ except ImportError:
+ if self.config.getvalue("doctest_ignore_import_errors"):
+ pytest.skip("unable to import module %r" % self.fspath)
+ else:
+ raise
+ # uses internal doctest module parsing mechanism
+ finder = doctest.DocTestFinder()
+ optionflags = get_optionflags(self)
+ runner = _get_runner(
+ verbose=0,
+ optionflags=optionflags,
+ checker=_get_checker(),
+ continue_on_failure=_get_continue_on_failure(self.config),
+ )
+
+ for test in finder.find(module, module.__name__):
+ if test.examples: # skip empty doctests
+ yield DoctestItem(test.name, self, runner, test)
+
+
+def _setup_fixtures(doctest_item):
+ """
+    Used by DoctestTextfile and DoctestItem to set up fixture information.
+ """
+
+ def func():
+ pass
+
+ doctest_item.funcargs = {}
+ fm = doctest_item.session._fixturemanager
+ doctest_item._fixtureinfo = fm.getfixtureinfo(
+ node=doctest_item, func=func, cls=None, funcargs=False
+ )
+ fixture_request = FixtureRequest(doctest_item)
+ fixture_request._fillfixtures()
+ return fixture_request
+
+
+def _get_checker():
+ """
+    Returns an instance of a doctest.OutputChecker subclass that takes into
+    account the ALLOW_UNICODE option to ignore u'' prefixes in strings and
+    the ALLOW_BYTES option to strip b'' prefixes.
+ Useful when the same doctest should run in Python 2 and Python 3.
+
+ An inner class is used to avoid importing "doctest" at the module
+ level.
+ """
+ if hasattr(_get_checker, "LiteralsOutputChecker"):
+ return _get_checker.LiteralsOutputChecker()
+
+ import doctest
+ import re
+
+ class LiteralsOutputChecker(doctest.OutputChecker):
+ """
+ Copied from doctest_nose_plugin.py from the nltk project:
+ https://github.com/nltk/nltk
+
+ Further extended to also support byte literals.
+ """
+
+ _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
+ _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE)
+
+ def check_output(self, want, got, optionflags):
+ res = doctest.OutputChecker.check_output(self, want, got, optionflags)
+ if res:
+ return True
+
+ allow_unicode = optionflags & _get_allow_unicode_flag()
+ allow_bytes = optionflags & _get_allow_bytes_flag()
+ if not allow_unicode and not allow_bytes:
+ return False
+
+ else: # pragma: no cover
+
+ def remove_prefixes(regex, txt):
+ return re.sub(regex, r"\1\2", txt)
+
+ if allow_unicode:
+ want = remove_prefixes(self._unicode_literal_re, want)
+ got = remove_prefixes(self._unicode_literal_re, got)
+ if allow_bytes:
+ want = remove_prefixes(self._bytes_literal_re, want)
+ got = remove_prefixes(self._bytes_literal_re, got)
+ res = doctest.OutputChecker.check_output(self, want, got, optionflags)
+ return res
+
+ _get_checker.LiteralsOutputChecker = LiteralsOutputChecker
+ return _get_checker.LiteralsOutputChecker()
+
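+# Hedged example of the behaviour provided by the checker above: with the
+# ALLOW_UNICODE directive a doctest written for Python 2, e.g.
+#     >>> get_name()  # doctest: +ALLOW_UNICODE
+#     u'foo'
+# also passes on Python 3 where the repr is 'foo'; ALLOW_BYTES does the same
+# for b'' prefixes ("get_name" is an assumed example function).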
+
+def _get_allow_unicode_flag():
+ """
+ Registers and returns the ALLOW_UNICODE flag.
+ """
+ import doctest
+
+ return doctest.register_optionflag("ALLOW_UNICODE")
+
+
+def _get_allow_bytes_flag():
+ """
+ Registers and returns the ALLOW_BYTES flag.
+ """
+ import doctest
+
+ return doctest.register_optionflag("ALLOW_BYTES")
+
+
+def _get_report_choice(key):
+ """
+    This function returns the actual `doctest` module flag value. We want to do this as late
+    as possible to avoid importing `doctest` and all its dependencies when parsing options,
+    as that adds overhead and breaks tests.
+ """
+ import doctest
+
+ return {
+ DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF,
+ DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF,
+ DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF,
+ DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE,
+ DOCTEST_REPORT_CHOICE_NONE: 0,
+ }[
+ key
+ ]
+
+
+def _fix_spoof_python2(runner, encoding):
+ """
+ Installs a "SpoofOut" into the given DebugRunner so it properly deals with unicode output. This
+ should patch only doctests for text files because they don't have a way to declare their
+ encoding. Doctests in docstrings from Python modules don't have the same problem given that
+ Python already decoded the strings.
+
+    This fixes the problem reported in issue #2434.
+ """
+ from _pytest.compat import _PY2
+
+ if not _PY2:
+ return
+
+ from doctest import _SpoofOut
+
+ class UnicodeSpoof(_SpoofOut):
+
+ def getvalue(self):
+ result = _SpoofOut.getvalue(self)
+ if encoding and isinstance(result, bytes):
+ result = result.decode(encoding)
+ return result
+
+ runner._fakeout = UnicodeSpoof()
+
+
+@pytest.fixture(scope="session")
+def doctest_namespace():
+ """
+ Fixture that returns a :py:class:`dict` that will be injected into the namespace of doctests.
+ """
+ return dict()
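+# Usage sketch (assumed, typical conftest.py pattern) for the fixture above:
+#     import numpy
+#
+#     @pytest.fixture(autouse=True)
+#     def add_np(doctest_namespace):
+#         doctest_namespace["np"] = numpy
+# after which doctests can use `np` without importing it themselves.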
diff --git a/third_party/python/pytest/src/_pytest/fixtures.py b/third_party/python/pytest/src/_pytest/fixtures.py
new file mode 100644
index 0000000000..495e6b9b33
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/fixtures.py
@@ -0,0 +1,1229 @@
+from __future__ import absolute_import, division, print_function
+
+import functools
+import inspect
+import sys
+import warnings
+from collections import OrderedDict, deque, defaultdict
+from more_itertools import flatten
+
+import attr
+import py
+from py._code.code import FormattedExcinfo
+
+import _pytest
+from _pytest import nodes
+from _pytest._code.code import TerminalRepr
+from _pytest.compat import (
+ NOTSET,
+ exc_clear,
+ _format_args,
+ getfslineno,
+ get_real_func,
+ is_generator,
+ isclass,
+ getimfunc,
+ getlocation,
+ getfuncargnames,
+ safe_getattr,
+ FuncargnamesCompatAttr,
+)
+from _pytest.outcomes import fail, TEST_OUTCOME
+
+FIXTURE_MSG = 'fixtures cannot have "pytest_funcarg__" prefix and be decorated with @pytest.fixture:\n{}'
+
+
+@attr.s(frozen=True)
+class PseudoFixtureDef(object):
+ cached_result = attr.ib()
+ scope = attr.ib()
+
+
+def pytest_sessionstart(session):
+ import _pytest.python
+ import _pytest.nodes
+
+ scopename2class.update(
+ {
+ "class": _pytest.python.Class,
+ "module": _pytest.python.Module,
+ "function": _pytest.nodes.Item,
+ "session": _pytest.main.Session,
+ }
+ )
+ session._fixturemanager = FixtureManager(session)
+
+
+scopename2class = {}
+
+
+scope2props = dict(session=())
+scope2props["module"] = ("fspath", "module")
+scope2props["class"] = scope2props["module"] + ("cls",)
+scope2props["instance"] = scope2props["class"] + ("instance",)
+scope2props["function"] = scope2props["instance"] + ("function", "keywords")
+
+
+def scopeproperty(name=None, doc=None):
+
+ def decoratescope(func):
+ scopename = name or func.__name__
+
+ def provide(self):
+ if func.__name__ in scope2props[self.scope]:
+ return func(self)
+ raise AttributeError(
+ "%s not available in %s-scoped context" % (scopename, self.scope)
+ )
+
+ return property(provide, None, None, func.__doc__)
+
+ return decoratescope
+
+
+def get_scope_node(node, scope):
+ cls = scopename2class.get(scope)
+ if cls is None:
+ raise ValueError("unknown scope")
+ return node.getparent(cls)
+
+
+def add_funcarg_pseudo_fixture_def(collector, metafunc, fixturemanager):
+    # this function will transform all collected calls to a function
+ # if they use direct funcargs (i.e. direct parametrization)
+ # because we want later test execution to be able to rely on
+ # an existing FixtureDef structure for all arguments.
+ # XXX we can probably avoid this algorithm if we modify CallSpec2
+ # to directly care for creating the fixturedefs within its methods.
+ if not metafunc._calls[0].funcargs:
+ return # this function call does not have direct parametrization
+ # collect funcargs of all callspecs into a list of values
+ arg2params = {}
+ arg2scope = {}
+ for callspec in metafunc._calls:
+ for argname, argvalue in callspec.funcargs.items():
+ assert argname not in callspec.params
+ callspec.params[argname] = argvalue
+ arg2params_list = arg2params.setdefault(argname, [])
+ callspec.indices[argname] = len(arg2params_list)
+ arg2params_list.append(argvalue)
+ if argname not in arg2scope:
+ scopenum = callspec._arg2scopenum.get(argname, scopenum_function)
+ arg2scope[argname] = scopes[scopenum]
+ callspec.funcargs.clear()
+
+ # register artificial FixtureDef's so that later at test execution
+ # time we can rely on a proper FixtureDef to exist for fixture setup.
+ arg2fixturedefs = metafunc._arg2fixturedefs
+ for argname, valuelist in arg2params.items():
+ # if we have a scope that is higher than function we need
+ # to make sure we only ever create an according fixturedef on
+ # a per-scope basis. We thus store and cache the fixturedef on the
+ # node related to the scope.
+ scope = arg2scope[argname]
+ node = None
+ if scope != "function":
+ node = get_scope_node(collector, scope)
+ if node is None:
+ assert scope == "class" and isinstance(collector, _pytest.python.Module)
+ # use module-level collector for class-scope (for now)
+ node = collector
+ if node and argname in node._name2pseudofixturedef:
+ arg2fixturedefs[argname] = [node._name2pseudofixturedef[argname]]
+ else:
+ fixturedef = FixtureDef(
+ fixturemanager,
+ "",
+ argname,
+ get_direct_param_fixture_func,
+ arg2scope[argname],
+ valuelist,
+ False,
+ False,
+ )
+ arg2fixturedefs[argname] = [fixturedef]
+ if node is not None:
+ node._name2pseudofixturedef[argname] = fixturedef
+
+
+def getfixturemarker(obj):
+ """ return fixturemarker or None if it doesn't exist or raised
+ exceptions."""
+ try:
+ return getattr(obj, "_pytestfixturefunction", None)
+ except TEST_OUTCOME:
+ # some objects raise errors like request (from flask import request)
+ # we don't expect them to be fixture functions
+ return None
+
+
+def get_parametrized_fixture_keys(item, scopenum):
+ """ return list of keys for all parametrized arguments which match
+ the specified scope. """
+ assert scopenum < scopenum_function # function
+ try:
+ cs = item.callspec
+ except AttributeError:
+ pass
+ else:
+ # cs.indices.items() is random order of argnames. Need to
+ # sort this so that different calls to
+ # get_parametrized_fixture_keys will be deterministic.
+ for argname, param_index in sorted(cs.indices.items()):
+ if cs._arg2scopenum[argname] != scopenum:
+ continue
+ if scopenum == 0: # session
+ key = (argname, param_index)
+ elif scopenum == 1: # module
+ key = (argname, param_index, item.fspath)
+ elif scopenum == 2: # class
+ key = (argname, param_index, item.fspath, item.cls)
+ yield key
+
+
+# algorithm for sorting on a per-parametrized resource setup basis
+# it is called for scopenum==0 (session) first and performs sorting
+# down to the lower scopes such as to minimize number of "high scope"
+# setups and teardowns
+
+
+def reorder_items(items):
+ argkeys_cache = {}
+ items_by_argkey = {}
+ for scopenum in range(0, scopenum_function):
+ argkeys_cache[scopenum] = d = {}
+ items_by_argkey[scopenum] = item_d = defaultdict(deque)
+ for item in items:
+ keys = OrderedDict.fromkeys(get_parametrized_fixture_keys(item, scopenum))
+ if keys:
+ d[item] = keys
+ for key in keys:
+ item_d[key].append(item)
+ items = OrderedDict.fromkeys(items)
+ return list(reorder_items_atscope(items, argkeys_cache, items_by_argkey, 0))
+
+
+def fix_cache_order(item, argkeys_cache, items_by_argkey):
+ for scopenum in range(0, scopenum_function):
+ for key in argkeys_cache[scopenum].get(item, []):
+ items_by_argkey[scopenum][key].appendleft(item)
+
+
+def reorder_items_atscope(items, argkeys_cache, items_by_argkey, scopenum):
+ if scopenum >= scopenum_function or len(items) < 3:
+ return items
+ ignore = set()
+ items_deque = deque(items)
+ items_done = OrderedDict()
+ scoped_items_by_argkey = items_by_argkey[scopenum]
+ scoped_argkeys_cache = argkeys_cache[scopenum]
+ while items_deque:
+ no_argkey_group = OrderedDict()
+ slicing_argkey = None
+ while items_deque:
+ item = items_deque.popleft()
+ if item in items_done or item in no_argkey_group:
+ continue
+ argkeys = OrderedDict.fromkeys(
+ k for k in scoped_argkeys_cache.get(item, []) if k not in ignore
+ )
+ if not argkeys:
+ no_argkey_group[item] = None
+ else:
+ slicing_argkey, _ = argkeys.popitem()
+ # we don't have to remove relevant items from later in the deque because they'll just be ignored
+ matching_items = [
+ i for i in scoped_items_by_argkey[slicing_argkey] if i in items
+ ]
+ for i in reversed(matching_items):
+ fix_cache_order(i, argkeys_cache, items_by_argkey)
+ items_deque.appendleft(i)
+ break
+ if no_argkey_group:
+ no_argkey_group = reorder_items_atscope(
+ no_argkey_group, argkeys_cache, items_by_argkey, scopenum + 1
+ )
+ for item in no_argkey_group:
+ items_done[item] = None
+ ignore.add(slicing_argkey)
+ return items_done
+
+
+def fillfixtures(function):
+ """ fill missing funcargs for a test function. """
+ try:
+ request = function._request
+ except AttributeError:
+ # XXX this special code path is only expected to execute
+ # with the oejskit plugin. It uses classes with funcargs
+ # and we thus have to work a bit to allow this.
+ fm = function.session._fixturemanager
+ fi = fm.getfixtureinfo(function.parent, function.obj, None)
+ function._fixtureinfo = fi
+ request = function._request = FixtureRequest(function)
+ request._fillfixtures()
+ # prune out funcargs for jstests
+ newfuncargs = {}
+ for name in fi.argnames:
+ newfuncargs[name] = function.funcargs[name]
+ function.funcargs = newfuncargs
+ else:
+ request._fillfixtures()
+
+
+def get_direct_param_fixture_func(request):
+ return request.param
+
+
+class FuncFixtureInfo(object):
+
+ def __init__(self, argnames, names_closure, name2fixturedefs):
+ self.argnames = argnames
+ self.names_closure = names_closure
+ self.name2fixturedefs = name2fixturedefs
+
+
+class FixtureRequest(FuncargnamesCompatAttr):
+ """ A request for a fixture from a test or fixture function.
+
+ A request object gives access to the requesting test context
+ and has an optional ``param`` attribute in case
+ the fixture is parametrized indirectly.
+ """
+
+ def __init__(self, pyfuncitem):
+ self._pyfuncitem = pyfuncitem
+ #: fixture for which this request is being performed
+ self.fixturename = None
+ #: Scope string, one of "function", "class", "module", "session"
+ self.scope = "function"
+ self._fixture_defs = {} # argname -> FixtureDef
+ fixtureinfo = pyfuncitem._fixtureinfo
+ self._arg2fixturedefs = fixtureinfo.name2fixturedefs.copy()
+ self._arg2index = {}
+ self._fixturemanager = pyfuncitem.session._fixturemanager
+
+ @property
+ def fixturenames(self):
+ # backward incompatible note: now a readonly property
+ return list(self._pyfuncitem._fixtureinfo.names_closure)
+
+ @property
+ def node(self):
+ """ underlying collection node (depends on current request scope)"""
+ return self._getscopeitem(self.scope)
+
+ def _getnextfixturedef(self, argname):
+ fixturedefs = self._arg2fixturedefs.get(argname, None)
+ if fixturedefs is None:
+ # we arrive here because of a dynamic call to
+            # getfixturevalue(argname), which was naturally
+ # not known at parsing/collection time
+ parentid = self._pyfuncitem.parent.nodeid
+ fixturedefs = self._fixturemanager.getfixturedefs(argname, parentid)
+ self._arg2fixturedefs[argname] = fixturedefs
+ # fixturedefs list is immutable so we maintain a decreasing index
+ index = self._arg2index.get(argname, 0) - 1
+ if fixturedefs is None or (-index > len(fixturedefs)):
+ raise FixtureLookupError(argname, self)
+ self._arg2index[argname] = index
+ return fixturedefs[index]
+
+ @property
+ def config(self):
+ """ the pytest config object associated with this request. """
+ return self._pyfuncitem.config
+
+ @scopeproperty()
+ def function(self):
+ """ test function object if the request has a per-function scope. """
+ return self._pyfuncitem.obj
+
+ @scopeproperty("class")
+ def cls(self):
+ """ class (can be None) where the test function was collected. """
+ clscol = self._pyfuncitem.getparent(_pytest.python.Class)
+ if clscol:
+ return clscol.obj
+
+ @property
+ def instance(self):
+ """ instance (can be None) on which test function was collected. """
+ # unittest support hack, see _pytest.unittest.TestCaseFunction
+ try:
+ return self._pyfuncitem._testcase
+ except AttributeError:
+ function = getattr(self, "function", None)
+ return getattr(function, "__self__", None)
+
+ @scopeproperty()
+ def module(self):
+ """ python module object where the test function was collected. """
+ return self._pyfuncitem.getparent(_pytest.python.Module).obj
+
+ @scopeproperty()
+ def fspath(self):
+ """ the file system path of the test module which collected this test. """
+ return self._pyfuncitem.fspath
+
+ @property
+ def keywords(self):
+ """ keywords/markers dictionary for the underlying node. """
+ return self.node.keywords
+
+ @property
+ def session(self):
+ """ pytest session object. """
+ return self._pyfuncitem.session
+
+ def addfinalizer(self, finalizer):
+ """ add finalizer/teardown function to be called after the
+        last test within the requesting test context has finished
+ execution. """
+ # XXX usually this method is shadowed by fixturedef specific ones
+ self._addfinalizer(finalizer, scope=self.scope)
+
+ def _addfinalizer(self, finalizer, scope):
+ colitem = self._getscopeitem(scope)
+ self._pyfuncitem.session._setupstate.addfinalizer(
+ finalizer=finalizer, colitem=colitem
+ )
+
+ def applymarker(self, marker):
+ """ Apply a marker to a single test function invocation.
+ This method is useful if you don't want to have a keyword/marker
+ on all function invocations.
+
+ :arg marker: a :py:class:`_pytest.mark.MarkDecorator` object
+ created by a call to ``pytest.mark.NAME(...)``.
+ """
+ self.node.add_marker(marker)
+
+ def raiseerror(self, msg):
+ """ raise a FixtureLookupError with the given message. """
+ raise self._fixturemanager.FixtureLookupError(None, self, msg)
+
+ def _fillfixtures(self):
+ item = self._pyfuncitem
+ fixturenames = getattr(item, "fixturenames", self.fixturenames)
+ for argname in fixturenames:
+ if argname not in item.funcargs:
+ item.funcargs[argname] = self.getfixturevalue(argname)
+
+ def cached_setup(self, setup, teardown=None, scope="module", extrakey=None):
+ """ (deprecated) Return a testing resource managed by ``setup`` &
+ ``teardown`` calls. ``scope`` and ``extrakey`` determine when the
+ ``teardown`` function will be called so that subsequent calls to
+ ``setup`` would recreate the resource. With pytest-2.3 you often
+ do not need ``cached_setup()`` as you can directly declare a scope
+ on a fixture function and register a finalizer through
+ ``request.addfinalizer()``.
+
+ :arg teardown: function receiving a previously setup resource.
+ :arg setup: a no-argument function creating a resource.
+ :arg scope: a string value out of ``function``, ``class``, ``module``
+ or ``session`` indicating the caching lifecycle of the resource.
+ :arg extrakey: added to internal caching key of (funcargname, scope).
+ """
+ if not hasattr(self.config, "_setupcache"):
+ self.config._setupcache = {} # XXX weakref?
+ cachekey = (self.fixturename, self._getscopeitem(scope), extrakey)
+ cache = self.config._setupcache
+ try:
+ val = cache[cachekey]
+ except KeyError:
+ self._check_scope(self.fixturename, self.scope, scope)
+ val = setup()
+ cache[cachekey] = val
+ if teardown is not None:
+
+ def finalizer():
+ del cache[cachekey]
+ teardown(val)
+
+ self._addfinalizer(finalizer, scope=scope)
+ return val
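+
+    # Hedged usage sketch for cached_setup() (deprecated; names assumed):
+    #     db = request.cached_setup(
+    #         setup=open_db, teardown=lambda db: db.close(), scope="session"
+    #     )
+    # A session-scoped @pytest.fixture with a finalizer is the modern equivalent.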
+
+ def getfixturevalue(self, argname):
+ """ Dynamically run a named fixture function.
+
+ Declaring fixtures via function argument is recommended where possible.
+ But if you can only decide whether to use another fixture at test
+ setup time, you may use this function to retrieve it inside a fixture
+ or test function body.
+ """
+ return self._get_active_fixturedef(argname).cached_result[0]
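+
+    # Hedged usage sketch: a test or fixture body may run another fixture
+    # dynamically, e.g.
+    #     def test_something(request):
+    #         tmp = request.getfixturevalue("tmpdir")
+    # which sets up the named fixture and returns its value.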
+
+ def getfuncargvalue(self, argname):
+ """ Deprecated, use getfixturevalue. """
+ from _pytest import deprecated
+
+ warnings.warn(deprecated.GETFUNCARGVALUE, DeprecationWarning, stacklevel=2)
+ return self.getfixturevalue(argname)
+
+ def _get_active_fixturedef(self, argname):
+ try:
+ return self._fixture_defs[argname]
+ except KeyError:
+ try:
+ fixturedef = self._getnextfixturedef(argname)
+ except FixtureLookupError:
+ if argname == "request":
+ cached_result = (self, [0], None)
+ scope = "function"
+ return PseudoFixtureDef(cached_result, scope)
+ raise
+            # kept outside the try/except above to prevent the python3
+            # exception from leaking into the call
+ self._compute_fixture_value(fixturedef)
+ self._fixture_defs[argname] = fixturedef
+ return fixturedef
+
+ def _get_fixturestack(self):
+ current = self
+ values = []
+ while 1:
+ fixturedef = getattr(current, "_fixturedef", None)
+ if fixturedef is None:
+ values.reverse()
+ return values
+ values.append(fixturedef)
+ current = current._parent_request
+
+ def _compute_fixture_value(self, fixturedef):
+ """
+ Creates a SubRequest based on "self" and calls the execute method of the given fixturedef object. This will
+ force the FixtureDef object to throw away any previous results and compute a new fixture value, which
+ will be stored into the FixtureDef object itself.
+
+ :param FixtureDef fixturedef:
+ """
+ # prepare a subrequest object before calling fixture function
+ # (latter managed by fixturedef)
+ argname = fixturedef.argname
+ funcitem = self._pyfuncitem
+ scope = fixturedef.scope
+ try:
+ param = funcitem.callspec.getparam(argname)
+ except (AttributeError, ValueError):
+ param = NOTSET
+ param_index = 0
+ if fixturedef.params is not None:
+ frame = inspect.stack()[3]
+ frameinfo = inspect.getframeinfo(frame[0])
+ source_path = frameinfo.filename
+ source_lineno = frameinfo.lineno
+ source_path = py.path.local(source_path)
+ if source_path.relto(funcitem.config.rootdir):
+ source_path = source_path.relto(funcitem.config.rootdir)
+ msg = (
+ "The requested fixture has no parameter defined for the "
+ "current test.\n\nRequested fixture '{}' defined in:\n{}"
+ "\n\nRequested here:\n{}:{}".format(
+ fixturedef.argname,
+ getlocation(fixturedef.func, funcitem.config.rootdir),
+ source_path,
+ source_lineno,
+ )
+ )
+ fail(msg)
+ else:
+ # indices might not be set if old-style metafunc.addcall() was used
+ param_index = funcitem.callspec.indices.get(argname, 0)
+ # if a parametrize invocation set a scope it will override
+ # the static scope defined with the fixture function
+ paramscopenum = funcitem.callspec._arg2scopenum.get(argname)
+ if paramscopenum is not None:
+ scope = scopes[paramscopenum]
+
+ subrequest = SubRequest(self, scope, param, param_index, fixturedef)
+
+ # check if a higher-level scoped fixture accesses a lower level one
+ subrequest._check_scope(argname, self.scope, scope)
+
+ # clear sys.exc_info before invoking the fixture (python bug?)
+        # if it's not explicitly cleared it will leak into the call
+ exc_clear()
+ try:
+ # call the fixture function
+ fixturedef.execute(request=subrequest)
+ finally:
+ # if fixture function failed it might have registered finalizers
+ self.session._setupstate.addfinalizer(
+ functools.partial(fixturedef.finish, request=subrequest),
+ subrequest.node,
+ )
+
+ def _check_scope(self, argname, invoking_scope, requested_scope):
+ if argname == "request":
+ return
+ if scopemismatch(invoking_scope, requested_scope):
+ # try to report something helpful
+ lines = self._factorytraceback()
+ fail(
+ "ScopeMismatch: You tried to access the %r scoped "
+ "fixture %r with a %r scoped request object, "
+ "involved factories\n%s"
+ % ((requested_scope, argname, invoking_scope, "\n".join(lines))),
+ pytrace=False,
+ )
+
+ def _factorytraceback(self):
+ lines = []
+ for fixturedef in self._get_fixturestack():
+ factory = fixturedef.func
+ fs, lineno = getfslineno(factory)
+ p = self._pyfuncitem.session.fspath.bestrelpath(fs)
+ args = _format_args(factory)
+ lines.append("%s:%d: def %s%s" % (p, lineno, factory.__name__, args))
+ return lines
+
+ def _getscopeitem(self, scope):
+ if scope == "function":
+ # this might also be a non-function Item despite its attribute name
+ return self._pyfuncitem
+ node = get_scope_node(self._pyfuncitem, scope)
+ if node is None and scope == "class":
+ # fallback to function item itself
+ node = self._pyfuncitem
+ assert node, 'Could not obtain a node for scope "{}" for function {!r}'.format(
+ scope, self._pyfuncitem
+ )
+ return node
+
+ def __repr__(self):
+ return "<FixtureRequest for %r>" % (self.node)
+
+
+class SubRequest(FixtureRequest):
+ """ a sub request for handling getting a fixture from a
+ test function/fixture. """
+
+ def __init__(self, request, scope, param, param_index, fixturedef):
+ self._parent_request = request
+ self.fixturename = fixturedef.argname
+ if param is not NOTSET:
+ self.param = param
+ self.param_index = param_index
+ self.scope = scope
+ self._fixturedef = fixturedef
+ self._pyfuncitem = request._pyfuncitem
+ self._fixture_defs = request._fixture_defs
+ self._arg2fixturedefs = request._arg2fixturedefs
+ self._arg2index = request._arg2index
+ self._fixturemanager = request._fixturemanager
+
+ def __repr__(self):
+ return "<SubRequest %r for %r>" % (self.fixturename, self._pyfuncitem)
+
+ def addfinalizer(self, finalizer):
+ self._fixturedef.addfinalizer(finalizer)
+
+
+class ScopeMismatchError(Exception):
+ """ A fixture function tries to use a different fixture function which
+ which has a lower scope (e.g. a Session one calls a function one)
+ """
+
+
+scopes = "session module class function".split()
+scopenum_function = scopes.index("function")
+
+
+def scopemismatch(currentscope, newscope):
+ return scopes.index(newscope) > scopes.index(currentscope)
+
+
+def scope2index(scope, descr, where=None):
+ """Look up the index of ``scope`` and raise a descriptive value error
+ if not defined.
+ """
+ try:
+ return scopes.index(scope)
+ except ValueError:
+ raise ValueError(
+ "{} {}has an unsupported scope value '{}'".format(
+ descr, "from {} ".format(where) if where else "", scope
+ )
+ )
+
+
+class FixtureLookupError(LookupError):
+ """ could not return a requested Fixture (missing or invalid). """
+
+ def __init__(self, argname, request, msg=None):
+ self.argname = argname
+ self.request = request
+ self.fixturestack = request._get_fixturestack()
+ self.msg = msg
+
+ def formatrepr(self):
+ tblines = []
+ addline = tblines.append
+ stack = [self.request._pyfuncitem.obj]
+ stack.extend(map(lambda x: x.func, self.fixturestack))
+ msg = self.msg
+ if msg is not None:
+            # the last fixture raised an error, let's present
+ # it at the requesting side
+ stack = stack[:-1]
+ for function in stack:
+ fspath, lineno = getfslineno(function)
+ try:
+ lines, _ = inspect.getsourcelines(get_real_func(function))
+ except (IOError, IndexError, TypeError):
+ error_msg = "file %s, line %s: source code not available"
+ addline(error_msg % (fspath, lineno + 1))
+ else:
+ addline("file %s, line %s" % (fspath, lineno + 1))
+ for i, line in enumerate(lines):
+ line = line.rstrip()
+ addline(" " + line)
+ if line.lstrip().startswith("def"):
+ break
+
+ if msg is None:
+ fm = self.request._fixturemanager
+ available = []
+ parentid = self.request._pyfuncitem.parent.nodeid
+ for name, fixturedefs in fm._arg2fixturedefs.items():
+ faclist = list(fm._matchfactories(fixturedefs, parentid))
+ if faclist and name not in available:
+ available.append(name)
+ msg = "fixture %r not found" % (self.argname,)
+ msg += "\n available fixtures: %s" % (", ".join(sorted(available)),)
+ msg += "\n use 'pytest --fixtures [testpath]' for help on them."
+
+ return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname)
+
+
+class FixtureLookupErrorRepr(TerminalRepr):
+
+ def __init__(self, filename, firstlineno, tblines, errorstring, argname):
+ self.tblines = tblines
+ self.errorstring = errorstring
+ self.filename = filename
+ self.firstlineno = firstlineno
+ self.argname = argname
+
+ def toterminal(self, tw):
+ # tw.line("FixtureLookupError: %s" %(self.argname), red=True)
+ for tbline in self.tblines:
+ tw.line(tbline.rstrip())
+ lines = self.errorstring.split("\n")
+ if lines:
+ tw.line(
+ "{} {}".format(FormattedExcinfo.fail_marker, lines[0].strip()),
+ red=True,
+ )
+ for line in lines[1:]:
+ tw.line(
+ "{} {}".format(FormattedExcinfo.flow_marker, line.strip()),
+ red=True,
+ )
+ tw.line()
+ tw.line("%s:%d" % (self.filename, self.firstlineno + 1))
+
+
+def fail_fixturefunc(fixturefunc, msg):
+ fs, lineno = getfslineno(fixturefunc)
+ location = "%s:%s" % (fs, lineno + 1)
+ source = _pytest._code.Source(fixturefunc)
+ fail(msg + ":\n\n" + str(source.indent()) + "\n" + location, pytrace=False)
+
+
+def call_fixture_func(fixturefunc, request, kwargs):
+ yieldctx = is_generator(fixturefunc)
+ if yieldctx:
+ it = fixturefunc(**kwargs)
+ res = next(it)
+
+ def teardown():
+ try:
+ next(it)
+ except StopIteration:
+ pass
+ else:
+ fail_fixturefunc(
+ fixturefunc, "yield_fixture function has more than one 'yield'"
+ )
+
+ request.addfinalizer(teardown)
+ else:
+ res = fixturefunc(**kwargs)
+ return res
+
+
+class FixtureDef(object):
+ """ A container for a factory definition. """
+
+ def __init__(
+ self,
+ fixturemanager,
+ baseid,
+ argname,
+ func,
+ scope,
+ params,
+ unittest=False,
+ ids=None,
+ ):
+ self._fixturemanager = fixturemanager
+ self.baseid = baseid or ""
+ self.has_location = baseid is not None
+ self.func = func
+ self.argname = argname
+ self.scope = scope
+ self.scopenum = scope2index(
+ scope or "function", descr="fixture {}".format(func.__name__), where=baseid
+ )
+ self.params = params
+ self.argnames = getfuncargnames(func, is_method=unittest)
+ self.unittest = unittest
+ self.ids = ids
+ self._finalizers = []
+
+ def addfinalizer(self, finalizer):
+ self._finalizers.append(finalizer)
+
+ def finish(self, request):
+ exceptions = []
+ try:
+ while self._finalizers:
+ try:
+ func = self._finalizers.pop()
+ func()
+ except: # noqa
+ exceptions.append(sys.exc_info())
+ if exceptions:
+ e = exceptions[0]
+ del exceptions # ensure we don't keep all frames alive because of the traceback
+ py.builtin._reraise(*e)
+
+ finally:
+ hook = self._fixturemanager.session.gethookproxy(request.node.fspath)
+ hook.pytest_fixture_post_finalizer(fixturedef=self, request=request)
+ # even if finalization fails, we invalidate
+ # the cached fixture value and remove
+ # all finalizers because they may be bound methods which will
+ # keep instances alive
+ if hasattr(self, "cached_result"):
+ del self.cached_result
+ self._finalizers = []
+
+ def execute(self, request):
+ # get required arguments and register our own finish()
+ # with their finalization
+ for argname in self.argnames:
+ fixturedef = request._get_active_fixturedef(argname)
+ if argname != "request":
+ fixturedef.addfinalizer(functools.partial(self.finish, request=request))
+
+ my_cache_key = request.param_index
+ cached_result = getattr(self, "cached_result", None)
+ if cached_result is not None:
+ result, cache_key, err = cached_result
+ if my_cache_key == cache_key:
+ if err is not None:
+ py.builtin._reraise(*err)
+ else:
+ return result
+ # we have a previous but differently parametrized fixture instance
+ # so we need to tear it down before creating a new one
+ self.finish(request)
+ assert not hasattr(self, "cached_result")
+
+ hook = self._fixturemanager.session.gethookproxy(request.node.fspath)
+ return hook.pytest_fixture_setup(fixturedef=self, request=request)
+
+ def __repr__(self):
+ return (
+ "<FixtureDef name=%r scope=%r baseid=%r >"
+ % (self.argname, self.scope, self.baseid)
+ )
+
+
+def pytest_fixture_setup(fixturedef, request):
+ """ Execution of fixture setup. """
+ kwargs = {}
+ for argname in fixturedef.argnames:
+ fixdef = request._get_active_fixturedef(argname)
+ result, arg_cache_key, exc = fixdef.cached_result
+ request._check_scope(argname, request.scope, fixdef.scope)
+ kwargs[argname] = result
+
+ fixturefunc = fixturedef.func
+ if fixturedef.unittest:
+ if request.instance is not None:
+ # bind the unbound method to the TestCase instance
+ fixturefunc = fixturedef.func.__get__(request.instance)
+ else:
+ # the fixture function needs to be bound to the actual
+ # request.instance so that code working with "fixturedef" behaves
+ # as expected.
+ if request.instance is not None:
+ fixturefunc = getimfunc(fixturedef.func)
+ if fixturefunc != fixturedef.func:
+ fixturefunc = fixturefunc.__get__(request.instance)
+ my_cache_key = request.param_index
+ try:
+ result = call_fixture_func(fixturefunc, request, kwargs)
+ except TEST_OUTCOME:
+ fixturedef.cached_result = (None, my_cache_key, sys.exc_info())
+ raise
+ fixturedef.cached_result = (result, my_cache_key, None)
+ return result
+
+
+def _ensure_immutable_ids(ids):
+ if ids is None:
+ return
+ if callable(ids):
+ return ids
+ return tuple(ids)
+
+
+@attr.s(frozen=True)
+class FixtureFunctionMarker(object):
+ scope = attr.ib()
+ params = attr.ib(converter=attr.converters.optional(tuple))
+ autouse = attr.ib(default=False)
+ ids = attr.ib(default=None, converter=_ensure_immutable_ids)
+ name = attr.ib(default=None)
+
+ def __call__(self, function):
+ if isclass(function):
+ raise ValueError("class fixtures not supported (may be in the future)")
+
+ if getattr(function, "_pytestfixturefunction", False):
+ raise ValueError(
+ "fixture is being applied more than once to the same function"
+ )
+
+ function._pytestfixturefunction = self
+ return function
+
+
+def fixture(scope="function", params=None, autouse=False, ids=None, name=None):
+ """Decorator to mark a fixture factory function.
+
+ This decorator can be used (with or without parameters) to define a
+ fixture function. The name of the fixture function can later be
+ referenced to cause its invocation ahead of running tests: test
+ modules or classes can use the pytest.mark.usefixtures(fixturename)
+ marker. Test functions can directly use fixture names as input
+ arguments in which case the fixture instance returned from the fixture
+ function will be injected.
+
+ :arg scope: the scope for which this fixture is shared, one of
+ "function" (default), "class", "module" or "session".
+
+ :arg params: an optional list of parameters which will cause multiple
+ invocations of the fixture function and all of the tests
+ using it.
+
+ :arg autouse: if True, the fixture func is activated for all tests that
+ can see it. If False (the default) then an explicit
+ reference is needed to activate the fixture.
+
+ :arg ids: list of string ids each corresponding to the params
+ so that they are part of the test id. If no ids are provided
+ they will be generated automatically from the params.
+
+ :arg name: the name of the fixture. This defaults to the name of the
+ decorated function. If a fixture is used in the same module in
+ which it is defined, the function name of the fixture will be
+ shadowed by the function arg that requests the fixture; one way
+ to resolve this is to name the decorated function
+ ``fixture_<fixturename>`` and then use
+ ``@pytest.fixture(name='<fixturename>')``.
+
+ Fixtures can optionally provide their values to test functions using a ``yield`` statement,
+ instead of ``return``. In this case, the code block after the ``yield`` statement is executed
+ as teardown code regardless of the test outcome. A fixture function must yield exactly once.
+ """
+ if callable(scope) and params is None and autouse is False:
+ # direct decoration
+ return FixtureFunctionMarker("function", params, autouse, name=name)(scope)
+ if params is not None and not isinstance(params, (list, tuple)):
+ params = list(params)
+ return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name)
+
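+# Usage sketch for the decorator above (assumed names, minimal example):
+#     @pytest.fixture(scope="module", params=["sqlite", "postgres"])
+#     def db(request):
+#         conn = connect(request.param)  # 'connect' is a hypothetical helper
+#         yield conn                     # value provided to the tests
+#         conn.close()                   # teardown runs after the last test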
+
+def yield_fixture(scope="function", params=None, autouse=False, ids=None, name=None):
+ """ (return a) decorator to mark a yield-fixture factory function.
+
+ .. deprecated:: 3.0
+ Use :py:func:`pytest.fixture` directly instead.
+ """
+ if callable(scope) and params is None and not autouse:
+ # direct decoration
+ return FixtureFunctionMarker("function", params, autouse, ids=ids, name=name)(
+ scope
+ )
+ else:
+ return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name)
+
+
+defaultfuncargprefixmarker = fixture()
+
+
+@fixture(scope="session")
+def pytestconfig(request):
+ """Session-scoped fixture that returns the :class:`_pytest.config.Config` object.
+
+ Example::
+
+ def test_foo(pytestconfig):
+ if pytestconfig.getoption("verbose"):
+ ...
+
+ """
+ return request.config
+
+
+class FixtureManager(object):
+ """
+    pytest fixture definitions and information are stored and managed
+    by this class.
+
+ During collection fm.parsefactories() is called multiple times to parse
+ fixture function definitions into FixtureDef objects and internal
+ data structures.
+
+ During collection of test functions, metafunc-mechanics instantiate
+ a FuncFixtureInfo object which is cached per node/func-name.
+ This FuncFixtureInfo object is later retrieved by Function nodes
+ which themselves offer a fixturenames attribute.
+
+ The FuncFixtureInfo object holds information about fixtures and FixtureDefs
+ relevant for a particular function. An initial list of fixtures is
+ assembled like this:
+
+ - ini-defined usefixtures
+ - autouse-marked fixtures along the collection chain up from the function
+ - usefixtures markers at module/class/function level
+ - test function funcargs
+
+ Subsequently the funcfixtureinfo.fixturenames attribute is computed
+    as the closure of the fixtures needed to set up the initial fixtures,
+    i.e. fixtures needed by fixture functions themselves are appended
+ to the fixturenames list.
+
+    During the test-setup phase all fixturenames are instantiated, retrieved
+ by a lookup of their FuncFixtureInfo.
+ """
+
+ _argprefix = "pytest_funcarg__"
+ FixtureLookupError = FixtureLookupError
+ FixtureLookupErrorRepr = FixtureLookupErrorRepr
+
+ def __init__(self, session):
+ self.session = session
+ self.config = session.config
+ self._arg2fixturedefs = {}
+ self._holderobjseen = set()
+ self._arg2finish = {}
+ self._nodeid_and_autousenames = [("", self.config.getini("usefixtures"))]
+ session.config.pluginmanager.register(self, "funcmanage")
+
+ def getfixtureinfo(self, node, func, cls, funcargs=True):
+ if funcargs and not getattr(node, "nofuncargs", False):
+ argnames = getfuncargnames(func, cls=cls)
+ else:
+ argnames = ()
+ usefixtures = flatten(
+ mark.args for mark in node.iter_markers(name="usefixtures")
+ )
+ initialnames = argnames
+ initialnames = tuple(usefixtures) + initialnames
+ fm = node.session._fixturemanager
+ names_closure, arg2fixturedefs = fm.getfixtureclosure(initialnames, node)
+ return FuncFixtureInfo(argnames, names_closure, arg2fixturedefs)
+
+ def pytest_plugin_registered(self, plugin):
+ nodeid = None
+ try:
+ p = py.path.local(plugin.__file__)
+ except AttributeError:
+ pass
+ else:
+ # construct the base nodeid which is later used to check
+ # what fixtures are visible for particular tests (as denoted
+ # by their test id)
+ if p.basename.startswith("conftest.py"):
+ nodeid = p.dirpath().relto(self.config.rootdir)
+ if p.sep != nodes.SEP:
+ nodeid = nodeid.replace(p.sep, nodes.SEP)
+ self.parsefactories(plugin, nodeid)
+
+ def _getautousenames(self, nodeid):
+ """ return a tuple of fixture names to be used. """
+ autousenames = []
+ for baseid, basenames in self._nodeid_and_autousenames:
+ if nodeid.startswith(baseid):
+ if baseid:
+ i = len(baseid)
+ nextchar = nodeid[i:i + 1]
+ if nextchar and nextchar not in ":/":
+ continue
+ autousenames.extend(basenames)
+ return autousenames
+
+ def getfixtureclosure(self, fixturenames, parentnode):
+        # collect the closure of all fixtures, starting with the given
+ # fixturenames as the initial set. As we have to visit all
+ # factory definitions anyway, we also return an arg2fixturedefs
+ # mapping so that the caller can reuse it and does not have
+ # to re-discover fixturedefs again for each fixturename
+ # (discovering matching fixtures for a given name/node is expensive)
+
+ parentid = parentnode.nodeid
+ fixturenames_closure = self._getautousenames(parentid)
+
+ def merge(otherlist):
+ for arg in otherlist:
+ if arg not in fixturenames_closure:
+ fixturenames_closure.append(arg)
+
+ merge(fixturenames)
+ arg2fixturedefs = {}
+ lastlen = -1
+ while lastlen != len(fixturenames_closure):
+ lastlen = len(fixturenames_closure)
+ for argname in fixturenames_closure:
+ if argname in arg2fixturedefs:
+ continue
+ fixturedefs = self.getfixturedefs(argname, parentid)
+ if fixturedefs:
+ arg2fixturedefs[argname] = fixturedefs
+ merge(fixturedefs[-1].argnames)
+
+ def sort_by_scope(arg_name):
+ try:
+ fixturedefs = arg2fixturedefs[arg_name]
+ except KeyError:
+ return scopes.index("function")
+ else:
+ return fixturedefs[-1].scopenum
+
+ fixturenames_closure.sort(key=sort_by_scope)
+ return fixturenames_closure, arg2fixturedefs
+
+ def pytest_generate_tests(self, metafunc):
+ for argname in metafunc.fixturenames:
+ faclist = metafunc._arg2fixturedefs.get(argname)
+ if faclist:
+ fixturedef = faclist[-1]
+ if fixturedef.params is not None:
+ parametrize_func = getattr(metafunc.function, "parametrize", None)
+ if parametrize_func is not None:
+ parametrize_func = parametrize_func.combined
+ func_params = getattr(parametrize_func, "args", [[None]])
+ func_kwargs = getattr(parametrize_func, "kwargs", {})
+ # skip directly parametrized arguments
+ if "argnames" in func_kwargs:
+ argnames = parametrize_func.kwargs["argnames"]
+ else:
+ argnames = func_params[0]
+ if not isinstance(argnames, (tuple, list)):
+ argnames = [x.strip() for x in argnames.split(",") if x.strip()]
+ if argname not in func_params and argname not in argnames:
+ metafunc.parametrize(
+ argname,
+ fixturedef.params,
+ indirect=True,
+ scope=fixturedef.scope,
+ ids=fixturedef.ids,
+ )
+ else:
+ continue # will raise FixtureLookupError at setup time
+
+ def pytest_collection_modifyitems(self, items):
+ # separate parametrized setups
+ items[:] = reorder_items(items)
+
+ def parsefactories(self, node_or_obj, nodeid=NOTSET, unittest=False):
+ if nodeid is not NOTSET:
+ holderobj = node_or_obj
+ else:
+ holderobj = node_or_obj.obj
+ nodeid = node_or_obj.nodeid
+ if holderobj in self._holderobjseen:
+ return
+ self._holderobjseen.add(holderobj)
+ autousenames = []
+ for name in dir(holderobj):
+ # The attribute can be an arbitrary descriptor, so the attribute
+            # access below can raise. safe_getattr() ignores such exceptions.
+ obj = safe_getattr(holderobj, name, None)
+ # fixture functions have a pytest_funcarg__ prefix (pre-2.3 style)
+ # or are "@pytest.fixture" marked
+ marker = getfixturemarker(obj)
+ if marker is None:
+ if not name.startswith(self._argprefix):
+ continue
+ if not callable(obj):
+ continue
+ marker = defaultfuncargprefixmarker
+ from _pytest import deprecated
+
+ self.config.warn(
+ "C1", deprecated.FUNCARG_PREFIX.format(name=name), nodeid=nodeid
+ )
+ name = name[len(self._argprefix):]
+ elif not isinstance(marker, FixtureFunctionMarker):
+ # magic globals with __getattr__ might have got us a wrong
+ # fixture attribute
+ continue
+ else:
+ if marker.name:
+ name = marker.name
+ assert not name.startswith(self._argprefix), FIXTURE_MSG.format(name)
+
+ fixture_def = FixtureDef(
+ self,
+ nodeid,
+ name,
+ obj,
+ marker.scope,
+ marker.params,
+ unittest=unittest,
+ ids=marker.ids,
+ )
+
+ faclist = self._arg2fixturedefs.setdefault(name, [])
+ if fixture_def.has_location:
+ faclist.append(fixture_def)
+ else:
+ # fixturedefs with no location are at the front
+ # so this inserts the current fixturedef after the
+ # existing fixturedefs from external plugins but
+ # before the fixturedefs provided in conftests.
+ i = len([f for f in faclist if not f.has_location])
+ faclist.insert(i, fixture_def)
+ if marker.autouse:
+ autousenames.append(name)
+
+ if autousenames:
+ self._nodeid_and_autousenames.append((nodeid or "", autousenames))
+
+ def getfixturedefs(self, argname, nodeid):
+ """
+ Gets a list of fixtures which are applicable to the given node id.
+
+ :param str argname: name of the fixture to search for
+ :param str nodeid: full node id of the requesting test.
+ :return: list[FixtureDef]
+ """
+ try:
+ fixturedefs = self._arg2fixturedefs[argname]
+ except KeyError:
+ return None
+ else:
+ return tuple(self._matchfactories(fixturedefs, nodeid))
+
+ def _matchfactories(self, fixturedefs, nodeid):
+ for fixturedef in fixturedefs:
+ if nodes.ischildnode(fixturedef.baseid, nodeid):
+ yield fixturedef
diff --git a/third_party/python/pytest/src/_pytest/freeze_support.py b/third_party/python/pytest/src/_pytest/freeze_support.py
new file mode 100644
index 0000000000..002e077301
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/freeze_support.py
@@ -0,0 +1,45 @@
+"""
+Provides a function to report all internal pytest modules for use with
+freezing tools.
+"""
+from __future__ import absolute_import, division, print_function
+
+
+def freeze_includes():
+ """
+ Returns a list of module names used by pytest that should be
+ included by cx_freeze.
+ """
+ import py
+ import _pytest
+
+ result = list(_iter_all_modules(py))
+ result += list(_iter_all_modules(_pytest))
+ return result
+
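+# Hedged usage sketch with cx_Freeze (file and project names assumed):
+#     from cx_Freeze import setup, Executable
+#     from pytest import freeze_includes
+#
+#     setup(
+#         executables=[Executable("runtests.py")],
+#         options={"build_exe": {"includes": freeze_includes()}},
+#     )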
+
+def _iter_all_modules(package, prefix=""):
+ """
+ Iterates over the names of all modules that can be found in the given
+ package, recursively.
+ Example:
+ _iter_all_modules(_pytest) ->
+ ['_pytest.assertion.newinterpret',
+ '_pytest.capture',
+ '_pytest.core',
+ ...
+ ]
+ """
+ import os
+ import pkgutil
+
+ if type(package) is not str:
+ path, prefix = package.__path__[0], package.__name__ + "."
+ else:
+ path = package
+ for _, name, is_package in pkgutil.iter_modules([path]):
+ if is_package:
+ for m in _iter_all_modules(os.path.join(path, name), prefix=name + "."):
+ yield prefix + m
+ else:
+ yield prefix + name
diff --git a/third_party/python/pytest/src/_pytest/helpconfig.py b/third_party/python/pytest/src/_pytest/helpconfig.py
new file mode 100644
index 0000000000..5514fec404
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/helpconfig.py
@@ -0,0 +1,212 @@
+""" version info, help messages, tracing configuration. """
+from __future__ import absolute_import, division, print_function
+
+import py
+import pytest
+from _pytest.config import PrintHelp
+import os
+import sys
+from argparse import Action
+
+
+class HelpAction(Action):
+ """This is an argparse Action that will raise an exception in
+ order to skip the rest of the argument parsing when --help is passed.
+ This prevents argparse from quitting due to missing required arguments
+ when any are defined, for example by ``pytest_addoption``.
+ This is similar to the way that the builtin argparse --help option is
+ implemented by raising SystemExit.
+ """
+
+ def __init__(self, option_strings, dest=None, default=False, help=None):
+ super(HelpAction, self).__init__(
+ option_strings=option_strings,
+ dest=dest,
+ const=True,
+ default=default,
+ nargs=0,
+ help=help,
+ )
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ setattr(namespace, self.dest, self.const)
+
+ # We should only skip the rest of the parsing after preparse is done
+ if getattr(parser._parser, "after_preparse", False):
+ raise PrintHelp
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("debugconfig")
+ group.addoption(
+ "--version",
+ action="store_true",
+ help="display pytest lib version and import information.",
+ )
+ group._addoption(
+ "-h",
+ "--help",
+ action=HelpAction,
+ dest="help",
+ help="show help message and configuration info",
+ )
+ group._addoption(
+ "-p",
+ action="append",
+ dest="plugins",
+ default=[],
+ metavar="name",
+ help="early-load given plugin (multi-allowed). "
+ "To avoid loading of plugins, use the `no:` prefix, e.g. "
+ "`no:doctest`.",
+ )
+ group.addoption(
+ "--traceconfig",
+ "--trace-config",
+ action="store_true",
+ default=False,
+ help="trace considerations of conftest.py files.",
+ )
+ group.addoption(
+ "--debug",
+ action="store_true",
+ dest="debug",
+ default=False,
+ help="store internal tracing debug information in 'pytestdebug.log'.",
+ )
+ group._addoption(
+ "-o",
+ "--override-ini",
+ dest="override_ini",
+ action="append",
+ help='override ini option with "option=value" style, e.g. `-o xfail_strict=True -o cache_dir=cache`.',
+ )
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_cmdline_parse():
+ outcome = yield
+ config = outcome.get_result()
+ if config.option.debug:
+ path = os.path.abspath("pytestdebug.log")
+ debugfile = open(path, "w")
+ debugfile.write(
+ "versions pytest-%s, py-%s, "
+ "python-%s\ncwd=%s\nargs=%s\n\n"
+ % (
+ pytest.__version__,
+ py.__version__,
+ ".".join(map(str, sys.version_info)),
+ os.getcwd(),
+ config._origargs,
+ )
+ )
+ config.trace.root.setwriter(debugfile.write)
+ undo_tracing = config.pluginmanager.enable_tracing()
+ sys.stderr.write("writing pytestdebug information to %s\n" % path)
+
+ def unset_tracing():
+ debugfile.close()
+ sys.stderr.write("wrote pytestdebug information to %s\n" % debugfile.name)
+ config.trace.root.setwriter(None)
+ undo_tracing()
+
+ config.add_cleanup(unset_tracing)
+
+
+def pytest_cmdline_main(config):
+ if config.option.version:
+ p = py.path.local(pytest.__file__)
+ sys.stderr.write(
+ "This is pytest version %s, imported from %s\n" % (pytest.__version__, p)
+ )
+ plugininfo = getpluginversioninfo(config)
+ if plugininfo:
+ for line in plugininfo:
+ sys.stderr.write(line + "\n")
+ return 0
+ elif config.option.help:
+ config._do_configure()
+ showhelp(config)
+ config._ensure_unconfigure()
+ return 0
+
+
+def showhelp(config):
+ reporter = config.pluginmanager.get_plugin("terminalreporter")
+ tw = reporter._tw
+ tw.write(config._parser.optparser.format_help())
+ tw.line()
+ tw.line()
+ tw.line(
+ "[pytest] ini-options in the first " "pytest.ini|tox.ini|setup.cfg file found:"
+ )
+ tw.line()
+
+ for name in config._parser._ininames:
+ help, type, default = config._parser._inidict[name]
+ if type is None:
+ type = "string"
+ spec = "%s (%s)" % (name, type)
+ line = " %-24s %s" % (spec, help)
+ tw.line(line[:tw.fullwidth])
+
+ tw.line()
+ tw.line("environment variables:")
+ vars = [
+ ("PYTEST_ADDOPTS", "extra command line options"),
+ ("PYTEST_PLUGINS", "comma-separated plugins to load during startup"),
+ ("PYTEST_DEBUG", "set to enable debug tracing of pytest's internals"),
+ ]
+ for name, help in vars:
+ tw.line(" %-24s %s" % (name, help))
+ tw.line()
+ tw.line()
+
+ tw.line("to see available markers type: pytest --markers")
+ tw.line("to see available fixtures type: pytest --fixtures")
+ tw.line(
+ "(shown according to specified file_or_dir or current dir "
+ "if not specified; fixtures with leading '_' are only shown "
+ "with the '-v' option)"
+ )
+
+ for warningreport in reporter.stats.get("warnings", []):
+ tw.line("warning : " + warningreport.message, red=True)
+ return
+
+
+conftest_options = [("pytest_plugins", "list of plugin names to load")]
+
+
+def getpluginversioninfo(config):
+ lines = []
+ plugininfo = config.pluginmanager.list_plugin_distinfo()
+ if plugininfo:
+ lines.append("setuptools registered plugins:")
+ for plugin, dist in plugininfo:
+ loc = getattr(plugin, "__file__", repr(plugin))
+ content = "%s-%s at %s" % (dist.project_name, dist.version, loc)
+ lines.append(" " + content)
+ return lines
+
+
+def pytest_report_header(config):
+ lines = []
+ if config.option.debug or config.option.traceconfig:
+ lines.append("using: pytest-%s pylib-%s" % (pytest.__version__, py.__version__))
+
+ verinfo = getpluginversioninfo(config)
+ if verinfo:
+ lines.extend(verinfo)
+
+ if config.option.traceconfig:
+ lines.append("active plugins:")
+ items = config.pluginmanager.list_name_plugin()
+ for name, plugin in items:
+ if hasattr(plugin, "__file__"):
+ r = plugin.__file__
+ else:
+ r = repr(plugin)
+ lines.append(" %-20s: %s" % (name, r))
+ return lines
diff --git a/third_party/python/pytest/src/_pytest/hookspec.py b/third_party/python/pytest/src/_pytest/hookspec.py
new file mode 100644
index 0000000000..fec43a4009
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/hookspec.py
@@ -0,0 +1,563 @@
+""" hook specifications for pytest plugins, invoked from main.py and builtin plugins. """
+
+from pluggy import HookspecMarker
+
+hookspec = HookspecMarker("pytest")
+
+# -------------------------------------------------------------------------
+# Initialization hooks called for every plugin
+# -------------------------------------------------------------------------
+
+
+@hookspec(historic=True)
+def pytest_addhooks(pluginmanager):
+ """called at plugin registration time to allow adding new hooks via a call to
+ ``pluginmanager.add_hookspecs(module_or_class, prefix)``.
+
+
+ :param _pytest.config.PytestPluginManager pluginmanager: pytest plugin manager
+
+ .. note::
+ This hook is incompatible with ``hookwrapper=True``.
+ """
+
+
+@hookspec(historic=True)
+def pytest_namespace():
+ """
+ (**Deprecated**) this hook causes direct monkeypatching on pytest; its use is strongly discouraged.
+ Return a dict of name->object to be made globally available in
+ the pytest namespace.
+
+ This hook is called at plugin registration time.
+
+ .. note::
+ This hook is incompatible with ``hookwrapper=True``.
+ """
+
+
+@hookspec(historic=True)
+def pytest_plugin_registered(plugin, manager):
+ """ a new pytest plugin got registered.
+
+ :param plugin: the plugin module or instance
+ :param _pytest.config.PytestPluginManager manager: pytest plugin manager
+
+ .. note::
+ This hook is incompatible with ``hookwrapper=True``.
+ """
+
+
+@hookspec(historic=True)
+def pytest_addoption(parser):
+ """register argparse-style options and ini-style config values,
+ called once at the beginning of a test run.
+
+ .. note::
+
+ This function should be implemented only in plugins or ``conftest.py``
+ files situated at the tests root directory due to how pytest
+ :ref:`discovers plugins during startup <pluginorder>`.
+
+ :arg _pytest.config.Parser parser: To add command line options, call
+ :py:func:`parser.addoption(...) <_pytest.config.Parser.addoption>`.
+ To add ini-file values call :py:func:`parser.addini(...)
+ <_pytest.config.Parser.addini>`.
+
+ Options can later be accessed through the
+ :py:class:`config <_pytest.config.Config>` object, respectively:
+
+ - :py:func:`config.getoption(name) <_pytest.config.Config.getoption>` to
+ retrieve the value of a command line option.
+
+ - :py:func:`config.getini(name) <_pytest.config.Config.getini>` to retrieve
+ a value read from an ini-style file.
+
+ The config object is passed around on many internal objects via the ``.config``
+ attribute or can be retrieved as the ``pytestconfig`` fixture.
+
+ .. note::
+ This hook is incompatible with ``hookwrapper=True``.
+ """
+
+
+@hookspec(historic=True)
+def pytest_configure(config):
+ """
+ Allows plugins and conftest files to perform initial configuration.
+
+ This hook is called for every plugin and initial conftest file
+ after command line options have been parsed.
+
+ After that, the hook is called for other conftest files as they are
+ imported.
+
+ .. note::
+ This hook is incompatible with ``hookwrapper=True``.
+
+ :arg _pytest.config.Config config: pytest config object
+ """
+
+
+# -------------------------------------------------------------------------
+# Bootstrapping hooks called for plugins registered early enough:
+# internal and 3rd party plugins.
+# -------------------------------------------------------------------------
+
+
+@hookspec(firstresult=True)
+def pytest_cmdline_parse(pluginmanager, args):
+ """return initialized config object, parsing the specified args.
+
+ Stops at first non-None result, see :ref:`firstresult`
+
+ .. note::
+ This hook will not be called for ``conftest.py`` files, only for setuptools plugins.
+
+ :param _pytest.config.PytestPluginManager pluginmanager: pytest plugin manager
+ :param list[str] args: list of arguments passed on the command line
+ """
+
+
+def pytest_cmdline_preparse(config, args):
+ """(**Deprecated**) modify command line arguments before option parsing.
+
+ This hook is considered deprecated and will be removed in a future pytest version. Consider
+ using :func:`pytest_load_initial_conftests` instead.
+
+ .. note::
+ This hook will not be called for ``conftest.py`` files, only for setuptools plugins.
+
+ :param _pytest.config.Config config: pytest config object
+ :param list[str] args: list of arguments passed on the command line
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_cmdline_main(config):
+ """ called for performing the main command line action. The default
+ implementation will invoke the configure hooks and runtest_mainloop.
+
+ .. note::
+ This hook will not be called for ``conftest.py`` files, only for setuptools plugins.
+
+ Stops at first non-None result, see :ref:`firstresult`
+
+ :param _pytest.config.Config config: pytest config object
+ """
+
+
+def pytest_load_initial_conftests(early_config, parser, args):
+ """ implements the loading of initial conftest files ahead
+ of command line option parsing.
+
+ .. note::
+ This hook will not be called for ``conftest.py`` files, only for setuptools plugins.
+
+ :param _pytest.config.Config early_config: pytest config object
+ :param list[str] args: list of arguments passed on the command line
+ :param _pytest.config.Parser parser: to add command line options
+ """
+
+
+# -------------------------------------------------------------------------
+# collection hooks
+# -------------------------------------------------------------------------
+
+
+@hookspec(firstresult=True)
+def pytest_collection(session):
+ """Perform the collection protocol for the given session.
+
+ Stops at first non-None result, see :ref:`firstresult`.
+
+ :param _pytest.main.Session session: the pytest session object
+ """
+
+
+def pytest_collection_modifyitems(session, config, items):
+ """ called after collection has been performed, may filter or re-order
+ the items in-place.
+
+ :param _pytest.main.Session session: the pytest session object
+ :param _pytest.config.Config config: pytest config object
+ :param List[_pytest.nodes.Item] items: list of item objects
+ """
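+ # Illustrative conftest.py implementation (a sketch, not part of this spec):
+ # run tests carrying an assumed "slow" marker/keyword last by re-ordering
+ # ``items`` in place (list.sort is stable, so other tests keep their order).
+ #
+ #     def pytest_collection_modifyitems(session, config, items):
+ #         items.sort(key=lambda item: "slow" in item.keywords)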
+
+
+def pytest_collection_finish(session):
+ """ called after collection has been performed and modified.
+
+ :param _pytest.main.Session session: the pytest session object
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_ignore_collect(path, config):
+ """ return True to prevent considering this path for collection.
+ This hook is consulted for all files and directories prior to calling
+ more specific hooks.
+
+ Stops at first non-None result, see :ref:`firstresult`
+
+ :param str path: the path to analyze
+ :param _pytest.config.Config config: pytest config object
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_collect_directory(path, parent):
+ """ called before traversing a directory for collection files.
+
+ Stops at first non-None result, see :ref:`firstresult`
+
+ :param str path: the path to analyze
+ """
+
+
+def pytest_collect_file(path, parent):
+ """ return collection Node or None for the given path. Any new node
+ needs to have the specified ``parent`` as a parent.
+
+ :param str path: the path to collect
+ """
+
+
+# logging hooks for collection
+
+
+def pytest_collectstart(collector):
+ """ collector starts collecting. """
+
+
+def pytest_itemcollected(item):
+ """ we just collected a test item. """
+
+
+def pytest_collectreport(report):
+ """ collector finished collecting. """
+
+
+def pytest_deselected(items):
+ """ called for test items deselected by keyword. """
+
+
+@hookspec(firstresult=True)
+def pytest_make_collect_report(collector):
+ """ perform ``collector.collect()`` and return a CollectReport.
+
+ Stops at first non-None result, see :ref:`firstresult` """
+
+
+# -------------------------------------------------------------------------
+# Python test function related hooks
+# -------------------------------------------------------------------------
+
+
+@hookspec(firstresult=True)
+def pytest_pycollect_makemodule(path, parent):
+ """ return a Module collector or None for the given path.
+ This hook will be called for each matching test module path.
+ The pytest_collect_file hook needs to be used if you want to
+ create test modules for files that do not match as a test module.
+
+ Stops at first non-None result, see :ref:`firstresult` """
+
+
+@hookspec(firstresult=True)
+def pytest_pycollect_makeitem(collector, name, obj):
+ """ return custom item/collector for a python object in a module, or None.
+
+ Stops at first non-None result, see :ref:`firstresult` """
+
+
+@hookspec(firstresult=True)
+def pytest_pyfunc_call(pyfuncitem):
+ """ call underlying test function.
+
+ Stops at first non-None result, see :ref:`firstresult` """
+
+
+def pytest_generate_tests(metafunc):
+ """ generate (multiple) parametrized calls to a test function."""
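+ # Illustrative implementation (a sketch; the "stringinput" fixture name is an
+ # assumed example): generate one test call per value.
+ #
+ #     def pytest_generate_tests(metafunc):
+ #         if "stringinput" in metafunc.fixturenames:
+ #             metafunc.parametrize("stringinput", ["a", "b"])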
+
+
+@hookspec(firstresult=True)
+def pytest_make_parametrize_id(config, val, argname):
+ """Return a user-friendly string representation of the given ``val`` that will be used
+ by @pytest.mark.parametrize calls. Return None if the hook doesn't know about ``val``.
+ The parameter name is available as ``argname``, if required.
+
+ Stops at first non-None result, see :ref:`firstresult`
+
+ :param _pytest.config.Config config: pytest config object
+ :param val: the parametrized value
+ :param str argname: the automatic parameter name produced by pytest
+ """
+
+
+# -------------------------------------------------------------------------
+# generic runtest related hooks
+# -------------------------------------------------------------------------
+
+
+@hookspec(firstresult=True)
+def pytest_runtestloop(session):
+ """ called for performing the main runtest loop
+ (after collection finished).
+
+ Stops at first non-None result, see :ref:`firstresult`
+
+ :param _pytest.main.Session session: the pytest session object
+ """
+
+
+def pytest_itemstart(item, node):
+ """(**Deprecated**) use pytest_runtest_logstart. """
+
+
+@hookspec(firstresult=True)
+def pytest_runtest_protocol(item, nextitem):
+ """ implements the runtest_setup/call/teardown protocol for
+ the given test item, including capturing exceptions and calling
+ reporting hooks.
+
+ :arg item: test item for which the runtest protocol is performed.
+
+ :arg nextitem: the scheduled-to-be-next test item (or None if this
+ is the end my friend). This argument is passed on to
+ :py:func:`pytest_runtest_teardown`.
+
+ :return boolean: True if no further hook implementations should be invoked.
+
+
+ Stops at first non-None result, see :ref:`firstresult` """
+
+
+def pytest_runtest_logstart(nodeid, location):
+ """ signal the start of running a single test item.
+
+ This hook will be called **before** :func:`pytest_runtest_setup`, :func:`pytest_runtest_call` and
+ :func:`pytest_runtest_teardown` hooks.
+
+ :param str nodeid: full id of the item
+ :param location: a triple of ``(filename, linenum, testname)``
+ """
+
+
+def pytest_runtest_logfinish(nodeid, location):
+ """ signal the complete finish of running a single test item.
+
+ This hook will be called **after** :func:`pytest_runtest_setup`, :func:`pytest_runtest_call` and
+ :func:`pytest_runtest_teardown` hooks.
+
+ :param str nodeid: full id of the item
+ :param location: a triple of ``(filename, linenum, testname)``
+ """
+
+
+def pytest_runtest_setup(item):
+ """ called before ``pytest_runtest_call(item)``. """
+
+
+def pytest_runtest_call(item):
+ """ called to execute the test ``item``. """
+
+
+def pytest_runtest_teardown(item, nextitem):
+ """ called after ``pytest_runtest_call``.
+
+ :arg nextitem: the scheduled-to-be-next test item (None if no further
+ test item is scheduled). This argument can be used to
+ perform exact teardowns, i.e. calling just enough finalizers
+ so that nextitem only needs to call setup-functions.
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_runtest_makereport(item, call):
+ """ return a :py:class:`_pytest.runner.TestReport` object
+ for the given :py:class:`pytest.Item <_pytest.main.Item>` and
+ :py:class:`_pytest.runner.CallInfo`.
+
+ Stops at first non-None result, see :ref:`firstresult` """
+
+
+def pytest_runtest_logreport(report):
+ """ process a test setup/call/teardown report relating to
+ the respective phase of executing a test. """
+
+
+# -------------------------------------------------------------------------
+# Fixture related hooks
+# -------------------------------------------------------------------------
+
+
+@hookspec(firstresult=True)
+def pytest_fixture_setup(fixturedef, request):
+ """ performs fixture setup execution.
+
+ :return: The return value of the call to the fixture function
+
+ Stops at first non-None result, see :ref:`firstresult`
+
+ .. note::
+ If the fixture function returns None, other implementations of
+ this hook function will continue to be called, according to the
+ behavior of the :ref:`firstresult` option.
+ """
+
+
+def pytest_fixture_post_finalizer(fixturedef, request):
+ """ called after fixture teardown, but before the cache is cleared so
+ the fixture result cache ``fixturedef.cached_result`` can
+ still be accessed."""
+
+
+# -------------------------------------------------------------------------
+# test session related hooks
+# -------------------------------------------------------------------------
+
+
+def pytest_sessionstart(session):
+ """ called after the ``Session`` object has been created and before performing collection
+ and entering the run test loop.
+
+ :param _pytest.main.Session session: the pytest session object
+ """
+
+
+def pytest_sessionfinish(session, exitstatus):
+ """ called after whole test run finished, right before returning the exit status to the system.
+
+ :param _pytest.main.Session session: the pytest session object
+ :param int exitstatus: the status which pytest will return to the system
+ """
+
+
+def pytest_unconfigure(config):
+ """ called before test process is exited.
+
+ :param _pytest.config.Config config: pytest config object
+ """
+
+
+# -------------------------------------------------------------------------
+# hooks for customizing the assert methods
+# -------------------------------------------------------------------------
+
+
+def pytest_assertrepr_compare(config, op, left, right):
+ """return explanation for comparisons in failing assert expressions.
+
+ Return None for no custom explanation, otherwise return a list
+ of strings. The strings will be joined by newlines but any newlines
+ *in* a string will be escaped. Note that all but the first line will
+ be indented slightly; the intention is for the first line to be a summary.
+
+ :param _pytest.config.Config config: pytest config object
+ """
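+ # Illustrative conftest.py implementation (a sketch; ``Foo`` is an assumed
+ # user-defined class, not something provided by pytest):
+ #
+ #     def pytest_assertrepr_compare(config, op, left, right):
+ #         if isinstance(left, Foo) and isinstance(right, Foo) and op == "==":
+ #             return ["Comparing Foo instances:",
+ #                     "   values: %s != %s" % (left.val, right.val)]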
+
+
+# -------------------------------------------------------------------------
+# hooks for influencing reporting (invoked from _pytest_terminal)
+# -------------------------------------------------------------------------
+
+
+def pytest_report_header(config, startdir):
+ """ return a string or list of strings to be displayed as header info for terminal reporting.
+
+ :param _pytest.config.Config config: pytest config object
+ :param startdir: py.path object with the starting dir
+
+ .. note::
+
+ This function should be implemented only in plugins or ``conftest.py``
+ files situated at the tests root directory due to how pytest
+ :ref:`discovers plugins during startup <pluginorder>`.
+ """
+
+
+def pytest_report_collectionfinish(config, startdir, items):
+ """
+ .. versionadded:: 3.2
+
+ return a string or list of strings to be displayed after collection has finished successfully.
+
+ These strings will be displayed after the standard "collected X items" message.
+
+ :param _pytest.config.Config config: pytest config object
+ :param startdir: py.path object with the starting dir
+ :param items: list of pytest items that are going to be executed; this list should not be modified.
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_report_teststatus(report):
+ """ return result-category, shortletter and verbose word for reporting.
+
+ Stops at first non-None result, see :ref:`firstresult` """
+
+
+def pytest_terminal_summary(terminalreporter, exitstatus):
+ """Add a section to terminal summary reporting.
+
+ :param _pytest.terminal.TerminalReporter terminalreporter: the internal terminal reporter object
+ :param int exitstatus: the exit status that will be reported back to the OS
+
+ .. versionadded:: 3.5
+ The ``config`` parameter.
+ """
+
+
+@hookspec(historic=True)
+def pytest_logwarning(message, code, nodeid, fslocation):
+ """ process a warning specified by a message, a code string,
+ a nodeid and fslocation (both of which may be None
+ if the warning is not tied to a particular node/location).
+
+ .. note::
+ This hook is incompatible with ``hookwrapper=True``.
+ """
+
+
+# -------------------------------------------------------------------------
+# doctest hooks
+# -------------------------------------------------------------------------
+
+
+@hookspec(firstresult=True)
+def pytest_doctest_prepare_content(content):
+ """ return processed content for a given doctest
+
+ Stops at first non-None result, see :ref:`firstresult` """
+
+
+# -------------------------------------------------------------------------
+# error handling and internal debugging hooks
+# -------------------------------------------------------------------------
+
+
+def pytest_internalerror(excrepr, excinfo):
+ """ called for internal errors. """
+
+
+def pytest_keyboard_interrupt(excinfo):
+ """ called for keyboard interrupt. """
+
+
+def pytest_exception_interact(node, call, report):
+ """called when an exception was raised which can potentially be
+ interactively handled.
+
+ This hook is only called if an exception was raised
+ that is not an internal exception like ``skip.Exception``.
+ """
+
+
+def pytest_enter_pdb(config):
+ """ called upon pdb.set_trace(), can be used by plugins to take special
+ """ called upon pdb.set_trace(); can be used by plugins to take special
+ action just before the python debugger enters interactive mode.
+ :param _pytest.config.Config config: pytest config object
+ """
diff --git a/third_party/python/pytest/src/_pytest/junitxml.py b/third_party/python/pytest/src/_pytest/junitxml.py
new file mode 100644
index 0000000000..29da27de7c
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/junitxml.py
@@ -0,0 +1,569 @@
+"""
+ report test results in JUnit-XML format,
+ for use with Jenkins and build integration servers.
+
+
+Based on initial code from Ross Lawley.
+
+Output conforms to https://github.com/jenkinsci/xunit-plugin/blob/master/
+src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd
+"""
+from __future__ import absolute_import, division, print_function
+
+import functools
+import py
+import os
+import re
+import sys
+import time
+import pytest
+from _pytest import nodes
+from _pytest.config import filename_arg
+
+# Python 2.X and 3.X compatibility
+if sys.version_info[0] < 3:
+ from codecs import open
+else:
+ unichr = chr
+ unicode = str
+ long = int
+
+
+class Junit(py.xml.Namespace):
+ pass
+
+
+# We need to get the subset of the invalid unicode ranges according to
+# XML 1.0 which are valid in this python build. Hence we calculate
+# this dynamically instead of hardcoding it. The spec range of valid
+# chars is: Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD]
+# | [#x10000-#x10FFFF]
+_legal_chars = (0x09, 0x0A, 0x0d)
+_legal_ranges = ((0x20, 0x7E), (0x80, 0xD7FF), (0xE000, 0xFFFD), (0x10000, 0x10FFFF))
+_legal_xml_re = [
+ unicode("%s-%s") % (unichr(low), unichr(high))
+ for (low, high) in _legal_ranges
+ if low < sys.maxunicode
+]
+_legal_xml_re = [unichr(x) for x in _legal_chars] + _legal_xml_re
+illegal_xml_re = re.compile(unicode("[^%s]") % unicode("").join(_legal_xml_re))
+del _legal_chars
+del _legal_ranges
+del _legal_xml_re
+
+_py_ext_re = re.compile(r"\.py$")
+
+
+def bin_xml_escape(arg):
+
+ def repl(matchobj):
+ i = ord(matchobj.group())
+ if i <= 0xFF:
+ return unicode("#x%02X") % i
+ else:
+ return unicode("#x%04X") % i
+
+ return py.xml.raw(illegal_xml_re.sub(repl, py.xml.escape(arg)))
+
+
+class _NodeReporter(object):
+
+ def __init__(self, nodeid, xml):
+
+ self.id = nodeid
+ self.xml = xml
+ self.add_stats = self.xml.add_stats
+ self.duration = 0
+ self.properties = []
+ self.nodes = []
+ self.testcase = None
+ self.attrs = {}
+
+ def append(self, node):
+ self.xml.add_stats(type(node).__name__)
+ self.nodes.append(node)
+
+ def add_property(self, name, value):
+ self.properties.append((str(name), bin_xml_escape(value)))
+
+ def add_attribute(self, name, value):
+ self.attrs[str(name)] = bin_xml_escape(value)
+
+ def make_properties_node(self):
+ """Return a Junit node containing custom properties, if any.
+ """
+ if self.properties:
+ return Junit.properties(
+ [
+ Junit.property(name=name, value=value)
+ for name, value in self.properties
+ ]
+ )
+ return ""
+
+ def record_testreport(self, testreport):
+ assert not self.testcase
+ names = mangle_test_address(testreport.nodeid)
+ existing_attrs = self.attrs
+ classnames = names[:-1]
+ if self.xml.prefix:
+ classnames.insert(0, self.xml.prefix)
+ attrs = {
+ "classname": ".".join(classnames),
+ "name": bin_xml_escape(names[-1]),
+ "file": testreport.location[0],
+ }
+ if testreport.location[1] is not None:
+ attrs["line"] = testreport.location[1]
+ if hasattr(testreport, "url"):
+ attrs["url"] = testreport.url
+ self.attrs = attrs
+ self.attrs.update(existing_attrs) # restore any user-defined attributes
+
+ def to_xml(self):
+ testcase = Junit.testcase(time=self.duration, **self.attrs)
+ testcase.append(self.make_properties_node())
+ for node in self.nodes:
+ testcase.append(node)
+ return testcase
+
+ def _add_simple(self, kind, message, data=None):
+ data = bin_xml_escape(data)
+ node = kind(data, message=message)
+ self.append(node)
+
+ def write_captured_output(self, report):
+ content_out = report.capstdout
+ content_log = report.caplog
+ content_err = report.capstderr
+
+ if content_log or content_out:
+ if content_log and self.xml.logging == "system-out":
+ if content_out:
+ # syncing stdout and the log-output is not done yet. It's
+ # probably not worth the effort. Therefore, first the captured
+ # stdout is shown and then the captured logs.
+ content = "\n".join(
+ [
+ " Captured Stdout ".center(80, "-"),
+ content_out,
+ "",
+ " Captured Log ".center(80, "-"),
+ content_log,
+ ]
+ )
+ else:
+ content = content_log
+ else:
+ content = content_out
+
+ if content:
+ tag = getattr(Junit, "system-out")
+ self.append(tag(bin_xml_escape(content)))
+
+ if content_log or content_err:
+ if content_log and self.xml.logging == "system-err":
+ if content_err:
+ content = "\n".join(
+ [
+ " Captured Stderr ".center(80, "-"),
+ content_err,
+ "",
+ " Captured Log ".center(80, "-"),
+ content_log,
+ ]
+ )
+ else:
+ content = content_log
+ else:
+ content = content_err
+
+ if content:
+ tag = getattr(Junit, "system-err")
+ self.append(tag(bin_xml_escape(content)))
+
+ def append_pass(self, report):
+ self.add_stats("passed")
+
+ def append_failure(self, report):
+ # msg = str(report.longrepr.reprtraceback.extraline)
+ if hasattr(report, "wasxfail"):
+ self._add_simple(Junit.skipped, "xfail-marked test passes unexpectedly")
+ else:
+ if hasattr(report.longrepr, "reprcrash"):
+ message = report.longrepr.reprcrash.message
+ elif isinstance(report.longrepr, (unicode, str)):
+ message = report.longrepr
+ else:
+ message = str(report.longrepr)
+ message = bin_xml_escape(message)
+ fail = Junit.failure(message=message)
+ fail.append(bin_xml_escape(report.longrepr))
+ self.append(fail)
+
+ def append_collect_error(self, report):
+ # msg = str(report.longrepr.reprtraceback.extraline)
+ self.append(
+ Junit.error(bin_xml_escape(report.longrepr), message="collection failure")
+ )
+
+ def append_collect_skipped(self, report):
+ self._add_simple(Junit.skipped, "collection skipped", report.longrepr)
+
+ def append_error(self, report):
+ if getattr(report, "when", None) == "teardown":
+ msg = "test teardown failure"
+ else:
+ msg = "test setup failure"
+ self._add_simple(Junit.error, msg, report.longrepr)
+
+ def append_skipped(self, report):
+ if hasattr(report, "wasxfail"):
+ self._add_simple(Junit.skipped, "expected test failure", report.wasxfail)
+ else:
+ filename, lineno, skipreason = report.longrepr
+ if skipreason.startswith("Skipped: "):
+ skipreason = bin_xml_escape(skipreason[9:])
+ self.append(
+ Junit.skipped(
+ "%s:%s: %s" % (filename, lineno, skipreason),
+ type="pytest.skip",
+ message=skipreason,
+ )
+ )
+ self.write_captured_output(report)
+
+ def finalize(self):
+ data = self.to_xml().unicode(indent=0)
+ self.__dict__.clear()
+ self.to_xml = lambda: py.xml.raw(data)
+
+
+@pytest.fixture
+def record_property(request):
+ """Add extra properties to the calling test.
+ User properties become part of the test report and are available to the
+ configured reporters, like JUnit XML.
+ The fixture is callable with ``(name, value)``, with value being automatically
+ xml-encoded.
+
+ Example::
+
+ def test_function(record_property):
+ record_property("example_key", 1)
+ """
+
+ def append_property(name, value):
+ request.node.user_properties.append((name, value))
+
+ return append_property
+
+
+@pytest.fixture
+def record_xml_property(record_property):
+ """(Deprecated) use record_property."""
+ import warnings
+ from _pytest import deprecated
+
+ warnings.warn(deprecated.RECORD_XML_PROPERTY, DeprecationWarning, stacklevel=2)
+
+ return record_property
+
+
+@pytest.fixture
+def record_xml_attribute(request):
+ """Add extra xml attributes to the tag for the calling test.
+ The fixture is callable with ``(name, value)``, with value being
+ automatically xml-encoded
+ """
+ request.node.warn(
+ code="C3", message="record_xml_attribute is an experimental feature"
+ )
+ xml = getattr(request.config, "_xml", None)
+ if xml is not None:
+ node_reporter = xml.node_reporter(request.node.nodeid)
+ return node_reporter.add_attribute
+ else:
+
+ def add_attr_noop(name, value):
+ pass
+
+ return add_attr_noop
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting")
+ group.addoption(
+ "--junitxml",
+ "--junit-xml",
+ action="store",
+ dest="xmlpath",
+ metavar="path",
+ type=functools.partial(filename_arg, optname="--junitxml"),
+ default=None,
+ help="create junit-xml style report file at given path.",
+ )
+ group.addoption(
+ "--junitprefix",
+ "--junit-prefix",
+ action="store",
+ metavar="str",
+ default=None,
+ help="prepend prefix to classnames in junit-xml output",
+ )
+ parser.addini(
+ "junit_suite_name", "Test suite name for JUnit report", default="pytest"
+ )
+ parser.addini(
+ "junit_logging",
+ "Write captured log messages to JUnit report: "
+ "one of no|system-out|system-err",
+ default="no",
+ ) # choices=['no', 'stdout', 'stderr'])
+
+
+def pytest_configure(config):
+ xmlpath = config.option.xmlpath
+ # prevent opening xmllog on slave nodes (xdist)
+ if xmlpath and not hasattr(config, "slaveinput"):
+ config._xml = LogXML(
+ xmlpath,
+ config.option.junitprefix,
+ config.getini("junit_suite_name"),
+ config.getini("junit_logging"),
+ )
+ config.pluginmanager.register(config._xml)
+
+
+def pytest_unconfigure(config):
+ xml = getattr(config, "_xml", None)
+ if xml:
+ del config._xml
+ config.pluginmanager.unregister(xml)
+
+
+def mangle_test_address(address):
+ path, possible_open_bracket, params = address.partition("[")
+ names = path.split("::")
+ try:
+ names.remove("()")
+ except ValueError:
+ pass
+ # convert file path to dotted path
+ names[0] = names[0].replace(nodes.SEP, ".")
+ names[0] = _py_ext_re.sub("", names[0])
+ # put any params back
+ names[-1] += possible_open_bracket + params
+ return names
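+ # Worked example (derived from the code above): the node id
+ #     "tests/test_sub.py::TestClass::test_method[case1]"
+ # is mangled to
+ #     ["tests.test_sub", "TestClass", "test_method[case1]"]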
+
+
+class LogXML(object):
+
+ def __init__(self, logfile, prefix, suite_name="pytest", logging="no"):
+ logfile = os.path.expanduser(os.path.expandvars(logfile))
+ self.logfile = os.path.normpath(os.path.abspath(logfile))
+ self.prefix = prefix
+ self.suite_name = suite_name
+ self.logging = logging
+ self.stats = dict.fromkeys(["error", "passed", "failure", "skipped"], 0)
+ self.node_reporters = {} # nodeid -> _NodeReporter
+ self.node_reporters_ordered = []
+ self.global_properties = []
+ # List of reports that failed on call but teardown is pending.
+ self.open_reports = []
+ self.cnt_double_fail_tests = 0
+
+ def finalize(self, report):
+ nodeid = getattr(report, "nodeid", report)
+ # local hack to handle xdist report order
+ slavenode = getattr(report, "node", None)
+ reporter = self.node_reporters.pop((nodeid, slavenode))
+ if reporter is not None:
+ reporter.finalize()
+
+ def node_reporter(self, report):
+ nodeid = getattr(report, "nodeid", report)
+ # local hack to handle xdist report order
+ slavenode = getattr(report, "node", None)
+
+ key = nodeid, slavenode
+
+ if key in self.node_reporters:
+ # TODO: breaks for --dist=each
+ return self.node_reporters[key]
+
+ reporter = _NodeReporter(nodeid, self)
+
+ self.node_reporters[key] = reporter
+ self.node_reporters_ordered.append(reporter)
+
+ return reporter
+
+ def add_stats(self, key):
+ if key in self.stats:
+ self.stats[key] += 1
+
+ def _opentestcase(self, report):
+ reporter = self.node_reporter(report)
+ reporter.record_testreport(report)
+ return reporter
+
+ def pytest_runtest_logreport(self, report):
+ """handle a setup/call/teardown report, generating the appropriate
+ xml tags as necessary.
+
+ note: due to plugins like xdist, this hook may be called in interleaved
+ order with reports from other nodes. for example:
+
+ usual call order:
+ -> setup node1
+ -> call node1
+ -> teardown node1
+ -> setup node2
+ -> call node2
+ -> teardown node2
+
+ possible call order in xdist:
+ -> setup node1
+ -> call node1
+ -> setup node2
+ -> call node2
+ -> teardown node2
+ -> teardown node1
+ """
+ close_report = None
+ if report.passed:
+ if report.when == "call": # ignore setup/teardown
+ reporter = self._opentestcase(report)
+ reporter.append_pass(report)
+ elif report.failed:
+ if report.when == "teardown":
+ # The following vars are needed when xdist plugin is used
+ report_wid = getattr(report, "worker_id", None)
+ report_ii = getattr(report, "item_index", None)
+ close_report = next(
+ (
+ rep
+ for rep in self.open_reports
+ if (
+ rep.nodeid == report.nodeid
+ and getattr(rep, "item_index", None) == report_ii
+ and getattr(rep, "worker_id", None) == report_wid
+ )
+ ),
+ None,
+ )
+ if close_report:
+ # We need to open new testcase in case we have failure in
+ # call and error in teardown in order to follow junit
+ # schema
+ self.finalize(close_report)
+ self.cnt_double_fail_tests += 1
+ reporter = self._opentestcase(report)
+ if report.when == "call":
+ reporter.append_failure(report)
+ self.open_reports.append(report)
+ else:
+ reporter.append_error(report)
+ elif report.skipped:
+ reporter = self._opentestcase(report)
+ reporter.append_skipped(report)
+ self.update_testcase_duration(report)
+ if report.when == "teardown":
+ reporter = self._opentestcase(report)
+ reporter.write_captured_output(report)
+
+ for propname, propvalue in report.user_properties:
+ reporter.add_property(propname, propvalue)
+
+ self.finalize(report)
+ report_wid = getattr(report, "worker_id", None)
+ report_ii = getattr(report, "item_index", None)
+ close_report = next(
+ (
+ rep
+ for rep in self.open_reports
+ if (
+ rep.nodeid == report.nodeid
+ and getattr(rep, "item_index", None) == report_ii
+ and getattr(rep, "worker_id", None) == report_wid
+ )
+ ),
+ None,
+ )
+ if close_report:
+ self.open_reports.remove(close_report)
+
+ def update_testcase_duration(self, report):
+ """accumulates total duration for nodeid from given report and updates
+ the Junit.testcase with the new total if already created.
+ """
+ reporter = self.node_reporter(report)
+ reporter.duration += getattr(report, "duration", 0.0)
+
+ def pytest_collectreport(self, report):
+ if not report.passed:
+ reporter = self._opentestcase(report)
+ if report.failed:
+ reporter.append_collect_error(report)
+ else:
+ reporter.append_collect_skipped(report)
+
+ def pytest_internalerror(self, excrepr):
+ reporter = self.node_reporter("internal")
+ reporter.attrs.update(classname="pytest", name="internal")
+ reporter._add_simple(Junit.error, "internal error", excrepr)
+
+ def pytest_sessionstart(self):
+ self.suite_start_time = time.time()
+
+ def pytest_sessionfinish(self):
+ dirname = os.path.dirname(os.path.abspath(self.logfile))
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname)
+ logfile = open(self.logfile, "w", encoding="utf-8")
+ suite_stop_time = time.time()
+ suite_time_delta = suite_stop_time - self.suite_start_time
+
+ numtests = (
+ self.stats["passed"]
+ + self.stats["failure"]
+ + self.stats["skipped"]
+ + self.stats["error"]
+ - self.cnt_double_fail_tests
+ )
+ logfile.write('<?xml version="1.0" encoding="utf-8"?>')
+
+ logfile.write(
+ Junit.testsuite(
+ self._get_global_properties_node(),
+ [x.to_xml() for x in self.node_reporters_ordered],
+ name=self.suite_name,
+ errors=self.stats["error"],
+ failures=self.stats["failure"],
+ skips=self.stats["skipped"],
+ tests=numtests,
+ time="%.3f" % suite_time_delta,
+ ).unicode(
+ indent=0
+ )
+ )
+ logfile.close()
+
+ def pytest_terminal_summary(self, terminalreporter):
+ terminalreporter.write_sep("-", "generated xml file: %s" % (self.logfile))
+
+ def add_global_property(self, name, value):
+ self.global_properties.append((str(name), bin_xml_escape(value)))
+
+ def _get_global_properties_node(self):
+ """Return a Junit node containing custom properties, if any.
+ """
+ if self.global_properties:
+ return Junit.properties(
+ [
+ Junit.property(name=name, value=value)
+ for name, value in self.global_properties
+ ]
+ )
+ return ""
diff --git a/third_party/python/pytest/src/_pytest/logging.py b/third_party/python/pytest/src/_pytest/logging.py
new file mode 100644
index 0000000000..00bb9aeb54
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/logging.py
@@ -0,0 +1,575 @@
+""" Access and control log capturing. """
+from __future__ import absolute_import, division, print_function
+
+import logging
+from contextlib import closing, contextmanager
+import re
+import six
+
+from _pytest.config import create_terminal_writer
+import pytest
+import py
+
+
+DEFAULT_LOG_FORMAT = "%(filename)-25s %(lineno)4d %(levelname)-8s %(message)s"
+DEFAULT_LOG_DATE_FORMAT = "%H:%M:%S"
+
+
+class ColoredLevelFormatter(logging.Formatter):
+ """
+ Colorize the %(levelname)..s part of the log format passed to __init__.
+ """
+
+ LOGLEVEL_COLOROPTS = {
+ logging.CRITICAL: {"red"},
+ logging.ERROR: {"red", "bold"},
+ logging.WARNING: {"yellow"},
+ logging.WARN: {"yellow"},
+ logging.INFO: {"green"},
+ logging.DEBUG: {"purple"},
+ logging.NOTSET: set(),
+ }
+ LEVELNAME_FMT_REGEX = re.compile(r"%\(levelname\)([+-]?\d*s)")
+
+ def __init__(self, terminalwriter, *args, **kwargs):
+ super(ColoredLevelFormatter, self).__init__(*args, **kwargs)
+ if six.PY2:
+ self._original_fmt = self._fmt
+ else:
+ self._original_fmt = self._style._fmt
+ self._level_to_fmt_mapping = {}
+
+ levelname_fmt_match = self.LEVELNAME_FMT_REGEX.search(self._fmt)
+ if not levelname_fmt_match:
+ return
+ levelname_fmt = levelname_fmt_match.group()
+
+ for level, color_opts in self.LOGLEVEL_COLOROPTS.items():
+ formatted_levelname = levelname_fmt % {
+ "levelname": logging.getLevelName(level)
+ }
+
+ # add ANSI escape sequences around the formatted levelname
+ color_kwargs = {name: True for name in color_opts}
+ colorized_formatted_levelname = terminalwriter.markup(
+ formatted_levelname, **color_kwargs
+ )
+ self._level_to_fmt_mapping[level] = self.LEVELNAME_FMT_REGEX.sub(
+ colorized_formatted_levelname, self._fmt
+ )
+
+ def format(self, record):
+ fmt = self._level_to_fmt_mapping.get(record.levelno, self._original_fmt)
+ if six.PY2:
+ self._fmt = fmt
+ else:
+ self._style._fmt = fmt
+ return super(ColoredLevelFormatter, self).format(record)
+
+
+def get_option_ini(config, *names):
+ for name in names:
+ ret = config.getoption(name) # 'default' arg won't work as expected
+ if ret is None:
+ ret = config.getini(name)
+ if ret:
+ return ret
+
+
+def pytest_addoption(parser):
+ """Add options to control log capturing."""
+ group = parser.getgroup("logging")
+
+ def add_option_ini(option, dest, default=None, type=None, **kwargs):
+ parser.addini(
+ dest, default=default, type=type, help="default value for " + option
+ )
+ group.addoption(option, dest=dest, **kwargs)
+
+ add_option_ini(
+ "--no-print-logs",
+ dest="log_print",
+ action="store_const",
+ const=False,
+ default=True,
+ type="bool",
+ help="disable printing caught logs on failed tests.",
+ )
+ add_option_ini(
+ "--log-level",
+ dest="log_level",
+ default=None,
+ help="logging level used by the logging module",
+ )
+ add_option_ini(
+ "--log-format",
+ dest="log_format",
+ default=DEFAULT_LOG_FORMAT,
+ help="log format as used by the logging module.",
+ )
+ add_option_ini(
+ "--log-date-format",
+ dest="log_date_format",
+ default=DEFAULT_LOG_DATE_FORMAT,
+ help="log date format as used by the logging module.",
+ )
+ parser.addini(
+ "log_cli",
+ default=False,
+ type="bool",
+ help='enable log display during test run (also known as "live logging").',
+ )
+ add_option_ini(
+ "--log-cli-level", dest="log_cli_level", default=None, help="cli logging level."
+ )
+ add_option_ini(
+ "--log-cli-format",
+ dest="log_cli_format",
+ default=None,
+ help="log format as used by the logging module.",
+ )
+ add_option_ini(
+ "--log-cli-date-format",
+ dest="log_cli_date_format",
+ default=None,
+ help="log date format as used by the logging module.",
+ )
+ add_option_ini(
+ "--log-file",
+ dest="log_file",
+ default=None,
+ help="path to a file where logging will be written to.",
+ )
+ add_option_ini(
+ "--log-file-level",
+ dest="log_file_level",
+ default=None,
+ help="log file logging level.",
+ )
+ add_option_ini(
+ "--log-file-format",
+ dest="log_file_format",
+ default=DEFAULT_LOG_FORMAT,
+ help="log format as used by the logging module.",
+ )
+ add_option_ini(
+ "--log-file-date-format",
+ dest="log_file_date_format",
+ default=DEFAULT_LOG_DATE_FORMAT,
+ help="log date format as used by the logging module.",
+ )
+
+
+@contextmanager
+def catching_logs(handler, formatter=None, level=None):
+ """Context manager that prepares the whole logging machinery properly."""
+ root_logger = logging.getLogger()
+
+ if formatter is not None:
+ handler.setFormatter(formatter)
+ if level is not None:
+ handler.setLevel(level)
+
+ # Adding the same handler twice would confuse the logging system.
+ # Just don't do that.
+ add_new_handler = handler not in root_logger.handlers
+
+ if add_new_handler:
+ root_logger.addHandler(handler)
+ if level is not None:
+ orig_level = root_logger.level
+ root_logger.setLevel(min(orig_level, level))
+ try:
+ yield handler
+ finally:
+ if level is not None:
+ root_logger.setLevel(orig_level)
+ if add_new_handler:
+ root_logger.removeHandler(handler)
+
+
+class LogCaptureHandler(logging.StreamHandler):
+ """A logging handler that stores log records and the log text."""
+
+ def __init__(self):
+ """Creates a new log handler."""
+ logging.StreamHandler.__init__(self, py.io.TextIO())
+ self.records = []
+
+ def emit(self, record):
+ """Keep the log records in a list in addition to the log text."""
+ self.records.append(record)
+ logging.StreamHandler.emit(self, record)
+
+ def reset(self):
+ self.records = []
+ self.stream = py.io.TextIO()
+
+
+class LogCaptureFixture(object):
+ """Provides access and control of log capturing."""
+
+ def __init__(self, item):
+ """Creates a new funcarg."""
+ self._item = item
+ self._initial_log_levels = {} # type: Dict[str, int] # dict of log name -> log level
+
+ def _finalize(self):
+ """Finalizes the fixture.
+
+ This restores the log levels changed by :meth:`set_level`.
+ """
+ # restore log levels
+ for logger_name, level in self._initial_log_levels.items():
+ logger = logging.getLogger(logger_name)
+ logger.setLevel(level)
+
+ @property
+ def handler(self):
+ """
+ :rtype: LogCaptureHandler
+ """
+ return self._item.catch_log_handler
+
+ def get_records(self, when):
+ """
+ Get the logging records for one of the possible test phases.
+
+ :param str when:
+ Which test phase to obtain the records from. Valid values are: "setup", "call" and "teardown".
+
+ :rtype: List[logging.LogRecord]
+ :return: the list of captured records at the given stage
+
+ .. versionadded:: 3.4
+ """
+ handler = self._item.catch_log_handlers.get(when)
+ if handler:
+ return handler.records
+ else:
+ return []
+
+ @property
+ def text(self):
+ """Returns the log text."""
+ return self.handler.stream.getvalue()
+
+ @property
+ def records(self):
+ """Returns the list of log records."""
+ return self.handler.records
+
+ @property
+ def record_tuples(self):
+ """Returns a list of stripped-down log records intended
+ for use in assertion comparison.
+
+ The format of the tuple is:
+
+ (logger_name, log_level, message)
+ """
+ return [(r.name, r.levelno, r.getMessage()) for r in self.records]
+
+ def clear(self):
+ """Reset the list of log records and the captured log text."""
+ self.handler.reset()
+
+ def set_level(self, level, logger=None):
+ """Sets the level for capturing of logs. The level will be restored to its previous value at the end of
+ the test.
+
+ :param int level: the level.
+ :param str logger: the logger to update the level of. If not given, the root logger level is updated.
+
+ .. versionchanged:: 3.4
+ The levels of the loggers changed by this function will be restored to their initial values at the
+ end of the test.
+ """
+ logger_name = logger
+ logger = logging.getLogger(logger_name)
+ # save the original log-level to restore it during teardown
+ self._initial_log_levels.setdefault(logger_name, logger.level)
+ logger.setLevel(level)
+
+ @contextmanager
+ def at_level(self, level, logger=None):
+ """Context manager that sets the level for capturing of logs. After the end of the 'with' statement the
+ level is restored to its original value.
+
+ :param int level: the level.
+ :param str logger: the logger to update the level of. If not given, the root logger level is updated.
+ """
+ logger = logging.getLogger(logger)
+ orig_level = logger.level
+ logger.setLevel(level)
+ try:
+ yield
+ finally:
+ logger.setLevel(orig_level)
+
+
+@pytest.fixture
+def caplog(request):
+ """Access and control log capturing.
+
+ Captured logs are available through the following methods::
+
+ * caplog.text -> string containing formatted log output
+ * caplog.records -> list of logging.LogRecord instances
+ * caplog.record_tuples -> list of (logger_name, level, message) tuples
+ * caplog.clear() -> clear captured records and formatted log output string
+ """
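+ # Typical use inside a test function (an illustrative sketch; assumes the
+ # test module imports ``logging`` and emits only this one record):
+ #
+ #     def test_warning(caplog):
+ #         logging.getLogger(__name__).warning("boom")
+ #         assert caplog.record_tuples == [(__name__, logging.WARNING, "boom")]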
+ result = LogCaptureFixture(request.node)
+ yield result
+ result._finalize()
+
+
+def get_actual_log_level(config, *setting_names):
+ """Return the actual logging level."""
+
+ for setting_name in setting_names:
+ log_level = config.getoption(setting_name)
+ if log_level is None:
+ log_level = config.getini(setting_name)
+ if log_level:
+ break
+ else:
+ return
+
+ if isinstance(log_level, six.string_types):
+ log_level = log_level.upper()
+ try:
+ return int(getattr(logging, log_level, log_level))
+ except ValueError:
+ # Python logging does not recognise this as a logging level
+ raise pytest.UsageError(
+ "'{}' is not recognized as a logging level name for "
+ "'{}'. Please consider passing the "
+ "logging level num instead.".format(log_level, setting_name)
+ )
+
+
+def pytest_configure(config):
+ config.pluginmanager.register(LoggingPlugin(config), "logging-plugin")
+
+
+@contextmanager
+def _dummy_context_manager():
+ yield
+
+
+class LoggingPlugin(object):
+ """Attaches to the logging module and captures log messages for each test.
+ """
+
+ def __init__(self, config):
+ """Creates a new plugin to capture log messages.
+
+ The formatter can be safely shared across all handlers so
+ create a single one for the entire test session here.
+ """
+ self._config = config
+
+ # enable verbose output automatically if live logging is enabled
+ if self._log_cli_enabled() and not config.getoption("verbose"):
+ # sanity check: terminal reporter should not have been loaded at this point
+ assert self._config.pluginmanager.get_plugin("terminalreporter") is None
+ config.option.verbose = 1
+
+ self.print_logs = get_option_ini(config, "log_print")
+ self.formatter = logging.Formatter(
+ get_option_ini(config, "log_format"),
+ get_option_ini(config, "log_date_format"),
+ )
+ self.log_level = get_actual_log_level(config, "log_level")
+
+ log_file = get_option_ini(config, "log_file")
+ if log_file:
+ self.log_file_level = get_actual_log_level(config, "log_file_level")
+
+ log_file_format = get_option_ini(config, "log_file_format", "log_format")
+ log_file_date_format = get_option_ini(
+ config, "log_file_date_format", "log_date_format"
+ )
+ # Each pytest runtests session will write to a clean logfile
+ self.log_file_handler = logging.FileHandler(log_file, mode="w")
+ log_file_formatter = logging.Formatter(
+ log_file_format, datefmt=log_file_date_format
+ )
+ self.log_file_handler.setFormatter(log_file_formatter)
+ else:
+ self.log_file_handler = None
+
+ # initialized during pytest_runtestloop
+ self.log_cli_handler = None
+
+ def _log_cli_enabled(self):
+ """Return True if log_cli should be considered enabled, either explicitly
+ or because --log-cli-level was given in the command-line.
+ """
+ return self._config.getoption(
+ "--log-cli-level"
+ ) is not None or self._config.getini(
+ "log_cli"
+ )
+
+ @contextmanager
+ def _runtest_for(self, item, when):
+ """Implements the internals of pytest_runtest_xxx() hook."""
+ with catching_logs(
+ LogCaptureHandler(), formatter=self.formatter, level=self.log_level
+ ) as log_handler:
+ if self.log_cli_handler:
+ self.log_cli_handler.set_when(when)
+
+ if item is None:
+ yield # run the test
+ return
+
+ if not hasattr(item, "catch_log_handlers"):
+ item.catch_log_handlers = {}
+ item.catch_log_handlers[when] = log_handler
+ item.catch_log_handler = log_handler
+ try:
+ yield # run test
+ finally:
+ del item.catch_log_handler
+ if when == "teardown":
+ del item.catch_log_handlers
+
+ if self.print_logs:
+ # Add a captured log section to the report.
+ log = log_handler.stream.getvalue().strip()
+ item.add_report_section(when, "log", log)
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_setup(self, item):
+ with self._runtest_for(item, "setup"):
+ yield
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_call(self, item):
+ with self._runtest_for(item, "call"):
+ yield
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_teardown(self, item):
+ with self._runtest_for(item, "teardown"):
+ yield
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_logstart(self):
+ if self.log_cli_handler:
+ self.log_cli_handler.reset()
+ with self._runtest_for(None, "start"):
+ yield
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_logfinish(self):
+ with self._runtest_for(None, "finish"):
+ yield
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtestloop(self, session):
+ """Runs all collected test items."""
+ self._setup_cli_logging()
+ with self.live_logs_context:
+ if self.log_file_handler is not None:
+ with closing(self.log_file_handler):
+ with catching_logs(
+ self.log_file_handler, level=self.log_file_level
+ ):
+ yield # run all the tests
+ else:
+ yield # run all the tests
+
+ def _setup_cli_logging(self):
+ """Sets up the handler and logger for the Live Logs feature, if enabled.
+
+ This must be done right before starting the loop so we can access the terminal reporter plugin.
+ """
+ terminal_reporter = self._config.pluginmanager.get_plugin("terminalreporter")
+ if self._log_cli_enabled() and terminal_reporter is not None:
+ capture_manager = self._config.pluginmanager.get_plugin("capturemanager")
+ log_cli_handler = _LiveLoggingStreamHandler(
+ terminal_reporter, capture_manager
+ )
+ log_cli_format = get_option_ini(
+ self._config, "log_cli_format", "log_format"
+ )
+ log_cli_date_format = get_option_ini(
+ self._config, "log_cli_date_format", "log_date_format"
+ )
+ if (
+ self._config.option.color != "no"
+ and ColoredLevelFormatter.LEVELNAME_FMT_REGEX.search(log_cli_format)
+ ):
+ log_cli_formatter = ColoredLevelFormatter(
+ create_terminal_writer(self._config),
+ log_cli_format,
+ datefmt=log_cli_date_format,
+ )
+ else:
+ log_cli_formatter = logging.Formatter(
+ log_cli_format, datefmt=log_cli_date_format
+ )
+ log_cli_level = get_actual_log_level(
+ self._config, "log_cli_level", "log_level"
+ )
+ self.log_cli_handler = log_cli_handler
+ self.live_logs_context = catching_logs(
+ log_cli_handler, formatter=log_cli_formatter, level=log_cli_level
+ )
+ else:
+ self.live_logs_context = _dummy_context_manager()
+
+
+class _LiveLoggingStreamHandler(logging.StreamHandler):
+ """
+ Custom StreamHandler used by the live logging feature: it will write a newline before the first log message
+ in each test.
+
+ During live logging we must also explicitly disable stdout/stderr capturing otherwise it will get captured
+ and won't appear in the terminal.
+ """
+
+ def __init__(self, terminal_reporter, capture_manager):
+ """
+ :param _pytest.terminal.TerminalReporter terminal_reporter:
+ :param _pytest.capture.CaptureManager capture_manager:
+ """
+ logging.StreamHandler.__init__(self, stream=terminal_reporter)
+ self.capture_manager = capture_manager
+ self.reset()
+ self.set_when(None)
+ self._test_outcome_written = False
+
+ def reset(self):
+ """Reset the handler; should be called before the start of each test"""
+ self._first_record_emitted = False
+
+ def set_when(self, when):
+ """Prepares for the given test phase (setup/call/teardown)"""
+ self._when = when
+ self._section_name_shown = False
+ if when == "start":
+ self._test_outcome_written = False
+
+ def emit(self, record):
+ if self.capture_manager is not None:
+ self.capture_manager.suspend_global_capture()
+ try:
+ if not self._first_record_emitted:
+ self.stream.write("\n")
+ self._first_record_emitted = True
+ elif self._when in ("teardown", "finish"):
+ if not self._test_outcome_written:
+ self._test_outcome_written = True
+ self.stream.write("\n")
+ if not self._section_name_shown and self._when:
+ self.stream.section("live log " + self._when, sep="-", bold=True)
+ self._section_name_shown = True
+ logging.StreamHandler.emit(self, record)
+ finally:
+ if self.capture_manager is not None:
+ self.capture_manager.resume_global_capture()
diff --git a/third_party/python/pytest/src/_pytest/main.py b/third_party/python/pytest/src/_pytest/main.py
new file mode 100644
index 0000000000..23562358d7
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/main.py
@@ -0,0 +1,611 @@
+""" core implementation of testing process: init, session, runtest loop. """
+from __future__ import absolute_import, division, print_function
+
+import contextlib
+import functools
+import os
+import pkgutil
+import six
+import sys
+
+import _pytest
+from _pytest import nodes
+import _pytest._code
+import py
+
+from _pytest.config import directory_arg, UsageError, hookimpl
+from _pytest.outcomes import exit
+from _pytest.runner import collect_one_node
+
+
+# exitcodes for the command line
+EXIT_OK = 0
+EXIT_TESTSFAILED = 1
+EXIT_INTERRUPTED = 2
+EXIT_INTERNALERROR = 3
+EXIT_USAGEERROR = 4
+EXIT_NOTESTSCOLLECTED = 5
+
+
+def pytest_addoption(parser):
+ parser.addini(
+ "norecursedirs",
+ "directory patterns to avoid for recursion",
+ type="args",
+ default=[".*", "build", "dist", "CVS", "_darcs", "{arch}", "*.egg", "venv"],
+ )
+ parser.addini(
+ "testpaths",
+ "directories to search for tests when no files or directories are given in the "
+ "command line.",
+ type="args",
+ default=[],
+ )
+ # parser.addini("dirpatterns",
+ # "patterns specifying possible locations of test files",
+ # type="linelist", default=["**/test_*.txt",
+ # "**/test_*.py", "**/*_test.py"]
+ # )
+ group = parser.getgroup("general", "running and selection options")
+ group._addoption(
+ "-x",
+ "--exitfirst",
+ action="store_const",
+ dest="maxfail",
+ const=1,
+ help="exit instantly on first error or failed test.",
+    )
+ group._addoption(
+ "--maxfail",
+ metavar="num",
+ action="store",
+ type=int,
+ dest="maxfail",
+ default=0,
+ help="exit after first num failures or errors.",
+ )
+ group._addoption(
+ "--strict",
+ action="store_true",
+ help="marks not registered in configuration file raise errors.",
+ )
+ group._addoption(
+ "-c",
+ metavar="file",
+ type=str,
+ dest="inifilename",
+ help="load configuration from `file` instead of trying to locate one of the implicit "
+ "configuration files.",
+ )
+ group._addoption(
+ "--continue-on-collection-errors",
+ action="store_true",
+ default=False,
+ dest="continue_on_collection_errors",
+ help="Force test execution even if collection errors occur.",
+ )
+ group._addoption(
+ "--rootdir",
+ action="store",
+ dest="rootdir",
+ help="Define root directory for tests. Can be relative path: 'root_dir', './root_dir', "
+ "'root_dir/another_dir/'; absolute path: '/home/user/root_dir'; path with variables: "
+ "'$HOME/root_dir'.",
+ )
+
+ group = parser.getgroup("collect", "collection")
+ group.addoption(
+ "--collectonly",
+ "--collect-only",
+ action="store_true",
+ help="only collect tests, don't execute them.",
+    )
+ group.addoption(
+ "--pyargs",
+ action="store_true",
+ help="try to interpret all arguments as python packages.",
+ )
+ group.addoption(
+ "--ignore",
+ action="append",
+ metavar="path",
+ help="ignore path during collection (multi-allowed).",
+ )
+ group.addoption(
+ "--deselect",
+ action="append",
+ metavar="nodeid_prefix",
+ help="deselect item during collection (multi-allowed).",
+ )
+ # when changing this to --conf-cut-dir, config.py Conftest.setinitial
+ # needs upgrading as well
+ group.addoption(
+ "--confcutdir",
+ dest="confcutdir",
+ default=None,
+ metavar="dir",
+ type=functools.partial(directory_arg, optname="--confcutdir"),
+ help="only load conftest.py's relative to specified dir.",
+ )
+ group.addoption(
+ "--noconftest",
+ action="store_true",
+ dest="noconftest",
+ default=False,
+ help="Don't load any conftest.py files.",
+ )
+ group.addoption(
+ "--keepduplicates",
+ "--keep-duplicates",
+ action="store_true",
+ dest="keepduplicates",
+ default=False,
+ help="Keep duplicate tests.",
+ )
+ group.addoption(
+ "--collect-in-virtualenv",
+ action="store_true",
+ dest="collect_in_virtualenv",
+ default=False,
+ help="Don't ignore tests in a local virtualenv directory",
+ )
+
+ group = parser.getgroup("debugconfig", "test session debugging and configuration")
+ group.addoption(
+ "--basetemp",
+ dest="basetemp",
+ default=None,
+ metavar="dir",
+ help="base temporary directory for this test run.",
+ )
+
+
+def pytest_configure(config):
+ __import__("pytest").config = config # compatibility
+
+
+def wrap_session(config, doit):
+ """Skeleton command line program"""
+ session = Session(config)
+ session.exitstatus = EXIT_OK
+ initstate = 0
+ try:
+ try:
+ config._do_configure()
+ initstate = 1
+ config.hook.pytest_sessionstart(session=session)
+ initstate = 2
+ session.exitstatus = doit(config, session) or 0
+ except UsageError:
+ raise
+ except Failed:
+ session.exitstatus = EXIT_TESTSFAILED
+ except KeyboardInterrupt:
+ excinfo = _pytest._code.ExceptionInfo()
+ if initstate < 2 and isinstance(excinfo.value, exit.Exception):
+ sys.stderr.write("{}: {}\n".format(excinfo.typename, excinfo.value.msg))
+ config.hook.pytest_keyboard_interrupt(excinfo=excinfo)
+ session.exitstatus = EXIT_INTERRUPTED
+ except: # noqa
+ excinfo = _pytest._code.ExceptionInfo()
+ config.notify_exception(excinfo, config.option)
+ session.exitstatus = EXIT_INTERNALERROR
+ if excinfo.errisinstance(SystemExit):
+ sys.stderr.write("mainloop: caught Spurious SystemExit!\n")
+
+ finally:
+ excinfo = None # Explicitly break reference cycle.
+ session.startdir.chdir()
+ if initstate >= 2:
+ config.hook.pytest_sessionfinish(
+ session=session, exitstatus=session.exitstatus
+ )
+ config._ensure_unconfigure()
+ return session.exitstatus
+
+
+def pytest_cmdline_main(config):
+ return wrap_session(config, _main)
+
+
+def _main(config, session):
+ """ default command line protocol for initialization, session,
+ running tests and reporting. """
+ config.hook.pytest_collection(session=session)
+ config.hook.pytest_runtestloop(session=session)
+
+ if session.testsfailed:
+ return EXIT_TESTSFAILED
+ elif session.testscollected == 0:
+ return EXIT_NOTESTSCOLLECTED
+
+
+def pytest_collection(session):
+ return session.perform_collect()
+
+
+def pytest_runtestloop(session):
+ if session.testsfailed and not session.config.option.continue_on_collection_errors:
+ raise session.Interrupted("%d errors during collection" % session.testsfailed)
+
+ if session.config.option.collectonly:
+ return True
+
+ for i, item in enumerate(session.items):
+ nextitem = session.items[i + 1] if i + 1 < len(session.items) else None
+ item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
+ if session.shouldfail:
+ raise session.Failed(session.shouldfail)
+ if session.shouldstop:
+ raise session.Interrupted(session.shouldstop)
+ return True
+
+
+def _in_venv(path):
+ """Attempts to detect if ``path`` is the root of a Virtual Environment by
+ checking for the existence of the appropriate activate script"""
+ bindir = path.join("Scripts" if sys.platform.startswith("win") else "bin")
+ if not bindir.isdir():
+ return False
+ activates = (
+ "activate",
+ "activate.csh",
+ "activate.fish",
+ "Activate",
+ "Activate.bat",
+ "Activate.ps1",
+ )
+ return any([fname.basename in activates for fname in bindir.listdir()])
+
+
+def pytest_ignore_collect(path, config):
+ ignore_paths = config._getconftest_pathlist("collect_ignore", path=path.dirpath())
+ ignore_paths = ignore_paths or []
+ excludeopt = config.getoption("ignore")
+ if excludeopt:
+ ignore_paths.extend([py.path.local(x) for x in excludeopt])
+
+ if py.path.local(path) in ignore_paths:
+ return True
+
+ allow_in_venv = config.getoption("collect_in_virtualenv")
+ if _in_venv(path) and not allow_in_venv:
+ return True
+
+ # Skip duplicate paths.
+ keepduplicates = config.getoption("keepduplicates")
+ duplicate_paths = config.pluginmanager._duplicatepaths
+ if not keepduplicates:
+ if path in duplicate_paths:
+ return True
+ else:
+ duplicate_paths.add(path)
+
+ return False
+
+
+def pytest_collection_modifyitems(items, config):
+ deselect_prefixes = tuple(config.getoption("deselect") or [])
+ if not deselect_prefixes:
+ return
+
+ remaining = []
+ deselected = []
+ for colitem in items:
+ if colitem.nodeid.startswith(deselect_prefixes):
+ deselected.append(colitem)
+ else:
+ remaining.append(colitem)
+
+ if deselected:
+ config.hook.pytest_deselected(items=deselected)
+ items[:] = remaining
+
+
+@contextlib.contextmanager
+def _patched_find_module():
+ """Patch bug in pkgutil.ImpImporter.find_module
+
+ When using pkgutil.find_loader on python<3.4 it removes symlinks
+ from the path due to a call to os.path.realpath. This is not consistent
+ with actually doing the import (in these versions, pkgutil and __import__
+ did not share the same underlying code). This can break conftest
+ discovery for pytest where symlinks are involved.
+
+    The only python<3.4 supported by pytest is python 2.7.
+ """
+ if six.PY2: # python 3.4+ uses importlib instead
+
+ def find_module_patched(self, fullname, path=None):
+ # Note: we ignore 'path' argument since it is only used via meta_path
+ subname = fullname.split(".")[-1]
+ if subname != fullname and self.path is None:
+ return None
+ if self.path is None:
+ path = None
+ else:
+ # original: path = [os.path.realpath(self.path)]
+ path = [self.path]
+ try:
+ file, filename, etc = pkgutil.imp.find_module(subname, path)
+ except ImportError:
+ return None
+ return pkgutil.ImpLoader(fullname, file, filename, etc)
+
+ old_find_module = pkgutil.ImpImporter.find_module
+ pkgutil.ImpImporter.find_module = find_module_patched
+ try:
+ yield
+ finally:
+ pkgutil.ImpImporter.find_module = old_find_module
+ else:
+ yield
+
+
+class FSHookProxy(object):
+
+ def __init__(self, fspath, pm, remove_mods):
+ self.fspath = fspath
+ self.pm = pm
+ self.remove_mods = remove_mods
+
+ def __getattr__(self, name):
+ x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods)
+ self.__dict__[name] = x
+ return x
+
+
+class NoMatch(Exception):
+ """ raised if matching cannot locate a matching names. """
+
+
+class Interrupted(KeyboardInterrupt):
+ """ signals an interrupted test run. """
+ __module__ = "builtins" # for py3
+
+
+class Failed(Exception):
+ """ signals a stop as failed test run. """
+
+
+class Session(nodes.FSCollector):
+ Interrupted = Interrupted
+ Failed = Failed
+
+ def __init__(self, config):
+ nodes.FSCollector.__init__(
+ self, config.rootdir, parent=None, config=config, session=self, nodeid=""
+ )
+ self.testsfailed = 0
+ self.testscollected = 0
+ self.shouldstop = False
+ self.shouldfail = False
+ self.trace = config.trace.root.get("collection")
+ self._norecursepatterns = config.getini("norecursedirs")
+ self.startdir = py.path.local()
+
+ self.config.pluginmanager.register(self, name="session")
+
+ @hookimpl(tryfirst=True)
+ def pytest_collectstart(self):
+ if self.shouldfail:
+ raise self.Failed(self.shouldfail)
+ if self.shouldstop:
+ raise self.Interrupted(self.shouldstop)
+
+ @hookimpl(tryfirst=True)
+ def pytest_runtest_logreport(self, report):
+ if report.failed and not hasattr(report, "wasxfail"):
+ self.testsfailed += 1
+ maxfail = self.config.getvalue("maxfail")
+ if maxfail and self.testsfailed >= maxfail:
+ self.shouldfail = "stopping after %d failures" % (self.testsfailed)
+
+ pytest_collectreport = pytest_runtest_logreport
+
+ def isinitpath(self, path):
+ return path in self._initialpaths
+
+ def gethookproxy(self, fspath):
+ # check if we have the common case of running
+ # hooks with all conftest.py files
+ pm = self.config.pluginmanager
+ my_conftestmodules = pm._getconftestmodules(fspath)
+ remove_mods = pm._conftest_plugins.difference(my_conftestmodules)
+ if remove_mods:
+ # one or more conftests are not in use at this fspath
+ proxy = FSHookProxy(fspath, pm, remove_mods)
+ else:
+            # all plugins are active for this fspath
+ proxy = self.config.hook
+ return proxy
+
+ def perform_collect(self, args=None, genitems=True):
+ hook = self.config.hook
+ try:
+ items = self._perform_collect(args, genitems)
+ self.config.pluginmanager.check_pending()
+ hook.pytest_collection_modifyitems(
+ session=self, config=self.config, items=items
+ )
+ finally:
+ hook.pytest_collection_finish(session=self)
+ self.testscollected = len(items)
+ return items
+
+ def _perform_collect(self, args, genitems):
+ if args is None:
+ args = self.config.args
+ self.trace("perform_collect", self, args)
+ self.trace.root.indent += 1
+ self._notfound = []
+ self._initialpaths = set()
+ self._initialparts = []
+ self.items = items = []
+ for arg in args:
+ parts = self._parsearg(arg)
+ self._initialparts.append(parts)
+ self._initialpaths.add(parts[0])
+ rep = collect_one_node(self)
+ self.ihook.pytest_collectreport(report=rep)
+ self.trace.root.indent -= 1
+ if self._notfound:
+ errors = []
+ for arg, exc in self._notfound:
+ line = "(no name %r in any of %r)" % (arg, exc.args[0])
+ errors.append("not found: %s\n%s" % (arg, line))
+ # XXX: test this
+ raise UsageError(*errors)
+ if not genitems:
+ return rep.result
+ else:
+ if rep.passed:
+ for node in rep.result:
+ self.items.extend(self.genitems(node))
+ return items
+
+ def collect(self):
+ for parts in self._initialparts:
+ arg = "::".join(map(str, parts))
+ self.trace("processing argument", arg)
+ self.trace.root.indent += 1
+ try:
+ for x in self._collect(arg):
+ yield x
+ except NoMatch:
+ # we are inside a make_report hook so
+ # we cannot directly pass through the exception
+ self._notfound.append((arg, sys.exc_info()[1]))
+
+ self.trace.root.indent -= 1
+
+ def _collect(self, arg):
+ names = self._parsearg(arg)
+ path = names.pop(0)
+ if path.check(dir=1):
+ assert not names, "invalid arg %r" % (arg,)
+ for path in path.visit(
+ fil=lambda x: x.check(file=1), rec=self._recurse, bf=True, sort=True
+ ):
+ for x in self._collectfile(path):
+ yield x
+ else:
+ assert path.check(file=1)
+ for x in self.matchnodes(self._collectfile(path), names):
+ yield x
+
+ def _collectfile(self, path):
+ ihook = self.gethookproxy(path)
+ if not self.isinitpath(path):
+ if ihook.pytest_ignore_collect(path=path, config=self.config):
+ return ()
+ return ihook.pytest_collect_file(path=path, parent=self)
+
+ def _recurse(self, path):
+ ihook = self.gethookproxy(path.dirpath())
+ if ihook.pytest_ignore_collect(path=path, config=self.config):
+ return
+ for pat in self._norecursepatterns:
+ if path.check(fnmatch=pat):
+ return False
+ ihook = self.gethookproxy(path)
+ ihook.pytest_collect_directory(path=path, parent=self)
+ return True
+
+ def _tryconvertpyarg(self, x):
+ """Convert a dotted module name to path.
+
+ """
+
+ try:
+ with _patched_find_module():
+ loader = pkgutil.find_loader(x)
+ except ImportError:
+ return x
+ if loader is None:
+ return x
+ # This method is sometimes invoked when AssertionRewritingHook, which
+ # does not define a get_filename method, is already in place:
+ try:
+ with _patched_find_module():
+ path = loader.get_filename(x)
+ except AttributeError:
+ # Retrieve path from AssertionRewritingHook:
+ path = loader.modules[x][0].co_filename
+ if loader.is_package(x):
+ path = os.path.dirname(path)
+ return path
+
+ def _parsearg(self, arg):
+ """ return (fspath, names) tuple after checking the file exists. """
+ parts = str(arg).split("::")
+ if self.config.option.pyargs:
+ parts[0] = self._tryconvertpyarg(parts[0])
+ relpath = parts[0].replace("/", os.sep)
+ path = self.config.invocation_dir.join(relpath, abs=True)
+ if not path.check():
+ if self.config.option.pyargs:
+ raise UsageError(
+ "file or package not found: " + arg + " (missing __init__.py?)"
+ )
+ else:
+ raise UsageError("file not found: " + arg)
+ parts[0] = path
+ return parts
+
+ def matchnodes(self, matching, names):
+ self.trace("matchnodes", matching, names)
+ self.trace.root.indent += 1
+ nodes = self._matchnodes(matching, names)
+ num = len(nodes)
+ self.trace("matchnodes finished -> ", num, "nodes")
+ self.trace.root.indent -= 1
+ if num == 0:
+ raise NoMatch(matching, names[:1])
+ return nodes
+
+ def _matchnodes(self, matching, names):
+ if not matching or not names:
+ return matching
+ name = names[0]
+ assert name
+ nextnames = names[1:]
+ resultnodes = []
+ for node in matching:
+ if isinstance(node, nodes.Item):
+ if not names:
+ resultnodes.append(node)
+ continue
+ assert isinstance(node, nodes.Collector)
+ rep = collect_one_node(node)
+ if rep.passed:
+ has_matched = False
+ for x in rep.result:
+ # TODO: remove parametrized workaround once collection structure contains parametrization
+ if x.name == name or x.name.split("[")[0] == name:
+ resultnodes.extend(self.matchnodes([x], nextnames))
+ has_matched = True
+ # XXX accept IDs that don't have "()" for class instances
+ if not has_matched and len(rep.result) == 1 and x.name == "()":
+ nextnames.insert(0, name)
+ resultnodes.extend(self.matchnodes([x], nextnames))
+ else:
+ # report collection failures here to avoid failing to run some test
+ # specified in the command line because the module could not be
+ # imported (#134)
+ node.ihook.pytest_collectreport(report=rep)
+ return resultnodes
+
+ def genitems(self, node):
+ self.trace("genitems", node)
+ if isinstance(node, nodes.Item):
+ node.ihook.pytest_itemcollected(item=node)
+ yield node
+ else:
+ assert isinstance(node, nodes.Collector)
+ rep = collect_one_node(node)
+ if rep.passed:
+ for subnode in rep.result:
+ for x in self.genitems(subnode):
+ yield x
+ node.ihook.pytest_collectreport(report=rep)
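
A minimal sketch of how the EXIT_* constants defined at the top of main.py surface to callers: wrap_session() returns one of them and pytest.main() (a real entry point) propagates it. The argument list and the "tests/" path here are illustrative assumptions:

import pytest

exit_code = pytest.main(["-x", "tests/"])  # "tests/" is an assumed path

if exit_code == 0:        # EXIT_OK
    print("all tests passed")
elif exit_code == 1:      # EXIT_TESTSFAILED
    print("some tests failed")
elif exit_code == 5:      # EXIT_NOTESTSCOLLECTED
    print("no tests were collected")
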
diff --git a/third_party/python/pytest/src/_pytest/mark/__init__.py b/third_party/python/pytest/src/_pytest/mark/__init__.py
new file mode 100644
index 0000000000..e3918ca6a4
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/mark/__init__.py
@@ -0,0 +1,174 @@
+""" generic mechanism for marking and selecting python functions. """
+from __future__ import absolute_import, division, print_function
+from _pytest.config import UsageError
+from .structures import (
+ ParameterSet,
+ EMPTY_PARAMETERSET_OPTION,
+ MARK_GEN,
+ Mark,
+ MarkInfo,
+ MarkDecorator,
+ MarkGenerator,
+ transfer_markers,
+ get_empty_parameterset_mark,
+)
+from .legacy import matchkeyword, matchmark
+
+__all__ = [
+ "Mark",
+ "MarkInfo",
+ "MarkDecorator",
+ "MarkGenerator",
+ "transfer_markers",
+ "get_empty_parameterset_mark",
+]
+
+
+class MarkerError(Exception):
+
+ """Error in use of a pytest marker/attribute."""
+
+
+def param(*values, **kw):
+ """Specify a parameter in `pytest.mark.parametrize`_ calls or
+ :ref:`parametrized fixtures <fixture-parametrize-marks>`.
+
+ .. code-block:: python
+
+ @pytest.mark.parametrize("test_input,expected", [
+ ("3+5", 8),
+ pytest.param("6*9", 42, marks=pytest.mark.xfail),
+ ])
+ def test_eval(test_input, expected):
+ assert eval(test_input) == expected
+
+ :param values: variable args of the values of the parameter set, in order.
+ :keyword marks: a single mark or a list of marks to be applied to this parameter set.
+ :keyword str id: the id to attribute to this parameter set.
+ """
+ return ParameterSet.param(*values, **kw)
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group._addoption(
+ "-k",
+ action="store",
+ dest="keyword",
+ default="",
+ metavar="EXPRESSION",
+ help="only run tests which match the given substring expression. "
+ "An expression is a python evaluatable expression "
+ "where all names are substring-matched against test names "
+ "and their parent classes. Example: -k 'test_method or test_"
+ "other' matches all test functions and classes whose name "
+ "contains 'test_method' or 'test_other', while -k 'not test_method' "
+ "matches those that don't contain 'test_method' in their names. "
+ "Additionally keywords are matched to classes and functions "
+ "containing extra names in their 'extra_keyword_matches' set, "
+ "as well as functions which have names assigned directly to them.",
+ )
+
+ group._addoption(
+ "-m",
+ action="store",
+ dest="markexpr",
+ default="",
+ metavar="MARKEXPR",
+ help="only run tests matching given mark expression. "
+ "example: -m 'mark1 and not mark2'.",
+ )
+
+ group.addoption(
+ "--markers",
+ action="store_true",
+ help="show markers (builtin, plugin and per-project ones).",
+ )
+
+ parser.addini("markers", "markers for test functions", "linelist")
+ parser.addini(EMPTY_PARAMETERSET_OPTION, "default marker for empty parametersets")
+
+
+def pytest_cmdline_main(config):
+ import _pytest.config
+
+ if config.option.markers:
+ config._do_configure()
+ tw = _pytest.config.create_terminal_writer(config)
+ for line in config.getini("markers"):
+ parts = line.split(":", 1)
+ name = parts[0]
+ rest = parts[1] if len(parts) == 2 else ""
+ tw.write("@pytest.mark.%s:" % name, bold=True)
+ tw.line(rest)
+ tw.line()
+ config._ensure_unconfigure()
+ return 0
+
+
+pytest_cmdline_main.tryfirst = True
+
+
+def deselect_by_keyword(items, config):
+ keywordexpr = config.option.keyword.lstrip()
+ if keywordexpr.startswith("-"):
+ keywordexpr = "not " + keywordexpr[1:]
+ selectuntil = False
+ if keywordexpr[-1:] == ":":
+ selectuntil = True
+ keywordexpr = keywordexpr[:-1]
+
+ remaining = []
+ deselected = []
+ for colitem in items:
+ if keywordexpr and not matchkeyword(colitem, keywordexpr):
+ deselected.append(colitem)
+ else:
+ if selectuntil:
+ keywordexpr = None
+ remaining.append(colitem)
+
+ if deselected:
+ config.hook.pytest_deselected(items=deselected)
+ items[:] = remaining
+
+
+def deselect_by_mark(items, config):
+ matchexpr = config.option.markexpr
+ if not matchexpr:
+ return
+
+ remaining = []
+ deselected = []
+ for item in items:
+ if matchmark(item, matchexpr):
+ remaining.append(item)
+ else:
+ deselected.append(item)
+
+ if deselected:
+ config.hook.pytest_deselected(items=deselected)
+ items[:] = remaining
+
+
+def pytest_collection_modifyitems(items, config):
+ deselect_by_keyword(items, config)
+ deselect_by_mark(items, config)
+
+
+def pytest_configure(config):
+ config._old_mark_config = MARK_GEN._config
+ if config.option.strict:
+ MARK_GEN._config = config
+
+ empty_parameterset = config.getini(EMPTY_PARAMETERSET_OPTION)
+
+ if empty_parameterset not in ("skip", "xfail", None, ""):
+ raise UsageError(
+ "{!s} must be one of skip and xfail,"
+ " but it is {!r}".format(EMPTY_PARAMETERSET_OPTION, empty_parameterset)
+ )
+
+
+def pytest_unconfigure(config):
+ MARK_GEN._config = getattr(config, "_old_mark_config", None)
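
To illustrate how the -m/--markers machinery above is typically used: markers are registered via the "markers" ini option (which also satisfies --strict) and selected or deselected on the command line. The marker name "slow" is an assumption for the sketch:

# pytest.ini (illustrative)
# [pytest]
# markers =
#     slow: marks tests as slow

import pytest

@pytest.mark.slow
def test_big_computation():
    assert sum(range(10)) == 45

# Handled by deselect_by_mark()/deselect_by_keyword() above:
#   pytest -m slow          # run only tests marked slow
#   pytest -m "not slow"    # deselect them
#   pytest -k computation   # keyword (substring) selection
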
diff --git a/third_party/python/pytest/src/_pytest/mark/evaluate.py b/third_party/python/pytest/src/_pytest/mark/evaluate.py
new file mode 100644
index 0000000000..a3d11ee0f8
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/mark/evaluate.py
@@ -0,0 +1,121 @@
+import os
+import six
+import sys
+import platform
+import traceback
+
+from ..outcomes import fail, TEST_OUTCOME
+
+
+def cached_eval(config, expr, d):
+ if not hasattr(config, "_evalcache"):
+ config._evalcache = {}
+ try:
+ return config._evalcache[expr]
+ except KeyError:
+ import _pytest._code
+
+ exprcode = _pytest._code.compile(expr, mode="eval")
+ config._evalcache[expr] = x = eval(exprcode, d)
+ return x
+
+
+class MarkEvaluator(object):
+
+ def __init__(self, item, name):
+ self.item = item
+ self._marks = None
+ self._mark = None
+ self._mark_name = name
+
+ def __bool__(self):
+        # don't cache here to prevent staleness
+ return bool(self._get_marks())
+
+ __nonzero__ = __bool__
+
+ def wasvalid(self):
+ return not hasattr(self, "exc")
+
+ def _get_marks(self):
+ return list(self.item.iter_markers(name=self._mark_name))
+
+ def invalidraise(self, exc):
+ raises = self.get("raises")
+ if not raises:
+ return
+ return not isinstance(exc, raises)
+
+ def istrue(self):
+ try:
+ return self._istrue()
+ except TEST_OUTCOME:
+ self.exc = sys.exc_info()
+ if isinstance(self.exc[1], SyntaxError):
+ msg = [" " * (self.exc[1].offset + 4) + "^"]
+ msg.append("SyntaxError: invalid syntax")
+ else:
+ msg = traceback.format_exception_only(*self.exc[:2])
+ fail(
+ "Error evaluating %r expression\n"
+ " %s\n"
+ "%s" % (self._mark_name, self.expr, "\n".join(msg)),
+ pytrace=False,
+ )
+
+ def _getglobals(self):
+ d = {"os": os, "sys": sys, "platform": platform, "config": self.item.config}
+ if hasattr(self.item, "obj"):
+ d.update(self.item.obj.__globals__)
+ return d
+
+ def _istrue(self):
+ if hasattr(self, "result"):
+ return self.result
+ self._marks = self._get_marks()
+
+ if self._marks:
+ self.result = False
+ for mark in self._marks:
+ self._mark = mark
+ if "condition" in mark.kwargs:
+ args = (mark.kwargs["condition"],)
+ else:
+ args = mark.args
+
+ for expr in args:
+ self.expr = expr
+ if isinstance(expr, six.string_types):
+ d = self._getglobals()
+ result = cached_eval(self.item.config, expr, d)
+ else:
+ if "reason" not in mark.kwargs:
+ # XXX better be checked at collection time
+ msg = "you need to specify reason=STRING " "when using booleans as conditions."
+ fail(msg)
+ result = bool(expr)
+ if result:
+ self.result = True
+ self.reason = mark.kwargs.get("reason", None)
+ self.expr = expr
+ return self.result
+
+ if not args:
+ self.result = True
+ self.reason = mark.kwargs.get("reason", None)
+ return self.result
+ return False
+
+ def get(self, attr, default=None):
+ if self._mark is None:
+ return default
+ return self._mark.kwargs.get(attr, default)
+
+ def getexplanation(self):
+ expl = getattr(self, "reason", None) or self.get("reason", None)
+ if not expl:
+ if not hasattr(self, "expr"):
+ return ""
+ else:
+ return "condition: " + str(self.expr)
+ return expl
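
MarkEvaluator above accepts either string conditions (compiled and eval'd by cached_eval() against os, sys, platform and config) or plain booleans, which it requires to carry an explicit reason. A small hedged sketch of both forms as they appear in user code:

import sys
import pytest

# String condition: evaluated by cached_eval() with os/sys/platform in scope.
@pytest.mark.skipif("sys.platform == 'win32'")
def test_posix_only():
    assert True

# Boolean condition: MarkEvaluator insists on reason=... in this case.
@pytest.mark.skipif(sys.version_info < (3,), reason="requires Python 3")
def test_py3_only():
    assert True
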
diff --git a/third_party/python/pytest/src/_pytest/mark/legacy.py b/third_party/python/pytest/src/_pytest/mark/legacy.py
new file mode 100644
index 0000000000..ab016a0355
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/mark/legacy.py
@@ -0,0 +1,97 @@
+"""
+this is a place where we put data structures used by legacy APIs
+we hope to remove
+"""
+import attr
+import keyword
+
+from _pytest.config import UsageError
+
+
+@attr.s
+class MarkMapping(object):
+ """Provides a local mapping for markers where item access
+ resolves to True if the marker is present. """
+
+ own_mark_names = attr.ib()
+
+ @classmethod
+ def from_item(cls, item):
+ mark_names = {mark.name for mark in item.iter_markers()}
+ return cls(mark_names)
+
+ def __getitem__(self, name):
+ return name in self.own_mark_names
+
+
+class KeywordMapping(object):
+ """Provides a local mapping for keywords.
+ Given a list of names, map any substring of one of these names to True.
+ """
+
+ def __init__(self, names):
+ self._names = names
+
+ @classmethod
+ def from_item(cls, item):
+ mapped_names = set()
+
+ # Add the names of the current item and any parent items
+ import pytest
+
+ for item in item.listchain():
+ if not isinstance(item, pytest.Instance):
+ mapped_names.add(item.name)
+
+ # Add the names added as extra keywords to current or parent items
+ for name in item.listextrakeywords():
+ mapped_names.add(name)
+
+ # Add the names attached to the current function through direct assignment
+ if hasattr(item, "function"):
+ for name in item.function.__dict__:
+ mapped_names.add(name)
+
+ return cls(mapped_names)
+
+ def __getitem__(self, subname):
+ for name in self._names:
+ if subname in name:
+ return True
+ return False
+
+
+python_keywords_allowed_list = ["or", "and", "not"]
+
+
+def matchmark(colitem, markexpr):
+ """Tries to match on any marker names, attached to the given colitem."""
+ return eval(markexpr, {}, MarkMapping.from_item(colitem))
+
+
+def matchkeyword(colitem, keywordexpr):
+ """Tries to match given keyword expression to given collector item.
+
+ Will match on the name of colitem, including the names of its parents.
+ Only matches names of items which are either a :class:`Class` or a
+ :class:`Function`.
+ Additionally, matches on names in the 'extra_keyword_matches' set of
+ any item, as well as names directly assigned to test functions.
+ """
+ mapping = KeywordMapping.from_item(colitem)
+ if " " not in keywordexpr:
+ # special case to allow for simple "-k pass" and "-k 1.3"
+ return mapping[keywordexpr]
+ elif keywordexpr.startswith("not ") and " " not in keywordexpr[4:]:
+ return not mapping[keywordexpr[4:]]
+ for kwd in keywordexpr.split():
+ if keyword.iskeyword(kwd) and kwd not in python_keywords_allowed_list:
+ raise UsageError(
+ "Python keyword '{}' not accepted in expressions passed to '-k'".format(
+ kwd
+ )
+ )
+ try:
+ return eval(keywordexpr, {}, mapping)
+ except SyntaxError:
+ raise UsageError("Wrong expression passed to '-k': {}".format(keywordexpr))
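
matchmark()/matchkeyword() above work by eval'ing the -m/-k expression with a mapping object as locals, so that each bare name resolves to "is this marker/keyword present?". A stand-in sketch of that idea (NamePresence is not the real MarkMapping, just an illustration):

class NamePresence(object):
    """Item access answers "is this name present?"; eval() combines the
    answers with and/or/not, mirroring MarkMapping/KeywordMapping."""

    def __init__(self, names):
        self._names = set(names)

    def __getitem__(self, name):
        return name in self._names

mapping = NamePresence({"slow", "network"})
print(eval("slow and not network", {}, mapping))   # False
print(eval("slow or serial", {}, mapping))         # True
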
diff --git a/third_party/python/pytest/src/_pytest/mark/structures.py b/third_party/python/pytest/src/_pytest/mark/structures.py
new file mode 100644
index 0000000000..1a2bd73de5
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/mark/structures.py
@@ -0,0 +1,428 @@
+import inspect
+import warnings
+from collections import namedtuple
+from operator import attrgetter
+
+import attr
+
+from ..deprecated import MARK_PARAMETERSET_UNPACKING, MARK_INFO_ATTRIBUTE
+from ..compat import NOTSET, getfslineno, MappingMixin
+from six.moves import map, reduce
+
+
+EMPTY_PARAMETERSET_OPTION = "empty_parameter_set_mark"
+
+
+def alias(name, warning=None):
+ getter = attrgetter(name)
+
+ def warned(self):
+ warnings.warn(warning, stacklevel=2)
+ return getter(self)
+
+ return property(getter if warning is None else warned, doc="alias for " + name)
+
+
+def istestfunc(func):
+ return hasattr(func, "__call__") and getattr(
+ func, "__name__", "<lambda>"
+ ) != "<lambda>"
+
+
+def get_empty_parameterset_mark(config, argnames, func):
+ requested_mark = config.getini(EMPTY_PARAMETERSET_OPTION)
+ if requested_mark in ("", None, "skip"):
+ mark = MARK_GEN.skip
+ elif requested_mark == "xfail":
+ mark = MARK_GEN.xfail(run=False)
+ else:
+ raise LookupError(requested_mark)
+ fs, lineno = getfslineno(func)
+ reason = "got empty parameter set %r, function %s at %s:%d" % (
+ argnames, func.__name__, fs, lineno
+ )
+ return mark(reason=reason)
+
+
+class ParameterSet(namedtuple("ParameterSet", "values, marks, id")):
+
+ @classmethod
+ def param(cls, *values, **kw):
+ marks = kw.pop("marks", ())
+ if isinstance(marks, MarkDecorator):
+ marks = marks,
+ else:
+ assert isinstance(marks, (tuple, list, set))
+
+ def param_extract_id(id=None):
+ return id
+
+ id_ = param_extract_id(**kw)
+ return cls(values, marks, id_)
+
+ @classmethod
+ def extract_from(cls, parameterset, legacy_force_tuple=False):
+ """
+ :param parameterset:
+ a legacy style parameterset that may or may not be a tuple,
+ and may or may not be wrapped into a mess of mark objects
+
+ :param legacy_force_tuple:
+ enforce tuple wrapping so single argument tuple values
+ don't get decomposed and break tests
+
+ """
+
+ if isinstance(parameterset, cls):
+ return parameterset
+ if not isinstance(parameterset, MarkDecorator) and legacy_force_tuple:
+ return cls.param(parameterset)
+
+ newmarks = []
+ argval = parameterset
+ while isinstance(argval, MarkDecorator):
+ newmarks.append(
+ MarkDecorator(Mark(argval.markname, argval.args[:-1], argval.kwargs))
+ )
+ argval = argval.args[-1]
+ assert not isinstance(argval, ParameterSet)
+ if legacy_force_tuple:
+ argval = argval,
+
+ if newmarks:
+ warnings.warn(MARK_PARAMETERSET_UNPACKING)
+
+ return cls(argval, marks=newmarks, id=None)
+
+ @classmethod
+ def _for_parametrize(cls, argnames, argvalues, func, config):
+ if not isinstance(argnames, (tuple, list)):
+ argnames = [x.strip() for x in argnames.split(",") if x.strip()]
+ force_tuple = len(argnames) == 1
+ else:
+ force_tuple = False
+ parameters = [
+ ParameterSet.extract_from(x, legacy_force_tuple=force_tuple)
+ for x in argvalues
+ ]
+ del argvalues
+
+ if not parameters:
+ mark = get_empty_parameterset_mark(config, argnames, func)
+ parameters.append(
+ ParameterSet(values=(NOTSET,) * len(argnames), marks=[mark], id=None)
+ )
+ return argnames, parameters
+
+
+@attr.s(frozen=True)
+class Mark(object):
+ #: name of the mark
+ name = attr.ib(type=str)
+ #: positional arguments of the mark decorator
+ args = attr.ib(type="List[object]")
+ #: keyword arguments of the mark decorator
+ kwargs = attr.ib(type="Dict[str, object]")
+
+ def combined_with(self, other):
+ """
+ :param other: the mark to combine with
+ :type other: Mark
+ :rtype: Mark
+
+        combines by appending args and merging the mappings
+ """
+ assert self.name == other.name
+ return Mark(
+ self.name, self.args + other.args, dict(self.kwargs, **other.kwargs)
+ )
+
+
+@attr.s
+class MarkDecorator(object):
+ """ A decorator for test functions and test classes. When applied
+ it will create :class:`MarkInfo` objects which may be
+ :ref:`retrieved by hooks as item keywords <excontrolskip>`.
+ MarkDecorator instances are often created like this::
+
+ mark1 = pytest.mark.NAME # simple MarkDecorator
+ mark2 = pytest.mark.NAME(name1=value) # parametrized MarkDecorator
+
+ and can then be applied as decorators to test functions::
+
+ @mark2
+ def test_function():
+ pass
+
+ When a MarkDecorator instance is called it does the following:
+ 1. If called with a single class as its only positional argument and no
+ additional keyword arguments, it attaches itself to the class so it
+ gets applied automatically to all test cases found in that class.
+ 2. If called with a single function as its only positional argument and
+ no additional keyword arguments, it attaches a MarkInfo object to the
+ function, containing all the arguments already stored internally in
+ the MarkDecorator.
+ 3. When called in any other case, it performs a 'fake construction' call,
+ i.e. it returns a new MarkDecorator instance with the original
+ MarkDecorator's content updated with the arguments passed to this
+ call.
+
+ Note: The rules above prevent MarkDecorator objects from storing only a
+ single function or class reference as their positional argument with no
+ additional keyword or positional arguments.
+
+ """
+
+ mark = attr.ib(validator=attr.validators.instance_of(Mark))
+
+ name = alias("mark.name")
+ args = alias("mark.args")
+ kwargs = alias("mark.kwargs")
+
+ @property
+ def markname(self):
+ return self.name # for backward-compat (2.4.1 had this attr)
+
+ def __eq__(self, other):
+ return self.mark == other.mark if isinstance(other, MarkDecorator) else False
+
+ def __repr__(self):
+ return "<MarkDecorator %r>" % (self.mark,)
+
+ def with_args(self, *args, **kwargs):
+ """ return a MarkDecorator with extra arguments added
+
+ unlike call this can be used even if the sole argument is a callable/class
+
+ :return: MarkDecorator
+ """
+
+ mark = Mark(self.name, args, kwargs)
+ return self.__class__(self.mark.combined_with(mark))
+
+ def __call__(self, *args, **kwargs):
+ """ if passed a single callable argument: decorate it with mark info.
+ otherwise add *args/**kwargs in-place to mark information. """
+ if args and not kwargs:
+ func = args[0]
+ is_class = inspect.isclass(func)
+ if len(args) == 1 and (istestfunc(func) or is_class):
+ if is_class:
+ store_mark(func, self.mark)
+ else:
+ store_legacy_markinfo(func, self.mark)
+ store_mark(func, self.mark)
+ return func
+ return self.with_args(*args, **kwargs)
+
+
+def get_unpacked_marks(obj):
+ """
+ obtain the unpacked marks that are stored on an object
+ """
+ mark_list = getattr(obj, "pytestmark", [])
+
+ if not isinstance(mark_list, list):
+ mark_list = [mark_list]
+ return [getattr(mark, "mark", mark) for mark in mark_list] # unpack MarkDecorator
+
+
+def store_mark(obj, mark):
+ """store a Mark on an object
+ this is used to implement the Mark declarations/decorators correctly
+ """
+ assert isinstance(mark, Mark), mark
+ # always reassign name to avoid updating pytestmark
+ # in a reference that was only borrowed
+ obj.pytestmark = get_unpacked_marks(obj) + [mark]
+
+
+def store_legacy_markinfo(func, mark):
+ """create the legacy MarkInfo objects and put them onto the function
+ """
+ if not isinstance(mark, Mark):
+ raise TypeError("got {mark!r} instead of a Mark".format(mark=mark))
+ holder = getattr(func, mark.name, None)
+ if holder is None:
+ holder = MarkInfo.for_mark(mark)
+ setattr(func, mark.name, holder)
+ else:
+ holder.add_mark(mark)
+
+
+def transfer_markers(funcobj, cls, mod):
+ """
+ this function transfers class level markers and module level markers
+ into function level markinfo objects
+
+ this is the main reason why marks are so broken
+ the resolution will involve phasing out function level MarkInfo objects
+
+ """
+ for obj in (cls, mod):
+ for mark in get_unpacked_marks(obj):
+ if not _marked(funcobj, mark):
+ store_legacy_markinfo(funcobj, mark)
+
+
+def _marked(func, mark):
+ """ Returns True if :func: is already marked with :mark:, False otherwise.
+    This can happen if the marker is applied to a class and the test file is
+    invoked more than once.
+ """
+ try:
+ func_mark = getattr(func, getattr(mark, "combined", mark).name)
+ except AttributeError:
+ return False
+ return any(mark == info.combined for info in func_mark)
+
+
+@attr.s
+class MarkInfo(object):
+ """ Marking object created by :class:`MarkDecorator` instances. """
+
+ _marks = attr.ib(convert=list)
+
+ @_marks.validator
+ def validate_marks(self, attribute, value):
+ for item in value:
+ if not isinstance(item, Mark):
+ raise ValueError(
+ "MarkInfo expects Mark instances, got {!r} ({!r})".format(
+ item, type(item)
+ )
+ )
+
+ combined = attr.ib(
+ repr=False,
+ default=attr.Factory(
+ lambda self: reduce(Mark.combined_with, self._marks), takes_self=True
+ ),
+ )
+
+ name = alias("combined.name", warning=MARK_INFO_ATTRIBUTE)
+ args = alias("combined.args", warning=MARK_INFO_ATTRIBUTE)
+ kwargs = alias("combined.kwargs", warning=MARK_INFO_ATTRIBUTE)
+
+ @classmethod
+ def for_mark(cls, mark):
+ return cls([mark])
+
+ def __repr__(self):
+ return "<MarkInfo {!r}>".format(self.combined)
+
+ def add_mark(self, mark):
+ """ add a MarkInfo with the given args and kwargs. """
+ self._marks.append(mark)
+ self.combined = self.combined.combined_with(mark)
+
+ def __iter__(self):
+ """ yield MarkInfo objects each relating to a marking-call. """
+ return map(MarkInfo.for_mark, self._marks)
+
+
+class MarkGenerator(object):
+ """ Factory for :class:`MarkDecorator` objects - exposed as
+ a ``pytest.mark`` singleton instance. Example::
+
+ import pytest
+ @pytest.mark.slowtest
+ def test_function():
+ pass
+
+ will set a 'slowtest' :class:`MarkInfo` object
+ on the ``test_function`` object. """
+ _config = None
+
+ def __getattr__(self, name):
+ if name[0] == "_":
+ raise AttributeError("Marker name must NOT start with underscore")
+ if self._config is not None:
+ self._check(name)
+ return MarkDecorator(Mark(name, (), {}))
+
+ def _check(self, name):
+ try:
+ if name in self._markers:
+ return
+ except AttributeError:
+ pass
+ self._markers = values = set()
+ for line in self._config.getini("markers"):
+ marker = line.split(":", 1)[0]
+ marker = marker.rstrip()
+ x = marker.split("(", 1)[0]
+ values.add(x)
+ if name not in self._markers:
+ raise AttributeError("%r not a registered marker" % (name,))
+
+
+MARK_GEN = MarkGenerator()
+
+
+class NodeKeywords(MappingMixin):
+
+ def __init__(self, node):
+ self.node = node
+ self.parent = node.parent
+ self._markers = {node.name: True}
+
+ def __getitem__(self, key):
+ try:
+ return self._markers[key]
+ except KeyError:
+ if self.parent is None:
+ raise
+ return self.parent.keywords[key]
+
+ def __setitem__(self, key, value):
+ self._markers[key] = value
+
+ def __delitem__(self, key):
+ raise ValueError("cannot delete key in keywords dict")
+
+ def __iter__(self):
+ seen = self._seen()
+ return iter(seen)
+
+ def _seen(self):
+ seen = set(self._markers)
+ if self.parent is not None:
+ seen.update(self.parent.keywords)
+ return seen
+
+ def __len__(self):
+ return len(self._seen())
+
+ def __repr__(self):
+ return "<NodeKeywords for node %s>" % (self.node,)
+
+
+@attr.s(cmp=False, hash=False)
+class NodeMarkers(object):
+ """
+    internal structure for storing marks belonging to a node
+
+    .. warning::
+
+ unstable api
+
+ """
+ own_markers = attr.ib(default=attr.Factory(list))
+
+ def update(self, add_markers):
+ """update the own markers
+ """
+ self.own_markers.extend(add_markers)
+
+ def find(self, name):
+ """
+ find markers in own nodes or parent nodes
+ needs a better place
+ """
+ for mark in self.own_markers:
+ if mark.name == name:
+ yield mark
+
+ def __iter__(self):
+ return iter(self.own_markers)
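
The Mark/MarkDecorator structures defined above are usually consumed from hooks via the node API (get_closest_marker/add_marker, defined in nodes.py below). A hedged sketch of the common "skip slow tests unless --runslow" pattern; the option name and the "slow" marker are assumptions for the example:

# conftest.py (illustrative)
import pytest

def pytest_addoption(parser):
    parser.addoption("--runslow", action="store_true", help="run slow tests")

def pytest_collection_modifyitems(config, items):
    if config.getoption("--runslow"):
        return
    skip_slow = pytest.mark.skip(reason="need --runslow option to run")
    for item in items:
        # get_closest_marker() exposes the Mark objects stored by the
        # structures in this file.
        if item.get_closest_marker("slow") is not None:
            item.add_marker(skip_slow)
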
diff --git a/third_party/python/pytest/src/_pytest/monkeypatch.py b/third_party/python/pytest/src/_pytest/monkeypatch.py
new file mode 100644
index 0000000000..16080b5d5c
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/monkeypatch.py
@@ -0,0 +1,283 @@
+""" monkeypatching and mocking functionality. """
+from __future__ import absolute_import, division, print_function
+
+import os
+import sys
+import re
+from contextlib import contextmanager
+
+import six
+from _pytest.fixtures import fixture
+
+RE_IMPORT_ERROR_NAME = re.compile("^No module named (.*)$")
+
+
+@fixture
+def monkeypatch():
+ """The returned ``monkeypatch`` fixture provides these
+ helper methods to modify objects, dictionaries or os.environ::
+
+ monkeypatch.setattr(obj, name, value, raising=True)
+ monkeypatch.delattr(obj, name, raising=True)
+ monkeypatch.setitem(mapping, name, value)
+ monkeypatch.delitem(obj, name, raising=True)
+ monkeypatch.setenv(name, value, prepend=False)
+ monkeypatch.delenv(name, value, raising=True)
+ monkeypatch.syspath_prepend(path)
+ monkeypatch.chdir(path)
+
+ All modifications will be undone after the requesting
+ test function or fixture has finished. The ``raising``
+ parameter determines if a KeyError or AttributeError
+ will be raised if the set/deletion operation has no target.
+ """
+ mpatch = MonkeyPatch()
+ yield mpatch
+ mpatch.undo()
+
+
+def resolve(name):
+ # simplified from zope.dottedname
+ parts = name.split(".")
+
+ used = parts.pop(0)
+ found = __import__(used)
+ for part in parts:
+ used += "." + part
+ try:
+ found = getattr(found, part)
+ except AttributeError:
+ pass
+ else:
+ continue
+ # we use explicit un-nesting of the handling block in order
+ # to avoid nested exceptions on python 3
+ try:
+ __import__(used)
+ except ImportError as ex:
+ # str is used for py2 vs py3
+ expected = str(ex).split()[-1]
+ if expected == used:
+ raise
+ else:
+ raise ImportError("import error in %s: %s" % (used, ex))
+ found = annotated_getattr(found, part, used)
+ return found
+
+
+def annotated_getattr(obj, name, ann):
+ try:
+ obj = getattr(obj, name)
+ except AttributeError:
+ raise AttributeError(
+ "%r object at %s has no attribute %r" % (type(obj).__name__, ann, name)
+ )
+ return obj
+
+
+def derive_importpath(import_path, raising):
+ if not isinstance(import_path, six.string_types) or "." not in import_path:
+ raise TypeError("must be absolute import path string, not %r" % (import_path,))
+ module, attr = import_path.rsplit(".", 1)
+ target = resolve(module)
+ if raising:
+ annotated_getattr(target, attr, ann=module)
+ return attr, target
+
+
+class Notset(object):
+
+ def __repr__(self):
+ return "<notset>"
+
+
+notset = Notset()
+
+
+class MonkeyPatch(object):
+ """ Object returned by the ``monkeypatch`` fixture keeping a record of setattr/item/env/syspath changes.
+ """
+
+ def __init__(self):
+ self._setattr = []
+ self._setitem = []
+ self._cwd = None
+ self._savesyspath = None
+
+ @contextmanager
+ def context(self):
+ """
+ Context manager that returns a new :class:`MonkeyPatch` object which
+ undoes any patching done inside the ``with`` block upon exit:
+
+ .. code-block:: python
+
+ import functools
+ def test_partial(monkeypatch):
+ with monkeypatch.context() as m:
+ m.setattr(functools, "partial", 3)
+
+ Useful in situations where it is desired to undo some patches before the test ends,
+ such as mocking ``stdlib`` functions that might break pytest itself if mocked (for examples
+        of this see `#3290 <https://github.com/pytest-dev/pytest/issues/3290>`_).
+ """
+ m = MonkeyPatch()
+ try:
+ yield m
+ finally:
+ m.undo()
+
+ def setattr(self, target, name, value=notset, raising=True):
+ """ Set attribute value on target, memorizing the old value.
+ By default raise AttributeError if the attribute did not exist.
+
+ For convenience you can specify a string as ``target`` which
+ will be interpreted as a dotted import path, with the last part
+ being the attribute name. Example:
+ ``monkeypatch.setattr("os.getcwd", lambda: "/")``
+ would set the ``getcwd`` function of the ``os`` module.
+
+ The ``raising`` value determines if the setattr should fail
+ if the attribute is not already present (defaults to True
+ which means it will raise).
+ """
+ __tracebackhide__ = True
+ import inspect
+
+ if value is notset:
+ if not isinstance(target, six.string_types):
+ raise TypeError(
+ "use setattr(target, name, value) or "
+ "setattr(target, value) with target being a dotted "
+ "import string"
+ )
+ value = name
+ name, target = derive_importpath(target, raising)
+
+ oldval = getattr(target, name, notset)
+ if raising and oldval is notset:
+ raise AttributeError("%r has no attribute %r" % (target, name))
+
+ # avoid class descriptors like staticmethod/classmethod
+ if inspect.isclass(target):
+ oldval = target.__dict__.get(name, notset)
+ self._setattr.append((target, name, oldval))
+ setattr(target, name, value)
+
+ def delattr(self, target, name=notset, raising=True):
+ """ Delete attribute ``name`` from ``target``, by default raise
+        AttributeError if the attribute did not previously exist.
+
+ If no ``name`` is specified and ``target`` is a string
+ it will be interpreted as a dotted import path with the
+ last part being the attribute name.
+
+ If ``raising`` is set to False, no exception will be raised if the
+ attribute is missing.
+ """
+ __tracebackhide__ = True
+ if name is notset:
+ if not isinstance(target, six.string_types):
+ raise TypeError(
+ "use delattr(target, name) or "
+ "delattr(target) with target being a dotted "
+ "import string"
+ )
+ name, target = derive_importpath(target, raising)
+
+ if not hasattr(target, name):
+ if raising:
+ raise AttributeError(name)
+ else:
+ self._setattr.append((target, name, getattr(target, name, notset)))
+ delattr(target, name)
+
+ def setitem(self, dic, name, value):
+ """ Set dictionary entry ``name`` to value. """
+ self._setitem.append((dic, name, dic.get(name, notset)))
+ dic[name] = value
+
+ def delitem(self, dic, name, raising=True):
+ """ Delete ``name`` from dict. Raise KeyError if it doesn't exist.
+
+ If ``raising`` is set to False, no exception will be raised if the
+ key is missing.
+ """
+ if name not in dic:
+ if raising:
+ raise KeyError(name)
+ else:
+ self._setitem.append((dic, name, dic.get(name, notset)))
+ del dic[name]
+
+ def setenv(self, name, value, prepend=None):
+ """ Set environment variable ``name`` to ``value``. If ``prepend``
+ is a character, read the current environment variable value
+ and prepend the ``value`` adjoined with the ``prepend`` character."""
+ value = str(value)
+ if prepend and name in os.environ:
+ value = value + prepend + os.environ[name]
+ self.setitem(os.environ, name, value)
+
+ def delenv(self, name, raising=True):
+ """ Delete ``name`` from the environment. Raise KeyError it does not
+ exist.
+
+ If ``raising`` is set to False, no exception will be raised if the
+ environment variable is missing.
+ """
+ self.delitem(os.environ, name, raising=raising)
+
+ def syspath_prepend(self, path):
+ """ Prepend ``path`` to ``sys.path`` list of import locations. """
+ if self._savesyspath is None:
+ self._savesyspath = sys.path[:]
+ sys.path.insert(0, str(path))
+
+ def chdir(self, path):
+ """ Change the current working directory to the specified path.
+ Path can be a string or a py.path.local object.
+ """
+ if self._cwd is None:
+ self._cwd = os.getcwd()
+ if hasattr(path, "chdir"):
+ path.chdir()
+ else:
+ os.chdir(path)
+
+ def undo(self):
+ """ Undo previous changes. This call consumes the
+ undo stack. Calling it a second time has no effect unless
+ you do more monkeypatching after the undo call.
+
+ There is generally no need to call `undo()`, since it is
+ called automatically during tear-down.
+
+ Note that the same `monkeypatch` fixture is used across a
+ single test function invocation. If `monkeypatch` is used both by
+ the test function itself and one of the test fixtures,
+ calling `undo()` will undo all of the changes made in
+ both functions.
+ """
+ for obj, name, value in reversed(self._setattr):
+ if value is not notset:
+ setattr(obj, name, value)
+ else:
+ delattr(obj, name)
+ self._setattr[:] = []
+ for dictionary, name, value in reversed(self._setitem):
+ if value is notset:
+ try:
+ del dictionary[name]
+ except KeyError:
+ pass # was already deleted, so we have the desired state
+ else:
+ dictionary[name] = value
+ self._setitem[:] = []
+ if self._savesyspath is not None:
+ sys.path[:] = self._savesyspath
+ self._savesyspath = None
+
+ if self._cwd is not None:
+ os.chdir(self._cwd)
+ self._cwd = None
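
A short usage sketch of the monkeypatch fixture defined above, using only the helpers listed in its docstring; the environment variable value and fake path are made up:

import os

def test_fake_home(monkeypatch):
    monkeypatch.setenv("HOME", "/tmp/fake-home")
    monkeypatch.setattr(os, "getcwd", lambda: "/tmp/fake-home")
    assert os.environ["HOME"] == "/tmp/fake-home"
    assert os.getcwd() == "/tmp/fake-home"
    # All modifications are undone automatically by MonkeyPatch.undo()
    # when the test finishes.
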
diff --git a/third_party/python/pytest/src/_pytest/nodes.py b/third_party/python/pytest/src/_pytest/nodes.py
new file mode 100644
index 0000000000..ef74c53eba
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/nodes.py
@@ -0,0 +1,419 @@
+from __future__ import absolute_import, division, print_function
+import os
+
+import six
+import py
+import attr
+
+import _pytest
+import _pytest._code
+
+from _pytest.mark.structures import NodeKeywords, MarkInfo
+
+SEP = "/"
+
+tracebackcutdir = py.path.local(_pytest.__file__).dirpath()
+
+
+def _splitnode(nodeid):
+ """Split a nodeid into constituent 'parts'.
+
+ Node IDs are strings, and can be things like:
+ ''
+ 'testing/code'
+ 'testing/code/test_excinfo.py'
+ 'testing/code/test_excinfo.py::TestFormattedExcinfo::()'
+
+ Return values are lists e.g.
+ []
+ ['testing', 'code']
+ ['testing', 'code', 'test_excinfo.py']
+ ['testing', 'code', 'test_excinfo.py', 'TestFormattedExcinfo', '()']
+ """
+ if nodeid == "":
+ # If there is no root node at all, return an empty list so the caller's logic can remain sane
+ return []
+ parts = nodeid.split(SEP)
+ # Replace single last element 'test_foo.py::Bar::()' with multiple elements 'test_foo.py', 'Bar', '()'
+ parts[-1:] = parts[-1].split("::")
+ return parts
+
+
+def ischildnode(baseid, nodeid):
+ """Return True if the nodeid is a child node of the baseid.
+
+ E.g. 'foo/bar::Baz::()' is a child of 'foo', 'foo/bar' and 'foo/bar::Baz', but not of 'foo/blorp'
+ """
+ base_parts = _splitnode(baseid)
+ node_parts = _splitnode(nodeid)
+ if len(node_parts) < len(base_parts):
+ return False
+ return node_parts[:len(base_parts)] == base_parts
+
+
+@attr.s
+class _CompatProperty(object):
+ name = attr.ib()
+
+ def __get__(self, obj, owner):
+ if obj is None:
+ return self
+
+ # TODO: reenable in the features branch
+ # warnings.warn(
+ # "usage of {owner!r}.{name} is deprecated, please use pytest.{name} instead".format(
+ # name=self.name, owner=type(owner).__name__),
+ # PendingDeprecationWarning, stacklevel=2)
+ return getattr(__import__("pytest"), self.name)
+
+
+class Node(object):
+ """ base class for Collector and Item the test collection tree.
+ Collector subclasses have children, Items are terminal nodes."""
+
+ def __init__(
+ self, name, parent=None, config=None, session=None, fspath=None, nodeid=None
+ ):
+ #: a unique name within the scope of the parent node
+ self.name = name
+
+ #: the parent collector node.
+ self.parent = parent
+
+ #: the pytest config object
+ self.config = config or parent.config
+
+ #: the session this node is part of
+ self.session = session or parent.session
+
+ #: filesystem path where this node was collected from (can be None)
+ self.fspath = fspath or getattr(parent, "fspath", None)
+
+ #: keywords/markers collected from all scopes
+ self.keywords = NodeKeywords(self)
+
+ #: the marker objects belonging to this node
+ self.own_markers = []
+
+ #: allow adding of extra keywords to use for matching
+ self.extra_keyword_matches = set()
+
+ # used for storing artificial fixturedefs for direct parametrization
+ self._name2pseudofixturedef = {}
+
+ if nodeid is not None:
+ self._nodeid = nodeid
+ else:
+ assert parent is not None
+ self._nodeid = self.parent.nodeid + "::" + self.name
+
+ @property
+ def ihook(self):
+ """ fspath sensitive hook proxy used to call pytest hooks"""
+ return self.session.gethookproxy(self.fspath)
+
+ Module = _CompatProperty("Module")
+ Class = _CompatProperty("Class")
+ Instance = _CompatProperty("Instance")
+ Function = _CompatProperty("Function")
+ File = _CompatProperty("File")
+ Item = _CompatProperty("Item")
+
+ def _getcustomclass(self, name):
+ maybe_compatprop = getattr(type(self), name)
+ if isinstance(maybe_compatprop, _CompatProperty):
+ return getattr(__import__("pytest"), name)
+ else:
+ cls = getattr(self, name)
+ # TODO: reenable in the features branch
+ # warnings.warn("use of node.%s is deprecated, "
+ # "use pytest_pycollect_makeitem(...) to create custom "
+ # "collection nodes" % name, category=DeprecationWarning)
+ return cls
+
+ def __repr__(self):
+ return "<%s %r>" % (self.__class__.__name__, getattr(self, "name", None))
+
+ def warn(self, code, message):
+ """ generate a warning with the given code and message for this
+ item. """
+ assert isinstance(code, str)
+ fslocation = getattr(self, "location", None)
+ if fslocation is None:
+ fslocation = getattr(self, "fspath", None)
+ self.ihook.pytest_logwarning.call_historic(
+ kwargs=dict(
+ code=code, message=message, nodeid=self.nodeid, fslocation=fslocation
+ )
+ )
+
+ # methods for ordering nodes
+ @property
+ def nodeid(self):
+ """ a ::-separated string denoting its collection tree address. """
+ return self._nodeid
+
+ def __hash__(self):
+ return hash(self.nodeid)
+
+ def setup(self):
+ pass
+
+ def teardown(self):
+ pass
+
+ def listchain(self):
+ """ return list of all parent collectors up to self,
+ starting from root of collection tree. """
+ chain = []
+ item = self
+ while item is not None:
+ chain.append(item)
+ item = item.parent
+ chain.reverse()
+ return chain
+
+ def add_marker(self, marker):
+ """dynamically add a marker object to the node.
+
+ :type marker: str or pytest.mark.*
+ """
+ from _pytest.mark import MarkDecorator, MARK_GEN
+
+ if isinstance(marker, six.string_types):
+ marker = getattr(MARK_GEN, marker)
+ elif not isinstance(marker, MarkDecorator):
+ raise ValueError("is not a string or pytest.mark.* Marker")
+ self.keywords[marker.name] = marker
+ self.own_markers.append(marker.mark)
+
+ def iter_markers(self, name=None):
+ """
+ :param name: if given, filter the results by the name attribute
+
+ iterate over all markers of the node
+ """
+ return (x[1] for x in self.iter_markers_with_node(name=name))
+
+ def iter_markers_with_node(self, name=None):
+ """
+ :param name: if given, filter the results by the name attribute
+
+ iterate over all markers of the node
+ returns sequence of tuples (node, mark)
+ """
+ for node in reversed(self.listchain()):
+ for mark in node.own_markers:
+ if name is None or getattr(mark, "name", None) == name:
+ yield node, mark
+
+ def get_closest_marker(self, name, default=None):
+ """return the first marker matching the name, from closest (for example function) to farther level (for example
+ module level).
+
+        :param default: fallback return value if no marker was found
+ :param name: name to filter by
+ """
+ return next(self.iter_markers(name=name), default)
+
+ def get_marker(self, name):
+ """ get a marker object from this node or None if
+ the node doesn't have a marker with that name.
+
+ .. deprecated:: 3.6
+ This function has been deprecated in favor of
+ :meth:`Node.get_closest_marker <_pytest.nodes.Node.get_closest_marker>` and
+ :meth:`Node.iter_markers <_pytest.nodes.Node.iter_markers>`, see :ref:`update marker code`
+ for more details.
+ """
+ markers = list(self.iter_markers(name=name))
+ if markers:
+ return MarkInfo(markers)
+
+ def listextrakeywords(self):
+ """ Return a set of all extra keywords in self and any parents."""
+ extra_keywords = set()
+ for item in self.listchain():
+ extra_keywords.update(item.extra_keyword_matches)
+ return extra_keywords
+
+ def listnames(self):
+ return [x.name for x in self.listchain()]
+
+ def addfinalizer(self, fin):
+ """ register a function to be called when this node is finalized.
+
+ This method can only be called when this node is active
+ in a setup chain, for example during self.setup().
+ """
+ self.session._setupstate.addfinalizer(fin, self)
+
+ def getparent(self, cls):
+ """ get the next parent node (including ourself)
+ which is an instance of the given class"""
+ current = self
+ while current and not isinstance(current, cls):
+ current = current.parent
+ return current
+
+ def _prunetraceback(self, excinfo):
+ pass
+
+ def _repr_failure_py(self, excinfo, style=None):
+ fm = self.session._fixturemanager
+ if excinfo.errisinstance(fm.FixtureLookupError):
+ return excinfo.value.formatrepr()
+ tbfilter = True
+ if self.config.option.fulltrace:
+ style = "long"
+ else:
+ tb = _pytest._code.Traceback([excinfo.traceback[-1]])
+ self._prunetraceback(excinfo)
+ if len(excinfo.traceback) == 0:
+ excinfo.traceback = tb
+ tbfilter = False # prunetraceback already does it
+ if style == "auto":
+ style = "long"
+ # XXX should excinfo.getrepr record all data and toterminal() process it?
+ if style is None:
+ if self.config.option.tbstyle == "short":
+ style = "short"
+ else:
+ style = "long"
+
+ try:
+ os.getcwd()
+ abspath = False
+ except OSError:
+ abspath = True
+
+ return excinfo.getrepr(
+ funcargs=True,
+ abspath=abspath,
+ showlocals=self.config.option.showlocals,
+ style=style,
+ tbfilter=tbfilter,
+ )
+
+ repr_failure = _repr_failure_py
+
+
+class Collector(Node):
+ """ Collector instances create children through collect()
+ and thus iteratively build a tree.
+ """
+
+ class CollectError(Exception):
+ """ an error during collection, contains a custom message. """
+
+ def collect(self):
+ """ returns a list of children (items and collectors)
+ for this collection node.
+ """
+ raise NotImplementedError("abstract")
+
+ def repr_failure(self, excinfo):
+ """ represent a collection failure. """
+ if excinfo.errisinstance(self.CollectError):
+ exc = excinfo.value
+ return str(exc.args[0])
+ return self._repr_failure_py(excinfo, style="short")
+
+ def _prunetraceback(self, excinfo):
+ if hasattr(self, "fspath"):
+ traceback = excinfo.traceback
+ ntraceback = traceback.cut(path=self.fspath)
+ if ntraceback == traceback:
+ ntraceback = ntraceback.cut(excludepath=tracebackcutdir)
+ excinfo.traceback = ntraceback.filter()
+
+
+def _check_initialpaths_for_relpath(session, fspath):
+ for initial_path in session._initialpaths:
+ if fspath.common(initial_path) == initial_path:
+ return fspath.relto(initial_path.dirname)
+
+
+class FSCollector(Collector):
+
+ def __init__(self, fspath, parent=None, config=None, session=None, nodeid=None):
+ fspath = py.path.local(fspath) # xxx only for test_resultlog.py?
+ name = fspath.basename
+ if parent is not None:
+ rel = fspath.relto(parent.fspath)
+ if rel:
+ name = rel
+ name = name.replace(os.sep, SEP)
+ self.fspath = fspath
+
+ session = session or parent.session
+
+ if nodeid is None:
+ nodeid = self.fspath.relto(session.config.rootdir)
+
+ if not nodeid:
+ nodeid = _check_initialpaths_for_relpath(session, fspath)
+ if os.sep != SEP:
+ nodeid = nodeid.replace(os.sep, SEP)
+
+ super(FSCollector, self).__init__(
+ name, parent, config, session, nodeid=nodeid, fspath=fspath
+ )
+
+
+class File(FSCollector):
+ """ base class for collecting tests from a file. """
+
+
+class Item(Node):
+ """ a basic test invocation item. Note that for a single function
+ there might be multiple test invocation items.
+ """
+ nextitem = None
+
+ def __init__(self, name, parent=None, config=None, session=None, nodeid=None):
+ super(Item, self).__init__(name, parent, config, session, nodeid=nodeid)
+ self._report_sections = []
+
+ #: user properties is a list of tuples (name, value) that holds user
+ #: defined properties for this test.
+ self.user_properties = []
+
+ def add_report_section(self, when, key, content):
+ """
+ Adds a new report section, similar to what's done internally to add stdout and
+ stderr captured output::
+
+ item.add_report_section("call", "stdout", "report section contents")
+
+ :param str when:
+ One of the possible capture states, ``"setup"``, ``"call"``, ``"teardown"``.
+ :param str key:
+ Name of the section, can be customized at will. Pytest uses ``"stdout"`` and
+ ``"stderr"`` internally.
+
+ :param str content:
+ The full contents as a string.
+ """
+ if content:
+ self._report_sections.append((when, key, content))
+
+ def reportinfo(self):
+ return self.fspath, None, ""
+
+ @property
+ def location(self):
+ try:
+ return self._location
+ except AttributeError:
+ location = self.reportinfo()
+ # bestrelpath is a quite slow function
+ cache = self.config.__dict__.setdefault("_bestrelpathcache", {})
+ try:
+ fspath = cache[location[0]]
+ except KeyError:
+ fspath = self.session.fspath.bestrelpath(location[0])
+ cache[location[0]] = fspath
+ location = (fspath, location[1], str(location[2]))
+ self._location = location
+ return location
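The marker helpers above (add_marker, iter_markers, get_closest_marker) are easiest to read from the consumer side. A minimal sketch of how a conftest.py hook might use them; the "timeout" marker name and the 10-second default are illustrative assumptions, not anything defined in this file:

    # conftest.py -- illustrative sketch only, not part of the vendored sources
    import pytest

    def pytest_collection_modifyitems(config, items):
        for item in items:
            # get_closest_marker() returns the marker nearest to the function,
            # or the given default (here None) when no such marker is applied.
            if item.get_closest_marker("timeout") is None:
                # add_marker() accepts a marker name or a pytest.mark.* object
                item.add_marker(pytest.mark.timeout(10))
            # iter_markers() walks own_markers from the item up its listchain()
            for mark in item.iter_markers(name="timeout"):
                print(item.nodeid, mark.args)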
diff --git a/third_party/python/pytest/src/_pytest/nose.py b/third_party/python/pytest/src/_pytest/nose.py
new file mode 100644
index 0000000000..bb2e4277d1
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/nose.py
@@ -0,0 +1,72 @@
+""" run test suites written for nose. """
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+from _pytest import unittest, runner, python
+from _pytest.config import hookimpl
+
+
+def get_skip_exceptions():
+ skip_classes = set()
+ for module_name in ("unittest", "unittest2", "nose"):
+ mod = sys.modules.get(module_name)
+ if hasattr(mod, "SkipTest"):
+ skip_classes.add(mod.SkipTest)
+ return tuple(skip_classes)
+
+
+def pytest_runtest_makereport(item, call):
+ if call.excinfo and call.excinfo.errisinstance(get_skip_exceptions()):
+ # let's substitute the excinfo with a pytest.skip one
+ call2 = call.__class__(lambda: runner.skip(str(call.excinfo.value)), call.when)
+ call.excinfo = call2.excinfo
+
+
+@hookimpl(trylast=True)
+def pytest_runtest_setup(item):
+ if is_potential_nosetest(item):
+ if isinstance(item.parent, python.Generator):
+ gen = item.parent
+ if not hasattr(gen, "_nosegensetup"):
+ call_optional(gen.obj, "setup")
+ if isinstance(gen.parent, python.Instance):
+ call_optional(gen.parent.obj, "setup")
+ gen._nosegensetup = True
+ if not call_optional(item.obj, "setup"):
+ # call module level setup if there is no object level one
+ call_optional(item.parent.obj, "setup")
+ # XXX this implies we only call teardown when setup worked
+ item.session._setupstate.addfinalizer((lambda: teardown_nose(item)), item)
+
+
+def teardown_nose(item):
+ if is_potential_nosetest(item):
+ if not call_optional(item.obj, "teardown"):
+ call_optional(item.parent.obj, "teardown")
+ # if hasattr(item.parent, '_nosegensetup'):
+ # #call_optional(item._nosegensetup, 'teardown')
+ # del item.parent._nosegensetup
+
+
+def pytest_make_collect_report(collector):
+ if isinstance(collector, python.Generator):
+ call_optional(collector.obj, "setup")
+
+
+def is_potential_nosetest(item):
+ # extra check needed since we do not do nose style setup/teardown
+ # on direct unittest style classes
+ return isinstance(item, python.Function) and not isinstance(
+ item, unittest.TestCaseFunction
+ )
+
+
+def call_optional(obj, name):
+ method = getattr(obj, name, None)
+ isfixture = hasattr(method, "_pytestfixturefunction")
+ if method is not None and not isfixture and callable(method):
+ # If there's any problems allow the exception to raise rather than
+ # silently ignoring them
+ method()
+ return True
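For context, the hooks above target plain nose-style setup/teardown functions discovered via call_optional(). A minimal sketch of a test module exercising that path (file and function names are illustrative):

    # test_nose_style.py -- illustrative sketch only
    events = []

    def setup():
        # module-level setup, reached from pytest_runtest_setup() through
        # call_optional(item.parent.obj, "setup")
        events.append("setup")

    def teardown():
        # registered as a finalizer by teardown_nose()
        events.append("teardown")

    def test_module_level_setup_ran():
        assert events == ["setup"]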
diff --git a/third_party/python/pytest/src/_pytest/outcomes.py b/third_party/python/pytest/src/_pytest/outcomes.py
new file mode 100644
index 0000000000..8a3662e1b3
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/outcomes.py
@@ -0,0 +1,157 @@
+"""
+exception classes and constants handling test outcomes
+as well as functions creating them
+"""
+from __future__ import absolute_import, division, print_function
+import py
+import sys
+
+
+class OutcomeException(BaseException):
+ """ OutcomeException and its subclass instances indicate and
+ contain info about test and collection outcomes.
+ """
+
+ def __init__(self, msg=None, pytrace=True):
+ BaseException.__init__(self, msg)
+ self.msg = msg
+ self.pytrace = pytrace
+
+ def __repr__(self):
+ if self.msg:
+ val = self.msg
+ if isinstance(val, bytes):
+ val = py._builtin._totext(val, errors="replace")
+ return val
+ return "<%s instance>" % (self.__class__.__name__,)
+
+ __str__ = __repr__
+
+
+TEST_OUTCOME = (OutcomeException, Exception)
+
+
+class Skipped(OutcomeException):
+ # XXX hackish: on 3k we fake to live in the builtins
+ # in order to have Skipped exception printing shorter/nicer
+ __module__ = "builtins"
+
+ def __init__(self, msg=None, pytrace=True, allow_module_level=False):
+ OutcomeException.__init__(self, msg=msg, pytrace=pytrace)
+ self.allow_module_level = allow_module_level
+
+
+class Failed(OutcomeException):
+ """ raised from an explicit call to pytest.fail() """
+ __module__ = "builtins"
+
+
+class Exit(KeyboardInterrupt):
+ """ raised for immediate program exits (no tracebacks/summaries)"""
+
+ def __init__(self, msg="unknown reason"):
+ self.msg = msg
+ KeyboardInterrupt.__init__(self, msg)
+
+
+# exposed helper methods
+
+
+def exit(msg):
+ """ exit testing process as if KeyboardInterrupt was triggered. """
+ __tracebackhide__ = True
+ raise Exit(msg)
+
+
+exit.Exception = Exit
+
+
+def skip(msg="", **kwargs):
+ """ skip an executing test with the given message. Note: it's usually
+ better to use the pytest.mark.skipif marker to declare a test to be
+ skipped under certain conditions like mismatching platforms or
+ dependencies. See the pytest_skipping plugin for details.
+
+ :kwarg bool allow_module_level: allows this function to be called at
+ module level, skipping the rest of the module. Defaults to False.
+ """
+ __tracebackhide__ = True
+ allow_module_level = kwargs.pop("allow_module_level", False)
+ if kwargs:
+ keys = [k for k in kwargs.keys()]
+ raise TypeError("unexpected keyword arguments: {}".format(keys))
+ raise Skipped(msg=msg, allow_module_level=allow_module_level)
+
+
+skip.Exception = Skipped
+
+
+def fail(msg="", pytrace=True):
+ """ explicitly fail a currently-executing test with the given Message.
+
+ :arg pytrace: if false the msg represents the full failure information
+ and no python traceback will be reported.
+ """
+ __tracebackhide__ = True
+ raise Failed(msg=msg, pytrace=pytrace)
+
+
+fail.Exception = Failed
+
+
+class XFailed(fail.Exception):
+ """ raised from an explicit call to pytest.xfail() """
+
+
+def xfail(reason=""):
+ """ xfail an executing test or setup functions with the given reason."""
+ __tracebackhide__ = True
+ raise XFailed(reason)
+
+
+xfail.Exception = XFailed
+
+
+def importorskip(modname, minversion=None):
+ """ return imported module if it has at least "minversion" as its
+ __version__ attribute. If no minversion is specified the a skip
+ is only triggered if the module can not be imported.
+ """
+ import warnings
+
+ __tracebackhide__ = True
+ compile(modname, "", "eval") # to catch syntaxerrors
+ should_skip = False
+
+ with warnings.catch_warnings():
+ # make sure to ignore ImportWarnings that might happen because
+ # of existing directories with the same name we're trying to
+ # import but without a __init__.py file
+ warnings.simplefilter("ignore")
+ try:
+ __import__(modname)
+ except ImportError:
+ # Do not raise chained exception here (#1485)
+ should_skip = True
+ if should_skip:
+ raise Skipped("could not import %r" % (modname,), allow_module_level=True)
+ mod = sys.modules[modname]
+ if minversion is None:
+ return mod
+ verattr = getattr(mod, "__version__", None)
+ if minversion is not None:
+ try:
+ from pkg_resources import parse_version as pv
+ except ImportError:
+ raise Skipped(
+ "we have a required version for %r but can not import "
+ "pkg_resources to parse version strings." % (modname,),
+ allow_module_level=True,
+ )
+ if verattr is None or pv(verattr) < pv(minversion):
+ raise Skipped(
+ "module %r has __version__ %r, required is: %r"
+ % (modname, verattr, minversion),
+ allow_module_level=True,
+ )
+ return mod
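The helpers above are re-exported on the pytest namespace. A small usage sketch, assuming a hypothetical optional dependency named "somedep" that exposes a __version__ attribute and a fast_path helper:

    # test_outcomes_usage.py -- illustrative sketch only
    import pytest

    # raises Skipped with allow_module_level=True when the import or the
    # minimum-version check fails, skipping the rest of this module
    somedep = pytest.importorskip("somedep", minversion="1.0")

    def test_conditional_skip():
        if not hasattr(somedep, "fast_path"):  # hypothetical attribute
            pytest.skip("somedep built without fast_path support")

    def test_explicit_failure():
        # fail.Exception is the Failed class defined above
        with pytest.raises(pytest.fail.Exception):
            pytest.fail("forced failure, no traceback", pytrace=False)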
diff --git a/third_party/python/pytest/src/_pytest/pastebin.py b/third_party/python/pytest/src/_pytest/pastebin.py
new file mode 100644
index 0000000000..6af202d1f0
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/pastebin.py
@@ -0,0 +1,109 @@
+""" submit failure or test session information to a pastebin service. """
+from __future__ import absolute_import, division, print_function
+
+import pytest
+import six
+import sys
+import tempfile
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting")
+ group._addoption(
+ "--pastebin",
+ metavar="mode",
+ action="store",
+ dest="pastebin",
+ default=None,
+ choices=["failed", "all"],
+ help="send failed|all info to bpaste.net pastebin service.",
+ )
+
+
+@pytest.hookimpl(trylast=True)
+def pytest_configure(config):
+ if config.option.pastebin == "all":
+ tr = config.pluginmanager.getplugin("terminalreporter")
+ # if no terminal reporter plugin is present, nothing we can do here;
+ # this can happen when this function executes in a slave node
+ # when using pytest-xdist, for example
+ if tr is not None:
+ # pastebin file will be utf-8 encoded binary file
+ config._pastebinfile = tempfile.TemporaryFile("w+b")
+ oldwrite = tr._tw.write
+
+ def tee_write(s, **kwargs):
+ oldwrite(s, **kwargs)
+ if isinstance(s, six.text_type):
+ s = s.encode("utf-8")
+ config._pastebinfile.write(s)
+
+ tr._tw.write = tee_write
+
+
+def pytest_unconfigure(config):
+ if hasattr(config, "_pastebinfile"):
+ # get terminal contents and delete file
+ config._pastebinfile.seek(0)
+ sessionlog = config._pastebinfile.read()
+ config._pastebinfile.close()
+ del config._pastebinfile
+ # undo our patching in the terminal reporter
+ tr = config.pluginmanager.getplugin("terminalreporter")
+ del tr._tw.__dict__["write"]
+ # write summary
+ tr.write_sep("=", "Sending information to Paste Service")
+ pastebinurl = create_new_paste(sessionlog)
+ tr.write_line("pastebin session-log: %s\n" % pastebinurl)
+
+
+def create_new_paste(contents):
+ """
+ Creates a new paste using bpaste.net service.
+
+ :contents: paste contents as utf-8 encoded bytes
+ :returns: url to the pasted contents
+ """
+ import re
+
+ if sys.version_info < (3, 0):
+ from urllib import urlopen, urlencode
+ else:
+ from urllib.request import urlopen
+ from urllib.parse import urlencode
+
+ params = {
+ "code": contents,
+ "lexer": "python3" if sys.version_info[0] == 3 else "python",
+ "expiry": "1week",
+ }
+ url = "https://bpaste.net"
+ response = urlopen(url, data=urlencode(params).encode("ascii")).read()
+ m = re.search(r'href="/raw/(\w+)"', response.decode("utf-8"))
+ if m:
+ return "%s/show/%s" % (url, m.group(1))
+ else:
+ return "bad response: " + response
+
+
+def pytest_terminal_summary(terminalreporter):
+ import _pytest.config
+
+ if terminalreporter.config.option.pastebin != "failed":
+ return
+ tr = terminalreporter
+ if "failed" in tr.stats:
+ terminalreporter.write_sep("=", "Sending information to Paste Service")
+ for rep in terminalreporter.stats.get("failed"):
+ try:
+ msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc
+ except AttributeError:
+ msg = tr._getfailureheadline(rep)
+ tw = _pytest.config.create_terminal_writer(
+ terminalreporter.config, stringio=True
+ )
+ rep.toterminal(tw)
+ s = tw.stringio.getvalue()
+ assert len(s)
+ pastebinurl = create_new_paste(s)
+ tr.write_line("%s --> %s" % (msg, pastebinurl))
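The plugin above is driven purely from the command line; as a usage reminder (network access to bpaste.net is assumed):

    pytest --pastebin=failed    # paste each failed test's report and print its URL
    pytest --pastebin=all       # tee the whole terminal session log into one paste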
diff --git a/third_party/python/pytest/src/_pytest/pytester.py b/third_party/python/pytest/src/_pytest/pytester.py
new file mode 100644
index 0000000000..c9defe03a6
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/pytester.py
@@ -0,0 +1,1272 @@
+"""(disabled by default) support for testing pytest and pytest plugins."""
+from __future__ import absolute_import, division, print_function
+
+import codecs
+import gc
+import os
+import platform
+import re
+import subprocess
+import six
+import sys
+import time
+import traceback
+from fnmatch import fnmatch
+
+from weakref import WeakKeyDictionary
+
+from _pytest.capture import MultiCapture, SysCapture
+from _pytest._code import Source
+import py
+import pytest
+from _pytest.main import Session, EXIT_OK
+from _pytest.assertion.rewrite import AssertionRewritingHook
+
+
+PYTEST_FULLPATH = os.path.abspath(pytest.__file__.rstrip("oc")).replace(
+ "$py.class", ".py"
+)
+
+
+IGNORE_PAM = [ # filenames added when obtaining details about the current user
+ u"/var/lib/sss/mc/passwd"
+]
+
+
+def pytest_addoption(parser):
+ parser.addoption(
+ "--lsof",
+ action="store_true",
+ dest="lsof",
+ default=False,
+ help=("run FD checks if lsof is available"),
+ )
+
+ parser.addoption(
+ "--runpytest",
+ default="inprocess",
+ dest="runpytest",
+ choices=("inprocess", "subprocess"),
+ help=(
+ "run pytest sub runs in tests using an 'inprocess' "
+ "or 'subprocess' (python -m main) method"
+ ),
+ )
+
+
+def pytest_configure(config):
+ if config.getvalue("lsof"):
+ checker = LsofFdLeakChecker()
+ if checker.matching_platform():
+ config.pluginmanager.register(checker)
+
+
+class LsofFdLeakChecker(object):
+
+ def get_open_files(self):
+ out = self._exec_lsof()
+ open_files = self._parse_lsof_output(out)
+ return open_files
+
+ def _exec_lsof(self):
+ pid = os.getpid()
+ return py.process.cmdexec("lsof -Ffn0 -p %d" % pid)
+
+ def _parse_lsof_output(self, out):
+
+ def isopen(line):
+ return line.startswith("f") and (
+ "deleted" not in line
+ and "mem" not in line
+ and "txt" not in line
+ and "cwd" not in line
+ )
+
+ open_files = []
+
+ for line in out.split("\n"):
+ if isopen(line):
+ fields = line.split("\0")
+ fd = fields[0][1:]
+ filename = fields[1][1:]
+ if filename in IGNORE_PAM:
+ continue
+ if filename.startswith("/"):
+ open_files.append((fd, filename))
+
+ return open_files
+
+ def matching_platform(self):
+ try:
+ py.process.cmdexec("lsof -v")
+ except (py.process.cmdexec.Error, UnicodeDecodeError):
+ # cmdexec may raise UnicodeDecodeError on Windows systems with
+ # locale other than English:
+ # https://bitbucket.org/pytest-dev/py/issues/66
+ return False
+ else:
+ return True
+
+ @pytest.hookimpl(hookwrapper=True, tryfirst=True)
+ def pytest_runtest_protocol(self, item):
+ lines1 = self.get_open_files()
+ yield
+ if hasattr(sys, "pypy_version_info"):
+ gc.collect()
+ lines2 = self.get_open_files()
+
+ new_fds = {t[0] for t in lines2} - {t[0] for t in lines1}
+ leaked_files = [t for t in lines2 if t[0] in new_fds]
+ if leaked_files:
+ error = []
+ error.append("***** %s FD leakage detected" % len(leaked_files))
+ error.extend([str(f) for f in leaked_files])
+ error.append("*** Before:")
+ error.extend([str(f) for f in lines1])
+ error.append("*** After:")
+ error.extend([str(f) for f in lines2])
+ error.append(error[0])
+ error.append("*** function %s:%s: %s " % item.location)
+ error.append("See issue #2366")
+ item.warn("", "\n".join(error))
+
+
+# XXX copied from execnet's conftest.py - needs to be merged
+winpymap = {
+ "python2.7": r"C:\Python27\python.exe",
+ "python3.4": r"C:\Python34\python.exe",
+ "python3.5": r"C:\Python35\python.exe",
+ "python3.6": r"C:\Python36\python.exe",
+}
+
+
+def getexecutable(name, cache={}):
+ try:
+ return cache[name]
+ except KeyError:
+ executable = py.path.local.sysfind(name)
+ if executable:
+ import subprocess
+
+ popen = subprocess.Popen(
+ [str(executable), "--version"],
+ universal_newlines=True,
+ stderr=subprocess.PIPE,
+ )
+ out, err = popen.communicate()
+ if name == "jython":
+ if not err or "2.5" not in err:
+ executable = None
+ if "2.5.2" in err:
+ executable = None # http://bugs.jython.org/issue1790
+ elif popen.returncode != 0:
+ # handle pyenv's 127
+ executable = None
+ cache[name] = executable
+ return executable
+
+
+@pytest.fixture(params=["python2.7", "python3.4", "pypy", "pypy3"])
+def anypython(request):
+ name = request.param
+ executable = getexecutable(name)
+ if executable is None:
+ if sys.platform == "win32":
+ executable = winpymap.get(name, None)
+ if executable:
+ executable = py.path.local(executable)
+ if executable.check():
+ return executable
+ pytest.skip("no suitable %s found" % (name,))
+ return executable
+
+
+# used at least by pytest-xdist plugin
+
+
+@pytest.fixture
+def _pytest(request):
+ """Return a helper which offers a gethookrecorder(hook) method which
+ returns a HookRecorder instance which helps to make assertions about called
+ hooks.
+
+ """
+ return PytestArg(request)
+
+
+class PytestArg(object):
+
+ def __init__(self, request):
+ self.request = request
+
+ def gethookrecorder(self, hook):
+ hookrecorder = HookRecorder(hook._pm)
+ self.request.addfinalizer(hookrecorder.finish_recording)
+ return hookrecorder
+
+
+def get_public_names(values):
+ """Only return names from iterator values without a leading underscore."""
+ return [x for x in values if x[0] != "_"]
+
+
+class ParsedCall(object):
+
+ def __init__(self, name, kwargs):
+ self.__dict__.update(kwargs)
+ self._name = name
+
+ def __repr__(self):
+ d = self.__dict__.copy()
+ del d["_name"]
+ return "<ParsedCall %r(**%r)>" % (self._name, d)
+
+
+class HookRecorder(object):
+ """Record all hooks called in a plugin manager.
+
+ This wraps all the hook calls in the plugin manager, recording each call
+ before propagating the normal calls.
+
+ """
+
+ def __init__(self, pluginmanager):
+ self._pluginmanager = pluginmanager
+ self.calls = []
+
+ def before(hook_name, hook_impls, kwargs):
+ self.calls.append(ParsedCall(hook_name, kwargs))
+
+ def after(outcome, hook_name, hook_impls, kwargs):
+ pass
+
+ self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after)
+
+ def finish_recording(self):
+ self._undo_wrapping()
+
+ def getcalls(self, names):
+ if isinstance(names, str):
+ names = names.split()
+ return [call for call in self.calls if call._name in names]
+
+ def assert_contains(self, entries):
+ __tracebackhide__ = True
+ i = 0
+ entries = list(entries)
+ backlocals = sys._getframe(1).f_locals
+ while entries:
+ name, check = entries.pop(0)
+ for ind, call in enumerate(self.calls[i:]):
+ if call._name == name:
+ print("NAMEMATCH", name, call)
+ if eval(check, backlocals, call.__dict__):
+ print("CHECKERMATCH", repr(check), "->", call)
+ else:
+ print("NOCHECKERMATCH", repr(check), "-", call)
+ continue
+ i += ind + 1
+ break
+ print("NONAMEMATCH", name, "with", call)
+ else:
+ pytest.fail("could not find %r check %r" % (name, check))
+
+ def popcall(self, name):
+ __tracebackhide__ = True
+ for i, call in enumerate(self.calls):
+ if call._name == name:
+ del self.calls[i]
+ return call
+ lines = ["could not find call %r, in:" % (name,)]
+ lines.extend([" %s" % str(x) for x in self.calls])
+ pytest.fail("\n".join(lines))
+
+ def getcall(self, name):
+ values = self.getcalls(name)
+ assert len(values) == 1, (name, values)
+ return values[0]
+
+ # functionality for test reports
+
+ def getreports(self, names="pytest_runtest_logreport pytest_collectreport"):
+ return [x.report for x in self.getcalls(names)]
+
+ def matchreport(
+ self,
+ inamepart="",
+ names="pytest_runtest_logreport pytest_collectreport",
+ when=None,
+ ):
+ """return a testreport whose dotted import path matches"""
+ values = []
+ for rep in self.getreports(names=names):
+ try:
+ if not when and rep.when != "call" and rep.passed:
+ # setup/teardown passing reports - let's ignore those
+ continue
+ except AttributeError:
+ pass
+ if when and getattr(rep, "when", None) != when:
+ continue
+ if not inamepart or inamepart in rep.nodeid.split("::"):
+ values.append(rep)
+ if not values:
+ raise ValueError(
+ "could not find test report matching %r: "
+ "no test reports at all!" % (inamepart,)
+ )
+ if len(values) > 1:
+ raise ValueError(
+ "found 2 or more testreports matching %r: %s" % (inamepart, values)
+ )
+ return values[0]
+
+ def getfailures(self, names="pytest_runtest_logreport pytest_collectreport"):
+ return [rep for rep in self.getreports(names) if rep.failed]
+
+ def getfailedcollections(self):
+ return self.getfailures("pytest_collectreport")
+
+ def listoutcomes(self):
+ passed = []
+ skipped = []
+ failed = []
+ for rep in self.getreports("pytest_collectreport pytest_runtest_logreport"):
+ if rep.passed:
+ if getattr(rep, "when", None) == "call":
+ passed.append(rep)
+ elif rep.skipped:
+ skipped.append(rep)
+ elif rep.failed:
+ failed.append(rep)
+ return passed, skipped, failed
+
+ def countoutcomes(self):
+ return [len(x) for x in self.listoutcomes()]
+
+ def assertoutcome(self, passed=0, skipped=0, failed=0):
+ realpassed, realskipped, realfailed = self.listoutcomes()
+ assert passed == len(realpassed)
+ assert skipped == len(realskipped)
+ assert failed == len(realfailed)
+
+ def clear(self):
+ self.calls[:] = []
+
+
+@pytest.fixture
+def linecomp(request):
+ return LineComp()
+
+
+@pytest.fixture(name="LineMatcher")
+def LineMatcher_fixture(request):
+ return LineMatcher
+
+
+@pytest.fixture
+def testdir(request, tmpdir_factory):
+ return Testdir(request, tmpdir_factory)
+
+
+rex_outcome = re.compile(r"(\d+) ([\w-]+)")
+
+
+class RunResult(object):
+ """The result of running a command.
+
+ Attributes:
+
+ :ret: the return value
+ :outlines: list of lines captured from stdout
+ :errlines: list of lines captured from stderr
+ :stdout: :py:class:`LineMatcher` of stdout, use ``stdout.str()`` to
+ reconstruct stdout or the commonly used ``stdout.fnmatch_lines()``
+ method
+ :stderr: :py:class:`LineMatcher` of stderr
+ :duration: duration in seconds
+
+ """
+
+ def __init__(self, ret, outlines, errlines, duration):
+ self.ret = ret
+ self.outlines = outlines
+ self.errlines = errlines
+ self.stdout = LineMatcher(outlines)
+ self.stderr = LineMatcher(errlines)
+ self.duration = duration
+
+ def parseoutcomes(self):
+ """Return a dictionary of outcomestring->num from parsing the terminal
+ output that the test process produced.
+
+ """
+ for line in reversed(self.outlines):
+ if "seconds" in line:
+ outcomes = rex_outcome.findall(line)
+ if outcomes:
+ d = {}
+ for num, cat in outcomes:
+ d[cat] = int(num)
+ return d
+ raise ValueError("Pytest terminal report not found")
+
+ def assert_outcomes(self, passed=0, skipped=0, failed=0, error=0):
+ """Assert that the specified outcomes appear with the respective
+ numbers (0 means it didn't occur) in the text output from a test run.
+
+ """
+ d = self.parseoutcomes()
+ obtained = {
+ "passed": d.get("passed", 0),
+ "skipped": d.get("skipped", 0),
+ "failed": d.get("failed", 0),
+ "error": d.get("error", 0),
+ }
+ assert (
+ obtained == dict(passed=passed, skipped=skipped, failed=failed, error=error)
+ )
+
+
+class CwdSnapshot(object):
+
+ def __init__(self):
+ self.__saved = os.getcwd()
+
+ def restore(self):
+ os.chdir(self.__saved)
+
+
+class SysModulesSnapshot(object):
+
+ def __init__(self, preserve=None):
+ self.__preserve = preserve
+ self.__saved = dict(sys.modules)
+
+ def restore(self):
+ if self.__preserve:
+ self.__saved.update(
+ (k, m) for k, m in sys.modules.items() if self.__preserve(k)
+ )
+ sys.modules.clear()
+ sys.modules.update(self.__saved)
+
+
+class SysPathsSnapshot(object):
+
+ def __init__(self):
+ self.__saved = list(sys.path), list(sys.meta_path)
+
+ def restore(self):
+ sys.path[:], sys.meta_path[:] = self.__saved
+
+
+class Testdir(object):
+ """Temporary test directory with tools to test/run pytest itself.
+
+ This is based on the ``tmpdir`` fixture but provides a number of methods
+ which aid with testing pytest itself. Unless :py:meth:`chdir` is used all
+ methods will use :py:attr:`tmpdir` as their current working directory.
+
+ Attributes:
+
+ :tmpdir: The :py:class:`py.path.local` instance of the temporary directory.
+
+ :plugins: A list of plugins to use with :py:meth:`parseconfig` and
+ :py:meth:`runpytest`. Initially this is an empty list but plugins can
+ be added to the list. The type of items to add to the list depends on
+ the method using them so refer to them for details.
+
+ """
+
+ def __init__(self, request, tmpdir_factory):
+ self.request = request
+ self._mod_collections = WeakKeyDictionary()
+ name = request.function.__name__
+ self.tmpdir = tmpdir_factory.mktemp(name, numbered=True)
+ self.plugins = []
+ self._cwd_snapshot = CwdSnapshot()
+ self._sys_path_snapshot = SysPathsSnapshot()
+ self._sys_modules_snapshot = self.__take_sys_modules_snapshot()
+ self.chdir()
+ self.request.addfinalizer(self.finalize)
+ method = self.request.config.getoption("--runpytest")
+ if method == "inprocess":
+ self._runpytest_method = self.runpytest_inprocess
+ elif method == "subprocess":
+ self._runpytest_method = self.runpytest_subprocess
+
+ def __repr__(self):
+ return "<Testdir %r>" % (self.tmpdir,)
+
+ def finalize(self):
+ """Clean up global state artifacts.
+
+ Some methods modify the global interpreter state and this tries to
+ clean this up. It does not remove the temporary directory however so
+ it can be looked at after the test run has finished.
+
+ """
+ self._sys_modules_snapshot.restore()
+ self._sys_path_snapshot.restore()
+ self._cwd_snapshot.restore()
+
+ def __take_sys_modules_snapshot(self):
+ # some zope modules used by twisted-related tests keep internal state
+ # and can't be deleted; we had some trouble in the past with
+ # `zope.interface` for example
+ def preserve_module(name):
+ return name.startswith("zope")
+
+ return SysModulesSnapshot(preserve=preserve_module)
+
+ def make_hook_recorder(self, pluginmanager):
+ """Create a new :py:class:`HookRecorder` for a PluginManager."""
+ assert not hasattr(pluginmanager, "reprec")
+ pluginmanager.reprec = reprec = HookRecorder(pluginmanager)
+ self.request.addfinalizer(reprec.finish_recording)
+ return reprec
+
+ def chdir(self):
+ """Cd into the temporary directory.
+
+ This is done automatically upon instantiation.
+
+ """
+ self.tmpdir.chdir()
+
+ def _makefile(self, ext, args, kwargs, encoding="utf-8"):
+ items = list(kwargs.items())
+
+ def to_text(s):
+ return s.decode(encoding) if isinstance(s, bytes) else six.text_type(s)
+
+ if args:
+ source = u"\n".join(to_text(x) for x in args)
+ basename = self.request.function.__name__
+ items.insert(0, (basename, source))
+
+ ret = None
+ for basename, value in items:
+ p = self.tmpdir.join(basename).new(ext=ext)
+ p.dirpath().ensure_dir()
+ source = Source(value)
+ source = u"\n".join(to_text(line) for line in source.lines)
+ p.write(source.strip().encode(encoding), "wb")
+ if ret is None:
+ ret = p
+ return ret
+
+ def makefile(self, ext, *args, **kwargs):
+ """Create a new file in the testdir.
+
+ ext: The extension the file should use, including the dot, e.g. `.py`.
+
+ args: All args will be treated as strings and joined using newlines.
+ The result will be written as contents to the file. The name of the
+ file will be based on the test function requesting this fixture.
+ E.g. "testdir.makefile('.txt', 'line1', 'line2')"
+
+ kwargs: Each keyword is the name of a file, while the value of it will
+ be written as contents of the file.
+ E.g. "testdir.makefile('.ini', pytest='[pytest]\naddopts=-rs\n')"
+
+ """
+ return self._makefile(ext, args, kwargs)
+
+ def makeconftest(self, source):
+ """Write a contest.py file with 'source' as contents."""
+ return self.makepyfile(conftest=source)
+
+ def makeini(self, source):
+ """Write a tox.ini file with 'source' as contents."""
+ return self.makefile(".ini", tox=source)
+
+ def getinicfg(self, source):
+ """Return the pytest section from the tox.ini config file."""
+ p = self.makeini(source)
+ return py.iniconfig.IniConfig(p)["pytest"]
+
+ def makepyfile(self, *args, **kwargs):
+ """Shortcut for .makefile() with a .py extension."""
+ return self._makefile(".py", args, kwargs)
+
+ def maketxtfile(self, *args, **kwargs):
+ """Shortcut for .makefile() with a .txt extension."""
+ return self._makefile(".txt", args, kwargs)
+
+ def syspathinsert(self, path=None):
+ """Prepend a directory to sys.path, defaults to :py:attr:`tmpdir`.
+
+ This is undone automatically when this object dies at the end of each
+ test.
+
+ """
+ if path is None:
+ path = self.tmpdir
+ sys.path.insert(0, str(path))
+ # a call to syspathinsert() usually means that the caller wants to
+ # import some dynamically created files, thus with python3 we
+ # invalidate its import caches
+ self._possibly_invalidate_import_caches()
+
+ def _possibly_invalidate_import_caches(self):
+ # invalidate caches if we can (py33 and above)
+ try:
+ import importlib
+ except ImportError:
+ pass
+ else:
+ if hasattr(importlib, "invalidate_caches"):
+ importlib.invalidate_caches()
+
+ def mkdir(self, name):
+ """Create a new (sub)directory."""
+ return self.tmpdir.mkdir(name)
+
+ def mkpydir(self, name):
+ """Create a new python package.
+
+ This creates a (sub)directory with an empty ``__init__.py`` file so it
+ gets recognised as a python package.
+
+ """
+ p = self.mkdir(name)
+ p.ensure("__init__.py")
+ return p
+
+ Session = Session
+
+ def getnode(self, config, arg):
+ """Return the collection node of a file.
+
+ :param config: :py:class:`_pytest.config.Config` instance, see
+ :py:meth:`parseconfig` and :py:meth:`parseconfigure` to create the
+ configuration
+
+ :param arg: a :py:class:`py.path.local` instance of the file
+
+ """
+ session = Session(config)
+ assert "::" not in str(arg)
+ p = py.path.local(arg)
+ config.hook.pytest_sessionstart(session=session)
+ res = session.perform_collect([str(p)], genitems=False)[0]
+ config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
+ return res
+
+ def getpathnode(self, path):
+ """Return the collection node of a file.
+
+ This is like :py:meth:`getnode` but uses :py:meth:`parseconfigure` to
+ create the (configured) pytest Config instance.
+
+ :param path: a :py:class:`py.path.local` instance of the file
+
+ """
+ config = self.parseconfigure(path)
+ session = Session(config)
+ x = session.fspath.bestrelpath(path)
+ config.hook.pytest_sessionstart(session=session)
+ res = session.perform_collect([x], genitems=False)[0]
+ config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
+ return res
+
+ def genitems(self, colitems):
+ """Generate all test items from a collection node.
+
+ This recurses into the collection node and returns a list of all the
+ test items contained within.
+
+ """
+ session = colitems[0].session
+ result = []
+ for colitem in colitems:
+ result.extend(session.genitems(colitem))
+ return result
+
+ def runitem(self, source):
+ """Run the "test_func" Item.
+
+ The calling test instance (class containing the test method) must
+ provide a ``.getrunner()`` method which should return a runner which
+ can run the test protocol for a single item, e.g.
+ :py:func:`_pytest.runner.runtestprotocol`.
+
+ """
+ # used from runner functional tests
+ item = self.getitem(source)
+ # the test class where we are called from wants to provide the runner
+ testclassinstance = self.request.instance
+ runner = testclassinstance.getrunner()
+ return runner(item)
+
+ def inline_runsource(self, source, *cmdlineargs):
+ """Run a test module in process using ``pytest.main()``.
+
+ This run writes "source" into a temporary file and runs
+ ``pytest.main()`` on it, returning a :py:class:`HookRecorder` instance
+ for the result.
+
+ :param source: the source code of the test module
+
+ :param cmdlineargs: any extra command line arguments to use
+
+ :return: :py:class:`HookRecorder` instance of the result
+
+ """
+ p = self.makepyfile(source)
+ values = list(cmdlineargs) + [p]
+ return self.inline_run(*values)
+
+ def inline_genitems(self, *args):
+ """Run ``pytest.main(['--collectonly'])`` in-process.
+
+ Runs the :py:func:`pytest.main` function to run all of pytest inside
+ the test process itself like :py:meth:`inline_run`, but returns a
+ tuple of the collected items and a :py:class:`HookRecorder` instance.
+
+ """
+ rec = self.inline_run("--collect-only", *args)
+ items = [x.item for x in rec.getcalls("pytest_itemcollected")]
+ return items, rec
+
+ def inline_run(self, *args, **kwargs):
+ """Run ``pytest.main()`` in-process, returning a HookRecorder.
+
+ Runs the :py:func:`pytest.main` function to run all of pytest inside
+ the test process itself. This means it can return a
+ :py:class:`HookRecorder` instance which gives more detailed results
+ from that run than can be done by matching stdout/stderr from
+ :py:meth:`runpytest`.
+
+ :param args: command line arguments to pass to :py:func:`pytest.main`
+
+ :param plugin: (keyword-only) extra plugin instances the
+ ``pytest.main()`` instance should use
+
+ :return: a :py:class:`HookRecorder` instance
+
+ """
+ finalizers = []
+ try:
+ # When running pytest inline any plugins active in the main test
+ # process are already imported. So this disables the warning which
+ # will trigger to say they can no longer be rewritten, which is
+ # fine as they have already been rewritten.
+ orig_warn = AssertionRewritingHook._warn_already_imported
+
+ def revert_warn_already_imported():
+ AssertionRewritingHook._warn_already_imported = orig_warn
+
+ finalizers.append(revert_warn_already_imported)
+ AssertionRewritingHook._warn_already_imported = lambda *a: None
+
+ # Any sys.module or sys.path changes done while running pytest
+ # inline should be reverted after the test run completes to avoid
+ # clashing with later inline tests run within the same pytest test,
+ # e.g. just because they use matching test module names.
+ finalizers.append(self.__take_sys_modules_snapshot().restore)
+ finalizers.append(SysPathsSnapshot().restore)
+
+ # Important note:
+ # - our tests should not leave any other references/registrations
+ # laying around other than possibly loaded test modules
+ # referenced from sys.modules, as nothing will clean those up
+ # automatically
+
+ rec = []
+
+ class Collect(object):
+
+ def pytest_configure(x, config):
+ rec.append(self.make_hook_recorder(config.pluginmanager))
+
+ plugins = kwargs.get("plugins") or []
+ plugins.append(Collect())
+ ret = pytest.main(list(args), plugins=plugins)
+ if len(rec) == 1:
+ reprec = rec.pop()
+ else:
+
+ class reprec(object):
+ pass
+
+ reprec.ret = ret
+
+ # typically we reraise keyboard interrupts from the child run
+ # because it's our user requesting interruption of the testing
+ if ret == 2 and not kwargs.get("no_reraise_ctrlc"):
+ calls = reprec.getcalls("pytest_keyboard_interrupt")
+ if calls and calls[-1].excinfo.type == KeyboardInterrupt:
+ raise KeyboardInterrupt()
+ return reprec
+ finally:
+ for finalizer in finalizers:
+ finalizer()
+
+ def runpytest_inprocess(self, *args, **kwargs):
+ """Return result of running pytest in-process, providing a similar
+ interface to what self.runpytest() provides.
+
+ """
+ if kwargs.get("syspathinsert"):
+ self.syspathinsert()
+ now = time.time()
+ capture = MultiCapture(Capture=SysCapture)
+ capture.start_capturing()
+ try:
+ try:
+ reprec = self.inline_run(*args, **kwargs)
+ except SystemExit as e:
+
+ class reprec(object):
+ ret = e.args[0]
+
+ except Exception:
+ traceback.print_exc()
+
+ class reprec(object):
+ ret = 3
+
+ finally:
+ out, err = capture.readouterr()
+ capture.stop_capturing()
+ sys.stdout.write(out)
+ sys.stderr.write(err)
+
+ res = RunResult(reprec.ret, out.split("\n"), err.split("\n"), time.time() - now)
+ res.reprec = reprec
+ return res
+
+ def runpytest(self, *args, **kwargs):
+ """Run pytest inline or in a subprocess, depending on the command line
+ option "--runpytest" and return a :py:class:`RunResult`.
+
+ """
+ args = self._ensure_basetemp(args)
+ return self._runpytest_method(*args, **kwargs)
+
+ def _ensure_basetemp(self, args):
+ args = [str(x) for x in args]
+ for x in args:
+ if str(x).startswith("--basetemp"):
+ # print("basedtemp exists: %s" %(args,))
+ break
+ else:
+ args.append("--basetemp=%s" % self.tmpdir.dirpath("basetemp"))
+ # print("added basetemp: %s" %(args,))
+ return args
+
+ def parseconfig(self, *args):
+ """Return a new pytest Config instance from given commandline args.
+
+ This invokes the pytest bootstrapping code in _pytest.config to create
+ a new :py:class:`_pytest.core.PluginManager` and call the
+ pytest_cmdline_parse hook to create a new
+ :py:class:`_pytest.config.Config` instance.
+
+ If :py:attr:`plugins` has been populated they should be plugin modules
+ to be registered with the PluginManager.
+
+ """
+ args = self._ensure_basetemp(args)
+
+ import _pytest.config
+
+ config = _pytest.config._prepareconfig(args, self.plugins)
+ # we don't know what the test will do with this half-setup config
+ # object and thus we make sure it gets unconfigured properly in any
+ # case (otherwise capturing could still be active, for example)
+ self.request.addfinalizer(config._ensure_unconfigure)
+ return config
+
+ def parseconfigure(self, *args):
+ """Return a new pytest configured Config instance.
+
+ This returns a new :py:class:`_pytest.config.Config` instance like
+ :py:meth:`parseconfig`, but also calls the pytest_configure hook.
+
+ """
+ config = self.parseconfig(*args)
+ config._do_configure()
+ self.request.addfinalizer(config._ensure_unconfigure)
+ return config
+
+ def getitem(self, source, funcname="test_func"):
+ """Return the test item for a test function.
+
+ This writes the source to a python file and runs pytest's collection on
+ the resulting module, returning the test item for the requested
+ function name.
+
+ :param source: the module source
+
+ :param funcname: the name of the test function for which to return a
+ test item
+
+ """
+ items = self.getitems(source)
+ for item in items:
+ if item.name == funcname:
+ return item
+ assert 0, (
+ "%r item not found in module:\n%s\nitems: %s" % (funcname, source, items)
+ )
+
+ def getitems(self, source):
+ """Return all test items collected from the module.
+
+ This writes the source to a python file and runs pytest's collection on
+ the resulting module, returning all test items contained within.
+
+ """
+ modcol = self.getmodulecol(source)
+ return self.genitems([modcol])
+
+ def getmodulecol(self, source, configargs=(), withinit=False):
+ """Return the module collection node for ``source``.
+
+ This writes ``source`` to a file using :py:meth:`makepyfile` and then
+ runs the pytest collection on it, returning the collection node for the
+ test module.
+
+ :param source: the source code of the module to collect
+
+ :param configargs: any extra arguments to pass to
+ :py:meth:`parseconfigure`
+
+ :param withinit: whether to also write an ``__init__.py`` file to the
+ same directory to ensure it is a package
+
+ """
+ kw = {self.request.function.__name__: Source(source).strip()}
+ path = self.makepyfile(**kw)
+ if withinit:
+ self.makepyfile(__init__="#")
+ self.config = config = self.parseconfigure(path, *configargs)
+ node = self.getnode(config, path)
+
+ return node
+
+ def collect_by_name(self, modcol, name):
+ """Return the collection node for name from the module collection.
+
+ This will search a module collection node for a collection node
+ matching the given name.
+
+ :param modcol: a module collection node; see :py:meth:`getmodulecol`
+
+ :param name: the name of the node to return
+
+ """
+ if modcol not in self._mod_collections:
+ self._mod_collections[modcol] = list(modcol.collect())
+ for colitem in self._mod_collections[modcol]:
+ if colitem.name == name:
+ return colitem
+
+ def popen(self, cmdargs, stdout, stderr, **kw):
+ """Invoke subprocess.Popen.
+
+ This calls subprocess.Popen making sure the current working directory
+ is in the PYTHONPATH.
+
+ You probably want to use :py:meth:`run` instead.
+
+ """
+ env = os.environ.copy()
+ env["PYTHONPATH"] = os.pathsep.join(
+ filter(None, [str(os.getcwd()), env.get("PYTHONPATH", "")])
+ )
+ kw["env"] = env
+
+ popen = subprocess.Popen(
+ cmdargs, stdin=subprocess.PIPE, stdout=stdout, stderr=stderr, **kw
+ )
+ popen.stdin.close()
+
+ return popen
+
+ def run(self, *cmdargs):
+ """Run a command with arguments.
+
+ Run a process using subprocess.Popen saving the stdout and stderr.
+
+ Returns a :py:class:`RunResult`.
+
+ """
+ return self._run(*cmdargs)
+
+ def _run(self, *cmdargs):
+ cmdargs = [str(x) for x in cmdargs]
+ p1 = self.tmpdir.join("stdout")
+ p2 = self.tmpdir.join("stderr")
+ print("running:", " ".join(cmdargs))
+ print(" in:", str(py.path.local()))
+ f1 = codecs.open(str(p1), "w", encoding="utf8")
+ f2 = codecs.open(str(p2), "w", encoding="utf8")
+ try:
+ now = time.time()
+ popen = self.popen(
+ cmdargs, stdout=f1, stderr=f2, close_fds=(sys.platform != "win32")
+ )
+ ret = popen.wait()
+ finally:
+ f1.close()
+ f2.close()
+ f1 = codecs.open(str(p1), "r", encoding="utf8")
+ f2 = codecs.open(str(p2), "r", encoding="utf8")
+ try:
+ out = f1.read().splitlines()
+ err = f2.read().splitlines()
+ finally:
+ f1.close()
+ f2.close()
+ self._dump_lines(out, sys.stdout)
+ self._dump_lines(err, sys.stderr)
+ return RunResult(ret, out, err, time.time() - now)
+
+ def _dump_lines(self, lines, fp):
+ try:
+ for line in lines:
+ print(line, file=fp)
+ except UnicodeEncodeError:
+ print("couldn't print to %s because of encoding" % (fp,))
+
+ def _getpytestargs(self):
+ # we cannot use `(sys.executable, script)` because on Windows the
+ # script is e.g. `pytest.exe`
+ return (sys.executable, PYTEST_FULLPATH) # noqa
+
+ def runpython(self, script):
+ """Run a python script using sys.executable as interpreter.
+
+ Returns a :py:class:`RunResult`.
+
+ """
+ return self.run(sys.executable, script)
+
+ def runpython_c(self, command):
+ """Run python -c "command", return a :py:class:`RunResult`."""
+ return self.run(sys.executable, "-c", command)
+
+ def runpytest_subprocess(self, *args, **kwargs):
+ """Run pytest as a subprocess with given arguments.
+
+ Any plugins added to the :py:attr:`plugins` list will be added using the
+ ``-p`` command line option. Additionally ``--basetemp`` is used to put
+ any temporary files and directories in a numbered directory prefixed
+ with "runpytest-" so they do not conflict with the normal numbered
+ pytest location for temporary files and directories.
+
+ Returns a :py:class:`RunResult`.
+
+ """
+ p = py.path.local.make_numbered_dir(
+ prefix="runpytest-", keep=None, rootdir=self.tmpdir
+ )
+ args = ("--basetemp=%s" % p,) + args
+ plugins = [x for x in self.plugins if isinstance(x, str)]
+ if plugins:
+ args = ("-p", plugins[0]) + args
+ args = self._getpytestargs() + args
+ return self.run(*args)
+
+ def spawn_pytest(self, string, expect_timeout=10.0):
+ """Run pytest using pexpect.
+
+ This makes sure to use the right pytest and sets up the temporary
+ directory locations.
+
+ The pexpect child is returned.
+
+ """
+ basetemp = self.tmpdir.mkdir("temp-pexpect")
+ invoke = " ".join(map(str, self._getpytestargs()))
+ cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string)
+ return self.spawn(cmd, expect_timeout=expect_timeout)
+
+ def spawn(self, cmd, expect_timeout=10.0):
+ """Run a command using pexpect.
+
+ The pexpect child is returned.
+
+ """
+ pexpect = pytest.importorskip("pexpect", "3.0")
+ if hasattr(sys, "pypy_version_info") and "64" in platform.machine():
+ pytest.skip("pypy-64 bit not supported")
+ if sys.platform.startswith("freebsd"):
+ pytest.xfail("pexpect does not work reliably on freebsd")
+ logfile = self.tmpdir.join("spawn.out").open("wb")
+ child = pexpect.spawn(cmd, logfile=logfile)
+ self.request.addfinalizer(logfile.close)
+ child.timeout = expect_timeout
+ return child
+
+
+def getdecoded(out):
+ try:
+ return out.decode("utf-8")
+ except UnicodeDecodeError:
+ return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % (
+ py.io.saferepr(out),
+ )
+
+
+class LineComp(object):
+
+ def __init__(self):
+ self.stringio = py.io.TextIO()
+
+ def assert_contains_lines(self, lines2):
+ """Assert that lines2 are contained (linearly) in lines1.
+
+ Return a list of extralines found.
+
+ """
+ __tracebackhide__ = True
+ val = self.stringio.getvalue()
+ self.stringio.truncate(0)
+ self.stringio.seek(0)
+ lines1 = val.split("\n")
+ return LineMatcher(lines1).fnmatch_lines(lines2)
+
+
+class LineMatcher(object):
+ """Flexible matching of text.
+
+ This is a convenience class to test large texts like the output of
+ commands.
+
+ The constructor takes a list of lines without their trailing newlines, i.e.
+ ``text.splitlines()``.
+
+ """
+
+ def __init__(self, lines):
+ self.lines = lines
+ self._log_output = []
+
+ def str(self):
+ """Return the entire original text."""
+ return "\n".join(self.lines)
+
+ def _getlines(self, lines2):
+ if isinstance(lines2, str):
+ lines2 = Source(lines2)
+ if isinstance(lines2, Source):
+ lines2 = lines2.strip().lines
+ return lines2
+
+ def fnmatch_lines_random(self, lines2):
+ """Check lines exist in the output using in any order.
+
+ Lines are checked using ``fnmatch.fnmatch``. The argument is a list of
+ lines which have to occur in the output, in any order.
+
+ """
+ self._match_lines_random(lines2, fnmatch)
+
+ def re_match_lines_random(self, lines2):
+ """Check lines exist in the output using ``re.match``, in any order.
+
+ The argument is a list of lines which have to occur in the output, in
+ any order.
+
+ """
+ self._match_lines_random(lines2, lambda name, pat: re.match(pat, name))
+
+ def _match_lines_random(self, lines2, match_func):
+ """Check lines exist in the output.
+
+ The argument is a list of lines which have to occur in the output, in
+ any order. Each line can contain glob wildcards.
+
+ """
+ lines2 = self._getlines(lines2)
+ for line in lines2:
+ for x in self.lines:
+ if line == x or match_func(x, line):
+ self._log("matched: ", repr(line))
+ break
+ else:
+ self._log("line %r not found in output" % line)
+ raise ValueError(self._log_text)
+
+ def get_lines_after(self, fnline):
+ """Return all lines following the given line in the text.
+
+ The given line can contain glob wildcards.
+
+ """
+ for i, line in enumerate(self.lines):
+ if fnline == line or fnmatch(line, fnline):
+ return self.lines[i + 1:]
+ raise ValueError("line %r not found in output" % fnline)
+
+ def _log(self, *args):
+ self._log_output.append(" ".join((str(x) for x in args)))
+
+ @property
+ def _log_text(self):
+ return "\n".join(self._log_output)
+
+ def fnmatch_lines(self, lines2):
+ """Search captured text for matching lines using ``fnmatch.fnmatch``.
+
+ The argument is a list of lines which have to match and can use glob
+ wildcards. If they do not match a pytest.fail() is called. The
+ matches and non-matches are also printed on stdout.
+
+ """
+ self._match_lines(lines2, fnmatch, "fnmatch")
+
+ def re_match_lines(self, lines2):
+ """Search captured text for matching lines using ``re.match``.
+
+ The argument is a list of lines which have to match using ``re.match``.
+ If they do not match a pytest.fail() is called.
+
+ The matches and non-matches are also printed on stdout.
+
+ """
+ self._match_lines(lines2, lambda name, pat: re.match(pat, name), "re.match")
+
+ def _match_lines(self, lines2, match_func, match_nickname):
+ """Underlying implementation of ``fnmatch_lines`` and ``re_match_lines``.
+
+ :param list[str] lines2: list of string patterns to match. The actual
+ format depends on ``match_func``
+ :param match_func: a callable ``match_func(line, pattern)`` where line
+ is the captured line from stdout/stderr and pattern is the matching
+ pattern
+ :param str match_nickname: the nickname for the match function that
+ will be logged to stdout when a match occurs
+
+ """
+ lines2 = self._getlines(lines2)
+ lines1 = self.lines[:]
+ nextline = None
+ extralines = []
+ __tracebackhide__ = True
+ for line in lines2:
+ nomatchprinted = False
+ while lines1:
+ nextline = lines1.pop(0)
+ if line == nextline:
+ self._log("exact match:", repr(line))
+ break
+ elif match_func(nextline, line):
+ self._log("%s:" % match_nickname, repr(line))
+ self._log(" with:", repr(nextline))
+ break
+ else:
+ if not nomatchprinted:
+ self._log("nomatch:", repr(line))
+ nomatchprinted = True
+ self._log(" and:", repr(nextline))
+ extralines.append(nextline)
+ else:
+ self._log("remains unmatched: %r" % (line,))
+ pytest.fail(self._log_text)
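The Testdir/testdir machinery above is normally consumed from a plugin's own test suite. A minimal sketch, assuming the pytester plugin is enabled for the consuming suite (file names and test contents are illustrative):

    # conftest.py of the consuming project -- illustrative sketch only
    pytest_plugins = "pytester"

    # test_selfcheck.py
    def test_run_inprocess(testdir):
        # makepyfile() writes a module named after the requesting test function
        testdir.makepyfile(
            """
            def test_ok():
                assert 1 + 1 == 2

            def test_bad():
                assert 0
            """
        )
        result = testdir.runpytest("-q")
        # RunResult exposes parsed outcomes plus LineMatcher helpers
        result.assert_outcomes(passed=1, failed=1)
        result.stdout.fnmatch_lines(["*1 failed, 1 passed*"])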
diff --git a/third_party/python/pytest/src/_pytest/python.py b/third_party/python/pytest/src/_pytest/python.py
new file mode 100644
index 0000000000..48516199ff
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/python.py
@@ -0,0 +1,1331 @@
+""" Python test discovery, setup and run of test functions. """
+from __future__ import absolute_import, division, print_function
+
+import fnmatch
+import inspect
+import sys
+import os
+import collections
+import warnings
+from textwrap import dedent
+from itertools import count
+
+
+import py
+import six
+from _pytest.mark import MarkerError
+from _pytest.config import hookimpl
+
+import _pytest
+import pluggy
+from _pytest import fixtures
+from _pytest import nodes
+from _pytest import deprecated
+from _pytest.compat import (
+ isclass,
+ isfunction,
+ is_generator,
+ ascii_escaped,
+ REGEX_TYPE,
+ STRING_TYPES,
+ NoneType,
+ NOTSET,
+ get_real_func,
+ getfslineno,
+ safe_getattr,
+ safe_str,
+ getlocation,
+ enum,
+ get_default_arg_names,
+)
+from _pytest.outcomes import fail
+from _pytest.mark.structures import transfer_markers, get_unpacked_marks
+
+
+# relative paths that we use to filter traceback entries from appearing to the user;
+# see filter_traceback
+# note: if we need to add more paths than what we have now we should probably use a list
+# for better maintenance
+_pluggy_dir = py.path.local(pluggy.__file__.rstrip("oc"))
+# pluggy is either a package or a single module depending on the version
+if _pluggy_dir.basename == "__init__.py":
+ _pluggy_dir = _pluggy_dir.dirpath()
+_pytest_dir = py.path.local(_pytest.__file__).dirpath()
+_py_dir = py.path.local(py.__file__).dirpath()
+
+
+def filter_traceback(entry):
+ """Return True if a TracebackEntry instance should be removed from tracebacks:
+ * dynamically generated code (no code to show up for it);
+ * internal traceback from pytest or its internal libraries, py and pluggy.
+ """
+ # entry.path might sometimes return a str object when the entry
+ # points to dynamically generated code
+ # see https://bitbucket.org/pytest-dev/py/issues/71
+ raw_filename = entry.frame.code.raw.co_filename
+ is_generated = "<" in raw_filename and ">" in raw_filename
+ if is_generated:
+ return False
+ # entry.path might point to a non-existing file, in which case it will
+ # also return a str object. see #1133
+ p = py.path.local(entry.path)
+ return not p.relto(_pluggy_dir) and not p.relto(_pytest_dir) and not p.relto(
+ _py_dir
+ )
+
+
+def pyobj_property(name):
+
+ def get(self):
+ node = self.getparent(getattr(__import__("pytest"), name))
+ if node is not None:
+ return node.obj
+
+ doc = "python %s object this node was collected from (can be None)." % (
+ name.lower(),
+ )
+ return property(get, None, None, doc)
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group.addoption(
+ "--fixtures",
+ "--funcargs",
+ action="store_true",
+ dest="showfixtures",
+ default=False,
+ help="show available fixtures, sorted by plugin appearance "
+ "(fixtures with leading '_' are only shown with '-v')",
+ )
+ group.addoption(
+ "--fixtures-per-test",
+ action="store_true",
+ dest="show_fixtures_per_test",
+ default=False,
+ help="show fixtures per test",
+ )
+ parser.addini(
+ "usefixtures",
+ type="args",
+ default=[],
+ help="list of default fixtures to be used with this project",
+ )
+ parser.addini(
+ "python_files",
+ type="args",
+ default=["test_*.py", "*_test.py"],
+ help="glob-style file patterns for Python test module discovery",
+ )
+ parser.addini(
+ "python_classes",
+ type="args",
+ default=["Test"],
+ help="prefixes or glob names for Python test class discovery",
+ )
+ parser.addini(
+ "python_functions",
+ type="args",
+ default=["test"],
+ help="prefixes or glob names for Python test function and " "method discovery",
+ )
+
+ group.addoption(
+ "--import-mode",
+ default="prepend",
+ choices=["prepend", "append"],
+ dest="importmode",
+ help="prepend/append to sys.path when importing test modules, "
+ "default is to prepend.",
+ )
+
+
+def pytest_cmdline_main(config):
+ if config.option.showfixtures:
+ showfixtures(config)
+ return 0
+ if config.option.show_fixtures_per_test:
+ show_fixtures_per_test(config)
+ return 0
+
+
+def pytest_generate_tests(metafunc):
+ # those alternative spellings are common - raise a specific error to alert
+ # the user
+ alt_spellings = ["parameterize", "parametrise", "parameterise"]
+ for attr in alt_spellings:
+ if hasattr(metafunc.function, attr):
+ msg = "{0} has '{1}', spelling should be 'parametrize'"
+ raise MarkerError(msg.format(metafunc.function.__name__, attr))
+ for marker in metafunc.definition.iter_markers(name="parametrize"):
+ metafunc.parametrize(*marker.args, **marker.kwargs)
+
+
+def pytest_configure(config):
+ config.addinivalue_line(
+ "markers",
+ "parametrize(argnames, argvalues): call a test function multiple "
+ "times passing in different arguments in turn. argvalues generally "
+ "needs to be a list of values if argnames specifies only one name "
+ "or a list of tuples of values if argnames specifies multiple names. "
+ "Example: @parametrize('arg1', [1,2]) would lead to two calls of the "
+ "decorated test function, one with arg1=1 and another with arg1=2."
+ "see http://pytest.org/latest/parametrize.html for more info and "
+ "examples.",
+ )
+ config.addinivalue_line(
+ "markers",
+ "usefixtures(fixturename1, fixturename2, ...): mark tests as needing "
+ "all of the specified fixtures. see http://pytest.org/latest/fixture.html#usefixtures ",
+ )
+
+
+@hookimpl(trylast=True)
+def pytest_pyfunc_call(pyfuncitem):
+ testfunction = pyfuncitem.obj
+ if pyfuncitem._isyieldedfunction():
+ testfunction(*pyfuncitem._args)
+ else:
+ funcargs = pyfuncitem.funcargs
+ testargs = {}
+ for arg in pyfuncitem._fixtureinfo.argnames:
+ testargs[arg] = funcargs[arg]
+ testfunction(**testargs)
+ return True
+

+
+def pytest_collect_file(path, parent):
+ ext = path.ext
+ if ext == ".py":
+ if not parent.session.isinitpath(path):
+ for pat in parent.config.getini("python_files"):
+ if path.fnmatch(pat):
+ break
+ else:
+ return
+ ihook = parent.session.gethookproxy(path)
+ return ihook.pytest_pycollect_makemodule(path=path, parent=parent)
+
+
+def pytest_pycollect_makemodule(path, parent):
+ return Module(path, parent)
+
+
+@hookimpl(hookwrapper=True)
+def pytest_pycollect_makeitem(collector, name, obj):
+ outcome = yield
+ res = outcome.get_result()
+ if res is not None:
+ return
+ # nothing was collected elsewhere, let's do it here
+ if isclass(obj):
+ if collector.istestclass(obj, name):
+ Class = collector._getcustomclass("Class")
+ outcome.force_result(Class(name, parent=collector))
+ elif collector.istestfunction(obj, name):
+ # mock seems to store unbound methods (issue473), normalize it
+ obj = getattr(obj, "__func__", obj)
+        # We need to try and unwrap the function if it's a functools.partial
+        # or a functools.wraps-decorated function.
+        # We mustn't do that if it's been wrapped with mock.patch (python 2 only).
+ if not (isfunction(obj) or isfunction(get_real_func(obj))):
+ collector.warn(
+ code="C2",
+ message="cannot collect %r because it is not a function." % name,
+ )
+ elif getattr(obj, "__test__", True):
+ if is_generator(obj):
+ res = Generator(name, parent=collector)
+ else:
+ res = list(collector._genfunctions(name, obj))
+ outcome.force_result(res)
+
+
+def pytest_make_parametrize_id(config, val, argname=None):
+ return None
+
+
+class PyobjContext(object):
+ module = pyobj_property("Module")
+ cls = pyobj_property("Class")
+ instance = pyobj_property("Instance")
+
+
+class PyobjMixin(PyobjContext):
+ _ALLOW_MARKERS = True
+
+ def __init__(self, *k, **kw):
+ super(PyobjMixin, self).__init__(*k, **kw)
+
+ def obj():
+
+ def fget(self):
+ obj = getattr(self, "_obj", None)
+ if obj is None:
+ self._obj = obj = self._getobj()
+ # XXX evil hack
+ # used to avoid Instance collector marker duplication
+ if self._ALLOW_MARKERS:
+ self.own_markers.extend(get_unpacked_marks(self.obj))
+ return obj
+
+ def fset(self, value):
+ self._obj = value
+
+ return property(fget, fset, None, "underlying python object")
+
+ obj = obj()
+
+ def _getobj(self):
+ return getattr(self.parent.obj, self.name)
+
+ def getmodpath(self, stopatmodule=True, includemodule=False):
+ """ return python path relative to the containing module. """
+ chain = self.listchain()
+ chain.reverse()
+ parts = []
+ for node in chain:
+ if isinstance(node, Instance):
+ continue
+ name = node.name
+ if isinstance(node, Module):
+ name = os.path.splitext(name)[0]
+ if stopatmodule:
+ if includemodule:
+ parts.append(name)
+ break
+ parts.append(name)
+ parts.reverse()
+ s = ".".join(parts)
+ return s.replace(".[", "[")
+
+ def _getfslineno(self):
+ return getfslineno(self.obj)
+
+ def reportinfo(self):
+ # XXX caching?
+ obj = self.obj
+ compat_co_firstlineno = getattr(obj, "compat_co_firstlineno", None)
+ if isinstance(compat_co_firstlineno, int):
+ # nose compatibility
+ fspath = sys.modules[obj.__module__].__file__
+ if fspath.endswith(".pyc"):
+ fspath = fspath[:-1]
+ lineno = compat_co_firstlineno
+ else:
+ fspath, lineno = getfslineno(obj)
+ modpath = self.getmodpath()
+ assert isinstance(lineno, int)
+ return fspath, lineno, modpath
+
+
+class PyCollector(PyobjMixin, nodes.Collector):
+
+ def funcnamefilter(self, name):
+ return self._matches_prefix_or_glob_option("python_functions", name)
+
+ def isnosetest(self, obj):
+ """ Look for the __test__ attribute, which is applied by the
+ @nose.tools.istest decorator
+ """
+ # We explicitly check for "is True" here to not mistakenly treat
+ # classes with a custom __getattr__ returning something truthy (like a
+ # function) as test classes.
+ return safe_getattr(obj, "__test__", False) is True
+
+ def classnamefilter(self, name):
+ return self._matches_prefix_or_glob_option("python_classes", name)
+
+ def istestfunction(self, obj, name):
+ if self.funcnamefilter(name) or self.isnosetest(obj):
+ if isinstance(obj, staticmethod):
+ # static methods need to be unwrapped
+ obj = safe_getattr(obj, "__func__", False)
+ if obj is False:
+ # Python 2.6 wraps in a different way that we won't try to handle
+ msg = "cannot collect static method %r because it is not a function"
+ self.warn(code="C2", message=msg % name)
+ return False
+ return (
+ safe_getattr(obj, "__call__", False)
+ and fixtures.getfixturemarker(obj) is None
+ )
+ else:
+ return False
+
+ def istestclass(self, obj, name):
+ return self.classnamefilter(name) or self.isnosetest(obj)
+
+ def _matches_prefix_or_glob_option(self, option_name, name):
+ """
+ checks if the given name matches the prefix or glob-pattern defined
+ in ini configuration.
+ """
+ for option in self.config.getini(option_name):
+ if name.startswith(option):
+ return True
+ # check that name looks like a glob-string before calling fnmatch
+ # because this is called for every name in each collected module,
+ # and fnmatch is somewhat expensive to call
+ elif ("*" in option or "?" in option or "[" in option) and fnmatch.fnmatch(
+ name, option
+ ):
+ return True
+ return False
+
+ def collect(self):
+ if not getattr(self.obj, "__test__", True):
+ return []
+
+ # NB. we avoid random getattrs and peek in the __dict__ instead
+ # (XXX originally introduced from a PyPy need, still true?)
+ dicts = [getattr(self.obj, "__dict__", {})]
+ for basecls in inspect.getmro(self.obj.__class__):
+ dicts.append(basecls.__dict__)
+ seen = {}
+ values = []
+ for dic in dicts:
+ for name, obj in list(dic.items()):
+ if name in seen:
+ continue
+ seen[name] = True
+ res = self._makeitem(name, obj)
+ if res is None:
+ continue
+ if not isinstance(res, list):
+ res = [res]
+ values.extend(res)
+ values.sort(key=lambda item: item.reportinfo()[:2])
+ return values
+
+ def makeitem(self, name, obj):
+ warnings.warn(deprecated.COLLECTOR_MAKEITEM, stacklevel=2)
+ self._makeitem(name, obj)
+
+ def _makeitem(self, name, obj):
+ # assert self.ihook.fspath == self.fspath, self
+ return self.ihook.pytest_pycollect_makeitem(collector=self, name=name, obj=obj)
+
+ def _genfunctions(self, name, funcobj):
+ module = self.getparent(Module).obj
+ clscol = self.getparent(Class)
+ cls = clscol and clscol.obj or None
+ transfer_markers(funcobj, cls, module)
+ fm = self.session._fixturemanager
+
+ definition = FunctionDefinition(name=name, parent=self, callobj=funcobj)
+ fixtureinfo = fm.getfixtureinfo(definition, funcobj, cls)
+
+ metafunc = Metafunc(
+ definition, fixtureinfo, self.config, cls=cls, module=module
+ )
+ methods = []
+ if hasattr(module, "pytest_generate_tests"):
+ methods.append(module.pytest_generate_tests)
+ if hasattr(cls, "pytest_generate_tests"):
+ methods.append(cls().pytest_generate_tests)
+ if methods:
+ self.ihook.pytest_generate_tests.call_extra(
+ methods, dict(metafunc=metafunc)
+ )
+ else:
+ self.ihook.pytest_generate_tests(metafunc=metafunc)
+
+ Function = self._getcustomclass("Function")
+ if not metafunc._calls:
+ yield Function(name, parent=self, fixtureinfo=fixtureinfo)
+ else:
+ # add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs
+ fixtures.add_funcarg_pseudo_fixture_def(self, metafunc, fm)
+
+ for callspec in metafunc._calls:
+ subname = "%s[%s]" % (name, callspec.id)
+ yield Function(
+ name=subname,
+ parent=self,
+ callspec=callspec,
+ callobj=funcobj,
+ fixtureinfo=fixtureinfo,
+ keywords={callspec.id: True},
+ originalname=name,
+ )
+
+
+class Module(nodes.File, PyCollector):
+ """ Collector for test classes and functions. """
+
+ def _getobj(self):
+ return self._importtestmodule()
+
+ def collect(self):
+ self.session._fixturemanager.parsefactories(self)
+ return super(Module, self).collect()
+
+ def _importtestmodule(self):
+ # we assume we are only called once per module
+ importmode = self.config.getoption("--import-mode")
+ try:
+ mod = self.fspath.pyimport(ensuresyspath=importmode)
+ except SyntaxError:
+ raise self.CollectError(
+ _pytest._code.ExceptionInfo().getrepr(style="short")
+ )
+ except self.fspath.ImportMismatchError:
+ e = sys.exc_info()[1]
+ raise self.CollectError(
+ "import file mismatch:\n"
+ "imported module %r has this __file__ attribute:\n"
+ " %s\n"
+ "which is not the same as the test file we want to collect:\n"
+ " %s\n"
+ "HINT: remove __pycache__ / .pyc files and/or use a "
+ "unique basename for your test file modules" % e.args
+ )
+ except ImportError:
+ from _pytest._code.code import ExceptionInfo
+
+ exc_info = ExceptionInfo()
+ if self.config.getoption("verbose") < 2:
+ exc_info.traceback = exc_info.traceback.filter(filter_traceback)
+ exc_repr = exc_info.getrepr(
+ style="short"
+ ) if exc_info.traceback else exc_info.exconly()
+ formatted_tb = safe_str(exc_repr)
+ raise self.CollectError(
+ "ImportError while importing test module '{fspath}'.\n"
+ "Hint: make sure your test modules/packages have valid Python names.\n"
+ "Traceback:\n"
+ "{traceback}".format(fspath=self.fspath, traceback=formatted_tb)
+ )
+ except _pytest.runner.Skipped as e:
+ if e.allow_module_level:
+ raise
+ raise self.CollectError(
+ "Using pytest.skip outside of a test is not allowed. "
+ "To decorate a test function, use the @pytest.mark.skip "
+ "or @pytest.mark.skipif decorators instead, and to skip a "
+                "module use `pytestmark = pytest.mark.{skip,skipif}`."
+ )
+ self.config.pluginmanager.consider_module(mod)
+ return mod
+
+ def setup(self):
+ setup_module = _get_xunit_setup_teardown(self.obj, "setUpModule")
+ if setup_module is None:
+ setup_module = _get_xunit_setup_teardown(self.obj, "setup_module")
+ if setup_module is not None:
+ setup_module()
+
+ teardown_module = _get_xunit_setup_teardown(self.obj, "tearDownModule")
+ if teardown_module is None:
+ teardown_module = _get_xunit_setup_teardown(self.obj, "teardown_module")
+ if teardown_module is not None:
+ self.addfinalizer(teardown_module)
+
+
+def _get_xunit_setup_teardown(holder, attr_name, param_obj=None):
+ """
+ Return a callable to perform xunit-style setup or teardown if
+ the function exists in the ``holder`` object.
+ The ``param_obj`` parameter is the parameter which will be passed to the function
+ when the callable is called without arguments, defaults to the ``holder`` object.
+ Return ``None`` if a suitable callable is not found.
+ """
+ param_obj = param_obj if param_obj is not None else holder
+ result = _get_xunit_func(holder, attr_name)
+ if result is not None:
+ arg_count = result.__code__.co_argcount
+ if inspect.ismethod(result):
+ arg_count -= 1
+ if arg_count:
+ return lambda: result(param_obj)
+ else:
+ return result
+
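+# For illustration (hypothetical test module, not part of this file): with
+#
+#   def setup_module(module):
+#       module.conn = make_connection()   # make_connection is a made-up helper
+#
+#   def teardown_module():
+#       pass
+#
+# the helper above returns ``lambda: setup_module(the_module)`` for the first
+# function (it takes one positional argument) and ``teardown_module`` itself
+# for the second (it takes none), so both xunit call styles work transparently.
+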
+
+def _get_xunit_func(obj, name):
+ """Return the attribute from the given object to be used as a setup/teardown
+ xunit-style function, but only if not marked as a fixture to
+ avoid calling it twice.
+ """
+ meth = getattr(obj, name, None)
+ if fixtures.getfixturemarker(meth) is None:
+ return meth
+
+
+class Class(PyCollector):
+ """ Collector for test methods. """
+
+ def collect(self):
+ if not safe_getattr(self.obj, "__test__", True):
+ return []
+ if hasinit(self.obj):
+ self.warn(
+ "C1",
+ "cannot collect test class %r because it has a "
+ "__init__ constructor" % self.obj.__name__,
+ )
+ return []
+ elif hasnew(self.obj):
+ self.warn(
+ "C1",
+ "cannot collect test class %r because it has a "
+ "__new__ constructor" % self.obj.__name__,
+ )
+ return []
+ return [self._getcustomclass("Instance")(name="()", parent=self)]
+
+ def setup(self):
+ setup_class = _get_xunit_func(self.obj, "setup_class")
+ if setup_class is not None:
+ setup_class = getattr(setup_class, "im_func", setup_class)
+ setup_class = getattr(setup_class, "__func__", setup_class)
+ setup_class(self.obj)
+
+ fin_class = getattr(self.obj, "teardown_class", None)
+ if fin_class is not None:
+ fin_class = getattr(fin_class, "im_func", fin_class)
+ fin_class = getattr(fin_class, "__func__", fin_class)
+ self.addfinalizer(lambda: fin_class(self.obj))
+
+
+class Instance(PyCollector):
+ _ALLOW_MARKERS = False # hack, destroy later
+ # instances share the object with their parents in a way
+    # that duplicates marker instances if not taken out;
+    # can be removed at node structure reorganization time
+
+ def _getobj(self):
+ return self.parent.obj()
+
+ def collect(self):
+ self.session._fixturemanager.parsefactories(self)
+ return super(Instance, self).collect()
+
+ def newinstance(self):
+ self.obj = self._getobj()
+ return self.obj
+
+
+class FunctionMixin(PyobjMixin):
+ """ mixin for the code common to Function and Generator.
+ """
+
+ def setup(self):
+ """ perform setup for this test function. """
+ if hasattr(self, "_preservedparent"):
+ obj = self._preservedparent
+ elif isinstance(self.parent, Instance):
+ obj = self.parent.newinstance()
+ self.obj = self._getobj()
+ else:
+ obj = self.parent.obj
+ if inspect.ismethod(self.obj):
+ setup_name = "setup_method"
+ teardown_name = "teardown_method"
+ else:
+ setup_name = "setup_function"
+ teardown_name = "teardown_function"
+ setup_func_or_method = _get_xunit_setup_teardown(
+ obj, setup_name, param_obj=self.obj
+ )
+ if setup_func_or_method is not None:
+ setup_func_or_method()
+ teardown_func_or_method = _get_xunit_setup_teardown(
+ obj, teardown_name, param_obj=self.obj
+ )
+ if teardown_func_or_method is not None:
+ self.addfinalizer(teardown_func_or_method)
+
+ def _prunetraceback(self, excinfo):
+ if hasattr(self, "_obj") and not self.config.option.fulltrace:
+ code = _pytest._code.Code(get_real_func(self.obj))
+ path, firstlineno = code.path, code.firstlineno
+ traceback = excinfo.traceback
+ ntraceback = traceback.cut(path=path, firstlineno=firstlineno)
+ if ntraceback == traceback:
+ ntraceback = ntraceback.cut(path=path)
+ if ntraceback == traceback:
+ ntraceback = ntraceback.filter(filter_traceback)
+ if not ntraceback:
+ ntraceback = traceback
+
+ excinfo.traceback = ntraceback.filter()
+ # issue364: mark all but first and last frames to
+ # only show a single-line message for each frame
+ if self.config.option.tbstyle == "auto":
+ if len(excinfo.traceback) > 2:
+ for entry in excinfo.traceback[1:-1]:
+ entry.set_repr_style("short")
+
+ def _repr_failure_py(self, excinfo, style="long"):
+ if excinfo.errisinstance(fail.Exception):
+ if not excinfo.value.pytrace:
+ return py._builtin._totext(excinfo.value)
+ return super(FunctionMixin, self)._repr_failure_py(excinfo, style=style)
+
+ def repr_failure(self, excinfo, outerr=None):
+ assert outerr is None, "XXX outerr usage is deprecated"
+ style = self.config.option.tbstyle
+ if style == "auto":
+ style = "long"
+ return self._repr_failure_py(excinfo, style=style)
+
+
+class Generator(FunctionMixin, PyCollector):
+
+ def collect(self):
+ # test generators are seen as collectors but they also
+ # invoke setup/teardown on popular request
+ # (induced by the common "test_*" naming shared with normal tests)
+ from _pytest import deprecated
+
+ self.session._setupstate.prepare(self)
+ # see FunctionMixin.setup and test_setupstate_is_preserved_134
+ self._preservedparent = self.parent.obj
+ values = []
+ seen = {}
+ for i, x in enumerate(self.obj()):
+ name, call, args = self.getcallargs(x)
+ if not callable(call):
+ raise TypeError("%r yielded non callable test %r" % (self.obj, call))
+ if name is None:
+ name = "[%d]" % i
+ else:
+ name = "['%s']" % name
+ if name in seen:
+ raise ValueError(
+ "%r generated tests with non-unique name %r" % (self, name)
+ )
+ seen[name] = True
+ values.append(self.Function(name, self, args=args, callobj=call))
+ self.warn("C1", deprecated.YIELD_TESTS)
+ return values
+
+ def getcallargs(self, obj):
+ if not isinstance(obj, (tuple, list)):
+ obj = (obj,)
+ # explicit naming
+ if isinstance(obj[0], six.string_types):
+ name = obj[0]
+ obj = obj[1:]
+ else:
+ name = None
+ call, args = obj[0], obj[1:]
+ return name, call, args
+
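+# For illustration (hypothetical, deprecated yield-test style): a module-level
+# generator such as
+#
+#   def test_squares():
+#       for n in (2, 3):
+#           yield check_square, n   # check_square is a made-up helper
+#
+# is collected by Generator.collect above as two sub-items named "[0]" and
+# "[1]" under test_squares (or "['label']" when a string is yielded first),
+# and triggers the YIELD_TESTS deprecation warning.
+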
+
+def hasinit(obj):
+ init = getattr(obj, "__init__", None)
+ if init:
+ return init != object.__init__
+
+
+def hasnew(obj):
+ new = getattr(obj, "__new__", None)
+ if new:
+ return new != object.__new__
+
+
+class CallSpec2(object):
+
+ def __init__(self, metafunc):
+ self.metafunc = metafunc
+ self.funcargs = {}
+ self._idlist = []
+ self.params = {}
+ self._globalid = NOTSET
+ self._globalid_args = set()
+ self._globalparam = NOTSET
+ self._arg2scopenum = {} # used for sorting parametrized resources
+ self.marks = []
+ self.indices = {}
+
+ def copy(self, metafunc):
+ cs = CallSpec2(self.metafunc)
+ cs.funcargs.update(self.funcargs)
+ cs.params.update(self.params)
+ cs.marks.extend(self.marks)
+ cs.indices.update(self.indices)
+ cs._arg2scopenum.update(self._arg2scopenum)
+ cs._idlist = list(self._idlist)
+ cs._globalid = self._globalid
+ cs._globalid_args = self._globalid_args
+ cs._globalparam = self._globalparam
+ return cs
+
+ def _checkargnotcontained(self, arg):
+ if arg in self.params or arg in self.funcargs:
+ raise ValueError("duplicate %r" % (arg,))
+
+ def getparam(self, name):
+ try:
+ return self.params[name]
+ except KeyError:
+ if self._globalparam is NOTSET:
+ raise ValueError(name)
+ return self._globalparam
+
+ @property
+ def id(self):
+ return "-".join(map(str, filter(None, self._idlist)))
+
+ def setmulti2(self, valtypes, argnames, valset, id, marks, scopenum, param_index):
+ for arg, val in zip(argnames, valset):
+ self._checkargnotcontained(arg)
+ valtype_for_arg = valtypes[arg]
+ getattr(self, valtype_for_arg)[arg] = val
+ self.indices[arg] = param_index
+ self._arg2scopenum[arg] = scopenum
+ self._idlist.append(id)
+ self.marks.extend(marks)
+
+ def setall(self, funcargs, id, param):
+ for x in funcargs:
+ self._checkargnotcontained(x)
+ self.funcargs.update(funcargs)
+ if id is not NOTSET:
+ self._idlist.append(id)
+ if param is not NOTSET:
+ assert self._globalparam is NOTSET
+ self._globalparam = param
+ for arg in funcargs:
+ self._arg2scopenum[arg] = fixtures.scopenum_function
+
+
+class Metafunc(fixtures.FuncargnamesCompatAttr):
+ """
+ Metafunc objects are passed to the :func:`pytest_generate_tests <_pytest.hookspec.pytest_generate_tests>` hook.
+ They help to inspect a test function and to generate tests according to
+ test configuration or values specified in the class or module where a
+ test function is defined.
+ """
+
+ def __init__(self, definition, fixtureinfo, config, cls=None, module=None):
+ #: access to the :class:`_pytest.config.Config` object for the test session
+ assert (
+ isinstance(definition, FunctionDefinition)
+ or type(definition).__name__ == "DefinitionMock"
+ )
+ self.definition = definition
+ self.config = config
+
+ #: the module object where the test function is defined in.
+ self.module = module
+
+ #: underlying python test function
+ self.function = definition.obj
+
+ #: set of fixture names required by the test function
+ self.fixturenames = fixtureinfo.names_closure
+
+ #: class object where the test function is defined in or ``None``.
+ self.cls = cls
+
+ self._calls = []
+ self._ids = set()
+ self._arg2fixturedefs = fixtureinfo.name2fixturedefs
+
+ def parametrize(self, argnames, argvalues, indirect=False, ids=None, scope=None):
+ """ Add new invocations to the underlying test function using the list
+ of argvalues for the given argnames. Parametrization is performed
+        during the collection phase.  If you need to set up expensive resources,
+        consider setting ``indirect`` so the work happens at test setup time
+        rather than at collection time.
+
+ :arg argnames: a comma-separated string denoting one or more argument
+ names, or a list/tuple of argument strings.
+
+ :arg argvalues: The list of argvalues determines how often a
+ test is invoked with different argument values. If only one
+ argname was specified argvalues is a list of values. If N
+ argnames were specified, argvalues must be a list of N-tuples,
+ where each tuple-element specifies a value for its respective
+ argname.
+
+        :arg indirect: Either a boolean or a list of argument names (a subset
+            of argnames). If True, all argnames are treated this way. Each
+            argvalue corresponding to an argname in this list will be passed as
+            request.param to its respective argname fixture function so that it
+            can perform more expensive setups during the setup phase of a test
+            rather than at collection time.
+
+ :arg ids: list of string ids, or a callable.
+            If strings, each corresponds to one set of argvalues so that they are
+            part of the test id. If None is given as the id of a specific test, the
+ automatically generated id for that argument will be used.
+ If callable, it should take one argument (a single argvalue) and return
+ a string or return None. If None, the automatically generated id for that
+ argument will be used.
+ If no ids are provided they will be generated automatically from
+ the argvalues.
+
+ :arg scope: if specified it denotes the scope of the parameters.
+ The scope is used for grouping tests by parameter instances.
+            It will also override any fixture-function defined scope, allowing
+            a dynamic scope to be set using test context or configuration.
+ """
+ from _pytest.fixtures import scope2index
+ from _pytest.mark import ParameterSet
+ from py.io import saferepr
+
+ argnames, parameters = ParameterSet._for_parametrize(
+ argnames, argvalues, self.function, self.config
+ )
+ del argvalues
+ default_arg_names = set(get_default_arg_names(self.function))
+
+ if scope is None:
+ scope = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect)
+
+ scopenum = scope2index(scope, descr="call to {}".format(self.parametrize))
+ valtypes = {}
+ for arg in argnames:
+ if arg not in self.fixturenames:
+ if arg in default_arg_names:
+ raise ValueError(
+ "%r already takes an argument %r with a default value"
+ % (self.function, arg)
+ )
+ else:
+ if isinstance(indirect, (tuple, list)):
+ name = "fixture" if arg in indirect else "argument"
+ else:
+ name = "fixture" if indirect else "argument"
+ raise ValueError("%r uses no %s %r" % (self.function, name, arg))
+
+ if indirect is True:
+ valtypes = dict.fromkeys(argnames, "params")
+ elif indirect is False:
+ valtypes = dict.fromkeys(argnames, "funcargs")
+ elif isinstance(indirect, (tuple, list)):
+ valtypes = dict.fromkeys(argnames, "funcargs")
+ for arg in indirect:
+ if arg not in argnames:
+ raise ValueError(
+ "indirect given to %r: fixture %r doesn't exist"
+ % (self.function, arg)
+ )
+ valtypes[arg] = "params"
+ idfn = None
+ if callable(ids):
+ idfn = ids
+ ids = None
+ if ids:
+ if len(ids) != len(parameters):
+ raise ValueError(
+ "%d tests specified with %d ids" % (len(parameters), len(ids))
+ )
+ for id_value in ids:
+ if id_value is not None and not isinstance(id_value, six.string_types):
+ msg = "ids must be list of strings, found: %s (type: %s)"
+ raise ValueError(
+ msg % (saferepr(id_value), type(id_value).__name__)
+ )
+ ids = idmaker(argnames, parameters, idfn, ids, self.config)
+ newcalls = []
+ for callspec in self._calls or [CallSpec2(self)]:
+ elements = zip(ids, parameters, count())
+ for a_id, param, param_index in elements:
+ if len(param.values) != len(argnames):
+ raise ValueError(
+ 'In "parametrize" the number of values ({}) must be '
+ "equal to the number of names ({})".format(
+ param.values, argnames
+ )
+ )
+ newcallspec = callspec.copy(self)
+ newcallspec.setmulti2(
+ valtypes,
+ argnames,
+ param.values,
+ a_id,
+ param.marks,
+ scopenum,
+ param_index,
+ )
+ newcalls.append(newcallspec)
+ self._calls = newcalls
+
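+    # For illustration (hypothetical conftest.py hook, not part of this class):
+    #
+    #   def pytest_generate_tests(metafunc):
+    #       if "db_url" in metafunc.fixturenames:   # "db_url" is a made-up fixture
+    #           metafunc.parametrize(
+    #               "db_url",
+    #               ["sqlite://", "postgresql://localhost/test"],
+    #               indirect=True,
+    #               ids=["sqlite", "postgres"],
+    #           )
+    #
+    # With indirect=True each value reaches the ``db_url`` fixture as
+    # request.param at setup time instead of being passed straight to the test
+    # function, and the explicit ids become part of the generated test ids.
+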
+ def addcall(self, funcargs=None, id=NOTSET, param=NOTSET):
+ """ Add a new call to the underlying test function during the collection phase of a test run.
+
+ .. deprecated:: 3.3
+
+ Use :meth:`parametrize` instead.
+
+        Note that addcall() is invoked during the test collection phase, prior
+        to and independently of actual test execution.  You should only use addcall()
+ if you need to specify multiple arguments of a test function.
+
+ :arg funcargs: argument keyword dictionary used when invoking
+ the test function.
+
+ :arg id: used for reporting and identification purposes. If you
+ don't supply an `id` an automatic unique id will be generated.
+
+ :arg param: a parameter which will be exposed to a later fixture function
+ invocation through the ``request.param`` attribute.
+ """
+ if self.config:
+ self.config.warn(
+ "C1", message=deprecated.METAFUNC_ADD_CALL, fslocation=None
+ )
+ assert funcargs is None or isinstance(funcargs, dict)
+ if funcargs is not None:
+ for name in funcargs:
+ if name not in self.fixturenames:
+ fail("funcarg %r not used in this function." % name)
+ else:
+ funcargs = {}
+ if id is None:
+ raise ValueError("id=None not allowed")
+ if id is NOTSET:
+ id = len(self._calls)
+ id = str(id)
+ if id in self._ids:
+ raise ValueError("duplicate id %r" % id)
+ self._ids.add(id)
+
+ cs = CallSpec2(self)
+ cs.setall(funcargs, id, param)
+ self._calls.append(cs)
+
+
+def _find_parametrized_scope(argnames, arg2fixturedefs, indirect):
+ """Find the most appropriate scope for a parametrized call based on its arguments.
+
+ When there's at least one direct argument, always use "function" scope.
+
+ When a test function is parametrized and all its arguments are indirect
+ (e.g. fixtures), return the most narrow scope based on the fixtures used.
+
+ Related to issue #1832, based on code posted by @Kingdread.
+ """
+ from _pytest.fixtures import scopes
+
+ indirect_as_list = isinstance(indirect, (list, tuple))
+    all_arguments_are_fixtures = indirect is True or (
+        indirect_as_list and len(indirect) == len(argnames)
+    )
+ if all_arguments_are_fixtures:
+ fixturedefs = arg2fixturedefs or {}
+ used_scopes = [fixturedef[0].scope for name, fixturedef in fixturedefs.items()]
+ if used_scopes:
+ # Takes the most narrow scope from used fixtures
+ for scope in reversed(scopes):
+ if scope in used_scopes:
+ return scope
+
+ return "function"
+
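+# For illustration (hypothetical fixtures, not part of this module): if a test
+# is parametrized with indirect=True over a "session"-scoped fixture ``db`` and
+# a "function"-scoped fixture ``cfg``, the narrowest used scope, "function", is
+# returned; as soon as any direct (non-fixture) argument is parametrized the
+# result is always "function".
+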
+
+def _idval(val, argname, idx, idfn, config=None):
+ if idfn:
+ s = None
+ try:
+ s = idfn(val)
+ except Exception:
+ # See issue https://github.com/pytest-dev/pytest/issues/2169
+ import warnings
+
+ msg = "Raised while trying to determine id of parameter %s at position %d." % (
+ argname, idx
+ )
+ msg += "\nUpdate your code as this will raise an error in pytest-4.0."
+ warnings.warn(msg, DeprecationWarning)
+ if s:
+ return ascii_escaped(s)
+
+ if config:
+ hook_id = config.hook.pytest_make_parametrize_id(
+ config=config, val=val, argname=argname
+ )
+ if hook_id:
+ return hook_id
+
+ if isinstance(val, STRING_TYPES):
+ return ascii_escaped(val)
+ elif isinstance(val, (float, int, bool, NoneType)):
+ return str(val)
+ elif isinstance(val, REGEX_TYPE):
+ return ascii_escaped(val.pattern)
+ elif enum is not None and isinstance(val, enum.Enum):
+ return str(val)
+ elif (isclass(val) or isfunction(val)) and hasattr(val, "__name__"):
+ return val.__name__
+ return str(argname) + str(idx)
+
+
+def _idvalset(idx, parameterset, argnames, idfn, ids, config=None):
+ if parameterset.id is not None:
+ return parameterset.id
+ if ids is None or (idx >= len(ids) or ids[idx] is None):
+ this_id = [
+ _idval(val, argname, idx, idfn, config)
+ for val, argname in zip(parameterset.values, argnames)
+ ]
+ return "-".join(this_id)
+ else:
+ return ascii_escaped(ids[idx])
+
+
+def idmaker(argnames, parametersets, idfn=None, ids=None, config=None):
+ ids = [
+ _idvalset(valindex, parameterset, argnames, idfn, ids, config)
+ for valindex, parameterset in enumerate(parametersets)
+ ]
+ if len(set(ids)) != len(ids):
+ # The ids are not unique
+ duplicates = [testid for testid in ids if ids.count(testid) > 1]
+ counters = collections.defaultdict(lambda: 0)
+ for index, testid in enumerate(ids):
+ if testid in duplicates:
+ ids[index] = testid + str(counters[testid])
+ counters[testid] += 1
+ return ids
+
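+# Worked example (for illustration only): for a single argname with values
+# [1.0, 1.0, 2.0], _idval yields ["1.0", "1.0", "2.0"]; since the first two
+# collide, idmaker() appends a counter to every duplicate, producing
+# ["1.00", "1.01", "2.0"] so each generated test keeps a unique id.
+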
+
+def show_fixtures_per_test(config):
+ from _pytest.main import wrap_session
+
+ return wrap_session(config, _show_fixtures_per_test)
+
+
+def _show_fixtures_per_test(config, session):
+ import _pytest.config
+
+ session.perform_collect()
+ curdir = py.path.local()
+ tw = _pytest.config.create_terminal_writer(config)
+ verbose = config.getvalue("verbose")
+
+ def get_best_relpath(func):
+ loc = getlocation(func, curdir)
+ return curdir.bestrelpath(loc)
+
+ def write_fixture(fixture_def):
+ argname = fixture_def.argname
+ if verbose <= 0 and argname.startswith("_"):
+ return
+ if verbose > 0:
+ bestrel = get_best_relpath(fixture_def.func)
+ funcargspec = "{} -- {}".format(argname, bestrel)
+ else:
+ funcargspec = argname
+ tw.line(funcargspec, green=True)
+ fixture_doc = fixture_def.func.__doc__
+ if fixture_doc:
+ write_docstring(tw, fixture_doc)
+ else:
+ tw.line(" no docstring available", red=True)
+
+ def write_item(item):
+ try:
+ info = item._fixtureinfo
+ except AttributeError:
+ # doctests items have no _fixtureinfo attribute
+ return
+ if not info.name2fixturedefs:
+ # this test item does not use any fixtures
+ return
+ tw.line()
+ tw.sep("-", "fixtures used by {}".format(item.name))
+ tw.sep("-", "({})".format(get_best_relpath(item.function)))
+ # dict key not used in loop but needed for sorting
+ for _, fixturedefs in sorted(info.name2fixturedefs.items()):
+ assert fixturedefs is not None
+ if not fixturedefs:
+ continue
+ # last item is expected to be the one used by the test item
+ write_fixture(fixturedefs[-1])
+
+ for session_item in session.items:
+ write_item(session_item)
+
+
+def showfixtures(config):
+ from _pytest.main import wrap_session
+
+ return wrap_session(config, _showfixtures_main)
+
+
+def _showfixtures_main(config, session):
+ import _pytest.config
+
+ session.perform_collect()
+ curdir = py.path.local()
+ tw = _pytest.config.create_terminal_writer(config)
+ verbose = config.getvalue("verbose")
+
+ fm = session._fixturemanager
+
+ available = []
+ seen = set()
+
+ for argname, fixturedefs in fm._arg2fixturedefs.items():
+ assert fixturedefs is not None
+ if not fixturedefs:
+ continue
+ for fixturedef in fixturedefs:
+ loc = getlocation(fixturedef.func, curdir)
+ if (fixturedef.argname, loc) in seen:
+ continue
+ seen.add((fixturedef.argname, loc))
+ available.append(
+ (
+ len(fixturedef.baseid),
+ fixturedef.func.__module__,
+ curdir.bestrelpath(loc),
+ fixturedef.argname,
+ fixturedef,
+ )
+ )
+
+ available.sort()
+ currentmodule = None
+ for baseid, module, bestrel, argname, fixturedef in available:
+ if currentmodule != module:
+ if not module.startswith("_pytest."):
+ tw.line()
+ tw.sep("-", "fixtures defined from %s" % (module,))
+ currentmodule = module
+ if verbose <= 0 and argname[0] == "_":
+ continue
+ if verbose > 0:
+ funcargspec = "%s -- %s" % (argname, bestrel)
+ else:
+ funcargspec = argname
+ tw.line(funcargspec, green=True)
+ loc = getlocation(fixturedef.func, curdir)
+ doc = fixturedef.func.__doc__ or ""
+ if doc:
+ write_docstring(tw, doc)
+ else:
+ tw.line(" %s: no docstring available" % (loc,), red=True)
+
+
+def write_docstring(tw, doc):
+ INDENT = " "
+ doc = doc.rstrip()
+ if "\n" in doc:
+ firstline, rest = doc.split("\n", 1)
+ else:
+ firstline, rest = doc, ""
+
+ if firstline.strip():
+ tw.line(INDENT + firstline.strip())
+
+ if rest:
+ for line in dedent(rest).split("\n"):
+ tw.write(INDENT + line + "\n")
+
+
+class Function(FunctionMixin, nodes.Item, fixtures.FuncargnamesCompatAttr):
+ """ a Function Item is responsible for setting up and executing a
+ Python test function.
+ """
+ _genid = None
+    # disable since functions handle it themselves
+ _ALLOW_MARKERS = False
+
+ def __init__(
+ self,
+ name,
+ parent,
+ args=None,
+ config=None,
+ callspec=None,
+ callobj=NOTSET,
+ keywords=None,
+ session=None,
+ fixtureinfo=None,
+ originalname=None,
+ ):
+ super(Function, self).__init__(name, parent, config=config, session=session)
+ self._args = args
+ if callobj is not NOTSET:
+ self.obj = callobj
+
+ self.keywords.update(self.obj.__dict__)
+ self.own_markers.extend(get_unpacked_marks(self.obj))
+ if callspec:
+ self.callspec = callspec
+            # this is totally hostile and a mess
+ # keywords are broken by design by now
+ # this will be redeemed later
+ for mark in callspec.marks:
+ # feel free to cry, this was broken for years before
+                # and keywords can't fix it per design
+ self.keywords[mark.name] = mark
+ self.own_markers.extend(callspec.marks)
+ if keywords:
+ self.keywords.update(keywords)
+
+ if fixtureinfo is None:
+ fixtureinfo = self.session._fixturemanager.getfixtureinfo(
+ self, self.obj, self.cls, funcargs=not self._isyieldedfunction()
+ )
+ self._fixtureinfo = fixtureinfo
+ self.fixturenames = fixtureinfo.names_closure
+ self._initrequest()
+
+ #: original function name, without any decorations (for example
+ #: parametrization adds a ``"[...]"`` suffix to function names).
+ #:
+ #: .. versionadded:: 3.0
+ self.originalname = originalname
+
+ def _initrequest(self):
+ self.funcargs = {}
+ if self._isyieldedfunction():
+ assert not hasattr(
+ self, "callspec"
+ ), "yielded functions (deprecated) cannot have funcargs"
+ else:
+ if hasattr(self, "callspec"):
+ callspec = self.callspec
+ assert not callspec.funcargs
+ self._genid = callspec.id
+ if hasattr(callspec, "param"):
+ self.param = callspec.param
+ self._request = fixtures.FixtureRequest(self)
+
+ @property
+ def function(self):
+ "underlying python 'function' object"
+ return getattr(self.obj, "im_func", self.obj)
+
+ def _getobj(self):
+ name = self.name
+ i = name.find("[") # parametrization
+ if i != -1:
+ name = name[:i]
+ return getattr(self.parent.obj, name)
+
+ @property
+ def _pyfuncitem(self):
+ "(compatonly) for code expecting pytest-2.2 style request objects"
+ return self
+
+ def _isyieldedfunction(self):
+ return getattr(self, "_args", None) is not None
+
+ def runtest(self):
+ """ execute the underlying test function. """
+ self.ihook.pytest_pyfunc_call(pyfuncitem=self)
+
+ def setup(self):
+ super(Function, self).setup()
+ fixtures.fillfixtures(self)
+
+
+class FunctionDefinition(Function):
+ """
+ internal hack until we get actual definition nodes instead of the
+ crappy metafunc hack
+ """
+
+ def runtest(self):
+ raise RuntimeError("function definitions are not supposed to be used")
+
+ setup = runtest
diff --git a/third_party/python/pytest/src/_pytest/python_api.py b/third_party/python/pytest/src/_pytest/python_api.py
new file mode 100644
index 0000000000..9257d210c2
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/python_api.py
@@ -0,0 +1,674 @@
+import math
+import sys
+
+import py
+from six.moves import zip, filterfalse
+from more_itertools.more import always_iterable
+
+from _pytest.compat import isclass
+
+from _pytest.compat import Mapping, Sequence
+from _pytest.compat import STRING_TYPES
+
+from _pytest.outcomes import fail
+import _pytest._code
+
+BASE_TYPE = (type, STRING_TYPES)
+
+
+def _cmp_raises_type_error(self, other):
+ """__cmp__ implementation which raises TypeError. Used
+ by Approx base classes to implement only == and != and raise a
+ TypeError for other comparisons.
+
+    Needed in Python 2 only; in Python 3 it is enough simply not to implement
+    the other operators at all.
+ """
+ __tracebackhide__ = True
+ raise TypeError(
+ "Comparison operators other than == and != not supported by approx objects"
+ )
+
+
+# builtin pytest.approx helper
+
+
+class ApproxBase(object):
+ """
+ Provide shared utilities for making approximate comparisons between numbers
+ or sequences of numbers.
+ """
+
+    # Tell numpy to use our `__eq__` operator instead of its own.
+ __array_ufunc__ = None
+ __array_priority__ = 100
+
+ def __init__(self, expected, rel=None, abs=None, nan_ok=False):
+ self.expected = expected
+ self.abs = abs
+ self.rel = rel
+ self.nan_ok = nan_ok
+
+ def __repr__(self):
+ raise NotImplementedError
+
+ def __eq__(self, actual):
+ return all(
+ a == self._approx_scalar(x) for a, x in self._yield_comparisons(actual)
+ )
+
+ __hash__ = None
+
+ def __ne__(self, actual):
+ return not (actual == self)
+
+ if sys.version_info[0] == 2:
+ __cmp__ = _cmp_raises_type_error
+
+ def _approx_scalar(self, x):
+ return ApproxScalar(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok)
+
+ def _yield_comparisons(self, actual):
+ """
+ Yield all the pairs of numbers to be compared. This is used to
+ implement the `__eq__` method.
+ """
+ raise NotImplementedError
+
+
+class ApproxNumpy(ApproxBase):
+ """
+ Perform approximate comparisons for numpy arrays.
+ """
+
+ def __repr__(self):
+ # It might be nice to rewrite this function to account for the
+ # shape of the array...
+ import numpy as np
+
+ return "approx({!r})".format(
+ list(self._approx_scalar(x) for x in np.asarray(self.expected))
+ )
+
+ if sys.version_info[0] == 2:
+ __cmp__ = _cmp_raises_type_error
+
+ def __eq__(self, actual):
+ import numpy as np
+
+ # self.expected is supposed to always be an array here
+
+ if not np.isscalar(actual):
+ try:
+ actual = np.asarray(actual)
+ except: # noqa
+ raise TypeError("cannot compare '{}' to numpy.ndarray".format(actual))
+
+ if not np.isscalar(actual) and actual.shape != self.expected.shape:
+ return False
+
+ return ApproxBase.__eq__(self, actual)
+
+ def _yield_comparisons(self, actual):
+ import numpy as np
+
+ # `actual` can either be a numpy array or a scalar, it is treated in
+ # `__eq__` before being passed to `ApproxBase.__eq__`, which is the
+ # only method that calls this one.
+
+ if np.isscalar(actual):
+ for i in np.ndindex(self.expected.shape):
+ yield actual, np.asscalar(self.expected[i])
+ else:
+ for i in np.ndindex(self.expected.shape):
+ yield np.asscalar(actual[i]), np.asscalar(self.expected[i])
+
+
+class ApproxMapping(ApproxBase):
+ """
+ Perform approximate comparisons for mappings where the values are numbers
+ (the keys can be anything).
+ """
+
+ def __repr__(self):
+ return "approx({!r})".format(
+ {k: self._approx_scalar(v) for k, v in self.expected.items()}
+ )
+
+ def __eq__(self, actual):
+ if set(actual.keys()) != set(self.expected.keys()):
+ return False
+
+ return ApproxBase.__eq__(self, actual)
+
+ def _yield_comparisons(self, actual):
+ for k in self.expected.keys():
+ yield actual[k], self.expected[k]
+
+
+class ApproxSequence(ApproxBase):
+ """
+ Perform approximate comparisons for sequences of numbers.
+ """
+
+ def __repr__(self):
+ seq_type = type(self.expected)
+ if seq_type not in (tuple, list, set):
+ seq_type = list
+ return "approx({!r})".format(
+ seq_type(self._approx_scalar(x) for x in self.expected)
+ )
+
+ def __eq__(self, actual):
+ if len(actual) != len(self.expected):
+ return False
+ return ApproxBase.__eq__(self, actual)
+
+ def _yield_comparisons(self, actual):
+ return zip(actual, self.expected)
+
+
+class ApproxScalar(ApproxBase):
+ """
+ Perform approximate comparisons for single numbers only.
+ """
+ DEFAULT_ABSOLUTE_TOLERANCE = 1e-12
+ DEFAULT_RELATIVE_TOLERANCE = 1e-6
+
+ def __repr__(self):
+ """
+ Return a string communicating both the expected value and the tolerance
+ for the comparison being made, e.g. '1.0 +- 1e-6'. Use the unicode
+ plus/minus symbol if this is python3 (it's too hard to get right for
+ python2).
+ """
+ if isinstance(self.expected, complex):
+ return str(self.expected)
+
+ # Infinities aren't compared using tolerances, so don't show a
+ # tolerance.
+ if math.isinf(self.expected):
+ return str(self.expected)
+
+ # If a sensible tolerance can't be calculated, self.tolerance will
+ # raise a ValueError. In this case, display '???'.
+ try:
+ vetted_tolerance = "{:.1e}".format(self.tolerance)
+ except ValueError:
+ vetted_tolerance = "???"
+
+ if sys.version_info[0] == 2:
+ return "{} +- {}".format(self.expected, vetted_tolerance)
+ else:
+ return u"{} \u00b1 {}".format(self.expected, vetted_tolerance)
+
+ def __eq__(self, actual):
+ """
+ Return true if the given value is equal to the expected value within
+ the pre-specified tolerance.
+ """
+ if _is_numpy_array(actual):
+            # pass the tolerances by keyword so rel/abs keep their meaning
+            return (
+                ApproxNumpy(actual, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok)
+                == self.expected
+            )
+
+ # Short-circuit exact equality.
+ if actual == self.expected:
+ return True
+
+ # Allow the user to control whether NaNs are considered equal to each
+ # other or not. The abs() calls are for compatibility with complex
+ # numbers.
+ if math.isnan(abs(self.expected)):
+ return self.nan_ok and math.isnan(abs(actual))
+
+ # Infinity shouldn't be approximately equal to anything but itself, but
+ # if there's a relative tolerance, it will be infinite and infinity
+ # will seem approximately equal to everything. The equal-to-itself
+ # case would have been short circuited above, so here we can just
+ # return false if the expected value is infinite. The abs() call is
+ # for compatibility with complex numbers.
+ if math.isinf(abs(self.expected)):
+ return False
+
+ # Return true if the two numbers are within the tolerance.
+ return abs(self.expected - actual) <= self.tolerance
+
+ __hash__ = None
+
+ @property
+ def tolerance(self):
+ """
+ Return the tolerance for the comparison. This could be either an
+ absolute tolerance or a relative tolerance, depending on what the user
+ specified or which would be larger.
+ """
+
+ def set_default(x, default):
+ return x if x is not None else default
+
+ # Figure out what the absolute tolerance should be. ``self.abs`` is
+ # either None or a value specified by the user.
+ absolute_tolerance = set_default(self.abs, self.DEFAULT_ABSOLUTE_TOLERANCE)
+
+ if absolute_tolerance < 0:
+ raise ValueError(
+ "absolute tolerance can't be negative: {}".format(absolute_tolerance)
+ )
+ if math.isnan(absolute_tolerance):
+ raise ValueError("absolute tolerance can't be NaN.")
+
+ # If the user specified an absolute tolerance but not a relative one,
+ # just return the absolute tolerance.
+ if self.rel is None:
+ if self.abs is not None:
+ return absolute_tolerance
+
+ # Figure out what the relative tolerance should be. ``self.rel`` is
+ # either None or a value specified by the user. This is done after
+ # we've made sure the user didn't ask for an absolute tolerance only,
+ # because we don't want to raise errors about the relative tolerance if
+ # we aren't even going to use it.
+ relative_tolerance = set_default(
+ self.rel, self.DEFAULT_RELATIVE_TOLERANCE
+ ) * abs(
+ self.expected
+ )
+
+ if relative_tolerance < 0:
+ raise ValueError(
+                "relative tolerance can't be negative: {}".format(relative_tolerance)
+ )
+ if math.isnan(relative_tolerance):
+ raise ValueError("relative tolerance can't be NaN.")
+
+ # Return the larger of the relative and absolute tolerances.
+ return max(relative_tolerance, absolute_tolerance)
+
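+# Worked example (for illustration only): for ``approx(100, rel=1e-3, abs=1e-6)``
+# ApproxScalar.tolerance above evaluates max(1e-3 * 100, 1e-6) == 0.1, so any
+# actual value within 0.1 of 100 compares equal; with only ``abs`` given, e.g.
+# ``approx(100, abs=0.5)``, the relative default is not consulted and the
+# tolerance is exactly 0.5.
+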
+
+class ApproxDecimal(ApproxScalar):
+ from decimal import Decimal
+
+ DEFAULT_ABSOLUTE_TOLERANCE = Decimal("1e-12")
+ DEFAULT_RELATIVE_TOLERANCE = Decimal("1e-6")
+
+
+def approx(expected, rel=None, abs=None, nan_ok=False):
+ """
+ Assert that two numbers (or two sets of numbers) are equal to each other
+ within some tolerance.
+
+ Due to the `intricacies of floating-point arithmetic`__, numbers that we
+ would intuitively expect to be equal are not always so::
+
+ >>> 0.1 + 0.2 == 0.3
+ False
+
+ __ https://docs.python.org/3/tutorial/floatingpoint.html
+
+ This problem is commonly encountered when writing tests, e.g. when making
+ sure that floating-point values are what you expect them to be. One way to
+ deal with this problem is to assert that two floating-point numbers are
+ equal to within some appropriate tolerance::
+
+ >>> abs((0.1 + 0.2) - 0.3) < 1e-6
+ True
+
+ However, comparisons like this are tedious to write and difficult to
+ understand. Furthermore, absolute comparisons like the one above are
+ usually discouraged because there's no tolerance that works well for all
+ situations. ``1e-6`` is good for numbers around ``1``, but too small for
+ very big numbers and too big for very small ones. It's better to express
+ the tolerance as a fraction of the expected value, but relative comparisons
+ like that are even more difficult to write correctly and concisely.
+
+ The ``approx`` class performs floating-point comparisons using a syntax
+ that's as intuitive as possible::
+
+ >>> from pytest import approx
+ >>> 0.1 + 0.2 == approx(0.3)
+ True
+
+ The same syntax also works for sequences of numbers::
+
+ >>> (0.1 + 0.2, 0.2 + 0.4) == approx((0.3, 0.6))
+ True
+
+ Dictionary *values*::
+
+ >>> {'a': 0.1 + 0.2, 'b': 0.2 + 0.4} == approx({'a': 0.3, 'b': 0.6})
+ True
+
+ ``numpy`` arrays::
+
+ >>> import numpy as np # doctest: +SKIP
+ >>> np.array([0.1, 0.2]) + np.array([0.2, 0.4]) == approx(np.array([0.3, 0.6])) # doctest: +SKIP
+ True
+
+ And for a ``numpy`` array against a scalar::
+
+ >>> import numpy as np # doctest: +SKIP
+ >>> np.array([0.1, 0.2]) + np.array([0.2, 0.1]) == approx(0.3) # doctest: +SKIP
+ True
+
+ By default, ``approx`` considers numbers within a relative tolerance of
+ ``1e-6`` (i.e. one part in a million) of its expected value to be equal.
+ This treatment would lead to surprising results if the expected value was
+ ``0.0``, because nothing but ``0.0`` itself is relatively close to ``0.0``.
+ To handle this case less surprisingly, ``approx`` also considers numbers
+ within an absolute tolerance of ``1e-12`` of its expected value to be
+ equal. Infinity and NaN are special cases. Infinity is only considered
+ equal to itself, regardless of the relative tolerance. NaN is not
+ considered equal to anything by default, but you can make it be equal to
+ itself by setting the ``nan_ok`` argument to True. (This is meant to
+ facilitate comparing arrays that use NaN to mean "no data".)
+
+ Both the relative and absolute tolerances can be changed by passing
+ arguments to the ``approx`` constructor::
+
+ >>> 1.0001 == approx(1)
+ False
+ >>> 1.0001 == approx(1, rel=1e-3)
+ True
+ >>> 1.0001 == approx(1, abs=1e-3)
+ True
+
+ If you specify ``abs`` but not ``rel``, the comparison will not consider
+ the relative tolerance at all. In other words, two numbers that are within
+ the default relative tolerance of ``1e-6`` will still be considered unequal
+ if they exceed the specified absolute tolerance. If you specify both
+ ``abs`` and ``rel``, the numbers will be considered equal if either
+ tolerance is met::
+
+ >>> 1 + 1e-8 == approx(1)
+ True
+ >>> 1 + 1e-8 == approx(1, abs=1e-12)
+ False
+ >>> 1 + 1e-8 == approx(1, rel=1e-6, abs=1e-12)
+ True
+
+ If you're thinking about using ``approx``, then you might want to know how
+ it compares to other good ways of comparing floating-point numbers. All of
+ these algorithms are based on relative and absolute tolerances and should
+ agree for the most part, but they do have meaningful differences:
+
+ - ``math.isclose(a, b, rel_tol=1e-9, abs_tol=0.0)``: True if the relative
+ tolerance is met w.r.t. either ``a`` or ``b`` or if the absolute
+ tolerance is met. Because the relative tolerance is calculated w.r.t.
+ both ``a`` and ``b``, this test is symmetric (i.e. neither ``a`` nor
+ ``b`` is a "reference value"). You have to specify an absolute tolerance
+ if you want to compare to ``0.0`` because there is no tolerance by
+ default. Only available in python>=3.5. `More information...`__
+
+ __ https://docs.python.org/3/library/math.html#math.isclose
+
+ - ``numpy.isclose(a, b, rtol=1e-5, atol=1e-8)``: True if the difference
+      between ``a`` and ``b`` is less than the sum of the relative tolerance
+ w.r.t. ``b`` and the absolute tolerance. Because the relative tolerance
+ is only calculated w.r.t. ``b``, this test is asymmetric and you can
+ think of ``b`` as the reference value. Support for comparing sequences
+ is provided by ``numpy.allclose``. `More information...`__
+
+ __ http://docs.scipy.org/doc/numpy-1.10.0/reference/generated/numpy.isclose.html
+
+ - ``unittest.TestCase.assertAlmostEqual(a, b)``: True if ``a`` and ``b``
+ are within an absolute tolerance of ``1e-7``. No relative tolerance is
+ considered and the absolute tolerance cannot be changed, so this function
+ is not appropriate for very large or very small numbers. Also, it's only
+ available in subclasses of ``unittest.TestCase`` and it's ugly because it
+ doesn't follow PEP8. `More information...`__
+
+ __ https://docs.python.org/3/library/unittest.html#unittest.TestCase.assertAlmostEqual
+
+ - ``a == pytest.approx(b, rel=1e-6, abs=1e-12)``: True if the relative
+ tolerance is met w.r.t. ``b`` or if the absolute tolerance is met.
+ Because the relative tolerance is only calculated w.r.t. ``b``, this test
+ is asymmetric and you can think of ``b`` as the reference value. In the
+ special case that you explicitly specify an absolute tolerance but not a
+ relative tolerance, only the absolute tolerance is considered.
+
+ .. warning::
+
+ .. versionchanged:: 3.2
+
+ In order to avoid inconsistent behavior, ``TypeError`` is
+ raised for ``>``, ``>=``, ``<`` and ``<=`` comparisons.
+ The example below illustrates the problem::
+
+ assert approx(0.1) > 0.1 + 1e-10 # calls approx(0.1).__gt__(0.1 + 1e-10)
+ assert 0.1 + 1e-10 > approx(0.1) # calls approx(0.1).__lt__(0.1 + 1e-10)
+
+ In the second example one expects ``approx(0.1).__le__(0.1 + 1e-10)``
+        to be called. But instead, ``approx(0.1).__lt__(0.1 + 1e-10)`` is used
+        for the comparison. This is because the call hierarchy of rich comparisons
+ follows a fixed behavior. `More information...`__
+
+ __ https://docs.python.org/3/reference/datamodel.html#object.__ge__
+ """
+
+ from decimal import Decimal
+
+ # Delegate the comparison to a class that knows how to deal with the type
+ # of the expected value (e.g. int, float, list, dict, numpy.array, etc).
+ #
+ # This architecture is really driven by the need to support numpy arrays.
+ # The only way to override `==` for arrays without requiring that approx be
+ # the left operand is to inherit the approx object from `numpy.ndarray`.
+ # But that can't be a general solution, because it requires (1) numpy to be
+ # installed and (2) the expected value to be a numpy array. So the general
+ # solution is to delegate each type of expected value to a different class.
+ #
+    # This has the advantage that it makes it easy to support mapping types
+ # (i.e. dict). The old code accepted mapping types, but would only compare
+ # their keys, which is probably not what most people would expect.
+
+ if _is_numpy_array(expected):
+ cls = ApproxNumpy
+ elif isinstance(expected, Mapping):
+ cls = ApproxMapping
+ elif isinstance(expected, Sequence) and not isinstance(expected, STRING_TYPES):
+ cls = ApproxSequence
+ elif isinstance(expected, Decimal):
+ cls = ApproxDecimal
+ else:
+ cls = ApproxScalar
+
+ return cls(expected, rel, abs, nan_ok)
+
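+# For illustration: the dispatch above means, for example,
+#
+#   approx({"x": 0.1 + 0.2})     # handled by ApproxMapping
+#   approx([0.1 + 0.2, 0.5])     # handled by ApproxSequence
+#   approx(Decimal("0.3"))       # handled by ApproxDecimal
+#   approx(0.3)                  # handled by ApproxScalar
+#
+# so ``{"x": 0.3} == approx({"x": 0.1 + 0.2})`` compares the dict values
+# approximately while requiring the key sets to match exactly.
+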
+
+def _is_numpy_array(obj):
+ """
+ Return true if the given object is a numpy array. Make a special effort to
+ avoid importing numpy unless it's really necessary.
+ """
+ import inspect
+
+ for cls in inspect.getmro(type(obj)):
+ if cls.__module__ == "numpy":
+ try:
+ import numpy as np
+
+ return isinstance(obj, np.ndarray)
+ except ImportError:
+ pass
+
+ return False
+
+
+# builtin pytest.raises helper
+
+
+def raises(expected_exception, *args, **kwargs):
+ r"""
+ Assert that a code block/function call raises ``expected_exception``
+ and raise a failure exception otherwise.
+
+ :arg message: if specified, provides a custom failure message if the
+ exception is not raised
+ :arg match: if specified, asserts that the exception matches a text or regex
+
+    This helper produces an ``ExceptionInfo()`` object (see below).
+
+ You may use this function as a context manager::
+
+ >>> with raises(ZeroDivisionError):
+ ... 1/0
+
+ .. versionchanged:: 2.10
+
+ In the context manager form you may use the keyword argument
+ ``message`` to specify a custom failure message::
+
+ >>> with raises(ZeroDivisionError, message="Expecting ZeroDivisionError"):
+ ... pass
+ Traceback (most recent call last):
+ ...
+ Failed: Expecting ZeroDivisionError
+
+ .. note::
+
+ When using ``pytest.raises`` as a context manager, it's worthwhile to
+ note that normal context manager rules apply and that the exception
+ raised *must* be the final line in the scope of the context manager.
+        Lines of code after that, within the scope of the context manager,
+        will not be executed. For example::
+
+ >>> value = 15
+ >>> with raises(ValueError) as exc_info:
+ ... if value > 10:
+ ... raise ValueError("value must be <= 10")
+ ... assert exc_info.type == ValueError # this will not execute
+
+ Instead, the following approach must be taken (note the difference in
+ scope)::
+
+ >>> with raises(ValueError) as exc_info:
+ ... if value > 10:
+ ... raise ValueError("value must be <= 10")
+ ...
+ >>> assert exc_info.type == ValueError
+
+
+ Since version ``3.1`` you can use the keyword argument ``match`` to assert that the
+ exception matches a text or regex::
+
+ >>> with raises(ValueError, match='must be 0 or None'):
+ ... raise ValueError("value must be 0 or None")
+
+ >>> with raises(ValueError, match=r'must be \d+$'):
+ ... raise ValueError("value must be 42")
+
+ **Legacy forms**
+
+ The forms below are fully supported but are discouraged for new code because the
+ context manager form is regarded as more readable and less error-prone.
+
+ It is possible to specify a callable by passing a to-be-called lambda::
+
+ >>> raises(ZeroDivisionError, lambda: 1/0)
+ <ExceptionInfo ...>
+
+ or you can specify an arbitrary callable with arguments::
+
+ >>> def f(x): return 1/x
+ ...
+ >>> raises(ZeroDivisionError, f, 0)
+ <ExceptionInfo ...>
+ >>> raises(ZeroDivisionError, f, x=0)
+ <ExceptionInfo ...>
+
+ It is also possible to pass a string to be evaluated at runtime::
+
+ >>> raises(ZeroDivisionError, "f(0)")
+ <ExceptionInfo ...>
+
+ The string will be evaluated using the same ``locals()`` and ``globals()``
+ at the moment of the ``raises`` call.
+
+ .. currentmodule:: _pytest._code
+
+ Consult the API of ``excinfo`` objects: :class:`ExceptionInfo`.
+
+ .. note::
+ Similar to caught exception objects in Python, explicitly clearing
+ local references to returned ``ExceptionInfo`` objects can
+ help the Python interpreter speed up its garbage collection.
+
+ Clearing those references breaks a reference cycle
+ (``ExceptionInfo`` --> caught exception --> frame stack raising
+ the exception --> current frame stack --> local variables -->
+ ``ExceptionInfo``) which makes Python keep all objects referenced
+ from that cycle (including all local variables in the current
+ frame) alive until the next cyclic garbage collection run. See the
+ official Python ``try`` statement documentation for more detailed
+ information.
+
+ """
+ __tracebackhide__ = True
+ for exc in filterfalse(isclass, always_iterable(expected_exception, BASE_TYPE)):
+ msg = (
+ "exceptions must be old-style classes or"
+ " derived from BaseException, not %s"
+ )
+ raise TypeError(msg % type(exc))
+
+ message = "DID NOT RAISE {}".format(expected_exception)
+ match_expr = None
+
+ if not args:
+ if "message" in kwargs:
+ message = kwargs.pop("message")
+ if "match" in kwargs:
+ match_expr = kwargs.pop("match")
+ if kwargs:
+ msg = "Unexpected keyword arguments passed to pytest.raises: "
+ msg += ", ".join(kwargs.keys())
+ raise TypeError(msg)
+ return RaisesContext(expected_exception, message, match_expr)
+ elif isinstance(args[0], str):
+ code, = args
+ assert isinstance(code, str)
+ frame = sys._getframe(1)
+ loc = frame.f_locals.copy()
+ loc.update(kwargs)
+ # print "raises frame scope: %r" % frame.f_locals
+ try:
+ code = _pytest._code.Source(code).compile()
+ py.builtin.exec_(code, frame.f_globals, loc)
+            # XXX didn't f_globals == f_locals mean something special?
+ # this is destroyed here ...
+ except expected_exception:
+ return _pytest._code.ExceptionInfo()
+ else:
+ func = args[0]
+ try:
+ func(*args[1:], **kwargs)
+ except expected_exception:
+ return _pytest._code.ExceptionInfo()
+ fail(message)
+
+
+raises.Exception = fail.Exception
+
+
+class RaisesContext(object):
+
+ def __init__(self, expected_exception, message, match_expr):
+ self.expected_exception = expected_exception
+ self.message = message
+ self.match_expr = match_expr
+ self.excinfo = None
+
+ def __enter__(self):
+ self.excinfo = object.__new__(_pytest._code.ExceptionInfo)
+ return self.excinfo
+
+ def __exit__(self, *tp):
+ __tracebackhide__ = True
+ if tp[0] is None:
+ fail(self.message)
+ self.excinfo.__init__(tp)
+ suppress_exception = issubclass(self.excinfo.type, self.expected_exception)
+ if sys.version_info[0] == 2 and suppress_exception:
+ sys.exc_clear()
+ if self.match_expr and suppress_exception:
+ self.excinfo.match(self.match_expr)
+ return suppress_exception
diff --git a/third_party/python/pytest/src/_pytest/recwarn.py b/third_party/python/pytest/src/_pytest/recwarn.py
new file mode 100644
index 0000000000..7839f5700c
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/recwarn.py
@@ -0,0 +1,243 @@
+""" recording warnings during test function execution. """
+from __future__ import absolute_import, division, print_function
+
+import inspect
+import re
+import sys
+import warnings
+
+import py
+
+import _pytest._code
+
+from _pytest.fixtures import yield_fixture
+from _pytest.outcomes import fail
+
+
+@yield_fixture
+def recwarn():
+ """Return a :class:`WarningsRecorder` instance that records all warnings emitted by test functions.
+
+ See http://docs.python.org/library/warnings.html for information
+ on warning categories.
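+
+    A typical use requests ``recwarn`` as a fixture argument and inspects the
+    recorded warnings afterwards (the test name and message below are
+    illustrative)::
+
+        import warnings
+
+        def test_hello(recwarn):
+            warnings.warn("hello", UserWarning)
+            assert len(recwarn) == 1
+            w = recwarn.pop(UserWarning)
+            assert issubclass(w.category, UserWarning)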
+ """
+ wrec = WarningsRecorder()
+ with wrec:
+ warnings.simplefilter("default")
+ yield wrec
+
+
+def deprecated_call(func=None, *args, **kwargs):
+ """context manager that can be used to ensure a block of code triggers a
+ ``DeprecationWarning`` or ``PendingDeprecationWarning``::
+
+ >>> import warnings
+ >>> def api_call_v2():
+ ... warnings.warn('use v3 of this api', DeprecationWarning)
+ ... return 200
+
+ >>> with deprecated_call():
+ ... assert api_call_v2() == 200
+
+    ``deprecated_call`` can also be used by passing a function and ``*args`` and ``**kwargs``,
+    in which case it will ensure that calling ``func(*args, **kwargs)`` produces one of the warning
+    types above.
+ """
+ if not func:
+ return _DeprecatedCallContext()
+ else:
+ __tracebackhide__ = True
+ with _DeprecatedCallContext():
+ return func(*args, **kwargs)
+
+
+class _DeprecatedCallContext(object):
+ """Implements the logic to capture deprecation warnings as a context manager."""
+
+ def __enter__(self):
+ self._captured_categories = []
+ self._old_warn = warnings.warn
+ self._old_warn_explicit = warnings.warn_explicit
+ warnings.warn_explicit = self._warn_explicit
+ warnings.warn = self._warn
+
+ def _warn_explicit(self, message, category, *args, **kwargs):
+ self._captured_categories.append(category)
+
+ def _warn(self, message, category=None, *args, **kwargs):
+ if isinstance(message, Warning):
+ self._captured_categories.append(message.__class__)
+ else:
+ self._captured_categories.append(category)
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ warnings.warn_explicit = self._old_warn_explicit
+ warnings.warn = self._old_warn
+
+ if exc_type is None:
+ deprecation_categories = (DeprecationWarning, PendingDeprecationWarning)
+ if not any(
+ issubclass(c, deprecation_categories) for c in self._captured_categories
+ ):
+ __tracebackhide__ = True
+ msg = "Did not produce DeprecationWarning or PendingDeprecationWarning"
+ raise AssertionError(msg)
+
+
+def warns(expected_warning, *args, **kwargs):
+ r"""Assert that code raises a particular class of warning.
+
+    Specifically, the parameter ``expected_warning`` can be a warning class or a
+    sequence of warning classes, and the code inside the ``with`` block must issue a warning of
+    that class or classes.
+
+ This helper produces a list of :class:`warnings.WarningMessage` objects,
+ one for each warning raised.
+
+ This function can be used as a context manager, or any of the other ways
+ ``pytest.raises`` can be used::
+
+ >>> with warns(RuntimeWarning):
+ ... warnings.warn("my warning", RuntimeWarning)
+
+    In the context manager form you may use the keyword argument ``match`` to assert
+    that the warning matches a text or regex::
+
+ >>> with warns(UserWarning, match='must be 0 or None'):
+ ... warnings.warn("value must be 0 or None", UserWarning)
+
+ >>> with warns(UserWarning, match=r'must be \d+$'):
+ ... warnings.warn("value must be 42", UserWarning)
+
+ >>> with warns(UserWarning, match=r'must be \d+$'):
+ ... warnings.warn("this is not here", UserWarning)
+ Traceback (most recent call last):
+ ...
+ Failed: DID NOT WARN. No warnings of type ...UserWarning... was emitted...
+
+ """
+ match_expr = None
+ if not args:
+ if "match" in kwargs:
+ match_expr = kwargs.pop("match")
+ return WarningsChecker(expected_warning, match_expr=match_expr)
+ elif isinstance(args[0], str):
+ code, = args
+ assert isinstance(code, str)
+ frame = sys._getframe(1)
+ loc = frame.f_locals.copy()
+ loc.update(kwargs)
+
+ with WarningsChecker(expected_warning, match_expr=match_expr):
+ code = _pytest._code.Source(code).compile()
+ py.builtin.exec_(code, frame.f_globals, loc)
+ else:
+ func = args[0]
+ with WarningsChecker(expected_warning, match_expr=match_expr):
+ return func(*args[1:], **kwargs)
+
+
+class WarningsRecorder(warnings.catch_warnings):
+ """A context manager to record raised warnings.
+
+ Adapted from `warnings.catch_warnings`.
+ """
+
+ def __init__(self):
+ super(WarningsRecorder, self).__init__(record=True)
+ self._entered = False
+ self._list = []
+
+ @property
+ def list(self):
+ """The list of recorded warnings."""
+ return self._list
+
+ def __getitem__(self, i):
+ """Get a recorded warning by index."""
+ return self._list[i]
+
+ def __iter__(self):
+ """Iterate through the recorded warnings."""
+ return iter(self._list)
+
+ def __len__(self):
+ """The number of recorded warnings."""
+ return len(self._list)
+
+ def pop(self, cls=Warning):
+        """Pop the first recorded warning of class ``cls``; raise ``AssertionError`` if none exists."""
+ for i, w in enumerate(self._list):
+ if issubclass(w.category, cls):
+ return self._list.pop(i)
+ __tracebackhide__ = True
+ raise AssertionError("%r not found in warning list" % cls)
+
+ def clear(self):
+ """Clear the list of recorded warnings."""
+ self._list[:] = []
+
+ def __enter__(self):
+ if self._entered:
+ __tracebackhide__ = True
+ raise RuntimeError("Cannot enter %r twice" % self)
+ self._list = super(WarningsRecorder, self).__enter__()
+ warnings.simplefilter("always")
+ return self
+
+ def __exit__(self, *exc_info):
+ if not self._entered:
+ __tracebackhide__ = True
+ raise RuntimeError("Cannot exit %r without entering first" % self)
+ super(WarningsRecorder, self).__exit__(*exc_info)
+
+
+class WarningsChecker(WarningsRecorder):
+
+ def __init__(self, expected_warning=None, match_expr=None):
+ super(WarningsChecker, self).__init__()
+
+        msg = (
+            "exceptions must be old-style classes or derived from Warning, not %s"
+        )
+ if isinstance(expected_warning, tuple):
+ for exc in expected_warning:
+ if not inspect.isclass(exc):
+ raise TypeError(msg % type(exc))
+ elif inspect.isclass(expected_warning):
+ expected_warning = (expected_warning,)
+ elif expected_warning is not None:
+ raise TypeError(msg % type(expected_warning))
+
+ self.expected_warning = expected_warning
+ self.match_expr = match_expr
+
+ def __exit__(self, *exc_info):
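+        # Stop recording first, then verify expectations: fail if no warning
+        # of the expected category was captured, or if none of the captured
+        # warnings matches ``match_expr``.  The checks are skipped while an
+        # exception is propagating out of the ``with`` block.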
+ super(WarningsChecker, self).__exit__(*exc_info)
+
+ # only check if we're not currently handling an exception
+ if all(a is None for a in exc_info):
+ if self.expected_warning is not None:
+ if not any(issubclass(r.category, self.expected_warning) for r in self):
+ __tracebackhide__ = True
+ fail(
+ "DID NOT WARN. No warnings of type {} was emitted. "
+ "The list of emitted warnings is: {}.".format(
+ self.expected_warning, [each.message for each in self]
+ )
+ )
+ elif self.match_expr is not None:
+ for r in self:
+ if issubclass(r.category, self.expected_warning):
+ if re.compile(self.match_expr).search(str(r.message)):
+ break
+ else:
+ fail(
+ "DID NOT WARN. No warnings of type {} matching"
+ " ('{}') was emitted. The list of emitted warnings"
+ " is: {}.".format(
+ self.expected_warning,
+ self.match_expr,
+ [each.message for each in self],
+ )
+ )
diff --git a/third_party/python/pytest/src/_pytest/resultlog.py b/third_party/python/pytest/src/_pytest/resultlog.py
new file mode 100644
index 0000000000..8f300c9830
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/resultlog.py
@@ -0,0 +1,120 @@
+""" log machine-parseable test session result information in a plain
+text file.
+"""
+from __future__ import absolute_import, division, print_function
+
+import py
+import os
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting", "resultlog plugin options")
+ group.addoption(
+ "--resultlog",
+ "--result-log",
+ action="store",
+ metavar="path",
+ default=None,
+ help="DEPRECATED path for machine-readable result log.",
+ )
+
+
+def pytest_configure(config):
+ resultlog = config.option.resultlog
+ # prevent opening resultlog on slave nodes (xdist)
+ if resultlog and not hasattr(config, "slaveinput"):
+ dirname = os.path.dirname(os.path.abspath(resultlog))
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname)
+ logfile = open(resultlog, "w", 1) # line buffered
+ config._resultlog = ResultLog(config, logfile)
+ config.pluginmanager.register(config._resultlog)
+
+ from _pytest.deprecated import RESULT_LOG
+
+ config.warn("C1", RESULT_LOG)
+
+
+def pytest_unconfigure(config):
+ resultlog = getattr(config, "_resultlog", None)
+ if resultlog:
+ resultlog.logfile.close()
+ del config._resultlog
+ config.pluginmanager.unregister(resultlog)
+
+
+def generic_path(item):
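+    # Build a condensed test path for the result log: filesystem parts are
+    # joined with "/", the first in-file part with ":" and further in-file
+    # parts with ".".  Names starting with "(" or "[" (instances and
+    # parametrization ids) attach directly to the previous part.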
+ chain = item.listchain()
+ gpath = [chain[0].name]
+ fspath = chain[0].fspath
+ fspart = False
+ for node in chain[1:]:
+ newfspath = node.fspath
+ if newfspath == fspath:
+ if fspart:
+ gpath.append(":")
+ fspart = False
+ else:
+ gpath.append(".")
+ else:
+ gpath.append("/")
+ fspart = True
+ name = node.name
+ if name[0] in "([":
+ gpath.pop()
+ gpath.append(name)
+ fspath = newfspath
+ return "".join(gpath)
+
+
+class ResultLog(object):
+
+ def __init__(self, config, logfile):
+ self.config = config
+ self.logfile = logfile # preferably line buffered
+
+ def write_log_entry(self, testpath, lettercode, longrepr):
+ print("%s %s" % (lettercode, testpath), file=self.logfile)
+ for line in longrepr.splitlines():
+ print(" %s" % line, file=self.logfile)
+
+ def log_outcome(self, report, lettercode, longrepr):
+ testpath = getattr(report, "nodeid", None)
+ if testpath is None:
+ testpath = report.fspath
+ self.write_log_entry(testpath, lettercode, longrepr)
+
+ def pytest_runtest_logreport(self, report):
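+        # Skip reports for passed setup/teardown phases; call-phase reports
+        # and failed or skipped setup/teardown reports are always logged.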
+ if report.when != "call" and report.passed:
+ return
+ res = self.config.hook.pytest_report_teststatus(report=report)
+ code = res[1]
+ if code == "x":
+ longrepr = str(report.longrepr)
+ elif code == "X":
+ longrepr = ""
+ elif report.passed:
+ longrepr = ""
+ elif report.failed:
+ longrepr = str(report.longrepr)
+ elif report.skipped:
+ longrepr = str(report.longrepr[2])
+ self.log_outcome(report, code, longrepr)
+
+ def pytest_collectreport(self, report):
+ if not report.passed:
+ if report.failed:
+ code = "F"
+ longrepr = str(report.longrepr)
+ else:
+ assert report.skipped
+ code = "S"
+ longrepr = "%s:%d: %s" % report.longrepr
+ self.log_outcome(report, code, longrepr)
+
+ def pytest_internalerror(self, excrepr):
+ reprcrash = getattr(excrepr, "reprcrash", None)
+ path = getattr(reprcrash, "path", None)
+ if path is None:
+ path = "cwd:%s" % py.path.local()
+ self.write_log_entry(path, "!", str(excrepr))
diff --git a/third_party/python/pytest/src/_pytest/runner.py b/third_party/python/pytest/src/_pytest/runner.py
new file mode 100644
index 0000000000..18e925509e
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/runner.py
@@ -0,0 +1,570 @@
+""" basic collect and runtest protocol implementations """
+from __future__ import absolute_import, division, print_function
+
+import bdb
+import os
+import sys
+from time import time
+
+import py
+from _pytest._code.code import TerminalRepr, ExceptionInfo
+from _pytest.outcomes import skip, Skipped, TEST_OUTCOME
+
+#
+# pytest plugin hooks
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting", "reporting", after="general")
+ group.addoption(
+ "--durations",
+ action="store",
+ type=int,
+ default=None,
+ metavar="N",
+ help="show N slowest setup/test durations (N=0 for all).",
+    )
+
+
+def pytest_terminal_summary(terminalreporter):
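+    # Collect every report that has a duration, sort longest first, and
+    # print the N slowest entries (all of them when --durations=0).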
+ durations = terminalreporter.config.option.durations
+ if durations is None:
+ return
+ tr = terminalreporter
+ dlist = []
+ for replist in tr.stats.values():
+ for rep in replist:
+ if hasattr(rep, "duration"):
+ dlist.append(rep)
+ if not dlist:
+ return
+ dlist.sort(key=lambda x: x.duration)
+ dlist.reverse()
+ if not durations:
+ tr.write_sep("=", "slowest test durations")
+ else:
+ tr.write_sep("=", "slowest %s test durations" % durations)
+ dlist = dlist[:durations]
+
+ for rep in dlist:
+ nodeid = rep.nodeid.replace("::()::", "::")
+ tr.write_line("%02.2fs %-8s %s" % (rep.duration, rep.when, nodeid))
+
+
+def pytest_sessionstart(session):
+ session._setupstate = SetupState()
+
+
+def pytest_sessionfinish(session):
+ session._setupstate.teardown_all()
+
+
+def pytest_runtest_protocol(item, nextitem):
+ item.ihook.pytest_runtest_logstart(nodeid=item.nodeid, location=item.location)
+ runtestprotocol(item, nextitem=nextitem)
+ item.ihook.pytest_runtest_logfinish(nodeid=item.nodeid, location=item.location)
+ return True
+
+
+def runtestprotocol(item, log=True, nextitem=None):
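+    # Run the setup phase, then (if setup passed and --setup-only is not
+    # given) the call phase, then teardown, and return the list of reports.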
+ hasrequest = hasattr(item, "_request")
+ if hasrequest and not item._request:
+ item._initrequest()
+ rep = call_and_report(item, "setup", log)
+ reports = [rep]
+ if rep.passed:
+ if item.config.option.setupshow:
+ show_test_item(item)
+ if not item.config.option.setuponly:
+ reports.append(call_and_report(item, "call", log))
+ reports.append(call_and_report(item, "teardown", log, nextitem=nextitem))
+ # after all teardown hooks have been called
+ # want funcargs and request info to go away
+ if hasrequest:
+ item._request = False
+ item.funcargs = None
+ return reports
+
+
+def show_test_item(item):
+ """Show test function, parameters and the fixtures of the test item."""
+ tw = item.config.get_terminal_writer()
+ tw.line()
+ tw.write(" " * 8)
+ tw.write(item._nodeid)
+ used_fixtures = sorted(item._fixtureinfo.name2fixturedefs.keys())
+ if used_fixtures:
+ tw.write(" (fixtures used: {})".format(", ".join(used_fixtures)))
+
+
+def pytest_runtest_setup(item):
+ _update_current_test_var(item, "setup")
+ item.session._setupstate.prepare(item)
+
+
+def pytest_runtest_call(item):
+ _update_current_test_var(item, "call")
+ sys.last_type, sys.last_value, sys.last_traceback = (None, None, None)
+ try:
+ item.runtest()
+ except Exception:
+ # Store trace info to allow postmortem debugging
+ type, value, tb = sys.exc_info()
+ tb = tb.tb_next # Skip *this* frame
+ sys.last_type = type
+ sys.last_value = value
+ sys.last_traceback = tb
+ del type, value, tb # Get rid of these in this frame
+ raise
+
+
+def pytest_runtest_teardown(item, nextitem):
+ _update_current_test_var(item, "teardown")
+ item.session._setupstate.teardown_exact(item, nextitem)
+ _update_current_test_var(item, None)
+
+
+def _update_current_test_var(item, when):
+ """
+ Update PYTEST_CURRENT_TEST to reflect the current item and stage.
+
+ If ``when`` is None, delete PYTEST_CURRENT_TEST from the environment.
+ """
+ var_name = "PYTEST_CURRENT_TEST"
+ if when:
+ value = "{} ({})".format(item.nodeid, when)
+ # don't allow null bytes on environment variables (see #2644, #2957)
+ value = value.replace("\x00", "(null)")
+ os.environ[var_name] = value
+ else:
+ os.environ.pop(var_name)
+
+
+def pytest_report_teststatus(report):
+ if report.when in ("setup", "teardown"):
+ if report.failed:
+ # category, shortletter, verbose-word
+ return "error", "E", "ERROR"
+ elif report.skipped:
+ return "skipped", "s", "SKIPPED"
+ else:
+ return "", "", ""
+
+
+#
+# Implementation
+
+
+def call_and_report(item, when, log=True, **kwds):
+ call = call_runtest_hook(item, when, **kwds)
+ hook = item.ihook
+ report = hook.pytest_runtest_makereport(item=item, call=call)
+ if log:
+ hook.pytest_runtest_logreport(report=report)
+ if check_interactive_exception(call, report):
+ hook.pytest_exception_interact(node=item, call=call, report=report)
+ return report
+
+
+def check_interactive_exception(call, report):
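+    # True if the exception should be offered for interactive debugging via
+    # pytest_exception_interact (i.e. not an xfail, a skip, or a debugger quit).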
+ return call.excinfo and not (
+ hasattr(report, "wasxfail")
+ or call.excinfo.errisinstance(skip.Exception)
+ or call.excinfo.errisinstance(bdb.BdbQuit)
+ )
+
+
+def call_runtest_hook(item, when, **kwds):
+ hookname = "pytest_runtest_" + when
+ ihook = getattr(item.ihook, hookname)
+ return CallInfo(
+ lambda: ihook(item=item, **kwds),
+ when=when,
+ treat_keyboard_interrupt_as_exception=item.config.getvalue("usepdb"),
+ )
+
+
+class CallInfo(object):
+    """ Result/Exception info of a function invocation. """
+ #: None or ExceptionInfo object.
+ excinfo = None
+
+ def __init__(self, func, when, treat_keyboard_interrupt_as_exception=False):
+ #: context of invocation: one of "setup", "call",
+ #: "teardown", "memocollect"
+ self.when = when
+ self.start = time()
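+        # KeyboardInterrupt normally propagates and aborts the run; it is
+        # recorded like any other exception only when requested (e.g. when
+        # running under --pdb, see call_runtest_hook).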
+ try:
+ self.result = func()
+ except KeyboardInterrupt:
+ if treat_keyboard_interrupt_as_exception:
+ self.excinfo = ExceptionInfo()
+ else:
+ self.stop = time()
+ raise
+ except: # noqa
+ self.excinfo = ExceptionInfo()
+ self.stop = time()
+
+ def __repr__(self):
+ if self.excinfo:
+ status = "exception: %s" % str(self.excinfo.value)
+ else:
+ status = "result: %r" % (self.result,)
+ return "<CallInfo when=%r %s>" % (self.when, status)
+
+
+def getslaveinfoline(node):
+ try:
+ return node._slaveinfocache
+ except AttributeError:
+ d = node.slaveinfo
+ ver = "%s.%s.%s" % d["version_info"][:3]
+ node._slaveinfocache = s = "[%s] %s -- Python %s %s" % (
+ d["id"], d["sysplatform"], ver, d["executable"]
+ )
+ return s
+
+
+class BaseReport(object):
+
+ def __init__(self, **kw):
+ self.__dict__.update(kw)
+
+ def toterminal(self, out):
+ if hasattr(self, "node"):
+ out.line(getslaveinfoline(self.node))
+
+ longrepr = self.longrepr
+ if longrepr is None:
+ return
+
+ if hasattr(longrepr, "toterminal"):
+ longrepr.toterminal(out)
+ else:
+ try:
+ out.line(longrepr)
+ except UnicodeEncodeError:
+ out.line("<unprintable longrepr>")
+
+ def get_sections(self, prefix):
+ for name, content in self.sections:
+ if name.startswith(prefix):
+ yield prefix, content
+
+ @property
+ def longreprtext(self):
+ """
+ Read-only property that returns the full string representation
+ of ``longrepr``.
+
+ .. versionadded:: 3.0
+ """
+ tw = py.io.TerminalWriter(stringio=True)
+ tw.hasmarkup = False
+ self.toterminal(tw)
+ exc = tw.stringio.getvalue()
+ return exc.strip()
+
+ @property
+ def caplog(self):
+ """Return captured log lines, if log capturing is enabled
+
+ .. versionadded:: 3.5
+ """
+ return "\n".join(
+ content for (prefix, content) in self.get_sections("Captured log")
+ )
+
+ @property
+ def capstdout(self):
+ """Return captured text from stdout, if capturing is enabled
+
+ .. versionadded:: 3.0
+ """
+ return "".join(
+ content for (prefix, content) in self.get_sections("Captured stdout")
+ )
+
+ @property
+ def capstderr(self):
+ """Return captured text from stderr, if capturing is enabled
+
+ .. versionadded:: 3.0
+ """
+ return "".join(
+ content for (prefix, content) in self.get_sections("Captured stderr")
+ )
+
+ passed = property(lambda x: x.outcome == "passed")
+ failed = property(lambda x: x.outcome == "failed")
+ skipped = property(lambda x: x.outcome == "skipped")
+
+ @property
+ def fspath(self):
+ return self.nodeid.split("::")[0]
+
+
+def pytest_runtest_makereport(item, call):
+ when = call.when
+ duration = call.stop - call.start
+ keywords = {x: 1 for x in item.keywords}
+ excinfo = call.excinfo
+ sections = []
+ if not call.excinfo:
+ outcome = "passed"
+ longrepr = None
+ else:
+ if not isinstance(excinfo, ExceptionInfo):
+ outcome = "failed"
+ longrepr = excinfo
+ elif excinfo.errisinstance(skip.Exception):
+ outcome = "skipped"
+ r = excinfo._getreprcrash()
+ longrepr = (str(r.path), r.lineno, r.message)
+ else:
+ outcome = "failed"
+ if call.when == "call":
+ longrepr = item.repr_failure(excinfo)
+ else: # exception in setup or teardown
+ longrepr = item._repr_failure_py(
+ excinfo, style=item.config.option.tbstyle
+ )
+ for rwhen, key, content in item._report_sections:
+ sections.append(("Captured %s %s" % (key, rwhen), content))
+ return TestReport(
+ item.nodeid,
+ item.location,
+ keywords,
+ outcome,
+ longrepr,
+ when,
+ sections,
+ duration,
+ user_properties=item.user_properties,
+ )
+
+
+class TestReport(BaseReport):
+ """ Basic test report object (also used for setup and teardown calls if
+ they fail).
+ """
+
+ def __init__(
+ self,
+ nodeid,
+ location,
+ keywords,
+ outcome,
+ longrepr,
+ when,
+ sections=(),
+ duration=0,
+ user_properties=(),
+ **extra
+ ):
+ #: normalized collection node id
+ self.nodeid = nodeid
+
+ #: a (filesystempath, lineno, domaininfo) tuple indicating the
+ #: actual location of a test item - it might be different from the
+ #: collected one e.g. if a method is inherited from a different module.
+ self.location = location
+
+ #: a name -> value dictionary containing all keywords and
+ #: markers associated with a test invocation.
+ self.keywords = keywords
+
+ #: test outcome, always one of "passed", "failed", "skipped".
+ self.outcome = outcome
+
+ #: None or a failure representation.
+ self.longrepr = longrepr
+
+ #: one of 'setup', 'call', 'teardown' to indicate runtest phase.
+ self.when = when
+
+ #: user properties is a list of tuples (name, value) that holds user
+ #: defined properties of the test
+ self.user_properties = user_properties
+
+        #: list of pairs ``(str, str)`` of extra information which needs to
+        #: be marshallable. Used by pytest to add captured text
+ #: from ``stdout`` and ``stderr``, but may be used by other plugins
+ #: to add arbitrary information to reports.
+ self.sections = list(sections)
+
+ #: time it took to run just the test
+ self.duration = duration
+
+ self.__dict__.update(extra)
+
+ def __repr__(self):
+ return "<TestReport %r when=%r outcome=%r>" % (
+ self.nodeid, self.when, self.outcome
+ )
+
+
+class TeardownErrorReport(BaseReport):
+ outcome = "failed"
+ when = "teardown"
+
+ def __init__(self, longrepr, **extra):
+ self.longrepr = longrepr
+ self.sections = []
+ self.__dict__.update(extra)
+
+
+def pytest_make_collect_report(collector):
+ call = CallInfo(lambda: list(collector.collect()), "collect")
+ longrepr = None
+ if not call.excinfo:
+ outcome = "passed"
+ else:
+ from _pytest import nose
+
+ skip_exceptions = (Skipped,) + nose.get_skip_exceptions()
+ if call.excinfo.errisinstance(skip_exceptions):
+ outcome = "skipped"
+ r = collector._repr_failure_py(call.excinfo, "line").reprcrash
+ longrepr = (str(r.path), r.lineno, r.message)
+ else:
+ outcome = "failed"
+ errorinfo = collector.repr_failure(call.excinfo)
+ if not hasattr(errorinfo, "toterminal"):
+ errorinfo = CollectErrorRepr(errorinfo)
+ longrepr = errorinfo
+ rep = CollectReport(
+ collector.nodeid, outcome, longrepr, getattr(call, "result", None)
+ )
+ rep.call = call # see collect_one_node
+ return rep
+
+
+class CollectReport(BaseReport):
+
+ def __init__(self, nodeid, outcome, longrepr, result, sections=(), **extra):
+ self.nodeid = nodeid
+ self.outcome = outcome
+ self.longrepr = longrepr
+ self.result = result or []
+ self.sections = list(sections)
+ self.__dict__.update(extra)
+
+ @property
+ def location(self):
+ return (self.fspath, None, self.fspath)
+
+ def __repr__(self):
+ return "<CollectReport %r lenresult=%s outcome=%r>" % (
+ self.nodeid, len(self.result), self.outcome
+ )
+
+
+class CollectErrorRepr(TerminalRepr):
+
+ def __init__(self, msg):
+ self.longrepr = msg
+
+ def toterminal(self, out):
+ out.line(self.longrepr, red=True)
+
+
+class SetupState(object):
+ """ shared state for setting up/tearing down test items or collectors. """
+
+ def __init__(self):
+ self.stack = []
+ self._finalizers = {}
+
+ def addfinalizer(self, finalizer, colitem):
+ """ attach a finalizer to the given colitem.
+ if colitem is None, this will add a finalizer that
+ is called at the end of teardown_all().
+ """
+ assert colitem and not isinstance(colitem, tuple)
+ assert callable(finalizer)
+ # assert colitem in self.stack # some unit tests don't setup stack :/
+ self._finalizers.setdefault(colitem, []).append(finalizer)
+
+ def _pop_and_teardown(self):
+ colitem = self.stack.pop()
+ self._teardown_with_finalization(colitem)
+
+ def _callfinalizers(self, colitem):
+ finalizers = self._finalizers.pop(colitem, None)
+ exc = None
+ while finalizers:
+ fin = finalizers.pop()
+ try:
+ fin()
+ except TEST_OUTCOME:
+ # XXX Only first exception will be seen by user,
+ # ideally all should be reported.
+ if exc is None:
+ exc = sys.exc_info()
+ if exc:
+ py.builtin._reraise(*exc)
+
+ def _teardown_with_finalization(self, colitem):
+ self._callfinalizers(colitem)
+ if hasattr(colitem, "teardown"):
+ colitem.teardown()
+ for colitem in self._finalizers:
+ assert (
+ colitem is None or colitem in self.stack or isinstance(colitem, tuple)
+ )
+
+ def teardown_all(self):
+ while self.stack:
+ self._pop_and_teardown()
+ for key in list(self._finalizers):
+ self._teardown_with_finalization(key)
+ assert not self._finalizers
+
+ def teardown_exact(self, item, nextitem):
+ needed_collectors = nextitem and nextitem.listchain() or []
+ self._teardown_towards(needed_collectors)
+
+ def _teardown_towards(self, needed_collectors):
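+        # Pop and tear down stack entries until the remaining stack is a
+        # prefix of the collectors needed for the next item; re-raise the
+        # first teardown exception (if any) at the end.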
+ exc = None
+ while self.stack:
+ if self.stack == needed_collectors[:len(self.stack)]:
+ break
+ try:
+ self._pop_and_teardown()
+ except TEST_OUTCOME:
+ # XXX Only first exception will be seen by user,
+ # ideally all should be reported.
+ if exc is None:
+ exc = sys.exc_info()
+ if exc:
+ py.builtin._reraise(*exc)
+
+ def prepare(self, colitem):
+ """ setup objects along the collector chain to the test-method
+ and teardown previously setup objects."""
+ needed_collectors = colitem.listchain()
+ self._teardown_towards(needed_collectors)
+
+ # check if the last collection node has raised an error
+ for col in self.stack:
+ if hasattr(col, "_prepare_exc"):
+ py.builtin._reraise(*col._prepare_exc)
+ for col in needed_collectors[len(self.stack):]:
+ self.stack.append(col)
+ try:
+ col.setup()
+ except TEST_OUTCOME:
+ col._prepare_exc = sys.exc_info()
+ raise
+
+
+def collect_one_node(collector):
+ ihook = collector.ihook
+ ihook.pytest_collectstart(collector=collector)
+ rep = ihook.pytest_make_collect_report(collector=collector)
+ call = rep.__dict__.pop("call", None)
+ if call and check_interactive_exception(call, rep):
+ ihook.pytest_exception_interact(node=collector, call=call, report=rep)
+ return rep
diff --git a/third_party/python/pytest/src/_pytest/setuponly.py b/third_party/python/pytest/src/_pytest/setuponly.py
new file mode 100644
index 0000000000..81240d9d05
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/setuponly.py
@@ -0,0 +1,84 @@
+from __future__ import absolute_import, division, print_function
+
+import pytest
+import sys
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("debugconfig")
+ group.addoption(
+ "--setuponly",
+ "--setup-only",
+ action="store_true",
+ help="only setup fixtures, do not execute tests.",
+ )
+ group.addoption(
+ "--setupshow",
+ "--setup-show",
+ action="store_true",
+ help="show setup of fixtures while executing tests.",
+ )
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_fixture_setup(fixturedef, request):
+ yield
+ config = request.config
+ if config.option.setupshow:
+ if hasattr(request, "param"):
+ # Save the fixture parameter so ._show_fixture_action() can
+ # display it now and during the teardown (in .finish()).
+ if fixturedef.ids:
+ if callable(fixturedef.ids):
+ fixturedef.cached_param = fixturedef.ids(request.param)
+ else:
+ fixturedef.cached_param = fixturedef.ids[request.param_index]
+ else:
+ fixturedef.cached_param = request.param
+ _show_fixture_action(fixturedef, "SETUP")
+
+
+def pytest_fixture_post_finalizer(fixturedef):
+ if hasattr(fixturedef, "cached_result"):
+ config = fixturedef._fixturemanager.config
+ if config.option.setupshow:
+ _show_fixture_action(fixturedef, "TEARDOWN")
+ if hasattr(fixturedef, "cached_param"):
+ del fixturedef.cached_param
+
+
+def _show_fixture_action(fixturedef, msg):
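+    # Temporarily suspend output capturing, print an indented SETUP/TEARDOWN
+    # line for the fixture (scope letter, name, dependencies and cached
+    # parameter), then resume capturing and replay the suspended output.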
+ config = fixturedef._fixturemanager.config
+ capman = config.pluginmanager.getplugin("capturemanager")
+ if capman:
+ out, err = capman.suspend_global_capture()
+
+ tw = config.get_terminal_writer()
+ tw.line()
+ tw.write(" " * 2 * fixturedef.scopenum)
+ tw.write(
+ "{step} {scope} {fixture}".format(
+ step=msg.ljust(8), # align the output to TEARDOWN
+ scope=fixturedef.scope[0].upper(),
+ fixture=fixturedef.argname,
+ )
+ )
+
+ if msg == "SETUP":
+ deps = sorted(arg for arg in fixturedef.argnames if arg != "request")
+ if deps:
+ tw.write(" (fixtures used: {})".format(", ".join(deps)))
+
+ if hasattr(fixturedef, "cached_param"):
+ tw.write("[{}]".format(fixturedef.cached_param))
+
+ if capman:
+ capman.resume_global_capture()
+ sys.stdout.write(out)
+ sys.stderr.write(err)
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_cmdline_main(config):
+ if config.option.setuponly:
+ config.option.setupshow = True
diff --git a/third_party/python/pytest/src/_pytest/setupplan.py b/third_party/python/pytest/src/_pytest/setupplan.py
new file mode 100644
index 0000000000..23f4f97e6d
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/setupplan.py
@@ -0,0 +1,29 @@
+from __future__ import absolute_import, division, print_function
+
+import pytest
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("debugconfig")
+ group.addoption(
+ "--setupplan",
+ "--setup-plan",
+ action="store_true",
+ help="show what fixtures and tests would be executed but "
+ "don't execute anything.",
+ )
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_fixture_setup(fixturedef, request):
+    # Will return a dummy fixture if the setup-plan option is provided.
+ if request.config.option.setupplan:
+ fixturedef.cached_result = (None, None, None)
+ return fixturedef.cached_result
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_cmdline_main(config):
+ if config.option.setupplan:
+ config.option.setuponly = True
+ config.option.setupshow = True
diff --git a/third_party/python/pytest/src/_pytest/skipping.py b/third_party/python/pytest/src/_pytest/skipping.py
new file mode 100644
index 0000000000..a348d5484c
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/skipping.py
@@ -0,0 +1,293 @@
+""" support for skip/xfail functions and markers. """
+from __future__ import absolute_import, division, print_function
+
+from _pytest.config import hookimpl
+from _pytest.mark.evaluate import MarkEvaluator
+from _pytest.outcomes import fail, skip, xfail
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group.addoption(
+ "--runxfail",
+ action="store_true",
+ dest="runxfail",
+ default=False,
+ help="run tests even if they are marked xfail",
+ )
+
+ parser.addini(
+ "xfail_strict",
+ "default for the strict parameter of xfail "
+ "markers when not given explicitly (default: False)",
+ default=False,
+ type="bool",
+ )
+
+
+def pytest_configure(config):
+ if config.option.runxfail:
+ # yay a hack
+ import pytest
+
+ old = pytest.xfail
+ config._cleanup.append(lambda: setattr(pytest, "xfail", old))
+
+ def nop(*args, **kwargs):
+ pass
+
+ nop.Exception = xfail.Exception
+ setattr(pytest, "xfail", nop)
+
+ config.addinivalue_line(
+ "markers",
+ "skip(reason=None): skip the given test function with an optional reason. "
+ 'Example: skip(reason="no way of currently testing this") skips the '
+ "test.",
+ )
+ config.addinivalue_line(
+ "markers",
+ "skipif(condition): skip the given test function if eval(condition) "
+ "results in a True value. Evaluation happens within the "
+ "module global context. Example: skipif('sys.platform == \"win32\"') "
+ "skips the test if we are on the win32 platform. see "
+ "http://pytest.org/latest/skipping.html",
+ )
+ config.addinivalue_line(
+ "markers",
+ "xfail(condition, reason=None, run=True, raises=None, strict=False): "
+ "mark the test function as an expected failure if eval(condition) "
+ "has a True value. Optionally specify a reason for better reporting "
+ "and run=False if you don't even want to execute the test function. "
+ "If only specific exception(s) are expected, you can list them in "
+ "raises, and if the test fails in other ways, it will be reported as "
+ "a true failure. See http://pytest.org/latest/skipping.html",
+ )
+
+
+@hookimpl(tryfirst=True)
+def pytest_runtest_setup(item):
+ # Check if skip or skipif are specified as pytest marks
+ item._skipped_by_mark = False
+ eval_skipif = MarkEvaluator(item, "skipif")
+ if eval_skipif.istrue():
+ item._skipped_by_mark = True
+ skip(eval_skipif.getexplanation())
+
+ for skip_info in item.iter_markers(name="skip"):
+ item._skipped_by_mark = True
+ if "reason" in skip_info.kwargs:
+ skip(skip_info.kwargs["reason"])
+ elif skip_info.args:
+ skip(skip_info.args[0])
+ else:
+ skip("unconditional skip")
+
+ item._evalxfail = MarkEvaluator(item, "xfail")
+ check_xfail_no_run(item)
+
+
+@hookimpl(hookwrapper=True)
+def pytest_pyfunc_call(pyfuncitem):
+ check_xfail_no_run(pyfuncitem)
+ outcome = yield
+ passed = outcome.excinfo is None
+ if passed:
+ check_strict_xfail(pyfuncitem)
+
+
+def check_xfail_no_run(item):
+ """check xfail(run=False)"""
+ if not item.config.option.runxfail:
+ evalxfail = item._evalxfail
+ if evalxfail.istrue():
+ if not evalxfail.get("run", True):
+ xfail("[NOTRUN] " + evalxfail.getexplanation())
+
+
+def check_strict_xfail(pyfuncitem):
+ """check xfail(strict=True) for the given PASSING test"""
+ evalxfail = pyfuncitem._evalxfail
+ if evalxfail.istrue():
+ strict_default = pyfuncitem.config.getini("xfail_strict")
+ is_strict_xfail = evalxfail.get("strict", strict_default)
+ if is_strict_xfail:
+ del pyfuncitem._evalxfail
+ explanation = evalxfail.getexplanation()
+ fail("[XPASS(strict)] " + explanation, pytrace=False)
+
+
+@hookimpl(hookwrapper=True)
+def pytest_runtest_makereport(item, call):
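+    # Post-process the report for xfail/skip handling: unittest unexpected
+    # successes, exceptions raised via pytest.xfail(), evaluated xfail markers
+    # (including strict xfail), and fixing up the reported location of
+    # mark-based skips to point at the test definition.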
+ outcome = yield
+ rep = outcome.get_result()
+ evalxfail = getattr(item, "_evalxfail", None)
+    # unittest special case, see setting of _unexpectedsuccess
+ if hasattr(item, "_unexpectedsuccess") and rep.when == "call":
+ from _pytest.compat import _is_unittest_unexpected_success_a_failure
+
+ if item._unexpectedsuccess:
+ rep.longrepr = "Unexpected success: {}".format(item._unexpectedsuccess)
+ else:
+ rep.longrepr = "Unexpected success"
+ if _is_unittest_unexpected_success_a_failure():
+ rep.outcome = "failed"
+ else:
+ rep.outcome = "passed"
+ rep.wasxfail = rep.longrepr
+ elif item.config.option.runxfail:
+        pass  # don't interfere
+ elif call.excinfo and call.excinfo.errisinstance(xfail.Exception):
+ rep.wasxfail = "reason: " + call.excinfo.value.msg
+ rep.outcome = "skipped"
+ elif evalxfail and not rep.skipped and evalxfail.wasvalid() and evalxfail.istrue():
+ if call.excinfo:
+ if evalxfail.invalidraise(call.excinfo.value):
+ rep.outcome = "failed"
+ else:
+ rep.outcome = "skipped"
+ rep.wasxfail = evalxfail.getexplanation()
+ elif call.when == "call":
+ strict_default = item.config.getini("xfail_strict")
+ is_strict_xfail = evalxfail.get("strict", strict_default)
+ explanation = evalxfail.getexplanation()
+ if is_strict_xfail:
+ rep.outcome = "failed"
+ rep.longrepr = "[XPASS(strict)] {}".format(explanation)
+ else:
+ rep.outcome = "passed"
+ rep.wasxfail = explanation
+ elif getattr(item, "_skipped_by_mark", False) and rep.skipped and type(
+ rep.longrepr
+ ) is tuple:
+ # skipped by mark.skipif; change the location of the failure
+ # to point to the item definition, otherwise it will display
+ # the location of where the skip exception was raised within pytest
+ filename, line, reason = rep.longrepr
+ filename, line = item.location[:2]
+ rep.longrepr = filename, line, reason
+
+
+# called by terminalreporter progress reporting
+
+
+def pytest_report_teststatus(report):
+ if hasattr(report, "wasxfail"):
+ if report.skipped:
+ return "xfailed", "x", "xfail"
+ elif report.passed:
+ return "xpassed", "X", ("XPASS", {"yellow": True})
+
+
+# called by the terminalreporter instance/plugin
+
+
+def pytest_terminal_summary(terminalreporter):
+ tr = terminalreporter
+ if not tr.reportchars:
+ # for name in "xfailed skipped failed xpassed":
+ # if not tr.stats.get(name, 0):
+ # tr.write_line("HINT: use '-r' option to see extra "
+ # "summary info about tests")
+ # break
+ return
+
+ lines = []
+ for char in tr.reportchars:
+ action = REPORTCHAR_ACTIONS.get(char, lambda tr, lines: None)
+ action(terminalreporter, lines)
+
+ if lines:
+ tr._tw.sep("=", "short test summary info")
+ for line in lines:
+ tr._tw.line(line)
+
+
+def show_simple(terminalreporter, lines, stat, format):
+ failed = terminalreporter.stats.get(stat)
+ if failed:
+ for rep in failed:
+ pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
+ lines.append(format % (pos,))
+
+
+def show_xfailed(terminalreporter, lines):
+ xfailed = terminalreporter.stats.get("xfailed")
+ if xfailed:
+ for rep in xfailed:
+ pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
+ reason = rep.wasxfail
+ lines.append("XFAIL %s" % (pos,))
+ if reason:
+ lines.append(" " + str(reason))
+
+
+def show_xpassed(terminalreporter, lines):
+ xpassed = terminalreporter.stats.get("xpassed")
+ if xpassed:
+ for rep in xpassed:
+ pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
+ reason = rep.wasxfail
+ lines.append("XPASS %s %s" % (pos, reason))
+
+
+def folded_skips(skipped):
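+    # Group skip reports that share the same (path, lineno, reason) so the
+    # summary can show them as a single "SKIP [count]" line.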
+ d = {}
+ for event in skipped:
+ key = event.longrepr
+ assert len(key) == 3, (event, key)
+ keywords = getattr(event, "keywords", {})
+        # folding reports with global pytestmark variable
+        # this is a workaround, because for now we cannot identify the scope of a skip marker
+        # TODO: revisit once mark scoping is fixed
+ when = getattr(event, "when", None)
+ if when == "setup" and "skip" in keywords and "pytestmark" not in keywords:
+ key = (key[0], None, key[2])
+ d.setdefault(key, []).append(event)
+ values = []
+ for key, events in d.items():
+ values.append((len(events),) + key)
+ return values
+
+
+def show_skipped(terminalreporter, lines):
+ tr = terminalreporter
+ skipped = tr.stats.get("skipped", [])
+ if skipped:
+ # if not tr.hasopt('skipped'):
+ # tr.write_line(
+ # "%d skipped tests, specify -rs for more info" %
+ # len(skipped))
+ # return
+ fskips = folded_skips(skipped)
+ if fskips:
+ # tr.write_sep("_", "skipped test summary")
+ for num, fspath, lineno, reason in fskips:
+ if reason.startswith("Skipped: "):
+ reason = reason[9:]
+ if lineno is not None:
+ lines.append(
+ "SKIP [%d] %s:%d: %s" % (num, fspath, lineno + 1, reason)
+ )
+ else:
+ lines.append("SKIP [%d] %s: %s" % (num, fspath, reason))
+
+
+def shower(stat, format):
+
+ def show_(terminalreporter, lines):
+ return show_simple(terminalreporter, lines, stat, format)
+
+ return show_
+
+
+REPORTCHAR_ACTIONS = {
+ "x": show_xfailed,
+ "X": show_xpassed,
+ "f": shower("failed", "FAIL %s"),
+ "F": shower("failed", "FAIL %s"),
+ "s": show_skipped,
+ "S": show_skipped,
+ "p": shower("passed", "PASSED %s"),
+ "E": shower("error", "ERROR %s"),
+}
diff --git a/third_party/python/pytest/src/_pytest/terminal.py b/third_party/python/pytest/src/_pytest/terminal.py
new file mode 100644
index 0000000000..9c4eec7531
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/terminal.py
@@ -0,0 +1,829 @@
+""" terminal reporting of the full testing process.
+
+This is a good source for looking at the various reporting hooks.
+"""
+from __future__ import absolute_import, division, print_function
+
+import itertools
+import platform
+import sys
+import time
+
+import pluggy
+import py
+import six
+from more_itertools import collapse
+
+import pytest
+from _pytest import nodes
+from _pytest.main import (
+ EXIT_OK,
+ EXIT_TESTSFAILED,
+ EXIT_INTERRUPTED,
+ EXIT_USAGEERROR,
+ EXIT_NOTESTSCOLLECTED,
+)
+
+
+import argparse
+
+
+class MoreQuietAction(argparse.Action):
+ """
+    A modified copy of the argparse count action which counts down and updates
+    the legacy quiet attribute at the same time.
+
+    Used to unify verbosity handling.
+ """
+
+ def __init__(self, option_strings, dest, default=None, required=False, help=None):
+ super(MoreQuietAction, self).__init__(
+ option_strings=option_strings,
+ dest=dest,
+ nargs=0,
+ default=default,
+ required=required,
+ help=help,
+ )
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ new_count = getattr(namespace, self.dest, 0) - 1
+ setattr(namespace, self.dest, new_count)
+ # todo Deprecate config.quiet
+ namespace.quiet = getattr(namespace, "quiet", 0) + 1
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting", "reporting", after="general")
+ group._addoption(
+ "-v",
+ "--verbose",
+ action="count",
+ default=0,
+ dest="verbose",
+ help="increase verbosity.",
+    )
+ group._addoption(
+ "-q",
+ "--quiet",
+ action=MoreQuietAction,
+ default=0,
+ dest="verbose",
+ help="decrease verbosity.",
+    )
+ group._addoption(
+ "--verbosity", dest="verbose", type=int, default=0, help="set verbosity"
+ )
+ group._addoption(
+ "-r",
+ action="store",
+ dest="reportchars",
+ default="",
+ metavar="chars",
+        help="show extra test summary info as specified by chars: (f)ailed, "
+        "(E)rror, (s)kipped, (x)failed, (X)passed, "
+        "(p)assed, (P)assed with output, (a)ll except pP. "
+ "Warnings are displayed at all times except when "
+ "--disable-warnings is set",
+ )
+ group._addoption(
+ "--disable-warnings",
+ "--disable-pytest-warnings",
+ default=False,
+ dest="disable_warnings",
+ action="store_true",
+ help="disable warnings summary",
+ )
+ group._addoption(
+ "-l",
+ "--showlocals",
+ action="store_true",
+ dest="showlocals",
+ default=False,
+ help="show locals in tracebacks (disabled by default).",
+ )
+ group._addoption(
+ "--tb",
+ metavar="style",
+ action="store",
+ dest="tbstyle",
+ default="auto",
+ choices=["auto", "long", "short", "no", "line", "native"],
+ help="traceback print mode (auto/long/short/line/native/no).",
+ )
+ group._addoption(
+ "--show-capture",
+ action="store",
+ dest="showcapture",
+ choices=["no", "stdout", "stderr", "log", "all"],
+ default="all",
+ help="Controls how captured stdout/stderr/log is shown on failed tests. "
+ "Default is 'all'.",
+ )
+ group._addoption(
+ "--fulltrace",
+ "--full-trace",
+ action="store_true",
+ default=False,
+ help="don't cut any tracebacks (default is to cut).",
+ )
+ group._addoption(
+ "--color",
+ metavar="color",
+ action="store",
+ dest="color",
+ default="auto",
+ choices=["yes", "no", "auto"],
+ help="color terminal output (yes/no/auto).",
+ )
+
+ parser.addini(
+ "console_output_style",
+ help="console output: classic or with additional progress information (classic|progress).",
+ default="progress",
+ )
+
+
+def pytest_configure(config):
+ reporter = TerminalReporter(config, sys.stdout)
+ config.pluginmanager.register(reporter, "terminalreporter")
+ if config.option.debug or config.option.traceconfig:
+
+ def mywriter(tags, args):
+ msg = " ".join(map(str, args))
+ reporter.write_line("[traceconfig] " + msg)
+
+ config.trace.root.setprocessor("pytest:config", mywriter)
+
+
+def getreportopt(config):
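+    # Translate the -r option characters into the effective set of report
+    # characters: "w" is implied unless --disable-warnings is given, and "a"
+    # expands to "fEsxXw".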
+ reportopts = ""
+ reportchars = config.option.reportchars
+ if not config.option.disable_warnings and "w" not in reportchars:
+ reportchars += "w"
+ elif config.option.disable_warnings and "w" in reportchars:
+ reportchars = reportchars.replace("w", "")
+ if reportchars:
+ for char in reportchars:
+ if char not in reportopts and char != "a":
+ reportopts += char
+ elif char == "a":
+ reportopts = "fEsxXw"
+ return reportopts
+
+
+def pytest_report_teststatus(report):
+ if report.passed:
+ letter = "."
+ elif report.skipped:
+ letter = "s"
+ elif report.failed:
+ letter = "F"
+ if report.when != "call":
+ letter = "f"
+ return report.outcome, letter, report.outcome.upper()
+
+
+class WarningReport(object):
+ """
+ Simple structure to hold warnings information captured by ``pytest_logwarning``.
+ """
+
+ def __init__(self, code, message, nodeid=None, fslocation=None):
+ """
+ :param code: unused
+ :param str message: user friendly message about the warning
+ :param str|None nodeid: node id that generated the warning (see ``get_location``).
+ :param tuple|py.path.local fslocation:
+ file system location of the source of the warning (see ``get_location``).
+ """
+ self.code = code
+ self.message = message
+ self.nodeid = nodeid
+ self.fslocation = fslocation
+
+ def get_location(self, config):
+ """
+        Return the most user-friendly information available about the
+        location of a warning, or None.
+ """
+ if self.nodeid:
+ return self.nodeid
+ if self.fslocation:
+ if isinstance(self.fslocation, tuple) and len(self.fslocation) >= 2:
+ filename, linenum = self.fslocation[:2]
+ relpath = py.path.local(filename).relto(config.invocation_dir)
+ return "%s:%s" % (relpath, linenum)
+ else:
+ return str(self.fslocation)
+ return None
+
+
+class TerminalReporter(object):
+
+ def __init__(self, config, file=None):
+ import _pytest.config
+
+ self.config = config
+ self.verbosity = self.config.option.verbose
+ self.showheader = self.verbosity >= 0
+ self.showfspath = self.verbosity >= 0
+ self.showlongtestinfo = self.verbosity > 0
+ self._numcollected = 0
+ self._session = None
+
+ self.stats = {}
+ self.startdir = py.path.local()
+ if file is None:
+ file = sys.stdout
+ self._tw = _pytest.config.create_terminal_writer(config, file)
+ # self.writer will be deprecated in pytest-3.4
+ self.writer = self._tw
+ self._screen_width = self._tw.fullwidth
+ self.currentfspath = None
+ self.reportchars = getreportopt(config)
+ self.hasmarkup = self._tw.hasmarkup
+ self.isatty = file.isatty()
+ self._progress_nodeids_reported = set()
+ self._show_progress_info = self._determine_show_progress_info()
+
+ def _determine_show_progress_info(self):
+ """Return True if we should display progress information based on the current config"""
+ # do not show progress if we are not capturing output (#3038)
+ if self.config.getoption("capture") == "no":
+ return False
+ # do not show progress if we are showing fixture setup/teardown
+ if self.config.getoption("setupshow"):
+ return False
+ return self.config.getini("console_output_style") == "progress"
+
+ def hasopt(self, char):
+ char = {"xfailed": "x", "skipped": "s"}.get(char, char)
+ return char in self.reportchars
+
+ def write_fspath_result(self, nodeid, res):
+ fspath = self.config.rootdir.join(nodeid.split("::")[0])
+ if fspath != self.currentfspath:
+ if self.currentfspath is not None:
+ self._write_progress_information_filling_space()
+ self.currentfspath = fspath
+ fspath = self.startdir.bestrelpath(fspath)
+ self._tw.line()
+ self._tw.write(fspath + " ")
+ self._tw.write(res)
+
+ def write_ensure_prefix(self, prefix, extra="", **kwargs):
+ if self.currentfspath != prefix:
+ self._tw.line()
+ self.currentfspath = prefix
+ self._tw.write(prefix)
+ if extra:
+ self._tw.write(extra, **kwargs)
+ self.currentfspath = -2
+
+ def ensure_newline(self):
+ if self.currentfspath:
+ self._tw.line()
+ self.currentfspath = None
+
+ def write(self, content, **markup):
+ self._tw.write(content, **markup)
+
+ def write_line(self, line, **markup):
+ if not isinstance(line, six.text_type):
+ line = six.text_type(line, errors="replace")
+ self.ensure_newline()
+ self._tw.line(line, **markup)
+
+ def rewrite(self, line, **markup):
+ """
+ Rewinds the terminal cursor to the beginning and writes the given line.
+
+ :kwarg erase: if True, will also add spaces until the full terminal width to ensure
+ previous lines are properly erased.
+
+ The rest of the keyword arguments are markup instructions.
+ """
+ erase = markup.pop("erase", False)
+ if erase:
+ fill_count = self._tw.fullwidth - len(line) - 1
+ fill = " " * fill_count
+ else:
+ fill = ""
+ line = str(line)
+ self._tw.write("\r" + line + fill, **markup)
+
+ def write_sep(self, sep, title=None, **markup):
+ self.ensure_newline()
+ self._tw.sep(sep, title, **markup)
+
+ def section(self, title, sep="=", **kw):
+ self._tw.sep(sep, title, **kw)
+
+ def line(self, msg, **kw):
+ self._tw.line(msg, **kw)
+
+ def pytest_internalerror(self, excrepr):
+ for line in six.text_type(excrepr).split("\n"):
+ self.write_line("INTERNALERROR> " + line)
+ return 1
+
+ def pytest_logwarning(self, code, fslocation, message, nodeid):
+ warnings = self.stats.setdefault("warnings", [])
+ warning = WarningReport(
+ code=code, fslocation=fslocation, message=message, nodeid=nodeid
+ )
+ warnings.append(warning)
+
+ def pytest_plugin_registered(self, plugin):
+ if self.config.option.traceconfig:
+ msg = "PLUGIN registered: %s" % (plugin,)
+            # XXX this event may happen during setup/teardown time, when
+            # output capturing is active, which unfortunately garbles our
+            # output when we use self.write_line
+ self.write_line(msg)
+
+ def pytest_deselected(self, items):
+ self.stats.setdefault("deselected", []).extend(items)
+
+ def pytest_runtest_logstart(self, nodeid, location):
+ # ensure that the path is printed before the
+ # 1st test of a module starts running
+ if self.showlongtestinfo:
+ line = self._locationline(nodeid, *location)
+ self.write_ensure_prefix(line, "")
+ elif self.showfspath:
+ fsid = nodeid.split("::")[0]
+ self.write_fspath_result(fsid, "")
+
+ def pytest_runtest_logreport(self, report):
+ rep = report
+ res = self.config.hook.pytest_report_teststatus(report=rep)
+ cat, letter, word = res
+ if isinstance(word, tuple):
+ word, markup = word
+ else:
+ markup = None
+ self.stats.setdefault(cat, []).append(rep)
+ self._tests_ran = True
+ if not letter and not word:
+ # probably passed setup/teardown
+ return
+ running_xdist = hasattr(rep, "node")
+ if self.verbosity <= 0:
+ if not running_xdist and self.showfspath:
+ self.write_fspath_result(rep.nodeid, letter)
+ else:
+ self._tw.write(letter)
+ else:
+ self._progress_nodeids_reported.add(rep.nodeid)
+ if markup is None:
+ if rep.passed:
+ markup = {"green": True}
+ elif rep.failed:
+ markup = {"red": True}
+ elif rep.skipped:
+ markup = {"yellow": True}
+ else:
+ markup = {}
+ line = self._locationline(rep.nodeid, *rep.location)
+ if not running_xdist:
+ self.write_ensure_prefix(line, word, **markup)
+ if self._show_progress_info:
+ self._write_progress_information_filling_space()
+ else:
+ self.ensure_newline()
+ self._tw.write("[%s]" % rep.node.gateway.id)
+ if self._show_progress_info:
+ self._tw.write(
+ self._get_progress_information_message() + " ", cyan=True
+ )
+ else:
+ self._tw.write(" ")
+ self._tw.write(word, **markup)
+ self._tw.write(" " + line)
+ self.currentfspath = -2
+
+ def pytest_runtest_logfinish(self, nodeid):
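+        # In non-verbose mode with progress output enabled, write the
+        # percentage once the current line is about to run past the screen
+        # width, and pad the very last line with the final percentage.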
+ if self.verbosity <= 0 and self._show_progress_info:
+ self._progress_nodeids_reported.add(nodeid)
+ last_item = len(
+ self._progress_nodeids_reported
+ ) == self._session.testscollected
+ if last_item:
+ self._write_progress_information_filling_space()
+ else:
+ past_edge = self._tw.chars_on_current_line + self._PROGRESS_LENGTH + 1 >= self._screen_width
+ if past_edge:
+ msg = self._get_progress_information_message()
+ self._tw.write(msg + "\n", cyan=True)
+
+ _PROGRESS_LENGTH = len(" [100%]")
+
+ def _get_progress_information_message(self):
+ if self.config.getoption("capture") == "no":
+ return ""
+ collected = self._session.testscollected
+ if collected:
+ progress = len(self._progress_nodeids_reported) * 100 // collected
+ return " [{:3d}%]".format(progress)
+ return " [100%]"
+
+ def _write_progress_information_filling_space(self):
+ msg = self._get_progress_information_message()
+ fill = " " * (
+ self._tw.fullwidth - self._tw.chars_on_current_line - len(msg) - 1
+ )
+ self.write(fill + msg, cyan=True)
+
+ def pytest_collection(self):
+ if not self.isatty and self.config.option.verbose >= 1:
+ self.write("collecting ... ", bold=True)
+
+ def pytest_collectreport(self, report):
+ if report.failed:
+ self.stats.setdefault("error", []).append(report)
+ elif report.skipped:
+ self.stats.setdefault("skipped", []).append(report)
+ items = [x for x in report.result if isinstance(x, pytest.Item)]
+ self._numcollected += len(items)
+ if self.isatty:
+ # self.write_fspath_result(report.nodeid, 'E')
+ self.report_collect()
+
+ def report_collect(self, final=False):
+ if self.config.option.verbose < 0:
+ return
+
+ errors = len(self.stats.get("error", []))
+ skipped = len(self.stats.get("skipped", []))
+ deselected = len(self.stats.get("deselected", []))
+ if final:
+ line = "collected "
+ else:
+ line = "collecting "
+ line += str(self._numcollected) + " item" + (
+ "" if self._numcollected == 1 else "s"
+ )
+ if errors:
+ line += " / %d errors" % errors
+ if deselected:
+ line += " / %d deselected" % deselected
+ if skipped:
+ line += " / %d skipped" % skipped
+ if self.isatty:
+ self.rewrite(line, bold=True, erase=True)
+ if final:
+ self.write("\n")
+ else:
+ self.write_line(line)
+
+ @pytest.hookimpl(trylast=True)
+ def pytest_collection_modifyitems(self):
+ self.report_collect(True)
+
+ @pytest.hookimpl(trylast=True)
+ def pytest_sessionstart(self, session):
+ self._session = session
+ self._sessionstarttime = time.time()
+ if not self.showheader:
+ return
+ self.write_sep("=", "test session starts", bold=True)
+ verinfo = platform.python_version()
+ msg = "platform %s -- Python %s" % (sys.platform, verinfo)
+ if hasattr(sys, "pypy_version_info"):
+ verinfo = ".".join(map(str, sys.pypy_version_info[:3]))
+ msg += "[pypy-%s-%s]" % (verinfo, sys.pypy_version_info[3])
+ msg += ", pytest-%s, py-%s, pluggy-%s" % (
+ pytest.__version__, py.__version__, pluggy.__version__
+ )
+ if (
+ self.verbosity > 0
+ or self.config.option.debug
+ or getattr(self.config.option, "pastebin", None)
+ ):
+ msg += " -- " + str(sys.executable)
+ self.write_line(msg)
+ lines = self.config.hook.pytest_report_header(
+ config=self.config, startdir=self.startdir
+ )
+ self._write_report_lines_from_hooks(lines)
+
+ def _write_report_lines_from_hooks(self, lines):
+ lines.reverse()
+ for line in collapse(lines):
+ self.write_line(line)
+
+ def pytest_report_header(self, config):
+ inifile = ""
+ if config.inifile:
+ inifile = " " + config.rootdir.bestrelpath(config.inifile)
+ lines = ["rootdir: %s, inifile:%s" % (config.rootdir, inifile)]
+
+ plugininfo = config.pluginmanager.list_plugin_distinfo()
+ if plugininfo:
+
+ lines.append("plugins: %s" % ", ".join(_plugin_nameversions(plugininfo)))
+ return lines
+
+ def pytest_collection_finish(self, session):
+ if self.config.option.collectonly:
+ self._printcollecteditems(session.items)
+ if self.stats.get("failed"):
+ self._tw.sep("!", "collection failures")
+ for rep in self.stats.get("failed"):
+ rep.toterminal(self._tw)
+ return 1
+ return 0
+ lines = self.config.hook.pytest_report_collectionfinish(
+ config=self.config, startdir=self.startdir, items=session.items
+ )
+ self._write_report_lines_from_hooks(lines)
+
+ def _printcollecteditems(self, items):
+ # to print out items and their parent collectors
+ # we take care to leave out Instances aka ()
+ # because later versions are going to get rid of them anyway
+ if self.config.option.verbose < 0:
+ if self.config.option.verbose < -1:
+ counts = {}
+ for item in items:
+ name = item.nodeid.split("::", 1)[0]
+ counts[name] = counts.get(name, 0) + 1
+ for name, count in sorted(counts.items()):
+ self._tw.line("%s: %d" % (name, count))
+ else:
+ for item in items:
+ nodeid = item.nodeid
+ nodeid = nodeid.replace("::()::", "::")
+ self._tw.line(nodeid)
+ return
+ stack = []
+ indent = ""
+ for item in items:
+ needed_collectors = item.listchain()[1:] # strip root node
+ while stack:
+ if stack == needed_collectors[:len(stack)]:
+ break
+ stack.pop()
+ for col in needed_collectors[len(stack):]:
+ stack.append(col)
+ # if col.name == "()":
+ # continue
+ indent = (len(stack) - 1) * " "
+ self._tw.line("%s%s" % (indent, col))
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_sessionfinish(self, exitstatus):
+ outcome = yield
+ outcome.get_result()
+ self._tw.line("")
+ summary_exit_codes = (
+ EXIT_OK,
+ EXIT_TESTSFAILED,
+ EXIT_INTERRUPTED,
+ EXIT_USAGEERROR,
+ EXIT_NOTESTSCOLLECTED,
+ )
+ if exitstatus in summary_exit_codes:
+ self.config.hook.pytest_terminal_summary(
+ terminalreporter=self, exitstatus=exitstatus
+ )
+ if exitstatus == EXIT_INTERRUPTED:
+ self._report_keyboardinterrupt()
+ del self._keyboardinterrupt_memo
+ self.summary_stats()
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_terminal_summary(self):
+ self.summary_errors()
+ self.summary_failures()
+ yield
+ self.summary_warnings()
+ self.summary_passes()
+
+ def pytest_keyboard_interrupt(self, excinfo):
+ self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)
+
+ def pytest_unconfigure(self):
+ if hasattr(self, "_keyboardinterrupt_memo"):
+ self._report_keyboardinterrupt()
+
+ def _report_keyboardinterrupt(self):
+ excrepr = self._keyboardinterrupt_memo
+ msg = excrepr.reprcrash.message
+ self.write_sep("!", msg)
+ if "KeyboardInterrupt" in msg:
+ if self.config.option.fulltrace:
+ excrepr.toterminal(self._tw)
+ else:
+ excrepr.reprcrash.toterminal(self._tw)
+ self._tw.line(
+ "(to show a full traceback on KeyboardInterrupt use --fulltrace)",
+ yellow=True,
+ )
+
+ def _locationline(self, nodeid, fspath, lineno, domain):
+
+ def mkrel(nodeid):
+ line = self.config.cwd_relative_nodeid(nodeid)
+ if domain and line.endswith(domain):
+ line = line[:-len(domain)]
+ values = domain.split("[")
+ values[0] = values[0].replace(".", "::") # don't replace '.' in params
+ line += "[".join(values)
+ return line
+
+ # collect_fspath comes from testid which has a "/"-normalized path
+
+ if fspath:
+ res = mkrel(nodeid).replace("::()", "") # parens-normalization
+ if nodeid.split("::")[0] != fspath.replace("\\", nodes.SEP):
+ res += " <- " + self.startdir.bestrelpath(fspath)
+ else:
+ res = "[location]"
+ return res + " "
+
+ def _getfailureheadline(self, rep):
+ if hasattr(rep, "location"):
+ fspath, lineno, domain = rep.location
+ return domain
+ else:
+ return "test session" # XXX?
+
+ def _getcrashline(self, rep):
+ try:
+ return str(rep.longrepr.reprcrash)
+ except AttributeError:
+ try:
+ return str(rep.longrepr)[:50]
+ except AttributeError:
+ return ""
+
+ #
+ # summaries for sessionfinish
+ #
+ def getreports(self, name):
+ values = []
+ for x in self.stats.get(name, []):
+ if not hasattr(x, "_pdbshown"):
+ values.append(x)
+ return values
+
+ def summary_warnings(self):
+ if self.hasopt("w"):
+ all_warnings = self.stats.get("warnings")
+ if not all_warnings:
+ return
+
+ grouped = itertools.groupby(
+ all_warnings, key=lambda wr: wr.get_location(self.config)
+ )
+
+ self.write_sep("=", "warnings summary", yellow=True, bold=False)
+ for location, warning_records in grouped:
+ self._tw.line(str(location) if location else "<undetermined location>")
+ for w in warning_records:
+ lines = w.message.splitlines()
+ indented = "\n".join(" " + x for x in lines)
+ self._tw.line(indented)
+ self._tw.line()
+ self._tw.line("-- Docs: http://doc.pytest.org/en/latest/warnings.html")
+
+ def summary_passes(self):
+ if self.config.option.tbstyle != "no":
+ if self.hasopt("P"):
+ reports = self.getreports("passed")
+ if not reports:
+ return
+ self.write_sep("=", "PASSES")
+ for rep in reports:
+ msg = self._getfailureheadline(rep)
+ self.write_sep("_", msg)
+ self._outrep_summary(rep)
+
+ def print_teardown_sections(self, rep):
+ for secname, content in rep.sections:
+ if "teardown" in secname:
+ self._tw.sep("-", secname)
+ if content[-1:] == "\n":
+ content = content[:-1]
+ self._tw.line(content)
+
+ def summary_failures(self):
+ if self.config.option.tbstyle != "no":
+ reports = self.getreports("failed")
+ if not reports:
+ return
+ self.write_sep("=", "FAILURES")
+ for rep in reports:
+ if self.config.option.tbstyle == "line":
+ line = self._getcrashline(rep)
+ self.write_line(line)
+ else:
+ msg = self._getfailureheadline(rep)
+ markup = {"red": True, "bold": True}
+ self.write_sep("_", msg, **markup)
+ self._outrep_summary(rep)
+ for report in self.getreports(""):
+ if report.nodeid == rep.nodeid and report.when == "teardown":
+ self.print_teardown_sections(report)
+
+ def summary_errors(self):
+ if self.config.option.tbstyle != "no":
+ reports = self.getreports("error")
+ if not reports:
+ return
+ self.write_sep("=", "ERRORS")
+ for rep in self.stats["error"]:
+ msg = self._getfailureheadline(rep)
+ if not hasattr(rep, "when"):
+ # collect
+ msg = "ERROR collecting " + msg
+ elif rep.when == "setup":
+ msg = "ERROR at setup of " + msg
+ elif rep.when == "teardown":
+ msg = "ERROR at teardown of " + msg
+ self.write_sep("_", msg)
+ self._outrep_summary(rep)
+
+ def _outrep_summary(self, rep):
+ rep.toterminal(self._tw)
+ showcapture = self.config.option.showcapture
+ if showcapture == "no":
+ return
+ for secname, content in rep.sections:
+ if showcapture != "all" and showcapture not in secname:
+ continue
+ self._tw.sep("-", secname)
+ if content[-1:] == "\n":
+ content = content[:-1]
+ self._tw.line(content)
+
+ def summary_stats(self):
+ session_duration = time.time() - self._sessionstarttime
+ (line, color) = build_summary_stats_line(self.stats)
+ msg = "%s in %.2f seconds" % (line, session_duration)
+ markup = {color: True, "bold": True}
+
+ if self.verbosity >= 0:
+ self.write_sep("=", msg, **markup)
+ if self.verbosity == -1:
+ self.write_line(msg, **markup)
+
+
+def repr_pythonversion(v=None):
+ if v is None:
+ v = sys.version_info
+ try:
+ return "%s.%s.%s-%s-%s" % v
+ except (TypeError, ValueError):
+ return str(v)
+
+
+def build_summary_stats_line(stats):
+ keys = (
+ "failed passed skipped deselected " "xfailed xpassed warnings error"
+ ).split()
+ unknown_key_seen = False
+ for key in stats.keys():
+ if key not in keys:
+ if key: # setup/teardown reports have an empty key, ignore them
+ keys.append(key)
+ unknown_key_seen = True
+ parts = []
+ for key in keys:
+ val = stats.get(key, None)
+ if val:
+ parts.append("%d %s" % (len(val), key))
+
+ if parts:
+ line = ", ".join(parts)
+ else:
+ line = "no tests ran"
+
+ if "failed" in stats or "error" in stats:
+ color = "red"
+ elif "warnings" in stats or unknown_key_seen:
+ color = "yellow"
+ elif "passed" in stats:
+ color = "green"
+ else:
+ color = "yellow"
+
+ return (line, color)
+
+
+def _plugin_nameversions(plugininfo):
+ values = []
+ for plugin, dist in plugininfo:
+ # gets us name and version!
+ name = "{dist.project_name}-{dist.version}".format(dist=dist)
+ # questionable convenience, but it keeps things short
+ if name.startswith("pytest-"):
+ name = name[7:]
+        # we decided to print python package names;
+        # a single package can provide more than one plugin
+ if name not in values:
+ values.append(name)
+ return values
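
The two helpers above are pure functions over the collected stats mapping and the plugin distribution info, so their behaviour can be illustrated in isolation. A minimal sketch, assuming the vendored module path above; all values are invented for illustration:

    from _pytest.terminal import _plugin_nameversions, build_summary_stats_line

    # hand-built stats: keys follow the report categories used by TerminalReporter
    stats = {"passed": [object()] * 3, "failed": [object()], "warnings": [object()]}
    line, color = build_summary_stats_line(stats)
    assert line == "1 failed, 3 passed, 1 warnings"
    assert color == "red"  # any failure or error outranks warnings/passed

    class FakeDist(object):  # stand-in for a pkg_resources distribution
        project_name = "pytest-cov"
        version = "2.5.1"

    # the "pytest-" prefix is stripped to keep the header line short
    assert _plugin_nameversions([(None, FakeDist())]) == ["cov-2.5.1"]
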
diff --git a/third_party/python/pytest/src/_pytest/tmpdir.py b/third_party/python/pytest/src/_pytest/tmpdir.py
new file mode 100644
index 0000000000..260d284223
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/tmpdir.py
@@ -0,0 +1,131 @@
+""" support for providing temporary directories to test functions. """
+from __future__ import absolute_import, division, print_function
+
+import re
+
+import pytest
+import py
+from _pytest.monkeypatch import MonkeyPatch
+
+
+class TempdirFactory(object):
+ """Factory for temporary directories under the common base temp directory.
+
+ The base directory can be configured using the ``--basetemp`` option.
+ """
+
+ def __init__(self, config):
+ self.config = config
+ self.trace = config.trace.get("tmpdir")
+
+ def ensuretemp(self, string, dir=1):
+ """ (deprecated) return temporary directory path with
+ the given string as the trailing part. It is usually
+        better to use the 'tmpdir' function argument which
+        provides an empty, unique directory per test invocation.
+ """
+ # py.log._apiwarn(">1.1", "use tmpdir function argument")
+ return self.getbasetemp().ensure(string, dir=dir)
+
+ def mktemp(self, basename, numbered=True):
+ """Create a subdirectory of the base temporary directory and return it.
+ If ``numbered``, ensure the directory is unique by adding a number
+ prefix greater than any existing one.
+ """
+ basetemp = self.getbasetemp()
+ if not numbered:
+ p = basetemp.mkdir(basename)
+ else:
+ p = py.path.local.make_numbered_dir(
+ prefix=basename, keep=0, rootdir=basetemp, lock_timeout=None
+ )
+ self.trace("mktemp", p)
+ return p
+
+ def getbasetemp(self):
+ """ return base temporary directory. """
+ try:
+ return self._basetemp
+ except AttributeError:
+ basetemp = self.config.option.basetemp
+ if basetemp:
+ basetemp = py.path.local(basetemp)
+ if basetemp.check():
+ basetemp.remove()
+ basetemp.mkdir()
+ else:
+ temproot = py.path.local.get_temproot()
+ user = get_user()
+ if user:
+ # use a sub-directory in the temproot to speed-up
+ # make_numbered_dir() call
+ rootdir = temproot.join("pytest-of-%s" % user)
+ else:
+ rootdir = temproot
+ rootdir.ensure(dir=1)
+ basetemp = py.path.local.make_numbered_dir(
+ prefix="pytest-", rootdir=rootdir
+ )
+ self._basetemp = t = basetemp.realpath()
+ self.trace("new basetemp", t)
+ return t
+
+ def finish(self):
+ self.trace("finish")
+
+
+def get_user():
+ """Return the current user name, or None if getuser() does not work
+ in the current environment (see #1010).
+ """
+ import getpass
+
+ try:
+ return getpass.getuser()
+ except (ImportError, KeyError):
+ return None
+
+
+# backward compatibility
+TempdirHandler = TempdirFactory
+
+
+def pytest_configure(config):
+ """Create a TempdirFactory and attach it to the config object.
+
+ This is to comply with existing plugins which expect the handler to be
+ available at pytest_configure time, but ideally should be moved entirely
+ to the tmpdir_factory session fixture.
+ """
+ mp = MonkeyPatch()
+ t = TempdirFactory(config)
+ config._cleanup.extend([mp.undo, t.finish])
+ mp.setattr(config, "_tmpdirhandler", t, raising=False)
+ mp.setattr(pytest, "ensuretemp", t.ensuretemp, raising=False)
+
+
+@pytest.fixture(scope="session")
+def tmpdir_factory(request):
+ """Return a TempdirFactory instance for the test session.
+ """
+ return request.config._tmpdirhandler
+
+
+@pytest.fixture
+def tmpdir(request, tmpdir_factory):
+ """Return a temporary directory path object
+ which is unique to each test function invocation,
+ created as a sub directory of the base temporary
+ directory. The returned object is a `py.path.local`_
+ path object.
+
+ .. _`py.path.local`: https://py.readthedocs.io/en/latest/path.html
+ """
+ name = request.node.name
+ name = re.sub(r"[\W]", "_", name)
+ MAXVAL = 30
+ if len(name) > MAXVAL:
+ name = name[:MAXVAL]
+ x = tmpdir_factory.mktemp(name, numbered=True)
+ return x
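
The tmpdir_factory and tmpdir fixtures defined above are consumed by naming them in a test signature; no registration is needed beyond this plugin. A minimal usage sketch (file and directory names are illustrative assumptions):

    # test_tmpdir_usage.py
    def test_create_file(tmpdir):
        # tmpdir is a fresh, empty py.path.local directory per test invocation
        p = tmpdir.join("hello.txt")
        p.write("content")
        assert p.read() == "content"

    def test_session_scoped_dir(tmpdir_factory):
        # mktemp() creates a numbered subdirectory under the base temp dir
        data_dir = tmpdir_factory.mktemp("data")
        assert data_dir.check(dir=1)
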
diff --git a/third_party/python/pytest/src/_pytest/unittest.py b/third_party/python/pytest/src/_pytest/unittest.py
new file mode 100644
index 0000000000..6ad9fda887
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/unittest.py
@@ -0,0 +1,253 @@
+""" discovery and running of std-library "unittest" style tests. """
+from __future__ import absolute_import, division, print_function
+
+import sys
+import traceback
+
+# for transferring markers
+import _pytest._code
+from _pytest.config import hookimpl
+from _pytest.outcomes import fail, skip, xfail
+from _pytest.python import transfer_markers, Class, Module, Function
+
+
+def pytest_pycollect_makeitem(collector, name, obj):
+ # has unittest been imported and is obj a subclass of its TestCase?
+ try:
+ if not issubclass(obj, sys.modules["unittest"].TestCase):
+ return
+ except Exception:
+ return
+ # yes, so let's collect it
+ return UnitTestCase(name, parent=collector)
+
+
+class UnitTestCase(Class):
+ # marker for fixturemanger.getfixtureinfo()
+ # to declare that our children do not support funcargs
+ nofuncargs = True
+
+ def setup(self):
+ cls = self.obj
+ if getattr(cls, "__unittest_skip__", False):
+ return # skipped
+ setup = getattr(cls, "setUpClass", None)
+ if setup is not None:
+ setup()
+ teardown = getattr(cls, "tearDownClass", None)
+ if teardown is not None:
+ self.addfinalizer(teardown)
+ super(UnitTestCase, self).setup()
+
+ def collect(self):
+ from unittest import TestLoader
+
+ cls = self.obj
+ if not getattr(cls, "__test__", True):
+ return
+ self.session._fixturemanager.parsefactories(self, unittest=True)
+ loader = TestLoader()
+ module = self.getparent(Module).obj
+ foundsomething = False
+ for name in loader.getTestCaseNames(self.obj):
+ x = getattr(self.obj, name)
+ if not getattr(x, "__test__", True):
+ continue
+ funcobj = getattr(x, "im_func", x)
+ transfer_markers(funcobj, cls, module)
+ yield TestCaseFunction(name, parent=self, callobj=funcobj)
+ foundsomething = True
+
+ if not foundsomething:
+ runtest = getattr(self.obj, "runTest", None)
+ if runtest is not None:
+ ut = sys.modules.get("twisted.trial.unittest", None)
+ if ut is None or runtest != ut.TestCase.runTest:
+ yield TestCaseFunction("runTest", parent=self)
+
+
+class TestCaseFunction(Function):
+ nofuncargs = True
+ _excinfo = None
+
+ def setup(self):
+ self._testcase = self.parent.obj(self.name)
+ self._fix_unittest_skip_decorator()
+ self._obj = getattr(self._testcase, self.name)
+ if hasattr(self._testcase, "setup_method"):
+ self._testcase.setup_method(self._obj)
+ if hasattr(self, "_request"):
+ self._request._fillfixtures()
+
+ def _fix_unittest_skip_decorator(self):
+ """
+ The @unittest.skip decorator calls functools.wraps(self._testcase)
+ The call to functools.wraps() fails unless self._testcase
+        has a __name__ attribute. This is usually supplied automatically
+        if the test is a function or method, but we need to add it
+        manually here.
+
+ See issue #1169
+ """
+ if sys.version_info[0] == 2:
+ setattr(self._testcase, "__name__", self.name)
+
+ def teardown(self):
+ if hasattr(self._testcase, "teardown_method"):
+ self._testcase.teardown_method(self._obj)
+ # Allow garbage collection on TestCase instance attributes.
+ self._testcase = None
+ self._obj = None
+
+ def startTest(self, testcase):
+ pass
+
+ def _addexcinfo(self, rawexcinfo):
+ # unwrap potential exception info (see twisted trial support below)
+ rawexcinfo = getattr(rawexcinfo, "_rawexcinfo", rawexcinfo)
+ try:
+ excinfo = _pytest._code.ExceptionInfo(rawexcinfo)
+ except TypeError:
+ try:
+ try:
+ values = traceback.format_exception(*rawexcinfo)
+ values.insert(
+ 0,
+ "NOTE: Incompatible Exception Representation, "
+ "displaying natively:\n\n",
+ )
+ fail("".join(values), pytrace=False)
+ except (fail.Exception, KeyboardInterrupt):
+ raise
+ except: # noqa
+ fail(
+ "ERROR: Unknown Incompatible Exception "
+ "representation:\n%r" % (rawexcinfo,),
+ pytrace=False,
+ )
+ except KeyboardInterrupt:
+ raise
+ except fail.Exception:
+ excinfo = _pytest._code.ExceptionInfo()
+ self.__dict__.setdefault("_excinfo", []).append(excinfo)
+
+ def addError(self, testcase, rawexcinfo):
+ self._addexcinfo(rawexcinfo)
+
+ def addFailure(self, testcase, rawexcinfo):
+ self._addexcinfo(rawexcinfo)
+
+ def addSkip(self, testcase, reason):
+ try:
+ skip(reason)
+ except skip.Exception:
+ self._skipped_by_mark = True
+ self._addexcinfo(sys.exc_info())
+
+ def addExpectedFailure(self, testcase, rawexcinfo, reason=""):
+ try:
+ xfail(str(reason))
+ except xfail.Exception:
+ self._addexcinfo(sys.exc_info())
+
+ def addUnexpectedSuccess(self, testcase, reason=""):
+ self._unexpectedsuccess = reason
+
+ def addSuccess(self, testcase):
+ pass
+
+ def stopTest(self, testcase):
+ pass
+
+ def _handle_skip(self):
+ # implements the skipping machinery (see #2137)
+ # analog to pythons Lib/unittest/case.py:run
+ testMethod = getattr(self._testcase, self._testcase._testMethodName)
+ if (
+ getattr(self._testcase.__class__, "__unittest_skip__", False)
+ or getattr(testMethod, "__unittest_skip__", False)
+ ):
+ # If the class or method was skipped.
+ skip_why = (
+ getattr(self._testcase.__class__, "__unittest_skip_why__", "")
+ or getattr(testMethod, "__unittest_skip_why__", "")
+ )
+ try: # PY3, unittest2 on PY2
+ self._testcase._addSkip(self, self._testcase, skip_why)
+ except TypeError: # PY2
+ if sys.version_info[0] != 2:
+ raise
+ self._testcase._addSkip(self, skip_why)
+ return True
+ return False
+
+ def runtest(self):
+ if self.config.pluginmanager.get_plugin("pdbinvoke") is None:
+ self._testcase(result=self)
+ else:
+ # disables tearDown and cleanups for post mortem debugging (see #1890)
+ if self._handle_skip():
+ return
+ self._testcase.debug()
+
+ def _prunetraceback(self, excinfo):
+ Function._prunetraceback(self, excinfo)
+ traceback = excinfo.traceback.filter(
+ lambda x: not x.frame.f_globals.get("__unittest")
+ )
+ if traceback:
+ excinfo.traceback = traceback
+
+
+@hookimpl(tryfirst=True)
+def pytest_runtest_makereport(item, call):
+ if isinstance(item, TestCaseFunction):
+ if item._excinfo:
+ call.excinfo = item._excinfo.pop(0)
+ try:
+ del call.result
+ except AttributeError:
+ pass
+
+
+# twisted trial support
+
+
+@hookimpl(hookwrapper=True)
+def pytest_runtest_protocol(item):
+ if isinstance(item, TestCaseFunction) and "twisted.trial.unittest" in sys.modules:
+ ut = sys.modules["twisted.python.failure"]
+ Failure__init__ = ut.Failure.__init__
+ check_testcase_implements_trial_reporter()
+
+ def excstore(
+ self, exc_value=None, exc_type=None, exc_tb=None, captureVars=None
+ ):
+ if exc_value is None:
+ self._rawexcinfo = sys.exc_info()
+ else:
+ if exc_type is None:
+ exc_type = type(exc_value)
+ self._rawexcinfo = (exc_type, exc_value, exc_tb)
+ try:
+ Failure__init__(
+ self, exc_value, exc_type, exc_tb, captureVars=captureVars
+ )
+ except TypeError:
+ Failure__init__(self, exc_value, exc_type, exc_tb)
+
+ ut.Failure.__init__ = excstore
+ yield
+ ut.Failure.__init__ = Failure__init__
+ else:
+ yield
+
+
+def check_testcase_implements_trial_reporter(done=[]):
+ if done:
+ return
+ from zope.interface import classImplements
+ from twisted.trial.itrial import IReporter
+
+ classImplements(TestCaseFunction, IReporter)
+ done.append(1)
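
The collector above picks up plain standard-library TestCase subclasses: setUpClass/tearDownClass are routed through UnitTestCase.setup() and addfinalizer(), and @unittest.skip goes through the _handle_skip() machinery. A minimal sketch of a module it would collect (names are illustrative assumptions):

    # test_legacy_suite.py
    import unittest

    class TestLegacy(unittest.TestCase):
        @classmethod
        def setUpClass(cls):
            cls.value = 42  # invoked via UnitTestCase.setup()

        def test_value(self):
            self.assertEqual(self.value, 42)

        @unittest.skip("exercises the skip machinery")
        def test_skipped(self):
            self.fail("never runs")
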
diff --git a/third_party/python/pytest/src/_pytest/warnings.py b/third_party/python/pytest/src/_pytest/warnings.py
new file mode 100644
index 0000000000..e023d0ab49
--- /dev/null
+++ b/third_party/python/pytest/src/_pytest/warnings.py
@@ -0,0 +1,112 @@
+from __future__ import absolute_import, division, print_function
+
+import warnings
+from contextlib import contextmanager
+
+import pytest
+
+from _pytest import compat
+
+
+def _setoption(wmod, arg):
+ """
+    Copy of the warnings._setoption function but does not escape arguments.
+ """
+ parts = arg.split(":")
+ if len(parts) > 5:
+ raise wmod._OptionError("too many fields (max 5): %r" % (arg,))
+ while len(parts) < 5:
+ parts.append("")
+ action, message, category, module, lineno = [s.strip() for s in parts]
+ action = wmod._getaction(action)
+ category = wmod._getcategory(category)
+ if lineno:
+ try:
+ lineno = int(lineno)
+ if lineno < 0:
+ raise ValueError
+ except (ValueError, OverflowError):
+ raise wmod._OptionError("invalid lineno %r" % (lineno,))
+ else:
+ lineno = 0
+ wmod.filterwarnings(action, message, category, module, lineno)
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("pytest-warnings")
+ group.addoption(
+ "-W",
+ "--pythonwarnings",
+ action="append",
+ help="set which warnings to report, see -W option of python itself.",
+ )
+ parser.addini(
+ "filterwarnings",
+ type="linelist",
+ help="Each line specifies a pattern for "
+ "warnings.filterwarnings. "
+ "Processed after -W and --pythonwarnings.",
+ )
+
+
+@contextmanager
+def catch_warnings_for_item(item):
+ """
+    Catches the warnings generated during setup/call/teardown execution
+    of the given item and, after it is done, posts them as warnings to
+    this item.
+ """
+ args = item.config.getoption("pythonwarnings") or []
+ inifilters = item.config.getini("filterwarnings")
+ with warnings.catch_warnings(record=True) as log:
+ for arg in args:
+ warnings._setoption(arg)
+
+ for arg in inifilters:
+ _setoption(warnings, arg)
+
+ for mark in item.iter_markers(name="filterwarnings"):
+ for arg in mark.args:
+ warnings._setoption(arg)
+
+ yield
+
+ for warning in log:
+ warn_msg = warning.message
+ unicode_warning = False
+
+ if (
+ compat._PY2
+ and any(isinstance(m, compat.UNICODE_TYPES) for m in warn_msg.args)
+ ):
+ new_args = []
+ for m in warn_msg.args:
+ new_args.append(
+ compat.ascii_escaped(m)
+ if isinstance(m, compat.UNICODE_TYPES)
+ else m
+ )
+ unicode_warning = list(warn_msg.args) != new_args
+ warn_msg.args = new_args
+
+ msg = warnings.formatwarning(
+ warn_msg,
+ warning.category,
+ warning.filename,
+ warning.lineno,
+ warning.line,
+ )
+ item.warn("unused", msg)
+
+ if unicode_warning:
+ warnings.warn(
+ "Warning is using unicode non convertible to ascii, "
+ "converting to a safe representation:\n %s" % msg,
+ UnicodeWarning,
+ )
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_runtest_protocol(item):
+ with catch_warnings_for_item(item):
+ yield
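
catch_warnings_for_item() layers three filter sources, in order: -W/--pythonwarnings arguments, filterwarnings ini lines, and filterwarnings marks on the item, all using Python's "action:message:category:module:lineno" filter syntax. A minimal sketch of a test relying on the mark-based filter (the message text is an assumption):

    # test_warning_filters.py
    import warnings
    import pytest

    @pytest.mark.filterwarnings("ignore:assumed legacy message")
    def test_emits_filtered_warning():
        # the mark's "ignore" filter keeps this warning out of the warnings summary
        warnings.warn("assumed legacy message is deprecated", DeprecationWarning)
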
diff --git a/third_party/python/pytest/src/pytest.py b/third_party/python/pytest/src/pytest.py
new file mode 100644
index 0000000000..f27f5a1955
--- /dev/null
+++ b/third_party/python/pytest/src/pytest.py
@@ -0,0 +1,72 @@
+# PYTHON_ARGCOMPLETE_OK
+"""
+pytest: unit and functional testing with Python.
+"""
+
+
+# else we are imported
+
+from _pytest.config import main, UsageError, cmdline, hookspec, hookimpl
+from _pytest.fixtures import fixture, yield_fixture
+from _pytest.assertion import register_assert_rewrite
+from _pytest.freeze_support import freeze_includes
+from _pytest import __version__
+from _pytest.debugging import pytestPDB as __pytestPDB
+from _pytest.recwarn import warns, deprecated_call
+from _pytest.outcomes import fail, skip, importorskip, exit, xfail
+from _pytest.mark import MARK_GEN as mark, param
+from _pytest.main import Session
+from _pytest.nodes import Item, Collector, File
+from _pytest.fixtures import fillfixtures as _fillfuncargs
+from _pytest.python import Module, Class, Instance, Function, Generator
+
+from _pytest.python_api import approx, raises
+
+set_trace = __pytestPDB.set_trace
+
+__all__ = [
+ "main",
+ "UsageError",
+ "cmdline",
+ "hookspec",
+ "hookimpl",
+ "__version__",
+ "register_assert_rewrite",
+ "freeze_includes",
+ "set_trace",
+ "warns",
+ "deprecated_call",
+ "fixture",
+ "yield_fixture",
+ "fail",
+ "skip",
+ "xfail",
+ "importorskip",
+ "exit",
+ "mark",
+ "param",
+ "approx",
+ "_fillfuncargs",
+ "Item",
+ "File",
+ "Collector",
+ "Session",
+ "Module",
+ "Class",
+ "Instance",
+ "Function",
+ "Generator",
+ "raises",
+]
+
+if __name__ == "__main__":
+ # if run as a script or by 'python -m pytest'
+ # we trigger the below "else" condition by the following import
+ import pytest
+
+ raise SystemExit(pytest.main())
+else:
+
+ from _pytest.compat import _setup_collect_fakemodule
+
+ _setup_collect_fakemodule()
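
Besides python -m pytest, the main() entry point re-exported above can be driven programmatically, optionally with ad-hoc plugin objects. A minimal sketch (argument list and plugin are illustrative assumptions):

    # run_suite.py
    import sys
    import pytest

    class SummaryBanner(object):
        def pytest_terminal_summary(self, terminalreporter):
            terminalreporter.write_line("suite finished")

    if __name__ == "__main__":
        sys.exit(pytest.main(["-q", "tests"], plugins=[SummaryBanner()]))
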
diff --git a/third_party/python/pytest/tasks/__init__.py b/third_party/python/pytest/tasks/__init__.py
new file mode 100644
index 0000000000..ea5b1293e3
--- /dev/null
+++ b/third_party/python/pytest/tasks/__init__.py
@@ -0,0 +1,10 @@
+"""
+Invoke tasks to help with pytest development and release process.
+"""
+
+import invoke
+
+from . import generate
+
+
+ns = invoke.Collection(generate)
diff --git a/third_party/python/pytest/tasks/generate.py b/third_party/python/pytest/tasks/generate.py
new file mode 100644
index 0000000000..398af70c94
--- /dev/null
+++ b/third_party/python/pytest/tasks/generate.py
@@ -0,0 +1,118 @@
+"""
+Invoke development tasks.
+"""
+from pathlib import Path
+from subprocess import check_output, check_call
+
+import invoke
+
+
+@invoke.task(help={"version": "version being released"})
+def announce(ctx, version):
+ """Generates a new release announcement entry in the docs."""
+ # Get our list of authors
+ stdout = check_output(["git", "describe", "--abbrev=0", "--tags"])
+ stdout = stdout.decode("utf-8")
+ last_version = stdout.strip()
+
+ stdout = check_output(
+ ["git", "log", "{}..HEAD".format(last_version), "--format=%aN"]
+ )
+ stdout = stdout.decode("utf-8")
+
+ contributors = set(stdout.splitlines())
+
+ template_name = "release.minor.rst" if version.endswith(
+ ".0"
+ ) else "release.patch.rst"
+ template_text = Path(__file__).parent.joinpath(template_name).read_text(
+ encoding="UTF-8"
+ )
+
+ contributors_text = "\n".join(
+ "* {}".format(name) for name in sorted(contributors)
+ ) + "\n"
+ text = template_text.format(version=version, contributors=contributors_text)
+
+ target = Path(__file__).parent.joinpath(
+ "../doc/en/announce/release-{}.rst".format(version)
+ )
+ target.write_text(text, encoding="UTF-8")
+ print("[generate.announce] Generated {}".format(target.name))
+
+ # Update index with the new release entry
+ index_path = Path(__file__).parent.joinpath("../doc/en/announce/index.rst")
+ lines = index_path.read_text(encoding="UTF-8").splitlines()
+ indent = " "
+ for index, line in enumerate(lines):
+ if line.startswith("{}release-".format(indent)):
+ new_line = indent + target.stem
+ if line != new_line:
+ lines.insert(index, new_line)
+ index_path.write_text("\n".join(lines) + "\n", encoding="UTF-8")
+ print("[generate.announce] Updated {}".format(index_path.name))
+ else:
+ print(
+ "[generate.announce] Skip {} (already contains release)".format(
+ index_path.name
+ )
+ )
+ break
+
+ check_call(["git", "add", str(target)])
+
+
+@invoke.task()
+def regen(ctx):
+ """Call regendoc tool to update examples and pytest output in the docs."""
+ print("[generate.regen] Updating docs")
+ check_call(["tox", "-e", "regen"])
+
+
+@invoke.task()
+def make_tag(ctx, version):
+ """Create a new, local tag for the release, only if the repository is clean."""
+ from git import Repo
+
+ repo = Repo(".")
+ if repo.is_dirty():
+ print("Current repository is dirty. Please commit any changes and try again.")
+ raise invoke.Exit(code=2)
+
+ tag_names = [x.name for x in repo.tags]
+ if version in tag_names:
+ print("[generate.make_tag] Delete existing tag {}".format(version))
+ repo.delete_tag(version)
+
+ print("[generate.make_tag] Create tag {}".format(version))
+ repo.create_tag(version)
+
+
+@invoke.task(help={"version": "version being released"})
+def pre_release(ctx, version):
+ """Generates new docs, release announcements and creates a local tag."""
+ announce(ctx, version)
+ regen(ctx)
+ changelog(ctx, version, write_out=True)
+
+ msg = "Preparing release version {}".format(version)
+ check_call(["git", "commit", "-a", "-m", msg])
+
+ make_tag(ctx, version)
+
+ print()
+ print("[generate.pre_release] Please push your branch and open a PR.")
+
+
+@invoke.task(
+ help={
+ "version": "version being released",
+ "write_out": "write changes to the actual changelog",
+ }
+)
+def changelog(ctx, version, write_out=False):
+ if write_out:
+ addopts = []
+ else:
+ addopts = ["--draft"]
+ check_call(["towncrier", "--yes", "--version", version] + addopts)
diff --git a/third_party/python/pytest/tasks/release.minor.rst b/third_party/python/pytest/tasks/release.minor.rst
new file mode 100644
index 0000000000..bdd8282cfa
--- /dev/null
+++ b/third_party/python/pytest/tasks/release.minor.rst
@@ -0,0 +1,27 @@
+pytest-{version}
+=======================================
+
+The pytest team is proud to announce the {version} release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ http://doc.pytest.org/en/latest/changelog.html
+
+For complete documentation, please visit:
+
+ http://docs.pytest.org
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+{contributors}
+
+Happy testing,
+The Pytest Development Team
diff --git a/third_party/python/pytest/tasks/release.patch.rst b/third_party/python/pytest/tasks/release.patch.rst
new file mode 100644
index 0000000000..1982dc353c
--- /dev/null
+++ b/third_party/python/pytest/tasks/release.patch.rst
@@ -0,0 +1,17 @@
+pytest-{version}
+=======================================
+
+pytest {version} has just been released to PyPI.
+
+This is a bug-fix release and a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/latest/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+{contributors}
+
+Happy testing,
+The pytest Development Team
diff --git a/third_party/python/pytest/tasks/requirements.txt b/third_party/python/pytest/tasks/requirements.txt
new file mode 100644
index 0000000000..db54e76e85
--- /dev/null
+++ b/third_party/python/pytest/tasks/requirements.txt
@@ -0,0 +1,6 @@
+-e .
+gitpython
+invoke
+towncrier
+tox
+wheel
diff --git a/third_party/python/pytest/testing/acceptance_test.py b/third_party/python/pytest/testing/acceptance_test.py
new file mode 100644
index 0000000000..c2eed419c9
--- /dev/null
+++ b/third_party/python/pytest/testing/acceptance_test.py
@@ -0,0 +1,1066 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import, division, print_function
+import os
+import sys
+import types
+
+import six
+
+import _pytest._code
+import py
+import pytest
+from _pytest.main import EXIT_NOTESTSCOLLECTED, EXIT_USAGEERROR
+
+
+class TestGeneralUsage(object):
+
+ def test_config_error(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_configure(config):
+ import pytest
+ raise pytest.UsageError("hello")
+ """
+ )
+ result = testdir.runpytest(testdir.tmpdir)
+ assert result.ret != 0
+ result.stderr.fnmatch_lines(["*ERROR: hello"])
+
+ def test_root_conftest_syntax_error(self, testdir):
+ testdir.makepyfile(conftest="raise SyntaxError\n")
+ result = testdir.runpytest()
+ result.stderr.fnmatch_lines(["*raise SyntaxError*"])
+ assert result.ret != 0
+
+ def test_early_hook_error_issue38_1(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_sessionstart():
+ 0 / 0
+ """
+ )
+ result = testdir.runpytest(testdir.tmpdir)
+ assert result.ret != 0
+ # tracestyle is native by default for hook failures
+ result.stdout.fnmatch_lines(
+ ["*INTERNALERROR*File*conftest.py*line 2*", "*0 / 0*"]
+ )
+ result = testdir.runpytest(testdir.tmpdir, "--fulltrace")
+ assert result.ret != 0
+ # tracestyle is native by default for hook failures
+ result.stdout.fnmatch_lines(
+ ["*INTERNALERROR*def pytest_sessionstart():*", "*INTERNALERROR*0 / 0*"]
+ )
+
+ def test_early_hook_configure_error_issue38(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_configure():
+ 0 / 0
+ """
+ )
+ result = testdir.runpytest(testdir.tmpdir)
+ assert result.ret != 0
+ # here we get it on stderr
+ result.stderr.fnmatch_lines(
+ ["*INTERNALERROR*File*conftest.py*line 2*", "*0 / 0*"]
+ )
+
+ def test_file_not_found(self, testdir):
+ result = testdir.runpytest("asd")
+ assert result.ret != 0
+ result.stderr.fnmatch_lines(["ERROR: file not found*asd"])
+
+ def test_file_not_found_unconfigure_issue143(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_configure():
+ print("---configure")
+ def pytest_unconfigure():
+ print("---unconfigure")
+ """
+ )
+ result = testdir.runpytest("-s", "asd")
+ assert result.ret == 4 # EXIT_USAGEERROR
+ result.stderr.fnmatch_lines(["ERROR: file not found*asd"])
+ result.stdout.fnmatch_lines(["*---configure", "*---unconfigure"])
+
+ def test_config_preparse_plugin_option(self, testdir):
+ testdir.makepyfile(
+ pytest_xyz="""
+ def pytest_addoption(parser):
+ parser.addoption("--xyz", dest="xyz", action="store")
+ """
+ )
+ testdir.makepyfile(
+ test_one="""
+ def test_option(pytestconfig):
+ assert pytestconfig.option.xyz == "123"
+ """
+ )
+ result = testdir.runpytest("-p", "pytest_xyz", "--xyz=123", syspathinsert=True)
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_assertion_magic(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def test_this():
+ x = 0
+ assert x
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(["> assert x", "E assert 0"])
+ assert result.ret == 1
+
+ def test_nested_import_error(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import import_fails
+ def test_this():
+ assert import_fails.a == 1
+ """
+ )
+ testdir.makepyfile(import_fails="import does_not_work")
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ # XXX on jython this fails: "> import import_fails",
+ "ImportError while importing test module*",
+ "*No module named *does_not_work*",
+ ]
+ )
+ assert result.ret == 2
+
+ def test_not_collectable_arguments(self, testdir):
+ p1 = testdir.makepyfile("")
+ p2 = testdir.makefile(".pyc", "123")
+ result = testdir.runpytest(p1, p2)
+ assert result.ret
+ result.stderr.fnmatch_lines(["*ERROR: not found:*%s" % (p2.basename,)])
+
+ def test_issue486_better_reporting_on_conftest_load_failure(self, testdir):
+ testdir.makepyfile("")
+ testdir.makeconftest("import qwerty")
+ result = testdir.runpytest("--help")
+ result.stdout.fnmatch_lines(
+ """
+ *--version*
+ *warning*conftest.py*
+ """
+ )
+ result = testdir.runpytest()
+ result.stderr.fnmatch_lines(
+ """
+ *ERROR*could not load*conftest.py*
+ """
+ )
+
+ def test_early_skip(self, testdir):
+ testdir.mkdir("xyz")
+ testdir.makeconftest(
+ """
+ import pytest
+ def pytest_collect_directory():
+ pytest.skip("early")
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+ result.stdout.fnmatch_lines(["*1 skip*"])
+
+ def test_issue88_initial_file_multinodes(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ class MyFile(pytest.File):
+ def collect(self):
+ return [MyItem("hello", parent=self)]
+ def pytest_collect_file(path, parent):
+ return MyFile(path, parent)
+ class MyItem(pytest.Item):
+ pass
+ """
+ )
+ p = testdir.makepyfile("def test_hello(): pass")
+ result = testdir.runpytest(p, "--collect-only")
+ result.stdout.fnmatch_lines(["*MyFile*test_issue88*", "*Module*test_issue88*"])
+
+ def test_issue93_initialnode_importing_capturing(self, testdir):
+ testdir.makeconftest(
+ """
+ import sys
+ print ("should not be seen")
+ sys.stderr.write("stder42\\n")
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+ assert "should not be seen" not in result.stdout.str()
+ assert "stderr42" not in result.stderr.str()
+
+ def test_conftest_printing_shows_if_error(self, testdir):
+ testdir.makeconftest(
+ """
+ print ("should be seen")
+ assert 0
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret != 0
+ assert "should be seen" in result.stdout.str()
+
+ @pytest.mark.skipif(
+ not hasattr(py.path.local, "mksymlinkto"),
+ reason="symlink not available on this platform",
+ )
+ def test_chdir(self, testdir):
+ testdir.tmpdir.join("py").mksymlinkto(py._pydir)
+ p = testdir.tmpdir.join("main.py")
+ p.write(
+ _pytest._code.Source(
+ """
+ import sys, os
+ sys.path.insert(0, '')
+ import py
+ print (py.__file__)
+ print (py.__path__)
+ os.chdir(os.path.dirname(os.getcwd()))
+ print (py.log)
+ """
+ )
+ )
+ result = testdir.runpython(p)
+ assert not result.ret
+
+ def test_issue109_sibling_conftests_not_loaded(self, testdir):
+ sub1 = testdir.mkdir("sub1")
+ sub2 = testdir.mkdir("sub2")
+ sub1.join("conftest.py").write("assert 0")
+ result = testdir.runpytest(sub2)
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+ sub2.ensure("__init__.py")
+ p = sub2.ensure("test_hello.py")
+ result = testdir.runpytest(p)
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+ result = testdir.runpytest(sub1)
+ assert result.ret == EXIT_USAGEERROR
+
+ def test_directory_skipped(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ def pytest_ignore_collect():
+ pytest.skip("intentional")
+ """
+ )
+ testdir.makepyfile("def test_hello(): pass")
+ result = testdir.runpytest()
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+ result.stdout.fnmatch_lines(["*1 skipped*"])
+
+ def test_multiple_items_per_collector_byid(self, testdir):
+ c = testdir.makeconftest(
+ """
+ import pytest
+ class MyItem(pytest.Item):
+ def runtest(self):
+ pass
+ class MyCollector(pytest.File):
+ def collect(self):
+ return [MyItem(name="xyz", parent=self)]
+ def pytest_collect_file(path, parent):
+ if path.basename.startswith("conftest"):
+ return MyCollector(path, parent)
+ """
+ )
+ result = testdir.runpytest(c.basename + "::" + "xyz")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 pass*"])
+
+ def test_skip_on_generated_funcarg_id(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.addcall({'x': 3}, id='hello-123')
+ def pytest_runtest_setup(item):
+ print (item.keywords)
+ if 'hello-123' in item.keywords:
+ pytest.skip("hello")
+ assert 0
+ """
+ )
+ p = testdir.makepyfile("""def test_func(x): pass""")
+ res = testdir.runpytest(p)
+ assert res.ret == 0
+ res.stdout.fnmatch_lines(["*1 skipped*"])
+
+ def test_direct_addressing_selects(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.addcall({'i': 1}, id="1")
+ metafunc.addcall({'i': 2}, id="2")
+ def test_func(i):
+ pass
+ """
+ )
+ res = testdir.runpytest(p.basename + "::" + "test_func[1]")
+ assert res.ret == 0
+ res.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_direct_addressing_notfound(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def test_func():
+ pass
+ """
+ )
+ res = testdir.runpytest(p.basename + "::" + "test_notfound")
+ assert res.ret
+ res.stderr.fnmatch_lines(["*ERROR*not found*"])
+
+ def test_docstring_on_hookspec(self):
+ from _pytest import hookspec
+
+ for name, value in vars(hookspec).items():
+ if name.startswith("pytest_"):
+ assert value.__doc__, "no docstring for %s" % name
+
+ def test_initialization_error_issue49(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_configure():
+ x
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 3 # internal error
+ result.stderr.fnmatch_lines(["INTERNAL*pytest_configure*", "INTERNAL*x*"])
+ assert "sessionstarttime" not in result.stderr.str()
+
+ @pytest.mark.parametrize("lookfor", ["test_fun.py::test_a"])
+ def test_issue134_report_error_when_collecting_member(self, testdir, lookfor):
+ testdir.makepyfile(
+ test_fun="""
+ def test_a():
+ pass
+ def"""
+ )
+ result = testdir.runpytest(lookfor)
+ result.stdout.fnmatch_lines(["*SyntaxError*"])
+ if "::" in lookfor:
+ result.stderr.fnmatch_lines(["*ERROR*"])
+ assert result.ret == 4 # usage error only if item not found
+
+ def test_report_all_failed_collections_initargs(self, testdir):
+ testdir.makepyfile(test_a="def", test_b="def")
+ result = testdir.runpytest("test_a.py::a", "test_b.py::b")
+ result.stderr.fnmatch_lines(["*ERROR*test_a.py::a*", "*ERROR*test_b.py::b*"])
+
+ @pytest.mark.usefixtures("recwarn")
+ def test_namespace_import_doesnt_confuse_import_hook(self, testdir):
+ """
+        Ref #383. Python 3.3's namespace package messed with our import hooks.
+ Importing a module that didn't exist, even if the ImportError was
+ gracefully handled, would make our test crash.
+
+ Use recwarn here to silence this warning in Python 2.7:
+ ImportWarning: Not importing directory '...\not_a_package': missing __init__.py
+ """
+ testdir.mkdir("not_a_package")
+ p = testdir.makepyfile(
+ """
+ try:
+ from not_a_package import doesnt_exist
+ except ImportError:
+ # We handle the import error gracefully here
+ pass
+
+ def test_whatever():
+ pass
+ """
+ )
+ res = testdir.runpytest(p.basename)
+ assert res.ret == 0
+
+ def test_unknown_option(self, testdir):
+ result = testdir.runpytest("--qwlkej")
+ result.stderr.fnmatch_lines(
+ """
+ *unrecognized*
+ """
+ )
+
+ def test_getsourcelines_error_issue553(self, testdir, monkeypatch):
+ monkeypatch.setattr("inspect.getsourcelines", None)
+ p = testdir.makepyfile(
+ """
+ def raise_error(obj):
+ raise IOError('source code not available')
+
+ import inspect
+ inspect.getsourcelines = raise_error
+
+ def test_foo(invalid_fixture):
+ pass
+ """
+ )
+ res = testdir.runpytest(p)
+ res.stdout.fnmatch_lines(
+ ["*source code not available*", "E*fixture 'invalid_fixture' not found"]
+ )
+
+ def test_plugins_given_as_strings(self, tmpdir, monkeypatch):
+ """test that str values passed to main() as `plugins` arg
+ are interpreted as module names to be imported and registered.
+ #855.
+ """
+ with pytest.raises(ImportError) as excinfo:
+ pytest.main([str(tmpdir)], plugins=["invalid.module"])
+ assert "invalid" in str(excinfo.value)
+
+ p = tmpdir.join("test_test_plugins_given_as_strings.py")
+ p.write("def test_foo(): pass")
+ mod = types.ModuleType("myplugin")
+ monkeypatch.setitem(sys.modules, "myplugin", mod)
+ assert pytest.main(args=[str(tmpdir)], plugins=["myplugin"]) == 0
+
+ def test_parametrized_with_bytes_regex(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import re
+ import pytest
+ @pytest.mark.parametrize('r', [re.compile(b'foo')])
+ def test_stuff(r):
+ pass
+ """
+ )
+ res = testdir.runpytest(p)
+ res.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_parametrized_with_null_bytes(self, testdir):
+ """Test parametrization with values that contain null bytes and unicode characters (#2644, #2957)"""
+ p = testdir.makepyfile(
+ u"""
+ # encoding: UTF-8
+ import pytest
+
+ @pytest.mark.parametrize("data", [b"\\x00", "\\x00", u'ação'])
+ def test_foo(data):
+ assert data
+ """
+ )
+ res = testdir.runpytest(p)
+ res.assert_outcomes(passed=3)
+
+
+class TestInvocationVariants(object):
+
+ def test_earlyinit(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ assert hasattr(pytest, 'mark')
+ """
+ )
+ result = testdir.runpython(p)
+ assert result.ret == 0
+
+ @pytest.mark.xfail("sys.platform.startswith('java')")
+ def test_pydoc(self, testdir):
+ for name in ("py.test", "pytest"):
+ result = testdir.runpython_c("import %s;help(%s)" % (name, name))
+ assert result.ret == 0
+ s = result.stdout.str()
+ assert "MarkGenerator" in s
+
+ def test_import_star_py_dot_test(self, testdir):
+ p = testdir.makepyfile(
+ """
+ from py.test import *
+ #collect
+ #cmdline
+ #Item
+ # assert collect.Item is Item
+ # assert collect.Collector is Collector
+ main
+ skip
+ xfail
+ """
+ )
+ result = testdir.runpython(p)
+ assert result.ret == 0
+
+ def test_import_star_pytest(self, testdir):
+ p = testdir.makepyfile(
+ """
+ from pytest import *
+ #Item
+ #File
+ main
+ skip
+ xfail
+ """
+ )
+ result = testdir.runpython(p)
+ assert result.ret == 0
+
+ def test_double_pytestcmdline(self, testdir):
+ p = testdir.makepyfile(
+ run="""
+ import pytest
+ pytest.main()
+ pytest.main()
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_hello():
+ pass
+ """
+ )
+ result = testdir.runpython(p)
+ result.stdout.fnmatch_lines(["*1 passed*", "*1 passed*"])
+
+ def test_python_minus_m_invocation_ok(self, testdir):
+ p1 = testdir.makepyfile("def test_hello(): pass")
+ res = testdir.run(sys.executable, "-m", "pytest", str(p1))
+ assert res.ret == 0
+
+ def test_python_minus_m_invocation_fail(self, testdir):
+ p1 = testdir.makepyfile("def test_fail(): 0/0")
+ res = testdir.run(sys.executable, "-m", "pytest", str(p1))
+ assert res.ret == 1
+
+ def test_python_pytest_package(self, testdir):
+ p1 = testdir.makepyfile("def test_pass(): pass")
+ res = testdir.run(sys.executable, "-m", "pytest", str(p1))
+ assert res.ret == 0
+ res.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_equivalence_pytest_pytest(self):
+ assert pytest.main == py.test.cmdline.main
+
+ def test_invoke_with_string(self, capsys):
+ retcode = pytest.main("-h")
+ assert not retcode
+ out, err = capsys.readouterr()
+ assert "--help" in out
+ pytest.raises(ValueError, lambda: pytest.main(0))
+
+ def test_invoke_with_path(self, tmpdir, capsys):
+ retcode = pytest.main(tmpdir)
+ assert retcode == EXIT_NOTESTSCOLLECTED
+ out, err = capsys.readouterr()
+
+ def test_invoke_plugin_api(self, testdir, capsys):
+
+ class MyPlugin(object):
+
+ def pytest_addoption(self, parser):
+ parser.addoption("--myopt")
+
+ pytest.main(["-h"], plugins=[MyPlugin()])
+ out, err = capsys.readouterr()
+ assert "--myopt" in out
+
+ def test_pyargs_importerror(self, testdir, monkeypatch):
+ monkeypatch.delenv("PYTHONDONTWRITEBYTECODE", False)
+ path = testdir.mkpydir("tpkg")
+ path.join("test_hello.py").write("raise ImportError")
+
+ result = testdir.runpytest("--pyargs", "tpkg.test_hello", syspathinsert=True)
+ assert result.ret != 0
+
+ result.stdout.fnmatch_lines(["collected*0*items*/*1*errors"])
+
+ def test_cmdline_python_package(self, testdir, monkeypatch):
+ import warnings
+
+ monkeypatch.delenv("PYTHONDONTWRITEBYTECODE", False)
+ path = testdir.mkpydir("tpkg")
+ path.join("test_hello.py").write("def test_hello(): pass")
+ path.join("test_world.py").write("def test_world(): pass")
+ result = testdir.runpytest("--pyargs", "tpkg")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*2 passed*"])
+ result = testdir.runpytest("--pyargs", "tpkg.test_hello", syspathinsert=True)
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def join_pythonpath(what):
+ cur = os.environ.get("PYTHONPATH")
+ if cur:
+ return str(what) + os.pathsep + cur
+ return what
+
+ empty_package = testdir.mkpydir("empty_package")
+ monkeypatch.setenv("PYTHONPATH", join_pythonpath(empty_package))
+ # the path which is not a package raises a warning on pypy;
+ # no idea why only pypy and not normal python warn about it here
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", ImportWarning)
+ result = testdir.runpytest("--pyargs", ".")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+ monkeypatch.setenv("PYTHONPATH", join_pythonpath(testdir))
+ result = testdir.runpytest("--pyargs", "tpkg.test_missing", syspathinsert=True)
+ assert result.ret != 0
+ result.stderr.fnmatch_lines(["*not*found*test_missing*"])
+
+ def test_cmdline_python_namespace_package(self, testdir, monkeypatch):
+ """
+ test --pyargs option with namespace packages (#1567)
+ """
+ monkeypatch.delenv("PYTHONDONTWRITEBYTECODE", raising=False)
+
+ search_path = []
+ for dirname in "hello", "world":
+ d = testdir.mkdir(dirname)
+ search_path.append(d)
+ ns = d.mkdir("ns_pkg")
+ ns.join("__init__.py").write(
+ "__import__('pkg_resources').declare_namespace(__name__)"
+ )
+ lib = ns.mkdir(dirname)
+ lib.ensure("__init__.py")
+ lib.join("test_{}.py".format(dirname)).write(
+ "def test_{}(): pass\n" "def test_other():pass".format(dirname)
+ )
+
+ # The structure of the test directory is now:
+ # .
+ # ├── hello
+ # │ └── ns_pkg
+ # │ ├── __init__.py
+ # │ └── hello
+ # │ ├── __init__.py
+ # │ └── test_hello.py
+ # └── world
+ # └── ns_pkg
+ # ├── __init__.py
+ # └── world
+ # ├── __init__.py
+ # └── test_world.py
+
+ def join_pythonpath(*dirs):
+ cur = os.environ.get("PYTHONPATH")
+ if cur:
+ dirs += (cur,)
+ return os.pathsep.join(str(p) for p in dirs)
+
+ monkeypatch.setenv("PYTHONPATH", join_pythonpath(*search_path))
+ for p in search_path:
+ monkeypatch.syspath_prepend(p)
+
+ # mixed module and filenames:
+ os.chdir("world")
+ result = testdir.runpytest("--pyargs", "-v", "ns_pkg.hello", "ns_pkg/world")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "*test_hello.py::test_hello*PASSED*",
+ "*test_hello.py::test_other*PASSED*",
+ "*test_world.py::test_world*PASSED*",
+ "*test_world.py::test_other*PASSED*",
+ "*4 passed*",
+ ]
+ )
+
+ # specify tests within a module
+ testdir.chdir()
+ result = testdir.runpytest(
+ "--pyargs", "-v", "ns_pkg.world.test_world::test_other"
+ )
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ ["*test_world.py::test_other*PASSED*", "*1 passed*"]
+ )
+
+ @pytest.mark.skipif(not hasattr(os, "symlink"), reason="requires symlinks")
+ def test_cmdline_python_package_symlink(self, testdir, monkeypatch):
+ """
+        test that the --pyargs option works with packages whose path contains
+        a symlink and which have a conftest.py in their package (#2985)
+ """
+ # dummy check that we can actually create symlinks: on Windows `os.symlink` is available,
+ # but normal users require special admin privileges to create symlinks.
+ if sys.platform == "win32":
+ try:
+ os.symlink(
+ str(testdir.tmpdir.ensure("tmpfile")),
+ str(testdir.tmpdir.join("tmpfile2")),
+ )
+ except OSError as e:
+ pytest.skip(six.text_type(e.args[0]))
+ monkeypatch.delenv("PYTHONDONTWRITEBYTECODE", raising=False)
+
+ search_path = ["lib", os.path.join("local", "lib")]
+
+ dirname = "lib"
+ d = testdir.mkdir(dirname)
+ foo = d.mkdir("foo")
+ foo.ensure("__init__.py")
+ lib = foo.mkdir("bar")
+ lib.ensure("__init__.py")
+ lib.join("test_bar.py").write(
+ "def test_bar(): pass\n" "def test_other(a_fixture):pass"
+ )
+ lib.join("conftest.py").write(
+ "import pytest\n" "@pytest.fixture\n" "def a_fixture():pass"
+ )
+
+ d_local = testdir.mkdir("local")
+ symlink_location = os.path.join(str(d_local), "lib")
+ if six.PY2:
+ os.symlink(str(d), symlink_location)
+ else:
+ os.symlink(str(d), symlink_location, target_is_directory=True)
+
+ # The structure of the test directory is now:
+ # .
+ # ├── local
+ # │ └── lib -> ../lib
+ # └── lib
+ # └── foo
+ # ├── __init__.py
+ # └── bar
+ # ├── __init__.py
+ # ├── conftest.py
+ # └── test_bar.py
+
+ def join_pythonpath(*dirs):
+ cur = os.getenv("PYTHONPATH")
+ if cur:
+ dirs += (cur,)
+ return os.pathsep.join(str(p) for p in dirs)
+
+ monkeypatch.setenv("PYTHONPATH", join_pythonpath(*search_path))
+ for p in search_path:
+ monkeypatch.syspath_prepend(p)
+
+ # module picked up in symlink-ed directory:
+ result = testdir.runpytest("--pyargs", "-v", "foo.bar")
+ testdir.chdir()
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "*lib/foo/bar/test_bar.py::test_bar*PASSED*",
+ "*lib/foo/bar/test_bar.py::test_other*PASSED*",
+ "*2 passed*",
+ ]
+ )
+
+ def test_cmdline_python_package_not_exists(self, testdir):
+ result = testdir.runpytest("--pyargs", "tpkgwhatv")
+ assert result.ret
+ result.stderr.fnmatch_lines(["ERROR*file*or*package*not*found*"])
+
+ @pytest.mark.xfail(reason="decide: feature or bug")
+ def test_noclass_discovery_if_not_testcase(self, testdir):
+ testpath = testdir.makepyfile(
+ """
+ import unittest
+ class TestHello(object):
+ def test_hello(self):
+ assert self.attr
+
+ class RealTest(unittest.TestCase, TestHello):
+ attr = 42
+ """
+ )
+ reprec = testdir.inline_run(testpath)
+ reprec.assertoutcome(passed=1)
+
+ def test_doctest_id(self, testdir):
+ testdir.makefile(
+ ".txt",
+ """
+ >>> x=3
+ >>> x
+ 4
+ """,
+ )
+ result = testdir.runpytest("-rf")
+ lines = result.stdout.str().splitlines()
+ for line in lines:
+ if line.startswith("FAIL "):
+ testid = line[5:].strip()
+ break
+ result = testdir.runpytest(testid, "-rf")
+ result.stdout.fnmatch_lines([line, "*1 failed*"])
+
+ def test_core_backward_compatibility(self):
+ """Test backward compatibility for get_plugin_manager function. See #787."""
+ import _pytest.config
+
+ assert type(
+ _pytest.config.get_plugin_manager()
+ ) is _pytest.config.PytestPluginManager
+
+ def test_has_plugin(self, request):
+ """Test hasplugin function of the plugin manager (#932)."""
+ assert request.config.pluginmanager.hasplugin("python")
+
+
+class TestDurations(object):
+ source = """
+ import time
+ frag = 0.002
+ def test_something():
+ pass
+ def test_2():
+ time.sleep(frag*5)
+ def test_1():
+ time.sleep(frag)
+ def test_3():
+ time.sleep(frag*10)
+ """
+
+ def test_calls(self, testdir):
+ testdir.makepyfile(self.source)
+ result = testdir.runpytest("--durations=10")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines_random(
+ ["*durations*", "*call*test_3*", "*call*test_2*", "*call*test_1*"]
+ )
+
+ def test_calls_show_2(self, testdir):
+ testdir.makepyfile(self.source)
+ result = testdir.runpytest("--durations=2")
+ assert result.ret == 0
+ lines = result.stdout.get_lines_after("*slowest*durations*")
+ assert "4 passed" in lines[2]
+
+ def test_calls_showall(self, testdir):
+ testdir.makepyfile(self.source)
+ result = testdir.runpytest("--durations=0")
+ assert result.ret == 0
+ for x in "123":
+ for y in ("call",): # 'setup', 'call', 'teardown':
+ for line in result.stdout.lines:
+ if ("test_%s" % x) in line and y in line:
+ break
+ else:
+ raise AssertionError("not found %s %s" % (x, y))
+
+ def test_with_deselected(self, testdir):
+ testdir.makepyfile(self.source)
+ result = testdir.runpytest("--durations=2", "-k test_1")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*durations*", "*call*test_1*"])
+
+ def test_with_failing_collection(self, testdir):
+ testdir.makepyfile(self.source)
+ testdir.makepyfile(test_collecterror="""xyz""")
+ result = testdir.runpytest("--durations=2", "-k test_1")
+ assert result.ret == 2
+ result.stdout.fnmatch_lines(["*Interrupted: 1 errors during collection*"])
+ # Collection errors abort test execution, therefore no duration is
+ # output
+ assert "duration" not in result.stdout.str()
+
+ def test_with_not(self, testdir):
+ testdir.makepyfile(self.source)
+ result = testdir.runpytest("-k not 1")
+ assert result.ret == 0
+
+
+class TestDurationWithFixture(object):
+ source = """
+ import time
+ frag = 0.001
+ def setup_function(func):
+ time.sleep(frag * 3)
+ def test_1():
+ time.sleep(frag*2)
+ def test_2():
+ time.sleep(frag)
+ """
+
+ def test_setup_function(self, testdir):
+ testdir.makepyfile(self.source)
+ result = testdir.runpytest("--durations=10")
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines_random(
+ """
+ *durations*
+ * setup *test_1*
+ * call *test_1*
+ """
+ )
+
+
+def test_zipimport_hook(testdir, tmpdir):
+ """Test package loader is being used correctly (see #1837)."""
+ zipapp = pytest.importorskip("zipapp")
+ testdir.tmpdir.join("app").ensure(dir=1)
+ testdir.makepyfile(
+ **{
+ "app/foo.py": """
+ import pytest
+ def main():
+ pytest.main(['--pyarg', 'foo'])
+ """
+ }
+ )
+ target = tmpdir.join("foo.zip")
+ zipapp.create_archive(str(testdir.tmpdir.join("app")), str(target), main="foo:main")
+ result = testdir.runpython(target)
+ assert result.ret == 0
+ result.stderr.fnmatch_lines(["*not found*foo*"])
+ assert "INTERNALERROR>" not in result.stdout.str()
+
+
+def test_import_plugin_unicode_name(testdir):
+ testdir.makepyfile(myplugin="")
+ testdir.makepyfile(
+ """
+ def test(): pass
+ """
+ )
+ testdir.makeconftest(
+ """
+ pytest_plugins = [u'myplugin']
+ """
+ )
+ r = testdir.runpytest()
+ assert r.ret == 0
+
+
+def test_deferred_hook_checking(testdir):
+ """
+ Check hooks as late as possible (#1821).
+ """
+ testdir.syspathinsert()
+ testdir.makepyfile(
+ **{
+ "plugin.py": """
+ class Hooks(object):
+ def pytest_my_hook(self, config):
+ pass
+
+ def pytest_configure(config):
+ config.pluginmanager.add_hookspecs(Hooks)
+ """,
+ "conftest.py": """
+ pytest_plugins = ['plugin']
+ def pytest_my_hook(config):
+ return 40
+ """,
+ "test_foo.py": """
+ def test(request):
+ assert request.config.hook.pytest_my_hook(config=request.config) == [40]
+ """,
+ }
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["* 1 passed *"])
+
+
+def test_fixture_values_leak(testdir):
+ """Ensure that fixture objects are properly destroyed by the garbage collector at the end of their expected
+ life-times (#2981).
+ """
+ testdir.makepyfile(
+ """
+ import attr
+ import gc
+ import pytest
+ import weakref
+
+ @attr.s
+ class SomeObj(object):
+ name = attr.ib()
+
+ fix_of_test1_ref = None
+ session_ref = None
+
+ @pytest.fixture(scope='session')
+ def session_fix():
+ global session_ref
+ obj = SomeObj(name='session-fixture')
+ session_ref = weakref.ref(obj)
+ return obj
+
+ @pytest.fixture
+ def fix(session_fix):
+ global fix_of_test1_ref
+ obj = SomeObj(name='local-fixture')
+ fix_of_test1_ref = weakref.ref(obj)
+ return obj
+
+ def test1(fix):
+ assert fix_of_test1_ref() is fix
+
+ def test2():
+ gc.collect()
+ # fixture "fix" created during test1 must have been destroyed by now
+ assert fix_of_test1_ref() is None
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["* 2 passed *"])
+
+
+def test_fixture_order_respects_scope(testdir):
+ """Ensure that fixtures are created according to scope order, regression test for #2405
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+
+ data = {}
+
+ @pytest.fixture(scope='module')
+ def clean_data():
+ data.clear()
+
+ @pytest.fixture(autouse=True)
+ def add_data():
+ data.update(value=True)
+
+ @pytest.mark.usefixtures('clean_data')
+ def test_value():
+ assert data.get('value')
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 0
+
+
+def test_frame_leak_on_failing_test(testdir):
+ """pytest would leak garbage referencing the frames of tests that failed that could never be reclaimed (#2798)
+
+ Unfortunately it was not possible to remove the actual circles because most of them
+ are made of traceback objects which cannot be weakly referenced. Those objects at least
+ can be eventually claimed by the garbage collector.
+ """
+ testdir.makepyfile(
+ """
+ import gc
+ import weakref
+
+ class Obj:
+ pass
+
+ ref = None
+
+ def test1():
+ obj = Obj()
+ global ref
+ ref = weakref.ref(obj)
+ assert 0
+
+ def test2():
+ gc.collect()
+ assert ref() is None
+ """
+ )
+ result = testdir.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*1 failed, 1 passed in*"])
diff --git a/third_party/python/pytest/testing/code/test_code.py b/third_party/python/pytest/testing/code/test_code.py
new file mode 100644
index 0000000000..bfae369188
--- /dev/null
+++ b/third_party/python/pytest/testing/code/test_code.py
@@ -0,0 +1,210 @@
+# coding: utf-8
+from __future__ import absolute_import, division, print_function
+import sys
+
+import _pytest._code
+import py
+import pytest
+from test_excinfo import TWMock
+from six import text_type
+
+
+def test_ne():
+ code1 = _pytest._code.Code(compile('foo = "bar"', "", "exec"))
+ assert code1 == code1
+ code2 = _pytest._code.Code(compile('foo = "baz"', "", "exec"))
+ assert code2 != code1
+
+
+def test_code_gives_back_name_for_not_existing_file():
+ name = "abc-123"
+ co_code = compile("pass\n", name, "exec")
+ assert co_code.co_filename == name
+ code = _pytest._code.Code(co_code)
+ assert str(code.path) == name
+ assert code.fullsource is None
+
+
+def test_code_with_class():
+
+ class A(object):
+ pass
+
+ pytest.raises(TypeError, "_pytest._code.Code(A)")
+
+
+if True:
+
+ def x():
+ pass
+
+
+def test_code_fullsource():
+ code = _pytest._code.Code(x)
+ full = code.fullsource
+ assert "test_code_fullsource()" in str(full)
+
+
+def test_code_source():
+ code = _pytest._code.Code(x)
+ src = code.source()
+ expected = """def x():
+ pass"""
+ assert str(src) == expected
+
+
+def test_frame_getsourcelineno_myself():
+
+ def func():
+ return sys._getframe(0)
+
+ f = func()
+ f = _pytest._code.Frame(f)
+ source, lineno = f.code.fullsource, f.lineno
+ assert source[lineno].startswith(" return sys._getframe(0)")
+
+
+def test_getstatement_empty_fullsource():
+
+ def func():
+ return sys._getframe(0)
+
+ f = func()
+ f = _pytest._code.Frame(f)
+ prop = f.code.__class__.fullsource
+ try:
+ f.code.__class__.fullsource = None
+ assert f.statement == _pytest._code.Source("")
+ finally:
+ f.code.__class__.fullsource = prop
+
+
+def test_code_from_func():
+ co = _pytest._code.Code(test_frame_getsourcelineno_myself)
+ assert co.firstlineno
+ assert co.path
+
+
+def test_unicode_handling():
+ value = py.builtin._totext("\xc4\x85\xc4\x87\n", "utf-8").encode("utf8")
+
+ def f():
+ raise Exception(value)
+
+ excinfo = pytest.raises(Exception, f)
+ str(excinfo)
+ if sys.version_info[0] < 3:
+ text_type(excinfo)
+
+
+@pytest.mark.skipif(sys.version_info[0] >= 3, reason="python 2 only issue")
+def test_unicode_handling_syntax_error():
+ value = py.builtin._totext("\xc4\x85\xc4\x87\n", "utf-8").encode("utf8")
+
+ def f():
+ raise SyntaxError("invalid syntax", (None, 1, 3, value))
+
+ excinfo = pytest.raises(Exception, f)
+ str(excinfo)
+ if sys.version_info[0] < 3:
+ text_type(excinfo)
+
+
+def test_code_getargs():
+
+ def f1(x):
+ pass
+
+ c1 = _pytest._code.Code(f1)
+ assert c1.getargs(var=True) == ("x",)
+
+ def f2(x, *y):
+ pass
+
+ c2 = _pytest._code.Code(f2)
+ assert c2.getargs(var=True) == ("x", "y")
+
+ def f3(x, **z):
+ pass
+
+ c3 = _pytest._code.Code(f3)
+ assert c3.getargs(var=True) == ("x", "z")
+
+ def f4(x, *y, **z):
+ pass
+
+ c4 = _pytest._code.Code(f4)
+ assert c4.getargs(var=True) == ("x", "y", "z")
+
+
+def test_frame_getargs():
+
+ def f1(x):
+ return sys._getframe(0)
+
+ fr1 = _pytest._code.Frame(f1("a"))
+ assert fr1.getargs(var=True) == [("x", "a")]
+
+ def f2(x, *y):
+ return sys._getframe(0)
+
+ fr2 = _pytest._code.Frame(f2("a", "b", "c"))
+ assert fr2.getargs(var=True) == [("x", "a"), ("y", ("b", "c"))]
+
+ def f3(x, **z):
+ return sys._getframe(0)
+
+ fr3 = _pytest._code.Frame(f3("a", b="c"))
+ assert fr3.getargs(var=True) == [("x", "a"), ("z", {"b": "c"})]
+
+ def f4(x, *y, **z):
+ return sys._getframe(0)
+
+ fr4 = _pytest._code.Frame(f4("a", "b", c="d"))
+ assert fr4.getargs(var=True) == [("x", "a"), ("y", ("b",)), ("z", {"c": "d"})]
+
+
+class TestExceptionInfo(object):
+
+ def test_bad_getsource(self):
+ try:
+ if False:
+ pass
+ else:
+ assert False
+ except AssertionError:
+ exci = _pytest._code.ExceptionInfo()
+ assert exci.getrepr()
+
+
+class TestTracebackEntry(object):
+
+ def test_getsource(self):
+ try:
+ if False:
+ pass
+ else:
+ assert False
+ except AssertionError:
+ exci = _pytest._code.ExceptionInfo()
+ entry = exci.traceback[0]
+ source = entry.getsource()
+ assert len(source) == 6
+ assert "assert False" in source[5]
+
+
+class TestReprFuncArgs(object):
+
+ def test_not_raise_exception_with_mixed_encoding(self):
+ from _pytest._code.code import ReprFuncArgs
+
+ tw = TWMock()
+
+ args = [("unicode_string", u"São Paulo"), ("utf8_string", "S\xc3\xa3o Paulo")]
+
+ r = ReprFuncArgs(args)
+ r.toterminal(tw)
+ if sys.version_info[0] >= 3:
+ assert tw.lines[0] == "unicode_string = São Paulo, utf8_string = São Paulo"
+ else:
+ assert tw.lines[0] == "unicode_string = São Paulo, utf8_string = São Paulo"
diff --git a/third_party/python/pytest/testing/code/test_excinfo.py b/third_party/python/pytest/testing/code/test_excinfo.py
new file mode 100644
index 0000000000..f4044b6ecc
--- /dev/null
+++ b/third_party/python/pytest/testing/code/test_excinfo.py
@@ -0,0 +1,1357 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import, division, print_function
+
+import operator
+import os
+import sys
+import _pytest
+import py
+import pytest
+from _pytest._code.code import (
+ ExceptionInfo,
+ FormattedExcinfo,
+ ReprExceptionInfo,
+ ExceptionChainRepr,
+)
+from six.moves import queue
+
+from test_source import astonly
+
+try:
+ import importlib
+except ImportError:
+ invalidate_import_caches = None
+else:
+ invalidate_import_caches = getattr(importlib, "invalidate_caches", None)
+
+failsonjython = pytest.mark.xfail("sys.platform.startswith('java')")
+
+pytest_version_info = tuple(map(int, pytest.__version__.split(".")[:3]))
+
+
+class TWMock(object):
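+ """Stub TerminalWriter that records everything written to it so tests can inspect the output."""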
+ WRITE = object()
+
+ def __init__(self):
+ self.lines = []
+ self.is_writing = False
+
+ def sep(self, sep, line=None):
+ self.lines.append((sep, line))
+
+ def write(self, msg, **kw):
+ self.lines.append((TWMock.WRITE, msg))
+
+ def line(self, line, **kw):
+ self.lines.append(line)
+
+ def markup(self, text, **kw):
+ return text
+
+ def get_write_msg(self, idx):
+ flag, msg = self.lines[idx]
+ assert flag == TWMock.WRITE
+ return msg
+
+ fullwidth = 80
+
+
+def test_excinfo_simple():
+ try:
+ raise ValueError
+ except ValueError:
+ info = _pytest._code.ExceptionInfo()
+ assert info.type == ValueError
+
+
+def test_excinfo_getstatement():
+
+ def g():
+ raise ValueError
+
+ def f():
+ g()
+
+ try:
+ f()
+ except ValueError:
+ excinfo = _pytest._code.ExceptionInfo()
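+ # expected entries: the f() call in the try block, the g() call inside f, and the raise inside g
+ # (traceback entry linenos are 0-based, hence the -1 below)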
+ linenumbers = [
+ _pytest._code.getrawcode(f).co_firstlineno - 1 + 4,
+ _pytest._code.getrawcode(f).co_firstlineno - 1 + 1,
+ _pytest._code.getrawcode(g).co_firstlineno - 1 + 1,
+ ]
+ values = list(excinfo.traceback)
+ foundlinenumbers = [x.lineno for x in values]
+ assert foundlinenumbers == linenumbers
+ # for x in info:
+ # print "%s:%d %s" %(x.path.relto(root), x.lineno, x.statement)
+ # xxx
+
+
+# testchain for getentries test below
+
+
+def f():
+ #
+ raise ValueError
+ #
+
+
+def g():
+ #
+ __tracebackhide__ = True
+ f()
+ #
+
+
+def h():
+ #
+ g()
+ #
+
+
+class TestTraceback_f_g_h(object):
+
+ def setup_method(self, method):
+ try:
+ h()
+ except ValueError:
+ self.excinfo = _pytest._code.ExceptionInfo()
+
+ def test_traceback_entries(self):
+ tb = self.excinfo.traceback
+ entries = list(tb)
+ assert len(tb) == 4 # maybe fragile test
+ assert len(entries) == 4 # maybe fragile test
+ names = ["f", "g", "h"]
+ for entry in entries:
+ try:
+ names.remove(entry.frame.code.name)
+ except ValueError:
+ pass
+ assert not names
+
+ def test_traceback_entry_getsource(self):
+ tb = self.excinfo.traceback
+ s = str(tb[-1].getsource())
+ assert s.startswith("def f():")
+ assert s.endswith("raise ValueError")
+
+ @astonly
+ @failsonjython
+ def test_traceback_entry_getsource_in_construct(self):
+ source = _pytest._code.Source(
+ """\
+ def xyz():
+ try:
+ raise ValueError
+ except somenoname:
+ pass
+ xyz()
+ """
+ )
+ try:
+ exec(source.compile())
+ except NameError:
+ tb = _pytest._code.ExceptionInfo().traceback
+ print(tb[-1].getsource())
+ s = str(tb[-1].getsource())
+ assert s.startswith("def xyz():\n try:")
+ assert s.strip().endswith("except somenoname:")
+
+ def test_traceback_cut(self):
+ co = _pytest._code.Code(f)
+ path, firstlineno = co.path, co.firstlineno
+ traceback = self.excinfo.traceback
+ newtraceback = traceback.cut(path=path, firstlineno=firstlineno)
+ assert len(newtraceback) == 1
+ newtraceback = traceback.cut(path=path, lineno=firstlineno + 2)
+ assert len(newtraceback) == 1
+
+ def test_traceback_cut_excludepath(self, testdir):
+ p = testdir.makepyfile("def f(): raise ValueError")
+ excinfo = pytest.raises(ValueError, "p.pyimport().f()")
+ basedir = py.path.local(pytest.__file__).dirpath()
+ newtraceback = excinfo.traceback.cut(excludepath=basedir)
+ for x in newtraceback:
+ if hasattr(x, "path"):
+ assert not py.path.local(x.path).relto(basedir)
+ assert newtraceback[-1].frame.code.path == p
+
+ def test_traceback_filter(self):
+ traceback = self.excinfo.traceback
+ ntraceback = traceback.filter()
+ assert len(ntraceback) == len(traceback) - 1
+
+ @pytest.mark.parametrize(
+ "tracebackhide, matching",
+ [
+ (lambda info: True, True),
+ (lambda info: False, False),
+ (operator.methodcaller("errisinstance", ValueError), True),
+ (operator.methodcaller("errisinstance", IndexError), False),
+ ],
+ )
+ def test_traceback_filter_selective(self, tracebackhide, matching):
+
+ def f():
+ #
+ raise ValueError
+ #
+
+ def g():
+ #
+ __tracebackhide__ = tracebackhide
+ f()
+ #
+
+ def h():
+ #
+ g()
+ #
+
+ excinfo = pytest.raises(ValueError, h)
+ traceback = excinfo.traceback
+ ntraceback = traceback.filter()
+ print("old: {!r}".format(traceback))
+ print("new: {!r}".format(ntraceback))
+
+ if matching:
+ assert len(ntraceback) == len(traceback) - 2
+ else:
+ # -1 because of the __tracebackhide__ in pytest.raises
+ assert len(ntraceback) == len(traceback) - 1
+
+ def test_traceback_recursion_index(self):
+
+ def f(n):
+ if n < 10:
+ n += 1
+ f(n)
+
+ excinfo = pytest.raises(RuntimeError, f, 8)
+ traceback = excinfo.traceback
+ recindex = traceback.recursionindex()
+ assert recindex == 3
+
+ def test_traceback_only_specific_recursion_errors(self, monkeypatch):
+
+ def f(n):
+ if n == 0:
+ raise RuntimeError("hello")
+ f(n - 1)
+
+ excinfo = pytest.raises(RuntimeError, f, 100)
+ monkeypatch.delattr(excinfo.traceback.__class__, "recursionindex")
+ repr = excinfo.getrepr()
+ assert "RuntimeError: hello" in str(repr.reprcrash)
+
+ def test_traceback_no_recursion_index(self):
+
+ def do_stuff():
+ raise RuntimeError
+
+ def reraise_me():
+ import sys
+
+ exc, val, tb = sys.exc_info()
+ py.builtin._reraise(exc, val, tb)
+
+ def f(n):
+ try:
+ do_stuff()
+ except: # noqa
+ reraise_me()
+
+ excinfo = pytest.raises(RuntimeError, f, 8)
+ traceback = excinfo.traceback
+ recindex = traceback.recursionindex()
+ assert recindex is None
+
+ def test_traceback_messy_recursion(self):
+ # XXX: simplified locally testable version
+ decorator = pytest.importorskip("decorator").decorator
+
+ def log(f, *k, **kw):
+ print("%s %s" % (k, kw))
+ f(*k, **kw)
+
+ log = decorator(log)
+
+ def fail():
+ raise ValueError("")
+
+ fail = log(log(fail))
+
+ excinfo = pytest.raises(ValueError, fail)
+ assert excinfo.traceback.recursionindex() is None
+
+ def test_traceback_getcrashentry(self):
+
+ def i():
+ __tracebackhide__ = True
+ raise ValueError
+
+ def h():
+ i()
+
+ def g():
+ __tracebackhide__ = True
+ h()
+
+ def f():
+ g()
+
+ excinfo = pytest.raises(ValueError, f)
+ tb = excinfo.traceback
+ entry = tb.getcrashentry()
+ co = _pytest._code.Code(h)
+ assert entry.frame.code.path == co.path
+ assert entry.lineno == co.firstlineno + 1
+ assert entry.frame.code.name == "h"
+
+ def test_traceback_getcrashentry_empty(self):
+
+ def g():
+ __tracebackhide__ = True
+ raise ValueError
+
+ def f():
+ __tracebackhide__ = True
+ g()
+
+ excinfo = pytest.raises(ValueError, f)
+ tb = excinfo.traceback
+ entry = tb.getcrashentry()
+ co = _pytest._code.Code(g)
+ assert entry.frame.code.path == co.path
+ assert entry.lineno == co.firstlineno + 2
+ assert entry.frame.code.name == "g"
+
+
+def test_excinfo_exconly():
+ excinfo = pytest.raises(ValueError, h)
+ assert excinfo.exconly().startswith("ValueError")
+ excinfo = pytest.raises(ValueError, "raise ValueError('hello\\nworld')")
+ msg = excinfo.exconly(tryshort=True)
+ assert msg.startswith("ValueError")
+ assert msg.endswith("world")
+
+
+def test_excinfo_repr():
+ excinfo = pytest.raises(ValueError, h)
+ s = repr(excinfo)
+ assert s == "<ExceptionInfo ValueError tblen=4>"
+
+
+def test_excinfo_str():
+ excinfo = pytest.raises(ValueError, h)
+ s = str(excinfo)
+ assert s.startswith(__file__[:-9]) # pyc file and $py.class
+ assert s.endswith("ValueError")
+ assert len(s.split(":")) >= 3 # on windows it's 4
+
+
+def test_excinfo_errisinstance():
+ excinfo = pytest.raises(ValueError, h)
+ assert excinfo.errisinstance(ValueError)
+
+
+def test_excinfo_no_sourcecode():
+ try:
+ exec("raise ValueError()")
+ except ValueError:
+ excinfo = _pytest._code.ExceptionInfo()
+ s = str(excinfo.traceback[-1])
+ assert s == " File '<string>':1 in <module>\n ???\n"
+
+
+def test_excinfo_no_python_sourcecode(tmpdir):
+ # XXX: simplified locally testable version
+ tmpdir.join("test.txt").write("{{ h()}}:")
+
+ jinja2 = pytest.importorskip("jinja2")
+ loader = jinja2.FileSystemLoader(str(tmpdir))
+ env = jinja2.Environment(loader=loader)
+ template = env.get_template("test.txt")
+ excinfo = pytest.raises(ValueError, template.render, h=h)
+ for item in excinfo.traceback:
+ print(item) # XXX: for some reason jinja.Template.render is printed in full
+ item.source # shouldn't fail
+ if item.path.basename == "test.txt":
+ assert str(item.source) == "{{ h()}}:"
+
+
+def test_entrysource_Queue_example():
+ try:
+ queue.Queue().get(timeout=0.001)
+ except queue.Empty:
+ excinfo = _pytest._code.ExceptionInfo()
+ entry = excinfo.traceback[-1]
+ source = entry.getsource()
+ assert source is not None
+ s = str(source).strip()
+ assert s.startswith("def get")
+
+
+def test_codepath_Queue_example():
+ try:
+ queue.Queue().get(timeout=0.001)
+ except queue.Empty:
+ excinfo = _pytest._code.ExceptionInfo()
+ entry = excinfo.traceback[-1]
+ path = entry.path
+ assert isinstance(path, py.path.local)
+ assert path.basename.lower() == "queue.py"
+ assert path.check()
+
+
+def test_match_succeeds():
+ with pytest.raises(ZeroDivisionError) as excinfo:
+ 0 // 0
+ excinfo.match(r".*zero.*")
+
+
+def test_match_raises_error(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_division_zero():
+ with pytest.raises(ZeroDivisionError) as excinfo:
+ 0 / 0
+ excinfo.match(r'[123]+')
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(["*AssertionError*Pattern*[123]*not found*"])
+
+
+class TestFormattedExcinfo(object):
+
+ @pytest.fixture
+ def importasmod(self, request):
+
+ def importasmod(source):
+ source = _pytest._code.Source(source)
+ tmpdir = request.getfixturevalue("tmpdir")
+ modpath = tmpdir.join("mod.py")
+ tmpdir.ensure("__init__.py")
+ modpath.write(source)
+ if invalidate_import_caches is not None:
+ invalidate_import_caches()
+ return modpath.pyimport()
+
+ return importasmod
+
+ def excinfo_from_exec(self, source):
+ source = _pytest._code.Source(source).strip()
+ try:
+ exec(source.compile())
+ except KeyboardInterrupt:
+ raise
+ except: # noqa
+ return _pytest._code.ExceptionInfo()
+ assert 0, "did not raise"
+
+ def test_repr_source(self):
+ pr = FormattedExcinfo()
+ source = _pytest._code.Source(
+ """
+ def f(x):
+ pass
+ """
+ ).strip()
+ pr.flow_marker = "|"
+ lines = pr.get_source(source, 0)
+ assert len(lines) == 2
+ assert lines[0] == "| def f(x):"
+ assert lines[1] == " pass"
+
+ def test_repr_source_excinfo(self):
+ """ check if indentation is right """
+ pr = FormattedExcinfo()
+ excinfo = self.excinfo_from_exec(
+ """
+ def f():
+ assert 0
+ f()
+ """
+ )
+ pr = FormattedExcinfo()
+ source = pr._getentrysource(excinfo.traceback[-1])
+ lines = pr.get_source(source, 1, excinfo)
+ assert lines == [" def f():", "> assert 0", "E AssertionError"]
+
+ def test_repr_source_not_existing(self):
+ pr = FormattedExcinfo()
+ co = compile("raise ValueError()", "", "exec")
+ try:
+ exec(co)
+ except ValueError:
+ excinfo = _pytest._code.ExceptionInfo()
+ repr = pr.repr_excinfo(excinfo)
+ assert repr.reprtraceback.reprentries[1].lines[0] == "> ???"
+ if sys.version_info[0] >= 3:
+ assert repr.chain[0][0].reprentries[1].lines[0] == "> ???"
+
+ def test_repr_many_line_source_not_existing(self):
+ pr = FormattedExcinfo()
+ co = compile(
+ """
+a = 1
+raise ValueError()
+""",
+ "",
+ "exec",
+ )
+ try:
+ exec(co)
+ except ValueError:
+ excinfo = _pytest._code.ExceptionInfo()
+ repr = pr.repr_excinfo(excinfo)
+ assert repr.reprtraceback.reprentries[1].lines[0] == "> ???"
+ if sys.version_info[0] >= 3:
+ assert repr.chain[0][0].reprentries[1].lines[0] == "> ???"
+
+ def test_repr_source_failing_fullsource(self):
+ pr = FormattedExcinfo()
+
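+ # fake code/frame/traceback objects whose fullsource is None, so the repr must fall back to "> ???"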
+ class FakeCode(object):
+
+ class raw(object):
+ co_filename = "?"
+
+ path = "?"
+ firstlineno = 5
+
+ def fullsource(self):
+ return None
+
+ fullsource = property(fullsource)
+
+ class FakeFrame(object):
+ code = FakeCode()
+ f_locals = {}
+ f_globals = {}
+
+ class FakeTracebackEntry(_pytest._code.Traceback.Entry):
+
+ def __init__(self, tb, excinfo=None):
+ self.lineno = 5 + 3
+
+ @property
+ def frame(self):
+ return FakeFrame()
+
+ class Traceback(_pytest._code.Traceback):
+ Entry = FakeTracebackEntry
+
+ class FakeExcinfo(_pytest._code.ExceptionInfo):
+ typename = "Foo"
+ value = Exception()
+
+ def __init__(self):
+ pass
+
+ def exconly(self, tryshort):
+ return "EXC"
+
+ def errisinstance(self, cls):
+ return False
+
+ excinfo = FakeExcinfo()
+
+ class FakeRawTB(object):
+ tb_next = None
+
+ tb = FakeRawTB()
+ excinfo.traceback = Traceback(tb)
+
+ fail = IOError()
+ repr = pr.repr_excinfo(excinfo)
+ assert repr.reprtraceback.reprentries[0].lines[0] == "> ???"
+ if sys.version_info[0] >= 3:
+ assert repr.chain[0][0].reprentries[0].lines[0] == "> ???"
+
+ fail = py.error.ENOENT # noqa
+ repr = pr.repr_excinfo(excinfo)
+ assert repr.reprtraceback.reprentries[0].lines[0] == "> ???"
+ if sys.version_info[0] >= 3:
+ assert repr.chain[0][0].reprentries[0].lines[0] == "> ???"
+
+ def test_repr_local(self):
+ p = FormattedExcinfo(showlocals=True)
+ loc = {"y": 5, "z": 7, "x": 3, "@x": 2, "__builtins__": {}}
+ reprlocals = p.repr_locals(loc)
+ assert reprlocals.lines
+ assert reprlocals.lines[0] == "__builtins__ = <builtins>"
+ assert reprlocals.lines[1] == "x = 3"
+ assert reprlocals.lines[2] == "y = 5"
+ assert reprlocals.lines[3] == "z = 7"
+
+ def test_repr_tracebackentry_lines(self, importasmod):
+ mod = importasmod(
+ """
+ def func1():
+ raise ValueError("hello\\nworld")
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.func1)
+ excinfo.traceback = excinfo.traceback.filter()
+ p = FormattedExcinfo()
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-1])
+
+ # test as an intermediate entry
+ lines = reprtb.lines
+ assert lines[0] == " def func1():"
+ assert lines[1] == '> raise ValueError("hello\\nworld")'
+
+ # test as last entry
+ p = FormattedExcinfo(showlocals=True)
+ repr_entry = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = repr_entry.lines
+ assert lines[0] == " def func1():"
+ assert lines[1] == '> raise ValueError("hello\\nworld")'
+ assert lines[2] == "E ValueError: hello"
+ assert lines[3] == "E world"
+ assert not lines[4:]
+
+ loc = repr_entry.reprlocals is not None
+ loc = repr_entry.reprfileloc
+ assert loc.path == mod.__file__
+ assert loc.lineno == 3
+ # assert loc.message == "ValueError: hello"
+
+ def test_repr_tracebackentry_lines2(self, importasmod):
+ mod = importasmod(
+ """
+ def func1(m, x, y, z):
+ raise ValueError("hello\\nworld")
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.func1, "m" * 90, 5, 13, "z" * 120)
+ excinfo.traceback = excinfo.traceback.filter()
+ entry = excinfo.traceback[-1]
+ p = FormattedExcinfo(funcargs=True)
+ reprfuncargs = p.repr_args(entry)
+ assert reprfuncargs.args[0] == ("m", repr("m" * 90))
+ assert reprfuncargs.args[1] == ("x", "5")
+ assert reprfuncargs.args[2] == ("y", "13")
+ assert reprfuncargs.args[3] == ("z", repr("z" * 120))
+
+ p = FormattedExcinfo(funcargs=True)
+ repr_entry = p.repr_traceback_entry(entry)
+ assert repr_entry.reprfuncargs.args == reprfuncargs.args
+ tw = TWMock()
+ repr_entry.toterminal(tw)
+ assert tw.lines[0] == "m = " + repr("m" * 90)
+ assert tw.lines[1] == "x = 5, y = 13"
+ assert tw.lines[2] == "z = " + repr("z" * 120)
+
+ def test_repr_tracebackentry_lines_var_kw_args(self, importasmod):
+ mod = importasmod(
+ """
+ def func1(x, *y, **z):
+ raise ValueError("hello\\nworld")
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.func1, "a", "b", c="d")
+ excinfo.traceback = excinfo.traceback.filter()
+ entry = excinfo.traceback[-1]
+ p = FormattedExcinfo(funcargs=True)
+ reprfuncargs = p.repr_args(entry)
+ assert reprfuncargs.args[0] == ("x", repr("a"))
+ assert reprfuncargs.args[1] == ("y", repr(("b",)))
+ assert reprfuncargs.args[2] == ("z", repr({"c": "d"}))
+
+ p = FormattedExcinfo(funcargs=True)
+ repr_entry = p.repr_traceback_entry(entry)
+ assert repr_entry.reprfuncargs.args == reprfuncargs.args
+ tw = TWMock()
+ repr_entry.toterminal(tw)
+ assert tw.lines[0] == "x = 'a', y = ('b',), z = {'c': 'd'}"
+
+ def test_repr_tracebackentry_short(self, importasmod):
+ mod = importasmod(
+ """
+ def func1():
+ raise ValueError("hello")
+ def entry():
+ func1()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-2])
+ lines = reprtb.lines
+ basename = py.path.local(mod.__file__).basename
+ assert lines[0] == " func1()"
+ assert basename in str(reprtb.reprfileloc.path)
+ assert reprtb.reprfileloc.lineno == 5
+
+ # test last entry
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = reprtb.lines
+ assert lines[0] == ' raise ValueError("hello")'
+ assert lines[1] == "E ValueError: hello"
+ assert basename in str(reprtb.reprfileloc.path)
+ assert reprtb.reprfileloc.lineno == 3
+
+ def test_repr_tracebackentry_no(self, importasmod):
+ mod = importasmod(
+ """
+ def func1():
+ raise ValueError("hello")
+ def entry():
+ func1()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+ p = FormattedExcinfo(style="no")
+ p.repr_traceback_entry(excinfo.traceback[-2])
+
+ p = FormattedExcinfo(style="no")
+ reprentry = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = reprentry.lines
+ assert lines[0] == "E ValueError: hello"
+ assert not lines[1:]
+
+ def test_repr_traceback_tbfilter(self, importasmod):
+ mod = importasmod(
+ """
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+ p = FormattedExcinfo(tbfilter=True)
+ reprtb = p.repr_traceback(excinfo)
+ assert len(reprtb.reprentries) == 2
+ p = FormattedExcinfo(tbfilter=False)
+ reprtb = p.repr_traceback(excinfo)
+ assert len(reprtb.reprentries) == 3
+
+ def test_traceback_short_no_source(self, importasmod, monkeypatch):
+ mod = importasmod(
+ """
+ def func1():
+ raise ValueError("hello")
+ def entry():
+ func1()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+ from _pytest._code.code import Code
+
+ monkeypatch.setattr(Code, "path", "bogus")
+ excinfo.traceback[0].frame.code.path = "bogus"
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-2])
+ lines = reprtb.lines
+ last_p = FormattedExcinfo(style="short")
+ last_reprtb = last_p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ last_lines = last_reprtb.lines
+ monkeypatch.undo()
+ assert lines[0] == " func1()"
+
+ assert last_lines[0] == ' raise ValueError("hello")'
+ assert last_lines[1] == "E ValueError: hello"
+
+ def test_repr_traceback_and_excinfo(self, importasmod):
+ mod = importasmod(
+ """
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+
+ for style in ("long", "short"):
+ p = FormattedExcinfo(style=style)
+ reprtb = p.repr_traceback(excinfo)
+ assert len(reprtb.reprentries) == 2
+ assert reprtb.style == style
+ assert not reprtb.extraline
+ repr = p.repr_excinfo(excinfo)
+ assert repr.reprtraceback
+ assert len(repr.reprtraceback.reprentries) == len(reprtb.reprentries)
+ if sys.version_info[0] >= 3:
+ assert repr.chain[0][0]
+ assert len(repr.chain[0][0].reprentries) == len(reprtb.reprentries)
+ assert repr.reprcrash.path.endswith("mod.py")
+ assert repr.reprcrash.message == "ValueError: 0"
+
+ def test_repr_traceback_with_invalid_cwd(self, importasmod, monkeypatch):
+ mod = importasmod(
+ """
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+
+ p = FormattedExcinfo()
+
+ def raiseos():
+ raise OSError(2)
+
+ monkeypatch.setattr(os, "getcwd", raiseos)
+ assert p._makepath(__file__) == __file__
+ p.repr_traceback(excinfo)
+
+ def test_repr_excinfo_addouterr(self, importasmod):
+ mod = importasmod(
+ """
+ def entry():
+ raise ValueError()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+ repr = excinfo.getrepr()
+ repr.addsection("title", "content")
+ twmock = TWMock()
+ repr.toterminal(twmock)
+ assert twmock.lines[-1] == "content"
+ assert twmock.lines[-2] == ("-", "title")
+
+ def test_repr_excinfo_reprcrash(self, importasmod):
+ mod = importasmod(
+ """
+ def entry():
+ raise ValueError()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+ repr = excinfo.getrepr()
+ assert repr.reprcrash.path.endswith("mod.py")
+ assert repr.reprcrash.lineno == 3
+ assert repr.reprcrash.message == "ValueError"
+ assert str(repr.reprcrash).endswith("mod.py:3: ValueError")
+
+ def test_repr_traceback_recursion(self, importasmod):
+ mod = importasmod(
+ """
+ def rec2(x):
+ return rec1(x+1)
+ def rec1(x):
+ return rec2(x-1)
+ def entry():
+ rec1(42)
+ """
+ )
+ excinfo = pytest.raises(RuntimeError, mod.entry)
+
+ for style in ("short", "long", "no"):
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback(excinfo)
+ assert reprtb.extraline == "!!! Recursion detected (same locals & position)"
+ assert str(reprtb)
+
+ def test_reprexcinfo_getrepr(self, importasmod):
+ mod = importasmod(
+ """
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+
+ for style in ("short", "long", "no"):
+ for showlocals in (True, False):
+ repr = excinfo.getrepr(style=style, showlocals=showlocals)
+ if sys.version_info[0] < 3:
+ assert isinstance(repr, ReprExceptionInfo)
+ assert repr.reprtraceback.style == style
+ if sys.version_info[0] >= 3:
+ assert isinstance(repr, ExceptionChainRepr)
+ for repr in repr.chain:
+ assert repr[0].style == style
+
+ def test_reprexcinfo_unicode(self):
+ from _pytest._code.code import TerminalRepr
+
+ class MyRepr(TerminalRepr):
+
+ def toterminal(self, tw):
+ tw.line(py.builtin._totext("я", "utf-8"))
+
+ x = py.builtin._totext(MyRepr())
+ assert x == py.builtin._totext("я", "utf-8")
+
+ def test_toterminal_long(self, importasmod):
+ mod = importasmod(
+ """
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.f)
+ excinfo.traceback = excinfo.traceback.filter()
+ repr = excinfo.getrepr()
+ tw = TWMock()
+ repr.toterminal(tw)
+ assert tw.lines[0] == ""
+ tw.lines.pop(0)
+ assert tw.lines[0] == " def f():"
+ assert tw.lines[1] == "> g(3)"
+ assert tw.lines[2] == ""
+ line = tw.get_write_msg(3)
+ assert line.endswith("mod.py")
+ assert tw.lines[4] == (":5: ")
+ assert tw.lines[5] == ("_ ", None)
+ assert tw.lines[6] == ""
+ assert tw.lines[7] == " def g(x):"
+ assert tw.lines[8] == "> raise ValueError(x)"
+ assert tw.lines[9] == "E ValueError: 3"
+ assert tw.lines[10] == ""
+ line = tw.get_write_msg(11)
+ assert line.endswith("mod.py")
+ assert tw.lines[12] == ":3: ValueError"
+
+ def test_toterminal_long_missing_source(self, importasmod, tmpdir):
+ mod = importasmod(
+ """
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.f)
+ tmpdir.join("mod.py").remove()
+ excinfo.traceback = excinfo.traceback.filter()
+ repr = excinfo.getrepr()
+ tw = TWMock()
+ repr.toterminal(tw)
+ assert tw.lines[0] == ""
+ tw.lines.pop(0)
+ assert tw.lines[0] == "> ???"
+ assert tw.lines[1] == ""
+ line = tw.get_write_msg(2)
+ assert line.endswith("mod.py")
+ assert tw.lines[3] == ":5: "
+ assert tw.lines[4] == ("_ ", None)
+ assert tw.lines[5] == ""
+ assert tw.lines[6] == "> ???"
+ assert tw.lines[7] == "E ValueError: 3"
+ assert tw.lines[8] == ""
+ line = tw.get_write_msg(9)
+ assert line.endswith("mod.py")
+ assert tw.lines[10] == ":3: ValueError"
+
+ def test_toterminal_long_incomplete_source(self, importasmod, tmpdir):
+ mod = importasmod(
+ """
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.f)
+ tmpdir.join("mod.py").write("asdf")
+ excinfo.traceback = excinfo.traceback.filter()
+ repr = excinfo.getrepr()
+ tw = TWMock()
+ repr.toterminal(tw)
+ assert tw.lines[0] == ""
+ tw.lines.pop(0)
+ assert tw.lines[0] == "> ???"
+ assert tw.lines[1] == ""
+ line = tw.get_write_msg(2)
+ assert line.endswith("mod.py")
+ assert tw.lines[3] == ":5: "
+ assert tw.lines[4] == ("_ ", None)
+ assert tw.lines[5] == ""
+ assert tw.lines[6] == "> ???"
+ assert tw.lines[7] == "E ValueError: 3"
+ assert tw.lines[8] == ""
+ line = tw.get_write_msg(9)
+ assert line.endswith("mod.py")
+ assert tw.lines[10] == ":3: ValueError"
+
+ def test_toterminal_long_filenames(self, importasmod):
+ mod = importasmod(
+ """
+ def f():
+ raise ValueError()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.f)
+ tw = TWMock()
+ path = py.path.local(mod.__file__)
+ old = path.dirpath().chdir()
+ try:
+ repr = excinfo.getrepr(abspath=False)
+ repr.toterminal(tw)
+ x = py.path.local().bestrelpath(path)
+ if len(x) < len(str(path)):
+ msg = tw.get_write_msg(-2)
+ assert msg == "mod.py"
+ assert tw.lines[-1] == ":3: ValueError"
+
+ repr = excinfo.getrepr(abspath=True)
+ repr.toterminal(tw)
+ msg = tw.get_write_msg(-2)
+ assert msg == path
+ line = tw.lines[-1]
+ assert line == ":3: ValueError"
+ finally:
+ old.chdir()
+
+ @pytest.mark.parametrize(
+ "reproptions",
+ [
+ {
+ "style": style,
+ "showlocals": showlocals,
+ "funcargs": funcargs,
+ "tbfilter": tbfilter,
+ }
+ for style in ("long", "short", "no")
+ for showlocals in (True, False)
+ for tbfilter in (True, False)
+ for funcargs in (True, False)
+ ],
+ )
+ def test_format_excinfo(self, importasmod, reproptions):
+ mod = importasmod(
+ """
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.f)
+ tw = py.io.TerminalWriter(stringio=True)
+ repr = excinfo.getrepr(**reproptions)
+ repr.toterminal(tw)
+ assert tw.stringio.getvalue()
+
+ def test_traceback_repr_style(self, importasmod):
+ mod = importasmod(
+ """
+ def f():
+ g()
+ def g():
+ h()
+ def h():
+ i()
+ def i():
+ raise ValueError()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.f)
+ excinfo.traceback = excinfo.traceback.filter()
+ excinfo.traceback[1].set_repr_style("short")
+ excinfo.traceback[2].set_repr_style("short")
+ r = excinfo.getrepr(style="long")
+ tw = TWMock()
+ r.toterminal(tw)
+ for line in tw.lines:
+ print(line)
+ assert tw.lines[0] == ""
+ assert tw.lines[1] == " def f():"
+ assert tw.lines[2] == "> g()"
+ assert tw.lines[3] == ""
+ msg = tw.get_write_msg(4)
+ assert msg.endswith("mod.py")
+ assert tw.lines[5] == ":3: "
+ assert tw.lines[6] == ("_ ", None)
+ tw.get_write_msg(7)
+ assert tw.lines[8].endswith("in g")
+ assert tw.lines[9] == " h()"
+ tw.get_write_msg(10)
+ assert tw.lines[11].endswith("in h")
+ assert tw.lines[12] == " i()"
+ assert tw.lines[13] == ("_ ", None)
+ assert tw.lines[14] == ""
+ assert tw.lines[15] == " def i():"
+ assert tw.lines[16] == "> raise ValueError()"
+ assert tw.lines[17] == "E ValueError"
+ assert tw.lines[18] == ""
+ msg = tw.get_write_msg(19)
+ msg.endswith("mod.py")
+ assert tw.lines[20] == ":9: ValueError"
+
+ @pytest.mark.skipif("sys.version_info[0] < 3")
+ def test_exc_chain_repr(self, importasmod):
+ mod = importasmod(
+ """
+ class Err(Exception):
+ pass
+ def f():
+ try:
+ g()
+ except Exception as e:
+ raise Err() from e
+ finally:
+ h()
+ def g():
+ raise ValueError()
+
+ def h():
+ raise AttributeError()
+ """
+ )
+ excinfo = pytest.raises(AttributeError, mod.f)
+ r = excinfo.getrepr(style="long")
+ tw = TWMock()
+ r.toterminal(tw)
+ for line in tw.lines:
+ print(line)
+ assert tw.lines[0] == ""
+ assert tw.lines[1] == " def f():"
+ assert tw.lines[2] == " try:"
+ assert tw.lines[3] == "> g()"
+ assert tw.lines[4] == ""
+ line = tw.get_write_msg(5)
+ assert line.endswith("mod.py")
+ assert tw.lines[6] == ":6: "
+ assert tw.lines[7] == ("_ ", None)
+ assert tw.lines[8] == ""
+ assert tw.lines[9] == " def g():"
+ assert tw.lines[10] == "> raise ValueError()"
+ assert tw.lines[11] == "E ValueError"
+ assert tw.lines[12] == ""
+ line = tw.get_write_msg(13)
+ assert line.endswith("mod.py")
+ assert tw.lines[14] == ":12: ValueError"
+ assert tw.lines[15] == ""
+ assert (
+ tw.lines[16]
+ == "The above exception was the direct cause of the following exception:"
+ )
+ assert tw.lines[17] == ""
+ assert tw.lines[18] == " def f():"
+ assert tw.lines[19] == " try:"
+ assert tw.lines[20] == " g()"
+ assert tw.lines[21] == " except Exception as e:"
+ assert tw.lines[22] == "> raise Err() from e"
+ assert tw.lines[23] == "E test_exc_chain_repr0.mod.Err"
+ assert tw.lines[24] == ""
+ line = tw.get_write_msg(25)
+ assert line.endswith("mod.py")
+ assert tw.lines[26] == ":8: Err"
+ assert tw.lines[27] == ""
+ assert (
+ tw.lines[28]
+ == "During handling of the above exception, another exception occurred:"
+ )
+ assert tw.lines[29] == ""
+ assert tw.lines[30] == " def f():"
+ assert tw.lines[31] == " try:"
+ assert tw.lines[32] == " g()"
+ assert tw.lines[33] == " except Exception as e:"
+ assert tw.lines[34] == " raise Err() from e"
+ assert tw.lines[35] == " finally:"
+ assert tw.lines[36] == "> h()"
+ assert tw.lines[37] == ""
+ line = tw.get_write_msg(38)
+ assert line.endswith("mod.py")
+ assert tw.lines[39] == ":10: "
+ assert tw.lines[40] == ("_ ", None)
+ assert tw.lines[41] == ""
+ assert tw.lines[42] == " def h():"
+ assert tw.lines[43] == "> raise AttributeError()"
+ assert tw.lines[44] == "E AttributeError"
+ assert tw.lines[45] == ""
+ line = tw.get_write_msg(46)
+ assert line.endswith("mod.py")
+ assert tw.lines[47] == ":15: AttributeError"
+
+ @pytest.mark.skipif("sys.version_info[0] < 3")
+ def test_exc_repr_with_raise_from_none_chain_suppression(self, importasmod):
+ mod = importasmod(
+ """
+ def f():
+ try:
+ g()
+ except Exception:
+ raise AttributeError() from None
+ def g():
+ raise ValueError()
+ """
+ )
+ excinfo = pytest.raises(AttributeError, mod.f)
+ r = excinfo.getrepr(style="long")
+ tw = TWMock()
+ r.toterminal(tw)
+ for line in tw.lines:
+ print(line)
+ assert tw.lines[0] == ""
+ assert tw.lines[1] == " def f():"
+ assert tw.lines[2] == " try:"
+ assert tw.lines[3] == " g()"
+ assert tw.lines[4] == " except Exception:"
+ assert tw.lines[5] == "> raise AttributeError() from None"
+ assert tw.lines[6] == "E AttributeError"
+ assert tw.lines[7] == ""
+ line = tw.get_write_msg(8)
+ assert line.endswith("mod.py")
+ assert tw.lines[9] == ":6: AttributeError"
+ assert len(tw.lines) == 10
+
+ @pytest.mark.skipif("sys.version_info[0] < 3")
+ @pytest.mark.parametrize(
+ "reason, description",
+ [
+ (
+ "cause",
+ "The above exception was the direct cause of the following exception:",
+ ),
+ (
+ "context",
+ "During handling of the above exception, another exception occurred:",
+ ),
+ ],
+ )
+ def test_exc_chain_repr_without_traceback(self, importasmod, reason, description):
+ """
+ Handle representation of exception chains where one of the exceptions doesn't have a
+ real traceback, such as those raised in a subprocess submitted by the multiprocessing
+ module (#1984).
+ """
+ from _pytest.pytester import LineMatcher
+
+ exc_handling_code = " from e" if reason == "cause" else ""
+ mod = importasmod(
+ """
+ def f():
+ try:
+ g()
+ except Exception as e:
+ raise RuntimeError('runtime problem'){exc_handling_code}
+ def g():
+ raise ValueError('invalid value')
+ """.format(
+ exc_handling_code=exc_handling_code
+ )
+ )
+
+ with pytest.raises(RuntimeError) as excinfo:
+ mod.f()
+
+ # emulate the issue described in #1984
+ attr = "__%s__" % reason
+ getattr(excinfo.value, attr).__traceback__ = None
+
+ r = excinfo.getrepr()
+ tw = py.io.TerminalWriter(stringio=True)
+ tw.hasmarkup = False
+ r.toterminal(tw)
+
+ matcher = LineMatcher(tw.stringio.getvalue().splitlines())
+ matcher.fnmatch_lines(
+ [
+ "ValueError: invalid value",
+ description,
+ "* except Exception as e:",
+ "> * raise RuntimeError('runtime problem')" + exc_handling_code,
+ "E *RuntimeError: runtime problem",
+ ]
+ )
+
+
+@pytest.mark.parametrize("style", ["short", "long"])
+@pytest.mark.parametrize("encoding", [None, "utf8", "utf16"])
+def test_repr_traceback_with_unicode(style, encoding):
+ msg = u"☹"
+ if encoding is not None:
+ msg = msg.encode(encoding)
+ try:
+ raise RuntimeError(msg)
+ except RuntimeError:
+ e_info = ExceptionInfo()
+ formatter = FormattedExcinfo(style=style)
+ repr_traceback = formatter.repr_traceback(e_info)
+ assert repr_traceback is not None
+
+
+def test_cwd_deleted(testdir):
+ testdir.makepyfile(
+ """
+ def test(tmpdir):
+ tmpdir.chdir()
+ tmpdir.remove()
+ assert False
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["* 1 failed in *"])
+ assert "INTERNALERROR" not in result.stdout.str() + result.stderr.str()
+
+
+def test_exception_repr_extraction_error_on_recursion():
+ """
+ Ensure we can properly detect a recursion error even
+ if some locals raise an error on comparison (#2459).
+ """
+
+ class numpy_like(object):
+
+ def __eq__(self, other):
+ if type(other) is numpy_like:
+ raise ValueError(
+ "The truth value of an array "
+ "with more than one element is ambiguous."
+ )
+
+ def a(x):
+ return b(numpy_like())
+
+ def b(x):
+ return a(numpy_like())
+
+ try:
+ a(numpy_like())
+ except: # noqa
+ from _pytest._code.code import ExceptionInfo
+ from _pytest.pytester import LineMatcher
+
+ exc_info = ExceptionInfo()
+
+ matcher = LineMatcher(str(exc_info.getrepr()).splitlines())
+ matcher.fnmatch_lines(
+ [
+ "!!! Recursion error detected, but an error occurred locating the origin of recursion.",
+ "*The following exception happened*",
+ "*ValueError: The truth value of an array*",
+ ]
+ )
+
+
+def test_no_recursion_index_on_recursion_error():
+ """
+ Ensure that we don't break in case we can't find the recursion index
+ during a recursion error (#2486).
+ """
+ try:
+
+ class RecursionDepthError(object):
+
+ def __getattr__(self, attr):
+ return getattr(self, "_" + attr)
+
+ RecursionDepthError().trigger
+ except: # noqa
+ from _pytest._code.code import ExceptionInfo
+
+ exc_info = ExceptionInfo()
+ assert "maximum recursion" in str(exc_info.getrepr())
+ else:
+ assert 0
diff --git a/third_party/python/pytest/testing/code/test_source.py b/third_party/python/pytest/testing/code/test_source.py
new file mode 100644
index 0000000000..56dad75671
--- /dev/null
+++ b/third_party/python/pytest/testing/code/test_source.py
@@ -0,0 +1,758 @@
+# flake8: noqa
+# disable flake8 checks on this file because some constructs are strange
+# or redundant on purpose and can't be disabled on a line-by-line basis
+from __future__ import absolute_import, division, print_function
+import inspect
+import sys
+
+import _pytest._code
+import py
+import pytest
+from _pytest._code import Source
+from _pytest._code.source import ast
+
+
+astonly = pytest.mark.nothing
+failsonjython = pytest.mark.xfail("sys.platform.startswith('java')")
+
+
+def test_source_str_function():
+ x = Source("3")
+ assert str(x) == "3"
+
+ x = Source(" 3")
+ assert str(x) == "3"
+
+ x = Source(
+ """
+ 3
+ """,
+ rstrip=False,
+ )
+ assert str(x) == "\n3\n "
+
+ x = Source(
+ """
+ 3
+ """,
+ rstrip=True,
+ )
+ assert str(x) == "\n3"
+
+
+def test_unicode():
+ try:
+ unicode
+ except NameError:
+ return
+ x = Source(unicode("4"))
+ assert str(x) == "4"
+ co = _pytest._code.compile(unicode('u"\xc3\xa5"', "utf8"), mode="eval")
+ val = eval(co)
+ assert isinstance(val, unicode)
+
+
+def test_source_from_function():
+ source = _pytest._code.Source(test_source_str_function)
+ assert str(source).startswith("def test_source_str_function():")
+
+
+def test_source_from_method():
+
+ class TestClass(object):
+
+ def test_method(self):
+ pass
+
+ source = _pytest._code.Source(TestClass().test_method)
+ assert source.lines == ["def test_method(self):", " pass"]
+
+
+def test_source_from_lines():
+ lines = ["a \n", "b\n", "c"]
+ source = _pytest._code.Source(lines)
+ assert source.lines == ["a ", "b", "c"]
+
+
+def test_source_from_inner_function():
+
+ def f():
+ pass
+
+ source = _pytest._code.Source(f, deindent=False)
+ assert str(source).startswith(" def f():")
+ source = _pytest._code.Source(f)
+ assert str(source).startswith("def f():")
+
+
+def test_source_putaround_simple():
+ source = Source("raise ValueError")
+ source = source.putaround(
+ "try:",
+ """\
+ except ValueError:
+ x = 42
+ else:
+ x = 23""",
+ )
+ assert (
+ str(source)
+ == """\
+try:
+ raise ValueError
+except ValueError:
+ x = 42
+else:
+ x = 23"""
+ )
+
+
+def test_source_putaround():
+ source = Source()
+ source = source.putaround(
+ """
+ if 1:
+ x=1
+ """
+ )
+ assert str(source).strip() == "if 1:\n x=1"
+
+
+def test_source_strips():
+ source = Source("")
+ assert source == Source()
+ assert str(source) == ""
+ assert source.strip() == source
+
+
+def test_source_strip_multiline():
+ source = Source()
+ source.lines = ["", " hello", " "]
+ source2 = source.strip()
+ assert source2.lines == [" hello"]
+
+
+def test_syntaxerror_rerepresentation():
+ ex = pytest.raises(SyntaxError, _pytest._code.compile, "xyz xyz")
+ assert ex.value.lineno == 1
+ assert ex.value.offset in (4, 7) # XXX pypy/jython versus cpython?
+ assert ex.value.text.strip(), "x x"
+
+
+def test_isparseable():
+ assert Source("hello").isparseable()
+ assert Source("if 1:\n pass").isparseable()
+ assert Source(" \nif 1:\n pass").isparseable()
+ assert not Source("if 1:\n").isparseable()
+ assert not Source(" \nif 1:\npass").isparseable()
+ assert not Source(chr(0)).isparseable()
+
+
+class TestAccesses(object):
+ source = Source(
+ """\
+ def f(x):
+ pass
+ def g(x):
+ pass
+ """
+ )
+
+ def test_getrange(self):
+ x = self.source[0:2]
+ assert x.isparseable()
+ assert len(x.lines) == 2
+ assert str(x) == "def f(x):\n pass"
+
+ def test_getline(self):
+ x = self.source[0]
+ assert x == "def f(x):"
+
+ def test_len(self):
+ assert len(self.source) == 4
+
+ def test_iter(self):
+ values = [x for x in self.source]
+ assert len(values) == 4
+
+
+class TestSourceParsingAndCompiling(object):
+ source = Source(
+ """\
+ def f(x):
+ assert (x ==
+ 3 +
+ 4)
+ """
+ ).strip()
+
+ def test_compile(self):
+ co = _pytest._code.compile("x=3")
+ d = {}
+ exec(co, d)
+ assert d["x"] == 3
+
+ def test_compile_and_getsource_simple(self):
+ co = _pytest._code.compile("x=3")
+ exec(co)
+ source = _pytest._code.Source(co)
+ assert str(source) == "x=3"
+
+ def test_compile_and_getsource_through_same_function(self):
+
+ def gensource(source):
+ return _pytest._code.compile(source)
+
+ co1 = gensource(
+ """
+ def f():
+ raise KeyError()
+ """
+ )
+ co2 = gensource(
+ """
+ def f():
+ raise ValueError()
+ """
+ )
+ source1 = inspect.getsource(co1)
+ assert "KeyError" in source1
+ source2 = inspect.getsource(co2)
+ assert "ValueError" in source2
+
+ def test_getstatement(self):
+ # print str(self.source)
+ ass = str(self.source[1:])
+ for i in range(1, 4):
+ # print "trying start in line %r" % self.source[i]
+ s = self.source.getstatement(i)
+ # x = s.deindent()
+ assert str(s) == ass
+
+ def test_getstatementrange_triple_quoted(self):
+ # print str(self.source)
+ source = Source(
+ """hello('''
+ ''')"""
+ )
+ s = source.getstatement(0)
+ assert s == str(source)
+ s = source.getstatement(1)
+ assert s == str(source)
+
+ @astonly
+ def test_getstatementrange_within_constructs(self):
+ source = Source(
+ """\
+ try:
+ try:
+ raise ValueError
+ except SomeThing:
+ pass
+ finally:
+ 42
+ """
+ )
+ assert len(source) == 7
+ # check all lineno's that could occur in a traceback
+ # assert source.getstatementrange(0) == (0, 7)
+ # assert source.getstatementrange(1) == (1, 5)
+ assert source.getstatementrange(2) == (2, 3)
+ assert source.getstatementrange(3) == (3, 4)
+ assert source.getstatementrange(4) == (4, 5)
+ # assert source.getstatementrange(5) == (0, 7)
+ assert source.getstatementrange(6) == (6, 7)
+
+ def test_getstatementrange_bug(self):
+ source = Source(
+ """\
+ try:
+ x = (
+ y +
+ z)
+ except:
+ pass
+ """
+ )
+ assert len(source) == 6
+ assert source.getstatementrange(2) == (1, 4)
+
+ def test_getstatementrange_bug2(self):
+ source = Source(
+ """\
+ assert (
+ 33
+ ==
+ [
+ X(3,
+ b=1, c=2
+ ),
+ ]
+ )
+ """
+ )
+ assert len(source) == 9
+ assert source.getstatementrange(5) == (0, 9)
+
+ def test_getstatementrange_ast_issue58(self):
+ source = Source(
+ """\
+
+ def test_some():
+ for a in [a for a in
+ CAUSE_ERROR]: pass
+
+ x = 3
+ """
+ )
+ assert getstatement(2, source).lines == source.lines[2:3]
+ assert getstatement(3, source).lines == source.lines[3:4]
+
+ def test_getstatementrange_out_of_bounds_py3(self):
+ source = Source("if xxx:\n from .collections import something")
+ r = source.getstatementrange(1)
+ assert r == (1, 2)
+
+ def test_getstatementrange_with_syntaxerror_issue7(self):
+ source = Source(":")
+ pytest.raises(SyntaxError, lambda: source.getstatementrange(0))
+
+ def test_compile_to_ast(self):
+ import ast
+
+ source = Source("x = 4")
+ mod = source.compile(flag=ast.PyCF_ONLY_AST)
+ assert isinstance(mod, ast.Module)
+ compile(mod, "<filename>", "exec")
+
+ def test_compile_and_getsource(self):
+ co = self.source.compile()
+ py.builtin.exec_(co, globals())
+ f(7)
+ excinfo = pytest.raises(AssertionError, "f(6)")
+ frame = excinfo.traceback[-1].frame
+ stmt = frame.code.fullsource.getstatement(frame.lineno)
+ # print "block", str(block)
+ assert str(stmt).strip().startswith("assert")
+
+ @pytest.mark.parametrize("name", ["", None, "my"])
+ def test_compilefuncs_and_path_sanity(self, name):
+
+ def check(comp, name):
+ co = comp(self.source, name)
+ if not name:
+ expected = "codegen %s:%d>" % (mypath, mylineno + 2 + 3)
+ else:
+ expected = "codegen %r %s:%d>" % (name, mypath, mylineno + 2 + 3)
+ fn = co.co_filename
+ assert fn.endswith(expected)
+
+ mycode = _pytest._code.Code(self.test_compilefuncs_and_path_sanity)
+ mylineno = mycode.firstlineno
+ mypath = mycode.path
+
+ for comp in _pytest._code.compile, _pytest._code.Source.compile:
+ check(comp, name)
+
+ def test_offsetless_synerr(self):
+ pytest.raises(SyntaxError, _pytest._code.compile, "lambda a,a: 0", mode="eval")
+
+
+def test_getstartingblock_singleline():
+
+ class A(object):
+
+ def __init__(self, *args):
+ frame = sys._getframe(1)
+ self.source = _pytest._code.Frame(frame).statement
+
+ x = A("x", "y")
+
+ values = [i for i in x.source.lines if i.strip()]
+ assert len(values) == 1
+
+
+def test_getline_finally():
+
+ def c():
+ pass
+
+ excinfo = pytest.raises(
+ TypeError,
+ """
+ teardown = None
+ try:
+ c(1)
+ finally:
+ if teardown:
+ teardown()
+ """,
+ )
+ source = excinfo.traceback[-1].statement
+ assert str(source).strip() == "c(1)"
+
+
+def test_getfuncsource_dynamic():
+ source = """
+ def f():
+ raise ValueError
+
+ def g(): pass
+ """
+ co = _pytest._code.compile(source)
+ py.builtin.exec_(co, globals())
+ assert str(_pytest._code.Source(f)).strip() == "def f():\n raise ValueError"
+ assert str(_pytest._code.Source(g)).strip() == "def g(): pass"
+
+
+def test_getfuncsource_with_multine_string():
+
+ def f():
+ c = """while True:
+ pass
+"""
+
+ assert (
+ str(_pytest._code.Source(f)).strip()
+ == 'def f():\n c = """while True:\n pass\n"""'
+ )
+
+
+def test_deindent():
+ from _pytest._code.source import deindent as deindent
+
+ assert deindent(["\tfoo", "\tbar"]) == ["foo", "bar"]
+
+ def f():
+ c = """while True:
+ pass
+"""
+
+ lines = deindent(inspect.getsource(f).splitlines())
+ assert lines == ["def f():", ' c = """while True:', " pass", '"""']
+
+ source = """
+ def f():
+ def g():
+ pass
+ """
+ lines = deindent(source.splitlines())
+ assert lines == ["", "def f():", " def g():", " pass", " "]
+
+
+def test_source_of_class_at_eof_without_newline(tmpdir):
+ # this test fails because the implicit inspect.getsource(A) below
+ # does not return the "x = 1" last line.
+ source = _pytest._code.Source(
+ """
+ class A(object):
+ def method(self):
+ x = 1
+ """
+ )
+ path = tmpdir.join("a.py")
+ path.write(source)
+ s2 = _pytest._code.Source(tmpdir.join("a.py").pyimport().A)
+ assert str(source).strip() == str(s2).strip()
+
+
+if True:
+
+ def x():
+ pass
+
+
+def test_getsource_fallback():
+ from _pytest._code.source import getsource
+
+ expected = """def x():
+ pass"""
+ src = getsource(x)
+ assert src == expected
+
+
+def test_idem_compile_and_getsource():
+ from _pytest._code.source import getsource
+
+ expected = "def x(): pass"
+ co = _pytest._code.compile(expected)
+ src = getsource(co)
+ assert src == expected
+
+
+def test_findsource_fallback():
+ from _pytest._code.source import findsource
+
+ src, lineno = findsource(x)
+ assert "test_findsource_simple" in str(src)
+ assert src[lineno] == " def x():"
+
+
+def test_findsource():
+ from _pytest._code.source import findsource
+
+ co = _pytest._code.compile(
+ """if 1:
+ def x():
+ pass
+"""
+ )
+
+ src, lineno = findsource(co)
+ assert "if 1:" in str(src)
+
+ d = {}
+ eval(co, d)
+ src, lineno = findsource(d["x"])
+ assert "if 1:" in str(src)
+ assert src[lineno] == " def x():"
+
+
+def test_getfslineno():
+ from _pytest._code import getfslineno
+
+ def f(x):
+ pass
+
+ fspath, lineno = getfslineno(f)
+
+ assert fspath.basename == "test_source.py"
+ assert lineno == _pytest._code.getrawcode(f).co_firstlineno - 1 # see findsource
+
+ class A(object):
+ pass
+
+ fspath, lineno = getfslineno(A)
+
+ _, A_lineno = inspect.findsource(A)
+ assert fspath.basename == "test_source.py"
+ assert lineno == A_lineno
+
+ assert getfslineno(3) == ("", -1)
+
+ class B(object):
+ pass
+
+ B.__name__ = "B2"
+ assert getfslineno(B)[1] == -1
+
+
+def test_code_of_object_instance_with_call():
+
+ class A(object):
+ pass
+
+ pytest.raises(TypeError, lambda: _pytest._code.Source(A()))
+
+ class WithCall(object):
+
+ def __call__(self):
+ pass
+
+ code = _pytest._code.Code(WithCall())
+ assert "pass" in str(code.source())
+
+ class Hello(object):
+
+ def __call__(self):
+ pass
+
+ pytest.raises(TypeError, lambda: _pytest._code.Code(Hello))
+
+
+def getstatement(lineno, source):
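+ """Helper: return the statement containing the given (0-based) line as a Source slice."""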
+ from _pytest._code.source import getstatementrange_ast
+
+ source = _pytest._code.Source(source, deindent=False)
+ ast, start, end = getstatementrange_ast(lineno, source)
+ return source[start:end]
+
+
+def test_oneline():
+ source = getstatement(0, "raise ValueError")
+ assert str(source) == "raise ValueError"
+
+
+def test_comment_and_no_newline_at_end():
+ from _pytest._code.source import getstatementrange_ast
+
+ source = Source(
+ [
+ "def test_basic_complex():",
+ " assert 1 == 2",
+ "# vim: filetype=pyopencl:fdm=marker",
+ ]
+ )
+ ast, start, end = getstatementrange_ast(1, source)
+ assert end == 2
+
+
+def test_oneline_and_comment():
+ source = getstatement(0, "raise ValueError\n#hello")
+ assert str(source) == "raise ValueError"
+
+
+@pytest.mark.xfail(hasattr(sys, "pypy_version_info"), reason="does not work on pypy")
+def test_comments():
+ source = '''def test():
+ "comment 1"
+ x = 1
+ # comment 2
+ # comment 3
+
+ assert False
+
+"""
+comment 4
+"""
+'''
+ for line in range(2, 6):
+ assert str(getstatement(line, source)) == " x = 1"
+ for line in range(6, 10):
+ assert str(getstatement(line, source)) == " assert False"
+ assert str(getstatement(10, source)) == '"""'
+
+
+def test_comment_in_statement():
+ source = """test(foo=1,
+ # comment 1
+ bar=2)
+"""
+ for line in range(1, 3):
+ assert (
+ str(getstatement(line, source))
+ == "test(foo=1,\n # comment 1\n bar=2)"
+ )
+
+
+def test_single_line_else():
+ source = getstatement(1, "if False: 2\nelse: 3")
+ assert str(source) == "else: 3"
+
+
+def test_single_line_finally():
+ source = getstatement(1, "try: 1\nfinally: 3")
+ assert str(source) == "finally: 3"
+
+
+def test_issue55():
+ source = (
+ "def round_trip(dinp):\n assert 1 == dinp\n"
+ 'def test_rt():\n round_trip("""\n""")\n'
+ )
+ s = getstatement(3, source)
+ assert str(s) == ' round_trip("""\n""")'
+
+
+def XXXtest_multiline():
+ source = getstatement(
+ 0,
+ """\
+raise ValueError(
+ 23
+)
+x = 3
+""",
+ )
+ assert str(source) == "raise ValueError(\n 23\n)"
+
+
+class TestTry(object):
+ pytestmark = astonly
+ source = """\
+try:
+ raise ValueError
+except Something:
+ raise IndexError(1)
+else:
+ raise KeyError()
+"""
+
+ def test_body(self):
+ source = getstatement(1, self.source)
+ assert str(source) == " raise ValueError"
+
+ def test_except_line(self):
+ source = getstatement(2, self.source)
+ assert str(source) == "except Something:"
+
+ def test_except_body(self):
+ source = getstatement(3, self.source)
+ assert str(source) == " raise IndexError(1)"
+
+ def test_else(self):
+ source = getstatement(5, self.source)
+ assert str(source) == " raise KeyError()"
+
+
+class TestTryFinally(object):
+ source = """\
+try:
+ raise ValueError
+finally:
+ raise IndexError(1)
+"""
+
+ def test_body(self):
+ source = getstatement(1, self.source)
+ assert str(source) == " raise ValueError"
+
+ def test_finally(self):
+ source = getstatement(3, self.source)
+ assert str(source) == " raise IndexError(1)"
+
+
+class TestIf(object):
+ pytestmark = astonly
+ source = """\
+if 1:
+ y = 3
+elif False:
+ y = 5
+else:
+ y = 7
+"""
+
+ def test_body(self):
+ source = getstatement(1, self.source)
+ assert str(source) == " y = 3"
+
+ def test_elif_clause(self):
+ source = getstatement(2, self.source)
+ assert str(source) == "elif False:"
+
+ def test_elif(self):
+ source = getstatement(3, self.source)
+ assert str(source) == " y = 5"
+
+ def test_else(self):
+ source = getstatement(5, self.source)
+ assert str(source) == " y = 7"
+
+
+def test_semicolon():
+ s = """\
+hello ; pytest.skip()
+"""
+ source = getstatement(0, s)
+ assert str(source) == s.strip()
+
+
+def test_def_online():
+ s = """\
+def func(): raise ValueError(42)
+
+def something():
+ pass
+"""
+ source = getstatement(0, s)
+ assert str(source) == "def func(): raise ValueError(42)"
+
+
+def XXX_test_expression_multiline():
+ source = """\
+something
+'''
+'''"""
+ result = getstatement(1, source)
+ assert str(result) == "'''\n'''"
diff --git a/third_party/python/pytest/testing/code/test_source_multiline_block.py b/third_party/python/pytest/testing/code/test_source_multiline_block.py
new file mode 100644
index 0000000000..92f7412eb8
--- /dev/null
+++ b/third_party/python/pytest/testing/code/test_source_multiline_block.py
@@ -0,0 +1,29 @@
+# flake8: noqa
+import sys
+
+import _pytest._code
+
+
+def test_getstartingblock_multiline():
+ """
+ This test was originally found in test_source.py, but it depends on the weird
+ formatting of the ``x = A`` construct seen here and our autopep8 tool can only exclude entire
+ files (it does not support excluding lines/blocks using the traditional #noqa comment yet,
+ see hhatto/autopep8#307). It was considered better to just move this single test to its own
+ file and exclude it from autopep8 than try to complicate things.
+ """
+
+ class A(object):
+
+ def __init__(self, *args):
+ frame = sys._getframe(1)
+ self.source = _pytest._code.Frame(frame).statement
+
+ # fmt: off
+ x = A('x',
+ 'y'
+ ,
+ 'z')
+ # fmt: on
+ values = [i for i in x.source.lines if i.strip()]
+ assert len(values) == 4
diff --git a/third_party/python/pytest/testing/deprecated_test.py b/third_party/python/pytest/testing/deprecated_test.py
new file mode 100644
index 0000000000..39ff1f1fc3
--- /dev/null
+++ b/third_party/python/pytest/testing/deprecated_test.py
@@ -0,0 +1,265 @@
+from __future__ import absolute_import, division, print_function
+import pytest
+
+
+def test_yield_tests_deprecation(testdir):
+ testdir.makepyfile(
+ """
+ def func1(arg, arg2):
+ assert arg == arg2
+ def test_gen():
+ yield "m1", func1, 15, 3*5
+ yield "m2", func1, 42, 6*7
+ def test_gen2():
+ for k in range(10):
+ yield func1, 1, 1
+ """
+ )
+ result = testdir.runpytest("-ra")
+ result.stdout.fnmatch_lines(
+ [
+ "*yield tests are deprecated, and scheduled to be removed in pytest 4.0*",
+ "*2 passed*",
+ ]
+ )
+ assert result.stdout.str().count("yield tests are deprecated") == 2
+
+
+def test_funcarg_prefix_deprecation(testdir):
+ testdir.makepyfile(
+ """
+ def pytest_funcarg__value():
+ return 10
+
+ def test_funcarg_prefix(value):
+ assert value == 10
+ """
+ )
+ result = testdir.runpytest("-ra")
+ result.stdout.fnmatch_lines(
+ [
+ (
+ "*pytest_funcarg__value: "
+ 'declaring fixtures using "pytest_funcarg__" prefix is deprecated '
+ "and scheduled to be removed in pytest 4.0. "
+ "Please remove the prefix and use the @pytest.fixture decorator instead."
+ ),
+ "*1 passed*",
+ ]
+ )
+
+
+def test_pytest_setup_cfg_deprecated(testdir):
+ testdir.makefile(
+ ".cfg",
+ setup="""
+ [pytest]
+ addopts = --verbose
+ """,
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*pytest*section in setup.cfg files is deprecated*use*tool:pytest*instead*"]
+ )
+
+
+def test_pytest_custom_cfg_deprecated(testdir):
+ testdir.makefile(
+ ".cfg",
+ custom="""
+ [pytest]
+ addopts = --verbose
+ """,
+ )
+ result = testdir.runpytest("-c", "custom.cfg")
+ result.stdout.fnmatch_lines(
+ ["*pytest*section in custom.cfg files is deprecated*use*tool:pytest*instead*"]
+ )
+
+
+def test_str_args_deprecated(tmpdir, testdir):
+ """Deprecate passing strings to pytest.main(). Scheduled for removal in pytest-4.0."""
+ from _pytest.main import EXIT_NOTESTSCOLLECTED
+
+ warnings = []
+
+ class Collect(object):
+
+ def pytest_logwarning(self, message):
+ warnings.append(message)
+
+ ret = pytest.main("%s -x" % tmpdir, plugins=[Collect()])
+ msg = (
+ "passing a string to pytest.main() is deprecated, "
+ "pass a list of arguments instead."
+ )
+ assert msg in warnings
+ assert ret == EXIT_NOTESTSCOLLECTED
+
+
+def test_getfuncargvalue_is_deprecated(request):
+ pytest.deprecated_call(request.getfuncargvalue, "tmpdir")
+
+
+def test_resultlog_is_deprecated(testdir):
+ result = testdir.runpytest("--help")
+ result.stdout.fnmatch_lines(["*DEPRECATED path for machine-readable result log*"])
+
+ testdir.makepyfile(
+ """
+ def test():
+ pass
+ """
+ )
+ result = testdir.runpytest("--result-log=%s" % testdir.tmpdir.join("result.log"))
+ result.stdout.fnmatch_lines(
+ [
+ "*--result-log is deprecated and scheduled for removal in pytest 4.0*",
+ "*See https://docs.pytest.org/*/usage.html#creating-resultlog-format-files for more information*",
+ ]
+ )
+
+
+@pytest.mark.filterwarnings("always:Metafunc.addcall is deprecated")
+def test_metafunc_addcall_deprecated(testdir):
+ testdir.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.addcall({'i': 1})
+ metafunc.addcall({'i': 2})
+ def test_func(i):
+ pass
+ """
+ )
+ res = testdir.runpytest("-s")
+ assert res.ret == 0
+ res.stdout.fnmatch_lines(
+ ["*Metafunc.addcall is deprecated*", "*2 passed, 2 warnings*"]
+ )
+
+
+def test_terminal_reporter_writer_attr(pytestconfig):
+ """Check that TerminalReporter._tw is also available as 'writer' (#2984)
+ This attribute is planned to be deprecated in 3.4.
+ """
+ try:
+ import xdist # noqa
+
+ pytest.skip("xdist workers disable the terminal reporter plugin")
+ except ImportError:
+ pass
+ terminal_reporter = pytestconfig.pluginmanager.get_plugin("terminalreporter")
+ assert terminal_reporter.writer is terminal_reporter._tw
+
+
+@pytest.mark.parametrize("plugin", ["catchlog", "capturelog"])
+def test_pytest_catchlog_deprecated(testdir, plugin):
+ testdir.makepyfile(
+ """
+ def test_func(pytestconfig):
+ pytestconfig.pluginmanager.register(None, 'pytest_{}')
+ """.format(
+ plugin
+ )
+ )
+ res = testdir.runpytest()
+ assert res.ret == 0
+ res.stdout.fnmatch_lines(
+ ["*pytest-*log plugin has been merged into the core*", "*1 passed, 1 warnings*"]
+ )
+
+
+def test_pytest_plugins_in_non_top_level_conftest_deprecated(testdir):
+ from _pytest.deprecated import PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST
+
+ subdirectory = testdir.tmpdir.join("subdirectory")
+ subdirectory.mkdir()
+ # create the inner conftest with makeconftest and then move it to the subdirectory
+ testdir.makeconftest(
+ """
+ pytest_plugins=['capture']
+ """
+ )
+ testdir.tmpdir.join("conftest.py").move(subdirectory.join("conftest.py"))
+ # make the top level conftest
+ testdir.makeconftest(
+ """
+ import warnings
+ warnings.filterwarnings('always', category=DeprecationWarning)
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_func():
+ pass
+ """
+ )
+ res = testdir.runpytest_subprocess()
+ assert res.ret == 0
+ res.stderr.fnmatch_lines(
+ "*" + str(PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST).splitlines()[0]
+ )
+
+
+def test_pytest_plugins_in_non_top_level_conftest_deprecated_no_top_level_conftest(
+ testdir
+):
+ from _pytest.deprecated import PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST
+
+ subdirectory = testdir.tmpdir.join("subdirectory")
+ subdirectory.mkdir()
+ testdir.makeconftest(
+ """
+ import warnings
+ warnings.filterwarnings('always', category=DeprecationWarning)
+ pytest_plugins=['capture']
+ """
+ )
+ testdir.tmpdir.join("conftest.py").move(subdirectory.join("conftest.py"))
+
+ testdir.makepyfile(
+ """
+ def test_func():
+ pass
+ """
+ )
+
+ res = testdir.runpytest_subprocess()
+ assert res.ret == 0
+ res.stderr.fnmatch_lines(
+ "*" + str(PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST).splitlines()[0]
+ )
+
+
+def test_pytest_plugins_in_non_top_level_conftest_deprecated_no_false_positives(
+ testdir
+):
+ from _pytest.deprecated import PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST
+
+ subdirectory = testdir.tmpdir.join("subdirectory")
+ subdirectory.mkdir()
+ testdir.makeconftest(
+ """
+ pass
+ """
+ )
+ testdir.tmpdir.join("conftest.py").move(subdirectory.join("conftest.py"))
+
+ testdir.makeconftest(
+ """
+ import warnings
+ warnings.filterwarnings('always', category=DeprecationWarning)
+ pytest_plugins=['capture']
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_func():
+ pass
+ """
+ )
+ res = testdir.runpytest_subprocess()
+ assert res.ret == 0
+ assert str(PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST).splitlines()[
+ 0
+ ] not in res.stderr.str()
diff --git a/third_party/python/pytest/testing/freeze/.gitignore b/third_party/python/pytest/testing/freeze/.gitignore
new file mode 100644
index 0000000000..b533190872
--- /dev/null
+++ b/third_party/python/pytest/testing/freeze/.gitignore
@@ -0,0 +1,3 @@
+build/
+dist/
+*.spec
diff --git a/third_party/python/pytest/testing/freeze/create_executable.py b/third_party/python/pytest/testing/freeze/create_executable.py
new file mode 100644
index 0000000000..98aa2034c9
--- /dev/null
+++ b/third_party/python/pytest/testing/freeze/create_executable.py
@@ -0,0 +1,12 @@
+"""
+Generates an executable with the pytest runner embedded, using PyInstaller.
+"""
+if __name__ == "__main__":
+ import pytest
+ import subprocess
+
+ hidden = []
+ for x in pytest.freeze_includes():
+ hidden.extend(["--hidden-import", x])
+ args = ["pyinstaller", "--noconfirm"] + hidden + ["runtests_script.py"]
+ subprocess.check_call(" ".join(args), shell=True)
diff --git a/third_party/python/pytest/testing/freeze/runtests_script.py b/third_party/python/pytest/testing/freeze/runtests_script.py
new file mode 100644
index 0000000000..d03bca8406
--- /dev/null
+++ b/third_party/python/pytest/testing/freeze/runtests_script.py
@@ -0,0 +1,10 @@
+"""
+This is the script that is actually frozen into an executable: it simply
+executes pytest.main().
+"""
+
+if __name__ == "__main__":
+ import sys
+ import pytest
+
+ sys.exit(pytest.main())
diff --git a/third_party/python/pytest/testing/freeze/tests/test_doctest.txt b/third_party/python/pytest/testing/freeze/tests/test_doctest.txt
new file mode 100644
index 0000000000..e18a4b68cc
--- /dev/null
+++ b/third_party/python/pytest/testing/freeze/tests/test_doctest.txt
@@ -0,0 +1,6 @@
+
+
+Testing doctest::
+
+ >>> 1 + 1
+ 2
diff --git a/third_party/python/pytest/testing/freeze/tests/test_trivial.py b/third_party/python/pytest/testing/freeze/tests/test_trivial.py
new file mode 100644
index 0000000000..08a55552ab
--- /dev/null
+++ b/third_party/python/pytest/testing/freeze/tests/test_trivial.py
@@ -0,0 +1,6 @@
+def test_upper():
+ assert "foo".upper() == "FOO"
+
+
+def test_lower():
+ assert "FOO".lower() == "foo"
diff --git a/third_party/python/pytest/testing/freeze/tox_run.py b/third_party/python/pytest/testing/freeze/tox_run.py
new file mode 100644
index 0000000000..678a69c858
--- /dev/null
+++ b/third_party/python/pytest/testing/freeze/tox_run.py
@@ -0,0 +1,12 @@
+"""
+Called by tox.ini: uses the generated executable to run the tests in the
+./tests/ directory.
+"""
+if __name__ == "__main__":
+ import os
+ import sys
+
+ executable = os.path.join(os.getcwd(), "dist", "runtests_script", "runtests_script")
+ if sys.platform.startswith("win"):
+ executable += ".exe"
+ sys.exit(os.system("%s tests" % executable))
diff --git a/third_party/python/pytest/testing/logging/test_fixture.py b/third_party/python/pytest/testing/logging/test_fixture.py
new file mode 100644
index 0000000000..8d9ae6b518
--- /dev/null
+++ b/third_party/python/pytest/testing/logging/test_fixture.py
@@ -0,0 +1,119 @@
+# -*- coding: utf-8 -*-
+import logging
+
+import pytest
+
+logger = logging.getLogger(__name__)
+sublogger = logging.getLogger(__name__ + ".baz")
+
+
+def test_fixture_help(testdir):
+ result = testdir.runpytest("--fixtures")
+ result.stdout.fnmatch_lines(["*caplog*"])
+
+
+def test_change_level(caplog):
+ caplog.set_level(logging.INFO)
+ logger.debug("handler DEBUG level")
+ logger.info("handler INFO level")
+
+ caplog.set_level(logging.CRITICAL, logger=sublogger.name)
+ sublogger.warning("logger WARNING level")
+ sublogger.critical("logger CRITICAL level")
+
+ assert "DEBUG" not in caplog.text
+ assert "INFO" in caplog.text
+ assert "WARNING" not in caplog.text
+ assert "CRITICAL" in caplog.text
+
+
+def test_change_level_undo(testdir):
+ """Ensure that 'set_level' is undone after the end of the test"""
+ testdir.makepyfile(
+ """
+ import logging
+
+ def test1(caplog):
+ caplog.set_level(logging.INFO)
+ # using + operator here so fnmatch_lines doesn't match the code in the traceback
+ logging.info('log from ' + 'test1')
+ assert 0
+
+ def test2(caplog):
+ # using + operator here so fnmatch_lines doesn't match the code in the traceback
+ logging.info('log from ' + 'test2')
+ assert 0
+ """
+ )
+ result = testdir.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*log from test1*", "*2 failed in *"])
+ assert "log from test2" not in result.stdout.str()
+
+
+def test_with_statement(caplog):
+ with caplog.at_level(logging.INFO):
+ logger.debug("handler DEBUG level")
+ logger.info("handler INFO level")
+
+ with caplog.at_level(logging.CRITICAL, logger=sublogger.name):
+ sublogger.warning("logger WARNING level")
+ sublogger.critical("logger CRITICAL level")
+
+ assert "DEBUG" not in caplog.text
+ assert "INFO" in caplog.text
+ assert "WARNING" not in caplog.text
+ assert "CRITICAL" in caplog.text
+
+
+def test_log_access(caplog):
+ caplog.set_level(logging.INFO)
+ logger.info("boo %s", "arg")
+ assert caplog.records[0].levelname == "INFO"
+ assert caplog.records[0].msg == "boo %s"
+ assert "boo arg" in caplog.text
+
+
+def test_record_tuples(caplog):
+ caplog.set_level(logging.INFO)
+ logger.info("boo %s", "arg")
+
+ assert caplog.record_tuples == [(__name__, logging.INFO, "boo arg")]
+
+
+def test_unicode(caplog):
+ caplog.set_level(logging.INFO)
+ logger.info(u"bū")
+ assert caplog.records[0].levelname == "INFO"
+ assert caplog.records[0].msg == u"bū"
+ assert u"bū" in caplog.text
+
+
+def test_clear(caplog):
+ caplog.set_level(logging.INFO)
+ logger.info(u"bū")
+ assert len(caplog.records)
+ assert caplog.text
+ caplog.clear()
+ assert not len(caplog.records)
+ assert not caplog.text
+
+
+@pytest.fixture
+def logging_during_setup_and_teardown(caplog):
+ caplog.set_level("INFO")
+ logger.info("a_setup_log")
+ yield
+ logger.info("a_teardown_log")
+ assert [x.message for x in caplog.get_records("teardown")] == ["a_teardown_log"]
+
+
+def test_caplog_captures_for_all_stages(caplog, logging_during_setup_and_teardown):
+ assert not caplog.records
+ assert not caplog.get_records("call")
+ logger.info("a_call_log")
+ assert [x.message for x in caplog.get_records("call")] == ["a_call_log"]
+
+ assert [x.message for x in caplog.get_records("setup")] == ["a_setup_log"]
+
+ # This reaches into private API; don't use this type of thing in real tests!
+ assert set(caplog._item.catch_log_handlers.keys()) == {"setup", "call"}
diff --git a/third_party/python/pytest/testing/logging/test_formatter.py b/third_party/python/pytest/testing/logging/test_formatter.py
new file mode 100644
index 0000000000..ca2a410653
--- /dev/null
+++ b/third_party/python/pytest/testing/logging/test_formatter.py
@@ -0,0 +1,37 @@
+import logging
+
+import py.io
+from _pytest.logging import ColoredLevelFormatter
+
+
+def test_coloredlogformatter():
+ logfmt = "%(filename)-25s %(lineno)4d %(levelname)-8s %(message)s"
+
+ record = logging.LogRecord(
+ name="dummy",
+ level=logging.INFO,
+ pathname="dummypath",
+ lineno=10,
+ msg="Test Message",
+ args=(),
+ exc_info=False,
+ )
+
+ class ColorConfig(object):
+
+ class option(object):
+ pass
+
+ tw = py.io.TerminalWriter()
+ tw.hasmarkup = True
+ formatter = ColoredLevelFormatter(tw, logfmt)
+ output = formatter.format(record)
+ assert (
+ output
+ == ("dummypath 10 " "\x1b[32mINFO \x1b[0m Test Message")
+ )
+
+ tw.hasmarkup = False
+ formatter = ColoredLevelFormatter(tw, logfmt)
+ output = formatter.format(record)
+ assert output == ("dummypath 10 " "INFO Test Message")
diff --git a/third_party/python/pytest/testing/logging/test_reporting.py b/third_party/python/pytest/testing/logging/test_reporting.py
new file mode 100644
index 0000000000..91ed2e4758
--- /dev/null
+++ b/third_party/python/pytest/testing/logging/test_reporting.py
@@ -0,0 +1,874 @@
+# -*- coding: utf-8 -*-
+import re
+import os
+
+import six
+
+import pytest
+
+
+def test_nothing_logged(testdir):
+ testdir.makepyfile(
+ """
+ import sys
+
+ def test_foo():
+ sys.stdout.write('text going to stdout')
+ sys.stderr.write('text going to stderr')
+ assert False
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*- Captured stdout call -*", "text going to stdout"])
+ result.stdout.fnmatch_lines(["*- Captured stderr call -*", "text going to stderr"])
+ with pytest.raises(pytest.fail.Exception):
+ result.stdout.fnmatch_lines(["*- Captured *log call -*"])
+
+
+def test_messages_logged(testdir):
+ testdir.makepyfile(
+ """
+ import sys
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ def test_foo():
+ sys.stdout.write('text going to stdout')
+ sys.stderr.write('text going to stderr')
+ logger.info('text going to logger')
+ assert False
+ """
+ )
+ result = testdir.runpytest("--log-level=INFO")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*- Captured *log call -*", "*text going to logger*"])
+ result.stdout.fnmatch_lines(["*- Captured stdout call -*", "text going to stdout"])
+ result.stdout.fnmatch_lines(["*- Captured stderr call -*", "text going to stderr"])
+
+
+def test_root_logger_affected(testdir):
+ testdir.makepyfile(
+ """
+ import logging
+ logger = logging.getLogger()
+ def test_foo():
+ logger.info('info text ' + 'going to logger')
+ logger.warning('warning text ' + 'going to logger')
+ logger.error('error text ' + 'going to logger')
+
+ assert 0
+ """
+ )
+ log_file = testdir.tmpdir.join("pytest.log").strpath
+ result = testdir.runpytest("--log-level=ERROR", "--log-file=pytest.log")
+ assert result.ret == 1
+
+ # the capture log calls in the stdout section only contain the
+ # logger.error msg, because --log-level=ERROR
+ result.stdout.fnmatch_lines(["*error text going to logger*"])
+ with pytest.raises(pytest.fail.Exception):
+ result.stdout.fnmatch_lines(["*warning text going to logger*"])
+ with pytest.raises(pytest.fail.Exception):
+ result.stdout.fnmatch_lines(["*info text going to logger*"])
+
+ # the log file should contain the warning and the error log messages and
+ # not the info one, because the default level of the root logger is
+ # WARNING.
+ assert os.path.isfile(log_file)
+ with open(log_file) as rfh:
+ contents = rfh.read()
+ assert "info text going to logger" not in contents
+ assert "warning text going to logger" in contents
+ assert "error text going to logger" in contents
+
+
+def test_log_cli_level_log_level_interaction(testdir):
+ testdir.makepyfile(
+ """
+ import logging
+ logger = logging.getLogger()
+
+ def test_foo():
+ logger.debug('debug text ' + 'going to logger')
+ logger.info('info text ' + 'going to logger')
+ logger.warning('warning text ' + 'going to logger')
+ logger.error('error text ' + 'going to logger')
+ assert 0
+ """
+ )
+
+ result = testdir.runpytest("--log-cli-level=INFO", "--log-level=ERROR")
+ assert result.ret == 1
+
+ result.stdout.fnmatch_lines(
+ [
+ "*-- live log call --*",
+ "*INFO*info text going to logger",
+ "*WARNING*warning text going to logger",
+ "*ERROR*error text going to logger",
+ "=* 1 failed in *=",
+ ]
+ )
+ assert "DEBUG" not in result.stdout.str()
+
+
+def test_setup_logging(testdir):
+ testdir.makepyfile(
+ """
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ def setup_function(function):
+ logger.info('text going to logger from setup')
+
+ def test_foo():
+ logger.info('text going to logger from call')
+ assert False
+ """
+ )
+ result = testdir.runpytest("--log-level=INFO")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ [
+ "*- Captured *log setup -*",
+ "*text going to logger from setup*",
+ "*- Captured *log call -*",
+ "*text going to logger from call*",
+ ]
+ )
+
+
+def test_teardown_logging(testdir):
+ testdir.makepyfile(
+ """
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ def test_foo():
+ logger.info('text going to logger from call')
+
+ def teardown_function(function):
+ logger.info('text going to logger from teardown')
+ assert False
+ """
+ )
+ result = testdir.runpytest("--log-level=INFO")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ [
+ "*- Captured *log call -*",
+ "*text going to logger from call*",
+ "*- Captured *log teardown -*",
+ "*text going to logger from teardown*",
+ ]
+ )
+
+
+def test_disable_log_capturing(testdir):
+ testdir.makepyfile(
+ """
+ import sys
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ def test_foo():
+ sys.stdout.write('text going to stdout')
+ logger.warning('catch me if you can!')
+ sys.stderr.write('text going to stderr')
+ assert False
+ """
+ )
+ result = testdir.runpytest("--no-print-logs")
+ print(result.stdout)
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*- Captured stdout call -*", "text going to stdout"])
+ result.stdout.fnmatch_lines(["*- Captured stderr call -*", "text going to stderr"])
+ with pytest.raises(pytest.fail.Exception):
+ result.stdout.fnmatch_lines(["*- Captured *log call -*"])
+
+
+def test_disable_log_capturing_ini(testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ log_print=False
+ """
+ )
+ testdir.makepyfile(
+ """
+ import sys
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ def test_foo():
+ sys.stdout.write('text going to stdout')
+ logger.warning('catch me if you can!')
+ sys.stderr.write('text going to stderr')
+ assert False
+ """
+ )
+ result = testdir.runpytest()
+ print(result.stdout)
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*- Captured stdout call -*", "text going to stdout"])
+ result.stdout.fnmatch_lines(["*- Captured stderr call -*", "text going to stderr"])
+ with pytest.raises(pytest.fail.Exception):
+ result.stdout.fnmatch_lines(["*- Captured *log call -*"])
+
+
+@pytest.mark.parametrize("enabled", [True, False])
+def test_log_cli_enabled_disabled(testdir, enabled):
+ msg = "critical message logged by test"
+ testdir.makepyfile(
+ """
+ import logging
+ def test_log_cli():
+ logging.critical("{}")
+ """.format(
+ msg
+ )
+ )
+ if enabled:
+ testdir.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+ result = testdir.runpytest()
+ if enabled:
+ result.stdout.fnmatch_lines(
+ [
+ "test_log_cli_enabled_disabled.py::test_log_cli ",
+ "*-- live log call --*",
+ "test_log_cli_enabled_disabled.py* CRITICAL critical message logged by test",
+ "PASSED*",
+ ]
+ )
+ else:
+ assert msg not in result.stdout.str()
+
+
+def test_log_cli_default_level(testdir):
+ # Default log CLI level
+ testdir.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_cli(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_cli_handler.level == logging.NOTSET
+ logging.getLogger('catchlog').info("INFO message won't be shown")
+ logging.getLogger('catchlog').warning("WARNING message will be shown")
+ """
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+
+ result = testdir.runpytest()
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(
+ [
+ "test_log_cli_default_level.py::test_log_cli ",
+ "test_log_cli_default_level.py*WARNING message will be shown*",
+ ]
+ )
+ assert "INFO message won't be shown" not in result.stdout.str()
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+
+
+def test_log_cli_default_level_multiple_tests(testdir, request):
+ """Ensure we reset the first newline added by the live logger between tests"""
+ filename = request.node.name + ".py"
+ testdir.makepyfile(
+ """
+ import logging
+
+ def test_log_1():
+ logging.warning("log message from test_log_1")
+
+ def test_log_2():
+ logging.warning("log message from test_log_2")
+ """
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "{}::test_log_1 ".format(filename),
+ "*WARNING*log message from test_log_1*",
+ "PASSED *50%*",
+ "{}::test_log_2 ".format(filename),
+ "*WARNING*log message from test_log_2*",
+ "PASSED *100%*",
+ "=* 2 passed in *=",
+ ]
+ )
+
+
+def test_log_cli_default_level_sections(testdir, request):
+ """Check that with live logging enable we are printing the correct headers during
+ start/setup/call/teardown/finish."""
+ filename = request.node.name + ".py"
+ testdir.makeconftest(
+ """
+ import pytest
+ import logging
+
+ def pytest_runtest_logstart():
+ logging.warning('>>>>> START >>>>>')
+
+ def pytest_runtest_logfinish():
+ logging.warning('<<<<< END <<<<<<<')
+ """
+ )
+
+ testdir.makepyfile(
+ """
+ import pytest
+ import logging
+
+ @pytest.fixture
+ def fix(request):
+ logging.warning("log message from setup of {}".format(request.node.name))
+ yield
+ logging.warning("log message from teardown of {}".format(request.node.name))
+
+ def test_log_1(fix):
+ logging.warning("log message from test_log_1")
+
+ def test_log_2(fix):
+ logging.warning("log message from test_log_2")
+ """
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "{}::test_log_1 ".format(filename),
+ "*-- live log start --*",
+ "*WARNING* >>>>> START >>>>>*",
+ "*-- live log setup --*",
+ "*WARNING*log message from setup of test_log_1*",
+ "*-- live log call --*",
+ "*WARNING*log message from test_log_1*",
+ "PASSED *50%*",
+ "*-- live log teardown --*",
+ "*WARNING*log message from teardown of test_log_1*",
+ "*-- live log finish --*",
+ "*WARNING* <<<<< END <<<<<<<*",
+ "{}::test_log_2 ".format(filename),
+ "*-- live log start --*",
+ "*WARNING* >>>>> START >>>>>*",
+ "*-- live log setup --*",
+ "*WARNING*log message from setup of test_log_2*",
+ "*-- live log call --*",
+ "*WARNING*log message from test_log_2*",
+ "PASSED *100%*",
+ "*-- live log teardown --*",
+ "*WARNING*log message from teardown of test_log_2*",
+ "*-- live log finish --*",
+ "*WARNING* <<<<< END <<<<<<<*",
+ "=* 2 passed in *=",
+ ]
+ )
+
+
+def test_live_logs_unknown_sections(testdir, request):
+ """Check that with live logging enable we are printing the correct headers during
+ start/setup/call/teardown/finish."""
+ filename = request.node.name + ".py"
+ testdir.makeconftest(
+ """
+ import pytest
+ import logging
+
+ def pytest_runtest_protocol(item, nextitem):
+ logging.warning('Unknown Section!')
+
+ def pytest_runtest_logstart():
+ logging.warning('>>>>> START >>>>>')
+
+ def pytest_runtest_logfinish():
+ logging.warning('<<<<< END <<<<<<<')
+ """
+ )
+
+ testdir.makepyfile(
+ """
+ import pytest
+ import logging
+
+ @pytest.fixture
+ def fix(request):
+ logging.warning("log message from setup of {}".format(request.node.name))
+ yield
+ logging.warning("log message from teardown of {}".format(request.node.name))
+
+ def test_log_1(fix):
+ logging.warning("log message from test_log_1")
+
+ """
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*WARNING*Unknown Section*",
+ "{}::test_log_1 ".format(filename),
+ "*WARNING* >>>>> START >>>>>*",
+ "*-- live log setup --*",
+ "*WARNING*log message from setup of test_log_1*",
+ "*-- live log call --*",
+ "*WARNING*log message from test_log_1*",
+ "PASSED *100%*",
+ "*-- live log teardown --*",
+ "*WARNING*log message from teardown of test_log_1*",
+ "*WARNING* <<<<< END <<<<<<<*",
+ "=* 1 passed in *=",
+ ]
+ )
+
+
+def test_sections_single_new_line_after_test_outcome(testdir, request):
+ """Check that only a single new line is written between log messages during
+ teardown/finish."""
+ filename = request.node.name + ".py"
+ testdir.makeconftest(
+ """
+ import pytest
+ import logging
+
+ def pytest_runtest_logstart():
+ logging.warning('>>>>> START >>>>>')
+
+ def pytest_runtest_logfinish():
+ logging.warning('<<<<< END <<<<<<<')
+ logging.warning('<<<<< END <<<<<<<')
+ """
+ )
+
+ testdir.makepyfile(
+ """
+ import pytest
+ import logging
+
+ @pytest.fixture
+ def fix(request):
+ logging.warning("log message from setup of {}".format(request.node.name))
+ yield
+ logging.warning("log message from teardown of {}".format(request.node.name))
+ logging.warning("log message from teardown of {}".format(request.node.name))
+
+ def test_log_1(fix):
+ logging.warning("log message from test_log_1")
+ """
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "{}::test_log_1 ".format(filename),
+ "*-- live log start --*",
+ "*WARNING* >>>>> START >>>>>*",
+ "*-- live log setup --*",
+ "*WARNING*log message from setup of test_log_1*",
+ "*-- live log call --*",
+ "*WARNING*log message from test_log_1*",
+ "PASSED *100%*",
+ "*-- live log teardown --*",
+ "*WARNING*log message from teardown of test_log_1*",
+ "*-- live log finish --*",
+ "*WARNING* <<<<< END <<<<<<<*",
+ "*WARNING* <<<<< END <<<<<<<*",
+ "=* 1 passed in *=",
+ ]
+ )
+ assert re.search(
+ r"(.+)live log teardown(.+)\n(.+)WARNING(.+)\n(.+)WARNING(.+)",
+ result.stdout.str(),
+ re.MULTILINE,
+ ) is not None
+ assert re.search(
+ r"(.+)live log finish(.+)\n(.+)WARNING(.+)\n(.+)WARNING(.+)",
+ result.stdout.str(),
+ re.MULTILINE,
+ ) is not None
+
+
+def test_log_cli_level(testdir):
+ # Default log CLI level
+ testdir.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_cli(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_cli_handler.level == logging.INFO
+ logging.getLogger('catchlog').debug("This log message won't be shown")
+ logging.getLogger('catchlog').info("This log message will be shown")
+ print('PASSED')
+ """
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+
+ result = testdir.runpytest("-s", "--log-cli-level=INFO")
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(
+ [
+ "test_log_cli_level.py*This log message will be shown",
+ "PASSED", # 'PASSED' on its own line because the log message prints a new line
+ ]
+ )
+ assert "This log message won't be shown" not in result.stdout.str()
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+
+ result = testdir.runpytest("-s", "--log-level=INFO")
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(
+ [
+ "test_log_cli_level.py* This log message will be shown",
+ "PASSED", # 'PASSED' on its own line because the log message prints a new line
+ ]
+ )
+ assert "This log message won't be shown" not in result.stdout.str()
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+
+
+def test_log_cli_ini_level(testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ log_cli=true
+ log_cli_level = INFO
+ """
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_cli(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_cli_handler.level == logging.INFO
+ logging.getLogger('catchlog').debug("This log message won't be shown")
+ logging.getLogger('catchlog').info("This log message will be shown")
+ print('PASSED')
+ """
+ )
+
+ result = testdir.runpytest("-s")
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(
+ [
+ "test_log_cli_ini_level.py* This log message will be shown",
+ "PASSED", # 'PASSED' on its own line because the log message prints a new line
+ ]
+ )
+ assert "This log message won't be shown" not in result.stdout.str()
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+
+
+@pytest.mark.parametrize(
+ "cli_args",
+ ["", "--log-level=WARNING", "--log-file-level=WARNING", "--log-cli-level=WARNING"],
+)
+def test_log_cli_auto_enable(testdir, request, cli_args):
+ """Check that live logs are enabled if --log-level or --log-cli-level is passed on the CLI.
+ It should not be auto-enabled if the same configs are set in the INI file.
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+ import logging
+
+ def test_log_1():
+ logging.info("log message from test_log_1 not to be shown")
+ logging.warning("log message from test_log_1")
+
+ """
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ log_level=INFO
+ log_cli_level=INFO
+ """
+ )
+
+ result = testdir.runpytest(cli_args)
+ if cli_args == "--log-cli-level=WARNING":
+ result.stdout.fnmatch_lines(
+ [
+ "*::test_log_1 ",
+ "*-- live log call --*",
+ "*WARNING*log message from test_log_1*",
+ "PASSED *100%*",
+ "=* 1 passed in *=",
+ ]
+ )
+ assert "INFO" not in result.stdout.str()
+ else:
+ result.stdout.fnmatch_lines(
+ ["*test_log_cli_auto_enable*100%*", "=* 1 passed in *="]
+ )
+ assert "INFO" not in result.stdout.str()
+ assert "WARNING" not in result.stdout.str()
+
+
+def test_log_file_cli(testdir):
+ # Default log file level
+ testdir.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_file(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_file_handler.level == logging.WARNING
+ logging.getLogger('catchlog').info("This log message won't be shown")
+ logging.getLogger('catchlog').warning("This log message will be shown")
+ print('PASSED')
+ """
+ )
+
+ log_file = testdir.tmpdir.join("pytest.log").strpath
+
+ result = testdir.runpytest(
+ "-s", "--log-file={}".format(log_file), "--log-file-level=WARNING"
+ )
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(["test_log_file_cli.py PASSED"])
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+ assert os.path.isfile(log_file)
+ with open(log_file) as rfh:
+ contents = rfh.read()
+ assert "This log message will be shown" in contents
+ assert "This log message won't be shown" not in contents
+
+
+def test_log_file_cli_level(testdir):
+ # Default log file level
+ testdir.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_file(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_file_handler.level == logging.INFO
+ logging.getLogger('catchlog').debug("This log message won't be shown")
+ logging.getLogger('catchlog').info("This log message will be shown")
+ print('PASSED')
+ """
+ )
+
+ log_file = testdir.tmpdir.join("pytest.log").strpath
+
+ result = testdir.runpytest(
+ "-s", "--log-file={}".format(log_file), "--log-file-level=INFO"
+ )
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(["test_log_file_cli_level.py PASSED"])
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+ assert os.path.isfile(log_file)
+ with open(log_file) as rfh:
+ contents = rfh.read()
+ assert "This log message will be shown" in contents
+ assert "This log message won't be shown" not in contents
+
+
+def test_log_level_not_changed_by_default(testdir):
+ testdir.makepyfile(
+ """
+ import logging
+ def test_log_file():
+ assert logging.getLogger().level == logging.WARNING
+ """
+ )
+ result = testdir.runpytest("-s")
+ result.stdout.fnmatch_lines("* 1 passed in *")
+
+
+def test_log_file_ini(testdir):
+ log_file = testdir.tmpdir.join("pytest.log").strpath
+
+ testdir.makeini(
+ """
+ [pytest]
+ log_file={}
+ log_file_level=WARNING
+ """.format(
+ log_file
+ )
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_file(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_file_handler.level == logging.WARNING
+ logging.getLogger('catchlog').info("This log message won't be shown")
+ logging.getLogger('catchlog').warning("This log message will be shown")
+ print('PASSED')
+ """
+ )
+
+ result = testdir.runpytest("-s")
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(["test_log_file_ini.py PASSED"])
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+ assert os.path.isfile(log_file)
+ with open(log_file) as rfh:
+ contents = rfh.read()
+ assert "This log message will be shown" in contents
+ assert "This log message won't be shown" not in contents
+
+
+def test_log_file_ini_level(testdir):
+ log_file = testdir.tmpdir.join("pytest.log").strpath
+
+ testdir.makeini(
+ """
+ [pytest]
+ log_file={}
+ log_file_level = INFO
+ """.format(
+ log_file
+ )
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_file(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_file_handler.level == logging.INFO
+ logging.getLogger('catchlog').debug("This log message won't be shown")
+ logging.getLogger('catchlog').info("This log message will be shown")
+ print('PASSED')
+ """
+ )
+
+ result = testdir.runpytest("-s")
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(["test_log_file_ini_level.py PASSED"])
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+ assert os.path.isfile(log_file)
+ with open(log_file) as rfh:
+ contents = rfh.read()
+ assert "This log message will be shown" in contents
+ assert "This log message won't be shown" not in contents
+
+
+@pytest.mark.parametrize("has_capture_manager", [True, False])
+def test_live_logging_suspends_capture(has_capture_manager, request):
+ """Test that capture manager is suspended when we emitting messages for live logging.
+
+ This tests the implementation calls instead of the behavior because it is difficult/impossible to do
+ using ``testdir`` facilities, which do their own capturing.
+
+ We parametrize the test to also make sure _LiveLoggingStreamHandler works correctly if no capture manager plugin
+ is installed.
+ """
+ import logging
+ from functools import partial
+ from _pytest.capture import CaptureManager
+ from _pytest.logging import _LiveLoggingStreamHandler
+
+ class MockCaptureManager:
+ calls = []
+
+ def suspend_global_capture(self):
+ self.calls.append("suspend_global_capture")
+
+ def resume_global_capture(self):
+ self.calls.append("resume_global_capture")
+
+ # sanity check
+ assert CaptureManager.suspend_capture_item
+ assert CaptureManager.resume_global_capture
+
+ class DummyTerminal(six.StringIO):
+
+ def section(self, *args, **kwargs):
+ pass
+
+ out_file = DummyTerminal()
+ capture_manager = MockCaptureManager() if has_capture_manager else None
+ handler = _LiveLoggingStreamHandler(out_file, capture_manager)
+ handler.set_when("call")
+
+ logger = logging.getLogger(__name__ + ".test_live_logging_suspends_capture")
+ logger.addHandler(handler)
+ request.addfinalizer(partial(logger.removeHandler, handler))
+
+ logger.critical("some message")
+ if has_capture_manager:
+ assert (
+ MockCaptureManager.calls
+ == ["suspend_global_capture", "resume_global_capture"]
+ )
+ else:
+ assert MockCaptureManager.calls == []
+ assert out_file.getvalue() == "\nsome message\n"
diff --git a/third_party/python/pytest/testing/python/approx.py b/third_party/python/pytest/testing/python/approx.py
new file mode 100644
index 0000000000..9e25feb0b4
--- /dev/null
+++ b/third_party/python/pytest/testing/python/approx.py
@@ -0,0 +1,406 @@
+# encoding: utf-8
+import operator
+import sys
+import pytest
+import doctest
+
+from pytest import approx
+from operator import eq, ne
+from decimal import Decimal
+from fractions import Fraction
+
+inf, nan = float("inf"), float("nan")
+
+
+class MyDocTestRunner(doctest.DocTestRunner):
+
+ def __init__(self):
+ doctest.DocTestRunner.__init__(self)
+
+ def report_failure(self, out, test, example, got):
+ raise AssertionError(
+ "'{}' evaluates to '{}', not '{}'".format(
+ example.source.strip(), got.strip(), example.want.strip()
+ )
+ )
+
+
+class TestApprox(object):
+
+ def test_repr_string(self):
+ plus_minus = u"\u00b1" if sys.version_info[0] > 2 else u"+-"
+ tol1, tol2, infr = "1.0e-06", "2.0e-06", "inf"
+ assert repr(approx(1.0)) == "1.0 {pm} {tol1}".format(pm=plus_minus, tol1=tol1)
+ assert (
+ repr(approx([1.0, 2.0]))
+ == "approx([1.0 {pm} {tol1}, 2.0 {pm} {tol2}])".format(
+ pm=plus_minus, tol1=tol1, tol2=tol2
+ )
+ )
+ assert (
+ repr(approx((1.0, 2.0)))
+ == "approx((1.0 {pm} {tol1}, 2.0 {pm} {tol2}))".format(
+ pm=plus_minus, tol1=tol1, tol2=tol2
+ )
+ )
+ assert repr(approx(inf)) == "inf"
+ assert repr(approx(1.0, rel=nan)) == "1.0 {pm} ???".format(pm=plus_minus)
+ assert (
+ repr(approx(1.0, rel=inf))
+ == "1.0 {pm} {infr}".format(pm=plus_minus, infr=infr)
+ )
+ assert repr(approx(1.0j, rel=inf)) == "1j"
+
+ # Dictionaries aren't ordered, so we need to check both orders.
+ assert repr(approx({"a": 1.0, "b": 2.0})) in (
+ "approx({{'a': 1.0 {pm} {tol1}, 'b': 2.0 {pm} {tol2}}})".format(
+ pm=plus_minus, tol1=tol1, tol2=tol2
+ ),
+ "approx({{'b': 2.0 {pm} {tol2}, 'a': 1.0 {pm} {tol1}}})".format(
+ pm=plus_minus, tol1=tol1, tol2=tol2
+ ),
+ )
+
+ def test_operator_overloading(self):
+ assert 1 == approx(1, rel=1e-6, abs=1e-12)
+ assert not (1 != approx(1, rel=1e-6, abs=1e-12))
+ assert 10 != approx(1, rel=1e-6, abs=1e-12)
+ assert not (10 == approx(1, rel=1e-6, abs=1e-12))
+
+ def test_exactly_equal(self):
+ examples = [
+ (2.0, 2.0),
+ (0.1e200, 0.1e200),
+ (1.123e-300, 1.123e-300),
+ (12345, 12345.0),
+ (0.0, -0.0),
+ (345678, 345678),
+ (Decimal("1.0001"), Decimal("1.0001")),
+ (Fraction(1, 3), Fraction(-1, -3)),
+ ]
+ for a, x in examples:
+ assert a == approx(x)
+
+ def test_opposite_sign(self):
+ examples = [(eq, 1e-100, -1e-100), (ne, 1e100, -1e100)]
+ for op, a, x in examples:
+ assert op(a, approx(x))
+
+ def test_zero_tolerance(self):
+ within_1e10 = [(1.1e-100, 1e-100), (-1.1e-100, -1e-100)]
+ for a, x in within_1e10:
+ assert x == approx(x, rel=0.0, abs=0.0)
+ assert a != approx(x, rel=0.0, abs=0.0)
+ assert a == approx(x, rel=0.0, abs=5e-101)
+ assert a != approx(x, rel=0.0, abs=5e-102)
+ assert a == approx(x, rel=5e-1, abs=0.0)
+ assert a != approx(x, rel=5e-2, abs=0.0)
+
+ def test_negative_tolerance(self):
+ # Negative tolerances are not allowed.
+ illegal_kwargs = [
+ dict(rel=-1e100),
+ dict(abs=-1e100),
+ dict(rel=1e100, abs=-1e100),
+ dict(rel=-1e100, abs=1e100),
+ dict(rel=-1e100, abs=-1e100),
+ ]
+ for kwargs in illegal_kwargs:
+ with pytest.raises(ValueError):
+ 1.1 == approx(1, **kwargs)
+
+ def test_inf_tolerance(self):
+ # Everything should be equal if the tolerance is infinite.
+ large_diffs = [(1, 1000), (1e-50, 1e50), (-1.0, -1e300), (0.0, 10)]
+ for a, x in large_diffs:
+ assert a != approx(x, rel=0.0, abs=0.0)
+ assert a == approx(x, rel=inf, abs=0.0)
+ assert a == approx(x, rel=0.0, abs=inf)
+ assert a == approx(x, rel=inf, abs=inf)
+
+ def test_inf_tolerance_expecting_zero(self):
+ # If the relative tolerance is zero but the expected value is infinite,
+ # the actual tolerance is a NaN, which should be an error.
+ illegal_kwargs = [dict(rel=inf, abs=0.0), dict(rel=inf, abs=inf)]
+ for kwargs in illegal_kwargs:
+ with pytest.raises(ValueError):
+ 1 == approx(0, **kwargs)
+
+ def test_nan_tolerance(self):
+ illegal_kwargs = [dict(rel=nan), dict(abs=nan), dict(rel=nan, abs=nan)]
+ for kwargs in illegal_kwargs:
+ with pytest.raises(ValueError):
+ 1.1 == approx(1, **kwargs)
+
+ def test_reasonable_defaults(self):
+ # Whatever the defaults are, they should work for numbers close to 1
+ # that have a small amount of floating-point error.
+ assert 0.1 + 0.2 == approx(0.3)
+
+ def test_default_tolerances(self):
+ # This tests the defaults as they are currently set. If you change the
+ # defaults, this test will fail but you should feel free to change it.
+ # None of the other tests (except the doctests) should be affected by
+ # the choice of defaults.
+ examples = [
+ # Relative tolerance used.
+ (eq, 1e100 + 1e94, 1e100),
+ (ne, 1e100 + 2e94, 1e100),
+ (eq, 1e0 + 1e-6, 1e0),
+ (ne, 1e0 + 2e-6, 1e0),
+ # Absolute tolerance used.
+ (eq, 1e-100, +1e-106),
+ (eq, 1e-100, +2e-106),
+ (eq, 1e-100, 0),
+ ]
+ for op, a, x in examples:
+ assert op(a, approx(x))
+
+ def test_custom_tolerances(self):
+ assert 1e8 + 1e0 == approx(1e8, rel=5e-8, abs=5e0)
+ assert 1e8 + 1e0 == approx(1e8, rel=5e-9, abs=5e0)
+ assert 1e8 + 1e0 == approx(1e8, rel=5e-8, abs=5e-1)
+ assert 1e8 + 1e0 != approx(1e8, rel=5e-9, abs=5e-1)
+
+ assert 1e0 + 1e-8 == approx(1e0, rel=5e-8, abs=5e-8)
+ assert 1e0 + 1e-8 == approx(1e0, rel=5e-9, abs=5e-8)
+ assert 1e0 + 1e-8 == approx(1e0, rel=5e-8, abs=5e-9)
+ assert 1e0 + 1e-8 != approx(1e0, rel=5e-9, abs=5e-9)
+
+ assert 1e-8 + 1e-16 == approx(1e-8, rel=5e-8, abs=5e-16)
+ assert 1e-8 + 1e-16 == approx(1e-8, rel=5e-9, abs=5e-16)
+ assert 1e-8 + 1e-16 == approx(1e-8, rel=5e-8, abs=5e-17)
+ assert 1e-8 + 1e-16 != approx(1e-8, rel=5e-9, abs=5e-17)
+
+ def test_relative_tolerance(self):
+ within_1e8_rel = [(1e8 + 1e0, 1e8), (1e0 + 1e-8, 1e0), (1e-8 + 1e-16, 1e-8)]
+ for a, x in within_1e8_rel:
+ assert a == approx(x, rel=5e-8, abs=0.0)
+ assert a != approx(x, rel=5e-9, abs=0.0)
+
+ def test_absolute_tolerance(self):
+ within_1e8_abs = [(1e8 + 9e-9, 1e8), (1e0 + 9e-9, 1e0), (1e-8 + 9e-9, 1e-8)]
+ for a, x in within_1e8_abs:
+ assert a == approx(x, rel=0, abs=5e-8)
+ assert a != approx(x, rel=0, abs=5e-9)
+
+ def test_expecting_zero(self):
+ examples = [
+ (ne, 1e-6, 0.0),
+ (ne, -1e-6, 0.0),
+ (eq, 1e-12, 0.0),
+ (eq, -1e-12, 0.0),
+ (ne, 2e-12, 0.0),
+ (ne, -2e-12, 0.0),
+ (ne, inf, 0.0),
+ (ne, nan, 0.0),
+ ]
+ for op, a, x in examples:
+ assert op(a, approx(x, rel=0.0, abs=1e-12))
+ assert op(a, approx(x, rel=1e-6, abs=1e-12))
+
+ def test_expecting_inf(self):
+ examples = [
+ (eq, inf, inf),
+ (eq, -inf, -inf),
+ (ne, inf, -inf),
+ (ne, 0.0, inf),
+ (ne, nan, inf),
+ ]
+ for op, a, x in examples:
+ assert op(a, approx(x))
+
+ def test_expecting_nan(self):
+ examples = [
+ (eq, nan, nan),
+ (eq, -nan, -nan),
+ (eq, nan, -nan),
+ (ne, 0.0, nan),
+ (ne, inf, nan),
+ ]
+ for op, a, x in examples:
+ # Nothing is equal to NaN by default.
+ assert a != approx(x)
+
+ # If ``nan_ok=True``, then NaN is equal to NaN.
+ assert op(a, approx(x, nan_ok=True))
+
+ def test_int(self):
+ within_1e6 = [(1000001, 1000000), (-1000001, -1000000)]
+ for a, x in within_1e6:
+ assert a == approx(x, rel=5e-6, abs=0)
+ assert a != approx(x, rel=5e-7, abs=0)
+ assert approx(x, rel=5e-6, abs=0) == a
+ assert approx(x, rel=5e-7, abs=0) != a
+
+ def test_decimal(self):
+ within_1e6 = [
+ (Decimal("1.000001"), Decimal("1.0")),
+ (Decimal("-1.000001"), Decimal("-1.0")),
+ ]
+ for a, x in within_1e6:
+ assert a == approx(x)
+ assert a == approx(x, rel=Decimal("5e-6"), abs=0)
+ assert a != approx(x, rel=Decimal("5e-7"), abs=0)
+ assert approx(x, rel=Decimal("5e-6"), abs=0) == a
+ assert approx(x, rel=Decimal("5e-7"), abs=0) != a
+
+ def test_fraction(self):
+ within_1e6 = [
+ (1 + Fraction(1, 1000000), Fraction(1)),
+ (-1 - Fraction(-1, 1000000), Fraction(-1)),
+ ]
+ for a, x in within_1e6:
+ assert a == approx(x, rel=5e-6, abs=0)
+ assert a != approx(x, rel=5e-7, abs=0)
+ assert approx(x, rel=5e-6, abs=0) == a
+ assert approx(x, rel=5e-7, abs=0) != a
+
+ def test_complex(self):
+ within_1e6 = [
+ (1.000001 + 1.0j, 1.0 + 1.0j),
+ (1.0 + 1.000001j, 1.0 + 1.0j),
+ (-1.000001 + 1.0j, -1.0 + 1.0j),
+ (1.0 - 1.000001j, 1.0 - 1.0j),
+ ]
+ for a, x in within_1e6:
+ assert a == approx(x, rel=5e-6, abs=0)
+ assert a != approx(x, rel=5e-7, abs=0)
+ assert approx(x, rel=5e-6, abs=0) == a
+ assert approx(x, rel=5e-7, abs=0) != a
+
+ def test_list(self):
+ actual = [1 + 1e-7, 2 + 1e-8]
+ expected = [1, 2]
+
+ # Return false if any element is outside the tolerance.
+ assert actual == approx(expected, rel=5e-7, abs=0)
+ assert actual != approx(expected, rel=5e-8, abs=0)
+ assert approx(expected, rel=5e-7, abs=0) == actual
+ assert approx(expected, rel=5e-8, abs=0) != actual
+
+ def test_list_wrong_len(self):
+ assert [1, 2] != approx([1])
+ assert [1, 2] != approx([1, 2, 3])
+
+ def test_tuple(self):
+ actual = (1 + 1e-7, 2 + 1e-8)
+ expected = (1, 2)
+
+ # Return false if any element is outside the tolerance.
+ assert actual == approx(expected, rel=5e-7, abs=0)
+ assert actual != approx(expected, rel=5e-8, abs=0)
+ assert approx(expected, rel=5e-7, abs=0) == actual
+ assert approx(expected, rel=5e-8, abs=0) != actual
+
+ def test_tuple_wrong_len(self):
+ assert (1, 2) != approx((1,))
+ assert (1, 2) != approx((1, 2, 3))
+
+ def test_dict(self):
+ actual = {"a": 1 + 1e-7, "b": 2 + 1e-8}
+ # Dictionaries became ordered in python3.6, so switch up the order here
+ # to make sure it doesn't matter.
+ expected = {"b": 2, "a": 1}
+
+ # Return false if any element is outside the tolerance.
+ assert actual == approx(expected, rel=5e-7, abs=0)
+ assert actual != approx(expected, rel=5e-8, abs=0)
+ assert approx(expected, rel=5e-7, abs=0) == actual
+ assert approx(expected, rel=5e-8, abs=0) != actual
+
+ def test_dict_wrong_len(self):
+ assert {"a": 1, "b": 2} != approx({"a": 1})
+ assert {"a": 1, "b": 2} != approx({"a": 1, "c": 2})
+ assert {"a": 1, "b": 2} != approx({"a": 1, "b": 2, "c": 3})
+
+ def test_numpy_array(self):
+ np = pytest.importorskip("numpy")
+
+ actual = np.array([1 + 1e-7, 2 + 1e-8])
+ expected = np.array([1, 2])
+
+ # Return false if any element is outside the tolerance.
+ assert actual == approx(expected, rel=5e-7, abs=0)
+ assert actual != approx(expected, rel=5e-8, abs=0)
+ assert approx(expected, rel=5e-7, abs=0) == expected
+ assert approx(expected, rel=5e-8, abs=0) != actual
+
+ # Should be able to compare lists with numpy arrays.
+ assert list(actual) == approx(expected, rel=5e-7, abs=0)
+ assert list(actual) != approx(expected, rel=5e-8, abs=0)
+ assert actual == approx(list(expected), rel=5e-7, abs=0)
+ assert actual != approx(list(expected), rel=5e-8, abs=0)
+
+ def test_numpy_array_wrong_shape(self):
+ np = pytest.importorskip("numpy")
+
+ a12 = np.array([[1, 2]])
+ a21 = np.array([[1], [2]])
+
+ assert a12 != approx(a21)
+ assert a21 != approx(a12)
+
+ def test_doctests(self):
+ parser = doctest.DocTestParser()
+ test = parser.get_doctest(
+ approx.__doc__, {"approx": approx}, approx.__name__, None, None
+ )
+ runner = MyDocTestRunner()
+ runner.run(test)
+
+ def test_unicode_plus_minus(self, testdir):
+ """
+ Comparing approx instances inside lists should not produce an error in the detailed diff.
+ Integration test for issue #2111.
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_foo():
+ assert [3] == [pytest.approx(4)]
+ """
+ )
+ expected = "4.0e-06"
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*At index 0 diff: 3 != 4 * {}".format(expected), "=* 1 failed in *="]
+ )
+
+ @pytest.mark.parametrize(
+ "op",
+ [
+ pytest.param(operator.le, id="<="),
+ pytest.param(operator.lt, id="<"),
+ pytest.param(operator.ge, id=">="),
+ pytest.param(operator.gt, id=">"),
+ ],
+ )
+ def test_comparison_operator_type_error(self, op):
+ """
+ pytest.approx should raise TypeError for operators other than == and != (#2003).
+ """
+ with pytest.raises(TypeError):
+ op(1, approx(1, rel=1e-6, abs=1e-12))
+
+ def test_numpy_array_with_scalar(self):
+ np = pytest.importorskip("numpy")
+
+ actual = np.array([1 + 1e-7, 1 - 1e-8])
+ expected = 1.0
+
+ assert actual == approx(expected, rel=5e-7, abs=0)
+ assert actual != approx(expected, rel=5e-8, abs=0)
+ assert approx(expected, rel=5e-7, abs=0) == actual
+ assert approx(expected, rel=5e-8, abs=0) != actual
+
+ def test_numpy_scalar_with_array(self):
+ np = pytest.importorskip("numpy")
+
+ actual = 1.0
+ expected = np.array([1 + 1e-7, 1 - 1e-8])
+
+ assert actual == approx(expected, rel=5e-7, abs=0)
+ assert actual != approx(expected, rel=5e-8, abs=0)
+ assert approx(expected, rel=5e-7, abs=0) == actual
+ assert approx(expected, rel=5e-8, abs=0) != actual
diff --git a/third_party/python/pytest/testing/python/collect.py b/third_party/python/pytest/testing/python/collect.py
new file mode 100644
index 0000000000..724504b1af
--- /dev/null
+++ b/third_party/python/pytest/testing/python/collect.py
@@ -0,0 +1,1555 @@
+# -*- coding: utf-8 -*-
+import os
+import sys
+from textwrap import dedent
+
+import _pytest._code
+import pytest
+from _pytest.main import EXIT_NOTESTSCOLLECTED
+from _pytest.nodes import Collector
+
+ignore_parametrized_marks = pytest.mark.filterwarnings(
+ "ignore:Applying marks directly to parameters"
+)
+
+
+class TestModule(object):
+
+ def test_failing_import(self, testdir):
+ modcol = testdir.getmodulecol("import alksdjalskdjalkjals")
+ pytest.raises(Collector.CollectError, modcol.collect)
+
+ def test_import_duplicate(self, testdir):
+ a = testdir.mkdir("a")
+ b = testdir.mkdir("b")
+ p = a.ensure("test_whatever.py")
+ p.pyimport()
+ del sys.modules["test_whatever"]
+ b.ensure("test_whatever.py")
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*import*mismatch*",
+ "*imported*test_whatever*",
+ "*%s*" % a.join("test_whatever.py"),
+ "*not the same*",
+ "*%s*" % b.join("test_whatever.py"),
+ "*HINT*",
+ ]
+ )
+
+ def test_import_prepend_append(self, testdir, monkeypatch):
+ syspath = list(sys.path)
+ monkeypatch.setattr(sys, "path", syspath)
+ root1 = testdir.mkdir("root1")
+ root2 = testdir.mkdir("root2")
+ root1.ensure("x456.py")
+ root2.ensure("x456.py")
+ p = root2.join("test_x456.py")
+ monkeypatch.syspath_prepend(str(root1))
+ p.write(
+ dedent(
+ """\
+ import x456
+ def test():
+ assert x456.__file__.startswith(%r)
+ """
+ % str(root2)
+ )
+ )
+ with root2.as_cwd():
+ reprec = testdir.inline_run("--import-mode=append")
+ reprec.assertoutcome(passed=0, failed=1)
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_syntax_error_in_module(self, testdir):
+ modcol = testdir.getmodulecol("this is a syntax error")
+ pytest.raises(modcol.CollectError, modcol.collect)
+ pytest.raises(modcol.CollectError, modcol.collect)
+
+ def test_module_considers_pluginmanager_at_import(self, testdir):
+ modcol = testdir.getmodulecol("pytest_plugins='xasdlkj',")
+ pytest.raises(ImportError, lambda: modcol.obj)
+
+ def test_invalid_test_module_name(self, testdir):
+ a = testdir.mkdir("a")
+ a.ensure("test_one.part1.py")
+ result = testdir.runpytest("-rw")
+ result.stdout.fnmatch_lines(
+ [
+ "ImportError while importing test module*test_one.part1*",
+ "Hint: make sure your test modules/packages have valid Python names.",
+ ]
+ )
+
+ @pytest.mark.parametrize("verbose", [0, 1, 2])
+ def test_show_traceback_import_error(self, testdir, verbose):
+ """Import errors when collecting modules should display the traceback (#1976).
+
+ With low verbosity we omit pytest and internal modules; otherwise we show all traceback entries.
+ """
+ testdir.makepyfile(
+ foo_traceback_import_error="""
+ from bar_traceback_import_error import NOT_AVAILABLE
+ """,
+ bar_traceback_import_error="",
+ )
+ testdir.makepyfile(
+ """
+ import foo_traceback_import_error
+ """
+ )
+ args = ("-v",) * verbose
+ result = testdir.runpytest(*args)
+ result.stdout.fnmatch_lines(
+ [
+ "ImportError while importing test module*",
+ "Traceback:",
+ "*from bar_traceback_import_error import NOT_AVAILABLE",
+ "*cannot import name *NOT_AVAILABLE*",
+ ]
+ )
+ assert result.ret == 2
+
+ stdout = result.stdout.str()
+ for name in ("_pytest", os.path.join("py", "_path")):
+ if verbose == 2:
+ assert name in stdout
+ else:
+ assert name not in stdout
+
+ def test_show_traceback_import_error_unicode(self, testdir):
+ """Check test modules collected which raise ImportError with unicode messages
+ are handled properly (#2336).
+ """
+ testdir.makepyfile(
+ u"""
+ # -*- coding: utf-8 -*-
+ raise ImportError(u'Something bad happened ☺')
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "ImportError while importing test module*",
+ "Traceback:",
+ "*raise ImportError*Something bad happened*",
+ ]
+ )
+ assert result.ret == 2
+
+
+class TestClass(object):
+
+ def test_class_with_init_warning(self, testdir):
+ testdir.makepyfile(
+ """
+ class TestClass1(object):
+ def __init__(self):
+ pass
+ """
+ )
+ result = testdir.runpytest("-rw")
+ result.stdout.fnmatch_lines(
+ [
+ "*cannot collect test class 'TestClass1' because it has a __init__ constructor"
+ ]
+ )
+
+ def test_class_subclassobject(self, testdir):
+ testdir.getmodulecol(
+ """
+ class test(object):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*collected 0*"])
+
+ def test_static_method(self, testdir):
+ """Support for collecting staticmethod tests (#2528, #2699)"""
+ testdir.getmodulecol(
+ """
+ import pytest
+ class Test(object):
+ @staticmethod
+ def test_something():
+ pass
+
+ @pytest.fixture
+ def fix(self):
+ return 1
+
+ @staticmethod
+ def test_fix(fix):
+ assert fix == 1
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*collected 2 items*", "*2 passed in*"])
+
+ def test_setup_teardown_class_as_classmethod(self, testdir):
+ testdir.makepyfile(
+ test_mod1="""
+ class TestClassMethod(object):
+ @classmethod
+ def setup_class(cls):
+ pass
+ def test_1(self):
+ pass
+ @classmethod
+ def teardown_class(cls):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_issue1035_obj_has_getattr(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ class Chameleon(object):
+ def __getattr__(self, name):
+ return True
+ chameleon = Chameleon()
+ """
+ )
+ colitems = modcol.collect()
+ assert len(colitems) == 0
+
+ def test_issue1579_namedtuple(self, testdir):
+ testdir.makepyfile(
+ """
+ import collections
+
+ TestCase = collections.namedtuple('TestCase', ['a'])
+ """
+ )
+ result = testdir.runpytest("-rw")
+ result.stdout.fnmatch_lines(
+ "*cannot collect test class 'TestCase' "
+ "because it has a __new__ constructor*"
+ )
+
+ def test_issue2234_property(self, testdir):
+ testdir.makepyfile(
+ """
+ class TestCase(object):
+ @property
+ def prop(self):
+ raise NotImplementedError()
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+
+
+class TestGenerator(object):
+
+ def test_generative_functions(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ def func1(arg, arg2):
+ assert arg == arg2
+
+ def test_gen():
+ yield func1, 17, 3*5
+ yield func1, 42, 6*7
+ """
+ )
+ colitems = modcol.collect()
+ assert len(colitems) == 1
+ gencol = colitems[0]
+ assert isinstance(gencol, pytest.Generator)
+ gencolitems = gencol.collect()
+ assert len(gencolitems) == 2
+ assert isinstance(gencolitems[0], pytest.Function)
+ assert isinstance(gencolitems[1], pytest.Function)
+ assert gencolitems[0].name == "[0]"
+ assert gencolitems[0].obj.__name__ == "func1"
+
+ def test_generative_methods(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ def func1(arg, arg2):
+ assert arg == arg2
+ class TestGenMethods(object):
+ def test_gen(self):
+ yield func1, 17, 3*5
+ yield func1, 42, 6*7
+ """
+ )
+ gencol = modcol.collect()[0].collect()[0].collect()[0]
+ assert isinstance(gencol, pytest.Generator)
+ gencolitems = gencol.collect()
+ assert len(gencolitems) == 2
+ assert isinstance(gencolitems[0], pytest.Function)
+ assert isinstance(gencolitems[1], pytest.Function)
+ assert gencolitems[0].name == "[0]"
+ assert gencolitems[0].obj.__name__ == "func1"
+
+ def test_generative_functions_with_explicit_names(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ def func1(arg, arg2):
+ assert arg == arg2
+
+ def test_gen():
+ yield "seventeen", func1, 17, 3*5
+ yield "fortytwo", func1, 42, 6*7
+ """
+ )
+ colitems = modcol.collect()
+ assert len(colitems) == 1
+ gencol = colitems[0]
+ assert isinstance(gencol, pytest.Generator)
+ gencolitems = gencol.collect()
+ assert len(gencolitems) == 2
+ assert isinstance(gencolitems[0], pytest.Function)
+ assert isinstance(gencolitems[1], pytest.Function)
+ assert gencolitems[0].name == "['seventeen']"
+ assert gencolitems[0].obj.__name__ == "func1"
+ assert gencolitems[1].name == "['fortytwo']"
+ assert gencolitems[1].obj.__name__ == "func1"
+
+ def test_generative_functions_unique_explicit_names(self, testdir):
+ # generative
+ modcol = testdir.getmodulecol(
+ """
+ def func(): pass
+ def test_gen():
+ yield "name", func
+ yield "name", func
+ """
+ )
+ colitems = modcol.collect()
+ assert len(colitems) == 1
+ gencol = colitems[0]
+ assert isinstance(gencol, pytest.Generator)
+ pytest.raises(ValueError, "gencol.collect()")
+
+ def test_generative_methods_with_explicit_names(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ def func1(arg, arg2):
+ assert arg == arg2
+ class TestGenMethods(object):
+ def test_gen(self):
+ yield "m1", func1, 17, 3*5
+ yield "m2", func1, 42, 6*7
+ """
+ )
+ gencol = modcol.collect()[0].collect()[0].collect()[0]
+ assert isinstance(gencol, pytest.Generator)
+ gencolitems = gencol.collect()
+ assert len(gencolitems) == 2
+ assert isinstance(gencolitems[0], pytest.Function)
+ assert isinstance(gencolitems[1], pytest.Function)
+ assert gencolitems[0].name == "['m1']"
+ assert gencolitems[0].obj.__name__ == "func1"
+ assert gencolitems[1].name == "['m2']"
+ assert gencolitems[1].obj.__name__ == "func1"
+
+ def test_order_of_execution_generator_same_codeline(self, testdir, tmpdir):
+ o = testdir.makepyfile(
+ """
+ from __future__ import print_function
+ def test_generative_order_of_execution():
+ import py, pytest
+ test_list = []
+ expected_list = list(range(6))
+
+ def list_append(item):
+ test_list.append(item)
+
+ def assert_order_of_execution():
+ print('expected order', expected_list)
+ print('but got ', test_list)
+ assert test_list == expected_list
+
+ for i in expected_list:
+ yield list_append, i
+ yield assert_order_of_execution
+ """
+ )
+ reprec = testdir.inline_run(o)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert passed == 7
+ assert not skipped and not failed
+
+ def test_order_of_execution_generator_different_codeline(self, testdir):
+ o = testdir.makepyfile(
+ """
+ from __future__ import print_function
+ def test_generative_tests_different_codeline():
+ import py, pytest
+ test_list = []
+ expected_list = list(range(3))
+
+ def list_append_2():
+ test_list.append(2)
+
+ def list_append_1():
+ test_list.append(1)
+
+ def list_append_0():
+ test_list.append(0)
+
+ def assert_order_of_execution():
+ print('expected order', expected_list)
+ print('but got ', test_list)
+ assert test_list == expected_list
+
+ yield list_append_0
+ yield list_append_1
+ yield list_append_2
+ yield assert_order_of_execution
+ """
+ )
+ reprec = testdir.inline_run(o)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert passed == 4
+ assert not skipped and not failed
+
+ def test_setupstate_is_preserved_134(self, testdir):
+        # yield-based tests are messy with respect to setupstate because
+        # they already invoke setup functions during collection and then
+        # again when they are run. For now, we want to make sure that the
+        # old 1.3.4 behaviour is preserved: all yielded functions share
+        # the same "self" instance that was used during collection.
+ o = testdir.makepyfile(
+ """
+ setuplist = []
+ class TestClass(object):
+ def setup_method(self, func):
+ #print "setup_method", self, func
+ setuplist.append(self)
+ self.init = 42
+
+ def teardown_method(self, func):
+ self.init = None
+
+ def test_func1(self):
+ pass
+
+ def test_func2(self):
+ yield self.func2
+ yield self.func2
+
+ def func2(self):
+ assert self.init
+
+ def test_setuplist():
+ # once for test_func2 during collection
+ # once for test_func1 during test run
+ # once for test_func2 during test run
+ #print setuplist
+ assert len(setuplist) == 3, len(setuplist)
+ assert setuplist[0] == setuplist[2], setuplist
+ assert setuplist[1] != setuplist[2], setuplist
+ """
+ )
+ reprec = testdir.inline_run(o, "-v")
+ passed, skipped, failed = reprec.countoutcomes()
+ assert passed == 4
+ assert not skipped and not failed
+
+
+class TestFunction(object):
+
+ def test_getmodulecollector(self, testdir):
+ item = testdir.getitem("def test_func(): pass")
+ modcol = item.getparent(pytest.Module)
+ assert isinstance(modcol, pytest.Module)
+ assert hasattr(modcol.obj, "test_func")
+
+ def test_function_as_object_instance_ignored(self, testdir):
+ testdir.makepyfile(
+ """
+ class A(object):
+ def __call__(self, tmpdir):
+ 0/0
+
+ test_a = A()
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome()
+
+ def test_function_equality(self, testdir, tmpdir):
+ from _pytest.fixtures import FixtureManager
+
+ config = testdir.parseconfigure()
+ session = testdir.Session(config)
+ session._fixturemanager = FixtureManager(session)
+
+ def func1():
+ pass
+
+ def func2():
+ pass
+
+ f1 = pytest.Function(
+ name="name", parent=session, config=config, args=(1,), callobj=func1
+ )
+ assert f1 == f1
+ f2 = pytest.Function(name="name", config=config, callobj=func2, parent=session)
+ assert f1 != f2
+
+ def test_issue197_parametrize_emptyset(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('arg', [])
+ def test_function(arg):
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(skipped=1)
+
+ def test_single_tuple_unwraps_values(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize(('arg',), [(1,)])
+ def test_function(arg):
+ assert arg == 1
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_issue213_parametrize_value_no_equal(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ class A(object):
+ def __eq__(self, other):
+ raise ValueError("not possible")
+ @pytest.mark.parametrize('arg', [A()])
+ def test_function(arg):
+ assert arg.__class__.__name__ == "A"
+ """
+ )
+ reprec = testdir.inline_run("--fulltrace")
+ reprec.assertoutcome(passed=1)
+
+ def test_parametrize_with_non_hashable_values(self, testdir):
+ """Test parametrization with non-hashable values."""
+ testdir.makepyfile(
+ """
+ archival_mapping = {
+ '1.0': {'tag': '1.0'},
+ '1.2.2a1': {'tag': 'release-1.2.2a1'},
+ }
+
+ import pytest
+ @pytest.mark.parametrize('key value'.split(),
+ archival_mapping.items())
+ def test_archival_to_version(key, value):
+ assert key in archival_mapping
+ assert value == archival_mapping[key]
+ """
+ )
+ rec = testdir.inline_run()
+ rec.assertoutcome(passed=2)
+
+ def test_parametrize_with_non_hashable_values_indirect(self, testdir):
+        """Test parametrization with non-hashable values using indirect parametrization."""
+ testdir.makepyfile(
+ """
+ archival_mapping = {
+ '1.0': {'tag': '1.0'},
+ '1.2.2a1': {'tag': 'release-1.2.2a1'},
+ }
+
+ import pytest
+
+ @pytest.fixture
+ def key(request):
+ return request.param
+
+ @pytest.fixture
+ def value(request):
+ return request.param
+
+ @pytest.mark.parametrize('key value'.split(),
+ archival_mapping.items(), indirect=True)
+ def test_archival_to_version(key, value):
+ assert key in archival_mapping
+ assert value == archival_mapping[key]
+ """
+ )
+ rec = testdir.inline_run()
+ rec.assertoutcome(passed=2)
+
+ def test_parametrize_overrides_fixture(self, testdir):
+        """Test parametrization when a parameter overrides an existing fixture with the same name."""
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def value():
+ return 'value'
+
+ @pytest.mark.parametrize('value',
+ ['overridden'])
+ def test_overridden_via_param(value):
+ assert value == 'overridden'
+
+ @pytest.mark.parametrize('somevalue', ['overridden'])
+ def test_not_overridden(value, somevalue):
+ assert value == 'value'
+ assert somevalue == 'overridden'
+
+ @pytest.mark.parametrize('other,value', [('foo', 'overridden')])
+ def test_overridden_via_multiparam(other, value):
+ assert other == 'foo'
+ assert value == 'overridden'
+ """
+ )
+ rec = testdir.inline_run()
+ rec.assertoutcome(passed=3)
+
+ def test_parametrize_overrides_parametrized_fixture(self, testdir):
+        """Test parametrization when a parameter overrides an existing parametrized fixture with the same name."""
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2])
+ def value(request):
+ return request.param
+
+ @pytest.mark.parametrize('value',
+ ['overridden'])
+ def test_overridden_via_param(value):
+ assert value == 'overridden'
+ """
+ )
+ rec = testdir.inline_run()
+ rec.assertoutcome(passed=1)
+
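+    # The @ignore_parametrized_marks tests below use the legacy style of
+    # attaching marks by wrapping parametrize values in mark objects, e.g.
+    # pytest.mark.bar(pytest.mark.baz(2)).  The same parametrization can
+    # also be written with pytest.param; a rough, illustrative sketch using
+    # the same arbitrary marks as the test below:
+    #
+    #     @pytest.mark.parametrize(
+    #         "arg", [1, pytest.param(2, marks=[pytest.mark.bar, pytest.mark.baz])]
+    #     )
+    #     def test_function(arg):
+    #         pass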
+ @ignore_parametrized_marks
+ def test_parametrize_with_mark(self, testdir):
+ items = testdir.getitems(
+ """
+ import pytest
+ @pytest.mark.foo
+ @pytest.mark.parametrize('arg', [
+ 1,
+ pytest.mark.bar(pytest.mark.baz(2))
+ ])
+ def test_function(arg):
+ pass
+ """
+ )
+ keywords = [item.keywords for item in items]
+ assert (
+ "foo" in keywords[0]
+ and "bar" not in keywords[0]
+ and "baz" not in keywords[0]
+ )
+ assert "foo" in keywords[1] and "bar" in keywords[1] and "baz" in keywords[1]
+
+ def test_function_equality_with_callspec(self, testdir, tmpdir):
+ items = testdir.getitems(
+ """
+ import pytest
+ @pytest.mark.parametrize('arg', [1,2])
+ def test_function(arg):
+ pass
+ """
+ )
+ assert items[0] != items[1]
+ assert not (items[0] == items[1])
+
+ def test_pyfunc_call(self, testdir):
+ item = testdir.getitem("def test_func(): raise ValueError")
+ config = item.config
+
+ class MyPlugin1(object):
+
+ def pytest_pyfunc_call(self, pyfuncitem):
+ raise ValueError
+
+ class MyPlugin2(object):
+
+ def pytest_pyfunc_call(self, pyfuncitem):
+ return True
+
+ config.pluginmanager.register(MyPlugin1())
+ config.pluginmanager.register(MyPlugin2())
+ config.hook.pytest_runtest_setup(item=item)
+ config.hook.pytest_pyfunc_call(pyfuncitem=item)
+
+ def test_multiple_parametrize(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ import pytest
+ @pytest.mark.parametrize('x', [0, 1])
+ @pytest.mark.parametrize('y', [2, 3])
+ def test1(x, y):
+ pass
+ """
+ )
+ colitems = modcol.collect()
+ assert colitems[0].name == "test1[2-0]"
+ assert colitems[1].name == "test1[2-1]"
+ assert colitems[2].name == "test1[3-0]"
+ assert colitems[3].name == "test1[3-1]"
+
+ def test_issue751_multiple_parametrize_with_ids(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ import pytest
+ @pytest.mark.parametrize('x', [0], ids=['c'])
+ @pytest.mark.parametrize('y', [0, 1], ids=['a', 'b'])
+ class Test(object):
+ def test1(self, x, y):
+ pass
+ def test2(self, x, y):
+ pass
+ """
+ )
+ colitems = modcol.collect()[0].collect()[0].collect()
+ assert colitems[0].name == "test1[a-c]"
+ assert colitems[1].name == "test1[b-c]"
+ assert colitems[2].name == "test2[a-c]"
+ assert colitems[3].name == "test2[b-c]"
+
+ @ignore_parametrized_marks
+ def test_parametrize_skipif(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ m = pytest.mark.skipif('True')
+
+ @pytest.mark.parametrize('x', [0, 1, m(2)])
+ def test_skip_if(x):
+ assert x < 2
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("* 2 passed, 1 skipped in *")
+
+ @ignore_parametrized_marks
+ def test_parametrize_skip(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ m = pytest.mark.skip('')
+
+ @pytest.mark.parametrize('x', [0, 1, m(2)])
+ def test_skip(x):
+ assert x < 2
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("* 2 passed, 1 skipped in *")
+
+ @ignore_parametrized_marks
+ def test_parametrize_skipif_no_skip(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ m = pytest.mark.skipif('False')
+
+ @pytest.mark.parametrize('x', [0, 1, m(2)])
+ def test_skipif_no_skip(x):
+ assert x < 2
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("* 1 failed, 2 passed in *")
+
+ @ignore_parametrized_marks
+ def test_parametrize_xfail(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ m = pytest.mark.xfail('True')
+
+ @pytest.mark.parametrize('x', [0, 1, m(2)])
+ def test_xfail(x):
+ assert x < 2
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("* 2 passed, 1 xfailed in *")
+
+ @ignore_parametrized_marks
+ def test_parametrize_passed(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ m = pytest.mark.xfail('True')
+
+ @pytest.mark.parametrize('x', [0, 1, m(2)])
+ def test_xfail(x):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("* 2 passed, 1 xpassed in *")
+
+ @ignore_parametrized_marks
+ def test_parametrize_xfail_passed(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ m = pytest.mark.xfail('False')
+
+ @pytest.mark.parametrize('x', [0, 1, m(2)])
+ def test_passed(x):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("* 3 passed in *")
+
+ def test_function_original_name(self, testdir):
+ items = testdir.getitems(
+ """
+ import pytest
+ @pytest.mark.parametrize('arg', [1,2])
+ def test_func(arg):
+ pass
+ """
+ )
+ assert [x.originalname for x in items] == ["test_func", "test_func"]
+
+
+class TestSorting(object):
+
+ def test_check_equality(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ def test_pass(): pass
+ def test_fail(): assert 0
+ """
+ )
+ fn1 = testdir.collect_by_name(modcol, "test_pass")
+ assert isinstance(fn1, pytest.Function)
+ fn2 = testdir.collect_by_name(modcol, "test_pass")
+ assert isinstance(fn2, pytest.Function)
+
+ assert fn1 == fn2
+ assert fn1 != modcol
+ if sys.version_info < (3, 0):
+ assert cmp(fn1, fn2) == 0 # NOQA
+ assert hash(fn1) == hash(fn2)
+
+ fn3 = testdir.collect_by_name(modcol, "test_fail")
+ assert isinstance(fn3, pytest.Function)
+ assert not (fn1 == fn3)
+ assert fn1 != fn3
+
+ for fn in fn1, fn2, fn3:
+ assert fn != 3
+ assert fn != modcol
+ assert fn != [1, 2, 3]
+ assert [1, 2, 3] != fn
+ assert modcol != fn
+
+ def test_allow_sane_sorting_for_decorators(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ def dec(f):
+ g = lambda: f(2)
+ g.place_as = f
+ return g
+
+
+ def test_b(y):
+ pass
+ test_b = dec(test_b)
+
+ def test_a(y):
+ pass
+ test_a = dec(test_a)
+ """
+ )
+ colitems = modcol.collect()
+ assert len(colitems) == 2
+ assert [item.name for item in colitems] == ["test_b", "test_a"]
+
+
+class TestConftestCustomization(object):
+
+ def test_pytest_pycollect_module(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ class MyModule(pytest.Module):
+ pass
+ def pytest_pycollect_makemodule(path, parent):
+ if path.basename == "test_xyz.py":
+ return MyModule(path, parent)
+ """
+ )
+ testdir.makepyfile("def test_some(): pass")
+ testdir.makepyfile(test_xyz="def test_func(): pass")
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*<Module*test_pytest*", "*<MyModule*xyz*"])
+
+ def test_customized_pymakemodule_issue205_subdir(self, testdir):
+ b = testdir.mkdir("a").mkdir("b")
+ b.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_pycollect_makemodule():
+ outcome = yield
+ mod = outcome.get_result()
+ mod.obj.hello = "world"
+ """
+ )
+ )
+ b.join("test_module.py").write(
+ _pytest._code.Source(
+ """
+ def test_hello():
+ assert hello == "world"
+ """
+ )
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_customized_pymakeitem(self, testdir):
+ b = testdir.mkdir("a").mkdir("b")
+ b.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_pycollect_makeitem():
+ outcome = yield
+ if outcome.excinfo is None:
+ result = outcome.get_result()
+ if result:
+ for func in result:
+ func._some123 = "world"
+ """
+ )
+ )
+ b.join("test_module.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+
+ @pytest.fixture()
+ def obj(request):
+ return request.node._some123
+ def test_hello(obj):
+ assert obj == "world"
+ """
+ )
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_pytest_pycollect_makeitem(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ class MyFunction(pytest.Function):
+ pass
+ def pytest_pycollect_makeitem(collector, name, obj):
+ if name == "some":
+ return MyFunction(name, collector)
+ """
+ )
+ testdir.makepyfile("def some(): pass")
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*MyFunction*some*"])
+
+ def test_makeitem_non_underscore(self, testdir, monkeypatch):
+ modcol = testdir.getmodulecol("def _hello(): pass")
+ values = []
+ monkeypatch.setattr(
+ pytest.Module, "makeitem", lambda self, name, obj: values.append(name)
+ )
+ values = modcol.collect()
+ assert "_hello" not in values
+
+ def test_issue2369_collect_module_fileext(self, testdir):
+ """Ensure we can collect files with weird file extensions as Python
+ modules (#2369)"""
+ # We'll implement a little finder and loader to import files containing
+ # Python source code whose file extension is ".narf".
+ testdir.makeconftest(
+ """
+ import sys, os, imp
+ from _pytest.python import Module
+
+ class Loader(object):
+ def load_module(self, name):
+ return imp.load_source(name, name + ".narf")
+ class Finder(object):
+ def find_module(self, name, path=None):
+ if os.path.exists(name + ".narf"):
+ return Loader()
+ sys.meta_path.append(Finder())
+
+ def pytest_collect_file(path, parent):
+ if path.ext == ".narf":
+ return Module(path, parent)"""
+ )
+ testdir.makefile(
+ ".narf",
+ """
+ def test_something():
+ assert 1 + 1 == 2""",
+ )
+ # Use runpytest_subprocess, since we're futzing with sys.meta_path.
+ result = testdir.runpytest_subprocess()
+ result.stdout.fnmatch_lines("*1 passed*")
+
+
+def test_setup_only_available_in_subdir(testdir):
+ sub1 = testdir.mkpydir("sub1")
+ sub2 = testdir.mkpydir("sub2")
+ sub1.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+ def pytest_runtest_setup(item):
+ assert item.fspath.purebasename == "test_in_sub1"
+ def pytest_runtest_call(item):
+ assert item.fspath.purebasename == "test_in_sub1"
+ def pytest_runtest_teardown(item):
+ assert item.fspath.purebasename == "test_in_sub1"
+ """
+ )
+ )
+ sub2.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+ def pytest_runtest_setup(item):
+ assert item.fspath.purebasename == "test_in_sub2"
+ def pytest_runtest_call(item):
+ assert item.fspath.purebasename == "test_in_sub2"
+ def pytest_runtest_teardown(item):
+ assert item.fspath.purebasename == "test_in_sub2"
+ """
+ )
+ )
+ sub1.join("test_in_sub1.py").write("def test_1(): pass")
+ sub2.join("test_in_sub2.py").write("def test_2(): pass")
+ result = testdir.runpytest("-v", "-s")
+ result.assert_outcomes(passed=2)
+
+
+def test_modulecol_roundtrip(testdir):
+ modcol = testdir.getmodulecol("pass", withinit=True)
+ trail = modcol.nodeid
+ newcol = modcol.session.perform_collect([trail], genitems=0)[0]
+ assert modcol.name == newcol.name
+
+
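+# TestTracebackCutting checks that pytest hides its own internal frames from
+# reported tracebacks (for fixture setup errors, collection errors, and
+# skips) unless the --fulltrace option is given, in which case all traceback
+# entries are shown.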
+class TestTracebackCutting(object):
+
+ def test_skip_simple(self):
+ excinfo = pytest.raises(pytest.skip.Exception, 'pytest.skip("xxx")')
+ assert excinfo.traceback[-1].frame.code.name == "skip"
+ assert excinfo.traceback[-1].ishidden()
+
+ def test_traceback_argsetup(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def hello(request):
+ raise ValueError("xyz")
+ """
+ )
+ p = testdir.makepyfile("def test(hello): pass")
+ result = testdir.runpytest(p)
+ assert result.ret != 0
+ out = result.stdout.str()
+ assert "xyz" in out
+ assert "conftest.py:5: ValueError" in out
+ numentries = out.count("_ _ _") # separator for traceback entries
+ assert numentries == 0
+
+ result = testdir.runpytest("--fulltrace", p)
+ out = result.stdout.str()
+ assert "conftest.py:5: ValueError" in out
+ numentries = out.count("_ _ _ _") # separator for traceback entries
+ assert numentries > 3
+
+ def test_traceback_error_during_import(self, testdir):
+ testdir.makepyfile(
+ """
+ x = 1
+ x = 2
+ x = 17
+ asd
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret != 0
+ out = result.stdout.str()
+ assert "x = 1" not in out
+ assert "x = 2" not in out
+ result.stdout.fnmatch_lines([" *asd*", "E*NameError*"])
+ result = testdir.runpytest("--fulltrace")
+ out = result.stdout.str()
+ assert "x = 1" in out
+ assert "x = 2" in out
+ result.stdout.fnmatch_lines([">*asd*", "E*NameError*"])
+
+ def test_traceback_filter_error_during_fixture_collection(self, testdir):
+ """integration test for issue #995.
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+
+ def fail_me(func):
+ ns = {}
+ exec('def w(): raise ValueError("fail me")', ns)
+ return ns['w']
+
+ @pytest.fixture(scope='class')
+ @fail_me
+ def fail_fixture():
+ pass
+
+ def test_failing_fixture(fail_fixture):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret != 0
+ out = result.stdout.str()
+ assert "INTERNALERROR>" not in out
+ result.stdout.fnmatch_lines(["*ValueError: fail me*", "* 1 error in *"])
+
+ def test_filter_traceback_generated_code(self):
+ """test that filter_traceback() works with the fact that
+        the py.code.Code.path attribute might return a str object.
+        In this case, one of the entries in the traceback was produced by
+ dynamically generated code.
+ See: https://bitbucket.org/pytest-dev/py/issues/71
+ This fixes #995.
+ """
+ from _pytest.python import filter_traceback
+
+ try:
+ ns = {}
+ exec("def foo(): raise ValueError", ns)
+ ns["foo"]()
+ except ValueError:
+ _, _, tb = sys.exc_info()
+
+ tb = _pytest._code.Traceback(tb)
+ assert isinstance(tb[-1].path, str)
+ assert not filter_traceback(tb[-1])
+
+ def test_filter_traceback_path_no_longer_valid(self, testdir):
+ """test that filter_traceback() works with the fact that
+        the py.code.Code.path attribute might return a str object.
+ In this case, one of the files in the traceback no longer exists.
+ This fixes #1133.
+ """
+ from _pytest.python import filter_traceback
+
+ testdir.syspathinsert()
+ testdir.makepyfile(
+ filter_traceback_entry_as_str="""
+ def foo():
+ raise ValueError
+ """
+ )
+ try:
+ import filter_traceback_entry_as_str
+
+ filter_traceback_entry_as_str.foo()
+ except ValueError:
+ _, _, tb = sys.exc_info()
+
+ testdir.tmpdir.join("filter_traceback_entry_as_str.py").remove()
+ tb = _pytest._code.Traceback(tb)
+ assert isinstance(tb[-1].path, str)
+ assert filter_traceback(tb[-1])
+
+
+class TestReportInfo(object):
+
+ def test_itemreport_reportinfo(self, testdir, linecomp):
+ testdir.makeconftest(
+ """
+ import pytest
+ class MyFunction(pytest.Function):
+ def reportinfo(self):
+ return "ABCDE", 42, "custom"
+ def pytest_pycollect_makeitem(collector, name, obj):
+ if name == "test_func":
+ return MyFunction(name, parent=collector)
+ """
+ )
+ item = testdir.getitem("def test_func(): pass")
+ item.config.pluginmanager.getplugin("runner")
+ assert item.location == ("ABCDE", 42, "custom")
+
+ def test_func_reportinfo(self, testdir):
+ item = testdir.getitem("def test_func(): pass")
+ fspath, lineno, modpath = item.reportinfo()
+ assert fspath == item.fspath
+ assert lineno == 0
+ assert modpath == "test_func"
+
+ def test_class_reportinfo(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ # lineno 0
+ class TestClass(object):
+ def test_hello(self): pass
+ """
+ )
+ classcol = testdir.collect_by_name(modcol, "TestClass")
+ fspath, lineno, msg = classcol.reportinfo()
+ assert fspath == modcol.fspath
+ assert lineno == 1
+ assert msg == "TestClass"
+
+ def test_generator_reportinfo(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ # lineno 0
+ def test_gen():
+ def check(x):
+ assert x
+ yield check, 3
+ """
+ )
+ gencol = testdir.collect_by_name(modcol, "test_gen")
+ fspath, lineno, modpath = gencol.reportinfo()
+ assert fspath == modcol.fspath
+ assert lineno == 1
+ assert modpath == "test_gen"
+
+ genitem = gencol.collect()[0]
+ fspath, lineno, modpath = genitem.reportinfo()
+ assert fspath == modcol.fspath
+ assert lineno == 2
+ assert modpath == "test_gen[0]"
+ """
+ def test_func():
+ pass
+ def test_genfunc():
+ def check(x):
+ pass
+ yield check, 3
+ class TestClass(object):
+ def test_method(self):
+ pass
+ """
+
+ def test_reportinfo_with_nasty_getattr(self, testdir):
+ # https://github.com/pytest-dev/pytest/issues/1204
+ modcol = testdir.getmodulecol(
+ """
+ # lineno 0
+ class TestClass(object):
+ def __getattr__(self, name):
+ return "this is not an int"
+
+ def test_foo(self):
+ pass
+ """
+ )
+ classcol = testdir.collect_by_name(modcol, "TestClass")
+ instance = classcol.collect()[0]
+ fspath, lineno, msg = instance.reportinfo()
+
+
+def test_customized_python_discovery(testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ python_files=check_*.py
+ python_classes=Check
+ python_functions=check
+ """
+ )
+ p = testdir.makepyfile(
+ """
+ def check_simple():
+ pass
+ class CheckMyApp(object):
+ def check_meth(self):
+ pass
+ """
+ )
+ p2 = p.new(basename=p.basename.replace("test", "check"))
+ p.move(p2)
+ result = testdir.runpytest("--collect-only", "-s")
+ result.stdout.fnmatch_lines(
+ ["*check_customized*", "*check_simple*", "*CheckMyApp*", "*check_meth*"]
+ )
+
+ result = testdir.runpytest()
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+
+def test_customized_python_discovery_functions(testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ python_functions=_test
+ """
+ )
+ testdir.makepyfile(
+ """
+ def _test_underscore():
+ pass
+ """
+ )
+ result = testdir.runpytest("--collect-only", "-s")
+ result.stdout.fnmatch_lines(["*_test_underscore*"])
+
+ result = testdir.runpytest()
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_collector_attributes(testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ def pytest_pycollect_makeitem(collector):
+ assert collector.Function == pytest.Function
+ assert collector.Class == pytest.Class
+ assert collector.Instance == pytest.Instance
+ assert collector.Module == pytest.Module
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_hello():
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_customize_through_attributes(testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ class MyFunction(pytest.Function):
+ pass
+ class MyInstance(pytest.Instance):
+ Function = MyFunction
+ class MyClass(pytest.Class):
+ Instance = MyInstance
+
+ def pytest_pycollect_makeitem(collector, name, obj):
+ if name.startswith("MyTestClass"):
+ return MyClass(name, parent=collector)
+ """
+ )
+ testdir.makepyfile(
+ """
+ class MyTestClass(object):
+ def test_hello(self):
+ pass
+ """
+ )
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(
+ ["*MyClass*", "*MyInstance*", "*MyFunction*test_hello*"]
+ )
+
+
+def test_unorderable_types(testdir):
+ testdir.makepyfile(
+ """
+ class TestJoinEmpty(object):
+ pass
+
+ def make_test():
+ class Test(object):
+ pass
+ Test.__name__ = "TestFoo"
+ return Test
+ TestFoo = make_test()
+ """
+ )
+ result = testdir.runpytest()
+ assert "TypeError" not in result.stdout.str()
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+
+
+def test_collect_functools_partial(testdir):
+ """
+    Test that collection of functools.partial objects works, and that
+    arguments to the wrapped functions are handled correctly (see #811).
+ """
+ testdir.makepyfile(
+ """
+ import functools
+ import pytest
+
+ @pytest.fixture
+ def fix1():
+ return 'fix1'
+
+ @pytest.fixture
+ def fix2():
+ return 'fix2'
+
+ def check1(i, fix1):
+ assert i == 2
+ assert fix1 == 'fix1'
+
+ def check2(fix1, i):
+ assert i == 2
+ assert fix1 == 'fix1'
+
+ def check3(fix1, i, fix2):
+ assert i == 2
+ assert fix1 == 'fix1'
+ assert fix2 == 'fix2'
+
+ test_ok_1 = functools.partial(check1, i=2)
+ test_ok_2 = functools.partial(check1, i=2, fix1='fix1')
+ test_ok_3 = functools.partial(check1, 2)
+ test_ok_4 = functools.partial(check2, i=2)
+ test_ok_5 = functools.partial(check3, i=2)
+ test_ok_6 = functools.partial(check3, i=2, fix1='fix1')
+
+ test_fail_1 = functools.partial(check2, 2)
+ test_fail_2 = functools.partial(check3, 2)
+ """
+ )
+ result = testdir.inline_run()
+ result.assertoutcome(passed=6, failed=2)
+
+
+def test_dont_collect_non_function_callable(testdir):
+ """Test for issue https://github.com/pytest-dev/pytest/issues/331
+
+ In this case an INTERNALERROR occurred trying to report the failure of
+    a test like this one because pytest failed to get the source lines.
+ """
+ testdir.makepyfile(
+ """
+ class Oh(object):
+ def __call__(self):
+ pass
+
+ test_a = Oh()
+
+ def test_real():
+ pass
+ """
+ )
+ result = testdir.runpytest("-rw")
+ result.stdout.fnmatch_lines(
+ [
+ "*collected 1 item*",
+ "*cannot collect 'test_a' because it is not a function*",
+ "*1 passed, 1 warnings in *",
+ ]
+ )
+
+
+def test_class_injection_does_not_break_collection(testdir):
+    """Tests that modifying a test class during collection does not abort the test run.
+
+    The error should not occur if the TestClass itself is modified during
+    collection time and the original method list is still used for collection.
+ """
+ testdir.makeconftest(
+ """
+ from test_inject import TestClass
+ def pytest_generate_tests(metafunc):
+ TestClass.changed_var = {}
+ """
+ )
+ testdir.makepyfile(
+ test_inject='''
+ class TestClass(object):
+ def test_injection(self):
+ """Test being parametrized."""
+ pass
+ '''
+ )
+ result = testdir.runpytest()
+ assert "RuntimeError: dictionary changed size during iteration" not in result.stdout.str()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_syntax_error_with_non_ascii_chars(testdir):
+    """Ensure no decoding error occurs when formatting SyntaxErrors during collection (#578)
+ """
+ testdir.makepyfile(
+ u"""
+ # -*- coding: UTF-8 -*-
+
+ ☃
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*ERROR collecting*", "*SyntaxError*", "*1 error in*"])
+
+
+def test_skip_duplicates_by_default(testdir):
+ """Test for issue https://github.com/pytest-dev/pytest/issues/1609 (#1609)
+
+ Ignore duplicate directories.
+ """
+ a = testdir.mkdir("a")
+ fh = a.join("test_a.py")
+ fh.write(
+ _pytest._code.Source(
+ """
+ import pytest
+ def test_real():
+ pass
+ """
+ )
+ )
+ result = testdir.runpytest(a.strpath, a.strpath)
+ result.stdout.fnmatch_lines(["*collected 1 item*"])
+
+
+def test_keep_duplicates(testdir):
+ """Test for issue https://github.com/pytest-dev/pytest/issues/1609 (#1609)
+
+ Use --keep-duplicates to collect tests from duplicate directories.
+ """
+ a = testdir.mkdir("a")
+ fh = a.join("test_a.py")
+ fh.write(
+ _pytest._code.Source(
+ """
+ import pytest
+ def test_real():
+ pass
+ """
+ )
+ )
+ result = testdir.runpytest("--keep-duplicates", a.strpath, a.strpath)
+ result.stdout.fnmatch_lines(["*collected 2 item*"])
diff --git a/third_party/python/pytest/testing/python/fixture.py b/third_party/python/pytest/testing/python/fixture.py
new file mode 100644
index 0000000000..6d2bb663b6
--- /dev/null
+++ b/third_party/python/pytest/testing/python/fixture.py
@@ -0,0 +1,4011 @@
+from textwrap import dedent
+
+import _pytest._code
+import pytest
+from _pytest.pytester import get_public_names
+from _pytest.fixtures import FixtureLookupError, FixtureRequest
+from _pytest import fixtures
+
+
+def test_getfuncargnames():
+
+ def f():
+ pass
+
+ assert not fixtures.getfuncargnames(f)
+
+ def g(arg):
+ pass
+
+ assert fixtures.getfuncargnames(g) == ("arg",)
+
+ def h(arg1, arg2="hello"):
+ pass
+
+ assert fixtures.getfuncargnames(h) == ("arg1",)
+
+ def h(arg1, arg2, arg3="hello"):
+ pass
+
+ assert fixtures.getfuncargnames(h) == ("arg1", "arg2")
+
+ class A(object):
+
+ def f(self, arg1, arg2="hello"):
+ pass
+
+ @staticmethod
+ def static(arg1, arg2):
+ pass
+
+ assert fixtures.getfuncargnames(A().f) == ("arg1",)
+ assert fixtures.getfuncargnames(A.static, cls=A) == ("arg1", "arg2")
+
+
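+# Several tests in TestFillFixtures rely on the override-by-extension
+# pattern: a fixture defined in a narrower scope (class, test module,
+# nested conftest, or plugin) can request the same-named fixture from the
+# enclosing scope and build on its value.  A minimal sketch of the pattern
+# (illustrative only):
+#
+#     @pytest.fixture
+#     def spam(spam):      # inner 'spam' receives the outer 'spam'
+#         return spam * 2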
+class TestFillFixtures(object):
+
+ def test_fillfuncargs_exposed(self):
+ # used by oejskit, kept for compatibility
+ assert pytest._fillfuncargs == fixtures.fillfixtures
+
+ def test_funcarg_lookupfails(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def xyzsomething(request):
+ return 42
+
+ def test_func(some):
+ pass
+ """
+ )
+ result = testdir.runpytest() # "--collect-only")
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(
+ ["*def test_func(some)*", "*fixture*some*not found*", "*xyzsomething*"]
+ )
+
+ def test_funcarg_basic(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+
+ @pytest.fixture
+ def some(request):
+ return request.function.__name__
+ @pytest.fixture
+ def other(request):
+ return 42
+ def test_func(some, other):
+ pass
+ """
+ )
+ fixtures.fillfixtures(item)
+ del item.funcargs["request"]
+ assert len(get_public_names(item.funcargs)) == 2
+ assert item.funcargs["some"] == "test_func"
+ assert item.funcargs["other"] == 42
+
+ def test_funcarg_lookup_modulelevel(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def something(request):
+ return request.function.__name__
+
+ class TestClass(object):
+ def test_method(self, something):
+ assert something == "test_method"
+ def test_func(something):
+ assert something == "test_func"
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_funcarg_lookup_classlevel(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ class TestClass(object):
+
+ @pytest.fixture
+ def something(self, request):
+ return request.instance
+
+ def test_method(self, something):
+ assert something is self
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_conftest_funcargs_only_available_in_subdir(self, testdir):
+ sub1 = testdir.mkpydir("sub1")
+ sub2 = testdir.mkpydir("sub2")
+ sub1.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+ @pytest.fixture
+ def arg1(request):
+ pytest.raises(Exception, "request.getfixturevalue('arg2')")
+ """
+ )
+ )
+ sub2.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+ @pytest.fixture
+ def arg2(request):
+ pytest.raises(Exception, "request.getfixturevalue('arg1')")
+ """
+ )
+ )
+
+ sub1.join("test_in_sub1.py").write("def test_1(arg1): pass")
+ sub2.join("test_in_sub2.py").write("def test_2(arg2): pass")
+ result = testdir.runpytest("-v")
+ result.assert_outcomes(passed=2)
+
+ def test_extend_fixture_module_class(self, testdir):
+ testfile = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def spam():
+ return 'spam'
+
+ class TestSpam(object):
+
+ @pytest.fixture
+ def spam(self, spam):
+ return spam * 2
+
+ def test_spam(self, spam):
+ assert spam == 'spamspam'
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result = testdir.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_extend_fixture_conftest_module(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def spam():
+ return 'spam'
+ """
+ )
+ testfile = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def spam(spam):
+ return spam * 2
+
+ def test_spam(spam):
+ assert spam == 'spamspam'
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result = testdir.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_extend_fixture_conftest_conftest(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def spam():
+ return 'spam'
+ """
+ )
+ pkg = testdir.mkpydir("pkg")
+ pkg.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+
+ @pytest.fixture
+ def spam(spam):
+ return spam * 2
+ """
+ )
+ )
+ testfile = pkg.join("test_spam.py")
+ testfile.write(
+ _pytest._code.Source(
+ """
+ def test_spam(spam):
+ assert spam == "spamspam"
+ """
+ )
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result = testdir.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_extend_fixture_conftest_plugin(self, testdir):
+ testdir.makepyfile(
+ testplugin="""
+ import pytest
+
+ @pytest.fixture
+ def foo():
+ return 7
+ """
+ )
+ testdir.syspathinsert()
+ testdir.makeconftest(
+ """
+ import pytest
+
+ pytest_plugins = 'testplugin'
+
+ @pytest.fixture
+ def foo(foo):
+ return foo + 7
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_foo(foo):
+ assert foo == 14
+ """
+ )
+ result = testdir.runpytest("-s")
+ assert result.ret == 0
+
+ def test_extend_fixture_plugin_plugin(self, testdir):
+        # Two plugins should extend each other in loading order
+ testdir.makepyfile(
+ testplugin0="""
+ import pytest
+
+ @pytest.fixture
+ def foo():
+ return 7
+ """
+ )
+ testdir.makepyfile(
+ testplugin1="""
+ import pytest
+
+ @pytest.fixture
+ def foo(foo):
+ return foo + 7
+ """
+ )
+ testdir.syspathinsert()
+ testdir.makepyfile(
+ """
+ pytest_plugins = ['testplugin0', 'testplugin1']
+
+ def test_foo(foo):
+ assert foo == 14
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 0
+
+ def test_override_parametrized_fixture_conftest_module(self, testdir):
+        """Test override of a parametrized fixture with a non-parametrized one at the test module level."""
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2, 3])
+ def spam(request):
+ return request.param
+ """
+ )
+ testfile = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def spam():
+ return 'spam'
+
+ def test_spam(spam):
+ assert spam == 'spam'
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result = testdir.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_override_parametrized_fixture_conftest_conftest(self, testdir):
+        """Test override of a parametrized fixture with a non-parametrized one at the conftest level."""
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2, 3])
+ def spam(request):
+ return request.param
+ """
+ )
+ subdir = testdir.mkpydir("subdir")
+ subdir.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+
+ @pytest.fixture
+ def spam():
+ return 'spam'
+ """
+ )
+ )
+ testfile = subdir.join("test_spam.py")
+ testfile.write(
+ _pytest._code.Source(
+ """
+ def test_spam(spam):
+ assert spam == "spam"
+ """
+ )
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result = testdir.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_override_non_parametrized_fixture_conftest_module(self, testdir):
+        """Test override of a non-parametrized fixture with a parametrized one at the test module level."""
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def spam():
+ return 'spam'
+ """
+ )
+ testfile = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2, 3])
+ def spam(request):
+ return request.param
+
+ params = {'spam': 1}
+
+ def test_spam(spam):
+ assert spam == params['spam']
+ params['spam'] += 1
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*3 passed*"])
+ result = testdir.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*3 passed*"])
+
+ def test_override_non_parametrized_fixture_conftest_conftest(self, testdir):
+        """Test override of a non-parametrized fixture with a parametrized one at the conftest level."""
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def spam():
+ return 'spam'
+ """
+ )
+ subdir = testdir.mkpydir("subdir")
+ subdir.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2, 3])
+ def spam(request):
+ return request.param
+ """
+ )
+ )
+ testfile = subdir.join("test_spam.py")
+ testfile.write(
+ _pytest._code.Source(
+ """
+ params = {'spam': 1}
+
+ def test_spam(spam):
+ assert spam == params['spam']
+ params['spam'] += 1
+ """
+ )
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*3 passed*"])
+ result = testdir.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*3 passed*"])
+
+ def test_override_autouse_fixture_with_parametrized_fixture_conftest_conftest(
+ self, testdir
+ ):
+        """Test override of an autouse fixture with a parametrized one at the conftest level.
+        This test covers the issue explained in issue #1601.
+ """
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(autouse=True)
+ def spam():
+ return 'spam'
+ """
+ )
+ subdir = testdir.mkpydir("subdir")
+ subdir.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2, 3])
+ def spam(request):
+ return request.param
+ """
+ )
+ )
+ testfile = subdir.join("test_spam.py")
+ testfile.write(
+ _pytest._code.Source(
+ """
+ params = {'spam': 1}
+
+ def test_spam(spam):
+ assert spam == params['spam']
+ params['spam'] += 1
+ """
+ )
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*3 passed*"])
+ result = testdir.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*3 passed*"])
+
+ def test_autouse_fixture_plugin(self, testdir):
+ # A fixture from a plugin has no baseid set, which screwed up
+ # the autouse fixture handling.
+ testdir.makepyfile(
+ testplugin="""
+ import pytest
+
+ @pytest.fixture(autouse=True)
+ def foo(request):
+ request.function.foo = 7
+ """
+ )
+ testdir.syspathinsert()
+ testdir.makepyfile(
+ """
+ pytest_plugins = 'testplugin'
+
+ def test_foo(request):
+ assert request.function.foo == 7
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 0
+
+ def test_funcarg_lookup_error(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def a_fixture(): pass
+
+ @pytest.fixture
+ def b_fixture(): pass
+
+ @pytest.fixture
+ def c_fixture(): pass
+
+ @pytest.fixture
+ def d_fixture(): pass
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_lookup_error(unknown):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*ERROR at setup of test_lookup_error*",
+ " def test_lookup_error(unknown):*",
+ "E fixture 'unknown' not found",
+ "> available fixtures:*a_fixture,*b_fixture,*c_fixture,*d_fixture*monkeypatch,*", # sorted
+ "> use 'py*test --fixtures *' for help on them.",
+ "*1 error*",
+ ]
+ )
+ assert "INTERNAL" not in result.stdout.str()
+
+ def test_fixture_excinfo_leak(self, testdir):
+        # on python2 sys.exc_info() would leak into fixture executions
+ testdir.makepyfile(
+ """
+ import sys
+ import traceback
+ import pytest
+
+ @pytest.fixture
+ def leak():
+ if sys.exc_info()[0]: # python3 bug :)
+ traceback.print_exc()
+ #fails
+ assert sys.exc_info() == (None, None, None)
+
+ def test_leak(leak):
+ if sys.exc_info()[0]: # python3 bug :)
+ traceback.print_exc()
+ assert sys.exc_info() == (None, None, None)
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 0
+
+
+class TestRequestBasic(object):
+
+ def test_request_attributes(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+
+ @pytest.fixture
+ def something(request): pass
+ def test_func(something): pass
+ """
+ )
+ req = fixtures.FixtureRequest(item)
+ assert req.function == item.obj
+ assert req.keywords == item.keywords
+ assert hasattr(req.module, "test_func")
+ assert req.cls is None
+ assert req.function.__name__ == "test_func"
+ assert req.config == item.config
+ assert repr(req).find(req.function.__name__) != -1
+
+ def test_request_attributes_method(self, testdir):
+ item, = testdir.getitems(
+ """
+ import pytest
+ class TestB(object):
+
+ @pytest.fixture
+ def something(self, request):
+ return 1
+ def test_func(self, something):
+ pass
+ """
+ )
+ req = item._request
+ assert req.cls.__name__ == "TestB"
+ assert req.instance.__class__ == req.cls
+
+ def test_request_contains_funcarg_arg2fixturedefs(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ import pytest
+ @pytest.fixture
+ def something(request):
+ pass
+ class TestClass(object):
+ def test_method(self, something):
+ pass
+ """
+ )
+ item1, = testdir.genitems([modcol])
+ assert item1.name == "test_method"
+ arg2fixturedefs = fixtures.FixtureRequest(item1)._arg2fixturedefs
+ assert len(arg2fixturedefs) == 1
+ assert arg2fixturedefs["something"][0].argname == "something"
+
+ def test_request_garbage(self, testdir):
+ testdir.makepyfile(
+ """
+ import sys
+ import pytest
+ from _pytest.fixtures import PseudoFixtureDef
+ import gc
+
+ @pytest.fixture(autouse=True)
+ def something(request):
+                # this way of testing doesn't work on pypy
+ if hasattr(sys, "pypy_version_info"):
+ yield
+ else:
+ original = gc.get_debug()
+ gc.set_debug(gc.DEBUG_SAVEALL)
+ gc.collect()
+
+ yield
+
+ gc.collect()
+ leaked_types = sum(1 for _ in gc.garbage
+ if isinstance(_, PseudoFixtureDef))
+
+ gc.garbage[:] = []
+
+ try:
+ assert leaked_types == 0
+ finally:
+ gc.set_debug(original)
+
+ def test_func():
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_getfixturevalue_recursive(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def something(request):
+ return 1
+ """
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def something(request):
+ return request.getfixturevalue("something") + 1
+ def test_func(something):
+ assert something == 2
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ @pytest.mark.parametrize("getfixmethod", ("getfixturevalue", "getfuncargvalue"))
+ def test_getfixturevalue(self, testdir, getfixmethod):
+ item = testdir.getitem(
+ """
+ import pytest
+ values = [2]
+ @pytest.fixture
+ def something(request): return 1
+ @pytest.fixture
+ def other(request):
+ return values.pop()
+ def test_func(something): pass
+ """
+ )
+ import contextlib
+
+ if getfixmethod == "getfuncargvalue":
+ warning_expectation = pytest.warns(DeprecationWarning)
+ else:
+ # see #1830 for a cleaner way to accomplish this
+ @contextlib.contextmanager
+ def expecting_no_warning():
+ yield
+
+ warning_expectation = expecting_no_warning()
+
+ req = item._request
+ with warning_expectation:
+ fixture_fetcher = getattr(req, getfixmethod)
+ with pytest.raises(FixtureLookupError):
+ fixture_fetcher("notexists")
+ val = fixture_fetcher("something")
+ assert val == 1
+ val = fixture_fetcher("something")
+ assert val == 1
+ val2 = fixture_fetcher("other")
+ assert val2 == 2
+ val2 = fixture_fetcher("other") # see about caching
+ assert val2 == 2
+ pytest._fillfuncargs(item)
+ assert item.funcargs["something"] == 1
+ assert len(get_public_names(item.funcargs)) == 2
+ assert "request" in item.funcargs
+
+ def test_request_addfinalizer(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ teardownlist = []
+ @pytest.fixture
+ def something(request):
+ request.addfinalizer(lambda: teardownlist.append(1))
+ def test_func(something): pass
+ """
+ )
+ item.session._setupstate.prepare(item)
+ pytest._fillfuncargs(item)
+ # successively check finalization calls
+ teardownlist = item.getparent(pytest.Module).obj.teardownlist
+ ss = item.session._setupstate
+ assert not teardownlist
+ ss.teardown_exact(item, None)
+ print(ss.stack)
+ assert teardownlist == [1]
+
+ def test_mark_as_fixture_with_prefix_and_decorator_fails(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def pytest_funcarg__marked_with_prefix_and_decorator():
+ pass
+ """
+ )
+ result = testdir.runpytest_subprocess()
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(
+ [
+ "*AssertionError: fixtures cannot have*@pytest.fixture*",
+ "*pytest_funcarg__marked_with_prefix_and_decorator*",
+ ]
+ )
+
+ def test_request_addfinalizer_failing_setup(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = [1]
+ @pytest.fixture
+ def myfix(request):
+ request.addfinalizer(values.pop)
+ assert 0
+ def test_fix(myfix):
+ pass
+ def test_finalizer_ran():
+ assert not values
+ """
+ )
+ reprec = testdir.inline_run("-s")
+ reprec.assertoutcome(failed=1, passed=1)
+
+ def test_request_addfinalizer_failing_setup_module(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = [1, 2]
+ @pytest.fixture(scope="module")
+ def myfix(request):
+ request.addfinalizer(values.pop)
+ request.addfinalizer(values.pop)
+ assert 0
+ def test_fix(myfix):
+ pass
+ """
+ )
+ reprec = testdir.inline_run("-s")
+ mod = reprec.getcalls("pytest_runtest_setup")[0].item.module
+ assert not mod.values
+
+ def test_request_addfinalizer_partial_setup_failure(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture
+ def something(request):
+ request.addfinalizer(lambda: values.append(None))
+ def test_func(something, missingarg):
+ pass
+ def test_second():
+ assert len(values) == 1
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(
+ ["*1 error*"] # XXX the whole module collection fails
+ )
+
+ def test_request_subrequest_addfinalizer_exceptions(self, testdir):
+ """
+ Ensure exceptions raised during teardown by a finalizer are suppressed
+ until all finalizers are called, re-raising the first exception (#2440)
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ def _excepts(where):
+ raise Exception('Error in %s fixture' % where)
+ @pytest.fixture
+ def subrequest(request):
+ return request
+ @pytest.fixture
+ def something(subrequest):
+ subrequest.addfinalizer(lambda: values.append(1))
+ subrequest.addfinalizer(lambda: values.append(2))
+ subrequest.addfinalizer(lambda: _excepts('something'))
+ @pytest.fixture
+ def excepts(subrequest):
+ subrequest.addfinalizer(lambda: _excepts('excepts'))
+ subrequest.addfinalizer(lambda: values.append(3))
+ def test_first(something, excepts):
+ pass
+ def test_second():
+ assert values == [3, 2, 1]
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*Exception: Error in excepts fixture", "* 2 passed, 1 error in *"]
+ )
+
+ def test_request_getmodulepath(self, testdir):
+ modcol = testdir.getmodulecol("def test_somefunc(): pass")
+ item, = testdir.genitems([modcol])
+ req = fixtures.FixtureRequest(item)
+ assert req.fspath == modcol.fspath
+
+ def test_request_fixturenames(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ from _pytest.pytester import get_public_names
+ @pytest.fixture()
+ def arg1():
+ pass
+ @pytest.fixture()
+ def farg(arg1):
+ pass
+ @pytest.fixture(autouse=True)
+ def sarg(tmpdir):
+ pass
+ def test_function(request, farg):
+ assert set(get_public_names(request.fixturenames)) == \
+ set(["tmpdir", "sarg", "arg1", "request", "farg",
+ "tmpdir_factory"])
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_funcargnames_compatattr(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ assert metafunc.funcargnames == metafunc.fixturenames
+ @pytest.fixture
+ def fn(request):
+ assert request._pyfuncitem.funcargnames == \
+ request._pyfuncitem.fixturenames
+ return request.funcargnames, request.fixturenames
+
+ def test_hello(fn):
+ assert fn[0] == fn[1]
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_setupdecorator_and_xunit(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(scope='module', autouse=True)
+ def setup_module():
+ values.append("module")
+ @pytest.fixture(autouse=True)
+ def setup_function():
+ values.append("function")
+
+ def test_func():
+ pass
+
+ class TestClass(object):
+ @pytest.fixture(scope="class", autouse=True)
+ def setup_class(self):
+ values.append("class")
+ @pytest.fixture(autouse=True)
+ def setup_method(self):
+ values.append("method")
+ def test_method(self):
+ pass
+ def test_all():
+ assert values == ["module", "function", "class",
+ "function", "method", "function"]
+ """
+ )
+ reprec = testdir.inline_run("-v")
+ reprec.assertoutcome(passed=3)
+
+ def test_fixtures_sub_subdir_normalize_sep(self, testdir):
+ # this tests that normalization of nodeids takes place
+ b = testdir.mkdir("tests").mkdir("unit")
+ b.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+ @pytest.fixture
+ def arg1():
+ pass
+ """
+ )
+ )
+ p = b.join("test_module.py")
+ p.write("def test_func(arg1): pass")
+ result = testdir.runpytest(p, "--fixtures")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ """
+ *fixtures defined*conftest*
+ *arg1*
+ """
+ )
+
+ def test_show_fixtures_color_yes(self, testdir):
+ testdir.makepyfile("def test_this(): assert 1")
+ result = testdir.runpytest("--color=yes", "--fixtures")
+ assert "\x1b[32mtmpdir" in result.stdout.str()
+
+ def test_newstyle_with_request(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture()
+ def arg(request):
+ pass
+ def test_1(arg):
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_setupcontext_no_param(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(params=[1,2])
+ def arg(request):
+ return request.param
+
+ @pytest.fixture(autouse=True)
+ def mysetup(request, arg):
+ assert not hasattr(request, "param")
+ def test_1(arg):
+ assert arg in (1,2)
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+
+
+class TestRequestMarking(object):
+
+ def test_applymarker(self, testdir):
+ item1, item2 = testdir.getitems(
+ """
+ import pytest
+
+ @pytest.fixture
+ def something(request):
+ pass
+ class TestClass(object):
+ def test_func1(self, something):
+ pass
+ def test_func2(self, something):
+ pass
+ """
+ )
+ req1 = fixtures.FixtureRequest(item1)
+ assert "xfail" not in item1.keywords
+ req1.applymarker(pytest.mark.xfail)
+ assert "xfail" in item1.keywords
+ assert "skipif" not in item1.keywords
+ req1.applymarker(pytest.mark.skipif)
+ assert "skipif" in item1.keywords
+ pytest.raises(ValueError, "req1.applymarker(42)")
+
+ def test_accesskeywords(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture()
+ def keywords(request):
+ return request.keywords
+ @pytest.mark.XYZ
+ def test_function(keywords):
+ assert keywords["XYZ"]
+ assert "abc" not in keywords
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_accessmarker_dynamic(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ @pytest.fixture()
+ def keywords(request):
+ return request.keywords
+
+ @pytest.fixture(scope="class", autouse=True)
+ def marking(request):
+ request.applymarker(pytest.mark.XYZ("hello"))
+ """
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_fun1(keywords):
+ assert keywords["XYZ"] is not None
+ assert "abc" not in keywords
+ def test_fun2(keywords):
+ assert keywords["XYZ"] is not None
+ assert "abc" not in keywords
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+
+
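+# request.cached_setup(), exercised by TestRequestCachedSetup below, is a
+# legacy API that caches a setup/teardown pair per scope; the equivalent
+# effect is normally obtained with a scoped fixture.  A rough sketch of the
+# modern spelling (illustrative only):
+#
+#     @pytest.fixture(scope="module")
+#     def something():
+#         return "hello"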
+class TestRequestCachedSetup(object):
+
+ def test_request_cachedsetup_defaultmodule(self, testdir):
+ reprec = testdir.inline_runsource(
+ """
+ mysetup = ["hello",].pop
+
+ import pytest
+
+ @pytest.fixture
+ def something(request):
+ return request.cached_setup(mysetup, scope="module")
+
+ def test_func1(something):
+ assert something == "hello"
+ class TestClass(object):
+ def test_func1a(self, something):
+ assert something == "hello"
+ """
+ )
+ reprec.assertoutcome(passed=2)
+
+ def test_request_cachedsetup_class(self, testdir):
+ reprec = testdir.inline_runsource(
+ """
+ mysetup = ["hello", "hello2", "hello3"].pop
+
+ import pytest
+ @pytest.fixture
+ def something(request):
+ return request.cached_setup(mysetup, scope="class")
+ def test_func1(something):
+ assert something == "hello3"
+ def test_func2(something):
+ assert something == "hello2"
+ class TestClass(object):
+ def test_func1a(self, something):
+ assert something == "hello"
+ def test_func2b(self, something):
+ assert something == "hello"
+ """
+ )
+ reprec.assertoutcome(passed=4)
+
+ def test_request_cachedsetup_extrakey(self, testdir):
+ item1 = testdir.getitem("def test_func(): pass")
+ req1 = fixtures.FixtureRequest(item1)
+ values = ["hello", "world"]
+
+ def setup():
+ return values.pop()
+
+ ret1 = req1.cached_setup(setup, extrakey=1)
+ ret2 = req1.cached_setup(setup, extrakey=2)
+ assert ret2 == "hello"
+ assert ret1 == "world"
+ ret1b = req1.cached_setup(setup, extrakey=1)
+ ret2b = req1.cached_setup(setup, extrakey=2)
+ assert ret1 == ret1b
+ assert ret2 == ret2b
+
+ def test_request_cachedsetup_cache_deletion(self, testdir):
+ item1 = testdir.getitem("def test_func(): pass")
+ req1 = fixtures.FixtureRequest(item1)
+ values = []
+
+ def setup():
+ values.append("setup")
+
+ def teardown(val):
+ values.append("teardown")
+
+ req1.cached_setup(setup, teardown, scope="function")
+ assert values == ["setup"]
+ # artificial call of finalizer
+ setupstate = req1._pyfuncitem.session._setupstate
+ setupstate._callfinalizers(item1)
+ assert values == ["setup", "teardown"]
+ req1.cached_setup(setup, teardown, scope="function")
+ assert values == ["setup", "teardown", "setup"]
+ setupstate._callfinalizers(item1)
+ assert values == ["setup", "teardown", "setup", "teardown"]
+
+ def test_request_cached_setup_two_args(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def arg1(request):
+ return request.cached_setup(lambda: 42)
+ @pytest.fixture
+ def arg2(request):
+ return request.cached_setup(lambda: 17)
+ def test_two_different_setups(arg1, arg2):
+ assert arg1 != arg2
+ """
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_request_cached_setup_getfixturevalue(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def arg1(request):
+ arg1 = request.getfixturevalue("arg2")
+ return request.cached_setup(lambda: arg1 + 1)
+ @pytest.fixture
+ def arg2(request):
+ return request.cached_setup(lambda: 10)
+ def test_two_funcarg(arg1):
+ assert arg1 == 11
+ """
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_request_cached_setup_functional(self, testdir):
+ testdir.makepyfile(
+ test_0="""
+ import pytest
+ values = []
+ @pytest.fixture
+ def something(request):
+ val = request.cached_setup(fsetup, fteardown)
+ return val
+ def fsetup(mycache=[1]):
+ values.append(mycache.pop())
+ return values
+ def fteardown(something):
+ values.remove(something[0])
+ values.append(2)
+ def test_list_once(something):
+ assert something == [1]
+ def test_list_twice(something):
+ assert something == [1]
+ """
+ )
+ testdir.makepyfile(
+ test_1="""
+ import test_0 # should have run already
+ def test_check_test0_has_teardown_correct():
+ assert test_0.values == [2]
+ """
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(["*3 passed*"])
+
+ def test_issue117_sessionscopeteardown(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def app(request):
+ app = request.cached_setup(
+ scope='session',
+ setup=lambda: 0,
+ teardown=lambda x: 3/x)
+ return app
+ def test_func(app):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(["*3/x*", "*ZeroDivisionError*"])
+
+
+class TestFixtureUsages(object):
+
+ def test_noargfixturedec(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def arg1():
+ return 1
+
+ def test_func(arg1):
+ assert arg1 == 1
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_receives_funcargs(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture()
+ def arg1():
+ return 1
+
+ @pytest.fixture()
+ def arg2(arg1):
+ return arg1 + 1
+
+ def test_add(arg2):
+ assert arg2 == 2
+ def test_all(arg1, arg2):
+ assert arg1 == 1
+ assert arg2 == 2
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_receives_funcargs_scope_mismatch(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="function")
+ def arg1():
+ return 1
+
+ @pytest.fixture(scope="module")
+ def arg2(arg1):
+ return arg1 + 1
+
+ def test_add(arg2):
+ assert arg2 == 2
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*ScopeMismatch*involved factories*",
+ "* def arg2*",
+ "* def arg1*",
+ "*1 error*",
+ ]
+ )
+
+ def test_receives_funcargs_scope_mismatch_issue660(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="function")
+ def arg1():
+ return 1
+
+ @pytest.fixture(scope="module")
+ def arg2(arg1):
+ return arg1 + 1
+
+ def test_add(arg1, arg2):
+ assert arg2 == 2
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*ScopeMismatch*involved factories*", "* def arg2*", "*1 error*"]
+ )
+
+ def test_invalid_scope(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="functions")
+ def badscope():
+ pass
+
+ def test_nothing(badscope):
+ pass
+ """
+ )
+ result = testdir.runpytest_inprocess()
+ result.stdout.fnmatch_lines(
+ (
+ "*ValueError: fixture badscope from test_invalid_scope.py has an unsupported"
+ " scope value 'functions'"
+ )
+ )
+
+ def test_funcarg_parametrized_and_used_twice(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(params=[1,2])
+ def arg1(request):
+ values.append(1)
+ return request.param
+
+ @pytest.fixture()
+ def arg2(arg1):
+ return arg1 + 1
+
+ def test_add(arg1, arg2):
+ assert arg2 == arg1 + 1
+ assert len(values) == arg1
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+ def test_factory_uses_unknown_funcarg_as_dependency_error(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture()
+ def fail(missing):
+ return
+
+ @pytest.fixture()
+ def call_fail(fail):
+ return
+
+ def test_missing(call_fail):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *pytest.fixture()*
+ *def call_fail(fail)*
+ *pytest.fixture()*
+ *def fail*
+ *fixture*'missing'*not found*
+ """
+ )
+
+ def test_factory_setup_as_classes_fails(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ class arg1(object):
+ def __init__(self, request):
+ self.x = 1
+ arg1 = pytest.fixture()(arg1)
+
+ """
+ )
+ reprec = testdir.inline_run()
+ values = reprec.getfailedcollections()
+ assert len(values) == 1
+
+ def test_request_can_be_overridden(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture()
+ def request(request):
+ request.a = 1
+ return request
+ def test_request(request):
+ assert request.a == 1
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_usefixtures_marker(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ values = []
+
+ @pytest.fixture(scope="class")
+ def myfix(request):
+ request.cls.hello = "world"
+ values.append(1)
+
+ class TestClass(object):
+ def test_one(self):
+ assert self.hello == "world"
+ assert len(values) == 1
+ def test_two(self):
+ assert self.hello == "world"
+ assert len(values) == 1
+ pytest.mark.usefixtures("myfix")(TestClass)
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_usefixtures_ini(self, testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ usefixtures = myfix
+ """
+ )
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(scope="class")
+ def myfix(request):
+ request.cls.hello = "world"
+
+ """
+ )
+ testdir.makepyfile(
+ """
+ class TestClass(object):
+ def test_one(self):
+ assert self.hello == "world"
+ def test_two(self):
+ assert self.hello == "world"
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_usefixtures_seen_in_showmarkers(self, testdir):
+ result = testdir.runpytest("--markers")
+ result.stdout.fnmatch_lines(
+ """
+ *usefixtures(fixturename1*mark tests*fixtures*
+ """
+ )
+
+ def test_request_instance_issue203(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ class TestClass(object):
+ @pytest.fixture
+ def setup1(self, request):
+ assert self == request.instance
+ self.arg1 = 1
+ def test_hello(self, setup1):
+ assert self.arg1 == 1
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_fixture_parametrized_with_iterator(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ values = []
+ def f():
+ yield 1
+ yield 2
+ dec = pytest.fixture(scope="module", params=f())
+
+ @dec
+ def arg(request):
+ return request.param
+ @dec
+ def arg2(request):
+ return request.param
+
+ def test_1(arg):
+ values.append(arg)
+ def test_2(arg2):
+ values.append(arg2*10)
+ """
+ )
+ reprec = testdir.inline_run("-v")
+ reprec.assertoutcome(passed=4)
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
+ assert values == [1, 2, 10, 20]
+
+
+class TestFixtureManagerParseFactories(object):
+
+ @pytest.fixture
+ def testdir(self, request):
+ testdir = request.getfixturevalue("testdir")
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def hello(request):
+ return "conftest"
+
+ @pytest.fixture
+ def fm(request):
+ return request._fixturemanager
+
+ @pytest.fixture
+ def item(request):
+ return request._pyfuncitem
+ """
+ )
+ return testdir
+
+ def test_parsefactories_evil_objects_issue214(self, testdir):
+ testdir.makepyfile(
+ """
+ class A(object):
+ def __call__(self):
+ pass
+ def __getattr__(self, name):
+ raise RuntimeError()
+ a = A()
+ def test_hello():
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1, failed=0)
+
+ def test_parsefactories_conftest(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_hello(item, fm):
+ for name in ("fm", "hello", "item"):
+ faclist = fm.getfixturedefs(name, item.nodeid)
+ assert len(faclist) == 1
+ fac = faclist[0]
+ assert fac.func.__name__ == name
+ """
+ )
+ reprec = testdir.inline_run("-s")
+ reprec.assertoutcome(passed=1)
+
+ def test_parsefactories_conftest_and_module_and_class(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def hello(request):
+ return "module"
+ class TestClass(object):
+ @pytest.fixture
+ def hello(self, request):
+ return "class"
+ def test_hello(self, item, fm):
+ faclist = fm.getfixturedefs("hello", item.nodeid)
+ print (faclist)
+ assert len(faclist) == 3
+ assert faclist[0].func(item._request) == "conftest"
+ assert faclist[1].func(item._request) == "module"
+ assert faclist[2].func(item._request) == "class"
+ """
+ )
+ reprec = testdir.inline_run("-s")
+ reprec.assertoutcome(passed=1)
+
+ def test_parsefactories_relative_node_ids(self, testdir):
+ # example mostly taken from:
+ # https://mail.python.org/pipermail/pytest-dev/2014-September/002617.html
+ runner = testdir.mkdir("runner")
+ package = testdir.mkdir("package")
+ package.join("conftest.py").write(
+ dedent(
+ """\
+ import pytest
+ @pytest.fixture
+ def one():
+ return 1
+ """
+ )
+ )
+ package.join("test_x.py").write(
+ dedent(
+ """\
+ def test_x(one):
+ assert one == 1
+ """
+ )
+ )
+ sub = package.mkdir("sub")
+ sub.join("__init__.py").ensure()
+ sub.join("conftest.py").write(
+ dedent(
+ """\
+ import pytest
+ @pytest.fixture
+ def one():
+ return 2
+ """
+ )
+ )
+ sub.join("test_y.py").write(
+ dedent(
+ """\
+ def test_x(one):
+ assert one == 2
+ """
+ )
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+ with runner.as_cwd():
+ reprec = testdir.inline_run("..")
+ reprec.assertoutcome(passed=2)
+
+
+class TestAutouseDiscovery(object):
+
+ @pytest.fixture
+ def testdir(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ @pytest.fixture(autouse=True)
+ def perfunction(request, tmpdir):
+ pass
+
+ @pytest.fixture()
+ def arg1(tmpdir):
+ pass
+ @pytest.fixture(autouse=True)
+ def perfunction2(arg1):
+ pass
+
+ @pytest.fixture
+ def fm(request):
+ return request._fixturemanager
+
+ @pytest.fixture
+ def item(request):
+ return request._pyfuncitem
+ """
+ )
+ return testdir
+
+ def test_parsefactories_conftest(self, testdir):
+ testdir.makepyfile(
+ """
+ from _pytest.pytester import get_public_names
+ def test_check_setup(item, fm):
+ autousenames = fm._getautousenames(item.nodeid)
+ assert len(get_public_names(autousenames)) == 2
+ assert "perfunction2" in autousenames
+ assert "perfunction" in autousenames
+ """
+ )
+ reprec = testdir.inline_run("-s")
+ reprec.assertoutcome(passed=1)
+
+ def test_two_classes_separated_autouse(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ class TestA(object):
+ values = []
+ @pytest.fixture(autouse=True)
+ def setup1(self):
+ self.values.append(1)
+ def test_setup1(self):
+ assert self.values == [1]
+ class TestB(object):
+ values = []
+ @pytest.fixture(autouse=True)
+ def setup2(self):
+ self.values.append(1)
+ def test_setup2(self):
+ assert self.values == [1]
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_setup_at_classlevel(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ class TestClass(object):
+ @pytest.fixture(autouse=True)
+ def permethod(self, request):
+ request.instance.funcname = request.function.__name__
+ def test_method1(self):
+ assert self.funcname == "test_method1"
+ def test_method2(self):
+ assert self.funcname == "test_method2"
+ """
+ )
+ reprec = testdir.inline_run("-s")
+ reprec.assertoutcome(passed=2)
+
+ @pytest.mark.xfail(reason="'enabled' feature not implemented")
+ def test_setup_enabled_functionnode(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ def enabled(parentnode, markers):
+ return "needsdb" in markers
+
+ @pytest.fixture(params=[1,2])
+ def db(request):
+ return request.param
+
+ @pytest.fixture(enabled=enabled, autouse=True)
+ def createdb(db):
+ pass
+
+ def test_func1(request):
+ assert "db" not in request.fixturenames
+
+ @pytest.mark.needsdb
+ def test_func2(request):
+ assert "db" in request.fixturenames
+ """
+ )
+ reprec = testdir.inline_run("-s")
+ reprec.assertoutcome(passed=2)
+
+ def test_callables_nocode(self, testdir):
+ """
+        An imported mock.call object used to break setup/factory discovery
+        because it is callable but its __code__ attribute is not a code object.
+ """
+ testdir.makepyfile(
+ """
+ class _call(tuple):
+ def __call__(self, *k, **kw):
+ pass
+ def __getattr__(self, k):
+ return self
+
+ call = _call()
+ """
+ )
+ reprec = testdir.inline_run("-s")
+ reprec.assertoutcome(failed=0, passed=0)
+
+ def test_autouse_in_conftests(self, testdir):
+ a = testdir.mkdir("a")
+ b = testdir.mkdir("a1")
+ conftest = testdir.makeconftest(
+ """
+ import pytest
+ @pytest.fixture(autouse=True)
+ def hello():
+ xxx
+ """
+ )
+ conftest.move(a.join(conftest.basename))
+ a.join("test_something.py").write("def test_func(): pass")
+ b.join("test_otherthing.py").write("def test_func(): pass")
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *1 passed*1 error*
+ """
+ )
+
+ def test_autouse_in_module_and_two_classes(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(autouse=True)
+ def append1():
+ values.append("module")
+ def test_x():
+ assert values == ["module"]
+
+ class TestA(object):
+ @pytest.fixture(autouse=True)
+ def append2(self):
+ values.append("A")
+ def test_hello(self):
+ assert values == ["module", "module", "A"], values
+ class TestA2(object):
+ def test_world(self):
+ assert values == ["module", "module", "A", "module"], values
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=3)
+
+
+class TestAutouseManagement(object):
+
+ def test_autouse_conftest_mid_directory(self, testdir):
+ pkgdir = testdir.mkpydir("xyz123")
+ pkgdir.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+ @pytest.fixture(autouse=True)
+ def app():
+ import sys
+ sys._myapp = "hello"
+ """
+ )
+ )
+ t = pkgdir.ensure("tests", "test_app.py")
+ t.write(
+ _pytest._code.Source(
+ """
+ import sys
+ def test_app():
+ assert sys._myapp == "hello"
+ """
+ )
+ )
+ reprec = testdir.inline_run("-s")
+ reprec.assertoutcome(passed=1)
+
+ def test_autouse_honored_for_yield(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(autouse=True)
+ def tst():
+ global x
+ x = 3
+ def test_gen():
+ def f(hello):
+ assert x == abs(hello)
+ yield f, 3
+ yield f, -3
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_funcarg_and_setup(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(scope="module")
+ def arg():
+ values.append(1)
+ return 0
+ @pytest.fixture(scope="module", autouse=True)
+ def something(arg):
+ values.append(2)
+
+ def test_hello(arg):
+ assert len(values) == 2
+ assert values == [1,2]
+ assert arg == 0
+
+ def test_hello2(arg):
+ assert len(values) == 2
+ assert values == [1,2]
+ assert arg == 0
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_uses_parametrized_resource(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(params=[1,2])
+ def arg(request):
+ return request.param
+
+ @pytest.fixture(autouse=True)
+ def something(arg):
+ values.append(arg)
+
+ def test_hello():
+ if len(values) == 1:
+ assert values == [1]
+ elif len(values) == 2:
+ assert values == [1, 2]
+ else:
+ 0/0
+
+ """
+ )
+ reprec = testdir.inline_run("-s")
+ reprec.assertoutcome(passed=2)
+
+ def test_session_parametrized_function(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ values = []
+
+ @pytest.fixture(scope="session", params=[1,2])
+ def arg(request):
+ return request.param
+
+ @pytest.fixture(scope="function", autouse=True)
+ def append(request, arg):
+ if request.function.__name__ == "test_some":
+ values.append(arg)
+
+ def test_some():
+ pass
+
+ def test_result(arg):
+ assert len(values) == arg
+ assert values[:arg] == [1,2][:arg]
+ """
+ )
+ reprec = testdir.inline_run("-v", "-s")
+ reprec.assertoutcome(passed=4)
+
+ def test_class_function_parametrization_finalization(self, testdir):
+ p = testdir.makeconftest(
+ """
+ import pytest
+ import pprint
+
+ values = []
+
+ @pytest.fixture(scope="function", params=[1,2])
+ def farg(request):
+ return request.param
+
+ @pytest.fixture(scope="class", params=list("ab"))
+ def carg(request):
+ return request.param
+
+ @pytest.fixture(scope="function", autouse=True)
+ def append(request, farg, carg):
+ def fin():
+ values.append("fin_%s%s" % (carg, farg))
+ request.addfinalizer(fin)
+ """
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+
+ class TestClass(object):
+ def test_1(self):
+ pass
+ class TestClass2(object):
+ def test_2(self):
+ pass
+ """
+ )
+ confcut = "--confcutdir={}".format(testdir.tmpdir)
+ reprec = testdir.inline_run("-v", "-s", confcut)
+ reprec.assertoutcome(passed=8)
+ config = reprec.getcalls("pytest_unconfigure")[0].config
+ values = config.pluginmanager._getconftestmodules(p)[0].values
+ assert values == ["fin_a1", "fin_a2", "fin_b1", "fin_b2"] * 2
+
+ def test_scope_ordering(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(scope="function", autouse=True)
+ def fappend2():
+ values.append(2)
+ @pytest.fixture(scope="class", autouse=True)
+ def classappend3():
+ values.append(3)
+ @pytest.fixture(scope="module", autouse=True)
+ def mappend():
+ values.append(1)
+
+ class TestHallo(object):
+ def test_method(self):
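+                    # autouse fixtures run highest scope first: module (1), then class (3), then function (2)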
+ assert values == [1,3,2]
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_parametrization_setup_teardown_ordering(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ def pytest_generate_tests(metafunc):
+ if metafunc.cls is None:
+ assert metafunc.function is test_finish
+ if metafunc.cls is not None:
+ metafunc.parametrize("item", [1,2], scope="class")
+ class TestClass(object):
+ @pytest.fixture(scope="class", autouse=True)
+ def addteardown(self, item, request):
+ values.append("setup-%d" % item)
+ request.addfinalizer(lambda: values.append("teardown-%d" % item))
+ def test_step1(self, item):
+ values.append("step1-%d" % item)
+ def test_step2(self, item):
+ values.append("step2-%d" % item)
+
+ def test_finish():
+ print (values)
+ assert values == ["setup-1", "step1-1", "step2-1", "teardown-1",
+ "setup-2", "step1-2", "step2-2", "teardown-2",]
+ """
+ )
+ reprec = testdir.inline_run("-s")
+ reprec.assertoutcome(passed=5)
+
+ def test_ordering_autouse_before_explicit(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ values = []
+ @pytest.fixture(autouse=True)
+ def fix1():
+ values.append(1)
+ @pytest.fixture()
+ def arg1():
+ values.append(2)
+ def test_hello(arg1):
+ assert values == [1,2]
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ @pytest.mark.issue226
+ @pytest.mark.parametrize("param1", ["", "params=[1]"], ids=["p00", "p01"])
+ @pytest.mark.parametrize("param2", ["", "params=[1]"], ids=["p10", "p11"])
+ def test_ordering_dependencies_torndown_first(self, testdir, param1, param2):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(%(param1)s)
+ def arg1(request):
+ request.addfinalizer(lambda: values.append("fin1"))
+ values.append("new1")
+ @pytest.fixture(%(param2)s)
+ def arg2(request, arg1):
+ request.addfinalizer(lambda: values.append("fin2"))
+ values.append("new2")
+
+ def test_arg(arg2):
+ pass
+ def test_check():
+ assert values == ["new1", "new2", "fin2", "fin1"]
+ """
+ % locals()
+ )
+ reprec = testdir.inline_run("-s")
+ reprec.assertoutcome(passed=2)
+
+
+class TestFixtureMarker(object):
+
+ def test_parametrize(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(params=["a", "b", "c"])
+ def arg(request):
+ return request.param
+ values = []
+ def test_param(arg):
+ values.append(arg)
+ def test_result():
+ assert values == list("abc")
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=4)
+
+ def test_multiple_parametrization_issue_736(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1,2,3])
+ def foo(request):
+ return request.param
+
+ @pytest.mark.parametrize('foobar', [4,5,6])
+ def test_issue(foo, foobar):
+ assert foo in [1,2,3]
+ assert foobar in [4,5,6]
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=9)
+
+ @pytest.mark.parametrize(
+ "param_args",
+ ["'fixt, val'", "'fixt,val'", "['fixt', 'val']", "('fixt', 'val')"],
+ )
+ def test_override_parametrized_fixture_issue_979(self, testdir, param_args):
+ """Make sure a parametrized argument can override a parametrized fixture.
+
+ This was a regression introduced in the fix for #736.
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2])
+ def fixt(request):
+ return request.param
+
+ @pytest.mark.parametrize(%s, [(3, 'x'), (4, 'x')])
+ def test_foo(fixt, val):
+ pass
+ """
+ % param_args
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_scope_session(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(scope="module")
+ def arg():
+ values.append(1)
+ return 1
+
+ def test_1(arg):
+ assert arg == 1
+ def test_2(arg):
+ assert arg == 1
+ assert len(values) == 1
+ class TestClass(object):
+ def test3(self, arg):
+ assert arg == 1
+ assert len(values) == 1
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=3)
+
+ def test_scope_session_exc(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(scope="session")
+ def fix():
+ values.append(1)
+ pytest.skip('skipping')
+
+ def test_1(fix):
+ pass
+ def test_2(fix):
+ pass
+ def test_last():
+ assert values == [1]
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(skipped=2, passed=1)
+
+ def test_scope_session_exc_two_fix(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ m = []
+ @pytest.fixture(scope="session")
+ def a():
+ values.append(1)
+ pytest.skip('skipping')
+ @pytest.fixture(scope="session")
+ def b(a):
+ m.append(1)
+
+ def test_1(b):
+ pass
+ def test_2(b):
+ pass
+ def test_last():
+ assert values == [1]
+ assert m == []
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(skipped=2, passed=1)
+
+ def test_scope_exc(self, testdir):
+ testdir.makepyfile(
+ test_foo="""
+ def test_foo(fix):
+ pass
+ """,
+ test_bar="""
+ def test_bar(fix):
+ pass
+ """,
+ conftest="""
+ import pytest
+ reqs = []
+ @pytest.fixture(scope="session")
+ def fix(request):
+ reqs.append(1)
+ pytest.skip()
+ @pytest.fixture
+ def req_list():
+ return reqs
+ """,
+ test_real="""
+ def test_last(req_list):
+ assert req_list == [1]
+ """,
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(skipped=2, passed=1)
+
+ def test_scope_module_uses_session(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(scope="module")
+ def arg():
+ values.append(1)
+ return 1
+
+ def test_1(arg):
+ assert arg == 1
+ def test_2(arg):
+ assert arg == 1
+ assert len(values) == 1
+ class TestClass(object):
+ def test3(self, arg):
+ assert arg == 1
+ assert len(values) == 1
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=3)
+
+ def test_scope_module_and_finalizer(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ finalized_list = []
+ created_list = []
+ @pytest.fixture(scope="module")
+ def arg(request):
+ created_list.append(1)
+ assert request.scope == "module"
+ request.addfinalizer(lambda: finalized_list.append(1))
+ @pytest.fixture
+ def created(request):
+ return len(created_list)
+ @pytest.fixture
+ def finalized(request):
+ return len(finalized_list)
+ """
+ )
+ testdir.makepyfile(
+ test_mod1="""
+ def test_1(arg, created, finalized):
+ assert created == 1
+ assert finalized == 0
+ def test_2(arg, created, finalized):
+ assert created == 1
+ assert finalized == 0""",
+ test_mod2="""
+ def test_3(arg, created, finalized):
+ assert created == 2
+ assert finalized == 1""",
+            test_mod3="""
+ def test_4(arg, created, finalized):
+ assert created == 3
+ assert finalized == 2
+ """,
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=4)
+
+ @pytest.mark.parametrize(
+ "method",
+ [
+ 'request.getfixturevalue("arg")',
+ 'request.cached_setup(lambda: None, scope="function")',
+ ],
+ ids=["getfixturevalue", "cached_setup"],
+ )
+ def test_scope_mismatch_various(self, testdir, method):
+ testdir.makeconftest(
+ """
+ import pytest
+ finalized = []
+ created = []
+ @pytest.fixture(scope="function")
+ def arg(request):
+ pass
+ """
+ )
+ testdir.makepyfile(
+ test_mod1="""
+ import pytest
+ @pytest.fixture(scope="session")
+ def arg(request):
+ %s
+ def test_1(arg):
+ pass
+ """
+ % method
+ )
+ result = testdir.runpytest()
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(
+ ["*ScopeMismatch*You tried*function*session*request*"]
+ )
+
+ def test_register_only_with_mark(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ @pytest.fixture()
+ def arg():
+ return 1
+ """
+ )
+ testdir.makepyfile(
+ test_mod1="""
+ import pytest
+ @pytest.fixture()
+ def arg(arg):
+ return arg + 1
+ def test_1(arg):
+ assert arg == 2
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_parametrize_and_scope(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="module", params=["a", "b", "c"])
+ def arg(request):
+ return request.param
+ values = []
+ def test_param(arg):
+ values.append(arg)
+ """
+ )
+ reprec = testdir.inline_run("-v")
+ reprec.assertoutcome(passed=3)
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
+ assert len(values) == 3
+ assert "a" in values
+ assert "b" in values
+ assert "c" in values
+
+ def test_scope_mismatch(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ @pytest.fixture(scope="function")
+ def arg(request):
+ pass
+ """
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="session")
+ def arg(arg):
+ pass
+ def test_mismatch(arg):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*ScopeMismatch*", "*1 error*"])
+
+ def test_parametrize_separated_order(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope="module", params=[1, 2])
+ def arg(request):
+ return request.param
+
+ values = []
+ def test_1(arg):
+ values.append(arg)
+ def test_2(arg):
+ values.append(arg)
+ """
+ )
+ reprec = testdir.inline_run("-v")
+ reprec.assertoutcome(passed=4)
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
+ assert values == [1, 1, 2, 2]
+
+ def test_module_parametrized_ordering(self, testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ console_output_style=classic
+ """
+ )
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(scope="session", params="s1 s2".split())
+ def sarg():
+ pass
+ @pytest.fixture(scope="module", params="m1 m2".split())
+ def marg():
+ pass
+ """
+ )
+ testdir.makepyfile(
+ test_mod1="""
+ def test_func(sarg):
+ pass
+ def test_func1(marg):
+ pass
+ """,
+ test_mod2="""
+ def test_func2(sarg):
+ pass
+ def test_func3(sarg, marg):
+ pass
+ def test_func3b(sarg, marg):
+ pass
+ def test_func4(marg):
+ pass
+ """,
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ """
+ test_mod1.py::test_func[s1] PASSED
+ test_mod2.py::test_func2[s1] PASSED
+ test_mod2.py::test_func3[s1-m1] PASSED
+ test_mod2.py::test_func3b[s1-m1] PASSED
+ test_mod2.py::test_func3[s1-m2] PASSED
+ test_mod2.py::test_func3b[s1-m2] PASSED
+ test_mod1.py::test_func[s2] PASSED
+ test_mod2.py::test_func2[s2] PASSED
+ test_mod2.py::test_func3[s2-m1] PASSED
+ test_mod2.py::test_func3b[s2-m1] PASSED
+ test_mod2.py::test_func4[m1] PASSED
+ test_mod2.py::test_func3[s2-m2] PASSED
+ test_mod2.py::test_func3b[s2-m2] PASSED
+ test_mod2.py::test_func4[m2] PASSED
+ test_mod1.py::test_func1[m1] PASSED
+ test_mod1.py::test_func1[m2] PASSED
+ """
+ )
+
+ def test_dynamic_parametrized_ordering(self, testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ console_output_style=classic
+ """
+ )
+ testdir.makeconftest(
+ """
+ import pytest
+
+ def pytest_configure(config):
+ class DynamicFixturePlugin(object):
+ @pytest.fixture(scope='session', params=['flavor1', 'flavor2'])
+ def flavor(self, request):
+ return request.param
+ config.pluginmanager.register(DynamicFixturePlugin(), 'flavor-fixture')
+
+ @pytest.fixture(scope='session', params=['vxlan', 'vlan'])
+ def encap(request):
+ return request.param
+
+ @pytest.fixture(scope='session', autouse='True')
+ def reprovision(request, flavor, encap):
+ pass
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test(reprovision):
+ pass
+ def test2(reprovision):
+ pass
+ """
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ """
+ test_dynamic_parametrized_ordering.py::test[flavor1-vxlan] PASSED
+ test_dynamic_parametrized_ordering.py::test2[flavor1-vxlan] PASSED
+ test_dynamic_parametrized_ordering.py::test[flavor2-vxlan] PASSED
+ test_dynamic_parametrized_ordering.py::test2[flavor2-vxlan] PASSED
+ test_dynamic_parametrized_ordering.py::test[flavor2-vlan] PASSED
+ test_dynamic_parametrized_ordering.py::test2[flavor2-vlan] PASSED
+ test_dynamic_parametrized_ordering.py::test[flavor1-vlan] PASSED
+ test_dynamic_parametrized_ordering.py::test2[flavor1-vlan] PASSED
+ """
+ )
+
+ def test_class_ordering(self, testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ console_output_style=classic
+ """
+ )
+ testdir.makeconftest(
+ """
+ import pytest
+
+ values = []
+
+ @pytest.fixture(scope="function", params=[1,2])
+ def farg(request):
+ return request.param
+
+ @pytest.fixture(scope="class", params=list("ab"))
+ def carg(request):
+ return request.param
+
+ @pytest.fixture(scope="function", autouse=True)
+ def append(request, farg, carg):
+ def fin():
+ values.append("fin_%s%s" % (carg, farg))
+ request.addfinalizer(fin)
+ """
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+
+ class TestClass2(object):
+ def test_1(self):
+ pass
+ def test_2(self):
+ pass
+ class TestClass(object):
+ def test_3(self):
+ pass
+ """
+ )
+ result = testdir.runpytest("-vs")
+ result.stdout.re_match_lines(
+ r"""
+ test_class_ordering.py::TestClass2::test_1\[a-1\] PASSED
+ test_class_ordering.py::TestClass2::test_1\[a-2\] PASSED
+ test_class_ordering.py::TestClass2::test_2\[a-1\] PASSED
+ test_class_ordering.py::TestClass2::test_2\[a-2\] PASSED
+ test_class_ordering.py::TestClass2::test_1\[b-1\] PASSED
+ test_class_ordering.py::TestClass2::test_1\[b-2\] PASSED
+ test_class_ordering.py::TestClass2::test_2\[b-1\] PASSED
+ test_class_ordering.py::TestClass2::test_2\[b-2\] PASSED
+ test_class_ordering.py::TestClass::test_3\[a-1\] PASSED
+ test_class_ordering.py::TestClass::test_3\[a-2\] PASSED
+ test_class_ordering.py::TestClass::test_3\[b-1\] PASSED
+ test_class_ordering.py::TestClass::test_3\[b-2\] PASSED
+ """
+ )
+
+ def test_parametrize_separated_order_higher_scope_first(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope="function", params=[1, 2])
+ def arg(request):
+ param = request.param
+ request.addfinalizer(lambda: values.append("fin:%s" % param))
+ values.append("create:%s" % param)
+ return request.param
+
+ @pytest.fixture(scope="module", params=["mod1", "mod2"])
+ def modarg(request):
+ param = request.param
+ request.addfinalizer(lambda: values.append("fin:%s" % param))
+ values.append("create:%s" % param)
+ return request.param
+
+ values = []
+ def test_1(arg):
+ values.append("test1")
+ def test_2(modarg):
+ values.append("test2")
+ def test_3(arg, modarg):
+ values.append("test3")
+ def test_4(modarg, arg):
+ values.append("test4")
+ """
+ )
+ reprec = testdir.inline_run("-v")
+ reprec.assertoutcome(passed=12)
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
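+        # module-scoped "modarg" is created once per module parameter and finalized last,
+        # while function-scoped "arg" is created and finalized around each test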
+ expected = [
+ "create:1",
+ "test1",
+ "fin:1",
+ "create:2",
+ "test1",
+ "fin:2",
+ "create:mod1",
+ "test2",
+ "create:1",
+ "test3",
+ "fin:1",
+ "create:2",
+ "test3",
+ "fin:2",
+ "create:1",
+ "test4",
+ "fin:1",
+ "create:2",
+ "test4",
+ "fin:2",
+ "fin:mod1",
+ "create:mod2",
+ "test2",
+ "create:1",
+ "test3",
+ "fin:1",
+ "create:2",
+ "test3",
+ "fin:2",
+ "create:1",
+ "test4",
+ "fin:1",
+ "create:2",
+ "test4",
+ "fin:2",
+ "fin:mod2",
+ ]
+ import pprint
+
+ pprint.pprint(list(zip(values, expected)))
+ assert values == expected
+
+ def test_parametrized_fixture_teardown_order(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(params=[1,2], scope="class")
+ def param1(request):
+ return request.param
+
+ values = []
+
+ class TestClass(object):
+ @classmethod
+ @pytest.fixture(scope="class", autouse=True)
+ def setup1(self, request, param1):
+ values.append(1)
+ request.addfinalizer(self.teardown1)
+ @classmethod
+ def teardown1(self):
+ assert values.pop() == 1
+ @pytest.fixture(scope="class", autouse=True)
+ def setup2(self, request, param1):
+ values.append(2)
+ request.addfinalizer(self.teardown2)
+ @classmethod
+ def teardown2(self):
+ assert values.pop() == 2
+ def test(self):
+ pass
+
+ def test_finish():
+ assert not values
+ """
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ """
+ *3 passed*
+ """
+ )
+ assert "error" not in result.stdout.str()
+
+ def test_fixture_finalizer(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ import sys
+
+ @pytest.fixture
+ def browser(request):
+
+ def finalize():
+ sys.stdout.write('Finalized')
+ request.addfinalizer(finalize)
+ return {}
+ """
+ )
+ b = testdir.mkdir("subdir")
+ b.join("test_overridden_fixture_finalizer.py").write(
+ dedent(
+ """
+ import pytest
+ @pytest.fixture
+ def browser(browser):
+ browser['visited'] = True
+ return browser
+
+ def test_browser(browser):
+ assert browser['visited'] is True
+ """
+ )
+ )
+ reprec = testdir.runpytest("-s")
+ for test in ["test_browser"]:
+ reprec.stdout.fnmatch_lines("*Finalized*")
+
+ def test_class_scope_with_normal_tests(self, testdir):
+ testpath = testdir.makepyfile(
+ """
+ import pytest
+
+ class Box(object):
+ value = 0
+
+ @pytest.fixture(scope='class')
+ def a(request):
+ Box.value += 1
+ return Box.value
+
+ def test_a(a):
+ assert a == 1
+
+ class Test1(object):
+ def test_b(self, a):
+ assert a == 2
+
+ class Test2(object):
+ def test_c(self, a):
+ assert a == 3"""
+ )
+ reprec = testdir.inline_run(testpath)
+ for test in ["test_a", "test_b", "test_c"]:
+ assert reprec.matchreport(test).passed
+
+ def test_request_is_clean(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(params=[1, 2])
+ def fix(request):
+ request.addfinalizer(lambda: values.append(request.param))
+ def test_fix(fix):
+ pass
+ """
+ )
+ reprec = testdir.inline_run("-s")
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
+ assert values == [1, 2]
+
+ def test_parametrize_separated_lifecycle(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ values = []
+ @pytest.fixture(scope="module", params=[1, 2])
+ def arg(request):
+ x = request.param
+ request.addfinalizer(lambda: values.append("fin%s" % x))
+ return request.param
+ def test_1(arg):
+ values.append(arg)
+ def test_2(arg):
+ values.append(arg)
+ """
+ )
+ reprec = testdir.inline_run("-vs")
+ reprec.assertoutcome(passed=4)
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
+ import pprint
+
+ pprint.pprint(values)
+ # assert len(values) == 6
+ assert values[0] == values[1] == 1
+ assert values[2] == "fin1"
+ assert values[3] == values[4] == 2
+ assert values[5] == "fin2"
+
+ def test_parametrize_function_scoped_finalizers_called(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope="function", params=[1, 2])
+ def arg(request):
+ x = request.param
+ request.addfinalizer(lambda: values.append("fin%s" % x))
+ return request.param
+
+ values = []
+ def test_1(arg):
+ values.append(arg)
+ def test_2(arg):
+ values.append(arg)
+ def test_3():
+ assert len(values) == 8
+ assert values == [1, "fin1", 2, "fin2", 1, "fin1", 2, "fin2"]
+ """
+ )
+ reprec = testdir.inline_run("-v")
+ reprec.assertoutcome(passed=5)
+
+ @pytest.mark.issue246
+ @pytest.mark.parametrize("scope", ["session", "function", "module"])
+ def test_finalizer_order_on_parametrization(self, scope, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+
+ @pytest.fixture(scope=%(scope)r, params=["1"])
+ def fix1(request):
+ return request.param
+
+ @pytest.fixture(scope=%(scope)r)
+ def fix2(request, base):
+ def cleanup_fix2():
+ assert not values, "base should not have been finalized"
+ request.addfinalizer(cleanup_fix2)
+
+ @pytest.fixture(scope=%(scope)r)
+ def base(request, fix1):
+ def cleanup_base():
+ values.append("fin_base")
+ print ("finalizing base")
+ request.addfinalizer(cleanup_base)
+
+ def test_begin():
+ pass
+ def test_baz(base, fix2):
+ pass
+ def test_other():
+ pass
+ """
+ % {"scope": scope}
+ )
+ reprec = testdir.inline_run("-lvs")
+ reprec.assertoutcome(passed=3)
+
+ @pytest.mark.issue396
+ def test_class_scope_parametrization_ordering(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(params=["John", "Doe"], scope="class")
+ def human(request):
+ request.addfinalizer(lambda: values.append("fin %s" % request.param))
+ return request.param
+
+ class TestGreetings(object):
+ def test_hello(self, human):
+ values.append("test_hello")
+
+ class TestMetrics(object):
+ def test_name(self, human):
+ values.append("test_name")
+
+ def test_population(self, human):
+ values.append("test_population")
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=6)
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
+ assert (
+ values
+ == [
+ "test_hello",
+ "fin John",
+ "test_hello",
+ "fin Doe",
+ "test_name",
+ "test_population",
+ "fin John",
+ "test_name",
+ "test_population",
+ "fin Doe",
+ ]
+ )
+
+ def test_parametrize_setup_function(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope="module", params=[1, 2])
+ def arg(request):
+ return request.param
+
+ @pytest.fixture(scope="module", autouse=True)
+ def mysetup(request, arg):
+ request.addfinalizer(lambda: values.append("fin%s" % arg))
+ values.append("setup%s" % arg)
+
+ values = []
+ def test_1(arg):
+ values.append(arg)
+ def test_2(arg):
+ values.append(arg)
+ def test_3():
+ import pprint
+ pprint.pprint(values)
+ if arg == 1:
+ assert values == ["setup1", 1, 1, ]
+ elif arg == 2:
+ assert values == ["setup1", 1, 1, "fin1",
+ "setup2", 2, 2, ]
+
+ """
+ )
+ reprec = testdir.inline_run("-v")
+ reprec.assertoutcome(passed=6)
+
+ def test_fixture_marked_function_not_collected_as_test(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def test_app():
+ return 1
+
+ def test_something(test_app):
+ assert test_app == 1
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_params_and_ids(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[object(), object()],
+ ids=['alpha', 'beta'])
+ def fix(request):
+ return request.param
+
+ def test_foo(fix):
+ assert 1
+ """
+ )
+ res = testdir.runpytest("-v")
+ res.stdout.fnmatch_lines(["*test_foo*alpha*", "*test_foo*beta*"])
+
+ def test_params_and_ids_yieldfixture(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.yield_fixture(params=[object(), object()],
+ ids=['alpha', 'beta'])
+ def fix(request):
+ yield request.param
+
+ def test_foo(fix):
+ assert 1
+ """
+ )
+ res = testdir.runpytest("-v")
+ res.stdout.fnmatch_lines(["*test_foo*alpha*", "*test_foo*beta*"])
+
+ @pytest.mark.issue920
+ def test_deterministic_fixture_collection(self, testdir, monkeypatch):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope="module",
+ params=["A",
+ "B",
+ "C"])
+ def A(request):
+ return request.param
+
+ @pytest.fixture(scope="module",
+ params=["DDDDDDDDD", "EEEEEEEEEEEE", "FFFFFFFFFFF", "banansda"])
+ def B(request, A):
+ return request.param
+
+ def test_foo(B):
+                # Something funky is going on here: despite the specified
+                # hash seeds, what gets collected sometimes produces
+                # unexpected passes; hashing B seems to help.
+ assert hash(B) or True
+ """
+ )
+ monkeypatch.setenv("PYTHONHASHSEED", "1")
+ out1 = testdir.runpytest_subprocess("-v")
+ monkeypatch.setenv("PYTHONHASHSEED", "2")
+ out2 = testdir.runpytest_subprocess("-v")
+ out1 = [
+ line
+ for line in out1.outlines
+ if line.startswith("test_deterministic_fixture_collection.py::test_foo")
+ ]
+ out2 = [
+ line
+ for line in out2.outlines
+ if line.startswith("test_deterministic_fixture_collection.py::test_foo")
+ ]
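+        # 3 params for A x 4 params for B = 12 collected items; order must match across hash seeds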
+ assert len(out1) == 12
+ assert out1 == out2
+
+
+class TestRequestScopeAccess(object):
+ pytestmark = pytest.mark.parametrize(
+ ("scope", "ok", "error"),
+ [
+ ["session", "", "fspath class function module"],
+ ["module", "module fspath", "cls function"],
+ ["class", "module fspath cls", "function"],
+ ["function", "module fspath cls function", ""],
+ ],
+ )
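+    # "ok" lists the request attributes available at that scope; "error" lists those expected to raise AttributeError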
+
+ def test_setup(self, testdir, scope, ok, error):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope=%r, autouse=True)
+ def myscoped(request):
+ for x in %r:
+ assert hasattr(request, x)
+ for x in %r:
+ pytest.raises(AttributeError, lambda:
+ getattr(request, x))
+ assert request.session
+ assert request.config
+ def test_func():
+ pass
+ """
+ % (scope, ok.split(), error.split())
+ )
+ reprec = testdir.inline_run("-l")
+ reprec.assertoutcome(passed=1)
+
+ def test_funcarg(self, testdir, scope, ok, error):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope=%r)
+ def arg(request):
+ for x in %r:
+ assert hasattr(request, x)
+ for x in %r:
+ pytest.raises(AttributeError, lambda:
+ getattr(request, x))
+ assert request.session
+ assert request.config
+ def test_func(arg):
+ pass
+ """
+ % (scope, ok.split(), error.split())
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+class TestErrors(object):
+
+ def test_subfactory_missing_funcarg(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture()
+ def gen(qwe123):
+ return 1
+ def test_something(gen):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(
+ ["*def gen(qwe123):*", "*fixture*qwe123*not found*", "*1 error*"]
+ )
+
+ def test_issue498_fixture_finalizer_failing(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def fix1(request):
+ def f():
+ raise KeyError
+ request.addfinalizer(f)
+ return object()
+
+ values = []
+ def test_1(fix1):
+ values.append(fix1)
+ def test_2(fix1):
+ values.append(fix1)
+ def test_3():
+ assert values[0] != values[1]
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *ERROR*teardown*test_1*
+ *KeyError*
+ *ERROR*teardown*test_2*
+ *KeyError*
+ *3 pass*2 error*
+ """
+ )
+
+ def test_setupfunc_missing_funcarg(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(autouse=True)
+ def gen(qwe123):
+ return 1
+ def test_something():
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(
+ ["*def gen(qwe123):*", "*fixture*qwe123*not found*", "*1 error*"]
+ )
+
+
+class TestShowFixtures(object):
+
+ def test_funcarg_compat(self, testdir):
+ config = testdir.parseconfigure("--funcargs")
+ assert config.option.showfixtures
+
+ def test_show_fixtures(self, testdir):
+ result = testdir.runpytest("--fixtures")
+ result.stdout.fnmatch_lines(["*tmpdir*", "*temporary directory*"])
+
+ def test_show_fixtures_verbose(self, testdir):
+ result = testdir.runpytest("--fixtures", "-v")
+ result.stdout.fnmatch_lines(["*tmpdir*--*tmpdir.py*", "*temporary directory*"])
+
+ def test_show_fixtures_testmodule(self, testdir):
+ p = testdir.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def _arg0():
+ """ hidden """
+ @pytest.fixture
+ def arg1():
+ """ hello world """
+ '''
+ )
+ result = testdir.runpytest("--fixtures", p)
+ result.stdout.fnmatch_lines(
+ """
+ *tmpdir
+ *fixtures defined from*
+ *arg1*
+ *hello world*
+ """
+ )
+ assert "arg0" not in result.stdout.str()
+
+ @pytest.mark.parametrize("testmod", [True, False])
+ def test_show_fixtures_conftest(self, testdir, testmod):
+ testdir.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """ hello world """
+ '''
+ )
+ if testmod:
+ testdir.makepyfile(
+ """
+ def test_hello():
+ pass
+ """
+ )
+ result = testdir.runpytest("--fixtures")
+ result.stdout.fnmatch_lines(
+ """
+ *tmpdir*
+ *fixtures defined from*conftest*
+ *arg1*
+ *hello world*
+ """
+ )
+
+ def test_show_fixtures_trimmed_doc(self, testdir):
+ p = testdir.makepyfile(
+ dedent(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """
+ line1
+ line2
+
+ """
+ @pytest.fixture
+ def arg2():
+ """
+ line1
+ line2
+
+ """
+ '''
+ )
+ )
+ result = testdir.runpytest("--fixtures", p)
+ result.stdout.fnmatch_lines(
+ dedent(
+ """
+ * fixtures defined from test_show_fixtures_trimmed_doc *
+ arg2
+ line1
+ line2
+ arg1
+ line1
+ line2
+
+ """
+ )
+ )
+
+ def test_show_fixtures_indented_doc(self, testdir):
+ p = testdir.makepyfile(
+ dedent(
+ '''
+ import pytest
+ @pytest.fixture
+ def fixture1():
+ """
+ line1
+ indented line
+ """
+ '''
+ )
+ )
+ result = testdir.runpytest("--fixtures", p)
+ result.stdout.fnmatch_lines(
+ dedent(
+ """
+ * fixtures defined from test_show_fixtures_indented_doc *
+ fixture1
+ line1
+ indented line
+ """
+ )
+ )
+
+ def test_show_fixtures_indented_doc_first_line_unindented(self, testdir):
+ p = testdir.makepyfile(
+ dedent(
+ '''
+ import pytest
+ @pytest.fixture
+ def fixture1():
+ """line1
+ line2
+ indented line
+ """
+ '''
+ )
+ )
+ result = testdir.runpytest("--fixtures", p)
+ result.stdout.fnmatch_lines(
+ dedent(
+ """
+ * fixtures defined from test_show_fixtures_indented_doc_first_line_unindented *
+ fixture1
+ line1
+ line2
+ indented line
+ """
+ )
+ )
+
+ def test_show_fixtures_indented_in_class(self, testdir):
+ p = testdir.makepyfile(
+ dedent(
+ '''
+ import pytest
+ class TestClass(object):
+ @pytest.fixture
+ def fixture1(self):
+ """line1
+ line2
+ indented line
+ """
+ '''
+ )
+ )
+ result = testdir.runpytest("--fixtures", p)
+ result.stdout.fnmatch_lines(
+ dedent(
+ """
+ * fixtures defined from test_show_fixtures_indented_in_class *
+ fixture1
+ line1
+ line2
+ indented line
+ """
+ )
+ )
+
+ def test_show_fixtures_different_files(self, testdir):
+ """
+        Regression test for #833: --fixtures used to show fixtures only from the first file.
+ """
+ testdir.makepyfile(
+ test_a='''
+ import pytest
+
+ @pytest.fixture
+ def fix_a():
+ """Fixture A"""
+ pass
+
+ def test_a(fix_a):
+ pass
+ '''
+ )
+ testdir.makepyfile(
+ test_b='''
+ import pytest
+
+ @pytest.fixture
+ def fix_b():
+ """Fixture B"""
+ pass
+
+ def test_b(fix_b):
+ pass
+ '''
+ )
+ result = testdir.runpytest("--fixtures")
+ result.stdout.fnmatch_lines(
+ """
+ * fixtures defined from test_a *
+ fix_a
+ Fixture A
+
+ * fixtures defined from test_b *
+ fix_b
+ Fixture B
+ """
+ )
+
+ def test_show_fixtures_with_same_name(self, testdir):
+ testdir.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """Hello World in conftest.py"""
+ return "Hello World"
+ '''
+ )
+ testdir.makepyfile(
+ """
+ def test_foo(arg1):
+ assert arg1 == "Hello World"
+ """
+ )
+ testdir.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """Hi from test module"""
+ return "Hi"
+ def test_bar(arg1):
+ assert arg1 == "Hi"
+ '''
+ )
+ result = testdir.runpytest("--fixtures")
+ result.stdout.fnmatch_lines(
+ """
+ * fixtures defined from conftest *
+ arg1
+ Hello World in conftest.py
+
+ * fixtures defined from test_show_fixtures_with_same_name *
+ arg1
+ Hi from test module
+ """
+ )
+
+ def test_fixture_disallow_twice(self):
+ """Test that applying @pytest.fixture twice generates an error (#2334)."""
+ with pytest.raises(ValueError):
+
+ @pytest.fixture
+ @pytest.fixture
+ def foo():
+ pass
+
+
+@pytest.mark.parametrize("flavor", ["fixture", "yield_fixture"])
+class TestContextManagerFixtureFuncs(object):
+
+ def test_simple(self, testdir, flavor):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.{flavor}
+ def arg1():
+ print ("setup")
+ yield 1
+ print ("teardown")
+ def test_1(arg1):
+ print ("test1 %s" % arg1)
+ def test_2(arg1):
+ print ("test2 %s" % arg1)
+ assert 0
+ """.format(
+ flavor=flavor
+ )
+ )
+ result = testdir.runpytest("-s")
+ result.stdout.fnmatch_lines(
+ """
+ *setup*
+ *test1 1*
+ *teardown*
+ *setup*
+ *test2 1*
+ *teardown*
+ """
+ )
+
+ def test_scoped(self, testdir, flavor):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.{flavor}(scope="module")
+ def arg1():
+ print ("setup")
+ yield 1
+ print ("teardown")
+ def test_1(arg1):
+ print ("test1 %s" % arg1)
+ def test_2(arg1):
+ print ("test2 %s" % arg1)
+ """.format(
+ flavor=flavor
+ )
+ )
+ result = testdir.runpytest("-s")
+ result.stdout.fnmatch_lines(
+ """
+ *setup*
+ *test1 1*
+ *test2 1*
+ *teardown*
+ """
+ )
+
+ def test_setup_exception(self, testdir, flavor):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.{flavor}(scope="module")
+ def arg1():
+ pytest.fail("setup")
+ yield 1
+ def test_1(arg1):
+ pass
+ """.format(
+ flavor=flavor
+ )
+ )
+ result = testdir.runpytest("-s")
+ result.stdout.fnmatch_lines(
+ """
+ *pytest.fail*setup*
+ *1 error*
+ """
+ )
+
+ def test_teardown_exception(self, testdir, flavor):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.{flavor}(scope="module")
+ def arg1():
+ yield 1
+ pytest.fail("teardown")
+ def test_1(arg1):
+ pass
+ """.format(
+ flavor=flavor
+ )
+ )
+ result = testdir.runpytest("-s")
+ result.stdout.fnmatch_lines(
+ """
+ *pytest.fail*teardown*
+ *1 passed*1 error*
+ """
+ )
+
+ def test_yields_more_than_one(self, testdir, flavor):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.{flavor}(scope="module")
+ def arg1():
+ yield 1
+ yield 2
+ def test_1(arg1):
+ pass
+ """.format(
+ flavor=flavor
+ )
+ )
+ result = testdir.runpytest("-s")
+ result.stdout.fnmatch_lines(
+ """
+ *fixture function*
+ *test_yields*:2*
+ """
+ )
+
+ def test_custom_name(self, testdir, flavor):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.{flavor}(name='meow')
+ def arg1():
+ return 'mew'
+ def test_1(meow):
+ print(meow)
+ """.format(
+ flavor=flavor
+ )
+ )
+ result = testdir.runpytest("-s")
+ result.stdout.fnmatch_lines("*mew*")
+
+
+class TestParameterizedSubRequest(object):
+
+ def test_call_from_fixture(self, testdir):
+ testfile = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[0, 1, 2])
+ def fix_with_param(request):
+ return request.param
+
+ @pytest.fixture
+ def get_named_fixture(request):
+ return request.getfixturevalue('fix_with_param')
+
+ def test_foo(request, get_named_fixture):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ E*Failed: The requested fixture has no parameter defined for the current test.
+ E*
+ E*Requested fixture 'fix_with_param' defined in:
+ E*{}:4
+ E*Requested here:
+ E*{}:9
+ *1 error*
+ """.format(
+ testfile.basename, testfile.basename
+ )
+ )
+
+ def test_call_from_test(self, testdir):
+ testfile = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[0, 1, 2])
+ def fix_with_param(request):
+ return request.param
+
+ def test_foo(request):
+ request.getfixturevalue('fix_with_param')
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ E*Failed: The requested fixture has no parameter defined for the current test.
+ E*
+ E*Requested fixture 'fix_with_param' defined in:
+ E*{}:4
+ E*Requested here:
+ E*{}:8
+ *1 failed*
+ """.format(
+ testfile.basename, testfile.basename
+ )
+ )
+
+ def test_external_fixture(self, testdir):
+ conffile = testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(params=[0, 1, 2])
+ def fix_with_param(request):
+ return request.param
+ """
+ )
+
+ testfile = testdir.makepyfile(
+ """
+ def test_foo(request):
+ request.getfixturevalue('fix_with_param')
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ E*Failed: The requested fixture has no parameter defined for the current test.
+ E*
+ E*Requested fixture 'fix_with_param' defined in:
+ E*{}:4
+ E*Requested here:
+ E*{}:2
+ *1 failed*
+ """.format(
+ conffile.basename, testfile.basename
+ )
+ )
+
+ def test_non_relative_path(self, testdir):
+ tests_dir = testdir.mkdir("tests")
+ fixdir = testdir.mkdir("fixtures")
+ fixfile = fixdir.join("fix.py")
+ fixfile.write(
+ _pytest._code.Source(
+ """
+ import pytest
+
+ @pytest.fixture(params=[0, 1, 2])
+ def fix_with_param(request):
+ return request.param
+ """
+ )
+ )
+
+ testfile = tests_dir.join("test_foos.py")
+ testfile.write(
+ _pytest._code.Source(
+ """
+ from fix import fix_with_param
+
+ def test_foo(request):
+ request.getfixturevalue('fix_with_param')
+ """
+ )
+ )
+
+ tests_dir.chdir()
+ testdir.syspathinsert(fixdir)
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ E*Failed: The requested fixture has no parameter defined for the current test.
+ E*
+ E*Requested fixture 'fix_with_param' defined in:
+ E*{}:5
+ E*Requested here:
+ E*{}:5
+ *1 failed*
+ """.format(
+ fixfile.strpath, testfile.basename
+ )
+ )
+
+
+def test_pytest_fixture_setup_and_post_finalizer_hook(testdir):
+ testdir.makeconftest(
+ """
+ from __future__ import print_function
+ def pytest_fixture_setup(fixturedef, request):
+ print('ROOT setup hook called for {0} from {1}'.format(fixturedef.argname, request.node.name))
+ def pytest_fixture_post_finalizer(fixturedef, request):
+ print('ROOT finalizer hook called for {0} from {1}'.format(fixturedef.argname, request.node.name))
+ """
+ )
+ testdir.makepyfile(
+ **{
+ "tests/conftest.py": """
+ from __future__ import print_function
+ def pytest_fixture_setup(fixturedef, request):
+ print('TESTS setup hook called for {0} from {1}'.format(fixturedef.argname, request.node.name))
+ def pytest_fixture_post_finalizer(fixturedef, request):
+ print('TESTS finalizer hook called for {0} from {1}'.format(fixturedef.argname, request.node.name))
+ """,
+ "tests/test_hooks.py": """
+ from __future__ import print_function
+ import pytest
+
+ @pytest.fixture()
+ def my_fixture():
+ return 'some'
+
+ def test_func(my_fixture):
+ print('TEST test_func')
+ assert my_fixture == 'some'
+ """,
+ }
+ )
+ result = testdir.runpytest("-s")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "*TESTS setup hook called for my_fixture from test_func*",
+ "*ROOT setup hook called for my_fixture from test_func*",
+ "*TEST test_func*",
+ "*TESTS finalizer hook called for my_fixture from test_func*",
+ "*ROOT finalizer hook called for my_fixture from test_func*",
+ ]
+ )
+
+
+class TestScopeOrdering(object):
+ """Class of tests that ensure fixtures are ordered based on their scopes (#2405)"""
+
+ @pytest.mark.parametrize("use_mark", [True, False])
+ def test_func_closure_module_auto(self, testdir, use_mark):
+ """Semantically identical to the example posted in #2405 when ``use_mark=True``"""
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='module', autouse={autouse})
+ def m1(): pass
+
+ if {use_mark}:
+ pytestmark = pytest.mark.usefixtures('m1')
+
+ @pytest.fixture(scope='function', autouse=True)
+ def f1(): pass
+
+ def test_func(m1):
+ pass
+ """.format(
+ autouse=not use_mark, use_mark=use_mark
+ )
+ )
+ items, _ = testdir.inline_genitems()
+ request = FixtureRequest(items[0])
+ assert request.fixturenames == "m1 f1".split()
+
+ def test_func_closure_with_native_fixtures(self, testdir, monkeypatch):
+ """Sanity check that verifies the order returned by the closures and the actual fixture execution order:
+ The execution order may differ because of fixture inter-dependencies.
+ """
+ monkeypatch.setattr(pytest, "FIXTURE_ORDER", [], raising=False)
+ testdir.makepyfile(
+ """
+ import pytest
+
+ FIXTURE_ORDER = pytest.FIXTURE_ORDER
+
+ @pytest.fixture(scope="session")
+ def s1():
+ FIXTURE_ORDER.append('s1')
+
+ @pytest.fixture(scope="module")
+ def m1():
+ FIXTURE_ORDER.append('m1')
+
+ @pytest.fixture(scope='session')
+ def my_tmpdir_factory():
+ FIXTURE_ORDER.append('my_tmpdir_factory')
+
+ @pytest.fixture
+ def my_tmpdir(my_tmpdir_factory):
+ FIXTURE_ORDER.append('my_tmpdir')
+
+ @pytest.fixture
+ def f1(my_tmpdir):
+ FIXTURE_ORDER.append('f1')
+
+ @pytest.fixture
+ def f2():
+ FIXTURE_ORDER.append('f2')
+
+ def test_foo(f1, m1, f2, s1): pass
+ """
+ )
+ items, _ = testdir.inline_genitems()
+ request = FixtureRequest(items[0])
+ # order of fixtures based on their scope and position in the parameter list
+ assert request.fixturenames == "s1 my_tmpdir_factory m1 f1 f2 my_tmpdir".split()
+ testdir.runpytest()
+ # actual fixture execution differs: dependent fixtures must be created first ("my_tmpdir")
+ assert pytest.FIXTURE_ORDER == "s1 my_tmpdir_factory m1 my_tmpdir f1 f2".split()
+
+ def test_func_closure_module(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='module')
+ def m1(): pass
+
+ @pytest.fixture(scope='function')
+ def f1(): pass
+
+ def test_func(f1, m1):
+ pass
+ """
+ )
+ items, _ = testdir.inline_genitems()
+ request = FixtureRequest(items[0])
+ assert request.fixturenames == "m1 f1".split()
+
+ def test_func_closure_scopes_reordered(self, testdir):
+ """Test ensures that fixtures are ordered by scope regardless of the order of the parameters, although
+ fixtures of same scope keep the declared order
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='session')
+ def s1(): pass
+
+ @pytest.fixture(scope='module')
+ def m1(): pass
+
+ @pytest.fixture(scope='function')
+ def f1(): pass
+
+ @pytest.fixture(scope='function')
+ def f2(): pass
+
+ class Test:
+
+ @pytest.fixture(scope='class')
+ def c1(cls): pass
+
+ def test_func(self, f2, f1, c1, m1, s1):
+ pass
+ """
+ )
+ items, _ = testdir.inline_genitems()
+ request = FixtureRequest(items[0])
+ assert request.fixturenames == "s1 m1 c1 f2 f1".split()
+
+ def test_func_closure_same_scope_closer_root_first(self, testdir):
+ """Auto-use fixtures of same scope are ordered by closer-to-root first"""
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(scope='module', autouse=True)
+ def m_conf(): pass
+ """
+ )
+ testdir.makepyfile(
+ **{
+ "sub/conftest.py": """
+ import pytest
+
+ @pytest.fixture(scope='module', autouse=True)
+ def m_sub(): pass
+ """,
+ "sub/test_func.py": """
+ import pytest
+
+ @pytest.fixture(scope='module', autouse=True)
+ def m_test(): pass
+
+ @pytest.fixture(scope='function')
+ def f1(): pass
+
+ def test_func(m_test, f1):
+ pass
+ """,
+ }
+ )
+ items, _ = testdir.inline_genitems()
+ request = FixtureRequest(items[0])
+ assert request.fixturenames == "m_conf m_sub m_test f1".split()
+
+ def test_func_closure_all_scopes_complex(self, testdir):
+ """Complex test involving all scopes and mixing autouse with normal fixtures"""
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(scope='session')
+ def s1(): pass
+ """
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='module', autouse=True)
+ def m1(): pass
+
+ @pytest.fixture(scope='module')
+ def m2(s1): pass
+
+ @pytest.fixture(scope='function')
+ def f1(): pass
+
+ @pytest.fixture(scope='function')
+ def f2(): pass
+
+ class Test:
+
+ @pytest.fixture(scope='class', autouse=True)
+ def c1(self):
+ pass
+
+ def test_func(self, f2, f1, m2):
+ pass
+ """
+ )
+ items, _ = testdir.inline_genitems()
+ request = FixtureRequest(items[0])
+ assert request.fixturenames == "s1 m1 m2 c1 f2 f1".split()
diff --git a/third_party/python/pytest/testing/python/integration.py b/third_party/python/pytest/testing/python/integration.py
new file mode 100644
index 0000000000..f348fdc29f
--- /dev/null
+++ b/third_party/python/pytest/testing/python/integration.py
@@ -0,0 +1,453 @@
+import pytest
+from _pytest import python
+from _pytest import runner
+
+
+class TestOEJSKITSpecials(object):
+
+ def test_funcarg_non_pycollectobj(self, testdir): # rough jstests usage
+ testdir.makeconftest(
+ """
+ import pytest
+ def pytest_pycollect_makeitem(collector, name, obj):
+ if name == "MyClass":
+ return MyCollector(name, parent=collector)
+ class MyCollector(pytest.Collector):
+ def reportinfo(self):
+ return self.fspath, 3, "xyz"
+ """
+ )
+ modcol = testdir.getmodulecol(
+ """
+ import pytest
+ @pytest.fixture
+ def arg1(request):
+ return 42
+ class MyClass(object):
+ pass
+ """
+ )
+ # this hook finds funcarg factories
+ rep = runner.collect_one_node(collector=modcol)
+ clscol = rep.result[0]
+ clscol.obj = lambda arg1: None
+ clscol.funcargs = {}
+ pytest._fillfuncargs(clscol)
+ assert clscol.funcargs["arg1"] == 42
+
+ def test_autouse_fixture(self, testdir): # rough jstests usage
+ testdir.makeconftest(
+ """
+ import pytest
+ def pytest_pycollect_makeitem(collector, name, obj):
+ if name == "MyClass":
+ return MyCollector(name, parent=collector)
+ class MyCollector(pytest.Collector):
+ def reportinfo(self):
+ return self.fspath, 3, "xyz"
+ """
+ )
+ modcol = testdir.getmodulecol(
+ """
+ import pytest
+ @pytest.fixture(autouse=True)
+ def hello():
+ pass
+ @pytest.fixture
+ def arg1(request):
+ return 42
+ class MyClass(object):
+ pass
+ """
+ )
+ # this hook finds funcarg factories
+ rep = runner.collect_one_node(modcol)
+ clscol = rep.result[0]
+ clscol.obj = lambda: None
+ clscol.funcargs = {}
+ pytest._fillfuncargs(clscol)
+ assert not clscol.funcargs
+
+
+def test_wrapped_getfslineno():
+
+ def func():
+ pass
+
+ def wrap(f):
+ func.__wrapped__ = f
+ func.patchings = ["qwe"]
+ return func
+
+ @wrap
+ def wrapped_func(x, y, z):
+ pass
+
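+ # wrap() returns the outer func with __wrapped__ set to the decorated function, so correct unwrapping should report wrapped_func's later line number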
+ fs, lineno = python.getfslineno(wrapped_func)
+ fs2, lineno2 = python.getfslineno(wrap)
+ assert lineno > lineno2, "getfslineno does not unwrap correctly"
+
+
+class TestMockDecoration(object):
+
+ def test_wrapped_getfuncargnames(self):
+ from _pytest.compat import getfuncargnames
+
+ def wrap(f):
+
+ def func():
+ pass
+
+ func.__wrapped__ = f
+ return func
+
+ @wrap
+ def f(x):
+ pass
+
+ values = getfuncargnames(f)
+ assert values == ("x",)
+
+ @pytest.mark.xfail(
+ strict=False, reason="getfuncargnames breaks if mock is imported"
+ )
+ def test_wrapped_getfuncargnames_patching(self):
+ from _pytest.compat import getfuncargnames
+
+ def wrap(f):
+
+ def func():
+ pass
+
+ func.__wrapped__ = f
+ func.patchings = ["qwe"]
+ return func
+
+ @wrap
+ def f(x, y, z):
+ pass
+
+ values = getfuncargnames(f)
+ assert values == ("y", "z")
+
+ def test_unittest_mock(self, testdir):
+ pytest.importorskip("unittest.mock")
+ testdir.makepyfile(
+ """
+ import unittest.mock
+ class T(unittest.TestCase):
+ @unittest.mock.patch("os.path.abspath")
+ def test_hello(self, abspath):
+ import os
+ os.path.abspath("hello")
+ abspath.assert_any_call("hello")
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_unittest_mock_and_fixture(self, testdir):
+ pytest.importorskip("unittest.mock")
+ testdir.makepyfile(
+ """
+ import os.path
+ import unittest.mock
+ import pytest
+
+ @pytest.fixture
+ def inject_me():
+ pass
+
+ @unittest.mock.patch.object(os.path, "abspath",
+ new=unittest.mock.MagicMock)
+ def test_hello(inject_me):
+ import os
+ os.path.abspath("hello")
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_unittest_mock_and_pypi_mock(self, testdir):
+ pytest.importorskip("unittest.mock")
+ pytest.importorskip("mock", "1.0.1")
+ testdir.makepyfile(
+ """
+ import mock
+ import unittest.mock
+ class TestBoth(object):
+ @unittest.mock.patch("os.path.abspath")
+ def test_hello(self, abspath):
+ import os
+ os.path.abspath("hello")
+ abspath.assert_any_call("hello")
+
+ @mock.patch("os.path.abspath")
+ def test_hello_mock(self, abspath):
+ import os
+ os.path.abspath("hello")
+ abspath.assert_any_call("hello")
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_mock(self, testdir):
+ pytest.importorskip("mock", "1.0.1")
+ testdir.makepyfile(
+ """
+ import os
+ import unittest
+ import mock
+
+ class T(unittest.TestCase):
+ @mock.patch("os.path.abspath")
+ def test_hello(self, abspath):
+ os.path.abspath("hello")
+ abspath.assert_any_call("hello")
+ def mock_basename(path):
+ return "mock_basename"
+ @mock.patch("os.path.abspath")
+ @mock.patch("os.path.normpath")
+ @mock.patch("os.path.basename", new=mock_basename)
+ def test_someting(normpath, abspath, tmpdir):
+ abspath.return_value = "this"
+ os.path.normpath(os.path.abspath("hello"))
+ normpath.assert_any_call("this")
+ assert os.path.basename("123") == "mock_basename"
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+ calls = reprec.getcalls("pytest_runtest_logreport")
+ funcnames = [
+ call.report.location[2] for call in calls if call.report.when == "call"
+ ]
+ assert funcnames == ["T.test_hello", "test_someting"]
+
+ def test_mock_sorting(self, testdir):
+ pytest.importorskip("mock", "1.0.1")
+ testdir.makepyfile(
+ """
+ import os
+ import mock
+
+ @mock.patch("os.path.abspath")
+ def test_one(abspath):
+ pass
+ @mock.patch("os.path.abspath")
+ def test_two(abspath):
+ pass
+ @mock.patch("os.path.abspath")
+ def test_three(abspath):
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ calls = reprec.getreports("pytest_runtest_logreport")
+ calls = [x for x in calls if x.when == "call"]
+ names = [x.nodeid.split("::")[-1] for x in calls]
+ assert names == ["test_one", "test_two", "test_three"]
+
+ def test_mock_double_patch_issue473(self, testdir):
+ pytest.importorskip("mock", "1.0.1")
+ testdir.makepyfile(
+ """
+ from mock import patch
+ from pytest import mark
+
+ @patch('os.getcwd')
+ @patch('os.path')
+ @mark.slow
+ class TestSimple(object):
+ def test_simple_thing(self, mock_path, mock_getcwd):
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+class TestReRunTests(object):
+
+ def test_rerun(self, testdir):
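+ # the conftest below runs each item's test protocol twice: first without logging, then with logging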
+ testdir.makeconftest(
+ """
+ from _pytest.runner import runtestprotocol
+ def pytest_runtest_protocol(item, nextitem):
+ runtestprotocol(item, log=False, nextitem=nextitem)
+ runtestprotocol(item, log=True, nextitem=nextitem)
+ """
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+ count = 0
+ req = None
+ @pytest.fixture
+ def fix(request):
+ global count, req
+ assert request != req
+ req = request
+ print ("fix count %s" % count)
+ count += 1
+ def test_fix(fix):
+ pass
+ """
+ )
+ result = testdir.runpytest("-s")
+ result.stdout.fnmatch_lines(
+ """
+ *fix count 0*
+ *fix count 1*
+ """
+ )
+ result.stdout.fnmatch_lines(
+ """
+ *2 passed*
+ """
+ )
+
+
+def test_pytestconfig_is_session_scoped():
+ from _pytest.fixtures import pytestconfig
+
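+ # check the scope recorded on the fixture marker attached to the pytestconfig fixture function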
+ assert pytestconfig._pytestfixturefunction.scope == "session"
+
+
+class TestNoselikeTestAttribute(object):
+
+ def test_module_with_global_test(self, testdir):
+ testdir.makepyfile(
+ """
+ __test__ = False
+ def test_hello():
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ assert not reprec.getfailedcollections()
+ calls = reprec.getreports("pytest_runtest_logreport")
+ assert not calls
+
+ def test_class_and_method(self, testdir):
+ testdir.makepyfile(
+ """
+ __test__ = True
+ def test_func():
+ pass
+ test_func.__test__ = False
+
+ class TestSome(object):
+ __test__ = False
+ def test_method(self):
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ assert not reprec.getfailedcollections()
+ calls = reprec.getreports("pytest_runtest_logreport")
+ assert not calls
+
+ def test_unittest_class(self, testdir):
+ testdir.makepyfile(
+ """
+ import unittest
+ class TC(unittest.TestCase):
+ def test_1(self):
+ pass
+ class TC2(unittest.TestCase):
+ __test__ = False
+ def test_2(self):
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ assert not reprec.getfailedcollections()
+ call = reprec.getcalls("pytest_collection_modifyitems")[0]
+ assert len(call.items) == 1
+ assert call.items[0].cls.__name__ == "TC"
+
+ def test_class_with_nasty_getattr(self, testdir):
+ """Make sure we handle classes with a custom nasty __getattr__ right.
+
+ With a custom __getattr__ which e.g. returns a function (like with an
+ RPC wrapper), we shouldn't assume this means "__test__ = True".
+ """
+ # https://github.com/pytest-dev/pytest/issues/1204
+ testdir.makepyfile(
+ """
+ class MetaModel(type):
+
+ def __getattr__(cls, key):
+ return lambda: None
+
+
+ BaseModel = MetaModel('Model', (), {})
+
+
+ class Model(BaseModel):
+
+ __metaclass__ = MetaModel
+
+ def test_blah(self):
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ assert not reprec.getfailedcollections()
+ call = reprec.getcalls("pytest_collection_modifyitems")[0]
+ assert not call.items
+
+
+@pytest.mark.issue351
+class TestParameterize(object):
+
+ def test_idfn_marker(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ def idfn(param):
+ if param == 0:
+ return 'spam'
+ elif param == 1:
+ return 'ham'
+ else:
+ return None
+
+ @pytest.mark.parametrize('a,b', [(0, 2), (1, 2)], ids=idfn)
+ def test_params(a, b):
+ pass
+ """
+ )
+ res = testdir.runpytest("--collect-only")
+ res.stdout.fnmatch_lines(["*spam-2*", "*ham-2*"])
+
+ def test_idfn_fixture(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ def idfn(param):
+ if param == 0:
+ return 'spam'
+ elif param == 1:
+ return 'ham'
+ else:
+ return None
+
+ @pytest.fixture(params=[0, 1], ids=idfn)
+ def a(request):
+ return request.param
+
+ @pytest.fixture(params=[1, 2], ids=idfn)
+ def b(request):
+ return request.param
+
+ def test_params(a, b):
+ pass
+ """
+ )
+ res = testdir.runpytest("--collect-only")
+ res.stdout.fnmatch_lines(["*spam-2*", "*ham-2*"])
diff --git a/third_party/python/pytest/testing/python/metafunc.py b/third_party/python/pytest/testing/python/metafunc.py
new file mode 100644
index 0000000000..e181d3131f
--- /dev/null
+++ b/third_party/python/pytest/testing/python/metafunc.py
@@ -0,0 +1,1768 @@
+# -*- coding: utf-8 -*-
+import re
+import sys
+import attr
+import _pytest._code
+import py
+import pytest
+from _pytest import python, fixtures
+
+import hypothesis
+from hypothesis import strategies
+
+PY3 = sys.version_info >= (3, 0)
+
+
+class TestMetafunc(object):
+
+ def Metafunc(self, func, config=None):
+ # the unit tests of this class check if things work correctly
+ # on the funcarg level, so we don't need a full-blown
+ # initialization
+ class FixtureInfo(object):
+ name2fixturedefs = None
+
+ def __init__(self, names):
+ self.names_closure = names
+
+ @attr.s
+ class DefinitionMock(object):
+ obj = attr.ib()
+
+ names = fixtures.getfuncargnames(func)
+ fixtureinfo = FixtureInfo(names)
+ definition = DefinitionMock(func)
+ return python.Metafunc(definition, fixtureinfo, config)
+
+ def test_no_funcargs(self, testdir):
+
+ def function():
+ pass
+
+ metafunc = self.Metafunc(function)
+ assert not metafunc.fixturenames
+ repr(metafunc._calls)
+
+ def test_function_basic(self):
+
+ def func(arg1, arg2="qwe"):
+ pass
+
+ metafunc = self.Metafunc(func)
+ assert len(metafunc.fixturenames) == 1
+ assert "arg1" in metafunc.fixturenames
+ assert metafunc.function is func
+ assert metafunc.cls is None
+
+ def test_addcall_no_args(self):
+
+ def func(arg1):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.addcall()
+ assert len(metafunc._calls) == 1
+ call = metafunc._calls[0]
+ assert call.id == "0"
+ assert not hasattr(call, "param")
+
+ def test_addcall_id(self):
+
+ def func(arg1):
+ pass
+
+ metafunc = self.Metafunc(func)
+ pytest.raises(ValueError, "metafunc.addcall(id=None)")
+
+ metafunc.addcall(id=1)
+ pytest.raises(ValueError, "metafunc.addcall(id=1)")
+ pytest.raises(ValueError, "metafunc.addcall(id='1')")
+ metafunc.addcall(id=2)
+ assert len(metafunc._calls) == 2
+ assert metafunc._calls[0].id == "1"
+ assert metafunc._calls[1].id == "2"
+
+ def test_addcall_param(self):
+
+ def func(arg1):
+ pass
+
+ metafunc = self.Metafunc(func)
+
+ class obj(object):
+ pass
+
+ metafunc.addcall(param=obj)
+ metafunc.addcall(param=obj)
+ metafunc.addcall(param=1)
+ assert len(metafunc._calls) == 3
+ assert metafunc._calls[0].getparam("arg1") == obj
+ assert metafunc._calls[1].getparam("arg1") == obj
+ assert metafunc._calls[2].getparam("arg1") == 1
+
+ def test_addcall_funcargs(self):
+
+ def func(x):
+ pass
+
+ metafunc = self.Metafunc(func)
+
+ class obj(object):
+ pass
+
+ metafunc.addcall(funcargs={"x": 2})
+ metafunc.addcall(funcargs={"x": 3})
+ pytest.raises(pytest.fail.Exception, "metafunc.addcall({'xyz': 0})")
+ assert len(metafunc._calls) == 2
+ assert metafunc._calls[0].funcargs == {"x": 2}
+ assert metafunc._calls[1].funcargs == {"x": 3}
+ assert not hasattr(metafunc._calls[1], "param")
+
+ def test_parametrize_error(self):
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x", [1, 2])
+ pytest.raises(ValueError, lambda: metafunc.parametrize("x", [5, 6]))
+ pytest.raises(ValueError, lambda: metafunc.parametrize("x", [5, 6]))
+ metafunc.parametrize("y", [1, 2])
+ pytest.raises(ValueError, lambda: metafunc.parametrize("y", [5, 6]))
+ pytest.raises(ValueError, lambda: metafunc.parametrize("y", [5, 6]))
+
+ def test_parametrize_bad_scope(self, testdir):
+
+ def func(x):
+ pass
+
+ metafunc = self.Metafunc(func)
+ try:
+ metafunc.parametrize("x", [1], scope="doggy")
+ except ValueError as ve:
+ assert "has an unsupported scope value 'doggy'" in str(ve)
+
+ def test_parametrize_and_id(self):
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+
+ metafunc.parametrize("x", [1, 2], ids=["basic", "advanced"])
+ metafunc.parametrize("y", ["abc", "def"])
+ ids = [x.id for x in metafunc._calls]
+ assert ids == ["basic-abc", "basic-def", "advanced-abc", "advanced-def"]
+
+ def test_parametrize_and_id_unicode(self):
+ """Allow unicode strings for "ids" parameter in Python 2 (##1905)"""
+
+ def func(x):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x", [1, 2], ids=[u"basic", u"advanced"])
+ ids = [x.id for x in metafunc._calls]
+ assert ids == [u"basic", u"advanced"]
+
+ def test_parametrize_with_wrong_number_of_ids(self, testdir):
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+
+ pytest.raises(
+ ValueError, lambda: metafunc.parametrize("x", [1, 2], ids=["basic"])
+ )
+
+ pytest.raises(
+ ValueError,
+ lambda: metafunc.parametrize(
+ ("x", "y"), [("abc", "def"), ("ghi", "jkl")], ids=["one"]
+ ),
+ )
+
+ @pytest.mark.issue510
+ def test_parametrize_empty_list(self):
+
+ def func(y):
+ pass
+
+ class MockConfig(object):
+
+ def getini(self, name):
+ return ""
+
+ @property
+ def hook(self):
+ return self
+
+ def pytest_make_parametrize_id(self, **kw):
+ pass
+
+ metafunc = self.Metafunc(func, MockConfig())
+ metafunc.parametrize("y", [])
+ assert "skip" == metafunc._calls[0].marks[0].name
+
+ def test_parametrize_with_userobjects(self):
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+
+ class A(object):
+ pass
+
+ metafunc.parametrize("x", [A(), A()])
+ metafunc.parametrize("y", list("ab"))
+ assert metafunc._calls[0].id == "x0-a"
+ assert metafunc._calls[1].id == "x0-b"
+ assert metafunc._calls[2].id == "x1-a"
+ assert metafunc._calls[3].id == "x1-b"
+
+ @hypothesis.given(strategies.text() | strategies.binary())
+ @hypothesis.settings(
+ deadline=400.0
+ ) # very close to the default deadline; CI boxes have unreliable CPU power
+ def test_idval_hypothesis(self, value):
+ from _pytest.python import _idval
+
+ escaped = _idval(value, "a", 6, None)
+ assert isinstance(escaped, str)
+ if PY3:
+ escaped.encode("ascii")
+ else:
+ escaped.decode("ascii")
+
+ def test_unicode_idval(self):
+ """This tests that Unicode strings outside the ASCII character set get
+ escaped, using byte escapes if they fall within the latin-1 range or unicode
+ escapes if they do not.
+
+ """
+ from _pytest.python import _idval
+
+ values = [
+ (u"", ""),
+ (u"ascii", "ascii"),
+ (u"ação", "a\\xe7\\xe3o"),
+ (u"josé@blah.com", "jos\\xe9@blah.com"),
+ (
+ u"δοκ.ιμή@παράδειγμα.δοκιμή",
+ "\\u03b4\\u03bf\\u03ba.\\u03b9\\u03bc\\u03ae@\\u03c0\\u03b1\\u03c1\\u03ac\\u03b4\\u03b5\\u03b9\\u03b3"
+ "\\u03bc\\u03b1.\\u03b4\\u03bf\\u03ba\\u03b9\\u03bc\\u03ae",
+ ),
+ ]
+ for val, expected in values:
+ assert _idval(val, "a", 6, None) == expected
+
+ def test_bytes_idval(self):
+ """unittest for the expected behavior to obtain ids for parametrized
+ bytes values:
+ - python2: non-ascii strings are considered bytes and formatted using
+ "binary escape", where any byte < 127 is escaped into its hex form.
+ - python3: bytes objects are always escaped using "binary escape".
+ """
+ from _pytest.python import _idval
+
+ values = [
+ (b"", ""),
+ (b"\xc3\xb4\xff\xe4", "\\xc3\\xb4\\xff\\xe4"),
+ (b"ascii", "ascii"),
+ (u"αρά".encode("utf-8"), "\\xce\\xb1\\xcf\\x81\\xce\\xac"),
+ ]
+ for val, expected in values:
+ assert _idval(val, "a", 6, None) == expected
+
+ def test_class_or_function_idval(self):
+ """unittest for the expected behavior to obtain ids for parametrized
+ values that are classes or functions: their __name__.
+ """
+ from _pytest.python import _idval
+
+ class TestClass(object):
+ pass
+
+ def test_function():
+ pass
+
+ values = [(TestClass, "TestClass"), (test_function, "test_function")]
+ for val, expected in values:
+ assert _idval(val, "a", 6, None) == expected
+
+ @pytest.mark.issue250
+ def test_idmaker_autoname(self):
+ from _pytest.python import idmaker
+
+ result = idmaker(
+ ("a", "b"), [pytest.param("string", 1.0), pytest.param("st-ring", 2.0)]
+ )
+ assert result == ["string-1.0", "st-ring-2.0"]
+
+ result = idmaker(
+ ("a", "b"), [pytest.param(object(), 1.0), pytest.param(object(), object())]
+ )
+ assert result == ["a0-1.0", "a1-b1"]
+ # unicode mixing, issue250
+ result = idmaker(
+ (py.builtin._totext("a"), "b"), [pytest.param({}, b"\xc3\xb4")]
+ )
+ assert result == ["a0-\\xc3\\xb4"]
+
+ def test_idmaker_with_bytes_regex(self):
+ from _pytest.python import idmaker
+
+ result = idmaker(("a"), [pytest.param(re.compile(b"foo"), 1.0)])
+ assert result == ["foo"]
+
+ def test_idmaker_native_strings(self):
+ from _pytest.python import idmaker
+
+ totext = py.builtin._totext
+ result = idmaker(
+ ("a", "b"),
+ [
+ pytest.param(1.0, -1.1),
+ pytest.param(2, -202),
+ pytest.param("three", "three hundred"),
+ pytest.param(True, False),
+ pytest.param(None, None),
+ pytest.param(re.compile("foo"), re.compile("bar")),
+ pytest.param(str, int),
+ pytest.param(list("six"), [66, 66]),
+ pytest.param({7}, set("seven")),
+ pytest.param(tuple("eight"), (8, -8, 8)),
+ pytest.param(b"\xc3\xb4", b"name"),
+ pytest.param(b"\xc3\xb4", totext("other")),
+ ],
+ )
+ assert (
+ result
+ == [
+ "1.0--1.1",
+ "2--202",
+ "three-three hundred",
+ "True-False",
+ "None-None",
+ "foo-bar",
+ "str-int",
+ "a7-b7",
+ "a8-b8",
+ "a9-b9",
+ "\\xc3\\xb4-name",
+ "\\xc3\\xb4-other",
+ ]
+ )
+
+ def test_idmaker_enum(self):
+ from _pytest.python import idmaker
+
+ enum = pytest.importorskip("enum")
+ e = enum.Enum("Foo", "one, two")
+ result = idmaker(("a", "b"), [pytest.param(e.one, e.two)])
+ assert result == ["Foo.one-Foo.two"]
+
+ @pytest.mark.issue351
+ def test_idmaker_idfn(self):
+ from _pytest.python import idmaker
+
+ def ids(val):
+ if isinstance(val, Exception):
+ return repr(val)
+
+ result = idmaker(
+ ("a", "b"),
+ [
+ pytest.param(10.0, IndexError()),
+ pytest.param(20, KeyError()),
+ pytest.param("three", [1, 2, 3]),
+ ],
+ idfn=ids,
+ )
+ assert result == ["10.0-IndexError()", "20-KeyError()", "three-b2"]
+
+ @pytest.mark.issue351
+ def test_idmaker_idfn_unique_names(self):
+ from _pytest.python import idmaker
+
+ def ids(val):
+ return "a"
+
+ result = idmaker(
+ ("a", "b"),
+ [
+ pytest.param(10.0, IndexError()),
+ pytest.param(20, KeyError()),
+ pytest.param("three", [1, 2, 3]),
+ ],
+ idfn=ids,
+ )
+ assert result == ["a-a0", "a-a1", "a-a2"]
+
+ @pytest.mark.issue351
+ def test_idmaker_idfn_exception(self):
+ from _pytest.python import idmaker
+ from _pytest.recwarn import WarningsRecorder
+
+ class BadIdsException(Exception):
+ pass
+
+ def ids(val):
+ raise BadIdsException("ids raised")
+
+ rec = WarningsRecorder()
+ with rec:
+ idmaker(
+ ("a", "b"),
+ [
+ pytest.param(10.0, IndexError()),
+ pytest.param(20, KeyError()),
+ pytest.param("three", [1, 2, 3]),
+ ],
+ idfn=ids,
+ )
+
+ assert (
+ [str(i.message) for i in rec.list]
+ == [
+ "Raised while trying to determine id of parameter a at position 0."
+ "\nUpdate your code as this will raise an error in pytest-4.0.",
+ "Raised while trying to determine id of parameter b at position 0."
+ "\nUpdate your code as this will raise an error in pytest-4.0.",
+ "Raised while trying to determine id of parameter a at position 1."
+ "\nUpdate your code as this will raise an error in pytest-4.0.",
+ "Raised while trying to determine id of parameter b at position 1."
+ "\nUpdate your code as this will raise an error in pytest-4.0.",
+ "Raised while trying to determine id of parameter a at position 2."
+ "\nUpdate your code as this will raise an error in pytest-4.0.",
+ "Raised while trying to determine id of parameter b at position 2."
+ "\nUpdate your code as this will raise an error in pytest-4.0.",
+ ]
+ )
+
+ def test_parametrize_ids_exception(self, testdir):
+ """
+ :param testdir: the instance of Testdir class, a temporary
+ test directory.
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+
+ def ids(arg):
+ raise Exception("bad ids")
+
+ @pytest.mark.parametrize("arg", ["a", "b"], ids=ids)
+ def test_foo(arg):
+ pass
+ """
+ )
+ with pytest.warns(DeprecationWarning):
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(
+ [
+ "<Module 'test_parametrize_ids_exception.py'>",
+ " <Function 'test_foo[a]'>",
+ " <Function 'test_foo[b]'>",
+ ]
+ )
+
+ def test_idmaker_with_ids(self):
+ from _pytest.python import idmaker
+
+ result = idmaker(
+ ("a", "b"), [pytest.param(1, 2), pytest.param(3, 4)], ids=["a", None]
+ )
+ assert result == ["a", "3-4"]
+
+ def test_idmaker_with_paramset_id(self):
+ from _pytest.python import idmaker
+
+ result = idmaker(
+ ("a", "b"),
+ [pytest.param(1, 2, id="me"), pytest.param(3, 4, id="you")],
+ ids=["a", None],
+ )
+ assert result == ["me", "you"]
+
+ def test_idmaker_with_ids_unique_names(self):
+ from _pytest.python import idmaker
+
+ result = idmaker(
+ ("a"), map(pytest.param, [1, 2, 3, 4, 5]), ids=["a", "a", "b", "c", "b"]
+ )
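+ # duplicated ids receive numeric suffixes so every generated id stays unique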
+ assert result == ["a0", "a1", "b0", "c", "b1"]
+
+ def test_addcall_and_parametrize(self):
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.addcall({"x": 1})
+ metafunc.parametrize("y", [2, 3])
+ assert len(metafunc._calls) == 2
+ assert metafunc._calls[0].funcargs == {"x": 1, "y": 2}
+ assert metafunc._calls[1].funcargs == {"x": 1, "y": 3}
+ assert metafunc._calls[0].id == "0-2"
+ assert metafunc._calls[1].id == "0-3"
+
+ @pytest.mark.issue714
+ def test_parametrize_indirect(self):
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x", [1], indirect=True)
+ metafunc.parametrize("y", [2, 3], indirect=True)
+ assert len(metafunc._calls) == 2
+ assert metafunc._calls[0].funcargs == {}
+ assert metafunc._calls[1].funcargs == {}
+ assert metafunc._calls[0].params == dict(x=1, y=2)
+ assert metafunc._calls[1].params == dict(x=1, y=3)
+
+ @pytest.mark.issue714
+ def test_parametrize_indirect_list(self):
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x, y", [("a", "b")], indirect=["x"])
+ assert metafunc._calls[0].funcargs == dict(y="b")
+ assert metafunc._calls[0].params == dict(x="a")
+
+ @pytest.mark.issue714
+ def test_parametrize_indirect_list_all(self):
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x, y", [("a", "b")], indirect=["x", "y"])
+ assert metafunc._calls[0].funcargs == {}
+ assert metafunc._calls[0].params == dict(x="a", y="b")
+
+ @pytest.mark.issue714
+ def test_parametrize_indirect_list_empty(self):
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x, y", [("a", "b")], indirect=[])
+ assert metafunc._calls[0].funcargs == dict(x="a", y="b")
+ assert metafunc._calls[0].params == {}
+
+ @pytest.mark.issue714
+ def test_parametrize_indirect_list_functional(self, testdir):
+ """
+ Test parametrization with 'indirect' parameter applied on
+ particular arguments. As y is direct, its value should
+ be used directly rather than being passed to the fixture
+ y.
+
+ :param testdir: the instance of Testdir class, a temporary
+ test directory.
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope='function')
+ def x(request):
+ return request.param * 3
+ @pytest.fixture(scope='function')
+ def y(request):
+ return request.param * 2
+ @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=['x'])
+ def test_simple(x,y):
+ assert len(x) == 3
+ assert len(y) == 1
+ """
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(["*test_simple*a-b*", "*1 passed*"])
+
+ @pytest.mark.issue714
+ def test_parametrize_indirect_list_error(self, testdir):
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ with pytest.raises(ValueError):
+ metafunc.parametrize("x, y", [("a", "b")], indirect=["x", "z"])
+
+ @pytest.mark.issue714
+ def test_parametrize_uses_no_fixture_error_indirect_false(self, testdir):
+ """The 'uses no fixture' error tells the user at collection time
+ that the parametrize data they've set up doesn't correspond to the
+ fixtures in their test function, rather than silently ignoring this
+ and letting the test potentially pass.
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=False)
+ def test_simple(x):
+ assert len(x) == 3
+ """
+ )
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*uses no argument 'y'*"])
+
+ @pytest.mark.issue714
+ def test_parametrize_uses_no_fixture_error_indirect_true(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope='function')
+ def x(request):
+ return request.param * 3
+ @pytest.fixture(scope='function')
+ def y(request):
+ return request.param * 2
+
+ @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=True)
+ def test_simple(x):
+ assert len(x) == 3
+ """
+ )
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*uses no fixture 'y'*"])
+
+ @pytest.mark.issue714
+ def test_parametrize_indirect_uses_no_fixture_error_indirect_string(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope='function')
+ def x(request):
+ return request.param * 3
+
+ @pytest.mark.parametrize('x, y', [('a', 'b')], indirect='y')
+ def test_simple(x):
+ assert len(x) == 3
+ """
+ )
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*uses no fixture 'y'*"])
+
+ @pytest.mark.issue714
+ def test_parametrize_indirect_uses_no_fixture_error_indirect_list(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope='function')
+ def x(request):
+ return request.param * 3
+
+ @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=['y'])
+ def test_simple(x):
+ assert len(x) == 3
+ """
+ )
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*uses no fixture 'y'*"])
+
+ @pytest.mark.issue714
+ def test_parametrize_argument_not_in_indirect_list(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope='function')
+ def x(request):
+ return request.param * 3
+
+ @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=['x'])
+ def test_simple(x):
+ assert len(x) == 3
+ """
+ )
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*uses no argument 'y'*"])
+
+ def test_parametrize_gives_indicative_error_on_function_with_default_argument(
+ self, testdir
+ ):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize('x, y', [('a', 'b')])
+ def test_simple(x, y=1):
+ assert len(x) == 1
+ """
+ )
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(
+ ["*already takes an argument 'y' with a default value"]
+ )
+
+ def test_addcalls_and_parametrize_indirect(self):
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.addcall(param="123")
+ metafunc.parametrize("x", [1], indirect=True)
+ metafunc.parametrize("y", [2, 3], indirect=True)
+ assert len(metafunc._calls) == 2
+ assert metafunc._calls[0].funcargs == {}
+ assert metafunc._calls[1].funcargs == {}
+ assert metafunc._calls[0].params == dict(x=1, y=2)
+ assert metafunc._calls[1].params == dict(x=1, y=3)
+
+ def test_parametrize_functional(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize('x', [1,2], indirect=True)
+ metafunc.parametrize('y', [2])
+ @pytest.fixture
+ def x(request):
+ return request.param * 10
+
+ def test_simple(x,y):
+ assert x in (10,20)
+ assert y == 2
+ """
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ ["*test_simple*1-2*", "*test_simple*2-2*", "*2 passed*"]
+ )
+
+ def test_parametrize_onearg(self):
+ metafunc = self.Metafunc(lambda x: None)
+ metafunc.parametrize("x", [1, 2])
+ assert len(metafunc._calls) == 2
+ assert metafunc._calls[0].funcargs == dict(x=1)
+ assert metafunc._calls[0].id == "1"
+ assert metafunc._calls[1].funcargs == dict(x=2)
+ assert metafunc._calls[1].id == "2"
+
+ def test_parametrize_onearg_indirect(self):
+ metafunc = self.Metafunc(lambda x: None)
+ metafunc.parametrize("x", [1, 2], indirect=True)
+ assert metafunc._calls[0].params == dict(x=1)
+ assert metafunc._calls[0].id == "1"
+ assert metafunc._calls[1].params == dict(x=2)
+ assert metafunc._calls[1].id == "2"
+
+ def test_parametrize_twoargs(self):
+ metafunc = self.Metafunc(lambda x, y: None)
+ metafunc.parametrize(("x", "y"), [(1, 2), (3, 4)])
+ assert len(metafunc._calls) == 2
+ assert metafunc._calls[0].funcargs == dict(x=1, y=2)
+ assert metafunc._calls[0].id == "1-2"
+ assert metafunc._calls[1].funcargs == dict(x=3, y=4)
+ assert metafunc._calls[1].id == "3-4"
+
+ def test_parametrize_multiple_times(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ pytestmark = pytest.mark.parametrize("x", [1,2])
+ def test_func(x):
+ assert 0, x
+ class TestClass(object):
+ pytestmark = pytest.mark.parametrize("y", [3,4])
+ def test_meth(self, x, y):
+ assert 0, x
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 1
+ result.assert_outcomes(failed=6)
+
+ def test_parametrize_CSV(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize("x, y,", [(1,2), (2,3)])
+ def test_func(x, y):
+ assert x+1 == y
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_parametrize_class_scenarios(self, testdir):
+ testdir.makepyfile(
+ """
+ # same as doc/en/example/parametrize scenario example
+ def pytest_generate_tests(metafunc):
+ idlist = []
+ argvalues = []
+ for scenario in metafunc.cls.scenarios:
+ idlist.append(scenario[0])
+ items = scenario[1].items()
+ argnames = [x[0] for x in items]
+ argvalues.append(([x[1] for x in items]))
+ metafunc.parametrize(argnames, argvalues, ids=idlist, scope="class")
+
+ class Test(object):
+ scenarios = [['1', {'arg': {1: 2}, "arg2": "value2"}],
+ ['2', {'arg':'value2', "arg2": "value2"}]]
+
+ def test_1(self, arg, arg2):
+ pass
+
+ def test_2(self, arg2, arg):
+ pass
+
+ def test_3(self, arg, arg2):
+ pass
+ """
+ )
+ result = testdir.runpytest("-v")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ """
+ *test_1*1*
+ *test_2*1*
+ *test_3*1*
+ *test_1*2*
+ *test_2*2*
+ *test_3*2*
+ *6 passed*
+ """
+ )
+
+ def test_format_args(self):
+
+ def function1():
+ pass
+
+ assert fixtures._format_args(function1) == "()"
+
+ def function2(arg1):
+ pass
+
+ assert fixtures._format_args(function2) == "(arg1)"
+
+ def function3(arg1, arg2="qwe"):
+ pass
+
+ assert fixtures._format_args(function3) == "(arg1, arg2='qwe')"
+
+ def function4(arg1, *args, **kwargs):
+ pass
+
+ assert fixtures._format_args(function4) == "(arg1, *args, **kwargs)"
+
+
+class TestMetafuncFunctional(object):
+
+ def test_attributes(self, testdir):
+ p = testdir.makepyfile(
+ """
+ # assumes that generate/provide runs in the same process
+ import sys, pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.addcall(param=metafunc)
+
+ @pytest.fixture
+ def metafunc(request):
+ assert request._pyfuncitem._genid == "0"
+ return request.param
+
+ def test_function(metafunc, pytestconfig):
+ assert metafunc.config == pytestconfig
+ assert metafunc.module.__name__ == __name__
+ assert metafunc.function == test_function
+ assert metafunc.cls is None
+
+ class TestClass(object):
+ def test_method(self, metafunc, pytestconfig):
+ assert metafunc.config == pytestconfig
+ assert metafunc.module.__name__ == __name__
+ if sys.version_info > (3, 0):
+ unbound = TestClass.test_method
+ else:
+ unbound = TestClass.test_method.im_func
+ # XXX actually have an unbound test function here?
+ assert metafunc.function == unbound
+ assert metafunc.cls == TestClass
+ """
+ )
+ result = testdir.runpytest(p, "-v")
+ result.assert_outcomes(passed=2)
+
+ def test_addcall_with_two_funcargs_generators(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_generate_tests(metafunc):
+ assert "arg1" in metafunc.fixturenames
+ metafunc.addcall(funcargs=dict(arg1=1, arg2=2))
+ """
+ )
+ p = testdir.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.addcall(funcargs=dict(arg1=1, arg2=1))
+
+ class TestClass(object):
+ def test_myfunc(self, arg1, arg2):
+ assert arg1 == arg2
+ """
+ )
+ result = testdir.runpytest("-v", p)
+ result.stdout.fnmatch_lines(
+ ["*test_myfunc*0*PASS*", "*test_myfunc*1*FAIL*", "*1 failed, 1 passed*"]
+ )
+
+ def test_two_functions(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.addcall(param=10)
+ metafunc.addcall(param=20)
+
+ import pytest
+ @pytest.fixture
+ def arg1(request):
+ return request.param
+
+ def test_func1(arg1):
+ assert arg1 == 10
+ def test_func2(arg1):
+ assert arg1 in (10, 20)
+ """
+ )
+ result = testdir.runpytest("-v", p)
+ result.stdout.fnmatch_lines(
+ [
+ "*test_func1*0*PASS*",
+ "*test_func1*1*FAIL*",
+ "*test_func2*PASS*",
+ "*1 failed, 3 passed*",
+ ]
+ )
+
+ def test_noself_in_method(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ assert 'xyz' not in metafunc.fixturenames
+
+ class TestHello(object):
+ def test_hello(xyz):
+ pass
+ """
+ )
+ result = testdir.runpytest(p)
+ result.assert_outcomes(passed=1)
+
+ def test_generate_plugin_and_module(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_generate_tests(metafunc):
+ assert "arg1" in metafunc.fixturenames
+ metafunc.addcall(id="world", param=(2,100))
+ """
+ )
+ p = testdir.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.addcall(param=(1,1), id="hello")
+
+ import pytest
+ @pytest.fixture
+ def arg1(request):
+ return request.param[0]
+ @pytest.fixture
+ def arg2(request):
+ return request.param[1]
+
+ class TestClass(object):
+ def test_myfunc(self, arg1, arg2):
+ assert arg1 == arg2
+ """
+ )
+ result = testdir.runpytest("-v", p)
+ result.stdout.fnmatch_lines(
+ [
+ "*test_myfunc*hello*PASS*",
+ "*test_myfunc*world*FAIL*",
+ "*1 failed, 1 passed*",
+ ]
+ )
+
+ def test_generate_tests_in_class(self, testdir):
+ p = testdir.makepyfile(
+ """
+ class TestClass(object):
+ def pytest_generate_tests(self, metafunc):
+ metafunc.addcall(funcargs={'hello': 'world'}, id="hello")
+
+ def test_myfunc(self, hello):
+ assert hello == "world"
+ """
+ )
+ result = testdir.runpytest("-v", p)
+ result.stdout.fnmatch_lines(["*test_myfunc*hello*PASS*", "*1 passed*"])
+
+ def test_two_functions_not_same_instance(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.addcall({'arg1': 10})
+ metafunc.addcall({'arg1': 20})
+
+ class TestClass(object):
+ def test_func(self, arg1):
+ assert not hasattr(self, 'x')
+ self.x = 1
+ """
+ )
+ result = testdir.runpytest("-v", p)
+ result.stdout.fnmatch_lines(
+ ["*test_func*0*PASS*", "*test_func*1*PASS*", "*2 pass*"]
+ )
+
+ def test_issue28_setup_method_in_generate_tests(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.addcall({'arg1': 1})
+
+ class TestClass(object):
+ def test_method(self, arg1):
+ assert arg1 == self.val
+ def setup_method(self, func):
+ self.val = 1
+ """
+ )
+ result = testdir.runpytest(p)
+ result.assert_outcomes(passed=1)
+
+ def test_parametrize_functional2(self, testdir):
+ testdir.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize("arg1", [1,2])
+ metafunc.parametrize("arg2", [4,5])
+ def test_hello(arg1, arg2):
+ assert 0, (arg1, arg2)
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*(1, 4)*", "*(1, 5)*", "*(2, 4)*", "*(2, 5)*", "*4 failed*"]
+ )
+
+ def test_parametrize_and_inner_getfixturevalue(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize("arg1", [1], indirect=True)
+ metafunc.parametrize("arg2", [10], indirect=True)
+
+ import pytest
+ @pytest.fixture
+ def arg1(request):
+ x = request.getfixturevalue("arg2")
+ return x + request.param
+
+ @pytest.fixture
+ def arg2(request):
+ return request.param
+
+ def test_func1(arg1, arg2):
+ assert arg1 == 11
+ """
+ )
+ result = testdir.runpytest("-v", p)
+ result.stdout.fnmatch_lines(["*test_func1*1*PASS*", "*1 passed*"])
+
+ def test_parametrize_on_setup_arg(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ assert "arg1" in metafunc.fixturenames
+ metafunc.parametrize("arg1", [1], indirect=True)
+
+ import pytest
+ @pytest.fixture
+ def arg1(request):
+ return request.param
+
+ @pytest.fixture
+ def arg2(request, arg1):
+ return 10 * arg1
+
+ def test_func(arg2):
+ assert arg2 == 10
+ """
+ )
+ result = testdir.runpytest("-v", p)
+ result.stdout.fnmatch_lines(["*test_func*1*PASS*", "*1 passed*"])
+
+ def test_parametrize_with_ids(self, testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ console_output_style=classic
+ """
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize(("a", "b"), [(1,1), (1,2)],
+ ids=["basic", "advanced"])
+
+ def test_function(a, b):
+ assert a == b
+ """
+ )
+ result = testdir.runpytest("-v")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines_random(
+ ["*test_function*basic*PASSED", "*test_function*advanced*FAILED"]
+ )
+
+ def test_parametrize_without_ids(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize(("a", "b"),
+ [(1,object()), (1.3,object())])
+
+ def test_function(a, b):
+ assert 1
+ """
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ """
+ *test_function*1-b0*
+ *test_function*1.3-b1*
+ """
+ )
+
+ def test_parametrize_with_None_in_ids(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize(("a", "b"), [(1,1), (1,1), (1,2)],
+ ids=["basic", None, "advanced"])
+
+ def test_function(a, b):
+ assert a == b
+ """
+ )
+ result = testdir.runpytest("-v")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines_random(
+ [
+ "*test_function*basic*PASSED*",
+ "*test_function*1-1*PASSED*",
+ "*test_function*advanced*FAILED*",
+ ]
+ )
+
+ def test_fixture_parametrized_empty_ids(self, testdir):
+ """Fixtures parametrized with empty ids cause an internal error (#1849)."""
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope="module", ids=[], params=[])
+ def temp(request):
+ return request.param
+
+ def test_temp(temp):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["* 1 skipped *"])
+
+ def test_parametrized_empty_ids(self, testdir):
+ """Tests parametrized with empty ids cause an internal error (#1849)."""
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize('temp', [], ids=list())
+ def test_temp(temp):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["* 1 skipped *"])
+
+ def test_parametrized_ids_invalid_type(self, testdir):
+ """Tests parametrized with ids as non-strings (#1857)."""
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize("x, expected", [(10, 20), (40, 80)], ids=(None, 2))
+ def test_ids_numbers(x,expected):
+ assert x * 2 == expected
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*ids must be list of strings, found: 2 (type: int)*"]
+ )
+
+ def test_parametrize_with_identical_ids_get_unique_names(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize(("a", "b"), [(1,1), (1,2)],
+ ids=["a", "a"])
+
+ def test_function(a, b):
+ assert a == b
+ """
+ )
+ result = testdir.runpytest("-v")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines_random(
+ ["*test_function*a0*PASSED*", "*test_function*a1*FAILED*"]
+ )
+
+ @pytest.mark.parametrize(("scope", "length"), [("module", 2), ("function", 4)])
+ def test_parametrize_scope_overrides(self, testdir, scope, length):
+ testdir.makepyfile(
+ """
+ import pytest
+ values = []
+ def pytest_generate_tests(metafunc):
+ if "arg" in metafunc.funcargnames:
+ metafunc.parametrize("arg", [1,2], indirect=True,
+ scope=%r)
+ @pytest.fixture
+ def arg(request):
+ values.append(request.param)
+ return request.param
+ def test_hello(arg):
+ assert arg in (1,2)
+ def test_world(arg):
+ assert arg in (1,2)
+ def test_checklength():
+ assert len(values) == %d
+ """
+ % (scope, length)
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=5)
+
+ def test_parametrize_issue323(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='module', params=range(966))
+ def foo(request):
+ return request.param
+
+ def test_it(foo):
+ pass
+ def test_it2(foo):
+ pass
+ """
+ )
+ reprec = testdir.inline_run("--collect-only")
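+ # regression check for issue 323: collecting the heavily parametrized module must not trigger an internal error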
+ assert not reprec.getcalls("pytest_internalerror")
+
+ def test_usefixtures_seen_in_generate_tests(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ assert "abc" in metafunc.fixturenames
+ metafunc.parametrize("abc", [1])
+
+ @pytest.mark.usefixtures("abc")
+ def test_function():
+ pass
+ """
+ )
+ reprec = testdir.runpytest()
+ reprec.assert_outcomes(passed=1)
+
+ def test_generate_tests_only_done_in_subdir(self, testdir):
+ sub1 = testdir.mkpydir("sub1")
+ sub2 = testdir.mkpydir("sub2")
+ sub1.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ def pytest_generate_tests(metafunc):
+ assert metafunc.function.__name__ == "test_1"
+ """
+ )
+ )
+ sub2.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ def pytest_generate_tests(metafunc):
+ assert metafunc.function.__name__ == "test_2"
+ """
+ )
+ )
+ sub1.join("test_in_sub1.py").write("def test_1(): pass")
+ sub2.join("test_in_sub2.py").write("def test_2(): pass")
+ result = testdir.runpytest("--keep-duplicates", "-v", "-s", sub1, sub2, sub1)
+ result.assert_outcomes(passed=3)
+
+ def test_generate_same_function_names_issue403(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ def make_tests():
+ @pytest.mark.parametrize("x", range(2))
+ def test_foo(x):
+ pass
+ return test_foo
+
+ test_x = make_tests()
+ test_y = make_tests()
+ """
+ )
+ reprec = testdir.runpytest()
+ reprec.assert_outcomes(passed=4)
+
+ @pytest.mark.issue463
+ @pytest.mark.parametrize("attr", ["parametrise", "parameterize", "parameterise"])
+ def test_parametrize_misspelling(self, testdir, attr):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.{}("x", range(2))
+ def test_foo(x):
+ pass
+ """.format(
+ attr
+ )
+ )
+ reprec = testdir.inline_run("--collectonly")
+ failures = reprec.getfailures()
+ assert len(failures) == 1
+ expectederror = "MarkerError: test_foo has '{}', spelling should be 'parametrize'".format(
+ attr
+ )
+ assert expectederror in failures[0].longrepr.reprcrash.message
+
+
+class TestMetafuncFunctionalAuto(object):
+ """
+ Tests related to automatically finding the correct scope for parametrized tests (#1832).
+ """
+
+ def test_parametrize_auto_scope(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='session', autouse=True)
+ def fixture():
+ return 1
+
+ @pytest.mark.parametrize('animal', ["dog", "cat"])
+ def test_1(animal):
+ assert animal in ('dog', 'cat')
+
+ @pytest.mark.parametrize('animal', ['fish'])
+ def test_2(animal):
+ assert animal == 'fish'
+
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["* 3 passed *"])
+
+ def test_parametrize_auto_scope_indirect(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='session')
+ def echo(request):
+ return request.param
+
+ @pytest.mark.parametrize('animal, echo', [("dog", 1), ("cat", 2)], indirect=['echo'])
+ def test_1(animal, echo):
+ assert animal in ('dog', 'cat')
+ assert echo in (1, 2, 3)
+
+ @pytest.mark.parametrize('animal, echo', [('fish', 3)], indirect=['echo'])
+ def test_2(animal, echo):
+ assert animal == 'fish'
+ assert echo in (1, 2, 3)
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["* 3 passed *"])
+
+ def test_parametrize_auto_scope_override_fixture(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='session', autouse=True)
+ def animal():
+ return 'fox'
+
+ @pytest.mark.parametrize('animal', ["dog", "cat"])
+ def test_1(animal):
+ assert animal in ('dog', 'cat')
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["* 2 passed *"])
+
+ def test_parametrize_all_indirects(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture()
+ def animal(request):
+ return request.param
+
+ @pytest.fixture(scope='session')
+ def echo(request):
+ return request.param
+
+ @pytest.mark.parametrize('animal, echo', [("dog", 1), ("cat", 2)], indirect=True)
+ def test_1(animal, echo):
+ assert animal in ('dog', 'cat')
+ assert echo in (1, 2, 3)
+
+ @pytest.mark.parametrize('animal, echo', [("fish", 3)], indirect=True)
+ def test_2(animal, echo):
+ assert animal == 'fish'
+ assert echo in (1, 2, 3)
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["* 3 passed *"])
+
+ def test_parametrize_issue634(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='module')
+ def foo(request):
+ print('preparing foo-%d' % request.param)
+ return 'foo-%d' % request.param
+
+ def test_one(foo):
+ pass
+
+ def test_two(foo):
+ pass
+
+ test_two.test_with = (2, 3)
+
+ def pytest_generate_tests(metafunc):
+ params = (1, 2, 3, 4)
+ if not 'foo' in metafunc.fixturenames:
+ return
+
+ test_with = getattr(metafunc.function, 'test_with', None)
+ if test_with:
+ params = test_with
+ metafunc.parametrize('foo', params, indirect=True)
+ """
+ )
+ result = testdir.runpytest("-s")
+ output = result.stdout.str()
+ assert output.count("preparing foo-2") == 1
+ assert output.count("preparing foo-3") == 1
+
+
+@pytest.mark.filterwarnings("ignore:Applying marks directly to parameters")
+@pytest.mark.issue308
+class TestMarkersWithParametrization(object):
+
+ def test_simple_mark(self, testdir):
+ s = """
+ import pytest
+
+ @pytest.mark.foo
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.mark.bar((1, 3)),
+ (2, 3),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ items = testdir.getitems(s)
+ assert len(items) == 3
+ for item in items:
+ assert "foo" in item.keywords
+ assert "bar" not in items[0].keywords
+ assert "bar" in items[1].keywords
+ assert "bar" not in items[2].keywords
+
+ def test_select_based_on_mark(self, testdir):
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.mark.foo((2, 3)),
+ (3, 4),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ testdir.makepyfile(s)
+ rec = testdir.inline_run("-m", "foo")
+ passed, skipped, fail = rec.listoutcomes()
+ assert len(passed) == 1
+ assert len(skipped) == 0
+ assert len(fail) == 0
+
+ @pytest.mark.xfail(reason="is this important to support??")
+ def test_nested_marks(self, testdir):
+ s = """
+ import pytest
+ mastermark = pytest.mark.foo(pytest.mark.bar)
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ mastermark((1, 3)),
+ (2, 3),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ items = testdir.getitems(s)
+ assert len(items) == 3
+ for mark in ["foo", "bar"]:
+ assert mark not in items[0].keywords
+ assert mark in items[1].keywords
+ assert mark not in items[2].keywords
+
+ def test_simple_xfail(self, testdir):
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.mark.xfail((1, 3)),
+ (2, 3),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ testdir.makepyfile(s)
+ reprec = testdir.inline_run()
+ # xfail is skip??
+ reprec.assertoutcome(passed=2, skipped=1)
+
+ def test_simple_xfail_single_argname(self, testdir):
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize("n", [
+ 2,
+ pytest.mark.xfail(3),
+ 4,
+ ])
+ def test_isEven(n):
+ assert n % 2 == 0
+ """
+ testdir.makepyfile(s)
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2, skipped=1)
+
+ def test_xfail_with_arg(self, testdir):
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.mark.xfail("True")((1, 3)),
+ (2, 3),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ testdir.makepyfile(s)
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2, skipped=1)
+
+ def test_xfail_with_kwarg(self, testdir):
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.mark.xfail(reason="some bug")((1, 3)),
+ (2, 3),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ testdir.makepyfile(s)
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2, skipped=1)
+
+ def test_xfail_with_arg_and_kwarg(self, testdir):
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.mark.xfail("True", reason="some bug")((1, 3)),
+ (2, 3),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ testdir.makepyfile(s)
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2, skipped=1)
+
+ @pytest.mark.parametrize("strict", [True, False])
+ def test_xfail_passing_is_xpass(self, testdir, strict):
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.mark.xfail("sys.version_info > (0, 0, 0)", reason="some bug", strict={strict})((2, 3)),
+ (3, 4),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """.format(
+ strict=strict
+ )
+ testdir.makepyfile(s)
+ reprec = testdir.inline_run()
+ passed, failed = (2, 1) if strict else (3, 0)
+ reprec.assertoutcome(passed=passed, failed=failed)
+
+ def test_parametrize_called_in_generate_tests(self, testdir):
+ s = """
+ import pytest
+
+
+ def pytest_generate_tests(metafunc):
+ passingTestData = [(1, 2),
+ (2, 3)]
+ failingTestData = [(1, 3),
+ (2, 2)]
+
+ testData = passingTestData + [pytest.mark.xfail(d)
+ for d in failingTestData]
+ metafunc.parametrize(("n", "expected"), testData)
+
+
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ testdir.makepyfile(s)
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2, skipped=2)
+
+ @pytest.mark.issue290
+ def test_parametrize_ID_generation_string_int_works(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def myfixture():
+ return 'example'
+ @pytest.mark.parametrize(
+ 'limit', (0, '0'))
+ def test_limit(limit, myfixture):
+ return
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ @pytest.mark.parametrize("strict", [True, False])
+ def test_parametrize_marked_value(self, testdir, strict):
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ pytest.param(
+ 2,3,
+ marks=pytest.mark.xfail("sys.version_info > (0, 0, 0)", reason="some bug", strict={strict}),
+ ),
+ pytest.param(
+ 2,3,
+ marks=[pytest.mark.xfail("sys.version_info > (0, 0, 0)", reason="some bug", strict={strict})],
+ ),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """.format(
+ strict=strict
+ )
+ testdir.makepyfile(s)
+ reprec = testdir.inline_run()
+ passed, failed = (0, 2) if strict else (2, 0)
+ reprec.assertoutcome(passed=passed, failed=failed)
+
+ def test_pytest_make_parametrize_id(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_make_parametrize_id(config, val):
+ return str(val * 2)
+ """
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize("x", range(2))
+ def test_func(x):
+ pass
+ """
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(["*test_func*0*PASS*", "*test_func*2*PASS*"])
+
+ def test_pytest_make_parametrize_id_with_argname(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_make_parametrize_id(config, val, argname):
+ return str(val * 2 if argname == 'x' else val * 10)
+ """
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize("x", range(2))
+ def test_func_a(x):
+ pass
+
+ @pytest.mark.parametrize("y", [1])
+ def test_func_b(y):
+ pass
+ """
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ ["*test_func_a*0*PASS*", "*test_func_a*2*PASS*", "*test_func_b*10*PASS*"]
+ )
diff --git a/third_party/python/pytest/testing/python/raises.py b/third_party/python/pytest/testing/python/raises.py
new file mode 100644
index 0000000000..64199c3b6f
--- /dev/null
+++ b/third_party/python/pytest/testing/python/raises.py
@@ -0,0 +1,177 @@
+from _pytest.outcomes import Failed
+import pytest
+import sys
+
+
+class TestRaises(object):
+
+ def test_raises(self):
+ source = "int('qwe')"
+ excinfo = pytest.raises(ValueError, source)
+ code = excinfo.traceback[-1].frame.code
+ s = str(code.fullsource)
+ assert s == source
+
+ def test_raises_exec(self):
+ pytest.raises(ValueError, "a,x = []")
+
+ def test_raises_syntax_error(self):
+ pytest.raises(SyntaxError, "qwe qwe qwe")
+
+ def test_raises_function(self):
+ pytest.raises(ValueError, int, "hello")
+
+ def test_raises_callable_no_exception(self):
+
+ class A(object):
+
+ def __call__(self):
+ pass
+
+ try:
+ pytest.raises(ValueError, A())
+ except pytest.raises.Exception:
+ pass
+
+ def test_raises_as_contextmanager(self, testdir):
+ testdir.makepyfile(
+ """
+ from __future__ import with_statement
+ import py, pytest
+ import _pytest._code
+
+ def test_simple():
+ with pytest.raises(ZeroDivisionError) as excinfo:
+ assert isinstance(excinfo, _pytest._code.ExceptionInfo)
+ 1/0
+ print (excinfo)
+ assert excinfo.type == ZeroDivisionError
+ assert isinstance(excinfo.value, ZeroDivisionError)
+
+ def test_noraise():
+ with pytest.raises(pytest.raises.Exception):
+ with pytest.raises(ValueError):
+ int()
+
+ def test_raise_wrong_exception_passes_by():
+ with pytest.raises(ZeroDivisionError):
+ with pytest.raises(ValueError):
+ 1/0
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*3 passed*"])
+
+ def test_noclass(self):
+ with pytest.raises(TypeError):
+ pytest.raises("wrong", lambda: None)
+
+ def test_invalid_arguments_to_raises(self):
+ with pytest.raises(TypeError, match="unknown"):
+ with pytest.raises(TypeError, unknown="bogus"):
+ raise ValueError()
+
+ def test_tuple(self):
+ with pytest.raises((KeyError, ValueError)):
+ raise KeyError("oops")
+
+ def test_no_raise_message(self):
+ try:
+ pytest.raises(ValueError, int, "0")
+ except pytest.raises.Exception as e:
+ assert e.msg == "DID NOT RAISE {}".format(repr(ValueError))
+ else:
+ assert False, "Expected pytest.raises.Exception"
+
+ try:
+ with pytest.raises(ValueError):
+ pass
+ except pytest.raises.Exception as e:
+ assert e.msg == "DID NOT RAISE {}".format(repr(ValueError))
+ else:
+ assert False, "Expected pytest.raises.Exception"
+
+ def test_custom_raise_message(self):
+ message = "TEST_MESSAGE"
+ try:
+ with pytest.raises(ValueError, message=message):
+ pass
+ except pytest.raises.Exception as e:
+ assert e.msg == message
+ else:
+ assert False, "Expected pytest.raises.Exception"
+
+ @pytest.mark.parametrize("method", ["function", "with"])
+ def test_raises_cyclic_reference(self, method):
+ """
+ Ensure pytest.raises does not leave a reference cycle (#1965).
+ """
+ import gc
+
+ class T(object):
+
+ def __call__(self):
+ raise ValueError
+
+ t = T()
+ if method == "function":
+ pytest.raises(ValueError, t)
+ else:
+ with pytest.raises(ValueError):
+ t()
+
+ # ensure both forms of pytest.raises don't leave exceptions in sys.exc_info()
+ assert sys.exc_info() == (None, None, None)
+
+ del t
+
+ # ensure the t instance is not stuck in a cyclic reference
+ for o in gc.get_objects():
+ assert type(o) is not T
+
+ def test_raises_match(self):
+ msg = r"with base \d+"
+ with pytest.raises(ValueError, match=msg):
+ int("asdf")
+
+ msg = "with base 10"
+ with pytest.raises(ValueError, match=msg):
+ int("asdf")
+
+ msg = "with base 16"
+ expr = r"Pattern '{}' not found in 'invalid literal for int\(\) with base 10: 'asdf''".format(
+ msg
+ )
+ with pytest.raises(AssertionError, match=expr):
+ with pytest.raises(ValueError, match=msg):
+ int("asdf", base=10)
+
+ def test_raises_match_wrong_type(self):
+ """Raising an exception with the wrong type and match= given.
+
+        pytest should let the unexpected exception propagate; the pattern match is
+        not really relevant if we got a different exception.
+ """
+ with pytest.raises(ValueError):
+ with pytest.raises(IndexError, match="nomatch"):
+ int("asdf")
+
+ def test_raises_exception_looks_iterable(self):
+ from six import add_metaclass
+
+ class Meta(type(object)):
+
+ def __getitem__(self, item):
+ return 1 / 0
+
+ def __len__(self):
+ return 1
+
+ @add_metaclass(Meta)
+ class ClassLooksIterableException(Exception):
+ pass
+
+ with pytest.raises(
+ Failed, match="DID NOT RAISE <class 'raises.ClassLooksIterableException'>"
+ ):
+ pytest.raises(ClassLooksIterableException, lambda: None)
diff --git a/third_party/python/pytest/testing/python/setup_only.py b/third_party/python/pytest/testing/python/setup_only.py
new file mode 100644
index 0000000000..4ae24b15a6
--- /dev/null
+++ b/third_party/python/pytest/testing/python/setup_only.py
@@ -0,0 +1,269 @@
+import pytest
+
+
+@pytest.fixture(params=["--setup-only", "--setup-plan", "--setup-show"], scope="module")
+def mode(request):
+ return request.param
+
+
+def test_show_only_active_fixtures(testdir, mode):
+ p = testdir.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def _arg0():
+ """hidden arg0 fixture"""
+ @pytest.fixture
+ def arg1():
+ """arg1 docstring"""
+ def test_arg1(arg1):
+ pass
+ '''
+ )
+
+ result = testdir.runpytest(mode, p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ ["*SETUP F arg1*", "*test_arg1 (fixtures used: arg1)*", "*TEARDOWN F arg1*"]
+ )
+ assert "_arg0" not in result.stdout.str()
+
+
+def test_show_different_scopes(testdir, mode):
+ p = testdir.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg_function():
+ """function scoped fixture"""
+ @pytest.fixture(scope='session')
+ def arg_session():
+ """session scoped fixture"""
+ def test_arg1(arg_session, arg_function):
+ pass
+ '''
+ )
+
+ result = testdir.runpytest(mode, p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "SETUP S arg_session*",
+ "*SETUP F arg_function*",
+ "*test_arg1 (fixtures used: arg_function, arg_session)*",
+ "*TEARDOWN F arg_function*",
+ "TEARDOWN S arg_session*",
+ ]
+ )
+
+
+def test_show_nested_fixtures(testdir, mode):
+ testdir.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture(scope='session')
+ def arg_same():
+ """session scoped fixture"""
+ '''
+ )
+ p = testdir.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture(scope='function')
+ def arg_same(arg_same):
+ """function scoped fixture"""
+ def test_arg1(arg_same):
+ pass
+ '''
+ )
+
+ result = testdir.runpytest(mode, p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "SETUP S arg_same*",
+ "*SETUP F arg_same (fixtures used: arg_same)*",
+ "*test_arg1 (fixtures used: arg_same)*",
+ "*TEARDOWN F arg_same*",
+ "TEARDOWN S arg_same*",
+ ]
+ )
+
+
+def test_show_fixtures_with_autouse(testdir, mode):
+ p = testdir.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg_function():
+ """function scoped fixture"""
+ @pytest.fixture(scope='session', autouse=True)
+ def arg_session():
+ """session scoped fixture"""
+ def test_arg1(arg_function):
+ pass
+ '''
+ )
+
+ result = testdir.runpytest(mode, p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "SETUP S arg_session*",
+ "*SETUP F arg_function*",
+ "*test_arg1 (fixtures used: arg_function, arg_session)*",
+ ]
+ )
+
+
+def test_show_fixtures_with_parameters(testdir, mode):
+ testdir.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture(scope='session', params=['foo', 'bar'])
+ def arg_same():
+ """session scoped fixture"""
+ '''
+ )
+ p = testdir.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture(scope='function')
+ def arg_other(arg_same):
+ """function scoped fixture"""
+ def test_arg1(arg_other):
+ pass
+ '''
+ )
+
+ result = testdir.runpytest(mode, p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "SETUP S arg_same?foo?",
+ "TEARDOWN S arg_same?foo?",
+ "SETUP S arg_same?bar?",
+ "TEARDOWN S arg_same?bar?",
+ ]
+ )
+
+
+def test_show_fixtures_with_parameter_ids(testdir, mode):
+ testdir.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture(
+ scope='session', params=['foo', 'bar'], ids=['spam', 'ham'])
+ def arg_same():
+ """session scoped fixture"""
+ '''
+ )
+ p = testdir.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture(scope='function')
+ def arg_other(arg_same):
+ """function scoped fixture"""
+ def test_arg1(arg_other):
+ pass
+ '''
+ )
+
+ result = testdir.runpytest(mode, p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ ["SETUP S arg_same?spam?", "SETUP S arg_same?ham?"]
+ )
+
+
+def test_show_fixtures_with_parameter_ids_function(testdir, mode):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(params=['foo', 'bar'], ids=lambda p: p.upper())
+ def foobar():
+ pass
+ def test_foobar(foobar):
+ pass
+ """
+ )
+
+ result = testdir.runpytest(mode, p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(["*SETUP F foobar?FOO?", "*SETUP F foobar?BAR?"])
+
+
+def test_dynamic_fixture_request(testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture()
+ def dynamically_requested_fixture():
+ pass
+ @pytest.fixture()
+ def dependent_fixture(request):
+ request.getfixturevalue('dynamically_requested_fixture')
+ def test_dyn(dependent_fixture):
+ pass
+ """
+ )
+
+ result = testdir.runpytest("--setup-only", p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "*SETUP F dynamically_requested_fixture",
+ "*TEARDOWN F dynamically_requested_fixture",
+ ]
+ )
+
+
+def test_capturing(testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest, sys
+ @pytest.fixture()
+ def one():
+ sys.stdout.write('this should be captured')
+ sys.stderr.write('this should also be captured')
+ @pytest.fixture()
+ def two(one):
+ assert 0
+ def test_capturing(two):
+ pass
+ """
+ )
+
+ result = testdir.runpytest("--setup-only", p)
+ result.stdout.fnmatch_lines(
+ ["this should be captured", "this should also be captured"]
+ )
+
+
+def test_show_fixtures_and_execute_test(testdir):
+ """ Verifies that setups are shown and tests are executed. """
+ p = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def arg():
+ assert True
+ def test_arg(arg):
+ assert False
+ """
+ )
+
+ result = testdir.runpytest("--setup-show", p)
+ assert result.ret == 1
+
+ result.stdout.fnmatch_lines(
+ ["*SETUP F arg*", "*test_arg (fixtures used: arg)F*", "*TEARDOWN F arg*"]
+ )
diff --git a/third_party/python/pytest/testing/python/setup_plan.py b/third_party/python/pytest/testing/python/setup_plan.py
new file mode 100644
index 0000000000..0321939a8a
--- /dev/null
+++ b/third_party/python/pytest/testing/python/setup_plan.py
@@ -0,0 +1,19 @@
+def test_show_fixtures_and_test(testdir):
+ """ Verifies that fixtures are not executed. """
+ p = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def arg():
+ assert False
+ def test_arg(arg):
+ assert False
+ """
+ )
+
+ result = testdir.runpytest("--setup-plan", p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ ["*SETUP F arg*", "*test_arg (fixtures used: arg)", "*TEARDOWN F arg*"]
+ )
diff --git a/third_party/python/pytest/testing/python/show_fixtures_per_test.py b/third_party/python/pytest/testing/python/show_fixtures_per_test.py
new file mode 100644
index 0000000000..e14344d4eb
--- /dev/null
+++ b/third_party/python/pytest/testing/python/show_fixtures_per_test.py
@@ -0,0 +1,183 @@
+# -*- coding: utf-8 -*-
+
+
+def test_no_items_should_not_show_output(testdir):
+ result = testdir.runpytest("--fixtures-per-test")
+ assert "fixtures used by" not in result.stdout.str()
+ assert result.ret == 0
+
+
+def test_fixtures_in_module(testdir):
+ p = testdir.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def _arg0():
+ """hidden arg0 fixture"""
+ @pytest.fixture
+ def arg1():
+ """arg1 docstring"""
+ def test_arg1(arg1):
+ pass
+ '''
+ )
+
+ result = testdir.runpytest("--fixtures-per-test", p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "*fixtures used by test_arg1*",
+ "*(test_fixtures_in_module.py:9)*",
+ "arg1",
+ " arg1 docstring",
+ ]
+ )
+ assert "_arg0" not in result.stdout.str()
+
+
+def test_fixtures_in_conftest(testdir):
+ testdir.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """arg1 docstring"""
+ @pytest.fixture
+ def arg2():
+ """arg2 docstring"""
+ @pytest.fixture
+ def arg3(arg1, arg2):
+ """arg3
+ docstring
+ """
+ '''
+ )
+ p = testdir.makepyfile(
+ """
+ def test_arg2(arg2):
+ pass
+ def test_arg3(arg3):
+ pass
+ """
+ )
+ result = testdir.runpytest("--fixtures-per-test", p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "*fixtures used by test_arg2*",
+ "*(test_fixtures_in_conftest.py:2)*",
+ "arg2",
+ " arg2 docstring",
+ "*fixtures used by test_arg3*",
+ "*(test_fixtures_in_conftest.py:4)*",
+ "arg1",
+ " arg1 docstring",
+ "arg2",
+ " arg2 docstring",
+ "arg3",
+ " arg3",
+ " docstring",
+ ]
+ )
+
+
+def test_should_show_fixtures_used_by_test(testdir):
+ testdir.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """arg1 from conftest"""
+ @pytest.fixture
+ def arg2():
+ """arg2 from conftest"""
+ '''
+ )
+ p = testdir.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """arg1 from testmodule"""
+ def test_args(arg1, arg2):
+ pass
+ '''
+ )
+ result = testdir.runpytest("--fixtures-per-test", p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "*fixtures used by test_args*",
+ "*(test_should_show_fixtures_used_by_test.py:6)*",
+ "arg1",
+ " arg1 from testmodule",
+ "arg2",
+ " arg2 from conftest",
+ ]
+ )
+
+
+def test_verbose_include_private_fixtures_and_loc(testdir):
+ testdir.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture
+ def _arg1():
+ """_arg1 from conftest"""
+ @pytest.fixture
+ def arg2(_arg1):
+ """arg2 from conftest"""
+ '''
+ )
+ p = testdir.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg3():
+ """arg3 from testmodule"""
+ def test_args(arg2, arg3):
+ pass
+ '''
+ )
+ result = testdir.runpytest("--fixtures-per-test", "-v", p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "*fixtures used by test_args*",
+ "*(test_verbose_include_private_fixtures_and_loc.py:6)*",
+ "_arg1 -- conftest.py:3",
+ " _arg1 from conftest",
+ "arg2 -- conftest.py:6",
+ " arg2 from conftest",
+ "arg3 -- test_verbose_include_private_fixtures_and_loc.py:3",
+ " arg3 from testmodule",
+ ]
+ )
+
+
+def test_doctest_items(testdir):
+ testdir.makepyfile(
+ '''
+ def foo():
+ """
+ >>> 1 + 1
+ 2
+ """
+ '''
+ )
+ testdir.maketxtfile(
+ """
+ >>> 1 + 1
+ 2
+ """
+ )
+ result = testdir.runpytest(
+ "--fixtures-per-test", "--doctest-modules", "--doctest-glob=*.txt", "-v"
+ )
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(["*collected 2 items*"])
diff --git a/third_party/python/pytest/testing/python/test_deprecations.py b/third_party/python/pytest/testing/python/test_deprecations.py
new file mode 100644
index 0000000000..b0c11f0b03
--- /dev/null
+++ b/third_party/python/pytest/testing/python/test_deprecations.py
@@ -0,0 +1,22 @@
+import pytest
+
+from _pytest.python import PyCollector
+
+
+class PyCollectorMock(PyCollector):
+ """evil hack"""
+
+ def __init__(self):
+ self.called = False
+
+ def _makeitem(self, *k):
+ """hack to disable the actual behaviour"""
+ self.called = True
+
+
+def test_pycollector_makeitem_is_deprecated():
+
+ collector = PyCollectorMock()
+ with pytest.deprecated_call():
+ collector.makeitem("foo", "bar")
+ assert collector.called
diff --git a/third_party/python/pytest/testing/test_argcomplete.py b/third_party/python/pytest/testing/test_argcomplete.py
new file mode 100644
index 0000000000..b042de5ce5
--- /dev/null
+++ b/third_party/python/pytest/testing/test_argcomplete.py
@@ -0,0 +1,109 @@
+from __future__ import absolute_import, division, print_function
+import subprocess
+import sys
+import pytest
+
+# test for _argcomplete but not specific for any application
+
+
+def equal_with_bash(prefix, ffc, fc, out=None):
+ res = ffc(prefix)
+ res_bash = set(fc(prefix))
+ retval = set(res) == res_bash
+ if out:
+ out.write("equal_with_bash %s %s\n" % (retval, res))
+ if not retval:
+ out.write(" python - bash: %s\n" % (set(res) - res_bash))
+ out.write(" bash - python: %s\n" % (res_bash - set(res)))
+ return retval
+
+
+# copied from argcomplete.completers, since importing from there
+# also pulls in argcomplete.__init__, which opens file descriptor 9;
+# this causes an IOError at the end of the test run
+
+
+def _wrapcall(*args, **kargs):
+ try:
+ if sys.version_info > (2, 7):
+ return subprocess.check_output(*args, **kargs).decode().splitlines()
+ if "stdout" in kargs:
+ raise ValueError("stdout argument not allowed, it will be overridden.")
+ process = subprocess.Popen(stdout=subprocess.PIPE, *args, **kargs)
+ output, unused_err = process.communicate()
+ retcode = process.poll()
+ if retcode:
+ cmd = kargs.get("args")
+ if cmd is None:
+ cmd = args[0]
+ raise subprocess.CalledProcessError(retcode, cmd)
+ return output.decode().splitlines()
+ except subprocess.CalledProcessError:
+ return []
+
+
+class FilesCompleter(object):
+ "File completer class, optionally takes a list of allowed extensions"
+
+ def __init__(self, allowednames=(), directories=True):
+ # Fix if someone passes in a string instead of a list
+ if type(allowednames) is str:
+ allowednames = [allowednames]
+
+ self.allowednames = [x.lstrip("*").lstrip(".") for x in allowednames]
+ self.directories = directories
+
+ def __call__(self, prefix, **kwargs):
+ completion = []
+ if self.allowednames:
+ if self.directories:
+ files = _wrapcall(
+ ["bash", "-c", "compgen -A directory -- '{p}'".format(p=prefix)]
+ )
+ completion += [f + "/" for f in files]
+ for x in self.allowednames:
+ completion += _wrapcall(
+ [
+ "bash",
+ "-c",
+ "compgen -A file -X '!*.{0}' -- '{p}'".format(x, p=prefix),
+ ]
+ )
+ else:
+ completion += _wrapcall(
+ ["bash", "-c", "compgen -A file -- '{p}'".format(p=prefix)]
+ )
+
+ anticomp = _wrapcall(
+ ["bash", "-c", "compgen -A directory -- '{p}'".format(p=prefix)]
+ )
+
+ completion = list(set(completion) - set(anticomp))
+
+ if self.directories:
+ completion += [f + "/" for f in anticomp]
+ return completion
+
+
+class TestArgComplete(object):
+
+ @pytest.mark.skipif("sys.platform in ('win32', 'darwin')")
+ def test_compare_with_compgen(self):
+ from _pytest._argcomplete import FastFilesCompleter
+
+ ffc = FastFilesCompleter()
+ fc = FilesCompleter()
+ for x in ["/", "/d", "/data", "qqq", ""]:
+ assert equal_with_bash(x, ffc, fc, out=sys.stdout)
+
+ @pytest.mark.skipif("sys.platform in ('win32', 'darwin')")
+ def test_remove_dir_prefix(self):
+ """this is not compatible with compgen but it is with bash itself:
+ ls /usr/<TAB>
+ """
+ from _pytest._argcomplete import FastFilesCompleter
+
+ ffc = FastFilesCompleter()
+ fc = FilesCompleter()
+ for x in "/usr/".split():
+ assert not equal_with_bash(x, ffc, fc, out=sys.stdout)
diff --git a/third_party/python/pytest/testing/test_assertion.py b/third_party/python/pytest/testing/test_assertion.py
new file mode 100644
index 0000000000..393cf817c7
--- /dev/null
+++ b/third_party/python/pytest/testing/test_assertion.py
@@ -0,0 +1,1172 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import, division, print_function
+import sys
+import textwrap
+
+import _pytest.assertion as plugin
+import py
+import pytest
+from _pytest.assertion import util
+from _pytest.assertion import truncate
+
+PY3 = sys.version_info >= (3, 0)
+
+
+@pytest.fixture
+def mock_config():
+
+ class Config(object):
+ verbose = False
+
+ def getoption(self, name):
+ if name == "verbose":
+ return self.verbose
+ raise KeyError("Not mocked out: %s" % name)
+
+ return Config()
+
+
+class TestImportHookInstallation(object):
+
+ @pytest.mark.parametrize("initial_conftest", [True, False])
+ @pytest.mark.parametrize("mode", ["plain", "rewrite"])
+ def test_conftest_assertion_rewrite(self, testdir, initial_conftest, mode):
+ """Test that conftest files are using assertion rewrite on import.
+ (#1619)
+ """
+ testdir.tmpdir.join("foo/tests").ensure(dir=1)
+ conftest_path = "conftest.py" if initial_conftest else "foo/conftest.py"
+ contents = {
+ conftest_path: """
+ import pytest
+ @pytest.fixture
+ def check_first():
+ def check(values, value):
+ assert values.pop(0) == value
+ return check
+ """,
+ "foo/tests/test_foo.py": """
+ def test(check_first):
+ check_first([10, 30], 30)
+ """,
+ }
+ testdir.makepyfile(**contents)
+ result = testdir.runpytest_subprocess("--assert=%s" % mode)
+ if mode == "plain":
+ expected = "E AssertionError"
+ elif mode == "rewrite":
+ expected = "*assert 10 == 30*"
+ else:
+ assert 0
+ result.stdout.fnmatch_lines([expected])
+
+ def test_rewrite_assertions_pytester_plugin(self, testdir):
+ """
+ Assertions in the pytester plugin must also benefit from assertion
+ rewriting (#1920).
+ """
+ testdir.makepyfile(
+ """
+ pytest_plugins = ['pytester']
+ def test_dummy_failure(testdir): # how meta!
+ testdir.makepyfile('def test(): assert 0')
+ r = testdir.inline_run()
+ r.assertoutcome(passed=1)
+ """
+ )
+ result = testdir.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*assert 1 == 0*"])
+
+ @pytest.mark.parametrize("mode", ["plain", "rewrite"])
+ def test_pytest_plugins_rewrite(self, testdir, mode):
+ contents = {
+ "conftest.py": """
+ pytest_plugins = ['ham']
+ """,
+ "ham.py": """
+ import pytest
+ @pytest.fixture
+ def check_first():
+ def check(values, value):
+ assert values.pop(0) == value
+ return check
+ """,
+ "test_foo.py": """
+ def test_foo(check_first):
+ check_first([10, 30], 30)
+ """,
+ }
+ testdir.makepyfile(**contents)
+ result = testdir.runpytest_subprocess("--assert=%s" % mode)
+ if mode == "plain":
+ expected = "E AssertionError"
+ elif mode == "rewrite":
+ expected = "*assert 10 == 30*"
+ else:
+ assert 0
+ result.stdout.fnmatch_lines([expected])
+
+ @pytest.mark.parametrize("mode", ["str", "list"])
+ def test_pytest_plugins_rewrite_module_names(self, testdir, mode):
+ """Test that pluginmanager correct marks pytest_plugins variables
+ for assertion rewriting if they are defined as plain strings or
+ list of strings (#1888).
+ """
+ plugins = '"ham"' if mode == "str" else '["ham"]'
+ contents = {
+ "conftest.py": """
+ pytest_plugins = {plugins}
+ """.format(
+ plugins=plugins
+ ),
+ "ham.py": """
+ import pytest
+ """,
+ "test_foo.py": """
+ def test_foo(pytestconfig):
+ assert 'ham' in pytestconfig.pluginmanager.rewrite_hook._must_rewrite
+ """,
+ }
+ testdir.makepyfile(**contents)
+ result = testdir.runpytest_subprocess("--assert=rewrite")
+ assert result.ret == 0
+
+ def test_pytest_plugins_rewrite_module_names_correctly(self, testdir):
+ """Test that we match files correctly when they are marked for rewriting (#2939)."""
+ contents = {
+ "conftest.py": """
+ pytest_plugins = "ham"
+ """,
+ "ham.py": "",
+ "hamster.py": "",
+ "test_foo.py": """
+ def test_foo(pytestconfig):
+ assert pytestconfig.pluginmanager.rewrite_hook.find_module('ham') is not None
+ assert pytestconfig.pluginmanager.rewrite_hook.find_module('hamster') is None
+ """,
+ }
+ testdir.makepyfile(**contents)
+ result = testdir.runpytest_subprocess("--assert=rewrite")
+ assert result.ret == 0
+
+ @pytest.mark.parametrize("mode", ["plain", "rewrite"])
+ @pytest.mark.parametrize("plugin_state", ["development", "installed"])
+ def test_installed_plugin_rewrite(self, testdir, mode, plugin_state):
+ # Make sure the hook is installed early enough so that plugins
+ # installed via setuptools are rewritten.
+ testdir.tmpdir.join("hampkg").ensure(dir=1)
+ contents = {
+ "hampkg/__init__.py": """
+ import pytest
+
+ @pytest.fixture
+ def check_first2():
+ def check(values, value):
+ assert values.pop(0) == value
+ return check
+ """,
+ "spamplugin.py": """
+ import pytest
+ from hampkg import check_first2
+
+ @pytest.fixture
+ def check_first():
+ def check(values, value):
+ assert values.pop(0) == value
+ return check
+ """,
+ "mainwrapper.py": """
+ import pytest, pkg_resources
+
+ plugin_state = "{plugin_state}"
+
+ class DummyDistInfo(object):
+ project_name = 'spam'
+ version = '1.0'
+
+ def _get_metadata(self, name):
+ # 'RECORD' meta-data only available in installed plugins
+ if name == 'RECORD' and plugin_state == "installed":
+ return ['spamplugin.py,sha256=abc,123',
+ 'hampkg/__init__.py,sha256=abc,123']
+ # 'SOURCES.txt' meta-data only available for plugins in development mode
+ elif name == 'SOURCES.txt' and plugin_state == "development":
+ return ['spamplugin.py',
+ 'hampkg/__init__.py']
+ return []
+
+ class DummyEntryPoint(object):
+ name = 'spam'
+ module_name = 'spam.py'
+ attrs = ()
+ extras = None
+ dist = DummyDistInfo()
+
+ def load(self, require=True, *args, **kwargs):
+ import spamplugin
+ return spamplugin
+
+ def iter_entry_points(name):
+ yield DummyEntryPoint()
+
+ pkg_resources.iter_entry_points = iter_entry_points
+ pytest.main()
+ """.format(
+ plugin_state=plugin_state
+ ),
+ "test_foo.py": """
+ def test(check_first):
+ check_first([10, 30], 30)
+
+ def test2(check_first2):
+ check_first([10, 30], 30)
+ """,
+ }
+ testdir.makepyfile(**contents)
+ result = testdir.run(
+ sys.executable, "mainwrapper.py", "-s", "--assert=%s" % mode
+ )
+ if mode == "plain":
+ expected = "E AssertionError"
+ elif mode == "rewrite":
+ expected = "*assert 10 == 30*"
+ else:
+ assert 0
+ result.stdout.fnmatch_lines([expected])
+
+ def test_rewrite_ast(self, testdir):
+ testdir.tmpdir.join("pkg").ensure(dir=1)
+ contents = {
+ "pkg/__init__.py": """
+ import pytest
+ pytest.register_assert_rewrite('pkg.helper')
+ """,
+ "pkg/helper.py": """
+ def tool():
+ a, b = 2, 3
+ assert a == b
+ """,
+ "pkg/plugin.py": """
+ import pytest, pkg.helper
+ @pytest.fixture
+ def tool():
+ return pkg.helper.tool
+ """,
+ "pkg/other.py": """
+ values = [3, 2]
+ def tool():
+ assert values.pop() == 3
+ """,
+ "conftest.py": """
+ pytest_plugins = ['pkg.plugin']
+ """,
+ "test_pkg.py": """
+ import pkg.other
+ def test_tool(tool):
+ tool()
+ def test_other():
+ pkg.other.tool()
+ """,
+ }
+ testdir.makepyfile(**contents)
+ result = testdir.runpytest_subprocess("--assert=rewrite")
+ result.stdout.fnmatch_lines(
+ [
+ ">*assert a == b*",
+ "E*assert 2 == 3*",
+ ">*assert values.pop() == 3*",
+ "E*AssertionError",
+ ]
+ )
+
+ def test_register_assert_rewrite_checks_types(self):
+ with pytest.raises(TypeError):
+ pytest.register_assert_rewrite(["pytest_tests_internal_non_existing"])
+ pytest.register_assert_rewrite(
+ "pytest_tests_internal_non_existing", "pytest_tests_internal_non_existing2"
+ )
+
+
+class TestBinReprIntegration(object):
+
+ def test_pytest_assertrepr_compare_called(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ values = []
+ def pytest_assertrepr_compare(op, left, right):
+ values.append((op, left, right))
+
+ @pytest.fixture
+ def list(request):
+ return values
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_hello():
+ assert 0 == 1
+ def test_check(list):
+ assert list == [("==", 0, 1)]
+ """
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(["*test_hello*FAIL*", "*test_check*PASS*"])
+
+
+def callequal(left, right, verbose=False):
+ config = mock_config()
+ config.verbose = verbose
+ return plugin.pytest_assertrepr_compare(config, "==", left, right)
+
+
+class TestAssert_reprcompare(object):
+
+ def test_different_types(self):
+ assert callequal([0, 1], "foo") is None
+
+ def test_summary(self):
+ summary = callequal([0, 1], [0, 2])[0]
+ assert len(summary) < 65
+
+ def test_text_diff(self):
+ diff = callequal("spam", "eggs")[1:]
+ assert "- spam" in diff
+ assert "+ eggs" in diff
+
+ def test_text_skipping(self):
+ lines = callequal("a" * 50 + "spam", "a" * 50 + "eggs")
+ assert "Skipping" in lines[1]
+ for line in lines:
+ assert "a" * 50 not in line
+
+ def test_text_skipping_verbose(self):
+ lines = callequal("a" * 50 + "spam", "a" * 50 + "eggs", verbose=True)
+ assert "- " + "a" * 50 + "spam" in lines
+ assert "+ " + "a" * 50 + "eggs" in lines
+
+ def test_multiline_text_diff(self):
+ left = "foo\nspam\nbar"
+ right = "foo\neggs\nbar"
+ diff = callequal(left, right)
+ assert "- spam" in diff
+ assert "+ eggs" in diff
+
+ def test_list(self):
+ expl = callequal([0, 1], [0, 2])
+ assert len(expl) > 1
+
+ @pytest.mark.parametrize(
+ ["left", "right", "expected"],
+ [
+ (
+ [0, 1],
+ [0, 2],
+ """
+ Full diff:
+ - [0, 1]
+ ? ^
+ + [0, 2]
+ ? ^
+ """,
+ ),
+ (
+ {0: 1},
+ {0: 2},
+ """
+ Full diff:
+ - {0: 1}
+ ? ^
+ + {0: 2}
+ ? ^
+ """,
+ ),
+ (
+ {0, 1},
+ {0, 2},
+ """
+ Full diff:
+ - set([0, 1])
+ ? ^
+ + set([0, 2])
+ ? ^
+ """
+ if not PY3
+ else """
+ Full diff:
+ - {0, 1}
+ ? ^
+ + {0, 2}
+ ? ^
+ """,
+ ),
+ ],
+ )
+ def test_iterable_full_diff(self, left, right, expected):
+ """Test the full diff assertion failure explanation.
+
+        When verbose is False, only a notice to use -v to get the full diff is rendered;
+        when verbose is True, an ndiff of the pprint output is returned.
+ """
+ expl = callequal(left, right, verbose=False)
+ assert expl[-1] == "Use -v to get the full diff"
+ expl = "\n".join(callequal(left, right, verbose=True))
+ assert expl.endswith(textwrap.dedent(expected).strip())
+
+ def test_list_different_lengths(self):
+ expl = callequal([0, 1], [0, 1, 2])
+ assert len(expl) > 1
+ expl = callequal([0, 1, 2], [0, 1])
+ assert len(expl) > 1
+
+ def test_dict(self):
+ expl = callequal({"a": 0}, {"a": 1})
+ assert len(expl) > 1
+
+ def test_dict_omitting(self):
+ lines = callequal({"a": 0, "b": 1}, {"a": 1, "b": 1})
+ assert lines[1].startswith("Omitting 1 identical item")
+ assert "Common items" not in lines
+ for line in lines[1:]:
+ assert "b" not in line
+
+ def test_dict_omitting_with_verbosity_1(self):
+ """ Ensure differing items are visible for verbosity=1 (#1512) """
+ lines = callequal({"a": 0, "b": 1}, {"a": 1, "b": 1}, verbose=1)
+ assert lines[1].startswith("Omitting 1 identical item")
+ assert lines[2].startswith("Differing items")
+ assert lines[3] == "{'a': 0} != {'a': 1}"
+ assert "Common items" not in lines
+
+ def test_dict_omitting_with_verbosity_2(self):
+ lines = callequal({"a": 0, "b": 1}, {"a": 1, "b": 1}, verbose=2)
+ assert lines[1].startswith("Common items:")
+ assert "Omitting" not in lines[1]
+ assert lines[2] == "{'b': 1}"
+
+ def test_set(self):
+ expl = callequal({0, 1}, {0, 2})
+ assert len(expl) > 1
+
+    def test_frozenset(self):
+ expl = callequal(frozenset([0, 1]), {0, 2})
+ assert len(expl) > 1
+
+ def test_Sequence(self):
+ col = py.builtin._tryimport("collections.abc", "collections", "sys")
+ if not hasattr(col, "MutableSequence"):
+ pytest.skip("cannot import MutableSequence")
+ MutableSequence = col.MutableSequence
+
+ class TestSequence(MutableSequence): # works with a Sequence subclass
+
+ def __init__(self, iterable):
+ self.elements = list(iterable)
+
+ def __getitem__(self, item):
+ return self.elements[item]
+
+ def __len__(self):
+ return len(self.elements)
+
+ def __setitem__(self, item, value):
+ pass
+
+ def __delitem__(self, item):
+ pass
+
+ def insert(self, item, index):
+ pass
+
+ expl = callequal(TestSequence([0, 1]), list([0, 2]))
+ assert len(expl) > 1
+
+ def test_list_tuples(self):
+ expl = callequal([], [(1, 2)])
+ assert len(expl) > 1
+ expl = callequal([(1, 2)], [])
+ assert len(expl) > 1
+
+ def test_list_bad_repr(self):
+
+ class A(object):
+
+ def __repr__(self):
+ raise ValueError(42)
+
+ expl = callequal([], [A()])
+ assert "ValueError" in "".join(expl)
+ expl = callequal({}, {"1": A()})
+ assert "faulty" in "".join(expl)
+
+ def test_one_repr_empty(self):
+ """
+        A faulty empty-string repr used to trigger
+        an UnboundLocalError in _diff_text.
+ """
+
+ class A(str):
+
+ def __repr__(self):
+ return ""
+
+ expl = callequal(A(), "")
+ assert not expl
+
+ def test_repr_no_exc(self):
+ expl = " ".join(callequal("foo", "bar"))
+ assert "raised in repr()" not in expl
+
+ def test_unicode(self):
+ left = py.builtin._totext("£€", "utf-8")
+ right = py.builtin._totext("£", "utf-8")
+ expl = callequal(left, right)
+ assert expl[0] == py.builtin._totext("'£€' == '£'", "utf-8")
+ assert expl[1] == py.builtin._totext("- £€", "utf-8")
+ assert expl[2] == py.builtin._totext("+ £", "utf-8")
+
+ def test_nonascii_text(self):
+ """
+ :issue: 877
+        A non-ASCII Python 2 str caused a UnicodeDecodeError.
+ """
+
+ class A(str):
+
+ def __repr__(self):
+ return "\xff"
+
+ expl = callequal(A(), "1")
+ assert expl
+
+ def test_format_nonascii_explanation(self):
+ assert util.format_explanation("λ")
+
+ def test_mojibake(self):
+ # issue 429
+ left = "e"
+ right = "\xc3\xa9"
+ if not isinstance(left, bytes):
+ left = bytes(left, "utf-8")
+ right = bytes(right, "utf-8")
+ expl = callequal(left, right)
+ for line in expl:
+ assert isinstance(line, py.builtin.text)
+ msg = py.builtin._totext("\n").join(expl)
+ assert msg
+
+
+class TestFormatExplanation(object):
+
+ def test_special_chars_full(self, testdir):
+ # Issue 453, for the bug this would raise IndexError
+ testdir.makepyfile(
+ """
+ def test_foo():
+ assert '\\n}' == ''
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*AssertionError*"])
+
+ def test_fmt_simple(self):
+ expl = "assert foo"
+ assert util.format_explanation(expl) == "assert foo"
+
+ def test_fmt_where(self):
+ expl = "\n".join(["assert 1", "{1 = foo", "} == 2"])
+ res = "\n".join(["assert 1 == 2", " + where 1 = foo"])
+ assert util.format_explanation(expl) == res
+
+ def test_fmt_and(self):
+ expl = "\n".join(["assert 1", "{1 = foo", "} == 2", "{2 = bar", "}"])
+ res = "\n".join(["assert 1 == 2", " + where 1 = foo", " + and 2 = bar"])
+ assert util.format_explanation(expl) == res
+
+ def test_fmt_where_nested(self):
+ expl = "\n".join(["assert 1", "{1 = foo", "{foo = bar", "}", "} == 2"])
+ res = "\n".join(["assert 1 == 2", " + where 1 = foo", " + where foo = bar"])
+ assert util.format_explanation(expl) == res
+
+ def test_fmt_newline(self):
+ expl = "\n".join(['assert "foo" == "bar"', "~- foo", "~+ bar"])
+ res = "\n".join(['assert "foo" == "bar"', " - foo", " + bar"])
+ assert util.format_explanation(expl) == res
+
+ def test_fmt_newline_escaped(self):
+ expl = "\n".join(["assert foo == bar", "baz"])
+ res = "assert foo == bar\\nbaz"
+ assert util.format_explanation(expl) == res
+
+ def test_fmt_newline_before_where(self):
+ expl = "\n".join(
+ [
+ "the assertion message here",
+ ">assert 1",
+ "{1 = foo",
+ "} == 2",
+ "{2 = bar",
+ "}",
+ ]
+ )
+ res = "\n".join(
+ [
+ "the assertion message here",
+ "assert 1 == 2",
+ " + where 1 = foo",
+ " + and 2 = bar",
+ ]
+ )
+ assert util.format_explanation(expl) == res
+
+ def test_fmt_multi_newline_before_where(self):
+ expl = "\n".join(
+ [
+ "the assertion",
+ "~message here",
+ ">assert 1",
+ "{1 = foo",
+ "} == 2",
+ "{2 = bar",
+ "}",
+ ]
+ )
+ res = "\n".join(
+ [
+ "the assertion",
+ " message here",
+ "assert 1 == 2",
+ " + where 1 = foo",
+ " + and 2 = bar",
+ ]
+ )
+ assert util.format_explanation(expl) == res
+
+
+class TestTruncateExplanation(object):
+
+ """ Confirm assertion output is truncated as expected """
+
+ # The number of lines in the truncation explanation message. Used
+ # to calculate that results have the expected length.
+ LINES_IN_TRUNCATION_MSG = 2
+
+ def test_doesnt_truncate_when_input_is_empty_list(self):
+ expl = []
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=100)
+ assert result == expl
+
+ def test_doesnt_truncate_at_when_input_is_5_lines_and_LT_max_chars(self):
+ expl = ["a" * 100 for x in range(5)]
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=8 * 80)
+ assert result == expl
+
+ def test_truncates_at_8_lines_when_given_list_of_empty_strings(self):
+ expl = ["" for x in range(50)]
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=100)
+ assert result != expl
+ assert len(result) == 8 + self.LINES_IN_TRUNCATION_MSG
+ assert "Full output truncated" in result[-1]
+ assert "43 lines hidden" in result[-1]
+ last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]
+ assert last_line_before_trunc_msg.endswith("...")
+
+ def test_truncates_at_8_lines_when_first_8_lines_are_LT_max_chars(self):
+ expl = ["a" for x in range(100)]
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=8 * 80)
+ assert result != expl
+ assert len(result) == 8 + self.LINES_IN_TRUNCATION_MSG
+ assert "Full output truncated" in result[-1]
+ assert "93 lines hidden" in result[-1]
+ last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]
+ assert last_line_before_trunc_msg.endswith("...")
+
+ def test_truncates_at_8_lines_when_first_8_lines_are_EQ_max_chars(self):
+ expl = ["a" * 80 for x in range(16)]
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=8 * 80)
+ assert result != expl
+ assert len(result) == 8 + self.LINES_IN_TRUNCATION_MSG
+ assert "Full output truncated" in result[-1]
+ assert "9 lines hidden" in result[-1]
+ last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]
+ assert last_line_before_trunc_msg.endswith("...")
+
+ def test_truncates_at_4_lines_when_first_4_lines_are_GT_max_chars(self):
+ expl = ["a" * 250 for x in range(10)]
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=999)
+ assert result != expl
+ assert len(result) == 4 + self.LINES_IN_TRUNCATION_MSG
+ assert "Full output truncated" in result[-1]
+ assert "7 lines hidden" in result[-1]
+ last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]
+ assert last_line_before_trunc_msg.endswith("...")
+
+ def test_truncates_at_1_line_when_first_line_is_GT_max_chars(self):
+ expl = ["a" * 250 for x in range(1000)]
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=100)
+ assert result != expl
+ assert len(result) == 1 + self.LINES_IN_TRUNCATION_MSG
+ assert "Full output truncated" in result[-1]
+ assert "1000 lines hidden" in result[-1]
+ last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]
+ assert last_line_before_trunc_msg.endswith("...")
+
+ def test_full_output_truncated(self, monkeypatch, testdir):
+ """ Test against full runpytest() output. """
+
+ line_count = 7
+ line_len = 100
+ expected_truncated_lines = 2
+ testdir.makepyfile(
+ r"""
+ def test_many_lines():
+ a = list([str(i)[0] * %d for i in range(%d)])
+ b = a[::2]
+ a = '\n'.join(map(str, a))
+ b = '\n'.join(map(str, b))
+ assert a == b
+ """
+ % (line_len, line_count)
+ )
+ monkeypatch.delenv("CI", raising=False)
+
+ result = testdir.runpytest()
+ # without -vv, truncate the message showing a few diff lines only
+ result.stdout.fnmatch_lines(
+ [
+ "*- 1*",
+ "*- 3*",
+ "*- 5*",
+ "*truncated (%d lines hidden)*use*-vv*" % expected_truncated_lines,
+ ]
+ )
+
+ result = testdir.runpytest("-vv")
+ result.stdout.fnmatch_lines(["* 6*"])
+
+ monkeypatch.setenv("CI", "1")
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["* 6*"])
+
+
+def test_python25_compile_issue257(testdir):
+ testdir.makepyfile(
+ """
+ def test_rewritten():
+ assert 1 == 2
+ # some comment
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ """
+ *E*assert 1 == 2*
+ *1 failed*
+ """
+ )
+
+
+def test_rewritten(testdir):
+ testdir.makepyfile(
+ """
+ def test_rewritten():
+ assert "@py_builtins" in globals()
+ """
+ )
+ assert testdir.runpytest().ret == 0
+
+
+def test_reprcompare_notin(mock_config):
+ detail = plugin.pytest_assertrepr_compare(
+ mock_config, "not in", "foo", "aaafoobbb"
+ )[
+ 1:
+ ]
+ assert detail == ["'foo' is contained here:", " aaafoobbb", "? +++"]
+
+
+def test_reprcompare_whitespaces(mock_config):
+ detail = plugin.pytest_assertrepr_compare(mock_config, "==", "\r\n", "\n")
+ assert (
+ detail
+ == [
+ r"'\r\n' == '\n'",
+ r"Strings contain only whitespace, escaping them using repr()",
+ r"- '\r\n'",
+ r"? --",
+ r"+ '\n'",
+ ]
+ )
+
+
+def test_pytest_assertrepr_compare_integration(testdir):
+ testdir.makepyfile(
+ """
+ def test_hello():
+ x = set(range(100))
+ y = x.copy()
+ y.remove(50)
+ assert x == y
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*def test_hello():*", "*assert x == y*", "*E*Extra items*left*", "*E*50*"]
+ )
+
+
+def test_sequence_comparison_uses_repr(testdir):
+ testdir.makepyfile(
+ """
+ def test_hello():
+ x = set("hello x")
+ y = set("hello y")
+ assert x == y
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*def test_hello():*",
+ "*assert x == y*",
+ "*E*Extra items*left*",
+ "*E*'x'*",
+ "*E*Extra items*right*",
+ "*E*'y'*",
+ ]
+ )
+
+
+def test_assertrepr_loaded_per_dir(testdir):
+ testdir.makepyfile(test_base=["def test_base(): assert 1 == 2"])
+ a = testdir.mkdir("a")
+ a_test = a.join("test_a.py")
+ a_test.write("def test_a(): assert 1 == 2")
+ a_conftest = a.join("conftest.py")
+ a_conftest.write('def pytest_assertrepr_compare(): return ["summary a"]')
+ b = testdir.mkdir("b")
+ b_test = b.join("test_b.py")
+ b_test.write("def test_b(): assert 1 == 2")
+ b_conftest = b.join("conftest.py")
+ b_conftest.write('def pytest_assertrepr_compare(): return ["summary b"]')
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*def test_base():*",
+ "*E*assert 1 == 2*",
+ "*def test_a():*",
+ "*E*assert summary a*",
+ "*def test_b():*",
+ "*E*assert summary b*",
+ ]
+ )
+
+
+def test_assertion_options(testdir):
+ testdir.makepyfile(
+ """
+ def test_hello():
+ x = 3
+ assert x == 4
+ """
+ )
+ result = testdir.runpytest()
+ assert "3 == 4" in result.stdout.str()
+ result = testdir.runpytest_subprocess("--assert=plain")
+ assert "3 == 4" not in result.stdout.str()
+
+
+def test_triple_quoted_string_issue113(testdir):
+ testdir.makepyfile(
+ """
+ def test_hello():
+ assert "" == '''
+ '''"""
+ )
+ result = testdir.runpytest("--fulltrace")
+ result.stdout.fnmatch_lines(["*1 failed*"])
+ assert "SyntaxError" not in result.stdout.str()
+
+
+def test_traceback_failure(testdir):
+ p1 = testdir.makepyfile(
+ """
+ def g():
+ return 2
+ def f(x):
+ assert x == g()
+ def test_onefails():
+ f(3)
+ """
+ )
+ result = testdir.runpytest(p1, "--tb=long")
+ result.stdout.fnmatch_lines(
+ [
+ "*test_traceback_failure.py F*",
+ "====* FAILURES *====",
+ "____*____",
+ "",
+ " def test_onefails():",
+ "> f(3)",
+ "",
+ "*test_*.py:6: ",
+ "_ _ _ *",
+ # "",
+ " def f(x):",
+ "> assert x == g()",
+ "E assert 3 == 2",
+ "E + where 2 = g()",
+ "",
+ "*test_traceback_failure.py:4: AssertionError",
+ ]
+ )
+
+ result = testdir.runpytest(p1) # "auto"
+ result.stdout.fnmatch_lines(
+ [
+ "*test_traceback_failure.py F*",
+ "====* FAILURES *====",
+ "____*____",
+ "",
+ " def test_onefails():",
+ "> f(3)",
+ "",
+ "*test_*.py:6: ",
+ "",
+ " def f(x):",
+ "> assert x == g()",
+ "E assert 3 == 2",
+ "E + where 2 = g()",
+ "",
+ "*test_traceback_failure.py:4: AssertionError",
+ ]
+ )
+
+
+@pytest.mark.skipif(
+ sys.version_info[:2] <= (3, 3),
+ reason="Python 3.4+ shows chained exceptions on multiprocess",
+)
+def test_exception_handling_no_traceback(testdir):
+ """
+    Handle chained exceptions in tasks submitted by the multiprocessing module (#1984).
+ """
+ p1 = testdir.makepyfile(
+ """
+ from multiprocessing import Pool
+
+ def process_task(n):
+ assert n == 10
+
+ def multitask_job():
+ tasks = [1]
+ with Pool(processes=1) as pool:
+ pool.map(process_task, tasks)
+
+ def test_multitask_job():
+ multitask_job()
+ """
+ )
+ result = testdir.runpytest(p1, "--tb=long")
+ result.stdout.fnmatch_lines(
+ [
+ "====* FAILURES *====",
+ "*multiprocessing.pool.RemoteTraceback:*",
+ "Traceback (most recent call last):",
+ "*assert n == 10",
+ "The above exception was the direct cause of the following exception:",
+ "> * multitask_job()",
+ ]
+ )
+
+
+@pytest.mark.skipif(
+ "'__pypy__' in sys.builtin_module_names or sys.platform.startswith('java')"
+)
+def test_warn_missing(testdir):
+ testdir.makepyfile("")
+ result = testdir.run(sys.executable, "-OO", "-m", "pytest", "-h")
+ result.stderr.fnmatch_lines(["*WARNING*assert statements are not executed*"])
+ result = testdir.run(sys.executable, "-OO", "-m", "pytest")
+ result.stderr.fnmatch_lines(["*WARNING*assert statements are not executed*"])
+
+
+def test_recursion_source_decode(testdir):
+ testdir.makepyfile(
+ """
+ def test_something():
+ pass
+ """
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ python_files = *.py
+ """
+ )
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(
+ """
+ <Module*>
+ """
+ )
+
+
+def test_AssertionError_message(testdir):
+ testdir.makepyfile(
+ """
+ def test_hello():
+ x,y = 1,2
+ assert 0, (x,y)
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *def test_hello*
+ *assert 0, (x,y)*
+ *AssertionError: (1, 2)*
+ """
+ )
+
+
+@pytest.mark.skipif(PY3, reason="This bug does not exist on PY3")
+def test_set_with_unsortable_elements():
+ # issue #718
+ class UnsortableKey(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __lt__(self, other):
+ raise RuntimeError()
+
+ def __repr__(self):
+ return "repr({})".format(self.name)
+
+ def __eq__(self, other):
+ return self.name == other.name
+
+ def __hash__(self):
+ return hash(self.name)
+
+ left_set = {UnsortableKey(str(i)) for i in range(1, 3)}
+ right_set = {UnsortableKey(str(i)) for i in range(2, 4)}
+ expl = callequal(left_set, right_set, verbose=True)
+ # skip first line because it contains the "construction" of the set, which does not have a guaranteed order
+ expl = expl[1:]
+ dedent = textwrap.dedent(
+ """
+ Extra items in the left set:
+ repr(1)
+ Extra items in the right set:
+ repr(3)
+ Full diff (fallback to calling repr on each item):
+ - repr(1)
+ repr(2)
+ + repr(3)
+ """
+ ).strip()
+ assert "\n".join(expl) == dedent
+
+
+def test_diff_newline_at_end(monkeypatch, testdir):
+ testdir.makepyfile(
+ r"""
+ def test_diff():
+ assert 'asdf' == 'asdf\n'
+ """
+ )
+
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ r"""
+ *assert 'asdf' == 'asdf\n'
+ * - asdf
+ * + asdf
+ * ? +
+ """
+ )
+
+
+def test_assert_tuple_warning(testdir):
+ testdir.makepyfile(
+ """
+ def test_tuple():
+ assert(False, 'you shall not pass')
+ """
+ )
+ result = testdir.runpytest("-rw")
+ result.stdout.fnmatch_lines(
+ ["*test_assert_tuple_warning.py:2", "*assertion is always true*"]
+ )
+
+
+def test_assert_indirect_tuple_no_warning(testdir):
+ testdir.makepyfile(
+ """
+ def test_tuple():
+ tpl = ('foo', 'bar')
+ assert tpl
+ """
+ )
+ result = testdir.runpytest("-rw")
+ output = "\n".join(result.stdout.lines)
+ assert "WR1" not in output
+
+
+def test_assert_with_unicode(monkeypatch, testdir):
+ testdir.makepyfile(
+ u"""
+ # -*- coding: utf-8 -*-
+ def test_unicode():
+ assert u'유니코드' == u'Unicode'
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*AssertionError*"])
+
+
+def test_raise_unprintable_assertion_error(testdir):
+ testdir.makepyfile(
+ r"""
+ def test_raise_assertion_error():
+ raise AssertionError('\xff')
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [r"> raise AssertionError('\xff')", "E AssertionError: *"]
+ )
+
+
+def test_raise_assertion_error_raisin_repr(testdir):
+ testdir.makepyfile(
+ u"""
+ class RaisingRepr(object):
+ def __repr__(self):
+ raise Exception()
+ def test_raising_repr():
+ raise AssertionError(RaisingRepr())
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ ["E AssertionError: <unprintable AssertionError object>"]
+ )
+
+
+def test_issue_1944(testdir):
+ testdir.makepyfile(
+ """
+ def f():
+ return
+
+ assert f() == 10
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 error*"])
+ assert "AttributeError: 'Module' object has no attribute '_obj'" not in result.stdout.str()
diff --git a/third_party/python/pytest/testing/test_assertrewrite.py b/third_party/python/pytest/testing/test_assertrewrite.py
new file mode 100644
index 0000000000..144f625bc6
--- /dev/null
+++ b/third_party/python/pytest/testing/test_assertrewrite.py
@@ -0,0 +1,1144 @@
+from __future__ import absolute_import, division, print_function
+
+import glob
+import os
+import py_compile
+import stat
+import sys
+import textwrap
+import zipfile
+import py
+import pytest
+
+import _pytest._code
+from _pytest.assertion import util
+from _pytest.assertion.rewrite import (
+ rewrite_asserts,
+ PYTEST_TAG,
+ AssertionRewritingHook,
+)
+from _pytest.main import EXIT_NOTESTSCOLLECTED
+
+ast = pytest.importorskip("ast")
+if sys.platform.startswith("java"):
+ # XXX should be xfail
+ pytest.skip("assert rewrite does currently not work on jython")
+
+
+def setup_module(mod):
+ mod._old_reprcompare = util._reprcompare
+ _pytest._code._reprcompare = None
+
+
+def teardown_module(mod):
+ util._reprcompare = mod._old_reprcompare
+ del mod._old_reprcompare
+
+
+def rewrite(src):
+ tree = ast.parse(src)
+ rewrite_asserts(tree)
+ return tree
+
+
+def getmsg(f, extra_ns=None, must_pass=False):
+ """Rewrite the assertions in f, run it, and get the failure message."""
+ src = "\n".join(_pytest._code.Code(f).source().lines)
+ mod = rewrite(src)
+ code = compile(mod, "<test>", "exec")
+ ns = {}
+ if extra_ns is not None:
+ ns.update(extra_ns)
+ py.builtin.exec_(code, ns)
+ func = ns[f.__name__]
+ try:
+ func()
+ except AssertionError:
+ if must_pass:
+ pytest.fail("shouldn't have raised")
+ s = str(sys.exc_info()[1])
+ if not s.startswith("assert"):
+ return "AssertionError: " + s
+ return s
+ else:
+ if not must_pass:
+ pytest.fail("function didn't raise at all")
+
+
+def adjust_body_for_new_docstring_in_module_node(m):
+ """Module docstrings in 3.8 are part of Module node.
+ This was briefly in 3.7 as well but got reverted in beta 5.
+
+ It's not in the body so we remove it so the following body items have
+ the same indexes on all Python versions:
+
+ TODO:
+
+    We have a complicated sys.version_info check in here to ease testing on
+ various Python 3.7 versions, but we should remove the 3.7 check after
+ 3.7 is released as stable to make this check more straightforward.
+ """
+ if (
+ sys.version_info < (3, 8)
+ and not ((3, 7) <= sys.version_info <= (3, 7, 0, "beta", 4))
+ ):
+ assert len(m.body) > 1
+ assert isinstance(m.body[0], ast.Expr)
+ assert isinstance(m.body[0].value, ast.Str)
+ del m.body[0]
+
+
+class TestAssertionRewrite(object):
+
+ def test_place_initial_imports(self):
+ s = """'Doc string'\nother = stuff"""
+ m = rewrite(s)
+ adjust_body_for_new_docstring_in_module_node(m)
+ for imp in m.body[0:2]:
+ assert isinstance(imp, ast.Import)
+ assert imp.lineno == 2
+ assert imp.col_offset == 0
+ assert isinstance(m.body[2], ast.Assign)
+ s = """from __future__ import with_statement\nother_stuff"""
+ m = rewrite(s)
+ assert isinstance(m.body[0], ast.ImportFrom)
+ for imp in m.body[1:3]:
+ assert isinstance(imp, ast.Import)
+ assert imp.lineno == 2
+ assert imp.col_offset == 0
+ assert isinstance(m.body[3], ast.Expr)
+ s = """'doc string'\nfrom __future__ import with_statement"""
+ m = rewrite(s)
+ adjust_body_for_new_docstring_in_module_node(m)
+ assert isinstance(m.body[0], ast.ImportFrom)
+ for imp in m.body[1:3]:
+ assert isinstance(imp, ast.Import)
+ assert imp.lineno == 2
+ assert imp.col_offset == 0
+ s = """'doc string'\nfrom __future__ import with_statement\nother"""
+ m = rewrite(s)
+ adjust_body_for_new_docstring_in_module_node(m)
+ assert isinstance(m.body[0], ast.ImportFrom)
+ for imp in m.body[1:3]:
+ assert isinstance(imp, ast.Import)
+ assert imp.lineno == 3
+ assert imp.col_offset == 0
+ assert isinstance(m.body[3], ast.Expr)
+ s = """from . import relative\nother_stuff"""
+ m = rewrite(s)
+ for imp in m.body[0:2]:
+ assert isinstance(imp, ast.Import)
+ assert imp.lineno == 1
+ assert imp.col_offset == 0
+ assert isinstance(m.body[3], ast.Expr)
+
+ def test_dont_rewrite(self):
+ s = """'PYTEST_DONT_REWRITE'\nassert 14"""
+ m = rewrite(s)
+ adjust_body_for_new_docstring_in_module_node(m)
+ assert len(m.body) == 1
+ assert m.body[0].msg is None
+
+ def test_dont_rewrite_plugin(self, testdir):
+ contents = {
+ "conftest.py": "pytest_plugins = 'plugin'; import plugin",
+ "plugin.py": "'PYTEST_DONT_REWRITE'",
+ "test_foo.py": "def test_foo(): pass",
+ }
+ testdir.makepyfile(**contents)
+ result = testdir.runpytest_subprocess()
+ assert "warnings" not in "".join(result.outlines)
+
+ def test_name(self):
+
+ def f():
+ assert False
+
+ assert getmsg(f) == "assert False"
+
+ def f():
+ f = False
+ assert f
+
+ assert getmsg(f) == "assert False"
+
+ def f():
+ assert a_global # noqa
+
+ assert getmsg(f, {"a_global": False}) == "assert False"
+
+ def f():
+ assert sys == 42
+
+ assert getmsg(f, {"sys": sys}) == "assert sys == 42"
+
+ def f():
+ assert cls == 42 # noqa
+
+ class X(object):
+ pass
+
+ assert getmsg(f, {"cls": X}) == "assert cls == 42"
+
+ def test_assert_already_has_message(self):
+
+ def f():
+ assert False, "something bad!"
+
+ assert getmsg(f) == "AssertionError: something bad!\nassert False"
+
+ def test_assertion_message(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 2, "The failure message"
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ ["*AssertionError*The failure message*", "*assert 1 == 2*"]
+ )
+
+ def test_assertion_message_multiline(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 2, "A multiline\\nfailure message"
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ ["*AssertionError*A multiline*", "*failure message*", "*assert 1 == 2*"]
+ )
+
+ def test_assertion_message_tuple(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 2, (1, 2)
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ ["*AssertionError*%s*" % repr((1, 2)), "*assert 1 == 2*"]
+ )
+
+ def test_assertion_message_expr(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 2, 1 + 2
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*AssertionError*3*", "*assert 1 == 2*"])
+
+ def test_assertion_message_escape(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 2, 'To be escaped: %'
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ ["*AssertionError: To be escaped: %", "*assert 1 == 2"]
+ )
+
+ def test_boolop(self):
+
+ def f():
+ f = g = False
+ assert f and g
+
+ assert getmsg(f) == "assert (False)"
+
+ def f():
+ f = True
+ g = False
+ assert f and g
+
+ assert getmsg(f) == "assert (True and False)"
+
+ def f():
+ f = False
+ g = True
+ assert f and g
+
+ assert getmsg(f) == "assert (False)"
+
+ def f():
+ f = g = False
+ assert f or g
+
+ assert getmsg(f) == "assert (False or False)"
+
+ def f():
+ f = g = False
+ assert not f and not g
+
+ getmsg(f, must_pass=True)
+
+ def x():
+ return False
+
+ def f():
+ assert x() and x()
+
+ assert (
+ getmsg(f, {"x": x})
+ == """assert (False)
+ + where False = x()"""
+ )
+
+ def f():
+ assert False or x()
+
+ assert (
+ getmsg(f, {"x": x})
+ == """assert (False or False)
+ + where False = x()"""
+ )
+
+ def f():
+ assert 1 in {} and 2 in {}
+
+ assert getmsg(f) == "assert (1 in {})"
+
+ def f():
+ x = 1
+ y = 2
+ assert x in {1: None} and y in {}
+
+ assert getmsg(f) == "assert (1 in {1: None} and 2 in {})"
+
+ def f():
+ f = True
+ g = False
+ assert f or g
+
+ getmsg(f, must_pass=True)
+
+ def f():
+ f = g = h = lambda: True
+ assert f() and g() and h()
+
+ getmsg(f, must_pass=True)
+
+ def test_short_circuit_evaluation(self):
+
+ def f():
+ assert True or explode # noqa
+
+ getmsg(f, must_pass=True)
+
+ def f():
+ x = 1
+ assert x == 1 or x == 2
+
+ getmsg(f, must_pass=True)
+
+ def test_unary_op(self):
+
+ def f():
+ x = True
+ assert not x
+
+ assert getmsg(f) == "assert not True"
+
+ def f():
+ x = 0
+ assert ~x + 1
+
+ assert getmsg(f) == "assert (~0 + 1)"
+
+ def f():
+ x = 3
+ assert -x + x
+
+ assert getmsg(f) == "assert (-3 + 3)"
+
+ def f():
+ x = 0
+ assert +x + x
+
+ assert getmsg(f) == "assert (+0 + 0)"
+
+ def test_binary_op(self):
+
+ def f():
+ x = 1
+ y = -1
+ assert x + y
+
+ assert getmsg(f) == "assert (1 + -1)"
+
+ def f():
+ assert not 5 % 4
+
+ assert getmsg(f) == "assert not (5 % 4)"
+
+ def test_boolop_percent(self):
+
+ def f():
+ assert 3 % 2 and False
+
+ assert getmsg(f) == "assert ((3 % 2) and False)"
+
+ def f():
+ assert False or 4 % 2
+
+ assert getmsg(f) == "assert (False or (4 % 2))"
+
+ @pytest.mark.skipif("sys.version_info < (3,5)")
+ def test_at_operator_issue1290(self, testdir):
+ testdir.makepyfile(
+ """
+ class Matrix(object):
+ def __init__(self, num):
+ self.num = num
+ def __matmul__(self, other):
+ return self.num * other.num
+
+ def test_multmat_operator():
+ assert Matrix(2) @ Matrix(3) == 6"""
+ )
+ testdir.runpytest().assert_outcomes(passed=1)
+
+ def test_call(self):
+
+ def g(a=42, *args, **kwargs):
+ return False
+
+ ns = {"g": g}
+
+ def f():
+ assert g()
+
+ assert (
+ getmsg(f, ns)
+ == """assert False
+ + where False = g()"""
+ )
+
+ def f():
+ assert g(1)
+
+ assert (
+ getmsg(f, ns)
+ == """assert False
+ + where False = g(1)"""
+ )
+
+ def f():
+ assert g(1, 2)
+
+ assert (
+ getmsg(f, ns)
+ == """assert False
+ + where False = g(1, 2)"""
+ )
+
+ def f():
+ assert g(1, g=42)
+
+ assert (
+ getmsg(f, ns)
+ == """assert False
+ + where False = g(1, g=42)"""
+ )
+
+ def f():
+ assert g(1, 3, g=23)
+
+ assert (
+ getmsg(f, ns)
+ == """assert False
+ + where False = g(1, 3, g=23)"""
+ )
+
+ def f():
+ seq = [1, 2, 3]
+ assert g(*seq)
+
+ assert (
+ getmsg(f, ns)
+ == """assert False
+ + where False = g(*[1, 2, 3])"""
+ )
+
+ def f():
+ x = "a"
+ assert g(**{x: 2})
+
+ assert (
+ getmsg(f, ns)
+ == """assert False
+ + where False = g(**{'a': 2})"""
+ )
+
+ def test_attribute(self):
+
+ class X(object):
+ g = 3
+
+ ns = {"x": X}
+
+ def f():
+ assert not x.g # noqa
+
+ assert (
+ getmsg(f, ns)
+ == """assert not 3
+ + where 3 = x.g"""
+ )
+
+ def f():
+ x.a = False # noqa
+ assert x.a # noqa
+
+ assert (
+ getmsg(f, ns)
+ == """assert False
+ + where False = x.a"""
+ )
+
+ def test_comparisons(self):
+
+ def f():
+ a, b = range(2)
+ assert b < a
+
+ assert getmsg(f) == """assert 1 < 0"""
+
+ def f():
+ a, b, c = range(3)
+ assert a > b > c
+
+ assert getmsg(f) == """assert 0 > 1"""
+
+ def f():
+ a, b, c = range(3)
+ assert a < b > c
+
+ assert getmsg(f) == """assert 1 > 2"""
+
+ def f():
+ a, b, c = range(3)
+ assert a < b <= c
+
+ getmsg(f, must_pass=True)
+
+ def f():
+ a, b, c = range(3)
+ assert a < b
+ assert b < c
+
+ getmsg(f, must_pass=True)
+
+ def test_len(self):
+
+ def f():
+ values = list(range(10))
+ assert len(values) == 11
+
+ assert getmsg(f).startswith(
+ """assert 10 == 11
+ + where 10 = len(["""
+ )
+
+ def test_custom_reprcompare(self, monkeypatch):
+
+ def my_reprcompare(op, left, right):
+ return "42"
+
+ monkeypatch.setattr(util, "_reprcompare", my_reprcompare)
+
+ def f():
+ assert 42 < 3
+
+ assert getmsg(f) == "assert 42"
+
+ def my_reprcompare(op, left, right):
+ return "%s %s %s" % (left, op, right)
+
+ monkeypatch.setattr(util, "_reprcompare", my_reprcompare)
+
+ def f():
+ assert 1 < 3 < 5 <= 4 < 7
+
+ assert getmsg(f) == "assert 5 <= 4"
+
+ def test_assert_raising_nonzero_in_comparison(self):
+
+ def f():
+
+ class A(object):
+
+ def __nonzero__(self):
+ raise ValueError(42)
+
+ def __lt__(self, other):
+ return A()
+
+ def __repr__(self):
+ return "<MY42 object>"
+
+ def myany(x):
+ return False
+
+ assert myany(A() < 0)
+
+ assert "<MY42 object> < 0" in getmsg(f)
+
+ def test_formatchar(self):
+
+ def f():
+ assert "%test" == "test"
+
+ assert getmsg(f).startswith("assert '%test' == 'test'")
+
+ def test_custom_repr(self):
+
+ def f():
+
+ class Foo(object):
+ a = 1
+
+ def __repr__(self):
+ return "\n{ \n~ \n}"
+
+ f = Foo()
+ assert 0 == f.a
+
+ assert r"where 1 = \n{ \n~ \n}.a" in util._format_lines([getmsg(f)])[0]
+
+
+class TestRewriteOnImport(object):
+
+ def test_pycache_is_a_file(self, testdir):
+ testdir.tmpdir.join("__pycache__").write("Hello")
+ testdir.makepyfile(
+ """
+ def test_rewritten():
+ assert "@py_builtins" in globals()"""
+ )
+ assert testdir.runpytest().ret == 0
+
+ def test_pycache_is_readonly(self, testdir):
+ cache = testdir.tmpdir.mkdir("__pycache__")
+ old_mode = cache.stat().mode
+ cache.chmod(old_mode ^ stat.S_IWRITE)
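+        # toggle the owner write bit off; assertion rewriting should still work
+        # even when it cannot write the cached pyc into the read-only directory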
+ testdir.makepyfile(
+ """
+ def test_rewritten():
+ assert "@py_builtins" in globals()"""
+ )
+ try:
+ assert testdir.runpytest().ret == 0
+ finally:
+ cache.chmod(old_mode)
+
+ def test_zipfile(self, testdir):
+ z = testdir.tmpdir.join("myzip.zip")
+ z_fn = str(z)
+ f = zipfile.ZipFile(z_fn, "w")
+ try:
+ f.writestr("test_gum/__init__.py", "")
+ f.writestr("test_gum/test_lizard.py", "")
+ finally:
+ f.close()
+ z.chmod(256)
+ testdir.makepyfile(
+ """
+ import sys
+ sys.path.append(%r)
+ import test_gum.test_lizard"""
+ % (z_fn,)
+ )
+ assert testdir.runpytest().ret == EXIT_NOTESTSCOLLECTED
+
+ def test_readonly(self, testdir):
+ sub = testdir.mkdir("testing")
+ sub.join("test_readonly.py").write(
+ py.builtin._totext(
+ """
+def test_rewritten():
+ assert "@py_builtins" in globals()
+ """
+ ).encode(
+ "utf-8"
+ ),
+ "wb",
+ )
+ old_mode = sub.stat().mode
+ sub.chmod(320)
+ try:
+ assert testdir.runpytest().ret == 0
+ finally:
+ sub.chmod(old_mode)
+
+ def test_dont_write_bytecode(self, testdir, monkeypatch):
+ testdir.makepyfile(
+ """
+ import os
+ def test_no_bytecode():
+ assert "__pycache__" in __cached__
+ assert not os.path.exists(__cached__)
+ assert not os.path.exists(os.path.dirname(__cached__))"""
+ )
+ monkeypatch.setenv("PYTHONDONTWRITEBYTECODE", "1")
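+        # with PYTHONDONTWRITEBYTECODE set, the rewritten module must import fine
+        # without a cached pyc ever appearing on disk (asserted in the test above)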
+ assert testdir.runpytest_subprocess().ret == 0
+
+ def test_orphaned_pyc_file(self, testdir):
+ if sys.version_info < (3, 0) and hasattr(sys, "pypy_version_info"):
+ pytest.skip("pypy2 doesn't run orphaned pyc files")
+
+ testdir.makepyfile(
+ """
+ import orphan
+ def test_it():
+ assert orphan.value == 17
+ """
+ )
+ testdir.makepyfile(
+ orphan="""
+ value = 17
+ """
+ )
+ py_compile.compile("orphan.py")
+ os.remove("orphan.py")
+
+ # Python 3 puts the .pyc files in a __pycache__ directory, and will
+ # not import from there without source. It will import a .pyc from
+ # the source location though.
+ if not os.path.exists("orphan.pyc"):
+ pycs = glob.glob("__pycache__/orphan.*.pyc")
+ assert len(pycs) == 1
+ os.rename(pycs[0], "orphan.pyc")
+
+ assert testdir.runpytest().ret == 0
+
+ @pytest.mark.skipif('"__pypy__" in sys.modules')
+ def test_pyc_vs_pyo(self, testdir, monkeypatch):
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_optimized():
+ "hello"
+ assert test_optimized.__doc__ is None"""
+ )
+ p = py.path.local.make_numbered_dir(
+ prefix="runpytest-", keep=None, rootdir=testdir.tmpdir
+ )
+ tmp = "--basetemp=%s" % p
+ monkeypatch.setenv("PYTHONOPTIMIZE", "2")
+ monkeypatch.delenv("PYTHONDONTWRITEBYTECODE", raising=False)
+ assert testdir.runpytest_subprocess(tmp).ret == 0
+ tagged = "test_pyc_vs_pyo." + PYTEST_TAG
+ assert tagged + ".pyo" in os.listdir("__pycache__")
+ monkeypatch.undo()
+ monkeypatch.delenv("PYTHONDONTWRITEBYTECODE", raising=False)
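+        # second run without PYTHONOPTIMIZE: the docstring survives, the test fails
+        # (ret 1) and the module is cached as a plain .pyc instead of a .pyo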
+ assert testdir.runpytest_subprocess(tmp).ret == 1
+ assert tagged + ".pyc" in os.listdir("__pycache__")
+
+ def test_package(self, testdir):
+ pkg = testdir.tmpdir.join("pkg")
+ pkg.mkdir()
+ pkg.join("__init__.py").ensure()
+ pkg.join("test_blah.py").write(
+ """
+def test_rewritten():
+ assert "@py_builtins" in globals()"""
+ )
+ assert testdir.runpytest().ret == 0
+
+ def test_translate_newlines(self, testdir):
+ content = "def test_rewritten():\r\n assert '@py_builtins' in globals()"
+ b = content.encode("utf-8")
+ testdir.tmpdir.join("test_newlines.py").write(b, "wb")
+ assert testdir.runpytest().ret == 0
+
+ @pytest.mark.skipif(
+ sys.version_info < (3, 4),
+ reason="packages without __init__.py not supported on python 2",
+ )
+ def test_package_without__init__py(self, testdir):
+ pkg = testdir.mkdir("a_package_without_init_py")
+ pkg.join("module.py").ensure()
+ testdir.makepyfile("import a_package_without_init_py.module")
+ assert testdir.runpytest().ret == EXIT_NOTESTSCOLLECTED
+
+ def test_rewrite_warning(self, pytestconfig, monkeypatch):
+ hook = AssertionRewritingHook(pytestconfig)
+ warnings = []
+
+ def mywarn(code, msg):
+ warnings.append((code, msg))
+
+ monkeypatch.setattr(hook.config, "warn", mywarn)
+ hook.mark_rewrite("_pytest")
+ assert "_pytest" in warnings[0][1]
+
+ def test_rewrite_module_imported_from_conftest(self, testdir):
+ testdir.makeconftest(
+ """
+ import test_rewrite_module_imported
+ """
+ )
+ testdir.makepyfile(
+ test_rewrite_module_imported="""
+ def test_rewritten():
+ assert "@py_builtins" in globals()
+ """
+ )
+ assert testdir.runpytest_subprocess().ret == 0
+
+ def test_remember_rewritten_modules(self, pytestconfig, testdir, monkeypatch):
+ """
+        AssertionRewritingHook should remember rewritten modules so it
+ doesn't give false positives (#2005).
+ """
+ monkeypatch.syspath_prepend(testdir.tmpdir)
+ testdir.makepyfile(test_remember_rewritten_modules="")
+ warnings = []
+ hook = AssertionRewritingHook(pytestconfig)
+ monkeypatch.setattr(hook.config, "warn", lambda code, msg: warnings.append(msg))
+ hook.find_module("test_remember_rewritten_modules")
+ hook.load_module("test_remember_rewritten_modules")
+ hook.mark_rewrite("test_remember_rewritten_modules")
+ hook.mark_rewrite("test_remember_rewritten_modules")
+ assert warnings == []
+
+ def test_rewrite_warning_using_pytest_plugins(self, testdir):
+ testdir.makepyfile(
+ **{
+ "conftest.py": "pytest_plugins = ['core', 'gui', 'sci']",
+ "core.py": "",
+ "gui.py": "pytest_plugins = ['core', 'sci']",
+ "sci.py": "pytest_plugins = ['core']",
+ "test_rewrite_warning_pytest_plugins.py": "def test(): pass",
+ }
+ )
+ testdir.chdir()
+ result = testdir.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*= 1 passed in *=*"])
+ assert "pytest-warning summary" not in result.stdout.str()
+
+ def test_rewrite_warning_using_pytest_plugins_env_var(self, testdir, monkeypatch):
+ monkeypatch.setenv("PYTEST_PLUGINS", "plugin")
+ testdir.makepyfile(
+ **{
+ "plugin.py": "",
+ "test_rewrite_warning_using_pytest_plugins_env_var.py": """
+ import plugin
+ pytest_plugins = ['plugin']
+ def test():
+ pass
+ """,
+ }
+ )
+ testdir.chdir()
+ result = testdir.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*= 1 passed in *=*"])
+ assert "pytest-warning summary" not in result.stdout.str()
+
+ @pytest.mark.skipif(sys.version_info[0] > 2, reason="python 2 only")
+ def test_rewrite_future_imports(self, testdir):
+ """Test that rewritten modules don't inherit the __future__ flags
+ from the assertrewrite module.
+
+ assertion.rewrite imports __future__.division (and others), so
+ ensure rewritten modules don't inherit those flags.
+
+        The test below will fail if __future__.division is enabled.
+ """
+ testdir.makepyfile(
+ """
+ def test():
+ x = 1 / 2
+ assert type(x) is int
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 0
+
+
+class TestAssertionRewriteHookDetails(object):
+
+ def test_loader_is_package_false_for_module(self, testdir):
+ testdir.makepyfile(
+ test_fun="""
+ def test_loader():
+ assert not __loader__.is_package(__name__)
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["* 1 passed*"])
+
+ def test_loader_is_package_true_for_package(self, testdir):
+ testdir.makepyfile(
+ test_fun="""
+ def test_loader():
+ assert not __loader__.is_package(__name__)
+
+ def test_fun():
+ assert __loader__.is_package('fun')
+
+ def test_missing():
+ assert not __loader__.is_package('pytest_not_there')
+ """
+ )
+ testdir.mkpydir("fun")
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["* 3 passed*"])
+
+ @pytest.mark.skipif("sys.version_info[0] >= 3")
+ @pytest.mark.xfail("hasattr(sys, 'pypy_translation_info')")
+ def test_assume_ascii(self, testdir):
+ content = "u'\xe2\x99\xa5\x01\xfe'"
+ testdir.tmpdir.join("test_encoding.py").write(content, "wb")
+ res = testdir.runpytest()
+ assert res.ret != 0
+ assert "SyntaxError: Non-ASCII character" in res.stdout.str()
+
+ @pytest.mark.skipif("sys.version_info[0] >= 3")
+ def test_detect_coding_cookie(self, testdir):
+ testdir.makepyfile(
+ test_cookie="""
+ # -*- coding: utf-8 -*-
+ u"St\xc3\xa4d"
+ def test_rewritten():
+ assert "@py_builtins" in globals()"""
+ )
+ assert testdir.runpytest().ret == 0
+
+ @pytest.mark.skipif("sys.version_info[0] >= 3")
+ def test_detect_coding_cookie_second_line(self, testdir):
+ testdir.makepyfile(
+ test_cookie="""
+ # -*- coding: utf-8 -*-
+ u"St\xc3\xa4d"
+ def test_rewritten():
+ assert "@py_builtins" in globals()"""
+ )
+ assert testdir.runpytest().ret == 0
+
+ @pytest.mark.skipif("sys.version_info[0] >= 3")
+ def test_detect_coding_cookie_crlf(self, testdir):
+ testdir.makepyfile(
+ test_cookie="""
+ # -*- coding: utf-8 -*-
+ u"St\xc3\xa4d"
+ def test_rewritten():
+ assert "@py_builtins" in globals()"""
+ )
+ assert testdir.runpytest().ret == 0
+
+ def test_sys_meta_path_munged(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_meta_path():
+ import sys; sys.meta_path = []"""
+ )
+ assert testdir.runpytest().ret == 0
+
+ def test_write_pyc(self, testdir, tmpdir, monkeypatch):
+ from _pytest.assertion.rewrite import _write_pyc
+ from _pytest.assertion import AssertionState
+ import atomicwrites
+ from contextlib import contextmanager
+
+ config = testdir.parseconfig([])
+ state = AssertionState(config, "rewrite")
+ source_path = tmpdir.ensure("source.py")
+ pycpath = tmpdir.join("pyc").strpath
+ assert _write_pyc(state, [1], source_path.stat(), pycpath)
+
+ @contextmanager
+ def atomic_write_failed(fn, mode="r", overwrite=False):
+ e = IOError()
+ e.errno = 10
+ raise e
+ yield # noqa
+
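+        # with atomic_write patched to raise an IOError, _write_pyc should swallow
+        # the error and signal failure by returning False instead of raising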
+ monkeypatch.setattr(atomicwrites, "atomic_write", atomic_write_failed)
+ assert not _write_pyc(state, [1], source_path.stat(), pycpath)
+
+ def test_resources_provider_for_loader(self, testdir):
+ """
+ Attempts to load resources from a package should succeed normally,
+        even when the AssertionRewritingHook is used to load the modules.
+
+ See #366 for details.
+ """
+ pytest.importorskip("pkg_resources")
+
+ testdir.mkpydir("testpkg")
+ contents = {
+ "testpkg/test_pkg": """
+ import pkg_resources
+
+ import pytest
+ from _pytest.assertion.rewrite import AssertionRewritingHook
+
+ def test_load_resource():
+ assert isinstance(__loader__, AssertionRewritingHook)
+ res = pkg_resources.resource_string(__name__, 'resource.txt')
+ res = res.decode('ascii')
+ assert res == 'Load me please.'
+ """
+ }
+ testdir.makepyfile(**contents)
+ testdir.maketxtfile(**{"testpkg/resource": "Load me please."})
+
+ result = testdir.runpytest_subprocess()
+ result.assert_outcomes(passed=1)
+
+ def test_read_pyc(self, tmpdir):
+ """
+        Ensure that `_read_pyc` can properly deal with corrupted pyc files.
+        In those circumstances it should just give up instead of letting
+        an exception propagate to the caller.
+ """
+ import py_compile
+ from _pytest.assertion.rewrite import _read_pyc
+
+ source = tmpdir.join("source.py")
+ pyc = source + "c"
+
+ source.write("def test(): pass")
+ py_compile.compile(str(source), str(pyc))
+
+ contents = pyc.read(mode="rb")
+ strip_bytes = 20 # header is around 8 bytes, strip a little more
+ assert len(contents) > strip_bytes
+ pyc.write(contents[:strip_bytes], mode="wb")
+
+ assert _read_pyc(source, str(pyc)) is None # no error
+
+ def test_reload_is_same(self, testdir):
+ # A file that will be picked up during collecting.
+ testdir.tmpdir.join("file.py").ensure()
+ testdir.tmpdir.join("pytest.ini").write(
+ textwrap.dedent(
+ """
+ [pytest]
+ python_files = *.py
+ """
+ )
+ )
+
+ testdir.makepyfile(
+ test_fun="""
+ import sys
+ try:
+ from imp import reload
+ except ImportError:
+ pass
+
+ def test_loader():
+ import file
+ assert sys.modules["file"] is reload(file)
+ """
+ )
+ result = testdir.runpytest("-s")
+ result.stdout.fnmatch_lines(["* 1 passed*"])
+
+ def test_get_data_support(self, testdir):
+ """Implement optional PEP302 api (#808).
+ """
+ path = testdir.mkpydir("foo")
+ path.join("test_foo.py").write(
+ _pytest._code.Source(
+ """
+ class Test(object):
+ def test_foo(self):
+ import pkgutil
+ data = pkgutil.get_data('foo.test_foo', 'data.txt')
+ assert data == b'Hey'
+ """
+ )
+ )
+ path.join("data.txt").write("Hey")
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("*1 passed*")
+
+
+def test_issue731(testdir):
+ testdir.makepyfile(
+ """
+ class LongReprWithBraces(object):
+ def __repr__(self):
+ return 'LongReprWithBraces({' + ('a' * 80) + '}' + ('a' * 120) + ')'
+
+ def some_method(self):
+ return False
+
+ def test_long_repr():
+ obj = LongReprWithBraces()
+ assert obj.some_method()
+ """
+ )
+ result = testdir.runpytest()
+ assert "unbalanced braces" not in result.stdout.str()
+
+
+class TestIssue925(object):
+
+ def test_simple_case(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_ternary_display():
+ assert (False == False) == False
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("*E*assert (False == False) == False")
+
+ def test_long_case(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_ternary_display():
+ assert False == (False == True) == True
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("*E*assert (False == True) == True")
+
+ def test_many_brackets(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_ternary_display():
+ assert True == ((False == True) == True)
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("*E*assert True == ((False == True) == True)")
+
+
+class TestIssue2121():
+
+ def test_simple(self, testdir):
+ testdir.tmpdir.join("tests/file.py").ensure().write(
+ """
+def test_simple_failure():
+ assert 1 + 1 == 3
+"""
+ )
+ testdir.tmpdir.join("pytest.ini").write(
+ textwrap.dedent(
+ """
+ [pytest]
+ python_files = tests/**.py
+ """
+ )
+ )
+
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("*E*assert (1 + 1) == 3")
diff --git a/third_party/python/pytest/testing/test_cacheprovider.py b/third_party/python/pytest/testing/test_cacheprovider.py
new file mode 100644
index 0000000000..33d1dd8448
--- /dev/null
+++ b/third_party/python/pytest/testing/test_cacheprovider.py
@@ -0,0 +1,820 @@
+from __future__ import absolute_import, division, print_function
+import sys
+import py
+import _pytest
+import pytest
+import os
+import shutil
+
+pytest_plugins = "pytester",
+
+
+class TestNewAPI(object):
+
+ def test_config_cache_makedir(self, testdir):
+ testdir.makeini("[pytest]")
+ config = testdir.parseconfigure()
+ with pytest.raises(ValueError):
+ config.cache.makedir("key/name")
+
+ p = config.cache.makedir("name")
+ assert p.check()
+
+ def test_config_cache_dataerror(self, testdir):
+ testdir.makeini("[pytest]")
+ config = testdir.parseconfigure()
+ cache = config.cache
+ pytest.raises(TypeError, lambda: cache.set("key/name", cache))
+ config.cache.set("key/name", 0)
+ config.cache._getvaluepath("key/name").write("123invalid")
+ val = config.cache.get("key/name", -2)
+ assert val == -2
+
+ def test_cache_writefail_cachfile_silent(self, testdir):
+ testdir.makeini("[pytest]")
+ testdir.tmpdir.join(".pytest_cache").write("gone wrong")
+ config = testdir.parseconfigure()
+ cache = config.cache
+ cache.set("test/broken", [])
+
+ @pytest.mark.skipif(sys.platform.startswith("win"), reason="no chmod on windows")
+ def test_cache_writefail_permissions(self, testdir):
+ testdir.makeini("[pytest]")
+ testdir.tmpdir.ensure_dir(".pytest_cache").chmod(0)
+ config = testdir.parseconfigure()
+ cache = config.cache
+ cache.set("test/broken", [])
+
+ @pytest.mark.skipif(sys.platform.startswith("win"), reason="no chmod on windows")
+ def test_cache_failure_warns(self, testdir):
+ testdir.tmpdir.ensure_dir(".pytest_cache").chmod(0)
+ testdir.makepyfile(
+ """
+ def test_error():
+ raise Exception
+
+ """
+ )
+ result = testdir.runpytest("-rw")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*could not create cache path*", "*2 warnings*"])
+
+ def test_config_cache(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_configure(config):
+ # see that we get cache information early on
+ assert hasattr(config, "cache")
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_session(pytestconfig):
+ assert hasattr(pytestconfig, "cache")
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_cachefuncarg(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_cachefuncarg(cache):
+ val = cache.get("some/thing", None)
+ assert val is None
+ cache.set("some/thing", [1])
+ pytest.raises(TypeError, lambda: cache.get("some/thing"))
+ val = cache.get("some/thing", [])
+ assert val == [1]
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_custom_rel_cache_dir(self, testdir):
+ rel_cache_dir = os.path.join("custom_cache_dir", "subdir")
+ testdir.makeini(
+ """
+ [pytest]
+ cache_dir = {cache_dir}
+ """.format(
+ cache_dir=rel_cache_dir
+ )
+ )
+ testdir.makepyfile(test_errored="def test_error():\n assert False")
+ testdir.runpytest()
+ assert testdir.tmpdir.join(rel_cache_dir).isdir()
+
+ def test_custom_abs_cache_dir(self, testdir, tmpdir_factory):
+ tmp = str(tmpdir_factory.mktemp("tmp"))
+ abs_cache_dir = os.path.join(tmp, "custom_cache_dir")
+ testdir.makeini(
+ """
+ [pytest]
+ cache_dir = {cache_dir}
+ """.format(
+ cache_dir=abs_cache_dir
+ )
+ )
+ testdir.makepyfile(test_errored="def test_error():\n assert False")
+ testdir.runpytest()
+ assert py.path.local(abs_cache_dir).isdir()
+
+ def test_custom_cache_dir_with_env_var(self, testdir, monkeypatch):
+ monkeypatch.setenv("env_var", "custom_cache_dir")
+ testdir.makeini(
+ """
+ [pytest]
+ cache_dir = {cache_dir}
+ """.format(
+ cache_dir="$env_var"
+ )
+ )
+ testdir.makepyfile(test_errored="def test_error():\n assert False")
+ testdir.runpytest()
+ assert testdir.tmpdir.join("custom_cache_dir").isdir()
+
+
+def test_cache_reportheader(testdir):
+ testdir.makepyfile(
+ """
+ def test_hello():
+ pass
+ """
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(["cachedir: .pytest_cache"])
+
+
+def test_cache_show(testdir):
+ result = testdir.runpytest("--cache-show")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*cache is empty*"])
+ testdir.makeconftest(
+ """
+ def pytest_configure(config):
+ config.cache.set("my/name", [1,2,3])
+ config.cache.set("other/some", {1:2})
+ dp = config.cache.makedir("mydb")
+ dp.ensure("hello")
+ dp.ensure("world")
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 5 # no tests executed
+ result = testdir.runpytest("--cache-show")
+ result.stdout.fnmatch_lines_random(
+ [
+ "*cachedir:*",
+ "-*cache values*-",
+ "*my/name contains:",
+ " [1, 2, 3]",
+ "*other/some contains*",
+ " {*1*: 2}",
+ "-*cache directories*-",
+ "*mydb/hello*length 0*",
+ "*mydb/world*length 0*",
+ ]
+ )
+
+
+class TestLastFailed(object):
+
+ def test_lastfailed_usecase(self, testdir, monkeypatch):
+ monkeypatch.setenv("PYTHONDONTWRITEBYTECODE", 1)
+ p = testdir.makepyfile(
+ """
+ def test_1():
+ assert 0
+ def test_2():
+ assert 0
+ def test_3():
+ assert 1
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*2 failed*"])
+ p.write(
+ _pytest._code.Source(
+ """
+ def test_1():
+ assert 1
+
+ def test_2():
+ assert 1
+
+ def test_3():
+ assert 0
+ """
+ )
+ )
+ result = testdir.runpytest("--lf")
+ result.stdout.fnmatch_lines(["*2 passed*1 desel*"])
+ result = testdir.runpytest("--lf")
+ result.stdout.fnmatch_lines(["*1 failed*2 passed*"])
+ result = testdir.runpytest("--lf", "--cache-clear")
+ result.stdout.fnmatch_lines(["*1 failed*2 passed*"])
+
+ # Run this again to make sure clear-cache is robust
+ if os.path.isdir(".pytest_cache"):
+ shutil.rmtree(".pytest_cache")
+ result = testdir.runpytest("--lf", "--cache-clear")
+ result.stdout.fnmatch_lines(["*1 failed*2 passed*"])
+
+ def test_failedfirst_order(self, testdir):
+ testdir.tmpdir.join("test_a.py").write(
+ _pytest._code.Source(
+ """
+ def test_always_passes():
+ assert 1
+ """
+ )
+ )
+ testdir.tmpdir.join("test_b.py").write(
+ _pytest._code.Source(
+ """
+ def test_always_fails():
+ assert 0
+ """
+ )
+ )
+ result = testdir.runpytest()
+ # Test order will be collection order; alphabetical
+ result.stdout.fnmatch_lines(["test_a.py*", "test_b.py*"])
+ result = testdir.runpytest("--ff")
+        # Test order will be failing tests first
+ result.stdout.fnmatch_lines(["test_b.py*", "test_a.py*"])
+
+ def test_lastfailed_failedfirst_order(self, testdir):
+ testdir.makepyfile(
+ **{
+ "test_a.py": """
+ def test_always_passes():
+ assert 1
+ """,
+ "test_b.py": """
+ def test_always_fails():
+ assert 0
+ """,
+ }
+ )
+ result = testdir.runpytest()
+ # Test order will be collection order; alphabetical
+ result.stdout.fnmatch_lines(["test_a.py*", "test_b.py*"])
+ result = testdir.runpytest("--lf", "--ff")
+        # Test order will be failing tests first
+ result.stdout.fnmatch_lines(["test_b.py*"])
+ assert "test_a.py" not in result.stdout.str()
+
+ def test_lastfailed_difference_invocations(self, testdir, monkeypatch):
+ monkeypatch.setenv("PYTHONDONTWRITEBYTECODE", 1)
+ testdir.makepyfile(
+ test_a="""
+ def test_a1():
+ assert 0
+ def test_a2():
+ assert 1
+ """,
+ test_b="""
+ def test_b1():
+ assert 0
+ """,
+ )
+ p = testdir.tmpdir.join("test_a.py")
+ p2 = testdir.tmpdir.join("test_b.py")
+
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*2 failed*"])
+ result = testdir.runpytest("--lf", p2)
+ result.stdout.fnmatch_lines(["*1 failed*"])
+ p2.write(
+ _pytest._code.Source(
+ """
+ def test_b1():
+ assert 1
+ """
+ )
+ )
+ result = testdir.runpytest("--lf", p2)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result = testdir.runpytest("--lf", p)
+ result.stdout.fnmatch_lines(["*1 failed*1 desel*"])
+
+ def test_lastfailed_usecase_splice(self, testdir, monkeypatch):
+ monkeypatch.setenv("PYTHONDONTWRITEBYTECODE", 1)
+ testdir.makepyfile(
+ """
+ def test_1():
+ assert 0
+ """
+ )
+ p2 = testdir.tmpdir.join("test_something.py")
+ p2.write(
+ _pytest._code.Source(
+ """
+ def test_2():
+ assert 0
+ """
+ )
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*2 failed*"])
+ result = testdir.runpytest("--lf", p2)
+ result.stdout.fnmatch_lines(["*1 failed*"])
+ result = testdir.runpytest("--lf")
+ result.stdout.fnmatch_lines(["*2 failed*"])
+
+ def test_lastfailed_xpass(self, testdir):
+ testdir.inline_runsource(
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test_hello():
+ assert 1
+ """
+ )
+ config = testdir.parseconfigure()
+ lastfailed = config.cache.get("cache/lastfailed", -1)
+ assert lastfailed == -1
+
+ def test_non_serializable_parametrize(self, testdir):
+ """Test that failed parametrized tests with unmarshable parameters
+ don't break pytest-cache.
+ """
+ testdir.makepyfile(
+ r"""
+ import pytest
+
+ @pytest.mark.parametrize('val', [
+ b'\xac\x10\x02G',
+ ])
+ def test_fail(val):
+ assert False
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("*1 failed in*")
+
+ def test_terminal_report_lastfailed(self, testdir):
+ test_a = testdir.makepyfile(
+ test_a="""
+ def test_a1():
+ pass
+ def test_a2():
+ pass
+ """
+ )
+ test_b = testdir.makepyfile(
+ test_b="""
+ def test_b1():
+ assert 0
+ def test_b2():
+ assert 0
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["collected 4 items", "*2 failed, 2 passed in*"])
+
+ result = testdir.runpytest("--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 4 items / 2 deselected",
+ "run-last-failure: rerun previous 2 failures",
+ "*2 failed, 2 deselected in*",
+ ]
+ )
+
+ result = testdir.runpytest(test_a, "--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "run-last-failure: run all (no recorded failures)",
+ "*2 passed in*",
+ ]
+ )
+
+ result = testdir.runpytest(test_b, "--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "run-last-failure: rerun previous 2 failures",
+ "*2 failed in*",
+ ]
+ )
+
+ result = testdir.runpytest("test_b.py::test_b1", "--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 1 item",
+ "run-last-failure: rerun previous 1 failure",
+ "*1 failed in*",
+ ]
+ )
+
+ def test_terminal_report_failedfirst(self, testdir):
+ testdir.makepyfile(
+ test_a="""
+ def test_a1():
+ assert 0
+ def test_a2():
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["collected 2 items", "*1 failed, 1 passed in*"])
+
+ result = testdir.runpytest("--ff")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "run-last-failure: rerun previous 1 failure first",
+ "*1 failed, 1 passed in*",
+ ]
+ )
+
+ def test_lastfailed_collectfailure(self, testdir, monkeypatch):
+
+ testdir.makepyfile(
+ test_maybe="""
+ import os
+ env = os.environ
+ if '1' == env['FAILIMPORT']:
+ raise ImportError('fail')
+ def test_hello():
+ assert '0' == env['FAILTEST']
+ """
+ )
+
+ def rlf(fail_import, fail_run):
+ monkeypatch.setenv("FAILIMPORT", fail_import)
+ monkeypatch.setenv("FAILTEST", fail_run)
+
+ testdir.runpytest("-q")
+ config = testdir.parseconfigure()
+ lastfailed = config.cache.get("cache/lastfailed", -1)
+ return lastfailed
+
+ lastfailed = rlf(fail_import=0, fail_run=0)
+ assert lastfailed == -1
+
+ lastfailed = rlf(fail_import=1, fail_run=0)
+ assert list(lastfailed) == ["test_maybe.py"]
+
+ lastfailed = rlf(fail_import=0, fail_run=1)
+ assert list(lastfailed) == ["test_maybe.py::test_hello"]
+
+ def test_lastfailed_failure_subset(self, testdir, monkeypatch):
+
+ testdir.makepyfile(
+ test_maybe="""
+ import os
+ env = os.environ
+ if '1' == env['FAILIMPORT']:
+ raise ImportError('fail')
+ def test_hello():
+ assert '0' == env['FAILTEST']
+ """
+ )
+
+ testdir.makepyfile(
+ test_maybe2="""
+ import os
+ env = os.environ
+ if '1' == env['FAILIMPORT']:
+ raise ImportError('fail')
+ def test_hello():
+ assert '0' == env['FAILTEST']
+
+ def test_pass():
+ pass
+ """
+ )
+
+ def rlf(fail_import, fail_run, args=()):
+ monkeypatch.setenv("FAILIMPORT", fail_import)
+ monkeypatch.setenv("FAILTEST", fail_run)
+
+ result = testdir.runpytest("-q", "--lf", *args)
+ config = testdir.parseconfigure()
+ lastfailed = config.cache.get("cache/lastfailed", -1)
+ return result, lastfailed
+
+ result, lastfailed = rlf(fail_import=0, fail_run=0)
+ assert lastfailed == -1
+ result.stdout.fnmatch_lines(["*3 passed*"])
+
+ result, lastfailed = rlf(fail_import=1, fail_run=0)
+ assert sorted(list(lastfailed)) == ["test_maybe.py", "test_maybe2.py"]
+
+ result, lastfailed = rlf(fail_import=0, fail_run=0, args=("test_maybe2.py",))
+ assert list(lastfailed) == ["test_maybe.py"]
+
+        # edge case of test selection - even if we remember failures
+        # from other tests, we still need to run all tests if no test
+        # matches the failures
+ result, lastfailed = rlf(fail_import=0, fail_run=0, args=("test_maybe2.py",))
+ assert list(lastfailed) == ["test_maybe.py"]
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+ def test_lastfailed_creates_cache_when_needed(self, testdir):
+ # Issue #1342
+ testdir.makepyfile(test_empty="")
+ testdir.runpytest("-q", "--lf")
+ assert not os.path.exists(".pytest_cache/v/cache/lastfailed")
+
+ testdir.makepyfile(test_successful="def test_success():\n assert True")
+ testdir.runpytest("-q", "--lf")
+ assert not os.path.exists(".pytest_cache/v/cache/lastfailed")
+
+ testdir.makepyfile(test_errored="def test_error():\n assert False")
+ testdir.runpytest("-q", "--lf")
+ assert os.path.exists(".pytest_cache/v/cache/lastfailed")
+
+ def test_xfail_not_considered_failure(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test():
+ assert 0
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("*1 xfailed*")
+ assert self.get_cached_last_failed(testdir) == []
+
+ def test_xfail_strict_considered_failure(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail(strict=True)
+ def test():
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("*1 failed*")
+ assert (
+ self.get_cached_last_failed(testdir)
+ == ["test_xfail_strict_considered_failure.py::test"]
+ )
+
+ @pytest.mark.parametrize("mark", ["mark.xfail", "mark.skip"])
+ def test_failed_changed_to_xfail_or_skip(self, testdir, mark):
+ testdir.makepyfile(
+ """
+ import pytest
+ def test():
+ assert 0
+ """
+ )
+ result = testdir.runpytest()
+ assert (
+ self.get_cached_last_failed(testdir)
+ == ["test_failed_changed_to_xfail_or_skip.py::test"]
+ )
+ assert result.ret == 1
+
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.{mark}
+ def test():
+ assert 0
+ """.format(
+ mark=mark
+ )
+ )
+ result = testdir.runpytest()
+ assert result.ret == 0
+ assert self.get_cached_last_failed(testdir) == []
+ assert result.ret == 0
+
+ def get_cached_last_failed(self, testdir):
+ config = testdir.parseconfigure()
+ return sorted(config.cache.get("cache/lastfailed", {}))
+
+ def test_cache_cumulative(self, testdir):
+ """
+        Test a workflow where the user fixes errors gradually, file by file, using --lf.
+ """
+ # 1. initial run
+ test_bar = testdir.makepyfile(
+ test_bar="""
+ def test_bar_1():
+ pass
+ def test_bar_2():
+ assert 0
+ """
+ )
+ test_foo = testdir.makepyfile(
+ test_foo="""
+ def test_foo_3():
+ pass
+ def test_foo_4():
+ assert 0
+ """
+ )
+ testdir.runpytest()
+ assert (
+ self.get_cached_last_failed(testdir)
+ == ["test_bar.py::test_bar_2", "test_foo.py::test_foo_4"]
+ )
+
+ # 2. fix test_bar_2, run only test_bar.py
+ testdir.makepyfile(
+ test_bar="""
+ def test_bar_1():
+ pass
+ def test_bar_2():
+ pass
+ """
+ )
+ result = testdir.runpytest(test_bar)
+ result.stdout.fnmatch_lines("*2 passed*")
+ # ensure cache does not forget that test_foo_4 failed once before
+ assert self.get_cached_last_failed(testdir) == ["test_foo.py::test_foo_4"]
+
+ result = testdir.runpytest("--last-failed")
+ result.stdout.fnmatch_lines("*1 failed, 3 deselected*")
+ assert self.get_cached_last_failed(testdir) == ["test_foo.py::test_foo_4"]
+
+ # 3. fix test_foo_4, run only test_foo.py
+ test_foo = testdir.makepyfile(
+ test_foo="""
+ def test_foo_3():
+ pass
+ def test_foo_4():
+ pass
+ """
+ )
+ result = testdir.runpytest(test_foo, "--last-failed")
+ result.stdout.fnmatch_lines("*1 passed, 1 deselected*")
+ assert self.get_cached_last_failed(testdir) == []
+
+ result = testdir.runpytest("--last-failed")
+ result.stdout.fnmatch_lines("*4 passed*")
+ assert self.get_cached_last_failed(testdir) == []
+
+ def test_lastfailed_no_failures_behavior_all_passed(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_1():
+ assert True
+ def test_2():
+ assert True
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*2 passed*"])
+ result = testdir.runpytest("--lf")
+ result.stdout.fnmatch_lines(["*2 passed*"])
+ result = testdir.runpytest("--lf", "--lfnf", "all")
+ result.stdout.fnmatch_lines(["*2 passed*"])
+ result = testdir.runpytest("--lf", "--lfnf", "none")
+ result.stdout.fnmatch_lines(["*2 desel*"])
+
+ def test_lastfailed_no_failures_behavior_empty_cache(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_1():
+ assert True
+ def test_2():
+ assert False
+ """
+ )
+ result = testdir.runpytest("--lf", "--cache-clear")
+ result.stdout.fnmatch_lines(["*1 failed*1 passed*"])
+ result = testdir.runpytest("--lf", "--cache-clear", "--lfnf", "all")
+ result.stdout.fnmatch_lines(["*1 failed*1 passed*"])
+ result = testdir.runpytest("--lf", "--cache-clear", "--lfnf", "none")
+ result.stdout.fnmatch_lines(["*2 desel*"])
+
+
+class TestNewFirst(object):
+
+ def test_newfirst_usecase(self, testdir):
+ testdir.makepyfile(
+ **{
+ "test_1/test_1.py": """
+ def test_1(): assert 1
+ def test_2(): assert 1
+ def test_3(): assert 1
+ """,
+ "test_2/test_2.py": """
+ def test_1(): assert 1
+ def test_2(): assert 1
+ def test_3(): assert 1
+ """,
+ }
+ )
+
+ testdir.tmpdir.join("test_1/test_1.py").setmtime(1)
+
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ [
+ "*test_1/test_1.py::test_1 PASSED*",
+ "*test_1/test_1.py::test_2 PASSED*",
+ "*test_1/test_1.py::test_3 PASSED*",
+ "*test_2/test_2.py::test_1 PASSED*",
+ "*test_2/test_2.py::test_2 PASSED*",
+ "*test_2/test_2.py::test_3 PASSED*",
+ ]
+ )
+
+ result = testdir.runpytest("-v", "--nf")
+
+ result.stdout.fnmatch_lines(
+ [
+ "*test_2/test_2.py::test_1 PASSED*",
+ "*test_2/test_2.py::test_2 PASSED*",
+ "*test_2/test_2.py::test_3 PASSED*",
+ "*test_1/test_1.py::test_1 PASSED*",
+ "*test_1/test_1.py::test_2 PASSED*",
+ "*test_1/test_1.py::test_3 PASSED*",
+ ]
+ )
+
+ testdir.tmpdir.join("test_1/test_1.py").write(
+ "def test_1(): assert 1\n"
+ "def test_2(): assert 1\n"
+ "def test_3(): assert 1\n"
+ "def test_4(): assert 1\n"
+ )
+ testdir.tmpdir.join("test_1/test_1.py").setmtime(1)
+
+ result = testdir.runpytest("-v", "--nf")
+
+ result.stdout.fnmatch_lines(
+ [
+ "*test_1/test_1.py::test_4 PASSED*",
+ "*test_2/test_2.py::test_1 PASSED*",
+ "*test_2/test_2.py::test_2 PASSED*",
+ "*test_2/test_2.py::test_3 PASSED*",
+ "*test_1/test_1.py::test_1 PASSED*",
+ "*test_1/test_1.py::test_2 PASSED*",
+ "*test_1/test_1.py::test_3 PASSED*",
+ ]
+ )
+
+ def test_newfirst_parametrize(self, testdir):
+ testdir.makepyfile(
+ **{
+ "test_1/test_1.py": """
+ import pytest
+ @pytest.mark.parametrize('num', [1, 2])
+ def test_1(num): assert num
+ """,
+ "test_2/test_2.py": """
+ import pytest
+ @pytest.mark.parametrize('num', [1, 2])
+ def test_1(num): assert num
+ """,
+ }
+ )
+
+ testdir.tmpdir.join("test_1/test_1.py").setmtime(1)
+
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ [
+ "*test_1/test_1.py::test_1[1*",
+ "*test_1/test_1.py::test_1[2*",
+ "*test_2/test_2.py::test_1[1*",
+ "*test_2/test_2.py::test_1[2*",
+ ]
+ )
+
+ result = testdir.runpytest("-v", "--nf")
+
+ result.stdout.fnmatch_lines(
+ [
+ "*test_2/test_2.py::test_1[1*",
+ "*test_2/test_2.py::test_1[2*",
+ "*test_1/test_1.py::test_1[1*",
+ "*test_1/test_1.py::test_1[2*",
+ ]
+ )
+
+ testdir.tmpdir.join("test_1/test_1.py").write(
+ "import pytest\n"
+ "@pytest.mark.parametrize('num', [1, 2, 3])\n"
+ "def test_1(num): assert num\n"
+ )
+ testdir.tmpdir.join("test_1/test_1.py").setmtime(1)
+
+ result = testdir.runpytest("-v", "--nf")
+
+ result.stdout.fnmatch_lines(
+ [
+ "*test_1/test_1.py::test_1[3*",
+ "*test_2/test_2.py::test_1[1*",
+ "*test_2/test_2.py::test_1[2*",
+ "*test_1/test_1.py::test_1[1*",
+ "*test_1/test_1.py::test_1[2*",
+ ]
+ )
diff --git a/third_party/python/pytest/testing/test_capture.py b/third_party/python/pytest/testing/test_capture.py
new file mode 100644
index 0000000000..e14b3af785
--- /dev/null
+++ b/third_party/python/pytest/testing/test_capture.py
@@ -0,0 +1,1394 @@
+from __future__ import absolute_import, division, print_function
+
+# note: py.io capture tests were copied from
+# pylib 1.4.20.dev2 (rev 13d9af95547e)
+from __future__ import with_statement
+import pickle
+import os
+import sys
+from io import UnsupportedOperation
+
+import _pytest._code
+import py
+import pytest
+import contextlib
+from six import binary_type, text_type
+from _pytest import capture
+from _pytest.capture import CaptureManager
+from _pytest.main import EXIT_NOTESTSCOLLECTED
+
+
+needsosdup = pytest.mark.xfail("not hasattr(os, 'dup')")
+
+
+def tobytes(obj):
+ if isinstance(obj, text_type):
+ obj = obj.encode("UTF-8")
+ assert isinstance(obj, binary_type)
+ return obj
+
+
+def totext(obj):
+ if isinstance(obj, binary_type):
+ obj = text_type(obj, "UTF-8")
+ assert isinstance(obj, text_type)
+ return obj
+
+
+def oswritebytes(fd, obj):
+ os.write(fd, tobytes(obj))
+
+
+def StdCaptureFD(out=True, err=True, in_=True):
+ return capture.MultiCapture(out, err, in_, Capture=capture.FDCapture)
+
+
+def StdCapture(out=True, err=True, in_=True):
+ return capture.MultiCapture(out, err, in_, Capture=capture.SysCapture)
+
+
+class TestCaptureManager(object):
+
+ def test_getmethod_default_no_fd(self, monkeypatch):
+ from _pytest.capture import pytest_addoption
+ from _pytest.config.argparsing import Parser
+
+ parser = Parser()
+ pytest_addoption(parser)
+ default = parser._groups[0].options[0].default
+ assert default == "fd" if hasattr(os, "dup") else "sys"
+ parser = Parser()
+ monkeypatch.delattr(os, "dup", raising=False)
+ pytest_addoption(parser)
+ assert parser._groups[0].options[0].default == "sys"
+
+ @needsosdup
+ @pytest.mark.parametrize(
+ "method", ["no", "sys", pytest.mark.skipif('not hasattr(os, "dup")', "fd")]
+ )
+ def test_capturing_basic_api(self, method):
+ capouter = StdCaptureFD()
+ old = sys.stdout, sys.stderr, sys.stdin
+ try:
+ capman = CaptureManager(method)
+ capman.start_global_capturing()
+ outerr = capman.suspend_global_capture()
+ assert outerr == ("", "")
+ outerr = capman.suspend_global_capture()
+ assert outerr == ("", "")
+ print("hello")
+ out, err = capman.suspend_global_capture()
+ if method == "no":
+ assert old == (sys.stdout, sys.stderr, sys.stdin)
+ else:
+ assert not out
+ capman.resume_global_capture()
+ print("hello")
+ out, err = capman.suspend_global_capture()
+ if method != "no":
+ assert out == "hello\n"
+ capman.stop_global_capturing()
+ finally:
+ capouter.stop_capturing()
+
+ @needsosdup
+ def test_init_capturing(self):
+ capouter = StdCaptureFD()
+ try:
+ capman = CaptureManager("fd")
+ capman.start_global_capturing()
+ pytest.raises(AssertionError, "capman.start_global_capturing()")
+ capman.stop_global_capturing()
+ finally:
+ capouter.stop_capturing()
+
+
+@pytest.mark.parametrize("method", ["fd", "sys"])
+def test_capturing_unicode(testdir, method):
+ if hasattr(sys, "pypy_version_info") and sys.pypy_version_info < (2, 2):
+ pytest.xfail("does not work on pypy < 2.2")
+ if sys.version_info >= (3, 0):
+ obj = "'b\u00f6y'"
+ else:
+ obj = "u'\u00f6y'"
+ testdir.makepyfile(
+ """
+ # coding=utf8
+ # taken from issue 227 from nosetests
+ def test_unicode():
+ import sys
+ print (sys.stdout)
+ print (%s)
+ """
+ % obj
+ )
+ result = testdir.runpytest("--capture=%s" % method)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+@pytest.mark.parametrize("method", ["fd", "sys"])
+def test_capturing_bytes_in_utf8_encoding(testdir, method):
+ testdir.makepyfile(
+ """
+ def test_unicode():
+ print ('b\\u00f6y')
+ """
+ )
+ result = testdir.runpytest("--capture=%s" % method)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_collect_capturing(testdir):
+ p = testdir.makepyfile(
+ """
+ print ("collect %s failure" % 13)
+ import xyz42123
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(["*Captured stdout*", "*collect 13 failure*"])
+
+
+class TestPerTestCapturing(object):
+
+ def test_capture_and_fixtures(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def setup_module(mod):
+ print ("setup module")
+ def setup_function(function):
+ print ("setup " + function.__name__)
+ def test_func1():
+ print ("in func1")
+ assert 0
+ def test_func2():
+ print ("in func2")
+ assert 0
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "setup module*",
+ "setup test_func1*",
+ "in func1*",
+ "setup test_func2*",
+ "in func2*",
+ ]
+ )
+
+ @pytest.mark.xfail(reason="unimplemented feature")
+ def test_capture_scope_cache(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import sys
+ def setup_module(func):
+ print ("module-setup")
+ def setup_function(func):
+ print ("function-setup")
+ def test_func():
+ print ("in function")
+ assert 0
+ def teardown_function(func):
+ print ("in teardown")
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*test_func():*",
+ "*Captured stdout during setup*",
+ "module-setup*",
+ "function-setup*",
+ "*Captured stdout*",
+ "in teardown*",
+ ]
+ )
+
+ def test_no_carry_over(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def test_func1():
+ print ("in func1")
+ def test_func2():
+ print ("in func2")
+ assert 0
+ """
+ )
+ result = testdir.runpytest(p)
+ s = result.stdout.str()
+ assert "in func1" not in s
+ assert "in func2" in s
+
+ def test_teardown_capturing(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def setup_function(function):
+ print ("setup func1")
+ def teardown_function(function):
+ print ("teardown func1")
+ assert 0
+ def test_func1():
+ print ("in func1")
+ pass
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*teardown_function*",
+ "*Captured stdout*",
+ "setup func1*",
+ "in func1*",
+ "teardown func1*",
+ # "*1 fixture failure*"
+ ]
+ )
+
+ def test_teardown_capturing_final(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def teardown_module(mod):
+ print ("teardown module")
+ assert 0
+ def test_func():
+ pass
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*def teardown_module(mod):*",
+ "*Captured stdout*",
+ "*teardown module*",
+ "*1 error*",
+ ]
+ )
+
+ def test_capturing_outerr(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ import sys
+ def test_capturing():
+ print (42)
+ sys.stderr.write(str(23))
+ def test_capturing_error():
+ print (1)
+ sys.stderr.write(str(2))
+ raise ValueError
+ """
+ )
+ result = testdir.runpytest(p1)
+ result.stdout.fnmatch_lines(
+ [
+ "*test_capturing_outerr.py .F*",
+ "====* FAILURES *====",
+ "____*____",
+ "*test_capturing_outerr.py:8: ValueError",
+ "*--- Captured stdout *call*",
+ "1",
+ "*--- Captured stderr *call*",
+ "2",
+ ]
+ )
+
+
+class TestLoggingInteraction(object):
+
+ def test_logging_stream_ownership(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def test_logging():
+ import logging
+ import pytest
+ stream = capture.CaptureIO()
+ logging.basicConfig(stream=stream)
+ stream.close() # to free memory/release resources
+ """
+ )
+ result = testdir.runpytest_subprocess(p)
+ assert result.stderr.str().find("atexit") == -1
+
+ def test_logging_and_immediate_setupteardown(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import logging
+ def setup_function(function):
+ logging.warn("hello1")
+
+ def test_logging():
+ logging.warn("hello2")
+ assert 0
+
+ def teardown_function(function):
+ logging.warn("hello3")
+ assert 0
+ """
+ )
+ for optargs in (("--capture=sys",), ("--capture=fd",)):
+ print(optargs)
+ result = testdir.runpytest_subprocess(p, *optargs)
+ s = result.stdout.str()
+ result.stdout.fnmatch_lines(
+ ["*WARN*hello3", "*WARN*hello1", "*WARN*hello2"] # errors show first!
+ )
+ # verify proper termination
+ assert "closed" not in s
+
+ def test_logging_and_crossscope_fixtures(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import logging
+ def setup_module(function):
+ logging.warn("hello1")
+
+ def test_logging():
+ logging.warn("hello2")
+ assert 0
+
+ def teardown_module(function):
+ logging.warn("hello3")
+ assert 0
+ """
+ )
+ for optargs in (("--capture=sys",), ("--capture=fd",)):
+ print(optargs)
+ result = testdir.runpytest_subprocess(p, *optargs)
+ s = result.stdout.str()
+ result.stdout.fnmatch_lines(
+ ["*WARN*hello3", "*WARN*hello1", "*WARN*hello2"] # errors come first
+ )
+ # verify proper termination
+ assert "closed" not in s
+
+ def test_conftestlogging_is_shown(self, testdir):
+ testdir.makeconftest(
+ """
+ import logging
+ logging.basicConfig()
+ logging.warn("hello435")
+ """
+ )
+ # make sure that logging is still captured in tests
+ result = testdir.runpytest_subprocess("-s", "-p", "no:capturelog")
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+ result.stderr.fnmatch_lines(["WARNING*hello435*"])
+ assert "operation on closed file" not in result.stderr.str()
+
+ def test_conftestlogging_and_test_logging(self, testdir):
+ testdir.makeconftest(
+ """
+ import logging
+ logging.basicConfig()
+ """
+ )
+ # make sure that logging is still captured in tests
+ p = testdir.makepyfile(
+ """
+ def test_hello():
+ import logging
+ logging.warn("hello433")
+ assert 0
+ """
+ )
+ result = testdir.runpytest_subprocess(p, "-p", "no:capturelog")
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(["WARNING*hello433*"])
+ assert "something" not in result.stderr.str()
+ assert "operation on closed file" not in result.stderr.str()
+
+
+class TestCaptureFixture(object):
+
+ @pytest.mark.parametrize("opt", [[], ["-s"]])
+ def test_std_functional(self, testdir, opt):
+ reprec = testdir.inline_runsource(
+ """
+ def test_hello(capsys):
+ print (42)
+ out, err = capsys.readouterr()
+ assert out.startswith("42")
+ """,
+ *opt
+ )
+ reprec.assertoutcome(passed=1)
+
+ def test_capsyscapfd(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def test_one(capsys, capfd):
+ pass
+ def test_two(capfd, capsys):
+ pass
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*ERROR*setup*test_one*",
+ "E*capfd*capsys*same*time*",
+ "*ERROR*setup*test_two*",
+ "E*capsys*capfd*same*time*",
+ "*2 error*",
+ ]
+ )
+
+ def test_capturing_getfixturevalue(self, testdir):
+ """Test that asking for "capfd" and "capsys" using request.getfixturevalue
+ in the same test is an error.
+ """
+ testdir.makepyfile(
+ """
+ def test_one(capsys, request):
+ request.getfixturevalue("capfd")
+ def test_two(capfd, request):
+ request.getfixturevalue("capsys")
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*test_one*",
+ "*capsys*capfd*same*time*",
+ "*test_two*",
+ "*capfd*capsys*same*time*",
+ "*2 failed in*",
+ ]
+ )
+
+ def test_capsyscapfdbinary(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def test_one(capsys, capfdbinary):
+ pass
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(
+ ["*ERROR*setup*test_one*", "E*capfdbinary*capsys*same*time*", "*1 error*"]
+ )
+
+ @pytest.mark.parametrize("method", ["sys", "fd"])
+ def test_capture_is_represented_on_failure_issue128(self, testdir, method):
+ p = testdir.makepyfile(
+ """
+ def test_hello(cap%s):
+ print ("xxx42xxx")
+ assert 0
+ """
+ % method
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(["xxx42xxx"])
+
+ @needsosdup
+ def test_stdfd_functional(self, testdir):
+ reprec = testdir.inline_runsource(
+ """
+ def test_hello(capfd):
+ import os
+ os.write(1, "42".encode('ascii'))
+ out, err = capfd.readouterr()
+ assert out.startswith("42")
+ capfd.close()
+ """
+ )
+ reprec.assertoutcome(passed=1)
+
+ @needsosdup
+ def test_capfdbinary(self, testdir):
+ reprec = testdir.inline_runsource(
+ """
+ def test_hello(capfdbinary):
+ import os
+ # some likely un-decodable bytes
+ os.write(1, b'\\xfe\\x98\\x20')
+ out, err = capfdbinary.readouterr()
+ assert out == b'\\xfe\\x98\\x20'
+ assert err == b''
+ """
+ )
+ reprec.assertoutcome(passed=1)
+
+ @pytest.mark.skipif(
+ sys.version_info < (3,), reason="only have capsysbinary in python 3"
+ )
+ def test_capsysbinary(self, testdir):
+ reprec = testdir.inline_runsource(
+ """
+ def test_hello(capsysbinary):
+ import sys
+ # some likely un-decodable bytes
+ sys.stdout.buffer.write(b'\\xfe\\x98\\x20')
+ out, err = capsysbinary.readouterr()
+ assert out == b'\\xfe\\x98\\x20'
+ assert err == b''
+ """
+ )
+ reprec.assertoutcome(passed=1)
+
+ @pytest.mark.skipif(
+ sys.version_info >= (3,), reason="only have capsysbinary in python 3"
+ )
+ def test_capsysbinary_forbidden_in_python2(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_hello(capsysbinary):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*test_hello*",
+ "*capsysbinary is only supported on python 3*",
+ "*1 error in*",
+ ]
+ )
+
+ def test_partial_setup_failure(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def test_hello(capsys, missingarg):
+ pass
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(["*test_partial_setup_failure*", "*1 error*"])
+
+ @needsosdup
+ def test_keyboardinterrupt_disables_capturing(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def test_hello(capfd):
+ import os
+ os.write(1, str(42).encode('ascii'))
+ raise KeyboardInterrupt()
+ """
+ )
+ result = testdir.runpytest_subprocess(p)
+ result.stdout.fnmatch_lines(["*KeyboardInterrupt*"])
+ assert result.ret == 2
+
+ @pytest.mark.issue14
+ def test_capture_and_logging(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import logging
+ def test_log(capsys):
+ logging.error('x')
+ """
+ )
+ result = testdir.runpytest_subprocess(p)
+ assert "closed" not in result.stderr.str()
+
+ @pytest.mark.parametrize("fixture", ["capsys", "capfd"])
+ @pytest.mark.parametrize("no_capture", [True, False])
+ def test_disabled_capture_fixture(self, testdir, fixture, no_capture):
+ testdir.makepyfile(
+ """
+ def test_disabled({fixture}):
+ print('captured before')
+ with {fixture}.disabled():
+ print('while capture is disabled')
+ print('captured after')
+ assert {fixture}.readouterr() == ('captured before\\ncaptured after\\n', '')
+
+ def test_normal():
+ print('test_normal executed')
+ """.format(
+ fixture=fixture
+ )
+ )
+ args = ("-s",) if no_capture else ()
+ result = testdir.runpytest_subprocess(*args)
+ result.stdout.fnmatch_lines(
+ """
+ *while capture is disabled*
+ """
+ )
+ assert "captured before" not in result.stdout.str()
+ assert "captured after" not in result.stdout.str()
+ if no_capture:
+ assert "test_normal executed" in result.stdout.str()
+ else:
+ assert "test_normal executed" not in result.stdout.str()
+
+ @pytest.mark.parametrize("fixture", ["capsys", "capfd"])
+ def test_fixture_use_by_other_fixtures(self, testdir, fixture):
+ """
+ Ensure that capsys and capfd can be used by other fixtures during setup and teardown.
+ """
+ testdir.makepyfile(
+ """
+ from __future__ import print_function
+ import sys
+ import pytest
+
+ @pytest.fixture
+ def captured_print({fixture}):
+ print('stdout contents begin')
+ print('stderr contents begin', file=sys.stderr)
+ out, err = {fixture}.readouterr()
+
+ yield out, err
+
+ print('stdout contents end')
+ print('stderr contents end', file=sys.stderr)
+ out, err = {fixture}.readouterr()
+ assert out == 'stdout contents end\\n'
+ assert err == 'stderr contents end\\n'
+
+ def test_captured_print(captured_print):
+ out, err = captured_print
+ assert out == 'stdout contents begin\\n'
+ assert err == 'stderr contents begin\\n'
+ """.format(
+ fixture=fixture
+ )
+ )
+ result = testdir.runpytest_subprocess()
+ result.stdout.fnmatch_lines("*1 passed*")
+ assert "stdout contents begin" not in result.stdout.str()
+ assert "stderr contents begin" not in result.stdout.str()
+
+
+def test_setup_failure_does_not_kill_capturing(testdir):
+ sub1 = testdir.mkpydir("sub1")
+ sub1.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ def pytest_runtest_setup(item):
+ raise ValueError(42)
+ """
+ )
+ )
+ sub1.join("test_mod.py").write("def test_func1(): pass")
+ result = testdir.runpytest(testdir.tmpdir, "--traceconfig")
+ result.stdout.fnmatch_lines(["*ValueError(42)*", "*1 error*"])
+
+
+def test_fdfuncarg_skips_on_no_osdup(testdir):
+ testdir.makepyfile(
+ """
+ import os
+ if hasattr(os, 'dup'):
+ del os.dup
+ def test_hello(capfd):
+ pass
+ """
+ )
+ result = testdir.runpytest_subprocess("--capture=no")
+ result.stdout.fnmatch_lines(["*1 skipped*"])
+
+
+def test_capture_conftest_runtest_setup(testdir):
+ testdir.makeconftest(
+ """
+ def pytest_runtest_setup():
+ print ("hello19")
+ """
+ )
+ testdir.makepyfile("def test_func(): pass")
+ result = testdir.runpytest()
+ assert result.ret == 0
+ assert "hello19" not in result.stdout.str()
+
+
+def test_capture_badoutput_issue412(testdir):
+ testdir.makepyfile(
+ """
+ import os
+
+ def test_func():
+ omg = bytearray([1,129,1])
+ os.write(1, omg)
+ assert 0
+ """
+ )
+ result = testdir.runpytest("--cap=fd")
+ result.stdout.fnmatch_lines(
+ """
+ *def test_func*
+ *assert 0*
+ *Captured*
+ *1 failed*
+ """
+ )
+
+
+def test_capture_early_option_parsing(testdir):
+ testdir.makeconftest(
+ """
+ def pytest_runtest_setup():
+ print ("hello19")
+ """
+ )
+ testdir.makepyfile("def test_func(): pass")
+ result = testdir.runpytest("-vs")
+ assert result.ret == 0
+ assert "hello19" in result.stdout.str()
+
+
+def test_capture_binary_output(testdir):
+ testdir.makepyfile(
+ r"""
+ import pytest
+
+ def test_a():
+ import sys
+ import subprocess
+ subprocess.call([sys.executable, __file__])
+
+ def test_foo():
+ import os;os.write(1, b'\xc3')
+
+ if __name__ == '__main__':
+ test_foo()
+ """
+ )
+ result = testdir.runpytest("--assert=plain")
+ result.assert_outcomes(passed=2)
+
+
+def test_error_during_readouterr(testdir):
+ """Make sure we suspend capturing if errors occur during readouterr"""
+ testdir.makepyfile(
+ pytest_xyz="""
+ from _pytest.capture import FDCapture
+ def bad_snap(self):
+ raise Exception('boom')
+ assert FDCapture.snap
+ FDCapture.snap = bad_snap
+ """
+ )
+ result = testdir.runpytest_subprocess(
+ "-p", "pytest_xyz", "--version", syspathinsert=True
+ )
+ result.stderr.fnmatch_lines(
+ ["*in bad_snap", " raise Exception('boom')", "Exception: boom"]
+ )
+
+
+class TestCaptureIO(object):
+
+ def test_text(self):
+ f = capture.CaptureIO()
+ f.write("hello")
+ s = f.getvalue()
+ assert s == "hello"
+ f.close()
+
+ def test_unicode_and_str_mixture(self):
+ f = capture.CaptureIO()
+ if sys.version_info >= (3, 0):
+ f.write("\u00f6")
+ pytest.raises(TypeError, "f.write(bytes('hello', 'UTF-8'))")
+ else:
+ f.write(text_type("\u00f6", "UTF-8"))
+ f.write("hello") # bytes
+ s = f.getvalue()
+ f.close()
+ assert isinstance(s, text_type)
+
+ @pytest.mark.skipif(sys.version_info[0] == 2, reason="python 3 only behaviour")
+ def test_write_bytes_to_buffer(self):
+ """In python3, stdout / stderr are text io wrappers (exposing a buffer
+ property of the underlying bytestream). See issue #1407
+ """
+ f = capture.CaptureIO()
+ f.buffer.write(b"foo\r\n")
+ assert f.getvalue() == "foo\r\n"
+
+
+def test_bytes_io():
+ f = py.io.BytesIO()
+ f.write(tobytes("hello"))
+ pytest.raises(TypeError, "f.write(totext('hello'))")
+ s = f.getvalue()
+ assert s == tobytes("hello")
+
+
+def test_dontreadfrominput():
+ from _pytest.capture import DontReadFromInput
+
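+    # DontReadFromInput is what pytest installs as sys.stdin while capturing;
+    # any attempt to read from it should raise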
+ f = DontReadFromInput()
+ assert not f.isatty()
+ pytest.raises(IOError, f.read)
+ pytest.raises(IOError, f.readlines)
+ iter_f = iter(f)
+ pytest.raises(IOError, next, iter_f)
+ pytest.raises(UnsupportedOperation, f.fileno)
+ f.close() # just for completeness
+
+
+@pytest.mark.skipif("sys.version_info < (3,)", reason="python2 has no buffer")
+def test_dontreadfrominput_buffer_python3():
+ from _pytest.capture import DontReadFromInput
+
+ f = DontReadFromInput()
+ fb = f.buffer
+ assert not fb.isatty()
+ pytest.raises(IOError, fb.read)
+ pytest.raises(IOError, fb.readlines)
+ iter_f = iter(f)
+ pytest.raises(IOError, next, iter_f)
+ pytest.raises(ValueError, fb.fileno)
+ f.close() # just for completeness
+
+
+@pytest.mark.skipif("sys.version_info >= (3,)", reason="python2 has no buffer")
+def test_dontreadfrominput_buffer_python2():
+ from _pytest.capture import DontReadFromInput
+
+ f = DontReadFromInput()
+ with pytest.raises(AttributeError):
+ f.buffer
+ f.close() # just for completeness
+
+
+@pytest.yield_fixture
+def tmpfile(testdir):
+ f = testdir.makepyfile("").open("wb+")
+ yield f
+ if not f.closed:
+ f.close()
+
+
+@needsosdup
+def test_dupfile(tmpfile):
+ flist = []
+ for i in range(5):
+ nf = capture.safe_text_dupfile(tmpfile, "wb")
+ assert nf != tmpfile
+ assert nf.fileno() != tmpfile.fileno()
+ assert nf not in flist
+ print(i, end="", file=nf)
+ flist.append(nf)
+
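+    # the duplicated stream's .name reflects the repr() of its underlying buffer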
+ fname_open = flist[0].name
+ assert fname_open == repr(flist[0].buffer)
+
+ for i in range(5):
+ f = flist[i]
+ f.close()
+ fname_closed = flist[0].name
+ assert fname_closed == repr(flist[0].buffer)
+ assert fname_closed != fname_open
+ tmpfile.seek(0)
+ s = tmpfile.read()
+ assert "01234" in repr(s)
+ tmpfile.close()
+ assert fname_closed == repr(flist[0].buffer)
+
+
+def test_dupfile_on_bytesio():
+ io = py.io.BytesIO()
+ f = capture.safe_text_dupfile(io, "wb")
+ f.write("hello")
+ assert io.getvalue() == b"hello"
+ assert "BytesIO object" in f.name
+
+
+def test_dupfile_on_textio():
+ io = py.io.TextIO()
+ f = capture.safe_text_dupfile(io, "wb")
+ f.write("hello")
+ assert io.getvalue() == "hello"
+ assert not hasattr(f, "name")
+
+
+@contextlib.contextmanager
+def lsof_check():
+ pid = os.getpid()
+ try:
+ out = py.process.cmdexec("lsof -p %d" % pid)
+ except (py.process.cmdexec.Error, UnicodeDecodeError):
+ # about UnicodeDecodeError, see note on pytester
+ pytest.skip("could not run 'lsof'")
+ yield
+ out2 = py.process.cmdexec("lsof -p %d" % pid)
+ len1 = len([x for x in out.split("\n") if "REG" in x])
+ len2 = len([x for x in out2.split("\n") if "REG" in x])
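+    # tolerate a couple of extra descriptors, but fail if the number of open
+    # regular files has clearly grown (would indicate leaked file descriptors)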
+ assert len2 < len1 + 3, out2
+
+
+class TestFDCapture(object):
+ pytestmark = needsosdup
+
+ def test_simple(self, tmpfile):
+ fd = tmpfile.fileno()
+ cap = capture.FDCapture(fd)
+ data = tobytes("hello")
+ os.write(fd, data)
+ s = cap.snap()
+ cap.done()
+ assert not s
+ cap = capture.FDCapture(fd)
+ cap.start()
+ os.write(fd, data)
+ s = cap.snap()
+ cap.done()
+ assert s == "hello"
+
+ def test_simple_many(self, tmpfile):
+ for i in range(10):
+ self.test_simple(tmpfile)
+
+ def test_simple_many_check_open_files(self, testdir):
+ with lsof_check():
+ with testdir.makepyfile("").open("wb+") as tmpfile:
+ self.test_simple_many(tmpfile)
+
+ def test_simple_fail_second_start(self, tmpfile):
+ fd = tmpfile.fileno()
+ cap = capture.FDCapture(fd)
+ cap.done()
+ pytest.raises(ValueError, cap.start)
+
+ def test_stderr(self):
+ cap = capture.FDCapture(2)
+ cap.start()
+ print("hello", file=sys.stderr)
+ s = cap.snap()
+ cap.done()
+ assert s == "hello\n"
+
+ def test_stdin(self, tmpfile):
+ cap = capture.FDCapture(0)
+ cap.start()
+ x = os.read(0, 100).strip()
+ cap.done()
+ assert x == tobytes("")
+
+ def test_writeorg(self, tmpfile):
+ data1, data2 = tobytes("foo"), tobytes("bar")
+ cap = capture.FDCapture(tmpfile.fileno())
+ cap.start()
+ tmpfile.write(data1)
+ tmpfile.flush()
+ cap.writeorg(data2)
+ scap = cap.snap()
+ cap.done()
+ assert scap == totext(data1)
+ with open(tmpfile.name, "rb") as stmp_file:
+ stmp = stmp_file.read()
+ assert stmp == data2
+
+ def test_simple_resume_suspend(self, tmpfile):
+ with saved_fd(1):
+ cap = capture.FDCapture(1)
+ cap.start()
+ data = tobytes("hello")
+ os.write(1, data)
+ sys.stdout.write("whatever")
+ s = cap.snap()
+ assert s == "hellowhatever"
+ cap.suspend()
+ os.write(1, tobytes("world"))
+ sys.stdout.write("qlwkej")
+ assert not cap.snap()
+ cap.resume()
+ os.write(1, tobytes("but now"))
+ sys.stdout.write(" yes\n")
+ s = cap.snap()
+ assert s == "but now yes\n"
+ cap.suspend()
+ cap.done()
+ pytest.raises(AttributeError, cap.suspend)
+
+
+@contextlib.contextmanager
+def saved_fd(fd):
+ new_fd = os.dup(fd)
+ try:
+ yield
+ finally:
+ os.dup2(new_fd, fd)
+ os.close(new_fd)
+
+
+class TestStdCapture(object):
+ captureclass = staticmethod(StdCapture)
+
+ @contextlib.contextmanager
+ def getcapture(self, **kw):
+ cap = self.__class__.captureclass(**kw)
+ cap.start_capturing()
+ try:
+ yield cap
+ finally:
+ cap.stop_capturing()
+
+ def test_capturing_done_simple(self):
+ with self.getcapture() as cap:
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ out, err = cap.readouterr()
+ assert out == "hello"
+ assert err == "world"
+
+ def test_capturing_reset_simple(self):
+ with self.getcapture() as cap:
+ print("hello world")
+ sys.stderr.write("hello error\n")
+ out, err = cap.readouterr()
+ assert out == "hello world\n"
+ assert err == "hello error\n"
+
+ def test_capturing_readouterr(self):
+ with self.getcapture() as cap:
+ print("hello world")
+ sys.stderr.write("hello error\n")
+ out, err = cap.readouterr()
+ assert out == "hello world\n"
+ assert err == "hello error\n"
+ sys.stderr.write("error2")
+ out, err = cap.readouterr()
+ assert err == "error2"
+
+ def test_capture_results_accessible_by_attribute(self):
+ with self.getcapture() as cap:
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ capture_result = cap.readouterr()
+ assert capture_result.out == "hello"
+ assert capture_result.err == "world"
+
+ def test_capturing_readouterr_unicode(self):
+ with self.getcapture() as cap:
+ print("hx\xc4\x85\xc4\x87")
+ out, err = cap.readouterr()
+ assert out == py.builtin._totext("hx\xc4\x85\xc4\x87\n", "utf8")
+
+ @pytest.mark.skipif(
+ "sys.version_info >= (3,)", reason="text output different for bytes on python3"
+ )
+ def test_capturing_readouterr_decode_error_handling(self):
+ with self.getcapture() as cap:
+ # triggered an internal error in pytest
+ print("\xa6")
+ out, err = cap.readouterr()
+ assert out == py.builtin._totext("\ufffd\n", "unicode-escape")
+
+ def test_reset_twice_error(self):
+ with self.getcapture() as cap:
+ print("hello")
+ out, err = cap.readouterr()
+ pytest.raises(ValueError, cap.stop_capturing)
+ assert out == "hello\n"
+ assert not err
+
+ def test_capturing_modify_sysouterr_in_between(self):
+ oldout = sys.stdout
+ olderr = sys.stderr
+ with self.getcapture() as cap:
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ sys.stdout = capture.CaptureIO()
+ sys.stderr = capture.CaptureIO()
+ print("not seen")
+ sys.stderr.write("not seen\n")
+ out, err = cap.readouterr()
+ assert out == "hello"
+ assert err == "world"
+ assert sys.stdout == oldout
+ assert sys.stderr == olderr
+
+ def test_capturing_error_recursive(self):
+ with self.getcapture() as cap1:
+ print("cap1")
+ with self.getcapture() as cap2:
+ print("cap2")
+ out2, err2 = cap2.readouterr()
+ out1, err1 = cap1.readouterr()
+ assert out1 == "cap1\n"
+ assert out2 == "cap2\n"
+
+ def test_just_out_capture(self):
+ with self.getcapture(out=True, err=False) as cap:
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ out, err = cap.readouterr()
+ assert out == "hello"
+ assert not err
+
+ def test_just_err_capture(self):
+ with self.getcapture(out=False, err=True) as cap:
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ out, err = cap.readouterr()
+ assert err == "world"
+ assert not out
+
+ def test_stdin_restored(self):
+ old = sys.stdin
+ with self.getcapture(in_=True):
+ newstdin = sys.stdin
+ assert newstdin != sys.stdin
+ assert sys.stdin is old
+
+ def test_stdin_nulled_by_default(self):
+ print("XXX this test may well hang instead of crashing")
+ print("XXX which indicates an error in the underlying capturing")
+ print("XXX mechanisms")
+ with self.getcapture():
+ pytest.raises(IOError, "sys.stdin.read()")
+
+
+class TestStdCaptureFD(TestStdCapture):
+ pytestmark = needsosdup
+ captureclass = staticmethod(StdCaptureFD)
+
+ def test_simple_only_fd(self, testdir):
+ testdir.makepyfile(
+ """
+ import os
+ def test_x():
+ os.write(1, "hello\\n".encode("ascii"))
+ assert 0
+ """
+ )
+ result = testdir.runpytest_subprocess()
+ result.stdout.fnmatch_lines(
+ """
+ *test_x*
+ *assert 0*
+ *Captured stdout*
+ """
+ )
+
+ def test_intermingling(self):
+ with self.getcapture() as cap:
+ oswritebytes(1, "1")
+ sys.stdout.write(str(2))
+ sys.stdout.flush()
+ oswritebytes(1, "3")
+ oswritebytes(2, "a")
+ sys.stderr.write("b")
+ sys.stderr.flush()
+ oswritebytes(2, "c")
+ out, err = cap.readouterr()
+ assert out == "123"
+ assert err == "abc"
+
+ def test_many(self, capfd):
+ with lsof_check():
+ for i in range(10):
+ cap = StdCaptureFD()
+ cap.stop_capturing()
+
+
+class TestStdCaptureFDinvalidFD(object):
+ pytestmark = needsosdup
+
+ def test_stdcapture_fd_invalid_fd(self, testdir):
+ testdir.makepyfile(
+ """
+ import os
+ from _pytest import capture
+ def StdCaptureFD(out=True, err=True, in_=True):
+ return capture.MultiCapture(out, err, in_,
+ Capture=capture.FDCapture)
+ def test_stdout():
+ os.close(1)
+ cap = StdCaptureFD(out=True, err=False, in_=False)
+ cap.stop_capturing()
+ def test_stderr():
+ os.close(2)
+ cap = StdCaptureFD(out=False, err=True, in_=False)
+ cap.stop_capturing()
+ def test_stdin():
+ os.close(0)
+ cap = StdCaptureFD(out=False, err=False, in_=True)
+ cap.stop_capturing()
+ """
+ )
+ result = testdir.runpytest_subprocess("--capture=fd")
+ assert result.ret == 0
+ assert result.parseoutcomes()["passed"] == 3
+
+
+def test_capture_not_started_but_reset():
+ capsys = StdCapture()
+ capsys.stop_capturing()
+
+
+def test_using_capsys_fixture_works_with_sys_stdout_encoding(capsys):
+ test_text = "test text"
+
+ print(test_text.encode(sys.stdout.encoding, "replace"))
+ (out, err) = capsys.readouterr()
+ assert out
+ assert err == ""
+
+
+def test_capsys_results_accessible_by_attribute(capsys):
+ sys.stdout.write("spam")
+ sys.stderr.write("eggs")
+ capture_result = capsys.readouterr()
+ assert capture_result.out == "spam"
+ assert capture_result.err == "eggs"
+
+
+@needsosdup
+@pytest.mark.parametrize("use", [True, False])
+def test_fdcapture_tmpfile_remains_the_same(tmpfile, use):
+ if not use:
+ tmpfile = True
+ cap = StdCaptureFD(out=False, err=tmpfile)
+ try:
+ cap.start_capturing()
+ capfile = cap.err.tmpfile
+ cap.readouterr()
+ finally:
+ cap.stop_capturing()
+ capfile2 = cap.err.tmpfile
+ assert capfile2 == capfile
+
+
+@needsosdup
+def test_close_and_capture_again(testdir):
+ testdir.makepyfile(
+ """
+ import os
+ def test_close():
+ os.close(1)
+ def test_capture_again():
+ os.write(1, b"hello\\n")
+ assert 0
+ """
+ )
+ result = testdir.runpytest_subprocess()
+ result.stdout.fnmatch_lines(
+ """
+ *test_capture_again*
+ *assert 0*
+ *stdout*
+ *hello*
+ """
+ )
+
+
+@pytest.mark.parametrize("method", ["SysCapture", "FDCapture"])
+def test_capturing_and_logging_fundamentals(testdir, method):
+    if method == "FDCapture" and not hasattr(os, "dup"):
+ pytest.skip("need os.dup")
+ # here we check a fundamental feature
+ p = testdir.makepyfile(
+ """
+ import sys, os
+ import py, logging
+ from _pytest import capture
+ cap = capture.MultiCapture(out=False, in_=False,
+ Capture=capture.%s)
+ cap.start_capturing()
+
+ logging.warn("hello1")
+ outerr = cap.readouterr()
+ print ("suspend, captured %%s" %%(outerr,))
+ logging.warn("hello2")
+
+ cap.pop_outerr_to_orig()
+ logging.warn("hello3")
+
+ outerr = cap.readouterr()
+ print ("suspend2, captured %%s" %% (outerr,))
+ """
+ % (method,)
+ )
+ result = testdir.runpython(p)
+ result.stdout.fnmatch_lines(
+ """
+ suspend, captured*hello1*
+ suspend2, captured*WARNING:root:hello3*
+ """
+ )
+ result.stderr.fnmatch_lines(
+ """
+ WARNING:root:hello2
+ """
+ )
+ assert "atexit" not in result.stderr.str()
+
+
+def test_error_attribute_issue555(testdir):
+ testdir.makepyfile(
+ """
+ import sys
+ def test_capattr():
+ assert sys.stdout.errors == "strict"
+ assert sys.stderr.errors == "strict"
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+@pytest.mark.skipif(
+ not sys.platform.startswith("win") and sys.version_info[:2] >= (3, 6),
+ reason="only py3.6+ on windows",
+)
+def test_py36_windowsconsoleio_workaround_non_standard_streams():
+ """
+ Ensure _py36_windowsconsoleio_workaround function works with objects that
+ do not implement the full ``io``-based stream protocol, for example execnet channels (#2666).
+ """
+ from _pytest.capture import _py36_windowsconsoleio_workaround
+
+ class DummyStream(object):
+
+ def write(self, s):
+ pass
+
+ stream = DummyStream()
+ _py36_windowsconsoleio_workaround(stream)
+
+
+def test_dontreadfrominput_has_encoding(testdir):
+ testdir.makepyfile(
+ """
+ import sys
+ def test_capattr():
+ # should not raise AttributeError
+ assert sys.stdout.encoding
+ assert sys.stderr.encoding
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+def test_crash_on_closing_tmpfile_py27(testdir):
+ testdir.makepyfile(
+ """
+ from __future__ import print_function
+ import time
+ import threading
+ import sys
+
+ def spam():
+ f = sys.stderr
+ while True:
+ print('.', end='', file=f)
+
+ def test_silly():
+ t = threading.Thread(target=spam)
+ t.daemon = True
+ t.start()
+ time.sleep(0.5)
+
+ """
+ )
+ result = testdir.runpytest_subprocess()
+ assert result.ret == 0
+ assert "IOError" not in result.stdout.str()
+
+
+def test_pickling_and_unpickling_encoded_file():
+ # See https://bitbucket.org/pytest-dev/pytest/pull-request/194
+ # pickle.loads() raises infinite recursion if
+ # EncodedFile.__getattr__ is not implemented properly
+ ef = capture.EncodedFile(None, None)
+ ef_as_str = pickle.dumps(ef)
+ pickle.loads(ef_as_str)
diff --git a/third_party/python/pytest/testing/test_collection.py b/third_party/python/pytest/testing/test_collection.py
new file mode 100644
index 0000000000..657d64c74e
--- /dev/null
+++ b/third_party/python/pytest/testing/test_collection.py
@@ -0,0 +1,944 @@
+from __future__ import absolute_import, division, print_function
+import pprint
+import sys
+import pytest
+
+import _pytest._code
+from _pytest.main import Session, EXIT_NOTESTSCOLLECTED, _in_venv
+
+
+class TestCollector(object):
+
+ def test_collect_versus_item(self):
+ from pytest import Collector, Item
+
+ assert not issubclass(Collector, Item)
+ assert not issubclass(Item, Collector)
+
+ def test_compat_attributes(self, testdir, recwarn):
+ modcol = testdir.getmodulecol(
+ """
+ def test_pass(): pass
+ def test_fail(): assert 0
+ """
+ )
+ recwarn.clear()
+ assert modcol.Module == pytest.Module
+ assert modcol.Class == pytest.Class
+ assert modcol.Item == pytest.Item
+ assert modcol.File == pytest.File
+ assert modcol.Function == pytest.Function
+
+ def test_check_equality(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ def test_pass(): pass
+ def test_fail(): assert 0
+ """
+ )
+ fn1 = testdir.collect_by_name(modcol, "test_pass")
+ assert isinstance(fn1, pytest.Function)
+ fn2 = testdir.collect_by_name(modcol, "test_pass")
+ assert isinstance(fn2, pytest.Function)
+
+ assert fn1 == fn2
+ assert fn1 != modcol
+ if sys.version_info < (3, 0):
+ assert cmp(fn1, fn2) == 0 # NOQA
+ assert hash(fn1) == hash(fn2)
+
+ fn3 = testdir.collect_by_name(modcol, "test_fail")
+ assert isinstance(fn3, pytest.Function)
+ assert not (fn1 == fn3)
+ assert fn1 != fn3
+
+ for fn in fn1, fn2, fn3:
+ assert fn != 3
+ assert fn != modcol
+ assert fn != [1, 2, 3]
+ assert [1, 2, 3] != fn
+ assert modcol != fn
+
+ def test_getparent(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ class TestClass(object):
+ def test_foo():
+ pass
+ """
+ )
+ cls = testdir.collect_by_name(modcol, "TestClass")
+ fn = testdir.collect_by_name(testdir.collect_by_name(cls, "()"), "test_foo")
+
+ parent = fn.getparent(pytest.Module)
+ assert parent is modcol
+
+ parent = fn.getparent(pytest.Function)
+ assert parent is fn
+
+ parent = fn.getparent(pytest.Class)
+ assert parent is cls
+
+ def test_getcustomfile_roundtrip(self, testdir):
+ hello = testdir.makefile(".xxx", hello="world")
+ testdir.makepyfile(
+ conftest="""
+ import pytest
+ class CustomFile(pytest.File):
+ pass
+ def pytest_collect_file(path, parent):
+ if path.ext == ".xxx":
+ return CustomFile(path, parent=parent)
+ """
+ )
+ node = testdir.getpathnode(hello)
+ assert isinstance(node, pytest.File)
+ assert node.name == "hello.xxx"
+ nodes = node.session.perform_collect([node.nodeid], genitems=False)
+ assert len(nodes) == 1
+ assert isinstance(nodes[0], pytest.File)
+
+ def test_can_skip_class_with_test_attr(self, testdir):
+ """Assure test class is skipped when using `__test__=False` (See #2007)."""
+ testdir.makepyfile(
+ """
+ class TestFoo(object):
+ __test__ = False
+ def __init__(self):
+ pass
+ def test_foo():
+ assert True
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["collected 0 items", "*no tests ran in*"])
+
+
+class TestCollectFS(object):
+
+ def test_ignored_certain_directories(self, testdir):
+ tmpdir = testdir.tmpdir
+ tmpdir.ensure("build", "test_notfound.py")
+ tmpdir.ensure("dist", "test_notfound.py")
+ tmpdir.ensure("_darcs", "test_notfound.py")
+ tmpdir.ensure("CVS", "test_notfound.py")
+ tmpdir.ensure("{arch}", "test_notfound.py")
+ tmpdir.ensure(".whatever", "test_notfound.py")
+ tmpdir.ensure(".bzr", "test_notfound.py")
+ tmpdir.ensure("normal", "test_found.py")
+ for x in tmpdir.visit("test_*.py"):
+ x.write("def test_hello(): pass")
+
+ result = testdir.runpytest("--collect-only")
+ s = result.stdout.str()
+ assert "test_notfound" not in s
+ assert "test_found" in s
+
+ @pytest.mark.parametrize(
+ "fname",
+ (
+ "activate",
+ "activate.csh",
+ "activate.fish",
+ "Activate",
+ "Activate.bat",
+ "Activate.ps1",
+ ),
+ )
+ def test_ignored_virtualenvs(self, testdir, fname):
+ bindir = "Scripts" if sys.platform.startswith("win") else "bin"
+ testdir.tmpdir.ensure("virtual", bindir, fname)
+ testfile = testdir.tmpdir.ensure("virtual", "test_invenv.py")
+ testfile.write("def test_hello(): pass")
+
+ # by default, ignore tests inside a virtualenv
+ result = testdir.runpytest()
+ assert "test_invenv" not in result.stdout.str()
+ # allow test collection if user insists
+ result = testdir.runpytest("--collect-in-virtualenv")
+ assert "test_invenv" in result.stdout.str()
+ # allow test collection if user directly passes in the directory
+ result = testdir.runpytest("virtual")
+ assert "test_invenv" in result.stdout.str()
+
+ @pytest.mark.parametrize(
+ "fname",
+ (
+ "activate",
+ "activate.csh",
+ "activate.fish",
+ "Activate",
+ "Activate.bat",
+ "Activate.ps1",
+ ),
+ )
+ def test_ignored_virtualenvs_norecursedirs_precedence(self, testdir, fname):
+ bindir = "Scripts" if sys.platform.startswith("win") else "bin"
+ # norecursedirs takes priority
+ testdir.tmpdir.ensure(".virtual", bindir, fname)
+ testfile = testdir.tmpdir.ensure(".virtual", "test_invenv.py")
+ testfile.write("def test_hello(): pass")
+ result = testdir.runpytest("--collect-in-virtualenv")
+ assert "test_invenv" not in result.stdout.str()
+ # ...unless the virtualenv is explicitly given on the CLI
+ result = testdir.runpytest("--collect-in-virtualenv", ".virtual")
+ assert "test_invenv" in result.stdout.str()
+
+ @pytest.mark.parametrize(
+ "fname",
+ (
+ "activate",
+ "activate.csh",
+ "activate.fish",
+ "Activate",
+ "Activate.bat",
+ "Activate.ps1",
+ ),
+ )
+ def test__in_venv(self, testdir, fname):
+ """Directly test the virtual env detection function"""
+ bindir = "Scripts" if sys.platform.startswith("win") else "bin"
+ # no bin/activate, not a virtualenv
+ base_path = testdir.tmpdir.mkdir("venv")
+ assert _in_venv(base_path) is False
+ # with bin/activate, totally a virtualenv
+ base_path.ensure(bindir, fname)
+ assert _in_venv(base_path) is True
+
+ def test_custom_norecursedirs(self, testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ norecursedirs = mydir xyz*
+ """
+ )
+ tmpdir = testdir.tmpdir
+ tmpdir.ensure("mydir", "test_hello.py").write("def test_1(): pass")
+ tmpdir.ensure("xyz123", "test_2.py").write("def test_2(): 0/0")
+ tmpdir.ensure("xy", "test_ok.py").write("def test_3(): pass")
+ rec = testdir.inline_run()
+ rec.assertoutcome(passed=1)
+ rec = testdir.inline_run("xyz123/test_2.py")
+ rec.assertoutcome(failed=1)
+
+ def test_testpaths_ini(self, testdir, monkeypatch):
+ testdir.makeini(
+ """
+ [pytest]
+ testpaths = gui uts
+ """
+ )
+ tmpdir = testdir.tmpdir
+ tmpdir.ensure("env", "test_1.py").write("def test_env(): pass")
+ tmpdir.ensure("gui", "test_2.py").write("def test_gui(): pass")
+ tmpdir.ensure("uts", "test_3.py").write("def test_uts(): pass")
+
+ # executing from rootdir only tests from `testpaths` directories
+ # are collected
+ items, reprec = testdir.inline_genitems("-v")
+ assert [x.name for x in items] == ["test_gui", "test_uts"]
+
+ # check that explicitly passing directories in the command-line
+ # collects the tests
+ for dirname in ("env", "gui", "uts"):
+ items, reprec = testdir.inline_genitems(tmpdir.join(dirname))
+ assert [x.name for x in items] == ["test_%s" % dirname]
+
+ # changing cwd to each subdirectory and running pytest without
+ # arguments collects the tests in that directory normally
+ for dirname in ("env", "gui", "uts"):
+ monkeypatch.chdir(testdir.tmpdir.join(dirname))
+ items, reprec = testdir.inline_genitems()
+ assert [x.name for x in items] == ["test_%s" % dirname]
+
+
+class TestCollectPluginHookRelay(object):
+
+ def test_pytest_collect_file(self, testdir):
+ wascalled = []
+
+ class Plugin(object):
+
+ def pytest_collect_file(self, path, parent):
+ if not path.basename.startswith("."):
+ # Ignore hidden files, e.g. .testmondata.
+ wascalled.append(path)
+
+ testdir.makefile(".abc", "xyz")
+ pytest.main([testdir.tmpdir], plugins=[Plugin()])
+ assert len(wascalled) == 1
+ assert wascalled[0].ext == ".abc"
+
+ def test_pytest_collect_directory(self, testdir):
+ wascalled = []
+
+ class Plugin(object):
+
+ def pytest_collect_directory(self, path, parent):
+ wascalled.append(path.basename)
+
+ testdir.mkdir("hello")
+ testdir.mkdir("world")
+ pytest.main(testdir.tmpdir, plugins=[Plugin()])
+ assert "hello" in wascalled
+ assert "world" in wascalled
+
+
+class TestPrunetraceback(object):
+
+ def test_custom_repr_failure(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import not_exists
+ """
+ )
+ testdir.makeconftest(
+ """
+ import pytest
+ def pytest_collect_file(path, parent):
+ return MyFile(path, parent)
+ class MyError(Exception):
+ pass
+ class MyFile(pytest.File):
+ def collect(self):
+ raise MyError()
+ def repr_failure(self, excinfo):
+ if excinfo.errisinstance(MyError):
+ return "hello world"
+ return pytest.File.repr_failure(self, excinfo)
+ """
+ )
+
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(["*ERROR collecting*", "*hello world*"])
+
+ @pytest.mark.xfail(reason="other mechanism for adding to reporting needed")
+ def test_collect_report_postprocessing(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import not_exists
+ """
+ )
+ testdir.makeconftest(
+ """
+ import pytest
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_make_collect_report():
+ outcome = yield
+ rep = outcome.get_result()
+ rep.headerlines += ["header1"]
+ outcome.force_result(rep)
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(["*ERROR collecting*", "*header1*"])
+
+
+class TestCustomConftests(object):
+
+ def test_ignore_collect_path(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_ignore_collect(path, config):
+ return path.basename.startswith("x") or \
+ path.basename == "test_one.py"
+ """
+ )
+ sub = testdir.mkdir("xy123")
+ sub.ensure("test_hello.py").write("syntax error")
+ sub.join("conftest.py").write("syntax error")
+ testdir.makepyfile("def test_hello(): pass")
+ testdir.makepyfile(test_one="syntax error")
+ result = testdir.runpytest("--fulltrace")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_ignore_collect_not_called_on_argument(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_ignore_collect(path, config):
+ return True
+ """
+ )
+ p = testdir.makepyfile("def test_hello(): pass")
+ result = testdir.runpytest(p)
+ assert result.ret == 0
+ result.stdout.fnmatch_lines("*1 passed*")
+ result = testdir.runpytest()
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+ result.stdout.fnmatch_lines("*collected 0 items*")
+
+ def test_collectignore_exclude_on_option(self, testdir):
+ testdir.makeconftest(
+ """
+ collect_ignore = ['hello', 'test_world.py']
+ def pytest_addoption(parser):
+ parser.addoption("--XX", action="store_true", default=False)
+ def pytest_configure(config):
+ if config.getvalue("XX"):
+ collect_ignore[:] = []
+ """
+ )
+ testdir.mkdir("hello")
+ testdir.makepyfile(test_world="def test_hello(): pass")
+ result = testdir.runpytest()
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+ assert "passed" not in result.stdout.str()
+ result = testdir.runpytest("--XX")
+ assert result.ret == 0
+ assert "passed" in result.stdout.str()
+
+ def test_pytest_fs_collect_hooks_are_seen(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ class MyModule(pytest.Module):
+ pass
+ def pytest_collect_file(path, parent):
+ if path.ext == ".py":
+ return MyModule(path, parent)
+ """
+ )
+ testdir.mkdir("sub")
+ testdir.makepyfile("def test_x(): pass")
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*MyModule*", "*test_x*"])
+
+ def test_pytest_collect_file_from_sister_dir(self, testdir):
+ sub1 = testdir.mkpydir("sub1")
+ sub2 = testdir.mkpydir("sub2")
+ conf1 = testdir.makeconftest(
+ """
+ import pytest
+ class MyModule1(pytest.Module):
+ pass
+ def pytest_collect_file(path, parent):
+ if path.ext == ".py":
+ return MyModule1(path, parent)
+ """
+ )
+ conf1.move(sub1.join(conf1.basename))
+ conf2 = testdir.makeconftest(
+ """
+ import pytest
+ class MyModule2(pytest.Module):
+ pass
+ def pytest_collect_file(path, parent):
+ if path.ext == ".py":
+ return MyModule2(path, parent)
+ """
+ )
+ conf2.move(sub2.join(conf2.basename))
+ p = testdir.makepyfile("def test_x(): pass")
+ p.copy(sub1.join(p.basename))
+ p.copy(sub2.join(p.basename))
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*MyModule1*", "*MyModule2*", "*test_x*"])
+
+
+class TestSession(object):
+
+ def test_parsearg(self, testdir):
+ p = testdir.makepyfile("def test_func(): pass")
+ subdir = testdir.mkdir("sub")
+ subdir.ensure("__init__.py")
+ target = subdir.join(p.basename)
+ p.move(target)
+ subdir.chdir()
+ config = testdir.parseconfig(p.basename)
+ rcol = Session(config=config)
+ assert rcol.fspath == subdir
+ parts = rcol._parsearg(p.basename)
+
+ assert parts[0] == target
+ assert len(parts) == 1
+ parts = rcol._parsearg(p.basename + "::test_func")
+ assert parts[0] == target
+ assert parts[1] == "test_func"
+ assert len(parts) == 2
+
+ def test_collect_topdir(self, testdir):
+ p = testdir.makepyfile("def test_func(): pass")
+ id = "::".join([p.basename, "test_func"])
+ # XXX migrate to collectonly? (see below)
+ config = testdir.parseconfig(id)
+ topdir = testdir.tmpdir
+ rcol = Session(config)
+ assert topdir == rcol.fspath
+ # rootid = rcol.nodeid
+ # root2 = rcol.perform_collect([rcol.nodeid], genitems=False)[0]
+ # assert root2 == rcol, rootid
+ colitems = rcol.perform_collect([rcol.nodeid], genitems=False)
+ assert len(colitems) == 1
+ assert colitems[0].fspath == p
+
+ def get_reported_items(self, hookrec):
+ """Return pytest.Item instances reported by the pytest_collectreport hook"""
+ calls = hookrec.getcalls("pytest_collectreport")
+ return [
+ x
+ for call in calls
+ for x in call.report.result
+ if isinstance(x, pytest.Item)
+ ]
+
+ def test_collect_protocol_single_function(self, testdir):
+ p = testdir.makepyfile("def test_func(): pass")
+ id = "::".join([p.basename, "test_func"])
+ items, hookrec = testdir.inline_genitems(id)
+ item, = items
+ assert item.name == "test_func"
+ newid = item.nodeid
+ assert newid == id
+ pprint.pprint(hookrec.calls)
+ topdir = testdir.tmpdir # noqa
+ hookrec.assert_contains(
+ [
+ ("pytest_collectstart", "collector.fspath == topdir"),
+ ("pytest_make_collect_report", "collector.fspath == topdir"),
+ ("pytest_collectstart", "collector.fspath == p"),
+ ("pytest_make_collect_report", "collector.fspath == p"),
+ ("pytest_pycollect_makeitem", "name == 'test_func'"),
+ ("pytest_collectreport", "report.result[0].name == 'test_func'"),
+ ]
+ )
+ # ensure we are reporting the collection of the single test item (#2464)
+ assert [x.name for x in self.get_reported_items(hookrec)] == ["test_func"]
+
+ def test_collect_protocol_method(self, testdir):
+ p = testdir.makepyfile(
+ """
+ class TestClass(object):
+ def test_method(self):
+ pass
+ """
+ )
+ normid = p.basename + "::TestClass::()::test_method"
+ for id in [
+ p.basename,
+ p.basename + "::TestClass",
+ p.basename + "::TestClass::()",
+ normid,
+ ]:
+ items, hookrec = testdir.inline_genitems(id)
+ assert len(items) == 1
+ assert items[0].name == "test_method"
+ newid = items[0].nodeid
+ assert newid == normid
+ # ensure we are reporting the collection of the single test item (#2464)
+ assert [x.name for x in self.get_reported_items(hookrec)] == ["test_method"]
+
+ def test_collect_custom_nodes_multi_id(self, testdir):
+ p = testdir.makepyfile("def test_func(): pass")
+ testdir.makeconftest(
+ """
+ import pytest
+ class SpecialItem(pytest.Item):
+ def runtest(self):
+ return # ok
+ class SpecialFile(pytest.File):
+ def collect(self):
+ return [SpecialItem(name="check", parent=self)]
+ def pytest_collect_file(path, parent):
+ if path.basename == %r:
+ return SpecialFile(fspath=path, parent=parent)
+ """
+ % p.basename
+ )
+ id = p.basename
+
+ items, hookrec = testdir.inline_genitems(id)
+ pprint.pprint(hookrec.calls)
+ assert len(items) == 2
+ hookrec.assert_contains(
+ [
+ ("pytest_collectstart", "collector.fspath == collector.session.fspath"),
+ (
+ "pytest_collectstart",
+ "collector.__class__.__name__ == 'SpecialFile'",
+ ),
+ ("pytest_collectstart", "collector.__class__.__name__ == 'Module'"),
+ ("pytest_pycollect_makeitem", "name == 'test_func'"),
+ ("pytest_collectreport", "report.nodeid.startswith(p.basename)"),
+ ]
+ )
+ assert len(self.get_reported_items(hookrec)) == 2
+
+ def test_collect_subdir_event_ordering(self, testdir):
+ p = testdir.makepyfile("def test_func(): pass")
+ aaa = testdir.mkpydir("aaa")
+ test_aaa = aaa.join("test_aaa.py")
+ p.move(test_aaa)
+
+ items, hookrec = testdir.inline_genitems()
+ assert len(items) == 1
+ pprint.pprint(hookrec.calls)
+ hookrec.assert_contains(
+ [
+ ("pytest_collectstart", "collector.fspath == test_aaa"),
+ ("pytest_pycollect_makeitem", "name == 'test_func'"),
+ ("pytest_collectreport", "report.nodeid.startswith('aaa/test_aaa.py')"),
+ ]
+ )
+
+ def test_collect_two_commandline_args(self, testdir):
+ p = testdir.makepyfile("def test_func(): pass")
+ aaa = testdir.mkpydir("aaa")
+ bbb = testdir.mkpydir("bbb")
+ test_aaa = aaa.join("test_aaa.py")
+ p.copy(test_aaa)
+ test_bbb = bbb.join("test_bbb.py")
+ p.move(test_bbb)
+
+ id = "."
+
+ items, hookrec = testdir.inline_genitems(id)
+ assert len(items) == 2
+ pprint.pprint(hookrec.calls)
+ hookrec.assert_contains(
+ [
+ ("pytest_collectstart", "collector.fspath == test_aaa"),
+ ("pytest_pycollect_makeitem", "name == 'test_func'"),
+ ("pytest_collectreport", "report.nodeid == 'aaa/test_aaa.py'"),
+ ("pytest_collectstart", "collector.fspath == test_bbb"),
+ ("pytest_pycollect_makeitem", "name == 'test_func'"),
+ ("pytest_collectreport", "report.nodeid == 'bbb/test_bbb.py'"),
+ ]
+ )
+
+ def test_serialization_byid(self, testdir):
+ testdir.makepyfile("def test_func(): pass")
+ items, hookrec = testdir.inline_genitems()
+ assert len(items) == 1
+ item, = items
+ items2, hookrec = testdir.inline_genitems(item.nodeid)
+ item2, = items2
+ assert item2.name == item.name
+ assert item2.fspath == item.fspath
+
+ def test_find_byid_without_instance_parents(self, testdir):
+ p = testdir.makepyfile(
+ """
+ class TestClass(object):
+ def test_method(self):
+ pass
+ """
+ )
+ arg = p.basename + "::TestClass::test_method"
+ items, hookrec = testdir.inline_genitems(arg)
+ assert len(items) == 1
+ item, = items
+ assert item.nodeid.endswith("TestClass::()::test_method")
+ # ensure we are reporting the collection of the single test item (#2464)
+ assert [x.name for x in self.get_reported_items(hookrec)] == ["test_method"]
+
+
+class Test_getinitialnodes(object):
+
+ def test_global_file(self, testdir, tmpdir):
+ x = tmpdir.ensure("x.py")
+ with tmpdir.as_cwd():
+ config = testdir.parseconfigure(x)
+ col = testdir.getnode(config, x)
+ assert isinstance(col, pytest.Module)
+ assert col.name == "x.py"
+ assert col.parent.parent is None
+ for col in col.listchain():
+ assert col.config is config
+
+ def test_pkgfile(self, testdir):
+ tmpdir = testdir.tmpdir
+ subdir = tmpdir.join("subdir")
+ x = subdir.ensure("x.py")
+ subdir.ensure("__init__.py")
+ with subdir.as_cwd():
+ config = testdir.parseconfigure(x)
+ col = testdir.getnode(config, x)
+ assert isinstance(col, pytest.Module)
+ assert col.name == "x.py"
+ assert col.parent.parent is None
+ for col in col.listchain():
+ assert col.config is config
+
+
+class Test_genitems(object):
+
+ def test_check_collect_hashes(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def test_1():
+ pass
+
+ def test_2():
+ pass
+ """
+ )
+ p.copy(p.dirpath(p.purebasename + "2" + ".py"))
+ items, reprec = testdir.inline_genitems(p.dirpath())
+ assert len(items) == 4
+ for numi, i in enumerate(items):
+ for numj, j in enumerate(items):
+ if numj != numi:
+ assert hash(i) != hash(j)
+ assert i != j
+
+ def test_example_items1(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def testone():
+ pass
+
+ class TestX(object):
+ def testmethod_one(self):
+ pass
+
+ class TestY(TestX):
+ pass
+ """
+ )
+ items, reprec = testdir.inline_genitems(p)
+ assert len(items) == 3
+ assert items[0].name == "testone"
+ assert items[1].name == "testmethod_one"
+ assert items[2].name == "testmethod_one"
+
+ # let's also test getmodpath here
+ assert items[0].getmodpath() == "testone"
+ assert items[1].getmodpath() == "TestX.testmethod_one"
+ assert items[2].getmodpath() == "TestY.testmethod_one"
+
+ s = items[0].getmodpath(stopatmodule=False)
+ assert s.endswith("test_example_items1.testone")
+ print(s)
+
+ def test_class_and_functions_discovery_using_glob(self, testdir):
+ """
+ tests that python_classes and python_functions config options work
+ as prefixes and glob-like patterns (issue #600).
+ """
+ testdir.makeini(
+ """
+ [pytest]
+ python_classes = *Suite Test
+ python_functions = *_test test
+ """
+ )
+ p = testdir.makepyfile(
+ """
+ class MyTestSuite(object):
+ def x_test(self):
+ pass
+
+ class TestCase(object):
+ def test_y(self):
+ pass
+ """
+ )
+ items, reprec = testdir.inline_genitems(p)
+ ids = [x.getmodpath() for x in items]
+ assert ids == ["MyTestSuite.x_test", "TestCase.test_y"]
+
+
+def test_matchnodes_two_collections_same_file(testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ def pytest_configure(config):
+ config.pluginmanager.register(Plugin2())
+
+ class Plugin2(object):
+ def pytest_collect_file(self, path, parent):
+ if path.ext == ".abc":
+ return MyFile2(path, parent)
+
+ def pytest_collect_file(path, parent):
+ if path.ext == ".abc":
+ return MyFile1(path, parent)
+
+ class MyFile1(pytest.Item, pytest.File):
+ def runtest(self):
+ pass
+ class MyFile2(pytest.File):
+ def collect(self):
+ return [Item2("hello", parent=self)]
+
+ class Item2(pytest.Item):
+ def runtest(self):
+ pass
+ """
+ )
+ p = testdir.makefile(".abc", "")
+ result = testdir.runpytest()
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*2 passed*"])
+ res = testdir.runpytest("%s::hello" % p.basename)
+ res.stdout.fnmatch_lines(["*1 passed*"])
+
+
+class TestNodekeywords(object):
+
+ def test_no_under(self, testdir):
+ modcol = testdir.getmodulecol(
+ """
+ def test_pass(): pass
+ def test_fail(): assert 0
+ """
+ )
+ values = list(modcol.keywords)
+ assert modcol.name in values
+ for x in values:
+ assert not x.startswith("_")
+ assert modcol.name in repr(modcol.keywords)
+
+ def test_issue345(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_should_not_be_selected():
+ assert False, 'I should not have been selected to run'
+
+ def test___repr__():
+ pass
+ """
+ )
+ reprec = testdir.inline_run("-k repr")
+ reprec.assertoutcome(passed=1, failed=0)
+
+
+COLLECTION_ERROR_PY_FILES = dict(
+ test_01_failure="""
+ def test_1():
+ assert False
+ """,
+ test_02_import_error="""
+ import asdfasdfasdf
+ def test_2():
+ assert True
+ """,
+ test_03_import_error="""
+ import asdfasdfasdf
+ def test_3():
+ assert True
+ """,
+ test_04_success="""
+ def test_4():
+ assert True
+ """,
+)
+
+
+def test_exit_on_collection_error(testdir):
+ """Verify that all collection errors are collected and no tests executed"""
+ testdir.makepyfile(**COLLECTION_ERROR_PY_FILES)
+
+ res = testdir.runpytest()
+ assert res.ret == 2
+
+ res.stdout.fnmatch_lines(
+ [
+ "collected 2 items / 2 errors",
+ "*ERROR collecting test_02_import_error.py*",
+ "*No module named *asdfa*",
+ "*ERROR collecting test_03_import_error.py*",
+ "*No module named *asdfa*",
+ ]
+ )
+
+
+def test_exit_on_collection_with_maxfail_smaller_than_n_errors(testdir):
+ """
+    Verify collection is aborted once maxfail errors are encountered, ignoring
+    further modules which would cause more collection errors.
+ """
+ testdir.makepyfile(**COLLECTION_ERROR_PY_FILES)
+
+ res = testdir.runpytest("--maxfail=1")
+ assert res.ret == 1
+
+ res.stdout.fnmatch_lines(
+ ["*ERROR collecting test_02_import_error.py*", "*No module named *asdfa*"]
+ )
+
+ assert "test_03" not in res.stdout.str()
+
+
+def test_exit_on_collection_with_maxfail_bigger_than_n_errors(testdir):
+ """
+    Verify the test run aborts due to collection errors even if the maxfail
+    count of errors was not reached.
+ """
+ testdir.makepyfile(**COLLECTION_ERROR_PY_FILES)
+
+ res = testdir.runpytest("--maxfail=4")
+ assert res.ret == 2
+
+ res.stdout.fnmatch_lines(
+ [
+ "collected 2 items / 2 errors",
+ "*ERROR collecting test_02_import_error.py*",
+ "*No module named *asdfa*",
+ "*ERROR collecting test_03_import_error.py*",
+ "*No module named *asdfa*",
+ ]
+ )
+
+
+def test_continue_on_collection_errors(testdir):
+ """
+    Verify tests are executed even when collection errors occur, provided the
+    --continue-on-collection-errors flag is set.
+ """
+ testdir.makepyfile(**COLLECTION_ERROR_PY_FILES)
+
+ res = testdir.runpytest("--continue-on-collection-errors")
+ assert res.ret == 1
+
+ res.stdout.fnmatch_lines(
+ ["collected 2 items / 2 errors", "*1 failed, 1 passed, 2 error*"]
+ )
+
+
+def test_continue_on_collection_errors_maxfail(testdir):
+ """
+    Verify tests are executed even when collection errors occur and that maxfail
+    is honoured (including the collection error count).
+    4 tests: 2 collection errors + 1 failure + 1 success.
+    test_4 is never executed because the run uses --maxfail=3, which means it is
+    interrupted after the 2 collection errors + 1 failure.
+ """
+ testdir.makepyfile(**COLLECTION_ERROR_PY_FILES)
+
+ res = testdir.runpytest("--continue-on-collection-errors", "--maxfail=3")
+ assert res.ret == 1
+
+ res.stdout.fnmatch_lines(["collected 2 items / 2 errors", "*1 failed, 2 error*"])
+
+
+def test_fixture_scope_sibling_conftests(testdir):
+ """Regression test case for https://github.com/pytest-dev/pytest/issues/2836"""
+ foo_path = testdir.mkpydir("foo")
+ foo_path.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+ @pytest.fixture
+ def fix():
+ return 1
+ """
+ )
+ )
+ foo_path.join("test_foo.py").write("def test_foo(fix): assert fix == 1")
+
+ # Tests in `food/` should not see the conftest fixture from `foo/`
+ food_path = testdir.mkpydir("food")
+ food_path.join("test_food.py").write("def test_food(fix): assert fix == 1")
+
+ res = testdir.runpytest()
+ assert res.ret == 1
+
+ res.stdout.fnmatch_lines(
+ [
+ "*ERROR at setup of test_food*",
+ "E*fixture 'fix' not found",
+ "*1 passed, 1 error*",
+ ]
+ )
diff --git a/third_party/python/pytest/testing/test_compat.py b/third_party/python/pytest/testing/test_compat.py
new file mode 100644
index 0000000000..550a8f1b35
--- /dev/null
+++ b/third_party/python/pytest/testing/test_compat.py
@@ -0,0 +1,110 @@
+from __future__ import absolute_import, division, print_function
+import sys
+
+import pytest
+from _pytest.compat import is_generator, get_real_func, safe_getattr
+from _pytest.outcomes import OutcomeException
+
+
+def test_is_generator():
+
+ def zap():
+ yield
+
+ def foo():
+ pass
+
+ assert is_generator(zap)
+ assert not is_generator(foo)
+
+
+def test_real_func_loop_limit():
+
+ class Evil(object):
+
+ def __init__(self):
+ self.left = 1000
+
+ def __repr__(self):
+ return "<Evil left={left}>".format(left=self.left)
+
+ def __getattr__(self, attr):
+ if not self.left:
+ raise RuntimeError("its over")
+ self.left -= 1
+ return self
+
+ evil = Evil()
+
+ with pytest.raises(ValueError):
+ res = get_real_func(evil)
+ print(res)
+
+
+@pytest.mark.skipif(
+ sys.version_info < (3, 4), reason="asyncio available in Python 3.4+"
+)
+def test_is_generator_asyncio(testdir):
+ testdir.makepyfile(
+ """
+ from _pytest.compat import is_generator
+ import asyncio
+ @asyncio.coroutine
+ def baz():
+ yield from [1,2,3]
+
+ def test_is_generator_asyncio():
+ assert not is_generator(baz)
+ """
+ )
+ # avoid importing asyncio into pytest's own process,
+ # which in turn imports logging (#8)
+ result = testdir.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+@pytest.mark.skipif(
+ sys.version_info < (3, 5), reason="async syntax available in Python 3.5+"
+)
+def test_is_generator_async_syntax(testdir):
+ testdir.makepyfile(
+ """
+ from _pytest.compat import is_generator
+ def test_is_generator_py35():
+ async def foo():
+ await foo()
+
+ async def bar():
+ pass
+
+ assert not is_generator(foo)
+ assert not is_generator(bar)
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+class ErrorsHelper(object):
+
+ @property
+ def raise_exception(self):
+ raise Exception("exception should be catched")
+
+ @property
+ def raise_fail(self):
+ pytest.fail("fail should be catched")
+
+
+def test_helper_failures():
+ helper = ErrorsHelper()
+ with pytest.raises(Exception):
+ helper.raise_exception
+ with pytest.raises(OutcomeException):
+ helper.raise_fail
+
+
+def test_safe_getattr():
+ helper = ErrorsHelper()
+ assert safe_getattr(helper, "raise_exception", "default") == "default"
+ assert safe_getattr(helper, "raise_fail", "default") == "default"
diff --git a/third_party/python/pytest/testing/test_config.py b/third_party/python/pytest/testing/test_config.py
new file mode 100644
index 0000000000..5fb048364f
--- /dev/null
+++ b/third_party/python/pytest/testing/test_config.py
@@ -0,0 +1,1068 @@
+from __future__ import absolute_import, division, print_function
+import sys
+import textwrap
+import pytest
+
+import _pytest._code
+from _pytest.config.findpaths import getcfg, get_common_ancestor, determine_setup
+from _pytest.config import _iter_rewritable_modules
+from _pytest.main import EXIT_NOTESTSCOLLECTED
+
+
+class TestParseIni(object):
+
+ @pytest.mark.parametrize(
+ "section, filename", [("pytest", "pytest.ini"), ("tool:pytest", "setup.cfg")]
+ )
+ def test_getcfg_and_config(self, testdir, tmpdir, section, filename):
+ sub = tmpdir.mkdir("sub")
+ sub.chdir()
+ tmpdir.join(filename).write(
+ _pytest._code.Source(
+ """
+ [{section}]
+ name = value
+ """.format(
+ section=section
+ )
+ )
+ )
+ rootdir, inifile, cfg = getcfg([sub])
+ assert cfg["name"] == "value"
+ config = testdir.parseconfigure(sub)
+ assert config.inicfg["name"] == "value"
+
+ def test_getcfg_empty_path(self):
+ """correctly handle zero length arguments (a la pytest '')"""
+ getcfg([""])
+
+ def test_append_parse_args(self, testdir, tmpdir, monkeypatch):
+ monkeypatch.setenv("PYTEST_ADDOPTS", '--color no -rs --tb="short"')
+ tmpdir.join("pytest.ini").write(
+ _pytest._code.Source(
+ """
+ [pytest]
+ addopts = --verbose
+ """
+ )
+ )
+ config = testdir.parseconfig(tmpdir)
+ assert config.option.color == "no"
+ assert config.option.reportchars == "s"
+ assert config.option.tbstyle == "short"
+ assert config.option.verbose
+
+ def test_tox_ini_wrong_version(self, testdir):
+ testdir.makefile(
+ ".ini",
+ tox="""
+ [pytest]
+ minversion=9.0
+ """,
+ )
+ result = testdir.runpytest()
+ assert result.ret != 0
+ result.stderr.fnmatch_lines(["*tox.ini:2*requires*9.0*actual*"])
+
+ @pytest.mark.parametrize(
+ "section, name",
+ [("tool:pytest", "setup.cfg"), ("pytest", "tox.ini"), ("pytest", "pytest.ini")],
+ )
+ def test_ini_names(self, testdir, name, section):
+ testdir.tmpdir.join(name).write(
+ textwrap.dedent(
+ """
+ [{section}]
+ minversion = 1.0
+ """.format(
+ section=section
+ )
+ )
+ )
+ config = testdir.parseconfig()
+ assert config.getini("minversion") == "1.0"
+
+ def test_toxini_before_lower_pytestini(self, testdir):
+ sub = testdir.tmpdir.mkdir("sub")
+ sub.join("tox.ini").write(
+ textwrap.dedent(
+ """
+ [pytest]
+ minversion = 2.0
+ """
+ )
+ )
+ testdir.tmpdir.join("pytest.ini").write(
+ textwrap.dedent(
+ """
+ [pytest]
+ minversion = 1.5
+ """
+ )
+ )
+ config = testdir.parseconfigure(sub)
+ assert config.getini("minversion") == "2.0"
+
+ @pytest.mark.xfail(reason="probably not needed")
+ def test_confcutdir(self, testdir):
+ sub = testdir.mkdir("sub")
+ sub.chdir()
+ testdir.makeini(
+ """
+ [pytest]
+ addopts = --qwe
+ """
+ )
+ result = testdir.inline_run("--confcutdir=.")
+ assert result.ret == 0
+
+
+class TestConfigCmdlineParsing(object):
+
+ def test_parsing_again_fails(self, testdir):
+ config = testdir.parseconfig()
+ pytest.raises(AssertionError, lambda: config.parse([]))
+
+ def test_explicitly_specified_config_file_is_loaded(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("custom", "")
+ """
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ custom = 0
+ """
+ )
+ testdir.makefile(
+ ".cfg",
+ custom="""
+ [pytest]
+ custom = 1
+ """,
+ )
+ config = testdir.parseconfig("-c", "custom.cfg")
+ assert config.getini("custom") == "1"
+
+ testdir.makefile(
+ ".cfg",
+ custom_tool_pytest_section="""
+ [tool:pytest]
+ custom = 1
+ """,
+ )
+ config = testdir.parseconfig("-c", "custom_tool_pytest_section.cfg")
+ assert config.getini("custom") == "1"
+
+ def test_absolute_win32_path(self, testdir):
+ temp_cfg_file = testdir.makefile(
+ ".cfg",
+ custom="""
+ [pytest]
+ addopts = --version
+ """,
+ )
+ from os.path import normpath
+
+ temp_cfg_file = normpath(str(temp_cfg_file))
+ ret = pytest.main("-c " + temp_cfg_file)
+ assert ret == _pytest.main.EXIT_OK
+
+
+class TestConfigAPI(object):
+
+ def test_config_trace(self, testdir):
+ config = testdir.parseconfig()
+ values = []
+ config.trace.root.setwriter(values.append)
+ config.trace("hello")
+ assert len(values) == 1
+ assert values[0] == "hello [config]\n"
+
+ def test_config_getoption(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addoption("--hello", "-X", dest="hello")
+ """
+ )
+ config = testdir.parseconfig("--hello=this")
+ for x in ("hello", "--hello", "-X"):
+ assert config.getoption(x) == "this"
+ pytest.raises(ValueError, "config.getoption('qweqwe')")
+
+ @pytest.mark.skipif("sys.version_info[0] < 3")
+ def test_config_getoption_unicode(self, testdir):
+ testdir.makeconftest(
+ """
+ from __future__ import unicode_literals
+
+ def pytest_addoption(parser):
+ parser.addoption('--hello', type=str)
+ """
+ )
+ config = testdir.parseconfig("--hello=this")
+ assert config.getoption("hello") == "this"
+
+ def test_config_getvalueorskip(self, testdir):
+ config = testdir.parseconfig()
+ pytest.raises(pytest.skip.Exception, "config.getvalueorskip('hello')")
+ verbose = config.getvalueorskip("verbose")
+ assert verbose == config.option.verbose
+
+ def test_config_getvalueorskip_None(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addoption("--hello")
+ """
+ )
+ config = testdir.parseconfig()
+ with pytest.raises(pytest.skip.Exception):
+ config.getvalueorskip("hello")
+
+ def test_getoption(self, testdir):
+ config = testdir.parseconfig()
+ with pytest.raises(ValueError):
+ config.getvalue("x")
+ assert config.getoption("x", 1) == 1
+
+ def test_getconftest_pathlist(self, testdir, tmpdir):
+ somepath = tmpdir.join("x", "y", "z")
+ p = tmpdir.join("conftest.py")
+ p.write("pathlist = ['.', %r]" % str(somepath))
+ config = testdir.parseconfigure(p)
+ assert config._getconftest_pathlist("notexist", path=tmpdir) is None
+ pl = config._getconftest_pathlist("pathlist", path=tmpdir)
+ print(pl)
+ assert len(pl) == 2
+ assert pl[0] == tmpdir
+ assert pl[1] == somepath
+
+ def test_addini(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("myname", "my new ini value")
+ """
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ myname=hello
+ """
+ )
+ config = testdir.parseconfig()
+ val = config.getini("myname")
+ assert val == "hello"
+ pytest.raises(ValueError, config.getini, "other")
+
+ def test_addini_pathlist(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("paths", "my new ini value", type="pathlist")
+ parser.addini("abc", "abc value")
+ """
+ )
+ p = testdir.makeini(
+ """
+ [pytest]
+ paths=hello world/sub.py
+ """
+ )
+ config = testdir.parseconfig()
+ values = config.getini("paths")
+ assert len(values) == 2
+ assert values[0] == p.dirpath("hello")
+ assert values[1] == p.dirpath("world/sub.py")
+ pytest.raises(ValueError, config.getini, "other")
+
+ def test_addini_args(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("args", "new args", type="args")
+ parser.addini("a2", "", "args", default="1 2 3".split())
+ """
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ args=123 "123 hello" "this"
+ """
+ )
+ config = testdir.parseconfig()
+ values = config.getini("args")
+ assert len(values) == 3
+ assert values == ["123", "123 hello", "this"]
+ values = config.getini("a2")
+ assert values == list("123")
+
+ def test_addini_linelist(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("xy", "", type="linelist")
+ parser.addini("a2", "", "linelist")
+ """
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ xy= 123 345
+ second line
+ """
+ )
+ config = testdir.parseconfig()
+ values = config.getini("xy")
+ assert len(values) == 2
+ assert values == ["123 345", "second line"]
+ values = config.getini("a2")
+ assert values == []
+
+ @pytest.mark.parametrize(
+ "str_val, bool_val", [("True", True), ("no", False), ("no-ini", True)]
+ )
+ def test_addini_bool(self, testdir, str_val, bool_val):
+ testdir.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("strip", "", type="bool", default=True)
+ """
+ )
+ if str_val != "no-ini":
+ testdir.makeini(
+ """
+ [pytest]
+ strip=%s
+ """
+ % str_val
+ )
+ config = testdir.parseconfig()
+ assert config.getini("strip") is bool_val
+
+ def test_addinivalue_line_existing(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("xy", "", type="linelist")
+ """
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ xy= 123
+ """
+ )
+ config = testdir.parseconfig()
+ values = config.getini("xy")
+ assert len(values) == 1
+ assert values == ["123"]
+ config.addinivalue_line("xy", "456")
+ values = config.getini("xy")
+ assert len(values) == 2
+ assert values == ["123", "456"]
+
+ def test_addinivalue_line_new(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("xy", "", type="linelist")
+ """
+ )
+ config = testdir.parseconfig()
+ assert not config.getini("xy")
+ config.addinivalue_line("xy", "456")
+ values = config.getini("xy")
+ assert len(values) == 1
+ assert values == ["456"]
+ config.addinivalue_line("xy", "123")
+ values = config.getini("xy")
+ assert len(values) == 2
+ assert values == ["456", "123"]
+
+ def test_confcutdir_check_isdir(self, testdir):
+ """Give an error if --confcutdir is not a valid directory (#2078)"""
+ with pytest.raises(pytest.UsageError):
+ testdir.parseconfig(
+ "--confcutdir", testdir.tmpdir.join("file").ensure(file=1)
+ )
+ with pytest.raises(pytest.UsageError):
+ testdir.parseconfig("--confcutdir", testdir.tmpdir.join("inexistant"))
+ config = testdir.parseconfig(
+ "--confcutdir", testdir.tmpdir.join("dir").ensure(dir=1)
+ )
+ assert config.getoption("confcutdir") == str(testdir.tmpdir.join("dir"))
+
+ @pytest.mark.parametrize(
+ "names, expected",
+ [
+ (["bar.py"], ["bar"]),
+ (["foo", "bar.py"], []),
+ (["foo", "bar.pyc"], []),
+ (["foo", "__init__.py"], ["foo"]),
+ (["foo", "bar", "__init__.py"], []),
+ ],
+ )
+ def test_iter_rewritable_modules(self, names, expected):
+ assert list(_iter_rewritable_modules(["/".join(names)])) == expected
+
+
+class TestConfigFromdictargs(object):
+
+ def test_basic_behavior(self):
+ from _pytest.config import Config
+
+ option_dict = {"verbose": 444, "foo": "bar", "capture": "no"}
+ args = ["a", "b"]
+
+ config = Config.fromdictargs(option_dict, args)
+ with pytest.raises(AssertionError):
+ config.parse(["should refuse to parse again"])
+ assert config.option.verbose == 444
+ assert config.option.foo == "bar"
+ assert config.option.capture == "no"
+ assert config.args == args
+
+ def test_origargs(self):
+ """Show that fromdictargs can handle args in their "orig" format"""
+ from _pytest.config import Config
+
+ option_dict = {}
+ args = ["-vvvv", "-s", "a", "b"]
+
+ config = Config.fromdictargs(option_dict, args)
+ assert config.args == ["a", "b"]
+ assert config._origargs == args
+ assert config.option.verbose == 4
+ assert config.option.capture == "no"
+
+ def test_inifilename(self, tmpdir):
+ tmpdir.join("foo/bar.ini").ensure().write(
+ _pytest._code.Source(
+ """
+ [pytest]
+ name = value
+ """
+ )
+ )
+
+ from _pytest.config import Config
+
+ inifile = "../../foo/bar.ini"
+ option_dict = {"inifilename": inifile, "capture": "no"}
+
+ cwd = tmpdir.join("a/b")
+ cwd.join("pytest.ini").ensure().write(
+ _pytest._code.Source(
+ """
+ [pytest]
+ name = wrong-value
+ should_not_be_set = true
+ """
+ )
+ )
+ with cwd.ensure(dir=True).as_cwd():
+ config = Config.fromdictargs(option_dict, ())
+
+ assert config.args == [str(cwd)]
+ assert config.option.inifilename == inifile
+ assert config.option.capture == "no"
+
+ # this indicates this is the file used for getting configuration values
+ assert config.inifile == inifile
+ assert config.inicfg.get("name") == "value"
+ assert config.inicfg.get("should_not_be_set") is None
+
+
+def test_options_on_small_file_do_not_blow_up(testdir):
+
+ def runfiletest(opts):
+ reprec = testdir.inline_run(*opts)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 2
+ assert skipped == passed == 0
+
+ path = testdir.makepyfile(
+ """
+ def test_f1(): assert 0
+ def test_f2(): assert 0
+ """
+ )
+
+ for opts in (
+ [],
+ ["-l"],
+ ["-s"],
+ ["--tb=no"],
+ ["--tb=short"],
+ ["--tb=long"],
+ ["--fulltrace"],
+ ["--traceconfig"],
+ ["-v"],
+ ["-v", "-v"],
+ ):
+ runfiletest(opts + [path])
+
+
+def test_preparse_ordering_with_setuptools(testdir, monkeypatch):
+ pkg_resources = pytest.importorskip("pkg_resources")
+
+ def my_iter(name):
+ assert name == "pytest11"
+
+ class Dist(object):
+ project_name = "spam"
+ version = "1.0"
+
+ def _get_metadata(self, name):
+ return ["foo.txt,sha256=abc,123"]
+
+ class EntryPoint(object):
+ name = "mytestplugin"
+ dist = Dist()
+
+ def load(self):
+
+ class PseudoPlugin(object):
+ x = 42
+
+ return PseudoPlugin()
+
+ return iter([EntryPoint()])
+
+ monkeypatch.setattr(pkg_resources, "iter_entry_points", my_iter)
+ testdir.makeconftest(
+ """
+ pytest_plugins = "mytestplugin",
+ """
+ )
+ monkeypatch.setenv("PYTEST_PLUGINS", "mytestplugin")
+ config = testdir.parseconfig()
+ plugin = config.pluginmanager.getplugin("mytestplugin")
+ assert plugin.x == 42
+
+
+def test_setuptools_importerror_issue1479(testdir, monkeypatch):
+ pkg_resources = pytest.importorskip("pkg_resources")
+
+ def my_iter(name):
+ assert name == "pytest11"
+
+ class Dist(object):
+ project_name = "spam"
+ version = "1.0"
+
+ def _get_metadata(self, name):
+ return ["foo.txt,sha256=abc,123"]
+
+ class EntryPoint(object):
+ name = "mytestplugin"
+ dist = Dist()
+
+ def load(self):
+ raise ImportError("Don't hide me!")
+
+ return iter([EntryPoint()])
+
+ monkeypatch.setattr(pkg_resources, "iter_entry_points", my_iter)
+ with pytest.raises(ImportError):
+ testdir.parseconfig()
+
+
+@pytest.mark.parametrize("block_it", [True, False])
+def test_plugin_preparse_prevents_setuptools_loading(testdir, monkeypatch, block_it):
+ pkg_resources = pytest.importorskip("pkg_resources")
+
+ plugin_module_placeholder = object()
+
+ def my_iter(name):
+ assert name == "pytest11"
+
+ class Dist(object):
+ project_name = "spam"
+ version = "1.0"
+
+ def _get_metadata(self, name):
+ return ["foo.txt,sha256=abc,123"]
+
+ class EntryPoint(object):
+ name = "mytestplugin"
+ dist = Dist()
+
+ def load(self):
+ return plugin_module_placeholder
+
+ return iter([EntryPoint()])
+
+ monkeypatch.setattr(pkg_resources, "iter_entry_points", my_iter)
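+    # "-p no:<name>" on the command line blocks the named plugin from loading.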
+ args = ("-p", "no:mytestplugin") if block_it else ()
+ config = testdir.parseconfig(*args)
+ config.pluginmanager.import_plugin("mytestplugin")
+ if block_it:
+ assert "mytestplugin" not in sys.modules
+ assert config.pluginmanager.get_plugin("mytestplugin") is None
+ else:
+ assert config.pluginmanager.get_plugin(
+ "mytestplugin"
+ ) is plugin_module_placeholder
+
+
+def test_cmdline_processargs_simple(testdir):
+ testdir.makeconftest(
+ """
+ def pytest_cmdline_preparse(args):
+ args.append("-h")
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*pytest*", "*-h*"])
+
+
+def test_invalid_options_show_extra_information(testdir):
+    """Display extra information when pytest exits due to unrecognized
+    options on the command line."""
+ testdir.makeini(
+ """
+ [pytest]
+ addopts = --invalid-option
+ """
+ )
+ result = testdir.runpytest()
+ result.stderr.fnmatch_lines(
+ [
+ "*error: unrecognized arguments: --invalid-option*",
+ "* inifile: %s*" % testdir.tmpdir.join("tox.ini"),
+ "* rootdir: %s*" % testdir.tmpdir,
+ ]
+ )
+
+
+@pytest.mark.parametrize(
+ "args",
+ [
+ ["dir1", "dir2", "-v"],
+ ["dir1", "-v", "dir2"],
+ ["dir2", "-v", "dir1"],
+ ["-v", "dir2", "dir1"],
+ ],
+)
+def test_consider_args_after_options_for_rootdir_and_inifile(testdir, args):
+ """
+    Consider all arguments on the command line for rootdir and inifile
+ discovery, even if they happen to occur after an option. #949
+ """
+    # replace "dir1" and "dir2" in "args" with their real directory paths
+ root = testdir.tmpdir.mkdir("myroot")
+ d1 = root.mkdir("dir1")
+ d2 = root.mkdir("dir2")
+ for i, arg in enumerate(args):
+ if arg == "dir1":
+ args[i] = d1
+ elif arg == "dir2":
+ args[i] = d2
+ with root.as_cwd():
+ result = testdir.runpytest(*args)
+ result.stdout.fnmatch_lines(["*rootdir: *myroot, inifile:"])
+
+
+@pytest.mark.skipif("sys.platform == 'win32'")
+def test_toolongargs_issue224(testdir):
+ result = testdir.runpytest("-m", "hello" * 500)
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+
+
+def test_config_in_subdirectory_colon_command_line_issue2148(testdir):
+ conftest_source = """
+ def pytest_addoption(parser):
+ parser.addini('foo', 'foo')
+ """
+
+ testdir.makefile(
+ ".ini",
+ **{"pytest": "[pytest]\nfoo = root", "subdir/pytest": "[pytest]\nfoo = subdir"}
+ )
+
+ testdir.makepyfile(
+ **{
+ "conftest": conftest_source,
+ "subdir/conftest": conftest_source,
+ "subdir/test_foo": """
+ def test_foo(pytestconfig):
+ assert pytestconfig.getini('foo') == 'subdir'
+ """,
+ }
+ )
+
+ result = testdir.runpytest("subdir/test_foo.py::test_foo")
+ assert result.ret == 0
+
+
+def test_notify_exception(testdir, capfd):
+ config = testdir.parseconfig()
+ excinfo = pytest.raises(ValueError, "raise ValueError(1)")
+ config.notify_exception(excinfo)
+ out, err = capfd.readouterr()
+ assert "ValueError" in err
+
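+    # A plugin whose pytest_internalerror hook returns True suppresses the
+    # default traceback report on stderr.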
+ class A(object):
+
+ def pytest_internalerror(self, excrepr):
+ return True
+
+ config.pluginmanager.register(A())
+ config.notify_exception(excinfo)
+ out, err = capfd.readouterr()
+ assert not err
+
+
+def test_load_initial_conftest_last_ordering(testdir):
+ from _pytest.config import get_config
+
+ pm = get_config().pluginmanager
+
+ class My(object):
+
+ def pytest_load_initial_conftests(self):
+ pass
+
+ m = My()
+ pm.register(m)
+ hc = pm.hook.pytest_load_initial_conftests
+ values = hc._nonwrappers + hc._wrappers
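+    # Non-wrapper hook implementations are listed first, followed by wrappers,
+    # so the expected list pins where the freshly registered plugin ends up.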
+ expected = ["_pytest.config", "test_config", "_pytest.capture"]
+ assert [x.function.__module__ for x in values] == expected
+
+
+def test_get_plugin_specs_as_list():
+ from _pytest.config import _get_plugin_specs_as_list
+
+ with pytest.raises(pytest.UsageError):
+ _get_plugin_specs_as_list({"foo"})
+ with pytest.raises(pytest.UsageError):
+ _get_plugin_specs_as_list(dict())
+
+ assert _get_plugin_specs_as_list(None) == []
+ assert _get_plugin_specs_as_list("") == []
+ assert _get_plugin_specs_as_list("foo") == ["foo"]
+ assert _get_plugin_specs_as_list("foo,bar") == ["foo", "bar"]
+ assert _get_plugin_specs_as_list(["foo", "bar"]) == ["foo", "bar"]
+ assert _get_plugin_specs_as_list(("foo", "bar")) == ["foo", "bar"]
+
+
+class TestWarning(object):
+
+ def test_warn_config(self, testdir):
+ testdir.makeconftest(
+ """
+ values = []
+ def pytest_configure(config):
+ config.warn("C1", "hello")
+ def pytest_logwarning(code, message):
+ if message == "hello" and code == "C1":
+ values.append(1)
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_proper(pytestconfig):
+ import conftest
+ assert conftest.values == [1]
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_warn_on_test_item_from_request(self, testdir, request):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def fix(request):
+ request.node.warn("T1", "hello")
+
+ def test_hello(fix):
+ pass
+ """
+ )
+ result = testdir.runpytest("--disable-pytest-warnings")
+ assert result.parseoutcomes()["warnings"] > 0
+ assert "hello" not in result.stdout.str()
+
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ ===*warnings summary*===
+ *test_warn_on_test_item_from_request.py::test_hello*
+ *hello*
+ """
+ )
+
+
+class TestRootdir(object):
+
+ def test_simple_noini(self, tmpdir):
+ assert get_common_ancestor([tmpdir]) == tmpdir
+ a = tmpdir.mkdir("a")
+ assert get_common_ancestor([a, tmpdir]) == tmpdir
+ assert get_common_ancestor([tmpdir, a]) == tmpdir
+ with tmpdir.as_cwd():
+ assert get_common_ancestor([]) == tmpdir
+ no_path = tmpdir.join("does-not-exist")
+ assert get_common_ancestor([no_path]) == tmpdir
+ assert get_common_ancestor([no_path.join("a")]) == tmpdir
+
+ @pytest.mark.parametrize("name", "setup.cfg tox.ini pytest.ini".split())
+ def test_with_ini(self, tmpdir, name):
+ inifile = tmpdir.join(name)
+ inifile.write("[pytest]\n")
+
+ a = tmpdir.mkdir("a")
+ b = a.mkdir("b")
+ for args in ([tmpdir], [a], [b]):
+ rootdir, inifile, inicfg = determine_setup(None, args)
+ assert rootdir == tmpdir
+ assert inifile == inifile
+ rootdir, inifile, inicfg = determine_setup(None, [b, a])
+ assert rootdir == tmpdir
+ assert inifile == inifile
+
+ @pytest.mark.parametrize("name", "setup.cfg tox.ini".split())
+    def test_pytestini_overrides_empty_other(self, tmpdir, name):
+ inifile = tmpdir.ensure("pytest.ini")
+ a = tmpdir.mkdir("a")
+ a.ensure(name)
+ rootdir, inifile, inicfg = determine_setup(None, [a])
+ assert rootdir == tmpdir
+ assert inifile == inifile
+
+ def test_setuppy_fallback(self, tmpdir):
+ a = tmpdir.mkdir("a")
+ a.ensure("setup.cfg")
+ tmpdir.ensure("setup.py")
+ rootdir, inifile, inicfg = determine_setup(None, [a])
+ assert rootdir == tmpdir
+ assert inifile is None
+ assert inicfg == {}
+
+ def test_nothing(self, tmpdir, monkeypatch):
+ monkeypatch.chdir(str(tmpdir))
+ rootdir, inifile, inicfg = determine_setup(None, [tmpdir])
+ assert rootdir == tmpdir
+ assert inifile is None
+ assert inicfg == {}
+
+ def test_with_specific_inifile(self, tmpdir):
+ inifile = tmpdir.ensure("pytest.ini")
+ rootdir, inifile, inicfg = determine_setup(inifile, [tmpdir])
+ assert rootdir == tmpdir
+
+
+class TestOverrideIniArgs(object):
+
+ @pytest.mark.parametrize("name", "setup.cfg tox.ini pytest.ini".split())
+ def test_override_ini_names(self, testdir, name):
+ testdir.tmpdir.join(name).write(
+ textwrap.dedent(
+ """
+ [pytest]
+ custom = 1.0"""
+ )
+ )
+ testdir.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("custom", "")"""
+ )
+ testdir.makepyfile(
+ """
+ def test_pass(pytestconfig):
+ ini_val = pytestconfig.getini("custom")
+ print('\\ncustom_option:%s\\n' % ini_val)"""
+ )
+
+ result = testdir.runpytest("--override-ini", "custom=2.0", "-s")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["custom_option:2.0"])
+
+ result = testdir.runpytest(
+ "--override-ini", "custom=2.0", "--override-ini=custom=3.0", "-s"
+ )
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["custom_option:3.0"])
+
+ def test_override_ini_pathlist(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("paths", "my new ini value", type="pathlist")"""
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ paths=blah.py"""
+ )
+ testdir.makepyfile(
+ """
+ import py.path
+ def test_pathlist(pytestconfig):
+ config_paths = pytestconfig.getini("paths")
+ print(config_paths)
+ for cpf in config_paths:
+ print('\\nuser_path:%s' % cpf.basename)"""
+ )
+ result = testdir.runpytest(
+ "--override-ini", "paths=foo/bar1.py foo/bar2.py", "-s"
+ )
+ result.stdout.fnmatch_lines(["user_path:bar1.py", "user_path:bar2.py"])
+
+ def test_override_multiple_and_default(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_addoption(parser):
+ addini = parser.addini
+ addini("custom_option_1", "", default="o1")
+ addini("custom_option_2", "", default="o2")
+ addini("custom_option_3", "", default=False, type="bool")
+ addini("custom_option_4", "", default=True, type="bool")"""
+ )
+ testdir.makeini(
+ """
+ [pytest]
+ custom_option_1=custom_option_1
+ custom_option_2=custom_option_2
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_multiple_options(pytestconfig):
+ prefix = "custom_option"
+ for x in range(1, 5):
+ ini_value=pytestconfig.getini("%s_%d" % (prefix, x))
+ print('\\nini%d:%s' % (x, ini_value))
+ """
+ )
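+        # "-o" is the short form of "--override-ini"; the overridden values may
+        # themselves contain "=" characters.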
+ result = testdir.runpytest(
+ "--override-ini",
+ "custom_option_1=fulldir=/tmp/user1",
+ "-o",
+ "custom_option_2=url=/tmp/user2?a=b&d=e",
+ "-o",
+ "custom_option_3=True",
+ "-o",
+ "custom_option_4=no",
+ "-s",
+ )
+ result.stdout.fnmatch_lines(
+ [
+ "ini1:fulldir=/tmp/user1",
+ "ini2:url=/tmp/user2?a=b&d=e",
+ "ini3:True",
+ "ini4:False",
+ ]
+ )
+
+ def test_override_ini_usage_error_bad_style(self, testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ xdist_strict=False
+ """
+ )
+ result = testdir.runpytest("--override-ini", "xdist_strict True", "-s")
+ result.stderr.fnmatch_lines(["*ERROR* *expects option=value*"])
+
+ @pytest.mark.parametrize("with_ini", [True, False])
+ def test_override_ini_handled_asap(self, testdir, with_ini):
+ """-o should be handled as soon as possible and always override what's in ini files (#2238)"""
+ if with_ini:
+ testdir.makeini(
+ """
+ [pytest]
+ python_files=test_*.py
+ """
+ )
+ testdir.makepyfile(
+ unittest_ini_handle="""
+ def test():
+ pass
+ """
+ )
+ result = testdir.runpytest("--override-ini", "python_files=unittest_*.py")
+ result.stdout.fnmatch_lines(["*1 passed in*"])
+
+ def test_with_arg_outside_cwd_without_inifile(self, tmpdir, monkeypatch):
+ monkeypatch.chdir(str(tmpdir))
+ a = tmpdir.mkdir("a")
+ b = tmpdir.mkdir("b")
+ rootdir, inifile, inicfg = determine_setup(None, [a, b])
+ assert rootdir == tmpdir
+ assert inifile is None
+
+ def test_with_arg_outside_cwd_with_inifile(self, tmpdir):
+ a = tmpdir.mkdir("a")
+ b = tmpdir.mkdir("b")
+ inifile = a.ensure("pytest.ini")
+ rootdir, parsed_inifile, inicfg = determine_setup(None, [a, b])
+ assert rootdir == a
+ assert inifile == parsed_inifile
+
+ @pytest.mark.parametrize("dirs", ([], ["does-not-exist"], ["a/does-not-exist"]))
+ def test_with_non_dir_arg(self, dirs, tmpdir):
+ with tmpdir.ensure(dir=True).as_cwd():
+ rootdir, inifile, inicfg = determine_setup(None, dirs)
+ assert rootdir == tmpdir
+ assert inifile is None
+
+ def test_with_existing_file_in_subdir(self, tmpdir):
+ a = tmpdir.mkdir("a")
+ a.ensure("exist")
+ with tmpdir.as_cwd():
+ rootdir, inifile, inicfg = determine_setup(None, ["a/exist"])
+ assert rootdir == tmpdir
+ assert inifile is None
+
+ def test_addopts_before_initini(self, monkeypatch):
+ cache_dir = ".custom_cache"
+ monkeypatch.setenv("PYTEST_ADDOPTS", "-o cache_dir=%s" % cache_dir)
+ from _pytest.config import get_config
+
+ config = get_config()
+ config._preparse([], addopts=True)
+ assert config._override_ini == ["cache_dir=%s" % cache_dir]
+
+ def test_override_ini_does_not_contain_paths(self):
+ """Check that -o no longer swallows all options after it (#3103)"""
+ from _pytest.config import get_config
+
+ config = get_config()
+ config._preparse(["-o", "cache_dir=/cache", "/some/test/path"])
+ assert config._override_ini == ["cache_dir=/cache"]
+
+ def test_multiple_override_ini_options(self, testdir, request):
+ """Ensure a file path following a '-o' option does not generate an error (#3103)"""
+ testdir.makepyfile(
+ **{
+ "conftest.py": """
+ def pytest_addoption(parser):
+ parser.addini('foo', default=None, help='some option')
+ parser.addini('bar', default=None, help='some option')
+ """,
+ "test_foo.py": """
+ def test(pytestconfig):
+ assert pytestconfig.getini('foo') == '1'
+ assert pytestconfig.getini('bar') == '0'
+ """,
+ "test_bar.py": """
+ def test():
+ assert False
+ """,
+ }
+ )
+ result = testdir.runpytest("-o", "foo=1", "-o", "bar=0", "test_foo.py")
+ assert "ERROR:" not in result.stderr.str()
+ result.stdout.fnmatch_lines(["collected 1 item", "*= 1 passed in *="])
diff --git a/third_party/python/pytest/testing/test_conftest.py b/third_party/python/pytest/testing/test_conftest.py
new file mode 100644
index 0000000000..61b640976f
--- /dev/null
+++ b/third_party/python/pytest/testing/test_conftest.py
@@ -0,0 +1,543 @@
+from __future__ import absolute_import, division, print_function
+from textwrap import dedent
+
+import _pytest._code
+import py
+import pytest
+from _pytest.config import PytestPluginManager
+from _pytest.main import EXIT_NOTESTSCOLLECTED, EXIT_USAGEERROR
+
+
+@pytest.fixture(scope="module", params=["global", "inpackage"])
+def basedir(request, tmpdir_factory):
+ from _pytest.tmpdir import tmpdir
+
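+    # Build a small tree with nested conftest.py files; the "inpackage" param
+    # additionally turns the directories into Python packages.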
+ tmpdir = tmpdir(request, tmpdir_factory)
+ tmpdir.ensure("adir/conftest.py").write("a=1 ; Directory = 3")
+ tmpdir.ensure("adir/b/conftest.py").write("b=2 ; a = 1.5")
+ if request.param == "inpackage":
+ tmpdir.ensure("adir/__init__.py")
+ tmpdir.ensure("adir/b/__init__.py")
+ return tmpdir
+
+
+def ConftestWithSetinitial(path):
+ conftest = PytestPluginManager()
+ conftest_setinitial(conftest, [path])
+ return conftest
+
+
+def conftest_setinitial(conftest, args, confcutdir=None):
+
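+    # Minimal stand-in for the parsed command-line namespace, providing only
+    # the attributes _set_initial_conftests() reads here.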
+ class Namespace(object):
+
+ def __init__(self):
+ self.file_or_dir = args
+ self.confcutdir = str(confcutdir)
+ self.noconftest = False
+
+ conftest._set_initial_conftests(Namespace())
+
+
+class TestConftestValueAccessGlobal(object):
+
+ def test_basic_init(self, basedir):
+ conftest = PytestPluginManager()
+ p = basedir.join("adir")
+ assert conftest._rget_with_confmod("a", p)[1] == 1
+
+    def test_immediate_initialization_and_incremental_are_the_same(self, basedir):
+ conftest = PytestPluginManager()
+ len(conftest._path2confmods)
+ conftest._getconftestmodules(basedir)
+ snap1 = len(conftest._path2confmods)
+ # assert len(conftest._path2confmods) == snap1 + 1
+ conftest._getconftestmodules(basedir.join("adir"))
+ assert len(conftest._path2confmods) == snap1 + 1
+ conftest._getconftestmodules(basedir.join("b"))
+ assert len(conftest._path2confmods) == snap1 + 2
+
+ def test_value_access_not_existing(self, basedir):
+ conftest = ConftestWithSetinitial(basedir)
+ with pytest.raises(KeyError):
+ conftest._rget_with_confmod("a", basedir)
+
+ def test_value_access_by_path(self, basedir):
+ conftest = ConftestWithSetinitial(basedir)
+ adir = basedir.join("adir")
+ assert conftest._rget_with_confmod("a", adir)[1] == 1
+ assert conftest._rget_with_confmod("a", adir.join("b"))[1] == 1.5
+
+ def test_value_access_with_confmod(self, basedir):
+ startdir = basedir.join("adir", "b")
+ startdir.ensure("xx", dir=True)
+ conftest = ConftestWithSetinitial(startdir)
+ mod, value = conftest._rget_with_confmod("a", startdir)
+ assert value == 1.5
+ path = py.path.local(mod.__file__)
+ assert path.dirpath() == basedir.join("adir", "b")
+ assert path.purebasename.startswith("conftest")
+
+
+def test_conftest_in_nonpkg_with_init(tmpdir):
+ tmpdir.ensure("adir-1.0/conftest.py").write("a=1 ; Directory = 3")
+ tmpdir.ensure("adir-1.0/b/conftest.py").write("b=2 ; a = 1.5")
+ tmpdir.ensure("adir-1.0/b/__init__.py")
+ tmpdir.ensure("adir-1.0/__init__.py")
+ ConftestWithSetinitial(tmpdir.join("adir-1.0", "b"))
+
+
+def test_doubledash_considered(testdir):
+ conf = testdir.mkdir("--option")
+ conf.ensure("conftest.py")
+ conftest = PytestPluginManager()
+ conftest_setinitial(conftest, [conf.basename, conf.basename])
+ values = conftest._getconftestmodules(conf)
+ assert len(values) == 1
+
+
+def test_issue151_load_all_conftests(testdir):
+ names = "code proj src".split()
+ for name in names:
+ p = testdir.mkdir(name)
+ p.ensure("conftest.py")
+
+ conftest = PytestPluginManager()
+ conftest_setinitial(conftest, names)
+ d = list(conftest._conftestpath2mod.values())
+ assert len(d) == len(names)
+
+
+def test_conftest_global_import(testdir):
+ testdir.makeconftest("x=3")
+ p = testdir.makepyfile(
+ """
+ import py, pytest
+ from _pytest.config import PytestPluginManager
+ conf = PytestPluginManager()
+ mod = conf._importconftest(py.path.local("conftest.py"))
+ assert mod.x == 3
+ import conftest
+ assert conftest is mod, (conftest, mod)
+ subconf = py.path.local().ensure("sub", "conftest.py")
+ subconf.write("y=4")
+ mod2 = conf._importconftest(subconf)
+ assert mod != mod2
+ assert mod2.y == 4
+ import conftest
+ assert conftest is mod2, (conftest, mod)
+ """
+ )
+ res = testdir.runpython(p)
+ assert res.ret == 0
+
+
+def test_conftestcutdir(testdir):
+ conf = testdir.makeconftest("")
+ p = testdir.mkdir("x")
+ conftest = PytestPluginManager()
+ conftest_setinitial(conftest, [testdir.tmpdir], confcutdir=p)
+ values = conftest._getconftestmodules(p)
+ assert len(values) == 0
+ values = conftest._getconftestmodules(conf.dirpath())
+ assert len(values) == 0
+ assert conf not in conftest._conftestpath2mod
+ # but we can still import a conftest directly
+ conftest._importconftest(conf)
+ values = conftest._getconftestmodules(conf.dirpath())
+ assert values[0].__file__.startswith(str(conf))
+ # and all sub paths get updated properly
+ values = conftest._getconftestmodules(p)
+ assert len(values) == 1
+ assert values[0].__file__.startswith(str(conf))
+
+
+def test_conftestcutdir_inplace_considered(testdir):
+ conf = testdir.makeconftest("")
+ conftest = PytestPluginManager()
+ conftest_setinitial(conftest, [conf.dirpath()], confcutdir=conf.dirpath())
+ values = conftest._getconftestmodules(conf.dirpath())
+ assert len(values) == 1
+ assert values[0].__file__.startswith(str(conf))
+
+
+@pytest.mark.parametrize("name", "test tests whatever .dotdir".split())
+def test_setinitial_conftest_subdirs(testdir, name):
+ sub = testdir.mkdir(name)
+ subconftest = sub.ensure("conftest.py")
+ conftest = PytestPluginManager()
+ conftest_setinitial(conftest, [sub.dirpath()], confcutdir=testdir.tmpdir)
+ if name not in ("whatever", ".dotdir"):
+ assert subconftest in conftest._conftestpath2mod
+ assert len(conftest._conftestpath2mod) == 1
+ else:
+ assert subconftest not in conftest._conftestpath2mod
+ assert len(conftest._conftestpath2mod) == 0
+
+
+def test_conftest_confcutdir(testdir):
+ testdir.makeconftest("assert 0")
+ x = testdir.mkdir("x")
+ x.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ def pytest_addoption(parser):
+ parser.addoption("--xyz", action="store_true")
+ """
+ )
+ )
+ result = testdir.runpytest("-h", "--confcutdir=%s" % x, x)
+ result.stdout.fnmatch_lines(["*--xyz*"])
+ assert "warning: could not load initial" not in result.stdout.str()
+
+
+def test_no_conftest(testdir):
+ testdir.makeconftest("assert 0")
+ result = testdir.runpytest("--noconftest")
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+
+ result = testdir.runpytest()
+ assert result.ret == EXIT_USAGEERROR
+
+
+def test_conftest_existing_resultlog(testdir):
+ x = testdir.mkdir("tests")
+ x.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ def pytest_addoption(parser):
+ parser.addoption("--xyz", action="store_true")
+ """
+ )
+ )
+ testdir.makefile(ext=".log", result="") # Writes result.log
+ result = testdir.runpytest("-h", "--resultlog", "result.log")
+ result.stdout.fnmatch_lines(["*--xyz*"])
+
+
+def test_conftest_existing_junitxml(testdir):
+ x = testdir.mkdir("tests")
+ x.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ def pytest_addoption(parser):
+ parser.addoption("--xyz", action="store_true")
+ """
+ )
+ )
+ testdir.makefile(ext=".xml", junit="") # Writes junit.xml
+ result = testdir.runpytest("-h", "--junitxml", "junit.xml")
+ result.stdout.fnmatch_lines(["*--xyz*"])
+
+
+def test_conftest_import_order(testdir, monkeypatch):
+ ct1 = testdir.makeconftest("")
+ sub = testdir.mkdir("sub")
+ ct2 = sub.join("conftest.py")
+ ct2.write("")
+
+ def impct(p):
+ return p
+
+ conftest = PytestPluginManager()
+ conftest._confcutdir = testdir.tmpdir
+ monkeypatch.setattr(conftest, "_importconftest", impct)
+ assert conftest._getconftestmodules(sub) == [ct1, ct2]
+
+
+def test_fixture_dependency(testdir, monkeypatch):
+ ct1 = testdir.makeconftest("")
+ ct1 = testdir.makepyfile("__init__.py")
+ ct1.write("")
+ sub = testdir.mkdir("sub")
+ sub.join("__init__.py").write("")
+ sub.join("conftest.py").write(
+ dedent(
+ """
+ import pytest
+
+ @pytest.fixture
+ def not_needed():
+ assert False, "Should not be called!"
+
+ @pytest.fixture
+ def foo():
+ assert False, "Should not be called!"
+
+ @pytest.fixture
+ def bar(foo):
+ return 'bar'
+ """
+ )
+ )
+ subsub = sub.mkdir("subsub")
+ subsub.join("__init__.py").write("")
+ subsub.join("test_bar.py").write(
+ dedent(
+ """
+ import pytest
+
+ @pytest.fixture
+ def bar():
+ return 'sub bar'
+
+ def test_event_fixture(bar):
+ assert bar == 'sub bar'
+ """
+ )
+ )
+ result = testdir.runpytest("sub")
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_conftest_found_with_double_dash(testdir):
+ sub = testdir.mkdir("sub")
+ sub.join("conftest.py").write(
+ dedent(
+ """
+ def pytest_addoption(parser):
+ parser.addoption("--hello-world", action="store_true")
+ """
+ )
+ )
+ p = sub.join("test_hello.py")
+ p.write("def test_hello(): pass")
+ result = testdir.runpytest(str(p) + "::test_hello", "-h")
+ result.stdout.fnmatch_lines(
+ """
+ *--hello-world*
+ """
+ )
+
+
+class TestConftestVisibility(object):
+
+ def _setup_tree(self, testdir): # for issue616
+ # example mostly taken from:
+ # https://mail.python.org/pipermail/pytest-dev/2014-September/002617.html
+ runner = testdir.mkdir("empty")
+ package = testdir.mkdir("package")
+
+ package.join("conftest.py").write(
+ dedent(
+ """\
+ import pytest
+ @pytest.fixture
+ def fxtr():
+ return "from-package"
+ """
+ )
+ )
+ package.join("test_pkgroot.py").write(
+ dedent(
+ """\
+ def test_pkgroot(fxtr):
+ assert fxtr == "from-package"
+ """
+ )
+ )
+
+ swc = package.mkdir("swc")
+ swc.join("__init__.py").ensure()
+ swc.join("conftest.py").write(
+ dedent(
+ """\
+ import pytest
+ @pytest.fixture
+ def fxtr():
+ return "from-swc"
+ """
+ )
+ )
+ swc.join("test_with_conftest.py").write(
+ dedent(
+ """\
+ def test_with_conftest(fxtr):
+ assert fxtr == "from-swc"
+
+ """
+ )
+ )
+
+ snc = package.mkdir("snc")
+ snc.join("__init__.py").ensure()
+ snc.join("test_no_conftest.py").write(
+ dedent(
+ """\
+ def test_no_conftest(fxtr):
+ assert fxtr == "from-package" # No local conftest.py, so should
+                                                   # use value from the parent dir's conftest.py
+
+ """
+ )
+ )
+ print("created directory structure:")
+ for x in testdir.tmpdir.visit():
+ print(" " + x.relto(testdir.tmpdir))
+
+ return {"runner": runner, "package": package, "swc": swc, "snc": snc}
+
+ # N.B.: "swc" stands for "subdir with conftest.py"
+ # "snc" stands for "subdir no [i.e. without] conftest.py"
+ @pytest.mark.parametrize(
+ "chdir,testarg,expect_ntests_passed",
+ [
+ # Effective target: package/..
+ ("runner", "..", 3),
+ ("package", "..", 3),
+ ("swc", "../..", 3),
+ ("snc", "../..", 3),
+ # Effective target: package
+ ("runner", "../package", 3),
+ ("package", ".", 3),
+ ("swc", "..", 3),
+ ("snc", "..", 3),
+ # Effective target: package/swc
+ ("runner", "../package/swc", 1),
+ ("package", "./swc", 1),
+ ("swc", ".", 1),
+ ("snc", "../swc", 1),
+ # Effective target: package/snc
+ ("runner", "../package/snc", 1),
+ ("package", "./snc", 1),
+ ("swc", "../snc", 1),
+ ("snc", ".", 1),
+ ],
+ )
+ @pytest.mark.issue616
+ def test_parsefactories_relative_node_ids(
+ self, testdir, chdir, testarg, expect_ntests_passed
+ ):
+ dirs = self._setup_tree(testdir)
+ print("pytest run in cwd: %s" % (dirs[chdir].relto(testdir.tmpdir)))
+ print("pytestarg : %s" % (testarg))
+ print("expected pass : %s" % (expect_ntests_passed))
+ with dirs[chdir].as_cwd():
+ reprec = testdir.inline_run(testarg, "-q", "--traceconfig")
+ reprec.assertoutcome(passed=expect_ntests_passed)
+
+
+@pytest.mark.parametrize(
+ "confcutdir,passed,error", [(".", 2, 0), ("src", 1, 1), (None, 1, 1)]
+)
+def test_search_conftest_up_to_inifile(testdir, confcutdir, passed, error):
+ """Test that conftest files are detected only up to an ini file, unless
+ an explicit --confcutdir option is given.
+ """
+ root = testdir.tmpdir
+ src = root.join("src").ensure(dir=1)
+ src.join("pytest.ini").write("[pytest]")
+ src.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+ @pytest.fixture
+ def fix1(): pass
+ """
+ )
+ )
+ src.join("test_foo.py").write(
+ _pytest._code.Source(
+ """
+ def test_1(fix1):
+ pass
+ def test_2(out_of_reach):
+ pass
+ """
+ )
+ )
+ root.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ import pytest
+ @pytest.fixture
+ def out_of_reach(): pass
+ """
+ )
+ )
+
+ args = [str(src)]
+ if confcutdir:
+ args = ["--confcutdir=%s" % root.join(confcutdir)]
+ result = testdir.runpytest(*args)
+ match = ""
+ if passed:
+ match += "*%d passed*" % passed
+ if error:
+ match += "*%d error*" % error
+ result.stdout.fnmatch_lines(match)
+
+
+def test_issue1073_conftest_special_objects(testdir):
+ testdir.makeconftest(
+ """
+ class DontTouchMe(object):
+ def __getattr__(self, x):
+ raise Exception('cant touch me')
+
+ x = DontTouchMe()
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_some():
+ pass
+ """
+ )
+ res = testdir.runpytest()
+ assert res.ret == 0
+
+
+def test_conftest_exception_handling(testdir):
+ testdir.makeconftest(
+ """
+ raise ValueError()
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_some():
+ pass
+ """
+ )
+ res = testdir.runpytest()
+ assert res.ret == 4
+ assert "raise ValueError()" in [line.strip() for line in res.errlines]
+
+
+def test_hook_proxy(testdir):
+ """Session's gethookproxy() would cache conftests incorrectly (#2016).
+ It was decided to remove the cache altogether.
+ """
+ testdir.makepyfile(
+ **{
+ "root/demo-0/test_foo1.py": "def test1(): pass",
+ "root/demo-a/test_foo2.py": "def test1(): pass",
+ "root/demo-a/conftest.py": """
+ def pytest_ignore_collect(path, config):
+ return True
+ """,
+ "root/demo-b/test_foo3.py": "def test1(): pass",
+ "root/demo-c/test_foo4.py": "def test1(): pass",
+ }
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*test_foo1.py*", "*test_foo3.py*", "*test_foo4.py*", "*3 passed*"]
+ )
+
+
+def test_required_option_help(testdir):
+ testdir.makeconftest("assert 0")
+ x = testdir.mkdir("x")
+ x.join("conftest.py").write(
+ _pytest._code.Source(
+ """
+ def pytest_addoption(parser):
+ parser.addoption("--xyz", action="store_true", required=True)
+ """
+ )
+ )
+ result = testdir.runpytest("-h", x)
+ assert "argument --xyz is required" not in result.stdout.str()
+ assert "general:" in result.stdout.str()
diff --git a/third_party/python/pytest/testing/test_doctest.py b/third_party/python/pytest/testing/test_doctest.py
new file mode 100644
index 0000000000..7f3aff3b0e
--- /dev/null
+++ b/third_party/python/pytest/testing/test_doctest.py
@@ -0,0 +1,1206 @@
+# encoding: utf-8
+from __future__ import absolute_import, division, print_function
+import sys
+import _pytest._code
+from _pytest.compat import MODULE_NOT_FOUND_ERROR
+from _pytest.doctest import DoctestItem, DoctestModule, DoctestTextfile
+import pytest
+
+
+class TestDoctests(object):
+
+ def test_collect_testtextfile(self, testdir):
+ w = testdir.maketxtfile(whatever="")
+ checkfile = testdir.maketxtfile(
+ test_something="""
+ alskdjalsdk
+ >>> i = 5
+ >>> i-1
+ 4
+ """
+ )
+
+ for x in (testdir.tmpdir, checkfile):
+ # print "checking that %s returns custom items" % (x,)
+ items, reprec = testdir.inline_genitems(x)
+ assert len(items) == 1
+ assert isinstance(items[0], DoctestItem)
+ assert isinstance(items[0].parent, DoctestTextfile)
+ # Empty file has no items.
+ items, reprec = testdir.inline_genitems(w)
+ assert len(items) == 0
+
+ def test_collect_module_empty(self, testdir):
+ path = testdir.makepyfile(whatever="#")
+ for p in (path, testdir.tmpdir):
+ items, reprec = testdir.inline_genitems(p, "--doctest-modules")
+ assert len(items) == 0
+
+ def test_collect_module_single_modulelevel_doctest(self, testdir):
+ path = testdir.makepyfile(whatever='""">>> pass"""')
+ for p in (path, testdir.tmpdir):
+ items, reprec = testdir.inline_genitems(p, "--doctest-modules")
+ assert len(items) == 1
+ assert isinstance(items[0], DoctestItem)
+ assert isinstance(items[0].parent, DoctestModule)
+
+ def test_collect_module_two_doctest_one_modulelevel(self, testdir):
+ path = testdir.makepyfile(
+ whatever="""
+ '>>> x = None'
+ def my_func():
+ ">>> magic = 42 "
+ """
+ )
+ for p in (path, testdir.tmpdir):
+ items, reprec = testdir.inline_genitems(p, "--doctest-modules")
+ assert len(items) == 2
+ assert isinstance(items[0], DoctestItem)
+ assert isinstance(items[1], DoctestItem)
+ assert isinstance(items[0].parent, DoctestModule)
+ assert items[0].parent is items[1].parent
+
+ def test_collect_module_two_doctest_no_modulelevel(self, testdir):
+ path = testdir.makepyfile(
+ whatever="""
+ '# Empty'
+ def my_func():
+ ">>> magic = 42 "
+ def unuseful():
+ '''
+ # This is a function
+ # >>> # it doesn't have any doctest
+ '''
+ def another():
+ '''
+ # This is another function
+ >>> import os # this one does have a doctest
+ '''
+ """
+ )
+ for p in (path, testdir.tmpdir):
+ items, reprec = testdir.inline_genitems(p, "--doctest-modules")
+ assert len(items) == 2
+ assert isinstance(items[0], DoctestItem)
+ assert isinstance(items[1], DoctestItem)
+ assert isinstance(items[0].parent, DoctestModule)
+ assert items[0].parent is items[1].parent
+
+ def test_simple_doctestfile(self, testdir):
+ p = testdir.maketxtfile(
+ test_doc="""
+ >>> x = 1
+ >>> x == 1
+ False
+ """
+ )
+ reprec = testdir.inline_run(p)
+ reprec.assertoutcome(failed=1)
+
+ def test_new_pattern(self, testdir):
+ p = testdir.maketxtfile(
+ xdoc="""
+ >>> x = 1
+ >>> x == 1
+ False
+ """
+ )
+ reprec = testdir.inline_run(p, "--doctest-glob=x*.txt")
+ reprec.assertoutcome(failed=1)
+
+ def test_multiple_patterns(self, testdir):
+ """Test support for multiple --doctest-glob arguments (#1255).
+ """
+ testdir.maketxtfile(
+ xdoc="""
+ >>> 1
+ 1
+ """
+ )
+ testdir.makefile(
+ ".foo",
+ test="""
+ >>> 1
+ 1
+ """,
+ )
+ testdir.maketxtfile(
+ test_normal="""
+ >>> 1
+ 1
+ """
+ )
+ expected = {"xdoc.txt", "test.foo", "test_normal.txt"}
+ assert {x.basename for x in testdir.tmpdir.listdir()} == expected
+ args = ["--doctest-glob=xdoc*.txt", "--doctest-glob=*.foo"]
+ result = testdir.runpytest(*args)
+ result.stdout.fnmatch_lines(["*test.foo *", "*xdoc.txt *", "*2 passed*"])
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*test_normal.txt *", "*1 passed*"])
+
+ @pytest.mark.parametrize(
+ " test_string, encoding",
+ [(u"foo", "ascii"), (u"öäü", "latin1"), (u"öäü", "utf-8")],
+ )
+ def test_encoding(self, testdir, test_string, encoding):
+ """Test support for doctest_encoding ini option.
+ """
+ testdir.makeini(
+ """
+ [pytest]
+ doctest_encoding={}
+ """.format(
+ encoding
+ )
+ )
+ doctest = u"""
+ >>> u"{}"
+ {}
+ """.format(
+ test_string, repr(test_string)
+ )
+ testdir._makefile(".txt", [doctest], {}, encoding=encoding)
+
+ result = testdir.runpytest()
+
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_doctest_unexpected_exception(self, testdir):
+ testdir.maketxtfile(
+ """
+ >>> i = 0
+ >>> 0 / i
+ 2
+ """
+ )
+ result = testdir.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(
+ [
+ "*unexpected_exception*",
+ "*>>> i = 0*",
+ "*>>> 0 / i*",
+ "*UNEXPECTED*ZeroDivision*",
+ ]
+ )
+
+ def test_docstring_partial_context_around_error(self, testdir):
+ """Test that we show some context before the actual line of a failing
+ doctest.
+ """
+ testdir.makepyfile(
+ '''
+ def foo():
+ """
+ text-line-1
+ text-line-2
+ text-line-3
+ text-line-4
+ text-line-5
+ text-line-6
+ text-line-7
+ text-line-8
+ text-line-9
+ text-line-10
+ text-line-11
+ >>> 1 + 1
+ 3
+
+ text-line-after
+ """
+ '''
+ )
+ result = testdir.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(
+ [
+ "*docstring_partial_context_around_error*",
+ "005*text-line-3",
+ "006*text-line-4",
+ "013*text-line-11",
+ "014*>>> 1 + 1",
+ "Expected:",
+ " 3",
+ "Got:",
+ " 2",
+ ]
+ )
+ # lines below should be trimmed out
+ assert "text-line-2" not in result.stdout.str()
+ assert "text-line-after" not in result.stdout.str()
+
+ def test_docstring_full_context_around_error(self, testdir):
+ """Test that we show the whole context before the actual line of a failing
+ doctest, provided that the context is up to 10 lines long.
+ """
+ testdir.makepyfile(
+ '''
+ def foo():
+ """
+ text-line-1
+ text-line-2
+
+ >>> 1 + 1
+ 3
+ """
+ '''
+ )
+ result = testdir.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(
+ [
+ "*docstring_full_context_around_error*",
+ "003*text-line-1",
+ "004*text-line-2",
+ "006*>>> 1 + 1",
+ "Expected:",
+ " 3",
+ "Got:",
+ " 2",
+ ]
+ )
+
+ def test_doctest_linedata_missing(self, testdir):
+ testdir.tmpdir.join("hello.py").write(
+ _pytest._code.Source(
+ """
+ class Fun(object):
+ @property
+ def test(self):
+ '''
+ >>> a = 1
+ >>> 1/0
+ '''
+ """
+ )
+ )
+ result = testdir.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(
+ [
+ "*hello*",
+ "*EXAMPLE LOCATION UNKNOWN, not showing all tests of that example*",
+ "*1/0*",
+ "*UNEXPECTED*ZeroDivision*",
+ "*1 failed*",
+ ]
+ )
+
+ def test_doctest_unex_importerror_only_txt(self, testdir):
+ testdir.maketxtfile(
+ """
+ >>> import asdalsdkjaslkdjasd
+ >>>
+ """
+ )
+ result = testdir.runpytest()
+        # the failing import inside the doctest is reported as an unexpected exception
+ result.stdout.fnmatch_lines(
+ [
+ "*>>> import asdals*",
+ "*UNEXPECTED*{e}*".format(e=MODULE_NOT_FOUND_ERROR),
+ "{e}: No module named *asdal*".format(e=MODULE_NOT_FOUND_ERROR),
+ ]
+ )
+
+ def test_doctest_unex_importerror_with_module(self, testdir):
+ testdir.tmpdir.join("hello.py").write(
+ _pytest._code.Source(
+ """
+ import asdalsdkjaslkdjasd
+ """
+ )
+ )
+ testdir.maketxtfile(
+ """
+ >>> import hello
+ >>>
+ """
+ )
+ result = testdir.runpytest("--doctest-modules")
+ # doctest is never executed because of error during hello.py collection
+ result.stdout.fnmatch_lines(
+ [
+ "*ERROR collecting hello.py*",
+ "*{e}: No module named *asdals*".format(e=MODULE_NOT_FOUND_ERROR),
+ "*Interrupted: 1 errors during collection*",
+ ]
+ )
+
+ def test_doctestmodule(self, testdir):
+ p = testdir.makepyfile(
+ """
+ '''
+ >>> x = 1
+ >>> x == 1
+ False
+
+ '''
+ """
+ )
+ reprec = testdir.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(failed=1)
+
+ def test_doctestmodule_external_and_issue116(self, testdir):
+ p = testdir.mkpydir("hello")
+ p.join("__init__.py").write(
+ _pytest._code.Source(
+ """
+ def somefunc():
+ '''
+ >>> i = 0
+ >>> i + 1
+ 2
+ '''
+ """
+ )
+ )
+ result = testdir.runpytest(p, "--doctest-modules")
+ result.stdout.fnmatch_lines(
+ [
+ "004 *>>> i = 0",
+ "005 *>>> i + 1",
+ "*Expected:",
+ "* 2",
+ "*Got:",
+ "* 1",
+ "*:5: DocTestFailure",
+ ]
+ )
+
+ def test_txtfile_failing(self, testdir):
+ p = testdir.maketxtfile(
+ """
+ >>> i = 0
+ >>> i + 1
+ 2
+ """
+ )
+ result = testdir.runpytest(p, "-s")
+ result.stdout.fnmatch_lines(
+ [
+ "001 >>> i = 0",
+ "002 >>> i + 1",
+ "Expected:",
+ " 2",
+ "Got:",
+ " 1",
+ "*test_txtfile_failing.txt:2: DocTestFailure",
+ ]
+ )
+
+ def test_txtfile_with_fixtures(self, testdir):
+ p = testdir.maketxtfile(
+ """
+ >>> dir = getfixture('tmpdir')
+ >>> type(dir).__name__
+ 'LocalPath'
+ """
+ )
+ reprec = testdir.inline_run(p)
+ reprec.assertoutcome(passed=1)
+
+ def test_txtfile_with_usefixtures_in_ini(self, testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ usefixtures = myfixture
+ """
+ )
+ testdir.makeconftest(
+ """
+ import pytest
+ @pytest.fixture
+ def myfixture(monkeypatch):
+ monkeypatch.setenv("HELLO", "WORLD")
+ """
+ )
+
+ p = testdir.maketxtfile(
+ """
+ >>> import os
+ >>> os.environ["HELLO"]
+ 'WORLD'
+ """
+ )
+ reprec = testdir.inline_run(p)
+ reprec.assertoutcome(passed=1)
+
+ def test_doctestmodule_with_fixtures(self, testdir):
+ p = testdir.makepyfile(
+ """
+ '''
+ >>> dir = getfixture('tmpdir')
+ >>> type(dir).__name__
+ 'LocalPath'
+ '''
+ """
+ )
+ reprec = testdir.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(passed=1)
+
+ def test_doctestmodule_three_tests(self, testdir):
+ p = testdir.makepyfile(
+ """
+ '''
+ >>> dir = getfixture('tmpdir')
+ >>> type(dir).__name__
+ 'LocalPath'
+ '''
+ def my_func():
+ '''
+ >>> magic = 42
+ >>> magic - 42
+ 0
+ '''
+ def unuseful():
+ pass
+ def another():
+ '''
+ >>> import os
+ >>> os is os
+ True
+ '''
+ """
+ )
+ reprec = testdir.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(passed=3)
+
+ def test_doctestmodule_two_tests_one_fail(self, testdir):
+ p = testdir.makepyfile(
+ """
+ class MyClass(object):
+ def bad_meth(self):
+ '''
+ >>> magic = 42
+ >>> magic
+ 0
+ '''
+ def nice_meth(self):
+ '''
+ >>> magic = 42
+ >>> magic - 42
+ 0
+ '''
+ """
+ )
+ reprec = testdir.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(failed=1, passed=1)
+
+ def test_ignored_whitespace(self, testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ doctest_optionflags = ELLIPSIS NORMALIZE_WHITESPACE
+ """
+ )
+ p = testdir.makepyfile(
+ """
+ class MyClass(object):
+ '''
+ >>> a = "foo "
+ >>> print(a)
+ foo
+ '''
+ pass
+ """
+ )
+ reprec = testdir.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(passed=1)
+
+ def test_non_ignored_whitespace(self, testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ doctest_optionflags = ELLIPSIS
+ """
+ )
+ p = testdir.makepyfile(
+ """
+ class MyClass(object):
+ '''
+ >>> a = "foo "
+ >>> print(a)
+ foo
+ '''
+ pass
+ """
+ )
+ reprec = testdir.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(failed=1, passed=0)
+
+ def test_ignored_whitespace_glob(self, testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ doctest_optionflags = ELLIPSIS NORMALIZE_WHITESPACE
+ """
+ )
+ p = testdir.maketxtfile(
+ xdoc="""
+ >>> a = "foo "
+ >>> print(a)
+ foo
+ """
+ )
+ reprec = testdir.inline_run(p, "--doctest-glob=x*.txt")
+ reprec.assertoutcome(passed=1)
+
+ def test_non_ignored_whitespace_glob(self, testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ doctest_optionflags = ELLIPSIS
+ """
+ )
+ p = testdir.maketxtfile(
+ xdoc="""
+ >>> a = "foo "
+ >>> print(a)
+ foo
+ """
+ )
+ reprec = testdir.inline_run(p, "--doctest-glob=x*.txt")
+ reprec.assertoutcome(failed=1, passed=0)
+
+ def test_contains_unicode(self, testdir):
+ """Fix internal error with docstrings containing non-ascii characters.
+ """
+ testdir.makepyfile(
+ u'''
+ # encoding: utf-8
+ def foo():
+ """
+ >>> name = 'с' # not letter 'c' but instead Cyrillic 's'.
+ 'anything'
+ """
+ '''
+ )
+ result = testdir.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(["Got nothing", "* 1 failed in*"])
+
+ def test_ignore_import_errors_on_doctest(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import asdf
+
+ def add_one(x):
+ '''
+ >>> add_one(1)
+ 2
+ '''
+ return x + 1
+ """
+ )
+
+ reprec = testdir.inline_run(
+ p, "--doctest-modules", "--doctest-ignore-import-errors"
+ )
+ reprec.assertoutcome(skipped=1, failed=1, passed=0)
+
+ def test_junit_report_for_doctest(self, testdir):
+ """
+ #713: Fix --junit-xml option when used with --doctest-modules.
+ """
+ p = testdir.makepyfile(
+ """
+ def foo():
+ '''
+ >>> 1 + 1
+ 3
+ '''
+ pass
+ """
+ )
+ reprec = testdir.inline_run(p, "--doctest-modules", "--junit-xml=junit.xml")
+ reprec.assertoutcome(failed=1)
+
+ def test_unicode_doctest(self, testdir):
+ """
+ Test case for issue 2434: DecodeError on Python 2 when doctest contains non-ascii
+ characters.
+ """
+ p = testdir.maketxtfile(
+ test_unicode_doctest="""
+ .. doctest::
+
+ >>> print(
+ ... "Hi\\n\\nByé")
+ Hi
+ ...
+ Byé
+ >>> 1/0 # Byé
+ 1
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(
+ ["*UNEXPECTED EXCEPTION: ZeroDivisionError*", "*1 failed*"]
+ )
+
+ def test_unicode_doctest_module(self, testdir):
+ """
+ Test case for issue 2434: DecodeError on Python 2 when doctest docstring
+ contains non-ascii characters.
+ """
+ p = testdir.makepyfile(
+ test_unicode_doctest_module="""
+ # -*- encoding: utf-8 -*-
+ from __future__ import unicode_literals
+
+ def fix_bad_unicode(text):
+ '''
+ >>> print(fix_bad_unicode('único'))
+ único
+ '''
+ return "único"
+ """
+ )
+ result = testdir.runpytest(p, "--doctest-modules")
+ result.stdout.fnmatch_lines(["* 1 passed *"])
+
+ def test_print_unicode_value(self, testdir):
+ """
+ Test case for issue 3583: Printing Unicode in doctest under Python 2.7
+ doesn't work
+ """
+ p = testdir.maketxtfile(
+ test_print_unicode_value=r"""
+ Here is a doctest::
+
+ >>> print(u'\xE5\xE9\xEE\xF8\xFC')
+ åéîøü
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(["* 1 passed *"])
+
+ def test_reportinfo(self, testdir):
+ """
+ Test case to make sure that DoctestItem.reportinfo() returns lineno.
+ """
+ p = testdir.makepyfile(
+ test_reportinfo="""
+ def foo(x):
+ '''
+ >>> foo('a')
+ 'b'
+ '''
+ return 'c'
+ """
+ )
+ items, reprec = testdir.inline_genitems(p, "--doctest-modules")
+ reportinfo = items[0].reportinfo()
+ assert reportinfo[1] == 1
+
+ def test_valid_setup_py(self, testdir):
+ """
+        Test to make sure that pytest ignores valid setup.py files when run
+ with --doctest-modules
+ """
+ p = testdir.makepyfile(
+ setup="""
+ from setuptools import setup, find_packages
+ setup(name='sample',
+ version='0.0',
+ description='description',
+ packages=find_packages()
+ )
+ """
+ )
+ result = testdir.runpytest(p, "--doctest-modules")
+ result.stdout.fnmatch_lines(["*collected 0 items*"])
+
+ def test_invalid_setup_py(self, testdir):
+ """
+ Test to make sure that pytest reads setup.py files that are not used
+        for Python packages when run with --doctest-modules
+ """
+ p = testdir.makepyfile(
+ setup="""
+ def test_foo():
+ return 'bar'
+ """
+ )
+ result = testdir.runpytest(p, "--doctest-modules")
+ result.stdout.fnmatch_lines(["*collected 1 item*"])
+
+
+class TestLiterals(object):
+
+ @pytest.mark.parametrize("config_mode", ["ini", "comment"])
+ def test_allow_unicode(self, testdir, config_mode):
+ """Test that doctests which output unicode work in all python versions
+ tested by pytest when the ALLOW_UNICODE option is used (either in
+ the ini file or by an inline comment).
+ """
+ if config_mode == "ini":
+ testdir.makeini(
+ """
+ [pytest]
+ doctest_optionflags = ALLOW_UNICODE
+ """
+ )
+ comment = ""
+ else:
+ comment = "#doctest: +ALLOW_UNICODE"
+
+ testdir.maketxtfile(
+ test_doc="""
+ >>> b'12'.decode('ascii') {comment}
+ '12'
+ """.format(
+ comment=comment
+ )
+ )
+ testdir.makepyfile(
+ foo="""
+ def foo():
+ '''
+ >>> b'12'.decode('ascii') {comment}
+ '12'
+ '''
+ """.format(
+ comment=comment
+ )
+ )
+ reprec = testdir.inline_run("--doctest-modules")
+ reprec.assertoutcome(passed=2)
+
+ @pytest.mark.parametrize("config_mode", ["ini", "comment"])
+ def test_allow_bytes(self, testdir, config_mode):
+ """Test that doctests which output bytes work in all python versions
+ tested by pytest when the ALLOW_BYTES option is used (either in
+        the ini file or by an inline comment) (#1287).
+ """
+ if config_mode == "ini":
+ testdir.makeini(
+ """
+ [pytest]
+ doctest_optionflags = ALLOW_BYTES
+ """
+ )
+ comment = ""
+ else:
+ comment = "#doctest: +ALLOW_BYTES"
+
+ testdir.maketxtfile(
+ test_doc="""
+ >>> b'foo' {comment}
+ 'foo'
+ """.format(
+ comment=comment
+ )
+ )
+ testdir.makepyfile(
+ foo="""
+ def foo():
+ '''
+ >>> b'foo' {comment}
+ 'foo'
+ '''
+ """.format(
+ comment=comment
+ )
+ )
+ reprec = testdir.inline_run("--doctest-modules")
+ reprec.assertoutcome(passed=2)
+
+ def test_unicode_string(self, testdir):
+ """Test that doctests which output unicode fail in Python 2 when
+ the ALLOW_UNICODE option is not used. The same test should pass
+ in Python 3.
+ """
+ testdir.maketxtfile(
+ test_doc="""
+ >>> b'12'.decode('ascii')
+ '12'
+ """
+ )
+ reprec = testdir.inline_run()
+ passed = int(sys.version_info[0] >= 3)
+ reprec.assertoutcome(passed=passed, failed=int(not passed))
+
+ def test_bytes_literal(self, testdir):
+ """Test that doctests which output bytes fail in Python 3 when
+ the ALLOW_BYTES option is not used. The same test should pass
+ in Python 2 (#1287).
+ """
+ testdir.maketxtfile(
+ test_doc="""
+ >>> b'foo'
+ 'foo'
+ """
+ )
+ reprec = testdir.inline_run()
+ passed = int(sys.version_info[0] == 2)
+ reprec.assertoutcome(passed=passed, failed=int(not passed))
+
+
+class TestDoctestSkips(object):
+ """
+ If all examples in a doctest are skipped due to the SKIP option, then
+ the tests should be SKIPPED rather than PASSED. (#957)
+ """
+
+ @pytest.fixture(params=["text", "module"])
+ def makedoctest(self, testdir, request):
+
+ def makeit(doctest):
+ mode = request.param
+ if mode == "text":
+ testdir.maketxtfile(doctest)
+ else:
+ assert mode == "module"
+ testdir.makepyfile('"""\n%s"""' % doctest)
+
+ return makeit
+
+ def test_one_skipped(self, testdir, makedoctest):
+ makedoctest(
+ """
+ >>> 1 + 1 # doctest: +SKIP
+ 2
+ >>> 2 + 2
+ 4
+ """
+ )
+ reprec = testdir.inline_run("--doctest-modules")
+ reprec.assertoutcome(passed=1)
+
+ def test_one_skipped_failed(self, testdir, makedoctest):
+ makedoctest(
+ """
+ >>> 1 + 1 # doctest: +SKIP
+ 2
+ >>> 2 + 2
+ 200
+ """
+ )
+ reprec = testdir.inline_run("--doctest-modules")
+ reprec.assertoutcome(failed=1)
+
+ def test_all_skipped(self, testdir, makedoctest):
+ makedoctest(
+ """
+ >>> 1 + 1 # doctest: +SKIP
+ 2
+ >>> 2 + 2 # doctest: +SKIP
+ 200
+ """
+ )
+ reprec = testdir.inline_run("--doctest-modules")
+ reprec.assertoutcome(skipped=1)
+
+ def test_vacuous_all_skipped(self, testdir, makedoctest):
+ makedoctest("")
+ reprec = testdir.inline_run("--doctest-modules")
+ reprec.assertoutcome(passed=0, skipped=0)
+
+ def test_continue_on_failure(self, testdir):
+ testdir.maketxtfile(
+ test_something="""
+ >>> i = 5
+ >>> def foo():
+ ... raise ValueError('error1')
+ >>> foo()
+ >>> i
+ >>> i + 2
+ 7
+ >>> i + 1
+ """
+ )
+ result = testdir.runpytest("--doctest-modules", "--doctest-continue-on-failure")
+ result.assert_outcomes(passed=0, failed=1)
+        # The lines that contain the failure are 4, 5, and 8. The first one
+ # is a stack trace and the other two are mismatches.
+ result.stdout.fnmatch_lines(
+ ["*4: UnexpectedException*", "*5: DocTestFailure*", "*8: DocTestFailure*"]
+ )
+
+
+class TestDoctestAutoUseFixtures(object):
+
+ SCOPES = ["module", "session", "class", "function"]
+
+ def test_doctest_module_session_fixture(self, testdir):
+ """Test that session fixtures are initialized for doctest modules (#768)
+ """
+ # session fixture which changes some global data, which will
+ # be accessed by doctests in a module
+ testdir.makeconftest(
+ """
+ import pytest
+ import sys
+
+ @pytest.yield_fixture(autouse=True, scope='session')
+ def myfixture():
+ assert not hasattr(sys, 'pytest_session_data')
+ sys.pytest_session_data = 1
+ yield
+ del sys.pytest_session_data
+ """
+ )
+ testdir.makepyfile(
+ foo="""
+ import sys
+
+ def foo():
+ '''
+ >>> assert sys.pytest_session_data == 1
+ '''
+
+ def bar():
+ '''
+ >>> assert sys.pytest_session_data == 1
+ '''
+ """
+ )
+ result = testdir.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines("*2 passed*")
+
+ @pytest.mark.parametrize("scope", SCOPES)
+ @pytest.mark.parametrize("enable_doctest", [True, False])
+ def test_fixture_scopes(self, testdir, scope, enable_doctest):
+ """Test that auto-use fixtures work properly with doctest modules.
+ See #1057 and #1100.
+ """
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(autouse=True, scope="{scope}")
+ def auto(request):
+ return 99
+ """.format(
+ scope=scope
+ )
+ )
+ testdir.makepyfile(
+ test_1='''
+ def test_foo():
+ """
+ >>> getfixture('auto') + 1
+ 100
+ """
+ def test_bar():
+ assert 1
+ '''
+ )
+ params = ("--doctest-modules",) if enable_doctest else ()
+ passes = 3 if enable_doctest else 2
+ result = testdir.runpytest(*params)
+ result.stdout.fnmatch_lines(["*=== %d passed in *" % passes])
+
+ @pytest.mark.parametrize("scope", SCOPES)
+ @pytest.mark.parametrize("autouse", [True, False])
+ @pytest.mark.parametrize("use_fixture_in_doctest", [True, False])
+ def test_fixture_module_doctest_scopes(
+ self, testdir, scope, autouse, use_fixture_in_doctest
+ ):
+ """Test that auto-use fixtures work properly with doctest files.
+ See #1057 and #1100.
+ """
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(autouse={autouse}, scope="{scope}")
+ def auto(request):
+ return 99
+ """.format(
+ scope=scope, autouse=autouse
+ )
+ )
+ if use_fixture_in_doctest:
+ testdir.maketxtfile(
+ test_doc="""
+ >>> getfixture('auto')
+ 99
+ """
+ )
+ else:
+ testdir.maketxtfile(
+ test_doc="""
+ >>> 1 + 1
+ 2
+ """
+ )
+ result = testdir.runpytest("--doctest-modules")
+ assert "FAILURES" not in str(result.stdout.str())
+ result.stdout.fnmatch_lines(["*=== 1 passed in *"])
+
+ @pytest.mark.parametrize("scope", SCOPES)
+ def test_auto_use_request_attributes(self, testdir, scope):
+ """Check that all attributes of a request in an autouse fixture
+ behave as expected when requested for a doctest item.
+ """
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(autouse=True, scope="{scope}")
+ def auto(request):
+ if "{scope}" == 'module':
+ assert request.module is None
+ if "{scope}" == 'class':
+ assert request.cls is None
+ if "{scope}" == 'function':
+ assert request.function is None
+ return 99
+ """.format(
+ scope=scope
+ )
+ )
+ testdir.maketxtfile(
+ test_doc="""
+ >>> 1 + 1
+ 2
+ """
+ )
+ result = testdir.runpytest("--doctest-modules")
+ assert "FAILURES" not in str(result.stdout.str())
+ result.stdout.fnmatch_lines(["*=== 1 passed in *"])
+
+
+class TestDoctestNamespaceFixture(object):
+
+ SCOPES = ["module", "session", "class", "function"]
+
+ @pytest.mark.parametrize("scope", SCOPES)
+ def test_namespace_doctestfile(self, testdir, scope):
+ """
+ Check that inserting something into the namespace works in a
+ simple text file doctest
+ """
+ testdir.makeconftest(
+ """
+ import pytest
+ import contextlib
+
+ @pytest.fixture(autouse=True, scope="{scope}")
+ def add_contextlib(doctest_namespace):
+ doctest_namespace['cl'] = contextlib
+ """.format(
+ scope=scope
+ )
+ )
+ p = testdir.maketxtfile(
+ """
+ >>> print(cl.__name__)
+ contextlib
+ """
+ )
+ reprec = testdir.inline_run(p)
+ reprec.assertoutcome(passed=1)
+
+ @pytest.mark.parametrize("scope", SCOPES)
+ def test_namespace_pyfile(self, testdir, scope):
+ """
+ Check that inserting something into the namespace works in a
+ simple Python file docstring doctest
+ """
+ testdir.makeconftest(
+ """
+ import pytest
+ import contextlib
+
+ @pytest.fixture(autouse=True, scope="{scope}")
+ def add_contextlib(doctest_namespace):
+ doctest_namespace['cl'] = contextlib
+ """.format(
+ scope=scope
+ )
+ )
+ p = testdir.makepyfile(
+ """
+ def foo():
+ '''
+ >>> print(cl.__name__)
+ contextlib
+ '''
+ """
+ )
+ reprec = testdir.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(passed=1)
+
+
+class TestDoctestReportingOption(object):
+
+ def _run_doctest_report(self, testdir, format):
+ testdir.makepyfile(
+ """
+ def foo():
+ '''
+ >>> foo()
+ a b
+ 0 1 4
+ 1 2 4
+ 2 3 6
+ '''
+ print(' a b\\n'
+ '0 1 4\\n'
+ '1 2 5\\n'
+ '2 3 6')
+ """
+ )
+ return testdir.runpytest("--doctest-modules", "--doctest-report", format)
+
+ @pytest.mark.parametrize("format", ["udiff", "UDIFF", "uDiFf"])
+ def test_doctest_report_udiff(self, testdir, format):
+ result = self._run_doctest_report(testdir, format)
+ result.stdout.fnmatch_lines(
+ [" 0 1 4", " -1 2 4", " +1 2 5", " 2 3 6"]
+ )
+
+ def test_doctest_report_cdiff(self, testdir):
+ result = self._run_doctest_report(testdir, "cdiff")
+ result.stdout.fnmatch_lines(
+ [
+ " a b",
+ " 0 1 4",
+ " ! 1 2 4",
+ " 2 3 6",
+ " --- 1,4 ----",
+ " a b",
+ " 0 1 4",
+ " ! 1 2 5",
+ " 2 3 6",
+ ]
+ )
+
+ def test_doctest_report_ndiff(self, testdir):
+ result = self._run_doctest_report(testdir, "ndiff")
+ result.stdout.fnmatch_lines(
+ [
+ " a b",
+ " 0 1 4",
+ " - 1 2 4",
+ " ? ^",
+ " + 1 2 5",
+ " ? ^",
+ " 2 3 6",
+ ]
+ )
+
+ @pytest.mark.parametrize("format", ["none", "only_first_failure"])
+ def test_doctest_report_none_or_only_first_failure(self, testdir, format):
+ result = self._run_doctest_report(testdir, format)
+ result.stdout.fnmatch_lines(
+ [
+ "Expected:",
+ " a b",
+ " 0 1 4",
+ " 1 2 4",
+ " 2 3 6",
+ "Got:",
+ " a b",
+ " 0 1 4",
+ " 1 2 5",
+ " 2 3 6",
+ ]
+ )
+
+ def test_doctest_report_invalid(self, testdir):
+ result = self._run_doctest_report(testdir, "obviously_invalid_format")
+ result.stderr.fnmatch_lines(
+ [
+ "*error: argument --doctest-report: invalid choice: 'obviously_invalid_format' (choose from*"
+ ]
+ )
diff --git a/third_party/python/pytest/testing/test_entry_points.py b/third_party/python/pytest/testing/test_entry_points.py
new file mode 100644
index 0000000000..8f734778fa
--- /dev/null
+++ b/third_party/python/pytest/testing/test_entry_points.py
@@ -0,0 +1,14 @@
+from __future__ import absolute_import, division, print_function
+import pkg_resources
+
+import pytest
+
+
+@pytest.mark.parametrize("entrypoint", ["py.test", "pytest"])
+def test_entry_point_exist(entrypoint):
+ assert entrypoint in pkg_resources.get_entry_map("pytest")["console_scripts"]
+
+
+def test_pytest_entry_points_are_identical():
+ entryMap = pkg_resources.get_entry_map("pytest")["console_scripts"]
+ assert entryMap["pytest"].module_name == entryMap["py.test"].module_name
diff --git a/third_party/python/pytest/testing/test_helpconfig.py b/third_party/python/pytest/testing/test_helpconfig.py
new file mode 100644
index 0000000000..b5424235b1
--- /dev/null
+++ b/third_party/python/pytest/testing/test_helpconfig.py
@@ -0,0 +1,72 @@
+from __future__ import absolute_import, division, print_function
+from _pytest.main import EXIT_NOTESTSCOLLECTED
+import pytest
+
+
+def test_version(testdir, pytestconfig):
+ result = testdir.runpytest("--version")
+ assert result.ret == 0
+ # p = py.path.local(py.__file__).dirpath()
+ result.stderr.fnmatch_lines(["*pytest*%s*imported from*" % (pytest.__version__,)])
+ if pytestconfig.pluginmanager.list_plugin_distinfo():
+ result.stderr.fnmatch_lines(["*setuptools registered plugins:", "*at*"])
+
+
+def test_help(testdir):
+ result = testdir.runpytest("--help")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ """
+ *-v*verbose*
+ *setup.cfg*
+ *minversion*
+ *to see*markers*pytest --markers*
+ *to see*fixtures*pytest --fixtures*
+ """
+ )
+
+
+def test_hookvalidation_unknown(testdir):
+ testdir.makeconftest(
+ """
+ def pytest_hello(xyz):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(["*unknown hook*pytest_hello*"])
+
+
+def test_hookvalidation_optional(testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ @pytest.hookimpl(optionalhook=True)
+ def pytest_hello(xyz):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+
+
+def test_traceconfig(testdir):
+ result = testdir.runpytest("--traceconfig")
+ result.stdout.fnmatch_lines(["*using*pytest*py*", "*active plugins*"])
+
+
+def test_debug(testdir, monkeypatch):
+ result = testdir.runpytest_subprocess("--debug")
+ assert result.ret == EXIT_NOTESTSCOLLECTED
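+ # --debug writes internal hook tracing to pytestdebug.log in the invocation directory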
+ p = testdir.tmpdir.join("pytestdebug.log")
+ assert "pytest_sessionstart" in p.read()
+
+
+def test_PYTEST_DEBUG(testdir, monkeypatch):
+ monkeypatch.setenv("PYTEST_DEBUG", "1")
+ result = testdir.runpytest_subprocess()
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+ result.stderr.fnmatch_lines(
+ ["*pytest_plugin_registered*", "*manager*PluginManager*"]
+ )
diff --git a/third_party/python/pytest/testing/test_junitxml.py b/third_party/python/pytest/testing/test_junitxml.py
new file mode 100644
index 0000000000..d0be5f2675
--- /dev/null
+++ b/third_party/python/pytest/testing/test_junitxml.py
@@ -0,0 +1,1231 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import, division, print_function
+from xml.dom import minidom
+import py
+import sys
+import os
+from _pytest.junitxml import LogXML
+import pytest
+
+
+def runandparse(testdir, *args):
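+ # run pytest with --junitxml and return the run result together with the parsed XML document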
+ resultpath = testdir.tmpdir.join("junit.xml")
+ result = testdir.runpytest("--junitxml=%s" % resultpath, *args)
+ xmldoc = minidom.parse(str(resultpath))
+ return result, DomNode(xmldoc)
+
+
+def assert_attr(node, **kwargs):
+ __tracebackhide__ = True
+
+ def nodeval(node, name):
+ anode = node.getAttributeNode(name)
+ if anode is not None:
+ return anode.value
+
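+ # compare only the attributes that were asked for, coercing expected values to str to match the DOM values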
+ expected = {name: str(value) for name, value in kwargs.items()}
+ on_node = {name: nodeval(node, name) for name in expected}
+ assert on_node == expected
+
+
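+# DomNode is a small convenience wrapper around xml.dom.minidom nodes used by the assertions in this module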
+class DomNode(object):
+
+ def __init__(self, dom):
+ self.__node = dom
+
+ def __repr__(self):
+ return self.__node.toxml()
+
+ def find_first_by_tag(self, tag):
+ return self.find_nth_by_tag(tag, 0)
+
+ def _by_tag(self, tag):
+ return self.__node.getElementsByTagName(tag)
+
+ def find_nth_by_tag(self, tag, n):
+ items = self._by_tag(tag)
+ try:
+ nth = items[n]
+ except IndexError:
+ pass
+ else:
+ return type(self)(nth)
+
+ def find_by_tag(self, tag):
+ t = type(self)
+ return [t(x) for x in self.__node.getElementsByTagName(tag)]
+
+ def __getitem__(self, key):
+ node = self.__node.getAttributeNode(key)
+ if node is not None:
+ return node.value
+
+ def assert_attr(self, **kwargs):
+ __tracebackhide__ = True
+ return assert_attr(self.__node, **kwargs)
+
+ def toxml(self):
+ return self.__node.toxml()
+
+ @property
+ def text(self):
+ return self.__node.childNodes[0].wholeText
+
+ @property
+ def tag(self):
+ return self.__node.tagName
+
+ @property
+ def next_siebling(self):
+ return type(self)(self.__node.nextSibling)
+
+
+class TestPython(object):
+
+ def test_summing_simple(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_pass():
+ pass
+ def test_fail():
+ assert 0
+ def test_skip():
+ pytest.skip("")
+ @pytest.mark.xfail
+ def test_xfail():
+ assert 0
+ @pytest.mark.xfail
+ def test_xpass():
+ assert 1
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(name="pytest", errors=0, failures=1, skips=2, tests=5)
+
+ def test_summing_simple_with_errors(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def fixture():
+ raise Exception()
+ def test_pass():
+ pass
+ def test_fail():
+ assert 0
+ def test_error(fixture):
+ pass
+ @pytest.mark.xfail
+ def test_xfail():
+ assert False
+ @pytest.mark.xfail(strict=True)
+ def test_xpass():
+ assert True
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(name="pytest", errors=1, failures=2, skips=1, tests=5)
+
+ def test_timing_function(self, testdir):
+ testdir.makepyfile(
+ """
+ import time, pytest
+ def setup_module():
+ time.sleep(0.01)
+ def teardown_module():
+ time.sleep(0.01)
+ def test_sleep():
+ time.sleep(0.01)
+ """
+ )
+ result, dom = runandparse(testdir)
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ val = tnode["time"]
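+ # setup, call and teardown each sleep 0.01s, so the reported time should cover all three phases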
+ assert round(float(val), 2) >= 0.03
+
+ def test_setup_error(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def arg(request):
+ raise ValueError()
+ def test_function(arg):
+ pass
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(errors=1, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ file="test_setup_error.py",
+ line="5",
+ classname="test_setup_error",
+ name="test_function",
+ )
+ fnode = tnode.find_first_by_tag("error")
+ fnode.assert_attr(message="test setup failure")
+ assert "ValueError" in fnode.toxml()
+
+ def test_teardown_error(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def arg():
+ yield
+ raise ValueError()
+ def test_function(arg):
+ pass
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ file="test_teardown_error.py",
+ line="6",
+ classname="test_teardown_error",
+ name="test_function",
+ )
+ fnode = tnode.find_first_by_tag("error")
+ fnode.assert_attr(message="test teardown failure")
+ assert "ValueError" in fnode.toxml()
+
+ def test_call_failure_teardown_error(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def arg():
+ yield
+ raise Exception("Teardown Exception")
+ def test_function(arg):
+ raise Exception("Call Exception")
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(errors=1, failures=1, tests=1)
+ first, second = dom.find_by_tag("testcase")
+ if not first or not second or first == second:
+ assert 0
+ fnode = first.find_first_by_tag("failure")
+ fnode.assert_attr(message="Exception: Call Exception")
+ snode = second.find_first_by_tag("error")
+ snode.assert_attr(message="test teardown failure")
+
+ def test_skip_contains_name_reason(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_skip():
+ pytest.skip("hello23")
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(skips=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ file="test_skip_contains_name_reason.py",
+ line="1",
+ classname="test_skip_contains_name_reason",
+ name="test_skip",
+ )
+ snode = tnode.find_first_by_tag("skipped")
+ snode.assert_attr(type="pytest.skip", message="hello23")
+
+ def test_mark_skip_contains_name_reason(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip(reason="hello24")
+ def test_skip():
+ assert True
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(skips=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ file="test_mark_skip_contains_name_reason.py",
+ line="1",
+ classname="test_mark_skip_contains_name_reason",
+ name="test_skip",
+ )
+ snode = tnode.find_first_by_tag("skipped")
+ snode.assert_attr(type="pytest.skip", message="hello24")
+
+ def test_mark_skipif_contains_name_reason(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ GLOBAL_CONDITION = True
+ @pytest.mark.skipif(GLOBAL_CONDITION, reason="hello25")
+ def test_skip():
+ assert True
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(skips=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ file="test_mark_skipif_contains_name_reason.py",
+ line="2",
+ classname="test_mark_skipif_contains_name_reason",
+ name="test_skip",
+ )
+ snode = tnode.find_first_by_tag("skipped")
+ snode.assert_attr(type="pytest.skip", message="hello25")
+
+ def test_mark_skip_doesnt_capture_output(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip(reason="foo")
+ def test_skip():
+ print("bar!")
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret == 0
+ node_xml = dom.find_first_by_tag("testsuite").toxml()
+ assert "bar!" not in node_xml
+
+ def test_classname_instance(self, testdir):
+ testdir.makepyfile(
+ """
+ class TestClass(object):
+ def test_method(self):
+ assert 0
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(failures=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ file="test_classname_instance.py",
+ line="1",
+ classname="test_classname_instance.TestClass",
+ name="test_method",
+ )
+
+ def test_classname_nested_dir(self, testdir):
+ p = testdir.tmpdir.ensure("sub", "test_hello.py")
+ p.write("def test_func(): 0/0")
+ result, dom = runandparse(testdir)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(failures=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ file=os.path.join("sub", "test_hello.py"),
+ line="0",
+ classname="sub.test_hello",
+ name="test_func",
+ )
+
+ def test_internal_error(self, testdir):
+ testdir.makeconftest("def pytest_runtest_protocol(): 0 / 0")
+ testdir.makepyfile("def test_function(): pass")
+ result, dom = runandparse(testdir)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(errors=1, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(classname="pytest", name="internal")
+ fnode = tnode.find_first_by_tag("error")
+ fnode.assert_attr(message="internal error")
+ assert "Division" in fnode.toxml()
+
+ @pytest.mark.parametrize("junit_logging", ["no", "system-out", "system-err"])
+ def test_failure_function(self, testdir, junit_logging):
+ testdir.makepyfile(
+ """
+ import logging
+ import sys
+
+ def test_fail():
+ print ("hello-stdout")
+ sys.stderr.write("hello-stderr\\n")
+ logging.info('info msg')
+ logging.warning('warning msg')
+ raise ValueError(42)
+ """
+ )
+
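+ # junit_logging controls whether captured log records go to system-out, system-err, or are dropped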
+ result, dom = runandparse(testdir, "-o", "junit_logging=%s" % junit_logging)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(failures=1, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ file="test_failure_function.py",
+ line="3",
+ classname="test_failure_function",
+ name="test_fail",
+ )
+ fnode = tnode.find_first_by_tag("failure")
+ fnode.assert_attr(message="ValueError: 42")
+ assert "ValueError" in fnode.toxml()
+ systemout = fnode.next_siebling
+ assert systemout.tag == "system-out"
+ assert "hello-stdout" in systemout.toxml()
+ assert "info msg" not in systemout.toxml()
+ systemerr = systemout.next_siebling
+ assert systemerr.tag == "system-err"
+ assert "hello-stderr" in systemerr.toxml()
+ assert "info msg" not in systemerr.toxml()
+
+ if junit_logging == "system-out":
+ assert "warning msg" in systemout.toxml()
+ assert "warning msg" not in systemerr.toxml()
+ elif junit_logging == "system-err":
+ assert "warning msg" not in systemout.toxml()
+ assert "warning msg" in systemerr.toxml()
+ elif junit_logging == "no":
+ assert "warning msg" not in systemout.toxml()
+ assert "warning msg" not in systemerr.toxml()
+
+ def test_failure_verbose_message(self, testdir):
+ testdir.makepyfile(
+ """
+ import sys
+ def test_fail():
+ assert 0, "An error"
+ """
+ )
+
+ result, dom = runandparse(testdir)
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ fnode = tnode.find_first_by_tag("failure")
+ fnode.assert_attr(message="AssertionError: An error assert 0")
+
+ def test_failure_escape(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('arg1', "<&'", ids="<&'")
+ def test_func(arg1):
+ print(arg1)
+ assert 0
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(failures=3, tests=3)
+
+ for index, char in enumerate("<&'"):
+
+ tnode = node.find_nth_by_tag("testcase", index)
+ tnode.assert_attr(
+ file="test_failure_escape.py",
+ line="1",
+ classname="test_failure_escape",
+ name="test_func[%s]" % char,
+ )
+ sysout = tnode.find_first_by_tag("system-out")
+ text = sysout.text
+ assert text == "%s\n" % char
+
+ def test_junit_prefixing(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_func():
+ assert 0
+ class TestHello(object):
+ def test_hello(self):
+ pass
+ """
+ )
+ result, dom = runandparse(testdir, "--junitprefix=xyz")
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(failures=1, tests=2)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ file="test_junit_prefixing.py",
+ line="0",
+ classname="xyz.test_junit_prefixing",
+ name="test_func",
+ )
+ tnode = node.find_nth_by_tag("testcase", 1)
+ tnode.assert_attr(
+ file="test_junit_prefixing.py",
+ line="3",
+ classname="xyz.test_junit_prefixing." "TestHello",
+ name="test_hello",
+ )
+
+ def test_xfailure_function(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_xfail():
+ pytest.xfail("42")
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert not result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(skips=1, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ file="test_xfailure_function.py",
+ line="1",
+ classname="test_xfailure_function",
+ name="test_xfail",
+ )
+ fnode = tnode.find_first_by_tag("skipped")
+ fnode.assert_attr(message="expected test failure")
+ # assert "ValueError" in fnode.toxml()
+
+ def test_xfail_captures_output_once(self, testdir):
+ testdir.makepyfile(
+ """
+ import sys
+ import pytest
+
+ @pytest.mark.xfail()
+ def test_fail():
+ sys.stdout.write('XFAIL This is stdout')
+ sys.stderr.write('XFAIL This is stderr')
+ assert 0
+ """
+ )
+ result, dom = runandparse(testdir)
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ assert len(tnode.find_by_tag("system-err")) == 1
+ assert len(tnode.find_by_tag("system-out")) == 1
+
+ def test_xfailure_xpass(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test_xpass():
+ pass
+ """
+ )
+ result, dom = runandparse(testdir)
+ # assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(skips=0, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ file="test_xfailure_xpass.py",
+ line="1",
+ classname="test_xfailure_xpass",
+ name="test_xpass",
+ )
+
+ def test_xfailure_xpass_strict(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail(strict=True, reason="This needs to fail!")
+ def test_xpass():
+ pass
+ """
+ )
+ result, dom = runandparse(testdir)
+ # assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(skips=0, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ file="test_xfailure_xpass_strict.py",
+ line="1",
+ classname="test_xfailure_xpass_strict",
+ name="test_xpass",
+ )
+ fnode = tnode.find_first_by_tag("failure")
+ fnode.assert_attr(message="[XPASS(strict)] This needs to fail!")
+
+ def test_collect_error(self, testdir):
+ testdir.makepyfile("syntax error")
+ result, dom = runandparse(testdir)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(errors=1, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(file="test_collect_error.py", name="test_collect_error")
+ assert tnode["line"] is None
+ fnode = tnode.find_first_by_tag("error")
+ fnode.assert_attr(message="collection failure")
+ assert "SyntaxError" in fnode.toxml()
+
+ def test_unicode(self, testdir):
+ value = "hx\xc4\x85\xc4\x87\n"
+ testdir.makepyfile(
+ """
+ # coding: latin1
+ def test_hello():
+ print (%r)
+ assert 0
+ """
+ % value
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret == 1
+ tnode = dom.find_first_by_tag("testcase")
+ fnode = tnode.find_first_by_tag("failure")
+ if not sys.platform.startswith("java"):
+ assert "hx" in fnode.toxml()
+
+ def test_assertion_binchars(self, testdir):
+ """this test did fail when the escaping wasnt strict"""
+ testdir.makepyfile(
+ """
+
+ M1 = '\x01\x02\x03\x04'
+ M2 = '\x01\x02\x03\x05'
+
+ def test_str_compare():
+ assert M1 == M2
+ """
+ )
+ result, dom = runandparse(testdir)
+ print(dom.toxml())
+
+ def test_pass_captures_stdout(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_pass():
+ print('hello-stdout')
+ """
+ )
+ result, dom = runandparse(testdir)
+ node = dom.find_first_by_tag("testsuite")
+ pnode = node.find_first_by_tag("testcase")
+ systemout = pnode.find_first_by_tag("system-out")
+ assert "hello-stdout" in systemout.toxml()
+
+ def test_pass_captures_stderr(self, testdir):
+ testdir.makepyfile(
+ """
+ import sys
+ def test_pass():
+ sys.stderr.write('hello-stderr')
+ """
+ )
+ result, dom = runandparse(testdir)
+ node = dom.find_first_by_tag("testsuite")
+ pnode = node.find_first_by_tag("testcase")
+ systemout = pnode.find_first_by_tag("system-err")
+ assert "hello-stderr" in systemout.toxml()
+
+ def test_setup_error_captures_stdout(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def arg(request):
+ print('hello-stdout')
+ raise ValueError()
+ def test_function(arg):
+ pass
+ """
+ )
+ result, dom = runandparse(testdir)
+ node = dom.find_first_by_tag("testsuite")
+ pnode = node.find_first_by_tag("testcase")
+ systemout = pnode.find_first_by_tag("system-out")
+ assert "hello-stdout" in systemout.toxml()
+
+ def test_setup_error_captures_stderr(self, testdir):
+ testdir.makepyfile(
+ """
+ import sys
+ import pytest
+
+ @pytest.fixture
+ def arg(request):
+ sys.stderr.write('hello-stderr')
+ raise ValueError()
+ def test_function(arg):
+ pass
+ """
+ )
+ result, dom = runandparse(testdir)
+ node = dom.find_first_by_tag("testsuite")
+ pnode = node.find_first_by_tag("testcase")
+ systemout = pnode.find_first_by_tag("system-err")
+ assert "hello-stderr" in systemout.toxml()
+
+ def test_avoid_double_stdout(self, testdir):
+ testdir.makepyfile(
+ """
+ import sys
+ import pytest
+
+ @pytest.fixture
+ def arg(request):
+ yield
+ sys.stdout.write('hello-stdout teardown')
+ raise ValueError()
+ def test_function(arg):
+ sys.stdout.write('hello-stdout call')
+ """
+ )
+ result, dom = runandparse(testdir)
+ node = dom.find_first_by_tag("testsuite")
+ pnode = node.find_first_by_tag("testcase")
+ systemout = pnode.find_first_by_tag("system-out")
+ assert "hello-stdout call" in systemout.toxml()
+ assert "hello-stdout teardown" in systemout.toxml()
+
+
+def test_mangle_test_address():
+ from _pytest.junitxml import mangle_test_address
+
+ address = "::".join(["a/my.py.thing.py", "Class", "()", "method", "[a-1-::]"])
+ newnames = mangle_test_address(address)
+ assert newnames == ["a.my.py.thing", "Class", "method", "[a-1-::]"]
+
+
+def test_dont_configure_on_slaves(tmpdir):
+ gotten = []
+
+ class FakeConfig(object):
+
+ def __init__(self):
+ self.pluginmanager = self
+ self.option = self
+
+ def getini(self, name):
+ return "pytest"
+
+ junitprefix = None
+ # XXX: shouldn't need tmpdir?
+ xmlpath = str(tmpdir.join("junix.xml"))
+ register = gotten.append
+
+ fake_config = FakeConfig()
+ from _pytest import junitxml
+
+ junitxml.pytest_configure(fake_config)
+ assert len(gotten) == 1
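+ # once slaveinput is present (as on an xdist slave node) the plugin must not register again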
+ FakeConfig.slaveinput = None
+ junitxml.pytest_configure(fake_config)
+ assert len(gotten) == 1
+
+
+class TestNonPython(object):
+
+ def test_summing_simple(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ def pytest_collect_file(path, parent):
+ if path.ext == ".xyz":
+ return MyItem(path, parent)
+ class MyItem(pytest.Item):
+ def __init__(self, path, parent):
+ super(MyItem, self).__init__(path.basename, parent)
+ self.fspath = path
+ def runtest(self):
+ raise ValueError(42)
+ def repr_failure(self, excinfo):
+ return "custom item runtest failed"
+ """
+ )
+ testdir.tmpdir.join("myfile.xyz").write("hello")
+ result, dom = runandparse(testdir)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(errors=0, failures=1, skips=0, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(name="myfile.xyz")
+ fnode = tnode.find_first_by_tag("failure")
+ fnode.assert_attr(message="custom item runtest failed")
+ assert "custom item runtest failed" in fnode.toxml()
+
+
+def test_nullbyte(testdir):
+ # A null byte cannot occur in XML (see section 2.2 of the spec)
+ testdir.makepyfile(
+ """
+ import sys
+ def test_print_nullbyte():
+ sys.stdout.write('Here the null -->' + chr(0) + '<--')
+ sys.stdout.write('In repr form -->' + repr(chr(0)) + '<--')
+ assert False
+ """
+ )
+ xmlf = testdir.tmpdir.join("junit.xml")
+ testdir.runpytest("--junitxml=%s" % xmlf)
+ text = xmlf.read()
+ assert "\x00" not in text
+ assert "#x00" in text
+
+
+def test_nullbyte_replace(testdir):
+ # Check if the null byte gets replaced
+ testdir.makepyfile(
+ """
+ import sys
+ def test_print_nullbyte():
+ sys.stdout.write('Here the null -->' + chr(0) + '<--')
+ sys.stdout.write('In repr form -->' + repr(chr(0)) + '<--')
+ assert False
+ """
+ )
+ xmlf = testdir.tmpdir.join("junit.xml")
+ testdir.runpytest("--junitxml=%s" % xmlf)
+ text = xmlf.read()
+ assert "#x0" in text
+
+
+def test_invalid_xml_escape():
+ # Test some more invalid xml chars; the full range should really be
+ # tested, but let's just test the edges of the ranges instead.
+ # XXX This only tests low unicode character points for now as
+ # there are some issues with the testing infrastructure for
+ # the higher ones.
+ # XXX Testing 0xD (\r) is tricky as it overwrites the just written
+ # line in the output, so we skip it too.
+ global unichr
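+ # Python 3 has no unichr builtin, so fall back to chr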
+ try:
+ unichr(65)
+ except NameError:
+ unichr = chr
+ invalid = (
+ 0x00,
+ 0x1,
+ 0xB,
+ 0xC,
+ 0xE,
+ 0x19,
+ 27, # issue #126
+ 0xD800,
+ 0xDFFF,
+ 0xFFFE,
+ 0x0FFFF,
+ ) # , 0x110000)
+ valid = (0x9, 0xA, 0x20)
+ # 0xD, 0xD7FF, 0xE000, 0xFFFD, 0x10000, 0x10FFFF)
+
+ from _pytest.junitxml import bin_xml_escape
+
+ for i in invalid:
+ got = bin_xml_escape(unichr(i)).uniobj
+ if i <= 0xFF:
+ expected = "#x%02X" % i
+ else:
+ expected = "#x%04X" % i
+ assert got == expected
+ for i in valid:
+ assert chr(i) == bin_xml_escape(unichr(i)).uniobj
+
+
+def test_logxml_path_expansion(tmpdir, monkeypatch):
+ home_tilde = py.path.local(os.path.expanduser("~")).join("test.xml")
+
+ xml_tilde = LogXML("~%stest.xml" % tmpdir.sep, None)
+ assert xml_tilde.logfile == home_tilde
+
+ # this is here for when $HOME is not set correctly
+ monkeypatch.setenv("HOME", tmpdir)
+ home_var = os.path.normpath(os.path.expandvars("$HOME/test.xml"))
+
+ xml_var = LogXML("$HOME%stest.xml" % tmpdir.sep, None)
+ assert xml_var.logfile == home_var
+
+
+def test_logxml_changingdir(testdir):
+ testdir.makepyfile(
+ """
+ def test_func():
+ import os
+ os.chdir("a")
+ """
+ )
+ testdir.tmpdir.mkdir("a")
+ result = testdir.runpytest("--junitxml=a/x.xml")
+ assert result.ret == 0
+ assert testdir.tmpdir.join("a/x.xml").check()
+
+
+def test_logxml_makedir(testdir):
+ """--junitxml should automatically create directories for the xml file"""
+ testdir.makepyfile(
+ """
+ def test_pass():
+ pass
+ """
+ )
+ result = testdir.runpytest("--junitxml=path/to/results.xml")
+ assert result.ret == 0
+ assert testdir.tmpdir.join("path/to/results.xml").check()
+
+
+def test_logxml_check_isdir(testdir):
+ """Give an error if --junit-xml is a directory (#2089)"""
+ result = testdir.runpytest("--junit-xml=.")
+ result.stderr.fnmatch_lines(["*--junitxml must be a filename*"])
+
+
+def test_escaped_parametrized_names_xml(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('char', [u"\\x00"])
+ def test_func(char):
+ assert char
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testcase")
+ node.assert_attr(name="test_func[\\x00]")
+
+
+def test_double_colon_split_function_issue469(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('param', ["double::colon"])
+ def test_func(param):
+ pass
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testcase")
+ node.assert_attr(classname="test_double_colon_split_function_issue469")
+ node.assert_attr(name="test_func[double::colon]")
+
+
+def test_double_colon_split_method_issue469(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ class TestClass(object):
+ @pytest.mark.parametrize('param', ["double::colon"])
+ def test_func(self, param):
+ pass
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testcase")
+ node.assert_attr(classname="test_double_colon_split_method_issue469.TestClass")
+ node.assert_attr(name="test_func[double::colon]")
+
+
+def test_unicode_issue368(testdir):
+ path = testdir.tmpdir.join("test.xml")
+ log = LogXML(str(path), None)
+ ustr = py.builtin._totext("ВНИ!", "utf-8")
+ from _pytest.runner import BaseReport
+
+ class Report(BaseReport):
+ longrepr = ustr
+ sections = []
+ nodeid = "something"
+ location = "tests/filename.py", 42, "TestClass.method"
+
+ test_report = Report()
+
+ # hopefully this is not too brittle ...
+ log.pytest_sessionstart()
+ node_reporter = log._opentestcase(test_report)
+ node_reporter.append_failure(test_report)
+ node_reporter.append_collect_error(test_report)
+ node_reporter.append_collect_skipped(test_report)
+ node_reporter.append_error(test_report)
+ test_report.longrepr = "filename", 1, ustr
+ node_reporter.append_skipped(test_report)
+ test_report.longrepr = "filename", 1, "Skipped: 卡嘣嘣"
+ node_reporter.append_skipped(test_report)
+ test_report.wasxfail = ustr
+ node_reporter.append_skipped(test_report)
+ log.pytest_sessionfinish()
+
+
+def test_record_property(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def other(record_property):
+ record_property("bar", 1)
+ def test_record(record_property, other):
+ record_property("foo", "<1");
+ """
+ )
+ result, dom = runandparse(testdir, "-rwv")
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ psnode = tnode.find_first_by_tag("properties")
+ pnodes = psnode.find_by_tag("property")
+ pnodes[0].assert_attr(name="bar", value="1")
+ pnodes[1].assert_attr(name="foo", value="<1")
+
+
+def test_record_property_same_name(testdir):
+ testdir.makepyfile(
+ """
+ def test_record_with_same_name(record_property):
+ record_property("foo", "bar")
+ record_property("foo", "baz")
+ """
+ )
+ result, dom = runandparse(testdir, "-rw")
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ psnode = tnode.find_first_by_tag("properties")
+ pnodes = psnode.find_by_tag("property")
+ pnodes[0].assert_attr(name="foo", value="bar")
+ pnodes[1].assert_attr(name="foo", value="baz")
+
+
+def test_record_attribute(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def other(record_xml_attribute):
+ record_xml_attribute("bar", 1)
+ def test_record(record_xml_attribute, other):
+ record_xml_attribute("foo", "<1");
+ """
+ )
+ result, dom = runandparse(testdir, "-rw")
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(bar="1")
+ tnode.assert_attr(foo="<1")
+ result.stdout.fnmatch_lines(
+ ["test_record_attribute.py::test_record", "*record_xml_attribute*experimental*"]
+ )
+
+
+def test_random_report_log_xdist(testdir):
+ """xdist calls pytest_runtest_logreport as they are executed by the slaves,
+ with nodes from several nodes overlapping, so junitxml must cope with that
+ to produce correct reports. #1064
+ """
+ pytest.importorskip("xdist")
+ testdir.makepyfile(
+ """
+ import pytest, time
+ @pytest.mark.parametrize('i', list(range(30)))
+ def test_x(i):
+ assert i != 22
+ """
+ )
+ _, dom = runandparse(testdir, "-n2")
+ suite_node = dom.find_first_by_tag("testsuite")
+ failed = []
+ for case_node in suite_node.find_by_tag("testcase"):
+ if case_node.find_first_by_tag("failure"):
+ failed.append(case_node["name"])
+
+ assert failed == ["test_x[22]"]
+
+
+def test_runs_twice(testdir):
+ f = testdir.makepyfile(
+ """
+ def test_pass():
+ pass
+ """
+ )
+
+ result, dom = runandparse(testdir, f, f)
+ assert "INTERNALERROR" not in result.stdout.str()
+ first, second = [x["classname"] for x in dom.find_by_tag("testcase")]
+ assert first == second
+
+
+@pytest.mark.xfail(reason="hangs", run=False)
+def test_runs_twice_xdist(testdir):
+ pytest.importorskip("xdist")
+ f = testdir.makepyfile(
+ """
+ def test_pass():
+ pass
+ """
+ )
+
+ result, dom = runandparse(testdir, f, "--dist", "each", "--tx", "2*popen")
+ assert "INTERNALERROR" not in result.stdout.str()
+ first, second = [x["classname"] for x in dom.find_by_tag("testcase")]
+ assert first == second
+
+
+def test_fancy_items_regression(testdir):
+ # issue 1259
+ testdir.makeconftest(
+ """
+ import pytest
+ class FunItem(pytest.Item):
+ def runtest(self):
+ pass
+ class NoFunItem(pytest.Item):
+ def runtest(self):
+ pass
+
+ class FunCollector(pytest.File):
+ def collect(self):
+ return [
+ FunItem('a', self),
+ NoFunItem('a', self),
+ NoFunItem('b', self),
+ ]
+
+ def pytest_collect_file(path, parent):
+ if path.check(ext='.py'):
+ return FunCollector(path, parent)
+ """
+ )
+
+ testdir.makepyfile(
+ """
+ def test_pass():
+ pass
+ """
+ )
+
+ result, dom = runandparse(testdir)
+
+ assert "INTERNALERROR" not in result.stdout.str()
+
+ items = sorted(
+ "%(classname)s %(name)s %(file)s" % x for x in dom.find_by_tag("testcase")
+ )
+ import pprint
+
+ pprint.pprint(items)
+ assert (
+ items
+ == [
+ u"conftest a conftest.py",
+ u"conftest a conftest.py",
+ u"conftest b conftest.py",
+ u"test_fancy_items_regression a test_fancy_items_regression.py",
+ u"test_fancy_items_regression a test_fancy_items_regression.py",
+ u"test_fancy_items_regression b test_fancy_items_regression.py",
+ u"test_fancy_items_regression test_pass" u" test_fancy_items_regression.py",
+ ]
+ )
+
+
+def test_global_properties(testdir):
+ path = testdir.tmpdir.join("test_global_properties.xml")
+ log = LogXML(str(path), None)
+ from _pytest.runner import BaseReport
+
+ class Report(BaseReport):
+ sections = []
+ nodeid = "test_node_id"
+
+ log.pytest_sessionstart()
+ log.add_global_property("foo", 1)
+ log.add_global_property("bar", 2)
+ log.pytest_sessionfinish()
+
+ dom = minidom.parse(str(path))
+
+ properties = dom.getElementsByTagName("properties")
+
+ assert properties.length == 1, "There must be one <properties> node"
+
+ property_list = dom.getElementsByTagName("property")
+
+ assert property_list.length == 2, "There must be only 2 property nodes"
+
+ expected = {"foo": "1", "bar": "2"}
+ actual = {}
+
+ for p in property_list:
+ k = str(p.getAttribute("name"))
+ v = str(p.getAttribute("value"))
+ actual[k] = v
+
+ assert actual == expected
+
+
+def test_url_property(testdir):
+ test_url = "http://www.github.com/pytest-dev"
+ path = testdir.tmpdir.join("test_url_property.xml")
+ log = LogXML(str(path), None)
+ from _pytest.runner import BaseReport
+
+ class Report(BaseReport):
+ longrepr = "FooBarBaz"
+ sections = []
+ nodeid = "something"
+ location = "tests/filename.py", 42, "TestClass.method"
+ url = test_url
+
+ test_report = Report()
+
+ log.pytest_sessionstart()
+ node_reporter = log._opentestcase(test_report)
+ node_reporter.append_failure(test_report)
+ log.pytest_sessionfinish()
+
+ test_case = minidom.parse(str(path)).getElementsByTagName("testcase")[0]
+
+ assert (
+ test_case.getAttribute("url") == test_url
+ ), "The URL did not get written to the xml"
+
+
+@pytest.mark.parametrize("suite_name", ["my_suite", ""])
+def test_set_suite_name(testdir, suite_name):
+ if suite_name:
+ testdir.makeini(
+ """
+ [pytest]
+ junit_suite_name={}
+ """.format(
+ suite_name
+ )
+ )
+ expected = suite_name
+ else:
+ expected = "pytest"
+ testdir.makepyfile(
+ """
+ import pytest
+
+ def test_func():
+ pass
+ """
+ )
+ result, dom = runandparse(testdir)
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(name=expected)
diff --git a/third_party/python/pytest/testing/test_mark.py b/third_party/python/pytest/testing/test_mark.py
new file mode 100644
index 0000000000..e2e7369dc7
--- /dev/null
+++ b/third_party/python/pytest/testing/test_mark.py
@@ -0,0 +1,1134 @@
+from __future__ import absolute_import, division, print_function
+import os
+import sys
+import mock
+import pytest
+from _pytest.mark import (
+ MarkGenerator as Mark,
+ ParameterSet,
+ transfer_markers,
+ EMPTY_PARAMETERSET_OPTION,
+)
+from _pytest.nodes import Node
+
+ignore_markinfo = pytest.mark.filterwarnings(
+ "ignore:MarkInfo objects:_pytest.deprecated.RemovedInPytest4Warning"
+)
+
+
+class TestMark(object):
+
+ def test_markinfo_repr(self):
+ from _pytest.mark import MarkInfo, Mark
+
+ m = MarkInfo.for_mark(Mark("hello", (1, 2), {}))
+ repr(m)
+
+ @pytest.mark.parametrize("attr", ["mark", "param"])
+ @pytest.mark.parametrize("modulename", ["py.test", "pytest"])
+ def test_pytest_exists_in_namespace_all(self, attr, modulename):
+ module = sys.modules[modulename]
+ assert attr in module.__all__
+
+ def test_pytest_mark_notcallable(self):
+ mark = Mark()
+ pytest.raises((AttributeError, TypeError), mark)
+
+ def test_mark_with_param(self):
+
+ def some_function(abc):
+ pass
+
+ class SomeClass(object):
+ pass
+
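+ # applying a mark to a single callable returns that callable; with_args treats it as a mark argument and returns a new MarkDecorator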
+ assert pytest.mark.fun(some_function) is some_function
+ assert pytest.mark.fun.with_args(some_function) is not some_function
+
+ assert pytest.mark.fun(SomeClass) is SomeClass
+ assert pytest.mark.fun.with_args(SomeClass) is not SomeClass
+
+ def test_pytest_mark_name_starts_with_underscore(self):
+ mark = Mark()
+ pytest.raises(AttributeError, getattr, mark, "_some_name")
+
+ def test_pytest_mark_bare(self):
+ mark = Mark()
+
+ def f():
+ pass
+
+ mark.hello(f)
+ assert f.hello
+
+ @ignore_markinfo
+ def test_pytest_mark_keywords(self):
+ mark = Mark()
+
+ def f():
+ pass
+
+ mark.world(x=3, y=4)(f)
+ assert f.world
+ assert f.world.kwargs["x"] == 3
+ assert f.world.kwargs["y"] == 4
+
+ @ignore_markinfo
+ def test_apply_multiple_and_merge(self):
+ mark = Mark()
+
+ def f():
+ pass
+
+ mark.world
+ mark.world(x=3)(f)
+ assert f.world.kwargs["x"] == 3
+ mark.world(y=4)(f)
+ assert f.world.kwargs["x"] == 3
+ assert f.world.kwargs["y"] == 4
+ mark.world(y=1)(f)
+ assert f.world.kwargs["y"] == 1
+ assert len(f.world.args) == 0
+
+ @ignore_markinfo
+ def test_pytest_mark_positional(self):
+ mark = Mark()
+
+ def f():
+ pass
+
+ mark.world("hello")(f)
+ assert f.world.args[0] == "hello"
+ mark.world("world")(f)
+
+ @ignore_markinfo
+ def test_pytest_mark_positional_func_and_keyword(self):
+ mark = Mark()
+
+ def f():
+ raise Exception
+
+ m = mark.world(f, omega="hello")
+
+ def g():
+ pass
+
+ assert m(g) == g
+ assert g.world.args[0] is f
+ assert g.world.kwargs["omega"] == "hello"
+
+ @ignore_markinfo
+ def test_pytest_mark_reuse(self):
+ mark = Mark()
+
+ def f():
+ pass
+
+ w = mark.some
+ w("hello", reason="123")(f)
+ assert f.some.args[0] == "hello"
+ assert f.some.kwargs["reason"] == "123"
+
+ def g():
+ pass
+
+ w("world", reason2="456")(g)
+ assert g.some.args[0] == "world"
+ assert "reason" not in g.some.kwargs
+ assert g.some.kwargs["reason2"] == "456"
+
+
+def test_marked_class_run_twice(testdir, request):
+ """Test fails file is run twice that contains marked class.
+ See issue#683.
+ """
+ py_file = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('abc', [1, 2, 3])
+ class Test1(object):
+ def test_1(self, abc):
+ assert abc in [1, 2, 3]
+ """
+ )
+ file_name = os.path.basename(py_file.strpath)
+ rec = testdir.inline_run(file_name, file_name)
+ rec.assertoutcome(passed=6)
+
+
+def test_ini_markers(testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ markers =
+ a1: this is a webtest marker
+ a2: this is a smoke marker
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_markers(pytestconfig):
+ markers = pytestconfig.getini("markers")
+ print (markers)
+ assert len(markers) >= 2
+ assert markers[0].startswith("a1:")
+ assert markers[1].startswith("a2:")
+ """
+ )
+ rec = testdir.inline_run()
+ rec.assertoutcome(passed=1)
+
+
+def test_markers_option(testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ markers =
+ a1: this is a webtest marker
+ a1some: another marker
+ nodescription
+ """
+ )
+ result = testdir.runpytest("--markers")
+ result.stdout.fnmatch_lines(
+ ["*a1*this is a webtest*", "*a1some*another marker", "*nodescription*"]
+ )
+
+
+def test_ini_markers_whitespace(testdir):
+ testdir.makeini(
+ """
+ [pytest]
+ markers =
+ a1 : this is a whitespace marker
+ """
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.a1
+ def test_markers():
+ assert True
+ """
+ )
+ rec = testdir.inline_run("--strict", "-m", "a1")
+ rec.assertoutcome(passed=1)
+
+
+def test_marker_without_description(testdir):
+ testdir.makefile(
+ ".cfg",
+ setup="""
+ [tool:pytest]
+ markers=slow
+ """,
+ )
+ testdir.makeconftest(
+ """
+ import pytest
+ pytest.mark.xfail('FAIL')
+ """
+ )
+ ftdir = testdir.mkdir("ft1_dummy")
+ testdir.tmpdir.join("conftest.py").move(ftdir.join("conftest.py"))
+ rec = testdir.runpytest_subprocess("--strict")
+ rec.assert_outcomes()
+
+
+def test_markers_option_with_plugin_in_current_dir(testdir):
+ testdir.makeconftest('pytest_plugins = "flip_flop"')
+ testdir.makepyfile(
+ flip_flop="""\
+ def pytest_configure(config):
+ config.addinivalue_line("markers", "flip:flop")
+
+ def pytest_generate_tests(metafunc):
+ try:
+ mark = metafunc.function.flipper
+ except AttributeError:
+ return
+ metafunc.parametrize("x", (10, 20))"""
+ )
+ testdir.makepyfile(
+ """\
+ import pytest
+ @pytest.mark.flipper
+ def test_example(x):
+ assert x"""
+ )
+
+ result = testdir.runpytest("--markers")
+ result.stdout.fnmatch_lines(["*flip*flop*"])
+
+
+def test_mark_on_pseudo_function(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.r(lambda x: 0/0)
+ def test_hello():
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+def test_strict_prohibits_unregistered_markers(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.unregisteredmark
+ def test_hello():
+ pass
+ """
+ )
+ result = testdir.runpytest("--strict")
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(["*unregisteredmark*not*registered*"])
+
+
+@pytest.mark.parametrize(
+ "spec",
+ [
+ ("xyz", ("test_one",)),
+ ("xyz and xyz2", ()),
+ ("xyz2", ("test_two",)),
+ ("xyz or xyz2", ("test_one", "test_two")),
+ ],
+)
+def test_mark_option(spec, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xyz
+ def test_one():
+ pass
+ @pytest.mark.xyz2
+ def test_two():
+ pass
+ """
+ )
+ opt, passed_result = spec
+ rec = testdir.inline_run("-m", opt)
+ passed, skipped, fail = rec.listoutcomes()
+ passed = [x.nodeid.split("::")[-1] for x in passed]
+ assert len(passed) == len(passed_result)
+ assert list(passed) == list(passed_result)
+
+
+@pytest.mark.parametrize(
+ "spec", [("interface", ("test_interface",)), ("not interface", ("test_nointer",))]
+)
+def test_mark_option_custom(spec, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ def pytest_collection_modifyitems(items):
+ for item in items:
+ if "interface" in item.nodeid:
+ item.add_marker(pytest.mark.interface)
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_interface():
+ pass
+ def test_nointer():
+ pass
+ """
+ )
+ opt, passed_result = spec
+ rec = testdir.inline_run("-m", opt)
+ passed, skipped, fail = rec.listoutcomes()
+ passed = [x.nodeid.split("::")[-1] for x in passed]
+ assert len(passed) == len(passed_result)
+ assert list(passed) == list(passed_result)
+
+
+@pytest.mark.parametrize(
+ "spec",
+ [
+ ("interface", ("test_interface",)),
+ ("not interface", ("test_nointer", "test_pass")),
+ ("pass", ("test_pass",)),
+ ("not pass", ("test_interface", "test_nointer")),
+ ],
+)
+def test_keyword_option_custom(spec, testdir):
+ testdir.makepyfile(
+ """
+ def test_interface():
+ pass
+ def test_nointer():
+ pass
+ def test_pass():
+ pass
+ """
+ )
+ opt, passed_result = spec
+ rec = testdir.inline_run("-k", opt)
+ passed, skipped, fail = rec.listoutcomes()
+ passed = [x.nodeid.split("::")[-1] for x in passed]
+ assert len(passed) == len(passed_result)
+ assert list(passed) == list(passed_result)
+
+
+@pytest.mark.parametrize(
+ "spec",
+ [
+ ("None", ("test_func[None]",)),
+ ("1.3", ("test_func[1.3]",)),
+ ("2-3", ("test_func[2-3]",)),
+ ],
+)
+def test_keyword_option_parametrize(spec, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize("arg", [None, 1.3, "2-3"])
+ def test_func(arg):
+ pass
+ """
+ )
+ opt, passed_result = spec
+ rec = testdir.inline_run("-k", opt)
+ passed, skipped, fail = rec.listoutcomes()
+ passed = [x.nodeid.split("::")[-1] for x in passed]
+ assert len(passed) == len(passed_result)
+ assert list(passed) == list(passed_result)
+
+
+@pytest.mark.parametrize(
+ "spec",
+ [
+ (
+ "foo or import",
+ "ERROR: Python keyword 'import' not accepted in expressions passed to '-k'",
+ ),
+ ("foo or", "ERROR: Wrong expression passed to '-k': foo or"),
+ ],
+)
+def test_keyword_option_wrong_arguments(spec, testdir, capsys):
+ testdir.makepyfile(
+ """
+ def test_func(arg):
+ pass
+ """
+ )
+ opt, expected_result = spec
+ testdir.inline_run("-k", opt)
+ out = capsys.readouterr().err
+ assert expected_result in out
+
+
+def test_parametrized_collected_from_command_line(testdir):
+ """Parametrized test not collected if test named specified
+ in command line issue#649.
+ """
+ py_file = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize("arg", [None, 1.3, "2-3"])
+ def test_func(arg):
+ pass
+ """
+ )
+ file_name = os.path.basename(py_file.strpath)
+ rec = testdir.inline_run(file_name + "::" + "test_func")
+ rec.assertoutcome(passed=3)
+
+
+def test_parametrized_collect_with_wrong_args(testdir):
+ """Test collect parametrized func with wrong number of args."""
+ py_file = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize('foo, bar', [(1, 2, 3)])
+ def test_func(foo, bar):
+ pass
+ """
+ )
+
+ result = testdir.runpytest(py_file)
+ result.stdout.fnmatch_lines(
+ [
+ 'E ValueError: In "parametrize" the number of values ((1, 2, 3)) '
+ "must be equal to the number of names (['foo', 'bar'])"
+ ]
+ )
+
+
+def test_parametrized_with_kwargs(testdir):
+ """Test collect parametrized func with wrong number of args."""
+ py_file = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1,2])
+ def a(request):
+ return request.param
+
+ @pytest.mark.parametrize(argnames='b', argvalues=[1, 2])
+ def test_func(a, b):
+ pass
+ """
+ )
+
+ result = testdir.runpytest(py_file)
+ assert result.ret == 0
+
+
+class TestFunctional(object):
+
+ def test_mark_per_function(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.hello
+ def test_hello():
+ assert hasattr(test_hello, 'hello')
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_mark_per_module(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ pytestmark = pytest.mark.hello
+ def test_func():
+ pass
+ """
+ )
+ keywords = item.keywords
+ assert "hello" in keywords
+
+ def test_marklist_per_class(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ class TestClass(object):
+ pytestmark = [pytest.mark.hello, pytest.mark.world]
+ def test_func(self):
+ assert TestClass.test_func.hello
+ assert TestClass.test_func.world
+ """
+ )
+ keywords = item.keywords
+ assert "hello" in keywords
+
+ def test_marklist_per_module(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ pytestmark = [pytest.mark.hello, pytest.mark.world]
+ class TestClass(object):
+ def test_func(self):
+ assert TestClass.test_func.hello
+ assert TestClass.test_func.world
+ """
+ )
+ keywords = item.keywords
+ assert "hello" in keywords
+ assert "world" in keywords
+
+ def test_mark_per_class_decorator(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.hello
+ class TestClass(object):
+ def test_func(self):
+ assert TestClass.test_func.hello
+ """
+ )
+ keywords = item.keywords
+ assert "hello" in keywords
+
+ def test_mark_per_class_decorator_plus_existing_dec(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.hello
+ class TestClass(object):
+ pytestmark = pytest.mark.world
+ def test_func(self):
+ assert TestClass.test_func.hello
+ assert TestClass.test_func.world
+ """
+ )
+ keywords = item.keywords
+ assert "hello" in keywords
+ assert "world" in keywords
+
+ @ignore_markinfo
+ def test_merging_markers(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ pytestmark = pytest.mark.hello("pos1", x=1, y=2)
+ class TestClass(object):
+ # classlevel overrides module level
+ pytestmark = pytest.mark.hello(x=3)
+ @pytest.mark.hello("pos0", z=4)
+ def test_func(self):
+ pass
+ """
+ )
+ items, rec = testdir.inline_genitems(p)
+ item, = items
+ keywords = item.keywords
+ marker = keywords["hello"]
+ assert marker.args == ("pos0", "pos1")
+ assert marker.kwargs == {"x": 1, "y": 2, "z": 4}
+
+ # test the new __iter__ interface
+ values = list(marker)
+ assert len(values) == 3
+ assert values[0].args == ("pos0",)
+ assert values[1].args == ()
+ assert values[2].args == ("pos1",)
+
+ def test_merging_markers_deep(self, testdir):
+ # issue 199 - propagate markers into nested classes
+ p = testdir.makepyfile(
+ """
+ import pytest
+ class TestA(object):
+ pytestmark = pytest.mark.a
+ def test_b(self):
+ assert True
+ class TestC(object):
+ # this one didn't get marked
+ def test_d(self):
+ assert True
+ """
+ )
+ items, rec = testdir.inline_genitems(p)
+ for item in items:
+ print(item, item.keywords)
+ assert [x for x in item.iter_markers() if x.name == "a"]
+
+ def test_mark_decorator_subclass_does_not_propagate_to_base(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.a
+ class Base(object): pass
+
+ @pytest.mark.b
+ class Test1(Base):
+ def test_foo(self): pass
+
+ class Test2(Base):
+ def test_bar(self): pass
+ """
+ )
+ items, rec = testdir.inline_genitems(p)
+ self.assert_markers(items, test_foo=("a", "b"), test_bar=("a",))
+
+ @pytest.mark.issue568
+ def test_mark_should_not_pass_to_siebling_class(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+
+ class TestBase(object):
+ def test_foo(self):
+ pass
+
+ @pytest.mark.b
+ class TestSub(TestBase):
+ pass
+
+
+ class TestOtherSub(TestBase):
+ pass
+
+ """
+ )
+ items, rec = testdir.inline_genitems(p)
+ base_item, sub_item, sub_item_other = items
+ print(items, [x.nodeid for x in items])
+ # legacy api smears
+ assert hasattr(base_item.obj, "b")
+ assert hasattr(sub_item_other.obj, "b")
+ assert hasattr(sub_item.obj, "b")
+
+ # new api segregates
+ assert not list(base_item.iter_markers(name="b"))
+ assert not list(sub_item_other.iter_markers(name="b"))
+ assert list(sub_item.iter_markers(name="b"))
+
+ def test_mark_decorator_baseclasses_merged(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.a
+ class Base(object): pass
+
+ @pytest.mark.b
+ class Base2(Base): pass
+
+ @pytest.mark.c
+ class Test1(Base2):
+ def test_foo(self): pass
+
+ class Test2(Base2):
+ @pytest.mark.d
+ def test_bar(self): pass
+ """
+ )
+ items, rec = testdir.inline_genitems(p)
+ self.assert_markers(items, test_foo=("a", "b", "c"), test_bar=("a", "b", "d"))
+
+ def test_mark_closest(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.c(location="class")
+ class Test:
+ @pytest.mark.c(location="function")
+ def test_has_own():
+ pass
+
+ def test_has_inherited():
+ pass
+
+ """
+ )
+ items, rec = testdir.inline_genitems(p)
+ has_own, has_inherited = items
+ assert has_own.get_closest_marker("c").kwargs == {"location": "function"}
+ assert has_inherited.get_closest_marker("c").kwargs == {"location": "class"}
+ assert has_own.get_closest_marker("missing") is None
+
+ def test_mark_with_wrong_marker(self, testdir):
+ reprec = testdir.inline_runsource(
+ """
+ import pytest
+ class pytestmark(object):
+ pass
+ def test_func():
+ pass
+ """
+ )
+ values = reprec.getfailedcollections()
+ assert len(values) == 1
+ assert "TypeError" in str(values[0].longrepr)
+
+ def test_mark_dynamically_in_funcarg(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ @pytest.fixture
+ def arg(request):
+ request.applymarker(pytest.mark.hello)
+ def pytest_terminal_summary(terminalreporter):
+ values = terminalreporter.stats['passed']
+ terminalreporter._tw.line("keyword: %s" % values[0].keywords)
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_func(arg):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["keyword: *hello*"])
+
+ @ignore_markinfo
+ def test_merging_markers_two_functions(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.hello("pos1", z=4)
+ @pytest.mark.hello("pos0", z=3)
+ def test_func():
+ pass
+ """
+ )
+ items, rec = testdir.inline_genitems(p)
+ item, = items
+ keywords = item.keywords
+ marker = keywords["hello"]
+ values = list(marker)
+ assert len(values) == 2
+ assert values[0].args == ("pos0",)
+ assert values[1].args == ("pos1",)
+
+ def test_no_marker_match_on_unmarked_names(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.shouldmatch
+ def test_marked():
+ assert 1
+
+ def test_unmarked():
+ assert 1
+ """
+ )
+ reprec = testdir.inline_run("-m", "test_unmarked", p)
+ passed, skipped, failed = reprec.listoutcomes()
+ assert len(passed) + len(skipped) + len(failed) == 0
+ dlist = reprec.getcalls("pytest_deselected")
+ deselected_tests = dlist[0].items
+ assert len(deselected_tests) == 2
+
+ def test_keywords_at_node_level(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="session", autouse=True)
+ def some(request):
+ request.keywords["hello"] = 42
+ assert "world" not in request.keywords
+
+ @pytest.fixture(scope="function", autouse=True)
+ def funcsetup(request):
+ assert "world" in request.keywords
+ assert "hello" in request.keywords
+
+ @pytest.mark.world
+ def test_function():
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ @ignore_markinfo
+ def test_keyword_added_for_session(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ def pytest_collection_modifyitems(session):
+ session.add_marker("mark1")
+ session.add_marker(pytest.mark.mark2)
+ session.add_marker(pytest.mark.mark3)
+ pytest.raises(ValueError, lambda:
+ session.add_marker(10))
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_some(request):
+ assert "mark1" in request.keywords
+ assert "mark2" in request.keywords
+ assert "mark3" in request.keywords
+ assert 10 not in request.keywords
+ marker = request.node.get_marker("mark1")
+ assert marker.name == "mark1"
+ assert marker.args == ()
+ assert marker.kwargs == {}
+ """
+ )
+ reprec = testdir.inline_run("-m", "mark1")
+ reprec.assertoutcome(passed=1)
+
+ def assert_markers(self, items, **expected):
+ """assert that given items have expected marker names applied to them.
+ expected should be a dict of (item name -> seq of expected marker names)
+
+ .. note:: this could be moved to ``testdir`` if proven to be useful
+ to other modules.
+ """
+ from _pytest.mark import MarkInfo
+
+ items = {x.name: x for x in items}
+ for name, expected_markers in expected.items():
+ markers = items[name].keywords._markers
+ marker_names = {
+ name for (name, v) in markers.items() if isinstance(v, MarkInfo)
+ }
+ assert marker_names == set(expected_markers)
+
+ @pytest.mark.issue1540
+ @pytest.mark.filterwarnings("ignore")
+ def test_mark_from_parameters(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ pytestmark = pytest.mark.skipif(True, reason='skip all')
+
+ # skipifs inside fixture params
+ params = [pytest.mark.skipif(False, reason='dont skip')('parameter')]
+
+
+ @pytest.fixture(params=params)
+ def parameter(request):
+ return request.param
+
+
+ def test_1(parameter):
+ assert True
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(skipped=1)
+
+
+class TestKeywordSelection(object):
+
+ def test_select_simple(self, testdir):
+ file_test = testdir.makepyfile(
+ """
+ def test_one():
+ assert 0
+ class TestClass(object):
+ def test_method_one(self):
+ assert 42 == 43
+ """
+ )
+
+ def check(keyword, name):
+ reprec = testdir.inline_run("-s", "-k", keyword, file_test)
+ passed, skipped, failed = reprec.listoutcomes()
+ assert len(failed) == 1
+ assert failed[0].nodeid.split("::")[-1] == name
+ assert len(reprec.getcalls("pytest_deselected")) == 1
+
+ for keyword in ["test_one", "est_on"]:
+ check(keyword, "test_one")
+ check("TestClass and test", "test_method_one")
+
+ @pytest.mark.parametrize(
+ "keyword",
+ [
+ "xxx",
+ "xxx and test_2",
+ "TestClass",
+ "xxx and not test_1",
+ "TestClass and test_2",
+ "xxx and TestClass and test_2",
+ ],
+ )
+ def test_select_extra_keywords(self, testdir, keyword):
+ p = testdir.makepyfile(
+ test_select="""
+ def test_1():
+ pass
+ class TestClass(object):
+ def test_2(self):
+ pass
+ """
+ )
+ testdir.makepyfile(
+ conftest="""
+ import pytest
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_pycollect_makeitem(name):
+ outcome = yield
+ if name == "TestClass":
+ item = outcome.get_result()
+ item.extra_keyword_matches.add("xxx")
+ """
+ )
+ reprec = testdir.inline_run(p.dirpath(), "-s", "-k", keyword)
+ print("keyword", repr(keyword))
+ passed, skipped, failed = reprec.listoutcomes()
+ assert len(passed) == 1
+ assert passed[0].nodeid.endswith("test_2")
+ dlist = reprec.getcalls("pytest_deselected")
+ assert len(dlist) == 1
+ assert dlist[0].items[0].name == "test_1"
+
+ def test_select_starton(self, testdir):
+ threepass = testdir.makepyfile(
+ test_threepass="""
+ def test_one(): assert 1
+ def test_two(): assert 1
+ def test_three(): assert 1
+ """
+ )
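+ # a trailing ":" selects the named test and everything after it in the file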
+ reprec = testdir.inline_run("-k", "test_two:", threepass)
+ passed, skipped, failed = reprec.listoutcomes()
+ assert len(passed) == 2
+ assert not failed
+ dlist = reprec.getcalls("pytest_deselected")
+ assert len(dlist) == 1
+ item = dlist[0].items[0]
+ assert item.name == "test_one"
+
+ def test_keyword_extra(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def test_one():
+ assert 0
+ test_one.mykeyword = True
+ """
+ )
+ reprec = testdir.inline_run("-k", "mykeyword", p)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 1
+
+ @pytest.mark.xfail
+ def test_keyword_extra_dash(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def test_one():
+ assert 0
+ test_one.mykeyword = True
+ """
+ )
+ # with argparse the argument to an option cannot
+ # start with '-'
+ reprec = testdir.inline_run("-k", "-mykeyword", p)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert passed + skipped + failed == 0
+
+ def test_no_magic_values(self, testdir):
+ """Make sure the tests do not match on magic values,
+ no double underscored values, like '__dict__',
+ and no instance values, like '()'.
+ """
+ p = testdir.makepyfile(
+ """
+ def test_one(): assert 1
+ """
+ )
+
+ def assert_test_is_not_selected(keyword):
+ reprec = testdir.inline_run("-k", keyword, p)
+ passed, skipped, failed = reprec.countoutcomes()
+ dlist = reprec.getcalls("pytest_deselected")
+ assert passed + skipped + failed == 0
+ deselected_tests = dlist[0].items
+ assert len(deselected_tests) == 1
+
+ assert_test_is_not_selected("__")
+ assert_test_is_not_selected("()")
+
+
+@pytest.mark.parametrize(
+ "argval, expected",
+ [
+ (
+ pytest.mark.skip()((1, 2)),
+ ParameterSet(values=(1, 2), marks=[pytest.mark.skip], id=None),
+ ),
+ (
+ pytest.mark.xfail(pytest.mark.skip()((1, 2))),
+ ParameterSet(
+ values=(1, 2), marks=[pytest.mark.xfail, pytest.mark.skip], id=None
+ ),
+ ),
+ ],
+)
+@pytest.mark.filterwarnings("ignore")
+def test_parameterset_extractfrom(argval, expected):
+ extracted = ParameterSet.extract_from(argval)
+ assert extracted == expected
+
+
+def test_legacy_transfer():
+
+ class FakeModule(object):
+ pytestmark = []
+
+ class FakeClass(object):
+ pytestmark = pytest.mark.nofun
+
+ @pytest.mark.fun
+ def fake_method(self):
+ pass
+
+ transfer_markers(fake_method, FakeClass, FakeModule)
+
+ # legacy marks transfer smeared
+ assert fake_method.nofun
+ assert fake_method.fun
+ # pristine marks don't transfer
+ assert fake_method.pytestmark == [pytest.mark.fun.mark]
+
+
+class TestMarkDecorator(object):
+
+ @pytest.mark.parametrize(
+ "lhs, rhs, expected",
+ [
+ (pytest.mark.foo(), pytest.mark.foo(), True),
+ (pytest.mark.foo(), pytest.mark.bar(), False),
+ (pytest.mark.foo(), "bar", False),
+ ("foo", pytest.mark.bar(), False),
+ ],
+ )
+ def test__eq__(self, lhs, rhs, expected):
+ assert (lhs == rhs) == expected
+
+
+@pytest.mark.parametrize("mark", [None, "", "skip", "xfail"])
+def test_parameterset_for_parametrize_marks(testdir, mark):
+ if mark is not None:
+ testdir.makeini("[pytest]\n{}={}".format(EMPTY_PARAMETERSET_OPTION, mark))
+
+ config = testdir.parseconfig()
+ from _pytest.mark import pytest_configure, get_empty_parameterset_mark
+
+ pytest_configure(config)
+ result_mark = get_empty_parameterset_mark(config, ["a"], all)
+ if mark in (None, ""):
+ # normalize to the requested name
+ mark = "skip"
+ assert result_mark.name == mark
+ assert result_mark.kwargs["reason"].startswith("got empty parameter set ")
+ if mark == "xfail":
+ assert result_mark.kwargs.get("run") is False
+
+
+def test_parameterset_for_parametrize_bad_markname(testdir):
+ with pytest.raises(pytest.UsageError):
+ test_parameterset_for_parametrize_marks(testdir, "bad")
+
+
+def test_mark_expressions_no_smear(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ class BaseTests(object):
+ def test_something(self):
+ pass
+
+ @pytest.mark.FOO
+ class TestFooClass(BaseTests):
+ pass
+
+ @pytest.mark.BAR
+ class TestBarClass(BaseTests):
+ pass
+ """
+ )
+
+ reprec = testdir.inline_run("-m", "FOO")
+ passed, skipped, failed = reprec.countoutcomes()
+ dlist = reprec.getcalls("pytest_deselected")
+ assert passed == 1
+ assert skipped == failed == 0
+ deselected_tests = dlist[0].items
+ assert len(deselected_tests) == 1
+
+ # keywords smear - expected behaviour
+ reprec_keywords = testdir.inline_run("-k", "FOO")
+ passed_k, skipped_k, failed_k = reprec_keywords.countoutcomes()
+ assert passed_k == 2
+ assert skipped_k == failed_k == 0
+
+
+def test_addmarker_getmarker():
+ node = Node("Test", config=mock.Mock(), session=mock.Mock(), nodeid="Test")
+ node.add_marker(pytest.mark.a(1))
+ node.add_marker("b")
+ node.get_marker("a").combined
+ node.get_marker("b").combined
diff --git a/third_party/python/pytest/testing/test_modimport.py b/third_party/python/pytest/testing/test_modimport.py
new file mode 100644
index 0000000000..f7b92a0b6f
--- /dev/null
+++ b/third_party/python/pytest/testing/test_modimport.py
@@ -0,0 +1,29 @@
+import py
+import subprocess
+import sys
+import pytest
+import _pytest
+
+MODSET = [
+ x
+ for x in py.path.local(_pytest.__file__).dirpath().visit("*.py")
+ if x.purebasename != "__init__"
+]
+
+
+@pytest.mark.parametrize("modfile", MODSET, ids=lambda x: x.purebasename)
+def test_fileimport(modfile):
+    # this test ensures all internal packages can be imported
+    # without needing the pytest namespace to be set first;
+    # this is critical for the initialization of xdist
+
+ res = subprocess.call(
+ [
+ sys.executable,
+ "-c",
+ "import sys, py; py.path.local(sys.argv[1]).pyimport()",
+ modfile.strpath,
+ ]
+ )
+ if res:
+ pytest.fail("command result %s" % res)
diff --git a/third_party/python/pytest/testing/test_monkeypatch.py b/third_party/python/pytest/testing/test_monkeypatch.py
new file mode 100644
index 0000000000..c298ce0d92
--- /dev/null
+++ b/third_party/python/pytest/testing/test_monkeypatch.py
@@ -0,0 +1,368 @@
+from __future__ import absolute_import, division, print_function
+import os
+import sys
+import textwrap
+
+import pytest
+from _pytest.monkeypatch import MonkeyPatch
+
+
+@pytest.fixture
+def mp():
+ cwd = os.getcwd()
+ sys_path = list(sys.path)
+ yield MonkeyPatch()
+ sys.path[:] = sys_path
+ os.chdir(cwd)
+
+
+def test_setattr():
+
+ class A(object):
+ x = 1
+
+ monkeypatch = MonkeyPatch()
+ pytest.raises(AttributeError, "monkeypatch.setattr(A, 'notexists', 2)")
+ monkeypatch.setattr(A, "y", 2, raising=False)
+ assert A.y == 2
+ monkeypatch.undo()
+ assert not hasattr(A, "y")
+
+ monkeypatch = MonkeyPatch()
+ monkeypatch.setattr(A, "x", 2)
+ assert A.x == 2
+ monkeypatch.setattr(A, "x", 3)
+ assert A.x == 3
+ monkeypatch.undo()
+ assert A.x == 1
+
+ A.x = 5
+ monkeypatch.undo() # double-undo makes no modification
+ assert A.x == 5
+
+
+class TestSetattrWithImportPath(object):
+
+ def test_string_expression(self, monkeypatch):
+ monkeypatch.setattr("os.path.abspath", lambda x: "hello2")
+ assert os.path.abspath("123") == "hello2"
+
+ def test_string_expression_class(self, monkeypatch):
+ monkeypatch.setattr("_pytest.config.Config", 42)
+ import _pytest
+
+ assert _pytest.config.Config == 42
+
+ def test_unicode_string(self, monkeypatch):
+ monkeypatch.setattr("_pytest.config.Config", 42)
+ import _pytest
+
+ assert _pytest.config.Config == 42
+ monkeypatch.delattr("_pytest.config.Config")
+
+ def test_wrong_target(self, monkeypatch):
+ pytest.raises(TypeError, lambda: monkeypatch.setattr(None, None))
+
+ def test_unknown_import(self, monkeypatch):
+ pytest.raises(ImportError, lambda: monkeypatch.setattr("unkn123.classx", None))
+
+ def test_unknown_attr(self, monkeypatch):
+ pytest.raises(
+ AttributeError, lambda: monkeypatch.setattr("os.path.qweqwe", None)
+ )
+
+ def test_unknown_attr_non_raising(self, monkeypatch):
+ # https://github.com/pytest-dev/pytest/issues/746
+ monkeypatch.setattr("os.path.qweqwe", 42, raising=False)
+ assert os.path.qweqwe == 42
+
+ def test_delattr(self, monkeypatch):
+ monkeypatch.delattr("os.path.abspath")
+ assert not hasattr(os.path, "abspath")
+ monkeypatch.undo()
+ assert os.path.abspath
+
+
+def test_delattr():
+
+ class A(object):
+ x = 1
+
+ monkeypatch = MonkeyPatch()
+ monkeypatch.delattr(A, "x")
+ assert not hasattr(A, "x")
+ monkeypatch.undo()
+ assert A.x == 1
+
+ monkeypatch = MonkeyPatch()
+ monkeypatch.delattr(A, "x")
+ pytest.raises(AttributeError, "monkeypatch.delattr(A, 'y')")
+ monkeypatch.delattr(A, "y", raising=False)
+ monkeypatch.setattr(A, "x", 5, raising=False)
+ assert A.x == 5
+ monkeypatch.undo()
+ assert A.x == 1
+
+
+def test_setitem():
+ d = {"x": 1}
+ monkeypatch = MonkeyPatch()
+ monkeypatch.setitem(d, "x", 2)
+ monkeypatch.setitem(d, "y", 1700)
+ monkeypatch.setitem(d, "y", 1700)
+ assert d["x"] == 2
+ assert d["y"] == 1700
+ monkeypatch.setitem(d, "x", 3)
+ assert d["x"] == 3
+ monkeypatch.undo()
+ assert d["x"] == 1
+ assert "y" not in d
+ d["x"] = 5
+ monkeypatch.undo()
+ assert d["x"] == 5
+
+
+def test_setitem_deleted_meanwhile():
+ d = {}
+ monkeypatch = MonkeyPatch()
+ monkeypatch.setitem(d, "x", 2)
+ del d["x"]
+ monkeypatch.undo()
+ assert not d
+
+
+@pytest.mark.parametrize("before", [True, False])
+def test_setenv_deleted_meanwhile(before):
+ key = "qwpeoip123"
+ if before:
+ os.environ[key] = "world"
+ monkeypatch = MonkeyPatch()
+ monkeypatch.setenv(key, "hello")
+ del os.environ[key]
+ monkeypatch.undo()
+ if before:
+ assert os.environ[key] == "world"
+ del os.environ[key]
+ else:
+ assert key not in os.environ
+
+
+def test_delitem():
+ d = {"x": 1}
+ monkeypatch = MonkeyPatch()
+ monkeypatch.delitem(d, "x")
+ assert "x" not in d
+ monkeypatch.delitem(d, "y", raising=False)
+ pytest.raises(KeyError, "monkeypatch.delitem(d, 'y')")
+ assert not d
+ monkeypatch.setitem(d, "y", 1700)
+ assert d["y"] == 1700
+ d["hello"] = "world"
+ monkeypatch.setitem(d, "x", 1500)
+ assert d["x"] == 1500
+ monkeypatch.undo()
+ assert d == {"hello": "world", "x": 1}
+
+
+def test_setenv():
+ monkeypatch = MonkeyPatch()
+ monkeypatch.setenv("XYZ123", 2)
+ import os
+
+ assert os.environ["XYZ123"] == "2"
+ monkeypatch.undo()
+ assert "XYZ123" not in os.environ
+
+
+def test_delenv():
+ name = "xyz1234"
+ assert name not in os.environ
+ monkeypatch = MonkeyPatch()
+ pytest.raises(KeyError, "monkeypatch.delenv(%r, raising=True)" % name)
+ monkeypatch.delenv(name, raising=False)
+ monkeypatch.undo()
+ os.environ[name] = "1"
+ try:
+ monkeypatch = MonkeyPatch()
+ monkeypatch.delenv(name)
+ assert name not in os.environ
+ monkeypatch.setenv(name, "3")
+ assert os.environ[name] == "3"
+ monkeypatch.undo()
+ assert os.environ[name] == "1"
+ finally:
+ if name in os.environ:
+ del os.environ[name]
+
+
+def test_setenv_prepend():
+ import os
+
+ monkeypatch = MonkeyPatch()
+ monkeypatch.setenv("XYZ123", 2, prepend="-")
+ assert os.environ["XYZ123"] == "2"
+ monkeypatch.setenv("XYZ123", 3, prepend="-")
+ assert os.environ["XYZ123"] == "3-2"
+ monkeypatch.undo()
+ assert "XYZ123" not in os.environ
+
+
+def test_monkeypatch_plugin(testdir):
+ reprec = testdir.inline_runsource(
+ """
+ def test_method(monkeypatch):
+ assert monkeypatch.__class__.__name__ == "MonkeyPatch"
+ """
+ )
+ res = reprec.countoutcomes()
+ assert tuple(res) == (1, 0, 0), res
+
+
+def test_syspath_prepend(mp):
+ old = list(sys.path)
+ mp.syspath_prepend("world")
+ mp.syspath_prepend("hello")
+ assert sys.path[0] == "hello"
+ assert sys.path[1] == "world"
+ mp.undo()
+ assert sys.path == old
+ mp.undo()
+ assert sys.path == old
+
+
+def test_syspath_prepend_double_undo(mp):
+ mp.syspath_prepend("hello world")
+ mp.undo()
+ sys.path.append("more hello world")
+ mp.undo()
+ assert sys.path[-1] == "more hello world"
+
+
+def test_chdir_with_path_local(mp, tmpdir):
+ mp.chdir(tmpdir)
+ assert os.getcwd() == tmpdir.strpath
+
+
+def test_chdir_with_str(mp, tmpdir):
+ mp.chdir(tmpdir.strpath)
+ assert os.getcwd() == tmpdir.strpath
+
+
+def test_chdir_undo(mp, tmpdir):
+ cwd = os.getcwd()
+ mp.chdir(tmpdir)
+ mp.undo()
+ assert os.getcwd() == cwd
+
+
+def test_chdir_double_undo(mp, tmpdir):
+ mp.chdir(tmpdir.strpath)
+ mp.undo()
+ tmpdir.chdir()
+ mp.undo()
+ assert os.getcwd() == tmpdir.strpath
+
+
+def test_issue185_time_breaks(testdir):
+ testdir.makepyfile(
+ """
+ import time
+ def test_m(monkeypatch):
+ def f():
+ raise Exception
+ monkeypatch.setattr(time, "time", f)
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *1 passed*
+ """
+ )
+
+
+def test_importerror(testdir):
+ p = testdir.mkpydir("package")
+ p.join("a.py").write(
+ textwrap.dedent(
+ """\
+ import doesnotexist
+
+ x = 1
+ """
+ )
+ )
+ testdir.tmpdir.join("test_importerror.py").write(
+ textwrap.dedent(
+ """\
+ def test_importerror(monkeypatch):
+ monkeypatch.setattr('package.a.x', 2)
+ """
+ )
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *import error in package.a: No module named {0}doesnotexist{0}*
+ """.format(
+ "'" if sys.version_info > (3, 0) else ""
+ )
+ )
+
+
+class SampleNew(object):
+
+ @staticmethod
+ def hello():
+ return True
+
+
+class SampleNewInherit(SampleNew):
+ pass
+
+
+class SampleOld(object):
+ # oldstyle on python2
+ @staticmethod
+ def hello():
+ return True
+
+
+class SampleOldInherit(SampleOld):
+ pass
+
+
+@pytest.mark.parametrize(
+ "Sample",
+ [SampleNew, SampleNewInherit, SampleOld, SampleOldInherit],
+ ids=["new", "new-inherit", "old", "old-inherit"],
+)
+def test_issue156_undo_staticmethod(Sample):
+ monkeypatch = MonkeyPatch()
+
+ monkeypatch.setattr(Sample, "hello", None)
+ assert Sample.hello is None
+
+ monkeypatch.undo()
+ assert Sample.hello()
+
+
+def test_issue1338_name_resolving():
+ pytest.importorskip("requests")
+ monkeypatch = MonkeyPatch()
+ try:
+ monkeypatch.delattr("requests.sessions.Session.request")
+ finally:
+ monkeypatch.undo()
+
+
+def test_context():
+ monkeypatch = MonkeyPatch()
+
+ import functools
+ import inspect
+
+ with monkeypatch.context() as m:
+ m.setattr(functools, "partial", 3)
+ assert not inspect.isclass(functools.partial)
+ assert inspect.isclass(functools.partial)
diff --git a/third_party/python/pytest/testing/test_nodes.py b/third_party/python/pytest/testing/test_nodes.py
new file mode 100644
index 0000000000..eee3ac8e9e
--- /dev/null
+++ b/third_party/python/pytest/testing/test_nodes.py
@@ -0,0 +1,21 @@
+import pytest
+
+from _pytest import nodes
+
+
+@pytest.mark.parametrize(
+ "baseid, nodeid, expected",
+ (
+ ("", "", True),
+ ("", "foo", True),
+ ("", "foo/bar", True),
+ ("", "foo/bar::TestBaz::()", True),
+ ("foo", "food", False),
+ ("foo/bar::TestBaz::()", "foo/bar", False),
+ ("foo/bar::TestBaz::()", "foo/bar::TestBop::()", False),
+ ("foo/bar", "foo/bar::TestBop::()", True),
+ ),
+)
+def test_ischildnode(baseid, nodeid, expected):
+ result = nodes.ischildnode(baseid, nodeid)
+ assert result is expected
diff --git a/third_party/python/pytest/testing/test_nose.py b/third_party/python/pytest/testing/test_nose.py
new file mode 100644
index 0000000000..abe7323754
--- /dev/null
+++ b/third_party/python/pytest/testing/test_nose.py
@@ -0,0 +1,433 @@
+from __future__ import absolute_import, division, print_function
+import pytest
+
+
+def setup_module(mod):
+ mod.nose = pytest.importorskip("nose")
+
+
+def test_nose_setup(testdir):
+ p = testdir.makepyfile(
+ """
+ values = []
+ from nose.tools import with_setup
+
+ @with_setup(lambda: values.append(1), lambda: values.append(2))
+ def test_hello():
+ assert values == [1]
+
+ def test_world():
+ assert values == [1,2]
+
+ test_hello.setup = lambda: values.append(1)
+ test_hello.teardown = lambda: values.append(2)
+ """
+ )
+ result = testdir.runpytest(p, "-p", "nose")
+ result.assert_outcomes(passed=2)
+
+
+def test_setup_func_with_setup_decorator():
+ from _pytest.nose import call_optional
+
+ values = []
+
+ class A(object):
+
+ @pytest.fixture(autouse=True)
+ def f(self):
+ values.append(1)
+
+ call_optional(A(), "f")
+ assert not values
+
+
+def test_setup_func_not_callable():
+ from _pytest.nose import call_optional
+
+ class A(object):
+ f = 1
+
+ call_optional(A(), "f")
+
+
+def test_nose_setup_func(testdir):
+ p = testdir.makepyfile(
+ """
+ from nose.tools import with_setup
+
+ values = []
+
+ def my_setup():
+ a = 1
+ values.append(a)
+
+ def my_teardown():
+ b = 2
+ values.append(b)
+
+ @with_setup(my_setup, my_teardown)
+ def test_hello():
+ print (values)
+ assert values == [1]
+
+ def test_world():
+ print (values)
+ assert values == [1,2]
+
+ """
+ )
+ result = testdir.runpytest(p, "-p", "nose")
+ result.assert_outcomes(passed=2)
+
+
+def test_nose_setup_func_failure(testdir):
+ p = testdir.makepyfile(
+ """
+ from nose.tools import with_setup
+
+ values = []
+ my_setup = lambda x: 1
+ my_teardown = lambda x: 2
+
+ @with_setup(my_setup, my_teardown)
+ def test_hello():
+ print (values)
+ assert values == [1]
+
+ def test_world():
+ print (values)
+ assert values == [1,2]
+
+ """
+ )
+ result = testdir.runpytest(p, "-p", "nose")
+ result.stdout.fnmatch_lines(["*TypeError: <lambda>()*"])
+
+
+def test_nose_setup_func_failure_2(testdir):
+ testdir.makepyfile(
+ """
+ values = []
+
+ my_setup = 1
+ my_teardown = 2
+
+ def test_hello():
+ assert values == []
+
+ test_hello.setup = my_setup
+ test_hello.teardown = my_teardown
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+def test_nose_setup_partial(testdir):
+ pytest.importorskip("functools")
+ p = testdir.makepyfile(
+ """
+ from functools import partial
+
+ values = []
+
+ def my_setup(x):
+ a = x
+ values.append(a)
+
+ def my_teardown(x):
+ b = x
+ values.append(b)
+
+ my_setup_partial = partial(my_setup, 1)
+ my_teardown_partial = partial(my_teardown, 2)
+
+ def test_hello():
+ print (values)
+ assert values == [1]
+
+ def test_world():
+ print (values)
+ assert values == [1,2]
+
+ test_hello.setup = my_setup_partial
+ test_hello.teardown = my_teardown_partial
+ """
+ )
+ result = testdir.runpytest(p, "-p", "nose")
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+
+def test_nose_test_generator_fixtures(testdir):
+ p = testdir.makepyfile(
+ """
+ # taken from nose-0.11.1 unit_tests/test_generator_fixtures.py
+ from nose.tools import eq_
+ called = []
+
+ def outer_setup():
+ called.append('outer_setup')
+
+ def outer_teardown():
+ called.append('outer_teardown')
+
+ def inner_setup():
+ called.append('inner_setup')
+
+ def inner_teardown():
+ called.append('inner_teardown')
+
+ def test_gen():
+ called[:] = []
+ for i in range(0, 5):
+ yield check, i
+
+ def check(i):
+ expect = ['outer_setup']
+ for x in range(0, i):
+ expect.append('inner_setup')
+ expect.append('inner_teardown')
+ expect.append('inner_setup')
+ eq_(called, expect)
+
+
+ test_gen.setup = outer_setup
+ test_gen.teardown = outer_teardown
+ check.setup = inner_setup
+ check.teardown = inner_teardown
+
+ class TestClass(object):
+ def setup(self):
+ print ("setup called in %s" % self)
+ self.called = ['setup']
+
+ def teardown(self):
+ print ("teardown called in %s" % self)
+ eq_(self.called, ['setup'])
+ self.called.append('teardown')
+
+ def test(self):
+ print ("test called in %s" % self)
+ for i in range(0, 5):
+ yield self.check, i
+
+ def check(self, i):
+ print ("check called in %s" % self)
+ expect = ['setup']
+ #for x in range(0, i):
+ # expect.append('setup')
+ # expect.append('teardown')
+ #expect.append('setup')
+ eq_(self.called, expect)
+ """
+ )
+ result = testdir.runpytest(p, "-p", "nose")
+ result.stdout.fnmatch_lines(["*10 passed*"])
+
+
+def test_module_level_setup(testdir):
+ testdir.makepyfile(
+ """
+ from nose.tools import with_setup
+ items = {}
+
+ def setup():
+ items[1]=1
+
+ def teardown():
+ del items[1]
+
+ def setup2():
+ items[2] = 2
+
+ def teardown2():
+ del items[2]
+
+ def test_setup_module_setup():
+ assert items[1] == 1
+
+ @with_setup(setup2, teardown2)
+ def test_local_setup():
+ assert items[2] == 2
+ assert 1 not in items
+ """
+ )
+ result = testdir.runpytest("-p", "nose")
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+
+def test_nose_style_setup_teardown(testdir):
+ testdir.makepyfile(
+ """
+ values = []
+
+ def setup_module():
+ values.append(1)
+
+ def teardown_module():
+ del values[0]
+
+ def test_hello():
+ assert values == [1]
+
+ def test_world():
+ assert values == [1]
+ """
+ )
+ result = testdir.runpytest("-p", "nose")
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+
+def test_nose_setup_ordering(testdir):
+ testdir.makepyfile(
+ """
+ def setup_module(mod):
+ mod.visited = True
+
+ class TestClass(object):
+ def setup(self):
+ assert visited
+ def test_first(self):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_apiwrapper_problem_issue260(testdir):
+    # this would end up trying to call an optional teardown on the class;
+    # for plain unittests we don't want nose behaviour
+ testdir.makepyfile(
+ """
+ import unittest
+ class TestCase(unittest.TestCase):
+ def setup(self):
+ #should not be called in unittest testcases
+ assert 0, 'setup'
+ def teardown(self):
+ #should not be called in unittest testcases
+ assert 0, 'teardown'
+ def setUp(self):
+ print('setup')
+ def tearDown(self):
+ print('teardown')
+ def test_fun(self):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.assert_outcomes(passed=1)
+
+
+def test_setup_teardown_linking_issue265(testdir):
+    # we accidentally didn't integrate nose setupstate with normal setupstate;
+    # this test ensures that won't happen again
+ testdir.makepyfile(
+ '''
+ import pytest
+
+ class TestGeneric(object):
+ def test_nothing(self):
+ """Tests the API of the implementation (for generic and specialized)."""
+
+ @pytest.mark.skipif("True", reason=
+ "Skip tests to check if teardown is skipped as well.")
+ class TestSkipTeardown(TestGeneric):
+
+ def setup(self):
+ """Sets up my specialized implementation for $COOL_PLATFORM."""
+ raise Exception("should not call setup for skipped tests")
+
+ def teardown(self):
+ """Undoes the setup."""
+ raise Exception("should not call teardown for skipped tests")
+ '''
+ )
+ reprec = testdir.runpytest()
+ reprec.assert_outcomes(passed=1, skipped=1)
+
+
+def test_SkipTest_during_collection(testdir):
+ p = testdir.makepyfile(
+ """
+ import nose
+ raise nose.SkipTest("during collection")
+ def test_failing():
+ assert False
+ """
+ )
+ result = testdir.runpytest(p)
+ result.assert_outcomes(skipped=1)
+
+
+def test_SkipTest_in_test(testdir):
+ testdir.makepyfile(
+ """
+ import nose
+
+ def test_skipping():
+ raise nose.SkipTest("in test")
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(skipped=1)
+
+
+def test_istest_function_decorator(testdir):
+ p = testdir.makepyfile(
+ """
+ import nose.tools
+ @nose.tools.istest
+ def not_test_prefix():
+ pass
+ """
+ )
+ result = testdir.runpytest(p)
+ result.assert_outcomes(passed=1)
+
+
+def test_nottest_function_decorator(testdir):
+ testdir.makepyfile(
+ """
+ import nose.tools
+ @nose.tools.nottest
+ def test_prefix():
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ assert not reprec.getfailedcollections()
+ calls = reprec.getreports("pytest_runtest_logreport")
+ assert not calls
+
+
+def test_istest_class_decorator(testdir):
+ p = testdir.makepyfile(
+ """
+ import nose.tools
+ @nose.tools.istest
+ class NotTestPrefix(object):
+ def test_method(self):
+ pass
+ """
+ )
+ result = testdir.runpytest(p)
+ result.assert_outcomes(passed=1)
+
+
+def test_nottest_class_decorator(testdir):
+ testdir.makepyfile(
+ """
+ import nose.tools
+ @nose.tools.nottest
+ class TestPrefix(object):
+ def test_method(self):
+ pass
+ """
+ )
+ reprec = testdir.inline_run()
+ assert not reprec.getfailedcollections()
+ calls = reprec.getreports("pytest_runtest_logreport")
+ assert not calls
diff --git a/third_party/python/pytest/testing/test_parseopt.py b/third_party/python/pytest/testing/test_parseopt.py
new file mode 100644
index 0000000000..79076cafd8
--- /dev/null
+++ b/third_party/python/pytest/testing/test_parseopt.py
@@ -0,0 +1,336 @@
+from __future__ import absolute_import, division, print_function
+import argparse
+import sys
+import os
+import py
+import pytest
+from _pytest.config import argparsing as parseopt
+
+
+@pytest.fixture
+def parser():
+ return parseopt.Parser()
+
+
+class TestParser(object):
+
+ def test_no_help_by_default(self, capsys):
+ parser = parseopt.Parser(usage="xyz")
+ pytest.raises(SystemExit, lambda: parser.parse(["-h"]))
+ out, err = capsys.readouterr()
+ assert err.find("error: unrecognized arguments") != -1
+
+ def test_argument(self):
+ with pytest.raises(parseopt.ArgumentError):
+ # need a short or long option
+ argument = parseopt.Argument()
+ argument = parseopt.Argument("-t")
+ assert argument._short_opts == ["-t"]
+ assert argument._long_opts == []
+ assert argument.dest == "t"
+ argument = parseopt.Argument("-t", "--test")
+ assert argument._short_opts == ["-t"]
+ assert argument._long_opts == ["--test"]
+ assert argument.dest == "test"
+ argument = parseopt.Argument("-t", "--test", dest="abc")
+ assert argument.dest == "abc"
+ assert (
+ str(argument)
+ == ("Argument(_short_opts: ['-t'], _long_opts: ['--test'], dest: 'abc')")
+ )
+
+ def test_argument_type(self):
+ argument = parseopt.Argument("-t", dest="abc", type=int)
+ assert argument.type is int
+ argument = parseopt.Argument("-t", dest="abc", type=str)
+ assert argument.type is str
+ argument = parseopt.Argument("-t", dest="abc", type=float)
+ assert argument.type is float
+ with pytest.warns(DeprecationWarning):
+ with pytest.raises(KeyError):
+ argument = parseopt.Argument("-t", dest="abc", type="choice")
+ argument = parseopt.Argument(
+ "-t", dest="abc", type=str, choices=["red", "blue"]
+ )
+ assert argument.type is str
+
+ def test_argument_processopt(self):
+ argument = parseopt.Argument("-t", type=int)
+ argument.default = 42
+ argument.dest = "abc"
+ res = argument.attrs()
+ assert res["default"] == 42
+ assert res["dest"] == "abc"
+
+ def test_group_add_and_get(self, parser):
+ group = parser.getgroup("hello", description="desc")
+ assert group.name == "hello"
+ assert group.description == "desc"
+
+ def test_getgroup_simple(self, parser):
+ group = parser.getgroup("hello", description="desc")
+ assert group.name == "hello"
+ assert group.description == "desc"
+ group2 = parser.getgroup("hello")
+ assert group2 is group
+
+ def test_group_ordering(self, parser):
+ parser.getgroup("1")
+ parser.getgroup("2")
+ parser.getgroup("3", after="1")
+ groups = parser._groups
+ groups_names = [x.name for x in groups]
+ assert groups_names == list("132")
+
+ def test_group_addoption(self):
+ group = parseopt.OptionGroup("hello")
+ group.addoption("--option1", action="store_true")
+ assert len(group.options) == 1
+ assert isinstance(group.options[0], parseopt.Argument)
+
+ def test_group_addoption_conflict(self):
+ group = parseopt.OptionGroup("hello again")
+ group.addoption("--option1", "--option-1", action="store_true")
+ with pytest.raises(ValueError) as err:
+ group.addoption("--option1", "--option-one", action="store_true")
+ assert str({"--option1"}) in str(err.value)
+
+ def test_group_shortopt_lowercase(self, parser):
+ group = parser.getgroup("hello")
+ pytest.raises(
+ ValueError,
+ """
+ group.addoption("-x", action="store_true")
+ """,
+ )
+ assert len(group.options) == 0
+ group._addoption("-x", action="store_true")
+ assert len(group.options) == 1
+
+ def test_parser_addoption(self, parser):
+ group = parser.getgroup("custom options")
+ assert len(group.options) == 0
+ group.addoption("--option1", action="store_true")
+ assert len(group.options) == 1
+
+ def test_parse(self, parser):
+ parser.addoption("--hello", dest="hello", action="store")
+ args = parser.parse(["--hello", "world"])
+ assert args.hello == "world"
+ assert not getattr(args, parseopt.FILE_OR_DIR)
+
+ def test_parse2(self, parser):
+ args = parser.parse([py.path.local()])
+ assert getattr(args, parseopt.FILE_OR_DIR)[0] == py.path.local()
+
+ def test_parse_known_args(self, parser):
+ parser.parse_known_args([py.path.local()])
+ parser.addoption("--hello", action="store_true")
+ ns = parser.parse_known_args(["x", "--y", "--hello", "this"])
+ assert ns.hello
+ assert ns.file_or_dir == ["x"]
+
+ def test_parse_known_and_unknown_args(self, parser):
+ parser.addoption("--hello", action="store_true")
+ ns, unknown = parser.parse_known_and_unknown_args(
+ ["x", "--y", "--hello", "this"]
+ )
+ assert ns.hello
+ assert ns.file_or_dir == ["x"]
+ assert unknown == ["--y", "this"]
+
+ def test_parse_will_set_default(self, parser):
+ parser.addoption("--hello", dest="hello", default="x", action="store")
+ option = parser.parse([])
+ assert option.hello == "x"
+ del option.hello
+ parser.parse_setoption([], option)
+ assert option.hello == "x"
+
+ def test_parse_setoption(self, parser):
+ parser.addoption("--hello", dest="hello", action="store")
+ parser.addoption("--world", dest="world", default=42)
+
+ class A(object):
+ pass
+
+ option = A()
+ args = parser.parse_setoption(["--hello", "world"], option)
+ assert option.hello == "world"
+ assert option.world == 42
+ assert not args
+
+ def test_parse_special_destination(self, parser):
+ parser.addoption("--ultimate-answer", type=int)
+ args = parser.parse(["--ultimate-answer", "42"])
+ assert args.ultimate_answer == 42
+
+ def test_parse_split_positional_arguments(self, parser):
+ parser.addoption("-R", action="store_true")
+ parser.addoption("-S", action="store_false")
+ args = parser.parse(["-R", "4", "2", "-S"])
+ assert getattr(args, parseopt.FILE_OR_DIR) == ["4", "2"]
+ args = parser.parse(["-R", "-S", "4", "2", "-R"])
+ assert getattr(args, parseopt.FILE_OR_DIR) == ["4", "2"]
+ assert args.R is True
+ assert args.S is False
+ args = parser.parse(["-R", "4", "-S", "2"])
+ assert getattr(args, parseopt.FILE_OR_DIR) == ["4", "2"]
+ assert args.R is True
+ assert args.S is False
+
+ def test_parse_defaultgetter(self):
+
+ def defaultget(option):
+ if not hasattr(option, "type"):
+ return
+ if option.type is int:
+ option.default = 42
+ elif option.type is str:
+ option.default = "world"
+
+ parser = parseopt.Parser(processopt=defaultget)
+ parser.addoption("--this", dest="this", type=int, action="store")
+ parser.addoption("--hello", dest="hello", type=str, action="store")
+ parser.addoption("--no", dest="no", action="store_true")
+ option = parser.parse([])
+ assert option.hello == "world"
+ assert option.this == 42
+ assert option.no is False
+
+ def test_drop_short_helper(self):
+ parser = argparse.ArgumentParser(
+ formatter_class=parseopt.DropShorterLongHelpFormatter
+ )
+ parser.add_argument(
+ "-t", "--twoword", "--duo", "--two-word", "--two", help="foo"
+ ).map_long_option = {
+ "two": "two-word"
+ }
+ # throws error on --deux only!
+ parser.add_argument(
+ "-d", "--deuxmots", "--deux-mots", action="store_true", help="foo"
+ ).map_long_option = {
+ "deux": "deux-mots"
+ }
+ parser.add_argument("-s", action="store_true", help="single short")
+ parser.add_argument("--abc", "-a", action="store_true", help="bar")
+ parser.add_argument("--klm", "-k", "--kl-m", action="store_true", help="bar")
+ parser.add_argument(
+ "-P", "--pq-r", "-p", "--pqr", action="store_true", help="bar"
+ )
+ parser.add_argument(
+ "--zwei-wort", "--zweiwort", "--zweiwort", action="store_true", help="bar"
+ )
+ parser.add_argument(
+ "-x", "--exit-on-first", "--exitfirst", action="store_true", help="spam"
+ ).map_long_option = {
+ "exitfirst": "exit-on-first"
+ }
+ parser.add_argument("files_and_dirs", nargs="*")
+ args = parser.parse_args(["-k", "--duo", "hallo", "--exitfirst"])
+ assert args.twoword == "hallo"
+ assert args.klm is True
+ assert args.zwei_wort is False
+ assert args.exit_on_first is True
+ assert args.s is False
+ args = parser.parse_args(["--deux-mots"])
+ with pytest.raises(AttributeError):
+ assert args.deux_mots is True
+ assert args.deuxmots is True
+ args = parser.parse_args(["file", "dir"])
+ assert "|".join(args.files_and_dirs) == "file|dir"
+
+ def test_drop_short_0(self, parser):
+ parser.addoption("--funcarg", "--func-arg", action="store_true")
+ parser.addoption("--abc-def", "--abc-def", action="store_true")
+ parser.addoption("--klm-hij", action="store_true")
+ args = parser.parse(["--funcarg", "--k"])
+ assert args.funcarg is True
+ assert args.abc_def is False
+ assert args.klm_hij is True
+
+ def test_drop_short_2(self, parser):
+ parser.addoption("--func-arg", "--doit", action="store_true")
+ args = parser.parse(["--doit"])
+ assert args.func_arg is True
+
+ def test_drop_short_3(self, parser):
+ parser.addoption("--func-arg", "--funcarg", "--doit", action="store_true")
+ args = parser.parse(["abcd"])
+ assert args.func_arg is False
+ assert args.file_or_dir == ["abcd"]
+
+ def test_drop_short_help0(self, parser, capsys):
+ parser.addoption("--func-args", "--doit", help="foo", action="store_true")
+ parser.parse([])
+ help = parser.optparser.format_help()
+ assert "--func-args, --doit foo" in help
+
+ # testing would be more helpful with all help generated
+ def test_drop_short_help1(self, parser, capsys):
+ group = parser.getgroup("general")
+ group.addoption("--doit", "--func-args", action="store_true", help="foo")
+ group._addoption(
+ "-h",
+ "--help",
+ action="store_true",
+ dest="help",
+ help="show help message and configuration info",
+ )
+ parser.parse(["-h"])
+ help = parser.optparser.format_help()
+ assert "-doit, --func-args foo" in help
+
+ def test_multiple_metavar_help(self, parser):
+ """
+ Help text for options with a metavar tuple should display help
+ in the form "--preferences=value1 value2 value3" (#2004).
+ """
+ group = parser.getgroup("general")
+ group.addoption(
+ "--preferences", metavar=("value1", "value2", "value3"), nargs=3
+ )
+ group._addoption("-h", "--help", action="store_true", dest="help")
+ parser.parse(["-h"])
+ help = parser.optparser.format_help()
+ assert "--preferences=value1 value2 value3" in help
+
+
+def test_argcomplete(testdir, monkeypatch):
+ if not py.path.local.sysfind("bash"):
+ pytest.skip("bash not available")
+ script = str(testdir.tmpdir.join("test_argcomplete"))
+ pytest_bin = sys.argv[0]
+ if "pytest" not in os.path.basename(pytest_bin):
+ pytest.skip("need to be run with pytest executable, not %s" % (pytest_bin,))
+
+ with open(str(script), "w") as fp:
+        # redirecting output from argcomplete to stdin and stderr is not trivial
+        # (see http://stackoverflow.com/q/12589419/1307905),
+        # so we use bash
+ fp.write('COMP_WORDBREAKS="$COMP_WORDBREAKS" %s 8>&1 9>&2' % pytest_bin)
+    # an alternative would be to extend Testdir.{run(),_run(),popen()} to accept
+    # a keyword argument env that replaces os.environ in popen or extends a
+    # copy of it; advantage: one could not forget to restore it
+ monkeypatch.setenv("_ARGCOMPLETE", "1")
+ monkeypatch.setenv("_ARGCOMPLETE_IFS", "\x0b")
+ monkeypatch.setenv("COMP_WORDBREAKS", " \\t\\n\"\\'><=;|&(:")
+
+ arg = "--fu"
+ monkeypatch.setenv("COMP_LINE", "pytest " + arg)
+ monkeypatch.setenv("COMP_POINT", str(len("pytest " + arg)))
+ result = testdir.run("bash", str(script), arg)
+ if result.ret == 255:
+ # argcomplete not found
+ pytest.skip("argcomplete not available")
+ elif not result.stdout.str():
+ pytest.skip("bash provided no output, argcomplete not available?")
+ else:
+ result.stdout.fnmatch_lines(["--funcargs", "--fulltrace"])
+ os.mkdir("test_argcomplete.d")
+ arg = "test_argc"
+ monkeypatch.setenv("COMP_LINE", "pytest " + arg)
+ monkeypatch.setenv("COMP_POINT", str(len("pytest " + arg)))
+ result = testdir.run("bash", str(script), arg)
+ result.stdout.fnmatch_lines(["test_argcomplete", "test_argcomplete.d/"])
diff --git a/third_party/python/pytest/testing/test_pastebin.py b/third_party/python/pytest/testing/test_pastebin.py
new file mode 100644
index 0000000000..ad7c4d0c1c
--- /dev/null
+++ b/third_party/python/pytest/testing/test_pastebin.py
@@ -0,0 +1,132 @@
+# encoding: utf-8
+from __future__ import absolute_import, division, print_function
+import sys
+import pytest
+
+
+class TestPasteCapture(object):
+
+ @pytest.fixture
+ def pastebinlist(self, monkeypatch, request):
+ pastebinlist = []
+ plugin = request.config.pluginmanager.getplugin("pastebin")
+ monkeypatch.setattr(plugin, "create_new_paste", pastebinlist.append)
+ return pastebinlist
+
+ def test_failed(self, testdir, pastebinlist):
+ testpath = testdir.makepyfile(
+ """
+ import pytest
+ def test_pass():
+ pass
+ def test_fail():
+ assert 0
+ def test_skip():
+ pytest.skip("")
+ """
+ )
+ reprec = testdir.inline_run(testpath, "--paste=failed")
+ assert len(pastebinlist) == 1
+ s = pastebinlist[0]
+ assert s.find("def test_fail") != -1
+ assert reprec.countoutcomes() == [1, 1, 1]
+
+ def test_all(self, testdir, pastebinlist):
+ from _pytest.pytester import LineMatcher
+
+ testpath = testdir.makepyfile(
+ """
+ import pytest
+ def test_pass():
+ pass
+ def test_fail():
+ assert 0
+ def test_skip():
+ pytest.skip("")
+ """
+ )
+ reprec = testdir.inline_run(testpath, "--pastebin=all", "-v")
+ assert reprec.countoutcomes() == [1, 1, 1]
+ assert len(pastebinlist) == 1
+ contents = pastebinlist[0].decode("utf-8")
+ matcher = LineMatcher(contents.splitlines())
+ matcher.fnmatch_lines(
+ [
+ "*test_pass PASSED*",
+ "*test_fail FAILED*",
+ "*test_skip SKIPPED*",
+ "*== 1 failed, 1 passed, 1 skipped in *",
+ ]
+ )
+
+ def test_non_ascii_paste_text(self, testdir):
+ """Make sure that text which contains non-ascii characters is pasted
+ correctly. See #1219.
+ """
+ testdir.makepyfile(
+ test_unicode="""
+ # encoding: utf-8
+ def test():
+ assert '☺' == 1
+ """
+ )
+ result = testdir.runpytest("--pastebin=all")
+ if sys.version_info[0] == 3:
+ expected_msg = "*assert '☺' == 1*"
+ else:
+ expected_msg = "*assert '\\xe2\\x98\\xba' == 1*"
+ result.stdout.fnmatch_lines(
+ [
+ expected_msg,
+ "*== 1 failed in *",
+ "*Sending information to Paste Service*",
+ ]
+ )
+
+
+class TestPaste(object):
+
+ @pytest.fixture
+ def pastebin(self, request):
+ return request.config.pluginmanager.getplugin("pastebin")
+
+ @pytest.fixture
+ def mocked_urlopen(self, monkeypatch):
+ """
+        monkeypatch the actual urlopen calls done by the internal plugin
+        function that connects to the bpaste service.
+ """
+ calls = []
+
+ def mocked(url, data):
+ calls.append((url, data))
+
+ class DummyFile(object):
+
+ def read(self):
+ # part of html of a normal response
+ return b'View <a href="/raw/3c0c6750bd">raw</a>.'
+
+ return DummyFile()
+
+ if sys.version_info < (3, 0):
+ import urllib
+
+ monkeypatch.setattr(urllib, "urlopen", mocked)
+ else:
+ import urllib.request
+
+ monkeypatch.setattr(urllib.request, "urlopen", mocked)
+ return calls
+
+ def test_create_new_paste(self, pastebin, mocked_urlopen):
+ result = pastebin.create_new_paste(b"full-paste-contents")
+ assert result == "https://bpaste.net/show/3c0c6750bd"
+ assert len(mocked_urlopen) == 1
+ url, data = mocked_urlopen[0]
+ assert type(data) is bytes
+ lexer = "python3" if sys.version_info[0] == 3 else "python"
+ assert url == "https://bpaste.net"
+ assert "lexer=%s" % lexer in data.decode()
+ assert "code=full-paste-contents" in data.decode()
+ assert "expiry=1week" in data.decode()
diff --git a/third_party/python/pytest/testing/test_pdb.py b/third_party/python/pytest/testing/test_pdb.py
new file mode 100644
index 0000000000..615d52e834
--- /dev/null
+++ b/third_party/python/pytest/testing/test_pdb.py
@@ -0,0 +1,702 @@
+from __future__ import absolute_import, division, print_function
+import sys
+import platform
+import os
+
+import _pytest._code
+from _pytest.debugging import SUPPORTS_BREAKPOINT_BUILTIN
+import pytest
+
+
+_ENVIRON_PYTHONBREAKPOINT = os.environ.get("PYTHONBREAKPOINT", "")
+
+
+def runpdb_and_get_report(testdir, source):
+ p = testdir.makepyfile(source)
+ result = testdir.runpytest_inprocess("--pdb", p)
+ reports = result.reprec.getreports("pytest_runtest_logreport")
+ assert len(reports) == 3, reports # setup/call/teardown
+ return reports[1]
+
+
+@pytest.fixture
+def custom_pdb_calls():
+ called = []
+
+ # install dummy debugger class and track which methods were called on it
+ class _CustomPdb(object):
+
+ def __init__(self, *args, **kwargs):
+ called.append("init")
+
+ def reset(self):
+ called.append("reset")
+
+ def interaction(self, *args):
+ called.append("interaction")
+
+ _pytest._CustomPdb = _CustomPdb
+ return called
+
+
+@pytest.fixture
+def custom_debugger_hook():
+ called = []
+
+ # install dummy debugger class and track which methods were called on it
+ class _CustomDebugger(object):
+
+ def __init__(self, *args, **kwargs):
+ called.append("init")
+
+ def reset(self):
+ called.append("reset")
+
+ def interaction(self, *args):
+ called.append("interaction")
+
+ def set_trace(self, frame):
+ print("**CustomDebugger**")
+ called.append("set_trace")
+
+ _pytest._CustomDebugger = _CustomDebugger
+ yield called
+ del _pytest._CustomDebugger
+
+
+class TestPDB(object):
+
+ @pytest.fixture
+ def pdblist(self, request):
+ monkeypatch = request.getfixturevalue("monkeypatch")
+ pdblist = []
+
+ def mypdb(*args):
+ pdblist.append(args)
+
+ plugin = request.config.pluginmanager.getplugin("debugging")
+ monkeypatch.setattr(plugin, "post_mortem", mypdb)
+ return pdblist
+
+ def test_pdb_on_fail(self, testdir, pdblist):
+ rep = runpdb_and_get_report(
+ testdir,
+ """
+ def test_func():
+ assert 0
+ """,
+ )
+ assert rep.failed
+ assert len(pdblist) == 1
+ tb = _pytest._code.Traceback(pdblist[0][0])
+ assert tb[-1].name == "test_func"
+
+ def test_pdb_on_xfail(self, testdir, pdblist):
+ rep = runpdb_and_get_report(
+ testdir,
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test_func():
+ assert 0
+ """,
+ )
+ assert "xfail" in rep.keywords
+ assert not pdblist
+
+ def test_pdb_on_skip(self, testdir, pdblist):
+ rep = runpdb_and_get_report(
+ testdir,
+ """
+ import pytest
+ def test_func():
+ pytest.skip("hello")
+ """,
+ )
+ assert rep.skipped
+ assert len(pdblist) == 0
+
+ def test_pdb_on_BdbQuit(self, testdir, pdblist):
+ rep = runpdb_and_get_report(
+ testdir,
+ """
+ import bdb
+ def test_func():
+ raise bdb.BdbQuit
+ """,
+ )
+ assert rep.failed
+ assert len(pdblist) == 0
+
+ def test_pdb_on_KeyboardInterrupt(self, testdir, pdblist):
+ rep = runpdb_and_get_report(
+ testdir,
+ """
+ def test_func():
+ raise KeyboardInterrupt
+ """,
+ )
+ assert rep.failed
+ assert len(pdblist) == 1
+
+ def test_pdb_interaction(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ def test_1():
+ i = 0
+ assert i == 1
+ """
+ )
+ child = testdir.spawn_pytest("--pdb %s" % p1)
+ child.expect(".*def test_1")
+ child.expect(".*i = 0")
+ child.expect("(Pdb)")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "1 failed" in rest
+ assert "def test_1" not in rest
+ self.flush(child)
+
+ @staticmethod
+ def flush(child):
+ if platform.system() == "Darwin":
+ return
+ if child.isalive():
+ child.wait()
+
+ def test_pdb_unittest_postmortem(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ import unittest
+ class Blub(unittest.TestCase):
+ def tearDown(self):
+ self.filename = None
+ def test_false(self):
+ self.filename = 'debug' + '.me'
+ assert 0
+ """
+ )
+ child = testdir.spawn_pytest("--pdb %s" % p1)
+ child.expect("(Pdb)")
+ child.sendline("p self.filename")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "debug.me" in rest
+ self.flush(child)
+
+ def test_pdb_unittest_skip(self, testdir):
+ """Test for issue #2137"""
+ p1 = testdir.makepyfile(
+ """
+ import unittest
+ @unittest.skipIf(True, 'Skipping also with pdb active')
+ class MyTestCase(unittest.TestCase):
+ def test_one(self):
+ assert 0
+ """
+ )
+ child = testdir.spawn_pytest("-rs --pdb %s" % p1)
+ child.expect("Skipping also with pdb active")
+ child.expect("1 skipped in")
+ child.sendeof()
+ self.flush(child)
+
+ def test_pdb_print_captured_stdout(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ def test_1():
+ print("get\\x20rekt")
+ assert False
+ """
+ )
+ child = testdir.spawn_pytest("--pdb %s" % p1)
+ child.expect("captured stdout")
+ child.expect("get rekt")
+ child.expect("(Pdb)")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "1 failed" in rest
+ assert "get rekt" not in rest
+ self.flush(child)
+
+ def test_pdb_print_captured_stderr(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ def test_1():
+ import sys
+ sys.stderr.write("get\\x20rekt")
+ assert False
+ """
+ )
+ child = testdir.spawn_pytest("--pdb %s" % p1)
+ child.expect("captured stderr")
+ child.expect("get rekt")
+ child.expect("(Pdb)")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "1 failed" in rest
+ assert "get rekt" not in rest
+ self.flush(child)
+
+ def test_pdb_dont_print_empty_captured_stdout_and_stderr(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ def test_1():
+ assert False
+ """
+ )
+ child = testdir.spawn_pytest("--pdb %s" % p1)
+ child.expect("(Pdb)")
+ output = child.before.decode("utf8")
+ child.sendeof()
+ assert "captured stdout" not in output
+ assert "captured stderr" not in output
+ self.flush(child)
+
+ @pytest.mark.parametrize("showcapture", ["all", "no", "log"])
+ def test_pdb_print_captured_logs(self, testdir, showcapture):
+ p1 = testdir.makepyfile(
+ """
+ def test_1():
+ import logging
+ logging.warn("get " + "rekt")
+ assert False
+ """
+ )
+ child = testdir.spawn_pytest("--show-capture=%s --pdb %s" % (showcapture, p1))
+ if showcapture in ("all", "log"):
+ child.expect("captured log")
+ child.expect("get rekt")
+ child.expect("(Pdb)")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "1 failed" in rest
+ self.flush(child)
+
+ def test_pdb_print_captured_logs_nologging(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ def test_1():
+ import logging
+ logging.warn("get " + "rekt")
+ assert False
+ """
+ )
+ child = testdir.spawn_pytest(
+ "--show-capture=all --pdb " "-p no:logging %s" % p1
+ )
+ child.expect("get rekt")
+ output = child.before.decode("utf8")
+ assert "captured log" not in output
+ child.expect("(Pdb)")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "1 failed" in rest
+ self.flush(child)
+
+ def test_pdb_interaction_exception(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ import pytest
+ def globalfunc():
+ pass
+ def test_1():
+ pytest.raises(ValueError, globalfunc)
+ """
+ )
+ child = testdir.spawn_pytest("--pdb %s" % p1)
+ child.expect(".*def test_1")
+ child.expect(".*pytest.raises.*globalfunc")
+ child.expect("(Pdb)")
+ child.sendline("globalfunc")
+ child.expect(".*function")
+ child.sendeof()
+ child.expect("1 failed")
+ self.flush(child)
+
+ def test_pdb_interaction_on_collection_issue181(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ import pytest
+ xxx
+ """
+ )
+ child = testdir.spawn_pytest("--pdb %s" % p1)
+ # child.expect(".*import pytest.*")
+ child.expect("(Pdb)")
+ child.sendeof()
+ child.expect("1 error")
+ self.flush(child)
+
+ def test_pdb_interaction_on_internal_error(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_runtest_protocol():
+ 0/0
+ """
+ )
+ p1 = testdir.makepyfile("def test_func(): pass")
+ child = testdir.spawn_pytest("--pdb %s" % p1)
+ # child.expect(".*import pytest.*")
+ child.expect("(Pdb)")
+ child.sendeof()
+ self.flush(child)
+
+ def test_pdb_interaction_capturing_simple(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ import pytest
+ def test_1():
+ i = 0
+ print ("hello17")
+ pytest.set_trace()
+ x = 3
+ """
+ )
+ child = testdir.spawn_pytest(str(p1))
+ child.expect("test_1")
+ child.expect("x = 3")
+ child.expect("(Pdb)")
+ child.sendeof()
+ rest = child.read().decode("utf-8")
+ assert "1 failed" in rest
+ assert "def test_1" in rest
+ assert "hello17" in rest # out is captured
+ self.flush(child)
+
+ def test_pdb_set_trace_interception(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ import pdb
+ def test_1():
+ pdb.set_trace()
+ """
+ )
+ child = testdir.spawn_pytest(str(p1))
+ child.expect("test_1")
+ child.expect("(Pdb)")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "1 failed" in rest
+ assert "reading from stdin while output" not in rest
+ self.flush(child)
+
+ def test_pdb_and_capsys(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ import pytest
+ def test_1(capsys):
+ print ("hello1")
+ pytest.set_trace()
+ """
+ )
+ child = testdir.spawn_pytest(str(p1))
+ child.expect("test_1")
+ child.send("capsys.readouterr()\n")
+ child.expect("hello1")
+ child.sendeof()
+ child.read()
+ self.flush(child)
+
+ def test_set_trace_capturing_afterwards(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ import pdb
+ def test_1():
+ pdb.set_trace()
+ def test_2():
+ print ("hello")
+ assert 0
+ """
+ )
+ child = testdir.spawn_pytest(str(p1))
+ child.expect("test_1")
+ child.send("c\n")
+ child.expect("test_2")
+ child.expect("Captured")
+ child.expect("hello")
+ child.sendeof()
+ child.read()
+ self.flush(child)
+
+ def test_pdb_interaction_doctest(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ import pytest
+ def function_1():
+ '''
+ >>> i = 0
+ >>> assert i == 1
+ '''
+ """
+ )
+ child = testdir.spawn_pytest("--doctest-modules --pdb %s" % p1)
+ child.expect("(Pdb)")
+ child.sendline("i")
+ child.expect("0")
+ child.expect("(Pdb)")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "1 failed" in rest
+ self.flush(child)
+
+ def test_pdb_interaction_capturing_twice(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ import pytest
+ def test_1():
+ i = 0
+ print ("hello17")
+ pytest.set_trace()
+ x = 3
+ print ("hello18")
+ pytest.set_trace()
+ x = 4
+ """
+ )
+ child = testdir.spawn_pytest(str(p1))
+ child.expect("test_1")
+ child.expect("x = 3")
+ child.expect("(Pdb)")
+ child.sendline("c")
+ child.expect("x = 4")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "1 failed" in rest
+ assert "def test_1" in rest
+ assert "hello17" in rest # out is captured
+ assert "hello18" in rest # out is captured
+ self.flush(child)
+
+ def test_pdb_used_outside_test(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ import pytest
+ pytest.set_trace()
+ x = 5
+ """
+ )
+ child = testdir.spawn("%s %s" % (sys.executable, p1))
+ child.expect("x = 5")
+ child.sendeof()
+ self.flush(child)
+
+ def test_pdb_used_in_generate_tests(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ pytest.set_trace()
+ x = 5
+ def test_foo(a):
+ pass
+ """
+ )
+ child = testdir.spawn_pytest(str(p1))
+ child.expect("x = 5")
+ child.sendeof()
+ self.flush(child)
+
+ def test_pdb_collection_failure_is_shown(self, testdir):
+ p1 = testdir.makepyfile("xxx")
+ result = testdir.runpytest_subprocess("--pdb", p1)
+ result.stdout.fnmatch_lines(["*NameError*xxx*", "*1 error*"])
+
+ def test_enter_pdb_hook_is_called(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_enter_pdb(config):
+ assert config.testing_verification == 'configured'
+ print 'enter_pdb_hook'
+
+ def pytest_configure(config):
+ config.testing_verification = 'configured'
+ """
+ )
+ p1 = testdir.makepyfile(
+ """
+ import pytest
+
+ def test_foo():
+ pytest.set_trace()
+ """
+ )
+ child = testdir.spawn_pytest(str(p1))
+ child.expect("enter_pdb_hook")
+ child.send("c\n")
+ child.sendeof()
+ self.flush(child)
+
+ def test_pdb_custom_cls(self, testdir, custom_pdb_calls):
+ p1 = testdir.makepyfile("""xxx """)
+ result = testdir.runpytest_inprocess("--pdb", "--pdbcls=_pytest:_CustomPdb", p1)
+ result.stdout.fnmatch_lines(["*NameError*xxx*", "*1 error*"])
+ assert custom_pdb_calls == ["init", "reset", "interaction"]
+
+ def test_pdb_custom_cls_without_pdb(self, testdir, custom_pdb_calls):
+ p1 = testdir.makepyfile("""xxx """)
+ result = testdir.runpytest_inprocess("--pdbcls=_pytest:_CustomPdb", p1)
+ result.stdout.fnmatch_lines(["*NameError*xxx*", "*1 error*"])
+ assert custom_pdb_calls == []
+
+ def test_pdb_custom_cls_with_settrace(self, testdir, monkeypatch):
+ testdir.makepyfile(
+ custom_pdb="""
+ class CustomPdb(object):
+ def set_trace(*args, **kwargs):
+ print 'custom set_trace>'
+ """
+ )
+ p1 = testdir.makepyfile(
+ """
+ import pytest
+
+ def test_foo():
+ pytest.set_trace()
+ """
+ )
+ monkeypatch.setenv("PYTHONPATH", str(testdir.tmpdir))
+ child = testdir.spawn_pytest("--pdbcls=custom_pdb:CustomPdb %s" % str(p1))
+
+ child.expect("custom set_trace>")
+ self.flush(child)
+
+
+class TestDebuggingBreakpoints(object):
+
+ def test_supports_breakpoint_module_global(self):
+ """
+        Test that the breakpoint() builtin is reported as supported on
+        Python 3.7+ and not on CPython 3.5 and 2.7
+ """
+ if sys.version_info.major == 3 and sys.version_info.minor >= 7:
+ assert SUPPORTS_BREAKPOINT_BUILTIN is True
+ if sys.version_info.major == 3 and sys.version_info.minor == 5:
+ assert SUPPORTS_BREAKPOINT_BUILTIN is False
+ if sys.version_info.major == 2 and sys.version_info.minor == 7:
+ assert SUPPORTS_BREAKPOINT_BUILTIN is False
+
+ @pytest.mark.skipif(
+ not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
+ )
+ @pytest.mark.parametrize("arg", ["--pdb", ""])
+ def test_sys_breakpointhook_configure_and_unconfigure(self, testdir, arg):
+ """
+        Test that sys.breakpointhook is set to the custom Pdb class once configured,
+        and that the hook is reset to the system value once pytest has been unconfigured
+ """
+ testdir.makeconftest(
+ """
+ import sys
+ from pytest import hookimpl
+ from _pytest.debugging import pytestPDB
+
+ def pytest_configure(config):
+ config._cleanup.append(check_restored)
+
+ def check_restored():
+ assert sys.breakpointhook == sys.__breakpointhook__
+
+ def test_check():
+ assert sys.breakpointhook == pytestPDB.set_trace
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_nothing(): pass
+ """
+ )
+ args = (arg,) if arg else ()
+ result = testdir.runpytest_subprocess(*args)
+ result.stdout.fnmatch_lines(["*1 passed in *"])
+
+ @pytest.mark.skipif(
+ not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
+ )
+ def test_pdb_custom_cls(self, testdir, custom_debugger_hook):
+ p1 = testdir.makepyfile(
+ """
+ def test_nothing():
+ breakpoint()
+ """
+ )
+ result = testdir.runpytest_inprocess(
+ "--pdb", "--pdbcls=_pytest:_CustomDebugger", p1
+ )
+ result.stdout.fnmatch_lines(["*CustomDebugger*", "*1 passed*"])
+ assert custom_debugger_hook == ["init", "set_trace"]
+
+ @pytest.mark.parametrize("arg", ["--pdb", ""])
+ @pytest.mark.skipif(
+ not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
+ )
+ def test_environ_custom_class(self, testdir, custom_debugger_hook, arg):
+ testdir.makeconftest(
+ """
+ import os
+ import sys
+
+ os.environ['PYTHONBREAKPOINT'] = '_pytest._CustomDebugger.set_trace'
+
+ def pytest_configure(config):
+ config._cleanup.append(check_restored)
+
+ def check_restored():
+ assert sys.breakpointhook == sys.__breakpointhook__
+
+ def test_check():
+ import _pytest
+ assert sys.breakpointhook is _pytest._CustomDebugger.set_trace
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_nothing(): pass
+ """
+ )
+ args = (arg,) if arg else ()
+ result = testdir.runpytest_subprocess(*args)
+ result.stdout.fnmatch_lines(["*1 passed in *"])
+
+ @pytest.mark.skipif(
+ not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
+ )
+ @pytest.mark.skipif(
+ not _ENVIRON_PYTHONBREAKPOINT == "",
+ reason="Requires breakpoint() default value",
+ )
+ def test_sys_breakpoint_interception(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ def test_1():
+ breakpoint()
+ """
+ )
+ child = testdir.spawn_pytest(str(p1))
+ child.expect("test_1")
+ child.expect("(Pdb)")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "1 failed" in rest
+ assert "reading from stdin while output" not in rest
+ TestPDB.flush(child)
+
+ @pytest.mark.skipif(
+ not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
+ )
+ def test_pdb_not_altered(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ import pdb
+ def test_1():
+ pdb.set_trace()
+ """
+ )
+ child = testdir.spawn_pytest(str(p1))
+ child.expect("test_1")
+ child.expect("(Pdb)")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "1 failed" in rest
+ assert "reading from stdin while output" not in rest
+ TestPDB.flush(child)
diff --git a/third_party/python/pytest/testing/test_pluginmanager.py b/third_party/python/pytest/testing/test_pluginmanager.py
new file mode 100644
index 0000000000..22cea4207c
--- /dev/null
+++ b/third_party/python/pytest/testing/test_pluginmanager.py
@@ -0,0 +1,386 @@
+# encoding: UTF-8
+from __future__ import absolute_import, division, print_function
+import pytest
+import os
+import re
+import sys
+import types
+
+from _pytest.config import get_config, PytestPluginManager
+from _pytest.main import EXIT_NOTESTSCOLLECTED, Session
+
+
+@pytest.fixture
+def pytestpm():
+ return PytestPluginManager()
+
+
+class TestPytestPluginInteractions(object):
+
+ def test_addhooks_conftestplugin(self, testdir):
+ testdir.makepyfile(
+ newhooks="""
+ def pytest_myhook(xyz):
+ "new hook"
+ """
+ )
+ conf = testdir.makeconftest(
+ """
+ import sys ; sys.path.insert(0, '.')
+ import newhooks
+ def pytest_addhooks(pluginmanager):
+ pluginmanager.addhooks(newhooks)
+ def pytest_myhook(xyz):
+ return xyz + 1
+ """
+ )
+ config = get_config()
+ pm = config.pluginmanager
+ pm.hook.pytest_addhooks.call_historic(
+ kwargs=dict(pluginmanager=config.pluginmanager)
+ )
+ config.pluginmanager._importconftest(conf)
+ # print(config.pluginmanager.get_plugins())
+ res = config.hook.pytest_myhook(xyz=10)
+ assert res == [11]
+
+ def test_addhooks_nohooks(self, testdir):
+ testdir.makeconftest(
+ """
+ import sys
+ def pytest_addhooks(pluginmanager):
+ pluginmanager.addhooks(sys)
+ """
+ )
+ res = testdir.runpytest()
+ assert res.ret != 0
+ res.stderr.fnmatch_lines(["*did not find*sys*"])
+
+ def test_namespace_early_from_import(self, testdir):
+ p = testdir.makepyfile(
+ """
+ from pytest import Item
+ from pytest import Item as Item2
+ assert Item is Item2
+ """
+ )
+ result = testdir.runpython(p)
+ assert result.ret == 0
+
+ def test_do_ext_namespace(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_namespace():
+ return {'hello': 'world'}
+ """
+ )
+ p = testdir.makepyfile(
+ """
+ from pytest import hello
+ import pytest
+ def test_hello():
+ assert hello == "world"
+ assert 'hello' in pytest.__all__
+ """
+ )
+ reprec = testdir.inline_run(p)
+ reprec.assertoutcome(passed=1)
+
+ def test_do_option_postinitialize(self, testdir):
+ config = testdir.parseconfigure()
+ assert not hasattr(config.option, "test123")
+ p = testdir.makepyfile(
+ """
+ def pytest_addoption(parser):
+ parser.addoption('--test123', action="store_true",
+ default=True)
+ """
+ )
+ config.pluginmanager._importconftest(p)
+ assert config.option.test123
+
+ def test_configure(self, testdir):
+ config = testdir.parseconfig()
+ values = []
+
+ class A(object):
+
+ def pytest_configure(self, config):
+ values.append(self)
+
+ config.pluginmanager.register(A())
+ assert len(values) == 0
+ config._do_configure()
+ assert len(values) == 1
+ config.pluginmanager.register(A()) # leads to a configured() plugin
+ assert len(values) == 2
+ assert values[0] != values[1]
+
+ config._ensure_unconfigure()
+ config.pluginmanager.register(A())
+ assert len(values) == 2
+
+ def test_hook_tracing(self):
+ pytestpm = get_config().pluginmanager # fully initialized with plugins
+ saveindent = []
+
+ class api1(object):
+
+ def pytest_plugin_registered(self):
+ saveindent.append(pytestpm.trace.root.indent)
+
+ class api2(object):
+
+ def pytest_plugin_registered(self):
+ saveindent.append(pytestpm.trace.root.indent)
+ raise ValueError()
+
+ values = []
+ pytestpm.trace.root.setwriter(values.append)
+ undo = pytestpm.enable_tracing()
+ try:
+ indent = pytestpm.trace.root.indent
+ p = api1()
+ pytestpm.register(p)
+ assert pytestpm.trace.root.indent == indent
+ assert len(values) >= 2
+ assert "pytest_plugin_registered" in values[0]
+ assert "finish" in values[1]
+
+ values[:] = []
+ with pytest.raises(ValueError):
+ pytestpm.register(api2())
+ assert pytestpm.trace.root.indent == indent
+ assert saveindent[0] > indent
+ finally:
+ undo()
+
+ def test_hook_proxy(self, testdir):
+ """Test the gethookproxy function(#2016)"""
+ config = testdir.parseconfig()
+ session = Session(config)
+ testdir.makepyfile(**{"tests/conftest.py": "", "tests/subdir/conftest.py": ""})
+
+ conftest1 = testdir.tmpdir.join("tests/conftest.py")
+ conftest2 = testdir.tmpdir.join("tests/subdir/conftest.py")
+
+ config.pluginmanager._importconftest(conftest1)
+ ihook_a = session.gethookproxy(testdir.tmpdir.join("tests"))
+ assert ihook_a is not None
+ config.pluginmanager._importconftest(conftest2)
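+ # importing another conftest for the same directory should invalidate the previously cached hook proxy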
+ ihook_b = session.gethookproxy(testdir.tmpdir.join("tests"))
+ assert ihook_a is not ihook_b
+
+ def test_warn_on_deprecated_addhooks(self, pytestpm):
+ warnings = []
+
+ class get_warnings(object):
+
+ def pytest_logwarning(self, code, fslocation, message, nodeid):
+ warnings.append(message)
+
+ class Plugin(object):
+
+ def pytest_testhook():
+ pass
+
+ pytestpm.register(get_warnings())
+ before = list(warnings)
+ pytestpm.addhooks(Plugin())
+ assert len(warnings) == len(before) + 1
+ assert "deprecated" in warnings[-1]
+
+
+def test_namespace_has_default_and_env_plugins(testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ pytest.mark
+ """
+ )
+ result = testdir.runpython(p)
+ assert result.ret == 0
+
+
+def test_default_markers(testdir):
+ result = testdir.runpytest("--markers")
+ result.stdout.fnmatch_lines(["*tryfirst*first*", "*trylast*last*"])
+
+
+def test_importplugin_error_message(testdir, pytestpm):
+ """Don't hide import errors when importing plugins and provide
+ an easy to debug message.
+
+ See #375 and #1998.
+ """
+ testdir.syspathinsert(testdir.tmpdir)
+ testdir.makepyfile(
+ qwe="""
+ # encoding: UTF-8
+ def test_traceback():
+ raise ImportError(u'Not possible to import: ☺')
+ test_traceback()
+ """
+ )
+ with pytest.raises(ImportError) as excinfo:
+ pytestpm.import_plugin("qwe")
+
+ expected_message = '.*Error importing plugin "qwe": Not possible to import: .'
+ expected_traceback = ".*in test_traceback"
+ assert re.match(expected_message, str(excinfo.value))
+ assert re.match(expected_traceback, str(excinfo.traceback[-1]))
+
+
+class TestPytestPluginManager(object):
+
+ def test_register_imported_modules(self):
+ pm = PytestPluginManager()
+ mod = types.ModuleType("x.y.pytest_hello")
+ pm.register(mod)
+ assert pm.is_registered(mod)
+ values = pm.get_plugins()
+ assert mod in values
+ pytest.raises(ValueError, "pm.register(mod)")
+ pytest.raises(ValueError, lambda: pm.register(mod))
+ # assert not pm.is_registered(mod2)
+ assert pm.get_plugins() == values
+
+ def test_canonical_import(self, monkeypatch):
+ mod = types.ModuleType("pytest_xyz")
+ monkeypatch.setitem(sys.modules, "pytest_xyz", mod)
+ pm = PytestPluginManager()
+ pm.import_plugin("pytest_xyz")
+ assert pm.get_plugin("pytest_xyz") == mod
+ assert pm.is_registered(mod)
+
+ def test_consider_module(self, testdir, pytestpm):
+ testdir.syspathinsert()
+ testdir.makepyfile(pytest_p1="#")
+ testdir.makepyfile(pytest_p2="#")
+ mod = types.ModuleType("temp")
+ mod.pytest_plugins = ["pytest_p1", "pytest_p2"]
+ pytestpm.consider_module(mod)
+ assert pytestpm.get_plugin("pytest_p1").__name__ == "pytest_p1"
+ assert pytestpm.get_plugin("pytest_p2").__name__ == "pytest_p2"
+
+ def test_consider_module_import_module(self, testdir):
+ pytestpm = get_config().pluginmanager
+ mod = types.ModuleType("x")
+ mod.pytest_plugins = "pytest_a"
+ aplugin = testdir.makepyfile(pytest_a="#")
+ reprec = testdir.make_hook_recorder(pytestpm)
+ # syspath.prepend(aplugin.dirpath())
+ sys.path.insert(0, str(aplugin.dirpath()))
+ pytestpm.consider_module(mod)
+ call = reprec.getcall(pytestpm.hook.pytest_plugin_registered.name)
+ assert call.plugin.__name__ == "pytest_a"
+
+ # check that it is not registered twice
+ pytestpm.consider_module(mod)
+ values = reprec.getcalls("pytest_plugin_registered")
+ assert len(values) == 1
+
+ def test_consider_env_fails_to_import(self, monkeypatch, pytestpm):
+ monkeypatch.setenv("PYTEST_PLUGINS", "nonexisting", prepend=",")
+ with pytest.raises(ImportError):
+ pytestpm.consider_env()
+
+ def test_plugin_skip(self, testdir, monkeypatch):
+ p = testdir.makepyfile(
+ skipping1="""
+ import pytest
+ pytest.skip("hello")
+ """
+ )
+ p.copy(p.dirpath("skipping2.py"))
+ monkeypatch.setenv("PYTEST_PLUGINS", "skipping2")
+ result = testdir.runpytest("-rw", "-p", "skipping1", syspathinsert=True)
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+ result.stdout.fnmatch_lines(
+ ["*skipped plugin*skipping1*hello*", "*skipped plugin*skipping2*hello*"]
+ )
+
+ def test_consider_env_plugin_instantiation(self, testdir, monkeypatch, pytestpm):
+ testdir.syspathinsert()
+ testdir.makepyfile(xy123="#")
+ monkeypatch.setitem(os.environ, "PYTEST_PLUGINS", "xy123")
+ l1 = len(pytestpm.get_plugins())
+ pytestpm.consider_env()
+ l2 = len(pytestpm.get_plugins())
+ assert l2 == l1 + 1
+ assert pytestpm.get_plugin("xy123")
+ pytestpm.consider_env()
+ l3 = len(pytestpm.get_plugins())
+ assert l2 == l3
+
+ def test_pluginmanager_ENV_startup(self, testdir, monkeypatch):
+ testdir.makepyfile(pytest_x500="#")
+ p = testdir.makepyfile(
+ """
+ import pytest
+ def test_hello(pytestconfig):
+ plugin = pytestconfig.pluginmanager.get_plugin('pytest_x500')
+ assert plugin is not None
+ """
+ )
+ monkeypatch.setenv("PYTEST_PLUGINS", "pytest_x500", prepend=",")
+ result = testdir.runpytest(p, syspathinsert=True)
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_import_plugin_importname(self, testdir, pytestpm):
+ pytest.raises(ImportError, 'pytestpm.import_plugin("qweqwex.y")')
+ pytest.raises(ImportError, 'pytestpm.import_plugin("pytest_qweqwx.y")')
+
+ testdir.syspathinsert()
+ pluginname = "pytest_hello"
+ testdir.makepyfile(**{pluginname: ""})
+ pytestpm.import_plugin("pytest_hello")
+ len1 = len(pytestpm.get_plugins())
+ pytestpm.import_plugin("pytest_hello")
+ len2 = len(pytestpm.get_plugins())
+ assert len1 == len2
+ plugin1 = pytestpm.get_plugin("pytest_hello")
+ assert plugin1.__name__.endswith("pytest_hello")
+ plugin2 = pytestpm.get_plugin("pytest_hello")
+ assert plugin2 is plugin1
+
+ def test_import_plugin_dotted_name(self, testdir, pytestpm):
+ pytest.raises(ImportError, 'pytestpm.import_plugin("qweqwex.y")')
+ pytest.raises(ImportError, 'pytestpm.import_plugin("pytest_qweqwex.y")')
+
+ testdir.syspathinsert()
+ testdir.mkpydir("pkg").join("plug.py").write("x=3")
+ pluginname = "pkg.plug"
+ pytestpm.import_plugin(pluginname)
+ mod = pytestpm.get_plugin("pkg.plug")
+ assert mod.x == 3
+
+ def test_consider_conftest_deps(self, testdir, pytestpm):
+ mod = testdir.makepyfile("pytest_plugins='xyz'").pyimport()
+ with pytest.raises(ImportError):
+ pytestpm.consider_conftest(mod)
+
+
+class TestPytestPluginManagerBootstrapping(object):
+
+ def test_preparse_args(self, pytestpm):
+ pytest.raises(
+ ImportError, lambda: pytestpm.consider_preparse(["xyz", "-p", "hello123"])
+ )
+
+ def test_plugin_prevent_register(self, pytestpm):
+ pytestpm.consider_preparse(["xyz", "-p", "no:abc"])
+ l1 = pytestpm.get_plugins()
+ pytestpm.register(42, name="abc")
+ l2 = pytestpm.get_plugins()
+ assert len(l2) == len(l1)
+ assert 42 not in l2
+
+ def test_plugin_prevent_register_unregistered_already_registered(self, pytestpm):
+ pytestpm.register(42, name="abc")
+ l1 = pytestpm.get_plugins()
+ assert 42 in l1
+ pytestpm.consider_preparse(["xyz", "-p", "no:abc"])
+ l2 = pytestpm.get_plugins()
+ assert 42 not in l2
diff --git a/third_party/python/pytest/testing/test_pytester.py b/third_party/python/pytest/testing/test_pytester.py
new file mode 100644
index 0000000000..b74c0b7f70
--- /dev/null
+++ b/third_party/python/pytest/testing/test_pytester.py
@@ -0,0 +1,401 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import, division, print_function
+import os
+import py.path
+import pytest
+import sys
+import _pytest.pytester as pytester
+from _pytest.pytester import HookRecorder
+from _pytest.pytester import CwdSnapshot, SysModulesSnapshot, SysPathsSnapshot
+from _pytest.config import PytestPluginManager
+from _pytest.main import EXIT_OK, EXIT_TESTSFAILED
+
+
+def test_make_hook_recorder(testdir):
+ item = testdir.getitem("def test_func(): pass")
+ recorder = testdir.make_hook_recorder(item.config.pluginmanager)
+ assert not recorder.getfailures()
+
+ pytest.xfail("internal reportrecorder tests need refactoring")
+
+ class rep(object):
+ excinfo = None
+ passed = False
+ failed = True
+ skipped = False
+ when = "call"
+
+ recorder.hook.pytest_runtest_logreport(report=rep)
+ failures = recorder.getfailures()
+ assert failures == [rep]
+ failures = recorder.getfailures()
+ assert failures == [rep]
+
+ class rep(object):
+ excinfo = None
+ passed = False
+ failed = False
+ skipped = True
+ when = "call"
+
+ rep.passed = False
+ rep.skipped = True
+ recorder.hook.pytest_runtest_logreport(report=rep)
+
+ modcol = testdir.getmodulecol("")
+ rep = modcol.config.hook.pytest_make_collect_report(collector=modcol)
+ rep.passed = False
+ rep.failed = True
+ rep.skipped = False
+ recorder.hook.pytest_collectreport(report=rep)
+
+ passed, skipped, failed = recorder.listoutcomes()
+ assert not passed and skipped and failed
+
+ numpassed, numskipped, numfailed = recorder.countoutcomes()
+ assert numpassed == 0
+ assert numskipped == 1
+ assert numfailed == 1
+ assert len(recorder.getfailedcollections()) == 1
+
+ recorder.unregister()
+ recorder.clear()
+ recorder.hook.pytest_runtest_logreport(report=rep)
+ pytest.raises(ValueError, "recorder.getfailures()")
+
+
+def test_parseconfig(testdir):
+ config1 = testdir.parseconfig()
+ config2 = testdir.parseconfig()
+ assert config2 != config1
+ assert config1 != pytest.config
+
+
+def test_testdir_runs_with_plugin(testdir):
+ testdir.makepyfile(
+ """
+ pytest_plugins = "pytester"
+ def test_hello(testdir):
+ assert 1
+ """
+ )
+ result = testdir.runpytest()
+ result.assert_outcomes(passed=1)
+
+
+def make_holder():
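+ # expose the same two hooks both as a plugin class and as a plugin module, for the parametrized test below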
+
+ class apiclass(object):
+
+ def pytest_xyz(self, arg):
+ "x"
+
+ def pytest_xyz_noarg(self):
+ "x"
+
+ apimod = type(os)("api")
+
+ def pytest_xyz(arg):
+ "x"
+
+ def pytest_xyz_noarg():
+ "x"
+
+ apimod.pytest_xyz = pytest_xyz
+ apimod.pytest_xyz_noarg = pytest_xyz_noarg
+ return apiclass, apimod
+
+
+@pytest.mark.parametrize("holder", make_holder())
+def test_hookrecorder_basic(holder):
+ pm = PytestPluginManager()
+ pm.addhooks(holder)
+ rec = HookRecorder(pm)
+ pm.hook.pytest_xyz(arg=123)
+ call = rec.popcall("pytest_xyz")
+ assert call.arg == 123
+ assert call._name == "pytest_xyz"
+ pytest.raises(pytest.fail.Exception, "rec.popcall('abc')")
+ pm.hook.pytest_xyz_noarg()
+ call = rec.popcall("pytest_xyz_noarg")
+ assert call._name == "pytest_xyz_noarg"
+
+
+def test_makepyfile_unicode(testdir):
+ global unichr
+ try:
+ unichr(65)
+ except NameError:
+ unichr = chr
+ testdir.makepyfile(unichr(0xfffd))
+
+
+def test_makepyfile_utf8(testdir):
+ """Ensure makepyfile accepts utf-8 bytes as input (#2738)"""
+ utf8_contents = u"""
+ def setup_function(function):
+ mixed_encoding = u'São Paulo'
+ """.encode(
+ "utf-8"
+ )
+ p = testdir.makepyfile(utf8_contents)
+ assert u"mixed_encoding = u'São Paulo'".encode("utf-8") in p.read("rb")
+
+
+class TestInlineRunModulesCleanup(object):
+
+ def test_inline_run_test_module_not_cleaned_up(self, testdir):
+ test_mod = testdir.makepyfile("def test_foo(): assert True")
+ result = testdir.inline_run(str(test_mod))
+ assert result.ret == EXIT_OK
+ # rewrite module, now test should fail if module was re-imported
+ test_mod.write("def test_foo(): assert False")
+ result2 = testdir.inline_run(str(test_mod))
+ assert result2.ret == EXIT_TESTSFAILED
+
+ def spy_factory(self):
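+ # wrap SysModulesSnapshot so the test can count restore() calls and inspect the preserve filter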
+
+ class SysModulesSnapshotSpy(object):
+ instances = []
+
+ def __init__(self, preserve=None):
+ SysModulesSnapshotSpy.instances.append(self)
+ self._spy_restore_count = 0
+ self._spy_preserve = preserve
+ self.__snapshot = SysModulesSnapshot(preserve=preserve)
+
+ def restore(self):
+ self._spy_restore_count += 1
+ return self.__snapshot.restore()
+
+ return SysModulesSnapshotSpy
+
+ def test_inline_run_taking_and_restoring_a_sys_modules_snapshot(
+ self, testdir, monkeypatch
+ ):
+ spy_factory = self.spy_factory()
+ monkeypatch.setattr(pytester, "SysModulesSnapshot", spy_factory)
+ original = dict(sys.modules)
+ testdir.syspathinsert()
+ testdir.makepyfile(import1="# you son of a silly person")
+ testdir.makepyfile(import2="# my hovercraft is full of eels")
+ test_mod = testdir.makepyfile(
+ """
+ import import1
+ def test_foo(): import import2"""
+ )
+ testdir.inline_run(str(test_mod))
+ assert len(spy_factory.instances) == 1
+ spy = spy_factory.instances[0]
+ assert spy._spy_restore_count == 1
+ assert sys.modules == original
+ assert all(sys.modules[x] is original[x] for x in sys.modules)
+
+ def test_inline_run_sys_modules_snapshot_restore_preserving_modules(
+ self, testdir, monkeypatch
+ ):
+ spy_factory = self.spy_factory()
+ monkeypatch.setattr(pytester, "SysModulesSnapshot", spy_factory)
+ test_mod = testdir.makepyfile("def test_foo(): pass")
+ testdir.inline_run(str(test_mod))
+ spy = spy_factory.instances[0]
+ assert not spy._spy_preserve("black_knight")
+ assert spy._spy_preserve("zope")
+ assert spy._spy_preserve("zope.interface")
+ assert spy._spy_preserve("zopelicious")
+
+ def test_external_test_module_imports_not_cleaned_up(self, testdir):
+ testdir.syspathinsert()
+ testdir.makepyfile(imported="data = 'you son of a silly person'")
+ import imported
+
+ test_mod = testdir.makepyfile(
+ """
+ def test_foo():
+ import imported
+ imported.data = 42"""
+ )
+ testdir.inline_run(str(test_mod))
+ assert imported.data == 42
+
+
+def test_inline_run_clean_sys_paths(testdir):
+
+ def test_sys_path_change_cleanup(self, testdir):
+ test_path1 = testdir.tmpdir.join("boink1").strpath
+ test_path2 = testdir.tmpdir.join("boink2").strpath
+ test_path3 = testdir.tmpdir.join("boink3").strpath
+ sys.path.append(test_path1)
+ sys.meta_path.append(test_path1)
+ original_path = list(sys.path)
+ original_meta_path = list(sys.meta_path)
+ test_mod = testdir.makepyfile(
+ """
+ import sys
+ sys.path.append({test_path2!r})
+ sys.meta_path.append({test_path2!r})
+ def test_foo():
+ sys.path.append({test_path3!r})
+ sys.meta_path.append({test_path3!r})""".format(
+ **locals()
+ )
+ )
+ testdir.inline_run(str(test_mod))
+ assert sys.path == original_path
+ assert sys.meta_path == original_meta_path
+
+ def spy_factory(self):
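+ # wrap SysPathsSnapshot so the test can verify that restore() runs exactly once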
+
+ class SysPathsSnapshotSpy(object):
+ instances = []
+
+ def __init__(self):
+ SysPathsSnapshotSpy.instances.append(self)
+ self._spy_restore_count = 0
+ self.__snapshot = SysPathsSnapshot()
+
+ def restore(self):
+ self._spy_restore_count += 1
+ return self.__snapshot.restore()
+
+ return SysPathsSnapshotSpy
+
+ def test_inline_run_taking_and_restoring_a_sys_paths_snapshot(
+ self, testdir, monkeypatch
+ ):
+ spy_factory = self.spy_factory()
+ monkeypatch.setattr(pytester, "SysPathsSnapshot", spy_factory)
+ test_mod = testdir.makepyfile("def test_foo(): pass")
+ testdir.inline_run(str(test_mod))
+ assert len(spy_factory.instances) == 1
+ spy = spy_factory.instances[0]
+ assert spy._spy_restore_count == 1
+
+
+def test_assert_outcomes_after_pytest_error(testdir):
+ testdir.makepyfile("def test_foo(): assert True")
+
+ result = testdir.runpytest("--unexpected-argument")
+ with pytest.raises(ValueError, message="Pytest terminal report not found"):
+ result.assert_outcomes(passed=0)
+
+
+def test_cwd_snapshot(tmpdir):
+ foo = tmpdir.ensure("foo", dir=1)
+ bar = tmpdir.ensure("bar", dir=1)
+ foo.chdir()
+ snapshot = CwdSnapshot()
+ bar.chdir()
+ assert py.path.local() == bar
+ snapshot.restore()
+ assert py.path.local() == foo
+
+
+class TestSysModulesSnapshot(object):
+ key = "my-test-module"
+
+ def test_remove_added(self):
+ original = dict(sys.modules)
+ assert self.key not in sys.modules
+ snapshot = SysModulesSnapshot()
+ sys.modules[self.key] = "something"
+ assert self.key in sys.modules
+ snapshot.restore()
+ assert sys.modules == original
+
+ def test_add_removed(self, monkeypatch):
+ assert self.key not in sys.modules
+ monkeypatch.setitem(sys.modules, self.key, "something")
+ assert self.key in sys.modules
+ original = dict(sys.modules)
+ snapshot = SysModulesSnapshot()
+ del sys.modules[self.key]
+ assert self.key not in sys.modules
+ snapshot.restore()
+ assert sys.modules == original
+
+ def test_restore_reloaded(self, monkeypatch):
+ assert self.key not in sys.modules
+ monkeypatch.setitem(sys.modules, self.key, "something")
+ assert self.key in sys.modules
+ original = dict(sys.modules)
+ snapshot = SysModulesSnapshot()
+ sys.modules[self.key] = "something else"
+ snapshot.restore()
+ assert sys.modules == original
+
+ def test_preserve_modules(self, monkeypatch):
+ key = [self.key + str(i) for i in range(3)]
+ assert not any(k in sys.modules for k in key)
+ for i, k in enumerate(key):
+ monkeypatch.setitem(sys.modules, k, "something" + str(i))
+ original = dict(sys.modules)
+
+ def preserve(name):
+ return name in (key[0], key[1], "some-other-key")
+
+ snapshot = SysModulesSnapshot(preserve=preserve)
+ sys.modules[key[0]] = original[key[0]] = "something else0"
+ sys.modules[key[1]] = original[key[1]] = "something else1"
+ sys.modules[key[2]] = "something else2"
+ snapshot.restore()
+ assert sys.modules == original
+
+ def test_preserve_container(self, monkeypatch):
+ original = dict(sys.modules)
+ assert self.key not in original
+ replacement = dict(sys.modules)
+ replacement[self.key] = "life of brian"
+ snapshot = SysModulesSnapshot()
+ monkeypatch.setattr(sys, "modules", replacement)
+ snapshot.restore()
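+ # restore() mutates whichever dict is currently installed rather than swapping the original object back in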
+ assert sys.modules is replacement
+ assert sys.modules == original
+
+
+@pytest.mark.parametrize("path_type", ("path", "meta_path"))
+class TestSysPathsSnapshot(object):
+ other_path = {"path": "meta_path", "meta_path": "path"}
+
+ @staticmethod
+ def path(n):
+ return "my-dirty-little-secret-" + str(n)
+
+ def test_restore(self, monkeypatch, path_type):
+ other_path_type = self.other_path[path_type]
+ for i in range(10):
+ assert self.path(i) not in getattr(sys, path_type)
+ sys_path = [self.path(i) for i in range(6)]
+ monkeypatch.setattr(sys, path_type, sys_path)
+ original = list(sys_path)
+ original_other = list(getattr(sys, other_path_type))
+ snapshot = SysPathsSnapshot()
+ transformation = {
+ "source": (0, 1, 2, 3, 4, 5), "target": (6, 2, 9, 7, 5, 8)
+ } # noqa: E201
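+ # the mutations below rewrite the "source" index sequence into "target"; restore() must undo every one of them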
+ assert sys_path == [self.path(x) for x in transformation["source"]]
+ sys_path[1] = self.path(6)
+ sys_path[3] = self.path(7)
+ sys_path.append(self.path(8))
+ del sys_path[4]
+ sys_path[3:3] = [self.path(9)]
+ del sys_path[0]
+ assert sys_path == [self.path(x) for x in transformation["target"]]
+ snapshot.restore()
+ assert getattr(sys, path_type) is sys_path
+ assert getattr(sys, path_type) == original
+ assert getattr(sys, other_path_type) == original_other
+
+ def test_preserve_container(self, monkeypatch, path_type):
+ other_path_type = self.other_path[path_type]
+ original_data = list(getattr(sys, path_type))
+ original_other = getattr(sys, other_path_type)
+ original_other_data = list(original_other)
+ new = []
+ snapshot = SysPathsSnapshot()
+ monkeypatch.setattr(sys, path_type, new)
+ snapshot.restore()
+ assert getattr(sys, path_type) is new
+ assert getattr(sys, path_type) == original_data
+ assert getattr(sys, other_path_type) is original_other
+ assert getattr(sys, other_path_type) == original_other_data
diff --git a/third_party/python/pytest/testing/test_recwarn.py b/third_party/python/pytest/testing/test_recwarn.py
new file mode 100644
index 0000000000..a8e2fb8037
--- /dev/null
+++ b/third_party/python/pytest/testing/test_recwarn.py
@@ -0,0 +1,347 @@
+from __future__ import absolute_import, division, print_function
+import warnings
+import re
+
+import pytest
+from _pytest.recwarn import WarningsRecorder
+
+
+def test_recwarn_functional(testdir):
+ reprec = testdir.inline_runsource(
+ """
+ import warnings
+ def test_method(recwarn):
+ warnings.warn("hello")
+ warn = recwarn.pop()
+ assert isinstance(warn.message, UserWarning)
+ """
+ )
+ res = reprec.countoutcomes()
+ assert tuple(res) == (1, 0, 0), res
+
+
+class TestWarningsRecorderChecker(object):
+
+ def test_recording(self):
+ rec = WarningsRecorder()
+ with rec:
+ assert not rec.list
+ warnings.warn_explicit("hello", UserWarning, "xyz", 13)
+ assert len(rec.list) == 1
+ warnings.warn(DeprecationWarning("hello"))
+ assert len(rec.list) == 2
+ warn = rec.pop()
+ assert str(warn.message) == "hello"
+ values = rec.list
+ rec.clear()
+ assert len(rec.list) == 0
+ assert values is rec.list
+ pytest.raises(AssertionError, "rec.pop()")
+
+ def test_typechecking(self):
+ from _pytest.recwarn import WarningsChecker
+
+ with pytest.raises(TypeError):
+ WarningsChecker(5)
+ with pytest.raises(TypeError):
+ WarningsChecker(("hi", RuntimeWarning))
+ with pytest.raises(TypeError):
+ WarningsChecker([DeprecationWarning, RuntimeWarning])
+
+ def test_invalid_enter_exit(self):
+ # wrap this test in WarningsRecorder to ensure warning state gets reset
+ with WarningsRecorder():
+ with pytest.raises(RuntimeError):
+ rec = WarningsRecorder()
+ rec.__exit__(None, None, None) # can't exit before entering
+
+ with pytest.raises(RuntimeError):
+ rec = WarningsRecorder()
+ with rec:
+ with rec:
+ pass # can't enter twice
+
+
+class TestDeprecatedCall(object):
+ """test pytest.deprecated_call()"""
+
+ def dep(self, i, j=None):
+ if i == 0:
+ warnings.warn("is deprecated", DeprecationWarning, stacklevel=1)
+ return 42
+
+ def dep_explicit(self, i):
+ if i == 0:
+ warnings.warn_explicit(
+ "dep_explicit", category=DeprecationWarning, filename="hello", lineno=3
+ )
+
+ def test_deprecated_call_raises(self):
+ with pytest.raises(AssertionError) as excinfo:
+ pytest.deprecated_call(self.dep, 3, 5)
+ assert "Did not produce" in str(excinfo)
+
+ def test_deprecated_call(self):
+ pytest.deprecated_call(self.dep, 0, 5)
+
+ def test_deprecated_call_ret(self):
+ ret = pytest.deprecated_call(self.dep, 0)
+ assert ret == 42
+
+ def test_deprecated_call_preserves(self):
+ onceregistry = warnings.onceregistry.copy()
+ filters = warnings.filters[:]
+ warn = warnings.warn
+ warn_explicit = warnings.warn_explicit
+ self.test_deprecated_call_raises()
+ self.test_deprecated_call()
+ assert onceregistry == warnings.onceregistry
+ assert filters == warnings.filters
+ assert warn is warnings.warn
+ assert warn_explicit is warnings.warn_explicit
+
+ def test_deprecated_explicit_call_raises(self):
+ with pytest.raises(AssertionError):
+ pytest.deprecated_call(self.dep_explicit, 3)
+
+ def test_deprecated_explicit_call(self):
+ pytest.deprecated_call(self.dep_explicit, 0)
+ pytest.deprecated_call(self.dep_explicit, 0)
+
+ @pytest.mark.parametrize("mode", ["context_manager", "call"])
+ def test_deprecated_call_no_warning(self, mode):
+ """Ensure deprecated_call() raises the expected failure when its block/function does
+ not raise a deprecation warning.
+ """
+
+ def f():
+ pass
+
+ msg = "Did not produce DeprecationWarning or PendingDeprecationWarning"
+ with pytest.raises(AssertionError, match=msg):
+ if mode == "call":
+ pytest.deprecated_call(f)
+ else:
+ with pytest.deprecated_call():
+ f()
+
+ @pytest.mark.parametrize(
+ "warning_type", [PendingDeprecationWarning, DeprecationWarning]
+ )
+ @pytest.mark.parametrize("mode", ["context_manager", "call"])
+ @pytest.mark.parametrize("call_f_first", [True, False])
+ @pytest.mark.filterwarnings("ignore")
+ def test_deprecated_call_modes(self, warning_type, mode, call_f_first):
+ """Ensure deprecated_call() captures a deprecation warning as expected inside its
+ block/function.
+ """
+
+ def f():
+ warnings.warn(warning_type("hi"))
+ return 10
+
+ # ensure deprecated_call() can capture the warning even if it has already been triggered
+ if call_f_first:
+ assert f() == 10
+ if mode == "call":
+ assert pytest.deprecated_call(f) == 10
+ else:
+ with pytest.deprecated_call():
+ assert f() == 10
+
+ @pytest.mark.parametrize("mode", ["context_manager", "call"])
+ def test_deprecated_call_exception_is_raised(self, mode):
+ """If the block of the code being tested by deprecated_call() raises an exception,
+ it must raise the exception undisturbed.
+ """
+
+ def f():
+ raise ValueError("some exception")
+
+ with pytest.raises(ValueError, match="some exception"):
+ if mode == "call":
+ pytest.deprecated_call(f)
+ else:
+ with pytest.deprecated_call():
+ f()
+
+ def test_deprecated_call_specificity(self):
+ other_warnings = [
+ Warning,
+ UserWarning,
+ SyntaxWarning,
+ RuntimeWarning,
+ FutureWarning,
+ ImportWarning,
+ UnicodeWarning,
+ ]
+ for warning in other_warnings:
+
+ def f():
+ warnings.warn(warning("hi"))
+
+ with pytest.raises(AssertionError):
+ pytest.deprecated_call(f)
+ with pytest.raises(AssertionError):
+ with pytest.deprecated_call():
+ f()
+
+
+class TestWarns(object):
+
+ def test_strings(self):
+ # different messages, b/c Python suppresses multiple identical warnings
+ source1 = "warnings.warn('w1', RuntimeWarning)"
+ source2 = "warnings.warn('w2', RuntimeWarning)"
+ source3 = "warnings.warn('w3', RuntimeWarning)"
+ pytest.warns(RuntimeWarning, source1)
+ pytest.raises(pytest.fail.Exception, lambda: pytest.warns(UserWarning, source2))
+ pytest.warns(RuntimeWarning, source3)
+
+ def test_function(self):
+ pytest.warns(
+ SyntaxWarning, lambda msg: warnings.warn(msg, SyntaxWarning), "syntax"
+ )
+
+ def test_warning_tuple(self):
+ pytest.warns(
+ (RuntimeWarning, SyntaxWarning), lambda: warnings.warn("w1", RuntimeWarning)
+ )
+ pytest.warns(
+ (RuntimeWarning, SyntaxWarning), lambda: warnings.warn("w2", SyntaxWarning)
+ )
+ pytest.raises(
+ pytest.fail.Exception,
+ lambda: pytest.warns(
+ (RuntimeWarning, SyntaxWarning),
+ lambda: warnings.warn("w3", UserWarning),
+ ),
+ )
+
+ def test_as_contextmanager(self):
+ with pytest.warns(RuntimeWarning):
+ warnings.warn("runtime", RuntimeWarning)
+
+ with pytest.warns(UserWarning):
+ warnings.warn("user", UserWarning)
+
+ with pytest.raises(pytest.fail.Exception) as excinfo:
+ with pytest.warns(RuntimeWarning):
+ warnings.warn("user", UserWarning)
+ excinfo.match(
+ r"DID NOT WARN. No warnings of type \(.+RuntimeWarning.+,\) was emitted. "
+ r"The list of emitted warnings is: \[UserWarning\('user',?\)\]."
+ )
+
+ with pytest.raises(pytest.fail.Exception) as excinfo:
+ with pytest.warns(UserWarning):
+ warnings.warn("runtime", RuntimeWarning)
+ excinfo.match(
+ r"DID NOT WARN. No warnings of type \(.+UserWarning.+,\) was emitted. "
+ r"The list of emitted warnings is: \[RuntimeWarning\('runtime',?\)\]."
+ )
+
+ with pytest.raises(pytest.fail.Exception) as excinfo:
+ with pytest.warns(UserWarning):
+ pass
+ excinfo.match(
+ r"DID NOT WARN. No warnings of type \(.+UserWarning.+,\) was emitted. "
+ r"The list of emitted warnings is: \[\]."
+ )
+
+ warning_classes = (UserWarning, FutureWarning)
+ with pytest.raises(pytest.fail.Exception) as excinfo:
+ with pytest.warns(warning_classes) as warninfo:
+ warnings.warn("runtime", RuntimeWarning)
+ warnings.warn("import", ImportWarning)
+
+ message_template = (
+ "DID NOT WARN. No warnings of type {0} was emitted. "
+ "The list of emitted warnings is: {1}."
+ )
+ excinfo.match(
+ re.escape(
+ message_template.format(
+ warning_classes, [each.message for each in warninfo]
+ )
+ )
+ )
+
+ def test_record(self):
+ with pytest.warns(UserWarning) as record:
+ warnings.warn("user", UserWarning)
+
+ assert len(record) == 1
+ assert str(record[0].message) == "user"
+
+ def test_record_only(self):
+ with pytest.warns(None) as record:
+ warnings.warn("user", UserWarning)
+ warnings.warn("runtime", RuntimeWarning)
+
+ assert len(record) == 2
+ assert str(record[0].message) == "user"
+ assert str(record[1].message) == "runtime"
+
+ def test_record_by_subclass(self):
+ with pytest.warns(Warning) as record:
+ warnings.warn("user", UserWarning)
+ warnings.warn("runtime", RuntimeWarning)
+
+ assert len(record) == 2
+ assert str(record[0].message) == "user"
+ assert str(record[1].message) == "runtime"
+
+ class MyUserWarning(UserWarning):
+ pass
+
+ class MyRuntimeWarning(RuntimeWarning):
+ pass
+
+ with pytest.warns((UserWarning, RuntimeWarning)) as record:
+ warnings.warn("user", MyUserWarning)
+ warnings.warn("runtime", MyRuntimeWarning)
+
+ assert len(record) == 2
+ assert str(record[0].message) == "user"
+ assert str(record[1].message) == "runtime"
+
+ def test_double_test(self, testdir):
+ """If a test is run again, the warning should still be raised"""
+ testdir.makepyfile(
+ """
+ import pytest
+ import warnings
+
+ @pytest.mark.parametrize('run', [1, 2])
+ def test(run):
+ with pytest.warns(RuntimeWarning):
+ warnings.warn("runtime", RuntimeWarning)
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*2 passed in*"])
+
+ def test_match_regex(self):
+ with pytest.warns(UserWarning, match=r"must be \d+$"):
+ warnings.warn("value must be 42", UserWarning)
+
+ with pytest.raises(pytest.fail.Exception):
+ with pytest.warns(UserWarning, match=r"must be \d+$"):
+ warnings.warn("this is not here", UserWarning)
+
+ with pytest.raises(pytest.fail.Exception):
+ with pytest.warns(FutureWarning, match=r"must be \d+$"):
+ warnings.warn("value must be 42", UserWarning)
+
+ def test_one_from_multiple_warns(self):
+ with pytest.warns(UserWarning, match=r"aaa"):
+ warnings.warn("cccccccccc", UserWarning)
+ warnings.warn("bbbbbbbbbb", UserWarning)
+ warnings.warn("aaaaaaaaaa", UserWarning)
+
+ def test_none_of_multiple_warns(self):
+ with pytest.raises(pytest.fail.Exception):
+ with pytest.warns(UserWarning, match=r"aaa"):
+ warnings.warn("bbbbbbbbbb", UserWarning)
+ warnings.warn("cccccccccc", UserWarning)
diff --git a/third_party/python/pytest/testing/test_resultlog.py b/third_party/python/pytest/testing/test_resultlog.py
new file mode 100644
index 0000000000..173384ffb3
--- /dev/null
+++ b/third_party/python/pytest/testing/test_resultlog.py
@@ -0,0 +1,243 @@
+from __future__ import absolute_import, division, print_function
+import os
+
+import _pytest._code
+import py
+import pytest
+from _pytest.nodes import Node, Item, FSCollector
+from _pytest.resultlog import (
+ generic_path,
+ ResultLog,
+ pytest_configure,
+ pytest_unconfigure,
+)
+
+
+def test_generic_path(testdir):
+ from _pytest.main import Session
+
+ config = testdir.parseconfig()
+ session = Session(config)
+ p1 = Node("a", config=config, session=session, nodeid="a")
+ # assert p1.fspath is None
+ p2 = Node("B", parent=p1)
+ p3 = Node("()", parent=p2)
+ item = Item("c", parent=p3)
+
+ res = generic_path(item)
+ assert res == "a.B().c"
+
+ p0 = FSCollector("proj/test", config=config, session=session)
+ p1 = FSCollector("proj/test/a", parent=p0)
+ p2 = Node("B", parent=p1)
+ p3 = Node("()", parent=p2)
+ p4 = Node("c", parent=p3)
+ item = Item("[1]", parent=p4)
+
+ res = generic_path(item)
+ assert res == "test/a:B().c[1]"
+
+
+def test_write_log_entry():
+ reslog = ResultLog(None, None)
+ reslog.logfile = py.io.TextIO()
+ reslog.write_log_entry("name", ".", "")
+ entry = reslog.logfile.getvalue()
+ assert entry[-1] == "\n"
+ entry_lines = entry.splitlines()
+ assert len(entry_lines) == 1
+ assert entry_lines[0] == ". name"
+
+ reslog.logfile = py.io.TextIO()
+ reslog.write_log_entry("name", "s", "Skipped")
+ entry = reslog.logfile.getvalue()
+ assert entry[-1] == "\n"
+ entry_lines = entry.splitlines()
+ assert len(entry_lines) == 2
+ assert entry_lines[0] == "s name"
+ assert entry_lines[1] == " Skipped"
+
+ reslog.logfile = py.io.TextIO()
+ reslog.write_log_entry("name", "s", "Skipped\n")
+ entry = reslog.logfile.getvalue()
+ assert entry[-1] == "\n"
+ entry_lines = entry.splitlines()
+ assert len(entry_lines) == 2
+ assert entry_lines[0] == "s name"
+ assert entry_lines[1] == " Skipped"
+
+ reslog.logfile = py.io.TextIO()
+ longrepr = " tb1\n tb 2\nE tb3\nSome Error"
+ reslog.write_log_entry("name", "F", longrepr)
+ entry = reslog.logfile.getvalue()
+ assert entry[-1] == "\n"
+ entry_lines = entry.splitlines()
+ assert len(entry_lines) == 5
+ assert entry_lines[0] == "F name"
+ assert entry_lines[1:] == [" " + line for line in longrepr.splitlines()]
+
+
+class TestWithFunctionIntegration(object):
+ # XXX (hpk) I think that the resultlog plugin should
+ # provide a Parser object so that one can remain
+ # ignorant regarding formatting details.
+ def getresultlog(self, testdir, arg):
+ resultlog = testdir.tmpdir.join("resultlog")
+ testdir.plugins.append("resultlog")
+ args = ["--resultlog=%s" % resultlog] + [arg]
+ testdir.runpytest(*args)
+ return [x for x in resultlog.readlines(cr=0) if x]
+
+ def test_collection_report(self, testdir):
+ ok = testdir.makepyfile(test_collection_ok="")
+ fail = testdir.makepyfile(test_collection_fail="XXX")
+ lines = self.getresultlog(testdir, ok)
+ assert not lines
+
+ lines = self.getresultlog(testdir, fail)
+ assert lines
+ assert lines[0].startswith("F ")
+ assert lines[0].endswith("test_collection_fail.py"), lines[0]
+ for x in lines[1:]:
+ assert x.startswith(" ")
+ assert "XXX" in "".join(lines[1:])
+
+ def test_log_test_outcomes(self, testdir):
+ mod = testdir.makepyfile(
+ test_mod="""
+ import pytest
+ def test_pass(): pass
+ def test_skip(): pytest.skip("hello")
+ def test_fail(): raise ValueError("FAIL")
+
+ @pytest.mark.xfail
+ def test_xfail(): raise ValueError("XFAIL")
+ @pytest.mark.xfail
+ def test_xpass(): pass
+
+ """
+ )
+ lines = self.getresultlog(testdir, mod)
+ assert len(lines) >= 3
+ assert lines[0].startswith(". ")
+ assert lines[0].endswith("test_pass")
+ assert lines[1].startswith("s "), lines[1]
+ assert lines[1].endswith("test_skip")
+ assert lines[2].find("hello") != -1
+
+ assert lines[3].startswith("F ")
+ assert lines[3].endswith("test_fail")
+ tb = "".join(lines[4:8])
+ assert tb.find('raise ValueError("FAIL")') != -1
+
+ assert lines[8].startswith("x ")
+ tb = "".join(lines[8:14])
+ assert tb.find('raise ValueError("XFAIL")') != -1
+
+ assert lines[14].startswith("X ")
+ assert len(lines) == 15
+
+ @pytest.mark.parametrize("style", ("native", "long", "short"))
+ def test_internal_exception(self, style):
+ # they are produced for example by a teardown failing
+ # at the end of the run or a failing hook invocation
+ try:
+ raise ValueError
+ except ValueError:
+ excinfo = _pytest._code.ExceptionInfo()
+ reslog = ResultLog(None, py.io.TextIO())
+ reslog.pytest_internalerror(excinfo.getrepr(style=style))
+ entry = reslog.logfile.getvalue()
+ entry_lines = entry.splitlines()
+
+ assert entry_lines[0].startswith("! ")
+ if style != "native":
+ assert os.path.basename(__file__)[:-9] in entry_lines[0] # .pyc/class
+ assert entry_lines[-1][0] == " "
+ assert "ValueError" in entry
+
+
+def test_generic(testdir, LineMatcher):
+ testdir.plugins.append("resultlog")
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_pass():
+ pass
+ def test_fail():
+ assert 0
+ def test_skip():
+ pytest.skip("")
+ @pytest.mark.xfail
+ def test_xfail():
+ assert 0
+ @pytest.mark.xfail(run=False)
+ def test_xfail_norun():
+ assert 0
+ """
+ )
+ testdir.runpytest("--resultlog=result.log")
+ lines = testdir.tmpdir.join("result.log").readlines(cr=0)
+ LineMatcher(lines).fnmatch_lines(
+ [
+ ". *:test_pass",
+ "F *:test_fail",
+ "s *:test_skip",
+ "x *:test_xfail",
+ "x *:test_xfail_norun",
+ ]
+ )
+
+
+def test_makedir_for_resultlog(testdir, LineMatcher):
+ """--resultlog should automatically create directories for the log file"""
+ testdir.plugins.append("resultlog")
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_pass():
+ pass
+ """
+ )
+ testdir.runpytest("--resultlog=path/to/result.log")
+ lines = testdir.tmpdir.join("path/to/result.log").readlines(cr=0)
+ LineMatcher(lines).fnmatch_lines([". *:test_pass"])
+
+
+def test_no_resultlog_on_slaves(testdir):
+ config = testdir.parseconfig("-p", "resultlog", "--resultlog=resultlog")
+
+ assert not hasattr(config, "_resultlog")
+ pytest_configure(config)
+ assert hasattr(config, "_resultlog")
+ pytest_unconfigure(config)
+ assert not hasattr(config, "_resultlog")
+
+ config.slaveinput = {}
+ pytest_configure(config)
+ assert not hasattr(config, "_resultlog")
+ pytest_unconfigure(config)
+ assert not hasattr(config, "_resultlog")
+
+
+def test_failure_issue380(testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ class MyCollector(pytest.File):
+ def collect(self):
+ raise ValueError()
+ def repr_failure(self, excinfo):
+ return "somestring"
+ def pytest_collect_file(path, parent):
+ return MyCollector(parent=parent, fspath=path)
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_func():
+ pass
+ """
+ )
+ result = testdir.runpytest("--resultlog=log")
+ assert result.ret == 2
diff --git a/third_party/python/pytest/testing/test_runner.py b/third_party/python/pytest/testing/test_runner.py
new file mode 100644
index 0000000000..f5430a90da
--- /dev/null
+++ b/third_party/python/pytest/testing/test_runner.py
@@ -0,0 +1,951 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import, division, print_function
+
+import _pytest._code
+import inspect
+import os
+import py
+import pytest
+import sys
+import types
+from _pytest import runner, main, outcomes
+
+
+class TestSetupState(object):
+
+ def test_setup(self, testdir):
+ ss = runner.SetupState()
+ item = testdir.getitem("def test_func(): pass")
+ values = [1]
+ ss.prepare(item)
+ ss.addfinalizer(values.pop, colitem=item)
+ assert values
+ ss._pop_and_teardown()
+ assert not values
+
+ def test_teardown_exact_stack_empty(self, testdir):
+ item = testdir.getitem("def test_func(): pass")
+ ss = runner.SetupState()
+ ss.teardown_exact(item, None)
+ ss.teardown_exact(item, None)
+ ss.teardown_exact(item, None)
+
+ def test_setup_fails_and_failure_is_cached(self, testdir):
+ item = testdir.getitem(
+ """
+ def setup_module(mod):
+ raise ValueError(42)
+ def test_func(): pass
+ """
+ )
+ ss = runner.SetupState()
+ pytest.raises(ValueError, lambda: ss.prepare(item))
+ pytest.raises(ValueError, lambda: ss.prepare(item))
+
+ def test_teardown_multiple_one_fails(self, testdir):
+ r = []
+
+ def fin1():
+ r.append("fin1")
+
+ def fin2():
+ raise Exception("oops")
+
+ def fin3():
+ r.append("fin3")
+
+ item = testdir.getitem("def test_func(): pass")
+ ss = runner.SetupState()
+ ss.addfinalizer(fin1, item)
+ ss.addfinalizer(fin2, item)
+ ss.addfinalizer(fin3, item)
+ with pytest.raises(Exception) as err:
+ ss._callfinalizers(item)
+ assert err.value.args == ("oops",)
+ assert r == ["fin3", "fin1"]
+
+ def test_teardown_multiple_fail(self, testdir):
+ # Ensure the first exception is the one which is re-raised.
+ # Ideally both would be reported however.
+ def fin1():
+ raise Exception("oops1")
+
+ def fin2():
+ raise Exception("oops2")
+
+ item = testdir.getitem("def test_func(): pass")
+ ss = runner.SetupState()
+ ss.addfinalizer(fin1, item)
+ ss.addfinalizer(fin2, item)
+ with pytest.raises(Exception) as err:
+ ss._callfinalizers(item)
+ assert err.value.args == ("oops2",)
+
+ def test_teardown_multiple_scopes_one_fails(self, testdir):
+ module_teardown = []
+
+ def fin_func():
+ raise Exception("oops1")
+
+ def fin_module():
+ module_teardown.append("fin_module")
+
+ item = testdir.getitem("def test_func(): pass")
+ ss = runner.SetupState()
+ ss.addfinalizer(fin_module, item.listchain()[-2])
+ ss.addfinalizer(fin_func, item)
+ ss.prepare(item)
+ with pytest.raises(Exception, match="oops1"):
+ ss.teardown_exact(item, None)
+ assert module_teardown
+
+
+class BaseFunctionalTests(object):
+
+ def test_passfunction(self, testdir):
+ reports = testdir.runitem(
+ """
+ def test_func():
+ pass
+ """
+ )
+ rep = reports[1]
+ assert rep.passed
+ assert not rep.failed
+ assert rep.outcome == "passed"
+ assert not rep.longrepr
+
+ def test_failfunction(self, testdir):
+ reports = testdir.runitem(
+ """
+ def test_func():
+ assert 0
+ """
+ )
+ rep = reports[1]
+ assert not rep.passed
+ assert not rep.skipped
+ assert rep.failed
+ assert rep.when == "call"
+ assert rep.outcome == "failed"
+ # assert isinstance(rep.longrepr, ReprExceptionInfo)
+
+ def test_skipfunction(self, testdir):
+ reports = testdir.runitem(
+ """
+ import pytest
+ def test_func():
+ pytest.skip("hello")
+ """
+ )
+ rep = reports[1]
+ assert not rep.failed
+ assert not rep.passed
+ assert rep.skipped
+ assert rep.outcome == "skipped"
+ # assert rep.skipped.when == "call"
+ # assert rep.skipped.when == "call"
+ # assert rep.skipped == "%sreason == "hello"
+ # assert rep.skipped.location.lineno == 3
+ # assert rep.skipped.location.path
+ # assert not rep.skipped.failurerepr
+
+ def test_skip_in_setup_function(self, testdir):
+ reports = testdir.runitem(
+ """
+ import pytest
+ def setup_function(func):
+ pytest.skip("hello")
+ def test_func():
+ pass
+ """
+ )
+ print(reports)
+ rep = reports[0]
+ assert not rep.failed
+ assert not rep.passed
+ assert rep.skipped
+ # assert rep.skipped.reason == "hello"
+ # assert rep.skipped.location.lineno == 3
+ # assert rep.skipped.location.lineno == 3
+ assert len(reports) == 2
+ assert reports[1].passed # teardown
+
+ def test_failure_in_setup_function(self, testdir):
+ reports = testdir.runitem(
+ """
+ import pytest
+ def setup_function(func):
+ raise ValueError(42)
+ def test_func():
+ pass
+ """
+ )
+ rep = reports[0]
+ assert not rep.skipped
+ assert not rep.passed
+ assert rep.failed
+ assert rep.when == "setup"
+ assert len(reports) == 2
+
+ def test_failure_in_teardown_function(self, testdir):
+ reports = testdir.runitem(
+ """
+ import pytest
+ def teardown_function(func):
+ raise ValueError(42)
+ def test_func():
+ pass
+ """
+ )
+ print(reports)
+ assert len(reports) == 3
+ rep = reports[2]
+ assert not rep.skipped
+ assert not rep.passed
+ assert rep.failed
+ assert rep.when == "teardown"
+ # assert rep.longrepr.reprcrash.lineno == 3
+ # assert rep.longrepr.reprtraceback.reprentries
+
+ def test_custom_failure_repr(self, testdir):
+ testdir.makepyfile(
+ conftest="""
+ import pytest
+ class Function(pytest.Function):
+ def repr_failure(self, excinfo):
+ return "hello"
+ """
+ )
+ reports = testdir.runitem(
+ """
+ import pytest
+ def test_func():
+ assert 0
+ """
+ )
+ rep = reports[1]
+ assert not rep.skipped
+ assert not rep.passed
+ assert rep.failed
+ # assert rep.outcome.when == "call"
+ # assert rep.failed.where.lineno == 3
+ # assert rep.failed.where.path.basename == "test_func.py"
+ # assert rep.failed.failurerepr == "hello"
+
+ def test_teardown_final_returncode(self, testdir):
+ rec = testdir.inline_runsource(
+ """
+ def test_func():
+ pass
+ def teardown_function(func):
+ raise ValueError(42)
+ """
+ )
+ assert rec.ret == 1
+
+ def test_logstart_logfinish_hooks(self, testdir):
+ rec = testdir.inline_runsource(
+ """
+ import pytest
+ def test_func():
+ pass
+ """
+ )
+ reps = rec.getcalls("pytest_runtest_logstart pytest_runtest_logfinish")
+ assert (
+ [x._name for x in reps]
+ == ["pytest_runtest_logstart", "pytest_runtest_logfinish"]
+ )
+ for rep in reps:
+ assert rep.nodeid == "test_logstart_logfinish_hooks.py::test_func"
+ assert rep.location == ("test_logstart_logfinish_hooks.py", 1, "test_func")
+
+ def test_exact_teardown_issue90(self, testdir):
+ rec = testdir.inline_runsource(
+ """
+ import pytest
+
+ class TestClass(object):
+ def test_method(self):
+ pass
+ def teardown_class(cls):
+ raise Exception()
+
+ def test_func():
+ import sys
+ # on python2 exc_info is kept till a function exits
+ # so we would end up calling test functions while
+ # sys.exc_info would return the indexerror
+ # from guessing the lastitem
+ excinfo = sys.exc_info()
+ import traceback
+ assert excinfo[0] is None, \
+ traceback.format_exception(*excinfo)
+ def teardown_function(func):
+ raise ValueError(42)
+ """
+ )
+ reps = rec.getreports("pytest_runtest_logreport")
+ print(reps)
+ for i in range(2):
+ assert reps[i].nodeid.endswith("test_method")
+ assert reps[i].passed
+ assert reps[2].when == "teardown"
+ assert reps[2].failed
+ assert len(reps) == 6
+ for i in range(3, 5):
+ assert reps[i].nodeid.endswith("test_func")
+ assert reps[i].passed
+ assert reps[5].when == "teardown"
+ assert reps[5].nodeid.endswith("test_func")
+ assert reps[5].failed
+
+ def test_exact_teardown_issue1206(self, testdir):
+ """issue shadowing error with wrong number of arguments on teardown_method."""
+ rec = testdir.inline_runsource(
+ """
+ import pytest
+
+ class TestClass(object):
+ def teardown_method(self, x, y, z):
+ pass
+
+ def test_method(self):
+ assert True
+ """
+ )
+ reps = rec.getreports("pytest_runtest_logreport")
+ print(reps)
+ assert len(reps) == 3
+ #
+ assert reps[0].nodeid.endswith("test_method")
+ assert reps[0].passed
+ assert reps[0].when == "setup"
+ #
+ assert reps[1].nodeid.endswith("test_method")
+ assert reps[1].passed
+ assert reps[1].when == "call"
+ #
+ assert reps[2].nodeid.endswith("test_method")
+ assert reps[2].failed
+ assert reps[2].when == "teardown"
+ assert reps[2].longrepr.reprcrash.message in (
+ # python3 error
+ "TypeError: teardown_method() missing 2 required positional arguments: 'y' and 'z'",
+ # python2 error
+ "TypeError: teardown_method() takes exactly 4 arguments (2 given)",
+ )
+
+ def test_failure_in_setup_function_ignores_custom_repr(self, testdir):
+ testdir.makepyfile(
+ conftest="""
+ import pytest
+ class Function(pytest.Function):
+ def repr_failure(self, excinfo):
+ assert 0
+ """
+ )
+ reports = testdir.runitem(
+ """
+ def setup_function(func):
+ raise ValueError(42)
+ def test_func():
+ pass
+ """
+ )
+ assert len(reports) == 2
+ rep = reports[0]
+ print(rep)
+ assert not rep.skipped
+ assert not rep.passed
+ assert rep.failed
+ # assert rep.outcome.when == "setup"
+ # assert rep.outcome.where.lineno == 3
+ # assert rep.outcome.where.path.basename == "test_func.py"
+ # assert isinstance(rep.failed.failurerepr, PythonFailureRepr)
+
+ def test_systemexit_does_not_bail_out(self, testdir):
+ try:
+ reports = testdir.runitem(
+ """
+ def test_func():
+ raise SystemExit(42)
+ """
+ )
+ except SystemExit:
+ pytest.fail("runner did not catch SystemExit")
+ rep = reports[1]
+ assert rep.failed
+ assert rep.when == "call"
+
+ def test_exit_propagates(self, testdir):
+ try:
+ testdir.runitem(
+ """
+ import pytest
+ def test_func():
+ raise pytest.exit.Exception()
+ """
+ )
+ except pytest.exit.Exception:
+ pass
+ else:
+ pytest.fail("did not raise")
+
+
+class TestExecutionNonForked(BaseFunctionalTests):
+
+ def getrunner(self):
+
+ def f(item):
+ return runner.runtestprotocol(item, log=False)
+
+ return f
+
+ def test_keyboardinterrupt_propagates(self, testdir):
+ try:
+ testdir.runitem(
+ """
+ def test_func():
+ raise KeyboardInterrupt("fake")
+ """
+ )
+ except KeyboardInterrupt:
+ pass
+ else:
+ pytest.fail("did not raise")
+
+
+class TestExecutionForked(BaseFunctionalTests):
+ pytestmark = pytest.mark.skipif("not hasattr(os, 'fork')")
+
+ def getrunner(self):
+ # XXX re-arrange this test to live in pytest-xdist
+ boxed = pytest.importorskip("xdist.boxed")
+ return boxed.forked_run_report
+
+ def test_suicide(self, testdir):
+ reports = testdir.runitem(
+ """
+ def test_func():
+ import os
+ os.kill(os.getpid(), 15)
+ """
+ )
+ rep = reports[0]
+ assert rep.failed
+ assert rep.when == "???"
+
+
+class TestSessionReports(object):
+
+ def test_collect_result(self, testdir):
+ col = testdir.getmodulecol(
+ """
+ def test_func1():
+ pass
+ class TestClass(object):
+ pass
+ """
+ )
+ rep = runner.collect_one_node(col)
+ assert not rep.failed
+ assert not rep.skipped
+ assert rep.passed
+ locinfo = rep.location
+ assert locinfo[0] == col.fspath.basename
+ assert not locinfo[1]
+ assert locinfo[2] == col.fspath.basename
+ res = rep.result
+ assert len(res) == 2
+ assert res[0].name == "test_func1"
+ assert res[1].name == "TestClass"
+
+
+reporttypes = [
+ runner.BaseReport,
+ runner.TestReport,
+ runner.TeardownErrorReport,
+ runner.CollectReport,
+]
+
+
+@pytest.mark.parametrize(
+ "reporttype", reporttypes, ids=[x.__name__ for x in reporttypes]
+)
+def test_report_extra_parameters(reporttype):
+ if hasattr(inspect, "signature"):
+ args = list(inspect.signature(reporttype.__init__).parameters.keys())[1:]
+ else:
+ args = inspect.getargspec(reporttype.__init__)[0][1:]
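+ # build a kwargs dict that satisfies every required __init__ parameter, regardless of Python version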
+ basekw = dict.fromkeys(args, [])
+ report = reporttype(newthing=1, **basekw)
+ assert report.newthing == 1
+
+
+def test_callinfo():
+ ci = runner.CallInfo(lambda: 0, "123")
+ assert ci.when == "123"
+ assert ci.result == 0
+ assert "result" in repr(ci)
+ ci = runner.CallInfo(lambda: 0 / 0, "123")
+ assert ci.when == "123"
+ assert not hasattr(ci, "result")
+ assert ci.excinfo
+ assert "exc" in repr(ci)
+
+
+# design question: do we want general hooks in python files?
+# then something like the following functional tests makes sense
+
+
+@pytest.mark.xfail
+def test_runtest_in_module_ordering(testdir):
+ p1 = testdir.makepyfile(
+ """
+ import pytest
+ def pytest_runtest_setup(item): # runs after class-level!
+ item.function.mylist.append("module")
+ class TestClass(object):
+ def pytest_runtest_setup(self, item):
+ assert not hasattr(item.function, 'mylist')
+ item.function.mylist = ['class']
+ @pytest.fixture
+ def mylist(self, request):
+ return request.function.mylist
+ def pytest_runtest_call(self, item, __multicall__):
+ try:
+ __multicall__.execute()
+ except ValueError:
+ pass
+ def test_hello1(self, mylist):
+ assert mylist == ['class', 'module'], mylist
+ raise ValueError()
+ def test_hello2(self, mylist):
+ assert mylist == ['class', 'module'], mylist
+ def pytest_runtest_teardown(item):
+ del item.function.mylist
+ """
+ )
+ result = testdir.runpytest(p1)
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+
+def test_outcomeexception_exceptionattributes():
+ outcome = outcomes.OutcomeException("test")
+ assert outcome.args[0] == outcome.msg
+
+
+def test_outcomeexception_passes_except_Exception():
+ with pytest.raises(outcomes.OutcomeException):
+ try:
+ raise outcomes.OutcomeException("test")
+ except Exception:
+ pass
+
+
+def test_pytest_exit():
+ try:
+ pytest.exit("hello")
+ except pytest.exit.Exception:
+ excinfo = _pytest._code.ExceptionInfo()
+ assert excinfo.errisinstance(KeyboardInterrupt)
+
+
+def test_pytest_fail():
+ try:
+ pytest.fail("hello")
+ except pytest.fail.Exception:
+ excinfo = _pytest._code.ExceptionInfo()
+ s = excinfo.exconly(tryshort=True)
+ assert s.startswith("Failed")
+
+
+def test_pytest_exit_msg(testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+
+ def pytest_configure(config):
+ pytest.exit('oh noes')
+ """
+ )
+ result = testdir.runpytest()
+ result.stderr.fnmatch_lines(["Exit: oh noes"])
+
+
+def test_pytest_fail_notrace(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_hello():
+ pytest.fail("hello", pytrace=False)
+ def teardown_function(function):
+ pytest.fail("world", pytrace=False)
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["world", "hello"])
+ assert "def teardown_function" not in result.stdout.str()
+
+
+@pytest.mark.parametrize("str_prefix", ["u", ""])
+def test_pytest_fail_notrace_non_ascii(testdir, str_prefix):
+ """Fix pytest.fail with pytrace=False with non-ascii characters (#1178).
+
+ This tests with native and unicode strings containing non-ascii chars.
+ """
+ testdir.makepyfile(
+ u"""
+ # coding: utf-8
+ import pytest
+
+ def test_hello():
+ pytest.fail(%s'oh oh: ☺', pytrace=False)
+ """
+ % str_prefix
+ )
+ result = testdir.runpytest()
+ if sys.version_info[0] >= 3:
+ result.stdout.fnmatch_lines(["*test_hello*", "oh oh: ☺"])
+ else:
+ result.stdout.fnmatch_lines(["*test_hello*", "oh oh: *"])
+ assert "def test_hello" not in result.stdout.str()
+
+
+def test_pytest_no_tests_collected_exit_status(testdir):
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("*collected 0 items*")
+ assert result.ret == main.EXIT_NOTESTSCOLLECTED
+
+ testdir.makepyfile(
+ test_foo="""
+ def test_foo():
+ assert 1
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("*collected 1 item*")
+ result.stdout.fnmatch_lines("*1 passed*")
+ assert result.ret == main.EXIT_OK
+
+ result = testdir.runpytest("-k nonmatch")
+ result.stdout.fnmatch_lines("*collected 1 item*")
+ result.stdout.fnmatch_lines("*1 deselected*")
+ assert result.ret == main.EXIT_NOTESTSCOLLECTED
+
+
+def test_exception_printing_skip():
+ try:
+ pytest.skip("hello")
+ except pytest.skip.Exception:
+ excinfo = _pytest._code.ExceptionInfo()
+ s = excinfo.exconly(tryshort=True)
+ assert s.startswith("Skipped")
+
+
+def test_importorskip(monkeypatch):
+ importorskip = pytest.importorskip
+
+ def f():
+ importorskip("asdlkj")
+
+ try:
+ sysmod = importorskip("sys")
+ assert sysmod is sys
+ # path = pytest.importorskip("os.path")
+ # assert path == os.path
+ excinfo = pytest.raises(pytest.skip.Exception, f)
+ path = py.path.local(excinfo.getrepr().reprcrash.path)
+ # check that importorskip reports the actual call
+ # in this test the test_runner.py file
+ assert path.purebasename == "test_runner"
+ pytest.raises(SyntaxError, "pytest.importorskip('x y z')")
+ pytest.raises(SyntaxError, "pytest.importorskip('x=y')")
+ mod = types.ModuleType("hello123")
+ mod.__version__ = "1.3"
+ monkeypatch.setitem(sys.modules, "hello123", mod)
+ pytest.raises(
+ pytest.skip.Exception,
+ """
+ pytest.importorskip("hello123", minversion="1.3.1")
+ """,
+ )
+ mod2 = pytest.importorskip("hello123", minversion="1.3")
+ assert mod2 == mod
+ except pytest.skip.Exception:
+ print(_pytest._code.ExceptionInfo())
+ pytest.fail("spurious skip")
+
+
+def test_importorskip_imports_last_module_part():
+ ospath = pytest.importorskip("os.path")
+ assert os.path == ospath
+
+
+def test_importorskip_dev_module(monkeypatch):
+ try:
+ mod = types.ModuleType("mockmodule")
+ mod.__version__ = "0.13.0.dev-43290"
+ monkeypatch.setitem(sys.modules, "mockmodule", mod)
+ mod2 = pytest.importorskip("mockmodule", minversion="0.12.0")
+ assert mod2 == mod
+ pytest.raises(
+ pytest.skip.Exception,
+ """
+ pytest.importorskip('mockmodule1', minversion='0.14.0')""",
+ )
+ except pytest.skip.Exception:
+ print(_pytest._code.ExceptionInfo())
+ pytest.fail("spurious skip")
+
+
+def test_importorskip_module_level(testdir):
+ """importorskip must be able to skip entire modules when used at module level"""
+ testdir.makepyfile(
+ """
+ import pytest
+ foobarbaz = pytest.importorskip("foobarbaz")
+
+ def test_foo():
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*collected 0 items / 1 skipped*"])
+
+
+def test_pytest_cmdline_main(testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ def test_hello():
+ assert 1
+ if __name__ == '__main__':
+ pytest.cmdline.main([__file__])
+ """
+ )
+ import subprocess
+
+ popen = subprocess.Popen([sys.executable, str(p)], stdout=subprocess.PIPE)
+ popen.communicate()
+ ret = popen.wait()
+ assert ret == 0
+
+
+def test_unicode_in_longrepr(testdir):
+ testdir.makeconftest(
+ """
+ # -*- coding: utf-8 -*-
+ import pytest
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_makereport():
+ outcome = yield
+ rep = outcome.get_result()
+ if rep.when == "call":
+ rep.longrepr = u'ä'
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_out():
+ assert 0
+ """
+ )
+ result = testdir.runpytest()
+ assert result.ret == 1
+ assert "UnicodeEncodeError" not in result.stderr.str()
+
+
+def test_failure_in_setup(testdir):
+ testdir.makepyfile(
+ """
+ def setup_module():
+ 0/0
+ def test_func():
+ pass
+ """
+ )
+ result = testdir.runpytest("--tb=line")
+ assert "def setup_module" not in result.stdout.str()
+
+
+def test_makereport_getsource(testdir):
+ testdir.makepyfile(
+ """
+ def test_foo():
+ if False: pass
+ else: assert False
+ """
+ )
+ result = testdir.runpytest()
+ assert "INTERNALERROR" not in result.stdout.str()
+ result.stdout.fnmatch_lines(["*else: assert False*"])
+
+
+def test_makereport_getsource_dynamic_code(testdir, monkeypatch):
+ """Test that exception in dynamically generated code doesn't break getting the source line."""
+ import inspect
+
+ original_findsource = inspect.findsource
+
+ def findsource(obj, *args, **kwargs):
+ # Can be triggered by dynamically created functions
+ if obj.__name__ == "foo":
+ raise IndexError()
+ return original_findsource(obj, *args, **kwargs)
+
+ monkeypatch.setattr(inspect, "findsource", findsource)
+
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def foo(missing):
+ pass
+
+ def test_fix(foo):
+ assert False
+ """
+ )
+ result = testdir.runpytest("-vv")
+ assert "INTERNALERROR" not in result.stdout.str()
+ result.stdout.fnmatch_lines(["*test_fix*", "*fixture*'missing'*not found*"])
+
+
+def test_store_except_info_on_error():
+ """ Test that upon test failure, the exception info is stored on
+ sys.last_traceback and friends.
+ """
+ # Simulate item that might raise a specific exception, depending on `raise_error` class var
+ class ItemMightRaise(object):
+ nodeid = "item_that_raises"
+ raise_error = True
+
+ def runtest(self):
+ if self.raise_error:
+ raise IndexError("TEST")
+
+ try:
+ runner.pytest_runtest_call(ItemMightRaise())
+ except IndexError:
+ pass
+ # Check that exception info is stored on sys
+ assert sys.last_type is IndexError
+ assert sys.last_value.args[0] == "TEST"
+ assert sys.last_traceback
+
+ # The next run should clear the exception info stored by the previous run
+ ItemMightRaise.raise_error = False
+ runner.pytest_runtest_call(ItemMightRaise())
+ assert sys.last_type is None
+ assert sys.last_value is None
+ assert sys.last_traceback is None
+
+
+def test_current_test_env_var(testdir, monkeypatch):
+ pytest_current_test_vars = []
+ monkeypatch.setattr(
+ sys, "pytest_current_test_vars", pytest_current_test_vars, raising=False
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+ import sys
+ import os
+
+ @pytest.fixture
+ def fix():
+ sys.pytest_current_test_vars.append(('setup', os.environ['PYTEST_CURRENT_TEST']))
+ yield
+ sys.pytest_current_test_vars.append(('teardown', os.environ['PYTEST_CURRENT_TEST']))
+
+ def test(fix):
+ sys.pytest_current_test_vars.append(('call', os.environ['PYTEST_CURRENT_TEST']))
+ """
+ )
+ result = testdir.runpytest_inprocess()
+ assert result.ret == 0
+ test_id = "test_current_test_env_var.py::test"
+ assert (
+ pytest_current_test_vars
+ == [
+ ("setup", test_id + " (setup)"),
+ ("call", test_id + " (call)"),
+ ("teardown", test_id + " (teardown)"),
+ ]
+ )
+ assert "PYTEST_CURRENT_TEST" not in os.environ
+
+
+class TestReportContents(object):
+ """
+ Test user-level API of ``TestReport`` objects.
+ """
+
+ def getrunner(self):
+ return lambda item: runner.runtestprotocol(item, log=False)
+
+ def test_longreprtext_pass(self, testdir):
+ reports = testdir.runitem(
+ """
+ def test_func():
+ pass
+ """
+ )
+ rep = reports[1]
+ assert rep.longreprtext == ""
+
+ def test_longreprtext_failure(self, testdir):
+ reports = testdir.runitem(
+ """
+ def test_func():
+ x = 1
+ assert x == 4
+ """
+ )
+ rep = reports[1]
+ assert "assert 1 == 4" in rep.longreprtext
+
+ def test_captured_text(self, testdir):
+ reports = testdir.runitem(
+ """
+ import pytest
+ import sys
+
+ @pytest.fixture
+ def fix():
+ sys.stdout.write('setup: stdout\\n')
+ sys.stderr.write('setup: stderr\\n')
+ yield
+ sys.stdout.write('teardown: stdout\\n')
+ sys.stderr.write('teardown: stderr\\n')
+ assert 0
+
+ def test_func(fix):
+ sys.stdout.write('call: stdout\\n')
+ sys.stderr.write('call: stderr\\n')
+ assert 0
+ """
+ )
+ setup, call, teardown = reports
+ assert setup.capstdout == "setup: stdout\n"
+ assert call.capstdout == "setup: stdout\ncall: stdout\n"
+ assert teardown.capstdout == "setup: stdout\ncall: stdout\nteardown: stdout\n"
+
+ assert setup.capstderr == "setup: stderr\n"
+ assert call.capstderr == "setup: stderr\ncall: stderr\n"
+ assert teardown.capstderr == "setup: stderr\ncall: stderr\nteardown: stderr\n"
+
+ def test_no_captured_text(self, testdir):
+ reports = testdir.runitem(
+ """
+ def test_func():
+ pass
+ """
+ )
+ rep = reports[1]
+ assert rep.capstdout == ""
+ assert rep.capstderr == ""
diff --git a/third_party/python/pytest/testing/test_runner_xunit.py b/third_party/python/pytest/testing/test_runner_xunit.py
new file mode 100644
index 0000000000..8316aafbf5
--- /dev/null
+++ b/third_party/python/pytest/testing/test_runner_xunit.py
@@ -0,0 +1,352 @@
+"""
+ test correct setup/teardowns at
+ module, class, and instance level
+"""
+from __future__ import absolute_import, division, print_function
+import pytest
+
+
+def test_module_and_function_setup(testdir):
+ reprec = testdir.inline_runsource(
+ """
+ modlevel = []
+ def setup_module(module):
+ assert not modlevel
+ module.modlevel.append(42)
+
+ def teardown_module(module):
+ modlevel.pop()
+
+ def setup_function(function):
+ function.answer = 17
+
+ def teardown_function(function):
+ del function.answer
+
+ def test_modlevel():
+ assert modlevel[0] == 42
+ assert test_modlevel.answer == 17
+
+ class TestFromClass(object):
+ def test_module(self):
+ assert modlevel[0] == 42
+ assert not hasattr(test_modlevel, 'answer')
+ """
+ )
+ rep = reprec.matchreport("test_modlevel")
+ assert rep.passed
+ rep = reprec.matchreport("test_module")
+ assert rep.passed
+
+
+def test_module_setup_failure_no_teardown(testdir):
+ reprec = testdir.inline_runsource(
+ """
+ values = []
+ def setup_module(module):
+ values.append(1)
+ 0/0
+
+ def test_nothing():
+ pass
+
+ def teardown_module(module):
+ values.append(2)
+ """
+ )
+ reprec.assertoutcome(failed=1)
+ calls = reprec.getcalls("pytest_runtest_setup")
+ assert calls[0].item.module.values == [1]
+
+
+def test_setup_function_failure_no_teardown(testdir):
+ reprec = testdir.inline_runsource(
+ """
+ modlevel = []
+ def setup_function(function):
+ modlevel.append(1)
+ 0/0
+
+ def teardown_function(module):
+ modlevel.append(2)
+
+ def test_func():
+ pass
+ """
+ )
+ calls = reprec.getcalls("pytest_runtest_setup")
+ assert calls[0].item.module.modlevel == [1]
+
+
+def test_class_setup(testdir):
+ reprec = testdir.inline_runsource(
+ """
+ class TestSimpleClassSetup(object):
+ clslevel = []
+ def setup_class(cls):
+ cls.clslevel.append(23)
+
+ def teardown_class(cls):
+ cls.clslevel.pop()
+
+ def test_classlevel(self):
+ assert self.clslevel[0] == 23
+
+ class TestInheritedClassSetupStillWorks(TestSimpleClassSetup):
+ def test_classlevel_anothertime(self):
+ assert self.clslevel == [23]
+
+ def test_cleanup():
+ assert not TestSimpleClassSetup.clslevel
+ assert not TestInheritedClassSetupStillWorks.clslevel
+ """
+ )
+ reprec.assertoutcome(passed=1 + 2 + 1)
+
+
+def test_class_setup_failure_no_teardown(testdir):
+ reprec = testdir.inline_runsource(
+ """
+ class TestSimpleClassSetup(object):
+ clslevel = []
+ def setup_class(cls):
+ 0/0
+
+ def teardown_class(cls):
+ cls.clslevel.append(1)
+
+ def test_classlevel(self):
+ pass
+
+ def test_cleanup():
+ assert not TestSimpleClassSetup.clslevel
+ """
+ )
+ reprec.assertoutcome(failed=1, passed=1)
+
+
+def test_method_setup(testdir):
+ reprec = testdir.inline_runsource(
+ """
+ class TestSetupMethod(object):
+ def setup_method(self, meth):
+ self.methsetup = meth
+ def teardown_method(self, meth):
+ del self.methsetup
+
+ def test_some(self):
+ assert self.methsetup == self.test_some
+
+ def test_other(self):
+ assert self.methsetup == self.test_other
+ """
+ )
+ reprec.assertoutcome(passed=2)
+
+
+def test_method_setup_failure_no_teardown(testdir):
+ reprec = testdir.inline_runsource(
+ """
+ class TestMethodSetup(object):
+ clslevel = []
+ def setup_method(self, method):
+ self.clslevel.append(1)
+ 0/0
+
+ def teardown_method(self, method):
+ self.clslevel.append(2)
+
+ def test_method(self):
+ pass
+
+ def test_cleanup():
+ assert TestMethodSetup.clslevel == [1]
+ """
+ )
+ reprec.assertoutcome(failed=1, passed=1)
+
+
+def test_method_generator_setup(testdir):
+ reprec = testdir.inline_runsource(
+ """
+ class TestSetupTeardownOnInstance(object):
+ def setup_class(cls):
+ cls.classsetup = True
+
+ def setup_method(self, method):
+ self.methsetup = method
+
+ def test_generate(self):
+ assert self.classsetup
+ assert self.methsetup == self.test_generate
+ yield self.generated, 5
+ yield self.generated, 2
+
+ def generated(self, value):
+ assert self.classsetup
+ assert self.methsetup == self.test_generate
+ assert value == 5
+ """
+ )
+ reprec.assertoutcome(passed=1, failed=1)
+
+
+def test_func_generator_setup(testdir):
+ reprec = testdir.inline_runsource(
+ """
+ import sys
+
+ def setup_module(mod):
+ print ("setup_module")
+ mod.x = []
+
+ def setup_function(fun):
+ print ("setup_function")
+ x.append(1)
+
+ def teardown_function(fun):
+ print ("teardown_function")
+ x.pop()
+
+ def test_one():
+ assert x == [1]
+ def check():
+ print ("check")
+ sys.stderr.write("e\\n")
+ assert x == [1]
+ yield check
+ assert x == [1]
+ """
+ )
+ rep = reprec.matchreport("test_one", names="pytest_runtest_logreport")
+ assert rep.passed
+
+
+def test_method_setup_uses_fresh_instances(testdir):
+ reprec = testdir.inline_runsource(
+ """
+ class TestSelfState1(object):
+ memory = []
+ def test_hello(self):
+ self.memory.append(self)
+
+ def test_afterhello(self):
+ assert self != self.memory[0]
+ """
+ )
+ reprec.assertoutcome(passed=2, failed=0)
+
+
+def test_setup_that_skips_calledagain(testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ def setup_module(mod):
+ pytest.skip("x")
+ def test_function1():
+ pass
+ def test_function2():
+ pass
+ """
+ )
+ reprec = testdir.inline_run(p)
+ reprec.assertoutcome(skipped=2)
+
+
+def test_setup_fails_again_on_all_tests(testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ def setup_module(mod):
+ raise ValueError(42)
+ def test_function1():
+ pass
+ def test_function2():
+ pass
+ """
+ )
+ reprec = testdir.inline_run(p)
+ reprec.assertoutcome(failed=2)
+
+
+def test_setup_funcarg_setup_when_outer_scope_fails(testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ def setup_module(mod):
+ raise ValueError(42)
+ @pytest.fixture
+ def hello(request):
+ raise ValueError("xyz43")
+ def test_function1(hello):
+ pass
+ def test_function2(hello):
+ pass
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*function1*",
+ "*ValueError*42*",
+ "*function2*",
+ "*ValueError*42*",
+ "*2 error*",
+ ]
+ )
+ assert "xyz43" not in result.stdout.str()
+
+
+@pytest.mark.parametrize("arg", ["", "arg"])
+def test_setup_teardown_function_level_with_optional_argument(
+ testdir, monkeypatch, arg
+):
+ """parameter to setup/teardown xunit-style functions parameter is now optional (#1728)."""
+ import sys
+
+ trace_setups_teardowns = []
+ monkeypatch.setattr(
+ sys, "trace_setups_teardowns", trace_setups_teardowns, raising=False
+ )
+ p = testdir.makepyfile(
+ """
+ import pytest
+ import sys
+
+ trace = sys.trace_setups_teardowns.append
+
+ def setup_module({arg}): trace('setup_module')
+ def teardown_module({arg}): trace('teardown_module')
+
+ def setup_function({arg}): trace('setup_function')
+ def teardown_function({arg}): trace('teardown_function')
+
+ def test_function_1(): pass
+ def test_function_2(): pass
+
+ class Test(object):
+ def setup_method(self, {arg}): trace('setup_method')
+ def teardown_method(self, {arg}): trace('teardown_method')
+
+ def test_method_1(self): pass
+ def test_method_2(self): pass
+ """.format(
+ arg=arg
+ )
+ )
+ result = testdir.inline_run(p)
+ result.assertoutcome(passed=4)
+
+ expected = [
+ "setup_module",
+ "setup_function",
+ "teardown_function",
+ "setup_function",
+ "teardown_function",
+ "setup_method",
+ "teardown_method",
+ "setup_method",
+ "teardown_method",
+ "teardown_module",
+ ]
+ assert trace_setups_teardowns == expected
diff --git a/third_party/python/pytest/testing/test_session.py b/third_party/python/pytest/testing/test_session.py
new file mode 100644
index 0000000000..4a594009ba
--- /dev/null
+++ b/third_party/python/pytest/testing/test_session.py
@@ -0,0 +1,343 @@
+from __future__ import absolute_import, division, print_function
+
+import pytest
+
+from _pytest.main import EXIT_NOTESTSCOLLECTED
+
+
+class SessionTests(object):
+
+ def test_basic_testitem_events(self, testdir):
+ tfile = testdir.makepyfile(
+ """
+ def test_one():
+ pass
+ def test_one_one():
+ assert 0
+ def test_other():
+ raise ValueError(23)
+ class TestClass(object):
+ def test_two(self, someargs):
+ pass
+ """
+ )
+ reprec = testdir.inline_run(tfile)
+ passed, skipped, failed = reprec.listoutcomes()
+ assert len(skipped) == 0
+ assert len(passed) == 1
+ assert len(failed) == 3
+
+ def end(x):
+ return x.nodeid.split("::")[-1]
+
+ assert end(failed[0]) == "test_one_one"
+ assert end(failed[1]) == "test_other"
+ itemstarted = reprec.getcalls("pytest_itemcollected")
+ assert len(itemstarted) == 4
+ # XXX check for failing funcarg setup
+ # colreports = reprec.getcalls("pytest_collectreport")
+ # assert len(colreports) == 4
+ # assert colreports[1].report.failed
+
+ def test_nested_import_error(self, testdir):
+ tfile = testdir.makepyfile(
+ """
+ import import_fails
+ def test_this():
+ assert import_fails.a == 1
+ """,
+ import_fails="""
+ import does_not_work
+ a = 1
+ """,
+ )
+ reprec = testdir.inline_run(tfile)
+ values = reprec.getfailedcollections()
+ assert len(values) == 1
+ out = str(values[0].longrepr)
+ assert out.find("does_not_work") != -1
+
+ def test_raises_output(self, testdir):
+ reprec = testdir.inline_runsource(
+ """
+ import pytest
+ def test_raises_doesnt():
+ pytest.raises(ValueError, int, "3")
+ """
+ )
+ passed, skipped, failed = reprec.listoutcomes()
+ assert len(failed) == 1
+ out = failed[0].longrepr.reprcrash.message
+ if not out.find("DID NOT RAISE") != -1:
+ print(out)
+ pytest.fail("incorrect raises() output")
+
+ def test_generator_yields_None(self, testdir):
+ reprec = testdir.inline_runsource(
+ """
+ def test_1():
+ yield None
+ """
+ )
+ failures = reprec.getfailedcollections()
+ out = failures[0].longrepr.reprcrash.message
+ i = out.find("TypeError")
+ assert i != -1
+
+ def test_syntax_error_module(self, testdir):
+ reprec = testdir.inline_runsource("this is really not python")
+ values = reprec.getfailedcollections()
+ assert len(values) == 1
+ out = str(values[0].longrepr)
+ assert out.find(str("not python")) != -1
+
+ def test_exit_first_problem(self, testdir):
+ reprec = testdir.inline_runsource(
+ """
+ def test_one(): assert 0
+ def test_two(): assert 0
+ """,
+ "--exitfirst",
+ )
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 1
+ assert passed == skipped == 0
+
+ def test_maxfail(self, testdir):
+ reprec = testdir.inline_runsource(
+ """
+ def test_one(): assert 0
+ def test_two(): assert 0
+ def test_three(): assert 0
+ """,
+ "--maxfail=2",
+ )
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 2
+ assert passed == skipped == 0
+
+ def test_broken_repr(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ class BrokenRepr1(object):
+ foo=0
+ def __repr__(self):
+ raise Exception("Ha Ha fooled you, I'm a broken repr().")
+
+ class TestBrokenClass(object):
+ def test_explicit_bad_repr(self):
+ t = BrokenRepr1()
+ pytest.raises(Exception, 'repr(t)')
+
+ def test_implicit_bad_repr1(self):
+ t = BrokenRepr1()
+ assert t.foo == 1
+
+ """
+ )
+ reprec = testdir.inline_run(p)
+ passed, skipped, failed = reprec.listoutcomes()
+ assert len(failed) == 1
+ out = failed[0].longrepr.reprcrash.message
+ assert (
+ out.find(
+ """[Exception("Ha Ha fooled you, I'm a broken repr().") raised in repr()]"""
+ )
+ != -1
+ ) # '
+
+ def test_skip_file_by_conftest(self, testdir):
+ testdir.makepyfile(
+ conftest="""
+ import pytest
+ def pytest_collect_file():
+ pytest.skip("intentional")
+ """,
+ test_file="""
+ def test_one(): pass
+ """,
+ )
+ try:
+ reprec = testdir.inline_run(testdir.tmpdir)
+ except pytest.skip.Exception:
+ pytest.fail("wrong skipped caught")
+ reports = reprec.getreports("pytest_collectreport")
+ assert len(reports) == 1
+ assert reports[0].skipped
+
+
+class TestNewSession(SessionTests):
+
+ def test_order_of_execution(self, testdir):
+ reprec = testdir.inline_runsource(
+ """
+ values = []
+ def test_1():
+ values.append(1)
+ def test_2():
+ values.append(2)
+ def test_3():
+ assert values == [1,2]
+ class Testmygroup(object):
+ reslist = values
+ def test_1(self):
+ self.reslist.append(1)
+ def test_2(self):
+ self.reslist.append(2)
+ def test_3(self):
+ self.reslist.append(3)
+ def test_4(self):
+ assert self.reslist == [1,2,1,2,3]
+ """
+ )
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == skipped == 0
+ assert passed == 7
+ # also test listnames() here ...
+
+ def test_collect_only_with_various_situations(self, testdir):
+ p = testdir.makepyfile(
+ test_one="""
+ def test_one():
+ raise ValueError()
+
+ class TestX(object):
+ def test_method_one(self):
+ pass
+
+ class TestY(TestX):
+ pass
+ """,
+ test_three="xxxdsadsadsadsa",
+ __init__="",
+ )
+ reprec = testdir.inline_run("--collect-only", p.dirpath())
+
+ itemstarted = reprec.getcalls("pytest_itemcollected")
+ assert len(itemstarted) == 3
+ assert not reprec.getreports("pytest_runtest_logreport")
+ started = reprec.getcalls("pytest_collectstart")
+ finished = reprec.getreports("pytest_collectreport")
+ assert len(started) == len(finished)
+ assert len(started) == 7 # XXX extra TopCollector
+ colfail = [x for x in finished if x.failed]
+ assert len(colfail) == 1
+
+ def test_minus_x_import_error(self, testdir):
+ testdir.makepyfile(__init__="")
+ testdir.makepyfile(test_one="xxxx", test_two="yyyy")
+ reprec = testdir.inline_run("-x", testdir.tmpdir)
+ finished = reprec.getreports("pytest_collectreport")
+ colfail = [x for x in finished if x.failed]
+ assert len(colfail) == 1
+
+ def test_minus_x_overridden_by_maxfail(self, testdir):
+ testdir.makepyfile(__init__="")
+ testdir.makepyfile(test_one="xxxx", test_two="yyyy", test_third="zzz")
+ reprec = testdir.inline_run("-x", "--maxfail=2", testdir.tmpdir)
+ finished = reprec.getreports("pytest_collectreport")
+ colfail = [x for x in finished if x.failed]
+ assert len(colfail) == 2
+
+
+def test_plugin_specify(testdir):
+ pytest.raises(
+ ImportError,
+ """
+ testdir.parseconfig("-p", "nqweotexistent")
+ """,
+ )
+ # pytest.raises(ImportError,
+ # "config.do_configure(config)"
+ # )
+
+
+def test_plugin_already_exists(testdir):
+ config = testdir.parseconfig("-p", "terminal")
+ assert config.option.plugins == ["terminal"]
+ config._do_configure()
+ config._ensure_unconfigure()
+
+
+def test_exclude(testdir):
+ hellodir = testdir.mkdir("hello")
+ hellodir.join("test_hello.py").write("x y syntaxerror")
+ hello2dir = testdir.mkdir("hello2")
+ hello2dir.join("test_hello2.py").write("x y syntaxerror")
+ testdir.makepyfile(test_ok="def test_pass(): pass")
+ result = testdir.runpytest("--ignore=hello", "--ignore=hello2")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_deselect(testdir):
+ testdir.makepyfile(
+ test_a="""
+ import pytest
+ def test_a1(): pass
+ @pytest.mark.parametrize('b', range(3))
+ def test_a2(b): pass
+ """
+ )
+ result = testdir.runpytest(
+ "-v", "--deselect=test_a.py::test_a2[1]", "--deselect=test_a.py::test_a2[2]"
+ )
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*2 passed, 2 deselected*"])
+ for line in result.stdout.lines:
+ assert not line.startswith(("test_a.py::test_a2[1]", "test_a.py::test_a2[2]"))
+
+
+def test_sessionfinish_with_start(testdir):
+ testdir.makeconftest(
+ """
+ import os
+ values = []
+ def pytest_sessionstart():
+ values.append(os.getcwd())
+ os.chdir("..")
+
+ def pytest_sessionfinish():
+ assert values[0] == os.getcwd()
+
+ """
+ )
+ res = testdir.runpytest("--collect-only")
+ assert res.ret == EXIT_NOTESTSCOLLECTED
+
+
+@pytest.mark.parametrize("path", ["root", "{relative}/root", "{environment}/root"])
+def test_rootdir_option_arg(testdir, monkeypatch, path):
+ monkeypatch.setenv("PY_ROOTDIR_PATH", str(testdir.tmpdir))
+ path = path.format(relative=str(testdir.tmpdir), environment="$PY_ROOTDIR_PATH")
+
+ rootdir = testdir.mkdir("root")
+ rootdir.mkdir("tests")
+ testdir.makepyfile(
+ """
+ import os
+ def test_one():
+ assert 1
+ """
+ )
+
+ result = testdir.runpytest("--rootdir={}".format(path))
+ result.stdout.fnmatch_lines(
+ ["*rootdir: {}/root, inifile:*".format(testdir.tmpdir), "*1 passed*"]
+ )
+
+
+def test_rootdir_wrong_option_arg(testdir):
+ testdir.makepyfile(
+ """
+ import os
+ def test_one():
+ assert 1
+ """
+ )
+
+ result = testdir.runpytest("--rootdir=wrong_dir")
+ result.stderr.fnmatch_lines(
+ ["*Directory *wrong_dir* not found. Check your '--rootdir' option.*"]
+ )
diff --git a/third_party/python/pytest/testing/test_skipping.py b/third_party/python/pytest/testing/test_skipping.py
new file mode 100644
index 0000000000..5d970e2fea
--- /dev/null
+++ b/third_party/python/pytest/testing/test_skipping.py
@@ -0,0 +1,1194 @@
+from __future__ import absolute_import, division, print_function
+import pytest
+import sys
+
+from _pytest.skipping import MarkEvaluator, folded_skips, pytest_runtest_setup
+from _pytest.runner import runtestprotocol
+
+
+class TestEvaluator(object):
+
+ def test_no_marker(self, testdir):
+ item = testdir.getitem("def test_func(): pass")
+ evalskipif = MarkEvaluator(item, "skipif")
+ assert not evalskipif
+ assert not evalskipif.istrue()
+
+ def test_marked_no_args(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.xyz
+ def test_func():
+ pass
+ """
+ )
+ ev = MarkEvaluator(item, "xyz")
+ assert ev
+ assert ev.istrue()
+ expl = ev.getexplanation()
+ assert expl == ""
+ assert not ev.get("run", False)
+
+ def test_marked_one_arg(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.xyz("hasattr(os, 'sep')")
+ def test_func():
+ pass
+ """
+ )
+ ev = MarkEvaluator(item, "xyz")
+ assert ev
+ assert ev.istrue()
+ expl = ev.getexplanation()
+ assert expl == "condition: hasattr(os, 'sep')"
+
+ @pytest.mark.skipif("sys.version_info[0] >= 3")
+ def test_marked_one_arg_unicode(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.xyz(u"hasattr(os, 'sep')")
+ def test_func():
+ pass
+ """
+ )
+ ev = MarkEvaluator(item, "xyz")
+ assert ev
+ assert ev.istrue()
+ expl = ev.getexplanation()
+ assert expl == "condition: hasattr(os, 'sep')"
+
+ def test_marked_one_arg_with_reason(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.xyz("hasattr(os, 'sep')", attr=2, reason="hello world")
+ def test_func():
+ pass
+ """
+ )
+ ev = MarkEvaluator(item, "xyz")
+ assert ev
+ assert ev.istrue()
+ expl = ev.getexplanation()
+ assert expl == "hello world"
+ assert ev.get("attr") == 2
+
+ def test_marked_one_arg_twice(self, testdir):
+ lines = [
+ """@pytest.mark.skipif("not hasattr(os, 'murks')")""",
+ """@pytest.mark.skipif("hasattr(os, 'murks')")""",
+ ]
+ for i in range(0, 2):
+ item = testdir.getitem(
+ """
+ import pytest
+ %s
+ %s
+ def test_func():
+ pass
+ """
+ % (lines[i], lines[(i + 1) % 2])
+ )
+ ev = MarkEvaluator(item, "skipif")
+ assert ev
+ assert ev.istrue()
+ expl = ev.getexplanation()
+ assert expl == "condition: not hasattr(os, 'murks')"
+
+ def test_marked_one_arg_twice2(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.skipif("hasattr(os, 'murks')")
+ @pytest.mark.skipif("not hasattr(os, 'murks')")
+ def test_func():
+ pass
+ """
+ )
+ ev = MarkEvaluator(item, "skipif")
+ assert ev
+ assert ev.istrue()
+ expl = ev.getexplanation()
+ assert expl == "condition: not hasattr(os, 'murks')"
+
+ def test_marked_skip_with_not_string(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.skipif(False)
+ def test_func():
+ pass
+ """
+ )
+ ev = MarkEvaluator(item, "skipif")
+ exc = pytest.raises(pytest.fail.Exception, ev.istrue)
+ assert """Failed: you need to specify reason=STRING when using booleans as conditions.""" in exc.value.msg
+
+ def test_skipif_class(self, testdir):
+ item, = testdir.getitems(
+ """
+ import pytest
+ class TestClass(object):
+ pytestmark = pytest.mark.skipif("config._hackxyz")
+ def test_func(self):
+ pass
+ """
+ )
+ item.config._hackxyz = 3
+ ev = MarkEvaluator(item, "skipif")
+ assert ev.istrue()
+ expl = ev.getexplanation()
+ assert expl == "condition: config._hackxyz"
+
+
+class TestXFail(object):
+
+ @pytest.mark.parametrize("strict", [True, False])
+ def test_xfail_simple(self, testdir, strict):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.xfail(strict=%s)
+ def test_func():
+ assert 0
+ """
+ % strict
+ )
+ reports = runtestprotocol(item, log=False)
+ assert len(reports) == 3
+ callreport = reports[1]
+ assert callreport.skipped
+ assert callreport.wasxfail == ""
+
+ def test_xfail_xpassed(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.xfail(reason="this is an xfail")
+ def test_func():
+ assert 1
+ """
+ )
+ reports = runtestprotocol(item, log=False)
+ assert len(reports) == 3
+ callreport = reports[1]
+ assert callreport.passed
+ assert callreport.wasxfail == "this is an xfail"
+
+ def test_xfail_using_platform(self, testdir):
+ """
+ Verify that platform can be used with xfail statements.
+ """
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.xfail("platform.platform() == platform.platform()")
+ def test_func():
+ assert 0
+ """
+ )
+ reports = runtestprotocol(item, log=False)
+ assert len(reports) == 3
+ callreport = reports[1]
+ assert callreport.wasxfail
+
+ def test_xfail_xpassed_strict(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.xfail(strict=True, reason="nope")
+ def test_func():
+ assert 1
+ """
+ )
+ reports = runtestprotocol(item, log=False)
+ assert len(reports) == 3
+ callreport = reports[1]
+ assert callreport.failed
+ assert callreport.longrepr == "[XPASS(strict)] nope"
+ assert not hasattr(callreport, "wasxfail")
+
+ def test_xfail_run_anyway(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test_func():
+ assert 0
+ def test_func2():
+ pytest.xfail("hello")
+ """
+ )
+ result = testdir.runpytest("--runxfail")
+ result.stdout.fnmatch_lines(
+ ["*def test_func():*", "*assert 0*", "*1 failed*1 pass*"]
+ )
+
+ def test_xfail_evalfalse_but_fails(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.xfail('False')
+ def test_func():
+ assert 0
+ """
+ )
+ reports = runtestprotocol(item, log=False)
+ callreport = reports[1]
+ assert callreport.failed
+ assert not hasattr(callreport, "wasxfail")
+ assert "xfail" in callreport.keywords
+
+ def test_xfail_not_report_default(self, testdir):
+ p = testdir.makepyfile(
+ test_one="""
+ import pytest
+ @pytest.mark.xfail
+ def test_this():
+ assert 0
+ """
+ )
+ testdir.runpytest(p, "-v")
+ # result.stdout.fnmatch_lines([
+ # "*HINT*use*-r*"
+ # ])
+
+ def test_xfail_not_run_xfail_reporting(self, testdir):
+ p = testdir.makepyfile(
+ test_one="""
+ import pytest
+ @pytest.mark.xfail(run=False, reason="noway")
+ def test_this():
+ assert 0
+ @pytest.mark.xfail("True", run=False)
+ def test_this_true():
+ assert 0
+ @pytest.mark.xfail("False", run=False, reason="huh")
+ def test_this_false():
+ assert 1
+ """
+ )
+ result = testdir.runpytest(p, "-rx")
+ result.stdout.fnmatch_lines(
+ [
+ "*test_one*test_this*",
+ "*NOTRUN*noway",
+ "*test_one*test_this_true*",
+ "*NOTRUN*condition:*True*",
+ "*1 passed*",
+ ]
+ )
+
+ def test_xfail_not_run_no_setup_run(self, testdir):
+ p = testdir.makepyfile(
+ test_one="""
+ import pytest
+ @pytest.mark.xfail(run=False, reason="hello")
+ def test_this():
+ assert 0
+ def setup_module(mod):
+ raise ValueError(42)
+ """
+ )
+ result = testdir.runpytest(p, "-rx")
+ result.stdout.fnmatch_lines(
+ ["*test_one*test_this*", "*NOTRUN*hello", "*1 xfailed*"]
+ )
+
+ def test_xfail_xpass(self, testdir):
+ p = testdir.makepyfile(
+ test_one="""
+ import pytest
+ @pytest.mark.xfail
+ def test_that():
+ assert 1
+ """
+ )
+ result = testdir.runpytest(p, "-rX")
+ result.stdout.fnmatch_lines(["*XPASS*test_that*", "*1 xpassed*"])
+ assert result.ret == 0
+
+ def test_xfail_imperative(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ def test_this():
+ pytest.xfail("hello")
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(["*1 xfailed*"])
+ result = testdir.runpytest(p, "-rx")
+ result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*reason:*hello*"])
+ result = testdir.runpytest(p, "--runxfail")
+ result.stdout.fnmatch_lines("*1 pass*")
+
+ def test_xfail_imperative_in_setup_function(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ def setup_function(function):
+ pytest.xfail("hello")
+
+ def test_this():
+ assert 0
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(["*1 xfailed*"])
+ result = testdir.runpytest(p, "-rx")
+ result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*reason:*hello*"])
+ result = testdir.runpytest(p, "--runxfail")
+ result.stdout.fnmatch_lines(
+ """
+ *def test_this*
+ *1 fail*
+ """
+ )
+
+ def xtest_dynamic_xfail_set_during_setup(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ def setup_function(function):
+ pytest.mark.xfail(function)
+ def test_this():
+ assert 0
+ def test_that():
+ assert 1
+ """
+ )
+ result = testdir.runpytest(p, "-rxX")
+ result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*XPASS*test_that*"])
+
+ def test_dynamic_xfail_no_run(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def arg(request):
+ request.applymarker(pytest.mark.xfail(run=False))
+ def test_this(arg):
+ assert 0
+ """
+ )
+ result = testdir.runpytest(p, "-rxX")
+ result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*NOTRUN*"])
+
+ def test_dynamic_xfail_set_during_funcarg_setup(self, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def arg(request):
+ request.applymarker(pytest.mark.xfail)
+ def test_this2(arg):
+ assert 0
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(["*1 xfailed*"])
+
+ @pytest.mark.parametrize(
+ "expected, actual, matchline",
+ [
+ ("TypeError", "TypeError", "*1 xfailed*"),
+ ("(AttributeError, TypeError)", "TypeError", "*1 xfailed*"),
+ ("TypeError", "IndexError", "*1 failed*"),
+ ("(AttributeError, TypeError)", "IndexError", "*1 failed*"),
+ ],
+ )
+ def test_xfail_raises(self, expected, actual, matchline, testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail(raises=%s)
+ def test_raises():
+ raise %s()
+ """
+ % (expected, actual)
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines([matchline])
+
+ def test_strict_sanity(self, testdir):
+ """sanity check for xfail(strict=True): a failing test should behave
+ exactly like a normal xfail.
+ """
+ p = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail(reason='unsupported feature', strict=True)
+ def test_foo():
+ assert 0
+ """
+ )
+ result = testdir.runpytest(p, "-rxX")
+ result.stdout.fnmatch_lines(["*XFAIL*", "*unsupported feature*"])
+ assert result.ret == 0
+
+ @pytest.mark.parametrize("strict", [True, False])
+ def test_strict_xfail(self, testdir, strict):
+ p = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.xfail(reason='unsupported feature', strict=%s)
+ def test_foo():
+ with open('foo_executed', 'w'): pass # make sure test executes
+ """
+ % strict
+ )
+ result = testdir.runpytest(p, "-rxX")
+ if strict:
+ result.stdout.fnmatch_lines(
+ ["*test_foo*", "*XPASS(strict)*unsupported feature*"]
+ )
+ else:
+ result.stdout.fnmatch_lines(
+ [
+ "*test_strict_xfail*",
+ "XPASS test_strict_xfail.py::test_foo unsupported feature",
+ ]
+ )
+ assert result.ret == (1 if strict else 0)
+ assert testdir.tmpdir.join("foo_executed").isfile()
+
+ @pytest.mark.parametrize("strict", [True, False])
+ def test_strict_xfail_condition(self, testdir, strict):
+ p = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.xfail(False, reason='unsupported feature', strict=%s)
+ def test_foo():
+ pass
+ """
+ % strict
+ )
+ result = testdir.runpytest(p, "-rxX")
+ result.stdout.fnmatch_lines("*1 passed*")
+ assert result.ret == 0
+
+ @pytest.mark.parametrize("strict", [True, False])
+ def test_xfail_condition_keyword(self, testdir, strict):
+ p = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.xfail(condition=False, reason='unsupported feature', strict=%s)
+ def test_foo():
+ pass
+ """
+ % strict
+ )
+ result = testdir.runpytest(p, "-rxX")
+ result.stdout.fnmatch_lines("*1 passed*")
+ assert result.ret == 0
+
+ @pytest.mark.parametrize("strict_val", ["true", "false"])
+ def test_strict_xfail_default_from_file(self, testdir, strict_val):
+ testdir.makeini(
+ """
+ [pytest]
+ xfail_strict = %s
+ """
+ % strict_val
+ )
+ p = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail(reason='unsupported feature')
+ def test_foo():
+ pass
+ """
+ )
+ result = testdir.runpytest(p, "-rxX")
+ strict = strict_val == "true"
+ result.stdout.fnmatch_lines("*1 failed*" if strict else "*1 xpassed*")
+ assert result.ret == (1 if strict else 0)
+
+
+class TestXFailwithSetupTeardown(object):
+
+ def test_failing_setup_issue9(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def setup_function(func):
+ assert 0
+
+ @pytest.mark.xfail
+ def test_func():
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 xfail*"])
+
+ def test_failing_teardown_issue9(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def teardown_function(func):
+ assert 0
+
+ @pytest.mark.xfail
+ def test_func():
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 xfail*"])
+
+
+class TestSkip(object):
+
+ def test_skip_class(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip
+ class TestSomething(object):
+ def test_foo(self):
+ pass
+ def test_bar(self):
+ pass
+
+ def test_baz():
+ pass
+ """
+ )
+ rec = testdir.inline_run()
+ rec.assertoutcome(skipped=2, passed=1)
+
+ def test_skips_on_false_string(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip('False')
+ def test_foo():
+ pass
+ """
+ )
+ rec = testdir.inline_run()
+ rec.assertoutcome(skipped=1)
+
+ def test_arg_as_reason(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip('testing stuff')
+ def test_bar():
+ pass
+ """
+ )
+ result = testdir.runpytest("-rs")
+ result.stdout.fnmatch_lines(["*testing stuff*", "*1 skipped*"])
+
+ def test_skip_no_reason(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip
+ def test_foo():
+ pass
+ """
+ )
+ result = testdir.runpytest("-rs")
+ result.stdout.fnmatch_lines(["*unconditional skip*", "*1 skipped*"])
+
+ def test_skip_with_reason(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip(reason="for lolz")
+ def test_bar():
+ pass
+ """
+ )
+ result = testdir.runpytest("-rs")
+ result.stdout.fnmatch_lines(["*for lolz*", "*1 skipped*"])
+
+ def test_only_skips_marked_test(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip
+ def test_foo():
+ pass
+ @pytest.mark.skip(reason="nothing in particular")
+ def test_bar():
+ pass
+ def test_baz():
+ assert True
+ """
+ )
+ result = testdir.runpytest("-rs")
+ result.stdout.fnmatch_lines(["*nothing in particular*", "*1 passed*2 skipped*"])
+
+ def test_strict_and_skip(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip
+ def test_hello():
+ pass
+ """
+ )
+ result = testdir.runpytest("-rs")
+ result.stdout.fnmatch_lines(["*unconditional skip*", "*1 skipped*"])
+
+
+class TestSkipif(object):
+
+ def test_skipif_conditional(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.skipif("hasattr(os, 'sep')")
+ def test_func():
+ pass
+ """
+ )
+ x = pytest.raises(pytest.skip.Exception, lambda: pytest_runtest_setup(item))
+ assert x.value.msg == "condition: hasattr(os, 'sep')"
+
+ @pytest.mark.parametrize(
+ "params", ["\"hasattr(sys, 'platform')\"", 'True, reason="invalid platform"']
+ )
+ def test_skipif_reporting(self, testdir, params):
+ p = testdir.makepyfile(
+ test_foo="""
+ import pytest
+ @pytest.mark.skipif(%(params)s)
+ def test_that():
+ assert 0
+ """
+ % dict(params=params)
+ )
+ result = testdir.runpytest(p, "-s", "-rs")
+ result.stdout.fnmatch_lines(["*SKIP*1*test_foo.py*platform*", "*1 skipped*"])
+ assert result.ret == 0
+
+ def test_skipif_using_platform(self, testdir):
+ item = testdir.getitem(
+ """
+ import pytest
+ @pytest.mark.skipif("platform.platform() == platform.platform()")
+ def test_func():
+ pass
+ """
+ )
+ pytest.raises(pytest.skip.Exception, lambda: pytest_runtest_setup(item))
+
+ @pytest.mark.parametrize(
+ "marker, msg1, msg2",
+ [("skipif", "SKIP", "skipped"), ("xfail", "XPASS", "xpassed")],
+ )
+ def test_skipif_reporting_multiple(self, testdir, marker, msg1, msg2):
+ testdir.makepyfile(
+ test_foo="""
+ import pytest
+ @pytest.mark.{marker}(False, reason='first_condition')
+ @pytest.mark.{marker}(True, reason='second_condition')
+ def test_foobar():
+ assert 1
+ """.format(
+ marker=marker
+ )
+ )
+ result = testdir.runpytest("-s", "-rsxX")
+ result.stdout.fnmatch_lines(
+ [
+ "*{msg1}*test_foo.py*second_condition*".format(msg1=msg1),
+ "*1 {msg2}*".format(msg2=msg2),
+ ]
+ )
+ assert result.ret == 0
+
+
+def test_skip_not_report_default(testdir):
+ p = testdir.makepyfile(
+ test_one="""
+ import pytest
+ def test_this():
+ pytest.skip("hello")
+ """
+ )
+ result = testdir.runpytest(p, "-v")
+ result.stdout.fnmatch_lines(
+ [
+ # "*HINT*use*-r*",
+ "*1 skipped*"
+ ]
+ )
+
+
+def test_skipif_class(testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+
+ class TestClass(object):
+ pytestmark = pytest.mark.skipif("True")
+ def test_that(self):
+ assert 0
+ def test_though(self):
+ assert 0
+ """
+ )
+ result = testdir.runpytest(p)
+ result.stdout.fnmatch_lines(["*2 skipped*"])
+
+
+def test_skip_reasons_folding():
+ path = "xyz"
+ lineno = 3
+ message = "justso"
+ longrepr = (path, lineno, message)
+
+ class X(object):
+ pass
+
+ ev1 = X()
+ ev1.when = "execute"
+ ev1.skipped = True
+ ev1.longrepr = longrepr
+
+ ev2 = X()
+ ev2.when = "execute"
+ ev2.longrepr = longrepr
+ ev2.skipped = True
+
+ # ev3 might be a collection report
+ ev3 = X()
+ ev3.longrepr = longrepr
+ ev3.skipped = True
+
+ values = folded_skips([ev1, ev2, ev3])
+ assert len(values) == 1
+ num, fspath, reason_lineno, reason = values[0]
+ assert num == 3
+ assert fspath == path
+ assert reason_lineno == lineno
+ assert reason == message
+
+
+def test_skipped_reasons_functional(testdir):
+ testdir.makepyfile(
+ test_one="""
+ from conftest import doskip
+ def setup_function(func):
+ doskip()
+ def test_func():
+ pass
+ class TestClass(object):
+ def test_method(self):
+ doskip()
+ """,
+ conftest="""
+ import pytest
+ def doskip():
+ pytest.skip('test')
+ """,
+ )
+ result = testdir.runpytest("-rs")
+ result.stdout.fnmatch_lines(["*SKIP*2*conftest.py:4: test"])
+ assert result.ret == 0
+
+
+def test_skipped_folding(testdir):
+ testdir.makepyfile(
+ test_one="""
+ import pytest
+ pytestmark = pytest.mark.skip("Folding")
+ def setup_function(func):
+ pass
+ def test_func():
+ pass
+ class TestClass(object):
+ def test_method(self):
+ pass
+ """
+ )
+ result = testdir.runpytest("-rs")
+ result.stdout.fnmatch_lines(["*SKIP*2*test_one.py: Folding"])
+ assert result.ret == 0
+
+
+def test_reportchars(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_1():
+ assert 0
+ @pytest.mark.xfail
+ def test_2():
+ assert 0
+ @pytest.mark.xfail
+ def test_3():
+ pass
+ def test_4():
+ pytest.skip("four")
+ """
+ )
+ result = testdir.runpytest("-rfxXs")
+ result.stdout.fnmatch_lines(
+ ["FAIL*test_1*", "XFAIL*test_2*", "XPASS*test_3*", "SKIP*four*"]
+ )
+
+
+def test_reportchars_error(testdir):
+ testdir.makepyfile(
+ conftest="""
+ def pytest_runtest_teardown():
+ assert 0
+ """,
+ test_simple="""
+ def test_foo():
+ pass
+ """,
+ )
+ result = testdir.runpytest("-rE")
+ result.stdout.fnmatch_lines(["ERROR*test_foo*"])
+
+
+def test_reportchars_all(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_1():
+ assert 0
+ @pytest.mark.xfail
+ def test_2():
+ assert 0
+ @pytest.mark.xfail
+ def test_3():
+ pass
+ def test_4():
+ pytest.skip("four")
+ """
+ )
+ result = testdir.runpytest("-ra")
+ result.stdout.fnmatch_lines(
+ ["FAIL*test_1*", "SKIP*four*", "XFAIL*test_2*", "XPASS*test_3*"]
+ )
+
+
+def test_reportchars_all_error(testdir):
+ testdir.makepyfile(
+ conftest="""
+ def pytest_runtest_teardown():
+ assert 0
+ """,
+ test_simple="""
+ def test_foo():
+ pass
+ """,
+ )
+ result = testdir.runpytest("-ra")
+ result.stdout.fnmatch_lines(["ERROR*test_foo*"])
+
+
+@pytest.mark.xfail("hasattr(sys, 'pypy_version_info')")
+def test_errors_in_xfail_skip_expressions(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skipif("asd")
+ def test_nameerror():
+ pass
+ @pytest.mark.xfail("syntax error")
+ def test_syntax():
+ pass
+
+ def test_func():
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ markline = " ^"
+ if sys.platform.startswith("java"):
+ # XXX report this to java
+ markline = "*" + markline[8:]
+ result.stdout.fnmatch_lines(
+ [
+ "*ERROR*test_nameerror*",
+ "*evaluating*skipif*expression*",
+ "*asd*",
+ "*ERROR*test_syntax*",
+ "*evaluating*xfail*expression*",
+ " syntax error",
+ markline,
+ "SyntaxError: invalid syntax",
+ "*1 pass*2 error*",
+ ]
+ )
+
+
+def test_xfail_skipif_with_globals(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ x = 3
+ @pytest.mark.skipif("x == 3")
+ def test_skip1():
+ pass
+ @pytest.mark.xfail("x == 3")
+ def test_boolean():
+ assert 0
+ """
+ )
+ result = testdir.runpytest("-rsx")
+ result.stdout.fnmatch_lines(["*SKIP*x == 3*", "*XFAIL*test_boolean*", "*x == 3*"])
+
+
+def test_direct_gives_error(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skipif(True)
+ def test_skip1():
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 error*"])
+
+
+def test_default_markers(testdir):
+ result = testdir.runpytest("--markers")
+ result.stdout.fnmatch_lines(
+ [
+ "*skipif(*condition)*skip*",
+ "*xfail(*condition, reason=None, run=True, raises=None, strict=False)*expected failure*",
+ ]
+ )
+
+
+def test_xfail_test_setup_exception(testdir):
+ testdir.makeconftest(
+ """
+ def pytest_runtest_setup():
+ 0 / 0
+ """
+ )
+ p = testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test_func():
+ assert 0
+ """
+ )
+ result = testdir.runpytest(p)
+ assert result.ret == 0
+ assert "xfailed" in result.stdout.str()
+ assert "xpassed" not in result.stdout.str()
+
+
+def test_imperativeskip_on_xfail_test(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test_that_fails():
+ assert 0
+
+ @pytest.mark.skipif("True")
+ def test_hello():
+ pass
+ """
+ )
+ testdir.makeconftest(
+ """
+ import pytest
+ def pytest_runtest_setup(item):
+ pytest.skip("abc")
+ """
+ )
+ result = testdir.runpytest("-rsxX")
+ result.stdout.fnmatch_lines_random(
+ """
+ *SKIP*abc*
+ *SKIP*condition: True*
+ *2 skipped*
+ """
+ )
+
+
+class TestBooleanCondition(object):
+
+ def test_skipif(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skipif(True, reason="True123")
+ def test_func1():
+ pass
+ @pytest.mark.skipif(False, reason="True123")
+ def test_func2():
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *1 passed*1 skipped*
+ """
+ )
+
+ def test_skipif_noreason(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skipif(True)
+ def test_func():
+ pass
+ """
+ )
+ result = testdir.runpytest("-rs")
+ result.stdout.fnmatch_lines(
+ """
+ *1 error*
+ """
+ )
+
+ def test_xfail(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail(True, reason="True123")
+ def test_func():
+ assert 0
+ """
+ )
+ result = testdir.runpytest("-rxs")
+ result.stdout.fnmatch_lines(
+ """
+ *XFAIL*
+ *True123*
+ *1 xfail*
+ """
+ )
+
+
+def test_xfail_item(testdir):
+ # Ensure pytest.xfail works with non-Python Item
+ testdir.makeconftest(
+ """
+ import pytest
+
+ class MyItem(pytest.Item):
+ nodeid = 'foo'
+ def runtest(self):
+ pytest.xfail("Expected Failure")
+
+ def pytest_collect_file(path, parent):
+ return MyItem("foo", parent)
+ """
+ )
+ result = testdir.inline_run()
+ passed, skipped, failed = result.listoutcomes()
+ assert not failed
+ xfailed = [r for r in skipped if hasattr(r, "wasxfail")]
+ assert xfailed
+
+
+def test_module_level_skip_error(testdir):
+ """
+ Verify that using pytest.skip at module level causes a collection error
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.skip
+ def test_func():
+ assert True
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("*Using pytest.skip outside of a test is not allowed*")
+
+
+def test_module_level_skip_with_allow_module_level(testdir):
+ """
+ Verify that using pytest.skip(allow_module_level=True) is allowed
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+ pytest.skip("skip_module_level", allow_module_level=True)
+
+ def test_func():
+ assert 0
+ """
+ )
+ result = testdir.runpytest("-rxs")
+ result.stdout.fnmatch_lines("*SKIP*skip_module_level")
+
+
+def test_invalid_skip_keyword_parameter(testdir):
+ """
+ Verify that using pytest.skip() with an unknown parameter raises an error
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+ pytest.skip("skip_module_level", unknown=1)
+
+ def test_func():
+ assert 0
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("*TypeError:*['unknown']*")
+
+
+def test_mark_xfail_item(testdir):
+ # Ensure pytest.mark.xfail works with non-Python Item
+ testdir.makeconftest(
+ """
+ import pytest
+
+ class MyItem(pytest.Item):
+ nodeid = 'foo'
+ def setup(self):
+ marker = pytest.mark.xfail(True, reason="Expected failure")
+ self.add_marker(marker)
+ def runtest(self):
+ assert False
+
+ def pytest_collect_file(path, parent):
+ return MyItem("foo", parent)
+ """
+ )
+ result = testdir.inline_run()
+ passed, skipped, failed = result.listoutcomes()
+ assert not failed
+ xfailed = [r for r in skipped if hasattr(r, "wasxfail")]
+ assert xfailed
+
+
+def test_summary_list_after_errors(testdir):
+ """Ensure the list of errors/fails/xfails/skips appears after tracebacks in terminal reporting."""
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_fail():
+ assert 0
+ """
+ )
+ result = testdir.runpytest("-ra")
+ result.stdout.fnmatch_lines(
+ [
+ "=* FAILURES *=",
+ "*= short test summary info =*",
+ "FAIL test_summary_list_after_errors.py::test_fail",
+ ]
+ )
diff --git a/third_party/python/pytest/testing/test_terminal.py b/third_party/python/pytest/testing/test_terminal.py
new file mode 100644
index 0000000000..8f08ad34fa
--- /dev/null
+++ b/third_party/python/pytest/testing/test_terminal.py
@@ -0,0 +1,1267 @@
+"""
+terminal reporting of the full testing process.
+"""
+from __future__ import absolute_import, division, print_function
+import collections
+import sys
+
+import pluggy
+import _pytest._code
+import py
+import pytest
+from _pytest.main import EXIT_NOTESTSCOLLECTED
+from _pytest.terminal import TerminalReporter, repr_pythonversion, getreportopt
+from _pytest.terminal import build_summary_stats_line, _plugin_nameversions
+
+
+DistInfo = collections.namedtuple("DistInfo", ["project_name", "version"])
+
+
+class Option(object):
+
+ def __init__(self, verbose=False, fulltrace=False):
+ self.verbose = verbose
+ self.fulltrace = fulltrace
+
+ @property
+ def args(self):
+ values = []
+ if self.verbose:
+ values.append("-v")
+ if self.fulltrace:
+ values.append("--fulltrace")
+ return values
+
+
+@pytest.fixture(
+ params=[
+ Option(verbose=False),
+ Option(verbose=True),
+ Option(verbose=-1),
+ Option(fulltrace=True),
+ ],
+ ids=["default", "verbose", "quiet", "fulltrace"],
+)
+def option(request):
+ return request.param
+
+
+@pytest.mark.parametrize(
+ "input,expected",
+ [
+ ([DistInfo(project_name="test", version=1)], ["test-1"]),
+ ([DistInfo(project_name="pytest-test", version=1)], ["test-1"]),
+ (
+ [
+ DistInfo(project_name="test", version=1),
+ DistInfo(project_name="test", version=1),
+ ],
+ ["test-1"],
+ ),
+ ],
+ ids=["normal", "prefix-strip", "deduplicate"],
+)
+def test_plugin_nameversion(input, expected):
+ pluginlist = [(None, x) for x in input]
+ result = _plugin_nameversions(pluginlist)
+ assert result == expected
+
+
+class TestTerminal(object):
+
+ def test_pass_skip_fail(self, testdir, option):
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_ok():
+ pass
+ def test_skip():
+ pytest.skip("xx")
+ def test_func():
+ assert 0
+ """
+ )
+ result = testdir.runpytest(*option.args)
+ if option.verbose:
+ result.stdout.fnmatch_lines(
+ [
+ "*test_pass_skip_fail.py::test_ok PASS*",
+ "*test_pass_skip_fail.py::test_skip SKIP*",
+ "*test_pass_skip_fail.py::test_func FAIL*",
+ ]
+ )
+ else:
+ result.stdout.fnmatch_lines(["*test_pass_skip_fail.py .sF*"])
+ result.stdout.fnmatch_lines(
+ [" def test_func():", "> assert 0", "E assert 0"]
+ )
+
+ def test_internalerror(self, testdir, linecomp):
+ modcol = testdir.getmodulecol("def test_one(): pass")
+ rep = TerminalReporter(modcol.config, file=linecomp.stringio)
+ excinfo = pytest.raises(ValueError, "raise ValueError('hello')")
+ rep.pytest_internalerror(excinfo.getrepr())
+ linecomp.assert_contains_lines(["INTERNALERROR> *ValueError*hello*"])
+
+ def test_writeline(self, testdir, linecomp):
+ modcol = testdir.getmodulecol("def test_one(): pass")
+ rep = TerminalReporter(modcol.config, file=linecomp.stringio)
+ rep.write_fspath_result(modcol.nodeid, ".")
+ rep.write_line("hello world")
+ lines = linecomp.stringio.getvalue().split("\n")
+ assert not lines[0]
+ assert lines[1].endswith(modcol.name + " .")
+ assert lines[2] == "hello world"
+
+ def test_show_runtest_logstart(self, testdir, linecomp):
+ item = testdir.getitem("def test_func(): pass")
+ tr = TerminalReporter(item.config, file=linecomp.stringio)
+ item.config.pluginmanager.register(tr)
+ location = item.reportinfo()
+ tr.config.hook.pytest_runtest_logstart(
+ nodeid=item.nodeid, location=location, fspath=str(item.fspath)
+ )
+ linecomp.assert_contains_lines(["*test_show_runtest_logstart.py*"])
+
+ def test_runtest_location_shown_before_test_starts(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_1():
+ import time
+ time.sleep(20)
+ """
+ )
+ child = testdir.spawn_pytest("")
+ child.expect(".*test_runtest_location.*py")
+ child.sendeof()
+ child.kill(15)
+
+ def test_itemreport_subclasses_show_subclassed_file(self, testdir):
+ testdir.makepyfile(
+ test_p1="""
+ class BaseTests(object):
+ def test_p1(self):
+ pass
+ class TestClass(BaseTests):
+ pass
+ """
+ )
+ p2 = testdir.makepyfile(
+ test_p2="""
+ from test_p1 import BaseTests
+ class TestMore(BaseTests):
+ pass
+ """
+ )
+ result = testdir.runpytest(p2)
+ result.stdout.fnmatch_lines(["*test_p2.py .*", "*1 passed*"])
+ result = testdir.runpytest("-v", p2)
+ result.stdout.fnmatch_lines(
+ ["*test_p2.py::TestMore::test_p1* <- *test_p1.py*PASSED*"]
+ )
+
+ def test_itemreport_directclasses_not_shown_as_subclasses(self, testdir):
+ a = testdir.mkpydir("a123")
+ a.join("test_hello123.py").write(
+ _pytest._code.Source(
+ """
+ class TestClass(object):
+ def test_method(self):
+ pass
+ """
+ )
+ )
+ result = testdir.runpytest("-v")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*a123/test_hello123.py*PASS*"])
+ assert " <- " not in result.stdout.str()
+
+ def test_keyboard_interrupt(self, testdir, option):
+ testdir.makepyfile(
+ """
+ def test_foobar():
+ assert 0
+ def test_spamegg():
+ import pytest; pytest.skip('skip me please!')
+ def test_interrupt_me():
+ raise KeyboardInterrupt # simulating the user
+ """
+ )
+
+ result = testdir.runpytest(*option.args, no_reraise_ctrlc=True)
+ result.stdout.fnmatch_lines(
+ [
+ " def test_foobar():",
+ "> assert 0",
+ "E assert 0",
+ "*_keyboard_interrupt.py:6: KeyboardInterrupt*",
+ ]
+ )
+ if option.fulltrace:
+ result.stdout.fnmatch_lines(
+ ["*raise KeyboardInterrupt # simulating the user*"]
+ )
+ else:
+ result.stdout.fnmatch_lines(
+ ["(to show a full traceback on KeyboardInterrupt use --fulltrace)"]
+ )
+ result.stdout.fnmatch_lines(["*KeyboardInterrupt*"])
+
+ def test_keyboard_in_sessionstart(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_sessionstart():
+ raise KeyboardInterrupt
+ """
+ )
+ testdir.makepyfile(
+ """
+ def test_foobar():
+ pass
+ """
+ )
+
+ result = testdir.runpytest(no_reraise_ctrlc=True)
+ assert result.ret == 2
+ result.stdout.fnmatch_lines(["*KeyboardInterrupt*"])
+
+ def test_collect_single_item(self, testdir):
+ """Use singular 'item' when reporting a single test item"""
+ testdir.makepyfile(
+ """
+ def test_foobar():
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["collected 1 item"])
+
+ def test_rewrite(self, testdir, monkeypatch):
+ config = testdir.parseconfig()
+ f = py.io.TextIO()
+ monkeypatch.setattr(f, "isatty", lambda *args: True)
+ tr = TerminalReporter(config, f)
+ tr._tw.fullwidth = 10
+ tr.write("hello")
+ tr.rewrite("hey", erase=True)
+ assert f.getvalue() == "hello" + "\r" + "hey" + (6 * " ")
+
+
+class TestCollectonly(object):
+
+ def test_collectonly_basic(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_func():
+ pass
+ """
+ )
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(
+ ["<Module 'test_collectonly_basic.py'>", " <Function 'test_func'>"]
+ )
+
+ def test_collectonly_skipped_module(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ pytest.skip("hello")
+ """
+ )
+ result = testdir.runpytest("--collect-only", "-rs")
+ result.stdout.fnmatch_lines(["*ERROR collecting*"])
+
+ def test_collectonly_failed_module(self, testdir):
+ testdir.makepyfile("""raise ValueError(0)""")
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*raise ValueError*", "*1 error*"])
+
+ def test_collectonly_fatal(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_collectstart(collector):
+ assert 0, "urgs"
+ """
+ )
+ result = testdir.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*INTERNAL*args*"])
+ assert result.ret == 3
+
+ def test_collectonly_simple(self, testdir):
+ p = testdir.makepyfile(
+ """
+ def test_func1():
+ pass
+ class TestClass(object):
+ def test_method(self):
+ pass
+ """
+ )
+ result = testdir.runpytest("--collect-only", p)
+ # assert stderr.startswith("inserting into sys.path")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "*<Module '*.py'>",
+ "* <Function 'test_func1'*>",
+ "* <Class 'TestClass'>",
+ # "* <Instance '()'>",
+ "* <Function 'test_method'*>",
+ ]
+ )
+
+ def test_collectonly_error(self, testdir):
+ p = testdir.makepyfile("import Errlkjqweqwe")
+ result = testdir.runpytest("--collect-only", p)
+ assert result.ret == 2
+ result.stdout.fnmatch_lines(
+ _pytest._code.Source(
+ """
+ *ERROR*
+ *ImportError*
+ *No module named *Errlk*
+ *1 error*
+ """
+ ).strip()
+ )
+
+ def test_collectonly_missing_path(self, testdir):
+ """this checks issue 115,
+ failure in parseargs will cause session
+ not to have the items attribute
+ """
+ result = testdir.runpytest("--collect-only", "uhm_missing_path")
+ assert result.ret == 4
+ result.stderr.fnmatch_lines(["*ERROR: file not found*"])
+
+ def test_collectonly_quiet(self, testdir):
+ testdir.makepyfile("def test_foo(): pass")
+ result = testdir.runpytest("--collect-only", "-q")
+ result.stdout.fnmatch_lines(["*test_foo*"])
+
+ def test_collectonly_more_quiet(self, testdir):
+ testdir.makepyfile(test_fun="def test_foo(): pass")
+ result = testdir.runpytest("--collect-only", "-qq")
+ result.stdout.fnmatch_lines(["*test_fun.py: 1*"])
+
+
+def test_repr_python_version(monkeypatch):
+ try:
+ monkeypatch.setattr(sys, "version_info", (2, 5, 1, "final", 0))
+ assert repr_pythonversion() == "2.5.1-final-0"
+ sys.version_info = x = (2, 3)
+ assert repr_pythonversion() == str(x)
+ finally:
+ monkeypatch.undo() # do this early as pytest can get confused
+
+
+class TestFixtureReporting(object):
+
+ def test_setup_fixture_error(self, testdir):
+ testdir.makepyfile(
+ """
+ def setup_function(function):
+ print ("setup func")
+ assert 0
+ def test_nada():
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*ERROR at setup of test_nada*",
+ "*setup_function(function):*",
+ "*setup func*",
+ "*assert 0*",
+ "*1 error*",
+ ]
+ )
+ assert result.ret != 0
+
+ def test_teardown_fixture_error(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_nada():
+ pass
+ def teardown_function(function):
+ print ("teardown func")
+ assert 0
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*ERROR at teardown*",
+ "*teardown_function(function):*",
+ "*assert 0*",
+ "*Captured stdout*",
+ "*teardown func*",
+ "*1 passed*1 error*",
+ ]
+ )
+
+ def test_teardown_fixture_error_and_test_failure(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_fail():
+ assert 0, "failingfunc"
+
+ def teardown_function(function):
+ print ("teardown func")
+ assert False
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*ERROR at teardown of test_fail*",
+ "*teardown_function(function):*",
+ "*assert False*",
+ "*Captured stdout*",
+ "*teardown func*",
+ "*test_fail*",
+ "*def test_fail():",
+ "*failingfunc*",
+ "*1 failed*1 error*",
+ ]
+ )
+
+ def test_setup_teardown_output_and_test_failure(self, testdir):
+ """ Test for issue #442 """
+ testdir.makepyfile(
+ """
+ def setup_function(function):
+ print ("setup func")
+
+ def test_fail():
+ assert 0, "failingfunc"
+
+ def teardown_function(function):
+ print ("teardown func")
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*test_fail*",
+ "*def test_fail():",
+ "*failingfunc*",
+ "*Captured stdout setup*",
+ "*setup func*",
+ "*Captured stdout teardown*",
+ "*teardown func*",
+ "*1 failed*",
+ ]
+ )
+
+
+class TestTerminalFunctional(object):
+
+ def test_deselected(self, testdir):
+ testpath = testdir.makepyfile(
+ """
+ def test_one():
+ pass
+ def test_two():
+ pass
+ def test_three():
+ pass
+ """
+ )
+ result = testdir.runpytest("-k", "test_two:", testpath)
+ result.stdout.fnmatch_lines(
+ ["collected 3 items / 1 deselected", "*test_deselected.py ..*"]
+ )
+ assert result.ret == 0
+
+ def test_show_deselected_items_using_markexpr_before_test_execution(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.foo
+ def test_foobar():
+ pass
+
+ @pytest.mark.bar
+ def test_bar():
+ pass
+
+ def test_pass():
+ pass
+ """
+ )
+ result = testdir.runpytest("-m", "not foo")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 3 items / 1 deselected",
+ "*test_show_des*.py ..*",
+ "*= 2 passed, 1 deselected in * =*",
+ ]
+ )
+ assert "= 1 deselected =" not in result.stdout.str()
+ assert result.ret == 0
+
+ def test_no_skip_summary_if_failure(self, testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_ok():
+ pass
+ def test_fail():
+ assert 0
+ def test_skip():
+ pytest.skip("dontshow")
+ """
+ )
+ result = testdir.runpytest()
+ assert result.stdout.str().find("skip test summary") == -1
+ assert result.ret == 1
+
+ def test_passes(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ def test_passes():
+ pass
+ class TestClass(object):
+ def test_method(self):
+ pass
+ """
+ )
+ old = p1.dirpath().chdir()
+ try:
+ result = testdir.runpytest()
+ finally:
+ old.chdir()
+ result.stdout.fnmatch_lines(["test_passes.py ..*", "* 2 pass*"])
+ assert result.ret == 0
+
+ def test_header_trailer_info(self, testdir):
+ testdir.makepyfile(
+ """
+ def test_passes():
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ verinfo = ".".join(map(str, sys.version_info[:3]))
+ result.stdout.fnmatch_lines(
+ [
+ "*===== test session starts ====*",
+ "platform %s -- Python %s*pytest-%s*py-%s*pluggy-%s"
+ % (
+ sys.platform,
+ verinfo,
+ pytest.__version__,
+ py.__version__,
+ pluggy.__version__,
+ ),
+ "*test_header_trailer_info.py .*",
+ "=* 1 passed*in *.[0-9][0-9] seconds *=",
+ ]
+ )
+ if pytest.config.pluginmanager.list_plugin_distinfo():
+ result.stdout.fnmatch_lines(["plugins: *"])
+
+ def test_showlocals(self, testdir):
+ p1 = testdir.makepyfile(
+ """
+ def test_showlocals():
+ x = 3
+ y = "x" * 5000
+ assert 0
+ """
+ )
+ result = testdir.runpytest(p1, "-l")
+ result.stdout.fnmatch_lines(
+ [
+ # "_ _ * Locals *",
+ "x* = 3",
+ "y* = 'xxxxxx*",
+ ]
+ )
+
+ def test_verbose_reporting(self, testdir, pytestconfig):
+ p1 = testdir.makepyfile(
+ """
+ import pytest
+ def test_fail():
+ raise ValueError()
+ def test_pass():
+ pass
+ class TestClass(object):
+ def test_skip(self):
+ pytest.skip("hello")
+ def test_gen():
+ def check(x):
+ assert x == 1
+ yield check, 0
+ """
+ )
+ result = testdir.runpytest(p1, "-v")
+ result.stdout.fnmatch_lines(
+ [
+ "*test_verbose_reporting.py::test_fail *FAIL*",
+ "*test_verbose_reporting.py::test_pass *PASS*",
+ "*test_verbose_reporting.py::TestClass::test_skip *SKIP*",
+ "*test_verbose_reporting.py::test_gen*0* *FAIL*",
+ ]
+ )
+ assert result.ret == 1
+
+ if not pytestconfig.pluginmanager.get_plugin("xdist"):
+ pytest.skip("xdist plugin not installed")
+
+ result = testdir.runpytest(p1, "-v", "-n 1")
+ result.stdout.fnmatch_lines(["*FAIL*test_verbose_reporting.py::test_fail*"])
+ assert result.ret == 1
+
+ def test_quiet_reporting(self, testdir):
+ p1 = testdir.makepyfile("def test_pass(): pass")
+ result = testdir.runpytest(p1, "-q")
+ s = result.stdout.str()
+ assert "test session starts" not in s
+ assert p1.basename not in s
+ assert "===" not in s
+ assert "passed" in s
+
+ def test_more_quiet_reporting(self, testdir):
+ p1 = testdir.makepyfile("def test_pass(): pass")
+ result = testdir.runpytest(p1, "-qq")
+ s = result.stdout.str()
+ assert "test session starts" not in s
+ assert p1.basename not in s
+ assert "===" not in s
+ assert "passed" not in s
+
+ def test_report_collectionfinish_hook(self, testdir):
+ testdir.makeconftest(
+ """
+ def pytest_report_collectionfinish(config, startdir, items):
+ return ['hello from hook: {0} items'.format(len(items))]
+ """
+ )
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('i', range(3))
+ def test(i):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["collected 3 items", "hello from hook: 3 items"])
+
+
+def test_fail_extra_reporting(testdir):
+ testdir.makepyfile("def test_this(): assert 0")
+ result = testdir.runpytest()
+ assert "short test summary" not in result.stdout.str()
+ result = testdir.runpytest("-rf")
+ result.stdout.fnmatch_lines(["*test summary*", "FAIL*test_fail_extra_reporting*"])
+
+
+def test_fail_reporting_on_pass(testdir):
+ testdir.makepyfile("def test_this(): assert 1")
+ result = testdir.runpytest("-rf")
+ assert "short test summary" not in result.stdout.str()
+
+
+def test_pass_extra_reporting(testdir):
+ testdir.makepyfile("def test_this(): assert 1")
+ result = testdir.runpytest()
+ assert "short test summary" not in result.stdout.str()
+ result = testdir.runpytest("-rp")
+ result.stdout.fnmatch_lines(["*test summary*", "PASS*test_pass_extra_reporting*"])
+
+
+def test_pass_reporting_on_fail(testdir):
+ testdir.makepyfile("def test_this(): assert 0")
+ result = testdir.runpytest("-rp")
+ assert "short test summary" not in result.stdout.str()
+
+
+def test_pass_output_reporting(testdir):
+ testdir.makepyfile(
+ """
+ def test_pass_output():
+ print("Four score and seven years ago...")
+ """
+ )
+ result = testdir.runpytest()
+ assert "Four score and seven years ago..." not in result.stdout.str()
+ result = testdir.runpytest("-rP")
+ result.stdout.fnmatch_lines(["Four score and seven years ago..."])
+
+
+def test_color_yes(testdir):
+ testdir.makepyfile("def test_this(): assert 1")
+ result = testdir.runpytest("--color=yes")
+ assert "test session starts" in result.stdout.str()
+ assert "\x1b[1m" in result.stdout.str()
+
+
+def test_color_no(testdir):
+ testdir.makepyfile("def test_this(): assert 1")
+ result = testdir.runpytest("--color=no")
+ assert "test session starts" in result.stdout.str()
+ assert "\x1b[1m" not in result.stdout.str()
+
+
+@pytest.mark.parametrize("verbose", [True, False])
+def test_color_yes_collection_on_non_atty(testdir, verbose):
+ """skip collect progress report when working on non-terminals.
+ #1397
+ """
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('i', range(10))
+ def test_this(i):
+ assert 1
+ """
+ )
+ args = ["--color=yes"]
+ if verbose:
+ args.append("-vv")
+ result = testdir.runpytest(*args)
+ assert "test session starts" in result.stdout.str()
+ assert "\x1b[1m" in result.stdout.str()
+ assert "collecting 10 items" not in result.stdout.str()
+ if verbose:
+ assert "collecting ..." in result.stdout.str()
+ assert "collected 10 items" in result.stdout.str()
+
+
+def test_getreportopt():
+
+ class Config(object):
+
+ class Option(object):
+ reportchars = ""
+ disable_warnings = True
+
+ option = Option()
+
+ config = Config()
+
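+ # getreportopt appends "w" to the report chars when warnings are enabled
+ # and strips it when warnings are disabled, as the cases below exercise.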
+ config.option.reportchars = "sf"
+ assert getreportopt(config) == "sf"
+
+ config.option.reportchars = "sfxw"
+ assert getreportopt(config) == "sfx"
+
+ config.option.reportchars = "sfx"
+ config.option.disable_warnings = False
+ assert getreportopt(config) == "sfxw"
+
+ config.option.reportchars = "sfxw"
+ config.option.disable_warnings = False
+ assert getreportopt(config) == "sfxw"
+
+
+def test_terminalreporter_reportopt_addopts(testdir):
+ testdir.makeini("[pytest]\naddopts=-rs")
+ testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def tr(request):
+ tr = request.config.pluginmanager.getplugin("terminalreporter")
+ return tr
+ def test_opt(tr):
+ assert tr.hasopt('skipped')
+ assert not tr.hasopt('qwe')
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_tbstyle_short(testdir):
+ p = testdir.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def arg(request):
+ return 42
+ def test_opt(arg):
+ x = 0
+ assert x
+ """
+ )
+ result = testdir.runpytest("--tb=short")
+ s = result.stdout.str()
+ assert "arg = 42" not in s
+ assert "x = 0" not in s
+ result.stdout.fnmatch_lines(["*%s:8*" % p.basename, " assert x", "E assert*"])
+ result = testdir.runpytest()
+ s = result.stdout.str()
+ assert "x = 0" in s
+ assert "assert x" in s
+
+
+def test_traceconfig(testdir, monkeypatch):
+ result = testdir.runpytest("--traceconfig")
+ result.stdout.fnmatch_lines(["*active plugins*"])
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+
+
+class TestGenericReporting(object):
+ """ this test class can be subclassed with a different option
+ provider to run e.g. distributed tests.
+ """
+
+ def test_collect_fail(self, testdir, option):
+ testdir.makepyfile("import xyz\n")
+ result = testdir.runpytest(*option.args)
+ result.stdout.fnmatch_lines(
+ ["ImportError while importing*", "*No module named *xyz*", "*1 error*"]
+ )
+
+ def test_maxfailures(self, testdir, option):
+ testdir.makepyfile(
+ """
+ def test_1():
+ assert 0
+ def test_2():
+ assert 0
+ def test_3():
+ assert 0
+ """
+ )
+ result = testdir.runpytest("--maxfail=2", *option.args)
+ result.stdout.fnmatch_lines(
+ ["*def test_1():*", "*def test_2():*", "*2 failed*"]
+ )
+
+ def test_tb_option(self, testdir, option):
+ testdir.makepyfile(
+ """
+ import pytest
+ def g():
+ raise IndexError
+ def test_func():
+ print (6*7)
+ g() # --calling--
+ """
+ )
+ for tbopt in ["long", "short", "no"]:
+ print("testing --tb=%s..." % tbopt)
+ result = testdir.runpytest("--tb=%s" % tbopt)
+ s = result.stdout.str()
+ if tbopt == "long":
+ assert "print (6*7)" in s
+ else:
+ assert "print (6*7)" not in s
+ if tbopt != "no":
+ assert "--calling--" in s
+ assert "IndexError" in s
+ else:
+ assert "FAILURES" not in s
+ assert "--calling--" not in s
+ assert "IndexError" not in s
+
+ def test_tb_crashline(self, testdir, option):
+ p = testdir.makepyfile(
+ """
+ import pytest
+ def g():
+ raise IndexError
+ def test_func1():
+ print (6*7)
+ g() # --calling--
+ def test_func2():
+ assert 0, "hello"
+ """
+ )
+ result = testdir.runpytest("--tb=line")
+ bn = p.basename
+ result.stdout.fnmatch_lines(
+ ["*%s:3: IndexError*" % bn, "*%s:8: AssertionError: hello*" % bn]
+ )
+ s = result.stdout.str()
+ assert "def test_func2" not in s
+
+ def test_pytest_report_header(self, testdir, option):
+ testdir.makeconftest(
+ """
+ def pytest_sessionstart(session):
+ session.config._somevalue = 42
+ def pytest_report_header(config):
+ return "hello: %s" % config._somevalue
+ """
+ )
+ testdir.mkdir("a").join("conftest.py").write(
+ """
+def pytest_report_header(config, startdir):
+ return ["line1", str(startdir)]
+"""
+ )
+ result = testdir.runpytest("a")
+ result.stdout.fnmatch_lines(["*hello: 42*", "line1", str(testdir.tmpdir)])
+
+ def test_show_capture(self, testdir):
+ testdir.makepyfile(
+ """
+ import sys
+ import logging
+ def test_one():
+ sys.stdout.write('!This is stdout!')
+ sys.stderr.write('!This is stderr!')
+ logging.warning('!This is a warning log msg!')
+ assert False, 'Something failed'
+ """
+ )
+
+ result = testdir.runpytest("--tb=short")
+ result.stdout.fnmatch_lines(
+ [
+ "!This is stdout!",
+ "!This is stderr!",
+ "*WARNING*!This is a warning log msg!",
+ ]
+ )
+
+ result = testdir.runpytest("--show-capture=all", "--tb=short")
+ result.stdout.fnmatch_lines(
+ [
+ "!This is stdout!",
+ "!This is stderr!",
+ "*WARNING*!This is a warning log msg!",
+ ]
+ )
+
+ stdout = testdir.runpytest("--show-capture=stdout", "--tb=short").stdout.str()
+ assert "!This is stderr!" not in stdout
+ assert "!This is stdout!" in stdout
+ assert "!This is a warning log msg!" not in stdout
+
+ stdout = testdir.runpytest("--show-capture=stderr", "--tb=short").stdout.str()
+ assert "!This is stdout!" not in stdout
+ assert "!This is stderr!" in stdout
+ assert "!This is a warning log msg!" not in stdout
+
+ stdout = testdir.runpytest("--show-capture=log", "--tb=short").stdout.str()
+ assert "!This is stdout!" not in stdout
+ assert "!This is stderr!" not in stdout
+ assert "!This is a warning log msg!" in stdout
+
+ stdout = testdir.runpytest("--show-capture=no", "--tb=short").stdout.str()
+ assert "!This is stdout!" not in stdout
+ assert "!This is stderr!" not in stdout
+ assert "!This is a warning log msg!" not in stdout
+
+
+@pytest.mark.xfail("not hasattr(os, 'dup')")
+def test_fdopen_kept_alive_issue124(testdir):
+ testdir.makepyfile(
+ """
+ import os, sys
+ k = []
+ def test_open_file_and_keep_alive(capfd):
+ stdout = os.fdopen(1, 'w', 1)
+ k.append(stdout)
+
+ def test_close_kept_alive_file():
+ stdout = k.pop()
+ stdout.close()
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+
+def test_tbstyle_native_setup_error(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def setup_error_fixture():
+ raise Exception("error in exception")
+
+ def test_error_fixture(setup_error_fixture):
+ pass
+ """
+ )
+ result = testdir.runpytest("--tb=native")
+ result.stdout.fnmatch_lines(
+ ['*File *test_tbstyle_native_setup_error.py", line *, in setup_error_fixture*']
+ )
+
+
+def test_terminal_summary(testdir):
+ testdir.makeconftest(
+ """
+ def pytest_terminal_summary(terminalreporter, exitstatus):
+ w = terminalreporter
+ w.section("hello")
+ w.line("world")
+ w.line("exitstatus: {0}".format(exitstatus))
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *==== hello ====*
+ world
+ exitstatus: 5
+ """
+ )
+
+
+def test_terminal_summary_warnings_are_displayed(testdir):
+ """Test that warnings emitted during pytest_terminal_summary are displayed.
+ (#1305).
+ """
+ testdir.makeconftest(
+ """
+ def pytest_terminal_summary(terminalreporter):
+ config = terminalreporter.config
+ config.warn('C1', 'internal warning')
+ """
+ )
+ result = testdir.runpytest("-rw")
+ result.stdout.fnmatch_lines(
+ ["<undetermined location>", "*internal warning", "*== 1 warnings in *"]
+ )
+ assert "None" not in result.stdout.str()
+
+
+@pytest.mark.parametrize(
+ "exp_color, exp_line, stats_arg",
+ [
+ # The method under test only cares about the length of each
+ # dict value, not the actual contents, so tuples of anything
+ # suffice
+ # Important statuses -- the highest priority of these always wins
+ ("red", "1 failed", {"failed": (1,)}),
+ ("red", "1 failed, 1 passed", {"failed": (1,), "passed": (1,)}),
+ ("red", "1 error", {"error": (1,)}),
+ ("red", "1 passed, 1 error", {"error": (1,), "passed": (1,)}),
+ # (a status that's not known to the code)
+ ("yellow", "1 weird", {"weird": (1,)}),
+ ("yellow", "1 passed, 1 weird", {"weird": (1,), "passed": (1,)}),
+ ("yellow", "1 warnings", {"warnings": (1,)}),
+ ("yellow", "1 passed, 1 warnings", {"warnings": (1,), "passed": (1,)}),
+ ("green", "5 passed", {"passed": (1, 2, 3, 4, 5)}),
+ # "Boring" statuses. These have no effect on the color of the summary
+ # line. Thus, if *every* test has a boring status, the summary line stays
+ # at its default color, i.e. yellow, to warn the user that the test run
+ # produced no useful information
+ ("yellow", "1 skipped", {"skipped": (1,)}),
+ ("green", "1 passed, 1 skipped", {"skipped": (1,), "passed": (1,)}),
+ ("yellow", "1 deselected", {"deselected": (1,)}),
+ ("green", "1 passed, 1 deselected", {"deselected": (1,), "passed": (1,)}),
+ ("yellow", "1 xfailed", {"xfailed": (1,)}),
+ ("green", "1 passed, 1 xfailed", {"xfailed": (1,), "passed": (1,)}),
+ ("yellow", "1 xpassed", {"xpassed": (1,)}),
+ ("green", "1 passed, 1 xpassed", {"xpassed": (1,), "passed": (1,)}),
+ # Likewise if no tests were found at all
+ ("yellow", "no tests ran", {}),
+ # Test the empty-key special case
+ ("yellow", "no tests ran", {"": (1,)}),
+ ("green", "1 passed", {"": (1,), "passed": (1,)}),
+ # A couple more complex combinations
+ (
+ "red",
+ "1 failed, 2 passed, 3 xfailed",
+ {"passed": (1, 2), "failed": (1,), "xfailed": (1, 2, 3)},
+ ),
+ (
+ "green",
+ "1 passed, 2 skipped, 3 deselected, 2 xfailed",
+ {
+ "passed": (1,),
+ "skipped": (1, 2),
+ "deselected": (1, 2, 3),
+ "xfailed": (1, 2),
+ },
+ ),
+ ],
+)
+def test_summary_stats(exp_line, exp_color, stats_arg):
+ print("Based on stats: %s" % stats_arg)
+ print('Expect summary: "%s"; with color "%s"' % (exp_line, exp_color))
+ (line, color) = build_summary_stats_line(stats_arg)
+ print('Actually got: "%s"; with color "%s"' % (line, color))
+ assert line == exp_line
+ assert color == exp_color
+
+
+def test_no_trailing_whitespace_after_inifile_word(testdir):
+ result = testdir.runpytest("")
+ assert "inifile:\n" in result.stdout.str()
+
+ testdir.makeini("[pytest]")
+ result = testdir.runpytest("")
+ assert "inifile: tox.ini\n" in result.stdout.str()
+
+
+class TestProgress(object):
+
+ @pytest.fixture
+ def many_tests_files(self, testdir):
+ testdir.makepyfile(
+ test_bar="""
+ import pytest
+ @pytest.mark.parametrize('i', range(10))
+ def test_bar(i): pass
+ """,
+ test_foo="""
+ import pytest
+ @pytest.mark.parametrize('i', range(5))
+ def test_foo(i): pass
+ """,
+ test_foobar="""
+ import pytest
+ @pytest.mark.parametrize('i', range(5))
+ def test_foobar(i): pass
+ """,
+ )
+
+ def test_zero_tests_collected(self, testdir):
+ """Some plugins (testmon for example) might issue pytest_runtest_logreport without any tests being
+ actually collected (#2971)."""
+ testdir.makeconftest(
+ """
+ def pytest_collection_modifyitems(items, config):
+ from _pytest.runner import CollectReport
+ for node_id in ('nodeid1', 'nodeid2'):
+ rep = CollectReport(node_id, 'passed', None, None)
+ rep.when = 'passed'
+ rep.duration = 0.1
+ config.hook.pytest_runtest_logreport(report=rep)
+ """
+ )
+ output = testdir.runpytest()
+ assert "ZeroDivisionError" not in output.stdout.str()
+ output.stdout.fnmatch_lines(["=* 2 passed in *="])
+
+ def test_normal(self, many_tests_files, testdir):
+ output = testdir.runpytest()
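+ # 10 + 5 + 5 = 20 tests in total, so the progress column shows
+ # cumulative percentages of 50%, 75% and 100% per file.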
+ output.stdout.re_match_lines(
+ [
+ r"test_bar.py \.{10} \s+ \[ 50%\]",
+ r"test_foo.py \.{5} \s+ \[ 75%\]",
+ r"test_foobar.py \.{5} \s+ \[100%\]",
+ ]
+ )
+
+ def test_verbose(self, many_tests_files, testdir):
+ output = testdir.runpytest("-v")
+ output.stdout.re_match_lines(
+ [
+ r"test_bar.py::test_bar\[0\] PASSED \s+ \[ 5%\]",
+ r"test_foo.py::test_foo\[4\] PASSED \s+ \[ 75%\]",
+ r"test_foobar.py::test_foobar\[4\] PASSED \s+ \[100%\]",
+ ]
+ )
+
+ def test_xdist_normal(self, many_tests_files, testdir):
+ pytest.importorskip("xdist")
+ output = testdir.runpytest("-n2")
+ output.stdout.re_match_lines([r"\.{20} \s+ \[100%\]"])
+
+ def test_xdist_verbose(self, many_tests_files, testdir):
+ pytest.importorskip("xdist")
+ output = testdir.runpytest("-n2", "-v")
+ output.stdout.re_match_lines_random(
+ [
+ r"\[gw\d\] \[\s*\d+%\] PASSED test_bar.py::test_bar\[1\]",
+ r"\[gw\d\] \[\s*\d+%\] PASSED test_foo.py::test_foo\[1\]",
+ r"\[gw\d\] \[\s*\d+%\] PASSED test_foobar.py::test_foobar\[1\]",
+ ]
+ )
+
+ def test_capture_no(self, many_tests_files, testdir):
+ output = testdir.runpytest("-s")
+ output.stdout.re_match_lines(
+ [r"test_bar.py \.{10}", r"test_foo.py \.{5}", r"test_foobar.py \.{5}"]
+ )
+
+ output = testdir.runpytest("--capture=no")
+ assert "%]" not in output.stdout.str()
+
+
+class TestProgressWithTeardown(object):
+ """Ensure we show the correct percentages for tests that fail during teardown (#3088)"""
+
+ @pytest.fixture
+ def contest_with_teardown_fixture(self, testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def fail_teardown():
+ yield
+ assert False
+ """
+ )
+
+ @pytest.fixture
+ def many_files(self, testdir, contest_with_teardown_fixture):
+ testdir.makepyfile(
+ test_bar="""
+ import pytest
+ @pytest.mark.parametrize('i', range(5))
+ def test_bar(fail_teardown, i):
+ pass
+ """,
+ test_foo="""
+ import pytest
+ @pytest.mark.parametrize('i', range(15))
+ def test_foo(fail_teardown, i):
+ pass
+ """,
+ )
+
+ def test_teardown_simple(self, testdir, contest_with_teardown_fixture):
+ testdir.makepyfile(
+ """
+ def test_foo(fail_teardown):
+ pass
+ """
+ )
+ output = testdir.runpytest()
+ output.stdout.re_match_lines([r"test_teardown_simple.py \.E\s+\[100%\]"])
+
+ def test_teardown_with_test_also_failing(
+ self, testdir, contest_with_teardown_fixture
+ ):
+ testdir.makepyfile(
+ """
+ def test_foo(fail_teardown):
+ assert False
+ """
+ )
+ output = testdir.runpytest()
+ output.stdout.re_match_lines(
+ [r"test_teardown_with_test_also_failing.py FE\s+\[100%\]"]
+ )
+
+ def test_teardown_many(self, testdir, many_files):
+ output = testdir.runpytest()
+ output.stdout.re_match_lines(
+ [r"test_bar.py (\.E){5}\s+\[ 25%\]", r"test_foo.py (\.E){15}\s+\[100%\]"]
+ )
+
+ def test_teardown_many_verbose(self, testdir, many_files):
+ output = testdir.runpytest("-v")
+ output.stdout.re_match_lines(
+ [
+ r"test_bar.py::test_bar\[0\] PASSED\s+\[ 5%\]",
+ r"test_bar.py::test_bar\[0\] ERROR\s+\[ 5%\]",
+ r"test_bar.py::test_bar\[4\] PASSED\s+\[ 25%\]",
+ r"test_bar.py::test_bar\[4\] ERROR\s+\[ 25%\]",
+ ]
+ )
+
+ def test_xdist_normal(self, many_files, testdir):
+ pytest.importorskip("xdist")
+ output = testdir.runpytest("-n2")
+ output.stdout.re_match_lines([r"[\.E]{40} \s+ \[100%\]"])
diff --git a/third_party/python/pytest/testing/test_tmpdir.py b/third_party/python/pytest/testing/test_tmpdir.py
new file mode 100644
index 0000000000..3362490949
--- /dev/null
+++ b/third_party/python/pytest/testing/test_tmpdir.py
@@ -0,0 +1,212 @@
+from __future__ import absolute_import, division, print_function
+import sys
+import py
+import pytest
+
+from _pytest.tmpdir import tmpdir
+
+
+def test_funcarg(testdir):
+ testdir.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.addcall(id='a')
+ metafunc.addcall(id='b')
+ def test_func(tmpdir): pass
+ """
+ )
+ from _pytest.tmpdir import TempdirFactory
+
+ reprec = testdir.inline_run()
+ calls = reprec.getcalls("pytest_runtest_setup")
+ item = calls[0].item
+ config = item.config
+ tmpdirhandler = TempdirFactory(config)
+ item._initrequest()
+ p = tmpdir(item._request, tmpdirhandler)
+ assert p.check()
+ bn = p.basename.strip("0123456789")
+ assert bn.endswith("test_func_a_")
+ item.name = "qwe/\\abc"
+ p = tmpdir(item._request, tmpdirhandler)
+ assert p.check()
+ bn = p.basename.strip("0123456789")
+ assert bn == "qwe__abc"
+
+
+def test_ensuretemp(recwarn):
+ d1 = pytest.ensuretemp("hello")
+ d2 = pytest.ensuretemp("hello")
+ assert d1 == d2
+ assert d1.check(dir=1)
+
+
+class TestTempdirHandler(object):
+
+ def test_mktemp(self, testdir):
+ from _pytest.tmpdir import TempdirFactory
+
+ config = testdir.parseconfig()
+ config.option.basetemp = testdir.mkdir("hello")
+ t = TempdirFactory(config)
+ tmp = t.mktemp("world")
+ assert tmp.relto(t.getbasetemp()) == "world0"
+ tmp = t.mktemp("this")
+ assert tmp.relto(t.getbasetemp()).startswith("this")
+ tmp2 = t.mktemp("this")
+ assert tmp2.relto(t.getbasetemp()).startswith("this")
+ assert tmp2 != tmp
+
+
+class TestConfigTmpdir(object):
+
+ def test_getbasetemp_custom_removes_old(self, testdir):
+ mytemp = testdir.tmpdir.join("xyz")
+ p = testdir.makepyfile(
+ """
+ def test_1(tmpdir):
+ pass
+ """
+ )
+ testdir.runpytest(p, "--basetemp=%s" % mytemp)
+ mytemp.check()
+ mytemp.ensure("hello")
+
+ testdir.runpytest(p, "--basetemp=%s" % mytemp)
+ mytemp.check()
+ assert not mytemp.join("hello").check()
+
+
+def test_basetemp(testdir):
+ mytemp = testdir.tmpdir.mkdir("mytemp")
+ p = testdir.makepyfile(
+ """
+ import pytest
+ def test_1():
+ pytest.ensuretemp("hello")
+ """
+ )
+ result = testdir.runpytest(p, "--basetemp=%s" % mytemp)
+ assert result.ret == 0
+ assert mytemp.join("hello").check()
+
+
+@pytest.mark.skipif(
+ not hasattr(py.path.local, "mksymlinkto"),
+ reason="symlink not available on this platform",
+)
+def test_tmpdir_always_is_realpath(testdir):
+ # tmpdir should be a realpath because when you cd into it and call
+ # os.getcwd() you get the realpath back anyway; using the symlinked
+ # path can thus easily result in path inequality.
+ # XXX if that proves to be a problem, consider using
+ # os.environ["PWD"]
+ realtemp = testdir.tmpdir.mkdir("myrealtemp")
+ linktemp = testdir.tmpdir.join("symlinktemp")
+ linktemp.mksymlinkto(realtemp)
+ p = testdir.makepyfile(
+ """
+ def test_1(tmpdir):
+ import os
+ assert os.path.realpath(str(tmpdir)) == str(tmpdir)
+ """
+ )
+ result = testdir.runpytest("-s", p, "--basetemp=%s/bt" % linktemp)
+ assert not result.ret
+
+
+def test_tmpdir_too_long_on_parametrization(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize("arg", ["1"*1000])
+ def test_some(arg, tmpdir):
+ tmpdir.ensure("hello")
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+def test_tmpdir_factory(testdir):
+ testdir.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope='session')
+ def session_dir(tmpdir_factory):
+ return tmpdir_factory.mktemp('data', numbered=False)
+ def test_some(session_dir):
+ session_dir.isdir()
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+def test_tmpdir_fallback_tox_env(testdir, monkeypatch):
+ """Test that tmpdir works even if environment variables required by getpass
+ module are missing (#1010).
+ """
+ monkeypatch.delenv("USER", raising=False)
+ monkeypatch.delenv("USERNAME", raising=False)
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_some(tmpdir):
+ assert tmpdir.isdir()
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+@pytest.fixture
+def break_getuser(monkeypatch):
+ monkeypatch.setattr("os.getuid", lambda: -1)
+ # taken from python 2.7/3.4
+ for envvar in ("LOGNAME", "USER", "LNAME", "USERNAME"):
+ monkeypatch.delenv(envvar, raising=False)
+
+
+@pytest.mark.usefixtures("break_getuser")
+@pytest.mark.skipif(sys.platform.startswith("win"), reason="no os.getuid on windows")
+def test_tmpdir_fallback_uid_not_found(testdir):
+ """Test that tmpdir works even if the current process's user id does not
+ correspond to a valid user.
+ """
+
+ testdir.makepyfile(
+ """
+ import pytest
+ def test_some(tmpdir):
+ assert tmpdir.isdir()
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+@pytest.mark.usefixtures("break_getuser")
+@pytest.mark.skipif(sys.platform.startswith("win"), reason="no os.getuid on windows")
+def test_get_user_uid_not_found():
+ """Test that get_user() function works even if the current process's
+ user id does not correspond to a valid user (e.g. running pytest in a
+ Docker container with 'docker run -u'.
+ """
+ from _pytest.tmpdir import get_user
+
+ assert get_user() is None
+
+
+@pytest.mark.skipif(not sys.platform.startswith("win"), reason="win only")
+def test_get_user(monkeypatch):
+ """Test that get_user() function works even if environment variables
+ required by getpass module are missing from the environment on Windows
+ (#1010).
+ """
+ from _pytest.tmpdir import get_user
+
+ monkeypatch.delenv("USER", raising=False)
+ monkeypatch.delenv("USERNAME", raising=False)
+ assert get_user() is None
diff --git a/third_party/python/pytest/testing/test_unittest.py b/third_party/python/pytest/testing/test_unittest.py
new file mode 100644
index 0000000000..65ffdb9753
--- /dev/null
+++ b/third_party/python/pytest/testing/test_unittest.py
@@ -0,0 +1,992 @@
+from __future__ import absolute_import, division, print_function
+from _pytest.main import EXIT_NOTESTSCOLLECTED
+import pytest
+import gc
+
+
+def test_simple_unittest(testdir):
+ testpath = testdir.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ def testpassing(self):
+ self.assertEqual('foo', 'foo')
+ def test_failing(self):
+ self.assertEqual('foo', 'bar')
+ """
+ )
+ reprec = testdir.inline_run(testpath)
+ assert reprec.matchreport("testpassing").passed
+ assert reprec.matchreport("test_failing").failed
+
+
+def test_runTest_method(testdir):
+ testdir.makepyfile(
+ """
+ import unittest
+ class MyTestCaseWithRunTest(unittest.TestCase):
+ def runTest(self):
+ self.assertEqual('foo', 'foo')
+ class MyTestCaseWithoutRunTest(unittest.TestCase):
+ def runTest(self):
+ self.assertEqual('foo', 'foo')
+ def test_something(self):
+ pass
+ """
+ )
+ result = testdir.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ """
+ *MyTestCaseWithRunTest::runTest*
+ *MyTestCaseWithoutRunTest::test_something*
+ *2 passed*
+ """
+ )
+
+
+def test_isclasscheck_issue53(testdir):
+ testpath = testdir.makepyfile(
+ """
+ import unittest
+ class _E(object):
+ def __getattr__(self, tag):
+ pass
+ E = _E()
+ """
+ )
+ result = testdir.runpytest(testpath)
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+
+
+def test_setup(testdir):
+ testpath = testdir.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ def setUp(self):
+ self.foo = 1
+ def setup_method(self, method):
+ self.foo2 = 1
+ def test_both(self):
+ self.assertEqual(1, self.foo)
+ assert self.foo2 == 1
+ def teardown_method(self, method):
+ assert 0, "42"
+
+ """
+ )
+ reprec = testdir.inline_run("-s", testpath)
+ assert reprec.matchreport("test_both", when="call").passed
+ rep = reprec.matchreport("test_both", when="teardown")
+ assert rep.failed and "42" in str(rep.longrepr)
+
+
+def test_setUpModule(testdir):
+ testpath = testdir.makepyfile(
+ """
+ values = []
+
+ def setUpModule():
+ values.append(1)
+
+ def tearDownModule():
+ del values[0]
+
+ def test_hello():
+ assert values == [1]
+
+ def test_world():
+ assert values == [1]
+ """
+ )
+ result = testdir.runpytest(testpath)
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+
+def test_setUpModule_failing_no_teardown(testdir):
+ testpath = testdir.makepyfile(
+ """
+ values = []
+
+ def setUpModule():
+ 0/0
+
+ def tearDownModule():
+ values.append(1)
+
+ def test_hello():
+ pass
+ """
+ )
+ reprec = testdir.inline_run(testpath)
+ reprec.assertoutcome(passed=0, failed=1)
+ call = reprec.getcalls("pytest_runtest_setup")[0]
+ assert not call.item.module.values
+
+
+def test_new_instances(testdir):
+ testpath = testdir.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ def test_func1(self):
+ self.x = 2
+ def test_func2(self):
+ assert not hasattr(self, 'x')
+ """
+ )
+ reprec = testdir.inline_run(testpath)
+ reprec.assertoutcome(passed=2)
+
+
+def test_teardown(testdir):
+ testpath = testdir.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ values = []
+ def test_one(self):
+ pass
+ def tearDown(self):
+ self.values.append(None)
+ class Second(unittest.TestCase):
+ def test_check(self):
+ self.assertEqual(MyTestCase.values, [None])
+ """
+ )
+ reprec = testdir.inline_run(testpath)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 0, failed
+ assert passed == 2
+ assert passed + skipped + failed == 2
+
+
+def test_teardown_issue1649(testdir):
+ """
+ Are TestCase objects cleaned up? Often unittest TestCase objects set
+ attributes that are large and expensive during setUp.
+
+ The TestCase will not be cleaned up if the test fails, because it
+ would then exist in the stackframe.
+ """
+ testpath = testdir.makepyfile(
+ """
+ import unittest
+ class TestCaseObjectsShouldBeCleanedUp(unittest.TestCase):
+ def setUp(self):
+ self.an_expensive_object = 1
+ def test_demo(self):
+ pass
+
+ """
+ )
+ testdir.inline_run("-s", testpath)
+ gc.collect()
+ for obj in gc.get_objects():
+ assert type(obj).__name__ != "TestCaseObjectsShouldBeCleanedUp"
+
+
+def test_unittest_skip_issue148(testdir):
+ testpath = testdir.makepyfile(
+ """
+ import unittest
+
+ @unittest.skip("hello")
+ class MyTestCase(unittest.TestCase):
+ @classmethod
+ def setUpClass(self):
+ xxx
+ def test_one(self):
+ pass
+ @classmethod
+ def tearDownClass(self):
+ xxx
+ """
+ )
+ reprec = testdir.inline_run(testpath)
+ reprec.assertoutcome(skipped=1)
+
+
+def test_method_and_teardown_failing_reporting(testdir):
+ testdir.makepyfile(
+ """
+ import unittest, pytest
+ class TC(unittest.TestCase):
+ def tearDown(self):
+ assert 0, "down1"
+ def test_method(self):
+ assert False, "down2"
+ """
+ )
+ result = testdir.runpytest("-s")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ [
+ "*tearDown*",
+ "*assert 0*",
+ "*test_method*",
+ "*assert False*",
+ "*1 failed*1 error*",
+ ]
+ )
+
+
+def test_setup_failure_is_shown(testdir):
+ testdir.makepyfile(
+ """
+ import unittest
+ import pytest
+ class TC(unittest.TestCase):
+ def setUp(self):
+ assert 0, "down1"
+ def test_method(self):
+ print ("never42")
+ xyz
+ """
+ )
+ result = testdir.runpytest("-s")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*setUp*", "*assert 0*down1*", "*1 failed*"])
+ assert "never42" not in result.stdout.str()
+
+
+def test_setup_setUpClass(testdir):
+ testpath = testdir.makepyfile(
+ """
+ import unittest
+ import pytest
+ class MyTestCase(unittest.TestCase):
+ x = 0
+ @classmethod
+ def setUpClass(cls):
+ cls.x += 1
+ def test_func1(self):
+ assert self.x == 1
+ def test_func2(self):
+ assert self.x == 1
+ @classmethod
+ def tearDownClass(cls):
+ cls.x -= 1
+ def test_teareddown():
+ assert MyTestCase.x == 0
+ """
+ )
+ reprec = testdir.inline_run(testpath)
+ reprec.assertoutcome(passed=3)
+
+
+def test_setup_class(testdir):
+ testpath = testdir.makepyfile(
+ """
+ import unittest
+ import pytest
+ class MyTestCase(unittest.TestCase):
+ x = 0
+ def setup_class(cls):
+ cls.x += 1
+ def test_func1(self):
+ assert self.x == 1
+ def test_func2(self):
+ assert self.x == 1
+ def teardown_class(cls):
+ cls.x -= 1
+ def test_teareddown():
+ assert MyTestCase.x == 0
+ """
+ )
+ reprec = testdir.inline_run(testpath)
+ reprec.assertoutcome(passed=3)
+
+
+@pytest.mark.parametrize("type", ["Error", "Failure"])
+def test_testcase_adderrorandfailure_defers(testdir, type):
+ testdir.makepyfile(
+ """
+ from unittest import TestCase
+ import pytest
+ class MyTestCase(TestCase):
+ def run(self, result):
+ excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)
+ try:
+ result.add%s(self, excinfo._excinfo)
+ except KeyboardInterrupt:
+ raise
+ except:
+ pytest.fail("add%s should not raise")
+ def test_hello(self):
+ pass
+ """
+ % (type, type)
+ )
+ result = testdir.runpytest()
+ assert "should not raise" not in result.stdout.str()
+
+
+@pytest.mark.parametrize("type", ["Error", "Failure"])
+def test_testcase_custom_exception_info(testdir, type):
+ testdir.makepyfile(
+ """
+ from unittest import TestCase
+ import py, pytest
+ import _pytest._code
+ class MyTestCase(TestCase):
+ def run(self, result):
+ excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)
+ # we fake an incompatible exception info
+ from _pytest.monkeypatch import MonkeyPatch
+ mp = MonkeyPatch()
+ def t(*args):
+ mp.undo()
+ raise TypeError()
+ mp.setattr(_pytest._code, 'ExceptionInfo', t)
+ try:
+ excinfo = excinfo._excinfo
+ result.add%(type)s(self, excinfo)
+ finally:
+ mp.undo()
+ def test_hello(self):
+ pass
+ """
+ % locals()
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "NOTE: Incompatible Exception Representation*",
+ "*ZeroDivisionError*",
+ "*1 failed*",
+ ]
+ )
+
+
+def test_testcase_totally_incompatible_exception_info(testdir):
+ item, = testdir.getitems(
+ """
+ from unittest import TestCase
+ class MyTestCase(TestCase):
+ def test_hello(self):
+ pass
+ """
+ )
+ item.addError(None, 42)
+ excinfo = item._excinfo.pop(0)
+ assert "ERROR: Unknown Incompatible" in str(excinfo.getrepr())
+
+
+def test_module_level_pytestmark(testdir):
+ testpath = testdir.makepyfile(
+ """
+ import unittest
+ import pytest
+ pytestmark = pytest.mark.xfail
+ class MyTestCase(unittest.TestCase):
+ def test_func1(self):
+ assert 0
+ """
+ )
+ reprec = testdir.inline_run(testpath, "-s")
+ reprec.assertoutcome(skipped=1)
+
+
+class TestTrialUnittest(object):
+
+ def setup_class(cls):
+ cls.ut = pytest.importorskip("twisted.trial.unittest")
+ # on Windows, trial uses a socket for a reactor and apparently doesn't close it properly
+ # https://twistedmatrix.com/trac/ticket/9227
+ cls.ignore_unclosed_socket_warning = ("-W", "always")
+
+ def test_trial_testcase_runtest_not_collected(self, testdir):
+ testdir.makepyfile(
+ """
+ from twisted.trial.unittest import TestCase
+
+ class TC(TestCase):
+ def test_hello(self):
+ pass
+ """
+ )
+ reprec = testdir.inline_run(*self.ignore_unclosed_socket_warning)
+ reprec.assertoutcome(passed=1)
+ testdir.makepyfile(
+ """
+ from twisted.trial.unittest import TestCase
+
+ class TC(TestCase):
+ def runTest(self):
+ pass
+ """
+ )
+ reprec = testdir.inline_run(*self.ignore_unclosed_socket_warning)
+ reprec.assertoutcome(passed=1)
+
+ def test_trial_exceptions_with_skips(self, testdir):
+ testdir.makepyfile(
+ """
+ from twisted.trial import unittest
+ import pytest
+ class TC(unittest.TestCase):
+ def test_hello(self):
+ pytest.skip("skip_in_method")
+ @pytest.mark.skipif("sys.version_info != 1")
+ def test_hello2(self):
+ pass
+ @pytest.mark.xfail(reason="iwanto")
+ def test_hello3(self):
+ assert 0
+ def test_hello4(self):
+ pytest.xfail("i2wanto")
+ def test_trial_skip(self):
+ pass
+ test_trial_skip.skip = "trialselfskip"
+
+ def test_trial_todo(self):
+ assert 0
+ test_trial_todo.todo = "mytodo"
+
+ def test_trial_todo_success(self):
+ pass
+ test_trial_todo_success.todo = "mytodo"
+
+ class TC2(unittest.TestCase):
+ def setup_class(cls):
+ pytest.skip("skip_in_setup_class")
+ def test_method(self):
+ pass
+ """
+ )
+ from _pytest.compat import _is_unittest_unexpected_success_a_failure
+
+ should_fail = _is_unittest_unexpected_success_a_failure()
+ result = testdir.runpytest("-rxs", *self.ignore_unclosed_socket_warning)
+ result.stdout.fnmatch_lines_random(
+ [
+ "*XFAIL*test_trial_todo*",
+ "*trialselfskip*",
+ "*skip_in_setup_class*",
+ "*iwanto*",
+ "*i2wanto*",
+ "*sys.version_info*",
+ "*skip_in_method*",
+ "*1 failed*4 skipped*3 xfailed*"
+ if should_fail
+ else "*4 skipped*3 xfail*1 xpass*",
+ ]
+ )
+ assert result.ret == (1 if should_fail else 0)
+
+ def test_trial_error(self, testdir):
+ testdir.makepyfile(
+ """
+ from twisted.trial.unittest import TestCase
+ from twisted.internet.defer import Deferred
+ from twisted.internet import reactor
+
+ class TC(TestCase):
+ def test_one(self):
+ crash
+
+ def test_two(self):
+ def f(_):
+ crash
+
+ d = Deferred()
+ d.addCallback(f)
+ reactor.callLater(0.3, d.callback, None)
+ return d
+
+ def test_three(self):
+ def f():
+ pass # will never get called
+ reactor.callLater(0.3, f)
+ # will crash at teardown
+
+ def test_four(self):
+ def f(_):
+ reactor.callLater(0.3, f)
+ crash
+
+ d = Deferred()
+ d.addCallback(f)
+ reactor.callLater(0.3, d.callback, None)
+ return d
+ # will crash both at test time and at teardown
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*ERRORS*",
+ "*DelayedCalls*",
+ "*test_four*",
+ "*NameError*crash*",
+ "*test_one*",
+ "*NameError*crash*",
+ "*test_three*",
+ "*DelayedCalls*",
+ "*test_two*",
+ "*crash*",
+ ]
+ )
+
+ def test_trial_pdb(self, testdir):
+ p = testdir.makepyfile(
+ """
+ from twisted.trial import unittest
+ import pytest
+ class TC(unittest.TestCase):
+ def test_hello(self):
+ assert 0, "hellopdb"
+ """
+ )
+ child = testdir.spawn_pytest(p)
+ child.expect("hellopdb")
+ child.sendeof()
+
+ def test_trial_testcase_skip_property(self, testdir):
+ testpath = testdir.makepyfile(
+ """
+ from twisted.trial import unittest
+ class MyTestCase(unittest.TestCase):
+ skip = 'dont run'
+ def test_func(self):
+ pass
+ """
+ )
+ reprec = testdir.inline_run(testpath, "-s")
+ reprec.assertoutcome(skipped=1)
+
+ def test_trial_testfunction_skip_property(self, testdir):
+ testpath = testdir.makepyfile(
+ """
+ from twisted.trial import unittest
+ class MyTestCase(unittest.TestCase):
+ def test_func(self):
+ pass
+ test_func.skip = 'dont run'
+ """
+ )
+ reprec = testdir.inline_run(testpath, "-s")
+ reprec.assertoutcome(skipped=1)
+
+ def test_trial_testcase_todo_property(self, testdir):
+ testpath = testdir.makepyfile(
+ """
+ from twisted.trial import unittest
+ class MyTestCase(unittest.TestCase):
+ todo = 'dont run'
+ def test_func(self):
+ assert 0
+ """
+ )
+ reprec = testdir.inline_run(testpath, "-s")
+ reprec.assertoutcome(skipped=1)
+
+ def test_trial_testfunction_todo_property(self, testdir):
+ testpath = testdir.makepyfile(
+ """
+ from twisted.trial import unittest
+ class MyTestCase(unittest.TestCase):
+ def test_func(self):
+ assert 0
+ test_func.todo = 'dont run'
+ """
+ )
+ reprec = testdir.inline_run(
+ testpath, "-s", *self.ignore_unclosed_socket_warning
+ )
+ reprec.assertoutcome(skipped=1)
+
+
+def test_djangolike_testcase(testdir):
+ # contributed by Morten Breekevold
+ testdir.makepyfile(
+ """
+ from unittest import TestCase, main
+
+ class DjangoLikeTestCase(TestCase):
+
+ def setUp(self):
+ print ("setUp()")
+
+ def test_presetup_has_been_run(self):
+ print ("test_thing()")
+ self.assertTrue(hasattr(self, 'was_presetup'))
+
+ def tearDown(self):
+ print ("tearDown()")
+
+ def __call__(self, result=None):
+ try:
+ self._pre_setup()
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except Exception:
+ import sys
+ result.addError(self, sys.exc_info())
+ return
+ super(DjangoLikeTestCase, self).__call__(result)
+ try:
+ self._post_teardown()
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except Exception:
+ import sys
+ result.addError(self, sys.exc_info())
+ return
+
+ def _pre_setup(self):
+ print ("_pre_setup()")
+ self.was_presetup = True
+
+ def _post_teardown(self):
+ print ("_post_teardown()")
+ """
+ )
+ result = testdir.runpytest("-s")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "*_pre_setup()*",
+ "*setUp()*",
+ "*test_thing()*",
+ "*tearDown()*",
+ "*_post_teardown()*",
+ ]
+ )
+
+
+def test_unittest_not_shown_in_traceback(testdir):
+ testdir.makepyfile(
+ """
+ import unittest
+ class t(unittest.TestCase):
+ def test_hello(self):
+ x = 3
+ self.assertEqual(x, 4)
+ """
+ )
+ res = testdir.runpytest()
+ assert "failUnlessEqual" not in res.stdout.str()
+
+
+def test_unorderable_types(testdir):
+ testdir.makepyfile(
+ """
+ import unittest
+ class TestJoinEmpty(unittest.TestCase):
+ pass
+
+ def make_test():
+ class Test(unittest.TestCase):
+ pass
+ Test.__name__ = "TestFoo"
+ return Test
+ TestFoo = make_test()
+ """
+ )
+ result = testdir.runpytest()
+ assert "TypeError" not in result.stdout.str()
+ assert result.ret == EXIT_NOTESTSCOLLECTED
+
+
+def test_unittest_typerror_traceback(testdir):
+ testdir.makepyfile(
+ """
+ import unittest
+ class TestJoinEmpty(unittest.TestCase):
+ def test_hello(self, arg1):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ assert "TypeError" in result.stdout.str()
+ assert result.ret == 1
+
+
+@pytest.mark.parametrize("runner", ["pytest", "unittest"])
+def test_unittest_expected_failure_for_failing_test_is_xfail(testdir, runner):
+ script = testdir.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ @unittest.expectedFailure
+ def test_failing_test_is_xfail(self):
+ assert False
+ if __name__ == '__main__':
+ unittest.main()
+ """
+ )
+ if runner == "pytest":
+ result = testdir.runpytest("-rxX")
+ result.stdout.fnmatch_lines(
+ ["*XFAIL*MyTestCase*test_failing_test_is_xfail*", "*1 xfailed*"]
+ )
+ else:
+ result = testdir.runpython(script)
+ result.stderr.fnmatch_lines(["*1 test in*", "*OK*(expected failures=1)*"])
+ assert result.ret == 0
+
+
+@pytest.mark.parametrize("runner", ["pytest", "unittest"])
+def test_unittest_expected_failure_for_passing_test_is_fail(testdir, runner):
+ script = testdir.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ @unittest.expectedFailure
+ def test_passing_test_is_fail(self):
+ assert True
+ if __name__ == '__main__':
+ unittest.main()
+ """
+ )
+ from _pytest.compat import _is_unittest_unexpected_success_a_failure
+
+ should_fail = _is_unittest_unexpected_success_a_failure()
+ if runner == "pytest":
+ result = testdir.runpytest("-rxX")
+ result.stdout.fnmatch_lines(
+ [
+ "*MyTestCase*test_passing_test_is_fail*",
+ "*1 failed*" if should_fail else "*1 xpassed*",
+ ]
+ )
+ else:
+ result = testdir.runpython(script)
+ result.stderr.fnmatch_lines(["*1 test in*", "*(unexpected successes=1)*"])
+
+ assert result.ret == (1 if should_fail else 0)
+
+
+@pytest.mark.parametrize(
+ "fix_type, stmt", [("fixture", "return"), ("yield_fixture", "yield")]
+)
+def test_unittest_setup_interaction(testdir, fix_type, stmt):
+ testdir.makepyfile(
+ """
+ import unittest
+ import pytest
+ class MyTestCase(unittest.TestCase):
+ @pytest.{fix_type}(scope="class", autouse=True)
+ def perclass(self, request):
+ request.cls.hello = "world"
+ {stmt}
+ @pytest.{fix_type}(scope="function", autouse=True)
+ def perfunction(self, request):
+ request.instance.funcname = request.function.__name__
+ {stmt}
+
+ def test_method1(self):
+ assert self.funcname == "test_method1"
+ assert self.hello == "world"
+
+ def test_method2(self):
+ assert self.funcname == "test_method2"
+
+ def test_classattr(self):
+ assert self.__class__.hello == "world"
+ """.format(
+ fix_type=fix_type, stmt=stmt
+ )
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines("*3 passed*")
+
+
+def test_non_unittest_no_setupclass_support(testdir):
+ testpath = testdir.makepyfile(
+ """
+ class TestFoo(object):
+ x = 0
+
+ @classmethod
+ def setUpClass(cls):
+ cls.x = 1
+
+ def test_method1(self):
+ assert self.x == 0
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.x = 1
+
+ def test_not_teareddown():
+ assert TestFoo.x == 0
+
+ """
+ )
+ reprec = testdir.inline_run(testpath)
+ reprec.assertoutcome(passed=2)
+
+
+def test_no_teardown_if_setupclass_failed(testdir):
+ testpath = testdir.makepyfile(
+ """
+ import unittest
+
+ class MyTestCase(unittest.TestCase):
+ x = 0
+
+ @classmethod
+ def setUpClass(cls):
+ cls.x = 1
+ assert False
+
+ def test_func1(self):
+ cls.x = 10
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.x = 100
+
+ def test_notTornDown():
+ assert MyTestCase.x == 1
+ """
+ )
+ reprec = testdir.inline_run(testpath)
+ reprec.assertoutcome(passed=1, failed=1)
+
+
+def test_issue333_result_clearing(testdir):
+ testdir.makeconftest(
+ """
+ import pytest
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_call(item):
+ yield
+ assert 0
+ """
+ )
+ testdir.makepyfile(
+ """
+ import unittest
+ class TestIt(unittest.TestCase):
+ def test_func(self):
+ 0/0
+ """
+ )
+
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(failed=1)
+
+
+def test_unittest_raise_skip_issue748(testdir):
+ testdir.makepyfile(
+ test_foo="""
+ import unittest
+
+ class MyTestCase(unittest.TestCase):
+ def test_one(self):
+ raise unittest.SkipTest('skipping due to reasons')
+ """
+ )
+ result = testdir.runpytest("-v", "-rs")
+ result.stdout.fnmatch_lines(
+ """
+ *SKIP*[1]*test_foo.py*skipping due to reasons*
+ *1 skipped*
+ """
+ )
+
+
+def test_unittest_skip_issue1169(testdir):
+ testdir.makepyfile(
+ test_foo="""
+ import unittest
+
+ class MyTestCase(unittest.TestCase):
+ @unittest.skip("skipping due to reasons")
+ def test_skip(self):
+ self.fail()
+ """
+ )
+ result = testdir.runpytest("-v", "-rs")
+ result.stdout.fnmatch_lines(
+ """
+ *SKIP*[1]*skipping due to reasons*
+ *1 skipped*
+ """
+ )
+
+
+def test_class_method_containing_test_issue1558(testdir):
+ testdir.makepyfile(
+ test_foo="""
+ import unittest
+
+ class MyTestCase(unittest.TestCase):
+ def test_should_run(self):
+ pass
+ def test_should_not_run(self):
+ pass
+ test_should_not_run.__test__ = False
+ """
+ )
+ reprec = testdir.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+@pytest.mark.issue(3498)
+@pytest.mark.parametrize(
+ "base", ["six.moves.builtins.object", "unittest.TestCase", "unittest2.TestCase"]
+)
+def test_usefixtures_marker_on_unittest(base, testdir):
+ module = base.rsplit(".", 1)[0]
+ pytest.importorskip(module)
+ testdir.makepyfile(
+ conftest="""
+ import pytest
+
+ @pytest.fixture(scope='function')
+ def fixture1(request, monkeypatch):
+ monkeypatch.setattr(request.instance, 'fixture1', True )
+
+
+ @pytest.fixture(scope='function')
+ def fixture2(request, monkeypatch):
+ monkeypatch.setattr(request.instance, 'fixture2', True )
+
+ def node_and_marks(item):
+ print(item.nodeid)
+ for mark in item.iter_markers():
+ print(" ", mark)
+
+ @pytest.fixture(autouse=True)
+ def my_marks(request):
+ node_and_marks(request.node)
+
+ def pytest_collection_modifyitems(items):
+ for item in items:
+ node_and_marks(item)
+
+ """
+ )
+
+ testdir.makepyfile(
+ """
+ import pytest
+ import {module}
+
+ class Tests({base}):
+ fixture1 = False
+ fixture2 = False
+
+ @pytest.mark.usefixtures("fixture1")
+ def test_one(self):
+ assert self.fixture1
+ assert not self.fixture2
+
+ @pytest.mark.usefixtures("fixture1", "fixture2")
+ def test_two(self):
+ assert self.fixture1
+ assert self.fixture2
+
+
+ """.format(
+ module=module, base=base
+ )
+ )
+
+ result = testdir.runpytest("-s")
+ result.assert_outcomes(passed=2)
diff --git a/third_party/python/pytest/testing/test_warnings.py b/third_party/python/pytest/testing/test_warnings.py
new file mode 100644
index 0000000000..c5bea052a4
--- /dev/null
+++ b/third_party/python/pytest/testing/test_warnings.py
@@ -0,0 +1,290 @@
+# -*- coding: utf8 -*-
+from __future__ import unicode_literals
+
+import sys
+
+import pytest
+
+
+WARNINGS_SUMMARY_HEADER = "warnings summary"
+
+
+@pytest.fixture
+def pyfile_with_warnings(testdir, request):
+ """
+ Create a test file which calls a function in a module which generates warnings.
+ """
+ testdir.syspathinsert()
+ test_name = request.function.__name__
+ module_name = test_name.lstrip("test_") + "_module"
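+ # note: str.lstrip strips a set of characters, not a literal prefix,
+ # so the derived module name may drop more than just "test_"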
+ testdir.makepyfile(
+ **{
+ module_name: """
+ import warnings
+ def foo():
+ warnings.warn(UserWarning("user warning"))
+ warnings.warn(RuntimeWarning("runtime warning"))
+ return 1
+ """,
+ test_name: """
+ import {module_name}
+ def test_func():
+ assert {module_name}.foo() == 1
+ """.format(
+ module_name=module_name
+ ),
+ }
+ )
+
+
+@pytest.mark.filterwarnings("always")
+def test_normal_flow(testdir, pyfile_with_warnings):
+ """
+ Check that the warnings section is displayed, containing test node ids followed by
+ all warnings generated by that test node.
+ """
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ "*test_normal_flow.py::test_func",
+ "*normal_flow_module.py:3: UserWarning: user warning",
+ '* warnings.warn(UserWarning("user warning"))',
+ "*normal_flow_module.py:4: RuntimeWarning: runtime warning",
+ '* warnings.warn(RuntimeWarning("runtime warning"))',
+ "* 1 passed, 2 warnings*",
+ ]
+ )
+ assert result.stdout.str().count("test_normal_flow.py::test_func") == 1
+
+
+@pytest.mark.filterwarnings("always")
+def test_setup_teardown_warnings(testdir, pyfile_with_warnings):
+ testdir.makepyfile(
+ """
+ import warnings
+ import pytest
+
+ @pytest.fixture
+ def fix():
+ warnings.warn(UserWarning("warning during setup"))
+ yield
+ warnings.warn(UserWarning("warning during teardown"))
+
+ def test_func(fix):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ "*test_setup_teardown_warnings.py:6: UserWarning: warning during setup",
+ '*warnings.warn(UserWarning("warning during setup"))',
+ "*test_setup_teardown_warnings.py:8: UserWarning: warning during teardown",
+ '*warnings.warn(UserWarning("warning during teardown"))',
+ "* 1 passed, 2 warnings*",
+ ]
+ )
+
+
+@pytest.mark.parametrize("method", ["cmdline", "ini"])
+def test_as_errors(testdir, pyfile_with_warnings, method):
+ args = ("-W", "error") if method == "cmdline" else ()
+ if method == "ini":
+ testdir.makeini(
+ """
+ [pytest]
+ filterwarnings= error
+ """
+ )
+ result = testdir.runpytest(*args)
+ result.stdout.fnmatch_lines(
+ [
+ "E UserWarning: user warning",
+ "as_errors_module.py:3: UserWarning",
+ "* 1 failed in *",
+ ]
+ )
+
+
+@pytest.mark.parametrize("method", ["cmdline", "ini"])
+def test_ignore(testdir, pyfile_with_warnings, method):
+ args = ("-W", "ignore") if method == "cmdline" else ()
+ if method == "ini":
+ testdir.makeini(
+ """
+ [pytest]
+ filterwarnings= ignore
+ """
+ )
+
+ result = testdir.runpytest(*args)
+ result.stdout.fnmatch_lines(["* 1 passed in *"])
+ assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()
+
+
+@pytest.mark.skipif(
+ sys.version_info < (3, 0), reason="warnings message is unicode is ok in python3"
+)
+@pytest.mark.filterwarnings("always")
+def test_unicode(testdir, pyfile_with_warnings):
+ testdir.makepyfile(
+ """
+ # -*- coding: utf8 -*-
+ import warnings
+ import pytest
+
+
+ @pytest.fixture
+ def fix():
+ warnings.warn(u"测试")
+ yield
+
+ def test_func(fix):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ "*test_unicode.py:8: UserWarning: \u6d4b\u8bd5*",
+ "* 1 passed, 1 warnings*",
+ ]
+ )
+
+
+@pytest.mark.skipif(
+ sys.version_info >= (3, 0),
+ reason="warnings message is broken as it is not str instance",
+)
+def test_py2_unicode(testdir, pyfile_with_warnings):
+ if (
+ getattr(sys, "pypy_version_info", ())[:2] == (5, 9)
+ and sys.platform.startswith("win")
+ ):
+ pytest.xfail("fails with unicode error on PyPy2 5.9 and Windows (#2905)")
+ testdir.makepyfile(
+ """
+ # -*- coding: utf8 -*-
+ import warnings
+ import pytest
+
+
+ @pytest.fixture
+ def fix():
+ warnings.warn(u"测试")
+ yield
+
+ @pytest.mark.filterwarnings('always')
+ def test_func(fix):
+ pass
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ "*test_py2_unicode.py:8: UserWarning: \\u6d4b\\u8bd5",
+ '*warnings.warn(u"\u6d4b\u8bd5")',
+ "*warnings.py:*: UnicodeWarning: Warning is using unicode non*",
+ "* 1 passed, 2 warnings*",
+ ]
+ )
+
+
+def test_py2_unicode_ascii(testdir):
+ """Ensure that our warning about 'unicode warnings containing non-ascii messages'
+ does not trigger with ascii-convertible messages"""
+ testdir.makeini("[pytest]")
+ testdir.makepyfile(
+ """
+ import pytest
+ import warnings
+
+ @pytest.mark.filterwarnings('always')
+ def test_func():
+ warnings.warn(u"hello")
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ '*warnings.warn(u"hello")',
+ "* 1 passed, 1 warnings in*",
+ ]
+ )
+
+
+def test_works_with_filterwarnings(testdir):
+ """Ensure our warnings capture does not mess with pre-installed filters (#2430)."""
+ testdir.makepyfile(
+ """
+ import warnings
+
+ class MyWarning(Warning):
+ pass
+
+ warnings.filterwarnings("error", category=MyWarning)
+
+ class TestWarnings(object):
+ def test_my_warning(self):
+ try:
+ warnings.warn(MyWarning("warn!"))
+ assert False
+ except MyWarning:
+ assert True
+ """
+ )
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines(["*== 1 passed in *"])
+
+
+@pytest.mark.parametrize("default_config", ["ini", "cmdline"])
+def test_filterwarnings_mark(testdir, default_config):
+ """
+ Test that the ``filterwarnings`` mark works and takes precedence over command line and ini options.
+ """
+ if default_config == "ini":
+ testdir.makeini(
+ """
+ [pytest]
+ filterwarnings = always
+ """
+ )
+ testdir.makepyfile(
+ """
+ import warnings
+ import pytest
+
+ @pytest.mark.filterwarnings('ignore::RuntimeWarning')
+ def test_ignore_runtime_warning():
+ warnings.warn(RuntimeWarning())
+
+ @pytest.mark.filterwarnings('error')
+ def test_warning_error():
+ warnings.warn(RuntimeWarning())
+
+ def test_show_warning():
+ warnings.warn(RuntimeWarning())
+ """
+ )
+ result = testdir.runpytest("-W always" if default_config == "cmdline" else "")
+ result.stdout.fnmatch_lines(["*= 1 failed, 2 passed, 1 warnings in *"])
+
+
+def test_non_string_warning_argument(testdir):
+ """Non-str argument passed to warning breaks pytest (#2956)"""
+ testdir.makepyfile(
+ """
+ import warnings
+ import pytest
+
+ def test():
+ warnings.warn(UserWarning(1, u'foo'))
+ """
+ )
+ result = testdir.runpytest("-W", "always")
+ result.stdout.fnmatch_lines(["*= 1 passed, 1 warnings in *"])
diff --git a/third_party/python/pytest/tox.ini b/third_party/python/pytest/tox.ini
new file mode 100644
index 0000000000..cb3846d542
--- /dev/null
+++ b/third_party/python/pytest/tox.ini
@@ -0,0 +1,212 @@
+[tox]
+minversion = 2.0
+distshare = {homedir}/.tox/distshare
+# make sure to update environment list in travis.yml and appveyor.yml
+envlist =
+ linting
+ py27
+ py34
+ py35
+ py36
+ py37
+ pypy
+ {py27,py36}-{pexpect,xdist,trial,numpy,pluggymaster}
+ py27-nobyte
+ doctesting
+ py36-freeze
+ docs
+
+[testenv]
+commands = pytest --lsof -ra {posargs:testing}
+passenv = USER USERNAME
+deps =
+ hypothesis>=3.56
+ nose
+ mock
+ requests
+
+[testenv:py27-subprocess]
+changedir = .
+deps =
+ pytest-xdist>=1.13
+ mock
+ nose
+commands =
+ pytest -n3 -ra --runpytest=subprocess {posargs:testing}
+
+
+[testenv:linting]
+skipsdist = True
+usedevelop = True
+basepython = python3.6
+deps = pre-commit
+commands = pre-commit run --all-files --show-diff-on-failure
+
+[testenv:py27-xdist]
+deps =
+ pytest-xdist>=1.13
+ mock
+ nose
+ hypothesis>=3.56
+changedir=testing
+commands =
+ pytest -n8 -ra {posargs:.}
+
+[testenv:py36-xdist]
+deps = {[testenv:py27-xdist]deps}
+commands = {[testenv:py27-xdist]commands}
+
+[testenv:py27-pexpect]
+changedir = testing
+platform = linux|darwin
+deps = pexpect
+commands =
+ pytest -ra test_pdb.py test_terminal.py test_unittest.py
+
+[testenv:py36-pexpect]
+changedir = {[testenv:py27-pexpect]changedir}
+platform = {[testenv:py27-pexpect]platform}
+deps = {[testenv:py27-pexpect]deps}
+commands = {[testenv:py27-pexpect]commands}
+
+[testenv:py27-nobyte]
+deps =
+ pytest-xdist>=1.13
+ hypothesis>=3.56
+ mock
+distribute = true
+changedir=testing
+setenv =
+ PYTHONDONTWRITEBYTECODE=1
+commands =
+ pytest -n3 -ra {posargs:.}
+
+[testenv:py27-trial]
+deps = twisted
+commands =
+ pytest -ra {posargs:testing/test_unittest.py}
+
+[testenv:py36-trial]
+deps = {[testenv:py27-trial]deps}
+commands = {[testenv:py27-trial]commands}
+
+[testenv:py27-numpy]
+deps = numpy
+commands=
+ pytest -ra {posargs:testing/python/approx.py}
+
+[testenv:py36-numpy]
+deps = {[testenv:py27-numpy]deps}
+commands = {[testenv:py27-numpy]commands}
+
+[testenv:py27-pluggymaster]
+setenv=
+ _PYTEST_SETUP_SKIP_PLUGGY_DEP=1
+deps =
+ {[testenv]deps}
+ git+https://github.com/pytest-dev/pluggy.git@master
+
+[testenv:py36-pluggymaster]
+setenv = {[testenv:py27-pluggymaster]setenv}
+deps = {[testenv:py27-pluggymaster]deps}
+
+[testenv:docs]
+skipsdist = True
+usedevelop = True
+changedir = doc/en
+deps =
+ attrs
+ more-itertools
+ PyYAML
+ sphinx
+ sphinxcontrib-trio
+
+commands =
+ sphinx-build -W -b html . _build
+
+[testenv:doctesting]
+basepython = python
+usedevelop = True
+skipsdist = True
+# ensure the given pyargs can't mean anything else
+changedir = doc/
+deps =
+ PyYAML
+commands =
+ pytest -ra en
+ pytest --doctest-modules --pyargs _pytest
+
+[testenv:regen]
+changedir = doc/en
+skipsdist = True
+basepython = python3.5
+deps =
+ sphinx
+ PyYAML
+ regendoc>=0.6.1
+whitelist_externals =
+ rm
+ make
+commands =
+ rm -rf /tmp/doc-exec*
+ make regen
+
+[testenv:fix-lint]
+skipsdist = True
+usedevelop = True
+deps =
+ autopep8
+commands =
+ autopep8 --in-place -r --max-line-length=120 --exclude=test_source_multiline_block.py _pytest testing setup.py pytest.py
+
+[testenv:jython]
+changedir = testing
+commands =
+ {envpython} {envbindir}/py.test-jython -ra {posargs}
+
+[testenv:py36-freeze]
+changedir = testing/freeze
+deps = pyinstaller
+commands =
+ {envpython} create_executable.py
+ {envpython} tox_run.py
+
+
+[testenv:coveralls]
+passenv = TRAVIS TRAVIS_JOB_ID TRAVIS_BRANCH COVERALLS_REPO_TOKEN
+usedevelop = True
+changedir = .
+deps =
+ {[testenv]deps}
+ coveralls
+commands =
+ coverage run --source=_pytest -m pytest testing
+ coverage report -m
+ coveralls
+
+[pytest]
+minversion = 2.0
+plugins = pytester
+addopts = -ra -p pytester --ignore=testing/cx_freeze
+rsyncdirs = tox.ini pytest.py _pytest testing
+python_files = test_*.py *_test.py testing/*/*.py
+python_classes = Test Acceptance
+python_functions = test
+norecursedirs = .tox ja .hg cx_freeze_source
+xfail_strict=true
+filterwarnings =
+ error
+ # produced by path.local
+ ignore:bad escape.*:DeprecationWarning:re
+ # produced by path.readlines
+ ignore:.*U.*mode is deprecated:DeprecationWarning
+ # produced by pytest-xdist
+ ignore:.*type argument to addoption.*:DeprecationWarning
+ # produced by python >=3.5 on execnet (pytest-xdist)
+ ignore:.*inspect.getargspec.*deprecated, use inspect.signature.*:DeprecationWarning
+ # ignore warning about package resolution using __spec__ or __package__
+ # should be a temporary solution, see #3061 for discussion
+ ignore:.*can't resolve package from __spec__ or __package__.*:ImportWarning
+
+[flake8]
+max-line-length = 120
diff --git a/third_party/python/python-hglib/LICENSE b/third_party/python/python-hglib/LICENSE
new file mode 100644
index 0000000000..25d01ceb87
--- /dev/null
+++ b/third_party/python/python-hglib/LICENSE
@@ -0,0 +1,20 @@
+Copyright (c) 2011 Matt Mackall and other contributors
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file
diff --git a/third_party/python/python-hglib/Makefile b/third_party/python/python-hglib/Makefile
new file mode 100644
index 0000000000..ad26093755
--- /dev/null
+++ b/third_party/python/python-hglib/Makefile
@@ -0,0 +1,17 @@
+PYTHON=python
+help:
+ @echo 'Commonly used make targets:'
+ @echo ' tests - run all tests in the automatic test suite'
+
+all: help
+
+.PHONY: tests
+
+MANIFEST.in:
+ hg manifest | sed -e 's/^/include /' > MANIFEST.in
+
+dist: MANIFEST.in
+ TAR_OPTIONS="--owner=root --group=root --mode=u+w,go-w,a+rX-s" $(PYTHON) setup.py -q sdist
+
+tests:
+ $(PYTHON) test.py --with-doctest
diff --git a/third_party/python/python-hglib/PKG-INFO b/third_party/python/python-hglib/PKG-INFO
new file mode 100644
index 0000000000..e32cabb04d
--- /dev/null
+++ b/third_party/python/python-hglib/PKG-INFO
@@ -0,0 +1,26 @@
+Metadata-Version: 1.1
+Name: python-hglib
+Version: 2.4
+Summary: Mercurial Python library
+Home-page: http://selenic.com/repo/python-hglib
+Author: Idan Kamara
+Author-email: idankk86@gmail.com
+License: MIT
+Description: python-hglib
+ ============
+
+ python-hglib is a library with a fast, convenient interface to Mercurial.
+ It uses Mercurial's command server for communication with hg.
+
+ Installation is standard:
+
+ $ python setup.py install
+
+Platform: UNKNOWN
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
diff --git a/third_party/python/python-hglib/README b/third_party/python/python-hglib/README
new file mode 100644
index 0000000000..01c45e7a3b
--- /dev/null
+++ b/third_party/python/python-hglib/README
@@ -0,0 +1,9 @@
+python-hglib
+============
+
+python-hglib is a library with a fast, convenient interface to Mercurial.
+It uses Mercurial's command server for communication with hg.
+
+Installation is standard:
+
+ $ python setup.py install
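
As a brief illustrative sketch of the interface described above (the repository
path and printed fields are examples, not prescribed by the package):

    # Illustrative only; assumes a Mercurial repository in the current directory.
    import hglib

    client = hglib.open('.')            # start a command server for the repo
    try:
        tip = client.tip()              # newest revision as a named tuple
        print(tip.rev, tip.desc)
        for code, path in client.status():
            print(code, path)           # e.g. b'M' and a modified file's path
    finally:
        client.close()                  # shut the command server down
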
diff --git a/third_party/python/python-hglib/examples/stats.py b/third_party/python/python-hglib/examples/stats.py
new file mode 100644
index 0000000000..f54a59236e
--- /dev/null
+++ b/third_party/python/python-hglib/examples/stats.py
@@ -0,0 +1,35 @@
+# stats - get stats on the given repo
+
+import sys
+import hglib
+
+# figure out what repo path to use
+repo = '.'
+if len(sys.argv) > 1:
+ repo = sys.argv[1]
+
+# connect to hg
+client = hglib.open(repo)
+
+# gather some stats
+revs = int(client.tip().rev)
+files = len(list(client.manifest()))
+heads = len(client.heads())
+branches = len(client.branches())
+tags = len(client.tags()) - 1 # don't count tip
+
+authors = {}
+for e in client.log():
+ authors[e.author] = True
+
+merges = 0
+for e in client.log(onlymerges=True):
+ merges += 1
+
+print("%d revisions" % revs)
+print("%d merges" % merges)
+print("%d files" % files)
+print("%d heads" % heads)
+print("%d branches" % branches)
+print("%d tags" % tags)
+print("%d authors" % len(authors))

diff --git a/third_party/python/python-hglib/hglib/__init__.py b/third_party/python/python-hglib/hglib/__init__.py
new file mode 100644
index 0000000000..a522d33382
--- /dev/null
+++ b/third_party/python/python-hglib/hglib/__init__.py
@@ -0,0 +1,40 @@
+import subprocess
+from hglib import client, util, error
+
+HGPATH = 'hg'
+
+def open(path=None, encoding=None, configs=None):
+ '''starts a cmdserver for the given path (or for a repository found
+ in the cwd). HGENCODING is set to the given encoding. configs is a
+    list of 'key=value' strings, similar to those passed to hg --config.
+ '''
+ return client.hgclient(path, encoding, configs)
+
+def init(dest=None, ssh=None, remotecmd=None, insecure=False,
+ encoding=None, configs=None):
+ args = util.cmdbuilder('init', dest, e=ssh, remotecmd=remotecmd,
+ insecure=insecure)
+
+ args.insert(0, HGPATH)
+ proc = util.popen(args)
+ out, err = proc.communicate()
+ if proc.returncode:
+ raise error.CommandError(args, proc.returncode, out, err)
+
+ return client.hgclient(dest, encoding, configs, connect=False)
+
+def clone(source=None, dest=None, noupdate=False, updaterev=None, rev=None,
+ branch=None, pull=False, uncompressed=False, ssh=None, remotecmd=None,
+ insecure=False, encoding=None, configs=None):
+ args = util.cmdbuilder('clone', source, dest, noupdate=noupdate,
+ updaterev=updaterev, rev=rev, branch=branch,
+ pull=pull, uncompressed=uncompressed,
+ e=ssh, remotecmd=remotecmd, insecure=insecure)
+
+ args.insert(0, HGPATH)
+ proc = util.popen(args)
+ out, err = proc.communicate()
+ if proc.returncode:
+ raise error.CommandError(args, proc.returncode, out, err)
+
+ return client.hgclient(dest, encoding, configs, connect=False)
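
A minimal sketch of how the helpers above fit together ('repo' and 'copy' are
placeholder paths used only for illustration):

    # Illustrative only; paths are placeholders.
    import hglib

    hglib.init(dest='repo')                    # runs 'hg init repo'
    client = hglib.open('repo')                # start a command server for it
    print(client.root())                       # absolute path of the repository
    client.close()
    hglib.clone(source='repo', dest='copy')    # runs 'hg clone repo copy'
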
diff --git a/third_party/python/python-hglib/hglib/client.py b/third_party/python/python-hglib/hglib/client.py
new file mode 100644
index 0000000000..4eababdf40
--- /dev/null
+++ b/third_party/python/python-hglib/hglib/client.py
@@ -0,0 +1,1717 @@
+import struct, re, datetime
+import hglib
+from hglib import error, util, templates, merge, context
+
+from hglib.util import b, cmdbuilder, BytesIO, strtobytes
+
+class revision(tuple):
+ def __new__(cls, rev, node, tags, branch, author, desc, date):
+ return tuple.__new__(cls, (rev, node, tags, branch, author, desc, date))
+
+ @property
+ def rev(self):
+ return self[0]
+
+ @property
+ def node(self):
+ return self[1]
+
+ @property
+ def tags(self):
+ return self[2]
+
+ @property
+ def branch(self):
+ return self[3]
+
+ @property
+ def author(self):
+ return self[4]
+
+ @property
+ def desc(self):
+ return self[5]
+
+ @property
+ def date(self):
+ return self[6]
+
+class hgclient(object):
+ inputfmt = '>I'
+ outputfmt = '>cI'
+ outputfmtsize = struct.calcsize(outputfmt)
+ retfmt = '>i'
+
+ def __init__(self, path, encoding, configs, connect=True):
+ self._args = [hglib.HGPATH, 'serve', '--cmdserver', 'pipe',
+ '--config', 'ui.interactive=True']
+ if path:
+ self._args += ['-R', path]
+ if configs:
+ for config in configs:
+ self._args += ['--config', config]
+ self._env = {'HGPLAIN': '1'}
+ if encoding:
+ self._env['HGENCODING'] = encoding
+
+ self.server = None
+ self._version = None
+ # include the hidden changesets if True
+ self.hidden = None
+
+ self._cbout = None
+ self._cberr = None
+ self._cbprompt = None
+
+ if connect:
+ self.open()
+
+ self._protocoltracefn = None
+
+ def setcbout(self, cbout):
+ """
+ cbout is a function that will be called with the stdout data of
+        the command as it runs. Call with None to stop getting callbacks.
+ """
+ self._cbout = cbout
+
+ def setcberr(self, cberr):
+ """
+ cberr is a function that will be called with the stderr data of
+        the command as it runs. Call with None to stop getting callbacks.
+ """
+ self._cberr = cberr
+
+ def setcbprompt(self, cbprompt):
+ """
+ cbprompt is used to reply to prompts by the server
+ It receives the max number of bytes to return and the
+ contents of stdout received so far.
+
+        Call with None to stop getting callbacks.
+
+ cbprompt is never called from merge() or import_()
+ which already handle the prompt.
+ """
+ self._cbprompt = cbprompt
+
+ def setprotocoltrace(self, tracefn=None):
+ """
+        If tracefn is None, no trace calls will be made.
+        Otherwise tracefn is called as tracefn(direction, channel, data):
+        direction is 'r' for a read from the server and 'w' for a write to
+        the server; channel is always None when direction is 'w',
+        and the channel identifier when direction is 'r'.
+ """
+ self._protocoltracefn = tracefn
+
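+    # Illustrative note (not part of the upstream source): with a connected
+    # client, the callbacks above could be wired up roughly as
+    #   client.setcbout(lambda data: sys.stderr.write(data.decode('utf-8', 'replace')))
+    #   client.setprotocoltrace(lambda direction, channel, data:
+    #                           print(direction, channel, len(data)))
+    # Each callback receives raw bytes exactly as exchanged with the server.
+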
+ def __enter__(self):
+ if self.server is None:
+ self.open()
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.close()
+
+ def _readhello(self):
+ """ read the hello message the server sends when started """
+ ch, msg = self._readchannel()
+ assert ch == b('o')
+
+ msg = msg.split(b('\n'))
+
+ self.capabilities = msg[0][len(b('capabilities: ')):]
+ if not self.capabilities:
+ raise error.ResponseError(
+ "bad hello message: expected 'capabilities: '"
+ ", got %r" % msg[0])
+
+ self.capabilities = set(self.capabilities.split())
+
+ # at the very least the server should be able to run commands
+ assert b('runcommand') in self.capabilities
+
+ self._encoding = msg[1][len(b('encoding: ')):]
+ if not self._encoding:
+ raise error.ResponseError("bad hello message: expected 'encoding: '"
+ ", got %r" % msg[1])
+
+ def _readchannel(self):
+ data = self.server.stdout.read(hgclient.outputfmtsize)
+ if not data:
+ raise error.ServerError()
+ channel, length = struct.unpack(hgclient.outputfmt, data)
+ if channel in b('IL'):
+ return channel, length
+ else:
+ return channel, self.server.stdout.read(length)
+
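+    # Framing note (informational): every server message begins with a
+    # one-byte channel identifier and a big-endian uint32 ('>cI').  On the
+    # output channels that integer is the length of the payload that follows;
+    # on the input channels 'I' and 'L' it is the maximum number of bytes the
+    # server is prepared to read back, which is why no payload is read above.
+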
+ @staticmethod
+ def _parserevs(splitted):
+ '''splitted is a list of fields according to our rev.style, where
+        every seven fields compose one revision.
+ '''
+ revs = []
+ for rev in util.grouper(7, splitted):
+ # truncate the timezone and convert to a local datetime
+ posixtime = float(rev[6].split(b('.'), 1)[0])
+ dt = datetime.datetime.fromtimestamp(posixtime)
+ revs.append(revision(rev[0], rev[1], rev[2], rev[3],
+ rev[4], rev[5], dt))
+ return revs
+
+ def runcommand(self, args, inchannels, outchannels):
+ def writeblock(data):
+ if self._protocoltracefn is not None:
+ self._protocoltracefn('w', None, data)
+ self.server.stdin.write(struct.pack(self.inputfmt, len(data)))
+ self.server.stdin.write(data)
+ self.server.stdin.flush()
+
+ if not self.server:
+ raise ValueError("server not connected")
+
+ self.server.stdin.write(b('runcommand\n'))
+ writeblock(b('\0').join(args))
+
+ while True:
+ channel, data = self._readchannel()
+ if self._protocoltracefn is not None:
+ self._protocoltracefn('r', channel, data)
+
+ # input channels
+ if channel in inchannels:
+ writeblock(inchannels[channel](data))
+ # output channels
+ elif channel in outchannels:
+ outchannels[channel](data)
+ # result channel, command finished
+ elif channel == b('r'):
+ return struct.unpack(hgclient.retfmt, data)[0]
+ # a channel that we don't know and can't ignore
+ elif channel.isupper():
+ raise error.ResponseError(
+ "unexpected data on required channel '%s'" % channel)
+ # optional channel
+ else:
+ pass
+
+ def rawcommand(self, args, eh=None, prompt=None, input=None):
+ """
+ args is the cmdline (usually built using util.cmdbuilder)
+
+ eh is an error handler that is passed the return code, stdout and stderr
+ If no eh is given, we raise a CommandError if ret != 0
+
+ prompt is used to reply to prompts by the server
+ It receives the max number of bytes to return and the contents of stdout
+ received so far
+
+ input is used to reply to bulk data requests by the server
+ It receives the max number of bytes to return
+ """
+ out, err = BytesIO(), BytesIO()
+ outchannels = {}
+ if self._cbout is None:
+ outchannels[b('o')] = out.write
+ else:
+ def out_handler(data):
+ out.write(data)
+ self._cbout(data)
+ outchannels[b('o')] = out_handler
+ if self._cberr is None:
+ outchannels[b('e')] = err.write
+ else:
+ def err_handler(data):
+ err.write(data)
+ self._cberr(data)
+ outchannels[b('e')] = err_handler
+
+ inchannels = {}
+ if prompt is None:
+ prompt = self._cbprompt
+ if prompt is not None:
+ def func(size):
+ reply = prompt(size, out.getvalue())
+ return reply
+ inchannels[b('L')] = func
+ if input is not None:
+ inchannels[b('I')] = input
+
+ ret = self.runcommand(args, inchannels, outchannels)
+ out, err = out.getvalue(), err.getvalue()
+
+ if ret:
+ if eh is None:
+ raise error.CommandError(args, ret, out, err)
+ else:
+ return eh(ret, out, err)
+ return out
+
+ def open(self):
+ if self.server is not None:
+ raise ValueError('server already open')
+
+ self.server = util.popen(self._args, self._env)
+ try:
+ self._readhello()
+ except error.ServerError:
+ ret, serr = self._close()
+ raise error.ServerError('server exited with status %d: %s'
+ % (ret, serr.strip()))
+ return self
+
+ def close(self):
+ """Closes the command server instance and waits for it to exit,
+ returns the exit code.
+
+ Attempting to call any function afterwards that needs to
+ communicate with the server will raise a ValueError.
+ """
+ return self._close()[0]
+
+ def _close(self):
+ _sout, serr = self.server.communicate()
+ ret = self.server.returncode
+ self.server = None
+ return ret, serr
+
+ def add(self, files=[], dryrun=False, subrepos=False, include=None,
+ exclude=None):
+ """
+ Add the specified files on the next commit.
+ If no files are given, add all files to the repository.
+
+        dryrun - do not perform actions
+ subrepos - recurse into subrepositories
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+
+ Return whether all given files were added.
+ """
+ if not isinstance(files, list):
+ files = [files]
+
+ args = cmdbuilder(b('add'), n=dryrun, S=subrepos, I=include, X=exclude,
+ *files)
+
+ eh = util.reterrorhandler(args)
+ self.rawcommand(args, eh=eh)
+
+ return bool(eh)
+
+ def addremove(self, files=[], similarity=None, dryrun=False, include=None,
+ exclude=None):
+ """Add all new files and remove all missing files from the repository.
+
+ New files are ignored if they match any of the patterns in
+ ".hgignore". As with add, these changes take effect at the
+ next commit.
+
+ similarity - used to detect renamed files. With a parameter
+ greater than 0, this compares every removed file with every
+ added file and records those similar enough as renames. This
+ option takes a percentage between 0 (disabled) and 100 (files
+ must be identical) as its parameter. Detecting renamed files
+ this way can be expensive. After using this option, "hg status
+ -C" can be used to check which files were identified as moved
+ or renamed.
+
+        dryrun - do not perform actions
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+
+ Return True if all files are successfully added.
+
+ """
+ if not isinstance(files, list):
+ files = [files]
+
+ args = cmdbuilder(b('addremove'), s=similarity, n=dryrun, I=include,
+ X=exclude, *files)
+
+ eh = util.reterrorhandler(args)
+ self.rawcommand(args, eh=eh)
+
+ return bool(eh)
+
+ def annotate(self, files, rev=None, nofollow=False, text=False, user=False,
+ file=False, date=False, number=False, changeset=False,
+ line=False, verbose=False, include=None, exclude=None):
+ """
+ Show changeset information by line for each file in files.
+
+ rev - annotate the specified revision
+ nofollow - don't follow copies and renames
+ text - treat all files as text
+ user - list the author (long with -v)
+ file - list the filename
+ date - list the date
+ number - list the revision number (default)
+ changeset - list the changeset
+ line - show line number at the first appearance
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+
+ Yields a (info, contents) tuple for each line in a file. Info is a space
+ separated string according to the given options.
+ """
+ if not isinstance(files, list):
+ files = [files]
+
+ args = cmdbuilder(b('annotate'), r=rev, no_follow=nofollow, a=text,
+ u=user, f=file, d=date, n=number, c=changeset,
+ l=line, v=verbose, I=include, X=exclude,
+ hidden=self.hidden, *files)
+
+ out = self.rawcommand(args)
+
+ for line in out.splitlines():
+ yield tuple(line.split(b(': '), 1))
+
+ def archive(self, dest, rev=None, nodecode=False, prefix=None, type=None,
+ subrepos=False, include=None, exclude=None):
+ """Create an unversioned archive of a repository revision.
+
+ The exact name of the destination archive or directory is given using a
+ format string; see export for details.
+
+ Each member added to an archive file has a directory prefix
+ prepended. Use prefix to specify a format string for the
+ prefix. The default is the basename of the archive, with
+ suffixes removed.
+
+ dest - destination path
+ rev - revision to distribute. The revision used is the parent of the
+ working directory if one isn't given.
+
+ nodecode - do not pass files through decoders
+ prefix - directory prefix for files in archive
+ type - type of distribution to create. The archive type is automatically
+ detected based on file extension if one isn't given.
+
+ Valid types are:
+
+ "files" a directory full of files (default)
+ "tar" tar archive, uncompressed
+ "tbz2" tar archive, compressed using bzip2
+ "tgz" tar archive, compressed using gzip
+ "uzip" zip archive, uncompressed
+ "zip" zip archive, compressed using deflate
+
+ subrepos - recurse into subrepositories
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+
+ """
+ args = cmdbuilder(b('archive'), dest, r=rev,
+ no_decode=nodecode, p=prefix,
+ t=type, S=subrepos, I=include, X=exclude,
+ hidden=self.hidden)
+
+ self.rawcommand(args)
+
+ def backout(self, rev, merge=False, parent=None, tool=None, message=None,
+ logfile=None, date=None, user=None):
+ """Prepare a new changeset with the effect of rev undone in the current
+ working directory.
+
+ If rev is the parent of the working directory, then this new
+ changeset is committed automatically. Otherwise, hg needs to
+ merge the changes and the merged result is left uncommitted.
+
+ rev - revision to backout
+ merge - merge with old dirstate parent after backout
+ parent - parent to choose when backing out merge
+ tool - specify merge tool
+ message - use text as commit message
+ logfile - read commit message from file
+ date - record the specified date as commit date
+ user - record the specified user as committer
+
+ """
+ if message and logfile:
+ raise ValueError("cannot specify both a message and a logfile")
+
+ args = cmdbuilder(b('backout'), r=rev, merge=merge, parent=parent,
+ t=tool, m=message, l=logfile, d=date, u=user,
+ hidden=self.hidden)
+
+ self.rawcommand(args)
+
+ def bookmark(self, name, rev=None, force=False, delete=False,
+ inactive=False, rename=None):
+ """
+ Set a bookmark on the working directory's parent revision or rev,
+ with the given name.
+
+ name - bookmark name
+ rev - revision to bookmark
+ force - bookmark even if another bookmark with the same name exists
+ delete - delete the given bookmark
+ inactive - do not mark the new bookmark active
+ rename - rename the bookmark given by rename to name
+ """
+ args = cmdbuilder(b('bookmark'), name, r=rev, f=force, d=delete,
+ i=inactive, m=rename)
+
+ self.rawcommand(args)
+
+ def bookmarks(self):
+ """
+ Return the bookmarks as a list of (name, rev, node) and the index of the
+ current one.
+
+ If there isn't a current one, -1 is returned as the index.
+ """
+ args = cmdbuilder(b('bookmarks'), hidden=self.hidden)
+ out = self.rawcommand(args)
+
+ bms = []
+ current = -1
+ if out.rstrip() != b('no bookmarks set'):
+ for line in out.splitlines():
+ iscurrent, line = line[0:3], line[3:]
+ if b('*') in iscurrent:
+ current = len(bms)
+ name, line = line.split(b(' '), 1)
+ rev, node = line.split(b(':'))
+ bms.append((name, int(rev), node))
+ return bms, current
+
+ def branch(self, name=None, clean=False, force=False):
+ """When name isn't given, return the current branch name. Otherwise
+ set the working directory branch name (the branch will not
+ exist in the repository until the next commit). Standard
+ practice recommends that primary development take place on the
+ 'default' branch.
+
+ When clean is True, reset and return the working directory
+ branch to that of the parent of the working directory,
+ negating a previous branch change.
+
+ name - new branch name
+ clean - reset branch name to parent branch name
+ force - set branch name even if it shadows an existing branch
+
+ """
+ if name and clean:
+ raise ValueError('cannot use both name and clean')
+
+ args = cmdbuilder(b('branch'), name, f=force, C=clean)
+ out = self.rawcommand(args).rstrip()
+
+ if name:
+ return name
+ elif not clean:
+ return out
+ else:
+ # len('reset working directory to branch ') == 34
+ return out[34:]
+
+ def branches(self, active=False, closed=False):
+ """
+ Returns the repository's named branches as a list of (name, rev, node).
+
+ active - show only branches that have unmerged heads
+ closed - show normal and closed branches
+ """
+ args = cmdbuilder(b('branches'), a=active, c=closed, hidden=self.hidden)
+ out = self.rawcommand(args)
+
+ branches = []
+ for line in out.rstrip().splitlines():
+ namerev, node = line.rsplit(b(':'), 1)
+ name, rev = namerev.rsplit(b(' '), 1)
+ name = name.rstrip()
+ node = node.split()[0] # get rid of ' (inactive)'
+ branches.append((name, int(rev), node))
+ return branches
+
+ def bundle(self, file, destrepo=None, rev=[], branch=[], base=[], all=False,
+ force=False, type=None, ssh=None, remotecmd=None,
+ insecure=False):
+ """Generate a compressed changegroup file collecting changesets not
+ known to be in another repository.
+
+ If destrepo isn't given, then hg assumes the destination will have all
+ the nodes you specify with base. To create a bundle containing all
+ changesets, use all (or set base to 'null').
+
+ file - destination file name
+ destrepo - repository to look for changes
+ rev - a changeset intended to be added to the destination
+ branch - a specific branch you would like to bundle
+ base - a base changeset assumed to be available at the destination
+ all - bundle all changesets in the repository
+ type - bundle compression type to use, available compression
+ methods are: none, bzip2, and gzip (default: bzip2)
+
+ force - run even when the destrepo is unrelated
+ ssh - specify ssh command to use
+ remotecmd - specify hg command to run on the remote side
+ insecure - do not verify server certificate (ignoring
+ web.cacerts config)
+
+ Return True if a bundle was created, False if no changes were found.
+
+ """
+ args = cmdbuilder(b('bundle'), file, destrepo, f=force, r=rev, b=branch,
+ base=base, a=all, t=type, e=ssh, remotecmd=remotecmd,
+ insecure=insecure, hidden=self.hidden)
+
+ eh = util.reterrorhandler(args)
+ self.rawcommand(args, eh=eh)
+
+ return bool(eh)
+
+ def cat(self, files, rev=None, output=None):
+ """Return a string containing the specified files as they were at the
+ given revision. If no revision is given, the parent of the working
+ directory is used, or tip if no revision is checked out.
+
+ If output is given, writes the contents to the specified file.
+ The name of the file is given using a format string. The
+ formatting rules are the same as for the export command, with
+ the following additions:
+
+ "%s" basename of file being printed
+ "%d" dirname of file being printed, or '.' if in repository root
+ "%p" root-relative path name of file being printed
+
+ """
+ args = cmdbuilder(b('cat'), r=rev, o=output, hidden=self.hidden, *files)
+ out = self.rawcommand(args)
+
+ if not output:
+ return out
+
+ def clone(self, source=b('.'), dest=None, branch=None, updaterev=None,
+ revrange=None):
+ """
+ Create a copy of an existing repository specified by source in a new
+ directory dest.
+
+ If dest isn't specified, it defaults to the basename of source.
+
+ branch - clone only the specified branch
+ updaterev - revision, tag or branch to check out
+ revrange - include the specified changeset
+ """
+ args = cmdbuilder(b('clone'), source, dest, b=branch,
+ u=updaterev, r=revrange)
+ self.rawcommand(args)
+
+ def init(self, dest, ssh=None, remotecmd=None, insecure=False):
+ args = util.cmdbuilder('init', dest, e=ssh, remotecmd=remotecmd,
+ insecure=insecure)
+ self.rawcommand(args)
+
+ def commit(self, message=None, logfile=None, addremove=False,
+ closebranch=False, date=None, user=None, include=None,
+ exclude=None, amend=False):
+ """
+ Commit changes reported by status into the repository.
+
+ message - the commit message
+ logfile - read commit message from file
+ addremove - mark new/missing files as added/removed before committing
+ closebranch - mark a branch as closed, hiding it from the branch list
+ date - record the specified date as commit date
+ user - record the specified user as committer
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+ amend - amend the parent of the working dir
+ """
+ if amend and message is None and logfile is None:
+ # retrieve current commit message
+ message = self.log(b('.'))[0][5]
+ if message is None and logfile is None and not amend:
+ raise ValueError("must provide at least a message or a logfile")
+ elif message and logfile:
+ raise ValueError("cannot specify both a message and a logfile")
+
+ # --debug will print the committed cset
+ args = cmdbuilder(b('commit'), debug=True, m=message, A=addremove,
+ close_branch=closebranch, d=date, u=user, l=logfile,
+ I=include, X=exclude, amend=amend)
+ out = self.rawcommand(args)
+ m = re.search(b(r'^committed changeset (\d+):([0-9a-f]+)'), out,
+ re.MULTILINE)
+ if not m:
+ raise ValueError('revision and node not found in hg output: %r'
+ % out)
+ rev, node = m.groups()
+ return int(rev), node
+
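+    # Example (illustrative): rev, node = client.commit(b'fix typo') would
+    # return the local revision number and the 40-character changeset hash,
+    # parsed from the --debug output requested above.
+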
+ def config(self, names=[], untrusted=False, showsource=False):
+ """Return a list of (section, key, value) config settings from all
+ hgrc files
+
+ When showsource is specified, return (source, section, key, value) where
+ source is of the form filename:[line]
+
+ """
+ def splitline(s):
+ k, value = s.rstrip().split(b('='), 1)
+ section, key = k.split(b('.'), 1)
+ return section, key, value
+
+ if not isinstance(names, list):
+ names = [names]
+
+ args = cmdbuilder(b('showconfig'), u=untrusted, debug=showsource,
+ *names)
+ out = self.rawcommand(args)
+
+ conf = []
+ if showsource:
+ out = util.skiplines(out, b('read config from: '))
+ for line in out.splitlines():
+ m = re.match(b(r"(.+?:(?:\d+:)?) (.*)"), line)
+ t = splitline(m.group(2))
+ conf.append((m.group(1)[:-1], t[0], t[1], t[2]))
+ else:
+ for line in out.splitlines():
+ conf.append(splitline(line))
+
+ return conf
+
+ @property
+ def encoding(self):
+ """
+ Return the server's encoding (as reported in the hello message).
+ """
+        if b('getencoding') not in self.capabilities:
+            raise error.CapabilityError('getencoding')
+
+ if not self._encoding:
+ self.server.stdin.write(b('getencoding\n'))
+ self._encoding = self._readfromchannel('r')
+
+ return self._encoding
+
+ def copy(self, source, dest, after=False, force=False, dryrun=False,
+ include=None, exclude=None):
+ """Mark dest as having copies of source files. If dest is a
+ directory, copies are put in that directory. If dest is a
+ file, then source must be a string.
+
+ Returns True on success, False if errors are encountered.
+
+ source - a file or a list of files
+ dest - a destination file or directory
+ after - record a copy that has already occurred
+ force - forcibly copy over an existing managed file
+ dryrun - do not perform actions, just print output
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+
+ """
+ if not isinstance(source, list):
+ source = [source]
+
+ source.append(dest)
+ args = cmdbuilder(b('copy'), A=after, f=force, n=dryrun,
+ I=include, X=exclude, *source)
+
+ eh = util.reterrorhandler(args)
+ self.rawcommand(args, eh=eh)
+
+ return bool(eh)
+
+ def diff(self, files=[], revs=[], change=None, text=False,
+ git=False, nodates=False, showfunction=False,
+ reverse=False, ignoreallspace=False,
+ ignorespacechange=False, ignoreblanklines=False,
+ unified=None, stat=False, subrepos=False, include=None,
+ exclude=None):
+ """
+ Return differences between revisions for the specified files.
+
+ revs - a revision or a list of two revisions to diff
+ change - change made by revision
+ text - treat all files as text
+ git - use git extended diff format
+ nodates - omit dates from diff headers
+ showfunction - show which function each change is in
+ reverse - produce a diff that undoes the changes
+ ignoreallspace - ignore white space when comparing lines
+ ignorespacechange - ignore changes in the amount of white space
+ ignoreblanklines - ignore changes whose lines are all blank
+ unified - number of lines of context to show
+ stat - output diffstat-style summary of changes
+ subrepos - recurse into subrepositories
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+ """
+ if change and revs:
+ raise ValueError('cannot specify both change and rev')
+
+ args = cmdbuilder(b('diff'), r=list(map(strtobytes, revs)), c=change,
+ a=text, g=git, nodates=nodates,
+ p=showfunction, reverse=reverse,
+ w=ignoreallspace, b=ignorespacechange,
+ B=ignoreblanklines, U=unified, stat=stat,
+ S=subrepos, I=include, X=exclude, hidden=self.hidden,
+ *files)
+
+ return self.rawcommand(args)
+
+ def export(self, revs, output=None, switchparent=False,
+ text=False, git=False, nodates=False):
+ """Return the header and diffs for one or more changesets. When
+ output is given, dumps to file. The name of the file is given
+ using a format string. The formatting rules are as follows:
+
+ "%%" literal "%" character
+ "%H" changeset hash (40 hexadecimal digits)
+ "%N" number of patches being generated
+ "%R" changeset revision number
+ "%b" basename of the exporting repository
+ "%h" short-form changeset hash (12 hexadecimal digits)
+ "%n" zero-padded sequence number, starting at 1
+ "%r" zero-padded changeset revision number
+
+ output - print output to file with formatted name
+ switchparent - diff against the second parent
+ rev - a revision or list of revisions to export
+ text - treat all files as text
+ git - use git extended diff format
+ nodates - omit dates from diff headers
+
+ """
+ if not isinstance(revs, list):
+ revs = [revs]
+ args = cmdbuilder(b('export'), o=output, switch_parent=switchparent,
+ a=text, g=git, nodates=nodates, hidden=self.hidden,
+ *revs)
+
+ out = self.rawcommand(args)
+
+ if output is None:
+ return out
+
+ def forget(self, files, include=None, exclude=None):
+ """Mark the specified files so they will no longer be tracked after
+ the next commit.
+
+ This only removes files from the current branch, not from the entire
+ project history, and it does not delete them from the working directory.
+
+ Returns True on success.
+
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+
+ """
+ if not isinstance(files, list):
+ files = [files]
+
+ args = cmdbuilder(b('forget'), I=include, X=exclude, *files)
+
+ eh = util.reterrorhandler(args)
+ self.rawcommand(args, eh=eh)
+
+ return bool(eh)
+
+ def grep(self, pattern, files=[], all=False, text=False, follow=False,
+ ignorecase=False, fileswithmatches=False, line=False, user=False,
+ date=False, include=None, exclude=None):
+ """Search for a pattern in specified files and revisions.
+
+ This behaves differently than Unix grep. It only accepts Python/Perl
+ regexps. It searches repository history, not the working directory.
+ It always prints the revision number in which a match appears.
+
+ Yields (filename, revision, [line, [match status, [user,
+ [date, [match]]]]]) per match depending on the given options.
+
+ all - print all revisions that match
+ text - treat all files as text
+ follow - follow changeset history, or file history across
+ copies and renames
+ ignorecase - ignore case when matching
+ fileswithmatches - return only filenames and revisions that match
+ line - return line numbers in the result tuple
+ user - return the author in the result tuple
+ date - return the date in the result tuple
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+
+ """
+ if not isinstance(files, list):
+ files = [files]
+
+ args = cmdbuilder(b('grep'), all=all, a=text, f=follow, i=ignorecase,
+ l=fileswithmatches, n=line, u=user, d=date,
+ I=include, X=exclude, hidden=self.hidden,
+ *[pattern] + files)
+ args.append(b('-0'))
+
+ def eh(ret, out, err):
+ if ret != 1:
+ raise error.CommandError(args, ret, out, err)
+ return b('')
+
+ out = self.rawcommand(args, eh=eh).split(b('\0'))
+
+ fieldcount = 3
+ if user:
+ fieldcount += 1
+ if date:
+ fieldcount += 1
+ if line:
+ fieldcount += 1
+ if all:
+ fieldcount += 1
+ if fileswithmatches:
+ fieldcount -= 1
+
+ return util.grouper(fieldcount, out)
+
+ def heads(self, rev=[], startrev=[], topological=False, closed=False):
+ """Return a list of current repository heads or branch heads.
+
+ rev - return only branch heads on the branches associated with
+ the specified changesets.
+
+ startrev - return only heads which are descendants of the given revs.
+ topological - named branch mechanics will be ignored and only changesets
+ without children will be shown.
+
+ closed - normal and closed branch heads.
+
+ """
+ if not isinstance(rev, list):
+ rev = [rev]
+
+ args = cmdbuilder(b('heads'), r=startrev, t=topological, c=closed,
+ template=templates.changeset, hidden=self.hidden,
+ *rev)
+
+ def eh(ret, out, err):
+ if ret != 1:
+ raise error.CommandError(args, ret, out, err)
+ return b('')
+
+ out = self.rawcommand(args, eh=eh).split(b('\0'))[:-1]
+ return self._parserevs(out)
+
+ def identify(self, rev=None, source=None, num=False, id=False, branch=False,
+ tags=False, bookmarks=False):
+ """Return a summary string identifying the repository state at rev
+ using one or two parent hash identifiers, followed by a "+" if
+ the working directory has uncommitted changes, the branch name
+ (if not default), a list of tags, and a list of bookmarks.
+
+ When rev is not given, return a summary string of the current
+ state of the repository.
+
+ Specifying source as a repository root or Mercurial bundle will cause
+ lookup to operate on that repository/bundle.
+
+ num - show local revision number
+ id - show global revision id
+ branch - show branch
+ tags - show tags
+ bookmarks - show bookmarks
+
+ """
+ args = cmdbuilder(b('identify'), source, r=rev, n=num, i=id,
+ b=branch, t=tags, B=bookmarks,
+ hidden=self.hidden)
+
+ return self.rawcommand(args)
+
+ def import_(self, patches, strip=None, force=False, nocommit=False,
+ bypass=False, exact=False, importbranch=False, message=None,
+ date=None, user=None, similarity=None):
+ """Import the specified patches which can be a list of file names or a
+ file-like object and commit them individually (unless nocommit is
+ specified).
+
+ strip - directory strip option for patch. This has the same
+ meaning as the corresponding patch option (default: 1)
+
+ force - skip check for outstanding uncommitted changes
+ nocommit - don't commit, just update the working directory
+ bypass - apply patch without touching the working directory
+ exact - apply patch to the nodes from which it was generated
+ importbranch - use any branch information in patch (implied by exact)
+ message - the commit message
+ date - record the specified date as commit date
+ user - record the specified user as committer
+ similarity - guess renamed files by similarity (0<=s<=100)
+
+ """
+ if hasattr(patches, 'read') and hasattr(patches, 'readline'):
+ patch = patches
+
+ def readline(size, output):
+ return patch.readline(size)
+
+ stdin = True
+ patches = ()
+ prompt = readline
+ input = patch.read
+ else:
+ stdin = False
+ prompt = None
+ input = None
+
+ args = cmdbuilder(b('import'), strip=strip, force=force,
+ no_commit=nocommit, bypass=bypass, exact=exact,
+ import_branch=importbranch, message=message,
+ date=date, user=user, similarity=similarity, _=stdin,
+ *patches)
+
+ self.rawcommand(args, prompt=prompt, input=input)
+
+ def incoming(self, revrange=None, path=None, force=False, newest=False,
+ bundle=None, bookmarks=False, branch=None, limit=None,
+ nomerges=False, subrepos=False):
+ """Return new changesets found in the specified path or the default pull
+ location.
+
+ When bookmarks=True, return a list of (name, node) of incoming
+ bookmarks.
+
+ revrange - a remote changeset or list of changesets intended to be added
+ force - run even if remote repository is unrelated
+ newest - show newest record first
+ bundle - avoid downloading the changesets twice and store the
+ bundles into the specified file.
+
+ bookmarks - compare bookmarks (this changes the return value)
+ branch - a specific branch you would like to pull
+ limit - limit number of changes returned
+ nomerges - do not show merges
+ ssh - specify ssh command to use
+ remotecmd - specify hg command to run on the remote side
+        insecure - do not verify server certificate (ignoring web.cacerts config)
+ subrepos - recurse into subrepositories
+
+ """
+ args = cmdbuilder(b('incoming'), path,
+ template=templates.changeset, r=revrange,
+ f=force, n=newest, bundle=bundle,
+ B=bookmarks, b=branch, l=limit, M=nomerges,
+ S=subrepos)
+
+ def eh(ret, out, err):
+ if ret != 1:
+ raise error.CommandError(args, ret, out, err)
+
+ out = self.rawcommand(args, eh=eh)
+ if not out:
+ return []
+
+ out = util.eatlines(out, 2)
+ if bookmarks:
+ bms = []
+ for line in out.splitlines():
+ bms.append(tuple(line.split()))
+ return bms
+ else:
+ out = out.split(b('\0'))[:-1]
+ return self._parserevs(out)
+
+ def log(self, revrange=None, files=[], follow=False,
+ followfirst=False, date=None, copies=False, keyword=None,
+ removed=False, onlymerges=False, user=None, branch=None,
+ prune=None, hidden=None, limit=None, nomerges=False,
+ include=None, exclude=None):
+ """Return the revision history of the specified files or the entire
+ project.
+
+ File history is shown without following rename or copy history of files.
+ Use follow with a filename to follow history across renames and copies.
+ follow without a filename will only show ancestors or descendants of the
+ starting revision. followfirst only follows the first parent of merge
+ revisions.
+
+ If revrange isn't specified, the default is "tip:0" unless
+ follow is set, in which case the working directory parent is
+ used as the starting revision.
+
+ The returned changeset is a named tuple with the following
+ string fields:
+
+ - rev
+ - node
+ - tags (space delimited)
+ - branch
+ - author
+ - desc
+
+ follow - follow changeset history, or file history across
+ copies and renames
+ followfirst - only follow the first parent of merge changesets
+ date - show revisions matching date spec
+ copies - show copied files
+ keyword - do case-insensitive search for a given text
+ removed - include revisions where files were removed
+ onlymerges - show only merges
+ user - revisions committed by user
+ branch - show changesets within the given named branch
+ prune - do not display revision or any of its ancestors
+ hidden - show hidden changesets
+ limit - limit number of changes displayed
+ nomerges - do not show merges
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+
+ """
+ if hidden is None:
+ hidden = self.hidden
+ args = cmdbuilder(b('log'), template=templates.changeset,
+ r=revrange, f=follow, follow_first=followfirst,
+ d=date, C=copies, k=keyword, removed=removed,
+ m=onlymerges, u=user, b=branch, P=prune,
+ l=limit, M=nomerges, I=include, X=exclude,
+ hidden=hidden, *files)
+
+ out = self.rawcommand(args)
+ out = out.split(b('\0'))[:-1]
+
+ return self._parserevs(out)
+
+ def manifest(self, rev=None, all=False):
+ """Yields (nodeid, permission, executable, symlink, file path) tuples
+ for version controlled files for the given revision. If no
+ revision is given, the first parent of the working directory
+ is used, or the null revision if no revision is checked out.
+
+ When all is True, all files from all revisions are yielded
+ (just the name). This includes deleted and renamed files.
+
+ """
+ args = cmdbuilder(b('manifest'), r=rev, all=all, debug=True,
+ hidden=self.hidden)
+
+ out = self.rawcommand(args)
+
+ if all:
+ for line in out.splitlines():
+ yield line
+ else:
+ for line in out.splitlines():
+ node = line[0:40]
+ perm = line[41:44]
+ symlink = line[45:46] == b('@')
+ executable = line[45:46] == b('*')
+ yield node, perm, executable, symlink, line[47:]
+
+ def merge(self, rev=None, force=False, tool=None, cb=merge.handlers.abort):
+ """Merge working directory with rev. If no revision is specified, the
+ working directory's parent is a head revision, and the current
+ branch contains exactly one other head, the other head is
+ merged with by default.
+
+ The current working directory is updated with all changes made in the
+ requested revision since the last common predecessor revision.
+
+ Files that changed between either parent are marked as changed for the
+ next commit and a commit must be performed before any further updates to
+ the repository are allowed. The next commit will have two parents.
+
+ force - force a merge with outstanding changes
+ tool - can be used to specify the merge tool used for file merges. It
+ overrides the HGMERGE environment variable and your configuration files.
+
+ cb - controls the behaviour when Mercurial prompts what to do
+ with regard to a specific file, e.g. when one parent modified
+ a file and the other removed it. It can be one of
+ merge.handlers, or a function that gets a single argument
+        which is the contents of stdout. It should return one of the
+ expected choices (a single character).
+
+ """
+ # we can't really use --preview since merge doesn't support --template
+ args = cmdbuilder(b('merge'), r=rev, f=force, t=tool)
+
+ prompt = None
+ if cb is merge.handlers.abort:
+ prompt = cb
+ elif cb is merge.handlers.noninteractive:
+ args.append(b('-y'))
+ else:
+ prompt = lambda size, output: cb(output) + b('\n')
+
+ self.rawcommand(args, prompt=prompt)
+
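+    # Illustrative note: a custom prompt handler can be supplied as cb, e.g.
+    #   client.merge(rev=b'default', cb=lambda output: b'c')
+    # The callable receives the stdout collected so far and must return a
+    # single-character choice, as described in the docstring above.
+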
+ def move(self, source, dest, after=False, force=False, dryrun=False,
+ include=None, exclude=None):
+ """Mark dest as copies of source; mark source for deletion. If dest
+ is a directory, copies are put in that directory. If dest is a
+ file, then source must be a string.
+
+ Returns True on success, False if errors are encountered.
+
+ source - a file or a list of files
+ dest - a destination file or directory
+ after - record a rename that has already occurred
+ force - forcibly copy over an existing managed file
+ dryrun - do not perform actions, just print output
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+
+ """
+ if not isinstance(source, list):
+ source = [source]
+
+ source.append(dest)
+ args = cmdbuilder(b('move'), A=after, f=force, n=dryrun,
+ I=include, X=exclude, *source)
+
+ eh = util.reterrorhandler(args)
+ self.rawcommand(args, eh=eh)
+
+ return bool(eh)
+
+ def outgoing(self, revrange=None, path=None, force=False, newest=False,
+ bookmarks=False, branch=None, limit=None, nomerges=False,
+ subrepos=False):
+ """Return changesets not found in the specified path or the default push
+ location.
+
+ When bookmarks=True, return a list of (name, node) of
+ bookmarks that will be pushed.
+
+        revrange - a (list of) changeset intended to be included in
+        the destination
+        force - run even when the destination is unrelated
+        newest - show newest record first
+        branch - a specific branch you would like to push
+        limit - limit number of changes displayed
+        nomerges - do not show merges
+        ssh - specify ssh command to use
+        remotecmd - specify hg command to run on the remote side
+        insecure - do not verify server certificate (ignoring
+        web.cacerts config)
+        subrepos - recurse into subrepositories
+
+ """
+ args = cmdbuilder(b('outgoing'),
+ path,
+ template=templates.changeset, r=revrange,
+ f=force, n=newest, B=bookmarks,
+ b=branch, S=subrepos)
+
+ def eh(ret, out, err):
+ if ret != 1:
+ raise error.CommandError(args, ret, out, err)
+
+ out = self.rawcommand(args, eh=eh)
+ if not out:
+ return []
+
+ out = util.eatlines(out, 2)
+ if bookmarks:
+ bms = []
+ for line in out.splitlines():
+ bms.append(tuple(line.split()))
+ return bms
+ else:
+ out = out.split(b('\0'))[:-1]
+ return self._parserevs(out)
+
+ def parents(self, rev=None, file=None):
+ """Return the working directory's parent revisions. If rev is given,
+        the parent of that revision will be returned. If file is given,
+ the revision in which the file was last changed (before the
+ working directory revision or the revision specified by rev)
+ is returned.
+
+ """
+ args = cmdbuilder(b('parents'), file, template=templates.changeset,
+ r=rev, hidden=self.hidden)
+
+ out = self.rawcommand(args)
+ if not out:
+ return
+
+ out = out.split(b('\0'))[:-1]
+
+ return self._parserevs(out)
+
+ def paths(self, name=None):
+ """
+ Return the definition of given symbolic path name. If no name is given,
+ return a dictionary of pathname : url of all available names.
+
+ Path names are defined in the [paths] section of your configuration file
+ and in "/etc/mercurial/hgrc". If run inside a repository, ".hg/hgrc" is
+ used, too.
+ """
+ if not name:
+ out = self.rawcommand([b('paths')])
+ if not out:
+ return {}
+
+ return dict([s.split(b(' = '))
+ for s in out.rstrip().split(b('\n'))])
+ else:
+ args = cmdbuilder(b('paths'), name)
+ out = self.rawcommand(args)
+ return out.rstrip()
+
+ def pull(self, source=None, rev=None, update=False, force=False,
+ bookmark=None, branch=None, ssh=None, remotecmd=None,
+ insecure=False, tool=None):
+ """Pull changes from a remote repository.
+
+ This finds all changes from the repository specified by source
+ and adds them to this repository. If source is omitted, the
+ 'default' path will be used. By default, this does not update
+ the copy of the project in the working directory.
+
+ Returns True on success, False if update was given and there were
+ unresolved files.
+
+ update - update to new branch head if changesets were pulled
+ force - run even when remote repository is unrelated
+ rev - a (list of) remote changeset intended to be added
+ bookmark - (list of) bookmark to pull
+ branch - a (list of) specific branch you would like to pull
+ ssh - specify ssh command to use
+ remotecmd - specify hg command to run on the remote side
+ insecure - do not verify server certificate (ignoring
+ web.cacerts config)
+ tool - specify merge tool for rebase
+
+ """
+ args = cmdbuilder(b('pull'), source, r=rev, u=update, f=force,
+ B=bookmark, b=branch, e=ssh,
+ remotecmd=remotecmd, insecure=insecure,
+ t=tool)
+
+ eh = util.reterrorhandler(args)
+ self.rawcommand(args, eh=eh)
+
+ return bool(eh)
+
+ def push(self, dest=None, rev=None, force=False, bookmark=None, branch=None,
+ newbranch=False, ssh=None, remotecmd=None, insecure=False):
+ """Push changesets from this repository to the specified destination.
+
+ This operation is symmetrical to pull: it is identical to a pull in the
+ destination repository from the current one.
+
+ Returns True if push was successful, False if nothing to push.
+
+ rev - the (list of) specified revision and all its ancestors
+ will be pushed to the remote repository.
+
+ force - override the default behavior and push all changesets on all
+ branches.
+
+ bookmark - (list of) bookmark to push
+ branch - a (list of) specific branch you would like to push
+ newbranch - allows push to create a new named branch that is
+ not present at the destination. This allows you to only create
+ a new branch without forcing other changes.
+
+ ssh - specify ssh command to use
+ remotecmd - specify hg command to run on the remote side
+ insecure - do not verify server certificate (ignoring
+ web.cacerts config)
+
+ """
+ args = cmdbuilder(b('push'), dest, r=rev, f=force, B=bookmark, b=branch,
+ new_branch=newbranch, e=ssh, remotecmd=remotecmd,
+ insecure=insecure)
+
+ eh = util.reterrorhandler(args)
+ self.rawcommand(args, eh=eh)
+
+ return bool(eh)
+
+ def remove(self, files, after=False, force=False, include=None,
+ exclude=None):
+ """Schedule the indicated files for removal from the repository. This
+ only removes files from the current branch, not from the
+ entire project history.
+
+ Returns True on success, False if any warnings encountered.
+
+ after - used to remove only files that have already been deleted
+ force - remove (and delete) file even if added or modified
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+
+ """
+ if not isinstance(files, list):
+ files = [files]
+
+ args = cmdbuilder(b('remove'), A=after, f=force, I=include, X=exclude,
+ *files)
+
+ eh = util.reterrorhandler(args)
+ self.rawcommand(args, eh=eh)
+
+ return bool(eh)
+
+ def resolve(self, file=[], all=False, listfiles=False, mark=False,
+ unmark=False, tool=None, include=None, exclude=None):
+ """
+ Redo merges or set/view the merge status of given files.
+
+ Returns True on success, False if any files fail a resolve attempt.
+
+ When listfiles is True, returns a list of (code, file path) of resolved
+ and unresolved files. Code will be 'R' or 'U' accordingly.
+
+ all - select all unresolved files
+ mark - mark files as resolved
+ unmark - mark files as unresolved
+ tool - specify merge tool
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+ """
+ if not isinstance(file, list):
+ file = [file]
+
+ args = cmdbuilder(b('resolve'), a=all, l=listfiles, m=mark, u=unmark,
+ t=tool, I=include, X=exclude, *file)
+
+ out = self.rawcommand(args)
+
+ if listfiles:
+ l = []
+ for line in out.splitlines():
+ l.append(tuple(line.split(b(' '), 1)))
+ return l
+
+ def revert(self, files, rev=None, all=False, date=None, nobackup=False,
+ dryrun=False, include=None, exclude=None):
+ """With no revision specified, revert the specified files or
+ directories to the contents they had in the parent of the
+ working directory. This restores the contents of files to an
+ unmodified state and unschedules adds, removes, copies, and
+ renames. If the working directory has two parents, you must
+ explicitly specify a revision.
+
+ Specifying rev or date will revert the given files or
+ directories to their states as of a specific revision. Because
+ revert does not change the working directory parents, this
+ will cause these files to appear modified. This can be helpful
+ to "back out" some or all of an earlier change.
+
+ Modified files are saved with a .orig suffix before reverting.
+ To disable these backups, use nobackup.
+
+ Returns True on success.
+
+ all - revert all changes when no arguments given
+ date - tipmost revision matching date
+ rev - revert to the specified revision
+ nobackup - do not save backup copies of files
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+ dryrun - do not perform actions, just print output
+
+ """
+ if not isinstance(files, list):
+ files = [files]
+
+ args = cmdbuilder(b('revert'), r=rev, a=all, d=date,
+ no_backup=nobackup, n=dryrun, I=include, X=exclude,
+ hidden=self.hidden, *files)
+
+ eh = util.reterrorhandler(args)
+ self.rawcommand(args, eh=eh)
+
+ return bool(eh)
+
+ def root(self):
+ """
+ Return the root directory of the current repository.
+ """
+ return self.rawcommand([b('root')]).rstrip()
+
+ def status(self, rev=None, change=None, all=False, modified=False,
+ added=False, removed=False, deleted=False, clean=False,
+ unknown=False, ignored=False, copies=False,
+ subrepos=False, include=None, exclude=None):
+ """
+ Return status of files in the repository as a list of (code, file path)
+ where code can be:
+
+ M = modified
+ A = added
+ R = removed
+ C = clean
+ ! = missing (deleted by non-hg command, but still tracked)
+ ? = untracked
+ I = ignored
+ = origin of the previous file listed as A (added)
+
+ rev - show difference from (list of) revision
+ change - list the changed files of a revision
+ all - show status of all files
+ modified - show only modified files
+ added - show only added files
+ removed - show only removed files
+ deleted - show only deleted (but tracked) files
+ clean - show only files without changes
+ unknown - show only unknown (not tracked) files
+ ignored - show only ignored files
+ copies - show source of copied files
+ subrepos - recurse into subrepositories
+ include - include names matching the given patterns
+ exclude - exclude names matching the given patterns
+ """
+ if rev and change:
+ raise ValueError('cannot specify both rev and change')
+
+ args = cmdbuilder(b('status'), rev=rev, change=change, A=all,
+ m=modified, a=added, r=removed, d=deleted, c=clean,
+ u=unknown, i=ignored, C=copies, S=subrepos, I=include,
+ X=exclude, hidden=self.hidden)
+
+ args.append(b('-0'))
+
+ out = self.rawcommand(args)
+ l = []
+
+ for entry in out.split(b('\0')):
+ if entry:
+ if entry[0:1] == b(' '):
+ l.append((b(' '), entry[2:]))
+ else:
+ l.append(tuple(entry.split(b(' '), 1)))
+
+ return l
+
+ def tag(self, names, rev=None, message=None, force=False, local=False,
+ remove=False, date=None, user=None):
+ """Add one or more tags specified by names for the current or given
+ revision.
+
+ Changing an existing tag is normally disallowed; use force to override.
+
+ Tag commits are usually made at the head of a branch. If the
+ parent of the working directory is not a branch head, a
+ CommandError will be raised. force can be specified to force
+ the tag commit to be based on a non-head changeset.
+
+ local - make the tag local
+ rev - revision to tag
+ remove - remove a tag
+ message - set commit message
+ date - record the specified date as commit date
+ user - record the specified user as committer
+
+ """
+ if not isinstance(names, list):
+ names = [names]
+
+ args = cmdbuilder(b('tag'), r=rev, m=message, f=force, l=local,
+ remove=remove, d=date, u=user, hidden=self.hidden,
+ *names)
+
+ self.rawcommand(args)
+
+ def tags(self):
+ """
+ Return a list of repository tags as: (name, rev, node, islocal)
+ """
+ args = cmdbuilder(b('tags'), v=True)
+
+ out = self.rawcommand(args)
+
+ t = []
+ for line in out.splitlines():
+ taglocal = line.endswith(b(' local'))
+ if taglocal:
+ line = line[:-6]
+ name, rev = line.rsplit(b(' '), 1)
+ rev, node = rev.split(b(':'))
+ t.append((name.rstrip(), int(rev), node, taglocal))
+ return t
+
+ def phase(self, revs=(), secret=False, draft=False, public=False,
+ force=False):
+ '''Set or show the current phase name.
+
+ revs - target revision(s)
+ public - set changeset phase to public
+ draft - set changeset phase to draft
+ secret - set changeset phase to secret
+ force - allow to move boundary backward
+
+ output format: [(id, phase) ...] for each changeset
+
+ The arguments match the mercurial API.
+ '''
+ if not isinstance(revs, (list, tuple)):
+ revs = [revs]
+ args = util.cmdbuilder(b('phase'), secret=secret, draft=draft,
+ public=public, force=force,
+ hidden=self.hidden, *revs)
+ out = self.rawcommand(args)
+ if draft or public or secret:
+ return
+ else:
+            output = [i.split(b(': ')) for i in out.strip().split(b('\n'))]
+ return [(int(num), phase) for (num, phase) in output]
+
+ def summary(self, remote=False):
+ """
+ Return a dictionary with a brief summary of the working directory state,
+ including parents, branch, commit status, and available updates.
+
+ 'parent' : a list of (rev, node, tags, message)
+ 'branch' : the current branch
+ 'commit' : True if the working directory is clean, False otherwise
+ 'update' : number of available updates,
+ ['remote' : (in, in bookmarks, out, out bookmarks),]
+ ['mq': (applied, unapplied) mq patches,]
+
+        unparsed entries will be of the form key : value
+ """
+ args = cmdbuilder(b('summary'), remote=remote, hidden=self.hidden)
+
+ out = self.rawcommand(args).splitlines()
+
+ d = {}
+ while out:
+ line = out.pop(0)
+ name, value = line.split(b(': '), 1)
+
+ if name == b('parent'):
+ parent, tags = value.split(b(' '), 1)
+ rev, node = parent.split(b(':'))
+
+ if tags:
+ tags = tags.replace(b(' (empty repository)'), b(''))
+ else:
+ tags = None
+
+ value = d.get(name, [])
+
+ if rev == b('-1'):
+ value.append((int(rev), node, tags, None))
+ else:
+ message = out.pop(0)[1:]
+ value.append((int(rev), node, tags, message))
+ elif name == b('branch'):
+ pass
+ elif name == b('commit'):
+ value = value == b('(clean)')
+ elif name == b('update'):
+ if value == b('(current)'):
+ value = 0
+ else:
+ value = int(value.split(b(' '), 1)[0])
+ elif remote and name == b('remote'):
+ if value == b('(synced)'):
+ value = 0, 0, 0, 0
+ else:
+ inc = incb = out_ = outb = 0
+
+ for v in value.split(b(', ')):
+ count, v = v.split(b(' '), 1)
+ if v == b('outgoing'):
+ out_ = int(count)
+ elif v.endswith(b('incoming')):
+ inc = int(count)
+ elif v == b('incoming bookmarks'):
+ incb = int(count)
+ elif v == b('outgoing bookmarks'):
+ outb = int(count)
+
+ value = inc, incb, out_, outb
+ elif name == b('mq'):
+ applied = unapplied = 0
+ for v in value.split(b(', ')):
+ count, v = v.split(b(' '), 1)
+ if v == b('applied'):
+ applied = int(count)
+ elif v == b('unapplied'):
+ unapplied = int(count)
+ value = applied, unapplied
+
+ d[name] = value
+
+ return d
+
+ def tip(self):
+ """
+ Return the tip revision (usually just called the tip) which is the
+ changeset most recently added to the repository (and therefore the most
+ recently changed head).
+ """
+ args = cmdbuilder(b('tip'), template=templates.changeset,
+ hidden=self.hidden)
+ out = self.rawcommand(args)
+ out = out.split(b('\0'))
+
+ return self._parserevs(out)[0]
+
+ def update(self, rev=None, clean=False, check=False, date=None):
+ """
+ Update the repository's working directory to changeset specified by rev.
+ If rev isn't specified, update to the tip of the current named branch.
+
+ Return the number of files (updated, merged, removed, unresolved)
+
+ clean - discard uncommitted changes (no backup)
+ check - update across branches if no uncommitted changes
+ date - tipmost revision matching date
+ """
+ if clean and check:
+ raise ValueError('clean and check cannot both be True')
+
+ args = cmdbuilder(b('update'), r=rev, C=clean, c=check, d=date,
+ hidden=self.hidden)
+
+ def eh(ret, out, err):
+ if ret == 1:
+ return out
+
+ raise error.CommandError(args, ret, out, err)
+
+ out = self.rawcommand(args, eh=eh)
+
+ m = re.search(b(r'^(\d+).+, (\d+).+, (\d+).+, (\d+)'), out,
+ re.MULTILINE)
+ return tuple(map(int, list(m.groups())))
+
+ @property
+ def version(self):
+        """Return the hg version that runs the command server as a 4-field
+        tuple: major, minor, micro and local build info, e.g. (1, 9,
+        1, '+4-3095db9f5c2c')
+ """
+ if self._version is None:
+ v = self.rawcommand(cmdbuilder(b('version'), q=True))
+ v = list(re.match(b(r'.*?(\d+)\.(\d+)\.?(\d+)?(\+[0-9a-f-]+)?'),
+ v).groups())
+
+ for i in range(3):
+ try:
+ v[i] = int(v[i])
+ except TypeError:
+ v[i] = 0
+
+ self._version = tuple(v)
+
+ return self._version
+
+ def __getitem__(self, changeid):
+ try:
+ return context.changectx(self, changeid)
+ except ValueError as e:
+ raise KeyError(*e.args)
+
+ def __contains__(self, changeid):
+ """
+ check if changeid, which can be either a local revision number or a
+ changeset id, matches a changeset in the client.
+ """
+ try:
+ context.changectx(self, changeid)
+ return True
+ except ValueError:
+ return False
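
The client methods above all build an argument list with cmdbuilder() and run it through rawcommand(); the sketch below (not part of the vendored code) shows typical use. It assumes the hglib.open() helper defined elsewhere in python-hglib and an existing repository at the given path.

    import hglib

    # Assumption: hglib.open() starts an hg command server for the repository
    # at the given path and returns an hgclient instance like the one above.
    client = hglib.open('/path/to/repo')

    # status() yields (code, path) byte tuples, e.g. (b'M', b'somefile.py').
    for code, path in client.status():
        print(code.decode(), path.decode())

    # summary() returns a dict keyed by bytes; b'commit' is True when clean.
    info = client.summary()
    print(info[b'branch'], info[b'commit'])

    client.close()  # shut the command server down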
diff --git a/third_party/python/python-hglib/hglib/context.py b/third_party/python/python-hglib/hglib/context.py
new file mode 100644
index 0000000000..3ba9abb890
--- /dev/null
+++ b/third_party/python/python-hglib/hglib/context.py
@@ -0,0 +1,238 @@
+import hglib.client # Circular dependency.
+from hglib import util, templates
+from hglib.error import CommandError
+from hglib.util import b, strtobytes, integertypes
+
+_nullcset = [b('-1'), b('0000000000000000000000000000000000000000'), b(''),
+ b(''), b(''), b(''), b('')]
+
+class changectx(object):
+ """A changecontext object makes access to data related to a particular
+ changeset convenient."""
+ def __init__(self, repo, changeid=b('')):
+ """changeid is a revision number, node, or tag"""
+ if changeid == b(''):
+ changeid = b('.')
+ self._repo = repo
+ if isinstance(changeid, hglib.client.revision):
+ cset = changeid
+ elif changeid == -1:
+ cset = _nullcset
+ else:
+ if isinstance(changeid, integertypes):
+ changeid = b('rev(') + strtobytes(changeid) + b(')')
+
+ notfound = False
+ try:
+ cset = self._repo.log(changeid)
+ # hg bbf4f3dfd700 gave a null result for tip+1
+ if (cset and cset[0][1] == _nullcset[1]
+ and cset[0][0] != _nullcset[0]):
+ notfound = True
+ except CommandError:
+ notfound = True
+
+ if notfound or not len(cset):
+ raise ValueError('changeid %r not found in repo' % changeid)
+ if len(cset) > 1:
+ raise ValueError('changeid must yield a single changeset')
+ cset = cset[0]
+
+ self._rev, self._node, self._tags = cset[:3]
+ self._branch, self._author, self._description, self._date = cset[3:]
+
+ self._rev = int(self._rev)
+
+ self._tags = self._tags.split()
+ try:
+ self._tags.remove(b('tip'))
+ except ValueError:
+ pass
+
+ self._ignored = None
+ self._clean = None
+
+ def __str__(self):
+ return self._node[:12].decode('latin-1')
+
+ def __int__(self):
+ return self._rev
+
+ def __repr__(self):
+ return "<changectx %s>" % str(self)
+
+ def __hash__(self):
+ try:
+ return hash(self._rev)
+ except AttributeError:
+ return id(self)
+
+ def __eq__(self, other):
+ try:
+ return self._rev == other._rev
+ except AttributeError:
+ return False
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __nonzero__(self):
+ return self._rev != -1
+
+ def __bool__(self):
+ return self.__nonzero__()
+
+ def __contains__(self, key):
+ return key in self._manifest
+
+ def __iter__(self):
+ for f in sorted(self._manifest):
+ yield f
+
+ @util.propertycache
+ def _status(self):
+ return self._parsestatus(self._repo.status(change=strtobytes(self)))[:4]
+
+ def _parsestatus(self, stat):
+ d = dict((c, [])
+ for c in (b('M'), b('A'), b('R'), b('!'), b('?'), b('I'),
+ b('C'), b(' ')))
+ for k, path in stat:
+ d[k].append(path)
+ return (d[b('M')], d[b('A')], d[b('R')], d[b('!')], d[b('?')],
+ d[b('I')], d[b('C')])
+
+ def status(self, ignored=False, clean=False):
+ """Explicit status query
+ Unless this method is used to query the working copy status, the
+ _status property will implicitly read the status using its default
+ arguments."""
+ stat = self._parsestatus(self._repo.status(change=strtobytes(self),
+ ignored=ignored,
+ clean=clean))
+ self._unknown = self._ignored = self._clean = None
+ if ignored:
+ self._ignored = stat[5]
+ if clean:
+ self._clean = stat[6]
+ self._status = stat[:4]
+ return stat
+
+ def rev(self):
+ return self._rev
+
+ def node(self):
+ return self._node
+
+ def tags(self):
+ return self._tags
+
+ def branch(self):
+ return self._branch
+
+ def author(self):
+ return self._author
+
+ def user(self):
+ return self._author
+
+ def date(self):
+ return self._date
+
+ def description(self):
+ return self._description
+
+ def files(self):
+ return sorted(self._status[0] + self._status[1] + self._status[2])
+
+ def modified(self):
+ return self._status[0]
+
+ def added(self):
+ return self._status[1]
+
+ def removed(self):
+ return self._status[2]
+
+ def ignored(self):
+ if self._ignored is None:
+ self.status(ignored=True)
+ return self._ignored
+
+ def clean(self):
+ if self._clean is None:
+ self.status(clean=True)
+ return self._clean
+
+ @util.propertycache
+ def _manifest(self):
+ d = {}
+ for node, p, e, s, path in self._repo.manifest(rev=strtobytes(self)):
+ d[path] = node
+ return d
+
+ def manifest(self):
+ return self._manifest
+
+    def hex(self):
+        # the node from the log template is already a 40-char hex bytestring
+        return self._node
+
+ @util.propertycache
+ def _parents(self):
+ """return contexts for each parent changeset"""
+ par = self._repo.parents(rev=strtobytes(self))
+ if not par:
+ return [changectx(self._repo, -1)]
+ return [changectx(self._repo, int(cset.rev)) for cset in par]
+
+ def parents(self):
+ return self._parents
+
+ def p1(self):
+ return self._parents[0]
+
+ def p2(self):
+ if len(self._parents) == 2:
+ return self._parents[1]
+ return changectx(self._repo, -1)
+
+ @util.propertycache
+ def _bookmarks(self):
+ books = [bm for bm in self._repo.bookmarks()[0] if bm[1] == self._rev]
+
+ bms = []
+ for name, r, n in books:
+ bms.append(name)
+ return bms
+
+ def bookmarks(self):
+ return self._bookmarks
+
+ def hidden(self):
+ """return True if the changeset is hidden, else False"""
+ return bool(self._repo.log(revrange=self._node + b(' and hidden()'),
+ hidden=True))
+
+ def phase(self):
+ """return the phase of the changeset (public, draft or secret)"""
+ return self._repo.phase(strtobytes(self._rev))[0][1]
+
+ def children(self):
+ """return contexts for each child changeset"""
+ for c in self._repo.log(b('children(') + self._node + b(')')):
+ yield changectx(self._repo, c)
+
+ def ancestors(self):
+ for a in self._repo.log(b('ancestors(') + self._node + b(')')):
+ yield changectx(self._repo, a)
+
+ def descendants(self):
+ for d in self._repo.log(b('descendants(') + self._node + b(')')):
+ yield changectx(self._repo, d)
+
+ def ancestor(self, c2):
+ """
+ return the ancestor context of self and c2
+ """
+        return changectx(self._repo, b('ancestor(') + self._node + b(', ') +
+                         c2._node + b(')'))
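
A brief sketch of how the changectx wrapper above is normally reached, via hgclient.__getitem__ from the client module; it assumes the open client from the earlier sketch and at least one commit in the repository.

    ctx = client[b'tip']                 # __getitem__ builds a changectx

    print(int(ctx), str(ctx))            # local revision number, short node
    print(ctx.branch(), ctx.files())     # branch name, files touched
    for parent in ctx.parents():         # parent changectx objects
        print('parent:', int(parent))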
diff --git a/third_party/python/python-hglib/hglib/error.py b/third_party/python/python-hglib/hglib/error.py
new file mode 100644
index 0000000000..e0652dc74d
--- /dev/null
+++ b/third_party/python/python-hglib/hglib/error.py
@@ -0,0 +1,18 @@
+class CommandError(Exception):
+ def __init__(self, args, ret, out, err):
+ self.args = args
+ self.ret = ret
+ self.out = out
+ self.err = err
+
+ def __str__(self):
+ return str((self.ret, self.out.rstrip(), self.err.rstrip()))
+
+class ServerError(Exception):
+ pass
+
+class ResponseError(ServerError, ValueError):
+ pass
+
+class CapabilityError(ServerError):
+ pass
diff --git a/third_party/python/python-hglib/hglib/merge.py b/third_party/python/python-hglib/hglib/merge.py
new file mode 100644
index 0000000000..88bc99d993
--- /dev/null
+++ b/third_party/python/python-hglib/hglib/merge.py
@@ -0,0 +1,21 @@
+from hglib.util import b
+
+class handlers(object):
+ """
+ These can be used as the cb argument to hgclient.merge() to control the
+ behaviour when Mercurial prompts what to do with regard to a specific file,
+ e.g. when one parent modified a file and the other removed it.
+ """
+
+ @staticmethod
+ def abort(size, output):
+ """
+ Abort the merge if a prompt appears.
+ """
+ return b('')
+
+    # This corresponds to Mercurial's -y/--noninteractive global option, which
+    # picks the first choice on all prompts.
+    noninteractive = 'yes'
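
Per the note above, these handlers are meant as the cb argument of hgclient.merge(); the parameter name and the revision below are assumptions taken from that description, not verified against the client code in this patch.

    from hglib.merge import handlers

    # Abort the merge as soon as Mercurial would prompt about a file
    # (e.g. one parent modified it and the other removed it).
    client.merge(rev=b'stable', cb=handlers.abort)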
diff --git a/third_party/python/python-hglib/hglib/templates.py b/third_party/python/python-hglib/hglib/templates.py
new file mode 100644
index 0000000000..f91ee466a7
--- /dev/null
+++ b/third_party/python/python-hglib/hglib/templates.py
@@ -0,0 +1,4 @@
+from hglib.util import b
+
+changeset = b('{rev}\\0{node}\\0{tags}\\0{branch}\\0{author}'
+ '\\0{desc}\\0{date}\\0')
diff --git a/third_party/python/python-hglib/hglib/util.py b/third_party/python/python-hglib/hglib/util.py
new file mode 100644
index 0000000000..b4bfe731f9
--- /dev/null
+++ b/third_party/python/python-hglib/hglib/util.py
@@ -0,0 +1,217 @@
+import os, subprocess, sys
+from hglib import error
+try:
+ from io import BytesIO
+except ImportError:
+ from cStringIO import StringIO as BytesIO
+
+if sys.version_info[0] > 2:
+ izip = zip
+ integertypes = (int,)
+
+ def b(s):
+ """Encode the string as bytes."""
+ return s.encode('latin-1')
+else:
+ from itertools import izip
+ integertypes = (long, int)
+ bytes = str # Defined in Python 2.6/2.7, but to the same value.
+
+ def b(s):
+ """Encode the string as bytes."""
+ return s
+
+def strtobytes(s):
+ """Return the bytes of the string representation of an object."""
+ return str(s).encode('latin-1')
+
+def grouper(n, iterable):
+ ''' list(grouper(2, range(4))) -> [(0, 1), (2, 3)] '''
+ args = [iter(iterable)] * n
+ return izip(*args)
+
+def eatlines(s, n):
+ """
+ >>> eatlines(b("1\\n2"), 1) == b('2')
+ True
+ >>> eatlines(b("1\\n2"), 2) == b('')
+ True
+ >>> eatlines(b("1\\n2"), 3) == b('')
+ True
+ >>> eatlines(b("1\\n2\\n3"), 1) == b('2\\n3')
+ True
+ """
+ cs = BytesIO(s)
+
+ for line in cs:
+ n -= 1
+ if n == 0:
+ return cs.read()
+ return b('')
+
+def skiplines(s, prefix):
+ """
+ Skip lines starting with prefix in s
+
+ >>> skiplines(b('a\\nb\\na\\n'), b('a')) == b('b\\na\\n')
+ True
+ >>> skiplines(b('a\\na\\n'), b('a')) == b('')
+ True
+ >>> skiplines(b(''), b('a')) == b('')
+ True
+ >>> skiplines(b('a\\nb'), b('b')) == b('a\\nb')
+ True
+ """
+ cs = BytesIO(s)
+
+ for line in cs:
+ if not line.startswith(prefix):
+ return line + cs.read()
+
+ return b('')
+
+def _cmdval(val):
+ if isinstance(val, bytes):
+ return val
+ else:
+ return strtobytes(val)
+
+def cmdbuilder(name, *args, **kwargs):
+ """
+ A helper for building the command arguments
+
+ args are the positional arguments
+
+ kwargs are the options
+ keys that are single lettered are prepended with '-', others with '--',
+ underscores are replaced with dashes
+
+ keys with False boolean values are ignored, lists add the key multiple times
+
+ None arguments are skipped
+
+ >>> cmdbuilder(b('cmd'), a=True, b=False, c=None) == [b('cmd'), b('-a')]
+ True
+ >>> cmdbuilder(b('cmd'), long=True) == [b('cmd'), b('--long')]
+ True
+ >>> cmdbuilder(b('cmd'), str=b('s')) == [b('cmd'), b('--str'), b('s')]
+ True
+ >>> cmdbuilder(b('cmd'), d_ash=True) == [b('cmd'), b('--d-ash')]
+ True
+ >>> cmdbuilder(b('cmd'), _=True) == [b('cmd'), b('-')]
+ True
+ >>> expect = [b('cmd'), b('--list'), b('1'), b('--list'), b('2')]
+ >>> cmdbuilder(b('cmd'), list=[1, 2]) == expect
+ True
+ >>> cmdbuilder(b('cmd'), None) == [b('cmd')]
+ True
+ """
+ cmd = [name]
+ for arg, val in kwargs.items():
+ if val is None:
+ continue
+
+ arg = arg.encode('latin-1').replace(b('_'), b('-'))
+ if arg != b('-'):
+ if len(arg) == 1:
+ arg = b('-') + arg
+ else:
+ arg = b('--') + arg
+ if isinstance(val, bool):
+ if val:
+ cmd.append(arg)
+ elif isinstance(val, list):
+ for v in val:
+ cmd.append(arg)
+ cmd.append(_cmdval(v))
+ else:
+ cmd.append(arg)
+ cmd.append(_cmdval(val))
+
+ for a in args:
+ if a is not None:
+ cmd.append(a)
+
+ return cmd
+
+class reterrorhandler(object):
+    """This class is meant to be used as the rawcommand() error handler
+    argument. It remembers the return value the command returned if
+    it is one of the allowed values (just [1] if none are given);
+    otherwise it raises a CommandError.
+
+ >>> e = reterrorhandler('')
+ >>> bool(e)
+ True
+ >>> e(1, 'a', '')
+ 'a'
+ >>> bool(e)
+ False
+
+ """
+ def __init__(self, args, allowed=None):
+ self.args = args
+ self.ret = 0
+ if allowed is None:
+ self.allowed = [1]
+ else:
+ self.allowed = allowed
+
+ def __call__(self, ret, out, err):
+ self.ret = ret
+ if ret not in self.allowed:
+ raise error.CommandError(self.args, ret, out, err)
+ return out
+
+ def __nonzero__(self):
+ """ Returns True if the return code was 0, False otherwise """
+ return self.ret == 0
+
+ def __bool__(self):
+ return self.__nonzero__()
+
+class propertycache(object):
+ """
+ Decorator that remembers the return value of a function call.
+
+ >>> execcount = 0
+ >>> class obj(object):
+ ... def func(self):
+ ... global execcount
+ ... execcount += 1
+ ... return []
+ ... func = propertycache(func)
+ >>> o = obj()
+ >>> o.func
+ []
+ >>> execcount
+ 1
+ >>> o.func
+ []
+ >>> execcount
+ 1
+ """
+ def __init__(self, func):
+ self.func = func
+ self.name = func.__name__
+ def __get__(self, obj, type=None):
+ result = self.func(obj)
+ setattr(obj, self.name, result)
+ return result
+
+close_fds = os.name == 'posix'
+
+startupinfo = None
+if os.name == 'nt':
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+
+def popen(args, env=None):
+ environ = None
+ if env:
+ environ = dict(os.environ)
+ environ.update(env)
+
+ return subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE, close_fds=close_fds,
+ startupinfo=startupinfo, env=environ)
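
A small sketch tying cmdbuilder() and reterrorhandler() together, mirroring how the client methods earlier in this patch call rawcommand(); `client` is assumed to be an open hgclient instance.

    from hglib.util import b, cmdbuilder, reterrorhandler

    # Single-letter keys become -x, longer keys --long-option; False/None
    # values are dropped, so this yields [b'remove', b'-f', b'stale.txt'].
    args = cmdbuilder(b('remove'), b('stale.txt'), f=True)

    eh = reterrorhandler(args)       # tolerate exit code 1 (warnings)
    client.rawcommand(args, eh=eh)
    print(bool(eh))                  # True only if hg exited with 0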
diff --git a/third_party/python/python-hglib/setup.py b/third_party/python/python-hglib/setup.py
new file mode 100644
index 0000000000..f565ae7f98
--- /dev/null
+++ b/third_party/python/python-hglib/setup.py
@@ -0,0 +1,54 @@
+import os, time
+from distutils.core import setup
+
+# query Mercurial for version number, or pull from PKG-INFO
+version = 'unknown'
+if os.path.isdir('.hg'):
+ cmd = "hg id -i -t"
+ l = os.popen(cmd).read().split()
+ while len(l) > 1 and l[-1][0].isalpha(): # remove non-numbered tags
+ l.pop()
+ if len(l) > 1: # tag found
+ version = l[-1]
+ if l[0].endswith('+'): # propagate the dirty status to the tag
+ version += '+'
+ elif len(l) == 1: # no tag found
+ cmd = 'hg parents --template "{latesttag}+{latesttagdistance}-"'
+ version = os.popen(cmd).read() + l[0]
+ if version.endswith('+'):
+ version += time.strftime('%Y%m%d')
+elif os.path.exists('.hg_archival.txt'):
+ kw = dict([[t.strip() for t in l.split(':', 1)]
+ for l in open('.hg_archival.txt')])
+ if 'tag' in kw:
+ version = kw['tag']
+ elif 'latesttag' in kw:
+ version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
+ else:
+ version = kw.get('node', '')[:12]
+elif os.path.exists('PKG-INFO'):
+ kw = dict([[t.strip() for t in l.split(':', 1)]
+ for l in open('PKG-INFO') if ':' in l])
+ version = kw.get('Version', version)
+
+setup(
+ name='python-hglib',
+ version=version,
+ author='Idan Kamara',
+ author_email='idankk86@gmail.com',
+ url='http://selenic.com/repo/python-hglib',
+ description='Mercurial Python library',
+ long_description=open(os.path.join(os.path.dirname(__file__),
+ 'README')).read(),
+ classifiers=[
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2.4',
+ 'Programming Language :: Python :: 2.5',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
+
+ ],
+ license='MIT',
+ packages=['hglib'])
diff --git a/third_party/python/python-hglib/test.py b/third_party/python/python-hglib/test.py
new file mode 100644
index 0000000000..e0b4021f45
--- /dev/null
+++ b/third_party/python/python-hglib/test.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+
+import nose
+from tests import with_hg
+
+if __name__ == '__main__':
+ nose.main(addplugins=[with_hg.WithHgPlugin()])
diff --git a/third_party/python/pytoml/PKG-INFO b/third_party/python/pytoml/PKG-INFO
new file mode 100644
index 0000000000..844436f958
--- /dev/null
+++ b/third_party/python/pytoml/PKG-INFO
@@ -0,0 +1,10 @@
+Metadata-Version: 1.0
+Name: pytoml
+Version: 0.1.10
+Summary: A parser for TOML-0.4.0
+Home-page: https://github.com/avakar/pytoml
+Author: Martin Vejnár
+Author-email: avakar@ratatanek.cz
+License: MIT
+Description: UNKNOWN
+Platform: UNKNOWN
diff --git a/third_party/python/pytoml/pytoml/__init__.py b/third_party/python/pytoml/pytoml/__init__.py
new file mode 100644
index 0000000000..222a1967fd
--- /dev/null
+++ b/third_party/python/pytoml/pytoml/__init__.py
@@ -0,0 +1,3 @@
+from .core import TomlError
+from .parser import load, loads
+from .writer import dump, dumps
diff --git a/third_party/python/pytoml/pytoml/core.py b/third_party/python/pytoml/pytoml/core.py
new file mode 100644
index 0000000000..0fcada48c6
--- /dev/null
+++ b/third_party/python/pytoml/pytoml/core.py
@@ -0,0 +1,13 @@
+class TomlError(RuntimeError):
+ def __init__(self, message, line, col, filename):
+ RuntimeError.__init__(self, message, line, col, filename)
+ self.message = message
+ self.line = line
+ self.col = col
+ self.filename = filename
+
+ def __str__(self):
+ return '{}({}, {}): {}'.format(self.filename, self.line, self.col, self.message)
+
+ def __repr__(self):
+ return 'TomlError({!r}, {!r}, {!r}, {!r})'.format(self.message, self.line, self.col, self.filename)
diff --git a/third_party/python/pytoml/pytoml/parser.py b/third_party/python/pytoml/pytoml/parser.py
new file mode 100644
index 0000000000..d4c4ac187b
--- /dev/null
+++ b/third_party/python/pytoml/pytoml/parser.py
@@ -0,0 +1,366 @@
+import string, re, sys, datetime
+from .core import TomlError
+
+if sys.version_info[0] == 2:
+ _chr = unichr
+else:
+ _chr = chr
+
+def load(fin, translate=lambda t, x, v: v):
+ return loads(fin.read(), translate=translate, filename=fin.name)
+
+def loads(s, filename='<string>', translate=lambda t, x, v: v):
+ if isinstance(s, bytes):
+ s = s.decode('utf-8')
+
+ s = s.replace('\r\n', '\n')
+
+ root = {}
+ tables = {}
+ scope = root
+
+ src = _Source(s, filename=filename)
+ ast = _p_toml(src)
+
+ def error(msg):
+ raise TomlError(msg, pos[0], pos[1], filename)
+
+ def process_value(v):
+ kind, text, value, pos = v
+ if kind == 'str' and value.startswith('\n'):
+ value = value[1:]
+ if kind == 'array':
+ if value and any(k != value[0][0] for k, t, v, p in value[1:]):
+ error('array-type-mismatch')
+ value = [process_value(item) for item in value]
+ elif kind == 'table':
+ value = dict([(k, process_value(value[k])) for k in value])
+ return translate(kind, text, value)
+
+ for kind, value, pos in ast:
+ if kind == 'kv':
+ k, v = value
+ if k in scope:
+ error('duplicate_keys. Key "{0}" was used more than once.'.format(k))
+ scope[k] = process_value(v)
+ else:
+ is_table_array = (kind == 'table_array')
+ cur = tables
+ for name in value[:-1]:
+ if isinstance(cur.get(name), list):
+ d, cur = cur[name][-1]
+ else:
+ d, cur = cur.setdefault(name, (None, {}))
+
+ scope = {}
+ name = value[-1]
+ if name not in cur:
+ if is_table_array:
+ cur[name] = [(scope, {})]
+ else:
+ cur[name] = (scope, {})
+ elif isinstance(cur[name], list):
+ if not is_table_array:
+ error('table_type_mismatch')
+ cur[name].append((scope, {}))
+ else:
+ if is_table_array:
+ error('table_type_mismatch')
+ old_scope, next_table = cur[name]
+ if old_scope is not None:
+ error('duplicate_tables')
+ cur[name] = (scope, next_table)
+
+ def merge_tables(scope, tables):
+ if scope is None:
+ scope = {}
+ for k in tables:
+ if k in scope:
+ error('key_table_conflict')
+ v = tables[k]
+ if isinstance(v, list):
+ scope[k] = [merge_tables(sc, tbl) for sc, tbl in v]
+ else:
+ scope[k] = merge_tables(v[0], v[1])
+ return scope
+
+ return merge_tables(root, tables)
+
+class _Source:
+ def __init__(self, s, filename=None):
+ self.s = s
+ self._pos = (1, 1)
+ self._last = None
+ self._filename = filename
+ self.backtrack_stack = []
+
+ def last(self):
+ return self._last
+
+ def pos(self):
+ return self._pos
+
+ def fail(self):
+ return self._expect(None)
+
+ def consume_dot(self):
+ if self.s:
+ self._last = self.s[0]
+            self.s = self.s[1:]
+ self._advance(self._last)
+ return self._last
+ return None
+
+ def expect_dot(self):
+ return self._expect(self.consume_dot())
+
+ def consume_eof(self):
+ if not self.s:
+ self._last = ''
+ return True
+ return False
+
+ def expect_eof(self):
+ return self._expect(self.consume_eof())
+
+ def consume(self, s):
+ if self.s.startswith(s):
+ self.s = self.s[len(s):]
+ self._last = s
+ self._advance(s)
+ return True
+ return False
+
+ def expect(self, s):
+ return self._expect(self.consume(s))
+
+ def consume_re(self, re):
+ m = re.match(self.s)
+ if m:
+ self.s = self.s[len(m.group(0)):]
+ self._last = m
+ self._advance(m.group(0))
+ return m
+ return None
+
+ def expect_re(self, re):
+ return self._expect(self.consume_re(re))
+
+ def __enter__(self):
+ self.backtrack_stack.append((self.s, self._pos))
+
+ def __exit__(self, type, value, traceback):
+ if type is None:
+ self.backtrack_stack.pop()
+ else:
+ self.s, self._pos = self.backtrack_stack.pop()
+ return type == TomlError
+
+ def commit(self):
+ self.backtrack_stack[-1] = (self.s, self._pos)
+
+ def _expect(self, r):
+ if not r:
+ raise TomlError('msg', self._pos[0], self._pos[1], self._filename)
+ return r
+
+ def _advance(self, s):
+ suffix_pos = s.rfind('\n')
+ if suffix_pos == -1:
+ self._pos = (self._pos[0], self._pos[1] + len(s))
+ else:
+ self._pos = (self._pos[0] + s.count('\n'), len(s) - suffix_pos)
+
+_ews_re = re.compile(r'(?:[ \t]|#[^\n]*\n|#[^\n]*\Z|\n)*')
+def _p_ews(s):
+ s.expect_re(_ews_re)
+
+_ws_re = re.compile(r'[ \t]*')
+def _p_ws(s):
+ s.expect_re(_ws_re)
+
+_escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"', '\'': '\'',
+ '\\': '\\', '/': '/', 'f': '\f' }
+
+_basicstr_re = re.compile(r'[^"\\\000-\037]*')
+_short_uni_re = re.compile(r'u([0-9a-fA-F]{4})')
+_long_uni_re = re.compile(r'U([0-9a-fA-F]{8})')
+_escapes_re = re.compile('[bnrt"\'\\\\/f]')
+_newline_esc_re = re.compile('\n[ \t\n]*')
+def _p_basicstr_content(s, content=_basicstr_re):
+ res = []
+ while True:
+ res.append(s.expect_re(content).group(0))
+ if not s.consume('\\'):
+ break
+ if s.consume_re(_newline_esc_re):
+ pass
+ elif s.consume_re(_short_uni_re) or s.consume_re(_long_uni_re):
+ res.append(_chr(int(s.last().group(1), 16)))
+ else:
+ s.expect_re(_escapes_re)
+ res.append(_escapes[s.last().group(0)])
+ return ''.join(res)
+
+_key_re = re.compile(r'[0-9a-zA-Z-_]+')
+def _p_key(s):
+ with s:
+ s.expect('"')
+ r = _p_basicstr_content(s, _basicstr_re)
+ s.expect('"')
+ return r
+ return s.expect_re(_key_re).group(0)
+
+_float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?(?:\d(?:_?\d)*))?')
+_datetime_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))')
+
+_basicstr_ml_re = re.compile(r'(?:(?:|"|"")[^"\\\000-\011\013-\037])*')
+_litstr_re = re.compile(r"[^'\000-\037]*")
+_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\011\013-\037]))*")
+def _p_value(s):
+ pos = s.pos()
+
+ if s.consume('true'):
+ return 'bool', s.last(), True, pos
+ if s.consume('false'):
+ return 'bool', s.last(), False, pos
+
+ if s.consume('"'):
+ if s.consume('""'):
+ r = _p_basicstr_content(s, _basicstr_ml_re)
+ s.expect('"""')
+ else:
+ r = _p_basicstr_content(s, _basicstr_re)
+ s.expect('"')
+ return 'str', r, r, pos
+
+ if s.consume('\''):
+ if s.consume('\'\''):
+ r = s.expect_re(_litstr_ml_re).group(0)
+ s.expect('\'\'\'')
+ else:
+ r = s.expect_re(_litstr_re).group(0)
+ s.expect('\'')
+ return 'str', r, r, pos
+
+ if s.consume_re(_datetime_re):
+ m = s.last()
+ s0 = m.group(0)
+ r = map(int, m.groups()[:6])
+ if m.group(7):
+ micro = float(m.group(7))
+ else:
+ micro = 0
+
+ if m.group(8):
+ g = int(m.group(8), 10) * 60 + int(m.group(9), 10)
+ tz = _TimeZone(datetime.timedelta(0, g * 60))
+ else:
+ tz = _TimeZone(datetime.timedelta(0, 0))
+
+ y, m, d, H, M, S = r
+ dt = datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz)
+ return 'datetime', s0, dt, pos
+
+ if s.consume_re(_float_re):
+ m = s.last().group(0)
+ r = m.replace('_','')
+ if '.' in m or 'e' in m or 'E' in m:
+ return 'float', m, float(r), pos
+ else:
+ return 'int', m, int(r, 10), pos
+
+ if s.consume('['):
+ items = []
+ with s:
+ while True:
+ _p_ews(s)
+ items.append(_p_value(s))
+ s.commit()
+ _p_ews(s)
+ s.expect(',')
+ s.commit()
+ _p_ews(s)
+ s.expect(']')
+ return 'array', None, items, pos
+
+ if s.consume('{'):
+ _p_ws(s)
+ items = {}
+ if not s.consume('}'):
+ k = _p_key(s)
+ _p_ws(s)
+ s.expect('=')
+ _p_ws(s)
+ items[k] = _p_value(s)
+ _p_ws(s)
+ while s.consume(','):
+ _p_ws(s)
+ k = _p_key(s)
+ _p_ws(s)
+ s.expect('=')
+ _p_ws(s)
+ items[k] = _p_value(s)
+ _p_ws(s)
+ s.expect('}')
+ return 'table', None, items, pos
+
+ s.fail()
+
+def _p_stmt(s):
+ pos = s.pos()
+    if s.consume('['):
+ is_array = s.consume('[')
+ _p_ws(s)
+ keys = [_p_key(s)]
+ _p_ws(s)
+ while s.consume('.'):
+ _p_ws(s)
+ keys.append(_p_key(s))
+ _p_ws(s)
+ s.expect(']')
+ if is_array:
+ s.expect(']')
+ return 'table_array' if is_array else 'table', keys, pos
+
+ key = _p_key(s)
+ _p_ws(s)
+ s.expect('=')
+ _p_ws(s)
+ value = _p_value(s)
+ return 'kv', (key, value), pos
+
+_stmtsep_re = re.compile(r'(?:[ \t]*(?:#[^\n]*)?\n)+[ \t]*')
+def _p_toml(s):
+ stmts = []
+ _p_ews(s)
+ with s:
+ stmts.append(_p_stmt(s))
+ while True:
+ s.commit()
+ s.expect_re(_stmtsep_re)
+ stmts.append(_p_stmt(s))
+ _p_ews(s)
+ s.expect_eof()
+ return stmts
+
+class _TimeZone(datetime.tzinfo):
+ def __init__(self, offset):
+ self._offset = offset
+
+ def utcoffset(self, dt):
+ return self._offset
+
+ def dst(self, dt):
+ return None
+
+ def tzname(self, dt):
+ m = self._offset.total_seconds() // 60
+ if m < 0:
+ res = '-'
+ m = -m
+ else:
+ res = '+'
+ h = m // 60
+ m = m - h * 60
+ return '{}{:.02}{:.02}'.format(res, h, m)
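
A minimal sketch of the parser entry points defined above (loads for strings, load for file objects); the TOML text and file name are made up for illustration.

    import pytoml

    doc = pytoml.loads('[server]\nhost = "localhost"\nports = [8001, 8002]\n')
    assert doc == {'server': {'host': 'localhost', 'ports': [8001, 8002]}}

    # load() reads from a file object and uses fin.name in error messages.
    with open('config.toml') as fin:
        cfg = pytoml.load(fin)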
diff --git a/third_party/python/pytoml/pytoml/writer.py b/third_party/python/pytoml/pytoml/writer.py
new file mode 100644
index 0000000000..2c9f7c69d9
--- /dev/null
+++ b/third_party/python/pytoml/pytoml/writer.py
@@ -0,0 +1,120 @@
+from __future__ import unicode_literals
+import io, datetime, sys
+
+if sys.version_info[0] == 3:
+ long = int
+ unicode = str
+
+
+def dumps(obj, sort_keys=False):
+ fout = io.StringIO()
+ dump(fout, obj, sort_keys=sort_keys)
+ return fout.getvalue()
+
+
+_escapes = {'\n': 'n', '\r': 'r', '\\': '\\', '\t': 't', '\b': 'b', '\f': 'f', '"': '"'}
+
+
+def _escape_string(s):
+ res = []
+ start = 0
+
+ def flush():
+ if start != i:
+ res.append(s[start:i])
+ return i + 1
+
+ i = 0
+ while i < len(s):
+ c = s[i]
+ if c in '"\\\n\r\t\b\f':
+ start = flush()
+ res.append('\\' + _escapes[c])
+ elif ord(c) < 0x20:
+ start = flush()
+ res.append('\\u%04x' % ord(c))
+ i += 1
+
+ flush()
+ return '"' + ''.join(res) + '"'
+
+
+def _escape_id(s):
+ if any(not c.isalnum() and c not in '-_' for c in s):
+ return _escape_string(s)
+ return s
+
+
+def _format_list(v):
+ return '[{0}]'.format(', '.join(_format_value(obj) for obj in v))
+
+# Formula from:
+# https://docs.python.org/2/library/datetime.html#datetime.timedelta.total_seconds
+# Once support for py26 is dropped, this can be replaced by td.total_seconds()
+def _total_seconds(td):
+ return ((td.microseconds
+ + (td.seconds + td.days * 24 * 3600) * 10**6) / 10.0**6)
+
+def _format_value(v):
+ if isinstance(v, bool):
+ return 'true' if v else 'false'
+ if isinstance(v, int) or isinstance(v, long):
+ return unicode(v)
+ if isinstance(v, float):
+ return '{0:.17f}'.format(v)
+ elif isinstance(v, unicode) or isinstance(v, bytes):
+ return _escape_string(v)
+ elif isinstance(v, datetime.datetime):
+ offs = v.utcoffset()
+ offs = _total_seconds(offs) // 60 if offs is not None else 0
+
+ if offs == 0:
+ suffix = 'Z'
+ else:
+ if offs > 0:
+ suffix = '+'
+ else:
+ suffix = '-'
+ offs = -offs
+ suffix = '{0}{1:.02}{2:.02}'.format(suffix, offs // 60, offs % 60)
+
+ if v.microsecond:
+ return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix
+ else:
+ return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix
+ elif isinstance(v, list):
+ return _format_list(v)
+ else:
+ raise RuntimeError(v)
+
+
+def dump(fout, obj, sort_keys=False):
+ tables = [((), obj, False)]
+
+ while tables:
+ if sort_keys:
+ tables.sort(key=lambda tup: tup[0], reverse=True)
+ name, table, is_array = tables.pop()
+ if name:
+ section_name = '.'.join(_escape_id(c) for c in name)
+ if is_array:
+ fout.write('[[{0}]]\n'.format(section_name))
+ else:
+ fout.write('[{0}]\n'.format(section_name))
+
+ table_keys = sorted(table.keys()) if sort_keys else table.keys()
+ for k in table_keys:
+ v = table[k]
+ if isinstance(v, dict):
+ tables.append((name + (k,), v, False))
+ elif isinstance(v, list) and v and all(isinstance(o, dict) for o in v):
+ tables.extend((name + (k,), d, True) for d in reversed(v))
+ elif v is None:
+ # based on mojombo's comment: https://github.com/toml-lang/toml/issues/146#issuecomment-25019344
+ fout.write(
+ '#{} = null # To use: uncomment and replace null with value\n'.format(_escape_id(k)))
+ else:
+ fout.write('{0} = {1}\n'.format(_escape_id(k), _format_value(v)))
+
+ if tables:
+ fout.write('\n')
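
A short sketch of the writer side: dump()/dumps() walk nested dicts, emit [table] sections, and format scalar values with _format_value() above.

    import datetime
    import pytoml

    config = {
        'title': 'example',
        'owner': {'name': 'Tom', 'dob': datetime.datetime(1979, 5, 27)},
    }
    print(pytoml.dumps(config, sort_keys=True))
    # title = "example"
    #
    # [owner]
    # dob = 1979-05-27T00:00:00Z
    # name = "Tom"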
diff --git a/third_party/python/pytoml/setup.cfg b/third_party/python/pytoml/setup.cfg
new file mode 100644
index 0000000000..b14b0bc3d4
--- /dev/null
+++ b/third_party/python/pytoml/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/third_party/python/pytoml/setup.py b/third_party/python/pytoml/setup.py
new file mode 100644
index 0000000000..98c08a540c
--- /dev/null
+++ b/third_party/python/pytoml/setup.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+from setuptools import setup
+
+setup(
+ name='pytoml',
+ version='0.1.10',
+
+ description='A parser for TOML-0.4.0',
+ author='Martin Vejnár',
+ author_email='avakar@ratatanek.cz',
+ url='https://github.com/avakar/pytoml',
+ license='MIT',
+
+ packages=['pytoml'],
+ )
diff --git a/third_party/python/redo/PKG-INFO b/third_party/python/redo/PKG-INFO
new file mode 100644
index 0000000000..0bf4bd5d9f
--- /dev/null
+++ b/third_party/python/redo/PKG-INFO
@@ -0,0 +1,10 @@
+Metadata-Version: 1.0
+Name: redo
+Version: 2.0.3
+Summary: Utilities to retry Python callables.
+Home-page: https://github.com/bhearsum/redo
+Author: Ben Hearsum
+Author-email: ben@hearsum.ca
+License: UNKNOWN
+Description: UNKNOWN
+Platform: UNKNOWN
diff --git a/third_party/python/redo/README.md b/third_party/python/redo/README.md
new file mode 100644
index 0000000000..bb9ea4c77a
--- /dev/null
+++ b/third_party/python/redo/README.md
@@ -0,0 +1,147 @@
+# Redo - Utilities to retry Python callables
+
+## Introduction
+
+Redo provides various ways to add seamless retry behaviour to any Python callable. It includes a plain function (`redo.retry`), a decorator (`redo.retriable`), and a context manager (`redo.retrying`) so you can integrate it in whatever way suits your project best. As a bonus, a standalone command-line interface (`retry`) is also included.
+
+## Installation
+
+To install with pip, run the following command:
+> pip install redo
+
+## How To Use
+Below is the list of available functions:
+* retrier
+* retry
+* retriable
+* retrying (contextmanager)
+
+### retrier(attempts=5, sleeptime=10, max_sleeptime=300, sleepscale=1.5, jitter=1)
+A generator function that sleeps between retries and handles exponential backoff and jitter. The action you are retrying is meant to run after retrier yields. At each iteration, we sleep for `sleeptime + random.uniform(-jitter, jitter)`; afterwards `sleeptime` is multiplied by `sleepscale` for the next iteration.
+
+**Arguments Detail:**
+
+1. **attempts (int):** maximum number of times to try; defaults to 5
+2. **sleeptime (float):** how many seconds to sleep between tries; defaults to 10 seconds
+3. **max_sleeptime (float):** the longest we'll sleep, in seconds; defaults to 300s (five minutes)
+4. **sleepscale (float):** how much to multiply the sleep time by each iteration; defaults to 1.5
+5. **jitter (float):** random jitter to introduce to the sleep time each iteration; the amount is chosen at random between `[-jitter, +jitter]`. Defaults to 1
+
+**Yields:**
+None, a maximum of `attempts` number of times
+
+**Example:**
+
+ >>> n = 0
+ >>> for _ in retrier(sleeptime=0, jitter=0):
+ ... if n == 3:
+ ... # We did the thing!
+ ... break
+ ... n += 1
+ >>> n
+ 3
+ >>> n = 0
+ >>> for _ in retrier(sleeptime=0, jitter=0):
+ ... if n == 6:
+ ... # We did the thing!
+ ... break
+ ... n += 1
+ ... else:
+ ... print("max tries hit")
+ max tries hit
+
+### retry(action, attempts=5, sleeptime=60, max_sleeptime=5 * 60, sleepscale=1.5, jitter=1, retry_exceptions=(Exception,), cleanup=None, args=(), kwargs={})
+Calls an action function until it succeeds, or we give up.
+
+**Arguments Detail:**
+
+1. **action (callable):** the function to retry
+2. **attempts (int):** maximum number of times to try; defaults to 5
+3. **sleeptime (float):** how many seconds to sleep between tries; defaults to 60s (one minute)
+4. **max_sleeptime (float):** the longest we'll sleep, in seconds; defaults to 300s (five minutes)
+5. **sleepscale (float):** how much to multiply the sleep time by each iteration; defaults to 1.5
+6. **jitter (int):** random jitter to introduce to sleep time each iteration. The amount is chosen at random between `[-jitter, +jitter]` defaults to 1
+7. **retry_exceptions (tuple):** tuple of exceptions to be caught. If other exceptions are raised by `action()`, then these are immediately re-raised to the caller.
+8. **cleanup (callable):** optional; called if one of `retry_exceptions` is caught. No arguments are passed to the cleanup function; if your cleanup requires arguments, consider using `functools.partial` or a `lambda` function.
+9. **args (tuple):** positional arguments to call `action` with
+10. **kwargs (dict):** keyword arguments to call `action` with
+
+**Output:**
+Whatever `action(*args, **kwargs)` returns
+
+**Raises:**
+Whatever `action(*args, **kwargs)` raises. `retry_exceptions` are caught
+up until the last attempt, in which case they are re-raised.
+
+**Example:**
+
+ >>> count = 0
+ >>> def foo():
+ ... global count
+ ... count += 1
+ ... print(count)
+ ... if count < 3:
+ ... raise ValueError("count is too small!")
+ ... return "success!"
+ >>> retry(foo, sleeptime=0, jitter=0)
+ 1
+ 2
+ 3
+ 'success!'
+
+### retriable(*retry_args, **retry_kwargs)
+A decorator factory for `retry()`. Wrap your function in `@retriable(...)` to give it retry powers!
+
+**Arguments Detail:**
+ Same as for `retry`, with the exception of `action`, `args`, and `kwargs`,
+ which are left to the normal function definition.
+
+**Output:**
+A function decorator
+
+**Example:**
+
+ >>> count = 0
+ >>> @retriable(sleeptime=0, jitter=0)
+ ... def foo():
+ ... global count
+ ... count += 1
+ ... print(count)
+ ... if count < 3:
+ ... raise ValueError("count too small")
+ ... return "success!"
+ >>> foo()
+ 1
+ 2
+ 3
+ 'success!'
+
+### retrying(func, *retry_args, **retry_kwargs)
+A context manager for wrapping functions with retry functionality.
+
+**Arguments Detail:**
+
+1. **func (callable):** the function to wrap
+other arguments as per `retry`
+
+**Output:**
+A context manager that returns `retriable(func)` on `__enter__`
+
+**Example:**
+
+ >>> count = 0
+ >>> def foo():
+ ... global count
+ ... count += 1
+ ... print(count)
+ ... if count < 3:
+ ... raise ValueError("count too small")
+ ... return "success!"
+ >>> with retrying(foo, sleeptime=0, jitter=0) as f:
+ ... f()
+ 1
+ 2
+ 3
+ 'success!' \ No newline at end of file
diff --git a/third_party/python/redo/redo.egg-info/PKG-INFO b/third_party/python/redo/redo.egg-info/PKG-INFO
new file mode 100644
index 0000000000..0bf4bd5d9f
--- /dev/null
+++ b/third_party/python/redo/redo.egg-info/PKG-INFO
@@ -0,0 +1,10 @@
+Metadata-Version: 1.0
+Name: redo
+Version: 2.0.3
+Summary: Utilities to retry Python callables.
+Home-page: https://github.com/bhearsum/redo
+Author: Ben Hearsum
+Author-email: ben@hearsum.ca
+License: UNKNOWN
+Description: UNKNOWN
+Platform: UNKNOWN
diff --git a/third_party/python/redo/redo.egg-info/SOURCES.txt b/third_party/python/redo/redo.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..a238a81759
--- /dev/null
+++ b/third_party/python/redo/redo.egg-info/SOURCES.txt
@@ -0,0 +1,10 @@
+README.md
+setup.cfg
+setup.py
+redo/__init__.py
+redo/cmd.py
+redo.egg-info/PKG-INFO
+redo.egg-info/SOURCES.txt
+redo.egg-info/dependency_links.txt
+redo.egg-info/entry_points.txt
+redo.egg-info/top_level.txt \ No newline at end of file
diff --git a/third_party/python/redo/redo.egg-info/dependency_links.txt b/third_party/python/redo/redo.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/redo/redo.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/third_party/python/redo/redo.egg-info/entry_points.txt b/third_party/python/redo/redo.egg-info/entry_points.txt
new file mode 100644
index 0000000000..44eccdcfca
--- /dev/null
+++ b/third_party/python/redo/redo.egg-info/entry_points.txt
@@ -0,0 +1,3 @@
+[console_scripts]
+retry = redo.cmd:main
+
diff --git a/third_party/python/redo/redo.egg-info/top_level.txt b/third_party/python/redo/redo.egg-info/top_level.txt
new file mode 100644
index 0000000000..f49789cbab
--- /dev/null
+++ b/third_party/python/redo/redo.egg-info/top_level.txt
@@ -0,0 +1 @@
+redo
diff --git a/third_party/python/redo/redo/__init__.py b/third_party/python/redo/redo/__init__.py
new file mode 100644
index 0000000000..9814805990
--- /dev/null
+++ b/third_party/python/redo/redo/__init__.py
@@ -0,0 +1,265 @@
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import time
+from functools import wraps
+from contextlib import contextmanager
+import logging
+import random
+
+log = logging.getLogger(__name__)
+
+
+def retrier(attempts=5, sleeptime=10, max_sleeptime=300, sleepscale=1.5, jitter=1):
+ """
+ A generator function that sleeps between retries, handles exponential
+ backoff and jitter. The action you are retrying is meant to run after
+ retrier yields.
+
+ At each iteration, we sleep for sleeptime + random.uniform(-jitter, jitter).
+ Afterwards sleeptime is multiplied by sleepscale for the next iteration.
+
+ Args:
+ attempts (int): maximum number of times to try; defaults to 5
+ sleeptime (float): how many seconds to sleep between tries; defaults to
+ 10 seconds
+ max_sleeptime (float): the longest we'll sleep, in seconds; defaults to
+ 300s (five minutes)
+ sleepscale (float): how much to multiply the sleep time by each
+ iteration; defaults to 1.5
+ jitter (float): random jitter to introduce to sleep time each iteration.
+ the amount is chosen at random between [-jitter, +jitter]
+ defaults to 1
+
+ Yields:
+ None, a maximum of `attempts` number of times
+
+ Example:
+ >>> n = 0
+ >>> for _ in retrier(sleeptime=0, jitter=0):
+ ... if n == 3:
+ ... # We did the thing!
+ ... break
+ ... n += 1
+ >>> n
+ 3
+
+ >>> n = 0
+ >>> for _ in retrier(sleeptime=0, jitter=0):
+ ... if n == 6:
+ ... # We did the thing!
+ ... break
+ ... n += 1
+ ... else:
+ ... print("max tries hit")
+ max tries hit
+ """
+ jitter = jitter or 0 # py35 barfs on the next line if jitter is None
+ if jitter > sleeptime:
+ # To prevent negative sleep times
+ raise Exception(
+ "jitter ({}) must be less than sleep time ({})".format(jitter, sleeptime)
+ )
+
+ sleeptime_real = sleeptime
+ for _ in range(attempts):
+ log.debug("attempt %i/%i", _ + 1, attempts)
+
+ yield sleeptime_real
+
+ if jitter:
+ sleeptime_real = sleeptime + random.uniform(-jitter, jitter)
+ # our jitter should scale along with the sleeptime
+ jitter = jitter * sleepscale
+ else:
+ sleeptime_real = sleeptime
+
+ sleeptime *= sleepscale
+
+ if sleeptime_real > max_sleeptime:
+ sleeptime_real = max_sleeptime
+
+ # Don't need to sleep the last time
+ if _ < attempts - 1:
+ log.debug(
+ "sleeping for %.2fs (attempt %i/%i)", sleeptime_real, _ + 1, attempts
+ )
+ time.sleep(sleeptime_real)
+
+
+def retry(
+ action,
+ attempts=5,
+ sleeptime=60,
+ max_sleeptime=5 * 60,
+ sleepscale=1.5,
+ jitter=1,
+ retry_exceptions=(Exception,),
+ cleanup=None,
+ args=(),
+ kwargs={},
+ log_args=True,
+):
+ """
+ Calls an action function until it succeeds, or we give up.
+
+ Args:
+ action (callable): the function to retry
+ attempts (int): maximum number of times to try; defaults to 5
+ sleeptime (float): how many seconds to sleep between tries; defaults to
+ 60s (one minute)
+ max_sleeptime (float): the longest we'll sleep, in seconds; defaults to
+ 300s (five minutes)
+ sleepscale (float): how much to multiply the sleep time by each
+ iteration; defaults to 1.5
+ jitter (float): random jitter to introduce to sleep time each iteration.
+ the amount is chosen at random between [-jitter, +jitter]
+ defaults to 1
+ retry_exceptions (tuple): tuple of exceptions to be caught. If other
+ exceptions are raised by action(), then these
+ are immediately re-raised to the caller.
+ cleanup (callable): optional; called if one of `retry_exceptions` is
+ caught. No arguments are passed to the cleanup
+ function; if your cleanup requires arguments,
+ consider using functools.partial or a lambda
+ function.
+ args (tuple): positional arguments to call `action` with
+ kwargs (dict): keyword arguments to call `action` with
+ log_args (bool): whether or not to include args and kwargs in log
+ messages. Defaults to True.
+
+ Returns:
+ Whatever action(*args, **kwargs) returns
+
+ Raises:
+ Whatever action(*args, **kwargs) raises. `retry_exceptions` are caught
+ up until the last attempt, in which case they are re-raised.
+
+ Example:
+ >>> count = 0
+ >>> def foo():
+ ... global count
+ ... count += 1
+ ... print(count)
+ ... if count < 3:
+ ... raise ValueError("count is too small!")
+ ... return "success!"
+ >>> retry(foo, sleeptime=0, jitter=0)
+ 1
+ 2
+ 3
+ 'success!'
+ """
+ assert callable(action)
+ assert not cleanup or callable(cleanup)
+
+ action_name = getattr(action, "__name__", action)
+ if log_args and (args or kwargs):
+ log_attempt_args = (
+ "retry: calling %s with args: %s," " kwargs: %s, attempt #%d",
+ action_name,
+ args,
+ kwargs,
+ )
+ else:
+ log_attempt_args = ("retry: calling %s, attempt #%d", action_name)
+
+ if max_sleeptime < sleeptime:
+ log.debug("max_sleeptime %d less than sleeptime %d", max_sleeptime, sleeptime)
+
+ n = 1
+ for _ in retrier(
+ attempts=attempts,
+ sleeptime=sleeptime,
+ max_sleeptime=max_sleeptime,
+ sleepscale=sleepscale,
+ jitter=jitter,
+ ):
+ try:
+ logfn = log.info if n != 1 else log.debug
+ logfn_args = log_attempt_args + (n,)
+ logfn(*logfn_args)
+ return action(*args, **kwargs)
+ except retry_exceptions:
+ log.debug("retry: Caught exception: ", exc_info=True)
+ if cleanup:
+ cleanup()
+ if n == attempts:
+ log.info("retry: Giving up on %s", action_name)
+ raise
+ continue
+ finally:
+ n += 1
+
+
+def retriable(*retry_args, **retry_kwargs):
+ """
+ A decorator factory for retry(). Wrap your function in @retriable(...) to
+ give it retry powers!
+
+ Arguments:
+ Same as for `retry`, with the exception of `action`, `args`, and `kwargs`,
+ which are left to the normal function definition.
+
+ Returns:
+ A function decorator
+
+ Example:
+ >>> count = 0
+ >>> @retriable(sleeptime=0, jitter=0)
+ ... def foo():
+ ... global count
+ ... count += 1
+ ... print(count)
+ ... if count < 3:
+ ... raise ValueError("count too small")
+ ... return "success!"
+ >>> foo()
+ 1
+ 2
+ 3
+ 'success!'
+ """
+
+ def _retriable_factory(func):
+ @wraps(func)
+ def _retriable_wrapper(*args, **kwargs):
+ return retry(func, args=args, kwargs=kwargs, *retry_args, **retry_kwargs)
+
+ return _retriable_wrapper
+
+ return _retriable_factory
+
+
+@contextmanager
+def retrying(func, *retry_args, **retry_kwargs):
+ """
+ A context manager for wrapping functions with retry functionality.
+
+ Arguments:
+ func (callable): the function to wrap
+ other arguments as per `retry`
+
+ Returns:
+ A context manager that returns retriable(func) on __enter__
+
+ Example:
+ >>> count = 0
+ >>> def foo():
+ ... global count
+ ... count += 1
+ ... print(count)
+ ... if count < 3:
+ ... raise ValueError("count too small")
+ ... return "success!"
+ >>> with retrying(foo, sleeptime=0, jitter=0) as f:
+ ... f()
+ 1
+ 2
+ 3
+ 'success!'
+ """
+ yield retriable(*retry_args, **retry_kwargs)(func)
diff --git a/third_party/python/redo/redo/cmd.py b/third_party/python/redo/redo/cmd.py
new file mode 100644
index 0000000000..aeb65dbb3e
--- /dev/null
+++ b/third_party/python/redo/redo/cmd.py
@@ -0,0 +1,70 @@
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+import logging
+from subprocess import check_call, CalledProcessError
+import sys
+
+from redo import retrying
+
+log = logging.getLogger(__name__)
+
+
+def main(argv):
+ from argparse import ArgumentParser, REMAINDER
+
+ parser = ArgumentParser()
+ parser.add_argument(
+ "-a", "--attempts", type=int, default=5, help="How many times to retry."
+ )
+ parser.add_argument(
+ "-s",
+ "--sleeptime",
+ type=int,
+ default=60,
+ help="How long to sleep between attempts. Sleeptime doubles after each attempt.",
+ )
+ parser.add_argument(
+ "-m",
+ "--max-sleeptime",
+ type=int,
+ default=5 * 60,
+ help="Maximum length of time to sleep between attempts (limits backoff length).",
+ )
+ parser.add_argument("-v", "--verbose", action="store_true", default=False)
+ parser.add_argument(
+ "cmd", nargs=REMAINDER, help="Command to run. Eg: wget http://blah"
+ )
+
+ args = parser.parse_args(argv[1:])
+
+ if args.verbose:
+ logging.basicConfig(level=logging.INFO)
+ logging.getLogger("retry").setLevel(logging.INFO)
+ else:
+ logging.basicConfig(level=logging.ERROR)
+ logging.getLogger("retry").setLevel(logging.ERROR)
+
+ try:
+ with retrying(
+ check_call,
+ attempts=args.attempts,
+ sleeptime=args.sleeptime,
+ max_sleeptime=args.max_sleeptime,
+ retry_exceptions=(CalledProcessError,),
+ ) as r_check_call:
+ r_check_call(args.cmd)
+ except KeyboardInterrupt:
+ sys.exit(-1)
+ except Exception as e:
+ log.error(
+ "Unable to run command after %d attempts" % args.attempts, exc_info=True
+ )
+ rc = getattr(e, "returncode", -2)
+ sys.exit(rc)
+
+
+if __name__ == "__main__":
+ main(sys.argv)
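
The retry console script declared in entry_points.txt maps to main() above; a hedged sketch of driving it programmatically follows (the wrapped command and flag values are illustrative).

    from redo.cmd import main

    # Equivalent to running: retry -a 3 -s 5 hg pull
    # argv[0] is skipped by the argument parser.
    main(['retry', '-a', '3', '-s', '5', 'hg', 'pull'])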
diff --git a/third_party/python/redo/setup.cfg b/third_party/python/redo/setup.cfg
new file mode 100644
index 0000000000..1e3eb367c1
--- /dev/null
+++ b/third_party/python/redo/setup.cfg
@@ -0,0 +1,7 @@
+[wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/redo/setup.py b/third_party/python/redo/setup.py
new file mode 100644
index 0000000000..255f80ea52
--- /dev/null
+++ b/third_party/python/redo/setup.py
@@ -0,0 +1,16 @@
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+
+setup(
+ name="redo",
+ version="2.0.3",
+ description="Utilities to retry Python callables.",
+ author="Ben Hearsum",
+ author_email="ben@hearsum.ca",
+ packages=["redo"],
+ entry_points={"console_scripts": ["retry = redo.cmd:main"]},
+ url="https://github.com/bhearsum/redo",
+)
diff --git a/third_party/python/requests-unixsocket/.travis.yml b/third_party/python/requests-unixsocket/.travis.yml
new file mode 100644
index 0000000000..ec0465e2ab
--- /dev/null
+++ b/third_party/python/requests-unixsocket/.travis.yml
@@ -0,0 +1,17 @@
+language: python
+
+env:
+ - TOXENV=py26
+ - TOXENV=py27
+ - TOXENV=py33
+ - TOXENV=py34
+ - TOXENV=py35
+ - TOXENV=pypy
+ - TOXENV=flake8
+ - TOXENV=coverage
+
+install:
+ - travis_retry pip install tox
+
+script:
+ - tox
diff --git a/third_party/python/requests-unixsocket/AUTHORS b/third_party/python/requests-unixsocket/AUTHORS
new file mode 100644
index 0000000000..b9a817ac16
--- /dev/null
+++ b/third_party/python/requests-unixsocket/AUTHORS
@@ -0,0 +1,6 @@
+Ben Jackson <puremourning@gmail.com>
+Esben Haabendal <esben@haabendal.dk>
+Marc Abramowitz <marc@marc-abramowitz.com>
+Tomaz Solc <tomaz.solc@tablix.org>
+Will Rouesnel <w.rouesnel@gmail.com>
+William Rouesnel <William.Rouesnel@netregistry.com.au>
diff --git a/third_party/python/requests-unixsocket/ChangeLog b/third_party/python/requests-unixsocket/ChangeLog
new file mode 100644
index 0000000000..86e52f70e4
--- /dev/null
+++ b/third_party/python/requests-unixsocket/ChangeLog
@@ -0,0 +1,67 @@
+CHANGES
+=======
+
+0.1.5
+-----
+
+* Fix test_unix_domain_adapter_connection_proxies_error
+* .travis.yml tweaks
+* Remove py32; Add py35
+* Only reject proxies if they are relevant (which should be never)
+* Add urllib3 requirement
+* Add basic tests for all supported methods
+* More PEP8 compliance refactoring
+* Fix up some oversights in method parsing
+* Tweak a few things in PR 12
+* Make PEP8 compliant with autopep8
+* Improve the monkey-patching library to replicate requests more closely
+
+0.1.4
+-----
+
+* README.rst: Add PyPI badge
+* Monkeypatch requests.request
+
+0.1.3
+-----
+
+* Fix #6 ("GET parameters stripped from URL")
+* GH-7: Fallback to import from urllib3
+
+0.1.2
+-----
+
+* Tweak monkeypatch code
+* Move/expose testutils like UnixSocketServerThread
+* Make monkeypatch url_scheme arg optional
+
+0.1.1
+-----
+
+* Remove :class: role from README.rst
+
+0.1.0
+-----
+
+* Doc tweaks
+* Expose Session and monkeypatch
+* Add Travis CI build badge
+* Test Python 3.2 with tox and Travis CI
+* Use threading.Event to lessen chance of race cond
+* Add .travis.yml for Travis CI
+* Change process => thread for test UnixSocketServer
+* Make WSGIApp use server attribute for shutdown
+* Use WSGIApp callable instead of closure
+* In tests, try to gracefully kill waitress server
+* Display text coverage report in tox coverage env
+* Add test for proxies error
+* Use b literal in test; fix py3 test failures
+* tox.ini: Correct name of env pep8 => flake8
+* tox.ini: Rename pep8 => flake8
+* .gitignore: Add AUTHORS and ChangeLog
+* Add pytest-pep8
+* Improve tests
+* Yay, tests are passing
+* .gitignore: Add .eggs/ for setuptools==7.0
+* Rename README.md -> README.rst
+* Initial commit
diff --git a/third_party/python/requests-unixsocket/LICENSE b/third_party/python/requests-unixsocket/LICENSE
new file mode 100644
index 0000000000..e06d208186
--- /dev/null
+++ b/third_party/python/requests-unixsocket/LICENSE
@@ -0,0 +1,202 @@
+Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/third_party/python/requests-unixsocket/PKG-INFO b/third_party/python/requests-unixsocket/PKG-INFO
new file mode 100644
index 0000000000..3c70b88346
--- /dev/null
+++ b/third_party/python/requests-unixsocket/PKG-INFO
@@ -0,0 +1,84 @@
+Metadata-Version: 1.1
+Name: requests-unixsocket
+Version: 0.1.5
+Summary: Use requests to talk HTTP via a UNIX domain socket
+Home-page: https://github.com/msabramo/requests-unixsocket
+Author: Marc Abramowitz
+Author-email: marc@marc-abramowitz.com
+License: Apache-2
+Description: requests-unixsocket
+ ===================
+
+ .. image:: https://pypip.in/version/requests-unixsocket/badge.svg?style=flat
+ :target: https://pypi.python.org/pypi/requests-unixsocket/
+ :alt: Latest Version
+
+ .. image:: https://travis-ci.org/msabramo/requests-unixsocket.svg?branch=master
+ :target: https://travis-ci.org/msabramo/requests-unixsocket
+
+ Use `requests <http://docs.python-requests.org/>`_ to talk HTTP via a UNIX domain socket
+
+ Usage
+ -----
+
+ Explicit
+ ++++++++
+
+ You can use it by instantiating a special ``Session`` object:
+
+ .. code-block:: python
+
+ import requests_unixsocket
+
+ session = requests_unixsocket.Session()
+
+ # Access /path/to/page from /tmp/profilesvc.sock
+ r = session.get('http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page')
+ assert r.status_code == 200
+
+ Implicit (monkeypatching)
+ +++++++++++++++++++++++++
+
+        Monkeypatching allows you to use the functionality in this module while
+        making minimal changes to your code. Note that in the above example we had
+        to instantiate a special ``requests_unixsocket.Session`` object and call the
+        ``get`` method on that object. Calling ``requests.get(url)`` (the easiest and
+        probably most common way to use requests) would not work, but monkeypatching
+        makes it work.
+
+ You can monkeypatch globally:
+
+ .. code-block:: python
+
+ import requests_unixsocket
+
+ requests_unixsocket.monkeypatch()
+
+ # Access /path/to/page from /tmp/profilesvc.sock
+ r = requests.get('http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page')
+ assert r.status_code == 200
+
+ or you can do it temporarily using a context manager:
+
+ .. code-block:: python
+
+ import requests_unixsocket
+
+ with requests_unixsocket.monkeypatch():
+ # Access /path/to/page from /tmp/profilesvc.sock
+ r = requests.get('http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page')
+ assert r.status_code == 200
+
+
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Information Technology
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
diff --git a/third_party/python/requests-unixsocket/README.rst b/third_party/python/requests-unixsocket/README.rst
new file mode 100644
index 0000000000..d582f4c059
--- /dev/null
+++ b/third_party/python/requests-unixsocket/README.rst
@@ -0,0 +1,62 @@
+requests-unixsocket
+===================
+
+.. image:: https://pypip.in/version/requests-unixsocket/badge.svg?style=flat
+ :target: https://pypi.python.org/pypi/requests-unixsocket/
+ :alt: Latest Version
+
+.. image:: https://travis-ci.org/msabramo/requests-unixsocket.svg?branch=master
+ :target: https://travis-ci.org/msabramo/requests-unixsocket
+
+Use `requests <http://docs.python-requests.org/>`_ to talk HTTP via a UNIX domain socket
+
+Usage
+-----
+
+Explicit
+++++++++
+
+You can use it by instantiating a special ``Session`` object:
+
+.. code-block:: python
+
+ import requests_unixsocket
+
+ session = requests_unixsocket.Session()
+
+ # Access /path/to/page from /tmp/profilesvc.sock
+ r = session.get('http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page')
+ assert r.status_code == 200
+
+Implicit (monkeypatching)
++++++++++++++++++++++++++
+
+Monkeypatching allows you to use the functionality in this module while making
+minimal changes to your code. Note that in the above example we had to
+instantiate a special ``requests_unixsocket.Session`` object and call the
+``get`` method on that object. Calling ``requests.get(url)`` (the easiest and
+probably most common way to use requests) would not work, but monkeypatching
+makes it work.
+
+You can monkeypatch globally:
+
+.. code-block:: python
+
+ import requests_unixsocket
+
+ requests_unixsocket.monkeypatch()
+
+ # Access /path/to/page from /tmp/profilesvc.sock
+ r = requests.get('http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page')
+ assert r.status_code == 200
+
+or you can do it temporarily using a context manager:
+
+.. code-block:: python
+
+ import requests_unixsocket
+
+ with requests_unixsocket.monkeypatch():
+ # Access /path/to/page from /tmp/profilesvc.sock
+ r = requests.get('http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page')
+ assert r.status_code == 200
diff --git a/third_party/python/requests-unixsocket/pytest.ini b/third_party/python/requests-unixsocket/pytest.ini
new file mode 100644
index 0000000000..dd405b4669
--- /dev/null
+++ b/third_party/python/requests-unixsocket/pytest.ini
@@ -0,0 +1,2 @@
+[pytest]
+addopts = --tb=short --pep8
diff --git a/third_party/python/requests-unixsocket/requests_unixsocket/__init__.py b/third_party/python/requests-unixsocket/requests_unixsocket/__init__.py
new file mode 100644
index 0000000000..0fb5e1fd7f
--- /dev/null
+++ b/third_party/python/requests-unixsocket/requests_unixsocket/__init__.py
@@ -0,0 +1,77 @@
+import requests
+import sys
+
+from .adapters import UnixAdapter
+
+DEFAULT_SCHEME = 'http+unix://'
+
+
+class Session(requests.Session):
+ def __init__(self, url_scheme=DEFAULT_SCHEME, *args, **kwargs):
+ super(Session, self).__init__(*args, **kwargs)
+ self.mount(url_scheme, UnixAdapter())
+
+
+class monkeypatch(object):
+ def __init__(self, url_scheme=DEFAULT_SCHEME):
+ self.session = Session()
+ requests = self._get_global_requests_module()
+
+ # Methods to replace
+ self.methods = ('request', 'get', 'head', 'post',
+ 'patch', 'put', 'delete', 'options')
+ # Store the original methods
+ self.orig_methods = dict(
+ (m, requests.__dict__[m]) for m in self.methods)
+ # Monkey patch
+ g = globals()
+ for m in self.methods:
+ requests.__dict__[m] = g[m]
+
+ def _get_global_requests_module(self):
+ return sys.modules['requests']
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ requests = self._get_global_requests_module()
+ for m in self.methods:
+ requests.__dict__[m] = self.orig_methods[m]
+
+
+# These are the same methods defined for the global requests object
+def request(method, url, **kwargs):
+ session = Session()
+ return session.request(method=method, url=url, **kwargs)
+
+
+def get(url, **kwargs):
+ kwargs.setdefault('allow_redirects', True)
+ return request('get', url, **kwargs)
+
+
+def head(url, **kwargs):
+ kwargs.setdefault('allow_redirects', False)
+ return request('head', url, **kwargs)
+
+
+def post(url, data=None, json=None, **kwargs):
+ return request('post', url, data=data, json=json, **kwargs)
+
+
+def patch(url, data=None, **kwargs):
+ return request('patch', url, data=data, **kwargs)
+
+
+def put(url, data=None, **kwargs):
+ return request('put', url, data=data, **kwargs)
+
+
+def delete(url, **kwargs):
+ return request('delete', url, **kwargs)
+
+
+def options(url, **kwargs):
+ kwargs.setdefault('allow_redirects', True)
+ return request('options', url, **kwargs)
diff --git a/third_party/python/requests-unixsocket/requests_unixsocket/adapters.py b/third_party/python/requests-unixsocket/requests_unixsocket/adapters.py
new file mode 100644
index 0000000000..8449b868c5
--- /dev/null
+++ b/third_party/python/requests-unixsocket/requests_unixsocket/adapters.py
@@ -0,0 +1,60 @@
+import socket
+
+from requests.adapters import HTTPAdapter
+from requests.compat import urlparse, unquote
+try:
+ from requests.packages.urllib3.connection import HTTPConnection
+ from requests.packages.urllib3.connectionpool import HTTPConnectionPool
+except ImportError:
+ from urllib3.connection import HTTPConnection
+ from urllib3.connectionpool import HTTPConnectionPool
+
+
+# The following was adapted from some code from docker-py
+# https://github.com/docker/docker-py/blob/master/docker/unixconn/unixconn.py
+class UnixHTTPConnection(HTTPConnection):
+
+ def __init__(self, unix_socket_url, timeout=60):
+ """Create an HTTP connection to a unix domain socket
+
+ :param unix_socket_url: A URL with a scheme of 'http+unix' and the
+ netloc is a percent-encoded path to a unix domain socket. E.g.:
+ 'http+unix://%2Ftmp%2Fprofilesvc.sock/status/pid'
+ """
+ HTTPConnection.__init__(self, 'localhost', timeout=timeout)
+ self.unix_socket_url = unix_socket_url
+ self.timeout = timeout
+
+ def connect(self):
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ sock.settimeout(self.timeout)
+ socket_path = unquote(urlparse(self.unix_socket_url).netloc)
+ sock.connect(socket_path)
+ self.sock = sock
+
+
+class UnixHTTPConnectionPool(HTTPConnectionPool):
+
+ def __init__(self, socket_path, timeout=60):
+ HTTPConnectionPool.__init__(self, 'localhost', timeout=timeout)
+ self.socket_path = socket_path
+ self.timeout = timeout
+
+ def _new_conn(self):
+ return UnixHTTPConnection(self.socket_path, self.timeout)
+
+
+class UnixAdapter(HTTPAdapter):
+
+ def __init__(self, timeout=60):
+ super(UnixAdapter, self).__init__()
+ self.timeout = timeout
+
+ def get_connection(self, socket_path, proxies=None):
+ proxies = proxies or {}
+ proxy = proxies.get(urlparse(socket_path.lower()).scheme)
+
+ if proxy:
+ raise ValueError('%s does not support specifying proxies'
+ % self.__class__.__name__)
+ return UnixHTTPConnectionPool(socket_path, self.timeout)
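``UnixAdapter`` above is what ``requests_unixsocket.Session`` mounts onto the
``http+unix://`` scheme in the package ``__init__.py`` earlier in this diff;
it can equally be mounted onto a plain ``requests.Session``. A minimal sketch
(the socket path is illustrative):

.. code-block:: python

    import requests

    from requests_unixsocket.adapters import UnixAdapter

    session = requests.Session()
    session.mount('http+unix://', UnixAdapter())

    # The netloc of the URL is the percent-encoded filesystem path of the
    # unix domain socket; the rest is an ordinary request path.
    r = session.get('http+unix://%2Ftmp%2Fprofilesvc.sock/path/to/page')
    print(r.status_code)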
diff --git a/third_party/python/requests-unixsocket/requests_unixsocket/tests/test_requests_unixsocket.py b/third_party/python/requests-unixsocket/requests_unixsocket/tests/test_requests_unixsocket.py
new file mode 100755
index 0000000000..34151b2b18
--- /dev/null
+++ b/third_party/python/requests-unixsocket/requests_unixsocket/tests/test_requests_unixsocket.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""Tests for requests_unixsocket"""
+
+import logging
+
+import pytest
+import requests
+
+import requests_unixsocket
+from requests_unixsocket.testutils import UnixSocketServerThread
+
+
+logger = logging.getLogger(__name__)
+
+
+def test_unix_domain_adapter_ok():
+ with UnixSocketServerThread() as usock_thread:
+ session = requests_unixsocket.Session('http+unix://')
+ urlencoded_usock = requests.compat.quote_plus(usock_thread.usock)
+ url = 'http+unix://%s/path/to/page' % urlencoded_usock
+
+ for method in ['get', 'post', 'head', 'patch', 'put', 'delete',
+ 'options']:
+ logger.debug('Calling session.%s(%r) ...', method, url)
+ r = getattr(session, method)(url)
+ logger.debug(
+ 'Received response: %r with text: %r and headers: %r',
+ r, r.text, r.headers)
+ assert r.status_code == 200
+ assert r.headers['server'] == 'waitress'
+ assert r.headers['X-Transport'] == 'unix domain socket'
+ assert r.headers['X-Requested-Path'] == '/path/to/page'
+ assert r.headers['X-Socket-Path'] == usock_thread.usock
+ assert isinstance(r.connection, requests_unixsocket.UnixAdapter)
+ assert r.url == url
+ if method == 'head':
+ assert r.text == ''
+ else:
+ assert r.text == 'Hello world!'
+
+
+def test_unix_domain_adapter_url_with_query_params():
+ with UnixSocketServerThread() as usock_thread:
+ session = requests_unixsocket.Session('http+unix://')
+ urlencoded_usock = requests.compat.quote_plus(usock_thread.usock)
+ url = ('http+unix://%s'
+ '/containers/nginx/logs?timestamp=true' % urlencoded_usock)
+
+ for method in ['get', 'post', 'head', 'patch', 'put', 'delete',
+ 'options']:
+ logger.debug('Calling session.%s(%r) ...', method, url)
+ r = getattr(session, method)(url)
+ logger.debug(
+ 'Received response: %r with text: %r and headers: %r',
+ r, r.text, r.headers)
+ assert r.status_code == 200
+ assert r.headers['server'] == 'waitress'
+ assert r.headers['X-Transport'] == 'unix domain socket'
+ assert r.headers['X-Requested-Path'] == '/containers/nginx/logs'
+ assert r.headers['X-Requested-Query-String'] == 'timestamp=true'
+ assert r.headers['X-Socket-Path'] == usock_thread.usock
+ assert isinstance(r.connection, requests_unixsocket.UnixAdapter)
+ assert r.url == url
+ if method == 'head':
+ assert r.text == ''
+ else:
+ assert r.text == 'Hello world!'
+
+
+def test_unix_domain_adapter_connection_error():
+ session = requests_unixsocket.Session('http+unix://')
+
+ for method in ['get', 'post', 'head', 'patch', 'put', 'delete', 'options']:
+ with pytest.raises(requests.ConnectionError):
+ getattr(session, method)(
+ 'http+unix://socket_does_not_exist/path/to/page')
+
+
+def test_unix_domain_adapter_connection_proxies_error():
+ session = requests_unixsocket.Session('http+unix://')
+
+ for method in ['get', 'post', 'head', 'patch', 'put', 'delete', 'options']:
+ with pytest.raises(ValueError) as excinfo:
+ getattr(session, method)(
+ 'http+unix://socket_does_not_exist/path/to/page',
+ proxies={"http+unix": "http://10.10.1.10:1080"})
+ assert ('UnixAdapter does not support specifying proxies'
+ in str(excinfo.value))
+
+
+def test_unix_domain_adapter_monkeypatch():
+ with UnixSocketServerThread() as usock_thread:
+ with requests_unixsocket.monkeypatch('http+unix://'):
+ urlencoded_usock = requests.compat.quote_plus(usock_thread.usock)
+ url = 'http+unix://%s/path/to/page' % urlencoded_usock
+
+ for method in ['get', 'post', 'head', 'patch', 'put', 'delete',
+ 'options']:
+ logger.debug('Calling session.%s(%r) ...', method, url)
+ r = getattr(requests, method)(url)
+ logger.debug(
+ 'Received response: %r with text: %r and headers: %r',
+ r, r.text, r.headers)
+ assert r.status_code == 200
+ assert r.headers['server'] == 'waitress'
+ assert r.headers['X-Transport'] == 'unix domain socket'
+ assert r.headers['X-Requested-Path'] == '/path/to/page'
+ assert r.headers['X-Socket-Path'] == usock_thread.usock
+ assert isinstance(r.connection,
+ requests_unixsocket.UnixAdapter)
+ assert r.url == url
+ if method == 'head':
+ assert r.text == ''
+ else:
+ assert r.text == 'Hello world!'
+
+ for method in ['get', 'post', 'head', 'patch', 'put', 'delete', 'options']:
+ with pytest.raises(requests.exceptions.InvalidSchema):
+ getattr(requests, method)(url)
diff --git a/third_party/python/requests-unixsocket/requests_unixsocket/testutils.py b/third_party/python/requests-unixsocket/requests_unixsocket/testutils.py
new file mode 100644
index 0000000000..77e572e16f
--- /dev/null
+++ b/third_party/python/requests-unixsocket/requests_unixsocket/testutils.py
@@ -0,0 +1,97 @@
+"""
+Utilities helpful for writing tests
+
+Provides a UnixSocketServerThread that creates a running server, listening on a
+newly created unix socket.
+
+Example usage:
+
+.. code-block:: python
+
+ def test_unix_domain_adapter_monkeypatch():
+ with UnixSocketServerThread() as usock_thread:
+ with requests_unixsocket.monkeypatch('http+unix://'):
+                urlencoded_usock = quote_plus(usock_thread.usock)
+ url = 'http+unix://%s/path/to/page' % urlencoded_usock
+ r = requests.get(url)
+"""
+
+import logging
+import os
+import threading
+import time
+import uuid
+import waitress
+
+
+logger = logging.getLogger(__name__)
+
+
+class KillThread(threading.Thread):
+ def __init__(self, server, *args, **kwargs):
+ super(KillThread, self).__init__(*args, **kwargs)
+ self.server = server
+
+ def run(self):
+ time.sleep(1)
+ logger.debug('Sleeping')
+ self.server._map.clear()
+
+
+class WSGIApp:
+ server = None
+
+ def __call__(self, environ, start_response):
+ logger.debug('WSGIApp.__call__: Invoked for %s', environ['PATH_INFO'])
+ logger.debug('WSGIApp.__call__: environ = %r', environ)
+ status_text = '200 OK'
+ response_headers = [
+ ('X-Transport', 'unix domain socket'),
+ ('X-Socket-Path', environ['SERVER_PORT']),
+ ('X-Requested-Query-String', environ['QUERY_STRING']),
+ ('X-Requested-Path', environ['PATH_INFO'])]
+ body_bytes = b'Hello world!'
+ start_response(status_text, response_headers)
+ logger.debug(
+ 'WSGIApp.__call__: Responding with '
+ 'status_text = %r; '
+ 'response_headers = %r; '
+ 'body_bytes = %r',
+ status_text, response_headers, body_bytes)
+ return [body_bytes]
+
+
+class UnixSocketServerThread(threading.Thread):
+ def __init__(self, *args, **kwargs):
+ super(UnixSocketServerThread, self).__init__(*args, **kwargs)
+ self.usock = self.get_tempfile_name()
+ self.server = None
+ self.server_ready_event = threading.Event()
+
+ def get_tempfile_name(self):
+ # I'd rather use tempfile.NamedTemporaryFile but IDNA limits
+ # the hostname to 63 characters and we'll get a "InvalidURL:
+ # URL has an invalid label" error if we exceed that.
+ args = (os.stat(__file__).st_ino, os.getpid(), uuid.uuid4().hex[-8:])
+ return '/tmp/test_requests.%s_%s_%s' % args
+
+ def run(self):
+ logger.debug('Call waitress.serve in %r ...', self)
+ wsgi_app = WSGIApp()
+ server = waitress.create_server(wsgi_app, unix_socket=self.usock)
+ wsgi_app.server = server
+ self.server = server
+ self.server_ready_event.set()
+ server.run()
+
+ def __enter__(self):
+ logger.debug('Starting %r ...' % self)
+ self.start()
+ logger.debug('Started %r.', self)
+ self.server_ready_event.wait()
+ return self
+
+ def __exit__(self, *args):
+ self.server_ready_event.wait()
+ if self.server:
+ KillThread(self.server).start()
diff --git a/third_party/python/requests-unixsocket/requirements.txt b/third_party/python/requests-unixsocket/requirements.txt
new file mode 100644
index 0000000000..df0e6d638a
--- /dev/null
+++ b/third_party/python/requests-unixsocket/requirements.txt
@@ -0,0 +1,2 @@
+requests>=1.1
+urllib3>=1.8
diff --git a/third_party/python/requests-unixsocket/setup.cfg b/third_party/python/requests-unixsocket/setup.cfg
new file mode 100644
index 0000000000..1131013ee5
--- /dev/null
+++ b/third_party/python/requests-unixsocket/setup.cfg
@@ -0,0 +1,33 @@
+[metadata]
+name = requests-unixsocket
+author = Marc Abramowitz
+author-email = marc@marc-abramowitz.com
+summary = Use requests to talk HTTP via a UNIX domain socket
+description-file = README.rst
+license = Apache-2
+home-page = https://github.com/msabramo/requests-unixsocket
+classifier =
+ Development Status :: 3 - Alpha
+ Intended Audience :: Developers
+ Intended Audience :: Information Technology
+ License :: OSI Approved :: Apache Software License
+ Operating System :: OS Independent
+ Programming Language :: Python
+ Programming Language :: Python :: 2
+ Programming Language :: Python :: 2.7
+ Programming Language :: Python :: 2.6
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3.3
+test_suite = requests_unixsocket.tests
+
+[files]
+packages = requests_unixsocket
+
+[wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/third_party/python/requests-unixsocket/setup.py b/third_party/python/requests-unixsocket/setup.py
new file mode 100755
index 0000000000..aa2d8a0194
--- /dev/null
+++ b/third_party/python/requests-unixsocket/setup.py
@@ -0,0 +1,8 @@
+#!/usr/bin/env python
+
+from setuptools import setup
+
+setup(
+ setup_requires=['pbr'],
+ pbr=True,
+)
diff --git a/third_party/python/requests-unixsocket/test-requirements.txt b/third_party/python/requests-unixsocket/test-requirements.txt
new file mode 100644
index 0000000000..a10e005013
--- /dev/null
+++ b/third_party/python/requests-unixsocket/test-requirements.txt
@@ -0,0 +1,4 @@
+pytest
+pytest-capturelog
+pytest-pep8
+waitress
diff --git a/third_party/python/requests-unixsocket/tox.ini b/third_party/python/requests-unixsocket/tox.ini
new file mode 100644
index 0000000000..d061cb879e
--- /dev/null
+++ b/third_party/python/requests-unixsocket/tox.ini
@@ -0,0 +1,48 @@
+[tox]
+envlist = py26, py27, py33, py34, py35, pypy, flake8
+
+[testenv]
+commands = py.test {posargs:requests_unixsocket/tests}
+deps =
+ -r{toxinidir}/requirements.txt
+ -r{toxinidir}/test-requirements.txt
+
+[testenv:flake8]
+commands = flake8
+deps =
+ flake8
+ {[testenv]deps}
+
+[testenv:venv]
+commands = {posargs}
+
+[testenv:coverage]
+commands =
+ coverage erase
+ coverage run --source requests_unixsocket -m py.test requests_unixsocket/tests
+ coverage report --show-missing
+ coverage html
+deps =
+ coverage
+ {[testenv]deps}
+
+[testenv:doctest]
+# note this only works under python 3 because of unicode literals
+commands =
+ python -m doctest README.rst
+
+[testenv:sphinx-doctest]
+# note this only works under python 3 because of unicode literals
+commands =
+ mkdir build/sphinx/doctest
+ sphinx-build -b doctest docs build/sphinx/doctest
+deps =
+ pbr
+ {[testenv]deps}
+
+[testenv:docs]
+commands = python setup.py build_sphinx
+
+[flake8]
+max_line_length = 79
+exclude = .git,.tox,dist,docs,*egg
diff --git a/third_party/python/requests/HISTORY.rst b/third_party/python/requests/HISTORY.rst
new file mode 100644
index 0000000000..f8c1a54549
--- /dev/null
+++ b/third_party/python/requests/HISTORY.rst
@@ -0,0 +1,1130 @@
+.. :changelog:
+
+Release History
+---------------
+
+2.9.1 (2015-12-21)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Resolve regression introduced in 2.9.0 that made it impossible to send binary
+ strings as bodies in Python 3.
+- Fixed errors when calculating cookie expiration dates in certain locales.
+
+**Miscellaneous**
+
+- Updated bundled urllib3 to 1.13.1.
+
+2.9.0 (2015-12-15)
+++++++++++++++++++
+
+**Minor Improvements** (Backwards compatible)
+
+- The ``verify`` keyword argument now supports being passed a path to a
+ directory of CA certificates, not just a single-file bundle.
+- Warnings are now emitted when sending files opened in text mode.
+- Added the 511 Network Authentication Required status code to the status code
+ registry.
+
+**Bugfixes**
+
+- For file-like objects that are not seeked to the very beginning, we now
+ send the content length for the number of bytes we will actually read, rather
+ than the total size of the file, allowing partial file uploads.
+- When uploading file-like objects, if they are empty or have no obvious
+ content length we set ``Transfer-Encoding: chunked`` rather than
+ ``Content-Length: 0``.
+- We correctly receive the response in buffered mode when uploading chunked
+ bodies.
+- We now handle being passed a query string as a bytestring on Python 3, by
+ decoding it as UTF-8.
+- Sessions are now closed in all cases (exceptional and not) when using the
+ functional API rather than leaking and waiting for the garbage collector to
+ clean them up.
+- Correctly handle digest auth headers with a malformed ``qop`` directive that
+ contains no token, by treating it the same as if no ``qop`` directive was
+ provided at all.
+- Minor performance improvements when removing specific cookies by name.
+
+**Miscellaneous**
+
+- Updated urllib3 to 1.13.
+
+2.8.1 (2015-10-13)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Update certificate bundle to match ``certifi`` 2015.9.6.2's weak certificate
+ bundle.
+- Fix a bug in 2.8.0 where requests would raise ``ConnectTimeout`` instead of
+ ``ConnectionError``
+- When using the PreparedRequest flow, requests will now correctly respect the
+ ``json`` parameter. Broken in 2.8.0.
+- When using the PreparedRequest flow, requests will now correctly handle a
+ Unicode-string method name on Python 2. Broken in 2.8.0.
+
+2.8.0 (2015-10-05)
+++++++++++++++++++
+
+**Minor Improvements** (Backwards Compatible)
+
+- Requests now supports per-host proxies. This allows the ``proxies``
+ dictionary to have entries of the form
+ ``{'<scheme>://<hostname>': '<proxy>'}``. Host-specific proxies will be used
+ in preference to the previously-supported scheme-specific ones, but the
+ previous syntax will continue to work.
+- ``Response.raise_for_status`` now prints the URL that failed as part of the
+ exception message.
+- ``requests.utils.get_netrc_auth`` now takes a ``raise_errors`` kwarg,
+ defaulting to ``False``. When ``True``, errors parsing ``.netrc`` files cause
+ exceptions to be thrown.
+- Change to bundled projects import logic to make it easier to unbundle
+ requests downstream.
+- Changed the default User-Agent string to avoid leaking data on Linux: now
+ contains only the requests version.
+
+**Bugfixes**
+
+- The ``json`` parameter to ``post()`` and friends will now only be used if
+ neither ``data`` nor ``files`` are present, consistent with the
+ documentation.
+- We now ignore empty fields in the ``NO_PROXY`` environment variable.
+- Fixed problem where ``httplib.BadStatusLine`` would get raised if combining
+ ``stream=True`` with ``contextlib.closing``.
+- Prevented bugs where we would attempt to return the same connection back to
+ the connection pool twice when sending a Chunked body.
+- Miscellaneous minor internal changes.
+- Digest Auth support is now thread safe.
+
+**Updates**
+
+- Updated urllib3 to 1.12.
+
+2.7.0 (2015-05-03)
+++++++++++++++++++
+
+This is the first release that follows our new release process. For more, see
+`our documentation
+<http://docs.python-requests.org/en/latest/community/release-process/>`_.
+
+**Bugfixes**
+
+- Updated urllib3 to 1.10.4, resolving several bugs involving chunked transfer
+ encoding and response framing.
+
+2.6.2 (2015-04-23)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Fix regression where compressed data that was sent as chunked data was not
+ properly decompressed. (#2561)
+
+2.6.1 (2015-04-22)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Remove VendorAlias import machinery introduced in v2.5.2.
+
+- Simplify the PreparedRequest.prepare API: We no longer require the user to
+ pass an empty list to the hooks keyword argument. (c.f. #2552)
+
+- Resolve redirects now receives and forwards all of the original arguments to
+ the adapter. (#2503)
+
+- Handle UnicodeDecodeErrors when trying to deal with a unicode URL that
+ cannot be encoded in ASCII. (#2540)
+
+- Populate the parsed path of the URI field when performing Digest
+ Authentication. (#2426)
+
+- Copy a PreparedRequest's CookieJar more reliably when it is not an instance
+ of RequestsCookieJar. (#2527)
+
+2.6.0 (2015-03-14)
+++++++++++++++++++
+
+**Bugfixes**
+
+- CVE-2015-2296: Fix handling of cookies on redirect. Previously a cookie
+ without a host value set would use the hostname for the redirected URL
+ exposing requests users to session fixation attacks and potentially cookie
+ stealing. This was disclosed privately by Matthew Daley of
+ `BugFuzz <https://bugfuzz.com>`_. This affects all versions of requests from
+ v2.1.0 to v2.5.3 (inclusive on both ends).
+
+- Fix error when requests is an ``install_requires`` dependency and ``python
+ setup.py test`` is run. (#2462)
+
+- Fix error when urllib3 is unbundled and requests continues to use the
+ vendored import location.
+
+- Include fixes to ``urllib3``'s header handling.
+
+- Requests' handling of unvendored dependencies is now more restrictive.
+
+**Features and Improvements**
+
+- Support bytearrays when passed as parameters in the ``files`` argument.
+ (#2468)
+
+- Avoid data duplication when creating a request with ``str``, ``bytes``, or
+ ``bytearray`` input to the ``files`` argument.
+
+2.5.3 (2015-02-24)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Revert changes to our vendored certificate bundle. For more context see
+ (#2455, #2456, and http://bugs.python.org/issue23476)
+
+2.5.2 (2015-02-23)
+++++++++++++++++++
+
+**Features and Improvements**
+
+- Add sha256 fingerprint support. (`shazow/urllib3#540`_)
+
+- Improve the performance of headers. (`shazow/urllib3#544`_)
+
+**Bugfixes**
+
+- Copy pip's import machinery. When downstream redistributors remove
+ requests.packages.urllib3 the import machinery will continue to let those
+ same symbols work. Example usage in requests' documentation and 3rd-party
+ libraries relying on the vendored copies of urllib3 will work without having
+ to fallback to the system urllib3.
+
+- Attempt to quote parts of the URL on redirect if unquoting and then quoting
+ fails. (#2356)
+
+- Fix filename type check for multipart form-data uploads. (#2411)
+
+- Properly handle the case where a server issuing digest authentication
+ challenges provides both auth and auth-int qop-values. (#2408)
+
+- Fix a socket leak. (`shazow/urllib3#549`_)
+
+- Fix multiple ``Set-Cookie`` headers properly. (`shazow/urllib3#534`_)
+
+- Disable the built-in hostname verification. (`shazow/urllib3#526`_)
+
+- Fix the behaviour of decoding an exhausted stream. (`shazow/urllib3#535`_)
+
+**Security**
+
+- Pulled in an updated ``cacert.pem``.
+
+- Drop RC4 from the default cipher list. (`shazow/urllib3#551`_)
+
+.. _shazow/urllib3#551: https://github.com/shazow/urllib3/pull/551
+.. _shazow/urllib3#549: https://github.com/shazow/urllib3/pull/549
+.. _shazow/urllib3#544: https://github.com/shazow/urllib3/pull/544
+.. _shazow/urllib3#540: https://github.com/shazow/urllib3/pull/540
+.. _shazow/urllib3#535: https://github.com/shazow/urllib3/pull/535
+.. _shazow/urllib3#534: https://github.com/shazow/urllib3/pull/534
+.. _shazow/urllib3#526: https://github.com/shazow/urllib3/pull/526
+
+2.5.1 (2014-12-23)
+++++++++++++++++++
+
+**Behavioural Changes**
+
+- Only catch HTTPErrors in raise_for_status (#2382)
+
+**Bugfixes**
+
+- Handle LocationParseError from urllib3 (#2344)
+- Handle file-like object filenames that are not strings (#2379)
+- Unbreak HTTPDigestAuth handler. Allow new nonces to be negotiated (#2389)
+
+2.5.0 (2014-12-01)
+++++++++++++++++++
+
+**Improvements**
+
+- Allow usage of urllib3's Retry object with HTTPAdapters (#2216)
+- The ``iter_lines`` method on a response now accepts a delimiter with which
+ to split the content (#2295)
+
+**Behavioural Changes**
+
+- Add deprecation warnings to functions in requests.utils that will be removed
+ in 3.0 (#2309)
+- Sessions used by the functional API are always closed (#2326)
+- Restrict requests to HTTP/1.1 and HTTP/1.0 (stop accepting HTTP/0.9) (#2323)
+
+**Bugfixes**
+
+- Only parse the URL once (#2353)
+- Allow Content-Length header to always be overridden (#2332)
+- Properly handle files in HTTPDigestAuth (#2333)
+- Cap redirect_cache size to prevent memory abuse (#2299)
+- Fix HTTPDigestAuth handling of redirects after authenticating successfully
+ (#2253)
+- Fix crash with custom method parameter to Session.request (#2317)
+- Fix how Link headers are parsed using the regular expression library (#2271)
+
+**Documentation**
+
+- Add more references for interlinking (#2348)
+- Update CSS for theme (#2290)
+- Update width of buttons and sidebar (#2289)
+- Replace references of Gittip with Gratipay (#2282)
+- Add link to changelog in sidebar (#2273)
+
+2.4.3 (2014-10-06)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Unicode URL improvements for Python 2.
+- Re-order JSON param for backwards compat.
+- Automatically defrag authentication schemes from host/pass URIs. (`#2249 <https://github.com/kennethreitz/requests/issues/2249>`_)
+
+
+2.4.2 (2014-10-05)
+++++++++++++++++++
+
+**Improvements**
+
+- FINALLY! Add json parameter for uploads! (`#2258 <https://github.com/kennethreitz/requests/pull/2258>`_)
+- Support for bytestring URLs on Python 3.x (`#2238 <https://github.com/kennethreitz/requests/pull/2238>`_)
+
+**Bugfixes**
+
+- Avoid getting stuck in a loop (`#2244 <https://github.com/kennethreitz/requests/pull/2244>`_)
+- Multiple calls to iter* fail with unhelpful error. (`#2240 <https://github.com/kennethreitz/requests/issues/2240>`_, `#2241 <https://github.com/kennethreitz/requests/issues/2241>`_)
+
+**Documentation**
+
+- Correct redirection introduction (`#2245 <https://github.com/kennethreitz/requests/pull/2245/>`_)
+- Added example of how to send multiple files in one request. (`#2227 <https://github.com/kennethreitz/requests/pull/2227/>`_)
+- Clarify how to pass a custom set of CAs (`#2248 <https://github.com/kennethreitz/requests/pull/2248/>`_)
+
+
+
+2.4.1 (2014-09-09)
+++++++++++++++++++
+
+- Now has a "security" package extras set, ``$ pip install requests[security]``
+- Requests will now use Certifi if it is available.
+- Capture and re-raise urllib3 ProtocolError
+- Bugfix for responses that attempt to redirect to themselves forever (wtf?).
+
+
+2.4.0 (2014-08-29)
+++++++++++++++++++
+
+**Behavioral Changes**
+
+- ``Connection: keep-alive`` header is now sent automatically.
+
+**Improvements**
+
+- Support for connect timeouts! Timeout now accepts a tuple (connect, read) which is used to set individual connect and read timeouts.
+- Allow copying of PreparedRequests without headers/cookies.
+- Updated bundled urllib3 version.
+- Refactored settings loading from environment -- new `Session.merge_environment_settings`.
+- Handle socket errors in iter_content.
+
+
+2.3.0 (2014-05-16)
+++++++++++++++++++
+
+**API Changes**
+
+- New ``Response`` property ``is_redirect``, which is true when the
+ library could have processed this response as a redirection (whether
+ or not it actually did).
+- The ``timeout`` parameter now affects requests with both ``stream=True`` and
+ ``stream=False`` equally.
+- The change in v2.0.0 to mandate explicit proxy schemes has been reverted.
+ Proxy schemes now default to ``http://``.
+- The ``CaseInsensitiveDict`` used for HTTP headers now behaves like a normal
+  dictionary when referenced as a string or viewed in the interpreter.
+
+**Bugfixes**
+
+- No longer expose Authorization or Proxy-Authorization headers on redirect.
+ Fix CVE-2014-1829 and CVE-2014-1830 respectively.
+- Authorization is re-evaluated each redirect.
+- On redirect, pass url as native strings.
+- Fall-back to autodetected encoding for JSON when Unicode detection fails.
+- Headers set to ``None`` on the ``Session`` are now correctly not sent.
+- Correctly honor ``decode_unicode`` even if it wasn't used earlier in the same
+ response.
+- Stop advertising ``compress`` as a supported Content-Encoding.
+- The ``Response.history`` parameter is now always a list.
+- Many, many ``urllib3`` bugfixes.
+
+2.2.1 (2014-01-23)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Fixes incorrect parsing of proxy credentials that contain a literal or encoded '#' character.
+- Assorted urllib3 fixes.
+
+2.2.0 (2014-01-09)
+++++++++++++++++++
+
+**API Changes**
+
+- New exception: ``ContentDecodingError``. Raised instead of ``urllib3``
+ ``DecodeError`` exceptions.
+
+**Bugfixes**
+
+- Avoid many many exceptions from the buggy implementation of ``proxy_bypass`` on OS X in Python 2.6.
+- Avoid crashing when attempting to get authentication credentials from ~/.netrc when running as a user without a home directory.
+- Use the correct pool size for pools of connections to proxies.
+- Fix iteration of ``CookieJar`` objects.
+- Ensure that cookies are persisted over redirect.
+- Switch back to using chardet, since it has merged with charade.
+
+2.1.0 (2013-12-05)
+++++++++++++++++++
+
+- Updated CA Bundle, of course.
+- Cookies set on individual Requests through a ``Session`` (e.g. via ``Session.get()``) are no longer persisted to the ``Session``.
+- Clean up connections when we hit problems during chunked upload, rather than leaking them.
+- Return connections to the pool when a chunked upload is successful, rather than leaking it.
+- Match the HTTPbis recommendation for HTTP 301 redirects.
+- Prevent hanging when using streaming uploads and Digest Auth when a 401 is received.
+- Values of headers set by Requests are now always the native string type.
+- Fix previously broken SNI support.
+- Fix accessing HTTP proxies using proxy authentication.
+- Unencode HTTP Basic usernames and passwords extracted from URLs.
+- Support for IP address ranges for no_proxy environment variable
+- Parse headers correctly when users override the default ``Host:`` header.
+- Avoid munging the URL in case of case-sensitive servers.
+- Looser URL handling for non-HTTP/HTTPS urls.
+- Accept unicode methods in Python 2.6 and 2.7.
+- More resilient cookie handling.
+- Make ``Response`` objects pickleable.
+- Actually added MD5-sess to Digest Auth instead of pretending to like last time.
+- Updated internal urllib3.
+- Fixed @Lukasa's lack of taste.
+
+2.0.1 (2013-10-24)
+++++++++++++++++++
+
+- Updated included CA Bundle with new mistrusts and automated process for the future
+- Added MD5-sess to Digest Auth
+- Accept per-file headers in multipart file POST messages.
+- Fixed: Don't send the full URL on CONNECT messages.
+- Fixed: Correctly lowercase a redirect scheme.
+- Fixed: Cookies not persisted when set via functional API.
+- Fixed: Translate urllib3 ProxyError into a requests ProxyError derived from ConnectionError.
+- Updated internal urllib3 and chardet.
+
+2.0.0 (2013-09-24)
+++++++++++++++++++
+
+**API Changes:**
+
+- Keys in the Headers dictionary are now native strings on all Python versions,
+ i.e. bytestrings on Python 2, unicode on Python 3.
+- Proxy URLs now *must* have an explicit scheme. A ``MissingSchema`` exception
+ will be raised if they don't.
+- Timeouts now apply to read time if ``stream=False``.
+- ``RequestException`` is now a subclass of ``IOError``, not ``RuntimeError``.
+- Added new method to ``PreparedRequest`` objects: ``PreparedRequest.copy()``.
+- Added new method to ``Session`` objects: ``Session.update_request()``. This
+ method updates a ``Request`` object with the data (e.g. cookies) stored on
+ the ``Session``.
+- Added new method to ``Session`` objects: ``Session.prepare_request()``. This
+ method updates and prepares a ``Request`` object, and returns the
+ corresponding ``PreparedRequest`` object.
+- Added new method to ``HTTPAdapter`` objects: ``HTTPAdapter.proxy_headers()``.
+ This should not be called directly, but improves the subclass interface.
+- ``httplib.IncompleteRead`` exceptions caused by incorrect chunked encoding
+ will now raise a Requests ``ChunkedEncodingError`` instead.
+- Invalid percent-escape sequences now cause a Requests ``InvalidURL``
+ exception to be raised.
+- HTTP 208 no longer uses reason phrase ``"im_used"``. Correctly uses
+ ``"already_reported"``.
+- HTTP 226 reason added (``"im_used"``).
+
+**Bugfixes:**
+
+- Vastly improved proxy support, including the CONNECT verb. Special thanks to
+ the many contributors who worked towards this improvement.
+- Cookies are now properly managed when 401 authentication responses are
+ received.
+- Chunked encoding fixes.
+- Support for mixed case schemes.
+- Better handling of streaming downloads.
+- Retrieve environment proxies from more locations.
+- Minor cookies fixes.
+- Improved redirect behaviour.
+- Improved streaming behaviour, particularly for compressed data.
+- Miscellaneous small Python 3 text encoding bugs.
+- ``.netrc`` no longer overrides explicit auth.
+- Cookies set by hooks are now correctly persisted on Sessions.
+- Fix problem with cookies that specify port numbers in their host field.
+- ``BytesIO`` can be used to perform streaming uploads.
+- More generous parsing of the ``no_proxy`` environment variable.
+- Non-string objects can be passed in data values alongside files.
+
+1.2.3 (2013-05-25)
+++++++++++++++++++
+
+- Simple packaging fix
+
+
+1.2.2 (2013-05-23)
+++++++++++++++++++
+
+- Simple packaging fix
+
+
+1.2.1 (2013-05-20)
+++++++++++++++++++
+
+- 301 and 302 redirects now change the verb to GET for all verbs, not just
+ POST, improving browser compatibility.
+- Python 3.3.2 compatibility
+- Always percent-encode location headers
+- Fix connection adapter matching to be most-specific first
+- new argument to the default connection adapter for passing a block argument
+- prevent a KeyError when there's no link headers
+
+1.2.0 (2013-03-31)
+++++++++++++++++++
+
+- Fixed cookies on sessions and on requests
+- Significantly change how hooks are dispatched - hooks now receive all the
+ arguments specified by the user when making a request so hooks can make a
+ secondary request with the same parameters. This is especially necessary for
+ authentication handler authors
+- certifi support was removed
+- Fixed bug where using OAuth 1 with body ``signature_type`` sent no data
+- Major proxy work thanks to @Lukasa including parsing of proxy authentication
+ from the proxy url
+- Fix DigestAuth handling too many 401s
+- Update vendored urllib3 to include SSL bug fixes
+- Allow keyword arguments to be passed to ``json.loads()`` via the
+ ``Response.json()`` method
+- Don't send ``Content-Length`` header by default on ``GET`` or ``HEAD``
+ requests
+- Add ``elapsed`` attribute to ``Response`` objects to time how long a request
+ took.
+- Fix ``RequestsCookieJar``
+- Sessions and Adapters are now picklable, i.e., can be used with the
+ multiprocessing library
+- Update charade to version 1.0.3
+
+The change in how hooks are dispatched will likely cause a great deal of
+issues.
+
+1.1.0 (2013-01-10)
+++++++++++++++++++
+
+- CHUNKED REQUESTS
+- Support for iterable response bodies
+- Assume servers persist redirect params
+- Allow explicit content types to be specified for file data
+- Make merge_kwargs case-insensitive when looking up keys
+
+1.0.3 (2012-12-18)
+++++++++++++++++++
+
+- Fix file upload encoding bug
+- Fix cookie behavior
+
+1.0.2 (2012-12-17)
+++++++++++++++++++
+
+- Proxy fix for HTTPAdapter.
+
+1.0.1 (2012-12-17)
+++++++++++++++++++
+
+- Cert verification exception bug.
+- Proxy fix for HTTPAdapter.
+
+1.0.0 (2012-12-17)
+++++++++++++++++++
+
+- Massive Refactor and Simplification
+- Switch to Apache 2.0 license
+- Swappable Connection Adapters
+- Mountable Connection Adapters
+- Mutable ProcessedRequest chain
+- /s/prefetch/stream
+- Removal of all configuration
+- Standard library logging
+- Make Response.json() callable, not property.
+- Usage of new charade project, which provides python 2 and 3 simultaneous chardet.
+- Removal of all hooks except 'response'
+- Removal of all authentication helpers (OAuth, Kerberos)
+
+This is not a backwards compatible change.
+
+0.14.2 (2012-10-27)
++++++++++++++++++++
+
+- Improved mime-compatible JSON handling
+- Proxy fixes
+- Path hack fixes
+- Case-Insensitive Content-Encoding headers
+- Support for CJK parameters in form posts
+
+
+0.14.1 (2012-10-01)
++++++++++++++++++++
+
+- Python 3.3 Compatibility
+- Simplify default accept-encoding
+- Bugfixes
+
+
+0.14.0 (2012-09-02)
+++++++++++++++++++++
+
+- No more iter_content errors if already downloaded.
+
+0.13.9 (2012-08-25)
++++++++++++++++++++
+
+- Fix for OAuth + POSTs
+- Remove exception eating from dispatch_hook
+- General bugfixes
+
+0.13.8 (2012-08-21)
++++++++++++++++++++
+
+- Incredible Link header support :)
+
+0.13.7 (2012-08-19)
++++++++++++++++++++
+
+- Support for (key, value) lists everywhere.
+- Digest Authentication improvements.
+- Ensure proxy exclusions work properly.
+- Clearer UnicodeError exceptions.
+- Automatic casting of URLs to strings (fURL and such)
+- Bugfixes.
+
+0.13.6 (2012-08-06)
++++++++++++++++++++
+
+- Long awaited fix for hanging connections!
+
+0.13.5 (2012-07-27)
++++++++++++++++++++
+
+- Packaging fix
+
+0.13.4 (2012-07-27)
++++++++++++++++++++
+
+- GSSAPI/Kerberos authentication!
+- App Engine 2.7 Fixes!
+- Fix leaking connections (from urllib3 update)
+- OAuthlib path hack fix
+- OAuthlib URL parameters fix.
+
+0.13.3 (2012-07-12)
++++++++++++++++++++
+
+- Use simplejson if available.
+- Do not hide SSLErrors behind Timeouts.
+- Fixed param handling with urls containing fragments.
+- Significantly improved information in User Agent.
+- client certificates are ignored when verify=False
+
+0.13.2 (2012-06-28)
++++++++++++++++++++
+
+- Zero dependencies (once again)!
+- New: Response.reason
+- Sign querystring parameters in OAuth 1.0
+- Client certificates no longer ignored when verify=False
+- Add openSUSE certificate support
+
+0.13.1 (2012-06-07)
++++++++++++++++++++
+
+- Allow passing a file or file-like object as data.
+- Allow hooks to return responses that indicate errors.
+- Fix Response.text and Response.json for body-less responses.
+
+0.13.0 (2012-05-29)
++++++++++++++++++++
+
+- Removal of Requests.async in favor of `grequests <https://github.com/kennethreitz/grequests>`_
+- Allow disabling of cookie persistence.
+- New implementation of safe_mode
+- cookies.get now supports default argument
+- Session cookies not saved when Session.request is called with return_response=False
+- Env: no_proxy support.
+- RequestsCookieJar improvements.
+- Various bug fixes.
+
+0.12.1 (2012-05-08)
++++++++++++++++++++
+
+- New ``Response.json`` property.
+- Ability to add string file uploads.
+- Fix out-of-range issue with iter_lines.
+- Fix iter_content default size.
+- Fix POST redirects containing files.
+
+0.12.0 (2012-05-02)
++++++++++++++++++++
+
+- EXPERIMENTAL OAUTH SUPPORT!
+- Proper CookieJar-backed cookies interface with awesome dict-like interface.
+- Speed fix for non-iterated content chunks.
+- Move ``pre_request`` to a more usable place.
+- New ``pre_send`` hook.
+- Lazily encode data, params, files.
+- Load system Certificate Bundle if ``certifi`` isn't available.
+- Cleanups, fixes.
+
+0.11.2 (2012-04-22)
++++++++++++++++++++
+
+- Attempt to use the OS's certificate bundle if ``certifi`` isn't available.
+- Infinite digest auth redirect fix.
+- Multi-part file upload improvements.
+- Fix decoding of invalid %encodings in URLs.
+- If there is no content in a response, don't throw an error the second time that content is read.
+- Upload data on redirects.
+
+0.11.1 (2012-03-30)
++++++++++++++++++++
+
+* POST redirects now break RFC to do what browsers do: Follow up with a GET.
+* New ``strict_mode`` configuration to disable new redirect behavior.
+
+
+0.11.0 (2012-03-14)
++++++++++++++++++++
+
+* Private SSL Certificate support
+* Remove select.poll from Gevent monkeypatching
+* Remove redundant generator for chunked transfer encoding
+* Fix: Response.ok raises Timeout Exception in safe_mode
+
+0.10.8 (2012-03-09)
++++++++++++++++++++
+
+* Generate chunked ValueError fix
+* Proxy configuration by environment variables
+* Simplification of iter_lines.
+* New `trust_env` configuration for disabling system/environment hints.
+* Suppress cookie errors.
+
+0.10.7 (2012-03-07)
++++++++++++++++++++
+
+* `encode_uri` = False
+
+0.10.6 (2012-02-25)
++++++++++++++++++++
+
+* Allow '=' in cookies.
+
+0.10.5 (2012-02-25)
++++++++++++++++++++
+
+* Response body with 0 content-length fix.
+* New async.imap.
+* Don't fail on netrc.
+
+
+0.10.4 (2012-02-20)
++++++++++++++++++++
+
+* Honor netrc.
+
+0.10.3 (2012-02-20)
++++++++++++++++++++
+
+* HEAD requests don't follow redirects anymore.
+* raise_for_status() doesn't raise for 3xx anymore.
+* Make Session objects picklable.
+* ValueError for invalid schema URLs.
+
+0.10.2 (2012-01-15)
++++++++++++++++++++
+
+* Vastly improved URL quoting.
+* Additional allowed cookie key values.
+* Attempted fix for "Too many open files" Error
+* Replace unicode errors on first pass, no need for second pass.
+* Append '/' to bare-domain urls before query insertion.
+* Exceptions now inherit from RuntimeError.
+* Binary uploads + auth fix.
+* Bugfixes.
+
+
+0.10.1 (2012-01-23)
++++++++++++++++++++
+
+* PYTHON 3 SUPPORT!
+* Dropped 2.5 Support. (*Backwards Incompatible*)
+
+0.10.0 (2012-01-21)
++++++++++++++++++++
+
+* ``Response.content`` is now bytes-only. (*Backwards Incompatible*)
+* New ``Response.text`` is unicode-only.
+* If no ``Response.encoding`` is specified and ``chardet`` is available, ``Response.text`` will guess an encoding.
+* Default to ISO-8859-1 (Western) encoding for "text" subtypes.
+* Removal of `decode_unicode`. (*Backwards Incompatible*)
+* New multiple-hooks system.
+* New ``Response.register_hook`` for registering hooks within the pipeline.
+* ``Response.url`` is now Unicode.
+
+0.9.3 (2012-01-18)
+++++++++++++++++++
+
+* SSL verify=False bugfix (apparent on windows machines).
+
+0.9.2 (2012-01-18)
+++++++++++++++++++
+
+* Asynchronous async.send method.
+* Support for proper chunk streams with boundaries.
+* session argument for Session classes.
+* Print entire hook tracebacks, not just exception instance.
+* Fix response.iter_lines from pending next line.
+* Fix bug in HTTP-digest auth w/ URI having query strings.
+* Fix in Event Hooks section.
+* Urllib3 update.
+
+
+0.9.1 (2012-01-06)
+++++++++++++++++++
+
+* danger_mode for automatic Response.raise_for_status()
+* Response.iter_lines refactor
+
+0.9.0 (2011-12-28)
+++++++++++++++++++
+
+* verify ssl is default.
+
+
+0.8.9 (2011-12-28)
+++++++++++++++++++
+
+* Packaging fix.
+
+
+0.8.8 (2011-12-28)
+++++++++++++++++++
+
+* SSL CERT VERIFICATION!
+* Release of Certifi: Mozilla's cert list.
+* New 'verify' argument for SSL requests (see the sketch below).
+* Urllib3 update.
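+
+A minimal sketch of the ``verify`` argument (the URL and bundle path are placeholders):
+
+.. code-block:: python
+
+    import requests
+
+    requests.get('https://example.com', verify=True)            # verify against the CA bundle
+    requests.get('https://example.com', verify='/path/ca.pem')  # use a custom CA bundle
+    requests.get('https://example.com', verify=False)           # disable verification (not recommended)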
+
+0.8.7 (2011-12-24)
+++++++++++++++++++
+
+* iter_lines last-line truncation fix
+* Force safe_mode for async requests
+* Handle safe_mode exceptions more consistently
+* Fix iteration on null responses in safe_mode
+
+0.8.6 (2011-12-18)
+++++++++++++++++++
+
+* Socket timeout fixes.
+* Proxy Authorization support.
+
+0.8.5 (2011-12-14)
+++++++++++++++++++
+
+* Response.iter_lines!
+
+0.8.4 (2011-12-11)
+++++++++++++++++++
+
+* Prefetch bugfix.
+* Added license to installed version.
+
+0.8.3 (2011-11-27)
+++++++++++++++++++
+
+* Converted auth system to use simpler callable objects.
+* New session parameter to API methods.
+* Display full URL while logging.
+
+0.8.2 (2011-11-19)
+++++++++++++++++++
+
+* New Unicode decoding system, based on overridable `Response.encoding`.
+* Proper URL slash-quote handling.
+* Cookies with ``[``, ``]``, and ``_`` allowed.
+
+0.8.1 (2011-11-15)
+++++++++++++++++++
+
+* URL Request path fix
+* Proxy fix.
+* Timeouts fix.
+
+0.8.0 (2011-11-13)
+++++++++++++++++++
+
+* Keep-alive support!
+* Complete removal of Urllib2
+* Complete removal of Poster
+* Complete removal of CookieJars
+* New ConnectionError raising
+* Safe_mode for error catching
+* prefetch parameter for request methods
+* OPTION method
+* Async pool size throttling
+* File uploads send real names
+* Vendored in urllib3
+
+0.7.6 (2011-11-07)
+++++++++++++++++++
+
+* Digest authentication bugfix (attach query data to path)
+
+0.7.5 (2011-11-04)
+++++++++++++++++++
+
+* Response.content = None if there was an invalid response.
+* Redirection auth handling.
+
+0.7.4 (2011-10-26)
+++++++++++++++++++
+
+* Session Hooks fix.
+
+0.7.3 (2011-10-23)
+++++++++++++++++++
+
+* Digest Auth fix.
+
+
+0.7.2 (2011-10-23)
+++++++++++++++++++
+
+* PATCH Fix.
+
+
+0.7.1 (2011-10-23)
+++++++++++++++++++
+
+* Move away from urllib2 authentication handling.
+* Fully Remove AuthManager, AuthObject, &c.
+* New tuple-based auth system with handler callbacks.
+
+
+0.7.0 (2011-10-22)
+++++++++++++++++++
+
+* Sessions are now the primary interface.
+* Deprecated InvalidMethodException.
+* PATCH fix.
+* New config system (no more global settings).
+
+
+0.6.6 (2011-10-19)
+++++++++++++++++++
+
+* Session parameter bugfix (params merging).
+
+
+0.6.5 (2011-10-18)
+++++++++++++++++++
+
+* Offline (fast) test suite.
+* Session dictionary argument merging.
+
+
+0.6.4 (2011-10-13)
+++++++++++++++++++
+
+* Automatic decoding of unicode, based on HTTP Headers.
+* New ``decode_unicode`` setting.
+* Removal of ``r.read/close`` methods.
+* New ``r.raw`` interface for advanced response usage.
+* Automatic expansion of parameterized headers.
+
+
+0.6.3 (2011-10-13)
+++++++++++++++++++
+
+* Beautiful ``requests.async`` module, for making async requests w/ gevent.
+
+
+0.6.2 (2011-10-09)
+++++++++++++++++++
+
+* GET/HEAD obeys allow_redirects=False.
+
+
+0.6.1 (2011-08-20)
+++++++++++++++++++
+
+* Enhanced status codes experience ``\o/``
+* Set a maximum number of redirects (``settings.max_redirects``)
+* Full Unicode URL support
+* Support for protocol-less redirects.
+* Allow for arbitrary request types.
+* Bugfixes
+
+
+0.6.0 (2011-08-17)
+++++++++++++++++++
+
+* New callback hook system
+* New persistent sessions object and context manager
+* Transparent Dict-cookie handling
+* Status code reference object
+* Removed Response.cached
+* Added Response.request
+* All args are kwargs
+* Relative redirect support
+* HTTPError handling improvements
+* Improved https testing
+* Bugfixes
+
+
+0.5.1 (2011-07-23)
+++++++++++++++++++
+
+* International Domain Name Support!
+* Access headers without fetching entire body (``read()``)
+* Use lists as dicts for parameters
+* Add Forced Basic Authentication
+* Forced Basic is default authentication type
+* ``python-requests.org`` default User-Agent header
+* CaseInsensitiveDict lower-case caching
+* Response.history bugfix
+
+
+0.5.0 (2011-06-21)
+++++++++++++++++++
+
+* PATCH Support
+* Support for Proxies
+* HTTPBin Test Suite
+* Redirect Fixes
+* settings.verbose stream writing
+* Querystrings for all methods
+* URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as explicitly raised
+  ``r = requests.get('hwe://blah'); r.raise_for_status()``
+
+
+0.4.1 (2011-05-22)
+++++++++++++++++++
+
+* Improved Redirection Handling
+* New 'allow_redirects' param for following non-GET/HEAD Redirects
+* Settings module refactoring
+
+
+0.4.0 (2011-05-15)
+++++++++++++++++++
+
+* Response.history: list of redirected responses
+* Case-Insensitive Header Dictionaries!
+* Unicode URLs
+
+
+0.3.4 (2011-05-14)
+++++++++++++++++++
+
+* Urllib2 HTTPAuthentication Recursion fix (Basic/Digest)
+* Internal Refactor
+* Bytes data upload Bugfix
+
+
+
+0.3.3 (2011-05-12)
+++++++++++++++++++
+
+* Request timeouts
+* Unicode url-encoded data
+* Settings context manager and module
+
+
+0.3.2 (2011-04-15)
+++++++++++++++++++
+
+* Automatic Decompression of GZip Encoded Content
+* AutoAuth Support for Tupled HTTP Auth
+
+
+0.3.1 (2011-04-01)
+++++++++++++++++++
+
+* Cookie Changes
+* Response.read()
+* Poster fix
+
+
+0.3.0 (2011-02-25)
+++++++++++++++++++
+
+* Automatic Authentication API Change
+* Smarter Query URL Parameterization
+* Allow file uploads and POST data together
+* New Authentication Manager System
+ - Simpler Basic HTTP System
+  - Supports all built-in urllib2 Auths
+ - Allows for custom Auth Handlers
+
+
+0.2.4 (2011-02-19)
+++++++++++++++++++
+
+* Python 2.5 Support
+* PyPy-c v1.4 Support
+* Auto-Authentication tests
+* Improved Request object constructor
+
+0.2.3 (2011-02-15)
+++++++++++++++++++
+
+* New HTTPHandling Methods
+ - Response.__nonzero__ (false if bad HTTP Status)
+ - Response.ok (True if expected HTTP Status)
+ - Response.error (Logged HTTPError if bad HTTP Status)
+ - Response.raise_for_status() (Raises stored HTTPError)
+
+
+0.2.2 (2011-02-14)
+++++++++++++++++++
+
+* Still handles the request in the event of an HTTPError. (Issue #2)
+* Eventlet and Gevent Monkeypatch support.
+* Cookie Support (Issue #1)
+
+
+0.2.1 (2011-02-14)
+++++++++++++++++++
+
+* Added file attribute to POST and PUT requests for multipart-encode file uploads.
+* Added Request.url attribute for context and redirects
+
+
+0.2.0 (2011-02-14)
+++++++++++++++++++
+
+* Birth!
+
+
+0.0.1 (2011-02-13)
+++++++++++++++++++
+
+* Frustration
+* Conception
+
diff --git a/third_party/python/requests/LICENSE b/third_party/python/requests/LICENSE
new file mode 100644
index 0000000000..a103fc915e
--- /dev/null
+++ b/third_party/python/requests/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2015 Kenneth Reitz
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/third_party/python/requests/MANIFEST.in b/third_party/python/requests/MANIFEST.in
new file mode 100644
index 0000000000..439de496f0
--- /dev/null
+++ b/third_party/python/requests/MANIFEST.in
@@ -0,0 +1 @@
+include README.rst LICENSE NOTICE HISTORY.rst test_requests.py requirements.txt requests/cacert.pem
diff --git a/third_party/python/requests/NOTICE b/third_party/python/requests/NOTICE
new file mode 100644
index 0000000000..f583e47ab2
--- /dev/null
+++ b/third_party/python/requests/NOTICE
@@ -0,0 +1,54 @@
+Requests includes some vendorized python libraries to ease installation.
+
+Urllib3 License
+===============
+
+This is the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt),
+Modifications copyright 2012 Kenneth Reitz.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Chardet License
+===============
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+02110-1301 USA
+
+
+CA Bundle License
+=================
+
+This Source Code Form is subject to the terms of the Mozilla Public
+License, v. 2.0. If a copy of the MPL was not distributed with this
+file, You can obtain one at http://mozilla.org/MPL/2.0/.
diff --git a/third_party/python/requests/PKG-INFO b/third_party/python/requests/PKG-INFO
new file mode 100644
index 0000000000..d75ebac87f
--- /dev/null
+++ b/third_party/python/requests/PKG-INFO
@@ -0,0 +1,1238 @@
+Metadata-Version: 1.1
+Name: requests
+Version: 2.9.1
+Summary: Python HTTP for Humans.
+Home-page: http://python-requests.org
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.com
+License: Apache 2.0
+Description: Requests: HTTP for Humans
+ =========================
+
+ .. image:: https://img.shields.io/pypi/v/requests.svg
+ :target: https://pypi.python.org/pypi/requests
+
+ .. image:: https://img.shields.io/pypi/dm/requests.svg
+ :target: https://pypi.python.org/pypi/requests
+
+
+
+
+ Requests is an Apache2 Licensed HTTP library, written in Python, for human
+ beings.
+
+ Most existing Python modules for sending HTTP requests are extremely
+ verbose and cumbersome. Python's builtin urllib2 module provides most of
+ the HTTP capabilities you should need, but the api is thoroughly broken.
+ It requires an enormous amount of work (even method overrides) to
+ perform the simplest of tasks.
+
+ Things shouldn't be this way. Not in Python.
+
+ .. code-block:: python
+
+ >>> r = requests.get('https://api.github.com', auth=('user', 'pass'))
+ >>> r.status_code
+ 204
+ >>> r.headers['content-type']
+ 'application/json'
+ >>> r.text
+ ...
+
+ See `the same code, without Requests <https://gist.github.com/973705>`_.
+
+        Requests allows you to send HTTP/1.1 requests. You can add headers, form data,
+ multipart files, and parameters with simple Python dictionaries, and access the
+ response data in the same way. It's powered by httplib and `urllib3
+ <https://github.com/shazow/urllib3>`_, but it does all the hard work and crazy
+ hacks for you.
+
+
+ Features
+ --------
+
+ - International Domains and URLs
+ - Keep-Alive & Connection Pooling
+ - Sessions with Cookie Persistence
+ - Browser-style SSL Verification
+ - Basic/Digest Authentication
+ - Elegant Key/Value Cookies
+ - Automatic Decompression
+ - Unicode Response Bodies
+ - Multipart File Uploads
+ - Connection Timeouts
+ - Thread-safety
+ - HTTP(S) proxy support
+
+
+ Installation
+ ------------
+
+ To install Requests, simply:
+
+ .. code-block:: bash
+
+ $ pip install requests
+
+
+ Documentation
+ -------------
+
+ Documentation is available at http://docs.python-requests.org/.
+
+
+ Contribute
+ ----------
+
+ #. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug. There is a `Contributor Friendly`_ tag for issues that should be ideal for people who are not very familiar with the codebase yet.
+ #. Fork `the repository`_ on GitHub to start making your changes to the **master** branch (or branch off of it).
+ #. Write a test which shows that the bug was fixed or that the feature works as expected.
+ #. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to AUTHORS_.
+
+ .. _`the repository`: http://github.com/kennethreitz/requests
+ .. _AUTHORS: https://github.com/kennethreitz/requests/blob/master/AUTHORS.rst
+ .. _Contributor Friendly: https://github.com/kennethreitz/requests/issues?direction=desc&labels=Contributor+Friendly&page=1&sort=updated&state=open
+
+
+ .. :changelog:
+
+ Release History
+ ---------------
+
+ 2.9.1 (2015-12-21)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Resolve regression introduced in 2.9.0 that made it impossible to send binary
+ strings as bodies in Python 3.
+ - Fixed errors when calculating cookie expiration dates in certain locales.
+
+ **Miscellaneous**
+
+ - Updated bundled urllib3 to 1.13.1.
+
+ 2.9.0 (2015-12-15)
+ ++++++++++++++++++
+
+ **Minor Improvements** (Backwards compatible)
+
+ - The ``verify`` keyword argument now supports being passed a path to a
+ directory of CA certificates, not just a single-file bundle.
+ - Warnings are now emitted when sending files opened in text mode.
+ - Added the 511 Network Authentication Required status code to the status code
+ registry.
+
+ **Bugfixes**
+
+ - For file-like objects that are not seeked to the very beginning, we now
+ send the content length for the number of bytes we will actually read, rather
+ than the total size of the file, allowing partial file uploads.
+ - When uploading file-like objects, if they are empty or have no obvious
+ content length we set ``Transfer-Encoding: chunked`` rather than
+ ``Content-Length: 0``.
+ - We correctly receive the response in buffered mode when uploading chunked
+ bodies.
+ - We now handle being passed a query string as a bytestring on Python 3, by
+ decoding it as UTF-8.
+ - Sessions are now closed in all cases (exceptional and not) when using the
+ functional API rather than leaking and waiting for the garbage collector to
+ clean them up.
+ - Correctly handle digest auth headers with a malformed ``qop`` directive that
+ contains no token, by treating it the same as if no ``qop`` directive was
+ provided at all.
+ - Minor performance improvements when removing specific cookies by name.
+
+ **Miscellaneous**
+
+ - Updated urllib3 to 1.13.
+
+ 2.8.1 (2015-10-13)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Update certificate bundle to match ``certifi`` 2015.9.6.2's weak certificate
+ bundle.
+ - Fix a bug in 2.8.0 where requests would raise ``ConnectTimeout`` instead of
+ ``ConnectionError``
+ - When using the PreparedRequest flow, requests will now correctly respect the
+ ``json`` parameter. Broken in 2.8.0.
+ - When using the PreparedRequest flow, requests will now correctly handle a
+ Unicode-string method name on Python 2. Broken in 2.8.0.
+
+ 2.8.0 (2015-10-05)
+ ++++++++++++++++++
+
+ **Minor Improvements** (Backwards Compatible)
+
+ - Requests now supports per-host proxies. This allows the ``proxies``
+ dictionary to have entries of the form
+ ``{'<scheme>://<hostname>': '<proxy>'}``. Host-specific proxies will be used
+ in preference to the previously-supported scheme-specific ones, but the
+          previous syntax will continue to work (see the sketch below).
+ - ``Response.raise_for_status`` now prints the URL that failed as part of the
+ exception message.
+        - ``requests.utils.get_netrc_auth`` now takes a ``raise_errors`` kwarg,
+ defaulting to ``False``. When ``True``, errors parsing ``.netrc`` files cause
+ exceptions to be thrown.
+ - Change to bundled projects import logic to make it easier to unbundle
+ requests downstream.
+ - Changed the default User-Agent string to avoid leaking data on Linux: now
+ contains only the requests version.
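+
+        A minimal sketch of the per-host proxy syntax (the host and proxy
+        addresses below are placeholders):
+
+        .. code-block:: python
+
+            import requests
+
+            proxies = {
+                'http://internal.example.com': 'http://proxy.example.com:3128',  # host-specific
+                'http': 'http://gateway.example.com:8080',                       # scheme-wide fallback
+            }
+            requests.get('http://internal.example.com/status', proxies=proxies)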
+
+ **Bugfixes**
+
+ - The ``json`` parameter to ``post()`` and friends will now only be used if
+ neither ``data`` nor ``files`` are present, consistent with the
+ documentation.
+ - We now ignore empty fields in the ``NO_PROXY`` environment variable.
+ - Fixed problem where ``httplib.BadStatusLine`` would get raised if combining
+ ``stream=True`` with ``contextlib.closing``.
+ - Prevented bugs where we would attempt to return the same connection back to
+ the connection pool twice when sending a Chunked body.
+ - Miscellaneous minor internal changes.
+ - Digest Auth support is now thread safe.
+
+ **Updates**
+
+ - Updated urllib3 to 1.12.
+
+ 2.7.0 (2015-05-03)
+ ++++++++++++++++++
+
+ This is the first release that follows our new release process. For more, see
+ `our documentation
+ <http://docs.python-requests.org/en/latest/community/release-process/>`_.
+
+ **Bugfixes**
+
+ - Updated urllib3 to 1.10.4, resolving several bugs involving chunked transfer
+ encoding and response framing.
+
+ 2.6.2 (2015-04-23)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Fix regression where compressed data that was sent as chunked data was not
+ properly decompressed. (#2561)
+
+ 2.6.1 (2015-04-22)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Remove VendorAlias import machinery introduced in v2.5.2.
+
+ - Simplify the PreparedRequest.prepare API: We no longer require the user to
+ pass an empty list to the hooks keyword argument. (c.f. #2552)
+
+ - Resolve redirects now receives and forwards all of the original arguments to
+ the adapter. (#2503)
+
+ - Handle UnicodeDecodeErrors when trying to deal with a unicode URL that
+ cannot be encoded in ASCII. (#2540)
+
+ - Populate the parsed path of the URI field when performing Digest
+ Authentication. (#2426)
+
+ - Copy a PreparedRequest's CookieJar more reliably when it is not an instance
+ of RequestsCookieJar. (#2527)
+
+ 2.6.0 (2015-03-14)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - CVE-2015-2296: Fix handling of cookies on redirect. Previously a cookie
+ without a host value set would use the hostname for the redirected URL
+ exposing requests users to session fixation attacks and potentially cookie
+ stealing. This was disclosed privately by Matthew Daley of
+ `BugFuzz <https://bugfuzz.com>`_. This affects all versions of requests from
+ v2.1.0 to v2.5.3 (inclusive on both ends).
+
+ - Fix error when requests is an ``install_requires`` dependency and ``python
+ setup.py test`` is run. (#2462)
+
+ - Fix error when urllib3 is unbundled and requests continues to use the
+ vendored import location.
+
+ - Include fixes to ``urllib3``'s header handling.
+
+ - Requests' handling of unvendored dependencies is now more restrictive.
+
+ **Features and Improvements**
+
+ - Support bytearrays when passed as parameters in the ``files`` argument.
+ (#2468)
+
+ - Avoid data duplication when creating a request with ``str``, ``bytes``, or
+ ``bytearray`` input to the ``files`` argument.
+
+ 2.5.3 (2015-02-24)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Revert changes to our vendored certificate bundle. For more context see
+ (#2455, #2456, and http://bugs.python.org/issue23476)
+
+ 2.5.2 (2015-02-23)
+ ++++++++++++++++++
+
+ **Features and Improvements**
+
+ - Add sha256 fingerprint support. (`shazow/urllib3#540`_)
+
+ - Improve the performance of headers. (`shazow/urllib3#544`_)
+
+ **Bugfixes**
+
+ - Copy pip's import machinery. When downstream redistributors remove
+ requests.packages.urllib3 the import machinery will continue to let those
+ same symbols work. Example usage in requests' documentation and 3rd-party
+ libraries relying on the vendored copies of urllib3 will work without having
+ to fallback to the system urllib3.
+
+ - Attempt to quote parts of the URL on redirect if unquoting and then quoting
+ fails. (#2356)
+
+ - Fix filename type check for multipart form-data uploads. (#2411)
+
+ - Properly handle the case where a server issuing digest authentication
+ challenges provides both auth and auth-int qop-values. (#2408)
+
+ - Fix a socket leak. (`shazow/urllib3#549`_)
+
+ - Fix multiple ``Set-Cookie`` headers properly. (`shazow/urllib3#534`_)
+
+ - Disable the built-in hostname verification. (`shazow/urllib3#526`_)
+
+ - Fix the behaviour of decoding an exhausted stream. (`shazow/urllib3#535`_)
+
+ **Security**
+
+ - Pulled in an updated ``cacert.pem``.
+
+ - Drop RC4 from the default cipher list. (`shazow/urllib3#551`_)
+
+ .. _shazow/urllib3#551: https://github.com/shazow/urllib3/pull/551
+ .. _shazow/urllib3#549: https://github.com/shazow/urllib3/pull/549
+ .. _shazow/urllib3#544: https://github.com/shazow/urllib3/pull/544
+ .. _shazow/urllib3#540: https://github.com/shazow/urllib3/pull/540
+ .. _shazow/urllib3#535: https://github.com/shazow/urllib3/pull/535
+ .. _shazow/urllib3#534: https://github.com/shazow/urllib3/pull/534
+ .. _shazow/urllib3#526: https://github.com/shazow/urllib3/pull/526
+
+ 2.5.1 (2014-12-23)
+ ++++++++++++++++++
+
+ **Behavioural Changes**
+
+ - Only catch HTTPErrors in raise_for_status (#2382)
+
+ **Bugfixes**
+
+ - Handle LocationParseError from urllib3 (#2344)
+ - Handle file-like object filenames that are not strings (#2379)
+ - Unbreak HTTPDigestAuth handler. Allow new nonces to be negotiated (#2389)
+
+ 2.5.0 (2014-12-01)
+ ++++++++++++++++++
+
+ **Improvements**
+
+ - Allow usage of urllib3's Retry object with HTTPAdapters (#2216)
+ - The ``iter_lines`` method on a response now accepts a delimiter with which
+          to split the content (#2295); both changes are sketched below.
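+
+        A minimal sketch of both improvements, assuming the bundled urllib3
+        (URL and retry values are placeholders):
+
+        .. code-block:: python
+
+            import requests
+            from requests.adapters import HTTPAdapter
+            from requests.packages.urllib3.util.retry import Retry  # bundled urllib3
+
+            session = requests.Session()
+            # Retry failed requests up to three times with a small backoff.
+            session.mount('https://', HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5)))
+
+            response = session.get('https://httpbin.org/stream/5', stream=True)
+            for line in response.iter_lines(delimiter=b'\n'):
+                print(line)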
+
+ **Behavioural Changes**
+
+ - Add deprecation warnings to functions in requests.utils that will be removed
+ in 3.0 (#2309)
+ - Sessions used by the functional API are always closed (#2326)
+ - Restrict requests to HTTP/1.1 and HTTP/1.0 (stop accepting HTTP/0.9) (#2323)
+
+ **Bugfixes**
+
+ - Only parse the URL once (#2353)
+ - Allow Content-Length header to always be overridden (#2332)
+ - Properly handle files in HTTPDigestAuth (#2333)
+ - Cap redirect_cache size to prevent memory abuse (#2299)
+ - Fix HTTPDigestAuth handling of redirects after authenticating successfully
+ (#2253)
+ - Fix crash with custom method parameter to Session.request (#2317)
+ - Fix how Link headers are parsed using the regular expression library (#2271)
+
+ **Documentation**
+
+ - Add more references for interlinking (#2348)
+ - Update CSS for theme (#2290)
+ - Update width of buttons and sidebar (#2289)
+ - Replace references of Gittip with Gratipay (#2282)
+ - Add link to changelog in sidebar (#2273)
+
+ 2.4.3 (2014-10-06)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Unicode URL improvements for Python 2.
+ - Re-order JSON param for backwards compat.
+ - Automatically defrag authentication schemes from host/pass URIs. (`#2249 <https://github.com/kennethreitz/requests/issues/2249>`_)
+
+
+ 2.4.2 (2014-10-05)
+ ++++++++++++++++++
+
+ **Improvements**
+
+        - FINALLY! Add json parameter for uploads, sketched below! (`#2258 <https://github.com/kennethreitz/requests/pull/2258>`_)
+ - Support for bytestring URLs on Python 3.x (`#2238 <https://github.com/kennethreitz/requests/pull/2238>`_)
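+
+        A minimal sketch of the new ``json`` parameter (the URL is a placeholder):
+
+        .. code-block:: python
+
+            import requests
+
+            # The dict is serialised to JSON and Content-Type is set to
+            # application/json automatically.
+            response = requests.post('https://httpbin.org/post', json={'answer': 42})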
+
+ **Bugfixes**
+
+ - Avoid getting stuck in a loop (`#2244 <https://github.com/kennethreitz/requests/pull/2244>`_)
+ - Multiple calls to iter* fail with unhelpful error. (`#2240 <https://github.com/kennethreitz/requests/issues/2240>`_, `#2241 <https://github.com/kennethreitz/requests/issues/2241>`_)
+
+ **Documentation**
+
+ - Correct redirection introduction (`#2245 <https://github.com/kennethreitz/requests/pull/2245/>`_)
+ - Added example of how to send multiple files in one request. (`#2227 <https://github.com/kennethreitz/requests/pull/2227/>`_)
+ - Clarify how to pass a custom set of CAs (`#2248 <https://github.com/kennethreitz/requests/pull/2248/>`_)
+
+
+
+ 2.4.1 (2014-09-09)
+ ++++++++++++++++++
+
+ - Now has a "security" package extras set, ``$ pip install requests[security]``
+ - Requests will now use Certifi if it is available.
+ - Capture and re-raise urllib3 ProtocolError
+ - Bugfix for responses that attempt to redirect to themselves forever (wtf?).
+
+
+ 2.4.0 (2014-08-29)
+ ++++++++++++++++++
+
+ **Behavioral Changes**
+
+ - ``Connection: keep-alive`` header is now sent automatically.
+
+ **Improvements**
+
+        - Support for connect timeouts! Timeout now accepts a tuple (connect, read) which is used to set individual connect and read timeouts (see the sketch below).
+ - Allow copying of PreparedRequests without headers/cookies.
+ - Updated bundled urllib3 version.
+ - Refactored settings loading from environment -- new `Session.merge_environment_settings`.
+ - Handle socket errors in iter_content.
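+
+        A minimal sketch of the (connect, read) timeout tuple (the URL and
+        values are placeholders):
+
+        .. code-block:: python
+
+            import requests
+
+            # Wait at most 3.05 seconds to connect and 27 seconds for data.
+            requests.get('https://httpbin.org/delay/3', timeout=(3.05, 27))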
+
+
+ 2.3.0 (2014-05-16)
+ ++++++++++++++++++
+
+ **API Changes**
+
+ - New ``Response`` property ``is_redirect``, which is true when the
+ library could have processed this response as a redirection (whether
+ or not it actually did).
+ - The ``timeout`` parameter now affects requests with both ``stream=True`` and
+ ``stream=False`` equally.
+ - The change in v2.0.0 to mandate explicit proxy schemes has been reverted.
+ Proxy schemes now default to ``http://``.
+ - The ``CaseInsensitiveDict`` used for HTTP headers now behaves like a normal
+          dictionary when referenced as a string or viewed in the interpreter.
+
+ **Bugfixes**
+
+ - No longer expose Authorization or Proxy-Authorization headers on redirect.
+ Fix CVE-2014-1829 and CVE-2014-1830 respectively.
+ - Authorization is re-evaluated each redirect.
+ - On redirect, pass url as native strings.
+ - Fall-back to autodetected encoding for JSON when Unicode detection fails.
+ - Headers set to ``None`` on the ``Session`` are now correctly not sent.
+ - Correctly honor ``decode_unicode`` even if it wasn't used earlier in the same
+ response.
+ - Stop advertising ``compress`` as a supported Content-Encoding.
+ - The ``Response.history`` parameter is now always a list.
+ - Many, many ``urllib3`` bugfixes.
+
+ 2.2.1 (2014-01-23)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Fixes incorrect parsing of proxy credentials that contain a literal or encoded '#' character.
+ - Assorted urllib3 fixes.
+
+ 2.2.0 (2014-01-09)
+ ++++++++++++++++++
+
+ **API Changes**
+
+ - New exception: ``ContentDecodingError``. Raised instead of ``urllib3``
+ ``DecodeError`` exceptions.
+
+ **Bugfixes**
+
+ - Avoid many many exceptions from the buggy implementation of ``proxy_bypass`` on OS X in Python 2.6.
+ - Avoid crashing when attempting to get authentication credentials from ~/.netrc when running as a user without a home directory.
+ - Use the correct pool size for pools of connections to proxies.
+ - Fix iteration of ``CookieJar`` objects.
+ - Ensure that cookies are persisted over redirect.
+ - Switch back to using chardet, since it has merged with charade.
+
+ 2.1.0 (2013-12-05)
+ ++++++++++++++++++
+
+ - Updated CA Bundle, of course.
+ - Cookies set on individual Requests through a ``Session`` (e.g. via ``Session.get()``) are no longer persisted to the ``Session``.
+ - Clean up connections when we hit problems during chunked upload, rather than leaking them.
+ - Return connections to the pool when a chunked upload is successful, rather than leaking it.
+ - Match the HTTPbis recommendation for HTTP 301 redirects.
+ - Prevent hanging when using streaming uploads and Digest Auth when a 401 is received.
+ - Values of headers set by Requests are now always the native string type.
+ - Fix previously broken SNI support.
+ - Fix accessing HTTP proxies using proxy authentication.
+ - Unencode HTTP Basic usernames and passwords extracted from URLs.
+ - Support for IP address ranges for no_proxy environment variable
+ - Parse headers correctly when users override the default ``Host:`` header.
+ - Avoid munging the URL in case of case-sensitive servers.
+ - Looser URL handling for non-HTTP/HTTPS urls.
+ - Accept unicode methods in Python 2.6 and 2.7.
+ - More resilient cookie handling.
+ - Make ``Response`` objects pickleable.
+ - Actually added MD5-sess to Digest Auth instead of pretending to like last time.
+ - Updated internal urllib3.
+ - Fixed @Lukasa's lack of taste.
+
+ 2.0.1 (2013-10-24)
+ ++++++++++++++++++
+
+ - Updated included CA Bundle with new mistrusts and automated process for the future
+ - Added MD5-sess to Digest Auth
+ - Accept per-file headers in multipart file POST messages.
+ - Fixed: Don't send the full URL on CONNECT messages.
+ - Fixed: Correctly lowercase a redirect scheme.
+ - Fixed: Cookies not persisted when set via functional API.
+ - Fixed: Translate urllib3 ProxyError into a requests ProxyError derived from ConnectionError.
+ - Updated internal urllib3 and chardet.
+
+ 2.0.0 (2013-09-24)
+ ++++++++++++++++++
+
+ **API Changes:**
+
+ - Keys in the Headers dictionary are now native strings on all Python versions,
+ i.e. bytestrings on Python 2, unicode on Python 3.
+ - Proxy URLs now *must* have an explicit scheme. A ``MissingSchema`` exception
+ will be raised if they don't.
+ - Timeouts now apply to read time if ``Stream=False``.
+ - ``RequestException`` is now a subclass of ``IOError``, not ``RuntimeError``.
+ - Added new method to ``PreparedRequest`` objects: ``PreparedRequest.copy()``.
+ - Added new method to ``Session`` objects: ``Session.update_request()``. This
+ method updates a ``Request`` object with the data (e.g. cookies) stored on
+ the ``Session``.
+ - Added new method to ``Session`` objects: ``Session.prepare_request()``. This
+ method updates and prepares a ``Request`` object, and returns the
+ corresponding ``PreparedRequest`` object.
+ - Added new method to ``HTTPAdapter`` objects: ``HTTPAdapter.proxy_headers()``.
+ This should not be called directly, but improves the subclass interface.
+ - ``httplib.IncompleteRead`` exceptions caused by incorrect chunked encoding
+ will now raise a Requests ``ChunkedEncodingError`` instead.
+ - Invalid percent-escape sequences now cause a Requests ``InvalidURL``
+ exception to be raised.
+ - HTTP 208 no longer uses reason phrase ``"im_used"``. Correctly uses
+ ``"already_reported"``.
+ - HTTP 226 reason added (``"im_used"``).
+
+ **Bugfixes:**
+
+ - Vastly improved proxy support, including the CONNECT verb. Special thanks to
+ the many contributors who worked towards this improvement.
+ - Cookies are now properly managed when 401 authentication responses are
+ received.
+ - Chunked encoding fixes.
+ - Support for mixed case schemes.
+ - Better handling of streaming downloads.
+ - Retrieve environment proxies from more locations.
+ - Minor cookies fixes.
+ - Improved redirect behaviour.
+ - Improved streaming behaviour, particularly for compressed data.
+ - Miscellaneous small Python 3 text encoding bugs.
+ - ``.netrc`` no longer overrides explicit auth.
+ - Cookies set by hooks are now correctly persisted on Sessions.
+ - Fix problem with cookies that specify port numbers in their host field.
+ - ``BytesIO`` can be used to perform streaming uploads.
+ - More generous parsing of the ``no_proxy`` environment variable.
+ - Non-string objects can be passed in data values alongside files.
+
+ 1.2.3 (2013-05-25)
+ ++++++++++++++++++
+
+ - Simple packaging fix
+
+
+ 1.2.2 (2013-05-23)
+ ++++++++++++++++++
+
+ - Simple packaging fix
+
+
+ 1.2.1 (2013-05-20)
+ ++++++++++++++++++
+
+ - 301 and 302 redirects now change the verb to GET for all verbs, not just
+ POST, improving browser compatibility.
+ - Python 3.3.2 compatibility
+ - Always percent-encode location headers
+ - Fix connection adapter matching to be most-specific first
+ - new argument to the default connection adapter for passing a block argument
+ - prevent a KeyError when there's no link headers
+
+ 1.2.0 (2013-03-31)
+ ++++++++++++++++++
+
+ - Fixed cookies on sessions and on requests
+ - Significantly change how hooks are dispatched - hooks now receive all the
+ arguments specified by the user when making a request so hooks can make a
+ secondary request with the same parameters. This is especially necessary for
+ authentication handler authors
+ - certifi support was removed
+ - Fixed bug where using OAuth 1 with body ``signature_type`` sent no data
+ - Major proxy work thanks to @Lukasa including parsing of proxy authentication
+ from the proxy url
+ - Fix DigestAuth handling too many 401s
+ - Update vendored urllib3 to include SSL bug fixes
+ - Allow keyword arguments to be passed to ``json.loads()`` via the
+          ``Response.json()`` method (see the sketch below)
+ - Don't send ``Content-Length`` header by default on ``GET`` or ``HEAD``
+ requests
+ - Add ``elapsed`` attribute to ``Response`` objects to time how long a request
+ took.
+ - Fix ``RequestsCookieJar``
+ - Sessions and Adapters are now picklable, i.e., can be used with the
+ multiprocessing library
+ - Update charade to version 1.0.3
+
+ The change in how hooks are dispatched will likely cause a great deal of
+ issues.
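+
+        A minimal sketch of forwarding keyword arguments to ``json.loads()``
+        (the URL is a placeholder):
+
+        .. code-block:: python
+
+            from decimal import Decimal
+
+            import requests
+
+            response = requests.get('https://httpbin.org/json')
+            # Extra keyword arguments are passed straight through to json.loads().
+            data = response.json(parse_float=Decimal)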
+
+ 1.1.0 (2013-01-10)
+ ++++++++++++++++++
+
+ - CHUNKED REQUESTS
+ - Support for iterable response bodies
+ - Assume servers persist redirect params
+ - Allow explicit content types to be specified for file data
+ - Make merge_kwargs case-insensitive when looking up keys
+
+ 1.0.3 (2012-12-18)
+ ++++++++++++++++++
+
+ - Fix file upload encoding bug
+ - Fix cookie behavior
+
+ 1.0.2 (2012-12-17)
+ ++++++++++++++++++
+
+ - Proxy fix for HTTPAdapter.
+
+ 1.0.1 (2012-12-17)
+ ++++++++++++++++++
+
+ - Cert verification exception bug.
+ - Proxy fix for HTTPAdapter.
+
+ 1.0.0 (2012-12-17)
+ ++++++++++++++++++
+
+ - Massive Refactor and Simplification
+ - Switch to Apache 2.0 license
+ - Swappable Connection Adapters
+ - Mountable Connection Adapters
+ - Mutable ProcessedRequest chain
+ - /s/prefetch/stream
+ - Removal of all configuration
+ - Standard library logging
+ - Make Response.json() callable, not property.
+ - Usage of new charade project, which provides python 2 and 3 simultaneous chardet.
+ - Removal of all hooks except 'response'
+ - Removal of all authentication helpers (OAuth, Kerberos)
+
+ This is not a backwards compatible change.
+
+ 0.14.2 (2012-10-27)
+ +++++++++++++++++++
+
+ - Improved mime-compatible JSON handling
+ - Proxy fixes
+ - Path hack fixes
+ - Case-Insensitive Content-Encoding headers
+ - Support for CJK parameters in form posts
+
+
+ 0.14.1 (2012-10-01)
+ +++++++++++++++++++
+
+ - Python 3.3 Compatibility
+        - Simplify default accept-encoding
+ - Bugfixes
+
+
+ 0.14.0 (2012-09-02)
+ ++++++++++++++++++++
+
+ - No more iter_content errors if already downloaded.
+
+ 0.13.9 (2012-08-25)
+ +++++++++++++++++++
+
+ - Fix for OAuth + POSTs
+ - Remove exception eating from dispatch_hook
+ - General bugfixes
+
+ 0.13.8 (2012-08-21)
+ +++++++++++++++++++
+
+ - Incredible Link header support :)
+
+ 0.13.7 (2012-08-19)
+ +++++++++++++++++++
+
+ - Support for (key, value) lists everywhere.
+ - Digest Authentication improvements.
+ - Ensure proxy exclusions work properly.
+ - Clearer UnicodeError exceptions.
+ - Automatic casting of URLs to strings (fURL and such)
+ - Bugfixes.
+
+ 0.13.6 (2012-08-06)
+ +++++++++++++++++++
+
+ - Long awaited fix for hanging connections!
+
+ 0.13.5 (2012-07-27)
+ +++++++++++++++++++
+
+ - Packaging fix
+
+ 0.13.4 (2012-07-27)
+ +++++++++++++++++++
+
+ - GSSAPI/Kerberos authentication!
+ - App Engine 2.7 Fixes!
+ - Fix leaking connections (from urllib3 update)
+ - OAuthlib path hack fix
+ - OAuthlib URL parameters fix.
+
+ 0.13.3 (2012-07-12)
+ +++++++++++++++++++
+
+ - Use simplejson if available.
+ - Do not hide SSLErrors behind Timeouts.
+ - Fixed param handling with urls containing fragments.
+ - Significantly improved information in User Agent.
+ - client certificates are ignored when verify=False
+
+ 0.13.2 (2012-06-28)
+ +++++++++++++++++++
+
+ - Zero dependencies (once again)!
+ - New: Response.reason
+ - Sign querystring parameters in OAuth 1.0
+ - Client certificates no longer ignored when verify=False
+ - Add openSUSE certificate support
+
+ 0.13.1 (2012-06-07)
+ +++++++++++++++++++
+
+ - Allow passing a file or file-like object as data.
+ - Allow hooks to return responses that indicate errors.
+ - Fix Response.text and Response.json for body-less responses.
+
+ 0.13.0 (2012-05-29)
+ +++++++++++++++++++
+
+ - Removal of Requests.async in favor of `grequests <https://github.com/kennethreitz/grequests>`_
+ - Allow disabling of cookie persistence.
+ - New implementation of safe_mode
+ - cookies.get now supports default argument
+ - Session cookies not saved when Session.request is called with return_response=False
+ - Env: no_proxy support.
+ - RequestsCookieJar improvements.
+ - Various bug fixes.
+
+ 0.12.1 (2012-05-08)
+ +++++++++++++++++++
+
+ - New ``Response.json`` property.
+ - Ability to add string file uploads.
+ - Fix out-of-range issue with iter_lines.
+ - Fix iter_content default size.
+ - Fix POST redirects containing files.
+
+ 0.12.0 (2012-05-02)
+ +++++++++++++++++++
+
+ - EXPERIMENTAL OAUTH SUPPORT!
+ - Proper CookieJar-backed cookies interface with awesome dict-like interface.
+ - Speed fix for non-iterated content chunks.
+ - Move ``pre_request`` to a more usable place.
+ - New ``pre_send`` hook.
+ - Lazily encode data, params, files.
+        - Load system Certificate Bundle if ``certifi`` isn't available.
+ - Cleanups, fixes.
+
+ 0.11.2 (2012-04-22)
+ +++++++++++++++++++
+
+ - Attempt to use the OS's certificate bundle if ``certifi`` isn't available.
+ - Infinite digest auth redirect fix.
+ - Multi-part file upload improvements.
+ - Fix decoding of invalid %encodings in URLs.
+        - If there is no content in a response, don't throw an error the second time that content is read.
+ - Upload data on redirects.
+
+ 0.11.1 (2012-03-30)
+ +++++++++++++++++++
+
+ * POST redirects now break RFC to do what browsers do: Follow up with a GET.
+ * New ``strict_mode`` configuration to disable new redirect behavior.
+
+
+ 0.11.0 (2012-03-14)
+ +++++++++++++++++++
+
+ * Private SSL Certificate support
+ * Remove select.poll from Gevent monkeypatching
+ * Remove redundant generator for chunked transfer encoding
+ * Fix: Response.ok raises Timeout Exception in safe_mode
+
+ 0.10.8 (2012-03-09)
+ +++++++++++++++++++
+
+ * Generate chunked ValueError fix
+ * Proxy configuration by environment variables
+ * Simplification of iter_lines.
+ * New `trust_env` configuration for disabling system/environment hints.
+ * Suppress cookie errors.
+
+ 0.10.7 (2012-03-07)
+ +++++++++++++++++++
+
+ * `encode_uri` = False
+
+ 0.10.6 (2012-02-25)
+ +++++++++++++++++++
+
+ * Allow '=' in cookies.
+
+ 0.10.5 (2012-02-25)
+ +++++++++++++++++++
+
+ * Response body with 0 content-length fix.
+ * New async.imap.
+ * Don't fail on netrc.
+
+
+ 0.10.4 (2012-02-20)
+ +++++++++++++++++++
+
+ * Honor netrc.
+
+ 0.10.3 (2012-02-20)
+ +++++++++++++++++++
+
+ * HEAD requests don't follow redirects anymore.
+ * raise_for_status() doesn't raise for 3xx anymore.
+ * Make Session objects picklable.
+ * ValueError for invalid schema URLs.
+
+ 0.10.2 (2012-01-15)
+ +++++++++++++++++++
+
+ * Vastly improved URL quoting.
+ * Additional allowed cookie key values.
+ * Attempted fix for "Too many open files" Error
+ * Replace unicode errors on first pass, no need for second pass.
+ * Append '/' to bare-domain urls before query insertion.
+ * Exceptions now inherit from RuntimeError.
+ * Binary uploads + auth fix.
+ * Bugfixes.
+
+
+ 0.10.1 (2012-01-23)
+ +++++++++++++++++++
+
+ * PYTHON 3 SUPPORT!
+ * Dropped 2.5 Support. (*Backwards Incompatible*)
+
+ 0.10.0 (2012-01-21)
+ +++++++++++++++++++
+
+ * ``Response.content`` is now bytes-only. (*Backwards Incompatible*)
+ * New ``Response.text`` is unicode-only.
+ * If no ``Response.encoding`` is specified and ``chardet`` is available, ``Response.text`` will guess an encoding.
+ * Default to ISO-8859-1 (Western) encoding for "text" subtypes.
+ * Removal of `decode_unicode`. (*Backwards Incompatible*)
+ * New multiple-hooks system.
+ * New ``Response.register_hook`` for registering hooks within the pipeline.
+ * ``Response.url`` is now Unicode.
+
+ 0.9.3 (2012-01-18)
+ ++++++++++++++++++
+
+ * SSL verify=False bugfix (apparent on windows machines).
+
+ 0.9.2 (2012-01-18)
+ ++++++++++++++++++
+
+ * Asynchronous async.send method.
+ * Support for proper chunk streams with boundaries.
+ * session argument for Session classes.
+ * Print entire hook tracebacks, not just exception instance.
+ * Fix response.iter_lines from pending next line.
+        * Fix bug in HTTP-digest auth w/ URI having query strings.
+ * Fix in Event Hooks section.
+ * Urllib3 update.
+
+
+ 0.9.1 (2012-01-06)
+ ++++++++++++++++++
+
+ * danger_mode for automatic Response.raise_for_status()
+ * Response.iter_lines refactor
+
+ 0.9.0 (2011-12-28)
+ ++++++++++++++++++
+
+ * verify ssl is default.
+
+
+ 0.8.9 (2011-12-28)
+ ++++++++++++++++++
+
+ * Packaging fix.
+
+
+ 0.8.8 (2011-12-28)
+ ++++++++++++++++++
+
+ * SSL CERT VERIFICATION!
+        * Release of Certifi: Mozilla's cert list.
+ * New 'verify' argument for SSL requests.
+ * Urllib3 update.
+
+ 0.8.7 (2011-12-24)
+ ++++++++++++++++++
+
+ * iter_lines last-line truncation fix
+ * Force safe_mode for async requests
+ * Handle safe_mode exceptions more consistently
+ * Fix iteration on null responses in safe_mode
+
+ 0.8.6 (2011-12-18)
+ ++++++++++++++++++
+
+ * Socket timeout fixes.
+ * Proxy Authorization support.
+
+ 0.8.5 (2011-12-14)
+ ++++++++++++++++++
+
+ * Response.iter_lines!
+
+ 0.8.4 (2011-12-11)
+ ++++++++++++++++++
+
+ * Prefetch bugfix.
+ * Added license to installed version.
+
+ 0.8.3 (2011-11-27)
+ ++++++++++++++++++
+
+ * Converted auth system to use simpler callable objects.
+ * New session parameter to API methods.
+ * Display full URL while logging.
+
+ 0.8.2 (2011-11-19)
+ ++++++++++++++++++
+
+        * New Unicode decoding system, based on overridable `Response.encoding`.
+ * Proper URL slash-quote handling.
+ * Cookies with ``[``, ``]``, and ``_`` allowed.
+
+ 0.8.1 (2011-11-15)
+ ++++++++++++++++++
+
+ * URL Request path fix
+ * Proxy fix.
+ * Timeouts fix.
+
+ 0.8.0 (2011-11-13)
+ ++++++++++++++++++
+
+ * Keep-alive support!
+ * Complete removal of Urllib2
+ * Complete removal of Poster
+ * Complete removal of CookieJars
+ * New ConnectionError raising
+ * Safe_mode for error catching
+ * prefetch parameter for request methods
+ * OPTION method
+ * Async pool size throttling
+ * File uploads send real names
+ * Vendored in urllib3
+
+ 0.7.6 (2011-11-07)
+ ++++++++++++++++++
+
+ * Digest authentication bugfix (attach query data to path)
+
+ 0.7.5 (2011-11-04)
+ ++++++++++++++++++
+
+ * Response.content = None if there was an invalid response.
+ * Redirection auth handling.
+
+ 0.7.4 (2011-10-26)
+ ++++++++++++++++++
+
+ * Session Hooks fix.
+
+ 0.7.3 (2011-10-23)
+ ++++++++++++++++++
+
+ * Digest Auth fix.
+
+
+ 0.7.2 (2011-10-23)
+ ++++++++++++++++++
+
+ * PATCH Fix.
+
+
+ 0.7.1 (2011-10-23)
+ ++++++++++++++++++
+
+ * Move away from urllib2 authentication handling.
+ * Fully Remove AuthManager, AuthObject, &c.
+ * New tuple-based auth system with handler callbacks.
+
+
+ 0.7.0 (2011-10-22)
+ ++++++++++++++++++
+
+ * Sessions are now the primary interface.
+ * Deprecated InvalidMethodException.
+ * PATCH fix.
+ * New config system (no more global settings).
+
+
+ 0.6.6 (2011-10-19)
+ ++++++++++++++++++
+
+ * Session parameter bugfix (params merging).
+
+
+ 0.6.5 (2011-10-18)
+ ++++++++++++++++++
+
+ * Offline (fast) test suite.
+ * Session dictionary argument merging.
+
+
+ 0.6.4 (2011-10-13)
+ ++++++++++++++++++
+
+ * Automatic decoding of unicode, based on HTTP Headers.
+ * New ``decode_unicode`` setting.
+ * Removal of ``r.read/close`` methods.
+        * New ``r.raw`` interface for advanced response usage.
+ * Automatic expansion of parameterized headers.
+
+
+ 0.6.3 (2011-10-13)
+ ++++++++++++++++++
+
+ * Beautiful ``requests.async`` module, for making async requests w/ gevent.
+
+
+ 0.6.2 (2011-10-09)
+ ++++++++++++++++++
+
+ * GET/HEAD obeys allow_redirects=False.
+
+
+ 0.6.1 (2011-08-20)
+ ++++++++++++++++++
+
+ * Enhanced status codes experience ``\o/``
+ * Set a maximum number of redirects (``settings.max_redirects``)
+ * Full Unicode URL support
+ * Support for protocol-less redirects.
+ * Allow for arbitrary request types.
+ * Bugfixes
+
+
+ 0.6.0 (2011-08-17)
+ ++++++++++++++++++
+
+ * New callback hook system
+ * New persistent sessions object and context manager
+ * Transparent Dict-cookie handling
+ * Status code reference object
+ * Removed Response.cached
+ * Added Response.request
+ * All args are kwargs
+ * Relative redirect support
+ * HTTPError handling improvements
+ * Improved https testing
+ * Bugfixes
+
+
+ 0.5.1 (2011-07-23)
+ ++++++++++++++++++
+
+ * International Domain Name Support!
+ * Access headers without fetching entire body (``read()``)
+ * Use lists as dicts for parameters
+ * Add Forced Basic Authentication
+ * Forced Basic is default authentication type
+ * ``python-requests.org`` default User-Agent header
+ * CaseInsensitiveDict lower-case caching
+ * Response.history bugfix
+
+
+ 0.5.0 (2011-06-21)
+ ++++++++++++++++++
+
+ * PATCH Support
+ * Support for Proxies
+ * HTTPBin Test Suite
+ * Redirect Fixes
+ * settings.verbose stream writing
+ * Querystrings for all methods
+        * URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as explicitly raised
+          ``r = requests.get('hwe://blah'); r.raise_for_status()``
+
+
+ 0.4.1 (2011-05-22)
+ ++++++++++++++++++
+
+ * Improved Redirection Handling
+ * New 'allow_redirects' param for following non-GET/HEAD Redirects
+ * Settings module refactoring
+
+
+ 0.4.0 (2011-05-15)
+ ++++++++++++++++++
+
+ * Response.history: list of redirected responses
+ * Case-Insensitive Header Dictionaries!
+ * Unicode URLs
+
+
+ 0.3.4 (2011-05-14)
+ ++++++++++++++++++
+
+ * Urllib2 HTTPAuthentication Recursion fix (Basic/Digest)
+ * Internal Refactor
+ * Bytes data upload Bugfix
+
+
+
+ 0.3.3 (2011-05-12)
+ ++++++++++++++++++
+
+ * Request timeouts
+ * Unicode url-encoded data
+ * Settings context manager and module
+
+
+ 0.3.2 (2011-04-15)
+ ++++++++++++++++++
+
+ * Automatic Decompression of GZip Encoded Content
+ * AutoAuth Support for Tupled HTTP Auth
+
+
+ 0.3.1 (2011-04-01)
+ ++++++++++++++++++
+
+ * Cookie Changes
+ * Response.read()
+ * Poster fix
+
+
+ 0.3.0 (2011-02-25)
+ ++++++++++++++++++
+
+ * Automatic Authentication API Change
+ * Smarter Query URL Parameterization
+ * Allow file uploads and POST data together
+ * New Authentication Manager System
+ - Simpler Basic HTTP System
+          - Supports all built-in urllib2 Auths
+ - Allows for custom Auth Handlers
+
+
+ 0.2.4 (2011-02-19)
+ ++++++++++++++++++
+
+ * Python 2.5 Support
+ * PyPy-c v1.4 Support
+ * Auto-Authentication tests
+ * Improved Request object constructor
+
+ 0.2.3 (2011-02-15)
+ ++++++++++++++++++
+
+ * New HTTPHandling Methods
+ - Response.__nonzero__ (false if bad HTTP Status)
+ - Response.ok (True if expected HTTP Status)
+ - Response.error (Logged HTTPError if bad HTTP Status)
+ - Response.raise_for_status() (Raises stored HTTPError)
+
+
+ 0.2.2 (2011-02-14)
+ ++++++++++++++++++
+
+        * Still handles the request in the event of an HTTPError. (Issue #2)
+ * Eventlet and Gevent Monkeypatch support.
+ * Cookie Support (Issue #1)
+
+
+ 0.2.1 (2011-02-14)
+ ++++++++++++++++++
+
+ * Added file attribute to POST and PUT requests for multipart-encode file uploads.
+ * Added Request.url attribute for context and redirects
+
+
+ 0.2.0 (2011-02-14)
+ ++++++++++++++++++
+
+ * Birth!
+
+
+ 0.0.1 (2011-02-13)
+ ++++++++++++++++++
+
+ * Frustration
+ * Conception
+
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
diff --git a/third_party/python/requests/README.rst b/third_party/python/requests/README.rst
new file mode 100644
index 0000000000..99d30e7246
--- /dev/null
+++ b/third_party/python/requests/README.rst
@@ -0,0 +1,86 @@
+Requests: HTTP for Humans
+=========================
+
+.. image:: https://img.shields.io/pypi/v/requests.svg
+ :target: https://pypi.python.org/pypi/requests
+
+.. image:: https://img.shields.io/pypi/dm/requests.svg
+ :target: https://pypi.python.org/pypi/requests
+
+
+
+
+Requests is an Apache2 Licensed HTTP library, written in Python, for human
+beings.
+
+Most existing Python modules for sending HTTP requests are extremely
+verbose and cumbersome. Python's builtin urllib2 module provides most of
+the HTTP capabilities you should need, but the api is thoroughly broken.
+It requires an enormous amount of work (even method overrides) to
+perform the simplest of tasks.
+
+Things shouldn't be this way. Not in Python.
+
+.. code-block:: python
+
+ >>> r = requests.get('https://api.github.com', auth=('user', 'pass'))
+ >>> r.status_code
+ 204
+ >>> r.headers['content-type']
+ 'application/json'
+ >>> r.text
+ ...
+
+See `the same code, without Requests <https://gist.github.com/973705>`_.
+
+Requests allows you to send HTTP/1.1 requests. You can add headers, form data,
+multipart files, and parameters with simple Python dictionaries, and access the
+response data in the same way. It's powered by httplib and `urllib3
+<https://github.com/shazow/urllib3>`_, but it does all the hard work and crazy
+hacks for you.
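+
+For example, query-string parameters and headers are plain dictionaries (a
+minimal sketch against httpbin.org):
+
+.. code-block:: python
+
+    import requests
+
+    response = requests.get(
+        'https://httpbin.org/get',
+        params={'q': 'requests'},
+        headers={'Accept': 'application/json'},
+    )
+    print(response.json())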
+
+
+Features
+--------
+
+- International Domains and URLs
+- Keep-Alive & Connection Pooling
+- Sessions with Cookie Persistence
+- Browser-style SSL Verification
+- Basic/Digest Authentication
+- Elegant Key/Value Cookies
+- Automatic Decompression
+- Unicode Response Bodies
+- Multipart File Uploads
+- Connection Timeouts
+- Thread-safety
+- HTTP(S) proxy support
+
+
+Installation
+------------
+
+To install Requests, simply:
+
+.. code-block:: bash
+
+ $ pip install requests
+
+
+Documentation
+-------------
+
+Documentation is available at http://docs.python-requests.org/.
+
+
+Contribute
+----------
+
+#. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug. There is a `Contributor Friendly`_ tag for issues that should be ideal for people who are not very familiar with the codebase yet.
+#. Fork `the repository`_ on GitHub to start making your changes to the **master** branch (or branch off of it).
+#. Write a test which shows that the bug was fixed or that the feature works as expected.
+#. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to AUTHORS_.
+
+.. _`the repository`: http://github.com/kennethreitz/requests
+.. _AUTHORS: https://github.com/kennethreitz/requests/blob/master/AUTHORS.rst
+.. _Contributor Friendly: https://github.com/kennethreitz/requests/issues?direction=desc&labels=Contributor+Friendly&page=1&sort=updated&state=open
diff --git a/third_party/python/requests/requests.egg-info/PKG-INFO b/third_party/python/requests/requests.egg-info/PKG-INFO
new file mode 100644
index 0000000000..d75ebac87f
--- /dev/null
+++ b/third_party/python/requests/requests.egg-info/PKG-INFO
@@ -0,0 +1,1238 @@
+Metadata-Version: 1.1
+Name: requests
+Version: 2.9.1
+Summary: Python HTTP for Humans.
+Home-page: http://python-requests.org
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.com
+License: Apache 2.0
+Description: Requests: HTTP for Humans
+ =========================
+
+ .. image:: https://img.shields.io/pypi/v/requests.svg
+ :target: https://pypi.python.org/pypi/requests
+
+ .. image:: https://img.shields.io/pypi/dm/requests.svg
+ :target: https://pypi.python.org/pypi/requests
+
+
+
+
+ Requests is an Apache2 Licensed HTTP library, written in Python, for human
+ beings.
+
+ Most existing Python modules for sending HTTP requests are extremely
+ verbose and cumbersome. Python's builtin urllib2 module provides most of
+ the HTTP capabilities you should need, but the api is thoroughly broken.
+ It requires an enormous amount of work (even method overrides) to
+ perform the simplest of tasks.
+
+ Things shouldn't be this way. Not in Python.
+
+ .. code-block:: python
+
+ >>> r = requests.get('https://api.github.com', auth=('user', 'pass'))
+ >>> r.status_code
+ 204
+ >>> r.headers['content-type']
+ 'application/json'
+ >>> r.text
+ ...
+
+ See `the same code, without Requests <https://gist.github.com/973705>`_.
+
+        Requests allows you to send HTTP/1.1 requests. You can add headers, form data,
+ multipart files, and parameters with simple Python dictionaries, and access the
+ response data in the same way. It's powered by httplib and `urllib3
+ <https://github.com/shazow/urllib3>`_, but it does all the hard work and crazy
+ hacks for you.
+
+
+ Features
+ --------
+
+ - International Domains and URLs
+ - Keep-Alive & Connection Pooling
+ - Sessions with Cookie Persistence
+ - Browser-style SSL Verification
+ - Basic/Digest Authentication
+ - Elegant Key/Value Cookies
+ - Automatic Decompression
+ - Unicode Response Bodies
+ - Multipart File Uploads
+ - Connection Timeouts
+ - Thread-safety
+ - HTTP(S) proxy support
+
+
+ Installation
+ ------------
+
+ To install Requests, simply:
+
+ .. code-block:: bash
+
+ $ pip install requests
+
+
+ Documentation
+ -------------
+
+ Documentation is available at http://docs.python-requests.org/.
+
+
+ Contribute
+ ----------
+
+ #. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug. There is a `Contributor Friendly`_ tag for issues that should be ideal for people who are not very familiar with the codebase yet.
+ #. Fork `the repository`_ on GitHub to start making your changes to the **master** branch (or branch off of it).
+ #. Write a test which shows that the bug was fixed or that the feature works as expected.
+ #. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to AUTHORS_.
+
+ .. _`the repository`: http://github.com/kennethreitz/requests
+ .. _AUTHORS: https://github.com/kennethreitz/requests/blob/master/AUTHORS.rst
+ .. _Contributor Friendly: https://github.com/kennethreitz/requests/issues?direction=desc&labels=Contributor+Friendly&page=1&sort=updated&state=open
+
+
+ .. :changelog:
+
+ Release History
+ ---------------
+
+ 2.9.1 (2015-12-21)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Resolve regression introduced in 2.9.0 that made it impossible to send binary
+ strings as bodies in Python 3.
+ - Fixed errors when calculating cookie expiration dates in certain locales.
+
+ **Miscellaneous**
+
+ - Updated bundled urllib3 to 1.13.1.
+
+ 2.9.0 (2015-12-15)
+ ++++++++++++++++++
+
+ **Minor Improvements** (Backwards compatible)
+
+        - The ``verify`` keyword argument now supports being passed a path to a
+          directory of CA certificates, not just a single-file bundle (see the
+          example after this list).
+ - Warnings are now emitted when sending files opened in text mode.
+ - Added the 511 Network Authentication Required status code to the status code
+ registry.
+
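+        As a minimal sketch of the ``verify``-as-directory form noted above (the
+        path shown is only an example):
+
+        .. code-block:: python
+
+            import requests
+
+            # verify may point at a directory of CA certificates instead of a
+            # single bundle file; the path below is only an example.
+            r = requests.get('https://example.com', verify='/etc/ssl/certs/')
+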
+ **Bugfixes**
+
+ - For file-like objects that are not seeked to the very beginning, we now
+ send the content length for the number of bytes we will actually read, rather
+ than the total size of the file, allowing partial file uploads.
+ - When uploading file-like objects, if they are empty or have no obvious
+ content length we set ``Transfer-Encoding: chunked`` rather than
+ ``Content-Length: 0``.
+ - We correctly receive the response in buffered mode when uploading chunked
+ bodies.
+ - We now handle being passed a query string as a bytestring on Python 3, by
+ decoding it as UTF-8.
+ - Sessions are now closed in all cases (exceptional and not) when using the
+ functional API rather than leaking and waiting for the garbage collector to
+ clean them up.
+ - Correctly handle digest auth headers with a malformed ``qop`` directive that
+ contains no token, by treating it the same as if no ``qop`` directive was
+ provided at all.
+ - Minor performance improvements when removing specific cookies by name.
+
+ **Miscellaneous**
+
+ - Updated urllib3 to 1.13.
+
+ 2.8.1 (2015-10-13)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Update certificate bundle to match ``certifi`` 2015.9.6.2's weak certificate
+ bundle.
+ - Fix a bug in 2.8.0 where requests would raise ``ConnectTimeout`` instead of
+ ``ConnectionError``
+ - When using the PreparedRequest flow, requests will now correctly respect the
+ ``json`` parameter. Broken in 2.8.0.
+ - When using the PreparedRequest flow, requests will now correctly handle a
+ Unicode-string method name on Python 2. Broken in 2.8.0.
+
+ 2.8.0 (2015-10-05)
+ ++++++++++++++++++
+
+ **Minor Improvements** (Backwards Compatible)
+
+        - Requests now supports per-host proxies. This allows the ``proxies``
+          dictionary to have entries of the form
+          ``{'<scheme>://<hostname>': '<proxy>'}``. Host-specific proxies will be used
+          in preference to the previously-supported scheme-specific ones, but the
+          previous syntax will continue to work (see the example after this list).
+ - ``Response.raise_for_status`` now prints the URL that failed as part of the
+ exception message.
+        - ``requests.utils.get_netrc_auth`` now takes a ``raise_errors`` kwarg,
+ defaulting to ``False``. When ``True``, errors parsing ``.netrc`` files cause
+ exceptions to be thrown.
+ - Change to bundled projects import logic to make it easier to unbundle
+ requests downstream.
+ - Changed the default User-Agent string to avoid leaking data on Linux: now
+ contains only the requests version.
+
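+        For instance, the per-host proxy form described above might look like this
+        (the hosts and proxy URLs are purely illustrative):
+
+        .. code-block:: python
+
+            import requests
+
+            # A host-specific proxy, plus a scheme-wide fallback.
+            proxies = {
+                'https://api.github.com': 'http://127.0.0.1:8080',
+                'https': 'http://127.0.0.1:3128',
+            }
+            r = requests.get('https://api.github.com', proxies=proxies)
+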
+ **Bugfixes**
+
+ - The ``json`` parameter to ``post()`` and friends will now only be used if
+ neither ``data`` nor ``files`` are present, consistent with the
+ documentation.
+ - We now ignore empty fields in the ``NO_PROXY`` environment variable.
+ - Fixed problem where ``httplib.BadStatusLine`` would get raised if combining
+ ``stream=True`` with ``contextlib.closing``.
+ - Prevented bugs where we would attempt to return the same connection back to
+ the connection pool twice when sending a Chunked body.
+ - Miscellaneous minor internal changes.
+ - Digest Auth support is now thread safe.
+
+ **Updates**
+
+ - Updated urllib3 to 1.12.
+
+ 2.7.0 (2015-05-03)
+ ++++++++++++++++++
+
+ This is the first release that follows our new release process. For more, see
+ `our documentation
+ <http://docs.python-requests.org/en/latest/community/release-process/>`_.
+
+ **Bugfixes**
+
+ - Updated urllib3 to 1.10.4, resolving several bugs involving chunked transfer
+ encoding and response framing.
+
+ 2.6.2 (2015-04-23)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Fix regression where compressed data that was sent as chunked data was not
+ properly decompressed. (#2561)
+
+ 2.6.1 (2015-04-22)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Remove VendorAlias import machinery introduced in v2.5.2.
+
+ - Simplify the PreparedRequest.prepare API: We no longer require the user to
+ pass an empty list to the hooks keyword argument. (c.f. #2552)
+
+ - Resolve redirects now receives and forwards all of the original arguments to
+ the adapter. (#2503)
+
+ - Handle UnicodeDecodeErrors when trying to deal with a unicode URL that
+ cannot be encoded in ASCII. (#2540)
+
+ - Populate the parsed path of the URI field when performing Digest
+ Authentication. (#2426)
+
+ - Copy a PreparedRequest's CookieJar more reliably when it is not an instance
+ of RequestsCookieJar. (#2527)
+
+ 2.6.0 (2015-03-14)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - CVE-2015-2296: Fix handling of cookies on redirect. Previously a cookie
+ without a host value set would use the hostname for the redirected URL
+ exposing requests users to session fixation attacks and potentially cookie
+ stealing. This was disclosed privately by Matthew Daley of
+ `BugFuzz <https://bugfuzz.com>`_. This affects all versions of requests from
+ v2.1.0 to v2.5.3 (inclusive on both ends).
+
+ - Fix error when requests is an ``install_requires`` dependency and ``python
+ setup.py test`` is run. (#2462)
+
+ - Fix error when urllib3 is unbundled and requests continues to use the
+ vendored import location.
+
+ - Include fixes to ``urllib3``'s header handling.
+
+ - Requests' handling of unvendored dependencies is now more restrictive.
+
+ **Features and Improvements**
+
+ - Support bytearrays when passed as parameters in the ``files`` argument.
+ (#2468)
+
+ - Avoid data duplication when creating a request with ``str``, ``bytes``, or
+ ``bytearray`` input to the ``files`` argument.
+
+ 2.5.3 (2015-02-24)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Revert changes to our vendored certificate bundle. For more context see
+ (#2455, #2456, and http://bugs.python.org/issue23476)
+
+ 2.5.2 (2015-02-23)
+ ++++++++++++++++++
+
+ **Features and Improvements**
+
+ - Add sha256 fingerprint support. (`shazow/urllib3#540`_)
+
+ - Improve the performance of headers. (`shazow/urllib3#544`_)
+
+ **Bugfixes**
+
+ - Copy pip's import machinery. When downstream redistributors remove
+ requests.packages.urllib3 the import machinery will continue to let those
+ same symbols work. Example usage in requests' documentation and 3rd-party
+ libraries relying on the vendored copies of urllib3 will work without having
+ to fallback to the system urllib3.
+
+ - Attempt to quote parts of the URL on redirect if unquoting and then quoting
+ fails. (#2356)
+
+ - Fix filename type check for multipart form-data uploads. (#2411)
+
+ - Properly handle the case where a server issuing digest authentication
+ challenges provides both auth and auth-int qop-values. (#2408)
+
+ - Fix a socket leak. (`shazow/urllib3#549`_)
+
+ - Fix multiple ``Set-Cookie`` headers properly. (`shazow/urllib3#534`_)
+
+ - Disable the built-in hostname verification. (`shazow/urllib3#526`_)
+
+ - Fix the behaviour of decoding an exhausted stream. (`shazow/urllib3#535`_)
+
+ **Security**
+
+ - Pulled in an updated ``cacert.pem``.
+
+ - Drop RC4 from the default cipher list. (`shazow/urllib3#551`_)
+
+ .. _shazow/urllib3#551: https://github.com/shazow/urllib3/pull/551
+ .. _shazow/urllib3#549: https://github.com/shazow/urllib3/pull/549
+ .. _shazow/urllib3#544: https://github.com/shazow/urllib3/pull/544
+ .. _shazow/urllib3#540: https://github.com/shazow/urllib3/pull/540
+ .. _shazow/urllib3#535: https://github.com/shazow/urllib3/pull/535
+ .. _shazow/urllib3#534: https://github.com/shazow/urllib3/pull/534
+ .. _shazow/urllib3#526: https://github.com/shazow/urllib3/pull/526
+
+ 2.5.1 (2014-12-23)
+ ++++++++++++++++++
+
+ **Behavioural Changes**
+
+ - Only catch HTTPErrors in raise_for_status (#2382)
+
+ **Bugfixes**
+
+ - Handle LocationParseError from urllib3 (#2344)
+ - Handle file-like object filenames that are not strings (#2379)
+ - Unbreak HTTPDigestAuth handler. Allow new nonces to be negotiated (#2389)
+
+ 2.5.0 (2014-12-01)
+ ++++++++++++++++++
+
+ **Improvements**
+
+ - Allow usage of urllib3's Retry object with HTTPAdapters (#2216)
+ - The ``iter_lines`` method on a response now accepts a delimiter with which
+ to split the content (#2295)
+
+ **Behavioural Changes**
+
+ - Add deprecation warnings to functions in requests.utils that will be removed
+ in 3.0 (#2309)
+ - Sessions used by the functional API are always closed (#2326)
+ - Restrict requests to HTTP/1.1 and HTTP/1.0 (stop accepting HTTP/0.9) (#2323)
+
+ **Bugfixes**
+
+ - Only parse the URL once (#2353)
+ - Allow Content-Length header to always be overridden (#2332)
+ - Properly handle files in HTTPDigestAuth (#2333)
+ - Cap redirect_cache size to prevent memory abuse (#2299)
+ - Fix HTTPDigestAuth handling of redirects after authenticating successfully
+ (#2253)
+ - Fix crash with custom method parameter to Session.request (#2317)
+ - Fix how Link headers are parsed using the regular expression library (#2271)
+
+ **Documentation**
+
+ - Add more references for interlinking (#2348)
+ - Update CSS for theme (#2290)
+ - Update width of buttons and sidebar (#2289)
+ - Replace references of Gittip with Gratipay (#2282)
+ - Add link to changelog in sidebar (#2273)
+
+ 2.4.3 (2014-10-06)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Unicode URL improvements for Python 2.
+ - Re-order JSON param for backwards compat.
+ - Automatically defrag authentication schemes from host/pass URIs. (`#2249 <https://github.com/kennethreitz/requests/issues/2249>`_)
+
+
+ 2.4.2 (2014-10-05)
+ ++++++++++++++++++
+
+ **Improvements**
+
+        - FINALLY! Add json parameter for uploads! (`#2258 <https://github.com/kennethreitz/requests/pull/2258>`_; see the example after this list)
+ - Support for bytestring URLs on Python 3.x (`#2238 <https://github.com/kennethreitz/requests/pull/2238>`_)
+
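+        A quick sketch of the new ``json`` parameter (the endpoint and payload are
+        illustrative only):
+
+        .. code-block:: python
+
+            import requests
+
+            # The dict is serialized to JSON and the Content-Type header is set for you.
+            r = requests.post('http://httpbin.org/post', json={'key': 'value'})
+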
+ **Bugfixes**
+
+ - Avoid getting stuck in a loop (`#2244 <https://github.com/kennethreitz/requests/pull/2244>`_)
+ - Multiple calls to iter* fail with unhelpful error. (`#2240 <https://github.com/kennethreitz/requests/issues/2240>`_, `#2241 <https://github.com/kennethreitz/requests/issues/2241>`_)
+
+ **Documentation**
+
+ - Correct redirection introduction (`#2245 <https://github.com/kennethreitz/requests/pull/2245/>`_)
+ - Added example of how to send multiple files in one request. (`#2227 <https://github.com/kennethreitz/requests/pull/2227/>`_)
+ - Clarify how to pass a custom set of CAs (`#2248 <https://github.com/kennethreitz/requests/pull/2248/>`_)
+
+
+
+ 2.4.1 (2014-09-09)
+ ++++++++++++++++++
+
+ - Now has a "security" package extras set, ``$ pip install requests[security]``
+ - Requests will now use Certifi if it is available.
+ - Capture and re-raise urllib3 ProtocolError
+ - Bugfix for responses that attempt to redirect to themselves forever (wtf?).
+
+
+ 2.4.0 (2014-08-29)
+ ++++++++++++++++++
+
+ **Behavioral Changes**
+
+ - ``Connection: keep-alive`` header is now sent automatically.
+
+ **Improvements**
+
+        - Support for connect timeouts! Timeout now accepts a tuple (connect, read) which is used to set individual connect and read timeouts (see the example after this list).
+ - Allow copying of PreparedRequests without headers/cookies.
+ - Updated bundled urllib3 version.
+ - Refactored settings loading from environment -- new `Session.merge_environment_settings`.
+ - Handle socket errors in iter_content.
+
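+        For example, the (connect, read) timeout tuple might be used like this (the
+        values are illustrative only):
+
+        .. code-block:: python
+
+            import requests
+
+            # 3.05 seconds to establish the connection, 27 seconds to read the response.
+            r = requests.get('https://api.github.com', timeout=(3.05, 27))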
+
+ 2.3.0 (2014-05-16)
+ ++++++++++++++++++
+
+ **API Changes**
+
+ - New ``Response`` property ``is_redirect``, which is true when the
+ library could have processed this response as a redirection (whether
+ or not it actually did).
+ - The ``timeout`` parameter now affects requests with both ``stream=True`` and
+ ``stream=False`` equally.
+ - The change in v2.0.0 to mandate explicit proxy schemes has been reverted.
+ Proxy schemes now default to ``http://``.
+ - The ``CaseInsensitiveDict`` used for HTTP headers now behaves like a normal
+          dictionary when referenced as a string or viewed in the interpreter.
+
+ **Bugfixes**
+
+ - No longer expose Authorization or Proxy-Authorization headers on redirect.
+ Fix CVE-2014-1829 and CVE-2014-1830 respectively.
+ - Authorization is re-evaluated each redirect.
+ - On redirect, pass url as native strings.
+ - Fall-back to autodetected encoding for JSON when Unicode detection fails.
+ - Headers set to ``None`` on the ``Session`` are now correctly not sent.
+ - Correctly honor ``decode_unicode`` even if it wasn't used earlier in the same
+ response.
+ - Stop advertising ``compress`` as a supported Content-Encoding.
+ - The ``Response.history`` parameter is now always a list.
+ - Many, many ``urllib3`` bugfixes.
+
+ 2.2.1 (2014-01-23)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Fixes incorrect parsing of proxy credentials that contain a literal or encoded '#' character.
+ - Assorted urllib3 fixes.
+
+ 2.2.0 (2014-01-09)
+ ++++++++++++++++++
+
+ **API Changes**
+
+ - New exception: ``ContentDecodingError``. Raised instead of ``urllib3``
+ ``DecodeError`` exceptions.
+
+ **Bugfixes**
+
+ - Avoid many many exceptions from the buggy implementation of ``proxy_bypass`` on OS X in Python 2.6.
+ - Avoid crashing when attempting to get authentication credentials from ~/.netrc when running as a user without a home directory.
+ - Use the correct pool size for pools of connections to proxies.
+ - Fix iteration of ``CookieJar`` objects.
+ - Ensure that cookies are persisted over redirect.
+ - Switch back to using chardet, since it has merged with charade.
+
+ 2.1.0 (2013-12-05)
+ ++++++++++++++++++
+
+ - Updated CA Bundle, of course.
+ - Cookies set on individual Requests through a ``Session`` (e.g. via ``Session.get()``) are no longer persisted to the ``Session``.
+ - Clean up connections when we hit problems during chunked upload, rather than leaking them.
+ - Return connections to the pool when a chunked upload is successful, rather than leaking it.
+ - Match the HTTPbis recommendation for HTTP 301 redirects.
+ - Prevent hanging when using streaming uploads and Digest Auth when a 401 is received.
+ - Values of headers set by Requests are now always the native string type.
+ - Fix previously broken SNI support.
+ - Fix accessing HTTP proxies using proxy authentication.
+ - Unencode HTTP Basic usernames and passwords extracted from URLs.
+ - Support for IP address ranges for no_proxy environment variable
+ - Parse headers correctly when users override the default ``Host:`` header.
+ - Avoid munging the URL in case of case-sensitive servers.
+ - Looser URL handling for non-HTTP/HTTPS urls.
+ - Accept unicode methods in Python 2.6 and 2.7.
+ - More resilient cookie handling.
+ - Make ``Response`` objects pickleable.
+ - Actually added MD5-sess to Digest Auth instead of pretending to like last time.
+ - Updated internal urllib3.
+ - Fixed @Lukasa's lack of taste.
+
+ 2.0.1 (2013-10-24)
+ ++++++++++++++++++
+
+ - Updated included CA Bundle with new mistrusts and automated process for the future
+ - Added MD5-sess to Digest Auth
+ - Accept per-file headers in multipart file POST messages.
+ - Fixed: Don't send the full URL on CONNECT messages.
+ - Fixed: Correctly lowercase a redirect scheme.
+ - Fixed: Cookies not persisted when set via functional API.
+ - Fixed: Translate urllib3 ProxyError into a requests ProxyError derived from ConnectionError.
+ - Updated internal urllib3 and chardet.
+
+ 2.0.0 (2013-09-24)
+ ++++++++++++++++++
+
+ **API Changes:**
+
+ - Keys in the Headers dictionary are now native strings on all Python versions,
+ i.e. bytestrings on Python 2, unicode on Python 3.
+ - Proxy URLs now *must* have an explicit scheme. A ``MissingSchema`` exception
+ will be raised if they don't.
+        - Timeouts now apply to read time if ``stream=False``.
+ - ``RequestException`` is now a subclass of ``IOError``, not ``RuntimeError``.
+ - Added new method to ``PreparedRequest`` objects: ``PreparedRequest.copy()``.
+ - Added new method to ``Session`` objects: ``Session.update_request()``. This
+ method updates a ``Request`` object with the data (e.g. cookies) stored on
+ the ``Session``.
+ - Added new method to ``Session`` objects: ``Session.prepare_request()``. This
+ method updates and prepares a ``Request`` object, and returns the
+ corresponding ``PreparedRequest`` object.
+ - Added new method to ``HTTPAdapter`` objects: ``HTTPAdapter.proxy_headers()``.
+ This should not be called directly, but improves the subclass interface.
+ - ``httplib.IncompleteRead`` exceptions caused by incorrect chunked encoding
+ will now raise a Requests ``ChunkedEncodingError`` instead.
+ - Invalid percent-escape sequences now cause a Requests ``InvalidURL``
+ exception to be raised.
+ - HTTP 208 no longer uses reason phrase ``"im_used"``. Correctly uses
+ ``"already_reported"``.
+ - HTTP 226 reason added (``"im_used"``).
+
+ **Bugfixes:**
+
+ - Vastly improved proxy support, including the CONNECT verb. Special thanks to
+ the many contributors who worked towards this improvement.
+ - Cookies are now properly managed when 401 authentication responses are
+ received.
+ - Chunked encoding fixes.
+ - Support for mixed case schemes.
+ - Better handling of streaming downloads.
+ - Retrieve environment proxies from more locations.
+ - Minor cookies fixes.
+ - Improved redirect behaviour.
+ - Improved streaming behaviour, particularly for compressed data.
+ - Miscellaneous small Python 3 text encoding bugs.
+ - ``.netrc`` no longer overrides explicit auth.
+ - Cookies set by hooks are now correctly persisted on Sessions.
+ - Fix problem with cookies that specify port numbers in their host field.
+ - ``BytesIO`` can be used to perform streaming uploads.
+ - More generous parsing of the ``no_proxy`` environment variable.
+ - Non-string objects can be passed in data values alongside files.
+
+ 1.2.3 (2013-05-25)
+ ++++++++++++++++++
+
+ - Simple packaging fix
+
+
+ 1.2.2 (2013-05-23)
+ ++++++++++++++++++
+
+ - Simple packaging fix
+
+
+ 1.2.1 (2013-05-20)
+ ++++++++++++++++++
+
+ - 301 and 302 redirects now change the verb to GET for all verbs, not just
+ POST, improving browser compatibility.
+ - Python 3.3.2 compatibility
+ - Always percent-encode location headers
+ - Fix connection adapter matching to be most-specific first
+ - new argument to the default connection adapter for passing a block argument
+ - prevent a KeyError when there's no link headers
+
+ 1.2.0 (2013-03-31)
+ ++++++++++++++++++
+
+ - Fixed cookies on sessions and on requests
+ - Significantly change how hooks are dispatched - hooks now receive all the
+ arguments specified by the user when making a request so hooks can make a
+ secondary request with the same parameters. This is especially necessary for
+ authentication handler authors
+ - certifi support was removed
+ - Fixed bug where using OAuth 1 with body ``signature_type`` sent no data
+ - Major proxy work thanks to @Lukasa including parsing of proxy authentication
+ from the proxy url
+ - Fix DigestAuth handling too many 401s
+ - Update vendored urllib3 to include SSL bug fixes
+        - Allow keyword arguments to be passed to ``json.loads()`` via the
+          ``Response.json()`` method (see the example after this list)
+ - Don't send ``Content-Length`` header by default on ``GET`` or ``HEAD``
+ requests
+ - Add ``elapsed`` attribute to ``Response`` objects to time how long a request
+ took.
+ - Fix ``RequestsCookieJar``
+ - Sessions and Adapters are now picklable, i.e., can be used with the
+ multiprocessing library
+ - Update charade to version 1.0.3
+
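+        A short sketch of forwarding keyword arguments through ``Response.json()``
+        (``parse_int`` is just one of ``json.loads()``'s options):
+
+        .. code-block:: python
+
+            import requests
+
+            r = requests.get('http://httpbin.org/get')
+            # Keyword arguments are forwarded to json.loads().
+            data = r.json(parse_int=str)
+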
+ The change in how hooks are dispatched will likely cause a great deal of
+ issues.
+
+ 1.1.0 (2013-01-10)
+ ++++++++++++++++++
+
+ - CHUNKED REQUESTS
+ - Support for iterable response bodies
+ - Assume servers persist redirect params
+ - Allow explicit content types to be specified for file data
+ - Make merge_kwargs case-insensitive when looking up keys
+
+ 1.0.3 (2012-12-18)
+ ++++++++++++++++++
+
+ - Fix file upload encoding bug
+ - Fix cookie behavior
+
+ 1.0.2 (2012-12-17)
+ ++++++++++++++++++
+
+ - Proxy fix for HTTPAdapter.
+
+ 1.0.1 (2012-12-17)
+ ++++++++++++++++++
+
+ - Cert verification exception bug.
+ - Proxy fix for HTTPAdapter.
+
+ 1.0.0 (2012-12-17)
+ ++++++++++++++++++
+
+ - Massive Refactor and Simplification
+ - Switch to Apache 2.0 license
+ - Swappable Connection Adapters
+ - Mountable Connection Adapters
+ - Mutable ProcessedRequest chain
+ - /s/prefetch/stream
+ - Removal of all configuration
+ - Standard library logging
+ - Make Response.json() callable, not property.
+ - Usage of new charade project, which provides python 2 and 3 simultaneous chardet.
+ - Removal of all hooks except 'response'
+ - Removal of all authentication helpers (OAuth, Kerberos)
+
+ This is not a backwards compatible change.
+
+ 0.14.2 (2012-10-27)
+ +++++++++++++++++++
+
+ - Improved mime-compatible JSON handling
+ - Proxy fixes
+ - Path hack fixes
+ - Case-Insensitive Content-Encoding headers
+ - Support for CJK parameters in form posts
+
+
+ 0.14.1 (2012-10-01)
+ +++++++++++++++++++
+
+ - Python 3.3 Compatibility
+        - Simplify default accept-encoding
+ - Bugfixes
+
+
+ 0.14.0 (2012-09-02)
+ ++++++++++++++++++++
+
+ - No more iter_content errors if already downloaded.
+
+ 0.13.9 (2012-08-25)
+ +++++++++++++++++++
+
+ - Fix for OAuth + POSTs
+ - Remove exception eating from dispatch_hook
+ - General bugfixes
+
+ 0.13.8 (2012-08-21)
+ +++++++++++++++++++
+
+ - Incredible Link header support :)
+
+ 0.13.7 (2012-08-19)
+ +++++++++++++++++++
+
+ - Support for (key, value) lists everywhere.
+ - Digest Authentication improvements.
+ - Ensure proxy exclusions work properly.
+ - Clearer UnicodeError exceptions.
+ - Automatic casting of URLs to strings (fURL and such)
+ - Bugfixes.
+
+ 0.13.6 (2012-08-06)
+ +++++++++++++++++++
+
+ - Long awaited fix for hanging connections!
+
+ 0.13.5 (2012-07-27)
+ +++++++++++++++++++
+
+ - Packaging fix
+
+ 0.13.4 (2012-07-27)
+ +++++++++++++++++++
+
+ - GSSAPI/Kerberos authentication!
+ - App Engine 2.7 Fixes!
+ - Fix leaking connections (from urllib3 update)
+ - OAuthlib path hack fix
+ - OAuthlib URL parameters fix.
+
+ 0.13.3 (2012-07-12)
+ +++++++++++++++++++
+
+ - Use simplejson if available.
+ - Do not hide SSLErrors behind Timeouts.
+ - Fixed param handling with urls containing fragments.
+ - Significantly improved information in User Agent.
+ - client certificates are ignored when verify=False
+
+ 0.13.2 (2012-06-28)
+ +++++++++++++++++++
+
+ - Zero dependencies (once again)!
+ - New: Response.reason
+ - Sign querystring parameters in OAuth 1.0
+ - Client certificates no longer ignored when verify=False
+ - Add openSUSE certificate support
+
+ 0.13.1 (2012-06-07)
+ +++++++++++++++++++
+
+ - Allow passing a file or file-like object as data.
+ - Allow hooks to return responses that indicate errors.
+ - Fix Response.text and Response.json for body-less responses.
+
+ 0.13.0 (2012-05-29)
+ +++++++++++++++++++
+
+ - Removal of Requests.async in favor of `grequests <https://github.com/kennethreitz/grequests>`_
+ - Allow disabling of cookie persistence.
+ - New implementation of safe_mode
+ - cookies.get now supports default argument
+ - Session cookies not saved when Session.request is called with return_response=False
+ - Env: no_proxy support.
+ - RequestsCookieJar improvements.
+ - Various bug fixes.
+
+ 0.12.1 (2012-05-08)
+ +++++++++++++++++++
+
+ - New ``Response.json`` property.
+ - Ability to add string file uploads.
+ - Fix out-of-range issue with iter_lines.
+ - Fix iter_content default size.
+ - Fix POST redirects containing files.
+
+ 0.12.0 (2012-05-02)
+ +++++++++++++++++++
+
+ - EXPERIMENTAL OAUTH SUPPORT!
+ - Proper CookieJar-backed cookies interface with awesome dict-like interface.
+ - Speed fix for non-iterated content chunks.
+ - Move ``pre_request`` to a more usable place.
+ - New ``pre_send`` hook.
+ - Lazily encode data, params, files.
+        - Load system Certificate Bundle if ``certifi`` isn't available.
+ - Cleanups, fixes.
+
+ 0.11.2 (2012-04-22)
+ +++++++++++++++++++
+
+ - Attempt to use the OS's certificate bundle if ``certifi`` isn't available.
+ - Infinite digest auth redirect fix.
+ - Multi-part file upload improvements.
+ - Fix decoding of invalid %encodings in URLs.
+ - If there is no content in a response don't throw an error the second time that content is attempted to be read.
+ - Upload data on redirects.
+
+ 0.11.1 (2012-03-30)
+ +++++++++++++++++++
+
+ * POST redirects now break RFC to do what browsers do: Follow up with a GET.
+ * New ``strict_mode`` configuration to disable new redirect behavior.
+
+
+ 0.11.0 (2012-03-14)
+ +++++++++++++++++++
+
+ * Private SSL Certificate support
+ * Remove select.poll from Gevent monkeypatching
+ * Remove redundant generator for chunked transfer encoding
+ * Fix: Response.ok raises Timeout Exception in safe_mode
+
+ 0.10.8 (2012-03-09)
+ +++++++++++++++++++
+
+ * Generate chunked ValueError fix
+ * Proxy configuration by environment variables
+ * Simplification of iter_lines.
+ * New `trust_env` configuration for disabling system/environment hints.
+ * Suppress cookie errors.
+
+ 0.10.7 (2012-03-07)
+ +++++++++++++++++++
+
+ * `encode_uri` = False
+
+ 0.10.6 (2012-02-25)
+ +++++++++++++++++++
+
+ * Allow '=' in cookies.
+
+ 0.10.5 (2012-02-25)
+ +++++++++++++++++++
+
+ * Response body with 0 content-length fix.
+ * New async.imap.
+ * Don't fail on netrc.
+
+
+ 0.10.4 (2012-02-20)
+ +++++++++++++++++++
+
+ * Honor netrc.
+
+ 0.10.3 (2012-02-20)
+ +++++++++++++++++++
+
+ * HEAD requests don't follow redirects anymore.
+ * raise_for_status() doesn't raise for 3xx anymore.
+ * Make Session objects picklable.
+ * ValueError for invalid schema URLs.
+
+ 0.10.2 (2012-01-15)
+ +++++++++++++++++++
+
+ * Vastly improved URL quoting.
+ * Additional allowed cookie key values.
+ * Attempted fix for "Too many open files" Error
+ * Replace unicode errors on first pass, no need for second pass.
+ * Append '/' to bare-domain urls before query insertion.
+ * Exceptions now inherit from RuntimeError.
+ * Binary uploads + auth fix.
+ * Bugfixes.
+
+
+ 0.10.1 (2012-01-23)
+ +++++++++++++++++++
+
+ * PYTHON 3 SUPPORT!
+ * Dropped 2.5 Support. (*Backwards Incompatible*)
+
+ 0.10.0 (2012-01-21)
+ +++++++++++++++++++
+
+ * ``Response.content`` is now bytes-only. (*Backwards Incompatible*)
+ * New ``Response.text`` is unicode-only.
+ * If no ``Response.encoding`` is specified and ``chardet`` is available, ``Response.text`` will guess an encoding.
+ * Default to ISO-8859-1 (Western) encoding for "text" subtypes.
+ * Removal of `decode_unicode`. (*Backwards Incompatible*)
+ * New multiple-hooks system.
+ * New ``Response.register_hook`` for registering hooks within the pipeline.
+ * ``Response.url`` is now Unicode.
+
+ 0.9.3 (2012-01-18)
+ ++++++++++++++++++
+
+ * SSL verify=False bugfix (apparent on windows machines).
+
+ 0.9.2 (2012-01-18)
+ ++++++++++++++++++
+
+ * Asynchronous async.send method.
+ * Support for proper chunk streams with boundaries.
+ * session argument for Session classes.
+ * Print entire hook tracebacks, not just exception instance.
+ * Fix response.iter_lines from pending next line.
+        * Fix bug in HTTP-digest auth w/ URI having query strings.
+ * Fix in Event Hooks section.
+ * Urllib3 update.
+
+
+ 0.9.1 (2012-01-06)
+ ++++++++++++++++++
+
+ * danger_mode for automatic Response.raise_for_status()
+ * Response.iter_lines refactor
+
+ 0.9.0 (2011-12-28)
+ ++++++++++++++++++
+
+ * verify ssl is default.
+
+
+ 0.8.9 (2011-12-28)
+ ++++++++++++++++++
+
+ * Packaging fix.
+
+
+ 0.8.8 (2011-12-28)
+ ++++++++++++++++++
+
+ * SSL CERT VERIFICATION!
+        * Release of Certifi: Mozilla's cert list.
+ * New 'verify' argument for SSL requests.
+ * Urllib3 update.
+
+ 0.8.7 (2011-12-24)
+ ++++++++++++++++++
+
+ * iter_lines last-line truncation fix
+ * Force safe_mode for async requests
+ * Handle safe_mode exceptions more consistently
+ * Fix iteration on null responses in safe_mode
+
+ 0.8.6 (2011-12-18)
+ ++++++++++++++++++
+
+ * Socket timeout fixes.
+ * Proxy Authorization support.
+
+ 0.8.5 (2011-12-14)
+ ++++++++++++++++++
+
+ * Response.iter_lines!
+
+ 0.8.4 (2011-12-11)
+ ++++++++++++++++++
+
+ * Prefetch bugfix.
+ * Added license to installed version.
+
+ 0.8.3 (2011-11-27)
+ ++++++++++++++++++
+
+ * Converted auth system to use simpler callable objects.
+ * New session parameter to API methods.
+ * Display full URL while logging.
+
+ 0.8.2 (2011-11-19)
+ ++++++++++++++++++
+
+        * New Unicode decoding system, based on overridable `Response.encoding`.
+ * Proper URL slash-quote handling.
+ * Cookies with ``[``, ``]``, and ``_`` allowed.
+
+ 0.8.1 (2011-11-15)
+ ++++++++++++++++++
+
+ * URL Request path fix
+ * Proxy fix.
+ * Timeouts fix.
+
+ 0.8.0 (2011-11-13)
+ ++++++++++++++++++
+
+ * Keep-alive support!
+ * Complete removal of Urllib2
+ * Complete removal of Poster
+ * Complete removal of CookieJars
+ * New ConnectionError raising
+ * Safe_mode for error catching
+ * prefetch parameter for request methods
+ * OPTION method
+ * Async pool size throttling
+ * File uploads send real names
+ * Vendored in urllib3
+
+ 0.7.6 (2011-11-07)
+ ++++++++++++++++++
+
+ * Digest authentication bugfix (attach query data to path)
+
+ 0.7.5 (2011-11-04)
+ ++++++++++++++++++
+
+ * Response.content = None if there was an invalid response.
+ * Redirection auth handling.
+
+ 0.7.4 (2011-10-26)
+ ++++++++++++++++++
+
+ * Session Hooks fix.
+
+ 0.7.3 (2011-10-23)
+ ++++++++++++++++++
+
+ * Digest Auth fix.
+
+
+ 0.7.2 (2011-10-23)
+ ++++++++++++++++++
+
+ * PATCH Fix.
+
+
+ 0.7.1 (2011-10-23)
+ ++++++++++++++++++
+
+ * Move away from urllib2 authentication handling.
+ * Fully Remove AuthManager, AuthObject, &c.
+ * New tuple-based auth system with handler callbacks.
+
+
+ 0.7.0 (2011-10-22)
+ ++++++++++++++++++
+
+ * Sessions are now the primary interface.
+ * Deprecated InvalidMethodException.
+ * PATCH fix.
+ * New config system (no more global settings).
+
+
+ 0.6.6 (2011-10-19)
+ ++++++++++++++++++
+
+ * Session parameter bugfix (params merging).
+
+
+ 0.6.5 (2011-10-18)
+ ++++++++++++++++++
+
+ * Offline (fast) test suite.
+ * Session dictionary argument merging.
+
+
+ 0.6.4 (2011-10-13)
+ ++++++++++++++++++
+
+ * Automatic decoding of unicode, based on HTTP Headers.
+ * New ``decode_unicode`` setting.
+ * Removal of ``r.read/close`` methods.
+        * New ``r.raw`` interface for advanced response usage.
+ * Automatic expansion of parameterized headers.
+
+
+ 0.6.3 (2011-10-13)
+ ++++++++++++++++++
+
+ * Beautiful ``requests.async`` module, for making async requests w/ gevent.
+
+
+ 0.6.2 (2011-10-09)
+ ++++++++++++++++++
+
+ * GET/HEAD obeys allow_redirects=False.
+
+
+ 0.6.1 (2011-08-20)
+ ++++++++++++++++++
+
+ * Enhanced status codes experience ``\o/``
+ * Set a maximum number of redirects (``settings.max_redirects``)
+ * Full Unicode URL support
+ * Support for protocol-less redirects.
+ * Allow for arbitrary request types.
+ * Bugfixes
+
+
+ 0.6.0 (2011-08-17)
+ ++++++++++++++++++
+
+ * New callback hook system
+ * New persistent sessions object and context manager
+ * Transparent Dict-cookie handling
+ * Status code reference object
+ * Removed Response.cached
+ * Added Response.request
+ * All args are kwargs
+ * Relative redirect support
+ * HTTPError handling improvements
+ * Improved https testing
+ * Bugfixes
+
+
+ 0.5.1 (2011-07-23)
+ ++++++++++++++++++
+
+ * International Domain Name Support!
+ * Access headers without fetching entire body (``read()``)
+ * Use lists as dicts for parameters
+ * Add Forced Basic Authentication
+ * Forced Basic is default authentication type
+ * ``python-requests.org`` default User-Agent header
+ * CaseInsensitiveDict lower-case caching
+ * Response.history bugfix
+
+
+ 0.5.0 (2011-06-21)
+ ++++++++++++++++++
+
+ * PATCH Support
+ * Support for Proxies
+ * HTTPBin Test Suite
+ * Redirect Fixes
+ * settings.verbose stream writing
+ * Querystrings for all methods
+ * URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as explicitly raised
+          ``r = requests.get('hwe://blah'); r.raise_for_status()``
+
+
+ 0.4.1 (2011-05-22)
+ ++++++++++++++++++
+
+ * Improved Redirection Handling
+ * New 'allow_redirects' param for following non-GET/HEAD Redirects
+ * Settings module refactoring
+
+
+ 0.4.0 (2011-05-15)
+ ++++++++++++++++++
+
+ * Response.history: list of redirected responses
+ * Case-Insensitive Header Dictionaries!
+ * Unicode URLs
+
+
+ 0.3.4 (2011-05-14)
+ ++++++++++++++++++
+
+ * Urllib2 HTTPAuthentication Recursion fix (Basic/Digest)
+ * Internal Refactor
+ * Bytes data upload Bugfix
+
+
+
+ 0.3.3 (2011-05-12)
+ ++++++++++++++++++
+
+ * Request timeouts
+ * Unicode url-encoded data
+ * Settings context manager and module
+
+
+ 0.3.2 (2011-04-15)
+ ++++++++++++++++++
+
+ * Automatic Decompression of GZip Encoded Content
+ * AutoAuth Support for Tupled HTTP Auth
+
+
+ 0.3.1 (2011-04-01)
+ ++++++++++++++++++
+
+ * Cookie Changes
+ * Response.read()
+ * Poster fix
+
+
+ 0.3.0 (2011-02-25)
+ ++++++++++++++++++
+
+ * Automatic Authentication API Change
+ * Smarter Query URL Parameterization
+ * Allow file uploads and POST data together
+ * New Authentication Manager System
+ - Simpler Basic HTTP System
+            - Supports all built-in urllib2 Auths
+ - Allows for custom Auth Handlers
+
+
+ 0.2.4 (2011-02-19)
+ ++++++++++++++++++
+
+ * Python 2.5 Support
+ * PyPy-c v1.4 Support
+ * Auto-Authentication tests
+ * Improved Request object constructor
+
+ 0.2.3 (2011-02-15)
+ ++++++++++++++++++
+
+ * New HTTPHandling Methods
+ - Response.__nonzero__ (false if bad HTTP Status)
+ - Response.ok (True if expected HTTP Status)
+ - Response.error (Logged HTTPError if bad HTTP Status)
+ - Response.raise_for_status() (Raises stored HTTPError)
+
+
+ 0.2.2 (2011-02-14)
+ ++++++++++++++++++
+
+ * Still handles request in the event of an HTTPError. (Issue #2)
+ * Eventlet and Gevent Monkeypatch support.
+ * Cookie Support (Issue #1)
+
+
+ 0.2.1 (2011-02-14)
+ ++++++++++++++++++
+
+ * Added file attribute to POST and PUT requests for multipart-encode file uploads.
+ * Added Request.url attribute for context and redirects
+
+
+ 0.2.0 (2011-02-14)
+ ++++++++++++++++++
+
+ * Birth!
+
+
+ 0.0.1 (2011-02-13)
+ ++++++++++++++++++
+
+ * Frustration
+ * Conception
+
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
diff --git a/third_party/python/requests/requests.egg-info/SOURCES.txt b/third_party/python/requests/requests.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..f769d3bcb6
--- /dev/null
+++ b/third_party/python/requests/requests.egg-info/SOURCES.txt
@@ -0,0 +1,96 @@
+HISTORY.rst
+LICENSE
+MANIFEST.in
+NOTICE
+README.rst
+requirements.txt
+setup.cfg
+setup.py
+test_requests.py
+requests/__init__.py
+requests/adapters.py
+requests/api.py
+requests/auth.py
+requests/cacert.pem
+requests/certs.py
+requests/compat.py
+requests/cookies.py
+requests/exceptions.py
+requests/hooks.py
+requests/models.py
+requests/sessions.py
+requests/status_codes.py
+requests/structures.py
+requests/utils.py
+requests.egg-info/PKG-INFO
+requests.egg-info/SOURCES.txt
+requests.egg-info/dependency_links.txt
+requests.egg-info/not-zip-safe
+requests.egg-info/requires.txt
+requests.egg-info/top_level.txt
+requests/packages/__init__.py
+requests/packages/chardet/__init__.py
+requests/packages/chardet/big5freq.py
+requests/packages/chardet/big5prober.py
+requests/packages/chardet/chardetect.py
+requests/packages/chardet/chardistribution.py
+requests/packages/chardet/charsetgroupprober.py
+requests/packages/chardet/charsetprober.py
+requests/packages/chardet/codingstatemachine.py
+requests/packages/chardet/compat.py
+requests/packages/chardet/constants.py
+requests/packages/chardet/cp949prober.py
+requests/packages/chardet/escprober.py
+requests/packages/chardet/escsm.py
+requests/packages/chardet/eucjpprober.py
+requests/packages/chardet/euckrfreq.py
+requests/packages/chardet/euckrprober.py
+requests/packages/chardet/euctwfreq.py
+requests/packages/chardet/euctwprober.py
+requests/packages/chardet/gb2312freq.py
+requests/packages/chardet/gb2312prober.py
+requests/packages/chardet/hebrewprober.py
+requests/packages/chardet/jisfreq.py
+requests/packages/chardet/jpcntx.py
+requests/packages/chardet/langbulgarianmodel.py
+requests/packages/chardet/langcyrillicmodel.py
+requests/packages/chardet/langgreekmodel.py
+requests/packages/chardet/langhebrewmodel.py
+requests/packages/chardet/langhungarianmodel.py
+requests/packages/chardet/langthaimodel.py
+requests/packages/chardet/latin1prober.py
+requests/packages/chardet/mbcharsetprober.py
+requests/packages/chardet/mbcsgroupprober.py
+requests/packages/chardet/mbcssm.py
+requests/packages/chardet/sbcharsetprober.py
+requests/packages/chardet/sbcsgroupprober.py
+requests/packages/chardet/sjisprober.py
+requests/packages/chardet/universaldetector.py
+requests/packages/chardet/utf8prober.py
+requests/packages/urllib3/__init__.py
+requests/packages/urllib3/_collections.py
+requests/packages/urllib3/connection.py
+requests/packages/urllib3/connectionpool.py
+requests/packages/urllib3/exceptions.py
+requests/packages/urllib3/fields.py
+requests/packages/urllib3/filepost.py
+requests/packages/urllib3/poolmanager.py
+requests/packages/urllib3/request.py
+requests/packages/urllib3/response.py
+requests/packages/urllib3/contrib/__init__.py
+requests/packages/urllib3/contrib/appengine.py
+requests/packages/urllib3/contrib/ntlmpool.py
+requests/packages/urllib3/contrib/pyopenssl.py
+requests/packages/urllib3/packages/__init__.py
+requests/packages/urllib3/packages/ordered_dict.py
+requests/packages/urllib3/packages/six.py
+requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
+requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py
+requests/packages/urllib3/util/__init__.py
+requests/packages/urllib3/util/connection.py
+requests/packages/urllib3/util/request.py
+requests/packages/urllib3/util/response.py
+requests/packages/urllib3/util/retry.py
+requests/packages/urllib3/util/ssl_.py
+requests/packages/urllib3/util/timeout.py
+requests/packages/urllib3/util/url.py \ No newline at end of file
diff --git a/third_party/python/requests/requests.egg-info/dependency_links.txt b/third_party/python/requests/requests.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/requests/requests.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/third_party/python/requests/requests.egg-info/not-zip-safe b/third_party/python/requests/requests.egg-info/not-zip-safe
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/requests/requests.egg-info/not-zip-safe
@@ -0,0 +1 @@
+
diff --git a/third_party/python/requests/requests.egg-info/requires.txt b/third_party/python/requests/requests.egg-info/requires.txt
new file mode 100644
index 0000000000..34ddab55d6
--- /dev/null
+++ b/third_party/python/requests/requests.egg-info/requires.txt
@@ -0,0 +1,5 @@
+
+[security]
+pyOpenSSL>=0.13
+ndg-httpsclient
+pyasn1
diff --git a/third_party/python/requests/requests.egg-info/top_level.txt b/third_party/python/requests/requests.egg-info/top_level.txt
new file mode 100644
index 0000000000..f2293605cf
--- /dev/null
+++ b/third_party/python/requests/requests.egg-info/top_level.txt
@@ -0,0 +1 @@
+requests
diff --git a/third_party/python/requests/requests/__init__.py b/third_party/python/requests/requests/__init__.py
new file mode 100644
index 0000000000..bd5b5b9749
--- /dev/null
+++ b/third_party/python/requests/requests/__init__.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+
+# __
+# /__) _ _ _ _ _/ _
+# / ( (- (/ (/ (- _) / _)
+# /
+
+"""
+Requests HTTP library
+~~~~~~~~~~~~~~~~~~~~~
+
+Requests is an HTTP library, written in Python, for human beings. Basic GET
+usage:
+
+ >>> import requests
+ >>> r = requests.get('https://www.python.org')
+ >>> r.status_code
+ 200
+ >>> 'Python is a programming language' in r.content
+ True
+
+... or POST:
+
+ >>> payload = dict(key1='value1', key2='value2')
+ >>> r = requests.post('http://httpbin.org/post', data=payload)
+ >>> print(r.text)
+ {
+ ...
+ "form": {
+ "key2": "value2",
+ "key1": "value1"
+ },
+ ...
+ }
+
+The other HTTP methods are supported - see `requests.api`. Full documentation
+is at <http://python-requests.org>.
+
+:copyright: (c) 2015 by Kenneth Reitz.
+:license: Apache 2.0, see LICENSE for more details.
+
+"""
+
+__title__ = 'requests'
+__version__ = '2.9.1'
+__build__ = 0x020901
+__author__ = 'Kenneth Reitz'
+__license__ = 'Apache 2.0'
+__copyright__ = 'Copyright 2015 Kenneth Reitz'
+
+# Attempt to enable urllib3's SNI support, if possible
+try:
+ from .packages.urllib3.contrib import pyopenssl
+ pyopenssl.inject_into_urllib3()
+except ImportError:
+ pass
+
+from . import utils
+from .models import Request, Response, PreparedRequest
+from .api import request, get, head, post, patch, put, delete, options
+from .sessions import session, Session
+from .status_codes import codes
+from .exceptions import (
+ RequestException, Timeout, URLRequired,
+ TooManyRedirects, HTTPError, ConnectionError,
+ FileModeWarning,
+)
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+try: # Python 2.7+
+ from logging import NullHandler
+except ImportError:
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+import warnings
+
+# FileModeWarnings go off per the default.
+warnings.simplefilter('default', FileModeWarning, append=True)
diff --git a/third_party/python/requests/requests/adapters.py b/third_party/python/requests/requests/adapters.py
new file mode 100644
index 0000000000..6266d5be30
--- /dev/null
+++ b/third_party/python/requests/requests/adapters.py
@@ -0,0 +1,453 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.adapters
+~~~~~~~~~~~~~~~~~
+
+This module contains the transport adapters that Requests uses to define
+and maintain connections.
+"""
+
+import os.path
+import socket
+
+from .models import Response
+from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
+from .packages.urllib3.response import HTTPResponse
+from .packages.urllib3.util import Timeout as TimeoutSauce
+from .packages.urllib3.util.retry import Retry
+from .compat import urlparse, basestring
+from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
+ prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
+ select_proxy)
+from .structures import CaseInsensitiveDict
+from .packages.urllib3.exceptions import ClosedPoolError
+from .packages.urllib3.exceptions import ConnectTimeoutError
+from .packages.urllib3.exceptions import HTTPError as _HTTPError
+from .packages.urllib3.exceptions import MaxRetryError
+from .packages.urllib3.exceptions import NewConnectionError
+from .packages.urllib3.exceptions import ProxyError as _ProxyError
+from .packages.urllib3.exceptions import ProtocolError
+from .packages.urllib3.exceptions import ReadTimeoutError
+from .packages.urllib3.exceptions import SSLError as _SSLError
+from .packages.urllib3.exceptions import ResponseError
+from .cookies import extract_cookies_to_jar
+from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
+ ProxyError, RetryError)
+from .auth import _basic_auth_str
+
+DEFAULT_POOLBLOCK = False
+DEFAULT_POOLSIZE = 10
+DEFAULT_RETRIES = 0
+DEFAULT_POOL_TIMEOUT = None
+
+
+class BaseAdapter(object):
+ """The Base Transport Adapter"""
+
+ def __init__(self):
+ super(BaseAdapter, self).__init__()
+
+ def send(self):
+ raise NotImplementedError
+
+ def close(self):
+ raise NotImplementedError
+
+
+class HTTPAdapter(BaseAdapter):
+ """The built-in HTTP Adapter for urllib3.
+
+ Provides a general-case interface for Requests sessions to contact HTTP and
+ HTTPS urls by implementing the Transport Adapter interface. This class will
+ usually be created by the :class:`Session <Session>` class under the
+ covers.
+
+ :param pool_connections: The number of urllib3 connection pools to cache.
+ :param pool_maxsize: The maximum number of connections to save in the pool.
+ :param int max_retries: The maximum number of retries each connection
+ should attempt. Note, this applies only to failed DNS lookups, socket
+ connections and connection timeouts, never to requests where data has
+ made it to the server. By default, Requests does not retry failed
+ connections. If you need granular control over the conditions under
+ which we retry a request, import urllib3's ``Retry`` class and pass
+ that instead.
+ :param pool_block: Whether the connection pool should block for connections.
+
+ Usage::
+
+ >>> import requests
+ >>> s = requests.Session()
+ >>> a = requests.adapters.HTTPAdapter(max_retries=3)
+ >>> s.mount('http://', a)
+ """
+ __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
+ '_pool_block']
+
+ def __init__(self, pool_connections=DEFAULT_POOLSIZE,
+ pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
+ pool_block=DEFAULT_POOLBLOCK):
+ if max_retries == DEFAULT_RETRIES:
+ self.max_retries = Retry(0, read=False)
+ else:
+ self.max_retries = Retry.from_int(max_retries)
+ self.config = {}
+ self.proxy_manager = {}
+
+ super(HTTPAdapter, self).__init__()
+
+ self._pool_connections = pool_connections
+ self._pool_maxsize = pool_maxsize
+ self._pool_block = pool_block
+
+ self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
+
+ def __getstate__(self):
+ return dict((attr, getattr(self, attr, None)) for attr in
+ self.__attrs__)
+
+ def __setstate__(self, state):
+ # Can't handle by adding 'proxy_manager' to self.__attrs__ because
+ # self.poolmanager uses a lambda function, which isn't pickleable.
+ self.proxy_manager = {}
+ self.config = {}
+
+ for attr, value in state.items():
+ setattr(self, attr, value)
+
+ self.init_poolmanager(self._pool_connections, self._pool_maxsize,
+ block=self._pool_block)
+
+ def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
+ """Initializes a urllib3 PoolManager.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param connections: The number of urllib3 connection pools to cache.
+ :param maxsize: The maximum number of connections to save in the pool.
+ :param block: Block when no free connections are available.
+ :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
+ """
+ # save these values for pickling
+ self._pool_connections = connections
+ self._pool_maxsize = maxsize
+ self._pool_block = block
+
+ self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
+ block=block, strict=True, **pool_kwargs)
+
+ def proxy_manager_for(self, proxy, **proxy_kwargs):
+ """Return urllib3 ProxyManager for the given proxy.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param proxy: The proxy to return a urllib3 ProxyManager for.
+ :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
+ :returns: ProxyManager
+ """
+ if not proxy in self.proxy_manager:
+ proxy_headers = self.proxy_headers(proxy)
+ self.proxy_manager[proxy] = proxy_from_url(
+ proxy,
+ proxy_headers=proxy_headers,
+ num_pools=self._pool_connections,
+ maxsize=self._pool_maxsize,
+ block=self._pool_block,
+ **proxy_kwargs)
+
+ return self.proxy_manager[proxy]
+
+ def cert_verify(self, conn, url, verify, cert):
+ """Verify a SSL certificate. This method should not be called from user
+ code, and is only exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param conn: The urllib3 connection object associated with the cert.
+ :param url: The requested URL.
+ :param verify: Whether we should actually verify the certificate.
+ :param cert: The SSL certificate to verify.
+ """
+ if url.lower().startswith('https') and verify:
+
+ cert_loc = None
+
+ # Allow self-specified cert location.
+ if verify is not True:
+ cert_loc = verify
+
+ if not cert_loc:
+ cert_loc = DEFAULT_CA_BUNDLE_PATH
+
+ if not cert_loc:
+ raise Exception("Could not find a suitable SSL CA certificate bundle.")
+
+ conn.cert_reqs = 'CERT_REQUIRED'
+
+ if not os.path.isdir(cert_loc):
+ conn.ca_certs = cert_loc
+ else:
+ conn.ca_cert_dir = cert_loc
+ else:
+ conn.cert_reqs = 'CERT_NONE'
+ conn.ca_certs = None
+ conn.ca_cert_dir = None
+
+ if cert:
+ if not isinstance(cert, basestring):
+ conn.cert_file = cert[0]
+ conn.key_file = cert[1]
+ else:
+ conn.cert_file = cert
+
+ def build_response(self, req, resp):
+ """Builds a :class:`Response <requests.Response>` object from a urllib3
+ response. This should not be called from user code, and is only exposed
+ for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
+
+ :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
+ :param resp: The urllib3 response object.
+ """
+ response = Response()
+
+ # Fallback to None if there's no status_code, for whatever reason.
+ response.status_code = getattr(resp, 'status', None)
+
+ # Make headers case-insensitive.
+ response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
+
+ # Set encoding.
+ response.encoding = get_encoding_from_headers(response.headers)
+ response.raw = resp
+ response.reason = response.raw.reason
+
+ if isinstance(req.url, bytes):
+ response.url = req.url.decode('utf-8')
+ else:
+ response.url = req.url
+
+ # Add new cookies from the server.
+ extract_cookies_to_jar(response.cookies, req, resp)
+
+ # Give the Response some context.
+ response.request = req
+ response.connection = self
+
+ return response
+
+ def get_connection(self, url, proxies=None):
+ """Returns a urllib3 connection for the given URL. This should not be
+ called from user code, and is only exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param url: The URL to connect to.
+ :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
+ """
+ proxy = select_proxy(url, proxies)
+
+ if proxy:
+ proxy = prepend_scheme_if_needed(proxy, 'http')
+ proxy_manager = self.proxy_manager_for(proxy)
+ conn = proxy_manager.connection_from_url(url)
+ else:
+ # Only scheme should be lower case
+ parsed = urlparse(url)
+ url = parsed.geturl()
+ conn = self.poolmanager.connection_from_url(url)
+
+ return conn
+
+ def close(self):
+ """Disposes of any internal state.
+
+ Currently, this just closes the PoolManager, which closes pooled
+ connections.
+ """
+ self.poolmanager.clear()
+
+ def request_url(self, request, proxies):
+ """Obtain the url to use when making the final request.
+
+ If the message is being sent through a HTTP proxy, the full URL has to
+ be used. Otherwise, we should only use the path portion of the URL.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
+ """
+ proxy = select_proxy(request.url, proxies)
+ scheme = urlparse(request.url).scheme
+ if proxy and scheme != 'https':
+ url = urldefragauth(request.url)
+ else:
+ url = request.path_url
+
+ return url
+
+ def add_headers(self, request, **kwargs):
+ """Add any headers needed by the connection. As of v2.0 this does
+ nothing by default, but is left for overriding by users that subclass
+ the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
+ :param kwargs: The keyword arguments from the call to send().
+ """
+ pass
+
+ def proxy_headers(self, proxy):
+ """Returns a dictionary of the headers to add to any request sent
+ through a proxy. This works with urllib3 magic to ensure that they are
+ correctly sent to the proxy, rather than in a tunnelled request if
+ CONNECT is being used.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param proxies: The url of the proxy being used for this request.
+ """
+ headers = {}
+ username, password = get_auth_from_url(proxy)
+
+ if username and password:
+ headers['Proxy-Authorization'] = _basic_auth_str(username,
+ password)
+
+ return headers
+
+ def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
+ """Sends PreparedRequest object. Returns Response object.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param stream: (optional) Whether to stream the request content.
+ :param timeout: (optional) How long to wait for the server to send
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+ :param verify: (optional) Whether to verify SSL certificates.
+ :param cert: (optional) Any user-provided SSL certificate to be trusted.
+ :param proxies: (optional) The proxies dictionary to apply to the request.
+ """
+
+ conn = self.get_connection(request.url, proxies)
+
+ self.cert_verify(conn, request.url, verify, cert)
+ url = self.request_url(request, proxies)
+ self.add_headers(request)
+
+ chunked = not (request.body is None or 'Content-Length' in request.headers)
+
+ if isinstance(timeout, tuple):
+ try:
+ connect, read = timeout
+ timeout = TimeoutSauce(connect=connect, read=read)
+ except ValueError as e:
+ # this may raise a string formatting error.
+ err = ("Invalid timeout {0}. Pass a (connect, read) "
+ "timeout tuple, or a single float to set "
+ "both timeouts to the same value".format(timeout))
+ raise ValueError(err)
+ else:
+ timeout = TimeoutSauce(connect=timeout, read=timeout)
+
+ try:
+ if not chunked:
+ resp = conn.urlopen(
+ method=request.method,
+ url=url,
+ body=request.body,
+ headers=request.headers,
+ redirect=False,
+ assert_same_host=False,
+ preload_content=False,
+ decode_content=False,
+ retries=self.max_retries,
+ timeout=timeout
+ )
+
+ # Send the request.
+ else:
+ if hasattr(conn, 'proxy_pool'):
+ conn = conn.proxy_pool
+
+ low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
+
+ try:
+ low_conn.putrequest(request.method,
+ url,
+ skip_accept_encoding=True)
+
+ for header, value in request.headers.items():
+ low_conn.putheader(header, value)
+
+ low_conn.endheaders()
+
+ for i in request.body:
+ low_conn.send(hex(len(i))[2:].encode('utf-8'))
+ low_conn.send(b'\r\n')
+ low_conn.send(i)
+ low_conn.send(b'\r\n')
+ low_conn.send(b'0\r\n\r\n')
+
+ # Receive the response from the server
+ try:
+ # For Python 2.7+ versions, use buffering of HTTP
+ # responses
+ r = low_conn.getresponse(buffering=True)
+ except TypeError:
+ # For compatibility with Python 2.6 versions and back
+ r = low_conn.getresponse()
+
+ resp = HTTPResponse.from_httplib(
+ r,
+ pool=conn,
+ connection=low_conn,
+ preload_content=False,
+ decode_content=False
+ )
+ except:
+ # If we hit any problems here, clean up the connection.
+ # Then, reraise so that we can handle the actual exception.
+ low_conn.close()
+ raise
+
+ except (ProtocolError, socket.error) as err:
+ raise ConnectionError(err, request=request)
+
+ except MaxRetryError as e:
+ if isinstance(e.reason, ConnectTimeoutError):
+ # TODO: Remove this in 3.0.0: see #2811
+ if not isinstance(e.reason, NewConnectionError):
+ raise ConnectTimeout(e, request=request)
+
+ if isinstance(e.reason, ResponseError):
+ raise RetryError(e, request=request)
+
+ raise ConnectionError(e, request=request)
+
+ except ClosedPoolError as e:
+ raise ConnectionError(e, request=request)
+
+ except _ProxyError as e:
+ raise ProxyError(e)
+
+ except (_SSLError, _HTTPError) as e:
+ if isinstance(e, _SSLError):
+ raise SSLError(e, request=request)
+ elif isinstance(e, ReadTimeoutError):
+ raise ReadTimeout(e, request=request)
+ else:
+ raise
+
+ return self.build_response(request, resp)
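The adapter docstrings above note that request_url(), add_headers() and proxy_headers() exist so that HTTPAdapter can be subclassed. As a minimal, hedged sketch of that pattern (not part of the vendored sources; the header name and mount prefix are illustrative assumptions only):

    # Hypothetical subclass of requests.adapters.HTTPAdapter that injects an
    # extra header on every outgoing request, mounted on a Session so it is
    # used for all URLs under a given prefix.
    import requests
    from requests.adapters import HTTPAdapter

    class TracingAdapter(HTTPAdapter):
        def add_headers(self, request, **kwargs):
            # 'X-Example-Trace' is an assumed, illustrative header name.
            request.headers['X-Example-Trace'] = 'enabled'

    session = requests.Session()
    session.mount('https://', TracingAdapter(max_retries=3))
    # response = session.get('https://example.org/')  # goes through TracingAdapter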
diff --git a/third_party/python/requests/requests/api.py b/third_party/python/requests/requests/api.py
new file mode 100644
index 0000000000..b21a1a4fa7
--- /dev/null
+++ b/third_party/python/requests/requests/api.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.api
+~~~~~~~~~~~~
+
+This module implements the Requests API.
+
+:copyright: (c) 2012 by Kenneth Reitz.
+:license: Apache2, see LICENSE for more details.
+
+"""
+
+from . import sessions
+
+
+def request(method, url, **kwargs):
+ """Constructs and sends a :class:`Request <Request>`.
+
+ :param method: method for the new :class:`Request` object.
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param json: (optional) json data to send in the body of the :class:`Request`.
+ :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
+ :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
+ :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload.
+ :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
+ :param timeout: (optional) How long to wait for the server to send data
+ before giving up, as a float, or a :ref:`(connect timeout, read
+ timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+ :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
+ :type allow_redirects: bool
+ :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
+ :param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to ``True``.
+ :param stream: (optional) if ``False``, the response content will be immediately downloaded.
+ :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+
+ Usage::
+
+ >>> import requests
+      >>> requests.request('GET', 'http://httpbin.org/get')
+      <Response [200]>
+ """
+
+ # By using the 'with' statement we are sure the session is closed, thus we
+ # avoid leaving sockets open which can trigger a ResourceWarning in some
+ # cases, and look like a memory leak in others.
+ with sessions.Session() as session:
+ return session.request(method=method, url=url, **kwargs)
+
+
+def get(url, params=None, **kwargs):
+ """Sends a GET request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return request('get', url, params=params, **kwargs)
+
+
+def options(url, **kwargs):
+ """Sends a OPTIONS request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return request('options', url, **kwargs)
+
+
+def head(url, **kwargs):
+ """Sends a HEAD request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault('allow_redirects', False)
+ return request('head', url, **kwargs)
+
+
+def post(url, data=None, json=None, **kwargs):
+ """Sends a POST request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param json: (optional) json data to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request('post', url, data=data, json=json, **kwargs)
+
+
+def put(url, data=None, **kwargs):
+ """Sends a PUT request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request('put', url, data=data, **kwargs)
+
+
+def patch(url, data=None, **kwargs):
+ """Sends a PATCH request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request('patch', url, data=data, **kwargs)
+
+
+def delete(url, **kwargs):
+ """Sends a DELETE request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request('delete', url, **kwargs)
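The helpers above are thin wrappers that create a throwaway Session per call. A short usage sketch of the parameters their docstrings describe (httpbin.org and the payload values are assumptions for illustration only):

    # Illustrative use of the module-level API defined above.
    import requests

    # GET with query-string parameters and a (connect, read) timeout tuple.
    r = requests.get('https://httpbin.org/get',
                     params={'q': 'firefox'}, timeout=(3.05, 10))
    print(r.status_code, r.url)

    # POST with a JSON body; the json= keyword serializes the dict and sets
    # the Content-Type header.
    r = requests.post('https://httpbin.org/post', json={'key': 'value'})
    print(r.json().get('json'))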
diff --git a/third_party/python/requests/requests/auth.py b/third_party/python/requests/requests/auth.py
new file mode 100644
index 0000000000..2af55fb5e6
--- /dev/null
+++ b/third_party/python/requests/requests/auth.py
@@ -0,0 +1,223 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.auth
+~~~~~~~~~~~~~
+
+This module contains the authentication handlers for Requests.
+"""
+
+import os
+import re
+import time
+import hashlib
+import threading
+
+from base64 import b64encode
+
+from .compat import urlparse, str
+from .cookies import extract_cookies_to_jar
+from .utils import parse_dict_header, to_native_string
+from .status_codes import codes
+
+CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
+CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
+
+
+def _basic_auth_str(username, password):
+ """Returns a Basic Auth string."""
+
+ authstr = 'Basic ' + to_native_string(
+ b64encode(('%s:%s' % (username, password)).encode('latin1')).strip()
+ )
+
+ return authstr
+
+
+class AuthBase(object):
+ """Base class that all auth implementations derive from"""
+
+ def __call__(self, r):
+ raise NotImplementedError('Auth hooks must be callable.')
+
+
+class HTTPBasicAuth(AuthBase):
+ """Attaches HTTP Basic Authentication to the given Request object."""
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
+
+ def __call__(self, r):
+ r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
+ return r
+
+
+class HTTPProxyAuth(HTTPBasicAuth):
+ """Attaches HTTP Proxy Authentication to a given Request object."""
+ def __call__(self, r):
+ r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
+ return r
+
+
+class HTTPDigestAuth(AuthBase):
+ """Attaches HTTP Digest Authentication to the given Request object."""
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
+ # Keep state in per-thread local storage
+ self._thread_local = threading.local()
+
+ def init_per_thread_state(self):
+ # Ensure state is initialized just once per-thread
+ if not hasattr(self._thread_local, 'init'):
+ self._thread_local.init = True
+ self._thread_local.last_nonce = ''
+ self._thread_local.nonce_count = 0
+ self._thread_local.chal = {}
+ self._thread_local.pos = None
+ self._thread_local.num_401_calls = None
+
+ def build_digest_header(self, method, url):
+
+ realm = self._thread_local.chal['realm']
+ nonce = self._thread_local.chal['nonce']
+ qop = self._thread_local.chal.get('qop')
+ algorithm = self._thread_local.chal.get('algorithm')
+ opaque = self._thread_local.chal.get('opaque')
+
+ if algorithm is None:
+ _algorithm = 'MD5'
+ else:
+ _algorithm = algorithm.upper()
+ # lambdas assume digest modules are imported at the top level
+ if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
+ def md5_utf8(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.md5(x).hexdigest()
+ hash_utf8 = md5_utf8
+ elif _algorithm == 'SHA':
+ def sha_utf8(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.sha1(x).hexdigest()
+ hash_utf8 = sha_utf8
+
+ KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
+
+ if hash_utf8 is None:
+ return None
+
+ # XXX not implemented yet
+ entdig = None
+ p_parsed = urlparse(url)
+ #: path is request-uri defined in RFC 2616 which should not be empty
+ path = p_parsed.path or "/"
+ if p_parsed.query:
+ path += '?' + p_parsed.query
+
+ A1 = '%s:%s:%s' % (self.username, realm, self.password)
+ A2 = '%s:%s' % (method, path)
+
+ HA1 = hash_utf8(A1)
+ HA2 = hash_utf8(A2)
+
+ if nonce == self._thread_local.last_nonce:
+ self._thread_local.nonce_count += 1
+ else:
+ self._thread_local.nonce_count = 1
+ ncvalue = '%08x' % self._thread_local.nonce_count
+ s = str(self._thread_local.nonce_count).encode('utf-8')
+ s += nonce.encode('utf-8')
+ s += time.ctime().encode('utf-8')
+ s += os.urandom(8)
+
+ cnonce = (hashlib.sha1(s).hexdigest()[:16])
+ if _algorithm == 'MD5-SESS':
+ HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
+
+ if not qop:
+ respdig = KD(HA1, "%s:%s" % (nonce, HA2))
+ elif qop == 'auth' or 'auth' in qop.split(','):
+ noncebit = "%s:%s:%s:%s:%s" % (
+ nonce, ncvalue, cnonce, 'auth', HA2
+ )
+ respdig = KD(HA1, noncebit)
+ else:
+ # XXX handle auth-int.
+ return None
+
+ self._thread_local.last_nonce = nonce
+
+ # XXX should the partial digests be encoded too?
+ base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
+ 'response="%s"' % (self.username, realm, nonce, path, respdig)
+ if opaque:
+ base += ', opaque="%s"' % opaque
+ if algorithm:
+ base += ', algorithm="%s"' % algorithm
+ if entdig:
+ base += ', digest="%s"' % entdig
+ if qop:
+ base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
+
+ return 'Digest %s' % (base)
+
+ def handle_redirect(self, r, **kwargs):
+ """Reset num_401_calls counter on redirects."""
+ if r.is_redirect:
+ self._thread_local.num_401_calls = 1
+
+ def handle_401(self, r, **kwargs):
+ """Takes the given response and tries digest-auth, if needed."""
+
+ if self._thread_local.pos is not None:
+ # Rewind the file position indicator of the body to where
+ # it was to resend the request.
+ r.request.body.seek(self._thread_local.pos)
+ s_auth = r.headers.get('www-authenticate', '')
+
+ if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:
+
+ self._thread_local.num_401_calls += 1
+ pat = re.compile(r'digest ', flags=re.IGNORECASE)
+ self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
+
+ # Consume content and release the original connection
+ # to allow our new request to reuse the same one.
+ r.content
+ r.close()
+ prep = r.request.copy()
+ extract_cookies_to_jar(prep._cookies, r.request, r.raw)
+ prep.prepare_cookies(prep._cookies)
+
+ prep.headers['Authorization'] = self.build_digest_header(
+ prep.method, prep.url)
+ _r = r.connection.send(prep, **kwargs)
+ _r.history.append(r)
+ _r.request = prep
+
+ return _r
+
+ self._thread_local.num_401_calls = 1
+ return r
+
+ def __call__(self, r):
+ # Initialize per-thread state, if needed
+ self.init_per_thread_state()
+ # If we have a saved nonce, skip the 401
+ if self._thread_local.last_nonce:
+ r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
+ try:
+ self._thread_local.pos = r.body.tell()
+ except AttributeError:
+ # In the case of HTTPDigestAuth being reused and the body of
+ # the previous request was a file-like object, pos has the
+ # file position of the previous body. Ensure it's set to
+ # None.
+ self._thread_local.pos = None
+ r.register_hook('response', self.handle_401)
+ r.register_hook('response', self.handle_redirect)
+ self._thread_local.num_401_calls = 1
+
+ return r
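To connect the handlers above to the request flow, both are passed via the auth= keyword; a brief sketch follows (credentials and URLs are placeholders, not real endpoints):

    # Hedged example of the auth classes defined above.
    import requests
    from requests.auth import HTTPBasicAuth, HTTPDigestAuth

    # Basic auth: equivalent to passing auth=('user', 'pass') directly.
    r = requests.get('https://httpbin.org/basic-auth/user/pass',
                     auth=HTTPBasicAuth('user', 'pass'))

    # Digest auth: the handler replays the request after the initial 401
    # challenge, as implemented in handle_401() above.
    r = requests.get('https://httpbin.org/digest-auth/auth/user/pass',
                     auth=HTTPDigestAuth('user', 'pass'))
    print(r.status_code)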
diff --git a/third_party/python/requests/requests/cacert.pem b/third_party/python/requests/requests/cacert.pem
new file mode 100644
index 0000000000..6a66daa997
--- /dev/null
+++ b/third_party/python/requests/requests/cacert.pem
@@ -0,0 +1,5616 @@
+
+# Issuer: O=Equifax OU=Equifax Secure Certificate Authority
+# Subject: O=Equifax OU=Equifax Secure Certificate Authority
+# Label: "Equifax Secure CA"
+# Serial: 903804111
+# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4
+# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a
+# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
+UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
+dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1
+MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx
+dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f
+BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A
+cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC
+AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ
+MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm
+aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw
+ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj
+IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF
+MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
+A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
+7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh
+1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Label: "GlobalSign Root CA - R2"
+# Serial: 4835703278459682885658125
+# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
+# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
+# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
+# Serial: 206684696279472310254277870180966723415
+# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
+# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
+# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
+N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
+KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
+kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
+CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
+Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
+imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
+2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
+DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
+/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 4 Public Primary Certification Authority - G3"
+# Serial: 314531972711909413743075096039378935511
+# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df
+# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d
+# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1
+GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ
++mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd
+U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm
+NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY
+ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/
+ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1
+CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq
+g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm
+fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
+2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/
+bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946069240
+# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
+# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
+# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Low-Value Services Root"
+# Serial: 1
+# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc
+# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d
+# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7
+-----BEGIN CERTIFICATE-----
+MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw
+MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML
+QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD
+VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA
+A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul
+CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n
+tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl
+dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch
+PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC
++Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O
+BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl
+MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk
+ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB
+IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X
+7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz
+43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY
+eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl
+pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA
+WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Label: "AddTrust External Root"
+# Serial: 1
+# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
+# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
+# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
+-----BEGIN CERTIFICATE-----
+MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
+IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
+MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
+FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
+bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
+dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
+H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
+uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
+mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
+a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
+E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
+WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
+VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
+Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
+cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
+IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
+AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
+YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
+6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
+Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
+c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
+mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Public Services Root"
+# Serial: 1
+# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f
+# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5
+# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx
+MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB
+ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV
+BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV
+6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX
+GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP
+dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH
+1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF
+62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW
+BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw
+AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL
+MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU
+cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv
+b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6
+IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/
+iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao
+GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh
+4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm
+XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Qualified Certificates Root"
+# Serial: 1
+# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb
+# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf
+# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16
+-----BEGIN CERTIFICATE-----
+MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1
+MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK
+EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh
+BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq
+xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G
+87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i
+2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U
+WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1
+0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G
+A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr
+pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL
+ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm
+aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv
+hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm
+hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X
+dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3
+P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y
+iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no
+xqE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: O=RSA Security Inc OU=RSA Security 2048 V3
+# Subject: O=RSA Security Inc OU=RSA Security 2048 V3
+# Label: "RSA Security 2048 v3"
+# Serial: 13297492616345471454730593562152402946
+# MD5 Fingerprint: 77:0d:19:b1:21:fd:00:42:9c:3e:0c:a5:dd:0b:02:8e
+# SHA1 Fingerprint: 25:01:90:19:cf:fb:d9:99:1c:b7:68:25:74:8d:94:5f:30:93:95:42
+# SHA256 Fingerprint: af:8b:67:62:a1:e5:28:22:81:61:a9:5d:5c:55:9e:e2:66:27:8f:75:d7:9e:83:01:89:a5:03:50:6a:bd:6b:4c
+-----BEGIN CERTIFICATE-----
+MIIDYTCCAkmgAwIBAgIQCgEBAQAAAnwAAAAKAAAAAjANBgkqhkiG9w0BAQUFADA6
+MRkwFwYDVQQKExBSU0EgU2VjdXJpdHkgSW5jMR0wGwYDVQQLExRSU0EgU2VjdXJp
+dHkgMjA0OCBWMzAeFw0wMTAyMjIyMDM5MjNaFw0yNjAyMjIyMDM5MjNaMDoxGTAX
+BgNVBAoTEFJTQSBTZWN1cml0eSBJbmMxHTAbBgNVBAsTFFJTQSBTZWN1cml0eSAy
+MDQ4IFYzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt49VcdKA3Xtp
+eafwGFAyPGJn9gqVB93mG/Oe2dJBVGutn3y+Gc37RqtBaB4Y6lXIL5F4iSj7Jylg
+/9+PjDvJSZu1pJTOAeo+tWN7fyb9Gd3AIb2E0S1PRsNO3Ng3OTsor8udGuorryGl
+wSMiuLgbWhOHV4PR8CDn6E8jQrAApX2J6elhc5SYcSa8LWrg903w8bYqODGBDSnh
+AMFRD0xS+ARaqn1y07iHKrtjEAMqs6FPDVpeRrc9DvV07Jmf+T0kgYim3WBU6JU2
+PcYJk5qjEoAAVZkZR73QpXzDuvsf9/UP+Ky5tfQ3mBMY3oVbtwyCO4dvlTlYMNpu
+AWgXIszACwIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAfBgNVHSMEGDAWgBQHw1EwpKrpRa41JPr/JCwz0LGdjDAdBgNVHQ4EFgQUB8NR
+MKSq6UWuNST6/yQsM9CxnYwwDQYJKoZIhvcNAQEFBQADggEBAF8+hnZuuDU8TjYc
+HnmYv/3VEhF5Ug7uMYm83X/50cYVIeiKAVQNOvtUudZj1LGqlk2iQk3UUx+LEN5/
+Zb5gEydxiKRz44Rj0aRV4VCT5hsOedBnvEbIvz8XDZXmxpBp3ue0L96VfdASPz0+
+f00/FGj1EVDVwfSQpQgdMWD/YIwjVAqv/qFuxdF6Kmh4zx6CCiC0H63lhbJqaHVO
+rSU3lIW+vaHU6rcMSzyd6BIA8F+sDeGscGNz9395nzIlQnQFgCi/vcEkllgVsRch
+6YlL2weIZ/QVrXA+L02FO8K32/6YaCOJ4XQP3vTFhGMpG8zLB8kApKnXwiJPZ9d3
+7CAFYd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Label: "GeoTrust Global CA"
+# Serial: 144470
+# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
+# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
+# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
+-----BEGIN CERTIFICATE-----
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Global CA 2"
+# Serial: 1
+# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9
+# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d
+# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85
+-----BEGIN CERTIFICATE-----
+MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs
+IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg
+R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A
+PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8
+Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL
+TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL
+5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7
+S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe
+2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE
+FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap
+EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td
+EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv
+/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN
+A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0
+abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF
+I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz
+4iIprn2DQKi6bA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA"
+# Serial: 1
+# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
+# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
+# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
+-----BEGIN CERTIFICATE-----
+MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
+BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
+IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
+VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
+cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
+QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
+F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
+c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
+mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
+VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
+teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
+f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
+Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
+MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
+aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
+IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
+ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
+uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
+QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
+ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
+DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
+bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA 2"
+# Serial: 1
+# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
+# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
+# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
+VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
+c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
+WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
+FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
+XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
+se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
+KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
+IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
+y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
+hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
+QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
+Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
+HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
+KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
+L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
+Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
+ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
+T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
+GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
+1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
+OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
+6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
+QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
+-----END CERTIFICATE-----
+
+# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
+# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
+# Label: "Visa eCommerce Root"
+# Serial: 25952180776285836048024890241505565794
+# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02
+# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62
+# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22
+-----BEGIN CERTIFICATE-----
+MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr
+MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl
+cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv
+bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw
+CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h
+dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l
+cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h
+2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E
+lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV
+ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq
+299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t
+vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL
+dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF
+AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR
+zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3
+LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd
+7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw
+++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt
+398znM/jra6O1I7mT1GvFpLgXPYHDw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum CA O=Unizeto Sp. z o.o.
+# Subject: CN=Certum CA O=Unizeto Sp. z o.o.
+# Label: "Certum Root CA"
+# Serial: 65568
+# MD5 Fingerprint: 2c:8f:9f:66:1d:18:90:b1:47:26:9d:8e:86:82:8c:a9
+# SHA1 Fingerprint: 62:52:dc:40:f7:11:43:a2:2f:de:9e:f7:34:8e:06:42:51:b1:81:18
+# SHA256 Fingerprint: d8:e0:fe:bc:1d:b2:e3:8d:00:94:0f:37:d2:7d:41:34:4d:99:3e:73:4b:99:d5:65:6d:97:78:d4:d8:14:36:24
+-----BEGIN CERTIFICATE-----
+MIIDDDCCAfSgAwIBAgIDAQAgMA0GCSqGSIb3DQEBBQUAMD4xCzAJBgNVBAYTAlBM
+MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD
+QTAeFw0wMjA2MTExMDQ2MzlaFw0yNzA2MTExMDQ2MzlaMD4xCzAJBgNVBAYTAlBM
+MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD
+QTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6xwS7TT3zNJc4YPk/E
+jG+AanPIW1H4m9LcuwBcsaD8dQPugfCI7iNS6eYVM42sLQnFdvkrOYCJ5JdLkKWo
+ePhzQ3ukYbDYWMzhbGZ+nPMJXlVjhNWo7/OxLjBos8Q82KxujZlakE403Daaj4GI
+ULdtlkIJ89eVgw1BS7Bqa/j8D35in2fE7SZfECYPCE/wpFcozo+47UX2bu4lXapu
+Ob7kky/ZR6By6/qmW6/KUz/iDsaWVhFu9+lmqSbYf5VT7QqFiLpPKaVCjF62/IUg
+AKpoC6EahQGcxEZjgoi2IrHu/qpGWX7PNSzVttpd90gzFFS269lvzs2I1qsb2pY7
+HVkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEA
+uI3O7+cUus/usESSbLQ5PqKEbq24IXfS1HeCh+YgQYHu4vgRt2PRFze+GXYkHAQa
+TOs9qmdvLdTN/mUxcMUbpgIKumB7bVjCmkn+YzILa+M6wKyrO7Do0wlRjBCDxjTg
+xSvgGrZgFCdsMneMvLJymM/NzD+5yCRCFNZX/OYmQ6kd5YCQzgNUKD73P9P4Te1q
+CjqTE5s7FCMTY5w/0YcneeVMUeMBrYVdGjux1XMQpNPyvG5k9VpWkKjHDkx0Dy5x
+O/fIR/RpbxXyEV6DHpx8Uq79AtoSqFlnGNu8cN2bsWntgM6JQEhqDjXKKWYVIZQs
+6GAqm4VKQPNriiTsBhYscw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Certificate Services O=Comodo CA Limited
+# Subject: CN=Secure Certificate Services O=Comodo CA Limited
+# Label: "Comodo Secure Services root"
+# Serial: 1
+# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd
+# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1
+# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8
+-----BEGIN CERTIFICATE-----
+MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp
+ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow
+fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV
+BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM
+cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S
+HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996
+CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk
+3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz
+6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV
+HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud
+EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv
+Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw
+Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww
+DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0
+5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj
+Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI
+gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ
+aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl
+izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited
+# Subject: CN=Trusted Certificate Services O=Comodo CA Limited
+# Label: "Comodo Trusted Services root"
+# Serial: 1
+# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27
+# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd
+# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0
+aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla
+MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO
+BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD
+VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW
+fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt
+TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL
+fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW
+1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7
+kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G
+A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v
+ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo
+dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu
+Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/
+HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32
+pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS
+jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+
+xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn
+dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Label: "QuoVadis Root CA"
+# Serial: 985026699
+# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24
+# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9
+# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73
+-----BEGIN CERTIFICATE-----
+MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz
+MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw
+IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR
+dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp
+li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D
+rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ
+WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug
+F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU
+xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC
+Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv
+dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw
+ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl
+IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh
+c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy
+ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh
+Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI
+KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T
+KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq
+y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p
+dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD
+VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL
+MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk
+fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8
+7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R
+cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y
+mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW
+xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK
+SnQ2+Q==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
+# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
+# Label: "Security Communication Root CA"
+# Serial: 0
+# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
+# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
+# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
+-----BEGIN CERTIFICATE-----
+MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
+MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
+WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
+VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
+9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
+DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
+Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
+QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
+xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
+A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
+Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
+JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
+RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sonera Class2 CA O=Sonera
+# Subject: CN=Sonera Class2 CA O=Sonera
+# Label: "Sonera Class 2 Root CA"
+# Serial: 29
+# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb
+# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27
+# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
+MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx
+MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV
+BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o
+Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt
+5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s
+3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej
+vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu
+8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw
+DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG
+MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil
+zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/
+3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD
+FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6
+Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
+ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA"
+# Serial: 10000010
+# MD5 Fingerprint: 60:84:7c:5a:ce:db:0c:d4:cb:a7:e9:fe:02:c6:a9:c0
+# SHA1 Fingerprint: 10:1d:fa:3f:d5:0b:cb:bb:9b:b5:60:0c:19:55:a4:1a:f4:73:3a:04
+# SHA256 Fingerprint: d4:1d:82:9e:8c:16:59:82:2a:f9:3f:ce:62:bf:fc:de:26:4f:c8:4e:8b:95:0c:5f:f2:75:d0:52:35:46:95:a3
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgIEAJiWijANBgkqhkiG9w0BAQUFADBVMQswCQYDVQQGEwJO
+TDEeMBwGA1UEChMVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSYwJAYDVQQDEx1TdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQTAeFw0wMjEyMTcwOTIzNDlaFw0xNTEy
+MTYwOTE1MzhaMFUxCzAJBgNVBAYTAk5MMR4wHAYDVQQKExVTdGFhdCBkZXIgTmVk
+ZXJsYW5kZW4xJjAkBgNVBAMTHVN0YWF0IGRlciBOZWRlcmxhbmRlbiBSb290IENB
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmNK1URF6gaYUmHFtvszn
+ExvWJw56s2oYHLZhWtVhCb/ekBPHZ+7d89rFDBKeNVU+LCeIQGv33N0iYfXCxw71
+9tV2U02PjLwYdjeFnejKScfST5gTCaI+Ioicf9byEGW07l8Y1Rfj+MX94p2i71MO
+hXeiD+EwR+4A5zN9RGcaC1Hoi6CeUJhoNFIfLm0B8mBF8jHrqTFoKbt6QZ7GGX+U
+tFE5A3+y3qcym7RHjm+0Sq7lr7HcsBthvJly3uSJt3omXdozSVtSnA71iq3DuD3o
+BmrC1SoLbHuEvVYFy4ZlkuxEK7COudxwC0barbxjiDn622r+I/q85Ej0ZytqERAh
+SQIDAQABo4GRMIGOMAwGA1UdEwQFMAMBAf8wTwYDVR0gBEgwRjBEBgRVHSAAMDww
+OgYIKwYBBQUHAgEWLmh0dHA6Ly93d3cucGtpb3ZlcmhlaWQubmwvcG9saWNpZXMv
+cm9vdC1wb2xpY3kwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSofeu8Y6R0E3QA
+7Jbg0zTBLL9s+DANBgkqhkiG9w0BAQUFAAOCAQEABYSHVXQ2YcG70dTGFagTtJ+k
+/rvuFbQvBgwp8qiSpGEN/KtcCFtREytNwiphyPgJWPwtArI5fZlmgb9uXJVFIGzm
+eafR2Bwp/MIgJ1HI8XxdNGdphREwxgDS1/PTfLbwMVcoEoJz6TMvplW0C5GUR5z6
+u3pCMuiufi3IvKwUv9kP2Vv8wfl6leF9fpb8cbDCTMjfRTTJzg3ynGQI0DvDKcWy
+7ZAEwbEpkcUwb8GpcjPM/l0WFywRaed+/sWDCN+83CI6LiBpIzlWYGeQiy52OfsR
+iJf2fL1LuCAWZwWN4jvBcj+UlTfHXbme2JOhF4//DGYVwSR8MnwDHTuhWEUykw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
+# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
+# Label: "UTN DATACorp SGC Root CA"
+# Serial: 91374294542884689855167577680241077609
+# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06
+# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4
+# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48
+-----BEGIN CERTIFICATE-----
+MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB
+kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw
+IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG
+EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD
+VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu
+dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6
+E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ
+D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK
+4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq
+lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW
+bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB
+o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT
+MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js
+LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr
+BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB
+AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft
+Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj
+j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH
+KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv
+2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3
+mfnGV/TJVTl4uix5yaaIK/QI
+-----END CERTIFICATE-----
+
+# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
+# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
+# Label: "UTN USERFirst Hardware Root CA"
+# Serial: 91374294542884704022267039221184531197
+# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39
+# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7
+# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37
+-----BEGIN CERTIFICATE-----
+MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
+lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
+SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG
+A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe
+MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v
+d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh
+cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn
+0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ
+M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a
+MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd
+oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI
+DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy
+oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD
+VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0
+dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy
+bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF
+BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM
+//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli
+CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE
+CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
+3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS
+KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
+# Subject: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
+# Label: "Camerfirma Chambers of Commerce Root"
+# Serial: 0
+# MD5 Fingerprint: b0:01:ee:14:d9:af:29:18:94:76:8e:f1:69:33:2a:84
+# SHA1 Fingerprint: 6e:3a:55:a4:19:0c:19:5c:93:84:3c:c0:db:72:2e:31:30:61:f0:b1
+# SHA256 Fingerprint: 0c:25:8a:12:a5:67:4a:ef:25:f2:8b:a7:dc:fa:ec:ee:a3:48:e5:41:e6:f5:cc:4e:e6:3b:71:b3:61:60:6a:c3
+-----BEGIN CERTIFICATE-----
+MIIEvTCCA6WgAwIBAgIBADANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJFVTEn
+MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL
+ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEiMCAGA1UEAxMZQ2hhbWJlcnMg
+b2YgQ29tbWVyY2UgUm9vdDAeFw0wMzA5MzAxNjEzNDNaFw0zNzA5MzAxNjEzNDRa
+MH8xCzAJBgNVBAYTAkVVMScwJQYDVQQKEx5BQyBDYW1lcmZpcm1hIFNBIENJRiBB
+ODI3NDMyODcxIzAhBgNVBAsTGmh0dHA6Ly93d3cuY2hhbWJlcnNpZ24ub3JnMSIw
+IAYDVQQDExlDaGFtYmVycyBvZiBDb21tZXJjZSBSb290MIIBIDANBgkqhkiG9w0B
+AQEFAAOCAQ0AMIIBCAKCAQEAtzZV5aVdGDDg2olUkfzIx1L4L1DZ77F1c2VHfRtb
+unXF/KGIJPov7coISjlUxFF6tdpg6jg8gbLL8bvZkSM/SAFwdakFKq0fcfPJVD0d
+BmpAPrMMhe5cG3nCYsS4No41XQEMIwRHNaqbYE6gZj3LJgqcQKH0XZi/caulAGgq
+7YN6D6IUtdQis4CwPAxaUWktWBiP7Zme8a7ileb2R6jWDA+wWFjbw2Y3npuRVDM3
+0pQcakjJyfKl2qUMI/cjDpwyVV5xnIQFUZot/eZOKjRa3spAN2cMVCFVd9oKDMyX
+roDclDZK9D7ONhMeU+SsTjoF7Nuucpw4i9A5O4kKPnf+dQIBA6OCAUQwggFAMBIG
+A1UdEwEB/wQIMAYBAf8CAQwwPAYDVR0fBDUwMzAxoC+gLYYraHR0cDovL2NybC5j
+aGFtYmVyc2lnbi5vcmcvY2hhbWJlcnNyb290LmNybDAdBgNVHQ4EFgQU45T1sU3p
+26EpW1eLTXYGduHRooowDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIA
+BzAnBgNVHREEIDAegRxjaGFtYmVyc3Jvb3RAY2hhbWJlcnNpZ24ub3JnMCcGA1Ud
+EgQgMB6BHGNoYW1iZXJzcm9vdEBjaGFtYmVyc2lnbi5vcmcwWAYDVR0gBFEwTzBN
+BgsrBgEEAYGHLgoDATA+MDwGCCsGAQUFBwIBFjBodHRwOi8vY3BzLmNoYW1iZXJz
+aWduLm9yZy9jcHMvY2hhbWJlcnNyb290Lmh0bWwwDQYJKoZIhvcNAQEFBQADggEB
+AAxBl8IahsAifJ/7kPMa0QOx7xP5IV8EnNrJpY0nbJaHkb5BkAFyk+cefV/2icZd
+p0AJPaxJRUXcLo0waLIJuvvDL8y6C98/d3tGfToSJI6WjzwFCm/SlCgdbQzALogi
+1djPHRPH8EjX1wWnz8dHnjs8NMiAT9QUu/wNUPf6s+xCX6ndbcj0dc97wXImsQEc
+XCz9ek60AcUFV7nnPKoF2YjpB0ZBzu9Bga5Y34OirsrXdx/nADydb47kMgkdTXg0
+eDQ8lJsm7U9xxhl6vSAiSFr+S30Dt+dYvsYyTnQeaN2oaFuzPu5ifdmA6Ap1erfu
+tGWaIZDgqtCYvDi1czyL+Nw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
+# Subject: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
+# Label: "Camerfirma Global Chambersign Root"
+# Serial: 0
+# MD5 Fingerprint: c5:e6:7b:bf:06:d0:4f:43:ed:c4:7a:65:8a:fb:6b:19
+# SHA1 Fingerprint: 33:9b:6b:14:50:24:9b:55:7a:01:87:72:84:d9:e0:2f:c3:d2:d8:e9
+# SHA256 Fingerprint: ef:3c:b4:17:fc:8e:bf:6f:97:87:6c:9e:4e:ce:39:de:1e:a5:fe:64:91:41:d1:02:8b:7d:11:c0:b2:29:8c:ed
+-----BEGIN CERTIFICATE-----
+MIIExTCCA62gAwIBAgIBADANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJFVTEn
+MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL
+ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENo
+YW1iZXJzaWduIFJvb3QwHhcNMDMwOTMwMTYxNDE4WhcNMzcwOTMwMTYxNDE4WjB9
+MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgy
+NzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4G
+A1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwggEgMA0GCSqGSIb3DQEBAQUA
+A4IBDQAwggEIAoIBAQCicKLQn0KuWxfH2H3PFIP8T8mhtxOviteePgQKkotgVvq0
+Mi+ITaFgCPS3CU6gSS9J1tPfnZdan5QEcOw/Wdm3zGaLmFIoCQLfxS+EjXqXd7/s
+QJ0lcqu1PzKY+7e3/HKE5TWH+VX6ox8Oby4o3Wmg2UIQxvi1RMLQQ3/bvOSiPGpV
+eAp3qdjqGTK3L/5cPxvusZjsyq16aUXjlg9V9ubtdepl6DJWk0aJqCWKZQbua795
+B9Dxt6/tLE2Su8CoX6dnfQTyFQhwrJLWfQTSM/tMtgsL+xrJxI0DqX5c8lCrEqWh
+z0hQpe/SyBoT+rB/sYIcd2oPX9wLlY/vQ37mRQklAgEDo4IBUDCCAUwwEgYDVR0T
+AQH/BAgwBgEB/wIBDDA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vY3JsLmNoYW1i
+ZXJzaWduLm9yZy9jaGFtYmVyc2lnbnJvb3QuY3JsMB0GA1UdDgQWBBRDnDafsJ4w
+TcbOX60Qq+UDpfqpFDAOBgNVHQ8BAf8EBAMCAQYwEQYJYIZIAYb4QgEBBAQDAgAH
+MCoGA1UdEQQjMCGBH2NoYW1iZXJzaWducm9vdEBjaGFtYmVyc2lnbi5vcmcwKgYD
+VR0SBCMwIYEfY2hhbWJlcnNpZ25yb290QGNoYW1iZXJzaWduLm9yZzBbBgNVHSAE
+VDBSMFAGCysGAQQBgYcuCgEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly9jcHMuY2hh
+bWJlcnNpZ24ub3JnL2Nwcy9jaGFtYmVyc2lnbnJvb3QuaHRtbDANBgkqhkiG9w0B
+AQUFAAOCAQEAPDtwkfkEVCeR4e3t/mh/YV3lQWVPMvEYBZRqHN4fcNs+ezICNLUM
+bKGKfKX0j//U2K0X1S0E0T9YgOKBWYi+wONGkyT+kL0mojAt6JcmVzWJdJYY9hXi
+ryQZVgICsroPFOrGimbBhkVVi76SvpykBMdJPJ7oKXqJ1/6v/2j1pReQvayZzKWG
+VwlnRtvWFsJG8eSpUPWP0ZIV018+xgBJOm5YstHRJw0lyDL4IBHNfTIzSJRUTN3c
+ecQwn+uOuFW114hcxWokPbLTBQNRxgfvzBRydD1ucs4YKIxKoHflCStFREest2d/
+AYoFWpO+ocH/+OcOZ6RHSXZddZAa9SaP8A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
+# Subject: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
+# Label: "NetLock Notary (Class A) Root"
+# Serial: 259
+# MD5 Fingerprint: 86:38:6d:5e:49:63:6c:85:5c:db:6d:dc:94:b7:d0:f7
+# SHA1 Fingerprint: ac:ed:5f:65:53:fd:25:ce:01:5f:1f:7a:48:3b:6a:74:9f:61:78:c6
+# SHA256 Fingerprint: 7f:12:cd:5f:7e:5e:29:0e:c7:d8:51:79:d5:b7:2c:20:a5:be:75:08:ff:db:5b:f8:1a:b9:68:4a:7f:c9:f6:67
+-----BEGIN CERTIFICATE-----
+MIIGfTCCBWWgAwIBAgICAQMwDQYJKoZIhvcNAQEEBQAwga8xCzAJBgNVBAYTAkhV
+MRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMe
+TmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0
+dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBLb3pqZWd5em9pIChDbGFzcyBB
+KSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNDIzMTQ0N1oXDTE5MDIxOTIzMTQ0
+N1owga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhC
+dWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQu
+MRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBL
+b3pqZWd5em9pIChDbGFzcyBBKSBUYW51c2l0dmFueWtpYWRvMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvHSMD7tM9DceqQWC2ObhbHDqeLVu0ThEDaiD
+zl3S1tWBxdRL51uUcCbbO51qTGL3cfNk1mE7PetzozfZz+qMkjvN9wfcZnSX9EUi
+3fRc4L9t875lM+QVOr/bmJBVOMTtplVjC7B4BPTjbsE/jvxReB+SnoPC/tmwqcm8
+WgD/qaiYdPv2LD4VOQ22BFWoDpggQrOxJa1+mm9dU7GrDPzr4PN6s6iz/0b2Y6LY
+Oph7tqyF/7AlT3Rj5xMHpQqPBffAZG9+pyeAlt7ULoZgx2srXnN7F+eRP2QM2Esi
+NCubMvJIH5+hCoR64sKtlz2O1cH5VqNQ6ca0+pii7pXmKgOM3wIDAQABo4ICnzCC
+ApswDgYDVR0PAQH/BAQDAgAGMBIGA1UdEwEB/wQIMAYBAf8CAQQwEQYJYIZIAYb4
+QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1GSUdZRUxFTSEgRXplbiB0
+YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pvbGdhbHRhdGFz
+aSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQu
+IEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtm
+ZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMg
+ZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVs
+amFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJhc2EgbWVndGFsYWxoYXRv
+IGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBhIGh0dHBzOi8vd3d3
+Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVub3J6
+ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1
+YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3Qg
+dG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRs
+b2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNAbmV0bG9jay5uZXQuMA0G
+CSqGSIb3DQEBBAUAA4IBAQBIJEb3ulZv+sgoA0BO5TE5ayZrU3/b39/zcT0mwBQO
+xmd7I6gMc90Bu8bKbjc5VdXHjFYgDigKDtIqpLBJUsY4B/6+CgmM0ZjPytoUMaFP
+0jn8DxEsQ8Pdq5PHVT5HfBgaANzze9jyf1JsIPQLX2lS9O74silg6+NJMSEN1rUQ
+QeJBCWziGppWS3cC9qCbmieH6FUpccKQn0V4GuEVZD3QDtigdp+uxdAu6tYPVuxk
+f1qbFFgBJ34TUMdrKuZoPL9coAob4Q566eKAw+np9v1sEZ7Q5SgnK1QyQhSCdeZK
+8CtmdWOMovsEPoMOmzbwGOQmIMOM8CgHrTwXZoi1/baI
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Label: "StartCom Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16
+# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f
+# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea
+-----BEGIN CERTIFICATE-----
+MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
+Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9
+MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
+U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
+cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
+pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
+OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
+Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
+Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
+HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
+Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
+Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
+26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
+AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE
+FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j
+ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js
+LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM
+BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0
+Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy
+dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh
+cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh
+YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg
+dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp
+bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ
+YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT
+TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ
+9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8
+jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW
+FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz
+ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1
+ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L
+EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu
+L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq
+yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC
+O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V
+um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
+NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
+-----END CERTIFICATE-----
+
+# Issuer: O=Government Root Certification Authority
+# Subject: O=Government Root Certification Authority
+# Label: "Taiwan GRCA"
+# Serial: 42023070807708724159991140556527066870
+# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
+# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
+# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
+MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
+PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
+IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
+gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
+yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
+F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
+jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
+ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
+VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
+YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
+EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
+Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
+DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
+MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
+TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
+qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
+ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
+JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
+hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
+EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
+nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
+udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
+ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
+LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
+pYYsfPQS
+-----END CERTIFICATE-----
+
+# Issuer: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services
+# Subject: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services
+# Label: "Swisscom Root CA 1"
+# Serial: 122348795730808398873664200247279986742
+# MD5 Fingerprint: f8:38:7c:77:88:df:2c:16:68:2e:c2:e2:52:4b:b8:f9
+# SHA1 Fingerprint: 5f:3a:fc:0a:8b:64:f6:86:67:34:74:df:7e:a9:a2:fe:f9:fa:7a:51
+# SHA256 Fingerprint: 21:db:20:12:36:60:bb:2e:d4:18:20:5d:a1:1e:e7:a8:5a:65:e2:bc:6e:55:b5:af:7e:78:99:c8:a2:66:d9:2e
+-----BEGIN CERTIFICATE-----
+MIIF2TCCA8GgAwIBAgIQXAuFXAvnWUHfV8w/f52oNjANBgkqhkiG9w0BAQUFADBk
+MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0
+YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg
+Q0EgMTAeFw0wNTA4MTgxMjA2MjBaFw0yNTA4MTgyMjA2MjBaMGQxCzAJBgNVBAYT
+AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp
+Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAxMIICIjAN
+BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0LmwqAzZuz8h+BvVM5OAFmUgdbI9
+m2BtRsiMMW8Xw/qabFbtPMWRV8PNq5ZJkCoZSx6jbVfd8StiKHVFXqrWW/oLJdih
+FvkcxC7mlSpnzNApbjyFNDhhSbEAn9Y6cV9Nbc5fuankiX9qUvrKm/LcqfmdmUc/
+TilftKaNXXsLmREDA/7n29uj/x2lzZAeAR81sH8A25Bvxn570e56eqeqDFdvpG3F
+EzuwpdntMhy0XmeLVNxzh+XTF3xmUHJd1BpYwdnP2IkCb6dJtDZd0KTeByy2dbco
+kdaXvij1mB7qWybJvbCXc9qukSbraMH5ORXWZ0sKbU/Lz7DkQnGMU3nn7uHbHaBu
+HYwadzVcFh4rUx80i9Fs/PJnB3r1re3WmquhsUvhzDdf/X/NTa64H5xD+SpYVUNF
+vJbNcA78yeNmuk6NO4HLFWR7uZToXTNShXEuT46iBhFRyePLoW4xCGQMwtI89Tbo
+19AOeCMgkckkKmUpWyL3Ic6DXqTz3kvTaI9GdVyDCW4pa8RwjPWd1yAv/0bSKzjC
+L3UcPX7ape8eYIVpQtPM+GP+HkM5haa2Y0EQs3MevNP6yn0WR+Kn1dCjigoIlmJW
+bjTb2QK5MHXjBNLnj8KwEUAKrNVxAmKLMb7dxiNYMUJDLXT5xp6mig/p/r+D5kNX
+JLrvRjSq1xIBOO0CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw
+FDASBgdghXQBUwABBgdghXQBUwABMBIGA1UdEwEB/wQIMAYBAf8CAQcwHwYDVR0j
+BBgwFoAUAyUv3m+CATpcLNwroWm1Z9SM0/0wHQYDVR0OBBYEFAMlL95vggE6XCzc
+K6FptWfUjNP9MA0GCSqGSIb3DQEBBQUAA4ICAQA1EMvspgQNDQ/NwNurqPKIlwzf
+ky9NfEBWMXrrpA9gzXrzvsMnjgM+pN0S734edAY8PzHyHHuRMSG08NBsl9Tpl7Ik
+Vh5WwzW9iAUPWxAaZOHHgjD5Mq2eUCzneAXQMbFamIp1TpBcahQq4FJHgmDmHtqB
+sfsUC1rxn9KVuj7QG9YVHaO+htXbD8BJZLsuUBlL0iT43R4HVtA4oJVwIHaM190e
+3p9xxCPvgxNcoyQVTSlAPGrEqdi3pkSlDfTgnXceQHAm/NrZNuR55LU/vJtlvrsR
+ls/bxig5OgjOR1tTWsWZ/l2p3e9M1MalrQLmjAcSHm8D0W+go/MpvRLHUKKwf4ip
+mXeascClOS5cfGniLLDqN2qk4Vrh9VDlg++luyqI54zb/W1elxmofmZ1a3Hqv7HH
+b6D0jqTsNFFbjCYDcKF31QESVwA12yPeDooomf2xEG9L/zgtYE4snOtnta1J7ksf
+rK/7DZBaZmBwXarNeNQk7shBoJMBkpxqnvy5JMWzFYJ+vq6VK+uxwNrjAWALXmms
+hFZhvnEX/h0TD/7Gh0Xp/jKgGg0TpJRVcaUWi7rKibCyx/yP2FS1k2Kdzs9Z+z0Y
+zirLNRWCXf9UIltxUvu3yf5gmwBBZPCqKuy2QkPOiWaByIufOVQDJdMWNY6E0F/6
+MBr1mmz0DlP5OlvRHA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=Class 2 Primary CA O=Certplus
+# Subject: CN=Class 2 Primary CA O=Certplus
+# Label: "Certplus Class 2 Primary CA"
+# Serial: 177770208045934040241468760488327595043
+# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b
+# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb
+# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb
+-----BEGIN CERTIFICATE-----
+MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw
+PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz
+cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9
+MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz
+IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ
+ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR
+VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL
+kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd
+EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas
+H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0
+HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud
+DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4
+QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu
+Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/
+AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8
+yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR
+FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA
+ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB
+kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7
+l7+ijrRU
+-----END CERTIFICATE-----
+
+# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Label: "DST Root CA X3"
+# Serial: 91299735575339953335919266965803778155
+# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5
+# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13
+# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
+PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
+Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
+rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
+OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
+xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
+7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
+aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
+SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
+ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
+AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
+R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
+JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
+Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES
+# Subject: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES
+# Label: "DST ACES CA X6"
+# Serial: 17771143917277623872238992636097467865
+# MD5 Fingerprint: 21:d8:4c:82:2b:99:09:33:a2:eb:14:24:8d:8e:5f:e8
+# SHA1 Fingerprint: 40:54:da:6f:1c:3f:40:74:ac:ed:0f:ec:cd:db:79:d1:53:fb:90:1d
+# SHA256 Fingerprint: 76:7c:95:5a:76:41:2c:89:af:68:8e:90:a1:c7:0f:55:6c:fd:6b:60:25:db:ea:10:41:6d:7e:b6:83:1f:8c:40
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIQDV6ZCtadt3js2AdWO4YV2TANBgkqhkiG9w0BAQUFADBb
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3Qx
+ETAPBgNVBAsTCERTVCBBQ0VTMRcwFQYDVQQDEw5EU1QgQUNFUyBDQSBYNjAeFw0w
+MzExMjAyMTE5NThaFw0xNzExMjAyMTE5NThaMFsxCzAJBgNVBAYTAlVTMSAwHgYD
+VQQKExdEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdDERMA8GA1UECxMIRFNUIEFDRVMx
+FzAVBgNVBAMTDkRTVCBBQ0VTIENBIFg2MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAuT31LMmU3HWKlV1j6IR3dma5WZFcRt2SPp/5DgO0PWGSvSMmtWPu
+ktKe1jzIDZBfZIGxqAgNTNj50wUoUrQBJcWVHAx+PhCEdc/BGZFjz+iokYi5Q1K7
+gLFViYsx+tC3dr5BPTCapCIlF3PoHuLTrCq9Wzgh1SpL11V94zpVvddtawJXa+ZH
+fAjIgrrep4c9oW24MFbCswKBXy314powGCi4ZtPLAZZv6opFVdbgnf9nKxcCpk4a
+ahELfrd755jWjHZvwTvbUJN+5dCOHze4vbrGn2zpfDPyMjwmR/onJALJfh1biEIT
+ajV8fTXpLmaRcpPVMibEdPVTo7NdmvYJywIDAQABo4HIMIHFMA8GA1UdEwEB/wQF
+MAMBAf8wDgYDVR0PAQH/BAQDAgHGMB8GA1UdEQQYMBaBFHBraS1vcHNAdHJ1c3Rk
+c3QuY29tMGIGA1UdIARbMFkwVwYKYIZIAWUDAgEBATBJMEcGCCsGAQUFBwIBFjto
+dHRwOi8vd3d3LnRydXN0ZHN0LmNvbS9jZXJ0aWZpY2F0ZXMvcG9saWN5L0FDRVMt
+aW5kZXguaHRtbDAdBgNVHQ4EFgQUCXIGThhDD+XWzMNqizF7eI+og7gwDQYJKoZI
+hvcNAQEFBQADggEBAKPYjtay284F5zLNAdMEA+V25FYrnJmQ6AgwbN99Pe7lv7Uk
+QIRJ4dEorsTCOlMwiPH1d25Ryvr/ma8kXxug/fKshMrfqfBfBC6tFr8hlxCBPeP/
+h40y3JTlR4peahPJlJU90u7INJXQgNStMgiAVDzgvVJT11J8smk/f3rPanTK+gQq
+nExaBqXpIK1FZg9p8d2/6eMyi/rgwYZNcjwu2JN4Cir42NInPRmJX1p7ijvMDNpR
+rscL9yuwNwXsvFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf2
+9w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
+# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
+# Label: "TURKTRUST Certificate Services Provider Root 2"
+# Serial: 1
+# MD5 Fingerprint: 37:a5:6e:d4:b1:25:84:97:b7:fd:56:15:7a:f9:a2:00
+# SHA1 Fingerprint: b4:35:d4:e1:11:9d:1c:66:90:a7:49:eb:b3:94:bd:63:7b:a7:82:b7
+# SHA256 Fingerprint: c4:70:cf:54:7e:23:02:b9:77:fb:29:dd:71:a8:9a:7b:6c:1f:60:77:7b:03:29:f5:60:17:f3:28:bf:4f:6b:e6
+-----BEGIN CERTIFICATE-----
+MIIEPDCCAySgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvjE/MD0GA1UEAww2VMOc
+UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
+c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xS
+S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg
+SGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwHhcNMDUxMTA3MTAwNzU3
+WhcNMTUwOTE2MTAwNzU3WjCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBFbGVrdHJv
+bmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJU
+UjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSw
+bGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWe
+LiAoYykgS2FzxLFtIDIwMDUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCpNn7DkUNMwxmYCMjHWHtPFoylzkkBH3MOrHUTpvqeLCDe2JAOCtFp0if7qnef
+J1Il4std2NiDUBd9irWCPwSOtNXwSadktx4uXyCcUHVPr+G1QRT0mJKIx+XlZEdh
+R3n9wFHxwZnn3M5q+6+1ATDcRhzviuyV79z/rxAc653YsKpqhRgNF8k+v/Gb0AmJ
+Qv2gQrSdiVFVKc8bcLyEVK3BEx+Y9C52YItdP5qtygy/p1Zbj3e41Z55SZI/4PGX
+JHpsmxcPbe9TmJEr5A++WXkHeLuXlfSfadRYhwqp48y2WBmfJiGxxFmNskF1wK1p
+zpwACPI2/z7woQ8arBT9pmAPAgMBAAGjQzBBMB0GA1UdDgQWBBTZN7NOBf3Zz58S
+Fq62iS/rJTqIHDAPBgNVHQ8BAf8EBQMDBwYAMA8GA1UdEwEB/wQFMAMBAf8wDQYJ
+KoZIhvcNAQEFBQADggEBAHJglrfJ3NgpXiOFX7KzLXb7iNcX/nttRbj2hWyfIvwq
+ECLsqrkw9qtY1jkQMZkpAL2JZkH7dN6RwRgLn7Vhy506vvWolKMiVW4XSf/SKfE4
+Jl3vpao6+XF75tpYHdN0wgH6PmlYX63LaL4ULptswLbcoCb6dxriJNoaN+BnrdFz
+gw2lGh1uEpJ+hGIAF728JRhX8tepb1mIvDS3LoV4nZbcFMMsilKbloxSZj2GFotH
+uFEJjOp9zYhys2AzsfAKRO8P9Qk3iCQOLGsgOqL6EfJANZxEaGM7rDNvY7wsu/LS
+y3Z9fYjYHcgFHW68lKlmjHdxx/qR+i9Rnuk5UrbnBEI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Label: "SwissSign Silver CA - G2"
+# Serial: 5700383053117599563
+# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
+# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
+# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Label: "GeoTrust Primary Certification Authority"
+# Serial: 32798226551256963324313806436981982369
+# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
+# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
+# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA"
+# Serial: 69529181992039203566298953787712940909
+# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
+# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
+# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
+# Serial: 33037644167568058970164719475676101450
+# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
+# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
+# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA
+# Subject: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA
+# Label: "WellsSecure Public Root Certificate Authority"
+# Serial: 1
+# MD5 Fingerprint: 15:ac:a5:c2:92:2d:79:bc:e8:7f:cb:67:ed:02:cf:36
+# SHA1 Fingerprint: e7:b4:f6:9d:61:ec:90:69:db:7e:90:a7:40:1a:3c:f4:7d:4f:e8:ee
+# SHA256 Fingerprint: a7:12:72:ae:aa:a3:cf:e8:72:7f:7f:b3:9f:0f:b3:d1:e5:42:6e:90:60:b0:6e:e6:f1:3e:9a:3c:58:33:cd:43
+-----BEGIN CERTIFICATE-----
+MIIEvTCCA6WgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBhTELMAkGA1UEBhMCVVMx
+IDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxs
+cyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9v
+dCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDcxMjEzMTcwNzU0WhcNMjIxMjE0
+MDAwNzU0WjCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdl
+bGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQD
+DC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkw
+ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDub7S9eeKPCCGeOARBJe+r
+WxxTkqxtnt3CxC5FlAM1iGd0V+PfjLindo8796jE2yljDpFoNoqXjopxaAkH5OjU
+Dk/41itMpBb570OYj7OeUt9tkTmPOL13i0Nj67eT/DBMHAGTthP796EfvyXhdDcs
+HqRePGj4S78NuR4uNuip5Kf4D8uCdXw1LSLWwr8L87T8bJVhHlfXBIEyg1J55oNj
+z7fLY4sR4r1e6/aN7ZVyKLSsEmLpSjPmgzKuBXWVvYSV2ypcm44uDLiBK0HmOFaf
+SZtsdvqKXfcBeYF8wYNABf5x/Qw/zE5gCQ5lRxAvAcAFP4/4s0HvWkJ+We/Slwxl
+AgMBAAGjggE0MIIBMDAPBgNVHRMBAf8EBTADAQH/MDkGA1UdHwQyMDAwLqAsoCqG
+KGh0dHA6Ly9jcmwucGtpLndlbGxzZmFyZ28uY29tL3dzcHJjYS5jcmwwDgYDVR0P
+AQH/BAQDAgHGMB0GA1UdDgQWBBQmlRkQ2eihl5H/3BnZtQQ+0nMKajCBsgYDVR0j
+BIGqMIGngBQmlRkQ2eihl5H/3BnZtQQ+0nMKaqGBi6SBiDCBhTELMAkGA1UEBhMC
+VVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNX
+ZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMg
+Um9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHmCAQEwDQYJKoZIhvcNAQEFBQADggEB
+ALkVsUSRzCPIK0134/iaeycNzXK7mQDKfGYZUMbVmO2rvwNa5U3lHshPcZeG1eMd
+/ZDJPHV3V3p9+N701NX3leZ0bh08rnyd2wIDBSxxSyU+B+NemvVmFymIGjifz6pB
+A4SXa5M4esowRBskRDPQ5NHcKDj0E0M1NSljqHyita04pO2t/caaH/+Xc/77szWn
+k4bGdpEA5qxRFsQnMlzbc9qlk1eOPm01JghZ1edE13YgY+esE2fDbbFwRnzVlhE9
+iW9dqKHrjQrawx0zbKPqZxmamX9LPYNRKh3KL4YMon4QLSvUFpULB6ouFJJJtylv
+2G0xffX8oRAHh84vWdw+WNs=
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=IGC/A O=PM/SGDN OU=DCSSI
+# Subject: CN=IGC/A O=PM/SGDN OU=DCSSI
+# Label: "IGC/A"
+# Serial: 245102874772
+# MD5 Fingerprint: 0c:7f:dd:6a:f4:2a:b9:c8:9b:bd:20:7e:a9:db:5c:37
+# SHA1 Fingerprint: 60:d6:89:74:b5:c2:65:9e:8a:0f:c1:88:7c:88:d2:46:69:1b:18:2c
+# SHA256 Fingerprint: b9:be:a7:86:0a:96:2e:a3:61:1d:ab:97:ab:6d:a3:e2:1c:10:68:b9:7d:55:57:5e:d0:e1:12:79:c1:1c:89:32
+-----BEGIN CERTIFICATE-----
+MIIEAjCCAuqgAwIBAgIFORFFEJQwDQYJKoZIhvcNAQEFBQAwgYUxCzAJBgNVBAYT
+AkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQ
+TS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG
+9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZyMB4XDTAyMTIxMzE0MjkyM1oXDTIw
+MTAxNzE0MjkyMlowgYUxCzAJBgNVBAYTAkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAM
+BgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEO
+MAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2
+LmZyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsh/R0GLFMzvABIaI
+s9z4iPf930Pfeo2aSVz2TqrMHLmh6yeJ8kbpO0px1R2OLc/mratjUMdUC24SyZA2
+xtgv2pGqaMVy/hcKshd+ebUyiHDKcMCWSo7kVc0dJ5S/znIq7Fz5cyD+vfcuiWe4
+u0dzEvfRNWk68gq5rv9GQkaiv6GFGvm/5P9JhfejcIYyHF2fYPepraX/z9E0+X1b
+F8bc1g4oa8Ld8fUzaJ1O/Id8NhLWo4DoQw1VYZTqZDdH6nfK0LJYBcNdfrGoRpAx
+Vs5wKpayMLh35nnAvSk7/ZR3TL0gzUEl4C7HG7vupARB0l2tEmqKm0f7yd1GQOGd
+PDPQtQIDAQABo3cwdTAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBRjAVBgNV
+HSAEDjAMMAoGCCqBegF5AQEBMB0GA1UdDgQWBBSjBS8YYFDCiQrdKyFP/45OqDAx
+NjAfBgNVHSMEGDAWgBSjBS8YYFDCiQrdKyFP/45OqDAxNjANBgkqhkiG9w0BAQUF
+AAOCAQEABdwm2Pp3FURo/C9mOnTgXeQp/wYHE4RKq89toB9RlPhJy3Q2FLwV3duJ
+L92PoF189RLrn544pEfMs5bZvpwlqwN+Mw+VgQ39FuCIvjfwbF3QMZsyK10XZZOY
+YLxuj7GoPB7ZHPOpJkL5ZB3C55L29B5aqhlSXa/oovdgoPaN8In1buAKBQGVyYsg
+Crpa/JosPL3Dt8ldeCUFP1YUmwza+zpI/pdpXsoQhvdOlgQITeywvl3cO45Pwf2a
+NjSaTFR+FwNIlQgRHAdvhQh+XU3Endv7rs6y0bO4g2wdsrN58dhwmX7wEwLOXt1R
+0982gaEbeC9xs/FZTEYYKKuF0mBWWg==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1
+# Label: "Security Communication EV RootCA1"
+# Serial: 0
+# MD5 Fingerprint: 22:2d:a6:01:ea:7c:0a:f7:f0:6c:56:43:3f:77:76:d3
+# SHA1 Fingerprint: fe:b8:c4:32:dc:f9:76:9a:ce:ae:3d:d8:90:8f:fd:28:86:65:64:7d
+# SHA256 Fingerprint: a2:2d:ba:68:1e:97:37:6e:2d:39:7d:72:8a:ae:3a:9b:62:96:b9:fd:ba:60:bc:2e:11:f6:47:f2:c6:75:fb:37
+-----BEGIN CERTIFICATE-----
+MIIDfTCCAmWgAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEqMCgGA1UECxMh
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBFViBSb290Q0ExMB4XDTA3MDYwNjAyMTIz
+MloXDTM3MDYwNjAyMTIzMlowYDELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09N
+IFRydXN0IFN5c3RlbXMgQ08uLExURC4xKjAoBgNVBAsTIVNlY3VyaXR5IENvbW11
+bmljYXRpb24gRVYgUm9vdENBMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBALx/7FebJOD+nLpCeamIivqA4PUHKUPqjgo0No0c+qe1OXj/l3X3L+SqawSE
+RMqm4miO/VVQYg+kcQ7OBzgtQoVQrTyWb4vVog7P3kmJPdZkLjjlHmy1V4qe70gO
+zXppFodEtZDkBp2uoQSXWHnvIEqCa4wiv+wfD+mEce3xDuS4GBPMVjZd0ZoeUWs5
+bmB2iDQL87PRsJ3KYeJkHcFGB7hj3R4zZbOOCVVSPbW9/wfrrWFVGCypaZhKqkDF
+MxRldAD5kd6vA0jFQFTcD4SQaCDFkpbcLuUCRarAX1T4bepJz11sS6/vmsJWXMY1
+VkJqMF/Cq/biPT+zyRGPMUzXn0kCAwEAAaNCMEAwHQYDVR0OBBYEFDVK9U2vP9eC
+OKyrcWUXdYydVZPmMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G
+CSqGSIb3DQEBBQUAA4IBAQCoh+ns+EBnXcPBZsdAS5f8hxOQWsTvoMpfi7ent/HW
+tWS3irO4G8za+6xmiEHO6Pzk2x6Ipu0nUBsCMCRGef4Eh3CXQHPRwMFXGZpppSeZ
+q51ihPZRwSzJIxXYKLerJRO1RuGGAv8mjMSIkh1W/hln8lXkgKNrnKt34VFxDSDb
+EJrbvXZ5B3eZKK2aXtqxT0QsNY6llsf9g/BYxnnWmHyojf6GPgcWkuF75x3sM3Z+
+Qi5KhfmRiWiEA4Glm5q+4zfFVKtWOxgtQaQM+ELbmaDgcm+7XeEWT1MKZPlO9L9O
+VL14bIjqv5wTJMJwaaJ/D8g8rQjJsJhAoyrniIPtd490
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GA CA"
+# Serial: 86718877871133159090080555911823548314
+# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93
+# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9
+# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5
+-----BEGIN CERTIFICATE-----
+MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
+ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
+aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
+ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
+NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
+A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
+VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
+SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
+VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
+w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
+mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
+4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
+4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
+EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
+SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
+ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
+vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
+hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
+Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
+/L7fCg0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA
+# Subject: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA
+# Label: "Microsec e-Szigno Root CA"
+# Serial: 272122594155480254301341951808045322001
+# MD5 Fingerprint: f0:96:b6:2f:c5:10:d5:67:8e:83:25:32:e8:5e:2e:e5
+# SHA1 Fingerprint: 23:88:c9:d3:71:cc:9e:96:3d:ff:7d:3c:a7:ce:fc:d6:25:ec:19:0d
+# SHA256 Fingerprint: 32:7a:3d:76:1a:ba:de:a0:34:eb:99:84:06:27:5c:b1:a4:77:6e:fd:ae:2f:df:6d:01:68:ea:1c:4f:55:67:d0
+-----BEGIN CERTIFICATE-----
+MIIHqDCCBpCgAwIBAgIRAMy4579OKRr9otxmpRwsDxEwDQYJKoZIhvcNAQEFBQAw
+cjELMAkGA1UEBhMCSFUxETAPBgNVBAcTCEJ1ZGFwZXN0MRYwFAYDVQQKEw1NaWNy
+b3NlYyBMdGQuMRQwEgYDVQQLEwtlLVN6aWdubyBDQTEiMCAGA1UEAxMZTWljcm9z
+ZWMgZS1Temlnbm8gUm9vdCBDQTAeFw0wNTA0MDYxMjI4NDRaFw0xNzA0MDYxMjI4
+NDRaMHIxCzAJBgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVzdDEWMBQGA1UEChMN
+TWljcm9zZWMgTHRkLjEUMBIGA1UECxMLZS1Temlnbm8gQ0ExIjAgBgNVBAMTGU1p
+Y3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
+ggEKAoIBAQDtyADVgXvNOABHzNuEwSFpLHSQDCHZU4ftPkNEU6+r+ICbPHiN1I2u
+uO/TEdyB5s87lozWbxXGd36hL+BfkrYn13aaHUM86tnsL+4582pnS4uCzyL4ZVX+
+LMsvfUh6PXX5qqAnu3jCBspRwn5mS6/NoqdNAoI/gqyFxuEPkEeZlApxcpMqyabA
+vjxWTHOSJ/FrtfX9/DAFYJLG65Z+AZHCabEeHXtTRbjcQR/Ji3HWVBTji1R4P770
+Yjtb9aPs1ZJ04nQw7wHb4dSrmZsqa/i9phyGI0Jf7Enemotb9HI6QMVJPqW+jqpx
+62z69Rrkav17fVVA71hu5tnVvCSrwe+3AgMBAAGjggQ3MIIEMzBnBggrBgEFBQcB
+AQRbMFkwKAYIKwYBBQUHMAGGHGh0dHBzOi8vcmNhLmUtc3ppZ25vLmh1L29jc3Aw
+LQYIKwYBBQUHMAKGIWh0dHA6Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNydDAP
+BgNVHRMBAf8EBTADAQH/MIIBcwYDVR0gBIIBajCCAWYwggFiBgwrBgEEAYGoGAIB
+AQEwggFQMCgGCCsGAQUFBwIBFhxodHRwOi8vd3d3LmUtc3ppZ25vLmh1L1NaU1ov
+MIIBIgYIKwYBBQUHAgIwggEUHoIBEABBACAAdABhAG4A+gBzAO0AdAB2AOEAbgB5
+ACAA6QByAHQAZQBsAG0AZQB6AOkAcwDpAGgAZQB6ACAA6QBzACAAZQBsAGYAbwBn
+AGEAZADhAHMA4QBoAG8AegAgAGEAIABTAHoAbwBsAGcA4QBsAHQAYQB0APMAIABT
+AHoAbwBsAGcA4QBsAHQAYQB0AOEAcwBpACAAUwB6AGEAYgDhAGwAeQB6AGEAdABh
+ACAAcwB6AGUAcgBpAG4AdAAgAGsAZQBsAGwAIABlAGwAagDhAHIAbgBpADoAIABo
+AHQAdABwADoALwAvAHcAdwB3AC4AZQAtAHMAegBpAGcAbgBvAC4AaAB1AC8AUwBa
+AFMAWgAvMIHIBgNVHR8EgcAwgb0wgbqggbeggbSGIWh0dHA6Ly93d3cuZS1zemln
+bm8uaHUvUm9vdENBLmNybIaBjmxkYXA6Ly9sZGFwLmUtc3ppZ25vLmh1L0NOPU1p
+Y3Jvc2VjJTIwZS1Temlnbm8lMjBSb290JTIwQ0EsT1U9ZS1Temlnbm8lMjBDQSxP
+PU1pY3Jvc2VjJTIwTHRkLixMPUJ1ZGFwZXN0LEM9SFU/Y2VydGlmaWNhdGVSZXZv
+Y2F0aW9uTGlzdDtiaW5hcnkwDgYDVR0PAQH/BAQDAgEGMIGWBgNVHREEgY4wgYuB
+EGluZm9AZS1zemlnbm8uaHWkdzB1MSMwIQYDVQQDDBpNaWNyb3NlYyBlLVN6aWdu
+w7MgUm9vdCBDQTEWMBQGA1UECwwNZS1TemlnbsOzIEhTWjEWMBQGA1UEChMNTWlj
+cm9zZWMgS2Z0LjERMA8GA1UEBxMIQnVkYXBlc3QxCzAJBgNVBAYTAkhVMIGsBgNV
+HSMEgaQwgaGAFMegSXUWYYTbMUuE0vE3QJDvTtz3oXakdDByMQswCQYDVQQGEwJI
+VTERMA8GA1UEBxMIQnVkYXBlc3QxFjAUBgNVBAoTDU1pY3Jvc2VjIEx0ZC4xFDAS
+BgNVBAsTC2UtU3ppZ25vIENBMSIwIAYDVQQDExlNaWNyb3NlYyBlLVN6aWdubyBS
+b290IENBghEAzLjnv04pGv2i3GalHCwPETAdBgNVHQ4EFgQUx6BJdRZhhNsxS4TS
+8TdAkO9O3PcwDQYJKoZIhvcNAQEFBQADggEBANMTnGZjWS7KXHAM/IO8VbH0jgds
+ZifOwTsgqRy7RlRw7lrMoHfqaEQn6/Ip3Xep1fvj1KcExJW4C+FEaGAHQzAxQmHl
+7tnlJNUb3+FKG6qfx1/4ehHqE5MAyopYse7tDk2016g2JnzgOsHVV4Lxdbb9iV/a
+86g4nzUGCM4ilb7N1fy+W955a9x6qWVmvrElWl/tftOsRm1M9DKHtCAE4Gx4sHfR
+hUZLphK3dehKyVZs15KrnfVJONJPU+NVkBHbmJbGSfI+9J8b4PeI3CVimUTYc78/
+MPMMNz7UwiiAc7EBt51alhQBS6kRnSlqLtBdgcDPsiBDxwPgN05dCtxZICU=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Label: "Deutsche Telekom Root CA 2"
+# Serial: 38
+# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08
+# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf
+# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3
+-----BEGIN CERTIFICATE-----
+MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc
+MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj
+IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB
+IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE
+RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl
+U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290
+IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU
+ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC
+QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr
+rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S
+NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc
+QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH
+txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP
+BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
+AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp
+tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa
+IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl
+6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+
+xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
+Cm26OWMohpLzGITY+9HPBVZkVw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Label: "Cybertrust Global Root"
+# Serial: 4835703278459682877484360
+# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
+# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
+# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
+-----BEGIN CERTIFICATE-----
+MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
+A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
+bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
+b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
+7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
+J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
+HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
+t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
+FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
+XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
+hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
+MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
+A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
+Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
+omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
+A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
+WL1WMRJOEcgh4LMRkWXbtKaIOM5V
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi
+# Subject: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi
+# Label: "T\xc3\x9c\x42\xC4\xB0TAK UEKAE K\xC3\xB6k Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 - S\xC3\xBCr\xC3\xBCm 3"
+# Serial: 17
+# MD5 Fingerprint: ed:41:f5:8c:50:c5:2b:9c:73:e6:ee:6c:eb:c2:a8:26
+# SHA1 Fingerprint: 1b:4b:39:61:26:27:6b:64:91:a2:68:6d:d7:02:43:21:2d:1f:1d:96
+# SHA256 Fingerprint: e4:c7:34:30:d7:a5:b5:09:25:df:43:37:0a:0d:21:6e:9a:79:b9:d6:db:83:73:a0:c6:9e:b1:cc:31:c7:c5:2a
+-----BEGIN CERTIFICATE-----
+MIIFFzCCA/+gAwIBAgIBETANBgkqhkiG9w0BAQUFADCCASsxCzAJBgNVBAYTAlRS
+MRgwFgYDVQQHDA9HZWJ6ZSAtIEtvY2FlbGkxRzBFBgNVBAoMPlTDvHJraXllIEJp
+bGltc2VsIHZlIFRla25vbG9qaWsgQXJhxZ90xLFybWEgS3VydW11IC0gVMOcQsSw
+VEFLMUgwRgYDVQQLDD9VbHVzYWwgRWxla3Ryb25payB2ZSBLcmlwdG9sb2ppIEFy
+YcWfdMSxcm1hIEVuc3RpdMO8c8O8IC0gVUVLQUUxIzAhBgNVBAsMGkthbXUgU2Vy
+dGlmaWthc3lvbiBNZXJrZXppMUowSAYDVQQDDEFUw5xCxLBUQUsgVUVLQUUgS8O2
+ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSAtIFPDvHLDvG0gMzAe
+Fw0wNzA4MjQxMTM3MDdaFw0xNzA4MjExMTM3MDdaMIIBKzELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcMD0dlYnplIC0gS29jYWVsaTFHMEUGA1UECgw+VMO8cmtpeWUgQmls
+aW1zZWwgdmUgVGVrbm9sb2ppayBBcmHFn3TEsXJtYSBLdXJ1bXUgLSBUw5xCxLBU
+QUsxSDBGBgNVBAsMP1VsdXNhbCBFbGVrdHJvbmlrIHZlIEtyaXB0b2xvamkgQXJh
+xZ90xLFybWEgRW5zdGl0w7xzw7wgLSBVRUtBRTEjMCEGA1UECwwaS2FtdSBTZXJ0
+aWZpa2FzeW9uIE1lcmtlemkxSjBIBgNVBAMMQVTDnELEsFRBSyBVRUtBRSBLw7Zr
+IFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIC0gU8O8csO8bSAzMIIB
+IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAim1L/xCIOsP2fpTo6iBkcK4h
+gb46ezzb8R1Sf1n68yJMlaCQvEhOEav7t7WNeoMojCZG2E6VQIdhn8WebYGHV2yK
+O7Rm6sxA/OOqbLLLAdsyv9Lrhc+hDVXDWzhXcLh1xnnRFDDtG1hba+818qEhTsXO
+fJlfbLm4IpNQp81McGq+agV/E5wrHur+R84EpW+sky58K5+eeROR6Oqeyjh1jmKw
+lZMq5d/pXpduIF9fhHpEORlAHLpVK/swsoHvhOPc7Jg4OQOFCKlUAwUp8MmPi+oL
+hmUZEdPpCSPeaJMDyTYcIW7OjGbxmTDY17PDHfiBLqi9ggtm/oLL4eAagsNAgQID
+AQABo0IwQDAdBgNVHQ4EFgQUvYiHyY/2pAoLquvF/pEjnatKijIwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAB18+kmP
+NOm3JpIWmgV050vQbTlswyb2zrgxvMTfvCr4N5EY3ATIZJkrGG2AA1nJrvhY0D7t
+wyOfaTyGOBye79oneNGEN3GKPEs5z35FBtYt2IpNeBLWrcLTy9LQQfMmNkqblWwM
+7uXRQydmwYj3erMgbOqwaSvHIOgMA8RBBZniP+Rr+KCGgceExh/VS4ESshYhLBOh
+gLJeDEoTniDYYkCrkOpkSi+sDQESeUWoL4cZaMjihccwsnX5OD+ywJO0a+IDRM5n
+oN+J1q2MdqMTw5RhK2vZbMEHCiIHhWyFJEapvj+LeISCfiQMnf2BN+MlqO02TpUs
+yZyQ2uypQjyttgI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327
+# Label: "Buypass Class 2 CA 1"
+# Serial: 1
+# MD5 Fingerprint: b8:08:9a:f0:03:cc:1b:0d:c8:6c:0b:76:a1:75:64:23
+# SHA1 Fingerprint: a0:a1:ab:90:c9:fc:84:7b:3b:12:61:e8:97:7d:5f:d3:22:61:d3:cc
+# SHA256 Fingerprint: 0f:4e:9c:dd:26:4b:02:55:50:d1:70:80:63:40:21:4f:e9:44:34:c9:b0:2f:69:7e:c7:10:fc:5f:ea:fb:5e:38
+-----BEGIN CERTIFICATE-----
+MIIDUzCCAjugAwIBAgIBATANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3Mg
+Q2xhc3MgMiBDQSAxMB4XDTA2MTAxMzEwMjUwOVoXDTE2MTAxMzEwMjUwOVowSzEL
+MAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYD
+VQQDDBRCdXlwYXNzIENsYXNzIDIgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAIs8B0XY9t/mx8q6jUPFR42wWsE425KEHK8T1A9vNkYgxC7McXA0
+ojTTNy7Y3Tp3L8DrKehc0rWpkTSHIln+zNvnma+WwajHQN2lFYxuyHyXA8vmIPLX
+l18xoS830r7uvqmtqEyeIWZDO6i88wmjONVZJMHCR3axiFyCO7srpgTXjAePzdVB
+HfCuuCkslFJgNJQ72uA40Z0zPhX0kzLFANq1KWYOOngPIVJfAuWSeyXTkh4vFZ2B
+5J2O6O+JzhRMVB0cgRJNcKi+EAUXfh/RuFdV7c27UsKwHnjCTTZoy1YmwVLBvXb3
+WNVyfh9EdrsAiR0WnVE1703CVu9r4Iw7DekCAwEAAaNCMEAwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUP42aWYv8e3uco684sDntkHGA1sgwDgYDVR0PAQH/BAQD
+AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQAVGn4TirnoB6NLJzKyQJHyIdFkhb5jatLP
+gcIV1Xp+DCmsNx4cfHZSldq1fyOhKXdlyTKdqC5Wq2B2zha0jX94wNWZUYN/Xtm+
+DKhQ7SLHrQVMdvvt7h5HZPb3J31cKA9FxVxiXqaakZG3Uxcu3K1gnZZkOb1naLKu
+BctN518fV4bVIJwo+28TOPX2EZL2fZleHwzoq0QkKXJAPTZSr4xYkHPB7GEseaHs
+h7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5wwDX3OaJdZtB7WZ+oRxKaJyOk
+LY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho
+-----END CERTIFICATE-----
+
+# Issuer: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
+# Subject: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
+# Label: "EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1"
+# Serial: 5525761995591021570
+# MD5 Fingerprint: 2c:20:26:9d:cb:1a:4a:00:85:b5:b7:5a:ae:c2:01:37
+# SHA1 Fingerprint: 8c:96:ba:eb:dd:2b:07:07:48:ee:30:32:66:a0:f3:98:6e:7c:ae:58
+# SHA256 Fingerprint: 35:ae:5b:dd:d8:f7:ae:63:5c:ff:ba:56:82:a8:f0:0b:95:f4:84:62:c7:10:8e:e9:a0:e5:29:2b:07:4a:af:b2
+-----BEGIN CERTIFICATE-----
+MIIF5zCCA8+gAwIBAgIITK9zQhyOdAIwDQYJKoZIhvcNAQEFBQAwgYAxODA2BgNV
+BAMML0VCRyBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
+c8SxMTcwNQYDVQQKDC5FQkcgQmlsacWfaW0gVGVrbm9sb2ppbGVyaSB2ZSBIaXpt
+ZXRsZXJpIEEuxZ4uMQswCQYDVQQGEwJUUjAeFw0wNjA4MTcwMDIxMDlaFw0xNjA4
+MTQwMDMxMDlaMIGAMTgwNgYDVQQDDC9FQkcgRWxla3Ryb25payBTZXJ0aWZpa2Eg
+SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTE3MDUGA1UECgwuRUJHIEJpbGnFn2ltIFRl
+a25vbG9qaWxlcmkgdmUgSGl6bWV0bGVyaSBBLsWeLjELMAkGA1UEBhMCVFIwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDuoIRh0DpqZhAy2DE4f6en5f2h
+4fuXd7hxlugTlkaDT7byX3JWbhNgpQGR4lvFzVcfd2NR/y8927k/qqk153nQ9dAk
+tiHq6yOU/im/+4mRDGSaBUorzAzu8T2bgmmkTPiab+ci2hC6X5L8GCcKqKpE+i4s
+tPtGmggDg3KriORqcsnlZR9uKg+ds+g75AxuetpX/dfreYteIAbTdgtsApWjluTL
+dlHRKJ2hGvxEok3MenaoDT2/F08iiFD9rrbskFBKW5+VQarKD7JK/oCZTqNGFav4
+c0JqwmZ2sQomFd2TkuzbqV9UIlKRcF0T6kjsbgNs2d1s/OsNA/+mgxKb8amTD8Um
+TDGyY5lhcucqZJnSuOl14nypqZoaqsNW2xCaPINStnuWt6yHd6i58mcLlEOzrz5z
++kI2sSXFCjEmN1ZnuqMLfdb3ic1nobc6HmZP9qBVFCVMLDMNpkGMvQQxahByCp0O
+Lna9XvNRiYuoP1Vzv9s6xiQFlpJIqkuNKgPlV5EQ9GooFW5Hd4RcUXSfGenmHmMW
+OeMRFeNYGkS9y8RsZteEBt8w9DeiQyJ50hBs37vmExH8nYQKE3vwO9D8owrXieqW
+fo1IhR5kX9tUoqzVegJ5a9KK8GfaZXINFHDk6Y54jzJ0fFfy1tb0Nokb+Clsi7n2
+l9GkLqq+CxnCRelwXQIDAJ3Zo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB
+/wQEAwIBBjAdBgNVHQ4EFgQU587GT/wWZ5b6SqMHwQSny2re2kcwHwYDVR0jBBgw
+FoAU587GT/wWZ5b6SqMHwQSny2re2kcwDQYJKoZIhvcNAQEFBQADggIBAJuYml2+
+8ygjdsZs93/mQJ7ANtyVDR2tFcU22NU57/IeIl6zgrRdu0waypIN30ckHrMk2pGI
+6YNw3ZPX6bqz3xZaPt7gyPvT/Wwp+BVGoGgmzJNSroIBk5DKd8pNSe/iWtkqvTDO
+TLKBtjDOWU/aWR1qeqRFsIImgYZ29fUQALjuswnoT4cCB64kXPBfrAowzIpAoHME
+wfuJJPaaHFy3PApnNgUIMbOv2AFoKuB4j3TeuFGkjGwgPaL7s9QJ/XvCgKqTbCmY
+Iai7FvOpEl90tYeY8pUm3zTvilORiF0alKM/fCL414i6poyWqD1SNGKfAB5UVUJn
+xk1Gj7sURT0KlhaOEKGXmdXTMIXM3rRyt7yKPBgpaP3ccQfuJDlq+u2lrDgv+R4Q
+DgZxGhBM/nV+/x5XOULK1+EVoVZVWRvRo68R2E7DpSvvkL/A7IITW43WciyTTo9q
+Kd+FPNMN4KIYEsxVL0e3p5sC/kH2iExt2qkBR4NkJ2IQgtYSe14DHzSpyZH+r11t
+hie3I6p1GMog57AP14kOpmciY/SDQSsGS7tY1dHXt7kQY9iJSrSq3RZj9W6+YKH4
+7ejWkE8axsWgKdOnIaj1Wjz3x0miIZpKlVIglnKaZsv30oZDfCK+lvm9AahH3eU7
+QPl1K5srRmSGjR70j/sHd9DqSaIcjVIUpgqT
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=CNNIC ROOT O=CNNIC
+# Subject: CN=CNNIC ROOT O=CNNIC
+# Label: "CNNIC ROOT"
+# Serial: 1228079105
+# MD5 Fingerprint: 21:bc:82:ab:49:c4:13:3b:4b:b2:2b:5c:6b:90:9c:19
+# SHA1 Fingerprint: 8b:af:4c:9b:1d:f0:2a:92:f7:da:12:8e:b9:1b:ac:f4:98:60:4b:6f
+# SHA256 Fingerprint: e2:83:93:77:3d:a8:45:a6:79:f2:08:0c:c7:fb:44:a3:b7:a1:c3:79:2c:b7:eb:77:29:fd:cb:6a:8d:99:ae:a7
+-----BEGIN CERTIFICATE-----
+MIIDVTCCAj2gAwIBAgIESTMAATANBgkqhkiG9w0BAQUFADAyMQswCQYDVQQGEwJD
+TjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwHhcNMDcwNDE2
+MDcwOTE0WhcNMjcwNDE2MDcwOTE0WjAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMF
+Q05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwggEiMA0GCSqGSIb3DQEBAQUAA4IB
+DwAwggEKAoIBAQDTNfc/c3et6FtzF8LRb+1VvG7q6KR5smzDo+/hn7E7SIX1mlwh
+IhAsxYLO2uOabjfhhyzcuQxauohV3/2q2x8x6gHx3zkBwRP9SFIhxFXf2tizVHa6
+dLG3fdfA6PZZxU3Iva0fFNrfWEQlMhkqx35+jq44sDB7R3IJMfAw28Mbdim7aXZO
+V/kbZKKTVrdvmW7bCgScEeOAH8tjlBAKqeFkgjH5jCftppkA9nCTGPihNIaj3XrC
+GHn2emU1z5DrvTOTn1OrczvmmzQgLx3vqR1jGqCA2wMv+SYahtKNu6m+UjqHZ0gN
+v7Sg2Ca+I19zN38m5pIEo3/PIKe38zrKy5nLAgMBAAGjczBxMBEGCWCGSAGG+EIB
+AQQEAwIABzAfBgNVHSMEGDAWgBRl8jGtKvf33VKWCscCwQ7vptU7ETAPBgNVHRMB
+Af8EBTADAQH/MAsGA1UdDwQEAwIB/jAdBgNVHQ4EFgQUZfIxrSr3991SlgrHAsEO
+76bVOxEwDQYJKoZIhvcNAQEFBQADggEBAEs17szkrr/Dbq2flTtLP1se31cpolnK
+OOK5Gv+e5m4y3R6u6jW39ZORTtpC4cMXYFDy0VwmuYK36m3knITnA3kXr5g9lNvH
+ugDnuL8BV8F3RTIMO/G0HAiw/VGgod2aHRM2mm23xzy54cXZF/qD1T0VoDy7Hgvi
+yJA/qIYM/PmLXoXLT1tLYhFHxUV8BS9BsZ4QaRuZluBVeftOhpm4lNqGOGqTo+fL
+buXf6iFViZx9fX+Y9QCJ7uOEwFyWtcVG6kbghVW2G8kS1sHNzYDzAgE8yGnLRUhj
+2JTQ7IUOO04RZfSCjKY9ri4ilAnIXOo8gV0WKgOXFlUJ24pBgp5mmxE=
+-----END CERTIFICATE-----
+
+# Issuer: O=Japanese Government OU=ApplicationCA
+# Subject: O=Japanese Government OU=ApplicationCA
+# Label: "ApplicationCA - Japanese Government"
+# Serial: 49
+# MD5 Fingerprint: 7e:23:4e:5b:a7:a5:b4:25:e9:00:07:74:11:62:ae:d6
+# SHA1 Fingerprint: 7f:8a:b0:cf:d0:51:87:6a:66:f3:36:0f:47:c8:8d:8c:d3:35:fc:74
+# SHA256 Fingerprint: 2d:47:43:7d:e1:79:51:21:5a:12:f3:c5:8e:51:c7:29:a5:80:26:ef:1f:cc:0a:5f:b3:d9:dc:01:2f:60:0d:19
+-----BEGIN CERTIFICATE-----
+MIIDoDCCAoigAwIBAgIBMTANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJKUDEc
+MBoGA1UEChMTSmFwYW5lc2UgR292ZXJubWVudDEWMBQGA1UECxMNQXBwbGljYXRp
+b25DQTAeFw0wNzEyMTIxNTAwMDBaFw0xNzEyMTIxNTAwMDBaMEMxCzAJBgNVBAYT
+AkpQMRwwGgYDVQQKExNKYXBhbmVzZSBHb3Zlcm5tZW50MRYwFAYDVQQLEw1BcHBs
+aWNhdGlvbkNBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp23gdE6H
+j6UG3mii24aZS2QNcfAKBZuOquHMLtJqO8F6tJdhjYq+xpqcBrSGUeQ3DnR4fl+K
+f5Sk10cI/VBaVuRorChzoHvpfxiSQE8tnfWuREhzNgaeZCw7NCPbXCbkcXmP1G55
+IrmTwcrNwVbtiGrXoDkhBFcsovW8R0FPXjQilbUfKW1eSvNNcr5BViCH/OlQR9cw
+FO5cjFW6WY2H/CPek9AEjP3vbb3QesmlOmpyM8ZKDQUXKi17safY1vC+9D/qDiht
+QWEjdnjDuGWk81quzMKq2edY3rZ+nYVunyoKb58DKTCXKB28t89UKU5RMfkntigm
+/qJj5kEW8DOYRwIDAQABo4GeMIGbMB0GA1UdDgQWBBRUWssmP3HMlEYNllPqa0jQ
+k/5CdTAOBgNVHQ8BAf8EBAMCAQYwWQYDVR0RBFIwUKROMEwxCzAJBgNVBAYTAkpQ
+MRgwFgYDVQQKDA/ml6XmnKzlm73mlL/lupwxIzAhBgNVBAsMGuOCouODl+ODquOC
+seODvOOCt+ODp+ODs0NBMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADlqRHZ3ODrso2dGD/mLBqj7apAxzn7s2tGJfHrrLgy9mTLnsCTWw//1sogJ
+hyzjVOGjprIIC8CFqMjSnHH2HZ9g/DgzE+Ge3Atf2hZQKXsvcJEPmbo0NI2VdMV+
+eKlmXb3KIXdCEKxmJj3ekav9FfBv7WxfEPjzFvYDio+nEhEMy/0/ecGc/WLuo89U
+DNErXxc+4z6/wCs+CZv+iKZ+tJIX/COUgb1up8WMwusRRdv4QcmWdupwX3kSa+Sj
+B1oF7ydJzyGfikwJcGapJsErEU4z0g781mzSDjJkaP+tBXhfAx2o45CsJOAPQKdL
+rosot4LKGAfmt1t06SAZf7IbiVQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G3"
+# Serial: 28809105769928564313984085209975885599
+# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
+# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
+# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G2"
+# Serial: 71758320672825410020661621085256472406
+# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
+# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
+# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
+-----BEGIN CERTIFICATE-----
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G3"
+# Serial: 127614157056681299805556476275995414779
+# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
+# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
+# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G2"
+# Serial: 80682863203381065782177908751794619243
+# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
+# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
+# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Universal Root Certification Authority"
+# Serial: 85209574734084581917763752644031726877
+# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
+# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
+# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
+-----BEGIN CERTIFICATE-----
+MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
+vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
+ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
+IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
+IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
+bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
+9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
+H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
+LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
+/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
+rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
+WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
+exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
+DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
+sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
+lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
+7M2CYfE45k+XmCpajQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
+# Serial: 63143484348153506665311985501458640051
+# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
+# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
+# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
+-----BEGIN CERTIFICATE-----
+MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
+U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
+SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
+biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
+GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
+fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
+aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
+kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
+4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
+FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
+# Label: "NetLock Arany (Class Gold) Főtanúsítvány"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G2"
+# Serial: 10000012
+# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a
+# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16
+# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f
+-----BEGIN CERTIFICATE-----
+MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
+DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
+qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
+uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
+Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
+pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
+5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
+UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
+GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
+5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
+6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
+eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
+B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov
+L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG
+SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS
+CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen
+5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897
+IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK
+gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL
++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL
+vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm
+bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk
+N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC
+Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
+ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig O=Disig a.s.
+# Subject: CN=CA Disig O=Disig a.s.
+# Label: "CA Disig"
+# Serial: 1
+# MD5 Fingerprint: 3f:45:96:39:e2:50:87:f7:bb:fe:98:0c:3c:20:98:e6
+# SHA1 Fingerprint: 2a:c8:d5:8b:57:ce:bf:2f:49:af:f2:fc:76:8f:51:14:62:90:7a:41
+# SHA256 Fingerprint: 92:bf:51:19:ab:ec:ca:d0:b1:33:2d:c4:e1:d0:5f:ba:75:b5:67:90:44:ee:0c:a2:6e:93:1f:74:4f:2f:33:cf
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBATANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQGEwJTSzET
+MBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UE
+AxMIQ0EgRGlzaWcwHhcNMDYwMzIyMDEzOTM0WhcNMTYwMzIyMDEzOTM0WjBKMQsw
+CQYDVQQGEwJTSzETMBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcg
+YS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
+ggEKAoIBAQCS9jHBfYj9mQGp2HvycXXxMcbzdWb6UShGhJd4NLxs/LxFWYgmGErE
+Nx+hSkS943EE9UQX4j/8SFhvXJ56CbpRNyIjZkMhsDxkovhqFQ4/61HhVKndBpnX
+mjxUizkDPw/Fzsbrg3ICqB9x8y34dQjbYkzo+s7552oftms1grrijxaSfQUMbEYD
+XcDtab86wYqg6I7ZuUUohwjstMoVvoLdtUSLLa2GDGhibYVW8qwUYzrG0ZmsNHhW
+S8+2rT+MitcE5eN4TPWGqvWP+j1scaMtymfraHtuM6kMgiioTGohQBUgDCZbg8Kp
+FhXAJIJdKxatymP2dACw30PEEGBWZ2NFAgMBAAGjgf8wgfwwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUjbJJaJ1yCCW5wCf1UJNWSEZx+Y8wDgYDVR0PAQH/BAQD
+AgEGMDYGA1UdEQQvMC2BE2Nhb3BlcmF0b3JAZGlzaWcuc2uGFmh0dHA6Ly93d3cu
+ZGlzaWcuc2svY2EwZgYDVR0fBF8wXTAtoCugKYYnaHR0cDovL3d3dy5kaXNpZy5z
+ay9jYS9jcmwvY2FfZGlzaWcuY3JsMCygKqAohiZodHRwOi8vY2EuZGlzaWcuc2sv
+Y2EvY3JsL2NhX2Rpc2lnLmNybDAaBgNVHSAEEzARMA8GDSuBHpGT5goAAAABAQEw
+DQYJKoZIhvcNAQEFBQADggEBAF00dGFMrzvY/59tWDYcPQuBDRIrRhCA/ec8J9B6
+yKm2fnQwM6M6int0wHl5QpNt/7EpFIKrIYwvF/k/Ji/1WcbvgAa3mkkp7M5+cTxq
+EEHA9tOasnxakZzArFvITV734VP/Q3f8nktnbNfzg9Gg4H8l37iYC5oyOGwwoPP/
+CBUz91BKez6jPiCp3C9WgArtQVCwyfTssuMmRAAOb54GvCKWU3BlxFAKRmukLyeB
+EicTXxChds6KezfqwzlhA5WYOudsiCUI/HloDYd9Yvi0X/vF2Ey9WLw/Q1vUHgFN
+PGO+I++MzVpQuGhU+QqZMxEA4Z7CRneC9VkGjCFMhwnN5ag=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Juur-SK O=AS Sertifitseerimiskeskus
+# Subject: CN=Juur-SK O=AS Sertifitseerimiskeskus
+# Label: "Juur-SK"
+# Serial: 999181308
+# MD5 Fingerprint: aa:8e:5d:d9:f8:db:0a:58:b7:8d:26:87:6c:82:35:55
+# SHA1 Fingerprint: 40:9d:4b:d9:17:b5:5c:27:b6:9b:64:cb:98:22:44:0d:cd:09:b8:89
+# SHA256 Fingerprint: ec:c3:e9:c3:40:75:03:be:e0:91:aa:95:2f:41:34:8f:f8:8b:aa:86:3b:22:64:be:fa:c8:07:90:15:74:e9:39
+-----BEGIN CERTIFICATE-----
+MIIE5jCCA86gAwIBAgIEO45L/DANBgkqhkiG9w0BAQUFADBdMRgwFgYJKoZIhvcN
+AQkBFglwa2lAc2suZWUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKExlBUyBTZXJ0aWZp
+dHNlZXJpbWlza2Vza3VzMRAwDgYDVQQDEwdKdXVyLVNLMB4XDTAxMDgzMDE0MjMw
+MVoXDTE2MDgyNjE0MjMwMVowXTEYMBYGCSqGSIb3DQEJARYJcGtpQHNrLmVlMQsw
+CQYDVQQGEwJFRTEiMCAGA1UEChMZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1czEQ
+MA4GA1UEAxMHSnV1ci1TSzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AIFxNj4zB9bjMI0TfncyRsvPGbJgMUaXhvSYRqTCZUXP00B841oiqBB4M8yIsdOB
+SvZiF3tfTQou0M+LI+5PAk676w7KvRhj6IAcjeEcjT3g/1tf6mTll+g/mX8MCgkz
+ABpTpyHhOEvWgxutr2TC+Rx6jGZITWYfGAriPrsfB2WThbkasLnE+w0R9vXW+RvH
+LCu3GFH+4Hv2qEivbDtPL+/40UceJlfwUR0zlv/vWT3aTdEVNMfqPxZIe5EcgEMP
+PbgFPtGzlc3Yyg/CQ2fbt5PgIoIuvvVoKIO5wTtpeyDaTpxt4brNj3pssAki14sL
+2xzVWiZbDcDq5WDQn/413z8CAwEAAaOCAawwggGoMA8GA1UdEwEB/wQFMAMBAf8w
+ggEWBgNVHSAEggENMIIBCTCCAQUGCisGAQQBzh8BAQEwgfYwgdAGCCsGAQUFBwIC
+MIHDHoHAAFMAZQBlACAAcwBlAHIAdABpAGYAaQBrAGEAYQB0ACAAbwBuACAAdgDk
+AGwAagBhAHMAdABhAHQAdQBkACAAQQBTAC0AaQBzACAAUwBlAHIAdABpAGYAaQB0
+AHMAZQBlAHIAaQBtAGkAcwBrAGUAcwBrAHUAcwAgAGEAbABhAG0ALQBTAEsAIABz
+AGUAcgB0AGkAZgBpAGsAYQBhAHQAaQBkAGUAIABrAGkAbgBuAGkAdABhAG0AaQBz
+AGUAawBzMCEGCCsGAQUFBwIBFhVodHRwOi8vd3d3LnNrLmVlL2Nwcy8wKwYDVR0f
+BCQwIjAgoB6gHIYaaHR0cDovL3d3dy5zay5lZS9qdXVyL2NybC8wHQYDVR0OBBYE
+FASqekej5ImvGs8KQKcYP2/v6X2+MB8GA1UdIwQYMBaAFASqekej5ImvGs8KQKcY
+P2/v6X2+MA4GA1UdDwEB/wQEAwIB5jANBgkqhkiG9w0BAQUFAAOCAQEAe8EYlFOi
+CfP+JmeaUOTDBS8rNXiRTHyoERF5TElZrMj3hWVcRrs7EKACr81Ptcw2Kuxd/u+g
+kcm2k298gFTsxwhwDY77guwqYHhpNjbRxZyLabVAyJRld/JXIWY7zoVAtjNjGr95
+HvxcHdMdkxuLDF2FvZkwMhgJkVLpfKG6/2SSmuz+Ne6ML678IIbsSt4beDI3poHS
+na9aEhbKmVv8b20OxaAehsmR0FyYgl9jDIpaq9iVpszLita/ZEuOyoqysOkhMp6q
+qIWYNIE5ITuoOlIyPfZrN4YGWhWY3PARZv40ILcD9EEQfTmEeZZyY7aWAuVrua0Z
+TbvGRNs2yyqcjg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Label: "Hongkong Post Root CA 1"
+# Serial: 1000
+# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
+# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
+# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
+-----BEGIN CERTIFICATE-----
+MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
+FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
+Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
+A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
+b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
+jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
+PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
+ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
+nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
+q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
+MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
+mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
+7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
+oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
+EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
+fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
+AmvZWg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Label: "SecureSign RootCA11"
+# Serial: 1
+# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
+# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
+# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACEDICOM Root O=EDICOM OU=PKI
+# Subject: CN=ACEDICOM Root O=EDICOM OU=PKI
+# Label: "ACEDICOM Root"
+# Serial: 7029493972724711941
+# MD5 Fingerprint: 42:81:a0:e2:1c:e3:55:10:de:55:89:42:65:96:22:e6
+# SHA1 Fingerprint: e0:b4:32:2e:b2:f6:a5:68:b6:54:53:84:48:18:4a:50:36:87:43:84
+# SHA256 Fingerprint: 03:95:0f:b4:9a:53:1f:3e:19:91:94:23:98:df:a9:e0:ea:32:d7:ba:1c:dd:9b:c8:5d:b5:7e:d9:40:0b:43:4a
+-----BEGIN CERTIFICATE-----
+MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UE
+AwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00x
+CzAJBgNVBAYTAkVTMB4XDTA4MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEW
+MBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZF
+RElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC
+AgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHkWLn7
+09gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7
+XBZXehuDYAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5P
+Grjm6gSSrj0RuVFCPYewMYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAK
+t0SdE3QrwqXrIhWYENiLxQSfHY9g5QYbm8+5eaA9oiM/Qj9r+hwDezCNzmzAv+Yb
+X79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbkHQl/Sog4P75n/TSW9R28
+MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTTxKJxqvQU
+fecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI
+2Sf23EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyH
+K9caUPgn6C9D4zq92Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEae
+ZAwUswdbxcJzbPEHXEUkFDWug/FqTYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAP
+BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz4SsrSbbXc6GqlPUB53NlTKxQ
+MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU9QHnc2VMrFAw
+RAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNv
+bS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWIm
+fQwng4/F9tqgaHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3
+gvoFNTPhNahXwOf9jU8/kzJPeGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKe
+I6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1PwkzQSulgUV1qzOMPPKC8W64iLgpq0i
+5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1ThCojz2GuHURwCRi
+ipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oIKiMn
+MCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZ
+o5NjEFIqnxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6
+zqylfDJKZ0DcMDQj3dcEI2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacN
+GHk0vFQYXlPKNFHtRQrmjseCNj6nOGOpMCwXEGCSn1WHElkQwg9naRHMTh5+Spqt
+r0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdK
+Z05phkOTOPu220+DkdRgfks+KzgHVZhepA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 6047274297262753887
+# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
+# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
+# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
+MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
+VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
+ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
+AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
+661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
+am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
+ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
+PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
+3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
+SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
+ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
+StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
+Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
+jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Label: "Chambers of Commerce Root - 2008"
+# Serial: 11806822484801597146
+# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7
+# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c
+# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0
+-----BEGIN CERTIFICATE-----
+MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz
+IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz
+MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj
+dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw
+EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp
+MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9
+28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq
+VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q
+DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR
+5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL
+ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a
+Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl
+UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s
++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5
+Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj
+ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx
+hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV
+HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1
++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN
+YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t
+L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy
+ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt
+IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV
+HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w
+DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW
+PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF
+5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
+glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH
+FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2
+pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD
+xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG
+tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq
+jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De
+fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg
+OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ
+d0jQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Label: "Global Chambersign Root - 2008"
+# Serial: 14541511773111788494
+# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3
+# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c
+# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca
+-----BEGIN CERTIFICATE-----
+MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD
+aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx
+MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy
+cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG
+A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl
+BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI
+hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed
+KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7
+G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2
+zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4
+ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG
+HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2
+Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V
+yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e
+beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r
+6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh
+wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog
+zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW
+BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr
+ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp
+ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk
+cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt
+YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC
+CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow
+KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI
+hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ
+UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz
+X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
+fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz
+a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd
+Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd
+SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O
+AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso
+M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge
+v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z
+09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903
+# Subject: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903
+# Label: "Certinomis - Autorité Racine"
+# Serial: 1
+# MD5 Fingerprint: 7f:30:78:8c:03:e3:ca:c9:0a:e2:c9:ea:1e:aa:55:1a
+# SHA1 Fingerprint: 2e:14:da:ec:28:f0:fa:1e:8e:38:9a:4e:ab:eb:26:c0:0a:d3:83:c3
+# SHA256 Fingerprint: fc:bf:e2:88:62:06:f7:2b:27:59:3c:8b:07:02:97:e1:2d:76:9e:d1:0e:d7:93:07:05:a8:09:8e:ff:c1:4d:17
+-----BEGIN CERTIFICATE-----
+MIIFnDCCA4SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJGUjET
+MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxJjAk
+BgNVBAMMHUNlcnRpbm9taXMgLSBBdXRvcml0w6kgUmFjaW5lMB4XDTA4MDkxNzA4
+Mjg1OVoXDTI4MDkxNzA4Mjg1OVowYzELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNl
+cnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMSYwJAYDVQQDDB1DZXJ0
+aW5vbWlzIC0gQXV0b3JpdMOpIFJhY2luZTCCAiIwDQYJKoZIhvcNAQEBBQADggIP
+ADCCAgoCggIBAJ2Fn4bT46/HsmtuM+Cet0I0VZ35gb5j2CN2DpdUzZlMGvE5x4jY
+F1AMnmHawE5V3udauHpOd4cN5bjr+p5eex7Ezyh0x5P1FMYiKAT5kcOrJ3NqDi5N
+8y4oH3DfVS9O7cdxbwlyLu3VMpfQ8Vh30WC8Tl7bmoT2R2FFK/ZQpn9qcSdIhDWe
+rP5pqZ56XjUl+rSnSTV3lqc2W+HN3yNw2F1MpQiD8aYkOBOo7C+ooWfHpi2GR+6K
+/OybDnT0K0kCe5B1jPyZOQE51kqJ5Z52qz6WKDgmi92NjMD2AR5vpTESOH2VwnHu
+7XSu5DaiQ3XV8QCb4uTXzEIDS3h65X27uK4uIJPT5GHfceF2Z5c/tt9qc1pkIuVC
+28+BA5PY9OMQ4HL2AHCs8MF6DwV/zzRpRbWT5BnbUhYjBYkOjUjkJW+zeL9i9Qf6
+lSTClrLooyPCXQP8w9PlfMl1I9f09bze5N/NgL+RiH2nE7Q5uiy6vdFrzPOlKO1E
+nn1So2+WLhl+HPNbxxaOu2B9d2ZHVIIAEWBsMsGoOBvrbpgT1u449fCfDu/+MYHB
+0iSVL1N6aaLwD4ZFjliCK0wi1F6g530mJ0jfJUaNSih8hp75mxpZuWW/Bd22Ql09
+5gBIgl4g9xGC3srYn+Y3RyYe63j3YcNBZFgCQfna4NH4+ej9Uji29YnfAgMBAAGj
+WzBZMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBQN
+jLZh2kS40RR9w759XkjwzspqsDAXBgNVHSAEEDAOMAwGCiqBegFWAgIAAQEwDQYJ
+KoZIhvcNAQEFBQADggIBACQ+YAZ+He86PtvqrxyaLAEL9MW12Ukx9F1BjYkMTv9s
+ov3/4gbIOZ/xWqndIlgVqIrTseYyCYIDbNc/CMf4uboAbbnW/FIyXaR/pDGUu7ZM
+OH8oMDX/nyNTt7buFHAAQCvaR6s0fl6nVjBhK4tDrP22iCj1a7Y+YEq6QpA0Z43q
+619FVDsXrIvkxmUP7tCMXWY5zjKn2BCXwH40nJ+U8/aGH88bc62UeYdocMMzpXDn
+2NU4lG9jeeu/Cg4I58UvD0KgKxRA/yHgBcUn4YQRE7rWhh1BCxMjidPJC+iKunqj
+o3M3NYB9Ergzd0A4wPpeMNLytqOx1qKVl4GbUu1pTP+A5FPbVFsDbVRfsbjvJL1v
+nxHDx2TCDyhihWZeGnuyt++uNckZM6i4J9szVb9o4XVIRFb7zdNIu0eJOqxp9YDG
+5ERQL1TEqkPFMTFYvZbF6nVsmnWxTfj3l/+WFvKXTej28xH5On2KOG4Ey+HTRRWq
+pdEdnV1j6CTmNhTih60bWfVEm/vXd3wfAXBioSAaosUaKPQhA+4u2cGA6rnZgtZb
+dsLLO7XSAPCjDuGtbkD326C00EauFddEwk01+dIL8hf2rGbVJLJP0RyZwG71fet0
+BLj5TXcJ17TPBzAJ8bgAVtkXFhYKK4bfjwEZGuW7gmP/vgt2Fl43N+bYdJeimUV5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA
+# Subject: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA
+# Label: "Root CA Generalitat Valenciana"
+# Serial: 994436456
+# MD5 Fingerprint: 2c:8c:17:5e:b1:54:ab:93:17:b5:36:5a:db:d1:c6:f2
+# SHA1 Fingerprint: a0:73:e5:c5:bd:43:61:0d:86:4c:21:13:0a:85:58:57:cc:9c:ea:46
+# SHA256 Fingerprint: 8c:4e:df:d0:43:48:f3:22:96:9e:7e:29:a4:cd:4d:ca:00:46:55:06:1c:16:e1:b0:76:42:2e:f3:42:ad:63:0e
+-----BEGIN CERTIFICATE-----
+MIIGizCCBXOgAwIBAgIEO0XlaDANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJF
+UzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJ
+R1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwHhcN
+MDEwNzA2MTYyMjQ3WhcNMjEwNzAxMTUyMjQ3WjBoMQswCQYDVQQGEwJFUzEfMB0G
+A1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScw
+JQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGKqtXETcvIorKA3Qdyu0togu8M1JAJke+
+WmmmO3I2F0zo37i7L3bhQEZ0ZQKQUgi0/6iMweDHiVYQOTPvaLRfX9ptI6GJXiKj
+SgbwJ/BXufjpTjJ3Cj9BZPPrZe52/lSqfR0grvPXdMIKX/UIKFIIzFVd0g/bmoGl
+u6GzwZTNVOAydTGRGmKy3nXiz0+J2ZGQD0EbtFpKd71ng+CT516nDOeB0/RSrFOy
+A8dEJvt55cs0YFAQexvba9dHq198aMpunUEDEO5rmXteJajCq+TA81yc477OMUxk
+Hl6AovWDfgzWyoxVjr7gvkkHD6MkQXpYHYTqWBLI4bft75PelAgxAgMBAAGjggM7
+MIIDNzAyBggrBgEFBQcBAQQmMCQwIgYIKwYBBQUHMAGGFmh0dHA6Ly9vY3NwLnBr
+aS5ndmEuZXMwEgYDVR0TAQH/BAgwBgEB/wIBAjCCAjQGA1UdIASCAiswggInMIIC
+IwYKKwYBBAG/VQIBADCCAhMwggHoBggrBgEFBQcCAjCCAdoeggHWAEEAdQB0AG8A
+cgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAFIA
+YQDtAHoAIABkAGUAIABsAGEAIABHAGUAbgBlAHIAYQBsAGkAdABhAHQAIABWAGEA
+bABlAG4AYwBpAGEAbgBhAC4ADQAKAEwAYQAgAEQAZQBjAGwAYQByAGEAYwBpAPMA
+bgAgAGQAZQAgAFAAcgDhAGMAdABpAGMAYQBzACAAZABlACAAQwBlAHIAdABpAGYA
+aQBjAGEAYwBpAPMAbgAgAHEAdQBlACAAcgBpAGcAZQAgAGUAbAAgAGYAdQBuAGMA
+aQBvAG4AYQBtAGkAZQBuAHQAbwAgAGQAZQAgAGwAYQAgAHAAcgBlAHMAZQBuAHQA
+ZQAgAEEAdQB0AG8AcgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEA
+YwBpAPMAbgAgAHMAZQAgAGUAbgBjAHUAZQBuAHQAcgBhACAAZQBuACAAbABhACAA
+ZABpAHIAZQBjAGMAaQDzAG4AIAB3AGUAYgAgAGgAdAB0AHAAOgAvAC8AdwB3AHcA
+LgBwAGsAaQAuAGcAdgBhAC4AZQBzAC8AYwBwAHMwJQYIKwYBBQUHAgEWGWh0dHA6
+Ly93d3cucGtpLmd2YS5lcy9jcHMwHQYDVR0OBBYEFHs100DSHHgZZu90ECjcPk+y
+eAT8MIGVBgNVHSMEgY0wgYqAFHs100DSHHgZZu90ECjcPk+yeAT8oWykajBoMQsw
+CQYDVQQGEwJFUzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0G
+A1UECxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVu
+Y2lhbmGCBDtF5WgwDQYJKoZIhvcNAQEFBQADggEBACRhTvW1yEICKrNcda3Fbcrn
+lD+laJWIwVTAEGmiEi8YPyVQqHxK6sYJ2fR1xkDar1CdPaUWu20xxsdzCkj+IHLt
+b8zog2EWRpABlUt9jppSCS/2bxzkoXHPjCpaF3ODR00PNvsETUlR4hTJZGH71BTg
+9J63NI8KJr2XXPR5OkowGcytT6CYirQxlyric21+eLj4iIlPsSKRZEv1UN4D2+XF
+ducTZnV+ZfsBn5OHiJ35Rld8TWCvmHMTI6QgkYH60GFmuH3Rr9ZvHmw96RH9qfmC
+IoaZM3Fa6hlXPZHNqcCjbgcTpsnt+GijnsNacgmHKNHEc8RzGF9QdRYxn7fofMM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. Datenverkehr GmbH OU=A-Trust-nQual-03
+# Subject: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. Datenverkehr GmbH OU=A-Trust-nQual-03
+# Label: "A-Trust-nQual-03"
+# Serial: 93214
+# MD5 Fingerprint: 49:63:ae:27:f4:d5:95:3d:d8:db:24:86:b8:9c:07:53
+# SHA1 Fingerprint: d3:c0:63:f2:19:ed:07:3e:34:ad:5d:75:0b:32:76:29:ff:d5:9a:f2
+# SHA256 Fingerprint: 79:3c:bf:45:59:b9:fd:e3:8a:b2:2d:f1:68:69:f6:98:81:ae:14:c4:b0:13:9a:c7:88:a7:8a:1a:fc:ca:02:fb
+-----BEGIN CERTIFICATE-----
+MIIDzzCCAregAwIBAgIDAWweMA0GCSqGSIb3DQEBBQUAMIGNMQswCQYDVQQGEwJB
+VDFIMEYGA1UECgw/QS1UcnVzdCBHZXMuIGYuIFNpY2hlcmhlaXRzc3lzdGVtZSBp
+bSBlbGVrdHIuIERhdGVudmVya2VociBHbWJIMRkwFwYDVQQLDBBBLVRydXN0LW5R
+dWFsLTAzMRkwFwYDVQQDDBBBLVRydXN0LW5RdWFsLTAzMB4XDTA1MDgxNzIyMDAw
+MFoXDTE1MDgxNzIyMDAwMFowgY0xCzAJBgNVBAYTAkFUMUgwRgYDVQQKDD9BLVRy
+dXN0IEdlcy4gZi4gU2ljaGVyaGVpdHNzeXN0ZW1lIGltIGVsZWt0ci4gRGF0ZW52
+ZXJrZWhyIEdtYkgxGTAXBgNVBAsMEEEtVHJ1c3QtblF1YWwtMDMxGTAXBgNVBAMM
+EEEtVHJ1c3QtblF1YWwtMDMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCtPWFuA/OQO8BBC4SAzewqo51ru27CQoT3URThoKgtUaNR8t4j8DRE/5TrzAUj
+lUC5B3ilJfYKvUWG6Nm9wASOhURh73+nyfrBJcyFLGM/BWBzSQXgYHiVEEvc+RFZ
+znF/QJuKqiTfC0Li21a8StKlDJu3Qz7dg9MmEALP6iPESU7l0+m0iKsMrmKS1GWH
+2WrX9IWf5DMiJaXlyDO6w8dB3F/GaswADm0yqLaHNgBid5seHzTLkDx4iHQF63n1
+k3Flyp3HaxgtPVxO59X4PzF9j4fsCiIvI+n+u33J4PTs63zEsMMtYrWacdaxaujs
+2e3Vcuy+VwHOBVWf3tFgiBCzAgMBAAGjNjA0MA8GA1UdEwEB/wQFMAMBAf8wEQYD
+VR0OBAoECERqlWdVeRFPMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
+AQEAVdRU0VlIXLOThaq/Yy/kgM40ozRiPvbY7meIMQQDbwvUB/tOdQ/TLtPAF8fG
+KOwGDREkDg6lXb+MshOWcdzUzg4NCmgybLlBMRmrsQd7TZjTXLDR8KdCoLXEjq/+
+8T/0709GAHbrAvv5ndJAlseIOrifEXnzgGWovR/TeIGgUUw3tKZdJXDRZslo+S4R
+FGjxVJgIrCaSD96JntT6s3kr0qN51OyLrIdTaEJMUVF0HhsnLuP1Hyl0Te2v9+GS
+mYHovjrHF1D2t8b8m7CKa9aIA5GPBnc6hQLdmNVDeD/GMBWsm2vLV7eJUYs66MmE
+DNuxUCAKGkq6ahq97BvIxYSazQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2011"
+# Serial: 0
+# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9
+# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d
+# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71
+-----BEGIN CERTIFICATE-----
+MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix
+RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p
+YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw
+NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK
+EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl
+cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz
+dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ
+fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns
+bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD
+75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP
+FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV
+HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp
+5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu
+b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA
+A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p
+6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8
+TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7
+dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys
+Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI
+l7WdmplNsDz4SgCbZN2fOUvRJ9e4
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Trustis Limited OU=Trustis FPS Root CA
+# Subject: O=Trustis Limited OU=Trustis FPS Root CA
+# Label: "Trustis FPS Root CA"
+# Serial: 36053640375399034304724988975563710553
+# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d
+# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04
+# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d
+-----BEGIN CERTIFICATE-----
+MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL
+ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx
+MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc
+MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+
+AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH
+iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj
+vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA
+0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB
+OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/
+BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E
+FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01
+GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW
+zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4
+1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE
+f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F
+jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
+ZetX2fNXlrtIzYE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Label: "StartCom Certification Authority"
+# Serial: 45
+# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16
+# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0
+# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11
+-----BEGIN CERTIFICATE-----
+MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
+Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9
+MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
+U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
+cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
+pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
+OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
+Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
+Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
+HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
+Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
+Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
+26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
+AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul
+F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC
+ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w
+ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk
+aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0
+YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg
+c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93
+d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG
+CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1
+dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF
+wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS
+Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst
+0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc
+pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl
+CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF
+P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK
+1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm
+KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE
+JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ
+8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm
+fyWl8kgAwKQB2j8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd.
+# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd.
+# Label: "StartCom Certification Authority G2"
+# Serial: 59
+# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64
+# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17
+# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95
+-----BEGIN CERTIFICATE-----
+MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1
+OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG
+A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ
+JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD
+vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo
+D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/
+Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW
+RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK
+HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN
+nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM
+0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i
+UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9
+Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg
+TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
+AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL
+BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K
+2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX
+UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl
+6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK
+9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ
+HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI
+wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY
+XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l
+IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo
+hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr
+so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Label: "EE Certification Centre Root CA"
+# Serial: 112324828676200291871926431888494945866
+# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f
+# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7
+# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76
+-----BEGIN CERTIFICATE-----
+MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
+MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
+czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
+CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
+MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
+ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
+b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
+AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
+euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
+bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
+MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
+1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
+zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
+BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
+BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
+v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
+E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
+uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
+iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
+GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007
+# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007
+# Label: "TURKTRUST Certificate Services Provider Root 2007"
+# Serial: 1
+# MD5 Fingerprint: 2b:70:20:56:86:82:a0:18:c8:07:53:12:28:70:21:72
+# SHA1 Fingerprint: f1:7f:6f:b6:31:dc:99:e3:a3:c8:7f:fe:1c:f1:81:10:88:d9:60:33
+# SHA256 Fingerprint: 97:8c:d9:66:f2:fa:a0:7b:a7:aa:95:00:d9:c0:2e:9d:77:f2:cd:ad:a6:ad:6b:a7:4a:f4:b9:1c:66:59:3c:50
+-----BEGIN CERTIFICATE-----
+MIIEPTCCAyWgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvzE/MD0GA1UEAww2VMOc
+UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
+c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV4wXAYDVQQKDFVUw5xS
+S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg
+SGl6bWV0bGVyaSBBLsWeLiAoYykgQXJhbMSxayAyMDA3MB4XDTA3MTIyNTE4Mzcx
+OVoXDTE3MTIyMjE4MzcxOVowgb8xPzA9BgNVBAMMNlTDnFJLVFJVU1QgRWxla3Ry
+b25payBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTELMAkGA1UEBhMC
+VFIxDzANBgNVBAcMBkFua2FyYTFeMFwGA1UECgxVVMOcUktUUlVTVCBCaWxnaSDE
+sGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkgQS7F
+ni4gKGMpIEFyYWzEsWsgMjAwNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAKu3PgqMyKVYFeaK7yc9SrToJdPNM8Ig3BnuiD9NYvDdE3ePYakqtdTyuTFY
+KTsvP2qcb3N2Je40IIDu6rfwxArNK4aUyeNgsURSsloptJGXg9i3phQvKUmi8wUG
++7RP2qFsmmaf8EMJyupyj+sA1zU511YXRxcw9L6/P8JorzZAwan0qafoEGsIiveG
+HtyaKhUG9qPw9ODHFNRRf8+0222vR5YXm3dx2KdxnSQM9pQ/hTEST7ruToK4uT6P
+IzdezKKqdfcYbwnTrqdUKDT74eA7YH2gvnmJhsifLfkKS8RQouf9eRbHegsYz85M
+733WB2+Y8a+xwXrXgTW4qhe04MsCAwEAAaNCMEAwHQYDVR0OBBYEFCnFkKslrxHk
+Yb+j/4hhkeYO/pyBMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G
+CSqGSIb3DQEBBQUAA4IBAQAQDdr4Ouwo0RSVgrESLFF6QSU2TJ/sPx+EnWVUXKgW
+AkD6bho3hO9ynYYKVZ1WKKxmLNA6VpM0ByWtCLCPyA8JWcqdmBzlVPi5RX9ql2+I
+aE1KBiY3iAIOtsbWcpnOa3faYjGkVh+uX4132l32iPwa2Z61gfAyuOOI0JzzaqC5
+mxRZNTZPz/OOXl0XrRWV2N2y1RVuAE6zS89mlOTgzbUF2mNXi+WzqtvALhyQRNsa
+XRik7r4EW5nVcV9VZWRi1aKbBFmGyGJ353yCRWo9F7/snXUMrqNvWtMvmDb08PUZ
+qxFdyKbjKlhqQgnDvZImZjINXQhVdP+MmNAKpoRq0Tl9
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Raiz del Estado Venezolano O=Sistema Nacional de Certificacion Electronica OU=Superintendencia de Servicios de Certificacion Electronica
+# Subject: CN=PSCProcert O=Sistema Nacional de Certificacion Electronica OU=Proveedor de Certificados PROCERT
+# Label: "PSCProcert"
+# Serial: 11
+# MD5 Fingerprint: e6:24:e9:12:01:ae:0c:de:8e:85:c4:ce:a3:12:dd:ec
+# SHA1 Fingerprint: 70:c1:8d:74:b4:28:81:0a:e4:fd:a5:75:d7:01:9f:99:b0:3d:50:74
+# SHA256 Fingerprint: 3c:fc:3c:14:d1:f6:84:ff:17:e3:8c:43:ca:44:0c:00:b9:67:ec:93:3e:8b:fe:06:4c:a1:d7:2c:90:f2:ad:b0
+-----BEGIN CERTIFICATE-----
+MIIJhjCCB26gAwIBAgIBCzANBgkqhkiG9w0BAQsFADCCAR4xPjA8BgNVBAMTNUF1
+dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIFJhaXogZGVsIEVzdGFkbyBWZW5lem9s
+YW5vMQswCQYDVQQGEwJWRTEQMA4GA1UEBxMHQ2FyYWNhczEZMBcGA1UECBMQRGlz
+dHJpdG8gQ2FwaXRhbDE2MDQGA1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0
+aWZpY2FjaW9uIEVsZWN0cm9uaWNhMUMwQQYDVQQLEzpTdXBlcmludGVuZGVuY2lh
+IGRlIFNlcnZpY2lvcyBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9uaWNhMSUwIwYJ
+KoZIhvcNAQkBFhZhY3JhaXpAc3VzY2VydGUuZ29iLnZlMB4XDTEwMTIyODE2NTEw
+MFoXDTIwMTIyNTIzNTk1OVowgdExJjAkBgkqhkiG9w0BCQEWF2NvbnRhY3RvQHBy
+b2NlcnQubmV0LnZlMQ8wDQYDVQQHEwZDaGFjYW8xEDAOBgNVBAgTB01pcmFuZGEx
+KjAoBgNVBAsTIVByb3ZlZWRvciBkZSBDZXJ0aWZpY2Fkb3MgUFJPQ0VSVDE2MDQG
+A1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9u
+aWNhMQswCQYDVQQGEwJWRTETMBEGA1UEAxMKUFNDUHJvY2VydDCCAiIwDQYJKoZI
+hvcNAQEBBQADggIPADCCAgoCggIBANW39KOUM6FGqVVhSQ2oh3NekS1wwQYalNo9
+7BVCwfWMrmoX8Yqt/ICV6oNEolt6Vc5Pp6XVurgfoCfAUFM+jbnADrgV3NZs+J74
+BCXfgI8Qhd19L3uA3VcAZCP4bsm+lU/hdezgfl6VzbHvvnpC2Mks0+saGiKLt38G
+ieU89RLAu9MLmV+QfI4tL3czkkohRqipCKzx9hEC2ZUWno0vluYC3XXCFCpa1sl9
+JcLB/KpnheLsvtF8PPqv1W7/U0HU9TI4seJfxPmOEO8GqQKJ/+MMbpfg353bIdD0
+PghpbNjU5Db4g7ayNo+c7zo3Fn2/omnXO1ty0K+qP1xmk6wKImG20qCZyFSTXai2
+0b1dCl53lKItwIKOvMoDKjSuc/HUtQy9vmebVOvh+qBa7Dh+PsHMosdEMXXqP+UH
+0quhJZb25uSgXTcYOWEAM11G1ADEtMo88aKjPvM6/2kwLkDd9p+cJsmWN63nOaK/
+6mnbVSKVUyqUtd+tFjiBdWbjxywbk5yqjKPK2Ww8F22c3HxT4CAnQzb5EuE8XL1m
+v6JpIzi4mWCZDlZTOpx+FIywBm/xhnaQr/2v/pDGj59/i5IjnOcVdo/Vi5QTcmn7
+K2FjiO/mpF7moxdqWEfLcU8UC17IAggmosvpr2uKGcfLFFb14dq12fy/czja+eev
+bqQ34gcnAgMBAAGjggMXMIIDEzASBgNVHRMBAf8ECDAGAQH/AgEBMDcGA1UdEgQw
+MC6CD3N1c2NlcnRlLmdvYi52ZaAbBgVghl4CAqASDBBSSUYtRy0yMDAwNDAzNi0w
+MB0GA1UdDgQWBBRBDxk4qpl/Qguk1yeYVKIXTC1RVDCCAVAGA1UdIwSCAUcwggFD
+gBStuyIdxuDSAaj9dlBSk+2YwU2u06GCASakggEiMIIBHjE+MDwGA1UEAxM1QXV0
+b3JpZGFkIGRlIENlcnRpZmljYWNpb24gUmFpeiBkZWwgRXN0YWRvIFZlbmV6b2xh
+bm8xCzAJBgNVBAYTAlZFMRAwDgYDVQQHEwdDYXJhY2FzMRkwFwYDVQQIExBEaXN0
+cml0byBDYXBpdGFsMTYwNAYDVQQKEy1TaXN0ZW1hIE5hY2lvbmFsIGRlIENlcnRp
+ZmljYWNpb24gRWxlY3Ryb25pY2ExQzBBBgNVBAsTOlN1cGVyaW50ZW5kZW5jaWEg
+ZGUgU2VydmljaW9zIGRlIENlcnRpZmljYWNpb24gRWxlY3Ryb25pY2ExJTAjBgkq
+hkiG9w0BCQEWFmFjcmFpekBzdXNjZXJ0ZS5nb2IudmWCAQowDgYDVR0PAQH/BAQD
+AgEGME0GA1UdEQRGMESCDnByb2NlcnQubmV0LnZloBUGBWCGXgIBoAwMClBTQy0w
+MDAwMDKgGwYFYIZeAgKgEgwQUklGLUotMzE2MzUzNzMtNzB2BgNVHR8EbzBtMEag
+RKBChkBodHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9sY3IvQ0VSVElGSUNBRE8t
+UkFJWi1TSEEzODRDUkxERVIuY3JsMCOgIaAfhh1sZGFwOi8vYWNyYWl6LnN1c2Nl
+cnRlLmdvYi52ZTA3BggrBgEFBQcBAQQrMCkwJwYIKwYBBQUHMAGGG2h0dHA6Ly9v
+Y3NwLnN1c2NlcnRlLmdvYi52ZTBBBgNVHSAEOjA4MDYGBmCGXgMBAjAsMCoGCCsG
+AQUFBwIBFh5odHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9kcGMwDQYJKoZIhvcN
+AQELBQADggIBACtZ6yKZu4SqT96QxtGGcSOeSwORR3C7wJJg7ODU523G0+1ng3dS
+1fLld6c2suNUvtm7CpsR72H0xpkzmfWvADmNg7+mvTV+LFwxNG9s2/NkAZiqlCxB
+3RWGymspThbASfzXg0gTB1GEMVKIu4YXx2sviiCtxQuPcD4quxtxj7mkoP3Yldmv
+Wb8lK5jpY5MvYB7Eqvh39YtsL+1+LrVPQA3uvFd359m21D+VJzog1eWuq2w1n8Gh
+HVnchIHuTQfiSLaeS5UtQbHh6N5+LwUeaO6/u5BlOsju6rEYNxxik6SgMexxbJHm
+pHmJWhSnFFAFTKQAVzAswbVhltw+HoSvOULP5dAssSS830DD7X9jSr3hTxJkhpXz
+sOfIt+FTvZLm8wyWuevo5pLtp4EJFAv8lXrPj9Y0TzYS3F7RNHXGRoAvlQSMx4bE
+qCaJqD8Zm4G7UaRKhqsLEQ+xrmNTbSjq3TNWOByyrYDT13K9mmyZY+gAu0F2Bbdb
+mRiKw7gSXFbPVgx96OLP7bx0R/vu0xdOIk9W/1DzLuY5poLWccret9W6aAjtmcz9
+opLLabid+Qqkpj5PkygqYWwHJgD/ll9ohri4zspV4KuxPX+Y1zMOWj3YeMLEYC/H
+YvBhkdI4sPaeVdtAgAUSM84dkpvRabP/v/GSCmE1P93+hvS84Bpxs2Km
+-----END CERTIFICATE-----
+
+# Issuer: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center
+# Subject: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center
+# Label: "China Internet Network Information Center EV Certificates Root"
+# Serial: 1218379777
+# MD5 Fingerprint: 55:5d:63:00:97:bd:6a:97:f5:67:ab:4b:fb:6e:63:15
+# SHA1 Fingerprint: 4f:99:aa:93:fb:2b:d1:37:26:a1:99:4a:ce:7f:f0:05:f2:93:5d:1e
+# SHA256 Fingerprint: 1c:01:c6:f4:db:b2:fe:fc:22:55:8b:2b:ca:32:56:3f:49:84:4a:cf:c3:2b:7b:e4:b0:ff:59:9f:9e:8c:7a:f7
+-----BEGIN CERTIFICATE-----
+MIID9zCCAt+gAwIBAgIESJ8AATANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMC
+Q04xMjAwBgNVBAoMKUNoaW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24g
+Q2VudGVyMUcwRQYDVQQDDD5DaGluYSBJbnRlcm5ldCBOZXR3b3JrIEluZm9ybWF0
+aW9uIENlbnRlciBFViBDZXJ0aWZpY2F0ZXMgUm9vdDAeFw0xMDA4MzEwNzExMjVa
+Fw0zMDA4MzEwNzExMjVaMIGKMQswCQYDVQQGEwJDTjEyMDAGA1UECgwpQ2hpbmEg
+SW50ZXJuZXQgTmV0d29yayBJbmZvcm1hdGlvbiBDZW50ZXIxRzBFBgNVBAMMPkNo
+aW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24gQ2VudGVyIEVWIENlcnRp
+ZmljYXRlcyBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm35z
+7r07eKpkQ0H1UN+U8i6yjUqORlTSIRLIOTJCBumD1Z9S7eVnAztUwYyZmczpwA//
+DdmEEbK40ctb3B75aDFk4Zv6dOtouSCV98YPjUesWgbdYavi7NifFy2cyjw1l1Vx
+zUOFsUcW9SxTgHbP0wBkvUCZ3czY28Sf1hNfQYOL+Q2HklY0bBoQCxfVWhyXWIQ8
+hBouXJE0bhlffxdpxWXvayHG1VA6v2G5BY3vbzQ6sm8UY78WO5upKv23KzhmBsUs
+4qpnHkWnjQRmQvaPK++IIGmPMowUc9orhpFjIpryp9vOiYurXccUwVswah+xt54u
+gQEC7c+WXmPbqOY4twIDAQABo2MwYTAfBgNVHSMEGDAWgBR8cks5x8DbYqVPm6oY
+NJKiyoOCWTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4E
+FgQUfHJLOcfA22KlT5uqGDSSosqDglkwDQYJKoZIhvcNAQEFBQADggEBACrDx0M3
+j92tpLIM7twUbY8opJhJywyA6vPtI2Z1fcXTIWd50XPFtQO3WKwMVC/GVhMPMdoG
+52U7HW8228gd+f2ABsqjPWYWqJ1MFn3AlUa1UeTiH9fqBk1jjZaM7+czV0I664zB
+echNdn3e9rG3geCg+aF4RhcaVpjwTj2rHO3sOdwHSPdj/gauwqRcalsyiMXHM4Ws
+ZkJHwlgkmeHlPuV1LI5D1l08eB6olYIpUNHRFrrvwb562bTYzB5MRuF3sTGrvSrI
+zo9uoV1/A3U05K2JRVRevq4opbs/eHnrc7MKDf2+yfdWrPa37S+bISnHOLaVxATy
+wy39FCqQmbkHzJ8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services
+# Subject: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services
+# Label: "Swisscom Root CA 2"
+# Serial: 40698052477090394928831521023204026294
+# MD5 Fingerprint: 5b:04:69:ec:a5:83:94:63:18:a7:86:d0:e4:f2:6e:19
+# SHA1 Fingerprint: 77:47:4f:c6:30:e4:0f:4c:47:64:3f:84:ba:b8:c6:95:4a:8a:41:ec
+# SHA256 Fingerprint: f0:9b:12:2c:71:14:f4:a0:9b:d4:ea:4f:4a:99:d5:58:b4:6e:4c:25:cd:81:14:0d:29:c0:56:13:91:4c:38:41
+-----BEGIN CERTIFICATE-----
+MIIF2TCCA8GgAwIBAgIQHp4o6Ejy5e/DfEoeWhhntjANBgkqhkiG9w0BAQsFADBk
+MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0
+YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg
+Q0EgMjAeFw0xMTA2MjQwODM4MTRaFw0zMTA2MjUwNzM4MTRaMGQxCzAJBgNVBAYT
+AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp
+Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAyMIICIjAN
+BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlUJOhJ1R5tMJ6HJaI2nbeHCOFvEr
+jw0DzpPMLgAIe6szjPTpQOYXTKueuEcUMncy3SgM3hhLX3af+Dk7/E6J2HzFZ++r
+0rk0X2s682Q2zsKwzxNoysjL67XiPS4h3+os1OD5cJZM/2pYmLcX5BtS5X4HAB1f
+2uY+lQS3aYg5oUFgJWFLlTloYhyxCwWJwDaCFCE/rtuh/bxvHGCGtlOUSbkrRsVP
+ACu/obvLP+DHVxxX6NZp+MEkUp2IVd3Chy50I9AU/SpHWrumnf2U5NGKpV+GY3aF
+y6//SSj8gO1MedK75MDvAe5QQQg1I3ArqRa0jG6F6bYRzzHdUyYb3y1aSgJA/MTA
+tukxGggo5WDDH8SQjhBiYEQN7Aq+VRhxLKX0srwVYv8c474d2h5Xszx+zYIdkeNL
+6yxSNLCK/RJOlrDrcH+eOfdmQrGrrFLadkBXeyq96G4DsguAhYidDMfCd7Camlf0
+uPoTXGiTOmekl9AbmbeGMktg2M7v0Ax/lZ9vh0+Hio5fCHyqW/xavqGRn1V9TrAL
+acywlKinh/LTSlDcX3KwFnUey7QYYpqwpzmqm59m2I2mbJYV4+by+PGDYmy7Velh
+k6M99bFXi08jsJvllGov34zflVEpYKELKeRcVVi3qPyZ7iVNTA6z00yPhOgpD/0Q
+VAKFyPnlw4vP5w8CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw
+FDASBgdghXQBUwIBBgdghXQBUwIBMBIGA1UdEwEB/wQIMAYBAf8CAQcwHQYDVR0O
+BBYEFE0mICKJS9PVpAqhb97iEoHF8TwuMB8GA1UdIwQYMBaAFE0mICKJS9PVpAqh
+b97iEoHF8TwuMA0GCSqGSIb3DQEBCwUAA4ICAQAyCrKkG8t9voJXiblqf/P0wS4R
+fbgZPnm3qKhyN2abGu2sEzsOv2LwnN+ee6FTSA5BesogpxcbtnjsQJHzQq0Qw1zv
+/2BZf82Fo4s9SBwlAjxnffUy6S8w5X2lejjQ82YqZh6NM4OKb3xuqFp1mrjX2lhI
+REeoTPpMSQpKwhI3qEAMw8jh0FcNlzKVxzqfl9NX+Ave5XLzo9v/tdhZsnPdTSpx
+srpJ9csc1fV5yJmz/MFMdOO0vSk3FQQoHt5FRnDsr7p4DooqzgB53MBfGWcsa0vv
+aGgLQ+OswWIJ76bdZWGgr4RVSJFSHMYlkSrQwSIjYVmvRRGFHQEkNI/Ps/8XciAT
+woCqISxxOQ7Qj1zB09GOInJGTB2Wrk9xseEFKZZZ9LuedT3PDTcNYtsmjGOpI99n
+Bjx8Oto0QuFmtEYE3saWmA9LSHokMnWRn6z3aOkquVVlzl1h0ydw2Df+n7mvoC5W
+t6NlUe07qxS/TFED6F+KBZvuim6c779o+sjaC+NCydAXFJy3SuCvkychVSa1ZC+N
+8f+mQAWFBVzKBxlcCxMoTFh/wqXvRdpg065lYZ1Tg3TCrvJcwhbtkj6EPnNgiLx2
+9CzP0H1907he0ZESEOnN3col49XtmS++dYFLJPlFRpTJKSFTnCZFqhMX5OfNeOI5
+wSsSnqaeG8XmDtkx2Q==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services
+# Subject: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services
+# Label: "Swisscom Root EV CA 2"
+# Serial: 322973295377129385374608406479535262296
+# MD5 Fingerprint: 7b:30:34:9f:dd:0a:4b:6b:35:ca:31:51:28:5d:ae:ec
+# SHA1 Fingerprint: e7:a1:90:29:d3:d5:52:dc:0d:0f:c6:92:d3:ea:88:0d:15:2e:1a:6b
+# SHA256 Fingerprint: d9:5f:ea:3c:a4:ee:dc:e7:4c:d7:6e:75:fc:6d:1f:f6:2c:44:1f:0f:a8:bc:77:f0:34:b1:9e:5d:b2:58:01:5d
+-----BEGIN CERTIFICATE-----
+MIIF4DCCA8igAwIBAgIRAPL6ZOJ0Y9ON/RAdBB92ylgwDQYJKoZIhvcNAQELBQAw
+ZzELMAkGA1UEBhMCY2gxETAPBgNVBAoTCFN3aXNzY29tMSUwIwYDVQQLExxEaWdp
+dGFsIENlcnRpZmljYXRlIFNlcnZpY2VzMR4wHAYDVQQDExVTd2lzc2NvbSBSb290
+IEVWIENBIDIwHhcNMTEwNjI0MDk0NTA4WhcNMzEwNjI1MDg0NTA4WjBnMQswCQYD
+VQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2Vy
+dGlmaWNhdGUgU2VydmljZXMxHjAcBgNVBAMTFVN3aXNzY29tIFJvb3QgRVYgQ0Eg
+MjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMT3HS9X6lds93BdY7Bx
+UglgRCgzo3pOCvrY6myLURYaVa5UJsTMRQdBTxB5f3HSek4/OE6zAMaVylvNwSqD
+1ycfMQ4jFrclyxy0uYAyXhqdk/HoPGAsp15XGVhRXrwsVgu42O+LgrQ8uMIkqBPH
+oCE2G3pXKSinLr9xJZDzRINpUKTk4RtiGZQJo/PDvO/0vezbE53PnUgJUmfANykR
+HvvSEaeFGHR55E+FFOtSN+KxRdjMDUN/rhPSays/p8LiqG12W0OfvrSdsyaGOx9/
+5fLoZigWJdBLlzin5M8J0TbDC77aO0RYjb7xnglrPvMyxyuHxuxenPaHZa0zKcQv
+idm5y8kDnftslFGXEBuGCxobP/YCfnvUxVFkKJ3106yDgYjTdLRZncHrYTNaRdHL
+OdAGalNgHa/2+2m8atwBz735j9m9W8E6X47aD0upm50qKGsaCnw8qyIL5XctcfaC
+NYGu+HuB5ur+rPQam3Rc6I8k9l2dRsQs0h4rIWqDJ2dVSqTjyDKXZpBy2uPUZC5f
+46Fq9mDU5zXNysRojddxyNMkM3OxbPlq4SjbX8Y96L5V5jcb7STZDxmPX2MYWFCB
+UWVv8p9+agTnNCRxunZLWB4ZvRVgRaoMEkABnRDixzgHcgplwLa7JSnaFp6LNYth
+7eVxV4O1PHGf40+/fh6Bn0GXAgMBAAGjgYYwgYMwDgYDVR0PAQH/BAQDAgGGMB0G
+A1UdIQQWMBQwEgYHYIV0AVMCAgYHYIV0AVMCAjASBgNVHRMBAf8ECDAGAQH/AgED
+MB0GA1UdDgQWBBRF2aWBbj2ITY1x0kbBbkUe88SAnTAfBgNVHSMEGDAWgBRF2aWB
+bj2ITY1x0kbBbkUe88SAnTANBgkqhkiG9w0BAQsFAAOCAgEAlDpzBp9SSzBc1P6x
+XCX5145v9Ydkn+0UjrgEjihLj6p7jjm02Vj2e6E1CqGdivdj5eu9OYLU43otb98T
+PLr+flaYC/NUn81ETm484T4VvwYmneTwkLbUwp4wLh/vx3rEUMfqe9pQy3omywC0
+Wqu1kx+AiYQElY2NfwmTv9SoqORjbdlk5LgpWgi/UOGED1V7XwgiG/W9mR4U9s70
+WBCCswo9GcG/W6uqmdjyMb3lOGbcWAXH7WMaLgqXfIeTK7KK4/HsGOV1timH59yL
+Gn602MnTihdsfSlEvoqq9X46Lmgxk7lq2prg2+kupYTNHAq4Sgj5nPFhJpiTt3tm
+7JFe3VE/23MPrQRYCd0EApUKPtN236YQHoA96M2kZNEzx5LH4k5E4wnJTsJdhw4S
+nr8PyQUQ3nqjsTzyP6WqJ3mtMX0f/fwZacXduT98zca0wjAefm6S139hdlqP65VN
+vBFuIXxZN5nQBrz5Bm0yFqXZaajh3DyAHmBR3NdUIR7KYndP+tiPsys6DXhyyWhB
+WkdKwqPrGtcKqzwyVcgKEZzfdNbwQBUdyLmPtTbFr/giuMod89a2GQ+fYWVq6nTI
+fI/DT11lgh/ZDYnadXL77/FHZxOzyNEZiCcmmpl5fx7kLD977vHeTYuWl8PVP3wb
+I+2ksx0WckNLIOFZfsLorSa/ovc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R1 O=Disig a.s.
+# Subject: CN=CA Disig Root R1 O=Disig a.s.
+# Label: "CA Disig Root R1"
+# Serial: 14052245610670616104
+# MD5 Fingerprint: be:ec:11:93:9a:f5:69:21:bc:d7:c1:c0:67:89:cc:2a
+# SHA1 Fingerprint: 8e:1c:74:f8:a6:20:b9:e5:8a:f4:61:fa:ec:2b:47:56:51:1a:52:c6
+# SHA256 Fingerprint: f9:6f:23:f4:c3:e7:9c:07:7a:46:98:8d:5a:f5:90:06:76:a0:f0:39:cb:64:5d:d1:75:49:b2:16:c8:24:40:ce
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAMMDmu5QkG4oMA0GCSqGSIb3DQEBBQUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIxMB4XDTEyMDcxOTA5MDY1NloXDTQy
+MDcxOTA5MDY1NlowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjEw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCqw3j33Jijp1pedxiy3QRk
+D2P9m5YJgNXoqqXinCaUOuiZc4yd39ffg/N4T0Dhf9Kn0uXKE5Pn7cZ3Xza1lK/o
+OI7bm+V8u8yN63Vz4STN5qctGS7Y1oprFOsIYgrY3LMATcMjfF9DCCMyEtztDK3A
+fQ+lekLZWnDZv6fXARz2m6uOt0qGeKAeVjGu74IKgEH3G8muqzIm1Cxr7X1r5OJe
+IgpFy4QxTaz+29FHuvlglzmxZcfe+5nkCiKxLU3lSCZpq+Kq8/v8kiky6bM+TR8n
+oc2OuRf7JT7JbvN32g0S9l3HuzYQ1VTW8+DiR0jm3hTaYVKvJrT1cU/J19IG32PK
+/yHoWQbgCNWEFVP3Q+V8xaCJmGtzxmjOZd69fwX3se72V6FglcXM6pM6vpmumwKj
+rckWtc7dXpl4fho5frLABaTAgqWjR56M6ly2vGfb5ipN0gTco65F97yLnByn1tUD
+3AjLLhbKXEAz6GfDLuemROoRRRw1ZS0eRWEkG4IupZ0zXWX4Qfkuy5Q/H6MMMSRE
+7cderVC6xkGbrPAXZcD4XW9boAo0PO7X6oifmPmvTiT6l7Jkdtqr9O3jw2Dv1fkC
+yC2fg69naQanMVXVz0tv/wQFx1isXxYb5dKj6zHbHzMVTdDypVP1y+E9Tmgt2BLd
+qvLmTZtJ5cUoobqwWsagtQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUiQq0OJMa5qvum5EY+fU8PjXQ04IwDQYJKoZI
+hvcNAQEFBQADggIBADKL9p1Kyb4U5YysOMo6CdQbzoaz3evUuii+Eq5FLAR0rBNR
+xVgYZk2C2tXck8An4b58n1KeElb21Zyp9HWc+jcSjxyT7Ff+Bw+r1RL3D65hXlaA
+SfX8MPWbTx9BLxyE04nH4toCdu0Jz2zBuByDHBb6lM19oMgY0sidbvW9adRtPTXo
+HqJPYNcHKfyyo6SdbhWSVhlMCrDpfNIZTUJG7L399ldb3Zh+pE3McgODWF3vkzpB
+emOqfDqo9ayk0d2iLbYq/J8BjuIQscTK5GfbVSUZP/3oNn6z4eGBrxEWi1CXYBmC
+AMBrTXO40RMHPuq2MU/wQppt4hF05ZSsjYSVPCGvxdpHyN85YmLLW1AL14FABZyb
+7bq2ix4Eb5YgOe2kfSnbSM6C3NQCjR0EMVrHS/BsYVLXtFHCgWzN4funodKSds+x
+DzdYpPJScWc/DIh4gInByLUfkmO+p3qKViwaqKactV2zY9ATIKHrkWzQjX2v3wvk
+F7mGnjixlAxYjOBVqjtjbZqJYLhkKpLGN/R+Q0O3c+gB53+XD9fyexn9GtePyfqF
+a3qdnom2piiZk4hA9z7NUaPK6u95RyG1/jLix8NRb76AdPCkwzryT+lf3xkK8jsT
+Q6wxpLPn6/wY1gGp8yqPNg7rtLG8t0zJa7+h89n07eLw4+1knj0vllJPgFOL
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
+# Subject: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
+# Label: "E-Tugra Certification Authority"
+# Serial: 7667447206703254355
+# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
+# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
+# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
+BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
+aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
+BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
+Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
+MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
+em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
+ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
+B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
+D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
+Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
+q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
+k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
+fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
+dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
+ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
+zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
+rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
+U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
+Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
+XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
+Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
+HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
+GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
+77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
+vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
+FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
+yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
+AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
+y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
+NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certification Authority of WoSign O=WoSign CA Limited
+# Subject: CN=Certification Authority of WoSign O=WoSign CA Limited
+# Label: "WoSign"
+# Serial: 125491772294754854453622855443212256657
+# MD5 Fingerprint: a1:f2:f9:b5:d2:c8:7a:74:b8:f3:05:f1:d7:e1:84:8d
+# SHA1 Fingerprint: b9:42:94:bf:91:ea:8f:b6:4b:e6:10:97:c7:fb:00:13:59:b6:76:cb
+# SHA256 Fingerprint: 4b:22:d5:a6:ae:c9:9f:3c:db:79:aa:5e:c0:68:38:47:9c:d5:ec:ba:71:64:f7:f2:2d:c1:d6:5f:63:d8:57:08
+-----BEGIN CERTIFICATE-----
+MIIFdjCCA16gAwIBAgIQXmjWEXGUY1BWAGjzPsnFkTANBgkqhkiG9w0BAQUFADBV
+MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxKjAoBgNV
+BAMTIUNlcnRpZmljYXRpb24gQXV0aG9yaXR5IG9mIFdvU2lnbjAeFw0wOTA4MDgw
+MTAwMDFaFw0zOTA4MDgwMTAwMDFaMFUxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFX
+b1NpZ24gQ0EgTGltaXRlZDEqMCgGA1UEAxMhQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgb2YgV29TaWduMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvcqN
+rLiRFVaXe2tcesLea9mhsMMQI/qnobLMMfo+2aYpbxY94Gv4uEBf2zmoAHqLoE1U
+fcIiePyOCbiohdfMlZdLdNiefvAA5A6JrkkoRBoQmTIPJYhTpA2zDxIIFgsDcScc
+f+Hb0v1naMQFXQoOXXDX2JegvFNBmpGN9J42Znp+VsGQX+axaCA2pIwkLCxHC1l2
+ZjC1vt7tj/id07sBMOby8w7gLJKA84X5KIq0VC6a7fd2/BVoFutKbOsuEo/Uz/4M
+x1wdC34FMr5esAkqQtXJTpCzWQ27en7N1QhatH/YHGkR+ScPewavVIMYe+HdVHpR
+aG53/Ma/UkpmRqGyZxq7o093oL5d//xWC0Nyd5DKnvnyOfUNqfTq1+ezEC8wQjch
+zDBwyYaYD8xYTYO7feUapTeNtqwylwA6Y3EkHp43xP901DfA4v6IRmAR3Qg/UDar
+uHqklWJqbrDKaiFaafPz+x1wOZXzp26mgYmhiMU7ccqjUu6Du/2gd/Tkb+dC221K
+mYo0SLwX3OSACCK28jHAPwQ+658geda4BmRkAjHXqc1S+4RFaQkAKtxVi8QGRkvA
+Sh0JWzko/amrzgD5LkhLJuYwTKVYyrREgk/nkR4zw7CT/xH8gdLKH3Ep3XZPkiWv
+HYG3Dy+MwwbMLyejSuQOmbp8HkUff6oZRZb9/D0CAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOFmzw7R8bNLtwYgFP6H
+EtX2/vs+MA0GCSqGSIb3DQEBBQUAA4ICAQCoy3JAsnbBfnv8rWTjMnvMPLZdRtP1
+LOJwXcgu2AZ9mNELIaCJWSQBnfmvCX0KI4I01fx8cpm5o9dU9OpScA7F9dY74ToJ
+MuYhOZO9sxXqT2r09Ys/L3yNWC7F4TmgPsc9SnOeQHrAK2GpZ8nzJLmzbVUsWh2e
+JXLOC62qx1ViC777Y7NhRCOjy+EaDveaBk3e1CNOIZZbOVtXHS9dCF4Jef98l7VN
+g64N1uajeeAz0JmWAjCnPv/So0M/BVoG6kQC2nz4SNAzqfkHx5Xh9T71XXG68pWp
+dIhhWeO/yloTunK0jF02h+mmxTwTv97QRCbut+wucPrXnbes5cVAWubXbHssw1ab
+R80LzvobtCHXt2a49CUwi1wNuepnsvRtrtWhnk/Yn+knArAdBtaP4/tIEp9/EaEQ
+PkxROpaw0RPxx9gmrjrKkcRpnd8BKWRRb2jaFOwIQZeQjdCygPLPwj2/kWjFgGce
+xGATVdVhmVd8upUPYUk6ynW8yQqTP2cOEvIo4jEbwFcW3wh8GcF+Dx+FHgo2fFt+
+J7x6v+Db9NpSvd4MVHAxkUOVyLzwPt0JfjBkUO1/AaQzZ01oT74V77D2AhGiGxMl
+OtzCWfHjXEa7ZywCRuoeSKbmW9m1vFGikpbbqsY3Iqb+zCB0oy2pLmvLwIIRIbWT
+ee5Ehr7XHuQe+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA 沃通根证书 O=WoSign CA Limited
+# Subject: CN=CA 沃通根证书 O=WoSign CA Limited
+# Label: "WoSign China"
+# Serial: 106921963437422998931660691310149453965
+# MD5 Fingerprint: 78:83:5b:52:16:76:c4:24:3b:83:78:e8:ac:da:9a:93
+# SHA1 Fingerprint: 16:32:47:8d:89:f9:21:3a:92:00:85:63:f5:a4:a7:d3:12:40:8a:d6
+# SHA256 Fingerprint: d6:f0:34:bd:94:aa:23:3f:02:97:ec:a4:24:5b:28:39:73:e4:47:aa:59:0f:31:0c:77:f4:8f:df:83:11:22:54
+-----BEGIN CERTIFICATE-----
+MIIFWDCCA0CgAwIBAgIQUHBrzdgT/BtOOzNy0hFIjTANBgkqhkiG9w0BAQsFADBG
+MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxGzAZBgNV
+BAMMEkNBIOayg+mAmuagueivgeS5pjAeFw0wOTA4MDgwMTAwMDFaFw0zOTA4MDgw
+MTAwMDFaMEYxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFXb1NpZ24gQ0EgTGltaXRl
+ZDEbMBkGA1UEAwwSQ0Eg5rKD6YCa5qC56K+B5LmmMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA0EkhHiX8h8EqwqzbdoYGTufQdDTc7WU1/FDWiD+k8H/r
+D195L4mx/bxjWDeTmzj4t1up+thxx7S8gJeNbEvxUNUqKaqoGXqW5pWOdO2XCld1
+9AXbbQs5uQF/qvbW2mzmBeCkTVL829B0txGMe41P/4eDrv8FAxNXUDf+jJZSEExf
+v5RxadmWPgxDT74wwJ85dE8GRV2j1lY5aAfMh09Qd5Nx2UQIsYo06Yms25tO4dnk
+UkWMLhQfkWsZHWgpLFbE4h4TV2TwYeO5Ed+w4VegG63XX9Gv2ystP9Bojg/qnw+L
+NVgbExz03jWhCl3W6t8Sb8D7aQdGctyB9gQjF+BNdeFyb7Ao65vh4YOhn0pdr8yb
++gIgthhid5E7o9Vlrdx8kHccREGkSovrlXLp9glk3Kgtn3R46MGiCWOc76DbT52V
+qyBPt7D3h1ymoOQ3OMdc4zUPLK2jgKLsLl3Az+2LBcLmc272idX10kaO6m1jGx6K
+yX2m+Jzr5dVjhU1zZmkR/sgO9MHHZklTfuQZa/HpelmjbX7FF+Ynxu8b22/8DU0G
+AbQOXDBGVWCvOGU6yke6rCzMRh+yRpY/8+0mBe53oWprfi1tWFxK1I5nuPHa1UaK
+J/kR8slC/k7e3x9cxKSGhxYzoacXGKUN5AXlK8IrC6KVkLn9YDxOiT7nnO4fuwEC
+AwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFOBNv9ybQV0T6GTwp+kVpOGBwboxMA0GCSqGSIb3DQEBCwUAA4ICAQBqinA4
+WbbaixjIvirTthnVZil6Xc1bL3McJk6jfW+rtylNpumlEYOnOXOvEESS5iVdT2H6
+yAa+Tkvv/vMx/sZ8cApBWNromUuWyXi8mHwCKe0JgOYKOoICKuLJL8hWGSbueBwj
+/feTZU7n85iYr83d2Z5AiDEoOqsuC7CsDCT6eiaY8xJhEPRdF/d+4niXVOKM6Cm6
+jBAyvd0zaziGfjk9DgNyp115j0WKWa5bIW4xRtVZjc8VX90xJc/bYNaBRHIpAlf2
+ltTW/+op2znFuCyKGo3Oy+dCMYYFaA6eFN0AkLppRQjbbpCBhqcqBT/mhDn4t/lX
+X0ykeVoQDF7Va/81XwVRHmyjdanPUIPTfPRm94KNPQx96N97qA4bLJyuQHCH2u2n
+FoJavjVsIE4iYdm8UXrNemHcSxH5/mc0zy4EZmFcV5cjjPOGG0jfKq+nwf/Yjj4D
+u9gqsPoUJbJRa4ZDhS4HIxaAjUz7tGM7zMN07RujHv41D198HRaG9Q7DlfEvr10l
+O1Hm13ZBONFLAzkopR6RctR9q5czxNM+4Gm2KHmgCY0c0f9BckgG/Jou5yD5m6Le
+ie2uPAmvylezkolwQOQvT8Jwg0DXJCxr5wkf09XHwQj02w47HAcLQxGEIYbpgNR1
+2KvxAmLBsX5VYc8T1yaw15zLKYs4SgsOkI26oQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 14367148294922964480859022125800977897474
+# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e
+# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb
+# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c
+-----BEGIN CERTIFICATE-----
+MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ
+FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F
+uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs
+ewv4n4Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G3"
+# Serial: 10003001
+# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37
+# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc
+# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX
+DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP
+cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW
+IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX
+xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy
+KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR
+9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az
+5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8
+6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7
+Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP
+bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt
+BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt
+XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd
+INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD
+U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp
+LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8
+Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp
+gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh
+/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A
+fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq
+4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR
+1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/
+QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM
+94B7IWcnMFk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Label: "Staat der Nederlanden EV Root CA"
+# Serial: 10000013
+# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
+# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
+# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
+-----BEGIN CERTIFICATE-----
+MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
+MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
+TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
+b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
+M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
+UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
+Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
+rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
+pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
+j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
+KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
+/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
+cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
+1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
+px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
+MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
+eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
+2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
+v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
+wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
+CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
+vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
+Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
+Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
+eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
+FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
+7uzXLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
+# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
+# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5"
+# Serial: 156233699172481
+# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e
+# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb
+# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78
+-----BEGIN CERTIFICATE-----
+MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE
+BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn
+aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg
+QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg
+SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0
+MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD
+VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8
+dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF
+bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB
+IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom
+/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR
+Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3
+4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z
+5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0
+hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID
+AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX
+SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l
+VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq
+URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf
+peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF
+Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW
++qtB4Uu2NQvAmxU=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
+# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
+# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6"
+# Serial: 138134509972618
+# MD5 Fingerprint: f8:c5:ee:2a:6b:be:95:8d:08:f7:25:4a:ea:71:3e:46
+# SHA1 Fingerprint: 8a:5c:8c:ee:a5:03:e6:05:56:ba:d8:1b:d4:f6:c9:b0:ed:e5:2f:e0
+# SHA256 Fingerprint: 8d:e7:86:55:e1:be:7f:78:47:80:0b:93:f6:94:d2:1d:36:8c:c0:6e:03:3e:7f:ab:04:bb:5e:b9:9d:a6:b7:00
+-----BEGIN CERTIFICATE-----
+MIIEJjCCAw6gAwIBAgIGfaHyZeyKMA0GCSqGSIb3DQEBCwUAMIGxMQswCQYDVQQG
+EwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYDVQQKDERUw5xSS1RSVVNUIEJpbGdp
+IMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBB
+LsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBI
+aXptZXQgU2HEn2xhecSxY8Sxc8SxIEg2MB4XDTEzMTIxODA5MDQxMFoXDTIzMTIx
+NjA5MDQxMFowgbExCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExTTBLBgNV
+BAoMRFTDnFJLVFJVU1QgQmlsZ2kgxLBsZXRpxZ9pbSB2ZSBCaWxpxZ9pbSBHw7x2
+ZW5sacSfaSBIaXptZXRsZXJpIEEuxZ4uMUIwQAYDVQQDDDlUw5xSS1RSVVNUIEVs
+ZWt0cm9uaWsgU2VydGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLEgSDYwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCdsGjW6L0UlqMACprx9MfMkU1x
+eHe59yEmFXNRFpQJRwXiM/VomjX/3EsvMsew7eKC5W/a2uqsxgbPJQ1BgfbBOCK9
++bGlprMBvD9QFyv26WZV1DOzXPhDIHiTVRZwGTLmiddk671IUP320EEDwnS3/faA
+z1vFq6TWlRKb55cTMgPp1KtDWxbtMyJkKbbSk60vbNg9tvYdDjTu0n2pVQ8g9P0p
+u5FbHH3GQjhtQiht1AH7zYiXSX6484P4tZgvsycLSF5W506jM7NE1qXyGJTtHB6p
+lVxiSvgNZ1GpryHV+DKdeboaX+UEVU0TRv/yz3THGmNtwx8XEsMeED5gCLMxAgMB
+AAGjQjBAMB0GA1UdDgQWBBTdVRcT9qzoSCHK77Wv0QAy7Z6MtTAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAb1gNl0Oq
+FlQ+v6nfkkU/hQu7VtMMUszIv3ZnXuaqs6fvuay0EBQNdH49ba3RfdCaqaXKGDsC
+QC4qnFAUi/5XfldcEQlLNkVS9z2sFP1E34uXI9TDwe7UU5X+LEr+DXCqu4svLcsy
+o4LyVN/Y8t3XSHLuSqMplsNEzm61kod2pLv0kmzOLBQJZo6NrRa1xxsJYTvjIKID
+gI6tflEATseWhvtDmHd9KMeP2Cpu54Rvl0EpABZeTeIT6lnAY2c6RPuY/ATTMHKm
+9ocJV612ph1jmv3XZch4gyt1O6VbuA1df74jrlZVlFjvH4GMKrLN5ptjnhi85WsG
+tAuYSyher4hYyw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
+# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
+# Label: "Certinomis - Root CA"
+# Serial: 1
+# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f
+# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8
+# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58
+-----BEGIN CERTIFICATE-----
+MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET
+MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb
+BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz
+MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx
+FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g
+Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2
+fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl
+LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV
+WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF
+TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb
+5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc
+CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri
+wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ
+wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG
+m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4
+F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng
+WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0
+2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF
+AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/
+0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw
+F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS
+g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj
+qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN
+h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/
+ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V
+btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj
+Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ
+8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW
+gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Secure Server CA"
+# Serial: 927650371
+# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee
+# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39
+# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50
+-----BEGIN CERTIFICATE-----
+MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
+VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
+ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
+KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
+ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
+MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
+ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
+b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
+bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
+U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
+A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
+I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
+wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
+AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
+oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
+BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
+dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
+MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
+b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
+dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
+MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
+E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
+MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
+hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
+95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
+2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
+# Label: "ValiCert Class 2 VA"
+# Serial: 1
+# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87
+# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6
+# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
+NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
+dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
+WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
+v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
+UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
+IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
+W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
+# Subject: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
+# Label: "NetLock Express (Class C) Root"
+# Serial: 104
+# MD5 Fingerprint: 4f:eb:f1:f0:70:c2:80:63:5d:58:9f:da:12:3c:a9:c4
+# SHA1 Fingerprint: e3:92:51:2f:0a:cf:f5:05:df:f6:de:06:7f:75:37:e1:65:ea:57:4b
+# SHA256 Fingerprint: 0b:5e:ed:4e:84:64:03:cf:55:e0:65:84:84:40:ed:2a:82:75:8b:f5:b9:aa:1f:25:3d:46:13:cf:a0:80:ff:3f
+-----BEGIN CERTIFICATE-----
+MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUx
+ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
+b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQD
+EytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBDKSBUYW51c2l0dmFueWtpYWRvMB4X
+DTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJBgNVBAYTAkhVMREw
+DwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9u
+c2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMr
+TmV0TG9jayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzAN
+BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNA
+OoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3ZW3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC
+2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63euyucYT2BDMIJTLrdKwW
+RMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQwDgYDVR0P
+AQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEW
+ggJNRklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0
+YWxhbm9zIFN6b2xnYWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFz
+b2sgYWxhcGphbiBrZXN6dWx0LiBBIGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBO
+ZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1iaXp0b3NpdGFzYSB2ZWRpLiBB
+IGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0ZWxlIGF6IGVs
+b2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs
+ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25s
+YXBqYW4gYSBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kg
+a2VyaGV0byBheiBlbGxlbm9yemVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4g
+SU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5kIHRoZSB1c2Ugb2YgdGhpcyBjZXJ0
+aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQUyBhdmFpbGFibGUg
+YXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwgYXQg
+Y3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmY
+ta3UzbM2xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2g
+pO0u9f38vf5NNwgMvOOWgyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4
+Fp1hBWeAyNDYpQcCNJgEjTME1A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
+# Subject: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
+# Label: "NetLock Business (Class B) Root"
+# Serial: 105
+# MD5 Fingerprint: 39:16:aa:b9:6a:41:e1:14:69:df:9e:6c:3b:72:dc:b6
+# SHA1 Fingerprint: 87:9f:4b:ee:05:df:98:58:3b:e3:60:d6:33:e7:0d:3f:fe:98:71:af
+# SHA256 Fingerprint: 39:df:7b:68:2b:7b:93:8f:84:71:54:81:cc:de:8d:60:d8:f2:2e:c5:98:87:7d:0a:aa:c1:2b:59:18:2b:03:12
+-----BEGIN CERTIFICATE-----
+MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUx
+ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
+b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQD
+EylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikgVGFudXNpdHZhbnlraWFkbzAeFw05
+OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYDVQQGEwJIVTERMA8G
+A1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNh
+Z2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5l
+dExvY2sgVXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqG
+SIb3DQEBAQUAA4GNADCBiQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xK
+gZjupNTKihe5In+DCnVMm8Bp2GQ5o+2So/1bXHQawEfKOml2mrriRBf8TKPV/riX
+iK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr1nGTLbO/CVRY7QbrqHvc
+Q7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8E
+BAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1G
+SUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFu
+b3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBh
+bGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExv
+Y2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGln
+aXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0
+IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh
+c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGph
+biBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJo
+ZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBP
+UlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmlj
+YXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBo
+dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNA
+bmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06
+sPgzTEdM43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXa
+n3BukxowOR0w2y7jfLKRstE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKS
+NitjrFgBazMpUIaD8QFI
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
+# Label: "RSA Root Certificate 1"
+# Serial: 1
+# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72
+# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb
+# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
+NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
+cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
+2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
+JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
+Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
+n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
+PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
+# Label: "ValiCert Class 1 VA"
+# Serial: 1
+# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb
+# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e
+# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy
+NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y
+LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+
+TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y
+TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0
+LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW
+I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw
+nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI
+-----END CERTIFICATE-----
+
+# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
+# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
+# Label: "Equifax Secure eBusiness CA 1"
+# Serial: 4
+# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d
+# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41
+# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73
+-----BEGIN CERTIFICATE-----
+MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
+ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
+MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
+LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
+KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
+RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
+WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
+Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
+AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
+eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
+zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
+WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
+/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
+# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
+# Label: "Equifax Secure Global eBusiness CA"
+# Serial: 1
+# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc
+# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45
+# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07
+-----BEGIN CERTIFICATE-----
+MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
+ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
+MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
+dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
+c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
+UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
+58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
+o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
+aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
+A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
+Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
+8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
+-----END CERTIFICATE-----
+
+# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Label: "Thawte Premium Server CA"
+# Serial: 1
+# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a
+# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a
+# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72
+-----BEGIN CERTIFICATE-----
+MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
+dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
+MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
+MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
+A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
+b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
+cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
+bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
+VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
+ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
+uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
+9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
+hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
+pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Label: "Thawte Server CA"
+# Serial: 1
+# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d
+# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c
+# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9
+-----BEGIN CERTIFICATE-----
+MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
+MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
+MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
+DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
+dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
+cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
+DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
+gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
+yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
+L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
+EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
+7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
+QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
+qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
+-----END CERTIFICATE-----
+
+# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
+# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
+# Label: "Verisign Class 3 Public Primary Certification Authority"
+# Serial: 149843929435818692848040365716851702463
+# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67
+# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2
+# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70
+-----BEGIN CERTIFICATE-----
+MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
+cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
+MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
+BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
+YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
+ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
+BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
+I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
+CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do
+lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc
+AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k
+-----END CERTIFICATE-----
+
+# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
+# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
+# Label: "Verisign Class 3 Public Primary Certification Authority"
+# Serial: 80507572722862485515306429940691309246
+# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4
+# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b
+# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05
+-----BEGIN CERTIFICATE-----
+MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
+cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
+MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
+BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
+YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
+ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
+BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
+I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
+CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i
+2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ
+2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ
+-----END CERTIFICATE-----
+
+# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
+# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
+# Label: "Verisign Class 3 Public Primary Certification Authority - G2"
+# Serial: 167285380242319648451154478808036881606
+# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9
+# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f
+# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b
+-----BEGIN CERTIFICATE-----
+MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
+BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
+c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
+MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
+emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
+DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
+FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg
+UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
+YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
+MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
+AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4
+pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0
+13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID
+AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk
+U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i
+F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
+oJ2daZH9
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
+# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
+# Label: "GTE CyberTrust Global Root"
+# Serial: 421
+# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db
+# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74
+# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36
+-----BEGIN CERTIFICATE-----
+MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD
+VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
+bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv
+b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV
+UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
+cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
+b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH
+iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS
+r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4
+04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r
+GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9
+3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P
+lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/
+-----END CERTIFICATE-----
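
The roots above ship as a single PEM file that the accompanying Python code treats as an opaque CA bundle. A minimal sketch of loading such a bundle with the standard library; the path below is simply the one this diff creates and is an assumption, not something derived from the code:

import ssl

# Path created by this diff; adjust if the bundle lives elsewhere.
BUNDLE = "third_party/python/requests/requests/cacert.pem"

# Build a client-side SSL context that trusts only the vendored roots.
ctx = ssl.create_default_context(cafile=BUNDLE)
print(len(ctx.get_ca_certs()), "root certificates loaded")
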
diff --git a/third_party/python/requests/requests/certs.py b/third_party/python/requests/requests/certs.py
new file mode 100644
index 0000000000..07e6475070
--- /dev/null
+++ b/third_party/python/requests/requests/certs.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+certs.py
+~~~~~~~~
+
+This module returns the preferred default CA certificate bundle.
+
+If you are packaging Requests, e.g., for a Linux distribution or a managed
+environment, you can change the definition of where() to return a separately
+packaged CA bundle.
+"""
+import os.path
+
+try:
+ from certifi import where
+except ImportError:
+ def where():
+ """Return the preferred certificate bundle."""
+ # vendored bundle inside Requests
+ return os.path.join(os.path.dirname(__file__), 'cacert.pem')
+
+if __name__ == '__main__':
+ print(where())
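
A short usage sketch of the module above: where() prefers certifi's bundle when certifi is importable and otherwise falls back to the cacert.pem vendored next to certs.py. The printed path is illustrative only.

from requests import certs

bundle = certs.where()
print(bundle)  # e.g. .../requests/cacert.pem when certifi is absent

# requests uses this path as the default value of `verify`, so an explicit
# call like the following is equivalent to the default behaviour:
# requests.get("https://example.org", verify=certs.where())
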
diff --git a/third_party/python/requests/requests/compat.py b/third_party/python/requests/requests/compat.py
new file mode 100644
index 0000000000..70edff7849
--- /dev/null
+++ b/third_party/python/requests/requests/compat.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.compat
+~~~~~~~~~~~~~~~
+
+This module handles import compatibility issues between Python 2 and
+Python 3.
+"""
+
+from .packages import chardet
+
+import sys
+
+# -------
+# Pythons
+# -------
+
+# Syntax sugar.
+_ver = sys.version_info
+
+#: Python 2.x?
+is_py2 = (_ver[0] == 2)
+
+#: Python 3.x?
+is_py3 = (_ver[0] == 3)
+
+try:
+ import simplejson as json
+except (ImportError, SyntaxError):
+    # simplejson does not support Python 3.2; it throws a SyntaxError
+ # because of u'...' Unicode literals.
+ import json
+
+# ---------
+# Specifics
+# ---------
+
+if is_py2:
+ from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
+ from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
+ from urllib2 import parse_http_list
+ import cookielib
+ from Cookie import Morsel
+ from StringIO import StringIO
+ from .packages.urllib3.packages.ordered_dict import OrderedDict
+
+ builtin_str = str
+ bytes = str
+ str = unicode
+ basestring = basestring
+ numeric_types = (int, long, float)
+
+elif is_py3:
+ from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
+ from urllib.request import parse_http_list, getproxies, proxy_bypass
+ from http import cookiejar as cookielib
+ from http.cookies import Morsel
+ from io import StringIO
+ from collections import OrderedDict
+
+ builtin_str = str
+ str = str
+ bytes = bytes
+ basestring = (str, bytes)
+ numeric_types = (int, float)
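
Downstream modules import everything version-sensitive from this shim rather than from the standard library directly. A hedged sketch of how calling code stays version-agnostic; the URL and values shown are illustrative:

from requests.compat import urlparse, urlencode, str, bytes, is_py2

# One import works under both interpreters; compat.py has already picked
# urlparse from `urlparse` (Python 2) or `urllib.parse` (Python 3).
parts = urlparse("http://example.com/search?q=cookies")
print(parts.netloc, parts.query)            # example.com q=cookies

# `str` is always the text type and `bytes` the binary type here, so
# type checks read the same on either interpreter.
print(isinstance(u"caf\xe9", str), is_py2)

# urlencode is likewise re-exported for building query strings.
print(urlencode([("a", 1), ("b", 2)]))      # a=1&b=2
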
diff --git a/third_party/python/requests/requests/cookies.py b/third_party/python/requests/requests/cookies.py
new file mode 100644
index 0000000000..b85fd2b626
--- /dev/null
+++ b/third_party/python/requests/requests/cookies.py
@@ -0,0 +1,487 @@
+# -*- coding: utf-8 -*-
+
+"""
+Compatibility code to be able to use `cookielib.CookieJar` with requests.
+
+requests.utils imports from here, so be careful with imports.
+"""
+
+import copy
+import time
+import calendar
+import collections
+from .compat import cookielib, urlparse, urlunparse, Morsel
+
+try:
+ import threading
+ # grr, pyflakes: this fixes "redefinition of unused 'threading'"
+ threading
+except ImportError:
+ import dummy_threading as threading
+
+
+class MockRequest(object):
+ """Wraps a `requests.Request` to mimic a `urllib2.Request`.
+
+ The code in `cookielib.CookieJar` expects this interface in order to correctly
+ manage cookie policies, i.e., determine whether a cookie can be set, given the
+ domains of the request and the cookie.
+
+ The original request object is read-only. The client is responsible for collecting
+ the new headers via `get_new_headers()` and interpreting them appropriately. You
+ probably want `get_cookie_header`, defined below.
+ """
+
+ def __init__(self, request):
+ self._r = request
+ self._new_headers = {}
+ self.type = urlparse(self._r.url).scheme
+
+ def get_type(self):
+ return self.type
+
+ def get_host(self):
+ return urlparse(self._r.url).netloc
+
+ def get_origin_req_host(self):
+ return self.get_host()
+
+ def get_full_url(self):
+ # Only return the response's URL if the user hadn't set the Host
+ # header
+ if not self._r.headers.get('Host'):
+ return self._r.url
+ # If they did set it, retrieve it and reconstruct the expected domain
+ host = self._r.headers['Host']
+ parsed = urlparse(self._r.url)
+ # Reconstruct the URL as we expect it
+ return urlunparse([
+ parsed.scheme, host, parsed.path, parsed.params, parsed.query,
+ parsed.fragment
+ ])
+
+ def is_unverifiable(self):
+ return True
+
+ def has_header(self, name):
+ return name in self._r.headers or name in self._new_headers
+
+ def get_header(self, name, default=None):
+ return self._r.headers.get(name, self._new_headers.get(name, default))
+
+ def add_header(self, key, val):
+ """cookielib has no legitimate use for this method; add it back if you find one."""
+ raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
+
+ def add_unredirected_header(self, name, value):
+ self._new_headers[name] = value
+
+ def get_new_headers(self):
+ return self._new_headers
+
+ @property
+ def unverifiable(self):
+ return self.is_unverifiable()
+
+ @property
+ def origin_req_host(self):
+ return self.get_origin_req_host()
+
+ @property
+ def host(self):
+ return self.get_host()
+
+
+class MockResponse(object):
+ """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
+
+ ...what? Basically, expose the parsed HTTP headers from the server response
+ the way `cookielib` expects to see them.
+ """
+
+ def __init__(self, headers):
+ """Make a MockResponse for `cookielib` to read.
+
+ :param headers: a httplib.HTTPMessage or analogous carrying the headers
+ """
+ self._headers = headers
+
+ def info(self):
+ return self._headers
+
+ def getheaders(self, name):
+        return self._headers.getheaders(name)
+
+
+def extract_cookies_to_jar(jar, request, response):
+ """Extract the cookies from the response into a CookieJar.
+
+ :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
+ :param request: our own requests.Request object
+ :param response: urllib3.HTTPResponse object
+ """
+ if not (hasattr(response, '_original_response') and
+ response._original_response):
+ return
+    # the _original_response field is the wrapped httplib.HTTPResponse object.
+ req = MockRequest(request)
+ # pull out the HTTPMessage with the headers and put it in the mock:
+ res = MockResponse(response._original_response.msg)
+ jar.extract_cookies(res, req)
+
+
+def get_cookie_header(jar, request):
+ """Produce an appropriate Cookie header string to be sent with `request`, or None."""
+ r = MockRequest(request)
+ jar.add_cookie_header(r)
+ return r.get_new_headers().get('Cookie')
+
+
+def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
+ """Unsets a cookie by name, by default over all domains and paths.
+
+ Wraps CookieJar.clear(), is O(n).
+ """
+ clearables = []
+ for cookie in cookiejar:
+ if cookie.name != name:
+ continue
+ if domain is not None and domain != cookie.domain:
+ continue
+ if path is not None and path != cookie.path:
+ continue
+ clearables.append((cookie.domain, cookie.path, cookie.name))
+
+ for domain, path, name in clearables:
+ cookiejar.clear(domain, path, name)
+
+
+class CookieConflictError(RuntimeError):
+ """There are two cookies that meet the criteria specified in the cookie jar.
+ Use .get and .set and include domain and path args in order to be more specific."""
+
+
+class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
+ """Compatibility class; is a cookielib.CookieJar, but exposes a dict
+ interface.
+
+ This is the CookieJar we create by default for requests and sessions that
+ don't specify one, since some clients may expect response.cookies and
+ session.cookies to support dict operations.
+
+ Requests does not use the dict interface internally; it's just for
+ compatibility with external client code. All requests code should work
+ out of the box with externally provided instances of ``CookieJar``, e.g.
+ ``LWPCookieJar`` and ``FileCookieJar``.
+
+ Unlike a regular CookieJar, this class is pickleable.
+
+ .. warning:: dictionary operations that are normally O(1) may be O(n).
+ """
+ def get(self, name, default=None, domain=None, path=None):
+ """Dict-like get() that also supports optional domain and path args in
+ order to resolve naming collisions from using one cookie jar over
+ multiple domains.
+
+ .. warning:: operation is O(n), not O(1)."""
+ try:
+ return self._find_no_duplicates(name, domain, path)
+ except KeyError:
+ return default
+
+ def set(self, name, value, **kwargs):
+ """Dict-like set() that also supports optional domain and path args in
+ order to resolve naming collisions from using one cookie jar over
+ multiple domains."""
+ # support client code that unsets cookies by assignment of a None value:
+ if value is None:
+ remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
+ return
+
+ if isinstance(value, Morsel):
+ c = morsel_to_cookie(value)
+ else:
+ c = create_cookie(name, value, **kwargs)
+ self.set_cookie(c)
+ return c
+
+ def iterkeys(self):
+ """Dict-like iterkeys() that returns an iterator of names of cookies
+ from the jar. See itervalues() and iteritems()."""
+ for cookie in iter(self):
+ yield cookie.name
+
+ def keys(self):
+ """Dict-like keys() that returns a list of names of cookies from the
+ jar. See values() and items()."""
+ return list(self.iterkeys())
+
+ def itervalues(self):
+ """Dict-like itervalues() that returns an iterator of values of cookies
+ from the jar. See iterkeys() and iteritems()."""
+ for cookie in iter(self):
+ yield cookie.value
+
+ def values(self):
+ """Dict-like values() that returns a list of values of cookies from the
+ jar. See keys() and items()."""
+ return list(self.itervalues())
+
+ def iteritems(self):
+ """Dict-like iteritems() that returns an iterator of name-value tuples
+ from the jar. See iterkeys() and itervalues()."""
+ for cookie in iter(self):
+ yield cookie.name, cookie.value
+
+ def items(self):
+ """Dict-like items() that returns a list of name-value tuples from the
+ jar. See keys() and values(). Allows client-code to call
+ ``dict(RequestsCookieJar)`` and get a vanilla python dict of key value
+ pairs."""
+ return list(self.iteritems())
+
+ def list_domains(self):
+ """Utility method to list all the domains in the jar."""
+ domains = []
+ for cookie in iter(self):
+ if cookie.domain not in domains:
+ domains.append(cookie.domain)
+ return domains
+
+ def list_paths(self):
+ """Utility method to list all the paths in the jar."""
+ paths = []
+ for cookie in iter(self):
+ if cookie.path not in paths:
+ paths.append(cookie.path)
+ return paths
+
+ def multiple_domains(self):
+ """Returns True if there are multiple domains in the jar.
+ Returns False otherwise."""
+ domains = []
+ for cookie in iter(self):
+ if cookie.domain is not None and cookie.domain in domains:
+ return True
+ domains.append(cookie.domain)
+ return False # there is only one domain in jar
+
+ def get_dict(self, domain=None, path=None):
+ """Takes as an argument an optional domain and path and returns a plain
+ old Python dict of name-value pairs of cookies that meet the
+ requirements."""
+ dictionary = {}
+ for cookie in iter(self):
+ if (domain is None or cookie.domain == domain) and (path is None
+ or cookie.path == path):
+ dictionary[cookie.name] = cookie.value
+ return dictionary
+
+ def __getitem__(self, name):
+        """Dict-like __getitem__() for compatibility with client code. Throws
+        an exception if there is more than one cookie with the given name. In
+        that case, use the more explicit get() method instead.
+
+ .. warning:: operation is O(n), not O(1)."""
+
+ return self._find_no_duplicates(name)
+
+ def __setitem__(self, name, value):
+ """Dict-like __setitem__ for compatibility with client code. Throws
+ exception if there is already a cookie of that name in the jar. In that
+ case, use the more explicit set() method instead."""
+
+ self.set(name, value)
+
+ def __delitem__(self, name):
+ """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
+ ``remove_cookie_by_name()``."""
+ remove_cookie_by_name(self, name)
+
+ def set_cookie(self, cookie, *args, **kwargs):
+ if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
+ cookie.value = cookie.value.replace('\\"', '')
+ return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
+
+ def update(self, other):
+ """Updates this jar with cookies from another CookieJar or dict-like"""
+ if isinstance(other, cookielib.CookieJar):
+ for cookie in other:
+ self.set_cookie(copy.copy(cookie))
+ else:
+ super(RequestsCookieJar, self).update(other)
+
+ def _find(self, name, domain=None, path=None):
+ """Requests uses this method internally to get cookie values. Takes as
+ args name and optional domain and path. Returns a cookie.value. If
+ there are conflicting cookies, _find arbitrarily chooses one. See
+ _find_no_duplicates if you want an exception thrown if there are
+ conflicting cookies."""
+ for cookie in iter(self):
+ if cookie.name == name:
+ if domain is None or cookie.domain == domain:
+ if path is None or cookie.path == path:
+ return cookie.value
+
+ raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
+
+ def _find_no_duplicates(self, name, domain=None, path=None):
+        """Both ``__getitem__`` and ``get`` call this function: it's never
+ used elsewhere in Requests. Takes as args name and optional domain and
+ path. Returns a cookie.value. Throws KeyError if cookie is not found
+ and CookieConflictError if there are multiple cookies that match name
+ and optionally domain and path."""
+ toReturn = None
+ for cookie in iter(self):
+ if cookie.name == name:
+ if domain is None or cookie.domain == domain:
+ if path is None or cookie.path == path:
+ if toReturn is not None: # if there are multiple cookies that meet passed in criteria
+ raise CookieConflictError('There are multiple cookies with name, %r' % (name))
+ toReturn = cookie.value # we will eventually return this as long as no cookie conflict
+
+ if toReturn:
+ return toReturn
+ raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
+
+ def __getstate__(self):
+ """Unlike a normal CookieJar, this class is pickleable."""
+ state = self.__dict__.copy()
+ # remove the unpickleable RLock object
+ state.pop('_cookies_lock')
+ return state
+
+ def __setstate__(self, state):
+ """Unlike a normal CookieJar, this class is pickleable."""
+ self.__dict__.update(state)
+ if '_cookies_lock' not in self.__dict__:
+ self._cookies_lock = threading.RLock()
+
+ def copy(self):
+ """Return a copy of this RequestsCookieJar."""
+ new_cj = RequestsCookieJar()
+ new_cj.update(self)
+ return new_cj
+
+
+def _copy_cookie_jar(jar):
+ if jar is None:
+ return None
+
+ if hasattr(jar, 'copy'):
+ # We're dealing with an instance of RequestsCookieJar
+ return jar.copy()
+ # We're dealing with a generic CookieJar instance
+ new_jar = copy.copy(jar)
+ new_jar.clear()
+ for cookie in jar:
+ new_jar.set_cookie(copy.copy(cookie))
+ return new_jar
+
+
+def create_cookie(name, value, **kwargs):
+ """Make a cookie from underspecified parameters.
+
+ By default, the pair of `name` and `value` will be set for the domain ''
+ and sent on every request (this is sometimes called a "supercookie").
+ """
+ result = dict(
+ version=0,
+ name=name,
+ value=value,
+ port=None,
+ domain='',
+ path='/',
+ secure=False,
+ expires=None,
+ discard=True,
+ comment=None,
+ comment_url=None,
+ rest={'HttpOnly': None},
+ rfc2109=False,)
+
+ badargs = set(kwargs) - set(result)
+ if badargs:
+ err = 'create_cookie() got unexpected keyword arguments: %s'
+ raise TypeError(err % list(badargs))
+
+ result.update(kwargs)
+ result['port_specified'] = bool(result['port'])
+ result['domain_specified'] = bool(result['domain'])
+ result['domain_initial_dot'] = result['domain'].startswith('.')
+ result['path_specified'] = bool(result['path'])
+
+ return cookielib.Cookie(**result)
+
+
+def morsel_to_cookie(morsel):
+ """Convert a Morsel object into a Cookie containing the one k/v pair."""
+
+ expires = None
+ if morsel['max-age']:
+ try:
+ expires = int(time.time() + int(morsel['max-age']))
+ except ValueError:
+ raise TypeError('max-age: %s must be integer' % morsel['max-age'])
+ elif morsel['expires']:
+ time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
+ expires = calendar.timegm(
+ time.strptime(morsel['expires'], time_template)
+ )
+ return create_cookie(
+ comment=morsel['comment'],
+ comment_url=bool(morsel['comment']),
+ discard=False,
+ domain=morsel['domain'],
+ expires=expires,
+ name=morsel.key,
+ path=morsel['path'],
+ port=None,
+ rest={'HttpOnly': morsel['httponly']},
+ rfc2109=False,
+ secure=bool(morsel['secure']),
+ value=morsel.value,
+ version=morsel['version'] or 0,
+ )
+
+
+def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
+ """Returns a CookieJar from a key/value dictionary.
+
+ :param cookie_dict: Dict of key/values to insert into CookieJar.
+ :param cookiejar: (optional) A cookiejar to add the cookies to.
+ :param overwrite: (optional) If False, will not replace cookies
+ already in the jar with new ones.
+ """
+ if cookiejar is None:
+ cookiejar = RequestsCookieJar()
+
+ if cookie_dict is not None:
+ names_from_jar = [cookie.name for cookie in cookiejar]
+ for name in cookie_dict:
+ if overwrite or (name not in names_from_jar):
+ cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
+
+ return cookiejar
+
+
+def merge_cookies(cookiejar, cookies):
+ """Add cookies to cookiejar and returns a merged CookieJar.
+
+ :param cookiejar: CookieJar object to add the cookies to.
+ :param cookies: Dictionary or CookieJar object to be added.
+ """
+ if not isinstance(cookiejar, cookielib.CookieJar):
+ raise ValueError('You can only merge into CookieJar')
+
+ if isinstance(cookies, dict):
+ cookiejar = cookiejar_from_dict(
+ cookies, cookiejar=cookiejar, overwrite=False)
+ elif isinstance(cookies, cookielib.CookieJar):
+ try:
+ cookiejar.update(cookies)
+ except AttributeError:
+ for cookie_in_jar in cookies:
+ cookiejar.set_cookie(cookie_in_jar)
+
+ return cookiejar
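
A brief sketch of the dict-style jar defined above; the cookie names, values and domain are made up for illustration:

from requests.cookies import RequestsCookieJar, cookiejar_from_dict

jar = cookiejar_from_dict({"session": "abc123"})           # plain dict -> jar
jar.set("theme", "dark", domain="example.com", path="/")   # scoped cookie

print(jar["session"])      # dict-like lookup; O(n), per the class docstring
print(jar.get_dict())      # e.g. {'session': 'abc123', 'theme': 'dark'}
print(jar.list_domains())  # e.g. ['', 'example.com']
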
diff --git a/third_party/python/requests/requests/exceptions.py b/third_party/python/requests/requests/exceptions.py
new file mode 100644
index 0000000000..ba0b910e31
--- /dev/null
+++ b/third_party/python/requests/requests/exceptions.py
@@ -0,0 +1,114 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.exceptions
+~~~~~~~~~~~~~~~~~~~
+
+This module contains the set of Requests' exceptions.
+
+"""
+from .packages.urllib3.exceptions import HTTPError as BaseHTTPError
+
+
+class RequestException(IOError):
+ """There was an ambiguous exception that occurred while handling your
+ request."""
+
+ def __init__(self, *args, **kwargs):
+ """
+ Initialize RequestException with `request` and `response` objects.
+ """
+ response = kwargs.pop('response', None)
+ self.response = response
+ self.request = kwargs.pop('request', None)
+ if (response is not None and not self.request and
+ hasattr(response, 'request')):
+ self.request = self.response.request
+ super(RequestException, self).__init__(*args, **kwargs)
+
+
+class HTTPError(RequestException):
+ """An HTTP error occurred."""
+
+
+class ConnectionError(RequestException):
+ """A Connection error occurred."""
+
+
+class ProxyError(ConnectionError):
+ """A proxy error occurred."""
+
+
+class SSLError(ConnectionError):
+ """An SSL error occurred."""
+
+
+class Timeout(RequestException):
+ """The request timed out.
+
+ Catching this error will catch both
+ :exc:`~requests.exceptions.ConnectTimeout` and
+ :exc:`~requests.exceptions.ReadTimeout` errors.
+ """
+
+
+class ConnectTimeout(ConnectionError, Timeout):
+ """The request timed out while trying to connect to the remote server.
+
+ Requests that produced this error are safe to retry.
+ """
+
+
+class ReadTimeout(Timeout):
+ """The server did not send any data in the allotted amount of time."""
+
+
+class URLRequired(RequestException):
+ """A valid URL is required to make a request."""
+
+
+class TooManyRedirects(RequestException):
+ """Too many redirects."""
+
+
+class MissingSchema(RequestException, ValueError):
+ """The URL schema (e.g. http or https) is missing."""
+
+
+class InvalidSchema(RequestException, ValueError):
+ """See defaults.py for valid schemas."""
+
+
+class InvalidURL(RequestException, ValueError):
+ """ The URL provided was somehow invalid. """
+
+
+class ChunkedEncodingError(RequestException):
+ """The server declared chunked encoding but sent an invalid chunk."""
+
+
+class ContentDecodingError(RequestException, BaseHTTPError):
+ """Failed to decode response content"""
+
+
+class StreamConsumedError(RequestException, TypeError):
+ """The content for this response was already consumed"""
+
+
+class RetryError(RequestException):
+ """Custom retries logic failed"""
+
+
+# Warnings
+
+
+class RequestsWarning(Warning):
+ """Base warning for Requests."""
+ pass
+
+
+class FileModeWarning(RequestsWarning, DeprecationWarning):
+ """
+ A file was opened in text mode, but Requests determined its binary length.
+ """
+ pass
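
Because every concrete error above derives from RequestException (itself an IOError), callers can catch broadly or narrowly. A hedged sketch; the hostname uses the reserved .invalid TLD purely for illustration:

import requests
from requests.exceptions import ConnectTimeout, Timeout, RequestException

try:
    requests.get("http://example.invalid/", timeout=1)
except ConnectTimeout:
    print("timed out while connecting - safe to retry")
except Timeout:
    print("connected, but the read timed out")
except RequestException as exc:
    print("some other requests-level failure:", exc)
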
diff --git a/third_party/python/requests/requests/hooks.py b/third_party/python/requests/requests/hooks.py
new file mode 100644
index 0000000000..9da94366d7
--- /dev/null
+++ b/third_party/python/requests/requests/hooks.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.hooks
+~~~~~~~~~~~~~~
+
+This module provides the capabilities for the Requests hooks system.
+
+Available hooks:
+
+``response``:
+ The response generated from a Request.
+
+"""
+HOOKS = ['response']
+
+def default_hooks():
+ return dict((event, []) for event in HOOKS)
+
+# TODO: response is the only one
+
+
+def dispatch_hook(key, hooks, hook_data, **kwargs):
+ """Dispatches a hook dictionary on a given piece of data."""
+ hooks = hooks or dict()
+ hooks = hooks.get(key)
+ if hooks:
+ if hasattr(hooks, '__call__'):
+ hooks = [hooks]
+ for hook in hooks:
+ _hook_data = hook(hook_data, **kwargs)
+ if _hook_data is not None:
+ hook_data = _hook_data
+ return hook_data
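
A minimal sketch of the one supported hook: a callable registered under 'response' is run by dispatch_hook() on every Response, and returning None leaves the response unchanged. The URL simply reuses the httpbin address from the docstrings elsewhere in this patch.

import requests

def log_status(response, *args, **kwargs):
    # Hooks receive the Response plus the keyword arguments of the send call.
    print(response.status_code, response.url)

requests.get("http://httpbin.org/get", hooks={"response": log_status})
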
diff --git a/third_party/python/requests/requests/models.py b/third_party/python/requests/requests/models.py
new file mode 100644
index 0000000000..4bcbc5484a
--- /dev/null
+++ b/third_party/python/requests/requests/models.py
@@ -0,0 +1,851 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.models
+~~~~~~~~~~~~~~~
+
+This module contains the primary objects that power Requests.
+"""
+
+import collections
+import datetime
+
+from io import BytesIO, UnsupportedOperation
+from .hooks import default_hooks
+from .structures import CaseInsensitiveDict
+
+from .auth import HTTPBasicAuth
+from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
+from .packages.urllib3.fields import RequestField
+from .packages.urllib3.filepost import encode_multipart_formdata
+from .packages.urllib3.util import parse_url
+from .packages.urllib3.exceptions import (
+ DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
+from .exceptions import (
+ HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
+ ContentDecodingError, ConnectionError, StreamConsumedError)
+from .utils import (
+ guess_filename, get_auth_from_url, requote_uri,
+ stream_decode_response_unicode, to_key_val_list, parse_header_links,
+ iter_slices, guess_json_utf, super_len, to_native_string)
+from .compat import (
+ cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
+ is_py2, chardet, builtin_str, basestring)
+from .compat import json as complexjson
+from .status_codes import codes
+
+#: The set of HTTP status codes that indicate an automatically
+#: processable redirect.
+REDIRECT_STATI = (
+ codes.moved, # 301
+ codes.found, # 302
+ codes.other, # 303
+ codes.temporary_redirect, # 307
+ codes.permanent_redirect, # 308
+)
+
+DEFAULT_REDIRECT_LIMIT = 30
+CONTENT_CHUNK_SIZE = 10 * 1024
+ITER_CHUNK_SIZE = 512
+
+
+class RequestEncodingMixin(object):
+ @property
+ def path_url(self):
+ """Build the path URL to use."""
+
+ url = []
+
+ p = urlsplit(self.url)
+
+ path = p.path
+ if not path:
+ path = '/'
+
+ url.append(path)
+
+ query = p.query
+ if query:
+ url.append('?')
+ url.append(query)
+
+ return ''.join(url)
+
+ @staticmethod
+ def _encode_params(data):
+ """Encode parameters in a piece of data.
+
+ Will successfully encode parameters when passed as a dict or a list of
+ 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
+ if parameters are supplied as a dict.
+ """
+
+ if isinstance(data, (str, bytes)):
+ return data
+ elif hasattr(data, 'read'):
+ return data
+ elif hasattr(data, '__iter__'):
+ result = []
+ for k, vs in to_key_val_list(data):
+ if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
+ vs = [vs]
+ for v in vs:
+ if v is not None:
+ result.append(
+ (k.encode('utf-8') if isinstance(k, str) else k,
+ v.encode('utf-8') if isinstance(v, str) else v))
+ return urlencode(result, doseq=True)
+ else:
+ return data
+
+ @staticmethod
+ def _encode_files(files, data):
+ """Build the body for a multipart/form-data request.
+
+ Will successfully encode files when passed as a dict or a list of
+ 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
+ if parameters are supplied as a dict.
+
+ """
+ if (not files):
+ raise ValueError("Files must be provided.")
+ elif isinstance(data, basestring):
+ raise ValueError("Data must not be a string.")
+
+ new_fields = []
+ fields = to_key_val_list(data or {})
+ files = to_key_val_list(files or {})
+
+ for field, val in fields:
+ if isinstance(val, basestring) or not hasattr(val, '__iter__'):
+ val = [val]
+ for v in val:
+ if v is not None:
+ # Don't call str() on bytestrings: in Py3 it all goes wrong.
+ if not isinstance(v, bytes):
+ v = str(v)
+
+ new_fields.append(
+ (field.decode('utf-8') if isinstance(field, bytes) else field,
+ v.encode('utf-8') if isinstance(v, str) else v))
+
+ for (k, v) in files:
+ # support for explicit filename
+ ft = None
+ fh = None
+ if isinstance(v, (tuple, list)):
+ if len(v) == 2:
+ fn, fp = v
+ elif len(v) == 3:
+ fn, fp, ft = v
+ else:
+ fn, fp, ft, fh = v
+ else:
+ fn = guess_filename(v) or k
+ fp = v
+
+ if isinstance(fp, (str, bytes, bytearray)):
+ fdata = fp
+ else:
+ fdata = fp.read()
+
+ rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
+ rf.make_multipart(content_type=ft)
+ new_fields.append(rf)
+
+ body, content_type = encode_multipart_formdata(new_fields)
+
+ return body, content_type
+
+
+class RequestHooksMixin(object):
+ def register_hook(self, event, hook):
+ """Properly register a hook."""
+
+ if event not in self.hooks:
+ raise ValueError('Unsupported event specified, with event name "%s"' % (event))
+
+ if isinstance(hook, collections.Callable):
+ self.hooks[event].append(hook)
+ elif hasattr(hook, '__iter__'):
+ self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
+
+ def deregister_hook(self, event, hook):
+ """Deregister a previously registered hook.
+ Returns True if the hook existed, False if not.
+ """
+
+ try:
+ self.hooks[event].remove(hook)
+ return True
+ except ValueError:
+ return False
+
+
+class Request(RequestHooksMixin):
+ """A user-created :class:`Request <Request>` object.
+
+ Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
+
+ :param method: HTTP method to use.
+ :param url: URL to send.
+ :param headers: dictionary of headers to send.
+ :param files: dictionary of {filename: fileobject} files to multipart upload.
+ :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
+ :param json: json for the body to attach to the request (if files or data is not specified).
+ :param params: dictionary of URL parameters to append to the URL.
+ :param auth: Auth handler or (user, pass) tuple.
+ :param cookies: dictionary or CookieJar of cookies to attach to this request.
+ :param hooks: dictionary of callback hooks, for internal usage.
+
+ Usage::
+
+ >>> import requests
+ >>> req = requests.Request('GET', 'http://httpbin.org/get')
+ >>> req.prepare()
+ <PreparedRequest [GET]>
+
+ """
+ def __init__(self, method=None, url=None, headers=None, files=None,
+ data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
+
+ # Default empty dicts for dict params.
+ data = [] if data is None else data
+ files = [] if files is None else files
+ headers = {} if headers is None else headers
+ params = {} if params is None else params
+ hooks = {} if hooks is None else hooks
+
+ self.hooks = default_hooks()
+ for (k, v) in list(hooks.items()):
+ self.register_hook(event=k, hook=v)
+
+ self.method = method
+ self.url = url
+ self.headers = headers
+ self.files = files
+ self.data = data
+ self.json = json
+ self.params = params
+ self.auth = auth
+ self.cookies = cookies
+
+ def __repr__(self):
+ return '<Request [%s]>' % (self.method)
+
+ def prepare(self):
+ """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
+ p = PreparedRequest()
+ p.prepare(
+ method=self.method,
+ url=self.url,
+ headers=self.headers,
+ files=self.files,
+ data=self.data,
+ json=self.json,
+ params=self.params,
+ auth=self.auth,
+ cookies=self.cookies,
+ hooks=self.hooks,
+ )
+ return p
+
+
+class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
+ """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
+ containing the exact bytes that will be sent to the server.
+
+ Generated from either a :class:`Request <Request>` object or manually.
+
+ Usage::
+
+ >>> import requests
+ >>> req = requests.Request('GET', 'http://httpbin.org/get')
+      >>> r = req.prepare()
+      >>> r
+      <PreparedRequest [GET]>
+
+ >>> s = requests.Session()
+ >>> s.send(r)
+ <Response [200]>
+
+ """
+
+ def __init__(self):
+ #: HTTP verb to send to the server.
+ self.method = None
+ #: HTTP URL to send the request to.
+ self.url = None
+ #: dictionary of HTTP headers.
+ self.headers = None
+ # The `CookieJar` used to create the Cookie header will be stored here
+ # after prepare_cookies is called
+ self._cookies = None
+ #: request body to send to the server.
+ self.body = None
+ #: dictionary of callback hooks, for internal usage.
+ self.hooks = default_hooks()
+
+ def prepare(self, method=None, url=None, headers=None, files=None,
+ data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
+ """Prepares the entire request with the given parameters."""
+
+ self.prepare_method(method)
+ self.prepare_url(url, params)
+ self.prepare_headers(headers)
+ self.prepare_cookies(cookies)
+ self.prepare_body(data, files, json)
+ self.prepare_auth(auth, url)
+
+ # Note that prepare_auth must be last to enable authentication schemes
+ # such as OAuth to work on a fully prepared request.
+
+ # This MUST go after prepare_auth. Authenticators could add a hook
+ self.prepare_hooks(hooks)
+
+ def __repr__(self):
+ return '<PreparedRequest [%s]>' % (self.method)
+
+ def copy(self):
+ p = PreparedRequest()
+ p.method = self.method
+ p.url = self.url
+ p.headers = self.headers.copy() if self.headers is not None else None
+ p._cookies = _copy_cookie_jar(self._cookies)
+ p.body = self.body
+ p.hooks = self.hooks
+ return p
+
+ def prepare_method(self, method):
+ """Prepares the given HTTP method."""
+ self.method = method
+ if self.method is not None:
+ self.method = to_native_string(self.method.upper())
+
+ def prepare_url(self, url, params):
+ """Prepares the given HTTP URL."""
+ #: Accept objects that have string representations.
+ #: We're unable to blindly call unicode/str functions
+ #: as this will include the bytestring indicator (b'')
+ #: on python 3.x.
+ #: https://github.com/kennethreitz/requests/pull/2238
+ if isinstance(url, bytes):
+ url = url.decode('utf8')
+ else:
+ url = unicode(url) if is_py2 else str(url)
+
+ # Don't do any URL preparation for non-HTTP schemes like `mailto`,
+ # `data` etc to work around exceptions from `url_parse`, which
+ # handles RFC 3986 only.
+ if ':' in url and not url.lower().startswith('http'):
+ self.url = url
+ return
+
+ # Support for unicode domain names and paths.
+ try:
+ scheme, auth, host, port, path, query, fragment = parse_url(url)
+ except LocationParseError as e:
+ raise InvalidURL(*e.args)
+
+ if not scheme:
+ error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
+ error = error.format(to_native_string(url, 'utf8'))
+
+ raise MissingSchema(error)
+
+ if not host:
+ raise InvalidURL("Invalid URL %r: No host supplied" % url)
+
+ # Only want to apply IDNA to the hostname
+ try:
+ host = host.encode('idna').decode('utf-8')
+ except UnicodeError:
+ raise InvalidURL('URL has an invalid label.')
+
+ # Carefully reconstruct the network location
+ netloc = auth or ''
+ if netloc:
+ netloc += '@'
+ netloc += host
+ if port:
+ netloc += ':' + str(port)
+
+ # Bare domains aren't valid URLs.
+ if not path:
+ path = '/'
+
+ if is_py2:
+ if isinstance(scheme, str):
+ scheme = scheme.encode('utf-8')
+ if isinstance(netloc, str):
+ netloc = netloc.encode('utf-8')
+ if isinstance(path, str):
+ path = path.encode('utf-8')
+ if isinstance(query, str):
+ query = query.encode('utf-8')
+ if isinstance(fragment, str):
+ fragment = fragment.encode('utf-8')
+
+ if isinstance(params, (str, bytes)):
+ params = to_native_string(params)
+
+ enc_params = self._encode_params(params)
+ if enc_params:
+ if query:
+ query = '%s&%s' % (query, enc_params)
+ else:
+ query = enc_params
+
+ url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
+ self.url = url
+
+ def prepare_headers(self, headers):
+ """Prepares the given HTTP headers."""
+
+ if headers:
+ self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
+ else:
+ self.headers = CaseInsensitiveDict()
+
+ def prepare_body(self, data, files, json=None):
+ """Prepares the given HTTP body data."""
+
+        # Check if file, file-like object, generator, or iterator.
+ # If not, run through normal process.
+
+ # Nottin' on you.
+ body = None
+ content_type = None
+ length = None
+
+ if not data and json is not None:
+ content_type = 'application/json'
+ body = complexjson.dumps(json)
+
+ is_stream = all([
+ hasattr(data, '__iter__'),
+ not isinstance(data, (basestring, list, tuple, dict))
+ ])
+
+ try:
+ length = super_len(data)
+ except (TypeError, AttributeError, UnsupportedOperation):
+ length = None
+
+ if is_stream:
+ body = data
+
+ if files:
+ raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
+
+ if length:
+ self.headers['Content-Length'] = builtin_str(length)
+ else:
+ self.headers['Transfer-Encoding'] = 'chunked'
+ else:
+ # Multi-part file uploads.
+ if files:
+ (body, content_type) = self._encode_files(files, data)
+ else:
+ if data:
+ body = self._encode_params(data)
+ if isinstance(data, basestring) or hasattr(data, 'read'):
+ content_type = None
+ else:
+ content_type = 'application/x-www-form-urlencoded'
+
+ self.prepare_content_length(body)
+
+ # Add content-type if it wasn't explicitly provided.
+ if content_type and ('content-type' not in self.headers):
+ self.headers['Content-Type'] = content_type
+
+ self.body = body
+
+ def prepare_content_length(self, body):
+ if hasattr(body, 'seek') and hasattr(body, 'tell'):
+ body.seek(0, 2)
+ self.headers['Content-Length'] = builtin_str(body.tell())
+ body.seek(0, 0)
+ elif body is not None:
+ l = super_len(body)
+ if l:
+ self.headers['Content-Length'] = builtin_str(l)
+ elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None):
+ self.headers['Content-Length'] = '0'
+
+ def prepare_auth(self, auth, url=''):
+ """Prepares the given HTTP auth data."""
+
+ # If no Auth is explicitly provided, extract it from the URL first.
+ if auth is None:
+ url_auth = get_auth_from_url(self.url)
+ auth = url_auth if any(url_auth) else None
+
+ if auth:
+ if isinstance(auth, tuple) and len(auth) == 2:
+ # special-case basic HTTP auth
+ auth = HTTPBasicAuth(*auth)
+
+ # Allow auth to make its changes.
+ r = auth(self)
+
+ # Update self to reflect the auth changes.
+ self.__dict__.update(r.__dict__)
+
+ # Recompute Content-Length
+ self.prepare_content_length(self.body)
+
+ def prepare_cookies(self, cookies):
+ """Prepares the given HTTP cookie data.
+
+ This function eventually generates a ``Cookie`` header from the
+ given cookies using cookielib. Due to cookielib's design, the header
+ will not be regenerated if it already exists, meaning this function
+ can only be called once for the life of the
+ :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
+ to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
+ header is removed beforehand."""
+
+ if isinstance(cookies, cookielib.CookieJar):
+ self._cookies = cookies
+ else:
+ self._cookies = cookiejar_from_dict(cookies)
+
+ cookie_header = get_cookie_header(self._cookies, self)
+ if cookie_header is not None:
+ self.headers['Cookie'] = cookie_header
+
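+    # A minimal sketch of the one-call behaviour of prepare_cookies above
+    # (the names and values below are illustrative, not part of this module):
+    #
+    #     req = PreparedRequest()
+    #     req.prepare(method='GET', url='http://example.com/', cookies={'k': 'v'})
+    #     req.headers.get('Cookie')            # e.g. 'k=v'
+    #     req.prepare_cookies({'k2': 'v2'})    # no effect: 'Cookie' already set
+    #     del req.headers['Cookie']
+    #     req.prepare_cookies({'k2': 'v2'})    # header regenerated from the new jar
+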
+ def prepare_hooks(self, hooks):
+ """Prepares the given hooks."""
+ # hooks can be passed as None to the prepare method and to this
+ # method. To prevent iterating over None, simply use an empty list
+ # if hooks is False-y
+ hooks = hooks or []
+ for event in hooks:
+ self.register_hook(event, hooks[event])
+
+
+class Response(object):
+ """The :class:`Response <Response>` object, which contains a
+ server's response to an HTTP request.
+ """
+
+ __attrs__ = [
+ '_content', 'status_code', 'headers', 'url', 'history',
+ 'encoding', 'reason', 'cookies', 'elapsed', 'request'
+ ]
+
+ def __init__(self):
+ super(Response, self).__init__()
+
+ self._content = False
+ self._content_consumed = False
+
+ #: Integer Code of responded HTTP Status, e.g. 404 or 200.
+ self.status_code = None
+
+ #: Case-insensitive Dictionary of Response Headers.
+ #: For example, ``headers['content-encoding']`` will return the
+ #: value of a ``'Content-Encoding'`` response header.
+ self.headers = CaseInsensitiveDict()
+
+ #: File-like object representation of response (for advanced usage).
+ #: Use of ``raw`` requires that ``stream=True`` be set on the request.
+ # This requirement does not apply for use internally to Requests.
+ self.raw = None
+
+ #: Final URL location of Response.
+ self.url = None
+
+ #: Encoding to decode with when accessing r.text.
+ self.encoding = None
+
+ #: A list of :class:`Response <Response>` objects from
+ #: the history of the Request. Any redirect responses will end
+ #: up here. The list is sorted from the oldest to the most recent request.
+ self.history = []
+
+ #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
+ self.reason = None
+
+ #: A CookieJar of Cookies the server sent back.
+ self.cookies = cookiejar_from_dict({})
+
+ #: The amount of time elapsed between sending the request
+ #: and the arrival of the response (as a timedelta).
+ #: This property specifically measures the time taken between sending
+ #: the first byte of the request and finishing parsing the headers. It
+ #: is therefore unaffected by consuming the response content or the
+ #: value of the ``stream`` keyword argument.
+ self.elapsed = datetime.timedelta(0)
+
+ #: The :class:`PreparedRequest <PreparedRequest>` object to which this
+ #: is a response.
+ self.request = None
+
+ def __getstate__(self):
+ # Consume everything; accessing the content attribute makes
+ # sure the content has been fully read.
+ if not self._content_consumed:
+ self.content
+
+ return dict(
+ (attr, getattr(self, attr, None))
+ for attr in self.__attrs__
+ )
+
+ def __setstate__(self, state):
+ for name, value in state.items():
+ setattr(self, name, value)
+
+ # pickled objects do not have .raw
+ setattr(self, '_content_consumed', True)
+ setattr(self, 'raw', None)
+
+ def __repr__(self):
+ return '<Response [%s]>' % (self.status_code)
+
+ def __bool__(self):
+ """Returns true if :attr:`status_code` is 'OK'."""
+ return self.ok
+
+ def __nonzero__(self):
+ """Returns true if :attr:`status_code` is 'OK'."""
+ return self.ok
+
+ def __iter__(self):
+ """Allows you to use a response as an iterator."""
+ return self.iter_content(128)
+
+ @property
+ def ok(self):
+ try:
+ self.raise_for_status()
+ except HTTPError:
+ return False
+ return True
+
+ @property
+ def is_redirect(self):
+ """True if this Response is a well-formed HTTP redirect that could have
+ been processed automatically (by :meth:`Session.resolve_redirects`).
+ """
+ return ('location' in self.headers and self.status_code in REDIRECT_STATI)
+
+ @property
+ def is_permanent_redirect(self):
+ """True if this Response one of the permanent versions of redirect"""
+ return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
+
+ @property
+ def apparent_encoding(self):
+ """The apparent encoding, provided by the chardet library"""
+ return chardet.detect(self.content)['encoding']
+
+ def iter_content(self, chunk_size=1, decode_unicode=False):
+ """Iterates over the response data. When stream=True is set on the
+ request, this avoids reading the content at once into memory for
+ large responses. The chunk size is the number of bytes it should
+ read into memory. This is not necessarily the length of each item
+ returned as decoding can take place.
+
+ If decode_unicode is True, content will be decoded using the best
+ available encoding based on the response.
+ """
+
+ def generate():
+ # Special case for urllib3.
+ if hasattr(self.raw, 'stream'):
+ try:
+ for chunk in self.raw.stream(chunk_size, decode_content=True):
+ yield chunk
+ except ProtocolError as e:
+ raise ChunkedEncodingError(e)
+ except DecodeError as e:
+ raise ContentDecodingError(e)
+ except ReadTimeoutError as e:
+ raise ConnectionError(e)
+ else:
+ # Standard file-like object.
+ while True:
+ chunk = self.raw.read(chunk_size)
+ if not chunk:
+ break
+ yield chunk
+
+ self._content_consumed = True
+
+ if self._content_consumed and isinstance(self._content, bool):
+ raise StreamConsumedError()
+ # simulate reading small chunks of the content
+ reused_chunks = iter_slices(self._content, chunk_size)
+
+ stream_chunks = generate()
+
+ chunks = reused_chunks if self._content_consumed else stream_chunks
+
+ if decode_unicode:
+ chunks = stream_decode_response_unicode(chunks, self)
+
+ return chunks
+
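+    # A minimal streaming sketch for iter_content above, assuming the request
+    # was made with ``stream=True`` (url, file name and chunk size are
+    # illustrative only):
+    #
+    #     r = requests.get('http://example.com/large.bin', stream=True)
+    #     with open('large.bin', 'wb') as fh:
+    #         for chunk in r.iter_content(chunk_size=8192):
+    #             fh.write(chunk)
+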
+ def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
+ """Iterates over the response data, one line at a time. When
+ stream=True is set on the request, this avoids reading the
+ content at once into memory for large responses.
+
+        .. note:: This method is not reentrant-safe.
+ """
+
+ pending = None
+
+ for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
+
+ if pending is not None:
+ chunk = pending + chunk
+
+ if delimiter:
+ lines = chunk.split(delimiter)
+ else:
+ lines = chunk.splitlines()
+
+ if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
+ pending = lines.pop()
+ else:
+ pending = None
+
+ for line in lines:
+ yield line
+
+ if pending is not None:
+ yield pending
+
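+    # A minimal sketch for iter_lines above (``process`` is a hypothetical
+    # callable; the delimiter is illustrative). Because of the ``pending``
+    # buffer, the same iterator must not be consumed from two places at once:
+    #
+    #     for line in r.iter_lines(delimiter=b'\n'):
+    #         if line:
+    #             process(line)
+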
+ @property
+ def content(self):
+ """Content of the response, in bytes."""
+
+ if self._content is False:
+ # Read the contents.
+ try:
+ if self._content_consumed:
+ raise RuntimeError(
+ 'The content for this response was already consumed')
+
+ if self.status_code == 0:
+ self._content = None
+ else:
+ self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
+
+ except AttributeError:
+ self._content = None
+
+ self._content_consumed = True
+ # don't need to release the connection; that's been handled by urllib3
+ # since we exhausted the data.
+ return self._content
+
+ @property
+ def text(self):
+ """Content of the response, in unicode.
+
+ If Response.encoding is None, encoding will be guessed using
+ ``chardet``.
+
+ The encoding of the response content is determined based solely on HTTP
+ headers, following RFC 2616 to the letter. If you can take advantage of
+ non-HTTP knowledge to make a better guess at the encoding, you should
+ set ``r.encoding`` appropriately before accessing this property.
+ """
+
+ # Try charset from content-type
+ content = None
+ encoding = self.encoding
+
+ if not self.content:
+ return str('')
+
+ # Fallback to auto-detected encoding.
+ if self.encoding is None:
+ encoding = self.apparent_encoding
+
+ # Decode unicode from given encoding.
+ try:
+ content = str(self.content, encoding, errors='replace')
+ except (LookupError, TypeError):
+ # A LookupError is raised if the encoding was not found which could
+ # indicate a misspelling or similar mistake.
+ #
+ # A TypeError can be raised if encoding is None
+ #
+ # So we try blindly encoding.
+ content = str(self.content, errors='replace')
+
+ return content
+
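+    # A minimal sketch of overriding the guessed encoding before reading
+    # ``text`` (the charset values are illustrative):
+    #
+    #     r.encoding             # e.g. 'ISO-8859-1', taken from the headers
+    #     r.encoding = 'utf-8'   # use out-of-band knowledge instead
+    #     r.text                 # now decoded as UTF-8
+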
+ def json(self, **kwargs):
+ """Returns the json-encoded content of a response, if any.
+
+ :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
+ """
+
+ if not self.encoding and len(self.content) > 3:
+ # No encoding set. JSON RFC 4627 section 3 states we should expect
+ # UTF-8, -16 or -32. Detect which one to use; If the detection or
+ # decoding fails, fall back to `self.text` (using chardet to make
+ # a best guess).
+ encoding = guess_json_utf(self.content)
+ if encoding is not None:
+ try:
+ return complexjson.loads(
+ self.content.decode(encoding), **kwargs
+ )
+ except UnicodeDecodeError:
+ # Wrong UTF codec detected; usually because it's not UTF-8
+ # but some other 8-bit codec. This is an RFC violation,
+ # and the server didn't bother to tell us what codec *was*
+ # used.
+ pass
+ return complexjson.loads(self.text, **kwargs)
+
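+    # A minimal sketch of the ``**kwargs`` pass-through in json() above
+    # (``parse_float`` is a standard ``json.loads`` argument, shown only as
+    # an illustration):
+    #
+    #     from decimal import Decimal
+    #     payload = r.json(parse_float=Decimal)
+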
+ @property
+ def links(self):
+ """Returns the parsed header links of the response, if any."""
+
+ header = self.headers.get('link')
+
+ # l = MultiDict()
+ l = {}
+
+ if header:
+ links = parse_header_links(header)
+
+ for link in links:
+ key = link.get('rel') or link.get('url')
+ l[key] = link
+
+ return l
+
+ def raise_for_status(self):
+ """Raises stored :class:`HTTPError`, if one occurred."""
+
+ http_error_msg = ''
+
+ if 400 <= self.status_code < 500:
+ http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url)
+
+ elif 500 <= self.status_code < 600:
+ http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url)
+
+ if http_error_msg:
+ raise HTTPError(http_error_msg, response=self)
+
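+    # A minimal sketch of the usual calling pattern for raise_for_status
+    # above (``log`` is a hypothetical logger, not part of this module):
+    #
+    #     try:
+    #         r.raise_for_status()
+    #     except HTTPError as exc:
+    #         log.warning('request failed: %s', exc)
+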
+ def close(self):
+ """Releases the connection back to the pool. Once this method has been
+ called the underlying ``raw`` object must not be accessed again.
+
+ *Note: Should not normally need to be called explicitly.*
+ """
+ if not self._content_consumed:
+ return self.raw.close()
+
+ return self.raw.release_conn()
diff --git a/third_party/python/requests/requests/packages/__init__.py b/third_party/python/requests/requests/packages/__init__.py
new file mode 100644
index 0000000000..971c2ad024
--- /dev/null
+++ b/third_party/python/requests/requests/packages/__init__.py
@@ -0,0 +1,36 @@
+'''
+Debian and other distributions "unbundle" requests' vendored dependencies, and
+rewrite all imports to use the global versions of ``urllib3`` and ``chardet``.
+The problem with this is that it is not only requests itself that imports
+those dependencies, but also third-party code outside of the distros' control.
+
+In reaction to these problems, the distro maintainers replaced
+``requests.packages`` with a magical "stub module" that imports the correct
+modules. The implementations varied in quality and all had severe
+problems. For example, a symlink (or hardlink) that links the correct modules
+into place introduces problems regarding object identity, since you now have
+two modules in `sys.modules` with the same API, but different identities::
+
+ requests.packages.urllib3 is not urllib3
+
+With version ``2.5.2``, requests started to maintain its own stub, so that
+distro-specific breakage would be reduced to a minimum, even though the whole
+issue is not requests' fault in the first place. See
+https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull
+request.
+'''
+
+from __future__ import absolute_import
+import sys
+
+try:
+ from . import urllib3
+except ImportError:
+ import urllib3
+ sys.modules['%s.urllib3' % __name__] = urllib3
+
+try:
+ from . import chardet
+except ImportError:
+ import chardet
+ sys.modules['%s.chardet' % __name__] = chardet
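+
+# A minimal sketch of the invariant this stub preserves on systems where the
+# vendored copies have been unbundled (illustrative, run from user code):
+#
+#     import urllib3
+#     import requests.packages.urllib3
+#     assert requests.packages.urllib3 is urllib3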
diff --git a/third_party/python/requests/requests/packages/chardet/__init__.py b/third_party/python/requests/requests/packages/chardet/__init__.py
new file mode 100644
index 0000000000..82c2a48d29
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/__init__.py
@@ -0,0 +1,32 @@
+######################## BEGIN LICENSE BLOCK ########################
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+__version__ = "2.3.0"
+from sys import version_info
+
+
+def detect(aBuf):
+ if ((version_info < (3, 0) and isinstance(aBuf, unicode)) or
+ (version_info >= (3, 0) and not isinstance(aBuf, bytes))):
+ raise ValueError('Expected a bytes object, not a unicode object')
+
+ from . import universaldetector
+ u = universaldetector.UniversalDetector()
+ u.reset()
+ u.feed(aBuf)
+ u.close()
+ return u.result
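+
+# A minimal usage sketch of detect() above (the file name is illustrative;
+# the result is a dict with 'encoding' and 'confidence' keys):
+#
+#     result = detect(open('some_file.txt', 'rb').read())
+#     result['encoding'], result['confidence']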
diff --git a/third_party/python/requests/requests/packages/chardet/big5freq.py b/third_party/python/requests/requests/packages/chardet/big5freq.py
new file mode 100644
index 0000000000..65bffc04b0
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/big5freq.py
@@ -0,0 +1,925 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Big5 frequency table
+# by Taiwan's Mandarin Promotion Council
+# <http://www.edu.tw:81/mandr/>
+#
+# 128 --> 0.42261
+# 256 --> 0.57851
+# 512 --> 0.74851
+# 1024 --> 0.89384
+# 2048 --> 0.97583
+#
+# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98
+# Random Distribution Ration = 512/(5401-512)=0.105
+#
+# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
+
+BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75
+
+#Char to FreqOrder table
+BIG5_TABLE_SIZE = 5376
+
+Big5CharToFreqOrder = (
+ 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16
+3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32
+1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48
+ 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64
+3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80
+4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96
+5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112
+ 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128
+ 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144
+ 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160
+2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176
+1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192
+3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208
+ 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224
+1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240
+3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256
+2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272
+ 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288
+3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304
+1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320
+5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336
+ 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352
+5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368
+1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384
+ 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400
+ 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416
+3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432
+3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448
+ 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464
+2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480
+2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496
+ 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512
+ 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528
+3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544
+1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560
+1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576
+1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592
+2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608
+ 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624
+4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640
+1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656
+5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672
+2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688
+ 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704
+ 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720
+ 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736
+ 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752
+5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768
+ 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784
+1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800
+ 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816
+ 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832
+5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848
+1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864
+ 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880
+3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896
+4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912
+3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928
+ 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944
+ 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960
+1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976
+4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992
+3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
+3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
+2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
+5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056
+3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
+5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
+1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
+2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
+1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
+ 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
+1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
+4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
+3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
+ 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
+ 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232
+ 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
+2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
+5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
+1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
+2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
+1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
+1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
+5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
+5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
+5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
+3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
+4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
+4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
+2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
+5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
+3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
+ 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
+5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520
+5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
+1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
+2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
+3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
+4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
+5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
+3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
+4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
+1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
+1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680
+4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
+1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
+ 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
+1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
+1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
+3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
+ 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
+5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
+2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
+1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
+1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856
+5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
+ 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
+4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
+ 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
+2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
+ 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
+1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
+1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
+ 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
+4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
+4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
+1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
+3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
+5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
+5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096
+1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
+2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
+1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
+3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
+2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
+3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
+2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
+4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
+4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
+3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
+ 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
+3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
+ 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
+3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
+4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
+3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
+1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
+5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
+ 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
+5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
+1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
+ 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
+4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464
+4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
+ 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
+2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
+2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528
+3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
+1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
+4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
+2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
+1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
+1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
+2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
+3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
+1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
+5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688
+1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
+4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720
+1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
+ 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
+1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
+4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
+4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
+2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
+1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
+4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
+ 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
+5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
+2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
+3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
+4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
+ 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
+5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
+5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
+1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
+4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
+4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
+2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040
+3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
+3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
+2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
+1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
+4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
+3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
+3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
+2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
+4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184
+5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
+3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
+2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
+3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
+1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
+2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
+3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
+4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312
+2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
+2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
+5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
+1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
+2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
+1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
+3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
+4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440
+2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
+3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
+3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
+2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
+4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
+2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
+3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
+4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
+5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
+3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
+ 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
+1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632
+4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
+1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
+4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680
+5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
+ 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
+5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
+5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
+2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
+3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
+2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
+2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
+ 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
+1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
+4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
+3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
+3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
+ 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
+2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
+ 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
+2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
+4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
+1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
+4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
+1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
+3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
+ 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
+3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
+5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
+5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
+3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
+3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
+1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
+2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
+5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
+1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
+1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
+3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
+ 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
+1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
+4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
+5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
+2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
+3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
+ 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
+1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
+2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
+2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
+5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
+5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
+5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
+2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
+2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
+1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
+4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
+3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
+3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
+4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
+4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
+2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
+2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
+5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
+4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
+5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
+4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
+ 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
+ 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
+1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
+3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
+4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
+1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
+5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
+2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
+2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
+3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
+5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
+1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
+3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
+5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
+1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
+5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
+2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
+3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
+2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
+3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
+3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
+3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
+4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
+ 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
+2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
+4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
+3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
+5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
+1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
+5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
+ 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
+1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
+ 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
+4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
+1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
+4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
+1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
+ 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
+3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
+4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
+5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
+ 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
+3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
+ 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
+2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512
+#Everything below is of no interest for detection purpose
+2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392
+2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408
+5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424
+5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440
+5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456
+5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472
+5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488
+5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504
+5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520
+5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536
+5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552
+5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568
+5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584
+5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600
+6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616
+6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632
+6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648
+6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664
+6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680
+6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696
+6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712
+6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728
+6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744
+6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760
+6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776
+6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792
+6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808
+6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824
+6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840
+6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856
+6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872
+6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888
+6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904
+6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920
+6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936
+6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952
+6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968
+6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984
+6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000
+6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016
+6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032
+6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048
+6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064
+6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080
+6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 6096
+6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112
+6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128
+6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 6144
+6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160
+6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176
+6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192
+6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208
+6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224
+6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240
+6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256
+3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272
+6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288
+6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304
+3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320
+6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336
+6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352
+6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368
+6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384
+6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400
+6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416
+6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432
+4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448
+6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464
+6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480
+3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496
+6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512
+6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528
+6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544
+6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560
+6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576
+6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592
+6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608
+6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624
+6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640
+6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656
+6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672
+7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688
+7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704
+7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720
+7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736
+7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752
+7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768
+7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784
+7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800
+7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816
+7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832
+7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848
+7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864
+7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880
+7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896
+7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912
+7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928
+7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944
+7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960
+7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976
+7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992
+7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008
+7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024
+7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040
+7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056
+7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072
+7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088
+7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104
+7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120
+7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136
+7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152
+7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168
+7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184
+7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200
+7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216
+7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232
+7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248
+7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264
+7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280
+7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296
+7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312
+7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328
+7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344
+7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 7360
+7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376
+7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392
+7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 7408
+7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424
+7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440
+3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456
+7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472
+7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488
+7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504
+7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520
+4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536
+7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552
+7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568
+7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584
+7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600
+7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616
+7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632
+7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648
+7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664
+7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680
+7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696
+7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712
+8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728
+8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744
+8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760
+8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776
+8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792
+8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808
+8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824
+8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840
+8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856
+8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872
+8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888
+8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904
+8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920
+8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936
+8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952
+8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968
+8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984
+8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000
+8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016
+8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032
+8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048
+8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064
+8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080
+8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096
+8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112
+8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128
+8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144
+8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160
+8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176
+8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192
+8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208
+8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224
+8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240
+8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256
+8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272
+8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288
+8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304
+8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320
+8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336
+8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352
+8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368
+8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384
+8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400
+8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416
+8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432
+8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448
+8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464
+8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480
+8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496
+8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512
+8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528
+8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544
+8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560
+8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576
+8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592
+8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608
+8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 8624
+8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640
+8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656
+8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 8672
+8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688
+4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704
+8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720
+8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736
+8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752
+8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768
+9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784
+9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800
+9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816
+9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832
+9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848
+9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864
+9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880
+9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896
+9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912
+9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928
+9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944
+9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960
+9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976
+9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992
+9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008
+9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024
+9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040
+9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056
+9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072
+9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088
+9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104
+9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120
+9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136
+9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152
+9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168
+9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184
+9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200
+9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216
+9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232
+9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248
+9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264
+9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280
+9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296
+9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312
+9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328
+9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344
+9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360
+9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376
+3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392
+9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408
+9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424
+9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440
+4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456
+9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472
+9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488
+9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504
+9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520
+9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536
+9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552
+9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568
+9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584
+9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600
+9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616
+9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632
+9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648
+9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664
+9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680
+9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696
+9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712
+9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728
+9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744
+9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760
+9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776
+9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792
+9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808
+9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824
+10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840
+10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856
+10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872
+10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888
+10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904
+10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920
+10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936
+10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952
+10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968
+4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984
+10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000
+10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016
+10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032
+10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048
+10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064
+10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080
+10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096
+10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112
+4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128
+10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144
+10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160
+10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176
+10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192
+10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208
+10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224
+10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240
+10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256
+10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272
+10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288
+10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304
+10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320
+10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336
+10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352
+10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368
+10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384
+10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400
+4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416
+10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432
+10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448
+10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464
+10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480
+10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496
+10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512
+10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528
+10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544
+10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560
+10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576
+10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592
+10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608
+10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624
+10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640
+10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656
+10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672
+10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688
+10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704
+10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720
+10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736
+10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752
+10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768
+10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784
+10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800
+10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816
+10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832
+10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848
+10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864
+10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880
+10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896
+11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912
+11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928
+11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944
+4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960
+11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976
+11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992
+11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008
+11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024
+11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040
+11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056
+11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072
+11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088
+11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104
+11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120
+11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136
+11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152
+11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168
+11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184
+11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200
+11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216
+11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232
+11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248
+11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264
+11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280
+11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296
+11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312
+11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328
+11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344
+11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360
+11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376
+11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392
+11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408
+11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424
+11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440
+11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456
+11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472
+4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488
+11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504
+11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520
+11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536
+11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552
+11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568
+11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584
+11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600
+11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616
+11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632
+11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648
+11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664
+11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680
+11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696
+11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712
+11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728
+11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744
+11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760
+11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776
+11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792
+11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808
+11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824
+11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840
+11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856
+11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872
+11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888
+11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904
+11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920
+11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936
+12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952
+12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968
+12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984
+12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000
+12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016
+12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032
+12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048
+12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064
+12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080
+12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096
+12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112
+12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128
+12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144
+12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160
+12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176
+4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192
+4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208
+4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224
+12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240
+12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256
+12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272
+12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288
+12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304
+12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320
+12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336
+12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352
+12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368
+12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384
+12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400
+12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416
+12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432
+12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448
+12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464
+12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480
+12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496
+12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512
+12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528
+12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544
+12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560
+12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576
+12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592
+12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608
+12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624
+12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640
+12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656
+12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672
+12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688
+12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704
+12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720
+12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736
+12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752
+12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768
+12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784
+12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800
+12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816
+12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832
+12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848
+12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864
+12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880
+12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896
+12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912
+12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928
+12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944
+12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960
+12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976
+4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992
+13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008
+13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024
+13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040
+13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056
+13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072
+13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088
+13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104
+4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120
+13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136
+13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152
+13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168
+13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184
+13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200
+13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216
+13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232
+13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248
+13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264
+13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280
+13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296
+13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312
+13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328
+13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344
+13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360
+5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376
+13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392
+13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408
+13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424
+13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440
+13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456
+13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472
+13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488
+13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504
+13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520
+13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536
+13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552
+13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568
+13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584
+13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600
+13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616
+13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632
+13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648
+13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664
+13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680
+13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696
+13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712
+13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728
+13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744
+13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760
+13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776
+13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792
+13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808
+13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824
+13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840
+13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856
+13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872
+13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888
+13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904
+13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920
+13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936
+13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952
+13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968
+13968,13969,13970,13971,13972) #13973
+
+# flake8: noqa
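The tuple that closes here is Big5CharToFreqOrder: indexing it with a character's computed "order" yields that character's frequency rank, and ranks below 512 are what the distribution analyser added further down in this diff counts as frequent. A minimal sketch of that lookup, assuming the vendored import path and using made-up byte values:

    from requests.packages.chardet.big5freq import (Big5CharToFreqOrder,
                                                    BIG5_TABLE_SIZE)

    def big5_frequency_rank(first_byte, second_byte):
        # Mirrors Big5DistributionAnalysis.get_order() further down in this diff.
        if first_byte < 0xA4:
            return -1
        if second_byte >= 0xA1:
            order = 157 * (first_byte - 0xA4) + second_byte - 0xA1 + 63
        else:
            order = 157 * (first_byte - 0xA4) + second_byte - 0x40
        if order >= BIG5_TABLE_SIZE:
            # feed() only counts orders that fall inside the table
            return -1
        return Big5CharToFreqOrder[order]

    rank = big5_frequency_rank(0xA4, 0x40)
    print(rank, rank < 512)   # ranks below 512 are treated as frequent characters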
diff --git a/third_party/python/requests/requests/packages/chardet/big5prober.py b/third_party/python/requests/requests/packages/chardet/big5prober.py
new file mode 100644
index 0000000000..becce81e5e
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/big5prober.py
@@ -0,0 +1,42 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import Big5DistributionAnalysis
+from .mbcssm import Big5SMModel
+
+
+class Big5Prober(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(Big5SMModel)
+ self._mDistributionAnalyzer = Big5DistributionAnalysis()
+ self.reset()
+
+ def get_charset_name(self):
+ return "Big5"
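Big5Prober itself only wires the Big5 coding state machine to the Big5 distribution analyser; feed(), get_confidence() and get_state() are inherited from MultiByteCharSetProber, which is not part of this hunk. A hedged usage sketch (the import path assumes this vendored layout):

    from requests.packages.chardet.big5prober import Big5Prober

    prober = Big5Prober()
    prober.feed(u'\u4f60\u597d'.encode('big5'))   # "ni hao" encoded as Big5
    print(prober.get_charset_name(), prober.get_confidence())
    prober.reset()                                # ready for the next input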
diff --git a/third_party/python/requests/requests/packages/chardet/chardetect.py b/third_party/python/requests/requests/packages/chardet/chardetect.py
new file mode 100755
index 0000000000..ffe892f25d
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/chardetect.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+"""
+Script which takes one or more file paths and reports on their detected
+encodings
+
+Example::
+
+ % chardetect somefile someotherfile
+ somefile: windows-1252 with confidence 0.5
+ someotherfile: ascii with confidence 1.0
+
+If no paths are provided, it takes its input from stdin.
+
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import sys
+from io import open
+
+from chardet import __version__
+from chardet.universaldetector import UniversalDetector
+
+
+def description_of(lines, name='stdin'):
+ """
+ Return a string describing the probable encoding of a file or
+ list of strings.
+
+ :param lines: The lines to get the encoding of.
+ :type lines: Iterable of bytes
+ :param name: Name of file or collection of lines
+ :type name: str
+ """
+ u = UniversalDetector()
+ for line in lines:
+ u.feed(line)
+ u.close()
+ result = u.result
+ if result['encoding']:
+ return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
+ result['confidence'])
+ else:
+ return '{0}: no result'.format(name)
+
+
+def main(argv=None):
+ '''
+ Handles command line arguments and gets things started.
+
+ :param argv: List of arguments, as if specified on the command-line.
+ If None, ``sys.argv[1:]`` is used instead.
+ :type argv: list of str
+ '''
+ # Get command line arguments
+ parser = argparse.ArgumentParser(
+ description="Takes one or more file paths and reports their detected \
+ encodings",
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ conflict_handler='resolve')
+ parser.add_argument('input',
+ help='File whose encoding we would like to determine.',
+ type=argparse.FileType('rb'), nargs='*',
+ default=[sys.stdin])
+ parser.add_argument('--version', action='version',
+ version='%(prog)s {0}'.format(__version__))
+ args = parser.parse_args(argv)
+
+ for f in args.input:
+ if f.isatty():
+ print("You are running chardetect interactively. Press " +
+ "CTRL-D twice at the start of a blank line to signal the " +
+ "end of your input. If you want help, run chardetect " +
+ "--help\n", file=sys.stderr)
+ print(description_of(f, f.name))
+
+
+if __name__ == '__main__':
+ main()
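description_of() above is a thin wrapper around UniversalDetector, so the same result can be obtained without the command-line entry point. A short equivalent, assuming a top-level chardet package is importable (the vendored copy lives under requests.packages) and using made-up sample bytes:

    from chardet.universaldetector import UniversalDetector

    detector = UniversalDetector()
    for line in [b'caf\xc3\xa9 au lait\n', b'na\xc3\xafve r\xc3\xa9sum\xc3\xa9\n']:
        detector.feed(line)
    detector.close()
    print(detector.result)   # a dict with 'encoding' and 'confidence' keys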
diff --git a/third_party/python/requests/requests/packages/chardet/chardistribution.py b/third_party/python/requests/requests/packages/chardet/chardistribution.py
new file mode 100644
index 0000000000..4e64a00bef
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/chardistribution.py
@@ -0,0 +1,231 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .euctwfreq import (EUCTWCharToFreqOrder, EUCTW_TABLE_SIZE,
+ EUCTW_TYPICAL_DISTRIBUTION_RATIO)
+from .euckrfreq import (EUCKRCharToFreqOrder, EUCKR_TABLE_SIZE,
+ EUCKR_TYPICAL_DISTRIBUTION_RATIO)
+from .gb2312freq import (GB2312CharToFreqOrder, GB2312_TABLE_SIZE,
+ GB2312_TYPICAL_DISTRIBUTION_RATIO)
+from .big5freq import (Big5CharToFreqOrder, BIG5_TABLE_SIZE,
+ BIG5_TYPICAL_DISTRIBUTION_RATIO)
+from .jisfreq import (JISCharToFreqOrder, JIS_TABLE_SIZE,
+ JIS_TYPICAL_DISTRIBUTION_RATIO)
+from .compat import wrap_ord
+
+ENOUGH_DATA_THRESHOLD = 1024
+SURE_YES = 0.99
+SURE_NO = 0.01
+MINIMUM_DATA_THRESHOLD = 3
+
+
+class CharDistributionAnalysis:
+ def __init__(self):
+ # Mapping table to get frequency order from char order (get from
+ # GetOrder())
+ self._mCharToFreqOrder = None
+ self._mTableSize = None # Size of above table
+ # This is a constant value which varies from language to language,
+ # used in calculating confidence. See
+ # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
+ # for further detail.
+ self._mTypicalDistributionRatio = None
+ self.reset()
+
+ def reset(self):
+ """reset analyser, clear any state"""
+ # If this flag is set to True, detection is done and conclusion has
+ # been made
+ self._mDone = False
+ self._mTotalChars = 0 # Total characters encountered
+ # The number of characters whose frequency order is less than 512
+ self._mFreqChars = 0
+
+ def feed(self, aBuf, aCharLen):
+ """feed a character with known length"""
+ if aCharLen == 2:
+            # we only care about 2-byte characters in our distribution analysis
+ order = self.get_order(aBuf)
+ else:
+ order = -1
+ if order >= 0:
+ self._mTotalChars += 1
+ # order is valid
+ if order < self._mTableSize:
+ if 512 > self._mCharToFreqOrder[order]:
+ self._mFreqChars += 1
+
+ def get_confidence(self):
+ """return confidence based on existing data"""
+        # if we didn't receive any characters in our consideration range,
+        # return a negative answer
+ if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
+ return SURE_NO
+
+ if self._mTotalChars != self._mFreqChars:
+ r = (self._mFreqChars / ((self._mTotalChars - self._mFreqChars)
+ * self._mTypicalDistributionRatio))
+ if r < SURE_YES:
+ return r
+
+ # normalize confidence (we don't want to be 100% sure)
+ return SURE_YES
+
+ def got_enough_data(self):
+        # It is not necessary to receive all of the data to draw a conclusion.
+        # For charset detection, a certain amount of data is enough.
+ return self._mTotalChars > ENOUGH_DATA_THRESHOLD
+
+ def get_order(self, aBuf):
+ # We do not handle characters based on the original encoding string,
+ # but convert this encoding string to a number, here called order.
+ # This allows multiple encodings of a language to share one frequency
+ # table.
+ return -1
+
+
+class EUCTWDistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ CharDistributionAnalysis.__init__(self)
+ self._mCharToFreqOrder = EUCTWCharToFreqOrder
+ self._mTableSize = EUCTW_TABLE_SIZE
+ self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, aBuf):
+ # for euc-TW encoding, we are interested
+ # first byte range: 0xc4 -- 0xfe
+ # second byte range: 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char = wrap_ord(aBuf[0])
+ if first_char >= 0xC4:
+ return 94 * (first_char - 0xC4) + wrap_ord(aBuf[1]) - 0xA1
+ else:
+ return -1
+
+
+class EUCKRDistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ CharDistributionAnalysis.__init__(self)
+ self._mCharToFreqOrder = EUCKRCharToFreqOrder
+ self._mTableSize = EUCKR_TABLE_SIZE
+ self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, aBuf):
+ # for euc-KR encoding, we are interested
+ # first byte range: 0xb0 -- 0xfe
+ # second byte range: 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char = wrap_ord(aBuf[0])
+ if first_char >= 0xB0:
+ return 94 * (first_char - 0xB0) + wrap_ord(aBuf[1]) - 0xA1
+ else:
+ return -1
+
+
+class GB2312DistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ CharDistributionAnalysis.__init__(self)
+ self._mCharToFreqOrder = GB2312CharToFreqOrder
+ self._mTableSize = GB2312_TABLE_SIZE
+ self._mTypicalDistributionRatio = GB2312_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, aBuf):
+ # for GB2312 encoding, we are interested
+ # first byte range: 0xb0 -- 0xfe
+ # second byte range: 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
+ if (first_char >= 0xB0) and (second_char >= 0xA1):
+ return 94 * (first_char - 0xB0) + second_char - 0xA1
+ else:
+ return -1
+
+
+class Big5DistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ CharDistributionAnalysis.__init__(self)
+ self._mCharToFreqOrder = Big5CharToFreqOrder
+ self._mTableSize = BIG5_TABLE_SIZE
+ self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, aBuf):
+ # for big5 encoding, we are interested
+ # first byte range: 0xa4 -- 0xfe
+ # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
+ if first_char >= 0xA4:
+ if second_char >= 0xA1:
+ return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
+ else:
+ return 157 * (first_char - 0xA4) + second_char - 0x40
+ else:
+ return -1
+
+
+class SJISDistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ CharDistributionAnalysis.__init__(self)
+ self._mCharToFreqOrder = JISCharToFreqOrder
+ self._mTableSize = JIS_TABLE_SIZE
+ self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, aBuf):
+ # for sjis encoding, we are interested
+ # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
+        # second byte range: 0x40 -- 0x7e, 0x81 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
+ if (first_char >= 0x81) and (first_char <= 0x9F):
+ order = 188 * (first_char - 0x81)
+ elif (first_char >= 0xE0) and (first_char <= 0xEF):
+ order = 188 * (first_char - 0xE0 + 31)
+ else:
+ return -1
+ order = order + second_char - 0x40
+ if second_char > 0x7F:
+ order = -1
+ return order
+
+
+class EUCJPDistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ CharDistributionAnalysis.__init__(self)
+ self._mCharToFreqOrder = JISCharToFreqOrder
+ self._mTableSize = JIS_TABLE_SIZE
+ self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, aBuf):
+ # for euc-JP encoding, we are interested
+ # first byte range: 0xa0 -- 0xfe
+ # second byte range: 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ char = wrap_ord(aBuf[0])
+ if char >= 0xA0:
+ return 94 * (char - 0xA1) + wrap_ord(aBuf[1]) - 0xa1
+ else:
+ return -1
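The confidence formula in CharDistributionAnalysis.get_confidence() above compares the count of frequent characters against the count of non-frequent ones, scaled by the language's typical distribution ratio, and caps the result at SURE_YES. A back-of-the-envelope example with invented counts (BIG5_TYPICAL_DISTRIBUTION_RATIO is 0.75 in the upstream table; treated as an assumption here):

    total_chars = 400     # two-byte characters seen so far
    freq_chars = 300      # how many of them rank below 512 in the frequency table
    typical_ratio = 0.75  # BIG5_TYPICAL_DISTRIBUTION_RATIO (assumed value)

    # get_confidence() returns SURE_NO when freq_chars <= MINIMUM_DATA_THRESHOLD (3)
    if total_chars != freq_chars:
        r = freq_chars / ((total_chars - freq_chars) * typical_ratio)
    else:
        r = 0.99
    print(min(r, 0.99))   # 300 / (100 * 0.75) = 4.0, so the result caps at SURE_YES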
diff --git a/third_party/python/requests/requests/packages/chardet/charsetgroupprober.py b/third_party/python/requests/requests/packages/chardet/charsetgroupprober.py
new file mode 100644
index 0000000000..85e7a1c67d
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/charsetgroupprober.py
@@ -0,0 +1,106 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from . import constants
+import sys
+from .charsetprober import CharSetProber
+
+
+class CharSetGroupProber(CharSetProber):
+ def __init__(self):
+ CharSetProber.__init__(self)
+ self._mActiveNum = 0
+ self._mProbers = []
+ self._mBestGuessProber = None
+
+ def reset(self):
+ CharSetProber.reset(self)
+ self._mActiveNum = 0
+ for prober in self._mProbers:
+ if prober:
+ prober.reset()
+ prober.active = True
+ self._mActiveNum += 1
+ self._mBestGuessProber = None
+
+ def get_charset_name(self):
+ if not self._mBestGuessProber:
+ self.get_confidence()
+ if not self._mBestGuessProber:
+ return None
+# self._mBestGuessProber = self._mProbers[0]
+ return self._mBestGuessProber.get_charset_name()
+
+ def feed(self, aBuf):
+ for prober in self._mProbers:
+ if not prober:
+ continue
+ if not prober.active:
+ continue
+ st = prober.feed(aBuf)
+ if not st:
+ continue
+ if st == constants.eFoundIt:
+ self._mBestGuessProber = prober
+ return self.get_state()
+ elif st == constants.eNotMe:
+ prober.active = False
+ self._mActiveNum -= 1
+ if self._mActiveNum <= 0:
+ self._mState = constants.eNotMe
+ return self.get_state()
+ return self.get_state()
+
+ def get_confidence(self):
+ st = self.get_state()
+ if st == constants.eFoundIt:
+ return 0.99
+ elif st == constants.eNotMe:
+ return 0.01
+ bestConf = 0.0
+ self._mBestGuessProber = None
+ for prober in self._mProbers:
+ if not prober:
+ continue
+ if not prober.active:
+ if constants._debug:
+ sys.stderr.write(prober.get_charset_name()
+ + ' not active\n')
+ continue
+ cf = prober.get_confidence()
+ if constants._debug:
+ sys.stderr.write('%s confidence = %s\n' %
+ (prober.get_charset_name(), cf))
+ if bestConf < cf:
+ bestConf = cf
+ self._mBestGuessProber = prober
+ if not self._mBestGuessProber:
+ return 0.0
+ return bestConf
+# else:
+# self._mBestGuessProber = self._mProbers[0]
+# return self._mBestGuessProber.get_confidence()
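
For context: CharSetGroupProber.get_confidence above simply keeps the active child prober reporting the highest confidence. A minimal illustration of that selection with stand-in probers (hypothetical, not part of chardet):

    class StubProber:
        def __init__(self, name, confidence):
            self.active = True
            self._name = name
            self._confidence = confidence

        def get_charset_name(self):
            return self._name

        def get_confidence(self):
            return self._confidence

    def best_guess(probers):
        # Mirror of the scan in CharSetGroupProber.get_confidence: remember
        # the active prober with the highest confidence.
        best, best_conf = None, 0.0
        for p in probers:
            if not p.active:
                continue
            cf = p.get_confidence()
            if cf > best_conf:
                best, best_conf = p, cf
        return best, best_conf

    probers = [StubProber('UTF-8', 0.50), StubProber('EUC-JP', 0.87)]
    print(best_guess(probers)[0].get_charset_name())  # -> EUC-JP
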
diff --git a/third_party/python/requests/requests/packages/chardet/charsetprober.py b/third_party/python/requests/requests/packages/chardet/charsetprober.py
new file mode 100644
index 0000000000..97581712c1
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/charsetprober.py
@@ -0,0 +1,62 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from . import constants
+import re
+
+
+class CharSetProber:
+ def __init__(self):
+ pass
+
+ def reset(self):
+ self._mState = constants.eDetecting
+
+ def get_charset_name(self):
+ return None
+
+ def feed(self, aBuf):
+ pass
+
+ def get_state(self):
+ return self._mState
+
+ def get_confidence(self):
+ return 0.0
+
+ def filter_high_bit_only(self, aBuf):
+ aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf)
+ return aBuf
+
+ def filter_without_english_letters(self, aBuf):
+ aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf)
+ return aBuf
+
+ def filter_with_english_letters(self, aBuf):
+ # TODO
+ return aBuf
diff --git a/third_party/python/requests/requests/packages/chardet/codingstatemachine.py b/third_party/python/requests/requests/packages/chardet/codingstatemachine.py
new file mode 100644
index 0000000000..8dd8c91798
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/codingstatemachine.py
@@ -0,0 +1,61 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .constants import eStart
+from .compat import wrap_ord
+
+
+class CodingStateMachine:
+ def __init__(self, sm):
+ self._mModel = sm
+ self._mCurrentBytePos = 0
+ self._mCurrentCharLen = 0
+ self.reset()
+
+ def reset(self):
+ self._mCurrentState = eStart
+
+ def next_state(self, c):
+ # for each byte we get its class
+ # if it is first byte, we also get byte length
+ # PY3K: aBuf is a byte stream, so c is an int, not a byte
+ byteCls = self._mModel['classTable'][wrap_ord(c)]
+ if self._mCurrentState == eStart:
+ self._mCurrentBytePos = 0
+ self._mCurrentCharLen = self._mModel['charLenTable'][byteCls]
+ # from byte's class and stateTable, we get its next state
+ curr_state = (self._mCurrentState * self._mModel['classFactor']
+ + byteCls)
+ self._mCurrentState = self._mModel['stateTable'][curr_state]
+ self._mCurrentBytePos += 1
+ return self._mCurrentState
+
+ def get_current_charlen(self):
+ return self._mCurrentCharLen
+
+ def get_coding_state_machine(self):
+ return self._mModel['name']
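
For context: next_state above treats stateTable as a flattened two-dimensional array, with the current state as the row, the byte class as the column, and classFactor as the row width. A toy model showing the same indexing (made up, not one of the chardet models):

    # Toy 2-state, 3-class machine: state 0 loops on class 0, advances to
    # state 1 on class 1, and goes to an error marker (-1 here) on class 2.
    TOY_MODEL = {
        'classFactor': 3,
        'stateTable': (
            0,  1, -1,   # transitions out of state 0
            1,  1, -1,   # transitions out of state 1
        ),
    }

    def toy_next_state(state, byte_class, model=TOY_MODEL):
        # Same flattening as CodingStateMachine.next_state
        return model['stateTable'][state * model['classFactor'] + byte_class]

    assert toy_next_state(0, 1) == 1
    assert toy_next_state(1, 2) == -1
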
diff --git a/third_party/python/requests/requests/packages/chardet/compat.py b/third_party/python/requests/requests/packages/chardet/compat.py
new file mode 100644
index 0000000000..d9e30addf9
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/compat.py
@@ -0,0 +1,34 @@
+######################## BEGIN LICENSE BLOCK ########################
+# Contributor(s):
+# Ian Cordasco - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import sys
+
+
+if sys.version_info < (3, 0):
+ base_str = (str, unicode)
+else:
+ base_str = (bytes, str)
+
+
+def wrap_ord(a):
+ if sys.version_info < (3, 0) and isinstance(a, base_str):
+ return ord(a)
+ else:
+ return a
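
For context: wrap_ord exists because indexing a byte string yields a one-character str on Python 2 but an int on Python 3. A quick Python 3 check of the behaviour the shim papers over (illustrative only):

    buf = b'\xb0\xa1'
    # On Python 3, buf[0] is already an int, so calling ord() on it would fail;
    # wrap_ord therefore returns its argument unchanged on that version.
    assert isinstance(buf[0], int) and buf[0] == 0xB0
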
diff --git a/third_party/python/requests/requests/packages/chardet/constants.py b/third_party/python/requests/requests/packages/chardet/constants.py
new file mode 100644
index 0000000000..e4d148b3c5
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/constants.py
@@ -0,0 +1,39 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+_debug = 0
+
+eDetecting = 0
+eFoundIt = 1
+eNotMe = 2
+
+eStart = 0
+eError = 1
+eItsMe = 2
+
+SHORTCUT_THRESHOLD = 0.95
diff --git a/third_party/python/requests/requests/packages/chardet/cp949prober.py b/third_party/python/requests/requests/packages/chardet/cp949prober.py
new file mode 100644
index 0000000000..ff4272f82a
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/cp949prober.py
@@ -0,0 +1,44 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCKRDistributionAnalysis
+from .mbcssm import CP949SMModel
+
+
+class CP949Prober(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(CP949SMModel)
+ # NOTE: CP949 is a superset of EUC-KR, so the distribution should not
+ # be different.
+ self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
+ self.reset()
+
+ def get_charset_name(self):
+ return "CP949"
diff --git a/third_party/python/requests/requests/packages/chardet/escprober.py b/third_party/python/requests/requests/packages/chardet/escprober.py
new file mode 100644
index 0000000000..80a844ff34
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/escprober.py
@@ -0,0 +1,86 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from . import constants
+from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel,
+ ISO2022KRSMModel)
+from .charsetprober import CharSetProber
+from .codingstatemachine import CodingStateMachine
+from .compat import wrap_ord
+
+
+class EscCharSetProber(CharSetProber):
+ def __init__(self):
+ CharSetProber.__init__(self)
+ self._mCodingSM = [
+ CodingStateMachine(HZSMModel),
+ CodingStateMachine(ISO2022CNSMModel),
+ CodingStateMachine(ISO2022JPSMModel),
+ CodingStateMachine(ISO2022KRSMModel)
+ ]
+ self.reset()
+
+ def reset(self):
+ CharSetProber.reset(self)
+ for codingSM in self._mCodingSM:
+ if not codingSM:
+ continue
+ codingSM.active = True
+ codingSM.reset()
+ self._mActiveSM = len(self._mCodingSM)
+ self._mDetectedCharset = None
+
+ def get_charset_name(self):
+ return self._mDetectedCharset
+
+ def get_confidence(self):
+ if self._mDetectedCharset:
+ return 0.99
+ else:
+ return 0.00
+
+ def feed(self, aBuf):
+ for c in aBuf:
+ # PY3K: aBuf is a byte array, so c is an int, not a byte
+ for codingSM in self._mCodingSM:
+ if not codingSM:
+ continue
+ if not codingSM.active:
+ continue
+ codingState = codingSM.next_state(wrap_ord(c))
+ if codingState == constants.eError:
+ codingSM.active = False
+ self._mActiveSM -= 1
+ if self._mActiveSM <= 0:
+ self._mState = constants.eNotMe
+ return self.get_state()
+ elif codingState == constants.eItsMe:
+ self._mState = constants.eFoundIt
+ self._mDetectedCharset = codingSM.get_coding_state_machine() # nopep8
+ return self.get_state()
+
+ return self.get_state()
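
For context: EscCharSetProber.feed above declares a result as soon as any one of its per-charset state machines reaches eItsMe. A toy version of that "first machine to confirm wins" loop with stand-in machines (hypothetical, not the real escape models):

    ITS_ME, DETECTING = 2, 0

    class StubSM:
        def __init__(self, name, trigger_byte):
            self.active = True
            self._name = name
            self._trigger = trigger_byte

        def next_state(self, byte):
            # Pretend the charset is confirmed when its trigger byte appears.
            return ITS_ME if byte == self._trigger else DETECTING

        def get_coding_state_machine(self):
            return self._name

    def detect(buf, machines):
        for byte in buf:
            for sm in machines:
                if sm.active and sm.next_state(byte) == ITS_ME:
                    return sm.get_coding_state_machine()
        return None

    print(detect(b'\x1b$B', [StubSM('ISO-2022-JP', 0x1B)]))  # -> ISO-2022-JP
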
diff --git a/third_party/python/requests/requests/packages/chardet/escsm.py b/third_party/python/requests/requests/packages/chardet/escsm.py
new file mode 100644
index 0000000000..bd302b4c61
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/escsm.py
@@ -0,0 +1,242 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .constants import eStart, eError, eItsMe
+
+HZ_cls = (
+1,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,0,0, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,0,0,0,0, # 20 - 27
+0,0,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+0,0,0,0,0,0,0,0, # 40 - 47
+0,0,0,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,4,0,5,2,0, # 78 - 7f
+1,1,1,1,1,1,1,1, # 80 - 87
+1,1,1,1,1,1,1,1, # 88 - 8f
+1,1,1,1,1,1,1,1, # 90 - 97
+1,1,1,1,1,1,1,1, # 98 - 9f
+1,1,1,1,1,1,1,1, # a0 - a7
+1,1,1,1,1,1,1,1, # a8 - af
+1,1,1,1,1,1,1,1, # b0 - b7
+1,1,1,1,1,1,1,1, # b8 - bf
+1,1,1,1,1,1,1,1, # c0 - c7
+1,1,1,1,1,1,1,1, # c8 - cf
+1,1,1,1,1,1,1,1, # d0 - d7
+1,1,1,1,1,1,1,1, # d8 - df
+1,1,1,1,1,1,1,1, # e0 - e7
+1,1,1,1,1,1,1,1, # e8 - ef
+1,1,1,1,1,1,1,1, # f0 - f7
+1,1,1,1,1,1,1,1, # f8 - ff
+)
+
+HZ_st = (
+eStart,eError, 3,eStart,eStart,eStart,eError,eError,# 00-07
+eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
+eItsMe,eItsMe,eError,eError,eStart,eStart, 4,eError,# 10-17
+ 5,eError, 6,eError, 5, 5, 4,eError,# 18-1f
+ 4,eError, 4, 4, 4,eError, 4,eError,# 20-27
+ 4,eItsMe,eStart,eStart,eStart,eStart,eStart,eStart,# 28-2f
+)
+
+HZCharLenTable = (0, 0, 0, 0, 0, 0)
+
+HZSMModel = {'classTable': HZ_cls,
+ 'classFactor': 6,
+ 'stateTable': HZ_st,
+ 'charLenTable': HZCharLenTable,
+ 'name': "HZ-GB-2312"}
+
+ISO2022CN_cls = (
+2,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,0,0, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,0,0,0,0, # 20 - 27
+0,3,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+0,0,0,4,0,0,0,0, # 40 - 47
+0,0,0,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,0,0,0,0,0, # 78 - 7f
+2,2,2,2,2,2,2,2, # 80 - 87
+2,2,2,2,2,2,2,2, # 88 - 8f
+2,2,2,2,2,2,2,2, # 90 - 97
+2,2,2,2,2,2,2,2, # 98 - 9f
+2,2,2,2,2,2,2,2, # a0 - a7
+2,2,2,2,2,2,2,2, # a8 - af
+2,2,2,2,2,2,2,2, # b0 - b7
+2,2,2,2,2,2,2,2, # b8 - bf
+2,2,2,2,2,2,2,2, # c0 - c7
+2,2,2,2,2,2,2,2, # c8 - cf
+2,2,2,2,2,2,2,2, # d0 - d7
+2,2,2,2,2,2,2,2, # d8 - df
+2,2,2,2,2,2,2,2, # e0 - e7
+2,2,2,2,2,2,2,2, # e8 - ef
+2,2,2,2,2,2,2,2, # f0 - f7
+2,2,2,2,2,2,2,2, # f8 - ff
+)
+
+ISO2022CN_st = (
+eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
+eStart,eError,eError,eError,eError,eError,eError,eError,# 08-0f
+eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
+eItsMe,eItsMe,eItsMe,eError,eError,eError, 4,eError,# 18-1f
+eError,eError,eError,eItsMe,eError,eError,eError,eError,# 20-27
+ 5, 6,eError,eError,eError,eError,eError,eError,# 28-2f
+eError,eError,eError,eItsMe,eError,eError,eError,eError,# 30-37
+eError,eError,eError,eError,eError,eItsMe,eError,eStart,# 38-3f
+)
+
+ISO2022CNCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0)
+
+ISO2022CNSMModel = {'classTable': ISO2022CN_cls,
+ 'classFactor': 9,
+ 'stateTable': ISO2022CN_st,
+ 'charLenTable': ISO2022CNCharLenTable,
+ 'name': "ISO-2022-CN"}
+
+ISO2022JP_cls = (
+2,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,2,2, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,7,0,0,0, # 20 - 27
+3,0,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+6,0,4,0,8,0,0,0, # 40 - 47
+0,9,5,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,0,0,0,0,0, # 78 - 7f
+2,2,2,2,2,2,2,2, # 80 - 87
+2,2,2,2,2,2,2,2, # 88 - 8f
+2,2,2,2,2,2,2,2, # 90 - 97
+2,2,2,2,2,2,2,2, # 98 - 9f
+2,2,2,2,2,2,2,2, # a0 - a7
+2,2,2,2,2,2,2,2, # a8 - af
+2,2,2,2,2,2,2,2, # b0 - b7
+2,2,2,2,2,2,2,2, # b8 - bf
+2,2,2,2,2,2,2,2, # c0 - c7
+2,2,2,2,2,2,2,2, # c8 - cf
+2,2,2,2,2,2,2,2, # d0 - d7
+2,2,2,2,2,2,2,2, # d8 - df
+2,2,2,2,2,2,2,2, # e0 - e7
+2,2,2,2,2,2,2,2, # e8 - ef
+2,2,2,2,2,2,2,2, # f0 - f7
+2,2,2,2,2,2,2,2, # f8 - ff
+)
+
+ISO2022JP_st = (
+eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
+eStart,eStart,eError,eError,eError,eError,eError,eError,# 08-0f
+eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
+eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,# 18-1f
+eError, 5,eError,eError,eError, 4,eError,eError,# 20-27
+eError,eError,eError, 6,eItsMe,eError,eItsMe,eError,# 28-2f
+eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,# 30-37
+eError,eError,eError,eItsMe,eError,eError,eError,eError,# 38-3f
+eError,eError,eError,eError,eItsMe,eError,eStart,eStart,# 40-47
+)
+
+ISO2022JPCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
+
+ISO2022JPSMModel = {'classTable': ISO2022JP_cls,
+ 'classFactor': 10,
+ 'stateTable': ISO2022JP_st,
+ 'charLenTable': ISO2022JPCharLenTable,
+ 'name': "ISO-2022-JP"}
+
+ISO2022KR_cls = (
+2,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,0,0, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,3,0,0,0, # 20 - 27
+0,4,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+0,0,0,5,0,0,0,0, # 40 - 47
+0,0,0,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,0,0,0,0,0, # 78 - 7f
+2,2,2,2,2,2,2,2, # 80 - 87
+2,2,2,2,2,2,2,2, # 88 - 8f
+2,2,2,2,2,2,2,2, # 90 - 97
+2,2,2,2,2,2,2,2, # 98 - 9f
+2,2,2,2,2,2,2,2, # a0 - a7
+2,2,2,2,2,2,2,2, # a8 - af
+2,2,2,2,2,2,2,2, # b0 - b7
+2,2,2,2,2,2,2,2, # b8 - bf
+2,2,2,2,2,2,2,2, # c0 - c7
+2,2,2,2,2,2,2,2, # c8 - cf
+2,2,2,2,2,2,2,2, # d0 - d7
+2,2,2,2,2,2,2,2, # d8 - df
+2,2,2,2,2,2,2,2, # e0 - e7
+2,2,2,2,2,2,2,2, # e8 - ef
+2,2,2,2,2,2,2,2, # f0 - f7
+2,2,2,2,2,2,2,2, # f8 - ff
+)
+
+ISO2022KR_st = (
+eStart, 3,eError,eStart,eStart,eStart,eError,eError,# 00-07
+eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
+eItsMe,eItsMe,eError,eError,eError, 4,eError,eError,# 10-17
+eError,eError,eError,eError, 5,eError,eError,eError,# 18-1f
+eError,eError,eError,eItsMe,eStart,eStart,eStart,eStart,# 20-27
+)
+
+ISO2022KRCharLenTable = (0, 0, 0, 0, 0, 0)
+
+ISO2022KRSMModel = {'classTable': ISO2022KR_cls,
+ 'classFactor': 6,
+ 'stateTable': ISO2022KR_st,
+ 'charLenTable': ISO2022KRCharLenTable,
+ 'name': "ISO-2022-KR"}
+
+# flake8: noqa
diff --git a/third_party/python/requests/requests/packages/chardet/eucjpprober.py b/third_party/python/requests/requests/packages/chardet/eucjpprober.py
new file mode 100644
index 0000000000..8e64fdcc26
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/eucjpprober.py
@@ -0,0 +1,90 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import sys
+from . import constants
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCJPDistributionAnalysis
+from .jpcntx import EUCJPContextAnalysis
+from .mbcssm import EUCJPSMModel
+
+
+class EUCJPProber(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(EUCJPSMModel)
+ self._mDistributionAnalyzer = EUCJPDistributionAnalysis()
+ self._mContextAnalyzer = EUCJPContextAnalysis()
+ self.reset()
+
+ def reset(self):
+ MultiByteCharSetProber.reset(self)
+ self._mContextAnalyzer.reset()
+
+ def get_charset_name(self):
+ return "EUC-JP"
+
+ def feed(self, aBuf):
+ aLen = len(aBuf)
+ for i in range(0, aLen):
+ # PY3K: aBuf is a byte array, so aBuf[i] is an int, not a byte
+ codingState = self._mCodingSM.next_state(aBuf[i])
+ if codingState == constants.eError:
+ if constants._debug:
+ sys.stderr.write(self.get_charset_name()
+ + ' prober hit error at byte ' + str(i)
+ + '\n')
+ self._mState = constants.eNotMe
+ break
+ elif codingState == constants.eItsMe:
+ self._mState = constants.eFoundIt
+ break
+ elif codingState == constants.eStart:
+ charLen = self._mCodingSM.get_current_charlen()
+ if i == 0:
+ self._mLastChar[1] = aBuf[0]
+ self._mContextAnalyzer.feed(self._mLastChar, charLen)
+ self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
+ else:
+ self._mContextAnalyzer.feed(aBuf[i - 1:i + 1], charLen)
+ self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
+ charLen)
+
+ self._mLastChar[0] = aBuf[aLen - 1]
+
+ if self.get_state() == constants.eDetecting:
+ if (self._mContextAnalyzer.got_enough_data() and
+ (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
+ self._mState = constants.eFoundIt
+
+ return self.get_state()
+
+ def get_confidence(self):
+ contxtCf = self._mContextAnalyzer.get_confidence()
+ distribCf = self._mDistributionAnalyzer.get_confidence()
+ return max(contxtCf, distribCf)
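
For context: the EUC-JP prober combines its context analyzer and its distribution analyzer by taking whichever reports the larger confidence, so one strong signal is enough. A one-line sketch of that choice (the values below are made up):

    contxt_cf, distrib_cf = 0.42, 0.67
    confidence = max(contxt_cf, distrib_cf)   # 0.67 -- the stronger signal wins
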
diff --git a/third_party/python/requests/requests/packages/chardet/euckrfreq.py b/third_party/python/requests/requests/packages/chardet/euckrfreq.py
new file mode 100644
index 0000000000..a179e4c21c
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/euckrfreq.py
@@ -0,0 +1,596 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Sampling from about 20M of text material, including literature and computer technology
+
+# 128 --> 0.79
+# 256 --> 0.92
+# 512 --> 0.986
+# 1024 --> 0.99944
+# 2048 --> 0.99999
+#
+# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
+# Random Distribution Ratio = 512 / (2350-512) = 0.279.
+#
+# Typical Distribution Ratio
+
+EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
+
+EUCKR_TABLE_SIZE = 2352
+
+# Char to FreqOrder table
+EUCKRCharToFreqOrder = ( \
+ 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
+1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
+1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
+ 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
+ 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
+ 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
+1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
+ 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
+ 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
+1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
+1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
+1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
+1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
+1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
+ 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
+1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
+1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
+1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
+1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
+ 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
+1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
+ 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
+ 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
+1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
+ 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
+1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,
+ 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
+ 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
+1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
+1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,
+1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910,
+1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,
+ 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
+1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,
+ 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
+ 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,
+1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
+1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
+1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
+1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
+1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
+1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
+ 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
+ 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,
+ 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
+1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
+ 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
+1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,
+ 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,
+ 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
+2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,
+ 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,
+ 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
+2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,
+2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
+2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
+ 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
+ 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
+2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
+ 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
+1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
+2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,
+1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
+2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
+2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
+1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
+ 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
+2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
+2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
+ 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,
+ 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
+2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,
+1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
+2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,
+2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
+2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,
+2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
+2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,
+2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,
+1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
+2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
+2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
+2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
+2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
+2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,
+1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
+1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
+2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,
+1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,
+2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
+1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,
+ 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
+2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,
+ 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
+2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
+ 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,
+2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,
+2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,
+ 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
+2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,
+1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
+ 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
+1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
+2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
+1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
+2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
+ 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
+2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
+1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
+2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
+1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
+2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
+1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
+ 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
+2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
+2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
+ 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
+ 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,
+1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
+1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
+ 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
+2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
+2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
+ 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,
+ 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
+ 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
+2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
+ 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
+ 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
+2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
+2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
+ 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544,
+2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
+1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
+ 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,
+2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
+2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
+2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
+ 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,
+ 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
+ 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
+2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,
+2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
+2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
+1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
+2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
+ 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256
+# Everything below is of no interest for detection purposes
+2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658,
+2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674,
+2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690,
+2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704,
+2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720,
+2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734,
+2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750,
+2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765,
+2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779,
+2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793,
+2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809,
+2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824,
+2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840,
+2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856,
+1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869,
+2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883,
+2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899,
+2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915,
+2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331,
+2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945,
+2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961,
+2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976,
+2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992,
+2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008,
+3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021,
+3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037,
+3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052,
+3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066,
+3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080,
+3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095,
+3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110,
+3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124,
+3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140,
+3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156,
+3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172,
+3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187,
+3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201,
+3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217,
+3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233,
+3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248,
+3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264,
+3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279,
+3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295,
+3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311,
+3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327,
+3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343,
+3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359,
+3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374,
+3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389,
+3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405,
+3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338,
+3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432,
+3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446,
+3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191,
+3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471,
+3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486,
+1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499,
+1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513,
+3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525,
+3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541,
+3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557,
+3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573,
+3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587,
+3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603,
+3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618,
+3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632,
+3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648,
+3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663,
+3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679,
+3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695,
+3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583,
+1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722,
+3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738,
+3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753,
+3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767,
+3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782,
+3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796,
+3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810,
+3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591,
+1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836,
+3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851,
+3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866,
+3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880,
+3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895,
+1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905,
+3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921,
+3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934,
+3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603,
+3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964,
+3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978,
+3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993,
+3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009,
+4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024,
+4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040,
+1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055,
+4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069,
+4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083,
+4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098,
+4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113,
+4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610,
+4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142,
+4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157,
+4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173,
+4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189,
+4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205,
+4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220,
+4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234,
+4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249,
+4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265,
+4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279,
+4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294,
+4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310,
+4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326,
+4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341,
+4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357,
+4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371,
+4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387,
+4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403,
+4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418,
+4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432,
+4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446,
+4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461,
+4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476,
+4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491,
+4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507,
+4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623,
+4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536,
+4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551,
+4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567,
+4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581,
+4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627,
+4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611,
+4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626,
+4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642,
+4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657,
+4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672,
+4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687,
+1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700,
+4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715,
+4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731,
+4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633,
+4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758,
+4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773,
+4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788,
+4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803,
+4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817,
+4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832,
+4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847,
+4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863,
+4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879,
+4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893,
+4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909,
+4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923,
+4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938,
+4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954,
+4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970,
+4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645,
+4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999,
+5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078,
+5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028,
+1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042,
+5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056,
+5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072,
+5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087,
+5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103,
+5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118,
+1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132,
+5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,
+5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161,
+5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177,
+5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192,
+5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206,
+1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218,
+5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,
+5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249,
+5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262,
+5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,
+5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,
+5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308,
+5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323,
+5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338,
+5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353,
+5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369,
+5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385,
+5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400,
+5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415,
+5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430,
+5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445,
+5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461,
+5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477,
+5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491,
+5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507,
+5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523,
+5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539,
+5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554,
+5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570,
+1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585,
+5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600,
+5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615,
+5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,
+5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,
+5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,
+1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673,
+5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688,
+5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703,
+5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716,
+5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729,
+5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744,
+1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758,
+5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773,
+1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786,
+5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801,
+5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815,
+5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831,
+5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847,
+5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862,
+5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876,
+5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889,
+5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,
+5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,
+5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687,
+5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,
+5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963,
+5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,
+5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993,
+5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009,
+6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025,
+6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039,
+6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055,
+6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071,
+6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086,
+6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102,
+6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118,
+6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133,
+6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147,
+6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,
+6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,
+6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194,
+6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,
+6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225,
+6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,
+6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256,
+6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271,
+6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287,
+6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699,
+6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317,
+6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,
+6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347,
+6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,
+6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,
+6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,
+6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411,
+6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425,
+6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440,
+6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456,
+6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,
+6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488,
+6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266,
+6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,
+6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535,
+6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551,
+1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565,
+6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581,
+6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597,
+6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613,
+6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629,
+6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644,
+1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659,
+6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674,
+1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689,
+6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705,
+6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721,
+6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736,
+1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748,
+6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763,
+6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779,
+6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794,
+6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711,
+6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825,
+6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840,
+6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856,
+6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872,
+6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888,
+6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903,
+6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918,
+6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934,
+6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950,
+6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,
+6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981,
+6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996,
+6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011,
+7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,
+7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042,
+7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,
+7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074,
+7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090,
+7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106,
+7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122,
+7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138,
+7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154,
+7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170,
+7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186,
+7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202,
+7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216,
+7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232,
+7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248,
+7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264,
+7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280,
+7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296,
+7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312,
+7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327,
+7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343,
+7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359,
+7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375,
+7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391,
+7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407,
+7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423,
+7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439,
+7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455,
+7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,
+7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,
+7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503,
+7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,
+7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,
+7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551,
+7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567,
+7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583,
+7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599,
+7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615,
+7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631,
+7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647,
+7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663,
+7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679,
+7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695,
+7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711,
+7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727,
+7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743,
+7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759,
+7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,
+7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,
+7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807,
+7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,
+7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,
+7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,
+7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,
+7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,
+7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,
+7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,
+7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935,
+7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951,
+7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967,
+7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983,
+7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999,
+8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,
+8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031,
+8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047,
+8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,
+8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,
+8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,
+8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111,
+8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127,
+8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,
+8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,
+8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,
+8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,
+8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,
+8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,
+8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,
+8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,
+8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,
+8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,
+8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,
+8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,
+8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,
+8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,
+8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,
+8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,
+8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,
+8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,
+8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,
+8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,
+8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,
+8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,
+8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,
+8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,
+8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,
+8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,
+8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,
+8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,
+8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,
+8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,
+8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,
+8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,
+8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,
+8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,
+8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,
+8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,
+8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,
+8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,
+8736,8737,8738,8739,8740,8741)
+
+# flake8: noqa
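The block of integers above is the tail of the EUC-KR char-to-frequency-order table and is opaque on its own. As a reading aid, here is a minimal sketch of the distribution-analysis idea these tables feed into; it is a paraphrase of chardet's CharDistributionAnalysis, not the vendored code, and the 512-entry cutoff and confidence formula follow the header comments on the sibling frequency tables below.

    # Sketch only: simplified restatement of chardet's frequency-distribution test.
    # A char-to-freq-order table maps each character to its frequency rank; ranks
    # below FREQ_CAT_NUM count as "frequent" for the encoding under test.
    FREQ_CAT_NUM = 512

    def distribution_confidence(freq_orders, typical_distribution_ratio):
        """freq_orders: frequency rank looked up for each decoded character."""
        total = len(freq_orders)
        freq = sum(1 for order in freq_orders if order < FREQ_CAT_NUM)
        if total == 0 or freq == total:
            return 0.99
        # Ratio of frequent to infrequent characters, normalised by the typical
        # ratio for this encoding and clipped to chardet's 0.99 ceiling.
        r = freq / ((total - freq) * typical_distribution_ratio)
        return min(r, 0.99)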
diff --git a/third_party/python/requests/requests/packages/chardet/euckrprober.py b/third_party/python/requests/requests/packages/chardet/euckrprober.py
new file mode 100644
index 0000000000..5982a46b60
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/euckrprober.py
@@ -0,0 +1,42 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCKRDistributionAnalysis
+from .mbcssm import EUCKRSMModel
+
+
+class EUCKRProber(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(EUCKRSMModel)
+ self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
+ self.reset()
+
+ def get_charset_name(self):
+ return "EUC-KR"
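A minimal usage sketch for a prober like the one above, assuming the feed()/get_confidence()/get_charset_name() interface it inherits from MultiByteCharSetProber and the vendored import path shown in this diff:

    # Sketch only: drive the prober with raw bytes and read back its verdict.
    from requests.packages.chardet.euckrprober import EUCKRProber

    prober = EUCKRProber()
    prober.feed("안녕하세요".encode("euc-kr"))  # feed() may be called repeatedly with chunks
    print(prober.get_charset_name())            # "EUC-KR"
    print(prober.get_confidence())              # float, capped at 0.99 by chardet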
diff --git a/third_party/python/requests/requests/packages/chardet/euctwfreq.py b/third_party/python/requests/requests/packages/chardet/euctwfreq.py
new file mode 100644
index 0000000000..576e7504dc
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/euctwfreq.py
@@ -0,0 +1,428 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# EUCTW frequency table
+# Converted from big5 work
+# by Taiwan's Mandarin Promotion Council
+# <http:#www.edu.tw:81/mandr/>
+
+# 128 --> 0.42261
+# 256 --> 0.57851
+# 512 --> 0.74851
+# 1024 --> 0.89384
+# 2048 --> 0.97583
+#
+# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98
+# Random Distribution Ratio = 512/(5401-512) = 0.105
+#
+# Typical Distribution Ratio is about 25% of the ideal one, still much higher than RDR
+
+EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
+
+# Char to FreqOrder table
+EUCTW_TABLE_SIZE = 8102
+
+EUCTWCharToFreqOrder = (
+ 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
+3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
+1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
+ 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
+3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
+4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
+7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
+ 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
+ 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
+ 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
+2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
+1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
+3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
+ 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
+1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
+3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
+2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
+ 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
+3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
+1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
+7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
+ 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
+7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
+1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
+ 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
+ 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
+3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
+3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
+ 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
+2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
+2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
+ 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
+ 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
+3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
+1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
+1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
+1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
+2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
+ 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
+4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
+1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
+7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
+2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
+ 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
+ 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
+ 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
+ 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
+7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
+ 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
+1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
+ 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
+ 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
+7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
+1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
+ 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
+3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
+4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
+3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
+ 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
+ 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
+1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
+4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
+3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
+3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
+2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
+7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
+3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
+7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
+1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
+2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
+1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
+ 78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
+1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
+4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
+3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
+ 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
+ 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
+ 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
+2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
+7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
+1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
+2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
+1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
+1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
+7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
+7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
+7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
+3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
+4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
+1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
+7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
+2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
+7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
+3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
+3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
+7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
+2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
+7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
+ 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
+4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
+2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
+7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
+3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
+2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
+2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
+ 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
+2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
+1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
+1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
+2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
+1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
+7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
+7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
+2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
+4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
+1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
+7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
+ 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
+4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
+ 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
+2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
+ 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
+1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
+1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
+ 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
+3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
+3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
+1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
+3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
+7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
+7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
+1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
+2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
+1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
+3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
+2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
+3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
+2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
+4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
+4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
+3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
+ 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
+3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
+ 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
+3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
+3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
+3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
+1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
+7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
+ 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
+7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
+1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
+ 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
+4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
+3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
+ 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
+2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
+2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
+3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
+1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
+4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
+2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
+1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
+1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
+2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
+3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
+1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
+7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
+1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
+4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
+1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
+ 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
+1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
+3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
+3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
+2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
+1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
+4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
+ 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
+7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
+2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
+3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
+4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
+ 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
+7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
+7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
+1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
+4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
+3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
+2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
+3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
+3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
+2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
+1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
+4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
+3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
+3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
+2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
+4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
+7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
+3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
+2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
+3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
+1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
+2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
+3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
+4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
+2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
+2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
+7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
+1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
+2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
+1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
+3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
+4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
+2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
+3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
+3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
+2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
+4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
+2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
+3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
+4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
+7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
+3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
+ 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
+1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
+4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
+1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
+4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
+7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
+ 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
+7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
+2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
+1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
+1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
+3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
+ 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
+ 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
+ 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
+3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
+2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
+ 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
+7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
+1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
+3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
+7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
+1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
+7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
+4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
+1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
+2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
+2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
+4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
+ 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
+ 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
+3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
+3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
+1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
+2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
+7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
+1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
+1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
+3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
+ 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
+1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
+4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
+7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
+2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
+3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
+ 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
+1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
+2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
+2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
+7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
+7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
+7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
+2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
+2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
+1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
+4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
+3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
+3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
+4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
+4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
+2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
+2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
+7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
+4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
+7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
+2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
+1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
+3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
+4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
+2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
+ 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
+2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
+1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
+2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
+2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
+4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
+7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
+1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
+3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
+7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
+1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
+8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
+2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
+8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
+2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
+2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
+8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
+8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
+8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
+ 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
+8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
+4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
+3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
+8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
+1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
+8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
+ 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
+1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
+ 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
+4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
+1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
+4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
+1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
+ 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
+3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
+4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
+8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
+ 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
+3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
+ 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
+2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
+# Everything below is of no interest for detection purposes
+2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118
+2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134
+8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150
+8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166
+8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182
+8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198
+8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214
+8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230
+8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246
+8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262
+8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278
+8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294
+8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310
+8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326
+8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342
+8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358
+8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374
+8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390
+8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406
+8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422
+8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438
+8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454
+8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470
+8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486
+8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502
+8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518
+8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534
+8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550
+8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566
+8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582
+8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598
+8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614
+8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630
+8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646
+8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662
+8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678
+8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694
+8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710
+8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726
+8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742
+
+# flake8: noqa
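The ratios quoted in the euctwfreq.py header comments can be re-derived directly from the figures given there; a quick worked check, using only the numbers stated in that comment block:

    # Sketch only: verify the ratios stated in the EUCTW header comments.
    coverage_at_512 = 0.74851            # share of text covered by the 512 most frequent chars
    ideal_ratio = coverage_at_512 / (1 - coverage_at_512)
    random_ratio = 512 / (5401 - 512)    # expected ratio under a uniform character distribution
    print(round(ideal_ratio, 2))         # 2.98, as stated
    print(round(random_ratio, 3))        # 0.105, as stated
    # EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 is roughly 25% of ideal_ratio (2.98 * 0.25 ≈ 0.75).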
diff --git a/third_party/python/requests/requests/packages/chardet/euctwprober.py b/third_party/python/requests/requests/packages/chardet/euctwprober.py
new file mode 100644
index 0000000000..fe652fe37a
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/euctwprober.py
@@ -0,0 +1,41 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCTWDistributionAnalysis
+from .mbcssm import EUCTWSMModel
+
+class EUCTWProber(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(EUCTWSMModel)
+ self._mDistributionAnalyzer = EUCTWDistributionAnalysis()
+ self.reset()
+
+ def get_charset_name(self):
+ return "EUC-TW"
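EUCTWProber differs from EUCKRProber only in the state machine and distribution analyzer it plugs in; in practice such probers are run side by side rather than queried one at a time. A rough sketch of that pattern follows (a simplified stand-in for chardet's multi-byte group prober, not the vendored implementation):

    # Sketch only: run several single-encoding probers over the same data and
    # keep the most confident verdict.
    from requests.packages.chardet.euckrprober import EUCKRProber
    from requests.packages.chardet.euctwprober import EUCTWProber

    def best_guess(data):
        probers = [EUCKRProber(), EUCTWProber()]
        for prober in probers:
            prober.feed(data)
        best = max(probers, key=lambda p: p.get_confidence())
        return best.get_charset_name(), best.get_confidence()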
diff --git a/third_party/python/requests/requests/packages/chardet/gb2312freq.py b/third_party/python/requests/requests/packages/chardet/gb2312freq.py
new file mode 100644
index 0000000000..1238f510fc
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/gb2312freq.py
@@ -0,0 +1,472 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# GB2312 most frequently used character table
+#
+# Char to FreqOrder table, from hz6763
+
+# 512 --> 0.79 -- 0.79
+# 1024 --> 0.92 -- 0.13
+# 2048 --> 0.98 -- 0.06
+# 6768 --> 1.00 -- 0.02
+#
+# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79
+# Random Distribution Ratio = 512 / (3755 - 512) = 0.157
+#
+# Typical Distribution Ratio is about 25% of the Ideal one, still much higher than RDR
+
+GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
+
+GB2312_TABLE_SIZE = 3760
+
+GB2312CharToFreqOrder = (
+1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
+2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
+2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
+ 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
+1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
+1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
+ 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
+1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575,
+2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
+3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
+ 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
+1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
+ 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
+2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606,
+ 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
+2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
+1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
+3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052,
+ 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
+1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
+ 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
+2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
+1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26,
+3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
+1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
+2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
+1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
+ 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
+3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403,
+3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
+ 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
+3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940,
+ 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121,
+1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648,
+3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992,
+2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233,
+1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157,
+ 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807,
+1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094,
+4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258,
+ 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478,
+3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152,
+3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909,
+ 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272,
+1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221,
+2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252,
+1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301,
+1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254,
+ 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070,
+3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461,
+3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360,
+4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124,
+ 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535,
+3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243,
+1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713,
+1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071,
+4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442,
+ 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946,
+ 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257,
+3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180,
+1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427,
+ 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781,
+1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724,
+2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937,
+ 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943,
+ 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789,
+ 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552,
+3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246,
+4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451,
+3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310,
+ 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860,
+2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297,
+2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780,
+2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745,
+ 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936,
+2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032,
+ 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657,
+ 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414,
+ 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976,
+3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436,
+2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254,
+2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536,
+1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238,
+ 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059,
+2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741,
+ 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447,
+ 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601,
+1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269,
+1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894,
+ 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173,
+ 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994,
+1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956,
+2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437,
+3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154,
+2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240,
+2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143,
+2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634,
+3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472,
+1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541,
+1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143,
+2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312,
+1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414,
+3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754,
+1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424,
+1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302,
+3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739,
+ 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004,
+2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484,
+1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739,
+4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535,
+1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641,
+1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307,
+3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573,
+1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533,
+ 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965,
+ 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99,
+1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280,
+ 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505,
+1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012,
+1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039,
+ 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982,
+3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530,
+4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392,
+3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656,
+2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220,
+2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766,
+1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535,
+3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728,
+2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338,
+1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627,
+1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885,
+ 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411,
+2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671,
+2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162,
+3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774,
+4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524,
+3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346,
+ 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040,
+3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188,
+2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280,
+1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131,
+ 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947,
+ 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970,
+3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814,
+4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557,
+2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997,
+1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972,
+1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369,
+ 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376,
+1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480,
+3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610,
+ 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128,
+ 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769,
+1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207,
+ 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392,
+1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623,
+ 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782,
+2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650,
+ 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478,
+2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773,
+2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007,
+1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323,
+1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598,
+2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961,
+ 819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302,
+1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409,
+1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683,
+2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191,
+2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616,
+3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302,
+1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774,
+4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147,
+ 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731,
+ 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464,
+3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377,
+1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315,
+ 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557,
+3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903,
+1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060,
+4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261,
+1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092,
+2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810,
+1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708,
+ 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658,
+1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871,
+3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503,
+ 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229,
+2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112,
+ 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504,
+1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389,
+1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27,
+1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542,
+3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861,
+2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845,
+3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700,
+3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469,
+3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582,
+ 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999,
+2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274,
+ 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020,
+2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601,
+ 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628,
+1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31,
+ 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668,
+ 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778,
+1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169,
+3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667,
+3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881,
+1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276,
+1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320,
+3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751,
+2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432,
+2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772,
+1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843,
+3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116,
+ 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904,
+4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652,
+1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664,
+2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770,
+3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283,
+3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626,
+1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713,
+ 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333,
+ 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062,
+2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555,
+ 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014,
+1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510,
+ 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015,
+1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459,
+1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390,
+1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238,
+1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232,
+1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,
+ 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,
+ 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, # last 512
+#Everything below is of no interest for detection purpose
+5508,6484,3900,3414,3974,4441,4024,3537,4037,5628,5099,3633,6485,3148,6486,3636,
+5509,3257,5510,5973,5445,5872,4941,4403,3174,4627,5873,6276,2286,4230,5446,5874,
+5122,6102,6103,4162,5447,5123,5323,4849,6277,3980,3851,5066,4246,5774,5067,6278,
+3001,2807,5695,3346,5775,5974,5158,5448,6487,5975,5976,5776,3598,6279,5696,4806,
+4211,4154,6280,6488,6489,6490,6281,4212,5037,3374,4171,6491,4562,4807,4722,4827,
+5977,6104,4532,4079,5159,5324,5160,4404,3858,5359,5875,3975,4288,4610,3486,4512,
+5325,3893,5360,6282,6283,5560,2522,4231,5978,5186,5449,2569,3878,6284,5401,3578,
+4415,6285,4656,5124,5979,2506,4247,4449,3219,3417,4334,4969,4329,6492,4576,4828,
+4172,4416,4829,5402,6286,3927,3852,5361,4369,4830,4477,4867,5876,4173,6493,6105,
+4657,6287,6106,5877,5450,6494,4155,4868,5451,3700,5629,4384,6288,6289,5878,3189,
+4881,6107,6290,6495,4513,6496,4692,4515,4723,5100,3356,6497,6291,3810,4080,5561,
+3570,4430,5980,6498,4355,5697,6499,4724,6108,6109,3764,4050,5038,5879,4093,3226,
+6292,5068,5217,4693,3342,5630,3504,4831,4377,4466,4309,5698,4431,5777,6293,5778,
+4272,3706,6110,5326,3752,4676,5327,4273,5403,4767,5631,6500,5699,5880,3475,5039,
+6294,5562,5125,4348,4301,4482,4068,5126,4593,5700,3380,3462,5981,5563,3824,5404,
+4970,5511,3825,4738,6295,6501,5452,4516,6111,5881,5564,6502,6296,5982,6503,4213,
+4163,3454,6504,6112,4009,4450,6113,4658,6297,6114,3035,6505,6115,3995,4904,4739,
+4563,4942,4110,5040,3661,3928,5362,3674,6506,5292,3612,4791,5565,4149,5983,5328,
+5259,5021,4725,4577,4564,4517,4364,6298,5405,4578,5260,4594,4156,4157,5453,3592,
+3491,6507,5127,5512,4709,4922,5984,5701,4726,4289,6508,4015,6116,5128,4628,3424,
+4241,5779,6299,4905,6509,6510,5454,5702,5780,6300,4365,4923,3971,6511,5161,3270,
+3158,5985,4100, 867,5129,5703,6117,5363,3695,3301,5513,4467,6118,6512,5455,4232,
+4242,4629,6513,3959,4478,6514,5514,5329,5986,4850,5162,5566,3846,4694,6119,5456,
+4869,5781,3779,6301,5704,5987,5515,4710,6302,5882,6120,4392,5364,5705,6515,6121,
+6516,6517,3736,5988,5457,5989,4695,2457,5883,4551,5782,6303,6304,6305,5130,4971,
+6122,5163,6123,4870,3263,5365,3150,4871,6518,6306,5783,5069,5706,3513,3498,4409,
+5330,5632,5366,5458,5459,3991,5990,4502,3324,5991,5784,3696,4518,5633,4119,6519,
+4630,5634,4417,5707,4832,5992,3418,6124,5993,5567,4768,5218,6520,4595,3458,5367,
+6125,5635,6126,4202,6521,4740,4924,6307,3981,4069,4385,6308,3883,2675,4051,3834,
+4302,4483,5568,5994,4972,4101,5368,6309,5164,5884,3922,6127,6522,6523,5261,5460,
+5187,4164,5219,3538,5516,4111,3524,5995,6310,6311,5369,3181,3386,2484,5188,3464,
+5569,3627,5708,6524,5406,5165,4677,4492,6312,4872,4851,5885,4468,5996,6313,5709,
+5710,6128,2470,5886,6314,5293,4882,5785,3325,5461,5101,6129,5711,5786,6525,4906,
+6526,6527,4418,5887,5712,4808,2907,3701,5713,5888,6528,3765,5636,5331,6529,6530,
+3593,5889,3637,4943,3692,5714,5787,4925,6315,6130,5462,4405,6131,6132,6316,5262,
+6531,6532,5715,3859,5716,5070,4696,5102,3929,5788,3987,4792,5997,6533,6534,3920,
+4809,5000,5998,6535,2974,5370,6317,5189,5263,5717,3826,6536,3953,5001,4883,3190,
+5463,5890,4973,5999,4741,6133,6134,3607,5570,6000,4711,3362,3630,4552,5041,6318,
+6001,2950,2953,5637,4646,5371,4944,6002,2044,4120,3429,6319,6537,5103,4833,6538,
+6539,4884,4647,3884,6003,6004,4758,3835,5220,5789,4565,5407,6540,6135,5294,4697,
+4852,6320,6321,3206,4907,6541,6322,4945,6542,6136,6543,6323,6005,4631,3519,6544,
+5891,6545,5464,3784,5221,6546,5571,4659,6547,6324,6137,5190,6548,3853,6549,4016,
+4834,3954,6138,5332,3827,4017,3210,3546,4469,5408,5718,3505,4648,5790,5131,5638,
+5791,5465,4727,4318,6325,6326,5792,4553,4010,4698,3439,4974,3638,4335,3085,6006,
+5104,5042,5166,5892,5572,6327,4356,4519,5222,5573,5333,5793,5043,6550,5639,5071,
+4503,6328,6139,6551,6140,3914,3901,5372,6007,5640,4728,4793,3976,3836,4885,6552,
+4127,6553,4451,4102,5002,6554,3686,5105,6555,5191,5072,5295,4611,5794,5296,6556,
+5893,5264,5894,4975,5466,5265,4699,4976,4370,4056,3492,5044,4886,6557,5795,4432,
+4769,4357,5467,3940,4660,4290,6141,4484,4770,4661,3992,6329,4025,4662,5022,4632,
+4835,4070,5297,4663,4596,5574,5132,5409,5895,6142,4504,5192,4664,5796,5896,3885,
+5575,5797,5023,4810,5798,3732,5223,4712,5298,4084,5334,5468,6143,4052,4053,4336,
+4977,4794,6558,5335,4908,5576,5224,4233,5024,4128,5469,5225,4873,6008,5045,4729,
+4742,4633,3675,4597,6559,5897,5133,5577,5003,5641,5719,6330,6560,3017,2382,3854,
+4406,4811,6331,4393,3964,4946,6561,2420,3722,6562,4926,4378,3247,1736,4442,6332,
+5134,6333,5226,3996,2918,5470,4319,4003,4598,4743,4744,4485,3785,3902,5167,5004,
+5373,4394,5898,6144,4874,1793,3997,6334,4085,4214,5106,5642,4909,5799,6009,4419,
+4189,3330,5899,4165,4420,5299,5720,5227,3347,6145,4081,6335,2876,3930,6146,3293,
+3786,3910,3998,5900,5300,5578,2840,6563,5901,5579,6147,3531,5374,6564,6565,5580,
+4759,5375,6566,6148,3559,5643,6336,6010,5517,6337,6338,5721,5902,3873,6011,6339,
+6567,5518,3868,3649,5722,6568,4771,4947,6569,6149,4812,6570,2853,5471,6340,6341,
+5644,4795,6342,6012,5723,6343,5724,6013,4349,6344,3160,6150,5193,4599,4514,4493,
+5168,4320,6345,4927,3666,4745,5169,5903,5005,4928,6346,5725,6014,4730,4203,5046,
+4948,3395,5170,6015,4150,6016,5726,5519,6347,5047,3550,6151,6348,4197,4310,5904,
+6571,5581,2965,6152,4978,3960,4291,5135,6572,5301,5727,4129,4026,5905,4853,5728,
+5472,6153,6349,4533,2700,4505,5336,4678,3583,5073,2994,4486,3043,4554,5520,6350,
+6017,5800,4487,6351,3931,4103,5376,6352,4011,4321,4311,4190,5136,6018,3988,3233,
+4350,5906,5645,4198,6573,5107,3432,4191,3435,5582,6574,4139,5410,6353,5411,3944,
+5583,5074,3198,6575,6354,4358,6576,5302,4600,5584,5194,5412,6577,6578,5585,5413,
+5303,4248,5414,3879,4433,6579,4479,5025,4854,5415,6355,4760,4772,3683,2978,4700,
+3797,4452,3965,3932,3721,4910,5801,6580,5195,3551,5907,3221,3471,3029,6019,3999,
+5908,5909,5266,5267,3444,3023,3828,3170,4796,5646,4979,4259,6356,5647,5337,3694,
+6357,5648,5338,4520,4322,5802,3031,3759,4071,6020,5586,4836,4386,5048,6581,3571,
+4679,4174,4949,6154,4813,3787,3402,3822,3958,3215,3552,5268,4387,3933,4950,4359,
+6021,5910,5075,3579,6358,4234,4566,5521,6359,3613,5049,6022,5911,3375,3702,3178,
+4911,5339,4521,6582,6583,4395,3087,3811,5377,6023,6360,6155,4027,5171,5649,4421,
+4249,2804,6584,2270,6585,4000,4235,3045,6156,5137,5729,4140,4312,3886,6361,4330,
+6157,4215,6158,3500,3676,4929,4331,3713,4930,5912,4265,3776,3368,5587,4470,4855,
+3038,4980,3631,6159,6160,4132,4680,6161,6362,3923,4379,5588,4255,6586,4121,6587,
+6363,4649,6364,3288,4773,4774,6162,6024,6365,3543,6588,4274,3107,3737,5050,5803,
+4797,4522,5589,5051,5730,3714,4887,5378,4001,4523,6163,5026,5522,4701,4175,2791,
+3760,6589,5473,4224,4133,3847,4814,4815,4775,3259,5416,6590,2738,6164,6025,5304,
+3733,5076,5650,4816,5590,6591,6165,6592,3934,5269,6593,3396,5340,6594,5804,3445,
+3602,4042,4488,5731,5732,3525,5591,4601,5196,6166,6026,5172,3642,4612,3202,4506,
+4798,6366,3818,5108,4303,5138,5139,4776,3332,4304,2915,3415,4434,5077,5109,4856,
+2879,5305,4817,6595,5913,3104,3144,3903,4634,5341,3133,5110,5651,5805,6167,4057,
+5592,2945,4371,5593,6596,3474,4182,6367,6597,6168,4507,4279,6598,2822,6599,4777,
+4713,5594,3829,6169,3887,5417,6170,3653,5474,6368,4216,2971,5228,3790,4579,6369,
+5733,6600,6601,4951,4746,4555,6602,5418,5475,6027,3400,4665,5806,6171,4799,6028,
+5052,6172,3343,4800,4747,5006,6370,4556,4217,5476,4396,5229,5379,5477,3839,5914,
+5652,5807,4714,3068,4635,5808,6173,5342,4192,5078,5419,5523,5734,6174,4557,6175,
+4602,6371,6176,6603,5809,6372,5735,4260,3869,5111,5230,6029,5112,6177,3126,4681,
+5524,5915,2706,3563,4748,3130,6178,4018,5525,6604,6605,5478,4012,4837,6606,4534,
+4193,5810,4857,3615,5479,6030,4082,3697,3539,4086,5270,3662,4508,4931,5916,4912,
+5811,5027,3888,6607,4397,3527,3302,3798,2775,2921,2637,3966,4122,4388,4028,4054,
+1633,4858,5079,3024,5007,3982,3412,5736,6608,3426,3236,5595,3030,6179,3427,3336,
+3279,3110,6373,3874,3039,5080,5917,5140,4489,3119,6374,5812,3405,4494,6031,4666,
+4141,6180,4166,6032,5813,4981,6609,5081,4422,4982,4112,3915,5653,3296,3983,6375,
+4266,4410,5654,6610,6181,3436,5082,6611,5380,6033,3819,5596,4535,5231,5306,5113,
+6612,4952,5918,4275,3113,6613,6376,6182,6183,5814,3073,4731,4838,5008,3831,6614,
+4888,3090,3848,4280,5526,5232,3014,5655,5009,5737,5420,5527,6615,5815,5343,5173,
+5381,4818,6616,3151,4953,6617,5738,2796,3204,4360,2989,4281,5739,5174,5421,5197,
+3132,5141,3849,5142,5528,5083,3799,3904,4839,5480,2880,4495,3448,6377,6184,5271,
+5919,3771,3193,6034,6035,5920,5010,6036,5597,6037,6378,6038,3106,5422,6618,5423,
+5424,4142,6619,4889,5084,4890,4313,5740,6620,3437,5175,5307,5816,4199,5198,5529,
+5817,5199,5656,4913,5028,5344,3850,6185,2955,5272,5011,5818,4567,4580,5029,5921,
+3616,5233,6621,6622,6186,4176,6039,6379,6380,3352,5200,5273,2908,5598,5234,3837,
+5308,6623,6624,5819,4496,4323,5309,5201,6625,6626,4983,3194,3838,4167,5530,5922,
+5274,6381,6382,3860,3861,5599,3333,4292,4509,6383,3553,5481,5820,5531,4778,6187,
+3955,3956,4324,4389,4218,3945,4325,3397,2681,5923,4779,5085,4019,5482,4891,5382,
+5383,6040,4682,3425,5275,4094,6627,5310,3015,5483,5657,4398,5924,3168,4819,6628,
+5925,6629,5532,4932,4613,6041,6630,4636,6384,4780,4204,5658,4423,5821,3989,4683,
+5822,6385,4954,6631,5345,6188,5425,5012,5384,3894,6386,4490,4104,6632,5741,5053,
+6633,5823,5926,5659,5660,5927,6634,5235,5742,5824,4840,4933,4820,6387,4859,5928,
+4955,6388,4143,3584,5825,5346,5013,6635,5661,6389,5014,5484,5743,4337,5176,5662,
+6390,2836,6391,3268,6392,6636,6042,5236,6637,4158,6638,5744,5663,4471,5347,3663,
+4123,5143,4293,3895,6639,6640,5311,5929,5826,3800,6189,6393,6190,5664,5348,3554,
+3594,4749,4603,6641,5385,4801,6043,5827,4183,6642,5312,5426,4761,6394,5665,6191,
+4715,2669,6643,6644,5533,3185,5427,5086,5930,5931,5386,6192,6044,6645,4781,4013,
+5745,4282,4435,5534,4390,4267,6045,5746,4984,6046,2743,6193,3501,4087,5485,5932,
+5428,4184,4095,5747,4061,5054,3058,3862,5933,5600,6646,5144,3618,6395,3131,5055,
+5313,6396,4650,4956,3855,6194,3896,5202,4985,4029,4225,6195,6647,5828,5486,5829,
+3589,3002,6648,6397,4782,5276,6649,6196,6650,4105,3803,4043,5237,5830,6398,4096,
+3643,6399,3528,6651,4453,3315,4637,6652,3984,6197,5535,3182,3339,6653,3096,2660,
+6400,6654,3449,5934,4250,4236,6047,6401,5831,6655,5487,3753,4062,5832,6198,6199,
+6656,3766,6657,3403,4667,6048,6658,4338,2897,5833,3880,2797,3780,4326,6659,5748,
+5015,6660,5387,4351,5601,4411,6661,3654,4424,5935,4339,4072,5277,4568,5536,6402,
+6662,5238,6663,5349,5203,6200,5204,6201,5145,4536,5016,5056,4762,5834,4399,4957,
+6202,6403,5666,5749,6664,4340,6665,5936,5177,5667,6666,6667,3459,4668,6404,6668,
+6669,4543,6203,6670,4276,6405,4480,5537,6671,4614,5205,5668,6672,3348,2193,4763,
+6406,6204,5937,5602,4177,5669,3419,6673,4020,6205,4443,4569,5388,3715,3639,6407,
+6049,4058,6206,6674,5938,4544,6050,4185,4294,4841,4651,4615,5488,6207,6408,6051,
+5178,3241,3509,5835,6208,4958,5836,4341,5489,5278,6209,2823,5538,5350,5206,5429,
+6675,4638,4875,4073,3516,4684,4914,4860,5939,5603,5389,6052,5057,3237,5490,3791,
+6676,6409,6677,4821,4915,4106,5351,5058,4243,5539,4244,5604,4842,4916,5239,3028,
+3716,5837,5114,5605,5390,5940,5430,6210,4332,6678,5540,4732,3667,3840,6053,4305,
+3408,5670,5541,6410,2744,5240,5750,6679,3234,5606,6680,5607,5671,3608,4283,4159,
+4400,5352,4783,6681,6411,6682,4491,4802,6211,6412,5941,6413,6414,5542,5751,6683,
+4669,3734,5942,6684,6415,5943,5059,3328,4670,4144,4268,6685,6686,6687,6688,4372,
+3603,6689,5944,5491,4373,3440,6416,5543,4784,4822,5608,3792,4616,5838,5672,3514,
+5391,6417,4892,6690,4639,6691,6054,5673,5839,6055,6692,6056,5392,6212,4038,5544,
+5674,4497,6057,6693,5840,4284,5675,4021,4545,5609,6418,4454,6419,6213,4113,4472,
+5314,3738,5087,5279,4074,5610,4959,4063,3179,4750,6058,6420,6214,3476,4498,4716,
+5431,4960,4685,6215,5241,6694,6421,6216,6695,5841,5945,6422,3748,5946,5179,3905,
+5752,5545,5947,4374,6217,4455,6423,4412,6218,4803,5353,6696,3832,5280,6219,4327,
+4702,6220,6221,6059,4652,5432,6424,3749,4751,6425,5753,4986,5393,4917,5948,5030,
+5754,4861,4733,6426,4703,6697,6222,4671,5949,4546,4961,5180,6223,5031,3316,5281,
+6698,4862,4295,4934,5207,3644,6427,5842,5950,6428,6429,4570,5843,5282,6430,6224,
+5088,3239,6060,6699,5844,5755,6061,6431,2701,5546,6432,5115,5676,4039,3993,3327,
+4752,4425,5315,6433,3941,6434,5677,4617,4604,3074,4581,6225,5433,6435,6226,6062,
+4823,5756,5116,6227,3717,5678,4717,5845,6436,5679,5846,6063,5847,6064,3977,3354,
+6437,3863,5117,6228,5547,5394,4499,4524,6229,4605,6230,4306,4500,6700,5951,6065,
+3693,5952,5089,4366,4918,6701,6231,5548,6232,6702,6438,4704,5434,6703,6704,5953,
+4168,6705,5680,3420,6706,5242,4407,6066,3812,5757,5090,5954,4672,4525,3481,5681,
+4618,5395,5354,5316,5955,6439,4962,6707,4526,6440,3465,4673,6067,6441,5682,6708,
+5435,5492,5758,5683,4619,4571,4674,4804,4893,4686,5493,4753,6233,6068,4269,6442,
+6234,5032,4705,5146,5243,5208,5848,6235,6443,4963,5033,4640,4226,6236,5849,3387,
+6444,6445,4436,4437,5850,4843,5494,4785,4894,6709,4361,6710,5091,5956,3331,6237,
+4987,5549,6069,6711,4342,3517,4473,5317,6070,6712,6071,4706,6446,5017,5355,6713,
+6714,4988,5436,6447,4734,5759,6715,4735,4547,4456,4754,6448,5851,6449,6450,3547,
+5852,5318,6451,6452,5092,4205,6716,6238,4620,4219,5611,6239,6072,4481,5760,5957,
+5958,4059,6240,6453,4227,4537,6241,5761,4030,4186,5244,5209,3761,4457,4876,3337,
+5495,5181,6242,5959,5319,5612,5684,5853,3493,5854,6073,4169,5613,5147,4895,6074,
+5210,6717,5182,6718,3830,6243,2798,3841,6075,6244,5855,5614,3604,4606,5496,5685,
+5118,5356,6719,6454,5960,5357,5961,6720,4145,3935,4621,5119,5962,4261,6721,6455,
+4786,5963,4375,4582,6245,6246,6247,6076,5437,4877,5856,3376,4380,6248,4160,6722,
+5148,6456,5211,6457,6723,4718,6458,6724,6249,5358,4044,3297,6459,6250,5857,5615,
+5497,5245,6460,5498,6725,6251,6252,5550,3793,5499,2959,5396,6461,6462,4572,5093,
+5500,5964,3806,4146,6463,4426,5762,5858,6077,6253,4755,3967,4220,5965,6254,4989,
+5501,6464,4352,6726,6078,4764,2290,5246,3906,5438,5283,3767,4964,2861,5763,5094,
+6255,6256,4622,5616,5859,5860,4707,6727,4285,4708,4824,5617,6257,5551,4787,5212,
+4965,4935,4687,6465,6728,6466,5686,6079,3494,4413,2995,5247,5966,5618,6729,5967,
+5764,5765,5687,5502,6730,6731,6080,5397,6467,4990,6258,6732,4538,5060,5619,6733,
+4719,5688,5439,5018,5149,5284,5503,6734,6081,4607,6259,5120,3645,5861,4583,6260,
+4584,4675,5620,4098,5440,6261,4863,2379,3306,4585,5552,5689,4586,5285,6735,4864,
+6736,5286,6082,6737,4623,3010,4788,4381,4558,5621,4587,4896,3698,3161,5248,4353,
+4045,6262,3754,5183,4588,6738,6263,6739,6740,5622,3936,6741,6468,6742,6264,5095,
+6469,4991,5968,6743,4992,6744,6083,4897,6745,4256,5766,4307,3108,3968,4444,5287,
+3889,4343,6084,4510,6085,4559,6086,4898,5969,6746,5623,5061,4919,5249,5250,5504,
+5441,6265,5320,4878,3242,5862,5251,3428,6087,6747,4237,5624,5442,6266,5553,4539,
+6748,2585,3533,5398,4262,6088,5150,4736,4438,6089,6267,5505,4966,6749,6268,6750,
+6269,5288,5554,3650,6090,6091,4624,6092,5690,6751,5863,4270,5691,4277,5555,5864,
+6752,5692,4720,4865,6470,5151,4688,4825,6753,3094,6754,6471,3235,4653,6755,5213,
+5399,6756,3201,4589,5865,4967,6472,5866,6473,5019,3016,6757,5321,4756,3957,4573,
+6093,4993,5767,4721,6474,6758,5625,6759,4458,6475,6270,6760,5556,4994,5214,5252,
+6271,3875,5768,6094,5034,5506,4376,5769,6761,2120,6476,5253,5770,6762,5771,5970,
+3990,5971,5557,5558,5772,6477,6095,2787,4641,5972,5121,6096,6097,6272,6763,3703,
+5867,5507,6273,4206,6274,4789,6098,6764,3619,3646,3833,3804,2394,3788,4936,3978,
+4866,4899,6099,6100,5559,6478,6765,3599,5868,6101,5869,5870,6275,6766,4527,6767)
+
+# flake8: noqa
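The header comments above describe how this table feeds chardet's character-distribution analysis: each observed two-byte character is mapped to a frequency order, and the ratio of top-512 ("frequent") characters to the rest is compared against the ideal ratio of about 3.79 and the random ratio of about 0.157. The following minimal Python sketch only illustrates that arithmetic; it is not the vendored chardistribution.py, and the function name and 512 cutoff are assumptions drawn from the comments above.

def gb2312_distribution_confidence(freq_orders, typical_ratio=0.9):
    # freq_orders: one frequency-order value per observed character, i.e. the
    # result of indexing GB2312CharToFreqOrder (the hypothetical caller's job).
    orders = list(freq_orders)
    if not orders:
        return 0.0
    frequent = sum(1 for o in orders if o < 512)   # falls in the top-512 set
    rare = len(orders) - frequent
    if rare == 0:
        return 0.99                                # all high-frequency: very sure
    observed = frequent / rare                     # ~3.79 for real GB2312 text
    return min(observed / typical_ratio, 0.99)     # ~0.17 for random bytes

# Real GB2312 text: 40 frequent of 50 chars -> (40/10)/0.9 = 4.4 -> capped at 0.99
# Random bytes:      7 frequent of 50 chars -> (7/43)/0.9  ~ 0.18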
diff --git a/third_party/python/requests/requests/packages/chardet/gb2312prober.py b/third_party/python/requests/requests/packages/chardet/gb2312prober.py
new file mode 100644
index 0000000000..0325a2d861
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/gb2312prober.py
@@ -0,0 +1,41 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import GB2312DistributionAnalysis
+from .mbcssm import GB2312SMModel
+
+class GB2312Prober(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(GB2312SMModel)
+ self._mDistributionAnalyzer = GB2312DistributionAnalysis()
+ self.reset()
+
+ def get_charset_name(self):
+ return "GB2312"
diff --git a/third_party/python/requests/requests/packages/chardet/hebrewprober.py b/third_party/python/requests/requests/packages/chardet/hebrewprober.py
new file mode 100644
index 0000000000..ba225c5ef4
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/hebrewprober.py
@@ -0,0 +1,283 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Shy Shalom
+# Portions created by the Initial Developer are Copyright (C) 2005
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .constants import eNotMe, eDetecting
+from .compat import wrap_ord
+
+# This prober doesn't actually recognize a language or a charset.
+# It is a helper prober for the use of the Hebrew model probers
+
+### General ideas of the Hebrew charset recognition ###
+#
+# Four main charsets exist in Hebrew:
+# "ISO-8859-8" - Visual Hebrew
+# "windows-1255" - Logical Hebrew
+# "ISO-8859-8-I" - Logical Hebrew
+# "x-mac-hebrew" - ?? Logical Hebrew ??
+#
+# Both "ISO" charsets use a completely identical set of code points, whereas
+# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
+# these code points. windows-1255 defines additional characters in the range
+# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
+# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
+# x-mac-hebrew defines similar additional code points but with a different
+# mapping.
+#
+# As far as an average Hebrew text with no diacritics is concerned, all four
+# charsets are identical with respect to code points. Meaning that for the
+# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
+# (including final letters).
+#
+# The dominant difference between these charsets is their directionality.
+# "Visual" directionality means that the text is ordered as if the renderer is
+# not aware of a BIDI rendering algorithm. The renderer sees the text and
+# draws it from left to right. The text itself when ordered naturally is read
+# backwards. A buffer of Visual Hebrew generally looks like so:
+# "[last word of first line spelled backwards] [whole line ordered backwards
+# and spelled backwards] [first word of first line spelled backwards]
+# [end of line] [last word of second line] ... etc' "
+# Adding punctuation marks, numbers and English text to visual text is
+# naturally also "visual" and from left to right.
+#
+# "Logical" directionality means the text is ordered "naturally" according to
+# the order it is read. It is the responsibility of the renderer to display
+# the text from right to left. A BIDI algorithm is used to place general
+# punctuation marks, numbers and English text in the text.
+#
+# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
+# what little evidence I could find, it seems that its general directionality
+# is Logical.
+#
+# To sum up all of the above, the Hebrew probing mechanism knows about two
+# charsets:
+# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
+# backwards while line order is natural. For charset recognition purposes
+# the line order is unimportant (In fact, for this implementation, even
+# word order is unimportant).
+# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
+#
+# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
+# specifically identified.
+# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
+# that contains special punctuation marks or diacritics is displayed with
+# some unconverted characters showing as question marks. This problem might
+# be corrected using another model prober for x-mac-hebrew. Due to the fact
+# that x-mac-hebrew texts are so rare, writing another model prober isn't
+# worth the effort and performance hit.
+#
+#### The Prober ####
+#
+# The prober is divided between two SBCharSetProbers and a HebrewProber,
+# all of which are managed, created, fed data, inquired and deleted by the
+# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
+# fact some kind of Hebrew, Logical or Visual. The final decision about which
+# one is it is made by the HebrewProber by combining final-letter scores
+# with the scores of the two SBCharSetProbers to produce a final answer.
+#
+# The SBCSGroupProber is responsible for stripping the original text of HTML
+# tags, English characters, numbers, low-ASCII punctuation characters, spaces
+# and new lines. It reduces any sequence of such characters to a single space.
+# The buffer fed to each prober in the SBCS group prober is pure text in
+# high-ASCII.
+# The two SBCharSetProbers (model probers) share the same language model:
+# Win1255Model.
+# The first SBCharSetProber uses the model normally as any other
+# SBCharSetProber does, to recognize windows-1255, upon which this model was
+# built. The second SBCharSetProber is told to make the pair-of-letter
+# lookup in the language model backwards. This in practice exactly simulates
+# a visual Hebrew model using the windows-1255 logical Hebrew model.
+#
+# The HebrewProber is not using any language model. All it does is look for
+# final-letter evidence suggesting the text is either logical Hebrew or visual
+# Hebrew. Disjointed from the model probers, the results of the HebrewProber
+# alone are meaningless. HebrewProber always returns 0.00 as confidence
+# since it never identifies a charset by itself. Instead, the pointer to the
+# HebrewProber is passed to the model probers as a helper "Name Prober".
+# When the Group prober receives a positive identification from any prober,
+# it asks for the name of the charset identified. If the prober queried is a
+# Hebrew model prober, the model prober forwards the call to the
+# HebrewProber to make the final decision. In the HebrewProber, the
+# decision is made according to the final-letter scores maintained and both
+# model probers' scores. The answer is returned in the form of the name of the
+# charset identified, either "windows-1255" or "ISO-8859-8".
+
+# windows-1255 / ISO-8859-8 code points of interest
+FINAL_KAF = 0xea
+NORMAL_KAF = 0xeb
+FINAL_MEM = 0xed
+NORMAL_MEM = 0xee
+FINAL_NUN = 0xef
+NORMAL_NUN = 0xf0
+FINAL_PE = 0xf3
+NORMAL_PE = 0xf4
+FINAL_TSADI = 0xf5
+NORMAL_TSADI = 0xf6
+
+# Minimum Visual vs Logical final letter score difference.
+# If the difference is below this, don't rely solely on the final letter score
+# distance.
+MIN_FINAL_CHAR_DISTANCE = 5
+
+# Minimum Visual vs Logical model score difference.
+# If the difference is below this, don't rely at all on the model score
+# distance.
+MIN_MODEL_DISTANCE = 0.01
+
+VISUAL_HEBREW_NAME = "ISO-8859-8"
+LOGICAL_HEBREW_NAME = "windows-1255"
+
+
+class HebrewProber(CharSetProber):
+ def __init__(self):
+ CharSetProber.__init__(self)
+ self._mLogicalProber = None
+ self._mVisualProber = None
+ self.reset()
+
+ def reset(self):
+ self._mFinalCharLogicalScore = 0
+ self._mFinalCharVisualScore = 0
+ # The two last characters seen in the previous buffer,
+ # mPrev and mBeforePrev are initialized to space in order to simulate
+ # a word delimiter at the beginning of the data
+ self._mPrev = ' '
+ self._mBeforePrev = ' '
+ # These probers are owned by the group prober.
+
+ def set_model_probers(self, logicalProber, visualProber):
+ self._mLogicalProber = logicalProber
+ self._mVisualProber = visualProber
+
+ def is_final(self, c):
+ return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
+ FINAL_TSADI]
+
+ def is_non_final(self, c):
+ # The normal Tsadi is not a good Non-Final letter due to words like
+ # 'lechotet' (to chat) containing an apostrophe after the tsadi. This
+ # apostrophe is converted to a space in FilterWithoutEnglishLetters
+ # causing the Non-Final tsadi to appear at an end of a word even
+ # though this is not the case in the original text.
+ # The letters Pe and Kaf rarely display a related behavior of not being
+ # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
+ # for example legally end with a Non-Final Pe or Kaf. However, the
+ # benefit of these letters as Non-Final letters outweighs the damage
+ # since these words are quite rare.
+ return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
+
+ def feed(self, aBuf):
+ # Final letter analysis for logical-visual decision.
+ # Look for evidence that the received buffer is either logical Hebrew
+ # or visual Hebrew.
+ # The following cases are checked:
+ # 1) A word longer than 1 letter, ending with a final letter. This is
+ # an indication that the text is laid out "naturally" since the
+ # final letter really appears at the end. +1 for logical score.
+ # 2) A word longer than 1 letter, ending with a Non-Final letter. In
+ # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
+ # should not end with the Non-Final form of that letter. Exceptions
+ # to this rule are mentioned above in isNonFinal(). This is an
+ # indication that the text is laid out backwards. +1 for visual
+ # score
+ # 3) A word longer than 1 letter, starting with a final letter. Final
+ # letters should not appear at the beginning of a word. This is an
+ # indication that the text is laid out backwards. +1 for visual
+ # score.
+ #
+ # The visual score and logical score are accumulated throughout the
+ # text and are finally checked against each other in GetCharSetName().
+ # No checking for final letters in the middle of words is done since
+ # that case is not an indication for either Logical or Visual text.
+ #
+ # We automatically filter out all 7-bit characters (replace them with
+ # spaces) so the word boundary detection works properly. [MAP]
+
+ if self.get_state() == eNotMe:
+ # Both model probers say it's not them. No reason to continue.
+ return eNotMe
+
+ aBuf = self.filter_high_bit_only(aBuf)
+
+ for cur in aBuf:
+ if cur == ' ':
+ # We stand on a space - a word just ended
+ if self._mBeforePrev != ' ':
+ # next-to-last char was not a space so self._mPrev is not a
+ # 1 letter word
+ if self.is_final(self._mPrev):
+ # case (1) [-2:not space][-1:final letter][cur:space]
+ self._mFinalCharLogicalScore += 1
+ elif self.is_non_final(self._mPrev):
+ # case (2) [-2:not space][-1:Non-Final letter][
+ # cur:space]
+ self._mFinalCharVisualScore += 1
+ else:
+ # Not standing on a space
+ if ((self._mBeforePrev == ' ') and
+ (self.is_final(self._mPrev)) and (cur != ' ')):
+ # case (3) [-2:space][-1:final letter][cur:not space]
+ self._mFinalCharVisualScore += 1
+ self._mBeforePrev = self._mPrev
+ self._mPrev = cur
+
+ # Forever detecting, till the end or until both model probers return
+ # eNotMe (handled above)
+ return eDetecting
+
+ def get_charset_name(self):
+ # Make the decision: is it Logical or Visual?
+ # If the final letter score distance is dominant enough, rely on it.
+ finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
+ if finalsub >= MIN_FINAL_CHAR_DISTANCE:
+ return LOGICAL_HEBREW_NAME
+ if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
+ return VISUAL_HEBREW_NAME
+
+ # It's not dominant enough, try to rely on the model scores instead.
+ modelsub = (self._mLogicalProber.get_confidence()
+ - self._mVisualProber.get_confidence())
+ if modelsub > MIN_MODEL_DISTANCE:
+ return LOGICAL_HEBREW_NAME
+ if modelsub < -MIN_MODEL_DISTANCE:
+ return VISUAL_HEBREW_NAME
+
+ # Still no good, back to final letter distance, maybe it'll save the
+ # day.
+ if finalsub < 0.0:
+ return VISUAL_HEBREW_NAME
+
+ # (finalsub > 0 - Logical) or (don't know what to do) default to
+ # Logical.
+ return LOGICAL_HEBREW_NAME
+
+ def get_state(self):
+ # Remain active as long as any of the model probers are active.
+ if (self._mLogicalProber.get_state() == eNotMe) and \
+ (self._mVisualProber.get_state() == eNotMe):
+ return eNotMe
+ return eDetecting
diff --git a/third_party/python/requests/requests/packages/chardet/jisfreq.py b/third_party/python/requests/requests/packages/chardet/jisfreq.py
new file mode 100644
index 0000000000..064345b086
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/jisfreq.py
@@ -0,0 +1,569 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Sampling from about 20M text materials, including literature and computer technology
+#
+# Japanese frequency table, applied to both S-JIS and EUC-JP
+# They are sorted in order.
+
+# 128 --> 0.77094
+# 256 --> 0.85710
+# 512 --> 0.92635
+# 1024 --> 0.97130
+# 2048 --> 0.99431
+#
+# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
+# Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191
+#
+# Typical Distribution Ratio, 25% of IDR
+
+JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0
+
+# Char to FreqOrder table
+JIS_TABLE_SIZE = 4368
+
+JISCharToFreqOrder = (
+ 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16
+3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32
+1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48
+2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64
+2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80
+5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96
+1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112
+5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128
+5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144
+5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160
+5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176
+5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192
+5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208
+1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224
+1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240
+1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256
+2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272
+3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288
+3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304
+ 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320
+ 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336
+1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352
+ 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368
+5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384
+ 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400
+ 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416
+ 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432
+ 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448
+ 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464
+5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480
+5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496
+5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512
+4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528
+5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544
+5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560
+5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576
+5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592
+5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608
+5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624
+5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640
+5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656
+5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672
+3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688
+5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704
+5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720
+5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736
+5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752
+5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768
+5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784
+5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800
+5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816
+5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832
+5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848
+5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864
+5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880
+5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896
+5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912
+5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928
+5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944
+5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960
+5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976
+5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992
+5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008
+5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024
+5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040
+5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056
+5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072
+5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088
+5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104
+5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120
+5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136
+5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152
+5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168
+5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184
+5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200
+5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216
+5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232
+5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248
+5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264
+5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280
+5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296
+6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312
+6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328
+6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344
+6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360
+6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376
+6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392
+6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408
+6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424
+4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440
+ 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456
+ 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472
+1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488
+1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504
+ 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520
+3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536
+3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552
+ 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568
+3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584
+3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600
+ 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616
+2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632
+ 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648
+3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664
+1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680
+ 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696
+1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712
+ 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728
+2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744
+2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760
+2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776
+2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792
+1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808
+1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824
+1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840
+1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856
+2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872
+1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888
+2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904
+1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920
+1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936
+1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952
+1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968
+1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984
+1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000
+ 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016
+ 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032
+1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048
+2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064
+2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080
+2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096
+3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112
+3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128
+ 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144
+3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160
+1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176
+ 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192
+2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208
+1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224
+ 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240
+3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256
+4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272
+2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288
+1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304
+2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320
+1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336
+ 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352
+ 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368
+1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384
+2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400
+2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416
+2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432
+3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448
+1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464
+2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480
+ 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496
+ 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512
+ 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528
+1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544
+2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560
+ 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576
+1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592
+1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608
+ 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624
+1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640
+1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656
+1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672
+ 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688
+2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704
+ 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720
+2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736
+3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752
+2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768
+1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784
+6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800
+1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816
+2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832
+1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848
+ 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864
+ 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880
+3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896
+3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912
+1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928
+1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944
+1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960
+1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976
+ 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992
+ 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008
+2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024
+ 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040
+3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056
+2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072
+ 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088
+1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104
+2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120
+ 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136
+1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152
+ 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168
+4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184
+2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200
+1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216
+ 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232
+1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248
+2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264
+ 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280
+6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296
+1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312
+1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328
+2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344
+3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360
+ 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376
+3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392
+1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408
+ 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424
+1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440
+ 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456
+3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472
+ 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488
+2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504
+ 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520
+4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536
+2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552
+1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568
+1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584
+1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600
+ 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616
+1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632
+3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648
+1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664
+3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680
+ 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696
+ 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712
+ 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728
+2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744
+1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760
+ 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776
+1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792
+ 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808
+1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824
+ 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840
+ 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856
+ 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872
+1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888
+1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904
+2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920
+4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936
+ 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952
+1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968
+ 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984
+1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000
+3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016
+1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032
+2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048
+2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064
+1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080
+1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096
+2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112
+ 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128
+2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144
+1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160
+1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176
+1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192
+1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208
+3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224
+2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240
+2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256
+ 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272
+3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288
+3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304
+1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320
+2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
+1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
+2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512
+#Everything below is of no interest for detection purposes
+2138,2122,3730,2888,1995,1820,1044,6190,6191,6192,6193,6194,6195,6196,6197,6198, # 4384
+6199,6200,6201,6202,6203,6204,6205,4670,6206,6207,6208,6209,6210,6211,6212,6213, # 4400
+6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229, # 4416
+6230,6231,6232,6233,6234,6235,6236,6237,3187,6238,6239,3969,6240,6241,6242,6243, # 4432
+6244,4671,6245,6246,4672,6247,6248,4133,6249,6250,4364,6251,2923,2556,2613,4673, # 4448
+4365,3970,6252,6253,6254,6255,4674,6256,6257,6258,2768,2353,4366,4675,4676,3188, # 4464
+4367,3463,6259,4134,4677,4678,6260,2267,6261,3842,3332,4368,3543,6262,6263,6264, # 4480
+3013,1954,1928,4135,4679,6265,6266,2478,3091,6267,4680,4369,6268,6269,1699,6270, # 4496
+3544,4136,4681,6271,4137,6272,4370,2804,6273,6274,2593,3971,3972,4682,6275,2236, # 4512
+4683,6276,6277,4684,6278,6279,4138,3973,4685,6280,6281,3258,6282,6283,6284,6285, # 4528
+3974,4686,2841,3975,6286,6287,3545,6288,6289,4139,4687,4140,6290,4141,6291,4142, # 4544
+6292,6293,3333,6294,6295,6296,4371,6297,3399,6298,6299,4372,3976,6300,6301,6302, # 4560
+4373,6303,6304,3843,3731,6305,4688,4374,6306,6307,3259,2294,6308,3732,2530,4143, # 4576
+6309,4689,6310,6311,6312,3048,6313,6314,4690,3733,2237,6315,6316,2282,3334,6317, # 4592
+6318,3844,6319,6320,4691,6321,3400,4692,6322,4693,6323,3049,6324,4375,6325,3977, # 4608
+6326,6327,6328,3546,6329,4694,3335,6330,4695,4696,6331,6332,6333,6334,4376,3978, # 4624
+6335,4697,3979,4144,6336,3980,4698,6337,6338,6339,6340,6341,4699,4700,4701,6342, # 4640
+6343,4702,6344,6345,4703,6346,6347,4704,6348,4705,4706,3135,6349,4707,6350,4708, # 4656
+6351,4377,6352,4709,3734,4145,6353,2506,4710,3189,6354,3050,4711,3981,6355,3547, # 4672
+3014,4146,4378,3735,2651,3845,3260,3136,2224,1986,6356,3401,6357,4712,2594,3627, # 4688
+3137,2573,3736,3982,4713,3628,4714,4715,2682,3629,4716,6358,3630,4379,3631,6359, # 4704
+6360,6361,3983,6362,6363,6364,6365,4147,3846,4717,6366,6367,3737,2842,6368,4718, # 4720
+2628,6369,3261,6370,2386,6371,6372,3738,3984,4719,3464,4720,3402,6373,2924,3336, # 4736
+4148,2866,6374,2805,3262,4380,2704,2069,2531,3138,2806,2984,6375,2769,6376,4721, # 4752
+4722,3403,6377,6378,3548,6379,6380,2705,3092,1979,4149,2629,3337,2889,6381,3338, # 4768
+4150,2557,3339,4381,6382,3190,3263,3739,6383,4151,4723,4152,2558,2574,3404,3191, # 4784
+6384,6385,4153,6386,4724,4382,6387,6388,4383,6389,6390,4154,6391,4725,3985,6392, # 4800
+3847,4155,6393,6394,6395,6396,6397,3465,6398,4384,6399,6400,6401,6402,6403,6404, # 4816
+4156,6405,6406,6407,6408,2123,6409,6410,2326,3192,4726,6411,6412,6413,6414,4385, # 4832
+4157,6415,6416,4158,6417,3093,3848,6418,3986,6419,6420,3849,6421,6422,6423,4159, # 4848
+6424,6425,4160,6426,3740,6427,6428,6429,6430,3987,6431,4727,6432,2238,6433,6434, # 4864
+4386,3988,6435,6436,3632,6437,6438,2843,6439,6440,6441,6442,3633,6443,2958,6444, # 4880
+6445,3466,6446,2364,4387,3850,6447,4388,2959,3340,6448,3851,6449,4728,6450,6451, # 4896
+3264,4729,6452,3193,6453,4389,4390,2706,3341,4730,6454,3139,6455,3194,6456,3051, # 4912
+2124,3852,1602,4391,4161,3853,1158,3854,4162,3989,4392,3990,4731,4732,4393,2040, # 4928
+4163,4394,3265,6457,2807,3467,3855,6458,6459,6460,3991,3468,4733,4734,6461,3140, # 4944
+2960,6462,4735,6463,6464,6465,6466,4736,4737,4738,4739,6467,6468,4164,2403,3856, # 4960
+6469,6470,2770,2844,6471,4740,6472,6473,6474,6475,6476,6477,6478,3195,6479,4741, # 4976
+4395,6480,2867,6481,4742,2808,6482,2493,4165,6483,6484,6485,6486,2295,4743,6487, # 4992
+6488,6489,3634,6490,6491,6492,6493,6494,6495,6496,2985,4744,6497,6498,4745,6499, # 5008
+6500,2925,3141,4166,6501,6502,4746,6503,6504,4747,6505,6506,6507,2890,6508,6509, # 5024
+6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,3469,4167,6520,6521,6522,4748, # 5040
+4396,3741,4397,4749,4398,3342,2125,4750,6523,4751,4752,4753,3052,6524,2961,4168, # 5056
+6525,4754,6526,4755,4399,2926,4169,6527,3857,6528,4400,4170,6529,4171,6530,6531, # 5072
+2595,6532,6533,6534,6535,3635,6536,6537,6538,6539,6540,6541,6542,4756,6543,6544, # 5088
+6545,6546,6547,6548,4401,6549,6550,6551,6552,4402,3405,4757,4403,6553,6554,6555, # 5104
+4172,3742,6556,6557,6558,3992,3636,6559,6560,3053,2726,6561,3549,4173,3054,4404, # 5120
+6562,6563,3993,4405,3266,3550,2809,4406,6564,6565,6566,4758,4759,6567,3743,6568, # 5136
+4760,3744,4761,3470,6569,6570,6571,4407,6572,3745,4174,6573,4175,2810,4176,3196, # 5152
+4762,6574,4177,6575,6576,2494,2891,3551,6577,6578,3471,6579,4408,6580,3015,3197, # 5168
+6581,3343,2532,3994,3858,6582,3094,3406,4409,6583,2892,4178,4763,4410,3016,4411, # 5184
+6584,3995,3142,3017,2683,6585,4179,6586,6587,4764,4412,6588,6589,4413,6590,2986, # 5200
+6591,2962,3552,6592,2963,3472,6593,6594,4180,4765,6595,6596,2225,3267,4414,6597, # 5216
+3407,3637,4766,6598,6599,3198,6600,4415,6601,3859,3199,6602,3473,4767,2811,4416, # 5232
+1856,3268,3200,2575,3996,3997,3201,4417,6603,3095,2927,6604,3143,6605,2268,6606, # 5248
+3998,3860,3096,2771,6607,6608,3638,2495,4768,6609,3861,6610,3269,2745,4769,4181, # 5264
+3553,6611,2845,3270,6612,6613,6614,3862,6615,6616,4770,4771,6617,3474,3999,4418, # 5280
+4419,6618,3639,3344,6619,4772,4182,6620,2126,6621,6622,6623,4420,4773,6624,3018, # 5296
+6625,4774,3554,6626,4183,2025,3746,6627,4184,2707,6628,4421,4422,3097,1775,4185, # 5312
+3555,6629,6630,2868,6631,6632,4423,6633,6634,4424,2414,2533,2928,6635,4186,2387, # 5328
+6636,4775,6637,4187,6638,1891,4425,3202,3203,6639,6640,4776,6641,3345,6642,6643, # 5344
+3640,6644,3475,3346,3641,4000,6645,3144,6646,3098,2812,4188,3642,3204,6647,3863, # 5360
+3476,6648,3864,6649,4426,4001,6650,6651,6652,2576,6653,4189,4777,6654,6655,6656, # 5376
+2846,6657,3477,3205,4002,6658,4003,6659,3347,2252,6660,6661,6662,4778,6663,6664, # 5392
+6665,6666,6667,6668,6669,4779,4780,2048,6670,3478,3099,6671,3556,3747,4004,6672, # 5408
+6673,6674,3145,4005,3748,6675,6676,6677,6678,6679,3408,6680,6681,6682,6683,3206, # 5424
+3207,6684,6685,4781,4427,6686,4782,4783,4784,6687,6688,6689,4190,6690,6691,3479, # 5440
+6692,2746,6693,4428,6694,6695,6696,6697,6698,6699,4785,6700,6701,3208,2727,6702, # 5456
+3146,6703,6704,3409,2196,6705,4429,6706,6707,6708,2534,1996,6709,6710,6711,2747, # 5472
+6712,6713,6714,4786,3643,6715,4430,4431,6716,3557,6717,4432,4433,6718,6719,6720, # 5488
+6721,3749,6722,4006,4787,6723,6724,3644,4788,4434,6725,6726,4789,2772,6727,6728, # 5504
+6729,6730,6731,2708,3865,2813,4435,6732,6733,4790,4791,3480,6734,6735,6736,6737, # 5520
+4436,3348,6738,3410,4007,6739,6740,4008,6741,6742,4792,3411,4191,6743,6744,6745, # 5536
+6746,6747,3866,6748,3750,6749,6750,6751,6752,6753,6754,6755,3867,6756,4009,6757, # 5552
+4793,4794,6758,2814,2987,6759,6760,6761,4437,6762,6763,6764,6765,3645,6766,6767, # 5568
+3481,4192,6768,3751,6769,6770,2174,6771,3868,3752,6772,6773,6774,4193,4795,4438, # 5584
+3558,4796,4439,6775,4797,6776,6777,4798,6778,4799,3559,4800,6779,6780,6781,3482, # 5600
+6782,2893,6783,6784,4194,4801,4010,6785,6786,4440,6787,4011,6788,6789,6790,6791, # 5616
+6792,6793,4802,6794,6795,6796,4012,6797,6798,6799,6800,3349,4803,3483,6801,4804, # 5632
+4195,6802,4013,6803,6804,4196,6805,4014,4015,6806,2847,3271,2848,6807,3484,6808, # 5648
+6809,6810,4441,6811,4442,4197,4443,3272,4805,6812,3412,4016,1579,6813,6814,4017, # 5664
+6815,3869,6816,2964,6817,4806,6818,6819,4018,3646,6820,6821,4807,4019,4020,6822, # 5680
+6823,3560,6824,6825,4021,4444,6826,4198,6827,6828,4445,6829,6830,4199,4808,6831, # 5696
+6832,6833,3870,3019,2458,6834,3753,3413,3350,6835,4809,3871,4810,3561,4446,6836, # 5712
+6837,4447,4811,4812,6838,2459,4448,6839,4449,6840,6841,4022,3872,6842,4813,4814, # 5728
+6843,6844,4815,4200,4201,4202,6845,4023,6846,6847,4450,3562,3873,6848,6849,4816, # 5744
+4817,6850,4451,4818,2139,6851,3563,6852,6853,3351,6854,6855,3352,4024,2709,3414, # 5760
+4203,4452,6856,4204,6857,6858,3874,3875,6859,6860,4819,6861,6862,6863,6864,4453, # 5776
+3647,6865,6866,4820,6867,6868,6869,6870,4454,6871,2869,6872,6873,4821,6874,3754, # 5792
+6875,4822,4205,6876,6877,6878,3648,4206,4455,6879,4823,6880,4824,3876,6881,3055, # 5808
+4207,6882,3415,6883,6884,6885,4208,4209,6886,4210,3353,6887,3354,3564,3209,3485, # 5824
+2652,6888,2728,6889,3210,3755,6890,4025,4456,6891,4825,6892,6893,6894,6895,4211, # 5840
+6896,6897,6898,4826,6899,6900,4212,6901,4827,6902,2773,3565,6903,4828,6904,6905, # 5856
+6906,6907,3649,3650,6908,2849,3566,6909,3567,3100,6910,6911,6912,6913,6914,6915, # 5872
+4026,6916,3355,4829,3056,4457,3756,6917,3651,6918,4213,3652,2870,6919,4458,6920, # 5888
+2438,6921,6922,3757,2774,4830,6923,3356,4831,4832,6924,4833,4459,3653,2507,6925, # 5904
+4834,2535,6926,6927,3273,4027,3147,6928,3568,6929,6930,6931,4460,6932,3877,4461, # 5920
+2729,3654,6933,6934,6935,6936,2175,4835,2630,4214,4028,4462,4836,4215,6937,3148, # 5936
+4216,4463,4837,4838,4217,6938,6939,2850,4839,6940,4464,6941,6942,6943,4840,6944, # 5952
+4218,3274,4465,6945,6946,2710,6947,4841,4466,6948,6949,2894,6950,6951,4842,6952, # 5968
+4219,3057,2871,6953,6954,6955,6956,4467,6957,2711,6958,6959,6960,3275,3101,4843, # 5984
+6961,3357,3569,6962,4844,6963,6964,4468,4845,3570,6965,3102,4846,3758,6966,4847, # 6000
+3878,4848,4849,4029,6967,2929,3879,4850,4851,6968,6969,1733,6970,4220,6971,6972, # 6016
+6973,6974,6975,6976,4852,6977,6978,6979,6980,6981,6982,3759,6983,6984,6985,3486, # 6032
+3487,6986,3488,3416,6987,6988,6989,6990,6991,6992,6993,6994,6995,6996,6997,4853, # 6048
+6998,6999,4030,7000,7001,3211,7002,7003,4221,7004,7005,3571,4031,7006,3572,7007, # 6064
+2614,4854,2577,7008,7009,2965,3655,3656,4855,2775,3489,3880,4222,4856,3881,4032, # 6080
+3882,3657,2730,3490,4857,7010,3149,7011,4469,4858,2496,3491,4859,2283,7012,7013, # 6096
+7014,2365,4860,4470,7015,7016,3760,7017,7018,4223,1917,7019,7020,7021,4471,7022, # 6112
+2776,4472,7023,7024,7025,7026,4033,7027,3573,4224,4861,4034,4862,7028,7029,1929, # 6128
+3883,4035,7030,4473,3058,7031,2536,3761,3884,7032,4036,7033,2966,2895,1968,4474, # 6144
+3276,4225,3417,3492,4226,2105,7034,7035,1754,2596,3762,4227,4863,4475,3763,4864, # 6160
+3764,2615,2777,3103,3765,3658,3418,4865,2296,3766,2815,7036,7037,7038,3574,2872, # 6176
+3277,4476,7039,4037,4477,7040,7041,4038,7042,7043,7044,7045,7046,7047,2537,7048, # 6192
+7049,7050,7051,7052,7053,7054,4478,7055,7056,3767,3659,4228,3575,7057,7058,4229, # 6208
+7059,7060,7061,3660,7062,3212,7063,3885,4039,2460,7064,7065,7066,7067,7068,7069, # 6224
+7070,7071,7072,7073,7074,4866,3768,4867,7075,7076,7077,7078,4868,3358,3278,2653, # 6240
+7079,7080,4479,3886,7081,7082,4869,7083,7084,7085,7086,7087,7088,2538,7089,7090, # 6256
+7091,4040,3150,3769,4870,4041,2896,3359,4230,2930,7092,3279,7093,2967,4480,3213, # 6272
+4481,3661,7094,7095,7096,7097,7098,7099,7100,7101,7102,2461,3770,7103,7104,4231, # 6288
+3151,7105,7106,7107,4042,3662,7108,7109,4871,3663,4872,4043,3059,7110,7111,7112, # 6304
+3493,2988,7113,4873,7114,7115,7116,3771,4874,7117,7118,4232,4875,7119,3576,2336, # 6320
+4876,7120,4233,3419,4044,4877,4878,4482,4483,4879,4484,4234,7121,3772,4880,1045, # 6336
+3280,3664,4881,4882,7122,7123,7124,7125,4883,7126,2778,7127,4485,4486,7128,4884, # 6352
+3214,3887,7129,7130,3215,7131,4885,4045,7132,7133,4046,7134,7135,7136,7137,7138, # 6368
+7139,7140,7141,7142,7143,4235,7144,4886,7145,7146,7147,4887,7148,7149,7150,4487, # 6384
+4047,4488,7151,7152,4888,4048,2989,3888,7153,3665,7154,4049,7155,7156,7157,7158, # 6400
+7159,7160,2931,4889,4890,4489,7161,2631,3889,4236,2779,7162,7163,4891,7164,3060, # 6416
+7165,1672,4892,7166,4893,4237,3281,4894,7167,7168,3666,7169,3494,7170,7171,4050, # 6432
+7172,7173,3104,3360,3420,4490,4051,2684,4052,7174,4053,7175,7176,7177,2253,4054, # 6448
+7178,7179,4895,7180,3152,3890,3153,4491,3216,7181,7182,7183,2968,4238,4492,4055, # 6464
+7184,2990,7185,2479,7186,7187,4493,7188,7189,7190,7191,7192,4896,7193,4897,2969, # 6480
+4494,4898,7194,3495,7195,7196,4899,4495,7197,3105,2731,7198,4900,7199,7200,7201, # 6496
+4056,7202,3361,7203,7204,4496,4901,4902,7205,4497,7206,7207,2315,4903,7208,4904, # 6512
+7209,4905,2851,7210,7211,3577,7212,3578,4906,7213,4057,3667,4907,7214,4058,2354, # 6528
+3891,2376,3217,3773,7215,7216,7217,7218,7219,4498,7220,4908,3282,2685,7221,3496, # 6544
+4909,2632,3154,4910,7222,2337,7223,4911,7224,7225,7226,4912,4913,3283,4239,4499, # 6560
+7227,2816,7228,7229,7230,7231,7232,7233,7234,4914,4500,4501,7235,7236,7237,2686, # 6576
+7238,4915,7239,2897,4502,7240,4503,7241,2516,7242,4504,3362,3218,7243,7244,7245, # 6592
+4916,7246,7247,4505,3363,7248,7249,7250,7251,3774,4506,7252,7253,4917,7254,7255, # 6608
+3284,2991,4918,4919,3219,3892,4920,3106,3497,4921,7256,7257,7258,4922,7259,4923, # 6624
+3364,4507,4508,4059,7260,4240,3498,7261,7262,4924,7263,2992,3893,4060,3220,7264, # 6640
+7265,7266,7267,7268,7269,4509,3775,7270,2817,7271,4061,4925,4510,3776,7272,4241, # 6656
+4511,3285,7273,7274,3499,7275,7276,7277,4062,4512,4926,7278,3107,3894,7279,7280, # 6672
+4927,7281,4513,7282,7283,3668,7284,7285,4242,4514,4243,7286,2058,4515,4928,4929, # 6688
+4516,7287,3286,4244,7288,4517,7289,7290,7291,3669,7292,7293,4930,4931,4932,2355, # 6704
+4933,7294,2633,4518,7295,4245,7296,7297,4519,7298,7299,4520,4521,4934,7300,4246, # 6720
+4522,7301,7302,7303,3579,7304,4247,4935,7305,4936,7306,7307,7308,7309,3777,7310, # 6736
+4523,7311,7312,7313,4248,3580,7314,4524,3778,4249,7315,3581,7316,3287,7317,3221, # 6752
+7318,4937,7319,7320,7321,7322,7323,7324,4938,4939,7325,4525,7326,7327,7328,4063, # 6768
+7329,7330,4940,7331,7332,4941,7333,4526,7334,3500,2780,1741,4942,2026,1742,7335, # 6784
+7336,3582,4527,2388,7337,7338,7339,4528,7340,4250,4943,7341,7342,7343,4944,7344, # 6800
+7345,7346,3020,7347,4945,7348,7349,7350,7351,3895,7352,3896,4064,3897,7353,7354, # 6816
+7355,4251,7356,7357,3898,7358,3779,7359,3780,3288,7360,7361,4529,7362,4946,4530, # 6832
+2027,7363,3899,4531,4947,3222,3583,7364,4948,7365,7366,7367,7368,4949,3501,4950, # 6848
+3781,4951,4532,7369,2517,4952,4252,4953,3155,7370,4954,4955,4253,2518,4533,7371, # 6864
+7372,2712,4254,7373,7374,7375,3670,4956,3671,7376,2389,3502,4065,7377,2338,7378, # 6880
+7379,7380,7381,3061,7382,4957,7383,7384,7385,7386,4958,4534,7387,7388,2993,7389, # 6896
+3062,7390,4959,7391,7392,7393,4960,3108,4961,7394,4535,7395,4962,3421,4536,7396, # 6912
+4963,7397,4964,1857,7398,4965,7399,7400,2176,3584,4966,7401,7402,3422,4537,3900, # 6928
+3585,7403,3782,7404,2852,7405,7406,7407,4538,3783,2654,3423,4967,4539,7408,3784, # 6944
+3586,2853,4540,4541,7409,3901,7410,3902,7411,7412,3785,3109,2327,3903,7413,7414, # 6960
+2970,4066,2932,7415,7416,7417,3904,3672,3424,7418,4542,4543,4544,7419,4968,7420, # 6976
+7421,4255,7422,7423,7424,7425,7426,4067,7427,3673,3365,4545,7428,3110,2559,3674, # 6992
+7429,7430,3156,7431,7432,3503,7433,3425,4546,7434,3063,2873,7435,3223,4969,4547, # 7008
+4548,2898,4256,4068,7436,4069,3587,3786,2933,3787,4257,4970,4971,3788,7437,4972, # 7024
+3064,7438,4549,7439,7440,7441,7442,7443,4973,3905,7444,2874,7445,7446,7447,7448, # 7040
+3021,7449,4550,3906,3588,4974,7450,7451,3789,3675,7452,2578,7453,4070,7454,7455, # 7056
+7456,4258,3676,7457,4975,7458,4976,4259,3790,3504,2634,4977,3677,4551,4260,7459, # 7072
+7460,7461,7462,3907,4261,4978,7463,7464,7465,7466,4979,4980,7467,7468,2213,4262, # 7088
+7469,7470,7471,3678,4981,7472,2439,7473,4263,3224,3289,7474,3908,2415,4982,7475, # 7104
+4264,7476,4983,2655,7477,7478,2732,4552,2854,2875,7479,7480,4265,7481,4553,4984, # 7120
+7482,7483,4266,7484,3679,3366,3680,2818,2781,2782,3367,3589,4554,3065,7485,4071, # 7136
+2899,7486,7487,3157,2462,4072,4555,4073,4985,4986,3111,4267,2687,3368,4556,4074, # 7152
+3791,4268,7488,3909,2783,7489,2656,1962,3158,4557,4987,1963,3159,3160,7490,3112, # 7168
+4988,4989,3022,4990,4991,3792,2855,7491,7492,2971,4558,7493,7494,4992,7495,7496, # 7184
+7497,7498,4993,7499,3426,4559,4994,7500,3681,4560,4269,4270,3910,7501,4075,4995, # 7200
+4271,7502,7503,4076,7504,4996,7505,3225,4997,4272,4077,2819,3023,7506,7507,2733, # 7216
+4561,7508,4562,7509,3369,3793,7510,3590,2508,7511,7512,4273,3113,2994,2616,7513, # 7232
+7514,7515,7516,7517,7518,2820,3911,4078,2748,7519,7520,4563,4998,7521,7522,7523, # 7248
+7524,4999,4274,7525,4564,3682,2239,4079,4565,7526,7527,7528,7529,5000,7530,7531, # 7264
+5001,4275,3794,7532,7533,7534,3066,5002,4566,3161,7535,7536,4080,7537,3162,7538, # 7280
+7539,4567,7540,7541,7542,7543,7544,7545,5003,7546,4568,7547,7548,7549,7550,7551, # 7296
+7552,7553,7554,7555,7556,5004,7557,7558,7559,5005,7560,3795,7561,4569,7562,7563, # 7312
+7564,2821,3796,4276,4277,4081,7565,2876,7566,5006,7567,7568,2900,7569,3797,3912, # 7328
+7570,7571,7572,4278,7573,7574,7575,5007,7576,7577,5008,7578,7579,4279,2934,7580, # 7344
+7581,5009,7582,4570,7583,4280,7584,7585,7586,4571,4572,3913,7587,4573,3505,7588, # 7360
+5010,7589,7590,7591,7592,3798,4574,7593,7594,5011,7595,4281,7596,7597,7598,4282, # 7376
+5012,7599,7600,5013,3163,7601,5014,7602,3914,7603,7604,2734,4575,4576,4577,7605, # 7392
+7606,7607,7608,7609,3506,5015,4578,7610,4082,7611,2822,2901,2579,3683,3024,4579, # 7408
+3507,7612,4580,7613,3226,3799,5016,7614,7615,7616,7617,7618,7619,7620,2995,3290, # 7424
+7621,4083,7622,5017,7623,7624,7625,7626,7627,4581,3915,7628,3291,7629,5018,7630, # 7440
+7631,7632,7633,4084,7634,7635,3427,3800,7636,7637,4582,7638,5019,4583,5020,7639, # 7456
+3916,7640,3801,5021,4584,4283,7641,7642,3428,3591,2269,7643,2617,7644,4585,3592, # 7472
+7645,4586,2902,7646,7647,3227,5022,7648,4587,7649,4284,7650,7651,7652,4588,2284, # 7488
+7653,5023,7654,7655,7656,4589,5024,3802,7657,7658,5025,3508,4590,7659,7660,7661, # 7504
+1969,5026,7662,7663,3684,1821,2688,7664,2028,2509,4285,7665,2823,1841,7666,2689, # 7520
+3114,7667,3917,4085,2160,5027,5028,2972,7668,5029,7669,7670,7671,3593,4086,7672, # 7536
+4591,4087,5030,3803,7673,7674,7675,7676,7677,7678,7679,4286,2366,4592,4593,3067, # 7552
+2328,7680,7681,4594,3594,3918,2029,4287,7682,5031,3919,3370,4288,4595,2856,7683, # 7568
+3509,7684,7685,5032,5033,7686,7687,3804,2784,7688,7689,7690,7691,3371,7692,7693, # 7584
+2877,5034,7694,7695,3920,4289,4088,7696,7697,7698,5035,7699,5036,4290,5037,5038, # 7600
+5039,7700,7701,7702,5040,5041,3228,7703,1760,7704,5042,3229,4596,2106,4089,7705, # 7616
+4597,2824,5043,2107,3372,7706,4291,4090,5044,7707,4091,7708,5045,3025,3805,4598, # 7632
+4292,4293,4294,3373,7709,4599,7710,5046,7711,7712,5047,5048,3806,7713,7714,7715, # 7648
+5049,7716,7717,7718,7719,4600,5050,7720,7721,7722,5051,7723,4295,3429,7724,7725, # 7664
+7726,7727,3921,7728,3292,5052,4092,7729,7730,7731,7732,7733,7734,7735,5053,5054, # 7680
+7736,7737,7738,7739,3922,3685,7740,7741,7742,7743,2635,5055,7744,5056,4601,7745, # 7696
+7746,2560,7747,7748,7749,7750,3923,7751,7752,7753,7754,7755,4296,2903,7756,7757, # 7712
+7758,7759,7760,3924,7761,5057,4297,7762,7763,5058,4298,7764,4093,7765,7766,5059, # 7728
+3925,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,3595,7777,4299,5060,4094, # 7744
+7778,3293,5061,7779,7780,4300,7781,7782,4602,7783,3596,7784,7785,3430,2367,7786, # 7760
+3164,5062,5063,4301,7787,7788,4095,5064,5065,7789,3374,3115,7790,7791,7792,7793, # 7776
+7794,7795,7796,3597,4603,7797,7798,3686,3116,3807,5066,7799,7800,5067,7801,7802, # 7792
+4604,4302,5068,4303,4096,7803,7804,3294,7805,7806,5069,4605,2690,7807,3026,7808, # 7808
+7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824, # 7824
+7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7840
+7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856, # 7856
+7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872, # 7872
+7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888, # 7888
+7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904, # 7904
+7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920, # 7920
+7921,7922,7923,7924,3926,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, # 7936
+7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, # 7952
+7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, # 7968
+7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, # 7984
+7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, # 8000
+8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, # 8016
+8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, # 8032
+8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, # 8048
+8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, # 8064
+8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, # 8080
+8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, # 8096
+8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, # 8112
+8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, # 8128
+8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, # 8144
+8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, # 8160
+8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, # 8176
+8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, # 8192
+8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, # 8208
+8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, # 8224
+8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, # 8240
+8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, # 8256
+8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271) # 8272
+
+# flake8: noqa
diff --git a/third_party/python/requests/requests/packages/chardet/jpcntx.py b/third_party/python/requests/requests/packages/chardet/jpcntx.py
new file mode 100644
index 0000000000..59aeb6a878
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/jpcntx.py
@@ -0,0 +1,227 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .compat import wrap_ord
+
+NUM_OF_CATEGORY = 6
+DONT_KNOW = -1
+ENOUGH_REL_THRESHOLD = 100
+MAX_REL_THRESHOLD = 1000
+MINIMUM_DATA_THRESHOLD = 4
+
+# This is the hiragana 2-char sequence table; the number in each cell represents its frequency category
+jp2CharContext = (
+(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
+(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
+(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
+(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
+(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
+(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
+(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
+(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
+(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
+(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
+(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
+(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
+(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
+(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
+(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
+(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
+(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
+(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
+(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
+(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
+(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
+(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
+(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
+(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
+(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
+(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
+(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
+(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
+(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
+(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
+(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
+(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
+(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
+(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
+(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
+(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
+(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
+(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
+(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
+(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
+(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
+(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
+(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
+(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
+(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
+(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
+(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
+(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
+(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
+(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
+(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
+(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
+(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
+(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
+(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
+(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
+(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
+(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
+(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
+(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
+(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
+(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
+(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
+(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
+(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
+(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
+(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
+(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
+(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
+(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
+(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
+(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
+(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
+(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
+(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
+(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
+(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
+(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
+)
+
+class JapaneseContextAnalysis:
+ def __init__(self):
+ self.reset()
+
+ def reset(self):
+ self._mTotalRel = 0 # total sequences received
+ # category counters; each integer counts sequences in its category
+ self._mRelSample = [0] * NUM_OF_CATEGORY
+ # if the last byte in the current buffer is not the last byte of a
+ # character, we need to know how many bytes to skip in the next buffer
+ self._mNeedToSkipCharNum = 0
+ self._mLastCharOrder = -1 # The order of previous char
+ # If this flag is set to True, detection is done and a conclusion has
+ # been made
+ self._mDone = False
+
+ def feed(self, aBuf, aLen):
+ if self._mDone:
+ return
+
+ # The buffer we receive is byte oriented, and a character may span more
+ # than one buffer. If the last one or two bytes of the previous buffer
+ # did not form a complete character, we record how many bytes are needed
+ # to complete it and skip them here. We could record those bytes and
+ # analyse the character once it is complete, but a single character makes
+ # little difference, so simply skipping it simplifies our logic and
+ # improves performance.
+ i = self._mNeedToSkipCharNum
+ while i < aLen:
+ order, charLen = self.get_order(aBuf[i:i + 2])
+ i += charLen
+ if i > aLen:
+ self._mNeedToSkipCharNum = i - aLen
+ self._mLastCharOrder = -1
+ else:
+ if (order != -1) and (self._mLastCharOrder != -1):
+ self._mTotalRel += 1
+ if self._mTotalRel > MAX_REL_THRESHOLD:
+ self._mDone = True
+ break
+ self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
+ self._mLastCharOrder = order
+
+ def got_enough_data(self):
+ return self._mTotalRel > ENOUGH_REL_THRESHOLD
+
+ def get_confidence(self):
+ # This is just one way to calculate confidence; it works well in practice.
+ if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
+ return (self._mTotalRel - self._mRelSample[0]) / self._mTotalRel
+ else:
+ return DONT_KNOW
+
+ def get_order(self, aBuf):
+ return -1, 1
+
+class SJISContextAnalysis(JapaneseContextAnalysis):
+ def __init__(self):
+ self.charset_name = "SHIFT_JIS"
+
+ def get_charset_name(self):
+ return self.charset_name
+
+ def get_order(self, aBuf):
+ if not aBuf:
+ return -1, 1
+ # find out current char's byte length
+ first_char = wrap_ord(aBuf[0])
+ if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
+ charLen = 2
+ if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
+ self.charset_name = "CP932"
+ else:
+ charLen = 1
+
+ # return its order if it is hiragana
+ if len(aBuf) > 1:
+ second_char = wrap_ord(aBuf[1])
+ if (first_char == 202) and (0x9F <= second_char <= 0xF1):
+ return second_char - 0x9F, charLen
+
+ return -1, charLen
+
+class EUCJPContextAnalysis(JapaneseContextAnalysis):
+ def get_order(self, aBuf):
+ if not aBuf:
+ return -1, 1
+ # find out current char's byte length
+ first_char = wrap_ord(aBuf[0])
+ if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
+ charLen = 2
+ elif first_char == 0x8F:
+ charLen = 3
+ else:
+ charLen = 1
+
+ # return its order if it is hiragana
+ if len(aBuf) > 1:
+ second_char = wrap_ord(aBuf[1])
+ if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
+ return second_char - 0xA1, charLen
+
+ return -1, charLen
+
+# flake8: noqa
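
Editorial note: the analyzers defined above are normally driven by the SJIS and
EUC-JP probers elsewhere in chardet rather than used directly. A minimal,
hedged sketch of that usage follows; it assumes this file is importable as
requests.packages.chardet.jpcntx in this vendored tree, and the sample string
is hypothetical.

    from requests.packages.chardet.jpcntx import SJISContextAnalysis

    analyzer = SJISContextAnalysis()
    # __init__ above only sets the charset name; the probers call reset()
    # before feeding data, so a direct caller must do the same.
    analyzer.reset()
    sample = u"\u306b\u307b\u3093\u3054".encode("shift_jis")  # hypothetical Shift_JIS text
    analyzer.feed(sample, len(sample))
    # With only a few characters the confidence is still the DONT_KNOW sentinel;
    # it becomes meaningful once enough two-byte sequences have been observed.
    print(analyzer.get_charset_name(), analyzer.get_confidence())
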
diff --git a/third_party/python/requests/requests/packages/chardet/langbulgarianmodel.py b/third_party/python/requests/requests/packages/chardet/langbulgarianmodel.py
new file mode 100644
index 0000000000..e5788fc64a
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/langbulgarianmodel.py
@@ -0,0 +1,229 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage Return / Line Feed
+# 253: symbols (punctuation) that do not belong to a word
+# 252: 0 - 9
+
+# Character Mapping Table:
+# this table is modified based on win1251BulgarianCharToOrderMap, so
+# only numbers below 64 are guaranteed to be valid
+
+Latin5_BulgarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
+110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
+253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
+116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
+194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80
+210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90
+ 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0
+ 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0
+ 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0
+ 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0
+ 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0
+ 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0
+)
+
+win1251BulgarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
+110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
+253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
+116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
+206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80
+221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90
+ 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0
+ 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0
+ 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0
+ 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0
+ 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0
+ 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 96.9392%
+# first 1024 sequences: 3.0618%
+# rest sequences: 0.2992%
+# negative sequences: 0.0020%
+BulgarianLangModel = (
+0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
+3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
+0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
+0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
+0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
+1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
+0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
+0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
+2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
+3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
+1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
+3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
+1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
+2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
+2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
+3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
+1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
+2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
+2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
+3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
+1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
+2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
+2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
+2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
+1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
+2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
+1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
+3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
+1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
+3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
+1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
+2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
+1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
+2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
+1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
+2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
+1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
+1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
+1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
+2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
+1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
+2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
+1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
+0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
+1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
+1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
+1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
+0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
+0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
+0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
+1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
+0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
+0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
+1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
+1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
+1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+)
+
+Latin5BulgarianModel = {
+ 'charToOrderMap': Latin5_BulgarianCharToOrderMap,
+ 'precedenceMatrix': BulgarianLangModel,
+ 'mTypicalPositiveRatio': 0.969392,
+ 'keepEnglishLetter': False,
+ 'charsetName': "ISO-8859-5"
+}
+
+Win1251BulgarianModel = {
+ 'charToOrderMap': win1251BulgarianCharToOrderMap,
+ 'precedenceMatrix': BulgarianLangModel,
+ 'mTypicalPositiveRatio': 0.969392,
+ 'keepEnglishLetter': False,
+ 'charsetName': "windows-1251"
+}
+
+
+# flake8: noqa
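
Editorial note: model dictionaries such as Win1251BulgarianModel above are not
used standalone; they parameterise the single-byte charset prober added
elsewhere in this patch. A hedged sketch, assuming the SingleByteCharSetProber
interface from the sibling sbcharsetprober module and a hypothetical
windows-1251 sample:

    from requests.packages.chardet.sbcharsetprober import SingleByteCharSetProber
    from requests.packages.chardet.langbulgarianmodel import Win1251BulgarianModel

    prober = SingleByteCharSetProber(Win1251BulgarianModel)
    # "bulgarski tekst" written in Cyrillic, encoded as windows-1251
    sample = u"\u0431\u044a\u043b\u0433\u0430\u0440\u0441\u043a\u0438 \u0442\u0435\u043a\u0441\u0442".encode("windows-1251")
    prober.feed(sample)
    print(prober.get_charset_name(), prober.get_confidence())
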
diff --git a/third_party/python/requests/requests/packages/chardet/langcyrillicmodel.py b/third_party/python/requests/requests/packages/chardet/langcyrillicmodel.py
new file mode 100644
index 0000000000..a86f54bd54
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/langcyrillicmodel.py
@@ -0,0 +1,329 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# KOI8-R language model
+# Character Mapping Table:
+KOI8R_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90
+223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0
+238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0
+ 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0
+ 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0
+ 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0
+ 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0
+)
+
+win1251_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
+223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
+239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,
+ 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
+ 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
+ 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
+ 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
+)
+
+latin5_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
+223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
+ 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
+ 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
+ 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
+ 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
+239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
+)
+
+macCyrillic_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+ 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
+ 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
+223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
+239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,
+ 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
+ 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,
+)
+
+IBM855_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,
+206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,
+ 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219,
+220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229,
+230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,
+ 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248,
+ 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,
+250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,
+)
+
+IBM866_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+ 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
+ 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
+ 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
+223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
+ 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
+239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 97.6601%
+# first 1024 sequences: 2.3389%
+# rest sequences: 0.1237%
+# negative sequences: 0.0009%
+RussianLangModel = (
+0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,
+3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
+0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
+0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,
+1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,
+1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,
+2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,
+1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,
+3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,
+1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,
+2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,
+1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,
+1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,
+1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
+2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,
+1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,
+3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,
+1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,
+2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,
+1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,
+2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,
+1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,
+1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,
+1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,
+3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,
+2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,
+3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,
+1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,
+1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,
+0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
+2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,
+1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,
+1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,
+0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,
+1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
+2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,
+2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,
+1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,
+1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,
+2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,
+1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,
+0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
+2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,
+1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,
+1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
+0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,
+0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,
+0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,
+0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,
+0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
+1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,
+0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,
+2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,
+0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
+)
+
+Koi8rModel = {
+ 'charToOrderMap': KOI8R_CharToOrderMap,
+ 'precedenceMatrix': RussianLangModel,
+ 'mTypicalPositiveRatio': 0.976601,
+ 'keepEnglishLetter': False,
+ 'charsetName': "KOI8-R"
+}
+
+Win1251CyrillicModel = {
+ 'charToOrderMap': win1251_CharToOrderMap,
+ 'precedenceMatrix': RussianLangModel,
+ 'mTypicalPositiveRatio': 0.976601,
+ 'keepEnglishLetter': False,
+ 'charsetName': "windows-1251"
+}
+
+Latin5CyrillicModel = {
+ 'charToOrderMap': latin5_CharToOrderMap,
+ 'precedenceMatrix': RussianLangModel,
+ 'mTypicalPositiveRatio': 0.976601,
+ 'keepEnglishLetter': False,
+ 'charsetName': "ISO-8859-5"
+}
+
+MacCyrillicModel = {
+ 'charToOrderMap': macCyrillic_CharToOrderMap,
+ 'precedenceMatrix': RussianLangModel,
+ 'mTypicalPositiveRatio': 0.976601,
+ 'keepEnglishLetter': False,
+ 'charsetName': "MacCyrillic"
+}
+
+Ibm866Model = {
+ 'charToOrderMap': IBM866_CharToOrderMap,
+ 'precedenceMatrix': RussianLangModel,
+ 'mTypicalPositiveRatio': 0.976601,
+ 'keepEnglishLetter': False,
+ 'charsetName': "IBM866"
+}
+
+Ibm855Model = {
+ 'charToOrderMap': IBM855_CharToOrderMap,
+ 'precedenceMatrix': RussianLangModel,
+ 'mTypicalPositiveRatio': 0.976601,
+ 'keepEnglishLetter': False,
+ 'charsetName': "IBM855"
+}
+
+# flake8: noqa
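
Editorial note: each charToOrderMap above maps a raw byte value (0-255) to a
frequency order, and each language model is a flattened 64 x 64 matrix over
the 64 most frequent orders, indexed as prev_order * 64 + cur_order. That
indexing convention is how the single-byte prober consumes these tables; the
helper below is an illustrative sketch of the lookup, not code from this patch.

    from requests.packages.chardet.langcyrillicmodel import Koi8rModel

    SAMPLE_SIZE = 64  # only the 64 most frequent characters are modelled

    def sequence_category(model, prev_byte, cur_byte):
        # Map raw byte values to frequency orders, then index the flattened
        # 64 x 64 precedence matrix at prev_order * 64 + cur_order.
        prev_order = model['charToOrderMap'][prev_byte]
        cur_order = model['charToOrderMap'][cur_byte]
        if prev_order < SAMPLE_SIZE and cur_order < SAMPLE_SIZE:
            return model['precedenceMatrix'][prev_order * SAMPLE_SIZE + cur_order]
        return None  # at least one byte falls outside the modelled set

    print(sequence_category(Koi8rModel, 0xC4, 0xC1))  # two Cyrillic letters in KOI8-R
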
diff --git a/third_party/python/requests/requests/packages/chardet/langgreekmodel.py b/third_party/python/requests/requests/packages/chardet/langgreekmodel.py
new file mode 100644
index 0000000000..ddb5837655
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/langgreekmodel.py
@@ -0,0 +1,225 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage Return / Line Feed
+# 253: symbols (punctuation) that do not belong to a word
+# 252: 0 - 9
+
+# Character Mapping Table:
+Latin7_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40
+ 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50
+253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60
+ 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90
+253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0
+253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0
+110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0
+ 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0
+124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0
+ 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0
+)
+
+win1253_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40
+ 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50
+253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60
+ 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90
+253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0
+253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0
+110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0
+ 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0
+124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0
+ 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 98.2851%
+# first 1024 sequences: 1.7001%
+# rest sequences: 0.0359%
+# negative sequences: 0.0148%
+GreekLangModel = (
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0,
+3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
+0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0,
+2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0,
+0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0,
+2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0,
+2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0,
+0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0,
+2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0,
+0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0,
+3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0,
+3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0,
+2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0,
+2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0,
+0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0,
+0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0,
+0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2,
+0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0,
+0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2,
+0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0,
+0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2,
+0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2,
+0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,
+0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2,
+0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0,
+0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0,
+0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0,
+0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,
+0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0,
+0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2,
+0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0,
+0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2,
+0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2,
+0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
+0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2,
+0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,
+0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1,
+0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
+0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2,
+0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
+0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2,
+0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2,
+0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,
+0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0,
+0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,
+0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0,
+0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0,
+0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+)
+
+Latin7GreekModel = {
+ 'charToOrderMap': Latin7_CharToOrderMap,
+ 'precedenceMatrix': GreekLangModel,
+ 'mTypicalPositiveRatio': 0.982851,
+ 'keepEnglishLetter': False,
+ 'charsetName': "ISO-8859-7"
+}
+
+Win1253GreekModel = {
+ 'charToOrderMap': win1253_CharToOrderMap,
+ 'precedenceMatrix': GreekLangModel,
+ 'mTypicalPositiveRatio': 0.982851,
+ 'keepEnglishLetter': False,
+ 'charsetName': "windows-1253"
+}
+
+# flake8: noqa
diff --git a/third_party/python/requests/requests/packages/chardet/langhebrewmodel.py b/third_party/python/requests/requests/packages/chardet/langhebrewmodel.py
new file mode 100644
index 0000000000..75f2bc7fe7
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/langhebrewmodel.py
@@ -0,0 +1,201 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Simon Montagu
+# Portions created by the Initial Developer are Copyright (C) 2005
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+# Shoshannah Forbes - original C code (?)
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage Return / Line Feed
+# 253: symbols (punctuation) that do not belong to a word
+# 252: 0 - 9
+
+# Windows-1255 language model
+# Character Mapping Table:
+win1255_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40
+ 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50
+253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60
+ 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70
+124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
+215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
+ 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
+106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
+ 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
+238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
+ 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23,
+ 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253,
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 98.4004%
+# first 1024 sequences: 1.5981%
+# rest sequences: 0.087%
+# negative sequences: 0.0015%
+HebrewLangModel = (
+0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
+3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
+1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
+1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
+1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
+1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
+1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
+0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
+0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
+1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
+3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
+0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
+0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
+0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
+0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
+0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
+3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
+0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
+0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
+0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
+0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
+0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
+0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
+3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
+0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
+0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
+0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
+1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
+0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
+0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
+0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
+0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
+0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
+0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
+2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
+0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
+0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
+0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
+1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
+0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
+2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
+1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
+2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
+1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
+2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
+0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
+1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
+0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
+)
+
+Win1255HebrewModel = {
+ 'charToOrderMap': win1255_CharToOrderMap,
+ 'precedenceMatrix': HebrewLangModel,
+ 'mTypicalPositiveRatio': 0.984004,
+ 'keepEnglishLetter': False,
+ 'charsetName': "windows-1255"
+}
+
+# flake8: noqa
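
For orientation, a minimal sketch (illustrative only, not part of the patch) of how a model dict such as Win1255HebrewModel is consumed: charToOrderMap maps each byte value to a frequency order, and precedenceMatrix is a flattened 64x64 table indexed by previous order * 64 + current order, mirroring the lookup SingleByteCharSetProber performs in sbcharsetprober.py further down in this patch. The vendored import path is assumed.

from requests.packages.chardet.langhebrewmodel import Win1255HebrewModel

SAMPLE_SIZE = 64  # same constant SingleByteCharSetProber uses

def sequence_category(prev_byte, cur_byte, model=Win1255HebrewModel):
    """Return the 0 (negative) to 3 (positive) category of a two-byte
    sequence, or None if either byte is outside the 64 sampled orders."""
    prev_order = model['charToOrderMap'][prev_byte]
    cur_order = model['charToOrderMap'][cur_byte]
    if prev_order >= SAMPLE_SIZE or cur_order >= SAMPLE_SIZE:
        return None
    return model['precedenceMatrix'][prev_order * SAMPLE_SIZE + cur_order]
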
diff --git a/third_party/python/requests/requests/packages/chardet/langhungarianmodel.py b/third_party/python/requests/requests/packages/chardet/langhungarianmodel.py
new file mode 100644
index 0000000000..49d2f0fe75
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/langhungarianmodel.py
@@ -0,0 +1,225 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage/Return
+# 253: symbol (punctuation) that does not belong to a word
+# 252: 0 - 9
+
+# Character Mapping Table:
+Latin2_HungarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
+ 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
+253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
+ 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
+159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
+175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
+191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
+ 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
+221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
+232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
+ 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
+245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
+)
+
+win1250HungarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
+ 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
+253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
+ 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
+161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
+177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
+191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
+ 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
+221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
+232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
+ 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
+245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 94.7368%
+# first 1024 sequences: 5.2623%
+# rest sequences: 0.8894%
+# negative sequences: 0.0009%
+HungarianLangModel = (
+0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
+3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
+3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
+3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
+0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
+3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
+0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
+3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
+3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
+3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
+1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
+1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
+1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
+3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
+2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
+2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
+2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
+2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
+2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
+3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
+2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
+2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
+2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
+1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
+1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
+3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
+1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
+1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
+2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
+2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
+2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
+3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
+2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
+1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
+1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
+2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
+2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
+1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
+1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
+2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
+1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
+1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
+2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
+2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
+2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
+1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
+1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
+1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
+0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
+2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
+2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
+1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
+2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
+1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
+1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
+2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
+2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
+2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
+1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
+2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
+0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
+1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
+0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
+1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
+0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
+2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
+0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
+)
+
+Latin2HungarianModel = {
+ 'charToOrderMap': Latin2_HungarianCharToOrderMap,
+ 'precedenceMatrix': HungarianLangModel,
+ 'mTypicalPositiveRatio': 0.947368,
+ 'keepEnglishLetter': True,
+ 'charsetName': "ISO-8859-2"
+}
+
+Win1250HungarianModel = {
+ 'charToOrderMap': win1250HungarianCharToOrderMap,
+ 'precedenceMatrix': HungarianLangModel,
+ 'mTypicalPositiveRatio': 0.947368,
+ 'keepEnglishLetter': True,
+ 'charsetName': "windows-1250"
+}
+
+# flake8: noqa
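
A rough usage sketch (illustrative only, not part of the patch; the vendored import path is assumed): each of these model dicts is handed to SingleByteCharSetProber, defined in sbcharsetprober.py further down, which tallies sequence categories against precedenceMatrix and reports the charset named in the dict.

from requests.packages.chardet.sbcharsetprober import SingleByteCharSetProber
from requests.packages.chardet.langhungarianmodel import Latin2HungarianModel

prober = SingleByteCharSetProber(Latin2HungarianModel)
prober.feed(b'szia, hogy vagy?')      # raw, undecoded bytes
print(prober.get_charset_name())      # "ISO-8859-2", taken from the model dict
print(prober.get_confidence())        # heuristic score between 0 and 1
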
diff --git a/third_party/python/requests/requests/packages/chardet/langthaimodel.py b/third_party/python/requests/requests/packages/chardet/langthaimodel.py
new file mode 100644
index 0000000000..0508b1b1ab
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/langthaimodel.py
@@ -0,0 +1,200 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage/Return
+# 253: symbol (punctuation) that does not belong to a word
+# 252: 0 - 9
+
+# The following result for Thai was collected from a limited sample (1M).
+
+# Character Mapping Table:
+TIS620CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40
+188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50
+253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60
+ 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70
+209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
+223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
+236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57,
+ 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54,
+ 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63,
+ 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
+ 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247,
+ 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 92.6386%
+# first 1024 sequences: 7.3177%
+# rest sequences: 1.0230%
+# negative sequences: 0.0436%
+ThaiLangModel = (
+0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
+0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
+3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
+0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
+3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
+3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
+3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
+3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
+3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
+3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
+3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
+2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
+3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
+0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
+0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
+3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
+1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
+3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
+3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
+1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
+0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
+2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
+0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
+3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
+2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
+3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
+0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
+3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
+3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
+2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
+3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
+2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
+3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
+3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
+3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
+3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
+1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
+0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
+0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
+3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
+3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
+1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
+3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
+3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
+0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
+0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
+1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
+1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
+3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
+0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
+0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
+3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
+0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
+0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
+0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
+0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
+0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
+0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
+0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
+3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
+0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
+0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
+3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
+2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
+0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
+3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
+0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
+1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
+1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
+1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+)
+
+TIS620ThaiModel = {
+ 'charToOrderMap': TIS620CharToOrderMap,
+ 'precedenceMatrix': ThaiLangModel,
+ 'mTypicalPositiveRatio': 0.926386,
+ 'keepEnglishLetter': False,
+ 'charsetName': "TIS-620"
+}
+
+# flake8: noqa
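
As a side note, mTypicalPositiveRatio is simply the "first 512 sequences" share from the model-table comment above expressed as a fraction; the single-byte prober uses it as the expected positive-sequence share when scoring. A quick check (illustrative only, not part of the patch; vendored import path assumed):

from requests.packages.chardet.langthaimodel import TIS620ThaiModel

# 92.6386% of sampled sequences fall in the top-512 "positive" bucket.
assert abs(TIS620ThaiModel['mTypicalPositiveRatio'] - 0.926386) < 1e-9
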
diff --git a/third_party/python/requests/requests/packages/chardet/latin1prober.py b/third_party/python/requests/requests/packages/chardet/latin1prober.py
new file mode 100644
index 0000000000..eef3573543
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/latin1prober.py
@@ -0,0 +1,139 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .constants import eNotMe
+from .compat import wrap_ord
+
+FREQ_CAT_NUM = 4
+
+UDF = 0 # undefined
+OTH = 1 # other
+ASC = 2 # ascii capital letter
+ASS = 3 # ascii small letter
+ACV = 4 # accent capital vowel
+ACO = 5 # accent capital other
+ASV = 6 # accent small vowel
+ASO = 7 # accent small other
+CLASS_NUM = 8 # total classes
+
+Latin1_CharToClass = (
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F
+ OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47
+ ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F
+ ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57
+ ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F
+ OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67
+ ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F
+ ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77
+ ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F
+ OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87
+ OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F
+ UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97
+ OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF
+ ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7
+ ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF
+ ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7
+ ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF
+ ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7
+ ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF
+ ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7
+ ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF
+)
+
+# 0 : illegal
+# 1 : very unlikely
+# 2 : normal
+# 3 : very likely
+Latin1ClassModel = (
+ # UDF OTH ASC ASS ACV ACO ASV ASO
+ 0, 0, 0, 0, 0, 0, 0, 0, # UDF
+ 0, 3, 3, 3, 3, 3, 3, 3, # OTH
+ 0, 3, 3, 3, 3, 3, 3, 3, # ASC
+ 0, 3, 3, 3, 1, 1, 3, 3, # ASS
+ 0, 3, 3, 3, 1, 2, 1, 2, # ACV
+ 0, 3, 3, 3, 3, 3, 3, 3, # ACO
+ 0, 3, 1, 3, 1, 1, 1, 3, # ASV
+ 0, 3, 1, 3, 1, 1, 3, 3, # ASO
+)
+
+
+class Latin1Prober(CharSetProber):
+ def __init__(self):
+ CharSetProber.__init__(self)
+ self.reset()
+
+ def reset(self):
+ self._mLastCharClass = OTH
+ self._mFreqCounter = [0] * FREQ_CAT_NUM
+ CharSetProber.reset(self)
+
+ def get_charset_name(self):
+ return "windows-1252"
+
+ def feed(self, aBuf):
+ aBuf = self.filter_with_english_letters(aBuf)
+ for c in aBuf:
+ charClass = Latin1_CharToClass[wrap_ord(c)]
+ freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM)
+ + charClass]
+ if freq == 0:
+ self._mState = eNotMe
+ break
+ self._mFreqCounter[freq] += 1
+ self._mLastCharClass = charClass
+
+ return self.get_state()
+
+ def get_confidence(self):
+ if self.get_state() == eNotMe:
+ return 0.01
+
+ total = sum(self._mFreqCounter)
+ if total < 0.01:
+ confidence = 0.0
+ else:
+ confidence = ((self._mFreqCounter[3] - self._mFreqCounter[1] * 20.0)
+ / total)
+ if confidence < 0.0:
+ confidence = 0.0
+        # lower the confidence of latin1 so that other, more accurate
+        # detectors can take priority.
+ confidence = confidence * 0.73
+ return confidence
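
A minimal sketch of driving Latin1Prober directly (illustrative only, not part of the patch; vendored import path assumed): feed() walks byte pairs through Latin1ClassModel, any "illegal" (0) transition flips the prober to eNotMe, and the final score is damped by the 0.73 factor above so more specific probers can win.

from requests.packages.chardet.latin1prober import Latin1Prober
from requests.packages.chardet import constants

prober = Latin1Prober()
state = prober.feed(b'na\xefve caf\xe9')   # Latin-1 / windows-1252 bytes
if state != constants.eNotMe:
    print(prober.get_charset_name())       # "windows-1252"
    print(prober.get_confidence())         # deliberately damped confidence
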
diff --git a/third_party/python/requests/requests/packages/chardet/mbcharsetprober.py b/third_party/python/requests/requests/packages/chardet/mbcharsetprober.py
new file mode 100644
index 0000000000..bb42f2fb5e
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/mbcharsetprober.py
@@ -0,0 +1,86 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+# Proofpoint, Inc.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import sys
+from . import constants
+from .charsetprober import CharSetProber
+
+
+class MultiByteCharSetProber(CharSetProber):
+ def __init__(self):
+ CharSetProber.__init__(self)
+ self._mDistributionAnalyzer = None
+ self._mCodingSM = None
+ self._mLastChar = [0, 0]
+
+ def reset(self):
+ CharSetProber.reset(self)
+ if self._mCodingSM:
+ self._mCodingSM.reset()
+ if self._mDistributionAnalyzer:
+ self._mDistributionAnalyzer.reset()
+ self._mLastChar = [0, 0]
+
+ def get_charset_name(self):
+ pass
+
+ def feed(self, aBuf):
+ aLen = len(aBuf)
+ for i in range(0, aLen):
+ codingState = self._mCodingSM.next_state(aBuf[i])
+ if codingState == constants.eError:
+ if constants._debug:
+ sys.stderr.write(self.get_charset_name()
+ + ' prober hit error at byte ' + str(i)
+ + '\n')
+ self._mState = constants.eNotMe
+ break
+ elif codingState == constants.eItsMe:
+ self._mState = constants.eFoundIt
+ break
+ elif codingState == constants.eStart:
+ charLen = self._mCodingSM.get_current_charlen()
+ if i == 0:
+ self._mLastChar[1] = aBuf[0]
+ self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
+ else:
+ self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
+ charLen)
+
+ self._mLastChar[0] = aBuf[aLen - 1]
+
+ if self.get_state() == constants.eDetecting:
+ if (self._mDistributionAnalyzer.got_enough_data() and
+ (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
+ self._mState = constants.eFoundIt
+
+ return self.get_state()
+
+ def get_confidence(self):
+ return self._mDistributionAnalyzer.get_confidence()
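
MultiByteCharSetProber is a template: concrete probers plug in the coding state machine and distribution analyzer it drives. A sketch in the spirit of the package's Big5Prober (illustrative only, not part of the patch; it assumes the package's codingstatemachine and chardistribution modules, which are not shown in this part of the diff):

from requests.packages.chardet.mbcharsetprober import MultiByteCharSetProber
from requests.packages.chardet.codingstatemachine import CodingStateMachine
from requests.packages.chardet.chardistribution import Big5DistributionAnalysis
from requests.packages.chardet.mbcssm import Big5SMModel


class SketchBig5Prober(MultiByteCharSetProber):
    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        # byte-level validity checking via the Big5 state machine
        self._mCodingSM = CodingStateMachine(Big5SMModel)
        # character frequency analysis that produces the confidence score
        self._mDistributionAnalyzer = Big5DistributionAnalysis()
        self.reset()

    def get_charset_name(self):
        return "Big5"
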
diff --git a/third_party/python/requests/requests/packages/chardet/mbcsgroupprober.py b/third_party/python/requests/requests/packages/chardet/mbcsgroupprober.py
new file mode 100644
index 0000000000..03c9dcf3eb
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/mbcsgroupprober.py
@@ -0,0 +1,54 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+# Proofpoint, Inc.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetgroupprober import CharSetGroupProber
+from .utf8prober import UTF8Prober
+from .sjisprober import SJISProber
+from .eucjpprober import EUCJPProber
+from .gb2312prober import GB2312Prober
+from .euckrprober import EUCKRProber
+from .cp949prober import CP949Prober
+from .big5prober import Big5Prober
+from .euctwprober import EUCTWProber
+
+
+class MBCSGroupProber(CharSetGroupProber):
+ def __init__(self):
+ CharSetGroupProber.__init__(self)
+ self._mProbers = [
+ UTF8Prober(),
+ SJISProber(),
+ EUCJPProber(),
+ GB2312Prober(),
+ EUCKRProber(),
+ CP949Prober(),
+ Big5Prober(),
+ EUCTWProber()
+ ]
+ self.reset()
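
Rough usage sketch (illustrative only, not part of the patch; vendored import path assumed): the group prober fans the same bytes out to every member prober and reports the best guess through the common prober interface.

from requests.packages.chardet.mbcsgroupprober import MBCSGroupProber

prober = MBCSGroupProber()
prober.feed('こんにちは、世界'.encode('utf-8'))   # any multi-byte encoded sample
print(prober.get_charset_name())                  # likely "utf-8" for this input
print(prober.get_confidence())
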
diff --git a/third_party/python/requests/requests/packages/chardet/mbcssm.py b/third_party/python/requests/requests/packages/chardet/mbcssm.py
new file mode 100644
index 0000000000..efe678ca03
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/mbcssm.py
@@ -0,0 +1,572 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .constants import eStart, eError, eItsMe
+
+# BIG5
+
+BIG5_cls = (
+ 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,1, # 78 - 7f
+ 4,4,4,4,4,4,4,4, # 80 - 87
+ 4,4,4,4,4,4,4,4, # 88 - 8f
+ 4,4,4,4,4,4,4,4, # 90 - 97
+ 4,4,4,4,4,4,4,4, # 98 - 9f
+ 4,3,3,3,3,3,3,3, # a0 - a7
+ 3,3,3,3,3,3,3,3, # a8 - af
+ 3,3,3,3,3,3,3,3, # b0 - b7
+ 3,3,3,3,3,3,3,3, # b8 - bf
+ 3,3,3,3,3,3,3,3, # c0 - c7
+ 3,3,3,3,3,3,3,3, # c8 - cf
+ 3,3,3,3,3,3,3,3, # d0 - d7
+ 3,3,3,3,3,3,3,3, # d8 - df
+ 3,3,3,3,3,3,3,3, # e0 - e7
+ 3,3,3,3,3,3,3,3, # e8 - ef
+ 3,3,3,3,3,3,3,3, # f0 - f7
+ 3,3,3,3,3,3,3,0 # f8 - ff
+)
+
+BIG5_st = (
+ eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
+ eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,#08-0f
+ eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart#10-17
+)
+
+Big5CharLenTable = (0, 1, 1, 2, 0)
+
+Big5SMModel = {'classTable': BIG5_cls,
+ 'classFactor': 5,
+ 'stateTable': BIG5_st,
+ 'charLenTable': Big5CharLenTable,
+ 'name': 'Big5'}
+
+# CP949
+
+CP949_cls = (
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f
+ 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f
+ 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f
+ 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f
+ 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f
+ 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f
+ 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f
+ 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f
+ 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af
+ 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf
+ 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff
+)
+
+CP949_st = (
+#cls= 0 1 2 3 4 5 6 7 8 9 # previous state =
+ eError,eStart, 3,eError,eStart,eStart, 4, 5,eError, 6, # eStart
+ eError,eError,eError,eError,eError,eError,eError,eError,eError,eError, # eError
+ eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe, # eItsMe
+ eError,eError,eStart,eStart,eError,eError,eError,eStart,eStart,eStart, # 3
+ eError,eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 4
+ eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 5
+ eError,eStart,eStart,eStart,eStart,eError,eError,eStart,eStart,eStart, # 6
+)
+
+CP949CharLenTable = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)
+
+CP949SMModel = {'classTable': CP949_cls,
+ 'classFactor': 10,
+ 'stateTable': CP949_st,
+ 'charLenTable': CP949CharLenTable,
+ 'name': 'CP949'}
+
+# EUC-JP
+
+EUCJP_cls = (
+ 4,4,4,4,4,4,4,4, # 00 - 07
+ 4,4,4,4,4,4,5,5, # 08 - 0f
+ 4,4,4,4,4,4,4,4, # 10 - 17
+ 4,4,4,5,4,4,4,4, # 18 - 1f
+ 4,4,4,4,4,4,4,4, # 20 - 27
+ 4,4,4,4,4,4,4,4, # 28 - 2f
+ 4,4,4,4,4,4,4,4, # 30 - 37
+ 4,4,4,4,4,4,4,4, # 38 - 3f
+ 4,4,4,4,4,4,4,4, # 40 - 47
+ 4,4,4,4,4,4,4,4, # 48 - 4f
+ 4,4,4,4,4,4,4,4, # 50 - 57
+ 4,4,4,4,4,4,4,4, # 58 - 5f
+ 4,4,4,4,4,4,4,4, # 60 - 67
+ 4,4,4,4,4,4,4,4, # 68 - 6f
+ 4,4,4,4,4,4,4,4, # 70 - 77
+ 4,4,4,4,4,4,4,4, # 78 - 7f
+ 5,5,5,5,5,5,5,5, # 80 - 87
+ 5,5,5,5,5,5,1,3, # 88 - 8f
+ 5,5,5,5,5,5,5,5, # 90 - 97
+ 5,5,5,5,5,5,5,5, # 98 - 9f
+ 5,2,2,2,2,2,2,2, # a0 - a7
+ 2,2,2,2,2,2,2,2, # a8 - af
+ 2,2,2,2,2,2,2,2, # b0 - b7
+ 2,2,2,2,2,2,2,2, # b8 - bf
+ 2,2,2,2,2,2,2,2, # c0 - c7
+ 2,2,2,2,2,2,2,2, # c8 - cf
+ 2,2,2,2,2,2,2,2, # d0 - d7
+ 2,2,2,2,2,2,2,2, # d8 - df
+ 0,0,0,0,0,0,0,0, # e0 - e7
+ 0,0,0,0,0,0,0,0, # e8 - ef
+ 0,0,0,0,0,0,0,0, # f0 - f7
+ 0,0,0,0,0,0,0,5 # f8 - ff
+)
+
+EUCJP_st = (
+ 3, 4, 3, 5,eStart,eError,eError,eError,#00-07
+ eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
+ eItsMe,eItsMe,eStart,eError,eStart,eError,eError,eError,#10-17
+ eError,eError,eStart,eError,eError,eError, 3,eError,#18-1f
+ 3,eError,eError,eError,eStart,eStart,eStart,eStart#20-27
+)
+
+EUCJPCharLenTable = (2, 2, 2, 3, 1, 0)
+
+EUCJPSMModel = {'classTable': EUCJP_cls,
+ 'classFactor': 6,
+ 'stateTable': EUCJP_st,
+ 'charLenTable': EUCJPCharLenTable,
+ 'name': 'EUC-JP'}
+
+# EUC-KR
+
+EUCKR_cls = (
+ 1,1,1,1,1,1,1,1, # 00 - 07
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 1,1,1,1,1,1,1,1, # 40 - 47
+ 1,1,1,1,1,1,1,1, # 48 - 4f
+ 1,1,1,1,1,1,1,1, # 50 - 57
+ 1,1,1,1,1,1,1,1, # 58 - 5f
+ 1,1,1,1,1,1,1,1, # 60 - 67
+ 1,1,1,1,1,1,1,1, # 68 - 6f
+ 1,1,1,1,1,1,1,1, # 70 - 77
+ 1,1,1,1,1,1,1,1, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,0,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,2,2,2,2,2,2,2, # a0 - a7
+ 2,2,2,2,2,3,3,3, # a8 - af
+ 2,2,2,2,2,2,2,2, # b0 - b7
+ 2,2,2,2,2,2,2,2, # b8 - bf
+ 2,2,2,2,2,2,2,2, # c0 - c7
+ 2,3,2,2,2,2,2,2, # c8 - cf
+ 2,2,2,2,2,2,2,2, # d0 - d7
+ 2,2,2,2,2,2,2,2, # d8 - df
+ 2,2,2,2,2,2,2,2, # e0 - e7
+ 2,2,2,2,2,2,2,2, # e8 - ef
+ 2,2,2,2,2,2,2,2, # f0 - f7
+ 2,2,2,2,2,2,2,0 # f8 - ff
+)
+
+EUCKR_st = (
+ eError,eStart, 3,eError,eError,eError,eError,eError,#00-07
+ eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,eStart #08-0f
+)
+
+EUCKRCharLenTable = (0, 1, 2, 0)
+
+EUCKRSMModel = {'classTable': EUCKR_cls,
+ 'classFactor': 4,
+ 'stateTable': EUCKR_st,
+ 'charLenTable': EUCKRCharLenTable,
+ 'name': 'EUC-KR'}
+
+# EUC-TW
+
+EUCTW_cls = (
+ 2,2,2,2,2,2,2,2, # 00 - 07
+ 2,2,2,2,2,2,0,0, # 08 - 0f
+ 2,2,2,2,2,2,2,2, # 10 - 17
+ 2,2,2,0,2,2,2,2, # 18 - 1f
+ 2,2,2,2,2,2,2,2, # 20 - 27
+ 2,2,2,2,2,2,2,2, # 28 - 2f
+ 2,2,2,2,2,2,2,2, # 30 - 37
+ 2,2,2,2,2,2,2,2, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,2, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,6,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,3,4,4,4,4,4,4, # a0 - a7
+ 5,5,1,1,1,1,1,1, # a8 - af
+ 1,1,1,1,1,1,1,1, # b0 - b7
+ 1,1,1,1,1,1,1,1, # b8 - bf
+ 1,1,3,1,3,3,3,3, # c0 - c7
+ 3,3,3,3,3,3,3,3, # c8 - cf
+ 3,3,3,3,3,3,3,3, # d0 - d7
+ 3,3,3,3,3,3,3,3, # d8 - df
+ 3,3,3,3,3,3,3,3, # e0 - e7
+ 3,3,3,3,3,3,3,3, # e8 - ef
+ 3,3,3,3,3,3,3,3, # f0 - f7
+ 3,3,3,3,3,3,3,0 # f8 - ff
+)
+
+EUCTW_st = (
+ eError,eError,eStart, 3, 3, 3, 4,eError,#00-07
+ eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
+ eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eStart,eError,#10-17
+ eStart,eStart,eStart,eError,eError,eError,eError,eError,#18-1f
+ 5,eError,eError,eError,eStart,eError,eStart,eStart,#20-27
+ eStart,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
+)
+
+EUCTWCharLenTable = (0, 0, 1, 2, 2, 2, 3)
+
+EUCTWSMModel = {'classTable': EUCTW_cls,
+ 'classFactor': 7,
+ 'stateTable': EUCTW_st,
+ 'charLenTable': EUCTWCharLenTable,
+ 'name': 'x-euc-tw'}
+
+# GB2312
+
+GB2312_cls = (
+ 1,1,1,1,1,1,1,1, # 00 - 07
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 3,3,3,3,3,3,3,3, # 30 - 37
+ 3,3,1,1,1,1,1,1, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,4, # 78 - 7f
+ 5,6,6,6,6,6,6,6, # 80 - 87
+ 6,6,6,6,6,6,6,6, # 88 - 8f
+ 6,6,6,6,6,6,6,6, # 90 - 97
+ 6,6,6,6,6,6,6,6, # 98 - 9f
+ 6,6,6,6,6,6,6,6, # a0 - a7
+ 6,6,6,6,6,6,6,6, # a8 - af
+ 6,6,6,6,6,6,6,6, # b0 - b7
+ 6,6,6,6,6,6,6,6, # b8 - bf
+ 6,6,6,6,6,6,6,6, # c0 - c7
+ 6,6,6,6,6,6,6,6, # c8 - cf
+ 6,6,6,6,6,6,6,6, # d0 - d7
+ 6,6,6,6,6,6,6,6, # d8 - df
+ 6,6,6,6,6,6,6,6, # e0 - e7
+ 6,6,6,6,6,6,6,6, # e8 - ef
+ 6,6,6,6,6,6,6,6, # f0 - f7
+ 6,6,6,6,6,6,6,0 # f8 - ff
+)
+
+GB2312_st = (
+ eError,eStart,eStart,eStart,eStart,eStart, 3,eError,#00-07
+ eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
+ eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,#10-17
+ 4,eError,eStart,eStart,eError,eError,eError,eError,#18-1f
+ eError,eError, 5,eError,eError,eError,eItsMe,eError,#20-27
+ eError,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
+)
+
+# To be accurate, the length of class 6 can be either 2 or 4.
+# But it is not necessary to discriminate between the two since
+# it is used for frequency analysis only, and we are validating
+# each code range there as well. So it is safe to set it to be
+# 2 here.
+GB2312CharLenTable = (0, 1, 1, 1, 1, 1, 2)
+
+GB2312SMModel = {'classTable': GB2312_cls,
+ 'classFactor': 7,
+ 'stateTable': GB2312_st,
+ 'charLenTable': GB2312CharLenTable,
+ 'name': 'GB2312'}
+
+# Shift_JIS
+
+SJIS_cls = (
+ 1,1,1,1,1,1,1,1, # 00 - 07
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,1, # 78 - 7f
+ 3,3,3,3,3,2,2,3, # 80 - 87
+ 3,3,3,3,3,3,3,3, # 88 - 8f
+ 3,3,3,3,3,3,3,3, # 90 - 97
+ 3,3,3,3,3,3,3,3, # 98 - 9f
+    #0xa0 is illegal in sjis encoding, but some pages do
+    #contain such bytes. We need to be more forgiving of them.
+ 2,2,2,2,2,2,2,2, # a0 - a7
+ 2,2,2,2,2,2,2,2, # a8 - af
+ 2,2,2,2,2,2,2,2, # b0 - b7
+ 2,2,2,2,2,2,2,2, # b8 - bf
+ 2,2,2,2,2,2,2,2, # c0 - c7
+ 2,2,2,2,2,2,2,2, # c8 - cf
+ 2,2,2,2,2,2,2,2, # d0 - d7
+ 2,2,2,2,2,2,2,2, # d8 - df
+ 3,3,3,3,3,3,3,3, # e0 - e7
+ 3,3,3,3,3,4,4,4, # e8 - ef
+ 3,3,3,3,3,3,3,3, # f0 - f7
+ 3,3,3,3,3,0,0,0) # f8 - ff
+
+
+SJIS_st = (
+ eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
+ eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
+ eItsMe,eItsMe,eError,eError,eStart,eStart,eStart,eStart #10-17
+)
+
+SJISCharLenTable = (0, 1, 1, 2, 0, 0)
+
+SJISSMModel = {'classTable': SJIS_cls,
+ 'classFactor': 6,
+ 'stateTable': SJIS_st,
+ 'charLenTable': SJISCharLenTable,
+ 'name': 'Shift_JIS'}
+
+# UCS2-BE
+
+UCS2BE_cls = (
+ 0,0,0,0,0,0,0,0, # 00 - 07
+ 0,0,1,0,0,2,0,0, # 08 - 0f
+ 0,0,0,0,0,0,0,0, # 10 - 17
+ 0,0,0,3,0,0,0,0, # 18 - 1f
+ 0,0,0,0,0,0,0,0, # 20 - 27
+ 0,3,3,3,3,3,0,0, # 28 - 2f
+ 0,0,0,0,0,0,0,0, # 30 - 37
+ 0,0,0,0,0,0,0,0, # 38 - 3f
+ 0,0,0,0,0,0,0,0, # 40 - 47
+ 0,0,0,0,0,0,0,0, # 48 - 4f
+ 0,0,0,0,0,0,0,0, # 50 - 57
+ 0,0,0,0,0,0,0,0, # 58 - 5f
+ 0,0,0,0,0,0,0,0, # 60 - 67
+ 0,0,0,0,0,0,0,0, # 68 - 6f
+ 0,0,0,0,0,0,0,0, # 70 - 77
+ 0,0,0,0,0,0,0,0, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,0,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,0,0,0,0,0,0,0, # a0 - a7
+ 0,0,0,0,0,0,0,0, # a8 - af
+ 0,0,0,0,0,0,0,0, # b0 - b7
+ 0,0,0,0,0,0,0,0, # b8 - bf
+ 0,0,0,0,0,0,0,0, # c0 - c7
+ 0,0,0,0,0,0,0,0, # c8 - cf
+ 0,0,0,0,0,0,0,0, # d0 - d7
+ 0,0,0,0,0,0,0,0, # d8 - df
+ 0,0,0,0,0,0,0,0, # e0 - e7
+ 0,0,0,0,0,0,0,0, # e8 - ef
+ 0,0,0,0,0,0,0,0, # f0 - f7
+ 0,0,0,0,0,0,4,5 # f8 - ff
+)
+
+UCS2BE_st = (
+ 5, 7, 7,eError, 4, 3,eError,eError,#00-07
+ eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
+ eItsMe,eItsMe, 6, 6, 6, 6,eError,eError,#10-17
+ 6, 6, 6, 6, 6,eItsMe, 6, 6,#18-1f
+ 6, 6, 6, 6, 5, 7, 7,eError,#20-27
+ 5, 8, 6, 6,eError, 6, 6, 6,#28-2f
+ 6, 6, 6, 6,eError,eError,eStart,eStart #30-37
+)
+
+UCS2BECharLenTable = (2, 2, 2, 0, 2, 2)
+
+UCS2BESMModel = {'classTable': UCS2BE_cls,
+ 'classFactor': 6,
+ 'stateTable': UCS2BE_st,
+ 'charLenTable': UCS2BECharLenTable,
+ 'name': 'UTF-16BE'}
+
+# UCS2-LE
+
+UCS2LE_cls = (
+ 0,0,0,0,0,0,0,0, # 00 - 07
+ 0,0,1,0,0,2,0,0, # 08 - 0f
+ 0,0,0,0,0,0,0,0, # 10 - 17
+ 0,0,0,3,0,0,0,0, # 18 - 1f
+ 0,0,0,0,0,0,0,0, # 20 - 27
+ 0,3,3,3,3,3,0,0, # 28 - 2f
+ 0,0,0,0,0,0,0,0, # 30 - 37
+ 0,0,0,0,0,0,0,0, # 38 - 3f
+ 0,0,0,0,0,0,0,0, # 40 - 47
+ 0,0,0,0,0,0,0,0, # 48 - 4f
+ 0,0,0,0,0,0,0,0, # 50 - 57
+ 0,0,0,0,0,0,0,0, # 58 - 5f
+ 0,0,0,0,0,0,0,0, # 60 - 67
+ 0,0,0,0,0,0,0,0, # 68 - 6f
+ 0,0,0,0,0,0,0,0, # 70 - 77
+ 0,0,0,0,0,0,0,0, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,0,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,0,0,0,0,0,0,0, # a0 - a7
+ 0,0,0,0,0,0,0,0, # a8 - af
+ 0,0,0,0,0,0,0,0, # b0 - b7
+ 0,0,0,0,0,0,0,0, # b8 - bf
+ 0,0,0,0,0,0,0,0, # c0 - c7
+ 0,0,0,0,0,0,0,0, # c8 - cf
+ 0,0,0,0,0,0,0,0, # d0 - d7
+ 0,0,0,0,0,0,0,0, # d8 - df
+ 0,0,0,0,0,0,0,0, # e0 - e7
+ 0,0,0,0,0,0,0,0, # e8 - ef
+ 0,0,0,0,0,0,0,0, # f0 - f7
+ 0,0,0,0,0,0,4,5 # f8 - ff
+)
+
+UCS2LE_st = (
+ 6, 6, 7, 6, 4, 3,eError,eError,#00-07
+ eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
+ eItsMe,eItsMe, 5, 5, 5,eError,eItsMe,eError,#10-17
+ 5, 5, 5,eError, 5,eError, 6, 6,#18-1f
+ 7, 6, 8, 8, 5, 5, 5,eError,#20-27
+ 5, 5, 5,eError,eError,eError, 5, 5,#28-2f
+ 5, 5, 5,eError, 5,eError,eStart,eStart #30-37
+)
+
+UCS2LECharLenTable = (2, 2, 2, 2, 2, 2)
+
+UCS2LESMModel = {'classTable': UCS2LE_cls,
+ 'classFactor': 6,
+ 'stateTable': UCS2LE_st,
+ 'charLenTable': UCS2LECharLenTable,
+ 'name': 'UTF-16LE'}
+
+# UTF-8
+
+UTF8_cls = (
+ 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 1,1,1,1,1,1,1,1, # 40 - 47
+ 1,1,1,1,1,1,1,1, # 48 - 4f
+ 1,1,1,1,1,1,1,1, # 50 - 57
+ 1,1,1,1,1,1,1,1, # 58 - 5f
+ 1,1,1,1,1,1,1,1, # 60 - 67
+ 1,1,1,1,1,1,1,1, # 68 - 6f
+ 1,1,1,1,1,1,1,1, # 70 - 77
+ 1,1,1,1,1,1,1,1, # 78 - 7f
+ 2,2,2,2,3,3,3,3, # 80 - 87
+ 4,4,4,4,4,4,4,4, # 88 - 8f
+ 4,4,4,4,4,4,4,4, # 90 - 97
+ 4,4,4,4,4,4,4,4, # 98 - 9f
+ 5,5,5,5,5,5,5,5, # a0 - a7
+ 5,5,5,5,5,5,5,5, # a8 - af
+ 5,5,5,5,5,5,5,5, # b0 - b7
+ 5,5,5,5,5,5,5,5, # b8 - bf
+ 0,0,6,6,6,6,6,6, # c0 - c7
+ 6,6,6,6,6,6,6,6, # c8 - cf
+ 6,6,6,6,6,6,6,6, # d0 - d7
+ 6,6,6,6,6,6,6,6, # d8 - df
+ 7,8,8,8,8,8,8,8, # e0 - e7
+ 8,8,8,8,8,9,8,8, # e8 - ef
+ 10,11,11,11,11,11,11,11, # f0 - f7
+ 12,13,13,13,14,15,0,0 # f8 - ff
+)
+
+UTF8_st = (
+ eError,eStart,eError,eError,eError,eError, 12, 10,#00-07
+ 9, 11, 8, 7, 6, 5, 4, 3,#08-0f
+ eError,eError,eError,eError,eError,eError,eError,eError,#10-17
+ eError,eError,eError,eError,eError,eError,eError,eError,#18-1f
+ eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#20-27
+ eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#28-2f
+ eError,eError, 5, 5, 5, 5,eError,eError,#30-37
+ eError,eError,eError,eError,eError,eError,eError,eError,#38-3f
+ eError,eError,eError, 5, 5, 5,eError,eError,#40-47
+ eError,eError,eError,eError,eError,eError,eError,eError,#48-4f
+ eError,eError, 7, 7, 7, 7,eError,eError,#50-57
+ eError,eError,eError,eError,eError,eError,eError,eError,#58-5f
+ eError,eError,eError,eError, 7, 7,eError,eError,#60-67
+ eError,eError,eError,eError,eError,eError,eError,eError,#68-6f
+ eError,eError, 9, 9, 9, 9,eError,eError,#70-77
+ eError,eError,eError,eError,eError,eError,eError,eError,#78-7f
+ eError,eError,eError,eError,eError, 9,eError,eError,#80-87
+ eError,eError,eError,eError,eError,eError,eError,eError,#88-8f
+ eError,eError, 12, 12, 12, 12,eError,eError,#90-97
+ eError,eError,eError,eError,eError,eError,eError,eError,#98-9f
+ eError,eError,eError,eError,eError, 12,eError,eError,#a0-a7
+ eError,eError,eError,eError,eError,eError,eError,eError,#a8-af
+ eError,eError, 12, 12, 12,eError,eError,eError,#b0-b7
+ eError,eError,eError,eError,eError,eError,eError,eError,#b8-bf
+ eError,eError,eStart,eStart,eStart,eStart,eError,eError,#c0-c7
+ eError,eError,eError,eError,eError,eError,eError,eError #c8-cf
+)
+
+UTF8CharLenTable = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
+
+UTF8SMModel = {'classTable': UTF8_cls,
+ 'classFactor': 16,
+ 'stateTable': UTF8_st,
+ 'charLenTable': UTF8CharLenTable,
+ 'name': 'UTF-8'}
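
These *SMModel dicts are executed by the package's CodingStateMachine (codingstatemachine.py, not shown in this part of the diff): classTable maps a byte to a class, stateTable maps state * classFactor + class to the next state, and charLenTable gives the expected length of the character being assembled. A minimal sketch (illustrative only, not part of the patch; vendored import path assumed):

from requests.packages.chardet.codingstatemachine import CodingStateMachine
from requests.packages.chardet.mbcssm import UTF8SMModel
from requests.packages.chardet import constants

sm = CodingStateMachine(UTF8SMModel)
state = None
for byte in 'é'.encode('utf-8'):       # 0xC3 0xA9, a valid two-byte sequence
    state = sm.next_state(byte)
print(state != constants.eError)       # True: the machine never saw an illegal byte
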
diff --git a/third_party/python/requests/requests/packages/chardet/sbcharsetprober.py b/third_party/python/requests/requests/packages/chardet/sbcharsetprober.py
new file mode 100644
index 0000000000..37291bd27a
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/sbcharsetprober.py
@@ -0,0 +1,120 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import sys
+from . import constants
+from .charsetprober import CharSetProber
+from .compat import wrap_ord
+
+SAMPLE_SIZE = 64
+SB_ENOUGH_REL_THRESHOLD = 1024
+POSITIVE_SHORTCUT_THRESHOLD = 0.95
+NEGATIVE_SHORTCUT_THRESHOLD = 0.05
+SYMBOL_CAT_ORDER = 250
+NUMBER_OF_SEQ_CAT = 4
+POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1
+#NEGATIVE_CAT = 0
+
+
+class SingleByteCharSetProber(CharSetProber):
+ def __init__(self, model, reversed=False, nameProber=None):
+ CharSetProber.__init__(self)
+ self._mModel = model
+ # TRUE if we need to reverse every pair in the model lookup
+ self._mReversed = reversed
+ # Optional auxiliary prober for name decision
+ self._mNameProber = nameProber
+ self.reset()
+
+ def reset(self):
+ CharSetProber.reset(self)
+ # char order of last character
+ self._mLastOrder = 255
+ self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT
+ self._mTotalSeqs = 0
+ self._mTotalChar = 0
+ # characters that fall in our sampling range
+ self._mFreqChar = 0
+
+ def get_charset_name(self):
+ if self._mNameProber:
+ return self._mNameProber.get_charset_name()
+ else:
+ return self._mModel['charsetName']
+
+ def feed(self, aBuf):
+ if not self._mModel['keepEnglishLetter']:
+ aBuf = self.filter_without_english_letters(aBuf)
+ aLen = len(aBuf)
+ if not aLen:
+ return self.get_state()
+ for c in aBuf:
+ order = self._mModel['charToOrderMap'][wrap_ord(c)]
+ if order < SYMBOL_CAT_ORDER:
+ self._mTotalChar += 1
+ if order < SAMPLE_SIZE:
+ self._mFreqChar += 1
+ if self._mLastOrder < SAMPLE_SIZE:
+ self._mTotalSeqs += 1
+ if not self._mReversed:
+ i = (self._mLastOrder * SAMPLE_SIZE) + order
+ model = self._mModel['precedenceMatrix'][i]
+ else: # reverse the order of the letters in the lookup
+ i = (order * SAMPLE_SIZE) + self._mLastOrder
+ model = self._mModel['precedenceMatrix'][i]
+ self._mSeqCounters[model] += 1
+ self._mLastOrder = order
+
+ if self.get_state() == constants.eDetecting:
+ if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD:
+ cf = self.get_confidence()
+ if cf > POSITIVE_SHORTCUT_THRESHOLD:
+ if constants._debug:
+                        sys.stderr.write('%s confidence = %s, we have a '
+ 'winner\n' %
+ (self._mModel['charsetName'], cf))
+ self._mState = constants.eFoundIt
+ elif cf < NEGATIVE_SHORTCUT_THRESHOLD:
+ if constants._debug:
+                        sys.stderr.write('%s confidence = %s, below negative '
+                                         'shortcut threshold %s\n' %
+ (self._mModel['charsetName'], cf,
+ NEGATIVE_SHORTCUT_THRESHOLD))
+ self._mState = constants.eNotMe
+
+ return self.get_state()
+
+ def get_confidence(self):
+ r = 0.01
+ if self._mTotalSeqs > 0:
+ r = ((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs
+ / self._mModel['mTypicalPositiveRatio'])
+ r = r * self._mFreqChar / self._mTotalChar
+ if r >= 1.0:
+ r = 0.99
+ return r
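+
+# Worked example (illustrative, not part of the upstream module): with a
+# model whose mTypicalPositiveRatio is 0.95, 1000 positive sequences out of
+# 1200 total and 900 sampled characters out of 1000, get_confidence() yields
+# (1000 / 1200) / 0.95 * (900 / 1000) ~= 0.79; values >= 1.0 are capped at 0.99.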
diff --git a/third_party/python/requests/requests/packages/chardet/sbcsgroupprober.py b/third_party/python/requests/requests/packages/chardet/sbcsgroupprober.py
new file mode 100644
index 0000000000..1b6196cd16
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/sbcsgroupprober.py
@@ -0,0 +1,69 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetgroupprober import CharSetGroupProber
+from .sbcharsetprober import SingleByteCharSetProber
+from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel,
+ Latin5CyrillicModel, MacCyrillicModel,
+ Ibm866Model, Ibm855Model)
+from .langgreekmodel import Latin7GreekModel, Win1253GreekModel
+from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
+from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
+from .langthaimodel import TIS620ThaiModel
+from .langhebrewmodel import Win1255HebrewModel
+from .hebrewprober import HebrewProber
+
+
+class SBCSGroupProber(CharSetGroupProber):
+ def __init__(self):
+ CharSetGroupProber.__init__(self)
+ self._mProbers = [
+ SingleByteCharSetProber(Win1251CyrillicModel),
+ SingleByteCharSetProber(Koi8rModel),
+ SingleByteCharSetProber(Latin5CyrillicModel),
+ SingleByteCharSetProber(MacCyrillicModel),
+ SingleByteCharSetProber(Ibm866Model),
+ SingleByteCharSetProber(Ibm855Model),
+ SingleByteCharSetProber(Latin7GreekModel),
+ SingleByteCharSetProber(Win1253GreekModel),
+ SingleByteCharSetProber(Latin5BulgarianModel),
+ SingleByteCharSetProber(Win1251BulgarianModel),
+ SingleByteCharSetProber(Latin2HungarianModel),
+ SingleByteCharSetProber(Win1250HungarianModel),
+ SingleByteCharSetProber(TIS620ThaiModel),
+ ]
+ hebrewProber = HebrewProber()
+ logicalHebrewProber = SingleByteCharSetProber(Win1255HebrewModel,
+ False, hebrewProber)
+ visualHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, True,
+ hebrewProber)
+ hebrewProber.set_model_probers(logicalHebrewProber, visualHebrewProber)
+ self._mProbers.extend([hebrewProber, logicalHebrewProber,
+ visualHebrewProber])
+
+ self.reset()
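+
+# Usage sketch (illustrative, not part of the upstream module): the group
+# prober fans each buffer out to every single-byte model and reports the
+# best candidate, e.g.:
+#
+#     prober = SBCSGroupProber()
+#     prober.feed(b'\xcf\xf0\xe8\xe2\xe5\xf2')   # Cyrillic text in windows-1251
+#     prober.get_charset_name(), prober.get_confidence()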
diff --git a/third_party/python/requests/requests/packages/chardet/sjisprober.py b/third_party/python/requests/requests/packages/chardet/sjisprober.py
new file mode 100644
index 0000000000..cd0e9e7078
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/sjisprober.py
@@ -0,0 +1,91 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import sys
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import SJISDistributionAnalysis
+from .jpcntx import SJISContextAnalysis
+from .mbcssm import SJISSMModel
+from . import constants
+
+
+class SJISProber(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(SJISSMModel)
+ self._mDistributionAnalyzer = SJISDistributionAnalysis()
+ self._mContextAnalyzer = SJISContextAnalysis()
+ self.reset()
+
+ def reset(self):
+ MultiByteCharSetProber.reset(self)
+ self._mContextAnalyzer.reset()
+
+ def get_charset_name(self):
+ return self._mContextAnalyzer.get_charset_name()
+
+ def feed(self, aBuf):
+ aLen = len(aBuf)
+ for i in range(0, aLen):
+ codingState = self._mCodingSM.next_state(aBuf[i])
+ if codingState == constants.eError:
+ if constants._debug:
+ sys.stderr.write(self.get_charset_name()
+ + ' prober hit error at byte ' + str(i)
+ + '\n')
+ self._mState = constants.eNotMe
+ break
+ elif codingState == constants.eItsMe:
+ self._mState = constants.eFoundIt
+ break
+ elif codingState == constants.eStart:
+ charLen = self._mCodingSM.get_current_charlen()
+ if i == 0:
+ self._mLastChar[1] = aBuf[0]
+ self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:],
+ charLen)
+ self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
+ else:
+ self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3
+ - charLen], charLen)
+ self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
+ charLen)
+
+ self._mLastChar[0] = aBuf[aLen - 1]
+
+ if self.get_state() == constants.eDetecting:
+ if (self._mContextAnalyzer.got_enough_data() and
+ (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
+ self._mState = constants.eFoundIt
+
+ return self.get_state()
+
+ def get_confidence(self):
+ contxtCf = self._mContextAnalyzer.get_confidence()
+ distribCf = self._mDistributionAnalyzer.get_confidence()
+ return max(contxtCf, distribCf)
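+
+# Note (illustrative, not part of the upstream module): _mLastChar carries the
+# final byte of the previous feed() call so that a Shift_JIS lead byte at the
+# end of one chunk and its trail byte at the start of the next are still
+# analyzed as a single character by the context and distribution analyzers.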
diff --git a/third_party/python/requests/requests/packages/chardet/universaldetector.py b/third_party/python/requests/requests/packages/chardet/universaldetector.py
new file mode 100644
index 0000000000..476522b999
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/universaldetector.py
@@ -0,0 +1,170 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from . import constants
+import sys
+import codecs
+from .latin1prober import Latin1Prober # windows-1252
+from .mbcsgroupprober import MBCSGroupProber # multi-byte character sets
+from .sbcsgroupprober import SBCSGroupProber # single-byte character sets
+from .escprober import EscCharSetProber  # ISO-2022, etc.
+import re
+
+MINIMUM_THRESHOLD = 0.20
+ePureAscii = 0
+eEscAscii = 1
+eHighbyte = 2
+
+
+class UniversalDetector:
+ def __init__(self):
+ self._highBitDetector = re.compile(b'[\x80-\xFF]')
+ self._escDetector = re.compile(b'(\033|~{)')
+ self._mEscCharSetProber = None
+ self._mCharSetProbers = []
+ self.reset()
+
+ def reset(self):
+ self.result = {'encoding': None, 'confidence': 0.0}
+ self.done = False
+ self._mStart = True
+ self._mGotData = False
+ self._mInputState = ePureAscii
+ self._mLastChar = b''
+ if self._mEscCharSetProber:
+ self._mEscCharSetProber.reset()
+ for prober in self._mCharSetProbers:
+ prober.reset()
+
+ def feed(self, aBuf):
+ if self.done:
+ return
+
+ aLen = len(aBuf)
+ if not aLen:
+ return
+
+ if not self._mGotData:
+ # If the data starts with BOM, we know it is UTF
+ if aBuf[:3] == codecs.BOM_UTF8:
+ # EF BB BF UTF-8 with BOM
+ self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0}
+ elif aBuf[:4] == codecs.BOM_UTF32_LE:
+ # FF FE 00 00 UTF-32, little-endian BOM
+ self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
+ elif aBuf[:4] == codecs.BOM_UTF32_BE:
+ # 00 00 FE FF UTF-32, big-endian BOM
+ self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
+ elif aBuf[:4] == b'\xFE\xFF\x00\x00':
+ # FE FF 00 00 UCS-4, unusual octet order BOM (3412)
+ self.result = {
+ 'encoding': "X-ISO-10646-UCS-4-3412",
+ 'confidence': 1.0
+ }
+ elif aBuf[:4] == b'\x00\x00\xFF\xFE':
+ # 00 00 FF FE UCS-4, unusual octet order BOM (2143)
+ self.result = {
+ 'encoding': "X-ISO-10646-UCS-4-2143",
+ 'confidence': 1.0
+ }
+ elif aBuf[:2] == codecs.BOM_LE:
+ # FF FE UTF-16, little endian BOM
+ self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
+ elif aBuf[:2] == codecs.BOM_BE:
+ # FE FF UTF-16, big endian BOM
+ self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}
+
+ self._mGotData = True
+ if self.result['encoding'] and (self.result['confidence'] > 0.0):
+ self.done = True
+ return
+
+ if self._mInputState == ePureAscii:
+ if self._highBitDetector.search(aBuf):
+ self._mInputState = eHighbyte
+ elif ((self._mInputState == ePureAscii) and
+ self._escDetector.search(self._mLastChar + aBuf)):
+ self._mInputState = eEscAscii
+
+ self._mLastChar = aBuf[-1:]
+
+ if self._mInputState == eEscAscii:
+ if not self._mEscCharSetProber:
+ self._mEscCharSetProber = EscCharSetProber()
+ if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
+ self.result = {'encoding': self._mEscCharSetProber.get_charset_name(),
+ 'confidence': self._mEscCharSetProber.get_confidence()}
+ self.done = True
+ elif self._mInputState == eHighbyte:
+ if not self._mCharSetProbers:
+ self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
+ Latin1Prober()]
+ for prober in self._mCharSetProbers:
+ if prober.feed(aBuf) == constants.eFoundIt:
+ self.result = {'encoding': prober.get_charset_name(),
+ 'confidence': prober.get_confidence()}
+ self.done = True
+ break
+
+ def close(self):
+ if self.done:
+ return
+ if not self._mGotData:
+ if constants._debug:
+ sys.stderr.write('no data received!\n')
+ return
+ self.done = True
+
+ if self._mInputState == ePureAscii:
+ self.result = {'encoding': 'ascii', 'confidence': 1.0}
+ return self.result
+
+ if self._mInputState == eHighbyte:
+ proberConfidence = None
+ maxProberConfidence = 0.0
+ maxProber = None
+ for prober in self._mCharSetProbers:
+ if not prober:
+ continue
+ proberConfidence = prober.get_confidence()
+ if proberConfidence > maxProberConfidence:
+ maxProberConfidence = proberConfidence
+ maxProber = prober
+ if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
+ self.result = {'encoding': maxProber.get_charset_name(),
+ 'confidence': maxProber.get_confidence()}
+ return self.result
+
+ if constants._debug:
+            sys.stderr.write('no probers hit minimum threshold\n')
+ for prober in self._mCharSetProbers[0].mProbers:
+ if not prober:
+ continue
+ sys.stderr.write('%s confidence = %s\n' %
+ (prober.get_charset_name(),
+ prober.get_confidence()))
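+
+# Usage sketch (illustrative, not part of the upstream module): feed the
+# detector in chunks, close it, then read the aggregated result, e.g.:
+#
+#     detector = UniversalDetector()
+#     with open('unknown.txt', 'rb') as fp:        # hypothetical input file
+#         for chunk in iter(lambda: fp.read(4096), b''):
+#             detector.feed(chunk)
+#             if detector.done:
+#                 break
+#     detector.close()
+#     detector.result     # e.g. {'encoding': 'UTF-8-SIG', 'confidence': 1.0}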
diff --git a/third_party/python/requests/requests/packages/chardet/utf8prober.py b/third_party/python/requests/requests/packages/chardet/utf8prober.py
new file mode 100644
index 0000000000..1c0bb5d8fd
--- /dev/null
+++ b/third_party/python/requests/requests/packages/chardet/utf8prober.py
@@ -0,0 +1,76 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from . import constants
+from .charsetprober import CharSetProber
+from .codingstatemachine import CodingStateMachine
+from .mbcssm import UTF8SMModel
+
+ONE_CHAR_PROB = 0.5
+
+
+class UTF8Prober(CharSetProber):
+ def __init__(self):
+ CharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(UTF8SMModel)
+ self.reset()
+
+ def reset(self):
+ CharSetProber.reset(self)
+ self._mCodingSM.reset()
+ self._mNumOfMBChar = 0
+
+ def get_charset_name(self):
+ return "utf-8"
+
+ def feed(self, aBuf):
+ for c in aBuf:
+ codingState = self._mCodingSM.next_state(c)
+ if codingState == constants.eError:
+ self._mState = constants.eNotMe
+ break
+ elif codingState == constants.eItsMe:
+ self._mState = constants.eFoundIt
+ break
+ elif codingState == constants.eStart:
+ if self._mCodingSM.get_current_charlen() >= 2:
+ self._mNumOfMBChar += 1
+
+ if self.get_state() == constants.eDetecting:
+ if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
+ self._mState = constants.eFoundIt
+
+ return self.get_state()
+
+ def get_confidence(self):
+ unlike = 0.99
+ if self._mNumOfMBChar < 6:
+ for i in range(0, self._mNumOfMBChar):
+ unlike = unlike * ONE_CHAR_PROB
+ return 1.0 - unlike
+ else:
+ return unlike
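+
+# Worked example (illustrative, not part of the upstream module): every
+# completed multi-byte sequence halves the remaining doubt, so after 4 such
+# sequences get_confidence() returns 1 - 0.99 * 0.5**4 ~= 0.938, and once 6
+# or more have been seen it returns the 0.99 ceiling directly.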
diff --git a/third_party/python/requests/requests/packages/urllib3/__init__.py b/third_party/python/requests/requests/packages/urllib3/__init__.py
new file mode 100644
index 0000000000..e43991a974
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/__init__.py
@@ -0,0 +1,93 @@
+"""
+urllib3 - Thread-safe connection pooling and re-using.
+"""
+
+from __future__ import absolute_import
+import warnings
+
+from .connectionpool import (
+ HTTPConnectionPool,
+ HTTPSConnectionPool,
+ connection_from_url
+)
+
+from . import exceptions
+from .filepost import encode_multipart_formdata
+from .poolmanager import PoolManager, ProxyManager, proxy_from_url
+from .response import HTTPResponse
+from .util.request import make_headers
+from .util.url import get_host
+from .util.timeout import Timeout
+from .util.retry import Retry
+
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+try: # Python 2.7+
+ from logging import NullHandler
+except ImportError:
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
+__license__ = 'MIT'
+__version__ = '1.13.1'
+
+__all__ = (
+ 'HTTPConnectionPool',
+ 'HTTPSConnectionPool',
+ 'PoolManager',
+ 'ProxyManager',
+ 'HTTPResponse',
+ 'Retry',
+ 'Timeout',
+ 'add_stderr_logger',
+ 'connection_from_url',
+ 'disable_warnings',
+ 'encode_multipart_formdata',
+ 'get_host',
+ 'make_headers',
+ 'proxy_from_url',
+)
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+
+def add_stderr_logger(level=logging.DEBUG):
+ """
+ Helper for quickly adding a StreamHandler to the logger. Useful for
+ debugging.
+
+ Returns the handler after adding it.
+ """
+ # This method needs to be in this __init__.py to get the __name__ correct
+ # even if urllib3 is vendored within another package.
+ logger = logging.getLogger(__name__)
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
+ logger.addHandler(handler)
+ logger.setLevel(level)
+ logger.debug('Added a stderr logging handler to logger: %s' % __name__)
+ return handler
+
+# ... Clean up.
+del NullHandler
+
+
+# SecurityWarning's always go off by default.
+warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
+# SubjectAltNameWarning's should go off once per host
+warnings.simplefilter('default', exceptions.SubjectAltNameWarning)
+# InsecurePlatformWarning's don't vary between requests, so we keep it default.
+warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
+ append=True)
+# SNIMissingWarnings should go off only once.
+warnings.simplefilter('default', exceptions.SNIMissingWarning)
+
+
+def disable_warnings(category=exceptions.HTTPWarning):
+ """
+ Helper for quickly disabling all urllib3 warnings.
+ """
+ warnings.simplefilter('ignore', category)
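+
+# Usage sketch (illustrative, not part of the upstream module; the import path
+# below is the vendored location inside requests):
+#
+#     import logging
+#     from requests.packages import urllib3
+#     urllib3.add_stderr_logger(logging.INFO)    # verbose connection logging
+#     urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)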
diff --git a/third_party/python/requests/requests/packages/urllib3/_collections.py b/third_party/python/requests/requests/packages/urllib3/_collections.py
new file mode 100644
index 0000000000..67f3ce994d
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/_collections.py
@@ -0,0 +1,324 @@
+from __future__ import absolute_import
+from collections import Mapping, MutableMapping
+try:
+ from threading import RLock
+except ImportError: # Platform-specific: No threads available
+ class RLock:
+ def __enter__(self):
+ pass
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ pass
+
+
+try: # Python 2.7+
+ from collections import OrderedDict
+except ImportError:
+ from .packages.ordered_dict import OrderedDict
+from .packages.six import iterkeys, itervalues, PY3
+
+
+__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']
+
+
+_Null = object()
+
+
+class RecentlyUsedContainer(MutableMapping):
+ """
+ Provides a thread-safe dict-like container which maintains up to
+ ``maxsize`` keys while throwing away the least-recently-used keys beyond
+ ``maxsize``.
+
+ :param maxsize:
+ Maximum number of recent elements to retain.
+
+ :param dispose_func:
+ Every time an item is evicted from the container,
+        ``dispose_func(value)`` is called.
+ """
+
+ ContainerCls = OrderedDict
+
+ def __init__(self, maxsize=10, dispose_func=None):
+ self._maxsize = maxsize
+ self.dispose_func = dispose_func
+
+ self._container = self.ContainerCls()
+ self.lock = RLock()
+
+ def __getitem__(self, key):
+ # Re-insert the item, moving it to the end of the eviction line.
+ with self.lock:
+ item = self._container.pop(key)
+ self._container[key] = item
+ return item
+
+ def __setitem__(self, key, value):
+ evicted_value = _Null
+ with self.lock:
+ # Possibly evict the existing value of 'key'
+ evicted_value = self._container.get(key, _Null)
+ self._container[key] = value
+
+ # If we didn't evict an existing value, we might have to evict the
+ # least recently used item from the beginning of the container.
+ if len(self._container) > self._maxsize:
+ _key, evicted_value = self._container.popitem(last=False)
+
+ if self.dispose_func and evicted_value is not _Null:
+ self.dispose_func(evicted_value)
+
+ def __delitem__(self, key):
+ with self.lock:
+ value = self._container.pop(key)
+
+ if self.dispose_func:
+ self.dispose_func(value)
+
+ def __len__(self):
+ with self.lock:
+ return len(self._container)
+
+ def __iter__(self):
+ raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.')
+
+ def clear(self):
+ with self.lock:
+ # Copy pointers to all values, then wipe the mapping
+ values = list(itervalues(self._container))
+ self._container.clear()
+
+ if self.dispose_func:
+ for value in values:
+ self.dispose_func(value)
+
+ def keys(self):
+ with self.lock:
+ return list(iterkeys(self._container))
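+
+    # Usage sketch (illustrative, not part of the upstream module):
+    #
+    #     cache = RecentlyUsedContainer(maxsize=2,
+    #                                   dispose_func=lambda conn: conn.close())
+    #     cache['a'], cache['b'] = conn_a, conn_b   # hypothetical connections
+    #     cache['c'] = conn_c        # evicts 'a' and calls conn_a.close()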
+
+
+class HTTPHeaderDict(MutableMapping):
+ """
+ :param headers:
+ An iterable of field-value pairs. Must not contain multiple field names
+ when compared case-insensitively.
+
+ :param kwargs:
+ Additional field-value pairs to pass in to ``dict.update``.
+
+ A ``dict`` like container for storing HTTP Headers.
+
+ Field names are stored and compared case-insensitively in compliance with
+ RFC 7230. Iteration provides the first case-sensitive key seen for each
+ case-insensitive pair.
+
+ Using ``__setitem__`` syntax overwrites fields that compare equal
+ case-insensitively in order to maintain ``dict``'s api. For fields that
+ compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
+ in a loop.
+
+ If multiple fields that are equal case-insensitively are passed to the
+ constructor or ``.update``, the behavior is undefined and some will be
+ lost.
+
+ >>> headers = HTTPHeaderDict()
+ >>> headers.add('Set-Cookie', 'foo=bar')
+ >>> headers.add('set-cookie', 'baz=quxx')
+ >>> headers['content-length'] = '7'
+ >>> headers['SET-cookie']
+ 'foo=bar, baz=quxx'
+ >>> headers['Content-Length']
+ '7'
+ """
+
+ def __init__(self, headers=None, **kwargs):
+ super(HTTPHeaderDict, self).__init__()
+ self._container = {}
+ if headers is not None:
+ if isinstance(headers, HTTPHeaderDict):
+ self._copy_from(headers)
+ else:
+ self.extend(headers)
+ if kwargs:
+ self.extend(kwargs)
+
+ def __setitem__(self, key, val):
+ self._container[key.lower()] = (key, val)
+ return self._container[key.lower()]
+
+ def __getitem__(self, key):
+ val = self._container[key.lower()]
+ return ', '.join(val[1:])
+
+ def __delitem__(self, key):
+ del self._container[key.lower()]
+
+ def __contains__(self, key):
+ return key.lower() in self._container
+
+ def __eq__(self, other):
+ if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
+ return False
+ if not isinstance(other, type(self)):
+ other = type(self)(other)
+ return (dict((k.lower(), v) for k, v in self.itermerged()) ==
+ dict((k.lower(), v) for k, v in other.itermerged()))
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ if not PY3: # Python 2
+ iterkeys = MutableMapping.iterkeys
+ itervalues = MutableMapping.itervalues
+
+ __marker = object()
+
+ def __len__(self):
+ return len(self._container)
+
+ def __iter__(self):
+ # Only provide the originally cased names
+ for vals in self._container.values():
+ yield vals[0]
+
+ def pop(self, key, default=__marker):
+ '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised.
+ '''
+ # Using the MutableMapping function directly fails due to the private marker.
+ # Using ordinary dict.pop would expose the internal structures.
+ # So let's reinvent the wheel.
+ try:
+ value = self[key]
+ except KeyError:
+ if default is self.__marker:
+ raise
+ return default
+ else:
+ del self[key]
+ return value
+
+ def discard(self, key):
+ try:
+ del self[key]
+ except KeyError:
+ pass
+
+ def add(self, key, val):
+ """Adds a (name, value) pair, doesn't overwrite the value if it already
+ exists.
+
+ >>> headers = HTTPHeaderDict(foo='bar')
+ >>> headers.add('Foo', 'baz')
+ >>> headers['foo']
+ 'bar, baz'
+ """
+ key_lower = key.lower()
+ new_vals = key, val
+ # Keep the common case aka no item present as fast as possible
+ vals = self._container.setdefault(key_lower, new_vals)
+ if new_vals is not vals:
+ # new_vals was not inserted, as there was a previous one
+ if isinstance(vals, list):
+ # If already several items got inserted, we have a list
+ vals.append(val)
+ else:
+ # vals should be a tuple then, i.e. only one item so far
+ # Need to convert the tuple to list for further extension
+ self._container[key_lower] = [vals[0], vals[1], val]
+
+ def extend(self, *args, **kwargs):
+ """Generic import function for any type of header-like object.
+ Adapted version of MutableMapping.update in order to insert items
+ with self.add instead of self.__setitem__
+ """
+ if len(args) > 1:
+ raise TypeError("extend() takes at most 1 positional "
+                            "argument ({0} given)".format(len(args)))
+ other = args[0] if len(args) >= 1 else ()
+
+ if isinstance(other, HTTPHeaderDict):
+ for key, val in other.iteritems():
+ self.add(key, val)
+ elif isinstance(other, Mapping):
+ for key in other:
+ self.add(key, other[key])
+ elif hasattr(other, "keys"):
+ for key in other.keys():
+ self.add(key, other[key])
+ else:
+ for key, value in other:
+ self.add(key, value)
+
+ for key, value in kwargs.items():
+ self.add(key, value)
+
+ def getlist(self, key):
+ """Returns a list of all the values for the named field. Returns an
+ empty list if the key doesn't exist."""
+ try:
+ vals = self._container[key.lower()]
+ except KeyError:
+ return []
+ else:
+ if isinstance(vals, tuple):
+ return [vals[1]]
+ else:
+ return vals[1:]
+
+ # Backwards compatibility for httplib
+ getheaders = getlist
+ getallmatchingheaders = getlist
+ iget = getlist
+
+ def __repr__(self):
+ return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
+
+ def _copy_from(self, other):
+ for key in other:
+ val = other.getlist(key)
+ if isinstance(val, list):
+ # Don't need to convert tuples
+ val = list(val)
+ self._container[key.lower()] = [key] + val
+
+ def copy(self):
+ clone = type(self)()
+ clone._copy_from(self)
+ return clone
+
+ def iteritems(self):
+ """Iterate over all header lines, including duplicate ones."""
+ for key in self:
+ vals = self._container[key.lower()]
+ for val in vals[1:]:
+ yield vals[0], val
+
+ def itermerged(self):
+ """Iterate over all headers, merging duplicate ones together."""
+ for key in self:
+ val = self._container[key.lower()]
+ yield val[0], ', '.join(val[1:])
+
+ def items(self):
+ return list(self.iteritems())
+
+ @classmethod
+ def from_httplib(cls, message): # Python 2
+ """Read headers from a Python 2 httplib message object."""
+ # python2.7 does not expose a proper API for exporting multiheaders
+ # efficiently. This function re-reads raw lines from the message
+ # object and extracts the multiheaders properly.
+ headers = []
+
+ for line in message.headers:
+ if line.startswith((' ', '\t')):
+ key, value = headers[-1]
+ headers[-1] = (key, value + '\r\n' + line.rstrip())
+ continue
+
+ key, value = line.split(':', 1)
+ headers.append((key, value.strip()))
+
+ return cls(headers)
diff --git a/third_party/python/requests/requests/packages/urllib3/connection.py b/third_party/python/requests/requests/packages/urllib3/connection.py
new file mode 100644
index 0000000000..1e4cd41758
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/connection.py
@@ -0,0 +1,288 @@
+from __future__ import absolute_import
+import datetime
+import os
+import sys
+import socket
+from socket import error as SocketError, timeout as SocketTimeout
+import warnings
+from .packages import six
+
+try: # Python 3
+ from http.client import HTTPConnection as _HTTPConnection
+ from http.client import HTTPException # noqa: unused in this module
+except ImportError:
+ from httplib import HTTPConnection as _HTTPConnection
+ from httplib import HTTPException # noqa: unused in this module
+
+try: # Compiled with SSL?
+ import ssl
+ BaseSSLError = ssl.SSLError
+except (ImportError, AttributeError): # Platform-specific: No SSL.
+ ssl = None
+
+ class BaseSSLError(BaseException):
+ pass
+
+
+try: # Python 3:
+ # Not a no-op, we're adding this to the namespace so it can be imported.
+ ConnectionError = ConnectionError
+except NameError: # Python 2:
+ class ConnectionError(Exception):
+ pass
+
+
+from .exceptions import (
+ NewConnectionError,
+ ConnectTimeoutError,
+ SubjectAltNameWarning,
+ SystemTimeWarning,
+)
+from .packages.ssl_match_hostname import match_hostname
+
+from .util.ssl_ import (
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
+ assert_fingerprint,
+)
+
+
+from .util import connection
+
+port_by_scheme = {
+ 'http': 80,
+ 'https': 443,
+}
+
+RECENT_DATE = datetime.date(2014, 1, 1)
+
+
+class DummyConnection(object):
+ """Used to detect a failed ConnectionCls import."""
+ pass
+
+
+class HTTPConnection(_HTTPConnection, object):
+ """
+ Based on httplib.HTTPConnection but provides an extra constructor
+ backwards-compatibility layer between older and newer Pythons.
+
+ Additional keyword parameters are used to configure attributes of the connection.
+ Accepted parameters include:
+
+ - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
+ - ``source_address``: Set the source address for the current connection.
+
+ .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x
+
+ - ``socket_options``: Set specific options on the underlying socket. If not specified, then
+ defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
+ Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
+
+ For example, if you wish to enable TCP Keep Alive in addition to the defaults,
+ you might pass::
+
+ HTTPConnection.default_socket_options + [
+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ ]
+
+ Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
+ """
+
+ default_port = port_by_scheme['http']
+
+ #: Disable Nagle's algorithm by default.
+ #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
+ default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
+
+ #: Whether this connection verifies the host's certificate.
+ is_verified = False
+
+ def __init__(self, *args, **kw):
+ if six.PY3: # Python 3
+ kw.pop('strict', None)
+
+ # Pre-set source_address in case we have an older Python like 2.6.
+ self.source_address = kw.get('source_address')
+
+ if sys.version_info < (2, 7): # Python 2.6
+ # _HTTPConnection on Python 2.6 will balk at this keyword arg, but
+ # not newer versions. We can still use it when creating a
+ # connection though, so we pop it *after* we have saved it as
+ # self.source_address.
+ kw.pop('source_address', None)
+
+ #: The socket options provided by the user. If no options are
+ #: provided, we use the default options.
+ self.socket_options = kw.pop('socket_options', self.default_socket_options)
+
+ # Superclass also sets self.source_address in Python 2.7+.
+ _HTTPConnection.__init__(self, *args, **kw)
+
+ def _new_conn(self):
+ """ Establish a socket connection and set nodelay settings on it.
+
+ :return: New socket connection.
+ """
+ extra_kw = {}
+ if self.source_address:
+ extra_kw['source_address'] = self.source_address
+
+ if self.socket_options:
+ extra_kw['socket_options'] = self.socket_options
+
+ try:
+ conn = connection.create_connection(
+ (self.host, self.port), self.timeout, **extra_kw)
+
+ except SocketTimeout as e:
+ raise ConnectTimeoutError(
+ self, "Connection to %s timed out. (connect timeout=%s)" %
+ (self.host, self.timeout))
+
+ except SocketError as e:
+ raise NewConnectionError(
+ self, "Failed to establish a new connection: %s" % e)
+
+ return conn
+
+ def _prepare_conn(self, conn):
+ self.sock = conn
+ # the _tunnel_host attribute was added in python 2.6.3 (via
+ # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
+ # not have them.
+ if getattr(self, '_tunnel_host', None):
+ # TODO: Fix tunnel so it doesn't depend on self.sock state.
+ self._tunnel()
+ # Mark this connection as not reusable
+ self.auto_open = 0
+
+ def connect(self):
+ conn = self._new_conn()
+ self._prepare_conn(conn)
+
+
+class HTTPSConnection(HTTPConnection):
+ default_port = port_by_scheme['https']
+
+ def __init__(self, host, port=None, key_file=None, cert_file=None,
+ strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):
+
+ HTTPConnection.__init__(self, host, port, strict=strict,
+ timeout=timeout, **kw)
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+
+ # Required property for Google AppEngine 1.9.0 which otherwise causes
+ # HTTPS requests to go out as HTTP. (See Issue #356)
+ self._protocol = 'https'
+
+ def connect(self):
+ conn = self._new_conn()
+ self._prepare_conn(conn)
+ self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)
+
+
+class VerifiedHTTPSConnection(HTTPSConnection):
+ """
+ Based on httplib.HTTPSConnection but wraps the socket with
+ SSL certification.
+ """
+ cert_reqs = None
+ ca_certs = None
+ ca_cert_dir = None
+ ssl_version = None
+ assert_fingerprint = None
+
+ def set_cert(self, key_file=None, cert_file=None,
+ cert_reqs=None, ca_certs=None,
+ assert_hostname=None, assert_fingerprint=None,
+ ca_cert_dir=None):
+
+ if (ca_certs or ca_cert_dir) and cert_reqs is None:
+ cert_reqs = 'CERT_REQUIRED'
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
+
+ def connect(self):
+ # Add certificate verification
+ conn = self._new_conn()
+
+ resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
+ resolved_ssl_version = resolve_ssl_version(self.ssl_version)
+
+ hostname = self.host
+ if getattr(self, '_tunnel_host', None):
+ # _tunnel_host was added in Python 2.6.3
+ # (See: http://hg.python.org/cpython/rev/0f57b30a152f)
+
+ self.sock = conn
+ # Calls self._set_hostport(), so self.host is
+ # self._tunnel_host below.
+ self._tunnel()
+ # Mark this connection as not reusable
+ self.auto_open = 0
+
+ # Override the host with the one we're requesting data from.
+ hostname = self._tunnel_host
+
+ is_time_off = datetime.date.today() < RECENT_DATE
+ if is_time_off:
+ warnings.warn((
+ 'System time is way off (before {0}). This will probably '
+ 'lead to SSL verification errors').format(RECENT_DATE),
+ SystemTimeWarning
+ )
+
+ # Wrap socket using verification with the root certs in
+ # trusted_root_certs
+ self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
+ cert_reqs=resolved_cert_reqs,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ server_hostname=hostname,
+ ssl_version=resolved_ssl_version)
+
+ if self.assert_fingerprint:
+ assert_fingerprint(self.sock.getpeercert(binary_form=True),
+ self.assert_fingerprint)
+ elif resolved_cert_reqs != ssl.CERT_NONE \
+ and self.assert_hostname is not False:
+ cert = self.sock.getpeercert()
+ if not cert.get('subjectAltName', ()):
+ warnings.warn((
+ 'Certificate for {0} has no `subjectAltName`, falling back to check for a '
+ '`commonName` for now. This feature is being removed by major browsers and '
+ 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
+ 'for details.)'.format(hostname)),
+ SubjectAltNameWarning
+ )
+
+ # In case the hostname is an IPv6 address, strip the square
+ # brackets from it before using it to validate. This is because
+ # a certificate with an IPv6 address in it won't have square
+ # brackets around that address. Sadly, match_hostname won't do this
+ # for us: it expects the plain host part without any extra work
+ # that might have been done to make it palatable to httplib.
+ asserted_hostname = self.assert_hostname or hostname
+ asserted_hostname = asserted_hostname.strip('[]')
+ match_hostname(cert, asserted_hostname)
+
+ self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or
+ self.assert_fingerprint is not None)
+
+
+if ssl:
+ # Make a copy for testing.
+ UnverifiedHTTPSConnection = HTTPSConnection
+ HTTPSConnection = VerifiedHTTPSConnection
+else:
+ HTTPSConnection = DummyConnection
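+
+# Usage sketch (illustrative, not part of the upstream module): a verified TLS
+# connection is configured with set_cert() before connect(), e.g.:
+#
+#     conn = VerifiedHTTPSConnection('example.com', 443)
+#     conn.set_cert(ca_certs='/path/to/ca-bundle.pem',   # hypothetical path
+#                   cert_reqs='CERT_REQUIRED')
+#     conn.connect()          # raises on an untrusted or mismatched cert
+#     conn.is_verified        # True once the certificate chain was checked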
diff --git a/third_party/python/requests/requests/packages/urllib3/connectionpool.py b/third_party/python/requests/requests/packages/urllib3/connectionpool.py
new file mode 100644
index 0000000000..995b4167b5
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/connectionpool.py
@@ -0,0 +1,818 @@
+from __future__ import absolute_import
+import errno
+import logging
+import sys
+import warnings
+
+from socket import error as SocketError, timeout as SocketTimeout
+import socket
+
+try: # Python 3
+ from queue import LifoQueue, Empty, Full
+except ImportError:
+ from Queue import LifoQueue, Empty, Full
+ # Queue is imported for side effects on MS Windows
+ import Queue as _unused_module_Queue # noqa: unused
+
+
+from .exceptions import (
+ ClosedPoolError,
+ ProtocolError,
+ EmptyPoolError,
+ HeaderParsingError,
+ HostChangedError,
+ LocationValueError,
+ MaxRetryError,
+ ProxyError,
+ ReadTimeoutError,
+ SSLError,
+ TimeoutError,
+ InsecureRequestWarning,
+ NewConnectionError,
+)
+from .packages.ssl_match_hostname import CertificateError
+from .packages import six
+from .connection import (
+ port_by_scheme,
+ DummyConnection,
+ HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
+ HTTPException, BaseSSLError,
+)
+from .request import RequestMethods
+from .response import HTTPResponse
+
+from .util.connection import is_connection_dropped
+from .util.response import assert_header_parsing
+from .util.retry import Retry
+from .util.timeout import Timeout
+from .util.url import get_host, Url
+
+
+xrange = six.moves.xrange
+
+log = logging.getLogger(__name__)
+
+_Default = object()
+
+
+# Pool objects
+class ConnectionPool(object):
+ """
+ Base class for all connection pools, such as
+ :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
+ """
+
+ scheme = None
+ QueueCls = LifoQueue
+
+ def __init__(self, host, port=None):
+ if not host:
+ raise LocationValueError("No host specified.")
+
+ self.host = host
+ self.port = port
+
+ def __str__(self):
+ return '%s(host=%r, port=%r)' % (type(self).__name__,
+ self.host, self.port)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.close()
+ # Return False to re-raise any potential exceptions
+ return False
+
+    def close(self):
+ """
+ Close all pooled connections and disable the pool.
+ """
+ pass
+
+
+# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
+_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
+
+
+class HTTPConnectionPool(ConnectionPool, RequestMethods):
+ """
+ Thread-safe connection pool for one host.
+
+ :param host:
+ Host used for this HTTP Connection (e.g. "localhost"), passed into
+ :class:`httplib.HTTPConnection`.
+
+ :param port:
+ Port used for this HTTP Connection (None is equivalent to 80), passed
+ into :class:`httplib.HTTPConnection`.
+
+ :param strict:
+ Causes BadStatusLine to be raised if the status line can't be parsed
+ as a valid HTTP/1.0 or 1.1 status line, passed into
+ :class:`httplib.HTTPConnection`.
+
+ .. note::
+ Only works in Python 2. This parameter is ignored in Python 3.
+
+ :param timeout:
+ Socket timeout in seconds for each individual connection. This can
+ be a float or integer, which sets the timeout for the HTTP request,
+ or an instance of :class:`urllib3.util.Timeout` which gives you more
+        fine-grained control over request timeouts. After the constructor has
+        run, this is always a `urllib3.util.Timeout` object.
+
+ :param maxsize:
+ Number of connections to save that can be reused. More than 1 is useful
+ in multithreaded situations. If ``block`` is set to False, more
+ connections will be created but they will not be saved once they've
+ been used.
+
+ :param block:
+ If set to True, no more than ``maxsize`` connections will be used at
+ a time. When no free connections are available, the call will block
+ until a connection has been released. This is a useful side effect for
+ particular multithreaded situations where one does not want to use more
+ than maxsize connections per host to prevent flooding.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+
+ :param retries:
+ Retry configuration to use by default with requests in this pool.
+
+ :param _proxy:
+ Parsed proxy URL, should not be used directly, instead, see
+        :class:`urllib3.connectionpool.ProxyManager`
+
+ :param _proxy_headers:
+ A dictionary with proxy headers, should not be used directly,
+        instead, see :class:`urllib3.connectionpool.ProxyManager`
+
+ :param \**conn_kw:
+ Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
+ :class:`urllib3.connection.HTTPSConnection` instances.
+ """
+
+ scheme = 'http'
+ ConnectionCls = HTTPConnection
+
+ def __init__(self, host, port=None, strict=False,
+ timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
+ headers=None, retries=None,
+ _proxy=None, _proxy_headers=None,
+ **conn_kw):
+ ConnectionPool.__init__(self, host, port)
+ RequestMethods.__init__(self, headers)
+
+ self.strict = strict
+
+ if not isinstance(timeout, Timeout):
+ timeout = Timeout.from_float(timeout)
+
+ if retries is None:
+ retries = Retry.DEFAULT
+
+ self.timeout = timeout
+ self.retries = retries
+
+ self.pool = self.QueueCls(maxsize)
+ self.block = block
+
+ self.proxy = _proxy
+ self.proxy_headers = _proxy_headers or {}
+
+ # Fill the queue up so that doing get() on it will block properly
+ for _ in xrange(maxsize):
+ self.pool.put(None)
+
+ # These are mostly for testing and debugging purposes.
+ self.num_connections = 0
+ self.num_requests = 0
+ self.conn_kw = conn_kw
+
+ if self.proxy:
+ # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
+ # We cannot know if the user has added default socket options, so we cannot replace the
+ # list.
+ self.conn_kw.setdefault('socket_options', [])
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`HTTPConnection`.
+ """
+ self.num_connections += 1
+ log.info("Starting new HTTP connection (%d): %s" %
+ (self.num_connections, self.host))
+
+ conn = self.ConnectionCls(host=self.host, port=self.port,
+ timeout=self.timeout.connect_timeout,
+ strict=self.strict, **self.conn_kw)
+ return conn
+
+ def _get_conn(self, timeout=None):
+ """
+ Get a connection. Will return a pooled connection if one is available.
+
+ If no connections are available and :prop:`.block` is ``False``, then a
+ fresh connection is returned.
+
+ :param timeout:
+ Seconds to wait before giving up and raising
+ :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
+ :prop:`.block` is ``True``.
+ """
+ conn = None
+ try:
+ conn = self.pool.get(block=self.block, timeout=timeout)
+
+ except AttributeError: # self.pool is None
+ raise ClosedPoolError(self, "Pool is closed.")
+
+ except Empty:
+ if self.block:
+ raise EmptyPoolError(self,
+ "Pool reached maximum size and no more "
+ "connections are allowed.")
+ pass # Oh well, we'll create a new connection then
+
+ # If this is a persistent connection, check if it got disconnected
+ if conn and is_connection_dropped(conn):
+ log.info("Resetting dropped connection: %s" % self.host)
+ conn.close()
+ if getattr(conn, 'auto_open', 1) == 0:
+ # This is a proxied connection that has been mutated by
+ # httplib._tunnel() and cannot be reused (since it would
+ # attempt to bypass the proxy)
+ conn = None
+
+ return conn or self._new_conn()
+
+ def _put_conn(self, conn):
+ """
+ Put a connection back into the pool.
+
+ :param conn:
+ Connection object for the current host and port as returned by
+ :meth:`._new_conn` or :meth:`._get_conn`.
+
+ If the pool is already full, the connection is closed and discarded
+ because we exceeded maxsize. If connections are discarded frequently,
+ then maxsize should be increased.
+
+ If the pool is closed, then the connection will be closed and discarded.
+ """
+ try:
+ self.pool.put(conn, block=False)
+ return # Everything is dandy, done.
+ except AttributeError:
+ # self.pool is None.
+ pass
+ except Full:
+ # This should never happen if self.block == True
+ log.warning(
+ "Connection pool is full, discarding connection: %s" %
+ self.host)
+
+ # Connection never got put back into the pool, close it.
+ if conn:
+ conn.close()
+
+ def _validate_conn(self, conn):
+ """
+ Called right before a request is made, after the socket is created.
+ """
+ pass
+
+ def _prepare_proxy(self, conn):
+ # Nothing to do for HTTP connections.
+ pass
+
+ def _get_timeout(self, timeout):
+ """ Helper that always returns a :class:`urllib3.util.Timeout` """
+ if timeout is _Default:
+ return self.timeout.clone()
+
+ if isinstance(timeout, Timeout):
+ return timeout.clone()
+ else:
+ # User passed us an int/float. This is for backwards compatibility,
+ # can be removed later
+ return Timeout.from_float(timeout)
+
+ def _raise_timeout(self, err, url, timeout_value):
+ """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
+
+ if isinstance(err, SocketTimeout):
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
+ # See the above comment about EAGAIN in Python 3. In Python 2 we have
+ # to specifically catch it and throw the timeout error
+ if hasattr(err, 'errno') and err.errno in _blocking_errnos:
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
+ # Catch possible read timeouts thrown as SSL errors. If not the
+ # case, rethrow the original. We need to do this because of:
+ # http://bugs.python.org/issue10272
+ if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
+ def _make_request(self, conn, method, url, timeout=_Default,
+ **httplib_request_kw):
+ """
+ Perform a request on a given urllib connection object taken from our
+ pool.
+
+ :param conn:
+ a connection from one of our connection pools
+
+ :param timeout:
+ Socket timeout in seconds for the request. This can be a
+ float or integer, which will set the same timeout value for
+ the socket connect and the socket read, or an instance of
+ :class:`urllib3.util.Timeout`, which gives you more fine-grained
+ control over your timeouts.
+ """
+ self.num_requests += 1
+
+ timeout_obj = self._get_timeout(timeout)
+ timeout_obj.start_connect()
+ conn.timeout = timeout_obj.connect_timeout
+
+ # Trigger any extra validation we need to do.
+ try:
+ self._validate_conn(conn)
+ except (SocketTimeout, BaseSSLError) as e:
+ # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
+ self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
+ raise
+
+ # conn.request() calls httplib.*.request, not the method in
+ # urllib3.request. It also calls makefile (recv) on the socket.
+ conn.request(method, url, **httplib_request_kw)
+
+ # Reset the timeout for the recv() on the socket
+ read_timeout = timeout_obj.read_timeout
+
+ # App Engine doesn't have a sock attr
+ if getattr(conn, 'sock', None):
+ # In Python 3 socket.py will catch EAGAIN and return None when you
+ # try and read into the file pointer created by http.client, which
+ # instead raises a BadStatusLine exception. Instead of catching
+ # the exception and assuming all BadStatusLine exceptions are read
+ # timeouts, check for a zero timeout before making the request.
+ if read_timeout == 0:
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % read_timeout)
+ if read_timeout is Timeout.DEFAULT_TIMEOUT:
+ conn.sock.settimeout(socket.getdefaulttimeout())
+ else: # None or a value
+ conn.sock.settimeout(read_timeout)
+
+ # Receive the response from the server
+ try:
+ try: # Python 2.7, use buffering of HTTP responses
+ httplib_response = conn.getresponse(buffering=True)
+ except TypeError: # Python 2.6 and older
+ httplib_response = conn.getresponse()
+ except (SocketTimeout, BaseSSLError, SocketError) as e:
+ self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
+ raise
+
+ # AppEngine doesn't have a version attr.
+ http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
+ log.debug("\"%s %s %s\" %s %s" % (method, url, http_version,
+ httplib_response.status,
+ httplib_response.length))
+
+ try:
+ assert_header_parsing(httplib_response.msg)
+ except HeaderParsingError as hpe: # Platform-specific: Python 3
+ log.warning(
+ 'Failed to parse headers (url=%s): %s',
+ self._absolute_url(url), hpe, exc_info=True)
+
+ return httplib_response
+
+ def _absolute_url(self, path):
+ return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
+
+ def close(self):
+ """
+ Close all pooled connections and disable the pool.
+ """
+ # Disable access to the pool
+ old_pool, self.pool = self.pool, None
+
+ try:
+ while True:
+ conn = old_pool.get(block=False)
+ if conn:
+ conn.close()
+
+ except Empty:
+ pass # Done.
+
+ def is_same_host(self, url):
+ """
+ Check if the given ``url`` is a member of the same host as this
+ connection pool.
+ """
+ if url.startswith('/'):
+ return True
+
+ # TODO: Add optional support for socket.gethostbyname checking.
+ scheme, host, port = get_host(url)
+
+ # Use explicit default port for comparison when none is given
+ if self.port and not port:
+ port = port_by_scheme.get(scheme)
+ elif not self.port and port == port_by_scheme.get(scheme):
+ port = None
+
+ return (scheme, host, port) == (self.scheme, self.host, self.port)
+
+ def urlopen(self, method, url, body=None, headers=None, retries=None,
+ redirect=True, assert_same_host=True, timeout=_Default,
+ pool_timeout=None, release_conn=None, **response_kw):
+ """
+ Get a connection from the pool and perform an HTTP request. This is the
+ lowest level call for making a request, so you'll need to specify all
+ the raw details.
+
+ .. note::
+
+ More commonly, it's appropriate to use a convenience method provided
+ by :class:`.RequestMethods`, such as :meth:`request`.
+
+ .. note::
+
+ `release_conn` will only behave as expected if
+ `preload_content=False` because we want to make
+ `preload_content=False` the default behaviour someday soon without
+ breaking backwards compatibility.
+
+ :param method:
+ HTTP request method (such as GET, POST, PUT, etc.)
+
+ :param body:
+ Data to send in the request body (useful for creating
+ POST requests, see HTTPConnectionPool.post_url for
+ more convenience).
+
+ :param headers:
+ Dictionary of custom headers to send, such as User-Agent,
+ If-None-Match, etc. If None, pool headers are used. If provided,
+ these headers completely replace any pool-specific headers.
+
+ :param retries:
+ Configure the number of retries to allow before raising a
+ :class:`~urllib3.exceptions.MaxRetryError` exception.
+
+ Pass ``None`` to retry until you receive a response. Pass a
+ :class:`~urllib3.util.retry.Retry` object for fine-grained control
+ over different types of retries.
+ Pass an integer number to retry connection errors that many times,
+ but no other types of errors. Pass zero to never retry.
+
+ If ``False``, then retries are disabled and any exception is raised
+ immediately. Also, instead of raising a MaxRetryError on redirects,
+ the redirect response will be returned.
+
+ :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
+
+ :param redirect:
+ If True, automatically handle redirects (status codes 301, 302,
+ 303, 307, 308). Each redirect counts as a retry. Disabling retries
+ will disable redirect, too.
+
+ :param assert_same_host:
+ If ``True``, will make sure that the host of the pool requests is
+ consistent else will raise HostChangedError. When False, you can
+ use the pool on an HTTP proxy and request foreign hosts.
+
+ :param timeout:
+ If specified, overrides the default timeout for this one
+ request. It may be a float (in seconds) or an instance of
+ :class:`urllib3.util.Timeout`.
+
+ :param pool_timeout:
+ If set and the pool is set to block=True, then this method will
+ block for ``pool_timeout`` seconds and raise EmptyPoolError if no
+ connection is available within the time period.
+
+ :param release_conn:
+ If False, then the urlopen call will not release the connection
+ back into the pool once a response is received (but will release if
+ you read the entire contents of the response such as when
+ `preload_content=True`). This is useful if you're not preloading
+ the response's content immediately. You will need to call
+ ``r.release_conn()`` on the response ``r`` to return the connection
+ back into the pool. If None, it takes the value of
+ ``response_kw.get('preload_content', True)``.
+
+ :param \**response_kw:
+ Additional parameters are passed to
+ :meth:`urllib3.response.HTTPResponse.from_httplib`
+ """
+ if headers is None:
+ headers = self.headers
+
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
+
+ if release_conn is None:
+ release_conn = response_kw.get('preload_content', True)
+
+ # Check host
+ if assert_same_host and not self.is_same_host(url):
+ raise HostChangedError(self, url, retries)
+
+ conn = None
+
+ # Merge the proxy headers. Only do this in HTTP. We have to copy the
+ # headers dict so we can safely change it without those changes being
+ # reflected in anyone else's copy.
+ if self.scheme == 'http':
+ headers = headers.copy()
+ headers.update(self.proxy_headers)
+
+ # Must keep the exception bound to a separate variable or else Python 3
+ # complains about UnboundLocalError.
+ err = None
+
+ try:
+ # Request a connection from the queue.
+ timeout_obj = self._get_timeout(timeout)
+ conn = self._get_conn(timeout=pool_timeout)
+
+ conn.timeout = timeout_obj.connect_timeout
+
+ is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
+ if is_new_proxy_conn:
+ self._prepare_proxy(conn)
+
+ # Make the request on the httplib connection object.
+ httplib_response = self._make_request(conn, method, url,
+ timeout=timeout_obj,
+ body=body, headers=headers)
+
+ # If we're going to release the connection in ``finally:``, then
+ # the request doesn't need to know about the connection. Otherwise
+ # it will also try to release it and we'll have a double-release
+ # mess.
+ response_conn = not release_conn and conn
+
+ # Import httplib's response into our own wrapper object
+ response = HTTPResponse.from_httplib(httplib_response,
+ pool=self,
+ connection=response_conn,
+ **response_kw)
+
+ # else:
+ # The connection will be put back into the pool when
+ # ``response.release_conn()`` is called (implicitly by
+ # ``response.read()``)
+
+ except Empty:
+ # Timed out by queue.
+ raise EmptyPoolError(self, "No pool connections are available.")
+
+ except (BaseSSLError, CertificateError) as e:
+ # Close the connection. If a connection is reused on which there
+ # was a Certificate error, the next request will certainly raise
+ # another Certificate error.
+ conn = conn and conn.close()
+ release_conn = True
+ raise SSLError(e)
+
+ except SSLError:
+ # Treat SSLError separately from BaseSSLError to preserve
+ # traceback.
+ conn = conn and conn.close()
+ release_conn = True
+ raise
+
+ except (TimeoutError, HTTPException, SocketError, ProtocolError) as e:
+            # Discard the connection for these exceptions. It will be
+            # replaced during the next _get_conn() call.
+ conn = conn and conn.close()
+ release_conn = True
+
+ if isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
+ e = ProxyError('Cannot connect to proxy.', e)
+ elif isinstance(e, (SocketError, HTTPException)):
+ e = ProtocolError('Connection aborted.', e)
+
+ retries = retries.increment(method, url, error=e, _pool=self,
+ _stacktrace=sys.exc_info()[2])
+ retries.sleep()
+
+ # Keep track of the error for the retry warning.
+ err = e
+
+ finally:
+ if release_conn:
+ # Put the connection back to be reused. If the connection is
+ # expired then it will be None, which will get replaced with a
+ # fresh connection during _get_conn.
+ self._put_conn(conn)
+
+ if not conn:
+ # Try again
+ log.warning("Retrying (%r) after connection "
+ "broken by '%r': %s" % (retries, err, url))
+ return self.urlopen(method, url, body, headers, retries,
+ redirect, assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, **response_kw)
+
+ # Handle redirect?
+ redirect_location = redirect and response.get_redirect_location()
+ if redirect_location:
+ if response.status == 303:
+ method = 'GET'
+
+ try:
+ retries = retries.increment(method, url, response=response, _pool=self)
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ # Release the connection for this response, since we're not
+ # returning it to be released manually.
+ response.release_conn()
+ raise
+ return response
+
+ log.info("Redirecting %s -> %s" % (url, redirect_location))
+ return self.urlopen(
+ method, redirect_location, body, headers,
+ retries=retries, redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, **response_kw)
+
+ # Check if we should retry the HTTP response.
+ if retries.is_forced_retry(method, status_code=response.status):
+ retries = retries.increment(method, url, response=response, _pool=self)
+ retries.sleep()
+ log.info("Forced retry: %s" % url)
+ return self.urlopen(
+ method, url, body, headers,
+ retries=retries, redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, **response_kw)
+
+ return response
+
+
+class HTTPSConnectionPool(HTTPConnectionPool):
+ """
+ Same as :class:`.HTTPConnectionPool`, but HTTPS.
+
+ When Python is compiled with the :mod:`ssl` module, then
+ :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
+ instead of :class:`.HTTPSConnection`.
+
+ :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
+ ``assert_hostname`` and ``host`` in this order to verify connections.
+ If ``assert_hostname`` is False, no verification is done.
+
+ The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
+ ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
+ available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
+ the connection socket into an SSL socket.
+ """
+
+ scheme = 'https'
+ ConnectionCls = HTTPSConnection
+
+ def __init__(self, host, port=None,
+ strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
+ block=False, headers=None, retries=None,
+ _proxy=None, _proxy_headers=None,
+ key_file=None, cert_file=None, cert_reqs=None,
+ ca_certs=None, ssl_version=None,
+ assert_hostname=None, assert_fingerprint=None,
+ ca_cert_dir=None, **conn_kw):
+
+ HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
+ block, headers, retries, _proxy, _proxy_headers,
+ **conn_kw)
+
+ if ca_certs and cert_reqs is None:
+ cert_reqs = 'CERT_REQUIRED'
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.ca_certs = ca_certs
+ self.ca_cert_dir = ca_cert_dir
+ self.ssl_version = ssl_version
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+
+ def _prepare_conn(self, conn):
+ """
+ Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
+ and establish the tunnel if proxy is used.
+ """
+
+ if isinstance(conn, VerifiedHTTPSConnection):
+ conn.set_cert(key_file=self.key_file,
+ cert_file=self.cert_file,
+ cert_reqs=self.cert_reqs,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ assert_hostname=self.assert_hostname,
+ assert_fingerprint=self.assert_fingerprint)
+ conn.ssl_version = self.ssl_version
+
+ return conn
+
+ def _prepare_proxy(self, conn):
+ """
+        Establish the tunnel connection early, because otherwise httplib
+        would improperly set the Host: header to the proxy's IP:port.
+ """
+ # Python 2.7+
+ try:
+ set_tunnel = conn.set_tunnel
+ except AttributeError: # Platform-specific: Python 2.6
+ set_tunnel = conn._set_tunnel
+
+ if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older
+ set_tunnel(self.host, self.port)
+ else:
+ set_tunnel(self.host, self.port, self.proxy_headers)
+
+ conn.connect()
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`httplib.HTTPSConnection`.
+ """
+ self.num_connections += 1
+ log.info("Starting new HTTPS connection (%d): %s"
+ % (self.num_connections, self.host))
+
+ if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
+ raise SSLError("Can't connect to HTTPS URL because the SSL "
+ "module is not available.")
+
+ actual_host = self.host
+ actual_port = self.port
+ if self.proxy is not None:
+ actual_host = self.proxy.host
+ actual_port = self.proxy.port
+
+ conn = self.ConnectionCls(host=actual_host, port=actual_port,
+ timeout=self.timeout.connect_timeout,
+ strict=self.strict, **self.conn_kw)
+
+ return self._prepare_conn(conn)
+
+ def _validate_conn(self, conn):
+ """
+ Called right before a request is made, after the socket is created.
+ """
+ super(HTTPSConnectionPool, self)._validate_conn(conn)
+
+ # Force connect early to allow us to validate the connection.
+ if not getattr(conn, 'sock', None): # AppEngine might not have `.sock`
+ conn.connect()
+
+ if not conn.is_verified:
+ warnings.warn((
+ 'Unverified HTTPS request is being made. '
+ 'Adding certificate verification is strongly advised. See: '
+ 'https://urllib3.readthedocs.org/en/latest/security.html'),
+ InsecureRequestWarning)
+
+
+def connection_from_url(url, **kw):
+ """
+ Given a url, return an :class:`.ConnectionPool` instance of its host.
+
+ This is a shortcut for not having to parse out the scheme, host, and port
+ of the url before creating an :class:`.ConnectionPool` instance.
+
+ :param url:
+ Absolute URL string that must include the scheme. Port is optional.
+
+ :param \**kw:
+ Passes additional parameters to the constructor of the appropriate
+ :class:`.ConnectionPool`. Useful for specifying things like
+ timeout, maxsize, headers, etc.
+
+ Example::
+
+ >>> conn = connection_from_url('http://google.com/')
+ >>> r = conn.request('GET', '/')
+ """
+ scheme, host, port = get_host(url)
+ if scheme == 'https':
+ return HTTPSConnectionPool(host, port=port, **kw)
+ else:
+ return HTTPConnectionPool(host, port=port, **kw)
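
A minimal usage sketch of the pool API documented above, assuming the vendored
import path ``requests.packages.urllib3`` (the standalone ``urllib3`` package
exposes the same names); the host, path and pool size are placeholders::

    from requests.packages.urllib3.connectionpool import connection_from_url
    from requests.packages.urllib3.util.retry import Retry

    # connection_from_url() picks HTTPConnectionPool or HTTPSConnectionPool
    # from the URL scheme, as shown at the end of the module above.
    pool = connection_from_url('http://example.com/', maxsize=2)

    # urlopen() accepts a Retry object for fine-grained control; a plain int
    # or False are also accepted, per the docstring above.
    retries = Retry(total=3, redirect=2)
    response = pool.urlopen('GET', '/', retries=retries)
    print(response.status, len(response.data))
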
diff --git a/third_party/python/requests/requests/packages/urllib3/contrib/__init__.py b/third_party/python/requests/requests/packages/urllib3/contrib/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/contrib/__init__.py
diff --git a/third_party/python/requests/requests/packages/urllib3/contrib/appengine.py b/third_party/python/requests/requests/packages/urllib3/contrib/appengine.py
new file mode 100644
index 0000000000..884cdb220d
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/contrib/appengine.py
@@ -0,0 +1,223 @@
+from __future__ import absolute_import
+import logging
+import os
+import warnings
+
+from ..exceptions import (
+ HTTPError,
+ HTTPWarning,
+ MaxRetryError,
+ ProtocolError,
+ TimeoutError,
+ SSLError
+)
+
+from ..packages.six import BytesIO
+from ..request import RequestMethods
+from ..response import HTTPResponse
+from ..util.timeout import Timeout
+from ..util.retry import Retry
+
+try:
+ from google.appengine.api import urlfetch
+except ImportError:
+ urlfetch = None
+
+
+log = logging.getLogger(__name__)
+
+
+class AppEnginePlatformWarning(HTTPWarning):
+ pass
+
+
+class AppEnginePlatformError(HTTPError):
+ pass
+
+
+class AppEngineManager(RequestMethods):
+ """
+ Connection manager for Google App Engine sandbox applications.
+
+ This manager uses the URLFetch service directly instead of using the
+ emulated httplib, and is subject to URLFetch limitations as described in
+ the App Engine documentation here:
+
+ https://cloud.google.com/appengine/docs/python/urlfetch
+
+    Notably it will raise an AppEnginePlatformError if:
+        * URLFetch is not available.
+        * You attempt to use this on GAEv2 (Managed VMs), as full socket
+          support is available there.
+        * A request size is more than 10 megabytes.
+        * A response size is more than 32 megabytes.
+        * You use an unsupported request method such as OPTIONS.
+
+ Beyond those cases, it will raise normal urllib3 errors.
+ """
+
+ def __init__(self, headers=None, retries=None, validate_certificate=True):
+ if not urlfetch:
+ raise AppEnginePlatformError(
+ "URLFetch is not available in this environment.")
+
+ if is_prod_appengine_mvms():
+ raise AppEnginePlatformError(
+ "Use normal urllib3.PoolManager instead of AppEngineManager"
+ "on Managed VMs, as using URLFetch is not necessary in "
+ "this environment.")
+
+ warnings.warn(
+ "urllib3 is using URLFetch on Google App Engine sandbox instead "
+ "of sockets. To use sockets directly instead of URLFetch see "
+ "https://urllib3.readthedocs.org/en/latest/contrib.html.",
+ AppEnginePlatformWarning)
+
+ RequestMethods.__init__(self, headers)
+ self.validate_certificate = validate_certificate
+
+ self.retries = retries or Retry.DEFAULT
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def urlopen(self, method, url, body=None, headers=None,
+ retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
+ **response_kw):
+
+ retries = self._get_retries(retries, redirect)
+
+ try:
+ response = urlfetch.fetch(
+ url,
+ payload=body,
+ method=method,
+ headers=headers or {},
+ allow_truncated=False,
+ follow_redirects=(
+ redirect and
+ retries.redirect != 0 and
+ retries.total),
+ deadline=self._get_absolute_timeout(timeout),
+ validate_certificate=self.validate_certificate,
+ )
+ except urlfetch.DeadlineExceededError as e:
+ raise TimeoutError(self, e)
+
+ except urlfetch.InvalidURLError as e:
+ if 'too large' in str(e):
+ raise AppEnginePlatformError(
+ "URLFetch request too large, URLFetch only "
+ "supports requests up to 10mb in size.", e)
+ raise ProtocolError(e)
+
+ except urlfetch.DownloadError as e:
+ if 'Too many redirects' in str(e):
+ raise MaxRetryError(self, url, reason=e)
+ raise ProtocolError(e)
+
+ except urlfetch.ResponseTooLargeError as e:
+ raise AppEnginePlatformError(
+ "URLFetch response too large, URLFetch only supports"
+ "responses up to 32mb in size.", e)
+
+ except urlfetch.SSLCertificateError as e:
+ raise SSLError(e)
+
+ except urlfetch.InvalidMethodError as e:
+ raise AppEnginePlatformError(
+ "URLFetch does not support method: %s" % method, e)
+
+ http_response = self._urlfetch_response_to_http_response(
+ response, **response_kw)
+
+ # Check for redirect response
+ if (http_response.get_redirect_location() and
+ retries.raise_on_redirect and redirect):
+ raise MaxRetryError(self, url, "too many redirects")
+
+ # Check if we should retry the HTTP response.
+ if retries.is_forced_retry(method, status_code=http_response.status):
+ retries = retries.increment(
+ method, url, response=http_response, _pool=self)
+ log.info("Forced retry: %s" % url)
+ retries.sleep()
+ return self.urlopen(
+ method, url,
+ body=body, headers=headers,
+ retries=retries, redirect=redirect,
+ timeout=timeout, **response_kw)
+
+ return http_response
+
+ def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
+
+ if is_prod_appengine():
+ # Production GAE handles deflate encoding automatically, but does
+ # not remove the encoding header.
+ content_encoding = urlfetch_resp.headers.get('content-encoding')
+
+ if content_encoding == 'deflate':
+ del urlfetch_resp.headers['content-encoding']
+
+ return HTTPResponse(
+ # In order for decoding to work, we must present the content as
+ # a file-like object.
+ body=BytesIO(urlfetch_resp.content),
+ headers=urlfetch_resp.headers,
+ status=urlfetch_resp.status_code,
+ **response_kw
+ )
+
+ def _get_absolute_timeout(self, timeout):
+ if timeout is Timeout.DEFAULT_TIMEOUT:
+ return 5 # 5s is the default timeout for URLFetch.
+ if isinstance(timeout, Timeout):
+ if timeout.read is not timeout.connect:
+ warnings.warn(
+ "URLFetch does not support granular timeout settings, "
+ "reverting to total timeout.", AppEnginePlatformWarning)
+ return timeout.total
+ return timeout
+
+ def _get_retries(self, retries, redirect):
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(
+ retries, redirect=redirect, default=self.retries)
+
+ if retries.connect or retries.read or retries.redirect:
+ warnings.warn(
+ "URLFetch only supports total retries and does not "
+ "recognize connect, read, or redirect retry parameters.",
+ AppEnginePlatformWarning)
+
+ return retries
+
+
+def is_appengine():
+ return (is_local_appengine() or
+ is_prod_appengine() or
+ is_prod_appengine_mvms())
+
+
+def is_appengine_sandbox():
+ return is_appengine() and not is_prod_appengine_mvms()
+
+
+def is_local_appengine():
+ return ('APPENGINE_RUNTIME' in os.environ and
+ 'Development/' in os.environ['SERVER_SOFTWARE'])
+
+
+def is_prod_appengine():
+ return ('APPENGINE_RUNTIME' in os.environ and
+ 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and
+ not is_prod_appengine_mvms())
+
+
+def is_prod_appengine_mvms():
+ return os.environ.get('GAE_VM', False) == 'true'
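
A minimal sketch of the URLFetch-backed manager described above, assuming it
runs inside the App Engine sandbox where ``google.appengine.api.urlfetch`` is
importable; the URL is a placeholder::

    from requests.packages.urllib3.contrib.appengine import (
        AppEngineManager, is_appengine_sandbox)

    if is_appengine_sandbox():
        # AppEngineManager routes requests through URLFetch and also works
        # as a context manager (see __enter__/__exit__ above).
        with AppEngineManager() as http:
            r = http.urlopen('GET', 'https://example.com/', retries=2)
            print(r.status)
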
diff --git a/third_party/python/requests/requests/packages/urllib3/contrib/ntlmpool.py b/third_party/python/requests/requests/packages/urllib3/contrib/ntlmpool.py
new file mode 100644
index 0000000000..c136a238db
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/contrib/ntlmpool.py
@@ -0,0 +1,115 @@
+"""
+NTLM authenticating pool, contributed by erikcederstran
+
+Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
+"""
+from __future__ import absolute_import
+
+try:
+ from http.client import HTTPSConnection
+except ImportError:
+ from httplib import HTTPSConnection
+from logging import getLogger
+from ntlm import ntlm
+
+from urllib3 import HTTPSConnectionPool
+
+
+log = getLogger(__name__)
+
+
+class NTLMConnectionPool(HTTPSConnectionPool):
+ """
+ Implements an NTLM authentication version of an urllib3 connection pool
+ """
+
+ scheme = 'https'
+
+ def __init__(self, user, pw, authurl, *args, **kwargs):
+ """
+ authurl is a random URL on the server that is protected by NTLM.
+ user is the Windows user, probably in the DOMAIN\\username format.
+ pw is the password for the user.
+ """
+ super(NTLMConnectionPool, self).__init__(*args, **kwargs)
+ self.authurl = authurl
+ self.rawuser = user
+ user_parts = user.split('\\', 1)
+ self.domain = user_parts[0].upper()
+ self.user = user_parts[1]
+ self.pw = pw
+
+ def _new_conn(self):
+ # Performs the NTLM handshake that secures the connection. The socket
+ # must be kept open while requests are performed.
+ self.num_connections += 1
+ log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s' %
+ (self.num_connections, self.host, self.authurl))
+
+ headers = {}
+ headers['Connection'] = 'Keep-Alive'
+ req_header = 'Authorization'
+ resp_header = 'www-authenticate'
+
+ conn = HTTPSConnection(host=self.host, port=self.port)
+
+ # Send negotiation message
+ headers[req_header] = (
+ 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
+ log.debug('Request headers: %s' % headers)
+ conn.request('GET', self.authurl, None, headers)
+ res = conn.getresponse()
+ reshdr = dict(res.getheaders())
+ log.debug('Response status: %s %s' % (res.status, res.reason))
+ log.debug('Response headers: %s' % reshdr)
+ log.debug('Response data: %s [...]' % res.read(100))
+
+ # Remove the reference to the socket, so that it can not be closed by
+ # the response object (we want to keep the socket open)
+ res.fp = None
+
+ # Server should respond with a challenge message
+ auth_header_values = reshdr[resp_header].split(', ')
+ auth_header_value = None
+ for s in auth_header_values:
+ if s[:5] == 'NTLM ':
+ auth_header_value = s[5:]
+ if auth_header_value is None:
+ raise Exception('Unexpected %s response header: %s' %
+ (resp_header, reshdr[resp_header]))
+
+ # Send authentication message
+ ServerChallenge, NegotiateFlags = \
+ ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
+ auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
+ self.user,
+ self.domain,
+ self.pw,
+ NegotiateFlags)
+ headers[req_header] = 'NTLM %s' % auth_msg
+ log.debug('Request headers: %s' % headers)
+ conn.request('GET', self.authurl, None, headers)
+ res = conn.getresponse()
+ log.debug('Response status: %s %s' % (res.status, res.reason))
+ log.debug('Response headers: %s' % dict(res.getheaders()))
+ log.debug('Response data: %s [...]' % res.read()[:100])
+ if res.status != 200:
+ if res.status == 401:
+ raise Exception('Server rejected request: wrong '
+ 'username or password')
+ raise Exception('Wrong server response: %s %s' %
+ (res.status, res.reason))
+
+ res.fp = None
+ log.debug('Connection established')
+ return conn
+
+ def urlopen(self, method, url, body=None, headers=None, retries=3,
+ redirect=True, assert_same_host=True):
+ if headers is None:
+ headers = {}
+ headers['Connection'] = 'Keep-Alive'
+ return super(NTLMConnectionPool, self).urlopen(method, url, body,
+ headers, retries,
+ redirect,
+ assert_same_host)
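
A minimal sketch of constructing the pool above, assuming the third-party
``ntlm`` package is installed and an NTLM-protected host exists; the hostname,
credentials and ``authurl`` are placeholders::

    from requests.packages.urllib3.contrib.ntlmpool import NTLMConnectionPool

    # user and pw are passed positionally; host/port go to HTTPSConnectionPool.
    pool = NTLMConnectionPool('EXAMPLE\\someuser', 'secret',
                              authurl='/protected/',
                              host='intranet.example', port=443)
    r = pool.urlopen('GET', '/protected/')
    print(r.status)
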
diff --git a/third_party/python/requests/requests/packages/urllib3/contrib/pyopenssl.py b/third_party/python/requests/requests/packages/urllib3/contrib/pyopenssl.py
new file mode 100644
index 0000000000..5996153afe
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/contrib/pyopenssl.py
@@ -0,0 +1,310 @@
+'''SSL with SNI_-support for Python 2. Follow these instructions if you would
+like to verify SSL certificates in Python 2. Note, the default libraries do
+*not* do certificate checking; you need to do additional work to validate
+certificates yourself.
+
+This needs the following packages installed:
+
+* pyOpenSSL (tested with 0.13)
+* ndg-httpsclient (tested with 0.3.2)
+* pyasn1 (tested with 0.1.6)
+
+You can install them with the following command:
+
+ pip install pyopenssl ndg-httpsclient pyasn1
+
+To activate certificate checking, call
+:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
+before you begin making HTTP requests. This can be done in a ``sitecustomize``
+module, or at any other time before your application begins using ``urllib3``,
+like this::
+
+ try:
+ import urllib3.contrib.pyopenssl
+ urllib3.contrib.pyopenssl.inject_into_urllib3()
+ except ImportError:
+ pass
+
+Now you can use :mod:`urllib3` as you normally would, and it will support SNI
+when the required modules are installed.
+
+Activating this module also has the positive side effect of disabling SSL/TLS
+compression in Python 2 (see `CRIME attack`_).
+
+If you want to configure the default list of supported cipher suites, you can
+set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
+
+Module Variables
+----------------
+
+:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
+
+.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
+.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
+
+'''
+from __future__ import absolute_import
+
+try:
+ from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
+ from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
+except SyntaxError as e:
+ raise ImportError(e)
+
+import OpenSSL.SSL
+from pyasn1.codec.der import decoder as der_decoder
+from pyasn1.type import univ, constraint
+from socket import _fileobject, timeout, error as SocketError
+import ssl
+import select
+
+from .. import connection
+from .. import util
+
+__all__ = ['inject_into_urllib3', 'extract_from_urllib3']
+
+# SNI only *really* works if we can read the subjectAltName of certificates.
+HAS_SNI = SUBJ_ALT_NAME_SUPPORT
+
+# Map from urllib3 to PyOpenSSL compatible parameter-values.
+_openssl_versions = {
+ ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
+ ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
+}
+
+if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
+
+if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
+
+try:
+ _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
+except AttributeError:
+ pass
+
+_openssl_verify = {
+ ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
+ ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
+ ssl.CERT_REQUIRED:
+ OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
+}
+
+DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
+
+# OpenSSL will only write 16K at a time
+SSL_WRITE_BLOCKSIZE = 16384
+
+orig_util_HAS_SNI = util.HAS_SNI
+orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
+
+
+def inject_into_urllib3():
+ 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.'
+
+ connection.ssl_wrap_socket = ssl_wrap_socket
+ util.HAS_SNI = HAS_SNI
+
+
+def extract_from_urllib3():
+ 'Undo monkey-patching by :func:`inject_into_urllib3`.'
+
+ connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket
+ util.HAS_SNI = orig_util_HAS_SNI
+
+
+# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
+class SubjectAltName(BaseSubjectAltName):
+ '''ASN.1 implementation for subjectAltNames support'''
+
+    # There is no limit to how many SAN entries a certificate may have;
+    # however, this needs some limit, so we set an arbitrarily high one.
+ sizeSpec = univ.SequenceOf.sizeSpec + \
+ constraint.ValueSizeConstraint(1, 1024)
+
+
+# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
+def get_subj_alt_name(peer_cert):
+ # Search through extensions
+ dns_name = []
+ if not SUBJ_ALT_NAME_SUPPORT:
+ return dns_name
+
+ general_names = SubjectAltName()
+ for i in range(peer_cert.get_extension_count()):
+ ext = peer_cert.get_extension(i)
+ ext_name = ext.get_short_name()
+ if ext_name != 'subjectAltName':
+ continue
+
+ # PyOpenSSL returns extension data in ASN.1 encoded form
+ ext_dat = ext.get_data()
+ decoded_dat = der_decoder.decode(ext_dat,
+ asn1Spec=general_names)
+
+ for name in decoded_dat:
+ if not isinstance(name, SubjectAltName):
+ continue
+ for entry in range(len(name)):
+ component = name.getComponentByPosition(entry)
+ if component.getName() != 'dNSName':
+ continue
+ dns_name.append(str(component.getComponent()))
+
+ return dns_name
+
+
+class WrappedSocket(object):
+ '''API-compatibility wrapper for Python OpenSSL's Connection-class.
+
+ Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
+ collector of pypy.
+ '''
+
+ def __init__(self, connection, socket, suppress_ragged_eofs=True):
+ self.connection = connection
+ self.socket = socket
+ self.suppress_ragged_eofs = suppress_ragged_eofs
+ self._makefile_refs = 0
+
+ def fileno(self):
+ return self.socket.fileno()
+
+ def makefile(self, mode, bufsize=-1):
+ self._makefile_refs += 1
+ return _fileobject(self, mode, bufsize, close=True)
+
+ def recv(self, *args, **kwargs):
+ try:
+ data = self.connection.recv(*args, **kwargs)
+ except OpenSSL.SSL.SysCallError as e:
+ if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
+ return b''
+ else:
+ raise SocketError(e)
+ except OpenSSL.SSL.ZeroReturnError as e:
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+ return b''
+ else:
+ raise
+ except OpenSSL.SSL.WantReadError:
+ rd, wd, ed = select.select(
+ [self.socket], [], [], self.socket.gettimeout())
+ if not rd:
+ raise timeout('The read operation timed out')
+ else:
+ return self.recv(*args, **kwargs)
+ else:
+ return data
+
+ def settimeout(self, timeout):
+ return self.socket.settimeout(timeout)
+
+ def _send_until_done(self, data):
+ while True:
+ try:
+ return self.connection.send(data)
+ except OpenSSL.SSL.WantWriteError:
+ _, wlist, _ = select.select([], [self.socket], [],
+ self.socket.gettimeout())
+ if not wlist:
+ raise timeout()
+ continue
+
+ def sendall(self, data):
+ total_sent = 0
+ while total_sent < len(data):
+ sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
+ total_sent += sent
+
+ def shutdown(self):
+ # FIXME rethrow compatible exceptions should we ever use this
+ self.connection.shutdown()
+
+ def close(self):
+ if self._makefile_refs < 1:
+ try:
+ return self.connection.close()
+ except OpenSSL.SSL.Error:
+ return
+ else:
+ self._makefile_refs -= 1
+
+ def getpeercert(self, binary_form=False):
+ x509 = self.connection.get_peer_certificate()
+
+ if not x509:
+ return x509
+
+ if binary_form:
+ return OpenSSL.crypto.dump_certificate(
+ OpenSSL.crypto.FILETYPE_ASN1,
+ x509)
+
+ return {
+ 'subject': (
+ (('commonName', x509.get_subject().CN),),
+ ),
+ 'subjectAltName': [
+ ('DNS', value)
+ for value in get_subj_alt_name(x509)
+ ]
+ }
+
+ def _reuse(self):
+ self._makefile_refs += 1
+
+ def _drop(self):
+ if self._makefile_refs < 1:
+ self.close()
+ else:
+ self._makefile_refs -= 1
+
+
+def _verify_callback(cnx, x509, err_no, err_depth, return_code):
+ return err_no == 0
+
+
+def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
+ ca_certs=None, server_hostname=None,
+ ssl_version=None, ca_cert_dir=None):
+ ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
+ if certfile:
+ keyfile = keyfile or certfile # Match behaviour of the normal python ssl library
+ ctx.use_certificate_file(certfile)
+ if keyfile:
+ ctx.use_privatekey_file(keyfile)
+ if cert_reqs != ssl.CERT_NONE:
+ ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback)
+ if ca_certs or ca_cert_dir:
+ try:
+ ctx.load_verify_locations(ca_certs, ca_cert_dir)
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
+ else:
+ ctx.set_default_verify_paths()
+
+    # Disable TLS compression to mitigate the CRIME attack (issue #309)
+ OP_NO_COMPRESSION = 0x20000
+ ctx.set_options(OP_NO_COMPRESSION)
+
+ # Set list of supported ciphersuites.
+ ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST)
+
+ cnx = OpenSSL.SSL.Connection(ctx, sock)
+ cnx.set_tlsext_host_name(server_hostname)
+ cnx.set_connect_state()
+ while True:
+ try:
+ cnx.do_handshake()
+ except OpenSSL.SSL.WantReadError:
+ rd, _, _ = select.select([sock], [], [], sock.gettimeout())
+ if not rd:
+ raise timeout('select timed out')
+ continue
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError('bad handshake: %r' % e)
+ break
+
+ return WrappedSocket(cnx, sock)
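
A minimal sketch of the monkey-patch round trip described in the module
docstring above, assuming Python 2 with pyOpenSSL, ndg-httpsclient and pyasn1
installed; the URL is a placeholder::

    from requests.packages.urllib3 import PoolManager
    from requests.packages.urllib3.contrib import pyopenssl

    pyopenssl.inject_into_urllib3()       # route ssl_wrap_socket through PyOpenSSL
    try:
        http = PoolManager()
        r = http.request('GET', 'https://example.com/')
        print(r.status)
    finally:
        pyopenssl.extract_from_urllib3()  # restore the stdlib ssl implementation
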
diff --git a/third_party/python/requests/requests/packages/urllib3/exceptions.py b/third_party/python/requests/requests/packages/urllib3/exceptions.py
new file mode 100644
index 0000000000..8e07eb6198
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/exceptions.py
@@ -0,0 +1,201 @@
+from __future__ import absolute_import
+# Base Exceptions
+
+
+class HTTPError(Exception):
+ "Base exception used by this module."
+ pass
+
+
+class HTTPWarning(Warning):
+ "Base warning used by this module."
+ pass
+
+
+class PoolError(HTTPError):
+ "Base exception for errors caused within a pool."
+ def __init__(self, pool, message):
+ self.pool = pool
+ HTTPError.__init__(self, "%s: %s" % (pool, message))
+
+ def __reduce__(self):
+ # For pickling purposes.
+ return self.__class__, (None, None)
+
+
+class RequestError(PoolError):
+ "Base exception for PoolErrors that have associated URLs."
+ def __init__(self, pool, url, message):
+ self.url = url
+ PoolError.__init__(self, pool, message)
+
+ def __reduce__(self):
+ # For pickling purposes.
+ return self.__class__, (None, self.url, None)
+
+
+class SSLError(HTTPError):
+ "Raised when SSL certificate fails in an HTTPS connection."
+ pass
+
+
+class ProxyError(HTTPError):
+ "Raised when the connection to a proxy fails."
+ pass
+
+
+class DecodeError(HTTPError):
+ "Raised when automatic decoding based on Content-Type fails."
+ pass
+
+
+class ProtocolError(HTTPError):
+ "Raised when something unexpected happens mid-request/response."
+ pass
+
+
+#: Renamed to ProtocolError but aliased for backwards compatibility.
+ConnectionError = ProtocolError
+
+
+# Leaf Exceptions
+
+class MaxRetryError(RequestError):
+ """Raised when the maximum number of retries is exceeded.
+
+ :param pool: The connection pool
+ :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
+    :param string url: The requested URL
+ :param exceptions.Exception reason: The underlying error
+
+ """
+
+ def __init__(self, pool, url, reason=None):
+ self.reason = reason
+
+ message = "Max retries exceeded with url: %s (Caused by %r)" % (
+ url, reason)
+
+ RequestError.__init__(self, pool, url, message)
+
+
+class HostChangedError(RequestError):
+ "Raised when an existing pool gets a request for a foreign host."
+
+ def __init__(self, pool, url, retries=3):
+ message = "Tried to open a foreign host with url: %s" % url
+ RequestError.__init__(self, pool, url, message)
+ self.retries = retries
+
+
+class TimeoutStateError(HTTPError):
+ """ Raised when passing an invalid state to a timeout """
+ pass
+
+
+class TimeoutError(HTTPError):
+ """ Raised when a socket timeout error occurs.
+
+ Catching this error will catch both :exc:`ReadTimeoutErrors
+ <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
+ """
+ pass
+
+
+class ReadTimeoutError(TimeoutError, RequestError):
+ "Raised when a socket timeout occurs while receiving data from a server"
+ pass
+
+
+# This timeout error does not have a URL attached and needs to inherit from the
+# base HTTPError
+class ConnectTimeoutError(TimeoutError):
+ "Raised when a socket timeout occurs while connecting to a server"
+ pass
+
+
+class NewConnectionError(ConnectTimeoutError, PoolError):
+ "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
+ pass
+
+
+class EmptyPoolError(PoolError):
+ "Raised when a pool runs out of connections and no more are allowed."
+ pass
+
+
+class ClosedPoolError(PoolError):
+ "Raised when a request enters a pool after the pool has been closed."
+ pass
+
+
+class LocationValueError(ValueError, HTTPError):
+ "Raised when there is something wrong with a given URL input."
+ pass
+
+
+class LocationParseError(LocationValueError):
+ "Raised when get_host or similar fails to parse the URL input."
+
+ def __init__(self, location):
+ message = "Failed to parse: %s" % location
+ HTTPError.__init__(self, message)
+
+ self.location = location
+
+
+class ResponseError(HTTPError):
+ "Used as a container for an error reason supplied in a MaxRetryError."
+ GENERIC_ERROR = 'too many error responses'
+ SPECIFIC_ERROR = 'too many {status_code} error responses'
+
+
+class SecurityWarning(HTTPWarning):
+ "Warned when perfoming security reducing actions"
+ pass
+
+
+class SubjectAltNameWarning(SecurityWarning):
+ "Warned when connecting to a host with a certificate missing a SAN."
+ pass
+
+
+class InsecureRequestWarning(SecurityWarning):
+ "Warned when making an unverified HTTPS request."
+ pass
+
+
+class SystemTimeWarning(SecurityWarning):
+ "Warned when system time is suspected to be wrong"
+ pass
+
+
+class InsecurePlatformWarning(SecurityWarning):
+ "Warned when certain SSL configuration is not available on a platform."
+ pass
+
+
+class SNIMissingWarning(HTTPWarning):
+ "Warned when making a HTTPS request without SNI available."
+ pass
+
+
+class ResponseNotChunked(ProtocolError, ValueError):
+ "Response needs to be chunked in order to read it as chunks."
+ pass
+
+
+class ProxySchemeUnknown(AssertionError, ValueError):
+ "ProxyManager does not support the supplied scheme"
+ # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
+
+ def __init__(self, scheme):
+ message = "Not supported proxy scheme %s" % scheme
+ super(ProxySchemeUnknown, self).__init__(message)
+
+
+class HeaderParsingError(HTTPError):
+ "Raised by assert_header_parsing, but we convert it to a log.warning statement."
+ def __init__(self, defects, unparsed_data):
+ message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data)
+ super(HeaderParsingError, self).__init__(message)
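
A minimal sketch of how the exception hierarchy above is typically consumed,
assuming a request that cannot succeed; the non-routable address is a
placeholder::

    from requests.packages.urllib3 import PoolManager
    from requests.packages.urllib3.exceptions import MaxRetryError

    http = PoolManager()
    try:
        # 10.255.255.1 is a non-routable placeholder, so this is expected to fail.
        http.request('GET', 'http://10.255.255.1/', retries=1, timeout=0.5)
    except MaxRetryError as exc:
        # .reason carries the underlying error, e.g. a ConnectTimeoutError;
        # ConnectTimeoutError and ReadTimeoutError share the TimeoutError base.
        print('gave up:', type(exc.reason).__name__, exc.reason)
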
diff --git a/third_party/python/requests/requests/packages/urllib3/fields.py b/third_party/python/requests/requests/packages/urllib3/fields.py
new file mode 100644
index 0000000000..c7d48113bd
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/fields.py
@@ -0,0 +1,178 @@
+from __future__ import absolute_import
+import email.utils
+import mimetypes
+
+from .packages import six
+
+
+def guess_content_type(filename, default='application/octet-stream'):
+ """
+ Guess the "Content-Type" of a file.
+
+ :param filename:
+ The filename to guess the "Content-Type" of using :mod:`mimetypes`.
+ :param default:
+ If no "Content-Type" can be guessed, default to `default`.
+ """
+ if filename:
+ return mimetypes.guess_type(filename)[0] or default
+ return default
+
+
+def format_header_param(name, value):
+ """
+ Helper function to format and quote a single header parameter.
+
+ Particularly useful for header parameters which might contain
+ non-ASCII values, like file names. This follows RFC 2231, as
+ suggested by RFC 2388 Section 4.4.
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+ The value of the parameter, provided as a unicode string.
+ """
+ if not any(ch in value for ch in '"\\\r\n'):
+ result = '%s="%s"' % (name, value)
+ try:
+ result.encode('ascii')
+ except UnicodeEncodeError:
+ pass
+ else:
+ return result
+ if not six.PY3: # Python 2:
+ value = value.encode('utf-8')
+ value = email.utils.encode_rfc2231(value, 'utf-8')
+ value = '%s*=%s' % (name, value)
+ return value
+
+
+class RequestField(object):
+ """
+ A data container for request body parameters.
+
+ :param name:
+ The name of this request field.
+ :param data:
+ The data/value body.
+ :param filename:
+ An optional filename of the request field.
+ :param headers:
+ An optional dict-like object of headers to initially use for the field.
+ """
+ def __init__(self, name, data, filename=None, headers=None):
+ self._name = name
+ self._filename = filename
+ self.data = data
+ self.headers = {}
+ if headers:
+ self.headers = dict(headers)
+
+ @classmethod
+ def from_tuples(cls, fieldname, value):
+ """
+ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
+
+ Supports constructing :class:`~urllib3.fields.RequestField` from
+ parameter of key/value strings AND key/filetuple. A filetuple is a
+ (filename, data, MIME type) tuple where the MIME type is optional.
+ For example::
+
+ 'foo': 'bar',
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
+ 'realfile': ('barfile.txt', open('realfile').read()),
+ 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
+ 'nonamefile': 'contents of nonamefile field',
+
+ Field names and filenames must be unicode.
+ """
+ if isinstance(value, tuple):
+ if len(value) == 3:
+ filename, data, content_type = value
+ else:
+ filename, data = value
+ content_type = guess_content_type(filename)
+ else:
+ filename = None
+ content_type = None
+ data = value
+
+ request_param = cls(fieldname, data, filename=filename)
+ request_param.make_multipart(content_type=content_type)
+
+ return request_param
+
+ def _render_part(self, name, value):
+ """
+ Overridable helper function to format a single header parameter.
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+ The value of the parameter, provided as a unicode string.
+ """
+ return format_header_param(name, value)
+
+ def _render_parts(self, header_parts):
+ """
+ Helper function to format and quote a single header.
+
+ Useful for single headers that are composed of multiple items. E.g.,
+ 'Content-Disposition' fields.
+
+ :param header_parts:
+            A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
+ as `k1="v1"; k2="v2"; ...`.
+ """
+ parts = []
+ iterable = header_parts
+ if isinstance(header_parts, dict):
+ iterable = header_parts.items()
+
+ for name, value in iterable:
+ if value:
+ parts.append(self._render_part(name, value))
+
+ return '; '.join(parts)
+
+ def render_headers(self):
+ """
+ Renders the headers for this request field.
+ """
+ lines = []
+
+ sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']
+ for sort_key in sort_keys:
+ if self.headers.get(sort_key, False):
+ lines.append('%s: %s' % (sort_key, self.headers[sort_key]))
+
+ for header_name, header_value in self.headers.items():
+ if header_name not in sort_keys:
+ if header_value:
+ lines.append('%s: %s' % (header_name, header_value))
+
+ lines.append('\r\n')
+ return '\r\n'.join(lines)
+
+ def make_multipart(self, content_disposition=None, content_type=None,
+ content_location=None):
+ """
+ Makes this request field into a multipart request field.
+
+        This method sets the "Content-Disposition", "Content-Type" and
+        "Content-Location" headers on the request parameter.
+
+ :param content_type:
+ The 'Content-Type' of the request body.
+ :param content_location:
+ The 'Content-Location' of the request body.
+
+ """
+ self.headers['Content-Disposition'] = content_disposition or 'form-data'
+ self.headers['Content-Disposition'] += '; '.join([
+ '', self._render_parts(
+ (('name', self._name), ('filename', self._filename))
+ )
+ ])
+ self.headers['Content-Type'] = content_type
+ self.headers['Content-Location'] = content_location
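
A minimal sketch of the ``RequestField`` helpers above; the field names,
filename and contents are placeholders::

    from requests.packages.urllib3.fields import RequestField

    # from_tuples() accepts the (filename, data[, MIME type]) shape described
    # in the docstring and already calls make_multipart().
    field = RequestField.from_tuples(
        'attachment', ('report.txt', 'hello world', 'text/plain'))
    print(field.render_headers())

    # A plain value gets only a Content-Disposition header once
    # make_multipart() is called explicitly.
    plain = RequestField('comment', 'just a value')
    plain.make_multipart()
    print(plain.render_headers())
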
diff --git a/third_party/python/requests/requests/packages/urllib3/filepost.py b/third_party/python/requests/requests/packages/urllib3/filepost.py
new file mode 100644
index 0000000000..97a2843ca4
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/filepost.py
@@ -0,0 +1,94 @@
+from __future__ import absolute_import
+import codecs
+
+from uuid import uuid4
+from io import BytesIO
+
+from .packages import six
+from .packages.six import b
+from .fields import RequestField
+
+writer = codecs.lookup('utf-8')[3]
+
+
+def choose_boundary():
+ """
+    Our embarrassingly-simple replacement for mimetools.choose_boundary.
+ """
+ return uuid4().hex
+
+
+def iter_field_objects(fields):
+ """
+ Iterate over fields.
+
+ Supports list of (k, v) tuples and dicts, and lists of
+ :class:`~urllib3.fields.RequestField`.
+
+ """
+ if isinstance(fields, dict):
+ i = six.iteritems(fields)
+ else:
+ i = iter(fields)
+
+ for field in i:
+ if isinstance(field, RequestField):
+ yield field
+ else:
+ yield RequestField.from_tuples(*field)
+
+
+def iter_fields(fields):
+ """
+ .. deprecated:: 1.6
+
+ Iterate over fields.
+
+ The addition of :class:`~urllib3.fields.RequestField` makes this function
+ obsolete. Instead, use :func:`iter_field_objects`, which returns
+ :class:`~urllib3.fields.RequestField` objects.
+
+ Supports list of (k, v) tuples and dicts.
+ """
+ if isinstance(fields, dict):
+ return ((k, v) for k, v in six.iteritems(fields))
+
+ return ((k, v) for k, v in fields)
+
+
+def encode_multipart_formdata(fields, boundary=None):
+ """
+ Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
+
+ :param fields:
+ Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
+
+ :param boundary:
+ If not specified, then a random boundary will be generated using
+        :func:`choose_boundary`.
+ """
+ body = BytesIO()
+ if boundary is None:
+ boundary = choose_boundary()
+
+ for field in iter_field_objects(fields):
+ body.write(b('--%s\r\n' % (boundary)))
+
+ writer(body).write(field.render_headers())
+ data = field.data
+
+ if isinstance(data, int):
+ data = str(data) # Backwards compatibility
+
+ if isinstance(data, six.text_type):
+ writer(body).write(data)
+ else:
+ body.write(data)
+
+ body.write(b'\r\n')
+
+ body.write(b('--%s--\r\n' % (boundary)))
+
+ content_type = str('multipart/form-data; boundary=%s' % boundary)
+
+ return body.getvalue(), content_type
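
A minimal sketch of ``encode_multipart_formdata`` above; the field names and
contents are placeholders::

    from requests.packages.urllib3.filepost import encode_multipart_formdata

    fields = {
        'comment': 'hello',
        'upload': ('hello.txt', b'file contents', 'text/plain'),
    }
    body, content_type = encode_multipart_formdata(fields)
    print(content_type)          # multipart/form-data; boundary=<random hex>
    print(len(body), 'bytes of encoded body')
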
diff --git a/third_party/python/requests/requests/packages/urllib3/packages/__init__.py b/third_party/python/requests/requests/packages/urllib3/packages/__init__.py
new file mode 100644
index 0000000000..170e974c15
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/packages/__init__.py
@@ -0,0 +1,5 @@
+from __future__ import absolute_import
+
+from . import ssl_match_hostname
+
+__all__ = ('ssl_match_hostname', )
diff --git a/third_party/python/requests/requests/packages/urllib3/packages/ordered_dict.py b/third_party/python/requests/requests/packages/urllib3/packages/ordered_dict.py
new file mode 100644
index 0000000000..4479363cc4
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/packages/ordered_dict.py
@@ -0,0 +1,259 @@
+# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
+# Passes Python2.7's test suite and incorporates all the latest updates.
+# Copyright 2009 Raymond Hettinger, released under the MIT License.
+# http://code.activestate.com/recipes/576693/
+try:
+ from thread import get_ident as _get_ident
+except ImportError:
+ from dummy_thread import get_ident as _get_ident
+
+try:
+ from _abcoll import KeysView, ValuesView, ItemsView
+except ImportError:
+ pass
+
+
+class OrderedDict(dict):
+ 'Dictionary that remembers insertion order'
+ # An inherited dict maps keys to values.
+ # The inherited dict provides __getitem__, __len__, __contains__, and get.
+ # The remaining methods are order-aware.
+ # Big-O running times for all methods are the same as for regular dictionaries.
+
+ # The internal self.__map dictionary maps keys to links in a doubly linked list.
+ # The circular doubly linked list starts and ends with a sentinel element.
+ # The sentinel element never gets deleted (this simplifies the algorithm).
+ # Each link is stored as a list of length three: [PREV, NEXT, KEY].
+
+ def __init__(self, *args, **kwds):
+ '''Initialize an ordered dictionary. Signature is the same as for
+ regular dictionaries, but keyword arguments are not recommended
+ because their insertion order is arbitrary.
+
+ '''
+ if len(args) > 1:
+ raise TypeError('expected at most 1 arguments, got %d' % len(args))
+ try:
+ self.__root
+ except AttributeError:
+ self.__root = root = [] # sentinel node
+ root[:] = [root, root, None]
+ self.__map = {}
+ self.__update(*args, **kwds)
+
+ def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
+ 'od.__setitem__(i, y) <==> od[i]=y'
+ # Setting a new item creates a new link which goes at the end of the linked
+ # list, and the inherited dictionary is updated with the new key/value pair.
+ if key not in self:
+ root = self.__root
+ last = root[0]
+ last[1] = root[0] = self.__map[key] = [last, root, key]
+ dict_setitem(self, key, value)
+
+ def __delitem__(self, key, dict_delitem=dict.__delitem__):
+ 'od.__delitem__(y) <==> del od[y]'
+ # Deleting an existing item uses self.__map to find the link which is
+ # then removed by updating the links in the predecessor and successor nodes.
+ dict_delitem(self, key)
+ link_prev, link_next, key = self.__map.pop(key)
+ link_prev[1] = link_next
+ link_next[0] = link_prev
+
+ def __iter__(self):
+ 'od.__iter__() <==> iter(od)'
+ root = self.__root
+ curr = root[1]
+ while curr is not root:
+ yield curr[2]
+ curr = curr[1]
+
+ def __reversed__(self):
+ 'od.__reversed__() <==> reversed(od)'
+ root = self.__root
+ curr = root[0]
+ while curr is not root:
+ yield curr[2]
+ curr = curr[0]
+
+ def clear(self):
+ 'od.clear() -> None. Remove all items from od.'
+ try:
+ for node in self.__map.itervalues():
+ del node[:]
+ root = self.__root
+ root[:] = [root, root, None]
+ self.__map.clear()
+ except AttributeError:
+ pass
+ dict.clear(self)
+
+ def popitem(self, last=True):
+ '''od.popitem() -> (k, v), return and remove a (key, value) pair.
+ Pairs are returned in LIFO order if last is true or FIFO order if false.
+
+ '''
+ if not self:
+ raise KeyError('dictionary is empty')
+ root = self.__root
+ if last:
+ link = root[0]
+ link_prev = link[0]
+ link_prev[1] = root
+ root[0] = link_prev
+ else:
+ link = root[1]
+ link_next = link[1]
+ root[1] = link_next
+ link_next[0] = root
+ key = link[2]
+ del self.__map[key]
+ value = dict.pop(self, key)
+ return key, value
+
+ # -- the following methods do not depend on the internal structure --
+
+ def keys(self):
+ 'od.keys() -> list of keys in od'
+ return list(self)
+
+ def values(self):
+ 'od.values() -> list of values in od'
+ return [self[key] for key in self]
+
+ def items(self):
+ 'od.items() -> list of (key, value) pairs in od'
+ return [(key, self[key]) for key in self]
+
+ def iterkeys(self):
+ 'od.iterkeys() -> an iterator over the keys in od'
+ return iter(self)
+
+ def itervalues(self):
+ 'od.itervalues -> an iterator over the values in od'
+ for k in self:
+ yield self[k]
+
+ def iteritems(self):
+ 'od.iteritems -> an iterator over the (key, value) items in od'
+ for k in self:
+ yield (k, self[k])
+
+ def update(*args, **kwds):
+ '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
+
+ If E is a dict instance, does: for k in E: od[k] = E[k]
+ If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
+ Or if E is an iterable of items, does: for k, v in E: od[k] = v
+ In either case, this is followed by: for k, v in F.items(): od[k] = v
+
+ '''
+ if len(args) > 2:
+ raise TypeError('update() takes at most 2 positional '
+ 'arguments (%d given)' % (len(args),))
+ elif not args:
+ raise TypeError('update() takes at least 1 argument (0 given)')
+ self = args[0]
+ # Make progressively weaker assumptions about "other"
+ other = ()
+ if len(args) == 2:
+ other = args[1]
+ if isinstance(other, dict):
+ for key in other:
+ self[key] = other[key]
+ elif hasattr(other, 'keys'):
+ for key in other.keys():
+ self[key] = other[key]
+ else:
+ for key, value in other:
+ self[key] = value
+ for key, value in kwds.items():
+ self[key] = value
+
+ __update = update # let subclasses override update without breaking __init__
+
+ __marker = object()
+
+ def pop(self, key, default=__marker):
+ '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised.
+
+ '''
+ if key in self:
+ result = self[key]
+ del self[key]
+ return result
+ if default is self.__marker:
+ raise KeyError(key)
+ return default
+
+ def setdefault(self, key, default=None):
+ 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
+ if key in self:
+ return self[key]
+ self[key] = default
+ return default
+
+ def __repr__(self, _repr_running={}):
+ 'od.__repr__() <==> repr(od)'
+ call_key = id(self), _get_ident()
+ if call_key in _repr_running:
+ return '...'
+ _repr_running[call_key] = 1
+ try:
+ if not self:
+ return '%s()' % (self.__class__.__name__,)
+ return '%s(%r)' % (self.__class__.__name__, self.items())
+ finally:
+ del _repr_running[call_key]
+
+ def __reduce__(self):
+ 'Return state information for pickling'
+ items = [[k, self[k]] for k in self]
+ inst_dict = vars(self).copy()
+ for k in vars(OrderedDict()):
+ inst_dict.pop(k, None)
+ if inst_dict:
+ return (self.__class__, (items,), inst_dict)
+ return self.__class__, (items,)
+
+ def copy(self):
+ 'od.copy() -> a shallow copy of od'
+ return self.__class__(self)
+
+ @classmethod
+ def fromkeys(cls, iterable, value=None):
+ '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
+ and values equal to v (which defaults to None).
+
+ '''
+ d = cls()
+ for key in iterable:
+ d[key] = value
+ return d
+
+ def __eq__(self, other):
+ '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
+ while comparison to a regular mapping is order-insensitive.
+
+ '''
+ if isinstance(other, OrderedDict):
+ return len(self)==len(other) and self.items() == other.items()
+ return dict.__eq__(self, other)
+
+ def __ne__(self, other):
+ return not self == other
+
+ # -- the following methods are only used in Python 2.7 --
+
+ def viewkeys(self):
+ "od.viewkeys() -> a set-like object providing a view on od's keys"
+ return KeysView(self)
+
+ def viewvalues(self):
+ "od.viewvalues() -> an object providing a view on od's values"
+ return ValuesView(self)
+
+ def viewitems(self):
+ "od.viewitems() -> a set-like object providing a view on od's items"
+ return ItemsView(self)
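
A minimal sketch of the backported ``OrderedDict`` above, assuming Python 2
(the module imports ``thread``/``dummy_thread``, which do not exist under
those names on Python 3)::

    from requests.packages.urllib3.packages.ordered_dict import OrderedDict

    od = OrderedDict()
    od['b'] = 1
    od['a'] = 2
    od['c'] = 3
    print(od.keys())                # insertion order is preserved: b, a, c
    print(od.popitem())             # LIFO by default: ('c', 3)
    print(od.popitem(last=False))   # FIFO when last=False: ('b', 1)
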
diff --git a/third_party/python/requests/requests/packages/urllib3/packages/six.py b/third_party/python/requests/requests/packages/urllib3/packages/six.py
new file mode 100644
index 0000000000..27d80112bf
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/packages/six.py
@@ -0,0 +1,385 @@
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+#Copyright (c) 2010-2011 Benjamin Peterson
+
+#Permission is hereby granted, free of charge, to any person obtaining a copy of
+#this software and associated documentation files (the "Software"), to deal in
+#the Software without restriction, including without limitation the rights to
+#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+#the Software, and to permit persons to whom the Software is furnished to do so,
+#subject to the following conditions:
+
+#The above copyright notice and this permission notice shall be included in all
+#copies or substantial portions of the Software.
+
+#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.2.0" # Revision 41c74fef2ded
+
+
+# True if we are running on Python 3.
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result)
+ # This is a bit ugly, but it avoids running this again.
+ delattr(tp, self.name)
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+
+class _MovedItems(types.ModuleType):
+ """Lazy loading of moved objects"""
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("winreg", "_winreg"),
+]
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+del attr
+
+moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+
+ _iterkeys = "keys"
+ _itervalues = "values"
+ _iteritems = "items"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+
+ _iterkeys = "iterkeys"
+ _itervalues = "itervalues"
+ _iteritems = "iteritems"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ Iterator = object
+
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+
+
+def iterkeys(d):
+ """Return an iterator over the keys of a dictionary."""
+ return iter(getattr(d, _iterkeys)())
+
+def itervalues(d):
+ """Return an iterator over the values of a dictionary."""
+ return iter(getattr(d, _itervalues)())
+
+def iteritems(d):
+ """Return an iterator over the (key, value) pairs of a dictionary."""
+ return iter(getattr(d, _iteritems)())
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+ def u(s):
+ return s
+ if sys.version_info[1] <= 1:
+ def int2byte(i):
+ return bytes((i,))
+ else:
+ # This is about 2x faster than the implementation above on 3.2+
+ int2byte = operator.methodcaller("to_bytes", 1, "big")
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+else:
+ def b(s):
+ return s
+ def u(s):
+ return unicode(s, "unicode_escape")
+ int2byte = chr
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+if PY3:
+ import builtins
+ exec_ = getattr(builtins, "exec")
+
+
+ def reraise(tp, value, tb=None):
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+
+ print_ = getattr(builtins, "print")
+ del builtins
+
+else:
+ def exec_(code, globs=None, locs=None):
+ """Execute code in a namespace."""
+ if globs is None:
+ frame = sys._getframe(1)
+ globs = frame.f_globals
+ if locs is None:
+ locs = frame.f_locals
+ del frame
+ elif locs is None:
+ locs = globs
+ exec("""exec code in globs, locs""")
+
+
+ exec_("""def reraise(tp, value, tb=None):
+ raise tp, value, tb
+""")
+
+
+ def print_(*args, **kwargs):
+ """The new-style print function."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+
+_add_doc(reraise, """Reraise an exception.""")
+
+
+def with_metaclass(meta, base=object):
+ """Create a base class with a metaclass."""
+ return meta("NewBase", (base,), {})
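The helpers above are easiest to understand from the call site. A minimal sketch of typical usage, assuming the vendored copy is importable as requests.packages.urllib3.packages.six (the import path, dict contents, and class names here are illustrative only)::

    from requests.packages.urllib3.packages import six

    # iteritems() maps to dict.iteritems() on Python 2 and dict.items() on Python 3.
    counts = {"a": 1, "b": 2}
    for key, value in six.iteritems(counts):
        six.print_(key, value)

    # b() and u() give byte and text "literals" with a single spelling.
    raw = six.b("header: value\r\n")
    text = six.u("caf\u00e9")
    assert isinstance(raw, six.binary_type) and isinstance(text, six.text_type)

    # with_metaclass() hides the Python 2 / Python 3 metaclass syntax split.
    class Meta(type):
        pass

    class Model(six.with_metaclass(Meta)):
        pass

    assert isinstance(Model, Meta)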
diff --git a/third_party/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py b/third_party/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
new file mode 100644
index 0000000000..dd59a75fd3
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
@@ -0,0 +1,13 @@
+try:
+ # Python 3.2+
+ from ssl import CertificateError, match_hostname
+except ImportError:
+ try:
+ # Backport of the function from a pypi module
+ from backports.ssl_match_hostname import CertificateError, match_hostname
+ except ImportError:
+ # Our vendored copy
+ from ._implementation import CertificateError, match_hostname
+
+# Not needed, but documenting what we provide.
+__all__ = ('CertificateError', 'match_hostname')
diff --git a/third_party/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py b/third_party/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py
new file mode 100644
index 0000000000..52f428733d
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py
@@ -0,0 +1,105 @@
+"""The match_hostname() function from Python 3.3.3, essential when using SSL."""
+
+# Note: This file is under the PSF license as the code comes from the python
+# stdlib. http://docs.python.org/3/license.html
+
+import re
+
+__version__ = '3.4.0.2'
+
+class CertificateError(ValueError):
+ pass
+
+
+def _dnsname_match(dn, hostname, max_wildcards=1):
+ """Matching according to RFC 6125, section 6.4.3
+
+ http://tools.ietf.org/html/rfc6125#section-6.4.3
+ """
+ pats = []
+ if not dn:
+ return False
+
+ # Ported from python3-syntax:
+ # leftmost, *remainder = dn.split(r'.')
+ parts = dn.split(r'.')
+ leftmost = parts[0]
+ remainder = parts[1:]
+
+ wildcards = leftmost.count('*')
+ if wildcards > max_wildcards:
+ # Issue #17980: avoid denials of service by refusing more
+ # than one wildcard per fragment. A survey of established
+ # policy among SSL implementations showed it to be a
+ # reasonable choice.
+ raise CertificateError(
+ "too many wildcards in certificate DNS name: " + repr(dn))
+
+ # speed up common case w/o wildcards
+ if not wildcards:
+ return dn.lower() == hostname.lower()
+
+ # RFC 6125, section 6.4.3, subitem 1.
+ # The client SHOULD NOT attempt to match a presented identifier in which
+ # the wildcard character comprises a label other than the left-most label.
+ if leftmost == '*':
+ # When '*' is a fragment by itself, it matches a non-empty dotless
+ # fragment.
+ pats.append('[^.]+')
+ elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
+ # RFC 6125, section 6.4.3, subitem 3.
+ # The client SHOULD NOT attempt to match a presented identifier
+ # where the wildcard character is embedded within an A-label or
+ # U-label of an internationalized domain name.
+ pats.append(re.escape(leftmost))
+ else:
+ # Otherwise, '*' matches any dotless string, e.g. www*
+ pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
+
+ # add the remaining fragments, ignore any wildcards
+ for frag in remainder:
+ pats.append(re.escape(frag))
+
+ pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+ return pat.match(hostname)
+
+
+def match_hostname(cert, hostname):
+ """Verify that *cert* (in decoded format as returned by
+ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
+ rules are followed, but IP addresses are not accepted for *hostname*.
+
+ CertificateError is raised on failure. On success, the function
+ returns nothing.
+ """
+ if not cert:
+ raise ValueError("empty or no certificate")
+ dnsnames = []
+ san = cert.get('subjectAltName', ())
+ for key, value in san:
+ if key == 'DNS':
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if not dnsnames:
+ # The subject is only checked when there is no dNSName entry
+ # in subjectAltName
+ for sub in cert.get('subject', ()):
+ for key, value in sub:
+ # XXX according to RFC 2818, the most specific Common Name
+ # must be used.
+ if key == 'commonName':
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if len(dnsnames) > 1:
+ raise CertificateError("hostname %r "
+ "doesn't match either of %s"
+ % (hostname, ', '.join(map(repr, dnsnames))))
+ elif len(dnsnames) == 1:
+ raise CertificateError("hostname %r "
+ "doesn't match %r"
+ % (hostname, dnsnames[0]))
+ else:
+ raise CertificateError("no appropriate commonName or "
+ "subjectAltName fields were found")
diff --git a/third_party/python/requests/requests/packages/urllib3/poolmanager.py b/third_party/python/requests/requests/packages/urllib3/poolmanager.py
new file mode 100644
index 0000000000..f13e673d1f
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/poolmanager.py
@@ -0,0 +1,281 @@
+from __future__ import absolute_import
+import logging
+
+try: # Python 3
+ from urllib.parse import urljoin
+except ImportError:
+ from urlparse import urljoin
+
+from ._collections import RecentlyUsedContainer
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from .connectionpool import port_by_scheme
+from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
+from .request import RequestMethods
+from .util.url import parse_url
+from .util.retry import Retry
+
+
+__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
+
+
+pool_classes_by_scheme = {
+ 'http': HTTPConnectionPool,
+ 'https': HTTPSConnectionPool,
+}
+
+log = logging.getLogger(__name__)
+
+SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
+ 'ssl_version', 'ca_cert_dir')
+
+
+class PoolManager(RequestMethods):
+ """
+ Allows for arbitrary requests while transparently keeping track of
+ necessary connection pools for you.
+
+ :param num_pools:
+ Number of connection pools to cache before discarding the least
+ recently used pool.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+
+ :param \**connection_pool_kw:
+ Additional parameters are used to create fresh
+ :class:`urllib3.connectionpool.ConnectionPool` instances.
+
+ Example::
+
+ >>> manager = PoolManager(num_pools=2)
+ >>> r = manager.request('GET', 'http://google.com/')
+ >>> r = manager.request('GET', 'http://google.com/mail')
+ >>> r = manager.request('GET', 'http://yahoo.com/')
+ >>> len(manager.pools)
+ 2
+
+ """
+
+ proxy = None
+
+ def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
+ RequestMethods.__init__(self, headers)
+ self.connection_pool_kw = connection_pool_kw
+ self.pools = RecentlyUsedContainer(num_pools,
+ dispose_func=lambda p: p.close())
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.clear()
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def _new_pool(self, scheme, host, port):
+ """
+ Create a new :class:`ConnectionPool` based on host, port and scheme.
+
+ This method is used to actually create the connection pools handed out
+ by :meth:`connection_from_url` and companion methods. It is intended
+ to be overridden for customization.
+ """
+ pool_cls = pool_classes_by_scheme[scheme]
+ kwargs = self.connection_pool_kw
+ if scheme == 'http':
+ kwargs = self.connection_pool_kw.copy()
+ for kw in SSL_KEYWORDS:
+ kwargs.pop(kw, None)
+
+ return pool_cls(host, port, **kwargs)
+
+ def clear(self):
+ """
+ Empty our store of pools and direct them all to close.
+
+ This will not affect in-flight connections, but they will not be
+ re-used after completion.
+ """
+ self.pools.clear()
+
+ def connection_from_host(self, host, port=None, scheme='http'):
+ """
+ Get a :class:`ConnectionPool` based on the host, port, and scheme.
+
+ If ``port`` isn't given, it will be derived from the ``scheme`` using
+ ``urllib3.connectionpool.port_by_scheme``.
+ """
+
+ if not host:
+ raise LocationValueError("No host specified.")
+
+ scheme = scheme or 'http'
+ port = port or port_by_scheme.get(scheme, 80)
+ pool_key = (scheme, host, port)
+
+ with self.pools.lock:
+ # If the scheme, host, or port doesn't match existing open
+ # connections, open a new ConnectionPool.
+ pool = self.pools.get(pool_key)
+ if pool:
+ return pool
+
+ # Make a fresh ConnectionPool of the desired type
+ pool = self._new_pool(scheme, host, port)
+ self.pools[pool_key] = pool
+
+ return pool
+
+ def connection_from_url(self, url):
+ """
+ Similar to :func:`urllib3.connectionpool.connection_from_url` but
+ doesn't pass any additional parameters to the
+ :class:`urllib3.connectionpool.ConnectionPool` constructor.
+
+ Additional parameters are taken from the :class:`.PoolManager`
+ constructor.
+ """
+ u = parse_url(url)
+ return self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
+
+ def urlopen(self, method, url, redirect=True, **kw):
+ """
+ Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
+ with custom cross-host redirect logic and only sends the request-uri
+ portion of the ``url``.
+
+ The given ``url`` parameter must be absolute, such that an appropriate
+ :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
+ """
+ u = parse_url(url)
+ conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
+
+ kw['assert_same_host'] = False
+ kw['redirect'] = False
+ if 'headers' not in kw:
+ kw['headers'] = self.headers
+
+ if self.proxy is not None and u.scheme == "http":
+ response = conn.urlopen(method, url, **kw)
+ else:
+ response = conn.urlopen(method, u.request_uri, **kw)
+
+ redirect_location = redirect and response.get_redirect_location()
+ if not redirect_location:
+ return response
+
+ # Support relative URLs for redirecting.
+ redirect_location = urljoin(url, redirect_location)
+
+ # RFC 7231, Section 6.4.4
+ if response.status == 303:
+ method = 'GET'
+
+ retries = kw.get('retries')
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect)
+
+ try:
+ retries = retries.increment(method, url, response=response, _pool=conn)
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ raise
+ return response
+
+ kw['retries'] = retries
+ kw['redirect'] = redirect
+
+ log.info("Redirecting %s -> %s" % (url, redirect_location))
+ return self.urlopen(method, redirect_location, **kw)
+
+
+class ProxyManager(PoolManager):
+ """
+ Behaves just like :class:`PoolManager`, but sends all requests through
+ the defined proxy, using the CONNECT method for HTTPS URLs.
+
+ :param proxy_url:
+ The URL of the proxy to be used.
+
+ :param proxy_headers:
+ A dictionary containing headers that will be sent to the proxy. In case
+ of HTTP they are being sent with each request, while in the
+ HTTPS/CONNECT case they are sent only once. Could be used for proxy
+ authentication.
+
+ Example:
+ >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
+ >>> r1 = proxy.request('GET', 'http://google.com/')
+ >>> r2 = proxy.request('GET', 'http://httpbin.org/')
+ >>> len(proxy.pools)
+ 1
+ >>> r3 = proxy.request('GET', 'https://httpbin.org/')
+ >>> r4 = proxy.request('GET', 'https://twitter.com/')
+ >>> len(proxy.pools)
+ 3
+
+ """
+
+ def __init__(self, proxy_url, num_pools=10, headers=None,
+ proxy_headers=None, **connection_pool_kw):
+
+ if isinstance(proxy_url, HTTPConnectionPool):
+ proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
+ proxy_url.port)
+ proxy = parse_url(proxy_url)
+ if not proxy.port:
+ port = port_by_scheme.get(proxy.scheme, 80)
+ proxy = proxy._replace(port=port)
+
+ if proxy.scheme not in ("http", "https"):
+ raise ProxySchemeUnknown(proxy.scheme)
+
+ self.proxy = proxy
+ self.proxy_headers = proxy_headers or {}
+
+ connection_pool_kw['_proxy'] = self.proxy
+ connection_pool_kw['_proxy_headers'] = self.proxy_headers
+
+ super(ProxyManager, self).__init__(
+ num_pools, headers, **connection_pool_kw)
+
+ def connection_from_host(self, host, port=None, scheme='http'):
+ if scheme == "https":
+ return super(ProxyManager, self).connection_from_host(
+ host, port, scheme)
+
+ return super(ProxyManager, self).connection_from_host(
+ self.proxy.host, self.proxy.port, self.proxy.scheme)
+
+ def _set_proxy_headers(self, url, headers=None):
+ """
+ Sets headers needed by proxies: specifically, the Accept and Host
+ headers. Only sets headers not provided by the user.
+ """
+ headers_ = {'Accept': '*/*'}
+
+ netloc = parse_url(url).netloc
+ if netloc:
+ headers_['Host'] = netloc
+
+ if headers:
+ headers_.update(headers)
+ return headers_
+
+ def urlopen(self, method, url, redirect=True, **kw):
+ "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
+ u = parse_url(url)
+
+ if u.scheme == "http":
+ # For proxied HTTPS requests, httplib sets the necessary headers
+ # on the CONNECT to the proxy. For HTTP, we'll definitely
+ # need to set 'Host' at the very least.
+ headers = kw.get('headers', self.headers)
+ kw['headers'] = self._set_proxy_headers(url, headers)
+
+ return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
+
+
+def proxy_from_url(url, **kw):
+ return ProxyManager(proxy_url=url, **kw)
diff --git a/third_party/python/requests/requests/packages/urllib3/request.py b/third_party/python/requests/requests/packages/urllib3/request.py
new file mode 100644
index 0000000000..d5aa62d887
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/request.py
@@ -0,0 +1,151 @@
+from __future__ import absolute_import
+try:
+ from urllib.parse import urlencode
+except ImportError:
+ from urllib import urlencode
+
+from .filepost import encode_multipart_formdata
+
+
+__all__ = ['RequestMethods']
+
+
+class RequestMethods(object):
+ """
+ Convenience mixin for classes that implement a :meth:`urlopen` method, such
+ as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
+ :class:`~urllib3.poolmanager.PoolManager`.
+
+ Provides behavior for making common types of HTTP request methods and
+ decides which type of request field encoding to use.
+
+ Specifically,
+
+ :meth:`.request_encode_url` is for sending requests whose fields are
+ encoded in the URL (such as GET, HEAD, DELETE).
+
+ :meth:`.request_encode_body` is for sending requests whose fields are
+ encoded in the *body* of the request using multipart or www-form-urlencoded
+ (such as for POST, PUT, PATCH).
+
+ :meth:`.request` is for making any kind of request; it will look up the
+ appropriate encoding format and use one of the above two methods to make
+ the request.
+
+ Initializer parameters:
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+ """
+
+ _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])
+
+ def __init__(self, headers=None):
+ self.headers = headers or {}
+
+ def urlopen(self, method, url, body=None, headers=None,
+ encode_multipart=True, multipart_boundary=None,
+ **kw): # Abstract
+ raise NotImplemented("Classes extending RequestMethods must implement "
+ "their own ``urlopen`` method.")
+
+ def request(self, method, url, fields=None, headers=None, **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the appropriate encoding of
+ ``fields`` based on the ``method`` used.
+
+ This is a convenience method that requires the least amount of manual
+ effort. It can be used in most situations, while still having the
+ option to drop down to more specific methods when necessary, such as
+ :meth:`request_encode_url`, :meth:`request_encode_body`,
+ or even the lowest level :meth:`urlopen`.
+ """
+ method = method.upper()
+
+ if method in self._encode_url_methods:
+ return self.request_encode_url(method, url, fields=fields,
+ headers=headers,
+ **urlopen_kw)
+ else:
+ return self.request_encode_body(method, url, fields=fields,
+ headers=headers,
+ **urlopen_kw)
+
+ def request_encode_url(self, method, url, fields=None, headers=None,
+ **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the url. This is useful for request methods like GET, HEAD, DELETE, etc.
+ """
+ if headers is None:
+ headers = self.headers
+
+ extra_kw = {'headers': headers}
+ extra_kw.update(urlopen_kw)
+
+ if fields:
+ url += '?' + urlencode(fields)
+
+ return self.urlopen(method, url, **extra_kw)
+
+ def request_encode_body(self, method, url, fields=None, headers=None,
+ encode_multipart=True, multipart_boundary=None,
+ **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the body. This is useful for request methods like POST, PUT, PATCH, etc.
+
+ When ``encode_multipart=True`` (default), then
+ :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
+ the payload with the appropriate content type. Otherwise
+ :meth:`urllib.urlencode` is used with the
+ 'application/x-www-form-urlencoded' content type.
+
+ Multipart encoding must be used when posting files, and it's reasonably
+ safe to use it at other times too. However, it may break request
+ signing, such as with OAuth.
+
+ Supports an optional ``fields`` parameter of key/value strings AND
+ key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
+ the MIME type is optional. For example::
+
+ fields = {
+ 'foo': 'bar',
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
+ 'realfile': ('barfile.txt', open('realfile').read()),
+ 'typedfile': ('bazfile.bin', open('bazfile').read(),
+ 'image/jpeg'),
+ 'nonamefile': 'contents of nonamefile field',
+ }
+
+ When uploading a file, providing a filename (the first parameter of the
+ tuple) is optional but recommended to best mimic the behavior of browsers.
+
+ Note that if ``headers`` are supplied, the 'Content-Type' header will
+ be overwritten because it depends on the dynamic random boundary string
+ which is used to compose the body of the request. The random boundary
+ string can be explicitly set with the ``multipart_boundary`` parameter.
+ """
+ if headers is None:
+ headers = self.headers
+
+ extra_kw = {'headers': {}}
+
+ if fields:
+ if 'body' in urlopen_kw:
+ raise TypeError(
+ "request got values for both 'fields' and 'body', can only specify one.")
+
+ if encode_multipart:
+ body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
+ else:
+ body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'
+
+ extra_kw['body'] = body
+ extra_kw['headers'] = {'Content-Type': content_type}
+
+ extra_kw['headers'].update(headers)
+ extra_kw.update(urlopen_kw)
+
+ return self.urlopen(method, url, **extra_kw)
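To make the mixin contract concrete, here is a toy transport that records what the mixin would have sent instead of opening a socket; the class, URLs, and field values are made up for illustration::

    from requests.packages.urllib3.request import RequestMethods

    class RecordingTransport(RequestMethods):
        """Fake pool: records calls instead of talking to the network."""

        def __init__(self, headers=None):
            RequestMethods.__init__(self, headers)
            self.calls = []

        def urlopen(self, method, url, body=None, headers=None, **kw):
            self.calls.append((method, url, body, headers))

    t = RecordingTransport(headers={'user-agent': 'example/1.0'})

    # GET goes through request_encode_url(): fields land in the query string.
    t.request('GET', 'http://example.com/search', fields={'q': 'urllib3'})

    # POST goes through request_encode_body(): fields become a multipart body
    # and a matching Content-Type header is generated.
    t.request('POST', 'http://example.com/upload',
              fields={'file': ('notes.txt', 'hello world')})

    for method, url, body, headers in t.calls:
        print(method, url, sorted(headers))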
diff --git a/third_party/python/requests/requests/packages/urllib3/response.py b/third_party/python/requests/requests/packages/urllib3/response.py
new file mode 100644
index 0000000000..8f2a1b5c29
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/response.py
@@ -0,0 +1,514 @@
+from __future__ import absolute_import
+from contextlib import contextmanager
+import zlib
+import io
+from socket import timeout as SocketTimeout
+from socket import error as SocketError
+
+from ._collections import HTTPHeaderDict
+from .exceptions import (
+ ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
+)
+from .packages.six import string_types as basestring, binary_type, PY3
+from .packages.six.moves import http_client as httplib
+from .connection import HTTPException, BaseSSLError
+from .util.response import is_fp_closed, is_response_to_head
+
+
+class DeflateDecoder(object):
+
+ def __init__(self):
+ self._first_try = True
+ self._data = binary_type()
+ self._obj = zlib.decompressobj()
+
+ def __getattr__(self, name):
+ return getattr(self._obj, name)
+
+ def decompress(self, data):
+ if not data:
+ return data
+
+ if not self._first_try:
+ return self._obj.decompress(data)
+
+ self._data += data
+ try:
+ return self._obj.decompress(data)
+ except zlib.error:
+ self._first_try = False
+ self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
+ try:
+ return self.decompress(self._data)
+ finally:
+ self._data = None
+
+
+class GzipDecoder(object):
+
+ def __init__(self):
+ self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+
+ def __getattr__(self, name):
+ return getattr(self._obj, name)
+
+ def decompress(self, data):
+ if not data:
+ return data
+ return self._obj.decompress(data)
+
+
+def _get_decoder(mode):
+ if mode == 'gzip':
+ return GzipDecoder()
+
+ return DeflateDecoder()
+
+
+class HTTPResponse(io.IOBase):
+ """
+ HTTP Response container.
+
+ Backwards-compatible with httplib's HTTPResponse, but the response ``body`` is
+ loaded and decoded on-demand when the ``data`` property is accessed. This
+ class is also compatible with the Python standard library's :mod:`io`
+ module, and can hence be treated as a readable object in the context of that
+ framework.
+
+ Extra parameters for behaviour not present in httplib.HTTPResponse:
+
+ :param preload_content:
+ If True, the response's body will be preloaded during construction.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the 'content-encoding'
+ header (e.g. 'gzip' and 'deflate'). If False, the raw data is returned
+ instead.
+
+ :param original_response:
+ When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
+ object, it's convenient to include the original for debug purposes. It's
+ otherwise unused.
+ """
+
+ CONTENT_DECODERS = ['gzip', 'deflate']
+ REDIRECT_STATUSES = [301, 302, 303, 307, 308]
+
+ def __init__(self, body='', headers=None, status=0, version=0, reason=None,
+ strict=0, preload_content=True, decode_content=True,
+ original_response=None, pool=None, connection=None):
+
+ if isinstance(headers, HTTPHeaderDict):
+ self.headers = headers
+ else:
+ self.headers = HTTPHeaderDict(headers)
+ self.status = status
+ self.version = version
+ self.reason = reason
+ self.strict = strict
+ self.decode_content = decode_content
+
+ self._decoder = None
+ self._body = None
+ self._fp = None
+ self._original_response = original_response
+ self._fp_bytes_read = 0
+
+ if body and isinstance(body, (basestring, binary_type)):
+ self._body = body
+
+ self._pool = pool
+ self._connection = connection
+
+ if hasattr(body, 'read'):
+ self._fp = body
+
+ # Are we using the chunked-style of transfer encoding?
+ self.chunked = False
+ self.chunk_left = None
+ tr_enc = self.headers.get('transfer-encoding', '').lower()
+ # Don't incur the penalty of creating a list and then discarding it
+ encodings = (enc.strip() for enc in tr_enc.split(","))
+ if "chunked" in encodings:
+ self.chunked = True
+
+ # If requested, preload the body.
+ if preload_content and not self._body:
+ self._body = self.read(decode_content=decode_content)
+
+ def get_redirect_location(self):
+ """
+ Should we redirect and where to?
+
+ :returns: Truthy redirect location string if we got a redirect status
+ code and valid location. ``None`` if redirect status and no
+ location. ``False`` if not a redirect status code.
+ """
+ if self.status in self.REDIRECT_STATUSES:
+ return self.headers.get('location')
+
+ return False
+
+ def release_conn(self):
+ if not self._pool or not self._connection:
+ return
+
+ self._pool._put_conn(self._connection)
+ self._connection = None
+
+ @property
+ def data(self):
+ # For backwards-compat with urllib3 0.4 and earlier.
+ if self._body:
+ return self._body
+
+ if self._fp:
+ return self.read(cache_content=True)
+
+ def tell(self):
+ """
+ Obtain the number of bytes pulled over the wire so far. May differ from
+ the amount of content returned by :meth:`HTTPResponse.read` if bytes
+ are encoded on the wire (e.g., compressed).
+ """
+ return self._fp_bytes_read
+
+ def _init_decoder(self):
+ """
+ Set up the _decoder attribute if necessary.
+ """
+ # Note: content-encoding value should be case-insensitive, per RFC 7230
+ # Section 3.2
+ content_encoding = self.headers.get('content-encoding', '').lower()
+ if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
+ self._decoder = _get_decoder(content_encoding)
+
+ def _decode(self, data, decode_content, flush_decoder):
+ """
+ Decode the data passed in and potentially flush the decoder.
+ """
+ try:
+ if decode_content and self._decoder:
+ data = self._decoder.decompress(data)
+ except (IOError, zlib.error) as e:
+ content_encoding = self.headers.get('content-encoding', '').lower()
+ raise DecodeError(
+ "Received response with content-encoding: %s, but "
+ "failed to decode it." % content_encoding, e)
+
+ if flush_decoder and decode_content:
+ data += self._flush_decoder()
+
+ return data
+
+ def _flush_decoder(self):
+ """
+ Flushes the decoder. Should only be called if the decoder is actually
+ being used.
+ """
+ if self._decoder:
+ buf = self._decoder.decompress(b'')
+ return buf + self._decoder.flush()
+
+ return b''
+
+ @contextmanager
+ def _error_catcher(self):
+ """
+ Catch low-level python exceptions, instead re-raising urllib3
+ variants, so that low-level exceptions are not leaked in the
+ high-level api.
+
+ On exit, release the connection back to the pool.
+ """
+ try:
+ try:
+ yield
+
+ except SocketTimeout:
+ # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
+ # there is yet no clean way to get at it from this context.
+ raise ReadTimeoutError(self._pool, None, 'Read timed out.')
+
+ except BaseSSLError as e:
+ # FIXME: Is there a better way to differentiate between SSLErrors?
+ if 'read operation timed out' not in str(e): # Defensive:
+ # This shouldn't happen but just in case we're missing an edge
+ # case, let's avoid swallowing SSL errors.
+ raise
+
+ raise ReadTimeoutError(self._pool, None, 'Read timed out.')
+
+ except (HTTPException, SocketError) as e:
+ # This includes IncompleteRead.
+ raise ProtocolError('Connection broken: %r' % e, e)
+
+ except Exception:
+ # The response may not be closed but we're not going to use it anymore
+ # so close it now to ensure that the connection is released back to the pool.
+ if self._original_response and not self._original_response.isclosed():
+ self._original_response.close()
+
+ # Closing the response may not actually be sufficient to close
+ # everything, so if we have a hold of the connection close that
+ # too.
+ if self._connection is not None:
+ self._connection.close()
+
+ raise
+ finally:
+ if self._original_response and self._original_response.isclosed():
+ self.release_conn()
+
+ def read(self, amt=None, decode_content=None, cache_content=False):
+ """
+ Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
+ parameters: ``decode_content`` and ``cache_content``.
+
+ :param amt:
+ How much of the content to read. If specified, caching is skipped
+ because it doesn't make sense to cache partial content as the full
+ response.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+
+ :param cache_content:
+ If True, will save the returned data such that the same result is
+ returned regardless of the state of the underlying file object. This
+ is useful if you want the ``.data`` property to continue working
+ after having ``.read()`` the file object. (Overridden if ``amt`` is
+ set.)
+ """
+ self._init_decoder()
+ if decode_content is None:
+ decode_content = self.decode_content
+
+ if self._fp is None:
+ return
+
+ flush_decoder = False
+ data = None
+
+ with self._error_catcher():
+ if amt is None:
+ # cStringIO doesn't like amt=None
+ data = self._fp.read()
+ flush_decoder = True
+ else:
+ cache_content = False
+ data = self._fp.read(amt)
+ if amt != 0 and not data: # Platform-specific: Buggy versions of Python.
+ # Close the connection when no data is returned
+ #
+ # This is redundant to what httplib/http.client _should_
+ # already do. However, versions of python released before
+ # December 15, 2012 (http://bugs.python.org/issue16298) do
+ # not properly close the connection in all cases. There is
+ # no harm in redundantly calling close.
+ self._fp.close()
+ flush_decoder = True
+
+ if data:
+ self._fp_bytes_read += len(data)
+
+ data = self._decode(data, decode_content, flush_decoder)
+
+ if cache_content:
+ self._body = data
+
+ return data
+
+ def stream(self, amt=2**16, decode_content=None):
+ """
+ A generator wrapper for the read() method. A call will block until
+ ``amt`` bytes have been read from the connection or until the
+ connection is closed.
+
+ :param amt:
+ How much of the content to read. The generator will return up to this
+ much data per iteration, but may return less. This is particularly
+ likely when using compressed data. However, the empty string will
+ never be returned.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+ """
+ if self.chunked:
+ for line in self.read_chunked(amt, decode_content=decode_content):
+ yield line
+ else:
+ while not is_fp_closed(self._fp):
+ data = self.read(amt=amt, decode_content=decode_content)
+
+ if data:
+ yield data
+
+ @classmethod
+ def from_httplib(ResponseCls, r, **response_kw):
+ """
+ Given an :class:`httplib.HTTPResponse` instance ``r``, return a
+ corresponding :class:`urllib3.response.HTTPResponse` object.
+
+ Remaining parameters are passed to the HTTPResponse constructor, along
+ with ``original_response=r``.
+ """
+ headers = r.msg
+
+ if not isinstance(headers, HTTPHeaderDict):
+ if PY3: # Python 3
+ headers = HTTPHeaderDict(headers.items())
+ else: # Python 2
+ headers = HTTPHeaderDict.from_httplib(headers)
+
+ # HTTPResponse objects in Python 3 don't have a .strict attribute
+ strict = getattr(r, 'strict', 0)
+ resp = ResponseCls(body=r,
+ headers=headers,
+ status=r.status,
+ version=r.version,
+ reason=r.reason,
+ strict=strict,
+ original_response=r,
+ **response_kw)
+ return resp
+
+ # Backwards-compatibility methods for httplib.HTTPResponse
+ def getheaders(self):
+ return self.headers
+
+ def getheader(self, name, default=None):
+ return self.headers.get(name, default)
+
+ # Overrides from io.IOBase
+ def close(self):
+ if not self.closed:
+ self._fp.close()
+
+ @property
+ def closed(self):
+ if self._fp is None:
+ return True
+ elif hasattr(self._fp, 'closed'):
+ return self._fp.closed
+ elif hasattr(self._fp, 'isclosed'): # Python 2
+ return self._fp.isclosed()
+ else:
+ return True
+
+ def fileno(self):
+ if self._fp is None:
+ raise IOError("HTTPResponse has no file to get a fileno from")
+ elif hasattr(self._fp, "fileno"):
+ return self._fp.fileno()
+ else:
+ raise IOError("The file-like object this HTTPResponse is wrapped "
+ "around has no file descriptor")
+
+ def flush(self):
+ if self._fp is not None and hasattr(self._fp, 'flush'):
+ return self._fp.flush()
+
+ def readable(self):
+ # This method is required for `io` module compatibility.
+ return True
+
+ def readinto(self, b):
+ # This method is required for `io` module compatibility.
+ temp = self.read(len(b))
+ if len(temp) == 0:
+ return 0
+ else:
+ b[:len(temp)] = temp
+ return len(temp)
+
+ def _update_chunk_length(self):
+ # First, we'll figure out the length of a chunk and then
+ # we'll try to read it from socket.
+ if self.chunk_left is not None:
+ return
+ line = self._fp.fp.readline()
+ line = line.split(b';', 1)[0]
+ try:
+ self.chunk_left = int(line, 16)
+ except ValueError:
+ # Invalid chunked protocol response, abort.
+ self.close()
+ raise httplib.IncompleteRead(line)
+
+ def _handle_chunk(self, amt):
+ returned_chunk = None
+ if amt is None:
+ chunk = self._fp._safe_read(self.chunk_left)
+ returned_chunk = chunk
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ elif amt < self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self.chunk_left = self.chunk_left - amt
+ returned_chunk = value
+ elif amt == self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ returned_chunk = value
+ else: # amt > self.chunk_left
+ returned_chunk = self._fp._safe_read(self.chunk_left)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ return returned_chunk
+
+ def read_chunked(self, amt=None, decode_content=None):
+ """
+ Similar to :meth:`HTTPResponse.read`, but with an additional
+ parameter: ``decode_content``.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+ """
+ self._init_decoder()
+ # FIXME: Rewrite this method and make it a class with a better structured logic.
+ if not self.chunked:
+ raise ResponseNotChunked(
+ "Response is not chunked. "
+ "Header 'transfer-encoding: chunked' is missing.")
+
+ # Don't bother reading the body of a HEAD request.
+ if self._original_response and is_response_to_head(self._original_response):
+ self._original_response.close()
+ return
+
+ with self._error_catcher():
+ while True:
+ self._update_chunk_length()
+ if self.chunk_left == 0:
+ break
+ chunk = self._handle_chunk(amt)
+ decoded = self._decode(chunk, decode_content=decode_content,
+ flush_decoder=False)
+ if decoded:
+ yield decoded
+
+ if decode_content:
+ # On CPython and PyPy, we should never need to flush the
+ # decoder. However, on Jython we *might* need to, so
+ # let's defensively do it anyway.
+ decoded = self._flush_decoder()
+ if decoded: # Platform-specific: Jython.
+ yield decoded
+
+ # Chunk content ends with \r\n: discard it.
+ while True:
+ line = self._fp.fp.readline()
+ if not line:
+ # Some sites may not end with '\r\n'.
+ break
+ if line == b'\r\n':
+ break
+
+ # We read everything; close the "file".
+ if self._original_response:
+ self._original_response.close()
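A minimal sketch of the deferred-read path this class supports: pass preload_content=False through a pool, then iterate stream() and hand the connection back (the URL is a placeholder and the import path assumes the vendored layout)::

    from requests.packages.urllib3 import PoolManager

    http = PoolManager()
    r = http.request('GET', 'http://httpbin.org/bytes/65536',
                     preload_content=False)    # do not read the body up front

    total = 0
    for chunk in r.stream(amt=8192, decode_content=True):
        total += len(chunk)                    # works for chunked and plain bodies

    r.release_conn()                           # return the socket to the pool
    print(total, 'bytes, status', r.status)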
diff --git a/third_party/python/requests/requests/packages/urllib3/util/__init__.py b/third_party/python/requests/requests/packages/urllib3/util/__init__.py
new file mode 100644
index 0000000000..c6c6243cf1
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/util/__init__.py
@@ -0,0 +1,44 @@
+from __future__ import absolute_import
+# For backwards compatibility, provide imports that used to be here.
+from .connection import is_connection_dropped
+from .request import make_headers
+from .response import is_fp_closed
+from .ssl_ import (
+ SSLContext,
+ HAS_SNI,
+ assert_fingerprint,
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
+)
+from .timeout import (
+ current_time,
+ Timeout,
+)
+
+from .retry import Retry
+from .url import (
+ get_host,
+ parse_url,
+ split_first,
+ Url,
+)
+
+__all__ = (
+ 'HAS_SNI',
+ 'SSLContext',
+ 'Retry',
+ 'Timeout',
+ 'Url',
+ 'assert_fingerprint',
+ 'current_time',
+ 'is_connection_dropped',
+ 'is_fp_closed',
+ 'get_host',
+ 'parse_url',
+ 'make_headers',
+ 'resolve_cert_reqs',
+ 'resolve_ssl_version',
+ 'split_first',
+ 'ssl_wrap_socket',
+)
diff --git a/third_party/python/requests/requests/packages/urllib3/util/connection.py b/third_party/python/requests/requests/packages/urllib3/util/connection.py
new file mode 100644
index 0000000000..01a4812f21
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/util/connection.py
@@ -0,0 +1,101 @@
+from __future__ import absolute_import
+import socket
+try:
+ from select import poll, POLLIN
+except ImportError: # `poll` doesn't exist on OSX and other platforms
+ poll = False
+ try:
+ from select import select
+ except ImportError: # `select` doesn't exist on AppEngine.
+ select = False
+
+
+def is_connection_dropped(conn): # Platform-specific
+ """
+ Returns True if the connection is dropped and should be closed.
+
+ :param conn:
+ :class:`httplib.HTTPConnection` object.
+
+ Note: For platforms like AppEngine, this will always return ``False`` to
+ let the platform handle connection recycling transparently for us.
+ """
+ sock = getattr(conn, 'sock', False)
+ if sock is False: # Platform-specific: AppEngine
+ return False
+ if sock is None: # Connection already closed (such as by httplib).
+ return True
+
+ if not poll:
+ if not select: # Platform-specific: AppEngine
+ return False
+
+ try:
+ return select([sock], [], [], 0.0)[0]
+ except socket.error:
+ return True
+
+ # This version is better on platforms that support it.
+ p = poll()
+ p.register(sock, POLLIN)
+ for (fno, ev) in p.poll(0.0):
+ if fno == sock.fileno():
+ # Either data is buffered (bad), or the connection is dropped.
+ return True
+
+
+# This function is copied from socket.py in the Python 2.7 standard
+# library test suite. Added to its signature is only `socket_options`.
+def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ source_address=None, socket_options=None):
+ """Connect to *address* and return the socket object.
+
+ Convenience function. Connect to *address* (a 2-tuple ``(host,
+ port)``) and return the socket object. Passing the optional
+ *timeout* parameter will set the timeout on the socket instance
+ before attempting to connect. If no *timeout* is supplied, the
+ global default timeout setting returned by :func:`getdefaulttimeout`
+ is used. If *source_address* is set it must be a tuple of (host, port)
+ for the socket to bind as a source address before making the connection.
+ A host of '' or port 0 tells the OS to use the default.
+ """
+
+ host, port = address
+ if host.startswith('['):
+ host = host.strip('[]')
+ err = None
+ for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
+ af, socktype, proto, canonname, sa = res
+ sock = None
+ try:
+ sock = socket.socket(af, socktype, proto)
+
+ # If provided, set socket level options before connecting.
+ # This is the only addition urllib3 makes to this function.
+ _set_socket_options(sock, socket_options)
+
+ if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
+ sock.settimeout(timeout)
+ if source_address:
+ sock.bind(source_address)
+ sock.connect(sa)
+ return sock
+
+ except socket.error as e:
+ err = e
+ if sock is not None:
+ sock.close()
+ sock = None
+
+ if err is not None:
+ raise err
+
+ raise socket.error("getaddrinfo returns an empty list")
+
+
+def _set_socket_options(sock, options):
+ if options is None:
+ return
+
+ for opt in options:
+ sock.setsockopt(*opt)
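A short sketch of the socket_options hook that this version of create_connection() adds over the stdlib helper; the address and request bytes are placeholders::

    import socket

    sock = create_connection(
        ('example.com', 80),
        timeout=5.0,
        # each tuple is passed straight to sock.setsockopt()
        socket_options=[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)],
    )
    try:
        sock.sendall(b'HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n')
        print(sock.recv(128))
    finally:
        sock.close()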
diff --git a/third_party/python/requests/requests/packages/urllib3/util/request.py b/third_party/python/requests/requests/packages/urllib3/util/request.py
new file mode 100644
index 0000000000..73779315f4
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/util/request.py
@@ -0,0 +1,72 @@
+from __future__ import absolute_import
+from base64 import b64encode
+
+from ..packages.six import b
+
+ACCEPT_ENCODING = 'gzip,deflate'
+
+
+def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
+ basic_auth=None, proxy_basic_auth=None, disable_cache=None):
+ """
+ Shortcuts for generating request headers.
+
+ :param keep_alive:
+ If ``True``, adds 'connection: keep-alive' header.
+
+ :param accept_encoding:
+ Can be a boolean, list, or string.
+ ``True`` translates to 'gzip,deflate'.
+ List will get joined by comma.
+ String will be used as provided.
+
+ :param user_agent:
+ String representing the user-agent you want, such as
+ "python-urllib3/0.6"
+
+ :param basic_auth:
+ Colon-separated username:password string for 'authorization: basic ...'
+ auth header.
+
+ :param proxy_basic_auth:
+ Colon-separated username:password string for 'proxy-authorization: basic ...'
+ auth header.
+
+ :param disable_cache:
+ If ``True``, adds 'cache-control: no-cache' header.
+
+ Example::
+
+ >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
+ {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
+ >>> make_headers(accept_encoding=True)
+ {'accept-encoding': 'gzip,deflate'}
+ """
+ headers = {}
+ if accept_encoding:
+ if isinstance(accept_encoding, str):
+ pass
+ elif isinstance(accept_encoding, list):
+ accept_encoding = ','.join(accept_encoding)
+ else:
+ accept_encoding = ACCEPT_ENCODING
+ headers['accept-encoding'] = accept_encoding
+
+ if user_agent:
+ headers['user-agent'] = user_agent
+
+ if keep_alive:
+ headers['connection'] = 'keep-alive'
+
+ if basic_auth:
+ headers['authorization'] = 'Basic ' + \
+ b64encode(b(basic_auth)).decode('utf-8')
+
+ if proxy_basic_auth:
+ headers['proxy-authorization'] = 'Basic ' + \
+ b64encode(b(proxy_basic_auth)).decode('utf-8')
+
+ if disable_cache:
+ headers['cache-control'] = 'no-cache'
+
+ return headers
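One more usage sketch covering the auth-related shortcuts that the docstring example above does not show; the credentials are placeholders::

    headers = make_headers(
        keep_alive=True,
        accept_encoding=['gzip', 'deflate'],
        basic_auth='alice:secret',
        disable_cache=True,
    )
    # {'connection': 'keep-alive',
    #  'accept-encoding': 'gzip,deflate',
    #  'authorization': 'Basic YWxpY2U6c2VjcmV0',
    #  'cache-control': 'no-cache'}
    print(headers)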
diff --git a/third_party/python/requests/requests/packages/urllib3/util/response.py b/third_party/python/requests/requests/packages/urllib3/util/response.py
new file mode 100644
index 0000000000..bc7232720d
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/util/response.py
@@ -0,0 +1,74 @@
+from __future__ import absolute_import
+from ..packages.six.moves import http_client as httplib
+
+from ..exceptions import HeaderParsingError
+
+
+def is_fp_closed(obj):
+ """
+ Checks whether a given file-like object is closed.
+
+ :param obj:
+ The file-like object to check.
+ """
+
+ try:
+ # Check via the official file-like-object way.
+ return obj.closed
+ except AttributeError:
+ pass
+
+ try:
+ # Check if the object is a container for another file-like object that
+ # gets released on exhaustion (e.g. HTTPResponse).
+ return obj.fp is None
+ except AttributeError:
+ pass
+
+ raise ValueError("Unable to determine whether fp is closed.")
+
+
+def assert_header_parsing(headers):
+ """
+ Asserts whether all headers have been successfully parsed.
+ Extracts encountered errors from the result of parsing headers.
+
+ Only works on Python 3.
+
+ :param headers: Headers to verify.
+ :type headers: `httplib.HTTPMessage`.
+
+ :raises urllib3.exceptions.HeaderParsingError:
+ If parsing errors are found.
+ """
+
+ # This will fail silently if we pass in the wrong kind of parameter.
+ # To make debugging easier add an explicit check.
+ if not isinstance(headers, httplib.HTTPMessage):
+ raise TypeError('expected httplib.HTTPMessage, got {0}.'.format(
+ type(headers)))
+
+ defects = getattr(headers, 'defects', None)
+ get_payload = getattr(headers, 'get_payload', None)
+
+ unparsed_data = None
+ if get_payload: # Platform-specific: Python 3.
+ unparsed_data = get_payload()
+
+ if defects or unparsed_data:
+ raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
+
+
+def is_response_to_head(response):
+ """
+ Checks whether the request that produced a response was a HEAD request.
+ Handles the quirks of AppEngine.
+
+ :param response: The response to check.
+ :type response: :class:`httplib.HTTPResponse`
+ """
+ # FIXME: Can we do this somehow without accessing private httplib _method?
+ method = response._method
+ if isinstance(method, int): # Platform-specific: Appengine
+ return method == 3
+ return method.upper() == 'HEAD'
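A small sketch of the two probes is_fp_closed() relies on: the standard .closed attribute and the httplib convention of dropping .fp when the body is exhausted (the fake response class is illustrative only)::

    import io

    fp = io.BytesIO(b'payload')
    print(is_fp_closed(fp))     # False: the .closed attribute is checked first
    fp.close()
    print(is_fp_closed(fp))     # True

    class FakeHTTPResponse(object):
        # httplib.HTTPResponse sets .fp to None once the body is drained
        def __init__(self):
            self.fp = None

    print(is_fp_closed(FakeHTTPResponse()))   # True: falls back to the .fp probe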
diff --git a/third_party/python/requests/requests/packages/urllib3/util/retry.py b/third_party/python/requests/requests/packages/urllib3/util/retry.py
new file mode 100644
index 0000000000..03a01249dd
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/util/retry.py
@@ -0,0 +1,286 @@
+from __future__ import absolute_import
+import time
+import logging
+
+from ..exceptions import (
+ ConnectTimeoutError,
+ MaxRetryError,
+ ProtocolError,
+ ReadTimeoutError,
+ ResponseError,
+)
+from ..packages import six
+
+
+log = logging.getLogger(__name__)
+
+
+class Retry(object):
+ """ Retry configuration.
+
+ Each retry attempt will create a new Retry object with updated values, so
+ they can be safely reused.
+
+ Retries can be defined as a default for a pool::
+
+ retries = Retry(connect=5, read=2, redirect=5)
+ http = PoolManager(retries=retries)
+ response = http.request('GET', 'http://example.com/')
+
+ Or per-request (which overrides the default for the pool)::
+
+ response = http.request('GET', 'http://example.com/', retries=Retry(10))
+
+ Retries can be disabled by passing ``False``::
+
+ response = http.request('GET', 'http://example.com/', retries=False)
+
+ Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
+ retries are disabled, in which case the causing exception will be raised.
+
+ :param int total:
+ Total number of retries to allow. Takes precedence over other counts.
+
+ Set to ``None`` to remove this constraint and fall back on other
+ counts. It's a good idea to set this to some sensibly-high value to
+ account for unexpected edge cases and avoid infinite retry loops.
+
+ Set to ``0`` to fail on the first retry.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param int connect:
+ How many connection-related errors to retry on.
+
+ These are errors raised before the request is sent to the remote server,
+ which we assume has not triggered the server to process the request.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int read:
+ How many times to retry on read errors.
+
+ These errors are raised after the request was sent to the server, so the
+ request may have side-effects.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int redirect:
+ How many redirects to perform. Limit this to avoid infinite redirect
+ loops.
+
+ A redirect is an HTTP response with a status code of 301, 302, 303, 307 or
+ 308.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param iterable method_whitelist:
+ Set of uppercased HTTP method verbs that we should retry on.
+
+ By default, we only retry on methods which are considered to be
+ idempotent (multiple requests with the same parameters end with the
+ same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
+
+ :param iterable status_forcelist:
+ A set of HTTP status codes that we should force a retry on.
+
+ By default, this is disabled with ``None``.
+
+ :param float backoff_factor:
+ A backoff factor to apply between attempts. urllib3 will sleep for::
+
+ {backoff factor} * (2 ^ ({number of total retries} - 1))
+
+ seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
+ for [0.1s, 0.2s, 0.4s, ...] between retries. It will never be longer
+ than :attr:`Retry.BACKOFF_MAX`.
+
+ By default, backoff is disabled (set to 0).
+
+ :param bool raise_on_redirect: Whether, if the number of redirects is
+ exhausted, to raise a MaxRetryError, or to return a response with a
+ response code in the 3xx range.
+ """
+
+ DEFAULT_METHOD_WHITELIST = frozenset([
+ 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'])
+
+ #: Maximum backoff time.
+ BACKOFF_MAX = 120
+
+ def __init__(self, total=10, connect=None, read=None, redirect=None,
+ method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,
+ backoff_factor=0, raise_on_redirect=True, _observed_errors=0):
+
+ self.total = total
+ self.connect = connect
+ self.read = read
+
+ if redirect is False or total is False:
+ redirect = 0
+ raise_on_redirect = False
+
+ self.redirect = redirect
+ self.status_forcelist = status_forcelist or set()
+ self.method_whitelist = method_whitelist
+ self.backoff_factor = backoff_factor
+ self.raise_on_redirect = raise_on_redirect
+ self._observed_errors = _observed_errors # TODO: use .history instead?
+
+ def new(self, **kw):
+ params = dict(
+ total=self.total,
+ connect=self.connect, read=self.read, redirect=self.redirect,
+ method_whitelist=self.method_whitelist,
+ status_forcelist=self.status_forcelist,
+ backoff_factor=self.backoff_factor,
+ raise_on_redirect=self.raise_on_redirect,
+ _observed_errors=self._observed_errors,
+ )
+ params.update(kw)
+ return type(self)(**params)
+
+ @classmethod
+ def from_int(cls, retries, redirect=True, default=None):
+ """ Backwards-compatibility for the old retries format."""
+ if retries is None:
+ retries = default if default is not None else cls.DEFAULT
+
+ if isinstance(retries, Retry):
+ return retries
+
+ redirect = bool(redirect) and None
+ new_retries = cls(retries, redirect=redirect)
+ log.debug("Converted retries value: %r -> %r" % (retries, new_retries))
+ return new_retries
+
+ def get_backoff_time(self):
+ """ Formula for computing the current backoff
+
+ :rtype: float
+ """
+ if self._observed_errors <= 1:
+ return 0
+
+ backoff_value = self.backoff_factor * (2 ** (self._observed_errors - 1))
+ return min(self.BACKOFF_MAX, backoff_value)
+
+ def sleep(self):
+ """ Sleep between retry attempts using an exponential backoff.
+
+ By default, the backoff factor is 0 and this method will return
+ immediately.
+ """
+ backoff = self.get_backoff_time()
+ if backoff <= 0:
+ return
+ time.sleep(backoff)
+
+ def _is_connection_error(self, err):
+ """ Errors when we're fairly sure that the server did not receive the
+ request, so it should be safe to retry.
+ """
+ return isinstance(err, ConnectTimeoutError)
+
+ def _is_read_error(self, err):
+ """ Errors that occur after the request has been started, so we should
+ assume that the server began processing it.
+ """
+ return isinstance(err, (ReadTimeoutError, ProtocolError))
+
+ def is_forced_retry(self, method, status_code):
+ """ Is this method/status code retryable? (Based on method/codes whitelists)
+ """
+ if self.method_whitelist and method.upper() not in self.method_whitelist:
+ return False
+
+ return self.status_forcelist and status_code in self.status_forcelist
+
+ def is_exhausted(self):
+ """ Are we out of retries? """
+ retry_counts = (self.total, self.connect, self.read, self.redirect)
+ retry_counts = list(filter(None, retry_counts))
+ if not retry_counts:
+ return False
+
+ return min(retry_counts) < 0
+
+ def increment(self, method=None, url=None, response=None, error=None,
+ _pool=None, _stacktrace=None):
+ """ Return a new Retry object with incremented retry counters.
+
+ :param response: A response object, or None, if the server did not
+ return a response.
+ :type response: :class:`~urllib3.response.HTTPResponse`
+ :param Exception error: An error encountered during the request, or
+ None if the response was received successfully.
+
+ :return: A new ``Retry`` object.
+ """
+ if self.total is False and error:
+ # Disabled, indicate to re-raise the error.
+ raise six.reraise(type(error), error, _stacktrace)
+
+ total = self.total
+ if total is not None:
+ total -= 1
+
+ _observed_errors = self._observed_errors
+ connect = self.connect
+ read = self.read
+ redirect = self.redirect
+ cause = 'unknown'
+
+ if error and self._is_connection_error(error):
+ # Connect retry?
+ if connect is False:
+ raise six.reraise(type(error), error, _stacktrace)
+ elif connect is not None:
+ connect -= 1
+ _observed_errors += 1
+
+ elif error and self._is_read_error(error):
+ # Read retry?
+ if read is False:
+ raise six.reraise(type(error), error, _stacktrace)
+ elif read is not None:
+ read -= 1
+ _observed_errors += 1
+
+ elif response and response.get_redirect_location():
+ # Redirect retry?
+ if redirect is not None:
+ redirect -= 1
+ cause = 'too many redirects'
+
+ else:
+ # Incrementing because of a server error like a 500 in
+ # status_forcelist and the given method is in the whitelist
+ _observed_errors += 1
+ cause = ResponseError.GENERIC_ERROR
+ if response and response.status:
+ cause = ResponseError.SPECIFIC_ERROR.format(
+ status_code=response.status)
+
+ new_retry = self.new(
+ total=total,
+ connect=connect, read=read, redirect=redirect,
+ _observed_errors=_observed_errors)
+
+ if new_retry.is_exhausted():
+ raise MaxRetryError(_pool, url, error or ResponseError(cause))
+
+ log.debug("Incremented Retry for (url='%s'): %r" % (url, new_retry))
+
+ return new_retry
+
+ def __repr__(self):
+ return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
+ 'read={self.read}, redirect={self.redirect})').format(
+ cls=type(self), self=self)
+
+
+# For backwards compatibility (equivalent to pre-v1.9):
+Retry.DEFAULT = Retry(3)
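+
+# A minimal usage sketch (values are illustrative, not prescriptive): with a
+# backoff_factor of 0.5, get_backoff_time() grows as
+# 0.5 * 2 ** (observed_errors - 1), capped at BACKOFF_MAX, so the sleeps after
+# the second, third and fourth observed error are roughly 1.0s, 2.0s and 4.0s
+# (the first error sleeps for 0 seconds):
+#
+#     >>> retries = Retry(total=5, backoff_factor=0.5, status_forcelist=[500])
+#     >>> retries = retries.increment()   # first error: backoff is 0
+#     >>> retries = retries.increment()   # second error: backoff is 1.0
+#     >>> retries.get_backoff_time()
+#     1.0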
diff --git a/third_party/python/requests/requests/packages/urllib3/util/ssl_.py b/third_party/python/requests/requests/packages/urllib3/util/ssl_.py
new file mode 100644
index 0000000000..67f83441e2
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/util/ssl_.py
@@ -0,0 +1,317 @@
+from __future__ import absolute_import
+import errno
+import warnings
+import hmac
+
+from binascii import hexlify, unhexlify
+from hashlib import md5, sha1, sha256
+
+from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
+
+
+SSLContext = None
+HAS_SNI = False
+create_default_context = None
+
+# Maps the length of a digest to a possible hash function producing this digest
+HASHFUNC_MAP = {
+ 32: md5,
+ 40: sha1,
+ 64: sha256,
+}
+
+
+def _const_compare_digest_backport(a, b):
+ """
+ Compare two digests of equal length in constant time.
+
+ The digests must be of type str/bytes.
+ Returns True if the digests match, and False otherwise.
+ """
+ result = abs(len(a) - len(b))
+ for l, r in zip(bytearray(a), bytearray(b)):
+ result |= l ^ r
+ return result == 0
+
+
+_const_compare_digest = getattr(hmac, 'compare_digest',
+ _const_compare_digest_backport)
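+
+# For illustration (the byte strings below are made-up stand-ins for real
+# digests): equal inputs compare True, and the comparison does not
+# short-circuit at the first mismatching byte.
+#
+#     >>> _const_compare_digest(b'\x01\x02\x03', b'\x01\x02\x03')
+#     True
+#     >>> _const_compare_digest(b'\x01\x02\x03', b'\x01\x02\xff')
+#     False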
+
+
+try: # Test for SSL features
+ import ssl
+ from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
+ from ssl import HAS_SNI # Has SNI?
+except ImportError:
+ pass
+
+
+try:
+ from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
+except ImportError:
+ OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
+ OP_NO_COMPRESSION = 0x20000
+
+# A secure default.
+# Sources for more information on TLS ciphers:
+#
+# - https://wiki.mozilla.org/Security/Server_Side_TLS
+# - https://www.ssllabs.com/projects/best-practices/index.html
+# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
+#
+# The general intent is:
+# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
+# - prefer ECDHE over DHE for better performance,
+# - prefer any AES-GCM over any AES-CBC for better performance and security,
+# - use 3DES as fallback which is secure but slow,
+# - disable NULL authentication, MD5 MACs and DSS for security reasons.
+DEFAULT_CIPHERS = (
+ 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
+ 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
+ '!eNULL:!MD5'
+)
+
+try:
+ from ssl import SSLContext # Modern SSL?
+except ImportError:
+ import sys
+
+ class SSLContext(object): # Platform-specific: Python 2 & 3.1
+ supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or
+ (3, 2) <= sys.version_info)
+
+ def __init__(self, protocol_version):
+ self.protocol = protocol_version
+ # Use default values from a real SSLContext
+ self.check_hostname = False
+ self.verify_mode = ssl.CERT_NONE
+ self.ca_certs = None
+ self.options = 0
+ self.certfile = None
+ self.keyfile = None
+ self.ciphers = None
+
+ def load_cert_chain(self, certfile, keyfile):
+ self.certfile = certfile
+ self.keyfile = keyfile
+
+ def load_verify_locations(self, cafile=None, capath=None):
+ self.ca_certs = cafile
+
+ if capath is not None:
+ raise SSLError("CA directories not supported in older Pythons")
+
+ def set_ciphers(self, cipher_suite):
+ if not self.supports_set_ciphers:
+ raise TypeError(
+ 'Your version of Python does not support setting '
+ 'a custom cipher suite. Please upgrade to Python '
+ '2.7, 3.2, or later if you need this functionality.'
+ )
+ self.ciphers = cipher_suite
+
+ def wrap_socket(self, socket, server_hostname=None):
+ warnings.warn(
+ 'A true SSLContext object is not available. This prevents '
+ 'urllib3 from configuring SSL appropriately and may cause '
+ 'certain SSL connections to fail. For more information, see '
+ 'https://urllib3.readthedocs.org/en/latest/security.html'
+ '#insecureplatformwarning.',
+ InsecurePlatformWarning
+ )
+ kwargs = {
+ 'keyfile': self.keyfile,
+ 'certfile': self.certfile,
+ 'ca_certs': self.ca_certs,
+ 'cert_reqs': self.verify_mode,
+ 'ssl_version': self.protocol,
+ }
+ if self.supports_set_ciphers: # Platform-specific: Python 2.7+
+ return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
+ else: # Platform-specific: Python 2.6
+ return wrap_socket(socket, **kwargs)
+
+
+def assert_fingerprint(cert, fingerprint):
+ """
+ Checks if given fingerprint matches the supplied certificate.
+
+ :param cert:
+ Certificate as bytes object.
+ :param fingerprint:
+ Fingerprint as string of hexdigits, can be interspersed by colons.
+ """
+
+ fingerprint = fingerprint.replace(':', '').lower()
+ digest_length = len(fingerprint)
+ hashfunc = HASHFUNC_MAP.get(digest_length)
+ if not hashfunc:
+ raise SSLError(
+ 'Fingerprint of invalid length: {0}'.format(fingerprint))
+
+    # We need encode() here for py32; works on py2 and py33.
+ fingerprint_bytes = unhexlify(fingerprint.encode())
+
+ cert_digest = hashfunc(cert).digest()
+
+ if not _const_compare_digest(cert_digest, fingerprint_bytes):
+ raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
+ .format(fingerprint, hexlify(cert_digest)))
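+
+# A hedged sketch of typical use (the socket object and the pin below are
+# placeholders, not real values):
+#
+#     >>> der_cert = tls_sock.getpeercert(binary_form=True)
+#     >>> assert_fingerprint(der_cert, 'aa:bb:...:ff')  # raises SSLError on mismatch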
+
+
+def resolve_cert_reqs(candidate):
+ """
+ Resolves the argument to a numeric constant, which can be passed to
+ the wrap_socket function/method from the ssl module.
+ Defaults to :data:`ssl.CERT_NONE`.
+ If given a string it is assumed to be the name of the constant in the
+    :mod:`ssl` module or its abbreviation.
+    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
+ If it's neither `None` nor a string we assume it is already the numeric
+ constant which can directly be passed to wrap_socket.
+ """
+ if candidate is None:
+ return CERT_NONE
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, 'CERT_' + candidate)
+ return res
+
+ return candidate
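+
+# For illustration, string names, their CERT_-prefixed forms, and None all
+# resolve as described above:
+#
+#     >>> resolve_cert_reqs(None) == ssl.CERT_NONE
+#     True
+#     >>> resolve_cert_reqs('REQUIRED') == ssl.CERT_REQUIRED
+#     True
+#     >>> resolve_cert_reqs('CERT_REQUIRED') == ssl.CERT_REQUIRED
+#     True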
+
+
+def resolve_ssl_version(candidate):
+ """
+    Like :func:`resolve_cert_reqs`, but for SSL/TLS protocol versions.
+ """
+ if candidate is None:
+ return PROTOCOL_SSLv23
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, 'PROTOCOL_' + candidate)
+ return res
+
+ return candidate
+
+
+def create_urllib3_context(ssl_version=None, cert_reqs=None,
+ options=None, ciphers=None):
+ """All arguments have the same meaning as ``ssl_wrap_socket``.
+
+ By default, this function does a lot of the same work that
+ ``ssl.create_default_context`` does on Python 3.4+. It:
+
+ - Disables SSLv2, SSLv3, and compression
+ - Sets a restricted set of server ciphers
+
+ If you wish to enable SSLv3, you can do::
+
+ from urllib3.util import ssl_
+ context = ssl_.create_urllib3_context()
+ context.options &= ~ssl_.OP_NO_SSLv3
+
+ You can do the same to enable compression (substituting ``COMPRESSION``
+ for ``SSLv3`` in the last line above).
+
+ :param ssl_version:
+ The desired protocol version to use. This will default to
+ PROTOCOL_SSLv23 which will negotiate the highest protocol that both
+ the server and your installation of OpenSSL support.
+ :param cert_reqs:
+        Whether to require certificate verification. This defaults to
+ ``ssl.CERT_REQUIRED``.
+ :param options:
+ Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
+ ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
+ :param ciphers:
+ Which cipher suites to allow the server to select.
+ :returns:
+ Constructed SSLContext object with specified options
+ :rtype: SSLContext
+ """
+ context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)
+
+ # Setting the default here, as we may have no ssl module on import
+ cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
+
+ if options is None:
+ options = 0
+ # SSLv2 is easily broken and is considered harmful and dangerous
+ options |= OP_NO_SSLv2
+ # SSLv3 has several problems and is now dangerous
+ options |= OP_NO_SSLv3
+ # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
+ # (issue #309)
+ options |= OP_NO_COMPRESSION
+
+ context.options |= options
+
+ if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6
+ context.set_ciphers(ciphers or DEFAULT_CIPHERS)
+
+ context.verify_mode = cert_reqs
+ if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2
+ # We do our own verification, including fingerprints and alternative
+ # hostnames. So disable it here
+ context.check_hostname = False
+ return context
+
+
+def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
+ ca_certs=None, server_hostname=None,
+ ssl_version=None, ciphers=None, ssl_context=None,
+ ca_cert_dir=None):
+ """
+ All arguments except for server_hostname, ssl_context, and ca_cert_dir have
+ the same meaning as they do when using :func:`ssl.wrap_socket`.
+
+ :param server_hostname:
+ When SNI is supported, the expected hostname of the certificate
+ :param ssl_context:
+ A pre-made :class:`SSLContext` object. If none is provided, one will
+ be created using :func:`create_urllib3_context`.
+ :param ciphers:
+ A string of ciphers we wish the client to support. This is not
+ supported on Python 2.6 as the ssl module does not support it.
+ :param ca_cert_dir:
+ A directory containing CA certificates in multiple separate files, as
+ supported by OpenSSL's -CApath flag or the capath argument to
+ SSLContext.load_verify_locations().
+ """
+ context = ssl_context
+ if context is None:
+ context = create_urllib3_context(ssl_version, cert_reqs,
+ ciphers=ciphers)
+
+ if ca_certs or ca_cert_dir:
+ try:
+ context.load_verify_locations(ca_certs, ca_cert_dir)
+ except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2
+ raise SSLError(e)
+ # Py33 raises FileNotFoundError which subclasses OSError
+ # These are not equivalent unless we check the errno attribute
+ except OSError as e: # Platform-specific: Python 3.3 and beyond
+ if e.errno == errno.ENOENT:
+ raise SSLError(e)
+ raise
+
+ if certfile:
+ context.load_cert_chain(certfile, keyfile)
+ if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI
+ return context.wrap_socket(sock, server_hostname=server_hostname)
+
+ warnings.warn(
+ 'An HTTPS request has been made, but the SNI (Subject Name '
+ 'Indication) extension to TLS is not available on this platform. '
+ 'This may cause the server to present an incorrect TLS '
+ 'certificate, which can cause validation failures. For more '
+ 'information, see '
+ 'https://urllib3.readthedocs.org/en/latest/security.html'
+ '#snimissingwarning.',
+ SNIMissingWarning
+ )
+ return context.wrap_socket(sock)
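+
+# A minimal, hedged sketch of wrapping a client socket with the helpers above
+# (the hostname and CA bundle path are placeholders):
+#
+#     >>> import socket
+#     >>> raw = socket.create_connection(('example.com', 443))
+#     >>> tls = ssl_wrap_socket(raw, ca_certs='/path/to/cacert.pem',
+#     ...                       server_hostname='example.com')
+#     >>> tls.close()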
diff --git a/third_party/python/requests/requests/packages/urllib3/util/timeout.py b/third_party/python/requests/requests/packages/urllib3/util/timeout.py
new file mode 100644
index 0000000000..ff62f4764d
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/util/timeout.py
@@ -0,0 +1,242 @@
+from __future__ import absolute_import
+# The default socket timeout, used by httplib to indicate that no timeout was
+# specified by the user
+from socket import _GLOBAL_DEFAULT_TIMEOUT
+import time
+
+from ..exceptions import TimeoutStateError
+
+# A sentinel value to indicate that no timeout was specified by the user in
+# urllib3
+_Default = object()
+
+
+def current_time():
+ """
+ Retrieve the current time. This function is mocked out in unit testing.
+ """
+ return time.time()
+
+
+class Timeout(object):
+ """ Timeout configuration.
+
+ Timeouts can be defined as a default for a pool::
+
+ timeout = Timeout(connect=2.0, read=7.0)
+ http = PoolManager(timeout=timeout)
+ response = http.request('GET', 'http://example.com/')
+
+ Or per-request (which overrides the default for the pool)::
+
+ response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
+
+ Timeouts can be disabled by setting all the parameters to ``None``::
+
+ no_timeout = Timeout(connect=None, read=None)
+        response = http.request('GET', 'http://example.com/', timeout=no_timeout)
+
+
+ :param total:
+ This combines the connect and read timeouts into one; the read timeout
+ will be set to the time leftover from the connect attempt. In the
+ event that both a connect timeout and a total are specified, or a read
+ timeout and a total are specified, the shorter timeout will be applied.
+
+ Defaults to None.
+
+ :type total: integer, float, or None
+
+ :param connect:
+ The maximum amount of time to wait for a connection attempt to a server
+ to succeed. Omitting the parameter will default the connect timeout to
+ the system default, probably `the global default timeout in socket.py
+ <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
+ None will set an infinite timeout for connection attempts.
+
+ :type connect: integer, float, or None
+
+ :param read:
+ The maximum amount of time to wait between consecutive
+ read operations for a response from the server. Omitting
+ the parameter will default the read timeout to the system
+ default, probably `the global default timeout in socket.py
+ <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
+ None will set an infinite timeout.
+
+ :type read: integer, float, or None
+
+ .. note::
+
+ Many factors can affect the total amount of time for urllib3 to return
+ an HTTP response.
+
+ For example, Python's DNS resolver does not obey the timeout specified
+ on the socket. Other factors that can affect total request time include
+ high CPU load, high swap, the program running at a low priority level,
+ or other behaviors.
+
+ In addition, the read and total timeouts only measure the time between
+ read operations on the socket connecting the client and the server,
+ not the total amount of time for the request to return a complete
+ response. For most requests, the timeout is raised because the server
+ has not sent the first byte in the specified time. This is not always
+ the case; if a server streams one byte every fifteen seconds, a timeout
+ of 20 seconds will not trigger, even though the request will take
+ several minutes to complete.
+
+ If your goal is to cut off any request after a set amount of wall clock
+ time, consider having a second "watcher" thread to cut off a slow
+ request.
+ """
+
+ #: A sentinel object representing the default timeout value
+ DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
+
+ def __init__(self, total=None, connect=_Default, read=_Default):
+ self._connect = self._validate_timeout(connect, 'connect')
+ self._read = self._validate_timeout(read, 'read')
+ self.total = self._validate_timeout(total, 'total')
+ self._start_connect = None
+
+ def __str__(self):
+ return '%s(connect=%r, read=%r, total=%r)' % (
+ type(self).__name__, self._connect, self._read, self.total)
+
+ @classmethod
+ def _validate_timeout(cls, value, name):
+ """ Check that a timeout attribute is valid.
+
+ :param value: The timeout value to validate
+ :param name: The name of the timeout attribute to validate. This is
+ used to specify in error messages.
+ :return: The validated and casted version of the given value.
+ :raises ValueError: If the type is not an integer or a float, or if it
+ is a numeric value less than zero.
+ """
+ if value is _Default:
+ return cls.DEFAULT_TIMEOUT
+
+ if value is None or value is cls.DEFAULT_TIMEOUT:
+ return value
+
+ try:
+ float(value)
+ except (TypeError, ValueError):
+ raise ValueError("Timeout value %s was %s, but it must be an "
+ "int or float." % (name, value))
+
+ try:
+ if value < 0:
+ raise ValueError("Attempted to set %s timeout to %s, but the "
+ "timeout cannot be set to a value less "
+ "than 0." % (name, value))
+ except TypeError: # Python 3
+ raise ValueError("Timeout value %s was %s, but it must be an "
+ "int or float." % (name, value))
+
+ return value
+
+ @classmethod
+ def from_float(cls, timeout):
+ """ Create a new Timeout from a legacy timeout value.
+
+ The timeout value used by httplib.py sets the same timeout on the
+        connect() and recv() socket operations. This creates a :class:`Timeout`
+ object that sets the individual timeouts to the ``timeout`` value
+ passed to this function.
+
+ :param timeout: The legacy timeout value.
+ :type timeout: integer, float, sentinel default object, or None
+ :return: Timeout object
+ :rtype: :class:`Timeout`
+ """
+ return Timeout(read=timeout, connect=timeout)
+
+ def clone(self):
+ """ Create a copy of the timeout object
+
+ Timeout properties are stored per-pool but each request needs a fresh
+ Timeout object to ensure each one has its own start/stop configured.
+
+ :return: a copy of the timeout object
+ :rtype: :class:`Timeout`
+ """
+ # We can't use copy.deepcopy because that will also create a new object
+ # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
+ # detect the user default.
+ return Timeout(connect=self._connect, read=self._read,
+ total=self.total)
+
+ def start_connect(self):
+ """ Start the timeout clock, used during a connect() attempt
+
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
+ to start a timer that has been started already.
+ """
+ if self._start_connect is not None:
+ raise TimeoutStateError("Timeout timer has already been started.")
+ self._start_connect = current_time()
+ return self._start_connect
+
+ def get_connect_duration(self):
+ """ Gets the time elapsed since the call to :meth:`start_connect`.
+
+ :return: Elapsed time.
+ :rtype: float
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
+ to get duration for a timer that hasn't been started.
+ """
+ if self._start_connect is None:
+ raise TimeoutStateError("Can't get connect duration for timer "
+ "that has not started.")
+ return current_time() - self._start_connect
+
+ @property
+ def connect_timeout(self):
+ """ Get the value to use when setting a connection timeout.
+
+ This will be a positive float or integer, the value None
+ (never timeout), or the default system timeout.
+
+ :return: Connect timeout.
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+ """
+ if self.total is None:
+ return self._connect
+
+ if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
+ return self.total
+
+ return min(self._connect, self.total)
+
+ @property
+ def read_timeout(self):
+ """ Get the value for the read timeout.
+
+ This assumes some time has elapsed in the connection timeout and
+ computes the read timeout appropriately.
+
+ If self.total is set, the read timeout is dependent on the amount of
+        time taken by the connect timeout. If the connect timer has not been
+        started, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
+ raised.
+
+ :return: Value to use for the read timeout.
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+ :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
+ has not yet been called on this object.
+ """
+ if (self.total is not None and
+ self.total is not self.DEFAULT_TIMEOUT and
+ self._read is not None and
+ self._read is not self.DEFAULT_TIMEOUT):
+ # In case the connect timeout has not yet been established.
+ if self._start_connect is None:
+ return self._read
+ return max(0, min(self.total - self.get_connect_duration(),
+ self._read))
+ elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
+ return max(0, self.total - self.get_connect_duration())
+ else:
+ return self._read
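+
+# An illustrative sketch of how ``total`` caps the individual timeouts:
+#
+#     >>> t = Timeout(connect=5, read=10, total=7)
+#     >>> t.connect_timeout   # min(connect, total)
+#     5
+#     >>> _ = t.start_connect()
+#     >>> # read_timeout is now min(total - elapsed connect time, read),
+#     >>> # i.e. a little under 7 seconds right after start_connect().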
diff --git a/third_party/python/requests/requests/packages/urllib3/util/url.py b/third_party/python/requests/requests/packages/urllib3/util/url.py
new file mode 100644
index 0000000000..e996204a07
--- /dev/null
+++ b/third_party/python/requests/requests/packages/urllib3/util/url.py
@@ -0,0 +1,217 @@
+from __future__ import absolute_import
+from collections import namedtuple
+
+from ..exceptions import LocationParseError
+
+
+url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']
+
+
+class Url(namedtuple('Url', url_attrs)):
+ """
+    Data structure for representing an HTTP URL. Used as a return value for
+ :func:`parse_url`.
+ """
+    __slots__ = ()
+
+ def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
+ query=None, fragment=None):
+ if path and not path.startswith('/'):
+ path = '/' + path
+ return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
+ query, fragment)
+
+ @property
+ def hostname(self):
+ """For backwards-compatibility with urlparse. We're nice like that."""
+ return self.host
+
+ @property
+ def request_uri(self):
+ """Absolute path including the query string."""
+ uri = self.path or '/'
+
+ if self.query is not None:
+ uri += '?' + self.query
+
+ return uri
+
+ @property
+ def netloc(self):
+ """Network location including host and port"""
+ if self.port:
+ return '%s:%d' % (self.host, self.port)
+ return self.host
+
+ @property
+ def url(self):
+ """
+ Convert self into a url
+
+ This function should more or less round-trip with :func:`.parse_url`. The
+ returned url may not be exactly the same as the url inputted to
+ :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
+ with a blank port will have : removed).
+
+ Example: ::
+
+ >>> U = parse_url('http://google.com/mail/')
+ >>> U.url
+ 'http://google.com/mail/'
+ >>> Url('http', 'username:password', 'host.com', 80,
+ ... '/path', 'query', 'fragment').url
+ 'http://username:password@host.com:80/path?query#fragment'
+ """
+ scheme, auth, host, port, path, query, fragment = self
+ url = ''
+
+        # We use "is not None" because we want things to happen with empty strings (or 0 port)
+ if scheme is not None:
+ url += scheme + '://'
+ if auth is not None:
+ url += auth + '@'
+ if host is not None:
+ url += host
+ if port is not None:
+ url += ':' + str(port)
+ if path is not None:
+ url += path
+ if query is not None:
+ url += '?' + query
+ if fragment is not None:
+ url += '#' + fragment
+
+ return url
+
+ def __str__(self):
+ return self.url
+
+
+def split_first(s, delims):
+ """
+ Given a string and an iterable of delimiters, split on the first found
+ delimiter. Return two split parts and the matched delimiter.
+
+ If not found, then the first part is the full input string.
+
+ Example::
+
+ >>> split_first('foo/bar?baz', '?/=')
+ ('foo', 'bar?baz', '/')
+ >>> split_first('foo/bar?baz', '123')
+ ('foo/bar?baz', '', None)
+
+    Scales linearly with the number of delims; not ideal for a large number of delims.
+ """
+ min_idx = None
+ min_delim = None
+ for d in delims:
+ idx = s.find(d)
+ if idx < 0:
+ continue
+
+ if min_idx is None or idx < min_idx:
+ min_idx = idx
+ min_delim = d
+
+ if min_idx is None or min_idx < 0:
+ return s, '', None
+
+ return s[:min_idx], s[min_idx + 1:], min_delim
+
+
+def parse_url(url):
+ """
+ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
+ performed to parse incomplete urls. Fields not provided will be None.
+
+ Partly backwards-compatible with :mod:`urlparse`.
+
+ Example::
+
+ >>> parse_url('http://google.com/mail/')
+ Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
+ >>> parse_url('google.com:80')
+ Url(scheme=None, host='google.com', port=80, path=None, ...)
+ >>> parse_url('/foo?bar')
+ Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
+ """
+
+ # While this code has overlap with stdlib's urlparse, it is much
+ # simplified for our needs and less annoying.
+    # Additionally, this implementation does silly things to be optimal
+ # on CPython.
+
+ if not url:
+ # Empty
+ return Url()
+
+ scheme = None
+ auth = None
+ host = None
+ port = None
+ path = None
+ fragment = None
+ query = None
+
+ # Scheme
+ if '://' in url:
+ scheme, url = url.split('://', 1)
+
+ # Find the earliest Authority Terminator
+ # (http://tools.ietf.org/html/rfc3986#section-3.2)
+ url, path_, delim = split_first(url, ['/', '?', '#'])
+
+ if delim:
+ # Reassemble the path
+ path = delim + path_
+
+ # Auth
+ if '@' in url:
+ # Last '@' denotes end of auth part
+ auth, url = url.rsplit('@', 1)
+
+ # IPv6
+ if url and url[0] == '[':
+ host, url = url.split(']', 1)
+ host += ']'
+
+ # Port
+ if ':' in url:
+ _host, port = url.split(':', 1)
+
+ if not host:
+ host = _host
+
+ if port:
+ # If given, ports must be integers.
+ if not port.isdigit():
+ raise LocationParseError(url)
+ port = int(port)
+ else:
+ # Blank ports are cool, too. (rfc3986#section-3.2.3)
+ port = None
+
+ elif not host and url:
+ host = url
+
+ if not path:
+ return Url(scheme, auth, host, port, path, query, fragment)
+
+ # Fragment
+ if '#' in path:
+ path, fragment = path.split('#', 1)
+
+ # Query
+ if '?' in path:
+ path, query = path.split('?', 1)
+
+ return Url(scheme, auth, host, port, path, query, fragment)
+
+
+def get_host(url):
+ """
+ Deprecated. Use :func:`.parse_url` instead.
+ """
+ p = parse_url(url)
+ return p.scheme or 'http', p.hostname, p.port
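+
+# For illustration, get_host() reduces a URL to (scheme, host, port), with
+# the scheme defaulting to 'http':
+#
+#     >>> get_host('https://example.com:8443/path?query')
+#     ('https', 'example.com', 8443)
+#     >>> get_host('example.com/index.html')
+#     ('http', 'example.com', None)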
diff --git a/third_party/python/requests/requests/sessions.py b/third_party/python/requests/requests/sessions.py
new file mode 100644
index 0000000000..9eaa36ae43
--- /dev/null
+++ b/third_party/python/requests/requests/sessions.py
@@ -0,0 +1,680 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.sessions
+~~~~~~~~~~~~~~~~~
+
+This module provides a Session object to manage and persist settings across
+requests (cookies, auth, proxies).
+
+"""
+import os
+from collections import Mapping
+from datetime import datetime
+
+from .auth import _basic_auth_str
+from .compat import cookielib, OrderedDict, urljoin, urlparse
+from .cookies import (
+ cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
+from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
+from .hooks import default_hooks, dispatch_hook
+from .utils import to_key_val_list, default_headers, to_native_string
+from .exceptions import (
+ TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
+from .packages.urllib3._collections import RecentlyUsedContainer
+from .structures import CaseInsensitiveDict
+
+from .adapters import HTTPAdapter
+
+from .utils import (
+ requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
+ get_auth_from_url
+)
+
+from .status_codes import codes
+
+# formerly defined here, reexposed here for backward compatibility
+from .models import REDIRECT_STATI
+
+REDIRECT_CACHE_SIZE = 1000
+
+
+def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
+ """
+ Determines appropriate setting for a given request, taking into account the
+    explicit setting on that request and the setting in the session. If a
+    setting is a dictionary, the two settings will be merged using `dict_class`.
+ """
+
+ if session_setting is None:
+ return request_setting
+
+ if request_setting is None:
+ return session_setting
+
+ # Bypass if not a dictionary (e.g. verify)
+ if not (
+ isinstance(session_setting, Mapping) and
+ isinstance(request_setting, Mapping)
+ ):
+ return request_setting
+
+ merged_setting = dict_class(to_key_val_list(session_setting))
+ merged_setting.update(to_key_val_list(request_setting))
+
+ # Remove keys that are set to None. Extract keys first to avoid altering
+ # the dictionary during iteration.
+ none_keys = [k for (k, v) in merged_setting.items() if v is None]
+ for key in none_keys:
+ del merged_setting[key]
+
+ return merged_setting
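+
+# A small illustration (the header names are arbitrary): request values win
+# over session values, and keys explicitly set to None on the request are
+# dropped from the merged result:
+#
+#     >>> merged = merge_setting({'Accept': 'application/json', 'X-Extra': None},
+#     ...                        {'Accept': '*/*', 'User-Agent': 'demo'})
+#     >>> dict(merged) == {'Accept': 'application/json', 'User-Agent': 'demo'}
+#     True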
+
+
+def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
+ """
+ Properly merges both requests and session hooks.
+
+ This is necessary because when request_hooks == {'response': []}, the
+ merge breaks Session hooks entirely.
+ """
+ if session_hooks is None or session_hooks.get('response') == []:
+ return request_hooks
+
+ if request_hooks is None or request_hooks.get('response') == []:
+ return session_hooks
+
+ return merge_setting(request_hooks, session_hooks, dict_class)
+
+
+class SessionRedirectMixin(object):
+ def resolve_redirects(self, resp, req, stream=False, timeout=None,
+ verify=True, cert=None, proxies=None, **adapter_kwargs):
+ """Receives a Response. Returns a generator of Responses."""
+
+ i = 0
+ hist = [] # keep track of history
+
+ while resp.is_redirect:
+ prepared_request = req.copy()
+
+ if i > 0:
+ # Update history and keep track of redirects.
+ hist.append(resp)
+ new_hist = list(hist)
+ resp.history = new_hist
+
+ try:
+ resp.content # Consume socket so it can be released
+ except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
+ resp.raw.read(decode_content=False)
+
+ if i >= self.max_redirects:
+ raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects)
+
+ # Release the connection back into the pool.
+ resp.close()
+
+ url = resp.headers['location']
+ method = req.method
+
+ # Handle redirection without scheme (see: RFC 1808 Section 4)
+ if url.startswith('//'):
+ parsed_rurl = urlparse(resp.url)
+ url = '%s:%s' % (parsed_rurl.scheme, url)
+
+ # The scheme should be lower case...
+ parsed = urlparse(url)
+ url = parsed.geturl()
+
+ # Facilitate relative 'location' headers, as allowed by RFC 7231.
+ # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
+            # To be compliant with RFC 3986, we percent-encode the url.
+ if not parsed.netloc:
+ url = urljoin(resp.url, requote_uri(url))
+ else:
+ url = requote_uri(url)
+
+ prepared_request.url = to_native_string(url)
+ # Cache the url, unless it redirects to itself.
+ if resp.is_permanent_redirect and req.url != prepared_request.url:
+ self.redirect_cache[req.url] = prepared_request.url
+
+ # http://tools.ietf.org/html/rfc7231#section-6.4.4
+ if (resp.status_code == codes.see_other and
+ method != 'HEAD'):
+ method = 'GET'
+
+ # Do what the browsers do, despite standards...
+ # First, turn 302s into GETs.
+ if resp.status_code == codes.found and method != 'HEAD':
+ method = 'GET'
+
+ # Second, if a POST is responded to with a 301, turn it into a GET.
+ # This bizarre behaviour is explained in Issue 1704.
+ if resp.status_code == codes.moved and method == 'POST':
+ method = 'GET'
+
+ prepared_request.method = method
+
+ # https://github.com/kennethreitz/requests/issues/1084
+ if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
+ if 'Content-Length' in prepared_request.headers:
+ del prepared_request.headers['Content-Length']
+
+ prepared_request.body = None
+
+ headers = prepared_request.headers
+ try:
+ del headers['Cookie']
+ except KeyError:
+ pass
+
+ # Extract any cookies sent on the response to the cookiejar
+ # in the new request. Because we've mutated our copied prepared
+ # request, use the old one that we haven't yet touched.
+ extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
+ prepared_request._cookies.update(self.cookies)
+ prepared_request.prepare_cookies(prepared_request._cookies)
+
+ # Rebuild auth and proxy information.
+ proxies = self.rebuild_proxies(prepared_request, proxies)
+ self.rebuild_auth(prepared_request, resp)
+
+ # Override the original request.
+ req = prepared_request
+
+ resp = self.send(
+ req,
+ stream=stream,
+ timeout=timeout,
+ verify=verify,
+ cert=cert,
+ proxies=proxies,
+ allow_redirects=False,
+ **adapter_kwargs
+ )
+
+ extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
+
+ i += 1
+ yield resp
+
+ def rebuild_auth(self, prepared_request, response):
+ """
+ When being redirected we may want to strip authentication from the
+ request to avoid leaking credentials. This method intelligently removes
+ and reapplies authentication where possible to avoid credential loss.
+ """
+ headers = prepared_request.headers
+ url = prepared_request.url
+
+ if 'Authorization' in headers:
+ # If we get redirected to a new host, we should strip out any
+ # authentication headers.
+ original_parsed = urlparse(response.request.url)
+ redirect_parsed = urlparse(url)
+
+ if (original_parsed.hostname != redirect_parsed.hostname):
+ del headers['Authorization']
+
+ # .netrc might have more auth for us on our new host.
+ new_auth = get_netrc_auth(url) if self.trust_env else None
+ if new_auth is not None:
+ prepared_request.prepare_auth(new_auth)
+
+ return
+
+ def rebuild_proxies(self, prepared_request, proxies):
+ """
+ This method re-evaluates the proxy configuration by considering the
+ environment variables. If we are redirected to a URL covered by
+ NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
+ proxy keys for this URL (in case they were stripped by a previous
+ redirect).
+
+ This method also replaces the Proxy-Authorization header where
+ necessary.
+ """
+ headers = prepared_request.headers
+ url = prepared_request.url
+ scheme = urlparse(url).scheme
+ new_proxies = proxies.copy() if proxies is not None else {}
+
+ if self.trust_env and not should_bypass_proxies(url):
+ environ_proxies = get_environ_proxies(url)
+
+ proxy = environ_proxies.get(scheme)
+
+ if proxy:
+ new_proxies.setdefault(scheme, environ_proxies[scheme])
+
+ if 'Proxy-Authorization' in headers:
+ del headers['Proxy-Authorization']
+
+ try:
+ username, password = get_auth_from_url(new_proxies[scheme])
+ except KeyError:
+ username, password = None, None
+
+ if username and password:
+ headers['Proxy-Authorization'] = _basic_auth_str(username, password)
+
+ return new_proxies
+
+
+class Session(SessionRedirectMixin):
+ """A Requests session.
+
+ Provides cookie persistence, connection-pooling, and configuration.
+
+ Basic Usage::
+
+ >>> import requests
+ >>> s = requests.Session()
+ >>> s.get('http://httpbin.org/get')
+ <Response [200]>
+
+ Or as a context manager::
+
+ >>> with requests.Session() as s:
+ >>> s.get('http://httpbin.org/get')
+ <Response [200]>
+ """
+
+ __attrs__ = [
+ 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
+ 'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
+ 'max_redirects',
+ ]
+
+ def __init__(self):
+
+ #: A case-insensitive dictionary of headers to be sent on each
+ #: :class:`Request <Request>` sent from this
+ #: :class:`Session <Session>`.
+ self.headers = default_headers()
+
+ #: Default Authentication tuple or object to attach to
+ #: :class:`Request <Request>`.
+ self.auth = None
+
+ #: Dictionary mapping protocol or protocol and host to the URL of the proxy
+ #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
+ #: be used on each :class:`Request <Request>`.
+ self.proxies = {}
+
+ #: Event-handling hooks.
+ self.hooks = default_hooks()
+
+ #: Dictionary of querystring data to attach to each
+ #: :class:`Request <Request>`. The dictionary values may be lists for
+ #: representing multivalued query parameters.
+ self.params = {}
+
+ #: Stream response content default.
+ self.stream = False
+
+ #: SSL Verification default.
+ self.verify = True
+
+ #: SSL certificate default.
+ self.cert = None
+
+ #: Maximum number of redirects allowed. If the request exceeds this
+ #: limit, a :class:`TooManyRedirects` exception is raised.
+ self.max_redirects = DEFAULT_REDIRECT_LIMIT
+
+ #: Trust environment settings for proxy configuration, default
+ #: authentication and similar.
+ self.trust_env = True
+
+ #: A CookieJar containing all currently outstanding cookies set on this
+ #: session. By default it is a
+ #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
+ #: may be any other ``cookielib.CookieJar`` compatible object.
+ self.cookies = cookiejar_from_dict({})
+
+ # Default connection adapters.
+ self.adapters = OrderedDict()
+ self.mount('https://', HTTPAdapter())
+ self.mount('http://', HTTPAdapter())
+
+        # Only store 1000 redirects to keep memory use bounded
+ self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+ def prepare_request(self, request):
+ """Constructs a :class:`PreparedRequest <PreparedRequest>` for
+ transmission and returns it. The :class:`PreparedRequest` has settings
+ merged from the :class:`Request <Request>` instance and those of the
+ :class:`Session`.
+
+ :param request: :class:`Request` instance to prepare with this
+ session's settings.
+ """
+ cookies = request.cookies or {}
+
+ # Bootstrap CookieJar.
+ if not isinstance(cookies, cookielib.CookieJar):
+ cookies = cookiejar_from_dict(cookies)
+
+ # Merge with session cookies
+ merged_cookies = merge_cookies(
+ merge_cookies(RequestsCookieJar(), self.cookies), cookies)
+
+ # Set environment's basic authentication if not explicitly set.
+ auth = request.auth
+ if self.trust_env and not auth and not self.auth:
+ auth = get_netrc_auth(request.url)
+
+ p = PreparedRequest()
+ p.prepare(
+ method=request.method.upper(),
+ url=request.url,
+ files=request.files,
+ data=request.data,
+ json=request.json,
+ headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
+ params=merge_setting(request.params, self.params),
+ auth=merge_setting(auth, self.auth),
+ cookies=merged_cookies,
+ hooks=merge_hooks(request.hooks, self.hooks),
+ )
+ return p
+
+ def request(self, method, url,
+ params=None,
+ data=None,
+ headers=None,
+ cookies=None,
+ files=None,
+ auth=None,
+ timeout=None,
+ allow_redirects=True,
+ proxies=None,
+ hooks=None,
+ stream=None,
+ verify=None,
+ cert=None,
+ json=None):
+ """Constructs a :class:`Request <Request>`, prepares it and sends it.
+ Returns :class:`Response <Response>` object.
+
+ :param method: method for the new :class:`Request` object.
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary or bytes to be sent in the query
+ string for the :class:`Request`.
+ :param data: (optional) Dictionary, bytes, or file-like object to send
+ in the body of the :class:`Request`.
+ :param json: (optional) json to send in the body of the
+ :class:`Request`.
+ :param headers: (optional) Dictionary of HTTP Headers to send with the
+ :class:`Request`.
+ :param cookies: (optional) Dict or CookieJar object to send with the
+ :class:`Request`.
+ :param files: (optional) Dictionary of ``'filename': file-like-objects``
+ for multipart encoding upload.
+ :param auth: (optional) Auth tuple or callable to enable
+ Basic/Digest/Custom HTTP Auth.
+ :param timeout: (optional) How long to wait for the server to send
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+ :param allow_redirects: (optional) Set to True by default.
+ :type allow_redirects: bool
+ :param proxies: (optional) Dictionary mapping protocol or protocol and
+ hostname to the URL of the proxy.
+ :param stream: (optional) whether to immediately download the response
+ content. Defaults to ``False``.
+ :param verify: (optional) whether the SSL cert will be verified.
+ A CA_BUNDLE path can also be provided. Defaults to ``True``.
+ :param cert: (optional) if String, path to ssl client cert file (.pem).
+ If Tuple, ('cert', 'key') pair.
+ """
+ # Create the Request.
+ req = Request(
+ method = method.upper(),
+ url = url,
+ headers = headers,
+ files = files,
+ data = data or {},
+ json = json,
+ params = params or {},
+ auth = auth,
+ cookies = cookies,
+ hooks = hooks,
+ )
+ prep = self.prepare_request(req)
+
+ proxies = proxies or {}
+
+ settings = self.merge_environment_settings(
+ prep.url, proxies, stream, verify, cert
+ )
+
+ # Send the request.
+ send_kwargs = {
+ 'timeout': timeout,
+ 'allow_redirects': allow_redirects,
+ }
+ send_kwargs.update(settings)
+ resp = self.send(prep, **send_kwargs)
+
+ return resp
+
+ def get(self, url, **kwargs):
+ """Sends a GET request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return self.request('GET', url, **kwargs)
+
+ def options(self, url, **kwargs):
+        """Sends an OPTIONS request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return self.request('OPTIONS', url, **kwargs)
+
+ def head(self, url, **kwargs):
+ """Sends a HEAD request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ kwargs.setdefault('allow_redirects', False)
+ return self.request('HEAD', url, **kwargs)
+
+ def post(self, url, data=None, json=None, **kwargs):
+ """Sends a POST request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param json: (optional) json to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ return self.request('POST', url, data=data, json=json, **kwargs)
+
+ def put(self, url, data=None, **kwargs):
+ """Sends a PUT request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ return self.request('PUT', url, data=data, **kwargs)
+
+ def patch(self, url, data=None, **kwargs):
+ """Sends a PATCH request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ return self.request('PATCH', url, data=data, **kwargs)
+
+ def delete(self, url, **kwargs):
+ """Sends a DELETE request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ return self.request('DELETE', url, **kwargs)
+
+ def send(self, request, **kwargs):
+ """Send a given PreparedRequest."""
+ # Set defaults that the hooks can utilize to ensure they always have
+ # the correct parameters to reproduce the previous request.
+ kwargs.setdefault('stream', self.stream)
+ kwargs.setdefault('verify', self.verify)
+ kwargs.setdefault('cert', self.cert)
+ kwargs.setdefault('proxies', self.proxies)
+
+ # It's possible that users might accidentally send a Request object.
+ # Guard against that specific failure case.
+ if not isinstance(request, PreparedRequest):
+ raise ValueError('You can only send PreparedRequests.')
+
+ checked_urls = set()
+ while request.url in self.redirect_cache:
+ checked_urls.add(request.url)
+ new_url = self.redirect_cache.get(request.url)
+ if new_url in checked_urls:
+ break
+ request.url = new_url
+
+ # Set up variables needed for resolve_redirects and dispatching of hooks
+ allow_redirects = kwargs.pop('allow_redirects', True)
+ stream = kwargs.get('stream')
+ hooks = request.hooks
+
+ # Get the appropriate adapter to use
+ adapter = self.get_adapter(url=request.url)
+
+ # Start time (approximately) of the request
+ start = datetime.utcnow()
+
+ # Send the request
+ r = adapter.send(request, **kwargs)
+
+ # Total elapsed time of the request (approximately)
+ r.elapsed = datetime.utcnow() - start
+
+ # Response manipulation hooks
+ r = dispatch_hook('response', hooks, r, **kwargs)
+
+ # Persist cookies
+ if r.history:
+
+ # If the hooks create history then we want those cookies too
+ for resp in r.history:
+ extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
+
+ extract_cookies_to_jar(self.cookies, request, r.raw)
+
+ # Redirect resolving generator.
+ gen = self.resolve_redirects(r, request, **kwargs)
+
+ # Resolve redirects if allowed.
+ history = [resp for resp in gen] if allow_redirects else []
+
+ # Shuffle things around if there's history.
+ if history:
+ # Insert the first (original) request at the start
+ history.insert(0, r)
+ # Get the last request made
+ r = history.pop()
+ r.history = history
+
+ if not stream:
+ r.content
+
+ return r
+
+ def merge_environment_settings(self, url, proxies, stream, verify, cert):
+ """Check the environment and merge it with some settings."""
+ # Gather clues from the surrounding environment.
+ if self.trust_env:
+ # Set environment's proxies.
+ env_proxies = get_environ_proxies(url) or {}
+ for (k, v) in env_proxies.items():
+ proxies.setdefault(k, v)
+
+ # Look for requests environment configuration and be compatible
+ # with cURL.
+ if verify is True or verify is None:
+ verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
+ os.environ.get('CURL_CA_BUNDLE'))
+
+ # Merge all the kwargs.
+ proxies = merge_setting(proxies, self.proxies)
+ stream = merge_setting(stream, self.stream)
+ verify = merge_setting(verify, self.verify)
+ cert = merge_setting(cert, self.cert)
+
+ return {'verify': verify, 'proxies': proxies, 'stream': stream,
+ 'cert': cert}
+
+ def get_adapter(self, url):
+ """Returns the appropriate connection adapter for the given URL."""
+ for (prefix, adapter) in self.adapters.items():
+
+ if url.lower().startswith(prefix):
+ return adapter
+
+ # Nothing matches :-/
+ raise InvalidSchema("No connection adapters were found for '%s'" % url)
+
+ def close(self):
+        """Closes all adapters and, as such, the session."""
+ for v in self.adapters.values():
+ v.close()
+
+ def mount(self, prefix, adapter):
+ """Registers a connection adapter to a prefix.
+
+ Adapters are sorted in descending order by key length."""
+
+ self.adapters[prefix] = adapter
+ keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
+
+ for key in keys_to_move:
+ self.adapters[key] = self.adapters.pop(key)
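+
+    # An illustrative sketch of the resulting prefix matching: after mounting
+    # a longer prefix, get_adapter() prefers it over the default 'https://'
+    # adapter for matching URLs.
+    #
+    #     >>> s = Session()
+    #     >>> api_adapter = HTTPAdapter()
+    #     >>> s.mount('https://api.example.com', api_adapter)
+    #     >>> s.get_adapter('https://api.example.com/v1') is api_adapter
+    #     True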
+
+ def __getstate__(self):
+ state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
+ state['redirect_cache'] = dict(self.redirect_cache)
+ return state
+
+ def __setstate__(self, state):
+ redirect_cache = state.pop('redirect_cache', {})
+ for attr, value in state.items():
+ setattr(self, attr, value)
+
+ self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
+ for redirect, to in redirect_cache.items():
+ self.redirect_cache[redirect] = to
+
+
+def session():
+ """Returns a :class:`Session` for context-management."""
+
+ return Session()
diff --git a/third_party/python/requests/requests/status_codes.py b/third_party/python/requests/requests/status_codes.py
new file mode 100644
index 0000000000..a852574a45
--- /dev/null
+++ b/third_party/python/requests/requests/status_codes.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+from .structures import LookupDict
+
+_codes = {
+
+ # Informational.
+ 100: ('continue',),
+ 101: ('switching_protocols',),
+ 102: ('processing',),
+ 103: ('checkpoint',),
+ 122: ('uri_too_long', 'request_uri_too_long'),
+ 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
+ 201: ('created',),
+ 202: ('accepted',),
+ 203: ('non_authoritative_info', 'non_authoritative_information'),
+ 204: ('no_content',),
+ 205: ('reset_content', 'reset'),
+ 206: ('partial_content', 'partial'),
+ 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
+ 208: ('already_reported',),
+ 226: ('im_used',),
+
+ # Redirection.
+ 300: ('multiple_choices',),
+ 301: ('moved_permanently', 'moved', '\\o-'),
+ 302: ('found',),
+ 303: ('see_other', 'other'),
+ 304: ('not_modified',),
+ 305: ('use_proxy',),
+ 306: ('switch_proxy',),
+ 307: ('temporary_redirect', 'temporary_moved', 'temporary'),
+ 308: ('permanent_redirect',
+ 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0
+
+ # Client Error.
+ 400: ('bad_request', 'bad'),
+ 401: ('unauthorized',),
+ 402: ('payment_required', 'payment'),
+ 403: ('forbidden',),
+ 404: ('not_found', '-o-'),
+ 405: ('method_not_allowed', 'not_allowed'),
+ 406: ('not_acceptable',),
+ 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
+ 408: ('request_timeout', 'timeout'),
+ 409: ('conflict',),
+ 410: ('gone',),
+ 411: ('length_required',),
+ 412: ('precondition_failed', 'precondition'),
+ 413: ('request_entity_too_large',),
+ 414: ('request_uri_too_large',),
+ 415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
+ 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
+ 417: ('expectation_failed',),
+ 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
+ 422: ('unprocessable_entity', 'unprocessable'),
+ 423: ('locked',),
+ 424: ('failed_dependency', 'dependency'),
+ 425: ('unordered_collection', 'unordered'),
+ 426: ('upgrade_required', 'upgrade'),
+ 428: ('precondition_required', 'precondition'),
+ 429: ('too_many_requests', 'too_many'),
+ 431: ('header_fields_too_large', 'fields_too_large'),
+ 444: ('no_response', 'none'),
+ 449: ('retry_with', 'retry'),
+ 450: ('blocked_by_windows_parental_controls', 'parental_controls'),
+ 451: ('unavailable_for_legal_reasons', 'legal_reasons'),
+ 499: ('client_closed_request',),
+
+ # Server Error.
+ 500: ('internal_server_error', 'server_error', '/o\\', '✗'),
+ 501: ('not_implemented',),
+ 502: ('bad_gateway',),
+ 503: ('service_unavailable', 'unavailable'),
+ 504: ('gateway_timeout',),
+ 505: ('http_version_not_supported', 'http_version'),
+ 506: ('variant_also_negotiates',),
+ 507: ('insufficient_storage',),
+ 509: ('bandwidth_limit_exceeded', 'bandwidth'),
+ 510: ('not_extended',),
+ 511: ('network_authentication_required', 'network_auth', 'network_authentication'),
+}
+
+codes = LookupDict(name='status_codes')
+
+for code, titles in _codes.items():
+ for title in titles:
+ setattr(codes, title, code)
+ if not title.startswith('\\'):
+ setattr(codes, title.upper(), code)
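+
+# For illustration, the generated attributes can be used in place of numeric
+# codes, and unknown keys fall through to None via LookupDict:
+#
+#     >>> codes.ok
+#     200
+#     >>> codes.NOT_FOUND == codes.not_found == 404
+#     True
+#     >>> codes['no_such_title'] is None
+#     True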
diff --git a/third_party/python/requests/requests/structures.py b/third_party/python/requests/requests/structures.py
new file mode 100644
index 0000000000..3e5f2faa2e
--- /dev/null
+++ b/third_party/python/requests/requests/structures.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.structures
+~~~~~~~~~~~~~~~~~~~
+
+Data structures that power Requests.
+
+"""
+
+import collections
+
+
+class CaseInsensitiveDict(collections.MutableMapping):
+ """
+ A case-insensitive ``dict``-like object.
+
+ Implements all methods and operations of
+ ``collections.MutableMapping`` as well as dict's ``copy``. Also
+ provides ``lower_items``.
+
+ All keys are expected to be strings. The structure remembers the
+ case of the last key to be set, and ``iter(instance)``,
+ ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
+ will contain case-sensitive keys. However, querying and contains
+ testing is case insensitive::
+
+ cid = CaseInsensitiveDict()
+ cid['Accept'] = 'application/json'
+ cid['aCCEPT'] == 'application/json' # True
+ list(cid) == ['Accept'] # True
+
+ For example, ``headers['content-encoding']`` will return the
+ value of a ``'Content-Encoding'`` response header, regardless
+ of how the header name was originally stored.
+
+ If the constructor, ``.update``, or equality comparison
+ operations are given keys that have equal ``.lower()``s, the
+ behavior is undefined.
+
+ """
+ def __init__(self, data=None, **kwargs):
+ self._store = dict()
+ if data is None:
+ data = {}
+ self.update(data, **kwargs)
+
+ def __setitem__(self, key, value):
+ # Use the lowercased key for lookups, but store the actual
+ # key alongside the value.
+ self._store[key.lower()] = (key, value)
+
+ def __getitem__(self, key):
+ return self._store[key.lower()][1]
+
+ def __delitem__(self, key):
+ del self._store[key.lower()]
+
+ def __iter__(self):
+ return (casedkey for casedkey, mappedvalue in self._store.values())
+
+ def __len__(self):
+ return len(self._store)
+
+ def lower_items(self):
+ """Like iteritems(), but with all lowercase keys."""
+ return (
+ (lowerkey, keyval[1])
+ for (lowerkey, keyval)
+ in self._store.items()
+ )
+
+ def __eq__(self, other):
+ if isinstance(other, collections.Mapping):
+ other = CaseInsensitiveDict(other)
+ else:
+ return NotImplemented
+ # Compare insensitively
+ return dict(self.lower_items()) == dict(other.lower_items())
+
+ # Copy is required
+ def copy(self):
+ return CaseInsensitiveDict(self._store.values())
+
+ def __repr__(self):
+ return str(dict(self.items()))
+
+class LookupDict(dict):
+ """Dictionary lookup object."""
+
+ def __init__(self, name=None):
+ self.name = name
+ super(LookupDict, self).__init__()
+
+ def __repr__(self):
+ return '<lookup \'%s\'>' % (self.name)
+
+ def __getitem__(self, key):
+ # We allow fall-through here, so values default to None
+
+ return self.__dict__.get(key, None)
+
+ def get(self, key, default=None):
+ return self.__dict__.get(key, default)
diff --git a/third_party/python/requests/requests/utils.py b/third_party/python/requests/requests/utils.py
new file mode 100644
index 0000000000..c5c3fd01d9
--- /dev/null
+++ b/third_party/python/requests/requests/utils.py
@@ -0,0 +1,721 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.utils
+~~~~~~~~~~~~~~
+
+This module provides utility functions that are used within Requests
+that are also useful for external consumption.
+
+"""
+
+import cgi
+import codecs
+import collections
+import io
+import os
+import platform
+import re
+import sys
+import socket
+import struct
+import warnings
+
+from . import __version__
+from . import certs
+from .compat import parse_http_list as _parse_list_header
+from .compat import (quote, urlparse, bytes, str, OrderedDict, unquote, is_py2,
+ builtin_str, getproxies, proxy_bypass, urlunparse,
+ basestring)
+from .cookies import RequestsCookieJar, cookiejar_from_dict
+from .structures import CaseInsensitiveDict
+from .exceptions import InvalidURL, FileModeWarning
+
+_hush_pyflakes = (RequestsCookieJar,)
+
+NETRC_FILES = ('.netrc', '_netrc')
+
+DEFAULT_CA_BUNDLE_PATH = certs.where()
+
+
+def dict_to_sequence(d):
+    """Returns the items of a dict-like object as a sequence; other objects are returned unchanged."""
+
+ if hasattr(d, 'items'):
+ d = d.items()
+
+ return d
+
+
+def super_len(o):
+ total_length = 0
+ current_position = 0
+
+ if hasattr(o, '__len__'):
+ total_length = len(o)
+
+ elif hasattr(o, 'len'):
+ total_length = o.len
+
+ elif hasattr(o, 'getvalue'):
+ # e.g. BytesIO, cStringIO.StringIO
+ total_length = len(o.getvalue())
+
+ elif hasattr(o, 'fileno'):
+ try:
+ fileno = o.fileno()
+ except io.UnsupportedOperation:
+ pass
+ else:
+ total_length = os.fstat(fileno).st_size
+
+ # Having used fstat to determine the file length, we need to
+ # confirm that this file was opened up in binary mode.
+ if 'b' not in o.mode:
+ warnings.warn((
+ "Requests has determined the content-length for this "
+ "request using the binary size of the file: however, the "
+ "file has been opened in text mode (i.e. without the 'b' "
+ "flag in the mode). This may lead to an incorrect "
+ "content-length. In Requests 3.0, support will be removed "
+ "for files in text mode."),
+ FileModeWarning
+ )
+
+ if hasattr(o, 'tell'):
+ current_position = o.tell()
+
+ return max(0, total_length - current_position)
+
+
+def get_netrc_auth(url, raise_errors=False):
+ """Returns the Requests tuple auth for a given url from netrc."""
+
+ try:
+ from netrc import netrc, NetrcParseError
+
+ netrc_path = None
+
+ for f in NETRC_FILES:
+ try:
+ loc = os.path.expanduser('~/{0}'.format(f))
+ except KeyError:
+ # os.path.expanduser can fail when $HOME is undefined and
+ # getpwuid fails. See http://bugs.python.org/issue20164 &
+ # https://github.com/kennethreitz/requests/issues/1846
+ return
+
+ if os.path.exists(loc):
+ netrc_path = loc
+ break
+
+ # Abort early if there isn't one.
+ if netrc_path is None:
+ return
+
+ ri = urlparse(url)
+
+        # Strip port numbers from netloc. This weird `if...encode` dance is
+ # used for Python 3.2, which doesn't support unicode literals.
+ splitstr = b':'
+ if isinstance(url, str):
+ splitstr = splitstr.decode('ascii')
+ host = ri.netloc.split(splitstr)[0]
+
+ try:
+ _netrc = netrc(netrc_path).authenticators(host)
+ if _netrc:
+ # Return with login / password
+ login_i = (0 if _netrc[0] else 1)
+ return (_netrc[login_i], _netrc[2])
+ except (NetrcParseError, IOError):
+ # If there was a parsing error or a permissions issue reading the file,
+ # we'll just skip netrc auth unless explicitly asked to raise errors.
+ if raise_errors:
+ raise
+
+ # AppEngine hackiness.
+ except (ImportError, AttributeError):
+ pass
+
+
+def guess_filename(obj):
+ """Tries to guess the filename of the given object."""
+ name = getattr(obj, 'name', None)
+ if (name and isinstance(name, basestring) and name[0] != '<' and
+ name[-1] != '>'):
+ return os.path.basename(name)
+
+
+def from_key_val_list(value):
+ """Take an object and test to see if it can be represented as a
+    dictionary. If it can be, return an OrderedDict; otherwise a ValueError
+    is raised, e.g.,
+
+ ::
+
+ >>> from_key_val_list([('key', 'val')])
+ OrderedDict([('key', 'val')])
+ >>> from_key_val_list('string')
+ ValueError: need more than 1 value to unpack
+ >>> from_key_val_list({'key': 'val'})
+ OrderedDict([('key', 'val')])
+ """
+ if value is None:
+ return None
+
+ if isinstance(value, (str, bytes, bool, int)):
+ raise ValueError('cannot encode objects that are not 2-tuples')
+
+ return OrderedDict(value)
+
+
+def to_key_val_list(value):
+ """Take an object and test to see if it can be represented as a
+ dictionary. If it can be, return a list of tuples, e.g.,
+
+ ::
+
+ >>> to_key_val_list([('key', 'val')])
+ [('key', 'val')]
+ >>> to_key_val_list({'key': 'val'})
+ [('key', 'val')]
+ >>> to_key_val_list('string')
+ ValueError: cannot encode objects that are not 2-tuples.
+ """
+ if value is None:
+ return None
+
+ if isinstance(value, (str, bytes, bool, int)):
+ raise ValueError('cannot encode objects that are not 2-tuples')
+
+ if isinstance(value, collections.Mapping):
+ value = value.items()
+
+ return list(value)
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def parse_list_header(value):
+ """Parse lists as described by RFC 2068 Section 2.
+
+ In particular, parse comma-separated lists where the elements of
+ the list may include quoted-strings. A quoted-string could
+ contain a comma. A non-quoted string could have quotes in the
+ middle. Quotes are removed automatically after parsing.
+
+ It works like :func:`parse_set_header`, except that items may appear
+ multiple times and case sensitivity is preserved.
+
+ The return value is a standard :class:`list`:
+
+ >>> parse_list_header('token, "quoted value"')
+ ['token', 'quoted value']
+
+ To create a header from the :class:`list` again, use the
+ :func:`dump_header` function.
+
+ :param value: a string with a list header.
+ :return: :class:`list`
+ """
+ result = []
+ for item in _parse_list_header(value):
+ if item[:1] == item[-1:] == '"':
+ item = unquote_header_value(item[1:-1])
+ result.append(item)
+ return result
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def parse_dict_header(value):
+ """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
+ convert them into a python dict:
+
+ >>> d = parse_dict_header('foo="is a fish", bar="as well"')
+ >>> type(d) is dict
+ True
+ >>> sorted(d.items())
+ [('bar', 'as well'), ('foo', 'is a fish')]
+
+ If there is no value for a key it will be `None`:
+
+ >>> parse_dict_header('key_without_value')
+ {'key_without_value': None}
+
+ To create a header from the :class:`dict` again, use the
+ :func:`dump_header` function.
+
+ :param value: a string with a dict header.
+ :return: :class:`dict`
+ """
+ result = {}
+ for item in _parse_list_header(value):
+ if '=' not in item:
+ result[item] = None
+ continue
+ name, value = item.split('=', 1)
+ if value[:1] == value[-1:] == '"':
+ value = unquote_header_value(value[1:-1])
+ result[name] = value
+ return result
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def unquote_header_value(value, is_filename=False):
+ r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
+ This does not implement the real unquoting scheme, but mirrors what
+ browsers actually do when quoting.
+
+ :param value: the header value to unquote.
+ """
+ if value and value[0] == value[-1] == '"':
+ # this is not the real unquoting, but fixing this so that the
+ # RFC is met will result in bugs with internet explorer and
+ # probably some other browsers as well. IE for example is
+ # uploading files with "C:\foo\bar.txt" as filename
+ value = value[1:-1]
+
+ # if this is a filename and the starting characters look like
+ # a UNC path, then just return the value without quotes. Using the
+ # replace sequence below on a UNC path has the effect of turning
+ # the leading double slash into a single slash and then
+ # _fix_ie_filename() doesn't work correctly. See #458.
+ if not is_filename or value[:2] != '\\\\':
+ return value.replace('\\\\', '\\').replace('\\"', '"')
+ return value
+
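+
+# A small illustration: surrounding quotes are stripped and backslash
+# escapes are collapsed, while unquoted values pass through untouched.
+#
+#     >>> unquote_header_value('"token"')
+#     'token'
+#     >>> unquote_header_value('token')
+#     'token'
+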
+
+def dict_from_cookiejar(cj):
+ """Returns a key/value dictionary from a CookieJar.
+
+ :param cj: CookieJar object to extract cookies from.
+ """
+
+ cookie_dict = {}
+
+ for cookie in cj:
+ cookie_dict[cookie.name] = cookie.value
+
+ return cookie_dict
+
+
+def add_dict_to_cookiejar(cj, cookie_dict):
+ """Returns a CookieJar from a key/value dictionary.
+
+ :param cj: CookieJar to insert cookies into.
+ :param cookie_dict: Dict of key/values to insert into CookieJar.
+ """
+
+ cj2 = cookiejar_from_dict(cookie_dict)
+ cj.update(cj2)
+ return cj
+
+
+def get_encodings_from_content(content):
+ """Returns encodings from given content string.
+
+ :param content: bytestring to extract encodings from.
+ """
+ warnings.warn((
+ 'In requests 3.0, get_encodings_from_content will be removed. For '
+ 'more information, please see the discussion on issue #2266. (This'
+ ' warning should only appear once.)'),
+ DeprecationWarning)
+
+ charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
+ pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
+ xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
+
+ return (charset_re.findall(content) +
+ pragma_re.findall(content) +
+ xml_re.findall(content))
+
+
+def get_encoding_from_headers(headers):
+ """Returns encodings from given HTTP Header Dict.
+
+ :param headers: dictionary to extract encoding from.
+ """
+
+ content_type = headers.get('content-type')
+
+ if not content_type:
+ return None
+
+ content_type, params = cgi.parse_header(content_type)
+
+ if 'charset' in params:
+ return params['charset'].strip("'\"")
+
+ if 'text' in content_type:
+ return 'ISO-8859-1'
+
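+
+# Illustrative behaviour (a sketch, not exhaustive): an explicit charset
+# wins, text/* falls back to ISO-8859-1 per RFC 2616, and anything else
+# yields None.
+#
+#     >>> get_encoding_from_headers({'content-type': 'text/html; charset=utf-8'})
+#     'utf-8'
+#     >>> get_encoding_from_headers({'content-type': 'text/plain'})
+#     'ISO-8859-1'
+#     >>> get_encoding_from_headers({'content-type': 'application/json'}) is None
+#     True
+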
+
+def stream_decode_response_unicode(iterator, r):
+ """Stream decodes a iterator."""
+
+ if r.encoding is None:
+ for item in iterator:
+ yield item
+ return
+
+ decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
+ for chunk in iterator:
+ rv = decoder.decode(chunk)
+ if rv:
+ yield rv
+ rv = decoder.decode(b'', final=True)
+ if rv:
+ yield rv
+
+
+def iter_slices(string, slice_length):
+ """Iterate over slices of a string."""
+ pos = 0
+ while pos < len(string):
+ yield string[pos:pos + slice_length]
+ pos += slice_length
+
+
+def get_unicode_from_response(r):
+ """Returns the requested content back in unicode.
+
+ :param r: Response object to get unicode content from.
+
+ Tried:
+
+ 1. charset from content-type
+ 2. fall back and decode, replacing undecodable characters
+
+ """
+ warnings.warn((
+ 'In requests 3.0, get_unicode_from_response will be removed. For '
+ 'more information, please see the discussion on issue #2266. (This'
+ ' warning should only appear once.)'),
+ DeprecationWarning)
+
+ tried_encodings = []
+
+ # Try charset from content-type
+ encoding = get_encoding_from_headers(r.headers)
+
+ if encoding:
+ try:
+ return str(r.content, encoding)
+ except UnicodeError:
+ tried_encodings.append(encoding)
+
+ # Fall back:
+ try:
+ return str(r.content, encoding, errors='replace')
+ except TypeError:
+ return r.content
+
+
+# The unreserved URI characters (RFC 3986)
+UNRESERVED_SET = frozenset(
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
+ + "0123456789-._~")
+
+
+def unquote_unreserved(uri):
+ """Un-escape any percent-escape sequences in a URI that are unreserved
+ characters. This leaves all reserved, illegal and non-ASCII bytes encoded.
+ """
+ parts = uri.split('%')
+ for i in range(1, len(parts)):
+ h = parts[i][0:2]
+ if len(h) == 2 and h.isalnum():
+ try:
+ c = chr(int(h, 16))
+ except ValueError:
+ raise InvalidURL("Invalid percent-escape sequence: '%s'" % h)
+
+ if c in UNRESERVED_SET:
+ parts[i] = c + parts[i][2:]
+ else:
+ parts[i] = '%' + parts[i]
+ else:
+ parts[i] = '%' + parts[i]
+ return ''.join(parts)
+
+
+def requote_uri(uri):
+ """Re-quote the given URI.
+
+ This function passes the given URI through an unquote/quote cycle to
+ ensure that it is fully and consistently quoted.
+ """
+ safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
+ safe_without_percent = "!#$&'()*+,/:;=?@[]~"
+ try:
+ # Unquote only the unreserved characters
+ # Then quote only illegal characters (do not quote reserved,
+ # unreserved, or '%')
+ return quote(unquote_unreserved(uri), safe=safe_with_percent)
+ except InvalidURL:
+ # We couldn't unquote the given URI, so let's try quoting it, but
+ # there may be unquoted '%'s in the URI. We need to make sure they're
+ # properly quoted so they do not cause issues elsewhere.
+ return quote(uri, safe=safe_without_percent)
+
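+
+# A rough sketch of the unquote/quote cycle: unreserved escapes are
+# unwrapped, unsafe characters are (re-)escaped, and an already
+# well-quoted URI passes through unchanged.
+#
+#     >>> requote_uri('http://example.com/%7Euser')
+#     'http://example.com/~user'
+#     >>> requote_uri('http://example.com/a b')
+#     'http://example.com/a%20b'
+#     >>> requote_uri('http://example.com/a%20b')
+#     'http://example.com/a%20b'
+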
+
+def address_in_network(ip, net):
+ """
+ This function allows you to check if an IP belongs to a network subnet
+ Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
+ returns False if ip = 192.168.1.1 and net = 192.168.100.0/24
+ """
+ ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]
+ netaddr, bits = net.split('/')
+ netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]
+ network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask
+ return (ipaddr & netmask) == (network & netmask)
+
+
+def dotted_netmask(mask):
+ """
+ Converts mask from /xx format to xxx.xxx.xxx.xxx
+ Example: if mask is 24, the function returns 255.255.255.0
+ """
+ bits = 0xffffffff ^ (1 << 32 - mask) - 1
+ return socket.inet_ntoa(struct.pack('>I', bits))
+
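+
+# Worked examples for the two subnet helpers above (illustrative only):
+#
+#     >>> dotted_netmask(24)
+#     '255.255.255.0'
+#     >>> dotted_netmask(16)
+#     '255.255.0.0'
+#     >>> address_in_network('192.168.1.1', '192.168.1.0/24')
+#     True
+#     >>> address_in_network('192.168.1.1', '192.168.100.0/24')
+#     False
+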
+
+def is_ipv4_address(string_ip):
+ try:
+ socket.inet_aton(string_ip)
+ except socket.error:
+ return False
+ return True
+
+
+def is_valid_cidr(string_network):
+ """Very simple check of the cidr format in no_proxy variable"""
+ if string_network.count('/') == 1:
+ try:
+ mask = int(string_network.split('/')[1])
+ except ValueError:
+ return False
+
+ if mask < 1 or mask > 32:
+ return False
+
+ try:
+ socket.inet_aton(string_network.split('/')[0])
+ except socket.error:
+ return False
+ else:
+ return False
+ return True
+
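+
+# Quick illustration: only dotted-quad networks with a mask between 1 and
+# 32 count as CIDR notation here; bare addresses do not.
+#
+#     >>> is_valid_cidr('192.168.1.0/24')
+#     True
+#     >>> is_valid_cidr('192.168.1.0/33')
+#     False
+#     >>> is_valid_cidr('192.168.1.1')
+#     False
+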
+
+def should_bypass_proxies(url):
+ """
+ Returns whether we should bypass proxies or not.
+ """
+ get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
+
+ # First check whether no_proxy is defined. If it is, check that the URL
+ # we're getting isn't in the no_proxy list.
+ no_proxy = get_proxy('no_proxy')
+ netloc = urlparse(url).netloc
+
+ if no_proxy:
+ # We need to check whether we match here. We need to see if we match
+ # the end of the netloc, both with and without the port.
+ no_proxy = (
+ host for host in no_proxy.replace(' ', '').split(',') if host
+ )
+
+ ip = netloc.split(':')[0]
+ if is_ipv4_address(ip):
+ for proxy_ip in no_proxy:
+ if is_valid_cidr(proxy_ip):
+ if address_in_network(ip, proxy_ip):
+ return True
+ else:
+ for host in no_proxy:
+ if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
+ # The URL does match something in no_proxy, so we don't want
+ # to apply the proxies on this URL.
+ return True
+
+ # If the system proxy settings indicate that this URL should be bypassed,
+ # don't proxy.
+ # The proxy_bypass function is incredibly buggy on OS X in early versions
+ # of Python 2.6, so allow this call to fail. Only catch the specific
+ # exceptions we've seen, though: this call failing in other ways can reveal
+ # legitimate problems.
+ try:
+ bypass = proxy_bypass(netloc)
+ except (TypeError, socket.gaierror):
+ bypass = False
+
+ if bypass:
+ return True
+
+ return False
+
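+
+# Illustrative behaviour, assuming the hypothetical environment variable
+# no_proxy="192.168.0.0/24,localhost.example": CIDR entries are matched
+# against IP hosts, other entries against the end of the netloc.
+#
+#     >>> should_bypass_proxies('http://192.168.0.5/status')
+#     True
+#     >>> should_bypass_proxies('http://service.localhost.example/')
+#     True
+#     >>> should_bypass_proxies('http://example.org/')  # falls through to proxy_bypass()
+#     False
+
+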
+def get_environ_proxies(url):
+ """Return a dict of environment proxies."""
+ if should_bypass_proxies(url):
+ return {}
+ else:
+ return getproxies()
+
+
+def select_proxy(url, proxies):
+ """Select a proxy for the url, if applicable.
+
+ :param url: The URL of the request
+ :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
+ """
+ proxies = proxies or {}
+ urlparts = urlparse(url)
+ proxy = proxies.get(urlparts.scheme+'://'+urlparts.hostname)
+ if proxy is None:
+ proxy = proxies.get(urlparts.scheme)
+ return proxy
+
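+
+# A small sketch of the lookup order (hypothetical proxy URLs): a
+# scheme://host key takes precedence over a bare scheme key.
+#
+#     >>> proxies = {'http': 'http://proxy.example:3128',
+#     ...            'http://special.example': 'http://other-proxy.example:3128'}
+#     >>> select_proxy('http://special.example/path', proxies)
+#     'http://other-proxy.example:3128'
+#     >>> select_proxy('http://elsewhere.example/', proxies)
+#     'http://proxy.example:3128'
+
+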
+def default_user_agent(name="python-requests"):
+ """Return a string representing the default user agent."""
+ return '%s/%s' % (name, __version__)
+
+
+def default_headers():
+ return CaseInsensitiveDict({
+ 'User-Agent': default_user_agent(),
+ 'Accept-Encoding': ', '.join(('gzip', 'deflate')),
+ 'Accept': '*/*',
+ 'Connection': 'keep-alive',
+ })
+
+
+def parse_header_links(value):
+ """Return a dict of parsed link headers proxies.
+
+ i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
+
+ """
+
+ links = []
+
+ replace_chars = " '\""
+
+ for val in re.split(", *<", value):
+ try:
+ url, params = val.split(";", 1)
+ except ValueError:
+ url, params = val, ''
+
+ link = {}
+
+ link["url"] = url.strip("<> '\"")
+
+ for param in params.split(";"):
+ try:
+ key, value = param.split("=")
+ except ValueError:
+ break
+
+ link[key.strip(replace_chars)] = value.strip(replace_chars)
+
+ links.append(link)
+
+ return links
+
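+
+# Illustrative parse of a typical pagination header (hypothetical URLs):
+#
+#     >>> parse_header_links('<https://api.example.com/items?page=2>; rel="next", '
+#     ...                    '<https://api.example.com/items?page=9>; rel="last"')
+#     [{'url': 'https://api.example.com/items?page=2', 'rel': 'next'},
+#      {'url': 'https://api.example.com/items?page=9', 'rel': 'last'}]
+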
+
+# Null bytes; no need to recreate these on each call to guess_json_utf
+_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3
+_null2 = _null * 2
+_null3 = _null * 3
+
+
+def guess_json_utf(data):
+ # JSON always starts with two ASCII characters, so detection is as
+ # easy as counting the nulls and, from their location and count,
+ # determining the encoding. Also detect a BOM, if present.
+ sample = data[:4]
+ if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
+ return 'utf-32' # BOM included
+ if sample[:3] == codecs.BOM_UTF8:
+ return 'utf-8-sig' # BOM included, MS style (discouraged)
+ if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
+ return 'utf-16' # BOM included
+ nullcount = sample.count(_null)
+ if nullcount == 0:
+ return 'utf-8'
+ if nullcount == 2:
+ if sample[::2] == _null2: # 1st and 3rd are null
+ return 'utf-16-be'
+ if sample[1::2] == _null2: # 2nd and 4th are null
+ return 'utf-16-le'
+ # Did not detect 2 valid UTF-16 ascii-range characters
+ if nullcount == 3:
+ if sample[:3] == _null3:
+ return 'utf-32-be'
+ if sample[1:] == _null3:
+ return 'utf-32-le'
+ # Did not detect a valid UTF-32 ascii-range character
+ return None
+
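+
+# A rough illustration of the null-counting heuristic: the same JSON
+# document is recognised in several encodings without decoding it first.
+#
+#     >>> guess_json_utf(b'{"a": 1}')
+#     'utf-8'
+#     >>> guess_json_utf('{"a": 1}'.encode('utf-16-le'))
+#     'utf-16-le'
+#     >>> guess_json_utf('{"a": 1}'.encode('utf-32-be'))
+#     'utf-32-be'
+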
+
+def prepend_scheme_if_needed(url, new_scheme):
+ '''Given a URL that may or may not have a scheme, prepend the given scheme.
+ Does not replace a present scheme with the one provided as an argument.'''
+ scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)
+
+ # urlparse is a finicky beast, and sometimes decides that there isn't a
+ # netloc present. Assume that it's being over-cautious, and switch netloc
+ # and path if urlparse decided there was no netloc.
+ if not netloc:
+ netloc, path = path, netloc
+
+ return urlunparse((scheme, netloc, path, params, query, fragment))
+
+
+def get_auth_from_url(url):
+ """Given a url with authentication components, extract them into a tuple of
+ username,password."""
+ parsed = urlparse(url)
+
+ try:
+ auth = (unquote(parsed.username), unquote(parsed.password))
+ except (AttributeError, TypeError):
+ auth = ('', '')
+
+ return auth
+
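+
+# Small sketch: credentials are percent-decoded, and URLs without any
+# auth component fall back to a pair of empty strings.
+#
+#     >>> get_auth_from_url('http://user:p%40ss@example.com/path')
+#     ('user', 'p@ss')
+#     >>> get_auth_from_url('http://example.com/path')
+#     ('', '')
+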
+
+def to_native_string(string, encoding='ascii'):
+ """
+ Given a string object, regardless of type, returns a representation of that
+ string in the native string type, encoding and decoding where necessary.
+ This assumes ASCII unless told otherwise.
+ """
+ out = None
+
+ if isinstance(string, builtin_str):
+ out = string
+ else:
+ if is_py2:
+ out = string.encode(encoding)
+ else:
+ out = string.decode(encoding)
+
+ return out
+
+
+def urldefragauth(url):
+ """
+ Given a URL, remove the fragment and the authentication part.
+ """
+ scheme, netloc, path, params, query, fragment = urlparse(url)
+
+ # see func:`prepend_scheme_if_needed`
+ if not netloc:
+ netloc, path = path, netloc
+
+ netloc = netloc.rsplit('@', 1)[-1]
+
+ return urlunparse((scheme, netloc, path, params, query, ''))
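+
+
+# For illustration: both the userinfo and the fragment are dropped, while
+# the rest of the URL is preserved.
+#
+#     >>> urldefragauth('https://user:pass@example.com/path?q=1#frag')
+#     'https://example.com/path?q=1'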
diff --git a/third_party/python/requests/requirements.txt b/third_party/python/requests/requirements.txt
new file mode 100644
index 0000000000..ad5da76158
--- /dev/null
+++ b/third_party/python/requests/requirements.txt
@@ -0,0 +1,6 @@
+py==1.4.30
+pytest==2.8.1
+pytest-cov==2.1.0
+pytest-httpbin==0.0.7
+httpbin==0.4.0
+wheel
diff --git a/third_party/python/requests/setup.cfg b/third_party/python/requests/setup.cfg
new file mode 100644
index 0000000000..6c71b612d8
--- /dev/null
+++ b/third_party/python/requests/setup.cfg
@@ -0,0 +1,8 @@
+[wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/third_party/python/requests/setup.py b/third_party/python/requests/setup.py
new file mode 100755
index 0000000000..b7ed12ba2c
--- /dev/null
+++ b/third_party/python/requests/setup.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+
+import os
+import re
+import sys
+
+from codecs import open
+
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+if sys.argv[-1] == 'publish':
+ os.system('python setup.py sdist upload')
+ sys.exit()
+
+packages = [
+ 'requests',
+ 'requests.packages',
+ 'requests.packages.chardet',
+ 'requests.packages.urllib3',
+ 'requests.packages.urllib3.packages',
+ 'requests.packages.urllib3.contrib',
+ 'requests.packages.urllib3.util',
+ 'requests.packages.urllib3.packages.ssl_match_hostname',
+]
+
+requires = []
+
+version = ''
+with open('requests/__init__.py', 'r') as fd:
+ version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
+ fd.read(), re.MULTILINE).group(1)
+
+if not version:
+ raise RuntimeError('Cannot find version information')
+
+with open('README.rst', 'r', 'utf-8') as f:
+ readme = f.read()
+with open('HISTORY.rst', 'r', 'utf-8') as f:
+ history = f.read()
+
+setup(
+ name='requests',
+ version=version,
+ description='Python HTTP for Humans.',
+ long_description=readme + '\n\n' + history,
+ author='Kenneth Reitz',
+ author_email='me@kennethreitz.com',
+ url='http://python-requests.org',
+ packages=packages,
+ package_data={'': ['LICENSE', 'NOTICE'], 'requests': ['*.pem']},
+ package_dir={'requests': 'requests'},
+ include_package_data=True,
+ install_requires=requires,
+ license='Apache 2.0',
+ zip_safe=False,
+ classifiers=(
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'Natural Language :: English',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ ),
+ extras_require={
+ 'security': ['pyOpenSSL>=0.13', 'ndg-httpsclient', 'pyasn1'],
+ },
+)
diff --git a/third_party/python/requests/test_requests.py b/third_party/python/requests/test_requests.py
new file mode 100755
index 0000000000..0795241867
--- /dev/null
+++ b/third_party/python/requests/test_requests.py
@@ -0,0 +1,1746 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""Tests for Requests."""
+
+from __future__ import division
+import json
+import os
+import pickle
+import unittest
+import collections
+import contextlib
+
+import io
+import requests
+import pytest
+from requests.adapters import HTTPAdapter
+from requests.auth import HTTPDigestAuth, _basic_auth_str
+from requests.compat import (
+ Morsel, cookielib, getproxies, str, urljoin, urlparse, is_py3,
+ builtin_str, OrderedDict
+ )
+from requests.cookies import cookiejar_from_dict, morsel_to_cookie
+from requests.exceptions import (ConnectionError, ConnectTimeout,
+ InvalidSchema, InvalidURL, MissingSchema,
+ ReadTimeout, Timeout, RetryError)
+from requests.models import PreparedRequest
+from requests.structures import CaseInsensitiveDict
+from requests.sessions import SessionRedirectMixin
+from requests.models import urlencode
+from requests.hooks import default_hooks
+
+try:
+ import StringIO
+except ImportError:
+ import io as StringIO
+
+try:
+ from multiprocessing.pool import ThreadPool
+except ImportError:
+ ThreadPool = None
+
+if is_py3:
+ def u(s):
+ return s
+else:
+ def u(s):
+ return s.decode('unicode-escape')
+
+
+@pytest.fixture
+def httpbin(httpbin):
+ # Issue #1483: Make sure the URL always has a trailing slash
+ httpbin_url = httpbin.url.rstrip('/') + '/'
+
+ def inner(*suffix):
+ return urljoin(httpbin_url, '/'.join(suffix))
+
+ return inner
+
+
+@pytest.fixture
+def httpsbin_url(httpbin_secure):
+ # Issue #1483: Make sure the URL always has a trailing slash
+ httpbin_url = httpbin_secure.url.rstrip('/') + '/'
+
+ def inner(*suffix):
+ return urljoin(httpbin_url, '/'.join(suffix))
+
+ return inner
+
+
+# Requests to this URL should always fail with a connection timeout (nothing
+# listening on that port)
+TARPIT = "http://10.255.255.1"
+
+class TestRequests(object):
+
+ _multiprocess_can_split_ = True
+
+ def setUp(self):
+ """Create simple data set with headers."""
+ pass
+
+ def tearDown(self):
+ """Teardown."""
+ pass
+
+ def test_entry_points(self):
+
+ requests.session
+ requests.session().get
+ requests.session().head
+ requests.get
+ requests.head
+ requests.put
+ requests.patch
+ requests.post
+
+ def test_invalid_url(self):
+ with pytest.raises(MissingSchema):
+ requests.get('hiwpefhipowhefopw')
+ with pytest.raises(InvalidSchema):
+ requests.get('localhost:3128')
+ with pytest.raises(InvalidSchema):
+ requests.get('localhost.localdomain:3128/')
+ with pytest.raises(InvalidSchema):
+ requests.get('10.122.1.1:3128/')
+ with pytest.raises(InvalidURL):
+ requests.get('http://')
+
+ def test_basic_building(self):
+ req = requests.Request()
+ req.url = 'http://kennethreitz.org/'
+ req.data = {'life': '42'}
+
+ pr = req.prepare()
+ assert pr.url == req.url
+ assert pr.body == 'life=42'
+
+ def test_no_content_length(self, httpbin):
+ get_req = requests.Request('GET', httpbin('get')).prepare()
+ assert 'Content-Length' not in get_req.headers
+ head_req = requests.Request('HEAD', httpbin('head')).prepare()
+ assert 'Content-Length' not in head_req.headers
+
+ def test_override_content_length(self, httpbin):
+ headers = {
+ 'Content-Length': 'not zero'
+ }
+ r = requests.Request('POST', httpbin('post'), headers=headers).prepare()
+ assert 'Content-Length' in r.headers
+ assert r.headers['Content-Length'] == 'not zero'
+
+ def test_path_is_not_double_encoded(self):
+ request = requests.Request('GET', "http://0.0.0.0/get/test case").prepare()
+
+ assert request.path_url == '/get/test%20case'
+
+ def test_params_are_added_before_fragment(self):
+ request = requests.Request('GET',
+ "http://example.com/path#fragment", params={"a": "b"}).prepare()
+ assert request.url == "http://example.com/path?a=b#fragment"
+ request = requests.Request('GET',
+ "http://example.com/path?key=value#fragment", params={"a": "b"}).prepare()
+ assert request.url == "http://example.com/path?key=value&a=b#fragment"
+
+ def test_params_original_order_is_preserved_by_default(self):
+ param_ordered_dict = OrderedDict((('z', 1), ('a', 1), ('k', 1), ('d', 1)))
+ session = requests.Session()
+ request = requests.Request('GET', 'http://example.com/', params=param_ordered_dict)
+ prep = session.prepare_request(request)
+ assert prep.url == 'http://example.com/?z=1&a=1&k=1&d=1'
+
+ def test_params_bytes_are_encoded(self):
+ request = requests.Request('GET', 'http://example.com',
+ params=b'test=foo').prepare()
+ assert request.url == 'http://example.com/?test=foo'
+
+ def test_binary_put(self):
+ request = requests.Request('PUT', 'http://example.com',
+ data=u"ööö".encode("utf-8")).prepare()
+ assert isinstance(request.body, bytes)
+
+ def test_mixed_case_scheme_acceptable(self, httpbin):
+ s = requests.Session()
+ s.proxies = getproxies()
+ parts = urlparse(httpbin('get'))
+ schemes = ['http://', 'HTTP://', 'hTTp://', 'HttP://']
+ for scheme in schemes:
+ url = scheme + parts.netloc + parts.path
+ r = requests.Request('GET', url)
+ r = s.send(r.prepare())
+ assert r.status_code == 200, 'failed for scheme {0}'.format(scheme)
+
+ def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin):
+ r = requests.Request('GET', httpbin('get'))
+ s = requests.Session()
+ s.proxies = getproxies()
+
+ r = s.send(r.prepare())
+
+ assert r.status_code == 200
+
+ def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin):
+ r = requests.get(httpbin('redirect', '1'))
+ assert r.status_code == 200
+ assert r.history[0].status_code == 302
+ assert r.history[0].is_redirect
+
+ # def test_HTTP_302_ALLOW_REDIRECT_POST(self):
+ # r = requests.post(httpbin('status', '302'), data={'some': 'data'})
+ # self.assertEqual(r.status_code, 200)
+
+ def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin):
+ heads = {'User-agent': 'Mozilla/5.0'}
+
+ r = requests.get(httpbin('user-agent'), headers=heads)
+
+ assert heads['User-agent'] in r.text
+ assert r.status_code == 200
+
+ def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin):
+ heads = {'User-agent': 'Mozilla/5.0'}
+
+ r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads)
+ assert r.status_code == 200
+
+ def test_set_cookie_on_301(self, httpbin):
+ s = requests.session()
+ url = httpbin('cookies/set?foo=bar')
+ s.get(url)
+ assert s.cookies['foo'] == 'bar'
+
+ def test_cookie_sent_on_redirect(self, httpbin):
+ s = requests.session()
+ s.get(httpbin('cookies/set?foo=bar'))
+ r = s.get(httpbin('redirect/1')) # redirects to httpbin('get')
+ assert 'Cookie' in r.json()['headers']
+
+ def test_cookie_removed_on_expire(self, httpbin):
+ s = requests.session()
+ s.get(httpbin('cookies/set?foo=bar'))
+ assert s.cookies['foo'] == 'bar'
+ s.get(
+ httpbin('response-headers'),
+ params={
+ 'Set-Cookie':
+ 'foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT'
+ }
+ )
+ assert 'foo' not in s.cookies
+
+ def test_cookie_quote_wrapped(self, httpbin):
+ s = requests.session()
+ s.get(httpbin('cookies/set?foo="bar:baz"'))
+ assert s.cookies['foo'] == '"bar:baz"'
+
+ def test_cookie_persists_via_api(self, httpbin):
+ s = requests.session()
+ r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'})
+ assert 'foo' in r.request.headers['Cookie']
+ assert 'foo' in r.history[0].request.headers['Cookie']
+
+ def test_request_cookie_overrides_session_cookie(self, httpbin):
+ s = requests.session()
+ s.cookies['foo'] = 'bar'
+ r = s.get(httpbin('cookies'), cookies={'foo': 'baz'})
+ assert r.json()['cookies']['foo'] == 'baz'
+ # Session cookie should not be modified
+ assert s.cookies['foo'] == 'bar'
+
+ def test_request_cookies_not_persisted(self, httpbin):
+ s = requests.session()
+ s.get(httpbin('cookies'), cookies={'foo': 'baz'})
+ # Sending a request with cookies should not add cookies to the session
+ assert not s.cookies
+
+ def test_generic_cookiejar_works(self, httpbin):
+ cj = cookielib.CookieJar()
+ cookiejar_from_dict({'foo': 'bar'}, cj)
+ s = requests.session()
+ s.cookies = cj
+ r = s.get(httpbin('cookies'))
+ # Make sure the cookie was sent
+ assert r.json()['cookies']['foo'] == 'bar'
+ # Make sure the session cj is still the custom one
+ assert s.cookies is cj
+
+ def test_param_cookiejar_works(self, httpbin):
+ cj = cookielib.CookieJar()
+ cookiejar_from_dict({'foo': 'bar'}, cj)
+ s = requests.session()
+ r = s.get(httpbin('cookies'), cookies=cj)
+ # Make sure the cookie was sent
+ assert r.json()['cookies']['foo'] == 'bar'
+
+ def test_requests_in_history_are_not_overridden(self, httpbin):
+ resp = requests.get(httpbin('redirect/3'))
+ urls = [r.url for r in resp.history]
+ req_urls = [r.request.url for r in resp.history]
+ assert urls == req_urls
+
+ def test_history_is_always_a_list(self, httpbin):
+ """
+ Show that even with redirects, Response.history is always a list.
+ """
+ resp = requests.get(httpbin('get'))
+ assert isinstance(resp.history, list)
+ resp = requests.get(httpbin('redirect/1'))
+ assert isinstance(resp.history, list)
+ assert not isinstance(resp.history, tuple)
+
+ def test_headers_on_session_with_None_are_not_sent(self, httpbin):
+ """Do not send headers in Session.headers with None values."""
+ ses = requests.Session()
+ ses.headers['Accept-Encoding'] = None
+ req = requests.Request('GET', httpbin('get'))
+ prep = ses.prepare_request(req)
+ assert 'Accept-Encoding' not in prep.headers
+
+ def test_user_agent_transfers(self, httpbin):
+
+ heads = {
+ 'User-agent': 'Mozilla/5.0 (github.com/kennethreitz/requests)'
+ }
+
+ r = requests.get(httpbin('user-agent'), headers=heads)
+ assert heads['User-agent'] in r.text
+
+ heads = {
+ 'user-agent': 'Mozilla/5.0 (github.com/kennethreitz/requests)'
+ }
+
+ r = requests.get(httpbin('user-agent'), headers=heads)
+ assert heads['user-agent'] in r.text
+
+ def test_HTTP_200_OK_HEAD(self, httpbin):
+ r = requests.head(httpbin('get'))
+ assert r.status_code == 200
+
+ def test_HTTP_200_OK_PUT(self, httpbin):
+ r = requests.put(httpbin('put'))
+ assert r.status_code == 200
+
+ def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin):
+ auth = ('user', 'pass')
+ url = httpbin('basic-auth', 'user', 'pass')
+
+ r = requests.get(url, auth=auth)
+ assert r.status_code == 200
+
+ r = requests.get(url)
+ assert r.status_code == 401
+
+ s = requests.session()
+ s.auth = auth
+ r = s.get(url)
+ assert r.status_code == 200
+
+ def test_connection_error_invalid_domain(self):
+ """Connecting to an unknown domain should raise a ConnectionError"""
+ with pytest.raises(ConnectionError):
+ requests.get("http://doesnotexist.google.com")
+
+ def test_connection_error_invalid_port(self):
+ """Connecting to an invalid port should raise a ConnectionError"""
+ with pytest.raises(ConnectionError):
+ requests.get("http://localhost:1", timeout=1)
+
+ def test_LocationParseError(self):
+ """Inputing a URL that cannot be parsed should raise an InvalidURL error"""
+ with pytest.raises(InvalidURL):
+ requests.get("http://fe80::5054:ff:fe5a:fc0")
+
+ def test_basicauth_with_netrc(self, httpbin):
+ auth = ('user', 'pass')
+ wrong_auth = ('wronguser', 'wrongpass')
+ url = httpbin('basic-auth', 'user', 'pass')
+
+ old_auth = requests.sessions.get_netrc_auth
+
+ try:
+ def get_netrc_auth_mock(url):
+ return auth
+ requests.sessions.get_netrc_auth = get_netrc_auth_mock
+
+ # Should use netrc and work.
+ r = requests.get(url)
+ assert r.status_code == 200
+
+ # Given auth should override and fail.
+ r = requests.get(url, auth=wrong_auth)
+ assert r.status_code == 401
+
+ s = requests.session()
+
+ # Should use netrc and work.
+ r = s.get(url)
+ assert r.status_code == 200
+
+ # Given auth should override and fail.
+ s.auth = wrong_auth
+ r = s.get(url)
+ assert r.status_code == 401
+ finally:
+ requests.sessions.get_netrc_auth = old_auth
+
+ def test_DIGEST_HTTP_200_OK_GET(self, httpbin):
+
+ auth = HTTPDigestAuth('user', 'pass')
+ url = httpbin('digest-auth', 'auth', 'user', 'pass')
+
+ r = requests.get(url, auth=auth)
+ assert r.status_code == 200
+
+ r = requests.get(url)
+ assert r.status_code == 401
+
+ s = requests.session()
+ s.auth = HTTPDigestAuth('user', 'pass')
+ r = s.get(url)
+ assert r.status_code == 200
+
+ def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin):
+ url = httpbin('digest-auth', 'auth', 'user', 'pass')
+ auth = HTTPDigestAuth('user', 'pass')
+ r = requests.get(url)
+ assert r.cookies['fake'] == 'fake_value'
+
+ r = requests.get(url, auth=auth)
+ assert r.status_code == 200
+
+ def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin):
+ url = httpbin('digest-auth', 'auth', 'user', 'pass')
+ auth = HTTPDigestAuth('user', 'pass')
+ s = requests.Session()
+ s.get(url, auth=auth)
+ assert s.cookies['fake'] == 'fake_value'
+
+ def test_DIGEST_STREAM(self, httpbin):
+
+ auth = HTTPDigestAuth('user', 'pass')
+ url = httpbin('digest-auth', 'auth', 'user', 'pass')
+
+ r = requests.get(url, auth=auth, stream=True)
+ assert r.raw.read() != b''
+
+ r = requests.get(url, auth=auth, stream=False)
+ assert r.raw.read() == b''
+
+ def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin):
+
+ auth = HTTPDigestAuth('user', 'wrongpass')
+ url = httpbin('digest-auth', 'auth', 'user', 'pass')
+
+ r = requests.get(url, auth=auth)
+ assert r.status_code == 401
+
+ r = requests.get(url)
+ assert r.status_code == 401
+
+ s = requests.session()
+ s.auth = auth
+ r = s.get(url)
+ assert r.status_code == 401
+
+ def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin):
+
+ auth = HTTPDigestAuth('user', 'pass')
+ url = httpbin('digest-auth', 'auth', 'user', 'pass')
+
+ r = requests.get(url, auth=auth)
+ assert '"auth"' in r.request.headers['Authorization']
+
+ def test_POSTBIN_GET_POST_FILES(self, httpbin):
+
+ url = httpbin('post')
+ post1 = requests.post(url).raise_for_status()
+
+ post1 = requests.post(url, data={'some': 'data'})
+ assert post1.status_code == 200
+
+ with open('requirements.txt') as f:
+ post2 = requests.post(url, files={'some': f})
+ assert post2.status_code == 200
+
+ post4 = requests.post(url, data='[{"some": "json"}]')
+ assert post4.status_code == 200
+
+ with pytest.raises(ValueError):
+ requests.post(url, files=['bad file data'])
+
+ def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin):
+
+ url = httpbin('post')
+ post1 = requests.post(url).raise_for_status()
+
+ post1 = requests.post(url, data={'some': 'data'})
+ assert post1.status_code == 200
+
+ with open('requirements.txt') as f:
+ post2 = requests.post(url,
+ data={'some': 'data'}, files={'some': f})
+ assert post2.status_code == 200
+
+ post4 = requests.post(url, data='[{"some": "json"}]')
+ assert post4.status_code == 200
+
+ with pytest.raises(ValueError):
+ requests.post(url, files=['bad file data'])
+
+ def test_conflicting_post_params(self, httpbin):
+ url = httpbin('post')
+ with open('requirements.txt') as f:
+ pytest.raises(ValueError, "requests.post(url, data='[{\"some\": \"data\"}]', files={'some': f})")
+ pytest.raises(ValueError, "requests.post(url, data=u('[{\"some\": \"data\"}]'), files={'some': f})")
+
+ def test_request_ok_set(self, httpbin):
+ r = requests.get(httpbin('status', '404'))
+ assert not r.ok
+
+ def test_status_raising(self, httpbin):
+ r = requests.get(httpbin('status', '404'))
+ with pytest.raises(requests.exceptions.HTTPError):
+ r.raise_for_status()
+
+ r = requests.get(httpbin('status', '500'))
+ assert not r.ok
+
+ def test_decompress_gzip(self, httpbin):
+ r = requests.get(httpbin('gzip'))
+ r.content.decode('ascii')
+
+ def test_unicode_get(self, httpbin):
+ url = httpbin('/get')
+ requests.get(url, params={'foo': 'føø'})
+ requests.get(url, params={'føø': 'føø'})
+ requests.get(url, params={'føø': 'føø'})
+ requests.get(url, params={'foo': 'foo'})
+ requests.get(httpbin('ø'), params={'foo': 'foo'})
+
+ def test_unicode_header_name(self, httpbin):
+ requests.put(
+ httpbin('put'),
+ headers={str('Content-Type'): 'application/octet-stream'},
+ data='\xff') # compat.str is unicode.
+
+ def test_pyopenssl_redirect(self, httpsbin_url, httpbin_ca_bundle):
+ requests.get(httpsbin_url('status', '301'), verify=httpbin_ca_bundle)
+
+ def test_urlencoded_get_query_multivalued_param(self, httpbin):
+
+ r = requests.get(httpbin('get'), params=dict(test=['foo', 'baz']))
+ assert r.status_code == 200
+ assert r.url == httpbin('get?test=foo&test=baz')
+
+ def test_different_encodings_dont_break_post(self, httpbin):
+ r = requests.post(httpbin('post'),
+ data={'stuff': json.dumps({'a': 123})},
+ params={'blah': 'asdf1234'},
+ files={'file': ('test_requests.py', open(__file__, 'rb'))})
+ assert r.status_code == 200
+
+ def test_unicode_multipart_post(self, httpbin):
+ r = requests.post(httpbin('post'),
+ data={'stuff': u('ëlïxr')},
+ files={'file': ('test_requests.py', open(__file__, 'rb'))})
+ assert r.status_code == 200
+
+ r = requests.post(httpbin('post'),
+ data={'stuff': u('ëlïxr').encode('utf-8')},
+ files={'file': ('test_requests.py', open(__file__, 'rb'))})
+ assert r.status_code == 200
+
+ r = requests.post(httpbin('post'),
+ data={'stuff': 'elixr'},
+ files={'file': ('test_requests.py', open(__file__, 'rb'))})
+ assert r.status_code == 200
+
+ r = requests.post(httpbin('post'),
+ data={'stuff': 'elixr'.encode('utf-8')},
+ files={'file': ('test_requests.py', open(__file__, 'rb'))})
+ assert r.status_code == 200
+
+ def test_unicode_multipart_post_fieldnames(self, httpbin):
+ filename = os.path.splitext(__file__)[0] + '.py'
+ r = requests.Request(method='POST',
+ url=httpbin('post'),
+ data={'stuff'.encode('utf-8'): 'elixr'},
+ files={'file': ('test_requests.py',
+ open(filename, 'rb'))})
+ prep = r.prepare()
+ assert b'name="stuff"' in prep.body
+ assert b'name="b\'stuff\'"' not in prep.body
+
+ def test_unicode_method_name(self, httpbin):
+ files = {'file': open('test_requests.py', 'rb')}
+ r = requests.request(
+ method=u('POST'), url=httpbin('post'), files=files)
+ assert r.status_code == 200
+
+ def test_unicode_method_name_with_request_object(self, httpbin):
+ files = {'file': open('test_requests.py', 'rb')}
+ s = requests.Session()
+ req = requests.Request(u("POST"), httpbin('post'), files=files)
+ prep = s.prepare_request(req)
+ assert isinstance(prep.method, builtin_str)
+ assert prep.method == "POST"
+
+ resp = s.send(prep)
+ assert resp.status_code == 200
+
+ def test_custom_content_type(self, httpbin):
+ r = requests.post(
+ httpbin('post'),
+ data={'stuff': json.dumps({'a': 123})},
+ files={'file1': ('test_requests.py', open(__file__, 'rb')),
+ 'file2': ('test_requests', open(__file__, 'rb'),
+ 'text/py-content-type')})
+ assert r.status_code == 200
+ assert b"text/py-content-type" in r.request.body
+
+ def test_hook_receives_request_arguments(self, httpbin):
+ def hook(resp, **kwargs):
+ assert resp is not None
+ assert kwargs != {}
+
+ requests.Request('GET', httpbin(), hooks={'response': hook})
+
+ def test_session_hooks_are_used_with_no_request_hooks(self, httpbin):
+ hook = lambda x, *args, **kwargs: x
+ s = requests.Session()
+ s.hooks['response'].append(hook)
+ r = requests.Request('GET', httpbin())
+ prep = s.prepare_request(r)
+ assert prep.hooks['response'] != []
+ assert prep.hooks['response'] == [hook]
+
+ def test_session_hooks_are_overridden_by_request_hooks(self, httpbin):
+ hook1 = lambda x, *args, **kwargs: x
+ hook2 = lambda x, *args, **kwargs: x
+ assert hook1 is not hook2
+ s = requests.Session()
+ s.hooks['response'].append(hook2)
+ r = requests.Request('GET', httpbin(), hooks={'response': [hook1]})
+ prep = s.prepare_request(r)
+ assert prep.hooks['response'] == [hook1]
+
+ def test_prepared_request_hook(self, httpbin):
+ def hook(resp, **kwargs):
+ resp.hook_working = True
+ return resp
+
+ req = requests.Request('GET', httpbin(), hooks={'response': hook})
+ prep = req.prepare()
+
+ s = requests.Session()
+ s.proxies = getproxies()
+ resp = s.send(prep)
+
+ assert hasattr(resp, 'hook_working')
+
+ def test_prepared_from_session(self, httpbin):
+ class DummyAuth(requests.auth.AuthBase):
+ def __call__(self, r):
+ r.headers['Dummy-Auth-Test'] = 'dummy-auth-test-ok'
+ return r
+
+ req = requests.Request('GET', httpbin('headers'))
+ assert not req.auth
+
+ s = requests.Session()
+ s.auth = DummyAuth()
+
+ prep = s.prepare_request(req)
+ resp = s.send(prep)
+
+ assert resp.json()['headers'][
+ 'Dummy-Auth-Test'] == 'dummy-auth-test-ok'
+
+ def test_prepare_request_with_bytestring_url(self):
+ req = requests.Request('GET', b'https://httpbin.org/')
+ s = requests.Session()
+ prep = s.prepare_request(req)
+ assert prep.url == "https://httpbin.org/"
+
+ def test_links(self):
+ r = requests.Response()
+ r.headers = {
+ 'cache-control': 'public, max-age=60, s-maxage=60',
+ 'connection': 'keep-alive',
+ 'content-encoding': 'gzip',
+ 'content-type': 'application/json; charset=utf-8',
+ 'date': 'Sat, 26 Jan 2013 16:47:56 GMT',
+ 'etag': '"6ff6a73c0e446c1f61614769e3ceb778"',
+ 'last-modified': 'Sat, 26 Jan 2013 16:22:39 GMT',
+ 'link': ('<https://api.github.com/users/kennethreitz/repos?'
+ 'page=2&per_page=10>; rel="next", <https://api.github.'
+ 'com/users/kennethreitz/repos?page=7&per_page=10>; '
+ ' rel="last"'),
+ 'server': 'GitHub.com',
+ 'status': '200 OK',
+ 'vary': 'Accept',
+ 'x-content-type-options': 'nosniff',
+ 'x-github-media-type': 'github.beta',
+ 'x-ratelimit-limit': '60',
+ 'x-ratelimit-remaining': '57'
+ }
+ assert r.links['next']['rel'] == 'next'
+
+ def test_cookie_parameters(self):
+ key = 'some_cookie'
+ value = 'some_value'
+ secure = True
+ domain = 'test.com'
+ rest = {'HttpOnly': True}
+
+ jar = requests.cookies.RequestsCookieJar()
+ jar.set(key, value, secure=secure, domain=domain, rest=rest)
+
+ assert len(jar) == 1
+ assert 'some_cookie' in jar
+
+ cookie = list(jar)[0]
+ assert cookie.secure == secure
+ assert cookie.domain == domain
+ assert cookie._rest['HttpOnly'] == rest['HttpOnly']
+
+ def test_cookie_as_dict_keeps_len(self):
+ key = 'some_cookie'
+ value = 'some_value'
+
+ key1 = 'some_cookie1'
+ value1 = 'some_value1'
+
+ jar = requests.cookies.RequestsCookieJar()
+ jar.set(key, value)
+ jar.set(key1, value1)
+
+ d1 = dict(jar)
+ d2 = dict(jar.iteritems())
+ d3 = dict(jar.items())
+
+ assert len(jar) == 2
+ assert len(d1) == 2
+ assert len(d2) == 2
+ assert len(d3) == 2
+
+ def test_cookie_as_dict_keeps_items(self):
+ key = 'some_cookie'
+ value = 'some_value'
+
+ key1 = 'some_cookie1'
+ value1 = 'some_value1'
+
+ jar = requests.cookies.RequestsCookieJar()
+ jar.set(key, value)
+ jar.set(key1, value1)
+
+ d1 = dict(jar)
+ d2 = dict(jar.iteritems())
+ d3 = dict(jar.items())
+
+ assert d1['some_cookie'] == 'some_value'
+ assert d2['some_cookie'] == 'some_value'
+ assert d3['some_cookie1'] == 'some_value1'
+
+ def test_cookie_as_dict_keys(self):
+ key = 'some_cookie'
+ value = 'some_value'
+
+ key1 = 'some_cookie1'
+ value1 = 'some_value1'
+
+ jar = requests.cookies.RequestsCookieJar()
+ jar.set(key, value)
+ jar.set(key1, value1)
+
+ keys = jar.keys()
+ assert keys == list(keys)
+ # make sure one can use keys multiple times
+ assert list(keys) == list(keys)
+
+ def test_cookie_as_dict_values(self):
+ key = 'some_cookie'
+ value = 'some_value'
+
+ key1 = 'some_cookie1'
+ value1 = 'some_value1'
+
+ jar = requests.cookies.RequestsCookieJar()
+ jar.set(key, value)
+ jar.set(key1, value1)
+
+ values = jar.values()
+ assert values == list(values)
+ # make sure one can use values multiple times
+ assert list(values) == list(values)
+
+ def test_cookie_as_dict_items(self):
+ key = 'some_cookie'
+ value = 'some_value'
+
+ key1 = 'some_cookie1'
+ value1 = 'some_value1'
+
+ jar = requests.cookies.RequestsCookieJar()
+ jar.set(key, value)
+ jar.set(key1, value1)
+
+ items = jar.items()
+ assert items == list(items)
+ # make sure one can use items multiple times
+ assert list(items) == list(items)
+
+ def test_time_elapsed_blank(self, httpbin):
+ r = requests.get(httpbin('get'))
+ td = r.elapsed
+ total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600)
+ * 10**6) / 10**6)
+ assert total_seconds > 0.0
+
+ def test_response_is_iterable(self):
+ r = requests.Response()
+ io = StringIO.StringIO('abc')
+ read_ = io.read
+
+ def read_mock(amt, decode_content=None):
+ return read_(amt)
+ setattr(io, 'read', read_mock)
+ r.raw = io
+ assert next(iter(r))
+ io.close()
+
+ def test_response_decode_unicode(self):
+ """
+ When called with decode_unicode, Response.iter_content should always
+ return unicode.
+ """
+ r = requests.Response()
+ r._content_consumed = True
+ r._content = b'the content'
+ r.encoding = 'ascii'
+
+ chunks = r.iter_content(decode_unicode=True)
+ assert all(isinstance(chunk, str) for chunk in chunks)
+
+ # also for streaming
+ r = requests.Response()
+ r.raw = io.BytesIO(b'the content')
+ r.encoding = 'ascii'
+ chunks = r.iter_content(decode_unicode=True)
+ assert all(isinstance(chunk, str) for chunk in chunks)
+
+ def test_request_and_response_are_pickleable(self, httpbin):
+ r = requests.get(httpbin('get'))
+
+ # verify we can pickle the original request
+ assert pickle.loads(pickle.dumps(r.request))
+
+ # verify we can pickle the response and that we have access to
+ # the original request.
+ pr = pickle.loads(pickle.dumps(r))
+ assert r.request.url == pr.request.url
+ assert r.request.headers == pr.request.headers
+
+ def test_get_auth_from_url(self):
+ url = 'http://user:pass@complex.url.com/path?query=yes'
+ assert ('user', 'pass') == requests.utils.get_auth_from_url(url)
+
+ def test_get_auth_from_url_encoded_spaces(self):
+ url = 'http://user:pass%20pass@complex.url.com/path?query=yes'
+ assert ('user', 'pass pass') == requests.utils.get_auth_from_url(url)
+
+ def test_get_auth_from_url_not_encoded_spaces(self):
+ url = 'http://user:pass pass@complex.url.com/path?query=yes'
+ assert ('user', 'pass pass') == requests.utils.get_auth_from_url(url)
+
+ def test_get_auth_from_url_percent_chars(self):
+ url = 'http://user%25user:pass@complex.url.com/path?query=yes'
+ assert ('user%user', 'pass') == requests.utils.get_auth_from_url(url)
+
+ def test_get_auth_from_url_encoded_hashes(self):
+ url = 'http://user:pass%23pass@complex.url.com/path?query=yes'
+ assert ('user', 'pass#pass') == requests.utils.get_auth_from_url(url)
+
+ def test_cannot_send_unprepared_requests(self, httpbin):
+ r = requests.Request(url=httpbin())
+ with pytest.raises(ValueError):
+ requests.Session().send(r)
+
+ def test_http_error(self):
+ error = requests.exceptions.HTTPError()
+ assert not error.response
+ response = requests.Response()
+ error = requests.exceptions.HTTPError(response=response)
+ assert error.response == response
+ error = requests.exceptions.HTTPError('message', response=response)
+ assert str(error) == 'message'
+ assert error.response == response
+
+ def test_session_pickling(self, httpbin):
+ r = requests.Request('GET', httpbin('get'))
+ s = requests.Session()
+
+ s = pickle.loads(pickle.dumps(s))
+ s.proxies = getproxies()
+
+ r = s.send(r.prepare())
+ assert r.status_code == 200
+
+ def test_fixes_1329(self, httpbin):
+ """
+ Ensure that header updates are done case-insensitively.
+ """
+ s = requests.Session()
+ s.headers.update({'ACCEPT': 'BOGUS'})
+ s.headers.update({'accept': 'application/json'})
+ r = s.get(httpbin('get'))
+ headers = r.request.headers
+ assert headers['accept'] == 'application/json'
+ assert headers['Accept'] == 'application/json'
+ assert headers['ACCEPT'] == 'application/json'
+
+ def test_uppercase_scheme_redirect(self, httpbin):
+ parts = urlparse(httpbin('html'))
+ url = "HTTP://" + parts.netloc + parts.path
+ r = requests.get(httpbin('redirect-to'), params={'url': url})
+ assert r.status_code == 200
+ assert r.url.lower() == url.lower()
+
+ def test_transport_adapter_ordering(self):
+ s = requests.Session()
+ order = ['https://', 'http://']
+ assert order == list(s.adapters)
+ s.mount('http://git', HTTPAdapter())
+ s.mount('http://github', HTTPAdapter())
+ s.mount('http://github.com', HTTPAdapter())
+ s.mount('http://github.com/about/', HTTPAdapter())
+ order = [
+ 'http://github.com/about/',
+ 'http://github.com',
+ 'http://github',
+ 'http://git',
+ 'https://',
+ 'http://',
+ ]
+ assert order == list(s.adapters)
+ s.mount('http://gittip', HTTPAdapter())
+ s.mount('http://gittip.com', HTTPAdapter())
+ s.mount('http://gittip.com/about/', HTTPAdapter())
+ order = [
+ 'http://github.com/about/',
+ 'http://gittip.com/about/',
+ 'http://github.com',
+ 'http://gittip.com',
+ 'http://github',
+ 'http://gittip',
+ 'http://git',
+ 'https://',
+ 'http://',
+ ]
+ assert order == list(s.adapters)
+ s2 = requests.Session()
+ s2.adapters = {'http://': HTTPAdapter()}
+ s2.mount('https://', HTTPAdapter())
+ assert 'http://' in s2.adapters
+ assert 'https://' in s2.adapters
+
+ def test_header_remove_is_case_insensitive(self, httpbin):
+ # From issue #1321
+ s = requests.Session()
+ s.headers['foo'] = 'bar'
+ r = s.get(httpbin('get'), headers={'FOO': None})
+ assert 'foo' not in r.request.headers
+
+ def test_params_are_merged_case_sensitive(self, httpbin):
+ s = requests.Session()
+ s.params['foo'] = 'bar'
+ r = s.get(httpbin('get'), params={'FOO': 'bar'})
+ assert r.json()['args'] == {'foo': 'bar', 'FOO': 'bar'}
+
+ def test_long_authinfo_in_url(self):
+ url = 'http://{0}:{1}@{2}:9000/path?query#frag'.format(
+ 'E8A3BE87-9E3F-4620-8858-95478E385B5B',
+ 'EA770032-DA4D-4D84-8CE9-29C6D910BF1E',
+ 'exactly-------------sixty-----------three------------characters',
+ )
+ r = requests.Request('GET', url).prepare()
+ assert r.url == url
+
+ def test_header_keys_are_native(self, httpbin):
+ headers = {u('unicode'): 'blah', 'byte'.encode('ascii'): 'blah'}
+ r = requests.Request('GET', httpbin('get'), headers=headers)
+ p = r.prepare()
+
+ # This is testing that they are builtin strings. A bit weird, but there
+ # we go.
+ assert 'unicode' in p.headers.keys()
+ assert 'byte' in p.headers.keys()
+
+ def test_can_send_nonstring_objects_with_files(self, httpbin):
+ data = {'a': 0.0}
+ files = {'b': 'foo'}
+ r = requests.Request('POST', httpbin('post'), data=data, files=files)
+ p = r.prepare()
+
+ assert 'multipart/form-data' in p.headers['Content-Type']
+
+ def test_can_send_bytes_bytearray_objects_with_files(self, httpbin):
+ # Test bytes:
+ data = {'a': 'this is a string'}
+ files = {'b': b'foo'}
+ r = requests.Request('POST', httpbin('post'), data=data, files=files)
+ p = r.prepare()
+ assert 'multipart/form-data' in p.headers['Content-Type']
+ # Test bytearrays:
+ files = {'b': bytearray(b'foo')}
+ r = requests.Request('POST', httpbin('post'), data=data, files=files)
+ p = r.prepare()
+ assert 'multipart/form-data' in p.headers['Content-Type']
+
+ def test_can_send_file_object_with_non_string_filename(self, httpbin):
+ f = io.BytesIO()
+ f.name = 2
+ r = requests.Request('POST', httpbin('post'), files={'f': f})
+ p = r.prepare()
+
+ assert 'multipart/form-data' in p.headers['Content-Type']
+
+ def test_autoset_header_values_are_native(self, httpbin):
+ data = 'this is a string'
+ length = '16'
+ req = requests.Request('POST', httpbin('post'), data=data)
+ p = req.prepare()
+
+ assert p.headers['Content-Length'] == length
+
+ def test_nonhttp_schemes_dont_check_URLs(self):
+ test_urls = (
+ 'data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==',
+ 'file:///etc/passwd',
+ 'magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431',
+ )
+ for test_url in test_urls:
+ req = requests.Request('GET', test_url)
+ preq = req.prepare()
+ assert test_url == preq.url
+
+ def test_auth_is_stripped_on_redirect_off_host(self, httpbin):
+ r = requests.get(
+ httpbin('redirect-to'),
+ params={'url': 'http://www.google.co.uk'},
+ auth=('user', 'pass'),
+ )
+ assert r.history[0].request.headers['Authorization']
+ assert not r.request.headers.get('Authorization', '')
+
+ def test_auth_is_retained_for_redirect_on_host(self, httpbin):
+ r = requests.get(httpbin('redirect/1'), auth=('user', 'pass'))
+ h1 = r.history[0].request.headers['Authorization']
+ h2 = r.request.headers['Authorization']
+
+ assert h1 == h2
+
+ def test_manual_redirect_with_partial_body_read(self, httpbin):
+ s = requests.Session()
+ r1 = s.get(httpbin('redirect/2'), allow_redirects=False, stream=True)
+ assert r1.is_redirect
+ rg = s.resolve_redirects(r1, r1.request, stream=True)
+
+ # read only the first eight bytes of the response body,
+ # then follow the redirect
+ r1.iter_content(8)
+ r2 = next(rg)
+ assert r2.is_redirect
+
+ # read all of the response via iter_content,
+ # then follow the redirect
+ for _ in r2.iter_content():
+ pass
+ r3 = next(rg)
+ assert not r3.is_redirect
+
+ def _patch_adapter_gzipped_redirect(self, session, url):
+ adapter = session.get_adapter(url=url)
+ org_build_response = adapter.build_response
+ self._patched_response = False
+
+ def build_response(*args, **kwargs):
+ resp = org_build_response(*args, **kwargs)
+ if not self._patched_response:
+ resp.raw.headers['content-encoding'] = 'gzip'
+ self._patched_response = True
+ return resp
+
+ adapter.build_response = build_response
+
+ def test_redirect_with_wrong_gzipped_header(self, httpbin):
+ s = requests.Session()
+ url = httpbin('redirect/1')
+ self._patch_adapter_gzipped_redirect(s, url)
+ s.get(url)
+
+ def test_basic_auth_str_is_always_native(self):
+ s = _basic_auth_str("test", "test")
+ assert isinstance(s, builtin_str)
+ assert s == "Basic dGVzdDp0ZXN0"
+
+ def test_requests_history_is_saved(self, httpbin):
+ r = requests.get(httpbin('redirect/5'))
+ total = r.history[-1].history
+ i = 0
+ for item in r.history:
+ assert item.history == total[0:i]
+ i = i + 1
+
+ def test_json_param_post_content_type_works(self, httpbin):
+ r = requests.post(
+ httpbin('post'),
+ json={'life': 42}
+ )
+ assert r.status_code == 200
+ assert 'application/json' in r.request.headers['Content-Type']
+ assert {'life': 42} == r.json()['json']
+
+ def test_json_param_post_should_not_override_data_param(self, httpbin):
+ r = requests.Request(method='POST', url=httpbin('post'),
+ data={'stuff': 'elixr'},
+ json={'music': 'flute'})
+ prep = r.prepare()
+ assert 'stuff=elixr' == prep.body
+
+ def test_response_iter_lines(self, httpbin):
+ r = requests.get(httpbin('stream/4'), stream=True)
+ assert r.status_code == 200
+
+ it = r.iter_lines()
+ next(it)
+ assert len(list(it)) == 3
+
+ def test_unconsumed_session_response_closes_connection(self, httpbin):
+ s = requests.session()
+
+ with contextlib.closing(s.get(httpbin('stream/4'), stream=True)) as response:
+ pass
+
+ assert response._content_consumed is False
+ assert response.raw.closed
+
+ @pytest.mark.xfail
+ def test_response_iter_lines_reentrant(self, httpbin):
+ """Response.iter_lines() is not reentrant safe"""
+ r = requests.get(httpbin('stream/4'), stream=True)
+ assert r.status_code == 200
+
+ next(r.iter_lines())
+ assert len(list(r.iter_lines())) == 3
+
+
+class TestContentEncodingDetection(unittest.TestCase):
+
+ def test_none(self):
+ encodings = requests.utils.get_encodings_from_content('')
+ assert not len(encodings)
+
+ def test_html_charset(self):
+ """HTML5 meta charset attribute"""
+ content = '<meta charset="UTF-8">'
+ encodings = requests.utils.get_encodings_from_content(content)
+ assert len(encodings) == 1
+ assert encodings[0] == 'UTF-8'
+
+ def test_html4_pragma(self):
+ """HTML4 pragma directive"""
+ content = '<meta http-equiv="Content-type" content="text/html;charset=UTF-8">'
+ encodings = requests.utils.get_encodings_from_content(content)
+ assert len(encodings) == 1
+ assert encodings[0] == 'UTF-8'
+
+ def test_xhtml_pragma(self):
+ """XHTML 1.x served with text/html MIME type"""
+ content = '<meta http-equiv="Content-type" content="text/html;charset=UTF-8" />'
+ encodings = requests.utils.get_encodings_from_content(content)
+ assert len(encodings) == 1
+ assert encodings[0] == 'UTF-8'
+
+ def test_xml(self):
+ """XHTML 1.x served as XML"""
+ content = '<?xml version="1.0" encoding="UTF-8"?>'
+ encodings = requests.utils.get_encodings_from_content(content)
+ assert len(encodings) == 1
+ assert encodings[0] == 'UTF-8'
+
+ def test_precedence(self):
+ content = '''
+ <?xml version="1.0" encoding="XML"?>
+ <meta charset="HTML5">
+ <meta http-equiv="Content-type" content="text/html;charset=HTML4" />
+ '''.strip()
+ encodings = requests.utils.get_encodings_from_content(content)
+ assert encodings == ['HTML5', 'HTML4', 'XML']
+
+
+class TestCaseInsensitiveDict(unittest.TestCase):
+
+ def test_mapping_init(self):
+ cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'})
+ assert len(cid) == 2
+ assert 'foo' in cid
+ assert 'bar' in cid
+
+ def test_iterable_init(self):
+ cid = CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')])
+ assert len(cid) == 2
+ assert 'foo' in cid
+ assert 'bar' in cid
+
+ def test_kwargs_init(self):
+ cid = CaseInsensitiveDict(FOO='foo', BAr='bar')
+ assert len(cid) == 2
+ assert 'foo' in cid
+ assert 'bar' in cid
+
+ def test_docstring_example(self):
+ cid = CaseInsensitiveDict()
+ cid['Accept'] = 'application/json'
+ assert cid['aCCEPT'] == 'application/json'
+ assert list(cid) == ['Accept']
+
+ def test_len(self):
+ cid = CaseInsensitiveDict({'a': 'a', 'b': 'b'})
+ cid['A'] = 'a'
+ assert len(cid) == 2
+
+ def test_getitem(self):
+ cid = CaseInsensitiveDict({'Spam': 'blueval'})
+ assert cid['spam'] == 'blueval'
+ assert cid['SPAM'] == 'blueval'
+
+ def test_fixes_649(self):
+ """__setitem__ should behave case-insensitively."""
+ cid = CaseInsensitiveDict()
+ cid['spam'] = 'oneval'
+ cid['Spam'] = 'twoval'
+ cid['sPAM'] = 'redval'
+ cid['SPAM'] = 'blueval'
+ assert cid['spam'] == 'blueval'
+ assert cid['SPAM'] == 'blueval'
+ assert list(cid.keys()) == ['SPAM']
+
+ def test_delitem(self):
+ cid = CaseInsensitiveDict()
+ cid['Spam'] = 'someval'
+ del cid['sPam']
+ assert 'spam' not in cid
+ assert len(cid) == 0
+
+ def test_contains(self):
+ cid = CaseInsensitiveDict()
+ cid['Spam'] = 'someval'
+ assert 'Spam' in cid
+ assert 'spam' in cid
+ assert 'SPAM' in cid
+ assert 'sPam' in cid
+ assert 'notspam' not in cid
+
+ def test_get(self):
+ cid = CaseInsensitiveDict()
+ cid['spam'] = 'oneval'
+ cid['SPAM'] = 'blueval'
+ assert cid.get('spam') == 'blueval'
+ assert cid.get('SPAM') == 'blueval'
+ assert cid.get('sPam') == 'blueval'
+ assert cid.get('notspam', 'default') == 'default'
+
+ def test_update(self):
+ cid = CaseInsensitiveDict()
+ cid['spam'] = 'blueval'
+ cid.update({'sPam': 'notblueval'})
+ assert cid['spam'] == 'notblueval'
+ cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'})
+ cid.update({'fOO': 'anotherfoo', 'bAR': 'anotherbar'})
+ assert len(cid) == 2
+ assert cid['foo'] == 'anotherfoo'
+ assert cid['bar'] == 'anotherbar'
+
+ def test_update_retains_unchanged(self):
+ cid = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'})
+ cid.update({'foo': 'newfoo'})
+ assert cid['bar'] == 'bar'
+
+ def test_iter(self):
+ cid = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'})
+ keys = frozenset(['Spam', 'Eggs'])
+ assert frozenset(iter(cid)) == keys
+
+ def test_equality(self):
+ cid = CaseInsensitiveDict({'SPAM': 'blueval', 'Eggs': 'redval'})
+ othercid = CaseInsensitiveDict({'spam': 'blueval', 'eggs': 'redval'})
+ assert cid == othercid
+ del othercid['spam']
+ assert cid != othercid
+ assert cid == {'spam': 'blueval', 'eggs': 'redval'}
+ assert cid != object()
+
+ def test_setdefault(self):
+ cid = CaseInsensitiveDict({'Spam': 'blueval'})
+ assert cid.setdefault('spam', 'notblueval') == 'blueval'
+ assert cid.setdefault('notspam', 'notblueval') == 'notblueval'
+
+ def test_lower_items(self):
+ cid = CaseInsensitiveDict({
+ 'Accept': 'application/json',
+ 'user-Agent': 'requests',
+ })
+ keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items())
+ lowerkeyset = frozenset(['accept', 'user-agent'])
+ assert keyset == lowerkeyset
+
+ def test_preserve_key_case(self):
+ cid = CaseInsensitiveDict({
+ 'Accept': 'application/json',
+ 'user-Agent': 'requests',
+ })
+ keyset = frozenset(['Accept', 'user-Agent'])
+ assert frozenset(i[0] for i in cid.items()) == keyset
+ assert frozenset(cid.keys()) == keyset
+ assert frozenset(cid) == keyset
+
+ def test_preserve_last_key_case(self):
+ cid = CaseInsensitiveDict({
+ 'Accept': 'application/json',
+ 'user-Agent': 'requests',
+ })
+ cid.update({'ACCEPT': 'application/json'})
+ cid['USER-AGENT'] = 'requests'
+ keyset = frozenset(['ACCEPT', 'USER-AGENT'])
+ assert frozenset(i[0] for i in cid.items()) == keyset
+ assert frozenset(cid.keys()) == keyset
+ assert frozenset(cid) == keyset
+
+ def test_copy(self):
+ cid = CaseInsensitiveDict({
+ 'Accept': 'application/json',
+ 'user-Agent': 'requests',
+ })
+ cid_copy = cid.copy()
+ assert cid == cid_copy
+ cid['changed'] = True
+ assert cid != cid_copy
+
+
+class UtilsTestCase(unittest.TestCase):
+
+ def test_super_len_io_streams(self):
+ """ Ensures that we properly deal with different kinds of IO streams. """
+ # uses StringIO or io.StringIO (see import above)
+ from io import BytesIO
+ from requests.utils import super_len
+
+ assert super_len(StringIO.StringIO()) == 0
+ assert super_len(
+ StringIO.StringIO('with so much drama in the LBC')) == 29
+
+ assert super_len(BytesIO()) == 0
+ assert super_len(
+ BytesIO(b"it's kinda hard bein' snoop d-o-double-g")) == 40
+
+ try:
+ import cStringIO
+ except ImportError:
+ pass
+ else:
+ assert super_len(
+ cStringIO.StringIO('but some how, some way...')) == 25
+
+ def test_super_len_correctly_calculates_len_of_partially_read_file(self):
+ """Ensure that we handle partially consumed file like objects."""
+ from requests.utils import super_len
+ s = StringIO.StringIO()
+ s.write('foobarbogus')
+ assert super_len(s) == 0
+
+ def test_get_environ_proxies_ip_ranges(self):
+ """Ensures that IP addresses are correctly matches with ranges
+ in no_proxy variable."""
+ from requests.utils import get_environ_proxies
+ os.environ['no_proxy'] = "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1"
+ assert get_environ_proxies('http://192.168.0.1:5000/') == {}
+ assert get_environ_proxies('http://192.168.0.1/') == {}
+ assert get_environ_proxies('http://172.16.1.1/') == {}
+ assert get_environ_proxies('http://172.16.1.1:5000/') == {}
+ assert get_environ_proxies('http://192.168.1.1:5000/') != {}
+ assert get_environ_proxies('http://192.168.1.1/') != {}
+
+ def test_get_environ_proxies(self):
+ """Ensures that IP addresses are correctly matches with ranges
+ in no_proxy variable."""
+ from requests.utils import get_environ_proxies
+ os.environ['no_proxy'] = "127.0.0.1,localhost.localdomain,192.168.0.0/24,172.16.1.1"
+ assert get_environ_proxies(
+ 'http://localhost.localdomain:5000/v1.0/') == {}
+ assert get_environ_proxies('http://www.requests.com/') != {}
+
+ def test_select_proxies(self):
+ """Make sure we can select per-host proxies correctly."""
+ from requests.utils import select_proxy
+ proxies = {'http': 'http://http.proxy',
+ 'http://some.host': 'http://some.host.proxy'}
+ assert select_proxy('hTTp://u:p@Some.Host/path', proxies) == 'http://some.host.proxy'
+ assert select_proxy('hTTp://u:p@Other.Host/path', proxies) == 'http://http.proxy'
+ assert select_proxy('hTTps://Other.Host', proxies) is None
+
+ def test_guess_filename_when_int(self):
+ from requests.utils import guess_filename
+ assert None is guess_filename(1)
+
+ def test_guess_filename_when_filename_is_an_int(self):
+ from requests.utils import guess_filename
+ fake = type('Fake', (object,), {'name': 1})()
+ assert None is guess_filename(fake)
+
+ def test_guess_filename_with_file_like_obj(self):
+ from requests.utils import guess_filename
+ from requests import compat
+ fake = type('Fake', (object,), {'name': b'value'})()
+ guessed_name = guess_filename(fake)
+ assert b'value' == guessed_name
+ assert isinstance(guessed_name, compat.bytes)
+
+ def test_guess_filename_with_unicode_name(self):
+ from requests.utils import guess_filename
+ from requests import compat
+ filename = b'value'.decode('utf-8')
+ fake = type('Fake', (object,), {'name': filename})()
+ guessed_name = guess_filename(fake)
+ assert filename == guessed_name
+ assert isinstance(guessed_name, compat.str)
+
+ def test_is_ipv4_address(self):
+ from requests.utils import is_ipv4_address
+ assert is_ipv4_address('8.8.8.8')
+ assert not is_ipv4_address('8.8.8.8.8')
+ assert not is_ipv4_address('localhost.localdomain')
+
+ def test_is_valid_cidr(self):
+ from requests.utils import is_valid_cidr
+ assert not is_valid_cidr('8.8.8.8')
+ assert is_valid_cidr('192.168.1.0/24')
+
+ def test_dotted_netmask(self):
+ from requests.utils import dotted_netmask
+ assert dotted_netmask(8) == '255.0.0.0'
+ assert dotted_netmask(24) == '255.255.255.0'
+ assert dotted_netmask(25) == '255.255.255.128'
+
+ def test_address_in_network(self):
+ from requests.utils import address_in_network
+ assert address_in_network('192.168.1.1', '192.168.1.0/24')
+ assert not address_in_network('172.16.0.1', '192.168.1.0/24')
+
+ def test_get_auth_from_url(self):
+ """Ensures that username and password in well-encoded URI as per
+ RFC 3986 are correclty extracted."""
+ from requests.utils import get_auth_from_url
+ from requests.compat import quote
+ percent_encoding_test_chars = "%!*'();:@&=+$,/?#[] "
+ url_address = "request.com/url.html#test"
+ url = "http://" + quote(
+ percent_encoding_test_chars, '') + ':' + quote(
+ percent_encoding_test_chars, '') + '@' + url_address
+ (username, password) = get_auth_from_url(url)
+ assert username == percent_encoding_test_chars
+ assert password == percent_encoding_test_chars
+
+ def test_requote_uri_with_unquoted_percents(self):
+ """Ensure we handle unquoted percent signs in redirects.
+
+ See: https://github.com/kennethreitz/requests/issues/2356
+ """
+ from requests.utils import requote_uri
+ bad_uri = 'http://example.com/fiz?buz=%ppicture'
+ quoted = 'http://example.com/fiz?buz=%25ppicture'
+ assert quoted == requote_uri(bad_uri)
+
+ def test_requote_uri_properly_requotes(self):
+ """Ensure requoting doesn't break expectations."""
+ from requests.utils import requote_uri
+ quoted = 'http://example.com/fiz?buz=%25ppicture'
+ assert quoted == requote_uri(quoted)
+
+
+class TestMorselToCookieExpires(unittest.TestCase):
+
+ """Tests for morsel_to_cookie when morsel contains expires."""
+
+ def test_expires_valid_str(self):
+ """Test case where we convert expires from string time."""
+
+ morsel = Morsel()
+ morsel['expires'] = 'Thu, 01-Jan-1970 00:00:01 GMT'
+ cookie = morsel_to_cookie(morsel)
+ assert cookie.expires == 1
+
+ def test_expires_invalid_int(self):
+ """Test case where an invalid type is passed for expires."""
+
+ morsel = Morsel()
+ morsel['expires'] = 100
+ with pytest.raises(TypeError):
+ morsel_to_cookie(morsel)
+
+ def test_expires_invalid_str(self):
+ """Test case where an invalid string is input."""
+
+ morsel = Morsel()
+ morsel['expires'] = 'woops'
+ with pytest.raises(ValueError):
+ morsel_to_cookie(morsel)
+
+ def test_expires_none(self):
+ """Test case where expires is None."""
+
+ morsel = Morsel()
+ morsel['expires'] = None
+ cookie = morsel_to_cookie(morsel)
+ assert cookie.expires is None
+
+
+class TestMorselToCookieMaxAge(unittest.TestCase):
+
+ """Tests for morsel_to_cookie when morsel contains max-age."""
+
+ def test_max_age_valid_int(self):
+ """Test case where a valid max age in seconds is passed."""
+
+ morsel = Morsel()
+ morsel['max-age'] = 60
+ cookie = morsel_to_cookie(morsel)
+ assert isinstance(cookie.expires, int)
+
+ def test_max_age_invalid_str(self):
+ """Test case where a invalid max age is passed."""
+
+ morsel = Morsel()
+ morsel['max-age'] = 'woops'
+ with pytest.raises(TypeError):
+ morsel_to_cookie(morsel)
+
+
+class TestTimeout:
+ def test_stream_timeout(self, httpbin):
+ try:
+ requests.get(httpbin('delay/10'), timeout=2.0)
+ except requests.exceptions.Timeout as e:
+ assert 'Read timed out' in e.args[0].args[0]
+
+ def test_invalid_timeout(self, httpbin):
+ with pytest.raises(ValueError) as e:
+ requests.get(httpbin('get'), timeout=(3, 4, 5))
+ assert '(connect, read)' in str(e)
+
+ with pytest.raises(ValueError) as e:
+ requests.get(httpbin('get'), timeout="foo")
+ assert 'must be an int or float' in str(e)
+
+ def test_none_timeout(self, httpbin):
+ """ Check that you can set None as a valid timeout value.
+
+ To actually test this behavior, we'd want to check that setting the
+ timeout to None actually lets the request block past the system default
+ timeout. However, this would make the test suite unbearably slow.
+ Instead we verify that setting the timeout to None does not prevent the
+ request from succeeding.
+ """
+ r = requests.get(httpbin('get'), timeout=None)
+ assert r.status_code == 200
+
+ def test_read_timeout(self, httpbin):
+ try:
+ requests.get(httpbin('delay/10'), timeout=(None, 0.1))
+ assert False, "The recv() request should time out."
+ except ReadTimeout:
+ pass
+
+ def test_connect_timeout(self):
+ try:
+ requests.get(TARPIT, timeout=(0.1, None))
+ assert False, "The connect() request should time out."
+ except ConnectTimeout as e:
+ assert isinstance(e, ConnectionError)
+ assert isinstance(e, Timeout)
+
+ def test_total_timeout_connect(self):
+ try:
+ requests.get(TARPIT, timeout=(0.1, 0.1))
+ assert False, "The connect() request should time out."
+ except ConnectTimeout:
+ pass
+
+ def test_encoded_methods(self, httpbin):
+ """See: https://github.com/kennethreitz/requests/issues/2316"""
+ r = requests.request(b'GET', httpbin('get'))
+ assert r.ok
+
+
+SendCall = collections.namedtuple('SendCall', ('args', 'kwargs'))
+
+
+class RedirectSession(SessionRedirectMixin):
+ def __init__(self, order_of_redirects):
+ self.redirects = order_of_redirects
+ self.calls = []
+ self.max_redirects = 30
+ self.cookies = {}
+ self.trust_env = False
+
+ def send(self, *args, **kwargs):
+ self.calls.append(SendCall(args, kwargs))
+ return self.build_response()
+
+ def build_response(self):
+ request = self.calls[-1].args[0]
+ r = requests.Response()
+
+ try:
+ r.status_code = int(self.redirects.pop(0))
+ except IndexError:
+ r.status_code = 200
+
+ r.headers = CaseInsensitiveDict({'Location': '/'})
+ r.raw = self._build_raw()
+ r.request = request
+ return r
+
+ def _build_raw(self):
+ string = StringIO.StringIO('')
+ setattr(string, 'release_conn', lambda *args: args)
+ return string
+
+
+class TestRedirects:
+ default_keyword_args = {
+ 'stream': False,
+ 'verify': True,
+ 'cert': None,
+ 'timeout': None,
+ 'allow_redirects': False,
+ 'proxies': {},
+ }
+
+ def test_requests_are_updated_each_time(self, httpbin):
+ session = RedirectSession([303, 307])
+ prep = requests.Request('POST', httpbin('post')).prepare()
+ r0 = session.send(prep)
+ assert r0.request.method == 'POST'
+ assert session.calls[-1] == SendCall((r0.request,), {})
+ redirect_generator = session.resolve_redirects(r0, prep)
+ for response in redirect_generator:
+ assert response.request.method == 'GET'
+ send_call = SendCall((response.request,),
+ TestRedirects.default_keyword_args)
+ assert session.calls[-1] == send_call
+
+
+
+@pytest.fixture
+def list_of_tuples():
+ return [
+ (('a', 'b'), ('c', 'd')),
+ (('c', 'd'), ('a', 'b')),
+ (('a', 'b'), ('c', 'd'), ('e', 'f')),
+ ]
+
+
+def test_data_argument_accepts_tuples(list_of_tuples):
+ """
+ Ensure that the data argument will accept tuples of strings
+ and properly encode them.
+ """
+ for data in list_of_tuples:
+ p = PreparedRequest()
+ p.prepare(
+ method='GET',
+ url='http://www.example.com',
+ data=data,
+ hooks=default_hooks()
+ )
+ assert p.body == urlencode(data)
+
+
+def assert_copy(p, p_copy):
+ for attr in ('method', 'url', 'headers', '_cookies', 'body', 'hooks'):
+ assert getattr(p, attr) == getattr(p_copy, attr)
+
+
+def test_prepared_request_empty_copy():
+ p = PreparedRequest()
+ assert_copy(p, p.copy())
+
+
+def test_prepared_request_no_cookies_copy():
+ p = PreparedRequest()
+ p.prepare(
+ method='GET',
+ url='http://www.example.com',
+ data='foo=bar',
+ hooks=default_hooks()
+ )
+ assert_copy(p, p.copy())
+
+
+def test_prepared_request_complete_copy():
+ p = PreparedRequest()
+ p.prepare(
+ method='GET',
+ url='http://www.example.com',
+ data='foo=bar',
+ hooks=default_hooks(),
+ cookies={'foo': 'bar'}
+ )
+ assert_copy(p, p.copy())
+
+
+def test_prepare_unicode_url():
+ p = PreparedRequest()
+ p.prepare(
+ method='GET',
+ url=u('http://www.example.com/üniçø∂é'),
+ )
+ assert_copy(p, p.copy())
+
+
+def test_urllib3_retries(httpbin):
+ from requests.packages.urllib3.util import Retry
+ s = requests.Session()
+ s.mount('http://', HTTPAdapter(max_retries=Retry(
+ total=2, status_forcelist=[500]
+ )))
+
+ with pytest.raises(RetryError):
+ s.get(httpbin('status/500'))
+
+
+def test_urllib3_pool_connection_closed(httpbin):
+ s = requests.Session()
+ s.mount('http://', HTTPAdapter(pool_connections=0, pool_maxsize=0))
+
+ try:
+ s.get(httpbin('status/200'))
+ except ConnectionError as e:
+ assert u"Pool is closed." in str(e)
+
+
+def test_vendor_aliases():
+ from requests.packages import urllib3
+ from requests.packages import chardet
+
+ with pytest.raises(ImportError):
+ from requests.packages import webbrowser
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/python/requirements.in b/third_party/python/requirements.in
new file mode 100644
index 0000000000..b719dc12c2
--- /dev/null
+++ b/third_party/python/requirements.in
@@ -0,0 +1,49 @@
+# ONLY ADD PACKAGES USED BY PYTHON 3 TO THIS LIST!
+#
+# Python 2-only packages should be vendored manually by running:
+#
+# $ pip download <package>==<version>
+#
+# Then for the package and each dependency:
+#
+#    $ pip hash <package>.whl  # verify the hash against the one on PyPI (e.g. https://pypi.org/project/<package>/<version>/#files)
+# $ unzip <package>.whl -d <package>
+# $ echo <version> > <package>/VERSION
+# $ hg add <package>/VERSION
+#
+# Note `pip download` may return `tar.gz` files if there is no `.whl` (wheel)
+# available. When downloading wheels, make sure that they are cross-platform.
+# If not, you may need to specify `--no-binary :<package1>,<package2>:` to get
+# the source distribution instead for those particular packages.
+
+attrs==19.1.0
+blessings==1.7
+compare-locales==8.1.0
+cookies==2.2.1
+coverage==5.1
+distro==1.4.0
+ecdsa==0.15
+esprima==4.0.1
+fluent.migrate==0.10
+fluent.syntax==0.18.1
+glean_parser==1.29.0
+jsmin==2.1.0
+json-e==2.7.0
+mozilla-version==0.3.4
+pathlib2==2.3.2
+pathspec==0.8
+pip-tools==5.3.1
+ply==3.10
+pyasn1==0.4.8
+pyflakes==2.2.0
+pytest==3.6.2
+python-hglib==2.4
+pytoml==0.1.10
+pyyaml==5.3.1
+redo==2.0.3
+requests==2.9.1
+responses==0.10.6
+sentry-sdk==0.14.3
+six==1.13.0
+voluptuous==0.11.5
+yamllint==1.23
diff --git a/third_party/python/requirements.txt b/third_party/python/requirements.txt
new file mode 100644
index 0000000000..88213dcef4
--- /dev/null
+++ b/third_party/python/requirements.txt
@@ -0,0 +1,246 @@
+appdirs==1.4.4 \
+ --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \
+ --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 \
+ # via glean-parser
+atomicwrites==1.1.5 \
+ --hash=sha256:240831ea22da9ab882b551b31d4225591e5e447a68c5e188db5b89ca1d487585 \
+ --hash=sha256:a24da68318b08ac9c9c45029f4a10371ab5b20e4226738e150e6e7c571630ae6 \
+ # via pytest
+attrs==19.1.0 \
+ --hash=sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79 \
+ --hash=sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399 \
+ # via -r requirements-mach-vendor-python.in, jsonschema, mozilla-version, pytest
+blessings==1.7 \
+ --hash=sha256:98e5854d805f50a5b58ac2333411b0482516a8210f23f43308baeb58d77c157d \
+ --hash=sha256:b1fdd7e7a675295630f9ae71527a8ebc10bfefa236b3d6aa4932ee4462c17ba3 \
+ --hash=sha256:caad5211e7ba5afe04367cdd4cfc68fa886e2e08f6f35e76b7387d2109ccea6e \
+ # via -r requirements-mach-vendor-python.in
+certifi==2018.4.16 \
+ --hash=sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7 \
+ --hash=sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0 \
+ # via sentry-sdk
+click==7.0 \
+ --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \
+ --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \
+ # via glean-parser, pip-tools
+compare-locales==8.1.0 \
+ --hash=sha256:286270797ce64f7a2f25e734bb437870661409884a4f0971c0bb94fdad6c1f35 \
+ --hash=sha256:3d374ff959d5de2cfd5b94caf6b0fa61445f1d8ede5af384002cb3542aacad3a \
+ # via -r requirements-mach-vendor-python.in, fluent.migrate
+cookies==2.2.1 \
+ --hash=sha256:15bee753002dff684987b8df8c235288eb8d45f8191ae056254812dfd42c81d3 \
+ --hash=sha256:d6b698788cae4cfa4e62ef8643a9ca332b79bd96cb314294b864ae8d7eb3ee8e \
+ # via -r requirements-mach-vendor-python.in
+coverage==5.1 \
+ --hash=sha256:00f1d23f4336efc3b311ed0d807feb45098fc86dee1ca13b3d6768cdab187c8a \
+ --hash=sha256:01333e1bd22c59713ba8a79f088b3955946e293114479bbfc2e37d522be03355 \
+ --hash=sha256:0cb4be7e784dcdc050fc58ef05b71aa8e89b7e6636b99967fadbdba694cf2b65 \
+ --hash=sha256:0e61d9803d5851849c24f78227939c701ced6704f337cad0a91e0972c51c1ee7 \
+ --hash=sha256:1601e480b9b99697a570cea7ef749e88123c04b92d84cedaa01e117436b4a0a9 \
+ --hash=sha256:2742c7515b9eb368718cd091bad1a1b44135cc72468c731302b3d641895b83d1 \
+ --hash=sha256:2d27a3f742c98e5c6b461ee6ef7287400a1956c11421eb574d843d9ec1f772f0 \
+ --hash=sha256:402e1744733df483b93abbf209283898e9f0d67470707e3c7516d84f48524f55 \
+ --hash=sha256:5c542d1e62eece33c306d66fe0a5c4f7f7b3c08fecc46ead86d7916684b36d6c \
+ --hash=sha256:5f2294dbf7875b991c381e3d5af2bcc3494d836affa52b809c91697449d0eda6 \
+ --hash=sha256:6402bd2fdedabbdb63a316308142597534ea8e1895f4e7d8bf7476c5e8751fef \
+ --hash=sha256:66460ab1599d3cf894bb6baee8c684788819b71a5dc1e8fa2ecc152e5d752019 \
+ --hash=sha256:782caea581a6e9ff75eccda79287daefd1d2631cc09d642b6ee2d6da21fc0a4e \
+ --hash=sha256:79a3cfd6346ce6c13145731d39db47b7a7b859c0272f02cdb89a3bdcbae233a0 \
+ --hash=sha256:7a5bdad4edec57b5fb8dae7d3ee58622d626fd3a0be0dfceda162a7035885ecf \
+ --hash=sha256:8fa0cbc7ecad630e5b0f4f35b0f6ad419246b02bc750de7ac66db92667996d24 \
+ --hash=sha256:a027ef0492ede1e03a8054e3c37b8def89a1e3c471482e9f046906ba4f2aafd2 \
+ --hash=sha256:a3f3654d5734a3ece152636aad89f58afc9213c6520062db3978239db122f03c \
+ --hash=sha256:a82b92b04a23d3c8a581fc049228bafde988abacba397d57ce95fe95e0338ab4 \
+ --hash=sha256:acf3763ed01af8410fc36afea23707d4ea58ba7e86a8ee915dfb9ceff9ef69d0 \
+ --hash=sha256:adeb4c5b608574a3d647011af36f7586811a2c1197c861aedb548dd2453b41cd \
+ --hash=sha256:b83835506dfc185a319031cf853fa4bb1b3974b1f913f5bb1a0f3d98bdcded04 \
+ --hash=sha256:bb28a7245de68bf29f6fb199545d072d1036a1917dca17a1e75bbb919e14ee8e \
+ --hash=sha256:bf9cb9a9fd8891e7efd2d44deb24b86d647394b9705b744ff6f8261e6f29a730 \
+ --hash=sha256:c317eaf5ff46a34305b202e73404f55f7389ef834b8dbf4da09b9b9b37f76dd2 \
+ --hash=sha256:dbe8c6ae7534b5b024296464f387d57c13caa942f6d8e6e0346f27e509f0f768 \
+ --hash=sha256:de807ae933cfb7f0c7d9d981a053772452217df2bf38e7e6267c9cbf9545a796 \
+ --hash=sha256:dead2ddede4c7ba6cb3a721870f5141c97dc7d85a079edb4bd8d88c3ad5b20c7 \
+ --hash=sha256:dec5202bfe6f672d4511086e125db035a52b00f1648d6407cc8e526912c0353a \
+ --hash=sha256:e1ea316102ea1e1770724db01998d1603ed921c54a86a2efcb03428d5417e489 \
+ --hash=sha256:f90bfc4ad18450c80b024036eaf91e4a246ae287701aaa88eaebebf150868052 \
+ # via -r requirements-mach-vendor-python.in
+diskcache==4.1.0 \
+ --hash=sha256:69b253a6ffe95bb4bafb483b97c24fca3c2c6c47b82e92b36486969a7e80d47d \
+ --hash=sha256:bcee5a59f9c264e2809e58d01be6569a3bbb1e36a1e0fb83f7ef9b2075f95ce0 \
+ # via glean-parser
+distro==1.4.0 \
+ --hash=sha256:362dde65d846d23baee4b5c058c8586f219b5a54be1cf5fc6ff55c4578392f57 \
+ --hash=sha256:eedf82a470ebe7d010f1872c17237c79ab04097948800029994fa458e52fb4b4 \
+ # via -r requirements-mach-vendor-python.in
+ecdsa==0.15 \
+ --hash=sha256:867ec9cf6df0b03addc8ef66b56359643cb5d0c1dc329df76ba7ecfe256c8061 \
+ --hash=sha256:8f12ac317f8a1318efa75757ef0a651abe12e51fc1af8838fb91079445227277 \
+ # via -r requirements-mach-vendor-python.in
+esprima==4.0.1 \
+ --hash=sha256:08db1a876d3c2910db9cfaeb83108193af5411fc3a3a66ebefacd390d21323ee \
+ # via -r requirements-mach-vendor-python.in
+fluent.migrate==0.10 \
+ --hash=sha256:532322b53c895142cf7c1702f95b54b9d3d128fb92eab38f6e8c8a80c447d8c2 \
+ --hash=sha256:ee1b4d827cff6d1df7f9b6a4b3eb78a75f1dd425e2e71b2013fd0dd411167b3e \
+ # via -r requirements-mach-vendor-python.in
+fluent.syntax==0.18.1 \
+ --hash=sha256:0e63679fa4f1b3042565220a5127b4bab842424f07d6a13c12299e3b3835486a \
+ --hash=sha256:3a55f5e605d1b029a65cc8b6492c86ec4608e15447e73db1495de11fd46c104f \
+ # via -r requirements-mach-vendor-python.in, compare-locales, fluent.migrate
+glean_parser==1.29.0 \
+ --hash=sha256:7cf1b02ef87fad57bf0f6b9711a98c1fd8f89c9df702245d16c09bf1b042a255 \
+ --hash=sha256:df7436e164148594176ec55f7d7c3c5c944daca67c3cc30428514628625b214b \
+ # via -r requirements-mach-vendor-python.in
+jinja2==2.11.2 \
+ --hash=sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0 \
+ --hash=sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035 \
+ # via glean-parser
+jsmin==2.1.0 \
+ --hash=sha256:5d07bf0251a4128e5e8e8eef603849b6b5741c337bff087731a248f9cc774f56 \
+ # via -r requirements-mach-vendor-python.in
+json-e==2.7.0 \
+ --hash=sha256:d8c1ec3f5bbc7728c3a504ebe58829f283c64eca230871e4eefe974b4cdaae4a \
+ # via -r requirements-mach-vendor-python.in
+jsonschema==3.2.0 \
+ --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \
+ --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a \
+ # via glean-parser
+markupsafe==1.1.1 \
+ --hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \
+ --hash=sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \
+ --hash=sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \
+ --hash=sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \
+ --hash=sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42 \
+ --hash=sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \
+ --hash=sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b \
+ --hash=sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \
+ --hash=sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \
+ --hash=sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \
+ --hash=sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \
+ --hash=sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b \
+ --hash=sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \
+ --hash=sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15 \
+ --hash=sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \
+ --hash=sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \
+ --hash=sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \
+ --hash=sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905 \
+ --hash=sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735 \
+ --hash=sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d \
+ --hash=sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e \
+ --hash=sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d \
+ --hash=sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c \
+ --hash=sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21 \
+ --hash=sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2 \
+ --hash=sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5 \
+ --hash=sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b \
+ --hash=sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \
+ --hash=sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \
+ --hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \
+ --hash=sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \
+ --hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \
+ --hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be \
+ # via jinja2
+more-itertools==4.3.0 \
+ --hash=sha256:c187a73da93e7a8acc0001572aebc7e3c69daf7bf6881a2cea10650bd4420092 \
+ --hash=sha256:c476b5d3a34e12d40130bc2f935028b5f636df8f372dc2c1c01dc19681b2039e \
+ --hash=sha256:fcbfeaea0be121980e15bc97b3817b5202ca73d0eae185b4550cbfce2a3ebb3d \
+ # via pytest
+mozilla-version==0.3.4 \
+ --hash=sha256:3ed4deb7a6fb25c83a5346ef4de08ddff9b2ddc4d16dd8fafb4a84978cc71255 \
+ --hash=sha256:ce5741c2e7d12c30b53de9f79e30d6ac2a8bd4c93be711d30c7a7a08e32a094f \
+ # via -r requirements-mach-vendor-python.in
+pathlib2==2.3.2 \
+ --hash=sha256:8eb170f8d0d61825e09a95b38be068299ddeda82f35e96c3301a8a5e7604cb83 \
+ --hash=sha256:d1aa2a11ba7b8f7b21ab852b1fb5afb277e1bb99d5dfc663380b5015c0d80c5a \
+ # via -r requirements-mach-vendor-python.in
+pathspec==0.8 \
+ --hash=sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0 \
+ --hash=sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061 \
+ # via -r requirements-mach-vendor-python.in, yamllint
+pip-tools==5.3.1 \
+ --hash=sha256:5672c2b6ca0f1fd803f3b45568c2cf7fadf135b4971e7d665232b2075544c0ef \
+ --hash=sha256:73787e23269bf8a9230f376c351297b9037ed0d32ab0f9bef4a187d976acc054 \
+ # via -r requirements-mach-vendor-python.in
+pluggy==0.6.0 \
+ --hash=sha256:7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff \
+ --hash=sha256:d345c8fe681115900d6da8d048ba67c25df42973bda370783cd58826442dcd7c \
+ --hash=sha256:e160a7fcf25762bb60efc7e171d4497ff1d8d2d75a3d0df7a21b76821ecbf5c5 \
+ # via pytest
+ply==3.10 \
+ --hash=sha256:96e94af7dd7031d8d6dd6e2a8e0de593b511c211a86e28a9c9621c275ac8bacb \
+ # via -r requirements-mach-vendor-python.in
+py==1.5.4 \
+ --hash=sha256:3fd59af7435864e1a243790d322d763925431213b6b8529c6ca71081ace3bbf7 \
+ --hash=sha256:e31fb2767eb657cbde86c454f02e99cb846d3cd9d61b318525140214fdc0e98e \
+ # via pytest
+pyasn1==0.4.8 \
+ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
+ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba \
+ # via -r requirements-mach-vendor-python.in
+pyflakes==2.2.0 \
+ --hash=sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92 \
+ --hash=sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8 \
+ # via -r requirements-mach-vendor-python.in
+pyrsistent==0.16.0 \
+ --hash=sha256:28669905fe725965daa16184933676547c5bb40a5153055a8dee2a4bd7933ad3 \
+ # via jsonschema
+pytest==3.6.2 \
+ --hash=sha256:8ea01fc4fcc8e1b1e305252b4bc80a1528019ab99fd3b88666c9dc38d754406c \
+ --hash=sha256:90898786b3d0b880b47645bae7b51aa9bbf1e9d1e4510c2cfd15dd65c70ea0cd \
+ # via -r requirements-mach-vendor-python.in
+python-hglib==2.4 \
+ --hash=sha256:693d6ed92a6566e78802c7a03c256cda33d08c63ad3f00fcfa11379b184b9462 \
+ # via -r requirements-mach-vendor-python.in
+pytoml==0.1.10 \
+ --hash=sha256:98399eabd927cd3e12457525315b6abbc5abf9a6f392ab578cbcec327f73890c \
+ # via -r requirements-mach-vendor-python.in, compare-locales
+pyyaml==5.3.1 \
+ --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \
+ --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \
+ --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \
+ --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \
+ --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \
+ --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \
+ --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \
+ --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \
+ --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \
+ --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \
+ --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a \
+ # via -r requirements-mach-vendor-python.in, glean-parser, yamllint
+redo==2.0.3 \
+ --hash=sha256:36784bf8ae766e14f9db0e377ccfa02835d648321d2007b6ae0bf4fd612c0f94 \
+ --hash=sha256:71161cb0e928d824092a5f16203939bbc0867ce4c4685db263cf22c3ae7634a8 \
+ # via -r requirements-mach-vendor-python.in
+requests==2.9.1 \
+ --hash=sha256:113fbba5531a9e34945b7d36b33a084e8ba5d0664b703c81a7c572d91919a5b8 \
+ --hash=sha256:c577815dd00f1394203fc44eb979724b098f88264a9ef898ee45b8e5e9cf587f \
+ # via -r requirements-mach-vendor-python.in, responses
+responses==0.10.6 \
+ --hash=sha256:502d9c0c8008439cfcdef7e251f507fcfdd503b56e8c0c87c3c3e3393953f790 \
+ --hash=sha256:97193c0183d63fba8cd3a041c75464e4b09ea0aff6328800d1546598567dde0b \
+ # via -r requirements-mach-vendor-python.in
+sentry-sdk==0.14.3 \
+ --hash=sha256:23808d571d2461a4ce3784ec12bbee5bdb8c026c143fe79d36cef8a6d653e71f \
+ --hash=sha256:bb90a4e19c7233a580715fc986cc44be2c48fc10b31e71580a2037e1c94b6950 \
+ # via -r requirements-mach-vendor-python.in
+six==1.13.0 \
+ --hash=sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd \
+ --hash=sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66 \
+ # via -r requirements-mach-vendor-python.in, blessings, compare-locales, ecdsa, fluent.migrate, jsonschema, more-itertools, pathlib2, pip-tools, pytest, responses
+urllib3==1.25.9 \
+ --hash=sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527 \
+ --hash=sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115 \
+ # via sentry-sdk
+voluptuous==0.11.5 \
+ --hash=sha256:303542b3fc07fb52ec3d7a1c614b329cdbee13a9d681935353d8ea56a7bfa9f1 \
+ --hash=sha256:567a56286ef82a9d7ae0628c5842f65f516abcb496e74f3f59f1d7b28df314ef \
+ # via -r requirements-mach-vendor-python.in
+yamllint==1.23 \
+ --hash=sha256:0fa69bf8a86182b7fe14918bdd3a30354c869966bbc7cbfff176af71bda9c806 \
+ --hash=sha256:59f3ff77f44e7f46be6aecdb985830f73a1c51e290b7082a7d38c2ae1940f4a9 \
+ # via -r requirements-mach-vendor-python.in, glean-parser
+
+# WARNING: The following packages were not pinned, but pip requires them to be
+# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag.
+# pip
+# setuptools
diff --git a/third_party/python/responses/CHANGES b/third_party/python/responses/CHANGES
new file mode 100644
index 0000000000..569f52f99d
--- /dev/null
+++ b/third_party/python/responses/CHANGES
@@ -0,0 +1,119 @@
+0.10.6
+------
+
+- Improved documentation.
+- Improved installation requirements for py3.
+- ConnectionError's raised by responses now indicate which request
+ path/method failed to match a mock.
+- `test_responses.py` is no longer part of the installation targets.
+
+0.10.5
+------
+
+- Improved support for raising exceptions from callback mocks. If a mock
+ callback returns an exception object that exception will be raised.
+
+0.10.4
+------
+
+- Fixed generated wrapper when using `@responses.activate` in Python 3.6+
+ when decorated functions use parameter and/or return annotations.
+
+0.10.3
+------
+
+- Fixed deprecation warnings in python 3.7 for inspect module usage.
+
+0.10.2
+------
+
+- Fixed build setup to use undeprecated `pytest` bin stub.
+- Updated `tox` configuration.
+- Added example of using responses with `pytest.fixture`
+- Removed dependency on `biscuits` in py3. Instead `http.cookies` is being used.
+
+0.10.1
+------
+
+- Packaging fix to distribute wheel (#219)
+
+0.10.0
+------
+
+- Fix passing through extra settings (#207)
+- Fix collections.abc warning on Python 3.7 (#215)
+- Use 'biscuits' library instead of 'cookies' on Python 3.4+ (#218)
+
+0.9.0
+-----
+
+- Support for Python 3.7 (#196)
+- Support streaming responses for BaseResponse (#192)
+- Support custom patch targets for mock (#189)
+- Fix unicode support for passthru urls (#178)
+- Fix support for unicode in domain names and tlds (#177)
+
+0.8.0
+-----
+
+- Added the ability to passthru real requests via ``add_passthru()``
+ and ``passthru_prefixes`` configurations.
+
+0.7.0
+-----
+
+- Responses will now be rotated until the final match is hit, and
+ then persist using that response (GH-171).
+
+0.6.2
+-----
+
+- Fixed call counting with exceptions (GH-163).
+- Fixed behavior with arbitrary status codes (GH-164).
+- Fixed handling of multiple responses with the same match (GH-165).
+- Fixed default path behavior with ``match_querystring`` (GH-166).
+
+0.6.1
+-----
+
+- Restored ``adding_headers`` compatibility (GH-160).
+
+0.6.0
+-----
+
+- Allow empty list/dict as json object (GH-100).
+- Added `response_callback` (GH-151).
+- Added ``Response`` interfaces (GH-155).
+- Fixed unicode characters in querystring (GH-153).
+- Added support for streaming IO buffers (GH-154).
+- Added support for empty (unset) Content-Type (GH-139).
+- Added reason to mocked responses (GH-132).
+- ``yapf`` autoformatting now enforced on codebase.
+
+0.5.1
+-----
+
+- Add LICENSE, README and CHANGES to the PyPI distribution (GH-97).
+
+0.5.0
+-----
+
+- Allow passing a JSON body to `response.add` (GH-82)
+- Improve ConnectionError emulation (GH-73)
+- Correct assertion in assert_all_requests_are_fired (GH-71)
+
+0.4.0
+-----
+
+- Requests 2.0+ is required
+- Mocking now happens on the adapter instead of the session
+
+0.3.0
+-----
+
+- Add the ability to mock errors (GH-22)
+- Add responses.mock context manager (GH-36)
+- Support custom adapters (GH-33)
+- Add support for regexp error matching (GH-25)
+- Add support for dynamic bodies via `responses.add_callback` (GH-24)
+- Preserve argspec when using `responses.activate` decorator (GH-18)
diff --git a/third_party/python/responses/LICENSE b/third_party/python/responses/LICENSE
new file mode 100644
index 0000000000..52b44b20a3
--- /dev/null
+++ b/third_party/python/responses/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright 2015 David Cramer
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/python/responses/MANIFEST.in b/third_party/python/responses/MANIFEST.in
new file mode 100644
index 0000000000..12977a99c8
--- /dev/null
+++ b/third_party/python/responses/MANIFEST.in
@@ -0,0 +1,3 @@
+include README.rst CHANGES LICENSE
+include test_responses.py tox.ini
+global-exclude *~
diff --git a/third_party/python/responses/PKG-INFO b/third_party/python/responses/PKG-INFO
new file mode 100644
index 0000000000..54677a9dd3
--- /dev/null
+++ b/third_party/python/responses/PKG-INFO
@@ -0,0 +1,443 @@
+Metadata-Version: 2.1
+Name: responses
+Version: 0.10.6
+Summary: A utility library for mocking out the `requests` Python library.
+Home-page: https://github.com/getsentry/responses
+Author: David Cramer
+License: Apache 2.0
+Description: Responses
+ =========
+
+ .. image:: https://travis-ci.org/getsentry/responses.svg?branch=master
+ :target: https://travis-ci.org/getsentry/responses
+
+ A utility library for mocking out the `requests` Python library.
+
+ .. note::
+
+ Responses requires Python 2.7 or newer, and requests >= 2.0
+
+
+ Installing
+ ----------
+
+ ``pip install responses``
+
+
+ Basics
+ ------
+
+ The core of ``responses`` comes from registering mock responses:
+
+ .. code-block:: python
+
+ import responses
+ import requests
+
+ @responses.activate
+ def test_simple():
+ responses.add(responses.GET, 'http://twitter.com/api/1/foobar',
+ json={'error': 'not found'}, status=404)
+
+ resp = requests.get('http://twitter.com/api/1/foobar')
+
+ assert resp.json() == {"error": "not found"}
+
+ assert len(responses.calls) == 1
+ assert responses.calls[0].request.url == 'http://twitter.com/api/1/foobar'
+ assert responses.calls[0].response.text == '{"error": "not found"}'
+
+ If you attempt to fetch a url which doesn't hit a match, ``responses`` will raise
+ a ``ConnectionError``:
+
+ .. code-block:: python
+
+ import responses
+ import requests
+
+ from requests.exceptions import ConnectionError
+
+ @responses.activate
+ def test_simple():
+ with pytest.raises(ConnectionError):
+ requests.get('http://twitter.com/api/1/foobar')
+
+ Lastly, you can pass an ``Exception`` as the body to trigger an error on the request:
+
+ .. code-block:: python
+
+ import responses
+ import requests
+
+ @responses.activate
+ def test_simple():
+ responses.add(responses.GET, 'http://twitter.com/api/1/foobar',
+ body=Exception('...'))
+ with pytest.raises(Exception):
+ requests.get('http://twitter.com/api/1/foobar')
+
+
+ Response Parameters
+ -------------------
+
+ Responses are automatically registered via params on ``add``, but can also be
+ passed directly:
+
+ .. code-block:: python
+
+ import responses
+
+ responses.add(
+ responses.Response(
+ method='GET',
+ url='http://example.com',
+ )
+ )
+
+ The following attributes can be passed to a Response mock:
+
+ method (``str``)
+ The HTTP method (GET, POST, etc).
+
+ url (``str`` or compiled regular expression)
+ The full resource URL.
+
+ match_querystring (``bool``)
+ Include the query string when matching requests.
+ Enabled by default if the response URL contains a query string,
+ disabled if it doesn't or the URL is a regular expression.
+
+ body (``str`` or ``BufferedReader``)
+ The response body.
+
+ json
+ A Python object representing the JSON response body. Automatically configures
+ the appropriate Content-Type.
+
+ status (``int``)
+ The HTTP status code.
+
+        content_type (``str``)
+ Defaults to ``text/plain``.
+
+ headers (``dict``)
+ Response headers.
+
+ stream (``bool``)
+ Disabled by default. Indicates the response should use the streaming API.
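+
+        For example, several of these attributes can be combined on a single mock
+        (a sketch; the URL and header below are purely illustrative):
+
+        .. code-block:: python
+
+            import responses
+
+            responses.add(
+                responses.Response(
+                    method='GET',
+                    url='http://example.com/api/1/item',
+                    json={'id': 1},
+                    status=200,
+                    headers={'X-Request-Id': 'abc123'},
+                )
+            )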
+
+
+ Dynamic Responses
+ -----------------
+
+ You can utilize callbacks to provide dynamic responses. The callback must return
+ a tuple of (``status``, ``headers``, ``body``).
+
+ .. code-block:: python
+
+ import json
+
+ import responses
+ import requests
+
+ @responses.activate
+ def test_calc_api():
+
+ def request_callback(request):
+ payload = json.loads(request.body)
+ resp_body = {'value': sum(payload['numbers'])}
+ headers = {'request-id': '728d329e-0e86-11e4-a748-0c84dc037c13'}
+ return (200, headers, json.dumps(resp_body))
+
+ responses.add_callback(
+ responses.POST, 'http://calc.com/sum',
+ callback=request_callback,
+ content_type='application/json',
+ )
+
+ resp = requests.post(
+ 'http://calc.com/sum',
+ json.dumps({'numbers': [1, 2, 3]}),
+ headers={'content-type': 'application/json'},
+ )
+
+ assert resp.json() == {'value': 6}
+
+ assert len(responses.calls) == 1
+ assert responses.calls[0].request.url == 'http://calc.com/sum'
+ assert responses.calls[0].response.text == '{"value": 6}'
+ assert (
+ responses.calls[0].response.headers['request-id'] ==
+ '728d329e-0e86-11e4-a748-0c84dc037c13'
+ )
+
+ You can also pass a compiled regex to `add_callback` to match multiple urls:
+
+ .. code-block:: python
+
+ import re, json
+
+ from functools import reduce
+
+ import responses
+ import requests
+
+ operators = {
+ 'sum': lambda x, y: x+y,
+ 'prod': lambda x, y: x*y,
+ 'pow': lambda x, y: x**y
+ }
+
+ @responses.activate
+ def test_regex_url():
+
+ def request_callback(request):
+ payload = json.loads(request.body)
+ operator_name = request.path_url[1:]
+
+ operator = operators[operator_name]
+
+ resp_body = {'value': reduce(operator, payload['numbers'])}
+ headers = {'request-id': '728d329e-0e86-11e4-a748-0c84dc037c13'}
+ return (200, headers, json.dumps(resp_body))
+
+ responses.add_callback(
+ responses.POST,
+ re.compile('http://calc.com/(sum|prod|pow|unsupported)'),
+ callback=request_callback,
+ content_type='application/json',
+ )
+
+ resp = requests.post(
+ 'http://calc.com/prod',
+ json.dumps({'numbers': [2, 3, 4]}),
+ headers={'content-type': 'application/json'},
+ )
+ assert resp.json() == {'value': 24}
+
+ test_regex_url()
+
+
+ If you want to pass extra keyword arguments to the callback function, for example when reusing
+ a callback function to give a slightly different result, you can use ``functools.partial``:
+
+ .. code-block:: python
+
+ from functools import partial
+
+ ...
+
+ def request_callback(request, id=None):
+ payload = json.loads(request.body)
+ resp_body = {'value': sum(payload['numbers'])}
+ headers = {'request-id': id}
+ return (200, headers, json.dumps(resp_body))
+
+ responses.add_callback(
+ responses.POST, 'http://calc.com/sum',
+ callback=partial(request_callback, id='728d329e-0e86-11e4-a748-0c84dc037c13'),
+ content_type='application/json',
+ )
+
+
+ Responses as a context manager
+ ------------------------------
+
+ .. code-block:: python
+
+ import responses
+ import requests
+
+ def test_my_api():
+ with responses.RequestsMock() as rsps:
+ rsps.add(responses.GET, 'http://twitter.com/api/1/foobar',
+ body='{}', status=200,
+ content_type='application/json')
+ resp = requests.get('http://twitter.com/api/1/foobar')
+
+ assert resp.status_code == 200
+
+ # outside the context manager requests will hit the remote server
+ resp = requests.get('http://twitter.com/api/1/foobar')
+                assert resp.status_code == 404
+
+ Responses as a pytest fixture
+ -----------------------------
+
+ .. code-block:: python
+
+ @pytest.fixture
+ def mocked_responses():
+ with responses.RequestsMock() as rsps:
+ yield rsps
+
+ def test_api(mocked_responses):
+ mocked_responses.add(
+ responses.GET, 'http://twitter.com/api/1/foobar',
+ body='{}', status=200,
+ content_type='application/json')
+ resp = requests.get('http://twitter.com/api/1/foobar')
+ assert resp.status_code == 200
+
+ Assertions on declared responses
+ --------------------------------
+
+ When used as a context manager, Responses will, by default, raise an assertion
+ error if a url was registered but not accessed. This can be disabled by passing
+ the ``assert_all_requests_are_fired`` value:
+
+ .. code-block:: python
+
+ import responses
+ import requests
+
+ def test_my_api():
+ with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
+ rsps.add(responses.GET, 'http://twitter.com/api/1/foobar',
+ body='{}', status=200,
+ content_type='application/json')
+
+
+ Multiple Responses
+ ------------------
+
+ You can also add multiple responses for the same url:
+
+ .. code-block:: python
+
+ import responses
+ import requests
+
+ @responses.activate
+ def test_my_api():
+ responses.add(responses.GET, 'http://twitter.com/api/1/foobar', status=500)
+ responses.add(responses.GET, 'http://twitter.com/api/1/foobar',
+ body='{}', status=200,
+ content_type='application/json')
+
+ resp = requests.get('http://twitter.com/api/1/foobar')
+ assert resp.status_code == 500
+ resp = requests.get('http://twitter.com/api/1/foobar')
+ assert resp.status_code == 200
+
+
+ Using a callback to modify the response
+ ---------------------------------------
+
+ If you use customized processing in `requests` via subclassing/mixins, or if you
+ have library tools that interact with `requests` at a low level, you may need
+ to add extended processing to the mocked Response object to fully simulate the
+ environment for your tests. A `response_callback` can be used, which will be
+ wrapped by the library before being returned to the caller. The callback
+        accepts a `response` as its single argument, and is expected to return a
+ single `response` object.
+
+ .. code-block:: python
+
+ import responses
+ import requests
+
+ def response_callback(resp):
+ resp.callback_processed = True
+ return resp
+
+ with responses.RequestsMock(response_callback=response_callback) as m:
+ m.add(responses.GET, 'http://example.com', body=b'test')
+ resp = requests.get('http://example.com')
+ assert resp.text == "test"
+ assert hasattr(resp, 'callback_processed')
+ assert resp.callback_processed is True
+
+
+ Passing thru real requests
+ --------------------------
+
+ In some cases you may wish to allow for certain requests to pass thru responses
+ and hit a real server. This can be done with the 'passthru' methods:
+
+ .. code-block:: python
+
+ import responses
+
+ @responses.activate
+ def test_my_api():
+ responses.add_passthru('https://percy.io')
+
+        This will allow any request matching that prefix that is not otherwise
+        registered as a mock response to pass through using the standard behavior.
+
+
+ Viewing/Modifying registered responses
+ --------------------------------------
+
+        Registered responses are available as a private attribute of the RequestsMock
+ instance. It is sometimes useful for debugging purposes to view the stack of
+ registered responses which can be accessed via ``responses.mock._matches``.
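+
+        For example (illustrative only; ``_matches`` is a private attribute and may
+        change between releases):
+
+        .. code-block:: python
+
+            import responses
+
+            @responses.activate
+            def test_debug_registrations():
+                responses.add(responses.GET, 'http://example.com', body='ok')
+                # each registered response exposes the method and url it matches
+                for registered in responses.mock._matches:
+                    print(registered.method, registered.url)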
+
+        The ``replace`` function allows a previously registered response to be changed.
+        Its signature is identical to ``add``. Responses are identified using ``method``
+        and ``url``, and only the first matching response is replaced.
+
+ .. code-block:: python
+
+ import responses
+ import requests
+
+ @responses.activate
+ def test_replace():
+
+ responses.add(responses.GET, 'http://example.org', json={'data': 1})
+ responses.replace(responses.GET, 'http://example.org', json={'data': 2})
+
+ resp = requests.get('http://example.org')
+
+ assert resp.json() == {'data': 2}
+
+
+        ``remove`` takes ``method`` and ``url`` arguments and will remove *all*
+        matching responses from the registered list.
+
+        Finally, ``clear`` will remove all registered responses.
+
+
+
+ Contributing
+ ------------
+
+ Responses uses several linting and autoformatting utilities, so it's important that when
+ submitting patches you use the appropriate toolchain:
+
+ Clone the repository:
+
+ .. code-block:: shell
+
+ git clone https://github.com/getsentry/responses.git
+
+ Create an environment (e.g. with ``virtualenv``):
+
+ .. code-block:: shell
+
+ virtualenv .env && source .env/bin/activate
+
+ Configure development requirements:
+
+ .. code-block:: shell
+
+ make develop
+
+Platform: UNKNOWN
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Software Development
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+Provides-Extra: tests
diff --git a/third_party/python/responses/README.rst b/third_party/python/responses/README.rst
new file mode 100644
index 0000000000..73a6d7a8f2
--- /dev/null
+++ b/third_party/python/responses/README.rst
@@ -0,0 +1,420 @@
+Responses
+=========
+
+.. image:: https://travis-ci.org/getsentry/responses.svg?branch=master
+ :target: https://travis-ci.org/getsentry/responses
+
+A utility library for mocking out the `requests` Python library.
+
+.. note::
+
+ Responses requires Python 2.7 or newer, and requests >= 2.0
+
+
+Installing
+----------
+
+``pip install responses``
+
+
+Basics
+------
+
+The core of ``responses`` comes from registering mock responses:
+
+.. code-block:: python
+
+ import responses
+ import requests
+
+ @responses.activate
+ def test_simple():
+ responses.add(responses.GET, 'http://twitter.com/api/1/foobar',
+ json={'error': 'not found'}, status=404)
+
+ resp = requests.get('http://twitter.com/api/1/foobar')
+
+ assert resp.json() == {"error": "not found"}
+
+ assert len(responses.calls) == 1
+ assert responses.calls[0].request.url == 'http://twitter.com/api/1/foobar'
+ assert responses.calls[0].response.text == '{"error": "not found"}'
+
+If you attempt to fetch a url which doesn't match a registered response,
+``responses`` will raise a ``ConnectionError``:
+
+.. code-block:: python
+
+    import pytest
+    import requests
+    import responses
+
+ from requests.exceptions import ConnectionError
+
+ @responses.activate
+ def test_simple():
+ with pytest.raises(ConnectionError):
+ requests.get('http://twitter.com/api/1/foobar')
+
+Lastly, you can pass an ``Exception`` as the body to trigger an error on the request:
+
+.. code-block:: python
+
+    import pytest
+    import requests
+    import responses
+
+ @responses.activate
+ def test_simple():
+ responses.add(responses.GET, 'http://twitter.com/api/1/foobar',
+ body=Exception('...'))
+ with pytest.raises(Exception):
+ requests.get('http://twitter.com/api/1/foobar')
+
+
+Response Parameters
+-------------------
+
+Responses are normally registered by passing parameters to ``add``, but a
+``Response`` object can also be constructed and passed in directly:
+
+.. code-block:: python
+
+ import responses
+
+ responses.add(
+ responses.Response(
+ method='GET',
+ url='http://example.com',
+ )
+ )
+
+The following attributes can be passed to a Response mock:
+
+method (``str``)
+ The HTTP method (GET, POST, etc).
+
+url (``str`` or compiled regular expression)
+ The full resource URL.
+
+match_querystring (``bool``)
+ Include the query string when matching requests.
+ Enabled by default if the response URL contains a query string,
+ disabled if it doesn't or the URL is a regular expression.
+
+body (``str`` or ``BufferedReader``)
+ The response body.
+
+json
+ A Python object representing the JSON response body. Automatically configures
+ the appropriate Content-Type.
+
+status (``int``)
+ The HTTP status code.
+
+content_type (``str``)
+    The value of the ``Content-Type`` response header. Defaults to ``text/plain``.
+
+headers (``dict``)
+ Response headers.
+
+stream (``bool``)
+ Disabled by default. Indicates the response should use the streaming API.
+
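+As an illustrative sketch, several of these parameters can be combined on a single
+registration (the URL and header value below are invented for the example):
+
+.. code-block:: python
+
+    import responses
+
+    responses.add(
+        responses.Response(
+            method='GET',
+            url='http://example.com/search?q=test',
+            match_querystring=True,
+            json={'results': []},
+            status=200,
+            headers={'X-Request-Id': 'abc123'},
+        )
+    )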
+
+Dynamic Responses
+-----------------
+
+You can use callbacks to provide dynamic responses. The callback must return
+a tuple of (``status``, ``headers``, ``body``).
+
+.. code-block:: python
+
+ import json
+
+ import responses
+ import requests
+
+ @responses.activate
+ def test_calc_api():
+
+ def request_callback(request):
+ payload = json.loads(request.body)
+ resp_body = {'value': sum(payload['numbers'])}
+ headers = {'request-id': '728d329e-0e86-11e4-a748-0c84dc037c13'}
+ return (200, headers, json.dumps(resp_body))
+
+ responses.add_callback(
+ responses.POST, 'http://calc.com/sum',
+ callback=request_callback,
+ content_type='application/json',
+ )
+
+ resp = requests.post(
+ 'http://calc.com/sum',
+ json.dumps({'numbers': [1, 2, 3]}),
+ headers={'content-type': 'application/json'},
+ )
+
+ assert resp.json() == {'value': 6}
+
+ assert len(responses.calls) == 1
+ assert responses.calls[0].request.url == 'http://calc.com/sum'
+ assert responses.calls[0].response.text == '{"value": 6}'
+ assert (
+ responses.calls[0].response.headers['request-id'] ==
+ '728d329e-0e86-11e4-a748-0c84dc037c13'
+ )
+
+You can also pass a compiled regex to `add_callback` to match multiple urls:
+
+.. code-block:: python
+
+ import re, json
+
+ from functools import reduce
+
+ import responses
+ import requests
+
+ operators = {
+ 'sum': lambda x, y: x+y,
+ 'prod': lambda x, y: x*y,
+ 'pow': lambda x, y: x**y
+ }
+
+ @responses.activate
+ def test_regex_url():
+
+ def request_callback(request):
+ payload = json.loads(request.body)
+ operator_name = request.path_url[1:]
+
+ operator = operators[operator_name]
+
+ resp_body = {'value': reduce(operator, payload['numbers'])}
+ headers = {'request-id': '728d329e-0e86-11e4-a748-0c84dc037c13'}
+ return (200, headers, json.dumps(resp_body))
+
+ responses.add_callback(
+ responses.POST,
+ re.compile('http://calc.com/(sum|prod|pow|unsupported)'),
+ callback=request_callback,
+ content_type='application/json',
+ )
+
+ resp = requests.post(
+ 'http://calc.com/prod',
+ json.dumps({'numbers': [2, 3, 4]}),
+ headers={'content-type': 'application/json'},
+ )
+ assert resp.json() == {'value': 24}
+
+ test_regex_url()
+
+
+If you want to pass extra keyword arguments to the callback function, for example when reusing
+a callback function to give a slightly different result, you can use ``functools.partial``:
+
+.. code-block:: python
+
+ from functools import partial
+
+ ...
+
+ def request_callback(request, id=None):
+ payload = json.loads(request.body)
+ resp_body = {'value': sum(payload['numbers'])}
+ headers = {'request-id': id}
+ return (200, headers, json.dumps(resp_body))
+
+ responses.add_callback(
+ responses.POST, 'http://calc.com/sum',
+ callback=partial(request_callback, id='728d329e-0e86-11e4-a748-0c84dc037c13'),
+ content_type='application/json',
+ )
+
+
+Responses as a context manager
+------------------------------
+
+.. code-block:: python
+
+ import responses
+ import requests
+
+ def test_my_api():
+ with responses.RequestsMock() as rsps:
+ rsps.add(responses.GET, 'http://twitter.com/api/1/foobar',
+ body='{}', status=200,
+ content_type='application/json')
+ resp = requests.get('http://twitter.com/api/1/foobar')
+
+ assert resp.status_code == 200
+
+ # outside the context manager requests will hit the remote server
+ resp = requests.get('http://twitter.com/api/1/foobar')
+        assert resp.status_code == 404
+
+Responses as a pytest fixture
+-----------------------------
+
+.. code-block:: python
+
+    import pytest
+    import requests
+    import responses
+
+    @pytest.fixture
+ def mocked_responses():
+ with responses.RequestsMock() as rsps:
+ yield rsps
+
+ def test_api(mocked_responses):
+ mocked_responses.add(
+ responses.GET, 'http://twitter.com/api/1/foobar',
+ body='{}', status=200,
+ content_type='application/json')
+ resp = requests.get('http://twitter.com/api/1/foobar')
+ assert resp.status_code == 200
+
+Assertions on declared responses
+--------------------------------
+
+When used as a context manager, Responses will, by default, raise an assertion
+error if a url was registered but not accessed. This can be disabled by passing
+``assert_all_requests_are_fired=False``:
+
+.. code-block:: python
+
+ import responses
+ import requests
+
+ def test_my_api():
+ with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
+ rsps.add(responses.GET, 'http://twitter.com/api/1/foobar',
+ body='{}', status=200,
+ content_type='application/json')
+
+
+Multiple Responses
+------------------
+
+You can also add multiple responses for the same url:
+
+.. code-block:: python
+
+ import responses
+ import requests
+
+ @responses.activate
+ def test_my_api():
+ responses.add(responses.GET, 'http://twitter.com/api/1/foobar', status=500)
+ responses.add(responses.GET, 'http://twitter.com/api/1/foobar',
+ body='{}', status=200,
+ content_type='application/json')
+
+ resp = requests.get('http://twitter.com/api/1/foobar')
+ assert resp.status_code == 500
+ resp = requests.get('http://twitter.com/api/1/foobar')
+ assert resp.status_code == 200
+
+
+Using a callback to modify the response
+---------------------------------------
+
+If you use customized processing in `requests` via subclassing/mixins, or if you
+have library tools that interact with `requests` at a low level, you may need
+to add extended processing to the mocked Response object to fully simulate the
+environment for your tests. A `response_callback` can be used, which will be
+wrapped by the library before being returned to the caller. The callback
+accepts a `response` as its single argument, and is expected to return a
+single `response` object.
+
+.. code-block:: python
+
+ import responses
+ import requests
+
+ def response_callback(resp):
+ resp.callback_processed = True
+ return resp
+
+ with responses.RequestsMock(response_callback=response_callback) as m:
+ m.add(responses.GET, 'http://example.com', body=b'test')
+ resp = requests.get('http://example.com')
+ assert resp.text == "test"
+ assert hasattr(resp, 'callback_processed')
+ assert resp.callback_processed is True
+
+
+Passing thru real requests
+--------------------------
+
+In some cases you may wish to allow certain requests to pass through ``responses``
+and hit a real server. This can be done with the ``add_passthru`` method:
+
+.. code-block:: python
+
+ import responses
+
+ @responses.activate
+ def test_my_api():
+ responses.add_passthru('https://percy.io')
+
+This will allow any request matching that prefix, if it is not otherwise registered
+as a mock response, to pass through using the standard (real) behavior.
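+
+As an illustrative sketch (the prefix above and the mocked URL below are both just
+examples), a passthru prefix and a registered mock can be combined in one test:
+
+.. code-block:: python
+
+    import requests
+    import responses
+
+    @responses.activate
+    def test_mixed():
+        # anything under this prefix goes to the real server
+        responses.add_passthru('https://percy.io')
+        # this URL is mocked and never leaves the test process
+        responses.add(responses.GET, 'http://example.com/api', json={'ok': True})
+
+        assert requests.get('http://example.com/api').json() == {'ok': True}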
+
+
+Viewing/Modifying registered responses
+--------------------------------------
+
+Registered responses are available as a private attribute of the ``RequestsMock``
+instance. It is sometimes useful for debugging purposes to view the stack of
+registered responses which can be accessed via ``responses.mock._matches``.
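+
+For example, a small debugging sketch (this inspects a private attribute, so it may
+change between versions):
+
+.. code-block:: python
+
+    import responses
+
+    responses.add(responses.GET, 'http://example.com/foo')
+
+    for registered in responses.mock._matches:
+        print(registered.method, registered.url)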
+
+The ``replace`` function allows a previously registered response to be changed.
+Its signature is identical to ``add``. Responses are identified using ``method``
+and ``url``, and only the first matching response is replaced.
+
+.. code-block:: python
+
+ import responses
+ import requests
+
+ @responses.activate
+ def test_replace():
+
+ responses.add(responses.GET, 'http://example.org', json={'data': 1})
+ responses.replace(responses.GET, 'http://example.org', json={'data': 2})
+
+ resp = requests.get('http://example.org')
+
+ assert resp.json() == {'data': 2}
+
+
+``remove`` takes ``method`` and ``url`` arguments and will remove *all*
+matching responses from the registered list.
+
+Finally, ``clear`` will remove all registered responses.
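+
+A brief sketch of ``remove`` (the URLs are invented for illustration); calling
+``responses.reset()``, shown in the final comment, is another way to drop every
+registration and recorded call at once:
+
+.. code-block:: python
+
+    import responses
+
+    @responses.activate
+    def test_remove():
+        responses.add(responses.GET, 'http://example.org/one')
+        responses.add(responses.GET, 'http://example.org/two')
+
+        # removes every registered response matching this method and url
+        responses.remove(responses.GET, 'http://example.org/one')
+
+        # responses.reset() would drop all remaining registrations as well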
+
+
+
+Contributing
+------------
+
+Responses uses several linting and autoformatting utilities, so it's important that when
+submitting patches you use the appropriate toolchain:
+
+Clone the repository:
+
+.. code-block:: shell
+
+ git clone https://github.com/getsentry/responses.git
+
+Create an environment (e.g. with ``virtualenv``):
+
+.. code-block:: shell
+
+ virtualenv .env && source .env/bin/activate
+
+Configure development requirements:
+
+.. code-block:: shell
+
+ make develop
diff --git a/third_party/python/responses/responses.py b/third_party/python/responses/responses.py
new file mode 100644
index 0000000000..9de936805c
--- /dev/null
+++ b/third_party/python/responses/responses.py
@@ -0,0 +1,653 @@
+from __future__ import absolute_import, print_function, division, unicode_literals
+
+import _io
+import inspect
+import json as json_module
+import logging
+import re
+import six
+
+from collections import namedtuple
+from functools import update_wrapper
+from requests.adapters import HTTPAdapter
+from requests.exceptions import ConnectionError
+from requests.sessions import REDIRECT_STATI
+from requests.utils import cookiejar_from_dict
+
+try:
+ from collections.abc import Sequence, Sized
+except ImportError:
+ from collections import Sequence, Sized
+
+try:
+ from requests.packages.urllib3.response import HTTPResponse
+except ImportError:
+ from urllib3.response import HTTPResponse
+
+if six.PY2:
+ from urlparse import urlparse, parse_qsl, urlsplit, urlunsplit
+ from urllib import quote
+else:
+ from urllib.parse import urlparse, parse_qsl, urlsplit, urlunsplit, quote
+
+if six.PY2:
+ try:
+ from six import cStringIO as BufferIO
+ except ImportError:
+ from six import StringIO as BufferIO
+else:
+ from io import BytesIO as BufferIO
+
+try:
+ from unittest import mock as std_mock
+except ImportError:
+ import mock as std_mock
+
+try:
+ Pattern = re._pattern_type
+except AttributeError:
+ # Python 3.7
+ Pattern = re.Pattern
+
+UNSET = object()
+
+Call = namedtuple("Call", ["request", "response"])
+
+_real_send = HTTPAdapter.send
+
+logger = logging.getLogger("responses")
+
+
+def _is_string(s):
+ return isinstance(s, six.string_types)
+
+
+def _has_unicode(s):
+ return any(ord(char) > 128 for char in s)
+
+
+def _clean_unicode(url):
+ # Clean up domain names, which use punycode to handle unicode chars
+ urllist = list(urlsplit(url))
+ netloc = urllist[1]
+ if _has_unicode(netloc):
+ domains = netloc.split(".")
+ for i, d in enumerate(domains):
+ if _has_unicode(d):
+ d = "xn--" + d.encode("punycode").decode("ascii")
+ domains[i] = d
+ urllist[1] = ".".join(domains)
+ url = urlunsplit(urllist)
+
+ # Clean up path/query/params, which use url-encoding to handle unicode chars
+ if isinstance(url.encode("utf8"), six.string_types):
+ url = url.encode("utf8")
+ chars = list(url)
+ for i, x in enumerate(chars):
+ if ord(x) > 128:
+ chars[i] = quote(x)
+
+ return "".join(chars)
+
+
+def _is_redirect(response):
+ try:
+ # 2.0.0 <= requests <= 2.2
+ return response.is_redirect
+
+ except AttributeError:
+ # requests > 2.2
+ return (
+ # use request.sessions conditional
+ response.status_code in REDIRECT_STATI
+ and "location" in response.headers
+ )
+
+
+def _cookies_from_headers(headers):
+ try:
+ import http.cookies as cookies
+
+ resp_cookie = cookies.SimpleCookie()
+ resp_cookie.load(headers["set-cookie"])
+
+ cookies_dict = {name: v.value for name, v in resp_cookie.items()}
+ except ImportError:
+ from cookies import Cookies
+
+ resp_cookies = Cookies.from_request(headers["set-cookie"])
+ cookies_dict = {v.name: v.value for _, v in resp_cookies.items()}
+ return cookiejar_from_dict(cookies_dict)
+
+
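+# Template used by get_wrapped() below: the original test function is re-created
+# with an equivalent signature (so pytest fixture injection keeps working) and its
+# body simply calls the wrapped function inside the mock's context manager.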
+_wrapper_template = """\
+def wrapper%(wrapper_args)s:
+ with responses:
+ return func%(func_args)s
+"""
+
+
+def get_wrapped(func, responses):
+ if six.PY2:
+ args, a, kw, defaults = inspect.getargspec(func)
+ wrapper_args = inspect.formatargspec(args, a, kw, defaults)
+
+ # Preserve the argspec for the wrapped function so that testing
+ # tools such as pytest can continue to use their fixture injection.
+ if hasattr(func, "__self__"):
+ args = args[1:] # Omit 'self'
+ func_args = inspect.formatargspec(args, a, kw, None)
+ else:
+ signature = inspect.signature(func)
+ signature = signature.replace(return_annotation=inspect.Signature.empty)
+ # If the function is wrapped, switch to *args, **kwargs for the parameters
+ # as we can't rely on the signature to give us the arguments the function will
+ # be called with. For example unittest.mock.patch uses required args that are
+ # not actually passed to the function when invoked.
+ if hasattr(func, "__wrapped__"):
+ wrapper_params = [
+ inspect.Parameter("args", inspect.Parameter.VAR_POSITIONAL),
+ inspect.Parameter("kwargs", inspect.Parameter.VAR_KEYWORD),
+ ]
+ else:
+ wrapper_params = [
+ param.replace(annotation=inspect.Parameter.empty)
+ for param in signature.parameters.values()
+ ]
+ signature = signature.replace(parameters=wrapper_params)
+
+ wrapper_args = str(signature)
+ params_without_defaults = [
+ param.replace(
+ annotation=inspect.Parameter.empty, default=inspect.Parameter.empty
+ )
+ for param in signature.parameters.values()
+ ]
+ signature = signature.replace(parameters=params_without_defaults)
+ func_args = str(signature)
+
+ evaldict = {"func": func, "responses": responses}
+ six.exec_(
+ _wrapper_template % {"wrapper_args": wrapper_args, "func_args": func_args},
+ evaldict,
+ )
+ wrapper = evaldict["wrapper"]
+ update_wrapper(wrapper, func)
+ return wrapper
+
+
+class CallList(Sequence, Sized):
+ def __init__(self):
+ self._calls = []
+
+ def __iter__(self):
+ return iter(self._calls)
+
+ def __len__(self):
+ return len(self._calls)
+
+ def __getitem__(self, idx):
+ return self._calls[idx]
+
+ def add(self, request, response):
+ self._calls.append(Call(request, response))
+
+ def reset(self):
+ self._calls = []
+
+
+def _ensure_url_default_path(url):
+ if _is_string(url):
+ url_parts = list(urlsplit(url))
+ if url_parts[2] == "":
+ url_parts[2] = "/"
+ url = urlunsplit(url_parts)
+ return url
+
+
+def _handle_body(body):
+ if isinstance(body, six.text_type):
+ body = body.encode("utf-8")
+ if isinstance(body, _io.BufferedReader):
+ return body
+
+ return BufferIO(body)
+
+
+_unspecified = object()
+
+
+class BaseResponse(object):
+ content_type = None
+ headers = None
+
+ stream = False
+
+ def __init__(self, method, url, match_querystring=_unspecified):
+ self.method = method
+ # ensure the url has a default path set if the url is a string
+ self.url = _ensure_url_default_path(url)
+ self.match_querystring = self._should_match_querystring(match_querystring)
+ self.call_count = 0
+
+ def __eq__(self, other):
+ if not isinstance(other, BaseResponse):
+ return False
+
+ if self.method != other.method:
+ return False
+
+        # Can't simply do an equality check on the objects directly here since __eq__ isn't
+ # implemented for regex. It might seem to work as regex is using a cache to return
+ # the same regex instances, but it doesn't in all cases.
+ self_url = self.url.pattern if isinstance(self.url, Pattern) else self.url
+ other_url = other.url.pattern if isinstance(other.url, Pattern) else other.url
+
+ return self_url == other_url
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def _url_matches_strict(self, url, other):
+ url_parsed = urlparse(url)
+ other_parsed = urlparse(other)
+
+ if url_parsed[:3] != other_parsed[:3]:
+ return False
+
+ url_qsl = sorted(parse_qsl(url_parsed.query))
+ other_qsl = sorted(parse_qsl(other_parsed.query))
+
+ if len(url_qsl) != len(other_qsl):
+ return False
+
+ for (a_k, a_v), (b_k, b_v) in zip(url_qsl, other_qsl):
+ if a_k != b_k:
+ return False
+
+ if a_v != b_v:
+ return False
+
+ return True
+
+ def _should_match_querystring(self, match_querystring_argument):
+ if match_querystring_argument is not _unspecified:
+ return match_querystring_argument
+
+ if isinstance(self.url, Pattern):
+ # the old default from <= 0.9.0
+ return False
+
+ return bool(urlparse(self.url).query)
+
+ def _url_matches(self, url, other, match_querystring=False):
+ if _is_string(url):
+ if _has_unicode(url):
+ url = _clean_unicode(url)
+ if not isinstance(other, six.text_type):
+ other = other.encode("ascii").decode("utf8")
+ if match_querystring:
+ return self._url_matches_strict(url, other)
+
+ else:
+ url_without_qs = url.split("?", 1)[0]
+ other_without_qs = other.split("?", 1)[0]
+ return url_without_qs == other_without_qs
+
+ elif isinstance(url, Pattern) and url.match(other):
+ return True
+
+ else:
+ return False
+
+ def get_headers(self):
+ headers = {}
+ if self.content_type is not None:
+ headers["Content-Type"] = self.content_type
+ if self.headers:
+ headers.update(self.headers)
+ return headers
+
+ def get_response(self, request):
+ raise NotImplementedError
+
+ def matches(self, request):
+ if request.method != self.method:
+ return False
+
+ if not self._url_matches(self.url, request.url, self.match_querystring):
+ return False
+
+ return True
+
+
+class Response(BaseResponse):
+ def __init__(
+ self,
+ method,
+ url,
+ body="",
+ json=None,
+ status=200,
+ headers=None,
+ stream=False,
+ content_type=UNSET,
+ **kwargs
+ ):
+ # if we were passed a `json` argument,
+ # override the body and content_type
+ if json is not None:
+ assert not body
+ body = json_module.dumps(json)
+ if content_type is UNSET:
+ content_type = "application/json"
+
+ if content_type is UNSET:
+ content_type = "text/plain"
+
+ # body must be bytes
+ if isinstance(body, six.text_type):
+ body = body.encode("utf-8")
+
+ self.body = body
+ self.status = status
+ self.headers = headers
+ self.stream = stream
+ self.content_type = content_type
+ super(Response, self).__init__(method, url, **kwargs)
+
+ def get_response(self, request):
+ if self.body and isinstance(self.body, Exception):
+ raise self.body
+
+ headers = self.get_headers()
+ status = self.status
+ body = _handle_body(self.body)
+
+ return HTTPResponse(
+ status=status,
+ reason=six.moves.http_client.responses.get(status),
+ body=body,
+ headers=headers,
+ preload_content=False,
+ )
+
+
+class CallbackResponse(BaseResponse):
+ def __init__(
+ self, method, url, callback, stream=False, content_type="text/plain", **kwargs
+ ):
+ self.callback = callback
+ self.stream = stream
+ self.content_type = content_type
+ super(CallbackResponse, self).__init__(method, url, **kwargs)
+
+ def get_response(self, request):
+ headers = self.get_headers()
+
+ result = self.callback(request)
+ if isinstance(result, Exception):
+ raise result
+
+ status, r_headers, body = result
+ if isinstance(body, Exception):
+ raise body
+
+ body = _handle_body(body)
+ headers.update(r_headers)
+
+ return HTTPResponse(
+ status=status,
+ reason=six.moves.http_client.responses.get(status),
+ body=body,
+ headers=headers,
+ preload_content=False,
+ )
+
+
+class RequestsMock(object):
+ DELETE = "DELETE"
+ GET = "GET"
+ HEAD = "HEAD"
+ OPTIONS = "OPTIONS"
+ PATCH = "PATCH"
+ POST = "POST"
+ PUT = "PUT"
+ response_callback = None
+
+ def __init__(
+ self,
+ assert_all_requests_are_fired=True,
+ response_callback=None,
+ passthru_prefixes=(),
+ target="requests.adapters.HTTPAdapter.send",
+ ):
+ self._calls = CallList()
+ self.reset()
+ self.assert_all_requests_are_fired = assert_all_requests_are_fired
+ self.response_callback = response_callback
+ self.passthru_prefixes = tuple(passthru_prefixes)
+ self.target = target
+
+ def reset(self):
+ self._matches = []
+ self._calls.reset()
+
+ def add(
+ self,
+ method=None, # method or ``Response``
+ url=None,
+ body="",
+ adding_headers=None,
+ *args,
+ **kwargs
+ ):
+ """
+ A basic request:
+
+ >>> responses.add(responses.GET, 'http://example.com')
+
+ You can also directly pass an object which implements the
+ ``BaseResponse`` interface:
+
+ >>> responses.add(Response(...))
+
+ A JSON payload:
+
+ >>> responses.add(
+ >>> method='GET',
+ >>> url='http://example.com',
+ >>> json={'foo': 'bar'},
+ >>> )
+
+ Custom headers:
+
+ >>> responses.add(
+ >>> method='GET',
+ >>> url='http://example.com',
+ >>> headers={'X-Header': 'foo'},
+ >>> )
+
+
+ Strict query string matching:
+
+ >>> responses.add(
+ >>> method='GET',
+ >>> url='http://example.com?foo=bar',
+ >>> match_querystring=True
+ >>> )
+ """
+ if isinstance(method, BaseResponse):
+ self._matches.append(method)
+ return
+
+ if adding_headers is not None:
+ kwargs.setdefault("headers", adding_headers)
+
+ self._matches.append(Response(method=method, url=url, body=body, **kwargs))
+
+ def add_passthru(self, prefix):
+ """
+ Register a URL prefix to passthru any non-matching mock requests to.
+
+ For example, to allow any request to 'https://example.com', but require
+ mocks for the remainder, you would add the prefix as so:
+
+ >>> responses.add_passthru('https://example.com')
+ """
+ if _has_unicode(prefix):
+ prefix = _clean_unicode(prefix)
+ self.passthru_prefixes += (prefix,)
+
+ def remove(self, method_or_response=None, url=None):
+ """
+ Removes a response previously added using ``add()``, identified
+ either by a response object inheriting ``BaseResponse`` or
+ ``method`` and ``url``. Removes all matching responses.
+
+ >>> response.add(responses.GET, 'http://example.org')
+ >>> response.remove(responses.GET, 'http://example.org')
+ """
+ if isinstance(method_or_response, BaseResponse):
+ response = method_or_response
+ else:
+ response = BaseResponse(method=method_or_response, url=url)
+
+ while response in self._matches:
+ self._matches.remove(response)
+
+ def replace(self, method_or_response=None, url=None, body="", *args, **kwargs):
+ """
+ Replaces a response previously added using ``add()``. The signature
+ is identical to ``add()``. The response is identified using ``method``
+ and ``url``, and the first matching response is replaced.
+
+ >>> responses.add(responses.GET, 'http://example.org', json={'data': 1})
+ >>> responses.replace(responses.GET, 'http://example.org', json={'data': 2})
+ """
+ if isinstance(method_or_response, BaseResponse):
+ response = method_or_response
+ else:
+ response = Response(method=method_or_response, url=url, body=body, **kwargs)
+
+ index = self._matches.index(response)
+ self._matches[index] = response
+
+ def add_callback(
+ self, method, url, callback, match_querystring=False, content_type="text/plain"
+ ):
+ # ensure the url has a default path set if the url is a string
+ # url = _ensure_url_default_path(url, match_querystring)
+
+ self._matches.append(
+ CallbackResponse(
+ url=url,
+ method=method,
+ callback=callback,
+ content_type=content_type,
+ match_querystring=match_querystring,
+ )
+ )
+
+ @property
+ def calls(self):
+ return self._calls
+
+ def __enter__(self):
+ self.start()
+ return self
+
+ def __exit__(self, type, value, traceback):
+ success = type is None
+ self.stop(allow_assert=success)
+ self.reset()
+ return success
+
+ def activate(self, func):
+ return get_wrapped(func, self)
+
+ def _find_match(self, request):
+ found = None
+ found_match = None
+ for i, match in enumerate(self._matches):
+ if match.matches(request):
+ if found is None:
+ found = i
+ found_match = match
+ else:
+ # Multiple matches found. Remove & return the first match.
+ return self._matches.pop(found)
+
+ return found_match
+
+ def _on_request(self, adapter, request, **kwargs):
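+        # Dispatch flow: look for a registered match; if none, either pass the
+        # request through to the real transport (when a passthru prefix matches)
+        # or raise a ConnectionError recorded against this call.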
+ match = self._find_match(request)
+ resp_callback = self.response_callback
+
+ if match is None:
+ if request.url.startswith(self.passthru_prefixes):
+ logger.info("request.allowed-passthru", extra={"url": request.url})
+ return _real_send(adapter, request, **kwargs)
+
+ error_msg = (
+ "Connection refused by Responses: {0} {1} doesn't "
+ "match Responses Mock".format(request.method, request.url)
+ )
+ response = ConnectionError(error_msg)
+ response.request = request
+
+ self._calls.add(request, response)
+ response = resp_callback(response) if resp_callback else response
+ raise response
+
+ try:
+ response = adapter.build_response(request, match.get_response(request))
+ except Exception as response:
+ match.call_count += 1
+ self._calls.add(request, response)
+ response = resp_callback(response) if resp_callback else response
+ raise
+
+ if not match.stream:
+ response.content # NOQA
+
+ try:
+ response.cookies = _cookies_from_headers(response.headers)
+ except (KeyError, TypeError):
+ pass
+
+ response = resp_callback(response) if resp_callback else response
+ match.call_count += 1
+ self._calls.add(request, response)
+ return response
+
+ def start(self):
+ def unbound_on_send(adapter, request, *a, **kwargs):
+ return self._on_request(adapter, request, *a, **kwargs)
+
+ self._patcher = std_mock.patch(target=self.target, new=unbound_on_send)
+ self._patcher.start()
+
+ def stop(self, allow_assert=True):
+ self._patcher.stop()
+ if not self.assert_all_requests_are_fired:
+ return
+
+ if not allow_assert:
+ return
+
+ not_called = [m for m in self._matches if m.call_count == 0]
+ if not_called:
+ raise AssertionError(
+ "Not all requests have been executed {0!r}".format(
+ [(match.method, match.url) for match in not_called]
+ )
+ )
+
+
+# expose default mock namespace
+mock = _default_mock = RequestsMock(assert_all_requests_are_fired=False)
+__all__ = ["CallbackResponse", "Response", "RequestsMock"]
+for __attr in (a for a in dir(_default_mock) if not a.startswith("_")):
+ __all__.append(__attr)
+ globals()[__attr] = getattr(_default_mock, __attr)
diff --git a/third_party/python/responses/setup.cfg b/third_party/python/responses/setup.cfg
new file mode 100644
index 0000000000..79de8cb6e5
--- /dev/null
+++ b/third_party/python/responses/setup.cfg
@@ -0,0 +1,16 @@
+[tool:pytest]
+addopts = --tb=short
+
+[bdist_wheel]
+universal = 1
+
+[flake8]
+max-line-length = 100
+
+[yapf]
+based_on_style = pep8
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/responses/setup.py b/third_party/python/responses/setup.py
new file mode 100644
index 0000000000..779aa32f40
--- /dev/null
+++ b/third_party/python/responses/setup.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+"""
+responses
+=========
+
+A utility library for mocking out the `requests` Python library.
+
+:copyright: (c) 2015 David Cramer
+:license: Apache 2.0
+"""
+
+import sys
+
+from setuptools import setup
+from setuptools.command.test import test as TestCommand
+
+setup_requires = []
+
+if "test" in sys.argv:
+ setup_requires.append("pytest")
+
+install_requires = [
+ "cookies; python_version < '3.4'",
+ "mock; python_version < '3.3'",
+ "requests>=2.0",
+ "six",
+]
+
+tests_require = [
+ "pytest",
+ "coverage >= 3.7.1, < 5.0.0",
+ "pytest-cov",
+ "pytest-localserver",
+ "flake8",
+]
+
+extras_require = {"tests": tests_require}
+
+
+class PyTest(TestCommand):
+ def finalize_options(self):
+ TestCommand.finalize_options(self)
+ self.test_args = ["test_responses.py"]
+ self.test_suite = True
+
+ def run_tests(self):
+ # import here, cause outside the eggs aren't loaded
+ import pytest
+
+ errno = pytest.main(self.test_args)
+ sys.exit(errno)
+
+
+setup(
+ name="responses",
+ version="0.10.6",
+ author="David Cramer",
+ description=("A utility library for mocking out the `requests` Python library."),
+ url="https://github.com/getsentry/responses",
+ license="Apache 2.0",
+ long_description=open("README.rst").read(),
+ py_modules=["responses"],
+ zip_safe=False,
+ python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
+ install_requires=install_requires,
+ extras_require=extras_require,
+ tests_require=tests_require,
+ setup_requires=setup_requires,
+ cmdclass={"test": PyTest},
+ include_package_data=True,
+ classifiers=[
+ "Intended Audience :: Developers",
+ "Intended Audience :: System Administrators",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Topic :: Software Development",
+ ],
+)
diff --git a/third_party/python/responses/test_responses.py b/third_party/python/responses/test_responses.py
new file mode 100644
index 0000000000..0350e49538
--- /dev/null
+++ b/third_party/python/responses/test_responses.py
@@ -0,0 +1,924 @@
+# coding: utf-8
+
+from __future__ import absolute_import, print_function, division, unicode_literals
+
+import inspect
+import re
+import six
+
+import pytest
+import requests
+import responses
+from requests.exceptions import ConnectionError, HTTPError
+from responses import BaseResponse, Response
+
+try:
+ from mock import patch, Mock
+except ImportError:
+ from unittest.mock import patch, Mock
+
+
+def assert_reset():
+ assert len(responses._default_mock._matches) == 0
+ assert len(responses.calls) == 0
+
+
+def assert_response(resp, body=None, content_type="text/plain"):
+ assert resp.status_code == 200
+ assert resp.reason == "OK"
+ if content_type is not None:
+ assert resp.headers["Content-Type"] == content_type
+ else:
+ assert "Content-Type" not in resp.headers
+ assert resp.text == body
+
+
+def test_response():
+ @responses.activate
+ def run():
+ responses.add(responses.GET, "http://example.com", body=b"test")
+ resp = requests.get("http://example.com")
+ assert_response(resp, "test")
+ assert len(responses.calls) == 1
+ assert responses.calls[0].request.url == "http://example.com/"
+ assert responses.calls[0].response.content == b"test"
+
+ resp = requests.get("http://example.com?foo=bar")
+ assert_response(resp, "test")
+ assert len(responses.calls) == 2
+ assert responses.calls[1].request.url == "http://example.com/?foo=bar"
+ assert responses.calls[1].response.content == b"test"
+
+ run()
+ assert_reset()
+
+
+def test_response_encoded():
+ @responses.activate
+ def run():
+ # Path contains urlencoded =/()[]
+ url = "http://example.org/foo.bar%3D%2F%28%29%5B%5D"
+ responses.add(responses.GET, url, body="it works", status=200)
+ resp = requests.get(url)
+ assert_response(resp, "it works")
+
+ run()
+ assert_reset()
+
+
+def test_response_with_instance():
+ @responses.activate
+ def run():
+ responses.add(
+ responses.Response(method=responses.GET, url="http://example.com")
+ )
+ resp = requests.get("http://example.com")
+ assert_response(resp, "")
+ assert len(responses.calls) == 1
+ assert responses.calls[0].request.url == "http://example.com/"
+
+ resp = requests.get("http://example.com?foo=bar")
+ assert_response(resp, "")
+ assert len(responses.calls) == 2
+ assert responses.calls[1].request.url == "http://example.com/?foo=bar"
+
+
+@pytest.mark.parametrize(
+ "original,replacement",
+ [
+ ("http://example.com/two", "http://example.com/two"),
+ (
+ Response(method=responses.GET, url="http://example.com/two"),
+ Response(
+ method=responses.GET, url="http://example.com/two", body="testtwo"
+ ),
+ ),
+ (
+ re.compile(r"http://example\.com/two"),
+ re.compile(r"http://example\.com/two"),
+ ),
+ ],
+)
+def test_replace(original, replacement):
+ @responses.activate
+ def run():
+ responses.add(responses.GET, "http://example.com/one", body="test1")
+
+ if isinstance(original, BaseResponse):
+ responses.add(original)
+ else:
+ responses.add(responses.GET, original, body="test2")
+
+ responses.add(responses.GET, "http://example.com/three", body="test3")
+ responses.add(
+ responses.GET, re.compile(r"http://example\.com/four"), body="test3"
+ )
+
+ if isinstance(replacement, BaseResponse):
+ responses.replace(replacement)
+ else:
+ responses.replace(responses.GET, replacement, body="testtwo")
+
+ resp = requests.get("http://example.com/two")
+ assert_response(resp, "testtwo")
+
+ run()
+ assert_reset()
+
+
+@pytest.mark.parametrize(
+ "original,replacement",
+ [
+ ("http://example.com/one", re.compile(r"http://example\.com/one")),
+ (re.compile(r"http://example\.com/one"), "http://example.com/one"),
+ ],
+)
+def test_replace_error(original, replacement):
+ @responses.activate
+ def run():
+ responses.add(responses.GET, original)
+ with pytest.raises(ValueError):
+ responses.replace(responses.GET, replacement)
+
+ run()
+ assert_reset()
+
+
+def test_remove():
+ @responses.activate
+ def run():
+ responses.add(responses.GET, "http://example.com/zero")
+ responses.add(responses.GET, "http://example.com/one")
+ responses.add(responses.GET, "http://example.com/two")
+ responses.add(responses.GET, re.compile(r"http://example\.com/three"))
+ responses.add(responses.GET, re.compile(r"http://example\.com/four"))
+ re.purge()
+ responses.remove(responses.GET, "http://example.com/two")
+ responses.remove(Response(method=responses.GET, url="http://example.com/zero"))
+ responses.remove(responses.GET, re.compile(r"http://example\.com/four"))
+
+ with pytest.raises(ConnectionError):
+ requests.get("http://example.com/zero")
+ requests.get("http://example.com/one")
+ with pytest.raises(ConnectionError):
+ requests.get("http://example.com/two")
+ requests.get("http://example.com/three")
+ with pytest.raises(ConnectionError):
+ requests.get("http://example.com/four")
+
+ run()
+ assert_reset()
+
+
+@pytest.mark.parametrize(
+ "args1,kwargs1,args2,kwargs2,expected",
+ [
+ ((responses.GET, "a"), {}, (responses.GET, "a"), {}, True),
+ ((responses.GET, "a"), {}, (responses.GET, "b"), {}, False),
+ ((responses.GET, "a"), {}, (responses.POST, "a"), {}, False),
+ (
+ (responses.GET, "a"),
+ {"match_querystring": True},
+ (responses.GET, "a"),
+ {},
+ True,
+ ),
+ ],
+)
+def test_response_equality(args1, kwargs1, args2, kwargs2, expected):
+ o1 = BaseResponse(*args1, **kwargs1)
+ o2 = BaseResponse(*args2, **kwargs2)
+ assert (o1 == o2) is expected
+ assert (o1 != o2) is not expected
+
+
+def test_response_equality_different_objects():
+ o1 = BaseResponse(method=responses.GET, url="a")
+ o2 = "str"
+ assert (o1 == o2) is False
+ assert (o1 != o2) is True
+
+
+def test_connection_error():
+ @responses.activate
+ def run():
+ responses.add(responses.GET, "http://example.com")
+
+ with pytest.raises(ConnectionError):
+ requests.get("http://example.com/foo")
+
+ assert len(responses.calls) == 1
+ assert responses.calls[0].request.url == "http://example.com/foo"
+ assert type(responses.calls[0].response) is ConnectionError
+ assert responses.calls[0].response.request
+
+ run()
+ assert_reset()
+
+
+def test_match_querystring():
+ @responses.activate
+ def run():
+ url = "http://example.com?test=1&foo=bar"
+ responses.add(responses.GET, url, match_querystring=True, body=b"test")
+ resp = requests.get("http://example.com?test=1&foo=bar")
+ assert_response(resp, "test")
+ resp = requests.get("http://example.com?foo=bar&test=1")
+ assert_response(resp, "test")
+ resp = requests.get("http://example.com/?foo=bar&test=1")
+ assert_response(resp, "test")
+
+ run()
+ assert_reset()
+
+
+def test_match_empty_querystring():
+ @responses.activate
+ def run():
+ responses.add(
+ responses.GET, "http://example.com", body=b"test", match_querystring=True
+ )
+ resp = requests.get("http://example.com")
+ assert_response(resp, "test")
+ resp = requests.get("http://example.com/")
+ assert_response(resp, "test")
+ with pytest.raises(ConnectionError):
+ requests.get("http://example.com?query=foo")
+
+ run()
+ assert_reset()
+
+
+def test_match_querystring_error():
+ @responses.activate
+ def run():
+ responses.add(
+ responses.GET, "http://example.com/?test=1", match_querystring=True
+ )
+
+ with pytest.raises(ConnectionError):
+ requests.get("http://example.com/foo/?test=2")
+
+ run()
+ assert_reset()
+
+
+def test_match_querystring_regex():
+ @responses.activate
+ def run():
+ """Note that `match_querystring` value shouldn't matter when passing a
+ regular expression"""
+
+ responses.add(
+ responses.GET,
+ re.compile(r"http://example\.com/foo/\?test=1"),
+ body="test1",
+ match_querystring=True,
+ )
+
+ resp = requests.get("http://example.com/foo/?test=1")
+ assert_response(resp, "test1")
+
+ responses.add(
+ responses.GET,
+ re.compile(r"http://example\.com/foo/\?test=2"),
+ body="test2",
+ match_querystring=False,
+ )
+
+ resp = requests.get("http://example.com/foo/?test=2")
+ assert_response(resp, "test2")
+
+ run()
+ assert_reset()
+
+
+def test_match_querystring_error_regex():
+ @responses.activate
+ def run():
+ """Note that `match_querystring` value shouldn't matter when passing a
+ regular expression"""
+
+ responses.add(
+ responses.GET,
+ re.compile(r"http://example\.com/foo/\?test=1"),
+ match_querystring=True,
+ )
+
+ with pytest.raises(ConnectionError):
+ requests.get("http://example.com/foo/?test=3")
+
+ responses.add(
+ responses.GET,
+ re.compile(r"http://example\.com/foo/\?test=2"),
+ match_querystring=False,
+ )
+
+ with pytest.raises(ConnectionError):
+ requests.get("http://example.com/foo/?test=4")
+
+ run()
+ assert_reset()
+
+
+def test_match_querystring_auto_activates():
+ @responses.activate
+ def run():
+ responses.add(responses.GET, "http://example.com?test=1", body=b"test")
+ resp = requests.get("http://example.com?test=1")
+ assert_response(resp, "test")
+ with pytest.raises(ConnectionError):
+ requests.get("http://example.com/?test=2")
+
+ run()
+ assert_reset()
+
+
+def test_accept_string_body():
+ @responses.activate
+ def run():
+ url = "http://example.com/"
+ responses.add(responses.GET, url, body="test")
+ resp = requests.get(url)
+ assert_response(resp, "test")
+
+ run()
+ assert_reset()
+
+
+def test_accept_json_body():
+ @responses.activate
+ def run():
+ content_type = "application/json"
+
+ url = "http://example.com/"
+ responses.add(responses.GET, url, json={"message": "success"})
+ resp = requests.get(url)
+ assert_response(resp, '{"message": "success"}', content_type)
+
+ url = "http://example.com/1/"
+ responses.add(responses.GET, url, json=[])
+ resp = requests.get(url)
+ assert_response(resp, "[]", content_type)
+
+ run()
+ assert_reset()
+
+
+def test_no_content_type():
+ @responses.activate
+ def run():
+ url = "http://example.com/"
+ responses.add(responses.GET, url, body="test", content_type=None)
+ resp = requests.get(url)
+ assert_response(resp, "test", content_type=None)
+
+ run()
+ assert_reset()
+
+
+def test_arbitrary_status_code():
+ @responses.activate
+ def run():
+ url = "http://example.com/"
+ responses.add(responses.GET, url, body="test", status=418)
+ resp = requests.get(url)
+ assert resp.status_code == 418
+ assert resp.reason is None
+
+ run()
+ assert_reset()
+
+
+def test_throw_connection_error_explicit():
+ @responses.activate
+ def run():
+ url = "http://example.com"
+ exception = HTTPError("HTTP Error")
+ responses.add(responses.GET, url, exception)
+
+ with pytest.raises(HTTPError) as HE:
+ requests.get(url)
+
+ assert str(HE.value) == "HTTP Error"
+
+ run()
+ assert_reset()
+
+
+def test_callback():
+ body = b"test callback"
+ status = 400
+ reason = "Bad Request"
+ headers = {"foo": "bar"}
+ url = "http://example.com/"
+
+ def request_callback(request):
+ return (status, headers, body)
+
+ @responses.activate
+ def run():
+ responses.add_callback(responses.GET, url, request_callback)
+ resp = requests.get(url)
+ assert resp.text == "test callback"
+ assert resp.status_code == status
+ assert resp.reason == reason
+ assert "foo" in resp.headers
+ assert resp.headers["foo"] == "bar"
+
+ run()
+ assert_reset()
+
+
+def test_callback_exception_result():
+ result = Exception()
+ url = "http://example.com/"
+
+ def request_callback(request):
+ return result
+
+ @responses.activate
+ def run():
+ responses.add_callback(responses.GET, url, request_callback)
+
+ with pytest.raises(Exception) as e:
+ requests.get(url)
+
+ assert e.value is result
+
+ run()
+ assert_reset()
+
+
+def test_callback_exception_body():
+ body = Exception()
+ url = "http://example.com/"
+
+ def request_callback(request):
+ return (200, {}, body)
+
+ @responses.activate
+ def run():
+ responses.add_callback(responses.GET, url, request_callback)
+
+ with pytest.raises(Exception) as e:
+ requests.get(url)
+
+ assert e.value is body
+
+ run()
+ assert_reset()
+
+
+def test_callback_no_content_type():
+ body = b"test callback"
+ status = 400
+ reason = "Bad Request"
+ headers = {"foo": "bar"}
+ url = "http://example.com/"
+
+ def request_callback(request):
+ return (status, headers, body)
+
+ @responses.activate
+ def run():
+ responses.add_callback(responses.GET, url, request_callback, content_type=None)
+ resp = requests.get(url)
+ assert resp.text == "test callback"
+ assert resp.status_code == status
+ assert resp.reason == reason
+ assert "foo" in resp.headers
+ assert "Content-Type" not in resp.headers
+
+ run()
+ assert_reset()
+
+
+def test_regular_expression_url():
+ @responses.activate
+ def run():
+ url = re.compile(r"https?://(.*\.)?example.com")
+ responses.add(responses.GET, url, body=b"test")
+
+ resp = requests.get("http://example.com")
+ assert_response(resp, "test")
+
+ resp = requests.get("https://example.com")
+ assert_response(resp, "test")
+
+ resp = requests.get("https://uk.example.com")
+ assert_response(resp, "test")
+
+ with pytest.raises(ConnectionError):
+ requests.get("https://uk.exaaample.com")
+
+ run()
+ assert_reset()
+
+
+def test_custom_adapter():
+ @responses.activate
+ def run():
+ url = "http://example.com"
+ responses.add(responses.GET, url, body=b"test")
+
+ calls = [0]
+
+ class DummyAdapter(requests.adapters.HTTPAdapter):
+ def send(self, *a, **k):
+ calls[0] += 1
+ return super(DummyAdapter, self).send(*a, **k)
+
+ # Test that the adapter is actually used
+ session = requests.Session()
+ session.mount("http://", DummyAdapter())
+
+ resp = session.get(url, allow_redirects=False)
+ assert calls[0] == 1
+
+ # Test that the response is still correctly emulated
+ session = requests.Session()
+ session.mount("http://", DummyAdapter())
+
+ resp = session.get(url)
+ assert_response(resp, "test")
+
+ run()
+
+
+def test_responses_as_context_manager():
+ def run():
+ with responses.mock:
+ responses.add(responses.GET, "http://example.com", body=b"test")
+ resp = requests.get("http://example.com")
+ assert_response(resp, "test")
+ assert len(responses.calls) == 1
+ assert responses.calls[0].request.url == "http://example.com/"
+ assert responses.calls[0].response.content == b"test"
+
+ resp = requests.get("http://example.com?foo=bar")
+ assert_response(resp, "test")
+ assert len(responses.calls) == 2
+ assert responses.calls[1].request.url == "http://example.com/?foo=bar"
+ assert responses.calls[1].response.content == b"test"
+
+ run()
+ assert_reset()
+
+
+def test_activate_doesnt_change_signature():
+ def test_function(a, b=None):
+ return (a, b)
+
+ decorated_test_function = responses.activate(test_function)
+ if hasattr(inspect, "signature"):
+ assert inspect.signature(test_function) == inspect.signature(
+ decorated_test_function
+ )
+ else:
+ assert inspect.getargspec(test_function) == inspect.getargspec(
+ decorated_test_function
+ )
+ assert decorated_test_function(1, 2) == test_function(1, 2)
+ assert decorated_test_function(3) == test_function(3)
+
+
+def test_activate_mock_interaction():
+ @patch("sys.stdout")
+ def test_function(mock_stdout):
+ return mock_stdout
+
+ decorated_test_function = responses.activate(test_function)
+ if hasattr(inspect, "signature"):
+ assert inspect.signature(test_function) == inspect.signature(
+ decorated_test_function
+ )
+ else:
+ assert inspect.getargspec(test_function) == inspect.getargspec(
+ decorated_test_function
+ )
+
+ value = test_function()
+ assert isinstance(value, Mock)
+
+ value = decorated_test_function()
+ assert isinstance(value, Mock)
+
+
+@pytest.mark.skipif(six.PY2, reason="Cannot run in python2")
+def test_activate_doesnt_change_signature_with_return_type():
+ def test_function(a, b=None):
+ return (a, b)
+
+ # Add type annotations as they are syntax errors in py2.
+ # Use a class to test for import errors in evaled code.
+ test_function.__annotations__["return"] = Mock
+ test_function.__annotations__["a"] = Mock
+
+ decorated_test_function = responses.activate(test_function)
+ if hasattr(inspect, "signature"):
+ assert inspect.signature(test_function) == inspect.signature(
+ decorated_test_function
+ )
+ else:
+ assert inspect.getargspec(test_function) == inspect.getargspec(
+ decorated_test_function
+ )
+ assert decorated_test_function(1, 2) == test_function(1, 2)
+ assert decorated_test_function(3) == test_function(3)
+
+
+def test_activate_doesnt_change_signature_for_method():
+ class TestCase(object):
+ def test_function(self, a, b=None):
+ return (self, a, b)
+
+ decorated_test_function = responses.activate(test_function)
+
+ test_case = TestCase()
+ assert test_case.decorated_test_function(1, 2) == test_case.test_function(1, 2)
+ assert test_case.decorated_test_function(3) == test_case.test_function(3)
+
+
+def test_response_cookies():
+ body = b"test callback"
+ status = 200
+ headers = {"set-cookie": "session_id=12345; a=b; c=d"}
+ url = "http://example.com/"
+
+ def request_callback(request):
+ return (status, headers, body)
+
+ @responses.activate
+ def run():
+ responses.add_callback(responses.GET, url, request_callback)
+ resp = requests.get(url)
+ assert resp.text == "test callback"
+ assert resp.status_code == status
+ assert "session_id" in resp.cookies
+ assert resp.cookies["session_id"] == "12345"
+ assert resp.cookies["a"] == "b"
+ assert resp.cookies["c"] == "d"
+
+ run()
+ assert_reset()
+
+
+def test_response_callback():
+ """adds a callback to decorate the response, then checks it"""
+
+ def run():
+ def response_callback(resp):
+ resp._is_mocked = True
+ return resp
+
+ with responses.RequestsMock(response_callback=response_callback) as m:
+ m.add(responses.GET, "http://example.com", body=b"test")
+ resp = requests.get("http://example.com")
+ assert resp.text == "test"
+ assert hasattr(resp, "_is_mocked")
+ assert resp._is_mocked is True
+
+ run()
+ assert_reset()
+
+
+def test_response_filebody():
+ """ Adds the possibility to use actual (binary) files as responses """
+
+ def run():
+ with responses.RequestsMock() as m:
+ with open("README.rst", "rb") as out:
+ m.add(responses.GET, "http://example.com", body=out, stream=True)
+ resp = requests.get("http://example.com")
+ with open("README.rst", "r") as out:
+ assert resp.text == out.read()
+
+
+def test_assert_all_requests_are_fired():
+ def run():
+ with pytest.raises(AssertionError) as excinfo:
+ with responses.RequestsMock(assert_all_requests_are_fired=True) as m:
+ m.add(responses.GET, "http://example.com", body=b"test")
+ assert "http://example.com" in str(excinfo.value)
+ assert responses.GET in str(excinfo)
+
+        # check that assert_all_requests_are_fired defaults to True
+ with pytest.raises(AssertionError):
+ with responses.RequestsMock() as m:
+ m.add(responses.GET, "http://example.com", body=b"test")
+
+ # check that assert_all_requests_are_fired doesn't swallow exceptions
+ with pytest.raises(ValueError):
+ with responses.RequestsMock() as m:
+ m.add(responses.GET, "http://example.com", body=b"test")
+ raise ValueError()
+
+ # check that assert_all_requests_are_fired=True doesn't remove urls
+ with responses.RequestsMock(assert_all_requests_are_fired=True) as m:
+ m.add(responses.GET, "http://example.com", body=b"test")
+ assert len(m._matches) == 1
+ requests.get("http://example.com")
+ assert len(m._matches) == 1
+
+ # check that assert_all_requests_are_fired=True counts mocked errors
+ with responses.RequestsMock(assert_all_requests_are_fired=True) as m:
+ m.add(responses.GET, "http://example.com", body=Exception())
+ assert len(m._matches) == 1
+ with pytest.raises(Exception):
+ requests.get("http://example.com")
+ assert len(m._matches) == 1
+
+ run()
+ assert_reset()
+
+
+def test_allow_redirects_samehost():
+ redirecting_url = "http://example.com"
+ final_url_path = "/1"
+ final_url = "{0}{1}".format(redirecting_url, final_url_path)
+ url_re = re.compile(r"^http://example.com(/)?(\d+)?$")
+
+ def request_callback(request):
+ # endpoint of chained redirect
+ if request.url.endswith(final_url_path):
+ return 200, (), b"test"
+
+ # otherwise redirect to an integer path
+ else:
+ if request.url.endswith("/0"):
+ n = 1
+ else:
+ n = 0
+ redirect_headers = {"location": "/{0!s}".format(n)}
+ return 301, redirect_headers, None
+
+ def run():
+ # setup redirect
+ with responses.mock:
+ responses.add_callback(responses.GET, url_re, request_callback)
+ resp_no_redirects = requests.get(redirecting_url, allow_redirects=False)
+ assert resp_no_redirects.status_code == 301
+ assert len(responses.calls) == 1 # 1x300
+ assert responses.calls[0][1].status_code == 301
+ assert_reset()
+
+ with responses.mock:
+ responses.add_callback(responses.GET, url_re, request_callback)
+ resp_yes_redirects = requests.get(redirecting_url, allow_redirects=True)
+ assert len(responses.calls) == 3 # 2x300 + 1x200
+ assert len(resp_yes_redirects.history) == 2
+ assert resp_yes_redirects.status_code == 200
+ assert final_url == resp_yes_redirects.url
+ status_codes = [call[1].status_code for call in responses.calls]
+ assert status_codes == [301, 301, 200]
+ assert_reset()
+
+ run()
+ assert_reset()
+
+
+def test_handles_unicode_querystring():
+ url = "http://example.com/test?type=2&ie=utf8&query=汉字"
+
+ @responses.activate
+ def run():
+ responses.add(responses.GET, url, body="test", match_querystring=True)
+
+ resp = requests.get(url)
+
+ assert_response(resp, "test")
+
+ run()
+ assert_reset()
+
+
+def test_handles_unicode_url():
+ url = "http://www.संजाल.भारत/hi/वेबसाइट-डिजाइन"
+
+ @responses.activate
+ def run():
+ responses.add(responses.GET, url, body="test")
+
+ resp = requests.get(url)
+
+ assert_response(resp, "test")
+
+ run()
+ assert_reset()
+
+
+def test_headers():
+ @responses.activate
+ def run():
+ responses.add(
+ responses.GET, "http://example.com", body="", headers={"X-Test": "foo"}
+ )
+ resp = requests.get("http://example.com")
+ assert resp.headers["X-Test"] == "foo"
+
+ run()
+ assert_reset()
+
+
+def test_legacy_adding_headers():
+ @responses.activate
+ def run():
+ responses.add(
+ responses.GET,
+ "http://example.com",
+ body="",
+ adding_headers={"X-Test": "foo"},
+ )
+ resp = requests.get("http://example.com")
+ assert resp.headers["X-Test"] == "foo"
+
+ run()
+ assert_reset()
+
+
+def test_multiple_responses():
+ @responses.activate
+ def run():
+ responses.add(responses.GET, "http://example.com", body="test")
+ responses.add(responses.GET, "http://example.com", body="rest")
+
+ resp = requests.get("http://example.com")
+ assert_response(resp, "test")
+ resp = requests.get("http://example.com")
+ assert_response(resp, "rest")
+ # After all responses are used, last response should be repeated
+ resp = requests.get("http://example.com")
+ assert_response(resp, "rest")
+
+ run()
+ assert_reset()
+
+
+def test_multiple_urls():
+ @responses.activate
+ def run():
+ responses.add(responses.GET, "http://example.com/one", body="one")
+ responses.add(responses.GET, "http://example.com/two", body="two")
+
+ resp = requests.get("http://example.com/two")
+ assert_response(resp, "two")
+ resp = requests.get("http://example.com/one")
+ assert_response(resp, "one")
+
+ run()
+ assert_reset()
+
+
+def test_passthru(httpserver):
+ httpserver.serve_content("OK", headers={"Content-Type": "text/plain"})
+
+ @responses.activate
+ def run():
+ responses.add_passthru(httpserver.url)
+ responses.add(responses.GET, "{}/one".format(httpserver.url), body="one")
+ responses.add(responses.GET, "http://example.com/two", body="two")
+
+ resp = requests.get("http://example.com/two")
+ assert_response(resp, "two")
+ resp = requests.get("{}/one".format(httpserver.url))
+ assert_response(resp, "one")
+ resp = requests.get(httpserver.url)
+ assert_response(resp, "OK")
+
+ run()
+ assert_reset()
+
+
+def test_method_named_param():
+ @responses.activate
+ def run():
+ responses.add(method=responses.GET, url="http://example.com", body="OK")
+ resp = requests.get("http://example.com")
+ assert_response(resp, "OK")
+
+ run()
+ assert_reset()
+
+
+def test_passthru_unicode():
+ @responses.activate
+ def run():
+ with responses.RequestsMock() as m:
+ url = "http://موقع.وزارة-الاتصالات.مصر/"
+ clean_url = "http://xn--4gbrim.xn----ymcbaaajlc6dj7bxne2c.xn--wgbh1c/"
+ m.add_passthru(url)
+ assert m.passthru_prefixes[0] == clean_url
+
+ run()
+ assert_reset()
+
+
+def test_custom_target(monkeypatch):
+ requests_mock = responses.RequestsMock(target="something.else")
+ std_mock_mock = responses.std_mock.MagicMock()
+ patch_mock = std_mock_mock.patch
+ monkeypatch.setattr(responses, "std_mock", std_mock_mock)
+ requests_mock.start()
+ assert len(patch_mock.call_args_list) == 1
+ assert patch_mock.call_args[1]["target"] == "something.else"
diff --git a/third_party/python/responses/tox.ini b/third_party/python/responses/tox.ini
new file mode 100644
index 0000000000..94d030cc37
--- /dev/null
+++ b/third_party/python/responses/tox.ini
@@ -0,0 +1,7 @@
+[tox]
+envlist = py27,py34,py35,py36,py37
+
+[testenv]
+extras = tests
+commands =
+ pytest . --cov responses --cov-report term-missing
diff --git a/third_party/python/rsa/LICENSE b/third_party/python/rsa/LICENSE
new file mode 100644
index 0000000000..da76c9d7f8
--- /dev/null
+++ b/third_party/python/rsa/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/python/rsa/MANIFEST.in b/third_party/python/rsa/MANIFEST.in
new file mode 100644
index 0000000000..8cf0021b9a
--- /dev/null
+++ b/third_party/python/rsa/MANIFEST.in
@@ -0,0 +1,5 @@
+include README
+include LICENSE
+include *.py
+recursive-include rsa *.py
+recursive-include tests *.py
diff --git a/third_party/python/rsa/PKG-INFO b/third_party/python/rsa/PKG-INFO
new file mode 100644
index 0000000000..399ba7b3e9
--- /dev/null
+++ b/third_party/python/rsa/PKG-INFO
@@ -0,0 +1,18 @@
+Metadata-Version: 1.1
+Name: rsa
+Version: 3.1.4
+Summary: Pure-Python RSA implementation
+Home-page: http://stuvel.eu/rsa
+Author: Sybren A. Stuvel
+Author-email: sybren@stuvel.eu
+License: ASL 2
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Information Technology
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Security :: Cryptography
diff --git a/third_party/python/rsa/README.rst b/third_party/python/rsa/README.rst
new file mode 100644
index 0000000000..9f348636d7
--- /dev/null
+++ b/third_party/python/rsa/README.rst
@@ -0,0 +1,31 @@
+Pure Python RSA implementation
+==============================
+
+`Python-RSA`_ is a pure-Python RSA implementation. It supports
+encryption and decryption, signing and verifying signatures, and key
+generation according to PKCS#1 version 1.5. It can be used as a Python
+library as well as on the commandline. The code was mostly written by
+Sybren A. Stüvel.
+
+Documentation can be found at the Python-RSA homepage:
+http://stuvel.eu/rsa
+
+Download and install using::
+
+ pip install rsa
+
+or::
+
+ easy_install rsa
+
+or download it from the `Python Package Index`_.
+
+The source code is maintained in a `Mercurial repository`_ and is
+licensed under the `Apache License, version 2.0`_
+
+
+.. _`Python-RSA`: http://stuvel.eu/rsa
+.. _`Mercurial repository`: https://bitbucket.org/sybren/python-rsa
+.. _`Python Package Index`: http://pypi.python.org/pypi/rsa
+.. _`Apache License, version 2.0`: http://www.apache.org/licenses/LICENSE-2.0
+
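For orientation, the API the README describes is typically driven as in the following minimal sketch (not part of the vendored sources; it uses the documented top-level functions and a deliberately small demo key):

    import rsa

    (pub_key, priv_key) = rsa.newkeys(512)                # demo size; use >= 2048 bits in practice
    ciphertext = rsa.encrypt(b'secret message', pub_key)  # PKCS#1 v1.5 encryption with the public key
    assert rsa.decrypt(ciphertext, priv_key) == b'secret message'

    signature = rsa.sign(b'secret message', priv_key, 'SHA-256')
    rsa.verify(b'secret message', signature, pub_key)     # raises rsa.VerificationError on mismatch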
diff --git a/third_party/python/rsa/create_timing_table.py b/third_party/python/rsa/create_timing_table.py
new file mode 100755
index 0000000000..b1b2871b3d
--- /dev/null
+++ b/third_party/python/rsa/create_timing_table.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+import time
+import rsa
+
+poolsize = 8
+accurate = True
+
+def run_speed_test(bitsize):
+
+ iterations = 0
+ start = end = time.time()
+
+ # At least a number of iterations, and at least 2 seconds
+ while iterations < 10 or end - start < 2:
+ iterations += 1
+ rsa.newkeys(bitsize, accurate=accurate, poolsize=poolsize)
+ end = time.time()
+
+ duration = end - start
+ dur_per_call = duration / iterations
+
+ print '%5i bit: %9.3f sec. (%i iterations over %.1f seconds)' % (bitsize,
+ dur_per_call, iterations, duration)
+
+for bitsize in (128, 256, 384, 512, 1024, 2048, 3072, 4096):
+ run_speed_test(bitsize)
+
+
diff --git a/third_party/python/rsa/playstuff.py b/third_party/python/rsa/playstuff.py
new file mode 100755
index 0000000000..bfb941b88c
--- /dev/null
+++ b/third_party/python/rsa/playstuff.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+import re
+import rsa
+
+def _logon( username, password ):
+ # Retrieve the public key
+ # network stuff # req = urllib2.Request(AAA_GET_KEY, headers={'User-Agent': CLIENT_ID})
+ # network stuff # response = urllib2.urlopen(req)
+ # network stuff # html = response.read()
+ # network stuff # print response.info() # DEBUG
+ # network stuff # print html # DEBUG
+
+ # replacement for network stuff #
+ html="<x509PublicKey>30820122300d06092a864886f70d01010105000382010f003082010a0282010100dad8e3c084137bab285e869ae99a5de9752a095753680e9128adbe981e8141225704e558b8ee437836ec8c5460514efae61550bfdd883549981458bae388c9490b5ab43475068b169b32da446b0aae2dfbb3a5f425c74b284ced3f57ed33b30ec7b4b95a8216f8b063e34af2c84fef58bab381f3b79b80d06b687e0b5fc7aaeb311a88389ab7aa1422ae0b58956bb9e91c5cbf2b98422b05e1eacb82e29938566f6f05274294a8c596677c950ce97dcd003709d008f1ae6418ce5bf55ad2bf921318c6e31b324bdda4b4f12ff1fd86b5b71e647d1fc175aea137ba0ff869d5fbcf9ed0289fe7da3619c1204fc42d616462ac1b6a4e6ca2655d44bce039db519d0203010001</x509PublicKey>"
+ # end replacement for network stuff #
+
+ # This shall pick the key
+ hexstring = re.compile('<x509PublicKey[^>]*>([0-9a-fA-F]+)</x509PublicKey>')
+
+ # pick the key and convert it to der format
+ hex_pub_der = hexstring.search(html).group(1)
+ pub_der = hex_pub_der.decode('hex')
+
+ # Convert it to a public key
+ pub_key = rsa.PublicKey.load_pkcs1_openssl_der(pub_der)
+
+ # encode the password
+ enc_pass = rsa.encrypt(password, pub_key)
+
+ # and hex-encode it
+ hex_pass = enc_pass.encode('hex')
+
+# _logon('me', 'MyPass')
+
+import timeit
+timeit.timeit('_logon( "me", "MyPass" )',
+ setup='from __main__ import _logon',
+ number=1000)
+
+
diff --git a/third_party/python/rsa/rsa.egg-info/PKG-INFO b/third_party/python/rsa/rsa.egg-info/PKG-INFO
new file mode 100644
index 0000000000..399ba7b3e9
--- /dev/null
+++ b/third_party/python/rsa/rsa.egg-info/PKG-INFO
@@ -0,0 +1,18 @@
+Metadata-Version: 1.1
+Name: rsa
+Version: 3.1.4
+Summary: Pure-Python RSA implementation
+Home-page: http://stuvel.eu/rsa
+Author: Sybren A. Stuvel
+Author-email: sybren@stuvel.eu
+License: ASL 2
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Information Technology
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Security :: Cryptography
diff --git a/third_party/python/rsa/rsa.egg-info/SOURCES.txt b/third_party/python/rsa/rsa.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..58d22b9c2e
--- /dev/null
+++ b/third_party/python/rsa/rsa.egg-info/SOURCES.txt
@@ -0,0 +1,46 @@
+LICENSE
+MANIFEST.in
+README.rst
+create_timing_table.py
+playstuff.py
+run_tests.py
+setup.cfg
+setup.py
+rsa/__init__.py
+rsa/_compat.py
+rsa/_version133.py
+rsa/_version200.py
+rsa/asn1.py
+rsa/bigfile.py
+rsa/cli.py
+rsa/common.py
+rsa/core.py
+rsa/key.py
+rsa/parallel.py
+rsa/pem.py
+rsa/pkcs1.py
+rsa/prime.py
+rsa/randnum.py
+rsa/transform.py
+rsa/util.py
+rsa/varblock.py
+rsa.egg-info/PKG-INFO
+rsa.egg-info/SOURCES.txt
+rsa.egg-info/dependency_links.txt
+rsa.egg-info/entry_points.txt
+rsa.egg-info/requires.txt
+rsa.egg-info/top_level.txt
+tests/__init__.py
+tests/constants.py
+tests/py2kconstants.py
+tests/py3kconstants.py
+tests/test_bigfile.py
+tests/test_common.py
+tests/test_compat.py
+tests/test_integers.py
+tests/test_load_save_keys.py
+tests/test_pem.py
+tests/test_pkcs1.py
+tests/test_strings.py
+tests/test_transform.py
+tests/test_varblock.py
\ No newline at end of file
diff --git a/third_party/python/rsa/rsa.egg-info/dependency_links.txt b/third_party/python/rsa/rsa.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/rsa/rsa.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/third_party/python/rsa/rsa.egg-info/entry_points.txt b/third_party/python/rsa/rsa.egg-info/entry_points.txt
new file mode 100644
index 0000000000..65d0e29759
--- /dev/null
+++ b/third_party/python/rsa/rsa.egg-info/entry_points.txt
@@ -0,0 +1,10 @@
+[console_scripts]
+pyrsa-encrypt = rsa.cli:encrypt
+pyrsa-keygen = rsa.cli:keygen
+pyrsa-priv2pub = rsa.util:private_to_public
+pyrsa-sign = rsa.cli:sign
+pyrsa-verify = rsa.cli:verify
+pyrsa-encrypt-bigfile = rsa.cli:encrypt_bigfile
+pyrsa-decrypt-bigfile = rsa.cli:decrypt_bigfile
+pyrsa-decrypt = rsa.cli:decrypt
+
diff --git a/third_party/python/rsa/rsa.egg-info/requires.txt b/third_party/python/rsa/rsa.egg-info/requires.txt
new file mode 100644
index 0000000000..ae4e6eb9b9
--- /dev/null
+++ b/third_party/python/rsa/rsa.egg-info/requires.txt
@@ -0,0 +1 @@
+pyasn1 >= 0.1.3
\ No newline at end of file
diff --git a/third_party/python/rsa/rsa.egg-info/top_level.txt b/third_party/python/rsa/rsa.egg-info/top_level.txt
new file mode 100644
index 0000000000..703f551006
--- /dev/null
+++ b/third_party/python/rsa/rsa.egg-info/top_level.txt
@@ -0,0 +1 @@
+rsa
diff --git a/third_party/python/rsa/rsa/__init__.py b/third_party/python/rsa/rsa/__init__.py
new file mode 100644
index 0000000000..2d01c12e0f
--- /dev/null
+++ b/third_party/python/rsa/rsa/__init__.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""RSA module
+
+Module for calculating large primes, and RSA encryption, decryption, signing
+and verification. Includes generating public and private keys.
+
+WARNING: this implementation does not use random padding, compression of the
+cleartext input to prevent repetitions, or other common security improvements.
+Use with care.
+
+If you want to have a more secure implementation, use the functions from the
+``rsa.pkcs1`` module.
+
+"""
+
+__author__ = "Sybren Stuvel, Barry Mead and Yesudeep Mangalapilly"
+__date__ = "2014-02-22"
+__version__ = '3.1.4'
+
+from rsa.key import newkeys, PrivateKey, PublicKey
+from rsa.pkcs1 import encrypt, decrypt, sign, verify, DecryptionError, \
+ VerificationError
+
+# Do doctest if we're run directly
+if __name__ == "__main__":
+ import doctest
+ doctest.testmod()
+
+__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify", 'PublicKey',
+ 'PrivateKey', 'DecryptionError', 'VerificationError']
+
diff --git a/third_party/python/rsa/rsa/_compat.py b/third_party/python/rsa/rsa/_compat.py
new file mode 100644
index 0000000000..3c4eb81b13
--- /dev/null
+++ b/third_party/python/rsa/rsa/_compat.py
@@ -0,0 +1,160 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Python compatibility wrappers."""
+
+
+from __future__ import absolute_import
+
+import sys
+from struct import pack
+
+try:
+ MAX_INT = sys.maxsize
+except AttributeError:
+ MAX_INT = sys.maxint
+
+MAX_INT64 = (1 << 63) - 1
+MAX_INT32 = (1 << 31) - 1
+MAX_INT16 = (1 << 15) - 1
+
+# Determine the word size of the processor.
+if MAX_INT == MAX_INT64:
+ # 64-bit processor.
+ MACHINE_WORD_SIZE = 64
+elif MAX_INT == MAX_INT32:
+ # 32-bit processor.
+ MACHINE_WORD_SIZE = 32
+else:
+ # Else we just assume a 64-bit processor keeping up with modern times.
+ MACHINE_WORD_SIZE = 64
+
+
+try:
+ # < Python3
+ unicode_type = unicode
+ have_python3 = False
+except NameError:
+ # Python3.
+ unicode_type = str
+ have_python3 = True
+
+# Fake byte literals.
+if str is unicode_type:
+ def byte_literal(s):
+ return s.encode('latin1')
+else:
+ def byte_literal(s):
+ return s
+
+# ``long`` is no more. Do type detection using this instead.
+try:
+ integer_types = (int, long)
+except NameError:
+ integer_types = (int,)
+
+b = byte_literal
+
+try:
+ # Python 2.6 or higher.
+ bytes_type = bytes
+except NameError:
+ # Python 2.5
+ bytes_type = str
+
+
+# To avoid calling b() multiple times in tight loops.
+ZERO_BYTE = b('\x00')
+EMPTY_BYTE = b('')
+
+
+def is_bytes(obj):
+ """
+ Determines whether the given value is a byte string.
+
+ :param obj:
+ The value to test.
+ :returns:
+ ``True`` if ``obj`` is a byte string; ``False`` otherwise.
+ """
+ return isinstance(obj, bytes_type)
+
+
+def is_integer(obj):
+ """
+ Determines whether the given value is an integer.
+
+ :param obj:
+ The value to test.
+ :returns:
+ ``True`` if ``obj`` is an integer; ``False`` otherwise.
+ """
+ return isinstance(obj, integer_types)
+
+
+def byte(num):
+ """
+ Converts a number between 0 and 255 (both inclusive) to a base-256 (byte)
+ representation.
+
+ Use it as a replacement for ``chr`` where you are expecting a byte
+ because this will work on all current versions of Python.
+
+ :param num:
+ An unsigned integer between 0 and 255 (both inclusive).
+ :returns:
+ A single byte.
+ """
+ return pack("B", num)
+
+
+def get_word_alignment(num, force_arch=64,
+ _machine_word_size=MACHINE_WORD_SIZE):
+ """
+ Returns alignment details for the given number based on the platform
+ Python is running on.
+
+ :param num:
+ Unsigned integral number.
+ :param force_arch:
+ If you don't want to use 64-bit unsigned chunks, set this to
+ anything other than 64. 32-bit chunks will be preferred then.
+ Default 64 will be used when on a 64-bit machine.
+ :param _machine_word_size:
+ (Internal) The machine word size used for alignment.
+ :returns:
+ 4-tuple::
+
+ (word_bits, word_bytes,
+ max_uint, packing_format_type)
+ """
+ max_uint64 = 0xffffffffffffffff
+ max_uint32 = 0xffffffff
+ max_uint16 = 0xffff
+ max_uint8 = 0xff
+
+ if force_arch == 64 and _machine_word_size >= 64 and num > max_uint32:
+ # 64-bit unsigned integer.
+ return 64, 8, max_uint64, "Q"
+ elif num > max_uint16:
+ # 32-bit unsigned integer
+ return 32, 4, max_uint32, "L"
+ elif num > max_uint8:
+ # 16-bit unsigned integer.
+ return 16, 2, max_uint16, "H"
+ else:
+ # 8-bit unsigned integer.
+ return 8, 1, max_uint8, "B"
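Taken together, the compatibility helpers above behave as in this small illustrative sketch (assumes the module is importable as rsa._compat):

    from rsa._compat import byte, is_bytes, is_integer, get_word_alignment

    assert byte(65) == b'A'                  # a single unsigned byte, via struct.pack("B", ...)
    assert is_bytes(b'\x00\xff') and not is_bytes(u'text')
    assert is_integer(42)

    # (word_bits, word_bytes, max_uint, struct format char); values wider than
    # 32 bits fall back to 64-bit words on a 64-bit machine.
    assert get_word_alignment(1 << 40) == (64, 8, 0xffffffffffffffff, 'Q')
    assert get_word_alignment(1000) == (16, 2, 0xffff, 'H')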
diff --git a/third_party/python/rsa/rsa/_version133.py b/third_party/python/rsa/rsa/_version133.py
new file mode 100644
index 0000000000..230a03c84b
--- /dev/null
+++ b/third_party/python/rsa/rsa/_version133.py
@@ -0,0 +1,442 @@
+"""RSA module
+pri = k[1] //Private part of keys d,p,q
+
+Module for calculating large primes, and RSA encryption, decryption,
+signing and verification. Includes generating public and private keys.
+
+WARNING: this code implements the mathematics of RSA. It is not suitable for
+real-world secure cryptography purposes. It has not been reviewed by a security
+expert. It does not include padding of data. There are many ways in which the
+output of this module, when used without any modification, can be successfully
+attacked.
+"""
+
+__author__ = "Sybren Stuvel, Marloes de Boer and Ivo Tamboer"
+__date__ = "2010-02-05"
+__version__ = '1.3.3'
+
+# NOTE: Python's modulo can return negative numbers. We compensate for
+# this behaviour using the abs() function
+
+from cPickle import dumps, loads
+import base64
+import math
+import os
+import random
+import sys
+import types
+import zlib
+
+from rsa._compat import byte
+
+# Display a warning that this insecure version is imported.
+import warnings
+warnings.warn('Insecure version of the RSA module is imported as %s, be careful'
+ % __name__)
+
+def gcd(p, q):
+ """Returns the greatest common divisor of p and q
+
+
+ >>> gcd(42, 6)
+ 6
+ """
+ if p<q: return gcd(q, p)
+ if q == 0: return p
+ return gcd(q, abs(p%q))
+
+def bytes2int(bytes):
+ """Converts a list of bytes or a string to an integer
+
+ >>> (128*256 + 64)*256 + 15
+ 8405007
+ >>> l = [128, 64, 15]
+ >>> bytes2int(l)
+ 8405007
+ """
+
+ if not (type(bytes) is types.ListType or type(bytes) is types.StringType):
+ raise TypeError("You must pass a string or a list")
+
+ # Convert byte stream to integer
+ integer = 0
+ for byte in bytes:
+ integer *= 256
+ if type(byte) is types.StringType: byte = ord(byte)
+ integer += byte
+
+ return integer
+
+def int2bytes(number):
+ """Converts a number to a string of bytes
+
+ >>> bytes2int(int2bytes(123456789))
+ 123456789
+ """
+
+ if not (type(number) is types.LongType or type(number) is types.IntType):
+ raise TypeError("You must pass a long or an int")
+
+ string = ""
+
+ while number > 0:
+ string = "%s%s" % (byte(number & 0xFF), string)
+ number /= 256
+
+ return string
+
+def fast_exponentiation(a, p, n):
+ """Calculates r = a^p mod n
+ """
+ result = a % n
+ remainders = []
+ while p != 1:
+ remainders.append(p & 1)
+ p = p >> 1
+ while remainders:
+ rem = remainders.pop()
+ result = ((a ** rem) * result ** 2) % n
+ return result
+
+def read_random_int(nbits):
+ """Reads a random integer of approximately nbits bits rounded up
+ to whole bytes"""
+
+ nbytes = ceil(nbits/8.)
+ randomdata = os.urandom(nbytes)
+ return bytes2int(randomdata)
+
+def ceil(x):
+ """ceil(x) -> int(math.ceil(x))"""
+
+ return int(math.ceil(x))
+
+def randint(minvalue, maxvalue):
+ """Returns a random integer x with minvalue <= x <= maxvalue"""
+
+ # Safety - get a lot of random data even if the range is fairly
+ # small
+ min_nbits = 32
+
+ # The range of the random numbers we need to generate
+ range = maxvalue - minvalue
+
+ # Which is this number of bytes
+ rangebytes = ceil(math.log(range, 2) / 8.)
+
+ # Convert to bits, but make sure it's always at least min_nbits*2
+ rangebits = max(rangebytes * 8, min_nbits * 2)
+
+ # Take a random number of bits between min_nbits and rangebits
+ nbits = random.randint(min_nbits, rangebits)
+
+ return (read_random_int(nbits) % range) + minvalue
+
+def fermat_little_theorem(p):
+ """Returns 1 if p may be prime, and something else if p definitely
+ is not prime"""
+
+ a = randint(1, p-1)
+ return fast_exponentiation(a, p-1, p)
+
+def jacobi(a, b):
+ """Calculates the value of the Jacobi symbol (a/b)
+ """
+
+ if a % b == 0:
+ return 0
+ result = 1
+ while a > 1:
+ if a & 1:
+ if ((a-1)*(b-1) >> 2) & 1:
+ result = -result
+ b, a = a, b % a
+ else:
+ if ((b ** 2 - 1) >> 3) & 1:
+ result = -result
+ a = a >> 1
+ return result
+
+def jacobi_witness(x, n):
+ """Returns False if n is an Euler pseudo-prime with base x, and
+ True otherwise.
+ """
+
+ j = jacobi(x, n) % n
+ f = fast_exponentiation(x, (n-1)/2, n)
+
+ if j == f: return False
+ return True
+
+def randomized_primality_testing(n, k):
+ """Calculates whether n is composite (which is always correct) or
+ prime (which is incorrect with error probability 2**-k)
+
+ Returns False if the number is composite, and True if it's
+ probably prime.
+ """
+
+ q = 0.5 # Property of the jacobi_witness function
+
+ # t = int(math.ceil(k / math.log(1/q, 2)))
+ t = ceil(k / math.log(1/q, 2))
+ for i in range(t+1):
+ x = randint(1, n-1)
+ if jacobi_witness(x, n): return False
+
+ return True
+
+def is_prime(number):
+ """Returns True if the number is prime, and False otherwise.
+
+ >>> is_prime(42)
+ 0
+ >>> is_prime(41)
+ 1
+ """
+
+ """
+ if not fermat_little_theorem(number) == 1:
+ # Not prime, according to Fermat's little theorem
+ return False
+ """
+
+ if randomized_primality_testing(number, 5):
+ # Prime, according to Jacobi
+ return True
+
+ # Not prime
+ return False
+
+
+def getprime(nbits):
+ """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In
+ other words: nbits is rounded up to whole bytes.
+
+ >>> p = getprime(8)
+ >>> is_prime(p-1)
+ 0
+ >>> is_prime(p)
+ 1
+ >>> is_prime(p+1)
+ 0
+ """
+
+ nbytes = int(math.ceil(nbits/8.))
+
+ while True:
+ integer = read_random_int(nbits)
+
+ # Make sure it's odd
+ integer |= 1
+
+ # Test for primeness
+ if is_prime(integer): break
+
+ # Retry if not prime
+
+ return integer
+
+def are_relatively_prime(a, b):
+ """Returns True if a and b are relatively prime, and False if they
+ are not.
+
+ >>> are_relatively_prime(2, 3)
+ 1
+ >>> are_relatively_prime(2, 4)
+ 0
+ """
+
+ d = gcd(a, b)
+ return (d == 1)
+
+def find_p_q(nbits):
+ """Returns a tuple of two different primes of nbits bits"""
+
+ p = getprime(nbits)
+ while True:
+ q = getprime(nbits)
+ if not q == p: break
+
+ return (p, q)
+
+def extended_euclid_gcd(a, b):
+ """Returns a tuple (d, i, j) such that d = gcd(a, b) = ia + jb
+ """
+
+ if b == 0:
+ return (a, 1, 0)
+
+ q = abs(a % b)
+ r = long(a / b)
+ (d, k, l) = extended_euclid_gcd(b, q)
+
+ return (d, l, k - l*r)
+
+# Main function: calculate encryption and decryption keys
+def calculate_keys(p, q, nbits):
+ """Calculates an encryption and a decryption key for p and q, and
+ returns them as a tuple (e, d)"""
+
+ n = p * q
+ phi_n = (p-1) * (q-1)
+
+ while True:
+ # Make sure e has enough bits so we ensure "wrapping" through
+ # modulo n
+ e = getprime(max(8, nbits/2))
+ if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break
+
+ (d, i, j) = extended_euclid_gcd(e, phi_n)
+
+ if not d == 1:
+ raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n))
+
+ if not (e * i) % phi_n == 1:
+ raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n))
+
+ return (e, i)
+
+
+def gen_keys(nbits):
+ """Generate RSA keys of nbits bits. Returns (p, q, e, d).
+
+ Note: this can take a long time, depending on the key size.
+ """
+
+ while True:
+ (p, q) = find_p_q(nbits)
+ (e, d) = calculate_keys(p, q, nbits)
+
+ # For some reason, d is sometimes negative. We don't know how
+ # to fix it (yet), so we keep trying until everything is shiny
+ if d > 0: break
+
+ return (p, q, e, d)
+
+def gen_pubpriv_keys(nbits):
+ """Generates public and private keys, and returns them as (pub,
+ priv).
+
+ The public key consists of a dict {e: ..., n: ...}. The private
+ key consists of a dict {d: ..., p: ..., q: ...}.
+ """
+
+ (p, q, e, d) = gen_keys(nbits)
+
+ return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} )
+
+def encrypt_int(message, ekey, n):
+ """Encrypts a message using encryption key 'ekey', working modulo
+ n"""
+
+ if type(message) is types.IntType:
+ return encrypt_int(long(message), ekey, n)
+
+ if not type(message) is types.LongType:
+ raise TypeError("You must pass a long or an int")
+
+ if message > 0 and \
+ math.floor(math.log(message, 2)) > math.floor(math.log(n, 2)):
+ raise OverflowError("The message is too long")
+
+ return fast_exponentiation(message, ekey, n)
+
+def decrypt_int(cyphertext, dkey, n):
+ """Decrypts a cypher text using the decryption key 'dkey', working
+ modulo n"""
+
+ return encrypt_int(cyphertext, dkey, n)
+
+def sign_int(message, dkey, n):
+ """Signs 'message' using key 'dkey', working modulo n"""
+
+ return decrypt_int(message, dkey, n)
+
+def verify_int(signed, ekey, n):
+ """verifies 'signed' using key 'ekey', working modulo n"""
+
+ return encrypt_int(signed, ekey, n)
+
+def picklechops(chops):
+ """Pickles and base64encodes it's argument chops"""
+
+ value = zlib.compress(dumps(chops))
+ encoded = base64.encodestring(value)
+ return encoded.strip()
+
+def unpicklechops(string):
+ """base64decodes and unpickes it's argument string into chops"""
+
+ return loads(zlib.decompress(base64.decodestring(string)))
+
+def chopstring(message, key, n, funcref):
+ """Splits 'message' into chops that are at most as long as n,
+ converts these into integers, and calls funcref(integer, key, n)
+ for each chop.
+
+ Used by 'encrypt' and 'sign'.
+ """
+
+ msglen = len(message)
+ mbits = msglen * 8
+ nbits = int(math.floor(math.log(n, 2)))
+ nbytes = nbits / 8
+ blocks = msglen / nbytes
+
+ if msglen % nbytes > 0:
+ blocks += 1
+
+ cypher = []
+
+ for bindex in range(blocks):
+ offset = bindex * nbytes
+ block = message[offset:offset+nbytes]
+ value = bytes2int(block)
+ cypher.append(funcref(value, key, n))
+
+ return picklechops(cypher)
+
+def gluechops(chops, key, n, funcref):
+ """Glues chops back together into a string. calls
+ funcref(integer, key, n) for each chop.
+
+ Used by 'decrypt' and 'verify'.
+ """
+ message = ""
+
+ chops = unpicklechops(chops)
+
+ for cpart in chops:
+ mpart = funcref(cpart, key, n)
+ message += int2bytes(mpart)
+
+ return message
+
+def encrypt(message, key):
+ """Encrypts a string 'message' with the public key 'key'"""
+
+ return chopstring(message, key['e'], key['n'], encrypt_int)
+
+def sign(message, key):
+ """Signs a string 'message' with the private key 'key'"""
+
+ return chopstring(message, key['d'], key['p']*key['q'], decrypt_int)
+
+def decrypt(cypher, key):
+ """Decrypts a cypher with the private key 'key'"""
+
+ return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int)
+
+def verify(cypher, key):
+ """Verifies a cypher with the public key 'key'"""
+
+ return gluechops(cypher, key['e'], key['n'], encrypt_int)
+
+# Do doctest if we're not imported
+if __name__ == "__main__":
+ import doctest
+ doctest.testmod()
+
+__all__ = ["gen_pubpriv_keys", "encrypt", "decrypt", "sign", "verify"]
+
diff --git a/third_party/python/rsa/rsa/_version200.py b/third_party/python/rsa/rsa/_version200.py
new file mode 100644
index 0000000000..f915653857
--- /dev/null
+++ b/third_party/python/rsa/rsa/_version200.py
@@ -0,0 +1,529 @@
+"""RSA module
+
+Module for calculating large primes, and RSA encryption, decryption,
+signing and verification. Includes generating public and private keys.
+
+WARNING: this implementation does not use random padding, compression of the
+cleartext input to prevent repetitions, or other common security improvements.
+Use with care.
+
+"""
+
+__author__ = "Sybren Stuvel, Marloes de Boer, Ivo Tamboer, and Barry Mead"
+__date__ = "2010-02-08"
+__version__ = '2.0'
+
+import math
+import os
+import random
+import sys
+import types
+from rsa._compat import byte
+
+# Display a warning that this insecure version is imported.
+import warnings
+warnings.warn('Insecure version of the RSA module is imported as %s' % __name__)
+
+
+def bit_size(number):
+ """Returns the number of bits required to hold a specific long number"""
+
+ return int(math.ceil(math.log(number,2)))
+
+def gcd(p, q):
+ """Returns the greatest common divisor of p and q
+ >>> gcd(48, 180)
+ 12
+ """
+ # Iterative version is faster and uses much less stack space
+ while q != 0:
+ if p < q: (p,q) = (q,p)
+ (p,q) = (q, p % q)
+ return p
+
+
+def bytes2int(bytes):
+ """Converts a list of bytes or a string to an integer
+
+ >>> (((128 * 256) + 64) * 256) + 15
+ 8405007
+ >>> l = [128, 64, 15]
+ >>> bytes2int(l) #same as bytes2int('\x80@\x0f')
+ 8405007
+ """
+
+ if not (type(bytes) is types.ListType or type(bytes) is types.StringType):
+ raise TypeError("You must pass a string or a list")
+
+ # Convert byte stream to integer
+ integer = 0
+ for byte in bytes:
+ integer *= 256
+ if type(byte) is types.StringType: byte = ord(byte)
+ integer += byte
+
+ return integer
+
+def int2bytes(number):
+ """
+ Converts a number to a string of bytes
+ """
+
+ if not (type(number) is types.LongType or type(number) is types.IntType):
+ raise TypeError("You must pass a long or an int")
+
+ string = ""
+
+ while number > 0:
+ string = "%s%s" % (byte(number & 0xFF), string)
+ number /= 256
+
+ return string
+
+def to64(number):
+ """Converts a number in the range of 0 to 63 into base 64 digit
+ character in the range of '0'-'9', 'A'-'Z', 'a'-'z','-','_'.
+
+ >>> to64(10)
+ 'A'
+ """
+
+ if not (type(number) is types.LongType or type(number) is types.IntType):
+ raise TypeError("You must pass a long or an int")
+
+ if 0 <= number <= 9: #00-09 translates to '0' - '9'
+ return byte(number + 48)
+
+ if 10 <= number <= 35:
+ return byte(number + 55) #10-35 translates to 'A' - 'Z'
+
+ if 36 <= number <= 61:
+ return byte(number + 61) #36-61 translates to 'a' - 'z'
+
+ if number == 62: # 62 translates to '-' (minus)
+ return byte(45)
+
+ if number == 63: # 63 translates to '_' (underscore)
+ return byte(95)
+
+ raise ValueError('Invalid Base64 value: %i' % number)
+
+
+def from64(number):
+ """Converts an ordinal character value in the range of
+ 0-9,A-Z,a-z,-,_ to a number in the range of 0-63.
+
+ >>> from64(49)
+ 1
+ """
+
+ if not (type(number) is types.LongType or type(number) is types.IntType):
+ raise TypeError("You must pass a long or an int")
+
+ if 48 <= number <= 57: #ord('0') - ord('9') translates to 0-9
+ return(number - 48)
+
+ if 65 <= number <= 90: #ord('A') - ord('Z') translates to 10-35
+ return(number - 55)
+
+ if 97 <= number <= 122: #ord('a') - ord('z') translates to 36-61
+ return(number - 61)
+
+ if number == 45: #ord('-') translates to 62
+ return(62)
+
+ if number == 95: #ord('_') translates to 63
+ return(63)
+
+ raise ValueError('Invalid Base64 value: %i' % number)
+
+
+def int2str64(number):
+ """Converts a number to a string of base64 encoded characters in
+ the range of '0'-'9','A'-'Z,'a'-'z','-','_'.
+
+ >>> int2str64(123456789)
+ '7MyqL'
+ """
+
+ if not (type(number) is types.LongType or type(number) is types.IntType):
+ raise TypeError("You must pass a long or an int")
+
+ string = ""
+
+ while number > 0:
+ string = "%s%s" % (to64(number & 0x3F), string)
+ number /= 64
+
+ return string
+
+
+def str642int(string):
+ """Converts a base64 encoded string into an integer.
+ The chars of this string are in the range '0'-'9','A'-'Z','a'-'z','-','_'
+
+ >>> str642int('7MyqL')
+ 123456789
+ """
+
+ if not (type(string) is types.ListType or type(string) is types.StringType):
+ raise TypeError("You must pass a string or a list")
+
+ integer = 0
+ for byte in string:
+ integer *= 64
+ if type(byte) is types.StringType: byte = ord(byte)
+ integer += from64(byte)
+
+ return integer
+
+def read_random_int(nbits):
+ """Reads a random integer of approximately nbits bits rounded up
+ to whole bytes"""
+
+ nbytes = int(math.ceil(nbits/8.))
+ randomdata = os.urandom(nbytes)
+ return bytes2int(randomdata)
+
+def randint(minvalue, maxvalue):
+ """Returns a random integer x with minvalue <= x <= maxvalue"""
+
+ # Safety - get a lot of random data even if the range is fairly
+ # small
+ min_nbits = 32
+
+ # The range of the random numbers we need to generate
+ range = (maxvalue - minvalue) + 1
+
+ # Which is this number of bytes
+ rangebytes = ((bit_size(range) + 7) / 8)
+
+ # Convert to bits, but make sure it's always at least min_nbits*2
+ rangebits = max(rangebytes * 8, min_nbits * 2)
+
+ # Take a random number of bits between min_nbits and rangebits
+ nbits = random.randint(min_nbits, rangebits)
+
+ return (read_random_int(nbits) % range) + minvalue
+
+def jacobi(a, b):
+ """Calculates the value of the Jacobi symbol (a/b)
+ where both a and b are positive integers, and b is odd
+ """
+
+ if a == 0: return 0
+ result = 1
+ while a > 1:
+ if a & 1:
+ if ((a-1)*(b-1) >> 2) & 1:
+ result = -result
+ a, b = b % a, a
+ else:
+ if (((b * b) - 1) >> 3) & 1:
+ result = -result
+ a >>= 1
+ if a == 0: return 0
+ return result
+
+def jacobi_witness(x, n):
+ """Returns False if n is an Euler pseudo-prime with base x, and
+ True otherwise.
+ """
+
+ j = jacobi(x, n) % n
+ f = pow(x, (n-1)/2, n)
+
+ if j == f: return False
+ return True
+
+def randomized_primality_testing(n, k):
+ """Calculates whether n is composite (which is always correct) or
+ prime (which is incorrect with error probability 2**-k)
+
+ Returns False if the number is composite, and True if it's
+ probably prime.
+ """
+
+ # 50% of Jacobi-witnesses can report compositeness of non-prime numbers
+
+ for i in range(k):
+ x = randint(1, n-1)
+ if jacobi_witness(x, n): return False
+
+ return True
+
+def is_prime(number):
+ """Returns True if the number is prime, and False otherwise.
+
+ >>> is_prime(42)
+ 0
+ >>> is_prime(41)
+ 1
+ """
+
+ if randomized_primality_testing(number, 6):
+ # Prime, according to Jacobi
+ return True
+
+ # Not prime
+ return False
+
+
+def getprime(nbits):
+ """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In
+ other words: nbits is rounded up to whole bytes.
+
+ >>> p = getprime(8)
+ >>> is_prime(p-1)
+ 0
+ >>> is_prime(p)
+ 1
+ >>> is_prime(p+1)
+ 0
+ """
+
+ while True:
+ integer = read_random_int(nbits)
+
+ # Make sure it's odd
+ integer |= 1
+
+ # Test for primeness
+ if is_prime(integer): break
+
+ # Retry if not prime
+
+ return integer
+
+def are_relatively_prime(a, b):
+ """Returns True if a and b are relatively prime, and False if they
+ are not.
+
+ >>> are_relatively_prime(2, 3)
+ 1
+ >>> are_relatively_prime(2, 4)
+ 0
+ """
+
+ d = gcd(a, b)
+ return (d == 1)
+
+def find_p_q(nbits):
+ """Returns a tuple of two different primes of nbits bits"""
+ pbits = nbits + (nbits/16) #Make sure that p and q aren't too close
+ qbits = nbits - (nbits/16) #or the factoring programs can factor n
+ p = getprime(pbits)
+ while True:
+ q = getprime(qbits)
+ #Make sure p and q are different.
+ if not q == p: break
+ return (p, q)
+
+def extended_gcd(a, b):
+ """Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb
+ """
+ # r = gcd(a,b) i = multiplicative inverse of a mod b
+ # or j = multiplicative inverse of b mod a
+ # Neg return values for i or j are made positive mod b or a respectively
+ # Iterative version is faster and uses much less stack space
+ x = 0
+ y = 1
+ lx = 1
+ ly = 0
+ oa = a #Remember original a/b to remove
+ ob = b #negative values from return results
+ while b != 0:
+ q = long(a/b)
+ (a, b) = (b, a % b)
+ (x, lx) = ((lx - (q * x)),x)
+ (y, ly) = ((ly - (q * y)),y)
+ if (lx < 0): lx += ob #If neg wrap modulo original b
+ if (ly < 0): ly += oa #If neg wrap modulo original a
+ return (a, lx, ly) #Return only positive values
+
+# Main function: calculate encryption and decryption keys
+def calculate_keys(p, q, nbits):
+ """Calculates an encryption and a decryption key for p and q, and
+ returns them as a tuple (e, d)"""
+
+ n = p * q
+ phi_n = (p-1) * (q-1)
+
+ while True:
+ # Make sure e has enough bits so we ensure "wrapping" through
+ # modulo n
+ e = max(65537,getprime(nbits/4))
+ if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break
+
+ (d, i, j) = extended_gcd(e, phi_n)
+
+ if not d == 1:
+ raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n))
+ if (i < 0):
+ raise Exception("New extended_gcd shouldn't return negative values")
+ if not (e * i) % phi_n == 1:
+ raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n))
+
+ return (e, i)
+
+
+def gen_keys(nbits):
+ """Generate RSA keys of nbits bits. Returns (p, q, e, d).
+
+ Note: this can take a long time, depending on the key size.
+ """
+
+ (p, q) = find_p_q(nbits)
+ (e, d) = calculate_keys(p, q, nbits)
+
+ return (p, q, e, d)
+
+def newkeys(nbits):
+ """Generates public and private keys, and returns them as (pub,
+ priv).
+
+ The public key consists of a dict {e: ..., n: ...}. The private
+ key consists of a dict {d: ..., p: ..., q: ...}.
+ """
+ nbits = max(9,nbits) # Don't let nbits go below 9 bits
+ (p, q, e, d) = gen_keys(nbits)
+
+ return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} )
+
+def encrypt_int(message, ekey, n):
+ """Encrypts a message using encryption key 'ekey', working modulo n"""
+
+ if type(message) is types.IntType:
+ message = long(message)
+
+ if not type(message) is types.LongType:
+ raise TypeError("You must pass a long or int")
+
+ if message < 0 or message > n:
+ raise OverflowError("The message is too long")
+
+ #Note: Bit exponents start at zero (bit counts start at 1) this is correct
+ safebit = bit_size(n) - 2 #compute safe bit (MSB - 1)
+ message += (1 << safebit) #add safebit to ensure folding
+
+ return pow(message, ekey, n)
+
+def decrypt_int(cyphertext, dkey, n):
+ """Decrypts a cypher text using the decryption key 'dkey', working
+ modulo n"""
+
+ message = pow(cyphertext, dkey, n)
+
+ safebit = bit_size(n) - 2 #compute safe bit (MSB - 1)
+ message -= (1 << safebit) #remove safebit before decode
+
+ return message
+
+def encode64chops(chops):
+ """base64encodes chops and combines them into a ',' delimited string"""
+
+ chips = [] #chips are character chops
+
+ for value in chops:
+ chips.append(int2str64(value))
+
+ #delimit chops with comma
+ encoded = ','.join(chips)
+
+ return encoded
+
+def decode64chops(string):
+ """base64decodes and makes a ',' delimited string into chops"""
+
+ chips = string.split(',') #split chops at commas
+
+ chops = []
+
+ for string in chips: #make char chops (chips) into chops
+ chops.append(str642int(string))
+
+ return chops
+
+def chopstring(message, key, n, funcref):
+ """Chops the 'message' into integers that fit into n,
+ leaving room for a safebit to be added to ensure that all
+ messages fold during exponentiation. The MSB of the number n
+ is not independent modulo n (setting it could cause overflow), so
+ use the next lower bit for the safebit. Therefore reserve 2-bits
+ in the number n for non-data bits. Calls specified encryption
+ function for each chop.
+
+ Used by 'encrypt' and 'sign'.
+ """
+
+ msglen = len(message)
+ mbits = msglen * 8
+ #Set aside 2-bits so setting of safebit won't overflow modulo n.
+ nbits = bit_size(n) - 2 # leave room for safebit
+ nbytes = nbits / 8
+ blocks = msglen / nbytes
+
+ if msglen % nbytes > 0:
+ blocks += 1
+
+ cypher = []
+
+ for bindex in range(blocks):
+ offset = bindex * nbytes
+ block = message[offset:offset+nbytes]
+ value = bytes2int(block)
+ cypher.append(funcref(value, key, n))
+
+ return encode64chops(cypher) #Encode encrypted ints to base64 strings
+
+def gluechops(string, key, n, funcref):
+ """Glues chops back together into a string. calls
+ funcref(integer, key, n) for each chop.
+
+ Used by 'decrypt' and 'verify'.
+ """
+ message = ""
+
+ chops = decode64chops(string) #Decode base64 strings into integer chops
+
+ for cpart in chops:
+ mpart = funcref(cpart, key, n) #Decrypt each chop
+ message += int2bytes(mpart) #Combine decrypted strings into a msg
+
+ return message
+
+def encrypt(message, key):
+ """Encrypts a string 'message' with the public key 'key'"""
+ if 'n' not in key:
+ raise Exception("You must use the public key with encrypt")
+
+ return chopstring(message, key['e'], key['n'], encrypt_int)
+
+def sign(message, key):
+ """Signs a string 'message' with the private key 'key'"""
+ if 'p' not in key:
+ raise Exception("You must use the private key with sign")
+
+ return chopstring(message, key['d'], key['p']*key['q'], encrypt_int)
+
+def decrypt(cypher, key):
+ """Decrypts a string 'cypher' with the private key 'key'"""
+ if 'p' not in key:
+ raise Exception("You must use the private key with decrypt")
+
+ return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int)
+
+def verify(cypher, key):
+ """Verifies a string 'cypher' with the public key 'key'"""
+ if 'n' not in key:
+ raise Exception("You must use the public key with verify")
+
+ return gluechops(cypher, key['e'], key['n'], decrypt_int)
+
+# Do doctest if we're not imported
+if __name__ == "__main__":
+ import doctest
+ doctest.testmod()
+
+__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify"]
+
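The "safebit" folding used by encrypt_int, decrypt_int and chopstring above can be seen in isolation with toy numbers (illustrative values only, not taken from the module):

    n = 211                            # toy modulus; bit_size(n) == 8
    safebit = 8 - 2                    # bit_size(n) - 2: the bit just below the MSB
    message = 5                        # a chop small enough to leave the top two bits free

    folded = message + (1 << safebit)  # 5 + 64 = 69, large enough that exponentiation wraps mod n
    assert folded < n
    assert folded - (1 << safebit) == message   # decryption strips the safebit again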
diff --git a/third_party/python/rsa/rsa/asn1.py b/third_party/python/rsa/rsa/asn1.py
new file mode 100644
index 0000000000..706e6cf228
--- /dev/null
+++ b/third_party/python/rsa/rsa/asn1.py
@@ -0,0 +1,35 @@
+'''ASN.1 definitions.
+
+Not all ASN.1-handling code uses these definitions, but when it does, they should be here.
+'''
+
+from pyasn1.type import univ, namedtype, tag
+
+class PubKeyHeader(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('oid', univ.ObjectIdentifier()),
+ namedtype.NamedType('parameters', univ.Null()),
+ )
+
+class OpenSSLPubKey(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('header', PubKeyHeader()),
+
+ # This little hack (the implicit tag) allows us to get a Bit String as Octet String
+ namedtype.NamedType('key', univ.OctetString().subtype(
+ implicitTag=tag.Tag(tagClass=0, tagFormat=0, tagId=3))),
+ )
+
+
+class AsnPubKey(univ.Sequence):
+ '''ASN.1 contents of DER encoded public key:
+
+ RSAPublicKey ::= SEQUENCE {
+ modulus INTEGER, -- n
+ publicExponent INTEGER, -- e
+ '''
+
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('modulus', univ.Integer()),
+ namedtype.NamedType('publicExponent', univ.Integer()),
+ )
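These specs are consumed by the package's OpenSSL-format key loading; in rough outline that looks like the sketch below (hypothetical helper name, assuming pyasn1's DER decoder and a DER-encoded SubjectPublicKeyInfo blob in der_bytes):

    from pyasn1.codec.der import decoder
    from rsa.asn1 import OpenSSLPubKey, AsnPubKey

    def parse_openssl_public_key(der_bytes):
        # Outer SEQUENCE: algorithm header plus the wrapped RSAPublicKey bits
        (outer, _) = decoder.decode(der_bytes, asn1Spec=OpenSSLPubKey())
        # Inner SEQUENCE: the modulus n and the public exponent e
        (inner, _) = decoder.decode(outer['key'].asOctets(), asn1Spec=AsnPubKey())
        return int(inner['modulus']), int(inner['publicExponent'])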
diff --git a/third_party/python/rsa/rsa/bigfile.py b/third_party/python/rsa/rsa/bigfile.py
new file mode 100644
index 0000000000..516cf56b51
--- /dev/null
+++ b/third_party/python/rsa/rsa/bigfile.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Large file support
+
+ - break a file into smaller blocks, and encrypt them, and store the
+ encrypted blocks in another file.
+
+ - take such an encrypted file, decrypt its blocks, and reconstruct the
+ original file.
+
+The encrypted file format is as follows, where || denotes byte concatenation:
+
+ FILE := VERSION || BLOCK || BLOCK ...
+
+ BLOCK := LENGTH || DATA
+
+ LENGTH := varint-encoded length of the subsequent data. Varint comes from
+ Google Protobuf, and encodes an integer into a variable number of bytes.
+ Each byte uses the 7 lowest bits to encode the value. The highest bit set
+ to 1 indicates the next byte is also part of the varint. The last byte will
+ have this bit set to 0.
+
+This file format is called the VARBLOCK format, in line with the varint format
+used to denote the block sizes.
+
+'''
+
+from rsa import key, common, pkcs1, varblock
+from rsa._compat import byte
+
+def encrypt_bigfile(infile, outfile, pub_key):
+ '''Encrypts a file, writing it to 'outfile' in VARBLOCK format.
+
+ :param infile: file-like object to read the cleartext from
+ :param outfile: file-like object to write the crypto in VARBLOCK format to
+ :param pub_key: :py:class:`rsa.PublicKey` to encrypt with
+
+ '''
+
+ if not isinstance(pub_key, key.PublicKey):
+ raise TypeError('Public key required, but got %r' % pub_key)
+
+ key_bytes = common.bit_size(pub_key.n) // 8
+ blocksize = key_bytes - 11 # keep space for PKCS#1 padding
+
+ # Write the version number to the VARBLOCK file
+ outfile.write(byte(varblock.VARBLOCK_VERSION))
+
+ # Encrypt and write each block
+ for block in varblock.yield_fixedblocks(infile, blocksize):
+ crypto = pkcs1.encrypt(block, pub_key)
+
+ varblock.write_varint(outfile, len(crypto))
+ outfile.write(crypto)
+
+def decrypt_bigfile(infile, outfile, priv_key):
+ '''Decrypts an encrypted VARBLOCK file, writing it to 'outfile'
+
+ :param infile: file-like object to read the crypto in VARBLOCK format from
+ :param outfile: file-like object to write the cleartext to
+ :param priv_key: :py:class:`rsa.PrivateKey` to decrypt with
+
+ '''
+
+ if not isinstance(priv_key, key.PrivateKey):
+ raise TypeError('Private key required, but got %r' % priv_key)
+
+ for block in varblock.yield_varblocks(infile):
+ cleartext = pkcs1.decrypt(block, priv_key)
+ outfile.write(cleartext)
+
+__all__ = ['encrypt_bigfile', 'decrypt_bigfile']
+
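The varint length prefix described in the VARBLOCK docstring can be sketched standalone as follows (an illustrative helper mirroring the format, not the module's own write_varint):

    def encode_varint(value):
        # 7 data bits per byte; the high bit is set on every byte except the last
        out = bytearray()
        while True:
            low_bits = value & 0x7F
            value >>= 7
            if value:
                out.append(low_bits | 0x80)
            else:
                out.append(low_bits)
                return bytes(out)

    assert encode_varint(0) == b'\x00'
    assert encode_varint(300) == b'\xac\x02'   # same wire format as a protobuf varint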
diff --git a/third_party/python/rsa/rsa/cli.py b/third_party/python/rsa/rsa/cli.py
new file mode 100644
index 0000000000..527cc4979a
--- /dev/null
+++ b/third_party/python/rsa/rsa/cli.py
@@ -0,0 +1,379 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Commandline scripts.
+
+These scripts are called by the executables defined in setup.py.
+'''
+
+from __future__ import with_statement, print_function
+
+import abc
+import sys
+from optparse import OptionParser
+
+import rsa
+import rsa.bigfile
+import rsa.pkcs1
+
+HASH_METHODS = sorted(rsa.pkcs1.HASH_METHODS.keys())
+
+def keygen():
+ '''Key generator.'''
+
+ # Parse the CLI options
+ parser = OptionParser(usage='usage: %prog [options] keysize',
+ description='Generates a new RSA keypair of "keysize" bits.')
+
+ parser.add_option('--pubout', type='string',
+ help='Output filename for the public key. The public key is '
+ 'not saved if this option is not present. You can use '
+ 'pyrsa-priv2pub to create the public key file later.')
+
+ parser.add_option('-o', '--out', type='string',
+ help='Output filename for the private key. The key is '
+ 'written to stdout if this option is not present.')
+
+ parser.add_option('--form',
+ help='key format of the private and public keys - default PEM',
+ choices=('PEM', 'DER'), default='PEM')
+
+ (cli, cli_args) = parser.parse_args(sys.argv[1:])
+
+ if len(cli_args) != 1:
+ parser.print_help()
+ raise SystemExit(1)
+
+ try:
+ keysize = int(cli_args[0])
+ except ValueError:
+ parser.print_help()
+ print('Not a valid number: %s' % cli_args[0], file=sys.stderr)
+ raise SystemExit(1)
+
+ print('Generating %i-bit key' % keysize, file=sys.stderr)
+ (pub_key, priv_key) = rsa.newkeys(keysize)
+
+
+ # Save public key
+ if cli.pubout:
+ print('Writing public key to %s' % cli.pubout, file=sys.stderr)
+ data = pub_key.save_pkcs1(format=cli.form)
+ with open(cli.pubout, 'wb') as outfile:
+ outfile.write(data)
+
+ # Save private key
+ data = priv_key.save_pkcs1(format=cli.form)
+
+ if cli.out:
+ print('Writing private key to %s' % cli.out, file=sys.stderr)
+ with open(cli.out, 'wb') as outfile:
+ outfile.write(data)
+ else:
+ print('Writing private key to stdout', file=sys.stderr)
+ sys.stdout.write(data)
+
+
+class CryptoOperation(object):
+ '''CLI callable that operates with input, output, and a key.'''
+
+ __metaclass__ = abc.ABCMeta
+
+ keyname = 'public' # or 'private'
+ usage = 'usage: %%prog [options] %(keyname)s_key'
+ description = None
+ operation = 'decrypt'
+ operation_past = 'decrypted'
+ operation_progressive = 'decrypting'
+ input_help = 'Name of the file to %(operation)s. Reads from stdin if ' \
+ 'not specified.'
+ output_help = 'Name of the file to write the %(operation_past)s file ' \
+ 'to. Written to stdout if this option is not present.'
+ expected_cli_args = 1
+ has_output = True
+
+ key_class = rsa.PublicKey
+
+ def __init__(self):
+ self.usage = self.usage % self.__class__.__dict__
+ self.input_help = self.input_help % self.__class__.__dict__
+ self.output_help = self.output_help % self.__class__.__dict__
+
+ @abc.abstractmethod
+ def perform_operation(self, indata, key, cli_args=None):
+ '''Performs the program's operation.
+
+ Implement in a subclass.
+
+ :returns: the data to write to the output.
+ '''
+
+ def __call__(self):
+ '''Runs the program.'''
+
+ (cli, cli_args) = self.parse_cli()
+
+ key = self.read_key(cli_args[0], cli.keyform)
+
+ indata = self.read_infile(cli.input)
+
+ print(self.operation_progressive.title(), file=sys.stderr)
+ outdata = self.perform_operation(indata, key, cli_args)
+
+ if self.has_output:
+ self.write_outfile(outdata, cli.output)
+
+ def parse_cli(self):
+ '''Parse the CLI options
+
+ :returns: (cli_opts, cli_args)
+ '''
+
+ parser = OptionParser(usage=self.usage, description=self.description)
+
+ parser.add_option('-i', '--input', type='string', help=self.input_help)
+
+ if self.has_output:
+ parser.add_option('-o', '--output', type='string', help=self.output_help)
+
+ parser.add_option('--keyform',
+ help='Key format of the %s key - default PEM' % self.keyname,
+ choices=('PEM', 'DER'), default='PEM')
+
+ (cli, cli_args) = parser.parse_args(sys.argv[1:])
+
+ if len(cli_args) != self.expected_cli_args:
+ parser.print_help()
+ raise SystemExit(1)
+
+ return (cli, cli_args)
+
+ def read_key(self, filename, keyform):
+ '''Reads a public or private key.'''
+
+ print('Reading %s key from %s' % (self.keyname, filename), file=sys.stderr)
+ with open(filename, 'rb') as keyfile:
+ keydata = keyfile.read()
+
+ return self.key_class.load_pkcs1(keydata, keyform)
+
+ def read_infile(self, inname):
+ '''Read the input file'''
+
+ if inname:
+ print('Reading input from %s' % inname, file=sys.stderr)
+ with open(inname, 'rb') as infile:
+ return infile.read()
+
+ print('Reading input from stdin', file=sys.stderr)
+ return sys.stdin.read()
+
+ def write_outfile(self, outdata, outname):
+ '''Write the output file'''
+
+ if outname:
+ print('Writing output to %s' % outname, file=sys.stderr)
+ with open(outname, 'wb') as outfile:
+ outfile.write(outdata)
+ else:
+ print('Writing output to stdout', file=sys.stderr)
+ sys.stdout.write(outdata)
+
+class EncryptOperation(CryptoOperation):
+ '''Encrypts a file.'''
+
+ keyname = 'public'
+ description = ('Encrypts a file. The file must be shorter than the key '
+ 'length in order to be encrypted. For larger files, use the '
+ 'pyrsa-encrypt-bigfile command.')
+ operation = 'encrypt'
+ operation_past = 'encrypted'
+ operation_progressive = 'encrypting'
+
+
+ def perform_operation(self, indata, pub_key, cli_args=None):
+ '''Encrypts files.'''
+
+ return rsa.encrypt(indata, pub_key)
+
+class DecryptOperation(CryptoOperation):
+ '''Decrypts a file.'''
+
+ keyname = 'private'
+ description = ('Decrypts a file. The original file must be shorter than '
+ 'the key length in order to have been encrypted. For larger '
+ 'files, use the pyrsa-decrypt-bigfile command.')
+ operation = 'decrypt'
+ operation_past = 'decrypted'
+ operation_progressive = 'decrypting'
+ key_class = rsa.PrivateKey
+
+ def perform_operation(self, indata, priv_key, cli_args=None):
+ '''Decrypts files.'''
+
+ return rsa.decrypt(indata, priv_key)
+
+class SignOperation(CryptoOperation):
+ '''Signs a file.'''
+
+ keyname = 'private'
+ usage = 'usage: %%prog [options] private_key hash_method'
+ description = ('Signs a file, outputs the signature. Choose the hash '
+ 'method from %s' % ', '.join(HASH_METHODS))
+ operation = 'sign'
+ operation_past = 'signature'
+ operation_progressive = 'Signing'
+ key_class = rsa.PrivateKey
+ expected_cli_args = 2
+
+ output_help = ('Name of the file to write the signature to. Written '
+ 'to stdout if this option is not present.')
+
+ def perform_operation(self, indata, priv_key, cli_args):
+ '''Signs files.'''
+
+ hash_method = cli_args[1]
+ if hash_method not in HASH_METHODS:
+ raise SystemExit('Invalid hash method, choose one of %s' %
+ ', '.join(HASH_METHODS))
+
+ return rsa.sign(indata, priv_key, hash_method)
+
+class VerifyOperation(CryptoOperation):
+ '''Verify a signature.'''
+
+ keyname = 'public'
+ usage = 'usage: %%prog [options] public_key signature_file'
+ description = ('Verifies a signature, exits with status 0 upon success, '
+ 'prints an error message and exits with status 1 upon error.')
+ operation = 'verify'
+ operation_past = 'verified'
+ operation_progressive = 'Verifying'
+ key_class = rsa.PublicKey
+ expected_cli_args = 2
+ has_output = False
+
+ def perform_operation(self, indata, pub_key, cli_args):
+ '''Verifies files.'''
+
+ signature_file = cli_args[1]
+
+ with open(signature_file, 'rb') as sigfile:
+ signature = sigfile.read()
+
+ try:
+ rsa.verify(indata, signature, pub_key)
+ except rsa.VerificationError:
+ raise SystemExit('Verification failed.')
+
+ print('Verification OK', file=sys.stderr)
+
+
+class BigfileOperation(CryptoOperation):
+ '''CryptoOperation that doesn't read the entire file into memory.'''
+
+ def __init__(self):
+ CryptoOperation.__init__(self)
+
+ self.file_objects = []
+
+ def __del__(self):
+ '''Closes any open file handles.'''
+
+ for fobj in self.file_objects:
+ fobj.close()
+
+ def __call__(self):
+ '''Runs the program.'''
+
+ (cli, cli_args) = self.parse_cli()
+
+ key = self.read_key(cli_args[0], cli.keyform)
+
+ # Get the file handles
+ infile = self.get_infile(cli.input)
+ outfile = self.get_outfile(cli.output)
+
+ # Call the operation
+ print(self.operation_progressive.title(), file=sys.stderr)
+ self.perform_operation(infile, outfile, key, cli_args)
+
+ def get_infile(self, inname):
+ '''Returns the input file object'''
+
+ if inname:
+ print('Reading input from %s' % inname, file=sys.stderr)
+ fobj = open(inname, 'rb')
+ self.file_objects.append(fobj)
+ else:
+ print('Reading input from stdin', file=sys.stderr)
+ fobj = sys.stdin
+
+ return fobj
+
+ def get_outfile(self, outname):
+ '''Returns the output file object'''
+
+ if outname:
+ print('Will write output to %s' % outname, file=sys.stderr)
+ fobj = open(outname, 'wb')
+ self.file_objects.append(fobj)
+ else:
+ print('Will write output to stdout', file=sys.stderr)
+ fobj = sys.stdout
+
+ return fobj
+
+class EncryptBigfileOperation(BigfileOperation):
+ '''Encrypts a file to VARBLOCK format.'''
+
+ keyname = 'public'
+ description = ('Encrypts a file to an encrypted VARBLOCK file. The file '
+ 'can be larger than the key length, but the output file is only '
+ 'compatible with Python-RSA.')
+ operation = 'encrypt'
+ operation_past = 'encrypted'
+ operation_progressive = 'encrypting'
+
+ def perform_operation(self, infile, outfile, pub_key, cli_args=None):
+ '''Encrypts files to VARBLOCK.'''
+
+ return rsa.bigfile.encrypt_bigfile(infile, outfile, pub_key)
+
+class DecryptBigfileOperation(BigfileOperation):
+ '''Decrypts a file in VARBLOCK format.'''
+
+ keyname = 'private'
+ description = ('Decrypts an encrypted VARBLOCK file that was encrypted '
+ 'with pyrsa-encrypt-bigfile')
+ operation = 'decrypt'
+ operation_past = 'decrypted'
+ operation_progressive = 'decrypting'
+ key_class = rsa.PrivateKey
+
+ def perform_operation(self, infile, outfile, priv_key, cli_args=None):
+ '''Decrypts a VARBLOCK file.'''
+
+ return rsa.bigfile.decrypt_bigfile(infile, outfile, priv_key)
+
+
+encrypt = EncryptOperation()
+decrypt = DecryptOperation()
+sign = SignOperation()
+verify = VerifyOperation()
+encrypt_bigfile = EncryptBigfileOperation()
+decrypt_bigfile = DecryptBigfileOperation()
+
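Because CryptoOperation centralises argument parsing, key loading and I/O, adding a command amounts to one more small subclass; a hypothetical example (names invented for illustration, not part of the vendored CLI):

    class PubDumpOperation(CryptoOperation):
        '''Reads a public key and writes it back out in PEM form.'''

        keyname = 'public'
        description = 'Re-encodes a public key as PEM.'
        operation = 'dump'
        operation_past = 'dumped'
        operation_progressive = 'dumping'

        def perform_operation(self, indata, pub_key, cli_args=None):
            '''Ignores the input data and re-serialises the key.'''
            return pub_key.save_pkcs1(format='PEM')

    # would be wired up via a console_scripts entry point, e.g. pyrsa-pubdump = rsa.cli:pub_dump
    pub_dump = PubDumpOperation()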
diff --git a/third_party/python/rsa/rsa/common.py b/third_party/python/rsa/rsa/common.py
new file mode 100644
index 0000000000..39feb8c228
--- /dev/null
+++ b/third_party/python/rsa/rsa/common.py
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Common functionality shared by several modules.'''
+
+
+def bit_size(num):
+ '''
+ Number of bits needed to represent an integer, excluding any leading
+ zero bits.
+
+ As per definition from http://wiki.python.org/moin/BitManipulation and
+ to match the behavior of the Python 3 API.
+
+ Usage::
+
+ >>> bit_size(1023)
+ 10
+ >>> bit_size(1024)
+ 11
+ >>> bit_size(1025)
+ 11
+
+ :param num:
+ Integer value. If num is 0, returns 0. Only the absolute value of the
+ number is considered, so negative integers are treated as abs(num)
+ before the bit length is determined.
+ :returns:
+ Returns the number of bits in the integer.
+ '''
+ if num == 0:
+ return 0
+ if num < 0:
+ num = -num
+
+ # Make sure this is an int and not a float.
+ num & 1
+
+ hex_num = "%x" % num
+ return ((len(hex_num) - 1) * 4) + {
+ '0':0, '1':1, '2':2, '3':2,
+ '4':3, '5':3, '6':3, '7':3,
+ '8':4, '9':4, 'a':4, 'b':4,
+ 'c':4, 'd':4, 'e':4, 'f':4,
+ }[hex_num[0]]
+
+
+def _bit_size(number):
+ '''
+ Returns the number of bits required to hold a specific long number.
+ '''
+ if number < 0:
+ raise ValueError('Only nonnegative numbers possible: %s' % number)
+
+ if number == 0:
+ return 0
+
+ # This works, even with very large numbers. When using math.log(number, 2),
+ # you'll get rounding errors and it'll fail.
+ bits = 0
+ while number:
+ bits += 1
+ number >>= 1
+
+ return bits
+
+
+def byte_size(number):
+ '''
+ Returns the number of bytes required to hold a specific long number.
+
+ The number of bytes is rounded up.
+
+ Usage::
+
+ >>> byte_size(1 << 1023)
+ 128
+ >>> byte_size((1 << 1024) - 1)
+ 128
+ >>> byte_size(1 << 1024)
+ 129
+
+ :param number:
+ An unsigned integer
+ :returns:
+ The number of bytes required to hold a specific long number.
+ '''
+ quanta, mod = divmod(bit_size(number), 8)
+ if mod or number == 0:
+ quanta += 1
+ return quanta
+ #return int(math.ceil(bit_size(number) / 8.0))
+
+
+def extended_gcd(a, b):
+ '''Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb
+ '''
+ # r = gcd(a,b) i = multiplicative inverse of a mod b
+ # or j = multiplicative inverse of b mod a
+ # Negative return values for i or j are made positive mod b or a respectively
+ # Iterative version is faster and uses much less stack space
+ x = 0
+ y = 1
+ lx = 1
+ ly = 0
+ oa = a #Remember original a/b to remove
+ ob = b #negative values from return results
+ while b != 0:
+ q = a // b
+ (a, b) = (b, a % b)
+ (x, lx) = ((lx - (q * x)),x)
+ (y, ly) = ((ly - (q * y)),y)
+ if (lx < 0): lx += ob #If negative, wrap modulo original b
+ if (ly < 0): ly += oa #If negative, wrap modulo original a
+ return (a, lx, ly) #Return only positive values
+
+
+def inverse(x, n):
+ '''Returns x^-1 (mod n)
+
+ >>> inverse(7, 4)
+ 3
+ >>> (inverse(143, 4) * 143) % 4
+ 1
+ '''
+
+ (divider, inv, _) = extended_gcd(x, n)
+
+ if divider != 1:
+ raise ValueError("x (%d) and n (%d) are not relatively prime" % (x, n))
+
+ return inv
+
+
+def crt(a_values, modulo_values):
+ '''Chinese Remainder Theorem.
+
+ Calculates x such that x = a[i] (mod m[i]) for each i.
+
+ :param a_values: the a-values of the above equation
+ :param modulo_values: the m-values of the above equation
+ :returns: x such that x = a[i] (mod m[i]) for each i
+
+
+ >>> crt([2, 3], [3, 5])
+ 8
+
+ >>> crt([2, 3, 2], [3, 5, 7])
+ 23
+
+ >>> crt([2, 3, 0], [7, 11, 15])
+ 135
+ '''
+
+ m = 1
+ x = 0
+
+ for modulo in modulo_values:
+ m *= modulo
+
+ for (m_i, a_i) in zip(modulo_values, a_values):
+ M_i = m // m_i
+ inv = inverse(M_i, m_i)
+
+ x = (x + a_i * M_i * inv) % m
+
+ return x
+
+if __name__ == '__main__':
+ import doctest
+ doctest.testmod()
+
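A minimal usage sketch for rsa/common.py above (illustrative only, not part of the imported file): inverse() is the helper that calculate_keys() in rsa/key.py later uses to derive the private exponent. The numbers are the textbook toy primes p=61, q=53.

    from rsa import common

    phi = (61 - 1) * (53 - 1)        # phi(n) for the toy primes 61 and 53
    d = common.inverse(17, phi)      # private exponent for e = 17
    assert d == 2753 and (17 * d) % phi == 1
    assert common.bit_size(61 * 53) == 12   # 3233 needs 12 bits
    assert common.byte_size(61 * 53) == 2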
diff --git a/third_party/python/rsa/rsa/core.py b/third_party/python/rsa/rsa/core.py
new file mode 100644
index 0000000000..90dfee8e57
--- /dev/null
+++ b/third_party/python/rsa/rsa/core.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Core mathematical operations.
+
+This is the actual core RSA implementation, which is only defined
+mathematically on integers.
+'''
+
+
+from rsa._compat import is_integer
+
+def assert_int(var, name):
+
+ if is_integer(var):
+ return
+
+ raise TypeError('%s should be an integer, not %s' % (name, var.__class__))
+
+def encrypt_int(message, ekey, n):
+ '''Encrypts a message using encryption key 'ekey', working modulo n'''
+
+ assert_int(message, 'message')
+ assert_int(ekey, 'ekey')
+ assert_int(n, 'n')
+
+ if message < 0:
+ raise ValueError('Only non-negative numbers are supported')
+
+ if message > n:
+ raise OverflowError("The message %i is too long for n=%i" % (message, n))
+
+ return pow(message, ekey, n)
+
+def decrypt_int(cyphertext, dkey, n):
+ '''Decrypts a cypher text using the decryption key 'dkey', working
+ modulo n'''
+
+ assert_int(cyphertext, 'cyphertext')
+ assert_int(dkey, 'dkey')
+ assert_int(n, 'n')
+
+ message = pow(cyphertext, dkey, n)
+ return message
+
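An illustrative round trip through the raw integer primitives in rsa/core.py above, reusing the same toy key (n = 61 * 53 = 3233, e = 17, d = 2753). This is a sketch for orientation, not part of the patch.

    from rsa import core

    n, e, d = 3233, 17, 2753
    cipher = core.encrypt_int(65, e, n)
    assert core.decrypt_int(cipher, d, n) == 65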
diff --git a/third_party/python/rsa/rsa/key.py b/third_party/python/rsa/rsa/key.py
new file mode 100644
index 0000000000..b6de7b3f3b
--- /dev/null
+++ b/third_party/python/rsa/rsa/key.py
@@ -0,0 +1,612 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''RSA key generation code.
+
+Create new keys with the newkeys() function. It will give you a PublicKey and a
+PrivateKey object.
+
+Loading and saving keys requires the pyasn1 module. This module is imported as
+late as possible, such that other functionality will remain working in absence
+of pyasn1.
+
+'''
+
+import logging
+from rsa._compat import b, bytes_type
+
+import rsa.prime
+import rsa.pem
+import rsa.common
+
+log = logging.getLogger(__name__)
+
+
+
+class AbstractKey(object):
+ '''Abstract superclass for private and public keys.'''
+
+ @classmethod
+ def load_pkcs1(cls, keyfile, format='PEM'):
+ r'''Loads a key in PKCS#1 DER or PEM format.
+
+ :param keyfile: contents of a DER- or PEM-encoded file that contains
+ the public key.
+ :param format: the format of the file to load; 'PEM' or 'DER'
+
+ :return: a PublicKey object
+
+ '''
+
+ methods = {
+ 'PEM': cls._load_pkcs1_pem,
+ 'DER': cls._load_pkcs1_der,
+ }
+
+ if format not in methods:
+ formats = ', '.join(sorted(methods.keys()))
+ raise ValueError('Unsupported format: %r, try one of %s' % (format,
+ formats))
+
+ method = methods[format]
+ return method(keyfile)
+
+ def save_pkcs1(self, format='PEM'):
+ '''Saves the public key in PKCS#1 DER or PEM format.
+
+ :param format: the format to save; 'PEM' or 'DER'
+ :returns: the DER- or PEM-encoded public key.
+
+ '''
+
+ methods = {
+ 'PEM': self._save_pkcs1_pem,
+ 'DER': self._save_pkcs1_der,
+ }
+
+ if format not in methods:
+ formats = ', '.join(sorted(methods.keys()))
+ raise ValueError('Unsupported format: %r, try one of %s' % (format,
+ formats))
+
+ method = methods[format]
+ return method()
+
+class PublicKey(AbstractKey):
+ '''Represents a public RSA key.
+
+ This key is also known as the 'encryption key'. It contains the 'n' and 'e'
+ values.
+
+ Supports attributes as well as dictionary-like access. Attribute access is
+ faster, though.
+
+ >>> PublicKey(5, 3)
+ PublicKey(5, 3)
+
+ >>> key = PublicKey(5, 3)
+ >>> key.n
+ 5
+ >>> key['n']
+ 5
+ >>> key.e
+ 3
+ >>> key['e']
+ 3
+
+ '''
+
+ __slots__ = ('n', 'e')
+
+ def __init__(self, n, e):
+ self.n = n
+ self.e = e
+
+ def __getitem__(self, key):
+ return getattr(self, key)
+
+ def __repr__(self):
+ return 'PublicKey(%i, %i)' % (self.n, self.e)
+
+ def __eq__(self, other):
+ if other is None:
+ return False
+
+ if not isinstance(other, PublicKey):
+ return False
+
+ return self.n == other.n and self.e == other.e
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ @classmethod
+ def _load_pkcs1_der(cls, keyfile):
+ r'''Loads a key in PKCS#1 DER format.
+
+ @param keyfile: contents of a DER-encoded file that contains the public
+ key.
+ @return: a PublicKey object
+
+ First let's construct a DER encoded key:
+
+ >>> import base64
+ >>> b64der = 'MAwCBQCNGmYtAgMBAAE='
+ >>> der = base64.decodestring(b64der)
+
+ This loads the file:
+
+ >>> PublicKey._load_pkcs1_der(der)
+ PublicKey(2367317549, 65537)
+
+ '''
+
+ from pyasn1.codec.der import decoder
+ from rsa.asn1 import AsnPubKey
+
+ (priv, _) = decoder.decode(keyfile, asn1Spec=AsnPubKey())
+ return cls(n=int(priv['modulus']), e=int(priv['publicExponent']))
+
+ def _save_pkcs1_der(self):
+ '''Saves the public key in PKCS#1 DER format.
+
+ @returns: the DER-encoded public key.
+ '''
+
+ from pyasn1.codec.der import encoder
+ from rsa.asn1 import AsnPubKey
+
+ # Create the ASN object
+ asn_key = AsnPubKey()
+ asn_key.setComponentByName('modulus', self.n)
+ asn_key.setComponentByName('publicExponent', self.e)
+
+ return encoder.encode(asn_key)
+
+ @classmethod
+ def _load_pkcs1_pem(cls, keyfile):
+ '''Loads a PKCS#1 PEM-encoded public key file.
+
+ The contents of the file before the "-----BEGIN RSA PUBLIC KEY-----" and
+ after the "-----END RSA PUBLIC KEY-----" lines are ignored.
+
+ @param keyfile: contents of a PEM-encoded file that contains the public
+ key.
+ @return: a PublicKey object
+ '''
+
+ der = rsa.pem.load_pem(keyfile, 'RSA PUBLIC KEY')
+ return cls._load_pkcs1_der(der)
+
+ def _save_pkcs1_pem(self):
+ '''Saves a PKCS#1 PEM-encoded public key file.
+
+ @return: contents of a PEM-encoded file that contains the public key.
+ '''
+
+ der = self._save_pkcs1_der()
+ return rsa.pem.save_pem(der, 'RSA PUBLIC KEY')
+
+ @classmethod
+ def load_pkcs1_openssl_pem(cls, keyfile):
+ '''Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL.
+
+ These files can be recognised in that they start with BEGIN PUBLIC KEY
+ rather than BEGIN RSA PUBLIC KEY.
+
+ The contents of the file before the "-----BEGIN PUBLIC KEY-----" and
+ after the "-----END PUBLIC KEY-----" lines are ignored.
+
+ @param keyfile: contents of a PEM-encoded file that contains the public
+ key, from OpenSSL.
+ @return: a PublicKey object
+ '''
+
+ der = rsa.pem.load_pem(keyfile, 'PUBLIC KEY')
+ return cls.load_pkcs1_openssl_der(der)
+
+ @classmethod
+ def load_pkcs1_openssl_der(cls, keyfile):
+ '''Loads a PKCS#1 DER-encoded public key file from OpenSSL.
+
+ @param keyfile: contents of a DER-encoded file that contains the public
+ key, from OpenSSL.
+ @return: a PublicKey object
+ '''
+
+ from rsa.asn1 import OpenSSLPubKey
+ from pyasn1.codec.der import decoder
+ from pyasn1.type import univ
+
+ (keyinfo, _) = decoder.decode(keyfile, asn1Spec=OpenSSLPubKey())
+
+ if keyinfo['header']['oid'] != univ.ObjectIdentifier('1.2.840.113549.1.1.1'):
+ raise TypeError("This is not a DER-encoded OpenSSL-compatible public key")
+
+ return cls._load_pkcs1_der(keyinfo['key'][1:])
+
+
+
+
+class PrivateKey(AbstractKey):
+ '''Represents a private RSA key.
+
+ This key is also known as the 'decryption key'. It contains the 'n', 'e',
+ 'd', 'p', 'q' and other values.
+
+ Supports attributes as well as dictionary-like access. Attribute access is
+ faster, though.
+
+ >>> PrivateKey(3247, 65537, 833, 191, 17)
+ PrivateKey(3247, 65537, 833, 191, 17)
+
+ exp1, exp2 and coef don't have to be given, they will be calculated:
+
+ >>> pk = PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
+ >>> pk.exp1
+ 55063
+ >>> pk.exp2
+ 10095
+ >>> pk.coef
+ 50797
+
+ If you give exp1, exp2 or coef, they will be used as-is:
+
+ >>> pk = PrivateKey(1, 2, 3, 4, 5, 6, 7, 8)
+ >>> pk.exp1
+ 6
+ >>> pk.exp2
+ 7
+ >>> pk.coef
+ 8
+
+ '''
+
+ __slots__ = ('n', 'e', 'd', 'p', 'q', 'exp1', 'exp2', 'coef')
+
+ def __init__(self, n, e, d, p, q, exp1=None, exp2=None, coef=None):
+ self.n = n
+ self.e = e
+ self.d = d
+ self.p = p
+ self.q = q
+
+ # Calculate the other values if they aren't supplied
+ if exp1 is None:
+ self.exp1 = int(d % (p - 1))
+ else:
+ self.exp1 = exp1
+
+ if exp2 is None:
+ self.exp2 = int(d % (q - 1))
+ else:
+ self.exp2 = exp2
+
+ if coef is None:
+ self.coef = rsa.common.inverse(q, p)
+ else:
+ self.coef = coef
+
+ def __getitem__(self, key):
+ return getattr(self, key)
+
+ def __repr__(self):
+ return 'PrivateKey(%(n)i, %(e)i, %(d)i, %(p)i, %(q)i)' % self
+
+ def __eq__(self, other):
+ if other is None:
+ return False
+
+ if not isinstance(other, PrivateKey):
+ return False
+
+ return (self.n == other.n and
+ self.e == other.e and
+ self.d == other.d and
+ self.p == other.p and
+ self.q == other.q and
+ self.exp1 == other.exp1 and
+ self.exp2 == other.exp2 and
+ self.coef == other.coef)
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ @classmethod
+ def _load_pkcs1_der(cls, keyfile):
+ r'''Loads a key in PKCS#1 DER format.
+
+ @param keyfile: contents of a DER-encoded file that contains the private
+ key.
+ @return: a PrivateKey object
+
+ First let's construct a DER encoded key:
+
+ >>> import base64
+ >>> b64der = 'MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt'
+ >>> der = base64.decodestring(b64der)
+
+ This loads the file:
+
+ >>> PrivateKey._load_pkcs1_der(der)
+ PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
+
+ '''
+
+ from pyasn1.codec.der import decoder
+ (priv, _) = decoder.decode(keyfile)
+
+ # ASN.1 contents of DER encoded private key:
+ #
+ # RSAPrivateKey ::= SEQUENCE {
+ # version Version,
+ # modulus INTEGER, -- n
+ # publicExponent INTEGER, -- e
+ # privateExponent INTEGER, -- d
+ # prime1 INTEGER, -- p
+ # prime2 INTEGER, -- q
+ # exponent1 INTEGER, -- d mod (p-1)
+ # exponent2 INTEGER, -- d mod (q-1)
+ # coefficient INTEGER, -- (inverse of q) mod p
+ # otherPrimeInfos OtherPrimeInfos OPTIONAL
+ # }
+
+ if priv[0] != 0:
+ raise ValueError('Unable to read this file, version %s != 0' % priv[0])
+
+ as_ints = tuple(int(x) for x in priv[1:9])
+ return cls(*as_ints)
+
+ def _save_pkcs1_der(self):
+ '''Saves the private key in PKCS#1 DER format.
+
+ @returns: the DER-encoded private key.
+ '''
+
+ from pyasn1.type import univ, namedtype
+ from pyasn1.codec.der import encoder
+
+ class AsnPrivKey(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer()),
+ namedtype.NamedType('modulus', univ.Integer()),
+ namedtype.NamedType('publicExponent', univ.Integer()),
+ namedtype.NamedType('privateExponent', univ.Integer()),
+ namedtype.NamedType('prime1', univ.Integer()),
+ namedtype.NamedType('prime2', univ.Integer()),
+ namedtype.NamedType('exponent1', univ.Integer()),
+ namedtype.NamedType('exponent2', univ.Integer()),
+ namedtype.NamedType('coefficient', univ.Integer()),
+ )
+
+ # Create the ASN object
+ asn_key = AsnPrivKey()
+ asn_key.setComponentByName('version', 0)
+ asn_key.setComponentByName('modulus', self.n)
+ asn_key.setComponentByName('publicExponent', self.e)
+ asn_key.setComponentByName('privateExponent', self.d)
+ asn_key.setComponentByName('prime1', self.p)
+ asn_key.setComponentByName('prime2', self.q)
+ asn_key.setComponentByName('exponent1', self.exp1)
+ asn_key.setComponentByName('exponent2', self.exp2)
+ asn_key.setComponentByName('coefficient', self.coef)
+
+ return encoder.encode(asn_key)
+
+ @classmethod
+ def _load_pkcs1_pem(cls, keyfile):
+ '''Loads a PKCS#1 PEM-encoded private key file.
+
+ The contents of the file before the "-----BEGIN RSA PRIVATE KEY-----" and
+ after the "-----END RSA PRIVATE KEY-----" lines are ignored.
+
+ @param keyfile: contents of a PEM-encoded file that contains the private
+ key.
+ @return: a PrivateKey object
+ '''
+
+ der = rsa.pem.load_pem(keyfile, b('RSA PRIVATE KEY'))
+ return cls._load_pkcs1_der(der)
+
+ def _save_pkcs1_pem(self):
+ '''Saves a PKCS#1 PEM-encoded private key file.
+
+ @return: contents of a PEM-encoded file that contains the private key.
+ '''
+
+ der = self._save_pkcs1_der()
+ return rsa.pem.save_pem(der, b('RSA PRIVATE KEY'))
+
+def find_p_q(nbits, getprime_func=rsa.prime.getprime, accurate=True):
+ '''Returns a tuple of two different primes of nbits bits each.
+
+ The resulting p * q has exactly 2 * nbits bits, and the returned p and q
+ will not be equal.
+
+ :param nbits: the number of bits in each of p and q.
+ :param getprime_func: the getprime function, defaults to
+ :py:func:`rsa.prime.getprime`.
+
+ *Introduced in Python-RSA 3.1*
+
+ :param accurate: whether to enable accurate mode or not.
+ :returns: (p, q), where p > q
+
+ >>> (p, q) = find_p_q(128)
+ >>> from rsa import common
+ >>> common.bit_size(p * q)
+ 256
+
+ When not in accurate mode, the number of bits can be slightly less
+
+ >>> (p, q) = find_p_q(128, accurate=False)
+ >>> from rsa import common
+ >>> common.bit_size(p * q) <= 256
+ True
+ >>> common.bit_size(p * q) > 240
+ True
+
+ '''
+
+ total_bits = nbits * 2
+
+ # Make sure that p and q aren't too close or the factoring programs can
+ # factor n.
+ shift = nbits // 16
+ pbits = nbits + shift
+ qbits = nbits - shift
+
+ # Choose the two initial primes
+ log.debug('find_p_q(%i): Finding p', nbits)
+ p = getprime_func(pbits)
+ log.debug('find_p_q(%i): Finding q', nbits)
+ q = getprime_func(qbits)
+
+ def is_acceptable(p, q):
+ '''Returns True iff p and q are acceptable:
+
+ - p and q differ
+ - (p * q) has the right nr of bits (when accurate=True)
+ '''
+
+ if p == q:
+ return False
+
+ if not accurate:
+ return True
+
+ # Make sure we have just the right amount of bits
+ found_size = rsa.common.bit_size(p * q)
+ return total_bits == found_size
+
+ # Keep choosing other primes until they match our requirements.
+ change_p = False
+ while not is_acceptable(p, q):
+ # Change p on one iteration and q on the other
+ if change_p:
+ p = getprime_func(pbits)
+ else:
+ q = getprime_func(qbits)
+
+ change_p = not change_p
+
+ # We want p > q as described on
+ # http://www.di-mgt.com.au/rsa_alg.html#crt
+ return (max(p, q), min(p, q))
+
+def calculate_keys(p, q, nbits):
+ '''Calculates an encryption and a decryption key given p and q, and
+ returns them as a tuple (e, d)
+
+ '''
+
+ phi_n = (p - 1) * (q - 1)
+
+ # A very common choice for e is 65537
+ e = 65537
+
+ try:
+ d = rsa.common.inverse(e, phi_n)
+ except ValueError:
+ raise ValueError("e (%d) and phi_n (%d) are not relatively prime" %
+ (e, phi_n))
+
+ if (e * d) % phi_n != 1:
+ raise ValueError("e (%d) and d (%d) are not mult. inv. modulo "
+ "phi_n (%d)" % (e, d, phi_n))
+
+ return (e, d)
+
+def gen_keys(nbits, getprime_func, accurate=True):
+ '''Generate RSA keys of nbits bits. Returns (p, q, e, d).
+
+ Note: this can take a long time, depending on the key size.
+
+ :param nbits: the total number of bits in ``p`` and ``q``. Both ``p`` and
+ ``q`` will use ``nbits/2`` bits.
+ :param getprime_func: either :py:func:`rsa.prime.getprime` or a function
+ with similar signature.
+ '''
+
+ (p, q) = find_p_q(nbits // 2, getprime_func, accurate)
+ (e, d) = calculate_keys(p, q, nbits // 2)
+
+ return (p, q, e, d)
+
+def newkeys(nbits, accurate=True, poolsize=1):
+ '''Generates public and private keys, and returns them as (pub, priv).
+
+ The public key is also known as the 'encryption key', and is a
+ :py:class:`rsa.PublicKey` object. The private key is also known as the
+ 'decryption key' and is a :py:class:`rsa.PrivateKey` object.
+
+ :param nbits: the number of bits required to store ``n = p*q``.
+ :param accurate: when True, ``n`` will have exactly the number of bits you
+ asked for. However, this makes key generation much slower. When False,
+ ``n`` may have slightly fewer bits.
+ :param poolsize: the number of processes to use to generate the prime
+ numbers. If set to a number > 1, a parallel algorithm will be used.
+ This requires Python 2.6 or newer.
+
+ :returns: a tuple (:py:class:`rsa.PublicKey`, :py:class:`rsa.PrivateKey`)
+
+ The ``poolsize`` parameter was added in *Python-RSA 3.1* and requires
+ Python 2.6 or newer.
+
+ '''
+
+ if nbits < 16:
+ raise ValueError('Key too small')
+
+ if poolsize < 1:
+ raise ValueError('Pool size (%i) should be >= 1' % poolsize)
+
+ # Determine which getprime function to use
+ if poolsize > 1:
+ from rsa import parallel
+ import functools
+
+ getprime_func = functools.partial(parallel.getprime, poolsize=poolsize)
+ else:
+ getprime_func = rsa.prime.getprime
+
+ # Generate the key components
+ (p, q, e, d) = gen_keys(nbits, getprime_func)
+
+ # Create the key objects
+ n = p * q
+
+ return (
+ PublicKey(n, e),
+ PrivateKey(n, e, d, p, q)
+ )
+
+__all__ = ['PublicKey', 'PrivateKey', 'newkeys']
+
+if __name__ == '__main__':
+ import doctest
+
+ try:
+ for count in range(100):
+ (failures, tests) = doctest.testmod()
+ if failures:
+ break
+
+ if (count and count % 10 == 0) or count == 1:
+ print('%i times' % count)
+ except KeyboardInterrupt:
+ print('Aborted')
+ else:
+ print('Doctests done')
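A hedged usage sketch for rsa/key.py above: generate a key pair and round-trip the private key through PKCS#1 PEM. This requires pyasn1, as the module docstring notes, and is illustrative only.

    import rsa.key

    (pub, priv) = rsa.key.newkeys(512)
    pem = priv.save_pkcs1(format='PEM')
    assert rsa.key.PrivateKey.load_pkcs1(pem, format='PEM') == priv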
diff --git a/third_party/python/rsa/rsa/parallel.py b/third_party/python/rsa/rsa/parallel.py
new file mode 100644
index 0000000000..e5034ac707
--- /dev/null
+++ b/third_party/python/rsa/rsa/parallel.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Functions for parallel computation on multiple cores.
+
+Introduced in Python-RSA 3.1.
+
+.. note::
+
+ Requires Python 2.6 or newer.
+
+'''
+
+from __future__ import print_function
+
+import multiprocessing as mp
+
+import rsa.prime
+import rsa.randnum
+
+def _find_prime(nbits, pipe):
+ while True:
+ integer = rsa.randnum.read_random_int(nbits)
+
+ # Make sure it's odd
+ integer |= 1
+
+ # Test for primeness
+ if rsa.prime.is_prime(integer):
+ pipe.send(integer)
+ return
+
+def getprime(nbits, poolsize):
+ '''Returns a prime number that can be stored in 'nbits' bits.
+
+ Works in multiple threads at the same time.
+
+ >>> p = getprime(128, 3)
+ >>> rsa.prime.is_prime(p-1)
+ False
+ >>> rsa.prime.is_prime(p)
+ True
+ >>> rsa.prime.is_prime(p+1)
+ False
+
+ >>> from rsa import common
+ >>> common.bit_size(p) == 128
+ True
+
+ '''
+
+ (pipe_recv, pipe_send) = mp.Pipe(duplex=False)
+
+ # Create processes
+ procs = [mp.Process(target=_find_prime, args=(nbits, pipe_send))
+ for _ in range(poolsize)]
+ [p.start() for p in procs]
+
+ result = pipe_recv.recv()
+
+ [p.terminate() for p in procs]
+
+ return result
+
+__all__ = ['getprime']
+
+
+if __name__ == '__main__':
+ print('Running doctests 1000x or until failure')
+ import doctest
+
+ for count in range(100):
+ (failures, tests) = doctest.testmod()
+ if failures:
+ break
+
+ if count and count % 10 == 0:
+ print('%i times' % count)
+
+ print('Doctests done')
+
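An illustrative use of the parallel prime search in rsa/parallel.py above; poolsize=2 is an arbitrary choice for the sketch.

    from rsa import parallel, prime

    p = parallel.getprime(128, poolsize=2)
    assert prime.is_prime(p)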
diff --git a/third_party/python/rsa/rsa/pem.py b/third_party/python/rsa/rsa/pem.py
new file mode 100644
index 0000000000..b1c3a0edb4
--- /dev/null
+++ b/third_party/python/rsa/rsa/pem.py
@@ -0,0 +1,120 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Functions that load and write PEM-encoded files.'''
+
+import base64
+from rsa._compat import b, is_bytes
+
+def _markers(pem_marker):
+ '''
+ Returns the start and end PEM markers
+ '''
+
+ if is_bytes(pem_marker):
+ pem_marker = pem_marker.decode('utf-8')
+
+ return (b('-----BEGIN %s-----' % pem_marker),
+ b('-----END %s-----' % pem_marker))
+
+def load_pem(contents, pem_marker):
+ '''Loads a PEM file.
+
+ @param contents: the contents of the file to interpret
+ @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY'
+ when your file has '-----BEGIN RSA PRIVATE KEY-----' and
+ '-----END RSA PRIVATE KEY-----' markers.
+
+ @return the base64-decoded content between the start and end markers.
+
+ @raise ValueError: when the content is invalid, for example when the start
+ marker cannot be found.
+
+ '''
+
+ (pem_start, pem_end) = _markers(pem_marker)
+
+ pem_lines = []
+ in_pem_part = False
+
+ for line in contents.splitlines():
+ line = line.strip()
+
+ # Skip empty lines
+ if not line:
+ continue
+
+ # Handle start marker
+ if line == pem_start:
+ if in_pem_part:
+ raise ValueError('Seen start marker "%s" twice' % pem_start)
+
+ in_pem_part = True
+ continue
+
+ # Skip stuff before first marker
+ if not in_pem_part:
+ continue
+
+ # Handle end marker
+ if in_pem_part and line == pem_end:
+ in_pem_part = False
+ break
+
+ # Load fields
+ if b(':') in line:
+ continue
+
+ pem_lines.append(line)
+
+ # Do some sanity checks
+ if not pem_lines:
+ raise ValueError('No PEM start marker "%s" found' % pem_start)
+
+ if in_pem_part:
+ raise ValueError('No PEM end marker "%s" found' % pem_end)
+
+ # Base64-decode the contents
+ pem = b('').join(pem_lines)
+ return base64.decodestring(pem)
+
+
+def save_pem(contents, pem_marker):
+ '''Saves a PEM file.
+
+ @param contents: the contents to encode in PEM format
+ @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY'
+ when your file has '-----BEGIN RSA PRIVATE KEY-----' and
+ '-----END RSA PRIVATE KEY-----' markers.
+
+ @return the base64-encoded content between the start and end markers.
+
+ '''
+
+ (pem_start, pem_end) = _markers(pem_marker)
+
+ b64 = base64.encodestring(contents).replace(b('\n'), b(''))
+ pem_lines = [pem_start]
+
+ for block_start in range(0, len(b64), 64):
+ block = b64[block_start:block_start + 64]
+ pem_lines.append(block)
+
+ pem_lines.append(pem_end)
+ pem_lines.append(b(''))
+
+ return b('\n').join(pem_lines)
+
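A small sketch of the PEM helpers in rsa/pem.py above; 'TEST DATA' is an arbitrary marker chosen for illustration, not something the library defines.

    from rsa import pem
    from rsa._compat import b

    armored = pem.save_pem(b('hello world'), 'TEST DATA')
    assert armored.startswith(b('-----BEGIN TEST DATA-----'))
    assert pem.load_pem(armored, 'TEST DATA') == b('hello world')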
diff --git a/third_party/python/rsa/rsa/pkcs1.py b/third_party/python/rsa/rsa/pkcs1.py
new file mode 100644
index 0000000000..15e4cf639e
--- /dev/null
+++ b/third_party/python/rsa/rsa/pkcs1.py
@@ -0,0 +1,391 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Functions for PKCS#1 version 1.5 encryption and signing
+
+This module implements certain functionality from PKCS#1 version 1.5. For a
+very clear example, read http://www.di-mgt.com.au/rsa_alg.html#pkcs1schemes
+
+At least 8 bytes of random padding is used when encrypting a message. This makes
+these methods much more secure than the ones in the ``rsa`` module.
+
+WARNING: this module leaks information when decryption or verification fails.
+The exceptions that are raised contain the Python traceback information, which
+can be used to deduce where in the process the failure occurred. DO NOT PASS
+SUCH INFORMATION to your users.
+'''
+
+import hashlib
+import os
+
+from rsa._compat import b
+from rsa import common, transform, core, varblock
+
+# ASN.1 codes that describe the hash algorithm used.
+HASH_ASN1 = {
+ 'MD5': b('\x30\x20\x30\x0c\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05\x05\x00\x04\x10'),
+ 'SHA-1': b('\x30\x21\x30\x09\x06\x05\x2b\x0e\x03\x02\x1a\x05\x00\x04\x14'),
+ 'SHA-256': b('\x30\x31\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01\x05\x00\x04\x20'),
+ 'SHA-384': b('\x30\x41\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02\x05\x00\x04\x30'),
+ 'SHA-512': b('\x30\x51\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03\x05\x00\x04\x40'),
+}
+
+HASH_METHODS = {
+ 'MD5': hashlib.md5,
+ 'SHA-1': hashlib.sha1,
+ 'SHA-256': hashlib.sha256,
+ 'SHA-384': hashlib.sha384,
+ 'SHA-512': hashlib.sha512,
+}
+
+class CryptoError(Exception):
+ '''Base class for all exceptions in this module.'''
+
+class DecryptionError(CryptoError):
+ '''Raised when decryption fails.'''
+
+class VerificationError(CryptoError):
+ '''Raised when verification fails.'''
+
+def _pad_for_encryption(message, target_length):
+ r'''Pads the message for encryption, returning the padded message.
+
+ :return: 00 02 RANDOM_DATA 00 MESSAGE
+
+ >>> block = _pad_for_encryption('hello', 16)
+ >>> len(block)
+ 16
+ >>> block[0:2]
+ '\x00\x02'
+ >>> block[-6:]
+ '\x00hello'
+
+ '''
+
+ max_msglength = target_length - 11
+ msglength = len(message)
+
+ if msglength > max_msglength:
+ raise OverflowError('%i bytes needed for message, but there is only'
+ ' space for %i' % (msglength, max_msglength))
+
+ # Get random padding
+ padding = b('')
+ padding_length = target_length - msglength - 3
+
+ # We remove 0-bytes, so we'll end up with less padding than we've asked for,
+ # so keep adding data until we're at the correct length.
+ while len(padding) < padding_length:
+ needed_bytes = padding_length - len(padding)
+
+ # Always read at least 5 bytes more than we need, and trim off the rest
+ # after removing the 0-bytes. This increases the chance of getting
+ # enough bytes, especially when needed_bytes is small
+ new_padding = os.urandom(needed_bytes + 5)
+ new_padding = new_padding.replace(b('\x00'), b(''))
+ padding = padding + new_padding[:needed_bytes]
+
+ assert len(padding) == padding_length
+
+ return b('').join([b('\x00\x02'),
+ padding,
+ b('\x00'),
+ message])
+
+
+def _pad_for_signing(message, target_length):
+ r'''Pads the message for signing, returning the padded message.
+
+ The padding is always a repetition of FF bytes.
+
+ :return: 00 01 PADDING 00 MESSAGE
+
+ >>> block = _pad_for_signing('hello', 16)
+ >>> len(block)
+ 16
+ >>> block[0:2]
+ '\x00\x01'
+ >>> block[-6:]
+ '\x00hello'
+ >>> block[2:-6]
+ '\xff\xff\xff\xff\xff\xff\xff\xff'
+
+ '''
+
+ max_msglength = target_length - 11
+ msglength = len(message)
+
+ if msglength > max_msglength:
+ raise OverflowError('%i bytes needed for message, but there is only'
+ ' space for %i' % (msglength, max_msglength))
+
+ padding_length = target_length - msglength - 3
+
+ return b('').join([b('\x00\x01'),
+ padding_length * b('\xff'),
+ b('\x00'),
+ message])
+
+
+def encrypt(message, pub_key):
+ '''Encrypts the given message using PKCS#1 v1.5
+
+ :param message: the message to encrypt. Must be a byte string no longer than
+ ``k-11`` bytes, where ``k`` is the number of bytes needed to encode
+ the ``n`` component of the public key.
+ :param pub_key: the :py:class:`rsa.PublicKey` to encrypt with.
+ :raise OverflowError: when the message is too large to fit in the padded
+ block.
+
+ >>> from rsa import key, common
+ >>> (pub_key, priv_key) = key.newkeys(256)
+ >>> message = 'hello'
+ >>> crypto = encrypt(message, pub_key)
+
+ The crypto text should be just as long as the public key 'n' component:
+
+ >>> len(crypto) == common.byte_size(pub_key.n)
+ True
+
+ '''
+
+ keylength = common.byte_size(pub_key.n)
+ padded = _pad_for_encryption(message, keylength)
+
+ payload = transform.bytes2int(padded)
+ encrypted = core.encrypt_int(payload, pub_key.e, pub_key.n)
+ block = transform.int2bytes(encrypted, keylength)
+
+ return block
+
+def decrypt(crypto, priv_key):
+ r'''Decrypts the given message using PKCS#1 v1.5
+
+ The decryption is considered 'failed' when the resulting cleartext doesn't
+ start with the bytes 00 02, or when the 00 byte between the padding and
+ the message cannot be found.
+
+ :param crypto: the crypto text as returned by :py:func:`rsa.encrypt`
+ :param priv_key: the :py:class:`rsa.PrivateKey` to decrypt with.
+ :raise DecryptionError: when the decryption fails. No details are given as
+ to why the code thinks the decryption fails, as this would leak
+ information about the private key.
+
+
+ >>> import rsa
+ >>> (pub_key, priv_key) = rsa.newkeys(256)
+
+ It works with strings:
+
+ >>> crypto = encrypt('hello', pub_key)
+ >>> decrypt(crypto, priv_key)
+ 'hello'
+
+ And with binary data:
+
+ >>> crypto = encrypt('\x00\x00\x00\x00\x01', pub_key)
+ >>> decrypt(crypto, priv_key)
+ '\x00\x00\x00\x00\x01'
+
+ Altering the encrypted information will *likely* cause a
+ :py:class:`rsa.pkcs1.DecryptionError`. If you want to be *sure*, use
+ :py:func:`rsa.sign`.
+
+
+ .. warning::
+
+ Never display the stack trace of a
+ :py:class:`rsa.pkcs1.DecryptionError` exception. It shows where in the
+ code the exception occurred, and thus leaks information about the key.
+ It's only a tiny bit of information, but every bit makes cracking the
+ keys easier.
+
+ >>> crypto = encrypt('hello', pub_key)
+ >>> crypto = crypto[0:5] + 'X' + crypto[6:] # change a byte
+ >>> decrypt(crypto, priv_key)
+ Traceback (most recent call last):
+ ...
+ DecryptionError: Decryption failed
+
+ '''
+
+ blocksize = common.byte_size(priv_key.n)
+ encrypted = transform.bytes2int(crypto)
+ decrypted = core.decrypt_int(encrypted, priv_key.d, priv_key.n)
+ cleartext = transform.int2bytes(decrypted, blocksize)
+
+ # If we can't find the cleartext marker, decryption failed.
+ if cleartext[0:2] != b('\x00\x02'):
+ raise DecryptionError('Decryption failed')
+
+ # Find the 00 separator between the padding and the message
+ try:
+ sep_idx = cleartext.index(b('\x00'), 2)
+ except ValueError:
+ raise DecryptionError('Decryption failed')
+
+ return cleartext[sep_idx+1:]
+
+def sign(message, priv_key, hash):
+ '''Signs the message with the private key.
+
+ Hashes the message, then signs the hash with the given key. This is known
+ as a "detached signature", because the message itself isn't altered.
+
+ :param message: the message to sign. Can be an 8-bit string or a file-like
+ object. If ``message`` has a ``read()`` method, it is assumed to be a
+ file-like object.
+ :param priv_key: the :py:class:`rsa.PrivateKey` to sign with
+ :param hash: the hash method used on the message. Use 'MD5', 'SHA-1',
+ 'SHA-256', 'SHA-384' or 'SHA-512'.
+ :return: a message signature block.
+ :raise OverflowError: if the private key is too small to contain the
+ requested hash.
+
+ '''
+
+ # Get the ASN1 code for this hash method
+ if hash not in HASH_ASN1:
+ raise ValueError('Invalid hash method: %s' % hash)
+ asn1code = HASH_ASN1[hash]
+
+ # Calculate the hash
+ hash = _hash(message, hash)
+
+ # Encrypt the hash with the private key
+ cleartext = asn1code + hash
+ keylength = common.byte_size(priv_key.n)
+ padded = _pad_for_signing(cleartext, keylength)
+
+ payload = transform.bytes2int(padded)
+ encrypted = core.encrypt_int(payload, priv_key.d, priv_key.n)
+ block = transform.int2bytes(encrypted, keylength)
+
+ return block
+
+def verify(message, signature, pub_key):
+ '''Verifies that the signature matches the message.
+
+ The hash method is detected automatically from the signature.
+
+ :param message: the signed message. Can be an 8-bit string or a file-like
+ object. If ``message`` has a ``read()`` method, it is assumed to be a
+ file-like object.
+ :param signature: the signature block, as created with :py:func:`rsa.sign`.
+ :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message.
+ :raise VerificationError: when the signature doesn't match the message.
+
+ .. warning::
+
+ Never display the stack trace of a
+ :py:class:`rsa.pkcs1.VerificationError` exception. It shows where in
+ the code the exception occurred, and thus leaks information about the
+ key. It's only a tiny bit of information, but every bit makes cracking
+ the keys easier.
+
+ '''
+
+ blocksize = common.byte_size(pub_key.n)
+ encrypted = transform.bytes2int(signature)
+ decrypted = core.decrypt_int(encrypted, pub_key.e, pub_key.n)
+ clearsig = transform.int2bytes(decrypted, blocksize)
+
+ # If we can't find the signature marker, verification failed.
+ if clearsig[0:2] != b('\x00\x01'):
+ raise VerificationError('Verification failed')
+
+ # Find the 00 separator between the padding and the payload
+ try:
+ sep_idx = clearsig.index(b('\x00'), 2)
+ except ValueError:
+ raise VerificationError('Verification failed')
+
+ # Get the hash and the hash method
+ (method_name, signature_hash) = _find_method_hash(clearsig[sep_idx+1:])
+ message_hash = _hash(message, method_name)
+
+ # Compare the real hash to the hash in the signature
+ if message_hash != signature_hash:
+ raise VerificationError('Verification failed')
+
+ return True
+
+def _hash(message, method_name):
+ '''Returns the message digest.
+
+ :param message: the signed message. Can be an 8-bit string or a file-like
+ object. If ``message`` has a ``read()`` method, it is assumed to be a
+ file-like object.
+ :param method_name: the hash method, must be a key of
+ :py:const:`HASH_METHODS`.
+
+ '''
+
+ if method_name not in HASH_METHODS:
+ raise ValueError('Invalid hash method: %s' % method_name)
+
+ method = HASH_METHODS[method_name]
+ hasher = method()
+
+ if hasattr(message, 'read') and hasattr(message.read, '__call__'):
+ # read as 1K blocks
+ for block in varblock.yield_fixedblocks(message, 1024):
+ hasher.update(block)
+ else:
+ # hash the message object itself.
+ hasher.update(message)
+
+ return hasher.digest()
+
+
+def _find_method_hash(method_hash):
+ '''Finds the hash method and the hash itself.
+
+ :param method_hash: ASN1 code for the hash method concatenated with the
+ hash itself.
+
+ :return: tuple (method, hash) where ``method`` is the used hash method, and
+ ``hash`` is the hash itself.
+
+ :raise VerificationError: when the hash method cannot be found
+
+ '''
+
+ for (hashname, asn1code) in HASH_ASN1.items():
+ if not method_hash.startswith(asn1code):
+ continue
+
+ return (hashname, method_hash[len(asn1code):])
+
+ raise VerificationError('Verification failed')
+
+
+__all__ = ['encrypt', 'decrypt', 'sign', 'verify',
+ 'DecryptionError', 'VerificationError', 'CryptoError']
+
+if __name__ == '__main__':
+ print('Running doctests 1000x or until failure')
+ import doctest
+
+ for count in range(1000):
+ (failures, tests) = doctest.testmod()
+ if failures:
+ break
+
+ if count and count % 100 == 0:
+ print('%i times' % count)
+
+ print('Doctests done')
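An end-to-end sketch of the PKCS#1 v1.5 functions in rsa/pkcs1.py above, mirroring its doctests. Illustrative only; 512-bit keys are far too small for real use.

    from rsa import key, pkcs1

    (pub, priv) = key.newkeys(512)

    crypto = pkcs1.encrypt(b'secret message', pub)
    assert pkcs1.decrypt(crypto, priv) == b'secret message'

    signature = pkcs1.sign(b'secret message', priv, 'SHA-256')
    assert pkcs1.verify(b'secret message', signature, pub)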
diff --git a/third_party/python/rsa/rsa/prime.py b/third_party/python/rsa/rsa/prime.py
new file mode 100644
index 0000000000..7422eb1d28
--- /dev/null
+++ b/third_party/python/rsa/rsa/prime.py
@@ -0,0 +1,166 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Numerical functions related to primes.
+
+Implementation based on the book Algorithm Design by Michael T. Goodrich and
+Roberto Tamassia, 2002.
+'''
+
+__all__ = [ 'getprime', 'are_relatively_prime']
+
+import rsa.randnum
+
+def gcd(p, q):
+ '''Returns the greatest common divisor of p and q
+
+ >>> gcd(48, 180)
+ 12
+ '''
+
+ while q != 0:
+ if p < q: (p,q) = (q,p)
+ (p,q) = (q, p % q)
+ return p
+
+
+def jacobi(a, b):
+ '''Calculates the value of the Jacobi symbol (a/b) where both a and b are
+ positive integers, and b is odd
+
+ :returns: -1, 0 or 1
+ '''
+
+ assert a > 0
+ assert b > 0
+
+ if a == 0: return 0
+ result = 1
+ while a > 1:
+ if a & 1:
+ if ((a-1)*(b-1) >> 2) & 1:
+ result = -result
+ a, b = b % a, a
+ else:
+ if (((b * b) - 1) >> 3) & 1:
+ result = -result
+ a >>= 1
+ if a == 0: return 0
+ return result
+
+def jacobi_witness(x, n):
+ '''Returns False if n is an Euler pseudo-prime with base x, and
+ True otherwise.
+ '''
+
+ j = jacobi(x, n) % n
+
+ f = pow(x, n >> 1, n)
+
+ if j == f: return False
+ return True
+
+def randomized_primality_testing(n, k):
+ '''Calculates whether n is composite (which is always correct) or
+ prime (which is incorrect with error probability 2**-k)
+
+ Returns False if the number is composite, and True if it's
+ probably prime.
+ '''
+
+ # 50% of Jacobi-witnesses can report compositeness of non-prime numbers
+
+ # The implemented algorithm using the Jacobi witness function has error
+ # probability q <= 0.5, according to Goodrich et al.
+ #
+ # q = 0.5
+ # t = int(math.ceil(k / log(1 / q, 2)))
+ # So t = k / log(2, 2) = k / 1 = k
+ # this means we can use range(k) rather than range(t)
+
+ for _ in range(k):
+ x = rsa.randnum.randint(n-1)
+ if jacobi_witness(x, n): return False
+
+ return True
+
+def is_prime(number):
+ '''Returns True if the number is prime, and False otherwise.
+
+ >>> is_prime(42)
+ False
+ >>> is_prime(41)
+ True
+ '''
+
+ return randomized_primality_testing(number, 6)
+
+def getprime(nbits):
+ '''Returns a prime number that can be stored in 'nbits' bits.
+
+ >>> p = getprime(128)
+ >>> is_prime(p-1)
+ False
+ >>> is_prime(p)
+ True
+ >>> is_prime(p+1)
+ False
+
+ >>> from rsa import common
+ >>> common.bit_size(p) == 128
+ True
+
+ '''
+
+ while True:
+ integer = rsa.randnum.read_random_int(nbits)
+
+ # Make sure it's odd
+ integer |= 1
+
+ # Test for primeness
+ if is_prime(integer):
+ return integer
+
+ # Retry if not prime
+
+
+def are_relatively_prime(a, b):
+ '''Returns True if a and b are relatively prime, and False if they
+ are not.
+
+ >>> are_relatively_prime(2, 3)
+ True
+ >>> are_relatively_prime(2, 4)
+ False
+ '''
+
+ d = gcd(a, b)
+ return (d == 1)
+
+if __name__ == '__main__':
+ print('Running doctests 1000x or until failure')
+ import doctest
+
+ for count in range(1000):
+ (failures, tests) = doctest.testmod()
+ if failures:
+ break
+
+ if count and count % 100 == 0:
+ print('%i times' % count)
+
+ print('Doctests done')
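An illustrative check of the primitives in rsa/prime.py above (a sketch only):

    from rsa import prime

    p = prime.getprime(64)
    assert prime.is_prime(p)
    assert prime.are_relatively_prime(p, p - 1)  # consecutive integers are always coprime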
diff --git a/third_party/python/rsa/rsa/randnum.py b/third_party/python/rsa/rsa/randnum.py
new file mode 100644
index 0000000000..0e782744c0
--- /dev/null
+++ b/third_party/python/rsa/rsa/randnum.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Functions for generating random numbers.'''
+
+# Source inspired by code by Yesudeep Mangalapilly <yesudeep@gmail.com>
+
+import os
+
+from rsa import common, transform
+from rsa._compat import byte
+
+def read_random_bits(nbits):
+ '''Reads 'nbits' random bits.
+
+ If nbits isn't a whole number of bytes, an extra byte will be prepended with
+ only the lower bits set.
+ '''
+
+ nbytes, rbits = divmod(nbits, 8)
+
+ # Get the random bytes
+ randomdata = os.urandom(nbytes)
+
+ # Add the remaining random bits
+ if rbits > 0:
+ randomvalue = ord(os.urandom(1))
+ randomvalue >>= (8 - rbits)
+ randomdata = byte(randomvalue) + randomdata
+
+ return randomdata
+
+
+def read_random_int(nbits):
+ '''Reads a random integer of approximately nbits bits.
+ '''
+
+ randomdata = read_random_bits(nbits)
+ value = transform.bytes2int(randomdata)
+
+ # Ensure that the number is large enough to just fill out the required
+ # number of bits.
+ value |= 1 << (nbits - 1)
+
+ return value
+
+def randint(maxvalue):
+ '''Returns a random integer x with 1 <= x <= maxvalue
+
+ May take a very long time in specific situations. If maxvalue needs N bits
+ to store, the closer maxvalue is to (2 ** N) - 1, the faster this function
+ is.
+ '''
+
+ bit_size = common.bit_size(maxvalue)
+
+ tries = 0
+ while True:
+ value = read_random_int(bit_size)
+ if value <= maxvalue:
+ break
+
+ if tries and tries % 10 == 0:
+ # After a lot of tries to get the right number of bits but still
+ # smaller than maxvalue, decrease the number of bits by 1. That'll
+ # dramatically increase the chances to get a large enough number.
+ bit_size -= 1
+ tries += 1
+
+ return value
+
+
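A quick sketch of the guarantees documented in rsa/randnum.py above: read_random_int() forces the top bit on, and randint() stays within the requested bound. Illustrative only.

    from rsa import randnum, common

    value = randnum.read_random_int(128)
    assert common.bit_size(value) == 128

    x = randnum.randint(1000)
    assert 1 <= x <= 1000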
diff --git a/third_party/python/rsa/rsa/transform.py b/third_party/python/rsa/rsa/transform.py
new file mode 100644
index 0000000000..c740b2d275
--- /dev/null
+++ b/third_party/python/rsa/rsa/transform.py
@@ -0,0 +1,220 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Data transformation functions.
+
+From bytes to a number, number to bytes, etc.
+'''
+
+from __future__ import absolute_import
+
+try:
+ # We'll use psyco if available on 32-bit architectures to speed up code.
+ # Using psyco (if available) cuts down the execution time on Python 2.5
+ # at least by half.
+ import psyco
+ psyco.full()
+except ImportError:
+ pass
+
+import binascii
+from struct import pack
+from rsa import common
+from rsa._compat import is_integer, b, byte, get_word_alignment, ZERO_BYTE, EMPTY_BYTE
+
+
+def bytes2int(raw_bytes):
+ r'''Converts a list of bytes or an 8-bit string to an integer.
+
+ When using unicode strings, encode it to some encoding like UTF8 first.
+
+ >>> (((128 * 256) + 64) * 256) + 15
+ 8405007
+ >>> bytes2int('\x80@\x0f')
+ 8405007
+
+ '''
+
+ return int(binascii.hexlify(raw_bytes), 16)
+
+
+def _int2bytes(number, block_size=None):
+ r'''Converts a number to a string of bytes.
+
+ Usage::
+
+ >>> _int2bytes(123456789)
+ '\x07[\xcd\x15'
+ >>> bytes2int(_int2bytes(123456789))
+ 123456789
+
+ >>> _int2bytes(123456789, 6)
+ '\x00\x00\x07[\xcd\x15'
+ >>> bytes2int(_int2bytes(123456789, 128))
+ 123456789
+
+ >>> _int2bytes(123456789, 3)
+ Traceback (most recent call last):
+ ...
+ OverflowError: Needed 4 bytes for number, but block size is 3
+
+ @param number: the number to convert
+ @param block_size: the number of bytes to output. If the number encoded to
+ bytes is less than this, the block will be zero-padded. When not given,
+ the returned block is not padded.
+
+ @throws OverflowError when block_size is given and the number takes up more
+ bytes than fit into the block.
+ '''
+ # Type checking
+ if not is_integer(number):
+ raise TypeError("You must pass an integer for 'number', not %s" %
+ number.__class__)
+
+ if number < 0:
+ raise ValueError('Negative numbers cannot be used: %i' % number)
+
+ # Do some bounds checking
+ if number == 0:
+ needed_bytes = 1
+ raw_bytes = [ZERO_BYTE]
+ else:
+ needed_bytes = common.byte_size(number)
+ raw_bytes = []
+
+ # You cannot compare None > 0 in Python 3x. It will fail with a TypeError.
+ if block_size and block_size > 0:
+ if needed_bytes > block_size:
+ raise OverflowError('Needed %i bytes for number, but block size '
+ 'is %i' % (needed_bytes, block_size))
+
+ # Convert the number to bytes.
+ while number > 0:
+ raw_bytes.insert(0, byte(number & 0xFF))
+ number >>= 8
+
+ # Pad with zeroes to fill the block
+ if block_size and block_size > 0:
+ padding = (block_size - needed_bytes) * ZERO_BYTE
+ else:
+ padding = EMPTY_BYTE
+
+ return padding + EMPTY_BYTE.join(raw_bytes)
+
+
+def bytes_leading(raw_bytes, needle=ZERO_BYTE):
+ '''
+ Finds the number of prefixed byte occurrences in the haystack.
+
+ Useful when you want to deal with padding.
+
+ :param raw_bytes:
+ Raw bytes.
+ :param needle:
+ The byte to count. Default \000.
+ :returns:
+ The number of leading needle bytes.
+ '''
+ leading = 0
+ # Indexing keeps compatibility between Python 2.x and Python 3.x
+ _byte = needle[0]
+ for x in raw_bytes:
+ if x == _byte:
+ leading += 1
+ else:
+ break
+ return leading
+
+
+def int2bytes(number, fill_size=None, chunk_size=None, overflow=False):
+ '''
+ Convert an unsigned integer to bytes (base-256 representation)::
+
+ Does not preserve leading zeros if you don't specify a chunk size or
+ fill size.
+
+ .. NOTE:
+ You must not specify both fill_size and chunk_size. Only one
+ of them is allowed.
+
+ :param number:
+ Integer value
+ :param fill_size:
+ If the optional fill size is given the length of the resulting
+ byte string is expected to be the fill size and will be padded
+ with prefix zero bytes to satisfy that length.
+ :param chunk_size:
+ If optional chunk size is given and greater than zero, pad the front of
+ the byte string with binary zeros so that the length is a multiple of
+ ``chunk_size``.
+ :param overflow:
+ ``False`` (default). If this is ``True``, no ``OverflowError``
+ will be raised when the fill_size is shorter than the length
+ of the generated byte sequence. Instead the byte sequence will
+ be returned as is.
+ :returns:
+ Raw bytes (base-256 representation).
+ :raises:
+ ``OverflowError`` when fill_size is given and the number takes up more
+ bytes than fit into the block. This requires the ``overflow``
+ argument to this function to be set to ``False`` otherwise, no
+ error will be raised.
+ '''
+ if number < 0:
+ raise ValueError("Number must be an unsigned integer: %d" % number)
+
+ if fill_size and chunk_size:
+ raise ValueError("You can either fill or pad chunks, but not both")
+
+ # Ensure these are integers.
+ number & 1
+
+ raw_bytes = b('')
+
+ # Pack the integer one machine word at a time into bytes.
+ num = number
+ word_bits, _, max_uint, pack_type = get_word_alignment(num)
+ pack_format = ">%s" % pack_type
+ while num > 0:
+ raw_bytes = pack(pack_format, num & max_uint) + raw_bytes
+ num >>= word_bits
+ # Obtain the index of the first non-zero byte.
+ zero_leading = bytes_leading(raw_bytes)
+ if number == 0:
+ raw_bytes = ZERO_BYTE
+ # De-padding.
+ raw_bytes = raw_bytes[zero_leading:]
+
+ length = len(raw_bytes)
+ if fill_size and fill_size > 0:
+ if not overflow and length > fill_size:
+ raise OverflowError(
+ "Need %d bytes for number, but fill size is %d" %
+ (length, fill_size)
+ )
+ raw_bytes = raw_bytes.rjust(fill_size, ZERO_BYTE)
+ elif chunk_size and chunk_size > 0:
+ remainder = length % chunk_size
+ if remainder:
+ padding_size = chunk_size - remainder
+ raw_bytes = raw_bytes.rjust(length + padding_size, ZERO_BYTE)
+ return raw_bytes
+
+
+if __name__ == '__main__':
+ import doctest
+ doctest.testmod()
+
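An illustrative round trip through the conversion functions in rsa/transform.py above, reusing the 8405007 value from the bytes2int doctest:

    from rsa import transform

    block = transform.int2bytes(8405007, fill_size=8)
    assert block == b'\x00\x00\x00\x00\x00\x80@\x0f'
    assert transform.bytes2int(block) == 8405007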
diff --git a/third_party/python/rsa/rsa/util.py b/third_party/python/rsa/rsa/util.py
new file mode 100644
index 0000000000..5bbb70be18
--- /dev/null
+++ b/third_party/python/rsa/rsa/util.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Utility functions.'''
+
+from __future__ import with_statement, print_function
+
+import sys
+from optparse import OptionParser
+
+import rsa.key
+
+def private_to_public():
+ '''Reads a private key and outputs the corresponding public key.'''
+
+ # Parse the CLI options
+ parser = OptionParser(usage='usage: %prog [options]',
+ description='Reads a private key and outputs the '
+ 'corresponding public key. Both private and public keys use '
+ 'the format described in PKCS#1 v1.5')
+
+ parser.add_option('-i', '--input', dest='infilename', type='string',
+ help='Input filename. Reads from stdin if not specified')
+ parser.add_option('-o', '--output', dest='outfilename', type='string',
+ help='Output filename. Writes to stdout if not specified')
+
+ parser.add_option('--inform', dest='inform',
+ help='key format of input - default PEM',
+ choices=('PEM', 'DER'), default='PEM')
+
+ parser.add_option('--outform', dest='outform',
+ help='key format of output - default PEM',
+ choices=('PEM', 'DER'), default='PEM')
+
+ (cli, cli_args) = parser.parse_args(sys.argv)
+
+ # Read the input data
+ if cli.infilename:
+ print('Reading private key from %s in %s format' % \
+ (cli.infilename, cli.inform), file=sys.stderr)
+ with open(cli.infilename, 'rb') as infile:
+ in_data = infile.read()
+ else:
+ print('Reading private key from stdin in %s format' % cli.inform,
+ file=sys.stderr)
+ in_data = sys.stdin.read().encode('ascii')
+
+ assert type(in_data) == bytes, type(in_data)
+
+
+ # Take the public fields and create a public key
+ priv_key = rsa.key.PrivateKey.load_pkcs1(in_data, cli.inform)
+ pub_key = rsa.key.PublicKey(priv_key.n, priv_key.e)
+
+ # Save to the output file
+ out_data = pub_key.save_pkcs1(cli.outform)
+
+ if cli.outfilename:
+ print('Writing public key to %s in %s format' % \
+ (cli.outfilename, cli.outform), file=sys.stderr)
+ with open(cli.outfilename, 'wb') as outfile:
+ outfile.write(out_data)
+ else:
+ print('Writing public key to stdout in %s format' % cli.outform,
+ file=sys.stderr)
+ sys.stdout.write(out_data.decode('ascii'))
+
+
diff --git a/third_party/python/rsa/rsa/varblock.py b/third_party/python/rsa/rsa/varblock.py
new file mode 100644
index 0000000000..c7d96ae6a7
--- /dev/null
+++ b/third_party/python/rsa/rsa/varblock.py
@@ -0,0 +1,155 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''VARBLOCK file support
+
+The VARBLOCK file format is as follows, where || denotes byte concatenation:
+
+ FILE := VERSION || BLOCK || BLOCK ...
+
+ BLOCK := LENGTH || DATA
+
+ LENGTH := varint-encoded length of the subsequent data. Varint comes from
+ Google Protobuf, and encodes an integer into a variable number of bytes.
+ Each byte uses the 7 lowest bits to encode the value. The highest bit set
+ to 1 indicates the next byte is also part of the varint. The last byte will
+ have this bit set to 0.
+
+This file format is called the VARBLOCK format, in line with the varint format
+used to denote the block sizes.
+
+'''
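+
+# Worked example of the LENGTH encoding above (illustrative; it matches the
+# b'\xac\x02' fixture used in the test suite): the value 300 is 0b100101100.
+# Its lowest 7 bits (0b0101100 = 0x2c) are emitted first with the continuation
+# bit set, giving 0xac; the remaining bits (0b10 = 0x02) follow with the
+# continuation bit cleared, so 300 encodes to the two-byte varint b'\xac\x02'.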
+
+from rsa._compat import byte, b
+
+
+ZERO_BYTE = b('\x00')
+VARBLOCK_VERSION = 1
+
+def read_varint(infile):
+ '''Reads a varint from the file.
+
+ When the first byte to be read indicates EOF, (0, 0) is returned. If EOF
+ occurs after at least one byte has already been read, an EOFError exception
+ is raised.
+
+ @param infile: the file-like object to read from. It should have a read()
+ method.
+ @returns (varint, length), the read varint and the number of read bytes.
+ '''
+
+ varint = 0
+ read_bytes = 0
+
+ while True:
+ char = infile.read(1)
+ if len(char) == 0:
+ if read_bytes == 0:
+ return (0, 0)
+ raise EOFError('EOF while reading varint, value is %i so far' %
+ varint)
+
+ byte = ord(char)
+ varint += (byte & 0x7F) << (7 * read_bytes)
+
+ read_bytes += 1
+
+ if not byte & 0x80:
+ return (varint, read_bytes)
+
+
+def write_varint(outfile, value):
+ '''Writes a varint to a file.
+
+ @param outfile: the file-like object to write to. It should have a write()
+ method.
+ @returns the number of written bytes.
+ '''
+
+ # there is a big difference between 'write the value 0' (this case) and
+ # 'there is nothing left to write' (the false-case of the while loop)
+
+ if value == 0:
+ outfile.write(ZERO_BYTE)
+ return 1
+
+ written_bytes = 0
+ while value > 0:
+ to_write = value & 0x7f
+ value = value >> 7
+
+ if value > 0:
+ to_write |= 0x80
+
+ outfile.write(byte(to_write))
+ written_bytes += 1
+
+ return written_bytes
+
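+# A minimal round-trip sketch for the two functions above (illustrative, not
+# part of the upstream module):
+#
+#   from io import BytesIO
+#   buf = BytesIO()
+#   write_varint(buf, 300)    # returns 2; buf now holds b'\xac\x02'
+#   buf.seek(0)
+#   read_varint(buf)          # returns (300, 2)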
+
+def yield_varblocks(infile):
+ '''Generator, yields each block in the input file.
+
+ @param infile: file to read, is expected to have the VARBLOCK format as
+ described in the module's docstring.
+ @yields the contents of each block.
+ '''
+
+ # Check the version number
+ first_char = infile.read(1)
+ if len(first_char) == 0:
+ raise EOFError('Unable to read VARBLOCK version number')
+
+ version = ord(first_char)
+ if version != VARBLOCK_VERSION:
+ raise ValueError('VARBLOCK version %i not supported' % version)
+
+ while True:
+ (block_size, read_bytes) = read_varint(infile)
+
+ # EOF at block boundary, that's fine.
+ if read_bytes == 0 and block_size == 0:
+ break
+
+ block = infile.read(block_size)
+
+ read_size = len(block)
+ if read_size != block_size:
+ raise EOFError('Block size is %i, but could read only %i bytes' %
+ (block_size, read_size))
+
+ yield block
+
+
+def yield_fixedblocks(infile, blocksize):
+ '''Generator, yields each block of ``blocksize`` bytes in the input file.
+
+ :param infile: file to read and separate in blocks.
+ :returns: a generator that yields the contents of each block
+ '''
+
+ while True:
+ block = infile.read(blocksize)
+
+ read_bytes = len(block)
+ if read_bytes == 0:
+ break
+
+ yield block
+
+ if read_bytes < blocksize:
+ break
+
diff --git a/third_party/python/rsa/run_tests.py b/third_party/python/rsa/run_tests.py
new file mode 100644
index 0000000000..e0f249081f
--- /dev/null
+++ b/third_party/python/rsa/run_tests.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import os
+import sys
+import unittest2 as unittest
+
+current_path = os.path.abspath(os.path.dirname(__file__))
+tests_path = os.path.join(current_path, 'tests')
+sys.path[0:0] = [
+ current_path,
+ tests_path,
+]
+
+all_tests = [f[:-3] for f in os.listdir(tests_path)
+ if f.startswith('test_') and f.endswith(".py")]
+
+def get_suite(tests):
+ tests = sorted(tests)
+ suite = unittest.TestSuite()
+ loader = unittest.TestLoader()
+ for test in tests:
+ suite.addTest(loader.loadTestsFromName(test))
+ return suite
+
+if __name__ == '__main__':
+ """
+ To run all tests:
+ $ python run_tests.py
+ To run a single test:
+ $ python run_tests.py app
+ To run a couple of tests:
+ $ python run_tests.py app config sessions
+ To run code coverage:
+ $ coverage run run_tests.py
+ $ coverage report -m
+ """
+ tests = sys.argv[1:]
+ if not tests:
+ tests = all_tests
+ tests = ['%s' % t for t in tests]
+ suite = get_suite(tests)
+ unittest.TextTestRunner(verbosity=1).run(suite)
diff --git a/third_party/python/rsa/setup.cfg b/third_party/python/rsa/setup.cfg
new file mode 100644
index 0000000000..2675c2767c
--- /dev/null
+++ b/third_party/python/rsa/setup.cfg
@@ -0,0 +1,8 @@
+[nosetests]
+verbosity = 2
+
+[egg_info]
+tag_date = 0
+tag_build =
+tag_svn_revision = 0
+
diff --git a/third_party/python/rsa/setup.py b/third_party/python/rsa/setup.py
new file mode 100755
index 0000000000..8a2df8d1f8
--- /dev/null
+++ b/third_party/python/rsa/setup.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+from setuptools import setup
+
+import rsa
+
+setup(name='rsa',
+ version=rsa.__version__,
+ description='Pure-Python RSA implementation',
+ author='Sybren A. Stuvel',
+ author_email='sybren@stuvel.eu',
+ maintainer='Sybren A. Stuvel',
+ maintainer_email='sybren@stuvel.eu',
+ url='http://stuvel.eu/rsa',
+ packages=['rsa'],
+ license='ASL 2',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: Education',
+ 'Intended Audience :: Information Technology',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Topic :: Security :: Cryptography',
+ ],
+ install_requires=[
+ 'pyasn1 >= 0.1.3',
+ ],
+ entry_points={ 'console_scripts': [
+ 'pyrsa-priv2pub = rsa.util:private_to_public',
+ 'pyrsa-keygen = rsa.cli:keygen',
+ 'pyrsa-encrypt = rsa.cli:encrypt',
+ 'pyrsa-decrypt = rsa.cli:decrypt',
+ 'pyrsa-sign = rsa.cli:sign',
+ 'pyrsa-verify = rsa.cli:verify',
+ 'pyrsa-encrypt-bigfile = rsa.cli:encrypt_bigfile',
+ 'pyrsa-decrypt-bigfile = rsa.cli:decrypt_bigfile',
+ ]},
+
+)
diff --git a/third_party/python/rsa/tests/__init__.py b/third_party/python/rsa/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/rsa/tests/__init__.py
diff --git a/third_party/python/rsa/tests/constants.py b/third_party/python/rsa/tests/constants.py
new file mode 100644
index 0000000000..6a0d081836
--- /dev/null
+++ b/third_party/python/rsa/tests/constants.py
@@ -0,0 +1,9 @@
+# -*- coding: utf-8 -*-
+
+from rsa._compat import have_python3
+
+if have_python3:
+ from py3kconstants import *
+else:
+ from py2kconstants import *
+
diff --git a/third_party/python/rsa/tests/py2kconstants.py b/third_party/python/rsa/tests/py2kconstants.py
new file mode 100644
index 0000000000..5f695dd227
--- /dev/null
+++ b/third_party/python/rsa/tests/py2kconstants.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+
+unicode_string = u"Euro=\u20ac ABCDEFGHIJKLMNOPQRSTUVWXYZ"
diff --git a/third_party/python/rsa/tests/py3kconstants.py b/third_party/python/rsa/tests/py3kconstants.py
new file mode 100644
index 0000000000..83b67129c9
--- /dev/null
+++ b/third_party/python/rsa/tests/py3kconstants.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+
+unicode_string = "Euro=\u20ac ABCDEFGHIJKLMNOPQRSTUVWXYZ"
diff --git a/third_party/python/rsa/tests/test_bigfile.py b/third_party/python/rsa/tests/test_bigfile.py
new file mode 100644
index 0000000000..86bcbbac6f
--- /dev/null
+++ b/third_party/python/rsa/tests/test_bigfile.py
@@ -0,0 +1,60 @@
+'''Tests block operations.'''
+from rsa._compat import b
+
+try:
+ from StringIO import StringIO as BytesIO
+except ImportError:
+ from io import BytesIO
+import unittest2
+
+import rsa
+from rsa import bigfile, varblock, pkcs1
+
+class BigfileTest(unittest2.TestCase):
+
+ def test_encrypt_decrypt_bigfile(self):
+
+ # Expected block size + 11 bytes padding
+ pub_key, priv_key = rsa.newkeys((6 + 11) * 8)
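+        # (A 136-bit key gives a 17-byte modulus: room for one 6-byte block
+        # plus the 11 bytes of PKCS#1 v1.5 padding overhead, so the 12-byte
+        # message below should be split into two blocks.)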
+
+ # Encrypt the file
+ message = b('123456Sybren')
+ infile = BytesIO(message)
+ outfile = BytesIO()
+
+ bigfile.encrypt_bigfile(infile, outfile, pub_key)
+
+ # Test
+ crypto = outfile.getvalue()
+
+ cryptfile = BytesIO(crypto)
+ clearfile = BytesIO()
+
+ bigfile.decrypt_bigfile(cryptfile, clearfile, priv_key)
+ self.assertEqual(clearfile.getvalue(), message)
+
+ # We have 2x6 bytes in the message, so that should result in two
+ # varblocks.
+ cryptfile.seek(0)
+ varblocks = list(varblock.yield_varblocks(cryptfile))
+ self.assertEqual(2, len(varblocks))
+
+
+ def test_sign_verify_bigfile(self):
+
+ # Large enough to store MD5-sum and ASN.1 code for MD5
+ pub_key, priv_key = rsa.newkeys((34 + 11) * 8)
+
+ # Sign the file
+ msgfile = BytesIO(b('123456Sybren'))
+ signature = pkcs1.sign(msgfile, priv_key, 'MD5')
+
+ # Check the signature
+ msgfile.seek(0)
+ self.assertTrue(pkcs1.verify(msgfile, signature, pub_key))
+
+ # Alter the message, re-check
+ msgfile = BytesIO(b('123456sybren'))
+ self.assertRaises(pkcs1.VerificationError,
+ pkcs1.verify, msgfile, signature, pub_key)
+
diff --git a/third_party/python/rsa/tests/test_common.py b/third_party/python/rsa/tests/test_common.py
new file mode 100644
index 0000000000..d105dc020f
--- /dev/null
+++ b/third_party/python/rsa/tests/test_common.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import unittest2
+import struct
+from rsa._compat import byte, b
+from rsa.common import byte_size, bit_size, _bit_size
+
+
+class Test_byte(unittest2.TestCase):
+ def test_values(self):
+ self.assertEqual(byte(0), b('\x00'))
+ self.assertEqual(byte(255), b('\xff'))
+
+ def test_struct_error_when_out_of_bounds(self):
+ self.assertRaises(struct.error, byte, 256)
+ self.assertRaises(struct.error, byte, -1)
+
+class Test_byte_size(unittest2.TestCase):
+ def test_values(self):
+ self.assertEqual(byte_size(1 << 1023), 128)
+ self.assertEqual(byte_size((1 << 1024) - 1), 128)
+ self.assertEqual(byte_size(1 << 1024), 129)
+ self.assertEqual(byte_size(255), 1)
+ self.assertEqual(byte_size(256), 2)
+ self.assertEqual(byte_size(0xffff), 2)
+ self.assertEqual(byte_size(0xffffff), 3)
+ self.assertEqual(byte_size(0xffffffff), 4)
+ self.assertEqual(byte_size(0xffffffffff), 5)
+ self.assertEqual(byte_size(0xffffffffffff), 6)
+ self.assertEqual(byte_size(0xffffffffffffff), 7)
+ self.assertEqual(byte_size(0xffffffffffffffff), 8)
+
+ def test_zero(self):
+ self.assertEqual(byte_size(0), 1)
+
+ def test_bad_type(self):
+ self.assertRaises(TypeError, byte_size, [])
+ self.assertRaises(TypeError, byte_size, ())
+ self.assertRaises(TypeError, byte_size, dict())
+ self.assertRaises(TypeError, byte_size, "")
+ self.assertRaises(TypeError, byte_size, None)
+
+class Test_bit_size(unittest2.TestCase):
+ def test_zero(self):
+ self.assertEqual(bit_size(0), 0)
+
+ def test_values(self):
+ self.assertEqual(bit_size(1023), 10)
+ self.assertEqual(bit_size(1024), 11)
+ self.assertEqual(bit_size(1025), 11)
+ self.assertEqual(bit_size(1 << 1024), 1025)
+ self.assertEqual(bit_size((1 << 1024) + 1), 1025)
+ self.assertEqual(bit_size((1 << 1024) - 1), 1024)
+
+ self.assertEqual(_bit_size(1023), 10)
+ self.assertEqual(_bit_size(1024), 11)
+ self.assertEqual(_bit_size(1025), 11)
+ self.assertEqual(_bit_size(1 << 1024), 1025)
+ self.assertEqual(_bit_size((1 << 1024) + 1), 1025)
+ self.assertEqual(_bit_size((1 << 1024) - 1), 1024)
diff --git a/third_party/python/rsa/tests/test_compat.py b/third_party/python/rsa/tests/test_compat.py
new file mode 100644
index 0000000000..3652c82d51
--- /dev/null
+++ b/third_party/python/rsa/tests/test_compat.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+
+import unittest2
+import struct
+
+from rsa._compat import is_bytes, byte
+
+class Test_byte(unittest2.TestCase):
+ def test_byte(self):
+ for i in range(256):
+ byt = byte(i)
+ self.assertTrue(is_bytes(byt))
+ self.assertEqual(ord(byt), i)
+
+ def test_raises_StructError_on_overflow(self):
+ self.assertRaises(struct.error, byte, 256)
+ self.assertRaises(struct.error, byte, -1)
diff --git a/third_party/python/rsa/tests/test_integers.py b/third_party/python/rsa/tests/test_integers.py
new file mode 100644
index 0000000000..0a712aa0fc
--- /dev/null
+++ b/third_party/python/rsa/tests/test_integers.py
@@ -0,0 +1,36 @@
+'''Tests integer operations.'''
+
+import unittest2
+
+import rsa.core
+
+class IntegerTest(unittest2.TestCase):
+
+ def setUp(self):
+ (self.pub, self.priv) = rsa.newkeys(64)
+
+ def test_enc_dec(self):
+
+ message = 42
+ print("\tMessage: %d" % message)
+
+ encrypted = rsa.core.encrypt_int(message, self.pub.e, self.pub.n)
+ print("\tEncrypted: %d" % encrypted)
+
+ decrypted = rsa.core.decrypt_int(encrypted, self.priv.d, self.pub.n)
+ print("\tDecrypted: %d" % decrypted)
+
+ self.assertEqual(message, decrypted)
+
+ def test_sign_verify(self):
+
+ message = 42
+
+ signed = rsa.core.encrypt_int(message, self.priv.d, self.pub.n)
+ print("\tSigned: %d" % signed)
+
+ verified = rsa.core.decrypt_int(signed, self.pub.e, self.pub.n)
+ print("\tVerified: %d" % verified)
+
+ self.assertEqual(message, verified)
+
diff --git a/third_party/python/rsa/tests/test_load_save_keys.py b/third_party/python/rsa/tests/test_load_save_keys.py
new file mode 100644
index 0000000000..fc1a1aaae7
--- /dev/null
+++ b/third_party/python/rsa/tests/test_load_save_keys.py
@@ -0,0 +1,127 @@
+'''Unittest for saving and loading keys.'''
+
+import base64
+import unittest2
+from rsa._compat import b
+
+import rsa.key
+
+B64PRIV_DER = b('MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt')
+PRIVATE_DER = base64.decodestring(B64PRIV_DER)
+
+B64PUB_DER = b('MAwCBQDeKYlRAgMBAAE=')
+PUBLIC_DER = base64.decodestring(B64PUB_DER)
+
+PRIVATE_PEM = b('''
+-----BEGIN CONFUSING STUFF-----
+Cruft before the key
+
+-----BEGIN RSA PRIVATE KEY-----
+Comment: something blah
+
+%s
+-----END RSA PRIVATE KEY-----
+
+Stuff after the key
+-----END CONFUSING STUFF-----
+''' % B64PRIV_DER.decode("utf-8"))
+
+CLEAN_PRIVATE_PEM = b('''\
+-----BEGIN RSA PRIVATE KEY-----
+%s
+-----END RSA PRIVATE KEY-----
+''' % B64PRIV_DER.decode("utf-8"))
+
+PUBLIC_PEM = b('''
+-----BEGIN CONFUSING STUFF-----
+Cruft before the key
+
+-----BEGIN RSA PUBLIC KEY-----
+Comment: something blah
+
+%s
+-----END RSA PUBLIC KEY-----
+
+Stuff after the key
+-----END CONFUSING STUFF-----
+''' % B64PUB_DER.decode("utf-8"))
+
+CLEAN_PUBLIC_PEM = b('''\
+-----BEGIN RSA PUBLIC KEY-----
+%s
+-----END RSA PUBLIC KEY-----
+''' % B64PUB_DER.decode("utf-8"))
+
+
+class DerTest(unittest2.TestCase):
+ '''Test saving and loading DER keys.'''
+
+ def test_load_private_key(self):
+ '''Test loading private DER keys.'''
+
+ key = rsa.key.PrivateKey.load_pkcs1(PRIVATE_DER, 'DER')
+ expected = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
+
+ self.assertEqual(expected, key)
+
+ def test_save_private_key(self):
+ '''Test saving private DER keys.'''
+
+ key = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
+ der = key.save_pkcs1('DER')
+
+ self.assertEqual(PRIVATE_DER, der)
+
+ def test_load_public_key(self):
+ '''Test loading public DER keys.'''
+
+ key = rsa.key.PublicKey.load_pkcs1(PUBLIC_DER, 'DER')
+ expected = rsa.key.PublicKey(3727264081, 65537)
+
+ self.assertEqual(expected, key)
+
+ def test_save_public_key(self):
+ '''Test saving public DER keys.'''
+
+ key = rsa.key.PublicKey(3727264081, 65537)
+ der = key.save_pkcs1('DER')
+
+ self.assertEqual(PUBLIC_DER, der)
+
+class PemTest(unittest2.TestCase):
+ '''Test saving and loading PEM keys.'''
+
+
+ def test_load_private_key(self):
+ '''Test loading private PEM files.'''
+
+ key = rsa.key.PrivateKey.load_pkcs1(PRIVATE_PEM, 'PEM')
+ expected = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
+
+ self.assertEqual(expected, key)
+
+ def test_save_private_key(self):
+ '''Test saving private PEM files.'''
+
+ key = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
+ pem = key.save_pkcs1('PEM')
+
+ self.assertEqual(CLEAN_PRIVATE_PEM, pem)
+
+ def test_load_public_key(self):
+ '''Test loading public PEM files.'''
+
+ key = rsa.key.PublicKey.load_pkcs1(PUBLIC_PEM, 'PEM')
+ expected = rsa.key.PublicKey(3727264081, 65537)
+
+ self.assertEqual(expected, key)
+
+ def test_save_public_key(self):
+ '''Test saving public PEM files.'''
+
+ key = rsa.key.PublicKey(3727264081, 65537)
+ pem = key.save_pkcs1('PEM')
+
+ self.assertEqual(CLEAN_PUBLIC_PEM, pem)
+
+
diff --git a/third_party/python/rsa/tests/test_pem.py b/third_party/python/rsa/tests/test_pem.py
new file mode 100644
index 0000000000..867f678a0e
--- /dev/null
+++ b/third_party/python/rsa/tests/test_pem.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+import unittest2
+from rsa._compat import b
+from rsa.pem import _markers
+
+
+class Test__markers(unittest2.TestCase):
+ def test_values(self):
+ self.assertEqual(_markers('RSA PRIVATE KEY'),
+ (b('-----BEGIN RSA PRIVATE KEY-----'),
+ b('-----END RSA PRIVATE KEY-----')))
diff --git a/third_party/python/rsa/tests/test_pkcs1.py b/third_party/python/rsa/tests/test_pkcs1.py
new file mode 100644
index 0000000000..d5882dfd1b
--- /dev/null
+++ b/third_party/python/rsa/tests/test_pkcs1.py
@@ -0,0 +1,94 @@
+'''Tests string operations.'''
+
+import struct
+import unittest2
+
+import rsa
+from rsa import pkcs1
+from rsa._compat import byte, is_integer, b, is_bytes
+
+class BinaryTest(unittest2.TestCase):
+
+ def setUp(self):
+ (self.pub, self.priv) = rsa.newkeys(256)
+
+ def test_enc_dec(self):
+
+ message = struct.pack('>IIII', 0, 0, 0, 1)
+ print("\tMessage: %r" % message)
+
+ encrypted = pkcs1.encrypt(message, self.pub)
+ print("\tEncrypted: %r" % encrypted)
+
+ decrypted = pkcs1.decrypt(encrypted, self.priv)
+ print("\tDecrypted: %r" % decrypted)
+
+ self.assertEqual(message, decrypted)
+
+ def test_decoding_failure(self):
+
+ message = struct.pack('>IIII', 0, 0, 0, 1)
+ encrypted = pkcs1.encrypt(message, self.pub)
+
+ # Alter the encrypted stream
+ a = encrypted[5]
+ if is_bytes(a):
+ a = ord(a)
+ encrypted = encrypted[:5] + byte(a + 1) + encrypted[6:]
+
+ self.assertRaises(pkcs1.DecryptionError, pkcs1.decrypt, encrypted,
+ self.priv)
+
+ def test_randomness(self):
+ '''Encrypting the same message twice should result in different
+ ciphertexts.
+ '''
+
+ message = struct.pack('>IIII', 0, 0, 0, 1)
+ encrypted1 = pkcs1.encrypt(message, self.pub)
+ encrypted2 = pkcs1.encrypt(message, self.pub)
+
+ self.assertNotEqual(encrypted1, encrypted2)
+
+class SignatureTest(unittest2.TestCase):
+
+ def setUp(self):
+ (self.pub, self.priv) = rsa.newkeys(512)
+
+ def test_sign_verify(self):
+ '''Test happy flow of sign and verify'''
+
+ message = b('je moeder')
+ print("\tMessage: %r" % message)
+
+ signature = pkcs1.sign(message, self.priv, 'SHA-256')
+ print("\tSignature: %r" % signature)
+
+ self.assertTrue(pkcs1.verify(message, signature, self.pub))
+
+ def test_alter_message(self):
+ '''Altering the message should make the verification fail.'''
+
+ signature = pkcs1.sign(b('je moeder'), self.priv, 'SHA-256')
+ self.assertRaises(pkcs1.VerificationError, pkcs1.verify,
+ b('mijn moeder'), signature, self.pub)
+
+ def test_sign_different_key(self):
+ '''Signing with another key should make the verification fail.'''
+
+ (otherpub, _) = rsa.newkeys(512)
+
+ message = b('je moeder')
+ signature = pkcs1.sign(message, self.priv, 'SHA-256')
+ self.assertRaises(pkcs1.VerificationError, pkcs1.verify,
+ message, signature, otherpub)
+
+ def test_multiple_signings(self):
+ '''Signing the same message twice should return the same signatures.'''
+
+ message = struct.pack('>IIII', 0, 0, 0, 1)
+ signature1 = pkcs1.sign(message, self.priv, 'SHA-1')
+ signature2 = pkcs1.sign(message, self.priv, 'SHA-1')
+
+ self.assertEqual(signature1, signature2)
+
diff --git a/third_party/python/rsa/tests/test_strings.py b/third_party/python/rsa/tests/test_strings.py
new file mode 100644
index 0000000000..4af06291d4
--- /dev/null
+++ b/third_party/python/rsa/tests/test_strings.py
@@ -0,0 +1,28 @@
+'''Tests string operations.'''
+
+from __future__ import absolute_import
+
+import unittest2
+
+import rsa
+
+from constants import unicode_string
+
+class StringTest(unittest2.TestCase):
+
+ def setUp(self):
+ (self.pub, self.priv) = rsa.newkeys(384)
+
+ def test_enc_dec(self):
+
+ message = unicode_string.encode('utf-8')
+ print("\tMessage: %s" % message)
+
+ encrypted = rsa.encrypt(message, self.pub)
+ print("\tEncrypted: %s" % encrypted)
+
+ decrypted = rsa.decrypt(encrypted, self.priv)
+ print("\tDecrypted: %s" % decrypted)
+
+ self.assertEqual(message, decrypted)
+
diff --git a/third_party/python/rsa/tests/test_transform.py b/third_party/python/rsa/tests/test_transform.py
new file mode 100644
index 0000000000..ffd9ec892a
--- /dev/null
+++ b/third_party/python/rsa/tests/test_transform.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+
+
+import unittest2
+from rsa._compat import b
+from rsa.transform import int2bytes, bytes2int, _int2bytes
+
+
+class Test_int2bytes(unittest2.TestCase):
+ def test_accuracy(self):
+ self.assertEqual(int2bytes(123456789), b('\x07[\xcd\x15'))
+ self.assertEqual(_int2bytes(123456789), b('\x07[\xcd\x15'))
+
+ def test_codec_identity(self):
+ self.assertEqual(bytes2int(int2bytes(123456789, 128)), 123456789)
+ self.assertEqual(bytes2int(_int2bytes(123456789, 128)), 123456789)
+
+ def test_chunk_size(self):
+ self.assertEqual(int2bytes(123456789, 6), b('\x00\x00\x07[\xcd\x15'))
+ self.assertEqual(int2bytes(123456789, 7),
+ b('\x00\x00\x00\x07[\xcd\x15'))
+
+ self.assertEqual(_int2bytes(123456789, 6),
+ b('\x00\x00\x07[\xcd\x15'))
+ self.assertEqual(_int2bytes(123456789, 7),
+ b('\x00\x00\x00\x07[\xcd\x15'))
+
+ def test_zero(self):
+ self.assertEqual(int2bytes(0, 4), b('\x00') * 4)
+ self.assertEqual(int2bytes(0, 7), b('\x00') * 7)
+ self.assertEqual(int2bytes(0), b('\x00'))
+
+ self.assertEqual(_int2bytes(0, 4), b('\x00') * 4)
+ self.assertEqual(_int2bytes(0, 7), b('\x00') * 7)
+ self.assertEqual(_int2bytes(0), b('\x00'))
+
+ def test_correctness_against_base_implementation(self):
+ # Slow test.
+ values = [
+ 1 << 512,
+ 1 << 8192,
+ 1 << 77,
+ ]
+ for value in values:
+ self.assertEqual(int2bytes(value), _int2bytes(value),
+ "Boom %d" % value)
+ self.assertEqual(bytes2int(int2bytes(value)),
+ value,
+ "Boom %d" % value)
+ self.assertEqual(bytes2int(_int2bytes(value)),
+ value,
+ "Boom %d" % value)
+
+ def test_raises_OverflowError_when_chunk_size_is_insufficient(self):
+ self.assertRaises(OverflowError, int2bytes, 123456789, 3)
+ self.assertRaises(OverflowError, int2bytes, 299999999999, 4)
+
+ self.assertRaises(OverflowError, _int2bytes, 123456789, 3)
+ self.assertRaises(OverflowError, _int2bytes, 299999999999, 4)
+
+ def test_raises_ValueError_when_negative_integer(self):
+ self.assertRaises(ValueError, int2bytes, -1)
+ self.assertRaises(ValueError, _int2bytes, -1)
+
+ def test_raises_TypeError_when_not_integer(self):
+ self.assertRaises(TypeError, int2bytes, None)
+ self.assertRaises(TypeError, _int2bytes, None)
diff --git a/third_party/python/rsa/tests/test_varblock.py b/third_party/python/rsa/tests/test_varblock.py
new file mode 100644
index 0000000000..24ea50f1f6
--- /dev/null
+++ b/third_party/python/rsa/tests/test_varblock.py
@@ -0,0 +1,82 @@
+'''Tests varblock operations.'''
+
+
+try:
+ from StringIO import StringIO as BytesIO
+except ImportError:
+ from io import BytesIO
+import unittest
+
+import rsa
+from rsa._compat import b
+from rsa import varblock
+
+class VarintTest(unittest.TestCase):
+
+ def test_read_varint(self):
+
+ encoded = b('\xac\x02crummy')
+ infile = BytesIO(encoded)
+
+ (decoded, read) = varblock.read_varint(infile)
+
+ # Test the returned values
+ self.assertEqual(300, decoded)
+ self.assertEqual(2, read)
+
+ # The rest of the file should be untouched
+ self.assertEqual(b('crummy'), infile.read())
+
+ def test_read_zero(self):
+
+ encoded = b('\x00crummy')
+ infile = BytesIO(encoded)
+
+ (decoded, read) = varblock.read_varint(infile)
+
+ # Test the returned values
+ self.assertEqual(0, decoded)
+ self.assertEqual(1, read)
+
+ # The rest of the file should be untouched
+ self.assertEqual(b('crummy'), infile.read())
+
+ def test_write_varint(self):
+
+ expected = b('\xac\x02')
+ outfile = BytesIO()
+
+ written = varblock.write_varint(outfile, 300)
+
+ # Test the returned values
+ self.assertEqual(expected, outfile.getvalue())
+ self.assertEqual(2, written)
+
+
+ def test_write_zero(self):
+
+ outfile = BytesIO()
+ written = varblock.write_varint(outfile, 0)
+
+ # Test the returned values
+ self.assertEqual(b('\x00'), outfile.getvalue())
+ self.assertEqual(1, written)
+
+
+class VarblockTest(unittest.TestCase):
+
+ def test_yield_varblock(self):
+ infile = BytesIO(b('\x01\x0512345\x06Sybren'))
+
+ varblocks = list(varblock.yield_varblocks(infile))
+ self.assertEqual([b('12345'), b('Sybren')], varblocks)
+
+class FixedblockTest(unittest.TestCase):
+
+ def test_yield_fixedblock(self):
+
+ infile = BytesIO(b('123456Sybren'))
+
+ fixedblocks = list(varblock.yield_fixedblocks(infile, 6))
+ self.assertEqual([b('123456'), b('Sybren')], fixedblocks)
+
diff --git a/third_party/python/scandir/LICENSE.txt b/third_party/python/scandir/LICENSE.txt
new file mode 100644
index 0000000000..0759f503f2
--- /dev/null
+++ b/third_party/python/scandir/LICENSE.txt
@@ -0,0 +1,27 @@
+Copyright (c) 2012, Ben Hoyt
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+* Neither the name of Ben Hoyt nor the names of its contributors may be used
+to endorse or promote products derived from this software without specific
+prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/scandir/MANIFEST.in b/third_party/python/scandir/MANIFEST.in
new file mode 100644
index 0000000000..7524c5385a
--- /dev/null
+++ b/third_party/python/scandir/MANIFEST.in
@@ -0,0 +1,6 @@
+include *.py
+include *.c
+include *.h
+include *.txt
+include *.rst
+include test/*.py
diff --git a/third_party/python/scandir/PKG-INFO b/third_party/python/scandir/PKG-INFO
new file mode 100644
index 0000000000..e4625662b1
--- /dev/null
+++ b/third_party/python/scandir/PKG-INFO
@@ -0,0 +1,238 @@
+Metadata-Version: 1.1
+Name: scandir
+Version: 1.9.0
+Summary: scandir, a better directory iterator and faster os.walk()
+Home-page: https://github.com/benhoyt/scandir
+Author: Ben Hoyt
+Author-email: benhoyt@gmail.com
+License: New BSD License
+Description-Content-Type: UNKNOWN
+Description:
+ scandir, a better directory iterator and faster os.walk()
+ =========================================================
+
+ .. image:: https://img.shields.io/pypi/v/scandir.svg
+ :target: https://pypi.python.org/pypi/scandir
+ :alt: scandir on PyPI (Python Package Index)
+
+ .. image:: https://travis-ci.org/benhoyt/scandir.svg?branch=master
+ :target: https://travis-ci.org/benhoyt/scandir
+ :alt: Travis CI tests (Linux)
+
+ .. image:: https://ci.appveyor.com/api/projects/status/github/benhoyt/scandir?branch=master&svg=true
+ :target: https://ci.appveyor.com/project/benhoyt/scandir
+ :alt: Appveyor tests (Windows)
+
+
+ ``scandir()`` is a directory iteration function like ``os.listdir()``,
+ except that instead of returning a list of bare filenames, it yields
+ ``DirEntry`` objects that include file type and stat information along
+ with the name. Using ``scandir()`` increases the speed of ``os.walk()``
+ by 2-20 times (depending on the platform and file system) by avoiding
+ unnecessary calls to ``os.stat()`` in most cases.
+
+
+ Now included in a Python near you!
+ ----------------------------------
+
+ ``scandir`` has been included in the Python 3.5 standard library as
+ ``os.scandir()``, and the related performance improvements to
+ ``os.walk()`` have also been included. So if you're lucky enough to be
+ using Python 3.5 (release date September 13, 2015) you get the benefit
+ immediately, otherwise just
+ `download this module from PyPI <https://pypi.python.org/pypi/scandir>`_,
+ install it with ``pip install scandir``, and then do something like
+ this in your code:
+
+ .. code-block:: python
+
+ # Use the built-in version of scandir/walk if possible, otherwise
+ # use the scandir module version
+ try:
+ from os import scandir, walk
+ except ImportError:
+ from scandir import scandir, walk
+
+ `PEP 471 <https://www.python.org/dev/peps/pep-0471/>`_, which is the
+ PEP that proposes including ``scandir`` in the Python standard library,
+ was `accepted <https://mail.python.org/pipermail/python-dev/2014-July/135561.html>`_
+ in July 2014 by Victor Stinner, the BDFL-delegate for the PEP.
+
+ This ``scandir`` module is intended to work on Python 2.6+ and Python
+ 3.2+ (and it has been tested on those versions).
+
+
+ Background
+ ----------
+
+ Python's built-in ``os.walk()`` is significantly slower than it needs to be,
+ because -- in addition to calling ``listdir()`` on each directory -- it calls
+ ``stat()`` on each file to determine whether the filename is a directory or not.
+ But both ``FindFirstFile`` / ``FindNextFile`` on Windows and ``readdir`` on Linux/OS
+ X already tell you whether the files returned are directories or not, so
+ no further ``stat`` system calls are needed. In short, you can reduce the number
+ of system calls from about 2N to N, where N is the total number of files and
+ directories in the tree.
+
+ In practice, removing all those extra system calls makes ``os.walk()`` about
+ **7-50 times as fast on Windows, and about 3-10 times as fast on Linux and Mac OS
+ X.** So we're not talking about micro-optimizations. See more benchmarks
+ in the "Benchmarks" section below.
+
+ Somewhat relatedly, many people have also asked for a version of
+ ``os.listdir()`` that yields filenames as it iterates instead of returning them
+ as one big list. This improves memory efficiency for iterating very large
+ directories.
+
+ So as well as a faster ``walk()``, scandir adds a new ``scandir()`` function.
+ They're pretty easy to use, but see "The API" below for the full docs.
+
+
+ Benchmarks
+ ----------
+
+ Below are results showing how many times as fast ``scandir.walk()`` is than
+ ``os.walk()`` on various systems, found by running ``benchmark.py`` with no
+ arguments:
+
+ ==================== ============== =============
+ System version Python version Times as fast
+ ==================== ============== =============
+ Windows 7 64-bit 2.7.7 64-bit 10.4
+ Windows 7 64-bit SSD 2.7.7 64-bit 10.3
+ Windows 7 64-bit NFS 2.7.6 64-bit 36.8
+ Windows 7 64-bit SSD 3.4.1 64-bit 9.9
+ Windows 7 64-bit SSD 3.5.0 64-bit 9.5
+ CentOS 6.2 64-bit 2.6.6 64-bit 3.9
+ Ubuntu 14.04 64-bit 2.7.6 64-bit 5.8
+ Mac OS X 10.9.3 2.7.5 64-bit 3.8
+ ==================== ============== =============
+
+ All of the above tests were done using the fast C version of scandir
+ (source code in ``_scandir.c``).
+
+ Note that the gains are less than the above on smaller directories and greater
+ on larger directories. This is why ``benchmark.py`` creates a test directory
+ tree with a standardized size.
+
+
+ The API
+ -------
+
+ walk()
+ ~~~~~~
+
+ The API for ``scandir.walk()`` is exactly the same as ``os.walk()``, so just
+ `read the Python docs <https://docs.python.org/3.5/library/os.html#os.walk>`_.
+
+ scandir()
+ ~~~~~~~~~
+
+ The full docs for ``scandir()`` and the ``DirEntry`` objects it yields are
+ available in the `Python documentation here <https://docs.python.org/3.5/library/os.html#os.scandir>`_.
+ But below is a brief summary as well.
+
+ scandir(path='.') -> iterator of DirEntry objects for given path
+
+ Like ``listdir``, ``scandir`` calls the operating system's directory
+ iteration system calls to get the names of the files in the given
+ ``path``, but it's different from ``listdir`` in two ways:
+
+ * Instead of returning bare filename strings, it returns lightweight
+ ``DirEntry`` objects that hold the filename string and provide
+ simple methods that allow access to the additional data the
+ operating system may have returned.
+
+ * It returns a generator instead of a list, so that ``scandir`` acts
+ as a true iterator instead of returning the full list immediately.
+
+ ``scandir()`` yields a ``DirEntry`` object for each file and
+ sub-directory in ``path``. Just like ``listdir``, the ``'.'``
+ and ``'..'`` pseudo-directories are skipped, and the entries are
+ yielded in system-dependent order. Each ``DirEntry`` object has the
+ following attributes and methods:
+
+ * ``name``: the entry's filename, relative to the scandir ``path``
+ argument (corresponds to the return values of ``os.listdir``)
+
+ * ``path``: the entry's full path name (not necessarily an absolute
+ path) -- the equivalent of ``os.path.join(scandir_path, entry.name)``
+
+ * ``is_dir(*, follow_symlinks=True)``: similar to
+ ``pathlib.Path.is_dir()``, but the return value is cached on the
+ ``DirEntry`` object; doesn't require a system call in most cases;
+ don't follow symbolic links if ``follow_symlinks`` is False
+
+ * ``is_file(*, follow_symlinks=True)``: similar to
+ ``pathlib.Path.is_file()``, but the return value is cached on the
+ ``DirEntry`` object; doesn't require a system call in most cases;
+ don't follow symbolic links if ``follow_symlinks`` is False
+
+ * ``is_symlink()``: similar to ``pathlib.Path.is_symlink()``, but the
+ return value is cached on the ``DirEntry`` object; doesn't require a
+ system call in most cases
+
+ * ``stat(*, follow_symlinks=True)``: like ``os.stat()``, but the
+ return value is cached on the ``DirEntry`` object; does not require a
+ system call on Windows (except for symlinks); don't follow symbolic links
+ (like ``os.lstat()``) if ``follow_symlinks`` is False
+
+ * ``inode()``: return the inode number of the entry; the return value
+ is cached on the ``DirEntry`` object
+
+ Here's a very simple example of ``scandir()`` showing use of the
+ ``DirEntry.name`` attribute and the ``DirEntry.is_dir()`` method:
+
+ .. code-block:: python
+
+ def subdirs(path):
+ """Yield directory names not starting with '.' under given path."""
+ for entry in os.scandir(path):
+ if not entry.name.startswith('.') and entry.is_dir():
+ yield entry.name
+
+ This ``subdirs()`` function will be significantly faster with scandir
+ than ``os.listdir()`` and ``os.path.isdir()`` on both Windows and POSIX
+ systems, especially on medium-sized or large directories.
+
+
+ Further reading
+ ---------------
+
+ * `The Python docs for scandir <https://docs.python.org/3.5/library/os.html#os.scandir>`_
+ * `PEP 471 <https://www.python.org/dev/peps/pep-0471/>`_, the
+ (now-accepted) Python Enhancement Proposal that proposed adding
+ ``scandir`` to the standard library -- a lot of details here,
+ including rejected ideas and previous discussion
+
+
+ Flames, comments, bug reports
+ -----------------------------
+
+ Please send flames, comments, and questions about scandir to Ben Hoyt:
+
+ http://benhoyt.com/
+
+ File bug reports for the version in the Python 3.5 standard library
+ `here <https://docs.python.org/3.5/bugs.html>`_, or file bug reports
+ or feature requests for this module at the GitHub project page:
+
+ https://github.com/benhoyt/scandir
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Topic :: System :: Filesystems
+Classifier: Topic :: System :: Operating System
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: Implementation :: CPython
diff --git a/third_party/python/scandir/README.rst b/third_party/python/scandir/README.rst
new file mode 100644
index 0000000000..a5537517dd
--- /dev/null
+++ b/third_party/python/scandir/README.rst
@@ -0,0 +1,211 @@
+
+scandir, a better directory iterator and faster os.walk()
+=========================================================
+
+.. image:: https://img.shields.io/pypi/v/scandir.svg
+ :target: https://pypi.python.org/pypi/scandir
+ :alt: scandir on PyPI (Python Package Index)
+
+.. image:: https://travis-ci.org/benhoyt/scandir.svg?branch=master
+ :target: https://travis-ci.org/benhoyt/scandir
+ :alt: Travis CI tests (Linux)
+
+.. image:: https://ci.appveyor.com/api/projects/status/github/benhoyt/scandir?branch=master&svg=true
+ :target: https://ci.appveyor.com/project/benhoyt/scandir
+ :alt: Appveyor tests (Windows)
+
+
+``scandir()`` is a directory iteration function like ``os.listdir()``,
+except that instead of returning a list of bare filenames, it yields
+``DirEntry`` objects that include file type and stat information along
+with the name. Using ``scandir()`` increases the speed of ``os.walk()``
+by 2-20 times (depending on the platform and file system) by avoiding
+unnecessary calls to ``os.stat()`` in most cases.
+
+
+Now included in a Python near you!
+----------------------------------
+
+``scandir`` has been included in the Python 3.5 standard library as
+``os.scandir()``, and the related performance improvements to
+``os.walk()`` have also been included. So if you're lucky enough to be
+using Python 3.5 (release date September 13, 2015) you get the benefit
+immediately, otherwise just
+`download this module from PyPI <https://pypi.python.org/pypi/scandir>`_,
+install it with ``pip install scandir``, and then do something like
+this in your code:
+
+.. code-block:: python
+
+ # Use the built-in version of scandir/walk if possible, otherwise
+ # use the scandir module version
+ try:
+ from os import scandir, walk
+ except ImportError:
+ from scandir import scandir, walk
+
+`PEP 471 <https://www.python.org/dev/peps/pep-0471/>`_, which is the
+PEP that proposes including ``scandir`` in the Python standard library,
+was `accepted <https://mail.python.org/pipermail/python-dev/2014-July/135561.html>`_
+in July 2014 by Victor Stinner, the BDFL-delegate for the PEP.
+
+This ``scandir`` module is intended to work on Python 2.6+ and Python
+3.2+ (and it has been tested on those versions).
+
+
+Background
+----------
+
+Python's built-in ``os.walk()`` is significantly slower than it needs to be,
+because -- in addition to calling ``listdir()`` on each directory -- it calls
+``stat()`` on each file to determine whether the filename is a directory or not.
+But both ``FindFirstFile`` / ``FindNextFile`` on Windows and ``readdir`` on Linux/OS
+X already tell you whether the files returned are directories or not, so
+no further ``stat`` system calls are needed. In short, you can reduce the number
+of system calls from about 2N to N, where N is the total number of files and
+directories in the tree.
+
+In practice, removing all those extra system calls makes ``os.walk()`` about
+**7-50 times as fast on Windows, and about 3-10 times as fast on Linux and Mac OS
+X.** So we're not talking about micro-optimizations. See more benchmarks
+in the "Benchmarks" section below.
+
+Somewhat relatedly, many people have also asked for a version of
+``os.listdir()`` that yields filenames as it iterates instead of returning them
+as one big list. This improves memory efficiency for iterating very large
+directories.
+
+So as well as a faster ``walk()``, scandir adds a new ``scandir()`` function.
+They're pretty easy to use, but see "The API" below for the full docs.
+
+
+Benchmarks
+----------
+
+Below are results showing how many times as fast ``scandir.walk()`` is than
+``os.walk()`` on various systems, found by running ``benchmark.py`` with no
+arguments:
+
+==================== ============== =============
+System version Python version Times as fast
+==================== ============== =============
+Windows 7 64-bit 2.7.7 64-bit 10.4
+Windows 7 64-bit SSD 2.7.7 64-bit 10.3
+Windows 7 64-bit NFS 2.7.6 64-bit 36.8
+Windows 7 64-bit SSD 3.4.1 64-bit 9.9
+Windows 7 64-bit SSD 3.5.0 64-bit 9.5
+CentOS 6.2 64-bit 2.6.6 64-bit 3.9
+Ubuntu 14.04 64-bit 2.7.6 64-bit 5.8
+Mac OS X 10.9.3 2.7.5 64-bit 3.8
+==================== ============== =============
+
+All of the above tests were done using the fast C version of scandir
+(source code in ``_scandir.c``).
+
+Note that the gains are less than the above on smaller directories and greater
+on larger directories. This is why ``benchmark.py`` creates a test directory
+tree with a standardized size.
+
+
+The API
+-------
+
+walk()
+~~~~~~
+
+The API for ``scandir.walk()`` is exactly the same as ``os.walk()``, so just
+`read the Python docs <https://docs.python.org/3.5/library/os.html#os.walk>`_.
+
+scandir()
+~~~~~~~~~
+
+The full docs for ``scandir()`` and the ``DirEntry`` objects it yields are
+available in the `Python documentation here <https://docs.python.org/3.5/library/os.html#os.scandir>`_.
+But below is a brief summary as well.
+
+ scandir(path='.') -> iterator of DirEntry objects for given path
+
+Like ``listdir``, ``scandir`` calls the operating system's directory
+iteration system calls to get the names of the files in the given
+``path``, but it's different from ``listdir`` in two ways:
+
+* Instead of returning bare filename strings, it returns lightweight
+ ``DirEntry`` objects that hold the filename string and provide
+ simple methods that allow access to the additional data the
+ operating system may have returned.
+
+* It returns a generator instead of a list, so that ``scandir`` acts
+ as a true iterator instead of returning the full list immediately.
+
+``scandir()`` yields a ``DirEntry`` object for each file and
+sub-directory in ``path``. Just like ``listdir``, the ``'.'``
+and ``'..'`` pseudo-directories are skipped, and the entries are
+yielded in system-dependent order. Each ``DirEntry`` object has the
+following attributes and methods:
+
+* ``name``: the entry's filename, relative to the scandir ``path``
+ argument (corresponds to the return values of ``os.listdir``)
+
+* ``path``: the entry's full path name (not necessarily an absolute
+ path) -- the equivalent of ``os.path.join(scandir_path, entry.name)``
+
+* ``is_dir(*, follow_symlinks=True)``: similar to
+ ``pathlib.Path.is_dir()``, but the return value is cached on the
+ ``DirEntry`` object; doesn't require a system call in most cases;
+ don't follow symbolic links if ``follow_symlinks`` is False
+
+* ``is_file(*, follow_symlinks=True)``: similar to
+ ``pathlib.Path.is_file()``, but the return value is cached on the
+ ``DirEntry`` object; doesn't require a system call in most cases;
+ don't follow symbolic links if ``follow_symlinks`` is False
+
+* ``is_symlink()``: similar to ``pathlib.Path.is_symlink()``, but the
+ return value is cached on the ``DirEntry`` object; doesn't require a
+ system call in most cases
+
+* ``stat(*, follow_symlinks=True)``: like ``os.stat()``, but the
+ return value is cached on the ``DirEntry`` object; does not require a
+ system call on Windows (except for symlinks); don't follow symbolic links
+ (like ``os.lstat()``) if ``follow_symlinks`` is False
+
+* ``inode()``: return the inode number of the entry; the return value
+ is cached on the ``DirEntry`` object
+
+Here's a very simple example of ``scandir()`` showing use of the
+``DirEntry.name`` attribute and the ``DirEntry.is_dir()`` method:
+
+.. code-block:: python
+
+ def subdirs(path):
+ """Yield directory names not starting with '.' under given path."""
+ for entry in os.scandir(path):
+ if not entry.name.startswith('.') and entry.is_dir():
+ yield entry.name
+
+This ``subdirs()`` function will be significantly faster with scandir
+than ``os.listdir()`` and ``os.path.isdir()`` on both Windows and POSIX
+systems, especially on medium-sized or large directories.
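+
+As a further illustrative sketch (not taken from the upstream Python docs),
+the cached ``stat()`` result can be used to add up file sizes without issuing
+extra ``os.stat()`` system calls on Windows:
+
+.. code-block:: python
+
+    def total_size(path):
+        """Return the combined size in bytes of the regular files in path."""
+        total = 0
+        for entry in os.scandir(path):
+            if entry.is_file():
+                total += entry.stat().st_size
+        return total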
+
+
+Further reading
+---------------
+
+* `The Python docs for scandir <https://docs.python.org/3.5/library/os.html#os.scandir>`_
+* `PEP 471 <https://www.python.org/dev/peps/pep-0471/>`_, the
+ (now-accepted) Python Enhancement Proposal that proposed adding
+ ``scandir`` to the standard library -- a lot of details here,
+ including rejected ideas and previous discussion
+
+
+Flames, comments, bug reports
+-----------------------------
+
+Please send flames, comments, and questions about scandir to Ben Hoyt:
+
+http://benhoyt.com/
+
+File bug reports for the version in the Python 3.5 standard library
+`here <https://docs.python.org/3.5/bugs.html>`_, or file bug reports
+or feature requests for this module at the GitHub project page:
+
+https://github.com/benhoyt/scandir
diff --git a/third_party/python/scandir/_scandir.c b/third_party/python/scandir/_scandir.c
new file mode 100644
index 0000000000..b35f17041d
--- /dev/null
+++ b/third_party/python/scandir/_scandir.c
@@ -0,0 +1,1833 @@
+/* C speedups for scandir module
+
+This is divided into four sections (each prefixed with a "SECTION:"
+comment):
+
+1) Python 2/3 compatibility
+2) Helper utilities from posixmodule.c, fileutils.h, etc
+3) Main DirEntry and scandir implementation, taken from
+ Python 3.5's posixmodule.c
+4) Module and method definitions and initialization code
+
+*/
+
+#include <Python.h>
+#include <structseq.h>
+#include <structmember.h>
+#include "osdefs.h"
+
+#ifdef MS_WINDOWS
+#include <windows.h>
+#include "winreparse.h"
+#else
+#include <dirent.h>
+#ifndef HAVE_DIRENT_H
+#define HAVE_DIRENT_H 1
+#endif
+#endif
+
+#define MODNAME "scandir"
+
+
+/* SECTION: Python 2/3 compatibility */
+
+#if PY_MAJOR_VERSION >= 3
+#define INIT_ERROR return NULL
+#else
+#define INIT_ERROR return
+// Because on PyPy, Py_FileSystemDefaultEncoding is (was) defined to be NULL
+// (see PyPy Bitbucket issue #2669)
+#define FS_ENCODING (Py_FileSystemDefaultEncoding ? Py_FileSystemDefaultEncoding : "UTF-8")
+#endif
+
+#if PY_MAJOR_VERSION < 3 || PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION <= 2
+#define _Py_IDENTIFIER(name) static char * PyId_##name = #name;
+#define _PyObject_GetAttrId(obj, pyid_name) PyObject_GetAttrString((obj), *(pyid_name))
+#define PyExc_FileNotFoundError PyExc_OSError
+#define PyUnicode_AsUnicodeAndSize(unicode, addr_length) \
+ PyUnicode_AsUnicode(unicode); *(addr_length) = PyUnicode_GetSize(unicode)
+#endif
+
+// Needed because PyPy does not work without these definitions
+#if PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION > 2 && defined(PYPY_VERSION_NUM)
+#define _Py_IDENTIFIER(name) static char * PyId_##name = #name;
+#define _PyObject_GetAttrId(obj, pyid_name) PyObject_GetAttrString((obj), *(pyid_name))
+#endif
+
+/* SECTION: Helper utilities from posixmodule.c, fileutils.h, etc */
+
+#if !defined(MS_WINDOWS) && defined(DT_UNKNOWN)
+#define HAVE_DIRENT_D_TYPE 1
+#endif
+
+#ifdef HAVE_DIRENT_H
+#include <dirent.h>
+#define NAMLEN(dirent) strlen((dirent)->d_name)
+#else
+#if defined(__WATCOMC__) && !defined(__QNX__)
+#include <direct.h>
+#define NAMLEN(dirent) strlen((dirent)->d_name)
+#else
+#define dirent direct
+#define NAMLEN(dirent) (dirent)->d_namlen
+#endif
+#ifdef HAVE_SYS_NDIR_H
+#include <sys/ndir.h>
+#endif
+#ifdef HAVE_SYS_DIR_H
+#include <sys/dir.h>
+#endif
+#ifdef HAVE_NDIR_H
+#include <ndir.h>
+#endif
+#endif
+
+#ifndef Py_CLEANUP_SUPPORTED
+#define Py_CLEANUP_SUPPORTED 0x20000
+#endif
+
+#ifndef S_IFLNK
+/* Windows doesn't define S_IFLNK but posixmodule.c maps
+ * IO_REPARSE_TAG_SYMLINK to S_IFLNK */
+# define S_IFLNK 0120000
+#endif
+
+// _Py_stat_struct is already defined in fileutils.h on Python 3.5+
+// But not in PyPy
+#if PY_MAJOR_VERSION < 3 || (PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION < 5) || defined(PYPY_VERSION_NUM)
+#ifdef MS_WINDOWS
+struct _Py_stat_struct {
+ unsigned long st_dev;
+ unsigned __int64 st_ino;
+ unsigned short st_mode;
+ int st_nlink;
+ int st_uid;
+ int st_gid;
+ unsigned long st_rdev;
+ __int64 st_size;
+ time_t st_atime;
+ int st_atime_nsec;
+ time_t st_mtime;
+ int st_mtime_nsec;
+ time_t st_ctime;
+ int st_ctime_nsec;
+ unsigned long st_file_attributes;
+};
+#else
+# define _Py_stat_struct stat
+#endif
+#endif
+
+/* choose the appropriate stat and fstat functions and return structs */
+#undef STAT
+#undef FSTAT
+#undef STRUCT_STAT
+#ifdef MS_WINDOWS
+# define STAT win32_stat
+# define LSTAT win32_lstat
+# define FSTAT _Py_fstat_noraise
+# define STRUCT_STAT struct _Py_stat_struct
+#else
+# define STAT stat
+# define LSTAT lstat
+# define FSTAT fstat
+# define STRUCT_STAT struct stat
+#endif
+
+#ifdef MS_WINDOWS
+
+static __int64 secs_between_epochs = 11644473600; /* Seconds between 1.1.1601 and 1.1.1970 */
+
+static void
+FILE_TIME_to_time_t_nsec(FILETIME *in_ptr, time_t *time_out, int* nsec_out)
+{
+ /* XXX endianness. Shouldn't matter, as all Windows implementations are little-endian */
+ /* Cannot simply cast and dereference in_ptr,
+ since it might not be aligned properly */
+ __int64 in;
+ memcpy(&in, in_ptr, sizeof(in));
+ *nsec_out = (int)(in % 10000000) * 100; /* FILETIME is in units of 100 nsec. */
+ *time_out = Py_SAFE_DOWNCAST((in / 10000000) - secs_between_epochs, __int64, time_t);
+}
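+
+/* Worked example (illustrative): a FILETIME of 116444736010000000 (in units
+   of 100 ns since 1601-01-01) has no sub-second remainder, so *nsec_out is 0,
+   and 116444736010000000 / 10000000 - 11644473600 == 1, i.e. one second after
+   the Unix epoch (1970-01-01 00:00:01 UTC). */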
+
+/* Below, we *know* that ugo+r is 0444 */
+#if _S_IREAD != 0400
+#error Unsupported C library
+#endif
+static int
+attributes_to_mode(DWORD attr)
+{
+ int m = 0;
+ if (attr & FILE_ATTRIBUTE_DIRECTORY)
+ m |= _S_IFDIR | 0111; /* IFEXEC for user,group,other */
+ else
+ m |= _S_IFREG;
+ if (attr & FILE_ATTRIBUTE_READONLY)
+ m |= 0444;
+ else
+ m |= 0666;
+ return m;
+}
+
+void
+_Py_attribute_data_to_stat(BY_HANDLE_FILE_INFORMATION *info, ULONG reparse_tag,
+ struct _Py_stat_struct *result)
+{
+ memset(result, 0, sizeof(*result));
+ result->st_mode = attributes_to_mode(info->dwFileAttributes);
+ result->st_size = (((__int64)info->nFileSizeHigh)<<32) + info->nFileSizeLow;
+ result->st_dev = info->dwVolumeSerialNumber;
+ result->st_rdev = result->st_dev;
+ FILE_TIME_to_time_t_nsec(&info->ftCreationTime, &result->st_ctime, &result->st_ctime_nsec);
+ FILE_TIME_to_time_t_nsec(&info->ftLastWriteTime, &result->st_mtime, &result->st_mtime_nsec);
+ FILE_TIME_to_time_t_nsec(&info->ftLastAccessTime, &result->st_atime, &result->st_atime_nsec);
+ result->st_nlink = info->nNumberOfLinks;
+ result->st_ino = (((unsigned __int64)info->nFileIndexHigh)<<32) + info->nFileIndexLow;
+ if (reparse_tag == IO_REPARSE_TAG_SYMLINK) {
+ /* first clear the S_IFMT bits */
+ result->st_mode ^= (result->st_mode & S_IFMT);
+ /* now set the bits that make this a symlink */
+ result->st_mode |= S_IFLNK;
+ }
+ result->st_file_attributes = info->dwFileAttributes;
+}
+
+static BOOL
+get_target_path(HANDLE hdl, wchar_t **target_path)
+{
+ int buf_size, result_length;
+ wchar_t *buf;
+
+ /* We have a good handle to the target, use it to determine
+ the target path name (then we'll call lstat on it). */
+ buf_size = GetFinalPathNameByHandleW(hdl, 0, 0,
+ VOLUME_NAME_DOS);
+ if(!buf_size)
+ return FALSE;
+
+ buf = PyMem_New(wchar_t, buf_size+1);
+ if (!buf) {
+ SetLastError(ERROR_OUTOFMEMORY);
+ return FALSE;
+ }
+
+ result_length = GetFinalPathNameByHandleW(hdl,
+ buf, buf_size, VOLUME_NAME_DOS);
+
+ if(!result_length) {
+ PyMem_Free(buf);
+ return FALSE;
+ }
+
+ if(!CloseHandle(hdl)) {
+ PyMem_Free(buf);
+ return FALSE;
+ }
+
+ buf[result_length] = 0;
+
+ *target_path = buf;
+ return TRUE;
+}
+
+static int
+win32_get_reparse_tag(HANDLE reparse_point_handle, ULONG *reparse_tag)
+{
+ char target_buffer[MAXIMUM_REPARSE_DATA_BUFFER_SIZE];
+ REPARSE_DATA_BUFFER *rdb = (REPARSE_DATA_BUFFER *)target_buffer;
+ DWORD n_bytes_returned;
+
+ if (0 == DeviceIoControl(
+ reparse_point_handle,
+ FSCTL_GET_REPARSE_POINT,
+ NULL, 0, /* in buffer */
+ target_buffer, sizeof(target_buffer),
+ &n_bytes_returned,
+ NULL)) /* we're not using OVERLAPPED_IO */
+ return FALSE;
+
+ if (reparse_tag)
+ *reparse_tag = rdb->ReparseTag;
+
+ return TRUE;
+}
+
+static void
+find_data_to_file_info_w(WIN32_FIND_DATAW *pFileData,
+ BY_HANDLE_FILE_INFORMATION *info,
+ ULONG *reparse_tag)
+{
+ memset(info, 0, sizeof(*info));
+ info->dwFileAttributes = pFileData->dwFileAttributes;
+ info->ftCreationTime = pFileData->ftCreationTime;
+ info->ftLastAccessTime = pFileData->ftLastAccessTime;
+ info->ftLastWriteTime = pFileData->ftLastWriteTime;
+ info->nFileSizeHigh = pFileData->nFileSizeHigh;
+ info->nFileSizeLow = pFileData->nFileSizeLow;
+/* info->nNumberOfLinks = 1; */
+ if (pFileData->dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)
+ *reparse_tag = pFileData->dwReserved0;
+ else
+ *reparse_tag = 0;
+}
+
+static BOOL
+attributes_from_dir_w(LPCWSTR pszFile, BY_HANDLE_FILE_INFORMATION *info, ULONG *reparse_tag)
+{
+ HANDLE hFindFile;
+ WIN32_FIND_DATAW FileData;
+ hFindFile = FindFirstFileW(pszFile, &FileData);
+ if (hFindFile == INVALID_HANDLE_VALUE)
+ return FALSE;
+ FindClose(hFindFile);
+ find_data_to_file_info_w(&FileData, info, reparse_tag);
+ return TRUE;
+}
+
+static int
+win32_xstat_impl_w(const wchar_t *path, struct _Py_stat_struct *result,
+ BOOL traverse)
+{
+ int code;
+ HANDLE hFile, hFile2;
+ BY_HANDLE_FILE_INFORMATION info;
+ ULONG reparse_tag = 0;
+ wchar_t *target_path;
+ const wchar_t *dot;
+
+ hFile = CreateFileW(
+ path,
+ FILE_READ_ATTRIBUTES, /* desired access */
+ 0, /* share mode */
+ NULL, /* security attributes */
+ OPEN_EXISTING,
+ /* FILE_FLAG_BACKUP_SEMANTICS is required to open a directory */
+ /* FILE_FLAG_OPEN_REPARSE_POINT does not follow the symlink.
+ Because of this, calls like GetFinalPathNameByHandle will return
+ the symlink path again and not the actual final path. */
+ FILE_ATTRIBUTE_NORMAL|FILE_FLAG_BACKUP_SEMANTICS|
+ FILE_FLAG_OPEN_REPARSE_POINT,
+ NULL);
+
+ if (hFile == INVALID_HANDLE_VALUE) {
+ /* Either the target doesn't exist, or we don't have access to
+ get a handle to it. If the former, we need to return an error.
+ If the latter, we can use attributes_from_dir. */
+ if (GetLastError() != ERROR_SHARING_VIOLATION)
+ return -1;
+ /* Could not get attributes on open file. Fall back to
+ reading the directory. */
+ if (!attributes_from_dir_w(path, &info, &reparse_tag))
+ /* Very strange. This should not fail now */
+ return -1;
+ if (info.dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT) {
+ if (traverse) {
+ /* Should traverse, but could not open reparse point handle */
+ SetLastError(ERROR_SHARING_VIOLATION);
+ return -1;
+ }
+ }
+ } else {
+ if (!GetFileInformationByHandle(hFile, &info)) {
+ CloseHandle(hFile);
+ return -1;
+ }
+ if (info.dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT) {
+ if (!win32_get_reparse_tag(hFile, &reparse_tag))
+ return -1;
+
+ /* Close the outer open file handle now that we're about to
+ reopen it with different flags. */
+ if (!CloseHandle(hFile))
+ return -1;
+
+ if (traverse) {
+ /* In order to call GetFinalPathNameByHandle we need to open
+ the file without the reparse handling flag set. */
+ hFile2 = CreateFileW(
+ path, FILE_READ_ATTRIBUTES, FILE_SHARE_READ,
+ NULL, OPEN_EXISTING,
+ FILE_ATTRIBUTE_NORMAL|FILE_FLAG_BACKUP_SEMANTICS,
+ NULL);
+ if (hFile2 == INVALID_HANDLE_VALUE)
+ return -1;
+
+ if (!get_target_path(hFile2, &target_path))
+ return -1;
+
+ code = win32_xstat_impl_w(target_path, result, FALSE);
+ PyMem_Free(target_path);
+ return code;
+ }
+ } else
+ CloseHandle(hFile);
+ }
+ _Py_attribute_data_to_stat(&info, reparse_tag, result);
+
+ /* Set S_IEXEC if it is an .exe, .bat, ... */
+ dot = wcsrchr(path, '.');
+ if (dot) {
+ if (_wcsicmp(dot, L".bat") == 0 || _wcsicmp(dot, L".cmd") == 0 ||
+ _wcsicmp(dot, L".exe") == 0 || _wcsicmp(dot, L".com") == 0)
+ result->st_mode |= 0111;
+ }
+ return 0;
+}
+
+static int
+win32_xstat_w(const wchar_t *path, struct _Py_stat_struct *result, BOOL traverse)
+{
+ /* Protocol violation: we explicitly clear errno, instead of
+ setting it to a POSIX error. Callers should use GetLastError. */
+ int code = win32_xstat_impl_w(path, result, traverse);
+ errno = 0;
+ return code;
+}
+
+static int
+win32_lstat_w(const wchar_t* path, struct _Py_stat_struct *result)
+{
+ return win32_xstat_w(path, result, FALSE);
+}
+
+static int
+win32_stat_w(const wchar_t* path, struct _Py_stat_struct *result)
+{
+ return win32_xstat_w(path, result, TRUE);
+}
+
+#endif /* MS_WINDOWS */
+
+static PyTypeObject StatResultType;
+
+static PyObject *billion = NULL;
+
+static newfunc structseq_new;
+
+static PyObject *
+statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+ PyStructSequence *result;
+ int i;
+
+ result = (PyStructSequence*)structseq_new(type, args, kwds);
+ if (!result)
+ return NULL;
+ /* If we have been initialized from a tuple,
+ st_?time might be set to None. Initialize it
+ from the int slots. */
+ for (i = 7; i <= 9; i++) {
+ if (result->ob_item[i+3] == Py_None) {
+ Py_DECREF(Py_None);
+ Py_INCREF(result->ob_item[i]);
+ result->ob_item[i+3] = result->ob_item[i];
+ }
+ }
+ return (PyObject*)result;
+}
+
+/* If true, st_?time is float. */
+static int _stat_float_times = 1;
+
+static void
+fill_time(PyObject *v, int index, time_t sec, unsigned long nsec)
+{
+#if SIZEOF_TIME_T > SIZEOF_LONG
+ PyObject *s = PyLong_FromLongLong((PY_LONG_LONG)sec);
+#else
+#if PY_MAJOR_VERSION >= 3
+ PyObject *s = PyLong_FromLong((long)sec);
+#else
+ PyObject *s = PyInt_FromLong((long)sec);
+#endif
+#endif
+ PyObject *ns_fractional = PyLong_FromUnsignedLong(nsec);
+ PyObject *s_in_ns = NULL;
+ PyObject *ns_total = NULL;
+ PyObject *float_s = NULL;
+
+ if (!(s && ns_fractional))
+ goto exit;
+
+ s_in_ns = PyNumber_Multiply(s, billion);
+ if (!s_in_ns)
+ goto exit;
+
+ ns_total = PyNumber_Add(s_in_ns, ns_fractional);
+ if (!ns_total)
+ goto exit;
+
+ if (_stat_float_times) {
+ float_s = PyFloat_FromDouble(sec + 1e-9*nsec);
+ if (!float_s)
+ goto exit;
+ }
+ else {
+ float_s = s;
+ Py_INCREF(float_s);
+ }
+
+ PyStructSequence_SET_ITEM(v, index, s);
+ PyStructSequence_SET_ITEM(v, index+3, float_s);
+ PyStructSequence_SET_ITEM(v, index+6, ns_total);
+ s = NULL;
+ float_s = NULL;
+ ns_total = NULL;
+exit:
+ Py_XDECREF(s);
+ Py_XDECREF(ns_fractional);
+ Py_XDECREF(s_in_ns);
+ Py_XDECREF(ns_total);
+ Py_XDECREF(float_s);
+}
+
+#ifdef MS_WINDOWS
+#define HAVE_STAT_NSEC 1
+#define HAVE_STRUCT_STAT_ST_FILE_ATTRIBUTES 1
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_BLKSIZE
+#define ST_BLKSIZE_IDX 16
+#else
+#define ST_BLKSIZE_IDX 15
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_BLOCKS
+#define ST_BLOCKS_IDX (ST_BLKSIZE_IDX+1)
+#else
+#define ST_BLOCKS_IDX ST_BLKSIZE_IDX
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_RDEV
+#define ST_RDEV_IDX (ST_BLOCKS_IDX+1)
+#else
+#define ST_RDEV_IDX ST_BLOCKS_IDX
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_FLAGS
+#define ST_FLAGS_IDX (ST_RDEV_IDX+1)
+#else
+#define ST_FLAGS_IDX ST_RDEV_IDX
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_GEN
+#define ST_GEN_IDX (ST_FLAGS_IDX+1)
+#else
+#define ST_GEN_IDX ST_FLAGS_IDX
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_BIRTHTIME
+#define ST_BIRTHTIME_IDX (ST_GEN_IDX+1)
+#else
+#define ST_BIRTHTIME_IDX ST_GEN_IDX
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_FILE_ATTRIBUTES
+#define ST_FILE_ATTRIBUTES_IDX (ST_BIRTHTIME_IDX+1)
+#else
+#define ST_FILE_ATTRIBUTES_IDX ST_BIRTHTIME_IDX
+#endif
+
+#ifdef HAVE_LONG_LONG
+# define _PyLong_FromDev PyLong_FromLongLong
+#else
+# define _PyLong_FromDev PyLong_FromLong
+#endif
+
+#ifndef MS_WINDOWS
+PyObject *
+_PyLong_FromUid(uid_t uid)
+{
+ if (uid == (uid_t)-1)
+ return PyLong_FromLong(-1);
+ return PyLong_FromUnsignedLong(uid);
+}
+
+PyObject *
+_PyLong_FromGid(gid_t gid)
+{
+ if (gid == (gid_t)-1)
+ return PyLong_FromLong(-1);
+ return PyLong_FromUnsignedLong(gid);
+}
+#endif
+
+/* pack a system stat C structure into the Python stat tuple
+ (used by posix_stat() and posix_fstat()) */
+static PyObject*
+_pystat_fromstructstat(STRUCT_STAT *st)
+{
+ unsigned long ansec, mnsec, cnsec;
+ PyObject *v = PyStructSequence_New(&StatResultType);
+ if (v == NULL)
+ return NULL;
+
+ PyStructSequence_SET_ITEM(v, 0, PyLong_FromLong((long)st->st_mode));
+#ifdef HAVE_LARGEFILE_SUPPORT
+ PyStructSequence_SET_ITEM(v, 1,
+ PyLong_FromUnsignedLongLong(st->st_ino));
+#else
+ PyStructSequence_SET_ITEM(v, 1, PyLong_FromUnsignedLong((unsigned long)st->st_ino));
+#endif
+#ifdef MS_WINDOWS
+ PyStructSequence_SET_ITEM(v, 2, PyLong_FromUnsignedLong(st->st_dev));
+#else
+ PyStructSequence_SET_ITEM(v, 2, _PyLong_FromDev(st->st_dev));
+#endif
+ PyStructSequence_SET_ITEM(v, 3, PyLong_FromLong((long)st->st_nlink));
+#if defined(MS_WINDOWS)
+ PyStructSequence_SET_ITEM(v, 4, PyLong_FromLong(0));
+ PyStructSequence_SET_ITEM(v, 5, PyLong_FromLong(0));
+#else
+ PyStructSequence_SET_ITEM(v, 4, _PyLong_FromUid(st->st_uid));
+ PyStructSequence_SET_ITEM(v, 5, _PyLong_FromGid(st->st_gid));
+#endif
+#ifdef HAVE_LARGEFILE_SUPPORT
+ PyStructSequence_SET_ITEM(v, 6,
+ PyLong_FromLongLong((PY_LONG_LONG)st->st_size));
+#else
+ PyStructSequence_SET_ITEM(v, 6, PyLong_FromLong(st->st_size));
+#endif
+
+#if defined(HAVE_STAT_TV_NSEC)
+ ansec = st->st_atim.tv_nsec;
+ mnsec = st->st_mtim.tv_nsec;
+ cnsec = st->st_ctim.tv_nsec;
+#elif defined(HAVE_STAT_TV_NSEC2)
+ ansec = st->st_atimespec.tv_nsec;
+ mnsec = st->st_mtimespec.tv_nsec;
+ cnsec = st->st_ctimespec.tv_nsec;
+#elif defined(HAVE_STAT_NSEC)
+ ansec = st->st_atime_nsec;
+ mnsec = st->st_mtime_nsec;
+ cnsec = st->st_ctime_nsec;
+#else
+ ansec = mnsec = cnsec = 0;
+#endif
+ fill_time(v, 7, st->st_atime, ansec);
+ fill_time(v, 8, st->st_mtime, mnsec);
+ fill_time(v, 9, st->st_ctime, cnsec);
+
+#ifdef HAVE_STRUCT_STAT_ST_BLKSIZE
+ PyStructSequence_SET_ITEM(v, ST_BLKSIZE_IDX,
+ PyLong_FromLong((long)st->st_blksize));
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_BLOCKS
+ PyStructSequence_SET_ITEM(v, ST_BLOCKS_IDX,
+ PyLong_FromLong((long)st->st_blocks));
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_RDEV
+ PyStructSequence_SET_ITEM(v, ST_RDEV_IDX,
+ PyLong_FromLong((long)st->st_rdev));
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_GEN
+ PyStructSequence_SET_ITEM(v, ST_GEN_IDX,
+ PyLong_FromLong((long)st->st_gen));
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_BIRTHTIME
+ {
+ PyObject *val;
+ unsigned long bsec,bnsec;
+ bsec = (long)st->st_birthtime;
+#ifdef HAVE_STAT_TV_NSEC2
+ bnsec = st->st_birthtimespec.tv_nsec;
+#else
+ bnsec = 0;
+#endif
+ if (_stat_float_times) {
+ val = PyFloat_FromDouble(bsec + 1e-9*bnsec);
+ } else {
+ val = PyLong_FromLong((long)bsec);
+ }
+ PyStructSequence_SET_ITEM(v, ST_BIRTHTIME_IDX,
+ val);
+ }
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_FLAGS
+ PyStructSequence_SET_ITEM(v, ST_FLAGS_IDX,
+ PyLong_FromLong((long)st->st_flags));
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_FILE_ATTRIBUTES
+ PyStructSequence_SET_ITEM(v, ST_FILE_ATTRIBUTES_IDX,
+ PyLong_FromUnsignedLong(st->st_file_attributes));
+#endif
+
+ if (PyErr_Occurred()) {
+ Py_DECREF(v);
+ return NULL;
+ }
+
+ return v;
+}
+
+char *PyStructSequence_UnnamedField = "unnamed field";
+
+PyDoc_STRVAR(stat_result__doc__,
+"stat_result: Result from stat, fstat, or lstat.\n\n\
+This object may be accessed either as a tuple of\n\
+ (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime)\n\
+or via the attributes st_mode, st_ino, st_dev, st_nlink, st_uid, and so on.\n\
+\n\
+POSIX/Windows: If your platform supports st_blksize, st_blocks, st_rdev,\n\
+or st_flags, they are available as attributes only.\n\
+\n\
+See os.stat for more information.");
+
+static PyStructSequence_Field stat_result_fields[] = {
+ {"st_mode", "protection bits"},
+ {"st_ino", "inode"},
+ {"st_dev", "device"},
+ {"st_nlink", "number of hard links"},
+ {"st_uid", "user ID of owner"},
+ {"st_gid", "group ID of owner"},
+ {"st_size", "total size, in bytes"},
+ /* The NULL is replaced with PyStructSequence_UnnamedField later. */
+ {NULL, "integer time of last access"},
+ {NULL, "integer time of last modification"},
+ {NULL, "integer time of last change"},
+ {"st_atime", "time of last access"},
+ {"st_mtime", "time of last modification"},
+ {"st_ctime", "time of last change"},
+ {"st_atime_ns", "time of last access in nanoseconds"},
+ {"st_mtime_ns", "time of last modification in nanoseconds"},
+ {"st_ctime_ns", "time of last change in nanoseconds"},
+#ifdef HAVE_STRUCT_STAT_ST_BLKSIZE
+ {"st_blksize", "blocksize for filesystem I/O"},
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_BLOCKS
+ {"st_blocks", "number of blocks allocated"},
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_RDEV
+ {"st_rdev", "device type (if inode device)"},
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_FLAGS
+ {"st_flags", "user defined flags for file"},
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_GEN
+ {"st_gen", "generation number"},
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_BIRTHTIME
+ {"st_birthtime", "time of creation"},
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_FILE_ATTRIBUTES
+ {"st_file_attributes", "Windows file attribute bits"},
+#endif
+ {0}
+};
+
+static PyStructSequence_Desc stat_result_desc = {
+ "scandir.stat_result", /* name */
+ stat_result__doc__, /* doc */
+ stat_result_fields,
+ 10
+};
+
+
+#ifdef MS_WINDOWS
+static int
+win32_warn_bytes_api()
+{
+ return PyErr_WarnEx(PyExc_DeprecationWarning,
+ "The Windows bytes API has been deprecated, "
+ "use Unicode filenames instead",
+ 1);
+}
+#endif
+
+typedef struct {
+ const char *function_name;
+ const char *argument_name;
+ int nullable;
+ wchar_t *wide;
+ char *narrow;
+ int fd;
+ Py_ssize_t length;
+ PyObject *object;
+ PyObject *cleanup;
+} path_t;
+
+static void
+path_cleanup(path_t *path) {
+ if (path->cleanup) {
+ Py_CLEAR(path->cleanup);
+ }
+}
+
+static int
+path_converter(PyObject *o, void *p) {
+ path_t *path = (path_t *)p;
+ PyObject *unicode, *bytes;
+ Py_ssize_t length;
+ char *narrow;
+
+#define FORMAT_EXCEPTION(exc, fmt) \
+ PyErr_Format(exc, "%s%s" fmt, \
+ path->function_name ? path->function_name : "", \
+ path->function_name ? ": " : "", \
+ path->argument_name ? path->argument_name : "path")
+
+ /* Py_CLEANUP_SUPPORTED support */
+ if (o == NULL) {
+ path_cleanup(path);
+ return 1;
+ }
+
+ /* ensure it's always safe to call path_cleanup() */
+ path->cleanup = NULL;
+
+ if (o == Py_None) {
+ if (!path->nullable) {
+ FORMAT_EXCEPTION(PyExc_TypeError,
+ "can't specify None for %s argument");
+ return 0;
+ }
+ path->wide = NULL;
+ path->narrow = NULL;
+ path->length = 0;
+ path->object = o;
+ path->fd = -1;
+ return 1;
+ }
+
+ unicode = PyUnicode_FromObject(o);
+ if (unicode) {
+#ifdef MS_WINDOWS
+ wchar_t *wide;
+
+ wide = PyUnicode_AsUnicodeAndSize(unicode, &length);
+ if (!wide) {
+ Py_DECREF(unicode);
+ return 0;
+ }
+ if (length > 32767) {
+ FORMAT_EXCEPTION(PyExc_ValueError, "%s too long for Windows");
+ Py_DECREF(unicode);
+ return 0;
+ }
+ if (wcslen(wide) != length) {
+ FORMAT_EXCEPTION(PyExc_ValueError, "embedded null character");
+ Py_DECREF(unicode);
+ return 0;
+ }
+
+ path->wide = wide;
+ path->narrow = NULL;
+ path->length = length;
+ path->object = o;
+ path->fd = -1;
+ path->cleanup = unicode;
+ return Py_CLEANUP_SUPPORTED;
+#else
+#if PY_MAJOR_VERSION >= 3
+ if (!PyUnicode_FSConverter(unicode, &bytes))
+ bytes = NULL;
+#else
+ bytes = PyUnicode_AsEncodedString(unicode, FS_ENCODING, "strict");
+#endif
+ Py_DECREF(unicode);
+#endif
+ }
+ else {
+ PyErr_Clear();
+#if PY_MAJOR_VERSION >= 3
+ if (PyObject_CheckBuffer(o)) {
+ bytes = PyBytes_FromObject(o);
+ }
+#else
+ if (PyString_Check(o)) {
+ bytes = o;
+ Py_INCREF(bytes);
+ }
+#endif
+ else
+ bytes = NULL;
+ if (!bytes) {
+ PyErr_Clear();
+ }
+ }
+
+ if (!bytes) {
+ if (!PyErr_Occurred())
+ FORMAT_EXCEPTION(PyExc_TypeError, "illegal type for %s parameter");
+ return 0;
+ }
+
+#ifdef MS_WINDOWS
+ if (win32_warn_bytes_api()) {
+ Py_DECREF(bytes);
+ return 0;
+ }
+#endif
+
+ length = PyBytes_GET_SIZE(bytes);
+#ifdef MS_WINDOWS
+ if (length > MAX_PATH-1) {
+ FORMAT_EXCEPTION(PyExc_ValueError, "%s too long for Windows");
+ Py_DECREF(bytes);
+ return 0;
+ }
+#endif
+
+ narrow = PyBytes_AS_STRING(bytes);
+ if ((size_t)length != strlen(narrow)) {
+ FORMAT_EXCEPTION(PyExc_ValueError, "embedded null character in %s");
+ Py_DECREF(bytes);
+ return 0;
+ }
+
+ path->wide = NULL;
+ path->narrow = narrow;
+ path->length = length;
+ path->object = o;
+ path->fd = -1;
+ path->cleanup = bytes;
+ return Py_CLEANUP_SUPPORTED;
+}
+
+static PyObject *
+path_error(path_t *path)
+{
+#ifdef MS_WINDOWS
+ return PyErr_SetExcFromWindowsErrWithFilenameObject(PyExc_OSError,
+ 0, path->object);
+#else
+ return PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, path->object);
+#endif
+}
+
+
+/* SECTION: Main DirEntry and scandir implementation, taken from
+ Python 3.5's posixmodule.c */
+
+PyDoc_STRVAR(posix_scandir__doc__,
+"scandir(path='.') -> iterator of DirEntry objects for given path");
+
+static char *follow_symlinks_keywords[] = {"follow_symlinks", NULL};
+#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 3
+static char *follow_symlinks_format = "|$p:DirEntry.stat";
+#else
+static char *follow_symlinks_format = "|i:DirEntry.stat";
+#endif
+
+typedef struct {
+ PyObject_HEAD
+ PyObject *name;
+ PyObject *path;
+ PyObject *stat;
+ PyObject *lstat;
+#ifdef MS_WINDOWS
+ struct _Py_stat_struct win32_lstat;
+ unsigned __int64 win32_file_index;
+ int got_file_index;
+#if PY_MAJOR_VERSION < 3
+ int name_path_bytes;
+#endif
+#else /* POSIX */
+#ifdef HAVE_DIRENT_D_TYPE
+ unsigned char d_type;
+#endif
+ ino_t d_ino;
+#endif
+} DirEntry;
+
+static void
+DirEntry_dealloc(DirEntry *entry)
+{
+ Py_XDECREF(entry->name);
+ Py_XDECREF(entry->path);
+ Py_XDECREF(entry->stat);
+ Py_XDECREF(entry->lstat);
+ Py_TYPE(entry)->tp_free((PyObject *)entry);
+}
+
+/* Forward reference */
+static int
+DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits);
+
+/* Set exception and return -1 on error, 0 for False, 1 for True */
+static int
+DirEntry_is_symlink(DirEntry *self)
+{
+#ifdef MS_WINDOWS
+ return (self->win32_lstat.st_mode & S_IFMT) == S_IFLNK;
+#elif defined(HAVE_DIRENT_D_TYPE)
+ /* POSIX */
+ if (self->d_type != DT_UNKNOWN)
+ return self->d_type == DT_LNK;
+ else
+ return DirEntry_test_mode(self, 0, S_IFLNK);
+#else
+ /* POSIX without d_type */
+ return DirEntry_test_mode(self, 0, S_IFLNK);
+#endif
+}
+
+static PyObject *
+DirEntry_py_is_symlink(DirEntry *self)
+{
+ int result;
+
+ result = DirEntry_is_symlink(self);
+ if (result == -1)
+ return NULL;
+ return PyBool_FromLong(result);
+}
+
+static PyObject *
+DirEntry_fetch_stat(DirEntry *self, int follow_symlinks)
+{
+ int result;
+ struct _Py_stat_struct st;
+
+#ifdef MS_WINDOWS
+ wchar_t *path;
+
+ path = PyUnicode_AsUnicode(self->path);
+ if (!path)
+ return NULL;
+
+ if (follow_symlinks)
+ result = win32_stat_w(path, &st);
+ else
+ result = win32_lstat_w(path, &st);
+
+ if (result != 0) {
+ return PyErr_SetExcFromWindowsErrWithFilenameObject(PyExc_OSError,
+ 0, self->path);
+ }
+#else /* POSIX */
+ PyObject *bytes;
+ char *path;
+
+#if PY_MAJOR_VERSION >= 3
+ if (!PyUnicode_FSConverter(self->path, &bytes))
+ return NULL;
+#else
+ if (PyString_Check(self->path)) {
+ bytes = self->path;
+ Py_INCREF(bytes);
+ } else {
+ bytes = PyUnicode_AsEncodedString(self->path, FS_ENCODING, "strict");
+ if (!bytes)
+ return NULL;
+ }
+#endif
+ path = PyBytes_AS_STRING(bytes);
+
+ if (follow_symlinks)
+ result = STAT(path, &st);
+ else
+ result = LSTAT(path, &st);
+ Py_DECREF(bytes);
+
+ if (result != 0)
+ return PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, self->path);
+#endif
+
+ return _pystat_fromstructstat(&st);
+}
+
+static PyObject *
+DirEntry_get_lstat(DirEntry *self)
+{
+ if (!self->lstat) {
+#ifdef MS_WINDOWS
+ self->lstat = _pystat_fromstructstat(&self->win32_lstat);
+#else /* POSIX */
+ self->lstat = DirEntry_fetch_stat(self, 0);
+#endif
+ }
+ Py_XINCREF(self->lstat);
+ return self->lstat;
+}
+
+static PyObject *
+DirEntry_get_stat(DirEntry *self, int follow_symlinks)
+{
+ if (!follow_symlinks)
+ return DirEntry_get_lstat(self);
+
+ if (!self->stat) {
+ int result = DirEntry_is_symlink(self);
+ if (result == -1)
+ return NULL;
+ else if (result)
+ self->stat = DirEntry_fetch_stat(self, 1);
+ else
+ self->stat = DirEntry_get_lstat(self);
+ }
+
+ Py_XINCREF(self->stat);
+ return self->stat;
+}
+
+static PyObject *
+DirEntry_stat(DirEntry *self, PyObject *args, PyObject *kwargs)
+{
+ int follow_symlinks = 1;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, follow_symlinks_format,
+ follow_symlinks_keywords, &follow_symlinks))
+ return NULL;
+
+ return DirEntry_get_stat(self, follow_symlinks);
+}
+
+/* Set exception and return -1 on error, 0 for False, 1 for True */
+static int
+DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits)
+{
+ PyObject *stat = NULL;
+ PyObject *st_mode = NULL;
+ long mode;
+ int result;
+#if defined(MS_WINDOWS) || defined(HAVE_DIRENT_D_TYPE)
+ int is_symlink;
+ int need_stat;
+#endif
+#ifdef MS_WINDOWS
+ unsigned long dir_bits;
+#endif
+ _Py_IDENTIFIER(st_mode);
+
+#ifdef MS_WINDOWS
+ is_symlink = (self->win32_lstat.st_mode & S_IFMT) == S_IFLNK;
+ need_stat = follow_symlinks && is_symlink;
+#elif defined(HAVE_DIRENT_D_TYPE)
+ is_symlink = self->d_type == DT_LNK;
+ need_stat = self->d_type == DT_UNKNOWN || (follow_symlinks && is_symlink);
+#endif
+
+#if defined(MS_WINDOWS) || defined(HAVE_DIRENT_D_TYPE)
+ if (need_stat) {
+#endif
+ stat = DirEntry_get_stat(self, follow_symlinks);
+ if (!stat) {
+ if (PyErr_ExceptionMatches(PyExc_FileNotFoundError)) {
+ /* If file doesn't exist (anymore), then return False
+ (i.e., say it's not a file/directory) */
+ PyErr_Clear();
+ return 0;
+ }
+ goto error;
+ }
+ st_mode = _PyObject_GetAttrId(stat, &PyId_st_mode);
+ if (!st_mode)
+ goto error;
+
+ mode = PyLong_AsLong(st_mode);
+ if (mode == -1 && PyErr_Occurred())
+ goto error;
+ Py_CLEAR(st_mode);
+ Py_CLEAR(stat);
+ result = (mode & S_IFMT) == mode_bits;
+#if defined(MS_WINDOWS) || defined(HAVE_DIRENT_D_TYPE)
+ }
+ else if (is_symlink) {
+ assert(mode_bits != S_IFLNK);
+ result = 0;
+ }
+ else {
+ assert(mode_bits == S_IFDIR || mode_bits == S_IFREG);
+#ifdef MS_WINDOWS
+ dir_bits = self->win32_lstat.st_file_attributes & FILE_ATTRIBUTE_DIRECTORY;
+ if (mode_bits == S_IFDIR)
+ result = dir_bits != 0;
+ else
+ result = dir_bits == 0;
+#else /* POSIX */
+ if (mode_bits == S_IFDIR)
+ result = self->d_type == DT_DIR;
+ else
+ result = self->d_type == DT_REG;
+#endif
+ }
+#endif
+
+ return result;
+
+error:
+ Py_XDECREF(st_mode);
+ Py_XDECREF(stat);
+ return -1;
+}
+
+static PyObject *
+DirEntry_py_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits)
+{
+ int result;
+
+ result = DirEntry_test_mode(self, follow_symlinks, mode_bits);
+ if (result == -1)
+ return NULL;
+ return PyBool_FromLong(result);
+}
+
+static PyObject *
+DirEntry_is_dir(DirEntry *self, PyObject *args, PyObject *kwargs)
+{
+ int follow_symlinks = 1;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, follow_symlinks_format,
+ follow_symlinks_keywords, &follow_symlinks))
+ return NULL;
+
+ return DirEntry_py_test_mode(self, follow_symlinks, S_IFDIR);
+}
+
+static PyObject *
+DirEntry_is_file(DirEntry *self, PyObject *args, PyObject *kwargs)
+{
+ int follow_symlinks = 1;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, follow_symlinks_format,
+ follow_symlinks_keywords, &follow_symlinks))
+ return NULL;
+
+ return DirEntry_py_test_mode(self, follow_symlinks, S_IFREG);
+}
+
+static PyObject *
+DirEntry_inode(DirEntry *self)
+{
+#ifdef MS_WINDOWS
+ if (!self->got_file_index) {
+ wchar_t *path;
+ struct _Py_stat_struct stat;
+
+ path = PyUnicode_AsUnicode(self->path);
+ if (!path)
+ return NULL;
+
+ if (win32_lstat_w(path, &stat) != 0) {
+ return PyErr_SetExcFromWindowsErrWithFilenameObject(PyExc_OSError,
+ 0, self->path);
+ }
+
+ self->win32_file_index = stat.st_ino;
+ self->got_file_index = 1;
+ }
+ return PyLong_FromUnsignedLongLong(self->win32_file_index);
+#else /* POSIX */
+#ifdef HAVE_LARGEFILE_SUPPORT
+ return PyLong_FromUnsignedLongLong(self->d_ino);
+#else
+ return PyLong_FromUnsignedLong((unsigned long)self->d_ino);
+#endif
+#endif
+}
+
+#if PY_MAJOR_VERSION < 3 && defined(MS_WINDOWS)
+
+PyObject *DirEntry_name_getter(DirEntry *self, void *closure) {
+ if (self->name_path_bytes) {
+ return PyUnicode_EncodeMBCS(PyUnicode_AS_UNICODE(self->name),
+ PyUnicode_GetSize(self->name), "strict");
+ } else {
+ Py_INCREF(self->name);
+ return self->name;
+ }
+}
+
+PyObject *DirEntry_path_getter(DirEntry *self, void *closure) {
+ if (self->name_path_bytes) {
+ return PyUnicode_EncodeMBCS(PyUnicode_AS_UNICODE(self->path),
+ PyUnicode_GetSize(self->path), "strict");
+ } else {
+ Py_INCREF(self->path);
+ return self->path;
+ }
+}
+
+static PyGetSetDef DirEntry_getset[] = {
+ {"name", (getter)DirEntry_name_getter, NULL,
+ "the entry's base filename, relative to scandir() \"path\" argument", NULL},
+ {"path", (getter)DirEntry_path_getter, NULL,
+ "the entry's full path name; equivalent to os.path.join(scandir_path, entry.name)", NULL},
+ {NULL}
+};
+
+#else
+
+static PyMemberDef DirEntry_members[] = {
+ {"name", T_OBJECT_EX, offsetof(DirEntry, name), READONLY,
+ "the entry's base filename, relative to scandir() \"path\" argument"},
+ {"path", T_OBJECT_EX, offsetof(DirEntry, path), READONLY,
+ "the entry's full path name; equivalent to os.path.join(scandir_path, entry.name)"},
+ {NULL}
+};
+
+#endif
+
+static PyObject *
+DirEntry_repr(DirEntry *self)
+{
+#if PY_MAJOR_VERSION >= 3
+ return PyUnicode_FromFormat("<DirEntry %R>", self->name);
+#elif defined(MS_WINDOWS)
+ PyObject *name;
+ PyObject *name_repr;
+ PyObject *entry_repr;
+
+ name = DirEntry_name_getter(self, NULL);
+ if (!name)
+ return NULL;
+ name_repr = PyObject_Repr(name);
+ Py_DECREF(name);
+ if (!name_repr)
+ return NULL;
+ entry_repr = PyString_FromFormat("<DirEntry %s>", PyString_AsString(name_repr));
+ Py_DECREF(name_repr);
+ return entry_repr;
+#else
+ PyObject *name_repr;
+ PyObject *entry_repr;
+
+ name_repr = PyObject_Repr(self->name);
+ if (!name_repr)
+ return NULL;
+ entry_repr = PyString_FromFormat("<DirEntry %s>", PyString_AsString(name_repr));
+ Py_DECREF(name_repr);
+ return entry_repr;
+#endif
+}
+
+static PyMethodDef DirEntry_methods[] = {
+ {"is_dir", (PyCFunction)DirEntry_is_dir, METH_VARARGS | METH_KEYWORDS,
+ "return True if the entry is a directory; cached per entry"
+ },
+ {"is_file", (PyCFunction)DirEntry_is_file, METH_VARARGS | METH_KEYWORDS,
+ "return True if the entry is a file; cached per entry"
+ },
+ {"is_symlink", (PyCFunction)DirEntry_py_is_symlink, METH_NOARGS,
+ "return True if the entry is a symbolic link; cached per entry"
+ },
+ {"stat", (PyCFunction)DirEntry_stat, METH_VARARGS | METH_KEYWORDS,
+ "return stat_result object for the entry; cached per entry"
+ },
+ {"inode", (PyCFunction)DirEntry_inode, METH_NOARGS,
+ "return inode of the entry; cached per entry",
+ },
+ {NULL}
+};
+
+static PyTypeObject DirEntryType = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ MODNAME ".DirEntry", /* tp_name */
+ sizeof(DirEntry), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ /* methods */
+ (destructor)DirEntry_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+ 0, /* tp_compare */
+ (reprfunc)DirEntry_repr, /* tp_repr */
+ 0, /* tp_as_number */
+ 0, /* tp_as_sequence */
+ 0, /* tp_as_mapping */
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ 0, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT, /* tp_flags */
+ 0, /* tp_doc */
+ 0, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
+ DirEntry_methods, /* tp_methods */
+#if PY_MAJOR_VERSION < 3 && defined(MS_WINDOWS)
+ NULL, /* tp_members */
+ DirEntry_getset, /* tp_getset */
+#else
+ DirEntry_members, /* tp_members */
+ NULL, /* tp_getset */
+#endif
+};
+
+#ifdef MS_WINDOWS
+
+static wchar_t *
+join_path_filenameW(wchar_t *path_wide, wchar_t* filename)
+{
+ Py_ssize_t path_len;
+ Py_ssize_t size;
+ wchar_t *result;
+ wchar_t ch;
+
+ if (!path_wide) { /* Default arg: "." */
+ path_wide = L".";
+ path_len = 1;
+ }
+ else {
+ path_len = wcslen(path_wide);
+ }
+
+ /* The +1's are for the path separator and the NUL */
+ size = path_len + 1 + wcslen(filename) + 1;
+ result = PyMem_New(wchar_t, size);
+ if (!result) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+ wcscpy(result, path_wide);
+ if (path_len > 0) {
+ ch = result[path_len - 1];
+ if (ch != SEP && ch != ALTSEP && ch != L':')
+ result[path_len++] = SEP;
+ wcscpy(result + path_len, filename);
+ }
+ return result;
+}
+
+static PyObject *
+DirEntry_from_find_data(path_t *path, WIN32_FIND_DATAW *dataW)
+{
+ DirEntry *entry;
+ BY_HANDLE_FILE_INFORMATION file_info;
+ ULONG reparse_tag;
+ wchar_t *joined_path;
+
+ entry = PyObject_New(DirEntry, &DirEntryType);
+ if (!entry)
+ return NULL;
+ entry->name = NULL;
+ entry->path = NULL;
+ entry->stat = NULL;
+ entry->lstat = NULL;
+ entry->got_file_index = 0;
+#if PY_MAJOR_VERSION < 3
+ entry->name_path_bytes = path->object && PyBytes_Check(path->object);
+#endif
+
+ entry->name = PyUnicode_FromWideChar(dataW->cFileName, wcslen(dataW->cFileName));
+ if (!entry->name)
+ goto error;
+
+ joined_path = join_path_filenameW(path->wide, dataW->cFileName);
+ if (!joined_path)
+ goto error;
+
+ entry->path = PyUnicode_FromWideChar(joined_path, wcslen(joined_path));
+ PyMem_Free(joined_path);
+ if (!entry->path)
+ goto error;
+
+ find_data_to_file_info_w(dataW, &file_info, &reparse_tag);
+ _Py_attribute_data_to_stat(&file_info, reparse_tag, &entry->win32_lstat);
+
+ return (PyObject *)entry;
+
+error:
+ Py_DECREF(entry);
+ return NULL;
+}
+
+#else /* POSIX */
+
+static char *
+join_path_filename(char *path_narrow, char* filename, Py_ssize_t filename_len)
+{
+ Py_ssize_t path_len;
+ Py_ssize_t size;
+ char *result;
+
+ if (!path_narrow) { /* Default arg: "." */
+ path_narrow = ".";
+ path_len = 1;
+ }
+ else {
+ path_len = strlen(path_narrow);
+ }
+
+ if (filename_len == -1)
+ filename_len = strlen(filename);
+
+ /* The +1's are for the path separator and the NUL */
+ size = path_len + 1 + filename_len + 1;
+ result = PyMem_New(char, size);
+ if (!result) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+ strcpy(result, path_narrow);
+ if (path_len > 0 && result[path_len - 1] != '/')
+ result[path_len++] = '/';
+ strcpy(result + path_len, filename);
+ return result;
+}
+
+static PyObject *
+DirEntry_from_posix_info(path_t *path, char *name, Py_ssize_t name_len,
+ ino_t d_ino
+#ifdef HAVE_DIRENT_D_TYPE
+ , unsigned char d_type
+#endif
+ )
+{
+ DirEntry *entry;
+ char *joined_path;
+
+ entry = PyObject_New(DirEntry, &DirEntryType);
+ if (!entry)
+ return NULL;
+ entry->name = NULL;
+ entry->path = NULL;
+ entry->stat = NULL;
+ entry->lstat = NULL;
+
+ joined_path = join_path_filename(path->narrow, name, name_len);
+ if (!joined_path)
+ goto error;
+
+ if (!path->narrow || !PyBytes_Check(path->object)) {
+#if PY_MAJOR_VERSION >= 3
+ entry->name = PyUnicode_DecodeFSDefaultAndSize(name, name_len);
+ entry->path = PyUnicode_DecodeFSDefault(joined_path);
+#else
+ entry->name = PyUnicode_Decode(name, name_len,
+ FS_ENCODING, "strict");
+ entry->path = PyUnicode_Decode(joined_path, strlen(joined_path),
+ FS_ENCODING, "strict");
+#endif
+ }
+ else {
+ entry->name = PyBytes_FromStringAndSize(name, name_len);
+ entry->path = PyBytes_FromString(joined_path);
+ }
+ PyMem_Free(joined_path);
+ if (!entry->name || !entry->path)
+ goto error;
+
+#ifdef HAVE_DIRENT_D_TYPE
+ entry->d_type = d_type;
+#endif
+ entry->d_ino = d_ino;
+
+ return (PyObject *)entry;
+
+error:
+ Py_XDECREF(entry);
+ return NULL;
+}
+
+#endif
+
+
+typedef struct {
+ PyObject_HEAD
+ path_t path;
+#ifdef MS_WINDOWS
+ HANDLE handle;
+ WIN32_FIND_DATAW file_data;
+ int first_time;
+#else /* POSIX */
+ DIR *dirp;
+#endif
+} ScandirIterator;
+
+#ifdef MS_WINDOWS
+
+static void
+ScandirIterator_close(ScandirIterator *iterator)
+{
+ if (iterator->handle == INVALID_HANDLE_VALUE)
+ return;
+
+ Py_BEGIN_ALLOW_THREADS
+ FindClose(iterator->handle);
+ Py_END_ALLOW_THREADS
+ iterator->handle = INVALID_HANDLE_VALUE;
+}
+
+static PyObject *
+ScandirIterator_iternext(ScandirIterator *iterator)
+{
+ WIN32_FIND_DATAW *file_data = &iterator->file_data;
+ BOOL success;
+
+ /* Happens if the iterator is iterated twice */
+ if (iterator->handle == INVALID_HANDLE_VALUE) {
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+ }
+
+ while (1) {
+ if (!iterator->first_time) {
+ Py_BEGIN_ALLOW_THREADS
+ success = FindNextFileW(iterator->handle, file_data);
+ Py_END_ALLOW_THREADS
+ if (!success) {
+ if (GetLastError() != ERROR_NO_MORE_FILES)
+ return path_error(&iterator->path);
+ /* No more files found in directory, stop iterating */
+ break;
+ }
+ }
+ iterator->first_time = 0;
+
+ /* Skip over . and .. */
+ if (wcscmp(file_data->cFileName, L".") != 0 &&
+ wcscmp(file_data->cFileName, L"..") != 0)
+ return DirEntry_from_find_data(&iterator->path, file_data);
+
+ /* Loop till we get a non-dot directory or finish iterating */
+ }
+
+ ScandirIterator_close(iterator);
+
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+}
+
+#else /* POSIX */
+
+static void
+ScandirIterator_close(ScandirIterator *iterator)
+{
+ if (!iterator->dirp)
+ return;
+
+ Py_BEGIN_ALLOW_THREADS
+ closedir(iterator->dirp);
+ Py_END_ALLOW_THREADS
+ iterator->dirp = NULL;
+ return;
+}
+
+static PyObject *
+ScandirIterator_iternext(ScandirIterator *iterator)
+{
+ struct dirent *direntp;
+ Py_ssize_t name_len;
+ int is_dot;
+
+ /* Happens if the iterator is iterated twice */
+ if (!iterator->dirp) {
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+ }
+
+ while (1) {
+ errno = 0;
+ Py_BEGIN_ALLOW_THREADS
+ direntp = readdir(iterator->dirp);
+ Py_END_ALLOW_THREADS
+
+ if (!direntp) {
+ if (errno != 0)
+ return path_error(&iterator->path);
+ /* No more files found in directory, stop iterating */
+ break;
+ }
+
+ /* Skip over . and .. */
+ name_len = NAMLEN(direntp);
+ is_dot = direntp->d_name[0] == '.' &&
+ (name_len == 1 || (direntp->d_name[1] == '.' && name_len == 2));
+ if (!is_dot) {
+ return DirEntry_from_posix_info(&iterator->path, direntp->d_name,
+ name_len, direntp->d_ino
+#ifdef HAVE_DIRENT_D_TYPE
+ , direntp->d_type
+#endif
+ );
+ }
+
+ /* Loop till we get a non-dot directory or finish iterating */
+ }
+
+ ScandirIterator_close(iterator);
+
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+}
+
+#endif
+
+static void
+ScandirIterator_dealloc(ScandirIterator *iterator)
+{
+ ScandirIterator_close(iterator);
+ Py_XDECREF(iterator->path.object);
+ path_cleanup(&iterator->path);
+ Py_TYPE(iterator)->tp_free((PyObject *)iterator);
+}
+
+static PyTypeObject ScandirIteratorType = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ MODNAME ".ScandirIterator", /* tp_name */
+ sizeof(ScandirIterator), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ /* methods */
+ (destructor)ScandirIterator_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+ 0, /* tp_compare */
+ 0, /* tp_repr */
+ 0, /* tp_as_number */
+ 0, /* tp_as_sequence */
+ 0, /* tp_as_mapping */
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ 0, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT, /* tp_flags */
+ 0, /* tp_doc */
+ 0, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ PyObject_SelfIter, /* tp_iter */
+ (iternextfunc)ScandirIterator_iternext, /* tp_iternext */
+};
+
+static PyObject *
+posix_scandir(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+ ScandirIterator *iterator;
+ static char *keywords[] = {"path", NULL};
+#ifdef MS_WINDOWS
+ wchar_t *path_strW;
+#else
+ char *path;
+#endif
+
+ iterator = PyObject_New(ScandirIterator, &ScandirIteratorType);
+ if (!iterator)
+ return NULL;
+ memset(&iterator->path, 0, sizeof(path_t));
+ iterator->path.function_name = "scandir";
+ iterator->path.nullable = 1;
+
+#ifdef MS_WINDOWS
+ iterator->handle = INVALID_HANDLE_VALUE;
+#else
+ iterator->dirp = NULL;
+#endif
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|O&:scandir", keywords,
+ path_converter, &iterator->path))
+ goto error;
+
+ /* path_converter doesn't keep path.object around, so do it
+ manually for the lifetime of the iterator here (the refcount
+ is decremented in ScandirIterator_dealloc)
+ */
+ Py_XINCREF(iterator->path.object);
+
+#ifdef MS_WINDOWS
+ if (iterator->path.narrow) {
+ PyErr_SetString(PyExc_TypeError,
+ "os.scandir() doesn't support bytes path on Windows, use Unicode instead");
+ goto error;
+ }
+ iterator->first_time = 1;
+
+ path_strW = join_path_filenameW(iterator->path.wide, L"*.*");
+ if (!path_strW)
+ goto error;
+
+ Py_BEGIN_ALLOW_THREADS
+ iterator->handle = FindFirstFileW(path_strW, &iterator->file_data);
+ Py_END_ALLOW_THREADS
+
+ PyMem_Free(path_strW);
+
+ if (iterator->handle == INVALID_HANDLE_VALUE) {
+ path_error(&iterator->path);
+ goto error;
+ }
+#else /* POSIX */
+ if (iterator->path.narrow)
+ path = iterator->path.narrow;
+ else
+ path = ".";
+
+ errno = 0;
+ Py_BEGIN_ALLOW_THREADS
+ iterator->dirp = opendir(path);
+ Py_END_ALLOW_THREADS
+
+ if (!iterator->dirp) {
+ path_error(&iterator->path);
+ goto error;
+ }
+#endif
+
+ return (PyObject *)iterator;
+
+error:
+ Py_DECREF(iterator);
+ return NULL;
+}
+
+
+/* SECTION: Module and method definitions and initialization code */
+
+static PyMethodDef scandir_methods[] = {
+ {"scandir", (PyCFunction)posix_scandir,
+ METH_VARARGS | METH_KEYWORDS,
+ posix_scandir__doc__},
+ {NULL, NULL},
+};
+
+#if PY_MAJOR_VERSION >= 3
+static struct PyModuleDef moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "_scandir",
+ NULL,
+ 0,
+ scandir_methods,
+ NULL,
+ NULL,
+ NULL,
+ NULL,
+};
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+PyObject *
+PyInit__scandir(void)
+{
+ PyObject *module = PyModule_Create(&moduledef);
+#else
+void
+init_scandir(void)
+{
+ PyObject *module = Py_InitModule("_scandir", scandir_methods);
+#endif
+ if (module == NULL) {
+ INIT_ERROR;
+ }
+
+ billion = PyLong_FromLong(1000000000);
+ if (!billion)
+ INIT_ERROR;
+
+ stat_result_desc.fields[7].name = PyStructSequence_UnnamedField;
+ stat_result_desc.fields[8].name = PyStructSequence_UnnamedField;
+ stat_result_desc.fields[9].name = PyStructSequence_UnnamedField;
+ PyStructSequence_InitType(&StatResultType, &stat_result_desc);
+ structseq_new = StatResultType.tp_new;
+ StatResultType.tp_new = statresult_new;
+
+ if (PyType_Ready(&ScandirIteratorType) < 0)
+ INIT_ERROR;
+ if (PyType_Ready(&DirEntryType) < 0)
+ INIT_ERROR;
+
+ PyModule_AddObject(module, "DirEntry", (PyObject *)&DirEntryType);
+
+#if PY_MAJOR_VERSION >= 3
+ return module;
+#endif
+}
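A minimal usage sketch of the API the extension above implements (illustrative only, not part of the patch): it assumes the scandir package is importable and that 'some_dir' stands in for an existing directory. Because each DirEntry carries the type information returned by FindFirstFileW/readdir, is_dir()/is_file() can usually answer without an extra stat() call.

import scandir

def count_entries(path):
    # Count regular files and directories; 'path' is only read, never modified.
    n_files = n_dirs = 0
    for entry in scandir.scandir(path):          # lazily yields DirEntry objects
        if entry.is_dir(follow_symlinks=False):  # usually answered from cached find data / d_type
            n_dirs += 1
        elif entry.is_file(follow_symlinks=False):
            n_files += 1
    return n_files, n_dirs

print(count_entries('some_dir'))  # 'some_dir' is a placeholder path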
diff --git a/third_party/python/scandir/benchmark.py b/third_party/python/scandir/benchmark.py
new file mode 100644
index 0000000000..89a4b9d891
--- /dev/null
+++ b/third_party/python/scandir/benchmark.py
@@ -0,0 +1,192 @@
+"""Simple benchmark to compare the speed of scandir.walk() with os.walk()."""
+
+import optparse
+import os
+import stat
+import sys
+import timeit
+
+import warnings
+with warnings.catch_warnings(record=True):
+ import scandir
+
+DEPTH = 4
+NUM_DIRS = 5
+NUM_FILES = 50
+
+
+def os_walk_pre_35(top, topdown=True, onerror=None, followlinks=False):
+ """Pre Python 3.5 implementation of os.walk() that doesn't use scandir."""
+ islink, join, isdir = os.path.islink, os.path.join, os.path.isdir
+
+ try:
+ names = os.listdir(top)
+ except OSError as err:
+ if onerror is not None:
+ onerror(err)
+ return
+
+ dirs, nondirs = [], []
+ for name in names:
+ if isdir(join(top, name)):
+ dirs.append(name)
+ else:
+ nondirs.append(name)
+
+ if topdown:
+ yield top, dirs, nondirs
+ for name in dirs:
+ new_path = join(top, name)
+ if followlinks or not islink(new_path):
+ for x in os_walk_pre_35(new_path, topdown, onerror, followlinks):
+ yield x
+ if not topdown:
+ yield top, dirs, nondirs
+
+
+def create_tree(path, depth=DEPTH):
+ """Create a directory tree at path with given depth, and NUM_DIRS and
+ NUM_FILES at each level.
+ """
+ os.mkdir(path)
+ for i in range(NUM_FILES):
+ filename = os.path.join(path, 'file{0:03}.txt'.format(i))
+ with open(filename, 'wb') as f:
+ f.write(b'foo')
+ if depth <= 1:
+ return
+ for i in range(NUM_DIRS):
+ dirname = os.path.join(path, 'dir{0:03}'.format(i))
+ create_tree(dirname, depth - 1)
+
+
+def get_tree_size(path):
+ """Return total size of all files in directory tree at path."""
+ size = 0
+ try:
+ for entry in scandir.scandir(path):
+ if entry.is_symlink():
+ pass
+ elif entry.is_dir():
+ size += get_tree_size(os.path.join(path, entry.name))
+ else:
+ size += entry.stat().st_size
+ except OSError:
+ pass
+ return size
+
+
+def benchmark(path, get_size=False):
+ sizes = {}
+
+ if get_size:
+ def do_os_walk():
+ size = 0
+ for root, dirs, files in os.walk(path):
+ for filename in files:
+ fullname = os.path.join(root, filename)
+ st = os.lstat(fullname)
+ if not stat.S_ISLNK(st.st_mode):
+ size += st.st_size
+ sizes['os_walk'] = size
+
+ def do_scandir_walk():
+ sizes['scandir_walk'] = get_tree_size(path)
+
+ else:
+ def do_os_walk():
+ for root, dirs, files in os.walk(path):
+ pass
+
+ def do_scandir_walk():
+ for root, dirs, files in scandir.walk(path):
+ pass
+
+ # Run this once first to cache things, so we're not benchmarking I/O
+ print("Priming the system's cache...")
+ do_scandir_walk()
+
+    # Use the best of 3 times for each of them to eliminate high outliers
+ os_walk_time = 1000000
+ scandir_walk_time = 1000000
+ N = 3
+ for i in range(N):
+ print('Benchmarking walks on {0}, repeat {1}/{2}...'.format(
+ path, i + 1, N))
+ os_walk_time = min(os_walk_time, timeit.timeit(do_os_walk, number=1))
+ scandir_walk_time = min(scandir_walk_time,
+ timeit.timeit(do_scandir_walk, number=1))
+
+ if get_size:
+ if sizes['os_walk'] == sizes['scandir_walk']:
+ equality = 'equal'
+ else:
+ equality = 'NOT EQUAL!'
+ print('os.walk size {0}, scandir.walk size {1} -- {2}'.format(
+ sizes['os_walk'], sizes['scandir_walk'], equality))
+
+ print('os.walk took {0:.3f}s, scandir.walk took {1:.3f}s -- {2:.1f}x as fast'.format(
+ os_walk_time, scandir_walk_time, os_walk_time / scandir_walk_time))
+
+
+if __name__ == '__main__':
+ usage = """Usage: benchmark.py [-h] [tree_dir]
+
+Create a large directory tree named "benchtree" (relative to this script) and
+benchmark os.walk() versus scandir.walk(). If tree_dir is specified, benchmark
+using it instead of creating a tree."""
+ parser = optparse.OptionParser(usage=usage)
+ parser.add_option('-s', '--size', action='store_true',
+ help='get size of directory tree while walking')
+ parser.add_option('-c', '--scandir', type='choice', choices=['best', 'generic', 'c', 'python', 'os'], default='best',
+ help='version of scandir() to use, default "%default"')
+ options, args = parser.parse_args()
+
+ if args:
+ tree_dir = args[0]
+ else:
+ tree_dir = os.path.join(os.path.dirname(__file__), 'benchtree')
+ if not os.path.exists(tree_dir):
+ print('Creating tree at {0}: depth={1}, num_dirs={2}, num_files={3}'.format(
+ tree_dir, DEPTH, NUM_DIRS, NUM_FILES))
+ create_tree(tree_dir)
+
+ if options.scandir == 'generic':
+ scandir.scandir = scandir.scandir_generic
+ elif options.scandir == 'c':
+ if scandir.scandir_c is None:
+ print("ERROR: Compiled C version of scandir not found!")
+ sys.exit(1)
+ scandir.scandir = scandir.scandir_c
+ elif options.scandir == 'python':
+ if scandir.scandir_python is None:
+ print("ERROR: Python version of scandir not found!")
+ sys.exit(1)
+ scandir.scandir = scandir.scandir_python
+ elif options.scandir == 'os':
+ if not hasattr(os, 'scandir'):
+ print("ERROR: Python 3.5's os.scandir() not found!")
+ sys.exit(1)
+ scandir.scandir = os.scandir
+ elif hasattr(os, 'scandir'):
+ scandir.scandir = os.scandir
+
+ if scandir.scandir == getattr(os, 'scandir', None):
+ print("Using Python 3.5's builtin os.scandir()")
+ elif scandir.scandir == scandir.scandir_c:
+ print('Using fast C version of scandir')
+ elif scandir.scandir == scandir.scandir_python:
+ print('Using slower ctypes version of scandir')
+ elif scandir.scandir == scandir.scandir_generic:
+ print('Using very slow generic version of scandir')
+ else:
+ print('ERROR: Unsure which version of scandir we are using!')
+ sys.exit(1)
+
+ if hasattr(os, 'scandir'):
+ os.walk = os_walk_pre_35
+ print('Comparing against pre-Python 3.5 version of os.walk()')
+ else:
+ print('Comparing against builtin version of os.walk()')
+
+ benchmark(tree_dir, get_size=options.size)
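The benchmark above primes the OS cache with one walk and then keeps the minimum of three timeit runs per implementation, so a single run inflated by unrelated system load does not skew the comparison. A compact sketch of that min-of-N pattern (illustrative; best_of and the sample callable are not part of the patch):

import timeit

def best_of(func, repeats=3):
    # Taking the minimum discards runs slowed down by transient load.
    return min(timeit.timeit(func, number=1) for _ in range(repeats))

print(best_of(lambda: sum(range(10000))))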
diff --git a/third_party/python/scandir/osdefs.h b/third_party/python/scandir/osdefs.h
new file mode 100644
index 0000000000..d678ca3b4d
--- /dev/null
+++ b/third_party/python/scandir/osdefs.h
@@ -0,0 +1,48 @@
+// from CPython
+#ifndef Py_OSDEFS_H
+#define Py_OSDEFS_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Operating system dependencies */
+
+#ifdef MS_WINDOWS
+#define SEP L'\\'
+#define ALTSEP L'/'
+#define MAXPATHLEN 256
+#define DELIM L';'
+#endif
+
+/* Filename separator */
+#ifndef SEP
+#define SEP L'/'
+#endif
+
+/* Max pathname length */
+#ifdef __hpux
+#include <sys/param.h>
+#include <limits.h>
+#ifndef PATH_MAX
+#define PATH_MAX MAXPATHLEN
+#endif
+#endif
+
+#ifndef MAXPATHLEN
+#if defined(PATH_MAX) && PATH_MAX > 1024
+#define MAXPATHLEN PATH_MAX
+#else
+#define MAXPATHLEN 1024
+#endif
+#endif
+
+/* Search path entry delimiter */
+#ifndef DELIM
+#define DELIM L':'
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_OSDEFS_H */
diff --git a/third_party/python/scandir/scandir.py b/third_party/python/scandir/scandir.py
new file mode 100644
index 0000000000..aac7208e8e
--- /dev/null
+++ b/third_party/python/scandir/scandir.py
@@ -0,0 +1,693 @@
+"""scandir, a better directory iterator and faster os.walk(), now in the Python 3.5 stdlib
+
+scandir() is a generator version of os.listdir() that returns an
+iterator over files in a directory, and also exposes the extra
+information most OSes provide while iterating files in a directory
+(such as type and stat information).
+
+This module also includes a version of os.walk() that uses scandir()
+to speed it up significantly.
+
+See README.md or https://github.com/benhoyt/scandir for rationale and
+docs, or read PEP 471 (https://www.python.org/dev/peps/pep-0471/) for
+more details on its inclusion into Python 3.5.
+
+scandir is released under the new BSD 3-clause license. See
+LICENSE.txt for the full license text.
+"""
+
+from __future__ import division
+
+from errno import ENOENT
+from os import listdir, lstat, stat, strerror
+from os.path import join, islink
+from stat import S_IFDIR, S_IFLNK, S_IFREG
+import collections
+import sys
+
+try:
+ import _scandir
+except ImportError:
+ _scandir = None
+
+try:
+ import ctypes
+except ImportError:
+ ctypes = None
+
+if _scandir is None and ctypes is None:
+ import warnings
+ warnings.warn("scandir can't find the compiled _scandir C module "
+ "or ctypes, using slow generic fallback")
+
+__version__ = '1.9.0'
+__all__ = ['scandir', 'walk']
+
+# Windows FILE_ATTRIBUTE constants for interpreting the
+# FIND_DATA.dwFileAttributes member
+FILE_ATTRIBUTE_ARCHIVE = 32
+FILE_ATTRIBUTE_COMPRESSED = 2048
+FILE_ATTRIBUTE_DEVICE = 64
+FILE_ATTRIBUTE_DIRECTORY = 16
+FILE_ATTRIBUTE_ENCRYPTED = 16384
+FILE_ATTRIBUTE_HIDDEN = 2
+FILE_ATTRIBUTE_INTEGRITY_STREAM = 32768
+FILE_ATTRIBUTE_NORMAL = 128
+FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 8192
+FILE_ATTRIBUTE_NO_SCRUB_DATA = 131072
+FILE_ATTRIBUTE_OFFLINE = 4096
+FILE_ATTRIBUTE_READONLY = 1
+FILE_ATTRIBUTE_REPARSE_POINT = 1024
+FILE_ATTRIBUTE_SPARSE_FILE = 512
+FILE_ATTRIBUTE_SYSTEM = 4
+FILE_ATTRIBUTE_TEMPORARY = 256
+FILE_ATTRIBUTE_VIRTUAL = 65536
+
+IS_PY3 = sys.version_info >= (3, 0)
+
+if IS_PY3:
+ unicode = str # Because Python <= 3.2 doesn't have u'unicode' syntax
+
+
+class GenericDirEntry(object):
+ __slots__ = ('name', '_stat', '_lstat', '_scandir_path', '_path')
+
+ def __init__(self, scandir_path, name):
+ self._scandir_path = scandir_path
+ self.name = name
+ self._stat = None
+ self._lstat = None
+ self._path = None
+
+ @property
+ def path(self):
+ if self._path is None:
+ self._path = join(self._scandir_path, self.name)
+ return self._path
+
+ def stat(self, follow_symlinks=True):
+ if follow_symlinks:
+ if self._stat is None:
+ self._stat = stat(self.path)
+ return self._stat
+ else:
+ if self._lstat is None:
+ self._lstat = lstat(self.path)
+ return self._lstat
+
+ # The code duplication below is intentional: this is for slightly
+ # better performance on systems that fall back to GenericDirEntry.
+ # It avoids an additional attribute lookup and method call, which
+ # are relatively slow on CPython.
+ def is_dir(self, follow_symlinks=True):
+ try:
+ st = self.stat(follow_symlinks=follow_symlinks)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False # Path doesn't exist or is a broken symlink
+ return st.st_mode & 0o170000 == S_IFDIR
+
+ def is_file(self, follow_symlinks=True):
+ try:
+ st = self.stat(follow_symlinks=follow_symlinks)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False # Path doesn't exist or is a broken symlink
+ return st.st_mode & 0o170000 == S_IFREG
+
+ def is_symlink(self):
+ try:
+ st = self.stat(follow_symlinks=False)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False # Path doesn't exist or is a broken symlink
+ return st.st_mode & 0o170000 == S_IFLNK
+
+ def inode(self):
+ st = self.stat(follow_symlinks=False)
+ return st.st_ino
+
+ def __str__(self):
+ return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name)
+
+ __repr__ = __str__
+
+
+def _scandir_generic(path=unicode('.')):
+ """Like os.listdir(), but yield DirEntry objects instead of returning
+ a list of names.
+ """
+ for name in listdir(path):
+ yield GenericDirEntry(path, name)
+
+
+if IS_PY3 and sys.platform == 'win32':
+ def scandir_generic(path=unicode('.')):
+ if isinstance(path, bytes):
+ raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead")
+ return _scandir_generic(path)
+ scandir_generic.__doc__ = _scandir_generic.__doc__
+else:
+ scandir_generic = _scandir_generic
+
+
+scandir_c = None
+scandir_python = None
+
+
+if sys.platform == 'win32':
+ if ctypes is not None:
+ from ctypes import wintypes
+
+ # Various constants from windows.h
+ INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value
+ ERROR_FILE_NOT_FOUND = 2
+ ERROR_NO_MORE_FILES = 18
+ IO_REPARSE_TAG_SYMLINK = 0xA000000C
+
+        # Number of seconds between 1601-01-01 and 1970-01-01
+ SECONDS_BETWEEN_EPOCHS = 11644473600
+
+ kernel32 = ctypes.windll.kernel32
+
+ # ctypes wrappers for (wide string versions of) FindFirstFile,
+ # FindNextFile, and FindClose
+ FindFirstFile = kernel32.FindFirstFileW
+ FindFirstFile.argtypes = [
+ wintypes.LPCWSTR,
+ ctypes.POINTER(wintypes.WIN32_FIND_DATAW),
+ ]
+ FindFirstFile.restype = wintypes.HANDLE
+
+ FindNextFile = kernel32.FindNextFileW
+ FindNextFile.argtypes = [
+ wintypes.HANDLE,
+ ctypes.POINTER(wintypes.WIN32_FIND_DATAW),
+ ]
+ FindNextFile.restype = wintypes.BOOL
+
+ FindClose = kernel32.FindClose
+ FindClose.argtypes = [wintypes.HANDLE]
+ FindClose.restype = wintypes.BOOL
+
+ Win32StatResult = collections.namedtuple('Win32StatResult', [
+ 'st_mode',
+ 'st_ino',
+ 'st_dev',
+ 'st_nlink',
+ 'st_uid',
+ 'st_gid',
+ 'st_size',
+ 'st_atime',
+ 'st_mtime',
+ 'st_ctime',
+ 'st_atime_ns',
+ 'st_mtime_ns',
+ 'st_ctime_ns',
+ 'st_file_attributes',
+ ])
+
+ def filetime_to_time(filetime):
+ """Convert Win32 FILETIME to time since Unix epoch in seconds."""
+ total = filetime.dwHighDateTime << 32 | filetime.dwLowDateTime
+ return total / 10000000 - SECONDS_BETWEEN_EPOCHS
+
+ def find_data_to_stat(data):
+ """Convert Win32 FIND_DATA struct to stat_result."""
+ # First convert Win32 dwFileAttributes to st_mode
+ attributes = data.dwFileAttributes
+ st_mode = 0
+ if attributes & FILE_ATTRIBUTE_DIRECTORY:
+ st_mode |= S_IFDIR | 0o111
+ else:
+ st_mode |= S_IFREG
+ if attributes & FILE_ATTRIBUTE_READONLY:
+ st_mode |= 0o444
+ else:
+ st_mode |= 0o666
+ if (attributes & FILE_ATTRIBUTE_REPARSE_POINT and
+ data.dwReserved0 == IO_REPARSE_TAG_SYMLINK):
+ st_mode ^= st_mode & 0o170000
+ st_mode |= S_IFLNK
+
+ st_size = data.nFileSizeHigh << 32 | data.nFileSizeLow
+ st_atime = filetime_to_time(data.ftLastAccessTime)
+ st_mtime = filetime_to_time(data.ftLastWriteTime)
+ st_ctime = filetime_to_time(data.ftCreationTime)
+
+ # Some fields set to zero per CPython's posixmodule.c: st_ino, st_dev,
+ # st_nlink, st_uid, st_gid
+ return Win32StatResult(st_mode, 0, 0, 0, 0, 0, st_size,
+ st_atime, st_mtime, st_ctime,
+ int(st_atime * 1000000000),
+ int(st_mtime * 1000000000),
+ int(st_ctime * 1000000000),
+ attributes)
+
+ class Win32DirEntryPython(object):
+ __slots__ = ('name', '_stat', '_lstat', '_find_data', '_scandir_path', '_path', '_inode')
+
+ def __init__(self, scandir_path, name, find_data):
+ self._scandir_path = scandir_path
+ self.name = name
+ self._stat = None
+ self._lstat = None
+ self._find_data = find_data
+ self._path = None
+ self._inode = None
+
+ @property
+ def path(self):
+ if self._path is None:
+ self._path = join(self._scandir_path, self.name)
+ return self._path
+
+ def stat(self, follow_symlinks=True):
+ if follow_symlinks:
+ if self._stat is None:
+ if self.is_symlink():
+ # It's a symlink, call link-following stat()
+ self._stat = stat(self.path)
+ else:
+ # Not a symlink, stat is same as lstat value
+ if self._lstat is None:
+ self._lstat = find_data_to_stat(self._find_data)
+ self._stat = self._lstat
+ return self._stat
+ else:
+ if self._lstat is None:
+ # Lazily convert to stat object, because it's slow
+ # in Python, and often we only need is_dir() etc
+ self._lstat = find_data_to_stat(self._find_data)
+ return self._lstat
+
+ def is_dir(self, follow_symlinks=True):
+ is_symlink = self.is_symlink()
+ if follow_symlinks and is_symlink:
+ try:
+ return self.stat().st_mode & 0o170000 == S_IFDIR
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ elif is_symlink:
+ return False
+ else:
+ return (self._find_data.dwFileAttributes &
+ FILE_ATTRIBUTE_DIRECTORY != 0)
+
+ def is_file(self, follow_symlinks=True):
+ is_symlink = self.is_symlink()
+ if follow_symlinks and is_symlink:
+ try:
+ return self.stat().st_mode & 0o170000 == S_IFREG
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ elif is_symlink:
+ return False
+ else:
+ return (self._find_data.dwFileAttributes &
+ FILE_ATTRIBUTE_DIRECTORY == 0)
+
+ def is_symlink(self):
+ return (self._find_data.dwFileAttributes &
+ FILE_ATTRIBUTE_REPARSE_POINT != 0 and
+ self._find_data.dwReserved0 == IO_REPARSE_TAG_SYMLINK)
+
+ def inode(self):
+ if self._inode is None:
+ self._inode = lstat(self.path).st_ino
+ return self._inode
+
+ def __str__(self):
+ return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name)
+
+ __repr__ = __str__
+
+ def win_error(error, filename):
+ exc = WindowsError(error, ctypes.FormatError(error))
+ exc.filename = filename
+ return exc
+
+ def _scandir_python(path=unicode('.')):
+ """Like os.listdir(), but yield DirEntry objects instead of returning
+ a list of names.
+ """
+ # Call FindFirstFile and handle errors
+ if isinstance(path, bytes):
+ is_bytes = True
+ filename = join(path.decode('mbcs', 'strict'), '*.*')
+ else:
+ is_bytes = False
+ filename = join(path, '*.*')
+ data = wintypes.WIN32_FIND_DATAW()
+ data_p = ctypes.byref(data)
+ handle = FindFirstFile(filename, data_p)
+ if handle == INVALID_HANDLE_VALUE:
+ error = ctypes.GetLastError()
+ if error == ERROR_FILE_NOT_FOUND:
+ # No files, don't yield anything
+ return
+ raise win_error(error, path)
+
+ # Call FindNextFile in a loop, stopping when no more files
+ try:
+ while True:
+ # Skip '.' and '..' (current and parent directory), but
+                # otherwise yield a DirEntry object for the entry
+ name = data.cFileName
+ if name not in ('.', '..'):
+ if is_bytes:
+ name = name.encode('mbcs', 'replace')
+ yield Win32DirEntryPython(path, name, data)
+
+ data = wintypes.WIN32_FIND_DATAW()
+ data_p = ctypes.byref(data)
+ success = FindNextFile(handle, data_p)
+ if not success:
+ error = ctypes.GetLastError()
+ if error == ERROR_NO_MORE_FILES:
+ break
+ raise win_error(error, path)
+ finally:
+ if not FindClose(handle):
+ raise win_error(ctypes.GetLastError(), path)
+
+ if IS_PY3:
+ def scandir_python(path=unicode('.')):
+ if isinstance(path, bytes):
+ raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead")
+ return _scandir_python(path)
+ scandir_python.__doc__ = _scandir_python.__doc__
+ else:
+ scandir_python = _scandir_python
+
+ if _scandir is not None:
+ scandir_c = _scandir.scandir
+ DirEntry_c = _scandir.DirEntry
+
+ if _scandir is not None:
+ scandir = scandir_c
+ DirEntry = DirEntry_c
+ elif ctypes is not None:
+ scandir = scandir_python
+ DirEntry = Win32DirEntryPython
+ else:
+ scandir = scandir_generic
+ DirEntry = GenericDirEntry
+
+
+# Linux, OS X, and BSD implementation
+elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform:
+ have_dirent_d_type = (sys.platform != 'sunos5')
+
+ if ctypes is not None and have_dirent_d_type:
+ import ctypes.util
+
+ DIR_p = ctypes.c_void_p
+
+ # Rather annoying how the dirent struct is slightly different on each
+ # platform. The only fields we care about are d_name and d_type.
+ class Dirent(ctypes.Structure):
+ if sys.platform.startswith('linux'):
+ _fields_ = (
+ ('d_ino', ctypes.c_ulong),
+ ('d_off', ctypes.c_long),
+ ('d_reclen', ctypes.c_ushort),
+ ('d_type', ctypes.c_byte),
+ ('d_name', ctypes.c_char * 256),
+ )
+ elif 'openbsd' in sys.platform:
+ _fields_ = (
+ ('d_ino', ctypes.c_uint64),
+ ('d_off', ctypes.c_uint64),
+ ('d_reclen', ctypes.c_uint16),
+ ('d_type', ctypes.c_uint8),
+ ('d_namlen', ctypes.c_uint8),
+ ('__d_padding', ctypes.c_uint8 * 4),
+ ('d_name', ctypes.c_char * 256),
+ )
+ else:
+ _fields_ = (
+ ('d_ino', ctypes.c_uint32), # must be uint32, not ulong
+ ('d_reclen', ctypes.c_ushort),
+ ('d_type', ctypes.c_byte),
+ ('d_namlen', ctypes.c_byte),
+ ('d_name', ctypes.c_char * 256),
+ )
+
+ DT_UNKNOWN = 0
+ DT_DIR = 4
+ DT_REG = 8
+ DT_LNK = 10
+
+ Dirent_p = ctypes.POINTER(Dirent)
+ Dirent_pp = ctypes.POINTER(Dirent_p)
+
+ libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True)
+ opendir = libc.opendir
+ opendir.argtypes = [ctypes.c_char_p]
+ opendir.restype = DIR_p
+
+ readdir_r = libc.readdir_r
+ readdir_r.argtypes = [DIR_p, Dirent_p, Dirent_pp]
+ readdir_r.restype = ctypes.c_int
+
+ closedir = libc.closedir
+ closedir.argtypes = [DIR_p]
+ closedir.restype = ctypes.c_int
+
+ file_system_encoding = sys.getfilesystemencoding()
+
+ class PosixDirEntry(object):
+ __slots__ = ('name', '_d_type', '_stat', '_lstat', '_scandir_path', '_path', '_inode')
+
+ def __init__(self, scandir_path, name, d_type, inode):
+ self._scandir_path = scandir_path
+ self.name = name
+ self._d_type = d_type
+ self._inode = inode
+ self._stat = None
+ self._lstat = None
+ self._path = None
+
+ @property
+ def path(self):
+ if self._path is None:
+ self._path = join(self._scandir_path, self.name)
+ return self._path
+
+ def stat(self, follow_symlinks=True):
+ if follow_symlinks:
+ if self._stat is None:
+ if self.is_symlink():
+ self._stat = stat(self.path)
+ else:
+ if self._lstat is None:
+ self._lstat = lstat(self.path)
+ self._stat = self._lstat
+ return self._stat
+ else:
+ if self._lstat is None:
+ self._lstat = lstat(self.path)
+ return self._lstat
+
+ def is_dir(self, follow_symlinks=True):
+ if (self._d_type == DT_UNKNOWN or
+ (follow_symlinks and self.is_symlink())):
+ try:
+ st = self.stat(follow_symlinks=follow_symlinks)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ return st.st_mode & 0o170000 == S_IFDIR
+ else:
+ return self._d_type == DT_DIR
+
+ def is_file(self, follow_symlinks=True):
+ if (self._d_type == DT_UNKNOWN or
+ (follow_symlinks and self.is_symlink())):
+ try:
+ st = self.stat(follow_symlinks=follow_symlinks)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ return st.st_mode & 0o170000 == S_IFREG
+ else:
+ return self._d_type == DT_REG
+
+ def is_symlink(self):
+ if self._d_type == DT_UNKNOWN:
+ try:
+ st = self.stat(follow_symlinks=False)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ return st.st_mode & 0o170000 == S_IFLNK
+ else:
+ return self._d_type == DT_LNK
+
+ def inode(self):
+ return self._inode
+
+ def __str__(self):
+ return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name)
+
+ __repr__ = __str__
+
+ def posix_error(filename):
+ errno = ctypes.get_errno()
+ exc = OSError(errno, strerror(errno))
+ exc.filename = filename
+ return exc
+
+ def scandir_python(path=unicode('.')):
+ """Like os.listdir(), but yield DirEntry objects instead of returning
+ a list of names.
+ """
+ if isinstance(path, bytes):
+ opendir_path = path
+ is_bytes = True
+ else:
+ opendir_path = path.encode(file_system_encoding)
+ is_bytes = False
+ dir_p = opendir(opendir_path)
+ if not dir_p:
+ raise posix_error(path)
+ try:
+ result = Dirent_p()
+ while True:
+ entry = Dirent()
+ if readdir_r(dir_p, entry, result):
+ raise posix_error(path)
+ if not result:
+ break
+ name = entry.d_name
+ if name not in (b'.', b'..'):
+ if not is_bytes:
+ name = name.decode(file_system_encoding)
+ yield PosixDirEntry(path, name, entry.d_type, entry.d_ino)
+ finally:
+ if closedir(dir_p):
+ raise posix_error(path)
+
+ if _scandir is not None:
+ scandir_c = _scandir.scandir
+ DirEntry_c = _scandir.DirEntry
+
+ if _scandir is not None:
+ scandir = scandir_c
+ DirEntry = DirEntry_c
+ elif ctypes is not None:
+ scandir = scandir_python
+ DirEntry = PosixDirEntry
+ else:
+ scandir = scandir_generic
+ DirEntry = GenericDirEntry
+
+
+# Some other system -- no d_type or stat information
+else:
+ scandir = scandir_generic
+ DirEntry = GenericDirEntry
+
+
+def _walk(top, topdown=True, onerror=None, followlinks=False):
+ """Like Python 3.5's implementation of os.walk() -- faster than
+ the pre-Python 3.5 version as it uses scandir() internally.
+ """
+ dirs = []
+ nondirs = []
+
+ # We may not have read permission for top, in which case we can't
+ # get a list of the files the directory contains. os.walk
+ # always suppressed the exception then, rather than blow up for a
+ # minor reason when (say) a thousand readable directories are still
+ # left to visit. That logic is copied here.
+ try:
+ scandir_it = scandir(top)
+ except OSError as error:
+ if onerror is not None:
+ onerror(error)
+ return
+
+ while True:
+ try:
+ try:
+ entry = next(scandir_it)
+ except StopIteration:
+ break
+ except OSError as error:
+ if onerror is not None:
+ onerror(error)
+ return
+
+ try:
+ is_dir = entry.is_dir()
+ except OSError:
+ # If is_dir() raises an OSError, consider that the entry is not
+ # a directory, same behaviour as os.path.isdir().
+ is_dir = False
+
+ if is_dir:
+ dirs.append(entry.name)
+ else:
+ nondirs.append(entry.name)
+
+ if not topdown and is_dir:
+ # Bottom-up: recurse into sub-directory, but exclude symlinks to
+ # directories if followlinks is False
+ if followlinks:
+ walk_into = True
+ else:
+ try:
+ is_symlink = entry.is_symlink()
+ except OSError:
+ # If is_symlink() raises an OSError, consider that the
+ # entry is not a symbolic link, same behaviour as
+ # os.path.islink().
+ is_symlink = False
+ walk_into = not is_symlink
+
+ if walk_into:
+ for entry in walk(entry.path, topdown, onerror, followlinks):
+ yield entry
+
+ # Yield before recursion if going top down
+ if topdown:
+ yield top, dirs, nondirs
+
+ # Recurse into sub-directories
+ for name in dirs:
+ new_path = join(top, name)
+ # Issue #23605: os.path.islink() is used instead of caching
+ # entry.is_symlink() result during the loop on os.scandir() because
+ # the caller can replace the directory entry during the "yield"
+ # above.
+ if followlinks or not islink(new_path):
+ for entry in walk(new_path, topdown, onerror, followlinks):
+ yield entry
+ else:
+ # Yield after recursion if going bottom up
+ yield top, dirs, nondirs
+
+
+if IS_PY3 or sys.platform != 'win32':
+ walk = _walk
+else:
+ # Fix for broken unicode handling on Windows on Python 2.x, see:
+ # https://github.com/benhoyt/scandir/issues/54
+ file_system_encoding = sys.getfilesystemencoding()
+
+ def walk(top, topdown=True, onerror=None, followlinks=False):
+ if isinstance(top, bytes):
+ top = top.decode(file_system_encoding)
+ return _walk(top, topdown, onerror, followlinks)
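+
+# Usage sketch (illustrative only, not part of the upstream module):
+#
+#     from scandir import scandir, walk
+#     for entry in scandir('.'):            # yields DirEntry objects
+#         print(entry.name, entry.is_dir())
+#     for root, dirs, files in walk('.'):   # drop-in replacement for os.walk()
+#         print(root, len(dirs), len(files))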
diff --git a/third_party/python/scandir/setup.cfg b/third_party/python/scandir/setup.cfg
new file mode 100644
index 0000000000..8bfd5a12f8
--- /dev/null
+++ b/third_party/python/scandir/setup.cfg
@@ -0,0 +1,4 @@
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/scandir/setup.py b/third_party/python/scandir/setup.py
new file mode 100644
index 0000000000..5987c54ea8
--- /dev/null
+++ b/third_party/python/scandir/setup.py
@@ -0,0 +1,80 @@
+"""Run "python setup.py install" to install scandir."""
+
+try:
+ from setuptools import setup, Extension
+ from setuptools.command.build_ext import build_ext as base_build_ext
+except ImportError:
+ import warnings
+ import sys
+ val = sys.exc_info()[1]
+
+ warnings.warn("import of setuptools failed: %r" % val)
+ from distutils.core import setup, Extension
+ from distutils.command.build_ext import build_ext as base_build_ext
+
+import os
+import re
+import sys
+import logging
+
+# Get version without importing scandir because that will lock the
+# .pyd file (if scandir is already installed) so it can't be
+# overwritten during the install process
+with open(os.path.join(os.path.dirname(__file__), 'scandir.py')) as f:
+ for line in f:
+ match = re.match(r"__version__.*'([0-9.]+)'", line)
+ if match:
+ version = match.group(1)
+ break
+ else:
+ raise Exception("Couldn't find version in scandir.py")
+
+with open('README.rst') as f:
+ long_description = f.read()
+
+
+class BuildExt(base_build_ext):
+
+ # The C extension is optional: if it fails to build, scandir still works
+ # via the ctypes wrapper or, failing that, the slower pure-Python fallback.
+
+ def build_extension(self, ext):
+ try:
+ base_build_ext.build_extension(self, ext)
+ except Exception:
+ exception = sys.exc_info()[0]
+ logging.warning("building the %s extension failed with %s", ext.name, exception)
+
+extension = Extension('_scandir', ['_scandir.c'], optional=True)
+
+
+setup(
+ name='scandir',
+ version=version,
+ author='Ben Hoyt',
+ author_email='benhoyt@gmail.com',
+ url='https://github.com/benhoyt/scandir',
+ license='New BSD License',
+ description='scandir, a better directory iterator and faster os.walk()',
+ long_description=long_description,
+ py_modules=['scandir'],
+ ext_modules=[extension],
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'Operating System :: OS Independent',
+ 'License :: OSI Approved :: BSD License',
+ 'Programming Language :: Python',
+ 'Topic :: System :: Filesystems',
+ 'Topic :: System :: Operating System',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ ], cmdclass={'build_ext': BuildExt},
+)
diff --git a/third_party/python/scandir/test/run_tests.py b/third_party/python/scandir/test/run_tests.py
new file mode 100644
index 0000000000..409ad97c91
--- /dev/null
+++ b/third_party/python/scandir/test/run_tests.py
@@ -0,0 +1,25 @@
+"""Run all unit tests."""
+
+import glob
+import os
+import sys
+
+if sys.version_info[:2] < (2, 7):
+ import unittest2 as unittest
+else:
+ import unittest
+
+
+def main():
+ test_dir = os.path.dirname(os.path.abspath(__file__))
+ test_files = glob.glob(os.path.join(test_dir, 'test_*.py'))
+ test_names = [os.path.basename(f)[:-3] for f in test_files]
+
+ sys.path.insert(0, os.path.join(test_dir, '..'))
+
+ suite = unittest.defaultTestLoader.loadTestsFromNames(test_names)
+ result = unittest.TextTestRunner(verbosity=2).run(suite)
+ sys.exit(1 if (result.errors or result.failures) else 0)
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/python/scandir/test/test_scandir.py b/third_party/python/scandir/test/test_scandir.py
new file mode 100644
index 0000000000..8e8d1a3ed5
--- /dev/null
+++ b/third_party/python/scandir/test/test_scandir.py
@@ -0,0 +1,320 @@
+"""Tests for scandir.scandir()."""
+
+from __future__ import unicode_literals
+
+import os
+import shutil
+import sys
+import time
+
+if sys.version_info[:2] < (2, 7):
+ import unittest2 as unittest
+else:
+ import unittest
+
+try:
+ import scandir
+ has_scandir = True
+except ImportError:
+ has_scandir = False
+
+FILE_ATTRIBUTE_DIRECTORY = 16
+
+TEST_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), 'testdir'))
+
+IS_PY3 = sys.version_info >= (3, 0)
+
+if IS_PY3:
+ int_types = int
+else:
+ int_types = (int, long)
+ str = unicode
+
+
+if hasattr(os, 'symlink'):
+ try:
+ link_name = os.path.join(os.path.dirname(__file__), '_testlink')
+ os.symlink(__file__, link_name)
+ os.remove(link_name)
+ symlinks_supported = True
+ except NotImplementedError:
+ # Windows versions before Vista don't support symbolic links
+ symlinks_supported = False
+else:
+ symlinks_supported = False
+
+
+def create_file(path, contents='1234'):
+ with open(path, 'w') as f:
+ f.write(contents)
+
+
+def setup_main():
+ join = os.path.join
+
+ os.mkdir(TEST_PATH)
+ os.mkdir(join(TEST_PATH, 'subdir'))
+ create_file(join(TEST_PATH, 'file1.txt'))
+ create_file(join(TEST_PATH, 'file2.txt'), contents='12345678')
+
+ os.mkdir(join(TEST_PATH, 'subdir', 'unidir\u018F'))
+ create_file(join(TEST_PATH, 'subdir', 'file1.txt'))
+ create_file(join(TEST_PATH, 'subdir', 'unicod\u018F.txt'))
+
+ create_file(join(TEST_PATH, 'subdir', 'unidir\u018F', 'file1.txt'))
+
+ os.mkdir(join(TEST_PATH, 'linkdir'))
+
+
+def setup_symlinks():
+ join = os.path.join
+
+ os.mkdir(join(TEST_PATH, 'linkdir', 'linksubdir'))
+ create_file(join(TEST_PATH, 'linkdir', 'file1.txt'))
+
+ os.symlink(os.path.abspath(join(TEST_PATH, 'linkdir', 'file1.txt')),
+ join(TEST_PATH, 'linkdir', 'link_to_file'))
+
+ dir_name = os.path.abspath(join(TEST_PATH, 'linkdir', 'linksubdir'))
+ dir_link = join(TEST_PATH, 'linkdir', 'link_to_dir')
+ if sys.version_info >= (3, 3):
+ # "target_is_directory" was only added in Python 3.3
+ os.symlink(dir_name, dir_link, target_is_directory=True)
+ else:
+ os.symlink(dir_name, dir_link)
+
+
+def teardown():
+ try:
+ shutil.rmtree(TEST_PATH)
+ except OSError:
+ # rmtree() occasionally fails here (for example when a file handle is
+ # still open on Windows); wait briefly and retry
+ time.sleep(0.1)
+ shutil.rmtree(TEST_PATH)
+
+
+class TestMixin(object):
+ def setUp(self):
+ if not os.path.exists(TEST_PATH):
+ setup_main()
+ if symlinks_supported and not os.path.exists(
+ os.path.join(TEST_PATH, 'linkdir', 'linksubdir')):
+ setup_symlinks()
+
+ if not hasattr(unittest.TestCase, 'skipTest'):
+ def skipTest(self, reason):
+ sys.stdout.write('skipped {0!r} '.format(reason))
+
+ def test_basic(self):
+ entries = sorted(self.scandir_func(TEST_PATH), key=lambda e: e.name)
+ self.assertEqual([(e.name, e.is_dir()) for e in entries],
+ [('file1.txt', False), ('file2.txt', False),
+ ('linkdir', True), ('subdir', True)])
+ self.assertEqual([e.path for e in entries],
+ [os.path.join(TEST_PATH, e.name) for e in entries])
+
+ def test_dir_entry(self):
+ entries = dict((e.name, e) for e in self.scandir_func(TEST_PATH))
+ e = entries['file1.txt']
+ self.assertEqual([e.is_dir(), e.is_file(), e.is_symlink()], [False, True, False])
+ e = entries['file2.txt']
+ self.assertEqual([e.is_dir(), e.is_file(), e.is_symlink()], [False, True, False])
+ e = entries['subdir']
+ self.assertEqual([e.is_dir(), e.is_file(), e.is_symlink()], [True, False, False])
+
+ self.assertEqual(entries['file1.txt'].stat().st_size, 4)
+ self.assertEqual(entries['file2.txt'].stat().st_size, 8)
+
+ def test_stat(self):
+ entries = list(self.scandir_func(TEST_PATH))
+ for entry in entries:
+ os_stat = os.stat(os.path.join(TEST_PATH, entry.name))
+ scandir_stat = entry.stat()
+ self.assertEqual(os_stat.st_mode, scandir_stat.st_mode)
+ # TODO: it would be nice to figure out why these aren't identical on Windows and PyPy
+ # * Windows: they seem to be a few microseconds to tens of seconds out
+ # * PyPy: for some reason os_stat's times have nanosecond precision, scandir's do not
+ self.assertAlmostEqual(os_stat.st_mtime, scandir_stat.st_mtime, delta=1)
+ self.assertAlmostEqual(os_stat.st_ctime, scandir_stat.st_ctime, delta=1)
+ if entry.is_file():
+ self.assertEqual(os_stat.st_size, scandir_stat.st_size)
+
+ def test_returns_iter(self):
+ it = self.scandir_func(TEST_PATH)
+ entry = next(it)
+ assert hasattr(entry, 'name')
+
+ def check_file_attributes(self, result):
+ self.assertTrue(hasattr(result, 'st_file_attributes'))
+ self.assertTrue(isinstance(result.st_file_attributes, int_types))
+ self.assertTrue(0 <= result.st_file_attributes <= 0xFFFFFFFF)
+
+ def test_file_attributes(self):
+ if sys.platform != 'win32' or not self.has_file_attributes:
+ # st_file_attributes is Win32 specific (but can't use
+ # unittest.skipUnless on Python 2.6)
+ return self.skipTest('st_file_attributes not supported')
+
+ entries = dict((e.name, e) for e in self.scandir_func(TEST_PATH))
+
+ # test st_file_attributes on a file (FILE_ATTRIBUTE_DIRECTORY not set)
+ result = entries['file1.txt'].stat()
+ self.check_file_attributes(result)
+ self.assertEqual(result.st_file_attributes & FILE_ATTRIBUTE_DIRECTORY, 0)
+
+ # test st_file_attributes on a directory (FILE_ATTRIBUTE_DIRECTORY set)
+ result = entries['subdir'].stat()
+ self.check_file_attributes(result)
+ self.assertEqual(result.st_file_attributes & FILE_ATTRIBUTE_DIRECTORY,
+ FILE_ATTRIBUTE_DIRECTORY)
+
+ def test_path(self):
+ entries = sorted(self.scandir_func(TEST_PATH), key=lambda e: e.name)
+ self.assertEqual([os.path.basename(e.name) for e in entries],
+ ['file1.txt', 'file2.txt', 'linkdir', 'subdir'])
+ self.assertEqual([os.path.normpath(os.path.join(TEST_PATH, e.name)) for e in entries],
+ [os.path.normpath(e.path) for e in entries])
+
+ def test_symlink(self):
+ if not symlinks_supported:
+ return self.skipTest('symbolic links not supported')
+
+ entries = sorted(self.scandir_func(os.path.join(TEST_PATH, 'linkdir')),
+ key=lambda e: e.name)
+
+ self.assertEqual([(e.name, e.is_symlink()) for e in entries],
+ [('file1.txt', False),
+ ('link_to_dir', True),
+ ('link_to_file', True),
+ ('linksubdir', False)])
+
+ self.assertEqual([(e.name, e.is_file(), e.is_file(follow_symlinks=False))
+ for e in entries],
+ [('file1.txt', True, True),
+ ('link_to_dir', False, False),
+ ('link_to_file', True, False),
+ ('linksubdir', False, False)])
+
+ self.assertEqual([(e.name, e.is_dir(), e.is_dir(follow_symlinks=False))
+ for e in entries],
+ [('file1.txt', False, False),
+ ('link_to_dir', True, False),
+ ('link_to_file', False, False),
+ ('linksubdir', True, True)])
+
+ def test_bytes(self):
+ # Check that unicode filenames are returned correctly as bytes in output
+ path = os.path.join(TEST_PATH, 'subdir').encode(sys.getfilesystemencoding(), 'replace')
+ self.assertTrue(isinstance(path, bytes))
+
+ # Python 3.6 on Windows fixes the bytes filename thing by using UTF-8
+ if IS_PY3 and sys.platform == 'win32':
+ if not (sys.version_info >= (3, 6) and self.scandir_func == os.scandir):
+ self.assertRaises(TypeError, self.scandir_func, path)
+ return
+
+ entries = [e for e in self.scandir_func(path) if e.name.startswith(b'unicod')]
+ self.assertEqual(len(entries), 1)
+ entry = entries[0]
+
+ self.assertTrue(isinstance(entry.name, bytes))
+ self.assertTrue(isinstance(entry.path, bytes))
+
+ # b'unicod?.txt' on Windows, b'unicod\xc6\x8f.txt' (UTF-8) or similar on POSIX
+ entry_name = 'unicod\u018f.txt'.encode(sys.getfilesystemencoding(), 'replace')
+ self.assertEqual(entry.name, entry_name)
+ self.assertEqual(entry.path, os.path.join(path, entry_name))
+
+ def test_unicode(self):
+ # Check that unicode filenames are returned correctly as (unicode) str in output
+ path = os.path.join(TEST_PATH, 'subdir')
+ if not IS_PY3:
+ path = path.decode(sys.getfilesystemencoding(), 'replace')
+ self.assertTrue(isinstance(path, str))
+ entries = [e for e in self.scandir_func(path) if e.name.startswith('unicod')]
+ self.assertEqual(len(entries), 1)
+ entry = entries[0]
+
+ self.assertTrue(isinstance(entry.name, str))
+ self.assertTrue(isinstance(entry.path, str))
+
+ entry_name = 'unicod\u018f.txt'
+ self.assertEqual(entry.name, entry_name)
+ self.assertEqual(entry.path, os.path.join(path, 'unicod\u018f.txt'))
+
+ # Check that it handles unicode input properly
+ path = os.path.join(TEST_PATH, 'subdir', 'unidir\u018f')
+ self.assertTrue(isinstance(path, str))
+ entries = list(self.scandir_func(path))
+ self.assertEqual(len(entries), 1)
+ entry = entries[0]
+
+ self.assertTrue(isinstance(entry.name, str))
+ self.assertTrue(isinstance(entry.path, str))
+ self.assertEqual(entry.name, 'file1.txt')
+ self.assertEqual(entry.path, os.path.join(path, 'file1.txt'))
+
+ def test_walk_unicode_handling(self):
+ encoding = sys.getfilesystemencoding()
+ dirname_unicode = u'test_unicode_dir'
+ dirname_bytes = dirname_unicode.encode(encoding)
+ dirpath = os.path.join(TEST_PATH.encode(encoding), dirname_bytes)
+ try:
+ os.makedirs(dirpath)
+
+ if sys.platform != 'win32':
+ # test bytes
+ self.assertTrue(isinstance(dirpath, bytes))
+ for (path, dirs, files) in scandir.walk(dirpath):
+ self.assertTrue(isinstance(path, bytes))
+
+ # test unicode
+ text_type = str if IS_PY3 else unicode
+ dirpath_unicode = text_type(dirpath, encoding)
+ self.assertTrue(isinstance(dirpath_unicode, text_type))
+ for (path, dirs, files) in scandir.walk(dirpath_unicode):
+ self.assertTrue(isinstance(path, text_type))
+ finally:
+ shutil.rmtree(dirpath)
+
+if has_scandir:
+ class TestScandirGeneric(TestMixin, unittest.TestCase):
+ def setUp(self):
+ self.scandir_func = scandir.scandir_generic
+ self.has_file_attributes = False
+ TestMixin.setUp(self)
+
+
+ if getattr(scandir, 'scandir_python', None):
+ class TestScandirPython(TestMixin, unittest.TestCase):
+ def setUp(self):
+ self.scandir_func = scandir.scandir_python
+ self.has_file_attributes = True
+ TestMixin.setUp(self)
+
+
+ if getattr(scandir, 'scandir_c', None):
+ class TestScandirC(TestMixin, unittest.TestCase):
+ def setUp(self):
+ self.scandir_func = scandir.scandir_c
+ self.has_file_attributes = True
+ TestMixin.setUp(self)
+
+
+ class TestScandirDirEntry(unittest.TestCase):
+ def setUp(self):
+ if not os.path.exists(TEST_PATH):
+ setup_main()
+
+ def test_iter_returns_dir_entry(self):
+ it = scandir.scandir(TEST_PATH)
+ entry = next(it)
+ assert isinstance(entry, scandir.DirEntry)
+
+
+if hasattr(os, 'scandir'):
+ class TestScandirOS(TestMixin, unittest.TestCase):
+ def setUp(self):
+ self.scandir_func = os.scandir
+ self.has_file_attributes = True
+ TestMixin.setUp(self)
diff --git a/third_party/python/scandir/test/test_walk.py b/third_party/python/scandir/test/test_walk.py
new file mode 100644
index 0000000000..7995f3adba
--- /dev/null
+++ b/third_party/python/scandir/test/test_walk.py
@@ -0,0 +1,213 @@
+"""Tests for scandir.walk(), copied from CPython's tests for os.walk()."""
+
+import os
+import shutil
+import sys
+
+if sys.version_info[:2] < (2, 7):
+ import unittest2 as unittest
+else:
+ import unittest
+
+import scandir
+
+walk_func = scandir.walk
+
+
+class TestWalk(unittest.TestCase):
+ testfn = os.path.join(os.path.dirname(__file__), 'temp')
+
+ def test_traversal(self):
+ # Build:
+ # TESTFN/
+ # TEST1/ a file kid and two directory kids
+ # tmp1
+ # SUB1/ a file kid and a directory kid
+ # tmp2
+ # SUB11/ no kids
+ # SUB2/ a file kid and a dirsymlink kid
+ # tmp3
+ # link/ a symlink to TESTFN.2
+ # TEST2/
+ # tmp4 a lone file
+ walk_path = os.path.join(self.testfn, "TEST1")
+ sub1_path = os.path.join(walk_path, "SUB1")
+ sub11_path = os.path.join(sub1_path, "SUB11")
+ sub2_path = os.path.join(walk_path, "SUB2")
+ tmp1_path = os.path.join(walk_path, "tmp1")
+ tmp2_path = os.path.join(sub1_path, "tmp2")
+ tmp3_path = os.path.join(sub2_path, "tmp3")
+ link_path = os.path.join(sub2_path, "link")
+ t2_path = os.path.join(self.testfn, "TEST2")
+ tmp4_path = os.path.join(self.testfn, "TEST2", "tmp4")
+
+ # Create stuff.
+ os.makedirs(sub11_path)
+ os.makedirs(sub2_path)
+ os.makedirs(t2_path)
+ for path in tmp1_path, tmp2_path, tmp3_path, tmp4_path:
+ f = open(path, "w")
+ f.write("I'm " + path + " and proud of it. Blame test_os.\n")
+ f.close()
+ has_symlink = hasattr(os, "symlink")
+ if has_symlink:
+ try:
+ if sys.platform == 'win32' and sys.version_info >= (3, 2):
+ # "target_is_directory" was only added in Python 3.2 (on Windows)
+ os.symlink(os.path.abspath(t2_path), link_path, target_is_directory=True)
+ else:
+ os.symlink(os.path.abspath(t2_path), link_path)
+ sub2_tree = (sub2_path, ["link"], ["tmp3"])
+ except NotImplementedError:
+ sub2_tree = (sub2_path, [], ["tmp3"])
+ else:
+ sub2_tree = (sub2_path, [], ["tmp3"])
+
+ # Walk top-down.
+ all = list(walk_func(walk_path))
+ self.assertEqual(len(all), 4)
+ # We can't know which order SUB1 and SUB2 will appear in.
+ # Not flipped: TESTFN, SUB1, SUB11, SUB2
+ # flipped: TESTFN, SUB2, SUB1, SUB11
+ flipped = all[0][1][0] != "SUB1"
+ all[0][1].sort()
+ self.assertEqual(all[0], (walk_path, ["SUB1", "SUB2"], ["tmp1"]))
+ self.assertEqual(all[1 + flipped], (sub1_path, ["SUB11"], ["tmp2"]))
+ self.assertEqual(all[2 + flipped], (sub11_path, [], []))
+ self.assertEqual(all[3 - 2 * flipped], sub2_tree)
+
+ # Prune the search.
+ all = []
+ for root, dirs, files in walk_func(walk_path):
+ all.append((root, dirs, files))
+ # Don't descend into SUB1.
+ if 'SUB1' in dirs:
+ # Note that this also mutates the dirs we appended to all!
+ dirs.remove('SUB1')
+ self.assertEqual(len(all), 2)
+ self.assertEqual(all[0], (walk_path, ["SUB2"], ["tmp1"]))
+ self.assertEqual(all[1], sub2_tree)
+
+ # Walk bottom-up.
+ all = list(walk_func(walk_path, topdown=False))
+ self.assertEqual(len(all), 4)
+ # We can't know which order SUB1 and SUB2 will appear in.
+ # Not flipped: SUB11, SUB1, SUB2, TESTFN
+ # flipped: SUB2, SUB11, SUB1, TESTFN
+ flipped = all[3][1][0] != "SUB1"
+ all[3][1].sort()
+ self.assertEqual(all[3], (walk_path, ["SUB1", "SUB2"], ["tmp1"]))
+ self.assertEqual(all[flipped], (sub11_path, [], []))
+ self.assertEqual(all[flipped + 1], (sub1_path, ["SUB11"], ["tmp2"]))
+ self.assertEqual(all[2 - 2 * flipped], sub2_tree)
+
+ if has_symlink:
+ # Walk, following symlinks.
+ for root, dirs, files in walk_func(walk_path, followlinks=True):
+ if root == link_path:
+ self.assertEqual(dirs, [])
+ self.assertEqual(files, ["tmp4"])
+ break
+ else:
+ self.fail("Didn't follow symlink with followlinks=True")
+
+ # Test creating a directory and adding it to dirnames
+ sub3_path = os.path.join(walk_path, "SUB3")
+ all = []
+ for root, dirs, files in walk_func(walk_path):
+ all.append((root, dirs, files))
+ if 'SUB1' in dirs:
+ os.makedirs(sub3_path)
+ dirs.append('SUB3')
+ all.sort()
+ self.assertEqual(os.path.split(all[-1][0])[1], 'SUB3')
+
+ def tearDown(self):
+ # Tear everything down. This is a decent use for bottom-up on
+ # Windows, which doesn't have a recursive delete command. The
+ # (not so) subtlety is that rmdir will fail unless the dir's
+ # kids are removed first, so bottom up is essential.
+ for root, dirs, files in os.walk(self.testfn, topdown=False):
+ for name in files:
+ os.remove(os.path.join(root, name))
+ for name in dirs:
+ dirname = os.path.join(root, name)
+ if not os.path.islink(dirname):
+ os.rmdir(dirname)
+ else:
+ os.remove(dirname)
+ os.rmdir(self.testfn)
+
+
+class TestWalkSymlink(unittest.TestCase):
+ temp_dir = os.path.join(os.path.dirname(__file__), 'temp')
+
+ def setUp(self):
+ os.mkdir(self.temp_dir)
+ self.dir_name = os.path.join(self.temp_dir, 'dir')
+ os.mkdir(self.dir_name)
+ open(os.path.join(self.dir_name, 'subfile'), 'w').close()
+ self.file_name = os.path.join(self.temp_dir, 'file')
+ open(self.file_name, 'w').close()
+
+ def tearDown(self):
+ shutil.rmtree(self.temp_dir)
+
+ def test_symlink_to_file(self):
+ if not hasattr(os, 'symlink'):
+ return
+
+ try:
+ os.symlink(self.file_name, os.path.join(self.temp_dir,
+ 'link_to_file'))
+ except NotImplementedError:
+ # Windows versions before Vista don't support symbolic links
+ return
+
+ output = sorted(walk_func(self.temp_dir))
+ dirs = sorted(output[0][1])
+ files = sorted(output[0][2])
+ self.assertEqual(dirs, ['dir'])
+ self.assertEqual(files, ['file', 'link_to_file'])
+
+ self.assertEqual(len(output), 2)
+ self.assertEqual(output[1][1], [])
+ self.assertEqual(output[1][2], ['subfile'])
+
+ def test_symlink_to_directory(self):
+ if not hasattr(os, 'symlink'):
+ return
+
+ link_name = os.path.join(self.temp_dir, 'link_to_dir')
+ try:
+ if sys.platform == 'win32' and sys.version_info >= (3, 2):
+ # "target_is_directory" was only added in Python 3.2 (on Windows)
+ os.symlink(self.dir_name, link_name, target_is_directory=True)
+ else:
+ os.symlink(self.dir_name, link_name)
+ except NotImplementedError:
+ # Windows versions before Vista don't support symbolic links
+ return
+
+ output = sorted(walk_func(self.temp_dir))
+ dirs = sorted(output[0][1])
+ files = sorted(output[0][2])
+ self.assertEqual(dirs, ['dir', 'link_to_dir'])
+ self.assertEqual(files, ['file'])
+
+ self.assertEqual(len(output), 2)
+ self.assertEqual(output[1][1], [])
+ self.assertEqual(output[1][2], ['subfile'])
+
+ output = sorted(walk_func(self.temp_dir, followlinks=True))
+ dirs = sorted(output[0][1])
+ files = sorted(output[0][2])
+ self.assertEqual(dirs, ['dir', 'link_to_dir'])
+ self.assertEqual(files, ['file'])
+
+ self.assertEqual(len(output), 3)
+ self.assertEqual(output[1][1], [])
+ self.assertEqual(output[1][2], ['subfile'])
+ self.assertEqual(os.path.basename(output[2][0]), 'link_to_dir')
+ self.assertEqual(output[2][1], [])
+ self.assertEqual(output[2][2], ['subfile'])
diff --git a/third_party/python/scandir/winreparse.h b/third_party/python/scandir/winreparse.h
new file mode 100644
index 0000000000..66f7775dd2
--- /dev/null
+++ b/third_party/python/scandir/winreparse.h
@@ -0,0 +1,53 @@
+#ifndef Py_WINREPARSE_H
+#define Py_WINREPARSE_H
+
+#ifdef MS_WINDOWS
+#include <Windows.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* The following structure was copied from
+ http://msdn.microsoft.com/en-us/library/ff552012.aspx as the required
+ include doesn't seem to be present in the Windows SDK (at least as included
+ with Visual Studio Express). */
+typedef struct _REPARSE_DATA_BUFFER {
+ ULONG ReparseTag;
+ USHORT ReparseDataLength;
+ USHORT Reserved;
+ union {
+ struct {
+ USHORT SubstituteNameOffset;
+ USHORT SubstituteNameLength;
+ USHORT PrintNameOffset;
+ USHORT PrintNameLength;
+ ULONG Flags;
+ WCHAR PathBuffer[1];
+ } SymbolicLinkReparseBuffer;
+
+ struct {
+ USHORT SubstituteNameOffset;
+ USHORT SubstituteNameLength;
+ USHORT PrintNameOffset;
+ USHORT PrintNameLength;
+ WCHAR PathBuffer[1];
+ } MountPointReparseBuffer;
+
+ struct {
+ UCHAR DataBuffer[1];
+ } GenericReparseBuffer;
+ };
+} REPARSE_DATA_BUFFER, *PREPARSE_DATA_BUFFER;
+
+#define REPARSE_DATA_BUFFER_HEADER_SIZE FIELD_OFFSET(REPARSE_DATA_BUFFER,\
+ GenericReparseBuffer)
+#define MAXIMUM_REPARSE_DATA_BUFFER_SIZE ( 16 * 1024 )
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* MS_WINDOWS */
+
+#endif /* !Py_WINREPARSE_H */
diff --git a/third_party/python/sentry-sdk/LICENSE b/third_party/python/sentry-sdk/LICENSE
new file mode 100644
index 0000000000..61555f192e
--- /dev/null
+++ b/third_party/python/sentry-sdk/LICENSE
@@ -0,0 +1,9 @@
+Copyright (c) 2018 Sentry (https://sentry.io) and individual contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/sentry-sdk/MANIFEST.in b/third_party/python/sentry-sdk/MANIFEST.in
new file mode 100644
index 0000000000..86a2426325
--- /dev/null
+++ b/third_party/python/sentry-sdk/MANIFEST.in
@@ -0,0 +1,2 @@
+include LICENSE
+include sentry_sdk/py.typed
diff --git a/third_party/python/sentry-sdk/PKG-INFO b/third_party/python/sentry-sdk/PKG-INFO
new file mode 100644
index 0000000000..88d9a4087e
--- /dev/null
+++ b/third_party/python/sentry-sdk/PKG-INFO
@@ -0,0 +1,43 @@
+Metadata-Version: 2.1
+Name: sentry-sdk
+Version: 0.14.3
+Summary: Python client for Sentry (https://getsentry.com)
+Home-page: https://github.com/getsentry/sentry-python
+Author: Sentry Team and Contributors
+Author-email: hello@getsentry.com
+License: BSD
+Description:
+ Sentry-Python - Sentry SDK for Python
+ =====================================
+
+ **Sentry-Python is an SDK for Sentry.** Check out `GitHub
+ <https://github.com/getsentry/sentry-python>`_ to find out more.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Provides-Extra: flask
+Provides-Extra: bottle
+Provides-Extra: falcon
+Provides-Extra: django
+Provides-Extra: sanic
+Provides-Extra: celery
+Provides-Extra: beam
+Provides-Extra: rq
+Provides-Extra: aiohttp
+Provides-Extra: tornado
+Provides-Extra: sqlalchemy
+Provides-Extra: pyspark
diff --git a/third_party/python/sentry-sdk/README.md b/third_party/python/sentry-sdk/README.md
new file mode 100644
index 0000000000..0c845d601d
--- /dev/null
+++ b/third_party/python/sentry-sdk/README.md
@@ -0,0 +1,42 @@
+<p align="center">
+ <a href="https://sentry.io" target="_blank" align="center">
+ <img src="https://sentry-brand.storage.googleapis.com/sentry-logo-black.png" width="280">
+ </a>
+</p>
+
+# sentry-python - Sentry SDK for Python
+
+[![Build Status](https://travis-ci.com/getsentry/sentry-python.svg?branch=master)](https://travis-ci.com/getsentry/sentry-python)
+[![PyPi page link -- version](https://img.shields.io/pypi/v/sentry-sdk.svg)](https://pypi.python.org/pypi/sentry-sdk)
+[![Discord](https://img.shields.io/discord/621778831602221064)](https://discord.gg/cWnMQeA)
+
+This is the next line of the Python SDK for [Sentry](http://sentry.io/), intended to replace the `raven` package on PyPI.
+
+```python
+from sentry_sdk import init, capture_message
+
+init("https://mydsn@sentry.io/123")
+
+capture_message("Hello World") # Will create an event.
+
+raise ValueError() # Will also create an event.
+```
+
+To learn more about how to use the SDK:
+
+- [Getting started with the new SDK](https://docs.sentry.io/quickstart/?platform=python)
+- [Configuration options](https://docs.sentry.io/error-reporting/configuration/?platform=python)
+- [Setting context (tags, user, extra information)](https://docs.sentry.io/enriching-error-data/context/?platform=python)
+- [Integrations](https://docs.sentry.io/platforms/python/)
+
+Are you coming from raven-python?
+
+- [Cheatsheet: Migrating to the new SDK from Raven](https://forum.sentry.io/t/switching-to-sentry-python/4733)
+
+To learn about internals:
+
+- [API Reference](https://getsentry.github.io/sentry-python/)
+
+# License
+
+Licensed under the BSD license, see `LICENSE`
diff --git a/third_party/python/sentry-sdk/sentry_sdk.egg-info/PKG-INFO b/third_party/python/sentry-sdk/sentry_sdk.egg-info/PKG-INFO
new file mode 100644
index 0000000000..88d9a4087e
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk.egg-info/PKG-INFO
@@ -0,0 +1,43 @@
+Metadata-Version: 2.1
+Name: sentry-sdk
+Version: 0.14.3
+Summary: Python client for Sentry (https://getsentry.com)
+Home-page: https://github.com/getsentry/sentry-python
+Author: Sentry Team and Contributors
+Author-email: hello@getsentry.com
+License: BSD
+Description:
+ Sentry-Python - Sentry SDK for Python
+ =====================================
+
+ **Sentry-Python is an SDK for Sentry.** Check out `GitHub
+ <https://github.com/getsentry/sentry-python>`_ to find out more.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Provides-Extra: flask
+Provides-Extra: bottle
+Provides-Extra: falcon
+Provides-Extra: django
+Provides-Extra: sanic
+Provides-Extra: celery
+Provides-Extra: beam
+Provides-Extra: rq
+Provides-Extra: aiohttp
+Provides-Extra: tornado
+Provides-Extra: sqlalchemy
+Provides-Extra: pyspark
diff --git a/third_party/python/sentry-sdk/sentry_sdk.egg-info/SOURCES.txt b/third_party/python/sentry-sdk/sentry_sdk.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..662a5a069f
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk.egg-info/SOURCES.txt
@@ -0,0 +1,64 @@
+LICENSE
+MANIFEST.in
+README.md
+setup.cfg
+setup.py
+sentry_sdk/__init__.py
+sentry_sdk/_compat.py
+sentry_sdk/_types.py
+sentry_sdk/api.py
+sentry_sdk/client.py
+sentry_sdk/consts.py
+sentry_sdk/debug.py
+sentry_sdk/envelope.py
+sentry_sdk/hub.py
+sentry_sdk/py.typed
+sentry_sdk/scope.py
+sentry_sdk/serializer.py
+sentry_sdk/sessions.py
+sentry_sdk/tracing.py
+sentry_sdk/transport.py
+sentry_sdk/utils.py
+sentry_sdk/worker.py
+sentry_sdk.egg-info/PKG-INFO
+sentry_sdk.egg-info/SOURCES.txt
+sentry_sdk.egg-info/dependency_links.txt
+sentry_sdk.egg-info/not-zip-safe
+sentry_sdk.egg-info/requires.txt
+sentry_sdk.egg-info/top_level.txt
+sentry_sdk/integrations/__init__.py
+sentry_sdk/integrations/_wsgi_common.py
+sentry_sdk/integrations/aiohttp.py
+sentry_sdk/integrations/argv.py
+sentry_sdk/integrations/asgi.py
+sentry_sdk/integrations/atexit.py
+sentry_sdk/integrations/aws_lambda.py
+sentry_sdk/integrations/beam.py
+sentry_sdk/integrations/bottle.py
+sentry_sdk/integrations/celery.py
+sentry_sdk/integrations/dedupe.py
+sentry_sdk/integrations/excepthook.py
+sentry_sdk/integrations/falcon.py
+sentry_sdk/integrations/flask.py
+sentry_sdk/integrations/gnu_backtrace.py
+sentry_sdk/integrations/logging.py
+sentry_sdk/integrations/modules.py
+sentry_sdk/integrations/pyramid.py
+sentry_sdk/integrations/redis.py
+sentry_sdk/integrations/rq.py
+sentry_sdk/integrations/sanic.py
+sentry_sdk/integrations/serverless.py
+sentry_sdk/integrations/sqlalchemy.py
+sentry_sdk/integrations/stdlib.py
+sentry_sdk/integrations/threading.py
+sentry_sdk/integrations/tornado.py
+sentry_sdk/integrations/trytond.py
+sentry_sdk/integrations/wsgi.py
+sentry_sdk/integrations/django/__init__.py
+sentry_sdk/integrations/django/asgi.py
+sentry_sdk/integrations/django/middleware.py
+sentry_sdk/integrations/django/templates.py
+sentry_sdk/integrations/django/transactions.py
+sentry_sdk/integrations/spark/__init__.py
+sentry_sdk/integrations/spark/spark_driver.py
+sentry_sdk/integrations/spark/spark_worker.py
\ No newline at end of file
diff --git a/third_party/python/sentry-sdk/sentry_sdk.egg-info/dependency_links.txt b/third_party/python/sentry-sdk/sentry_sdk.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/third_party/python/sentry-sdk/sentry_sdk.egg-info/not-zip-safe b/third_party/python/sentry-sdk/sentry_sdk.egg-info/not-zip-safe
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk.egg-info/not-zip-safe
@@ -0,0 +1 @@
+
diff --git a/third_party/python/sentry-sdk/sentry_sdk.egg-info/requires.txt b/third_party/python/sentry-sdk/sentry_sdk.egg-info/requires.txt
new file mode 100644
index 0000000000..5f325f6230
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk.egg-info/requires.txt
@@ -0,0 +1,39 @@
+urllib3>=1.10.0
+certifi
+
+[aiohttp]
+aiohttp>=3.5
+
+[beam]
+beam>=2.12
+
+[bottle]
+bottle>=0.12.13
+
+[celery]
+celery>=3
+
+[django]
+django>=1.8
+
+[falcon]
+falcon>=1.4
+
+[flask]
+flask>=0.11
+blinker>=1.1
+
+[pyspark]
+pyspark>=2.4.4
+
+[rq]
+rq>=0.6
+
+[sanic]
+sanic>=0.8
+
+[sqlalchemy]
+sqlalchemy>=1.2
+
+[tornado]
+tornado>=5
diff --git a/third_party/python/sentry-sdk/sentry_sdk.egg-info/top_level.txt b/third_party/python/sentry-sdk/sentry_sdk.egg-info/top_level.txt
new file mode 100644
index 0000000000..5051901ecb
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk.egg-info/top_level.txt
@@ -0,0 +1 @@
+sentry_sdk
diff --git a/third_party/python/sentry-sdk/sentry_sdk/__init__.py b/third_party/python/sentry-sdk/sentry_sdk/__init__.py
new file mode 100644
index 0000000000..b211a6c754
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/__init__.py
@@ -0,0 +1,25 @@
+from sentry_sdk.hub import Hub, init
+from sentry_sdk.scope import Scope
+from sentry_sdk.transport import Transport, HttpTransport
+from sentry_sdk.client import Client
+
+from sentry_sdk.api import * # noqa
+from sentry_sdk.api import __all__ as api_all
+
+from sentry_sdk.consts import VERSION # noqa
+
+__all__ = api_all + [ # noqa
+ "Hub",
+ "Scope",
+ "Client",
+ "Transport",
+ "HttpTransport",
+ "init",
+ "integrations",
+]
+
+# Initialize the debug support after everything is loaded
+from sentry_sdk.debug import init_debug_support
+
+init_debug_support()
+del init_debug_support
diff --git a/third_party/python/sentry-sdk/sentry_sdk/_compat.py b/third_party/python/sentry-sdk/sentry_sdk/_compat.py
new file mode 100644
index 0000000000..4db5f44c33
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/_compat.py
@@ -0,0 +1,92 @@
+import sys
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Optional
+ from typing import Tuple
+ from typing import Any
+ from typing import Type
+
+ from typing import TypeVar
+
+ T = TypeVar("T")
+
+
+PY2 = sys.version_info[0] == 2
+
+if PY2:
+ import urlparse # noqa
+
+ text_type = unicode # noqa
+ import Queue as queue # noqa
+
+ string_types = (str, text_type)
+ number_types = (int, long, float) # noqa
+ int_types = (int, long) # noqa
+ iteritems = lambda x: x.iteritems() # noqa: B301
+
+ def implements_str(cls):
+ # type: (T) -> T
+ cls.__unicode__ = cls.__str__
+ cls.__str__ = lambda x: unicode(x).encode("utf-8") # noqa
+ return cls
+
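+ # The Python 2 three-argument form "raise tp, value, tb" is a syntax
+ # error on Python 3, so it is hidden inside exec() to keep this module
+ # importable there.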
+ exec("def reraise(tp, value, tb=None):\n raise tp, value, tb")
+
+
+else:
+ import urllib.parse as urlparse # noqa
+ import queue # noqa
+
+ text_type = str
+ string_types = (text_type,) # type: Tuple[type]
+ number_types = (int, float) # type: Tuple[type, type]
+ int_types = (int,) # noqa
+ iteritems = lambda x: x.items()
+
+ def implements_str(x):
+ # type: (T) -> T
+ return x
+
+ def reraise(tp, value, tb=None):
+ # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[Any]) -> None
+ assert value is not None
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+
+def with_metaclass(meta, *bases):
+ # type: (Any, *Any) -> Any
+ class MetaClass(type):
+ def __new__(metacls, name, this_bases, d):
+ # type: (Any, Any, Any, Any) -> Any
+ return meta(name, bases, d)
+
+ return type.__new__(MetaClass, "temporary_class", (), {})
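+
+# Usage sketch (illustrative; SomeMeta is a hypothetical metaclass):
+#
+#     class Base(with_metaclass(SomeMeta, object)):
+#         ...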
+
+
+def check_thread_support():
+ # type: () -> None
+ try:
+ from uwsgi import opt # type: ignore
+ except ImportError:
+ return
+
+ # When `threads` is passed in as a uwsgi option,
+ # `enable-threads` is implied on.
+ if "threads" in opt:
+ return
+
+ if str(opt.get("enable-threads", "0")).lower() in ("false", "off", "no", "0"):
+ from warnings import warn
+
+ warn(
+ Warning(
+ "We detected the use of uwsgi with disabled threads. "
+ "This will cause issues with the transport you are "
+ "trying to use. Please enable threading for uwsgi. "
+ '(Enable the "enable-threads" flag).'
+ )
+ )
diff --git a/third_party/python/sentry-sdk/sentry_sdk/_types.py b/third_party/python/sentry-sdk/sentry_sdk/_types.py
new file mode 100644
index 0000000000..74020aea57
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/_types.py
@@ -0,0 +1,37 @@
+try:
+ from typing import TYPE_CHECKING as MYPY
+except ImportError:
+ MYPY = False
+
+
+if MYPY:
+ from types import TracebackType
+ from typing import Any
+ from typing import Callable
+ from typing import Dict
+ from typing import Optional
+ from typing import Tuple
+ from typing import Type
+ from typing_extensions import Literal
+
+ ExcInfo = Tuple[
+ Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]
+ ]
+
+ Event = Dict[str, Any]
+ Hint = Dict[str, Any]
+
+ Breadcrumb = Dict[str, Any]
+ BreadcrumbHint = Dict[str, Any]
+
+ EventProcessor = Callable[[Event, Hint], Optional[Event]]
+ ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]]
+ BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]]
+
+ # https://github.com/python/mypy/issues/5710
+ NotImplementedType = Any
+
+ EventDataCategory = Literal[
+ "default", "error", "crash", "transaction", "security", "attachment", "session"
+ ]
+ SessionStatus = Literal["ok", "exited", "crashed", "abnormal"]
diff --git a/third_party/python/sentry-sdk/sentry_sdk/api.py b/third_party/python/sentry-sdk/sentry_sdk/api.py
new file mode 100644
index 0000000000..0f1cdfc741
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/api.py
@@ -0,0 +1,256 @@
+import inspect
+from contextlib import contextmanager
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.scope import Scope
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+ from typing import overload
+ from typing import Callable
+ from typing import TypeVar
+ from typing import ContextManager
+
+ from sentry_sdk._types import Event, Hint, Breadcrumb, BreadcrumbHint
+ from sentry_sdk.tracing import Span
+
+ T = TypeVar("T")
+ F = TypeVar("F", bound=Callable[..., Any])
+else:
+
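+ # typing.overload is only imported under MYPY for type checking; this
+ # runtime no-op keeps the @overload-decorated stubs below importable.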
+ def overload(x):
+ # type: (T) -> T
+ return x
+
+
+__all__ = [
+ "capture_event",
+ "capture_message",
+ "capture_exception",
+ "add_breadcrumb",
+ "configure_scope",
+ "push_scope",
+ "flush",
+ "last_event_id",
+ "start_span",
+ "set_tag",
+ "set_context",
+ "set_extra",
+ "set_user",
+ "set_level",
+]
+
+
+def hubmethod(f):
+ # type: (F) -> F
+ f.__doc__ = "%s\n\n%s" % (
+ "Alias for :py:meth:`sentry_sdk.Hub.%s`" % f.__name__,
+ inspect.getdoc(getattr(Hub, f.__name__)),
+ )
+ return f
+
+
+def scopemethod(f):
+ # type: (F) -> F
+ f.__doc__ = "%s\n\n%s" % (
+ "Alias for :py:meth:`sentry_sdk.Scope.%s`" % f.__name__,
+ inspect.getdoc(getattr(Scope, f.__name__)),
+ )
+ return f
+
+
+@hubmethod
+def capture_event(
+ event, # type: Event
+ hint=None, # type: Optional[Hint]
+ scope=None, # type: Optional[Any]
+ **scope_args # type: Dict[str, Any]
+):
+ # type: (...) -> Optional[str]
+ hub = Hub.current
+ if hub is not None:
+ return hub.capture_event(event, hint, scope=scope, **scope_args)
+ return None
+
+
+@hubmethod
+def capture_message(
+ message, # type: str
+ level=None, # type: Optional[str]
+ scope=None, # type: Optional[Any]
+ **scope_args # type: Dict[str, Any]
+):
+ # type: (...) -> Optional[str]
+ hub = Hub.current
+ if hub is not None:
+ return hub.capture_message(message, level, scope=scope, **scope_args)
+ return None
+
+
+@hubmethod
+def capture_exception(
+ error=None, # type: Optional[BaseException]
+ scope=None, # type: Optional[Any]
+ **scope_args # type: Dict[str, Any]
+):
+ # type: (...) -> Optional[str]
+ hub = Hub.current
+ if hub is not None:
+ return hub.capture_exception(error, scope=scope, **scope_args)
+ return None
+
+
+@hubmethod
+def add_breadcrumb(
+ crumb=None, # type: Optional[Breadcrumb]
+ hint=None, # type: Optional[BreadcrumbHint]
+ **kwargs # type: Any
+):
+ # type: (...) -> None
+ hub = Hub.current
+ if hub is not None:
+ return hub.add_breadcrumb(crumb, hint, **kwargs)
+
+
+@overload # noqa
+def configure_scope():
+ # type: () -> ContextManager[Scope]
+ pass
+
+
+@overload # noqa
+def configure_scope(
+ callback, # type: Callable[[Scope], None]
+):
+ # type: (...) -> None
+ pass
+
+
+@hubmethod # noqa
+def configure_scope(
+ callback=None, # type: Optional[Callable[[Scope], None]]
+):
+ # type: (...) -> Optional[ContextManager[Scope]]
+ hub = Hub.current
+ if hub is not None:
+ return hub.configure_scope(callback)
+ elif callback is None:
+
+ @contextmanager
+ def inner():
+ yield Scope()
+
+ return inner()
+ else:
+ # returned if user provided callback
+ return None
+
+
+@overload # noqa
+def push_scope():
+ # type: () -> ContextManager[Scope]
+ pass
+
+
+@overload # noqa
+def push_scope(
+ callback, # type: Callable[[Scope], None]
+):
+ # type: (...) -> None
+ pass
+
+
+@hubmethod # noqa
+def push_scope(
+ callback=None, # type: Optional[Callable[[Scope], None]]
+):
+ # type: (...) -> Optional[ContextManager[Scope]]
+ hub = Hub.current
+ if hub is not None:
+ return hub.push_scope(callback)
+ elif callback is None:
+
+ @contextmanager
+ def inner():
+ yield Scope()
+
+ return inner()
+ else:
+ # returned if user provided callback
+ return None
+
+
+@scopemethod # noqa
+def set_tag(key, value):
+ # type: (str, Any) -> None
+ hub = Hub.current
+ if hub is not None:
+ hub.scope.set_tag(key, value)
+
+
+@scopemethod # noqa
+def set_context(key, value):
+ # type: (str, Any) -> None
+ hub = Hub.current
+ if hub is not None:
+ hub.scope.set_context(key, value)
+
+
+@scopemethod # noqa
+def set_extra(key, value):
+ # type: (str, Any) -> None
+ hub = Hub.current
+ if hub is not None:
+ hub.scope.set_extra(key, value)
+
+
+@scopemethod # noqa
+def set_user(value):
+ # type: (Dict[str, Any]) -> None
+ hub = Hub.current
+ if hub is not None:
+ hub.scope.set_user(value)
+
+
+@scopemethod # noqa
+def set_level(value):
+ # type: (str) -> None
+ hub = Hub.current
+ if hub is not None:
+ hub.scope.set_level(value)
+
+
+@hubmethod
+def flush(
+ timeout=None, # type: Optional[float]
+ callback=None, # type: Optional[Callable[[int, float], None]]
+):
+ # type: (...) -> None
+ hub = Hub.current
+ if hub is not None:
+ return hub.flush(timeout=timeout, callback=callback)
+
+
+@hubmethod
+def last_event_id():
+ # type: () -> Optional[str]
+ hub = Hub.current
+ if hub is not None:
+ return hub.last_event_id()
+ return None
+
+
+@hubmethod
+def start_span(
+ span=None, # type: Optional[Span]
+ **kwargs # type: Any
+):
+ # type: (...) -> Span
+
+ # TODO: All other functions in this module check for
+ # `Hub.current is None`. That actually should never happen?
+ return Hub.current.start_span(span=span, **kwargs)
diff --git a/third_party/python/sentry-sdk/sentry_sdk/client.py b/third_party/python/sentry-sdk/sentry_sdk/client.py
new file mode 100644
index 0000000000..c0fb8422d8
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/client.py
@@ -0,0 +1,406 @@
+import os
+import uuid
+import random
+from datetime import datetime
+import socket
+
+from sentry_sdk._compat import string_types, text_type, iteritems
+from sentry_sdk.utils import (
+ handle_in_app,
+ get_type_name,
+ capture_internal_exceptions,
+ current_stacktrace,
+ disable_capture_event,
+ logger,
+)
+from sentry_sdk.serializer import serialize
+from sentry_sdk.transport import make_transport
+from sentry_sdk.consts import DEFAULT_OPTIONS, SDK_INFO, ClientConstructor
+from sentry_sdk.integrations import setup_integrations
+from sentry_sdk.utils import ContextVar
+from sentry_sdk.sessions import SessionFlusher
+from sentry_sdk.envelope import Envelope
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Callable
+ from typing import Dict
+ from typing import List
+ from typing import Optional
+
+ from sentry_sdk.scope import Scope
+ from sentry_sdk._types import Event, Hint
+ from sentry_sdk.sessions import Session
+
+
+_client_init_debug = ContextVar("client_init_debug")
+
+
+def _get_options(*args, **kwargs):
+ # type: (*Optional[str], **Any) -> Dict[str, Any]
+ if args and (isinstance(args[0], (text_type, bytes, str)) or args[0] is None):
+ dsn = args[0] # type: Optional[str]
+ args = args[1:]
+ else:
+ dsn = None
+
+ rv = dict(DEFAULT_OPTIONS)
+ options = dict(*args, **kwargs)
+ if dsn is not None and options.get("dsn") is None:
+ options["dsn"] = dsn
+
+ for key, value in iteritems(options):
+ if key not in rv:
+ raise TypeError("Unknown option %r" % (key,))
+ rv[key] = value
+
+ if rv["dsn"] is None:
+ rv["dsn"] = os.environ.get("SENTRY_DSN")
+
+ if rv["release"] is None:
+ rv["release"] = os.environ.get("SENTRY_RELEASE")
+
+ if rv["environment"] is None:
+ rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT")
+
+ if rv["server_name"] is None and hasattr(socket, "gethostname"):
+ rv["server_name"] = socket.gethostname()
+
+ return rv
+
+
+class _Client(object):
+ """The client is internally responsible for capturing the events and
+ forwarding them to sentry through the configured transport. It takes
+ the client options as keyword arguments and optionally the DSN as first
+ argument.
+ """
+
+ def __init__(self, *args, **kwargs):
+ # type: (*Any, **Any) -> None
+ self.options = get_options(*args, **kwargs) # type: Dict[str, Any]
+ self._init_impl()
+
+ def __getstate__(self):
+ # type: () -> Any
+ return {"options": self.options}
+
+ def __setstate__(self, state):
+ # type: (Any) -> None
+ self.options = state["options"]
+ self._init_impl()
+
+ def _init_impl(self):
+ # type: () -> None
+ old_debug = _client_init_debug.get(False)
+
+ def _send_sessions(sessions):
+ # type: (List[Any]) -> None
+ transport = self.transport
+ if sessions and transport:
+ envelope = Envelope()
+ for session in sessions:
+ envelope.add_session(session)
+ transport.capture_envelope(envelope)
+
+ try:
+ _client_init_debug.set(self.options["debug"])
+ self.transport = make_transport(self.options)
+ self.session_flusher = SessionFlusher(flush_func=_send_sessions)
+
+ request_bodies = ("always", "never", "small", "medium")
+ if self.options["request_bodies"] not in request_bodies:
+ raise ValueError(
+ "Invalid value for request_bodies. Must be one of {}".format(
+ request_bodies
+ )
+ )
+
+ self.integrations = setup_integrations(
+ self.options["integrations"],
+ with_defaults=self.options["default_integrations"],
+ with_auto_enabling_integrations=self.options["_experiments"].get(
+ "auto_enabling_integrations", False
+ ),
+ )
+ finally:
+ _client_init_debug.set(old_debug)
+
+ @property
+ def dsn(self):
+ # type: () -> Optional[str]
+ """Returns the configured DSN as string."""
+ return self.options["dsn"]
+
+ def _prepare_event(
+ self,
+ event, # type: Event
+ hint, # type: Optional[Hint]
+ scope, # type: Optional[Scope]
+ ):
+ # type: (...) -> Optional[Event]
+
+ if event.get("timestamp") is None:
+ event["timestamp"] = datetime.utcnow()
+
+ hint = dict(hint or ()) # type: Hint
+
+ if scope is not None:
+ event_ = scope.apply_to_event(event, hint)
+ if event_ is None:
+ return None
+ event = event_
+
+ if (
+ self.options["attach_stacktrace"]
+ and "exception" not in event
+ and "stacktrace" not in event
+ and "threads" not in event
+ ):
+ with capture_internal_exceptions():
+ event["threads"] = {
+ "values": [
+ {
+ "stacktrace": current_stacktrace(
+ self.options["with_locals"]
+ ),
+ "crashed": False,
+ "current": True,
+ }
+ ]
+ }
+
+ for key in "release", "environment", "server_name", "dist":
+ if event.get(key) is None and self.options[key] is not None:
+ event[key] = text_type(self.options[key]).strip()
+ if event.get("sdk") is None:
+ sdk_info = dict(SDK_INFO)
+ sdk_info["integrations"] = sorted(self.integrations.keys())
+ event["sdk"] = sdk_info
+
+ if event.get("platform") is None:
+ event["platform"] = "python"
+
+ event = handle_in_app(
+ event, self.options["in_app_exclude"], self.options["in_app_include"]
+ )
+
+ # Postprocess the event here so that annotated types generally do not
+ # surface in before_send
+ if event is not None:
+ event = serialize(event)
+
+ before_send = self.options["before_send"]
+ if before_send is not None:
+ new_event = None
+ with capture_internal_exceptions():
+ new_event = before_send(event, hint or {})
+ if new_event is None:
+ logger.info("before send dropped event (%s)", event)
+ event = new_event # type: ignore
+
+ return event
+
+ def _is_ignored_error(self, event, hint):
+ # type: (Event, Hint) -> bool
+ exc_info = hint.get("exc_info")
+ if exc_info is None:
+ return False
+
+ type_name = get_type_name(exc_info[0])
+ full_name = "%s.%s" % (exc_info[0].__module__, type_name)
+
+ for errcls in self.options["ignore_errors"]:
+ # String types are matched against the type name in the
+ # exception only
+ if isinstance(errcls, string_types):
+ if errcls == full_name or errcls == type_name:
+ return True
+ else:
+ if issubclass(exc_info[0], errcls):
+ return True
+
+ return False
+
+ def _should_capture(
+ self,
+ event, # type: Event
+ hint, # type: Hint
+ scope=None, # type: Optional[Scope]
+ ):
+ # type: (...) -> bool
+ if scope is not None and not scope._should_capture:
+ return False
+
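+ # Client-side sampling: with e.g. sample_rate=0.25, roughly 75% of
+ # events are randomly dropped here before being prepared or sent.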
+ if (
+ self.options["sample_rate"] < 1.0
+ and random.random() >= self.options["sample_rate"]
+ ):
+ return False
+
+ if self._is_ignored_error(event, hint):
+ return False
+
+ return True
+
+ def _update_session_from_event(
+ self,
+ session, # type: Session
+ event, # type: Event
+ ):
+ # type: (...) -> None
+
+ crashed = False
+ errored = False
+ user_agent = None
+
+ # Figure out if this counts as an error and if we should mark the
+ # session as crashed.
+ level = event.get("level")
+ if level == "fatal":
+ crashed = True
+ if not crashed:
+ exceptions = (event.get("exception") or {}).get("values")
+ if exceptions:
+ errored = True
+ for error in exceptions:
+ mechanism = error.get("mechanism")
+ if mechanism and mechanism.get("handled") is False:
+ crashed = True
+ break
+
+ user = event.get("user")
+
+ if session.user_agent is None:
+ headers = (event.get("request") or {}).get("headers")
+ for (k, v) in iteritems(headers or {}):
+ if k.lower() == "user-agent":
+ user_agent = v
+ break
+
+ session.update(
+ status="crashed" if crashed else None,
+ user=user,
+ user_agent=user_agent,
+ errors=session.errors + (errored or crashed),
+ )
+
+ def capture_event(
+ self,
+ event, # type: Event
+ hint=None, # type: Optional[Hint]
+ scope=None, # type: Optional[Scope]
+ ):
+ # type: (...) -> Optional[str]
+ """Captures an event.
+
+ :param event: A ready-made event that can be directly sent to Sentry.
+
+ :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or an HTTP request object.
+
+ :returns: An event ID. May be `None` if there is no DSN set or if the SDK decided to discard the event for other reasons. In such situations setting `debug=True` on `init()` may help.
+ """
+ if disable_capture_event.get(False):
+ return None
+
+ if self.transport is None:
+ return None
+ if hint is None:
+ hint = {}
+ event_id = event.get("event_id")
+ if event_id is None:
+ event["event_id"] = event_id = uuid.uuid4().hex
+ if not self._should_capture(event, hint, scope):
+ return None
+ event_opt = self._prepare_event(event, hint, scope)
+ if event_opt is None:
+ return None
+
+ # whenever we capture an event we also check if the session needs
+ # to be updated based on that information.
+ session = scope._session if scope else None
+ if session:
+ self._update_session_from_event(session, event)
+
+ self.transport.capture_event(event_opt)
+ return event_id
+
+ def capture_session(
+ self, session # type: Session
+ ):
+ # type: (...) -> None
+ if not session.release:
+ logger.info("Discarded session update because of missing release")
+ else:
+ self.session_flusher.add_session(session)
+
+ def close(
+ self,
+ timeout=None, # type: Optional[float]
+ callback=None, # type: Optional[Callable[[int, float], None]]
+ ):
+ # type: (...) -> None
+ """
+ Close the client and shut down the transport. Arguments have the same
+ semantics as :py:meth:`Client.flush`.
+ """
+ if self.transport is not None:
+ self.flush(timeout=timeout, callback=callback)
+ self.session_flusher.kill()
+ self.transport.kill()
+ self.transport = None
+
+ def flush(
+ self,
+ timeout=None, # type: Optional[float]
+ callback=None, # type: Optional[Callable[[int, float], None]]
+ ):
+ # type: (...) -> None
+ """
+ Wait for the current events to be sent.
+
+ :param timeout: Wait for at most `timeout` seconds. If no `timeout` is provided, the `shutdown_timeout` option value is used.
+
+ :param callback: Is invoked with the number of pending events and the configured timeout.
+ """
+ if self.transport is not None:
+ if timeout is None:
+ timeout = self.options["shutdown_timeout"]
+ self.session_flusher.flush()
+ self.transport.flush(timeout=timeout, callback=callback)
+
+ def __enter__(self):
+ # type: () -> _Client
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ # type: (Any, Any, Any) -> None
+ self.close()
+
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ # Make mypy, PyCharm and other static analyzers think `get_options` is a
+ # type to have nicer autocompletion for params.
+ #
+ # Use `ClientConstructor` to define the argument types of `init` and
+ # `Dict[str, Any]` to tell static analyzers about the return type.
+
+ class get_options(ClientConstructor, Dict[str, Any]): # noqa: N801
+ pass
+
+ class Client(ClientConstructor, _Client):
+ pass
+
+
+else:
+ # Alias `get_options` for actual usage. Go through the lambda indirection
+ # to throw PyCharm off of the weakly typed signature (it would otherwise
+ # discover both the weakly typed signature of `_init` and our faked `init`
+ # type).
+
+ get_options = (lambda: _get_options)()
+ Client = (lambda: _Client)()
diff --git a/third_party/python/sentry-sdk/sentry_sdk/consts.py b/third_party/python/sentry-sdk/sentry_sdk/consts.py
new file mode 100644
index 0000000000..2fe012e66d
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/consts.py
@@ -0,0 +1,97 @@
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Optional
+ from typing import Callable
+ from typing import Union
+ from typing import List
+ from typing import Type
+ from typing import Dict
+ from typing import Any
+ from typing import Sequence
+ from typing_extensions import TypedDict
+
+ from sentry_sdk.transport import Transport
+ from sentry_sdk.integrations import Integration
+
+ from sentry_sdk._types import Event, EventProcessor, BreadcrumbProcessor
+
+ # Experiments are feature flags to enable and disable certain unstable SDK
+ # functionality. Changing them from the defaults (`None`) in production
+ # code is highly discouraged. They are not subject to any stability
+ # guarantees such as the ones from semantic versioning.
+ Experiments = TypedDict(
+ "Experiments",
+ {
+ "max_spans": Optional[int],
+ "record_sql_params": Optional[bool],
+ "auto_enabling_integrations": Optional[bool],
+ "auto_session_tracking": Optional[bool],
+ },
+ total=False,
+ )
+
+
+# This type exists to trick mypy and PyCharm into thinking `init` and `Client`
+# take these arguments (even though they take opaque **kwargs)
+class ClientConstructor(object):
+ def __init__(
+ self,
+ dsn=None, # type: Optional[str]
+ with_locals=True, # type: bool
+ max_breadcrumbs=100, # type: int
+ release=None, # type: Optional[str]
+ environment=None, # type: Optional[str]
+ server_name=None, # type: Optional[str]
+ shutdown_timeout=2, # type: int
+ integrations=[], # type: Sequence[Integration] # noqa: B006
+ in_app_include=[], # type: List[str] # noqa: B006
+ in_app_exclude=[], # type: List[str] # noqa: B006
+ default_integrations=True, # type: bool
+ dist=None, # type: Optional[str]
+ transport=None, # type: Optional[Union[Transport, Type[Transport], Callable[[Event], None]]]
+ sample_rate=1.0, # type: float
+ send_default_pii=False, # type: bool
+ http_proxy=None, # type: Optional[str]
+ https_proxy=None, # type: Optional[str]
+ ignore_errors=[], # type: List[Union[type, str]] # noqa: B006
+ request_bodies="medium", # type: str
+ before_send=None, # type: Optional[EventProcessor]
+ before_breadcrumb=None, # type: Optional[BreadcrumbProcessor]
+ debug=False, # type: bool
+ attach_stacktrace=False, # type: bool
+ ca_certs=None, # type: Optional[str]
+ propagate_traces=True, # type: bool
+ # DO NOT ENABLE THIS RIGHT NOW UNLESS YOU WANT TO EXCEED YOUR EVENT QUOTA IMMEDIATELY
+ traces_sample_rate=0.0, # type: float
+ traceparent_v2=False, # type: bool
+ _experiments={}, # type: Experiments # noqa: B006
+ ):
+ # type: (...) -> None
+ pass
+
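+# Illustrative sketch of how these constructor options are typically passed to
+# sentry_sdk.init(); all values below are placeholders:
+#
+#     import sentry_sdk
+#     sentry_sdk.init(
+#         dsn="https://<key>@<host>/<project>",
+#         release="myapp@1.2.3",
+#         environment="production",
+#         sample_rate=0.5,       # keep roughly half of the error events
+#         max_breadcrumbs=50,
+#     )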
+
+def _get_default_options():
+ # type: () -> Dict[str, Any]
+ import inspect
+
+ if hasattr(inspect, "getfullargspec"):
+ getargspec = inspect.getfullargspec
+ else:
+ getargspec = inspect.getargspec # type: ignore
+
+ a = getargspec(ClientConstructor.__init__)
+ defaults = a.defaults or ()
+ return dict(zip(a.args[-len(defaults) :], defaults))
+
+
+DEFAULT_OPTIONS = _get_default_options()
+del _get_default_options
+
+
+VERSION = "0.14.3"
+SDK_INFO = {
+ "name": "sentry.python",
+ "version": VERSION,
+ "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
+}
diff --git a/third_party/python/sentry-sdk/sentry_sdk/debug.py b/third_party/python/sentry-sdk/sentry_sdk/debug.py
new file mode 100644
index 0000000000..fe8ae50cea
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/debug.py
@@ -0,0 +1,44 @@
+import sys
+import logging
+
+from sentry_sdk import utils
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import logger
+from sentry_sdk.client import _client_init_debug
+from logging import LogRecord
+
+
+class _HubBasedClientFilter(logging.Filter):
+ def filter(self, record):
+ # type: (LogRecord) -> bool
+ if _client_init_debug.get(False):
+ return True
+ hub = Hub.current
+ if hub is not None and hub.client is not None:
+ return hub.client.options["debug"]
+ return False
+
+
+def init_debug_support():
+ # type: () -> None
+ if not logger.handlers:
+ configure_logger()
+ configure_debug_hub()
+
+
+def configure_logger():
+ # type: () -> None
+ _handler = logging.StreamHandler(sys.stderr)
+ _handler.setFormatter(logging.Formatter(" [sentry] %(levelname)s: %(message)s"))
+ logger.addHandler(_handler)
+ logger.setLevel(logging.DEBUG)
+ logger.addFilter(_HubBasedClientFilter())
+
+
+def configure_debug_hub():
+ # type: () -> None
+ def _get_debug_hub():
+ # type: () -> Hub
+ return Hub.current
+
+ utils._get_debug_hub = _get_debug_hub
diff --git a/third_party/python/sentry-sdk/sentry_sdk/envelope.py b/third_party/python/sentry-sdk/sentry_sdk/envelope.py
new file mode 100644
index 0000000000..fd08553249
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/envelope.py
@@ -0,0 +1,293 @@
+import io
+import json
+import shutil
+import mimetypes
+
+from sentry_sdk._compat import text_type
+from sentry_sdk._types import MYPY
+from sentry_sdk.sessions import Session
+
+if MYPY:
+ from typing import Any
+ from typing import Tuple
+ from typing import Optional
+ from typing import Union
+ from typing import Dict
+ from typing import List
+ from typing import Iterator
+
+ from sentry_sdk._types import Event, EventDataCategory
+
+
+def get_event_data_category(event):
+ # type: (Event) -> EventDataCategory
+ if event.get("type") == "transaction":
+ return "transaction"
+ return "error"
+
+
+class Envelope(object):
+ def __init__(
+ self,
+ headers=None, # type: Optional[Dict[str, str]]
+ items=None, # type: Optional[List[Item]]
+ ):
+ # type: (...) -> None
+ if headers is not None:
+ headers = dict(headers)
+ self.headers = headers or {}
+ if items is None:
+ items = []
+ else:
+ items = list(items)
+ self.items = items
+
+ @property
+ def description(self):
+ # type: (...) -> str
+ return "envelope with %s items (%s)" % (
+ len(self.items),
+ ", ".join(x.data_category for x in self.items),
+ )
+
+ def add_event(
+ self, event # type: Event
+ ):
+ # type: (...) -> None
+ self.add_item(Item(payload=PayloadRef(json=event), type="event"))
+
+ def add_session(
+ self, session # type: Union[Session, Any]
+ ):
+ # type: (...) -> None
+ if isinstance(session, Session):
+ session = session.to_json()
+ self.add_item(Item(payload=PayloadRef(json=session), type="session"))
+
+ def add_item(
+ self, item # type: Item
+ ):
+ # type: (...) -> None
+ self.items.append(item)
+
+ def get_event(self):
+ # type: (...) -> Optional[Event]
+ for items in self.items:
+ event = items.get_event()
+ if event is not None:
+ return event
+ return None
+
+ def __iter__(self):
+ # type: (...) -> Iterator[Item]
+ return iter(self.items)
+
+ def serialize_into(
+ self, f # type: Any
+ ):
+ # type: (...) -> None
+ f.write(json.dumps(self.headers).encode("utf-8"))
+ f.write(b"\n")
+ for item in self.items:
+ item.serialize_into(f)
+
+ def serialize(self):
+ # type: (...) -> bytes
+ out = io.BytesIO()
+ self.serialize_into(out)
+ return out.getvalue()
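+
+    # Sketch of the wire format produced by serialize()/serialize_into(): one
+    # JSON envelope-header line, then for each item a JSON item-header line
+    # (with "length" filled in) followed by the payload bytes and a newline,
+    # e.g. (values illustrative):
+    #
+    #     {"event_id": "..."}
+    #     {"type": "event", "content_type": "application/json", "length": 2}
+    #     {}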
+
+ @classmethod
+ def deserialize_from(
+ cls, f # type: Any
+ ):
+ # type: (...) -> Envelope
+ headers = json.loads(f.readline())
+ items = []
+ while 1:
+ item = Item.deserialize_from(f)
+ if item is None:
+ break
+ items.append(item)
+ return cls(headers=headers, items=items)
+
+ @classmethod
+ def deserialize(
+ cls, bytes # type: bytes
+ ):
+ # type: (...) -> Envelope
+ return cls.deserialize_from(io.BytesIO(bytes))
+
+ def __repr__(self):
+ # type: (...) -> str
+ return "<Envelope headers=%r items=%r>" % (self.headers, self.items)
+
+
+class PayloadRef(object):
+ def __init__(
+ self,
+ bytes=None, # type: Optional[bytes]
+ path=None, # type: Optional[Union[bytes, text_type]]
+ json=None, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+ self.json = json
+ self.bytes = bytes
+ self.path = path
+
+ def get_bytes(self):
+ # type: (...) -> bytes
+ if self.bytes is None:
+ if self.path is not None:
+ with open(self.path, "rb") as f:
+ self.bytes = f.read()
+ elif self.json is not None:
+ self.bytes = json.dumps(self.json).encode("utf-8")
+ else:
+ self.bytes = b""
+ return self.bytes
+
+ def _prepare_serialize(self):
+ # type: (...) -> Tuple[Any, Any]
+ if self.path is not None and self.bytes is None:
+ f = open(self.path, "rb")
+ f.seek(0, 2)
+ length = f.tell()
+ f.seek(0, 0)
+
+ def writer(out):
+ # type: (Any) -> None
+ try:
+ shutil.copyfileobj(f, out)
+ finally:
+ f.close()
+
+ return length, writer
+
+ bytes = self.get_bytes()
+ return len(bytes), lambda f: f.write(bytes)
+
+ @property
+ def inferred_content_type(self):
+ # type: (...) -> str
+ if self.json is not None:
+ return "application/json"
+ elif self.path is not None:
+ path = self.path
+ if isinstance(path, bytes):
+ path = path.decode("utf-8", "replace")
+ ty = mimetypes.guess_type(path)[0]
+ if ty:
+ return ty
+ return "application/octet-stream"
+
+ def __repr__(self):
+ # type: (...) -> str
+ return "<Payload %r>" % (self.inferred_content_type,)
+
+
+class Item(object):
+ def __init__(
+ self,
+ payload, # type: Union[bytes, text_type, PayloadRef]
+ headers=None, # type: Optional[Dict[str, str]]
+ type=None, # type: Optional[str]
+ content_type=None, # type: Optional[str]
+ filename=None, # type: Optional[str]
+ ):
+ if headers is not None:
+ headers = dict(headers)
+        else:
+ headers = {}
+ self.headers = headers
+ if isinstance(payload, bytes):
+ payload = PayloadRef(bytes=payload)
+ elif isinstance(payload, text_type):
+ payload = PayloadRef(bytes=payload.encode("utf-8"))
+ else:
+ payload = payload
+
+ if filename is not None:
+ headers["filename"] = filename
+ if type is not None:
+ headers["type"] = type
+ if content_type is not None:
+ headers["content_type"] = content_type
+ elif "content_type" not in headers:
+ headers["content_type"] = payload.inferred_content_type
+
+ self.payload = payload
+
+ def __repr__(self):
+ # type: (...) -> str
+ return "<Item headers=%r payload=%r data_category=%r>" % (
+ self.headers,
+ self.payload,
+ self.data_category,
+ )
+
+ @property
+ def data_category(self):
+ # type: (...) -> EventDataCategory
+ rv = "default" # type: Any
+ event = self.get_event()
+ if event is not None:
+ rv = get_event_data_category(event)
+ else:
+ ty = self.headers.get("type")
+ if ty in ("session", "attachment"):
+ rv = ty
+ return rv
+
+ def get_bytes(self):
+ # type: (...) -> bytes
+ return self.payload.get_bytes()
+
+ def get_event(self):
+ # type: (...) -> Optional[Event]
+ if self.headers.get("type") == "event" and self.payload.json is not None:
+ return self.payload.json
+ return None
+
+ def serialize_into(
+ self, f # type: Any
+ ):
+ # type: (...) -> None
+ headers = dict(self.headers)
+ length, writer = self.payload._prepare_serialize()
+ headers["length"] = length
+ f.write(json.dumps(headers).encode("utf-8"))
+ f.write(b"\n")
+ writer(f)
+ f.write(b"\n")
+
+ def serialize(self):
+ # type: (...) -> bytes
+ out = io.BytesIO()
+ self.serialize_into(out)
+ return out.getvalue()
+
+ @classmethod
+ def deserialize_from(
+ cls, f # type: Any
+ ):
+ # type: (...) -> Optional[Item]
+ line = f.readline().rstrip()
+ if not line:
+ return None
+ headers = json.loads(line)
+ length = headers["length"]
+ payload = f.read(length)
+ if headers.get("type") == "event":
+ rv = cls(headers=headers, payload=PayloadRef(json=json.loads(payload)))
+ else:
+ rv = cls(headers=headers, payload=payload)
+ f.readline()
+ return rv
+
+ @classmethod
+ def deserialize(
+ cls, bytes # type: bytes
+ ):
+ # type: (...) -> Optional[Item]
+ return cls.deserialize_from(io.BytesIO(bytes))
diff --git a/third_party/python/sentry-sdk/sentry_sdk/hub.py b/third_party/python/sentry-sdk/sentry_sdk/hub.py
new file mode 100644
index 0000000000..f0060b9d79
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/hub.py
@@ -0,0 +1,647 @@
+import copy
+import random
+import sys
+
+from datetime import datetime
+from contextlib import contextmanager
+
+from sentry_sdk._compat import with_metaclass
+from sentry_sdk.scope import Scope
+from sentry_sdk.client import Client
+from sentry_sdk.tracing import Span
+from sentry_sdk.sessions import Session
+from sentry_sdk.utils import (
+ exc_info_from_error,
+ event_from_exception,
+ logger,
+ ContextVar,
+)
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Union
+ from typing import Any
+ from typing import Optional
+ from typing import Tuple
+ from typing import Dict
+ from typing import List
+ from typing import Callable
+ from typing import Generator
+ from typing import Type
+ from typing import TypeVar
+ from typing import overload
+ from typing import ContextManager
+
+ from sentry_sdk.integrations import Integration
+ from sentry_sdk._types import (
+ Event,
+ Hint,
+ Breadcrumb,
+ BreadcrumbHint,
+ ExcInfo,
+ )
+ from sentry_sdk.consts import ClientConstructor
+
+ T = TypeVar("T")
+
+else:
+
+ def overload(x):
+ # type: (T) -> T
+ return x
+
+
+_local = ContextVar("sentry_current_hub")
+
+
+def _update_scope(base, scope_change, scope_kwargs):
+ # type: (Scope, Optional[Any], Dict[str, Any]) -> Scope
+ if scope_change and scope_kwargs:
+ raise TypeError("cannot provide scope and kwargs")
+ if scope_change is not None:
+ final_scope = copy.copy(base)
+ if callable(scope_change):
+ scope_change(final_scope)
+ else:
+ final_scope.update_from_scope(scope_change)
+ elif scope_kwargs:
+ final_scope = copy.copy(base)
+ final_scope.update_from_kwargs(scope_kwargs)
+ else:
+ final_scope = base
+ return final_scope
+
+
+def _should_send_default_pii():
+ # type: () -> bool
+ client = Hub.current.client
+ if not client:
+ return False
+ return client.options["send_default_pii"]
+
+
+class _InitGuard(object):
+ def __init__(self, client):
+ # type: (Client) -> None
+ self._client = client
+
+ def __enter__(self):
+ # type: () -> _InitGuard
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ # type: (Any, Any, Any) -> None
+ c = self._client
+ if c is not None:
+ c.close()
+
+
+def _init(*args, **kwargs):
+ # type: (*Optional[str], **Any) -> ContextManager[Any]
+ """Initializes the SDK and optionally integrations.
+
+ This takes the same arguments as the client constructor.
+ """
+ client = Client(*args, **kwargs) # type: ignore
+ Hub.current.bind_client(client)
+ rv = _InitGuard(client)
+ return rv
+
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ # Make mypy, PyCharm and other static analyzers think `init` is a type to
+ # have nicer autocompletion for params.
+ #
+ # Use `ClientConstructor` to define the argument types of `init` and
+ # `ContextManager[Any]` to tell static analyzers about the return type.
+
+ class init(ClientConstructor, ContextManager[Any]): # noqa: N801
+ pass
+
+
+else:
+ # Alias `init` for actual usage. Go through the lambda indirection to throw
+ # PyCharm off of the weakly typed signature (it would otherwise discover
+ # both the weakly typed signature of `_init` and our faked `init` type).
+
+ init = (lambda: _init)()
+
+
+class HubMeta(type):
+ @property
+ def current(cls):
+ # type: () -> Hub
+ """Returns the current instance of the hub."""
+ rv = _local.get(None)
+ if rv is None:
+ rv = Hub(GLOBAL_HUB)
+ _local.set(rv)
+ return rv
+
+ @property
+ def main(cls):
+ # type: () -> Hub
+ """Returns the main instance of the hub."""
+ return GLOBAL_HUB
+
+
+class _ScopeManager(object):
+ def __init__(self, hub):
+ # type: (Hub) -> None
+ self._hub = hub
+ self._original_len = len(hub._stack)
+ self._layer = hub._stack[-1]
+
+ def __enter__(self):
+ # type: () -> Scope
+ scope = self._layer[1]
+ assert scope is not None
+ return scope
+
+ def __exit__(self, exc_type, exc_value, tb):
+ # type: (Any, Any, Any) -> None
+ current_len = len(self._hub._stack)
+ if current_len < self._original_len:
+ logger.error(
+ "Scope popped too soon. Popped %s scopes too many.",
+ self._original_len - current_len,
+ )
+ return
+ elif current_len > self._original_len:
+ logger.warning(
+ "Leaked %s scopes: %s",
+ current_len - self._original_len,
+ self._hub._stack[self._original_len :],
+ )
+
+ layer = self._hub._stack[self._original_len - 1]
+ del self._hub._stack[self._original_len - 1 :]
+
+ if layer[1] != self._layer[1]:
+ logger.error(
+ "Wrong scope found. Meant to pop %s, but popped %s.",
+ layer[1],
+ self._layer[1],
+ )
+ elif layer[0] != self._layer[0]:
+ warning = (
+ "init() called inside of pushed scope. This might be entirely "
+ "legitimate but usually occurs when initializing the SDK inside "
+ "a request handler or task/job function. Try to initialize the "
+ "SDK as early as possible instead."
+ )
+ logger.warning(warning)
+
+
+class Hub(with_metaclass(HubMeta)): # type: ignore
+ """The hub wraps the concurrency management of the SDK. Each thread has
+ its own hub but the hub might transfer with the flow of execution if
+ context vars are available.
+
+ If the hub is used with a with statement it's temporarily activated.
+ """
+
+ _stack = None # type: List[Tuple[Optional[Client], Scope]]
+
+ # Mypy doesn't pick up on the metaclass.
+
+ if MYPY:
+ current = None # type: Hub
+ main = None # type: Hub
+
+ def __init__(
+ self,
+ client_or_hub=None, # type: Optional[Union[Hub, Client]]
+ scope=None, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+ if isinstance(client_or_hub, Hub):
+ hub = client_or_hub
+ client, other_scope = hub._stack[-1]
+ if scope is None:
+ scope = copy.copy(other_scope)
+ else:
+ client = client_or_hub
+ if scope is None:
+ scope = Scope()
+
+ self._stack = [(client, scope)]
+ self._last_event_id = None # type: Optional[str]
+ self._old_hubs = [] # type: List[Hub]
+
+ def __enter__(self):
+ # type: () -> Hub
+ self._old_hubs.append(Hub.current)
+ _local.set(self)
+ return self
+
+ def __exit__(
+ self,
+ exc_type, # type: Optional[type]
+ exc_value, # type: Optional[BaseException]
+ tb, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+ old = self._old_hubs.pop()
+ _local.set(old)
+
+ def run(
+ self, callback # type: Callable[[], T]
+ ):
+ # type: (...) -> T
+ """Runs a callback in the context of the hub. Alternatively the
+ with statement can be used on the hub directly.
+ """
+ with self:
+ return callback()
+
+ def get_integration(
+ self, name_or_class # type: Union[str, Type[Integration]]
+ ):
+ # type: (...) -> Any
+ """Returns the integration for this hub by name or class. If there
+ is no client bound or the client does not have that integration
+ then `None` is returned.
+
+ If the return value is not `None` the hub is guaranteed to have a
+ client attached.
+ """
+ if isinstance(name_or_class, str):
+ integration_name = name_or_class
+ elif name_or_class.identifier is not None:
+ integration_name = name_or_class.identifier
+ else:
+ raise ValueError("Integration has no name")
+
+ client = self._stack[-1][0]
+ if client is not None:
+ rv = client.integrations.get(integration_name)
+ if rv is not None:
+ return rv
+
+ @property
+ def client(self):
+ # type: () -> Optional[Client]
+ """Returns the current client on the hub."""
+ return self._stack[-1][0]
+
+ @property
+ def scope(self):
+ # type: () -> Scope
+ """Returns the current scope on the hub."""
+ return self._stack[-1][1]
+
+ def last_event_id(self):
+ # type: () -> Optional[str]
+ """Returns the last event ID."""
+ return self._last_event_id
+
+ def bind_client(
+ self, new # type: Optional[Client]
+ ):
+ # type: (...) -> None
+ """Binds a new client to the hub."""
+ top = self._stack[-1]
+ self._stack[-1] = (new, top[1])
+
+ def capture_event(
+ self,
+ event, # type: Event
+ hint=None, # type: Optional[Hint]
+ scope=None, # type: Optional[Any]
+ **scope_args # type: Dict[str, Any]
+ ):
+ # type: (...) -> Optional[str]
+ """Captures an event. Alias of :py:meth:`sentry_sdk.Client.capture_event`.
+ """
+ client, top_scope = self._stack[-1]
+ scope = _update_scope(top_scope, scope, scope_args)
+ if client is not None:
+ rv = client.capture_event(event, hint, scope)
+ if rv is not None:
+ self._last_event_id = rv
+ return rv
+ return None
+
+ def capture_message(
+ self,
+ message, # type: str
+ level=None, # type: Optional[str]
+ scope=None, # type: Optional[Any]
+ **scope_args # type: Dict[str, Any]
+ ):
+ # type: (...) -> Optional[str]
+ """Captures a message. The message is just a string. If no level
+ is provided the default level is `info`.
+
+ :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+ """
+ if self.client is None:
+ return None
+ if level is None:
+ level = "info"
+ return self.capture_event(
+ {"message": message, "level": level}, scope=scope, **scope_args
+ )
+
+ def capture_exception(
+ self,
+ error=None, # type: Optional[Union[BaseException, ExcInfo]]
+ scope=None, # type: Optional[Any]
+ **scope_args # type: Dict[str, Any]
+ ):
+ # type: (...) -> Optional[str]
+ """Captures an exception.
+
+ :param error: An exception to catch. If `None`, `sys.exc_info()` will be used.
+
+ :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+ """
+ client = self.client
+ if client is None:
+ return None
+ if error is not None:
+ exc_info = exc_info_from_error(error)
+ else:
+ exc_info = sys.exc_info()
+
+ event, hint = event_from_exception(exc_info, client_options=client.options)
+ try:
+ return self.capture_event(event, hint=hint, scope=scope, **scope_args)
+ except Exception:
+ self._capture_internal_exception(sys.exc_info())
+
+ return None
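+
+    # Illustrative call pattern: capture the exception currently being
+    # handled via the module-level helper (sketch only):
+    #
+    #     import sentry_sdk
+    #     try:
+    #         1 / 0
+    #     except ZeroDivisionError:
+    #         sentry_sdk.capture_exception()  # falls back to sys.exc_info()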
+
+ def _capture_internal_exception(
+ self, exc_info # type: Any
+ ):
+ # type: (...) -> Any
+ """
+ Capture an exception that is likely caused by a bug in the SDK
+ itself.
+
+ These exceptions do not end up in Sentry and are just logged instead.
+ """
+ logger.error("Internal error in sentry_sdk", exc_info=exc_info)
+
+ def add_breadcrumb(
+ self,
+ crumb=None, # type: Optional[Breadcrumb]
+ hint=None, # type: Optional[BreadcrumbHint]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """
+ Adds a breadcrumb.
+
+ :param crumb: Dictionary with the data as the sentry v7/v8 protocol expects.
+
+ :param hint: An optional value that can be used by `before_breadcrumb`
+ to customize the breadcrumbs that are emitted.
+ """
+ client, scope = self._stack[-1]
+ if client is None:
+ logger.info("Dropped breadcrumb because no client bound")
+ return
+
+ crumb = dict(crumb or ()) # type: Breadcrumb
+ crumb.update(kwargs)
+ if not crumb:
+ return
+
+ hint = dict(hint or ()) # type: Hint
+
+ if crumb.get("timestamp") is None:
+ crumb["timestamp"] = datetime.utcnow()
+ if crumb.get("type") is None:
+ crumb["type"] = "default"
+
+ if client.options["before_breadcrumb"] is not None:
+ new_crumb = client.options["before_breadcrumb"](crumb, hint)
+ else:
+ new_crumb = crumb
+
+ if new_crumb is not None:
+ scope._breadcrumbs.append(new_crumb)
+ else:
+ logger.info("before breadcrumb dropped breadcrumb (%s)", crumb)
+
+ max_breadcrumbs = client.options["max_breadcrumbs"] # type: int
+ while len(scope._breadcrumbs) > max_breadcrumbs:
+ scope._breadcrumbs.popleft()
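+
+    # Illustrative breadcrumb sketch (field values are placeholders); crumbs
+    # are stored on the scope and attached to the next captured event:
+    #
+    #     import sentry_sdk
+    #     sentry_sdk.add_breadcrumb(
+    #         category="auth",
+    #         message="user logged in",
+    #         level="info",
+    #     )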
+
+ def start_span(
+ self,
+ span=None, # type: Optional[Span]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Span
+ """
+ Create a new span whose parent span is the currently active
+ span, if any. The return value is the span object that can
+ be used as a context manager to start and stop timing.
+
+ Note that you will not see any span that is not contained
+ within a transaction. Create a transaction with
+ ``start_span(transaction="my transaction")`` if an
+ integration doesn't already do this for you.
+ """
+
+ client, scope = self._stack[-1]
+
+ kwargs.setdefault("hub", self)
+
+ if span is None:
+ span = scope.span
+ if span is not None:
+ span = span.new_span(**kwargs)
+ else:
+ span = Span(**kwargs)
+
+ if span.sampled is None and span.transaction is not None:
+ sample_rate = client and client.options["traces_sample_rate"] or 0
+ span.sampled = random.random() < sample_rate
+
+ if span.sampled:
+ max_spans = (
+ client and client.options["_experiments"].get("max_spans") or 1000
+ )
+ span.init_finished_spans(maxlen=max_spans)
+
+ return span
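+
+    # Minimal tracing sketch, assuming a non-zero traces_sample_rate was
+    # configured: a span created with a `transaction` name starts a
+    # transaction, and spans started inside it become child spans.
+    #
+    #     import sentry_sdk
+    #     with sentry_sdk.start_span(transaction="process_batch"):
+    #         with sentry_sdk.start_span(op="db", description="SELECT ..."):
+    #             pass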
+
+ @overload # noqa
+ def push_scope(
+ self, callback=None # type: Optional[None]
+ ):
+ # type: (...) -> ContextManager[Scope]
+ pass
+
+ @overload # noqa
+ def push_scope(
+ self, callback # type: Callable[[Scope], None]
+ ):
+ # type: (...) -> None
+ pass
+
+ def push_scope( # noqa
+ self, callback=None # type: Optional[Callable[[Scope], None]]
+ ):
+ # type: (...) -> Optional[ContextManager[Scope]]
+ """
+ Pushes a new layer on the scope stack.
+
+ :param callback: If provided, this method pushes a scope, calls
+ `callback`, and pops the scope again.
+
+ :returns: If no `callback` is provided, a context manager that should
+ be used to pop the scope again.
+ """
+ if callback is not None:
+ with self.push_scope() as scope:
+ callback(scope)
+ return None
+
+ client, scope = self._stack[-1]
+ new_layer = (client, copy.copy(scope))
+ self._stack.append(new_layer)
+
+ return _ScopeManager(self)
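+
+    # Usage sketch: as a context manager, push_scope isolates tags and other
+    # scope data so they only apply to events captured inside the block:
+    #
+    #     import sentry_sdk
+    #     with sentry_sdk.push_scope() as scope:
+    #         scope.set_tag("section", "checkout")
+    #         sentry_sdk.capture_message("something went wrong")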
+
+ def pop_scope_unsafe(self):
+ # type: () -> Tuple[Optional[Client], Scope]
+ """
+ Pops a scope layer from the stack.
+
+ Try to use the context manager :py:meth:`push_scope` instead.
+ """
+ rv = self._stack.pop()
+ assert self._stack, "stack must have at least one layer"
+ return rv
+
+ @overload # noqa
+ def configure_scope(
+ self, callback=None # type: Optional[None]
+ ):
+ # type: (...) -> ContextManager[Scope]
+ pass
+
+ @overload # noqa
+ def configure_scope(
+ self, callback # type: Callable[[Scope], None]
+ ):
+ # type: (...) -> None
+ pass
+
+ def configure_scope( # noqa
+ self, callback=None # type: Optional[Callable[[Scope], None]]
+ ): # noqa
+ # type: (...) -> Optional[ContextManager[Scope]]
+
+ """
+ Reconfigures the scope.
+
+ :param callback: If provided, call the callback with the current scope.
+
+ :returns: If no callback is provided, returns a context manager that returns the scope.
+ """
+
+ client, scope = self._stack[-1]
+ if callback is not None:
+ if client is not None:
+ callback(scope)
+
+ return None
+
+ @contextmanager
+ def inner():
+ # type: () -> Generator[Scope, None, None]
+ if client is not None:
+ yield scope
+ else:
+ yield Scope()
+
+ return inner()
+
+ def start_session(self):
+ # type: (...) -> None
+ """Starts a new session."""
+ self.end_session()
+ client, scope = self._stack[-1]
+ scope._session = Session(
+ release=client.options["release"] if client else None,
+ environment=client.options["environment"] if client else None,
+ user=scope._user,
+ )
+
+ def end_session(self):
+ # type: (...) -> None
+ """Ends the current session if there is one."""
+ client, scope = self._stack[-1]
+ session = scope._session
+ if session is not None:
+ session.close()
+ if client is not None:
+ client.capture_session(session)
+ self._stack[-1][1]._session = None
+
+ def stop_auto_session_tracking(self):
+ # type: (...) -> None
+ """Stops automatic session tracking.
+
+        This temporarily disables session tracking for the current scope when called.
+ To resume session tracking call `resume_auto_session_tracking`.
+ """
+ self.end_session()
+ client, scope = self._stack[-1]
+ scope._force_auto_session_tracking = False
+
+ def resume_auto_session_tracking(self):
+ # type: (...) -> None
+ """Resumes automatic session tracking for the current scope if
+ disabled earlier. This requires that generally automatic session
+ tracking is enabled.
+ """
+ client, scope = self._stack[-1]
+ scope._force_auto_session_tracking = None
+
+ def flush(
+ self,
+ timeout=None, # type: Optional[float]
+ callback=None, # type: Optional[Callable[[int, float], None]]
+ ):
+ # type: (...) -> None
+ """
+ Alias for :py:meth:`sentry_sdk.Client.flush`
+ """
+ client, scope = self._stack[-1]
+ if client is not None:
+ return client.flush(timeout=timeout, callback=callback)
+
+ def iter_trace_propagation_headers(self):
+ # type: () -> Generator[Tuple[str, str], None, None]
+ # TODO: Document
+ client, scope = self._stack[-1]
+ span = scope.span
+
+ if span is None:
+ return
+
+ propagate_traces = client and client.options["propagate_traces"]
+ if not propagate_traces:
+ return
+
+ if client and client.options["traceparent_v2"]:
+ traceparent = span.to_traceparent()
+ else:
+ traceparent = span.to_legacy_traceparent()
+
+ yield "sentry-trace", traceparent
+
+
+GLOBAL_HUB = Hub()
+_local.set(GLOBAL_HUB)
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/__init__.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/__init__.py
new file mode 100644
index 0000000000..f264bc4855
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/__init__.py
@@ -0,0 +1,183 @@
+"""This package"""
+from __future__ import absolute_import
+
+from threading import Lock
+
+from sentry_sdk._compat import iteritems
+from sentry_sdk.utils import logger
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Callable
+ from typing import Dict
+ from typing import Iterator
+ from typing import List
+ from typing import Set
+ from typing import Tuple
+ from typing import Type
+
+
+_installer_lock = Lock()
+_installed_integrations = set() # type: Set[str]
+
+
+def _generate_default_integrations_iterator(integrations, auto_enabling_integrations):
+ # type: (Tuple[str, ...], Tuple[str, ...]) -> Callable[[bool], Iterator[Type[Integration]]]
+
+ def iter_default_integrations(with_auto_enabling_integrations):
+ # type: (bool) -> Iterator[Type[Integration]]
+ """Returns an iterator of the default integration classes:
+ """
+ from importlib import import_module
+
+ if with_auto_enabling_integrations:
+ all_import_strings = integrations + auto_enabling_integrations
+ else:
+ all_import_strings = integrations
+
+ for import_string in all_import_strings:
+ try:
+ module, cls = import_string.rsplit(".", 1)
+ yield getattr(import_module(module), cls)
+ except (DidNotEnable, SyntaxError) as e:
+ logger.debug(
+ "Did not import default integration %s: %s", import_string, e
+ )
+
+ if isinstance(iter_default_integrations.__doc__, str):
+ for import_string in integrations:
+ iter_default_integrations.__doc__ += "\n- `{}`".format(import_string)
+
+ return iter_default_integrations
+
+
+_AUTO_ENABLING_INTEGRATIONS = (
+ "sentry_sdk.integrations.django.DjangoIntegration",
+ "sentry_sdk.integrations.flask.FlaskIntegration",
+ "sentry_sdk.integrations.bottle.BottleIntegration",
+ "sentry_sdk.integrations.falcon.FalconIntegration",
+ "sentry_sdk.integrations.sanic.SanicIntegration",
+ "sentry_sdk.integrations.celery.CeleryIntegration",
+ "sentry_sdk.integrations.rq.RqIntegration",
+ "sentry_sdk.integrations.aiohttp.AioHttpIntegration",
+ "sentry_sdk.integrations.tornado.TornadoIntegration",
+ "sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration",
+)
+
+
+iter_default_integrations = _generate_default_integrations_iterator(
+ integrations=(
+ # stdlib/base runtime integrations
+ "sentry_sdk.integrations.logging.LoggingIntegration",
+ "sentry_sdk.integrations.stdlib.StdlibIntegration",
+ "sentry_sdk.integrations.excepthook.ExcepthookIntegration",
+ "sentry_sdk.integrations.dedupe.DedupeIntegration",
+ "sentry_sdk.integrations.atexit.AtexitIntegration",
+ "sentry_sdk.integrations.modules.ModulesIntegration",
+ "sentry_sdk.integrations.argv.ArgvIntegration",
+ "sentry_sdk.integrations.threading.ThreadingIntegration",
+ ),
+ auto_enabling_integrations=_AUTO_ENABLING_INTEGRATIONS,
+)
+
+del _generate_default_integrations_iterator
+
+
+def setup_integrations(
+ integrations, with_defaults=True, with_auto_enabling_integrations=False
+):
+ # type: (List[Integration], bool, bool) -> Dict[str, Integration]
+ """Given a list of integration instances this installs them all. When
+ `with_defaults` is set to `True` then all default integrations are added
+ unless they were already provided before.
+ """
+ integrations = dict(
+ (integration.identifier, integration) for integration in integrations or ()
+ )
+
+ logger.debug("Setting up integrations (with default = %s)", with_defaults)
+
+ # Integrations that are not explicitly set up by the user.
+ used_as_default_integration = set()
+
+ if with_defaults:
+ for integration_cls in iter_default_integrations(
+ with_auto_enabling_integrations
+ ):
+ if integration_cls.identifier not in integrations:
+ instance = integration_cls()
+ integrations[instance.identifier] = instance
+ used_as_default_integration.add(instance.identifier)
+
+ for identifier, integration in iteritems(integrations):
+ with _installer_lock:
+ if identifier not in _installed_integrations:
+ logger.debug(
+ "Setting up previously not enabled integration %s", identifier
+ )
+ try:
+ type(integration).setup_once()
+ except NotImplementedError:
+ if getattr(integration, "install", None) is not None:
+ logger.warning(
+ "Integration %s: The install method is "
+ "deprecated. Use `setup_once`.",
+ identifier,
+ )
+ integration.install()
+ else:
+ raise
+ except DidNotEnable as e:
+ if identifier not in used_as_default_integration:
+ raise
+
+ logger.debug(
+ "Did not enable default integration %s: %s", identifier, e
+ )
+
+ _installed_integrations.add(identifier)
+
+ for identifier in integrations:
+ logger.debug("Enabling integration %s", identifier)
+
+ return integrations
+
+
+class DidNotEnable(Exception):
+ """
+ The integration could not be enabled due to a trivial user error like
+ `flask` not being installed for the `FlaskIntegration`.
+
+ This exception is silently swallowed for default integrations, but reraised
+ for explicitly enabled integrations.
+ """
+
+
+class Integration(object):
+ """Baseclass for all integrations.
+
+ To accept options for an integration, implement your own constructor that
+ saves those options on `self`.
+ """
+
+ install = None
+ """Legacy method, do not implement."""
+
+ identifier = None # type: str
+ """String unique ID of integration type"""
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ """
+ Initialize the integration.
+
+ This function is only called once, ever. Configuration is not available
+ at this point, so the only thing to do here is to hook into exception
+ handlers, and perhaps do monkeypatches.
+
+ Inside those hooks `Integration.current` can be used to access the
+ instance again.
+ """
+ raise NotImplementedError()
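+
+
+# Minimal sketch of a custom integration (MyLibIntegration is a hypothetical
+# example): subclass Integration, pick a unique identifier, and perform the
+# one-time patching in setup_once().
+#
+#     class MyLibIntegration(Integration):
+#         identifier = "mylib"
+#
+#         @staticmethod
+#         def setup_once():
+#             # patch mylib here; this runs at most once per process
+#             pass
+#
+#     sentry_sdk.init(integrations=[MyLibIntegration()])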
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/_wsgi_common.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/_wsgi_common.py
new file mode 100644
index 0000000000..f874663883
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/_wsgi_common.py
@@ -0,0 +1,180 @@
+import json
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import AnnotatedValue
+from sentry_sdk._compat import text_type, iteritems
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ import sentry_sdk
+
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+ from typing import Union
+
+
+SENSITIVE_ENV_KEYS = (
+ "REMOTE_ADDR",
+ "HTTP_X_FORWARDED_FOR",
+ "HTTP_SET_COOKIE",
+ "HTTP_COOKIE",
+ "HTTP_AUTHORIZATION",
+ "HTTP_X_FORWARDED_FOR",
+ "HTTP_X_REAL_IP",
+)
+
+SENSITIVE_HEADERS = tuple(
+ x[len("HTTP_") :] for x in SENSITIVE_ENV_KEYS if x.startswith("HTTP_")
+)
+
+
+def request_body_within_bounds(client, content_length):
+ # type: (Optional[sentry_sdk.Client], int) -> bool
+ if client is None:
+ return False
+
+ bodies = client.options["request_bodies"]
+ return not (
+ bodies == "never"
+ or (bodies == "small" and content_length > 10 ** 3)
+ or (bodies == "medium" and content_length > 10 ** 4)
+ )
+
+
+class RequestExtractor(object):
+ def __init__(self, request):
+ # type: (Any) -> None
+ self.request = request
+
+ def extract_into_event(self, event):
+ # type: (Dict[str, Any]) -> None
+ client = Hub.current.client
+ if client is None:
+ return
+
+ data = None # type: Optional[Union[AnnotatedValue, Dict[str, Any]]]
+
+ content_length = self.content_length()
+ request_info = event.get("request", {})
+
+ if _should_send_default_pii():
+ request_info["cookies"] = dict(self.cookies())
+
+ if not request_body_within_bounds(client, content_length):
+ data = AnnotatedValue(
+ "",
+ {"rem": [["!config", "x", 0, content_length]], "len": content_length},
+ )
+ else:
+ parsed_body = self.parsed_body()
+ if parsed_body is not None:
+ data = parsed_body
+ elif self.raw_data():
+ data = AnnotatedValue(
+ "",
+ {"rem": [["!raw", "x", 0, content_length]], "len": content_length},
+ )
+ else:
+ data = None
+
+ if data is not None:
+ request_info["data"] = data
+
+ event["request"] = request_info
+
+ def content_length(self):
+ # type: () -> int
+ try:
+ return int(self.env().get("CONTENT_LENGTH", 0))
+ except ValueError:
+ return 0
+
+ def cookies(self):
+ # type: () -> Dict[str, Any]
+ raise NotImplementedError()
+
+ def raw_data(self):
+ # type: () -> Optional[Union[str, bytes]]
+ raise NotImplementedError()
+
+ def form(self):
+ # type: () -> Optional[Dict[str, Any]]
+ raise NotImplementedError()
+
+ def parsed_body(self):
+ # type: () -> Optional[Dict[str, Any]]
+ form = self.form()
+ files = self.files()
+ if form or files:
+ data = dict(iteritems(form))
+ for k, v in iteritems(files):
+ size = self.size_of_file(v)
+ data[k] = AnnotatedValue(
+ "", {"len": size, "rem": [["!raw", "x", 0, size]]}
+ )
+
+ return data
+
+ return self.json()
+
+ def is_json(self):
+ # type: () -> bool
+ return _is_json_content_type(self.env().get("CONTENT_TYPE"))
+
+ def json(self):
+ # type: () -> Optional[Any]
+ try:
+ if not self.is_json():
+ return None
+
+ raw_data = self.raw_data()
+ if raw_data is None:
+ return None
+
+ if isinstance(raw_data, text_type):
+ return json.loads(raw_data)
+ else:
+ return json.loads(raw_data.decode("utf-8"))
+ except ValueError:
+ pass
+
+ return None
+
+ def files(self):
+ # type: () -> Optional[Dict[str, Any]]
+ raise NotImplementedError()
+
+ def size_of_file(self, file):
+ # type: (Any) -> int
+ raise NotImplementedError()
+
+ def env(self):
+ # type: () -> Dict[str, Any]
+ raise NotImplementedError()
+
+
+def _is_json_content_type(ct):
+ # type: (Optional[str]) -> bool
+ mt = (ct or "").split(";", 1)[0]
+    return mt == "application/json" or (
+        mt.startswith("application/") and mt.endswith("+json")
+    )
+
+
+def _filter_headers(headers):
+ # type: (Dict[str, str]) -> Dict[str, str]
+ if _should_send_default_pii():
+ return headers
+
+ return {
+ k: (
+ v
+ if k.upper().replace("-", "_") not in SENSITIVE_HEADERS
+ else AnnotatedValue("", {"rem": [["!config", "x", 0, len(v)]]})
+ )
+ for k, v in iteritems(headers)
+ }
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/aiohttp.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/aiohttp.py
new file mode 100644
index 0000000000..02c76df7ef
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/aiohttp.py
@@ -0,0 +1,211 @@
+import sys
+import weakref
+
+from sentry_sdk._compat import reraise
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.integrations._wsgi_common import (
+ _filter_headers,
+ request_body_within_bounds,
+)
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ event_from_exception,
+ transaction_from_function,
+ HAS_REAL_CONTEXTVARS,
+ AnnotatedValue,
+)
+
+try:
+ import asyncio
+
+ from aiohttp import __version__ as AIOHTTP_VERSION
+ from aiohttp.web import Application, HTTPException, UrlDispatcher
+except ImportError:
+ raise DidNotEnable("AIOHTTP not installed")
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from aiohttp.web_request import Request
+ from aiohttp.abc import AbstractMatchInfo
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+ from typing import Tuple
+ from typing import Callable
+ from typing import Union
+
+ from sentry_sdk.utils import ExcInfo
+ from sentry_sdk._types import EventProcessor
+
+
+class AioHttpIntegration(Integration):
+ identifier = "aiohttp"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+
+ try:
+ version = tuple(map(int, AIOHTTP_VERSION.split(".")))
+ except (TypeError, ValueError):
+ raise DidNotEnable("AIOHTTP version unparseable: {}".format(version))
+
+ if version < (3, 4):
+ raise DidNotEnable("AIOHTTP 3.4 or newer required.")
+
+ if not HAS_REAL_CONTEXTVARS:
+ # We better have contextvars or we're going to leak state between
+ # requests.
+ raise RuntimeError(
+ "The aiohttp integration for Sentry requires Python 3.7+ "
+ " or aiocontextvars package"
+ )
+
+ ignore_logger("aiohttp.server")
+
+ old_handle = Application._handle
+
+ async def sentry_app_handle(self, request, *args, **kwargs):
+ # type: (Any, Request, *Any, **Any) -> Any
+ async def inner():
+ # type: () -> Any
+ hub = Hub.current
+ if hub.get_integration(AioHttpIntegration) is None:
+ return await old_handle(self, request, *args, **kwargs)
+
+ weak_request = weakref.ref(request)
+
+ with Hub(Hub.current) as hub:
+ with hub.configure_scope() as scope:
+ scope.clear_breadcrumbs()
+ scope.add_event_processor(_make_request_processor(weak_request))
+
+ span = Span.continue_from_headers(request.headers)
+ span.op = "http.server"
+ # If this transaction name makes it to the UI, AIOHTTP's
+ # URL resolver did not find a route or died trying.
+ span.transaction = "generic AIOHTTP request"
+
+ with hub.start_span(span):
+ try:
+ response = await old_handle(self, request)
+ except HTTPException as e:
+ span.set_http_status(e.status_code)
+ raise
+ except asyncio.CancelledError:
+ span.set_status("cancelled")
+ raise
+ except Exception:
+ # This will probably map to a 500 but seems like we
+ # have no way to tell. Do not set span status.
+ reraise(*_capture_exception(hub))
+
+ span.set_http_status(response.status)
+ return response
+
+ # Explicitly wrap in task such that current contextvar context is
+ # copied. Just doing `return await inner()` will leak scope data
+ # between requests.
+ return await asyncio.get_event_loop().create_task(inner())
+
+ Application._handle = sentry_app_handle
+
+ old_urldispatcher_resolve = UrlDispatcher.resolve
+
+ async def sentry_urldispatcher_resolve(self, request):
+ # type: (UrlDispatcher, Request) -> AbstractMatchInfo
+ rv = await old_urldispatcher_resolve(self, request)
+
+ name = None
+
+ try:
+ name = transaction_from_function(rv.handler)
+ except Exception:
+ pass
+
+ if name is not None:
+ with Hub.current.configure_scope() as scope:
+ scope.transaction = name
+
+ return rv
+
+ UrlDispatcher.resolve = sentry_urldispatcher_resolve
+
+
+def _make_request_processor(weak_request):
+ # type: (Callable[[], Request]) -> EventProcessor
+ def aiohttp_processor(
+ event, # type: Dict[str, Any]
+ hint, # type: Dict[str, Tuple[type, BaseException, Any]]
+ ):
+ # type: (...) -> Dict[str, Any]
+ request = weak_request()
+ if request is None:
+ return event
+
+ with capture_internal_exceptions():
+ request_info = event.setdefault("request", {})
+
+ request_info["url"] = "%s://%s%s" % (
+ request.scheme,
+ request.host,
+ request.path,
+ )
+
+ request_info["query_string"] = request.query_string
+ request_info["method"] = request.method
+ request_info["env"] = {"REMOTE_ADDR": request.remote}
+
+ hub = Hub.current
+ request_info["headers"] = _filter_headers(dict(request.headers))
+
+ # Just attach raw data here if it is within bounds, if available.
+ # Unfortunately there's no way to get structured data from aiohttp
+ # without awaiting on some coroutine.
+ request_info["data"] = get_aiohttp_request_data(hub, request)
+
+ return event
+
+ return aiohttp_processor
+
+
+def _capture_exception(hub):
+ # type: (Hub) -> ExcInfo
+ exc_info = sys.exc_info()
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=hub.client.options, # type: ignore
+ mechanism={"type": "aiohttp", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+ return exc_info
+
+
+BODY_NOT_READ_MESSAGE = "[Can't show request body due to implementation details.]"
+
+
+def get_aiohttp_request_data(hub, request):
+ # type: (Hub, Request) -> Union[Optional[str], AnnotatedValue]
+ bytes_body = request._read_bytes
+
+ if bytes_body is not None:
+ # we have body to show
+ if not request_body_within_bounds(hub.client, len(bytes_body)):
+
+ return AnnotatedValue(
+ "",
+ {"rem": [["!config", "x", 0, len(bytes_body)]], "len": len(bytes_body)},
+ )
+ encoding = request.charset or "utf-8"
+ return bytes_body.decode(encoding, "replace")
+
+ if request.can_read_body:
+ # body exists but we can't show it
+ return BODY_NOT_READ_MESSAGE
+
+ # request has no body
+ return None
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/argv.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/argv.py
new file mode 100644
index 0000000000..f005521d32
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/argv.py
@@ -0,0 +1,33 @@
+from __future__ import absolute_import
+
+import sys
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Optional
+
+ from sentry_sdk._types import Event, Hint
+
+
+class ArgvIntegration(Integration):
+ identifier = "argv"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ @add_global_event_processor
+ def processor(event, hint):
+ # type: (Event, Optional[Hint]) -> Optional[Event]
+ if Hub.current.get_integration(ArgvIntegration) is not None:
+ extra = event.setdefault("extra", {})
+ # If some event processor decided to set extra to e.g. an
+ # `int`, don't crash. Not here.
+ if isinstance(extra, dict):
+ extra["sys.argv"] = sys.argv
+
+ return event
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/asgi.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/asgi.py
new file mode 100644
index 0000000000..762634f82f
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/asgi.py
@@ -0,0 +1,194 @@
+"""
+An ASGI middleware.
+
+Based on Tom Christie's `sentry-asgi <https://github.com/encode/sentry-asgi>`_.
+"""
+
+import asyncio
+import functools
+import inspect
+import urllib
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk.utils import ContextVar, event_from_exception, transaction_from_function
+from sentry_sdk.tracing import Span
+
+if MYPY:
+ from typing import Dict
+ from typing import Any
+ from typing import Optional
+ from typing import Callable
+
+ from sentry_sdk._types import Event, Hint
+
+
+_asgi_middleware_applied = ContextVar("sentry_asgi_middleware_applied")
+
+
+def _capture_exception(hub, exc):
+ # type: (Hub, Any) -> None
+
+ # Check client here as it might have been unset while streaming response
+ if hub.client is not None:
+ event, hint = event_from_exception(
+ exc,
+ client_options=hub.client.options,
+ mechanism={"type": "asgi", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+
+def _looks_like_asgi3(app):
+ # type: (Any) -> bool
+ """
+ Try to figure out if an application object supports ASGI3.
+
+ This is how uvicorn figures out the application version as well.
+ """
+ if inspect.isclass(app):
+ return hasattr(app, "__await__")
+ elif inspect.isfunction(app):
+ return asyncio.iscoroutinefunction(app)
+ else:
+ call = getattr(app, "__call__", None) # noqa
+ return asyncio.iscoroutinefunction(call)
+
+
+class SentryAsgiMiddleware:
+ __slots__ = ("app", "__call__")
+
+ def __init__(self, app):
+ # type: (Any) -> None
+ self.app = app
+
+ if _looks_like_asgi3(app):
+ self.__call__ = self._run_asgi3 # type: Callable[..., Any]
+ else:
+ self.__call__ = self._run_asgi2
+
+ def _run_asgi2(self, scope):
+ # type: (Any) -> Any
+ async def inner(receive, send):
+ # type: (Any, Any) -> Any
+ return await self._run_app(scope, lambda: self.app(scope)(receive, send))
+
+ return inner
+
+ async def _run_asgi3(self, scope, receive, send):
+ # type: (Any, Any, Any) -> Any
+ return await self._run_app(scope, lambda: self.app(scope, receive, send))
+
+ async def _run_app(self, scope, callback):
+ # type: (Any, Any) -> Any
+ if _asgi_middleware_applied.get(False):
+ return await callback()
+
+ _asgi_middleware_applied.set(True)
+ try:
+ hub = Hub(Hub.current)
+ with hub:
+ with hub.configure_scope() as sentry_scope:
+ sentry_scope.clear_breadcrumbs()
+ sentry_scope._name = "asgi"
+ processor = functools.partial(
+ self.event_processor, asgi_scope=scope
+ )
+ sentry_scope.add_event_processor(processor)
+
+ if scope["type"] in ("http", "websocket"):
+ span = Span.continue_from_headers(dict(scope["headers"]))
+ span.op = "{}.server".format(scope["type"])
+ else:
+ span = Span()
+ span.op = "asgi.server"
+
+ span.set_tag("asgi.type", scope["type"])
+ span.transaction = "generic ASGI request"
+
+ with hub.start_span(span) as span:
+ # XXX: Would be cool to have correct span status, but we
+ # would have to wrap send(). That is a bit hard to do with
+ # the current abstraction over ASGI 2/3.
+ try:
+ return await callback()
+ except Exception as exc:
+ _capture_exception(hub, exc)
+ raise exc from None
+ finally:
+ _asgi_middleware_applied.set(False)
+
+ def event_processor(self, event, hint, asgi_scope):
+ # type: (Event, Hint, Any) -> Optional[Event]
+ request_info = event.get("request", {})
+
+ if asgi_scope["type"] in ("http", "websocket"):
+ request_info["url"] = self.get_url(asgi_scope)
+ request_info["method"] = asgi_scope["method"]
+ request_info["headers"] = _filter_headers(self.get_headers(asgi_scope))
+ request_info["query_string"] = self.get_query(asgi_scope)
+
+ if asgi_scope.get("client") and _should_send_default_pii():
+ request_info["env"] = {"REMOTE_ADDR": asgi_scope["client"][0]}
+
+ if asgi_scope.get("endpoint"):
+ # Webframeworks like Starlette mutate the ASGI env once routing is
+ # done, which is sometime after the request has started. If we have
+ # an endpoint, overwrite our path-based transaction name.
+ event["transaction"] = self.get_transaction(asgi_scope)
+
+ event["request"] = request_info
+
+ return event
+
+ def get_url(self, scope):
+ # type: (Any) -> str
+ """
+ Extract URL from the ASGI scope, without also including the querystring.
+ """
+ scheme = scope.get("scheme", "http")
+ server = scope.get("server", None)
+ path = scope.get("root_path", "") + scope["path"]
+
+ for key, value in scope["headers"]:
+ if key == b"host":
+ host_header = value.decode("latin-1")
+ return "%s://%s%s" % (scheme, host_header, path)
+
+ if server is not None:
+ host, port = server
+ default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
+ if port != default_port:
+ return "%s://%s:%s%s" % (scheme, host, port, path)
+ return "%s://%s%s" % (scheme, host, path)
+ return path
+
+ def get_query(self, scope):
+ # type: (Any) -> Any
+ """
+ Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.
+ """
+ return urllib.parse.unquote(scope["query_string"].decode("latin-1"))
+
+ def get_headers(self, scope):
+ # type: (Any) -> Dict[str, Any]
+ """
+ Extract headers from the ASGI scope, in the format that the Sentry protocol expects.
+ """
+ headers = {} # type: Dict[str, str]
+ for raw_key, raw_value in scope["headers"]:
+ key = raw_key.decode("latin-1")
+ value = raw_value.decode("latin-1")
+ if key in headers:
+ headers[key] = headers[key] + ", " + value
+ else:
+ headers[key] = value
+ return headers
+
+ def get_transaction(self, scope):
+ # type: (Any) -> Optional[str]
+ """
+ Return a transaction string to identify the routed endpoint.
+ """
+ return transaction_from_function(scope["endpoint"])
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/atexit.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/atexit.py
new file mode 100644
index 0000000000..18fe657bff
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/atexit.py
@@ -0,0 +1,62 @@
+from __future__ import absolute_import
+
+import os
+import sys
+import atexit
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import logger
+from sentry_sdk.integrations import Integration
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+
+ from typing import Any
+ from typing import Optional
+
+
+def default_callback(pending, timeout):
+ # type: (int, int) -> None
+ """This is the default shutdown callback that is set on the options.
+ It prints out a message to stderr that informs the user that some events
+ are still pending and the process is waiting for them to flush out.
+ """
+
+ def echo(msg):
+ # type: (str) -> None
+ sys.stderr.write(msg + "\n")
+
+ echo("Sentry is attempting to send %i pending error messages" % pending)
+ echo("Waiting up to %s seconds" % timeout)
+ echo("Press Ctrl-%s to quit" % (os.name == "nt" and "Break" or "C"))
+ sys.stderr.flush()
+
+
+class AtexitIntegration(Integration):
+ identifier = "atexit"
+
+ def __init__(self, callback=None):
+ # type: (Optional[Any]) -> None
+ if callback is None:
+ callback = default_callback
+ self.callback = callback
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ @atexit.register
+ def _shutdown():
+ # type: () -> None
+ logger.debug("atexit: got shutdown signal")
+ hub = Hub.main
+ integration = hub.get_integration(AtexitIntegration)
+ if integration is not None:
+ logger.debug("atexit: shutting down client")
+
+ # If there is a session on the hub, close it now.
+ hub.end_session()
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+ client.close(callback=integration.callback)
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/aws_lambda.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/aws_lambda.py
new file mode 100644
index 0000000000..3a08d998db
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/aws_lambda.py
@@ -0,0 +1,254 @@
+from datetime import datetime, timedelta
+from os import environ
+import sys
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk._compat import reraise
+from sentry_sdk.utils import (
+ AnnotatedValue,
+ capture_internal_exceptions,
+ event_from_exception,
+ logger,
+)
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import TypeVar
+ from typing import Callable
+ from typing import Optional
+
+ from sentry_sdk._types import EventProcessor, Event, Hint
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+
+def _wrap_handler(handler):
+ # type: (F) -> F
+ def sentry_handler(event, context, *args, **kwargs):
+ # type: (Any, Any, *Any, **Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(AwsLambdaIntegration)
+ if integration is None:
+ return handler(event, context, *args, **kwargs)
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ with hub.push_scope() as scope:
+ with capture_internal_exceptions():
+ scope.clear_breadcrumbs()
+ scope.transaction = context.function_name
+ scope.add_event_processor(_make_request_event_processor(event, context))
+
+ try:
+ return handler(event, context, *args, **kwargs)
+ except Exception:
+ exc_info = sys.exc_info()
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=client.options,
+ mechanism={"type": "aws_lambda", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+ reraise(*exc_info)
+
+ return sentry_handler # type: ignore
+
+
+def _drain_queue():
+ # type: () -> None
+ with capture_internal_exceptions():
+ hub = Hub.current
+ integration = hub.get_integration(AwsLambdaIntegration)
+ if integration is not None:
+ # Flush out the event queue before AWS kills the
+ # process.
+ hub.flush()
+
+
+class AwsLambdaIntegration(Integration):
+ identifier = "aws_lambda"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ import __main__ as lambda_bootstrap # type: ignore
+
+ pre_37 = True # Python 3.6 or 2.7
+
+ if not hasattr(lambda_bootstrap, "handle_http_request"):
+ try:
+ import bootstrap as lambda_bootstrap # type: ignore
+
+ pre_37 = False # Python 3.7
+ except ImportError:
+ pass
+
+ if not hasattr(lambda_bootstrap, "handle_event_request"):
+ logger.warning(
+ "Not running in AWS Lambda environment, "
+ "AwsLambdaIntegration disabled"
+ )
+ return
+
+ if pre_37:
+ old_handle_event_request = lambda_bootstrap.handle_event_request
+
+ def sentry_handle_event_request(request_handler, *args, **kwargs):
+ # type: (Any, *Any, **Any) -> Any
+ request_handler = _wrap_handler(request_handler)
+ return old_handle_event_request(request_handler, *args, **kwargs)
+
+ lambda_bootstrap.handle_event_request = sentry_handle_event_request
+
+ old_handle_http_request = lambda_bootstrap.handle_http_request
+
+ def sentry_handle_http_request(request_handler, *args, **kwargs):
+ # type: (Any, *Any, **Any) -> Any
+ request_handler = _wrap_handler(request_handler)
+ return old_handle_http_request(request_handler, *args, **kwargs)
+
+ lambda_bootstrap.handle_http_request = sentry_handle_http_request
+
+ # Patch to_json to drain the queue. This should work even when the
+ # SDK is initialized inside of the handler
+
+ old_to_json = lambda_bootstrap.to_json
+
+ def sentry_to_json(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ _drain_queue()
+ return old_to_json(*args, **kwargs)
+
+ lambda_bootstrap.to_json = sentry_to_json
+ else:
+ old_handle_event_request = lambda_bootstrap.handle_event_request
+
+ def sentry_handle_event_request( # type: ignore
+ lambda_runtime_client, request_handler, *args, **kwargs
+ ):
+ request_handler = _wrap_handler(request_handler)
+ return old_handle_event_request(
+ lambda_runtime_client, request_handler, *args, **kwargs
+ )
+
+ lambda_bootstrap.handle_event_request = sentry_handle_event_request
+
+ # Patch the runtime client to drain the queue. This should work
+ # even when the SDK is initialized inside of the handler
+
+ def _wrap_post_function(f):
+ # type: (F) -> F
+ def inner(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ _drain_queue()
+ return f(*args, **kwargs)
+
+ return inner # type: ignore
+
+ lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = _wrap_post_function(
+ lambda_bootstrap.LambdaRuntimeClient.post_invocation_result
+ )
+ lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = _wrap_post_function(
+ lambda_bootstrap.LambdaRuntimeClient.post_invocation_error
+ )
+
+
+def _make_request_event_processor(aws_event, aws_context):
+ # type: (Any, Any) -> EventProcessor
+ start_time = datetime.now()
+
+ def event_processor(event, hint, start_time=start_time):
+ # type: (Event, Hint, datetime) -> Optional[Event]
+ extra = event.setdefault("extra", {})
+ extra["lambda"] = {
+ "function_name": aws_context.function_name,
+ "function_version": aws_context.function_version,
+ "invoked_function_arn": aws_context.invoked_function_arn,
+ "remaining_time_in_millis": aws_context.get_remaining_time_in_millis(),
+ "aws_request_id": aws_context.aws_request_id,
+ }
+
+ extra["cloudwatch logs"] = {
+ "url": _get_cloudwatch_logs_url(aws_context, start_time),
+ "log_group": aws_context.log_group_name,
+ "log_stream": aws_context.log_stream_name,
+ }
+
+ request = event.get("request", {})
+
+ if "httpMethod" in aws_event:
+ request["method"] = aws_event["httpMethod"]
+
+ request["url"] = _get_url(aws_event, aws_context)
+
+ if "queryStringParameters" in aws_event:
+ request["query_string"] = aws_event["queryStringParameters"]
+
+ if "headers" in aws_event:
+ request["headers"] = _filter_headers(aws_event["headers"])
+
+ if aws_event.get("body", None):
+            # Unfortunately we couldn't find a way to get a structured body
+            # from the AWS event, so every body is unstructured to us.
+ request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]})
+
+ if _should_send_default_pii():
+ user_info = event.setdefault("user", {})
+
+ id = aws_event.get("identity", {}).get("userArn")
+ if id is not None:
+ user_info.setdefault("id", id)
+
+ ip = aws_event.get("identity", {}).get("sourceIp")
+ if ip is not None:
+ user_info.setdefault("ip_address", ip)
+
+ event["request"] = request
+
+ return event
+
+ return event_processor
+
+
+def _get_url(event, context):
+ # type: (Any, Any) -> str
+ path = event.get("path", None)
+ headers = event.get("headers", {})
+ host = headers.get("Host", None)
+ proto = headers.get("X-Forwarded-Proto", None)
+ if proto and host and path:
+ return "{}://{}{}".format(proto, host, path)
+ return "awslambda:///{}".format(context.function_name)
+
+
+def _get_cloudwatch_logs_url(context, start_time):
+ # type: (Any, datetime) -> str
+ """
+    Generates a CloudWatch Logs console URL based on the context object.
+
+    Arguments:
+        context {Any} -- context from the Lambda handler
+        start_time {datetime} -- approximate start time of the invocation
+
+    Returns:
+        str -- AWS Console URL to the logs.
+ """
+ formatstring = "%Y-%m-%dT%H:%M:%S"
+
+ url = (
+ "https://console.aws.amazon.com/cloudwatch/home?region={region}"
+ "#logEventViewer:group={log_group};stream={log_stream}"
+ ";start={start_time};end={end_time}"
+ ).format(
+ region=environ.get("AWS_REGION"),
+ log_group=context.log_group_name,
+ log_stream=context.log_stream_name,
+ start_time=(start_time - timedelta(seconds=1)).strftime(formatstring),
+ end_time=(datetime.now() + timedelta(seconds=2)).strftime(formatstring),
+ )
+
+ return url
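A hedged sketch of how this integration is typically enabled from inside a Lambda handler module; `handler` and the DSN are placeholders, and the actual wrapping happens through the bootstrap patching in `setup_once` above.

import sentry_sdk
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration

sentry_sdk.init(
    dsn="https://<key>@<org>.ingest.sentry.io/<project>",  # placeholder DSN
    integrations=[AwsLambdaIntegration()],
)

def handler(event, context):
    # Hypothetical Lambda entry point; an uncaught exception raised here is
    # captured by _wrap_handler and then re-raised to the runtime.
    return {"statusCode": 200, "body": "ok"}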
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/beam.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/beam.py
new file mode 100644
index 0000000000..7252746a7f
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/beam.py
@@ -0,0 +1,184 @@
+from __future__ import absolute_import
+
+import sys
+import types
+from functools import wraps
+
+from sentry_sdk.hub import Hub
+from sentry_sdk._compat import reraise
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Iterator
+ from typing import TypeVar
+ from typing import Optional
+ from typing import Callable
+
+ from sentry_sdk.client import Client
+ from sentry_sdk._types import ExcInfo
+
+ T = TypeVar("T")
+ F = TypeVar("F", bound=Callable[..., Any])
+
+
+WRAPPED_FUNC = "_wrapped_{}_"
+INSPECT_FUNC = "_inspect_{}" # Required format per apache_beam/transforms/core.py
+USED_FUNC = "_sentry_used_"
+
+
+class BeamIntegration(Integration):
+ identifier = "beam"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ from apache_beam.transforms.core import DoFn, ParDo # type: ignore
+
+ ignore_logger("root")
+ ignore_logger("bundle_processor.create")
+
+ function_patches = ["process", "start_bundle", "finish_bundle", "setup"]
+ for func_name in function_patches:
+ setattr(
+ DoFn,
+ INSPECT_FUNC.format(func_name),
+ _wrap_inspect_call(DoFn, func_name),
+ )
+
+ old_init = ParDo.__init__
+
+ def sentry_init_pardo(self, fn, *args, **kwargs):
+ # type: (ParDo, Any, *Any, **Any) -> Any
+ # Do not monkey patch init twice
+ if not getattr(self, "_sentry_is_patched", False):
+ for func_name in function_patches:
+ if not hasattr(fn, func_name):
+ continue
+ wrapped_func = WRAPPED_FUNC.format(func_name)
+
+                    # Check that the inspect wrapper is set and the process
+                    # function is not, to avoid monkey patching process twice.
+                    # Check whether the function is defined on the object for
+                    # backwards compatibility.

+ process_func = getattr(fn, func_name)
+ inspect_func = getattr(fn, INSPECT_FUNC.format(func_name))
+ if not getattr(inspect_func, USED_FUNC, False) and not getattr(
+ process_func, USED_FUNC, False
+ ):
+ setattr(fn, wrapped_func, process_func)
+ setattr(fn, func_name, _wrap_task_call(process_func))
+
+ self._sentry_is_patched = True
+ old_init(self, fn, *args, **kwargs)
+
+ ParDo.__init__ = sentry_init_pardo
+
+
+def _wrap_inspect_call(cls, func_name):
+ # type: (Any, Any) -> Any
+ from apache_beam.typehints.decorators import getfullargspec # type: ignore
+
+ if not hasattr(cls, func_name):
+ return None
+
+ def _inspect(self):
+ # type: (Any) -> Any
+ """
+ Inspect function overrides the way Beam gets argspec.
+ """
+ wrapped_func = WRAPPED_FUNC.format(func_name)
+ if hasattr(self, wrapped_func):
+ process_func = getattr(self, wrapped_func)
+ else:
+ process_func = getattr(self, func_name)
+ setattr(self, func_name, _wrap_task_call(process_func))
+ setattr(self, wrapped_func, process_func)
+
+ # getfullargspec is deprecated in more recent beam versions and get_function_args_defaults
+ # (which uses Signatures internally) should be used instead.
+ try:
+ from apache_beam.transforms.core import get_function_args_defaults
+
+ return get_function_args_defaults(process_func)
+ except ImportError:
+ return getfullargspec(process_func)
+
+ setattr(_inspect, USED_FUNC, True)
+ return _inspect
+
+
+def _wrap_task_call(func):
+ # type: (F) -> F
+ """
+    Wrap the task call in a try/except block to capture exceptions.
+    Pass the client on to raise_exception so it can be rebound.
+ """
+ client = Hub.current.client
+
+ @wraps(func)
+ def _inner(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ try:
+ gen = func(*args, **kwargs)
+ except Exception:
+ raise_exception(client)
+
+ if not isinstance(gen, types.GeneratorType):
+ return gen
+ return _wrap_generator_call(gen, client)
+
+ setattr(_inner, USED_FUNC, True)
+ return _inner # type: ignore
+
+
+def _capture_exception(exc_info, hub):
+ # type: (ExcInfo, Hub) -> None
+ """
+ Send Beam exception to Sentry.
+ """
+ integration = hub.get_integration(BeamIntegration)
+ if integration is None:
+ return
+
+ client = hub.client
+ if client is None:
+ return
+
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=client.options,
+ mechanism={"type": "beam", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+
+def raise_exception(client):
+ # type: (Optional[Client]) -> None
+ """
+ Raise an exception. If the client is not in the hub, rebind it.
+ """
+ hub = Hub.current
+ if hub.client is None:
+ hub.bind_client(client)
+ exc_info = sys.exc_info()
+ with capture_internal_exceptions():
+ _capture_exception(exc_info, hub)
+ reraise(*exc_info)
+
+
+def _wrap_generator_call(gen, client):
+ # type: (Iterator[T], Optional[Client]) -> Iterator[T]
+ """
+ Wrap the generator to handle any failures.
+ """
+ while True:
+ try:
+ yield next(gen)
+ except StopIteration:
+ break
+ except Exception:
+ raise_exception(client)
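A short sketch, assuming Apache Beam is installed, of enabling the integration before a pipeline is constructed; the DSN is a placeholder.

import sentry_sdk
from sentry_sdk.integrations.beam import BeamIntegration

sentry_sdk.init(
    dsn="https://<key>@<org>.ingest.sentry.io/<project>",  # placeholder DSN
    integrations=[BeamIntegration()],
)
# From this point on ParDo.__init__ is patched, so exceptions raised from a
# DoFn's process/start_bundle/finish_bundle/setup methods are sent to Sentry
# via _capture_exception above.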
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/bottle.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/bottle.py
new file mode 100644
index 0000000000..80224e4dc4
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/bottle.py
@@ -0,0 +1,199 @@
+from __future__ import absolute_import
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ event_from_exception,
+ transaction_from_function,
+)
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from sentry_sdk.integrations.wsgi import _ScopedResponse
+ from typing import Any
+ from typing import Dict
+ from typing import Callable
+ from typing import Optional
+ from bottle import FileUpload, FormsDict, LocalRequest # type: ignore
+
+ from sentry_sdk._types import EventProcessor
+
+try:
+ from bottle import (
+ Bottle,
+ Route,
+ request as bottle_request,
+ HTTPResponse,
+ __version__ as BOTTLE_VERSION,
+ )
+except ImportError:
+ raise DidNotEnable("Bottle not installed")
+
+
+TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+
+
+class BottleIntegration(Integration):
+ identifier = "bottle"
+
+ transaction_style = None
+
+ def __init__(self, transaction_style="endpoint"):
+ # type: (str) -> None
+
+ if transaction_style not in TRANSACTION_STYLE_VALUES:
+ raise ValueError(
+ "Invalid value for transaction_style: %s (must be in %s)"
+ % (transaction_style, TRANSACTION_STYLE_VALUES)
+ )
+ self.transaction_style = transaction_style
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+
+ try:
+ version = tuple(map(int, BOTTLE_VERSION.split(".")))
+ except (TypeError, ValueError):
+            raise DidNotEnable("Unparseable Bottle version: {}".format(BOTTLE_VERSION))
+
+ if version < (0, 12):
+ raise DidNotEnable("Bottle 0.12 or newer required.")
+
+ # monkey patch method Bottle.__call__
+ old_app = Bottle.__call__
+
+ def sentry_patched_wsgi_app(self, environ, start_response):
+ # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+
+ hub = Hub.current
+ integration = hub.get_integration(BottleIntegration)
+ if integration is None:
+ return old_app(self, environ, start_response)
+
+ return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))(
+ environ, start_response
+ )
+
+ Bottle.__call__ = sentry_patched_wsgi_app
+
+ # monkey patch method Bottle._handle
+ old_handle = Bottle._handle
+
+ def _patched_handle(self, environ):
+ # type: (Bottle, Dict[str, Any]) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(BottleIntegration)
+ if integration is None:
+ return old_handle(self, environ)
+
+ # create new scope
+ scope_manager = hub.push_scope()
+
+ with scope_manager:
+ app = self
+ with hub.configure_scope() as scope:
+ scope._name = "bottle"
+ scope.add_event_processor(
+ _make_request_event_processor(app, bottle_request, integration)
+ )
+ res = old_handle(self, environ)
+
+ # scope cleanup
+ return res
+
+ Bottle._handle = _patched_handle
+
+ # monkey patch method Route._make_callback
+ old_make_callback = Route._make_callback
+
+ def patched_make_callback(self, *args, **kwargs):
+ # type: (Route, *object, **object) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(BottleIntegration)
+ prepared_callback = old_make_callback(self, *args, **kwargs)
+ if integration is None:
+ return prepared_callback
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ def wrapped_callback(*args, **kwargs):
+ # type: (*object, **object) -> Any
+
+ try:
+ res = prepared_callback(*args, **kwargs)
+ except HTTPResponse:
+ raise
+ except Exception as exception:
+ event, hint = event_from_exception(
+ exception,
+ client_options=client.options,
+ mechanism={"type": "bottle", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+ raise exception
+
+ return res
+
+ return wrapped_callback
+
+ Route._make_callback = patched_make_callback
+
+
+class BottleRequestExtractor(RequestExtractor):
+ def env(self):
+ # type: () -> Dict[str, str]
+ return self.request.environ
+
+ def cookies(self):
+ # type: () -> Dict[str, str]
+ return self.request.cookies
+
+ def raw_data(self):
+ # type: () -> bytes
+ return self.request.body.read()
+
+ def form(self):
+ # type: () -> FormsDict
+ if self.is_json():
+ return None
+ return self.request.forms.decode()
+
+ def files(self):
+ # type: () -> Optional[Dict[str, str]]
+ if self.is_json():
+ return None
+
+ return self.request.files
+
+ def size_of_file(self, file):
+ # type: (FileUpload) -> int
+ return file.content_length
+
+
+def _make_request_event_processor(app, request, integration):
+ # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor
+ def inner(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+
+ try:
+ if integration.transaction_style == "endpoint":
+ event["transaction"] = request.route.name or transaction_from_function(
+ request.route.callback
+ )
+ elif integration.transaction_style == "url":
+ event["transaction"] = request.route.rule
+ except Exception:
+ pass
+
+ with capture_internal_exceptions():
+ BottleRequestExtractor(request).extract_into_event(event)
+
+ return event
+
+ return inner
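A usage sketch assuming a plain Bottle app; the route and DSN are placeholders, and `transaction_style` is one of the TRANSACTION_STYLE_VALUES defined above.

import sentry_sdk
from sentry_sdk.integrations.bottle import BottleIntegration
from bottle import Bottle

sentry_sdk.init(
    dsn="https://<key>@<org>.ingest.sentry.io/<project>",  # placeholder DSN
    integrations=[BottleIntegration(transaction_style="url")],
)

app = Bottle()  # Bottle.__call__ and Bottle._handle are already patched here

@app.route("/boom")
def boom():
    # Hypothetical route: the exception is captured by wrapped_callback above
    # and then re-raised so Bottle's own error handling still applies.
    raise RuntimeError("example error")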
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/celery.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/celery.py
new file mode 100644
index 0000000000..9b58796173
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/celery.py
@@ -0,0 +1,258 @@
+from __future__ import absolute_import
+
+import functools
+import sys
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.tracing import Span
+from sentry_sdk._compat import reraise
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import TypeVar
+ from typing import Callable
+ from typing import Optional
+
+ from sentry_sdk._types import EventProcessor, Event, Hint, ExcInfo
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+
+try:
+ from celery import VERSION as CELERY_VERSION # type: ignore
+ from celery.exceptions import ( # type: ignore
+ SoftTimeLimitExceeded,
+ Retry,
+ Ignore,
+ Reject,
+ )
+except ImportError:
+ raise DidNotEnable("Celery not installed")
+
+
+CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject)
+
+
+class CeleryIntegration(Integration):
+ identifier = "celery"
+
+ def __init__(self, propagate_traces=True):
+ # type: (bool) -> None
+ self.propagate_traces = propagate_traces
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ if CELERY_VERSION < (3,):
+ raise DidNotEnable("Celery 3 or newer required.")
+
+ import celery.app.trace as trace # type: ignore
+
+ old_build_tracer = trace.build_tracer
+
+ def sentry_build_tracer(name, task, *args, **kwargs):
+ # type: (Any, Any, *Any, **Any) -> Any
+ if not getattr(task, "_sentry_is_patched", False):
+ # Need to patch both methods because older celery sometimes
+ # short-circuits to task.run if it thinks it's safe.
+ task.__call__ = _wrap_task_call(task, task.__call__)
+ task.run = _wrap_task_call(task, task.run)
+ task.apply_async = _wrap_apply_async(task, task.apply_async)
+
+ # `build_tracer` is apparently called for every task
+ # invocation. Can't wrap every celery task for every invocation
+ # or we will get infinitely nested wrapper functions.
+ task._sentry_is_patched = True
+
+ return _wrap_tracer(task, old_build_tracer(name, task, *args, **kwargs))
+
+ trace.build_tracer = sentry_build_tracer
+
+ _patch_worker_exit()
+
+ # This logger logs every status of every task that ran on the worker.
+ # Meaning that every task's breadcrumbs are full of stuff like "Task
+ # <foo> raised unexpected <bar>".
+ ignore_logger("celery.worker.job")
+ ignore_logger("celery.app.trace")
+
+        # This is stdout/stderr redirected to a logger; we can't deal with it
+        # (need event_level=logging.WARN to reproduce).
+ ignore_logger("celery.redirected")
+
+
+def _wrap_apply_async(task, f):
+ # type: (Any, F) -> F
+ @functools.wraps(f)
+ def apply_async(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(CeleryIntegration)
+ if integration is not None and integration.propagate_traces:
+ headers = None
+ for key, value in hub.iter_trace_propagation_headers():
+ if headers is None:
+ headers = dict(kwargs.get("headers") or {})
+ headers[key] = value
+ if headers is not None:
+ kwargs["headers"] = headers
+
+ with hub.start_span(op="celery.submit", description=task.name):
+ return f(*args, **kwargs)
+ else:
+ return f(*args, **kwargs)
+
+ return apply_async # type: ignore
+
+
+def _wrap_tracer(task, f):
+ # type: (Any, F) -> F
+
+ # Need to wrap tracer for pushing the scope before prerun is sent, and
+ # popping it after postrun is sent.
+ #
+ # This is the reason we don't use signals for hooking in the first place.
+ # Also because in Celery 3, signal dispatch returns early if one handler
+ # crashes.
+ @functools.wraps(f)
+ def _inner(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ hub = Hub.current
+ if hub.get_integration(CeleryIntegration) is None:
+ return f(*args, **kwargs)
+
+ with hub.push_scope() as scope:
+ scope._name = "celery"
+ scope.clear_breadcrumbs()
+ scope.add_event_processor(_make_event_processor(task, *args, **kwargs))
+
+ span = Span.continue_from_headers(args[3].get("headers") or {})
+ span.op = "celery.task"
+ span.transaction = "unknown celery task"
+
+ # Could possibly use a better hook than this one
+ span.set_status("ok")
+
+ with capture_internal_exceptions():
+ # Celery task objects are not a thing to be trusted. Even
+ # something such as attribute access can fail.
+ span.transaction = task.name
+
+ with hub.start_span(span):
+ return f(*args, **kwargs)
+
+ return _inner # type: ignore
+
+
+def _wrap_task_call(task, f):
+ # type: (Any, F) -> F
+
+ # Need to wrap task call because the exception is caught before we get to
+ # see it. Also celery's reported stacktrace is untrustworthy.
+
+ # functools.wraps is important here because celery-once looks at this
+ # method's name.
+ # https://github.com/getsentry/sentry-python/issues/421
+ @functools.wraps(f)
+ def _inner(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ try:
+ return f(*args, **kwargs)
+ except Exception:
+ exc_info = sys.exc_info()
+ with capture_internal_exceptions():
+ _capture_exception(task, exc_info)
+ reraise(*exc_info)
+
+ return _inner # type: ignore
+
+
+def _make_event_processor(task, uuid, args, kwargs, request=None):
+ # type: (Any, Any, Any, Any, Optional[Any]) -> EventProcessor
+ def event_processor(event, hint):
+ # type: (Event, Hint) -> Optional[Event]
+
+ with capture_internal_exceptions():
+ tags = event.setdefault("tags", {})
+ tags["celery_task_id"] = uuid
+ extra = event.setdefault("extra", {})
+ extra["celery-job"] = {
+ "task_name": task.name,
+ "args": args,
+ "kwargs": kwargs,
+ }
+
+ if "exc_info" in hint:
+ with capture_internal_exceptions():
+ if issubclass(hint["exc_info"][0], SoftTimeLimitExceeded):
+ event["fingerprint"] = [
+ "celery",
+ "SoftTimeLimitExceeded",
+ getattr(task, "name", task),
+ ]
+
+ return event
+
+ return event_processor
+
+
+def _capture_exception(task, exc_info):
+ # type: (Any, ExcInfo) -> None
+ hub = Hub.current
+
+ if hub.get_integration(CeleryIntegration) is None:
+ return
+ if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS):
+ # ??? Doesn't map to anything
+ _set_status(hub, "aborted")
+ return
+
+ _set_status(hub, "internal_error")
+
+ if hasattr(task, "throws") and isinstance(exc_info[1], task.throws):
+ return
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=client.options,
+ mechanism={"type": "celery", "handled": False},
+ )
+
+ hub.capture_event(event, hint=hint)
+
+
+def _set_status(hub, status):
+ # type: (Hub, str) -> None
+ with capture_internal_exceptions():
+ with hub.configure_scope() as scope:
+ if scope.span is not None:
+ scope.span.set_status(status)
+
+
+def _patch_worker_exit():
+ # type: () -> None
+
+ # Need to flush queue before worker shutdown because a crashing worker will
+ # call os._exit
+ from billiard.pool import Worker # type: ignore
+
+ old_workloop = Worker.workloop
+
+ def sentry_workloop(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ try:
+ return old_workloop(*args, **kwargs)
+ finally:
+ with capture_internal_exceptions():
+ hub = Hub.current
+ if hub.get_integration(CeleryIntegration) is not None:
+ hub.flush()
+
+ Worker.workloop = sentry_workloop
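A minimal sketch, assuming a typical Celery app module; the broker URL, DSN, and task are placeholders, while `propagate_traces` is the constructor argument defined above.

import sentry_sdk
from celery import Celery
from sentry_sdk.integrations.celery import CeleryIntegration

sentry_sdk.init(
    dsn="https://<key>@<org>.ingest.sentry.io/<project>",  # placeholder DSN
    integrations=[CeleryIntegration(propagate_traces=False)],
)

app = Celery("tasks", broker="redis://localhost:6379/0")  # placeholder broker

@app.task
def divide(a, b):
    # Hypothetical task; exceptions escape _wrap_task_call and are captured
    # with mechanism {"type": "celery", "handled": False}.
    return a / b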
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/dedupe.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/dedupe.py
new file mode 100644
index 0000000000..b023df2042
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/dedupe.py
@@ -0,0 +1,43 @@
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import ContextVar
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Optional
+
+ from sentry_sdk._types import Event, Hint
+
+
+class DedupeIntegration(Integration):
+ identifier = "dedupe"
+
+ def __init__(self):
+ # type: () -> None
+ self._last_seen = ContextVar("last-seen")
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ @add_global_event_processor
+ def processor(event, hint):
+ # type: (Event, Optional[Hint]) -> Optional[Event]
+ if hint is None:
+ return event
+
+ integration = Hub.current.get_integration(DedupeIntegration)
+
+ if integration is None:
+ return event
+
+ exc_info = hint.get("exc_info", None)
+ if exc_info is None:
+ return event
+
+ exc = exc_info[1]
+ if integration._last_seen.get(None) is exc:
+ return None
+ integration._last_seen.set(exc)
+ return event
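A behavioural sketch of the processor above: capturing the same exception object twice should yield only one event. The DSN is a placeholder; `capture_exception()` with no argument falls back to `sys.exc_info()`.

import sentry_sdk
from sentry_sdk.integrations.dedupe import DedupeIntegration

sentry_sdk.init(
    dsn="https://<key>@<org>.ingest.sentry.io/<project>",  # placeholder DSN
    integrations=[DedupeIntegration()],
)

try:
    1 / 0
except ZeroDivisionError:
    sentry_sdk.capture_exception()  # first report passes through the processor
    sentry_sdk.capture_exception()  # same exception object: processor returns None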
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/django/__init__.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/django/__init__.py
new file mode 100644
index 0000000000..4e62fe3b74
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/django/__init__.py
@@ -0,0 +1,484 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import sys
+import threading
+import weakref
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.serializer import add_global_repr_processor
+from sentry_sdk.tracing import record_sql_queries
+from sentry_sdk.utils import (
+ HAS_REAL_CONTEXTVARS,
+ logger,
+ capture_internal_exceptions,
+ event_from_exception,
+ transaction_from_function,
+ walk_exception_chain,
+)
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+
+try:
+ from django import VERSION as DJANGO_VERSION
+ from django.core import signals
+
+ try:
+ from django.urls import resolve
+ except ImportError:
+ from django.core.urlresolvers import resolve
+except ImportError:
+ raise DidNotEnable("Django not installed")
+
+
+from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER
+from sentry_sdk.integrations.django.templates import get_template_frame_from_exception
+from sentry_sdk.integrations.django.middleware import patch_django_middlewares
+
+
+if MYPY:
+ from typing import Any
+ from typing import Callable
+ from typing import Dict
+ from typing import Optional
+ from typing import Union
+ from typing import List
+
+ from django.core.handlers.wsgi import WSGIRequest
+ from django.http.response import HttpResponse
+ from django.http.request import QueryDict
+ from django.utils.datastructures import MultiValueDict
+
+ from sentry_sdk.integrations.wsgi import _ScopedResponse
+ from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType
+
+
+if DJANGO_VERSION < (1, 10):
+
+ def is_authenticated(request_user):
+ # type: (Any) -> bool
+ return request_user.is_authenticated()
+
+
+else:
+
+ def is_authenticated(request_user):
+ # type: (Any) -> bool
+ return request_user.is_authenticated
+
+
+TRANSACTION_STYLE_VALUES = ("function_name", "url")
+
+
+class DjangoIntegration(Integration):
+ identifier = "django"
+
+ transaction_style = None
+ middleware_spans = None
+
+ def __init__(self, transaction_style="url", middleware_spans=True):
+ # type: (str, bool) -> None
+ if transaction_style not in TRANSACTION_STYLE_VALUES:
+ raise ValueError(
+ "Invalid value for transaction_style: %s (must be in %s)"
+ % (transaction_style, TRANSACTION_STYLE_VALUES)
+ )
+ self.transaction_style = transaction_style
+ self.middleware_spans = middleware_spans
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+
+ if DJANGO_VERSION < (1, 6):
+ raise DidNotEnable("Django 1.6 or newer is required.")
+
+ install_sql_hook()
+ # Patch in our custom middleware.
+
+ # logs an error for every 500
+ ignore_logger("django.server")
+ ignore_logger("django.request")
+
+ from django.core.handlers.wsgi import WSGIHandler
+
+ old_app = WSGIHandler.__call__
+
+ def sentry_patched_wsgi_handler(self, environ, start_response):
+ # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+ if Hub.current.get_integration(DjangoIntegration) is None:
+ return old_app(self, environ, start_response)
+
+ bound_old_app = old_app.__get__(self, WSGIHandler)
+
+ return SentryWsgiMiddleware(bound_old_app)(environ, start_response)
+
+ WSGIHandler.__call__ = sentry_patched_wsgi_handler
+
+ _patch_django_asgi_handler()
+
+ # patch get_response, because at that point we have the Django request
+ # object
+ from django.core.handlers.base import BaseHandler
+
+ old_get_response = BaseHandler.get_response
+
+ def sentry_patched_get_response(self, request):
+ # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException]
+ hub = Hub.current
+ integration = hub.get_integration(DjangoIntegration)
+ if integration is not None:
+ _patch_drf()
+
+ with hub.configure_scope() as scope:
+ # Rely on WSGI middleware to start a trace
+ try:
+ if integration.transaction_style == "function_name":
+ scope.transaction = transaction_from_function(
+ resolve(request.path).func
+ )
+ elif integration.transaction_style == "url":
+ scope.transaction = LEGACY_RESOLVER.resolve(request.path)
+ except Exception:
+ pass
+
+ scope.add_event_processor(
+ _make_event_processor(weakref.ref(request), integration)
+ )
+ return old_get_response(self, request)
+
+ BaseHandler.get_response = sentry_patched_get_response
+
+ signals.got_request_exception.connect(_got_request_exception)
+
+ @add_global_event_processor
+ def process_django_templates(event, hint):
+ # type: (Event, Optional[Hint]) -> Optional[Event]
+ if hint is None:
+ return event
+
+ exc_info = hint.get("exc_info", None)
+
+ if exc_info is None:
+ return event
+
+ exception = event.get("exception", None)
+
+ if exception is None:
+ return event
+
+ values = exception.get("values", None)
+
+ if values is None:
+ return event
+
+ for exception, (_, exc_value, _) in zip(
+ reversed(values), walk_exception_chain(exc_info)
+ ):
+ frame = get_template_frame_from_exception(exc_value)
+ if frame is not None:
+ frames = exception.get("stacktrace", {}).get("frames", [])
+
+ for i in reversed(range(len(frames))):
+ f = frames[i]
+ if (
+ f.get("function") in ("parse", "render")
+ and f.get("module") == "django.template.base"
+ ):
+ i += 1
+ break
+ else:
+ i = len(frames)
+
+ frames.insert(i, frame)
+
+ return event
+
+ @add_global_repr_processor
+ def _django_queryset_repr(value, hint):
+ # type: (Any, Dict[str, Any]) -> Union[NotImplementedType, str]
+ try:
+ # Django 1.6 can fail to import `QuerySet` when Django settings
+ # have not yet been initialized.
+ #
+ # If we fail to import, return `NotImplemented`. It's at least
+ # unlikely that we have a query set in `value` when importing
+ # `QuerySet` fails.
+ from django.db.models.query import QuerySet
+ except Exception:
+ return NotImplemented
+
+ if not isinstance(value, QuerySet) or value._result_cache:
+ return NotImplemented
+
+ # Do not call Hub.get_integration here. It is intentional that
+ # running under a new hub does not suddenly start executing
+ # querysets. This might be surprising to the user but it's likely
+ # less annoying.
+
+ return u"<%s from %s at 0x%x>" % (
+ value.__class__.__name__,
+ value.__module__,
+ id(value),
+ )
+
+ _patch_channels()
+ patch_django_middlewares()
+
+
+_DRF_PATCHED = False
+_DRF_PATCH_LOCK = threading.Lock()
+
+
+def _patch_drf():
+ # type: () -> None
+ """
+ Patch Django Rest Framework for more/better request data. DRF's request
+ type is a wrapper around Django's request type. The attribute we're
+ interested in is `request.data`, which is a cached property containing a
+ parsed request body. Reading a request body from that property is more
+ reliable than reading from any of Django's own properties, as those don't
+ hold payloads in memory and therefore can only be accessed once.
+
+ We patch the Django request object to include a weak backreference to the
+ DRF request object, such that we can later use either in
+ `DjangoRequestExtractor`.
+
+ This function is not called directly on SDK setup, because importing almost
+ any part of Django Rest Framework will try to access Django settings (where
+ `sentry_sdk.init()` might be called from in the first place). Instead we
+ run this function on every request and do the patching on the first
+ request.
+ """
+
+ global _DRF_PATCHED
+
+ if _DRF_PATCHED:
+ # Double-checked locking
+ return
+
+ with _DRF_PATCH_LOCK:
+ if _DRF_PATCHED:
+ return
+
+ # We set this regardless of whether the code below succeeds or fails.
+ # There is no point in trying to patch again on the next request.
+ _DRF_PATCHED = True
+
+ with capture_internal_exceptions():
+ try:
+ from rest_framework.views import APIView # type: ignore
+ except ImportError:
+ pass
+ else:
+ old_drf_initial = APIView.initial
+
+ def sentry_patched_drf_initial(self, request, *args, **kwargs):
+ # type: (APIView, Any, *Any, **Any) -> Any
+ with capture_internal_exceptions():
+ request._request._sentry_drf_request_backref = weakref.ref(
+ request
+ )
+ return old_drf_initial(self, request, *args, **kwargs)
+
+ APIView.initial = sentry_patched_drf_initial
+
+
+def _patch_channels():
+ # type: () -> None
+ try:
+ from channels.http import AsgiHandler # type: ignore
+ except ImportError:
+ return
+
+ if not HAS_REAL_CONTEXTVARS:
+ # We better have contextvars or we're going to leak state between
+ # requests.
+ #
+ # We cannot hard-raise here because channels may not be used at all in
+ # the current process.
+ logger.warning(
+ "We detected that you are using Django channels 2.0. To get proper "
+ "instrumentation for ASGI requests, the Sentry SDK requires "
+ "Python 3.7+ or the aiocontextvars package from PyPI."
+ )
+
+ from sentry_sdk.integrations.django.asgi import patch_channels_asgi_handler_impl
+
+ patch_channels_asgi_handler_impl(AsgiHandler)
+
+
+def _patch_django_asgi_handler():
+ # type: () -> None
+ try:
+ from django.core.handlers.asgi import ASGIHandler
+ except ImportError:
+ return
+
+ if not HAS_REAL_CONTEXTVARS:
+ # We better have contextvars or we're going to leak state between
+ # requests.
+ #
+ # We cannot hard-raise here because Django may not be used at all in
+ # the current process.
+ logger.warning(
+ "We detected that you are using Django 3. To get proper "
+ "instrumentation for ASGI requests, the Sentry SDK requires "
+ "Python 3.7+ or the aiocontextvars package from PyPI."
+ )
+
+ from sentry_sdk.integrations.django.asgi import patch_django_asgi_handler_impl
+
+ patch_django_asgi_handler_impl(ASGIHandler)
+
+
+def _make_event_processor(weak_request, integration):
+ # type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor
+ def event_processor(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ # if the request is gone we are fine not logging the data from
+ # it. This might happen if the processor is pushed away to
+ # another thread.
+ request = weak_request()
+ if request is None:
+ return event
+
+ try:
+ drf_request = request._sentry_drf_request_backref()
+ if drf_request is not None:
+ request = drf_request
+ except AttributeError:
+ pass
+
+ with capture_internal_exceptions():
+ DjangoRequestExtractor(request).extract_into_event(event)
+
+ if _should_send_default_pii():
+ with capture_internal_exceptions():
+ _set_user_info(request, event)
+
+ return event
+
+ return event_processor
+
+
+def _got_request_exception(request=None, **kwargs):
+ # type: (WSGIRequest, **Any) -> None
+ hub = Hub.current
+ integration = hub.get_integration(DjangoIntegration)
+ if integration is not None:
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ sys.exc_info(),
+ client_options=client.options,
+ mechanism={"type": "django", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+
+class DjangoRequestExtractor(RequestExtractor):
+ def env(self):
+ # type: () -> Dict[str, str]
+ return self.request.META
+
+ def cookies(self):
+ # type: () -> Dict[str, str]
+ return self.request.COOKIES
+
+ def raw_data(self):
+ # type: () -> bytes
+ return self.request.body
+
+ def form(self):
+ # type: () -> QueryDict
+ return self.request.POST
+
+ def files(self):
+ # type: () -> MultiValueDict
+ return self.request.FILES
+
+ def size_of_file(self, file):
+ # type: (Any) -> int
+ return file.size
+
+ def parsed_body(self):
+ # type: () -> Optional[Dict[str, Any]]
+ try:
+ return self.request.data
+ except AttributeError:
+ return RequestExtractor.parsed_body(self)
+
+
+def _set_user_info(request, event):
+ # type: (WSGIRequest, Dict[str, Any]) -> None
+ user_info = event.setdefault("user", {})
+
+ user = getattr(request, "user", None)
+
+ if user is None or not is_authenticated(user):
+ return
+
+ try:
+ user_info.setdefault("id", str(user.pk))
+ except Exception:
+ pass
+
+ try:
+ user_info.setdefault("email", user.email)
+ except Exception:
+ pass
+
+ try:
+ user_info.setdefault("username", user.get_username())
+ except Exception:
+ pass
+
+
+def install_sql_hook():
+ # type: () -> None
+ """If installed this causes Django's queries to be captured."""
+ try:
+ from django.db.backends.utils import CursorWrapper
+ except ImportError:
+ from django.db.backends.util import CursorWrapper
+
+ try:
+ real_execute = CursorWrapper.execute
+ real_executemany = CursorWrapper.executemany
+ except AttributeError:
+ # This won't work on Django versions < 1.6
+ return
+
+ def execute(self, sql, params=None):
+ # type: (CursorWrapper, Any, Optional[Any]) -> Any
+ hub = Hub.current
+ if hub.get_integration(DjangoIntegration) is None:
+ return real_execute(self, sql, params)
+
+ with record_sql_queries(
+ hub, self.cursor, sql, params, paramstyle="format", executemany=False
+ ):
+ return real_execute(self, sql, params)
+
+ def executemany(self, sql, param_list):
+ # type: (CursorWrapper, Any, List[Any]) -> Any
+ hub = Hub.current
+ if hub.get_integration(DjangoIntegration) is None:
+ return real_executemany(self, sql, param_list)
+
+ with record_sql_queries(
+ hub, self.cursor, sql, param_list, paramstyle="format", executemany=True
+ ):
+ return real_executemany(self, sql, param_list)
+
+ CursorWrapper.execute = execute
+ CursorWrapper.executemany = executemany
+ ignore_logger("django.db.backends")
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/django/asgi.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/django/asgi.py
new file mode 100644
index 0000000000..96ae3e0809
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/django/asgi.py
@@ -0,0 +1,47 @@
+"""
+Instrumentation for Django 3.0
+
+Since this file contains `async def` it is conditionally imported in
+`sentry_sdk.integrations.django` (depending on the existence of
+`django.core.handlers.asgi`).
+"""
+
+from sentry_sdk import Hub
+from sentry_sdk._types import MYPY
+
+from sentry_sdk.integrations.django import DjangoIntegration
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+
+if MYPY:
+ from typing import Any
+
+
+def patch_django_asgi_handler_impl(cls):
+ # type: (Any) -> None
+ old_app = cls.__call__
+
+ async def sentry_patched_asgi_handler(self, scope, receive, send):
+ # type: (Any, Any, Any, Any) -> Any
+ if Hub.current.get_integration(DjangoIntegration) is None:
+ return await old_app(self, scope, receive, send)
+
+ middleware = SentryAsgiMiddleware(old_app.__get__(self, cls))._run_asgi3
+ return await middleware(scope, receive, send)
+
+ cls.__call__ = sentry_patched_asgi_handler
+
+
+def patch_channels_asgi_handler_impl(cls):
+ # type: (Any) -> None
+ old_app = cls.__call__
+
+ async def sentry_patched_asgi_handler(self, receive, send):
+ # type: (Any, Any, Any) -> Any
+ if Hub.current.get_integration(DjangoIntegration) is None:
+ return await old_app(self, receive, send)
+
+ middleware = SentryAsgiMiddleware(lambda _scope: old_app.__get__(self, cls))
+
+ return await middleware(self.scope)(receive, send)
+
+ cls.__call__ = sentry_patched_asgi_handler
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/django/middleware.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/django/middleware.py
new file mode 100644
index 0000000000..edbeccb093
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/django/middleware.py
@@ -0,0 +1,136 @@
+"""
+Create spans from Django middleware invocations
+"""
+
+from functools import wraps
+
+from django import VERSION as DJANGO_VERSION
+
+from sentry_sdk import Hub
+from sentry_sdk.utils import (
+ ContextVar,
+ transaction_from_function,
+ capture_internal_exceptions,
+)
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Callable
+ from typing import TypeVar
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+_import_string_should_wrap_middleware = ContextVar(
+ "import_string_should_wrap_middleware"
+)
+
+if DJANGO_VERSION < (1, 7):
+ import_string_name = "import_by_path"
+else:
+ import_string_name = "import_string"
+
+
+def patch_django_middlewares():
+ # type: () -> None
+ from django.core.handlers import base
+
+ old_import_string = getattr(base, import_string_name)
+
+ def sentry_patched_import_string(dotted_path):
+ # type: (str) -> Any
+ rv = old_import_string(dotted_path)
+
+ if _import_string_should_wrap_middleware.get(None):
+ rv = _wrap_middleware(rv, dotted_path)
+
+ return rv
+
+ setattr(base, import_string_name, sentry_patched_import_string)
+
+ old_load_middleware = base.BaseHandler.load_middleware
+
+ def sentry_patched_load_middleware(self):
+ # type: (base.BaseHandler) -> Any
+ _import_string_should_wrap_middleware.set(True)
+ try:
+ return old_load_middleware(self)
+ finally:
+ _import_string_should_wrap_middleware.set(False)
+
+ base.BaseHandler.load_middleware = sentry_patched_load_middleware
+
+
+def _wrap_middleware(middleware, middleware_name):
+ # type: (Any, str) -> Any
+ from sentry_sdk.integrations.django import DjangoIntegration
+
+ def _get_wrapped_method(old_method):
+ # type: (F) -> F
+ with capture_internal_exceptions():
+
+ def sentry_wrapped_method(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(DjangoIntegration)
+ if integration is None or not integration.middleware_spans:
+ return old_method(*args, **kwargs)
+
+ function_name = transaction_from_function(old_method)
+
+ description = middleware_name
+ function_basename = getattr(old_method, "__name__", None)
+ if function_basename:
+ description = "{}.{}".format(description, function_basename)
+
+ with hub.start_span(
+ op="django.middleware", description=description
+ ) as span:
+ span.set_tag("django.function_name", function_name)
+ span.set_tag("django.middleware_name", middleware_name)
+ return old_method(*args, **kwargs)
+
+ try:
+ # fails for __call__ of function on Python 2 (see py2.7-django-1.11)
+ return wraps(old_method)(sentry_wrapped_method) # type: ignore
+ except Exception:
+ return sentry_wrapped_method # type: ignore
+
+ return old_method
+
+ class SentryWrappingMiddleware(object):
+ def __init__(self, *args, **kwargs):
+ # type: (*Any, **Any) -> None
+ self._inner = middleware(*args, **kwargs)
+ self._call_method = None
+
+ # We need correct behavior for `hasattr()`, which we can only determine
+ # when we have an instance of the middleware we're wrapping.
+ def __getattr__(self, method_name):
+ # type: (str) -> Any
+ if method_name not in (
+ "process_request",
+ "process_view",
+ "process_template_response",
+ "process_response",
+ "process_exception",
+ ):
+ raise AttributeError()
+
+ old_method = getattr(self._inner, method_name)
+ rv = _get_wrapped_method(old_method)
+ self.__dict__[method_name] = rv
+ return rv
+
+ def __call__(self, *args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ f = self._call_method
+ if f is None:
+ self._call_method = f = _get_wrapped_method(self._inner.__call__)
+ return f(*args, **kwargs)
+
+ if hasattr(middleware, "__name__"):
+ SentryWrappingMiddleware.__name__ = middleware.__name__
+
+ return SentryWrappingMiddleware
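A hedged illustration, assuming Django is installed, of what `patch_django_middlewares()` achieves: importing a middleware path during `BaseHandler.load_middleware()` returns the wrapping class instead of the original. `MyMiddleware` and its dotted path are hypothetical.

from sentry_sdk.integrations.django.middleware import _wrap_middleware

class MyMiddleware:  # hypothetical user middleware
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        return self.get_response(request)

WrappedMiddleware = _wrap_middleware(MyMiddleware, "myapp.middleware.MyMiddleware")
# Instances of WrappedMiddleware emit a "django.middleware" span around each
# __call__ / process_* invocation when DjangoIntegration.middleware_spans is on.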
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/django/templates.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/django/templates.py
new file mode 100644
index 0000000000..2285644909
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/django/templates.py
@@ -0,0 +1,121 @@
+from django.template import TemplateSyntaxError
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+ from typing import Iterator
+ from typing import Tuple
+
+try:
+ # support Django 1.9
+ from django.template.base import Origin
+except ImportError:
+ # backward compatibility
+ from django.template.loader import LoaderOrigin as Origin
+
+
+def get_template_frame_from_exception(exc_value):
+ # type: (Optional[BaseException]) -> Optional[Dict[str, Any]]
+
+ # As of Django 1.9 or so the new template debug thing showed up.
+ if hasattr(exc_value, "template_debug"):
+ return _get_template_frame_from_debug(exc_value.template_debug) # type: ignore
+
+ # As of r16833 (Django) all exceptions may contain a
+ # ``django_template_source`` attribute (rather than the legacy
+ # ``TemplateSyntaxError.source`` check)
+ if hasattr(exc_value, "django_template_source"):
+ return _get_template_frame_from_source(
+ exc_value.django_template_source # type: ignore
+ )
+
+ if isinstance(exc_value, TemplateSyntaxError) and hasattr(exc_value, "source"):
+ source = exc_value.source
+ if isinstance(source, (tuple, list)) and isinstance(source[0], Origin):
+ return _get_template_frame_from_source(source) # type: ignore
+
+ return None
+
+
+def _get_template_frame_from_debug(debug):
+    # type: (Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]
+ if debug is None:
+ return None
+
+ lineno = debug["line"]
+ filename = debug["name"]
+ if filename is None:
+ filename = "<django template>"
+
+ pre_context = []
+ post_context = []
+ context_line = None
+
+ for i, line in debug["source_lines"]:
+ if i < lineno:
+ pre_context.append(line)
+ elif i > lineno:
+ post_context.append(line)
+ else:
+ context_line = line
+
+ return {
+ "filename": filename,
+ "lineno": lineno,
+ "pre_context": pre_context[-5:],
+ "post_context": post_context[:5],
+ "context_line": context_line,
+ "in_app": True,
+ }
+
+
+def _linebreak_iter(template_source):
+ # type: (str) -> Iterator[int]
+ yield 0
+ p = template_source.find("\n")
+ while p >= 0:
+ yield p + 1
+ p = template_source.find("\n", p + 1)
+
+
+def _get_template_frame_from_source(source):
+ # type: (Tuple[Origin, Tuple[int, int]]) -> Optional[Dict[str, Any]]
+ if not source:
+ return None
+
+ origin, (start, end) = source
+ filename = getattr(origin, "loadname", None)
+ if filename is None:
+ filename = "<django template>"
+ template_source = origin.reload()
+ lineno = None
+ upto = 0
+ pre_context = []
+ post_context = []
+ context_line = None
+
+ for num, next in enumerate(_linebreak_iter(template_source)):
+ line = template_source[upto:next]
+ if start >= upto and end <= next:
+ lineno = num
+ context_line = line
+ elif lineno is None:
+ pre_context.append(line)
+ else:
+ post_context.append(line)
+
+ upto = next
+
+ if context_line is None or lineno is None:
+ return None
+
+ return {
+ "filename": filename,
+ "lineno": lineno,
+ "pre_context": pre_context[-5:],
+ "post_context": post_context[:5],
+ "context_line": context_line,
+ }
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/django/transactions.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/django/transactions.py
new file mode 100644
index 0000000000..f20866ef95
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/django/transactions.py
@@ -0,0 +1,134 @@
+"""
+Copied from raven-python. Used for
+`DjangoIntegration(transaction_style="url")`, i.e. legacy raven-style URL
+resolution.
+"""
+
+from __future__ import absolute_import
+
+import re
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from django.urls.resolvers import URLResolver
+ from typing import Dict
+ from typing import List
+ from typing import Optional
+ from django.urls.resolvers import URLPattern
+ from typing import Tuple
+ from typing import Union
+ from re import Pattern
+
+try:
+ from django.urls import get_resolver
+except ImportError:
+ from django.core.urlresolvers import get_resolver
+
+
+def get_regex(resolver_or_pattern):
+ # type: (Union[URLPattern, URLResolver]) -> Pattern[str]
+ """Utility method for django's deprecated resolver.regex"""
+ try:
+ regex = resolver_or_pattern.regex
+ except AttributeError:
+ regex = resolver_or_pattern.pattern.regex
+ return regex
+
+
+class RavenResolver(object):
+ _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)")
+ _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)")
+ _non_named_group_matcher = re.compile(r"\([^\)]+\)")
+ # [foo|bar|baz]
+ _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]")
+ _camel_re = re.compile(r"([A-Z]+)([a-z])")
+
+ _cache = {} # type: Dict[URLPattern, str]
+
+ def _simplify(self, pattern):
+ # type: (str) -> str
+ r"""
+ Clean up urlpattern regexes into something readable by humans:
+
+ From:
+ > "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
+
+ To:
+ > "{sport_slug}/athletes/{athlete_slug}/"
+ """
+ # remove optional params
+ # TODO(dcramer): it'd be nice to change these into [%s] but it currently
+ # conflicts with the other rules because we're doing regexp matches
+ # rather than parsing tokens
+ result = self._optional_group_matcher.sub(lambda m: "%s" % m.group(1), pattern)
+
+ # handle named groups first
+ result = self._named_group_matcher.sub(lambda m: "{%s}" % m.group(1), result)
+
+ # handle non-named groups
+ result = self._non_named_group_matcher.sub("{var}", result)
+
+ # handle optional params
+ result = self._either_option_matcher.sub(lambda m: m.group(1), result)
+
+ # clean up any outstanding regex-y characters.
+ result = (
+ result.replace("^", "")
+ .replace("$", "")
+ .replace("?", "")
+ .replace("//", "/")
+ .replace("\\", "")
+ )
+
+ return result
+
+ def _resolve(self, resolver, path, parents=None):
+ # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str]
+
+ match = get_regex(resolver).search(path) # Django < 2.0
+
+ if not match:
+ return None
+
+ if parents is None:
+ parents = [resolver]
+ elif resolver not in parents:
+ parents = parents + [resolver]
+
+ new_path = path[match.end() :]
+ for pattern in resolver.url_patterns:
+ # this is an include()
+ if not pattern.callback:
+ match_ = self._resolve(pattern, new_path, parents)
+ if match_:
+ return match_
+ continue
+ elif not get_regex(pattern).search(new_path):
+ continue
+
+ try:
+ return self._cache[pattern]
+ except KeyError:
+ pass
+
+ prefix = "".join(self._simplify(get_regex(p).pattern) for p in parents)
+ result = prefix + self._simplify(get_regex(pattern).pattern)
+ if not result.startswith("/"):
+ result = "/" + result
+ self._cache[pattern] = result
+ return result
+
+ return None
+
+ def resolve(
+ self,
+ path, # type: str
+ urlconf=None, # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]]
+ ):
+ # type: (...) -> str
+ resolver = get_resolver(urlconf)
+ match = self._resolve(resolver, path)
+ return match or path
+
+
+LEGACY_RESOLVER = RavenResolver()
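A small sketch of what `_simplify` does to a urlpattern regex, reusing the example from its docstring; it assumes Django is importable, since this module imports `get_resolver` at import time.

from sentry_sdk.integrations.django.transactions import RavenResolver

resolver = RavenResolver()
pattern = r"^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
print(resolver._simplify(pattern))
# -> {sport_slug}/athletes/{athlete_slug}/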
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/excepthook.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/excepthook.py
new file mode 100644
index 0000000000..d8aead097a
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/excepthook.py
@@ -0,0 +1,76 @@
+import sys
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.integrations import Integration
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Callable
+ from typing import Any
+ from typing import Type
+
+ from types import TracebackType
+
+ Excepthook = Callable[
+ [Type[BaseException], BaseException, TracebackType], Any,
+ ]
+
+
+class ExcepthookIntegration(Integration):
+ identifier = "excepthook"
+
+ always_run = False
+
+ def __init__(self, always_run=False):
+ # type: (bool) -> None
+
+ if not isinstance(always_run, bool):
+ raise ValueError(
+ "Invalid value for always_run: %s (must be type boolean)"
+ % (always_run,)
+ )
+ self.always_run = always_run
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ sys.excepthook = _make_excepthook(sys.excepthook)
+
+
+def _make_excepthook(old_excepthook):
+ # type: (Excepthook) -> Excepthook
+ def sentry_sdk_excepthook(type_, value, traceback):
+ # type: (Type[BaseException], BaseException, TracebackType) -> None
+ hub = Hub.current
+ integration = hub.get_integration(ExcepthookIntegration)
+
+ if integration is not None and _should_send(integration.always_run):
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ with capture_internal_exceptions():
+ event, hint = event_from_exception(
+ (type_, value, traceback),
+ client_options=client.options,
+ mechanism={"type": "excepthook", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+ return old_excepthook(type_, value, traceback)
+
+ return sentry_sdk_excepthook
+
+
+def _should_send(always_run=False):
+ # type: (bool) -> bool
+ if always_run:
+ return True
+
+ if hasattr(sys, "ps1"):
+ # Disable the excepthook for interactive Python shells, otherwise
+ # every typo gets sent to Sentry.
+ return False
+
+ return True
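A short sketch of enabling the integration with `always_run=True`, which skips the interactive-shell check in `_should_send` above; the DSN is a placeholder.

import sentry_sdk
from sentry_sdk.integrations.excepthook import ExcepthookIntegration

sentry_sdk.init(
    dsn="https://<key>@<org>.ingest.sentry.io/<project>",  # placeholder DSN
    integrations=[ExcepthookIntegration(always_run=True)],
)

# Any exception that reaches the top level now goes through
# sentry_sdk_excepthook before the original excepthook runs.
raise RuntimeError("unhandled example error")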
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/falcon.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/falcon.py
new file mode 100644
index 0000000000..b24aac41c6
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/falcon.py
@@ -0,0 +1,209 @@
+from __future__ import absolute_import
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+
+ from sentry_sdk._types import EventProcessor
+
+try:
+ import falcon # type: ignore
+ import falcon.api_helpers # type: ignore
+
+ from falcon import __version__ as FALCON_VERSION
+except ImportError:
+ raise DidNotEnable("Falcon not installed")
+
+
+class FalconRequestExtractor(RequestExtractor):
+ def env(self):
+ # type: () -> Dict[str, Any]
+ return self.request.env
+
+ def cookies(self):
+ # type: () -> Dict[str, Any]
+ return self.request.cookies
+
+ def form(self):
+ # type: () -> None
+ return None # No such concept in Falcon
+
+ def files(self):
+ # type: () -> None
+ return None # No such concept in Falcon
+
+ def raw_data(self):
+ # type: () -> Optional[str]
+
+ # As request data can only be read once we won't make this available
+ # to Sentry. Just send back a dummy string in case there was a
+ # content length.
+ # TODO(jmagnusson): Figure out if there's a way to support this
+ content_length = self.content_length()
+ if content_length > 0:
+ return "[REQUEST_CONTAINING_RAW_DATA]"
+ else:
+ return None
+
+ def json(self):
+ # type: () -> Optional[Dict[str, Any]]
+ try:
+ return self.request.media
+ except falcon.errors.HTTPBadRequest:
+ # NOTE(jmagnusson): We return `falcon.Request._media` here because
+ # falcon 1.4 doesn't do proper type checking in
+ # `falcon.Request.media`. This has been fixed in 2.0.
+ # Relevant code: https://github.com/falconry/falcon/blob/1.4.1/falcon/request.py#L953
+ return self.request._media
+
+
+class SentryFalconMiddleware(object):
+ """Captures exceptions in Falcon requests and send to Sentry"""
+
+ def process_request(self, req, resp, *args, **kwargs):
+ # type: (Any, Any, *Any, **Any) -> None
+ hub = Hub.current
+ integration = hub.get_integration(FalconIntegration)
+ if integration is None:
+ return
+
+ with hub.configure_scope() as scope:
+ scope._name = "falcon"
+ scope.add_event_processor(_make_request_event_processor(req, integration))
+
+
+TRANSACTION_STYLE_VALUES = ("uri_template", "path")
+
+
+class FalconIntegration(Integration):
+ identifier = "falcon"
+
+ transaction_style = None
+
+ def __init__(self, transaction_style="uri_template"):
+ # type: (str) -> None
+ if transaction_style not in TRANSACTION_STYLE_VALUES:
+ raise ValueError(
+ "Invalid value for transaction_style: %s (must be in %s)"
+ % (transaction_style, TRANSACTION_STYLE_VALUES)
+ )
+ self.transaction_style = transaction_style
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ try:
+ version = tuple(map(int, FALCON_VERSION.split(".")))
+ except (ValueError, TypeError):
+ raise DidNotEnable("Unparseable Falcon version: {}".format(FALCON_VERSION))
+
+ if version < (1, 4):
+ raise DidNotEnable("Falcon 1.4 or newer required.")
+
+ _patch_wsgi_app()
+ _patch_handle_exception()
+ _patch_prepare_middleware()
+
+
+def _patch_wsgi_app():
+ # type: () -> None
+ original_wsgi_app = falcon.API.__call__
+
+ def sentry_patched_wsgi_app(self, env, start_response):
+ # type: (falcon.API, Any, Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(FalconIntegration)
+ if integration is None:
+ return original_wsgi_app(self, env, start_response)
+
+ sentry_wrapped = SentryWsgiMiddleware(
+ lambda envi, start_resp: original_wsgi_app(self, envi, start_resp)
+ )
+
+ return sentry_wrapped(env, start_response)
+
+ falcon.API.__call__ = sentry_patched_wsgi_app
+
+
+def _patch_handle_exception():
+ # type: () -> None
+ original_handle_exception = falcon.API._handle_exception
+
+ def sentry_patched_handle_exception(self, *args):
+ # type: (falcon.API, *Any) -> Any
+ # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception
+ # method signature from `(ex, req, resp, params)` to
+ # `(req, resp, ex, params)`
+ if isinstance(args[0], Exception):
+ ex = args[0]
+ else:
+ ex = args[2]
+
+ was_handled = original_handle_exception(self, *args)
+
+ hub = Hub.current
+ integration = hub.get_integration(FalconIntegration)
+
+ if integration is not None and not _is_falcon_http_error(ex):
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ ex,
+ client_options=client.options,
+ mechanism={"type": "falcon", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+ return was_handled
+
+ falcon.API._handle_exception = sentry_patched_handle_exception
+
+
+def _patch_prepare_middleware():
+ # type: () -> None
+ original_prepare_middleware = falcon.api_helpers.prepare_middleware
+
+ def sentry_patched_prepare_middleware(
+ middleware=None, independent_middleware=False
+ ):
+ # type: (Any, Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(FalconIntegration)
+ if integration is not None:
+ middleware = [SentryFalconMiddleware()] + (middleware or [])
+ return original_prepare_middleware(middleware, independent_middleware)
+
+ falcon.api_helpers.prepare_middleware = sentry_patched_prepare_middleware
+
+
+def _is_falcon_http_error(ex):
+ # type: (BaseException) -> bool
+ return isinstance(ex, (falcon.HTTPError, falcon.http_status.HTTPStatus))
+
+
+def _make_request_event_processor(req, integration):
+ # type: (falcon.Request, FalconIntegration) -> EventProcessor
+
+ def inner(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ if integration.transaction_style == "uri_template":
+ event["transaction"] = req.uri_template
+ elif integration.transaction_style == "path":
+ event["transaction"] = req.path
+
+ with capture_internal_exceptions():
+ FalconRequestExtractor(req).extract_into_event(event)
+
+ return event
+
+ return inner
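
A usage sketch for the Falcon integration above (placeholder DSN). transaction_style="path" makes the request event processor use req.path rather than the default req.uri_template as the transaction name:

    import falcon
    import sentry_sdk
    from sentry_sdk.integrations.falcon import FalconIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[FalconIntegration(transaction_style="path")],
    )

    # The patched falcon.API.__call__ wraps the app in SentryWsgiMiddleware and
    # prepare_middleware prepends SentryFalconMiddleware automatically.
    app = falcon.API()
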
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/flask.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/flask.py
new file mode 100644
index 0000000000..ef6ae0e4f0
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/flask.py
@@ -0,0 +1,260 @@
+from __future__ import absolute_import
+
+import weakref
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from sentry_sdk.integrations.wsgi import _ScopedResponse
+ from typing import Any
+ from typing import Dict
+ from werkzeug.datastructures import ImmutableTypeConversionDict
+ from werkzeug.datastructures import ImmutableMultiDict
+ from werkzeug.datastructures import FileStorage
+ from typing import Union
+ from typing import Callable
+
+ from sentry_sdk._types import EventProcessor
+
+
+try:
+ import flask_login # type: ignore
+except ImportError:
+ flask_login = None
+
+try:
+ from flask import ( # type: ignore
+ Request,
+ Flask,
+ _request_ctx_stack,
+ _app_ctx_stack,
+ __version__ as FLASK_VERSION,
+ )
+ from flask.signals import (
+ appcontext_pushed,
+ appcontext_tearing_down,
+ got_request_exception,
+ request_started,
+ )
+except ImportError:
+ raise DidNotEnable("Flask is not installed")
+
+
+TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+
+
+class FlaskIntegration(Integration):
+ identifier = "flask"
+
+ transaction_style = None
+
+ def __init__(self, transaction_style="endpoint"):
+ # type: (str) -> None
+ if transaction_style not in TRANSACTION_STYLE_VALUES:
+ raise ValueError(
+ "Invalid value for transaction_style: %s (must be in %s)"
+ % (transaction_style, TRANSACTION_STYLE_VALUES)
+ )
+ self.transaction_style = transaction_style
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ try:
+ version = tuple(map(int, FLASK_VERSION.split(".")[:3]))
+ except (ValueError, TypeError):
+ raise DidNotEnable("Unparseable Flask version: {}".format(FLASK_VERSION))
+
+ if version < (0, 11):
+ raise DidNotEnable("Flask 0.11 or newer is required.")
+
+ appcontext_pushed.connect(_push_appctx)
+ appcontext_tearing_down.connect(_pop_appctx)
+ request_started.connect(_request_started)
+ got_request_exception.connect(_capture_exception)
+
+ old_app = Flask.__call__
+
+ def sentry_patched_wsgi_app(self, environ, start_response):
+ # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+ if Hub.current.get_integration(FlaskIntegration) is None:
+ return old_app(self, environ, start_response)
+
+ return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))(
+ environ, start_response
+ )
+
+ Flask.__call__ = sentry_patched_wsgi_app # type: ignore
+
+
+def _push_appctx(*args, **kwargs):
+ # type: (*Flask, **Any) -> None
+ hub = Hub.current
+ if hub.get_integration(FlaskIntegration) is not None:
+ # We always want to push a scope, regardless of whether the WSGI app
+ # might already have done so (it does not for the CLI, for example).
+ scope_manager = hub.push_scope()
+ scope_manager.__enter__()
+ _app_ctx_stack.top.sentry_sdk_scope_manager = scope_manager
+ with hub.configure_scope() as scope:
+ scope._name = "flask"
+
+
+def _pop_appctx(*args, **kwargs):
+ # type: (*Flask, **Any) -> None
+ scope_manager = getattr(_app_ctx_stack.top, "sentry_sdk_scope_manager", None)
+ if scope_manager is not None:
+ scope_manager.__exit__(None, None, None)
+
+
+def _request_started(sender, **kwargs):
+ # type: (Flask, **Any) -> None
+ hub = Hub.current
+ integration = hub.get_integration(FlaskIntegration)
+ if integration is None:
+ return
+
+ app = _app_ctx_stack.top.app
+ with hub.configure_scope() as scope:
+ request = _request_ctx_stack.top.request
+
+ # Rely on WSGI middleware to start a trace
+ try:
+ if integration.transaction_style == "endpoint":
+ scope.transaction = request.url_rule.endpoint
+ elif integration.transaction_style == "url":
+ scope.transaction = request.url_rule.rule
+ except Exception:
+ pass
+
+ weak_request = weakref.ref(request)
+ evt_processor = _make_request_event_processor(
+ app, weak_request, integration # type: ignore
+ )
+ scope.add_event_processor(evt_processor)
+
+
+class FlaskRequestExtractor(RequestExtractor):
+ def env(self):
+ # type: () -> Dict[str, str]
+ return self.request.environ
+
+ def cookies(self):
+ # type: () -> ImmutableTypeConversionDict[Any, Any]
+ return self.request.cookies
+
+ def raw_data(self):
+ # type: () -> bytes
+ return self.request.get_data()
+
+ def form(self):
+ # type: () -> ImmutableMultiDict[str, Any]
+ return self.request.form
+
+ def files(self):
+ # type: () -> ImmutableMultiDict[str, Any]
+ return self.request.files
+
+ def is_json(self):
+ # type: () -> bool
+ return self.request.is_json
+
+ def json(self):
+ # type: () -> Any
+ return self.request.get_json()
+
+ def size_of_file(self, file):
+ # type: (FileStorage) -> int
+ return file.content_length
+
+
+def _make_request_event_processor(app, weak_request, integration):
+ # type: (Flask, Callable[[], Request], FlaskIntegration) -> EventProcessor
+ def inner(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ request = weak_request()
+
+ # If the request is gone, we are fine with not logging its data.
+ # This might happen if the processor is pushed away to another
+ # thread.
+ if request is None:
+ return event
+
+ with capture_internal_exceptions():
+ FlaskRequestExtractor(request).extract_into_event(event)
+
+ if _should_send_default_pii():
+ with capture_internal_exceptions():
+ _add_user_to_event(event)
+
+ return event
+
+ return inner
+
+
+def _capture_exception(sender, exception, **kwargs):
+ # type: (Flask, Union[ValueError, BaseException], **Any) -> None
+ hub = Hub.current
+ if hub.get_integration(FlaskIntegration) is None:
+ return
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ exception,
+ client_options=client.options,
+ mechanism={"type": "flask", "handled": False},
+ )
+
+ hub.capture_event(event, hint=hint)
+
+
+def _add_user_to_event(event):
+ # type: (Dict[str, Any]) -> None
+ if flask_login is None:
+ return
+
+ user = flask_login.current_user
+ if user is None:
+ return
+
+ with capture_internal_exceptions():
+ # Access this object as late as possible as accessing the user
+ # is relatively costly
+
+ user_info = event.setdefault("user", {})
+
+ try:
+ user_info.setdefault("id", user.get_id())
+ # TODO: more configurable user attrs here
+ except AttributeError:
+ # might happen if:
+ # - flask_login could not be imported
+ # - flask_login is not configured
+ # - no user is logged in
+ pass
+
+ # The following attribute accesses are ineffective for the general
+ # Flask-Login case, because the User interface of Flask-Login does not
+ # care about anything but the ID. However, Flask-User (based on
+ # Flask-Login) documents a few optional extra attributes.
+ #
+ # https://github.com/lingthio/Flask-User/blob/a379fa0a281789618c484b459cb41236779b95b1/docs/source/data_models.rst#fixed-data-model-property-names
+
+ try:
+ user_info.setdefault("email", user.email)
+ except Exception:
+ pass
+
+ try:
+ user_info.setdefault("username", user.username)
+ except Exception:
+ pass
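
A usage sketch for the Flask integration above (placeholder DSN). _add_user_to_event only runs when the client is allowed to send PII, so the example opts in explicitly:

    import sentry_sdk
    from flask import Flask
    from sentry_sdk.integrations.flask import FlaskIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        send_default_pii=True,  # lets _should_send_default_pii() return True
        integrations=[FlaskIntegration(transaction_style="url")],
    )

    app = Flask(__name__)  # Flask.__call__ is already patched at this point
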
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/gnu_backtrace.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/gnu_backtrace.py
new file mode 100644
index 0000000000..e0ec110547
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/gnu_backtrace.py
@@ -0,0 +1,107 @@
+import re
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.utils import capture_internal_exceptions
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+
+
+MODULE_RE = r"[a-zA-Z0-9/._:\\-]+"
+TYPE_RE = r"[a-zA-Z0-9._:<>,-]+"
+HEXVAL_RE = r"[A-Fa-f0-9]+"
+
+
+FRAME_RE = r"""
+^(?P<index>\d+)\.\s
+(?P<package>{MODULE_RE})\(
+ (?P<retval>{TYPE_RE}\ )?
+ ((?P<function>{TYPE_RE})
+ (?P<args>\(.*\))?
+ )?
+ ((?P<constoffset>\ const)?\+0x(?P<offset>{HEXVAL_RE}))?
+\)\s
+\[0x(?P<retaddr>{HEXVAL_RE})\]$
+""".format(
+ MODULE_RE=MODULE_RE, HEXVAL_RE=HEXVAL_RE, TYPE_RE=TYPE_RE
+)
+
+FRAME_RE = re.compile(FRAME_RE, re.MULTILINE | re.VERBOSE)
+
+
+class GnuBacktraceIntegration(Integration):
+ identifier = "gnu_backtrace"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ @add_global_event_processor
+ def process_gnu_backtrace(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ with capture_internal_exceptions():
+ return _process_gnu_backtrace(event, hint)
+
+
+def _process_gnu_backtrace(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ if Hub.current.get_integration(GnuBacktraceIntegration) is None:
+ return event
+
+ exc_info = hint.get("exc_info", None)
+
+ if exc_info is None:
+ return event
+
+ exception = event.get("exception", None)
+
+ if exception is None:
+ return event
+
+ values = exception.get("values", None)
+
+ if values is None:
+ return event
+
+ for exception in values:
+ frames = exception.get("stacktrace", {}).get("frames", [])
+ if not frames:
+ continue
+
+ msg = exception.get("value", None)
+ if not msg:
+ continue
+
+ additional_frames = []
+ new_msg = []
+
+ for line in msg.splitlines():
+ match = FRAME_RE.match(line)
+ if match:
+ additional_frames.append(
+ (
+ int(match.group("index")),
+ {
+ "package": match.group("package") or None,
+ "function": match.group("function") or None,
+ "platform": "native",
+ },
+ )
+ )
+ else:
+ # Put garbage lines back into message, not sure what to do with them.
+ new_msg.append(line)
+
+ if additional_frames:
+ additional_frames.sort(key=lambda x: -x[0])
+ for _, frame in additional_frames:
+ frames.append(frame)
+
+ new_msg.append("<stacktrace parsed and removed by GnuBacktraceIntegration>")
+ exception["value"] = "\n".join(new_msg)
+
+ return event
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/logging.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/logging.py
new file mode 100644
index 0000000000..6edd785e91
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/logging.py
@@ -0,0 +1,237 @@
+from __future__ import absolute_import
+
+import logging
+import datetime
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import (
+ to_string,
+ event_from_exception,
+ current_stacktrace,
+ capture_internal_exceptions,
+)
+from sentry_sdk.integrations import Integration
+from sentry_sdk._compat import iteritems
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from logging import LogRecord
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+
+DEFAULT_LEVEL = logging.INFO
+DEFAULT_EVENT_LEVEL = logging.ERROR
+
+_IGNORED_LOGGERS = set(["sentry_sdk.errors"])
+
+
+def ignore_logger(
+ name, # type: str
+):
+ # type: (...) -> None
+ """This disables recording (both in breadcrumbs and as events) calls to
+ a logger with a specific name. Among other uses, many of our integrations
+ use this to prevent their own actions from being recorded as breadcrumbs.
+ It is exposed to users as a way to quiet spammy loggers.
+
+ :param name: The name of the logger to ignore (same string you would pass to ``logging.getLogger``).
+ """
+ _IGNORED_LOGGERS.add(name)
+
+
+class LoggingIntegration(Integration):
+ identifier = "logging"
+
+ def __init__(self, level=DEFAULT_LEVEL, event_level=DEFAULT_EVENT_LEVEL):
+ # type: (Optional[int], Optional[int]) -> None
+ self._handler = None
+ self._breadcrumb_handler = None
+
+ if level is not None:
+ self._breadcrumb_handler = BreadcrumbHandler(level=level)
+
+ if event_level is not None:
+ self._handler = EventHandler(level=event_level)
+
+ def _handle_record(self, record):
+ # type: (LogRecord) -> None
+ if self._handler is not None and record.levelno >= self._handler.level:
+ self._handler.handle(record)
+
+ if (
+ self._breadcrumb_handler is not None
+ and record.levelno >= self._breadcrumb_handler.level
+ ):
+ self._breadcrumb_handler.handle(record)
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ old_callhandlers = logging.Logger.callHandlers # type: ignore
+
+ def sentry_patched_callhandlers(self, record):
+ # type: (Any, LogRecord) -> Any
+ try:
+ return old_callhandlers(self, record)
+ finally:
+ # This check is done twice: once here, before we even resolve
+ # the integration, and again in _can_record. Otherwise we have a
+ # high chance of running into a recursion error when the
+ # integration is resolved (which is also slower).
+ if record.name not in _IGNORED_LOGGERS:
+ integration = Hub.current.get_integration(LoggingIntegration)
+ if integration is not None:
+ integration._handle_record(record)
+
+ logging.Logger.callHandlers = sentry_patched_callhandlers # type: ignore
+
+
+def _can_record(record):
+ # type: (LogRecord) -> bool
+ return record.name not in _IGNORED_LOGGERS
+
+
+def _breadcrumb_from_record(record):
+ # type: (LogRecord) -> Dict[str, Any]
+ return {
+ "ty": "log",
+ "level": _logging_to_event_level(record.levelname),
+ "category": record.name,
+ "message": record.message,
+ "timestamp": datetime.datetime.utcfromtimestamp(record.created),
+ "data": _extra_from_record(record),
+ }
+
+
+def _logging_to_event_level(levelname):
+ # type: (str) -> str
+ return {"critical": "fatal"}.get(levelname.lower(), levelname.lower())
+
+
+COMMON_RECORD_ATTRS = frozenset(
+ (
+ "args",
+ "created",
+ "exc_info",
+ "exc_text",
+ "filename",
+ "funcName",
+ "levelname",
+ "levelno",
+ "lineno",
+ "message",
+ "module",
+ "msecs",
+ "msg",
+ "name",
+ "pathname",
+ "process",
+ "processName",
+ "relativeCreated",
+ "stack",
+ "tags",
+ "thread",
+ "threadName",
+ "stack_info",
+ )
+)
+
+
+def _extra_from_record(record):
+ # type: (LogRecord) -> Dict[str, Any]
+ return {
+ k: v
+ for k, v in iteritems(vars(record))
+ if k not in COMMON_RECORD_ATTRS
+ and (not isinstance(k, str) or not k.startswith("_"))
+ }
+
+
+class EventHandler(logging.Handler, object):
+ """
+ A logging handler that emits Sentry events for each log record
+
+ Note that you do not have to use this class if the logging integration is enabled, which it is by default.
+ """
+
+ def emit(self, record):
+ # type: (LogRecord) -> Any
+ with capture_internal_exceptions():
+ self.format(record)
+ return self._emit(record)
+
+ def _emit(self, record):
+ # type: (LogRecord) -> None
+ if not _can_record(record):
+ return
+
+ hub = Hub.current
+ if hub.client is None:
+ return
+
+ client_options = hub.client.options
+
+ # exc_info might be None or (None, None, None)
+ if record.exc_info is not None and record.exc_info[0] is not None:
+ event, hint = event_from_exception(
+ record.exc_info,
+ client_options=client_options,
+ mechanism={"type": "logging", "handled": True},
+ )
+ elif record.exc_info and record.exc_info[0] is None:
+ event = {}
+ hint = {}
+ with capture_internal_exceptions():
+ event["threads"] = {
+ "values": [
+ {
+ "stacktrace": current_stacktrace(
+ client_options["with_locals"]
+ ),
+ "crashed": False,
+ "current": True,
+ }
+ ]
+ }
+ else:
+ event = {}
+ hint = {}
+
+ hint["log_record"] = record
+
+ event["level"] = _logging_to_event_level(record.levelname)
+ event["logger"] = record.name
+ event["logentry"] = {"message": to_string(record.msg), "params": record.args}
+ event["extra"] = _extra_from_record(record)
+
+ hub.capture_event(event, hint=hint)
+
+
+# Legacy name
+SentryHandler = EventHandler
+
+
+class BreadcrumbHandler(logging.Handler, object):
+ """
+ A logging handler that records breadcrumbs for each log record.
+
+ Note that you do not have to use this class if the logging integration is enabled, which it is by default.
+ """
+
+ def emit(self, record):
+ # type: (LogRecord) -> Any
+ with capture_internal_exceptions():
+ self.format(record)
+ return self._emit(record)
+
+ def _emit(self, record):
+ # type: (LogRecord) -> None
+ if not _can_record(record):
+ return
+
+ Hub.current.add_breadcrumb(
+ _breadcrumb_from_record(record), hint={"log_record": record}
+ )
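
A usage sketch for the logging integration above (placeholder DSN): level controls which records become breadcrumbs, event_level which records become events, and ignore_logger silences one logger entirely:

    import logging

    import sentry_sdk
    from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[
            LoggingIntegration(level=logging.INFO, event_level=logging.ERROR)
        ],
    )
    ignore_logger("some.spammy.logger")  # hypothetical logger name

    logging.getLogger(__name__).info("recorded as a breadcrumb only")
    logging.getLogger(__name__).error("recorded as a breadcrumb and an event")
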
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/modules.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/modules.py
new file mode 100644
index 0000000000..3d78cb89bb
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/modules.py
@@ -0,0 +1,56 @@
+from __future__ import absolute_import
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+ from typing import Tuple
+ from typing import Iterator
+
+ from sentry_sdk._types import Event
+
+
+_installed_modules = None
+
+
+def _generate_installed_modules():
+ # type: () -> Iterator[Tuple[str, str]]
+ try:
+ import pkg_resources
+ except ImportError:
+ return
+
+ for info in pkg_resources.working_set:
+ yield info.key, info.version
+
+
+def _get_installed_modules():
+ # type: () -> Dict[str, str]
+ global _installed_modules
+ if _installed_modules is None:
+ _installed_modules = dict(_generate_installed_modules())
+ return _installed_modules
+
+
+class ModulesIntegration(Integration):
+ identifier = "modules"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ @add_global_event_processor
+ def processor(event, hint):
+ # type: (Event, Any) -> Dict[str, Any]
+ if event.get("type") == "transaction":
+ return event
+
+ if Hub.current.get_integration(ModulesIntegration) is None:
+ return event
+
+ event["modules"] = _get_installed_modules()
+ return event
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/pyramid.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/pyramid.py
new file mode 100644
index 0000000000..ee9682343a
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/pyramid.py
@@ -0,0 +1,217 @@
+from __future__ import absolute_import
+
+import os
+import sys
+import weakref
+
+from pyramid.httpexceptions import HTTPException
+from pyramid.request import Request
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk._compat import reraise, iteritems
+
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from pyramid.response import Response
+ from typing import Any
+ from sentry_sdk.integrations.wsgi import _ScopedResponse
+ from typing import Callable
+ from typing import Dict
+ from typing import Optional
+ from webob.cookies import RequestCookies # type: ignore
+ from webob.compat import cgi_FieldStorage # type: ignore
+
+ from sentry_sdk.utils import ExcInfo
+ from sentry_sdk._types import EventProcessor
+
+
+if getattr(Request, "authenticated_userid", None):
+
+ def authenticated_userid(request):
+ # type: (Request) -> Optional[Any]
+ return request.authenticated_userid
+
+
+else:
+ # bw-compat for pyramid < 1.5
+ from pyramid.security import authenticated_userid # type: ignore
+
+
+TRANSACTION_STYLE_VALUES = ("route_name", "route_pattern")
+
+
+class PyramidIntegration(Integration):
+ identifier = "pyramid"
+
+ transaction_style = None
+
+ def __init__(self, transaction_style="route_name"):
+ # type: (str) -> None
+ if transaction_style not in TRANSACTION_STYLE_VALUES:
+ raise ValueError(
+ "Invalid value for transaction_style: %s (must be in %s)"
+ % (transaction_style, TRANSACTION_STYLE_VALUES)
+ )
+ self.transaction_style = transaction_style
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ from pyramid.router import Router
+ from pyramid.request import Request
+
+ old_handle_request = Router.handle_request
+
+ def sentry_patched_handle_request(self, request, *args, **kwargs):
+ # type: (Any, Request, *Any, **Any) -> Response
+ hub = Hub.current
+ integration = hub.get_integration(PyramidIntegration)
+ if integration is not None:
+ with hub.configure_scope() as scope:
+ scope.add_event_processor(
+ _make_event_processor(weakref.ref(request), integration)
+ )
+
+ return old_handle_request(self, request, *args, **kwargs)
+
+ Router.handle_request = sentry_patched_handle_request
+
+ if hasattr(Request, "invoke_exception_view"):
+ old_invoke_exception_view = Request.invoke_exception_view
+
+ def sentry_patched_invoke_exception_view(self, *args, **kwargs):
+ # type: (Request, *Any, **Any) -> Any
+ rv = old_invoke_exception_view(self, *args, **kwargs)
+
+ if (
+ self.exc_info
+ and all(self.exc_info)
+ and rv.status_int == 500
+ and Hub.current.get_integration(PyramidIntegration) is not None
+ ):
+ _capture_exception(self.exc_info)
+
+ return rv
+
+ Request.invoke_exception_view = sentry_patched_invoke_exception_view
+
+ old_wsgi_call = Router.__call__
+
+ def sentry_patched_wsgi_call(self, environ, start_response):
+ # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+ hub = Hub.current
+ integration = hub.get_integration(PyramidIntegration)
+ if integration is None:
+ return old_wsgi_call(self, environ, start_response)
+
+ def sentry_patched_inner_wsgi_call(environ, start_response):
+ # type: (Dict[str, Any], Callable[..., Any]) -> Any
+ try:
+ return old_wsgi_call(self, environ, start_response)
+ except Exception:
+ einfo = sys.exc_info()
+ _capture_exception(einfo)
+ reraise(*einfo)
+
+ return SentryWsgiMiddleware(sentry_patched_inner_wsgi_call)(
+ environ, start_response
+ )
+
+ Router.__call__ = sentry_patched_wsgi_call
+
+
+def _capture_exception(exc_info):
+ # type: (ExcInfo) -> None
+ if exc_info[0] is None or issubclass(exc_info[0], HTTPException):
+ return
+ hub = Hub.current
+ if hub.get_integration(PyramidIntegration) is None:
+ return
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=client.options,
+ mechanism={"type": "pyramid", "handled": False},
+ )
+
+ hub.capture_event(event, hint=hint)
+
+
+class PyramidRequestExtractor(RequestExtractor):
+ def url(self):
+ # type: () -> str
+ return self.request.path_url
+
+ def env(self):
+ # type: () -> Dict[str, str]
+ return self.request.environ
+
+ def cookies(self):
+ # type: () -> RequestCookies
+ return self.request.cookies
+
+ def raw_data(self):
+ # type: () -> str
+ return self.request.text
+
+ def form(self):
+ # type: () -> Dict[str, str]
+ return {
+ key: value
+ for key, value in iteritems(self.request.POST)
+ if not getattr(value, "filename", None)
+ }
+
+ def files(self):
+ # type: () -> Dict[str, cgi_FieldStorage]
+ return {
+ key: value
+ for key, value in iteritems(self.request.POST)
+ if getattr(value, "filename", None)
+ }
+
+ def size_of_file(self, postdata):
+ # type: (cgi_FieldStorage) -> int
+ file = postdata.file
+ try:
+ return os.fstat(file.fileno()).st_size
+ except Exception:
+ return 0
+
+
+def _make_event_processor(weak_request, integration):
+ # type: (Callable[[], Request], PyramidIntegration) -> EventProcessor
+ def event_processor(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ request = weak_request()
+ if request is None:
+ return event
+
+ try:
+ if integration.transaction_style == "route_name":
+ event["transaction"] = request.matched_route.name
+ elif integration.transaction_style == "route_pattern":
+ event["transaction"] = request.matched_route.pattern
+ except Exception:
+ pass
+
+ with capture_internal_exceptions():
+ PyramidRequestExtractor(request).extract_into_event(event)
+
+ if _should_send_default_pii():
+ with capture_internal_exceptions():
+ user_info = event.setdefault("user", {})
+ user_info.setdefault("id", authenticated_userid(request))
+
+ return event
+
+ return event_processor
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/redis.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/redis.py
new file mode 100644
index 0000000000..510fdbb22c
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/redis.py
@@ -0,0 +1,70 @@
+from __future__ import absolute_import
+
+from sentry_sdk import Hub
+from sentry_sdk.utils import capture_internal_exceptions
+from sentry_sdk.integrations import Integration
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+
+
+class RedisIntegration(Integration):
+ identifier = "redis"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ import redis
+
+ patch_redis_client(redis.StrictRedis)
+
+ try:
+ import rb.clients # type: ignore
+ except ImportError:
+ pass
+ else:
+ patch_redis_client(rb.clients.FanoutClient)
+ patch_redis_client(rb.clients.MappingClient)
+ patch_redis_client(rb.clients.RoutingClient)
+
+
+def patch_redis_client(cls):
+ # type: (Any) -> None
+ """
+ This function can be used to instrument custom redis client classes or
+ subclasses.
+ """
+
+ old_execute_command = cls.execute_command
+
+ def sentry_patched_execute_command(self, name, *args, **kwargs):
+ # type: (Any, str, *Any, **Any) -> Any
+ hub = Hub.current
+
+ if hub.get_integration(RedisIntegration) is None:
+ return old_execute_command(self, name, *args, **kwargs)
+
+ description = name
+
+ with capture_internal_exceptions():
+ description_parts = [name]
+ for i, arg in enumerate(args):
+ if i > 10:
+ break
+
+ description_parts.append(repr(arg))
+
+ description = " ".join(description_parts)
+
+ with hub.start_span(op="redis", description=description) as span:
+ if name:
+ span.set_tag("redis.command", name)
+
+ if name and args and name.lower() in ("get", "set", "setex", "setnx"):
+ span.set_tag("redis.key", args[0])
+
+ return old_execute_command(self, name, *args, **kwargs)
+
+ cls.execute_command = sentry_patched_execute_command
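
setup_once only patches redis.StrictRedis and the rb clients; as its docstring notes, patch_redis_client can be called directly for other classes. A sketch with a hypothetical subclass:

    import redis

    from sentry_sdk.integrations.redis import patch_redis_client

    class TracedRedis(redis.StrictRedis):
        # Hypothetical custom client subclass.
        pass

    # Wraps TracedRedis.execute_command in a "redis" span, tagging the command
    # name and, for get/set/setex/setnx, the key.
    patch_redis_client(TracedRedis)
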
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/rq.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/rq.py
new file mode 100644
index 0000000000..fbe8cdda3d
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/rq.py
@@ -0,0 +1,150 @@
+from __future__ import absolute_import
+
+import weakref
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+
+
+try:
+ from rq.version import VERSION as RQ_VERSION
+ from rq.timeouts import JobTimeoutException
+ from rq.worker import Worker
+ from rq.queue import Queue
+except ImportError:
+ raise DidNotEnable("RQ not installed")
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+ from typing import Callable
+
+ from rq.job import Job
+
+ from sentry_sdk.utils import ExcInfo
+ from sentry_sdk._types import EventProcessor
+
+
+class RqIntegration(Integration):
+ identifier = "rq"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+
+ try:
+ version = tuple(map(int, RQ_VERSION.split(".")[:3]))
+ except (ValueError, TypeError):
+ raise DidNotEnable("Unparseable RQ version: {}".format(RQ_VERSION))
+
+ if version < (0, 6):
+ raise DidNotEnable("RQ 0.6 or newer is required.")
+
+ old_perform_job = Worker.perform_job
+
+ def sentry_patched_perform_job(self, job, *args, **kwargs):
+ # type: (Any, Job, *Queue, **Any) -> bool
+ hub = Hub.current
+ integration = hub.get_integration(RqIntegration)
+
+ if integration is None:
+ return old_perform_job(self, job, *args, **kwargs)
+
+ client = hub.client
+ assert client is not None
+
+ with hub.push_scope() as scope:
+ scope.clear_breadcrumbs()
+ scope.add_event_processor(_make_event_processor(weakref.ref(job)))
+
+ span = Span.continue_from_headers(
+ job.meta.get("_sentry_trace_headers") or {}
+ )
+ span.op = "rq.task"
+
+ with capture_internal_exceptions():
+ span.transaction = job.func_name
+
+ with hub.start_span(span):
+ rv = old_perform_job(self, job, *args, **kwargs)
+
+ if self.is_horse:
+ # We're inside of a forked process and RQ is
+ # about to call `os._exit`. Make sure that our
+ # events get sent out.
+ client.flush()
+
+ return rv
+
+ Worker.perform_job = sentry_patched_perform_job
+
+ old_handle_exception = Worker.handle_exception
+
+ def sentry_patched_handle_exception(self, job, *exc_info, **kwargs):
+ # type: (Worker, Any, *Any, **Any) -> Any
+ _capture_exception(exc_info) # type: ignore
+ return old_handle_exception(self, job, *exc_info, **kwargs)
+
+ Worker.handle_exception = sentry_patched_handle_exception
+
+ old_enqueue_job = Queue.enqueue_job
+
+ def sentry_patched_enqueue_job(self, job, **kwargs):
+ # type: (Queue, Any, **Any) -> Any
+ hub = Hub.current
+ if hub.get_integration(RqIntegration) is not None:
+ job.meta["_sentry_trace_headers"] = dict(
+ hub.iter_trace_propagation_headers()
+ )
+
+ return old_enqueue_job(self, job, **kwargs)
+
+ Queue.enqueue_job = sentry_patched_enqueue_job
+
+
+def _make_event_processor(weak_job):
+ # type: (Callable[[], Job]) -> EventProcessor
+ def event_processor(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ job = weak_job()
+ if job is not None:
+ with capture_internal_exceptions():
+ extra = event.setdefault("extra", {})
+ extra["rq-job"] = {
+ "job_id": job.id,
+ "func": job.func_name,
+ "args": job.args,
+ "kwargs": job.kwargs,
+ "description": job.description,
+ }
+
+ if "exc_info" in hint:
+ with capture_internal_exceptions():
+ if issubclass(hint["exc_info"][0], JobTimeoutException):
+ event["fingerprint"] = ["rq", "JobTimeoutException", job.func_name]
+
+ return event
+
+ return event_processor
+
+
+def _capture_exception(exc_info, **kwargs):
+ # type: (ExcInfo, **Any) -> None
+ hub = Hub.current
+ if hub.get_integration(RqIntegration) is None:
+ return
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=client.options,
+ mechanism={"type": "rq", "handled": False},
+ )
+
+ hub.capture_event(event, hint=hint)
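
A usage sketch for the RQ integration above (placeholder DSN). With the SDK initialized in both the enqueuing process and the worker, the patched Queue.enqueue_job stores the current trace headers in job.meta, and Worker.perform_job continues that trace as an "rq.task" span:

    import sentry_sdk
    from redis import Redis
    from rq import Queue

    from sentry_sdk.integrations.rq import RqIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[RqIntegration()],
    )

    # "my_module.my_task" is a hypothetical job function importable by the worker.
    Queue(connection=Redis()).enqueue("my_module.my_task", 42)
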
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/sanic.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/sanic.py
new file mode 100644
index 0000000000..e8fdca422a
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/sanic.py
@@ -0,0 +1,233 @@
+import sys
+import weakref
+from inspect import isawaitable
+
+from sentry_sdk._compat import urlparse, reraise
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ event_from_exception,
+ HAS_REAL_CONTEXTVARS,
+)
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers
+from sentry_sdk.integrations.logging import ignore_logger
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Callable
+ from typing import Optional
+ from typing import Union
+ from typing import Tuple
+ from typing import Dict
+
+ from sanic.request import Request, RequestParameters
+
+ from sentry_sdk._types import Event, EventProcessor, Hint
+
+try:
+ from sanic import Sanic, __version__ as SANIC_VERSION
+ from sanic.exceptions import SanicException
+ from sanic.router import Router
+ from sanic.handlers import ErrorHandler
+except ImportError:
+ raise DidNotEnable("Sanic not installed")
+
+
+class SanicIntegration(Integration):
+ identifier = "sanic"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ try:
+ version = tuple(map(int, SANIC_VERSION.split(".")))
+ except (TypeError, ValueError):
+ raise DidNotEnable("Unparseable Sanic version: {}".format(SANIC_VERSION))
+
+ if version < (0, 8):
+ raise DidNotEnable("Sanic 0.8 or newer required.")
+
+ if not HAS_REAL_CONTEXTVARS:
+ # We better have contextvars or we're going to leak state between
+ # requests.
+ raise DidNotEnable(
+ "The sanic integration for Sentry requires Python 3.7+ "
+ "or the aiocontextvars package"
+ )
+
+ if SANIC_VERSION.startswith("0.8."):
+ # Sanic 0.8 and older creates a logger named "root" and puts a
+ # stringified version of every exception in there (without exc_info),
+ # which our error deduplication can't detect.
+ #
+ # We explicitly check the version here because it is a very
+ # invasive step to ignore this logger and not necessary in newer
+ # versions at all.
+ #
+ # https://github.com/huge-success/sanic/issues/1332
+ ignore_logger("root")
+
+ old_handle_request = Sanic.handle_request
+
+ async def sentry_handle_request(self, request, *args, **kwargs):
+ # type: (Any, Request, *Any, **Any) -> Any
+ hub = Hub.current
+ if hub.get_integration(SanicIntegration) is None:
+ return old_handle_request(self, request, *args, **kwargs)
+
+ weak_request = weakref.ref(request)
+
+ with Hub(hub) as hub:
+ with hub.configure_scope() as scope:
+ scope.clear_breadcrumbs()
+ scope.add_event_processor(_make_request_processor(weak_request))
+
+ response = old_handle_request(self, request, *args, **kwargs)
+ if isawaitable(response):
+ response = await response
+
+ return response
+
+ Sanic.handle_request = sentry_handle_request
+
+ old_router_get = Router.get
+
+ def sentry_router_get(self, request):
+ # type: (Any, Request) -> Any
+ rv = old_router_get(self, request)
+ hub = Hub.current
+ if hub.get_integration(SanicIntegration) is not None:
+ with capture_internal_exceptions():
+ with hub.configure_scope() as scope:
+ scope.transaction = rv[0].__name__
+ return rv
+
+ Router.get = sentry_router_get
+
+ old_error_handler_lookup = ErrorHandler.lookup
+
+ def sentry_error_handler_lookup(self, exception):
+ # type: (Any, Exception) -> Optional[object]
+ _capture_exception(exception)
+ old_error_handler = old_error_handler_lookup(self, exception)
+
+ if old_error_handler is None:
+ return None
+
+ if Hub.current.get_integration(SanicIntegration) is None:
+ return old_error_handler
+
+ async def sentry_wrapped_error_handler(request, exception):
+ # type: (Request, Exception) -> Any
+ try:
+ response = old_error_handler(request, exception)
+ if isawaitable(response):
+ response = await response
+ return response
+ except Exception:
+ # Report errors that occur in Sanic error handler. These
+ # exceptions will not even show up in Sanic's
+ # `sanic.exceptions` logger.
+ exc_info = sys.exc_info()
+ _capture_exception(exc_info)
+ reraise(*exc_info)
+
+ return sentry_wrapped_error_handler
+
+ ErrorHandler.lookup = sentry_error_handler_lookup
+
+
+def _capture_exception(exception):
+ # type: (Union[Tuple[Optional[type], Optional[BaseException], Any], BaseException]) -> None
+ hub = Hub.current
+ integration = hub.get_integration(SanicIntegration)
+ if integration is None:
+ return
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ with capture_internal_exceptions():
+ event, hint = event_from_exception(
+ exception,
+ client_options=client.options,
+ mechanism={"type": "sanic", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+
+def _make_request_processor(weak_request):
+ # type: (Callable[[], Request]) -> EventProcessor
+ def sanic_processor(event, hint):
+ # type: (Event, Optional[Hint]) -> Optional[Event]
+
+ try:
+ if hint and issubclass(hint["exc_info"][0], SanicException):
+ return None
+ except KeyError:
+ pass
+
+ request = weak_request()
+ if request is None:
+ return event
+
+ with capture_internal_exceptions():
+ extractor = SanicRequestExtractor(request)
+ extractor.extract_into_event(event)
+
+ request_info = event["request"]
+ urlparts = urlparse.urlsplit(request.url)
+
+ request_info["url"] = "%s://%s%s" % (
+ urlparts.scheme,
+ urlparts.netloc,
+ urlparts.path,
+ )
+
+ request_info["query_string"] = urlparts.query
+ request_info["method"] = request.method
+ request_info["env"] = {"REMOTE_ADDR": request.remote_addr}
+ request_info["headers"] = _filter_headers(dict(request.headers))
+
+ return event
+
+ return sanic_processor
+
+
+class SanicRequestExtractor(RequestExtractor):
+ def content_length(self):
+ # type: () -> int
+ if self.request.body is None:
+ return 0
+ return len(self.request.body)
+
+ def cookies(self):
+ # type: () -> Dict[str, str]
+ return dict(self.request.cookies)
+
+ def raw_data(self):
+ # type: () -> bytes
+ return self.request.body
+
+ def form(self):
+ # type: () -> RequestParameters
+ return self.request.form
+
+ def is_json(self):
+ # type: () -> bool
+ raise NotImplementedError()
+
+ def json(self):
+ # type: () -> Optional[Any]
+ return self.request.json
+
+ def files(self):
+ # type: () -> RequestParameters
+ return self.request.files
+
+ def size_of_file(self, file):
+ # type: (Any) -> int
+ return len(file.body or ())
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/serverless.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/serverless.py
new file mode 100644
index 0000000000..6dd90b43d0
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/serverless.py
@@ -0,0 +1,87 @@
+import functools
+import sys
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import event_from_exception
+from sentry_sdk._compat import reraise
+
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Callable
+ from typing import TypeVar
+ from typing import Union
+ from typing import Optional
+
+ from typing import overload
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+else:
+
+ def overload(x):
+ # type: (F) -> F
+ return x
+
+
+@overload
+def serverless_function(f, flush=True):
+ # type: (F, bool) -> F
+ pass
+
+
+@overload # noqa
+def serverless_function(f=None, flush=True):
+ # type: (None, bool) -> Callable[[F], F]
+ pass
+
+
+def serverless_function(f=None, flush=True): # noqa
+ # type: (Optional[F], bool) -> Union[F, Callable[[F], F]]
+ def wrapper(f):
+ # type: (F) -> F
+ @functools.wraps(f)
+ def inner(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ with Hub(Hub.current) as hub:
+ with hub.configure_scope() as scope:
+ scope.clear_breadcrumbs()
+
+ try:
+ return f(*args, **kwargs)
+ except Exception:
+ _capture_and_reraise()
+ finally:
+ if flush:
+ _flush_client()
+
+ return inner # type: ignore
+
+ if f is None:
+ return wrapper
+ else:
+ return wrapper(f)
+
+
+def _capture_and_reraise():
+ # type: () -> None
+ exc_info = sys.exc_info()
+ hub = Hub.current
+ if hub is not None and hub.client is not None:
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=hub.client.options,
+ mechanism={"type": "serverless", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+ reraise(*exc_info)
+
+
+def _flush_client():
+ # type: () -> None
+ hub = Hub.current
+ if hub is not None:
+ hub.flush()
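
A usage sketch for the serverless_function decorator above. Both forms are covered by the overloads: the bare decorator wraps the function directly, and flush=False skips the final hub.flush():

    from sentry_sdk.integrations.serverless import serverless_function

    @serverless_function
    def handler(event, context):
        # Exceptions raised here are captured, re-raised, and pending events
        # are flushed before control returns to the platform.
        return {"ok": True}

    @serverless_function(flush=False)
    def fire_and_forget(event, context):
        # Same capture behaviour, but without the final flush.
        return {"ok": True}
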
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/spark/__init__.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/spark/__init__.py
new file mode 100644
index 0000000000..10d94163c5
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/spark/__init__.py
@@ -0,0 +1,4 @@
+from sentry_sdk.integrations.spark.spark_driver import SparkIntegration
+from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration
+
+__all__ = ["SparkIntegration", "SparkWorkerIntegration"]
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/spark/spark_driver.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/spark/spark_driver.py
new file mode 100644
index 0000000000..ea43c37821
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/spark/spark_driver.py
@@ -0,0 +1,263 @@
+from sentry_sdk import configure_scope
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.utils import capture_internal_exceptions
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Optional
+
+ from sentry_sdk._types import Event, Hint
+
+
+class SparkIntegration(Integration):
+ identifier = "spark"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ patch_spark_context_init()
+
+
+def _set_app_properties():
+ # type: () -> None
+ """
+ Set properties on the driver that propagate to the worker processes,
+ so that the worker integration has access to app_name and application_id.
+ """
+ from pyspark import SparkContext
+
+ spark_context = SparkContext._active_spark_context
+ if spark_context:
+ spark_context.setLocalProperty("sentry_app_name", spark_context.appName)
+ spark_context.setLocalProperty(
+ "sentry_application_id", spark_context.applicationId
+ )
+
+
+def _start_sentry_listener(sc):
+ # type: (Any) -> None
+ """
+ Start java gateway server to add custom `SparkListener`
+ """
+ from pyspark.java_gateway import ensure_callback_server_started
+
+ gw = sc._gateway
+ ensure_callback_server_started(gw)
+ listener = SentryListener()
+ sc._jsc.sc().addSparkListener(listener)
+
+
+def patch_spark_context_init():
+ # type: () -> None
+ from pyspark import SparkContext
+
+ spark_context_init = SparkContext._do_init
+
+ def _sentry_patched_spark_context_init(self, *args, **kwargs):
+ # type: (SparkContext, *Any, **Any) -> Optional[Any]
+ init = spark_context_init(self, *args, **kwargs)
+
+ if Hub.current.get_integration(SparkIntegration) is None:
+ return init
+
+ _start_sentry_listener(self)
+ _set_app_properties()
+
+ with configure_scope() as scope:
+
+ @scope.add_event_processor
+ def process_event(event, hint):
+ # type: (Event, Hint) -> Optional[Event]
+ with capture_internal_exceptions():
+ if Hub.current.get_integration(SparkIntegration) is None:
+ return event
+
+ event.setdefault("user", {}).setdefault("id", self.sparkUser())
+
+ event.setdefault("tags", {}).setdefault(
+ "executor.id", self._conf.get("spark.executor.id")
+ )
+ event["tags"].setdefault(
+ "spark-submit.deployMode",
+ self._conf.get("spark.submit.deployMode"),
+ )
+ event["tags"].setdefault(
+ "driver.host", self._conf.get("spark.driver.host")
+ )
+ event["tags"].setdefault(
+ "driver.port", self._conf.get("spark.driver.port")
+ )
+ event["tags"].setdefault("spark_version", self.version)
+ event["tags"].setdefault("app_name", self.appName)
+ event["tags"].setdefault("application_id", self.applicationId)
+ event["tags"].setdefault("master", self.master)
+ event["tags"].setdefault("spark_home", self.sparkHome)
+
+ event.setdefault("extra", {}).setdefault("web_url", self.uiWebUrl)
+
+ return event
+
+ return init
+
+ SparkContext._do_init = _sentry_patched_spark_context_init
+
+
+class SparkListener(object):
+ def onApplicationEnd(self, applicationEnd): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onApplicationStart(self, applicationStart): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onBlockManagerAdded(self, blockManagerAdded): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onBlockManagerRemoved(self, blockManagerRemoved): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onBlockUpdated(self, blockUpdated): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onEnvironmentUpdate(self, environmentUpdate): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onExecutorAdded(self, executorAdded): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onExecutorBlacklisted(self, executorBlacklisted): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onExecutorBlacklistedForStage( # noqa: N802
+ self, executorBlacklistedForStage # noqa: N803
+ ):
+ # type: (Any) -> None
+ pass
+
+ def onExecutorMetricsUpdate(self, executorMetricsUpdate): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onExecutorRemoved(self, executorRemoved): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onJobEnd(self, jobEnd): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onJobStart(self, jobStart): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onNodeBlacklisted(self, nodeBlacklisted): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onNodeBlacklistedForStage(self, nodeBlacklistedForStage): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onNodeUnblacklisted(self, nodeUnblacklisted): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onOtherEvent(self, event): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onSpeculativeTaskSubmitted(self, speculativeTask): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onStageCompleted(self, stageCompleted): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onTaskEnd(self, taskEnd): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onTaskGettingResult(self, taskGettingResult): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onTaskStart(self, taskStart): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onUnpersistRDD(self, unpersistRDD): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ class Java:
+ implements = ["org.apache.spark.scheduler.SparkListenerInterface"]
+
+
+class SentryListener(SparkListener):
+ def __init__(self):
+ # type: () -> None
+ self.hub = Hub.current
+
+ def onJobStart(self, jobStart): # noqa: N802,N803
+ # type: (Any) -> None
+ message = "Job {} Started".format(jobStart.jobId())
+ self.hub.add_breadcrumb(level="info", message=message)
+ _set_app_properties()
+
+ def onJobEnd(self, jobEnd): # noqa: N802,N803
+ # type: (Any) -> None
+ level = ""
+ message = ""
+ data = {"result": jobEnd.jobResult().toString()}
+
+ if jobEnd.jobResult().toString() == "JobSucceeded":
+ level = "info"
+ message = "Job {} Ended".format(jobEnd.jobId())
+ else:
+ level = "warning"
+ message = "Job {} Failed".format(jobEnd.jobId())
+
+ self.hub.add_breadcrumb(level=level, message=message, data=data)
+
+ def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803
+ # type: (Any) -> None
+ stage_info = stageSubmitted.stageInfo()
+ message = "Stage {} Submitted".format(stage_info.stageId())
+ data = {"attemptId": stage_info.attemptId(), "name": stage_info.name()}
+ self.hub.add_breadcrumb(level="info", message=message, data=data)
+ _set_app_properties()
+
+ def onStageCompleted(self, stageCompleted): # noqa: N802,N803
+ # type: (Any) -> None
+ from py4j.protocol import Py4JJavaError # type: ignore
+
+ stage_info = stageCompleted.stageInfo()
+ message = ""
+ level = ""
+ data = {"attemptId": stage_info.attemptId(), "name": stage_info.name()}
+
+ # We have to try/except because stageInfo.failureReason() returns a Scala Option.
+ try:
+ data["reason"] = stage_info.failureReason().get()
+ message = "Stage {} Failed".format(stage_info.stageId())
+ level = "warning"
+ except Py4JJavaError:
+ message = "Stage {} Completed".format(stage_info.stageId())
+ level = "info"
+
+ self.hub.add_breadcrumb(level=level, message=message, data=data)
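
A usage sketch for the driver-side Spark integration above (placeholder DSN). Since setup_once patches SparkContext._do_init, sentry_sdk.init has to run before the SparkContext is created for the listener and app properties to be installed:

    import sentry_sdk
    from sentry_sdk.integrations.spark import SparkIntegration

    from pyspark import SparkContext

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[SparkIntegration()],
    )

    # The patched _do_init registers the SentryListener and sets the
    # sentry_app_name / sentry_application_id local properties.
    sc = SparkContext(appName="sentry-example")
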
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/spark/spark_worker.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/spark/spark_worker.py
new file mode 100644
index 0000000000..bae4413d11
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/spark/spark_worker.py
@@ -0,0 +1,120 @@
+from __future__ import absolute_import
+
+import sys
+
+from sentry_sdk import configure_scope
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ exc_info_from_error,
+ single_exception_from_error_tuple,
+ walk_exception_chain,
+ event_hint_with_exc_info,
+)
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Optional
+
+ from sentry_sdk._types import ExcInfo, Event, Hint
+
+
+class SparkWorkerIntegration(Integration):
+ identifier = "spark_worker"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ import pyspark.daemon as original_daemon
+
+ original_daemon.worker_main = _sentry_worker_main
+
+
+def _capture_exception(exc_info, hub):
+ # type: (ExcInfo, Hub) -> None
+ client = hub.client
+
+ client_options = client.options # type: ignore
+
+ mechanism = {"type": "spark", "handled": False}
+
+ exc_info = exc_info_from_error(exc_info)
+
+ exc_type, exc_value, tb = exc_info
+ rv = []
+
+ # On exception, the worker will call sys.exit(-1), so we can ignore SystemExit and similar errors.
+ for exc_type, exc_value, tb in walk_exception_chain(exc_info):
+ if exc_type not in (SystemExit, EOFError, ConnectionResetError):
+ rv.append(
+ single_exception_from_error_tuple(
+ exc_type, exc_value, tb, client_options, mechanism
+ )
+ )
+
+ if rv:
+ rv.reverse()
+ hint = event_hint_with_exc_info(exc_info)
+ event = {"level": "error", "exception": {"values": rv}}
+
+ _tag_task_context()
+
+ hub.capture_event(event, hint=hint)
+
+
+def _tag_task_context():
+ # type: () -> None
+ from pyspark.taskcontext import TaskContext
+
+ with configure_scope() as scope:
+
+ @scope.add_event_processor
+ def process_event(event, hint):
+ # type: (Event, Hint) -> Optional[Event]
+ with capture_internal_exceptions():
+ integration = Hub.current.get_integration(SparkWorkerIntegration)
+ task_context = TaskContext.get()
+
+ if integration is None or task_context is None:
+ return event
+
+ event.setdefault("tags", {}).setdefault(
+ "stageId", task_context.stageId()
+ )
+ event["tags"].setdefault("partitionId", task_context.partitionId())
+ event["tags"].setdefault("attemptNumber", task_context.attemptNumber())
+ event["tags"].setdefault("taskAttemptId", task_context.taskAttemptId())
+
+ if task_context._localProperties:
+ if "sentry_app_name" in task_context._localProperties:
+ event["tags"].setdefault(
+ "app_name", task_context._localProperties["sentry_app_name"]
+ )
+ event["tags"].setdefault(
+ "application_id",
+ task_context._localProperties["sentry_application_id"],
+ )
+
+ if "callSite.short" in task_context._localProperties:
+ event.setdefault("extra", {}).setdefault(
+ "callSite", task_context._localProperties["callSite.short"]
+ )
+
+ return event
+
+
+def _sentry_worker_main(*args, **kwargs):
+ # type: (*Optional[Any], **Optional[Any]) -> None
+ import pyspark.worker as original_worker
+
+ try:
+ original_worker.main(*args, **kwargs)
+ except SystemExit:
+ if Hub.current.get_integration(SparkWorkerIntegration) is not None:
+ hub = Hub.current
+ exc_info = sys.exc_info()
+ with capture_internal_exceptions():
+ _capture_exception(exc_info, hub)
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/sqlalchemy.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/sqlalchemy.py
new file mode 100644
index 0000000000..f24d2f20bf
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/sqlalchemy.py
@@ -0,0 +1,86 @@
+from __future__ import absolute_import
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.tracing import record_sql_queries
+
+try:
+ from sqlalchemy.engine import Engine # type: ignore
+ from sqlalchemy.event import listen # type: ignore
+ from sqlalchemy import __version__ as SQLALCHEMY_VERSION # type: ignore
+except ImportError:
+ raise DidNotEnable("SQLAlchemy not installed.")
+
+if MYPY:
+ from typing import Any
+ from typing import ContextManager
+ from typing import Optional
+
+ from sentry_sdk.tracing import Span
+
+
+class SqlalchemyIntegration(Integration):
+ identifier = "sqlalchemy"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+
+ try:
+ version = tuple(map(int, SQLALCHEMY_VERSION.split("b")[0].split(".")))
+ except (TypeError, ValueError):
+ raise DidNotEnable(
+ "Unparseable SQLAlchemy version: {}".format(SQLALCHEMY_VERSION)
+ )
+
+ if version < (1, 2):
+ raise DidNotEnable("SQLAlchemy 1.2 or newer required.")
+
+ listen(Engine, "before_cursor_execute", _before_cursor_execute)
+ listen(Engine, "after_cursor_execute", _after_cursor_execute)
+ listen(Engine, "handle_error", _handle_error)
+
+
+def _before_cursor_execute(
+ conn, cursor, statement, parameters, context, executemany, *args
+):
+ # type: (Any, Any, Any, Any, Any, bool, *Any) -> None
+ hub = Hub.current
+ if hub.get_integration(SqlalchemyIntegration) is None:
+ return
+
+ ctx_mgr = record_sql_queries(
+ hub,
+ cursor,
+ statement,
+ parameters,
+ paramstyle=context and context.dialect and context.dialect.paramstyle or None,
+ executemany=executemany,
+ )
+ conn._sentry_sql_span_manager = ctx_mgr
+
+ span = ctx_mgr.__enter__()
+
+ if span is not None:
+ conn._sentry_sql_span = span
+
+
+def _after_cursor_execute(conn, cursor, statement, *args):
+ # type: (Any, Any, Any, *Any) -> None
+ ctx_mgr = getattr(
+ conn, "_sentry_sql_span_manager", None
+ ) # type: ContextManager[Any]
+
+ if ctx_mgr is not None:
+ conn._sentry_sql_span_manager = None
+ ctx_mgr.__exit__(None, None, None)
+
+
+def _handle_error(context, *args):
+ # type: (Any, *Any) -> None
+ conn = context.connection
+ span = getattr(conn, "_sentry_sql_span", None) # type: Optional[Span]
+
+ if span is not None:
+ span.set_status("internal_error")
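+
+# Illustrative usage sketch (not part of the upstream module); the DSN below is a
+# placeholder. The integration is enabled through the standard sentry_sdk.init()
+# call, after which cursor executions on any Engine are recorded as spans via
+# record_sql_queries() while a span or transaction is active.
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
+#
+#     sentry_sdk.init(
+#         dsn="https://<key>@<org>.ingest.sentry.io/<project>",
+#         integrations=[SqlalchemyIntegration()],
+#     )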
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/stdlib.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/stdlib.py
new file mode 100644
index 0000000000..56cece70ac
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/stdlib.py
@@ -0,0 +1,230 @@
+import os
+import subprocess
+import sys
+import platform
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.tracing import EnvironHeaders
+from sentry_sdk.utils import capture_internal_exceptions, safe_repr
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Callable
+ from typing import Dict
+ from typing import Optional
+ from typing import List
+
+ from sentry_sdk._types import Event, Hint
+
+
+try:
+ from httplib import HTTPConnection # type: ignore
+except ImportError:
+ from http.client import HTTPConnection
+
+
+_RUNTIME_CONTEXT = {
+ "name": platform.python_implementation(),
+ "version": "%s.%s.%s" % (sys.version_info[:3]),
+ "build": sys.version,
+}
+
+
+class StdlibIntegration(Integration):
+ identifier = "stdlib"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ _install_httplib()
+ _install_subprocess()
+
+ @add_global_event_processor
+ def add_python_runtime_context(event, hint):
+ # type: (Event, Hint) -> Optional[Event]
+ if Hub.current.get_integration(StdlibIntegration) is not None:
+ contexts = event.setdefault("contexts", {})
+ if isinstance(contexts, dict) and "runtime" not in contexts:
+ contexts["runtime"] = _RUNTIME_CONTEXT
+
+ return event
+
+
+def _install_httplib():
+ # type: () -> None
+ real_putrequest = HTTPConnection.putrequest
+ real_getresponse = HTTPConnection.getresponse
+
+ def putrequest(self, method, url, *args, **kwargs):
+ # type: (HTTPConnection, str, str, *Any, **Any) -> Any
+ hub = Hub.current
+ if hub.get_integration(StdlibIntegration) is None:
+ return real_putrequest(self, method, url, *args, **kwargs)
+
+ host = self.host
+ port = self.port
+ default_port = self.default_port
+
+ real_url = url
+ if not real_url.startswith(("http://", "https://")):
+ real_url = "%s://%s%s%s" % (
+ default_port == 443 and "https" or "http",
+ host,
+ port != default_port and ":%s" % port or "",
+ url,
+ )
+
+ span = hub.start_span(op="http", description="%s %s" % (method, real_url))
+
+ span.set_data("method", method)
+ span.set_data("url", real_url)
+
+ rv = real_putrequest(self, method, url, *args, **kwargs)
+
+ for key, value in hub.iter_trace_propagation_headers():
+ self.putheader(key, value)
+
+ self._sentrysdk_span = span
+
+ return rv
+
+ def getresponse(self, *args, **kwargs):
+ # type: (HTTPConnection, *Any, **Any) -> Any
+ span = getattr(self, "_sentrysdk_span", None)
+
+ if span is None:
+ return real_getresponse(self, *args, **kwargs)
+
+ rv = real_getresponse(self, *args, **kwargs)
+
+ span.set_data("status_code", rv.status)
+ span.set_http_status(int(rv.status))
+ span.set_data("reason", rv.reason)
+ span.finish()
+
+ return rv
+
+ HTTPConnection.putrequest = putrequest
+ HTTPConnection.getresponse = getresponse
+
+
+def _init_argument(args, kwargs, name, position, setdefault_callback=None):
+ # type: (List[Any], Dict[Any, Any], str, int, Optional[Callable[[Any], Any]]) -> Any
+ """
+    Given (*args, **kwargs) of a function call, retrieve (and optionally set a
+ default for) an argument by either name or position.
+
+ This is useful for wrapping functions with complex type signatures and
+ extracting a few arguments without needing to redefine that function's
+ entire type signature.
+ """
+
+ if name in kwargs:
+ rv = kwargs[name]
+ if setdefault_callback is not None:
+ rv = setdefault_callback(rv)
+ if rv is not None:
+ kwargs[name] = rv
+ elif position < len(args):
+ rv = args[position]
+ if setdefault_callback is not None:
+ rv = setdefault_callback(rv)
+ if rv is not None:
+ args[position] = rv
+ else:
+ rv = setdefault_callback and setdefault_callback(None)
+ if rv is not None:
+ kwargs[name] = rv
+
+ return rv
+
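+# Worked example (illustrative only): for subprocess.Popen(["ls", "-la"], cwd="/tmp")
+# the wrapper below receives a = [["ls", "-la"]] and kw = {"cwd": "/tmp"}; cwd sits at
+# positional index 9 and env at index 10 in Popen's signature, so:
+#
+#     _init_argument(a, kw, "args", 0)   # -> ["ls", "-la"]  (found by position)
+#     _init_argument(a, kw, "cwd", 9)    # -> "/tmp"         (found by name)
+#     _init_argument(a, kw, "env", 10, lambda x: dict(x or os.environ))
+#     # -> a copy of os.environ, which is also written back into kw["env"]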
+
+def _install_subprocess():
+ # type: () -> None
+ old_popen_init = subprocess.Popen.__init__
+
+ def sentry_patched_popen_init(self, *a, **kw):
+ # type: (subprocess.Popen[Any], *Any, **Any) -> None
+
+ hub = Hub.current
+ if hub.get_integration(StdlibIntegration) is None:
+ return old_popen_init(self, *a, **kw) # type: ignore
+
+ # Convert from tuple to list to be able to set values.
+ a = list(a)
+
+ args = _init_argument(a, kw, "args", 0) or []
+ cwd = _init_argument(a, kw, "cwd", 9)
+
+ # if args is not a list or tuple (and e.g. some iterator instead),
+ # let's not use it at all. There are too many things that can go wrong
+ # when trying to collect an iterator into a list and setting that list
+ # into `a` again.
+ #
+ # Also invocations where `args` is not a sequence are not actually
+ # legal. They just happen to work under CPython.
+ description = None
+
+ if isinstance(args, (list, tuple)) and len(args) < 100:
+ with capture_internal_exceptions():
+ description = " ".join(map(str, args))
+
+ if description is None:
+ description = safe_repr(args)
+
+ env = None
+
+ for k, v in hub.iter_trace_propagation_headers():
+ if env is None:
+ env = _init_argument(a, kw, "env", 10, lambda x: dict(x or os.environ))
+ env["SUBPROCESS_" + k.upper().replace("-", "_")] = v
+
+ with hub.start_span(op="subprocess", description=description) as span:
+ if cwd:
+ span.set_data("subprocess.cwd", cwd)
+
+ rv = old_popen_init(self, *a, **kw) # type: ignore
+
+ span.set_tag("subprocess.pid", self.pid)
+ return rv
+
+ subprocess.Popen.__init__ = sentry_patched_popen_init # type: ignore
+
+ old_popen_wait = subprocess.Popen.wait
+
+ def sentry_patched_popen_wait(self, *a, **kw):
+ # type: (subprocess.Popen[Any], *Any, **Any) -> Any
+ hub = Hub.current
+
+ if hub.get_integration(StdlibIntegration) is None:
+ return old_popen_wait(self, *a, **kw)
+
+ with hub.start_span(op="subprocess.wait") as span:
+ span.set_tag("subprocess.pid", self.pid)
+ return old_popen_wait(self, *a, **kw)
+
+ subprocess.Popen.wait = sentry_patched_popen_wait # type: ignore
+
+ old_popen_communicate = subprocess.Popen.communicate
+
+ def sentry_patched_popen_communicate(self, *a, **kw):
+ # type: (subprocess.Popen[Any], *Any, **Any) -> Any
+ hub = Hub.current
+
+ if hub.get_integration(StdlibIntegration) is None:
+ return old_popen_communicate(self, *a, **kw)
+
+ with hub.start_span(op="subprocess.communicate") as span:
+ span.set_tag("subprocess.pid", self.pid)
+ return old_popen_communicate(self, *a, **kw)
+
+ subprocess.Popen.communicate = sentry_patched_popen_communicate # type: ignore
+
+
+def get_subprocess_traceparent_headers():
+ # type: () -> EnvironHeaders
+ return EnvironHeaders(os.environ, prefix="SUBPROCESS_")
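+
+# Illustrative sketch (not part of the upstream module): the patched Popen writes the
+# current trace headers into the child's environment with a SUBPROCESS_ prefix (e.g.
+# SUBPROCESS_SENTRY_TRACE). A Python child process that also uses sentry_sdk could
+# pick the trace back up roughly like this:
+#
+#     from sentry_sdk.integrations.stdlib import get_subprocess_traceparent_headers
+#     from sentry_sdk.tracing import Span
+#
+#     parent = Span.continue_from_headers(get_subprocess_traceparent_headers())
+#     # ...start the child's spans from `parent` to link them to the calling trace.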
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/threading.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/threading.py
new file mode 100644
index 0000000000..b750257e2a
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/threading.py
@@ -0,0 +1,90 @@
+from __future__ import absolute_import
+
+import sys
+from threading import Thread, current_thread
+
+from sentry_sdk import Hub
+from sentry_sdk._compat import reraise
+from sentry_sdk._types import MYPY
+from sentry_sdk.integrations import Integration
+from sentry_sdk.utils import event_from_exception, capture_internal_exceptions
+
+if MYPY:
+ from typing import Any
+ from typing import TypeVar
+ from typing import Callable
+ from typing import Optional
+
+ from sentry_sdk._types import ExcInfo
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+
+class ThreadingIntegration(Integration):
+ identifier = "threading"
+
+ def __init__(self, propagate_hub=False):
+ # type: (bool) -> None
+ self.propagate_hub = propagate_hub
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ old_start = Thread.start
+
+ def sentry_start(self, *a, **kw):
+ # type: (Thread, *Any, **Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(ThreadingIntegration)
+ if integration is not None:
+ if not integration.propagate_hub:
+ hub_ = None
+ else:
+ hub_ = Hub(hub)
+ # Patching instance methods in `start()` creates a reference cycle if
+ # done in a naive way. See
+ # https://github.com/getsentry/sentry-python/pull/434
+ #
+            # In the threading module, the current_thread API gives access to the
+            # current thread instance without keeping a reference to it, which avoids
+            # the reference cycle in a simpler way.
+ with capture_internal_exceptions():
+ new_run = _wrap_run(hub_, getattr(self.run, "__func__", self.run))
+ self.run = new_run # type: ignore
+
+ return old_start(self, *a, **kw) # type: ignore
+
+ Thread.start = sentry_start # type: ignore
+
+
+def _wrap_run(parent_hub, old_run_func):
+ # type: (Optional[Hub], F) -> F
+ def run(*a, **kw):
+ # type: (*Any, **Any) -> Any
+ hub = parent_hub or Hub.current
+ with hub:
+ try:
+ self = current_thread()
+ return old_run_func(self, *a, **kw)
+ except Exception:
+ reraise(*_capture_exception())
+
+ return run # type: ignore
+
+
+def _capture_exception():
+ # type: () -> ExcInfo
+ hub = Hub.current
+ exc_info = sys.exc_info()
+
+ if hub.get_integration(ThreadingIntegration) is not None:
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=client.options,
+ mechanism={"type": "threading", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+ return exc_info
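+
+# Illustrative sketch (not part of the upstream module); the DSN is a placeholder.
+# With the default propagate_hub=False a spawned thread reports through whatever
+# Hub.current resolves to inside that thread; propagate_hub=True clones the spawning
+# thread's Hub so scope data (tags, user, breadcrumbs) carries over.
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.threading import ThreadingIntegration
+#
+#     sentry_sdk.init(
+#         dsn="https://<key>@<org>.ingest.sentry.io/<project>",
+#         integrations=[ThreadingIntegration(propagate_hub=True)],
+#     )
+#     # Uncaught exceptions raised in Thread.run() are then reported with the
+#     # mechanism {"type": "threading", "handled": False}.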
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/tornado.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/tornado.py
new file mode 100644
index 0000000000..d3ae065690
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/tornado.py
@@ -0,0 +1,203 @@
+import weakref
+from inspect import iscoroutinefunction
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import (
+ HAS_REAL_CONTEXTVARS,
+ event_from_exception,
+ capture_internal_exceptions,
+ transaction_from_function,
+)
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations._wsgi_common import (
+ RequestExtractor,
+ _filter_headers,
+ _is_json_content_type,
+)
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk._compat import iteritems
+
+try:
+ from tornado import version_info as TORNADO_VERSION # type: ignore
+ from tornado.web import RequestHandler, HTTPError
+ from tornado.gen import coroutine
+except ImportError:
+ raise DidNotEnable("Tornado not installed")
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Optional
+ from typing import Dict
+ from typing import Callable
+
+ from sentry_sdk._types import EventProcessor
+
+
+class TornadoIntegration(Integration):
+ identifier = "tornado"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ if TORNADO_VERSION < (5, 0):
+ raise DidNotEnable("Tornado 5+ required")
+
+ if not HAS_REAL_CONTEXTVARS:
+ # Tornado is async. We better have contextvars or we're going to leak
+ # state between requests.
+ raise DidNotEnable(
+ "The tornado integration for Sentry requires Python 3.6+ or the aiocontextvars package"
+ )
+
+ ignore_logger("tornado.access")
+
+ old_execute = RequestHandler._execute # type: ignore
+
+ awaitable = iscoroutinefunction(old_execute)
+
+ if awaitable:
+            # Starting with Tornado 6, the RequestHandler._execute method is a standard
+            # Python coroutine (async/await). In that case our replacement must be a
+            # coroutine function as well.
+ async def sentry_execute_request_handler(self, *args, **kwargs):
+ # type: (Any, *Any, **Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(TornadoIntegration)
+ if integration is None:
+ return await old_execute(self, *args, **kwargs)
+
+ weak_handler = weakref.ref(self)
+
+ with Hub(hub) as hub:
+ with hub.configure_scope() as scope:
+ scope.clear_breadcrumbs()
+ processor = _make_event_processor(weak_handler) # type: ignore
+ scope.add_event_processor(processor)
+ return await old_execute(self, *args, **kwargs)
+
+ else:
+
+ @coroutine # type: ignore
+ def sentry_execute_request_handler(self, *args, **kwargs):
+ # type: (RequestHandler, *Any, **Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(TornadoIntegration)
+ if integration is None:
+ return old_execute(self, *args, **kwargs)
+
+ weak_handler = weakref.ref(self)
+
+ with Hub(hub) as hub:
+ with hub.configure_scope() as scope:
+ scope.clear_breadcrumbs()
+ processor = _make_event_processor(weak_handler) # type: ignore
+ scope.add_event_processor(processor)
+ result = yield from old_execute(self, *args, **kwargs)
+ return result
+
+ RequestHandler._execute = sentry_execute_request_handler # type: ignore
+
+ old_log_exception = RequestHandler.log_exception
+
+ def sentry_log_exception(self, ty, value, tb, *args, **kwargs):
+ # type: (Any, type, BaseException, Any, *Any, **Any) -> Optional[Any]
+ _capture_exception(ty, value, tb)
+ return old_log_exception(self, ty, value, tb, *args, **kwargs) # type: ignore
+
+ RequestHandler.log_exception = sentry_log_exception # type: ignore
+
+
+def _capture_exception(ty, value, tb):
+ # type: (type, BaseException, Any) -> None
+ hub = Hub.current
+ if hub.get_integration(TornadoIntegration) is None:
+ return
+ if isinstance(value, HTTPError):
+ return
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ (ty, value, tb),
+ client_options=client.options,
+ mechanism={"type": "tornado", "handled": False},
+ )
+
+ hub.capture_event(event, hint=hint)
+
+
+def _make_event_processor(weak_handler):
+ # type: (Callable[[], RequestHandler]) -> EventProcessor
+ def tornado_processor(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ handler = weak_handler()
+ if handler is None:
+ return event
+
+ request = handler.request
+
+ with capture_internal_exceptions():
+ method = getattr(handler, handler.request.method.lower())
+ event["transaction"] = transaction_from_function(method)
+
+ with capture_internal_exceptions():
+ extractor = TornadoRequestExtractor(request)
+ extractor.extract_into_event(event)
+
+ request_info = event["request"]
+
+ request_info["url"] = "%s://%s%s" % (
+ request.protocol,
+ request.host,
+ request.path,
+ )
+
+ request_info["query_string"] = request.query
+ request_info["method"] = request.method
+ request_info["env"] = {"REMOTE_ADDR": request.remote_ip}
+ request_info["headers"] = _filter_headers(dict(request.headers))
+
+ with capture_internal_exceptions():
+ if handler.current_user and _should_send_default_pii():
+ event.setdefault("user", {}).setdefault("is_authenticated", True)
+
+ return event
+
+ return tornado_processor
+
+
+class TornadoRequestExtractor(RequestExtractor):
+ def content_length(self):
+ # type: () -> int
+ if self.request.body is None:
+ return 0
+ return len(self.request.body)
+
+ def cookies(self):
+ # type: () -> Dict[str, str]
+ return {k: v.value for k, v in iteritems(self.request.cookies)}
+
+ def raw_data(self):
+ # type: () -> bytes
+ return self.request.body
+
+ def form(self):
+ # type: () -> Dict[str, Any]
+ return {
+ k: [v.decode("latin1", "replace") for v in vs]
+ for k, vs in iteritems(self.request.body_arguments)
+ }
+
+ def is_json(self):
+ # type: () -> bool
+ return _is_json_content_type(self.request.headers.get("content-type"))
+
+ def files(self):
+ # type: () -> Dict[str, Any]
+ return {k: v[0] for k, v in iteritems(self.request.files) if v}
+
+ def size_of_file(self, file):
+ # type: (Any) -> int
+ return len(file.body or ())
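+
+# Illustrative sketch (not part of the upstream module); the DSN is a placeholder.
+# Enabling the integration patches RequestHandler._execute and log_exception;
+# HTTPError (an expected error response) is deliberately not reported and the noisy
+# "tornado.access" logger is ignored.
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.tornado import TornadoIntegration
+#
+#     sentry_sdk.init(
+#         dsn="https://<key>@<org>.ingest.sentry.io/<project>",
+#         integrations=[TornadoIntegration()],
+#     )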
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/trytond.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/trytond.py
new file mode 100644
index 0000000000..062a756993
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/trytond.py
@@ -0,0 +1,55 @@
+import sentry_sdk.hub
+import sentry_sdk.utils
+import sentry_sdk.integrations
+import sentry_sdk.integrations.wsgi
+from sentry_sdk._types import MYPY
+
+from trytond.exceptions import TrytonException # type: ignore
+from trytond.wsgi import app # type: ignore
+
+if MYPY:
+ from typing import Any
+
+
+# TODO: trytond-worker, trytond-cron and trytond-admin integrations
+
+
+class TrytondWSGIIntegration(sentry_sdk.integrations.Integration):
+ identifier = "trytond_wsgi"
+
+ def __init__(self): # type: () -> None
+ pass
+
+ @staticmethod
+ def setup_once(): # type: () -> None
+
+ app.wsgi_app = sentry_sdk.integrations.wsgi.SentryWsgiMiddleware(app.wsgi_app)
+
+ def error_handler(e): # type: (Exception) -> None
+ hub = sentry_sdk.hub.Hub.current
+
+ if hub.get_integration(TrytondWSGIIntegration) is None:
+ return
+ elif isinstance(e, TrytonException):
+ return
+ else:
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+ event, hint = sentry_sdk.utils.event_from_exception(
+ e,
+ client_options=client.options,
+ mechanism={"type": "trytond", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+ # Expected error handlers signature was changed
+ # when the error_handler decorator was introduced
+ # in Tryton-5.4
+ if hasattr(app, "error_handler"):
+
+ @app.error_handler
+ def _(app, request, e): # type: ignore
+ error_handler(e)
+
+ else:
+ app.error_handlers.append(error_handler)
diff --git a/third_party/python/sentry-sdk/sentry_sdk/integrations/wsgi.py b/third_party/python/sentry-sdk/sentry_sdk/integrations/wsgi.py
new file mode 100644
index 0000000000..22982d8bb1
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/integrations/wsgi.py
@@ -0,0 +1,309 @@
+import functools
+import sys
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import (
+ ContextVar,
+ capture_internal_exceptions,
+ event_from_exception,
+)
+from sentry_sdk._compat import PY2, reraise, iteritems
+from sentry_sdk.tracing import Span
+from sentry_sdk.sessions import auto_session_tracking
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Callable
+ from typing import Dict
+ from typing import Iterator
+ from typing import Any
+ from typing import Tuple
+ from typing import Optional
+ from typing import TypeVar
+ from typing import Protocol
+
+ from sentry_sdk.utils import ExcInfo
+ from sentry_sdk._types import EventProcessor
+
+ WsgiResponseIter = TypeVar("WsgiResponseIter")
+ WsgiResponseHeaders = TypeVar("WsgiResponseHeaders")
+ WsgiExcInfo = TypeVar("WsgiExcInfo")
+
+ class StartResponse(Protocol):
+ def __call__(self, status, response_headers, exc_info=None):
+ # type: (str, WsgiResponseHeaders, Optional[WsgiExcInfo]) -> WsgiResponseIter
+ pass
+
+
+_wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied")
+
+
+if PY2:
+
+ def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
+ # type: (str, str, str) -> str
+ return s.decode(charset, errors)
+
+
+else:
+
+ def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
+ # type: (str, str, str) -> str
+ return s.encode("latin1").decode(charset, errors)
+
+
+def get_host(environ):
+ # type: (Dict[str, str]) -> str
+ """Return the host for the given WSGI environment. Yanked from Werkzeug."""
+ if environ.get("HTTP_HOST"):
+ rv = environ["HTTP_HOST"]
+ if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
+ rv = rv[:-3]
+ elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
+ rv = rv[:-4]
+ elif environ.get("SERVER_NAME"):
+ rv = environ["SERVER_NAME"]
+ if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in (
+ ("https", "443"),
+ ("http", "80"),
+ ):
+ rv += ":" + environ["SERVER_PORT"]
+ else:
+ # In spite of the WSGI spec, SERVER_NAME might not be present.
+ rv = "unknown"
+
+ return rv
+
+
+def get_request_url(environ):
+ # type: (Dict[str, str]) -> str
+ """Return the absolute URL without query string for the given WSGI
+ environment."""
+ return "%s://%s/%s" % (
+ environ.get("wsgi.url_scheme"),
+ get_host(environ),
+ wsgi_decoding_dance(environ.get("PATH_INFO") or "").lstrip("/"),
+ )
+
+
+class SentryWsgiMiddleware(object):
+ __slots__ = ("app",)
+
+ def __init__(self, app):
+ # type: (Callable[[Dict[str, str], Callable[..., Any]], Any]) -> None
+ self.app = app
+
+ def __call__(self, environ, start_response):
+ # type: (Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+ if _wsgi_middleware_applied.get(False):
+ return self.app(environ, start_response)
+
+ _wsgi_middleware_applied.set(True)
+ try:
+ hub = Hub(Hub.current)
+ with auto_session_tracking(hub):
+ with hub:
+ with capture_internal_exceptions():
+ with hub.configure_scope() as scope:
+ scope.clear_breadcrumbs()
+ scope._name = "wsgi"
+ scope.add_event_processor(
+ _make_wsgi_event_processor(environ)
+ )
+
+ span = Span.continue_from_environ(environ)
+ span.op = "http.server"
+ span.transaction = "generic WSGI request"
+
+ with hub.start_span(span) as span:
+ try:
+ rv = self.app(
+ environ,
+ functools.partial(
+ _sentry_start_response, start_response, span
+ ),
+ )
+ except BaseException:
+ reraise(*_capture_exception(hub))
+ finally:
+ _wsgi_middleware_applied.set(False)
+
+ return _ScopedResponse(hub, rv)
+
+
+def _sentry_start_response(
+ old_start_response, # type: StartResponse
+ span, # type: Span
+ status, # type: str
+ response_headers, # type: WsgiResponseHeaders
+ exc_info=None, # type: Optional[WsgiExcInfo]
+):
+ # type: (...) -> WsgiResponseIter
+ with capture_internal_exceptions():
+ status_int = int(status.split(" ", 1)[0])
+ span.set_http_status(status_int)
+
+ if exc_info is None:
+ # The Django Rest Framework WSGI test client, and likely other
+ # (incorrect) implementations, cannot deal with the exc_info argument
+ # if one is present. Avoid providing a third argument if not necessary.
+ return old_start_response(status, response_headers)
+ else:
+ return old_start_response(status, response_headers, exc_info)
+
+
+def _get_environ(environ):
+ # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
+ """
+ Returns our whitelisted environment variables.
+ """
+ keys = ["SERVER_NAME", "SERVER_PORT"]
+ if _should_send_default_pii():
+        # Include REMOTE_ADDR to make debugging of proxy setups easier; the proxy
+        # headers themselves are captured with the regular request headers.
+ keys += ["REMOTE_ADDR"]
+
+ for key in keys:
+ if key in environ:
+ yield key, environ[key]
+
+
+# `get_headers` comes from `werkzeug.datastructures.EnvironHeaders`
+#
+# We need this function because Django does not give us a "pure" http header
+# dict. So we might as well use it for all WSGI integrations.
+def _get_headers(environ):
+ # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
+ """
+ Returns only proper HTTP headers.
+
+ """
+ for key, value in iteritems(environ):
+ key = str(key)
+ if key.startswith("HTTP_") and key not in (
+ "HTTP_CONTENT_TYPE",
+ "HTTP_CONTENT_LENGTH",
+ ):
+ yield key[5:].replace("_", "-").title(), value
+ elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
+ yield key.replace("_", "-").title(), value
+
+
+def get_client_ip(environ):
+ # type: (Dict[str, str]) -> Optional[Any]
+ """
+ Infer the user IP address from various headers. This cannot be used in
+    security-sensitive situations since the value may be forged by a client,
+ but it's good enough for the event payload.
+ """
+ try:
+ return environ["HTTP_X_FORWARDED_FOR"].split(",")[0].strip()
+ except (KeyError, IndexError):
+ pass
+
+ try:
+ return environ["HTTP_X_REAL_IP"]
+ except KeyError:
+ pass
+
+ return environ.get("REMOTE_ADDR")
+
+
+def _capture_exception(hub):
+ # type: (Hub) -> ExcInfo
+ exc_info = sys.exc_info()
+
+ # Check client here as it might have been unset while streaming response
+ if hub.client is not None:
+ e = exc_info[1]
+
+ # SystemExit(0) is the only uncaught exception that is expected behavior
+ should_skip_capture = isinstance(e, SystemExit) and e.code in (0, None)
+ if not should_skip_capture:
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=hub.client.options,
+ mechanism={"type": "wsgi", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+ return exc_info
+
+
+class _ScopedResponse(object):
+ __slots__ = ("_response", "_hub")
+
+ def __init__(self, hub, response):
+ # type: (Hub, Iterator[bytes]) -> None
+ self._hub = hub
+ self._response = response
+
+ def __iter__(self):
+ # type: () -> Iterator[bytes]
+ iterator = iter(self._response)
+
+ while True:
+ with self._hub:
+ try:
+ chunk = next(iterator)
+ except StopIteration:
+ break
+ except BaseException:
+ reraise(*_capture_exception(self._hub))
+
+ yield chunk
+
+ def close(self):
+ # type: () -> None
+ with self._hub:
+ try:
+ self._response.close() # type: ignore
+ except AttributeError:
+ pass
+ except BaseException:
+ reraise(*_capture_exception(self._hub))
+
+
+def _make_wsgi_event_processor(environ):
+ # type: (Dict[str, str]) -> EventProcessor
+ # It's a bit unfortunate that we have to extract and parse the request data
+ # from the environ so eagerly, but there are a few good reasons for this.
+ #
+ # We might be in a situation where the scope/hub never gets torn down
+ # properly. In that case we will have an unnecessary strong reference to
+ # all objects in the environ (some of which may take a lot of memory) when
+ # we're really just interested in a few of them.
+ #
+ # Keeping the environment around for longer than the request lifecycle is
+ # also not necessarily something uWSGI can deal with:
+ # https://github.com/unbit/uwsgi/issues/1950
+
+ client_ip = get_client_ip(environ)
+ request_url = get_request_url(environ)
+ query_string = environ.get("QUERY_STRING")
+ method = environ.get("REQUEST_METHOD")
+ env = dict(_get_environ(environ))
+ headers = _filter_headers(dict(_get_headers(environ)))
+
+ def event_processor(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ with capture_internal_exceptions():
+ # if the code below fails halfway through we at least have some data
+ request_info = event.setdefault("request", {})
+
+ if _should_send_default_pii():
+ user_info = event.setdefault("user", {})
+ if client_ip:
+ user_info.setdefault("ip_address", client_ip)
+
+ request_info["url"] = request_url
+ request_info["query_string"] = query_string
+ request_info["method"] = method
+ request_info["env"] = env
+ request_info["headers"] = headers
+
+ return event
+
+ return event_processor
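+
+# Illustrative sketch (not part of the upstream module); the DSN is a placeholder.
+# Framework integrations apply this middleware automatically, but a plain WSGI app
+# can be wrapped directly; each request then runs in its own Hub with an
+# "http.server" span and a request event processor attached.
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+#
+#     sentry_sdk.init(dsn="https://<key>@<org>.ingest.sentry.io/<project>")
+#
+#     def app(environ, start_response):
+#         start_response("200 OK", [("Content-Type", "text/plain")])
+#         return [b"hello"]
+#
+#     app = SentryWsgiMiddleware(app)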
diff --git a/third_party/python/sentry-sdk/sentry_sdk/py.typed b/third_party/python/sentry-sdk/sentry_sdk/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/py.typed
diff --git a/third_party/python/sentry-sdk/sentry_sdk/scope.py b/third_party/python/sentry-sdk/sentry_sdk/scope.py
new file mode 100644
index 0000000000..407af3a2cb
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/scope.py
@@ -0,0 +1,408 @@
+from copy import copy
+from collections import deque
+from functools import wraps
+from itertools import chain
+
+from sentry_sdk.utils import logger, capture_internal_exceptions
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+ from typing import Deque
+ from typing import List
+ from typing import Callable
+ from typing import TypeVar
+
+ from sentry_sdk._types import (
+ Breadcrumb,
+ Event,
+ EventProcessor,
+ ErrorProcessor,
+ ExcInfo,
+ Hint,
+ Type,
+ )
+
+ from sentry_sdk.tracing import Span
+ from sentry_sdk.sessions import Session
+
+ F = TypeVar("F", bound=Callable[..., Any])
+ T = TypeVar("T")
+
+
+global_event_processors = [] # type: List[EventProcessor]
+
+
+def add_global_event_processor(processor):
+ # type: (EventProcessor) -> None
+ global_event_processors.append(processor)
+
+
+def _attr_setter(fn):
+ # type: (Any) -> Any
+ return property(fset=fn, doc=fn.__doc__)
+
+
+def _disable_capture(fn):
+ # type: (F) -> F
+ @wraps(fn)
+ def wrapper(self, *args, **kwargs):
+ # type: (Any, *Dict[str, Any], **Any) -> Any
+ if not self._should_capture:
+ return
+ try:
+ self._should_capture = False
+ return fn(self, *args, **kwargs)
+ finally:
+ self._should_capture = True
+
+ return wrapper # type: ignore
+
+
+class Scope(object):
+ """The scope holds extra information that should be sent with all
+ events that belong to it.
+ """
+
+ # NOTE: Even though it should not happen, the scope needs to not crash when
+ # accessed by multiple threads. It's fine if it's full of races, but those
+ # races should never make the user application crash.
+ #
+ # The same needs to hold for any accesses of the scope the SDK makes.
+
+ __slots__ = (
+ "_level",
+ "_name",
+ "_fingerprint",
+ "_transaction",
+ "_user",
+ "_tags",
+ "_contexts",
+ "_extras",
+ "_breadcrumbs",
+ "_event_processors",
+ "_error_processors",
+ "_should_capture",
+ "_span",
+ "_session",
+ "_force_auto_session_tracking",
+ )
+
+ def __init__(self):
+ # type: () -> None
+ self._event_processors = [] # type: List[EventProcessor]
+ self._error_processors = [] # type: List[ErrorProcessor]
+
+ self._name = None # type: Optional[str]
+ self.clear()
+
+ def clear(self):
+ # type: () -> None
+ """Clears the entire scope."""
+ self._level = None # type: Optional[str]
+ self._fingerprint = None # type: Optional[List[str]]
+ self._transaction = None # type: Optional[str]
+ self._user = None # type: Optional[Dict[str, Any]]
+
+ self._tags = {} # type: Dict[str, Any]
+ self._contexts = {} # type: Dict[str, Dict[str, Any]]
+ self._extras = {} # type: Dict[str, Any]
+
+ self.clear_breadcrumbs()
+ self._should_capture = True
+
+ self._span = None # type: Optional[Span]
+ self._session = None # type: Optional[Session]
+ self._force_auto_session_tracking = None # type: Optional[bool]
+
+ @_attr_setter
+ def level(self, value):
+ # type: (Optional[str]) -> None
+ """When set this overrides the level. Deprecated in favor of set_level."""
+ self._level = value
+
+ def set_level(self, value):
+ # type: (Optional[str]) -> None
+ """Sets the level for the scope."""
+ self._level = value
+
+ @_attr_setter
+ def fingerprint(self, value):
+ # type: (Optional[List[str]]) -> None
+ """When set this overrides the default fingerprint."""
+ self._fingerprint = value
+
+ @_attr_setter
+ def transaction(self, value):
+ # type: (Optional[str]) -> None
+ """When set this forces a specific transaction name to be set."""
+ self._transaction = value
+ span = self._span
+ if span:
+ span.transaction = value
+
+ @_attr_setter
+ def user(self, value):
+ # type: (Dict[str, Any]) -> None
+ """When set a specific user is bound to the scope. Deprecated in favor of set_user."""
+ self.set_user(value)
+
+ def set_user(self, value):
+ # type: (Dict[str, Any]) -> None
+ """Sets a user for the scope."""
+ self._user = value
+ if self._session is not None:
+ self._session.update(user=value)
+
+ @property
+ def span(self):
+ # type: () -> Optional[Span]
+ """Get/set current tracing span."""
+ return self._span
+
+ @span.setter
+ def span(self, span):
+ # type: (Optional[Span]) -> None
+ self._span = span
+ if span is not None:
+ span_transaction = span.transaction
+ if span_transaction:
+ self._transaction = span_transaction
+
+ def set_tag(
+ self,
+ key, # type: str
+ value, # type: Any
+ ):
+ # type: (...) -> None
+ """Sets a tag for a key to a specific value."""
+ self._tags[key] = value
+
+ def remove_tag(
+ self, key # type: str
+ ):
+ # type: (...) -> None
+ """Removes a specific tag."""
+ self._tags.pop(key, None)
+
+ def set_context(
+ self,
+ key, # type: str
+ value, # type: Any
+ ):
+ # type: (...) -> None
+ """Binds a context at a certain key to a specific value."""
+ self._contexts[key] = value
+
+ def remove_context(
+ self, key # type: str
+ ):
+ # type: (...) -> None
+ """Removes a context."""
+ self._contexts.pop(key, None)
+
+ def set_extra(
+ self,
+ key, # type: str
+ value, # type: Any
+ ):
+ # type: (...) -> None
+ """Sets an extra key to a specific value."""
+ self._extras[key] = value
+
+ def remove_extra(
+ self, key # type: str
+ ):
+ # type: (...) -> None
+ """Removes a specific extra key."""
+ self._extras.pop(key, None)
+
+ def clear_breadcrumbs(self):
+ # type: () -> None
+ """Clears breadcrumb buffer."""
+ self._breadcrumbs = deque() # type: Deque[Breadcrumb]
+
+ def add_event_processor(
+ self, func # type: EventProcessor
+ ):
+ # type: (...) -> None
+ """Register a scope local event processor on the scope.
+
+        :param func: This function behaves like `before_send`.
+ """
+ if len(self._event_processors) > 20:
+ logger.warning(
+ "Too many event processors on scope! Clearing list to free up some memory: %r",
+ self._event_processors,
+ )
+ del self._event_processors[:]
+
+ self._event_processors.append(func)
+
+ def add_error_processor(
+ self,
+ func, # type: ErrorProcessor
+ cls=None, # type: Optional[Type[BaseException]]
+ ):
+ # type: (...) -> None
+ """Register a scope local error processor on the scope.
+
+ :param func: A callback that works similar to an event processor but is invoked with the original exception info triple as second argument.
+
+ :param cls: Optionally, only process exceptions of this type.
+ """
+ if cls is not None:
+ cls_ = cls # For mypy.
+ real_func = func
+
+ def func(event, exc_info):
+ # type: (Event, ExcInfo) -> Optional[Event]
+ try:
+ is_inst = isinstance(exc_info[1], cls_)
+ except Exception:
+ is_inst = False
+ if is_inst:
+ return real_func(event, exc_info)
+ return event
+
+ self._error_processors.append(func)
+
+ @_disable_capture
+ def apply_to_event(
+ self,
+ event, # type: Event
+ hint, # type: Hint
+ ):
+ # type: (...) -> Optional[Event]
+ """Applies the information contained on the scope to the given event."""
+
+ def _drop(event, cause, ty):
+ # type: (Dict[str, Any], Any, str) -> Optional[Any]
+ logger.info("%s (%s) dropped event (%s)", ty, cause, event)
+ return None
+
+ if self._level is not None:
+ event["level"] = self._level
+
+ if event.get("type") != "transaction":
+ event.setdefault("breadcrumbs", []).extend(self._breadcrumbs)
+
+ if event.get("user") is None and self._user is not None:
+ event["user"] = self._user
+
+ if event.get("transaction") is None and self._transaction is not None:
+ event["transaction"] = self._transaction
+
+ if event.get("fingerprint") is None and self._fingerprint is not None:
+ event["fingerprint"] = self._fingerprint
+
+ if self._extras:
+ event.setdefault("extra", {}).update(self._extras)
+
+ if self._tags:
+ event.setdefault("tags", {}).update(self._tags)
+
+ if self._contexts:
+ event.setdefault("contexts", {}).update(self._contexts)
+
+ if self._span is not None:
+ contexts = event.setdefault("contexts", {})
+ if not contexts.get("trace"):
+ contexts["trace"] = self._span.get_trace_context()
+
+ exc_info = hint.get("exc_info")
+ if exc_info is not None:
+ for error_processor in self._error_processors:
+ new_event = error_processor(event, exc_info)
+ if new_event is None:
+ return _drop(event, error_processor, "error processor")
+ event = new_event
+
+ for event_processor in chain(global_event_processors, self._event_processors):
+ new_event = event
+ with capture_internal_exceptions():
+ new_event = event_processor(event, hint)
+ if new_event is None:
+ return _drop(event, event_processor, "event processor")
+ event = new_event
+
+ return event
+
+ def update_from_scope(self, scope):
+ # type: (Scope) -> None
+ if scope._level is not None:
+ self._level = scope._level
+ if scope._fingerprint is not None:
+ self._fingerprint = scope._fingerprint
+ if scope._transaction is not None:
+ self._transaction = scope._transaction
+ if scope._user is not None:
+ self._user = scope._user
+ if scope._tags:
+ self._tags.update(scope._tags)
+ if scope._contexts:
+ self._contexts.update(scope._contexts)
+ if scope._extras:
+ self._extras.update(scope._extras)
+ if scope._breadcrumbs:
+ self._breadcrumbs.extend(scope._breadcrumbs)
+ if scope._span:
+ self._span = scope._span
+
+ def update_from_kwargs(
+ self,
+ user=None, # type: Optional[Any]
+ level=None, # type: Optional[str]
+ extras=None, # type: Optional[Dict[str, Any]]
+ contexts=None, # type: Optional[Dict[str, Any]]
+ tags=None, # type: Optional[Dict[str, str]]
+ fingerprint=None, # type: Optional[List[str]]
+ ):
+ # type: (...) -> None
+ if level is not None:
+ self._level = level
+ if user is not None:
+ self._user = user
+ if extras is not None:
+ self._extras.update(extras)
+ if contexts is not None:
+ self._contexts.update(contexts)
+ if tags is not None:
+ self._tags.update(tags)
+ if fingerprint is not None:
+ self._fingerprint = fingerprint
+
+ def __copy__(self):
+ # type: () -> Scope
+ rv = object.__new__(self.__class__) # type: Scope
+
+ rv._level = self._level
+ rv._name = self._name
+ rv._fingerprint = self._fingerprint
+ rv._transaction = self._transaction
+ rv._user = self._user
+
+ rv._tags = dict(self._tags)
+ rv._contexts = dict(self._contexts)
+ rv._extras = dict(self._extras)
+
+ rv._breadcrumbs = copy(self._breadcrumbs)
+ rv._event_processors = list(self._event_processors)
+ rv._error_processors = list(self._error_processors)
+
+ rv._should_capture = self._should_capture
+ rv._span = self._span
+ rv._session = self._session
+ rv._force_auto_session_tracking = self._force_auto_session_tracking
+
+ return rv
+
+ def __repr__(self):
+ # type: () -> str
+ return "<%s id=%s name=%s>" % (
+ self.__class__.__name__,
+ hex(id(self)),
+ self._name,
+ )
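+
+# Illustrative sketch (not part of the upstream module); the tag name is made up.
+# A processor registered with add_global_event_processor() runs for events from
+# every scope, ahead of the scope-local processors, and may mutate the event or
+# drop it by returning None.
+#
+#     from sentry_sdk.scope import add_global_event_processor
+#
+#     @add_global_event_processor
+#     def tag_service(event, hint):
+#         event.setdefault("tags", {}).setdefault("service", "billing")
+#         return event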
diff --git a/third_party/python/sentry-sdk/sentry_sdk/serializer.py b/third_party/python/sentry-sdk/sentry_sdk/serializer.py
new file mode 100644
index 0000000000..3940947553
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/serializer.py
@@ -0,0 +1,336 @@
+import sys
+
+from datetime import datetime
+
+from sentry_sdk.utils import (
+ AnnotatedValue,
+ capture_internal_exception,
+ disable_capture_event,
+ safe_repr,
+ strip_string,
+ format_timestamp,
+)
+
+from sentry_sdk._compat import text_type, PY2, string_types, number_types, iteritems
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from types import TracebackType
+
+ from typing import Any
+ from typing import Dict
+ from typing import List
+ from typing import Optional
+ from typing import Callable
+ from typing import Union
+ from typing import ContextManager
+ from typing import Type
+
+ from sentry_sdk._types import NotImplementedType, Event
+
+ ReprProcessor = Callable[[Any, Dict[str, Any]], Union[NotImplementedType, str]]
+ Segment = Union[str, int]
+
+
+if PY2:
+ # Importing ABCs from collections is deprecated, and will stop working in 3.8
+ # https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
+ from collections import Mapping, Sequence
+
+ serializable_str_types = string_types
+
+else:
+ # New in 3.3
+ # https://docs.python.org/3/library/collections.abc.html
+ from collections.abc import Mapping, Sequence
+
+ # Bytes are technically not strings in Python 3, but we can serialize them
+ serializable_str_types = (str, bytes)
+
+MAX_DATABAG_DEPTH = 5
+MAX_DATABAG_BREADTH = 10
+CYCLE_MARKER = u"<cyclic>"
+
+
+global_repr_processors = [] # type: List[ReprProcessor]
+
+
+def add_global_repr_processor(processor):
+ # type: (ReprProcessor) -> None
+ global_repr_processors.append(processor)
+
+
+class Memo(object):
+ __slots__ = ("_ids", "_objs")
+
+ def __init__(self):
+ # type: () -> None
+ self._ids = {} # type: Dict[int, Any]
+ self._objs = [] # type: List[Any]
+
+ def memoize(self, obj):
+ # type: (Any) -> ContextManager[bool]
+ self._objs.append(obj)
+ return self
+
+ def __enter__(self):
+ # type: () -> bool
+ obj = self._objs[-1]
+ if id(obj) in self._ids:
+ return True
+ else:
+ self._ids[id(obj)] = obj
+ return False
+
+ def __exit__(
+ self,
+ ty, # type: Optional[Type[BaseException]]
+ value, # type: Optional[BaseException]
+ tb, # type: Optional[TracebackType]
+ ):
+ # type: (...) -> None
+ self._ids.pop(id(self._objs.pop()), None)
+
+
+def serialize(event, **kwargs):
+ # type: (Event, **Any) -> Event
+ memo = Memo()
+ path = [] # type: List[Segment]
+ meta_stack = [] # type: List[Dict[str, Any]]
+
+ def _annotate(**meta):
+ # type: (**Any) -> None
+ while len(meta_stack) <= len(path):
+ try:
+ segment = path[len(meta_stack) - 1]
+ node = meta_stack[-1].setdefault(text_type(segment), {})
+ except IndexError:
+ node = {}
+
+ meta_stack.append(node)
+
+ meta_stack[-1].setdefault("", {}).update(meta)
+
+ def _should_repr_strings():
+ # type: () -> Optional[bool]
+ """
+ By default non-serializable objects are going through
+ safe_repr(). For certain places in the event (local vars) we
+ want to repr() even things that are JSON-serializable to
+ make their type more apparent. For example, it's useful to
+ see the difference between a unicode-string and a bytestring
+ when viewing a stacktrace.
+
+ For container-types we still don't do anything different.
+ Generally we just try to make the Sentry UI present exactly
+ what a pretty-printed repr would look like.
+
+ :returns: `True` if we are somewhere in frame variables, and `False` if
+ we are in a position where we will never encounter frame variables
+ when recursing (for example, we're in `event.extra`). `None` if we
+ are not (yet) in frame variables, but might encounter them when
+ recursing (e.g. we're in `event.exception`)
+ """
+ try:
+ p0 = path[0]
+ if p0 == "stacktrace" and path[1] == "frames" and path[3] == "vars":
+ return True
+
+ if (
+ p0 in ("threads", "exception")
+ and path[1] == "values"
+ and path[3] == "stacktrace"
+ and path[4] == "frames"
+ and path[6] == "vars"
+ ):
+ return True
+ except IndexError:
+ return None
+
+ return False
+
+ def _is_databag():
+ # type: () -> Optional[bool]
+ """
+ A databag is any value that we need to trim.
+
+ :returns: Works like `_should_repr_strings()`. `True` for "yes",
+        `False` for "no", `None` for "maybe soon".
+ """
+ try:
+ rv = _should_repr_strings()
+ if rv in (True, None):
+ return rv
+
+ p0 = path[0]
+ if p0 == "request" and path[1] == "data":
+ return True
+
+ if p0 == "breadcrumbs":
+            path[1]  # raises IndexError at the breadcrumbs list itself; caught below as "maybe soon"
+ return True
+
+ if p0 == "extra":
+ return True
+
+ except IndexError:
+ return None
+
+ return False
+
+ def _serialize_node(
+ obj, # type: Any
+ is_databag=None, # type: Optional[bool]
+ should_repr_strings=None, # type: Optional[bool]
+ segment=None, # type: Optional[Segment]
+ remaining_breadth=None, # type: Optional[int]
+ remaining_depth=None, # type: Optional[int]
+ ):
+ # type: (...) -> Any
+ if segment is not None:
+ path.append(segment)
+
+ try:
+ with memo.memoize(obj) as result:
+ if result:
+ return CYCLE_MARKER
+
+ return _serialize_node_impl(
+ obj,
+ is_databag=is_databag,
+ should_repr_strings=should_repr_strings,
+ remaining_depth=remaining_depth,
+ remaining_breadth=remaining_breadth,
+ )
+ except BaseException:
+ capture_internal_exception(sys.exc_info())
+
+ if is_databag:
+ return u"<failed to serialize, use init(debug=True) to see error logs>"
+
+ return None
+ finally:
+ if segment is not None:
+ path.pop()
+ del meta_stack[len(path) + 1 :]
+
+ def _flatten_annotated(obj):
+ # type: (Any) -> Any
+ if isinstance(obj, AnnotatedValue):
+ _annotate(**obj.metadata)
+ obj = obj.value
+ return obj
+
+ def _serialize_node_impl(
+ obj, is_databag, should_repr_strings, remaining_depth, remaining_breadth
+ ):
+ # type: (Any, Optional[bool], Optional[bool], Optional[int], Optional[int]) -> Any
+ if should_repr_strings is None:
+ should_repr_strings = _should_repr_strings()
+
+ if is_databag is None:
+ is_databag = _is_databag()
+
+ if is_databag and remaining_depth is None:
+ remaining_depth = MAX_DATABAG_DEPTH
+ if is_databag and remaining_breadth is None:
+ remaining_breadth = MAX_DATABAG_BREADTH
+
+ obj = _flatten_annotated(obj)
+
+ if remaining_depth is not None and remaining_depth <= 0:
+ _annotate(rem=[["!limit", "x"]])
+ if is_databag:
+ return _flatten_annotated(strip_string(safe_repr(obj)))
+ return None
+
+ if is_databag and global_repr_processors:
+ hints = {"memo": memo, "remaining_depth": remaining_depth}
+ for processor in global_repr_processors:
+ result = processor(obj, hints)
+ if result is not NotImplemented:
+ return _flatten_annotated(result)
+
+ if obj is None or isinstance(obj, (bool, number_types)):
+ return obj if not should_repr_strings else safe_repr(obj)
+
+ elif isinstance(obj, datetime):
+ return (
+ text_type(format_timestamp(obj))
+ if not should_repr_strings
+ else safe_repr(obj)
+ )
+
+ elif isinstance(obj, Mapping):
+ # Create temporary copy here to avoid calling too much code that
+ # might mutate our dictionary while we're still iterating over it.
+ obj = dict(iteritems(obj))
+
+ rv_dict = {} # type: Dict[str, Any]
+ i = 0
+
+ for k, v in iteritems(obj):
+ if remaining_breadth is not None and i >= remaining_breadth:
+ _annotate(len=len(obj))
+ break
+
+ str_k = text_type(k)
+ v = _serialize_node(
+ v,
+ segment=str_k,
+ should_repr_strings=should_repr_strings,
+ is_databag=is_databag,
+ remaining_depth=remaining_depth - 1
+ if remaining_depth is not None
+ else None,
+ remaining_breadth=remaining_breadth,
+ )
+ rv_dict[str_k] = v
+ i += 1
+
+ return rv_dict
+
+ elif not isinstance(obj, serializable_str_types) and isinstance(obj, Sequence):
+ rv_list = []
+
+ for i, v in enumerate(obj):
+ if remaining_breadth is not None and i >= remaining_breadth:
+ _annotate(len=len(obj))
+ break
+
+ rv_list.append(
+ _serialize_node(
+ v,
+ segment=i,
+ should_repr_strings=should_repr_strings,
+ is_databag=is_databag,
+ remaining_depth=remaining_depth - 1
+ if remaining_depth is not None
+ else None,
+ remaining_breadth=remaining_breadth,
+ )
+ )
+
+ return rv_list
+
+ if should_repr_strings:
+ obj = safe_repr(obj)
+ else:
+ if isinstance(obj, bytes):
+ obj = obj.decode("utf-8", "replace")
+
+ if not isinstance(obj, string_types):
+ obj = safe_repr(obj)
+
+ return _flatten_annotated(strip_string(obj))
+
+ disable_capture_event.set(True)
+ try:
+ rv = _serialize_node(event, **kwargs)
+ if meta_stack and isinstance(rv, dict):
+ rv["_meta"] = meta_stack[0]
+
+ return rv
+ finally:
+ disable_capture_event.set(False)
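+
+# Illustrative sketch (not part of the upstream module): databag values such as
+# event["extra"] are walked to at most MAX_DATABAG_DEPTH levels and
+# MAX_DATABAG_BREADTH items per container; anything beyond that is replaced by a
+# repr()/length annotation recorded under event["_meta"].
+#
+#     from sentry_sdk.serializer import serialize
+#
+#     event = {"extra": {"deep": {"a": {"b": {"c": {"d": {"e": 1}}}}}}}
+#     serialized = serialize(event)
+#     # The innermost dict is collapsed to its repr() once the depth budget is
+#     # exhausted, and serialized["_meta"] records what was trimmed.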
diff --git a/third_party/python/sentry-sdk/sentry_sdk/sessions.py b/third_party/python/sentry-sdk/sentry_sdk/sessions.py
new file mode 100644
index 0000000000..f4f7137cc0
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/sessions.py
@@ -0,0 +1,249 @@
+import os
+import uuid
+import time
+from datetime import datetime
+from threading import Thread, Lock
+from contextlib import contextmanager
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.utils import format_timestamp
+
+if MYPY:
+ import sentry_sdk
+
+ from typing import Optional
+ from typing import Union
+ from typing import Any
+ from typing import Dict
+ from typing import Generator
+
+ from sentry_sdk._types import SessionStatus
+
+
+def is_auto_session_tracking_enabled(hub=None):
+ # type: (Optional[sentry_sdk.Hub]) -> bool
+ """Utility function to find out if session tracking is enabled."""
+ if hub is None:
+ hub = sentry_sdk.Hub.current
+ should_track = hub.scope._force_auto_session_tracking
+ if should_track is None:
+ exp = hub.client.options["_experiments"] if hub.client else {}
+ should_track = exp.get("auto_session_tracking")
+ return should_track
+
+
+@contextmanager
+def auto_session_tracking(hub=None):
+ # type: (Optional[sentry_sdk.Hub]) -> Generator[None, None, None]
+ """Starts and stops a session automatically around a block."""
+ if hub is None:
+ hub = sentry_sdk.Hub.current
+ should_track = is_auto_session_tracking_enabled(hub)
+ if should_track:
+ hub.start_session()
+ try:
+ yield
+ finally:
+ if should_track:
+ hub.end_session()
+
+
+def _make_uuid(
+ val, # type: Union[str, uuid.UUID]
+):
+ # type: (...) -> uuid.UUID
+ if isinstance(val, uuid.UUID):
+ return val
+ return uuid.UUID(val)
+
+
+TERMINAL_SESSION_STATES = ("exited", "abnormal", "crashed")
+
+
+class SessionFlusher(object):
+ def __init__(
+ self,
+ flush_func, # type: Any
+ flush_interval=10, # type: int
+ ):
+ # type: (...) -> None
+ self.flush_func = flush_func
+ self.flush_interval = flush_interval
+ self.pending = {} # type: Dict[str, Any]
+ self._thread = None # type: Optional[Thread]
+ self._thread_lock = Lock()
+ self._thread_for_pid = None # type: Optional[int]
+ self._running = True
+
+ def flush(self):
+ # type: (...) -> None
+ pending = self.pending
+ self.pending = {}
+ self.flush_func(list(pending.values()))
+
+ def _ensure_running(self):
+ # type: (...) -> None
+ if self._thread_for_pid == os.getpid() and self._thread is not None:
+ return None
+ with self._thread_lock:
+ if self._thread_for_pid == os.getpid() and self._thread is not None:
+ return None
+
+ def _thread():
+ # type: (...) -> None
+ while self._running:
+ time.sleep(self.flush_interval)
+ if self.pending and self._running:
+ self.flush()
+
+ thread = Thread(target=_thread)
+ thread.daemon = True
+ thread.start()
+ self._thread = thread
+ self._thread_for_pid = os.getpid()
+ return None
+
+ def add_session(
+ self, session # type: Session
+ ):
+ # type: (...) -> None
+ self.pending[session.sid.hex] = session.to_json()
+ self._ensure_running()
+
+ def kill(self):
+ # type: (...) -> None
+ self._running = False
+
+ def __del__(self):
+ # type: (...) -> None
+ self.kill()
+
+
+class Session(object):
+ def __init__(
+ self,
+ sid=None, # type: Optional[Union[str, uuid.UUID]]
+ did=None, # type: Optional[str]
+ timestamp=None, # type: Optional[datetime]
+ started=None, # type: Optional[datetime]
+ duration=None, # type: Optional[float]
+ status=None, # type: Optional[SessionStatus]
+ release=None, # type: Optional[str]
+ environment=None, # type: Optional[str]
+ user_agent=None, # type: Optional[str]
+ ip_address=None, # type: Optional[str]
+ errors=None, # type: Optional[int]
+ user=None, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+ if sid is None:
+ sid = uuid.uuid4()
+ if started is None:
+ started = datetime.utcnow()
+ if status is None:
+ status = "ok"
+ self.status = status
+ self.did = None # type: Optional[str]
+ self.started = started
+ self.release = None # type: Optional[str]
+ self.environment = None # type: Optional[str]
+ self.duration = None # type: Optional[float]
+ self.user_agent = None # type: Optional[str]
+ self.ip_address = None # type: Optional[str]
+ self.errors = 0
+
+ self.update(
+ sid=sid,
+ did=did,
+ timestamp=timestamp,
+ duration=duration,
+ release=release,
+ environment=environment,
+ user_agent=user_agent,
+ ip_address=ip_address,
+ errors=errors,
+ user=user,
+ )
+
+ def update(
+ self,
+ sid=None, # type: Optional[Union[str, uuid.UUID]]
+ did=None, # type: Optional[str]
+ timestamp=None, # type: Optional[datetime]
+ duration=None, # type: Optional[float]
+ status=None, # type: Optional[SessionStatus]
+ release=None, # type: Optional[str]
+ environment=None, # type: Optional[str]
+ user_agent=None, # type: Optional[str]
+ ip_address=None, # type: Optional[str]
+ errors=None, # type: Optional[int]
+ user=None, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+        # If a user is supplied we pull some data from it
+ if user:
+ if ip_address is None:
+ ip_address = user.get("ip_address")
+ if did is None:
+ did = user.get("id") or user.get("email") or user.get("username")
+
+ if sid is not None:
+ self.sid = _make_uuid(sid)
+ if did is not None:
+ self.did = str(did)
+ if timestamp is None:
+ timestamp = datetime.utcnow()
+ self.timestamp = timestamp
+ if duration is not None:
+ self.duration = duration
+ if release is not None:
+ self.release = release
+ if environment is not None:
+ self.environment = environment
+ if ip_address is not None:
+ self.ip_address = ip_address
+ if user_agent is not None:
+ self.user_agent = user_agent
+ if errors is not None:
+ self.errors = errors
+
+ if status is not None:
+ self.status = status
+
+ def close(
+ self, status=None # type: Optional[SessionStatus]
+ ):
+ # type: (...) -> Any
+ if status is None and self.status == "ok":
+ status = "exited"
+ if status is not None:
+ self.update(status=status)
+
+ def to_json(self):
+ # type: (...) -> Any
+ rv = {
+ "sid": str(self.sid),
+ "init": True,
+ "started": format_timestamp(self.started),
+ "timestamp": format_timestamp(self.timestamp),
+ "status": self.status,
+ } # type: Dict[str, Any]
+ if self.errors:
+ rv["errors"] = self.errors
+ if self.did is not None:
+ rv["did"] = self.did
+ if self.duration is not None:
+ rv["duration"] = self.duration
+
+ attrs = {}
+ if self.release is not None:
+ attrs["release"] = self.release
+ if self.environment is not None:
+ attrs["environment"] = self.environment
+ if self.ip_address is not None:
+ attrs["ip_address"] = self.ip_address
+ if self.user_agent is not None:
+ attrs["user_agent"] = self.user_agent
+ if attrs:
+ rv["attrs"] = attrs
+ return rv
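+
+# Illustrative sketch (not part of the upstream module); release and user values are
+# made up. A session's lifecycle is create -> update (e.g. when errors occur) ->
+# close, after which to_json() yields the payload batched by SessionFlusher.
+#
+#     session = Session(release="myapp@1.0.0", environment="production")
+#     session.update(errors=1, user={"id": "42"})
+#     session.close()              # status "ok" becomes "exited"
+#     payload = session.to_json()  # {"sid": ..., "status": "exited", "errors": 1, ...}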
diff --git a/third_party/python/sentry-sdk/sentry_sdk/tracing.py b/third_party/python/sentry-sdk/sentry_sdk/tracing.py
new file mode 100644
index 0000000000..9293365b83
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/tracing.py
@@ -0,0 +1,498 @@
+import re
+import uuid
+import contextlib
+import time
+
+from datetime import datetime, timedelta
+
+import sentry_sdk
+
+from sentry_sdk.utils import capture_internal_exceptions, logger, to_string
+from sentry_sdk._compat import PY2
+from sentry_sdk._types import MYPY
+
+if PY2:
+ from collections import Mapping
+else:
+ from collections.abc import Mapping
+
+if MYPY:
+ import typing
+
+ from typing import Generator
+ from typing import Optional
+ from typing import Any
+ from typing import Dict
+ from typing import List
+ from typing import Tuple
+
+_traceparent_header_format_re = re.compile(
+ "^[ \t]*" # whitespace
+ "([0-9a-f]{32})?" # trace_id
+ "-?([0-9a-f]{16})?" # span_id
+ "-?([01])?" # sampled
+ "[ \t]*$" # whitespace
+)
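+
+# Illustrative example (not part of the upstream module): this regex accepts the
+# "sentry-trace" header produced by Span.to_traceparent(), e.g.
+#
+#     "771a43a4192642f0b136d5159a501700-7c9ee3df4cfa4b69-1"
+#
+# i.e. <32 hex trace_id>-<16 hex span_id>-<sampled: "1", "0" or empty>.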
+
+
+class EnvironHeaders(Mapping): # type: ignore
+ def __init__(
+ self,
+ environ, # type: typing.Mapping[str, str]
+ prefix="HTTP_", # type: str
+ ):
+ # type: (...) -> None
+ self.environ = environ
+ self.prefix = prefix
+
+ def __getitem__(self, key):
+ # type: (str) -> Optional[Any]
+ return self.environ[self.prefix + key.replace("-", "_").upper()]
+
+ def __len__(self):
+ # type: () -> int
+ return sum(1 for _ in iter(self))
+
+ def __iter__(self):
+ # type: () -> Generator[str, None, None]
+ for k in self.environ:
+ if not isinstance(k, str):
+ continue
+
+ k = k.replace("-", "_").upper()
+ if not k.startswith(self.prefix):
+ continue
+
+ yield k[len(self.prefix) :]
+
+
+class _SpanRecorder(object):
+ __slots__ = ("maxlen", "finished_spans", "open_span_count")
+
+ def __init__(self, maxlen):
+ # type: (int) -> None
+ self.maxlen = maxlen
+ self.open_span_count = 0 # type: int
+ self.finished_spans = [] # type: List[Span]
+
+ def start_span(self, span):
+ # type: (Span) -> None
+
+ # This is just so that we don't run out of memory while recording a lot
+ # of spans. At some point we just stop and flush out the start of the
+ # trace tree (i.e. the first n spans with the smallest
+ # start_timestamp).
+ self.open_span_count += 1
+ if self.open_span_count > self.maxlen:
+ span._span_recorder = None
+
+ def finish_span(self, span):
+ # type: (Span) -> None
+ self.finished_spans.append(span)
+
+
+class Span(object):
+ __slots__ = (
+ "trace_id",
+ "span_id",
+ "parent_span_id",
+ "same_process_as_parent",
+ "sampled",
+ "transaction",
+ "op",
+ "description",
+ "start_timestamp",
+ "_start_timestamp_monotonic",
+ "status",
+ "timestamp",
+ "_tags",
+ "_data",
+ "_span_recorder",
+ "hub",
+ "_context_manager_state",
+ )
+
+ def __init__(
+ self,
+ trace_id=None, # type: Optional[str]
+ span_id=None, # type: Optional[str]
+ parent_span_id=None, # type: Optional[str]
+ same_process_as_parent=True, # type: bool
+ sampled=None, # type: Optional[bool]
+ transaction=None, # type: Optional[str]
+ op=None, # type: Optional[str]
+ description=None, # type: Optional[str]
+ hub=None, # type: Optional[sentry_sdk.Hub]
+ status=None, # type: Optional[str]
+ ):
+ # type: (...) -> None
+ self.trace_id = trace_id or uuid.uuid4().hex
+ self.span_id = span_id or uuid.uuid4().hex[16:]
+ self.parent_span_id = parent_span_id
+ self.same_process_as_parent = same_process_as_parent
+ self.sampled = sampled
+ self.transaction = transaction
+ self.op = op
+ self.description = description
+ self.status = status
+ self.hub = hub
+ self._tags = {} # type: Dict[str, str]
+ self._data = {} # type: Dict[str, Any]
+ self.start_timestamp = datetime.utcnow()
+ try:
+ # TODO: For Python 3.7+, we could use a clock with ns resolution:
+ # self._start_timestamp_monotonic = time.perf_counter_ns()
+
+ # Python 3.3+
+ self._start_timestamp_monotonic = time.perf_counter()
+ except AttributeError:
+ pass
+
+ #: End timestamp of span
+ self.timestamp = None # type: Optional[datetime]
+
+ self._span_recorder = None # type: Optional[_SpanRecorder]
+
+ def init_finished_spans(self, maxlen):
+ # type: (int) -> None
+ if self._span_recorder is None:
+ self._span_recorder = _SpanRecorder(maxlen)
+ self._span_recorder.start_span(self)
+
+ def __repr__(self):
+ # type: () -> str
+ return (
+ "<%s(transaction=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>"
+ % (
+ self.__class__.__name__,
+ self.transaction,
+ self.trace_id,
+ self.span_id,
+ self.parent_span_id,
+ self.sampled,
+ )
+ )
+
+ def __enter__(self):
+ # type: () -> Span
+ hub = self.hub or sentry_sdk.Hub.current
+
+ _, scope = hub._stack[-1]
+ old_span = scope.span
+ scope.span = self
+ self._context_manager_state = (hub, scope, old_span)
+ return self
+
+ def __exit__(self, ty, value, tb):
+ # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
+ if value is not None:
+ self.set_status("internal_error")
+
+ hub, scope, old_span = self._context_manager_state
+ del self._context_manager_state
+
+ self.finish(hub)
+ scope.span = old_span
+
+ def new_span(self, **kwargs):
+ # type: (**Any) -> Span
+ rv = type(self)(
+ trace_id=self.trace_id,
+ span_id=None,
+ parent_span_id=self.span_id,
+ sampled=self.sampled,
+ **kwargs
+ )
+
+ rv._span_recorder = self._span_recorder
+ return rv
+
+ @classmethod
+ def continue_from_environ(cls, environ):
+ # type: (typing.Mapping[str, str]) -> Span
+ return cls.continue_from_headers(EnvironHeaders(environ))
+
+ @classmethod
+ def continue_from_headers(cls, headers):
+ # type: (typing.Mapping[str, str]) -> Span
+ parent = cls.from_traceparent(headers.get("sentry-trace"))
+ if parent is None:
+ return cls()
+ parent.same_process_as_parent = False
+ return parent
+
+ def iter_headers(self):
+ # type: () -> Generator[Tuple[str, str], None, None]
+ yield "sentry-trace", self.to_traceparent()
+
+ @classmethod
+ def from_traceparent(cls, traceparent):
+ # type: (Optional[str]) -> Optional[Span]
+ if not traceparent:
+ return None
+
+ if traceparent.startswith("00-") and traceparent.endswith("-00"):
+ traceparent = traceparent[3:-3]
+
+ match = _traceparent_header_format_re.match(str(traceparent))
+ if match is None:
+ return None
+
+ trace_id, span_id, sampled_str = match.groups()
+
+ if trace_id is not None:
+ trace_id = "{:032x}".format(int(trace_id, 16))
+ if span_id is not None:
+ span_id = "{:016x}".format(int(span_id, 16))
+
+ if sampled_str:
+ sampled = sampled_str != "0" # type: Optional[bool]
+ else:
+ sampled = None
+
+ return cls(trace_id=trace_id, parent_span_id=span_id, sampled=sampled)
+
+ def to_traceparent(self):
+ # type: () -> str
+ sampled = ""
+ if self.sampled is True:
+ sampled = "1"
+ if self.sampled is False:
+ sampled = "0"
+ return "%s-%s-%s" % (self.trace_id, self.span_id, sampled)
+
+ def to_legacy_traceparent(self):
+ # type: () -> str
+ return "00-%s-%s-00" % (self.trace_id, self.span_id)
+
+ def set_tag(self, key, value):
+ # type: (str, Any) -> None
+ self._tags[key] = value
+
+ def set_data(self, key, value):
+ # type: (str, Any) -> None
+ self._data[key] = value
+
+ def set_status(self, value):
+ # type: (str) -> None
+ self.status = value
+
+ def set_http_status(self, http_status):
+ # type: (int) -> None
+ self.set_tag("http.status_code", http_status)
+
+ if http_status < 400:
+ self.set_status("ok")
+ elif 400 <= http_status < 500:
+ if http_status == 403:
+ self.set_status("permission_denied")
+ elif http_status == 404:
+ self.set_status("not_found")
+ elif http_status == 429:
+ self.set_status("resource_exhausted")
+ elif http_status == 413:
+ self.set_status("failed_precondition")
+ elif http_status == 401:
+ self.set_status("unauthenticated")
+ elif http_status == 409:
+ self.set_status("already_exists")
+ else:
+ self.set_status("invalid_argument")
+ elif 500 <= http_status < 600:
+ if http_status == 504:
+ self.set_status("deadline_exceeded")
+ elif http_status == 501:
+ self.set_status("unimplemented")
+ elif http_status == 503:
+ self.set_status("unavailable")
+ else:
+ self.set_status("internal_error")
+ else:
+ self.set_status("unknown_error")
+
+ def is_success(self):
+ # type: () -> bool
+ return self.status == "ok"
+
+ def finish(self, hub=None):
+ # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
+ hub = hub or self.hub or sentry_sdk.Hub.current
+
+ if self.timestamp is not None:
+ # This transaction is already finished, so we should not flush it again.
+ return None
+
+ try:
+ duration_seconds = time.perf_counter() - self._start_timestamp_monotonic
+ self.timestamp = self.start_timestamp + timedelta(seconds=duration_seconds)
+ except AttributeError:
+ self.timestamp = datetime.utcnow()
+
+ _maybe_create_breadcrumbs_from_span(hub, self)
+
+ if self._span_recorder is None:
+ return None
+
+ self._span_recorder.finish_span(self)
+
+ if self.transaction is None:
+ # If this has no transaction set we assume there's a parent
+ # transaction for this span that would be flushed out eventually.
+ return None
+
+ client = hub.client
+
+ if client is None:
+ # We have no client and therefore nowhere to send this transaction
+ # event.
+ return None
+
+ if not self.sampled:
+ # At this point a `sampled = None` should have already been
+ # resolved to a concrete decision. If `sampled` is `None`, it's
+ # likely that somebody used `with sentry_sdk.Hub.start_span(..)` on a
+ # non-transaction span and later decided to make it a transaction.
+ if self.sampled is None:
+ logger.warning("Discarding transaction Span without sampling decision")
+
+ return None
+
+ return hub.capture_event(
+ {
+ "type": "transaction",
+ "transaction": self.transaction,
+ "contexts": {"trace": self.get_trace_context()},
+ "tags": self._tags,
+ "timestamp": self.timestamp,
+ "start_timestamp": self.start_timestamp,
+ "spans": [
+ s.to_json(client)
+ for s in self._span_recorder.finished_spans
+ if s is not self
+ ],
+ }
+ )
+
+ def to_json(self, client):
+ # type: (Optional[sentry_sdk.Client]) -> Dict[str, Any]
+ rv = {
+ "trace_id": self.trace_id,
+ "span_id": self.span_id,
+ "parent_span_id": self.parent_span_id,
+ "same_process_as_parent": self.same_process_as_parent,
+ "op": self.op,
+ "description": self.description,
+ "start_timestamp": self.start_timestamp,
+ "timestamp": self.timestamp,
+ } # type: Dict[str, Any]
+
+ transaction = self.transaction
+ if transaction:
+ rv["transaction"] = transaction
+
+ if self.status:
+ self._tags["status"] = self.status
+
+ tags = self._tags
+ if tags:
+ rv["tags"] = tags
+
+ data = self._data
+ if data:
+ rv["data"] = data
+
+ return rv
+
+ def get_trace_context(self):
+ # type: () -> Any
+ rv = {
+ "trace_id": self.trace_id,
+ "span_id": self.span_id,
+ "parent_span_id": self.parent_span_id,
+ "op": self.op,
+ "description": self.description,
+ }
+ if self.status:
+ rv["status"] = self.status
+
+ return rv
+
+
+def _format_sql(cursor, sql):
+ # type: (Any, str) -> Optional[str]
+
+ real_sql = None
+
+ # If we're using psycopg2, it could be that we're
+ # looking at a query that uses Composed objects. Use psycopg2's mogrify
+ # function to format the query. We lose per-parameter trimming but gain
+ # accuracy in formatting.
+ try:
+ if hasattr(cursor, "mogrify"):
+ real_sql = cursor.mogrify(sql)
+ if isinstance(real_sql, bytes):
+ real_sql = real_sql.decode(cursor.connection.encoding)
+ except Exception:
+ real_sql = None
+
+ return real_sql or to_string(sql)
+
+
+@contextlib.contextmanager
+def record_sql_queries(
+ hub, # type: sentry_sdk.Hub
+ cursor, # type: Any
+ query, # type: Any
+ params_list, # type: Any
+ paramstyle, # type: Optional[str]
+ executemany, # type: bool
+):
+ # type: (...) -> Generator[Span, None, None]
+
+ # TODO: Bring back capturing of params by default
+ if hub.client and hub.client.options["_experiments"].get(
+ "record_sql_params", False
+ ):
+ if not params_list or params_list == [None]:
+ params_list = None
+
+ if paramstyle == "pyformat":
+ paramstyle = "format"
+ else:
+ params_list = None
+ paramstyle = None
+
+ query = _format_sql(cursor, query)
+
+ data = {}
+ if params_list is not None:
+ data["db.params"] = params_list
+ if paramstyle is not None:
+ data["db.paramstyle"] = paramstyle
+ if executemany:
+ data["db.executemany"] = True
+
+ with capture_internal_exceptions():
+ hub.add_breadcrumb(message=query, category="query", data=data)
+
+ with hub.start_span(op="db", description=query) as span:
+ for k, v in data.items():
+ span.set_data(k, v)
+ yield span
+
+
+def _maybe_create_breadcrumbs_from_span(hub, span):
+ # type: (sentry_sdk.Hub, Span) -> None
+ if span.op == "redis":
+ hub.add_breadcrumb(
+ message=span.description, type="redis", category="redis", data=span._tags
+ )
+ elif span.op == "http":
+ hub.add_breadcrumb(type="http", category="httplib", data=span._data)
+ elif span.op == "subprocess":
+ hub.add_breadcrumb(
+ type="subprocess",
+ category="subprocess",
+ message=span.description,
+ data=span._data,
+ )
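
Illustration only (not part of the vendored file): a minimal sketch of how the Span API above composes — continuing a trace from an incoming sentry-trace header, recording a child span, and emitting outgoing headers. The header value and transaction name are made-up examples.

import sentry_sdk
from sentry_sdk.tracing import Span

# Hypothetical incoming header: trace_id (32 hex), span_id (16 hex), sampled flag.
incoming = {"sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"}

span = Span.continue_from_headers(incoming)   # parsed via from_traceparent()
span.transaction = "GET /index"               # mark it as a transaction span
span.op = "http.server"
span.init_finished_spans(maxlen=1000)         # attach a _SpanRecorder

with span.new_span(op="db", description="SELECT 1") as child:
    child.set_tag("db.system", "sqlite")      # tags end up in to_json()["tags"]

print(dict(span.iter_headers()))              # {"sentry-trace": "<trace_id>-<span_id>-1"}
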
diff --git a/third_party/python/sentry-sdk/sentry_sdk/transport.py b/third_party/python/sentry-sdk/sentry_sdk/transport.py
new file mode 100644
index 0000000000..60ab611c54
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/transport.py
@@ -0,0 +1,365 @@
+from __future__ import print_function
+
+import json
+import io
+import urllib3 # type: ignore
+import certifi
+import gzip
+
+from datetime import datetime, timedelta
+
+from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions
+from sentry_sdk.worker import BackgroundWorker
+from sentry_sdk.envelope import Envelope, get_event_data_category
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Type
+ from typing import Any
+ from typing import Optional
+ from typing import Dict
+ from typing import Union
+ from typing import Callable
+ from urllib3.poolmanager import PoolManager # type: ignore
+ from urllib3.poolmanager import ProxyManager
+
+ from sentry_sdk._types import Event
+
+try:
+ from urllib.request import getproxies
+except ImportError:
+ from urllib import getproxies # type: ignore
+
+
+class Transport(object):
+ """Baseclass for all transports.
+
+ A transport is used to send an event to sentry.
+ """
+
+ parsed_dsn = None # type: Optional[Dsn]
+
+ def __init__(
+ self, options=None # type: Optional[Dict[str, Any]]
+ ):
+ # type: (...) -> None
+ self.options = options
+ if options and options["dsn"] is not None and options["dsn"]:
+ self.parsed_dsn = Dsn(options["dsn"])
+ else:
+ self.parsed_dsn = None
+
+ def capture_event(
+ self, event # type: Event
+ ):
+ # type: (...) -> None
+ """This gets invoked with the event dictionary when an event should
+ be sent to sentry.
+ """
+ raise NotImplementedError()
+
+ def capture_envelope(
+ self, envelope # type: Envelope
+ ):
+ # type: (...) -> None
+ """This gets invoked with an envelope when an event should
+ be sent to sentry. The default implementation invokes `capture_event`
+ if the envelope contains an event and ignores all other envelopes.
+ """
+ event = envelope.get_event()
+ if event is not None:
+ self.capture_event(event)
+ return None
+
+ def flush(
+ self,
+ timeout, # type: float
+ callback=None, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+ """Wait `timeout` seconds for the current events to be sent out."""
+ pass
+
+ def kill(self):
+ # type: () -> None
+ """Forcefully kills the transport."""
+ pass
+
+ def __del__(self):
+ # type: () -> None
+ try:
+ self.kill()
+ except Exception:
+ pass
+
+
+class HttpTransport(Transport):
+ """The default HTTP transport."""
+
+ def __init__(
+ self, options # type: Dict[str, Any]
+ ):
+ # type: (...) -> None
+ from sentry_sdk.consts import VERSION
+
+ Transport.__init__(self, options)
+ assert self.parsed_dsn is not None
+ self._worker = BackgroundWorker()
+ self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION)
+ self._disabled_until = {} # type: Dict[Any, datetime]
+ self._retry = urllib3.util.Retry()
+ self.options = options
+
+ self._pool = self._make_pool(
+ self.parsed_dsn,
+ http_proxy=options["http_proxy"],
+ https_proxy=options["https_proxy"],
+ ca_certs=options["ca_certs"],
+ )
+
+ from sentry_sdk import Hub
+
+ self.hub_cls = Hub
+
+ def _update_rate_limits(self, response):
+ # type: (urllib3.HTTPResponse) -> None
+
+ # Newer Sentry servers send more detailed rate limit information in this
+ # header. We honor it regardless of the status code to update our internal
+ # rate limits.
+ header = response.headers.get("x-sentry-rate-limit")
+ if header:
+ for limit in header.split(","):
+ try:
+ retry_after, categories, _ = limit.strip().split(":", 2)
+ retry_after = datetime.utcnow() + timedelta(
+ seconds=int(retry_after)
+ )
+ for category in categories.split(";") or (None,):
+ self._disabled_until[category] = retry_after
+ except (LookupError, ValueError):
+ continue
+
+ # Older Sentry servers only communicate global rate limit hits via the
+ # retry-after header on 429 responses. That header can also be emitted by
+ # newer servers if a proxy in front wants to globally slow things down.
+ elif response.status == 429:
+ self._disabled_until[None] = datetime.utcnow() + timedelta(
+ seconds=self._retry.get_retry_after(response) or 60
+ )
+
+ def _send_request(
+ self,
+ body, # type: bytes
+ headers, # type: Dict[str, str]
+ ):
+ # type: (...) -> None
+ headers.update(
+ {
+ "User-Agent": str(self._auth.client),
+ "X-Sentry-Auth": str(self._auth.to_header()),
+ }
+ )
+ response = self._pool.request(
+ "POST", str(self._auth.store_api_url), body=body, headers=headers
+ )
+
+ try:
+ self._update_rate_limits(response)
+
+ if response.status == 429:
+ # If we hit a 429, something was rate limited, but we already
+ # acted on it in `self._update_rate_limits`.
+ pass
+
+ elif response.status >= 300 or response.status < 200:
+ logger.error(
+ "Unexpected status code: %s (body: %s)",
+ response.status,
+ response.data,
+ )
+ finally:
+ response.close()
+
+ def _check_disabled(self, category):
+ # type: (str) -> bool
+ def _disabled(bucket):
+ # type: (Any) -> bool
+ ts = self._disabled_until.get(bucket)
+ return ts is not None and ts > datetime.utcnow()
+
+ return _disabled(category) or _disabled(None)
+
+ def _send_event(
+ self, event # type: Event
+ ):
+ # type: (...) -> None
+ if self._check_disabled(get_event_data_category(event)):
+ return None
+
+ body = io.BytesIO()
+ with gzip.GzipFile(fileobj=body, mode="w") as f:
+ f.write(json.dumps(event, allow_nan=False).encode("utf-8"))
+
+ assert self.parsed_dsn is not None
+ logger.debug(
+ "Sending event, type:%s level:%s event_id:%s project:%s host:%s"
+ % (
+ event.get("type") or "null",
+ event.get("level") or "null",
+ event.get("event_id") or "null",
+ self.parsed_dsn.project_id,
+ self.parsed_dsn.host,
+ )
+ )
+ self._send_request(
+ body.getvalue(),
+ headers={"Content-Type": "application/json", "Content-Encoding": "gzip"},
+ )
+ return None
+
+ def _send_envelope(
+ self, envelope # type: Envelope
+ ):
+ # type: (...) -> None
+
+ # remove all items from the envelope which are over quota
+ envelope.items[:] = [
+ x for x in envelope.items if not self._check_disabled(x.data_category)
+ ]
+ if not envelope.items:
+ return None
+
+ body = io.BytesIO()
+ with gzip.GzipFile(fileobj=body, mode="w") as f:
+ envelope.serialize_into(f)
+
+ assert self.parsed_dsn is not None
+ logger.debug(
+ "Sending envelope [%s] project:%s host:%s",
+ envelope.description,
+ self.parsed_dsn.project_id,
+ self.parsed_dsn.host,
+ )
+ self._send_request(
+ body.getvalue(),
+ headers={
+ "Content-Type": "application/x-sentry-envelope",
+ "Content-Encoding": "gzip",
+ },
+ )
+ return None
+
+ def _get_pool_options(self, ca_certs):
+ # type: (Optional[Any]) -> Dict[str, Any]
+ return {
+ "num_pools": 2,
+ "cert_reqs": "CERT_REQUIRED",
+ "ca_certs": ca_certs or certifi.where(),
+ }
+
+ def _make_pool(
+ self,
+ parsed_dsn, # type: Dsn
+ http_proxy, # type: Optional[str]
+ https_proxy, # type: Optional[str]
+ ca_certs, # type: Optional[Any]
+ ):
+ # type: (...) -> Union[PoolManager, ProxyManager]
+ proxy = None
+
+ # try HTTPS first
+ if parsed_dsn.scheme == "https" and (https_proxy != ""):
+ proxy = https_proxy or getproxies().get("https")
+
+ # maybe fall back to an HTTP proxy
+ if not proxy and (http_proxy != ""):
+ proxy = http_proxy or getproxies().get("http")
+
+ opts = self._get_pool_options(ca_certs)
+
+ if proxy:
+ return urllib3.ProxyManager(proxy, **opts)
+ else:
+ return urllib3.PoolManager(**opts)
+
+ def capture_event(
+ self, event # type: Event
+ ):
+ # type: (...) -> None
+ hub = self.hub_cls.current
+
+ def send_event_wrapper():
+ # type: () -> None
+ with hub:
+ with capture_internal_exceptions():
+ self._send_event(event)
+
+ self._worker.submit(send_event_wrapper)
+
+ def capture_envelope(
+ self, envelope # type: Envelope
+ ):
+ # type: (...) -> None
+ hub = self.hub_cls.current
+
+ def send_envelope_wrapper():
+ # type: () -> None
+ with hub:
+ with capture_internal_exceptions():
+ self._send_envelope(envelope)
+
+ self._worker.submit(send_envelope_wrapper)
+
+ def flush(
+ self,
+ timeout, # type: float
+ callback=None, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+ logger.debug("Flushing HTTP transport")
+ if timeout > 0:
+ self._worker.flush(timeout, callback)
+
+ def kill(self):
+ # type: () -> None
+ logger.debug("Killing HTTP transport")
+ self._worker.kill()
+
+
+class _FunctionTransport(Transport):
+ def __init__(
+ self, func # type: Callable[[Event], None]
+ ):
+ # type: (...) -> None
+ Transport.__init__(self)
+ self._func = func
+
+ def capture_event(
+ self, event # type: Event
+ ):
+ # type: (...) -> None
+ self._func(event)
+ return None
+
+
+def make_transport(options):
+ # type: (Dict[str, Any]) -> Optional[Transport]
+ ref_transport = options["transport"]
+
+ # If no transport is given, we use the http transport class
+ if ref_transport is None:
+ transport_cls = HttpTransport # type: Type[Transport]
+ elif isinstance(ref_transport, Transport):
+ return ref_transport
+ elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport):
+ transport_cls = ref_transport
+ elif callable(ref_transport):
+ return _FunctionTransport(ref_transport) # type: ignore
+
+ # If a transport class is given, only instantiate it if the DSN is not
+ # empty or None.
+ if options["dsn"]:
+ return transport_cls(options)
+
+ return None
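
Illustration only (not part of the vendored file): how make_transport above dispatches on the `transport` option. A bare callable is wrapped in _FunctionTransport and receives event dicts directly; when `transport` is None and a DSN is set, the default HttpTransport is constructed instead.

from sentry_sdk.transport import make_transport, _FunctionTransport

captured = []

def my_transport(event):
    captured.append(event)          # a plain callable receives event dicts

t = make_transport({"transport": my_transport, "dsn": None})
assert isinstance(t, _FunctionTransport)
t.capture_event({"event_id": "abc123"})
assert captured == [{"event_id": "abc123"}]
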
diff --git a/third_party/python/sentry-sdk/sentry_sdk/utils.py b/third_party/python/sentry-sdk/sentry_sdk/utils.py
new file mode 100644
index 0000000000..d92309c5f7
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/utils.py
@@ -0,0 +1,831 @@
+import os
+import sys
+import linecache
+import logging
+
+from datetime import datetime
+
+import sentry_sdk
+from sentry_sdk._compat import urlparse, text_type, implements_str, PY2
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from types import FrameType
+ from types import TracebackType
+ from typing import Any
+ from typing import Callable
+ from typing import Dict
+ from typing import ContextManager
+ from typing import Iterator
+ from typing import List
+ from typing import Optional
+ from typing import Set
+ from typing import Tuple
+ from typing import Union
+ from typing import Type
+
+ from sentry_sdk._types import ExcInfo
+
+epoch = datetime(1970, 1, 1)
+
+
+# The logger is created here but initialized in the debug support module
+logger = logging.getLogger("sentry_sdk.errors")
+
+MAX_STRING_LENGTH = 512
+MAX_FORMAT_PARAM_LENGTH = 128
+
+
+def _get_debug_hub():
+ # type: () -> Optional[sentry_sdk.Hub]
+ # This function is replaced by debug.py
+ pass
+
+
+class CaptureInternalException(object):
+ __slots__ = ()
+
+ def __enter__(self):
+ # type: () -> ContextManager[Any]
+ return self
+
+ def __exit__(self, ty, value, tb):
+ # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> bool
+ if ty is not None and value is not None:
+ capture_internal_exception((ty, value, tb))
+
+ return True
+
+
+_CAPTURE_INTERNAL_EXCEPTION = CaptureInternalException()
+
+
+def capture_internal_exceptions():
+ # type: () -> ContextManager[Any]
+ return _CAPTURE_INTERNAL_EXCEPTION
+
+
+def capture_internal_exception(exc_info):
+ # type: (ExcInfo) -> None
+ hub = _get_debug_hub()
+ if hub is not None:
+ hub._capture_internal_exception(exc_info)
+
+
+def to_timestamp(value):
+ # type: (datetime) -> float
+ return (value - epoch).total_seconds()
+
+
+def format_timestamp(value):
+ # type: (datetime) -> str
+ return value.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
+
+
+def event_hint_with_exc_info(exc_info=None):
+ # type: (Optional[ExcInfo]) -> Dict[str, Optional[ExcInfo]]
+ """Creates a hint with the exc info filled in."""
+ if exc_info is None:
+ exc_info = sys.exc_info()
+ else:
+ exc_info = exc_info_from_error(exc_info)
+ if exc_info[0] is None:
+ exc_info = None
+ return {"exc_info": exc_info}
+
+
+class BadDsn(ValueError):
+ """Raised on invalid DSNs."""
+
+
+@implements_str
+class Dsn(object):
+ """Represents a DSN."""
+
+ def __init__(self, value):
+ # type: (Union[Dsn, str]) -> None
+ if isinstance(value, Dsn):
+ self.__dict__ = dict(value.__dict__)
+ return
+ parts = urlparse.urlsplit(text_type(value))
+
+ if parts.scheme not in (u"http", u"https"):
+ raise BadDsn("Unsupported scheme %r" % parts.scheme)
+ self.scheme = parts.scheme
+
+ if parts.hostname is None:
+ raise BadDsn("Missing hostname")
+
+ self.host = parts.hostname
+
+ if parts.port is None:
+ self.port = self.scheme == "https" and 443 or 80
+ else:
+ self.port = parts.port
+
+ if not parts.username:
+ raise BadDsn("Missing public key")
+
+ self.public_key = parts.username
+ self.secret_key = parts.password
+
+ path = parts.path.rsplit("/", 1)
+
+ try:
+ self.project_id = text_type(int(path.pop()))
+ except (ValueError, TypeError):
+ raise BadDsn("Invalid project in DSN (%r)" % (parts.path or "")[1:])
+
+ self.path = "/".join(path) + "/"
+
+ @property
+ def netloc(self):
+ # type: () -> str
+ """The netloc part of a DSN."""
+ rv = self.host
+ if (self.scheme, self.port) not in (("http", 80), ("https", 443)):
+ rv = "%s:%s" % (rv, self.port)
+ return rv
+
+ def to_auth(self, client=None):
+ # type: (Optional[Any]) -> Auth
+ """Returns the auth info object for this dsn."""
+ return Auth(
+ scheme=self.scheme,
+ host=self.netloc,
+ path=self.path,
+ project_id=self.project_id,
+ public_key=self.public_key,
+ secret_key=self.secret_key,
+ client=client,
+ )
+
+ def __str__(self):
+ # type: () -> str
+ return "%s://%s%s@%s%s%s" % (
+ self.scheme,
+ self.public_key,
+ self.secret_key and "@" + self.secret_key or "",
+ self.netloc,
+ self.path,
+ self.project_id,
+ )
+
+
+class Auth(object):
+ """Helper object that represents the auth info."""
+
+ def __init__(
+ self,
+ scheme,
+ host,
+ project_id,
+ public_key,
+ secret_key=None,
+ version=7,
+ client=None,
+ path="/",
+ ):
+ # type: (str, str, str, str, Optional[str], int, Optional[Any], str) -> None
+ self.scheme = scheme
+ self.host = host
+ self.path = path
+ self.project_id = project_id
+ self.public_key = public_key
+ self.secret_key = secret_key
+ self.version = version
+ self.client = client
+
+ @property
+ def store_api_url(self):
+ # type: () -> str
+ """Returns the API url for storing events."""
+ return "%s://%s%sapi/%s/store/" % (
+ self.scheme,
+ self.host,
+ self.path,
+ self.project_id,
+ )
+
+ def to_header(self, timestamp=None):
+ # type: (Optional[datetime]) -> str
+ """Returns the auth header a string."""
+ rv = [("sentry_key", self.public_key), ("sentry_version", self.version)]
+ if timestamp is not None:
+ rv.append(("sentry_timestamp", str(to_timestamp(timestamp))))
+ if self.client is not None:
+ rv.append(("sentry_client", self.client))
+ if self.secret_key is not None:
+ rv.append(("sentry_secret", self.secret_key))
+ return u"Sentry " + u", ".join("%s=%s" % (key, value) for key, value in rv)
+
+
+class AnnotatedValue(object):
+ __slots__ = ("value", "metadata")
+
+ def __init__(self, value, metadata):
+ # type: (Optional[Any], Dict[str, Any]) -> None
+ self.value = value
+ self.metadata = metadata
+
+
+if MYPY:
+ from typing import TypeVar
+
+ T = TypeVar("T")
+ Annotated = Union[AnnotatedValue, T]
+
+
+def get_type_name(cls):
+ # type: (Optional[type]) -> Optional[str]
+ return getattr(cls, "__qualname__", None) or getattr(cls, "__name__", None)
+
+
+def get_type_module(cls):
+ # type: (Optional[type]) -> Optional[str]
+ mod = getattr(cls, "__module__", None)
+ if mod not in (None, "builtins", "__builtins__"):
+ return mod
+ return None
+
+
+def should_hide_frame(frame):
+ # type: (FrameType) -> bool
+ try:
+ mod = frame.f_globals["__name__"]
+ if mod.startswith("sentry_sdk."):
+ return True
+ except (AttributeError, KeyError):
+ pass
+
+ for flag_name in "__traceback_hide__", "__tracebackhide__":
+ try:
+ if frame.f_locals[flag_name]:
+ return True
+ except Exception:
+ pass
+
+ return False
+
+
+def iter_stacks(tb):
+ # type: (Optional[TracebackType]) -> Iterator[TracebackType]
+ tb_ = tb # type: Optional[TracebackType]
+ while tb_ is not None:
+ if not should_hide_frame(tb_.tb_frame):
+ yield tb_
+ tb_ = tb_.tb_next
+
+
+def get_lines_from_file(
+ filename, # type: str
+ lineno, # type: int
+ loader=None, # type: Optional[Any]
+ module=None, # type: Optional[str]
+):
+ # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
+ context_lines = 5
+ source = None
+ if loader is not None and hasattr(loader, "get_source"):
+ try:
+ source_str = loader.get_source(module) # type: Optional[str]
+ except (ImportError, IOError):
+ source_str = None
+ if source_str is not None:
+ source = source_str.splitlines()
+
+ if source is None:
+ try:
+ source = linecache.getlines(filename)
+ except (OSError, IOError):
+ return [], None, []
+
+ if not source:
+ return [], None, []
+
+ lower_bound = max(0, lineno - context_lines)
+ upper_bound = min(lineno + 1 + context_lines, len(source))
+
+ try:
+ pre_context = [
+ strip_string(line.strip("\r\n")) for line in source[lower_bound:lineno]
+ ]
+ context_line = strip_string(source[lineno].strip("\r\n"))
+ post_context = [
+ strip_string(line.strip("\r\n"))
+ for line in source[(lineno + 1) : upper_bound]
+ ]
+ return pre_context, context_line, post_context
+ except IndexError:
+ # the file may have changed since it was loaded into memory
+ return [], None, []
+
+
+def get_source_context(
+ frame, # type: FrameType
+ tb_lineno, # type: int
+):
+ # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
+ try:
+ abs_path = frame.f_code.co_filename # type: Optional[str]
+ except Exception:
+ abs_path = None
+ try:
+ module = frame.f_globals["__name__"]
+ except Exception:
+ return [], None, []
+ try:
+ loader = frame.f_globals["__loader__"]
+ except Exception:
+ loader = None
+ lineno = tb_lineno - 1
+ if lineno is not None and abs_path:
+ return get_lines_from_file(abs_path, lineno, loader, module)
+ return [], None, []
+
+
+def safe_str(value):
+ # type: (Any) -> str
+ try:
+ return text_type(value)
+ except Exception:
+ return safe_repr(value)
+
+
+if PY2:
+
+ def safe_repr(value):
+ # type: (Any) -> str
+ try:
+ rv = repr(value).decode("utf-8", "replace")
+
+ # At this point `rv` contains a bunch of literal escape codes, like
+ # this (exaggerated example):
+ #
+ # u"\\x2f"
+ #
+ # But we want to show this string as:
+ #
+ # u"/"
+ try:
+ # unicode-escape does this job, but can only decode latin1. So we
+ # attempt to encode in latin1.
+ return rv.encode("latin1").decode("unicode-escape")
+ except Exception:
+ # Since usually strings aren't latin1 this can break. In those
+ # cases we just give up.
+ return rv
+ except Exception:
+ # If e.g. the call to `repr` already fails
+ return u"<broken repr>"
+
+
+else:
+
+ def safe_repr(value):
+ # type: (Any) -> str
+ try:
+ return repr(value)
+ except Exception:
+ return "<broken repr>"
+
+
+def filename_for_module(module, abs_path):
+ # type: (Optional[str], Optional[str]) -> Optional[str]
+ if not abs_path or not module:
+ return abs_path
+
+ try:
+ if abs_path.endswith(".pyc"):
+ abs_path = abs_path[:-1]
+
+ base_module = module.split(".", 1)[0]
+ if base_module == module:
+ return os.path.basename(abs_path)
+
+ base_module_path = sys.modules[base_module].__file__
+ return abs_path.split(base_module_path.rsplit(os.sep, 2)[0], 1)[-1].lstrip(
+ os.sep
+ )
+ except Exception:
+ return abs_path
+
+
+def serialize_frame(frame, tb_lineno=None, with_locals=True):
+ # type: (FrameType, Optional[int], bool) -> Dict[str, Any]
+ f_code = getattr(frame, "f_code", None)
+ if not f_code:
+ abs_path = None
+ function = None
+ else:
+ abs_path = frame.f_code.co_filename
+ function = frame.f_code.co_name
+ try:
+ module = frame.f_globals["__name__"]
+ except Exception:
+ module = None
+
+ if tb_lineno is None:
+ tb_lineno = frame.f_lineno
+
+ pre_context, context_line, post_context = get_source_context(frame, tb_lineno)
+
+ rv = {
+ "filename": filename_for_module(module, abs_path) or None,
+ "abs_path": os.path.abspath(abs_path) if abs_path else None,
+ "function": function or "<unknown>",
+ "module": module,
+ "lineno": tb_lineno,
+ "pre_context": pre_context,
+ "context_line": context_line,
+ "post_context": post_context,
+ } # type: Dict[str, Any]
+ if with_locals:
+ rv["vars"] = frame.f_locals
+
+ return rv
+
+
+def stacktrace_from_traceback(tb=None, with_locals=True):
+ # type: (Optional[TracebackType], bool) -> Dict[str, List[Dict[str, Any]]]
+ return {
+ "frames": [
+ serialize_frame(
+ tb.tb_frame, tb_lineno=tb.tb_lineno, with_locals=with_locals
+ )
+ for tb in iter_stacks(tb)
+ ]
+ }
+
+
+def current_stacktrace(with_locals=True):
+ # type: (bool) -> Any
+ __tracebackhide__ = True
+ frames = []
+
+ f = sys._getframe() # type: Optional[FrameType]
+ while f is not None:
+ if not should_hide_frame(f):
+ frames.append(serialize_frame(f, with_locals=with_locals))
+ f = f.f_back
+
+ frames.reverse()
+
+ return {"frames": frames}
+
+
+def get_errno(exc_value):
+ # type: (BaseException) -> Optional[Any]
+ return getattr(exc_value, "errno", None)
+
+
+def single_exception_from_error_tuple(
+ exc_type, # type: Optional[type]
+ exc_value, # type: Optional[BaseException]
+ tb, # type: Optional[TracebackType]
+ client_options=None, # type: Optional[Dict[str, Any]]
+ mechanism=None, # type: Optional[Dict[str, Any]]
+):
+ # type: (...) -> Dict[str, Any]
+ if exc_value is not None:
+ errno = get_errno(exc_value)
+ else:
+ errno = None
+
+ if errno is not None:
+ mechanism = mechanism or {}
+ mechanism.setdefault("meta", {}).setdefault("errno", {}).setdefault(
+ "number", errno
+ )
+
+ if client_options is None:
+ with_locals = True
+ else:
+ with_locals = client_options["with_locals"]
+
+ return {
+ "module": get_type_module(exc_type),
+ "type": get_type_name(exc_type),
+ "value": safe_str(exc_value),
+ "mechanism": mechanism,
+ "stacktrace": stacktrace_from_traceback(tb, with_locals),
+ }
+
+
+HAS_CHAINED_EXCEPTIONS = hasattr(Exception, "__suppress_context__")
+
+if HAS_CHAINED_EXCEPTIONS:
+
+ def walk_exception_chain(exc_info):
+ # type: (ExcInfo) -> Iterator[ExcInfo]
+ exc_type, exc_value, tb = exc_info
+
+ seen_exceptions = []
+ seen_exception_ids = set() # type: Set[int]
+
+ while (
+ exc_type is not None
+ and exc_value is not None
+ and id(exc_value) not in seen_exception_ids
+ ):
+ yield exc_type, exc_value, tb
+
+ # Avoid hashing random types we don't know anything
+ # about. Use the list to keep a ref so that the `id` is
+ # not used for another object.
+ seen_exceptions.append(exc_value)
+ seen_exception_ids.add(id(exc_value))
+
+ if exc_value.__suppress_context__:
+ cause = exc_value.__cause__
+ else:
+ cause = exc_value.__context__
+ if cause is None:
+ break
+ exc_type = type(cause)
+ exc_value = cause
+ tb = getattr(cause, "__traceback__", None)
+
+
+else:
+
+ def walk_exception_chain(exc_info):
+ # type: (ExcInfo) -> Iterator[ExcInfo]
+ yield exc_info
+
+
+def exceptions_from_error_tuple(
+ exc_info, # type: ExcInfo
+ client_options=None, # type: Optional[Dict[str, Any]]
+ mechanism=None, # type: Optional[Dict[str, Any]]
+):
+ # type: (...) -> List[Dict[str, Any]]
+ exc_type, exc_value, tb = exc_info
+ rv = []
+ for exc_type, exc_value, tb in walk_exception_chain(exc_info):
+ rv.append(
+ single_exception_from_error_tuple(
+ exc_type, exc_value, tb, client_options, mechanism
+ )
+ )
+
+ rv.reverse()
+
+ return rv
+
+
+def to_string(value):
+ # type: (str) -> str
+ try:
+ return text_type(value)
+ except UnicodeDecodeError:
+ return repr(value)[1:-1]
+
+
+def iter_event_stacktraces(event):
+ # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]]
+ if "stacktrace" in event:
+ yield event["stacktrace"]
+ if "threads" in event:
+ for thread in event["threads"].get("values") or ():
+ if "stacktrace" in thread:
+ yield thread["stacktrace"]
+ if "exception" in event:
+ for exception in event["exception"].get("values") or ():
+ if "stacktrace" in exception:
+ yield exception["stacktrace"]
+
+
+def iter_event_frames(event):
+ # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]]
+ for stacktrace in iter_event_stacktraces(event):
+ for frame in stacktrace.get("frames") or ():
+ yield frame
+
+
+def handle_in_app(event, in_app_exclude=None, in_app_include=None):
+ # type: (Dict[str, Any], Optional[List[str]], Optional[List[str]]) -> Dict[str, Any]
+ for stacktrace in iter_event_stacktraces(event):
+ handle_in_app_impl(
+ stacktrace.get("frames"),
+ in_app_exclude=in_app_exclude,
+ in_app_include=in_app_include,
+ )
+
+ return event
+
+
+def handle_in_app_impl(frames, in_app_exclude, in_app_include):
+ # type: (Any, Optional[List[str]], Optional[List[str]]) -> Optional[Any]
+ if not frames:
+ return None
+
+ any_in_app = False
+ for frame in frames:
+ in_app = frame.get("in_app")
+ if in_app is not None:
+ if in_app:
+ any_in_app = True
+ continue
+
+ module = frame.get("module")
+ if not module:
+ continue
+ elif _module_in_set(module, in_app_include):
+ frame["in_app"] = True
+ any_in_app = True
+ elif _module_in_set(module, in_app_exclude):
+ frame["in_app"] = False
+
+ if not any_in_app:
+ for frame in frames:
+ if frame.get("in_app") is None:
+ frame["in_app"] = True
+
+ return frames
+
+
+def exc_info_from_error(error):
+ # type: (Union[BaseException, ExcInfo]) -> ExcInfo
+ if isinstance(error, tuple) and len(error) == 3:
+ exc_type, exc_value, tb = error
+ elif isinstance(error, BaseException):
+ tb = getattr(error, "__traceback__", None)
+ if tb is not None:
+ exc_type = type(error)
+ exc_value = error
+ else:
+ exc_type, exc_value, tb = sys.exc_info()
+ if exc_value is not error:
+ tb = None
+ exc_value = error
+ exc_type = type(error)
+
+ else:
+ raise ValueError("Expected Exception object to report, got %s!" % type(error))
+
+ return exc_type, exc_value, tb
+
+
+def event_from_exception(
+ exc_info, # type: Union[BaseException, ExcInfo]
+ client_options=None, # type: Optional[Dict[str, Any]]
+ mechanism=None, # type: Optional[Dict[str, Any]]
+):
+ # type: (...) -> Tuple[Dict[str, Any], Dict[str, Any]]
+ exc_info = exc_info_from_error(exc_info)
+ hint = event_hint_with_exc_info(exc_info)
+ return (
+ {
+ "level": "error",
+ "exception": {
+ "values": exceptions_from_error_tuple(
+ exc_info, client_options, mechanism
+ )
+ },
+ },
+ hint,
+ )
+
+
+def _module_in_set(name, set):
+ # type: (str, Optional[List[str]]) -> bool
+ if not set:
+ return False
+ for item in set or ():
+ if item == name or name.startswith(item + "."):
+ return True
+ return False
+
+
+def strip_string(value, max_length=None):
+ # type: (str, Optional[int]) -> Union[AnnotatedValue, str]
+ # TODO: read max_length from config
+ if not value:
+ return value
+
+ if max_length is None:
+ # This is intentionally not just the default such that one can patch `MAX_STRING_LENGTH` and affect `strip_string`.
+ max_length = MAX_STRING_LENGTH
+
+ length = len(value)
+
+ if length > max_length:
+ return AnnotatedValue(
+ value=value[: max_length - 3] + u"...",
+ metadata={
+ "len": length,
+ "rem": [["!limit", "x", max_length - 3, max_length]],
+ },
+ )
+ return value
+
+
+def _is_threading_local_monkey_patched():
+ # type: () -> bool
+ try:
+ from gevent.monkey import is_object_patched # type: ignore
+
+ if is_object_patched("threading", "local"):
+ return True
+ except ImportError:
+ pass
+
+ try:
+ from eventlet.patcher import is_monkey_patched # type: ignore
+
+ if is_monkey_patched("thread"):
+ return True
+ except ImportError:
+ pass
+
+ return False
+
+
+def _get_contextvars():
+ # type: () -> Tuple[bool, type]
+ """
+ Try to import contextvars and use it if it's deemed safe. We should not use
+ contextvars if gevent or eventlet have patched thread locals, as
+ contextvars are unaffected by that patch.
+
+ https://github.com/gevent/gevent/issues/1407
+ """
+ if not _is_threading_local_monkey_patched():
+ # aiocontextvars is a PyPI package that ensures that the contextvars
+ # backport (also a PyPI package) works with asyncio under Python 3.6
+ #
+ # Import it if available.
+ if not PY2 and sys.version_info < (3, 7):
+ try:
+ from aiocontextvars import ContextVar # noqa
+
+ return True, ContextVar
+ except ImportError:
+ pass
+
+ try:
+ from contextvars import ContextVar
+
+ return True, ContextVar
+ except ImportError:
+ pass
+
+ from threading import local
+
+ class ContextVar(object):
+ # Super-limited impl of ContextVar
+
+ def __init__(self, name):
+ # type: (str) -> None
+ self._name = name
+ self._local = local()
+
+ def get(self, default):
+ # type: (Any) -> Any
+ return getattr(self._local, "value", default)
+
+ def set(self, value):
+ # type: (Any) -> None
+ self._local.value = value
+
+ return False, ContextVar
+
+
+HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars()
+
+
+def transaction_from_function(func):
+ # type: (Callable[..., Any]) -> Optional[str]
+ # Methods in Python 2
+ try:
+ return "%s.%s.%s" % (
+ func.im_class.__module__, # type: ignore
+ func.im_class.__name__, # type: ignore
+ func.__name__,
+ )
+ except Exception:
+ pass
+
+ func_qualname = (
+ getattr(func, "__qualname__", None) or getattr(func, "__name__", None) or None
+ ) # type: Optional[str]
+
+ if not func_qualname:
+ # No idea what it is
+ return None
+
+ # Methods in Python 3
+ # Functions
+ # Classes
+ try:
+ return "%s.%s" % (func.__module__, func_qualname)
+ except Exception:
+ pass
+
+ # Possibly a lambda
+ return func_qualname
+
+
+disable_capture_event = ContextVar("disable_capture_event")
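
Illustration only (not part of the vendored file): what the Dsn and Auth helpers above derive from a DSN string. The hostname and key below are placeholders.

from sentry_sdk.utils import Dsn

dsn = Dsn("https://publickey@sentry.example.com/42")
assert dsn.scheme == "https" and dsn.port == 443    # default port for https
assert dsn.project_id == "42"

auth = dsn.to_auth(client="sentry.python/0.14.3")
print(auth.store_api_url)    # https://sentry.example.com/api/42/store/
print(auth.to_header())      # Sentry sentry_key=publickey, sentry_version=7, sentry_client=...
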
diff --git a/third_party/python/sentry-sdk/sentry_sdk/worker.py b/third_party/python/sentry-sdk/sentry_sdk/worker.py
new file mode 100644
index 0000000000..b5f2ea8ae6
--- /dev/null
+++ b/third_party/python/sentry-sdk/sentry_sdk/worker.py
@@ -0,0 +1,142 @@
+import os
+
+from threading import Thread, Lock
+from time import sleep, time
+from sentry_sdk._compat import queue, check_thread_support
+from sentry_sdk.utils import logger
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from queue import Queue
+ from typing import Any
+ from typing import Optional
+ from typing import Callable
+
+
+_TERMINATOR = object()
+
+
+class BackgroundWorker(object):
+ def __init__(self):
+ # type: () -> None
+ check_thread_support()
+ self._queue = queue.Queue(30) # type: Queue[Any]
+ self._lock = Lock()
+ self._thread = None # type: Optional[Thread]
+ self._thread_for_pid = None # type: Optional[int]
+
+ @property
+ def is_alive(self):
+ # type: () -> bool
+ if self._thread_for_pid != os.getpid():
+ return False
+ if not self._thread:
+ return False
+ return self._thread.is_alive()
+
+ def _ensure_thread(self):
+ # type: () -> None
+ if not self.is_alive:
+ self.start()
+
+ def _timed_queue_join(self, timeout):
+ # type: (float) -> bool
+ deadline = time() + timeout
+ queue = self._queue
+
+ real_all_tasks_done = getattr(
+ queue, "all_tasks_done", None
+ ) # type: Optional[Any]
+ if real_all_tasks_done is not None:
+ real_all_tasks_done.acquire()
+ all_tasks_done = real_all_tasks_done # type: Optional[Any]
+ elif queue.__module__.startswith("eventlet."):
+ all_tasks_done = getattr(queue, "_cond", None)
+ else:
+ all_tasks_done = None
+
+ try:
+ while queue.unfinished_tasks:
+ delay = deadline - time()
+ if delay <= 0:
+ return False
+ if all_tasks_done is not None:
+ all_tasks_done.wait(timeout=delay)
+ else:
+ # worst case, we just poll the number of remaining tasks
+ sleep(0.1)
+
+ return True
+ finally:
+ if real_all_tasks_done is not None:
+ real_all_tasks_done.release()
+
+ def start(self):
+ # type: () -> None
+ with self._lock:
+ if not self.is_alive:
+ self._thread = Thread(
+ target=self._target, name="raven-sentry.BackgroundWorker"
+ )
+ self._thread.setDaemon(True)
+ self._thread.start()
+ self._thread_for_pid = os.getpid()
+
+ def kill(self):
+ # type: () -> None
+ """
+ Kill the worker thread. Returns immediately. Not useful for
+ waiting for pending events on shutdown; use `flush` for that.
+ """
+ logger.debug("background worker got kill request")
+ with self._lock:
+ if self._thread:
+ try:
+ self._queue.put_nowait(_TERMINATOR)
+ except queue.Full:
+ logger.debug("background worker queue full, kill failed")
+
+ self._thread = None
+ self._thread_for_pid = None
+
+ def flush(self, timeout, callback=None):
+ # type: (float, Optional[Any]) -> None
+ logger.debug("background worker got flush request")
+ with self._lock:
+ if self.is_alive and timeout > 0.0:
+ self._wait_flush(timeout, callback)
+ logger.debug("background worker flushed")
+
+ def _wait_flush(self, timeout, callback):
+ # type: (float, Optional[Any]) -> None
+ initial_timeout = min(0.1, timeout)
+ if not self._timed_queue_join(initial_timeout):
+ pending = self._queue.qsize()
+ logger.debug("%d event(s) pending on flush", pending)
+ if callback is not None:
+ callback(pending, timeout)
+ self._timed_queue_join(timeout - initial_timeout)
+
+ def submit(self, callback):
+ # type: (Callable[[], None]) -> None
+ self._ensure_thread()
+ try:
+ self._queue.put_nowait(callback)
+ except queue.Full:
+ logger.debug("background worker queue full, dropping event")
+
+ def _target(self):
+ # type: () -> None
+ while True:
+ callback = self._queue.get()
+ try:
+ if callback is _TERMINATOR:
+ break
+ try:
+ callback()
+ except Exception:
+ logger.error("Failed processing job", exc_info=True)
+ finally:
+ self._queue.task_done()
+ sleep(0)
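
Illustration only (not part of the vendored file): the submit/flush/kill lifecycle of the BackgroundWorker above.

from sentry_sdk.worker import BackgroundWorker

worker = BackgroundWorker()

def job():
    print("ran on the background thread")

worker.submit(job)           # lazily starts the daemon thread and enqueues the job
worker.flush(timeout=2.0)    # wait up to two seconds for the queue to drain
worker.kill()                # enqueue the terminator; the thread exits its loop
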
diff --git a/third_party/python/sentry-sdk/setup.cfg b/third_party/python/sentry-sdk/setup.cfg
new file mode 100644
index 0000000000..adf5ed72aa
--- /dev/null
+++ b/third_party/python/sentry-sdk/setup.cfg
@@ -0,0 +1,7 @@
+[bdist_wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/sentry-sdk/setup.py b/third_party/python/sentry-sdk/setup.py
new file mode 100644
index 0000000000..045532e7df
--- /dev/null
+++ b/third_party/python/sentry-sdk/setup.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+"""
+Sentry-Python - Sentry SDK for Python
+=====================================
+
+**Sentry-Python is an SDK for Sentry.** Check out `GitHub
+<https://github.com/getsentry/sentry-python>`_ to find out more.
+"""
+
+from setuptools import setup, find_packages
+
+setup(
+ name="sentry-sdk",
+ version="0.14.3",
+ author="Sentry Team and Contributors",
+ author_email="hello@getsentry.com",
+ url="https://github.com/getsentry/sentry-python",
+ description="Python client for Sentry (https://getsentry.com)",
+ long_description=__doc__,
+ packages=find_packages(exclude=("tests", "tests.*")),
+ # PEP 561
+ package_data={"sentry_sdk": ["py.typed"]},
+ zip_safe=False,
+ license="BSD",
+ install_requires=["urllib3>=1.10.0", "certifi"],
+ extras_require={
+ "flask": ["flask>=0.11", "blinker>=1.1"],
+ "bottle": ["bottle>=0.12.13"],
+ "falcon": ["falcon>=1.4"],
+ "django": ["django>=1.8"],
+ "sanic": ["sanic>=0.8"],
+ "celery": ["celery>=3"],
+ "beam": ["beam>=2.12"],
+ "rq": ["0.6"],
+ "aiohttp": ["aiohttp>=3.5"],
+ "tornado": ["tornado>=5"],
+ "sqlalchemy": ["sqlalchemy>=1.2"],
+ "pyspark": ["pyspark>=2.4.4"],
+ },
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ "Environment :: Web Environment",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: BSD License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ ],
+)
diff --git a/third_party/python/six/CHANGES b/third_party/python/six/CHANGES
new file mode 100644
index 0000000000..ffa702601b
--- /dev/null
+++ b/third_party/python/six/CHANGES
@@ -0,0 +1,315 @@
+Changelog for six
+=================
+
+This file lists the changes in each six version.
+
+1.13.0
+------
+
+- Issue #298, pull request #299: Add `six.moves.dbm_ndbm`.
+
+- Issue #155: Add `six.moves.collections_abc`, which aliases the `collections`
+ module on Python 2-3.2 and the `collections.abc` on Python 3.3 and greater.
+
+- Pull request #304: Re-add distutils fallback in `setup.py`.
+
+- Pull request #305: On Python 3.7, `with_metaclass` supports classes using PEP
+ 560 features.
+
+1.12.0
+------
+
+- Issue #259, pull request #260: `six.add_metaclass` now preserves
+ `__qualname__` from the original class.
+
+- Pull request #204: Add `six.ensure_binary`, `six.ensure_text`, and
+ `six.ensure_str`.
+
+1.11.0
+------
+
+- Pull request #178: `with_metaclass` now properly proxies `__prepare__` to the
+ underlying metaclass.
+
+- Pull request #191: Allow `with_metaclass` to work with metaclasses implemented
+ in C.
+
+- Pull request #203: Add parse_http_list and parse_keqv_list to moved
+ urllib.request.
+
+- Pull request #172 and issue #171: Add unquote_to_bytes to moved urllib.parse.
+
+- Pull request #167: Add `six.moves.getoutput`.
+
+- Pull request #80: Add `six.moves.urllib_parse.splitvalue`.
+
+- Pull request #75: Add `six.moves.email_mime_image`.
+
+- Pull request #72: Avoid creating reference cycles through tracebacks in
+ `reraise`.
+
+1.10.0
+------
+
+- Issue #122: Improve the performance of `six.int2byte` on Python 3.
+
+- Pull request #55 and issue #99: Don't add the `winreg` module to `six.moves`
+ on non-Windows platforms.
+
+- Pull request #60 and issue #108: Add `six.moves.getcwd` and
+ `six.moves.getcwdu`.
+
+- Pull request #64: Add `create_unbound_method` to create unbound methods.
+
+1.9.0
+-----
+
+- Issue #106: Support the `flush` parameter to `six.print_`.
+
+- Pull request #48 and issue #15: Add the `python_2_unicode_compatible`
+ decorator.
+
+- Pull request #57 and issue #50: Add several compatibility methods for unittest
+ assertions that were renamed between Python 2 and 3.
+
+- Issue #105 and pull request #58: Ensure `six.wraps` respects the *updated* and
+ *assigned* arguments.
+
+- Issue #102: Add `raise_from` to abstract out Python 3's raise from syntax.
+
+- Issue #97: Optimize `six.iterbytes` on Python 2.
+
+- Issue #98: Fix `six.moves` race condition in multi-threaded code.
+
+- Pull request #51: Add `six.view(keys|values|items)`, which provide dictionary
+ views on Python 2.7+.
+
+- Issue #112: `six.moves.reload_module` now uses the importlib module on
+ Python 3.4+.
+
+1.8.0
+-----
+
+- Issue #90: Add `six.moves.shlex_quote`.
+
+- Issue #59: Add `six.moves.intern`.
+
+- Add `six.urllib.parse.uses_(fragment|netloc|params|query|relative)`.
+
+- Issue #88: Fix add_metaclass when the class has `__slots__` containing
+ `__weakref__` or `__dict__`.
+
+- Issue #89: Make six use absolute imports.
+
+- Issue #85: Always accept *updated* and *assigned* arguments for `wraps()`.
+
+- Issue #86: In `reraise()`, instantiate the exception if the second argument is
+ `None`.
+
+- Pull request #45: Add `six.moves.email_mime_nonmultipart`.
+
+- Issue #81: Add `six.urllib.request.splittag` mapping.
+
+- Issue #80: Add `six.urllib.request.splituser` mapping.
+
+1.7.3
+-----
+
+- Issue #77: Fix import six on Python 3.4 with a custom loader.
+
+- Issue #74: `six.moves.xmlrpc_server` should map to `SimpleXMLRPCServer` on Python
+ 2 as documented not `xmlrpclib`.
+
+1.7.2
+-----
+
+- Issue #72: Fix installing on Python 2.
+
+1.7.1
+-----
+
+- Issue #71: Make the six.moves meta path importer handle reloading of the six
+ module gracefully.
+
+1.7.0
+-----
+
+- Pull request #30: Implement six.moves with a PEP 302 meta path hook.
+
+- Pull request #32: Add six.wraps, which is like functools.wraps but always sets
+ the __wrapped__ attribute.
+
+- Pull request #35: Improve add_metaclass, so that it doesn't end up inserting
+ another class into the hierarchy.
+
+- Pull request #34: Add import mappings for dummy_thread.
+
+- Pull request #33: Add import mappings for UserDict and UserList.
+
+- Pull request #31: Select the implementations of dictionary iterator routines
+ at import time for a 20% speed boost.
+
+1.6.1
+-----
+
+- Raise an AttributeError for six.moves.X when X is a module not available in
+ the current interpreter.
+
+1.6.0
+-----
+
+- Raise an AttributeError for every attribute of unimportable modules.
+
+- Issue #56: Make the fake modules six.moves puts into sys.modules appear not to
+ have a __path__ unless they are loaded.
+
+- Pull request #28: Add support for SplitResult.
+
+- Issue #55: Add move mapping for xmlrpc.server.
+
+- Pull request #29: Add move for urllib.parse.splitquery.
+
+1.5.2
+-----
+
+- Issue #53: Make the fake modules six.moves puts into sys.modules appear not to
+ have a __name__ unless they are loaded.
+
+1.5.1
+-----
+
+- Issue #51: Hack around the Django autoreloader after recent six.moves changes.
+
+1.5.0
+-----
+
+- Removed support for Python 2.4. This is because py.test no longer supports
+ 2.4.
+
+- Fix various import problems including issues #19 and #41. six.moves modules
+ are now lazy wrappers over the underlying modules instead of the actual
+ modules themselves.
+
+- Issue #49: Add six.moves mapping for tkinter.ttk.
+
+- Pull request #24: Add __dir__ special method to six.moves modules.
+
+- Issue #47: Fix add_metaclass on classes with a string for the __slots__
+ variable.
+
+- Issue #44: Fix interpretation of backslashes on Python 2 in the u() function.
+
+- Pull request #21: Add import mapping for urllib's proxy_bypass function.
+
+- Issue #43: Add import mapping for the Python 2 xmlrpclib module.
+
+- Issue #39: Add import mapping for the Python 2 thread module.
+
+- Issue #40: Add import mapping for the Python 2 gdbm module.
+
+- Issue #35: On Python versions less than 2.7, print_ now encodes unicode
+  strings when outputting to standard streams. (Python 2.7 handles this
+ automatically.)
+
+1.4.1
+-----
+
+- Issue #32: urllib module wrappings don't work when six is not a toplevel file.
+
+1.4.0
+-----
+
+- Issue #31: Add six.moves mapping for UserString.
+
+- Pull request #12: Add six.add_metaclass, a decorator for adding a metaclass to
+ a class.
+
+- Add six.moves.zip_longest and six.moves.filterfalse, which correspond
+ respectively to itertools.izip_longest and itertools.ifilterfalse on Python 2
+ and itertools.zip_longest and itertools.filterfalse on Python 3.
+
+- Issue #25: Add the unichr function, which returns a string for a Unicode
+ codepoint.
+
+- Issue #26: Add byte2int function, which complements int2byte.
+
+- Add a PY2 constant with obvious semantics.
+
+- Add helpers for indexing and iterating over bytes: iterbytes and indexbytes.
+
+- Add create_bound_method() wrapper.
+
+- Issue #23: Allow multiple base classes to be passed to with_metaclass.
+
+- Issue #24: Add six.moves.range alias. This is exactly the same as the current
+ xrange alias.
+
+- Pull request #5: Create six.moves.urllib, which contains abstractions for a
+ bunch of things which are in urllib in Python 3 and spread out across urllib,
+ urllib2, and urlparse in Python 2.
+
+1.3.0
+-----
+
+- Issue #21: Add methods to access the closure and globals of a function.
+
+- In six.iter(items/keys/values/lists), pass keyword arguments through to the
+ underlying method.
+
+- Add six.iterlists().
+
+- Issue #20: Fix tests if tkinter is not available.
+
+- Issue #17: Define callable to be builtin callable when it is available again
+ in Python 3.2+.
+
+- Issue #16: Rename Python 2 exec_'s arguments, so casually calling exec_ with
+ keyword arguments will raise.
+
+- Issue #14: Put the six.moves package in sys.modules based on the name six is
+ imported under.
+
+- Fix Jython detection.
+
+- Pull request #4: Add email_mime_multipart, email_mime_text, and
+ email_mime_base to six.moves.
+
+1.2.0
+-----
+
+- Issue #13: Make iterkeys/itervalues/iteritems return iterators on Python 3
+ instead of iterables.
+
+- Issue #11: Fix maxsize support on Jython.
+
+- Add six.next() as an alias for six.advance_iterator().
+
+- Use the builtin next() function for advance_iterator() where it is available
+ (2.6+), not just Python 3.
+
+- Add the Iterator class for writing portable iterators.
+
+1.1.0
+-----
+
+- Add the int2byte function.
+
+- Add compatibility mappings for iterators over the keys, values, and items of a
+ dictionary.
+
+- Fix six.MAXSIZE on platforms where sizeof(long) != sizeof(Py_ssize_t).
+
+- Issue #3: Add six.moves mappings for filter, map, and zip.
+
+1.0.0
+-----
+
+- Issue #2: u() on Python 2.x now resolves unicode escapes.
+
+- Expose an API for adding mappings to six.moves.
+
+1.0 beta 1
+----------
+
+- Reworked six into one .py file. This breaks imports. Please tell me if you
+ are interested in an import compatibility layer.
diff --git a/third_party/python/six/LICENSE b/third_party/python/six/LICENSE
new file mode 100644
index 0000000000..4b05a54526
--- /dev/null
+++ b/third_party/python/six/LICENSE
@@ -0,0 +1,18 @@
+Copyright (c) 2010-2019 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/third_party/python/six/MANIFEST.in b/third_party/python/six/MANIFEST.in
new file mode 100644
index 0000000000..b924e068ee
--- /dev/null
+++ b/third_party/python/six/MANIFEST.in
@@ -0,0 +1,6 @@
+include CHANGES
+include LICENSE
+include test_six.py
+
+recursive-include documentation *
+prune documentation/_build
diff --git a/third_party/python/six/PKG-INFO b/third_party/python/six/PKG-INFO
new file mode 100644
index 0000000000..33d8f432ea
--- /dev/null
+++ b/third_party/python/six/PKG-INFO
@@ -0,0 +1,50 @@
+Metadata-Version: 1.2
+Name: six
+Version: 1.13.0
+Summary: Python 2 and 3 compatibility utilities
+Home-page: https://github.com/benjaminp/six
+Author: Benjamin Peterson
+Author-email: benjamin@python.org
+License: MIT
+Description: .. image:: https://img.shields.io/pypi/v/six.svg
+ :target: https://pypi.org/project/six/
+ :alt: six on PyPI
+
+ .. image:: https://travis-ci.org/benjaminp/six.svg?branch=master
+ :target: https://travis-ci.org/benjaminp/six
+ :alt: six on TravisCI
+
+ .. image:: https://readthedocs.org/projects/six/badge/?version=latest
+ :target: https://six.readthedocs.io/
+ :alt: six's documentation on Read the Docs
+
+ .. image:: https://img.shields.io/badge/license-MIT-green.svg
+ :target: https://github.com/benjaminp/six/blob/master/LICENSE
+ :alt: MIT License badge
+
+ Six is a Python 2 and 3 compatibility library. It provides utility functions
+ for smoothing over the differences between the Python versions with the goal of
+ writing Python code that is compatible on both Python versions. See the
+ documentation for more information on what is provided.
+
+ Six supports every Python version since 2.6. It is contained in only one Python
+ file, so it can be easily copied into your project. (The copyright and license
+ notice must be retained.)
+
+ Online documentation is at https://six.readthedocs.io/.
+
+ Bugs can be reported to https://github.com/benjaminp/six. The code can also
+ be found there.
+
+ For questions about six or porting in general, email the python-porting mailing
+ list: https://mail.python.org/mailman/listinfo/python-porting
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Requires-Python: >=2.6, !=3.0.*, !=3.1.*
diff --git a/third_party/python/six/README.rst b/third_party/python/six/README.rst
new file mode 100644
index 0000000000..a99e6f5585
--- /dev/null
+++ b/third_party/python/six/README.rst
@@ -0,0 +1,32 @@
+.. image:: https://img.shields.io/pypi/v/six.svg
+ :target: https://pypi.org/project/six/
+ :alt: six on PyPI
+
+.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master
+ :target: https://travis-ci.org/benjaminp/six
+ :alt: six on TravisCI
+
+.. image:: https://readthedocs.org/projects/six/badge/?version=latest
+ :target: https://six.readthedocs.io/
+ :alt: six's documentation on Read the Docs
+
+.. image:: https://img.shields.io/badge/license-MIT-green.svg
+ :target: https://github.com/benjaminp/six/blob/master/LICENSE
+ :alt: MIT License badge
+
+Six is a Python 2 and 3 compatibility library. It provides utility functions
+for smoothing over the differences between the Python versions with the goal of
+writing Python code that is compatible with both Python versions. See the
+documentation for more information on what is provided.
+
+Six supports every Python version since 2.6. It is contained in only one Python
+file, so it can be easily copied into your project. (The copyright and license
+notice must be retained.)
+
+Online documentation is at https://six.readthedocs.io/.
+
+Bugs can be reported to https://github.com/benjaminp/six. The code can also
+be found there.
+
+For questions about six or porting in general, email the python-porting mailing
+list: https://mail.python.org/mailman/listinfo/python-porting
diff --git a/third_party/python/six/documentation/Makefile b/third_party/python/six/documentation/Makefile
new file mode 100644
index 0000000000..eebafcd6d6
--- /dev/null
+++ b/third_party/python/six/documentation/Makefile
@@ -0,0 +1,130 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/six.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/six.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/six"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/six"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ make -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/third_party/python/six/documentation/conf.py b/third_party/python/six/documentation/conf.py
new file mode 100644
index 0000000000..b3d1328adc
--- /dev/null
+++ b/third_party/python/six/documentation/conf.py
@@ -0,0 +1,217 @@
+# -*- coding: utf-8 -*-
+#
+# six documentation build configuration file
+
+import os
+import sys
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.append(os.path.abspath('.'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "1.0"
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ["sphinx.ext.intersphinx"]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# The suffix of source filenames.
+source_suffix = ".rst"
+
+# The encoding of source files.
+#source_encoding = "utf-8-sig"
+
+# The master toctree document.
+master_doc = "index"
+
+# General information about the project.
+project = u"six"
+copyright = u"2010-2019, Benjamin Peterson"
+
+sys.path.append(os.path.abspath(os.path.join(".", "..")))
+from six import __version__ as six_version
+sys.path.pop()
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = six_version[:-2]
+# The full version, including alpha/beta/rc tags.
+release = six_version
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ["_build"]
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = "default"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'sixdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ("index", "six.tex", u"six Documentation",
+ u"Benjamin Peterson", "manual"),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ("index", "six", u"six Documentation",
+ [u"Benjamin Peterson"], 1)
+]
+
+# -- Intersphinx ---------------------------------------------------------------
+
+intersphinx_mapping = {"py2" : ("https://docs.python.org/2/", None),
+ "py3" : ("https://docs.python.org/3/", None)}
diff --git a/third_party/python/six/documentation/index.rst b/third_party/python/six/documentation/index.rst
new file mode 100644
index 0000000000..b7ec2754ec
--- /dev/null
+++ b/third_party/python/six/documentation/index.rst
@@ -0,0 +1,875 @@
+Six: Python 2 and 3 Compatibility Library
+=========================================
+
+.. module:: six
+ :synopsis: Python 2 and 3 compatibility
+
+.. moduleauthor:: Benjamin Peterson <benjamin@python.org>
+.. sectionauthor:: Benjamin Peterson <benjamin@python.org>
+
+
+Six provides simple utilities for wrapping over differences between Python 2 and
+Python 3. It is intended to support codebases that work on both Python 2 and 3
+without modification. six consists of only one Python file, so it is painless
+to copy into a project.
+
+Six can be downloaded on `PyPI <https://pypi.org/project/six/>`_. Its bug
+tracker and code hosting is on `GitHub <https://github.com/benjaminp/six>`_.
+
+The name, "six", comes from the fact that 2*3 equals 6. Why not addition?
+Multiplication is more powerful, and, anyway, "five" has already been snatched
+away by the (admittedly now moribund) Zope Five project.
+
+
+Indices and tables
+------------------
+
+* :ref:`genindex`
+* :ref:`search`
+
+
+Package contents
+----------------
+
+.. data:: PY2
+
+ A boolean indicating if the code is running on Python 2.
+
+.. data:: PY3
+
+ A boolean indicating if the code is running on Python 3.
+
+
+Constants
+>>>>>>>>>
+
+Six provides constants that may differ between Python versions. Ones ending
+``_types`` are mostly useful as the second argument to ``isinstance`` or
+``issubclass``.
+
+
+.. data:: class_types
+
+ Possible class types. In Python 2, this encompasses old-style
+ :data:`py2:types.ClassType` and new-style ``type`` classes. In Python 3,
+ this is just ``type``.
+
+
+.. data:: integer_types
+
+ Possible integer types. In Python 2, this is :func:`py2:long` and
+ :func:`py2:int`, and in Python 3, just :func:`py3:int`.
+
+
+.. data:: string_types
+
+ Possible types for text data. This is :func:`py2:basestring` in Python 2 and
+ :func:`py3:str` in Python 3.
+
+
+.. data:: text_type
+
+ Type for representing (Unicode) textual data. This is :func:`py2:unicode` in
+ Python 2 and :func:`py3:str` in Python 3.
+
+
+.. data:: binary_type
+
+ Type for representing binary data. This is :func:`py2:str` in Python 2 and
+ :func:`py3:bytes` in Python 3. Python 2.6 and 2.7 include ``bytes`` as a
+ builtin alias of ``str``, so six’s version is only necessary for Python 2.5
+ compatibility.
+
+
+.. data:: MAXSIZE
+
+ The maximum size of a container like :func:`py3:list` or :func:`py3:dict`.
+ This is equivalent to :data:`py3:sys.maxsize` in Python 2.6 and later
+ (including 3.x). Note, this is temptingly similar to, but not the same as
+ :data:`py2:sys.maxint` in Python 2. There is no direct equivalent to
+ :data:`py2:sys.maxint` in Python 3 because its integer type has no limits
+ aside from memory.
+
+
+Here's example usage of the module::
+
+ import six
+
+ def dispatch_types(value):
+ if isinstance(value, six.integer_types):
+ handle_integer(value)
+ elif isinstance(value, six.class_types):
+ handle_class(value)
+ elif isinstance(value, six.string_types):
+ handle_string(value)
+
+
+Object model compatibility
+>>>>>>>>>>>>>>>>>>>>>>>>>>
+
+Python 3 renamed the attributes of several interpreter data structures. The
+following accessors are available. Note that the recommended way to inspect
+functions and methods is the stdlib :mod:`py3:inspect` module.
+
+
+.. function:: get_unbound_function(meth)
+
+ Get the function out of unbound method *meth*. In Python 3, unbound methods
+ don't exist, so this function just returns *meth* unchanged. Example
+ usage::
+
+ from six import get_unbound_function
+
+ class X(object):
+ def method(self):
+ pass
+ method_function = get_unbound_function(X.method)
+
+
+.. function:: get_method_function(meth)
+
+ Get the function out of method object *meth*.
+
+
+.. function:: get_method_self(meth)
+
+ Get the ``self`` of bound method *meth*.
+
+
+.. function:: get_function_closure(func)
+
+ Get the closure (list of cells) associated with *func*. This is equivalent
+ to ``func.__closure__`` on Python 2.6+ and ``func.func_closure`` on Python
+ 2.5.
+
+
+.. function:: get_function_code(func)
+
+ Get the code object associated with *func*. This is equivalent to
+ ``func.__code__`` on Python 2.6+ and ``func.func_code`` on Python 2.5.
+
+
+.. function:: get_function_defaults(func)
+
+ Get the defaults tuple associated with *func*. This is equivalent to
+ ``func.__defaults__`` on Python 2.6+ and ``func.func_defaults`` on Python
+ 2.5.
+
+
+.. function:: get_function_globals(func)
+
+ Get the globals of *func*. This is equivalent to ``func.__globals__`` on
+ Python 2.6+ and ``func.func_globals`` on Python 2.5.
+
+
+.. function:: next(it)
+ advance_iterator(it)
+
+ Get the next item of iterator *it*. :exc:`py3:StopIteration` is raised if
+ the iterator is exhausted. This is a replacement for calling ``it.next()``
+ in Python 2 and ``next(it)`` in Python 3. Python 2.6 and above have a
+ builtin ``next`` function, so six's version is only necessary for Python 2.5
+ compatibility.
+
+
+.. function:: callable(obj)
+
+ Check if *obj* can be called. Note ``callable`` has returned in Python 3.2,
+ so using six's version is only necessary when supporting Python 3.0 or 3.1.
+
+
+.. function:: iterkeys(dictionary, **kwargs)
+
+ Returns an iterator over *dictionary*\'s keys. This replaces
+ ``dictionary.iterkeys()`` on Python 2 and ``dictionary.keys()`` on
+ Python 3. *kwargs* are passed through to the underlying method.
+
+
+.. function:: itervalues(dictionary, **kwargs)
+
+ Returns an iterator over *dictionary*\'s values. This replaces
+ ``dictionary.itervalues()`` on Python 2 and ``dictionary.values()`` on
+ Python 3. *kwargs* are passed through to the underlying method.
+
+
+.. function:: iteritems(dictionary, **kwargs)
+
+ Returns an iterator over *dictionary*\'s items. This replaces
+ ``dictionary.iteritems()`` on Python 2 and ``dictionary.items()`` on
+ Python 3. *kwargs* are passed through to the underlying method.
+
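+   For example, a version-agnostic loop over a dictionary's items could be
+   written as follows (``inventory`` is just an invented dictionary for
+   illustration)::
+
+      import six
+
+      inventory = {"apples": 3, "pears": 5}
+      for name, count in six.iteritems(inventory):
+          print("%s: %d" % (name, count))
+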
+
+.. function:: iterlists(dictionary, **kwargs)
+
+ Calls ``dictionary.iterlists()`` on Python 2 and ``dictionary.lists()`` on
+ Python 3. No builtin Python mapping type has such a method; this method is
+ intended for use with multi-valued dictionaries like `Werkzeug's
+ <http://werkzeug.pocoo.org/docs/datastructures/#werkzeug.datastructures.MultiDict>`_.
+ *kwargs* are passed through to the underlying method.
+
+
+.. function:: viewkeys(dictionary)
+
+ Return a view over *dictionary*\'s keys. This replaces
+ :meth:`py2:dict.viewkeys` on Python 2.7 and :meth:`py3:dict.keys` on
+ Python 3.
+
+
+.. function:: viewvalues(dictionary)
+
+ Return a view over *dictionary*\'s values. This replaces
+ :meth:`py2:dict.viewvalues` on Python 2.7 and :meth:`py3:dict.values` on
+ Python 3.
+
+
+.. function:: viewitems(dictionary)
+
+ Return a view over *dictionary*\'s items. This replaces
+ :meth:`py2:dict.viewitems` on Python 2.7 and :meth:`py3:dict.items` on
+ Python 3.
+
+
+.. function:: create_bound_method(func, obj)
+
+ Return a method object wrapping *func* and bound to *obj*. On both Python 2
+ and 3, this will return a :func:`py3:types.MethodType` object. The reason
+ this wrapper exists is that on Python 2, the ``MethodType`` constructor
+ requires the *obj*'s class to be passed.
+
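+   A small sketch of its use (``Greeter`` and ``hello`` are invented names)::
+
+      from six import create_bound_method
+
+      class Greeter(object):
+          pass
+
+      def hello(self):
+          return "hello"
+
+      greeter = Greeter()
+      greeter.hello = create_bound_method(hello, greeter)
+      assert greeter.hello() == "hello"
+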
+
+.. function:: create_unbound_method(func, cls)
+
+ Return an unbound method object wrapping *func*. In Python 2, this will
+ return a :func:`py2:types.MethodType` object. In Python 3, unbound methods
+ do not exist and this wrapper will simply return *func*.
+
+
+.. class:: Iterator
+
+ A class for making portable iterators. The intention is that it be subclassed
+ and subclasses provide a ``__next__`` method. In Python 2, :class:`Iterator`
+ has one method: ``next``. It simply delegates to ``__next__``. An alternate
+ way to do this would be to simply alias ``next`` to ``__next__``. However,
+ this interacts badly with subclasses that override
+ ``__next__``. :class:`Iterator` is empty on Python 3. (In fact, it is just
+ aliased to :class:`py3:object`.)
+
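+   A minimal sketch of a portable iterator (``CountDown`` is an invented
+   example)::
+
+      import six
+
+      class CountDown(six.Iterator):
+          def __init__(self, start):
+              self.current = start
+
+          def __iter__(self):
+              return self
+
+          def __next__(self):
+              if self.current <= 0:
+                  raise StopIteration
+              self.current -= 1
+              return self.current + 1
+
+      assert list(CountDown(3)) == [3, 2, 1]
+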
+
+.. decorator:: wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES)
+
+ This is exactly the :func:`py3:functools.wraps` decorator, but it sets the
+ ``__wrapped__`` attribute on what it decorates as :func:`py3:functools.wraps`
+ does on Python versions after 3.2.
+
+
+Syntax compatibility
+>>>>>>>>>>>>>>>>>>>>
+
+These functions smooth over operations which have different syntaxes between
+Python 2 and 3.
+
+
+.. function:: exec_(code, globals=None, locals=None)
+
+ Execute *code* in the scope of *globals* and *locals*. *code* can be a
+ string or a code object. If *globals* or *locals* are not given, they will
+ default to the scope of the caller. If just *globals* is given, it will also
+ be used as *locals*.
+
+ .. note::
+
+ Python 3's :func:`py3:exec` doesn't take keyword arguments, so calling
+ :func:`exec` with them should be avoided.
+
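+   For instance, assuming an explicit namespace is supplied::
+
+      from six import exec_
+
+      namespace = {}
+      exec_("answer = 40 + 2", namespace)
+      assert namespace["answer"] == 42
+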
+
+.. function:: print_(*args, *, file=sys.stdout, end="\\n", sep=" ", flush=False)
+
+ Print *args* into *file*. Each argument will be separated with *sep* and
+ *end* will be written to the file after the last argument is printed. If
+ *flush* is true, ``file.flush()`` will be called after all data is written.
+
+ .. note::
+
+ In Python 2, this function imitates Python 3's :func:`py3:print` by not
+ having softspace support. If you don't know what that is, you're probably
+ ok. :)
+
+
+.. function:: raise_from(exc_value, exc_value_from)
+
+ Raise an exception from a context. On Python 3, this is equivalent to
+ ``raise exc_value from exc_value_from``. On Python 2, which does not support
+ exception chaining, it is equivalent to ``raise exc_value``.
+
+
+.. function:: reraise(exc_type, exc_value, exc_traceback=None)
+
+ Reraise an exception, possibly with a different traceback. In the simple
+ case, ``reraise(*sys.exc_info())`` with an active exception (in an except
+ block) reraises the current exception with the last traceback. A different
+ traceback can be specified with the *exc_traceback* parameter. Note that
+ since the exception reraising is done within the :func:`reraise` function,
+ Python will attach the call frame of :func:`reraise` to whatever traceback is
+ raised.
+
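+   A sketch of the common pattern (``do_work`` is an invented placeholder)::
+
+      import sys
+      import six
+
+      def do_work():
+          raise ValueError("boom")
+
+      try:
+          try:
+              do_work()
+          except ValueError:
+              # Re-raise the active exception with its original traceback.
+              six.reraise(*sys.exc_info())
+      except ValueError as exc:
+          assert str(exc) == "boom"
+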
+
+.. function:: with_metaclass(metaclass, *bases)
+
+ Create a new class with base classes *bases* and metaclass *metaclass*. This
+ is designed to be used in class declarations like this: ::
+
+ from six import with_metaclass
+
+ class Meta(type):
+ pass
+
+ class Base(object):
+ pass
+
+ class MyClass(with_metaclass(Meta, Base)):
+ pass
+
+ Another way to set a metaclass on a class is with the :func:`add_metaclass`
+ decorator.
+
+
+.. decorator:: add_metaclass(metaclass)
+
+ Class decorator that replaces a normally-constructed class with a
+ metaclass-constructed one. Example usage: ::
+
+ @add_metaclass(Meta)
+ class MyClass(object):
+ pass
+
+ That code produces a class equivalent to ::
+
+ class MyClass(object, metaclass=Meta):
+ pass
+
+ on Python 3 or ::
+
+ class MyClass(object):
+ __metaclass__ = Meta
+
+ on Python 2.
+
+ Note that class decorators require Python 2.6. However, the effect of the
+ decorator can be emulated on Python 2.5 like so::
+
+ class MyClass(object):
+ pass
+ MyClass = add_metaclass(Meta)(MyClass)
+
+
+Binary and text data
+>>>>>>>>>>>>>>>>>>>>
+
+Python 3 enforces the distinction between byte strings and text strings far more
+rigorously than Python 2 does; binary data cannot be automatically coerced to
+or from text data. six provides several functions to assist in classifying
+string data in all Python versions.
+
+
+.. function:: b(data)
+
+ A "fake" bytes literal. *data* should always be a normal string literal. In
+ Python 2, :func:`b` returns an 8-bit string. In Python 3, *data* is encoded
+ with the latin-1 encoding to bytes.
+
+
+ .. note::
+
+ Since all Python versions 2.6 and after support the ``b`` prefix,
+ code without 2.5 support doesn't need :func:`b`.
+
+
+.. function:: u(text)
+
+ A "fake" unicode literal. *text* should always be a normal string literal.
+ In Python 2, :func:`u` returns unicode, and in Python 3, a string. Also, in
+ Python 2, the string is decoded with the ``unicode-escape`` codec, which
+ allows unicode escapes to be used in it.
+
+
+ .. note::
+
+ In Python 3.3, the ``u`` prefix has been reintroduced. Code that only
+ supports Python 3 versions of 3.3 and higher thus does not need
+ :func:`u`.
+
+ .. note::
+
+ On Python 2, :func:`u` doesn't know what the encoding of the literal
+ is. Each byte is converted directly to the unicode codepoint of the same
+ value. Because of this, it's only safe to use :func:`u` with strings of
+ ASCII data.
+
+
+.. function:: unichr(c)
+
+ Return the (Unicode) string representing the codepoint *c*. This is
+ equivalent to :func:`py2:unichr` on Python 2 and :func:`py3:chr` on Python 3.
+
+
+.. function:: int2byte(i)
+
+ Converts *i* to a byte. *i* must be in ``range(0, 256)``. This is
+ equivalent to :func:`py2:chr` in Python 2 and ``bytes((i,))`` in Python 3.
+
+
+.. function:: byte2int(bs)
+
+ Converts the first byte of *bs* to an integer. This is equivalent to
+ ``ord(bs[0])`` on Python 2 and ``bs[0]`` on Python 3.
+
+
+.. function:: indexbytes(buf, i)
+
+ Return the byte at index *i* of *buf* as an integer. This is equivalent to
+ indexing a bytes object in Python 3.
+
+
+.. function:: iterbytes(buf)
+
+ Return an iterator over bytes in *buf* as integers. This is equivalent to
+ a bytes object iterator in Python 3.
+
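+   For example, a byte string can be inspected in a version-neutral way::
+
+      import six
+
+      data = six.b("abc")
+      assert six.byte2int(data) == 97
+      assert six.indexbytes(data, 1) == 98
+      assert list(six.iterbytes(data)) == [97, 98, 99]
+      assert six.int2byte(100) == six.b("d")
+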
+
+.. function:: ensure_binary(s, encoding='utf-8', errors='strict')
+
+ Coerce *s* to :data:`binary_type`. *encoding*, *errors* are the same as
+ :meth:`py3:str.encode`
+
+
+.. function:: ensure_str(s, encoding='utf-8', errors='strict')
+
+ Coerce *s* to ``str``. *encoding*, *errors* are the same as
+ :meth:`py3:str.encode`
+
+
+.. function:: ensure_text(s, encoding='utf-8', errors='strict')
+
+ Coerce *s* to :data:`text_type`. *encoding*, *errors* are the same as
+ :meth:`py3:str.encode`
+
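+   A short illustrative sketch::
+
+      import six
+
+      assert isinstance(six.ensure_binary("caf\xe9"), six.binary_type)
+      assert isinstance(six.ensure_text(b"caf\xc3\xa9"), six.text_type)
+      assert isinstance(six.ensure_str("plain"), str)
+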
+
+.. data:: StringIO
+
+ This is a fake file object for textual data. It's an alias for
+ :class:`py2:StringIO.StringIO` in Python 2 and :class:`py3:io.StringIO` in
+ Python 3.
+
+
+.. data:: BytesIO
+
+ This is a fake file object for binary data. In Python 2, it's an alias for
+ :class:`py2:StringIO.StringIO`, but in Python 3, it's an alias for
+ :class:`py3:io.BytesIO`.
+
+
+.. decorator:: python_2_unicode_compatible
+
+ A class decorator that takes a class defining a ``__str__`` method. On
+ Python 3, the decorator does nothing. On Python 2, it aliases the
+ ``__str__`` method to ``__unicode__`` and creates a new ``__str__`` method
+ that returns the result of ``__unicode__()`` encoded with UTF-8.
+
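+   A small sketch of its use (``Label`` is an invented class)::
+
+      import six
+
+      @six.python_2_unicode_compatible
+      class Label(object):
+          def __str__(self):
+              return u"caf\xe9"
+
+      # Text on Python 3; a UTF-8 encoded byte string on Python 2.
+      str(Label())
+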
+
+unittest assertions
+>>>>>>>>>>>>>>>>>>>
+
+Six contains compatibility shims for unittest assertions that have been renamed.
+The parameters are the same as their aliases, but you must pass the test case
+instance (``self``) as the first argument. For example::
+
+ import six
+ import unittest
+
+ class TestAssertCountEqual(unittest.TestCase):
+ def test(self):
+ six.assertCountEqual(self, (1, 2), [2, 1])
+
+Note these functions are only available on Python 2.7 or later.
+
+.. function:: assertCountEqual()
+
+ Alias for :meth:`~py3:unittest.TestCase.assertCountEqual` on Python 3 and
+ :meth:`~py2:unittest.TestCase.assertItemsEqual` on Python 2.
+
+
+.. function:: assertRaisesRegex()
+
+ Alias for :meth:`~py3:unittest.TestCase.assertRaisesRegex` on Python 3 and
+ :meth:`~py2:unittest.TestCase.assertRaisesRegexp` on Python 2.
+
+
+.. function:: assertRegex()
+
+ Alias for :meth:`~py3:unittest.TestCase.assertRegex` on Python 3 and
+ :meth:`~py2:unittest.TestCase.assertRegexpMatches` on Python 2.
+
+
+Renamed modules and attributes compatibility
+>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+
+.. module:: six.moves
+ :synopsis: Renamed modules and attributes compatibility
+
+Python 3 reorganized the standard library and moved several functions to
+different modules. Six provides a consistent interface to them through the fake
+:mod:`six.moves` module. For example, to load the module for parsing HTML on
+Python 2 or 3, write::
+
+ from six.moves import html_parser
+
+Similarly, to get the function to reload modules, which was moved from the
+builtin module to the ``importlib`` module, use::
+
+ from six.moves import reload_module
+
+For the most part, :mod:`six.moves` aliases are the names of the modules in
+Python 3. When the new Python 3 name is a package, the components of the name
+are separated by underscores. For example, ``html.parser`` becomes
+``html_parser``. In some cases where several modules have been combined, the
+Python 2 name is retained. This is so the appropriate modules can be found when
+running on Python 2. For example, ``BaseHTTPServer``, which is in
+``http.server`` in Python 3, is aliased as ``BaseHTTPServer``.
+
+Some modules which had two implementations have been merged in Python 3. For
+example, ``cPickle`` no longer exists in Python 3; it was merged with
+``pickle``. In these cases, fetching the fast version will load the fast one on
+Python 2 and the merged module in Python 3.
+
+The :mod:`py2:urllib`, :mod:`py2:urllib2`, and :mod:`py2:urlparse` modules have
+been combined in the :mod:`py3:urllib` package in Python 3. The
+:mod:`six.moves.urllib` package is a version-independent location for this
+functionality; its structure mimics the structure of the Python 3
+:mod:`py3:urllib` package.
+
+.. note::
+
+ In order to make imports of the form::
+
+ from six.moves.cPickle import loads
+
+ work, six places special proxy objects in :data:`py3:sys.modules`. These
+ proxies lazily load the underlying module when an attribute is fetched. This
+ will fail if the underlying module is not available in the Python
+ interpreter. For example, ``sys.modules["six.moves.winreg"].LoadKey`` would
+ fail on any non-Windows platform. Unfortunately, some applications try to
+ load attributes on every module in :data:`py3:sys.modules`. six mitigates
+ this problem for some applications by pretending attributes on unimportable
+ modules do not exist. This hack does not work in every case, though. If you are
+ encountering problems with the lazy modules and don't use any from imports
+   directly from ``six.moves`` modules, you can work around the issue by removing
+ the six proxy modules::
+
+ d = [name for name in sys.modules if name.startswith("six.moves.")]
+ for name in d:
+ del sys.modules[name]
+
+Supported renames:
+
++------------------------------+-------------------------------------+---------------------------------------+
+| Name | Python 2 name | Python 3 name |
++==============================+=====================================+=======================================+
+| ``builtins`` | :mod:`py2:__builtin__` | :mod:`py3:builtins` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``configparser`` | :mod:`py2:ConfigParser` | :mod:`py3:configparser` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``copyreg`` | :mod:`py2:copy_reg` | :mod:`py3:copyreg` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``cPickle`` | :mod:`py2:cPickle` | :mod:`py3:pickle` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``cStringIO`` | :func:`py2:cStringIO.StringIO` | :class:`py3:io.StringIO` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``collections_abc`` | :mod:`py2:collections` | :mod:`py3:collections.abc` (3.3+) |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``dbm_gnu`` | :mod:`py2:gdbm` | :mod:`py3:dbm.gnu` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``dbm_ndbm`` | :mod:`py2:dbm` | :mod:`py3:dbm.ndbm` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``_dummy_thread`` | :mod:`py2:dummy_thread` | :mod:`py3:_dummy_thread` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``email_mime_base`` | :mod:`py2:email.MIMEBase` | :mod:`py3:email.mime.base` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``email_mime_image`` | :mod:`py2:email.MIMEImage` | :mod:`py3:email.mime.image` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``email_mime_multipart`` | :mod:`py2:email.MIMEMultipart` | :mod:`py3:email.mime.multipart` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``email_mime_nonmultipart`` | :mod:`py2:email.MIMENonMultipart` | :mod:`py3:email.mime.nonmultipart` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``email_mime_text`` | :mod:`py2:email.MIMEText` | :mod:`py3:email.mime.text` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``filter`` | :func:`py2:itertools.ifilter` | :func:`py3:filter` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``filterfalse`` | :func:`py2:itertools.ifilterfalse` | :func:`py3:itertools.filterfalse` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``getcwd`` | :func:`py2:os.getcwdu` | :func:`py3:os.getcwd` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``getcwdb`` | :func:`py2:os.getcwd` | :func:`py3:os.getcwdb` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``getoutput`` | :func:`py2:commands.getoutput` | :func:`py3:subprocess.getoutput` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``http_cookiejar`` | :mod:`py2:cookielib` | :mod:`py3:http.cookiejar` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``http_cookies`` | :mod:`py2:Cookie` | :mod:`py3:http.cookies` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``html_entities`` | :mod:`py2:htmlentitydefs` | :mod:`py3:html.entities` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``html_parser`` | :mod:`py2:HTMLParser` | :mod:`py3:html.parser` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``http_client`` | :mod:`py2:httplib` | :mod:`py3:http.client` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``BaseHTTPServer`` | :mod:`py2:BaseHTTPServer` | :mod:`py3:http.server` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``CGIHTTPServer`` | :mod:`py2:CGIHTTPServer` | :mod:`py3:http.server` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``SimpleHTTPServer`` | :mod:`py2:SimpleHTTPServer` | :mod:`py3:http.server` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``input`` | :func:`py2:raw_input` | :func:`py3:input` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``intern`` | :func:`py2:intern` | :func:`py3:sys.intern` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``map`` | :func:`py2:itertools.imap` | :func:`py3:map` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``queue`` | :mod:`py2:Queue` | :mod:`py3:queue` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``range`` | :func:`py2:xrange` | :func:`py3:range` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``reduce`` | :func:`py2:reduce` | :func:`py3:functools.reduce` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``reload_module`` | :func:`py2:reload` | :func:`py3:imp.reload`, |
+| | | :func:`py3:importlib.reload` |
+| | | on Python 3.4+ |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``reprlib`` | :mod:`py2:repr` | :mod:`py3:reprlib` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``shlex_quote`` | :mod:`py2:pipes.quote` | :mod:`py3:shlex.quote` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``socketserver`` | :mod:`py2:SocketServer` | :mod:`py3:socketserver` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``_thread`` | :mod:`py2:thread` | :mod:`py3:_thread` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter`` | :mod:`py2:Tkinter` | :mod:`py3:tkinter` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_dialog`` | :mod:`py2:Dialog` | :mod:`py3:tkinter.dialog` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_filedialog`` | :mod:`py2:FileDialog` | :mod:`py3:tkinter.FileDialog` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_scrolledtext`` | :mod:`py2:ScrolledText` | :mod:`py3:tkinter.scrolledtext` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_simpledialog`` | :mod:`py2:SimpleDialog` | :mod:`py3:tkinter.simpledialog` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_ttk`` | :mod:`py2:ttk` | :mod:`py3:tkinter.ttk` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_tix`` | :mod:`py2:Tix` | :mod:`py3:tkinter.tix` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_constants`` | :mod:`py2:Tkconstants` | :mod:`py3:tkinter.constants` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_dnd`` | :mod:`py2:Tkdnd` | :mod:`py3:tkinter.dnd` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_colorchooser`` | :mod:`py2:tkColorChooser` | :mod:`py3:tkinter.colorchooser` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_commondialog`` | :mod:`py2:tkCommonDialog` | :mod:`py3:tkinter.commondialog` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_tkfiledialog`` | :mod:`py2:tkFileDialog` | :mod:`py3:tkinter.filedialog` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_font`` | :mod:`py2:tkFont` | :mod:`py3:tkinter.font` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_messagebox`` | :mod:`py2:tkMessageBox` | :mod:`py3:tkinter.messagebox` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_tksimpledialog`` | :mod:`py2:tkSimpleDialog` | :mod:`py3:tkinter.simpledialog` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``urllib.parse`` | See :mod:`six.moves.urllib.parse` | :mod:`py3:urllib.parse` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``urllib.error`` | See :mod:`six.moves.urllib.error` | :mod:`py3:urllib.error` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``urllib.request`` | See :mod:`six.moves.urllib.request` | :mod:`py3:urllib.request` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``urllib.response`` | See :mod:`six.moves.urllib.response`| :mod:`py3:urllib.response` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``urllib.robotparser`` | :mod:`py2:robotparser` | :mod:`py3:urllib.robotparser` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``urllib_robotparser`` | :mod:`py2:robotparser` | :mod:`py3:urllib.robotparser` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``UserDict`` | :class:`py2:UserDict.UserDict` | :class:`py3:collections.UserDict` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``UserList`` | :class:`py2:UserList.UserList` | :class:`py3:collections.UserList` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``UserString`` | :class:`py2:UserString.UserString` | :class:`py3:collections.UserString` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``winreg`` | :mod:`py2:_winreg` | :mod:`py3:winreg` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``xmlrpc_client`` | :mod:`py2:xmlrpclib` | :mod:`py3:xmlrpc.client` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``xmlrpc_server`` | :mod:`py2:SimpleXMLRPCServer` | :mod:`py3:xmlrpc.server` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``xrange`` | :func:`py2:xrange` | :func:`py3:range` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``zip`` | :func:`py2:itertools.izip` | :func:`py3:zip` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``zip_longest`` | :func:`py2:itertools.izip_longest` | :func:`py3:itertools.zip_longest` |
++------------------------------+-------------------------------------+---------------------------------------+
+
+urllib parse
+<<<<<<<<<<<<
+
+.. module:: six.moves.urllib.parse
+ :synopsis: Stuff from :mod:`py2:urlparse` and :mod:`py2:urllib` in Python 2 and :mod:`py3:urllib.parse` in Python 3
+
+Contains functions from Python 3's :mod:`py3:urllib.parse` and Python 2's:
+
+:mod:`py2:urlparse`:
+
+* :func:`py2:urlparse.ParseResult`
+* :func:`py2:urlparse.SplitResult`
+* :func:`py2:urlparse.urlparse`
+* :func:`py2:urlparse.urlunparse`
+* :func:`py2:urlparse.parse_qs`
+* :func:`py2:urlparse.parse_qsl`
+* :func:`py2:urlparse.urljoin`
+* :func:`py2:urlparse.urldefrag`
+* :func:`py2:urlparse.urlsplit`
+* :func:`py2:urlparse.urlunsplit`
+* :func:`py2:urlparse.splitquery`
+* :func:`py2:urlparse.uses_fragment`
+* :func:`py2:urlparse.uses_netloc`
+* :func:`py2:urlparse.uses_params`
+* :func:`py2:urlparse.uses_query`
+* :func:`py2:urlparse.uses_relative`
+
+and :mod:`py2:urllib`:
+
+* :func:`py2:urllib.quote`
+* :func:`py2:urllib.quote_plus`
+* :func:`py2:urllib.splittag`
+* :func:`py2:urllib.splituser`
+* :func:`py2:urllib.splitvalue`
+* :func:`py2:urllib.unquote` (also exposed as :func:`py3:urllib.parse.unquote_to_bytes`)
+* :func:`py2:urllib.unquote_plus`
+* :func:`py2:urllib.urlencode`
+
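+For example, parsing a URL and encoding a query string in a
+version-independent way could look like this (the URL is just a placeholder)::
+
+    from six.moves.urllib.parse import urlencode, urlparse
+
+    parts = urlparse("https://example.com/search?q=six")
+    assert parts.netloc == "example.com"
+    assert urlencode({"q": "six"}) == "q=six"
+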
+
+urllib error
+<<<<<<<<<<<<
+
+.. module:: six.moves.urllib.error
+ :synopsis: Stuff from :mod:`py2:urllib` and :mod:`py2:urllib2` in Python 2 and :mod:`py3:urllib.error` in Python 3
+
+Contains exceptions from Python 3's :mod:`py3:urllib.error` and Python 2's:
+
+:mod:`py2:urllib`:
+
+* :exc:`py2:urllib.ContentTooShortError`
+
+and :mod:`py2:urllib2`:
+
+* :exc:`py2:urllib2.URLError`
+* :exc:`py2:urllib2.HTTPError`
+
+
+urllib request
+<<<<<<<<<<<<<<
+
+.. module:: six.moves.urllib.request
+ :synopsis: Stuff from :mod:`py2:urllib` and :mod:`py2:urllib2` in Python 2 and :mod:`py3:urllib.request` in Python 3
+
+Contains items from Python 3's :mod:`py3:urllib.request` and Python 2's:
+
+:mod:`py2:urllib`:
+
+* :func:`py2:urllib.pathname2url`
+* :func:`py2:urllib.url2pathname`
+* :func:`py2:urllib.getproxies`
+* :func:`py2:urllib.urlretrieve`
+* :func:`py2:urllib.urlcleanup`
+* :class:`py2:urllib.URLopener`
+* :class:`py2:urllib.FancyURLopener`
+* :func:`py2:urllib.proxy_bypass`
+
+and :mod:`py2:urllib2`:
+
+* :func:`py2:urllib2.urlopen`
+* :func:`py2:urllib2.install_opener`
+* :func:`py2:urllib2.build_opener`
+* :func:`py2:urllib2.parse_http_list`
+* :func:`py2:urllib2.parse_keqv_list`
+* :class:`py2:urllib2.Request`
+* :class:`py2:urllib2.OpenerDirector`
+* :class:`py2:urllib2.HTTPDefaultErrorHandler`
+* :class:`py2:urllib2.HTTPRedirectHandler`
+* :class:`py2:urllib2.HTTPCookieProcessor`
+* :class:`py2:urllib2.ProxyHandler`
+* :class:`py2:urllib2.BaseHandler`
+* :class:`py2:urllib2.HTTPPasswordMgr`
+* :class:`py2:urllib2.HTTPPasswordMgrWithDefaultRealm`
+* :class:`py2:urllib2.AbstractBasicAuthHandler`
+* :class:`py2:urllib2.HTTPBasicAuthHandler`
+* :class:`py2:urllib2.ProxyBasicAuthHandler`
+* :class:`py2:urllib2.AbstractDigestAuthHandler`
+* :class:`py2:urllib2.HTTPDigestAuthHandler`
+* :class:`py2:urllib2.ProxyDigestAuthHandler`
+* :class:`py2:urllib2.HTTPHandler`
+* :class:`py2:urllib2.HTTPSHandler`
+* :class:`py2:urllib2.FileHandler`
+* :class:`py2:urllib2.FTPHandler`
+* :class:`py2:urllib2.CacheFTPHandler`
+* :class:`py2:urllib2.UnknownHandler`
+* :class:`py2:urllib2.HTTPErrorProcessor`
+
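+A usage sketch (the URL is only a placeholder and fetching it performs a real
+network request)::
+
+    from six.moves.urllib.request import urlopen
+
+    response = urlopen("https://example.com/")
+    body = response.read()
+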
+
+urllib response
+<<<<<<<<<<<<<<<
+
+.. module:: six.moves.urllib.response
+ :synopsis: Stuff from :mod:`py2:urllib` in Python 2 and :mod:`py3:urllib.response` in Python 3
+
+Contains classes from Python 3's :mod:`py3:urllib.response` and Python 2's:
+
+:mod:`py2:urllib`:
+
+* :class:`py2:urllib.addbase`
+* :class:`py2:urllib.addclosehook`
+* :class:`py2:urllib.addinfo`
+* :class:`py2:urllib.addinfourl`
+
+
+Advanced - Customizing renames
+<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
+
+.. currentmodule:: six
+
+It is possible to add additional names to the :mod:`six.moves` namespace.
+
+
+.. function:: add_move(item)
+
+ Add *item* to the :mod:`six.moves` mapping. *item* should be a
+ :class:`MovedAttribute` or :class:`MovedModule` instance.
+
+
+.. function:: remove_move(name)
+
+ Remove the :mod:`six.moves` mapping called *name*. *name* should be a
+ string.
+
+
+Instances of the following classes can be passed to :func:`add_move`. Neither
+have any public members.
+
+
+.. class:: MovedModule(name, old_mod, new_mod)
+
+ Create a mapping for :mod:`six.moves` called *name* that references different
+ modules in Python 2 and 3. *old_mod* is the name of the Python 2 module.
+ *new_mod* is the name of the Python 3 module.
+
+
+.. class:: MovedAttribute(name, old_mod, new_mod, old_attr=None, new_attr=None)
+
+ Create a mapping for :mod:`six.moves` called *name* that references different
+ attributes in Python 2 and 3. *old_mod* is the name of the Python 2 module.
+ *new_mod* is the name of the Python 3 module. If *new_attr* is not given, it
+ defaults to *old_attr*. If neither is given, they both default to *name*.
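+
+
+As an illustrative sketch, a custom rename could be registered and used like
+this (``dbm_dumb`` is an arbitrary example name)::
+
+    import six
+
+    six.add_move(six.MovedModule("dbm_dumb", "dumbdbm", "dbm.dumb"))
+    from six.moves import dbm_dumb
+
+    six.remove_move("dbm_dumb")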
diff --git a/third_party/python/six/setup.cfg b/third_party/python/six/setup.cfg
new file mode 100644
index 0000000000..fb1f5367a4
--- /dev/null
+++ b/third_party/python/six/setup.cfg
@@ -0,0 +1,24 @@
+[bdist_wheel]
+universal = 1
+
+[flake8]
+max-line-length = 100
+ignore = F821
+
+[metadata]
+license_file = LICENSE
+
+[tool:pytest]
+minversion = 2.2.0
+pep8ignore =
+ documentation/*.py ALL
+ test_six.py ALL
+flakes-ignore =
+ documentation/*.py ALL
+ test_six.py ALL
+ six.py UndefinedName
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/six/setup.py b/third_party/python/six/setup.py
new file mode 100644
index 0000000000..97c685b5a5
--- /dev/null
+++ b/third_party/python/six/setup.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2010-2019 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+from __future__ import with_statement
+
+# Six is a dependency of setuptools, so using setuptools creates a
+# circular dependency when building a Python stack from source. We
+# therefore allow falling back to distutils to install six.
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+import six
+
+six_classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 3",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: MIT License",
+ "Topic :: Software Development :: Libraries",
+ "Topic :: Utilities",
+]
+
+with open("README.rst", "r") as fp:
+ six_long_description = fp.read()
+
+setup(name="six",
+ version=six.__version__,
+ author="Benjamin Peterson",
+ author_email="benjamin@python.org",
+ url="https://github.com/benjaminp/six",
+ tests_require=["pytest"],
+ py_modules=["six"],
+ description="Python 2 and 3 compatibility utilities",
+ long_description=six_long_description,
+ license="MIT",
+ classifiers=six_classifiers,
+ python_requires=">=2.6, !=3.0.*, !=3.1.*",
+ )
diff --git a/third_party/python/six/six.py b/third_party/python/six/six.py
new file mode 100644
index 0000000000..357e624abc
--- /dev/null
+++ b/third_party/python/six/six.py
@@ -0,0 +1,963 @@
+# Copyright (c) 2010-2019 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.13.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP302 finder and loader. It should be compatible
+ with Python 2.5 and all existing versions of Python3
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+ Return true, if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP451)
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required, if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("getoutput", "commands", "subprocess"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+ MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+ unichr = chr
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ del io
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+else:
+ def b(s):
+ return s
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ try:
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+ finally:
+ value = None
+ tb = None
+
+else:
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+ exec_("""def reraise(tp, value, tb=None):
+ try:
+ raise tp, value, tb
+ finally:
+ tb = None
+""")
+
+
+if sys.version_info[:2] == (3, 2):
+ exec_("""def raise_from(value, from_value):
+ try:
+ if from_value is None:
+ raise value
+ raise value from from_value
+ finally:
+ value = None
+""")
+elif sys.version_info[:2] > (3, 2):
+ exec_("""def raise_from(value, from_value):
+ try:
+ raise value from from_value
+ finally:
+ value = None
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ def wrapper(f):
+ f = functools.wraps(wrapped, assigned, updated)(f)
+ f.__wrapped__ = wrapped
+ return f
+ return wrapper
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(type):
+
+ def __new__(cls, name, this_bases, d):
+ if sys.version_info[:2] >= (3, 7):
+ # This version introduced PEP 560 that requires a bit
+ # of extra care (we mimic what is done by __build_class__).
+ resolved_bases = types.resolve_bases(bases)
+ if resolved_bases is not bases:
+ d['__orig_bases__'] = bases
+ else:
+ resolved_bases = bases
+ return meta(name, resolved_bases, d)
+
+ @classmethod
+ def __prepare__(cls, name, this_bases):
+ return meta.__prepare__(name, bases)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ if hasattr(cls, '__qualname__'):
+ orig_vars['__qualname__'] = cls.__qualname__
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
+
+
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ elif isinstance(s, binary_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ if PY2 and isinstance(s, text_type):
+ s = s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ s = s.decode(encoding, errors)
+ return s
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+
+def python_2_unicode_compatible(klass):
+ """
+ A decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+ # inserted an importer with different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
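
The block above completes six's import machinery: the _SixMetaPathImporter registered on sys.meta_path resolves six.moves and its submodules lazily, mapping each dotted name to whichever standard-library module exists on the running interpreter (per the MovedModule/MovedAttribute tables earlier in the file). A minimal usage sketch, assuming six is installed, with names taken from those tables:

    # Sketch: these imports work unchanged on Python 2 and Python 3 because the
    # meta path importer resolves each dotted name lazily at import time.
    from six.moves import configparser          # ConfigParser (PY2) / configparser (PY3)
    from six.moves import range                 # xrange (PY2) / builtin range (PY3)
    from six.moves.urllib.parse import urljoin  # urlparse (PY2) / urllib.parse (PY3)

    assert urljoin("http://example.com/a/", "b") == "http://example.com/a/b"
    assert list(range(3)) == [0, 1, 2]
    parser = configparser.ConfigParser()        # same class name on either major version
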
diff --git a/third_party/python/six/test_six.py b/third_party/python/six/test_six.py
new file mode 100644
index 0000000000..0b72067416
--- /dev/null
+++ b/third_party/python/six/test_six.py
@@ -0,0 +1,1060 @@
+# Copyright (c) 2010-2019 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+import operator
+import sys
+import types
+import unittest
+import abc
+
+import pytest
+
+import six
+
+
+def test_add_doc():
+ def f():
+ """Icky doc"""
+ pass
+ six._add_doc(f, """New doc""")
+ assert f.__doc__ == "New doc"
+
+
+def test_import_module():
+ from logging import handlers
+ m = six._import_module("logging.handlers")
+ assert m is handlers
+
+
+def test_integer_types():
+ assert isinstance(1, six.integer_types)
+ assert isinstance(-1, six.integer_types)
+ assert isinstance(six.MAXSIZE + 23, six.integer_types)
+ assert not isinstance(.1, six.integer_types)
+
+
+def test_string_types():
+ assert isinstance("hi", six.string_types)
+ assert isinstance(six.u("hi"), six.string_types)
+ assert issubclass(six.text_type, six.string_types)
+
+
+def test_class_types():
+ class X:
+ pass
+ class Y(object):
+ pass
+ assert isinstance(X, six.class_types)
+ assert isinstance(Y, six.class_types)
+ assert not isinstance(X(), six.class_types)
+
+
+def test_text_type():
+ assert type(six.u("hi")) is six.text_type
+
+
+def test_binary_type():
+ assert type(six.b("hi")) is six.binary_type
+
+
+def test_MAXSIZE():
+ try:
+ # This shouldn't raise an overflow error.
+ six.MAXSIZE.__index__()
+ except AttributeError:
+ # Before Python 2.6.
+ pass
+ pytest.raises(
+ (ValueError, OverflowError),
+ operator.mul, [None], six.MAXSIZE + 1)
+
+
+def test_lazy():
+ if six.PY3:
+ html_name = "html.parser"
+ else:
+ html_name = "HTMLParser"
+ assert html_name not in sys.modules
+ mod = six.moves.html_parser
+ assert sys.modules[html_name] is mod
+ assert "htmlparser" not in six._MovedItems.__dict__
+
+
+try:
+ import _tkinter
+except ImportError:
+ have_tkinter = False
+else:
+ have_tkinter = True
+
+have_gdbm = True
+try:
+ import gdbm
+except ImportError:
+ try:
+ import dbm.gnu
+ except ImportError:
+ have_gdbm = False
+
+@pytest.mark.parametrize("item_name",
+ [item.name for item in six._moved_attributes])
+def test_move_items(item_name):
+ """Ensure that everything loads correctly."""
+ try:
+ item = getattr(six.moves, item_name)
+ if isinstance(item, types.ModuleType):
+ __import__("six.moves." + item_name)
+ except AttributeError:
+ if item_name == "zip_longest" and sys.version_info < (2, 6):
+ pytest.skip("zip_longest only available on 2.6+")
+ except ImportError:
+ if item_name == "winreg" and not sys.platform.startswith("win"):
+ pytest.skip("Windows only module")
+ if item_name.startswith("tkinter"):
+ if not have_tkinter:
+ pytest.skip("requires tkinter")
+ if item_name == "tkinter_ttk" and sys.version_info[:2] <= (2, 6):
+ pytest.skip("ttk only available on 2.7+")
+ if item_name.startswith("dbm_gnu") and not have_gdbm:
+ pytest.skip("requires gdbm")
+ raise
+ if sys.version_info[:2] >= (2, 6):
+ assert item_name in dir(six.moves)
+
+
+@pytest.mark.parametrize("item_name",
+ [item.name for item in six._urllib_parse_moved_attributes])
+def test_move_items_urllib_parse(item_name):
+ """Ensure that everything loads correctly."""
+ if item_name == "ParseResult" and sys.version_info < (2, 5):
+ pytest.skip("ParseResult is only found on 2.5+")
+ if item_name in ("parse_qs", "parse_qsl") and sys.version_info < (2, 6):
+ pytest.skip("parse_qs[l] is new in 2.6")
+ if sys.version_info[:2] >= (2, 6):
+ assert item_name in dir(six.moves.urllib.parse)
+ getattr(six.moves.urllib.parse, item_name)
+
+
+@pytest.mark.parametrize("item_name",
+ [item.name for item in six._urllib_error_moved_attributes])
+def test_move_items_urllib_error(item_name):
+ """Ensure that everything loads correctly."""
+ if sys.version_info[:2] >= (2, 6):
+ assert item_name in dir(six.moves.urllib.error)
+ getattr(six.moves.urllib.error, item_name)
+
+
+@pytest.mark.parametrize("item_name",
+ [item.name for item in six._urllib_request_moved_attributes])
+def test_move_items_urllib_request(item_name):
+ """Ensure that everything loads correctly."""
+ if sys.version_info[:2] >= (2, 6):
+ assert item_name in dir(six.moves.urllib.request)
+ getattr(six.moves.urllib.request, item_name)
+
+
+@pytest.mark.parametrize("item_name",
+ [item.name for item in six._urllib_response_moved_attributes])
+def test_move_items_urllib_response(item_name):
+ """Ensure that everything loads correctly."""
+ if sys.version_info[:2] >= (2, 6):
+ assert item_name in dir(six.moves.urllib.response)
+ getattr(six.moves.urllib.response, item_name)
+
+
+@pytest.mark.parametrize("item_name",
+ [item.name for item in six._urllib_robotparser_moved_attributes])
+def test_move_items_urllib_robotparser(item_name):
+ """Ensure that everything loads correctly."""
+ if sys.version_info[:2] >= (2, 6):
+ assert item_name in dir(six.moves.urllib.robotparser)
+ getattr(six.moves.urllib.robotparser, item_name)
+
+
+def test_import_moves_error_1():
+ from six.moves.urllib.parse import urljoin
+ from six import moves
+ # In 1.4.1: AttributeError: 'Module_six_moves_urllib_parse' object has no attribute 'urljoin'
+ assert moves.urllib.parse.urljoin
+
+
+def test_import_moves_error_2():
+ from six import moves
+ assert moves.urllib.parse.urljoin
+ # In 1.4.1: ImportError: cannot import name urljoin
+ from six.moves.urllib.parse import urljoin
+
+
+def test_import_moves_error_3():
+ from six.moves.urllib.parse import urljoin
+ # In 1.4.1: ImportError: cannot import name urljoin
+ from six.moves.urllib_parse import urljoin
+
+
+def test_from_imports():
+ from six.moves.queue import Queue
+ assert isinstance(Queue, six.class_types)
+ from six.moves.configparser import ConfigParser
+ assert isinstance(ConfigParser, six.class_types)
+
+
+def test_filter():
+ from six.moves import filter
+ f = filter(lambda x: x % 2, range(10))
+ assert six.advance_iterator(f) == 1
+
+
+def test_filter_false():
+ from six.moves import filterfalse
+ f = filterfalse(lambda x: x % 3, range(10))
+ assert six.advance_iterator(f) == 0
+ assert six.advance_iterator(f) == 3
+ assert six.advance_iterator(f) == 6
+
+def test_map():
+ from six.moves import map
+ assert six.advance_iterator(map(lambda x: x + 1, range(2))) == 1
+
+
+def test_getoutput():
+ from six.moves import getoutput
+ output = getoutput('echo "foo"')
+ assert output == 'foo'
+
+
+def test_zip():
+ from six.moves import zip
+ assert six.advance_iterator(zip(range(2), range(2))) == (0, 0)
+
+
+@pytest.mark.skipif("sys.version_info < (2, 6)")
+def test_zip_longest():
+ from six.moves import zip_longest
+ it = zip_longest(range(2), range(1))
+
+ assert six.advance_iterator(it) == (0, 0)
+ assert six.advance_iterator(it) == (1, None)
+
+
+class TestCustomizedMoves:
+
+ def teardown_method(self, meth):
+ try:
+ del six._MovedItems.spam
+ except AttributeError:
+ pass
+ try:
+ del six.moves.__dict__["spam"]
+ except KeyError:
+ pass
+
+
+ def test_moved_attribute(self):
+ attr = six.MovedAttribute("spam", "foo", "bar")
+ if six.PY3:
+ assert attr.mod == "bar"
+ else:
+ assert attr.mod == "foo"
+ assert attr.attr == "spam"
+ attr = six.MovedAttribute("spam", "foo", "bar", "lemma")
+ assert attr.attr == "lemma"
+ attr = six.MovedAttribute("spam", "foo", "bar", "lemma", "theorm")
+ if six.PY3:
+ assert attr.attr == "theorm"
+ else:
+ assert attr.attr == "lemma"
+
+
+ def test_moved_module(self):
+ attr = six.MovedModule("spam", "foo")
+ if six.PY3:
+ assert attr.mod == "spam"
+ else:
+ assert attr.mod == "foo"
+ attr = six.MovedModule("spam", "foo", "bar")
+ if six.PY3:
+ assert attr.mod == "bar"
+ else:
+ assert attr.mod == "foo"
+
+
+ def test_custom_move_module(self):
+ attr = six.MovedModule("spam", "six", "six")
+ six.add_move(attr)
+ six.remove_move("spam")
+ assert not hasattr(six.moves, "spam")
+ attr = six.MovedModule("spam", "six", "six")
+ six.add_move(attr)
+ from six.moves import spam
+ assert spam is six
+ six.remove_move("spam")
+ assert not hasattr(six.moves, "spam")
+
+
+ def test_custom_move_attribute(self):
+ attr = six.MovedAttribute("spam", "six", "six", "u", "u")
+ six.add_move(attr)
+ six.remove_move("spam")
+ assert not hasattr(six.moves, "spam")
+ attr = six.MovedAttribute("spam", "six", "six", "u", "u")
+ six.add_move(attr)
+ from six.moves import spam
+ assert spam is six.u
+ six.remove_move("spam")
+ assert not hasattr(six.moves, "spam")
+
+
+ def test_empty_remove(self):
+ pytest.raises(AttributeError, six.remove_move, "eggs")
+
+
+def test_get_unbound_function():
+ class X(object):
+ def m(self):
+ pass
+ assert six.get_unbound_function(X.m) is X.__dict__["m"]
+
+
+def test_get_method_self():
+ class X(object):
+ def m(self):
+ pass
+ x = X()
+ assert six.get_method_self(x.m) is x
+ pytest.raises(AttributeError, six.get_method_self, 42)
+
+
+def test_get_method_function():
+ class X(object):
+ def m(self):
+ pass
+ x = X()
+ assert six.get_method_function(x.m) is X.__dict__["m"]
+ pytest.raises(AttributeError, six.get_method_function, hasattr)
+
+
+def test_get_function_closure():
+ def f():
+ x = 42
+ def g():
+ return x
+ return g
+ cell = six.get_function_closure(f())[0]
+ assert type(cell).__name__ == "cell"
+
+
+def test_get_function_code():
+ def f():
+ pass
+ assert isinstance(six.get_function_code(f), types.CodeType)
+ if not hasattr(sys, "pypy_version_info"):
+ pytest.raises(AttributeError, six.get_function_code, hasattr)
+
+
+def test_get_function_defaults():
+ def f(x, y=3, b=4):
+ pass
+ assert six.get_function_defaults(f) == (3, 4)
+
+
+def test_get_function_globals():
+ def f():
+ pass
+ assert six.get_function_globals(f) is globals()
+
+
+def test_dictionary_iterators(monkeypatch):
+ def stock_method_name(iterwhat):
+ """Given a method suffix like "lists" or "values", return the name
+ of the dict method that delivers those on the version of Python
+ we're running in."""
+ if six.PY3:
+ return iterwhat
+ return 'iter' + iterwhat
+
+ class MyDict(dict):
+ if not six.PY3:
+ def lists(self, **kw):
+ return [1, 2, 3]
+ def iterlists(self, **kw):
+ return iter([1, 2, 3])
+ f = MyDict.iterlists
+ del MyDict.iterlists
+ setattr(MyDict, stock_method_name('lists'), f)
+
+ d = MyDict(zip(range(10), reversed(range(10))))
+ for name in "keys", "values", "items", "lists":
+ meth = getattr(six, "iter" + name)
+ it = meth(d)
+ assert not isinstance(it, list)
+ assert list(it) == list(getattr(d, name)())
+ pytest.raises(StopIteration, six.advance_iterator, it)
+ record = []
+ def with_kw(*args, **kw):
+ record.append(kw["kw"])
+ return old(*args)
+ old = getattr(MyDict, stock_method_name(name))
+ monkeypatch.setattr(MyDict, stock_method_name(name), with_kw)
+ meth(d, kw=42)
+ assert record == [42]
+ monkeypatch.undo()
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (2, 7)",
+ reason="view methods on dictionaries only available on 2.7+")
+def test_dictionary_views():
+ def stock_method_name(viewwhat):
+ """Given a method suffix like "keys" or "values", return the name
+ of the dict method that delivers those on the version of Python
+ we're running in."""
+ if six.PY3:
+ return viewwhat
+ return 'view' + viewwhat
+
+ d = dict(zip(range(10), (range(11, 20))))
+ for name in "keys", "values", "items":
+ meth = getattr(six, "view" + name)
+ view = meth(d)
+ assert set(view) == set(getattr(d, name)())
+
+
+def test_advance_iterator():
+ assert six.next is six.advance_iterator
+ l = [1, 2]
+ it = iter(l)
+ assert six.next(it) == 1
+ assert six.next(it) == 2
+ pytest.raises(StopIteration, six.next, it)
+ pytest.raises(StopIteration, six.next, it)
+
+
+def test_iterator():
+ class myiter(six.Iterator):
+ def __next__(self):
+ return 13
+ assert six.advance_iterator(myiter()) == 13
+ class myitersub(myiter):
+ def __next__(self):
+ return 14
+ assert six.advance_iterator(myitersub()) == 14
+
+
+def test_callable():
+ class X:
+ def __call__(self):
+ pass
+ def method(self):
+ pass
+ assert six.callable(X)
+ assert six.callable(X())
+ assert six.callable(test_callable)
+ assert six.callable(hasattr)
+ assert six.callable(X.method)
+ assert six.callable(X().method)
+ assert not six.callable(4)
+ assert not six.callable("string")
+
+
+def test_create_bound_method():
+ class X(object):
+ pass
+ def f(self):
+ return self
+ x = X()
+ b = six.create_bound_method(f, x)
+ assert isinstance(b, types.MethodType)
+ assert b() is x
+
+
+def test_create_unbound_method():
+ class X(object):
+ pass
+
+ def f(self):
+ return self
+ u = six.create_unbound_method(f, X)
+ pytest.raises(TypeError, u)
+ if six.PY2:
+ assert isinstance(u, types.MethodType)
+ x = X()
+ assert f(x) is x
+
+
+if six.PY3:
+
+ def test_b():
+ data = six.b("\xff")
+ assert isinstance(data, bytes)
+ assert len(data) == 1
+ assert data == bytes([255])
+
+
+ def test_u():
+ s = six.u("hi \u0439 \U00000439 \\ \\\\ \n")
+ assert isinstance(s, str)
+ assert s == "hi \u0439 \U00000439 \\ \\\\ \n"
+
+else:
+
+ def test_b():
+ data = six.b("\xff")
+ assert isinstance(data, str)
+ assert len(data) == 1
+ assert data == "\xff"
+
+
+ def test_u():
+ s = six.u("hi \u0439 \U00000439 \\ \\\\ \n")
+ assert isinstance(s, unicode)
+ assert s == "hi \xd0\xb9 \xd0\xb9 \\ \\\\ \n".decode("utf8")
+
+
+def test_u_escapes():
+ s = six.u("\u1234")
+ assert len(s) == 1
+
+
+def test_unichr():
+ assert six.u("\u1234") == six.unichr(0x1234)
+ assert type(six.u("\u1234")) is type(six.unichr(0x1234))
+
+
+def test_int2byte():
+ assert six.int2byte(3) == six.b("\x03")
+ pytest.raises(Exception, six.int2byte, 256)
+
+
+def test_byte2int():
+ assert six.byte2int(six.b("\x03")) == 3
+ assert six.byte2int(six.b("\x03\x04")) == 3
+ pytest.raises(IndexError, six.byte2int, six.b(""))
+
+
+def test_bytesindex():
+ assert six.indexbytes(six.b("hello"), 3) == ord("l")
+
+
+def test_bytesiter():
+ it = six.iterbytes(six.b("hi"))
+ assert six.next(it) == ord("h")
+ assert six.next(it) == ord("i")
+ pytest.raises(StopIteration, six.next, it)
+
+
+def test_StringIO():
+ fp = six.StringIO()
+ fp.write(six.u("hello"))
+ assert fp.getvalue() == six.u("hello")
+
+
+def test_BytesIO():
+ fp = six.BytesIO()
+ fp.write(six.b("hello"))
+ assert fp.getvalue() == six.b("hello")
+
+
+def test_exec_():
+ def f():
+ l = []
+ six.exec_("l.append(1)")
+ assert l == [1]
+ f()
+ ns = {}
+ six.exec_("x = 42", ns)
+ assert ns["x"] == 42
+ glob = {}
+ loc = {}
+ six.exec_("global y; y = 42; x = 12", glob, loc)
+ assert glob["y"] == 42
+ assert "x" not in glob
+ assert loc["x"] == 12
+ assert "y" not in loc
+
+
+def test_reraise():
+ def get_next(tb):
+ if six.PY3:
+ return tb.tb_next.tb_next
+ else:
+ return tb.tb_next
+ e = Exception("blah")
+ try:
+ raise e
+ except Exception:
+ tp, val, tb = sys.exc_info()
+ try:
+ six.reraise(tp, val, tb)
+ except Exception:
+ tp2, value2, tb2 = sys.exc_info()
+ assert tp2 is Exception
+ assert value2 is e
+ assert tb is get_next(tb2)
+ try:
+ six.reraise(tp, val)
+ except Exception:
+ tp2, value2, tb2 = sys.exc_info()
+ assert tp2 is Exception
+ assert value2 is e
+ assert tb2 is not tb
+ try:
+ six.reraise(tp, val, tb2)
+ except Exception:
+ tp2, value2, tb3 = sys.exc_info()
+ assert tp2 is Exception
+ assert value2 is e
+ assert get_next(tb3) is tb2
+ try:
+ six.reraise(tp, None, tb)
+ except Exception:
+ tp2, value2, tb2 = sys.exc_info()
+ assert tp2 is Exception
+ assert value2 is not val
+ assert isinstance(value2, Exception)
+ assert tb is get_next(tb2)
+
+
+def test_raise_from():
+ try:
+ try:
+ raise Exception("blah")
+ except Exception:
+ ctx = sys.exc_info()[1]
+ f = Exception("foo")
+ six.raise_from(f, None)
+ except Exception:
+ tp, val, tb = sys.exc_info()
+ if sys.version_info[:2] > (3, 0):
+ # We should have done a raise f from None equivalent.
+ assert val.__cause__ is None
+ assert val.__context__ is ctx
+ if sys.version_info[:2] >= (3, 3):
+ # And that should suppress the context on the exception.
+ assert val.__suppress_context__
+ # For all versions the outer exception should have raised successfully.
+ assert str(val) == "foo"
+
+
+def test_print_():
+ save = sys.stdout
+ out = sys.stdout = six.moves.StringIO()
+ try:
+ six.print_("Hello,", "person!")
+ finally:
+ sys.stdout = save
+ assert out.getvalue() == "Hello, person!\n"
+ out = six.StringIO()
+ six.print_("Hello,", "person!", file=out)
+ assert out.getvalue() == "Hello, person!\n"
+ out = six.StringIO()
+ six.print_("Hello,", "person!", file=out, end="")
+ assert out.getvalue() == "Hello, person!"
+ out = six.StringIO()
+ six.print_("Hello,", "person!", file=out, sep="X")
+ assert out.getvalue() == "Hello,Xperson!\n"
+ out = six.StringIO()
+ six.print_(six.u("Hello,"), six.u("person!"), file=out)
+ result = out.getvalue()
+ assert isinstance(result, six.text_type)
+ assert result == six.u("Hello, person!\n")
+ six.print_("Hello", file=None) # This works.
+ out = six.StringIO()
+ six.print_(None, file=out)
+ assert out.getvalue() == "None\n"
+ class FlushableStringIO(six.StringIO):
+ def __init__(self):
+ six.StringIO.__init__(self)
+ self.flushed = False
+ def flush(self):
+ self.flushed = True
+ out = FlushableStringIO()
+ six.print_("Hello", file=out)
+ assert not out.flushed
+ six.print_("Hello", file=out, flush=True)
+ assert out.flushed
+
+
+@pytest.mark.skipif("sys.version_info[:2] >= (2, 6)")
+def test_print_encoding(monkeypatch):
+ # Fool the type checking in print_.
+ monkeypatch.setattr(six, "file", six.BytesIO, raising=False)
+ out = six.BytesIO()
+ out.encoding = "utf-8"
+ out.errors = None
+ six.print_(six.u("\u053c"), end="", file=out)
+ assert out.getvalue() == six.b("\xd4\xbc")
+ out = six.BytesIO()
+ out.encoding = "ascii"
+ out.errors = "strict"
+ pytest.raises(UnicodeEncodeError, six.print_, six.u("\u053c"), file=out)
+ out.errors = "backslashreplace"
+ six.print_(six.u("\u053c"), end="", file=out)
+ assert out.getvalue() == six.b("\\u053c")
+
+
+def test_print_exceptions():
+ pytest.raises(TypeError, six.print_, x=3)
+ pytest.raises(TypeError, six.print_, end=3)
+ pytest.raises(TypeError, six.print_, sep=42)
+
+
+def test_with_metaclass():
+ class Meta(type):
+ pass
+ class X(six.with_metaclass(Meta)):
+ pass
+ assert type(X) is Meta
+ assert issubclass(X, object)
+ class Base(object):
+ pass
+ class X(six.with_metaclass(Meta, Base)):
+ pass
+ assert type(X) is Meta
+ assert issubclass(X, Base)
+ class Base2(object):
+ pass
+ class X(six.with_metaclass(Meta, Base, Base2)):
+ pass
+ assert type(X) is Meta
+ assert issubclass(X, Base)
+ assert issubclass(X, Base2)
+ assert X.__mro__ == (X, Base, Base2, object)
+ class X(six.with_metaclass(Meta)):
+ pass
+ class MetaSub(Meta):
+ pass
+ class Y(six.with_metaclass(MetaSub, X)):
+ pass
+ assert type(Y) is MetaSub
+ assert Y.__mro__ == (Y, X, object)
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (2, 7)")
+def test_with_metaclass_typing():
+ try:
+ import typing
+ except ImportError:
+ pytest.skip("typing module required")
+ class Meta(type):
+ pass
+ if sys.version_info[:2] < (3, 7):
+ # Generics with custom metaclasses were broken on older versions.
+ class Meta(Meta, typing.GenericMeta):
+ pass
+ T = typing.TypeVar('T')
+ class G(six.with_metaclass(Meta, typing.Generic[T])):
+ pass
+ class GA(six.with_metaclass(abc.ABCMeta, typing.Generic[T])):
+ pass
+ assert isinstance(G, Meta)
+ assert isinstance(GA, abc.ABCMeta)
+ assert G[int] is not G[G[int]]
+ assert GA[int] is not GA[GA[int]]
+ assert G.__bases__ == (typing.Generic,)
+ assert G.__orig_bases__ == (typing.Generic[T],)
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (3, 7)")
+def test_with_metaclass_pep_560():
+ class Meta(type):
+ pass
+ class A:
+ pass
+ class B:
+ pass
+ class Fake:
+ def __mro_entries__(self, bases):
+ return (A, B)
+ fake = Fake()
+ class G(six.with_metaclass(Meta, fake)):
+ pass
+ class GA(six.with_metaclass(abc.ABCMeta, fake)):
+ pass
+ assert isinstance(G, Meta)
+ assert isinstance(GA, abc.ABCMeta)
+ assert G.__bases__ == (A, B)
+ assert G.__orig_bases__ == (fake,)
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (3, 0)")
+def test_with_metaclass_prepare():
+ """Test that with_metaclass causes Meta.__prepare__ to be called with the correct arguments."""
+
+ class MyDict(dict):
+ pass
+
+ class Meta(type):
+
+ @classmethod
+ def __prepare__(cls, name, bases):
+ namespace = MyDict(super().__prepare__(name, bases), cls=cls, bases=bases)
+ namespace['namespace'] = namespace
+ return namespace
+
+ class Base(object):
+ pass
+
+ bases = (Base,)
+
+ class X(six.with_metaclass(Meta, *bases)):
+ pass
+
+ assert getattr(X, 'cls', type) is Meta
+ assert getattr(X, 'bases', ()) == bases
+ assert isinstance(getattr(X, 'namespace', {}), MyDict)
+
+
+def test_wraps():
+ def f(g):
+ @six.wraps(g)
+ def w():
+ return 42
+ return w
+ def k():
+ pass
+ original_k = k
+ k = f(f(k))
+ assert hasattr(k, '__wrapped__')
+ k = k.__wrapped__
+ assert hasattr(k, '__wrapped__')
+ k = k.__wrapped__
+ assert k is original_k
+ assert not hasattr(k, '__wrapped__')
+
+ def f(g, assign, update):
+ def w():
+ return 42
+ w.glue = {"foo" : "bar"}
+ return six.wraps(g, assign, update)(w)
+ k.glue = {"melon" : "egg"}
+ k.turnip = 43
+ k = f(k, ["turnip"], ["glue"])
+ assert k.__name__ == "w"
+ assert k.turnip == 43
+ assert k.glue == {"melon" : "egg", "foo" : "bar"}
+
+
+def test_add_metaclass():
+ class Meta(type):
+ pass
+ class X:
+ "success"
+ X = six.add_metaclass(Meta)(X)
+ assert type(X) is Meta
+ assert issubclass(X, object)
+ assert X.__module__ == __name__
+ assert X.__doc__ == "success"
+ class Base(object):
+ pass
+ class X(Base):
+ pass
+ X = six.add_metaclass(Meta)(X)
+ assert type(X) is Meta
+ assert issubclass(X, Base)
+ class Base2(object):
+ pass
+ class X(Base, Base2):
+ pass
+ X = six.add_metaclass(Meta)(X)
+ assert type(X) is Meta
+ assert issubclass(X, Base)
+ assert issubclass(X, Base2)
+
+ # Test a second-generation subclass of a type.
+ class Meta1(type):
+ m1 = "m1"
+ class Meta2(Meta1):
+ m2 = "m2"
+ class Base:
+ b = "b"
+ Base = six.add_metaclass(Meta1)(Base)
+ class X(Base):
+ x = "x"
+ X = six.add_metaclass(Meta2)(X)
+ assert type(X) is Meta2
+ assert issubclass(X, Base)
+ assert type(Base) is Meta1
+ assert "__dict__" not in vars(X)
+ instance = X()
+ instance.attr = "test"
+ assert vars(instance) == {"attr": "test"}
+ assert instance.b == Base.b
+ assert instance.x == X.x
+
+ # Test a class with slots.
+ class MySlots(object):
+ __slots__ = ["a", "b"]
+ MySlots = six.add_metaclass(Meta1)(MySlots)
+
+ assert MySlots.__slots__ == ["a", "b"]
+ instance = MySlots()
+ instance.a = "foo"
+ pytest.raises(AttributeError, setattr, instance, "c", "baz")
+
+ # Test a class with string for slots.
+ class MyStringSlots(object):
+ __slots__ = "ab"
+ MyStringSlots = six.add_metaclass(Meta1)(MyStringSlots)
+ assert MyStringSlots.__slots__ == "ab"
+ instance = MyStringSlots()
+ instance.ab = "foo"
+ pytest.raises(AttributeError, setattr, instance, "a", "baz")
+ pytest.raises(AttributeError, setattr, instance, "b", "baz")
+
+ class MySlotsWeakref(object):
+ __slots__ = "__weakref__",
+ MySlotsWeakref = six.add_metaclass(Meta)(MySlotsWeakref)
+ assert type(MySlotsWeakref) is Meta
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (3, 3)")
+def test_add_metaclass_nested():
+ # Regression test for https://github.com/benjaminp/six/issues/259
+ class Meta(type):
+ pass
+
+ class A:
+ class B: pass
+
+ expected = 'test_add_metaclass_nested.<locals>.A.B'
+
+ assert A.B.__qualname__ == expected
+
+ class A:
+ @six.add_metaclass(Meta)
+ class B: pass
+
+ assert A.B.__qualname__ == expected
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (2, 7) or sys.version_info[:2] in ((3, 0), (3, 1))")
+def test_assertCountEqual():
+ class TestAssertCountEqual(unittest.TestCase):
+ def test(self):
+ with self.assertRaises(AssertionError):
+ six.assertCountEqual(self, (1, 2), [3, 4, 5])
+
+ six.assertCountEqual(self, (1, 2), [2, 1])
+
+ TestAssertCountEqual('test').test()
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (2, 7)")
+def test_assertRegex():
+ class TestAssertRegex(unittest.TestCase):
+ def test(self):
+ with self.assertRaises(AssertionError):
+ six.assertRegex(self, 'test', r'^a')
+
+ six.assertRegex(self, 'test', r'^t')
+
+ TestAssertRegex('test').test()
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (2, 7)")
+def test_assertRaisesRegex():
+ class TestAssertRaisesRegex(unittest.TestCase):
+ def test(self):
+ with six.assertRaisesRegex(self, AssertionError, '^Foo'):
+ raise AssertionError('Foo')
+
+ with self.assertRaises(AssertionError):
+ with six.assertRaisesRegex(self, AssertionError, r'^Foo'):
+ raise AssertionError('Bar')
+
+ TestAssertRaisesRegex('test').test()
+
+
+def test_python_2_unicode_compatible():
+ @six.python_2_unicode_compatible
+ class MyTest(object):
+ def __str__(self):
+ return six.u('hello')
+
+ def __bytes__(self):
+ return six.b('hello')
+
+ my_test = MyTest()
+
+ if six.PY2:
+ assert str(my_test) == six.b("hello")
+ assert unicode(my_test) == six.u("hello")
+ elif six.PY3:
+ assert bytes(my_test) == six.b("hello")
+ assert str(my_test) == six.u("hello")
+
+ assert getattr(six.moves.builtins, 'bytes', str)(my_test) == six.b("hello")
+
+
+class EnsureTests:
+
+ # grinning face emoji
+ UNICODE_EMOJI = six.u("\U0001F600")
+ BINARY_EMOJI = b"\xf0\x9f\x98\x80"
+
+ def test_ensure_binary_raise_type_error(self):
+ with pytest.raises(TypeError):
+ six.ensure_str(8)
+
+ def test_errors_and_encoding(self):
+ six.ensure_binary(self.UNICODE_EMOJI, encoding='latin-1', errors='ignore')
+ with pytest.raises(UnicodeEncodeError):
+ six.ensure_binary(self.UNICODE_EMOJI, encoding='latin-1', errors='strict')
+
+ def test_ensure_binary_raise(self):
+ converted_unicode = six.ensure_binary(self.UNICODE_EMOJI, encoding='utf-8', errors='strict')
+ converted_binary = six.ensure_binary(self.BINARY_EMOJI, encoding="utf-8", errors='strict')
+ if six.PY2:
+ # PY2: unicode -> str
+ assert converted_unicode == self.BINARY_EMOJI and isinstance(converted_unicode, str)
+ # PY2: str -> str
+ assert converted_binary == self.BINARY_EMOJI and isinstance(converted_binary, str)
+ else:
+ # PY3: str -> bytes
+ assert converted_unicode == self.BINARY_EMOJI and isinstance(converted_unicode, bytes)
+ # PY3: bytes -> bytes
+ assert converted_binary == self.BINARY_EMOJI and isinstance(converted_binary, bytes)
+
+ def test_ensure_str(self):
+ converted_unicode = six.ensure_str(self.UNICODE_EMOJI, encoding='utf-8', errors='strict')
+ converted_binary = six.ensure_str(self.BINARY_EMOJI, encoding="utf-8", errors='strict')
+ if six.PY2:
+ # PY2: unicode -> str
+ assert converted_unicode == self.BINARY_EMOJI and isinstance(converted_unicode, str)
+ # PY2: str -> str
+ assert converted_binary == self.BINARY_EMOJI and isinstance(converted_binary, str)
+ else:
+ # PY3: str -> str
+ assert converted_unicode == self.UNICODE_EMOJI and isinstance(converted_unicode, str)
+ # PY3: bytes -> str
+ assert converted_binary == self.UNICODE_EMOJI and isinstance(converted_unicode, str)
+
+ def test_ensure_text(self):
+ converted_unicode = six.ensure_text(self.UNICODE_EMOJI, encoding='utf-8', errors='strict')
+ converted_binary = six.ensure_text(self.BINARY_EMOJI, encoding="utf-8", errors='strict')
+ if six.PY2:
+ # PY2: unicode -> unicode
+ assert converted_unicode == self.UNICODE_EMOJI and isinstance(converted_unicode, unicode)
+ # PY2: str -> unicode
+ assert converted_binary == self.UNICODE_EMOJI and isinstance(converted_unicode, unicode)
+ else:
+ # PY3: str -> str
+ assert converted_unicode == self.UNICODE_EMOJI and isinstance(converted_unicode, str)
+ # PY3: bytes -> str
+ assert converted_binary == self.UNICODE_EMOJI and isinstance(converted_unicode, str)
diff --git a/third_party/python/slugid/PKG-INFO b/third_party/python/slugid/PKG-INFO
new file mode 100644
index 0000000000..ba71c90bab
--- /dev/null
+++ b/third_party/python/slugid/PKG-INFO
@@ -0,0 +1,14 @@
+Metadata-Version: 1.1
+Name: slugid
+Version: 1.0.7
+Summary: Base64 encoded uuid v4 slugs
+Home-page: http://taskcluster.github.io/slugid.py
+Author: Pete Moore
+Author-email: pmoore@mozilla.com
+License: MPL 2.0
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.5
diff --git a/third_party/python/slugid/README.rst b/third_party/python/slugid/README.rst
new file mode 100644
index 0000000000..6b902b4f51
--- /dev/null
+++ b/third_party/python/slugid/README.rst
@@ -0,0 +1,122 @@
+slugid.py - Compressed UUIDs for python
+=======================================
+
+.. image:: https://tools.taskcluster.net/lib/assets/taskcluster-120.png
+
+|Build Status| |Coverage Status| |License| |pypi Version| |Downloads|
+
+A python 2.7 and python 3.5 compatible module for generating v4 UUIDs and
+encoding them into a 22 character URL-safe base64 slug representation (see `RFC
+4648 sec. 5`_).
+
+Slugs are url-safe base64 encoded v4 uuids, stripped of base64 ``=`` padding.
+
+There are two methods for generating slugs - ``slugid.v4()`` and
+``slugid.nice()``.
+
+- The ``slugid.v4()`` method returns a slug from a randomly generated v4 uuid.
+- The ``slugid.nice()`` method returns a v4 slug which conforms to a set of
+ "nice" properties. At the moment the only "nice" property is that the slug
+ starts with ``[A-Za-f]``, which in turn implies that the first (most
+ significant) bit of its associated uuid is set to 0.
+
+The purpose of the ``slugid.nice()`` method is to support having slugids which
+can be used in more contexts safely. Regular slugids can safely be used in
+urls, and for example in AMQP routing keys. However, slugs beginning with ``-``
+may cause problems when used as command line parameters.
+
+In contrast, slugids generated by the ``slugid.nice()`` method can safely be
+used as command line parameters. This comes at a cost to entropy (121 bits vs
+122 bits for regular v4 slugs).
+
+Slug consumers should consider carefully which of these two slug generation
+methods to call. Is it more important to have maximum entropy, or to have
+slugids that do not need special treatment when used as command line
+parameters? This is especially important if you are providing a service which
+supplies slugs to unsuspecting tool developers downstream, who may not realise
+the risks of using your regular v4 slugs as command line parameters, especially
+since this would arise only as an intermittent issue (one time in 64).
+
+Generated slugs take the form ``[A-Za-z0-9_-]{22}``, or more precisely:
+
+- ``slugid.v4()`` slugs conform to
+ ``[A-Za-z0-9_-]{8}[Q-T][A-Za-z0-9_-][CGKOSWaeimquy26-][A-Za-z0-9_-]{10}[AQgw]``
+
+- ``slugid.nice()`` slugs conform to
+ ``[A-Za-f][A-Za-z0-9_-]{7}[Q-T][A-Za-z0-9_-][CGKOSWaeimquy26-][A-Za-z0-9_-]{10}[AQgw]``
+
+RFC 4122 defines the setting of 6 bits of the v4 UUID, which implies v4 slugs
+provide 128 - 6 = 122 bits of entropy. Since "nice" slugs additionally fix the
+first bit, they therefore provide 121 bits of entropy.
+
+
+Usage
+-----
+
+.. code-block:: python
+
+ import slugid
+
+ # Generate "nice" URL-safe base64 encoded UUID version 4 (random)
+ slug = slugid.nice() # a8_YezW8T7e1jLxG7evy-A
+
+ # Alternative, if slugs will not be used as command line parameters
+ slug = slugid.v4() # -9OpXaCORAaFh4sJRk7PUA
+
+ # Get python uuid.UUID object
+ uuid = slugid.decode(slug)
+
+ # Compress to slug again
+ assert(slug == slugid.encode(uuid))
+
+
+RNG Characteristics
+-------------------
+UUID generation is performed by the built-in python `uuid library`_ which does
+not document its randomness, but falls back to system uuid-generation libraries
+where available, then urandom, then random. Generated slugids therefore match
+these RNG characteristics.
+
+License
+-------
+The ``slugid`` library is released under the MPL 2.0 license; see the ``LICENSE``
+file for the complete license text.
+
+Testing
+-------
+
+.. code-block:: bash
+
+ pip install -r requirements.txt
+ tox
+
+Publishing
+----------
+To republish this library to pypi.python.org, update the version number in
+``slugid/__init__.py``, commit it, push to github, and then run:
+
+.. code-block:: bash
+
+ # delete stale versions
+ rm -rf dist
+
+ # build source package
+ python setup.py sdist
+
+ # publish it
+ twine upload -s dist/*
+
+
+.. _RFC 4648 sec. 5: http://tools.ietf.org/html/rfc4648#section-5
+.. _uuid library: https://docs.python.org/2/library/uuid.html
+
+.. |Build Status| image:: https://travis-ci.org/taskcluster/slugid.py.svg?branch=master
+ :target: http://travis-ci.org/taskcluster/slugid.py
+.. |Coverage Status| image:: https://coveralls.io/repos/taskcluster/slugid.py/badge.svg?branch=master&service=github
+ :target: https://coveralls.io/github/taskcluster/slugid.py?branch=master
+.. |License| image:: https://img.shields.io/badge/license-MPL%202.0-orange.svg
+ :target: https://github.com/taskcluster/slugid.py/blob/master/LICENSE
+.. |pypi Version| image:: https://img.shields.io/pypi/v/slugid.svg
+ :target: https://pypi.python.org/pypi/slugid
+.. |Downloads| image:: https://img.shields.io/pypi/dm/slugid.svg
+ :target: https://pypi.python.org/pypi/slugid
diff --git a/third_party/python/slugid/setup.cfg b/third_party/python/slugid/setup.cfg
new file mode 100644
index 0000000000..861a9f5542
--- /dev/null
+++ b/third_party/python/slugid/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/third_party/python/slugid/setup.py b/third_party/python/slugid/setup.py
new file mode 100755
index 0000000000..c45c726bd4
--- /dev/null
+++ b/third_party/python/slugid/setup.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+import re
+from codecs import open
+
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+packages = [
+ 'slugid',
+]
+
+version = ''
+with open('slugid/__init__.py', 'r') as fd:
+ version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
+ fd.read(), re.MULTILINE).group(1)
+
+if not version:
+ raise RuntimeError('Cannot find version information')
+
+setup(
+ name='slugid',
+ version=version,
+ description='Base64 encoded uuid v4 slugs',
+ author='Pete Moore',
+ author_email='pmoore@mozilla.com',
+ url='http://taskcluster.github.io/slugid.py',
+ packages=packages,
+ package_data={'': ['LICENSE', 'README.md']},
+ license='MPL 2.0',
+ classifiers=(
+ 'Intended Audience :: Developers',
+ 'Natural Language :: English',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3.5',
+ ),
+)
diff --git a/third_party/python/slugid/slugid/__init__.py b/third_party/python/slugid/slugid/__init__.py
new file mode 100644
index 0000000000..796f07b65f
--- /dev/null
+++ b/third_party/python/slugid/slugid/__init__.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+
+# **************
+# * Slugid API *
+# **************
+#
+# @)@)
+# _|_| ( )
+# _(___,`\ _,--------------._ (( /`, ))
+# `==` `*-_,' O `~._ ( ( _/ | ) )
+# `, : o } `~._.~` * ',
+# \ - _ O - ,'
+# | ; - - " ; o /
+# | O o ,-`
+# \ _,-:""""""'`:-._ - . O /
+# `""""""~'` `._ _,-`
+# """"""
+
+"""
+SlugID: Base 64 encoded v4 UUIDs
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Usage:
+
+ >>> import slugid
+ >>> s = slugid.nice()
+ >>> s
+ eWIgwMgxSfeXQ36iPbOxiQ
+ >>> u = slugid.decode(s)
+ >>> u
+ UUID('796220c0-c831-49f7-9743-7ea23db3b189')
+ >>> slugid.encode(u)
+ eWIgwMgxSfeXQ36iPbOxiQ
+ >>> slugid.v4()
+ -9OpXaCORAaFh4sJRk7PUA
+"""
+from .slugid import decode, encode, nice, v4
+
+__title__ = 'slugid'
+__version__ = '1.0.7'
+__author__ = 'Peter Moore'
+__license__ = 'MPL 2.0'
+__all__ = [
+ 'decode',
+ 'encode',
+ 'nice',
+ 'v4',
+]
diff --git a/third_party/python/slugid/slugid/slugid.py b/third_party/python/slugid/slugid/slugid.py
new file mode 100644
index 0000000000..2a1377c84b
--- /dev/null
+++ b/third_party/python/slugid/slugid/slugid.py
@@ -0,0 +1,48 @@
+# Licensed under the Mozilla Public Licence 2.0.
+# https://www.mozilla.org/en-US/MPL/2.0
+
+import sys
+import uuid
+import base64
+
+
+def encode(uuid_):
+ """
+ Returns the given uuid.UUID object as a 22 character slug. This can be a
+ regular v4 slug or a "nice" slug.
+ """
+ return base64.urlsafe_b64encode(uuid_.bytes)[:-2] # Drop '==' padding
+
+
+def decode(slug):
+ """
+ Returns the uuid.UUID object represented by the given v4 or "nice" slug
+ """
+ if sys.version_info.major != 2 and isinstance(slug, bytes):
+ slug = slug.decode('ascii')
+ slug = slug + '==' # base64 padding
+ return uuid.UUID(bytes=base64.urlsafe_b64decode(slug))
+
+
+def v4():
+ """
+ Returns a randomly generated uuid v4 compliant slug
+ """
+ return base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2] # Drop '==' padding
+
+
+def nice():
+ """
+ Returns a randomly generated uuid v4 compliant slug which conforms to a set
+ of "nice" properties, at the cost of some entropy. Currently this means one
+ extra fixed bit (the first bit of the uuid is set to 0) which guarantees the
+ slug will begin with [A-Za-f]. For example such slugs don't require special
+ handling when used as command line parameters (whereas non-nice slugs may
+ start with `-` which can confuse command line tools).
+
+ Potentially other "nice" properties may be added in future to further
+ restrict the range of potential uuids that may be generated.
+ """
+ rawBytes = bytearray(uuid.uuid4().bytes)
+ rawBytes[0] = rawBytes[0] & 0x7f # Ensure slug starts with [A-Za-f]
+ return base64.urlsafe_b64encode(rawBytes)[:-2] # Drop '==' padding
diff --git a/third_party/python/taskcluster-urls/LICENSE b/third_party/python/taskcluster-urls/LICENSE
new file mode 100644
index 0000000000..a612ad9813
--- /dev/null
+++ b/third_party/python/taskcluster-urls/LICENSE
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+ means each individual or legal entity that creates, contributes to
+ the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+ means the combination of the Contributions of others (if any) used
+ by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+ means Source Code Form to which the initial Contributor has attached
+ the notice in Exhibit A, the Executable Form of such Source Code
+ Form, and Modifications of such Source Code Form, in each case
+ including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ (a) that the initial Contributor has attached the notice described
+ in Exhibit B to the Covered Software; or
+
+ (b) that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the
+ terms of a Secondary License.
+
+1.6. "Executable Form"
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+ means a work that combines Covered Software with other material, in
+ a separate file or files, that is not Covered Software.
+
+1.8. "License"
+ means this document.
+
+1.9. "Licensable"
+ means having the right to grant, to the maximum extent possible,
+ whether at the time of the initial grant or subsequently, any and
+ all of the rights conveyed by this License.
+
+1.10. "Modifications"
+ means any of the following:
+
+ (a) any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered
+ Software; or
+
+ (b) any new file in Source Code Form that contains any Covered
+ Software.
+
+1.11. "Patent Claims" of a Contributor
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the
+ License, by the making, using, selling, offering for sale, having
+ made, import, or transfer of either its Contributions or its
+ Contributor Version.
+
+1.12. "Secondary License"
+ means either the GNU General Public License, Version 2.0, the GNU
+ Lesser General Public License, Version 2.1, the GNU Affero General
+ Public License, Version 3.0, or any later versions of those
+ licenses.
+
+1.13. "Source Code Form"
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that
+ controls, is controlled by, or is under common control with You. For
+ purposes of this definition, "control" means (a) the power, direct
+ or indirect, to cause the direction or management of such entity,
+ whether by contract or otherwise, or (b) ownership of more than
+ fifty percent (50%) of the outstanding shares or beneficial
+ ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+ for sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+ or
+
+(b) for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+ Form, as described in Section 3.1, and You must inform recipients of
+ the Executable Form how they can obtain a copy of such Source Code
+ Form by reasonable means in a timely manner, at a charge no more
+ than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter
+ the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+* *
+* 6. Disclaimer of Warranty *
+* ------------------------- *
+* *
+* Covered Software is provided under this License on an "as is" *
+* basis, without warranty of any kind, either expressed, implied, or *
+* statutory, including, without limitation, warranties that the *
+* Covered Software is free of defects, merchantable, fit for a *
+* particular purpose or non-infringing. The entire risk as to the *
+* quality and performance of the Covered Software is with You. *
+* Should any Covered Software prove defective in any respect, You *
+* (not any Contributor) assume the cost of any necessary servicing, *
+* repair, or correction. This disclaimer of warranty constitutes an *
+* essential part of this License. No use of any Covered Software is *
+* authorized under this License except under this disclaimer. *
+* *
+************************************************************************
+
+************************************************************************
+* *
+* 7. Limitation of Liability *
+* -------------------------- *
+* *
+* Under no circumstances and under no legal theory, whether tort *
+* (including negligence), contract, or otherwise, shall any *
+* Contributor, or anyone who distributes Covered Software as *
+* permitted above, be liable to You for any direct, indirect, *
+* special, incidental, or consequential damages of any character *
+* including, without limitation, damages for lost profits, loss of *
+* goodwill, work stoppage, computer failure or malfunction, or any *
+* and all other commercial damages or losses, even if such party *
+* shall have been informed of the possibility of such damages. This *
+* limitation of liability shall not apply to liability for death or *
+* personal injury resulting from such party's negligence to the *
+* extent applicable law prohibits such limitation. Some *
+* jurisdictions do not allow the exclusion or limitation of *
+* incidental or consequential damages, so this exclusion and *
+* limitation may not apply to You. *
+* *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+ This Source Code Form is "Incompatible With Secondary Licenses", as
+ defined by the Mozilla Public License, v. 2.0.
diff --git a/third_party/python/taskcluster-urls/MANIFEST.in b/third_party/python/taskcluster-urls/MANIFEST.in
new file mode 100644
index 0000000000..2451f52823
--- /dev/null
+++ b/third_party/python/taskcluster-urls/MANIFEST.in
@@ -0,0 +1,4 @@
+include LICENSE
+global-exclude *.py[co]
+include specification.yml
+include package.json
diff --git a/third_party/python/taskcluster-urls/PKG-INFO b/third_party/python/taskcluster-urls/PKG-INFO
new file mode 100644
index 0000000000..b22650dafd
--- /dev/null
+++ b/third_party/python/taskcluster-urls/PKG-INFO
@@ -0,0 +1,253 @@
+Metadata-Version: 2.1
+Name: taskcluster-urls
+Version: 11.0.0
+Summary: Standardized url generator for taskcluster resources.
+Home-page: https://github.com/taskcluster/taskcluster-lib-urls
+Author: Brian Stack
+Author-email: bstack@mozilla.com
+License: MPL2
+Description: # Taskcluster URL Building Library
+
+ [![License](https://img.shields.io/badge/license-MPL%202.0-orange.svg)](http://mozilla.org/MPL/2.0)
+
+ A simple library to generate URLs for various Taskcluster resources across our various deployment methods.
+
+ This serves as both a simple shim for projects that use JavaScript but also is the reference implementation for
+ how we define these paths.
+
+ URLs are defined in the 'Taskcluster URL Format' document.
+
+ Changelog
+ ---------
+ View the changelog on the [releases page](https://github.com/taskcluster/taskcluster-lib-urls/releases).
+
+ Requirements
+ ------------
+
+ This is tested on and should run on any of Node.js `{8, 10}`.
+
+ JS Usage
+ --------
+ [![Node.js Build Status](https://travis-ci.org/taskcluster/taskcluster-lib-urls.svg?branch=master)](https://travis-ci.org/taskcluster/taskcluster-lib-urls)
+ [![npm](https://img.shields.io/npm/v/taskcluster-lib-urls.svg?maxAge=2592000)](https://www.npmjs.com/package/taskcluster-lib-urls)
+
+ This package exports several methods for generating URLs conditionally based on
+ a root URL, as well as a few helper classes for generating URLs for a pre-determined
+ root URL:
+
+ * `api(rootUrl, service, version, path)` -> `String`
+ * `apiReference(rootUrl, service, version)` -> `String`
+ * `docs(rootUrl, path)` -> `String`
+ * `exchangeReference(rootUrl, service, version)` -> `String`
+ * `schema(rootUrl, service, schema)` -> `String`
+ * `ui(rootUrl, path)` -> `String`
+ * `servicesManifest(rootUrl)` -> `String`
+ * `testRootUrl()` -> `String`
+ * `withRootUrl(rootUrl)` -> `Class` instance for above methods
+
+ When the `rootUrl` is `https://taskcluster.net`, the generated URLs will be to the Heroku cluster. Otherwise they will follow the
+ [spec defined in this project](https://github.com/taskcluster/taskcluster-lib-urls/tree/master/docs/urls-spec.md).
+
+ `testRootUrl()` is used to share a common fake `rootUrl` between various Taskcluster mocks in testing.
+ The URL does not resolve.
+
+ ```js
+ // Specifying root URL every time:
+ const libUrls = require('taskcluster-lib-urls');
+
+ libUrls.api(rootUrl, 'auth', 'v1', 'foo/bar');
+ libUrls.schema(rootUrl, 'auth', 'v1/foo.yml'); // Note that schema names have versions in them
+ libUrls.apiReference(rootUrl, 'auth', 'v1');
+ libUrls.exchangeReference(rootUrl, 'auth', 'v1');
+ libUrls.ui(rootUrl, 'foo/bar');
+ libUrls.servicesManifest(rootUrl);
+ libUrls.docs(rootUrl, 'foo/bar');
+ ```
+
+ ```js
+ // Specifying root URL in advance:
+ const libUrls = require('taskcluster-lib-urls');
+
+ const urls = libUrls.withRoot(rootUrl);
+
+ urls.api('auth', 'v1', 'foo/bar');
+ urls.schema('auth', 'v1/foo.yml');
+ urls.apiReference('auth', 'v1');
+ urls.exchangeReference('auth', 'v1');
+ urls.ui('foo/bar');
+ urls.servicesManifest();
+ urls.docs('foo/bar');
+ ```
+
+ If you would like, you can set this up via [taskcluster-lib-loader](https://github.com/taskcluster/taskcluster-lib-loader) as follows:
+
+ ```js
+ {
+          libUrls: {
+            require: ['cfg'],
+            setup: ({cfg}) => withRootUrl(cfg.rootUrl),
+ },
+ }
+ ```
+
+ Test with:
+
+ ```
+ yarn install
+ yarn test
+ ```
+
+
+ Go Usage
+ --------
+
+ [![GoDoc](https://godoc.org/github.com/taskcluster/taskcluster-lib-urls?status.svg)](https://godoc.org/github.com/taskcluster/taskcluster-lib-urls)
+
+ The go package exports the following functions:
+
+ ```go
+ func API(rootURL string, service string, version string, path string) string
+ func APIReference(rootURL string, service string, version string) string
+ func Docs(rootURL string, path string) string
+ func ExchangeReference(rootURL string, service string, version string) string
+ func Schema(rootURL string, service string, name string) string
+ func UI(rootURL string, path string) string
+ func ServicesManifest(rootURL string) string
+ ```
+
+ Install with:
+
+ ```
+ go install ./..
+ ```
+
+ Test with:
+
+ ```
+ go test -v ./...
+ ```
+
+ Python Usage
+ ------------
+
+ You can install the python client with `pip install taskcluster-urls`;
+
+ ```python
+ import taskcluster_urls
+
+ taskcluster_urls.api(root_url, 'auth', 'v1', 'foo/bar')
+ taskcluster_urls.schema(root_url, 'auth', 'v1/foo.yml') # Note that schema names have versions in them
+ taskcluster_urls.api_reference(root_url, 'auth', 'v1')
+ taskcluster_urls.exchange_reference(root_url, 'auth', 'v1')
+ taskcluster_urls.ui(root_url, 'foo/bar')
+        taskcluster_urls.services_manifest(root_url)
+        taskcluster_urls.docs(root_url, 'foo/bar')
+        ```
+
+        And for testing,
+ ```python
+ taskcluster_urls.test_root_url()
+ ```
+
+ Test with:
+
+ ```
+ tox
+ ```
+
+ Java Usage
+ ----------
+
+ [![JavaDoc](https://img.shields.io/badge/javadoc-reference-blue.svg)](http://taskcluster.github.io/taskcluster-lib-urls/apidocs)
+
+ In order to use this library from your maven project, simply include it as a project dependency:
+
+ ```
+ <project>
+ ...
+ <dependencies>
+ ...
+ <dependency>
+ <groupId>org.mozilla.taskcluster</groupId>
+ <artifactId>taskcluster-lib-urls</artifactId>
+ <version>1.0.0</version>
+ </dependency>
+ </dependencies>
+ </project>
+ ```
+
+ The taskcluster-lib-urls artifacts are now available from the [maven central repository](http://central.sonatype.org/):
+
+ * [Search Results](http://search.maven.org/#search|gav|1|g%3A%22org.mozilla.taskcluster%22%20AND%20a%3A%22taskcluster-lib-urls%22)
+ * [Directory Listing](https://repo1.maven.org/maven2/org/mozilla/taskcluster/taskcluster-lib-urls/)
+
+ To use the library, do as follows:
+
+ ```java
+ import org.mozilla.taskcluster.urls.*;
+
+ ...
+
+ URLProvider urlProvider = URLs.provider("https://mytaskcluster.acme.org");
+
+ String fooBarAPI = urlProvider.api("auth", "v1", "foo/bar");
+ String fooSchema = urlProvider.schema("auth", "v1/foo.yml"); // Note that schema names have versions in them
+ String authAPIRef = urlProvider.apiReference("auth", "v1");
+ String authExchangesRef = urlProvider.exchangeReference("auth", "v1");
+ String uiFooBar = urlProvider.ui("foo/bar");
+ String servicesManifest = urlProvider.servicesManifest();
+ String docsFooBar = urlProvider.docs("foo/bar");
+
+ ...
+ ```
+
+ Install with:
+
+ ```
+ mvn install
+ ```
+
+ Test with:
+
+ ```
+ mvn test
+ ```
+
+
+ Releasing
+ ---------
+
+ New releases should be tested on Travis and Taskcluster to allow for all supported versions of various languages to be tested. Once satisfied that it works, new versions should be created with
+ `npm version` rather than by manually editing `package.json` and tags should be pushed to Github.
+
+ Make the Node release first, as Python's version depends on its `package.json`. This follows the typical tag-and-push-to-publish approach:
+
+ ```sh
+ $ npm version minor # or patch, or major
+ $ git push upstream
+ ```
+
+ Once that's done, build the Python sdists (only possible by the [maintainers on pypi](https://pypi.org/project/taskcluster-urls/#files)):
+
+ ```sh
+ rm -rf dist/*
+ python setup.py sdist bdist_wheel
+ python3 setup.py bdist_wheel
+ pip install twine
+ twine upload dist/*
+ ```
+
+ Make sure to update [the changelog](https://github.com/taskcluster/taskcluster-lib-urls/releases)!
+
+ License
+ -------
+
+ [Mozilla Public License Version 2.0](https://github.com/taskcluster/taskcluster-lib-urls/blob/master/LICENSE)
+
+Platform: UNKNOWN
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Description-Content-Type: text/markdown
diff --git a/third_party/python/taskcluster-urls/README.md b/third_party/python/taskcluster-urls/README.md
new file mode 100644
index 0000000000..46a6d835f3
--- /dev/null
+++ b/third_party/python/taskcluster-urls/README.md
@@ -0,0 +1,236 @@
+# Taskcluster URL Building Library
+
+[![License](https://img.shields.io/badge/license-MPL%202.0-orange.svg)](http://mozilla.org/MPL/2.0)
+
+A simple library to generate URLs for various Taskcluster resources across our various deployment methods.
+
+This serves both as a simple shim for projects that use JavaScript and as the
+reference implementation for how we define these paths.
+
+URLs are defined in the 'Taskcluster URL Format' document.
+
+Changelog
+---------
+View the changelog on the [releases page](https://github.com/taskcluster/taskcluster-lib-urls/releases).
+
+Requirements
+------------
+
+This is tested on and should run on any of Node.js `{8, 10}`.
+
+JS Usage
+--------
+[![Node.js Build Status](https://travis-ci.org/taskcluster/taskcluster-lib-urls.svg?branch=master)](https://travis-ci.org/taskcluster/taskcluster-lib-urls)
+[![npm](https://img.shields.io/npm/v/taskcluster-lib-urls.svg?maxAge=2592000)](https://www.npmjs.com/package/taskcluster-lib-urls)
+
+This package exports several methods for generating URLs conditionally based on
+a root URL, as well as a few helper classes for generating URLs for a pre-determined
+root URL:
+
+* `api(rootUrl, service, version, path)` -> `String`
+* `apiReference(rootUrl, service, version)` -> `String`
+* `docs(rootUrl, path)` -> `String`
+* `exchangeReference(rootUrl, service, version)` -> `String`
+* `schema(rootUrl, service, schema)` -> `String`
+* `ui(rootUrl, path)` -> `String`
+* `servicesManifest(rootUrl)` -> `String`
+* `testRootUrl()` -> `String`
+* `withRootUrl(rootUrl)` -> `Class` instance for above methods
+
+When the `rootUrl` is `https://taskcluster.net`, the generated URLs will be to the Heroku cluster. Otherwise they will follow the
+[spec defined in this project](https://github.com/taskcluster/taskcluster-lib-urls/tree/master/docs/urls-spec.md).
+
+`testRootUrl()` is used to share a common fake `rootUrl` between various Taskcluster mocks in testing.
+The URL does not resolve.
+
+```js
+// Specifying root URL every time:
+const libUrls = require('taskcluster-lib-urls');
+
+libUrls.api(rootUrl, 'auth', 'v1', 'foo/bar');
+libUrls.schema(rootUrl, 'auth', 'v1/foo.yml'); // Note that schema names have versions in them
+libUrls.apiReference(rootUrl, 'auth', 'v1');
+libUrls.exchangeReference(rootUrl, 'auth', 'v1');
+libUrls.ui(rootUrl, 'foo/bar');
+libUrls.servicesManifest(rootUrl);
+libUrls.docs(rootUrl, 'foo/bar');
+```
+
+```js
+// Specifying root URL in advance:
+const libUrls = require('taskcluster-lib-urls');
+
+const urls = libUrls.withRootUrl(rootUrl);
+
+urls.api('auth', 'v1', 'foo/bar');
+urls.schema('auth', 'v1/foo.yml');
+urls.apiReference('auth', 'v1');
+urls.exchangeReference('auth', 'v1');
+urls.ui('foo/bar');
+urls.servicesManifest();
+urls.docs('foo/bar');
+```
+
+If you would like, you can set this up via [taskcluster-lib-loader](https://github.com/taskcluster/taskcluster-lib-loader) as follows:
+
+```js
+{
+  libUrls: {
+    require: ['cfg'],
+    setup: ({cfg}) => withRootUrl(cfg.rootUrl),
+ },
+}
+```
+
+Test with:
+
+```
+yarn install
+yarn test
+```
+
+
+Go Usage
+--------
+
+[![GoDoc](https://godoc.org/github.com/taskcluster/taskcluster-lib-urls?status.svg)](https://godoc.org/github.com/taskcluster/taskcluster-lib-urls)
+
+The go package exports the following functions:
+
+```go
+func API(rootURL string, service string, version string, path string) string
+func APIReference(rootURL string, service string, version string) string
+func Docs(rootURL string, path string) string
+func ExchangeReference(rootURL string, service string, version string) string
+func Schema(rootURL string, service string, name string) string
+func UI(rootURL string, path string) string
+func ServicesManifest(rootURL string) string
+```
+
+Install with:
+
+```
+go install ./...
+```
+
+Test with:
+
+```
+go test -v ./...
+```
+
+Python Usage
+------------
+
+You can install the python client with `pip install taskcluster-urls`:
+
+```python
+import taskcluster_urls
+
+taskcluster_urls.api(root_url, 'auth', 'v1', 'foo/bar')
+taskcluster_urls.schema(root_url, 'auth', 'v1/foo.yml') # Note that schema names have versions in them
+taskcluster_urls.api_reference(root_url, 'auth', 'v1')
+taskcluster_urls.exchange_reference(root_url, 'auth', 'v1')
+taskcluster_urls.ui(root_url, 'foo/bar')
+taskcluster_urls.services_manifest(root_url)
+taskcluster_urls.docs(root_url, 'foo/bar')
+```
+
+And for testing,
+```python
+taskcluster_urls.test_root_url()
+```
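+
+As a quick illustration of the root URL handling described above, here is a
+small sketch using the bundled `taskcluster_urls` module (the root URLs below
+are example values, not real deployments):
+
+```python
+import taskcluster_urls
+
+# The legacy root URL maps to the per-service *.taskcluster.net hostnames...
+assert taskcluster_urls.api('https://taskcluster.net', 'auth', 'v1', 'ping') == \
+    'https://auth.taskcluster.net/v1/ping'
+
+# ...while any other root URL follows the api/<service>/<version>/<path> layout.
+assert taskcluster_urls.api('https://tc.example.com', 'auth', 'v1', 'ping') == \
+    'https://tc.example.com/api/auth/v1/ping'
+```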
+
+Test with:
+
+```
+tox
+```
+
+Java Usage
+----------
+
+[![JavaDoc](https://img.shields.io/badge/javadoc-reference-blue.svg)](http://taskcluster.github.io/taskcluster-lib-urls/apidocs)
+
+In order to use this library from your maven project, simply include it as a project dependency:
+
+```
+<project>
+ ...
+ <dependencies>
+ ...
+ <dependency>
+ <groupId>org.mozilla.taskcluster</groupId>
+ <artifactId>taskcluster-lib-urls</artifactId>
+ <version>1.0.0</version>
+ </dependency>
+ </dependencies>
+</project>
+```
+
+The taskcluster-lib-urls artifacts are now available from the [maven central repository](http://central.sonatype.org/):
+
+* [Search Results](http://search.maven.org/#search|gav|1|g%3A%22org.mozilla.taskcluster%22%20AND%20a%3A%22taskcluster-lib-urls%22)
+* [Directory Listing](https://repo1.maven.org/maven2/org/mozilla/taskcluster/taskcluster-lib-urls/)
+
+To use the library, do as follows:
+
+```java
+import org.mozilla.taskcluster.urls.*;
+
+...
+
+ URLProvider urlProvider = URLs.provider("https://mytaskcluster.acme.org");
+
+ String fooBarAPI = urlProvider.api("auth", "v1", "foo/bar");
+ String fooSchema = urlProvider.schema("auth", "v1/foo.yml"); // Note that schema names have versions in them
+ String authAPIRef = urlProvider.apiReference("auth", "v1");
+ String authExchangesRef = urlProvider.exchangeReference("auth", "v1");
+ String uiFooBar = urlProvider.ui("foo/bar");
+ String servicesManifest = urlProvider.servicesManifest();
+ String docsFooBar = urlProvider.docs("foo/bar");
+
+...
+```
+
+Install with:
+
+```
+mvn install
+```
+
+Test with:
+
+```
+mvn test
+```
+
+
+Releasing
+---------
+
+New releases should be tested on Travis and Taskcluster to allow for all supported versions of various languages to be tested. Once satisfied that it works, new versions should be created with
+`npm version` rather than by manually editing `package.json` and tags should be pushed to Github.
+
+Make the Node release first, as Python's version depends on its `package.json`. This follows the typical tag-and-push-to-publish approach:
+
+```sh
+$ npm version minor # or patch, or major
+$ git push upstream
+```
+
+Once that's done, build the Python sdists (only possible by the [maintainers on pypi](https://pypi.org/project/taskcluster-urls/#files)):
+
+```sh
+rm -rf dist/*
+python setup.py sdist bdist_wheel
+python3 setup.py bdist_wheel
+pip install twine
+twine upload dist/*
+```
+
+Make sure to update [the changelog](https://github.com/taskcluster/taskcluster-lib-urls/releases)!
+
+License
+-------
+
+[Mozilla Public License Version 2.0](https://github.com/taskcluster/taskcluster-lib-urls/blob/master/LICENSE)
diff --git a/third_party/python/taskcluster-urls/package.json b/third_party/python/taskcluster-urls/package.json
new file mode 100644
index 0000000000..66829aac4b
--- /dev/null
+++ b/third_party/python/taskcluster-urls/package.json
@@ -0,0 +1,25 @@
+{
+ "name": "taskcluster-lib-urls",
+ "version": "11.0.0",
+ "author": "Brian Stack <bstack@mozilla.com>",
+ "description": "Build urls for taskcluster resources.",
+ "license": "MPL-2.0",
+ "scripts": {
+ "lint": "eslint src/*.js test/*.js",
+ "pretest": "yarn lint",
+ "test": "mocha test/*_test.js"
+ },
+ "files": [
+ "src"
+ ],
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/taskcluster/taskcluster-lib-urls.git"
+ },
+ "main": "./src/index.js",
+ "devDependencies": {
+ "eslint-config-taskcluster": "^3.1.0",
+ "js-yaml": "^3.11.0",
+ "mocha": "^5.1.1"
+ }
+}
diff --git a/third_party/python/taskcluster-urls/setup.cfg b/third_party/python/taskcluster-urls/setup.cfg
new file mode 100644
index 0000000000..3f5dd53607
--- /dev/null
+++ b/third_party/python/taskcluster-urls/setup.cfg
@@ -0,0 +1,7 @@
+[tool:pytest]
+flake8-max-line-length = 120
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/taskcluster-urls/setup.py b/third_party/python/taskcluster-urls/setup.py
new file mode 100644
index 0000000000..f60108151b
--- /dev/null
+++ b/third_party/python/taskcluster-urls/setup.py
@@ -0,0 +1,28 @@
+import json
+import os
+from setuptools import setup
+
+package_json = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'package.json')
+with open(package_json) as f:
+ version = json.load(f)['version']
+
+setup(
+ name='taskcluster-urls',
+ description='Standardized url generator for taskcluster resources.',
+ long_description=open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
+ long_description_content_type='text/markdown',
+ url='https://github.com/taskcluster/taskcluster-lib-urls',
+ version=version,
+ packages=['taskcluster_urls'],
+ author='Brian Stack',
+ author_email='bstack@mozilla.com',
+ license='MPL2',
+ classifiers=[
+ 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ ],
+)
diff --git a/third_party/python/taskcluster-urls/taskcluster_urls/__init__.py b/third_party/python/taskcluster-urls/taskcluster_urls/__init__.py
new file mode 100644
index 0000000000..0a84845cc6
--- /dev/null
+++ b/third_party/python/taskcluster-urls/taskcluster_urls/__init__.py
@@ -0,0 +1,66 @@
+OLD_ROOT_URL = 'https://taskcluster.net'
+
+def api(root_url, service, version, path):
+ """Generate URL for path in a Taskcluster service."""
+ root_url = root_url.rstrip('/')
+ path = path.lstrip('/')
+ if root_url == OLD_ROOT_URL:
+ return 'https://{}.taskcluster.net/{}/{}'.format(service, version, path)
+ else:
+ return '{}/api/{}/{}/{}'.format(root_url, service, version, path)
+
+def api_reference(root_url, service, version):
+ """Generate URL for a Taskcluster api reference."""
+ root_url = root_url.rstrip('/')
+ if root_url == OLD_ROOT_URL:
+ return 'https://references.taskcluster.net/{}/{}/api.json'.format(service, version)
+ else:
+ return '{}/references/{}/{}/api.json'.format(root_url, service, version)
+
+def docs(root_url, path):
+ """Generate URL for path in the Taskcluster docs."""
+ root_url = root_url.rstrip('/')
+ path = path.lstrip('/')
+ if root_url == OLD_ROOT_URL:
+ return 'https://docs.taskcluster.net/{}'.format(path)
+ else:
+ return '{}/docs/{}'.format(root_url, path)
+
+def exchange_reference(root_url, service, version):
+ """Generate URL for a Taskcluster exchange reference."""
+ root_url = root_url.rstrip('/')
+ if root_url == OLD_ROOT_URL:
+ return 'https://references.taskcluster.net/{}/{}/exchanges.json'.format(service, version)
+ else:
+ return '{}/references/{}/{}/exchanges.json'.format(root_url, service, version)
+
+def schema(root_url, service, name):
+ """Generate URL for a schema in a Taskcluster service."""
+ root_url = root_url.rstrip('/')
+ name = name.lstrip('/')
+ if root_url == OLD_ROOT_URL:
+ return 'https://schemas.taskcluster.net/{}/{}'.format(service, name)
+ else:
+ return '{}/schemas/{}/{}'.format(root_url, service, name)
+
+def ui(root_url, path):
+ """Generate URL for a path in the Taskcluster ui."""
+ root_url = root_url.rstrip('/')
+ path = path.lstrip('/')
+ if root_url == OLD_ROOT_URL:
+ return 'https://tools.taskcluster.net/{}'.format(path)
+ else:
+ return '{}/{}'.format(root_url, path)
+
+def services_manifest(root_url):
+ """Returns a URL for the service manifest of a taskcluster deployment."""
+ root_url = root_url.rstrip('/')
+ if root_url == OLD_ROOT_URL:
+ return 'https://references.taskcluster.net/manifest.json'
+ else:
+ return '{}/references/manifest.json'.format(root_url)
+
+def test_root_url():
+ """Returns a standardized "testing" rootUrl that does not resolve but
+ is easily recognizable in test failures."""
+ return 'https://tc-tests.example.com'
diff --git a/third_party/python/taskcluster/PKG-INFO b/third_party/python/taskcluster/PKG-INFO
new file mode 100644
index 0000000000..9a20850cdc
--- /dev/null
+++ b/third_party/python/taskcluster/PKG-INFO
@@ -0,0 +1,13 @@
+Metadata-Version: 1.1
+Name: taskcluster
+Version: 6.0.0
+Summary: Python client for Taskcluster
+Home-page: https://github.com/taskcluster/taskcluster-client.py
+Author: John Ford
+Author-email: jhford@mozilla.com
+License: UNKNOWN
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
diff --git a/third_party/python/taskcluster/README.md b/third_party/python/taskcluster/README.md
new file mode 100644
index 0000000000..9a0cee7b4a
--- /dev/null
+++ b/third_party/python/taskcluster/README.md
@@ -0,0 +1,4383 @@
+Taskcluster Client Library in Python
+======================================
+
+[![Build Status](https://travis-ci.org/taskcluster/taskcluster-client.py.svg?branch=master)](https://travis-ci.org/taskcluster/taskcluster-client.py)
+
+This is a library used to interact with Taskcluster within Python programs. It
+presents the entire REST API to consumers as well as being able to generate
+URLs signed with Hawk credentials. It can also generate routing keys for
+listening to pulse messages from Taskcluster.
+
+The library builds the REST API methods from the same [API Reference
+format](/docs/manual/design/apis/reference-format) as the
+Javascript client library.
+
+## Generating Temporary Credentials
+If you have non-temporary taskcluster credentials you can generate a set of
+temporary credentials as follows. Notice that the credentials cannot last more
+than 31 days, and you can only revoke them by revoking the credentials that were
+used to issue them (this takes up to one hour).
+
+It is not the responsibility of the caller to apply any clock drift adjustment
+to the start or expiry time - this is handled by the auth service directly.
+
+```python
+import datetime
+
+start = datetime.datetime.now()
+expiry = start + datetime.timedelta(0,60)
+scopes = ['ScopeA', 'ScopeB']
+name = 'foo'
+
+credentials = taskcluster.createTemporaryCredentials(
+ # issuing clientId
+ clientId,
+ # issuing accessToken
+ accessToken,
+ # Validity of temporary credentials starts here, in timestamp
+ start,
+ # Expiration of temporary credentials, in timestamp
+ expiry,
+ # Scopes to grant the temporary credentials
+ scopes,
+ # credential name (optional)
+ name
+)
+```
+
+You cannot use temporary credentials to issue new temporary credentials. You
+must have `auth:create-client:<name>` to create a named temporary credential,
+but unnamed temporary credentials can be created regardless of your scopes.
+
+## API Documentation
+
+The REST API methods are documented in the [reference docs](/docs/reference).
+
+## Query-String arguments
+Query string arguments are now supported. In order to use them, you can call
+a method like this:
+
+```python
+queue.listTaskGroup('JzTGxwxhQ76_Tt1dxkaG5g', query={'continuationToken': outcome.get('continuationToken')})
+```
+
+These query-string arguments are only supported using this calling convention
+
+## Sync vs Async
+
+The objects under `taskcluster` (e.g., `taskcluster.Queue`) are
+python2-compatible and operate synchronously.
+
+
+The objects under `taskcluster.aio` (e.g., `taskcluster.aio.Queue`) require
+`python>=3.6`. The async objects use asyncio coroutines for concurrency; this
+allows us to put I/O operations in the background, so operations that require
+the cpu can happen sooner. Given dozens of operations that can run concurrently
+(e.g., cancelling a medium-to-large task graph), this can result in significant
+performance improvements. The code would look something like
+
+```python
+#!/usr/bin/env python
+import aiohttp
+import asyncio
+from taskcluster.aio import Auth
+
+async def do_ping():
+    async with aiohttp.ClientSession() as session:
+ a = Auth(session=session)
+ print(await a.ping())
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(do_ping())
+```
+
+Other async code examples are available [here](#methods-contained-in-the-client-library).
+
+Here's a slide deck for an [introduction to async python](https://gitpitch.com/escapewindow/slides-sf-2017/async-python).
+
+## Usage
+
+* Here's a simple command:
+
+ ```python
+ import taskcluster
+ index = taskcluster.Index({
+ 'rootUrl': 'https://tc.example.com',
+ 'credentials': {'clientId': 'id', 'accessToken': 'accessToken'},
+ })
+ index.ping()
+ ```
+
+* There are four calling conventions for methods:
+
+ ```python
+  client.method(v1, v2, payload)
+ client.method(payload, k1=v1, k2=v2)
+ client.method(payload=payload, query=query, params={k1: v1, k2: v2})
+ client.method(v1, v2, payload=payload, query=query)
+ ```
+
+* Options for the topic exchange methods can be in the form of either a single
+ dictionary argument or keyword arguments. Only one form is allowed
+
+ ```python
+ from taskcluster import client
+ qEvt = client.QueueEvents({rootUrl: 'https://tc.example.com'})
+ # The following calls are equivalent
+ qEvt.taskCompleted({'taskId': 'atask'})
+ qEvt.taskCompleted(taskId='atask')
+ ```
+
+## Root URL
+
+This client requires a `rootUrl` argument to identify the Taskcluster
+deployment to talk to. As of this writing, the production cluster has rootUrl
+`https://taskcluster.net`.
+
+## Environment Variables
+
+As of version 6.0.0, the client does not read the standard `TASKCLUSTER_…`
+environment variables automatically. To fetch their values explicitly, use
+`taskcluster.optionsFromEnvironment()`:
+
+```python
+auth = taskcluster.Auth(taskcluster.optionsFromEnvironment())
+```
+
+## Pagination
+There are two ways to accomplish pagination easily with the python client. The first is
+to implement pagination in your code:
+```python
+import taskcluster
+queue = taskcluster.Queue({'rootUrl': 'https://tc.example.com'})
+i = 0
+tasks = 0
+outcome = queue.listTaskGroup('JzTGxwxhQ76_Tt1dxkaG5g')
+while True:
+    print('Response %d gave us %d more tasks' % (i, len(outcome.get('tasks', []))))
+    tasks += len(outcome.get('tasks', []))
+    if not outcome.get('continuationToken'):
+        break
+    outcome = queue.listTaskGroup('JzTGxwxhQ76_Tt1dxkaG5g', query={'continuationToken': outcome.get('continuationToken')})
+    i += 1
+print('Task Group %s has %d tasks' % (outcome['taskGroupId'], tasks))
+```
+
+There's also an experimental feature to support built in automatic pagination
+in the sync client. This feature allows passing a callback as the
+'paginationHandler' keyword-argument. This function will be passed the
+response body of the API method as its sole positional argument.
+
+This example of the built in pagination shows how a list of tasks could be
+built and then counted:
+
+```python
+import taskcluster
+queue = taskcluster.Queue({'rootUrl': 'https://tc.example.com'})
+
+responses = []
+
+def handle_page(y):
+ print("%d tasks fetched" % len(y.get('tasks', [])))
+ responses.append(y)
+
+queue.listTaskGroup('JzTGxwxhQ76_Tt1dxkaG5g', paginationHandler=handle_page)
+
+tasks = 0
+for response in responses:
+ tasks += len(response.get('tasks', []))
+
+print("%d requests fetch %d tasks" % (len(responses), tasks))
+```
+
+## Logging
+Logging is set up in `taskcluster/__init__.py`. If the special
+`DEBUG_TASKCLUSTER_CLIENT` environment variable is set, the `__init__.py`
+module will set the `logging` module's level for its logger to `logging.DEBUG`
+and if there are no existing handlers, add a `logging.StreamHandler()`
+instance. This is meant to assist those who do not wish to bother figuring out
+how to configure the python logging module, but do want debug messages.
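+
+A minimal sketch of enabling this from a script (the value `'1'` below is
+arbitrary; per the description above the variable only needs to be set, and it
+must be set before the library is first imported):
+
+```python
+import os
+
+# Configure debug logging; this must happen before the import below, since
+# taskcluster/__init__.py inspects the variable at import time.
+os.environ['DEBUG_TASKCLUSTER_CLIENT'] = '1'
+
+import taskcluster  # the taskcluster logger now emits DEBUG messages
+```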
+
+
+## Scopes
+The `scopeMatch(assumedScopes, requiredScopeSets)` function determines
+whether one or more of a set of required scopes are satisfied by the assumed
+scopes, taking *-expansion into account. This is useful for making local
+decisions on scope satisfaction, but note that `assumed_scopes` must be the
+*expanded* scopes, as this function cannot perform expansion.
+
+It takes a list of assumed scopes and a list of required scope sets in
+disjunctive normal form, and checks if any of the required scope sets are
+satisfied.
+
+Example:
+
+```
+ requiredScopeSets = [
+ ["scopeA", "scopeB"],
+ ["scopeC:*"]
+ ]
+    assert scopeMatch(['scopeA', 'scopeB'], requiredScopeSets)
+    assert scopeMatch(['scopeC:xyz'], requiredScopeSets)
+    assert not scopeMatch(['scopeA'], requiredScopeSets)
+    assert not scopeMatch(['scopeC'], requiredScopeSets)
+```
+
+## Relative Date-time Utilities
+A lot of taskcluster APIs require ISO 8601 time stamps offset into the future
+as a way of providing expiration, deadlines, etc. These can be easily created
+using `datetime.datetime.isoformat()`; however, it can be rather error prone
+and tedious to offset `datetime.datetime` objects into the future. Therefore
+this library comes with two utility functions for this purpose.
+
+```python
+dateObject = taskcluster.fromNow("2 days 3 hours 1 minute")
+# datetime.datetime(2017, 1, 21, 17, 8, 1, 607929)
+dateString = taskcluster.fromNowJSON("2 days 3 hours 1 minute")
+# '2017-01-21T17:09:23.240178Z'
+```
+
+By default it will offset the date time into the future, if the offset strings
+are prefixed minus (`-`) the date object will be offset into the past. This is
+useful in some corner cases.
+
+```python
+dateObject = taskcluster.fromNow("- 1 year 2 months 3 weeks 5 seconds");
+# datetime.datetime(2015, 10, 30, 18, 16, 50, 931161)
+```
+
+The offset string ignores whitespace and is case insensitive. It may also
+optionally be prefixed with plus `+` (if not prefixed with minus); any `+`
+prefix will be ignored. However, entries in the offset string must be given in
+order from high to low, i.e. `2 years 1 day`. Additionally, various shorthands may be
+employed, as illustrated below.
+
+```
+ years, year, yr, y
+ months, month, mo
+ weeks, week, w
+ days, day, d
+ hours, hour, h
+ minutes, minute, min
+ seconds, second, sec, s
+```
+
+The `fromNow` method may also be given a date to be relative to as a second
+argument. This is useful when offsetting the task expiration relative to the
+task deadline, or doing something similar. This argument can also be passed as
+the kwarg `dateObj`:
+
+```python
+dateObject1 = taskcluster.fromNow("2 days 3 hours");
+dateObject2 = taskcluster.fromNow("1 year", dateObject1);
+taskcluster.fromNow("1 year", dateObj=dateObject1);
+# datetime.datetime(2018, 1, 21, 17, 59, 0, 328934)
+```
+
+## Methods contained in the client library
+
+<!-- START OF GENERATED DOCS -->
+
+### Methods in `taskcluster.Auth`
+```python
+import asyncio # Only for async
+# Create Auth client instance
+import taskcluster
+import taskcluster.aio
+
+auth = taskcluster.Auth(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncAuth = taskcluster.aio.Auth(options, session=session)
+```
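+
+Here `options` is the usual client configuration dictionary. As a rough
+sketch (all values below are placeholders, and `credentials` can be omitted
+for calls that do not require authentication):
+
+```python
+options = {
+    'rootUrl': 'https://tc.example.com',  # the Taskcluster deployment to talk to
+    'credentials': {
+        'clientId': 'my-client-id',
+        'accessToken': 'my-access-token',
+    },
+}
+```
+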
+Authentication related API end-points for Taskcluster and related
+services. These API end-points are of interest if you wish to:
+ * Authorize a request signed with Taskcluster credentials,
+ * Manage clients and roles,
+ * Inspect or audit clients and roles,
+ * Gain access to various services guarded by this API.
+
+Note that in this service "authentication" refers to validating the
+correctness of the supplied credentials (that the caller possesses the
+appropriate access token). This service does not provide any kind of user
+authentication (identifying a particular person).
+
+### Clients
+The authentication service manages _clients_, at a high-level each client
+consists of a `clientId`, an `accessToken`, scopes, and some metadata.
+The `clientId` and `accessToken` can be used for authentication when
+calling Taskcluster APIs.
+
+The client's scopes control the client's access to Taskcluster resources.
+The scopes are *expanded* by substituting roles, as defined below.
+
+### Roles
+A _role_ consists of a `roleId`, a set of scopes and a description.
+Each role constitutes a simple _expansion rule_ that says if you have
+the scope: `assume:<roleId>` you get the set of scopes the role has.
+Think of the `assume:<roleId>` as a scope that allows a client to assume
+a role.
+
+As with scopes, the `*` Kleene star also has special meaning if it is
+located at the end of a `roleId`. If you have a role with the `roleId`
+`my-prefix*`, then any client which has a scope starting with
+`assume:my-prefix` will be allowed to assume the role.
+
+### Guarded Services
+The authentication service also has API end-points for delegating access
+to some guarded service such as AWS S3, or Azure Table Storage.
+Generally, we add API end-points to this server when we wish to use
+Taskcluster credentials to grant access to a third-party service used
+by many Taskcluster components.
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+auth.ping() # -> None
+# Async call
+await asyncAuth.ping() # -> None
+```
+
+#### List Clients
+Get a list of all clients. With `prefix`, only clients for which
+it is a prefix of the clientId are returned.
+
+By default this end-point will try to return up to 1000 clients in one
+request. But it **may return fewer, even none**.
+It may also return a `continuationToken` even though there are no more
+results. However, you can only be sure to have seen all results if you
+keep calling `listClients` with the last `continuationToken` until you
+get a result without a `continuationToken`.
+
+
+Required [output schema](v1/list-clients-response.json#)
+
+```python
+# Sync calls
+auth.listClients() # -> result
+# Async call
+await asyncAuth.listClients() # -> result
+```
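+
+If you drive the continuation loop yourself rather than using
+`paginationHandler`, a minimal sketch (using the `query` keyword argument for
+query-string parameters, as in the Pagination section above) could look like:
+
+```python
+outcome = auth.listClients()
+pages = 1
+# Keep requesting pages until no continuationToken is returned.
+while outcome.get('continuationToken'):
+    outcome = auth.listClients(
+        query={'continuationToken': outcome['continuationToken']})
+    pages += 1
+print("fetched %d page(s) of clients" % pages)
+```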
+
+#### Get Client
+Get information about a single client.
+
+
+
+Takes the following arguments:
+
+ * `clientId`
+
+Required [output schema](v1/get-client-response.json#)
+
+```python
+# Sync calls
+auth.client(clientId) # -> result
+auth.client(clientId='value') # -> result
+# Async call
+await asyncAuth.client(clientId) # -> result
+await asyncAuth.client(clientId='value') # -> result
+```
+
+#### Create Client
+Create a new client and get the `accessToken` for this client.
+You should store the `accessToken` from this API call as there is no
+other way to retrieve it.
+
+If you lose the `accessToken` you can call `resetAccessToken` to reset
+it, and a new `accessToken` will be returned, but you cannot retrieve the
+current `accessToken`.
+
+If a client with the same `clientId` already exists this operation will
+fail. Use `updateClient` if you wish to update an existing client.
+
+The caller's scopes must satisfy `scopes`.
+
+
+
+Takes the following arguments:
+
+ * `clientId`
+
+Required [input schema](v1/create-client-request.json#)
+
+Required [output schema](v1/create-client-response.json#)
+
+```python
+# Sync calls
+auth.createClient(clientId, payload) # -> result
+auth.createClient(payload, clientId='value') # -> result
+# Async call
+await asyncAuth.createClient(clientId, payload) # -> result
+await asyncAuth.createClient(payload, clientId='value') # -> result
+```
+
+#### Reset `accessToken`
+Reset a client's `accessToken`. This will revoke the existing
+`accessToken`, generate a new `accessToken`, and return it from this
+call.
+
+There is no way to retrieve an existing `accessToken`, so if you lose it
+you must reset the `accessToken` to acquire it again.
+
+
+
+Takes the following arguments:
+
+ * `clientId`
+
+Required [output schema](v1/create-client-response.json#)
+
+```python
+# Sync calls
+auth.resetAccessToken(clientId) # -> result
+auth.resetAccessToken(clientId='value') # -> result
+# Async call
+await asyncAuth.resetAccessToken(clientId) # -> result
+await asyncAuth.resetAccessToken(clientId='value') # -> result
+```
+
+#### Update Client
+Update an existing client. The `clientId` and `accessToken` cannot be
+updated, but `scopes` can be modified. The caller's scopes must
+satisfy all scopes being added to the client in the update operation.
+If no scopes are given in the request, the client's scopes remain
+unchanged.
+
+
+
+Takes the following arguments:
+
+ * `clientId`
+
+Required [input schema](v1/create-client-request.json#)
+
+Required [output schema](v1/get-client-response.json#)
+
+```python
+# Sync calls
+auth.updateClient(clientId, payload) # -> result
+auth.updateClient(payload, clientId='value') # -> result
+# Async call
+await asyncAuth.updateClient(clientId, payload) # -> result
+await asyncAuth.updateClient(payload, clientId='value') # -> result
+```
+
+#### Enable Client
+Enable a client that was disabled with `disableClient`. If the client
+is already enabled, this does nothing.
+
+This is typically used by identity providers to re-enable clients that
+had been disabled when the corresponding identity's scopes changed.
+
+
+
+Takes the following arguments:
+
+ * `clientId`
+
+Required [output schema](v1/get-client-response.json#)
+
+```python
+# Sync calls
+auth.enableClient(clientId) # -> result
+auth.enableClient(clientId='value') # -> result
+# Async call
+await asyncAuth.enableClient(clientId) # -> result
+await asyncAuth.enableClient(clientId='value') # -> result
+```
+
+#### Disable Client
+Disable a client. If the client is already disabled, this does nothing.
+
+This is typically used by identity providers to disable clients when the
+corresponding identity's scopes no longer satisfy the client's scopes.
+
+
+
+Takes the following arguments:
+
+ * `clientId`
+
+Required [output schema](v1/get-client-response.json#)
+
+```python
+# Sync calls
+auth.disableClient(clientId) # -> result
+auth.disableClient(clientId='value') # -> result
+# Async call
+await asyncAuth.disableClient(clientId) # -> result
+await asyncAuth.disableClient(clientId='value') # -> result
+```
+
+#### Delete Client
+Delete a client. Please note that any roles related to this client must
+be deleted independently.
+
+
+
+Takes the following arguments:
+
+ * `clientId`
+
+```python
+# Sync calls
+auth.deleteClient(clientId) # -> None
+auth.deleteClient(clientId='value') # -> None
+# Async call
+await asyncAuth.deleteClient(clientId) # -> None
+await asyncAuth.deleteClient(clientId='value') # -> None
+```
+
+#### List Roles
+Get a list of all roles, each role object also includes the list of
+scopes it expands to.
+
+
+Required [output schema](v1/list-roles-response.json#)
+
+```python
+# Sync calls
+auth.listRoles() # -> result
+# Async call
+await asyncAuth.listRoles() # -> result
+```
+
+#### Get Role
+Get information about a single role, including the set of scopes that the
+role expands to.
+
+
+
+Takes the following arguments:
+
+ * `roleId`
+
+Required [output schema](v1/get-role-response.json#)
+
+```python
+# Sync calls
+auth.role(roleId) # -> result
+auth.role(roleId='value') # -> result
+# Async call
+await asyncAuth.role(roleId) # -> result
+await asyncAuth.role(roleId='value') # -> result
+```
+
+#### Create Role
+Create a new role.
+
+The caller's scopes must satisfy the new role's scopes.
+
+If there already exists a role with the same `roleId` this operation
+will fail. Use `updateRole` to modify an existing role.
+
+Creation of a role that will generate an infinite expansion will result
+in an error response.
+
+
+
+Takes the following arguments:
+
+ * `roleId`
+
+Required [input schema](v1/create-role-request.json#)
+
+Required [output schema](v1/get-role-response.json#)
+
+```python
+# Sync calls
+auth.createRole(roleId, payload) # -> result
+auth.createRole(payload, roleId='value') # -> result
+# Async call
+await asyncAuth.createRole(roleId, payload) # -> result
+await asyncAuth.createRole(payload, roleId='value') # -> result
+```
+
+#### Update Role
+Update an existing role.
+
+The caller's scopes must satisfy all of the new scopes being added, but
+need not satisfy all of the client's existing scopes.
+
+An update of a role that will generate an infinite expansion will result
+in an error response.
+
+
+
+Takes the following arguments:
+
+ * `roleId`
+
+Required [input schema](v1/create-role-request.json#)
+
+Required [output schema](v1/get-role-response.json#)
+
+```python
+# Sync calls
+auth.updateRole(roleId, payload) # -> result
+auth.updateRole(payload, roleId='value') # -> result
+# Async call
+await asyncAuth.updateRole(roleId, payload) # -> result
+await asyncAuth.updateRole(payload, roleId='value') # -> result
+```
+
+#### Delete Role
+Delete a role. This operation will succeed regardless of whether or not
+the role exists.
+
+
+
+Takes the following arguments:
+
+ * `roleId`
+
+```python
+# Sync calls
+auth.deleteRole(roleId) # -> None
+auth.deleteRole(roleId='value') # -> None
+# Async call
+await asyncAuth.deleteRole(roleId) # -> None
+await asyncAuth.deleteRole(roleId='value') # -> None
+```
+
+#### Expand Scopes
+Return an expanded copy of the given scopeset, with scopes implied by any
+roles included.
+
+This call uses the GET method with an HTTP body. It remains only for
+backward compatibility.
+
+
+Required [input schema](v1/scopeset.json#)
+
+Required [output schema](v1/scopeset.json#)
+
+```python
+# Sync calls
+auth.expandScopesGet(payload) # -> result
+# Async call
+await asyncAuth.expandScopesGet(payload) # -> result
+```
+
+#### Expand Scopes
+Return an expanded copy of the given scopeset, with scopes implied by any
+roles included.
+
+
+Required [input schema](v1/scopeset.json#)
+
+Required [output schema](v1/scopeset.json#)
+
+```python
+# Sync calls
+auth.expandScopes(payload) # -> result
+# Async call
+await asyncAuth.expandScopes(payload) # -> result
+```
+
+#### Get Current Scopes
+Return the expanded scopes available in the request, taking into account all sources
+of scopes and scope restrictions (temporary credentials, assumeScopes, client scopes,
+and roles).
+
+
+Required [output schema](v1/scopeset.json#)
+
+```python
+# Sync calls
+auth.currentScopes() # -> result
+# Async call
+await asyncAuth.currentScopes() # -> result
+```
+
+#### Get Temporary Read/Write Credentials S3
+Get temporary AWS credentials for `read-write` or `read-only` access to
+a given `bucket` and `prefix` within that bucket.
+The `level` parameter can be `read-write` or `read-only` and determines
+which type of credentials are returned. Please note that the `level`
+parameter is required in the scope guarding access. The bucket name must
+not contain `.`, as recommended by Amazon.
+
+This method can only allow access to a whitelisted set of buckets. To add
+a bucket to that whitelist, contact the Taskcluster team, who will add it to
+the appropriate IAM policy. If the bucket is in a different AWS account, you
+will also need to add a bucket policy allowing access from the Taskcluster
+account. That policy should look like this:
+
+```js
+{
+ "Version": "2012-10-17",
+ "Statement": [
+ {
+ "Sid": "allow-taskcluster-auth-to-delegate-access",
+ "Effect": "Allow",
+ "Principal": {
+ "AWS": "arn:aws:iam::692406183521:root"
+ },
+ "Action": [
+ "s3:ListBucket",
+ "s3:GetObject",
+ "s3:PutObject",
+ "s3:DeleteObject",
+ "s3:GetBucketLocation"
+ ],
+ "Resource": [
+ "arn:aws:s3:::<bucket>",
+ "arn:aws:s3:::<bucket>/*"
+ ]
+ }
+ ]
+}
+```
+
+The credentials are set to expire after an hour, but this behavior is
+subject to change. Hence, you should always read the `expires` property
+from the response, if you intend to maintain active credentials in your
+application.
+
+Please note that your `prefix` may not start with slash `/`. Such a prefix
+is allowed on S3, but we forbid it here to discourage bad behavior.
+
+Also note that if your `prefix` doesn't end in a slash `/`, the STS
+credentials may allow access to unexpected keys, as S3 does not treat
+slashes specially. For example, a prefix of `my-folder` will allow
+access to `my-folder/file.txt` as expected, but also to `my-folder.txt`,
+which may not be intended.
+
+Finally, note that the `PutObjectAcl` call is not allowed. Passing a canned
+ACL other than `private` to `PutObject` is treated as a `PutObjectAcl` call, and
+will result in an access-denied error from AWS. This limitation is due to a
+security flaw in Amazon S3 which might otherwise allow indefinite access to
+uploaded objects.
+
+**EC2 metadata compatibility**: if the querystring parameter
+`?format=iam-role-compat` is given, the response will be compatible
+with the JSON exposed by the EC2 metadata service. This aims to ease
+compatibility for libraries and tools built to auto-refresh credentials.
+For details on the format returned by EC2 metadata service see:
+[EC2 User Guide](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html#instance-metadata-security-credentials).
+
+
+
+Takes the following arguments:
+
+ * `level`
+ * `bucket`
+ * `prefix`
+
+Required [output schema](v1/aws-s3-credentials-response.json#)
+
+```python
+# Sync calls
+auth.awsS3Credentials(level, bucket, prefix) # -> result
+auth.awsS3Credentials(level='value', bucket='value', prefix='value') # -> result
+# Async call
+await asyncAuth.awsS3Credentials(level, bucket, prefix) # -> result
+await asyncAuth.awsS3Credentials(level='value', bucket='value', prefix='value') # -> result
+```
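+
+For example, a minimal sketch of honoring the `expires` property (the exact
+layout of the response is defined by the output schema linked above; the
+`credentials` key below is only illustrative):
+
+```python
+response = auth.awsS3Credentials('read-only', 'some-bucket', 'some-prefix/')
+
+# Refresh before this point in time if you need longer-lived access.
+print("credentials expire at:", response['expires'])
+
+# Temporary AWS credentials to hand to the S3 client of your choice.
+aws_creds = response['credentials']
+```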
+
+#### List Accounts Managed by Auth
+Retrieve a list of all Azure accounts managed by Taskcluster Auth.
+
+
+Required [output schema](v1/azure-account-list-response.json#)
+
+```python
+# Sync calls
+auth.azureAccounts() # -> result
+# Async call
+await asyncAuth.azureAccounts() # -> result
+```
+
+#### List Tables in an Account Managed by Auth
+Retrieve a list of all tables in an account.
+
+
+
+Takes the following arguments:
+
+ * `account`
+
+Required [output schema](v1/azure-table-list-response.json#)
+
+```python
+# Sync calls
+auth.azureTables(account) # -> result
+auth.azureTables(account='value') # -> result
+# Async call
+await asyncAuth.azureTables(account) # -> result
+await asyncAuth.azureTables(account='value') # -> result
+```
+
+#### Get Shared-Access-Signature for Azure Table
+Get a shared access signature (SAS) string for use with a specific Azure
+Table Storage table.
+
+The `level` parameter can be `read-write` or `read-only` and determines
+which type of credentials are returned. If level is read-write, it will create the
+table if it doesn't already exist.
+
+
+
+Takes the following arguments:
+
+ * `account`
+ * `table`
+ * `level`
+
+Required [output schema](v1/azure-table-access-response.json#)
+
+```python
+# Sync calls
+auth.azureTableSAS(account, table, level) # -> result
+auth.azureTableSAS(account='value', table='value', level='value') # -> result
+# Async call
+await asyncAuth.azureTableSAS(account, table, level) # -> result
+await asyncAuth.azureTableSAS(account='value', table='value', level='value') # -> result
+```
+
+#### List containers in an Account Managed by Auth
+Retrieve a list of all containers in an account.
+
+
+
+Takes the following arguments:
+
+ * `account`
+
+Required [output schema](v1/azure-container-list-response.json#)
+
+```python
+# Sync calls
+auth.azureContainers(account) # -> result
+auth.azureContainers(account='value') # -> result
+# Async call
+await asyncAuth.azureContainers(account) # -> result
+await asyncAuth.azureContainers(account='value') # -> result
+```
+
+#### Get Shared-Access-Signature for Azure Container
+Get a shared access signature (SAS) string for use with a specific Azure
+Blob Storage container.
+
+The `level` parameter can be `read-write` or `read-only` and determines
+which type of credentials are returned. If level is read-write, it will create the
+container if it doesn't already exist.
+
+
+
+Takes the following arguments:
+
+ * `account`
+ * `container`
+ * `level`
+
+Required [output schema](v1/azure-container-response.json#)
+
+```python
+# Sync calls
+auth.azureContainerSAS(account, container, level) # -> result
+auth.azureContainerSAS(account='value', container='value', level='value') # -> result
+# Async call
+await asyncAuth.azureContainerSAS(account, container, level) # -> result
+await asyncAuth.azureContainerSAS(account='value', container='value', level='value') # -> result
+```
+
+#### Get DSN for Sentry Project
+Get temporary DSN (access credentials) for a sentry project.
+The credentials returned can be used with any Sentry client for up to
+24 hours, after which the credentials will be automatically disabled.
+
+If the project doesn't exist it will be created and assigned to the
+initial team configured for this component. Contact a Sentry admin
+to have the project transferred to a team you have access to, if needed.
+
+
+
+Takes the following arguments:
+
+ * `project`
+
+Required [output schema](v1/sentry-dsn-response.json#)
+
+```python
+# Sync calls
+auth.sentryDSN(project) # -> result
+auth.sentryDSN(project='value') # -> result
+# Async call
+await asyncAuth.sentryDSN(project) # -> result
+await asyncAuth.sentryDSN(project='value') # -> result
+```
+
+#### Get Token for Statsum Project
+Get temporary `token` and `baseUrl` for sending metrics to statsum.
+
+The token is valid for 24 hours, clients should refresh after expiration.
+
+
+
+Takes the following arguments:
+
+ * `project`
+
+Required [output schema](v1/statsum-token-response.json#)
+
+```python
+# Sync calls
+auth.statsumToken(project) # -> result
+auth.statsumToken(project='value') # -> result
+# Async call
+await asyncAuth.statsumToken(project) # -> result
+await asyncAuth.statsumToken(project='value') # -> result
+```
+
+#### Get Token for Webhooktunnel Proxy
+Get temporary `token` and `id` for connecting to webhooktunnel.
+The token is valid for 96 hours; clients should refresh after expiration.
+
+
+Required [output schema](v1/webhooktunnel-token-response.json#)
+
+```python
+# Sync calls
+auth.webhooktunnelToken() # -> result
+# Async call
+await asyncAuth.webhooktunnelToken() # -> result
+```
+
+#### Authenticate Hawk Request
+Validate the request signature given on input and return list of scopes
+that the authenticating client has.
+
+This method is used by other services that wish to rely on Taskcluster
+credentials for authentication. This way we can use Hawk without having
+the secret credentials leave this service.
+
+
+Required [input schema](v1/authenticate-hawk-request.json#)
+
+Required [output schema](v1/authenticate-hawk-response.json#)
+
+```python
+# Sync calls
+auth.authenticateHawk(payload) # -> result
+# Async call
+await asyncAuth.authenticateHawk(payload) # -> result
+```
+
+#### Test Authentication
+Utility method to test client implementations of Taskcluster
+authentication.
+
+Rather than using real credentials, this endpoint accepts requests with
+clientId `tester` and accessToken `no-secret`. That client's scopes are
+based on `clientScopes` in the request body.
+
+The request is validated, with any certificate, authorizedScopes, etc.
+applied, and the resulting scopes are checked against `requiredScopes`
+from the request body. On success, the response contains the clientId
+and scopes as seen by the API method.
+
+
+Required [input schema](v1/test-authenticate-request.json#)
+
+Required [output schema](v1/test-authenticate-response.json#)
+
+```python
+# Sync calls
+auth.testAuthenticate(payload) # -> result
+# Async call
+await asyncAuth.testAuthenticate(payload) # -> result
+```
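+
+For example, a minimal sketch (the property names follow the description
+above; see the request schema for the full definition, and note the fixed
+test credentials):
+
+```python
+import taskcluster
+
+tester = taskcluster.Auth({
+    'rootUrl': 'https://tc.example.com',  # placeholder deployment
+    'credentials': {'clientId': 'tester', 'accessToken': 'no-secret'},
+})
+result = tester.testAuthenticate({
+    'clientScopes': ['test:foo', 'test:bar'],  # scopes granted to the fake client
+    'requiredScopes': ['test:foo'],            # scopes the call must satisfy
+})
+print(result['clientId'], result['scopes'])
+```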
+
+#### Test Authentication (GET)
+Utility method similar to `testAuthenticate`, but with the GET method,
+so it can be used with signed URLs (bewits).
+
+Rather than using real credentials, this endpoint accepts requests with
+clientId `tester` and accessToken `no-secret`. That client's scopes are
+`['test:*', 'auth:create-client:test:*']`. The call fails if the
+`test:authenticate-get` scope is not available.
+
+The request is validated, with any certificate, authorizedScopes, etc.
+applied, and the resulting scopes are checked, just like any API call.
+On success, the response contains the clientId and scopes as seen by
+the API method.
+
+This method may later be extended to allow specification of client and
+required scopes via query arguments.
+
+
+Required [output schema](v1/test-authenticate-response.json#)
+
+```python
+# Sync calls
+auth.testAuthenticateGet() # -> result
+# Async call
+await asyncAuth.testAuthenticateGet() # -> result
+```
+
+
+
+
+### Exchanges in `taskcluster.AuthEvents`
+```python
+# Create AuthEvents client instance
+import taskcluster
+authEvents = taskcluster.AuthEvents(options)
+```
+The auth service is responsible for storing credentials, managing
+assignment of scopes, and validation of request signatures from other
+services.
+
+These exchanges provide notifications when credentials or roles are
+updated. This is mostly so that multiple instances of the auth service
+can purge their caches and synchronize state. But you are of course
+welcome to use these for other purposes, monitoring changes for example.
+#### Client Created Messages
+ * `authEvents.clientCreated(routingKeyPattern) -> routingKey`
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Client Updated Messages
+ * `authEvents.clientUpdated(routingKeyPattern) -> routingKey`
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Client Deleted Messages
+ * `authEvents.clientDeleted(routingKeyPattern) -> routingKey`
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Role Created Messages
+ * `authEvents.roleCreated(routingKeyPattern) -> routingKey`
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Role Updated Messages
+ * `authEvents.roleUpdated(routingKeyPattern) -> routingKey`
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Role Deleted Messages
+ * `authEvents.roleDeleted(routingKeyPattern) -> routingKey`
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+
+
+
+### Methods in `taskcluster.AwsProvisioner`
+```python
+import asyncio # Only for async
+# Create AwsProvisioner client instance
+import taskcluster
+import taskcluster.aio
+
+awsProvisioner = taskcluster.AwsProvisioner(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncAwsProvisioner = taskcluster.aio.AwsProvisioner(options, session=session)
+```
+The AWS Provisioner is responsible for provisioning instances on EC2 for use in
+Taskcluster. The provisioner maintains a set of worker configurations which
+can be managed with an API that is typically available at
+aws-provisioner.taskcluster.net/v1. This API can also perform basic instance
+management tasks in addition to maintaining the internal state of worker type
+configuration information.
+
+The Provisioner runs at a configurable interval. Each iteration of the
+provisioner fetches a current copy of the state that the AWS EC2 API reports. In
+each iteration, we ask the Queue how many tasks are pending for that worker
+type. Based on the number of tasks pending and the scaling ratio, we may
+submit requests for new instances. We use pricing information, capacity and
+utility factor information to decide which instance type in which region would
+be the optimal configuration.
+
+Each EC2 instance type will declare a capacity and utility factor. Capacity is
+the number of tasks that a given machine is capable of running concurrently.
+Utility factor is a relative measure of performance between two instance types.
+We multiply the utility factor by the spot price to compare instance types and
+regions when making the bidding choices.
+
+When a new EC2 instance is instantiated, its user data contains a token in
+`securityToken` that can be used with the `getSecret` method to retrieve
+the worker's credentials and any needed passwords or other restricted
+information. The worker is responsible for deleting the secret after
+retrieving it, to prevent dissemination of the secret to other processes
+which can read the instance user data.
+
+#### List worker types with details
+Return a list of worker types, including some summary information about
+current capacity for each. While this list includes all defined worker types,
+there may be running EC2 instances for deleted worker types that are not
+included here. The list is unordered.
+
+
+Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/list-worker-types-summaries-response.json#)
+
+```python
+# Sync calls
+awsProvisioner.listWorkerTypeSummaries() # -> result
+# Async call
+await asyncAwsProvisioner.listWorkerTypeSummaries() # -> result
+```
+
+#### Create new Worker Type
+Create a worker type. A worker type contains all the configuration
+needed for the provisioner to manage the instances. Each worker type
+knows which regions and which instance types are allowed for that
+worker type. Remember that Capacity is the number of concurrent tasks
+that can be run on a given EC2 resource and that Utility is the relative
+performance rate between different instance types. There is no way to
+configure different regions to have different sets of instance types
+so ensure that all instance types are available in all regions.
+This function is idempotent.
+
+Once a worker type is in the provisioner, a background process will
+begin creating instances for it based on its capacity bounds and its
+pending task count from the Queue. It is the worker's responsibility
+to shut itself down. The provisioner has a limit (currently 96 hours)
+for all instances to prevent zombie instances from running indefinitely.
+
+The provisioner will ensure that all instances created are tagged with
+aws resource tags containing the provisioner id and the worker type.
+
+If provided, the secrets in the global, region and instance type sections
+are available using the secrets api. If specified, the scopes provided
+will be used to generate a set of temporary credentials available with
+the other secrets.
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+Required [input schema](http://schemas.taskcluster.net/aws-provisioner/v1/create-worker-type-request.json#)
+
+Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#)
+
+```python
+# Sync calls
+awsProvisioner.createWorkerType(workerType, payload) # -> result
+awsProvisioner.createWorkerType(payload, workerType='value') # -> result
+# Async call
+await asyncAwsProvisioner.createWorkerType(workerType, payload) # -> result
+await asyncAwsProvisioner.createWorkerType(payload, workerType='value') # -> result
+```
+
+#### Update Worker Type
+Provide a new copy of a worker type to replace the existing one.
+This will overwrite the existing worker type definition if there
+is already a worker type of that name. This method will return a
+200 response along with a copy of the worker type definition created.
+Note that if you are using the result of a GET on the worker-type
+end point, you will need to delete the `lastModified` and `workerType`
+keys from the object returned, since those fields are not allowed in
+the request body for this method.
+
+Otherwise, all input requirements and actions are the same as the
+create method.
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+Required [input schema](http://schemas.taskcluster.net/aws-provisioner/v1/create-worker-type-request.json#)
+
+Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#)
+
+```python
+# Sync calls
+awsProvisioner.updateWorkerType(workerType, payload) # -> result
+awsProvisioner.updateWorkerType(payload, workerType='value') # -> result
+# Async call
+await asyncAwsProvisioner.updateWorkerType(workerType, payload) # -> result
+await asyncAwsProvisioner.updateWorkerType(payload, workerType='value') # -> result
+```
+
+#### Get Worker Type Last Modified Time
+This method is provided to allow workers to see when they were
+last modified. The value provided through UserData can be
+compared against this value to see if changes have been made.
+If the worker type definition has not been changed, the date
+should be identical, as it is the same stored value.
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-last-modified.json#)
+
+```python
+# Sync calls
+awsProvisioner.workerTypeLastModified(workerType) # -> result
+awsProvisioner.workerTypeLastModified(workerType='value') # -> result
+# Async call
+await asyncAwsProvisioner.workerTypeLastModified(workerType) # -> result
+await asyncAwsProvisioner.workerTypeLastModified(workerType='value') # -> result
+```
+
+#### Get Worker Type
+Retrieve a copy of the requested worker type definition.
+This copy contains a lastModified field as well as the worker
+type name. As such, it will require manipulation to be able to
+use the results of this method to submit data to the update
+method.
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#)
+
+```python
+# Sync calls
+awsProvisioner.workerType(workerType) # -> result
+awsProvisioner.workerType(workerType='value') # -> result
+# Async call
+await asyncAwsProvisioner.workerType(workerType) # -> result
+await asyncAwsProvisioner.workerType(workerType='value') # -> result
+```
+
+#### Delete Worker Type
+Delete a worker type definition. This method will only delete
+the worker type definition from the storage table. The actual
+deletion will be handled by a background worker. As soon as this
+method is called for a worker type, the background worker will
+immediately submit requests to cancel all spot requests for this
+worker type as well as killing all instances regardless of their
+state. If you want to gracefully remove a worker type, you must
+either ensure that no tasks are created with that worker type name,
+or you could theoretically set `maxCapacity` to 0, though this is
+not a supported or tested action.
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+```python
+# Sync calls
+awsProvisioner.removeWorkerType(workerType) # -> None
+awsProvisioner.removeWorkerType(workerType='value') # -> None
+# Async call
+await asyncAwsProvisioner.removeWorkerType(workerType) # -> None
+await asyncAwsProvisioner.removeWorkerType(workerType='value') # -> None
+```
+
+#### List Worker Types
+Return a list of string worker type names. These are the names
+of all managed worker types known to the provisioner. This does
+not include worker types which are leftovers from a deleted worker
+type definition but are still running in AWS.
+
+
+Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/list-worker-types-response.json#)
+
+```python
+# Sync calls
+awsProvisioner.listWorkerTypes() # -> result
+# Async call
+await asyncAwsProvisioner.listWorkerTypes() # -> result
+```
+
+#### Create new Secret
+Insert a secret into the secret storage. The supplied secrets will
+be provided verbatim via `getSecret`, while the supplied scopes will
+be converted into credentials by `getSecret`.
+
+This method is not ordinarily used in production; instead, the provisioner
+creates a new secret directly for each spot bid.
+
+
+
+Takes the following arguments:
+
+ * `token`
+
+Required [input schema](http://schemas.taskcluster.net/aws-provisioner/v1/create-secret-request.json#)
+
+```python
+# Sync calls
+awsProvisioner.createSecret(token, payload) # -> None
+awsProvisioner.createSecret(payload, token='value') # -> None
+# Async call
+await asyncAwsProvisioner.createSecret(token, payload) # -> None
+await asyncAwsProvisioner.createSecret(payload, token='value') # -> None
+```
+
+#### Get a Secret
+Retrieve a secret from storage. The result contains any passwords or
+other restricted information verbatim as well as a temporary credential
+based on the scopes specified when the secret was created.
+
+It is important that this secret is deleted by the consumer (`removeSecret`),
+or else the secrets will be visible to any process which can access the
+user data associated with the instance.
+
+
+
+Takes the following arguments:
+
+ * `token`
+
+Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/get-secret-response.json#)
+
+```python
+# Sync calls
+awsProvisioner.getSecret(token) # -> result
+awsProvisioner.getSecret(token='value') # -> result
+# Async call
+await asyncAwsProvisioner.getSecret(token) # -> result
+await asyncAwsProvisioner.getSecret(token='value') # -> result
+```
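+
+Putting `getSecret` and `removeSecret` (described below) together, a worker's
+start-up flow might look roughly like this sketch:
+
+```python
+# `token` is the securityToken the provisioner placed in the instance user data.
+secret = awsProvisioner.getSecret(token)
+
+# ... use the credentials and other restricted data in `secret` ...
+
+# Delete the secret as soon as it has been read, so it can no longer be
+# retrieved by anything else that can read the instance user data.
+awsProvisioner.removeSecret(token)
+```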
+
+#### Report an instance starting
+An instance will report in by giving its instance id as well
+as its security token. The token is checked to ensure that it
+matches a real, existing token, so that random machines do not
+check in. We could generate a different token, but that seems like
+overkill.
+
+
+
+Takes the following arguments:
+
+ * `instanceId`
+ * `token`
+
+```python
+# Sync calls
+awsProvisioner.instanceStarted(instanceId, token) # -> None
+awsProvisioner.instanceStarted(instanceId='value', token='value') # -> None
+# Async call
+await asyncAwsProvisioner.instanceStarted(instanceId, token) # -> None
+await asyncAwsProvisioner.instanceStarted(instanceId='value', token='value') # -> None
+```
+
+#### Remove a Secret
+Remove a secret. After this call, a call to `getSecret` with the given
+token will return no information.
+
+It is very important that the consumer of a
+secret delete the secret from storage before handing over control
+to untrusted processes to prevent credential and/or secret leakage.
+
+
+
+Takes the following arguments:
+
+ * `token`
+
+```python
+# Sync calls
+awsProvisioner.removeSecret(token) # -> None
+awsProvisioner.removeSecret(token='value') # -> None
+# Async call
+await asyncAwsProvisioner.removeSecret(token) # -> None
+await asyncAwsProvisioner.removeSecret(token='value') # -> None
+```
+
+#### Get All Launch Specifications for WorkerType
+This method returns a preview of all possible launch specifications
+that this worker type definition could submit to EC2. It is used to
+test worker types, nothing more.
+
+**This API end-point is experimental and may be subject to change without warning.**
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/get-launch-specs-response.json#)
+
+```python
+# Sync calls
+awsProvisioner.getLaunchSpecs(workerType) # -> result
+awsProvisioner.getLaunchSpecs(workerType='value') # -> result
+# Async call
+await asyncAwsProvisioner.getLaunchSpecs(workerType) # -> result
+await asyncAwsProvisioner.getLaunchSpecs(workerType='value') # -> result
+```
+
+#### Get AWS State for a worker type
+Return the state of a given workertype as stored by the provisioner.
+This state is stored as three lists: 1 for running instances, 1 for
+pending requests. The `summary` property contains an updated summary
+similar to that returned from `listWorkerTypeSummaries`.
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+```python
+# Sync calls
+awsProvisioner.state(workerType) # -> None
+awsProvisioner.state(workerType='value') # -> None
+# Async call
+await asyncAwsProvisioner.state(workerType) # -> None
+await asyncAwsProvisioner.state(workerType='value') # -> None
+```
+
+#### Backend Status
+This endpoint is used to show the last time the provisioner
+checked in. A check-in is done through the deadman's snitch
+API. It is done at the conclusion of a provisioning iteration
+and used to tell if the background provisioning process is still
+running.
+
+**Warning** this api end-point is **not stable**.
+
+
+Required [output schema](http://schemas.taskcluster.net/aws-provisioner/v1/backend-status-response.json#)
+
+```python
+# Sync calls
+awsProvisioner.backendStatus() # -> result
+# Async call
+await asyncAwsProvisioner.backendStatus() # -> result
+```
+
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+awsProvisioner.ping() # -> None
+# Async call
+await asyncAwsProvisioner.ping() # -> None
+```
+
+
+
+
+### Exchanges in `taskcluster.AwsProvisionerEvents`
+```python
+# Create AwsProvisionerEvents client instance
+import taskcluster
+awsProvisionerEvents = taskcluster.AwsProvisionerEvents(options)
+```
+Exchanges from the provisioner... more docs later
+#### WorkerType Created Message
+ * `awsProvisionerEvents.workerTypeCreated(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key.
+ * `workerType` is required Description: WorkerType that this message concerns.
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### WorkerType Updated Message
+ * `awsProvisionerEvents.workerTypeUpdated(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key.
+ * `workerType` is required Description: WorkerType that this message concerns.
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### WorkerType Removed Message
+ * `awsProvisionerEvents.workerTypeRemoved(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key.
+ * `workerType` is required Description: WorkerType that this message concerns.
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+
+
+
+### Methods in `taskcluster.EC2Manager`
+```python
+import asyncio # Only for async
+# Create EC2Manager client instance
+import taskcluster
+import taskcluster.aio
+
+eC2Manager = taskcluster.EC2Manager(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncEC2Manager = taskcluster.aio.EC2Manager(options, session=session)
+```
+A taskcluster service which manages EC2 instances. This service does not understand any taskcluster concepts intrinsically other than using the name `workerType` to refer to a group of associated instances. Unless you are working on building a provisioner for AWS, you almost certainly do not want to use this service.
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+eC2Manager.ping() # -> None
+# Async call
+await asyncEC2Manager.ping() # -> None
+```
+
+#### See the list of worker types which are known to be managed
+This method is only for debugging the ec2-manager
+
+
+Required [output schema](v1/list-worker-types.json#)
+
+```python
+# Sync calls
+eC2Manager.listWorkerTypes() # -> result
+# Async call
+await asyncEC2Manager.listWorkerTypes() # -> result
+```
+
+#### Run an instance
+Request an instance of a worker type
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+Required [input schema](v1/run-instance-request.json#)
+
+```python
+# Sync calls
+eC2Manager.runInstance(workerType, payload) # -> None
+eC2Manager.runInstance(payload, workerType='value') # -> None
+# Async call
+await asyncEC2Manager.runInstance(workerType, payload) # -> None
+await asyncEC2Manager.runInstance(payload, workerType='value') # -> None
+```
+
+#### Terminate all resources from a worker type
+Terminate all instances for this worker type
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+```python
+# Sync calls
+eC2Manager.terminateWorkerType(workerType) # -> None
+eC2Manager.terminateWorkerType(workerType='value') # -> None
+# Async call
+await asyncEC2Manager.terminateWorkerType(workerType) # -> None
+await asyncEC2Manager.terminateWorkerType(workerType='value') # -> None
+```
+
+#### Look up the resource stats for a workerType
+Return an object which has a generic state description. This only contains counts of instances
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+Required [output schema](v1/worker-type-resources.json#)
+
+```python
+# Sync calls
+eC2Manager.workerTypeStats(workerType) # -> result
+eC2Manager.workerTypeStats(workerType='value') # -> result
+# Async call
+await asyncEC2Manager.workerTypeStats(workerType) # -> result
+await asyncEC2Manager.workerTypeStats(workerType='value') # -> result
+```
+
+#### Look up the resource health for a workerType
+Return a view of the health of a given worker type
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+Required [output schema](v1/health.json#)
+
+```python
+# Sync calls
+eC2Manager.workerTypeHealth(workerType) # -> result
+eC2Manager.workerTypeHealth(workerType='value') # -> result
+# Async call
+await asyncEC2Manager.workerTypeHealth(workerType) # -> result
+await asyncEC2Manager.workerTypeHealth(workerType='value') # -> result
+```
+
+#### Look up the most recent errors of a workerType
+Return a list of the most recent errors encountered by a worker type
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+Required [output schema](v1/errors.json#)
+
+```python
+# Sync calls
+eC2Manager.workerTypeErrors(workerType) # -> result
+eC2Manager.workerTypeErrors(workerType='value') # -> result
+# Async call
+await asyncEC2Manager.workerTypeErrors(workerType) # -> result
+await asyncEC2Manager.workerTypeErrors(workerType='value') # -> result
+```
+
+#### Look up the resource state for a workerType
+Return state information for a given worker type
+
+
+
+Takes the following arguments:
+
+ * `workerType`
+
+Required [output schema](v1/worker-type-state.json#)
+
+```python
+# Sync calls
+eC2Manager.workerTypeState(workerType) # -> result
+eC2Manager.workerTypeState(workerType='value') # -> result
+# Async call
+await asyncEC2Manager.workerTypeState(workerType) # -> result
+await asyncEC2Manager.workerTypeState(workerType='value') # -> result
+```
+
+#### Ensure a KeyPair for a given worker type exists
+Idempotently ensure that a keypair of a given name exists
+
+
+
+Takes the following arguments:
+
+ * `name`
+
+Required [input schema](v1/create-key-pair.json#)
+
+```python
+# Sync calls
+eC2Manager.ensureKeyPair(name, payload) # -> None
+eC2Manager.ensureKeyPair(payload, name='value') # -> None
+# Async call
+await asyncEC2Manager.ensureKeyPair(name, payload) # -> None
+await asyncEC2Manager.ensureKeyPair(payload, name='value') # -> None
+```
+
+#### Ensure a KeyPair for a given worker type does not exist
+Ensure that a keypair of a given name does not exist.
+
+
+
+Takes the following arguments:
+
+ * `name`
+
+```python
+# Sync calls
+eC2Manager.removeKeyPair(name) # -> None
+eC2Manager.removeKeyPair(name='value') # -> None
+# Async call
+await asyncEC2Manager.removeKeyPair(name) # -> None
+await asyncEC2Manager.removeKeyPair(name='value') # -> None
+```
+
+#### Terminate an instance
+Terminate an instance in a specified region
+
+
+
+Takes the following arguments:
+
+ * `region`
+ * `instanceId`
+
+```python
+# Sync calls
+eC2Manager.terminateInstance(region, instanceId) # -> None
+eC2Manager.terminateInstance(region='value', instanceId='value') # -> None
+# Async call
+await asyncEC2Manager.terminateInstance(region, instanceId) # -> None
+await asyncEC2Manager.terminateInstance(region='value', instanceId='value') # -> None
+```
+
+#### Request prices for EC2
+Return a list of possible prices for EC2
+
+
+Required [output schema](v1/prices.json#)
+
+```python
+# Sync calls
+eC2Manager.getPrices() # -> result
+# Async call
+await asyncEC2Manager.getPrices() # -> result
+```
+
+#### Request prices for EC2
+Return a list of possible prices for EC2
+
+
+Required [input schema](v1/prices-request.json#)
+
+Required [output schema](v1/prices.json#)
+
+```python
+# Sync calls
+eC2Manager.getSpecificPrices(payload) # -> result
+# Async call
+await asyncEC2Manager.getSpecificPrices(payload) # -> result
+```
+
+#### Get EC2 account health metrics
+Give some basic stats on the health of our EC2 account
+
+
+Required [output schema](v1/health.json#)
+
+```python
+# Sync calls
+eC2Manager.getHealth() # -> result
+# Async call
+await asyncEC2Manager.getHealth() # -> result
+```
+
+#### Look up the most recent errors in the provisioner across all worker types
+Return a list of recent errors encountered
+
+
+Required [output schema](v1/errors.json#)
+
+```python
+# Sync calls
+eC2Manager.getRecentErrors() # -> result
+# Async call
+await asyncEC2Manager.getRecentErrors() # -> result
+```
+
+#### See the list of regions managed by this ec2-manager
+This method is only for debugging the ec2-manager
+
+
+```python
+# Sync calls
+eC2Manager.regions() # -> None
+# Async call
+await asyncEC2Manager.regions() # -> None
+```
+
+#### See the list of AMIs and their usage
+List AMIs and their usage by returning a list of objects in the form:
+{
+  region: string,
+  volumetype: string,
+  lastused: timestamp
+}
+
+
+```python
+# Sync calls
+eC2Manager.amiUsage() # -> None
+# Async call
+await asyncEC2Manager.amiUsage() # -> None
+```
+
+#### See the current EBS volume usage list
+Lists current EBS volume usage by returning a list of objects
+that are uniquely defined by {region, volumetype, state} in the form:
+{
+  region: string,
+  volumetype: string,
+  state: string,
+  totalcount: integer,
+  totalgb: integer,
+  touched: timestamp (last time that information was updated)
+}
+
+
+```python
+# Sync calls
+eC2Manager.ebsUsage() # -> None
+# Async call
+await asyncEC2Manager.ebsUsage() # -> None
+```
+
+#### Statistics on the Database client pool
+This method is only for debugging the ec2-manager
+
+
+```python
+# Sync calls
+eC2Manager.dbpoolStats() # -> None
+# Async call
+await asyncEC2Manager.dbpoolStats() # -> None
+```
+
+#### List out the entire internal state
+This method is only for debugging the ec2-manager
+
+
+```python
+# Sync calls
+eC2Manager.allState() # -> None
+# Async call
+await asyncEC2Manager.allState() # -> None
+```
+
+#### Statistics on the sqs queues
+This method is only for debugging the ec2-manager
+
+
+```python
+# Sync calls
+eC2Manager.sqsStats() # -> None
+# Async call
+await asyncEC2Manager.sqsStats() # -> None
+```
+
+#### Purge the SQS queues
+This method is only for debugging the ec2-manager
+
+
+```python
+# Sync calls
+eC2Manager.purgeQueues() # -> None
+# Async call
+await asyncEC2Manager.purgeQueues() # -> None
+```
+
+
+
+
+### Methods in `taskcluster.Github`
+```python
+import asyncio # Only for async
+# Create Github client instance
+import taskcluster
+import taskcluster.aio
+
+github = taskcluster.Github(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncGithub = taskcluster.aio.Github(options, session=session)
+```
+The github service is responsible for creating tasks in response
+to GitHub events, and posting results to the GitHub UI.
+
+This document describes the API end-point for consuming GitHub
+web hooks, as well as some useful consumer APIs.
+
+When Github forbids an action, this service returns an HTTP 403
+with code ForbiddenByGithub.
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+github.ping() # -> None
+# Async call
+await asyncGithub.ping() # -> None
+```
+
+#### Consume GitHub WebHook
+Capture a GitHub event and publish it via pulse, if it's a push,
+release or pull request.
+
+
+```python
+# Sync calls
+github.githubWebHookConsumer() # -> None
+# Async call
+await asyncGithub.githubWebHookConsumer() # -> None
+```
+
+#### List of Builds
+A paginated list of builds that have been run in
+Taskcluster. Can be filtered on various git-specific
+fields.
+
+
+Required [output schema](v1/build-list.json#)
+
+```python
+# Sync calls
+github.builds() # -> result
+# Async call
+await asyncGithub.builds() # -> result
+```
+
+#### Latest Build Status Badge
+Checks the status of the latest build of a given branch
+and returns the corresponding badge SVG.
+
+
+
+Takes the following arguments:
+
+ * `owner`
+ * `repo`
+ * `branch`
+
+```python
+# Sync calls
+github.badge(owner, repo, branch) # -> None
+github.badge(owner='value', repo='value', branch='value') # -> None
+# Async call
+await asyncGithub.badge(owner, repo, branch) # -> None
+await asyncGithub.badge(owner='value', repo='value', branch='value') # -> None
+```
+
+#### Get Repository Info
+Returns any repository metadata that is
+useful within Taskcluster related services.
+
+
+
+Takes the following arguments:
+
+ * `owner`
+ * `repo`
+
+Required [output schema](v1/repository.json#)
+
+```python
+# Sync calls
+github.repository(owner, repo) # -> result
+github.repository(owner='value', repo='value') # -> result
+# Async call
+await asyncGithub.repository(owner, repo) # -> result
+await asyncGithub.repository(owner='value', repo='value') # -> result
+```
+
+#### Latest Status for Branch
+For a given branch of a repository, this will always point
+to a status page for the most recent task triggered by that
+branch.
+
+Note: This is a redirect rather than a direct link.
+
+
+
+Takes the following arguments:
+
+ * `owner`
+ * `repo`
+ * `branch`
+
+```python
+# Sync calls
+github.latest(owner, repo, branch) # -> None
+github.latest(owner='value', repo='value', branch='value') # -> None
+# Async call
+await asyncGithub.latest(owner, repo, branch) # -> None
+await asyncGithub.latest(owner='value', repo='value', branch='value') # -> None
+```
+
+#### Post a status against a given changeset
+For a given changeset (SHA) of a repository, this will attach a "commit status"
+on github. These statuses are links displayed next to each revision.
+The status is either OK (green check) or FAILURE (red cross),
+made of a custom title and link.
+
+
+
+Takes the following arguments:
+
+ * `owner`
+ * `repo`
+ * `sha`
+
+Required [input schema](v1/create-status.json#)
+
+```python
+# Sync calls
+github.createStatus(owner, repo, sha, payload) # -> None
+github.createStatus(payload, owner='value', repo='value', sha='value') # -> None
+# Async call
+await asyncGithub.createStatus(owner, repo, sha, payload) # -> None
+await asyncGithub.createStatus(payload, owner='value', repo='value', sha='value') # -> None
+```
+
+#### Post a comment on a given GitHub Issue or Pull Request
+For a given Issue or Pull Request of a repository, this will write a new message.
+
+
+
+Takes the following arguments:
+
+ * `owner`
+ * `repo`
+ * `number`
+
+Required [input schema](v1/create-comment.json#)
+
+```python
+# Sync calls
+github.createComment(owner, repo, number, payload) # -> None
+github.createComment(payload, owner='value', repo='value', number='value') # -> None
+# Async call
+await asyncGithub.createComment(owner, repo, number, payload) # -> None
+await asyncGithub.createComment(payload, owner='value', repo='value', number='value') # -> None
+```
+
+
+
+
+### Exchanges in `taskcluster.GithubEvents`
+```python
+# Create GithubEvents client instance
+import taskcluster
+githubEvents = taskcluster.GithubEvents(options)
+```
+The github service publishes a pulse
+message for supported github events, translating Github webhook
+events into pulse messages.
+
+This document describes the exchange offered by the taskcluster
+github service
+#### GitHub Pull Request Event
+ * `githubEvents.pullRequest(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key.
+ * `organization` is required Description: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped.
+ * `repository` is required Description: The GitHub `repository` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped.
+ * `action` is required Description: The GitHub `action` which triggered an event. For possible values, see the payload actions property.
+
+#### GitHub push Event
+ * `githubEvents.push(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key.
+ * `organization` is required Description: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped.
+ * `repository` is required Description: The GitHub `repository` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped.
+
+#### GitHub release Event
+ * `githubEvents.release(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key.
+ * `organization` is required Description: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped.
+ * `repository` is required Description: The GitHub `repository` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped.
+
+#### GitHub Task Group Defined Event
+ * `githubEvents.taskGroupDefined(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key.
+ * `organization` is required Description: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped.
+ * `repository` is required Description: The GitHub `repository` which had an event.All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped.
+
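+As an illustration, a consumer might bind to pull-request messages from a single
+repository. The sketch below is not part of the generated reference: it assumes the
+exchange method accepts a dictionary of routing-key fields and returns a binding with
+`exchange` and `routingKeyPattern` entries, and the organization/repository names are
+placeholders.
+
+```python
+import taskcluster
+
+options = {}  # client options as described earlier in this README
+githubEvents = taskcluster.GithubEvents(options)
+
+# Hypothetical routing-key fields; unspecified fields remain wildcards.
+binding = githubEvents.pullRequest({'organization': 'my-org', 'repository': 'my-repo'})
+
+# The binding describes which exchange to listen on and with which pattern,
+# for use with a pulse consumer of your choice.
+print(binding['exchange'], binding['routingKeyPattern'])
+```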
+
+
+
+### Methods in `taskcluster.Hooks`
+```python
+import asyncio # Only for async
+# Create Hooks client instance
+import taskcluster
+import taskcluster.aio
+
+hooks = taskcluster.Hooks(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncHooks = taskcluster.aio.Hooks(options, session=session)
+```
+Hooks are a mechanism for creating tasks in response to events.
+
+Hooks are identified with a `hookGroupId` and a `hookId`.
+
+When an event occurs, the resulting task is automatically created. The
+task is created using the scope `assume:hook-id:<hookGroupId>/<hookId>`,
+which must have scopes to make the createTask call, including satisfying all
+scopes in `task.scopes`. The new task has a `taskGroupId` equal to its
+`taskId`, as is the convention for decision tasks.
+
+Hooks can have a "schedule" indicating specific times that new tasks should
+be created. Each schedule is in a simple cron format, per
+https://www.npmjs.com/package/cron-parser. For example:
+ * `['0 0 1 * * *']` -- daily at 1:00 UTC
+ * `['0 0 9,21 * * 1-5', '0 0 12 * * 0,6']` -- weekdays at 9:00 and 21:00 UTC, weekends at noon
+
+The task definition is used as a JSON-e template, with a context depending on how it is fired. See
+[firing-hooks](/docs/reference/core/taskcluster-hooks/docs/firing-hooks)
+for more information.
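+
+As a rough illustration (not a definitive recipe), a hook that fires nightly could be
+created along these lines; the group/hook ids, metadata and task template are
+placeholders, and `create-hook-request.json#` is the authoritative schema:
+
+```python
+hook_definition = {
+    'metadata': {
+        'name': 'nightly-example',
+        'description': 'Illustrative nightly hook',
+        'owner': 'someone@example.com',
+        'emailOnError': True,
+    },
+    # Fire every day at 01:00 UTC, in the cron format described above.
+    'schedule': ['0 0 1 * * *'],
+    # JSON-e template rendered into a task definition each time the hook fires.
+    'task': {
+        'provisionerId': 'example-provisioner',
+        'workerType': 'example-worker',
+        'payload': {},
+        'metadata': {
+            'name': 'nightly task',
+            'description': 'Created by the nightly-example hook',
+            'owner': 'someone@example.com',
+            'source': 'https://example.com',
+        },
+        # ...remaining task fields (deadline, expires, scopes, ...) per the schema
+    },
+}
+
+hooks.createHook('examples', 'nightly-example', hook_definition)
+```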
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+hooks.ping() # -> None
+# Async call
+await asyncHooks.ping() # -> None
+```
+
+#### List hook groups
+This endpoint will return a list of all hook groups with at least one hook.
+
+
+Required [output schema](v1/list-hook-groups-response.json#)
+
+```python
+# Sync calls
+hooks.listHookGroups() # -> result
+# Async call
+await asyncHooks.listHookGroups() # -> result
+```
+
+#### List hooks in a given group
+This endpoint will return a list of all the hook definitions within a
+given hook group.
+
+
+
+Takes the following arguments:
+
+ * `hookGroupId`
+
+Required [output schema](v1/list-hooks-response.json#)
+
+```python
+# Sync calls
+hooks.listHooks(hookGroupId) # -> result
+hooks.listHooks(hookGroupId='value') # -> result
+# Async call
+await asyncHooks.listHooks(hookGroupId) # -> result
+await asyncHooks.listHooks(hookGroupId='value') # -> result
+```
+
+#### Get hook definition
+This endpoint will return the hook definition for the given `hookGroupId`
+and hookId.
+
+
+
+Takes the following arguments:
+
+ * `hookGroupId`
+ * `hookId`
+
+Required [output schema](v1/hook-definition.json#)
+
+```python
+# Sync calls
+hooks.hook(hookGroupId, hookId) # -> result
+hooks.hook(hookGroupId='value', hookId='value') # -> result
+# Async call
+await asyncHooks.hook(hookGroupId, hookId) # -> result
+await asyncHooks.hook(hookGroupId='value', hookId='value') # -> result
+```
+
+#### Get hook status
+This endpoint will return the current status of the hook. This represents a
+snapshot in time and may vary from one call to the next.
+
+
+
+Takes the following arguments:
+
+ * `hookGroupId`
+ * `hookId`
+
+Required [output schema](v1/hook-status.json#)
+
+```python
+# Sync calls
+hooks.getHookStatus(hookGroupId, hookId) # -> result
+hooks.getHookStatus(hookGroupId='value', hookId='value') # -> result
+# Async call
+await asyncHooks.getHookStatus(hookGroupId, hookId) # -> result
+await asyncHooks.getHookStatus(hookGroupId='value', hookId='value') # -> result
+```
+
+#### Create a hook
+This endpoint will create a new hook.
+
+The caller's credentials must include the role that will be used to
+create the task. That role must satisfy task.scopes as well as the
+necessary scopes to add the task to the queue.
+
+
+
+
+Takes the following arguments:
+
+ * `hookGroupId`
+ * `hookId`
+
+Required [input schema](v1/create-hook-request.json#)
+
+Required [output schema](v1/hook-definition.json#)
+
+```python
+# Sync calls
+hooks.createHook(hookGroupId, hookId, payload) # -> result
+hooks.createHook(payload, hookGroupId='value', hookId='value') # -> result
+# Async call
+await asyncHooks.createHook(hookGroupId, hookId, payload) # -> result
+await asyncHooks.createHook(payload, hookGroupId='value', hookId='value') # -> result
+```
+
+#### Update a hook
+This endpoint will update an existing hook. All fields except
+`hookGroupId` and `hookId` can be modified.
+
+
+
+Takes the following arguments:
+
+ * `hookGroupId`
+ * `hookId`
+
+Required [input schema](v1/create-hook-request.json#)
+
+Required [output schema](v1/hook-definition.json#)
+
+```python
+# Sync calls
+hooks.updateHook(hookGroupId, hookId, payload) # -> result
+hooks.updateHook(payload, hookGroupId='value', hookId='value') # -> result
+# Async call
+await asyncHooks.updateHook(hookGroupId, hookId, payload) # -> result
+await asyncHooks.updateHook(payload, hookGroupId='value', hookId='value') # -> result
+```
+
+#### Delete a hook
+This endpoint will remove a hook definition.
+
+
+
+Takes the following arguments:
+
+ * `hookGroupId`
+ * `hookId`
+
+```python
+# Sync calls
+hooks.removeHook(hookGroupId, hookId) # -> None
+hooks.removeHook(hookGroupId='value', hookId='value') # -> None
+# Async call
+await asyncHooks.removeHook(hookGroupId, hookId) # -> None
+await asyncHooks.removeHook(hookGroupId='value', hookId='value') # -> None
+```
+
+#### Trigger a hook
+This endpoint will trigger the creation of a task from a hook definition.
+
+The HTTP payload must match the hooks `triggerSchema`. If it does, it is
+provided as the `payload` property of the JSON-e context used to render the
+task template.
+
+
+
+Takes the following arguments:
+
+ * `hookGroupId`
+ * `hookId`
+
+Required [input schema](v1/trigger-hook.json#)
+
+Required [output schema](v1/task-status.json#)
+
+```python
+# Sync calls
+hooks.triggerHook(hookGroupId, hookId, payload) # -> result
+hooks.triggerHook(payload, hookGroupId='value', hookId='value') # -> result
+# Async call
+await asyncHooks.triggerHook(hookGroupId, hookId, payload) # -> result
+await asyncHooks.triggerHook(payload, hookGroupId='value', hookId='value') # -> result
+```
+
+#### Get a trigger token
+Retrieve a unique secret token for triggering the specified hook. This
+token can be deactivated with `resetTriggerToken`.
+
+
+
+Takes the following arguments:
+
+ * `hookGroupId`
+ * `hookId`
+
+Required [output schema](v1/trigger-token-response.json#)
+
+```python
+# Sync calls
+hooks.getTriggerToken(hookGroupId, hookId) # -> result
+hooks.getTriggerToken(hookGroupId='value', hookId='value') # -> result
+# Async call
+await asyncHooks.getTriggerToken(hookGroupId, hookId) # -> result
+await asyncHooks.getTriggerToken(hookGroupId='value', hookId='value') # -> result
+```
+
+#### Reset a trigger token
+Reset the token for triggering a given hook, replacing it with a new token. This
+invalidates any token that may previously have been issued via `getTriggerToken`.
+
+
+
+Takes the following arguments:
+
+ * `hookGroupId`
+ * `hookId`
+
+Required [output schema](v1/trigger-token-response.json#)
+
+```python
+# Sync calls
+hooks.resetTriggerToken(hookGroupId, hookId) # -> result
+hooks.resetTriggerToken(hookGroupId='value', hookId='value') # -> result
+# Async call
+await asyncHooks.resetTriggerToken(hookGroupId, hookId) # -> result
+await asyncHooks.resetTriggerToken(hookGroupId='value', hookId='value') # -> result
+```
+
+#### Trigger a hook with a token
+This endpoint triggers a defined hook with a valid token.
+
+The HTTP payload must match the hooks `triggerSchema`. If it does, it is
+provided as the `payload` property of the JSON-e context used to render the
+task template.
+
+
+
+Takes the following arguments:
+
+ * `hookGroupId`
+ * `hookId`
+ * `token`
+
+Required [input schema](v1/trigger-hook.json#)
+
+Required [output schema](v1/task-status.json#)
+
+```python
+# Sync calls
+hooks.triggerHookWithToken(hookGroupId, hookId, token, payload) # -> result
+hooks.triggerHookWithToken(payload, hookGroupId='value', hookId='value', token='value') # -> result
+# Async call
+await asyncHooks.triggerHookWithToken(hookGroupId, hookId, token, payload) # -> result
+await asyncHooks.triggerHookWithToken(payload, hookGroupId='value', hookId='value', token='value') # -> result
+```
+
+
+
+
+### Methods in `taskcluster.Index`
+```python
+import asyncio # Only for async
+# Create Index client instance
+import taskcluster
+import taskcluster.aio
+
+index = taskcluster.Index(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncIndex = taskcluster.aio.Index(options, session=session)
+```
+The task index, typically available at `index.taskcluster.net`, is
+responsible for indexing tasks. The service ensures that tasks can be
+located by recency and/or arbitrary strings. Common use-cases include:
+
+ * Locate tasks by git or mercurial `<revision>`, or
+ * Locate latest task from given `<branch>`, such as a release.
+
+**Index hierarchy**, tasks are indexed in a dot (`.`) separated hierarchy
+called a namespace. For example a task could be indexed with the index path
+`some-app.<revision>.linux-64.release-build`. In this case the following
+namespaces are created.
+
+ 1. `some-app`,
+ 2. `some-app.<revision>`, and
+ 3. `some-app.<revision>.linux-64`
+
+Inside the namespace `some-app.<revision>` you can find the namespace
+`some-app.<revision>.linux-64` inside which you can find the indexed task
+`some-app.<revision>.linux-64.release-build`. This is an example of indexing
+builds for a given platform and revision.
+
+**Task Rank**, when a task is indexed, it is assigned a `rank` (defaults
+to `0`). If another task is already indexed in the same namespace with
+lower or equal `rank`, the index for that task will be overwritten. For example
+consider index path `mozilla-central.linux-64.release-build`. In
+this case one might choose to use a UNIX timestamp or mercurial revision
+number as `rank`. This way the latest completed linux 64 bit release
+build is always available at `mozilla-central.linux-64.release-build`.
+
+Note that this does mean index paths are not immutable: the same path may
+point to a different task now than it did a moment ago.
+
+**Indexed Data**, when a task is retrieved from the index the result includes
+a `taskId` and an additional user-defined JSON blob that was indexed with
+the task.
+
+**Entry Expiration**, all indexed entries must have an expiration date.
+Typically this defaults to one year, if not specified. If you are
+indexing tasks to make it easy to find artifacts, consider using the
+artifact's expiration date.
+
+**Valid Characters**, all keys in a namespace `<key1>.<key2>` must be
+in the form `/[a-zA-Z0-9_!~*'()%-]+/`. Observe that this is URL-safe and
+that if you strictly want to put another character you can URL encode it.
+
+**Indexing Routes**, tasks can be indexed using the API below, but the
+most common way to index tasks is adding a custom route to `task.routes` of the
+form `index.<namespace>`. In order to add this route to a task you'll
+need the scope `queue:route:index.<namespace>`. When a task has
+this route, it will be indexed when the task is **completed successfully**.
+The task will be indexed with `rank`, `data` and `expires` as specified
+in `task.extra.index`. See the example below:
+
+```js
+{
+ payload: { /* ... */ },
+ routes: [
+ // index.<namespace> prefixed routes, tasks CC'ed such a route will
+ // be indexed under the given <namespace>
+ "index.mozilla-central.linux-64.release-build",
+ "index.<revision>.linux-64.release-build"
+ ],
+ extra: {
+ // Optional details for indexing service
+ index: {
+      // Ordering, this taskId will overwrite anything that has
+ // rank <= 4000 (defaults to zero)
+ rank: 4000,
+
+ // Specify when the entries expire (Defaults to 1 year)
+ expires: new Date().toJSON(),
+
+ // A little informal data to store along with taskId
+      // (less than 16 kb when encoded as JSON)
+ data: {
+ hgRevision: "...",
+        commitMessage: "...",
+        // ...whatever else you need
+ }
+ },
+ // Extra properties for other services...
+ }
+ // Other task properties...
+}
+```
+
+**Remark**, when indexing tasks using custom routes, it's also possible
+to listen for messages about these tasks. For
+example one could bind to `route.index.some-app.*.release-build`,
+and pick up all messages about release builds. Hence, it is a
+good idea to document task index hierarchies, as these make up extension
+points in their own right.
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+index.ping() # -> None
+# Async call
+await asyncIndex.ping() # -> None
+```
+
+#### Find Indexed Task
+Find a task by index path, returning the highest-rank task with that path. If no
+task exists for the given path, this API end-point will respond with a 404 status.
+
+
+
+Takes the following arguments:
+
+ * `indexPath`
+
+Required [output schema](v1/indexed-task-response.json#)
+
+```python
+# Sync calls
+index.findTask(indexPath) # -> result
+index.findTask(indexPath='value') # -> result
+# Async call
+await asyncIndex.findTask(indexPath) # -> result
+await asyncIndex.findTask(indexPath='value') # -> result
+```
+
+#### List Namespaces
+List the namespaces immediately under a given namespace.
+
+This endpoint
+lists up to 1000 namespaces. If more namespaces are present, a
+`continuationToken` will be returned, which can be given in the next
+request. For the initial request, the payload should be an empty JSON
+object.
+
+
+
+Takes the following arguments:
+
+ * `namespace`
+
+Required [output schema](v1/list-namespaces-response.json#)
+
+```python
+# Sync calls
+index.listNamespaces(namespace) # -> result
+index.listNamespaces(namespace='value') # -> result
+# Async call
+await asyncIndex.listNamespaces(namespace) # -> result
+await asyncIndex.listNamespaces(namespace='value') # -> result
+```
+
+#### List Tasks
+List the tasks immediately under a given namespace.
+
+This endpoint
+lists up to 1000 tasks. If more tasks are present, a
+`continuationToken` will be returned, which can be given in the next
+request. For the initial request, the payload should be an empty JSON
+object.
+
+**Remark**, this end-point is designed for humans browsing for tasks, not
+services, as that makes little sense.
+
+
+
+Takes the following arguments:
+
+ * `namespace`
+
+Required [output schema](v1/list-tasks-response.json#)
+
+```python
+# Sync calls
+index.listTasks(namespace) # -> result
+index.listTasks(namespace='value') # -> result
+# Async call
+await asyncIndex.listTasks(namespace) # -> result
+await asyncIndex.listTasks(namespace='value') # -> result
+```
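+
+To walk an entire namespace, keep following `continuationToken` until it is absent.
+The loop below is a sketch only; it assumes this client forwards query-string options
+through a `query` keyword argument (the text above also mentions a request payload,
+which older API versions used for the same purpose):
+
+```python
+tasks = []
+query = {}
+while True:
+    result = index.listTasks('some-app', query=query)
+    tasks.extend(result['tasks'])
+    token = result.get('continuationToken')
+    if not token:
+        break
+    query = {'continuationToken': token}
+```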
+
+#### Insert Task into Index
+Insert a task into the index. If the new rank is less than the existing rank
+at the given index path, the task is not indexed but the response is still 200 OK.
+
+Please see the introduction above for information
+about indexing successfully completed tasks automatically using custom routes.
+
+
+
+Takes the following arguments:
+
+ * `namespace`
+
+Required [input schema](v1/insert-task-request.json#)
+
+Required [output schema](v1/indexed-task-response.json#)
+
+```python
+# Sync calls
+index.insertTask(namespace, payload) # -> result
+index.insertTask(payload, namespace='value') # -> result
+# Async call
+await asyncIndex.insertTask(namespace, payload) # -> result
+await asyncIndex.insertTask(payload, namespace='value') # -> result
+```
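+
+A minimal insertion might look like the sketch below. The field names mirror the
+introduction above (`taskId`, `rank`, `data`, `expires`) and the values are
+placeholders; `insert-task-request.json#` is authoritative, and `fromNowJSON` is
+assumed to be the client's date helper.
+
+```python
+import taskcluster
+
+payload = {
+    'taskId': 'fp3ARlbKRaGyyWuHDnvvEg',        # placeholder taskId to index
+    'rank': 4000,                               # overwrites entries with rank <= 4000
+    'data': {'hgRevision': '...'},              # small user-defined JSON blob
+    'expires': taskcluster.fromNowJSON('1 year'),
+}
+
+index.insertTask('some-app.<revision>.linux-64.release-build', payload)
+```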
+
+#### Get Artifact From Indexed Task
+Find a task by index path and redirect to the artifact on the most recent
+run with the given `name`.
+
+Note that multiple calls to this endpoint may return artifacts from different tasks
+if a new task is inserted into the index between calls. Avoid using this method as
+a stable link to multiple, connected files if the index path does not contain a
+unique identifier. For example, the following two links may return unrelated files:
+* https://index.taskcluster.net/task/some-app.win64.latest.installer/artifacts/public/installer.exe
+* https://index.taskcluster.net/task/some-app.win64.latest.installer/artifacts/public/debug-symbols.zip
+
+This problem can be remedied by including the revision in the index path or by bundling both
+installer and debug symbols into a single artifact.
+
+If no task exists for the given index path, this API end-point responds with 404.
+
+
+
+Takes the following arguments:
+
+ * `indexPath`
+ * `name`
+
+```python
+# Sync calls
+index.findArtifactFromTask(indexPath, name) # -> None
+index.findArtifactFromTask(indexPath='value', name='value') # -> None
+# Async call
+await asyncIndex.findArtifactFromTask(indexPath, name) # -> None
+await asyncIndex.findArtifactFromTask(indexPath='value', name='value') # -> None
+```
+
+
+
+
+### Methods in `taskcluster.Login`
+```python
+import asyncio # Only for async
+# Create Login client instance
+import taskcluster
+import taskcluster.aio
+
+login = taskcluster.Login(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncLogin = taskcluster.aio.Login(options, session=session)
+```
+The Login service serves as the interface between external authentication
+systems and Taskcluster credentials.
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+login.ping() # -> None
+# Async call
+await asyncLogin.ping() # -> None
+```
+
+#### Get Taskcluster credentials given a suitable `access_token`
+Given an OIDC `access_token` from a trusted OpenID provider, return a
+set of Taskcluster credentials for use on behalf of the identified
+user.
+
+This method is typically not called with a Taskcluster client library
+and does not accept Hawk credentials. The `access_token` should be
+given in an `Authorization` header:
+```
+Authorization: Bearer abc.xyz
+```
+
+The `access_token` is first verified against the named
+`provider`, then passed to the provider's APIBuilder to retrieve a user
+profile. That profile is then used to generate Taskcluster credentials
+appropriate to the user. Note that the resulting credentials may or may
+not include a `certificate` property. Callers should be prepared for either
+alternative.
+
+The given credentials will expire in a relatively short time. Callers should
+monitor this expiration and refresh the credentials, by calling this endpoint
+again, once they have expired.
+
+
+
+Takes the following arguments:
+
+ * `provider`
+
+Required [output schema](v1/oidc-credentials-response.json#)
+
+```python
+# Sync calls
+login.oidcCredentials(provider) # -> result
+login.oidcCredentials(provider='value') # -> result
+# Async call
+await asyncLogin.oidcCredentials(provider) # -> result
+await asyncLogin.oidcCredentials(provider='value') # -> result
+```
+
+
+
+
+### Methods in `taskcluster.Notify`
+```python
+import asyncio # Only for async
+# Create Notify client instance
+import taskcluster
+import taskcluster.aio
+
+notify = taskcluster.Notify(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncNotify = taskcluster.aio.Notify(options, session=session)
+```
+The notification service, typically available at `notify.taskcluster.net`
+listens for tasks with associated notifications and handles requests to
+send emails and post pulse messages.
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+notify.ping() # -> None
+# Async call
+await asyncNotify.ping() # -> None
+```
+
+#### Send an Email
+Send an email to `address`. The content is markdown and will be rendered
+to HTML, but both the HTML and raw markdown text will be sent in the
+email. If a link is included, it will be rendered to a nice button in the
+HTML version of the email
+
+
+Required [input schema](v1/email-request.json#)
+
+```python
+# Sync calls
+notify.email(payload) # -> None
+# Async call
+await asyncNotify.email(payload) # -> None
+```
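+
+For instance, a completion notice could be sent with a payload along these lines.
+The field names shown (`address`, `subject`, `content`, `link`) are illustrative and
+the addresses/URLs are placeholders; `email-request.json#` defines the actual input.
+
+```python
+notify.email({
+    'address': 'someone@example.com',
+    'subject': 'Nightly build finished',
+    'content': 'The nightly build **succeeded**.',  # markdown, rendered to HTML
+    'link': {'text': 'Inspect task', 'href': 'https://example.com/task'},
+})
+```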
+
+#### Publish a Pulse Message
+Publish a message on pulse with the given `routingKey`.
+
+
+Required [input schema](v1/pulse-request.json#)
+
+```python
+# Sync calls
+notify.pulse(payload) # -> None
+# Async call
+await asyncNotify.pulse(payload) # -> None
+```
+
+#### Post IRC Message
+Post a message on IRC to a specific channel or user, or a specific user
+on a specific channel.
+
+Success of this API method does not imply the message was successfully
+posted. This API method merely inserts the IRC message into a queue
+that will be processed by a background process.
+This allows us to re-send the message in face of connection issues.
+
+However, if the user isn't online the message will be dropped without
+error. We may improve this behavior in the future. For now, just keep
+in mind that IRC is a best-effort service.
+
+
+Required [input schema](v1/irc-request.json#)
+
+```python
+# Sync calls
+notify.irc(payload) # -> None
+# Async call
+await asyncNotify.irc(payload) # -> None
+```
+
+
+
+
+### Methods in `taskcluster.Pulse`
+```python
+import asyncio # Only for async
+# Create Pulse client instance
+import taskcluster
+import taskcluster.aio
+
+pulse = taskcluster.Pulse(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncPulse = taskcluster.aio.Pulse(options, session=session)
+```
+The taskcluster-pulse service, typically available at `pulse.taskcluster.net`
+manages pulse credentials for taskcluster users.
+
+A service to manage Pulse credentials for anything using
+Taskcluster credentials. This allows for self-service pulse
+access and greater control within the Taskcluster project.
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+pulse.ping() # -> None
+# Async call
+await asyncPulse.ping() # -> None
+```
+
+#### List Namespaces
+List the namespaces managed by this service.
+
+This will list up to 1000 namespaces. If more namespaces are present a
+`continuationToken` will be returned, which can be given in the next
+request. For the initial request, do not provide continuation token.
+
+
+Required [output schema](v1/list-namespaces-response.json#)
+
+```python
+# Sync calls
+pulse.listNamespaces() # -> result
+# Async call
+await asyncPulse.listNamespaces() # -> result
+```
+
+#### Get a namespace
+Get public information about a single namespace. This is the same information
+as returned by `listNamespaces`.
+
+
+
+Takes the following arguments:
+
+ * `namespace`
+
+Required [output schema](v1/namespace.json#)
+
+```python
+# Sync calls
+pulse.namespace(namespace) # -> result
+pulse.namespace(namespace='value') # -> result
+# Async call
+await asyncPulse.namespace(namespace) # -> result
+await asyncPulse.namespace(namespace='value') # -> result
+```
+
+#### Claim a namespace
+Claim a namespace, returning a connection string with access to that namespace
+good for use until the `reclaimAt` time in the response body. The connection
+string can be used as many times as desired during this period, but must not
+be used after `reclaimAt`.
+
+Connections made with this connection string may persist beyond `reclaimAt`,
+although they should not persist forever. 24 hours is a good maximum, and this
+service will terminate connections after 72 hours (although this value is
+configurable).
+
+The specified `expires` time updates any existing expiration times. Connections
+for expired namespaces will be terminated.
+
+
+
+Takes the following arguments:
+
+ * `namespace`
+
+Required [input schema](v1/namespace-request.json#)
+
+Required [output schema](v1/namespace-response.json#)
+
+```python
+# Sync calls
+pulse.claimNamespace(namespace, payload) # -> result
+pulse.claimNamespace(payload, namespace='value') # -> result
+# Async call
+await asyncPulse.claimNamespace(namespace, payload) # -> result
+await asyncPulse.claimNamespace(payload, namespace='value') # -> result
+```
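+
+A client that needs sustained access would claim the namespace and then claim it
+again before `reclaimAt`. The payload fields below are placeholders rather than the
+exact schema; see `namespace-request.json#`.
+
+```python
+import taskcluster
+
+result = pulse.claimNamespace('my-namespace', {
+    'contact': 'someone@example.com',            # illustrative field
+    'expires': taskcluster.fromNowJSON('1 day'),
+})
+# Per the description above, the response carries the connection string and a
+# `reclaimAt` time; call claimNamespace again before reclaimAt to retain access.
+```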
+
+
+
+
+### Methods in `taskcluster.PurgeCache`
+```python
+import asyncio # Only for async
+# Create PurgeCache client instance
+import taskcluster
+import taskcluster.aio
+
+purgeCache = taskcluster.PurgeCache(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncPurgeCache = taskcluster.aio.PurgeCache(options, session=session)
+```
+The purge-cache service is responsible for publishing a pulse
+message for workers, so they can purge cache upon request.
+
+This document describes the API end-point for publishing the pulse
+message. This is mainly intended to be used by tools.
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+purgeCache.ping() # -> None
+# Async call
+await asyncPurgeCache.ping() # -> None
+```
+
+#### Purge Worker Cache
+Publish a purge-cache message to purge caches named `cacheName` with
+`provisionerId` and `workerType` in the routing-key. Workers should
+be listening for this message and purge caches when they see it.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+ * `workerType`
+
+Required [input schema](v1/purge-cache-request.json#)
+
+```python
+# Sync calls
+purgeCache.purgeCache(provisionerId, workerType, payload) # -> None
+purgeCache.purgeCache(payload, provisionerId='value', workerType='value') # -> None
+# Async call
+await asyncPurgeCache.purgeCache(provisionerId, workerType, payload) # -> None
+await asyncPurgeCache.purgeCache(payload, provisionerId='value', workerType='value') # -> None
+```
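+
+For example, asking all workers of one worker type to drop a named cache might look
+like this sketch; the ids and cache name are placeholders and
+`purge-cache-request.json#` defines the input.
+
+```python
+purgeCache.purgeCache('aws-provisioner-v1', 'example-worker', {
+    'cacheName': 'level-1-checkouts',  # cache to purge (placeholder name)
+})
+```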
+
+#### All Open Purge Requests
+This is useful mostly for administrators to view
+the set of open purge requests. It should not
+be used by workers. They should use the purgeRequests
+endpoint that is specific to their workerType and
+provisionerId.
+
+
+Required [output schema](v1/all-purge-cache-request-list.json#)
+
+```python
+# Sync calls
+purgeCache.allPurgeRequests() # -> result
+# Async call
+await asyncPurgeCache.allPurgeRequests() # -> result
+```
+
+#### Open Purge Requests for a provisionerId/workerType pair
+List of caches that need to be purged if they are from before
+a certain time. This is safe to be used in automation from
+workers.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+ * `workerType`
+
+Required [output schema](v1/purge-cache-request-list.json#)
+
+```python
+# Sync calls
+purgeCache.purgeRequests(provisionerId, workerType) # -> result
+purgeCache.purgeRequests(provisionerId='value', workerType='value') # -> result
+# Async call
+await asyncPurgeCache.purgeRequests(provisionerId, workerType) # -> result
+await asyncPurgeCache.purgeRequests(provisionerId='value', workerType='value') # -> result
+```
+
+
+
+
+### Exchanges in `taskcluster.PurgeCacheEvents`
+```python
+# Create PurgeCacheEvents client instance
+import taskcluster
+purgeCacheEvents = taskcluster.PurgeCacheEvents(options)
+```
+The purge-cache service, typically available at
+`purge-cache.taskcluster.net`, is responsible for publishing a pulse
+message for workers, so they can purge cache upon request.
+
+This document describes the exchange offered for workers by the
+cache-purge service.
+#### Purge Cache Messages
+ * `purgeCacheEvents.purgeCache(routingKeyPattern) -> routingKey`
+   * `routingKeyKind` is required and is the constant `primary`. Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key.
+   * `provisionerId` is required. Description: `provisionerId` under which to purge cache.
+   * `workerType` is required. Description: `workerType` for which to purge cache.
+
+
+
+
+### Methods in `taskcluster.Queue`
+```python
+import asyncio # Only for async
+# Create Queue client instance
+import taskcluster
+import taskcluster.aio
+
+queue = taskcluster.Queue(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncQueue = taskcluster.aio.Queue(options, session=session)
+```
+The queue, typically available at `queue.taskcluster.net`, is responsible
+for accepting tasks and tracking their state as they are executed by
+workers, in order to ensure they are eventually resolved.
+
+This document describes the API end-points offered by the queue. These
+end-points target the following audiences:
+ * Schedulers, who create tasks to be executed,
+ * Workers, who execute tasks, and
+ * Tools, that want to inspect the state of a task.
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+queue.ping() # -> None
+# Async call
+await asyncQueue.ping() # -> None
+```
+
+#### Get Task Definition
+This end-point will return the task-definition. Notice that the task
+definition may have been modified by the queue: if an optional property is
+not specified, the queue may provide a default value.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+
+Required [output schema](v1/task.json#)
+
+```python
+# Sync calls
+queue.task(taskId) # -> result
+queue.task(taskId='value') # -> result
+# Async call
+await asyncQueue.task(taskId) # -> result
+await asyncQueue.task(taskId='value') # -> result
+```
+
+#### Get task status
+Get task status structure from `taskId`
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.status(taskId) # -> result
+queue.status(taskId='value') # -> result
+# Async call
+await asyncQueue.status(taskId) # -> result
+await asyncQueue.status(taskId='value') # -> result
+```
+
+#### List Task Group
+List tasks sharing the same `taskGroupId`.
+
+As a task-group may contain an unbounded number of tasks, this end-point
+may return a `continuationToken`. To continue listing tasks you must call
+the `listTaskGroup` again with the `continuationToken` as the
+query-string option `continuationToken`.
+
+By default this end-point will try to return up to 1000 members in one
+request. But it **may return less**, even if more tasks are available.
+It may also return a `continuationToken` even though there are no more
+results. However, you can only be sure to have seen all results if you
+keep calling `listTaskGroup` with the last `continuationToken` until you
+get a result without a `continuationToken`.
+
+If you are not interested in listing all the members at once, you may
+use the query-string option `limit` to return fewer.
+
+
+
+Takes the following arguments:
+
+ * `taskGroupId`
+
+Required [output schema](v1/list-task-group-response.json#)
+
+```python
+# Sync calls
+queue.listTaskGroup(taskGroupId) # -> result
+queue.listTaskGroup(taskGroupId='value') # -> result
+# Async call
+await asyncQueue.listTaskGroup(taskGroupId) # -> result
+await asyncQueue.listTaskGroup(taskGroupId='value') # -> result
+```
+
+#### List Dependent Tasks
+List tasks that depend on the given `taskId`.
+
+As many tasks from different task-groups may depend on a single task,
+this end-point may return a `continuationToken`. To continue listing
+tasks you must call `listDependentTasks` again with the
+`continuationToken` as the query-string option `continuationToken`.
+
+By default this end-point will try to return up to 1000 tasks in one
+request. But it **may return less**, even if more tasks are available.
+It may also return a `continuationToken` even though there are no more
+results. However, you can only be sure to have seen all results if you
+keep calling `listDependentTasks` with the last `continuationToken` until
+you get a result without a `continuationToken`.
+
+If you are not interested in listing all the tasks at once, you may
+use the query-string option `limit` to return fewer.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+
+Required [output schema](v1/list-dependent-tasks-response.json#)
+
+```python
+# Sync calls
+queue.listDependentTasks(taskId) # -> result
+queue.listDependentTasks(taskId='value') # -> result
+# Async call
+await asyncQueue.listDependentTasks(taskId) # -> result
+await asyncQueue.listDependentTasks(taskId='value') # -> result
+```
+
+#### Create New Task
+Create a new task, this is an **idempotent** operation, so repeat it if
+you get an internal server error or network connection is dropped.
+
+**Task `deadline`**: the deadline property can be no more than 5 days
+into the future. This is to limit the amount of pending tasks not being
+taken care of. Ideally, you should use a much shorter deadline.
+
+**Task expiration**: the `expires` property must be greater than the
+task `deadline`. If not provided it will default to `deadline` + one
+year. Notice that artifacts created by a task must expire before the task itself.
+
+**Task specific routing-keys**: using the `task.routes` property you may
+define task specific routing-keys. If a task has a task specific
+routing-key: `<route>`, then when the AMQP message about the task is
+published, the message will be CC'ed with the routing-key:
+`route.<route>`. This is useful if you want another component to listen
+for completed tasks you have posted. The caller must have scope
+`queue:route:<route>` for each route.
+
+**Dependencies**: any tasks referenced in `task.dependencies` must have
+already been created at the time of this call.
+
+**Scopes**: Note that the scopes required to complete this API call depend
+on the content of the `scopes`, `routes`, `schedulerId`, `priority`,
+`provisionerId`, and `workerType` properties of the task definition.
+
+**Legacy Scopes**: The `queue:create-task:..` scope without a priority and
+the `queue:define-task:..` and `queue:task-group-id:..` scopes are considered
+legacy and should not be used. Note that the new, non-legacy scopes require
+a `queue:scheduler-id:..` scope as well as scopes for the proper priority.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+
+Required [input schema](v1/create-task-request.json#)
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.createTask(taskId, payload) # -> result
+queue.createTask(payload, taskId='value') # -> result
+# Async call
+await asyncQueue.createTask(taskId, payload) # -> result
+await asyncQueue.createTask(payload, taskId='value') # -> result
+```
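+
+Putting the rules above together, a minimal task definition might look like the sketch
+below. It relies on the client's `slugId()` and `fromNowJSON()` helpers; the
+provisioner, worker type, route and metadata values are placeholders, and
+`create-task-request.json#` remains the authoritative schema.
+
+```python
+import taskcluster
+
+taskId = taskcluster.slugId()
+
+task = {
+    'provisionerId': 'example-provisioner',              # placeholder
+    'workerType': 'example-worker',                      # placeholder
+    'created': taskcluster.fromNowJSON('0 seconds'),
+    'deadline': taskcluster.fromNowJSON('2 hours'),      # at most 5 days in the future
+    'expires': taskcluster.fromNowJSON('30 days'),       # must be after the deadline
+    'routes': ['index.some-app.example.release-build'],  # needs queue:route:<route>
+    'payload': {},
+    'metadata': {
+        'name': 'example task',
+        'description': 'Illustrative task definition',
+        'owner': 'someone@example.com',
+        'source': 'https://example.com',
+    },
+}
+
+queue.createTask(taskId, task)
+```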
+
+#### Define Task
+**Deprecated**, this is the same as `createTask` with a **self-dependency**.
+It is only present for legacy reasons.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+
+Required [input schema](v1/create-task-request.json#)
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.defineTask(taskId, payload) # -> result
+queue.defineTask(payload, taskId='value') # -> result
+# Async call
+await asyncQueue.defineTask(taskId, payload) # -> result
+await asyncQueue.defineTask(payload, taskId='value') # -> result
+```
+
+#### Schedule Defined Task
+scheduleTask will schedule a task to be executed, even if it has
+unresolved dependencies. A task would otherwise only be scheduled if
+its dependencies were resolved.
+
+This is useful if you have defined a task that depends on itself or on
+some other task that has not been resolved, but you wish the task to be
+scheduled immediately.
+
+This will announce the task as pending and workers will be allowed to
+claim it and resolve the task.
+
+**Note** this operation is **idempotent** and will not fail or complain
+if called with a `taskId` that is already scheduled, or even resolved.
+To reschedule a task previously resolved, use `rerunTask`.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.scheduleTask(taskId) # -> result
+queue.scheduleTask(taskId='value') # -> result
+# Async call
+await asyncQueue.scheduleTask(taskId) # -> result
+await asyncQueue.scheduleTask(taskId='value') # -> result
+```
+
+#### Rerun a Resolved Task
+This method _reruns_ a previously resolved task, even if it was
+_completed_. This is useful if your task completes unsuccessfully, and
+you just want to run it from scratch again. This will also reset the
+number of `retries` allowed.
+
+Remember that `retries` in the task status counts the number of runs that
+the queue has started because the worker stopped responding, for example
+because a spot node died.
+
+**Remark** this operation is idempotent, if you try to rerun a task that
+is not either `failed` or `completed`, this operation will just return
+the current task status.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.rerunTask(taskId) # -> result
+queue.rerunTask(taskId='value') # -> result
+# Async call
+await asyncQueue.rerunTask(taskId) # -> result
+await asyncQueue.rerunTask(taskId='value') # -> result
+```
+
+#### Cancel Task
+This method will cancel a task that is either `unscheduled`, `pending` or
+`running`. It will resolve the current run as `exception` with
+`reasonResolved` set to `canceled`. If the task isn't scheduled yet, i.e.
+it doesn't have any runs, an initial run will be added and resolved as
+described above. Hence, after canceling a task, it cannot be scheduled
+with `queue.scheduleTask`, but a new run can be created with
+`queue.rerunTask`. These semantics are equivalent to calling
+`queue.scheduleTask` immediately followed by `queue.cancelTask`.
+
+**Remark** this operation is idempotent, if you try to cancel a task that
+isn't `unscheduled`, `pending` or `running`, this operation will just
+return the current task status.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.cancelTask(taskId) # -> result
+queue.cancelTask(taskId='value') # -> result
+# Async call
+await asyncQueue.cancelTask(taskId) # -> result
+await asyncQueue.cancelTask(taskId='value') # -> result
+```
+
+#### Claim Work
+Claim pending task(s) for the given `provisionerId`/`workerType` queue.
+
+If any work is available (even if fewer than the requested number of
+tasks), this will return immediately. Otherwise, it will block for tens of
+seconds waiting for work. If no work appears, it will return an empty
+list of tasks. Callers should sleep a short while (to avoid denial of
+service in an error condition) and call the endpoint again. This is a
+simple implementation of "long polling".
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+ * `workerType`
+
+Required [input schema](v1/claim-work-request.json#)
+
+Required [output schema](v1/claim-work-response.json#)
+
+```python
+# Sync calls
+queue.claimWork(provisionerId, workerType, payload) # -> result
+queue.claimWork(payload, provisionerId='value', workerType='value') # -> result
+# Async call
+await asyncQueue.claimWork(provisionerId, workerType, payload) # -> result
+await asyncQueue.claimWork(payload, provisionerId='value', workerType='value') # -> result
+```
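+
+A simple worker main loop built on this long-polling behaviour might look like the
+sketch below; the `workerGroup`/`workerId` values are placeholders and
+`claim-work-request.json#` defines the exact input.
+
+```python
+import time
+
+def work_loop():
+    while True:
+        result = queue.claimWork('example-provisioner', 'example-worker', {
+            'workerGroup': 'example-group',  # placeholder
+            'workerId': 'worker-1',          # placeholder
+            'tasks': 4,                      # claim at most 4 tasks per call
+        })
+        claims = result.get('tasks', [])
+        if not claims:
+            # Nothing appeared within the long-poll window; back off briefly.
+            time.sleep(5)
+            continue
+        for claim in claims:
+            pass  # run the task, then report completed/failed/exception
+```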
+
+#### Claim Task
+claim a task - never documented
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+ * `runId`
+
+Required [input schema](v1/task-claim-request.json#)
+
+Required [output schema](v1/task-claim-response.json#)
+
+```python
+# Sync calls
+queue.claimTask(taskId, runId, payload) # -> result
+queue.claimTask(payload, taskId='value', runId='value') # -> result
+# Async call
+await asyncQueue.claimTask(taskId, runId, payload) # -> result
+await asyncQueue.claimTask(payload, taskId='value', runId='value') # -> result
+```
+
+#### Reclaim task
+Refresh the claim for a specific `runId` for given `taskId`. This updates
+the `takenUntil` property and returns a new set of temporary credentials
+for performing requests on behalf of the task. These credentials should
+be used in-place of the credentials returned by `claimWork`.
+
+The `reclaimTask` request serves to:
+ * Postpone `takenUntil` preventing the queue from resolving
+ `claim-expired`,
+ * Refresh temporary credentials used for processing the task, and
+ * Abort execution if the task/run has been resolved.
+
+If the `takenUntil` timestamp is exceeded the queue will resolve the run
+as _exception_ with reason `claim-expired`, and proceed to retry the
+task. This ensures that tasks are retried, even if workers disappear
+without warning.
+
+If the task is resolved, this end-point will return `409` reporting
+`RequestConflict`. This typically happens if the task has been canceled
+or the `task.deadline` has been exceeded. If reclaiming fails, workers
+should abort the task and forget about the given `runId`. There is no
+need to resolve the run or upload artifacts.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+ * `runId`
+
+Required [output schema](v1/task-reclaim-response.json#)
+
+```python
+# Sync calls
+queue.reclaimTask(taskId, runId) # -> result
+queue.reclaimTask(taskId='value', runId='value') # -> result
+# Async call
+await asyncQueue.reclaimTask(taskId, runId) # -> result
+await asyncQueue.reclaimTask(taskId='value', runId='value') # -> result
+```
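+
+Workers typically re-claim shortly before `takenUntil` expires, roughly as in the
+sketch below; the two-minute margin and the error handling are illustrative only.
+
+```python
+import datetime
+import time
+
+from taskcluster.exceptions import TaskclusterRestFailure
+
+def keep_claim_fresh(taskId, runId, taken_until):
+    """Reclaim the run a couple of minutes before takenUntil expires."""
+    while True:
+        # taken_until is an ISO 8601 string such as 2024-01-01T00:00:00.000Z.
+        expiry = datetime.datetime.strptime(taken_until, '%Y-%m-%dT%H:%M:%S.%fZ')
+        wait = (expiry - datetime.datetime.utcnow()).total_seconds() - 120
+        if wait > 0:
+            time.sleep(wait)
+        try:
+            result = queue.reclaimTask(taskId, runId)
+        except TaskclusterRestFailure:
+            # Typically a 409: the task was resolved or passed its deadline,
+            # so stop working on this run.
+            return
+        taken_until = result['takenUntil']
+```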
+
+#### Report Run Completed
+Report a task completed, resolving the run as `completed`.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+ * `runId`
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.reportCompleted(taskId, runId) # -> result
+queue.reportCompleted(taskId='value', runId='value') # -> result
+# Async call
+await asyncQueue.reportCompleted(taskId, runId) # -> result
+await asyncQueue.reportCompleted(taskId='value', runId='value') # -> result
+```
+
+#### Report Run Failed
+Report a run failed, resolving the run as `failed`. Use this to resolve
+a run that failed because the task specific code behaved unexpectedly.
+For example the task exited non-zero, or didn't produce expected output.
+
+Do not use this if the task couldn't be run because of a malformed
+payload, or some other unexpected condition. In these cases we have a task
+exception, which should be reported with `reportException`.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+ * `runId`
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.reportFailed(taskId, runId) # -> result
+queue.reportFailed(taskId='value', runId='value') # -> result
+# Async call
+await asyncQueue.reportFailed(taskId, runId) # -> result
+await asyncQueue.reportFailed(taskId='value', runId='value') # -> result
+```
+
+#### Report Task Exception
+Resolve a run as _exception_. Generally, you will want to report tasks as
+failed instead of exception. You should `reportException` if,
+
+ * The `task.payload` is invalid,
+ * Non-existent resources are referenced,
+ * Declared actions cannot be executed due to unavailable resources,
+ * The worker had to shutdown prematurely,
+ * The worker experienced an unknown error, or,
+ * The task explicitly requested a retry.
+
+Do not use this to signal that some user-specified code crashed for any
+reason specific to this code. If user-specified code hits a resource that
+is temporarily unavailable, the worker should report the task as _failed_.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+ * `runId`
+
+Required [input schema](v1/task-exception-request.json#)
+
+Required [output schema](v1/task-status-response.json#)
+
+```python
+# Sync calls
+queue.reportException(taskId, runId, payload) # -> result
+queue.reportException(payload, taskId='value', runId='value') # -> result
+# Async call
+await asyncQueue.reportException(taskId, runId, payload) # -> result
+await asyncQueue.reportException(payload, taskId='value', runId='value') # -> result
+```
+
+#### Create Artifact
+This API end-point creates an artifact for a specific run of a task. This
+should **only** be used by a worker currently operating on this task, or
+from a process running within the task (ie. on the worker).
+
+All artifacts must specify when they `expires`; the queue will
+automatically take care of deleting artifacts past their
+expiration point. This feature makes it feasible to upload large
+intermediate artifacts from data processing applications, as the
+artifacts can be set to expire a few days later.
+
+We currently support 3 different `storageType`s; each storage type has
+slightly different features and in some cases different semantics.
+We also have 2 deprecated `storageType`s which are only maintained for
+backwards compatibility and should not be used in new implementations.
+
+**Blob artifacts** are useful for storing large files. Currently, these
+are all stored in S3 but there are facilities for adding support for other
+backends in future. A call for this type of artifact must provide information
+about the file which will be uploaded. This includes sha256 sums and sizes.
+This method will return a list of general form HTTP requests which are signed
+by AWS S3 credentials managed by the Queue. Once these requests are completed
+the list of `ETag` values returned by the requests must be passed to the
+queue `completeArtifact` method.
+
+**S3 artifacts** (DEPRECATED) are useful for static files which will be
+stored on S3. When creating an S3 artifact the queue will return a
+pre-signed URL to which you can do a `PUT` request to upload your
+artifact. Note that `PUT` request **must** specify the `content-length`
+header and **must** give the `content-type` header the same value as in
+the request to `createArtifact`.
+
+**Azure artifacts** (DEPRECATED) are stored in the _Azure Blob Storage_ service
+which given the consistency guarantees and API interface offered by Azure
+is more suitable for artifacts that will be modified during the execution
+of the task. For example docker-worker has a feature that persists the
+task log to Azure Blob Storage every few seconds creating a somewhat
+live log. A request to create an Azure artifact will return a URL
+featuring a [Shared-Access-Signature](http://msdn.microsoft.com/en-us/library/azure/dn140256.aspx),
+refer to MSDN for further information on how to use these.
+**Warning: azure artifact is currently an experimental feature subject
+to changes and data-drops.**
+
+**Reference artifacts** only consist of meta-data which the queue will
+store for you. These artifacts really only have a `url` property and
+when the artifact is requested the client will be redirected to the URL
+provided with a `303` (See Other) redirect. Please note that we cannot
+delete artifacts you upload to other services; we can only delete the
+reference to the artifact, when it expires.
+
+**Error artifacts** only consist of meta-data which the queue will
+store for you. These artifacts are only meant to indicate that the
+worker or the task failed to generate a specific artifact that it
+would otherwise have uploaded. For example docker-worker will upload an
+error artifact, if the file it was supposed to upload doesn't exist or
+turns out to be a directory. Clients requesting an error artifact will
+get a `424` (Failed Dependency) response. This is mainly designed to
+ensure that dependent tasks can distinguish between artifacts that were
+supposed to be generated and artifacts for which the name is misspelled.
+
+**Artifact immutability**, generally speaking you cannot overwrite an
+artifact when created. But if you repeat the request with the same
+properties the request will succeed as the operation is idempotent.
+This is useful if you need to refresh a signed URL while uploading.
+Do not abuse this to overwrite artifacts created by another entity!
+Such as worker-host overwriting artifact created by worker-code.
+
+As a special case the `url` property on _reference artifacts_ can be
+updated. You should only use this to update the `url` property for
+reference artifacts your process has created.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+ * `runId`
+ * `name`
+
+Required [input schema](v1/post-artifact-request.json#)
+
+Required [output schema](v1/post-artifact-response.json#)
+
+```python
+# Sync calls
+queue.createArtifact(taskId, runId, name, payload) # -> result
+queue.createArtifact(payload, taskId='value', runId='value', name='value') # -> result
+# Async call
+await asyncQueue.createArtifact(taskId, runId, name, payload) # -> result
+await asyncQueue.createArtifact(payload, taskId='value', runId='value', name='value') # -> result
+```
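+
+As a concrete illustration, a `reference` artifact (the simplest storage type) could be
+created as below; the URL is a placeholder and `post-artifact-request.json#` governs the
+exact fields required for each storage type.
+
+```python
+import taskcluster
+
+queue.createArtifact(taskId, runId, 'public/logs/external.log', {
+    'storageType': 'reference',
+    'contentType': 'text/plain',
+    'url': 'https://example.com/external.log',     # placeholder external location
+    'expires': taskcluster.fromNowJSON('30 days'),
+})
+```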
+
+#### Complete Artifact
+This endpoint finalises an upload done through the blob `storageType`.
+The queue will ensure that the task/run is still allowing artifacts
+to be uploaded. For single-part S3 blob artifacts, this endpoint
+will simply ensure the artifact is present in S3. For multipart S3
+artifacts, the endpoint will perform the commit step of the multipart
+upload flow. As the final step for both multi and single part artifacts,
+the `present` entity field will be set to `true` to reflect that the
+artifact is now present and a message published to pulse. NOTE: This
+endpoint *must* be called for all artifacts of storageType 'blob'
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+ * `runId`
+ * `name`
+
+Required [input schema](v1/put-artifact-request.json#)
+
+```python
+# Sync calls
+queue.completeArtifact(taskId, runId, name, payload) # -> None
+queue.completeArtifact(payload, taskId='value', runId='value', name='value') # -> None
+# Async call
+await asyncQueue.completeArtifact(taskId, runId, name, payload) # -> None
+await asyncQueue.completeArtifact(payload, taskId='value', runId='value', name='value') # -> None
+```
+
+#### Get Artifact from Run
+Get artifact by `<name>` from a specific run.
+
+**Public Artifacts**, in order to get an artifact you need the scope
+`queue:get-artifact:<name>`, where `<name>` is the name of the artifact.
+But if the artifact `name` starts with `public/`, authentication and
+authorization is not necessary to fetch the artifact.
+
+**API Clients**, this method will redirect you to the artifact, if it is
+stored externally. Either way, the response may not be JSON. So API
+client users might want to generate a signed URL for this end-point and
+use that URL with an HTTP client that can handle responses correctly.
+
+**Downloading artifacts**
+There are some special considerations for those http clients which download
+artifacts. This api endpoint is designed to be compatible with an HTTP 1.1
+compliant client, but has extra features to ensure the download is valid.
+It is strongly recommended that consumers use either taskcluster-lib-artifact (JS),
+taskcluster-lib-artifact-go (Go) or the CLI written in Go to interact with
+artifacts.
+
+In order to download an artifact the following must be done:
+
+1. Obtain queue url. Building a signed url with a taskcluster client is
+recommended
+1. Make a GET request which does not follow redirects
+1. In all cases, if specified, the
+x-taskcluster-location-{content,transfer}-{sha256,length} values must be
+validated to be equal to the Content-Length and Sha256 checksum of the
+final artifact downloaded, as well as any intermediate redirects
+1. If this response is a 500-series error, retry using an exponential
+backoff. No more than 5 retries should be attempted
+1. If this response is a 400-series error, treat it appropriately for
+your context. This might be an error in responding to this request or
+an Error storage type body. This request should not be retried.
+1. If this response is a 200-series response, the response body is the artifact.
+If the x-taskcluster-location-{content,transfer}-{sha256,length} and
+x-taskcluster-location-content-encoding are specified, they should match
+this response body
+1. If the response type is a 300-series redirect, the artifact will be at the
+location specified by the `Location` header. There are multiple artifact storage
+types which use a 300-series redirect.
+1. For all redirects followed, the user must verify that the content-sha256, content-length,
+transfer-sha256, transfer-length and content-encoding match every further request. The final
+artifact must also be validated against the values specified in the original queue response
+1. Caching of requests with an x-taskcluster-artifact-storage-type value of `reference`
+must not occur
+1. A request which has x-taskcluster-artifact-storage-type value of `blob` and does not
+have x-taskcluster-location-content-sha256 or x-taskcluster-location-content-length
+must be treated as an error
+
+**Headers**
+The following important headers are set on the response to this method:
+
+* location: the url of the artifact if a redirect is to be performed
+* x-taskcluster-artifact-storage-type: the storage type. Example: blob, s3, error
+
+The following important headers are set on responses to this method for Blob artifacts
+
+* x-taskcluster-location-content-sha256: the SHA256 of the artifact
+*after* any content-encoding is undone. Sha256 is hex encoded (e.g. [0-9A-Fa-f]{64})
+* x-taskcluster-location-content-length: the number of bytes *after* any content-encoding
+is undone
+* x-taskcluster-location-transfer-sha256: the SHA256 of the artifact
+*before* any content-encoding is undone. This is the SHA256 of what is sent over
+the wire. Sha256 is hex encoded (e.g. [0-9A-Fa-f]{64})
+* x-taskcluster-location-transfer-length: the number of bytes *before* any content-encoding
+is undone, i.e. the number of bytes sent over the wire
+* x-taskcluster-location-content-encoding: the content-encoding used. It will either
+be `gzip` or `identity` right now. This is hardcoded to a value set when the artifact
+was created and no content-negotiation occurs
+* x-taskcluster-location-content-type: the content-type of the artifact
+
+**Caching**, artifacts may be cached in data centers closer to the
+workers in order to reduce bandwidth costs. This can lead to longer
+response times. Caching can be skipped by setting the header
+`x-taskcluster-skip-cache: true`, this should only be used for resources
+where request volume is known to be low, and caching not useful.
+(This feature may be disabled in the future, use is sparingly!)
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+ * `runId`
+ * `name`
+
+```python
+# Sync calls
+queue.getArtifact(taskId, runId, name) # -> None
+queue.getArtifact(taskId='value', runId='value', name='value') # -> None
+# Async call
+await asyncQueue.getArtifact(taskId, runId, name) # -> None
+await asyncQueue.getArtifact(taskId='value', runId='value', name='value') # -> None
+```
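+
+The libraries recommended above implement the full protocol. As a rough illustration of
+the first few steps only, the sketch below builds a signed URL, makes a request without
+following redirects, and checks the blob checksum header against the downloaded bytes.
+It assumes the client exposes `buildSignedUrl` and uses the `requests` library;
+production code should follow every rule in the list above.
+
+```python
+import hashlib
+
+import requests
+
+url = queue.buildSignedUrl('getArtifact', taskId, runId, 'public/build/target.tar.gz')
+
+first = requests.get(url, allow_redirects=False)
+expected_sha256 = first.headers.get('x-taskcluster-location-content-sha256')
+
+if first.is_redirect:
+    # Follow the redirect manually so each hop can be inspected.
+    response = requests.get(first.headers['Location'])
+else:
+    response = first
+response.raise_for_status()
+
+if expected_sha256:
+    actual = hashlib.sha256(response.content).hexdigest()
+    if actual != expected_sha256:
+        raise ValueError('artifact sha256 mismatch')
+```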
+
+#### Get Artifact from Latest Run
+Get artifact by `<name>` from the last run of a task.
+
+**Public Artifacts**, in order to get an artifact you need the scope
+`queue:get-artifact:<name>`, where `<name>` is the name of the artifact.
+But if the artifact `name` starts with `public/`, authentication and
+authorization is not necessary to fetch the artifact.
+
+**API Clients**, this method will redirect you to the artifact, if it is
+stored externally. Either way, the response may not be JSON. So API
+client users might want to generate a signed URL for this end-point and
+use that URL with a normal HTTP client.
+
+**Remark**, this end-point is slightly slower than
+`queue.getArtifact`, so consider that if you already know the `runId` of
+the latest run. Otherwise, just use the most convenient API end-point.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+ * `name`
+
+```python
+# Sync calls
+queue.getLatestArtifact(taskId, name) # -> None
+queue.getLatestArtifact(taskId='value', name='value') # -> None
+# Async call
+await asyncQueue.getLatestArtifact(taskId, name) # -> None
+await asyncQueue.getLatestArtifact(taskId='value', name='value') # -> None
+```
+
+#### Get Artifacts from Run
+Returns a list of artifacts and associated meta-data for a given run.
+
+As a task may have many artifacts paging may be necessary. If this
+end-point returns a `continuationToken`, you should call the end-point
+again with the `continuationToken` as the query-string option:
+`continuationToken`.
+
+By default this end-point will list up to 1000 artifacts in a single page;
+you may limit this with the query-string parameter `limit`.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+ * `runId`
+
+Required [output schema](v1/list-artifacts-response.json#)
+
+```python
+# Sync calls
+queue.listArtifacts(taskId, runId) # -> result
+queue.listArtifacts(taskId='value', runId='value') # -> result
+# Async call
+await asyncQueue.listArtifacts(taskId, runId) # -> result
+await asyncQueue.listArtifacts(taskId='value', runId='value') # -> result
+```
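+
+The continuation-token paging described above can also be driven by hand. The
+sketch below is illustrative only: it assumes query-string options can be
+passed through a `query` keyword argument and that the response carries an
+`artifacts` list per the output schema; the task ID is a placeholder.
+
+```python
+# Illustrative sketch: page through all artifacts of a run.
+artifacts = []
+query = {'limit': 200}  # assumed query-string options
+while True:
+    result = queue.listArtifacts('EXAMPLE_TASK_ID', '0', query=query)
+    artifacts.extend(result.get('artifacts', []))
+    token = result.get('continuationToken')
+    if not token:
+        break
+    query['continuationToken'] = token
+print('total artifacts:', len(artifacts))
+```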
+
+#### Get Artifacts from Latest Run
+Returns a list of artifacts and associated meta-data for the latest run
+from the given task.
+
+As a task may have many artifacts, paging may be necessary. If this
+end-point returns a `continuationToken`, you should call the end-point
+again with the `continuationToken` as the query-string option
+`continuationToken`.
+
+By default this end-point will list up to 1000 artifacts in a single page;
+you may limit this with the query-string parameter `limit`.
+
+
+
+Takes the following arguments:
+
+ * `taskId`
+
+Required [output schema](v1/list-artifacts-response.json#)
+
+```python
+# Sync calls
+queue.listLatestArtifacts(taskId) # -> result
+queue.listLatestArtifacts(taskId='value') # -> result
+# Async call
+await asyncQueue.listLatestArtifacts(taskId) # -> result
+await asyncQueue.listLatestArtifacts(taskId='value') # -> result
+```
+
+#### Get a list of all active provisioners
+Get all active provisioners.
+
+The term "provisioner" is taken broadly to mean anything with a provisionerId.
+This does not necessarily mean there is an associated service performing any
+provisioning activity.
+
+The response is paged. If this end-point returns a `continuationToken`, you
+should call the end-point again with the `continuationToken` as a query-string
+option. By default this end-point will list up to 1000 provisioners in a single
+page. You may limit this with the query-string parameter `limit`.
+
+
+Required [output schema](v1/list-provisioners-response.json#)
+
+```python
+# Sync calls
+queue.listProvisioners() # -> result
+# Async call
+await asyncQueue.listProvisioners() # -> result
+```
+
+#### Get an active provisioner
+Get an active provisioner.
+
+The term "provisioner" is taken broadly to mean anything with a provisionerId.
+This does not necessarily mean there is an associated service performing any
+provisioning activity.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+
+Required [output schema](v1/provisioner-response.json#)
+
+```python
+# Sync calls
+queue.getProvisioner(provisionerId) # -> result
+queue.getProvisioner(provisionerId='value') # -> result
+# Async call
+await asyncQueue.getProvisioner(provisionerId) # -> result
+await asyncQueue.getProvisioner(provisionerId='value') # -> result
+```
+
+#### Update a provisioner
+Declare a provisioner, supplying some details about it.
+
+`declareProvisioner` allows updating one or more properties of a provisioner as long as the required scopes are
+possessed. For example, a request to update the `aws-provisioner-v1`
+provisioner with a body `{description: 'This provisioner is great'}` would require you to have the scope
+`queue:declare-provisioner:aws-provisioner-v1#description`.
+
+The term "provisioner" is taken broadly to mean anything with a provisionerId.
+This does not necessarily mean there is an associated service performing any
+provisioning activity.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+
+Required [input schema](v1/update-provisioner-request.json#)
+
+Required [output schema](v1/provisioner-response.json#)
+
+```python
+# Sync calls
+queue.declareProvisioner(provisionerId, payload) # -> result
+queue.declareProvisioner(payload, provisionerId='value') # -> result
+# Async call
+await asyncQueue.declareProvisioner(provisionerId, payload) # -> result
+await asyncQueue.declareProvisioner(payload, provisionerId='value') # -> result
+```
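+
+For example, the description-only update mentioned above might look like the
+sketch below; the `description` key comes from the example in the prose, and
+the provisioner id is only illustrative.
+
+```python
+# Illustrative sketch: update only the description of a provisioner.
+# Requires the scope queue:declare-provisioner:aws-provisioner-v1#description.
+payload = {'description': 'This provisioner is great'}
+result = queue.declareProvisioner('aws-provisioner-v1', payload)
+```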
+
+#### Get Number of Pending Tasks
+Get an approximate number of pending tasks for the given `provisionerId`
+and `workerType`.
+
+The underlying Azure Storage Queues only promise to give us an estimate.
+Furthermore, we cache the result in memory for 20 seconds, so consumers
+should by no means expect this to be an accurate number.
+It is, however, a solid estimate of the number of pending tasks.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+ * `workerType`
+
+Required [output schema](v1/pending-tasks-response.json#)
+
+```python
+# Sync calls
+queue.pendingTasks(provisionerId, workerType) # -> result
+queue.pendingTasks(provisionerId='value', workerType='value') # -> result
+# Async call
+await asyncQueue.pendingTasks(provisionerId, workerType) # -> result
+await asyncQueue.pendingTasks(provisionerId='value', workerType='value') # -> result
+```
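+
+A small usage sketch follows; the `pendingTasks` field name is assumed from
+the pending-tasks output schema, and the provisioner and worker-type values
+are placeholders.
+
+```python
+# Illustrative sketch: read the (approximate) pending count for a worker type.
+result = queue.pendingTasks('aws-provisioner-v1', 'gecko-b-1-w2008')
+print('pending (approximate):', result.get('pendingTasks'))  # assumed field
+```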
+
+#### Get a list of all active worker-types
+Get all active worker-types for the given provisioner.
+
+The response is paged. If this end-point returns a `continuationToken`, you
+should call the end-point again with the `continuationToken` as a query-string
+option. By default this end-point will list up to 1000 worker-types in a single
+page. You may limit this with the query-string parameter `limit`.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+
+Required [output schema](v1/list-workertypes-response.json#)
+
+```python
+# Sync calls
+queue.listWorkerTypes(provisionerId) # -> result
+queue.listWorkerTypes(provisionerId='value') # -> result
+# Async call
+await asyncQueue.listWorkerTypes(provisionerId) # -> result
+await asyncQueue.listWorkerTypes(provisionerId='value') # -> result
+```
+
+#### Get a worker-type
+Get a worker-type from a provisioner.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+ * `workerType`
+
+Required [output schema](v1/workertype-response.json#)
+
+```python
+# Sync calls
+queue.getWorkerType(provisionerId, workerType) # -> result
+queue.getWorkerType(provisionerId='value', workerType='value') # -> result
+# Async call
+await asyncQueue.getWorkerType(provisionerId, workerType) # -> result
+await asyncQueue.getWorkerType(provisionerId='value', workerType='value') # -> result
+```
+
+#### Update a worker-type
+Declare a workerType, supplying some details about it.
+
+`declareWorkerType` allows updating one or more properties of a worker-type as long as the required scopes are
+possessed. For example, a request to update the `gecko-b-1-w2008` worker-type within the `aws-provisioner-v1`
+provisioner with a body `{description: 'This worker type is great'}` would require you to have the scope
+`queue:declare-worker-type:aws-provisioner-v1/gecko-b-1-w2008#description`.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+ * `workerType`
+
+Required [input schema](v1/update-workertype-request.json#)
+
+Required [output schema](v1/workertype-response.json#)
+
+```python
+# Sync calls
+queue.declareWorkerType(provisionerId, workerType, payload) # -> result
+queue.declareWorkerType(payload, provisionerId='value', workerType='value') # -> result
+# Async call
+await asyncQueue.declareWorkerType(provisionerId, workerType, payload) # -> result
+await asyncQueue.declareWorkerType(payload, provisionerId='value', workerType='value') # -> result
+```
+
+#### Get a list of all active workers of a workerType
+Get a list of all active workers of a workerType.
+
+`listWorkers` allows a response to be filtered by quarantined and non-quarantined workers.
+To filter the query, you should call the end-point with `quarantined` as a query-string option with a
+true or false value.
+
+The response is paged. If this end-point returns a `continuationToken`, you
+should call the end-point again with the `continuationToken` as a query-string
+option. By default this end-point will list up to 1000 workers in a single
+page. You may limit this with the query-string parameter `limit`.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+ * `workerType`
+
+Required [output schema](v1/list-workers-response.json#)
+
+```python
+# Sync calls
+queue.listWorkers(provisionerId, workerType) # -> result
+queue.listWorkers(provisionerId='value', workerType='value') # -> result
+# Async call
+await asyncQueue.listWorkers(provisionerId, workerType) # -> result
+await asyncQueue.listWorkers(provisionerId='value', workerType='value') # -> result
+```
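+
+A hedged sketch of the `quarantined` filter follows; it assumes query-string
+options can be passed via a `query` keyword argument and that the response
+carries a `workers` list per the output schema. Identifiers are placeholders.
+
+```python
+# Illustrative sketch: list only quarantined workers of a worker type.
+result = queue.listWorkers('aws-provisioner-v1', 'gecko-b-1-w2008',
+                           query={'quarantined': 'true'})
+for worker in result.get('workers', []):
+    print(worker.get('workerGroup'), worker.get('workerId'))
+```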
+
+#### Get a worker from a worker-type
+Get a worker from a worker-type.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+ * `workerType`
+ * `workerGroup`
+ * `workerId`
+
+Required [output schema](v1/worker-response.json#)
+
+```python
+# Sync calls
+queue.getWorker(provisionerId, workerType, workerGroup, workerId) # -> result
+queue.getWorker(provisionerId='value', workerType='value', workerGroup='value', workerId='value') # -> result
+# Async call
+await asyncQueue.getWorker(provisionerId, workerType, workerGroup, workerId) # -> result
+await asyncQueue.getWorker(provisionerId='value', workerType='value', workerGroup='value', workerId='value') # -> result
+```
+
+#### Quarantine a worker
+Quarantine a worker.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+ * `workerType`
+ * `workerGroup`
+ * `workerId`
+
+Required [input schema](v1/quarantine-worker-request.json#)
+
+Required [output schema](v1/worker-response.json#)
+
+```python
+# Sync calls
+queue.quarantineWorker(provisionerId, workerType, workerGroup, workerId, payload) # -> result
+queue.quarantineWorker(payload, provisionerId='value', workerType='value', workerGroup='value', workerId='value') # -> result
+# Async call
+await asyncQueue.quarantineWorker(provisionerId, workerType, workerGroup, workerId, payload) # -> result
+await asyncQueue.quarantineWorker(payload, provisionerId='value', workerType='value', workerGroup='value', workerId='value') # -> result
+```
+
+#### Declare a worker
+Declare a worker, supplying some details about it.
+
+`declareWorker` allows updating one or more properties of a worker as long as the required scopes are
+possessed.
+
+
+
+Takes the following arguments:
+
+ * `provisionerId`
+ * `workerType`
+ * `workerGroup`
+ * `workerId`
+
+Required [input schema](v1/update-worker-request.json#)
+
+Required [output schema](v1/worker-response.json#)
+
+```python
+# Sync calls
+queue.declareWorker(provisionerId, workerType, workerGroup, workerId, payload) # -> result
+queue.declareWorker(payload, provisionerId='value', workerType='value', workerGroup='value', workerId='value') # -> result
+# Async call
+await asyncQueue.declareWorker(provisionerId, workerType, workerGroup, workerId, payload) # -> result
+await asyncQueue.declareWorker(payload, provisionerId='value', workerType='value', workerGroup='value', workerId='value') # -> result
+```
+
+
+
+
+### Exchanges in `taskcluster.QueueEvents`
+```python
+# Create QueueEvents client instance
+import taskcluster
+queueEvents = taskcluster.QueueEvents(options)
+```
+The queue, typically available at `queue.taskcluster.net`, is responsible
+for accepting tasks and tracking their state as they are executed by
+workers, in order to ensure they are eventually resolved.
+
+This document describes AMQP exchanges offered by the queue, which allow
+third-party listeners to monitor tasks as they progress to resolution.
+These exchanges target the following audience:
+ * Schedulers, who take action after tasks are completed,
+ * Workers, who want to listen for new or canceled tasks (optional),
+ * Tools, that want to update their view as tasks progress.
+
+You'll notice that all the exchanges in this document share the same
+routing key pattern. This makes it very easy to bind to all messages
+about a certain kind of task.
+
+**Task specific routes**, a task can define task-specific routes using
+the `task.routes` property. See the task creation documentation for details
+on the permissions required to provide task-specific routes. If a task has
+the entry `'notify.by-email'` as a task-specific route defined in
+`task.routes`, all messages about this task will be CC'ed with the
+routing-key `'route.notify.by-email'`.
+
+These routes will always be prefixed `route.`, so they cannot interfere
+with the _primary_ routing key as documented here. Notice that the
+_primary_ routing key is always prefixed `primary.`. This is ensured
+in the routing key reference, so API clients will do this automatically.
+
+Please note that, the way RabbitMQ works, the message will only arrive
+in your queue once, even if you have bound to the exchange with
+multiple routing key patterns that match more than one of the CC'ed
+routing keys.
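+
+As a loose illustration (not part of the generated reference), the exchange
+helpers listed below return a binding description, assumed here to carry
+`exchange` and `routingKeyPattern` entries, which can be handed to an
+AMQP/Pulse consumer; unspecified routing-key tokens become `*` or `#`. The
+task-group id is a placeholder, and `options` is the constructor options dict
+from the example above.
+
+```python
+# Illustrative sketch: build a binding for completed tasks of one task group.
+import taskcluster
+
+queueEvents = taskcluster.QueueEvents(options)
+binding = queueEvents.taskCompleted(taskGroupId='EXAMPLE_TASK_GROUP_ID')
+print(binding['exchange'])           # exchange to bind to
+print(binding['routingKeyPattern'])  # '*'/'#' fill the unspecified keys
+```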
+
+**Delivery guarantees**, most operations on the queue are idempotent,
+which means that if repeated with the same arguments then the requests
+will ensure completion of the operation and return the same response.
+This is useful if the server crashes or the TCP connection breaks, but
+when re-executing an idempotent operation, the queue will also resend
+any related AMQP messages. Hence, messages may be repeated.
+
+This shouldn't be much of a problem, as the best you can achieve using
+confirm messages with AMQP is at-least-once delivery semantics. Hence,
+this only prevents you from obtaining at-most-once delivery semantics.
+
+**Remark**, some messages generated by timeouts may be dropped if the
+server crashes at the wrong time. Ideally, we'll address this in the
+future. For now we suggest you ignore this corner case, and notify us
+if it is of concern to you.
+#### Task Defined Messages
+ * `queueEvents.taskDefined(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key.
+ * `taskId` is required Description: `taskId` for the task this message concerns
+ * `runId` Description: `runId` of latest run for the task, `_` if no run exists for the task.
+ * `workerGroup` Description: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+ * `workerId` Description: `workerId` of latest run for the task, `_` if no run exists for the task.
+ * `provisionerId` is required Description: `provisionerId` this task is targeted at.
+ * `workerType` is required Description: `workerType` this task must run on.
+ * `schedulerId` is required Description: `schedulerId` this task was created by.
+ * `taskGroupId` is required Description: `taskGroupId` this task was created in.
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Task Pending Messages
+ * `queueEvents.taskPending(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key.
+ * `taskId` is required Description: `taskId` for the task this message concerns
+ * `runId` is required Description: `runId` of latest run for the task, `_` if no run exists for the task.
+ * `workerGroup` Description: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+ * `workerId` Description: `workerId` of latest run for the task, `_` if no run exists for the task.
+ * `provisionerId` is required Description: `provisionerId` this task is targeted at.
+ * `workerType` is required Description: `workerType` this task must run on.
+ * `schedulerId` is required Description: `schedulerId` this task was created by.
+ * `taskGroupId` is required Description: `taskGroupId` this task was created in.
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Task Running Messages
+ * `queueEvents.taskRunning(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key.
+ * `taskId` is required Description: `taskId` for the task this message concerns
+ * `runId` is required Description: `runId` of latest run for the task, `_` if no run exists for the task.
+ * `workerGroup` is required Description: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+ * `workerId` is required Description: `workerId` of latest run for the task, `_` if no run exists for the task.
+ * `provisionerId` is required Description: `provisionerId` this task is targeted at.
+ * `workerType` is required Description: `workerType` this task must run on.
+ * `schedulerId` is required Description: `schedulerId` this task was created by.
+ * `taskGroupId` is required Description: `taskGroupId` this task was created in.
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Artifact Creation Messages
+ * `queueEvents.artifactCreated(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key.
+ * `taskId` is required Description: `taskId` for the task this message concerns
+ * `runId` is required Description: `runId` of latest run for the task, `_` if no run exists for the task.
+ * `workerGroup` is required Description: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+ * `workerId` is required Description: `workerId` of latest run for the task, `_` if no run exists for the task.
+ * `provisionerId` is required Description: `provisionerId` this task is targeted at.
+ * `workerType` is required Description: `workerType` this task must run on.
+ * `schedulerId` is required Description: `schedulerId` this task was created by.
+ * `taskGroupId` is required Description: `taskGroupId` this task was created in.
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Task Completed Messages
+ * `queueEvents.taskCompleted(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key.
+ * `taskId` is required Description: `taskId` for the task this message concerns
+ * `runId` is required Description: `runId` of latest run for the task, `_` if no run exists for the task.
+ * `workerGroup` is required Description: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+ * `workerId` is required Description: `workerId` of latest run for the task, `_` if no run exists for the task.
+ * `provisionerId` is required Description: `provisionerId` this task is targeted at.
+ * `workerType` is required Description: `workerType` this task must run on.
+ * `schedulerId` is required Description: `schedulerId` this task was created by.
+ * `taskGroupId` is required Description: `taskGroupId` this task was created in.
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Task Failed Messages
+ * `queueEvents.taskFailed(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key.
+ * `taskId` is required Description: `taskId` for the task this message concerns
+ * `runId` Description: `runId` of latest run for the task, `_` if no run exists for the task.
+ * `workerGroup` Description: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+ * `workerId` Description: `workerId` of latest run for the task, `_` if no run exists for the task.
+ * `provisionerId` is required Description: `provisionerId` this task is targeted at.
+ * `workerType` is required Description: `workerType` this task must run on.
+ * `schedulerId` is required Description: `schedulerId` this task was created by.
+ * `taskGroupId` is required Description: `taskGroupId` this task was created in.
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Task Exception Messages
+ * `queueEvents.taskException(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key.
+ * `taskId` is required Description: `taskId` for the task this message concerns
+ * `runId` Description: `runId` of latest run for the task, `_` if no run exists for the task.
+ * `workerGroup` Description: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+ * `workerId` Description: `workerId` of latest run for the task, `_` if no run exists for the task.
+ * `provisionerId` is required Description: `provisionerId` this task is targeted at.
+ * `workerType` is required Description: `workerType` this task must run on.
+ * `schedulerId` is required Description: `schedulerId` this task was created by.
+ * `taskGroupId` is required Description: `taskGroupId` this task was created in.
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+#### Task Group Resolved Messages
+ * `queueEvents.taskGroupResolved(routingKeyPattern) -> routingKey`
+ * `routingKeyKind` is constant of `primary` is required Description: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key.
+ * `taskGroupId` is required Description: `taskGroupId` for the task-group this message concerns
+ * `schedulerId` is required Description: `schedulerId` for the task-group this message concerns
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+
+
+
+### Methods in `taskcluster.Secrets`
+```python
+import asyncio # Only for async
+# Create Secrets client instance
+import taskcluster
+import taskcluster.aio
+
+secrets = taskcluster.Secrets(options)
+# Below only for async instances, assume already in coroutine
+loop = asyncio.get_event_loop()
+session = taskcluster.aio.createSession(loop=loop)
+asyncSecrets = taskcluster.aio.Secrets(options, session=session)
+```
+The secrets service provides a simple key/value store for small bits of secret
+data. Access is limited by scopes, so values can be considered secret from
+those who do not have the relevant scopes.
+
+Secrets also have an expiration date, and once a secret has expired it can no
+longer be read. This is useful for short-term secrets such as a temporary
+service credential or a one-time signing key.
+#### Ping Server
+Respond without doing anything.
+This endpoint is used to check that the service is up.
+
+
+```python
+# Sync calls
+secrets.ping() # -> None
+# Async call
+await asyncSecrets.ping() # -> None
+```
+
+#### Set Secret
+Set the secret associated with some key. If the secret already exists, it is
+updated instead.
+
+
+
+Takes the following arguments:
+
+ * `name`
+
+Required [input schema](v1/secret.json#)
+
+```python
+# Sync calls
+secrets.set(name, payload) # -> None
+secrets.set(payload, name='value') # -> None
+# Async call
+await asyncSecrets.set(name, payload) # -> None
+await asyncSecrets.set(payload, name='value') # -> None
+```
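+
+A hedged sketch of such a payload follows; the `secret` and `expires` field
+names are assumed from the referenced secret.json schema, and `fromNowJSON`
+is assumed to be the client helper that produces an ISO timestamp. The secret
+name and value are placeholders.
+
+```python
+# Illustrative sketch: store a short-lived secret under a namespaced key.
+import taskcluster
+
+payload = {
+    'secret': {'api_key': 'EXAMPLE_VALUE'},        # arbitrary JSON blob
+    'expires': taskcluster.fromNowJSON('1 hour'),  # assumed helper
+}
+secrets.set('project/example/ci-token', payload)
+```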
+
+#### Delete Secret
+Delete the secret associated with some key.
+
+
+
+Takes the following arguments:
+
+ * `name`
+
+```python
+# Sync calls
+secrets.remove(name) # -> None
+secrets.remove(name='value') # -> None
+# Async call
+await asyncSecrets.remove(name) # -> None
+await asyncSecrets.remove(name='value') # -> None
+```
+
+#### Read Secret
+Read the secret associated with some key. If the secret has recently
+expired, the response code 410 is returned. If the caller lacks the
+scope necessary to get the secret, the call will fail with a 403 code
+regardless of whether the secret exists.
+
+
+
+Takes the following arguments:
+
+ * `name`
+
+Required [output schema](v1/secret.json#)
+
+```python
+# Sync calls
+secrets.get(name) # -> result
+secrets.get(name='value') # -> result
+# Async call
+await asyncSecrets.get(name) # -> result
+await asyncSecrets.get(name='value') # -> result
+```
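+
+Since an expired secret yields a 410 while other failures also raise, callers
+may want to distinguish the cases. A rough sketch, assuming the client raises
+`TaskclusterRestFailure` with a `status_code` attribute for non-2xx responses
+(as the client code later in this patch does); the secret name is a
+placeholder.
+
+```python
+# Illustrative sketch: read a secret, treating expiry separately.
+import taskcluster.exceptions as exceptions
+
+try:
+    value = secrets.get('project/example/ci-token')['secret']
+except exceptions.TaskclusterRestFailure as err:
+    if getattr(err, 'status_code', None) == 410:
+        print('secret has expired')
+    else:
+        raise
+```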
+
+#### List Secrets
+List the names of all secrets.
+
+By default this end-point will try to return up to 1000 secret names in one
+request. But it **may return fewer**, even if more secrets are available.
+It may also return a `continuationToken` even though there are no more
+results. However, you can only be sure to have seen all results if you
+keep calling `list` with the last `continuationToken` until you
+get a result without a `continuationToken`.
+
+If you are not interested in listing all the secrets at once, you may
+use the query-string option `limit` to return fewer.
+
+
+Required [output schema](v1/secret-list.json#)
+
+```python
+# Sync calls
+secrets.list() # -> result
+# Async call
+await asyncSecrets.list() # -> result
+```
+
+
+
+
+### Exchanges in `taskcluster.TreeherderEvents`
+```python
+# Create TreeherderEvents client instance
+import taskcluster
+treeherderEvents = taskcluster.TreeherderEvents(options)
+```
+The taskcluster-treeherder service is responsible for processing
+task events published by TaskCluster Queue and producing job messages
+that are consumable by Treeherder.
+
+This exchange makes those job messages available to any queue that is
+attached to the exchange. This could be a production Treeherder instance,
+a local development environment, or a custom dashboard.
+#### Job Messages
+ * `treeherderEvents.jobs(routingKeyPattern) -> routingKey`
+ * `destination` is required Description: destination
+ * `project` is required Description: project
+ * `reserved` Description: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+
+
+
+<!-- END OF GENERATED DOCS -->
diff --git a/third_party/python/taskcluster/setup.cfg b/third_party/python/taskcluster/setup.cfg
new file mode 100644
index 0000000000..fcc4254b59
--- /dev/null
+++ b/third_party/python/taskcluster/setup.cfg
@@ -0,0 +1,8 @@
+[nosetests]
+verbosity = 1
+detailed-errors = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/taskcluster/setup.py b/third_party/python/taskcluster/setup.py
new file mode 100644
index 0000000000..c9c7ff4a22
--- /dev/null
+++ b/third_party/python/taskcluster/setup.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+
+from setuptools import setup
+from setuptools.command.test import test as TestCommand
+import sys
+
+# The VERSION variable is automagically changed
+# by release.sh. Make sure you understand how
+# that script works if you want to change this
+VERSION = '6.0.0'
+
+tests_require = [
+ 'nose==1.3.7',
+ 'nose-exclude==0.5.0',
+ 'httmock==1.2.6',
+ 'rednose==1.2.1',
+ 'mock==1.0.1',
+ 'setuptools-lint==0.3',
+ 'flake8==2.5.0',
+ 'psutil==2.1.3',
+ 'hypothesis==3.6.1',
+ 'tox==2.3.2',
+ 'coverage==4.1b2',
+ 'python-dateutil==2.6.0',
+]
+
+# requests has a policy of not breaking apis between major versions
+# http://docs.python-requests.org/en/latest/community/release-process/
+install_requires = [
+ 'requests>=2.4.3,<3',
+ 'mohawk>=0.3.4,<0.4',
+ 'slugid>=1.0.7,<2',
+ 'taskcluster-urls>=10.1.0,<12',
+ 'six>=1.10.0,<2',
+]
+
+# from http://testrun.org/tox/latest/example/basic.html
+class Tox(TestCommand):
+ user_options = [('tox-args=', 'a', "Arguments to pass to tox")]
+
+ def initialize_options(self):
+ TestCommand.initialize_options(self)
+ self.tox_args = None
+
+ def finalize_options(self):
+ TestCommand.finalize_options(self)
+ self.test_args = []
+ self.test_suite = True
+
+ def run_tests(self):
+ # import here, cause outside the eggs aren't loaded
+ import tox
+ import shlex
+ args = self.tox_args
+ if args:
+ args = shlex.split(self.tox_args)
+ errno = tox.cmdline(args=args)
+ sys.exit(errno)
+
+if sys.version_info.major == 2:
+ tests_require.extend([
+ 'subprocess32==3.2.6',
+ ])
+elif sys.version_info[:2] < (3, 5):
+ raise Exception('This library does not support Python 3 versions below 3.5')
+elif sys.version_info[:2] >= (3, 5):
+ install_requires.extend([
+ 'aiohttp>=2.0.0,<4',
+ 'async_timeout>=2.0.0,<4',
+ ])
+
+if __name__ == '__main__':
+ setup(
+ name='taskcluster',
+ version=VERSION,
+ description='Python client for Taskcluster',
+ author='John Ford',
+ author_email='jhford@mozilla.com',
+ url='https://github.com/taskcluster/taskcluster-client.py',
+ packages=['taskcluster', 'taskcluster.aio'],
+ install_requires=install_requires,
+ test_suite="nose.collector",
+ tests_require=tests_require,
+ cmdclass={'test': Tox},
+ zip_safe=False,
+ classifiers=['Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6'],
+ )
diff --git a/third_party/python/taskcluster/taskcluster/__init__.py b/third_party/python/taskcluster/taskcluster/__init__.py
new file mode 100644
index 0000000000..de2913a2a2
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/__init__.py
@@ -0,0 +1,17 @@
+""" Python client for Taskcluster """
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import logging
+import os
+from .client import createSession # NOQA
+from taskcluster.utils import * # NOQA
+from taskcluster.exceptions import * # NOQA
+from taskcluster._client_importer import * # NOQA
+
+log = logging.getLogger(__name__)
+
+if os.environ.get('DEBUG_TASKCLUSTER_CLIENT'):
+ log.setLevel(logging.DEBUG)
+ if len(log.handlers) == 0:
+ log.addHandler(logging.StreamHandler())
+log.addHandler(logging.NullHandler())
diff --git a/third_party/python/taskcluster/taskcluster/_client_importer.py b/third_party/python/taskcluster/taskcluster/_client_importer.py
new file mode 100644
index 0000000000..c32ba5cc6b
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/_client_importer.py
@@ -0,0 +1,18 @@
+from .auth import Auth # NOQA
+from .authevents import AuthEvents # NOQA
+from .awsprovisioner import AwsProvisioner # NOQA
+from .awsprovisionerevents import AwsProvisionerEvents # NOQA
+from .ec2manager import EC2Manager # NOQA
+from .github import Github # NOQA
+from .githubevents import GithubEvents # NOQA
+from .hooks import Hooks # NOQA
+from .index import Index # NOQA
+from .login import Login # NOQA
+from .notify import Notify # NOQA
+from .pulse import Pulse # NOQA
+from .purgecache import PurgeCache # NOQA
+from .purgecacheevents import PurgeCacheEvents # NOQA
+from .queue import Queue # NOQA
+from .queueevents import QueueEvents # NOQA
+from .secrets import Secrets # NOQA
+from .treeherderevents import TreeherderEvents # NOQA
diff --git a/third_party/python/taskcluster/taskcluster/aio/__init__.py b/third_party/python/taskcluster/taskcluster/aio/__init__.py
new file mode 100644
index 0000000000..6aa80526e1
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/__init__.py
@@ -0,0 +1,16 @@
+""" Python client for Taskcluster """
+
+import logging
+import os
+from .asyncclient import createSession # NOQA
+from taskcluster.utils import * # NOQA
+from taskcluster.exceptions import * # NOQA
+from ._client_importer import * # NOQA
+
+log = logging.getLogger(__name__)
+
+if os.environ.get('DEBUG_TASKCLUSTER_CLIENT'):
+ log.setLevel(logging.DEBUG)
+ if len(log.handlers) == 0:
+ log.addHandler(logging.StreamHandler())
+log.addHandler(logging.NullHandler())
diff --git a/third_party/python/taskcluster/taskcluster/aio/_client_importer.py b/third_party/python/taskcluster/taskcluster/aio/_client_importer.py
new file mode 100644
index 0000000000..c32ba5cc6b
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/_client_importer.py
@@ -0,0 +1,18 @@
+from .auth import Auth # NOQA
+from .authevents import AuthEvents # NOQA
+from .awsprovisioner import AwsProvisioner # NOQA
+from .awsprovisionerevents import AwsProvisionerEvents # NOQA
+from .ec2manager import EC2Manager # NOQA
+from .github import Github # NOQA
+from .githubevents import GithubEvents # NOQA
+from .hooks import Hooks # NOQA
+from .index import Index # NOQA
+from .login import Login # NOQA
+from .notify import Notify # NOQA
+from .pulse import Pulse # NOQA
+from .purgecache import PurgeCache # NOQA
+from .purgecacheevents import PurgeCacheEvents # NOQA
+from .queue import Queue # NOQA
+from .queueevents import QueueEvents # NOQA
+from .secrets import Secrets # NOQA
+from .treeherderevents import TreeherderEvents # NOQA
diff --git a/third_party/python/taskcluster/taskcluster/aio/asyncclient.py b/third_party/python/taskcluster/taskcluster/aio/asyncclient.py
new file mode 100644
index 0000000000..4cafd6f67b
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/asyncclient.py
@@ -0,0 +1,400 @@
+"""This module is used to interact with taskcluster rest apis"""
+
+from __future__ import absolute_import, division, print_function
+
+import os
+import logging
+import hashlib
+import hmac
+import datetime
+import calendar
+import six
+from six.moves import urllib
+
+import mohawk
+import mohawk.bewit
+import aiohttp
+import asyncio
+
+from .. import exceptions
+from .. import utils
+from ..client import BaseClient
+from . import asyncutils
+
+log = logging.getLogger(__name__)
+
+
+# Default configuration
+_defaultConfig = config = {
+ 'credentials': {
+ 'clientId': os.environ.get('TASKCLUSTER_CLIENT_ID'),
+ 'accessToken': os.environ.get('TASKCLUSTER_ACCESS_TOKEN'),
+ 'certificate': os.environ.get('TASKCLUSTER_CERTIFICATE'),
+ },
+ 'maxRetries': 5,
+ 'signedUrlExpiration': 15 * 60,
+}
+
+
+def createSession(*args, **kwargs):
+ """ Create a new aiohttp session. This passes through all positional and
+ keyword arguments to the asyncutils.createSession() constructor.
+
+ It's preferred to do something like
+
+ async with createSession(...) as session:
+ queue = Queue(session=session)
+ await queue.ping()
+
+ or
+
+ async with createSession(...) as session:
+ async with Queue(session=session) as queue:
+ await queue.ping()
+
+ in the client code.
+ """
+ return asyncutils.createSession(*args, **kwargs)
+
+
+class AsyncBaseClient(BaseClient):
+ """ Base Class for API Client Classes. Each individual Client class
+ needs to set up its own methods for REST endpoints and Topic Exchange
+ routing key patterns. The _makeApiCall() and _topicExchange() methods
+ help with this.
+ """
+
+ def __init__(self, *args, **kwargs):
+ super(AsyncBaseClient, self).__init__(*args, **kwargs)
+ self._implicitSession = False
+ if self.session is None:
+ self._implicitSession = True
+
+ def _createSession(self):
+ """ If self.session isn't set, don't create an implicit.
+
+ To avoid `session.close()` warnings at the end of tasks, and
+ various strongly-worded aiohttp warnings about using `async with`,
+ let's set `self.session` to `None` if no session is passed in to
+ `__init__`. The `asyncutils` functions will create a new session
+ per call in that case.
+ """
+ return None
+
+ async def _makeApiCall(self, entry, *args, **kwargs):
+ """ This function is used to dispatch calls to other functions
+ for a given API Reference entry"""
+
+ x = self._processArgs(entry, *args, **kwargs)
+ routeParams, payload, query, paginationHandler, paginationLimit = x
+ route = self._subArgsInRoute(entry, routeParams)
+
+ # TODO: Check for limit being in the Query of the api ref
+ if paginationLimit and 'limit' in entry.get('query', []):
+ query['limit'] = paginationLimit
+
+ if query:
+ _route = route + '?' + urllib.parse.urlencode(query)
+ else:
+ _route = route
+ response = await self._makeHttpRequest(entry['method'], _route, payload)
+
+ if paginationHandler:
+ paginationHandler(response)
+ while response.get('continuationToken'):
+ query['continuationToken'] = response['continuationToken']
+ _route = route + '?' + urllib.parse.urlencode(query)
+ response = await self._makeHttpRequest(entry['method'], _route, payload)
+ paginationHandler(response)
+ else:
+ return response
+
+ async def _makeHttpRequest(self, method, route, payload):
+ """ Make an HTTP Request for the API endpoint. This method wraps
+ the logic about doing failure retry and passes off the actual work
+ of doing an HTTP request to another method."""
+
+ url = self._constructUrl(route)
+ log.debug('Full URL used is: %s', url)
+
+ hawkExt = self.makeHawkExt()
+
+ # Serialize payload if given
+ if payload is not None:
+ payload = utils.dumpJson(payload)
+
+ # Do a loop of retries
+        retry = -1  # incremented at the top of the loop, so attempt 1 is retry 0
+ retries = self.options['maxRetries']
+ while retry < retries:
+ retry += 1
+ # if this isn't the first retry then we sleep
+ if retry > 0:
+                snooze = utils.calculateSleepTime(retry)
+                log.info('Sleeping %0.2f seconds for exponential backoff', snooze)
+                await asyncio.sleep(snooze)
+ # Construct header
+ if self._hasCredentials():
+ sender = mohawk.Sender(
+ credentials={
+ 'id': self.options['credentials']['clientId'],
+ 'key': self.options['credentials']['accessToken'],
+ 'algorithm': 'sha256',
+ },
+ ext=hawkExt if hawkExt else {},
+ url=url,
+ content=payload if payload else '',
+ content_type='application/json' if payload else '',
+ method=method,
+ )
+
+ headers = {'Authorization': sender.request_header}
+ else:
+ log.debug('Not using hawk!')
+ headers = {}
+ if payload:
+ # Set header for JSON if payload is given, note that we serialize
+ # outside this loop.
+ headers['Content-Type'] = 'application/json'
+
+ log.debug('Making attempt %d', retry)
+ try:
+ response = await asyncutils.makeSingleHttpRequest(
+ method, url, payload, headers, session=self.session
+ )
+ except aiohttp.ClientError as rerr:
+ if retry < retries:
+ log.warn('Retrying because of: %s' % rerr)
+ continue
+ # raise a connection exception
+ raise exceptions.TaskclusterConnectionError(
+ "Failed to establish connection",
+ superExc=rerr
+ )
+
+ status = response.status
+ if status == 204:
+ return None
+
+ # Catch retryable errors and go to the beginning of the loop
+ # to do the retry
+ if 500 <= status and status < 600 and retry < retries:
+ log.warn('Retrying because of a %s status code' % status)
+ continue
+
+ # Throw errors for non-retryable errors
+ if status < 200 or status >= 300:
+ # Parse messages from errors
+ data = {}
+ try:
+ data = await response.json()
+ except:
+ pass # Ignore JSON errors in error messages
+ # Find error message
+ message = "Unknown Server Error"
+ if isinstance(data, dict):
+ message = data.get('message')
+ else:
+ if status == 401:
+ message = "Authentication Error"
+ elif status == 500:
+ message = "Internal Server Error"
+ else:
+ message = "Unknown Server Error %s\n%s" % (str(status), str(data)[:1024])
+ # Raise TaskclusterAuthFailure if this is an auth issue
+ if status == 401:
+ raise exceptions.TaskclusterAuthFailure(
+ message,
+ status_code=status,
+ body=data,
+ superExc=None
+ )
+ # Raise TaskclusterRestFailure for all other issues
+ raise exceptions.TaskclusterRestFailure(
+ message,
+ status_code=status,
+ body=data,
+ superExc=None
+ )
+
+ # Try to load JSON
+ try:
+ await response.release()
+ return await response.json()
+ except (ValueError, aiohttp.client_exceptions.ContentTypeError):
+ return {"response": response}
+
+ # This code-path should be unreachable
+ assert False, "Error from last retry should have been raised!"
+
+ async def __aenter__(self):
+ if self._implicitSession and not self.session:
+ self.session = createSession()
+ return self
+
+ async def __aexit__(self, *args):
+ if self._implicitSession and self.session:
+ await self.session.close()
+ self.session = None
+
+
+def createApiClient(name, api):
+ api = api['reference']
+
+ attributes = dict(
+ name=name,
+ __doc__=api.get('description'),
+ classOptions={},
+ funcinfo={},
+ )
+
+ # apply a default for apiVersion; this can be removed when all services
+ # have apiVersion
+ if 'apiVersion' not in api:
+ api['apiVersion'] = 'v1'
+
+ copiedOptions = ('exchangePrefix',)
+ for opt in copiedOptions:
+ if opt in api:
+ attributes['classOptions'][opt] = api[opt]
+
+ copiedProperties = ('serviceName', 'apiVersion')
+ for opt in copiedProperties:
+ if opt in api:
+ attributes[opt] = api[opt]
+
+ for entry in api['entries']:
+ if entry['type'] == 'function':
+ def addApiCall(e):
+ async def apiCall(self, *args, **kwargs):
+ return await self._makeApiCall(e, *args, **kwargs)
+ return apiCall
+ f = addApiCall(entry)
+
+ docStr = "Call the %s api's %s method. " % (name, entry['name'])
+
+ if entry['args'] and len(entry['args']) > 0:
+ docStr += "This method takes:\n\n"
+ docStr += '\n'.join(['- ``%s``' % x for x in entry['args']])
+ docStr += '\n\n'
+ else:
+ docStr += "This method takes no arguments. "
+
+ if 'input' in entry:
+ docStr += "This method takes input ``%s``. " % entry['input']
+
+ if 'output' in entry:
+ docStr += "This method gives output ``%s``" % entry['output']
+
+ docStr += '\n\nThis method does a ``%s`` to ``%s``.' % (
+ entry['method'].upper(), entry['route'])
+
+ f.__doc__ = docStr
+ attributes['funcinfo'][entry['name']] = entry
+
+ elif entry['type'] == 'topic-exchange':
+ def addTopicExchange(e):
+ def topicExchange(self, *args, **kwargs):
+ return self._makeTopicExchange(e, *args, **kwargs)
+ return topicExchange
+
+ f = addTopicExchange(entry)
+
+ docStr = 'Generate a routing key pattern for the %s exchange. ' % entry['exchange']
+ docStr += 'This method takes a given routing key as a string or a '
+ docStr += 'dictionary. For each given dictionary key, the corresponding '
+ docStr += 'routing key token takes its value. For routing key tokens '
+ docStr += 'which are not specified by the dictionary, the * or # character '
+ docStr += 'is used depending on whether or not the key allows multiple words.\n\n'
+ docStr += 'This exchange takes the following keys:\n\n'
+ docStr += '\n'.join(['- ``%s``' % x['name'] for x in entry['routingKey']])
+
+ f.__doc__ = docStr
+
+ # Add whichever function we created
+ f.__name__ = str(entry['name'])
+ attributes[entry['name']] = f
+
+ return type(utils.toStr(name), (BaseClient,), attributes)
+
+
+def createTemporaryCredentials(clientId, accessToken, start, expiry, scopes, name=None):
+ """ Create a set of temporary credentials
+
+ Callers should not apply any clock skew; clock drift is accounted for by
+    the auth service.
+
+ clientId: the issuing clientId
+ accessToken: the issuer's accessToken
+    start: start time of credentials, as a datetime object
+    expiry: expiration time of credentials, as a datetime object
+ scopes: list of scopes granted
+ name: credential name (optional)
+
+ Returns a dictionary in the form:
+        { 'clientId': str, 'accessToken': str, 'certificate': str}
+ """
+
+ now = datetime.datetime.utcnow()
+    now = now - datetime.timedelta(minutes=10)  # Subtract 10 minutes to allow for clock drift
+
+ for scope in scopes:
+ if not isinstance(scope, six.string_types):
+ raise exceptions.TaskclusterFailure('Scope must be string')
+
+ # Credentials can only be valid for 31 days. I hope that
+ # this is validated on the server somehow...
+
+ if expiry - start > datetime.timedelta(days=31):
+ raise exceptions.TaskclusterFailure('Only 31 days allowed')
+
+ # We multiply times by 1000 because the auth service is JS and as a result
+ # uses milliseconds instead of seconds
+ cert = dict(
+ version=1,
+ scopes=scopes,
+ start=calendar.timegm(start.utctimetuple()) * 1000,
+ expiry=calendar.timegm(expiry.utctimetuple()) * 1000,
+ seed=utils.slugId() + utils.slugId(),
+ )
+
+ # if this is a named temporary credential, include the issuer in the certificate
+ if name:
+ cert['issuer'] = utils.toStr(clientId)
+
+ sig = ['version:' + utils.toStr(cert['version'])]
+ if name:
+ sig.extend([
+ 'clientId:' + utils.toStr(name),
+ 'issuer:' + utils.toStr(clientId),
+ ])
+ sig.extend([
+ 'seed:' + utils.toStr(cert['seed']),
+ 'start:' + utils.toStr(cert['start']),
+ 'expiry:' + utils.toStr(cert['expiry']),
+ 'scopes:'
+ ] + scopes)
+ sigStr = '\n'.join(sig).encode()
+
+ if isinstance(accessToken, six.text_type):
+ accessToken = accessToken.encode()
+ sig = hmac.new(accessToken, sigStr, hashlib.sha256).digest()
+
+ cert['signature'] = utils.encodeStringForB64Header(sig)
+
+ newToken = hmac.new(accessToken, cert['seed'], hashlib.sha256).digest()
+ newToken = utils.makeB64UrlSafe(utils.encodeStringForB64Header(newToken)).replace(b'=', b'')
+
+ return {
+ 'clientId': name or clientId,
+ 'accessToken': newToken,
+ 'certificate': utils.dumpJson(cert),
+ }
+
+
+__all__ = [
+ 'createTemporaryCredentials',
+ 'config',
+ 'BaseClient',
+ 'createApiClient',
+]
diff --git a/third_party/python/taskcluster/taskcluster/aio/asyncutils.py b/third_party/python/taskcluster/taskcluster/aio/asyncutils.py
new file mode 100644
index 0000000000..60c2cbae7b
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/asyncutils.py
@@ -0,0 +1,116 @@
+from __future__ import absolute_import, division, print_function
+import aiohttp
+import aiohttp.hdrs
+import asyncio
+import async_timeout
+import logging
+import os
+import six
+
+import taskcluster.utils as utils
+import taskcluster.exceptions as exceptions
+
+log = logging.getLogger(__name__)
+
+
+def createSession(*args, **kwargs):
+ return aiohttp.ClientSession(*args, **kwargs)
+
+
+# Useful information: https://www.blog.pythonlibrary.org/2016/07/26/python-3-an-intro-to-asyncio/
+async def makeHttpRequest(method, url, payload, headers, retries=utils.MAX_RETRIES, session=None):
+ """ Make an HTTP request and retry it until success, return request """
+ retry = -1
+ response = None
+ implicit = False
+ if session is None:
+ implicit = True
+ session = aiohttp.ClientSession()
+
+ def cleanup():
+ if implicit:
+ loop = asyncio.get_event_loop()
+ loop.run_until_complete(session.close())
+
+ try:
+ while True:
+ retry += 1
+ # if this isn't the first retry then we sleep
+ if retry > 0:
+ snooze = float(retry * retry) / 10.0
+ log.info('Sleeping %0.2f seconds for exponential backoff', snooze)
+ await asyncio.sleep(snooze)
+
+ # Seek payload to start, if it is a file
+ if hasattr(payload, 'seek'):
+ payload.seek(0)
+
+ log.debug('Making attempt %d', retry)
+ try:
+ with async_timeout.timeout(60):
+ response = await makeSingleHttpRequest(method, url, payload, headers, session)
+ except aiohttp.ClientError as rerr:
+ if retry < retries:
+ log.warn('Retrying because of: %s' % rerr)
+ continue
+ # raise a connection exception
+ raise rerr
+ except ValueError as rerr:
+ log.warn('ValueError from aiohttp: redirect to non-http or https')
+ raise rerr
+ except RuntimeError as rerr:
+ log.warn('RuntimeError from aiohttp: session closed')
+ raise rerr
+ # Handle non 2xx status code and retry if possible
+ status = response.status
+ if 500 <= status and status < 600 and retry < retries:
+ if retry < retries:
+ log.warn('Retrying because of: %d status' % status)
+ continue
+ else:
+ raise exceptions.TaskclusterRestFailure("Unknown Server Error", superExc=None)
+ return response
+ finally:
+ cleanup()
+ # This code-path should be unreachable
+ assert False, "Error from last retry should have been raised!"
+
+
+async def makeSingleHttpRequest(method, url, payload, headers, session=None):
+ method = method.upper()
+ log.debug('Making a %s request to %s', method, url)
+ log.debug('HTTP Headers: %s' % str(headers))
+ log.debug('HTTP Payload: %s (limit 100 char)' % str(payload)[:100])
+ implicit = False
+ if session is None:
+ implicit = True
+ session = aiohttp.ClientSession()
+
+ skip_auto_headers = [aiohttp.hdrs.CONTENT_TYPE]
+
+ try:
+ # https://docs.aiohttp.org/en/stable/client_quickstart.html#passing-parameters-in-urls
+ # we must avoid aiohttp's helpful "requoting" functionality, as it breaks Hawk signatures
+ url = aiohttp.client.URL(url, encoded=True)
+ async with session.request(
+ method, url, data=payload, headers=headers,
+ skip_auto_headers=skip_auto_headers, compress=False
+ ) as resp:
+ response_text = await resp.text()
+ log.debug('Received HTTP Status: %s' % resp.status)
+ log.debug('Received HTTP Headers: %s' % str(resp.headers))
+ log.debug('Received HTTP Payload: %s (limit 1024 char)' %
+ six.text_type(response_text)[:1024])
+ return resp
+ finally:
+ if implicit:
+ await session.close()
+
+
+async def putFile(filename, url, contentType, session=None):
+ with open(filename, 'rb') as f:
+ contentLength = os.fstat(f.fileno()).st_size
+ return await makeHttpRequest('put', url, f, headers={
+ 'Content-Length': str(contentLength),
+ 'Content-Type': contentType,
+ }, session=session)
diff --git a/third_party/python/taskcluster/taskcluster/aio/auth.py b/third_party/python/taskcluster/taskcluster/aio/auth.py
new file mode 100644
index 0000000000..755f89e3e9
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/auth.py
@@ -0,0 +1,867 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class Auth(AsyncBaseClient):
+ """
+ Authentication related API end-points for Taskcluster and related
+ services. These API end-points are of interest if you wish to:
+ * Authorize a request signed with Taskcluster credentials,
+ * Manage clients and roles,
+ * Inspect or audit clients and roles,
+ * Gain access to various services guarded by this API.
+
+ Note that in this service "authentication" refers to validating the
+    correctness of the supplied credentials (that the caller possesses the
+ appropriate access token). This service does not provide any kind of user
+ authentication (identifying a particular person).
+
+ ### Clients
+ The authentication service manages _clients_, at a high-level each client
+ consists of a `clientId`, an `accessToken`, scopes, and some metadata.
+ The `clientId` and `accessToken` can be used for authentication when
+ calling Taskcluster APIs.
+
+ The client's scopes control the client's access to Taskcluster resources.
+ The scopes are *expanded* by substituting roles, as defined below.
+
+ ### Roles
+ A _role_ consists of a `roleId`, a set of scopes and a description.
+ Each role constitutes a simple _expansion rule_ that says if you have
+ the scope: `assume:<roleId>` you get the set of scopes the role has.
+ Think of the `assume:<roleId>` as a scope that allows a client to assume
+ a role.
+
+    As in scopes, the `*` kleene star also has special meaning if it is
+    located at the end of a `roleId`. If you have a role with the following
+    `roleId`: `my-prefix*`, then any client which has a scope starting with
+ `assume:my-prefix` will be allowed to assume the role.
+
+ ### Guarded Services
+ The authentication service also has API end-points for delegating access
+ to some guarded service such as AWS S3, or Azure Table Storage.
+ Generally, we add API end-points to this server when we wish to use
+ Taskcluster credentials to grant access to a third-party service used
+ by many Taskcluster components.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'auth'
+ apiVersion = 'v1'
+
+ async def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ async def listClients(self, *args, **kwargs):
+ """
+ List Clients
+
+ Get a list of all clients. With `prefix`, only clients for which
+ it is a prefix of the clientId are returned.
+
+ By default this end-point will try to return up to 1000 clients in one
+ request. But it **may return less, even none**.
+ It may also return a `continuationToken` even though there are no more
+ results. However, you can only be sure to have seen all results if you
+ keep calling `listClients` with the last `continuationToken` until you
+ get a result without a `continuationToken`.
+
+ This method gives output: ``v1/list-clients-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listClients"], *args, **kwargs)
+
+ async def client(self, *args, **kwargs):
+ """
+ Get Client
+
+ Get information about a single client.
+
+ This method gives output: ``v1/get-client-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["client"], *args, **kwargs)
+
+ async def createClient(self, *args, **kwargs):
+ """
+ Create Client
+
+ Create a new client and get the `accessToken` for this client.
+ You should store the `accessToken` from this API call as there is no
+ other way to retrieve it.
+
+        If you lose the `accessToken` you can call `resetAccessToken` to reset
+ it, and a new `accessToken` will be returned, but you cannot retrieve the
+ current `accessToken`.
+
+ If a client with the same `clientId` already exists this operation will
+ fail. Use `updateClient` if you wish to update an existing client.
+
+ The caller's scopes must satisfy `scopes`.
+
+ This method takes input: ``v1/create-client-request.json#``
+
+ This method gives output: ``v1/create-client-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["createClient"], *args, **kwargs)
+
+ async def resetAccessToken(self, *args, **kwargs):
+ """
+ Reset `accessToken`
+
+        Reset a client's `accessToken`; this will revoke the existing
+ `accessToken`, generate a new `accessToken` and return it from this
+ call.
+
+        There is no way to retrieve an existing `accessToken`, so if you lose it
+ you must reset the accessToken to acquire it again.
+
+ This method gives output: ``v1/create-client-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["resetAccessToken"], *args, **kwargs)
+
+ async def updateClient(self, *args, **kwargs):
+ """
+ Update Client
+
+        Update an existing client. The `clientId` and `accessToken` cannot be
+        updated, but `scopes` can be modified. The caller's scopes must
+        satisfy all scopes being added to the client in the update operation.
+        If no scopes are given in the request, the client's scopes remain
+        unchanged.
+
+ This method takes input: ``v1/create-client-request.json#``
+
+ This method gives output: ``v1/get-client-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["updateClient"], *args, **kwargs)
+
+ async def enableClient(self, *args, **kwargs):
+ """
+ Enable Client
+
+ Enable a client that was disabled with `disableClient`. If the client
+ is already enabled, this does nothing.
+
+ This is typically used by identity providers to re-enable clients that
+ had been disabled when the corresponding identity's scopes changed.
+
+ This method gives output: ``v1/get-client-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["enableClient"], *args, **kwargs)
+
+ async def disableClient(self, *args, **kwargs):
+ """
+ Disable Client
+
+ Disable a client. If the client is already disabled, this does nothing.
+
+ This is typically used by identity providers to disable clients when the
+ corresponding identity's scopes no longer satisfy the client's scopes.
+
+ This method gives output: ``v1/get-client-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["disableClient"], *args, **kwargs)
+
+ async def deleteClient(self, *args, **kwargs):
+ """
+ Delete Client
+
+        Delete a client. Please note that any roles related to this client must
+ be deleted independently.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["deleteClient"], *args, **kwargs)
+
+ async def listRoles(self, *args, **kwargs):
+ """
+ List Roles
+
+ Get a list of all roles, each role object also includes the list of
+ scopes it expands to.
+
+ This method gives output: ``v1/list-roles-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listRoles"], *args, **kwargs)
+
+ async def role(self, *args, **kwargs):
+ """
+ Get Role
+
+ Get information about a single role, including the set of scopes that the
+ role expands to.
+
+ This method gives output: ``v1/get-role-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["role"], *args, **kwargs)
+
+ async def createRole(self, *args, **kwargs):
+ """
+ Create Role
+
+ Create a new role.
+
+ The caller's scopes must satisfy the new role's scopes.
+
+ If a role with the same `roleId` already exists, this operation
+ will fail. Use `updateRole` to modify an existing role.
+
+ Creation of a role that will generate an infinite expansion will result
+ in an error response.
+
+ This method takes input: ``v1/create-role-request.json#``
+
+ This method gives output: ``v1/get-role-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["createRole"], *args, **kwargs)
+
+ async def updateRole(self, *args, **kwargs):
+ """
+ Update Role
+
+ Update an existing role.
+
+ The caller's scopes must satisfy all of the new scopes being added, but
+ need not satisfy all of the client's existing scopes.
+
+ An update of a role that will generate an infinite expansion will result
+ in an error response.
+
+ This method takes input: ``v1/create-role-request.json#``
+
+ This method gives output: ``v1/get-role-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["updateRole"], *args, **kwargs)
+
+ async def deleteRole(self, *args, **kwargs):
+ """
+ Delete Role
+
+ Delete a role. This operation will succeed regardless of whether or not
+ the role exists.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["deleteRole"], *args, **kwargs)
+
+ async def expandScopesGet(self, *args, **kwargs):
+ """
+ Expand Scopes
+
+ Return an expanded copy of the given scopeset, with scopes implied by any
+ roles included.
+
+ This call uses the GET method with an HTTP body. It remains only for
+ backward compatibility.
+
+ This method takes input: ``v1/scopeset.json#``
+
+ This method gives output: ``v1/scopeset.json#``
+
+ This method is ``deprecated``
+ """
+
+ return await self._makeApiCall(self.funcinfo["expandScopesGet"], *args, **kwargs)
+
+ async def expandScopes(self, *args, **kwargs):
+ """
+ Expand Scopes
+
+ Return an expanded copy of the given scopeset, with scopes implied by any
+ roles included.
+
+ This method takes input: ``v1/scopeset.json#``
+
+ This method gives output: ``v1/scopeset.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["expandScopes"], *args, **kwargs)
+
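+ # Editorial usage sketch (not part of the generated client): expanding a
+ # scope set with `expandScopes`, assuming an `auth` instance of this class.
+ # The ``{'scopes': [...]}`` payload shape is an assumption based on
+ # v1/scopeset.json#:
+ #
+ #   expanded = await auth.expandScopes({'scopes': ['assume:my-role']})
+ #   # expanded['scopes'] includes the scopes implied by any roles
+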
+ async def currentScopes(self, *args, **kwargs):
+ """
+ Get Current Scopes
+
+ Return the expanded scopes available in the request, taking into account all sources
+ of scopes and scope restrictions (temporary credentials, assumeScopes, client scopes,
+ and roles).
+
+ This method gives output: ``v1/scopeset.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["currentScopes"], *args, **kwargs)
+
+ async def awsS3Credentials(self, *args, **kwargs):
+ """
+ Get Temporary Read/Write Credentials S3
+
+ Get temporary AWS credentials for `read-write` or `read-only` access to
+ a given `bucket` and `prefix` within that bucket.
+ The `level` parameter can be `read-write` or `read-only` and determines
+ which type of credentials are returned. Please note that the `level`
+ parameter is required in the scope guarding access. The bucket name must
+ not contain `.`, as recommended by Amazon.
+
+ This method can only allow access to a whitelisted set of buckets. To add
+ a bucket to that whitelist, contact the Taskcluster team, who will add it to
+ the appropriate IAM policy. If the bucket is in a different AWS account, you
+ will also need to add a bucket policy allowing access from the Taskcluster
+ account. That policy should look like this:
+
+ ```
+ {
+ "Version": "2012-10-17",
+ "Statement": [
+ {
+ "Sid": "allow-taskcluster-auth-to-delegate-access",
+ "Effect": "Allow",
+ "Principal": {
+ "AWS": "arn:aws:iam::692406183521:root"
+ },
+ "Action": [
+ "s3:ListBucket",
+ "s3:GetObject",
+ "s3:PutObject",
+ "s3:DeleteObject",
+ "s3:GetBucketLocation"
+ ],
+ "Resource": [
+ "arn:aws:s3:::<bucket>",
+ "arn:aws:s3:::<bucket>/*"
+ ]
+ }
+ ]
+ }
+ ```
+
+ The credentials are set to expire after an hour, but this behavior is
+ subject to change. Hence, you should always read the `expires` property
+ from the response, if you intend to maintain active credentials in your
+ application.
+
+ Please note that your `prefix` may not start with slash `/`. Such a prefix
+ is allowed on S3, but we forbid it here to discourage bad behavior.
+
+ Also note that if your `prefix` doesn't end in a slash `/`, the STS
+ credentials may allow access to unexpected keys, as S3 does not treat
+ slashes specially. For example, a prefix of `my-folder` will allow
+ access to `my-folder/file.txt` as expected, but also to `my-folder.txt`,
+ which may not be intended.
+
+ Finally, note that the `PutObjectAcl` call is not allowed. Passing a canned
+ ACL other than `private` to `PutObject` is treated as a `PutObjectAcl` call, and
+ will result in an access-denied error from AWS. This limitation is due to a
+ security flaw in Amazon S3 which might otherwise allow indefinite access to
+ uploaded objects.
+
+ **EC2 metadata compatibility**: if the querystring parameter
+ `?format=iam-role-compat` is given, the response will be compatible
+ with the JSON exposed by the EC2 metadata service. This aims to ease
+ compatibility for libraries and tools built to auto-refresh credentials.
+ For details on the format returned by EC2 metadata service see:
+ [EC2 User Guide](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html#instance-metadata-security-credentials).
+
+ This method gives output: ``v1/aws-s3-credentials-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["awsS3Credentials"], *args, **kwargs)
+
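+ # Editorial usage sketch (not part of the generated client): requesting
+ # temporary S3 credentials, assuming an `auth` instance of this class. The
+ # bucket and prefix are placeholders; the trailing slash on the prefix
+ # follows the advice in the docstring above:
+ #
+ #   creds = await auth.awsS3Credentials('read-write', 'my-bucket', 'my-prefix/')
+ #   expires = creds['expires']  # always honour the expiry in the response
+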
+ async def azureAccounts(self, *args, **kwargs):
+ """
+ List Accounts Managed by Auth
+
+ Retrieve a list of all Azure accounts managed by Taskcluster Auth.
+
+ This method gives output: ``v1/azure-account-list-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["azureAccounts"], *args, **kwargs)
+
+ async def azureTables(self, *args, **kwargs):
+ """
+ List Tables in an Account Managed by Auth
+
+ Retrieve a list of all tables in an account.
+
+ This method gives output: ``v1/azure-table-list-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["azureTables"], *args, **kwargs)
+
+ async def azureTableSAS(self, *args, **kwargs):
+ """
+ Get Shared-Access-Signature for Azure Table
+
+ Get a shared access signature (SAS) string for use with a specific Azure
+ Table Storage table.
+
+ The `level` parameter can be `read-write` or `read-only` and determines
+ which type of credentials are returned. If level is read-write, it will create the
+ table if it doesn't already exist.
+
+ This method gives output: ``v1/azure-table-access-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["azureTableSAS"], *args, **kwargs)
+
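+ # Editorial usage sketch (not part of the generated client): fetching a SAS
+ # string for a table, assuming an `auth` instance of this class; the account
+ # and table names are placeholders, and the argument order follows
+ # funcinfo['azureTableSAS']['args'] below:
+ #
+ #   sas = await auth.azureTableSAS('myaccount', 'mytable', 'read-only')
+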
+ async def azureContainers(self, *args, **kwargs):
+ """
+ List containers in an Account Managed by Auth
+
+ Retrieve a list of all containers in an account.
+
+ This method gives output: ``v1/azure-container-list-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["azureContainers"], *args, **kwargs)
+
+ async def azureContainerSAS(self, *args, **kwargs):
+ """
+ Get Shared-Access-Signature for Azure Container
+
+ Get a shared access signature (SAS) string for use with a specific Azure
+ Blob Storage container.
+
+ The `level` parameter can be `read-write` or `read-only` and determines
+ which type of credentials are returned. If level is read-write, it will create the
+ container if it doesn't already exist.
+
+ This method gives output: ``v1/azure-container-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["azureContainerSAS"], *args, **kwargs)
+
+ async def sentryDSN(self, *args, **kwargs):
+ """
+ Get DSN for Sentry Project
+
+ Get temporary DSN (access credentials) for a sentry project.
+ The credentials returned can be used with any Sentry client for up to
+ 24 hours, after which the credentials will be automatically disabled.
+
+ If the project doesn't exist, it will be created and assigned to the
+ initial team configured for this component. Contact a Sentry admin
+ to have the project transferred to a team you have access to, if needed.
+
+ This method gives output: ``v1/sentry-dsn-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["sentryDSN"], *args, **kwargs)
+
+ async def statsumToken(self, *args, **kwargs):
+ """
+ Get Token for Statsum Project
+
+ Get temporary `token` and `baseUrl` for sending metrics to statsum.
+
+ The token is valid for 24 hours; clients should refresh it after expiration.
+
+ This method gives output: ``v1/statsum-token-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["statsumToken"], *args, **kwargs)
+
+ async def webhooktunnelToken(self, *args, **kwargs):
+ """
+ Get Token for Webhooktunnel Proxy
+
+ Get a temporary `token` and `id` for connecting to webhooktunnel.
+ The token is valid for 96 hours; clients should refresh after expiration.
+
+ This method gives output: ``v1/webhooktunnel-token-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["webhooktunnelToken"], *args, **kwargs)
+
+ async def authenticateHawk(self, *args, **kwargs):
+ """
+ Authenticate Hawk Request
+
+ Validate the request signature given on input and return list of scopes
+ that the authenticating client has.
+
+ This method is used by other services that wish to rely on Taskcluster
+ credentials for authentication. This way we can use Hawk without having
+ the secret credentials leave this service.
+
+ This method takes input: ``v1/authenticate-hawk-request.json#``
+
+ This method gives output: ``v1/authenticate-hawk-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["authenticateHawk"], *args, **kwargs)
+
+ async def testAuthenticate(self, *args, **kwargs):
+ """
+ Test Authentication
+
+ Utility method to test client implementations of Taskcluster
+ authentication.
+
+ Rather than using real credentials, this endpoint accepts requests with
+ clientId `tester` and accessToken `no-secret`. That client's scopes are
+ based on `clientScopes` in the request body.
+
+ The request is validated, with any certificate, authorizedScopes, etc.
+ applied, and the resulting scopes are checked against `requiredScopes`
+ from the request body. On success, the response contains the clientId
+ and scopes as seen by the API method.
+
+ This method takes input: ``v1/test-authenticate-request.json#``
+
+ This method gives output: ``v1/test-authenticate-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["testAuthenticate"], *args, **kwargs)
+
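+ # Editorial usage sketch (not part of the generated client): exercising
+ # `testAuthenticate` from a client configured with clientId `tester` and
+ # accessToken `no-secret`, as described above. The payload field names come
+ # from the docstring; `test_auth` is an assumed instance of this class:
+ #
+ #   result = await test_auth.testAuthenticate({
+ #       'clientScopes': ['test:scope'],
+ #       'requiredScopes': ['test:scope'],
+ #   })
+ #   # result carries the clientId and scopes as seen by the API method
+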
+ async def testAuthenticateGet(self, *args, **kwargs):
+ """
+ Test Authentication (GET)
+
+ Utility method similar to `testAuthenticate`, but with the GET method,
+ so it can be used with signed URLs (bewits).
+
+ Rather than using real credentials, this endpoint accepts requests with
+ clientId `tester` and accessToken `no-secret`. That client's scopes are
+ `['test:*', 'auth:create-client:test:*']`. The call fails if the
+ `test:authenticate-get` scope is not available.
+
+ The request is validated, with any certificate, authorizedScopes, etc.
+ applied, and the resulting scopes are checked, just like any API call.
+ On success, the response contains the clientId and scopes as seen by
+ the API method.
+
+ This method may later be extended to allow specification of client and
+ required scopes via query arguments.
+
+ This method gives output: ``v1/test-authenticate-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["testAuthenticateGet"], *args, **kwargs)
+
+ funcinfo = {
+ "authenticateHawk": {
+ 'args': [],
+ 'input': 'v1/authenticate-hawk-request.json#',
+ 'method': 'post',
+ 'name': 'authenticateHawk',
+ 'output': 'v1/authenticate-hawk-response.json#',
+ 'route': '/authenticate-hawk',
+ 'stability': 'stable',
+ },
+ "awsS3Credentials": {
+ 'args': ['level', 'bucket', 'prefix'],
+ 'method': 'get',
+ 'name': 'awsS3Credentials',
+ 'output': 'v1/aws-s3-credentials-response.json#',
+ 'query': ['format'],
+ 'route': '/aws/s3/<level>/<bucket>/<prefix>',
+ 'stability': 'stable',
+ },
+ "azureAccounts": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'azureAccounts',
+ 'output': 'v1/azure-account-list-response.json#',
+ 'route': '/azure/accounts',
+ 'stability': 'stable',
+ },
+ "azureContainerSAS": {
+ 'args': ['account', 'container', 'level'],
+ 'method': 'get',
+ 'name': 'azureContainerSAS',
+ 'output': 'v1/azure-container-response.json#',
+ 'route': '/azure/<account>/containers/<container>/<level>',
+ 'stability': 'stable',
+ },
+ "azureContainers": {
+ 'args': ['account'],
+ 'method': 'get',
+ 'name': 'azureContainers',
+ 'output': 'v1/azure-container-list-response.json#',
+ 'query': ['continuationToken'],
+ 'route': '/azure/<account>/containers',
+ 'stability': 'stable',
+ },
+ "azureTableSAS": {
+ 'args': ['account', 'table', 'level'],
+ 'method': 'get',
+ 'name': 'azureTableSAS',
+ 'output': 'v1/azure-table-access-response.json#',
+ 'route': '/azure/<account>/table/<table>/<level>',
+ 'stability': 'stable',
+ },
+ "azureTables": {
+ 'args': ['account'],
+ 'method': 'get',
+ 'name': 'azureTables',
+ 'output': 'v1/azure-table-list-response.json#',
+ 'query': ['continuationToken'],
+ 'route': '/azure/<account>/tables',
+ 'stability': 'stable',
+ },
+ "client": {
+ 'args': ['clientId'],
+ 'method': 'get',
+ 'name': 'client',
+ 'output': 'v1/get-client-response.json#',
+ 'route': '/clients/<clientId>',
+ 'stability': 'stable',
+ },
+ "createClient": {
+ 'args': ['clientId'],
+ 'input': 'v1/create-client-request.json#',
+ 'method': 'put',
+ 'name': 'createClient',
+ 'output': 'v1/create-client-response.json#',
+ 'route': '/clients/<clientId>',
+ 'stability': 'stable',
+ },
+ "createRole": {
+ 'args': ['roleId'],
+ 'input': 'v1/create-role-request.json#',
+ 'method': 'put',
+ 'name': 'createRole',
+ 'output': 'v1/get-role-response.json#',
+ 'route': '/roles/<roleId>',
+ 'stability': 'stable',
+ },
+ "currentScopes": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'currentScopes',
+ 'output': 'v1/scopeset.json#',
+ 'route': '/scopes/current',
+ 'stability': 'stable',
+ },
+ "deleteClient": {
+ 'args': ['clientId'],
+ 'method': 'delete',
+ 'name': 'deleteClient',
+ 'route': '/clients/<clientId>',
+ 'stability': 'stable',
+ },
+ "deleteRole": {
+ 'args': ['roleId'],
+ 'method': 'delete',
+ 'name': 'deleteRole',
+ 'route': '/roles/<roleId>',
+ 'stability': 'stable',
+ },
+ "disableClient": {
+ 'args': ['clientId'],
+ 'method': 'post',
+ 'name': 'disableClient',
+ 'output': 'v1/get-client-response.json#',
+ 'route': '/clients/<clientId>/disable',
+ 'stability': 'stable',
+ },
+ "enableClient": {
+ 'args': ['clientId'],
+ 'method': 'post',
+ 'name': 'enableClient',
+ 'output': 'v1/get-client-response.json#',
+ 'route': '/clients/<clientId>/enable',
+ 'stability': 'stable',
+ },
+ "expandScopes": {
+ 'args': [],
+ 'input': 'v1/scopeset.json#',
+ 'method': 'post',
+ 'name': 'expandScopes',
+ 'output': 'v1/scopeset.json#',
+ 'route': '/scopes/expand',
+ 'stability': 'stable',
+ },
+ "expandScopesGet": {
+ 'args': [],
+ 'input': 'v1/scopeset.json#',
+ 'method': 'get',
+ 'name': 'expandScopesGet',
+ 'output': 'v1/scopeset.json#',
+ 'route': '/scopes/expand',
+ 'stability': 'deprecated',
+ },
+ "listClients": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listClients',
+ 'output': 'v1/list-clients-response.json#',
+ 'query': ['prefix', 'continuationToken', 'limit'],
+ 'route': '/clients/',
+ 'stability': 'stable',
+ },
+ "listRoles": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listRoles',
+ 'output': 'v1/list-roles-response.json#',
+ 'route': '/roles/',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "resetAccessToken": {
+ 'args': ['clientId'],
+ 'method': 'post',
+ 'name': 'resetAccessToken',
+ 'output': 'v1/create-client-response.json#',
+ 'route': '/clients/<clientId>/reset',
+ 'stability': 'stable',
+ },
+ "role": {
+ 'args': ['roleId'],
+ 'method': 'get',
+ 'name': 'role',
+ 'output': 'v1/get-role-response.json#',
+ 'route': '/roles/<roleId>',
+ 'stability': 'stable',
+ },
+ "sentryDSN": {
+ 'args': ['project'],
+ 'method': 'get',
+ 'name': 'sentryDSN',
+ 'output': 'v1/sentry-dsn-response.json#',
+ 'route': '/sentry/<project>/dsn',
+ 'stability': 'stable',
+ },
+ "statsumToken": {
+ 'args': ['project'],
+ 'method': 'get',
+ 'name': 'statsumToken',
+ 'output': 'v1/statsum-token-response.json#',
+ 'route': '/statsum/<project>/token',
+ 'stability': 'stable',
+ },
+ "testAuthenticate": {
+ 'args': [],
+ 'input': 'v1/test-authenticate-request.json#',
+ 'method': 'post',
+ 'name': 'testAuthenticate',
+ 'output': 'v1/test-authenticate-response.json#',
+ 'route': '/test-authenticate',
+ 'stability': 'stable',
+ },
+ "testAuthenticateGet": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'testAuthenticateGet',
+ 'output': 'v1/test-authenticate-response.json#',
+ 'route': '/test-authenticate-get/',
+ 'stability': 'stable',
+ },
+ "updateClient": {
+ 'args': ['clientId'],
+ 'input': 'v1/create-client-request.json#',
+ 'method': 'post',
+ 'name': 'updateClient',
+ 'output': 'v1/get-client-response.json#',
+ 'route': '/clients/<clientId>',
+ 'stability': 'stable',
+ },
+ "updateRole": {
+ 'args': ['roleId'],
+ 'input': 'v1/create-role-request.json#',
+ 'method': 'post',
+ 'name': 'updateRole',
+ 'output': 'v1/get-role-response.json#',
+ 'route': '/roles/<roleId>',
+ 'stability': 'stable',
+ },
+ "webhooktunnelToken": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'webhooktunnelToken',
+ 'output': 'v1/webhooktunnel-token-response.json#',
+ 'route': '/webhooktunnel',
+ 'stability': 'stable',
+ },
+ }
+
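+ # Editorial note: a hedged sketch of constructing and using this client.
+ # The import path and option names (`rootUrl`, `credentials`) are
+ # assumptions about AsyncBaseClient and are not verified against this
+ # vendored version:
+ #
+ #   from taskcluster.aio import Auth
+ #   auth = Auth({
+ #       'rootUrl': 'https://tc.example.com',
+ #       'credentials': {'clientId': 'my-client', 'accessToken': 'secret'},
+ #   })
+ #   scopes = await auth.currentScopes()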
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Auth']
diff --git a/third_party/python/taskcluster/taskcluster/aio/authevents.py b/third_party/python/taskcluster/taskcluster/aio/authevents.py
new file mode 100644
index 0000000000..1d5cc517e3
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/authevents.py
@@ -0,0 +1,180 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class AuthEvents(AsyncBaseClient):
+ """
+ The auth service is responsible for storing credentials, managing
+ assignment of scopes, and validating request signatures from other
+ services.
+
+ These exchanges provide notifications when credentials or roles are
+ updated. This is mostly so that multiple instances of the auth service
+ can purge their caches and synchronize state. But you are of course
+ welcome to use these for other purposes, monitoring changes for example.
+ """
+
+ classOptions = {
+ "exchangePrefix": "exchange/taskcluster-auth/v1/",
+ }
+ serviceName = 'auth'
+ apiVersion = 'v1'
+
+ def clientCreated(self, *args, **kwargs):
+ """
+ Client Created Messages
+
+ Message that a new client has been created.
+
+ This exchange outputs: ``v1/client-message.json#``
+
+ This exchange takes the following keys:
+
+ * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'client-created',
+ 'name': 'clientCreated',
+ 'routingKey': [
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/client-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
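+ # Editorial note: this method does not call an API; it builds a binding
+ # description (exchange plus routing-key pattern) for use with a pulse
+ # consumer. The call form and return shape below are assumptions:
+ #
+ #   events = AuthEvents({'rootUrl': 'https://tc.example.com'})
+ #   binding = events.clientCreated()  # match all messages on this exchange
+ #   # binding names exchange/taskcluster-auth/v1/client-created and a
+ #   # routing-key pattern derived from the `reserved` key
+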
+ def clientUpdated(self, *args, **kwargs):
+ """
+ Client Updated Messages
+
+ Message that a client has been updated.
+
+ This exchange outputs: ``v1/client-message.json#``
+
+ This exchange takes the following keys:
+
+ * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'client-updated',
+ 'name': 'clientUpdated',
+ 'routingKey': [
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/client-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def clientDeleted(self, *args, **kwargs):
+ """
+ Client Deleted Messages
+
+ Message that a client has been deleted.
+
+ This exchange outputs: ``v1/client-message.json#``
+
+ This exchange takes the following keys:
+
+ * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'client-deleted',
+ 'name': 'clientDeleted',
+ 'routingKey': [
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/client-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def roleCreated(self, *args, **kwargs):
+ """
+ Role Created Messages
+
+ Message that a new role has been created.
+
+ This exchange outputs: ``v1/role-message.json#``
+
+ This exchange takes the following keys:
+
+ * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'role-created',
+ 'name': 'roleCreated',
+ 'routingKey': [
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/role-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def roleUpdated(self, *args, **kwargs):
+ """
+ Role Updated Messages
+
+ Message that a role has been updated.
+
+ This exchange outputs: ``v1/role-message.json#``
+
+ This exchange takes the following keys:
+
+ * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'role-updated',
+ 'name': 'roleUpdated',
+ 'routingKey': [
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/role-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def roleDeleted(self, *args, **kwargs):
+ """
+ Role Deleted Messages
+
+ Message that a role has been deleted.
+
+ This exchange outputs: ``v1/role-message.json#``
+
+ This exchange takes the following keys:
+
+ * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'role-deleted',
+ 'name': 'roleDeleted',
+ 'routingKey': [
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/role-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ funcinfo = {
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'AuthEvents']
diff --git a/third_party/python/taskcluster/taskcluster/aio/awsprovisioner.py b/third_party/python/taskcluster/taskcluster/aio/awsprovisioner.py
new file mode 100644
index 0000000000..1ef30306af
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/awsprovisioner.py
@@ -0,0 +1,450 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class AwsProvisioner(AsyncBaseClient):
+ """
+ The AWS Provisioner is responsible for provisioning instances on EC2 for use in
+ Taskcluster. The provisioner maintains a set of worker configurations which
+ can be managed with an API that is typically available at
+ aws-provisioner.taskcluster.net/v1. This API can also perform basic instance
+ management tasks in addition to maintaining the internal state of worker type
+ configuration information.
+
+ The Provisioner runs at a configurable interval. Each iteration of the
+ provisioner fetches a current copy of the state that the AWS EC2 API reports. In
+ each iteration, we ask the Queue how many tasks are pending for that worker
+ type. Based on the number of tasks pending and the scaling ratio, we may
+ submit requests for new instances. We use pricing information, capacity and
+ utility factor information to decide which instance type in which region would
+ be the optimal configuration.
+
+ Each EC2 instance type will declare a capacity and utility factor. Capacity is
+ the number of tasks that a given machine is capable of running concurrently.
+ Utility factor is a relative measure of performance between two instance types.
+ We multiply the utility factor by the spot price to compare instance types and
+ regions when making the bidding choices.
+
+ When a new EC2 instance is instantiated, its user data contains a token in
+ `securityToken` that can be used with the `getSecret` method to retrieve
+ the worker's credentials and any needed passwords or other restricted
+ information. The worker is responsible for deleting the secret after
+ retrieving it, to prevent dissemination of the secret to other processes
+ which can read the instance user data.
+
+ """
+
+ classOptions = {
+ }
+ serviceName = 'aws-provisioner'
+ apiVersion = 'v1'
+
+ async def listWorkerTypeSummaries(self, *args, **kwargs):
+ """
+ List worker types with details
+
+ Return a list of worker types, including some summary information about
+ current capacity for each. While this list includes all defined worker types,
+ there may be running EC2 instances for deleted worker types that are not
+ included here. The list is unordered.
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/list-worker-types-summaries-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listWorkerTypeSummaries"], *args, **kwargs)
+
+ async def createWorkerType(self, *args, **kwargs):
+ """
+ Create new Worker Type
+
+ Create a worker type. A worker type contains all the configuration
+ needed for the provisioner to manage the instances. Each worker type
+ knows which regions and which instance types are allowed for that
+ worker type. Remember that Capacity is the number of concurrent tasks
+ that can be run on a given EC2 resource and that Utility is the relative
+ performance rate between different instance types. There is no way to
+ configure different regions to have different sets of instance types,
+ so ensure that all instance types are available in all regions.
+ This function is idempotent.
+
+ Once a worker type is in the provisioner, a background process will
+ begin creating instances for it based on its capacity bounds and its
+ pending task count from the Queue. It is the worker's responsibility
+ to shut itself down. The provisioner has a limit (currently 96 hours)
+ for all instances to prevent zombie instances from running indefinitely.
+
+ The provisioner will ensure that all instances created are tagged with
+ AWS resource tags containing the provisioner id and the worker type.
+
+ If provided, the secrets in the global, region and instance type sections
+ are available using the secrets api. If specified, the scopes provided
+ will be used to generate a set of temporary credentials available with
+ the other secrets.
+
+ This method takes input: ``http://schemas.taskcluster.net/aws-provisioner/v1/create-worker-type-request.json#``
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["createWorkerType"], *args, **kwargs)
+
+ async def updateWorkerType(self, *args, **kwargs):
+ """
+ Update Worker Type
+
+ Provide a new copy of a worker type to replace the existing one.
+ This will overwrite the existing worker type definition if there
+ is already a worker type of that name. This method will return a
+ 200 response along with a copy of the worker type definition created.
+ Note that if you are using the result of a GET on the worker-type
+ endpoint, you will need to delete the lastModified and workerType
+ keys from the object returned, since those fields are not allowed in
+ the request body for this method.
+
+ Otherwise, all input requirements and actions are the same as the
+ create method.
+
+ This method takes input: ``http://schemas.taskcluster.net/aws-provisioner/v1/create-worker-type-request.json#``
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["updateWorkerType"], *args, **kwargs)
+
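+ # Editorial usage sketch (not part of the generated client): round-tripping
+ # a worker type definition, based on the docstring note above about dropping
+ # `lastModified` and `workerType` from a GET result before updating. The
+ # worker type name is a placeholder and `provisioner` is an assumed instance
+ # of this class:
+ #
+ #   definition = await provisioner.workerType('my-worker-type')
+ #   for key in ('lastModified', 'workerType'):
+ #       definition.pop(key, None)
+ #   await provisioner.updateWorkerType('my-worker-type', definition)
+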
+ async def workerTypeLastModified(self, *args, **kwargs):
+ """
+ Get Worker Type Last Modified Time
+
+ This method is provided to allow workers to see when they were
+ last modified. The value provided through UserData can be
+ compared against this value to see if changes have been made.
+ If the worker type definition has not been changed, the date
+ should be identical, as it is the same stored value.
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-last-modified.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["workerTypeLastModified"], *args, **kwargs)
+
+ async def workerType(self, *args, **kwargs):
+ """
+ Get Worker Type
+
+ Retrieve a copy of the requested worker type definition.
+ This copy contains a lastModified field as well as the worker
+ type name. As such, it will require manipulation to be able to
+ use the results of this method to submit data to the update
+ method.
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["workerType"], *args, **kwargs)
+
+ async def removeWorkerType(self, *args, **kwargs):
+ """
+ Delete Worker Type
+
+ Delete a worker type definition. This method will only delete
+ the worker type definition from the storage table. The actual
+ deletion will be handled by a background worker. As soon as this
+ method is called for a worker type, the background worker will
+ immediately submit requests to cancel all spot requests for this
+ worker type as well as killing all instances regardless of their
+ state. If you want to gracefully remove a worker type, you must
+ either ensure that no tasks are created with that worker type name
+ or you could theoretically set maxCapacity to 0, though this is
+ not a supported or tested action.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["removeWorkerType"], *args, **kwargs)
+
+ async def listWorkerTypes(self, *args, **kwargs):
+ """
+ List Worker Types
+
+ Return a list of string worker type names. These are the names
+ of all managed worker types known to the provisioner. This does
+ not include worker types which are left overs from a deleted worker
+ type definition but are still running in AWS.
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/list-worker-types-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listWorkerTypes"], *args, **kwargs)
+
+ async def createSecret(self, *args, **kwargs):
+ """
+ Create new Secret
+
+ Insert a secret into the secret storage. The supplied secrets will
+ be provided verbatim via `getSecret`, while the supplied scopes will
+ be converted into credentials by `getSecret`.
+
+ This method is not ordinarily used in production; instead, the provisioner
+ creates a new secret directly for each spot bid.
+
+ This method takes input: ``http://schemas.taskcluster.net/aws-provisioner/v1/create-secret-request.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["createSecret"], *args, **kwargs)
+
+ async def getSecret(self, *args, **kwargs):
+ """
+ Get a Secret
+
+ Retrieve a secret from storage. The result contains any passwords or
+ other restricted information verbatim as well as a temporary credential
+ based on the scopes specified when the secret was created.
+
+ It is important that this secret is deleted by the consumer (`removeSecret`),
+ or else the secrets will be visible to any process which can access the
+ user data associated with the instance.
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-secret-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["getSecret"], *args, **kwargs)
+
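+ # Editorial usage sketch (not part of the generated client): the
+ # retrieve-then-delete lifecycle described above, assuming `provisioner` is
+ # an instance of this class and `security_token` was read from the instance
+ # user data:
+ #
+ #   secret = await provisioner.getSecret(security_token)
+ #   # ... use the returned secrets / temporary credentials ...
+ #   await provisioner.removeSecret(security_token)  # delete before handing off
+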
+ async def instanceStarted(self, *args, **kwargs):
+ """
+ Report an instance starting
+
+ An instance will report in by giving its instance id as well
+ as its security token. The token is checked against a real,
+ existing token to ensure that random machines do not check in.
+ We could generate a different token, but that seems like overkill.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["instanceStarted"], *args, **kwargs)
+
+ async def removeSecret(self, *args, **kwargs):
+ """
+ Remove a Secret
+
+ Remove a secret. After this call, a call to `getSecret` with the given
+ token will return no information.
+
+ It is very important that the consumer of a
+ secret delete the secret from storage before handing over control
+ to untrusted processes to prevent credential and/or secret leakage.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["removeSecret"], *args, **kwargs)
+
+ async def getLaunchSpecs(self, *args, **kwargs):
+ """
+ Get All Launch Specifications for WorkerType
+
+ This method returns a preview of all possible launch specifications
+ that this worker type definition could submit to EC2. It is used to
+ test worker types, nothing more
+
+ **This API end-point is experimental and may be subject to change without warning.**
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-launch-specs-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["getLaunchSpecs"], *args, **kwargs)
+
+ async def state(self, *args, **kwargs):
+ """
+ Get AWS State for a worker type
+
+ Return the state of a given workertype as stored by the provisioner.
+ This state is stored as three lists: 1 for running instances, 1 for
+ pending requests. The `summary` property contains an updated summary
+ similar to that returned from `listWorkerTypeSummaries`.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["state"], *args, **kwargs)
+
+ async def backendStatus(self, *args, **kwargs):
+ """
+ Backend Status
+
+ This endpoint is used to show the last time the provisioner
+ checked in. A check-in is done through the Dead Man's Snitch
+ API. It is done at the conclusion of a provisioning iteration
+ and used to tell if the background provisioning process is still
+ running.
+
+ **Warning** this api end-point is **not stable**.
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/backend-status-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["backendStatus"], *args, **kwargs)
+
+ async def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ funcinfo = {
+ "backendStatus": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'backendStatus',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/backend-status-response.json#',
+ 'route': '/backend-status',
+ 'stability': 'experimental',
+ },
+ "createSecret": {
+ 'args': ['token'],
+ 'input': 'http://schemas.taskcluster.net/aws-provisioner/v1/create-secret-request.json#',
+ 'method': 'put',
+ 'name': 'createSecret',
+ 'route': '/secret/<token>',
+ 'stability': 'stable',
+ },
+ "createWorkerType": {
+ 'args': ['workerType'],
+ 'input': 'http://schemas.taskcluster.net/aws-provisioner/v1/create-worker-type-request.json#',
+ 'method': 'put',
+ 'name': 'createWorkerType',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#',
+ 'route': '/worker-type/<workerType>',
+ 'stability': 'stable',
+ },
+ "getLaunchSpecs": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'getLaunchSpecs',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/get-launch-specs-response.json#',
+ 'route': '/worker-type/<workerType>/launch-specifications',
+ 'stability': 'experimental',
+ },
+ "getSecret": {
+ 'args': ['token'],
+ 'method': 'get',
+ 'name': 'getSecret',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/get-secret-response.json#',
+ 'route': '/secret/<token>',
+ 'stability': 'stable',
+ },
+ "instanceStarted": {
+ 'args': ['instanceId', 'token'],
+ 'method': 'get',
+ 'name': 'instanceStarted',
+ 'route': '/instance-started/<instanceId>/<token>',
+ 'stability': 'stable',
+ },
+ "listWorkerTypeSummaries": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listWorkerTypeSummaries',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/list-worker-types-summaries-response.json#',
+ 'route': '/list-worker-type-summaries',
+ 'stability': 'stable',
+ },
+ "listWorkerTypes": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listWorkerTypes',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/list-worker-types-response.json#',
+ 'route': '/list-worker-types',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "removeSecret": {
+ 'args': ['token'],
+ 'method': 'delete',
+ 'name': 'removeSecret',
+ 'route': '/secret/<token>',
+ 'stability': 'stable',
+ },
+ "removeWorkerType": {
+ 'args': ['workerType'],
+ 'method': 'delete',
+ 'name': 'removeWorkerType',
+ 'route': '/worker-type/<workerType>',
+ 'stability': 'stable',
+ },
+ "state": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'state',
+ 'route': '/state/<workerType>',
+ 'stability': 'stable',
+ },
+ "updateWorkerType": {
+ 'args': ['workerType'],
+ 'input': 'http://schemas.taskcluster.net/aws-provisioner/v1/create-worker-type-request.json#',
+ 'method': 'post',
+ 'name': 'updateWorkerType',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#',
+ 'route': '/worker-type/<workerType>/update',
+ 'stability': 'stable',
+ },
+ "workerType": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'workerType',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#',
+ 'route': '/worker-type/<workerType>',
+ 'stability': 'stable',
+ },
+ "workerTypeLastModified": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'workerTypeLastModified',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-last-modified.json#',
+ 'route': '/worker-type-last-modified/<workerType>',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'AwsProvisioner']
diff --git a/third_party/python/taskcluster/taskcluster/aio/awsprovisionerevents.py b/third_party/python/taskcluster/taskcluster/aio/awsprovisionerevents.py
new file mode 100644
index 0000000000..d6ec9114a6
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/awsprovisionerevents.py
@@ -0,0 +1,142 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class AwsProvisionerEvents(AsyncBaseClient):
+ """
+ Exchanges from the provisioner... more docs later
+ """
+
+ classOptions = {
+ "exchangePrefix": "exchange/taskcluster-aws-provisioner/v1/",
+ }
+ apiVersion = 'v1'
+
+ def workerTypeCreated(self, *args, **kwargs):
+ """
+ WorkerType Created Message
+
+ When a new `workerType` is created, a message will be published to this
+ exchange.
+
+ This exchange outputs: ``http://schemas.taskcluster.net/aws-provisioner/v1/worker-type-message.json#``
+
+ This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * workerType: WorkerType that this message concerns. (required)
+
+ * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'worker-type-created',
+ 'name': 'workerTypeCreated',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'http://schemas.taskcluster.net/aws-provisioner/v1/worker-type-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def workerTypeUpdated(self, *args, **kwargs):
+ """
+ WorkerType Updated Message
+
+ When a `workerType` is updated, a message will be published to this
+ exchange.
+
+ This exchange outputs: ``http://schemas.taskcluster.net/aws-provisioner/v1/worker-type-message.json#``
+
+ This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * workerType: WorkerType that this message concerns. (required)
+
+ * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'worker-type-updated',
+ 'name': 'workerTypeUpdated',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'http://schemas.taskcluster.net/aws-provisioner/v1/worker-type-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def workerTypeRemoved(self, *args, **kwargs):
+ """
+ WorkerType Removed Message
+
+ When a `workerType` is removed, a message will be published to this
+ exchange.
+
+ This exchange outputs: ``http://schemas.taskcluster.net/aws-provisioner/v1/worker-type-message.json#``
+
+ This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * workerType: WorkerType that this message concerns. (required)
+
+ * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'worker-type-removed',
+ 'name': 'workerTypeRemoved',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'http://schemas.taskcluster.net/aws-provisioner/v1/worker-type-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ funcinfo = {
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'AwsProvisionerEvents']
diff --git a/third_party/python/taskcluster/taskcluster/aio/ec2manager.py b/third_party/python/taskcluster/taskcluster/aio/ec2manager.py
new file mode 100644
index 0000000000..2b055746fe
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/ec2manager.py
@@ -0,0 +1,475 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class EC2Manager(AsyncBaseClient):
+ """
+ A taskcluster service which manages EC2 instances. This service does not understand any taskcluster concepts intrinsically other than using the name `workerType` to refer to a group of associated instances. Unless you are working on building a provisioner for AWS, you almost certainly do not want to use this service.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'ec2-manager'
+ apiVersion = 'v1'
+
+ async def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ async def listWorkerTypes(self, *args, **kwargs):
+ """
+ See the list of worker types which are known to be managed
+
+ This method is only for debugging the ec2-manager
+
+ This method gives output: ``v1/list-worker-types.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listWorkerTypes"], *args, **kwargs)
+
+ async def runInstance(self, *args, **kwargs):
+ """
+ Run an instance
+
+ Request an instance of a worker type
+
+ This method takes input: ``v1/run-instance-request.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["runInstance"], *args, **kwargs)
+
+ async def terminateWorkerType(self, *args, **kwargs):
+ """
+ Terminate all resources from a worker type
+
+ Terminate all instances for this worker type
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["terminateWorkerType"], *args, **kwargs)
+
+ async def workerTypeStats(self, *args, **kwargs):
+ """
+ Look up the resource stats for a workerType
+
+ Return an object which has a generic state description. This only contains counts of instances
+
+ This method gives output: ``v1/worker-type-resources.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["workerTypeStats"], *args, **kwargs)
+
+ async def workerTypeHealth(self, *args, **kwargs):
+ """
+ Look up the resource health for a workerType
+
+ Return a view of the health of a given worker type
+
+ This method gives output: ``v1/health.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["workerTypeHealth"], *args, **kwargs)
+
+ async def workerTypeErrors(self, *args, **kwargs):
+ """
+ Look up the most recent errors of a workerType
+
+ Return a list of the most recent errors encountered by a worker type
+
+ This method gives output: ``v1/errors.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["workerTypeErrors"], *args, **kwargs)
+
+ async def workerTypeState(self, *args, **kwargs):
+ """
+ Look up the resource state for a workerType
+
+ Return state information for a given worker type
+
+ This method gives output: ``v1/worker-type-state.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["workerTypeState"], *args, **kwargs)
+
+ async def ensureKeyPair(self, *args, **kwargs):
+ """
+ Ensure a KeyPair for a given worker type exists
+
+ Idempotently ensure that a keypair of a given name exists
+
+ This method takes input: ``v1/create-key-pair.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ensureKeyPair"], *args, **kwargs)
+
+ async def removeKeyPair(self, *args, **kwargs):
+ """
+ Ensure a KeyPair for a given worker type does not exist
+
+ Ensure that a keypair of a given name does not exist.
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["removeKeyPair"], *args, **kwargs)
+
+ async def terminateInstance(self, *args, **kwargs):
+ """
+ Terminate an instance
+
+ Terminate an instance in a specified region
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["terminateInstance"], *args, **kwargs)
+
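+ # Editorial usage sketch (not part of the generated client): terminating a
+ # single instance, assuming `manager` is an instance of this class; the
+ # region and instance id are placeholders, and the argument order follows
+ # funcinfo['terminateInstance']['args'] below:
+ #
+ #   await manager.terminateInstance('us-east-1', 'i-0123456789abcdef0')
+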
+ async def getPrices(self, *args, **kwargs):
+ """
+ Request prices for EC2
+
+ Return a list of possible prices for EC2
+
+ This method gives output: ``v1/prices.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["getPrices"], *args, **kwargs)
+
+ async def getSpecificPrices(self, *args, **kwargs):
+ """
+ Request prices for EC2
+
+ Return a list of possible prices for EC2
+
+ This method takes input: ``v1/prices-request.json#``
+
+ This method gives output: ``v1/prices.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["getSpecificPrices"], *args, **kwargs)
+
+ async def getHealth(self, *args, **kwargs):
+ """
+ Get EC2 account health metrics
+
+ Give some basic stats on the health of our EC2 account
+
+ This method gives output: ``v1/health.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["getHealth"], *args, **kwargs)
+
+ async def getRecentErrors(self, *args, **kwargs):
+ """
+ Look up the most recent errors in the provisioner across all worker types
+
+ Return a list of recent errors encountered
+
+ This method gives output: ``v1/errors.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["getRecentErrors"], *args, **kwargs)
+
+ async def regions(self, *args, **kwargs):
+ """
+ See the list of regions managed by this ec2-manager
+
+ This method is only for debugging the ec2-manager
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["regions"], *args, **kwargs)
+
+ async def amiUsage(self, *args, **kwargs):
+ """
+ See the list of AMIs and their usage
+
+ List AMIs and their usage by returning a list of objects in the form:
+ {
+ region: string
+ volumetype: string
+ lastused: timestamp
+ }
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["amiUsage"], *args, **kwargs)
+
+ async def ebsUsage(self, *args, **kwargs):
+ """
+ See the current EBS volume usage list
+
+ Lists current EBS volume usage by returning a list of objects
+ that are uniquely defined by {region, volumetype, state} in the form:
+ {
+ region: string,
+ volumetype: string,
+ state: string,
+ totalcount: integer,
+ totalgb: integer,
+ touched: timestamp (last time that information was updated),
+ }
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ebsUsage"], *args, **kwargs)
+
+ async def dbpoolStats(self, *args, **kwargs):
+ """
+ Statistics on the Database client pool
+
+ This method is only for debugging the ec2-manager
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["dbpoolStats"], *args, **kwargs)
+
+ async def allState(self, *args, **kwargs):
+ """
+ List out the entire internal state
+
+ This method is only for debugging the ec2-manager
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["allState"], *args, **kwargs)
+
+ async def sqsStats(self, *args, **kwargs):
+ """
+ Statistics on the sqs queues
+
+ This method is only for debugging the ec2-manager
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["sqsStats"], *args, **kwargs)
+
+ async def purgeQueues(self, *args, **kwargs):
+ """
+ Purge the SQS queues
+
+ This method is only for debugging the ec2-manager
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["purgeQueues"], *args, **kwargs)
+
+ funcinfo = {
+ "allState": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'allState',
+ 'route': '/internal/all-state',
+ 'stability': 'experimental',
+ },
+ "amiUsage": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'amiUsage',
+ 'route': '/internal/ami-usage',
+ 'stability': 'experimental',
+ },
+ "dbpoolStats": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'dbpoolStats',
+ 'route': '/internal/db-pool-stats',
+ 'stability': 'experimental',
+ },
+ "ebsUsage": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ebsUsage',
+ 'route': '/internal/ebs-usage',
+ 'stability': 'experimental',
+ },
+ "ensureKeyPair": {
+ 'args': ['name'],
+ 'input': 'v1/create-key-pair.json#',
+ 'method': 'get',
+ 'name': 'ensureKeyPair',
+ 'route': '/key-pairs/<name>',
+ 'stability': 'experimental',
+ },
+ "getHealth": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'getHealth',
+ 'output': 'v1/health.json#',
+ 'route': '/health',
+ 'stability': 'experimental',
+ },
+ "getPrices": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'getPrices',
+ 'output': 'v1/prices.json#',
+ 'route': '/prices',
+ 'stability': 'experimental',
+ },
+ "getRecentErrors": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'getRecentErrors',
+ 'output': 'v1/errors.json#',
+ 'route': '/errors',
+ 'stability': 'experimental',
+ },
+ "getSpecificPrices": {
+ 'args': [],
+ 'input': 'v1/prices-request.json#',
+ 'method': 'post',
+ 'name': 'getSpecificPrices',
+ 'output': 'v1/prices.json#',
+ 'route': '/prices',
+ 'stability': 'experimental',
+ },
+ "listWorkerTypes": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listWorkerTypes',
+ 'output': 'v1/list-worker-types.json#',
+ 'route': '/worker-types',
+ 'stability': 'experimental',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "purgeQueues": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'purgeQueues',
+ 'route': '/internal/purge-queues',
+ 'stability': 'experimental',
+ },
+ "regions": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'regions',
+ 'route': '/internal/regions',
+ 'stability': 'experimental',
+ },
+ "removeKeyPair": {
+ 'args': ['name'],
+ 'method': 'delete',
+ 'name': 'removeKeyPair',
+ 'route': '/key-pairs/<name>',
+ 'stability': 'experimental',
+ },
+ "runInstance": {
+ 'args': ['workerType'],
+ 'input': 'v1/run-instance-request.json#',
+ 'method': 'put',
+ 'name': 'runInstance',
+ 'route': '/worker-types/<workerType>/instance',
+ 'stability': 'experimental',
+ },
+ "sqsStats": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'sqsStats',
+ 'route': '/internal/sqs-stats',
+ 'stability': 'experimental',
+ },
+ "terminateInstance": {
+ 'args': ['region', 'instanceId'],
+ 'method': 'delete',
+ 'name': 'terminateInstance',
+ 'route': '/region/<region>/instance/<instanceId>',
+ 'stability': 'experimental',
+ },
+ "terminateWorkerType": {
+ 'args': ['workerType'],
+ 'method': 'delete',
+ 'name': 'terminateWorkerType',
+ 'route': '/worker-types/<workerType>/resources',
+ 'stability': 'experimental',
+ },
+ "workerTypeErrors": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'workerTypeErrors',
+ 'output': 'v1/errors.json#',
+ 'route': '/worker-types/<workerType>/errors',
+ 'stability': 'experimental',
+ },
+ "workerTypeHealth": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'workerTypeHealth',
+ 'output': 'v1/health.json#',
+ 'route': '/worker-types/<workerType>/health',
+ 'stability': 'experimental',
+ },
+ "workerTypeState": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'workerTypeState',
+ 'output': 'v1/worker-type-state.json#',
+ 'route': '/worker-types/<workerType>/state',
+ 'stability': 'experimental',
+ },
+ "workerTypeStats": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'workerTypeStats',
+ 'output': 'v1/worker-type-resources.json#',
+ 'route': '/worker-types/<workerType>/stats',
+ 'stability': 'experimental',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'EC2Manager']
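+
+
+if __name__ == "__main__":
+    # Illustrative sketch only, not part of the generated client: each funcinfo
+    # entry above describes one REST end-point, and a method's positional
+    # arguments fill the <...> placeholders in its 'route'. The rootUrl and the
+    # worker-type name below are made-up assumptions, as is the options/session
+    # handling, which follows the conventions of AsyncBaseClient as understood here.
+    import asyncio
+
+    async def _example():
+        async with createSession() as session:
+            ec2 = EC2Manager({"rootUrl": "https://tc.example.com"}, session=session)
+            # 'workerTypeHealth' has args ['workerType'] and route
+            # '/worker-types/<workerType>/health', so it takes one positional argument.
+            health = await ec2.workerTypeHealth("example-worker-type")
+            print(health)
+
+    asyncio.run(_example())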
diff --git a/third_party/python/taskcluster/taskcluster/aio/github.py b/third_party/python/taskcluster/taskcluster/aio/github.py
new file mode 100644
index 0000000000..af6795737b
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/github.py
@@ -0,0 +1,205 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class Github(AsyncBaseClient):
+ """
+    The github service is responsible for creating tasks in response
+    to GitHub events, and posting results to the GitHub UI.
+
+ This document describes the API end-point for consuming GitHub
+ web hooks, as well as some useful consumer APIs.
+
+ When Github forbids an action, this service returns an HTTP 403
+ with code ForbiddenByGithub.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'github'
+ apiVersion = 'v1'
+
+ async def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ async def githubWebHookConsumer(self, *args, **kwargs):
+ """
+ Consume GitHub WebHook
+
+ Capture a GitHub event and publish it via pulse, if it's a push,
+ release or pull request.
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["githubWebHookConsumer"], *args, **kwargs)
+
+ async def builds(self, *args, **kwargs):
+ """
+ List of Builds
+
+ A paginated list of builds that have been run in
+ Taskcluster. Can be filtered on various git-specific
+ fields.
+
+ This method gives output: ``v1/build-list.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["builds"], *args, **kwargs)
+
+ async def badge(self, *args, **kwargs):
+ """
+ Latest Build Status Badge
+
+        Checks the status of the latest build of a given branch
+        and returns the corresponding badge SVG.
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["badge"], *args, **kwargs)
+
+ async def repository(self, *args, **kwargs):
+ """
+ Get Repository Info
+
+        Returns any repository metadata that is
+        useful within Taskcluster-related services.
+
+ This method gives output: ``v1/repository.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["repository"], *args, **kwargs)
+
+ async def latest(self, *args, **kwargs):
+ """
+ Latest Status for Branch
+
+ For a given branch of a repository, this will always point
+ to a status page for the most recent task triggered by that
+ branch.
+
+ Note: This is a redirect rather than a direct link.
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["latest"], *args, **kwargs)
+
+ async def createStatus(self, *args, **kwargs):
+ """
+ Post a status against a given changeset
+
+        For a given changeset (SHA) of a repository, this will attach a "commit status"
+        on GitHub. These statuses are links displayed next to each revision.
+ The status is either OK (green check) or FAILURE (red cross),
+ made of a custom title and link.
+
+ This method takes input: ``v1/create-status.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["createStatus"], *args, **kwargs)
+
+ async def createComment(self, *args, **kwargs):
+ """
+ Post a comment on a given GitHub Issue or Pull Request
+
+ For a given Issue or Pull Request of a repository, this will write a new message.
+
+ This method takes input: ``v1/create-comment.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["createComment"], *args, **kwargs)
+
+ funcinfo = {
+ "badge": {
+ 'args': ['owner', 'repo', 'branch'],
+ 'method': 'get',
+ 'name': 'badge',
+ 'route': '/repository/<owner>/<repo>/<branch>/badge.svg',
+ 'stability': 'experimental',
+ },
+ "builds": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'builds',
+ 'output': 'v1/build-list.json#',
+ 'query': ['continuationToken', 'limit', 'organization', 'repository', 'sha'],
+ 'route': '/builds',
+ 'stability': 'experimental',
+ },
+ "createComment": {
+ 'args': ['owner', 'repo', 'number'],
+ 'input': 'v1/create-comment.json#',
+ 'method': 'post',
+ 'name': 'createComment',
+ 'route': '/repository/<owner>/<repo>/issues/<number>/comments',
+ 'stability': 'experimental',
+ },
+ "createStatus": {
+ 'args': ['owner', 'repo', 'sha'],
+ 'input': 'v1/create-status.json#',
+ 'method': 'post',
+ 'name': 'createStatus',
+ 'route': '/repository/<owner>/<repo>/statuses/<sha>',
+ 'stability': 'experimental',
+ },
+ "githubWebHookConsumer": {
+ 'args': [],
+ 'method': 'post',
+ 'name': 'githubWebHookConsumer',
+ 'route': '/github',
+ 'stability': 'experimental',
+ },
+ "latest": {
+ 'args': ['owner', 'repo', 'branch'],
+ 'method': 'get',
+ 'name': 'latest',
+ 'route': '/repository/<owner>/<repo>/<branch>/latest',
+ 'stability': 'experimental',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "repository": {
+ 'args': ['owner', 'repo'],
+ 'method': 'get',
+ 'name': 'repository',
+ 'output': 'v1/repository.json#',
+ 'route': '/repository/<owner>/<repo>',
+ 'stability': 'experimental',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Github']
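+
+
+if __name__ == "__main__":
+    # Illustrative sketch only, not part of the generated client: shows how the
+    # async methods above might be called. The rootUrl, owner and repo values
+    # are made-up assumptions, and the options/session handling follows the
+    # conventions of AsyncBaseClient as understood here.
+    import asyncio
+
+    async def _example():
+        async with createSession() as session:
+            github = Github({"rootUrl": "https://tc.example.com"}, session=session)
+            # repository() takes the funcinfo args (owner, repo) and returns the
+            # JSON described by v1/repository.json#.
+            info = await github.repository("example-org", "example-repo")
+            print(info)
+
+    asyncio.run(_example())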
diff --git a/third_party/python/taskcluster/taskcluster/aio/githubevents.py b/third_party/python/taskcluster/taskcluster/aio/githubevents.py
new file mode 100644
index 0000000000..aecfcc85ab
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/githubevents.py
@@ -0,0 +1,194 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class GithubEvents(AsyncBaseClient):
+ """
+ The github service publishes a pulse
+ message for supported github events, translating Github webhook
+ events into pulse messages.
+
+ This document describes the exchange offered by the taskcluster
+ github service
+ """
+
+ classOptions = {
+ "exchangePrefix": "exchange/taskcluster-github/v1/",
+ }
+ serviceName = 'github'
+ apiVersion = 'v1'
+
+ def pullRequest(self, *args, **kwargs):
+ """
+ GitHub Pull Request Event
+
+ When a GitHub pull request event is posted it will be broadcast on this
+ exchange with the designated `organization` and `repository`
+ in the routing-key along with event specific metadata in the payload.
+
+        This exchange outputs: ``v1/github-pull-request-message.json#``. This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. (required)
+
+ * organization: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+
+        * repository: The GitHub `repository` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+
+        * action: The GitHub `action` which triggered an event. For possible values, see the payload actions property. (required)
+ """
+
+ ref = {
+ 'exchange': 'pull-request',
+ 'name': 'pullRequest',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'organization',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'repository',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'action',
+ },
+ ],
+ 'schema': 'v1/github-pull-request-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def push(self, *args, **kwargs):
+ """
+ GitHub push Event
+
+ When a GitHub push event is posted it will be broadcast on this
+ exchange with the designated `organization` and `repository`
+ in the routing-key along with event specific metadata in the payload.
+
+        This exchange outputs: ``v1/github-push-message.json#``. This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. (required)
+
+ * organization: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+
+        * repository: The GitHub `repository` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+ """
+
+ ref = {
+ 'exchange': 'push',
+ 'name': 'push',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'organization',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'repository',
+ },
+ ],
+ 'schema': 'v1/github-push-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def release(self, *args, **kwargs):
+ """
+ GitHub release Event
+
+ When a GitHub release event is posted it will be broadcast on this
+ exchange with the designated `organization` and `repository`
+ in the routing-key along with event specific metadata in the payload.
+
+        This exchange outputs: ``v1/github-release-message.json#``. This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. (required)
+
+ * organization: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+
+        * repository: The GitHub `repository` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+ """
+
+ ref = {
+ 'exchange': 'release',
+ 'name': 'release',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'organization',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'repository',
+ },
+ ],
+ 'schema': 'v1/github-release-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def taskGroupDefined(self, *args, **kwargs):
+ """
+        GitHub Task Group Defined Event
+
+        Used for creating status indicators in the GitHub UI using the Statuses API.
+
+        This exchange outputs: ``v1/task-group-defined-message.json#``. This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. (required)
+
+ * organization: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+
+        * repository: The GitHub `repository` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+ """
+
+ ref = {
+ 'exchange': 'task-group-defined',
+ 'name': 'taskGroupDefined',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'organization',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'repository',
+ },
+ ],
+ 'schema': 'v1/task-group-defined-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ funcinfo = {
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'GithubEvents']
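+
+
+if __name__ == "__main__":
+    # Illustrative sketch only, not part of the generated client: the *Events
+    # classes do not call a REST API; each method just returns an exchange /
+    # routing-key description that a pulse consumer can bind with. The rootUrl,
+    # organization and repository values are made-up assumptions, as is the
+    # exact shape of the returned description ('exchange', 'routingKeyPattern').
+    ghe = GithubEvents({"rootUrl": "https://tc.example.com"})
+    binding = ghe.push(organization="example-org", repository="example-repo")
+    print(binding["exchange"], binding["routingKeyPattern"])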
diff --git a/third_party/python/taskcluster/taskcluster/aio/hooks.py b/third_party/python/taskcluster/taskcluster/aio/hooks.py
new file mode 100644
index 0000000000..029702d484
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/hooks.py
@@ -0,0 +1,324 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class Hooks(AsyncBaseClient):
+ """
+ Hooks are a mechanism for creating tasks in response to events.
+
+ Hooks are identified with a `hookGroupId` and a `hookId`.
+
+ When an event occurs, the resulting task is automatically created. The
+ task is created using the scope `assume:hook-id:<hookGroupId>/<hookId>`,
+ which must have scopes to make the createTask call, including satisfying all
+ scopes in `task.scopes`. The new task has a `taskGroupId` equal to its
+ `taskId`, as is the convention for decision tasks.
+
+ Hooks can have a "schedule" indicating specific times that new tasks should
+ be created. Each schedule is in a simple cron format, per
+ https://www.npmjs.com/package/cron-parser. For example:
+ * `['0 0 1 * * *']` -- daily at 1:00 UTC
+ * `['0 0 9,21 * * 1-5', '0 0 12 * * 0,6']` -- weekdays at 9:00 and 21:00 UTC, weekends at noon
+
+ The task definition is used as a JSON-e template, with a context depending on how it is fired. See
+    [firing-hooks](/docs/reference/core/taskcluster-hooks/docs/firing-hooks)
+ for more information.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'hooks'
+ apiVersion = 'v1'
+
+ async def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ async def listHookGroups(self, *args, **kwargs):
+ """
+ List hook groups
+
+ This endpoint will return a list of all hook groups with at least one hook.
+
+ This method gives output: ``v1/list-hook-groups-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listHookGroups"], *args, **kwargs)
+
+ async def listHooks(self, *args, **kwargs):
+ """
+ List hooks in a given group
+
+ This endpoint will return a list of all the hook definitions within a
+ given hook group.
+
+ This method gives output: ``v1/list-hooks-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listHooks"], *args, **kwargs)
+
+ async def hook(self, *args, **kwargs):
+ """
+ Get hook definition
+
+        This endpoint will return the hook definition for the given `hookGroupId`
+        and `hookId`.
+
+ This method gives output: ``v1/hook-definition.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["hook"], *args, **kwargs)
+
+ async def getHookStatus(self, *args, **kwargs):
+ """
+ Get hook status
+
+ This endpoint will return the current status of the hook. This represents a
+ snapshot in time and may vary from one call to the next.
+
+ This method gives output: ``v1/hook-status.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["getHookStatus"], *args, **kwargs)
+
+ async def createHook(self, *args, **kwargs):
+ """
+ Create a hook
+
+ This endpoint will create a new hook.
+
+ The caller's credentials must include the role that will be used to
+ create the task. That role must satisfy task.scopes as well as the
+ necessary scopes to add the task to the queue.
+
+
+ This method takes input: ``v1/create-hook-request.json#``
+
+ This method gives output: ``v1/hook-definition.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["createHook"], *args, **kwargs)
+
+ async def updateHook(self, *args, **kwargs):
+ """
+ Update a hook
+
+ This endpoint will update an existing hook. All fields except
+ `hookGroupId` and `hookId` can be modified.
+
+ This method takes input: ``v1/create-hook-request.json#``
+
+ This method gives output: ``v1/hook-definition.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["updateHook"], *args, **kwargs)
+
+ async def removeHook(self, *args, **kwargs):
+ """
+ Delete a hook
+
+ This endpoint will remove a hook definition.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["removeHook"], *args, **kwargs)
+
+ async def triggerHook(self, *args, **kwargs):
+ """
+ Trigger a hook
+
+ This endpoint will trigger the creation of a task from a hook definition.
+
+        The HTTP payload must match the hook's `triggerSchema`. If it does, it is
+ provided as the `payload` property of the JSON-e context used to render the
+ task template.
+
+ This method takes input: ``v1/trigger-hook.json#``
+
+ This method gives output: ``v1/task-status.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["triggerHook"], *args, **kwargs)
+
+ async def getTriggerToken(self, *args, **kwargs):
+ """
+ Get a trigger token
+
+ Retrieve a unique secret token for triggering the specified hook. This
+ token can be deactivated with `resetTriggerToken`.
+
+ This method gives output: ``v1/trigger-token-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["getTriggerToken"], *args, **kwargs)
+
+ async def resetTriggerToken(self, *args, **kwargs):
+ """
+ Reset a trigger token
+
+        Reset the token for triggering a given hook, replacing it with a new token.
+        This invalidates any token that may have been issued via `getTriggerToken`.
+
+ This method gives output: ``v1/trigger-token-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["resetTriggerToken"], *args, **kwargs)
+
+ async def triggerHookWithToken(self, *args, **kwargs):
+ """
+ Trigger a hook with a token
+
+ This endpoint triggers a defined hook with a valid token.
+
+        The HTTP payload must match the hook's `triggerSchema`. If it does, it is
+ provided as the `payload` property of the JSON-e context used to render the
+ task template.
+
+ This method takes input: ``v1/trigger-hook.json#``
+
+ This method gives output: ``v1/task-status.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["triggerHookWithToken"], *args, **kwargs)
+
+ funcinfo = {
+ "createHook": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'input': 'v1/create-hook-request.json#',
+ 'method': 'put',
+ 'name': 'createHook',
+ 'output': 'v1/hook-definition.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>',
+ 'stability': 'stable',
+ },
+ "getHookStatus": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'method': 'get',
+ 'name': 'getHookStatus',
+ 'output': 'v1/hook-status.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>/status',
+ 'stability': 'stable',
+ },
+ "getTriggerToken": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'method': 'get',
+ 'name': 'getTriggerToken',
+ 'output': 'v1/trigger-token-response.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>/token',
+ 'stability': 'stable',
+ },
+ "hook": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'method': 'get',
+ 'name': 'hook',
+ 'output': 'v1/hook-definition.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>',
+ 'stability': 'stable',
+ },
+ "listHookGroups": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listHookGroups',
+ 'output': 'v1/list-hook-groups-response.json#',
+ 'route': '/hooks',
+ 'stability': 'stable',
+ },
+ "listHooks": {
+ 'args': ['hookGroupId'],
+ 'method': 'get',
+ 'name': 'listHooks',
+ 'output': 'v1/list-hooks-response.json#',
+ 'route': '/hooks/<hookGroupId>',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "removeHook": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'method': 'delete',
+ 'name': 'removeHook',
+ 'route': '/hooks/<hookGroupId>/<hookId>',
+ 'stability': 'stable',
+ },
+ "resetTriggerToken": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'method': 'post',
+ 'name': 'resetTriggerToken',
+ 'output': 'v1/trigger-token-response.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>/token',
+ 'stability': 'stable',
+ },
+ "triggerHook": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'input': 'v1/trigger-hook.json#',
+ 'method': 'post',
+ 'name': 'triggerHook',
+ 'output': 'v1/task-status.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>/trigger',
+ 'stability': 'stable',
+ },
+ "triggerHookWithToken": {
+ 'args': ['hookGroupId', 'hookId', 'token'],
+ 'input': 'v1/trigger-hook.json#',
+ 'method': 'post',
+ 'name': 'triggerHookWithToken',
+ 'output': 'v1/task-status.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>/trigger/<token>',
+ 'stability': 'stable',
+ },
+ "updateHook": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'input': 'v1/create-hook-request.json#',
+ 'method': 'post',
+ 'name': 'updateHook',
+ 'output': 'v1/hook-definition.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Hooks']
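+
+
+if __name__ == "__main__":
+    # Illustrative sketch only, not part of the generated client: triggering a
+    # hook passes the funcinfo args (hookGroupId, hookId) followed by a JSON
+    # payload matching the hook's triggerSchema. The rootUrl, hook identifiers
+    # and empty payload below are made-up assumptions.
+    import asyncio
+
+    async def _example():
+        async with createSession() as session:
+            hooks = Hooks({"rootUrl": "https://tc.example.com"}, session=session)
+            status = await hooks.triggerHook("example-group", "example-hook", {})
+            print(status)
+
+    asyncio.run(_example())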
diff --git a/third_party/python/taskcluster/taskcluster/aio/index.py b/third_party/python/taskcluster/taskcluster/aio/index.py
new file mode 100644
index 0000000000..2eb440b132
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/index.py
@@ -0,0 +1,278 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class Index(AsyncBaseClient):
+ """
+ The task index, typically available at `index.taskcluster.net`, is
+ responsible for indexing tasks. The service ensures that tasks can be
+ located by recency and/or arbitrary strings. Common use-cases include:
+
+ * Locate tasks by git or mercurial `<revision>`, or
+ * Locate latest task from given `<branch>`, such as a release.
+
+ **Index hierarchy**, tasks are indexed in a dot (`.`) separated hierarchy
+    called a namespace. For example, a task could be indexed with the index path
+    `some-app.<revision>.linux-64.release-build`. In this case the following
+    namespaces are created:
+
+    1. `some-app`,
+    2. `some-app.<revision>`, and
+    3. `some-app.<revision>.linux-64`
+
+ Inside the namespace `some-app.<revision>` you can find the namespace
+ `some-app.<revision>.linux-64` inside which you can find the indexed task
+ `some-app.<revision>.linux-64.release-build`. This is an example of indexing
+ builds for a given platform and revision.
+
+ **Task Rank**, when a task is indexed, it is assigned a `rank` (defaults
+ to `0`). If another task is already indexed in the same namespace with
+ lower or equal `rank`, the index for that task will be overwritten. For example
+ consider index path `mozilla-central.linux-64.release-build`. In
+ this case one might choose to use a UNIX timestamp or mercurial revision
+ number as `rank`. This way the latest completed linux 64 bit release
+ build is always available at `mozilla-central.linux-64.release-build`.
+
+ Note that this does mean index paths are not immutable: the same path may
+ point to a different task now than it did a moment ago.
+
+ **Indexed Data**, when a task is retrieved from the index the result includes
+ a `taskId` and an additional user-defined JSON blob that was indexed with
+ the task.
+
+ **Entry Expiration**, all indexed entries must have an expiration date.
+ Typically this defaults to one year, if not specified. If you are
+ indexing tasks to make it easy to find artifacts, consider using the
+ artifact's expiration date.
+
+ **Valid Characters**, all keys in a namespace `<key1>.<key2>` must be
+    in the form `/[a-zA-Z0-9_!~*'()%-]+/`. Observe that this is URL-safe and
+    that if you need to use another character you can URL-encode it.
+
+ **Indexing Routes**, tasks can be indexed using the API below, but the
+ most common way to index tasks is adding a custom route to `task.routes` of the
+ form `index.<namespace>`. In order to add this route to a task you'll
+ need the scope `queue:route:index.<namespace>`. When a task has
+ this route, it will be indexed when the task is **completed successfully**.
+ The task will be indexed with `rank`, `data` and `expires` as specified
+ in `task.extra.index`. See the example below:
+
+ ```
+ {
+ payload: { /* ... */ },
+ routes: [
+ // index.<namespace> prefixed routes, tasks CC'ed such a route will
+ // be indexed under the given <namespace>
+ "index.mozilla-central.linux-64.release-build",
+ "index.<revision>.linux-64.release-build"
+ ],
+ extra: {
+ // Optional details for indexing service
+ index: {
+ // Ordering, this taskId will overwrite any thing that has
+ // rank <= 4000 (defaults to zero)
+ rank: 4000,
+
+ // Specify when the entries expire (Defaults to 1 year)
+ expires: new Date().toJSON(),
+
+ // A little informal data to store along with taskId
+          // (less than 16 kB when encoded as JSON)
+ data: {
+ hgRevision: "...",
+            commitMessage: "...",
+ whatever...
+ }
+ },
+ // Extra properties for other services...
+ }
+ // Other task properties...
+ }
+ ```
+
+ **Remark**, when indexing tasks using custom routes, it's also possible
+ to listen for messages about these tasks. For
+ example one could bind to `route.index.some-app.*.release-build`,
+ and pick up all messages about release builds. Hence, it is a
+ good idea to document task index hierarchies, as these make up extension
+    points in their own right.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'index'
+ apiVersion = 'v1'
+
+ async def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ async def findTask(self, *args, **kwargs):
+ """
+ Find Indexed Task
+
+ Find a task by index path, returning the highest-rank task with that path. If no
+ task exists for the given path, this API end-point will respond with a 404 status.
+
+ This method gives output: ``v1/indexed-task-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["findTask"], *args, **kwargs)
+
+ async def listNamespaces(self, *args, **kwargs):
+ """
+ List Namespaces
+
+ List the namespaces immediately under a given namespace.
+
+ This endpoint
+ lists up to 1000 namespaces. If more namespaces are present, a
+ `continuationToken` will be returned, which can be given in the next
+ request. For the initial request, the payload should be an empty JSON
+ object.
+
+ This method gives output: ``v1/list-namespaces-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listNamespaces"], *args, **kwargs)
+
+ async def listTasks(self, *args, **kwargs):
+ """
+ List Tasks
+
+ List the tasks immediately under a given namespace.
+
+ This endpoint
+ lists up to 1000 tasks. If more tasks are present, a
+ `continuationToken` will be returned, which can be given in the next
+ request. For the initial request, the payload should be an empty JSON
+ object.
+
+        **Remark**, this end-point is designed for humans browsing for tasks, not
+        for services, as automated use of it makes little sense.
+
+ This method gives output: ``v1/list-tasks-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listTasks"], *args, **kwargs)
+
+ async def insertTask(self, *args, **kwargs):
+ """
+ Insert Task into Index
+
+ Insert a task into the index. If the new rank is less than the existing rank
+ at the given index path, the task is not indexed but the response is still 200 OK.
+
+ Please see the introduction above for information
+ about indexing successfully completed tasks automatically using custom routes.
+
+ This method takes input: ``v1/insert-task-request.json#``
+
+ This method gives output: ``v1/indexed-task-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["insertTask"], *args, **kwargs)
+
+ async def findArtifactFromTask(self, *args, **kwargs):
+ """
+ Get Artifact From Indexed Task
+
+ Find a task by index path and redirect to the artifact on the most recent
+ run with the given `name`.
+
+        Note that multiple calls to this endpoint may return artifacts from different tasks
+ if a new task is inserted into the index between calls. Avoid using this method as
+ a stable link to multiple, connected files if the index path does not contain a
+ unique identifier. For example, the following two links may return unrelated files:
+        * https://index.taskcluster.net/task/some-app.win64.latest.installer/artifacts/public/installer.exe
+        * https://index.taskcluster.net/task/some-app.win64.latest.installer/artifacts/public/debug-symbols.zip
+
+        This problem can be remedied by including the revision in the index path or by bundling both
+ installer and debug symbols into a single artifact.
+
+ If no task exists for the given index path, this API end-point responds with 404.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["findArtifactFromTask"], *args, **kwargs)
+
+ funcinfo = {
+ "findArtifactFromTask": {
+ 'args': ['indexPath', 'name'],
+ 'method': 'get',
+ 'name': 'findArtifactFromTask',
+ 'route': '/task/<indexPath>/artifacts/<name>',
+ 'stability': 'stable',
+ },
+ "findTask": {
+ 'args': ['indexPath'],
+ 'method': 'get',
+ 'name': 'findTask',
+ 'output': 'v1/indexed-task-response.json#',
+ 'route': '/task/<indexPath>',
+ 'stability': 'stable',
+ },
+ "insertTask": {
+ 'args': ['namespace'],
+ 'input': 'v1/insert-task-request.json#',
+ 'method': 'put',
+ 'name': 'insertTask',
+ 'output': 'v1/indexed-task-response.json#',
+ 'route': '/task/<namespace>',
+ 'stability': 'stable',
+ },
+ "listNamespaces": {
+ 'args': ['namespace'],
+ 'method': 'get',
+ 'name': 'listNamespaces',
+ 'output': 'v1/list-namespaces-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/namespaces/<namespace>',
+ 'stability': 'stable',
+ },
+ "listTasks": {
+ 'args': ['namespace'],
+ 'method': 'get',
+ 'name': 'listTasks',
+ 'output': 'v1/list-tasks-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/tasks/<namespace>',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Index']
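+
+
+if __name__ == "__main__":
+    # Illustrative sketch only, not part of the generated client: findTask()
+    # takes a dot-separated index path as its single positional argument and
+    # returns the indexed-task JSON. The rootUrl and the index path below are
+    # made-up assumptions.
+    import asyncio
+
+    async def _example():
+        async with createSession() as session:
+            index = Index({"rootUrl": "https://tc.example.com"}, session=session)
+            task = await index.findTask("example-app.main.latest.release-build")
+            print(task)
+
+    asyncio.run(_example())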
diff --git a/third_party/python/taskcluster/taskcluster/aio/login.py b/third_party/python/taskcluster/taskcluster/aio/login.py
new file mode 100644
index 0000000000..83515ee157
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/login.py
@@ -0,0 +1,89 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class Login(AsyncBaseClient):
+ """
+ The Login service serves as the interface between external authentication
+ systems and Taskcluster credentials.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'login'
+ apiVersion = 'v1'
+
+ async def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ async def oidcCredentials(self, *args, **kwargs):
+ """
+ Get Taskcluster credentials given a suitable `access_token`
+
+ Given an OIDC `access_token` from a trusted OpenID provider, return a
+ set of Taskcluster credentials for use on behalf of the identified
+ user.
+
+ This method is typically not called with a Taskcluster client library
+ and does not accept Hawk credentials. The `access_token` should be
+ given in an `Authorization` header:
+ ```
+ Authorization: Bearer abc.xyz
+ ```
+
+ The `access_token` is first verified against the named
+        `provider`, then passed to the provider's APIBuilder to retrieve a user
+ profile. That profile is then used to generate Taskcluster credentials
+ appropriate to the user. Note that the resulting credentials may or may
+ not include a `certificate` property. Callers should be prepared for either
+ alternative.
+
+        The given credentials will expire in a relatively short time. Callers should
+        monitor this expiration and refresh the credentials by calling this endpoint
+        again once they have expired.
+
+ This method gives output: ``v1/oidc-credentials-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["oidcCredentials"], *args, **kwargs)
+
+ funcinfo = {
+ "oidcCredentials": {
+ 'args': ['provider'],
+ 'method': 'get',
+ 'name': 'oidcCredentials',
+ 'output': 'v1/oidc-credentials-response.json#',
+ 'route': '/oidc-credentials/<provider>',
+ 'stability': 'experimental',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Login']
diff --git a/third_party/python/taskcluster/taskcluster/aio/notify.py b/third_party/python/taskcluster/taskcluster/aio/notify.py
new file mode 100644
index 0000000000..d4aa02eefe
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/notify.py
@@ -0,0 +1,125 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class Notify(AsyncBaseClient):
+ """
+    The notification service, typically available at `notify.taskcluster.net`,
+ listens for tasks with associated notifications and handles requests to
+ send emails and post pulse messages.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'notify'
+ apiVersion = 'v1'
+
+ async def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ async def email(self, *args, **kwargs):
+ """
+ Send an Email
+
+ Send an email to `address`. The content is markdown and will be rendered
+ to HTML, but both the HTML and raw markdown text will be sent in the
+ email. If a link is included, it will be rendered to a nice button in the
+        HTML version of the email.
+
+ This method takes input: ``v1/email-request.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["email"], *args, **kwargs)
+
+ async def pulse(self, *args, **kwargs):
+ """
+ Publish a Pulse Message
+
+ Publish a message on pulse with the given `routingKey`.
+
+ This method takes input: ``v1/pulse-request.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["pulse"], *args, **kwargs)
+
+ async def irc(self, *args, **kwargs):
+ """
+ Post IRC Message
+
+ Post a message on IRC to a specific channel or user, or a specific user
+ on a specific channel.
+
+ Success of this API method does not imply the message was successfully
+ posted. This API method merely inserts the IRC message into a queue
+ that will be processed by a background process.
+        This allows us to re-send the message in the face of connection issues.
+
+        However, if the user isn't online, the message will be dropped without
+        error. We may improve this behavior in the future. For now just keep
+        in mind that IRC is a best-effort service.
+
+ This method takes input: ``v1/irc-request.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["irc"], *args, **kwargs)
+
+ funcinfo = {
+ "email": {
+ 'args': [],
+ 'input': 'v1/email-request.json#',
+ 'method': 'post',
+ 'name': 'email',
+ 'route': '/email',
+ 'stability': 'experimental',
+ },
+ "irc": {
+ 'args': [],
+ 'input': 'v1/irc-request.json#',
+ 'method': 'post',
+ 'name': 'irc',
+ 'route': '/irc',
+ 'stability': 'experimental',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "pulse": {
+ 'args': [],
+ 'input': 'v1/pulse-request.json#',
+ 'method': 'post',
+ 'name': 'pulse',
+ 'route': '/pulse',
+ 'stability': 'experimental',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Notify']
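+
+
+if __name__ == "__main__":
+    # Illustrative sketch only, not part of the generated client: email() takes
+    # no route args, just the JSON payload described by v1/email-request.json#.
+    # The rootUrl and the payload fields shown here are made-up assumptions.
+    import asyncio
+
+    async def _example():
+        async with createSession() as session:
+            notify = Notify({"rootUrl": "https://tc.example.com"}, session=session)
+            await notify.email({
+                "address": "someone@example.com",
+                "subject": "Example notification",
+                "content": "Hello from an *illustrative* sketch.",
+            })
+
+    asyncio.run(_example())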
diff --git a/third_party/python/taskcluster/taskcluster/aio/pulse.py b/third_party/python/taskcluster/taskcluster/aio/pulse.py
new file mode 100644
index 0000000000..1ce4966427
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/pulse.py
@@ -0,0 +1,135 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class Pulse(AsyncBaseClient):
+ """
+    The taskcluster-pulse service, typically available at `pulse.taskcluster.net`,
+ manages pulse credentials for taskcluster users.
+
+ A service to manage Pulse credentials for anything using
+ Taskcluster credentials. This allows for self-service pulse
+ access and greater control within the Taskcluster project.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'pulse'
+ apiVersion = 'v1'
+
+ async def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ async def listNamespaces(self, *args, **kwargs):
+ """
+ List Namespaces
+
+ List the namespaces managed by this service.
+
+        This will list up to 1000 namespaces. If more namespaces are present, a
+        `continuationToken` will be returned, which can be given in the next
+        request. For the initial request, do not provide a continuation token.
+
+ This method gives output: ``v1/list-namespaces-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listNamespaces"], *args, **kwargs)
+
+ async def namespace(self, *args, **kwargs):
+ """
+ Get a namespace
+
+ Get public information about a single namespace. This is the same information
+ as returned by `listNamespaces`.
+
+ This method gives output: ``v1/namespace.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["namespace"], *args, **kwargs)
+
+ async def claimNamespace(self, *args, **kwargs):
+ """
+ Claim a namespace
+
+ Claim a namespace, returning a connection string with access to that namespace
+ good for use until the `reclaimAt` time in the response body. The connection
+ string can be used as many times as desired during this period, but must not
+ be used after `reclaimAt`.
+
+        Connections made with this connection string may persist beyond `reclaimAt`,
+        although they should not persist forever. 24 hours is a good maximum, and this
+ service will terminate connections after 72 hours (although this value is
+ configurable).
+
+ The specified `expires` time updates any existing expiration times. Connections
+ for expired namespaces will be terminated.
+
+ This method takes input: ``v1/namespace-request.json#``
+
+ This method gives output: ``v1/namespace-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["claimNamespace"], *args, **kwargs)
+
+ funcinfo = {
+ "claimNamespace": {
+ 'args': ['namespace'],
+ 'input': 'v1/namespace-request.json#',
+ 'method': 'post',
+ 'name': 'claimNamespace',
+ 'output': 'v1/namespace-response.json#',
+ 'route': '/namespace/<namespace>',
+ 'stability': 'experimental',
+ },
+ "listNamespaces": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listNamespaces',
+ 'output': 'v1/list-namespaces-response.json#',
+ 'query': ['limit', 'continuationToken'],
+ 'route': '/namespaces',
+ 'stability': 'experimental',
+ },
+ "namespace": {
+ 'args': ['namespace'],
+ 'method': 'get',
+ 'name': 'namespace',
+ 'output': 'v1/namespace.json#',
+ 'route': '/namespace/<namespace>',
+ 'stability': 'experimental',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Pulse']
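+
+
+if __name__ == "__main__":
+    # Illustrative sketch only, not part of the generated client: namespace()
+    # takes a single positional argument and returns the public information for
+    # that namespace. The rootUrl and namespace name are made-up assumptions.
+    import asyncio
+
+    async def _example():
+        async with createSession() as session:
+            pulse = Pulse({"rootUrl": "https://tc.example.com"}, session=session)
+            ns = await pulse.namespace("example-namespace")
+            print(ns)
+
+    asyncio.run(_example())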
diff --git a/third_party/python/taskcluster/taskcluster/aio/purgecache.py b/third_party/python/taskcluster/taskcluster/aio/purgecache.py
new file mode 100644
index 0000000000..b36273d306
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/purgecache.py
@@ -0,0 +1,124 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class PurgeCache(AsyncBaseClient):
+ """
+ The purge-cache service is responsible for publishing a pulse
+ message for workers, so they can purge cache upon request.
+
+ This document describes the API end-point for publishing the pulse
+ message. This is mainly intended to be used by tools.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'purge-cache'
+ apiVersion = 'v1'
+
+ async def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ async def purgeCache(self, *args, **kwargs):
+ """
+ Purge Worker Cache
+
+ Publish a purge-cache message to purge caches named `cacheName` with
+ `provisionerId` and `workerType` in the routing-key. Workers should
+ be listening for this message and purge caches when they see it.
+
+ This method takes input: ``v1/purge-cache-request.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["purgeCache"], *args, **kwargs)
+
+ async def allPurgeRequests(self, *args, **kwargs):
+ """
+ All Open Purge Requests
+
+        This is useful mostly for administrators to view
+ the set of open purge requests. It should not
+ be used by workers. They should use the purgeRequests
+ endpoint that is specific to their workerType and
+ provisionerId.
+
+ This method gives output: ``v1/all-purge-cache-request-list.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["allPurgeRequests"], *args, **kwargs)
+
+ async def purgeRequests(self, *args, **kwargs):
+ """
+ Open Purge Requests for a provisionerId/workerType pair
+
+ List of caches that need to be purged if they are from before
+ a certain time. This is safe to be used in automation from
+ workers.
+
+ This method gives output: ``v1/purge-cache-request-list.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["purgeRequests"], *args, **kwargs)
+
+ funcinfo = {
+ "allPurgeRequests": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'allPurgeRequests',
+ 'output': 'v1/all-purge-cache-request-list.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/purge-cache/list',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "purgeCache": {
+ 'args': ['provisionerId', 'workerType'],
+ 'input': 'v1/purge-cache-request.json#',
+ 'method': 'post',
+ 'name': 'purgeCache',
+ 'route': '/purge-cache/<provisionerId>/<workerType>',
+ 'stability': 'stable',
+ },
+ "purgeRequests": {
+ 'args': ['provisionerId', 'workerType'],
+ 'method': 'get',
+ 'name': 'purgeRequests',
+ 'output': 'v1/purge-cache-request-list.json#',
+ 'query': ['since'],
+ 'route': '/purge-cache/<provisionerId>/<workerType>',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'PurgeCache']
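+
+
+if __name__ == "__main__":
+    # Illustrative sketch only, not part of the generated client: purgeCache()
+    # takes the funcinfo args (provisionerId, workerType) followed by the JSON
+    # payload described by v1/purge-cache-request.json#. The rootUrl, the
+    # identifiers and the cacheName value are made-up assumptions.
+    import asyncio
+
+    async def _example():
+        async with createSession() as session:
+            pc = PurgeCache({"rootUrl": "https://tc.example.com"}, session=session)
+            await pc.purgeCache("example-provisioner", "example-worker-type",
+                                {"cacheName": "example-cache"})
+
+    asyncio.run(_example())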
diff --git a/third_party/python/taskcluster/taskcluster/aio/purgecacheevents.py b/third_party/python/taskcluster/taskcluster/aio/purgecacheevents.py
new file mode 100644
index 0000000000..69b8a30551
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/purgecacheevents.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class PurgeCacheEvents(AsyncBaseClient):
+ """
+ The purge-cache service, typically available at
+ `purge-cache.taskcluster.net`, is responsible for publishing a pulse
+ message for workers, so they can purge cache upon request.
+
+ This document describes the exchange offered for workers by the
+    purge-cache service.
+ """
+
+ classOptions = {
+ "exchangePrefix": "exchange/taskcluster-purge-cache/v1/",
+ }
+ serviceName = 'purge-cache'
+ apiVersion = 'v1'
+
+ def purgeCache(self, *args, **kwargs):
+ """
+ Purge Cache Messages
+
+        When a cache purge is requested, a message will be posted on this
+        exchange with the designated `provisionerId` and `workerType` in the
+        routing-key and the name of the `cacheFolder` as payload.
+
+        This exchange outputs: ``v1/purge-cache-message.json#``. This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * provisionerId: `provisionerId` under which to purge cache. (required)
+
+ * workerType: `workerType` for which to purge cache. (required)
+ """
+
+ ref = {
+ 'exchange': 'purge-cache',
+ 'name': 'purgeCache',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ ],
+ 'schema': 'v1/purge-cache-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ funcinfo = {
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'PurgeCacheEvents']
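+
+
+if __name__ == "__main__":
+    # Illustrative sketch only, not part of the generated client: like the other
+    # *Events classes, purgeCache() here only returns an exchange / routing-key
+    # description for a pulse consumer to bind with; routing-key components that
+    # are not supplied are left as wildcards, as understood here. The rootUrl and
+    # provisionerId below are made-up assumptions.
+    pce = PurgeCacheEvents({"rootUrl": "https://tc.example.com"})
+    binding = pce.purgeCache(provisionerId="example-provisioner")
+    print(binding["exchange"], binding["routingKeyPattern"])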
diff --git a/third_party/python/taskcluster/taskcluster/aio/queue.py b/third_party/python/taskcluster/taskcluster/aio/queue.py
new file mode 100644
index 0000000000..58fd4d9e71
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/queue.py
@@ -0,0 +1,1134 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class Queue(AsyncBaseClient):
+ """
+    The queue, typically available at `queue.taskcluster.net`, is responsible
+    for accepting tasks and tracking their state as they are executed by
+    workers, in order to ensure they are eventually resolved.
+
+ This document describes the API end-points offered by the queue. These
+    end-points target the following audience:
+ * Schedulers, who create tasks to be executed,
+ * Workers, who execute tasks, and
+    * Tools, which want to inspect the state of a task.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'queue'
+ apiVersion = 'v1'
+
+ async def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ async def task(self, *args, **kwargs):
+ """
+ Get Task Definition
+
+        This end-point will return the task definition. Notice that the task
+        definition may have been modified by the queue: if an optional property is
+        not specified, the queue may provide a default value.
+
+ This method gives output: ``v1/task.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["task"], *args, **kwargs)
+
+ async def status(self, *args, **kwargs):
+ """
+ Get task status
+
+ Get task status structure from `taskId`
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["status"], *args, **kwargs)
+
+ async def listTaskGroup(self, *args, **kwargs):
+ """
+ List Task Group
+
+ List tasks sharing the same `taskGroupId`.
+
+ As a task-group may contain an unbounded number of tasks, this end-point
+ may return a `continuationToken`. To continue listing tasks you must call
+ the `listTaskGroup` again with the `continuationToken` as the
+ query-string option `continuationToken`.
+
+ By default this end-point will try to return up to 1000 members in one
+ request. But it **may return less**, even if more tasks are available.
+ It may also return a `continuationToken` even though there are no more
+ results. However, you can only be sure to have seen all results if you
+ keep calling `listTaskGroup` with the last `continuationToken` until you
+ get a result without a `continuationToken`.
+
+ If you are not interested in listing all the members at once, you may
+ use the query-string option `limit` to return fewer.
+
+ This method gives output: ``v1/list-task-group-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listTaskGroup"], *args, **kwargs)
+
+ async def listDependentTasks(self, *args, **kwargs):
+ """
+ List Dependent Tasks
+
+ List tasks that depend on the given `taskId`.
+
+        As many tasks from different task-groups may depend on a single task,
+ this end-point may return a `continuationToken`. To continue listing
+ tasks you must call `listDependentTasks` again with the
+ `continuationToken` as the query-string option `continuationToken`.
+
+ By default this end-point will try to return up to 1000 tasks in one
+ request. But it **may return less**, even if more tasks are available.
+ It may also return a `continuationToken` even though there are no more
+ results. However, you can only be sure to have seen all results if you
+ keep calling `listDependentTasks` with the last `continuationToken` until
+ you get a result without a `continuationToken`.
+
+ If you are not interested in listing all the tasks at once, you may
+ use the query-string option `limit` to return fewer.
+
+ This method gives output: ``v1/list-dependent-tasks-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listDependentTasks"], *args, **kwargs)
+
+ async def createTask(self, *args, **kwargs):
+ """
+ Create New Task
+
+        Create a new task. This is an **idempotent** operation, so repeat it if
+        you get an internal server error or the network connection is dropped.
+
+ **Task `deadline`**: the deadline property can be no more than 5 days
+ into the future. This is to limit the amount of pending tasks not being
+ taken care of. Ideally, you should use a much shorter deadline.
+
+ **Task expiration**: the `expires` property must be greater than the
+ task `deadline`. If not provided it will default to `deadline` + one
+        year. Notice that artifacts created by the task must expire before the task.
+
+ **Task specific routing-keys**: using the `task.routes` property you may
+ define task specific routing-keys. If a task has a task specific
+ routing-key: `<route>`, then when the AMQP message about the task is
+ published, the message will be CC'ed with the routing-key:
+ `route.<route>`. This is useful if you want another component to listen
+ for completed tasks you have posted. The caller must have scope
+ `queue:route:<route>` for each route.
+
+ **Dependencies**: any tasks referenced in `task.dependencies` must have
+ already been created at the time of this call.
+
+ **Scopes**: Note that the scopes required to complete this API call depend
+ on the content of the `scopes`, `routes`, `schedulerId`, `priority`,
+ `provisionerId`, and `workerType` properties of the task definition.
+
+ **Legacy Scopes**: The `queue:create-task:..` scope without a priority and
+ the `queue:define-task:..` and `queue:task-group-id:..` scopes are considered
+ legacy and should not be used. Note that the new, non-legacy scopes require
+ a `queue:scheduler-id:..` scope as well as scopes for the proper priority.
+
+ This method takes input: ``v1/create-task-request.json#``
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["createTask"], *args, **kwargs)
+
+ async def defineTask(self, *args, **kwargs):
+ """
+ Define Task
+
+ **Deprecated**, this is the same as `createTask` with a **self-dependency**.
+        This is only present for legacy reasons.
+
+ This method takes input: ``v1/create-task-request.json#``
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``deprecated``
+ """
+
+ return await self._makeApiCall(self.funcinfo["defineTask"], *args, **kwargs)
+
+ async def scheduleTask(self, *args, **kwargs):
+ """
+ Schedule Defined Task
+
+ scheduleTask will schedule a task to be executed, even if it has
+ unresolved dependencies. A task would otherwise only be scheduled if
+ its dependencies were resolved.
+
+ This is useful if you have defined a task that depends on itself or on
+ some other task that has not been resolved, but you wish the task to be
+ scheduled immediately.
+
+ This will announce the task as pending and workers will be allowed to
+ claim it and resolve the task.
+
+ **Note** this operation is **idempotent** and will not fail or complain
+ if called with a `taskId` that is already scheduled, or even resolved.
+ To reschedule a task previously resolved, use `rerunTask`.
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["scheduleTask"], *args, **kwargs)
+
+ async def rerunTask(self, *args, **kwargs):
+ """
+ Rerun a Resolved Task
+
+ This method _reruns_ a previously resolved task, even if it was
+ _completed_. This is useful if your task completes unsuccessfully, and
+ you just want to run it from scratch again. This will also reset the
+ number of `retries` allowed.
+
+ Remember that `retries` in the task status counts the number of runs that
+ the queue has started because the worker stopped responding, for example
+ because a spot node died.
+
+ **Remark** this operation is idempotent: if you try to rerun a task that
+ is neither `failed` nor `completed`, this operation will just return
+ the current task status.
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``deprecated``
+ """
+
+ return await self._makeApiCall(self.funcinfo["rerunTask"], *args, **kwargs)
+
+ async def cancelTask(self, *args, **kwargs):
+ """
+ Cancel Task
+
+ This method will cancel a task that is either `unscheduled`, `pending` or
+ `running`. It will resolve the current run as `exception` with
+ `reasonResolved` set to `canceled`. If the task isn't scheduled yet, i.e.
+ it doesn't have any runs, an initial run will be added and resolved as
+ described above. Hence, after canceling a task, it cannot be scheduled
+ with `queue.scheduleTask`, but a new run can be created with
+ `queue.rerun`. These semantics are equivalent to calling
+ `queue.scheduleTask` immediately followed by `queue.cancelTask`.
+
+ **Remark** this operation is idempotent: if you try to cancel a task that
+ isn't `unscheduled`, `pending` or `running`, this operation will just
+ return the current task status.
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["cancelTask"], *args, **kwargs)
+
+ async def claimWork(self, *args, **kwargs):
+ """
+ Claim Work
+
+ Claim pending task(s) for the given `provisionerId`/`workerType` queue.
+
+ If any work is available (even if fewer than the requested number of
+ tasks), this will return immediately. Otherwise, it will block for tens of
+ seconds waiting for work. If no work appears, it will return an empty
+ list of tasks. Callers should sleep a short while (to avoid denial of
+ service in an error condition) and call the endpoint again. This is a
+ simple implementation of "long polling".
+
+ This method takes input: ``v1/claim-work-request.json#``
+
+ This method gives output: ``v1/claim-work-response.json#``
+
+ This method is ``stable``
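+
+ Example (a hypothetical sketch of the long-polling loop described above;
+ `queue` is an async `Queue` client as in the `createTask` example and the
+ worker identifiers are made up)::
+
+     import asyncio
+
+     while True:
+         work = await queue.claimWork('example-provisioner', 'example-worker-type', {
+             'workerGroup': 'example-group',
+             'workerId': 'example-worker-1',
+             'tasks': 4,   # maximum number of tasks to claim at once
+         })
+         if not work['tasks']:
+             await asyncio.sleep(5)   # brief sleep on an empty response
+             continue
+         for claim in work['tasks']:
+             ...   # run each claimed task using claim['credentials'] etc.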
+ """
+
+ return await self._makeApiCall(self.funcinfo["claimWork"], *args, **kwargs)
+
+ async def claimTask(self, *args, **kwargs):
+ """
+ Claim Task
+
+ claim a task - never documented
+
+ This method takes input: ``v1/task-claim-request.json#``
+
+ This method gives output: ``v1/task-claim-response.json#``
+
+ This method is ``deprecated``
+ """
+
+ return await self._makeApiCall(self.funcinfo["claimTask"], *args, **kwargs)
+
+ async def reclaimTask(self, *args, **kwargs):
+ """
+ Reclaim task
+
+ Refresh the claim for a specific `runId` for given `taskId`. This updates
+ the `takenUntil` property and returns a new set of temporary credentials
+ for performing requests on behalf of the task. These credentials should
+ be used in-place of the credentials returned by `claimWork`.
+
+ The `reclaimTask` request serves to:
+ * Postpone `takenUntil`, preventing the queue from resolving
+ `claim-expired`,
+ * Refresh temporary credentials used for processing the task, and
+ * Abort execution if the task/run has been resolved.
+
+ If the `takenUntil` timestamp is exceeded the queue will resolve the run
+ as _exception_ with reason `claim-expired`, and proceed to retry the
+ task. This ensures that tasks are retried, even if workers disappear
+ without warning.
+
+ If the task is resolved, this end-point will return `409` reporting
+ `RequestConflict`. This typically happens if the task has been canceled
+ or the `task.deadline` has been exceeded. If reclaiming fails, workers
+ should abort the task and forget about the given `runId`. There is no
+ need to resolve the run or upload artifacts.
+
+ This method gives output: ``v1/task-reclaim-response.json#``
+
+ This method is ``stable``
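+
+ Example (a hypothetical sketch; assumes the client raises
+ `taskcluster.exceptions.TaskclusterRestFailure` with a `status_code`
+ attribute, as in the upstream client)::
+
+     import taskcluster.exceptions
+
+     try:
+         reclaim = await queue.reclaimTask(taskId, runId)
+         credentials = reclaim['credentials']   # use these from now on
+     except taskcluster.exceptions.TaskclusterRestFailure as e:
+         if e.status_code == 409:
+             ...   # task/run already resolved: abort and forget this runId
+         else:
+             raise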
+ """
+
+ return await self._makeApiCall(self.funcinfo["reclaimTask"], *args, **kwargs)
+
+ async def reportCompleted(self, *args, **kwargs):
+ """
+ Report Run Completed
+
+ Report a task completed, resolving the run as `completed`.
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["reportCompleted"], *args, **kwargs)
+
+ async def reportFailed(self, *args, **kwargs):
+ """
+ Report Run Failed
+
+ Report a run failed, resolving the run as `failed`. Use this to resolve
+ a run that failed because the task specific code behaved unexpectedly.
+ For example the task exited non-zero, or didn't produce expected output.
+
+ Do not use this if the task couldn't be run because of a malformed
+ payload, or some other unexpected condition. In these cases we have a task
+ exception, which should be reported with `reportException`.
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["reportFailed"], *args, **kwargs)
+
+ async def reportException(self, *args, **kwargs):
+ """
+ Report Task Exception
+
+ Resolve a run as _exception_. Generally, you will want to report tasks as
+ failed instead of exception. You should `reportException` if:
+
+ * The `task.payload` is invalid,
+ * Non-existent resources are referenced,
+ * Declared actions cannot be executed due to unavailable resources,
+ * The worker had to shut down prematurely,
+ * The worker experienced an unknown error, or,
+ * The task explicitly requested a retry.
+
+ Do not use this to signal that some user-specified code crashed for any
+ reason specific to this code. If user-specific code hits a resource that
+ is temporarily unavailable, the worker should report the task as _failed_.
+
+ This method takes input: ``v1/task-exception-request.json#``
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
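+
+ Example (a hypothetical sketch; the `reason` value should be checked
+ against ``v1/task-exception-request.json#``)::
+
+     await queue.reportException(taskId, runId, {
+         'reason': 'malformed-payload',   # e.g. the task.payload was invalid
+     })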
+ """
+
+ return await self._makeApiCall(self.funcinfo["reportException"], *args, **kwargs)
+
+ async def createArtifact(self, *args, **kwargs):
+ """
+ Create Artifact
+
+ This API end-point creates an artifact for a specific run of a task. This
+ should **only** be used by a worker currently operating on this task, or
+ from a process running within the task (i.e. on the worker).
+
+ All artifacts must specify an `expires` time; the queue will
+ automatically take care of deleting artifacts past their
+ expiration point. This feature makes it feasible to upload large
+ intermediate artifacts from data processing applications, as the
+ artifacts can be set to expire a few days later.
+
+ We currently support 3 different `storageType`s, each storage type has
+ slightly different features and in some cases different semantics.
+ We also have 2 deprecated `storageType`s which are only maintained for
+ backwards compatibility and should not be used in new implementations.
+
+ **Blob artifacts** are useful for storing large files. Currently, these
+ are all stored in S3 but there are facilities for adding support for other
+ backends in the future. A call for this type of artifact must provide information
+ about the file which will be uploaded. This includes sha256 sums and sizes.
+ This method will return a list of general form HTTP requests which are signed
+ by AWS S3 credentials managed by the Queue. Once these requests are completed
+ the list of `ETag` values returned by the requests must be passed to the
+ queue `completeArtifact` method.
+
+ **S3 artifacts** (DEPRECATED) are useful for static files which will be
+ stored on S3. When creating an S3 artifact the queue will return a
+ pre-signed URL to which you can do a `PUT` request to upload your
+ artifact. Note that the `PUT` request **must** specify the `content-length`
+ header and **must** give the `content-type` header the same value as in
+ the request to `createArtifact`.
+
+ **Azure artifacts** (DEPRECATED) are stored in the _Azure Blob Storage_ service,
+ which given the consistency guarantees and API interface offered by Azure
+ is more suitable for artifacts that will be modified during the execution
+ of the task. For example docker-worker has a feature that persists the
+ task log to Azure Blob Storage every few seconds creating a somewhat
+ live log. A request to create an Azure artifact will return a URL
+ featuring a [Shared-Access-Signature](http://msdn.microsoft.com/en-us/library/azure/dn140256.aspx);
+ refer to MSDN for further information on how to use these.
+ **Warning: Azure artifacts are currently an experimental feature subject
+ to changes and data-drops.**
+
+ **Reference artifacts** consist only of meta-data which the queue will
+ store for you. These artifacts really only have a `url` property and
+ when the artifact is requested the client will be redirected to the URL
+ provided with a `303` (See Other) redirect. Please note that we cannot
+ delete artifacts you upload to other services; we can only delete the
+ reference to the artifact when it expires.
+
+ **Error artifacts** consist only of meta-data which the queue will
+ store for you. These artifacts are only meant to indicate that the
+ worker or the task failed to generate a specific artifact that it
+ would otherwise have uploaded. For example docker-worker will upload an
+ error artifact, if the file it was supposed to upload doesn't exist or
+ turns out to be a directory. Clients requesting an error artifact will
+ get a `424` (Failed Dependency) response. This is mainly designed to
+ ensure that dependent tasks can distinguish between artifacts that were
+ supposed to be generated and artifacts for which the name is misspelled.
+
+ **Artifact immutability**: generally speaking you cannot overwrite an
+ artifact once it has been created. But if you repeat the request with the same
+ properties the request will succeed, as the operation is idempotent.
+ This is useful if you need to refresh a signed URL while uploading.
+ Do not abuse this to overwrite artifacts created by another entity,
+ such as a worker-host overwriting an artifact created by worker-code.
+
+ As a special case the `url` property on _reference artifacts_ can be
+ updated. You should only use this to update the `url` property for
+ reference artifacts your process has created.
+
+ This method takes input: ``v1/post-artifact-request.json#``
+
+ This method gives output: ``v1/post-artifact-response.json#``
+
+ This method is ``stable``
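+
+ Example (a hypothetical sketch of the deprecated `s3` flow described
+ above; field names such as `putUrl` are assumptions to be verified
+ against ``v1/post-artifact-response.json#``)::
+
+     response = await queue.createArtifact(taskId, runId, 'public/logs/build.log', {
+         'storageType': 's3',
+         'contentType': 'text/plain',
+         'expires': taskcluster.fromNow('1 week'),
+     })
+     # Then PUT the file to response['putUrl'], sending content-length and
+     # the same content-type as declared above.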
+ """
+
+ return await self._makeApiCall(self.funcinfo["createArtifact"], *args, **kwargs)
+
+ async def completeArtifact(self, *args, **kwargs):
+ """
+ Complete Artifact
+
+ This endpoint finalises an upload done through the blob `storageType`.
+ The queue will ensure that the task/run is still allowing artifacts
+ to be uploaded. For single-part S3 blob artifacts, this endpoint
+ will simply ensure the artifact is present in S3. For multipart S3
+ artifacts, the endpoint will perform the commit step of the multipart
+ upload flow. As the final step for both multi and single part artifacts,
+ the `present` entity field will be set to `true` to reflect that the
+ artifact is now present and a message will be published to pulse. NOTE: this
+ endpoint *must* be called for all artifacts of storageType 'blob'.
+
+ This method takes input: ``v1/put-artifact-request.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["completeArtifact"], *args, **kwargs)
+
+ async def getArtifact(self, *args, **kwargs):
+ """
+ Get Artifact from Run
+
+ Get artifact by `<name>` from a specific run.
+
+ **Public Artifacts**, in order to get an artifact you need the scope
+ `queue:get-artifact:<name>`, where `<name>` is the name of the artifact.
+ But if the artifact `name` starts with `public/`, authentication and
+ authorization is not necessary to fetch the artifact.
+
+ **API Clients**, this method will redirect you to the artifact, if it is
+ stored externally. Either way, the response may not be JSON. So API
+ client users might want to generate a signed URL for this end-point and
+ use that URL with an HTTP client that can handle responses correctly.
+
+ **Downloading artifacts**
+ There are some special considerations for those http clients which download
+ artifacts. This api endpoint is designed to be compatible with an HTTP 1.1
+ compliant client, but has extra features to ensure the download is valid.
+ It is strongly recommended that consumers use either taskcluster-lib-artifact (JS),
+ taskcluster-lib-artifact-go (Go) or the CLI written in Go to interact with
+ artifacts.
+
+ In order to download an artifact the following must be done:
+
+ 1. Obtain queue url. Building a signed url with a taskcluster client is
+ recommended
+ 1. Make a GET request which does not follow redirects
+ 1. In all cases, if specified, the
+ x-taskcluster-location-{content,transfer}-{sha256,length} values must be
+ validated to be equal to the Content-Length and Sha256 checksum of the
+ final artifact downloaded, as well as any intermediate redirects.
+ 1. If this response is a 500-series error, retry using an exponential
+ backoff. No more than 5 retries should be attempted
+ 1. If this response is a 400-series error, treat it appropriately for
+ your context. This might be an error in responding to this request or
+ an Error storage type body. This request should not be retried.
+ 1. If this response is a 200-series response, the response body is the artifact.
+ If the x-taskcluster-location-{content,transfer}-{sha256,length} and
+ x-taskcluster-location-content-encoding are specified, they should match
+ this response body
+ 1. If the response type is a 300-series redirect, the artifact will be at the
+ location specified by the `Location` header. There are multiple artifact storage
+ types which use a 300-series redirect.
+ 1. For all redirects followed, the user must verify that the content-sha256, content-length,
+ transfer-sha256, transfer-length and content-encoding match every further request. The final
+ artifact must also be validated against the values specified in the original queue response
+ 1. Caching of requests with an x-taskcluster-artifact-storage-type value of `reference`
+ must not occur
+ 1. A request which has x-taskcluster-artifact-storage-type value of `blob` and does not
+ have x-taskcluster-location-content-sha256 or x-taskcluster-location-content-length
+ must be treated as an error
+
+ **Headers**
+ The following important headers are set on the response to this method:
+
+ * location: the url of the artifact if a redirect is to be performed
+ * x-taskcluster-artifact-storage-type: the storage type. Example: blob, s3, error
+
+ The following important headers are set on responses to this method for Blob artifacts
+
+ * x-taskcluster-location-content-sha256: the SHA256 of the artifact
+ *after* any content-encoding is undone. Sha256 is hex encoded (e.g. [0-9A-Fa-f]{64})
+ * x-taskcluster-location-content-length: the number of bytes *after* any content-encoding
+ is undone
+ * x-taskcluster-location-transfer-sha256: the SHA256 of the artifact
+ *before* any content-encoding is undone. This is the SHA256 of what is sent over
+ the wire. Sha256 is hex encoded (e.g. [0-9A-Fa-f]{64})
+ * x-taskcluster-location-transfer-length: the number of bytes *before* any content-encoding
+ is undone
+ * x-taskcluster-location-content-encoding: the content-encoding used. It will either
+ be `gzip` or `identity` right now. This is hardcoded to a value set when the artifact
+ was created and no content-negotiation occurs
+ * x-taskcluster-location-content-type: the content-type of the artifact
+
+ **Caching**, artifacts may be cached in data centers closer to the
+ workers in order to reduce bandwidth costs. This can lead to longer
+ response times. Caching can be skipped by setting the header
+ `x-taskcluster-skip-cache: true`; this should only be used for resources
+ where request volume is known to be low, and caching not useful.
+ (This feature may be disabled in the future; use it sparingly!)
+
+ This method is ``stable``
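+
+ Example (a hypothetical sketch; assumes the client exposes `buildUrl` and
+ `buildSignedUrl` as in the upstream client documentation)::
+
+     # public artifacts need no signature
+     url = queue.buildUrl('getArtifact', taskId, runId, 'public/logs/build.log')
+     # private artifacts need a signed URL plus the matching scope
+     signed = queue.buildSignedUrl('getArtifact', taskId, runId, 'private/report.json')
+     # Fetch the URL with an HTTP client, following redirects and validating
+     # the x-taskcluster-location-* headers as described above.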
+ """
+
+ return await self._makeApiCall(self.funcinfo["getArtifact"], *args, **kwargs)
+
+ async def getLatestArtifact(self, *args, **kwargs):
+ """
+ Get Artifact from Latest Run
+
+ Get artifact by `<name>` from the last run of a task.
+
+ **Public Artifacts**, in order to get an artifact you need the scope
+ `queue:get-artifact:<name>`, where `<name>` is the name of the artifact.
+ But if the artifact `name` starts with `public/`, authentication and
+ authorization is not necessary to fetch the artifact.
+
+ **API Clients**, this method will redirect you to the artifact, if it is
+ stored externally. Either way, the response may not be JSON. So API
+ client users might want to generate a signed URL for this end-point and
+ use that URL with a normal HTTP client.
+
+ **Remark**, this end-point is slightly slower than
+ `queue.getArtifact`, so consider that if you already know the `runId` of
+ the latest run. Otherwise, just use the most convenient API end-point.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["getLatestArtifact"], *args, **kwargs)
+
+ async def listArtifacts(self, *args, **kwargs):
+ """
+ Get Artifacts from Run
+
+ Returns a list of artifacts and associated meta-data for a given run.
+
+ As a task may have many artifacts, paging may be necessary. If this
+ end-point returns a `continuationToken`, you should call the end-point
+ again with the `continuationToken` as the query-string option:
+ `continuationToken`.
+
+ By default this end-point will list up to 1000 artifacts in a single page;
+ you may limit this with the query-string parameter `limit`.
+
+ This method gives output: ``v1/list-artifacts-response.json#``
+
+ This method is ``experimental``
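+
+ Example (a hypothetical sketch of the paging pattern, which also applies
+ to the other list end-points here; assumes query-string options are
+ passed via the `query` keyword as in the upstream client)::
+
+     artifacts, token = [], None
+     while True:
+         query = {'limit': 200}
+         if token:
+             query['continuationToken'] = token
+         page = await queue.listArtifacts(taskId, runId, query=query)
+         artifacts.extend(page['artifacts'])
+         token = page.get('continuationToken')
+         if not token:
+             break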
+ """
+
+ return await self._makeApiCall(self.funcinfo["listArtifacts"], *args, **kwargs)
+
+ async def listLatestArtifacts(self, *args, **kwargs):
+ """
+ Get Artifacts from Latest Run
+
+ Returns a list of artifacts and associated meta-data for the latest run
+ from the given task.
+
+ As a task may have many artifacts, paging may be necessary. If this
+ end-point returns a `continuationToken`, you should call the end-point
+ again with the `continuationToken` as the query-string option:
+ `continuationToken`.
+
+ By default this end-point will list up to 1000 artifacts in a single page;
+ you may limit this with the query-string parameter `limit`.
+
+ This method gives output: ``v1/list-artifacts-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listLatestArtifacts"], *args, **kwargs)
+
+ async def listProvisioners(self, *args, **kwargs):
+ """
+ Get a list of all active provisioners
+
+ Get all active provisioners.
+
+ The term "provisioner" is taken broadly to mean anything with a provisionerId.
+ This does not necessarily mean there is an associated service performing any
+ provisioning activity.
+
+ The response is paged. If this end-point returns a `continuationToken`, you
+ should call the end-point again with the `continuationToken` as a query-string
+ option. By default this end-point will list up to 1000 provisioners in a single
+ page. You may limit this with the query-string parameter `limit`.
+
+ This method gives output: ``v1/list-provisioners-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listProvisioners"], *args, **kwargs)
+
+ async def getProvisioner(self, *args, **kwargs):
+ """
+ Get an active provisioner
+
+ Get an active provisioner.
+
+ The term "provisioner" is taken broadly to mean anything with a provisionerId.
+ This does not necessarily mean there is an associated service performing any
+ provisioning activity.
+
+ This method gives output: ``v1/provisioner-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["getProvisioner"], *args, **kwargs)
+
+ async def declareProvisioner(self, *args, **kwargs):
+ """
+ Update a provisioner
+
+ Declare a provisioner, supplying some details about it.
+
+ `declareProvisioner` allows updating one or more properties of a provisioner as long as the required scopes are
+ possessed. For example, a request to update the `aws-provisioner-v1`
+ provisioner with a body `{description: 'This provisioner is great'}` would require you to have the scope
+ `queue:declare-provisioner:aws-provisioner-v1#description`.
+
+ The term "provisioner" is taken broadly to mean anything with a provisionerId.
+ This does not necessarily mean there is an associated service performing any
+ provisioning activity.
+
+ This method takes input: ``v1/update-provisioner-request.json#``
+
+ This method gives output: ``v1/provisioner-response.json#``
+
+ This method is ``experimental``
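+
+ Example (a hypothetical sketch of the update described above, using the
+ provisioner and scope named in this docstring)::
+
+     await queue.declareProvisioner('aws-provisioner-v1', {
+         'description': 'This provisioner is great',
+     })
+     # requires scope queue:declare-provisioner:aws-provisioner-v1#description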
+ """
+
+ return await self._makeApiCall(self.funcinfo["declareProvisioner"], *args, **kwargs)
+
+ async def pendingTasks(self, *args, **kwargs):
+ """
+ Get Number of Pending Tasks
+
+ Get an approximate number of pending tasks for the given `provisionerId`
+ and `workerType`.
+
+ The underlying Azure Storage Queues only promise to give us an estimate.
+ Furthermore, we cache the result in memory for 20 seconds. So consumers
+ should by no means expect this to be an accurate number.
+ It is, however, a solid estimate of the number of pending tasks.
+
+ This method gives output: ``v1/pending-tasks-response.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["pendingTasks"], *args, **kwargs)
+
+ async def listWorkerTypes(self, *args, **kwargs):
+ """
+ Get a list of all active worker-types
+
+ Get all active worker-types for the given provisioner.
+
+ The response is paged. If this end-point returns a `continuationToken`, you
+ should call the end-point again with the `continuationToken` as a query-string
+ option. By default this end-point will list up to 1000 worker-types in a single
+ page. You may limit this with the query-string parameter `limit`.
+
+ This method gives output: ``v1/list-workertypes-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listWorkerTypes"], *args, **kwargs)
+
+ async def getWorkerType(self, *args, **kwargs):
+ """
+ Get a worker-type
+
+ Get a worker-type from a provisioner.
+
+ This method gives output: ``v1/workertype-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["getWorkerType"], *args, **kwargs)
+
+ async def declareWorkerType(self, *args, **kwargs):
+ """
+ Update a worker-type
+
+ Declare a workerType, supplying some details about it.
+
+ `declareWorkerType` allows updating one or more properties of a worker-type as long as the required scopes are
+ possessed. For example, a request to update the `gecko-b-1-w2008` worker-type within the `aws-provisioner-v1`
+ provisioner with a body `{description: 'This worker type is great'}` would require you to have the scope
+ `queue:declare-worker-type:aws-provisioner-v1/gecko-b-1-w2008#description`.
+
+ This method takes input: ``v1/update-workertype-request.json#``
+
+ This method gives output: ``v1/workertype-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["declareWorkerType"], *args, **kwargs)
+
+ async def listWorkers(self, *args, **kwargs):
+ """
+ Get a list of all active workers of a workerType
+
+ Get a list of all active workers of a workerType.
+
+ `listWorkers` allows a response to be filtered by quarantined and non-quarantined workers.
+ To filter the query, you should call the end-point with `quarantined` as a query-string option with a
+ true or false value.
+
+ The response is paged. If this end-point returns a `continuationToken`, you
+ should call the end-point again with the `continuationToken` as a query-string
+ option. By default this end-point will list up to 1000 workers in a single
+ page. You may limit this with the query-string parameter `limit`.
+
+ This method gives output: ``v1/list-workers-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["listWorkers"], *args, **kwargs)
+
+ async def getWorker(self, *args, **kwargs):
+ """
+ Get a worker
+
+ Get a worker from a worker-type.
+
+ This method gives output: ``v1/worker-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["getWorker"], *args, **kwargs)
+
+ async def quarantineWorker(self, *args, **kwargs):
+ """
+ Quarantine a worker
+
+ Quarantine a worker
+
+ This method takes input: ``v1/quarantine-worker-request.json#``
+
+ This method gives output: ``v1/worker-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["quarantineWorker"], *args, **kwargs)
+
+ async def declareWorker(self, *args, **kwargs):
+ """
+ Declare a worker
+
+ Declare a worker, supplying some details about it.
+
+ `declareWorker` allows updating one or more properties of a worker as long as the required scopes are
+ possessed.
+
+ This method takes input: ``v1/update-worker-request.json#``
+
+ This method gives output: ``v1/worker-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return await self._makeApiCall(self.funcinfo["declareWorker"], *args, **kwargs)
+
+ funcinfo = {
+ "cancelTask": {
+ 'args': ['taskId'],
+ 'method': 'post',
+ 'name': 'cancelTask',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/cancel',
+ 'stability': 'stable',
+ },
+ "claimTask": {
+ 'args': ['taskId', 'runId'],
+ 'input': 'v1/task-claim-request.json#',
+ 'method': 'post',
+ 'name': 'claimTask',
+ 'output': 'v1/task-claim-response.json#',
+ 'route': '/task/<taskId>/runs/<runId>/claim',
+ 'stability': 'deprecated',
+ },
+ "claimWork": {
+ 'args': ['provisionerId', 'workerType'],
+ 'input': 'v1/claim-work-request.json#',
+ 'method': 'post',
+ 'name': 'claimWork',
+ 'output': 'v1/claim-work-response.json#',
+ 'route': '/claim-work/<provisionerId>/<workerType>',
+ 'stability': 'stable',
+ },
+ "completeArtifact": {
+ 'args': ['taskId', 'runId', 'name'],
+ 'input': 'v1/put-artifact-request.json#',
+ 'method': 'put',
+ 'name': 'completeArtifact',
+ 'route': '/task/<taskId>/runs/<runId>/artifacts/<name>',
+ 'stability': 'experimental',
+ },
+ "createArtifact": {
+ 'args': ['taskId', 'runId', 'name'],
+ 'input': 'v1/post-artifact-request.json#',
+ 'method': 'post',
+ 'name': 'createArtifact',
+ 'output': 'v1/post-artifact-response.json#',
+ 'route': '/task/<taskId>/runs/<runId>/artifacts/<name>',
+ 'stability': 'stable',
+ },
+ "createTask": {
+ 'args': ['taskId'],
+ 'input': 'v1/create-task-request.json#',
+ 'method': 'put',
+ 'name': 'createTask',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>',
+ 'stability': 'stable',
+ },
+ "declareProvisioner": {
+ 'args': ['provisionerId'],
+ 'input': 'v1/update-provisioner-request.json#',
+ 'method': 'put',
+ 'name': 'declareProvisioner',
+ 'output': 'v1/provisioner-response.json#',
+ 'route': '/provisioners/<provisionerId>',
+ 'stability': 'experimental',
+ },
+ "declareWorker": {
+ 'args': ['provisionerId', 'workerType', 'workerGroup', 'workerId'],
+ 'input': 'v1/update-worker-request.json#',
+ 'method': 'put',
+ 'name': 'declareWorker',
+ 'output': 'v1/worker-response.json#',
+ 'route': '/provisioners/<provisionerId>/worker-types/<workerType>/<workerGroup>/<workerId>',
+ 'stability': 'experimental',
+ },
+ "declareWorkerType": {
+ 'args': ['provisionerId', 'workerType'],
+ 'input': 'v1/update-workertype-request.json#',
+ 'method': 'put',
+ 'name': 'declareWorkerType',
+ 'output': 'v1/workertype-response.json#',
+ 'route': '/provisioners/<provisionerId>/worker-types/<workerType>',
+ 'stability': 'experimental',
+ },
+ "defineTask": {
+ 'args': ['taskId'],
+ 'input': 'v1/create-task-request.json#',
+ 'method': 'post',
+ 'name': 'defineTask',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/define',
+ 'stability': 'deprecated',
+ },
+ "getArtifact": {
+ 'args': ['taskId', 'runId', 'name'],
+ 'method': 'get',
+ 'name': 'getArtifact',
+ 'route': '/task/<taskId>/runs/<runId>/artifacts/<name>',
+ 'stability': 'stable',
+ },
+ "getLatestArtifact": {
+ 'args': ['taskId', 'name'],
+ 'method': 'get',
+ 'name': 'getLatestArtifact',
+ 'route': '/task/<taskId>/artifacts/<name>',
+ 'stability': 'stable',
+ },
+ "getProvisioner": {
+ 'args': ['provisionerId'],
+ 'method': 'get',
+ 'name': 'getProvisioner',
+ 'output': 'v1/provisioner-response.json#',
+ 'route': '/provisioners/<provisionerId>',
+ 'stability': 'experimental',
+ },
+ "getWorker": {
+ 'args': ['provisionerId', 'workerType', 'workerGroup', 'workerId'],
+ 'method': 'get',
+ 'name': 'getWorker',
+ 'output': 'v1/worker-response.json#',
+ 'route': '/provisioners/<provisionerId>/worker-types/<workerType>/workers/<workerGroup>/<workerId>',
+ 'stability': 'experimental',
+ },
+ "getWorkerType": {
+ 'args': ['provisionerId', 'workerType'],
+ 'method': 'get',
+ 'name': 'getWorkerType',
+ 'output': 'v1/workertype-response.json#',
+ 'route': '/provisioners/<provisionerId>/worker-types/<workerType>',
+ 'stability': 'experimental',
+ },
+ "listArtifacts": {
+ 'args': ['taskId', 'runId'],
+ 'method': 'get',
+ 'name': 'listArtifacts',
+ 'output': 'v1/list-artifacts-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/task/<taskId>/runs/<runId>/artifacts',
+ 'stability': 'experimental',
+ },
+ "listDependentTasks": {
+ 'args': ['taskId'],
+ 'method': 'get',
+ 'name': 'listDependentTasks',
+ 'output': 'v1/list-dependent-tasks-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/task/<taskId>/dependents',
+ 'stability': 'stable',
+ },
+ "listLatestArtifacts": {
+ 'args': ['taskId'],
+ 'method': 'get',
+ 'name': 'listLatestArtifacts',
+ 'output': 'v1/list-artifacts-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/task/<taskId>/artifacts',
+ 'stability': 'experimental',
+ },
+ "listProvisioners": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listProvisioners',
+ 'output': 'v1/list-provisioners-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/provisioners',
+ 'stability': 'experimental',
+ },
+ "listTaskGroup": {
+ 'args': ['taskGroupId'],
+ 'method': 'get',
+ 'name': 'listTaskGroup',
+ 'output': 'v1/list-task-group-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/task-group/<taskGroupId>/list',
+ 'stability': 'stable',
+ },
+ "listWorkerTypes": {
+ 'args': ['provisionerId'],
+ 'method': 'get',
+ 'name': 'listWorkerTypes',
+ 'output': 'v1/list-workertypes-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/provisioners/<provisionerId>/worker-types',
+ 'stability': 'experimental',
+ },
+ "listWorkers": {
+ 'args': ['provisionerId', 'workerType'],
+ 'method': 'get',
+ 'name': 'listWorkers',
+ 'output': 'v1/list-workers-response.json#',
+ 'query': ['continuationToken', 'limit', 'quarantined'],
+ 'route': '/provisioners/<provisionerId>/worker-types/<workerType>/workers',
+ 'stability': 'experimental',
+ },
+ "pendingTasks": {
+ 'args': ['provisionerId', 'workerType'],
+ 'method': 'get',
+ 'name': 'pendingTasks',
+ 'output': 'v1/pending-tasks-response.json#',
+ 'route': '/pending/<provisionerId>/<workerType>',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "quarantineWorker": {
+ 'args': ['provisionerId', 'workerType', 'workerGroup', 'workerId'],
+ 'input': 'v1/quarantine-worker-request.json#',
+ 'method': 'put',
+ 'name': 'quarantineWorker',
+ 'output': 'v1/worker-response.json#',
+ 'route': '/provisioners/<provisionerId>/worker-types/<workerType>/workers/<workerGroup>/<workerId>',
+ 'stability': 'experimental',
+ },
+ "reclaimTask": {
+ 'args': ['taskId', 'runId'],
+ 'method': 'post',
+ 'name': 'reclaimTask',
+ 'output': 'v1/task-reclaim-response.json#',
+ 'route': '/task/<taskId>/runs/<runId>/reclaim',
+ 'stability': 'stable',
+ },
+ "reportCompleted": {
+ 'args': ['taskId', 'runId'],
+ 'method': 'post',
+ 'name': 'reportCompleted',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/runs/<runId>/completed',
+ 'stability': 'stable',
+ },
+ "reportException": {
+ 'args': ['taskId', 'runId'],
+ 'input': 'v1/task-exception-request.json#',
+ 'method': 'post',
+ 'name': 'reportException',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/runs/<runId>/exception',
+ 'stability': 'stable',
+ },
+ "reportFailed": {
+ 'args': ['taskId', 'runId'],
+ 'method': 'post',
+ 'name': 'reportFailed',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/runs/<runId>/failed',
+ 'stability': 'stable',
+ },
+ "rerunTask": {
+ 'args': ['taskId'],
+ 'method': 'post',
+ 'name': 'rerunTask',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/rerun',
+ 'stability': 'deprecated',
+ },
+ "scheduleTask": {
+ 'args': ['taskId'],
+ 'method': 'post',
+ 'name': 'scheduleTask',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/schedule',
+ 'stability': 'stable',
+ },
+ "status": {
+ 'args': ['taskId'],
+ 'method': 'get',
+ 'name': 'status',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/status',
+ 'stability': 'stable',
+ },
+ "task": {
+ 'args': ['taskId'],
+ 'method': 'get',
+ 'name': 'task',
+ 'output': 'v1/task.json#',
+ 'route': '/task/<taskId>',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Queue']
diff --git a/third_party/python/taskcluster/taskcluster/aio/queueevents.py b/third_party/python/taskcluster/taskcluster/aio/queueevents.py
new file mode 100644
index 0000000000..3b0b84662d
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/queueevents.py
@@ -0,0 +1,718 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class QueueEvents(AsyncBaseClient):
+ """
+ The queue, typically available at `queue.taskcluster.net`, is responsible
+ for accepting tasks and tracking their state as they are executed by
+ workers, in order to ensure they are eventually resolved.
+
+ This document describes AMQP exchanges offered by the queue, which allow
+ third-party listeners to monitor tasks as they progress to resolution.
+ These exchanges target the following audiences:
+ * Schedulers, who take action after tasks are completed,
+ * Workers, who want to listen for new or canceled tasks (optional),
+ * Tools that want to update their view as tasks progress.
+
+ You'll notice that all the exchanges in the document share the same
+ routing key pattern. This makes it very easy to bind to all messages
+ about a certain kind of task.
+
+ **Task specific routes**, a task can define a task specific route using
+ the `task.routes` property. See the task creation documentation for details
+ on permissions required to provide task specific routes. If a task has
+ the entry `'notify.by-email'` as a task specific route defined in
+ `task.routes`, all messages about this task will be CC'ed with the
+ routing-key `'route.notify.by-email'`.
+
+ These routes will always be prefixed `route.`, so that they cannot interfere
+ with the _primary_ routing key as documented here. Notice that the
+ _primary_ routing key is always prefixed `primary.`. This is ensured
+ in the routing key reference, so API clients will do this automatically.
+
+ Please note that the way RabbitMQ works, the message will only arrive
+ in your queue once, even though you may have bound to the exchange with
+ multiple routing key patterns that match more of the CC'ed
+ routing keys.
+
+ **Delivery guarantees**, most operations on the queue are idempotent,
+ which means that if repeated with the same arguments then the requests
+ will ensure completion of the operation and return the same response.
+ This is useful if the server crashes or the TCP connection breaks, but
+ when re-executing an idempotent operation, the queue will also resend
+ any related AMQP messages. Hence, messages may be repeated.
+
+ This shouldn't be much of a problem, as the best you can achieve using
+ confirm messages with AMQP is at-least-once delivery semantics. Hence,
+ this only prevents you from obtaining at-most-once delivery semantics.
+
+ **Remark**, some messages generated by timeouts may be dropped if the
+ server crashes at the wrong time. Ideally, we'll address this in the
+ future. For now we suggest you ignore this corner case, and notify us
+ if this corner case is of concern to you.
+ """
+
+ classOptions = {
+ "exchangePrefix": "exchange/taskcluster-queue/v1/",
+ }
+ serviceName = 'queue'
+ apiVersion = 'v1'
+
+ def taskDefined(self, *args, **kwargs):
+ """
+ Task Defined Messages
+
+ When a task is created or just defined a message is posted to this
+ exchange.
+
+ This message exchange is mainly useful when tasks are scheduled by a
+ scheduler that uses `defineTask` as this does not make the task
+ `pending`. Thus, no `taskPending` message is published.
+ Please, note that messages are also published on this exchange if defined
+ using `createTask`.
+
+ This exchange outputs: ``v1/task-defined-message.json#``
+
+ This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+ * runId: `runId` of latest run for the task, `_` if no run exists for the task.
+
+ * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+
+ * workerId: `workerId` of latest run for the task, `_` if no run exists for the task.
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
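+
+ Example (a hypothetical sketch; assumes routing-key components can be
+ passed as keyword arguments and that the returned binding carries
+ `exchange` and `routingKeyPattern` fields for use with an AMQP/pulse
+ consumer)::
+
+     qe = QueueEvents({'rootUrl': 'https://tc.example.com'})   # assumed options
+     binding = qe.taskDefined(taskGroupId='ABC123')            # made-up group id
+     # binding['exchange'] -> 'exchange/taskcluster-queue/v1/task-defined'
+     # binding['routingKeyPattern'] matches only tasks in that task group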
+ """
+
+ ref = {
+ 'exchange': 'task-defined',
+ 'name': 'taskDefined',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-defined-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def taskPending(self, *args, **kwargs):
+ """
+ Task Pending Messages
+
+ When a task becomes `pending` a message is posted to this exchange.
+
+ This is useful for workers who don't want to constantly poll the queue
+ for new tasks. The queue will also be the authority for task states and
+ claims. But using this exchange, workers should be able to distribute work
+ efficiently and they would be able to reduce their polling interval
+ significantly without affecting general responsiveness.
+
+ This exchange outputs: ``v1/task-pending-message.json#``
+
+ This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+ * runId: `runId` of latest run for the task, `_` if no run exists for the task. (required)
+
+ * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+
+ * workerId: `workerId` of latest run for the task, `_` if no run exists for the task.
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'task-pending',
+ 'name': 'taskPending',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-pending-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def taskRunning(self, *args, **kwargs):
+ """
+ Task Running Messages
+
+ Whenever a task is claimed by a worker, a run is started on the worker,
+ and a message is posted on this exchange.
+
+ This exchange outputs: ``v1/task-running-message.json#``
+
+ This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+ * runId: `runId` of latest run for the task, `_` if no run exists for the task. (required)
+
+ * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task. (required)
+
+ * workerId: `workerId` of latest run for the task, `_` if no run exists for the task. (required)
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'task-running',
+ 'name': 'taskRunning',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-running-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def artifactCreated(self, *args, **kwargs):
+ """
+ Artifact Creation Messages
+
+ Whenever the `createArtifact` end-point is called, the queue will create
+ a record of the artifact and post a message on this exchange. All of this
+ happens before the queue returns a signed URL for the caller to upload
+ the actual artifact with (pending on `storageType`).
+
+ This means that the actual artifact is rarely available when this message
+ is posted. But it is not unreasonable to assume that the artifact will
+ become available at some point later. Most signatures will expire in
+ 30 minutes or so, forcing the uploader to call `createArtifact` with
+ the same payload again in order to continue uploading the artifact.
+
+ However, in most cases (especially for small artifacts) it's very
+ reasonable to assume the artifact will be available within a few minutes.
+ This property means that this exchange is mostly useful for tools
+ monitoring task evaluation. One could also use it to count the number of
+ artifacts per task, or _index_ artifacts, though in most cases it'll be
+ smarter to index artifacts after the task in question has completed
+ successfully.
+
+ This exchange outputs: ``v1/artifact-created-message.json#``
+
+ This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+ * runId: `runId` of latest run for the task, `_` if no run exists for the task. (required)
+
+ * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task. (required)
+
+ * workerId: `workerId` of latest run for the task, `_` if no run exists for the task. (required)
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'artifact-created',
+ 'name': 'artifactCreated',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/artifact-created-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def taskCompleted(self, *args, **kwargs):
+ """
+ Task Completed Messages
+
+ When a task is successfully completed by a worker a message is posted to
+ this exchange.
+ This message is routed using the `runId`, `workerGroup` and `workerId`
+ that completed the task. But information about additional runs is also
+ available from the task status structure.
+
+ This exchange outputs: ``v1/task-completed-message.json#``
+
+ This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+ * runId: `runId` of latest run for the task, `_` if no run exists for the task. (required)
+
+ * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task. (required)
+
+ * workerId: `workerId` of latest run for the task, `_` if no run exists for the task. (required)
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'task-completed',
+ 'name': 'taskCompleted',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-completed-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def taskFailed(self, *args, **kwargs):
+ """
+ Task Failed Messages
+
+ When a task ran, but failed to complete successfully, a message is posted
+ to this exchange. This means the worker ran the task-specific code, but
+ the task-specific code exited non-zero.
+
+ This exchange outputs: ``v1/task-failed-message.json#``
+
+ This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+ * runId: `runId` of latest run for the task, `_` if no run exists for the task.
+
+ * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+
+ * workerId: `workerId` of latest run for the task, `_` if no run exists for the task.
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'task-failed',
+ 'name': 'taskFailed',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-failed-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def taskException(self, *args, **kwargs):
+ """
+ Task Exception Messages
+
+ Whenever Taskcluster fails to run a task, a message is posted to this
+ exchange. This happens if the task isn't completed before its `deadline`,
+ all retries failed (i.e. workers stopped responding), the task was
+ canceled by another entity, or the task carried a malformed payload.
+
+ The specific _reason_ is evident from the task status structure; refer
+ to the `reasonResolved` property for the last run.
+
+ This exchange outputs: ``v1/task-exception-message.json#``
+
+ This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+ * runId: `runId` of latest run for the task, `_` if no run exists for the task.
+
+ * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+
+ * workerId: `workerId` of latest run for the task, `_` if no run exists for the task.
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'task-exception',
+ 'name': 'taskException',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-exception-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def taskGroupResolved(self, *args, **kwargs):
+ """
+ Task Group Resolved Messages
+
+ A message is published on task-group-resolved whenever all submitted
+ tasks (whether scheduled or unscheduled) for a given task group have
+ been resolved, regardless of whether they resolved as successful or
+ not. A task group may be resolved multiple times, since new tasks may
+ be submitted against an already resolved task group.
+
+ This exchange outputs: ``v1/task-group-resolved.json#``
+
+ This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskGroupId: `taskGroupId` for the task-group this message concerns (required)
+
+ * schedulerId: `schedulerId` for the task-group this message concerns (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'task-group-resolved',
+ 'name': 'taskGroupResolved',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-group-resolved.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ funcinfo = {
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'QueueEvents']
diff --git a/third_party/python/taskcluster/taskcluster/aio/secrets.py b/third_party/python/taskcluster/taskcluster/aio/secrets.py
new file mode 100644
index 0000000000..abb20dcf82
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/secrets.py
@@ -0,0 +1,149 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class Secrets(AsyncBaseClient):
+ """
+ The secrets service provides a simple key/value store for small bits of secret
+ data. Access is limited by scopes, so values can be considered secret from
+ those who do not have the relevant scopes.
+
+ Secrets also have an expiration date, and once a secret has expired it can no
+ longer be read. This is useful for short-term secrets such as a temporary
+ service credential or a one-time signing key.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'secrets'
+ apiVersion = 'v1'
+
+ async def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ async def set(self, *args, **kwargs):
+ """
+ Set Secret
+
+ Set the secret associated with some key. If the secret already exists, it is
+ updated instead.
+
+ This method takes input: ``v1/secret.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["set"], *args, **kwargs)
+
+ async def remove(self, *args, **kwargs):
+ """
+ Delete Secret
+
+ Delete the secret associated with some key.
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["remove"], *args, **kwargs)
+
+ async def get(self, *args, **kwargs):
+ """
+ Read Secret
+
+ Read the secret associated with some key. If the secret has recently
+ expired, the response code 410 is returned. If the caller lacks the
+ scope necessary to get the secret, the call will fail with a 403 code
+ regardless of whether the secret exists.
+
+ This method gives output: ``v1/secret.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["get"], *args, **kwargs)
+
+ async def list(self, *args, **kwargs):
+ """
+ List Secrets
+
+ List the names of all secrets.
+
+        By default this end-point will try to return up to 1000 secret names in one
+        request. But it **may return fewer**, even if more secrets are available.
+        It may also return a `continuationToken` even though there are no more
+        results. However, you can only be sure to have seen all results if you
+        keep calling `list` with the last `continuationToken` until you
+        get a result without a `continuationToken`.
+
+        If you are not interested in listing all the secrets at once, you may
+        use the query-string option `limit` to return fewer.
+
+ This method gives output: ``v1/secret-list.json#``
+
+ This method is ``stable``
+ """
+
+ return await self._makeApiCall(self.funcinfo["list"], *args, **kwargs)
+
+ funcinfo = {
+ "get": {
+ 'args': ['name'],
+ 'method': 'get',
+ 'name': 'get',
+ 'output': 'v1/secret.json#',
+ 'route': '/secret/<name>',
+ 'stability': 'stable',
+ },
+ "list": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'list',
+ 'output': 'v1/secret-list.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/secrets',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "remove": {
+ 'args': ['name'],
+ 'method': 'delete',
+ 'name': 'remove',
+ 'route': '/secret/<name>',
+ 'stability': 'stable',
+ },
+ "set": {
+ 'args': ['name'],
+ 'input': 'v1/secret.json#',
+ 'method': 'put',
+ 'name': 'set',
+ 'route': '/secret/<name>',
+ 'stability': 'stable',
+ },
+ }
+
+
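+# Illustrative usage sketch (not part of the generated API; the rootUrl,
+# credentials, name, payload and expires values are placeholders, and expires
+# is assumed to be an ISO 8601 timestamp string). It shows the flow described
+# in the docstrings above: write a secret, read it back, and list the first
+# page of names. The 'secret' and 'secrets' keys are assumed here to be the
+# payload properties defined by v1/secret.json# and v1/secret-list.json#.
+async def _exampleSecretRoundtrip(root_url, credentials, name, payload, expires):
+    secrets = Secrets({'rootUrl': root_url, 'credentials': credentials})
+    await secrets.set(name, {'secret': payload, 'expires': expires})
+    value = (await secrets.get(name))['secret']
+    names = (await secrets.list())['secrets']
+    return value, names
+
+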
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Secrets']
diff --git a/third_party/python/taskcluster/taskcluster/aio/treeherderevents.py b/third_party/python/taskcluster/taskcluster/aio/treeherderevents.py
new file mode 100644
index 0000000000..8d0f640182
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/aio/treeherderevents.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .asyncclient import AsyncBaseClient
+from .asyncclient import createApiClient
+from .asyncclient import config
+from .asyncclient import createTemporaryCredentials
+from .asyncclient import createSession
+_defaultConfig = config
+
+
+class TreeherderEvents(AsyncBaseClient):
+ """
+ The taskcluster-treeherder service is responsible for processing
+ task events published by TaskCluster Queue and producing job messages
+ that are consumable by Treeherder.
+
+    This exchange allows job messages to be consumed by any queue that is
+    attached to the exchange. This could be a production Treeherder instance,
+    a local development environment, or a custom dashboard.
+ """
+
+ classOptions = {
+ "exchangePrefix": "exchange/taskcluster-treeherder/v1/",
+ }
+ serviceName = 'treeherder'
+ apiVersion = 'v1'
+
+ def jobs(self, *args, **kwargs):
+ """
+ Job Messages
+
+ When a task run is scheduled or resolved, a message is posted to
+ this exchange in a Treeherder consumable format.
+
+        This exchange outputs: ``v1/pulse-job.json#``
+
+        This exchange takes the following keys:
+
+        * destination: destination (required)
+
+        * project: project (required)
+
+        * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'jobs',
+ 'name': 'jobs',
+ 'routingKey': [
+ {
+ 'multipleWords': False,
+ 'name': 'destination',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'project',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/pulse-job.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ funcinfo = {
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'TreeherderEvents']
diff --git a/third_party/python/taskcluster/taskcluster/auth.py b/third_party/python/taskcluster/taskcluster/auth.py
new file mode 100644
index 0000000000..42dcf411d2
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/auth.py
@@ -0,0 +1,867 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class Auth(BaseClient):
+ """
+ Authentication related API end-points for Taskcluster and related
+ services. These API end-points are of interest if you wish to:
+ * Authorize a request signed with Taskcluster credentials,
+ * Manage clients and roles,
+ * Inspect or audit clients and roles,
+ * Gain access to various services guarded by this API.
+
+ Note that in this service "authentication" refers to validating the
+    correctness of the supplied credentials (that the caller possesses the
+ appropriate access token). This service does not provide any kind of user
+ authentication (identifying a particular person).
+
+ ### Clients
+    The authentication service manages _clients_; at a high level, each client
+ consists of a `clientId`, an `accessToken`, scopes, and some metadata.
+ The `clientId` and `accessToken` can be used for authentication when
+ calling Taskcluster APIs.
+
+ The client's scopes control the client's access to Taskcluster resources.
+ The scopes are *expanded* by substituting roles, as defined below.
+
+ ### Roles
+ A _role_ consists of a `roleId`, a set of scopes and a description.
+ Each role constitutes a simple _expansion rule_ that says if you have
+ the scope: `assume:<roleId>` you get the set of scopes the role has.
+ Think of the `assume:<roleId>` as a scope that allows a client to assume
+ a role.
+
+    As with scopes, the `*` kleene star also has special meaning if it is
+    located at the end of a `roleId`. If you have a role with the following
+    `roleId`: `my-prefix*`, then any client which has a scope starting with
+    `assume:my-prefix` will be allowed to assume the role.
+
+ ### Guarded Services
+ The authentication service also has API end-points for delegating access
+ to some guarded service such as AWS S3, or Azure Table Storage.
+ Generally, we add API end-points to this server when we wish to use
+ Taskcluster credentials to grant access to a third-party service used
+ by many Taskcluster components.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'auth'
+ apiVersion = 'v1'
+
+ def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ def listClients(self, *args, **kwargs):
+ """
+ List Clients
+
+ Get a list of all clients. With `prefix`, only clients for which
+ it is a prefix of the clientId are returned.
+
+ By default this end-point will try to return up to 1000 clients in one
+        request. But it **may return fewer, even none**.
+ It may also return a `continuationToken` even though there are no more
+ results. However, you can only be sure to have seen all results if you
+ keep calling `listClients` with the last `continuationToken` until you
+ get a result without a `continuationToken`.
+
+ This method gives output: ``v1/list-clients-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["listClients"], *args, **kwargs)
+
+ def client(self, *args, **kwargs):
+ """
+ Get Client
+
+ Get information about a single client.
+
+ This method gives output: ``v1/get-client-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["client"], *args, **kwargs)
+
+ def createClient(self, *args, **kwargs):
+ """
+ Create Client
+
+ Create a new client and get the `accessToken` for this client.
+ You should store the `accessToken` from this API call as there is no
+ other way to retrieve it.
+
+        If you lose the `accessToken` you can call `resetAccessToken` to reset
+ it, and a new `accessToken` will be returned, but you cannot retrieve the
+ current `accessToken`.
+
+ If a client with the same `clientId` already exists this operation will
+ fail. Use `updateClient` if you wish to update an existing client.
+
+ The caller's scopes must satisfy `scopes`.
+
+ This method takes input: ``v1/create-client-request.json#``
+
+ This method gives output: ``v1/create-client-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["createClient"], *args, **kwargs)
+
+ def resetAccessToken(self, *args, **kwargs):
+ """
+ Reset `accessToken`
+
+        Reset a client's `accessToken`; this will revoke the existing
+        `accessToken`, generate a new `accessToken`, and return it from this
+        call.
+
+        There is no way to retrieve an existing `accessToken`, so if you lose it
+        you must reset the accessToken to acquire it again.
+
+ This method gives output: ``v1/create-client-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["resetAccessToken"], *args, **kwargs)
+
+ def updateClient(self, *args, **kwargs):
+ """
+ Update Client
+
+        Update an existing client. The `clientId` and `accessToken` cannot be
+        updated, but `scopes` can be modified. The caller's scopes must
+        satisfy all scopes being added to the client in the update operation.
+        If no scopes are given in the request, the client's scopes remain
+        unchanged.
+
+ This method takes input: ``v1/create-client-request.json#``
+
+ This method gives output: ``v1/get-client-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["updateClient"], *args, **kwargs)
+
+ def enableClient(self, *args, **kwargs):
+ """
+ Enable Client
+
+ Enable a client that was disabled with `disableClient`. If the client
+ is already enabled, this does nothing.
+
+ This is typically used by identity providers to re-enable clients that
+ had been disabled when the corresponding identity's scopes changed.
+
+ This method gives output: ``v1/get-client-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["enableClient"], *args, **kwargs)
+
+ def disableClient(self, *args, **kwargs):
+ """
+ Disable Client
+
+ Disable a client. If the client is already disabled, this does nothing.
+
+ This is typically used by identity providers to disable clients when the
+ corresponding identity's scopes no longer satisfy the client's scopes.
+
+ This method gives output: ``v1/get-client-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["disableClient"], *args, **kwargs)
+
+ def deleteClient(self, *args, **kwargs):
+ """
+ Delete Client
+
+ Delete a client, please note that any roles related to this client must
+ be deleted independently.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["deleteClient"], *args, **kwargs)
+
+ def listRoles(self, *args, **kwargs):
+ """
+ List Roles
+
+ Get a list of all roles, each role object also includes the list of
+ scopes it expands to.
+
+ This method gives output: ``v1/list-roles-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["listRoles"], *args, **kwargs)
+
+ def role(self, *args, **kwargs):
+ """
+ Get Role
+
+ Get information about a single role, including the set of scopes that the
+ role expands to.
+
+ This method gives output: ``v1/get-role-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["role"], *args, **kwargs)
+
+ def createRole(self, *args, **kwargs):
+ """
+ Create Role
+
+ Create a new role.
+
+ The caller's scopes must satisfy the new role's scopes.
+
+ If there already exists a role with the same `roleId` this operation
+ will fail. Use `updateRole` to modify an existing role.
+
+ Creation of a role that will generate an infinite expansion will result
+ in an error response.
+
+ This method takes input: ``v1/create-role-request.json#``
+
+ This method gives output: ``v1/get-role-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["createRole"], *args, **kwargs)
+
+ def updateRole(self, *args, **kwargs):
+ """
+ Update Role
+
+ Update an existing role.
+
+ The caller's scopes must satisfy all of the new scopes being added, but
+ need not satisfy all of the client's existing scopes.
+
+ An update of a role that will generate an infinite expansion will result
+ in an error response.
+
+ This method takes input: ``v1/create-role-request.json#``
+
+ This method gives output: ``v1/get-role-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["updateRole"], *args, **kwargs)
+
+ def deleteRole(self, *args, **kwargs):
+ """
+ Delete Role
+
+ Delete a role. This operation will succeed regardless of whether or not
+ the role exists.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["deleteRole"], *args, **kwargs)
+
+ def expandScopesGet(self, *args, **kwargs):
+ """
+ Expand Scopes
+
+ Return an expanded copy of the given scopeset, with scopes implied by any
+ roles included.
+
+ This call uses the GET method with an HTTP body. It remains only for
+ backward compatibility.
+
+ This method takes input: ``v1/scopeset.json#``
+
+ This method gives output: ``v1/scopeset.json#``
+
+ This method is ``deprecated``
+ """
+
+ return self._makeApiCall(self.funcinfo["expandScopesGet"], *args, **kwargs)
+
+ def expandScopes(self, *args, **kwargs):
+ """
+ Expand Scopes
+
+ Return an expanded copy of the given scopeset, with scopes implied by any
+ roles included.
+
+ This method takes input: ``v1/scopeset.json#``
+
+ This method gives output: ``v1/scopeset.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["expandScopes"], *args, **kwargs)
+
+ def currentScopes(self, *args, **kwargs):
+ """
+ Get Current Scopes
+
+ Return the expanded scopes available in the request, taking into account all sources
+ of scopes and scope restrictions (temporary credentials, assumeScopes, client scopes,
+ and roles).
+
+ This method gives output: ``v1/scopeset.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["currentScopes"], *args, **kwargs)
+
+ def awsS3Credentials(self, *args, **kwargs):
+ """
+ Get Temporary Read/Write Credentials S3
+
+ Get temporary AWS credentials for `read-write` or `read-only` access to
+ a given `bucket` and `prefix` within that bucket.
+ The `level` parameter can be `read-write` or `read-only` and determines
+ which type of credentials are returned. Please note that the `level`
+ parameter is required in the scope guarding access. The bucket name must
+ not contain `.`, as recommended by Amazon.
+
+ This method can only allow access to a whitelisted set of buckets. To add
+ a bucket to that whitelist, contact the Taskcluster team, who will add it to
+ the appropriate IAM policy. If the bucket is in a different AWS account, you
+ will also need to add a bucket policy allowing access from the Taskcluster
+ account. That policy should look like this:
+
+ ```
+ {
+ "Version": "2012-10-17",
+ "Statement": [
+ {
+ "Sid": "allow-taskcluster-auth-to-delegate-access",
+ "Effect": "Allow",
+ "Principal": {
+ "AWS": "arn:aws:iam::692406183521:root"
+ },
+ "Action": [
+ "s3:ListBucket",
+ "s3:GetObject",
+ "s3:PutObject",
+ "s3:DeleteObject",
+ "s3:GetBucketLocation"
+ ],
+ "Resource": [
+ "arn:aws:s3:::<bucket>",
+ "arn:aws:s3:::<bucket>/*"
+ ]
+ }
+ ]
+ }
+ ```
+
+ The credentials are set to expire after an hour, but this behavior is
+ subject to change. Hence, you should always read the `expires` property
+ from the response, if you intend to maintain active credentials in your
+ application.
+
+ Please note that your `prefix` may not start with slash `/`. Such a prefix
+ is allowed on S3, but we forbid it here to discourage bad behavior.
+
+ Also note that if your `prefix` doesn't end in a slash `/`, the STS
+ credentials may allow access to unexpected keys, as S3 does not treat
+ slashes specially. For example, a prefix of `my-folder` will allow
+ access to `my-folder/file.txt` as expected, but also to `my-folder.txt`,
+ which may not be intended.
+
+ Finally, note that the `PutObjectAcl` call is not allowed. Passing a canned
+ ACL other than `private` to `PutObject` is treated as a `PutObjectAcl` call, and
+ will result in an access-denied error from AWS. This limitation is due to a
+ security flaw in Amazon S3 which might otherwise allow indefinite access to
+ uploaded objects.
+
+ **EC2 metadata compatibility**, if the querystring parameter
+ `?format=iam-role-compat` is given, the response will be compatible
+ with the JSON exposed by the EC2 metadata service. This aims to ease
+ compatibility for libraries and tools built to auto-refresh credentials.
+ For details on the format returned by EC2 metadata service see:
+ [EC2 User Guide](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html#instance-metadata-security-credentials).
+
+ This method gives output: ``v1/aws-s3-credentials-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["awsS3Credentials"], *args, **kwargs)
+
+ def azureAccounts(self, *args, **kwargs):
+ """
+ List Accounts Managed by Auth
+
+ Retrieve a list of all Azure accounts managed by Taskcluster Auth.
+
+ This method gives output: ``v1/azure-account-list-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["azureAccounts"], *args, **kwargs)
+
+ def azureTables(self, *args, **kwargs):
+ """
+ List Tables in an Account Managed by Auth
+
+ Retrieve a list of all tables in an account.
+
+ This method gives output: ``v1/azure-table-list-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["azureTables"], *args, **kwargs)
+
+ def azureTableSAS(self, *args, **kwargs):
+ """
+ Get Shared-Access-Signature for Azure Table
+
+ Get a shared access signature (SAS) string for use with a specific Azure
+ Table Storage table.
+
+ The `level` parameter can be `read-write` or `read-only` and determines
+ which type of credentials are returned. If level is read-write, it will create the
+ table if it doesn't already exist.
+
+ This method gives output: ``v1/azure-table-access-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["azureTableSAS"], *args, **kwargs)
+
+ def azureContainers(self, *args, **kwargs):
+ """
+ List containers in an Account Managed by Auth
+
+ Retrieve a list of all containers in an account.
+
+ This method gives output: ``v1/azure-container-list-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["azureContainers"], *args, **kwargs)
+
+ def azureContainerSAS(self, *args, **kwargs):
+ """
+ Get Shared-Access-Signature for Azure Container
+
+ Get a shared access signature (SAS) string for use with a specific Azure
+ Blob Storage container.
+
+ The `level` parameter can be `read-write` or `read-only` and determines
+ which type of credentials are returned. If level is read-write, it will create the
+ container if it doesn't already exist.
+
+ This method gives output: ``v1/azure-container-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["azureContainerSAS"], *args, **kwargs)
+
+ def sentryDSN(self, *args, **kwargs):
+ """
+ Get DSN for Sentry Project
+
+ Get temporary DSN (access credentials) for a sentry project.
+ The credentials returned can be used with any Sentry client for up to
+ 24 hours, after which the credentials will be automatically disabled.
+
+ If the project doesn't exist it will be created, and assigned to the
+ initial team configured for this component. Contact a Sentry admin
+        to have the project transferred to a team you have access to, if needed.
+
+ This method gives output: ``v1/sentry-dsn-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["sentryDSN"], *args, **kwargs)
+
+ def statsumToken(self, *args, **kwargs):
+ """
+ Get Token for Statsum Project
+
+ Get temporary `token` and `baseUrl` for sending metrics to statsum.
+
+ The token is valid for 24 hours, clients should refresh after expiration.
+
+ This method gives output: ``v1/statsum-token-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["statsumToken"], *args, **kwargs)
+
+ def webhooktunnelToken(self, *args, **kwargs):
+ """
+ Get Token for Webhooktunnel Proxy
+
+        Get temporary `token` and `id` for connecting to webhooktunnel.
+ The token is valid for 96 hours, clients should refresh after expiration.
+
+ This method gives output: ``v1/webhooktunnel-token-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["webhooktunnelToken"], *args, **kwargs)
+
+ def authenticateHawk(self, *args, **kwargs):
+ """
+ Authenticate Hawk Request
+
+ Validate the request signature given on input and return list of scopes
+ that the authenticating client has.
+
+        This method is used by other services that wish to rely on Taskcluster
+ credentials for authentication. This way we can use Hawk without having
+ the secret credentials leave this service.
+
+ This method takes input: ``v1/authenticate-hawk-request.json#``
+
+ This method gives output: ``v1/authenticate-hawk-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["authenticateHawk"], *args, **kwargs)
+
+ def testAuthenticate(self, *args, **kwargs):
+ """
+ Test Authentication
+
+ Utility method to test client implementations of Taskcluster
+ authentication.
+
+ Rather than using real credentials, this endpoint accepts requests with
+ clientId `tester` and accessToken `no-secret`. That client's scopes are
+ based on `clientScopes` in the request body.
+
+ The request is validated, with any certificate, authorizedScopes, etc.
+ applied, and the resulting scopes are checked against `requiredScopes`
+ from the request body. On success, the response contains the clientId
+ and scopes as seen by the API method.
+
+ This method takes input: ``v1/test-authenticate-request.json#``
+
+ This method gives output: ``v1/test-authenticate-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["testAuthenticate"], *args, **kwargs)
+
+ def testAuthenticateGet(self, *args, **kwargs):
+ """
+ Test Authentication (GET)
+
+ Utility method similar to `testAuthenticate`, but with the GET method,
+ so it can be used with signed URLs (bewits).
+
+ Rather than using real credentials, this endpoint accepts requests with
+ clientId `tester` and accessToken `no-secret`. That client's scopes are
+ `['test:*', 'auth:create-client:test:*']`. The call fails if the
+ `test:authenticate-get` scope is not available.
+
+ The request is validated, with any certificate, authorizedScopes, etc.
+ applied, and the resulting scopes are checked, just like any API call.
+ On success, the response contains the clientId and scopes as seen by
+ the API method.
+
+ This method may later be extended to allow specification of client and
+ required scopes via query arguments.
+
+ This method gives output: ``v1/test-authenticate-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["testAuthenticateGet"], *args, **kwargs)
+
+ funcinfo = {
+ "authenticateHawk": {
+ 'args': [],
+ 'input': 'v1/authenticate-hawk-request.json#',
+ 'method': 'post',
+ 'name': 'authenticateHawk',
+ 'output': 'v1/authenticate-hawk-response.json#',
+ 'route': '/authenticate-hawk',
+ 'stability': 'stable',
+ },
+ "awsS3Credentials": {
+ 'args': ['level', 'bucket', 'prefix'],
+ 'method': 'get',
+ 'name': 'awsS3Credentials',
+ 'output': 'v1/aws-s3-credentials-response.json#',
+ 'query': ['format'],
+ 'route': '/aws/s3/<level>/<bucket>/<prefix>',
+ 'stability': 'stable',
+ },
+ "azureAccounts": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'azureAccounts',
+ 'output': 'v1/azure-account-list-response.json#',
+ 'route': '/azure/accounts',
+ 'stability': 'stable',
+ },
+ "azureContainerSAS": {
+ 'args': ['account', 'container', 'level'],
+ 'method': 'get',
+ 'name': 'azureContainerSAS',
+ 'output': 'v1/azure-container-response.json#',
+ 'route': '/azure/<account>/containers/<container>/<level>',
+ 'stability': 'stable',
+ },
+ "azureContainers": {
+ 'args': ['account'],
+ 'method': 'get',
+ 'name': 'azureContainers',
+ 'output': 'v1/azure-container-list-response.json#',
+ 'query': ['continuationToken'],
+ 'route': '/azure/<account>/containers',
+ 'stability': 'stable',
+ },
+ "azureTableSAS": {
+ 'args': ['account', 'table', 'level'],
+ 'method': 'get',
+ 'name': 'azureTableSAS',
+ 'output': 'v1/azure-table-access-response.json#',
+ 'route': '/azure/<account>/table/<table>/<level>',
+ 'stability': 'stable',
+ },
+ "azureTables": {
+ 'args': ['account'],
+ 'method': 'get',
+ 'name': 'azureTables',
+ 'output': 'v1/azure-table-list-response.json#',
+ 'query': ['continuationToken'],
+ 'route': '/azure/<account>/tables',
+ 'stability': 'stable',
+ },
+ "client": {
+ 'args': ['clientId'],
+ 'method': 'get',
+ 'name': 'client',
+ 'output': 'v1/get-client-response.json#',
+ 'route': '/clients/<clientId>',
+ 'stability': 'stable',
+ },
+ "createClient": {
+ 'args': ['clientId'],
+ 'input': 'v1/create-client-request.json#',
+ 'method': 'put',
+ 'name': 'createClient',
+ 'output': 'v1/create-client-response.json#',
+ 'route': '/clients/<clientId>',
+ 'stability': 'stable',
+ },
+ "createRole": {
+ 'args': ['roleId'],
+ 'input': 'v1/create-role-request.json#',
+ 'method': 'put',
+ 'name': 'createRole',
+ 'output': 'v1/get-role-response.json#',
+ 'route': '/roles/<roleId>',
+ 'stability': 'stable',
+ },
+ "currentScopes": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'currentScopes',
+ 'output': 'v1/scopeset.json#',
+ 'route': '/scopes/current',
+ 'stability': 'stable',
+ },
+ "deleteClient": {
+ 'args': ['clientId'],
+ 'method': 'delete',
+ 'name': 'deleteClient',
+ 'route': '/clients/<clientId>',
+ 'stability': 'stable',
+ },
+ "deleteRole": {
+ 'args': ['roleId'],
+ 'method': 'delete',
+ 'name': 'deleteRole',
+ 'route': '/roles/<roleId>',
+ 'stability': 'stable',
+ },
+ "disableClient": {
+ 'args': ['clientId'],
+ 'method': 'post',
+ 'name': 'disableClient',
+ 'output': 'v1/get-client-response.json#',
+ 'route': '/clients/<clientId>/disable',
+ 'stability': 'stable',
+ },
+ "enableClient": {
+ 'args': ['clientId'],
+ 'method': 'post',
+ 'name': 'enableClient',
+ 'output': 'v1/get-client-response.json#',
+ 'route': '/clients/<clientId>/enable',
+ 'stability': 'stable',
+ },
+ "expandScopes": {
+ 'args': [],
+ 'input': 'v1/scopeset.json#',
+ 'method': 'post',
+ 'name': 'expandScopes',
+ 'output': 'v1/scopeset.json#',
+ 'route': '/scopes/expand',
+ 'stability': 'stable',
+ },
+ "expandScopesGet": {
+ 'args': [],
+ 'input': 'v1/scopeset.json#',
+ 'method': 'get',
+ 'name': 'expandScopesGet',
+ 'output': 'v1/scopeset.json#',
+ 'route': '/scopes/expand',
+ 'stability': 'deprecated',
+ },
+ "listClients": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listClients',
+ 'output': 'v1/list-clients-response.json#',
+ 'query': ['prefix', 'continuationToken', 'limit'],
+ 'route': '/clients/',
+ 'stability': 'stable',
+ },
+ "listRoles": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listRoles',
+ 'output': 'v1/list-roles-response.json#',
+ 'route': '/roles/',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "resetAccessToken": {
+ 'args': ['clientId'],
+ 'method': 'post',
+ 'name': 'resetAccessToken',
+ 'output': 'v1/create-client-response.json#',
+ 'route': '/clients/<clientId>/reset',
+ 'stability': 'stable',
+ },
+ "role": {
+ 'args': ['roleId'],
+ 'method': 'get',
+ 'name': 'role',
+ 'output': 'v1/get-role-response.json#',
+ 'route': '/roles/<roleId>',
+ 'stability': 'stable',
+ },
+ "sentryDSN": {
+ 'args': ['project'],
+ 'method': 'get',
+ 'name': 'sentryDSN',
+ 'output': 'v1/sentry-dsn-response.json#',
+ 'route': '/sentry/<project>/dsn',
+ 'stability': 'stable',
+ },
+ "statsumToken": {
+ 'args': ['project'],
+ 'method': 'get',
+ 'name': 'statsumToken',
+ 'output': 'v1/statsum-token-response.json#',
+ 'route': '/statsum/<project>/token',
+ 'stability': 'stable',
+ },
+ "testAuthenticate": {
+ 'args': [],
+ 'input': 'v1/test-authenticate-request.json#',
+ 'method': 'post',
+ 'name': 'testAuthenticate',
+ 'output': 'v1/test-authenticate-response.json#',
+ 'route': '/test-authenticate',
+ 'stability': 'stable',
+ },
+ "testAuthenticateGet": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'testAuthenticateGet',
+ 'output': 'v1/test-authenticate-response.json#',
+ 'route': '/test-authenticate-get/',
+ 'stability': 'stable',
+ },
+ "updateClient": {
+ 'args': ['clientId'],
+ 'input': 'v1/create-client-request.json#',
+ 'method': 'post',
+ 'name': 'updateClient',
+ 'output': 'v1/get-client-response.json#',
+ 'route': '/clients/<clientId>',
+ 'stability': 'stable',
+ },
+ "updateRole": {
+ 'args': ['roleId'],
+ 'input': 'v1/create-role-request.json#',
+ 'method': 'post',
+ 'name': 'updateRole',
+ 'output': 'v1/get-role-response.json#',
+ 'route': '/roles/<roleId>',
+ 'stability': 'stable',
+ },
+ "webhooktunnelToken": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'webhooktunnelToken',
+ 'output': 'v1/webhooktunnel-token-response.json#',
+ 'route': '/webhooktunnel',
+ 'stability': 'stable',
+ },
+ }
+
+
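+# Illustrative usage sketch (not part of the generated API; the rootUrl,
+# credentials, role, bucket and prefix values are placeholders). It shows two
+# of the calls documented above: expanding a scope set (v1/scopeset.json# is
+# assumed to be an object with a single 'scopes' array) and requesting
+# temporary S3 credentials, whose positional arguments follow the funcinfo
+# order: level, bucket, prefix.
+def _exampleAuthCalls(root_url, credentials):
+    auth = Auth({'rootUrl': root_url, 'credentials': credentials})
+    expanded = auth.expandScopes({'scopes': ['assume:my-hypothetical-role']})
+    s3_creds = auth.awsS3Credentials('read-only', 'my-bucket', 'my/prefix/')
+    return expanded['scopes'], s3_creds
+
+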
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Auth']
diff --git a/third_party/python/taskcluster/taskcluster/authevents.py b/third_party/python/taskcluster/taskcluster/authevents.py
new file mode 100644
index 0000000000..6f7dbfa888
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/authevents.py
@@ -0,0 +1,180 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class AuthEvents(BaseClient):
+ """
+ The auth service is responsible for storing credentials, managing
+ assignment of scopes, and validation of request signatures from other
+ services.
+
+ These exchanges provides notifications when credentials or roles are
+ updated. This is mostly so that multiple instances of the auth service
+ can purge their caches and synchronize state. But you are of course
+ welcome to use these for other purposes, monitoring changes for example.
+ """
+
+ classOptions = {
+ "exchangePrefix": "exchange/taskcluster-auth/v1/",
+ }
+ serviceName = 'auth'
+ apiVersion = 'v1'
+
+ def clientCreated(self, *args, **kwargs):
+ """
+ Client Created Messages
+
+ Message that a new client has been created.
+
+        This exchange outputs: ``v1/client-message.json#``
+
+        This exchange takes the following keys:
+
+        * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'client-created',
+ 'name': 'clientCreated',
+ 'routingKey': [
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/client-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def clientUpdated(self, *args, **kwargs):
+ """
+ Client Updated Messages
+
+        Message that a client has been updated.
+
+        This exchange outputs: ``v1/client-message.json#``
+
+        This exchange takes the following keys:
+
+        * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'client-updated',
+ 'name': 'clientUpdated',
+ 'routingKey': [
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/client-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def clientDeleted(self, *args, **kwargs):
+ """
+ Client Deleted Messages
+
+        Message that a client has been deleted.
+
+        This exchange outputs: ``v1/client-message.json#``
+
+        This exchange takes the following keys:
+
+        * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'client-deleted',
+ 'name': 'clientDeleted',
+ 'routingKey': [
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/client-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def roleCreated(self, *args, **kwargs):
+ """
+ Role Created Messages
+
+ Message that a new role has been created.
+
+        This exchange outputs: ``v1/role-message.json#``
+
+        This exchange takes the following keys:
+
+        * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'role-created',
+ 'name': 'roleCreated',
+ 'routingKey': [
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/role-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def roleUpdated(self, *args, **kwargs):
+ """
+ Role Updated Messages
+
+        Message that a role has been updated.
+
+        This exchange outputs: ``v1/role-message.json#``
+
+        This exchange takes the following keys:
+
+        * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'role-updated',
+ 'name': 'roleUpdated',
+ 'routingKey': [
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/role-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def roleDeleted(self, *args, **kwargs):
+ """
+ Role Deleted Messages
+
+        Message that a role has been deleted.
+
+        This exchange outputs: ``v1/role-message.json#``
+
+        This exchange takes the following keys:
+
+        * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'role-deleted',
+ 'name': 'roleDeleted',
+ 'routingKey': [
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/role-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ funcinfo = {
+ }
+
+
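+# Illustrative usage sketch (not part of the generated API; the rootUrl is a
+# placeholder). Because every exchange above takes only the catch-all
+# 'reserved' routing key, a cache-invalidation or monitoring consumer
+# typically binds to all of them with the default '#' pattern, as sketched
+# here.
+def _exampleAuthEventBindings(root_url):
+    events = AuthEvents({'rootUrl': root_url})
+    return [
+        events.clientCreated(),
+        events.clientUpdated(),
+        events.clientDeleted(),
+        events.roleCreated(),
+        events.roleUpdated(),
+        events.roleDeleted(),
+    ]
+
+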
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'AuthEvents']
diff --git a/third_party/python/taskcluster/taskcluster/awsprovisioner.py b/third_party/python/taskcluster/taskcluster/awsprovisioner.py
new file mode 100644
index 0000000000..820bbea0e8
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/awsprovisioner.py
@@ -0,0 +1,450 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class AwsProvisioner(BaseClient):
+ """
+ The AWS Provisioner is responsible for provisioning instances on EC2 for use in
+ Taskcluster. The provisioner maintains a set of worker configurations which
+ can be managed with an API that is typically available at
+ aws-provisioner.taskcluster.net/v1. This API can also perform basic instance
+ management tasks in addition to maintaining the internal state of worker type
+ configuration information.
+
+ The Provisioner runs at a configurable interval. Each iteration of the
+    provisioner fetches a current copy of the state that the AWS EC2 API reports. In
+    each iteration, we ask the Queue how many tasks are pending for each worker
+    type. Based on the number of tasks pending and the scaling ratio, we may
+ submit requests for new instances. We use pricing information, capacity and
+ utility factor information to decide which instance type in which region would
+ be the optimal configuration.
+
+ Each EC2 instance type will declare a capacity and utility factor. Capacity is
+ the number of tasks that a given machine is capable of running concurrently.
+ Utility factor is a relative measure of performance between two instance types.
+ We multiply the utility factor by the spot price to compare instance types and
+ regions when making the bidding choices.
+
+ When a new EC2 instance is instantiated, its user data contains a token in
+ `securityToken` that can be used with the `getSecret` method to retrieve
+ the worker's credentials and any needed passwords or other restricted
+ information. The worker is responsible for deleting the secret after
+ retrieving it, to prevent dissemination of the secret to other proceses
+    retrieving it, to prevent dissemination of the secret to other processes
+
+ """
+
+ classOptions = {
+ }
+ serviceName = 'aws-provisioner'
+ apiVersion = 'v1'
+
+ def listWorkerTypeSummaries(self, *args, **kwargs):
+ """
+ List worker types with details
+
+ Return a list of worker types, including some summary information about
+ current capacity for each. While this list includes all defined worker types,
+ there may be running EC2 instances for deleted worker types that are not
+ included here. The list is unordered.
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/list-worker-types-summaries-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["listWorkerTypeSummaries"], *args, **kwargs)
+
+ def createWorkerType(self, *args, **kwargs):
+ """
+ Create new Worker Type
+
+ Create a worker type. A worker type contains all the configuration
+ needed for the provisioner to manage the instances. Each worker type
+ knows which regions and which instance types are allowed for that
+ worker type. Remember that Capacity is the number of concurrent tasks
+ that can be run on a given EC2 resource and that Utility is the relative
+ performance rate between different instance types. There is no way to
+ configure different regions to have different sets of instance types
+ so ensure that all instance types are available in all regions.
+ This function is idempotent.
+
+        Once a worker type is in the provisioner, a background process will
+ begin creating instances for it based on its capacity bounds and its
+ pending task count from the Queue. It is the worker's responsibility
+        to shut itself down. The provisioner has a limit (currently 96 hours)
+ for all instances to prevent zombie instances from running indefinitely.
+
+ The provisioner will ensure that all instances created are tagged with
+ aws resource tags containing the provisioner id and the worker type.
+
+ If provided, the secrets in the global, region and instance type sections
+ are available using the secrets api. If specified, the scopes provided
+ will be used to generate a set of temporary credentials available with
+ the other secrets.
+
+ This method takes input: ``http://schemas.taskcluster.net/aws-provisioner/v1/create-worker-type-request.json#``
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["createWorkerType"], *args, **kwargs)
+
+ def updateWorkerType(self, *args, **kwargs):
+ """
+ Update Worker Type
+
+ Provide a new copy of a worker type to replace the existing one.
+ This will overwrite the existing worker type definition if there
+ is already a worker type of that name. This method will return a
+        200 response along with a copy of the worker type definition created.
+        Note that if you are using the result of a GET on the worker-type
+        end-point, you will need to delete the lastModified and workerType
+        keys from the object returned, since those fields are not allowed in
+        the request body for this method.
+
+ Otherwise, all input requirements and actions are the same as the
+ create method.
+
+ This method takes input: ``http://schemas.taskcluster.net/aws-provisioner/v1/create-worker-type-request.json#``
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["updateWorkerType"], *args, **kwargs)
+
+ def workerTypeLastModified(self, *args, **kwargs):
+ """
+ Get Worker Type Last Modified Time
+
+ This method is provided to allow workers to see when they were
+ last modified. The value provided through UserData can be
+        compared against this value to see if changes have been made.
+        If the worker type definition has not been changed, the date
+        should be identical, as it is the same stored value.
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-last-modified.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["workerTypeLastModified"], *args, **kwargs)
+
+ def workerType(self, *args, **kwargs):
+ """
+ Get Worker Type
+
+ Retrieve a copy of the requested worker type definition.
+ This copy contains a lastModified field as well as the worker
+ type name. As such, it will require manipulation to be able to
+        use the results of this method to submit data to the update
+ method.
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["workerType"], *args, **kwargs)
+
+ def removeWorkerType(self, *args, **kwargs):
+ """
+ Delete Worker Type
+
+ Delete a worker type definition. This method will only delete
+ the worker type definition from the storage table. The actual
+ deletion will be handled by a background worker. As soon as this
+ method is called for a worker type, the background worker will
+ immediately submit requests to cancel all spot requests for this
+ worker type as well as killing all instances regardless of their
+        state. If you want to gracefully remove a worker type, you must
+        either ensure that no tasks are created with that worker type name
+        or you could theoretically set maxCapacity to 0, though this is
+        not a supported or tested action.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["removeWorkerType"], *args, **kwargs)
+
+ def listWorkerTypes(self, *args, **kwargs):
+ """
+ List Worker Types
+
+ Return a list of string worker type names. These are the names
+ of all managed worker types known to the provisioner. This does
+ not include worker types which are left overs from a deleted worker
+ type definition but are still running in AWS.
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/list-worker-types-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["listWorkerTypes"], *args, **kwargs)
+
+ def createSecret(self, *args, **kwargs):
+ """
+ Create new Secret
+
+ Insert a secret into the secret storage. The supplied secrets will
+        be provided verbatim via `getSecret`, while the supplied scopes will
+ be converted into credentials by `getSecret`.
+
+ This method is not ordinarily used in production; instead, the provisioner
+ creates a new secret directly for each spot bid.
+
+ This method takes input: ``http://schemas.taskcluster.net/aws-provisioner/v1/create-secret-request.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["createSecret"], *args, **kwargs)
+
+ def getSecret(self, *args, **kwargs):
+ """
+ Get a Secret
+
+ Retrieve a secret from storage. The result contains any passwords or
+ other restricted information verbatim as well as a temporary credential
+ based on the scopes specified when the secret was created.
+
+ It is important that this secret is deleted by the consumer (`removeSecret`),
+ or else the secrets will be visible to any process which can access the
+ user data associated with the instance.
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-secret-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["getSecret"], *args, **kwargs)
+
+ def instanceStarted(self, *args, **kwargs):
+ """
+ Report an instance starting
+
+ An instance will report in by giving its instance id as well
+        as its security token. The token is given and checked to ensure
+        that it matches a real, existing token, so that random
+        machines do not check in. We could generate a different token,
+        but that seems like overkill.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["instanceStarted"], *args, **kwargs)
+
+ def removeSecret(self, *args, **kwargs):
+ """
+ Remove a Secret
+
+ Remove a secret. After this call, a call to `getSecret` with the given
+ token will return no information.
+
+ It is very important that the consumer of a
+ secret delete the secret from storage before handing over control
+ to untrusted processes to prevent credential and/or secret leakage.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["removeSecret"], *args, **kwargs)
+
+ def getLaunchSpecs(self, *args, **kwargs):
+ """
+ Get All Launch Specifications for WorkerType
+
+ This method returns a preview of all possible launch specifications
+ that this worker type definition could submit to EC2. It is used to
+ test worker types, nothing more
+
+ **This API end-point is experimental and may be subject to change without warning.**
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-launch-specs-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["getLaunchSpecs"], *args, **kwargs)
+
+ def state(self, *args, **kwargs):
+ """
+ Get AWS State for a worker type
+
+        Return the state of a given worker type as stored by the provisioner.
+        This state is stored as three lists: one for running instances, one for
+        pending requests. The `summary` property contains an updated summary
+ similar to that returned from `listWorkerTypeSummaries`.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["state"], *args, **kwargs)
+
+ def backendStatus(self, *args, **kwargs):
+ """
+ Backend Status
+
+        This endpoint is used to show when the provisioner last
+        checked in. A check-in is done through the Dead Man's Snitch
+        API. It is done at the conclusion of a provisioning iteration
+        and is used to tell whether the background provisioning process is still
+        running.
+
+ **Warning** this api end-point is **not stable**.
+
+ This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/backend-status-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["backendStatus"], *args, **kwargs)
+
+ def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ funcinfo = {
+ "backendStatus": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'backendStatus',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/backend-status-response.json#',
+ 'route': '/backend-status',
+ 'stability': 'experimental',
+ },
+ "createSecret": {
+ 'args': ['token'],
+ 'input': 'http://schemas.taskcluster.net/aws-provisioner/v1/create-secret-request.json#',
+ 'method': 'put',
+ 'name': 'createSecret',
+ 'route': '/secret/<token>',
+ 'stability': 'stable',
+ },
+ "createWorkerType": {
+ 'args': ['workerType'],
+ 'input': 'http://schemas.taskcluster.net/aws-provisioner/v1/create-worker-type-request.json#',
+ 'method': 'put',
+ 'name': 'createWorkerType',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#',
+ 'route': '/worker-type/<workerType>',
+ 'stability': 'stable',
+ },
+ "getLaunchSpecs": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'getLaunchSpecs',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/get-launch-specs-response.json#',
+ 'route': '/worker-type/<workerType>/launch-specifications',
+ 'stability': 'experimental',
+ },
+ "getSecret": {
+ 'args': ['token'],
+ 'method': 'get',
+ 'name': 'getSecret',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/get-secret-response.json#',
+ 'route': '/secret/<token>',
+ 'stability': 'stable',
+ },
+ "instanceStarted": {
+ 'args': ['instanceId', 'token'],
+ 'method': 'get',
+ 'name': 'instanceStarted',
+ 'route': '/instance-started/<instanceId>/<token>',
+ 'stability': 'stable',
+ },
+ "listWorkerTypeSummaries": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listWorkerTypeSummaries',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/list-worker-types-summaries-response.json#',
+ 'route': '/list-worker-type-summaries',
+ 'stability': 'stable',
+ },
+ "listWorkerTypes": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listWorkerTypes',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/list-worker-types-response.json#',
+ 'route': '/list-worker-types',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "removeSecret": {
+ 'args': ['token'],
+ 'method': 'delete',
+ 'name': 'removeSecret',
+ 'route': '/secret/<token>',
+ 'stability': 'stable',
+ },
+ "removeWorkerType": {
+ 'args': ['workerType'],
+ 'method': 'delete',
+ 'name': 'removeWorkerType',
+ 'route': '/worker-type/<workerType>',
+ 'stability': 'stable',
+ },
+ "state": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'state',
+ 'route': '/state/<workerType>',
+ 'stability': 'stable',
+ },
+ "updateWorkerType": {
+ 'args': ['workerType'],
+ 'input': 'http://schemas.taskcluster.net/aws-provisioner/v1/create-worker-type-request.json#',
+ 'method': 'post',
+ 'name': 'updateWorkerType',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#',
+ 'route': '/worker-type/<workerType>/update',
+ 'stability': 'stable',
+ },
+ "workerType": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'workerType',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-response.json#',
+ 'route': '/worker-type/<workerType>',
+ 'stability': 'stable',
+ },
+ "workerTypeLastModified": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'workerTypeLastModified',
+ 'output': 'http://schemas.taskcluster.net/aws-provisioner/v1/get-worker-type-last-modified.json#',
+ 'route': '/worker-type-last-modified/<workerType>',
+ 'stability': 'stable',
+ },
+ }
+
+
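+# Illustrative usage sketch (not part of the generated API; the rootUrl,
+# credentials and worker type name are placeholders). It follows the
+# read-modify-update flow described in the updateWorkerType docstring above:
+# fetch the definition, drop the fields the update request schema does not
+# accept, then submit it back.
+def _exampleUpdateWorkerType(root_url, credentials, worker_type):
+    prov = AwsProvisioner({'rootUrl': root_url, 'credentials': credentials})
+    definition = prov.workerType(worker_type)
+    # Strip the keys the update schema rejects, per the docstring above.
+    for key in ('lastModified', 'workerType'):
+        definition.pop(key, None)
+    return prov.updateWorkerType(worker_type, definition)
+
+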
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'AwsProvisioner']
diff --git a/third_party/python/taskcluster/taskcluster/awsprovisionerevents.py b/third_party/python/taskcluster/taskcluster/awsprovisionerevents.py
new file mode 100644
index 0000000000..d24d773ff9
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/awsprovisionerevents.py
@@ -0,0 +1,142 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class AwsProvisionerEvents(BaseClient):
+ """
+ Exchanges from the provisioner... more docs later
+ """
+
+ classOptions = {
+ "exchangePrefix": "exchange/taskcluster-aws-provisioner/v1/",
+ }
+ apiVersion = 'v1'
+
+ def workerTypeCreated(self, *args, **kwargs):
+ """
+ WorkerType Created Message
+
+ When a new `workerType` is created a message will be published to this
+ exchange.
+
+        This exchange outputs: ``http://schemas.taskcluster.net/aws-provisioner/v1/worker-type-message.json#``
+
+        This exchange takes the following keys:
+
+        * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+        * workerType: WorkerType that this message concerns. (required)
+
+        * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'worker-type-created',
+ 'name': 'workerTypeCreated',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'http://schemas.taskcluster.net/aws-provisioner/v1/worker-type-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def workerTypeUpdated(self, *args, **kwargs):
+ """
+ WorkerType Updated Message
+
+ When a `workerType` is updated a message will be published to this
+ exchange.
+
+        This exchange outputs: ``http://schemas.taskcluster.net/aws-provisioner/v1/worker-type-message.json#``
+
+        This exchange takes the following keys:
+
+        * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+        * workerType: WorkerType that this message concerns. (required)
+
+        * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'worker-type-updated',
+ 'name': 'workerTypeUpdated',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'http://schemas.taskcluster.net/aws-provisioner/v1/worker-type-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def workerTypeRemoved(self, *args, **kwargs):
+ """
+ WorkerType Removed Message
+
+ When a `workerType` is removed a message will be published to this
+ exchange.
+
+        This exchange outputs: ``http://schemas.taskcluster.net/aws-provisioner/v1/worker-type-message.json#``
+
+        This exchange takes the following keys:
+
+        * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+        * workerType: WorkerType that this message concerns. (required)
+
+        * reserved: Space reserved for future routing-key entries; you should always match this entry with `#`, as is done automatically by our tooling if not specified.
+ """
+
+ ref = {
+ 'exchange': 'worker-type-removed',
+ 'name': 'workerTypeRemoved',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'http://schemas.taskcluster.net/aws-provisioner/v1/worker-type-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ funcinfo = {
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'AwsProvisionerEvents']
diff --git a/third_party/python/taskcluster/taskcluster/client.py b/third_party/python/taskcluster/taskcluster/client.py
new file mode 100644
index 0000000000..9ef21956c2
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/client.py
@@ -0,0 +1,712 @@
+"""This module is used to interact with taskcluster rest apis"""
+
+from __future__ import absolute_import, division, print_function
+
+import json
+import logging
+import copy
+import hashlib
+import hmac
+import datetime
+import calendar
+import requests
+import time
+import six
+import warnings
+from six.moves import urllib
+
+import mohawk
+import mohawk.bewit
+
+import taskcluster.exceptions as exceptions
+import taskcluster.utils as utils
+import taskcluster_urls as liburls
+
+log = logging.getLogger(__name__)
+
+
+# Default configuration
+_defaultConfig = config = {
+ 'credentials': {
+ 'clientId': None,
+ 'accessToken': None,
+ 'certificate': None,
+ },
+ 'rootUrl': None,
+ 'maxRetries': 5,
+ 'signedUrlExpiration': 15 * 60,
+}
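+# These defaults are typically overridden per client instance, e.g.
+# (illustrative placeholder values only):
+#   options = {'rootUrl': 'https://tc.example.com',
+#              'credentials': {'clientId': 'my-client', 'accessToken': 'my-token'}}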
+
+
+def createSession(*args, **kwargs):
+ """ Create a new requests session. This passes through all positional and
+ keyword arguments to the requests.Session() constructor
+ """
+ return requests.Session(*args, **kwargs)
+
+
+class BaseClient(object):
+ """ Base Class for API Client Classes. Each individual Client class
+ needs to set up its own methods for REST endpoints and Topic Exchange
+ routing key patterns. The _makeApiCall() and _topicExchange() methods
+ help with this.
+ """
+
+ def __init__(self, options=None, session=None):
+ if options and options.get('baseUrl'):
+ raise exceptions.TaskclusterFailure('baseUrl option is no longer allowed')
+ o = copy.deepcopy(self.classOptions)
+ o.update(_defaultConfig)
+ if options:
+ o.update(options)
+ if not o.get('rootUrl'):
+ raise exceptions.TaskclusterFailure('rootUrl option is required')
+
+ credentials = o.get('credentials')
+ if credentials:
+ for x in ('accessToken', 'clientId', 'certificate'):
+ value = credentials.get(x)
+ if value and not isinstance(value, six.binary_type):
+ try:
+ credentials[x] = credentials[x].encode('ascii')
+                    except Exception:
+ s = '%s (%s) must be unicode encodable' % (x, credentials[x])
+ raise exceptions.TaskclusterAuthFailure(s)
+
+ self.options = o
+ if 'credentials' in o:
+ log.debug('credentials key scrubbed from logging output')
+ log.debug(dict((k, v) for k, v in o.items() if k != 'credentials'))
+
+ if session:
+ self.session = session
+ else:
+ self.session = self._createSession()
+
+ def _createSession(self):
+ """ Create a requests session.
+
+ Helper method which can be overridden by child classes.
+ """
+ return createSession()
+
+ def makeHawkExt(self):
+ """ Make an 'ext' for Hawk authentication """
+ o = self.options
+ c = o.get('credentials', {})
+ if c.get('clientId') and c.get('accessToken'):
+ ext = {}
+ cert = c.get('certificate')
+ if cert:
+ if six.PY3 and isinstance(cert, six.binary_type):
+ cert = cert.decode()
+ if isinstance(cert, six.string_types):
+ cert = json.loads(cert)
+ ext['certificate'] = cert
+
+ if 'authorizedScopes' in o:
+ ext['authorizedScopes'] = o['authorizedScopes']
+
+ # .encode('base64') inserts a newline, which hawk doesn't
+ # like but doesn't strip itself
+ return utils.makeB64UrlSafe(utils.encodeStringForB64Header(utils.dumpJson(ext)).strip())
+ else:
+ return {}
+
+ def _makeTopicExchange(self, entry, *args, **kwargs):
+ if len(args) == 0 and not kwargs:
+ routingKeyPattern = {}
+ elif len(args) >= 1:
+ if kwargs or len(args) != 1:
+ errStr = 'Pass either a string, single dictionary or only kwargs'
+ raise exceptions.TaskclusterTopicExchangeFailure(errStr)
+ routingKeyPattern = args[0]
+ else:
+ routingKeyPattern = kwargs
+
+ data = {
+ 'exchange': '%s/%s' % (self.options['exchangePrefix'].rstrip('/'),
+ entry['exchange'].lstrip('/'))
+ }
+
+ # If we are passed in a string, we can short-circuit this function
+ if isinstance(routingKeyPattern, six.string_types):
+ log.debug('Passing through string for topic exchange key')
+ data['routingKeyPattern'] = routingKeyPattern
+ return data
+
+ if type(routingKeyPattern) != dict:
+ errStr = 'routingKeyPattern must eventually be a dict'
+ raise exceptions.TaskclusterTopicExchangeFailure(errStr)
+
+ if not routingKeyPattern:
+ routingKeyPattern = {}
+
+ # There is no canonical meaning for the maxSize and required
+ # reference entry in the JS client, so we don't try to define
+ # them here, even though they sound pretty obvious
+
+ routingKey = []
+ for key in entry['routingKey']:
+ if 'constant' in key:
+ value = key['constant']
+ elif key['name'] in routingKeyPattern:
+ log.debug('Found %s in routing key params', key['name'])
+ value = str(routingKeyPattern[key['name']])
+ if not key.get('multipleWords') and '.' in value:
+ raise exceptions.TaskclusterTopicExchangeFailure(
+ 'Cannot have periods in single word keys')
+ else:
+ value = '#' if key.get('multipleWords') else '*'
+ log.debug('Did not find %s in input params, using %s', key['name'], value)
+
+ routingKey.append(value)
+
+ data['routingKeyPattern'] = '.'.join([str(x) for x in routingKey])
+ return data
+
+ def buildUrl(self, methodName, *args, **kwargs):
+ entry = self.funcinfo.get(methodName)
+ if not entry:
+ raise exceptions.TaskclusterFailure(
+ 'Requested method "%s" not found in API Reference' % methodName)
+ routeParams, _, query, _, _ = self._processArgs(entry, *args, **kwargs)
+ route = self._subArgsInRoute(entry, routeParams)
+ if query:
+ route += '?' + urllib.parse.urlencode(query)
+ return liburls.api(self.options['rootUrl'], self.serviceName, self.apiVersion, route)
+
+ def buildSignedUrl(self, methodName, *args, **kwargs):
+ """ Build a signed URL. This URL contains the credentials needed to access
+ a resource."""
+
+ if 'expiration' in kwargs:
+ expiration = kwargs['expiration']
+ del kwargs['expiration']
+ else:
+ expiration = self.options['signedUrlExpiration']
+
+ expiration = int(time.time() + expiration) # Mainly so that we throw if it's not a number
+
+ requestUrl = self.buildUrl(methodName, *args, **kwargs)
+
+ if not self._hasCredentials():
+ raise exceptions.TaskclusterAuthFailure('Invalid Hawk Credentials')
+
+ clientId = utils.toStr(self.options['credentials']['clientId'])
+ accessToken = utils.toStr(self.options['credentials']['accessToken'])
+
+ def genBewit():
+ # We need to fix the output of get_bewit. It returns a url-safe base64
+ # encoded string, which contains a list of tokens separated by '\'.
+ # The first one is the clientId, the second is an int, the third is
+ # url-safe base64 encoded MAC, the fourth is the ext param.
+ # The problem is that the nested url-safe base64 encoded MAC must be
+ # base64 (i.e. not url safe) or server-side will complain.
+
+ # id + '\\' + exp + '\\' + mac + '\\' + options.ext;
+ resource = mohawk.base.Resource(
+ credentials={
+ 'id': clientId,
+ 'key': accessToken,
+ 'algorithm': 'sha256',
+ },
+ method='GET',
+ ext=utils.toStr(self.makeHawkExt()),
+ url=requestUrl,
+ timestamp=expiration,
+ nonce='',
+ # content='',
+ # content_type='',
+ )
+ bewit = mohawk.bewit.get_bewit(resource)
+ return bewit.rstrip('=')
+
+ bewit = genBewit()
+
+ if not bewit:
+ raise exceptions.TaskclusterFailure('Did not receive a bewit')
+
+ u = urllib.parse.urlparse(requestUrl)
+
+ qs = u.query
+ if qs:
+ qs += '&'
+ qs += 'bewit=%s' % bewit
+
+ return urllib.parse.urlunparse((
+ u.scheme,
+ u.netloc,
+ u.path,
+ u.params,
+ qs,
+ u.fragment,
+ ))
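+    # Illustrative usage (sketch; the method name and its arguments are
+    # hypothetical and must exist in this client's funcinfo):
+    #   url = client.buildSignedUrl('getArtifact', taskId, runId, name,
+    #                               expiration=300)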
+
+ def _constructUrl(self, route):
+ """Construct a URL for the given route on this service, based on the
+ rootUrl"""
+ return liburls.api(
+ self.options['rootUrl'],
+ self.serviceName,
+ self.apiVersion,
+ route.rstrip('/'))
+
+ def _makeApiCall(self, entry, *args, **kwargs):
+ """ This function is used to dispatch calls to other functions
+ for a given API Reference entry"""
+
+ x = self._processArgs(entry, *args, **kwargs)
+ routeParams, payload, query, paginationHandler, paginationLimit = x
+ route = self._subArgsInRoute(entry, routeParams)
+
+ # TODO: Check for limit being in the Query of the api ref
+ if paginationLimit and 'limit' in entry.get('query', []):
+ query['limit'] = paginationLimit
+
+ if query:
+ _route = route + '?' + urllib.parse.urlencode(query)
+ else:
+ _route = route
+ response = self._makeHttpRequest(entry['method'], _route, payload)
+
+ if paginationHandler:
+ paginationHandler(response)
+ while response.get('continuationToken'):
+ query['continuationToken'] = response['continuationToken']
+ _route = route + '?' + urllib.parse.urlencode(query)
+ response = self._makeHttpRequest(entry['method'], _route, payload)
+ paginationHandler(response)
+ else:
+ return response
+
+ def _processArgs(self, entry, *_args, **_kwargs):
+ """ Given an entry, positional and keyword arguments, figure out what
+ the query-string options, payload and api arguments are.
+ """
+
+ # We need the args to be a list so we can mutate them
+ args = list(_args)
+ kwargs = copy.deepcopy(_kwargs)
+
+ reqArgs = entry['args']
+ routeParams = {}
+
+ query = {}
+ payload = None
+ kwApiArgs = {}
+
+ paginationHandler = None
+ paginationLimit = None
+
+ # There are three formats for calling methods:
+ # 1. method(v1, v1, payload)
+ # 2. method(payload, k1=v1, k2=v2)
+ # 3. method(payload=payload, query=query, params={k1: v1, k2: v2})
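+        # For example (hypothetical `queue` client, task id and payload):
+        #   queue.createTask('someTaskId', payload)                # format 1
+        #   queue.createTask(payload, taskId='someTaskId')         # format 2
+        #   queue.createTask(payload=payload,
+        #                    params={'taskId': 'someTaskId'})      # format 3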
+ if len(kwargs) == 0:
+ if 'input' in entry and len(args) == len(reqArgs) + 1:
+ payload = args.pop()
+ if len(args) != len(reqArgs):
+ log.debug(args)
+ log.debug(reqArgs)
+ raise exceptions.TaskclusterFailure('Incorrect number of positional arguments')
+ log.debug('Using method(v1, v2, payload) calling convention')
+ else:
+ # We're considering kwargs which are the api route parameters to be
+ # called 'flat' because they're top level keys. We're special
+ # casing calls which have only api-arg kwargs and possibly a payload
+ # value and handling them directly.
+ isFlatKwargs = True
+ if len(kwargs) == len(reqArgs):
+ for arg in reqArgs:
+ if not kwargs.get(arg, False):
+ isFlatKwargs = False
+ break
+ if 'input' in entry and len(args) != 1:
+ isFlatKwargs = False
+ if 'input' not in entry and len(args) != 0:
+ isFlatKwargs = False
+ else:
+ pass # We're using payload=, query= and param=
+ else:
+ isFlatKwargs = False
+
+            # Now we're going to handle the two types of kwargs. The first is
+            # 'flat' kwargs, where the api route parameters are passed as
+            # top-level keyword arguments (possibly alongside a payload).
+ if isFlatKwargs:
+ if 'input' in entry:
+ payload = args.pop()
+ kwApiArgs = kwargs
+ log.debug('Using method(payload, k1=v1, k2=v2) calling convention')
+ warnings.warn(
+ "The method(payload, k1=v1, k2=v2) calling convention will soon be deprecated",
+ PendingDeprecationWarning
+ )
+ else:
+ kwApiArgs = kwargs.get('params', {})
+ payload = kwargs.get('payload', None)
+ query = kwargs.get('query', {})
+ paginationHandler = kwargs.get('paginationHandler', None)
+ paginationLimit = kwargs.get('paginationLimit', None)
+ log.debug('Using method(payload=payload, query=query, params={k1: v1, k2: v2}) calling convention')
+
+ if 'input' in entry and isinstance(payload, type(None)):
+ raise exceptions.TaskclusterFailure('Payload is required')
+
+ # These all need to be rendered down to a string, let's just check that
+ # they are up front and fail fast
+ for arg in args:
+ if not isinstance(arg, six.string_types) and not isinstance(arg, int):
+ raise exceptions.TaskclusterFailure(
+ 'Positional arg "%s" to %s is not a string or int' % (arg, entry['name']))
+
+ for name, arg in six.iteritems(kwApiArgs):
+ if not isinstance(arg, six.string_types) and not isinstance(arg, int):
+ raise exceptions.TaskclusterFailure(
+ 'KW arg "%s: %s" to %s is not a string or int' % (name, arg, entry['name']))
+
+ if len(args) > 0 and len(kwApiArgs) > 0:
+ raise exceptions.TaskclusterFailure('Specify either positional or key word arguments')
+
+        # We know for sure that the call should fail if we aren't given enough
+        # arguments. We don't yet know whether to fail because of too many
+        # arguments, since keyword arguments might overwrite positional ones.
+ if len(reqArgs) > len(args) + len(kwApiArgs):
+ raise exceptions.TaskclusterFailure(
+ '%s takes %d args, only %d were given' % (
+ entry['name'], len(reqArgs), len(args) + len(kwApiArgs)))
+
+ # We also need to error out when we have more positional args than required
+ # because we'll need to go through the lists of provided and required args
+ # at the same time. Not disqualifying early means we'll get IndexErrors if
+ # there are more positional arguments than required
+ if len(args) > len(reqArgs):
+            raise exceptions.TaskclusterFailure(
+                '%s called with too many positional args' % entry['name'])
+
+ i = 0
+ for arg in args:
+ log.debug('Found a positional argument: %s', arg)
+ routeParams[reqArgs[i]] = arg
+ i += 1
+
+ log.debug('After processing positional arguments, we have: %s', routeParams)
+
+ routeParams.update(kwApiArgs)
+
+ log.debug('After keyword arguments, we have: %s', routeParams)
+
+ if len(reqArgs) != len(routeParams):
+ errMsg = '%s takes %s args, %s given' % (
+ entry['name'],
+ ','.join(reqArgs),
+ routeParams.keys())
+ log.error(errMsg)
+ raise exceptions.TaskclusterFailure(errMsg)
+
+ for reqArg in reqArgs:
+ if reqArg not in routeParams:
+ errMsg = '%s requires a "%s" argument which was not provided' % (
+ entry['name'], reqArg)
+ log.error(errMsg)
+ raise exceptions.TaskclusterFailure(errMsg)
+
+ return routeParams, payload, query, paginationHandler, paginationLimit
+
+ def _subArgsInRoute(self, entry, args):
+ """ Given a route like "/task/<taskId>/artifacts" and a mapping like
+ {"taskId": "12345"}, return a string like "/task/12345/artifacts"
+ """
+
+ route = entry['route']
+
+ for arg, val in six.iteritems(args):
+ toReplace = "<%s>" % arg
+ if toReplace not in route:
+ raise exceptions.TaskclusterFailure(
+ 'Arg %s not found in route for %s' % (arg, entry['name']))
+ val = urllib.parse.quote(str(val).encode("utf-8"), '')
+ route = route.replace("<%s>" % arg, val)
+
+ return route.lstrip('/')
+
+ def _hasCredentials(self):
+        """ Return True if credentials are given """
+ cred = self.options.get('credentials')
+ return (
+ cred and
+ 'clientId' in cred and
+ 'accessToken' in cred and
+ cred['clientId'] and
+ cred['accessToken']
+ )
+
+ def _makeHttpRequest(self, method, route, payload):
+ """ Make an HTTP Request for the API endpoint. This method wraps
+ the logic about doing failure retry and passes off the actual work
+ of doing an HTTP request to another method."""
+
+ url = self._constructUrl(route)
+ log.debug('Full URL used is: %s', url)
+
+ hawkExt = self.makeHawkExt()
+
+ # Serialize payload if given
+ if payload is not None:
+ payload = utils.dumpJson(payload)
+
+ # Do a loop of retries
+        retry = -1  # incremented at the top of the loop; attempt 1 is retry 0
+ retries = self.options['maxRetries']
+ while retry < retries:
+ retry += 1
+ # if this isn't the first retry then we sleep
+ if retry > 0:
+ time.sleep(utils.calculateSleepTime(retry))
+ # Construct header
+ if self._hasCredentials():
+ sender = mohawk.Sender(
+ credentials={
+ 'id': self.options['credentials']['clientId'],
+ 'key': self.options['credentials']['accessToken'],
+ 'algorithm': 'sha256',
+ },
+ ext=hawkExt if hawkExt else {},
+ url=url,
+ content=payload if payload else '',
+ content_type='application/json' if payload else '',
+ method=method,
+ )
+
+ headers = {'Authorization': sender.request_header}
+ else:
+ log.debug('Not using hawk!')
+ headers = {}
+ if payload:
+ # Set header for JSON if payload is given, note that we serialize
+ # outside this loop.
+ headers['Content-Type'] = 'application/json'
+
+ log.debug('Making attempt %d', retry)
+ try:
+ response = utils.makeSingleHttpRequest(method, url, payload, headers)
+ except requests.exceptions.RequestException as rerr:
+ if retry < retries:
+ log.warn('Retrying because of: %s' % rerr)
+ continue
+ # raise a connection exception
+ raise exceptions.TaskclusterConnectionError(
+ "Failed to establish connection",
+ superExc=rerr
+ )
+
+ # Handle non 2xx status code and retry if possible
+ status = response.status_code
+ if status == 204:
+ return None
+
+ # Catch retryable errors and go to the beginning of the loop
+ # to do the retry
+ if 500 <= status and status < 600 and retry < retries:
+ log.warn('Retrying because of a %s status code' % status)
+ continue
+
+ # Throw errors for non-retryable errors
+ if status < 200 or status >= 300:
+ data = {}
+ try:
+ data = response.json()
+                except Exception:
+ pass # Ignore JSON errors in error messages
+ # Find error message
+ message = "Unknown Server Error"
+ if isinstance(data, dict):
+ message = data.get('message')
+ else:
+ if status == 401:
+ message = "Authentication Error"
+ elif status == 500:
+ message = "Internal Server Error"
+ # Raise TaskclusterAuthFailure if this is an auth issue
+ if status == 401:
+ raise exceptions.TaskclusterAuthFailure(
+ message,
+ status_code=status,
+ body=data,
+ superExc=None
+ )
+ # Raise TaskclusterRestFailure for all other issues
+ raise exceptions.TaskclusterRestFailure(
+ message,
+ status_code=status,
+ body=data,
+ superExc=None
+ )
+
+ # Try to load JSON
+ try:
+ return response.json()
+ except ValueError:
+ return {"response": response}
+
+ # This code-path should be unreachable
+ assert False, "Error from last retry should have been raised!"
+
+
+def createApiClient(name, api):
+ api = api['reference']
+
+ attributes = dict(
+ name=name,
+ __doc__=api.get('description'),
+ classOptions={},
+ funcinfo={},
+ )
+
+ # apply a default for apiVersion; this can be removed when all services
+ # have apiVersion
+ if 'apiVersion' not in api:
+ api['apiVersion'] = 'v1'
+
+ copiedOptions = ('exchangePrefix',)
+ for opt in copiedOptions:
+ if opt in api:
+ attributes['classOptions'][opt] = api[opt]
+
+ copiedProperties = ('serviceName', 'apiVersion')
+ for opt in copiedProperties:
+ if opt in api:
+ attributes[opt] = api[opt]
+
+ for entry in api['entries']:
+ if entry['type'] == 'function':
+ def addApiCall(e):
+ def apiCall(self, *args, **kwargs):
+ return self._makeApiCall(e, *args, **kwargs)
+ return apiCall
+ f = addApiCall(entry)
+
+ docStr = "Call the %s api's %s method. " % (name, entry['name'])
+
+ if entry['args'] and len(entry['args']) > 0:
+ docStr += "This method takes:\n\n"
+ docStr += '\n'.join(['- ``%s``' % x for x in entry['args']])
+ docStr += '\n\n'
+ else:
+ docStr += "This method takes no arguments. "
+
+ if 'input' in entry:
+ docStr += "This method takes input ``%s``. " % entry['input']
+
+ if 'output' in entry:
+ docStr += "This method gives output ``%s``" % entry['output']
+
+ docStr += '\n\nThis method does a ``%s`` to ``%s``.' % (
+ entry['method'].upper(), entry['route'])
+
+ f.__doc__ = docStr
+ attributes['funcinfo'][entry['name']] = entry
+
+ elif entry['type'] == 'topic-exchange':
+ def addTopicExchange(e):
+ def topicExchange(self, *args, **kwargs):
+ return self._makeTopicExchange(e, *args, **kwargs)
+ return topicExchange
+
+ f = addTopicExchange(entry)
+
+ docStr = 'Generate a routing key pattern for the %s exchange. ' % entry['exchange']
+ docStr += 'This method takes a given routing key as a string or a '
+ docStr += 'dictionary. For each given dictionary key, the corresponding '
+ docStr += 'routing key token takes its value. For routing key tokens '
+ docStr += 'which are not specified by the dictionary, the * or # character '
+ docStr += 'is used depending on whether or not the key allows multiple words.\n\n'
+ docStr += 'This exchange takes the following keys:\n\n'
+ docStr += '\n'.join(['- ``%s``' % x['name'] for x in entry['routingKey']])
+
+ f.__doc__ = docStr
+
+ # Add whichever function we created
+ f.__name__ = str(entry['name'])
+ attributes[entry['name']] = f
+
+ return type(utils.toStr(name), (BaseClient,), attributes)
+
+
+def createTemporaryCredentials(clientId, accessToken, start, expiry, scopes, name=None):
+ """ Create a set of temporary credentials
+
+ Callers should not apply any clock skew; clock drift is accounted for by
+    the auth service.
+
+ clientId: the issuing clientId
+ accessToken: the issuer's accessToken
+ start: start time of credentials (datetime.datetime)
+ expiry: expiration time of credentials, (datetime.datetime)
+ scopes: list of scopes granted
+ name: credential name (optional)
+
+ Returns a dictionary in the form:
+    { 'clientId': str, 'accessToken': str, 'certificate': str}
+ """
+
+ for scope in scopes:
+ if not isinstance(scope, six.string_types):
+ raise exceptions.TaskclusterFailure('Scope must be string')
+
+ # Credentials can only be valid for 31 days. I hope that
+ # this is validated on the server somehow...
+
+ if expiry - start > datetime.timedelta(days=31):
+ raise exceptions.TaskclusterFailure('Only 31 days allowed')
+
+ # We multiply times by 1000 because the auth service is JS and as a result
+ # uses milliseconds instead of seconds
+ cert = dict(
+ version=1,
+ scopes=scopes,
+ start=calendar.timegm(start.utctimetuple()) * 1000,
+ expiry=calendar.timegm(expiry.utctimetuple()) * 1000,
+ seed=utils.slugId() + utils.slugId(),
+ )
+
+ # if this is a named temporary credential, include the issuer in the certificate
+ if name:
+ cert['issuer'] = utils.toStr(clientId)
+
+ sig = ['version:' + utils.toStr(cert['version'])]
+ if name:
+ sig.extend([
+ 'clientId:' + utils.toStr(name),
+ 'issuer:' + utils.toStr(clientId),
+ ])
+ sig.extend([
+ 'seed:' + utils.toStr(cert['seed']),
+ 'start:' + utils.toStr(cert['start']),
+ 'expiry:' + utils.toStr(cert['expiry']),
+ 'scopes:'
+ ] + scopes)
+ sigStr = '\n'.join(sig).encode()
+
+ if isinstance(accessToken, six.text_type):
+ accessToken = accessToken.encode()
+ sig = hmac.new(accessToken, sigStr, hashlib.sha256).digest()
+
+ cert['signature'] = utils.encodeStringForB64Header(sig)
+
+ newToken = hmac.new(accessToken, cert['seed'], hashlib.sha256).digest()
+ newToken = utils.makeB64UrlSafe(utils.encodeStringForB64Header(newToken)).replace(b'=', b'')
+
+ return {
+ 'clientId': name or clientId,
+ 'accessToken': newToken,
+ 'certificate': utils.dumpJson(cert),
+ }
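+
+
+# Illustrative usage (sketch; all values below are placeholders, not real
+# credentials or scopes):
+#   creds = createTemporaryCredentials(
+#       clientId='issuer-client-id',
+#       accessToken='issuer-access-token',
+#       start=datetime.datetime.utcnow(),
+#       expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=1),
+#       scopes=['assume:project:example/*'],
+#   )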
+
+
+__all__ = [
+ 'createTemporaryCredentials',
+ 'config',
+ 'BaseClient',
+ 'createApiClient',
+]
diff --git a/third_party/python/taskcluster/taskcluster/ec2manager.py b/third_party/python/taskcluster/taskcluster/ec2manager.py
new file mode 100644
index 0000000000..a8707ebe81
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/ec2manager.py
@@ -0,0 +1,475 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class EC2Manager(BaseClient):
+ """
+    A taskcluster service which manages EC2 instances. This service does not understand any taskcluster concepts intrinsically other than using the name `workerType` to refer to a group of associated instances. Unless you are working on building a provisioner for AWS, you almost certainly do not want to use this service.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'ec2-manager'
+ apiVersion = 'v1'
+
+ def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ def listWorkerTypes(self, *args, **kwargs):
+ """
+ See the list of worker types which are known to be managed
+
+ This method is only for debugging the ec2-manager
+
+ This method gives output: ``v1/list-worker-types.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["listWorkerTypes"], *args, **kwargs)
+
+ def runInstance(self, *args, **kwargs):
+ """
+ Run an instance
+
+ Request an instance of a worker type
+
+ This method takes input: ``v1/run-instance-request.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["runInstance"], *args, **kwargs)
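+    # Illustrative call (sketch; the workerType name is a placeholder and the
+    # payload must satisfy v1/run-instance-request.json#):
+    #   mgr = EC2Manager({'rootUrl': 'https://tc.example.com'})
+    #   mgr.runInstance('my-worker-type', payload)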
+
+ def terminateWorkerType(self, *args, **kwargs):
+ """
+ Terminate all resources from a worker type
+
+ Terminate all instances for this worker type
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["terminateWorkerType"], *args, **kwargs)
+
+ def workerTypeStats(self, *args, **kwargs):
+ """
+ Look up the resource stats for a workerType
+
+ Return an object which has a generic state description. This only contains counts of instances
+
+ This method gives output: ``v1/worker-type-resources.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["workerTypeStats"], *args, **kwargs)
+
+ def workerTypeHealth(self, *args, **kwargs):
+ """
+ Look up the resource health for a workerType
+
+ Return a view of the health of a given worker type
+
+ This method gives output: ``v1/health.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["workerTypeHealth"], *args, **kwargs)
+
+ def workerTypeErrors(self, *args, **kwargs):
+ """
+ Look up the most recent errors of a workerType
+
+ Return a list of the most recent errors encountered by a worker type
+
+ This method gives output: ``v1/errors.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["workerTypeErrors"], *args, **kwargs)
+
+ def workerTypeState(self, *args, **kwargs):
+ """
+ Look up the resource state for a workerType
+
+ Return state information for a given worker type
+
+ This method gives output: ``v1/worker-type-state.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["workerTypeState"], *args, **kwargs)
+
+ def ensureKeyPair(self, *args, **kwargs):
+ """
+ Ensure a KeyPair for a given worker type exists
+
+ Idempotently ensure that a keypair of a given name exists
+
+ This method takes input: ``v1/create-key-pair.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["ensureKeyPair"], *args, **kwargs)
+
+ def removeKeyPair(self, *args, **kwargs):
+ """
+ Ensure a KeyPair for a given worker type does not exist
+
+ Ensure that a keypair of a given name does not exist.
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["removeKeyPair"], *args, **kwargs)
+
+ def terminateInstance(self, *args, **kwargs):
+ """
+ Terminate an instance
+
+ Terminate an instance in a specified region
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["terminateInstance"], *args, **kwargs)
+
+ def getPrices(self, *args, **kwargs):
+ """
+ Request prices for EC2
+
+ Return a list of possible prices for EC2
+
+ This method gives output: ``v1/prices.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["getPrices"], *args, **kwargs)
+
+ def getSpecificPrices(self, *args, **kwargs):
+ """
+ Request prices for EC2
+
+ Return a list of possible prices for EC2
+
+ This method takes input: ``v1/prices-request.json#``
+
+ This method gives output: ``v1/prices.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["getSpecificPrices"], *args, **kwargs)
+
+ def getHealth(self, *args, **kwargs):
+ """
+ Get EC2 account health metrics
+
+ Give some basic stats on the health of our EC2 account
+
+ This method gives output: ``v1/health.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["getHealth"], *args, **kwargs)
+
+ def getRecentErrors(self, *args, **kwargs):
+ """
+ Look up the most recent errors in the provisioner across all worker types
+
+ Return a list of recent errors encountered
+
+ This method gives output: ``v1/errors.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["getRecentErrors"], *args, **kwargs)
+
+ def regions(self, *args, **kwargs):
+ """
+ See the list of regions managed by this ec2-manager
+
+ This method is only for debugging the ec2-manager
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["regions"], *args, **kwargs)
+
+ def amiUsage(self, *args, **kwargs):
+ """
+ See the list of AMIs and their usage
+
+ List AMIs and their usage by returning a list of objects in the form:
+ {
+ region: string
+ volumetype: string
+ lastused: timestamp
+ }
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["amiUsage"], *args, **kwargs)
+
+ def ebsUsage(self, *args, **kwargs):
+ """
+ See the current EBS volume usage list
+
+ Lists current EBS volume usage by returning a list of objects
+ that are uniquely defined by {region, volumetype, state} in the form:
+ {
+ region: string,
+ volumetype: string,
+ state: string,
+ totalcount: integer,
+ totalgb: integer,
+ touched: timestamp (last time that information was updated),
+ }
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["ebsUsage"], *args, **kwargs)
+
+ def dbpoolStats(self, *args, **kwargs):
+ """
+ Statistics on the Database client pool
+
+ This method is only for debugging the ec2-manager
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["dbpoolStats"], *args, **kwargs)
+
+ def allState(self, *args, **kwargs):
+ """
+ List out the entire internal state
+
+ This method is only for debugging the ec2-manager
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["allState"], *args, **kwargs)
+
+ def sqsStats(self, *args, **kwargs):
+ """
+ Statistics on the sqs queues
+
+ This method is only for debugging the ec2-manager
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["sqsStats"], *args, **kwargs)
+
+ def purgeQueues(self, *args, **kwargs):
+ """
+ Purge the SQS queues
+
+ This method is only for debugging the ec2-manager
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["purgeQueues"], *args, **kwargs)
+
+ funcinfo = {
+ "allState": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'allState',
+ 'route': '/internal/all-state',
+ 'stability': 'experimental',
+ },
+ "amiUsage": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'amiUsage',
+ 'route': '/internal/ami-usage',
+ 'stability': 'experimental',
+ },
+ "dbpoolStats": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'dbpoolStats',
+ 'route': '/internal/db-pool-stats',
+ 'stability': 'experimental',
+ },
+ "ebsUsage": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ebsUsage',
+ 'route': '/internal/ebs-usage',
+ 'stability': 'experimental',
+ },
+ "ensureKeyPair": {
+ 'args': ['name'],
+ 'input': 'v1/create-key-pair.json#',
+ 'method': 'get',
+ 'name': 'ensureKeyPair',
+ 'route': '/key-pairs/<name>',
+ 'stability': 'experimental',
+ },
+ "getHealth": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'getHealth',
+ 'output': 'v1/health.json#',
+ 'route': '/health',
+ 'stability': 'experimental',
+ },
+ "getPrices": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'getPrices',
+ 'output': 'v1/prices.json#',
+ 'route': '/prices',
+ 'stability': 'experimental',
+ },
+ "getRecentErrors": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'getRecentErrors',
+ 'output': 'v1/errors.json#',
+ 'route': '/errors',
+ 'stability': 'experimental',
+ },
+ "getSpecificPrices": {
+ 'args': [],
+ 'input': 'v1/prices-request.json#',
+ 'method': 'post',
+ 'name': 'getSpecificPrices',
+ 'output': 'v1/prices.json#',
+ 'route': '/prices',
+ 'stability': 'experimental',
+ },
+ "listWorkerTypes": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listWorkerTypes',
+ 'output': 'v1/list-worker-types.json#',
+ 'route': '/worker-types',
+ 'stability': 'experimental',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "purgeQueues": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'purgeQueues',
+ 'route': '/internal/purge-queues',
+ 'stability': 'experimental',
+ },
+ "regions": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'regions',
+ 'route': '/internal/regions',
+ 'stability': 'experimental',
+ },
+ "removeKeyPair": {
+ 'args': ['name'],
+ 'method': 'delete',
+ 'name': 'removeKeyPair',
+ 'route': '/key-pairs/<name>',
+ 'stability': 'experimental',
+ },
+ "runInstance": {
+ 'args': ['workerType'],
+ 'input': 'v1/run-instance-request.json#',
+ 'method': 'put',
+ 'name': 'runInstance',
+ 'route': '/worker-types/<workerType>/instance',
+ 'stability': 'experimental',
+ },
+ "sqsStats": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'sqsStats',
+ 'route': '/internal/sqs-stats',
+ 'stability': 'experimental',
+ },
+ "terminateInstance": {
+ 'args': ['region', 'instanceId'],
+ 'method': 'delete',
+ 'name': 'terminateInstance',
+ 'route': '/region/<region>/instance/<instanceId>',
+ 'stability': 'experimental',
+ },
+ "terminateWorkerType": {
+ 'args': ['workerType'],
+ 'method': 'delete',
+ 'name': 'terminateWorkerType',
+ 'route': '/worker-types/<workerType>/resources',
+ 'stability': 'experimental',
+ },
+ "workerTypeErrors": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'workerTypeErrors',
+ 'output': 'v1/errors.json#',
+ 'route': '/worker-types/<workerType>/errors',
+ 'stability': 'experimental',
+ },
+ "workerTypeHealth": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'workerTypeHealth',
+ 'output': 'v1/health.json#',
+ 'route': '/worker-types/<workerType>/health',
+ 'stability': 'experimental',
+ },
+ "workerTypeState": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'workerTypeState',
+ 'output': 'v1/worker-type-state.json#',
+ 'route': '/worker-types/<workerType>/state',
+ 'stability': 'experimental',
+ },
+ "workerTypeStats": {
+ 'args': ['workerType'],
+ 'method': 'get',
+ 'name': 'workerTypeStats',
+ 'output': 'v1/worker-type-resources.json#',
+ 'route': '/worker-types/<workerType>/stats',
+ 'stability': 'experimental',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'EC2Manager']
diff --git a/third_party/python/taskcluster/taskcluster/exceptions.py b/third_party/python/taskcluster/taskcluster/exceptions.py
new file mode 100644
index 0000000000..dae40de50d
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/exceptions.py
@@ -0,0 +1,36 @@
+""" Taskcluster client exceptions """
+
+
+class TaskclusterFailure(Exception):
+ """ Base exception for all Taskcluster client errors"""
+ pass
+
+
+class TaskclusterRestFailure(TaskclusterFailure):
+ """ Failures in the HTTP Rest API """
+    def __init__(self, msg, superExc, status_code=500, body=None):
+        TaskclusterFailure.__init__(self, msg)
+        self.superExc = superExc
+        self.status_code = status_code
+        self.body = body if body is not None else {}
+
+
+class TaskclusterConnectionError(TaskclusterFailure):
+ """ Error connecting to resource """
+ def __init__(self, msg, superExc):
+ TaskclusterFailure.__init__(self, msg, superExc)
+ self.superExc = superExc
+
+
+class TaskclusterAuthFailure(TaskclusterFailure):
+ """ Invalid Credentials """
+    def __init__(self, msg, superExc=None, status_code=500, body=None):
+        TaskclusterFailure.__init__(self, msg)
+        self.superExc = superExc
+        self.status_code = status_code
+        self.body = body if body is not None else {}
+
+
+class TaskclusterTopicExchangeFailure(TaskclusterFailure):
+ """ Error while creating a Topic Exchange routing key """
+ pass
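+
+
+# Illustrative handling (sketch; `client` and `log` are placeholders for a
+# generated API client and a logger):
+#   try:
+#       client.ping()
+#   except TaskclusterConnectionError as e:
+#       log.warning('could not reach the service: %s', e.superExc)
+#   except TaskclusterRestFailure as e:
+#       log.warning('call failed with HTTP %s: %s', e.status_code, e.body)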
diff --git a/third_party/python/taskcluster/taskcluster/github.py b/third_party/python/taskcluster/taskcluster/github.py
new file mode 100644
index 0000000000..56a432a8a4
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/github.py
@@ -0,0 +1,205 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class Github(BaseClient):
+ """
+    The github service is responsible for creating tasks in response
+ to GitHub events, and posting results to the GitHub UI.
+
+ This document describes the API end-point for consuming GitHub
+ web hooks, as well as some useful consumer APIs.
+
+ When Github forbids an action, this service returns an HTTP 403
+ with code ForbiddenByGithub.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'github'
+ apiVersion = 'v1'
+
+ def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ def githubWebHookConsumer(self, *args, **kwargs):
+ """
+ Consume GitHub WebHook
+
+ Capture a GitHub event and publish it via pulse, if it's a push,
+ release or pull request.
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["githubWebHookConsumer"], *args, **kwargs)
+
+ def builds(self, *args, **kwargs):
+ """
+ List of Builds
+
+ A paginated list of builds that have been run in
+ Taskcluster. Can be filtered on various git-specific
+ fields.
+
+ This method gives output: ``v1/build-list.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["builds"], *args, **kwargs)
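+    # Illustrative paginated listing (sketch; the query values are placeholders
+    # and the handler's treatment of each page is an assumption):
+    #   def handler(page):
+    #       for build in page.get('builds', []):
+    #           print(build)
+    #   github.builds(query={'organization': 'my-org'}, paginationHandler=handler)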
+
+ def badge(self, *args, **kwargs):
+ """
+ Latest Build Status Badge
+
+ Checks the status of the latest build of a given branch
+ and returns corresponding badge svg.
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["badge"], *args, **kwargs)
+
+ def repository(self, *args, **kwargs):
+ """
+ Get Repository Info
+
+ Returns any repository metadata that is
+ useful within Taskcluster related services.
+
+ This method gives output: ``v1/repository.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["repository"], *args, **kwargs)
+
+ def latest(self, *args, **kwargs):
+ """
+ Latest Status for Branch
+
+ For a given branch of a repository, this will always point
+ to a status page for the most recent task triggered by that
+ branch.
+
+ Note: This is a redirect rather than a direct link.
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["latest"], *args, **kwargs)
+
+ def createStatus(self, *args, **kwargs):
+ """
+ Post a status against a given changeset
+
+ For a given changeset (SHA) of a repository, this will attach a "commit status"
+ on github. These statuses are links displayed next to each revision.
+ The status is either OK (green check) or FAILURE (red cross),
+ made of a custom title and link.
+
+ This method takes input: ``v1/create-status.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["createStatus"], *args, **kwargs)
+
+ def createComment(self, *args, **kwargs):
+ """
+ Post a comment on a given GitHub Issue or Pull Request
+
+ For a given Issue or Pull Request of a repository, this will write a new message.
+
+ This method takes input: ``v1/create-comment.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["createComment"], *args, **kwargs)
+
+ funcinfo = {
+ "badge": {
+ 'args': ['owner', 'repo', 'branch'],
+ 'method': 'get',
+ 'name': 'badge',
+ 'route': '/repository/<owner>/<repo>/<branch>/badge.svg',
+ 'stability': 'experimental',
+ },
+ "builds": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'builds',
+ 'output': 'v1/build-list.json#',
+ 'query': ['continuationToken', 'limit', 'organization', 'repository', 'sha'],
+ 'route': '/builds',
+ 'stability': 'experimental',
+ },
+ "createComment": {
+ 'args': ['owner', 'repo', 'number'],
+ 'input': 'v1/create-comment.json#',
+ 'method': 'post',
+ 'name': 'createComment',
+ 'route': '/repository/<owner>/<repo>/issues/<number>/comments',
+ 'stability': 'experimental',
+ },
+ "createStatus": {
+ 'args': ['owner', 'repo', 'sha'],
+ 'input': 'v1/create-status.json#',
+ 'method': 'post',
+ 'name': 'createStatus',
+ 'route': '/repository/<owner>/<repo>/statuses/<sha>',
+ 'stability': 'experimental',
+ },
+ "githubWebHookConsumer": {
+ 'args': [],
+ 'method': 'post',
+ 'name': 'githubWebHookConsumer',
+ 'route': '/github',
+ 'stability': 'experimental',
+ },
+ "latest": {
+ 'args': ['owner', 'repo', 'branch'],
+ 'method': 'get',
+ 'name': 'latest',
+ 'route': '/repository/<owner>/<repo>/<branch>/latest',
+ 'stability': 'experimental',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "repository": {
+ 'args': ['owner', 'repo'],
+ 'method': 'get',
+ 'name': 'repository',
+ 'output': 'v1/repository.json#',
+ 'route': '/repository/<owner>/<repo>',
+ 'stability': 'experimental',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Github']
diff --git a/third_party/python/taskcluster/taskcluster/githubevents.py b/third_party/python/taskcluster/taskcluster/githubevents.py
new file mode 100644
index 0000000000..73d37b608f
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/githubevents.py
@@ -0,0 +1,194 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class GithubEvents(BaseClient):
+ """
+ The github service publishes a pulse
+ message for supported github events, translating Github webhook
+ events into pulse messages.
+
+ This document describes the exchange offered by the taskcluster
+ github service
+ """
+
+ classOptions = {
+ "exchangePrefix": "exchange/taskcluster-github/v1/",
+ }
+ serviceName = 'github'
+ apiVersion = 'v1'
+
+ def pullRequest(self, *args, **kwargs):
+ """
+ GitHub Pull Request Event
+
+ When a GitHub pull request event is posted it will be broadcast on this
+ exchange with the designated `organization` and `repository`
+ in the routing-key along with event specific metadata in the payload.
+
+        This exchange outputs: ``v1/github-pull-request-message.json#``
+
+        This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. (required)
+
+ * organization: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+
+         * repository: The GitHub `repository` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+
+         * action: The GitHub `action` which triggered an event. For possible values, see the payload actions property. (required)
+ """
+
+ ref = {
+ 'exchange': 'pull-request',
+ 'name': 'pullRequest',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'organization',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'repository',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'action',
+ },
+ ],
+ 'schema': 'v1/github-pull-request-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
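+    # Illustrative binding (sketch; organization/repository values are
+    # placeholders):
+    #   events = GithubEvents({'rootUrl': 'https://tc.example.com'})
+    #   binding = events.pullRequest(organization='my-org', repository='my-repo')
+    #   # `action` is left unspecified, so it becomes '*' in the resulting
+    #   # routing key pattern.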
+
+ def push(self, *args, **kwargs):
+ """
+ GitHub push Event
+
+ When a GitHub push event is posted it will be broadcast on this
+ exchange with the designated `organization` and `repository`
+ in the routing-key along with event specific metadata in the payload.
+
+        This exchange outputs: ``v1/github-push-message.json#``
+
+        This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. (required)
+
+ * organization: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+
+         * repository: The GitHub `repository` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+ """
+
+ ref = {
+ 'exchange': 'push',
+ 'name': 'push',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'organization',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'repository',
+ },
+ ],
+ 'schema': 'v1/github-push-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def release(self, *args, **kwargs):
+ """
+ GitHub release Event
+
+ When a GitHub release event is posted it will be broadcast on this
+ exchange with the designated `organization` and `repository`
+ in the routing-key along with event specific metadata in the payload.
+
+        This exchange outputs: ``v1/github-release-message.json#``
+
+        This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. (required)
+
+ * organization: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+
+         * repository: The GitHub `repository` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+ """
+
+ ref = {
+ 'exchange': 'release',
+ 'name': 'release',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'organization',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'repository',
+ },
+ ],
+ 'schema': 'v1/github-release-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def taskGroupDefined(self, *args, **kwargs):
+ """
+        Task Group Defined Event
+
+        Used for creating status indicators in the GitHub UI using the Statuses API.
+
+        This exchange outputs: ``v1/task-group-defined-message.json#``
+
+        This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. (required)
+
+ * organization: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+
+         * repository: The GitHub `repository` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
+ """
+
+ ref = {
+ 'exchange': 'task-group-defined',
+ 'name': 'taskGroupDefined',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'organization',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'repository',
+ },
+ ],
+ 'schema': 'v1/task-group-defined-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ funcinfo = {
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'GithubEvents']
diff --git a/third_party/python/taskcluster/taskcluster/hooks.py b/third_party/python/taskcluster/taskcluster/hooks.py
new file mode 100644
index 0000000000..b9608e3ec0
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/hooks.py
@@ -0,0 +1,324 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class Hooks(BaseClient):
+ """
+ Hooks are a mechanism for creating tasks in response to events.
+
+ Hooks are identified with a `hookGroupId` and a `hookId`.
+
+ When an event occurs, the resulting task is automatically created. The
+ task is created using the scope `assume:hook-id:<hookGroupId>/<hookId>`,
+ which must have scopes to make the createTask call, including satisfying all
+ scopes in `task.scopes`. The new task has a `taskGroupId` equal to its
+ `taskId`, as is the convention for decision tasks.
+
+ Hooks can have a "schedule" indicating specific times that new tasks should
+ be created. Each schedule is in a simple cron format, per
+ https://www.npmjs.com/package/cron-parser. For example:
+ * `['0 0 1 * * *']` -- daily at 1:00 UTC
+ * `['0 0 9,21 * * 1-5', '0 0 12 * * 0,6']` -- weekdays at 9:00 and 21:00 UTC, weekends at noon
+
+ The task definition is used as a JSON-e template, with a context depending on how it is fired. See
+    [firing-hooks](/docs/reference/core/taskcluster-hooks/docs/firing-hooks)
+ for more information.
+ """
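+    # Illustrative usage (sketch; hookGroupId, hookId and the trigger payload are
+    # placeholders and must match the hook's triggerSchema):
+    #   hooks = Hooks({'rootUrl': 'https://tc.example.com',
+    #                  'credentials': {'clientId': 'my-client',
+    #                                  'accessToken': 'my-token'}})
+    #   status = hooks.triggerHook('my-hook-group', 'my-hook', {'branch': 'main'})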
+
+ classOptions = {
+ }
+ serviceName = 'hooks'
+ apiVersion = 'v1'
+
+ def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ def listHookGroups(self, *args, **kwargs):
+ """
+ List hook groups
+
+ This endpoint will return a list of all hook groups with at least one hook.
+
+ This method gives output: ``v1/list-hook-groups-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["listHookGroups"], *args, **kwargs)
+
+ def listHooks(self, *args, **kwargs):
+ """
+ List hooks in a given group
+
+ This endpoint will return a list of all the hook definitions within a
+ given hook group.
+
+ This method gives output: ``v1/list-hooks-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["listHooks"], *args, **kwargs)
+
+ def hook(self, *args, **kwargs):
+ """
+ Get hook definition
+
+ This endpoint will return the hook definition for the given `hookGroupId`
+        and `hookId`.
+
+ This method gives output: ``v1/hook-definition.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["hook"], *args, **kwargs)
+
+ def getHookStatus(self, *args, **kwargs):
+ """
+ Get hook status
+
+ This endpoint will return the current status of the hook. This represents a
+ snapshot in time and may vary from one call to the next.
+
+ This method gives output: ``v1/hook-status.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["getHookStatus"], *args, **kwargs)
+
+ def createHook(self, *args, **kwargs):
+ """
+ Create a hook
+
+ This endpoint will create a new hook.
+
+ The caller's credentials must include the role that will be used to
+ create the task. That role must satisfy task.scopes as well as the
+ necessary scopes to add the task to the queue.
+
+
+ This method takes input: ``v1/create-hook-request.json#``
+
+ This method gives output: ``v1/hook-definition.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["createHook"], *args, **kwargs)
+
+ def updateHook(self, *args, **kwargs):
+ """
+ Update a hook
+
+ This endpoint will update an existing hook. All fields except
+ `hookGroupId` and `hookId` can be modified.
+
+ This method takes input: ``v1/create-hook-request.json#``
+
+ This method gives output: ``v1/hook-definition.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["updateHook"], *args, **kwargs)
+
+ def removeHook(self, *args, **kwargs):
+ """
+ Delete a hook
+
+ This endpoint will remove a hook definition.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["removeHook"], *args, **kwargs)
+
+ def triggerHook(self, *args, **kwargs):
+ """
+ Trigger a hook
+
+ This endpoint will trigger the creation of a task from a hook definition.
+
+        The HTTP payload must match the hook's `triggerSchema`. If it does, it is
+ provided as the `payload` property of the JSON-e context used to render the
+ task template.
+
+ This method takes input: ``v1/trigger-hook.json#``
+
+ This method gives output: ``v1/task-status.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["triggerHook"], *args, **kwargs)
+
+ def getTriggerToken(self, *args, **kwargs):
+ """
+ Get a trigger token
+
+ Retrieve a unique secret token for triggering the specified hook. This
+ token can be deactivated with `resetTriggerToken`.
+
+ This method gives output: ``v1/trigger-token-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["getTriggerToken"], *args, **kwargs)
+
+ def resetTriggerToken(self, *args, **kwargs):
+ """
+ Reset a trigger token
+
+        Reset the token for triggering a given hook. This invalidates any token
+        that may have been issued via getTriggerToken, replacing it with a new
+        token.
+
+ This method gives output: ``v1/trigger-token-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["resetTriggerToken"], *args, **kwargs)
+
+ def triggerHookWithToken(self, *args, **kwargs):
+ """
+ Trigger a hook with a token
+
+ This endpoint triggers a defined hook with a valid token.
+
+        The HTTP payload must match the hook's `triggerSchema`. If it does, it is
+ provided as the `payload` property of the JSON-e context used to render the
+ task template.
+
+ This method takes input: ``v1/trigger-hook.json#``
+
+ This method gives output: ``v1/task-status.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["triggerHookWithToken"], *args, **kwargs)
+
+ funcinfo = {
+ "createHook": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'input': 'v1/create-hook-request.json#',
+ 'method': 'put',
+ 'name': 'createHook',
+ 'output': 'v1/hook-definition.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>',
+ 'stability': 'stable',
+ },
+ "getHookStatus": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'method': 'get',
+ 'name': 'getHookStatus',
+ 'output': 'v1/hook-status.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>/status',
+ 'stability': 'stable',
+ },
+ "getTriggerToken": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'method': 'get',
+ 'name': 'getTriggerToken',
+ 'output': 'v1/trigger-token-response.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>/token',
+ 'stability': 'stable',
+ },
+ "hook": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'method': 'get',
+ 'name': 'hook',
+ 'output': 'v1/hook-definition.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>',
+ 'stability': 'stable',
+ },
+ "listHookGroups": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listHookGroups',
+ 'output': 'v1/list-hook-groups-response.json#',
+ 'route': '/hooks',
+ 'stability': 'stable',
+ },
+ "listHooks": {
+ 'args': ['hookGroupId'],
+ 'method': 'get',
+ 'name': 'listHooks',
+ 'output': 'v1/list-hooks-response.json#',
+ 'route': '/hooks/<hookGroupId>',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "removeHook": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'method': 'delete',
+ 'name': 'removeHook',
+ 'route': '/hooks/<hookGroupId>/<hookId>',
+ 'stability': 'stable',
+ },
+ "resetTriggerToken": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'method': 'post',
+ 'name': 'resetTriggerToken',
+ 'output': 'v1/trigger-token-response.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>/token',
+ 'stability': 'stable',
+ },
+ "triggerHook": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'input': 'v1/trigger-hook.json#',
+ 'method': 'post',
+ 'name': 'triggerHook',
+ 'output': 'v1/task-status.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>/trigger',
+ 'stability': 'stable',
+ },
+ "triggerHookWithToken": {
+ 'args': ['hookGroupId', 'hookId', 'token'],
+ 'input': 'v1/trigger-hook.json#',
+ 'method': 'post',
+ 'name': 'triggerHookWithToken',
+ 'output': 'v1/task-status.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>/trigger/<token>',
+ 'stability': 'stable',
+ },
+ "updateHook": {
+ 'args': ['hookGroupId', 'hookId'],
+ 'input': 'v1/create-hook-request.json#',
+ 'method': 'post',
+ 'name': 'updateHook',
+ 'output': 'v1/hook-definition.json#',
+ 'route': '/hooks/<hookGroupId>/<hookId>',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Hooks']
diff --git a/third_party/python/taskcluster/taskcluster/index.py b/third_party/python/taskcluster/taskcluster/index.py
new file mode 100644
index 0000000000..fddda2ef1b
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/index.py
@@ -0,0 +1,278 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class Index(BaseClient):
+ """
+ The task index, typically available at `index.taskcluster.net`, is
+ responsible for indexing tasks. The service ensures that tasks can be
+ located by recency and/or arbitrary strings. Common use-cases include:
+
+ * Locate tasks by git or mercurial `<revision>`, or
+ * Locate latest task from given `<branch>`, such as a release.
+
+    **Index hierarchy**, tasks are indexed in a dot (`.`) separated hierarchy
+    called a namespace. For example, a task could be indexed with the index path
+    `some-app.<revision>.linux-64.release-build`. In this case the following
+    namespaces are created:
+
+    1. `some-app`,
+    2. `some-app.<revision>`, and
+    3. `some-app.<revision>.linux-64`
+
+ Inside the namespace `some-app.<revision>` you can find the namespace
+ `some-app.<revision>.linux-64` inside which you can find the indexed task
+ `some-app.<revision>.linux-64.release-build`. This is an example of indexing
+ builds for a given platform and revision.
+
+ **Task Rank**, when a task is indexed, it is assigned a `rank` (defaults
+ to `0`). If another task is already indexed in the same namespace with
+ lower or equal `rank`, the index for that task will be overwritten. For example
+ consider index path `mozilla-central.linux-64.release-build`. In
+ this case one might choose to use a UNIX timestamp or mercurial revision
+ number as `rank`. This way the latest completed linux 64 bit release
+ build is always available at `mozilla-central.linux-64.release-build`.
+
+ Note that this does mean index paths are not immutable: the same path may
+ point to a different task now than it did a moment ago.
+
+ **Indexed Data**, when a task is retrieved from the index the result includes
+ a `taskId` and an additional user-defined JSON blob that was indexed with
+ the task.
+
+ **Entry Expiration**, all indexed entries must have an expiration date.
+ Typically this defaults to one year, if not specified. If you are
+ indexing tasks to make it easy to find artifacts, consider using the
+ artifact's expiration date.
+
+ **Valid Characters**, all keys in a namespace `<key1>.<key2>` must be
+    in the form `/[a-zA-Z0-9_!~*'()%-]+/`. Observe that this is URL-safe; if you
+    really need to use another character, you can URL-encode it.
+
+ **Indexing Routes**, tasks can be indexed using the API below, but the
+ most common way to index tasks is adding a custom route to `task.routes` of the
+ form `index.<namespace>`. In order to add this route to a task you'll
+ need the scope `queue:route:index.<namespace>`. When a task has
+ this route, it will be indexed when the task is **completed successfully**.
+ The task will be indexed with `rank`, `data` and `expires` as specified
+ in `task.extra.index`. See the example below:
+
+ ```
+ {
+ payload: { /* ... */ },
+ routes: [
+ // index.<namespace> prefixed routes, tasks CC'ed such a route will
+ // be indexed under the given <namespace>
+ "index.mozilla-central.linux-64.release-build",
+ "index.<revision>.linux-64.release-build"
+ ],
+ extra: {
+ // Optional details for indexing service
+ index: {
+          // Ordering, this taskId will overwrite anything that has
+ // rank <= 4000 (defaults to zero)
+ rank: 4000,
+
+ // Specify when the entries expire (Defaults to 1 year)
+ expires: new Date().toJSON(),
+
+ // A little informal data to store along with taskId
+          // (less than 16 kb when encoded as JSON)
+ data: {
+ hgRevision: "...",
+            commitMessage: "...",
+ whatever...
+ }
+ },
+ // Extra properties for other services...
+ }
+ // Other task properties...
+ }
+ ```
+
+ **Remark**, when indexing tasks using custom routes, it's also possible
+ to listen for messages about these tasks. For
+ example one could bind to `route.index.some-app.*.release-build`,
+ and pick up all messages about release builds. Hence, it is a
+ good idea to document task index hierarchies, as these make up extension
+    points in their own right.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'index'
+ apiVersion = 'v1'
+
+ def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ def findTask(self, *args, **kwargs):
+ """
+ Find Indexed Task
+
+ Find a task by index path, returning the highest-rank task with that path. If no
+ task exists for the given path, this API end-point will respond with a 404 status.
+
+ This method gives output: ``v1/indexed-task-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["findTask"], *args, **kwargs)
+
+ def listNamespaces(self, *args, **kwargs):
+ """
+ List Namespaces
+
+ List the namespaces immediately under a given namespace.
+
+ This endpoint
+ lists up to 1000 namespaces. If more namespaces are present, a
+ `continuationToken` will be returned, which can be given in the next
+ request. For the initial request, the payload should be an empty JSON
+ object.
+
+ This method gives output: ``v1/list-namespaces-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["listNamespaces"], *args, **kwargs)
+
+ def listTasks(self, *args, **kwargs):
+ """
+ List Tasks
+
+ List the tasks immediately under a given namespace.
+
+ This endpoint
+ lists up to 1000 tasks. If more tasks are present, a
+ `continuationToken` will be returned, which can be given in the next
+ request. For the initial request, the payload should be an empty JSON
+ object.
+
+        **Remark**, this end-point is designed for humans browsing for tasks, not
+        for services, as that makes little sense.
+
+ This method gives output: ``v1/list-tasks-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["listTasks"], *args, **kwargs)
+
+ def insertTask(self, *args, **kwargs):
+ """
+ Insert Task into Index
+
+ Insert a task into the index. If the new rank is less than the existing rank
+ at the given index path, the task is not indexed but the response is still 200 OK.
+
+ Please see the introduction above for information
+ about indexing successfully completed tasks automatically using custom routes.
+
+ This method takes input: ``v1/insert-task-request.json#``
+
+ This method gives output: ``v1/indexed-task-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["insertTask"], *args, **kwargs)
+
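+    # Hedged sketch: inserting a task and finding it again by index path. The
+    # namespace, taskId, data and dates are illustrative; the body must
+    # validate against v1/insert-task-request.json#.
+    #
+    #   import taskcluster
+    #   index = taskcluster.Index({'rootUrl': 'https://tc.example.com'})
+    #   index.insertTask('some-app.v1.linux-64.release-build', {
+    #       'taskId': 'fN1SbArXTPSVFNUvaOlinQ',   # a 22-character slugid
+    #       'rank': 4000,
+    #       'data': {'hgRevision': 'abc123'},
+    #       'expires': '2025-01-01T00:00:00.000Z',
+    #   })
+    #   found = index.findTask('some-app.v1.linux-64.release-build')
+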
+ def findArtifactFromTask(self, *args, **kwargs):
+ """
+ Get Artifact From Indexed Task
+
+ Find a task by index path and redirect to the artifact on the most recent
+ run with the given `name`.
+
+        Note that multiple calls to this endpoint may return artifacts from different tasks
+ if a new task is inserted into the index between calls. Avoid using this method as
+ a stable link to multiple, connected files if the index path does not contain a
+ unique identifier. For example, the following two links may return unrelated files:
+        * https://index.taskcluster.net/task/some-app.win64.latest.installer/artifacts/public/installer.exe
+        * https://index.taskcluster.net/task/some-app.win64.latest.installer/artifacts/public/debug-symbols.zip
+
+        This problem can be remedied by including the revision in the index path or by bundling both
+ installer and debug symbols into a single artifact.
+
+ If no task exists for the given index path, this API end-point responds with 404.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["findArtifactFromTask"], *args, **kwargs)
+
+ funcinfo = {
+ "findArtifactFromTask": {
+ 'args': ['indexPath', 'name'],
+ 'method': 'get',
+ 'name': 'findArtifactFromTask',
+ 'route': '/task/<indexPath>/artifacts/<name>',
+ 'stability': 'stable',
+ },
+ "findTask": {
+ 'args': ['indexPath'],
+ 'method': 'get',
+ 'name': 'findTask',
+ 'output': 'v1/indexed-task-response.json#',
+ 'route': '/task/<indexPath>',
+ 'stability': 'stable',
+ },
+ "insertTask": {
+ 'args': ['namespace'],
+ 'input': 'v1/insert-task-request.json#',
+ 'method': 'put',
+ 'name': 'insertTask',
+ 'output': 'v1/indexed-task-response.json#',
+ 'route': '/task/<namespace>',
+ 'stability': 'stable',
+ },
+ "listNamespaces": {
+ 'args': ['namespace'],
+ 'method': 'get',
+ 'name': 'listNamespaces',
+ 'output': 'v1/list-namespaces-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/namespaces/<namespace>',
+ 'stability': 'stable',
+ },
+ "listTasks": {
+ 'args': ['namespace'],
+ 'method': 'get',
+ 'name': 'listTasks',
+ 'output': 'v1/list-tasks-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/tasks/<namespace>',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Index']
diff --git a/third_party/python/taskcluster/taskcluster/login.py b/third_party/python/taskcluster/taskcluster/login.py
new file mode 100644
index 0000000000..235b60566d
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/login.py
@@ -0,0 +1,89 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class Login(BaseClient):
+ """
+ The Login service serves as the interface between external authentication
+ systems and Taskcluster credentials.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'login'
+ apiVersion = 'v1'
+
+ def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ def oidcCredentials(self, *args, **kwargs):
+ """
+ Get Taskcluster credentials given a suitable `access_token`
+
+ Given an OIDC `access_token` from a trusted OpenID provider, return a
+ set of Taskcluster credentials for use on behalf of the identified
+ user.
+
+ This method is typically not called with a Taskcluster client library
+ and does not accept Hawk credentials. The `access_token` should be
+ given in an `Authorization` header:
+ ```
+ Authorization: Bearer abc.xyz
+ ```
+
+ The `access_token` is first verified against the named
+        `provider`, then passed to the provider's APIBuilder to retrieve a user
+ profile. That profile is then used to generate Taskcluster credentials
+ appropriate to the user. Note that the resulting credentials may or may
+ not include a `certificate` property. Callers should be prepared for either
+ alternative.
+
+        The given credentials will expire in a relatively short time. Callers should
+        monitor this expiration and, when the credentials have expired, refresh them
+        by calling this endpoint again.
+
+ This method gives output: ``v1/oidc-credentials-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["oidcCredentials"], *args, **kwargs)
+
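+    # Hedged sketch: as the docstring notes, this endpoint is normally called
+    # directly over HTTP with a Bearer token rather than through this client.
+    # The URL and provider name below are illustrative only.
+    #
+    #   import requests
+    #   resp = requests.get(
+    #       'https://login.taskcluster.net/v1/oidc-credentials/mozilla-auth0',
+    #       headers={'Authorization': 'Bearer abc.xyz'})
+    #   credentials = resp.json().get('credentials')
+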
+ funcinfo = {
+ "oidcCredentials": {
+ 'args': ['provider'],
+ 'method': 'get',
+ 'name': 'oidcCredentials',
+ 'output': 'v1/oidc-credentials-response.json#',
+ 'route': '/oidc-credentials/<provider>',
+ 'stability': 'experimental',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Login']
diff --git a/third_party/python/taskcluster/taskcluster/notify.py b/third_party/python/taskcluster/taskcluster/notify.py
new file mode 100644
index 0000000000..adc5b1d315
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/notify.py
@@ -0,0 +1,125 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class Notify(BaseClient):
+ """
+    The notification service, typically available at `notify.taskcluster.net`,
+ listens for tasks with associated notifications and handles requests to
+ send emails and post pulse messages.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'notify'
+ apiVersion = 'v1'
+
+ def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ def email(self, *args, **kwargs):
+ """
+ Send an Email
+
+ Send an email to `address`. The content is markdown and will be rendered
+ to HTML, but both the HTML and raw markdown text will be sent in the
+ email. If a link is included, it will be rendered to a nice button in the
+        HTML version of the email.
+
+ This method takes input: ``v1/email-request.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["email"], *args, **kwargs)
+
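+    # Hedged sketch: sending a notification email. Field names follow
+    # v1/email-request.json# as understood here; the values are illustrative.
+    #
+    #   import taskcluster
+    #   notify = taskcluster.Notify({'rootUrl': 'https://tc.example.com'})
+    #   notify.email({
+    #       'address': 'dev@example.com',
+    #       'subject': 'Nightly build finished',
+    #       'content': 'The **nightly** build completed.',
+    #       'link': {'text': 'Inspect task', 'href': 'https://example.com/task'},
+    #   })
+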
+ def pulse(self, *args, **kwargs):
+ """
+ Publish a Pulse Message
+
+ Publish a message on pulse with the given `routingKey`.
+
+ This method takes input: ``v1/pulse-request.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["pulse"], *args, **kwargs)
+
+ def irc(self, *args, **kwargs):
+ """
+ Post IRC Message
+
+ Post a message on IRC to a specific channel or user, or a specific user
+ on a specific channel.
+
+ Success of this API method does not imply the message was successfully
+ posted. This API method merely inserts the IRC message into a queue
+ that will be processed by a background process.
+ This allows us to re-send the message in face of connection issues.
+
+ However, if the user isn't online the message will be dropped without
+        error. We may improve this behavior in the future. For now just keep
+ in mind that IRC is a best-effort service.
+
+ This method takes input: ``v1/irc-request.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["irc"], *args, **kwargs)
+
+ funcinfo = {
+ "email": {
+ 'args': [],
+ 'input': 'v1/email-request.json#',
+ 'method': 'post',
+ 'name': 'email',
+ 'route': '/email',
+ 'stability': 'experimental',
+ },
+ "irc": {
+ 'args': [],
+ 'input': 'v1/irc-request.json#',
+ 'method': 'post',
+ 'name': 'irc',
+ 'route': '/irc',
+ 'stability': 'experimental',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "pulse": {
+ 'args': [],
+ 'input': 'v1/pulse-request.json#',
+ 'method': 'post',
+ 'name': 'pulse',
+ 'route': '/pulse',
+ 'stability': 'experimental',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Notify']
diff --git a/third_party/python/taskcluster/taskcluster/pulse.py b/third_party/python/taskcluster/taskcluster/pulse.py
new file mode 100644
index 0000000000..d6959f46fd
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/pulse.py
@@ -0,0 +1,135 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class Pulse(BaseClient):
+ """
+    The taskcluster-pulse service, typically available at `pulse.taskcluster.net`,
+ manages pulse credentials for taskcluster users.
+
+ A service to manage Pulse credentials for anything using
+ Taskcluster credentials. This allows for self-service pulse
+ access and greater control within the Taskcluster project.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'pulse'
+ apiVersion = 'v1'
+
+ def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ def listNamespaces(self, *args, **kwargs):
+ """
+ List Namespaces
+
+ List the namespaces managed by this service.
+
+ This will list up to 1000 namespaces. If more namespaces are present a
+ `continuationToken` will be returned, which can be given in the next
+        request. For the initial request, do not provide a continuation token.
+
+ This method gives output: ``v1/list-namespaces-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["listNamespaces"], *args, **kwargs)
+
+ def namespace(self, *args, **kwargs):
+ """
+ Get a namespace
+
+ Get public information about a single namespace. This is the same information
+ as returned by `listNamespaces`.
+
+ This method gives output: ``v1/namespace.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["namespace"], *args, **kwargs)
+
+ def claimNamespace(self, *args, **kwargs):
+ """
+ Claim a namespace
+
+ Claim a namespace, returning a connection string with access to that namespace
+ good for use until the `reclaimAt` time in the response body. The connection
+ string can be used as many times as desired during this period, but must not
+ be used after `reclaimAt`.
+
+ Connections made with this connection string may persist beyond `reclaimAt`,
+        although they should not persist forever. 24 hours is a good maximum, and this
+ service will terminate connections after 72 hours (although this value is
+ configurable).
+
+ The specified `expires` time updates any existing expiration times. Connections
+ for expired namespaces will be terminated.
+
+ This method takes input: ``v1/namespace-request.json#``
+
+ This method gives output: ``v1/namespace-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["claimNamespace"], *args, **kwargs)
+
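+    # Hedged sketch: claiming a namespace and using the returned connection
+    # string until `reclaimAt`. The namespace and field names are assumptions
+    # for illustration; the body must validate against
+    # v1/namespace-request.json#.
+    #
+    #   import taskcluster
+    #   pulse = taskcluster.Pulse({'rootUrl': 'https://tc.example.com'})
+    #   ns = pulse.claimNamespace('my-namespace', {
+    #       'expires': '2021-12-31T00:00:00.000Z',
+    #       'contact': 'dev@example.com',
+    #   })
+    #   # ns should contain the connection string and a `reclaimAt` timestamp.
+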
+ funcinfo = {
+ "claimNamespace": {
+ 'args': ['namespace'],
+ 'input': 'v1/namespace-request.json#',
+ 'method': 'post',
+ 'name': 'claimNamespace',
+ 'output': 'v1/namespace-response.json#',
+ 'route': '/namespace/<namespace>',
+ 'stability': 'experimental',
+ },
+ "listNamespaces": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listNamespaces',
+ 'output': 'v1/list-namespaces-response.json#',
+ 'query': ['limit', 'continuationToken'],
+ 'route': '/namespaces',
+ 'stability': 'experimental',
+ },
+ "namespace": {
+ 'args': ['namespace'],
+ 'method': 'get',
+ 'name': 'namespace',
+ 'output': 'v1/namespace.json#',
+ 'route': '/namespace/<namespace>',
+ 'stability': 'experimental',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Pulse']
diff --git a/third_party/python/taskcluster/taskcluster/purgecache.py b/third_party/python/taskcluster/taskcluster/purgecache.py
new file mode 100644
index 0000000000..a49e6bc831
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/purgecache.py
@@ -0,0 +1,124 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class PurgeCache(BaseClient):
+ """
+ The purge-cache service is responsible for publishing a pulse
+ message for workers, so they can purge cache upon request.
+
+ This document describes the API end-point for publishing the pulse
+ message. This is mainly intended to be used by tools.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'purge-cache'
+ apiVersion = 'v1'
+
+ def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ def purgeCache(self, *args, **kwargs):
+ """
+ Purge Worker Cache
+
+ Publish a purge-cache message to purge caches named `cacheName` with
+ `provisionerId` and `workerType` in the routing-key. Workers should
+ be listening for this message and purge caches when they see it.
+
+ This method takes input: ``v1/purge-cache-request.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["purgeCache"], *args, **kwargs)
+
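+    # Hedged sketch: asking workers of one provisioner/workerType to purge a
+    # named cache. Identifiers are illustrative; the body must validate
+    # against v1/purge-cache-request.json#.
+    #
+    #   import taskcluster
+    #   pc = taskcluster.PurgeCache({'rootUrl': 'https://tc.example.com'})
+    #   pc.purgeCache('aws-provisioner-v1', 'gecko-b-1-w2008',
+    #                 {'cacheName': 'level-1-checkouts'})
+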
+ def allPurgeRequests(self, *args, **kwargs):
+ """
+ All Open Purge Requests
+
+        This is useful mostly for administrators to view
+ the set of open purge requests. It should not
+ be used by workers. They should use the purgeRequests
+ endpoint that is specific to their workerType and
+ provisionerId.
+
+ This method gives output: ``v1/all-purge-cache-request-list.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["allPurgeRequests"], *args, **kwargs)
+
+ def purgeRequests(self, *args, **kwargs):
+ """
+ Open Purge Requests for a provisionerId/workerType pair
+
+ List of caches that need to be purged if they are from before
+        a certain time. This is safe to use in automation from
+ workers.
+
+ This method gives output: ``v1/purge-cache-request-list.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["purgeRequests"], *args, **kwargs)
+
+ funcinfo = {
+ "allPurgeRequests": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'allPurgeRequests',
+ 'output': 'v1/all-purge-cache-request-list.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/purge-cache/list',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "purgeCache": {
+ 'args': ['provisionerId', 'workerType'],
+ 'input': 'v1/purge-cache-request.json#',
+ 'method': 'post',
+ 'name': 'purgeCache',
+ 'route': '/purge-cache/<provisionerId>/<workerType>',
+ 'stability': 'stable',
+ },
+ "purgeRequests": {
+ 'args': ['provisionerId', 'workerType'],
+ 'method': 'get',
+ 'name': 'purgeRequests',
+ 'output': 'v1/purge-cache-request-list.json#',
+ 'query': ['since'],
+ 'route': '/purge-cache/<provisionerId>/<workerType>',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'PurgeCache']
diff --git a/third_party/python/taskcluster/taskcluster/purgecacheevents.py b/third_party/python/taskcluster/taskcluster/purgecacheevents.py
new file mode 100644
index 0000000000..97637f1904
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/purgecacheevents.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class PurgeCacheEvents(BaseClient):
+ """
+ The purge-cache service, typically available at
+ `purge-cache.taskcluster.net`, is responsible for publishing a pulse
+ message for workers, so they can purge cache upon request.
+
+ This document describes the exchange offered for workers by the
+ cache-purge service.
+ """
+
+ classOptions = {
+ "exchangePrefix": "exchange/taskcluster-purge-cache/v1/",
+ }
+ serviceName = 'purge-cache'
+ apiVersion = 'v1'
+
+ def purgeCache(self, *args, **kwargs):
+ """
+ Purge Cache Messages
+
+ When a cache purge is requested a message will be posted on this
+ exchange with designated `provisionerId` and `workerType` in the
+        routing-key and the name of the `cacheFolder` as payload.
+
+        This exchange outputs: ``v1/purge-cache-message.json#``
+
+        This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * provisionerId: `provisionerId` under which to purge cache. (required)
+
+ * workerType: `workerType` for which to purge cache. (required)
+ """
+
+ ref = {
+ 'exchange': 'purge-cache',
+ 'name': 'purgeCache',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ ],
+ 'schema': 'v1/purge-cache-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
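+    # Hedged sketch: building a binding for this exchange, to be used with a
+    # separate pulse/AMQP consumer. The identifiers are illustrative, and the
+    # shape of the returned binding is assumed from the generic client.
+    #
+    #   import taskcluster
+    #   events = taskcluster.PurgeCacheEvents({'rootUrl': 'https://tc.example.com'})
+    #   binding = events.purgeCache(provisionerId='aws-provisioner-v1',
+    #                               workerType='gecko-b-1-w2008')
+    #   # binding describes the exchange and routing-key pattern to bind to.
+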
+ funcinfo = {
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'PurgeCacheEvents']
diff --git a/third_party/python/taskcluster/taskcluster/queue.py b/third_party/python/taskcluster/taskcluster/queue.py
new file mode 100644
index 0000000000..408e526ad2
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/queue.py
@@ -0,0 +1,1134 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class Queue(BaseClient):
+ """
+ The queue, typically available at `queue.taskcluster.net`, is responsible
+    for accepting tasks and tracking their state as they are executed by
+    workers, in order to ensure they are eventually resolved.
+
+ This document describes the API end-points offered by the queue. These
+    end-points target the following audiences:
+    * Schedulers, who create tasks to be executed,
+    * Workers, who execute tasks, and
+    * Tools that want to inspect the state of a task.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'queue'
+ apiVersion = 'v1'
+
+ def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ def task(self, *args, **kwargs):
+ """
+ Get Task Definition
+
+ This end-point will return the task-definition. Notice that the task
+        definition may have been modified by the queue: if an optional property is
+        not specified, the queue may provide a default value.
+
+ This method gives output: ``v1/task.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["task"], *args, **kwargs)
+
+ def status(self, *args, **kwargs):
+ """
+ Get task status
+
+ Get task status structure from `taskId`
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["status"], *args, **kwargs)
+
+ def listTaskGroup(self, *args, **kwargs):
+ """
+ List Task Group
+
+ List tasks sharing the same `taskGroupId`.
+
+ As a task-group may contain an unbounded number of tasks, this end-point
+ may return a `continuationToken`. To continue listing tasks you must call
+ the `listTaskGroup` again with the `continuationToken` as the
+ query-string option `continuationToken`.
+
+ By default this end-point will try to return up to 1000 members in one
+ request. But it **may return less**, even if more tasks are available.
+ It may also return a `continuationToken` even though there are no more
+ results. However, you can only be sure to have seen all results if you
+ keep calling `listTaskGroup` with the last `continuationToken` until you
+ get a result without a `continuationToken`.
+
+ If you are not interested in listing all the members at once, you may
+ use the query-string option `limit` to return fewer.
+
+ This method gives output: ``v1/list-task-group-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["listTaskGroup"], *args, **kwargs)
+
+ def listDependentTasks(self, *args, **kwargs):
+ """
+ List Dependent Tasks
+
+ List tasks that depend on the given `taskId`.
+
+        As many tasks from different task-groups may depend on a single task,
+ this end-point may return a `continuationToken`. To continue listing
+ tasks you must call `listDependentTasks` again with the
+ `continuationToken` as the query-string option `continuationToken`.
+
+ By default this end-point will try to return up to 1000 tasks in one
+ request. But it **may return less**, even if more tasks are available.
+ It may also return a `continuationToken` even though there are no more
+ results. However, you can only be sure to have seen all results if you
+ keep calling `listDependentTasks` with the last `continuationToken` until
+ you get a result without a `continuationToken`.
+
+ If you are not interested in listing all the tasks at once, you may
+ use the query-string option `limit` to return fewer.
+
+ This method gives output: ``v1/list-dependent-tasks-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["listDependentTasks"], *args, **kwargs)
+
+ def createTask(self, *args, **kwargs):
+ """
+ Create New Task
+
+ Create a new task, this is an **idempotent** operation, so repeat it if
+ you get an internal server error or network connection is dropped.
+
+ **Task `deadline`**: the deadline property can be no more than 5 days
+ into the future. This is to limit the amount of pending tasks not being
+ taken care of. Ideally, you should use a much shorter deadline.
+
+ **Task expiration**: the `expires` property must be greater than the
+ task `deadline`. If not provided it will default to `deadline` + one
+        year. Notice that artifacts created by the task must expire before the task does.
+
+ **Task specific routing-keys**: using the `task.routes` property you may
+ define task specific routing-keys. If a task has a task specific
+ routing-key: `<route>`, then when the AMQP message about the task is
+ published, the message will be CC'ed with the routing-key:
+ `route.<route>`. This is useful if you want another component to listen
+ for completed tasks you have posted. The caller must have scope
+ `queue:route:<route>` for each route.
+
+ **Dependencies**: any tasks referenced in `task.dependencies` must have
+ already been created at the time of this call.
+
+ **Scopes**: Note that the scopes required to complete this API call depend
+ on the content of the `scopes`, `routes`, `schedulerId`, `priority`,
+ `provisionerId`, and `workerType` properties of the task definition.
+
+ **Legacy Scopes**: The `queue:create-task:..` scope without a priority and
+ the `queue:define-task:..` and `queue:task-group-id:..` scopes are considered
+ legacy and should not be used. Note that the new, non-legacy scopes require
+ a `queue:scheduler-id:..` scope as well as scopes for the proper priority.
+
+ This method takes input: ``v1/create-task-request.json#``
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["createTask"], *args, **kwargs)
+
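+    # Hedged sketch of creating a task. The helper names (slugId, fromNow,
+    # stringDate) are assumed from the client's utilities, and all identifiers
+    # and dates are illustrative; the body must validate against
+    # v1/create-task-request.json#.
+    #
+    #   import taskcluster
+    #   queue = taskcluster.Queue({'rootUrl': 'https://tc.example.com'})
+    #   task_id = taskcluster.slugId()
+    #   queue.createTask(task_id, {
+    #       'provisionerId': 'aws-provisioner-v1',
+    #       'workerType': 'tutorial',
+    #       'created': taskcluster.stringDate(taskcluster.fromNow('0 seconds')),
+    #       'deadline': taskcluster.stringDate(taskcluster.fromNow('1 day')),
+    #       'metadata': {'name': 'Example task', 'description': 'An example',
+    #                    'owner': 'dev@example.com',
+    #                    'source': 'https://example.com/task-source'},
+    #       'payload': {},
+    #   })
+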
+ def defineTask(self, *args, **kwargs):
+ """
+ Define Task
+
+ **Deprecated**, this is the same as `createTask` with a **self-dependency**.
+        This is only present for legacy reasons.
+
+ This method takes input: ``v1/create-task-request.json#``
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``deprecated``
+ """
+
+ return self._makeApiCall(self.funcinfo["defineTask"], *args, **kwargs)
+
+ def scheduleTask(self, *args, **kwargs):
+ """
+ Schedule Defined Task
+
+ scheduleTask will schedule a task to be executed, even if it has
+ unresolved dependencies. A task would otherwise only be scheduled if
+ its dependencies were resolved.
+
+ This is useful if you have defined a task that depends on itself or on
+ some other task that has not been resolved, but you wish the task to be
+ scheduled immediately.
+
+ This will announce the task as pending and workers will be allowed to
+ claim it and resolve the task.
+
+ **Note** this operation is **idempotent** and will not fail or complain
+ if called with a `taskId` that is already scheduled, or even resolved.
+ To reschedule a task previously resolved, use `rerunTask`.
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["scheduleTask"], *args, **kwargs)
+
+ def rerunTask(self, *args, **kwargs):
+ """
+ Rerun a Resolved Task
+
+ This method _reruns_ a previously resolved task, even if it was
+ _completed_. This is useful if your task completes unsuccessfully, and
+ you just want to run it from scratch again. This will also reset the
+ number of `retries` allowed.
+
+ Remember that `retries` in the task status counts the number of runs that
+        the queue has started because the worker stopped responding, for example
+ because a spot node died.
+
+ **Remark** this operation is idempotent, if you try to rerun a task that
+ is not either `failed` or `completed`, this operation will just return
+ the current task status.
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``deprecated``
+ """
+
+ return self._makeApiCall(self.funcinfo["rerunTask"], *args, **kwargs)
+
+ def cancelTask(self, *args, **kwargs):
+ """
+ Cancel Task
+
+ This method will cancel a task that is either `unscheduled`, `pending` or
+ `running`. It will resolve the current run as `exception` with
+        `reasonResolved` set to `canceled`. If the task isn't scheduled yet, i.e.
+        it doesn't have any runs, an initial run will be added and resolved as
+        described above. Hence, after canceling a task, it cannot be scheduled
+        with `queue.scheduleTask`, but a new run can be created with
+        `queue.rerun`. These semantics are equivalent to calling
+        `queue.scheduleTask` immediately followed by `queue.cancelTask`.
+
+ **Remark** this operation is idempotent, if you try to cancel a task that
+ isn't `unscheduled`, `pending` or `running`, this operation will just
+ return the current task status.
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["cancelTask"], *args, **kwargs)
+
+ def claimWork(self, *args, **kwargs):
+ """
+ Claim Work
+
+ Claim pending task(s) for the given `provisionerId`/`workerType` queue.
+
+ If any work is available (even if fewer than the requested number of
+        tasks), this will return immediately. Otherwise, it will block for tens of
+        seconds waiting for work. If no work appears, it will return an empty
+ list of tasks. Callers should sleep a short while (to avoid denial of
+ service in an error condition) and call the endpoint again. This is a
+ simple implementation of "long polling".
+
+ This method takes input: ``v1/claim-work-request.json#``
+
+ This method gives output: ``v1/claim-work-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["claimWork"], *args, **kwargs)
+
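+    # Hedged sketch of the long-polling loop described above. Identifiers are
+    # illustrative; the request body must validate against
+    # v1/claim-work-request.json#.
+    #
+    #   import time
+    #   while True:
+    #       work = queue.claimWork('aws-provisioner-v1', 'tutorial', {
+    #           'workerGroup': 'example-group',
+    #           'workerId': 'example-worker-1',
+    #           'tasks': 1,
+    #       })
+    #       if not work['tasks']:
+    #           time.sleep(5)   # back off briefly when no work is available
+    #           continue
+    #       # ... process work['tasks'][0] ...
+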
+ def claimTask(self, *args, **kwargs):
+ """
+ Claim Task
+
+ claim a task - never documented
+
+ This method takes input: ``v1/task-claim-request.json#``
+
+ This method gives output: ``v1/task-claim-response.json#``
+
+ This method is ``deprecated``
+ """
+
+ return self._makeApiCall(self.funcinfo["claimTask"], *args, **kwargs)
+
+ def reclaimTask(self, *args, **kwargs):
+ """
+ Reclaim task
+
+ Refresh the claim for a specific `runId` for given `taskId`. This updates
+ the `takenUntil` property and returns a new set of temporary credentials
+ for performing requests on behalf of the task. These credentials should
+ be used in-place of the credentials returned by `claimWork`.
+
+        The `reclaimTask` request serves to:
+ * Postpone `takenUntil` preventing the queue from resolving
+ `claim-expired`,
+ * Refresh temporary credentials used for processing the task, and
+ * Abort execution if the task/run have been resolved.
+
+ If the `takenUntil` timestamp is exceeded the queue will resolve the run
+        as _exception_ with reason `claim-expired`, and proceed to retry the
+ task. This ensures that tasks are retried, even if workers disappear
+ without warning.
+
+ If the task is resolved, this end-point will return `409` reporting
+        `RequestConflict`. This typically happens if the task has been canceled
+        or the `task.deadline` has been exceeded. If reclaiming fails, workers
+ should abort the task and forget about the given `runId`. There is no
+ need to resolve the run or upload artifacts.
+
+ This method gives output: ``v1/task-reclaim-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["reclaimTask"], *args, **kwargs)
+
+ def reportCompleted(self, *args, **kwargs):
+ """
+ Report Run Completed
+
+ Report a task completed, resolving the run as `completed`.
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["reportCompleted"], *args, **kwargs)
+
+ def reportFailed(self, *args, **kwargs):
+ """
+ Report Run Failed
+
+ Report a run failed, resolving the run as `failed`. Use this to resolve
+ a run that failed because the task specific code behaved unexpectedly.
+ For example the task exited non-zero, or didn't produce expected output.
+
+        Do not use this if the task couldn't be run because of a malformed
+        payload, or some other unexpected condition. In these cases we have a task
+ exception, which should be reported with `reportException`.
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["reportFailed"], *args, **kwargs)
+
+ def reportException(self, *args, **kwargs):
+ """
+ Report Task Exception
+
+ Resolve a run as _exception_. Generally, you will want to report tasks as
+ failed instead of exception. You should `reportException` if,
+
+ * The `task.payload` is invalid,
+ * Non-existent resources are referenced,
+ * Declared actions cannot be executed due to unavailable resources,
+ * The worker had to shutdown prematurely,
+ * The worker experienced an unknown error, or,
+ * The task explicitly requested a retry.
+
+ Do not use this to signal that some user-specified code crashed for any
+ reason specific to this code. If user-specific code hits a resource that
+        is temporarily unavailable, the worker should report the task _failed_.
+
+ This method takes input: ``v1/task-exception-request.json#``
+
+ This method gives output: ``v1/task-status-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["reportException"], *args, **kwargs)
+
+ def createArtifact(self, *args, **kwargs):
+ """
+ Create Artifact
+
+ This API end-point creates an artifact for a specific run of a task. This
+ should **only** be used by a worker currently operating on this task, or
+        from a process running within the task (i.e. on the worker).
+
+        All artifacts must specify when they `expires`; the queue will
+        automatically take care of deleting artifacts past their
+        expiration point. This feature makes it feasible to upload large
+        intermediate artifacts from data processing applications, as the
+        artifacts can be set to expire a few days later.
+
+        We currently support 3 different `storageType`s; each storage type has
+        slightly different features and in some cases different semantics.
+        We also have 2 deprecated `storageType`s which are only maintained for
+        backwards compatibility and should not be used in new implementations.
+
+ **Blob artifacts**, are useful for storing large files. Currently, these
+ are all stored in S3 but there are facilities for adding support for other
+        backends in the future. A call for this type of artifact must provide information
+ about the file which will be uploaded. This includes sha256 sums and sizes.
+ This method will return a list of general form HTTP requests which are signed
+ by AWS S3 credentials managed by the Queue. Once these requests are completed
+ the list of `ETag` values returned by the requests must be passed to the
+        queue `completeArtifact` method.
+
+        **S3 artifacts**, DEPRECATED, are useful for static files which will be
+        stored on S3. When creating an S3 artifact the queue will return a
+ pre-signed URL to which you can do a `PUT` request to upload your
+ artifact. Note that `PUT` request **must** specify the `content-length`
+ header and **must** give the `content-type` header the same value as in
+ the request to `createArtifact`.
+
+ **Azure artifacts**, DEPRECATED are stored in _Azure Blob Storage_ service
+ which given the consistency guarantees and API interface offered by Azure
+ is more suitable for artifacts that will be modified during the execution
+ of the task. For example docker-worker has a feature that persists the
+ task log to Azure Blob Storage every few seconds creating a somewhat
+ live log. A request to create an Azure artifact will return a URL
+ featuring a [Shared-Access-Signature](http://msdn.microsoft.com/en-us/library/azure/dn140256.aspx),
+ refer to MSDN for further information on how to use these.
+ **Warning: azure artifact is currently an experimental feature subject
+ to changes and data-drops.**
+
+        **Reference artifacts**, only consist of meta-data which the queue will
+        store for you. These artifacts really only have a `url` property and
+        when the artifact is requested the client will be redirected to the URL
+        provided with a `303` (See Other) redirect. Please note that we cannot
+        delete artifacts you upload to other services, we can only delete the
+        reference to the artifact, when it expires.
+
+        **Error artifacts**, only consist of meta-data which the queue will
+        store for you. These artifacts are only meant to indicate that the
+        worker or the task failed to generate a specific artifact that it
+        would otherwise have uploaded. For example docker-worker will upload an
+        error artifact, if the file it was supposed to upload doesn't exist or
+        turns out to be a directory. Clients requesting an error artifact will
+        get a `424` (Failed Dependency) response. This is mainly designed to
+        ensure that dependent tasks can distinguish between artifacts that were
+        supposed to be generated and artifacts for which the name is misspelled.
+
+ **Artifact immutability**, generally speaking you cannot overwrite an
+ artifact when created. But if you repeat the request with the same
+ properties the request will succeed as the operation is idempotent.
+ This is useful if you need to refresh a signed URL while uploading.
+        Do not abuse this to overwrite artifacts created by another entity!
+        For example, a worker-host overwriting an artifact created by worker-code.
+
+ As a special case the `url` property on _reference artifacts_ can be
+ updated. You should only use this to update the `url` property for
+ reference artifacts your process has created.
+
+ This method takes input: ``v1/post-artifact-request.json#``
+
+ This method gives output: ``v1/post-artifact-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["createArtifact"], *args, **kwargs)
+
+ def completeArtifact(self, *args, **kwargs):
+ """
+ Complete Artifact
+
+ This endpoint finalises an upload done through the blob `storageType`.
+ The queue will ensure that the task/run is still allowing artifacts
+ to be uploaded. For single-part S3 blob artifacts, this endpoint
+ will simply ensure the artifact is present in S3. For multipart S3
+ artifacts, the endpoint will perform the commit step of the multipart
+ upload flow. As the final step for both multi and single part artifacts,
+ the `present` entity field will be set to `true` to reflect that the
+ artifact is now present and a message published to pulse. NOTE: This
+        endpoint *must* be called for all artifacts of storageType 'blob'.
+
+ This method takes input: ``v1/put-artifact-request.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["completeArtifact"], *args, **kwargs)
+
+ def getArtifact(self, *args, **kwargs):
+ """
+ Get Artifact from Run
+
+ Get artifact by `<name>` from a specific run.
+
+ **Public Artifacts**, in-order to get an artifact you need the scope
+ `queue:get-artifact:<name>`, where `<name>` is the name of the artifact.
+ But if the artifact `name` starts with `public/`, authentication and
+ authorization is not necessary to fetch the artifact.
+
+ **API Clients**, this method will redirect you to the artifact, if it is
+ stored externally. Either way, the response may not be JSON. So API
+ client users might want to generate a signed URL for this end-point and
+ use that URL with an HTTP client that can handle responses correctly.
+
+ **Downloading artifacts**
+ There are some special considerations for those http clients which download
+ artifacts. This api endpoint is designed to be compatible with an HTTP 1.1
+ compliant client, but has extra features to ensure the download is valid.
+        It is strongly recommended that consumers use either taskcluster-lib-artifact (JS),
+ taskcluster-lib-artifact-go (Go) or the CLI written in Go to interact with
+ artifacts.
+
+ In order to download an artifact the following must be done:
+
+ 1. Obtain queue url. Building a signed url with a taskcluster client is
+ recommended
+ 1. Make a GET request which does not follow redirects
+ 1. In all cases, if specified, the
+ x-taskcluster-location-{content,transfer}-{sha256,length} values must be
+ validated to be equal to the Content-Length and Sha256 checksum of the
+        final artifact downloaded, as well as any intermediate redirects
+ 1. If this response is a 500-series error, retry using an exponential
+ backoff. No more than 5 retries should be attempted
+ 1. If this response is a 400-series error, treat it appropriately for
+ your context. This might be an error in responding to this request or
+ an Error storage type body. This request should not be retried.
+ 1. If this response is a 200-series response, the response body is the artifact.
+ If the x-taskcluster-location-{content,transfer}-{sha256,length} and
+ x-taskcluster-location-content-encoding are specified, they should match
+ this response body
+ 1. If the response type is a 300-series redirect, the artifact will be at the
+ location specified by the `Location` header. There are multiple artifact storage
+ types which use a 300-series redirect.
+ 1. For all redirects followed, the user must verify that the content-sha256, content-length,
+ transfer-sha256, transfer-length and content-encoding match every further request. The final
+ artifact must also be validated against the values specified in the original queue response
+ 1. Caching of requests with an x-taskcluster-artifact-storage-type value of `reference`
+ must not occur
+ 1. A request which has x-taskcluster-artifact-storage-type value of `blob` and does not
+ have x-taskcluster-location-content-sha256 or x-taskcluster-location-content-length
+ must be treated as an error
+
+ **Headers**
+ The following important headers are set on the response to this method:
+
+ * location: the url of the artifact if a redirect is to be performed
+ * x-taskcluster-artifact-storage-type: the storage type. Example: blob, s3, error
+
+ The following important headers are set on responses to this method for Blob artifacts
+
+ * x-taskcluster-location-content-sha256: the SHA256 of the artifact
+ *after* any content-encoding is undone. Sha256 is hex encoded (e.g. [0-9A-Fa-f]{64})
+ * x-taskcluster-location-content-length: the number of bytes *after* any content-encoding
+ is undone
+ * x-taskcluster-location-transfer-sha256: the SHA256 of the artifact
+ *before* any content-encoding is undone. This is the SHA256 of what is sent over
+ the wire. Sha256 is hex encoded (e.g. [0-9A-Fa-f]{64})
+        * x-taskcluster-location-transfer-length: the number of bytes *before* any content-encoding
+        is undone, i.e. the number of bytes sent over the wire
+ * x-taskcluster-location-content-encoding: the content-encoding used. It will either
+ be `gzip` or `identity` right now. This is hardcoded to a value set when the artifact
+ was created and no content-negotiation occurs
+ * x-taskcluster-location-content-type: the content-type of the artifact
+
+ **Caching**, artifacts may be cached in data centers closer to the
+ workers in-order to reduce bandwidth costs. This can lead to longer
+ response times. Caching can be skipped by setting the header
+ `x-taskcluster-skip-cache: true`, this should only be used for resources
+ where request volume is known to be low, and caching not useful.
+ (This feature may be disabled in the future, use is sparingly!)
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["getArtifact"], *args, **kwargs)
+
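+    # Hedged sketch: following the download guidance above by generating a
+    # signed URL (assuming the generic client's buildSignedUrl helper) and
+    # fetching it with a client that does not silently follow redirects. The
+    # taskId, runId and artifact name are illustrative.
+    #
+    #   import requests
+    #   url = queue.buildSignedUrl('getArtifact', 'fN1SbArXTPSVFNUvaOlinQ', 0,
+    #                              'public/build/target.zip')
+    #   resp = requests.get(url, allow_redirects=False)
+    #   # Inspect resp.status_code and the Location / x-taskcluster-* headers.
+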
+ def getLatestArtifact(self, *args, **kwargs):
+ """
+ Get Artifact from Latest Run
+
+ Get artifact by `<name>` from the last run of a task.
+
+ **Public Artifacts**, in-order to get an artifact you need the scope
+ `queue:get-artifact:<name>`, where `<name>` is the name of the artifact.
+ But if the artifact `name` starts with `public/`, authentication and
+ authorization is not necessary to fetch the artifact.
+
+ **API Clients**, this method will redirect you to the artifact, if it is
+ stored externally. Either way, the response may not be JSON. So API
+ client users might want to generate a signed URL for this end-point and
+ use that URL with a normal HTTP client.
+
+ **Remark**, this end-point is slightly slower than
+ `queue.getArtifact`, so consider that if you already know the `runId` of
+        the latest run. Otherwise, just use the most convenient API end-point.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["getLatestArtifact"], *args, **kwargs)
+
+ def listArtifacts(self, *args, **kwargs):
+ """
+ Get Artifacts from Run
+
+ Returns a list of artifacts and associated meta-data for a given run.
+
+ As a task may have many artifacts paging may be necessary. If this
+ end-point returns a `continuationToken`, you should call the end-point
+ again with the `continuationToken` as the query-string option:
+ `continuationToken`.
+
+        By default this end-point will list up to 1000 artifacts in a single page;
+        you may limit this with the query-string parameter `limit`.
+
+ This method gives output: ``v1/list-artifacts-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["listArtifacts"], *args, **kwargs)
+
+ def listLatestArtifacts(self, *args, **kwargs):
+ """
+ Get Artifacts from Latest Run
+
+ Returns a list of artifacts and associated meta-data for the latest run
+ from the given task.
+
+ As a task may have many artifacts paging may be necessary. If this
+ end-point returns a `continuationToken`, you should call the end-point
+ again with the `continuationToken` as the query-string option:
+ `continuationToken`.
+
+        By default this end-point will list up to 1000 artifacts in a single page;
+        you may limit this with the query-string parameter `limit`.
+
+ This method gives output: ``v1/list-artifacts-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["listLatestArtifacts"], *args, **kwargs)
+
+ def listProvisioners(self, *args, **kwargs):
+ """
+ Get a list of all active provisioners
+
+ Get all active provisioners.
+
+ The term "provisioner" is taken broadly to mean anything with a provisionerId.
+ This does not necessarily mean there is an associated service performing any
+ provisioning activity.
+
+ The response is paged. If this end-point returns a `continuationToken`, you
+ should call the end-point again with the `continuationToken` as a query-string
+ option. By default this end-point will list up to 1000 provisioners in a single
+ page. You may limit this with the query-string parameter `limit`.
+
+ This method gives output: ``v1/list-provisioners-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["listProvisioners"], *args, **kwargs)
+
+ def getProvisioner(self, *args, **kwargs):
+ """
+ Get an active provisioner
+
+ Get an active provisioner.
+
+ The term "provisioner" is taken broadly to mean anything with a provisionerId.
+ This does not necessarily mean there is an associated service performing any
+ provisioning activity.
+
+ This method gives output: ``v1/provisioner-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["getProvisioner"], *args, **kwargs)
+
+ def declareProvisioner(self, *args, **kwargs):
+ """
+ Update a provisioner
+
+ Declare a provisioner, supplying some details about it.
+
+ `declareProvisioner` allows updating one or more properties of a provisioner as long as the required scopes are
+ possessed. For example, a request to update the `aws-provisioner-v1`
+ provisioner with a body `{description: 'This provisioner is great'}` would require you to have the scope
+ `queue:declare-provisioner:aws-provisioner-v1#description`.
+
+ The term "provisioner" is taken broadly to mean anything with a provisionerId.
+ This does not necessarily mean there is an associated service performing any
+ provisioning activity.
+
+ This method takes input: ``v1/update-provisioner-request.json#``
+
+ This method gives output: ``v1/provisioner-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["declareProvisioner"], *args, **kwargs)
+
+ def pendingTasks(self, *args, **kwargs):
+ """
+ Get Number of Pending Tasks
+
+ Get an approximate number of pending tasks for the given `provisionerId`
+ and `workerType`.
+
+ The underlying Azure Storage Queues only promises to give us an estimate.
+ Furthermore, we cache the result in memory for 20 seconds. So consumers
+        should by no means expect this to be an accurate number.
+ It is, however, a solid estimate of the number of pending tasks.
+
+ This method gives output: ``v1/pending-tasks-response.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["pendingTasks"], *args, **kwargs)
+
+ def listWorkerTypes(self, *args, **kwargs):
+ """
+ Get a list of all active worker-types
+
+ Get all active worker-types for the given provisioner.
+
+ The response is paged. If this end-point returns a `continuationToken`, you
+ should call the end-point again with the `continuationToken` as a query-string
+ option. By default this end-point will list up to 1000 worker-types in a single
+ page. You may limit this with the query-string parameter `limit`.
+
+ This method gives output: ``v1/list-workertypes-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["listWorkerTypes"], *args, **kwargs)
+
+ def getWorkerType(self, *args, **kwargs):
+ """
+ Get a worker-type
+
+ Get a worker-type from a provisioner.
+
+ This method gives output: ``v1/workertype-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["getWorkerType"], *args, **kwargs)
+
+ def declareWorkerType(self, *args, **kwargs):
+ """
+ Update a worker-type
+
+ Declare a workerType, supplying some details about it.
+
+ `declareWorkerType` allows updating one or more properties of a worker-type as long as the required scopes are
+ possessed. For example, a request to update the `gecko-b-1-w2008` worker-type within the `aws-provisioner-v1`
+ provisioner with a body `{description: 'This worker type is great'}` would require you to have the scope
+ `queue:declare-worker-type:aws-provisioner-v1/gecko-b-1-w2008#description`.
+
+ This method takes input: ``v1/update-workertype-request.json#``
+
+ This method gives output: ``v1/workertype-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["declareWorkerType"], *args, **kwargs)
+
+ def listWorkers(self, *args, **kwargs):
+ """
+ Get a list of all active workers of a workerType
+
+ Get a list of all active workers of a workerType.
+
+        `listWorkers` allows a response to be filtered by quarantined and non-quarantined workers.
+        To filter the query, you should call the end-point with `quarantined` as a query-string
+        option with a true or false value.
+
+ The response is paged. If this end-point returns a `continuationToken`, you
+ should call the end-point again with the `continuationToken` as a query-string
+ option. By default this end-point will list up to 1000 workers in a single
+ page. You may limit this with the query-string parameter `limit`.
+
+ This method gives output: ``v1/list-workers-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["listWorkers"], *args, **kwargs)
+
+ def getWorker(self, *args, **kwargs):
+ """
+        Get a worker
+
+ Get a worker from a worker-type.
+
+ This method gives output: ``v1/worker-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["getWorker"], *args, **kwargs)
+
+ def quarantineWorker(self, *args, **kwargs):
+ """
+ Quarantine a worker
+
+ Quarantine a worker
+
+ This method takes input: ``v1/quarantine-worker-request.json#``
+
+ This method gives output: ``v1/worker-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["quarantineWorker"], *args, **kwargs)
+
+ def declareWorker(self, *args, **kwargs):
+ """
+ Declare a worker
+
+ Declare a worker, supplying some details about it.
+
+ `declareWorker` allows updating one or more properties of a worker as long as the required scopes are
+ possessed.
+
+ This method takes input: ``v1/update-worker-request.json#``
+
+ This method gives output: ``v1/worker-response.json#``
+
+ This method is ``experimental``
+ """
+
+ return self._makeApiCall(self.funcinfo["declareWorker"], *args, **kwargs)
+
+ funcinfo = {
+ "cancelTask": {
+ 'args': ['taskId'],
+ 'method': 'post',
+ 'name': 'cancelTask',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/cancel',
+ 'stability': 'stable',
+ },
+ "claimTask": {
+ 'args': ['taskId', 'runId'],
+ 'input': 'v1/task-claim-request.json#',
+ 'method': 'post',
+ 'name': 'claimTask',
+ 'output': 'v1/task-claim-response.json#',
+ 'route': '/task/<taskId>/runs/<runId>/claim',
+ 'stability': 'deprecated',
+ },
+ "claimWork": {
+ 'args': ['provisionerId', 'workerType'],
+ 'input': 'v1/claim-work-request.json#',
+ 'method': 'post',
+ 'name': 'claimWork',
+ 'output': 'v1/claim-work-response.json#',
+ 'route': '/claim-work/<provisionerId>/<workerType>',
+ 'stability': 'stable',
+ },
+ "completeArtifact": {
+ 'args': ['taskId', 'runId', 'name'],
+ 'input': 'v1/put-artifact-request.json#',
+ 'method': 'put',
+ 'name': 'completeArtifact',
+ 'route': '/task/<taskId>/runs/<runId>/artifacts/<name>',
+ 'stability': 'experimental',
+ },
+ "createArtifact": {
+ 'args': ['taskId', 'runId', 'name'],
+ 'input': 'v1/post-artifact-request.json#',
+ 'method': 'post',
+ 'name': 'createArtifact',
+ 'output': 'v1/post-artifact-response.json#',
+ 'route': '/task/<taskId>/runs/<runId>/artifacts/<name>',
+ 'stability': 'stable',
+ },
+ "createTask": {
+ 'args': ['taskId'],
+ 'input': 'v1/create-task-request.json#',
+ 'method': 'put',
+ 'name': 'createTask',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>',
+ 'stability': 'stable',
+ },
+ "declareProvisioner": {
+ 'args': ['provisionerId'],
+ 'input': 'v1/update-provisioner-request.json#',
+ 'method': 'put',
+ 'name': 'declareProvisioner',
+ 'output': 'v1/provisioner-response.json#',
+ 'route': '/provisioners/<provisionerId>',
+ 'stability': 'experimental',
+ },
+ "declareWorker": {
+ 'args': ['provisionerId', 'workerType', 'workerGroup', 'workerId'],
+ 'input': 'v1/update-worker-request.json#',
+ 'method': 'put',
+ 'name': 'declareWorker',
+ 'output': 'v1/worker-response.json#',
+ 'route': '/provisioners/<provisionerId>/worker-types/<workerType>/<workerGroup>/<workerId>',
+ 'stability': 'experimental',
+ },
+ "declareWorkerType": {
+ 'args': ['provisionerId', 'workerType'],
+ 'input': 'v1/update-workertype-request.json#',
+ 'method': 'put',
+ 'name': 'declareWorkerType',
+ 'output': 'v1/workertype-response.json#',
+ 'route': '/provisioners/<provisionerId>/worker-types/<workerType>',
+ 'stability': 'experimental',
+ },
+ "defineTask": {
+ 'args': ['taskId'],
+ 'input': 'v1/create-task-request.json#',
+ 'method': 'post',
+ 'name': 'defineTask',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/define',
+ 'stability': 'deprecated',
+ },
+ "getArtifact": {
+ 'args': ['taskId', 'runId', 'name'],
+ 'method': 'get',
+ 'name': 'getArtifact',
+ 'route': '/task/<taskId>/runs/<runId>/artifacts/<name>',
+ 'stability': 'stable',
+ },
+ "getLatestArtifact": {
+ 'args': ['taskId', 'name'],
+ 'method': 'get',
+ 'name': 'getLatestArtifact',
+ 'route': '/task/<taskId>/artifacts/<name>',
+ 'stability': 'stable',
+ },
+ "getProvisioner": {
+ 'args': ['provisionerId'],
+ 'method': 'get',
+ 'name': 'getProvisioner',
+ 'output': 'v1/provisioner-response.json#',
+ 'route': '/provisioners/<provisionerId>',
+ 'stability': 'experimental',
+ },
+ "getWorker": {
+ 'args': ['provisionerId', 'workerType', 'workerGroup', 'workerId'],
+ 'method': 'get',
+ 'name': 'getWorker',
+ 'output': 'v1/worker-response.json#',
+ 'route': '/provisioners/<provisionerId>/worker-types/<workerType>/workers/<workerGroup>/<workerId>',
+ 'stability': 'experimental',
+ },
+ "getWorkerType": {
+ 'args': ['provisionerId', 'workerType'],
+ 'method': 'get',
+ 'name': 'getWorkerType',
+ 'output': 'v1/workertype-response.json#',
+ 'route': '/provisioners/<provisionerId>/worker-types/<workerType>',
+ 'stability': 'experimental',
+ },
+ "listArtifacts": {
+ 'args': ['taskId', 'runId'],
+ 'method': 'get',
+ 'name': 'listArtifacts',
+ 'output': 'v1/list-artifacts-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/task/<taskId>/runs/<runId>/artifacts',
+ 'stability': 'experimental',
+ },
+ "listDependentTasks": {
+ 'args': ['taskId'],
+ 'method': 'get',
+ 'name': 'listDependentTasks',
+ 'output': 'v1/list-dependent-tasks-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/task/<taskId>/dependents',
+ 'stability': 'stable',
+ },
+ "listLatestArtifacts": {
+ 'args': ['taskId'],
+ 'method': 'get',
+ 'name': 'listLatestArtifacts',
+ 'output': 'v1/list-artifacts-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/task/<taskId>/artifacts',
+ 'stability': 'experimental',
+ },
+ "listProvisioners": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'listProvisioners',
+ 'output': 'v1/list-provisioners-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/provisioners',
+ 'stability': 'experimental',
+ },
+ "listTaskGroup": {
+ 'args': ['taskGroupId'],
+ 'method': 'get',
+ 'name': 'listTaskGroup',
+ 'output': 'v1/list-task-group-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/task-group/<taskGroupId>/list',
+ 'stability': 'stable',
+ },
+ "listWorkerTypes": {
+ 'args': ['provisionerId'],
+ 'method': 'get',
+ 'name': 'listWorkerTypes',
+ 'output': 'v1/list-workertypes-response.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/provisioners/<provisionerId>/worker-types',
+ 'stability': 'experimental',
+ },
+ "listWorkers": {
+ 'args': ['provisionerId', 'workerType'],
+ 'method': 'get',
+ 'name': 'listWorkers',
+ 'output': 'v1/list-workers-response.json#',
+ 'query': ['continuationToken', 'limit', 'quarantined'],
+ 'route': '/provisioners/<provisionerId>/worker-types/<workerType>/workers',
+ 'stability': 'experimental',
+ },
+ "pendingTasks": {
+ 'args': ['provisionerId', 'workerType'],
+ 'method': 'get',
+ 'name': 'pendingTasks',
+ 'output': 'v1/pending-tasks-response.json#',
+ 'route': '/pending/<provisionerId>/<workerType>',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "quarantineWorker": {
+ 'args': ['provisionerId', 'workerType', 'workerGroup', 'workerId'],
+ 'input': 'v1/quarantine-worker-request.json#',
+ 'method': 'put',
+ 'name': 'quarantineWorker',
+ 'output': 'v1/worker-response.json#',
+ 'route': '/provisioners/<provisionerId>/worker-types/<workerType>/workers/<workerGroup>/<workerId>',
+ 'stability': 'experimental',
+ },
+ "reclaimTask": {
+ 'args': ['taskId', 'runId'],
+ 'method': 'post',
+ 'name': 'reclaimTask',
+ 'output': 'v1/task-reclaim-response.json#',
+ 'route': '/task/<taskId>/runs/<runId>/reclaim',
+ 'stability': 'stable',
+ },
+ "reportCompleted": {
+ 'args': ['taskId', 'runId'],
+ 'method': 'post',
+ 'name': 'reportCompleted',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/runs/<runId>/completed',
+ 'stability': 'stable',
+ },
+ "reportException": {
+ 'args': ['taskId', 'runId'],
+ 'input': 'v1/task-exception-request.json#',
+ 'method': 'post',
+ 'name': 'reportException',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/runs/<runId>/exception',
+ 'stability': 'stable',
+ },
+ "reportFailed": {
+ 'args': ['taskId', 'runId'],
+ 'method': 'post',
+ 'name': 'reportFailed',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/runs/<runId>/failed',
+ 'stability': 'stable',
+ },
+ "rerunTask": {
+ 'args': ['taskId'],
+ 'method': 'post',
+ 'name': 'rerunTask',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/rerun',
+ 'stability': 'deprecated',
+ },
+ "scheduleTask": {
+ 'args': ['taskId'],
+ 'method': 'post',
+ 'name': 'scheduleTask',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/schedule',
+ 'stability': 'stable',
+ },
+ "status": {
+ 'args': ['taskId'],
+ 'method': 'get',
+ 'name': 'status',
+ 'output': 'v1/task-status-response.json#',
+ 'route': '/task/<taskId>/status',
+ 'stability': 'stable',
+ },
+ "task": {
+ 'args': ['taskId'],
+ 'method': 'get',
+ 'name': 'task',
+ 'output': 'v1/task.json#',
+ 'route': '/task/<taskId>',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Queue']
diff --git a/third_party/python/taskcluster/taskcluster/queueevents.py b/third_party/python/taskcluster/taskcluster/queueevents.py
new file mode 100644
index 0000000000..0ece2985d7
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/queueevents.py
@@ -0,0 +1,718 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class QueueEvents(BaseClient):
+ """
+ The queue, typically available at `queue.taskcluster.net`, is responsible
+    for accepting tasks and tracking their state as they are executed by
+    workers, in order to ensure they are eventually resolved.
+
+    This document describes the AMQP exchanges offered by the queue, which allow
+    third-party listeners to monitor tasks as they progress to resolution.
+    These exchanges target the following audiences:
+     * Schedulers, who take action after tasks are completed,
+     * Workers, who want to listen for new or canceled tasks (optional),
+     * Tools, that want to update their view as tasks progress.
+
+    You'll notice that all the exchanges in this document share the same
+    routing key pattern. This makes it very easy to bind to all messages
+    about a certain kind of task.
+
+    **Task specific routes**: a task can define task-specific routes using
+    the `task.routes` property. See the task creation documentation for details
+    on the permissions required to provide task-specific routes. If a task has
+    the entry `'notify.by-email'` as a task-specific route defined in
+    `task.routes`, all messages about this task will be CC'ed with the
+    routing-key `'route.notify.by-email'`.
+
+    These routes will always be prefixed `route.`, so that they cannot interfere
+    with the _primary_ routing key as documented here. Notice that the
+    _primary_ routing key is always prefixed `primary.`. This is ensured
+    in the routing key reference, so API clients will do this automatically.
+
+    Please note that, the way RabbitMQ works, a message will only arrive
+    in your queue once, even though you may have bound to the exchange with
+    multiple routing key patterns that match more than one of the CC'ed
+    routing keys.
+
+ **Delivery guarantees**, most operations on the queue are idempotent,
+ which means that if repeated with the same arguments then the requests
+ will ensure completion of the operation and return the same response.
+ This is useful if the server crashes or the TCP connection breaks, but
+ when re-executing an idempotent operation, the queue will also resend
+ any related AMQP messages. Hence, messages may be repeated.
+
+ This shouldn't be much of a problem, as the best you can achieve using
+ confirm messages with AMQP is at-least-once delivery semantics. Hence,
+ this only prevents you from obtaining at-most-once delivery semantics.
+
+    **Remark**: some messages generated by timeouts may be dropped if the
+    server crashes at the wrong time. Ideally, we'll address this in the
+ future. For now we suggest you ignore this corner case, and notify us
+ if this corner case is of concern to you.
+ """
+
+ classOptions = {
+ "exchangePrefix": "exchange/taskcluster-queue/v1/",
+ }
+ serviceName = 'queue'
+ apiVersion = 'v1'
+
+ def taskDefined(self, *args, **kwargs):
+ """
+ Task Defined Messages
+
+        When a task is created or just defined, a message is posted to this
+        exchange.
+
+        This message exchange is mainly useful when tasks are scheduled by a
+        scheduler that uses `defineTask`, as this does not make the task
+        `pending`. Thus, no `taskPending` message is published.
+        Please note that messages are also published on this exchange if the task
+        is defined using `createTask`.
+
+ This exchange outputs: ``v1/task-defined-message.json#``This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+        * runId: `runId` of latest run for the task, `_` if no run exists for the task.
+
+        * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+
+        * workerId: `workerId` of latest run for the task, `_` if no run exists for the task.
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'task-defined',
+ 'name': 'taskDefined',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-defined-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
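+
+    # Illustrative binding sketch (it assumes, as in other Taskcluster client
+    # libraries, that `_makeTopicExchange` returns a dict with `exchange` and
+    # `routingKeyPattern`): unspecified routing-key fields default to wildcards,
+    # so this would match every task-defined message in one task group.
+    #
+    #     qe = QueueEvents({'rootUrl': root_url})
+    #     binding = qe.taskDefined(taskGroupId='fN1SbArXTPSVFNUvaOlinQ')
+    #     # binding['exchange']          -> 'exchange/taskcluster-queue/v1/task-defined'
+    #     # binding['routingKeyPattern'] -> roughly 'primary.*.*.*.*.*.*.*.fN1SbArXTPSVFNUvaOlinQ.#'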
+
+ def taskPending(self, *args, **kwargs):
+ """
+ Task Pending Messages
+
+        When a task becomes `pending`, a message is posted to this exchange.
+
+        This is useful for workers who don't want to constantly poll the queue
+        for new tasks. The queue will also be the authority for task states and
+        claims. But using this exchange, workers should be able to distribute work
+        efficiently, and they would be able to reduce their polling interval
+        significantly without affecting general responsiveness.
+
+ This exchange outputs: ``v1/task-pending-message.json#``This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+        * runId: `runId` of latest run for the task, `_` if no run exists for the task. (required)
+
+        * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+
+        * workerId: `workerId` of latest run for the task, `_` if no run exists for the task.
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'task-pending',
+ 'name': 'taskPending',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-pending-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def taskRunning(self, *args, **kwargs):
+ """
+ Task Running Messages
+
+ Whenever a task is claimed by a worker, a run is started on the worker,
+ and a message is posted on this exchange.
+
+ This exchange outputs: ``v1/task-running-message.json#``This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+        * runId: `runId` of latest run for the task, `_` if no run exists for the task. (required)
+
+        * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task. (required)
+
+        * workerId: `workerId` of latest run for the task, `_` if no run exists for the task. (required)
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'task-running',
+ 'name': 'taskRunning',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-running-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def artifactCreated(self, *args, **kwargs):
+ """
+ Artifact Creation Messages
+
+ Whenever the `createArtifact` end-point is called, the queue will create
+ a record of the artifact and post a message on this exchange. All of this
+ happens before the queue returns a signed URL for the caller to upload
+        the actual artifact with (depending on `storageType`).
+
+        This means that the actual artifact is rarely available when this message
+        is posted. But it is not unreasonable to assume that the artifact will
+        become available at some point later. Most signatures will expire in
+        30 minutes or so, forcing the uploader to call `createArtifact` with
+        the same payload again in order to continue uploading the artifact.
+
+        However, in most cases (especially for small artifacts) it's very
+        reasonable to assume the artifact will be available within a few minutes.
+        This property means that this exchange is mostly useful for tools
+        monitoring task evaluation. One could also use it to count the number of
+        artifacts per task, or to _index_ artifacts, though in most cases it'll be
+        smarter to index artifacts after the task in question has completed
+        successfully.
+
+ This exchange outputs: ``v1/artifact-created-message.json#``This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+        * runId: `runId` of latest run for the task, `_` if no run exists for the task. (required)
+
+        * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task. (required)
+
+        * workerId: `workerId` of latest run for the task, `_` if no run exists for the task. (required)
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'artifact-created',
+ 'name': 'artifactCreated',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/artifact-created-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def taskCompleted(self, *args, **kwargs):
+ """
+ Task Completed Messages
+
+        When a task is successfully completed by a worker, a message is posted
+        to this exchange.
+ This message is routed using the `runId`, `workerGroup` and `workerId`
+ that completed the task. But information about additional runs is also
+ available from the task status structure.
+
+ This exchange outputs: ``v1/task-completed-message.json#``This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+        * runId: `runId` of latest run for the task, `_` if no run exists for the task. (required)
+
+        * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task. (required)
+
+        * workerId: `workerId` of latest run for the task, `_` if no run exists for the task. (required)
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'task-completed',
+ 'name': 'taskCompleted',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-completed-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def taskFailed(self, *args, **kwargs):
+ """
+ Task Failed Messages
+
+        When a task ran, but failed to complete successfully, a message is posted
+        to this exchange. This means the worker ran the task-specific code, but the
+        task-specific code exited non-zero.
+
+ This exchange outputs: ``v1/task-failed-message.json#``This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+        * runId: `runId` of latest run for the task, `_` if no run exists for the task.
+
+        * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+
+        * workerId: `workerId` of latest run for the task, `_` if no run exists for the task.
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'task-failed',
+ 'name': 'taskFailed',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-failed-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def taskException(self, *args, **kwargs):
+ """
+ Task Exception Messages
+
+        Whenever Taskcluster fails to run a task, a message is posted to this exchange.
+        This happens if the task isn't completed before its `deadline`,
+        all retries failed (i.e. workers stopped responding), the task was
+        canceled by another entity, or the task carried a malformed payload.
+
+        The specific _reason_ is evident from the task status structure; refer
+        to the `reasonResolved` property for the last run.
+
+ This exchange outputs: ``v1/task-exception-message.json#``This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskId: `taskId` for the task this message concerns (required)
+
+        * runId: `runId` of latest run for the task, `_` if no run exists for the task.
+
+        * workerGroup: `workerGroup` of latest run for the task, `_` if no run exists for the task.
+
+        * workerId: `workerId` of latest run for the task, `_` if no run exists for the task.
+
+ * provisionerId: `provisionerId` this task is targeted at. (required)
+
+ * workerType: `workerType` this task must run on. (required)
+
+ * schedulerId: `schedulerId` this task was created by. (required)
+
+ * taskGroupId: `taskGroupId` this task was created in. (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'task-exception',
+ 'name': 'taskException',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'runId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerGroup',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'provisionerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'workerType',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-exception-message.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ def taskGroupResolved(self, *args, **kwargs):
+ """
+ Task Group Resolved Messages
+
+ A message is published on task-group-resolved whenever all submitted
+ tasks (whether scheduled or unscheduled) for a given task group have
+ been resolved, regardless of whether they resolved as successful or
+ not. A task group may be resolved multiple times, since new tasks may
+ be submitted against an already resolved task group.
+
+ This exchange outputs: ``v1/task-group-resolved.json#``This exchange takes the following keys:
+
+ * routingKeyKind: Identifier for the routing-key kind. This is always `'primary'` for the formalized routing key. (required)
+
+ * taskGroupId: `taskGroupId` for the task-group this message concerns (required)
+
+ * schedulerId: `schedulerId` for the task-group this message concerns (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'task-group-resolved',
+ 'name': 'taskGroupResolved',
+ 'routingKey': [
+ {
+ 'constant': 'primary',
+ 'multipleWords': False,
+ 'name': 'routingKeyKind',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'taskGroupId',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'schedulerId',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/task-group-resolved.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ funcinfo = {
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'QueueEvents']
diff --git a/third_party/python/taskcluster/taskcluster/secrets.py b/third_party/python/taskcluster/taskcluster/secrets.py
new file mode 100644
index 0000000000..08a4632f33
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/secrets.py
@@ -0,0 +1,149 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class Secrets(BaseClient):
+ """
+ The secrets service provides a simple key/value store for small bits of secret
+ data. Access is limited by scopes, so values can be considered secret from
+ those who do not have the relevant scopes.
+
+ Secrets also have an expiration date, and once a secret has expired it can no
+ longer be read. This is useful for short-term secrets such as a temporary
+ service credential or a one-time signing key.
+ """
+
+ classOptions = {
+ }
+ serviceName = 'secrets'
+ apiVersion = 'v1'
+
+ def ping(self, *args, **kwargs):
+ """
+ Ping Server
+
+ Respond without doing anything.
+ This endpoint is used to check that the service is up.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["ping"], *args, **kwargs)
+
+ def set(self, *args, **kwargs):
+ """
+ Set Secret
+
+ Set the secret associated with some key. If the secret already exists, it is
+ updated instead.
+
+ This method takes input: ``v1/secret.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["set"], *args, **kwargs)
+
+ def remove(self, *args, **kwargs):
+ """
+ Delete Secret
+
+ Delete the secret associated with some key.
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["remove"], *args, **kwargs)
+
+ def get(self, *args, **kwargs):
+ """
+ Read Secret
+
+ Read the secret associated with some key. If the secret has recently
+ expired, the response code 410 is returned. If the caller lacks the
+ scope necessary to get the secret, the call will fail with a 403 code
+ regardless of whether the secret exists.
+
+ This method gives output: ``v1/secret.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["get"], *args, **kwargs)
+
+ def list(self, *args, **kwargs):
+ """
+ List Secrets
+
+ List the names of all secrets.
+
+        By default this end-point will try to return up to 1000 secret names in one
+        request. But it **may return fewer**, even if more secrets are available.
+        It may also return a `continuationToken` even though there are no more
+        results. However, you can only be sure to have seen all results if you
+        keep calling `list` with the last `continuationToken` until you
+        get a result without a `continuationToken`.
+
+        If you are not interested in listing all the secrets at once, you may
+        use the query-string option `limit` to return fewer.
+
+ This method gives output: ``v1/secret-list.json#``
+
+ This method is ``stable``
+ """
+
+ return self._makeApiCall(self.funcinfo["list"], *args, **kwargs)
+
+ funcinfo = {
+ "get": {
+ 'args': ['name'],
+ 'method': 'get',
+ 'name': 'get',
+ 'output': 'v1/secret.json#',
+ 'route': '/secret/<name>',
+ 'stability': 'stable',
+ },
+ "list": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'list',
+ 'output': 'v1/secret-list.json#',
+ 'query': ['continuationToken', 'limit'],
+ 'route': '/secrets',
+ 'stability': 'stable',
+ },
+ "ping": {
+ 'args': [],
+ 'method': 'get',
+ 'name': 'ping',
+ 'route': '/ping',
+ 'stability': 'stable',
+ },
+ "remove": {
+ 'args': ['name'],
+ 'method': 'delete',
+ 'name': 'remove',
+ 'route': '/secret/<name>',
+ 'stability': 'stable',
+ },
+ "set": {
+ 'args': ['name'],
+ 'input': 'v1/secret.json#',
+ 'method': 'put',
+ 'name': 'set',
+ 'route': '/secret/<name>',
+ 'stability': 'stable',
+ },
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'Secrets']
diff --git a/third_party/python/taskcluster/taskcluster/treeherderevents.py b/third_party/python/taskcluster/taskcluster/treeherderevents.py
new file mode 100644
index 0000000000..42461f2eeb
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/treeherderevents.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+#####################################################
+# THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT #
+#####################################################
+# noqa: E128,E201
+from .client import BaseClient
+from .client import createApiClient
+from .client import config
+from .client import createTemporaryCredentials
+from .client import createSession
+_defaultConfig = config
+
+
+class TreeherderEvents(BaseClient):
+ """
+ The taskcluster-treeherder service is responsible for processing
+ task events published by TaskCluster Queue and producing job messages
+ that are consumable by Treeherder.
+
+    This exchange provides job messages to be consumed by any queue that is
+    attached to the exchange. This could be a production Treeherder instance,
+    a local development environment, or a custom dashboard.
+ """
+
+ classOptions = {
+ "exchangePrefix": "exchange/taskcluster-treeherder/v1/",
+ }
+ serviceName = 'treeherder'
+ apiVersion = 'v1'
+
+ def jobs(self, *args, **kwargs):
+ """
+ Job Messages
+
+ When a task run is scheduled or resolved, a message is posted to
+ this exchange in a Treeherder consumable format.
+
+ This exchange outputs: ``v1/pulse-job.json#``This exchange takes the following keys:
+
+ * destination: destination (required)
+
+ * project: project (required)
+
+ * reserved: Space reserved for future routing-key entries, you should always match this entry with `#`. As automatically done by our tooling, if not specified.
+ """
+
+ ref = {
+ 'exchange': 'jobs',
+ 'name': 'jobs',
+ 'routingKey': [
+ {
+ 'multipleWords': False,
+ 'name': 'destination',
+ },
+ {
+ 'multipleWords': False,
+ 'name': 'project',
+ },
+ {
+ 'multipleWords': True,
+ 'name': 'reserved',
+ },
+ ],
+ 'schema': 'v1/pulse-job.json#',
+ }
+ return self._makeTopicExchange(ref, *args, **kwargs)
+
+ funcinfo = {
+ }
+
+
+__all__ = ['createTemporaryCredentials', 'config', '_defaultConfig', 'createApiClient', 'createSession', 'TreeherderEvents']
diff --git a/third_party/python/taskcluster/taskcluster/utils.py b/third_party/python/taskcluster/taskcluster/utils.py
new file mode 100644
index 0000000000..8fac855d2b
--- /dev/null
+++ b/third_party/python/taskcluster/taskcluster/utils.py
@@ -0,0 +1,348 @@
+# -*- coding: UTF-8 -*-
+from __future__ import absolute_import, division, print_function
+import re
+import json
+import datetime
+import base64
+import logging
+import os
+import requests
+import requests.exceptions
+import slugid
+import time
+import six
+import random
+
+from . import exceptions
+
+MAX_RETRIES = 5
+
+DELAY_FACTOR = 0.1
+RANDOMIZATION_FACTOR = 0.25
+MAX_DELAY = 30
+
+
+log = logging.getLogger(__name__)
+
+# Regular expression matching offsets like:
+# X years Y months Z weeks N days H hours M minutes S seconds
+r = re.compile(''.join([
+    r'^(\s*(?P<years>\d+)\s*y(ears?)?)?',
+    r'(\s*(?P<months>\d+)\s*mo(nths?)?)?',
+    r'(\s*(?P<weeks>\d+)\s*w(eeks?)?)?',
+    r'(\s*(?P<days>\d+)\s*d(ays?)?)?',
+    r'(\s*(?P<hours>\d+)\s*h(ours?)?)?',
+    r'(\s*(?P<minutes>\d+)\s*m(in(utes?)?)?)?\s*',
+    r'(\s*(?P<seconds>\d+)\s*s(ec(onds?)?)?)?\s*$',
+]))
+
+
+def calculateSleepTime(attempt):
+ """ From the go client
+ https://github.com/taskcluster/go-got/blob/031f55c/backoff.go#L24-L29
+ """
+ if attempt <= 0:
+ return 0
+
+ # We subtract one to get exponents: 1, 2, 3, 4, 5, ..
+ delay = float(2 ** (attempt - 1)) * float(DELAY_FACTOR)
+ # Apply randomization factor
+ delay = delay * (RANDOMIZATION_FACTOR * (random.random() * 2 - 1) + 1)
+ # Always limit with a maximum delay
+ return min(delay, MAX_DELAY)
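+
+# Worked example (comments only): with DELAY_FACTOR=0.1 and MAX_DELAY=30 the
+# nominal delays before jitter are 2**(attempt-1) * 0.1 seconds:
+#   attempt 1 -> 0.1s, attempt 2 -> 0.2s, attempt 3 -> 0.4s, attempt 5 -> 1.6s,
+#   attempt 10 -> 51.2s, capped at 30s.
+# The +/-25% RANDOMIZATION_FACTOR jitter then spreads retries from different
+# clients so they don't hit the service in lockstep.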
+
+
+def toStr(obj, encoding='utf-8'):
+ if six.PY3 and isinstance(obj, six.binary_type):
+ obj = obj.decode(encoding)
+ else:
+ obj = str(obj)
+ return obj
+
+
+def fromNow(offset, dateObj=None):
+ """
+ Generate a `datetime.datetime` instance which is offset using a string.
+ See the README.md for a full example, but offset could be '1 day' for
+ a datetime object one day in the future
+ """
+
+ # We want to handle past dates as well as future
+ future = True
+ offset = offset.lstrip()
+ if offset.startswith('-'):
+ future = False
+ offset = offset[1:].lstrip()
+ if offset.startswith('+'):
+ offset = offset[1:].lstrip()
+
+ # Parse offset
+ m = r.match(offset)
+ if m is None:
+ raise ValueError("offset string: '%s' does not parse" % offset)
+
+ # In order to calculate years and months we need to calculate how many days
+ # to offset the offset by, since timedelta only goes as high as weeks
+ days = 0
+ hours = 0
+ minutes = 0
+ seconds = 0
+ if m.group('years'):
+ years = int(m.group('years'))
+ days += 365 * years
+ if m.group('months'):
+ months = int(m.group('months'))
+ days += 30 * months
+ days += int(m.group('days') or 0)
+ hours += int(m.group('hours') or 0)
+ minutes += int(m.group('minutes') or 0)
+ seconds += int(m.group('seconds') or 0)
+
+ # Offset datetime from utc
+ delta = datetime.timedelta(
+ weeks=int(m.group('weeks') or 0),
+ days=days,
+ hours=hours,
+ minutes=minutes,
+ seconds=seconds,
+ )
+
+ if not dateObj:
+ dateObj = datetime.datetime.utcnow()
+
+ return dateObj + delta if future else dateObj - delta
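+
+# Usage sketch (based on the parsing above): offsets combine units and may be
+# negative for past dates; years/months are approximated as 365/30 days.
+#
+#     fromNow('2 days 3 hours')   # datetime ~2d3h in the future (UTC)
+#     fromNow('-1 hour')          # datetime one hour in the past
+#     fromNow('1 day', dateObj=datetime.datetime(2021, 1, 1))  # relative to a fixed date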
+
+
+def fromNowJSON(offset):
+ """
+ Like fromNow() but returns in a taskcluster-json compatible way
+ """
+ return stringDate(fromNow(offset))
+
+
+def dumpJson(obj, **kwargs):
+ """ Match JS's JSON.stringify. When using the default seperators,
+ base64 encoding JSON results in \n sequences in the output. Hawk
+ barfs in your face if you have that in the text"""
+ def handleDateAndBinaryForJs(x):
+ if six.PY3 and isinstance(x, six.binary_type):
+ x = x.decode()
+ if isinstance(x, datetime.datetime) or isinstance(x, datetime.date):
+ return stringDate(x)
+ else:
+ return x
+ d = json.dumps(obj, separators=(',', ':'), default=handleDateAndBinaryForJs, **kwargs)
+ assert '\n' not in d
+ return d
+
+
+def stringDate(date):
+ # Convert to isoFormat
+ string = date.isoformat()
+
+ # If there is no timezone and no Z added, we'll add one at the end.
+ # This is just to be fully compliant with:
+ # https://tools.ietf.org/html/rfc3339#section-5.6
+ if string.endswith('+00:00'):
+ return string[:-6] + 'Z'
+ if date.utcoffset() is None and string[-1] != 'Z':
+ return string + 'Z'
+ return string
+
+
+def makeB64UrlSafe(b64str):
+ """ Make a base64 string URL Safe """
+ if isinstance(b64str, six.text_type):
+ b64str = b64str.encode()
+ # see RFC 4648, sec. 5
+ return b64str.replace(b'+', b'-').replace(b'/', b'_')
+
+
+def makeB64UrlUnsafe(b64str):
+ """ Make a base64 string URL Unsafe """
+ if isinstance(b64str, six.text_type):
+ b64str = b64str.encode()
+ # see RFC 4648, sec. 5
+ return b64str.replace(b'-', b'+').replace(b'_', b'/')
+
+
+def encodeStringForB64Header(s):
+ """ HTTP Headers can't have new lines in them, let's """
+ if isinstance(s, six.text_type):
+ s = s.encode()
+ return base64.encodestring(s).strip().replace(b'\n', b'')
+
+
+def slugId():
+ """ Generate a taskcluster slugid. This is a V4 UUID encoded into
+ URL-Safe Base64 (RFC 4648, sec 5) with '=' padding removed """
+ return slugid.nice()
+
+
+def stableSlugId():
+ """Returns a closure which can be used to generate stable slugIds.
+ Stable slugIds can be used in a graph to specify task IDs in multiple
+ places without regenerating them, e.g. taskId, requires, etc.
+ """
+ _cache = {}
+
+ def closure(name):
+ if name not in _cache:
+ _cache[name] = slugId()
+ return _cache[name]
+
+ return closure
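+
+# Illustrative use of the closure above: within one generator the same name
+# always maps to the same slug, so a task graph can reference IDs before the
+# tasks are created.
+#
+#     as_slugid = stableSlugId()
+#     build_id = as_slugid('build')
+#     test_task = {'taskId': as_slugid('test'), 'requires': [as_slugid('build')]}
+#     assert as_slugid('build') == build_id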
+
+
+def scopeMatch(assumedScopes, requiredScopeSets):
+ """
+    Take a list of assumed scopes, and a list of required scope sets in
+    disjunctive normal form, and check if any of the required scope sets are
+ satisfied.
+
+ Example:
+
+ requiredScopeSets = [
+ ["scopeA", "scopeB"],
+ ["scopeC"]
+ ]
+
+ In this case assumed_scopes must contain, either:
+ "scopeA" AND "scopeB", OR just "scopeC".
+ """
+ for scopeSet in requiredScopeSets:
+ for requiredScope in scopeSet:
+ for scope in assumedScopes:
+ if scope == requiredScope:
+                    # requiredScope satisfied, no need to check more scopes
+ break
+ if scope.endswith("*") and requiredScope.startswith(scope[:-1]):
+                    # requiredScope satisfied, no need to check more scopes
+ break
+ else:
+ # requiredScope not satisfied, stop checking scopeSet
+ break
+ else:
+ # scopeSet satisfied, so we're happy
+ return True
+ # none of the requiredScopeSets were satisfied
+ return False
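+
+# Examples for the DNF check above (illustrative): a trailing `*` in an
+# assumed scope matches any required scope sharing that prefix.
+#
+#     scopeMatch(['queue:*'], [['queue:create-task:aws-provisioner-v1/gecko-b-1']])
+#     # -> True: the star scope satisfies the single required set
+#     scopeMatch(['scopeA'], [['scopeA', 'scopeB'], ['scopeC']])
+#     # -> False: neither set is fully satisfied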
+
+
+def scope_match(assumed_scopes, required_scope_sets):
+ """ This is a deprecated form of def scopeMatch(assumedScopes, requiredScopeSets).
+ That form should be used.
+ """
+ import warnings
+ warnings.warn('NOTE: scope_match is deprecated. Use scopeMatch')
+ return scopeMatch(assumed_scopes, required_scope_sets)
+
+
+def makeHttpRequest(method, url, payload, headers, retries=MAX_RETRIES, session=None):
+ """ Make an HTTP request and retry it until success, return request """
+ retry = -1
+ response = None
+ while retry < retries:
+ retry += 1
+ # if this isn't the first retry then we sleep
+ if retry > 0:
+ snooze = float(retry * retry) / 10.0
+ log.info('Sleeping %0.2f seconds for exponential backoff', snooze)
+ time.sleep(snooze)
+
+ # Seek payload to start, if it is a file
+ if hasattr(payload, 'seek'):
+ payload.seek(0)
+
+ log.debug('Making attempt %d', retry)
+ try:
+ response = makeSingleHttpRequest(method, url, payload, headers, session)
+ except requests.exceptions.RequestException as rerr:
+ if retry < retries:
+ log.warn('Retrying because of: %s' % rerr)
+ continue
+ # raise a connection exception
+ raise rerr
+ # Handle non 2xx status code and retry if possible
+ try:
+ response.raise_for_status()
+ except requests.exceptions.RequestException as rerr:
+ pass
+ status = response.status_code
+ if 500 <= status and status < 600 and retry < retries:
+ if retry < retries:
+ log.warn('Retrying because of: %d status' % status)
+ continue
+ else:
+ raise exceptions.TaskclusterRestFailure("Unknown Server Error", superExc=None)
+ return response
+
+ # This code-path should be unreachable
+ assert False, "Error from last retry should have been raised!"
+
+
+def makeSingleHttpRequest(method, url, payload, headers, session=None):
+ method = method.upper()
+ log.debug('Making a %s request to %s', method, url)
+ log.debug('HTTP Headers: %s' % str(headers))
+ log.debug('HTTP Payload: %s (limit 100 char)' % str(payload)[:100])
+ obj = session if session else requests
+ response = obj.request(method.upper(), url, data=payload, headers=headers)
+ log.debug('Received HTTP Status: %s' % response.status_code)
+ log.debug('Received HTTP Headers: %s' % str(response.headers))
+
+ return response
+
+
+def putFile(filename, url, contentType):
+ with open(filename, 'rb') as f:
+ contentLength = os.fstat(f.fileno()).st_size
+ return makeHttpRequest('put', url, f, headers={
+ 'Content-Length': str(contentLength),
+ 'Content-Type': contentType,
+ })
+
+
+def encryptEnvVar(taskId, startTime, endTime, name, value, keyFile):
+ raise Exception("Encrypted environment variables are no longer supported")
+
+
+def decryptMessage(message, privateKey):
+ raise Exception("Decryption is no longer supported")
+
+
+def isExpired(certificate):
+ """ Check if certificate is expired """
+ if isinstance(certificate, six.string_types):
+ certificate = json.loads(certificate)
+ expiry = certificate.get('expiry', 0)
+ return expiry < int(time.time() * 1000) + 20 * 60
+
+
+def optionsFromEnvironment(defaults=None):
+ """Fetch root URL and credentials from the standard TASKCLUSTER_…
+ environment variables and return them in a format suitable for passing to a
+ client constructor."""
+ options = defaults or {}
+ credentials = options.get('credentials', {})
+
+ rootUrl = os.environ.get('TASKCLUSTER_ROOT_URL')
+ if rootUrl:
+ options['rootUrl'] = rootUrl
+
+ clientId = os.environ.get('TASKCLUSTER_CLIENT_ID')
+ if clientId:
+ credentials['clientId'] = clientId
+
+ accessToken = os.environ.get('TASKCLUSTER_ACCESS_TOKEN')
+ if accessToken:
+ credentials['accessToken'] = accessToken
+
+ certificate = os.environ.get('TASKCLUSTER_CERTIFICATE')
+ if certificate:
+ credentials['certificate'] = certificate
+
+ if credentials:
+ options['credentials'] = credentials
+
+ return options
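+
+# Sketch of the intended use (client class and values are illustrative):
+#
+#     # with TASKCLUSTER_ROOT_URL, TASKCLUSTER_CLIENT_ID and
+#     # TASKCLUSTER_ACCESS_TOKEN exported in the environment:
+#     opts = optionsFromEnvironment()
+#     # -> {'rootUrl': 'https://tc.example.com',
+#     #     'credentials': {'clientId': '...', 'accessToken': '...'}}
+#     # queue = taskcluster.Queue(opts)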
diff --git a/third_party/python/taskcluster/test/test_async.py b/third_party/python/taskcluster/test/test_async.py
new file mode 100644
index 0000000000..8a06f422b5
--- /dev/null
+++ b/third_party/python/taskcluster/test/test_async.py
@@ -0,0 +1,63 @@
+from __future__ import division, print_function, absolute_import
+import unittest
+import datetime
+import os
+
+import asyncio
+
+import base
+import taskcluster.aio.auth as subjectAsync
+
+
+@unittest.skipIf(os.environ.get('NO_TESTS_OVER_WIRE'), "Skipping tests over wire")
+class TestAuthenticationAsync(base.TCTest):
+
+ def test_async_works_with_permanent_credentials(self):
+ """we can call methods which require authentication with valid
+ permacreds"""
+
+ loop = asyncio.get_event_loop()
+
+ async def x():
+ async with subjectAsync.createSession(loop=loop) as session:
+ client = subjectAsync.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': {
+ 'clientId': 'tester',
+ 'accessToken': 'no-secret',
+ },
+ }, session=session)
+ result = await client.testAuthenticate({
+ 'clientScopes': ['test:a'],
+ 'requiredScopes': ['test:a'],
+ })
+ self.assertEqual(result, {'scopes': ['test:a'], 'clientId': 'tester'})
+
+ loop.run_until_complete(x())
+
+ def test_async_works_with_temporary_credentials(self):
+ """we can call methods which require authentication with temporary
+ credentials generated by python client"""
+ loop = asyncio.get_event_loop()
+
+ async def x():
+ async with subjectAsync.createSession(loop=loop) as session:
+ tempCred = subjectAsync.createTemporaryCredentials(
+ 'tester',
+ 'no-secret',
+ datetime.datetime.utcnow(),
+ datetime.datetime.utcnow() + datetime.timedelta(hours=1),
+ ['test:xyz'],
+ )
+ client = subjectAsync.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': tempCred,
+ }, session=session)
+
+ result = await client.testAuthenticate({
+ 'clientScopes': ['test:*'],
+ 'requiredScopes': ['test:xyz'],
+ })
+ self.assertEqual(result, {'scopes': ['test:xyz'], 'clientId': 'tester'})
+
+ loop.run_until_complete(x())
diff --git a/third_party/python/taskcluster/test/test_client.py b/third_party/python/taskcluster/test/test_client.py
new file mode 100644
index 0000000000..afe4b254bc
--- /dev/null
+++ b/third_party/python/taskcluster/test/test_client.py
@@ -0,0 +1,955 @@
+from __future__ import division, print_function
+import types
+import unittest
+import time
+import datetime
+from six.moves import urllib
+import os
+import re
+import json
+import copy
+
+import mock
+import httmock
+import requests
+
+import base
+import taskcluster.auth as subject
+import taskcluster.exceptions as exc
+import taskcluster.utils as utils
+import taskcluster_urls as liburls
+
+
+class ClientTest(base.TCTest):
+
+ realTimeSleep = time.sleep
+
+ def setUp(self):
+ subject.config['credentials'] = {
+ 'clientId': 'clientId',
+ 'accessToken': 'accessToken',
+ }
+ keys = [
+ base.createTopicExchangeKey('primary_key', constant='primary'),
+ base.createTopicExchangeKey('norm1'),
+ base.createTopicExchangeKey('norm2'),
+ base.createTopicExchangeKey('norm3'),
+ base.createTopicExchangeKey('multi_key', multipleWords=True),
+ ]
+ topicEntry = base.createApiEntryTopicExchange('topicName', 'topicExchange', routingKey=keys)
+ entries = [
+ base.createApiEntryFunction('no_args_no_input', 0, False),
+ base.createApiEntryFunction('two_args_no_input', 2, False),
+ base.createApiEntryFunction('no_args_with_input', 0, True),
+ base.createApiEntryFunction('two_args_with_input', 2, True),
+ base.createApiEntryFunction('NEVER_CALL_ME', 0, False),
+ topicEntry
+ ]
+ self.apiRef = base.createApiRef(entries=entries)
+ self.clientClass = subject.createApiClient('testApi', self.apiRef)
+ self.client = self.clientClass({'rootUrl': self.test_root_url})
+ # Patch time.sleep so that we don't delay tests
+ sleepPatcher = mock.patch('time.sleep')
+ sleepSleep = sleepPatcher.start()
+ sleepSleep.return_value = None
+ self.addCleanup(sleepSleep.stop)
+
+ def tearDown(self):
+ time.sleep = self.realTimeSleep
+
+
+class TestConstructorOptions(ClientTest):
+
+ def test_baseUrl_not_allowed(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.clientClass({'baseUrl': 'https://bogus.net'})
+
+ def test_rootUrl_set_correctly(self):
+ client = self.clientClass({'rootUrl': self.test_root_url})
+ self.assertEqual(client.options['rootUrl'], self.test_root_url)
+
+ def test_apiVersion_set_correctly(self):
+ client = self.clientClass({'rootUrl': self.test_root_url})
+ self.assertEqual(client.apiVersion, 'v1')
+
+ def test_apiVersion_set_correctly_default(self):
+ apiRef = copy.deepcopy(self.apiRef)
+ del apiRef['reference']['apiVersion']
+ clientClass = subject.createApiClient('testApi', apiRef)
+ client = clientClass({'rootUrl': self.test_root_url})
+ self.assertEqual(client.apiVersion, 'v1')
+
+ def test_serviceName_set_correctly(self):
+ client = self.clientClass({'rootUrl': self.test_root_url})
+ self.assertEqual(client.serviceName, 'fake')
+
+
+class TestSubArgsInRoute(ClientTest):
+
+ def test_valid_no_subs(self):
+ provided = {'route': '/no/args/here', 'name': 'test'}
+ expected = 'no/args/here'
+ result = self.client._subArgsInRoute(provided, {})
+ self.assertEqual(expected, result)
+
+ def test_valid_one_sub(self):
+ provided = {'route': '/one/<argToSub>/here', 'name': 'test'}
+ expected = 'one/value/here'
+ arguments = {'argToSub': 'value'}
+ result = self.client._subArgsInRoute(provided, arguments)
+ self.assertEqual(expected, result)
+
+ def test_invalid_one_sub(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client._subArgsInRoute({
+ 'route': '/one/<argToSub>/here',
+ 'name': 'test'
+ }, {'unused': 'value'})
+
+ def test_invalid_route_no_sub(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client._subArgsInRoute({
+ 'route': 'askldjflkasdf',
+ 'name': 'test'
+ }, {'should': 'fail'})
+
+ def test_invalid_route_no_arg(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client._subArgsInRoute({
+ 'route': 'askldjflkasdf',
+ 'name': 'test'
+ }, {'should': 'fail'})
+
+
+class TestProcessArgs(ClientTest):
+
+ def test_no_args(self):
+ self.assertEqual(({}, None, {}, None, None), self.client._processArgs({'args': [], 'name': 'test'}))
+
+ def test_finds_payload(self):
+ expected = ({}, {'a': 123}, {}, None, None)
+ actual = self.client._processArgs({'args': [], 'name': 'test', 'input': True}, {'a': 123})
+ self.assertEqual(expected, actual)
+
+ def test_positional_args_only(self):
+ expected = {'test': 'works', 'test2': 'still works'}
+ entry = {'args': ['test', 'test2'], 'name': 'test'}
+ actual = self.client._processArgs(entry, 'works', 'still works')
+ self.assertEqual((expected, None, {}, None, None), actual)
+
+ def test_keyword_args_only(self):
+ expected = {'test': 'works', 'test2': 'still works'}
+ entry = {'args': ['test', 'test2'], 'name': 'test'}
+ actual = self.client._processArgs(entry, test2='still works', test='works')
+ self.assertEqual((expected, None, {}, None, None), actual)
+
+ def test_int_args(self):
+ expected = {'test': 'works', 'test2': 42}
+ entry = {'args': ['test', 'test2'], 'name': 'test'}
+ actual = self.client._processArgs(entry, 'works', 42)
+ self.assertEqual((expected, None, {}, None, None), actual)
+
+ def test_keyword_and_positional(self):
+ entry = {'args': ['test'], 'name': 'test'}
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client._processArgs(entry, ['broken'], test='works')
+
+ def test_invalid_not_enough_args(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client._processArgs({'args': ['test'], 'name': 'test'})
+
+ def test_invalid_too_many_positional_args(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client._processArgs({'args': ['test'], 'name': 'test'}, 'enough', 'one too many')
+
+ def test_invalid_too_many_keyword_args(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client._processArgs({
+ 'args': ['test'],
+ 'name': 'test'
+ }, test='enough', test2='one too many')
+
+ def test_invalid_missing_arg_positional(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client._processArgs({'args': ['test', 'test2'], 'name': 'test'}, 'enough')
+
+ def test_invalid_not_enough_args_because_of_overwriting(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client._processArgs({
+ 'args': ['test', 'test2'],
+ 'name': 'test'
+ }, 'enough', test='enough')
+
+ def test_invalid_positional_not_string_empty_dict(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client._processArgs({'args': ['test'], 'name': 'test'}, {})
+
+ def test_invalid_positional_not_string_non_empty_dict(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client._processArgs({'args': ['test'], 'name': 'test'}, {'john': 'ford'})
+
+ def test_calling_convention_1_without_payload(self):
+ params, payload, query, _, _ = self.client._processArgs({'args': ['k1', 'k2'], 'name': 'test'}, 1, 2)
+ self.assertEqual(params, {'k1': 1, 'k2': 2})
+ self.assertEqual(payload, None)
+ self.assertEqual(query, {})
+
+ def test_calling_convention_1_with_payload(self):
+ params, payload, query, _, _ = self.client._processArgs(
+ {'args': ['k1', 'k2'], 'name': 'test', 'input': True},
+ 1,
+ 2,
+ {'A': 123}
+ )
+ self.assertEqual(params, {'k1': 1, 'k2': 2})
+ self.assertEqual(payload, {'A': 123})
+ self.assertEqual(query, {})
+
+ def test_calling_convention_2_without_payload(self):
+ params, payload, query, _, _ = self.client._processArgs({'args': ['k1', 'k2'], 'name': 'test'}, k1=1, k2=2)
+ self.assertEqual(params, {'k1': 1, 'k2': 2})
+ self.assertEqual(payload, None)
+ self.assertEqual(query, {})
+
+ def test_calling_convention_2_with_payload(self):
+ params, payload, query, _, _ = self.client._processArgs(
+ {'args': ['k1', 'k2'], 'name': 'test', 'input': True},
+ {'A': 123}, k1=1, k2=2
+ )
+ self.assertEqual(params, {'k1': 1, 'k2': 2})
+ self.assertEqual(payload, {'A': 123})
+ self.assertEqual(query, {})
+
+ def test_calling_convention_3_without_payload_without_query(self):
+ params, payload, query, _, _ = self.client._processArgs(
+ {'args': ['k1', 'k2'], 'name': 'test'},
+ params={'k1': 1, 'k2': 2}
+ )
+ self.assertEqual(params, {'k1': 1, 'k2': 2})
+ self.assertEqual(payload, None)
+ self.assertEqual(query, {})
+
+ def test_calling_convention_3_with_payload_without_query(self):
+ params, payload, query, _, _ = self.client._processArgs(
+ {'args': ['k1', 'k2'], 'name': 'test'},
+ params={'k1': 1, 'k2': 2},
+ payload={'A': 123}
+ )
+ self.assertEqual(params, {'k1': 1, 'k2': 2})
+ self.assertEqual(payload, {'A': 123})
+ self.assertEqual(query, {})
+
+ def test_calling_convention_3_with_payload_with_query(self):
+ params, payload, query, _, _ = self.client._processArgs(
+ {'args': ['k1', 'k2'], 'name': 'test'},
+ params={'k1': 1, 'k2': 2},
+ payload={'A': 123},
+ query={'B': 456}
+ )
+ self.assertEqual(params, {'k1': 1, 'k2': 2})
+ self.assertEqual(payload, {'A': 123})
+ self.assertEqual(query, {'B': 456})
+
+ def test_calling_convention_3_without_payload_with_query(self):
+ params, payload, query, _, _ = self.client._processArgs(
+ {'args': ['k1', 'k2'], 'name': 'test'},
+ params={'k1': 1, 'k2': 2},
+ query={'B': 456}
+ )
+ self.assertEqual(params, {'k1': 1, 'k2': 2})
+ self.assertEqual(payload, None)
+ self.assertEqual(query, {'B': 456})
+
+ def test_calling_convention_3_with_positional_arguments_with_payload_with_query(self):
+ params, payload, query, _, _ = self.client._processArgs(
+ {'args': ['k1', 'k2'], 'name': 'test'},
+ 1,
+ 2,
+ query={'B': 456},
+ payload={'A': 123}
+ )
+ self.assertEqual(params, {'k1': 1, 'k2': 2})
+ self.assertEqual(payload, {'A': 123})
+ self.assertEqual(query, {'B': 456})
+
+ def test_calling_convention_3_with_pagination(self):
+ def a(x):
+ return x
+
+ _, _, _, ph, _ = self.client._processArgs({
+ 'args': ['k1', 'k2'],
+ 'name': 'test',
+ 'query': ['continuationToken', 'limit'],
+ }, 1, 2, paginationHandler=a)
+ self.assertIs(ph, a)
+
+ def test_calling_convention_3_with_pos_args_same_as_param_kwarg_dict_vals_with_payload_with_query(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ params, payload, query, _, _ = self.client._processArgs(
+ {'args': ['k1', 'k2'], 'name': 'test'},
+ 1,
+ 2,
+ params={'k1': 1, 'k2': 2},
+ query={'B': 456},
+ payload={'A': 123}
+ )
+
+
+# This could probably be done better with Mock
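+# It is a minimal stand-in for a requests.Response, exposing only status_code,
+# json() and raise_for_status().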
+class ObjWithDotJson(object):
+
+ def __init__(self, status_code, x):
+ self.status_code = status_code
+ self.x = x
+
+ def json(self):
+ return self.x
+
+ def raise_for_status(self):
+ if self.status_code >= 300 or self.status_code < 200:
+ raise requests.exceptions.HTTPError()
+
+
+class TestMakeHttpRequest(ClientTest):
+
+ apiPath = liburls.api(ClientTest.test_root_url, 'fake', 'v1', 'test')
+
+    def setUp(self):
+        ClientTest.setUp(self)
+
+ def test_success_first_try(self):
+ with mock.patch.object(utils, 'makeSingleHttpRequest') as p:
+ expected = {'test': 'works'}
+ p.return_value = ObjWithDotJson(200, expected)
+
+ v = self.client._makeHttpRequest('GET', 'test', None)
+ p.assert_called_once_with('GET', self.apiPath, None, mock.ANY)
+ self.assertEqual(expected, v)
+
+ def test_success_first_try_payload(self):
+ with mock.patch.object(utils, 'makeSingleHttpRequest') as p:
+ expected = {'test': 'works'}
+ p.return_value = ObjWithDotJson(200, expected)
+
+ v = self.client._makeHttpRequest('GET', 'test', {'payload': 2})
+ p.assert_called_once_with('GET', self.apiPath,
+ utils.dumpJson({'payload': 2}), mock.ANY)
+ self.assertEqual(expected, v)
+
+ def test_success_fifth_try_status_code(self):
+ with mock.patch.object(utils, 'makeSingleHttpRequest') as p:
+ expected = {'test': 'works'}
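+            # four 500 responses followed by a 200: the client should retry
+            # through the failures and return the final body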
+ sideEffect = [
+ ObjWithDotJson(500, None),
+ ObjWithDotJson(500, None),
+ ObjWithDotJson(500, None),
+ ObjWithDotJson(500, None),
+ ObjWithDotJson(200, expected)
+ ]
+ p.side_effect = sideEffect
+ expectedCalls = [mock.call('GET', self.apiPath, None, mock.ANY)
+ for x in range(self.client.options['maxRetries'])]
+
+ v = self.client._makeHttpRequest('GET', 'test', None)
+ p.assert_has_calls(expectedCalls)
+ self.assertEqual(expected, v)
+
+ def test_exhaust_retries_try_status_code(self):
+ with mock.patch.object(utils, 'makeSingleHttpRequest') as p:
+ msg = {'message': 'msg', 'test': 'works'}
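+            # more 500 responses than maxRetries allows: every retry is
+            # exhausted and TaskclusterRestFailure should be raised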
+ sideEffect = [
+ ObjWithDotJson(500, msg),
+ ObjWithDotJson(500, msg),
+ ObjWithDotJson(500, msg),
+ ObjWithDotJson(500, msg),
+ ObjWithDotJson(500, msg), # exhaust retries
+ ObjWithDotJson(500, msg),
+ ObjWithDotJson(500, msg),
+ ObjWithDotJson(500, msg),
+ ObjWithDotJson(500, msg),
+ ObjWithDotJson(500, msg),
+ ObjWithDotJson(500, msg),
+ ObjWithDotJson(200, {'got this': 'wrong'})
+ ]
+ p.side_effect = sideEffect
+ expectedCalls = [mock.call('GET', self.apiPath, None, mock.ANY)
+ for x in range(self.client.options['maxRetries'] + 1)]
+
+ with self.assertRaises(exc.TaskclusterRestFailure):
+ try:
+ self.client._makeHttpRequest('GET', 'test', None)
+ except exc.TaskclusterRestFailure as err:
+ self.assertEqual('msg', str(err))
+ self.assertEqual(500, err.status_code)
+ self.assertEqual(msg, err.body)
+ raise err
+ p.assert_has_calls(expectedCalls)
+
+ def test_success_fifth_try_connection_errors(self):
+ with mock.patch.object(utils, 'makeSingleHttpRequest') as p:
+ expected = {'test': 'works'}
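+            # four connection-level errors followed by a 200: these should be
+            # retried just like 5xx responses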
+ sideEffect = [
+ requests.exceptions.RequestException,
+ requests.exceptions.RequestException,
+ requests.exceptions.RequestException,
+ requests.exceptions.RequestException,
+ ObjWithDotJson(200, expected)
+ ]
+ p.side_effect = sideEffect
+ expectedCalls = [mock.call('GET', self.apiPath, None, mock.ANY)
+ for x in range(self.client.options['maxRetries'])]
+
+ v = self.client._makeHttpRequest('GET', 'test', None)
+ p.assert_has_calls(expectedCalls)
+ self.assertEqual(expected, v)
+
+ def test_failure_status_code(self):
+ with mock.patch.object(utils, 'makeSingleHttpRequest') as p:
+ p.return_value = ObjWithDotJson(500, None)
+ expectedCalls = [mock.call('GET', self.apiPath, None, mock.ANY)
+ for x in range(self.client.options['maxRetries'])]
+ with self.assertRaises(exc.TaskclusterRestFailure):
+ self.client._makeHttpRequest('GET', 'test', None)
+ p.assert_has_calls(expectedCalls)
+
+ def test_failure_connection_errors(self):
+ with mock.patch.object(utils, 'makeSingleHttpRequest') as p:
+ p.side_effect = requests.exceptions.RequestException
+ expectedCalls = [mock.call('GET', self.apiPath, None, mock.ANY)
+ for x in range(self.client.options['maxRetries'])]
+ with self.assertRaises(exc.TaskclusterConnectionError):
+ self.client._makeHttpRequest('GET', 'test', None)
+ p.assert_has_calls(expectedCalls)
+
+
+class TestOptions(ClientTest):
+
+ def test_change_default_doesnt_change_previous_instances(self):
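+        # mutating the module-level default config must not affect a client
+        # constructed before the change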
+ prevMaxRetries = subject._defaultConfig['maxRetries']
+ with mock.patch.dict(subject._defaultConfig, {'maxRetries': prevMaxRetries + 1}):
+ self.assertEqual(self.client.options['maxRetries'], prevMaxRetries)
+
+ def test_credentials_which_cannot_be_encoded_in_unicode_work(self):
+ badCredentials = {
+ 'accessToken': u"\U0001F4A9",
+ 'clientId': u"\U0001F4A9",
+ }
+ with self.assertRaises(exc.TaskclusterAuthFailure):
+ subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': badCredentials,
+ })
+
+
+class TestMakeApiCall(ClientTest):
+ """ This class covers both the _makeApiCall function logic as well as the
+ logic involved in setting up the api member functions since these are very
+ related things"""
+
+ def setUp(self):
+ ClientTest.setUp(self)
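+        # NEVER_CALL_ME is an entry on the fake API that no test should ever
+        # invoke; patch it so any accidental call fails loudly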
+ patcher = mock.patch.object(self.client, 'NEVER_CALL_ME')
+ never_call = patcher.start()
+ never_call.side_effect = AssertionError
+ self.addCleanup(never_call.stop)
+
+ def test_creates_methods(self):
+ self.assertIsInstance(self.client.no_args_no_input, types.MethodType)
+
+ def test_methods_setup_correctly(self):
+        # Because of how scoping works, I've had trouble where the last API entry
+        # dict is used for all entries, which is wrong. This is to make sure that
+        # the scoping isn't broken.
+ self.assertIsNot(self.client.NEVER_CALL_ME, self.client.no_args_no_input)
+
+ def test_hits_no_args_no_input(self):
+ expected = 'works'
+ with mock.patch.object(self.client, '_makeHttpRequest') as patcher:
+ patcher.return_value = expected
+
+ actual = self.client.no_args_no_input()
+ self.assertEqual(expected, actual)
+
+ patcher.assert_called_once_with('get', 'no_args_no_input', None)
+
+ def test_hits_two_args_no_input(self):
+ expected = 'works'
+ with mock.patch.object(self.client, '_makeHttpRequest') as patcher:
+ patcher.return_value = expected
+
+ actual = self.client.two_args_no_input('argone', 'argtwo')
+ self.assertEqual(expected, actual)
+
+ patcher.assert_called_once_with('get', 'two_args_no_input/argone/argtwo', None)
+
+ def test_hits_no_args_with_input(self):
+ expected = 'works'
+ with mock.patch.object(self.client, '_makeHttpRequest') as patcher:
+ patcher.return_value = expected
+
+ actual = self.client.no_args_with_input({})
+ self.assertEqual(expected, actual)
+
+ patcher.assert_called_once_with('get', 'no_args_with_input', {})
+
+ def test_hits_two_args_with_input(self):
+ expected = 'works'
+ with mock.patch.object(self.client, '_makeHttpRequest') as patcher:
+ patcher.return_value = expected
+
+ actual = self.client.two_args_with_input('argone', 'argtwo', {})
+ self.assertEqual(expected, actual)
+
+ patcher.assert_called_once_with('get', 'two_args_with_input/argone/argtwo', {})
+
+    def test_input_is_processed(self):
+ expected = 'works'
+ expected_input = {'test': 'does work'}
+ with mock.patch.object(self.client, '_makeHttpRequest') as patcher:
+ patcher.return_value = expected
+
+ actual = self.client.no_args_with_input(expected_input)
+ self.assertEqual(expected, actual)
+
+ patcher.assert_called_once_with('get', 'no_args_with_input', expected_input)
+
+ def test_kwargs(self):
+ expected = 'works'
+ with mock.patch.object(self.client, '_makeHttpRequest') as patcher:
+ patcher.return_value = expected
+
+ actual = self.client.two_args_with_input({}, arg0='argone', arg1='argtwo')
+ self.assertEqual(expected, actual)
+
+ patcher.assert_called_once_with('get', 'two_args_with_input/argone/argtwo', {})
+
+ def test_mixing_kw_and_positional_fails(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client.two_args_no_input('arg1', arg2='arg2')
+
+ def test_missing_input_raises(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client.no_args_with_input()
+
+
+# TODO: I should run the same things through the node client and compare the output
+class TestTopicExchange(ClientTest):
+
+ def test_string_pass_through(self):
+ expected = 'johnwrotethis'
+ actual = self.client.topicName(expected)
+ self.assertEqual(expected, actual['routingKeyPattern'])
+
+ def test_exchange(self):
+ expected = 'exchange/taskcluster-fake/v1/topicExchange'
+ actual = self.client.topicName('')
+ self.assertEqual(expected, actual['exchange'])
+
+ def test_exchange_trailing_slash(self):
+ self.client.options['exchangePrefix'] = 'exchange/taskcluster-fake2/v1/'
+ expected = 'exchange/taskcluster-fake2/v1/topicExchange'
+ actual = self.client.topicName('')
+ self.assertEqual(expected, actual['exchange'])
+
+ def test_constant(self):
+ expected = 'primary.*.*.*.#'
+ actual = self.client.topicName({})
+ self.assertEqual(expected, actual['routingKeyPattern'])
+
+ def test_does_insertion(self):
+ expected = 'primary.*.value2.*.#'
+ actual = self.client.topicName({'norm2': 'value2'})
+ self.assertEqual(expected, actual['routingKeyPattern'])
+
+ def test_too_many_star_args(self):
+ with self.assertRaises(exc.TaskclusterTopicExchangeFailure):
+ self.client.topicName({'taskId': '123'}, 'another')
+
+ def test_both_args_and_kwargs(self):
+ with self.assertRaises(exc.TaskclusterTopicExchangeFailure):
+ self.client.topicName({'taskId': '123'}, taskId='123')
+
+ def test_no_args_no_kwargs(self):
+ expected = 'primary.*.*.*.#'
+ actual = self.client.topicName()
+ self.assertEqual(expected, actual['routingKeyPattern'])
+ actual = self.client.topicName({})
+ self.assertEqual(expected, actual['routingKeyPattern'])
+
+
+class TestBuildUrl(ClientTest):
+
+ apiPath = liburls.api(ClientTest.test_root_url, 'fake', 'v1', 'two_args_no_input/arg0/arg1')
+
+ def test_build_url_positional(self):
+ actual = self.client.buildUrl('two_args_no_input', 'arg0', 'arg1')
+ self.assertEqual(self.apiPath, actual)
+
+ def test_build_url_keyword(self):
+ actual = self.client.buildUrl('two_args_no_input', arg0='arg0', arg1='arg1')
+ self.assertEqual(self.apiPath, actual)
+
+ def test_build_url_query_string(self):
+ actual = self.client.buildUrl(
+ 'two_args_no_input',
+ params={
+ 'arg0': 'arg0',
+ 'arg1': 'arg1'
+ },
+ query={'qs0': 1}
+ )
+ self.assertEqual(self.apiPath + '?qs0=1', actual)
+
+ def test_fails_to_build_url_for_missing_method(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client.buildUrl('non-existing')
+
+ def test_fails_to_build_not_enough_args(self):
+ with self.assertRaises(exc.TaskclusterFailure):
+ self.client.buildUrl('two_args_no_input', 'not-enough-args')
+
+
+class TestBuildSignedUrl(ClientTest):
+
+ apiPath = liburls.api(ClientTest.test_root_url, 'fake', 'v1', 'two_args_no_input/arg0/arg1')
+
+ def test_builds_surl_positional(self):
+ actual = self.client.buildSignedUrl('two_args_no_input', 'arg0', 'arg1')
+ actual = re.sub('bewit=[^&]*', 'bewit=X', actual)
+ self.assertEqual(self.apiPath + '?bewit=X', actual)
+
+ def test_builds_surl_keyword(self):
+ actual = self.client.buildSignedUrl('two_args_no_input', arg0='arg0', arg1='arg1')
+ actual = re.sub('bewit=[^&]*', 'bewit=X', actual)
+ self.assertEqual(self.apiPath + '?bewit=X', actual)
+
+
+class TestMockHttpCalls(ClientTest):
+
+ """Test entire calls down to the requests layer, ensuring they have
+ well-formed URLs and handle request and response bodies properly. This
+    verifies that we can call real methods with both positional and keyword
+    args."""
+
+ def setUp(self):
+ ClientTest.setUp(self)
+ self.fakeResponse = ''
+
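+        # httmock handler that records the requested URL and request object so
+        # individual tests can assert against them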
+ def fakeSite(url, request):
+ self.gotUrl = urllib.parse.urlunsplit(url)
+ self.gotRequest = request
+ return self.fakeResponse
+ self.fakeSite = fakeSite
+
+ def test_no_args_no_input(self):
+ with httmock.HTTMock(self.fakeSite):
+ self.client.no_args_no_input()
+ self.assertEqual(self.gotUrl, 'https://tc-tests.example.com/api/fake/v1/no_args_no_input')
+
+ def test_two_args_no_input(self):
+ with httmock.HTTMock(self.fakeSite):
+ self.client.two_args_no_input('1', '2')
+ self.assertEqual(self.gotUrl, 'https://tc-tests.example.com/api/fake/v1/two_args_no_input/1/2')
+
+ def test_no_args_with_input(self):
+ with httmock.HTTMock(self.fakeSite):
+ self.client.no_args_with_input({'x': 1})
+ self.assertEqual(self.gotUrl, 'https://tc-tests.example.com/api/fake/v1/no_args_with_input')
+ self.assertEqual(json.loads(self.gotRequest.body), {"x": 1})
+
+ def test_no_args_with_empty_input(self):
+ with httmock.HTTMock(self.fakeSite):
+ self.client.no_args_with_input({})
+ self.assertEqual(self.gotUrl, 'https://tc-tests.example.com/api/fake/v1/no_args_with_input')
+ self.assertEqual(json.loads(self.gotRequest.body), {})
+
+ def test_two_args_with_input(self):
+ with httmock.HTTMock(self.fakeSite):
+ self.client.two_args_with_input('a', 'b', {'x': 1})
+ self.assertEqual(self.gotUrl,
+ 'https://tc-tests.example.com/api/fake/v1/two_args_with_input/a/b')
+ self.assertEqual(json.loads(self.gotRequest.body), {"x": 1})
+
+ def test_kwargs(self):
+ with httmock.HTTMock(self.fakeSite):
+ self.client.two_args_with_input(
+ {'x': 1}, arg0='a', arg1='b')
+ self.assertEqual(self.gotUrl,
+ 'https://tc-tests.example.com/api/fake/v1/two_args_with_input/a/b')
+ self.assertEqual(json.loads(self.gotRequest.body), {"x": 1})
+
+
+@unittest.skipIf(os.environ.get('NO_TESTS_OVER_WIRE'), "Skipping tests over wire")
+class TestAuthentication(base.TCTest):
+
+ def test_no_creds_needed(self):
+ """we can call methods which require no scopes with an unauthenticated
+ client"""
+ # mock this request so we don't depend on the existence of a client
+ @httmock.all_requests
+ def auth_response(url, request):
+ self.assertEqual(urllib.parse.urlunsplit(url),
+ 'https://tc-tests.example.com/api/auth/v1/clients/abc')
+            self.assertNotIn('Authorization', request.headers)
+ headers = {'content-type': 'application/json'}
+ content = {"clientId": "abc"}
+ return httmock.response(200, content, headers, None, 5, request)
+
+ with httmock.HTTMock(auth_response):
+ client = subject.Auth({"rootUrl": "https://tc-tests.example.com", "credentials": {}})
+ result = client.client('abc')
+ self.assertEqual(result, {"clientId": "abc"})
+
+ def test_permacred_simple(self):
+ """we can call methods which require authentication with valid
+ permacreds"""
+ client = subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': {
+ 'clientId': 'tester',
+ 'accessToken': 'no-secret',
+ }
+ })
+ result = client.testAuthenticate({
+ 'clientScopes': ['test:a'],
+ 'requiredScopes': ['test:a'],
+ })
+ self.assertEqual(result, {'scopes': ['test:a'], 'clientId': 'tester'})
+
+ def test_permacred_simple_authorizedScopes(self):
+ client = subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': {
+ 'clientId': 'tester',
+ 'accessToken': 'no-secret',
+ },
+ 'authorizedScopes': ['test:a', 'test:b'],
+ })
+ result = client.testAuthenticate({
+ 'clientScopes': ['test:*'],
+ 'requiredScopes': ['test:a'],
+ })
+ self.assertEqual(result, {'scopes': ['test:a', 'test:b'],
+ 'clientId': 'tester'})
+
+ def test_unicode_permacred_simple(self):
+ """Unicode strings that encode to ASCII in credentials do not cause issues"""
+ client = subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': {
+ 'clientId': u'tester',
+ 'accessToken': u'no-secret',
+ }
+ })
+ result = client.testAuthenticate({
+ 'clientScopes': ['test:a'],
+ 'requiredScopes': ['test:a'],
+ })
+ self.assertEqual(result, {'scopes': ['test:a'], 'clientId': 'tester'})
+
+ def test_invalid_unicode_permacred_simple(self):
+ """Unicode strings that do not encode to ASCII in credentials cause issues"""
+ with self.assertRaises(exc.TaskclusterAuthFailure):
+ subject.Auth({
+ 'rootUrl': self.test_root_url,
+ 'credentials': {
+ 'clientId': u"\U0001F4A9",
+ 'accessToken': u"\U0001F4A9",
+ }
+ })
+
+ def test_permacred_insufficient_scopes(self):
+ """A call with insufficient scopes results in an error"""
+ client = subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': {
+ 'clientId': 'tester',
+ 'accessToken': 'no-secret',
+ }
+ })
+        # TODO: this should be TaskclusterAuthFailure; most likely the client
+ # is expecting AuthorizationFailure instead of AuthenticationFailure
+ with self.assertRaises(exc.TaskclusterRestFailure):
+ client.testAuthenticate({
+ 'clientScopes': ['test:*'],
+ 'requiredScopes': ['something-more'],
+ })
+
+ def test_temporary_credentials(self):
+ """we can call methods which require authentication with temporary
+ credentials generated by python client"""
+ tempCred = subject.createTemporaryCredentials(
+ 'tester',
+ 'no-secret',
+ datetime.datetime.utcnow(),
+ datetime.datetime.utcnow() + datetime.timedelta(hours=1),
+ ['test:xyz'],
+ )
+ client = subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': tempCred,
+ })
+
+ result = client.testAuthenticate({
+ 'clientScopes': ['test:*'],
+ 'requiredScopes': ['test:xyz'],
+ })
+ self.assertEqual(result, {'scopes': ['test:xyz'], 'clientId': 'tester'})
+
+ def test_named_temporary_credentials(self):
+ tempCred = subject.createTemporaryCredentials(
+ 'tester',
+ 'no-secret',
+ datetime.datetime.utcnow(),
+ datetime.datetime.utcnow() + datetime.timedelta(hours=1),
+ ['test:xyz'],
+ name='credName'
+ )
+ client = subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': tempCred,
+ })
+
+ result = client.testAuthenticate({
+ 'clientScopes': ['test:*', 'auth:create-client:credName'],
+ 'requiredScopes': ['test:xyz'],
+ })
+ self.assertEqual(result, {'scopes': ['test:xyz'], 'clientId': 'credName'})
+
+ def test_temporary_credentials_authorizedScopes(self):
+ tempCred = subject.createTemporaryCredentials(
+ 'tester',
+ 'no-secret',
+ datetime.datetime.utcnow(),
+ datetime.datetime.utcnow() + datetime.timedelta(hours=1),
+ ['test:xyz:*'],
+ )
+ client = subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': tempCred,
+ 'authorizedScopes': ['test:xyz:abc'],
+ })
+
+ result = client.testAuthenticate({
+ 'clientScopes': ['test:*'],
+ 'requiredScopes': ['test:xyz:abc'],
+ })
+ self.assertEqual(result, {'scopes': ['test:xyz:abc'],
+ 'clientId': 'tester'})
+
+ def test_named_temporary_credentials_authorizedScopes(self):
+ tempCred = subject.createTemporaryCredentials(
+ 'tester',
+ 'no-secret',
+ datetime.datetime.utcnow(),
+ datetime.datetime.utcnow() + datetime.timedelta(hours=1),
+ ['test:xyz:*'],
+ name='credName'
+ )
+ client = subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': tempCred,
+ 'authorizedScopes': ['test:xyz:abc'],
+ })
+
+ result = client.testAuthenticate({
+ 'clientScopes': ['test:*', 'auth:create-client:credName'],
+ 'requiredScopes': ['test:xyz:abc'],
+ })
+ self.assertEqual(result, {'scopes': ['test:xyz:abc'],
+ 'clientId': 'credName'})
+
+ def test_signed_url(self):
+ """we can use a signed url built with the python client"""
+ client = subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': {
+ 'clientId': 'tester',
+ 'accessToken': 'no-secret',
+ }
+ })
+ signedUrl = client.buildSignedUrl('testAuthenticateGet')
+ response = requests.get(signedUrl)
+ response.raise_for_status()
+ response = response.json()
+ response['scopes'].sort()
+ self.assertEqual(response, {
+ 'scopes': sorted(['test:*', u'auth:create-client:test:*']),
+ 'clientId': 'tester',
+ })
+
+ def test_signed_url_bad_credentials(self):
+ client = subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': {
+ 'clientId': 'tester',
+ 'accessToken': 'wrong-secret',
+ }
+ })
+ signedUrl = client.buildSignedUrl('testAuthenticateGet')
+ response = requests.get(signedUrl)
+ with self.assertRaises(requests.exceptions.RequestException):
+ response.raise_for_status()
+ self.assertEqual(401, response.status_code)
+
+ def test_temp_credentials_signed_url(self):
+ tempCred = subject.createTemporaryCredentials(
+ 'tester',
+ 'no-secret',
+ datetime.datetime.utcnow(),
+ datetime.datetime.utcnow() + datetime.timedelta(hours=1),
+ ['test:*'],
+ )
+ client = subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': tempCred,
+ })
+ signedUrl = client.buildSignedUrl('testAuthenticateGet')
+ response = requests.get(signedUrl)
+ response.raise_for_status()
+ response = response.json()
+ self.assertEqual(response, {
+ 'scopes': ['test:*'],
+ 'clientId': 'tester',
+ })
+
+ def test_signed_url_authorizedScopes(self):
+ client = subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': {
+ 'clientId': 'tester',
+ 'accessToken': 'no-secret',
+ },
+ 'authorizedScopes': ['test:authenticate-get'],
+ })
+ signedUrl = client.buildSignedUrl('testAuthenticateGet')
+ response = requests.get(signedUrl)
+ response.raise_for_status()
+ response = response.json()
+ self.assertEqual(response, {
+ 'scopes': ['test:authenticate-get'],
+ 'clientId': 'tester',
+ })
+
+ def test_temp_credentials_signed_url_authorizedScopes(self):
+ tempCred = subject.createTemporaryCredentials(
+ 'tester',
+ 'no-secret',
+ datetime.datetime.utcnow(),
+ datetime.datetime.utcnow() + datetime.timedelta(hours=1),
+ ['test:*'],
+ )
+ client = subject.Auth({
+ 'rootUrl': self.real_root_url,
+ 'credentials': tempCred,
+ 'authorizedScopes': ['test:authenticate-get'],
+ })
+ signedUrl = client.buildSignedUrl('testAuthenticateGet')
+ response = requests.get(signedUrl)
+ response.raise_for_status()
+ response = response.json()
+ self.assertEqual(response, {
+ 'scopes': ['test:authenticate-get'],
+ 'clientId': 'tester',
+ })
diff --git a/third_party/python/taskcluster/test/test_utils.py b/third_party/python/taskcluster/test/test_utils.py
new file mode 100644
index 0000000000..31328199d8
--- /dev/null
+++ b/third_party/python/taskcluster/test/test_utils.py
@@ -0,0 +1,439 @@
+import datetime
+import uuid
+import os
+
+import taskcluster.utils as subject
+import dateutil.parser
+import httmock
+import mock
+import requests
+
+import base
+from unittest import TestCase
+from hypothesis import given
+import hypothesis.strategies as st
+
+
+# https://docs.python.org/2/library/datetime.html#tzinfo-objects
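+# Minimal fixed-offset tzinfo for UTC; avoids depending on pytz or on
+# datetime.timezone, which Python 2 lacks.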
+class UTC(datetime.tzinfo):
+ """UTC"""
+
+ def utcoffset(self, dt):
+ return datetime.timedelta(0)
+
+ def tzname(self, dt):
+ return 'UTC'
+
+ def dst(self, dt):
+ return datetime.timedelta(0)
+
+
+utc = UTC()
+
+
+class StringDateTests(base.TCTest):
+ def test_naive(self):
+ dateObj = datetime.datetime(
+ year=2000,
+ month=1,
+ day=1,
+ hour=1,
+ minute=1,
+ second=1
+ )
+ expected = '2000-01-01T01:01:01Z'
+ actual = subject.stringDate(dateObj)
+ self.assertEqual(expected, actual)
+
+ def test_aware(self):
+ dateObj = datetime.datetime(
+ year=2000,
+ month=1,
+ day=1,
+ hour=1,
+ minute=1,
+ second=1,
+ tzinfo=utc
+ )
+ expected = '2000-01-01T01:01:01Z'
+ actual = subject.stringDate(dateObj)
+ self.assertEqual(expected, actual)
+
+
+class DumpJsonTests(base.TCTest):
+ def test_has_no_spaces(self):
+ expected = [
+ '{"test":"works","doesit":"yes"}',
+ '{"doesit":"yes","test":"works"}'
+ ]
+ actual = subject.dumpJson({'test': 'works', 'doesit': 'yes'})
+ self.assertTrue(actual in expected)
+
+ def test_serializes_naive_date(self):
+ dateObj = datetime.datetime(
+ year=2000,
+ month=1,
+ day=1,
+ hour=1,
+ minute=1,
+ second=1
+ )
+ expected = '{"date":"2000-01-01T01:01:01Z"}'
+ actual = subject.dumpJson({'date': dateObj})
+ self.assertEqual(expected, actual)
+
+ def test_serializes_aware_date(self):
+ dateObj = datetime.datetime(
+ year=2000,
+ month=1,
+ day=1,
+ hour=1,
+ minute=1,
+ second=1,
+ tzinfo=utc
+ )
+ expected = '{"date":"2000-01-01T01:01:01Z"}'
+ actual = subject.dumpJson({'date': dateObj})
+ self.assertEqual(expected, actual)
+
+
+class TestBase64Utils(base.TCTest):
+ def test_encode_string_for_b64_header(self):
+        # Really long strings trigger newlines every 72 characters
+ expected = 'YWJjZGVm' * 500
+ expected = expected.encode('ascii')
+ actual = subject.encodeStringForB64Header('abcdef' * 500)
+ self.assertEqual(expected, actual)
+
+ def test_makeb64urlsafe(self):
+ expected = b'-_'
+ actual = subject.makeB64UrlSafe('+/')
+ self.assertEqual(expected, actual)
+
+ def test_makeb64urlunsafe(self):
+ expected = b'+/'
+ actual = subject.makeB64UrlUnsafe('-_')
+ self.assertEqual(expected, actual)
+
+
+class TestSlugId(base.TCTest):
+ def test_slug_id_is_always_nice(self):
+ with mock.patch('uuid.uuid4') as p:
+ # first bit of uuid set, which should get unset
+ p.return_value = uuid.UUID('bed97923-7616-4ec8-85ed-4b695f67ac2e')
+ expected = b'Ptl5I3YWTsiF7UtpX2esLg'
+ actual = subject.slugId()
+ self.assertEqual(expected, actual)
+
+ def test_slug_id_nice_stays_nice(self):
+ with mock.patch('uuid.uuid4') as p:
+ # first bit of uuid unset, should remain unset
+ p.return_value = uuid.UUID('3ed97923-7616-4ec8-85ed-4b695f67ac2e')
+ expected = b'Ptl5I3YWTsiF7UtpX2esLg'
+ actual = subject.slugId()
+ self.assertEqual(expected, actual)
+
+
+class TestMakeSingleHttpRequest(base.TCTest):
+ def test_success_no_payload(self):
+ @httmock.all_requests
+ def response_content(url, request):
+ return {'status_code': 200, 'content': {}}
+
+ with httmock.HTTMock(response_content):
+ d = subject.makeSingleHttpRequest('GET', 'http://www.example.com', {}, {})
+ self.assertEqual(d.json(), {})
+ self.assertEqual(d.status_code, 200)
+ d.raise_for_status()
+
+ def test_success_payload(self):
+ @httmock.all_requests
+ def response_content(url, request):
+ self.assertEqual(request.body, 'i=j')
+ return {'status_code': 200, 'content': {'k': 'l'}}
+
+ with httmock.HTTMock(response_content):
+ d = subject.makeSingleHttpRequest('GET', 'http://www.example.com', {'i': 'j'}, {})
+ self.assertEqual(d.json(), {'k': 'l'})
+ self.assertEqual(d.status_code, 200)
+ d.raise_for_status()
+
+ def test_failure(self):
+ @httmock.all_requests
+        def response_content(url, request):
+ return {'status_code': 404}
+
+ with httmock.HTTMock(response_content):
+ d = subject.makeSingleHttpRequest('GET', 'http://www.example.com', {}, {})
+ with self.assertRaises(requests.exceptions.RequestException):
+ d.raise_for_status()
+
+
+class TestPutfile(base.TCTest):
+ def test_success_put_file(self):
+ with mock.patch.object(subject, 'makeSingleHttpRequest') as p:
+ class FakeResp:
+ status_code = 200
+
+ def raise_for_status(self):
+ pass
+
+ p.return_value = FakeResp()
+ subject.putFile('setup.py', 'http://www.example.com', 'text/plain')
+ p.assert_called_once_with('put', 'http://www.example.com', mock.ANY, mock.ANY, mock.ANY)
+
+
+class TestStableSlugIdClosure(TestCase):
+
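+    # stableSlugId() returns a closure that maps each name to a stable slug;
+    # distinct closures produce distinct slugs for the same name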
+ @given(st.text())
+ def test_repeat(self, text):
+ s = subject.stableSlugId()
+ self.assertEqual(s(text), s(text))
+
+ def test_not_equal(self):
+ s = subject.stableSlugId()
+ self.assertNotEqual(s("first"), s("second"))
+
+ @given(st.text())
+ def test_invalidate(self, text):
+ s1 = subject.stableSlugId()
+ s2 = subject.stableSlugId()
+ self.assertNotEqual(s1(text), s2(text))
+
+
+class TestFromNow(TestCase):
+
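+    # each example pairs a relative-time expression and a base timestamp with
+    # the absolute time fromNow() is expected to return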
+ examples = [
+ {"expr": '1 hour', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T17:27:20.974Z'},
+ {"expr": '3h', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T19:27:20.974Z'},
+ {"expr": '1 hours', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T17:27:20.974Z'},
+ {"expr": '-1 hour', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T15:27:20.974Z'},
+ {"expr": '1 m', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:28:20.974Z'},
+ {"expr": '1m', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:28:20.974Z'},
+ {"expr": '12 min', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:39:20.974Z'},
+ {"expr": '12min', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:39:20.974Z'},
+ {"expr": '11m', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:38:20.974Z'},
+ {"expr": '11 m', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:38:20.974Z'},
+ {"expr": '1 day', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-20T16:27:20.974Z'},
+ {"expr": '2 days', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-21T16:27:20.974Z'},
+ {"expr": '1 second', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-19T16:27:21.974Z'},
+ {"expr": '1 week', "from": '2017-01-19T16:27:20.974Z', "result": '2017-01-26T16:27:20.974Z'},
+ {"expr": '1 month', "from": '2017-01-19T16:27:20.974Z', "result": '2017-02-18T16:27:20.974Z'},
+ {"expr": '30 mo', "from": '2017-01-19T16:27:20.974Z', "result": '2019-07-08T16:27:20.974Z'},
+ {"expr": '-30 mo', "from": '2017-01-19T16:27:20.974Z', "result": '2014-08-03T16:27:20.974Z'},
+ {"expr": '1 year', "from": '2017-01-19T16:27:20.974Z', "result": '2018-01-19T16:27:20.974Z'},
+ ]
+
+ def test_examples(self):
+ for example in self.examples:
+ from_ = dateutil.parser.parse(example['from'])
+ res = dateutil.parser.parse(example['result'])
+ self.assertEqual(subject.fromNow(example['expr'], from_), res)
+
+
+class TestScopeMatch(TestCase):
+ def assertScopeMatch(self, assumed, requiredScopeSets, expected):
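+        # expected is either the value scopeMatch should return, or the string
+        # 'exception' when the call is expected to raise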
+ try:
+ result = subject.scopeMatch(assumed, requiredScopeSets)
+ self.assertEqual(result, expected)
+ except:
+ if expected != 'exception':
+ raise
+
+ def test_single_exact_match_string_except_1(self):
+ self.assertScopeMatch(["foo:bar"], "foo:bar", "exception")
+
+ def test_single_exact_match_string_except_2(self):
+ self.assertScopeMatch(["foo:bar"], ["foo:bar"], "exception")
+
+ def test_single_exact_match_string(self):
+ self.assertScopeMatch(["foo:bar"], [["foo:bar"]], True)
+
+ def test_empty_string_in_scopesets_except_1(self):
+ self.assertScopeMatch(["foo:bar"], "", "exception")
+
+ def test_empty_string_in_scopesets_except_2(self):
+ self.assertScopeMatch(["foo:bar"], [""], "exception")
+
+ def test_empty_string_in_scopesets(self):
+ self.assertScopeMatch(["foo:bar"], [[""]], False)
+
+ def test_prefix(self):
+ self.assertScopeMatch(["foo:*"], [["foo:bar"]], True)
+
+ def test_star_not_at_end(self):
+ self.assertScopeMatch(["foo:*:bing"], [["foo:bar:bing"]], False)
+
+    def test_star_at_beginning(self):
+ self.assertScopeMatch(["*:bar"], [["foo:bar"]], False)
+
+ def test_prefix_with_no_star(self):
+ self.assertScopeMatch(["foo:"], [["foo:bar"]], False)
+
+ def test_star_but_not_prefix_1(self):
+ self.assertScopeMatch(["foo:bar:*"], [["bar:bing"]], False)
+
+ def test_star_but_not_prefix_2(self):
+ self.assertScopeMatch(["bar:*"], [["foo:bar:bing"]], False)
+
+ def test_disjunction_strings_except(self):
+ self.assertScopeMatch(["bar:*"], ["foo:x", "bar:x"], "exception")
+
+ def test_disjunction_strings_2(self):
+ self.assertScopeMatch(["bar:*"], [["foo:x"], ["bar:x"]], True)
+
+ def test_conjunction(self):
+ self.assertScopeMatch(["bar:*", "foo:x"], [["foo:x", "bar:y"]], True)
+
+ def test_empty_pattern(self):
+ self.assertScopeMatch([""], [["foo:bar"]], False)
+
+ def test_empty_patterns(self):
+ self.assertScopeMatch([], [["foo:bar"]], False)
+
+ def test_bare_star(self):
+ self.assertScopeMatch(["*"], [["foo:bar", "bar:bing"]], True)
+
+ def test_empty_conjunction_in_scopesets(self):
+ self.assertScopeMatch(["foo:bar"], [[]], True)
+
+ def test_non_string_scopesets(self):
+ self.assertScopeMatch(["foo:bar"], {}, "exception")
+
+ def test_non_string_scopeset(self):
+ self.assertScopeMatch(["foo:bar"], [{}], "exception")
+
+ def test_non_string_scope(self):
+ self.assertScopeMatch(["foo:bar"], [[{}]], "exception")
+
+ def test_empty_disjunction_in_scopesets(self):
+ self.assertScopeMatch(["foo:bar"], [], False)
+
+
+class TestIsExpired(TestCase):
+
+ def test_not_expired(self):
+ isExpired = subject.isExpired("""
+ {
+ "version":1,
+ "scopes":["*"],
+ "start":1450740520182,
+ "expiry":2451000620182,
+ "seed":"90PyTwYxS96-lBPc0f_MqQGV-hHCUsTYWpXZilv6EqDg",
+ "signature":"HocA2IiCoGzjUQZbrbLSwKMXZSYWCu/hfMPCa/ovggQ="
+ }
+ """)
+ self.assertEqual(isExpired, False)
+
+ def test_expired(self):
+        # Note: we have to test with expiry 0, because the mock patching
+        # interferes with time.time(), so a realistic past expiry won't work.
+ isExpired = subject.isExpired("""
+ {
+ "version":1,
+ "scopes":["*"],
+ "start":1450740520182,
+ "expiry":0,
+ "seed":"90PyTwYxS96-lBPc0f_MqQGV-hHCUsTYWpXZilv6EqDg",
+ "signature":"HocA2IiCoGzjUQZbrbLSwKMXZSYWCu/hfMPCa/ovggQ="
+ }
+ """)
+ self.assertEqual(isExpired, True)
+
+
+class TestFromEnv(TestCase):
+
+ def clear_env(self):
+ for v in 'ROOT_URL', 'CLIENT_ID', 'ACCESS_TOKEN', 'CERTIFICATE':
+ v = 'TASKCLUSTER_' + v
+ if v in os.environ:
+ del os.environ[v]
+
+ @mock.patch.dict(os.environ)
+ def test_empty(self):
+ self.clear_env()
+ self.assertEqual(subject.optionsFromEnvironment(), {})
+
+ @mock.patch.dict(os.environ)
+ def test_all(self):
+ os.environ['TASKCLUSTER_ROOT_URL'] = 'https://tc.example.com'
+ os.environ['TASKCLUSTER_CLIENT_ID'] = 'me'
+ os.environ['TASKCLUSTER_ACCESS_TOKEN'] = 'shave-and-a-haircut'
+ os.environ['TASKCLUSTER_CERTIFICATE'] = '{"bits":2}'
+ self.assertEqual(subject.optionsFromEnvironment(), {
+ 'rootUrl': 'https://tc.example.com',
+ 'credentials': {
+ 'clientId': 'me',
+ 'accessToken': 'shave-and-a-haircut',
+ 'certificate': '{"bits":2}',
+ },
+ })
+
+ @mock.patch.dict(os.environ)
+ def test_cred_only(self):
+ os.environ['TASKCLUSTER_ACCESS_TOKEN'] = 'shave-and-a-haircut'
+ self.assertEqual(subject.optionsFromEnvironment(), {
+ 'credentials': {
+ 'accessToken': 'shave-and-a-haircut',
+ },
+ })
+
+ @mock.patch.dict(os.environ)
+ def test_rooturl_only(self):
+ os.environ['TASKCLUSTER_ROOT_URL'] = 'https://tc.example.com'
+ self.assertEqual(subject.optionsFromEnvironment(), {
+ 'rootUrl': 'https://tc.example.com',
+ })
+
+ @mock.patch.dict(os.environ)
+ def test_default_rooturl(self):
+ os.environ['TASKCLUSTER_CLIENT_ID'] = 'me'
+ os.environ['TASKCLUSTER_ACCESS_TOKEN'] = 'shave-and-a-haircut'
+ os.environ['TASKCLUSTER_CERTIFICATE'] = '{"bits":2}'
+ self.assertEqual(
+ subject.optionsFromEnvironment({'rootUrl': 'https://other.example.com'}), {
+ 'rootUrl': 'https://other.example.com',
+ 'credentials': {
+ 'clientId': 'me',
+ 'accessToken': 'shave-and-a-haircut',
+ 'certificate': '{"bits":2}',
+ },
+ })
+
+ @mock.patch.dict(os.environ)
+ def test_default_rooturl_overridden(self):
+ os.environ['TASKCLUSTER_ROOT_URL'] = 'https://tc.example.com'
+ self.assertEqual(
+ subject.optionsFromEnvironment({'rootUrl': 'https://other.example.com'}),
+ {'rootUrl': 'https://tc.example.com'})
+
+ @mock.patch.dict(os.environ)
+ def test_default_creds(self):
+ os.environ['TASKCLUSTER_ROOT_URL'] = 'https://tc.example.com'
+ os.environ['TASKCLUSTER_ACCESS_TOKEN'] = 'shave-and-a-haircut'
+ os.environ['TASKCLUSTER_CERTIFICATE'] = '{"bits":2}'
+ self.assertEqual(
+ subject.optionsFromEnvironment({'credentials': {'clientId': 'them'}}), {
+ 'rootUrl': 'https://tc.example.com',
+ 'credentials': {
+ 'clientId': 'them',
+ 'accessToken': 'shave-and-a-haircut',
+ 'certificate': '{"bits":2}',
+ },
+ })
+
+ @mock.patch.dict(os.environ)
+ def test_default_creds_overridden(self):
+ os.environ['TASKCLUSTER_ROOT_URL'] = 'https://tc.example.com'
+ os.environ['TASKCLUSTER_CLIENT_ID'] = 'me'
+ os.environ['TASKCLUSTER_ACCESS_TOKEN'] = 'shave-and-a-haircut'
+ os.environ['TASKCLUSTER_CERTIFICATE'] = '{"bits":2}'
+ self.assertEqual(
+ subject.optionsFromEnvironment({'credentials': {'clientId': 'them'}}), {
+ 'rootUrl': 'https://tc.example.com',
+ 'credentials': {
+ 'clientId': 'me',
+ 'accessToken': 'shave-and-a-haircut',
+ 'certificate': '{"bits":2}',
+ },
+ })
diff --git a/third_party/python/urllib3/CHANGES.rst b/third_party/python/urllib3/CHANGES.rst
new file mode 100644
index 0000000000..389980f2b9
--- /dev/null
+++ b/third_party/python/urllib3/CHANGES.rst
@@ -0,0 +1,1112 @@
+Changes
+=======
+
+1.25.9 (2020-04-16)
+-------------------
+
+* Added ``InvalidProxyConfigurationWarning`` which is raised when
+ erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently
+ support connecting to HTTPS proxies but will soon be able to
+ and we would like users to migrate properly without much breakage.
+
+ See `this GitHub issue <https://github.com/urllib3/urllib3/issues/1850>`_
+ for more information on how to fix your proxy config. (Pull #1851)
+
+* Drain connection after ``PoolManager`` redirect (Pull #1817)
+
+* Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812)
+
+* Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805)
+
+* Allow the CA certificate data to be passed as a string (Pull #1804)
+
+* Raise ``ValueError`` if method contains control characters (Pull #1800)
+
+* Add ``__repr__`` to ``Timeout`` (Pull #1795)
+
+
+1.25.8 (2020-01-20)
+-------------------
+
+* Drop support for EOL Python 3.4 (Pull #1774)
+
+* Optimize _encode_invalid_chars (Pull #1787)
+
+
+1.25.7 (2019-11-11)
+-------------------
+
+* Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734)
+
+* Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470)
+
+* Fix issue where URL fragment was sent within the request target. (Pull #1732)
+
+* Fix issue where an empty query section in a URL would fail to parse. (Pull #1732)
+
+* Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703)
+
+
+1.25.6 (2019-09-24)
+-------------------
+
+* Fix issue where tilde (``~``) characters were incorrectly
+ percent-encoded in the path. (Pull #1692)
+
+
+1.25.5 (2019-09-19)
+-------------------
+
+* Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which
+ caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``.
+ (Issue #1682)
+
+
+1.25.4 (2019-09-19)
+-------------------
+
+* Propagate Retry-After header settings to subsequent retries. (Pull #1607)
+
+* Fix edge case where Retry-After header was still respected even when
+ explicitly opted out of. (Pull #1607)
+
+* Remove dependency on ``rfc3986`` for URL parsing.
+
+* Fix issue where URLs containing invalid characters within ``Url.auth`` would
+ raise an exception instead of percent-encoding those characters.
+
+* Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses
+ work well with BufferedReaders and other ``io`` module features. (Pull #1652)
+
+* Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673)
+
+
+1.25.3 (2019-05-23)
+-------------------
+
+* Change ``HTTPSConnection`` to load system CA certificates
+ when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are
+ unspecified. (Pull #1608, Issue #1603)
+
+* Upgrade bundled rfc3986 to v1.3.2. (Pull #1609, Issue #1605)
+
+
+1.25.2 (2019-04-28)
+-------------------
+
+* Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583)
+
+* Change ``parse_url`` to percent-encode invalid characters within the
+ path, query, and target components. (Pull #1586)
+
+
+1.25.1 (2019-04-24)
+-------------------
+
+* Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579)
+
+* Upgrade bundled rfc3986 to v1.3.1 (Pull #1578)
+
+
+1.25 (2019-04-22)
+-----------------
+
+* Require and validate certificates by default when using HTTPS (Pull #1507)
+
+* Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487)
+
+* Added support for ``key_password`` for ``HTTPSConnectionPool`` to use
+ encrypted ``key_file`` without creating your own ``SSLContext`` object. (Pull #1489)
+
+* Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext``
+ implementations. (Pull #1496)
+
+* Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, PR #1492)
+
+* Fixed issue where OpenSSL would block if an encrypted client private key was
+ given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489)
+
+* Added support for Brotli content encoding. It is enabled automatically if
+ ``brotlipy`` package is installed which can be requested with
+ ``urllib3[brotli]`` extra. (Pull #1532)
+
+* Drop ciphers using DSS key exchange from default TLS cipher suites.
+ Improve default ciphers when using SecureTransport. (Pull #1496)
+
+* Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483)
+
+1.24.3 (2019-05-01)
+-------------------
+
+* Apply fix for CVE-2019-9740. (Pull #1591)
+
+1.24.2 (2019-04-17)
+-------------------
+
+* Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or
+ ``ssl_context`` parameters are specified.
+
+* Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510)
+
+* Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269)
+
+
+1.24.1 (2018-11-02)
+-------------------
+
+* Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467)
+
+* Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462)
+
+
+1.24 (2018-10-16)
+-----------------
+
+* Allow key_server_hostname to be specified when initializing a PoolManager to allow custom SNI to be overridden. (Pull #1449)
+
+* Test against Python 3.7 on AppVeyor. (Pull #1453)
+
+* Early-out ipv6 checks when running on App Engine. (Pull #1450)
+
+* Change ambiguous description of backoff_factor (Pull #1436)
+
+* Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442)
+
+* Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405).
+
+* Add a server_hostname parameter to HTTPSConnection which allows for
+ overriding the SNI hostname sent in the handshake. (Pull #1397)
+
+* Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430)
+
+* Fixed bug where responses with header Content-Type: message/* erroneously
+ raised HeaderParsingError, resulting in a warning being logged. (Pull #1439)
+
+* Move urllib3 to src/urllib3 (Pull #1409)
+
+
+1.23 (2018-06-04)
+-----------------
+
+* Allow providing a list of headers to strip from requests when redirecting
+ to a different host. Defaults to the ``Authorization`` header. Different
+ headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316)
+
+* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247).
+
+* Dropped Python 3.3 support. (Pull #1242)
+
+* Put the connection back in the pool when calling stream() or read_chunked() on
+ a chunked HEAD response. (Issue #1234)
+
+* Fixed pyOpenSSL-specific ssl client authentication issue when clients
+ attempted to auth via certificate + chain (Issue #1060)
+
+* Add the port to the connectionpool connect print (Pull #1251)
+
+* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380)
+
+* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088)
+
+* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359)
+
+* Added support for auth info in url for SOCKS proxy (Pull #1363)
+
+
+1.22 (2017-07-20)
+-----------------
+
+* Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via
+ IPv6 proxy. (Issue #1222)
+
+* Made the connection pool retry on ``SSLError``. The original ``SSLError``
+ is available on ``MaxRetryError.reason``. (Issue #1112)
+
+* Drain and release connection before recursing on retry/redirect. Fixes
+ deadlocks with a blocking connectionpool. (Issue #1167)
+
+* Fixed compatibility for cookiejar. (Issue #1229)
+
+* pyopenssl: Use vendored version of ``six``. (Issue #1231)
+
+
+1.21.1 (2017-05-02)
+-------------------
+
+* Fixed SecureTransport issue that would cause long delays in response body
+ delivery. (Pull #1154)
+
+* Fixed regression in 1.21 that threw exceptions when users passed the
+ ``socket_options`` flag to the ``PoolManager``. (Issue #1165)
+
+* Fixed regression in 1.21 that threw exceptions when users passed the
+ ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``.
+ (Pull #1157)
+
+
+1.21 (2017-04-25)
+-----------------
+
+* Improved performance of certain selector system calls on Python 3.5 and
+ later. (Pull #1095)
+
+* Resolved issue where the PyOpenSSL backend would not wrap SysCallError
+ exceptions appropriately when sending data. (Pull #1125)
+
+* The selectors module now detects a monkey-patched select module after import,
+  for modules like eventlet and greenlet that patch select. (Pull #1128)
+
+* Reduced memory consumption when streaming zlib-compressed responses
+ (as opposed to raw deflate streams). (Pull #1129)
+
+* Connection pools now use the entire request context when constructing the
+ pool key. (Pull #1016)
+
+* ``PoolManager.connection_from_*`` methods now accept a new keyword argument,
+ ``pool_kwargs``, which are merged with the existing ``connection_pool_kw``.
+ (Pull #1016)
+
+* Add retry counter for ``status_forcelist``. (Issue #1147)
+
+* Added ``contrib`` module for using SecureTransport on macOS:
+ ``urllib3.contrib.securetransport``. (Pull #1122)
+
+* urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes:
+ for schemes it does not recognise, it assumes they are case-sensitive and
+ leaves them unchanged.
+ (Issue #1080)
+
+
+1.20 (2017-01-19)
+-----------------
+
+* Added support for waiting for I/O using selectors other than select,
+ improving urllib3's behaviour with large numbers of concurrent connections.
+ (Pull #1001)
+
+* Updated the date for the system clock check. (Issue #1005)
+
+* ConnectionPools now correctly consider hostnames to be case-insensitive.
+ (Issue #1032)
+
+* Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module
+ to fail when it is injected, rather than at first use. (Pull #1063)
+
+* Outdated versions of cryptography now cause the PyOpenSSL contrib module
+ to fail when it is injected, rather than at first use. (Issue #1044)
+
+* Automatically attempt to rewind a file-like body object when a request is
+ retried or redirected. (Pull #1039)
+
+* Fix some bugs that occur when modules incautiously patch the queue module.
+ (Pull #1061)
+
+* Prevent retries from occurring on read timeouts for which the request method
+ was not in the method whitelist. (Issue #1059)
+
+* Changed the PyOpenSSL contrib module to lazily load idna to avoid
+ unnecessarily bloating the memory of programs that don't need it. (Pull
+ #1076)
+
+* Add support for IPv6 literals with zone identifiers. (Pull #1013)
+
+* Added support for socks5h:// and socks4a:// schemes when working with SOCKS
+ proxies, and controlled remote DNS appropriately. (Issue #1035)
+
+
+1.19.1 (2016-11-16)
+-------------------
+
+* Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025)
+
+
+1.19 (2016-11-03)
+-----------------
+
+* urllib3 now respects Retry-After headers on 413, 429, and 503 responses when
+ using the default retry logic. (Pull #955)
+
+* Remove markers from setup.py to assist ancient setuptools versions. (Issue
+ #986)
+
+* Disallow superscripts and other integerish things in URL ports. (Issue #989)
+
+* Allow urllib3's HTTPResponse.stream() method to continue to work with
+ non-httplib underlying FPs. (Pull #990)
+
+* Empty filenames in multipart headers are now emitted as such, rather than
+ being suppressed. (Issue #1015)
+
+* Prefer user-supplied Host headers on chunked uploads. (Issue #1009)
+
+
+1.18.1 (2016-10-27)
+-------------------
+
+* CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with
+ PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This
+ release fixes a vulnerability whereby urllib3 in the above configuration
+ would silently fail to validate TLS certificates due to erroneously setting
+ invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous
+ flags do not cause a problem in OpenSSL versions before 1.1.0, which
+ interprets the presence of any flag as requesting certificate validation.
+
+ There is no PR for this patch, as it was prepared for simultaneous disclosure
+ and release. The master branch received the same fix in PR #1010.
+
+
+1.18 (2016-09-26)
+-----------------
+
+* Fixed incorrect message for IncompleteRead exception. (PR #973)
+
+* Accept ``iPAddress`` subject alternative name fields in TLS certificates.
+ (Issue #258)
+
+* Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3.
+ (Issue #977)
+
+* Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979)
+
+
+1.17 (2016-09-06)
+-----------------
+
+* Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835)
+
+* ConnectionPool debug log now includes scheme, host, and port. (Issue #897)
+
+* Substantially refactored documentation. (Issue #887)
+
+* Used URLFetch default timeout on AppEngine, rather than hardcoding our own.
+ (Issue #858)
+
+* Normalize the scheme and host in the URL parser (Issue #833)
+
+* ``HTTPResponse`` contains the last ``Retry`` object, which now also
+ contains retries history. (Issue #848)
+
+* Timeout can no longer be set as boolean, and must be greater than zero.
+ (PR #924)
+
+* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We
+ now use cryptography and idna, both of which are already dependencies of
+ PyOpenSSL. (PR #930)
+
+* Fixed infinite loop in ``stream`` when amt=None. (Issue #928)
+
+* Try to use the operating system's certificates when we are using an
+ ``SSLContext``. (PR #941)
+
+* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to
+ ChaCha20, but ChaCha20 is then preferred to everything else. (PR #947)
+
+* Updated cipher suite list to remove 3DES-based cipher suites. (PR #958)
+
+* Removed the cipher suite fallback to allow HIGH ciphers. (PR #958)
+
+* Implemented ``length_remaining`` to determine remaining content
+ to be read. (PR #949)
+
+* Implemented ``enforce_content_length`` to enable exceptions when
+ incomplete data chunks are received. (PR #949)
+
+* Lowered the log level for connection start, connection reset, redirect, forced
+  retry, and new HTTPS connection messages from INFO to DEBUG. (PR #967)
+
+
+1.16 (2016-06-11)
+-----------------
+
+* Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840)
+
+* Provide ``key_fn_by_scheme`` pool keying mechanism that can be
+ overridden. (Issue #830)
+
+* Normalize scheme and host to lowercase for pool keys, and include
+ ``source_address``. (Issue #830)
+
+* Cleaner exception chain in Python 3 for ``_make_request``.
+ (Issue #861)
+
+* Fixed installing ``urllib3[socks]`` extra. (Issue #864)
+
+* Fixed signature of ``ConnectionPool.close`` so it can actually safely be
+ called by subclasses. (Issue #873)
+
+* Retain ``release_conn`` state across retries. (Issues #651, #866)
+
+* Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to
+ ``HTTPResponse`` but can be replaced with a subclass. (Issue #879)
+
+
+1.15.1 (2016-04-11)
+-------------------
+
+* Fix packaging to include backports module. (Issue #841)
+
+
+1.15 (2016-04-06)
+-----------------
+
+* Added Retry(raise_on_status=False). (Issue #720)
+
+* Always use setuptools, no more distutils fallback. (Issue #785)
+
+* Dropped support for Python 3.2. (Issue #786)
+
+* Chunked transfer encoding when requesting with ``chunked=True``.
+ (Issue #790)
+
+* Fixed regression with IPv6 port parsing. (Issue #801)
+
+* Append SNIMissingWarning messages to allow users to specify it in
+ the PYTHONWARNINGS environment variable. (Issue #816)
+
+* Handle unicode headers in Py2. (Issue #818)
+
+* Log certificate when there is a hostname mismatch. (Issue #820)
+
+* Preserve order of request/response headers. (Issue #821)
+
+
+1.14 (2015-12-29)
+-----------------
+
+* contrib: SOCKS proxy support! (Issue #762)
+
+* Fixed AppEngine handling of transfer-encoding header and bug
+ in Timeout defaults checking. (Issue #763)
+
+
+1.13.1 (2015-12-18)
+-------------------
+
+* Fixed regression in IPv6 + SSL for match_hostname. (Issue #761)
+
+
+1.13 (2015-12-14)
+-----------------
+
+* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706)
+
+* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717)
+
+* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696)
+
+* Close connections more defensively on exception. (Issue #734)
+
+* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without
+ repeatedly flushing the decoder, to function better on Jython. (Issue #743)
+
+* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758)
+
+
+1.12 (2015-09-03)
+-----------------
+
+* Rely on ``six`` for importing ``httplib`` to work around
+ conflicts with other Python 3 shims. (Issue #688)
+
+* Add support for directories of certificate authorities, as supported by
+ OpenSSL. (Issue #701)
+
+* New exception: ``NewConnectionError``, raised when we fail to establish
+ a new connection, usually ``ECONNREFUSED`` socket error.
+
+
+1.11 (2015-07-21)
+-----------------
+
+* When ``ca_certs`` is given, ``cert_reqs`` defaults to
+ ``'CERT_REQUIRED'``. (Issue #650)
+
+* ``pip install urllib3[secure]`` will install Certifi and
+ PyOpenSSL as dependencies. (Issue #678)
+
+* Made ``HTTPHeaderDict`` usable as a ``headers`` input value
+ (Issues #632, #679)
+
+* Added `urllib3.contrib.appengine <https://urllib3.readthedocs.io/en/latest/contrib.html#google-app-engine>`_
+ which has an ``AppEngineManager`` for using ``URLFetch`` in a
+ Google AppEngine environment. (Issue #664)
+
+* Dev: Added test suite for AppEngine. (Issue #631)
+
+* Fix performance regression when using PyOpenSSL. (Issue #626)
+
+* Passing incorrect scheme (e.g. ``foo://``) will raise
+ ``ValueError`` instead of ``AssertionError`` (backwards
+ compatible for now, but please migrate). (Issue #640)
+
+* Fix pools not getting replenished when an error occurs during a
+ request using ``release_conn=False``. (Issue #644)
+
+* Fix pool-default headers not applying for url-encoded requests
+ like GET. (Issue #657)
+
+* log.warning in Python 3 when headers are skipped due to parsing
+ errors. (Issue #642)
+
+* Close and discard connections if an error occurs during read.
+ (Issue #660)
+
+* Fix host parsing for IPv6 proxies. (Issue #668)
+
+* Separate warning type SubjectAltNameWarning, now issued once
+ per host. (Issue #671)
+
+* Fix ``httplib.IncompleteRead`` not getting converted to
+ ``ProtocolError`` when using ``HTTPResponse.stream()``
+ (Issue #674)
+
+1.10.4 (2015-05-03)
+-------------------
+
+* Migrate tests to Tornado 4. (Issue #594)
+
+* Append default warning configuration rather than overwrite.
+ (Issue #603)
+
+* Fix streaming decoding regression. (Issue #595)
+
+* Fix chunked requests losing state across keep-alive connections.
+ (Issue #599)
+
+* Fix hanging when chunked HEAD response has no body. (Issue #605)
+
+
+1.10.3 (2015-04-21)
+-------------------
+
+* Emit ``InsecurePlatformWarning`` when SSLContext object is missing.
+ (Issue #558)
+
+* Fix regression of duplicate header keys being discarded.
+ (Issue #563)
+
+* ``Response.stream()`` returns a generator for chunked responses.
+ (Issue #560)
+
+* Set upper-bound timeout when waiting for a socket in PyOpenSSL.
+ (Issue #585)
+
+* Work on platforms without `ssl` module for plain HTTP requests.
+ (Issue #587)
+
+* Stop relying on the stdlib's default cipher list. (Issue #588)
+
+
+1.10.2 (2015-02-25)
+-------------------
+
+* Fix file descriptor leakage on retries. (Issue #548)
+
+* Removed RC4 from default cipher list. (Issue #551)
+
+* Header performance improvements. (Issue #544)
+
+* Fix PoolManager not obeying redirect retry settings. (Issue #553)
+
+
+1.10.1 (2015-02-10)
+-------------------
+
+* Pools can be used as context managers; a short sketch follows this list.
+  (Issue #545)
+
+* Don't re-use connections which experienced an SSLError. (Issue #529)
+
+* Don't fail when gzip decoding an empty stream. (Issue #535)
+
+* Add sha256 support for fingerprint verification. (Issue #540)
+
+* Fixed handling of header values containing commas. (Issue #533)
+
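+A minimal sketch of using a pool as a context manager (the host and path are
+illustrative, not taken from this changelog)::
+
+    import urllib3
+
+    # The pool and its connections are closed when the block exits.
+    with urllib3.HTTPConnectionPool("httpbin.org", maxsize=1) as pool:
+        resp = pool.request("GET", "/robots.txt")  # illustrative target
+        print(resp.status)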
+
+1.10 (2014-12-14)
+-----------------
+
+* Disabled SSLv3. (Issue #473)
+
+* Add ``Url.url`` property to return the composed url string. (Issue #394)
+
+* Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412)
+
+* ``MaxRetryError.reason`` will always be an exception, not a string.
+  (Issue #481)
+
+* Fixed SSL-related timeouts not being detected as timeouts. (Issue #492)
+
+* Py3: Use ``ssl.create_default_context()`` when available. (Issue #473)
+
+* Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request.
+ (Issue #496)
+
+* Emit ``SecurityWarning`` when certificate has no ``subjectAltName``.
+ (Issue #499)
+
+* Close and discard sockets which experienced SSL-related errors.
+ (Issue #501)
+
+* Handle ``body`` param in ``.request(...)``. (Issue #513)
+
+* Respect timeout with HTTPS proxy. (Issue #505)
+
+* PyOpenSSL: Handle ZeroReturnError exception. (Issue #520)
+
+
+1.9.1 (2014-09-13)
+------------------
+
+* Apply socket arguments before binding. (Issue #427)
+
+* More careful checks if fp-like object is closed. (Issue #435)
+
+* Fixed packaging issues of some development-related files not
+ getting included. (Issue #440)
+
+* Allow performing *only* fingerprint verification. (Issue #444)
+
+* Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445)
+
+* Fixed PyOpenSSL compatibility with PyPy. (Issue #450)
+
+* Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3.
+ (Issue #443)
+
+
+
+1.9 (2014-07-04)
+----------------
+
+* Shuffled around development-related files. If you're maintaining a distro
+ package of urllib3, you may need to tweak things. (Issue #415)
+
+* Unverified HTTPS requests will trigger a warning on the first request. See
+ our new `security documentation
+ <https://urllib3.readthedocs.io/en/latest/security.html>`_ for details.
+ (Issue #426)
+
+* New retry logic and ``urllib3.util.retry.Retry`` configuration object; a
+  short usage sketch follows this list. (Issue #326)
+
+* All raised exceptions should now be wrapped in a
+ ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326)
+
+* All errors during a retry-enabled request should be wrapped in
+ ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions
+ which were previously exempt. Underlying error is accessible from the
+ ``.reason`` property. (Issue #326)
+
+* ``urllib3.exceptions.ConnectionError`` renamed to
+ ``urllib3.exceptions.ProtocolError``. (Issue #326)
+
+* Errors during response read (such as IncompleteRead) are now wrapped in
+ ``urllib3.exceptions.ProtocolError``. (Issue #418)
+
+* Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``.
+ (Issue #417)
+
+* Catch read timeouts over SSL connections as
+ ``urllib3.exceptions.ReadTimeoutError``. (Issue #419)
+
+* Apply socket arguments before connecting. (Issue #427)
+
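+A minimal usage sketch of the new ``Retry`` object (the endpoint and the
+retry values below are illustrative, not taken from this changelog)::
+
+    import urllib3
+    from urllib3.util.retry import Retry
+
+    # Retry up to three times, with backoff, on common transient server errors.
+    retry = Retry(
+        total=3, backoff_factor=0.3, status_forcelist=[500, 502, 503]
+    )
+
+    http = urllib3.PoolManager()
+    # httpbin.org is used purely as an illustrative endpoint.
+    resp = http.request("GET", "http://httpbin.org/get", retries=retry)
+    print(resp.status)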
+
+1.8.3 (2014-06-23)
+------------------
+
+* Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385)
+
+* Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393)
+
+* Wrap ``socket.timeout`` exception with
+ ``urllib3.exceptions.ReadTimeoutError``. (Issue #399)
+
+* Fixed proxy-related bug where connections were being reused incorrectly.
+ (Issues #366, #369)
+
+* Added ``socket_options`` keyword parameter which allows defining
+ ``setsockopt`` configuration of new sockets. (Issue #397)
+
+* Removed ``HTTPConnection.tcp_nodelay`` in favor of
+ ``HTTPConnection.default_socket_options``. (Issue #397)
+
+* Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411)
+
+
+1.8.2 (2014-04-17)
+------------------
+
+* Fix ``urllib3.util`` not being included in the package.
+
+
+1.8.1 (2014-04-17)
+------------------
+
+* Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356)
+
+* Don't install ``dummyserver`` into ``site-packages`` as it's only needed
+ for the test suite. (Issue #362)
+
+* Added support for specifying ``source_address``. (Issue #352)
+
+
+1.8 (2014-03-04)
+----------------
+
+* Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in
+ username, and blank ports like 'hostname:').
+
+* New ``urllib3.connection`` module which contains all the HTTPConnection
+ objects.
+
+* Several ``urllib3.util.Timeout``-related fixes. Also changed constructor
+ signature to a more sensible order. [Backwards incompatible]
+ (Issues #252, #262, #263)
+
+* Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274)
+
+* Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which
+ returns the number of bytes read so far. (Issue #277)
+
+* Support for platforms without threading. (Issue #289)
+
+* Expand default-port comparison in ``HTTPConnectionPool.is_same_host``
+  to allow a pool with no specified port to be considered equal to an
+  HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305)
+
+* Improved default SSL/TLS settings to avoid vulnerabilities.
+ (Issue #309)
+
+* Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors.
+ (Issue #310)
+
+* Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests
+ will send the entire HTTP request ~200 milliseconds faster; however, some of
+ the resulting TCP packets will be smaller. (Issue #254)
+
+* Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl``
+ from the default 64 to 1024 in a single certificate. (Issue #318)
+
+* Headers are now passed and stored as a custom
+ ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``.
+ (Issue #329, #333)
+
+* Headers no longer lose their case on Python 3. (Issue #236)
+
+* ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA
+ certificates on inject. (Issue #332)
+
+* Requests with ``retries=False`` will immediately raise any exceptions without
+ wrapping them in ``MaxRetryError``. (Issue #348)
+
+* Fixed open socket leak with SSL-related failures. (Issue #344, #348)
+
+
+1.7.1 (2013-09-25)
+------------------
+
+* Added granular timeout support with new ``urllib3.util.Timeout`` class; a
+  short sketch follows this list. (Issue #231)
+
+* Fixed Python 3.4 support. (Issue #238)
+
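+A minimal sketch of the granular timeouts (the endpoint and values are
+illustrative, not taken from this changelog)::
+
+    import urllib3
+    from urllib3.util import Timeout
+
+    # Separate limits for establishing the connection and reading the response.
+    timeout = Timeout(connect=2.0, read=5.0)
+
+    http = urllib3.PoolManager(timeout=timeout)
+    resp = http.request("GET", "http://httpbin.org/delay/1")  # illustrative URL
+    print(resp.status)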
+
+1.7 (2013-08-14)
+----------------
+
+* More exceptions are now pickle-able, with tests. (Issue #174)
+
+* Fixed redirecting with relative URLs in Location header. (Issue #178)
+
+* Support for relative urls in ``Location: ...`` header. (Issue #179)
+
+* ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus
+ file-like functionality. (Issue #187)
+
+* Passing ``assert_hostname=False`` when creating a HTTPSConnectionPool will
+ skip hostname verification for SSL connections. (Issue #194)
+
+* New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a
+  generator wrapped around ``.read(...)``; a short sketch follows this list.
+  (Issue #198)
+
+* IPv6 url parsing enforces brackets around the hostname. (Issue #199)
+
+* Fixed thread race condition in
+ ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204)
+
+* ``ProxyManager`` requests now include non-default port in ``Host: ...``
+ header. (Issue #217)
+
+* Added HTTPS proxy support in ``ProxyManager``. (Issue #170 #139)
+
+* New ``RequestField`` object can be passed to the ``fields=...`` param which
+ can specify headers. (Issue #220)
+
+* Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails.
+ (Issue #221)
+
+* Use international headers when posting file names. (Issue #119)
+
+* Improved IPv6 support. (Issue #203)
+
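+A minimal sketch of streaming a response body with ``stream()`` (the URL and
+chunk size are illustrative, not taken from this changelog)::
+
+    import urllib3
+
+    http = urllib3.PoolManager()
+    # preload_content=False defers reading so the body can be consumed in chunks.
+    resp = http.request(
+        "GET", "http://httpbin.org/bytes/1024", preload_content=False
+    )
+    for chunk in resp.stream(256):
+        print(len(chunk))
+    resp.release_conn()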
+
+1.6 (2013-04-25)
+----------------
+
+* Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156)
+
+* ``ProxyManager`` automatically adds ``Host: ...`` header if not given.
+
+* Improved SSL-related code. ``cert_req`` now optionally takes a string like
+  "REQUIRED" or "NONE". Likewise, ``ssl_version`` takes strings like "SSLv23".
+  The string values reflect the suffix of the respective constant variable.
+  (Issue #130)
+
+* Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly
+ closed proxy connections and larger read buffers. (Issue #135)
+
+* Ensure the connection is closed if no data is received, fixes connection leak
+ on some platforms. (Issue #133)
+
+* Added SNI support for SSL/TLS connections on Py32+. (Issue #89)
+
+* Tests fixed to be compatible with Py26 again. (Issue #125)
+
+* Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant
+ to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109)
+
+* Allow an explicit content type to be specified when encoding file fields.
+ (Issue #126)
+
+* Exceptions are now pickleable, with tests. (Issue #101)
+
+* Fixed default headers not getting passed in some cases. (Issue #99)
+
+* Treat "content-encoding" header value as case-insensitive, per RFC 2616
+ Section 3.5. (Issue #110)
+
+* "Connection Refused" SocketErrors will get retried rather than raised.
+ (Issue #92)
+
+* Updated vendored ``six``, no longer overrides the global ``six`` module
+ namespace. (Issue #113)
+
+* ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding
+ the exception that prompted the final retry. If ``reason is None`` then it
+ was due to a redirect. (Issue #92, #114)
+
+* Fixed ``PoolManager.urlopen()`` from not redirecting more than once.
+ (Issue #149)
+
+* Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters
+ that are not files. (Issue #111)
+
+* Pass `strict` param down to ``httplib.HTTPConnection``. (Issue #122)
+
+* Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or
+ against an arbitrary hostname (when connecting by IP or for misconfigured
+ servers). (Issue #140)
+
+* Streaming decompression support. (Issue #159)
+
+
+1.5 (2012-08-02)
+----------------
+
+* Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug
+  logging in urllib3; a short sketch follows this list.
+
+* Native full URL parsing (including auth, path, query, fragment) available in
+ ``urllib3.util.parse_url(url)``.
+
+* Built-in redirect will switch method to 'GET' if status code is 303.
+ (Issue #11)
+
+* ``urllib3.PoolManager`` strips the scheme and host before sending the request
+ uri. (Issue #8)
+
+* New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding,
+ based on the Content-Type header, fails.
+
+* Fixed bug with pool depletion and leaking connections (Issue #76). Added
+ explicit connection closing on pool eviction. Added
+ ``urllib3.PoolManager.clear()``.
+
+* 99% -> 100% unit test coverage.
+
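+A minimal sketch of enabling the stderr debug logger (the request target is
+illustrative, not taken from this changelog)::
+
+    import urllib3
+
+    # Attach a DEBUG-level stderr handler to urllib3's logger.
+    urllib3.add_stderr_logger()
+
+    http = urllib3.PoolManager()
+    http.request("GET", "http://httpbin.org/get")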
+
+1.4 (2012-06-16)
+----------------
+
+* Minor AppEngine-related fixes.
+
+* Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``.
+
+* Improved url parsing. (Issue #73)
+
+* IPv6 url support. (Issue #72)
+
+
+1.3 (2012-03-25)
+----------------
+
+* Removed pre-1.0 deprecated API.
+
+* Refactored helpers into a ``urllib3.util`` submodule.
+
+* Fixed multipart encoding to support list-of-tuples for keys with multiple
+ values. (Issue #48)
+
+* Fixed multiple Set-Cookie headers in response not getting merged properly in
+ Python 3. (Issue #53)
+
+* AppEngine support with Py27. (Issue #61)
+
+* Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs
+ bytes.
+
+
+1.2.2 (2012-02-06)
+------------------
+
+* Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47)
+
+
+1.2.1 (2012-02-05)
+------------------
+
+* Fixed another bug related to when ``ssl`` module is not available. (Issue #41)
+
+* Location parsing errors now raise ``urllib3.exceptions.LocationParseError``
+ which inherits from ``ValueError``.
+
+
+1.2 (2012-01-29)
+----------------
+
+* Added Python 3 support (tested on 3.2.2)
+
+* Dropped Python 2.5 support (tested on 2.6.7, 2.7.2)
+
+* Use ``select.poll`` instead of ``select.select`` for platforms that support
+ it.
+
+* Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive
+ connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``.
+
+* Fixed ``ImportError`` during install when ``ssl`` module is not available.
+ (Issue #41)
+
+* Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not
+ completing properly. (Issue #28, uncovered by Issue #10 in v1.1)
+
+* Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` +
+ ``eventlet``. Removed extraneous unsupported dummyserver testing backends.
+ Added socket-level tests.
+
+* More tests. Achievement Unlocked: 99% Coverage.
+
+
+1.1 (2012-01-07)
+----------------
+
+* Refactored ``dummyserver`` to its own root namespace module (used for
+ testing).
+
+* Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in
+ Py32's ``ssl_match_hostname``. (Issue #25)
+
+* Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10)
+
+* Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue
+ #27)
+
+* Fixed timeout-related bugs. (Issues #17, #23)
+
+
+1.0.2 (2011-11-04)
+------------------
+
+* Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if
+ you're using the object manually. (Thanks pyos)
+
+* Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by
+ wrapping the access log in a mutex. (Thanks @christer)
+
+* Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and
+ ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code.
+
+
+1.0.1 (2011-10-10)
+------------------
+
+* Fixed a bug where the same connection would get returned into the pool twice,
+ causing extraneous "HttpConnectionPool is full" log warnings.
+
+
+1.0 (2011-10-08)
+----------------
+
+* Added ``PoolManager`` with LRU expiration of connections (tested and
+ documented).
+* Added ``ProxyManager`` (needs tests, docs, and confirmation that it works
+ with HTTPS proxies).
+* Added optional partial-read support for responses when
+ ``preload_content=False``. You can now make requests and just read the headers
+ without loading the content.
+* Made response decoding optional (default on, same as before).
+* Added optional explicit boundary string for ``encode_multipart_formdata``.
+* Convenience request methods are now inherited from ``RequestMethods``. Old
+ helpers like ``get_url`` and ``post_url`` should be abandoned in favour of
+ the new ``request(method, url, ...)``.
+* Refactored code to be even more decoupled, reusable, and extendable.
+* License header added to ``.py`` files.
+* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code
+ and docs in ``docs/`` and on https://urllib3.readthedocs.io/.
+* Embettered all the things!
+* Started writing this file.
+
+
+0.4.1 (2011-07-17)
+------------------
+
+* Minor bug fixes, code cleanup.
+
+
+0.4 (2011-03-01)
+----------------
+
+* Better unicode support.
+* Added ``VerifiedHTTPSConnection``.
+* Added ``NTLMConnectionPool`` in contrib.
+* Minor improvements.
+
+
+0.3.1 (2010-07-13)
+------------------
+
+* Added ``assert_host_name`` optional parameter. Now compatible with proxies.
+
+
+0.3 (2009-12-10)
+----------------
+
+* Added HTTPS support.
+* Minor bug fixes.
+* Refactored, broken backwards compatibility with 0.2.
+* API to be treated as stable from this version forward.
+
+
+0.2 (2008-11-17)
+----------------
+
+* Added unit tests.
+* Bug fixes.
+
+
+0.1 (2008-11-16)
+----------------
+
+* First release.
diff --git a/third_party/python/urllib3/CONTRIBUTORS.txt b/third_party/python/urllib3/CONTRIBUTORS.txt
new file mode 100644
index 0000000000..ef41e32b73
--- /dev/null
+++ b/third_party/python/urllib3/CONTRIBUTORS.txt
@@ -0,0 +1,304 @@
+# Contributions to the urllib3 project
+
+## Creator & Maintainer
+
+* Andrey Petrov <andrey.petrov@shazow.net>
+
+
+## Contributors
+
+In chronological order:
+
+* victor.vde <http://code.google.com/u/victor.vde/>
+ * HTTPS patch (which inspired HTTPSConnectionPool)
+
+* erikcederstrand <http://code.google.com/u/erikcederstrand/>
+ * NTLM-authenticated HTTPSConnectionPool
+ * Basic-authenticated HTTPSConnectionPool (merged into make_headers)
+
+* niphlod <niphlod@gmail.com>
+ * Client-verified SSL certificates for HTTPSConnectionPool
+ * Response gzip and deflate encoding support
+ * Better unicode support for filepost using StringIO buffers
+
+* btoconnor <brian@btoconnor.net>
+ * Non-multipart encoding for POST requests
+
+* p.dobrogost <http://code.google.com/u/@WBRSRlBZDhBFXQB6/>
+ * Code review, PEP8 compliance, benchmark fix
+
+* kennethreitz <me@kennethreitz.com>
+ * Bugfixes, suggestions, Requests integration
+
+* georgemarshall <https://github.com/georgemarshall>
+ * Bugfixes, Improvements and Test coverage
+
+* Thomas Kluyver <thomas@kluyver.me.uk>
+ * Python 3 support
+
+* brandon-rhodes <http://rhodesmill.org/brandon>
+ * Design review, bugfixes, test coverage.
+
+* studer <theo.studer@gmail.com>
+ * IPv6 url support and test coverage
+
+* Shivaram Lingamneni <slingamn@cs.stanford.edu>
+ * Support for explicitly closing pooled connections
+
+* hartator <hartator@gmail.com>
+ * Corrected multipart behavior for params
+
+* Thomas Weißschuh <thomas@t-8ch.de>
+ * Support for TLS SNI
+ * API unification of ssl_version/cert_reqs
+ * SSL fingerprint and alternative hostname verification
+ * Bugfixes in testsuite
+
+* Sune Kirkeby <mig@ibofobi.dk>
+ * Optional SNI-support for Python 2 via PyOpenSSL.
+
+* Marc Schlaich <marc.schlaich@gmail.com>
+ * Various bugfixes and test improvements.
+
+* Bryce Boe <bbzbryce@gmail.com>
+ * Correct six.moves conflict
+ * Fixed pickle support of some exceptions
+
+* Boris Figovsky <boris.figovsky@ravellosystems.com>
+ * Allowed to skip SSL hostname verification
+
+* Cory Benfield <https://lukasa.co.uk/about/>
+ * Stream method for Response objects.
+ * Return native strings in header values.
+ * Generate 'Host' header when using proxies.
+
+* Jason Robinson <jaywink@basshero.org>
+ * Add missing WrappedSocket.fileno method in PyOpenSSL
+
+* Audrius Butkevicius <audrius.butkevicius@elastichosts.com>
+ * Fixed a race condition
+
+* Stanislav Vitkovskiy <stas.vitkovsky@gmail.com>
+ * Added HTTPS (CONNECT) proxy support
+
+* Stephen Holsapple <sholsapp@gmail.com>
+ * Added abstraction for granular control of request fields
+
+* Martin von Gagern <Martin.vGagern@gmx.net>
+ * Support for non-ASCII header parameters
+
+* Kevin Burke <kev@inburke.com> and Pavel Kirichenko <juanych@yandex-team.ru>
+ * Support for separate connect and request timeouts
+
+* Peter Waller <p@pwaller.net>
+ * HTTPResponse.tell() for determining amount received over the wire
+
+* Nipunn Koorapati <nipunn1313@gmail.com>
+ * Ignore default ports when comparing hosts for equality
+
+* Danilo @dbrgn <https://dbrgn.ch/>
+ * Disabled TLS compression by default on Python 3.2+
+ * Disabled TLS compression in pyopenssl contrib module
+ * Configurable cipher suites in pyopenssl contrib module
+
+* Roman Bogorodskiy <roman.bogorodskiy@ericsson.com>
+ * Account retries on proxy errors
+
+* Nicolas Delaby <nicolas.delaby@ezeep.com>
+ * Use the platform-specific CA certificate locations
+
+* Josh Schneier <https://github.com/jschneier>
+ * HTTPHeaderDict and associated tests and docs
+ * Bugfixes, docs, test coverage
+
+* Tahia Khan <http://tahia.tk/>
+ * Added Timeout examples in docs
+
+* Arthur Grunseid <https://grunseid.com>
+ * source_address support and tests (with https://github.com/bui)
+
+* Ian Cordasco <graffatcolmingov@gmail.com>
+ * PEP8 Compliance and Linting
+ * Add ability to pass socket options to an HTTP Connection
+
+* Erik Tollerud <erik.tollerud@gmail.com>
+ * Support for standard library io module.
+
+* Krishna Prasad <kprasad.iitd@gmail.com>
+ * Google App Engine documentation
+
+* Aaron Meurer <asmeurer@gmail.com>
+ * Added Url.url, which unparses a Url
+
+* Evgeny Kapun <abacabadabacaba@gmail.com>
+ * Bugfixes
+
+* Benjamen Meyer <bm_witness@yahoo.com>
+ * Security Warning Documentation update for proper capture
+
+* Shivan Sornarajah <github@sornars.com>
+ * Support for using ConnectionPool and PoolManager as context managers.
+
+* Alex Gaynor <alex.gaynor@gmail.com>
+ * Updates to the default SSL configuration
+
+* Tomas Tomecek <ttomecek@redhat.com>
+ * Implemented generator for getting chunks from chunked responses.
+
+* tlynn <https://github.com/tlynn>
+ * Respect the warning preferences at import.
+
+* David D. Riddle <ddriddle@illinois.edu>
+ * IPv6 bugfixes in testsuite
+
+* Thea Flowers <magicalgirl@google.com>
+ * App Engine environment tests.
+ * Documentation re-write.
+
+* John Krauss <https://github.com/talos>
+ * Clues to debugging problems with `cryptography` dependency in docs
+
+* Disassem <https://github.com/Disassem>
+ * Fix pool-default headers not applying for url-encoded requests like GET.
+
+* James Atherfold <jlatherfold@hotmail.com>
+ * Bugfixes relating to cleanup of connections during errors.
+
+* Christian Pedersen <https://github.com/chripede>
+ * IPv6 HTTPS proxy bugfix
+
+* Jordan Moldow <https://github.com/jmoldow>
+ * Fix low-level exceptions leaking from ``HTTPResponse.stream()``.
+ * Bugfix for ``ConnectionPool.urlopen(release_conn=False)``.
+ * Creation of ``HTTPConnectionPool.ResponseCls``.
+
+* Predrag Gruevski <https://github.com/obi1kenobi>
+ * Made cert digest comparison use a constant-time algorithm.
+
+* Adam Talsma <https://github.com/a-tal>
+ * Bugfix to ca_cert file paths.
+
+* Evan Meagher <https://evanmeagher.net>
+ * Bugfix related to `memoryview` usage in PyOpenSSL adapter
+
+* John Vandenberg <jayvdb@gmail.com>
+ * Python 2.6 fixes; pyflakes and pep8 compliance
+
+* Andy Caldwell <andy.m.caldwell@googlemail.com>
+ * Bugfix related to reusing connections in indeterminate states.
+
+* Ville Skyttä <ville.skytta@iki.fi>
+ * Logging efficiency improvements, spelling fixes, Travis config.
+
+* Shige Takeda <smtakeda@gmail.com>
+ * Started Recipes documentation and added a recipe about handling concatenated gzip data in HTTP response
+
+* Jess Shapiro <jesse@jesseshapiro.net>
+ * Various character-encoding fixes/tweaks
+ * Disabling IPv6 DNS when IPv6 connections not supported
+
+* David Foster <http://dafoster.net/>
+ * Ensure order of request and response headers are preserved.
+
+* Jeremy Cline <jeremy@jcline.org>
+ * Added connection pool keys by scheme
+
+* Aviv Palivoda <palaviv@gmail.com>
+ * History list to Retry object.
+ * HTTPResponse contains the last Retry object.
+
+* Nate Prewitt <nate.prewitt@gmail.com>
+ * Ensure timeouts are not booleans and greater than zero.
+ * Fixed infinite loop in ``stream`` when amt=None.
+ * Added length_remaining to determine remaining data to be read.
+ * Added enforce_content_length to raise exception when incorrect content-length received.
+
+* Seth Michael Larson <sethmichaellarson@protonmail.com>
+ * Created selectors backport that supports PEP 475.
+
+* Alexandre Dias <alex.dias@smarkets.com>
+ * Don't retry on timeout if method not in whitelist
+
+* Moinuddin Quadri <moin18@gmail.com>
+ * Lazily load idna package
+
+* Tom White <s6yg1ez3@mail2tor.com>
+ * Made SOCKS handler differentiate socks5h from socks5 and socks4a from socks4.
+
+* Tim Burke <tim.burke@gmail.com>
+ * Stop buffering entire deflate-encoded responses.
+
+* Tuukka Mustonen <tuukka.mustonen@gmail.com>
+ * Add counter for status_forcelist retries.
+
+* Erik Rose <erik@mozilla.com>
+ * Bugfix to pyopenssl vendoring
+
+* Wolfgang Richter <wolfgang.richter@gmail.com>
+ * Bugfix related to loading full certificate chains with PyOpenSSL backend.
+
+* Mike Miller <github@mikeage.net>
+ * Logging improvements to include the HTTP(S) port when opening a new connection
+
+* Ioannis Tziakos <mail@itziakos.gr>
+ * Fix ``util.selectors._fileobj_to_fd`` to accept ``long``.
+ * Update appveyor tox setup to use the 64bit python.
+
+* Akamai (through Jess Shapiro) <jshapiro@akamai.com>
+ * Ongoing maintenance; 2017-2018
+
+* Dominique Leuenberger <dimstar@opensuse.org>
+ * Minor fixes in the test suite
+
+* Will Bond <will@wbond.net>
+ * Add Python 2.6 support to ``contrib.securetransport``
+
+* Aleksei Alekseev <alekseev.yeskela@gmail.com>
+ * using auth info for socks proxy
+
+* Chris Wilcox <git@crwilcox.com>
+ * Improve contribution guide
+ * Add ``HTTPResponse.geturl`` method to provide ``urllib2.urlopen().geturl()`` behavior
+
+* Bruce Merry <https://www.brucemerry.org.za>
+ * Fix leaking exceptions when system calls are interrupted with zero timeout
+
+* Hugo van Kemenade <https://github.com/hugovk>
+ * Drop support for EOL Python 2.6
+
+* Tim Bell <https://github.com/timb07>
+ * Bugfix for responses with Content-Type: message/* logging warnings
+
+* Justin Bramley <https://github.com/jbramleycl>
+ * Add ability to handle multiple Content-Encodings
+
+* Katsuhiko YOSHIDA <https://github.com/kyoshidajp>
+ * Remove Authorization header regardless of case when redirecting to cross-site
+
+* James Meickle <https://permadeath.com/>
+ * Improve handling of Retry-After header
+
+* Chris Jerdonek <chris.jerdonek@gmail.com>
+ * Remove a spurious TypeError from the exception chain inside
+ HTTPConnectionPool._make_request(), also for BaseExceptions.
+
+* Jorge Lopez Silva <https://github.com/jalopezsilva>
+ * Added support for forwarding requests through HTTPS proxies.
+
+* Benno Rice <benno@jeamland.net>
+ * Allow cadata parameter to be passed to underlying ``SSLContext.load_verify_locations()``.
+
+* Keiichi Kobayashi <abok.1k@gmail.com>
+ * Rename VerifiedHTTPSConnection to HTTPSConnection
+
+* Himanshu Garg <garg_himanshu@outlook.com>
+ * DOC & LICENSE Update
+
+* Hod Bin Noon <bin.noon.hod@gmail.com>
+ * Test improvements
+
+* Chris Olufson <tycarac@gmail.com>
+ * Fix for connection not being released on HTTP redirect and response not preloaded
+
+* [Your name or handle] <[email or website]>
+ * [Brief summary of your changes]
diff --git a/third_party/python/urllib3/LICENSE.txt b/third_party/python/urllib3/LICENSE.txt
new file mode 100644
index 0000000000..c89cf27b85
--- /dev/null
+++ b/third_party/python/urllib3/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2008-2019 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/third_party/python/urllib3/MANIFEST.in b/third_party/python/urllib3/MANIFEST.in
new file mode 100644
index 0000000000..4edfedde27
--- /dev/null
+++ b/third_party/python/urllib3/MANIFEST.in
@@ -0,0 +1,5 @@
+include README.rst CHANGES.rst LICENSE.txt CONTRIBUTORS.txt dev-requirements.txt Makefile
+recursive-include dummyserver *
+recursive-include test *
+recursive-include docs *
+recursive-exclude docs/_build *
diff --git a/third_party/python/urllib3/PKG-INFO b/third_party/python/urllib3/PKG-INFO
new file mode 100644
index 0000000000..d95b8c2d27
--- /dev/null
+++ b/third_party/python/urllib3/PKG-INFO
@@ -0,0 +1,1253 @@
+Metadata-Version: 2.1
+Name: urllib3
+Version: 1.25.9
+Summary: HTTP library with thread-safe connection pooling, file post, and more.
+Home-page: https://urllib3.readthedocs.io/
+Author: Andrey Petrov
+Author-email: andrey.petrov@shazow.net
+License: MIT
+Project-URL: Documentation, https://urllib3.readthedocs.io/
+Project-URL: Code, https://github.com/urllib3/urllib3
+Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues
+Description: urllib3
+ =======
+
+ urllib3 is a powerful, *sanity-friendly* HTTP client for Python. Much of the
+ Python ecosystem already uses urllib3 and you should too.
+ urllib3 brings many critical features that are missing from the Python
+ standard libraries:
+
+ - Thread safety.
+ - Connection pooling.
+ - Client-side SSL/TLS verification.
+ - File uploads with multipart encoding.
+ - Helpers for retrying requests and dealing with HTTP redirects.
+ - Support for gzip, deflate, and brotli encoding.
+ - Proxy support for HTTP and SOCKS.
+ - 100% test coverage.
+
+ urllib3 is powerful and easy to use::
+
+ >>> import urllib3
+ >>> http = urllib3.PoolManager()
+ >>> r = http.request('GET', 'http://httpbin.org/robots.txt')
+ >>> r.status
+ 200
+ >>> r.data
+ 'User-agent: *\nDisallow: /deny\n'
+
+
+ Installing
+ ----------
+
+ urllib3 can be installed with `pip <https://pip.pypa.io>`_::
+
+ $ pip install urllib3
+
+ Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
+
+ $ git clone git://github.com/urllib3/urllib3.git
+ $ python setup.py install
+
+
+ Documentation
+ -------------
+
+ urllib3 has usage and reference documentation at `urllib3.readthedocs.io <https://urllib3.readthedocs.io>`_.
+
+
+ Contributing
+ ------------
+
+ urllib3 happily accepts contributions. Please see our
+ `contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
+ for some tips on getting started.
+
+
+ Security Disclosures
+ --------------------
+
+ To report a security vulnerability, please use the
+ `Tidelift security contact <https://tidelift.com/security>`_.
+ Tidelift will coordinate the fix and disclosure with maintainers.
+
+ Maintainers
+ -----------
+
+ - `@sethmlarson <https://github.com/sethmlarson>`_ (Seth M. Larson)
+ - `@pquentin <https://github.com/pquentin>`_ (Quentin Pradet)
+ - `@theacodes <https://github.com/theacodes>`_ (Thea Flowers)
+ - `@haikuginger <https://github.com/haikuginger>`_ (Jess Shapiro)
+ - `@lukasa <https://github.com/lukasa>`_ (Cory Benfield)
+ - `@sigmavirus24 <https://github.com/sigmavirus24>`_ (Ian Stapleton Cordasco)
+ - `@shazow <https://github.com/shazow>`_ (Andrey Petrov)
+
+ 👋
+
+
+ Sponsorship
+ -----------
+
+ .. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
+ :width: 75
+ :alt: Tidelift
+
+ .. list-table::
+ :widths: 10 100
+
+ * - |tideliftlogo|
+ - Professional support for urllib3 is available as part of the `Tidelift
+ Subscription`_. Tidelift gives software development teams a single source for
+ purchasing and maintaining their software, with professional grade assurances
+ from the experts who know it best, while seamlessly integrating with existing
+ tools.
+
+ .. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
+
+ If your company benefits from this library, please consider `sponsoring its
+ development <https://urllib3.readthedocs.io/en/latest/contributing.html#sponsorship-project-grants>`_.
+
+ Sponsors include:
+
+ - Abbott (2018-2019), sponsored `@sethmlarson <https://github.com/sethmlarson>`_'s work on urllib3.
+ - Google Cloud Platform (2018-2019), sponsored `@theacodes <https://github.com/theacodes>`_'s work on urllib3.
+ - Akamai (2017-2018), sponsored `@haikuginger <https://github.com/haikuginger>`_'s work on urllib3
+ - Hewlett Packard Enterprise (2016-2017), sponsored `@Lukasa’s <https://github.com/Lukasa>`_ work on urllib3.
+
+
+ Changes
+ =======
+
+ 1.25.9 (2020-04-16)
+ -------------------
+
+ * Added ``InvalidProxyConfigurationWarning`` which is raised when
+ erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently
+ support connecting to HTTPS proxies but will soon be able to
+ and we would like users to migrate properly without much breakage.
+
+ See `this GitHub issue <https://github.com/urllib3/urllib3/issues/1850>`_
+ for more information on how to fix your proxy config. (Pull #1851)
+
+ * Drain connection after ``PoolManager`` redirect (Pull #1817)
+
+ * Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812)
+
+ * Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805)
+
+ * Allow the CA certificate data to be passed as a string (Pull #1804)
+
+ * Raise ``ValueError`` if method contains control characters (Pull #1800)
+
+ * Add ``__repr__`` to ``Timeout`` (Pull #1795)
+
+
+ 1.25.8 (2020-01-20)
+ -------------------
+
+ * Drop support for EOL Python 3.4 (Pull #1774)
+
+ * Optimize _encode_invalid_chars (Pull #1787)
+
+
+ 1.25.7 (2019-11-11)
+ -------------------
+
+ * Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734)
+
+ * Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470)
+
+ * Fix issue where URL fragment was sent within the request target. (Pull #1732)
+
+ * Fix issue where an empty query section in a URL would fail to parse. (Pull #1732)
+
+ * Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703)
+
+
+ 1.25.6 (2019-09-24)
+ -------------------
+
+ * Fix issue where tilde (``~``) characters were incorrectly
+ percent-encoded in the path. (Pull #1692)
+
+
+ 1.25.5 (2019-09-19)
+ -------------------
+
+ * Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which
+ caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``.
+ (Issue #1682)
+
+
+ 1.25.4 (2019-09-19)
+ -------------------
+
+ * Propagate Retry-After header settings to subsequent retries. (Pull #1607)
+
+ * Fix edge case where Retry-After header was still respected even when
+ explicitly opted out of. (Pull #1607)
+
+ * Remove dependency on ``rfc3986`` for URL parsing.
+
+ * Fix issue where URLs containing invalid characters within ``Url.auth`` would
+ raise an exception instead of percent-encoding those characters.
+
+ * Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses
+ work well with BufferedReaders and other ``io`` module features. (Pull #1652)
+
+ * Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673)
+
+
+ 1.25.3 (2019-05-23)
+ -------------------
+
+ * Change ``HTTPSConnection`` to load system CA certificates
+ when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are
+ unspecified. (Pull #1608, Issue #1603)
+
+ * Upgrade bundled rfc3986 to v1.3.2. (Pull #1609, Issue #1605)
+
+
+ 1.25.2 (2019-04-28)
+ -------------------
+
+ * Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583)
+
+ * Change ``parse_url`` to percent-encode invalid characters within the
+ path, query, and target components. (Pull #1586)
+
+
+ 1.25.1 (2019-04-24)
+ -------------------
+
+ * Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579)
+
+ * Upgrade bundled rfc3986 to v1.3.1 (Pull #1578)
+
+
+ 1.25 (2019-04-22)
+ -----------------
+
+ * Require and validate certificates by default when using HTTPS (Pull #1507)
+
+ * Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487)
+
+ * Added support for ``key_password`` for ``HTTPSConnectionPool`` to use
+ encrypted ``key_file`` without creating your own ``SSLContext`` object. (Pull #1489)
+
+ * Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext``
+ implementations. (Pull #1496)
+
+ * Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, PR #1492)
+
+ * Fixed issue where OpenSSL would block if an encrypted client private key was
+ given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489)
+
+ * Added support for Brotli content encoding. It is enabled automatically if
+ ``brotlipy`` package is installed which can be requested with
+ ``urllib3[brotli]`` extra. (Pull #1532)
+
+ * Drop ciphers using DSS key exchange from default TLS cipher suites.
+ Improve default ciphers when using SecureTransport. (Pull #1496)
+
+ * Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483)
+
+ 1.24.3 (2019-05-01)
+ -------------------
+
+ * Apply fix for CVE-2019-9740. (Pull #1591)
+
+ 1.24.2 (2019-04-17)
+ -------------------
+
+ * Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or
+ ``ssl_context`` parameters are specified.
+
+ * Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510)
+
+ * Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269)
+
+
+ 1.24.1 (2018-11-02)
+ -------------------
+
+ * Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467)
+
+ * Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462)
+
+
+ 1.24 (2018-10-16)
+ -----------------
+
+ * Allow key_server_hostname to be specified when initializing a PoolManager to allow custom SNI to be overridden. (Pull #1449)
+
+ * Test against Python 3.7 on AppVeyor. (Pull #1453)
+
+ * Early-out ipv6 checks when running on App Engine. (Pull #1450)
+
+ * Change ambiguous description of backoff_factor (Pull #1436)
+
+ * Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442)
+
+ * Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405).
+
+ * Add a server_hostname parameter to HTTPSConnection which allows for
+ overriding the SNI hostname sent in the handshake. (Pull #1397)
+
+ * Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430)
+
+ * Fixed bug where responses with header Content-Type: message/* erroneously
+ raised HeaderParsingError, resulting in a warning being logged. (Pull #1439)
+
+ * Move urllib3 to src/urllib3 (Pull #1409)
+
+
+ 1.23 (2018-06-04)
+ -----------------
+
+ * Allow providing a list of headers to strip from requests when redirecting
+ to a different host. Defaults to the ``Authorization`` header. Different
+ headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316)
+
+ * Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247).
+
+ * Dropped Python 3.3 support. (Pull #1242)
+
+ * Put the connection back in the pool when calling stream() or read_chunked() on
+ a chunked HEAD response. (Issue #1234)
+
+ * Fixed pyOpenSSL-specific ssl client authentication issue when clients
+ attempted to auth via certificate + chain (Issue #1060)
+
+ * Add the port to the connectionpool connect print (Pull #1251)
+
+ * Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380)
+
+ * ``read_chunked()`` on a closed response returns no chunks. (Issue #1088)
+
+ * Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359)
+
+ * Added support for auth info in url for SOCKS proxy (Pull #1363)
+
+
+ 1.22 (2017-07-20)
+ -----------------
+
+ * Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via
+ IPv6 proxy. (Issue #1222)
+
+ * Made the connection pool retry on ``SSLError``. The original ``SSLError``
+ is available on ``MaxRetryError.reason``. (Issue #1112)
+
+ * Drain and release connection before recursing on retry/redirect. Fixes
+ deadlocks with a blocking connectionpool. (Issue #1167)
+
+ * Fixed compatibility for cookiejar. (Issue #1229)
+
+ * pyopenssl: Use vendored version of ``six``. (Issue #1231)
+
+
+ 1.21.1 (2017-05-02)
+ -------------------
+
+ * Fixed SecureTransport issue that would cause long delays in response body
+ delivery. (Pull #1154)
+
+ * Fixed regression in 1.21 that threw exceptions when users passed the
+ ``socket_options`` flag to the ``PoolManager``. (Issue #1165)
+
+ * Fixed regression in 1.21 that threw exceptions when users passed the
+ ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``.
+ (Pull #1157)
+
+
+ 1.21 (2017-04-25)
+ -----------------
+
+ * Improved performance of certain selector system calls on Python 3.5 and
+ later. (Pull #1095)
+
+ * Resolved issue where the PyOpenSSL backend would not wrap SysCallError
+ exceptions appropriately when sending data. (Pull #1125)
+
+ * Selectors now detects a monkey-patched select module after import for modules
+ that patch the select module like eventlet, greenlet. (Pull #1128)
+
+ * Reduced memory consumption when streaming zlib-compressed responses
+ (as opposed to raw deflate streams). (Pull #1129)
+
+ * Connection pools now use the entire request context when constructing the
+ pool key. (Pull #1016)
+
+ * ``PoolManager.connection_from_*`` methods now accept a new keyword argument,
+ ``pool_kwargs``, which are merged with the existing ``connection_pool_kw``.
+ (Pull #1016)
+
+ * Add retry counter for ``status_forcelist``. (Issue #1147)
+
+ * Added ``contrib`` module for using SecureTransport on macOS:
+ ``urllib3.contrib.securetransport``. (Pull #1122)
+
+ * urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes:
+ for schemes it does not recognise, it assumes they are case-sensitive and
+ leaves them unchanged.
+ (Issue #1080)
+
+
+ 1.20 (2017-01-19)
+ -----------------
+
+ * Added support for waiting for I/O using selectors other than select,
+ improving urllib3's behaviour with large numbers of concurrent connections.
+ (Pull #1001)
+
+ * Updated the date for the system clock check. (Issue #1005)
+
+ * ConnectionPools now correctly consider hostnames to be case-insensitive.
+ (Issue #1032)
+
+ * Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module
+ to fail when it is injected, rather than at first use. (Pull #1063)
+
+ * Outdated versions of cryptography now cause the PyOpenSSL contrib module
+ to fail when it is injected, rather than at first use. (Issue #1044)
+
+ * Automatically attempt to rewind a file-like body object when a request is
+ retried or redirected. (Pull #1039)
+
+ * Fix some bugs that occur when modules incautiously patch the queue module.
+ (Pull #1061)
+
+ * Prevent retries from occurring on read timeouts for which the request method
+ was not in the method whitelist. (Issue #1059)
+
+ * Changed the PyOpenSSL contrib module to lazily load idna to avoid
+ unnecessarily bloating the memory of programs that don't need it. (Pull
+ #1076)
+
+ * Add support for IPv6 literals with zone identifiers. (Pull #1013)
+
+ * Added support for socks5h:// and socks4a:// schemes when working with SOCKS
+ proxies, and controlled remote DNS appropriately. (Issue #1035)
+
+
+ 1.19.1 (2016-11-16)
+ -------------------
+
+ * Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025)
+
+
+ 1.19 (2016-11-03)
+ -----------------
+
+ * urllib3 now respects Retry-After headers on 413, 429, and 503 responses when
+ using the default retry logic. (Pull #955)
+
+ * Remove markers from setup.py to assist ancient setuptools versions. (Issue
+ #986)
+
+ * Disallow superscripts and other integerish things in URL ports. (Issue #989)
+
+ * Allow urllib3's HTTPResponse.stream() method to continue to work with
+ non-httplib underlying FPs. (Pull #990)
+
+ * Empty filenames in multipart headers are now emitted as such, rather than
+ being suppressed. (Issue #1015)
+
+ * Prefer user-supplied Host headers on chunked uploads. (Issue #1009)
+
+
+ 1.18.1 (2016-10-27)
+ -------------------
+
+ * CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with
+ PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This
+ release fixes a vulnerability whereby urllib3 in the above configuration
+ would silently fail to validate TLS certificates due to erroneously setting
+ invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous
+ flags do not cause a problem in OpenSSL versions before 1.1.0, which
+ interprets the presence of any flag as requesting certificate validation.
+
+ There is no PR for this patch, as it was prepared for simultaneous disclosure
+ and release. The master branch received the same fix in PR #1010.
+
+
+ 1.18 (2016-09-26)
+ -----------------
+
+ * Fixed incorrect message for IncompleteRead exception. (PR #973)
+
+ * Accept ``iPAddress`` subject alternative name fields in TLS certificates.
+ (Issue #258)
+
+ * Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3.
+ (Issue #977)
+
+ * Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979)
+
+
+ 1.17 (2016-09-06)
+ -----------------
+
+ * Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835)
+
+ * ConnectionPool debug log now includes scheme, host, and port. (Issue #897)
+
+ * Substantially refactored documentation. (Issue #887)
+
+ * Used URLFetch default timeout on AppEngine, rather than hardcoding our own.
+ (Issue #858)
+
+ * Normalize the scheme and host in the URL parser (Issue #833)
+
+ * ``HTTPResponse`` contains the last ``Retry`` object, which now also
+ contains retries history. (Issue #848)
+
+ * Timeout can no longer be set as boolean, and must be greater than zero.
+ (PR #924)
+
+ * Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We
+ now use cryptography and idna, both of which are already dependencies of
+ PyOpenSSL. (PR #930)
+
+ * Fixed infinite loop in ``stream`` when amt=None. (Issue #928)
+
+ * Try to use the operating system's certificates when we are using an
+ ``SSLContext``. (PR #941)
+
+ * Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to
+ ChaCha20, but ChaCha20 is then preferred to everything else. (PR #947)
+
+ * Updated cipher suite list to remove 3DES-based cipher suites. (PR #958)
+
+ * Removed the cipher suite fallback to allow HIGH ciphers. (PR #958)
+
+ * Implemented ``length_remaining`` to determine remaining content
+ to be read. (PR #949)
+
+ * Implemented ``enforce_content_length`` to enable exceptions when
+ incomplete data chunks are received. (PR #949)
+
+ * Dropped connection start, dropped connection reset, redirect, forced retry,
+ and new HTTPS connection log levels to DEBUG, from INFO. (PR #967)
+
+
+ 1.16 (2016-06-11)
+ -----------------
+
+ * Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840)
+
+ * Provide ``key_fn_by_scheme`` pool keying mechanism that can be
+ overridden. (Issue #830)
+
+ * Normalize scheme and host to lowercase for pool keys, and include
+ ``source_address``. (Issue #830)
+
+ * Cleaner exception chain in Python 3 for ``_make_request``.
+ (Issue #861)
+
+ * Fixed installing ``urllib3[socks]`` extra. (Issue #864)
+
+ * Fixed signature of ``ConnectionPool.close`` so it can actually safely be
+ called by subclasses. (Issue #873)
+
+ * Retain ``release_conn`` state across retries. (Issues #651, #866)
+
+ * Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to
+ ``HTTPResponse`` but can be replaced with a subclass. (Issue #879)
+
+
+ 1.15.1 (2016-04-11)
+ -------------------
+
+ * Fix packaging to include backports module. (Issue #841)
+
+
+ 1.15 (2016-04-06)
+ -----------------
+
+ * Added Retry(raise_on_status=False). (Issue #720)
+
+ * Always use setuptools, no more distutils fallback. (Issue #785)
+
+ * Dropped support for Python 3.2. (Issue #786)
+
+ * Chunked transfer encoding when requesting with ``chunked=True``.
+ (Issue #790)
+
+ * Fixed regression with IPv6 port parsing. (Issue #801)
+
+ * Append SNIMissingWarning messages to allow users to specify it in
+ the PYTHONWARNINGS environment variable. (Issue #816)
+
+ * Handle unicode headers in Py2. (Issue #818)
+
+ * Log certificate when there is a hostname mismatch. (Issue #820)
+
+ * Preserve order of request/response headers. (Issue #821)
+
+
+ 1.14 (2015-12-29)
+ -----------------
+
+ * contrib: SOCKS proxy support! (Issue #762)
+
+ * Fixed AppEngine handling of transfer-encoding header and bug
+ in Timeout defaults checking. (Issue #763)
+
+
+ 1.13.1 (2015-12-18)
+ -------------------
+
+ * Fixed regression in IPv6 + SSL for match_hostname. (Issue #761)
+
+
+ 1.13 (2015-12-14)
+ -----------------
+
+ * Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706)
+
+ * pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717)
+
+ * pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696)
+
+ * Close connections more defensively on exception. (Issue #734)
+
+ * Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without
+ repeatedly flushing the decoder, to function better on Jython. (Issue #743)
+
+ * Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758)
+
+
+ 1.12 (2015-09-03)
+ -----------------
+
+ * Rely on ``six`` for importing ``httplib`` to work around
+ conflicts with other Python 3 shims. (Issue #688)
+
+ * Add support for directories of certificate authorities, as supported by
+ OpenSSL. (Issue #701)
+
+ * New exception: ``NewConnectionError``, raised when we fail to establish
+ a new connection, usually ``ECONNREFUSED`` socket error.
+
+
+ 1.11 (2015-07-21)
+ -----------------
+
+ * When ``ca_certs`` is given, ``cert_reqs`` defaults to
+ ``'CERT_REQUIRED'``. (Issue #650)
+
+ * ``pip install urllib3[secure]`` will install Certifi and
+ PyOpenSSL as dependencies. (Issue #678)
+
+ * Made ``HTTPHeaderDict`` usable as a ``headers`` input value
+ (Issues #632, #679)
+
+ * Added `urllib3.contrib.appengine <https://urllib3.readthedocs.io/en/latest/contrib.html#google-app-engine>`_
+ which has an ``AppEngineManager`` for using ``URLFetch`` in a
+ Google AppEngine environment. (Issue #664)
+
+ * Dev: Added test suite for AppEngine. (Issue #631)
+
+ * Fix performance regression when using PyOpenSSL. (Issue #626)
+
+ * Passing incorrect scheme (e.g. ``foo://``) will raise
+ ``ValueError`` instead of ``AssertionError`` (backwards
+ compatible for now, but please migrate). (Issue #640)
+
+ * Fix pools not getting replenished when an error occurs during a
+ request using ``release_conn=False``. (Issue #644)
+
+ * Fix pool-default headers not applying for url-encoded requests
+ like GET. (Issue #657)
+
+ * log.warning in Python 3 when headers are skipped due to parsing
+ errors. (Issue #642)
+
+ * Close and discard connections if an error occurs during read.
+ (Issue #660)
+
+ * Fix host parsing for IPv6 proxies. (Issue #668)
+
+ * Separate warning type SubjectAltNameWarning, now issued once
+ per host. (Issue #671)
+
+ * Fix ``httplib.IncompleteRead`` not getting converted to
+ ``ProtocolError`` when using ``HTTPResponse.stream()``
+ (Issue #674)
+
+ 1.10.4 (2015-05-03)
+ -------------------
+
+ * Migrate tests to Tornado 4. (Issue #594)
+
+ * Append default warning configuration rather than overwrite.
+ (Issue #603)
+
+ * Fix streaming decoding regression. (Issue #595)
+
+ * Fix chunked requests losing state across keep-alive connections.
+ (Issue #599)
+
+ * Fix hanging when chunked HEAD response has no body. (Issue #605)
+
+
+ 1.10.3 (2015-04-21)
+ -------------------
+
+ * Emit ``InsecurePlatformWarning`` when SSLContext object is missing.
+ (Issue #558)
+
+ * Fix regression of duplicate header keys being discarded.
+ (Issue #563)
+
+ * ``Response.stream()`` returns a generator for chunked responses.
+ (Issue #560)
+
+ * Set upper-bound timeout when waiting for a socket in PyOpenSSL.
+ (Issue #585)
+
+ * Work on platforms without `ssl` module for plain HTTP requests.
+ (Issue #587)
+
+ * Stop relying on the stdlib's default cipher list. (Issue #588)
+
+
+ 1.10.2 (2015-02-25)
+ -------------------
+
+ * Fix file descriptor leakage on retries. (Issue #548)
+
+ * Removed RC4 from default cipher list. (Issue #551)
+
+ * Header performance improvements. (Issue #544)
+
+ * Fix PoolManager not obeying redirect retry settings. (Issue #553)
+
+
+ 1.10.1 (2015-02-10)
+ -------------------
+
+ * Pools can be used as context managers. (Issue #545)
+
+ * Don't re-use connections which experienced an SSLError. (Issue #529)
+
+ * Don't fail when gzip decoding an empty stream. (Issue #535)
+
+ * Add sha256 support for fingerprint verification. (Issue #540)
+
+ * Fixed handling of header values containing commas. (Issue #533)
+
+
+ 1.10 (2014-12-14)
+ -----------------
+
+ * Disabled SSLv3. (Issue #473)
+
+ * Add ``Url.url`` property to return the composed url string. (Issue #394)
+
+ * Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412)
+
+ * ``MaxRetryError.reason`` will always be an exception, not string.
+ (Issue #481)
+
+ * Fixed SSL-related timeouts not being detected as timeouts. (Issue #492)
+
+ * Py3: Use ``ssl.create_default_context()`` when available. (Issue #473)
+
+ * Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request.
+ (Issue #496)
+
+ * Emit ``SecurityWarning`` when certificate has no ``subjectAltName``.
+ (Issue #499)
+
+ * Close and discard sockets which experienced SSL-related errors.
+ (Issue #501)
+
+ * Handle ``body`` param in ``.request(...)``. (Issue #513)
+
+ * Respect timeout with HTTPS proxy. (Issue #505)
+
+ * PyOpenSSL: Handle ZeroReturnError exception. (Issue #520)
+
+
+ 1.9.1 (2014-09-13)
+ ------------------
+
+ * Apply socket arguments before binding. (Issue #427)
+
+ * More careful checks if fp-like object is closed. (Issue #435)
+
+ * Fixed packaging issues of some development-related files not
+ getting included. (Issue #440)
+
+ * Allow performing *only* fingerprint verification. (Issue #444)
+
+ * Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445)
+
+ * Fixed PyOpenSSL compatibility with PyPy. (Issue #450)
+
+ * Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3.
+ (Issue #443)
+
+
+
+ 1.9 (2014-07-04)
+ ----------------
+
+ * Shuffled around development-related files. If you're maintaining a distro
+ package of urllib3, you may need to tweak things. (Issue #415)
+
+ * Unverified HTTPS requests will trigger a warning on the first request. See
+ our new `security documentation
+ <https://urllib3.readthedocs.io/en/latest/security.html>`_ for details.
+ (Issue #426)
+
+ * New retry logic and ``urllib3.util.retry.Retry`` configuration object
+ (see the sketch below). (Issue #326)
+
+ * All raised exceptions should now be wrapped in a
+ ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326)
+
+ * All errors during a retry-enabled request should be wrapped in
+ ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions
+ which were previously exempt. Underlying error is accessible from the
+ ``.reason`` property. (Issue #326)
+
+ * ``urllib3.exceptions.ConnectionError`` renamed to
+ ``urllib3.exceptions.ProtocolError``. (Issue #326)
+
+ * Errors during response read (such as IncompleteRead) are now wrapped in
+ ``urllib3.exceptions.ProtocolError``. (Issue #418)
+
+ * Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``.
+ (Issue #417)
+
+ * Catch read timeouts over SSL connections as
+ ``urllib3.exceptions.ReadTimeoutError``. (Issue #419)
+
+ * Apply socket arguments before connecting. (Issue #427)
+
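+ A minimal sketch of the ``Retry`` configuration object (parameter values and
+ the URL are illustrative)::
+
+     import urllib3
+     from urllib3.util.retry import Retry
+
+     retries = Retry(total=3, backoff_factor=0.2, status_forcelist=[500, 502, 503])
+     http = urllib3.PoolManager(retries=retries)
+     r = http.request("GET", "http://example.com/")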
+
+ 1.8.3 (2014-06-23)
+ ------------------
+
+ * Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385)
+
+ * Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393)
+
+ * Wrap ``socket.timeout`` exception with
+ ``urllib3.exceptions.ReadTimeoutError``. (Issue #399)
+
+ * Fixed proxy-related bug where connections were being reused incorrectly.
+ (Issues #366, #369)
+
+ * Added ``socket_options`` keyword parameter which allows defining
+ ``setsockopt`` configuration for new sockets (see the sketch below). (Issue #397)
+
+ * Removed ``HTTPConnection.tcp_nodelay`` in favor of
+ ``HTTPConnection.default_socket_options``. (Issue #397)
+
+ * Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411)
+
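+ A minimal sketch of the ``socket_options`` parameter (the keep-alive option
+ and host are illustrative)::
+
+     import socket
+     import urllib3
+     from urllib3.connection import HTTPConnection
+
+     # Extend the default options rather than replacing them.
+     opts = HTTPConnection.default_socket_options + [
+         (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+     ]
+     pool = urllib3.HTTPConnectionPool("example.com", socket_options=opts)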
+
+ 1.8.2 (2014-04-17)
+ ------------------
+
+ * Fix ``urllib3.util`` not being included in the package.
+
+
+ 1.8.1 (2014-04-17)
+ ------------------
+
+ * Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356)
+
+ * Don't install ``dummyserver`` into ``site-packages`` as it's only needed
+ for the test suite. (Issue #362)
+
+ * Added support for specifying ``source_address``. (Issue #352)
+
+
+ 1.8 (2014-03-04)
+ ----------------
+
+ * Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in
+ username, and blank ports like 'hostname:').
+
+ * New ``urllib3.connection`` module which contains all the HTTPConnection
+ objects.
+
+ * Several ``urllib3.util.Timeout``-related fixes. Also changed constructor
+ signature to a more sensible order. [Backwards incompatible]
+ (Issues #252, #262, #263)
+
+ * Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274)
+
+ * Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which
+ returns the number of bytes read so far (see the sketch below). (Issue #277)
+
+ * Support for platforms without threading. (Issue #289)
+
+ * Expand default-port comparison in ``HTTPConnectionPool.is_same_host``
+ to allow a pool with no specified port to be considered equal to an
+ HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305)
+
+ * Improved default SSL/TLS settings to avoid vulnerabilities.
+ (Issue #309)
+
+ * Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors.
+ (Issue #310)
+
+ * Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests
+ will send the entire HTTP request ~200 milliseconds faster; however, some of
+ the resulting TCP packets will be smaller. (Issue #254)
+
+ * Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl``
+ from the default 64 to 1024 in a single certificate. (Issue #318)
+
+ * Headers are now passed and stored as a custom
+ ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``.
+ (Issue #329, #333)
+
+ * Headers no longer lose their case on Python 3. (Issue #236)
+
+ * ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA
+ certificates on inject. (Issue #332)
+
+ * Requests with ``retries=False`` will immediately raise any exceptions without
+ wrapping them in ``MaxRetryError``. (Issue #348)
+
+ * Fixed open socket leak with SSL-related failures. (Issue #344, #348)
+
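+ A minimal sketch of ``HTTPResponse.tell()`` (the URL and chunk size are
+ illustrative)::
+
+     import urllib3
+
+     http = urllib3.PoolManager()
+     r = http.request("GET", "http://example.com/", preload_content=False)
+     r.read(512)
+     print(r.tell())  # bytes read so far
+     r.close()  # close; the rest of the body was not read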
+
+ 1.7.1 (2013-09-25)
+ ------------------
+
+ * Added granular timeout support with the new ``urllib3.util.Timeout``
+ class (see the sketch below). (Issue #231)
+
+ * Fixed Python 3.4 support. (Issue #238)
+
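+ A minimal sketch of the ``Timeout`` class (the values and URL are
+ illustrative)::
+
+     import urllib3
+     from urllib3.util import Timeout
+
+     # Separate connect and read timeouts, in seconds.
+     timeout = Timeout(connect=2.0, read=7.0)
+     http = urllib3.PoolManager(timeout=timeout)
+     r = http.request("GET", "http://example.com/")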
+
+ 1.7 (2013-08-14)
+ ----------------
+
+ * More exceptions are now pickle-able, with tests. (Issue #174)
+
+ * Fixed redirecting with relative URLs in Location header. (Issue #178)
+
+ * Support for relative urls in ``Location: ...`` header. (Issue #179)
+
+ * ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus
+ file-like functionality. (Issue #187)
+
+ * Passing ``assert_hostname=False`` when creating a HTTPSConnectionPool will
+ skip hostname verification for SSL connections. (Issue #194)
+
+ * New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a
+ generator wrapped around ``.read(...)`` (see the sketch below). (Issue #198)
+
+ * IPv6 url parsing enforces brackets around the hostname. (Issue #199)
+
+ * Fixed thread race condition in
+ ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204)
+
+ * ``ProxyManager`` requests now include non-default port in ``Host: ...``
+ header. (Issue #217)
+
+ * Added HTTPS proxy support in ``ProxyManager``. (Issues #170, #139)
+
+ * New ``RequestField`` object can be passed to the ``fields=...`` param which
+ can specify headers. (Issue #220)
+
+ * Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails.
+ (Issue #221)
+
+ * Use international headers when posting file names. (Issue #119)
+
+ * Improved IPv6 support. (Issue #203)
+
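+ A minimal sketch of ``HTTPResponse.stream(...)`` (the URL and chunk size are
+ illustrative)::
+
+     import urllib3
+
+     http = urllib3.PoolManager()
+     r = http.request("GET", "http://example.com/", preload_content=False)
+     total = 0
+     for chunk in r.stream(1024):  # yields chunks of up to 1024 bytes
+         total += len(chunk)
+     r.release_conn()  # body fully consumed, so the connection can be reused
+     print(total)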
+
+ 1.6 (2013-04-25)
+ ----------------
+
+ * Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156)
+
+ * ``ProxyManager`` automatically adds ``Host: ...`` header if not given.
+
+ * Improved SSL-related code. ``cert_req`` now optionally takes a string like
+ "REQUIRED" or "NONE". Likewise, ``ssl_version`` now takes strings like "SSLv23".
+ The string values reflect the suffix of the respective constant variable.
+ (Issue #130)
+
+ * Vendored ``socksipy`` is now based on Anorov's fork, which handles unexpectedly
+ closed proxy connections and larger read buffers. (Issue #135)
+
+ * Ensure the connection is closed if no data is received, fixes connection leak
+ on some platforms. (Issue #133)
+
+ * Added SNI support for SSL/TLS connections on Py32+. (Issue #89)
+
+ * Tests fixed to be compatible with Py26 again. (Issue #125)
+
+ * Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant
+ to the ``ssl_version`` parameter of ``HTTPSConnectionPool`` (see the sketch below). (Issue #109)
+
+ * Allow an explicit content type to be specified when encoding file fields.
+ (Issue #126)
+
+ * Exceptions are now pickleable, with tests. (Issue #101)
+
+ * Fixed default headers not getting passed in some cases. (Issue #99)
+
+ * Treat "content-encoding" header value as case-insensitive, per RFC 2616
+ Section 3.5. (Issue #110)
+
+ * "Connection Refused" SocketErrors will get retried rather than raised.
+ (Issue #92)
+
+ * Updated vendored ``six``; it no longer overrides the global ``six`` module
+ namespace. (Issue #113)
+
+ * ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding
+ the exception that prompted the final retry. If ``reason is None`` then it
+ was due to a redirect. (Issue #92, #114)
+
+ * Fixed ``PoolManager.urlopen()`` from not redirecting more than once.
+ (Issue #149)
+
+ * Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters
+ that are not files. (Issue #111)
+
+ * Pass ``strict`` param down to ``httplib.HTTPConnection``. (Issue #122)
+
+ * Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or
+ against an arbitrary hostname (when connecting by IP or for misconfigured
+ servers). (Issue #140)
+
+ * Streaming decompression support. (Issue #159)
+
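+ A minimal sketch of passing an ``ssl.PROTOCOL_*`` constant as ``ssl_version``
+ (the host and the chosen constant are illustrative)::
+
+     import ssl
+     import urllib3
+
+     pool = urllib3.HTTPSConnectionPool("example.com", ssl_version=ssl.PROTOCOL_TLSv1)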
+
+ 1.5 (2012-08-02)
+ ----------------
+
+ * Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug
+ logging in urllib3.
+
+ * Native full URL parsing (including auth, path, query, fragment) available in
+ ``urllib3.util.parse_url(url)`` (see the sketch below).
+
+ * Built-in redirect will switch method to 'GET' if status code is 303.
+ (Issue #11)
+
+ * ``urllib3.PoolManager`` strips the scheme and host before sending the request
+ uri. (Issue #8)
+
+ * New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding,
+ based on the Content-Type header, fails.
+
+ * Fixed bug with pool depletion and leaking connections (Issue #76). Added
+ explicit connection closing on pool eviction. Added
+ ``urllib3.PoolManager.clear()``.
+
+ * 99% -> 100% unit test coverage.
+
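+ A minimal sketch of ``urllib3.util.parse_url`` (the URL is illustrative)::
+
+     from urllib3.util import parse_url
+
+     parsed = parse_url("http://user@example.com:8080/path?q=1#frag")
+     print(parsed.scheme, parsed.auth, parsed.host, parsed.port)
+     print(parsed.path, parsed.query, parsed.fragment)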
+
+ 1.4 (2012-06-16)
+ ----------------
+
+ * Minor AppEngine-related fixes.
+
+ * Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``.
+
+ * Improved url parsing. (Issue #73)
+
+ * IPv6 url support. (Issue #72)
+
+
+ 1.3 (2012-03-25)
+ ----------------
+
+ * Removed pre-1.0 deprecated API.
+
+ * Refactored helpers into a ``urllib3.util`` submodule.
+
+ * Fixed multipart encoding to support list-of-tuples for keys with multiple
+ values. (Issue #48)
+
+ * Fixed multiple Set-Cookie headers in response not getting merged properly in
+ Python 3. (Issue #53)
+
+ * AppEngine support with Py27. (Issue #61)
+
+ * Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs
+ bytes.
+
+
+ 1.2.2 (2012-02-06)
+ ------------------
+
+ * Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47)
+
+
+ 1.2.1 (2012-02-05)
+ ------------------
+
+ * Fixed another bug related to when the ``ssl`` module is not available. (Issue #41)
+
+ * Location parsing errors now raise ``urllib3.exceptions.LocationParseError``
+ which inherits from ``ValueError``.
+
+
+ 1.2 (2012-01-29)
+ ----------------
+
+ * Added Python 3 support (tested on 3.2.2)
+
+ * Dropped Python 2.5 support (tested on 2.6.7, 2.7.2)
+
+ * Use ``select.poll`` instead of ``select.select`` for platforms that support
+ it.
+
+ * Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive
+ connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``.
+
+ * Fixed ``ImportError`` during install when ``ssl`` module is not available.
+ (Issue #41)
+
+ * Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not
+ completing properly. (Issue #28, uncovered by Issue #10 in v1.1)
+
+ * Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` +
+ ``eventlet``. Removed extraneous unsupported dummyserver testing backends.
+ Added socket-level tests.
+
+ * More tests. Achievement Unlocked: 99% Coverage.
+
+
+ 1.1 (2012-01-07)
+ ----------------
+
+ * Refactored ``dummyserver`` to its own root namespace module (used for
+ testing).
+
+ * Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in
+ Py32's ``ssl_match_hostname``. (Issue #25)
+
+ * Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10)
+
+ * Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue
+ #27)
+
+ * Fixed timeout-related bugs. (Issues #17, #23)
+
+
+ 1.0.2 (2011-11-04)
+ ------------------
+
+ * Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if
+ you're using the object manually. (Thanks pyos)
+
+ * Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by
+ wrapping the access log in a mutex. (Thanks @christer)
+
+ * Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and
+ ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code.
+
+
+ 1.0.1 (2011-10-10)
+ ------------------
+
+ * Fixed a bug where the same connection would get returned into the pool twice,
+ causing extraneous "HttpConnectionPool is full" log warnings.
+
+
+ 1.0 (2011-10-08)
+ ----------------
+
+ * Added ``PoolManager`` with LRU expiration of connections (tested and
+ documented).
+ * Added ``ProxyManager`` (needs tests, docs, and confirmation that it works
+ with HTTPS proxies).
+ * Added optional partial-read support for responses when
+ ``preload_content=False``. You can now make requests and just read the headers
+ without loading the content (see the sketch below).
+ * Made response decoding optional (default on, same as before).
+ * Added optional explicit boundary string for ``encode_multipart_formdata``.
+ * Convenience request methods are now inherited from ``RequestMethods``. Old
+ helpers like ``get_url`` and ``post_url`` should be abandoned in favour of
+ the new ``request(method, url, ...)``.
+ * Refactored code to be even more decoupled, reusable, and extendable.
+ * License header added to ``.py`` files.
+ * Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code
+ and docs in ``docs/`` and on https://urllib3.readthedocs.io/.
+ * Embettered all the things!
+ * Started writing this file.
+
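+ A minimal sketch of reading only the headers with ``preload_content=False``
+ (the URL is illustrative)::
+
+     import urllib3
+
+     http = urllib3.PoolManager()
+     r = http.request("GET", "http://example.com/", preload_content=False)
+     print(r.status, r.headers.get("Content-Type"))
+     r.close()  # close the connection; the body was never read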
+
+ 0.4.1 (2011-07-17)
+ ------------------
+
+ * Minor bug fixes, code cleanup.
+
+
+ 0.4 (2011-03-01)
+ ----------------
+
+ * Better unicode support.
+ * Added ``VerifiedHTTPSConnection``.
+ * Added ``NTLMConnectionPool`` in contrib.
+ * Minor improvements.
+
+
+ 0.3.1 (2010-07-13)
+ ------------------
+
+ * Added ``assert_host_name`` optional parameter. Now compatible with proxies.
+
+
+ 0.3 (2009-12-10)
+ ----------------
+
+ * Added HTTPS support.
+ * Minor bug fixes.
+ * Refactored; breaks backwards compatibility with 0.2.
+ * API to be treated as stable from this version forward.
+
+
+ 0.2 (2008-11-17)
+ ----------------
+
+ * Added unit tests.
+ * Bug fixes.
+
+
+ 0.1 (2008-11-16)
+ ----------------
+
+ * First release.
+
+Keywords: urllib httplib threadsafe filepost http https ssl pooling
+Platform: UNKNOWN
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
+Provides-Extra: brotli
+Provides-Extra: secure
+Provides-Extra: socks
diff --git a/third_party/python/urllib3/README.rst b/third_party/python/urllib3/README.rst
new file mode 100644
index 0000000000..07a5070ccb
--- /dev/null
+++ b/third_party/python/urllib3/README.rst
@@ -0,0 +1,104 @@
+urllib3
+=======
+
+urllib3 is a powerful, *sanity-friendly* HTTP client for Python. Much of the
+Python ecosystem already uses urllib3 and you should too.
+urllib3 brings many critical features that are missing from the Python
+standard libraries:
+
+- Thread safety.
+- Connection pooling.
+- Client-side SSL/TLS verification.
+- File uploads with multipart encoding.
+- Helpers for retrying requests and dealing with HTTP redirects.
+- Support for gzip, deflate, and brotli encoding.
+- Proxy support for HTTP and SOCKS.
+- 100% test coverage.
+
+urllib3 is powerful and easy to use::
+
+ >>> import urllib3
+ >>> http = urllib3.PoolManager()
+ >>> r = http.request('GET', 'http://httpbin.org/robots.txt')
+ >>> r.status
+ 200
+ >>> r.data
+ 'User-agent: *\nDisallow: /deny\n'
+
+
+Installing
+----------
+
+urllib3 can be installed with `pip <https://pip.pypa.io>`_::
+
+ $ pip install urllib3
+
+Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
+
+ $ git clone git://github.com/urllib3/urllib3.git
+ $ python setup.py install
+
+
+Documentation
+-------------
+
+urllib3 has usage and reference documentation at `urllib3.readthedocs.io <https://urllib3.readthedocs.io>`_.
+
+
+Contributing
+------------
+
+urllib3 happily accepts contributions. Please see our
+`contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
+for some tips on getting started.
+
+
+Security Disclosures
+--------------------
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure with maintainers.
+
+Maintainers
+-----------
+
+- `@sethmlarson <https://github.com/sethmlarson>`_ (Seth M. Larson)
+- `@pquentin <https://github.com/pquentin>`_ (Quentin Pradet)
+- `@theacodes <https://github.com/theacodes>`_ (Thea Flowers)
+- `@haikuginger <https://github.com/haikuginger>`_ (Jess Shapiro)
+- `@lukasa <https://github.com/lukasa>`_ (Cory Benfield)
+- `@sigmavirus24 <https://github.com/sigmavirus24>`_ (Ian Stapleton Cordasco)
+- `@shazow <https://github.com/shazow>`_ (Andrey Petrov)
+
+👋
+
+
+Sponsorship
+-----------
+
+.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
+ :width: 75
+ :alt: Tidelift
+
+.. list-table::
+ :widths: 10 100
+
+ * - |tideliftlogo|
+ - Professional support for urllib3 is available as part of the `Tidelift
+ Subscription`_. Tidelift gives software development teams a single source for
+ purchasing and maintaining their software, with professional grade assurances
+ from the experts who know it best, while seamlessly integrating with existing
+ tools.
+
+.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
+
+If your company benefits from this library, please consider `sponsoring its
+development <https://urllib3.readthedocs.io/en/latest/contributing.html#sponsorship-project-grants>`_.
+
+Sponsors include:
+
+- Abbott (2018-2019), sponsored `@sethmlarson <https://github.com/sethmlarson>`_'s work on urllib3.
+- Google Cloud Platform (2018-2019), sponsored `@theacodes <https://github.com/theacodes>`_'s work on urllib3.
+- Akamai (2017-2018), sponsored `@haikuginger <https://github.com/haikuginger>`_'s work on urllib3.
+- Hewlett Packard Enterprise (2016-2017), sponsored `@Lukasa <https://github.com/Lukasa>`_'s work on urllib3.
diff --git a/third_party/python/urllib3/dev-requirements.txt b/third_party/python/urllib3/dev-requirements.txt
new file mode 100644
index 0000000000..e6b480eec3
--- /dev/null
+++ b/third_party/python/urllib3/dev-requirements.txt
@@ -0,0 +1,16 @@
+mock==3.0.5
+coverage~=5.0
+tornado==5.1.1;python_version<="2.7"
+tornado==6.0.3;python_version>="3.5"
+PySocks==1.7.1
+# https://github.com/Anorov/PySocks/issues/131
+win-inet-pton==1.1.0
+pytest==4.6.9
+pytest-timeout==1.3.4
+flaky==3.6.1
+trustme==0.5.3
+cryptography==2.8
+gcp-devrel-py-tools==0.0.15
+
+# https://github.com/GoogleCloudPlatform/python-repo-tools/issues/23
+pylint<2.0;python_version<="2.7"
diff --git a/third_party/python/urllib3/dummyserver/__init__.py b/third_party/python/urllib3/dummyserver/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/urllib3/dummyserver/__init__.py
diff --git a/third_party/python/urllib3/dummyserver/certs/README.rst b/third_party/python/urllib3/dummyserver/certs/README.rst
new file mode 100644
index 0000000000..7c712b6e15
--- /dev/null
+++ b/third_party/python/urllib3/dummyserver/certs/README.rst
@@ -0,0 +1,17 @@
+Generating new certificates
+---------------------------
+
+Here's how you can regenerate the certificates::
+
+ import trustme
+
+ ca = trustme.CA()
+ server_cert = ca.issue_cert(u"localhost")
+
+ ca.cert_pem.write_to_path("cacert.pem")
+ ca.private_key_pem.write_to_path("cacert.key")
+ server_cert.cert_chain_pems[0].write_to_path("server.crt")
+ server_cert.private_key_pem.write_to_path("server.key")
+
+This will break a number of tests: you will need to update the
+relevant fingerprints and hashes.
diff --git a/third_party/python/urllib3/dummyserver/certs/cacert.key b/third_party/python/urllib3/dummyserver/certs/cacert.key
new file mode 100644
index 0000000000..58e1c20167
--- /dev/null
+++ b/third_party/python/urllib3/dummyserver/certs/cacert.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAn330NEISY7w+GeZR2jh8Of1x2CtpenWRKuuD2u4FOaN0G1SN
+pm6Owum6xzhv93jmj+tZrp9kYvC9HcvGrgzH9yP566pLLfY2SEEAJYNNeVdqegY5
+9W1wa6vEDN5UYruVEbymgPHoItiYhfndgEDbagPN5dhrdNvaRNI2c5zMmBDlzzhC
+G7AZbXuthTp6OwTVye71f2lnNhKi6rzWtre/ji88fq8Qm406l29O1RTzmNttN5BZ
+nPWU9v4GnCYKXdY6BN1Ub6z8C9hna/oRgBqa0Zbv7kEuAuqhsFQeuFCfBhQm7NdP
+d/7Kh6LS+VIiCu3AOccZHOuFOjtHb/KyndmgCwIDAQABAoIBAGQg9wc308O5kmNA
+LXMKszLU4nwMBRRUaua/JPB1LeKZs3LVCnjKP+YuRox76g87X8RKxOrUNnnHGXNz
+UzBB5ehKNcS2DKy2Pi3uYOEsJZ9gOgCRmCF0q3dtRo+tpNy3V0bjYMTjGhGGWXsC
++wRhs15DNShvTkb3H3jFYFoEvo1YUKsvImBWJGwDbdDMfMZv4aeBWXlOrF+2fwt2
+TM3G1o8xzEEWBB4FLZBW+tq2zfUSa1KwqqyQ4ZIqXepjQcN6nNfuHADA+nxuruVV
+LPUhz4ZmsBEnJ7CL9zWJkLUw/al9/6Q14tleRmiZTTztqAlFgZUpNhaKSzVdsIc/
+Xz3+OgECgYEAzgNu7eFJAOzq+ZVFRrrbA7tu+f318FTCaADJ1kFgAJyj6b9yOGan
+LNL4TfXVjzgqtfQ4AIVqEHwXO7yS+YgTprvgzqRxqNRZ6ikuo2IPkIwXIAXZAlwd
+JsWLPBXOlOFW6LHvhYxjY2xF+A9y4AbuZ3UDRUQ+tp226VfEaeY80+ECgYEAxjDV
+cJqeBO06YRVGmcXfAYwRLJGT4hvIZeiSbxX/kJ0rx+cYLT/XZbAguJYQ5ZK2lkeA
+YneXYDlSTxmBxHxiWwWe3mcmctdE4Jbw8oIZ8a49a4KE/F2ojC4gmisIt3/OqGOw
+C4e/pDCE/QV64LWdazgUWHPGoVEmZx9/oMm/MWsCgYEAsLtlSJFB7ZdRpTcXLSxT
+gwoilDf36mrsNAipHjMLRrsaKwbf197If72k4kyJHspSabHO8TOC4A10aPzHIWZJ
+ZXo7y0prbyhs0mLt7Z/MNnbXx9L8bffT0lUZszwJ8tK1mf47utfK05opFDs8k0+e
+6gYJ/jwjiMoYBmoSx76KZEECgYBagJxHAmQcbdQV1yhZOhFe3H5PMt8sBnHZj32m
++o2slQkUDQRuTVPoHKikgeqPWxLDxzzqOiBHEYXzlvs6JW6okAV/G+1jzcenI2Y9
+54k/YsirWnut3nsEIGBE5lfhq5xMKtGOQlwR9xITlLgK+wQ6nO41ghD3Q15dAvY+
+D0KepwKBgQChHvbyzw0t76J2gLxUSyuG7VsId651bpqTYUsbSDFlRo4g8UbBAkHd
+fdv5BOon3ALJFreSK+a78es0kpiLwrS2SqG/y3mb9aUoLpCVB1haDmmP4Rn4AYXz
+OCfUkusuSoXOR8CMjqkXYl5QjeJAUAt9GTsZnXIbOQKbaZwkeV0HEg==
+-----END RSA PRIVATE KEY-----
diff --git a/third_party/python/urllib3/dummyserver/certs/cacert.pem b/third_party/python/urllib3/dummyserver/certs/cacert.pem
new file mode 100644
index 0000000000..710b8fbd98
--- /dev/null
+++ b/third_party/python/urllib3/dummyserver/certs/cacert.pem
@@ -0,0 +1,21 @@
+-----BEGIN CERTIFICATE-----
+MIIDfzCCAmegAwIBAgIUVGEi+7bkaGRIPoQrp9zFFAT5JYQwDQYJKoZIhvcNAQEL
+BQAwQDEXMBUGA1UECgwOdHJ1c3RtZSB2MC41LjMxJTAjBgNVBAsMHFRlc3Rpbmcg
+Q0EgIzdEUWJWOTBzV2xZSEstY0wwHhcNMDAwMTAxMDAwMDAwWhcNMzgwMTAxMDAw
+MDAwWjBAMRcwFQYDVQQKDA50cnVzdG1lIHYwLjUuMzElMCMGA1UECwwcVGVzdGlu
+ZyBDQSAjN0RRYlY5MHNXbFlISy1jTDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC
+AQoCggEBAJ999DRCEmO8PhnmUdo4fDn9cdgraXp1kSrrg9ruBTmjdBtUjaZujsLp
+usc4b/d45o/rWa6fZGLwvR3Lxq4Mx/cj+euqSy32NkhBACWDTXlXanoGOfVtcGur
+xAzeVGK7lRG8poDx6CLYmIX53YBA22oDzeXYa3Tb2kTSNnOczJgQ5c84QhuwGW17
+rYU6ejsE1cnu9X9pZzYSouq81ra3v44vPH6vEJuNOpdvTtUU85jbbTeQWZz1lPb+
+BpwmCl3WOgTdVG+s/AvYZ2v6EYAamtGW7+5BLgLqobBUHrhQnwYUJuzXT3f+yoei
+0vlSIgrtwDnHGRzrhTo7R2/ysp3ZoAsCAwEAAaNxMG8wHQYDVR0OBBYEFHWf39Hn
+rdChKjsOBoBGn1U+0VgxMBIGA1UdEwEB/wQIMAYBAf8CAQkwDgYDVR0PAQH/BAQD
+AgEGMCoGA1UdJQEB/wQgMB4GCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYBBQUHAwMw
+DQYJKoZIhvcNAQELBQADggEBAIAylnWX2WTB+mrVVpE2W8i0HollTMJIPJA9Jq3Q
+/t2uPjXDEAVAcBmQju8qy2tHpamvzpQseVm3EF3UFNlGxwOGKsTzU4J45qOJITZk
+eLRAcWNEt6cgqj8ml8PuMHU7oDnp7pP6VPe5KQH1a0FYQnDNEwg7MyX+GjnXeRwd
+re6y9nMC+XKCYUAd1/nQcrZdnSsws1M5lzXir2vuyyN9EUkf2xMMKA2E1s0f+5he
+3eNghAXtZw616ITBoMb7ckG6a0+YobbiQ0tKgB8D3MG2544Gx6xhCXf7pX4q4g//
+1nTPeYFsBDyqEOEhcW1o9/MSSbjpUJC+QUmCb2Y1wYeum+w=
+-----END CERTIFICATE-----
diff --git a/third_party/python/urllib3/dummyserver/certs/server.crt b/third_party/python/urllib3/dummyserver/certs/server.crt
new file mode 100644
index 0000000000..24026c367a
--- /dev/null
+++ b/third_party/python/urllib3/dummyserver/certs/server.crt
@@ -0,0 +1,21 @@
+-----BEGIN CERTIFICATE-----
+MIIDeTCCAmGgAwIBAgIUQeadxkH6YMoSecB2rNbFEr1u1kUwDQYJKoZIhvcNAQEL
+BQAwQDEXMBUGA1UECgwOdHJ1c3RtZSB2MC41LjMxJTAjBgNVBAsMHFRlc3Rpbmcg
+Q0EgIzdEUWJWOTBzV2xZSEstY0wwHhcNMDAwMTAxMDAwMDAwWhcNMzgwMTAxMDAw
+MDAwWjBCMRcwFQYDVQQKDA50cnVzdG1lIHYwLjUuMzEnMCUGA1UECwweVGVzdGlu
+ZyBjZXJ0ICMtSGRsMnMyTEYyeVp0NDFOMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEArRLZX+5dyCh4N7q90sH2Q4Ea6QLK8OfoUQPWtpzAtINDUAdfSXCC
+/qYTtGeSCGjB4W0LfvRTI8afHoD/M+YpaCRnx7T1sy1taA2rnGrEVXEHalVP+RI4
+t4ZWtX56aez2M0Fs6o4MtzAuP6fKgSdWzIvOmtCxqn0Zf2KbfEHnQylsy2LgPa/x
+Lg50fbZ195+h4EAB3d2/jqaeFGGhN+7zrrv4+L1eeW3bzOkvPEkTNepq3Gy/8r5e
+0i2icEnM+eBfl8NYgQ1toJYvDIy5Qi2TRzaFxBVmqUOc8EFtHpL7E9YLbTTW15xd
+oLVLdXI5igGxkwPYoeiiAJWxIsC/hL1RRQIDAQABo2kwZzAdBgNVHQ4EFgQUMU6+
+uwNmL8TxLwjrj7jzzlwDPiowDAYDVR0TAQH/BAIwADAfBgNVHSMEGDAWgBR1n9/R
+563QoSo7DgaARp9VPtFYMTAXBgNVHREBAf8EDTALgglsb2NhbGhvc3QwDQYJKoZI
+hvcNAQELBQADggEBAJ6w5neQKw+/efA/I3IHzt8GaSHQ/YehMHx8GxCViJUmLg6P
+Vf74k856Knvh7IsVaqF1uRi6qQaFPik6CwtBCj7/ZftdseOCDljd+8EWyQ+ZWie7
++tzMIdQWZxYSdR9Ov42VD++a6oWJtfJhWV5eyDit99FFK31/M1ZXoceiDS5AsIG6
+wfsxrFj1qV9pLNSIlfrnycYhYx7avVJTf+2mfZgTO9Tx+VPapkZrfCnP/2jpN39u
+zblFFjP9Ir0QqBw7MXjVX+Y1HkQ2TQnEeSsp1HuFRIZYx72Cttnckv1Lxcx/HiQB
+oebTDYiRfxOAEeIMgIhX88Jca8vNIRcXDeGK9mU=
+-----END CERTIFICATE-----
diff --git a/third_party/python/urllib3/dummyserver/certs/server.key b/third_party/python/urllib3/dummyserver/certs/server.key
new file mode 100644
index 0000000000..592ee6bea2
--- /dev/null
+++ b/third_party/python/urllib3/dummyserver/certs/server.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEArRLZX+5dyCh4N7q90sH2Q4Ea6QLK8OfoUQPWtpzAtINDUAdf
+SXCC/qYTtGeSCGjB4W0LfvRTI8afHoD/M+YpaCRnx7T1sy1taA2rnGrEVXEHalVP
++RI4t4ZWtX56aez2M0Fs6o4MtzAuP6fKgSdWzIvOmtCxqn0Zf2KbfEHnQylsy2Lg
+Pa/xLg50fbZ195+h4EAB3d2/jqaeFGGhN+7zrrv4+L1eeW3bzOkvPEkTNepq3Gy/
+8r5e0i2icEnM+eBfl8NYgQ1toJYvDIy5Qi2TRzaFxBVmqUOc8EFtHpL7E9YLbTTW
+15xdoLVLdXI5igGxkwPYoeiiAJWxIsC/hL1RRQIDAQABAoIBAQCZ/62f6G9WDHx7
+yhPhlmjTw+r37l45YYCbpbjFoFDvzeR1LzogFJbak1fxLD8KcHwjY23ZNvlLWg53
+i/yIZ4Hsgog9cM0283LoJVHPykiMZhhdCzAvxYDl/AjnUXUHD6w6CzsoseCql5pv
+VZOgvCpFsxjRNGUB+HJZoJoNRG7MmHaY638pGHGMiVbskT9Ww3emtMLdTKy1rQcj
+9XO/4mlaBGD79wYxy5Hlysbh2UYuQRv8XN5V46Uugk6sC/i2G7VC8KkqPQ2pM+rA
+LaeWSuN9dfBwiKcHtJssMP95ilsXsjqh3NoVuFODDXHv3u+nBAxtg2TnLZFkDZXH
+FvxPJu8BAoGBANwWWzvl/dnTtVDFbhF/61v3AVZV4vVpokXRJKny05CZPZWROTc4
+LXMVw9kxeecNdo0yR4jn1yyNUmKnQpTmpsR9Yo9NYH+Z1CLxswpc7ILfVRZBK6bK
+cCG43lM5xZprG6FXhqkHN2u9z5Y8/PuaMzC8iVs402/gakgPKmn8OjdhAoGBAMlQ
+mmrx24n9YY/dOn55XC5V/iN3Z6mIsHThnDIU515bwLwZVG7chOLSiWHAh4JzUH+v
+bV3NnlE1jhf5ln0WAadCtIeVprJG6msNTQlbTMTTV5kVNdbfYX6sFQEI+hC1LCiV
+yJtuNIa4P5W/PtoC3FWUlcAH8C91S/M4CeZZ0HhlAoGBAIxflgE2SBrO9S53PiTb
+OfqGKMwwK3nrzhxJsODUiCwKEUV8Qsn9gr+MekXlUKMV6y9Tily/wnYgDRPvKoBe
+PK/GaT6NU6cPLka7cj6B1jgCyfpPxs+y/qIDj4n1pxs+hXj6omDcwXRutCBW9eRk
+DZJgLhuIuxL4R9F+GsdOoLMBAoGAKQn1cLe9OXQd32YJ9p5m3EtLc49z4murDSiw
+3sTEJcgukinXvIHX1SV2PCczeLRpRJ5OfUDddVCllt2agAVscNx4UOuA//bU8t3T
+RoUGMVmkEeDxCMyg42HRJlTeJWnJhryCGK1up8gHrk8+UNMkd43CuVLk88fFo99Y
+pUzJ4sECgYEAvBDTo3k3sD18qV6p6tQwy+MVjvQb9V81GHP18tYcVKta3LkkqUFa
+3qSyVxi7gl3JtynG7NJ7+GDx6zxW2xUR72NTcJwWvesLI+1orM288pyNDVw9MJ/j
+AyVFnW5SEYEqdizTnQxL+rQB4CyeHfwZx2/1/Qr0ezLGUJv51lnk4mQ=
+-----END RSA PRIVATE KEY-----
diff --git a/third_party/python/urllib3/dummyserver/handlers.py b/third_party/python/urllib3/dummyserver/handlers.py
new file mode 100644
index 0000000000..696dbab076
--- /dev/null
+++ b/third_party/python/urllib3/dummyserver/handlers.py
@@ -0,0 +1,328 @@
+from __future__ import print_function
+
+import collections
+import contextlib
+import gzip
+import json
+import logging
+import sys
+import time
+import zlib
+
+from io import BytesIO
+from tornado.web import RequestHandler
+from tornado import httputil
+from datetime import datetime
+from datetime import timedelta
+
+from urllib3.packages.six.moves.http_client import responses
+from urllib3.packages.six.moves.urllib.parse import urlsplit
+from urllib3.packages.six import binary_type, ensure_str
+
+log = logging.getLogger(__name__)
+
+
+class Response(object):
+ def __init__(self, body="", status="200 OK", headers=None):
+ self.body = body
+ self.status = status
+ self.headers = headers or [("Content-type", "text/plain")]
+
+ def __call__(self, request_handler):
+ status, reason = self.status.split(" ", 1)
+ request_handler.set_status(int(status), reason)
+ for header, value in self.headers:
+ request_handler.add_header(header, value)
+
+ # chunked
+ if isinstance(self.body, list):
+ for item in self.body:
+ if not isinstance(item, bytes):
+ item = item.encode("utf8")
+ request_handler.write(item)
+ request_handler.flush()
+ else:
+ body = self.body
+ if not isinstance(body, bytes):
+ body = body.encode("utf8")
+
+ request_handler.write(body)
+
+
+RETRY_TEST_NAMES = collections.defaultdict(int)
+
+
+class TestingApp(RequestHandler):
+ """
+ Simple app that performs various operations, useful for testing an HTTP
+ library.
+
+ Given any path, it will attempt to load a corresponding local method if
+ it exists. Status code 200 indicates success, 400 indicates failure. Each
+ method has its own conditions for success/failure.
+ """
+
+ def get(self):
+ """ Handle GET requests """
+ self._call_method()
+
+ def post(self):
+ """ Handle POST requests """
+ self._call_method()
+
+ def put(self):
+ """ Handle PUT requests """
+ self._call_method()
+
+ def options(self):
+ """ Handle OPTIONS requests """
+ self._call_method()
+
+ def head(self):
+ """ Handle HEAD requests """
+ self._call_method()
+
+ def _call_method(self):
+ """ Call the correct method in this class based on the incoming URI """
+ req = self.request
+ req.params = {}
+ for k, v in req.arguments.items():
+ req.params[k] = next(iter(v))
+
+ path = req.path[:]
+ if not path.startswith("/"):
+ path = urlsplit(path).path
+
+ target = path[1:].split("/", 1)[0]
+ method = getattr(self, target, self.index)
+
+ resp = method(req)
+
+ if dict(resp.headers).get("Connection") == "close":
+ # FIXME: Can we kill the connection somehow?
+ pass
+
+ resp(self)
+
+ def index(self, _request):
+ "Render simple message"
+ return Response("Dummy server!")
+
+ def certificate(self, request):
+ """Return the requester's certificate."""
+ cert = request.get_ssl_certificate()
+ subject = dict()
+ if cert is not None:
+ subject = dict((k, v) for (k, v) in [y for z in cert["subject"] for y in z])
+ return Response(json.dumps(subject))
+
+ def source_address(self, request):
+ """Return the requester's IP address."""
+ return Response(request.remote_ip)
+
+ def set_up(self, request):
+ test_type = request.params.get("test_type")
+ test_id = request.params.get("test_id")
+ if test_id:
+ print("\nNew test %s: %s" % (test_type, test_id))
+ else:
+ print("\nNew test %s" % test_type)
+ return Response("Dummy server is ready!")
+
+ def specific_method(self, request):
+ "Confirm that the request matches the desired method type"
+ method = request.params.get("method")
+ if method and not isinstance(method, str):
+ method = method.decode("utf8")
+
+ if request.method != method:
+ return Response(
+ "Wrong method: %s != %s" % (method, request.method),
+ status="400 Bad Request",
+ )
+ return Response()
+
+ def upload(self, request):
+ "Confirm that the uploaded file conforms to specification"
+ # FIXME: This is a huge broken mess
+ param = request.params.get("upload_param", b"myfile").decode("ascii")
+ filename = request.params.get("upload_filename", b"").decode("utf-8")
+ size = int(request.params.get("upload_size", "0"))
+ files_ = request.files.get(param)
+
+ if len(files_) != 1:
+ return Response(
+ "Expected 1 file for '%s', not %d" % (param, len(files_)),
+ status="400 Bad Request",
+ )
+ file_ = files_[0]
+
+ data = file_["body"]
+ if int(size) != len(data):
+ return Response(
+ "Wrong size: %d != %d" % (size, len(data)), status="400 Bad Request"
+ )
+
+ got_filename = file_["filename"]
+ if isinstance(got_filename, binary_type):
+ got_filename = got_filename.decode("utf-8")
+
+ # Tornado can leave the trailing \n in place on the filename.
+ if filename != got_filename:
+ return Response(
+ u"Wrong filename: %s != %s" % (filename, file_.filename),
+ status="400 Bad Request",
+ )
+
+ return Response()
+
+ def redirect(self, request):
+ "Perform a redirect to ``target``"
+ target = request.params.get("target", "/")
+ status = request.params.get("status", "303 See Other")
+ if len(status) == 3:
+ status = "%s Redirect" % status.decode("latin-1")
+
+ headers = [("Location", target)]
+ return Response(status=status, headers=headers)
+
+ def not_found(self, request):
+ return Response("Not found", status="404 Not Found")
+
+ def multi_redirect(self, request):
+ "Performs a redirect chain based on ``redirect_codes``"
+ codes = request.params.get("redirect_codes", b"200").decode("utf-8")
+ head, tail = codes.split(",", 1) if "," in codes else (codes, None)
+ status = "{0} {1}".format(head, responses[int(head)])
+ if not tail:
+ return Response("Done redirecting", status=status)
+
+ headers = [("Location", "/multi_redirect?redirect_codes=%s" % tail)]
+ return Response(status=status, headers=headers)
+
+ def keepalive(self, request):
+ if request.params.get("close", b"0") == b"1":
+ headers = [("Connection", "close")]
+ return Response("Closing", headers=headers)
+
+ headers = [("Connection", "keep-alive")]
+ return Response("Keeping alive", headers=headers)
+
+ def echo_params(self, request):
+ params = sorted(
+ [(ensure_str(k), ensure_str(v)) for k, v in request.params.items()]
+ )
+ return Response(repr(params))
+
+ def sleep(self, request):
+ "Sleep for a specified amount of ``seconds``"
+ # DO NOT USE THIS, IT'S DEPRECATED.
+ # FIXME: Delete this once appengine tests are fixed to not use this handler.
+ seconds = float(request.params.get("seconds", "1"))
+ time.sleep(seconds)
+ return Response()
+
+ def echo(self, request):
+ "Echo back the params"
+ if request.method == "GET":
+ return Response(request.query)
+
+ return Response(request.body)
+
+ def echo_uri(self, request):
+ "Echo back the requested URI"
+ return Response(request.uri)
+
+ def encodingrequest(self, request):
+ "Check for UA accepting gzip/deflate encoding"
+ data = b"hello, world!"
+ encoding = request.headers.get("Accept-Encoding", "")
+ headers = None
+ if encoding == "gzip":
+ headers = [("Content-Encoding", "gzip")]
+ file_ = BytesIO()
+ with contextlib.closing(
+ gzip.GzipFile("", mode="w", fileobj=file_)
+ ) as zipfile:
+ zipfile.write(data)
+ data = file_.getvalue()
+ elif encoding == "deflate":
+ headers = [("Content-Encoding", "deflate")]
+ data = zlib.compress(data)
+ elif encoding == "garbage-gzip":
+ headers = [("Content-Encoding", "gzip")]
+ data = "garbage"
+ elif encoding == "garbage-deflate":
+ headers = [("Content-Encoding", "deflate")]
+ data = "garbage"
+ return Response(data, headers=headers)
+
+ def headers(self, request):
+ return Response(json.dumps(dict(request.headers)))
+
+ def successful_retry(self, request):
+ """ Handler which will return an error and then success
+
+ It's not currently very flexible as the number of retries is hard-coded.
+ """
+ test_name = request.headers.get("test-name", None)
+ if not test_name:
+ return Response("test-name header not set", status="400 Bad Request")
+
+ RETRY_TEST_NAMES[test_name] += 1
+
+ if RETRY_TEST_NAMES[test_name] >= 2:
+ return Response("Retry successful!")
+ else:
+ return Response("need to keep retrying!", status="418 I'm A Teapot")
+
+ def chunked(self, request):
+ return Response(["123"] * 4)
+
+ def chunked_gzip(self, request):
+ chunks = []
+ compressor = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
+
+ for uncompressed in [b"123"] * 4:
+ chunks.append(compressor.compress(uncompressed))
+
+ chunks.append(compressor.flush())
+
+ return Response(chunks, headers=[("Content-Encoding", "gzip")])
+
+ def nbytes(self, request):
+ length = int(request.params.get("length"))
+ data = b"1" * length
+ return Response(data, headers=[("Content-Type", "application/octet-stream")])
+
+ def status(self, request):
+ status = request.params.get("status", "200 OK")
+
+ return Response(status=status)
+
+ def retry_after(self, request):
+ if datetime.now() - self.application.last_req < timedelta(seconds=1):
+ status = request.params.get("status", b"429 Too Many Requests")
+ return Response(
+ status=status.decode("utf-8"), headers=[("Retry-After", "1")]
+ )
+
+ self.application.last_req = datetime.now()
+
+ return Response(status="200 OK")
+
+ def redirect_after(self, request):
+ "Perform a redirect to ``target``"
+ date = request.params.get("date")
+ if date:
+ retry_after = str(
+ httputil.format_timestamp(datetime.fromtimestamp(float(date)))
+ )
+ else:
+ retry_after = "1"
+ target = request.params.get("target", "/")
+ headers = [("Location", target), ("Retry-After", retry_after)]
+ return Response(status="303 See Other", headers=headers)
+
+ def shutdown(self, request):
+ sys.exit()
diff --git a/third_party/python/urllib3/dummyserver/proxy.py b/third_party/python/urllib3/dummyserver/proxy.py
new file mode 100755
index 0000000000..c4f0b824f7
--- /dev/null
+++ b/third_party/python/urllib3/dummyserver/proxy.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python
+#
+# Simple asynchronous HTTP proxy with tunnelling (CONNECT).
+#
+# GET/POST proxying based on
+# http://groups.google.com/group/python-tornado/msg/7bea08e7a049cf26
+#
+# Copyright (C) 2012 Senko Rasic <senko.rasic@dobarkod.hr>
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+
+import sys
+import socket
+
+import tornado.gen
+import tornado.httpserver
+import tornado.ioloop
+import tornado.iostream
+import tornado.web
+import tornado.httpclient
+
+__all__ = ["ProxyHandler", "run_proxy"]
+
+
+class ProxyHandler(tornado.web.RequestHandler):
+ SUPPORTED_METHODS = ["GET", "POST", "CONNECT"]
+
+ @tornado.gen.coroutine
+ def get(self):
+ def handle_response(response):
+ if response.error and not isinstance(
+ response.error, tornado.httpclient.HTTPError
+ ):
+ self.set_status(500)
+ self.write("Internal server error:\n" + str(response.error))
+ self.finish()
+ else:
+ self.set_status(response.code)
+ for header in (
+ "Date",
+ "Cache-Control",
+ "Server",
+ "Content-Type",
+ "Location",
+ ):
+ v = response.headers.get(header)
+ if v:
+ self.set_header(header, v)
+ if response.body:
+ self.write(response.body)
+ self.finish()
+
+ req = tornado.httpclient.HTTPRequest(
+ url=self.request.uri,
+ method=self.request.method,
+ body=self.request.body,
+ headers=self.request.headers,
+ follow_redirects=False,
+ allow_nonstandard_methods=True,
+ )
+
+ client = tornado.httpclient.AsyncHTTPClient()
+ try:
+ response = yield client.fetch(req)
+ yield handle_response(response)
+ except tornado.httpclient.HTTPError as e:
+ if hasattr(e, "response") and e.response:
+ yield handle_response(e.response)
+ else:
+ self.set_status(500)
+ self.write("Internal server error:\n" + str(e))
+ self.finish()
+
+ @tornado.gen.coroutine
+ def post(self):
+ yield self.get()
+
+ @tornado.gen.coroutine
+ def connect(self):
+ host, port = self.request.uri.split(":")
+ client = self.request.connection.stream
+
+ @tornado.gen.coroutine
+ def start_forward(reader, writer):
+ while True:
+ try:
+ data = yield reader.read_bytes(4096, partial=True)
+ except tornado.iostream.StreamClosedError:
+ break
+ if not data:
+ break
+ writer.write(data)
+ writer.close()
+
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
+ upstream = tornado.iostream.IOStream(s)
+ yield upstream.connect((host, int(port)))
+
+ client.write(b"HTTP/1.0 200 Connection established\r\n\r\n")
+ fu1 = start_forward(client, upstream)
+ fu2 = start_forward(upstream, client)
+ yield [fu1, fu2]
+
+
+def run_proxy(port, start_ioloop=True):
+ """
+ Run proxy on the specified port. If start_ioloop is True (default),
+ the tornado IOLoop will be started immediately.
+ """
+ app = tornado.web.Application([(r".*", ProxyHandler)])
+ app.listen(port)
+ ioloop = tornado.ioloop.IOLoop.instance()
+ if start_ioloop:
+ ioloop.start()
+
+
+if __name__ == "__main__":
+ port = 8888
+ if len(sys.argv) > 1:
+ port = int(sys.argv[1])
+
+ print("Starting HTTP proxy on port %d" % port)
+ run_proxy(port)
diff --git a/third_party/python/urllib3/dummyserver/server.py b/third_party/python/urllib3/dummyserver/server.py
new file mode 100755
index 0000000000..68f383534e
--- /dev/null
+++ b/third_party/python/urllib3/dummyserver/server.py
@@ -0,0 +1,188 @@
+#!/usr/bin/env python
+
+"""
+Dummy server used for unit testing.
+"""
+from __future__ import print_function
+
+import logging
+import os
+import sys
+import threading
+import socket
+import warnings
+import ssl
+from datetime import datetime
+
+from urllib3.exceptions import HTTPWarning
+
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import serialization
+import tornado.httpserver
+import tornado.ioloop
+import tornado.netutil
+import tornado.web
+import trustme
+
+
+log = logging.getLogger(__name__)
+
+CERTS_PATH = os.path.join(os.path.dirname(__file__), "certs")
+DEFAULT_CERTS = {
+ "certfile": os.path.join(CERTS_PATH, "server.crt"),
+ "keyfile": os.path.join(CERTS_PATH, "server.key"),
+ "cert_reqs": ssl.CERT_OPTIONAL,
+ "ca_certs": os.path.join(CERTS_PATH, "cacert.pem"),
+}
+DEFAULT_CA = os.path.join(CERTS_PATH, "cacert.pem")
+DEFAULT_CA_KEY = os.path.join(CERTS_PATH, "cacert.key")
+
+
+def _resolves_to_ipv6(host):
+ """ Returns True if the system resolves host to an IPv6 address by default. """
+ resolves_to_ipv6 = False
+ try:
+ for res in socket.getaddrinfo(host, None, socket.AF_UNSPEC):
+ af, _, _, _, _ = res
+ if af == socket.AF_INET6:
+ resolves_to_ipv6 = True
+ except socket.gaierror:
+ pass
+
+ return resolves_to_ipv6
+
+
+def _has_ipv6(host):
+ """ Returns True if the system can bind an IPv6 address. """
+ sock = None
+ has_ipv6 = False
+
+ if socket.has_ipv6:
+ # has_ipv6 returns true if cPython was compiled with IPv6 support.
+ # It does not tell us if the system has IPv6 support enabled. To
+ # determine that we must bind to an IPv6 address.
+ # https://github.com/urllib3/urllib3/pull/611
+ # https://bugs.python.org/issue658327
+ try:
+ sock = socket.socket(socket.AF_INET6)
+ sock.bind((host, 0))
+ has_ipv6 = _resolves_to_ipv6("localhost")
+ except Exception:
+ pass
+
+ if sock:
+ sock.close()
+ return has_ipv6
+
+
+# Some systems may have IPv6 support but DNS may not be configured
+# properly. We can not count that localhost will resolve to ::1 on all
+# systems. See https://github.com/urllib3/urllib3/pull/611 and
+# https://bugs.python.org/issue18792
+HAS_IPV6_AND_DNS = _has_ipv6("localhost")
+HAS_IPV6 = _has_ipv6("::1")
+
+
+# Different types of servers we have:
+
+
+class NoIPv6Warning(HTTPWarning):
+ "IPv6 is not available"
+ pass
+
+
+class SocketServerThread(threading.Thread):
+ """
+ :param socket_handler: Callable which receives a socket argument for one
+ request.
+ :param ready_event: Event which gets set when the socket handler is
+ ready to receive requests.
+ """
+
+ USE_IPV6 = HAS_IPV6_AND_DNS
+
+ def __init__(self, socket_handler, host="localhost", port=8081, ready_event=None):
+ threading.Thread.__init__(self)
+ self.daemon = True
+
+ self.socket_handler = socket_handler
+ self.host = host
+ self.ready_event = ready_event
+
+ def _start_server(self):
+ if self.USE_IPV6:
+ sock = socket.socket(socket.AF_INET6)
+ else:
+ warnings.warn("No IPv6 support. Falling back to IPv4.", NoIPv6Warning)
+ sock = socket.socket(socket.AF_INET)
+ if sys.platform != "win32":
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ sock.bind((self.host, 0))
+ self.port = sock.getsockname()[1]
+
+ # Once listen() returns, the server socket is ready
+ sock.listen(1)
+
+ if self.ready_event:
+ self.ready_event.set()
+
+ self.socket_handler(sock)
+ sock.close()
+
+ def run(self):
+ self.server = self._start_server()
+
+
+def run_tornado_app(app, io_loop, certs, scheme, host):
+ assert io_loop == tornado.ioloop.IOLoop.current()
+
+ # We can't use fromtimestamp(0) because of CPython issue 29097, so we'll
+ # just construct the datetime object directly.
+ app.last_req = datetime(1970, 1, 1)
+
+ if scheme == "https":
+ http_server = tornado.httpserver.HTTPServer(app, ssl_options=certs)
+ else:
+ http_server = tornado.httpserver.HTTPServer(app)
+
+ sockets = tornado.netutil.bind_sockets(None, address=host)
+ port = sockets[0].getsockname()[1]
+ http_server.add_sockets(sockets)
+ return http_server, port
+
+
+def run_loop_in_thread(io_loop):
+ t = threading.Thread(target=io_loop.start)
+ t.start()
+ return t
+
+
+def get_unreachable_address():
+ # reserved as per rfc2606
+ return ("something.invalid", 54321)
+
+
+if __name__ == "__main__":
+ # For debugging dummyserver itself - python -m dummyserver.server
+ from .testcase import TestingApp
+
+ host = "127.0.0.1"
+
+ io_loop = tornado.ioloop.IOLoop.current()
+ app = tornado.web.Application([(r".*", TestingApp)])
+ server, port = run_tornado_app(app, io_loop, None, "http", host)
+ server_thread = run_loop_in_thread(io_loop)
+
+ print("Listening on http://{host}:{port}".format(host=host, port=port))
+
+
+def encrypt_key_pem(private_key_pem, password):
+ private_key = serialization.load_pem_private_key(
+ private_key_pem.bytes(), password=None, backend=default_backend()
+ )
+ encrypted_key = private_key.private_bytes(
+ serialization.Encoding.PEM,
+ serialization.PrivateFormat.TraditionalOpenSSL,
+ serialization.BestAvailableEncryption(password),
+ )
+ return trustme.Blob(encrypted_key)
diff --git a/third_party/python/urllib3/dummyserver/testcase.py b/third_party/python/urllib3/dummyserver/testcase.py
new file mode 100644
index 0000000000..90c6b2240c
--- /dev/null
+++ b/third_party/python/urllib3/dummyserver/testcase.py
@@ -0,0 +1,210 @@
+import threading
+
+import pytest
+from tornado import ioloop, web
+
+from dummyserver.server import (
+ SocketServerThread,
+ run_tornado_app,
+ run_loop_in_thread,
+ DEFAULT_CERTS,
+ HAS_IPV6,
+)
+from dummyserver.handlers import TestingApp
+from dummyserver.proxy import ProxyHandler
+
+
+def consume_socket(sock, chunks=65536):
+ consumed = bytearray()
+ while True:
+ b = sock.recv(chunks)
+ consumed += b
+ if b.endswith(b"\r\n\r\n"):
+ break
+ return consumed
+
+
+class SocketDummyServerTestCase(object):
+ """
+ A simple socket-based server is created for this class that is good for
+ exactly one request.
+ """
+
+ scheme = "http"
+ host = "localhost"
+
+ @classmethod
+ def _start_server(cls, socket_handler):
+ ready_event = threading.Event()
+ cls.server_thread = SocketServerThread(
+ socket_handler=socket_handler, ready_event=ready_event, host=cls.host
+ )
+ cls.server_thread.start()
+ ready_event.wait(5)
+ if not ready_event.is_set():
+ raise Exception("most likely failed to start server")
+ cls.port = cls.server_thread.port
+
+ @classmethod
+ def start_response_handler(cls, response, num=1, block_send=None):
+ ready_event = threading.Event()
+
+ def socket_handler(listener):
+ for _ in range(num):
+ ready_event.set()
+
+ sock = listener.accept()[0]
+ consume_socket(sock)
+ if block_send:
+ block_send.wait()
+ block_send.clear()
+ sock.send(response)
+ sock.close()
+
+ cls._start_server(socket_handler)
+ return ready_event
+
+ @classmethod
+ def start_basic_handler(cls, **kw):
+ return cls.start_response_handler(
+ b"HTTP/1.1 200 OK\r\n" b"Content-Length: 0\r\n" b"\r\n", **kw
+ )
+
+ @classmethod
+ def teardown_class(cls):
+ if hasattr(cls, "server_thread"):
+ cls.server_thread.join(0.1)
+
+ def assert_header_received(
+ self, received_headers, header_name, expected_value=None
+ ):
+ header_name = header_name.encode("ascii")
+ if expected_value is not None:
+ expected_value = expected_value.encode("ascii")
+ header_titles = []
+ for header in received_headers:
+ key, value = header.split(b": ")
+ header_titles.append(key)
+ if key == header_name and expected_value is not None:
+ assert value == expected_value
+ assert header_name in header_titles
+
+
+class IPV4SocketDummyServerTestCase(SocketDummyServerTestCase):
+ @classmethod
+ def _start_server(cls, socket_handler):
+ ready_event = threading.Event()
+ cls.server_thread = SocketServerThread(
+ socket_handler=socket_handler, ready_event=ready_event, host=cls.host
+ )
+ cls.server_thread.USE_IPV6 = False
+ cls.server_thread.start()
+ ready_event.wait(5)
+ if not ready_event.is_set():
+ raise Exception("most likely failed to start server")
+ cls.port = cls.server_thread.port
+
+
+class HTTPDummyServerTestCase(object):
+ """ A simple HTTP server that runs when your test class runs
+
+ Have your test class inherit from this one, and then a simple server
+ will start when your tests run, and automatically shut down when they
+ complete. For examples of what test requests you can send to the server,
+ see the TestingApp in dummyserver/handlers.py.
+ """
+
+ scheme = "http"
+ host = "localhost"
+ host_alt = "127.0.0.1" # Some tests need two hosts
+ certs = DEFAULT_CERTS
+
+ @classmethod
+ def _start_server(cls):
+ cls.io_loop = ioloop.IOLoop.current()
+ app = web.Application([(r".*", TestingApp)])
+ cls.server, cls.port = run_tornado_app(
+ app, cls.io_loop, cls.certs, cls.scheme, cls.host
+ )
+ cls.server_thread = run_loop_in_thread(cls.io_loop)
+
+ @classmethod
+ def _stop_server(cls):
+ cls.io_loop.add_callback(cls.server.stop)
+ cls.io_loop.add_callback(cls.io_loop.stop)
+ cls.server_thread.join()
+
+ @classmethod
+ def setup_class(cls):
+ cls._start_server()
+
+ @classmethod
+ def teardown_class(cls):
+ cls._stop_server()
+
+
+class HTTPSDummyServerTestCase(HTTPDummyServerTestCase):
+ scheme = "https"
+ host = "localhost"
+ certs = DEFAULT_CERTS
+
+
+class HTTPDummyProxyTestCase(object):
+
+ http_host = "localhost"
+ http_host_alt = "127.0.0.1"
+
+ https_host = "localhost"
+ https_host_alt = "127.0.0.1"
+ https_certs = DEFAULT_CERTS
+
+ proxy_host = "localhost"
+ proxy_host_alt = "127.0.0.1"
+
+ @classmethod
+ def setup_class(cls):
+ cls.io_loop = ioloop.IOLoop.current()
+
+ app = web.Application([(r".*", TestingApp)])
+ cls.http_server, cls.http_port = run_tornado_app(
+ app, cls.io_loop, None, "http", cls.http_host
+ )
+
+ app = web.Application([(r".*", TestingApp)])
+ cls.https_server, cls.https_port = run_tornado_app(
+ app, cls.io_loop, cls.https_certs, "https", cls.http_host
+ )
+
+ app = web.Application([(r".*", ProxyHandler)])
+ cls.proxy_server, cls.proxy_port = run_tornado_app(
+ app, cls.io_loop, None, "http", cls.proxy_host
+ )
+
+ cls.server_thread = run_loop_in_thread(cls.io_loop)
+
+ @classmethod
+ def teardown_class(cls):
+ cls.io_loop.add_callback(cls.http_server.stop)
+ cls.io_loop.add_callback(cls.https_server.stop)
+ cls.io_loop.add_callback(cls.proxy_server.stop)
+ cls.io_loop.add_callback(cls.io_loop.stop)
+ cls.server_thread.join()
+
+
+@pytest.mark.skipif(not HAS_IPV6, reason="IPv6 not available")
+class IPv6HTTPDummyServerTestCase(HTTPDummyServerTestCase):
+ host = "::1"
+
+
+@pytest.mark.skipif(not HAS_IPV6, reason="IPv6 not available")
+class IPv6HTTPDummyProxyTestCase(HTTPDummyProxyTestCase):
+
+ http_host = "localhost"
+ http_host_alt = "127.0.0.1"
+
+ https_host = "localhost"
+ https_host_alt = "127.0.0.1"
+ https_certs = DEFAULT_CERTS
+
+ proxy_host = "::1"
+ proxy_host_alt = "127.0.0.1"
diff --git a/third_party/python/urllib3/setup.cfg b/third_party/python/urllib3/setup.cfg
new file mode 100644
index 0000000000..81b5727060
--- /dev/null
+++ b/third_party/python/urllib3/setup.cfg
@@ -0,0 +1,31 @@
+[flake8]
+ignore = E501, E203, W503, W504
+exclude = ./docs/conf.py,./src/urllib3/packages/*
+max-line-length = 99
+
+[bdist_wheel]
+universal = 1
+
+[metadata]
+license_file = LICENSE.txt
+provides-extra =
+ secure
+ socks
+ brotli
+requires-dist =
+ pyOpenSSL>=0.14; extra == 'secure'
+ cryptography>=1.3.4; extra == 'secure'
+ idna>=2.0.0; extra == 'secure'
+ certifi; extra == 'secure'
+ ipaddress; python_version=="2.7" and extra == 'secure'
+ PySocks>=1.5.6,<2.0,!=1.5.7; extra == 'socks'
+ brotlipy>=0.6.0; extra == 'brotli'
+
+[tool:pytest]
+xfail_strict = true
+python_classes = Test *TestCase
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/urllib3/setup.py b/third_party/python/urllib3/setup.py
new file mode 100755
index 0000000000..c0e1796fdc
--- /dev/null
+++ b/third_party/python/urllib3/setup.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+
+from setuptools import setup
+
+import os
+import re
+import codecs
+
+base_path = os.path.dirname(__file__)
+
+# Get the version (borrowed from SQLAlchemy)
+with open(os.path.join(base_path, "src", "urllib3", "__init__.py")) as fp:
+ VERSION = (
+ re.compile(r""".*__version__ = ["'](.*?)['"]""", re.S).match(fp.read()).group(1)
+ )
+
+
+with codecs.open("README.rst", encoding="utf-8") as fp:
+ readme = fp.read()
+
+with codecs.open("CHANGES.rst", encoding="utf-8") as fp:
+ changes = fp.read()
+
+version = VERSION
+
+setup(
+ name="urllib3",
+ version=version,
+ description="HTTP library with thread-safe connection pooling, file post, and more.",
+ long_description=u"\n\n".join([readme, changes]),
+ classifiers=[
+ "Environment :: Web Environment",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ "Topic :: Internet :: WWW/HTTP",
+ "Topic :: Software Development :: Libraries",
+ ],
+ keywords="urllib httplib threadsafe filepost http https ssl pooling",
+ author="Andrey Petrov",
+ author_email="andrey.petrov@shazow.net",
+ url="https://urllib3.readthedocs.io/",
+ project_urls={
+ "Documentation": "https://urllib3.readthedocs.io/",
+ "Code": "https://github.com/urllib3/urllib3",
+ "Issue tracker": "https://github.com/urllib3/urllib3/issues",
+ },
+ license="MIT",
+ packages=[
+ "urllib3",
+ "urllib3.packages",
+ "urllib3.packages.ssl_match_hostname",
+ "urllib3.packages.backports",
+ "urllib3.contrib",
+ "urllib3.contrib._securetransport",
+ "urllib3.util",
+ ],
+ package_dir={"": "src"},
+ requires=[],
+ python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4",
+ extras_require={
+ "brotli": ["brotlipy>=0.6.0"],
+ "secure": [
+ "pyOpenSSL>=0.14",
+ "cryptography>=1.3.4",
+ "idna>=2.0.0",
+ "certifi",
+ "ipaddress; python_version=='2.7'",
+ ],
+ "socks": ["PySocks>=1.5.6,<2.0,!=1.5.7"],
+ },
+)
diff --git a/third_party/python/urllib3/src/urllib3.egg-info/PKG-INFO b/third_party/python/urllib3/src/urllib3.egg-info/PKG-INFO
new file mode 100644
index 0000000000..d95b8c2d27
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3.egg-info/PKG-INFO
@@ -0,0 +1,1253 @@
+Metadata-Version: 2.1
+Name: urllib3
+Version: 1.25.9
+Summary: HTTP library with thread-safe connection pooling, file post, and more.
+Home-page: https://urllib3.readthedocs.io/
+Author: Andrey Petrov
+Author-email: andrey.petrov@shazow.net
+License: MIT
+Project-URL: Documentation, https://urllib3.readthedocs.io/
+Project-URL: Code, https://github.com/urllib3/urllib3
+Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues
+Description: urllib3
+ =======
+
+ urllib3 is a powerful, *sanity-friendly* HTTP client for Python. Much of the
+ Python ecosystem already uses urllib3 and you should too.
+ urllib3 brings many critical features that are missing from the Python
+ standard libraries:
+
+ - Thread safety.
+ - Connection pooling.
+ - Client-side SSL/TLS verification.
+ - File uploads with multipart encoding.
+ - Helpers for retrying requests and dealing with HTTP redirects.
+ - Support for gzip, deflate, and brotli encoding.
+ - Proxy support for HTTP and SOCKS.
+ - 100% test coverage.
+
+ urllib3 is powerful and easy to use::
+
+ >>> import urllib3
+ >>> http = urllib3.PoolManager()
+ >>> r = http.request('GET', 'http://httpbin.org/robots.txt')
+ >>> r.status
+ 200
+ >>> r.data
+ 'User-agent: *\nDisallow: /deny\n'
+
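+ Retries and timeouts can be tuned per request as well. A minimal sketch
+ using the ``Retry`` and ``Timeout`` helpers exported at the top level of
+ the package::
+
+ >>> retry = urllib3.Retry(total=3, backoff_factor=0.2)
+ >>> timeout = urllib3.Timeout(connect=2.0, read=5.0)
+ >>> r = http.request('GET', 'http://httpbin.org/robots.txt',
+ ... retries=retry, timeout=timeout)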
+
+ Installing
+ ----------
+
+ urllib3 can be installed with `pip <https://pip.pypa.io>`_::
+
+ $ pip install urllib3
+
+ Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
+
+ $ git clone git://github.com/urllib3/urllib3.git
+ $ python setup.py install
+
+
+ Documentation
+ -------------
+
+ urllib3 has usage and reference documentation at `urllib3.readthedocs.io <https://urllib3.readthedocs.io>`_.
+
+
+ Contributing
+ ------------
+
+ urllib3 happily accepts contributions. Please see our
+ `contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
+ for some tips on getting started.
+
+
+ Security Disclosures
+ --------------------
+
+ To report a security vulnerability, please use the
+ `Tidelift security contact <https://tidelift.com/security>`_.
+ Tidelift will coordinate the fix and disclosure with maintainers.
+
+ Maintainers
+ -----------
+
+ - `@sethmlarson <https://github.com/sethmlarson>`_ (Seth M. Larson)
+ - `@pquentin <https://github.com/pquentin>`_ (Quentin Pradet)
+ - `@theacodes <https://github.com/theacodes>`_ (Thea Flowers)
+ - `@haikuginger <https://github.com/haikuginger>`_ (Jess Shapiro)
+ - `@lukasa <https://github.com/lukasa>`_ (Cory Benfield)
+ - `@sigmavirus24 <https://github.com/sigmavirus24>`_ (Ian Stapleton Cordasco)
+ - `@shazow <https://github.com/shazow>`_ (Andrey Petrov)
+
+ 👋
+
+
+ Sponsorship
+ -----------
+
+ .. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
+ :width: 75
+ :alt: Tidelift
+
+ .. list-table::
+ :widths: 10 100
+
+ * - |tideliftlogo|
+ - Professional support for urllib3 is available as part of the `Tidelift
+ Subscription`_. Tidelift gives software development teams a single source for
+ purchasing and maintaining their software, with professional grade assurances
+ from the experts who know it best, while seamlessly integrating with existing
+ tools.
+
+ .. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
+
+ If your company benefits from this library, please consider `sponsoring its
+ development <https://urllib3.readthedocs.io/en/latest/contributing.html#sponsorship-project-grants>`_.
+
+ Sponsors include:
+
+ - Abbott (2018-2019), sponsored `@sethmlarson <https://github.com/sethmlarson>`_'s work on urllib3.
+ - Google Cloud Platform (2018-2019), sponsored `@theacodes <https://github.com/theacodes>`_'s work on urllib3.
+ - Akamai (2017-2018), sponsored `@haikuginger <https://github.com/haikuginger>`_'s work on urllib3.
+ - Hewlett Packard Enterprise (2016-2017), sponsored `@Lukasa’s <https://github.com/Lukasa>`_ work on urllib3.
+
+
+ Changes
+ =======
+
+ 1.25.9 (2020-04-16)
+ -------------------
+
+ * Added ``InvalidProxyConfigurationWarning``, which is raised when an
+ HTTPS proxy URL is erroneously specified. urllib3 doesn't currently
+ support connecting to HTTPS proxies but will soon be able to, and we
+ would like users to migrate properly without much breakage.
+
+ See `this GitHub issue <https://github.com/urllib3/urllib3/issues/1850>`_
+ for more information on how to fix your proxy config. (Pull #1851)
+
+ * Drain connection after ``PoolManager`` redirect (Pull #1817)
+
+ * Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812)
+
+ * Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805)
+
+ * Allow the CA certificate data to be passed as a string (Pull #1804)
+
+ * Raise ``ValueError`` if method contains control characters (Pull #1800)
+
+ * Add ``__repr__`` to ``Timeout`` (Pull #1795)
+
+
+ 1.25.8 (2020-01-20)
+ -------------------
+
+ * Drop support for EOL Python 3.4 (Pull #1774)
+
+ * Optimize _encode_invalid_chars (Pull #1787)
+
+
+ 1.25.7 (2019-11-11)
+ -------------------
+
+ * Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734)
+
+ * Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470)
+
+ * Fix issue where URL fragment was sent within the request target. (Pull #1732)
+
+ * Fix issue where an empty query section in a URL would fail to parse. (Pull #1732)
+
+ * Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703)
+
+
+ 1.25.6 (2019-09-24)
+ -------------------
+
+ * Fix issue where tilde (``~``) characters were incorrectly
+ percent-encoded in the path. (Pull #1692)
+
+
+ 1.25.5 (2019-09-19)
+ -------------------
+
+ * Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which
+ caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``.
+ (Issue #1682)
+
+
+ 1.25.4 (2019-09-19)
+ -------------------
+
+ * Propagate Retry-After header settings to subsequent retries. (Pull #1607)
+
+ * Fix edge case where Retry-After header was still respected even when
+ explicitly opted out of. (Pull #1607)
+
+ * Remove dependency on ``rfc3986`` for URL parsing.
+
+ * Fix issue where URLs containing invalid characters within ``Url.auth`` would
+ raise an exception instead of percent-encoding those characters.
+
+ * Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses
+ work well with BufferedReaders and other ``io`` module features. (Pull #1652)
+
+ * Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673)
+
+
+ 1.25.3 (2019-05-23)
+ -------------------
+
+ * Change ``HTTPSConnection`` to load system CA certificates
+ when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are
+ unspecified. (Pull #1608, Issue #1603)
+
+ * Upgrade bundled rfc3986 to v1.3.2. (Pull #1609, Issue #1605)
+
+
+ 1.25.2 (2019-04-28)
+ -------------------
+
+ * Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583)
+
+ * Change ``parse_url`` to percent-encode invalid characters within the
+ path, query, and target components. (Pull #1586)
+
+
+ 1.25.1 (2019-04-24)
+ -------------------
+
+ * Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579)
+
+ * Upgrade bundled rfc3986 to v1.3.1 (Pull #1578)
+
+
+ 1.25 (2019-04-22)
+ -----------------
+
+ * Require and validate certificates by default when using HTTPS (Pull #1507)
+
+ * Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487)
+
+ * Added support for ``key_password`` for ``HTTPSConnectionPool`` to use
+ encrypted ``key_file`` without creating your own ``SSLContext`` object. (Pull #1489)
+
+ * Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext``
+ implementations. (Pull #1496)
+
+ * Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, PR #1492)
+
+ * Fixed issue where OpenSSL would block if an encrypted client private key was
+ given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489)
+
+ * Added support for Brotli content encoding. It is enabled automatically if
+ the ``brotlipy`` package is installed, which can be requested with the
+ ``urllib3[brotli]`` extra. (Pull #1532)
+
+ * Drop ciphers using DSS key exchange from default TLS cipher suites.
+ Improve default ciphers when using SecureTransport. (Pull #1496)
+
+ * Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483)
+
+ 1.24.3 (2019-05-01)
+ -------------------
+
+ * Apply fix for CVE-2019-9740. (Pull #1591)
+
+ 1.24.2 (2019-04-17)
+ -------------------
+
+ * Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or
+ ``ssl_context`` parameters are specified.
+
+ * Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510)
+
+ * Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269)
+
+
+ 1.24.1 (2018-11-02)
+ -------------------
+
+ * Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467)
+
+ * Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462)
+
+
+ 1.24 (2018-10-16)
+ -----------------
+
+ * Allow key_server_hostname to be specified when initializing a PoolManager to allow custom SNI to be overridden. (Pull #1449)
+
+ * Test against Python 3.7 on AppVeyor. (Pull #1453)
+
+ * Early-out ipv6 checks when running on App Engine. (Pull #1450)
+
+ * Change ambiguous description of backoff_factor (Pull #1436)
+
+ * Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442)
+
+ * Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405).
+
+ * Add a server_hostname parameter to HTTPSConnection which allows for
+ overriding the SNI hostname sent in the handshake. (Pull #1397)
+
+ * Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430)
+
+ * Fixed bug where responses with header Content-Type: message/* erroneously
+ raised HeaderParsingError, resulting in a warning being logged. (Pull #1439)
+
+ * Move urllib3 to src/urllib3 (Pull #1409)
+
+
+ 1.23 (2018-06-04)
+ -----------------
+
+ * Allow providing a list of headers to strip from requests when redirecting
+ to a different host. Defaults to the ``Authorization`` header. Different
+ headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316)
+
+ * Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247).
+
+ * Dropped Python 3.3 support. (Pull #1242)
+
+ * Put the connection back in the pool when calling stream() or read_chunked() on
+ a chunked HEAD response. (Issue #1234)
+
+ * Fixed pyOpenSSL-specific ssl client authentication issue when clients
+ attempted to auth via certificate + chain (Issue #1060)
+
+ * Add the port to the connectionpool connect print (Pull #1251)
+
+ * Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380)
+
+ * ``read_chunked()`` on a closed response returns no chunks. (Issue #1088)
+
+ * Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359)
+
+ * Added support for auth info in url for SOCKS proxy (Pull #1363)
+
+
+ 1.22 (2017-07-20)
+ -----------------
+
+ * Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via
+ IPv6 proxy. (Issue #1222)
+
+ * Made the connection pool retry on ``SSLError``. The original ``SSLError``
+ is available on ``MaxRetryError.reason``. (Issue #1112)
+
+ * Drain and release connection before recursing on retry/redirect. Fixes
+ deadlocks with a blocking connectionpool. (Issue #1167)
+
+ * Fixed compatibility for cookiejar. (Issue #1229)
+
+ * pyopenssl: Use vendored version of ``six``. (Issue #1231)
+
+
+ 1.21.1 (2017-05-02)
+ -------------------
+
+ * Fixed SecureTransport issue that would cause long delays in response body
+ delivery. (Pull #1154)
+
+ * Fixed regression in 1.21 that threw exceptions when users passed the
+ ``socket_options`` flag to the ``PoolManager``. (Issue #1165)
+
+ * Fixed regression in 1.21 that threw exceptions when users passed the
+ ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``.
+ (Pull #1157)
+
+
+ 1.21 (2017-04-25)
+ -----------------
+
+ * Improved performance of certain selector system calls on Python 3.5 and
+ later. (Pull #1095)
+
+ * Resolved issue where the PyOpenSSL backend would not wrap SysCallError
+ exceptions appropriately when sending data. (Pull #1125)
+
+ * The selectors backend now detects a monkey-patched ``select`` module after
+ import, for libraries that patch it such as eventlet and greenlet. (Pull #1128)
+
+ * Reduced memory consumption when streaming zlib-compressed responses
+ (as opposed to raw deflate streams). (Pull #1129)
+
+ * Connection pools now use the entire request context when constructing the
+ pool key. (Pull #1016)
+
+ * ``PoolManager.connection_from_*`` methods now accept a new keyword argument,
+ ``pool_kwargs``, which are merged with the existing ``connection_pool_kw``.
+ (Pull #1016)
+
+ * Add retry counter for ``status_forcelist``. (Issue #1147)
+
+ * Added ``contrib`` module for using SecureTransport on macOS:
+ ``urllib3.contrib.securetransport``. (Pull #1122)
+
+ * urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes:
+ for schemes it does not recognise, it assumes they are case-sensitive and
+ leaves them unchanged.
+ (Issue #1080)
+
+
+ 1.20 (2017-01-19)
+ -----------------
+
+ * Added support for waiting for I/O using selectors other than select,
+ improving urllib3's behaviour with large numbers of concurrent connections.
+ (Pull #1001)
+
+ * Updated the date for the system clock check. (Issue #1005)
+
+ * ConnectionPools now correctly consider hostnames to be case-insensitive.
+ (Issue #1032)
+
+ * Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module
+ to fail when it is injected, rather than at first use. (Pull #1063)
+
+ * Outdated versions of cryptography now cause the PyOpenSSL contrib module
+ to fail when it is injected, rather than at first use. (Issue #1044)
+
+ * Automatically attempt to rewind a file-like body object when a request is
+ retried or redirected. (Pull #1039)
+
+ * Fix some bugs that occur when modules incautiously patch the queue module.
+ (Pull #1061)
+
+ * Prevent retries from occurring on read timeouts for which the request method
+ was not in the method whitelist. (Issue #1059)
+
+ * Changed the PyOpenSSL contrib module to lazily load idna to avoid
+ unnecessarily bloating the memory of programs that don't need it. (Pull
+ #1076)
+
+ * Add support for IPv6 literals with zone identifiers. (Pull #1013)
+
+ * Added support for socks5h:// and socks4a:// schemes when working with SOCKS
+ proxies, and controlled remote DNS appropriately. (Issue #1035)
+
+
+ 1.19.1 (2016-11-16)
+ -------------------
+
+ * Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025)
+
+
+ 1.19 (2016-11-03)
+ -----------------
+
+ * urllib3 now respects Retry-After headers on 413, 429, and 503 responses when
+ using the default retry logic. (Pull #955)
+
+ * Remove markers from setup.py to assist ancient setuptools versions. (Issue
+ #986)
+
+ * Disallow superscripts and other integerish things in URL ports. (Issue #989)
+
+ * Allow urllib3's HTTPResponse.stream() method to continue to work with
+ non-httplib underlying FPs. (Pull #990)
+
+ * Empty filenames in multipart headers are now emitted as such, rather than
+ being suppressed. (Issue #1015)
+
+ * Prefer user-supplied Host headers on chunked uploads. (Issue #1009)
+
+
+ 1.18.1 (2016-10-27)
+ -------------------
+
+ * CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with
+ PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This
+ release fixes a vulnerability whereby urllib3 in the above configuration
+ would silently fail to validate TLS certificates due to erroneously setting
+ invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous
+ flags do not cause a problem in OpenSSL versions before 1.1.0, which
+ interprets the presence of any flag as requesting certificate validation.
+
+ There is no PR for this patch, as it was prepared for simultaneous disclosure
+ and release. The master branch received the same fix in PR #1010.
+
+
+ 1.18 (2016-09-26)
+ -----------------
+
+ * Fixed incorrect message for IncompleteRead exception. (PR #973)
+
+ * Accept ``iPAddress`` subject alternative name fields in TLS certificates.
+ (Issue #258)
+
+ * Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3.
+ (Issue #977)
+
+ * Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979)
+
+
+ 1.17 (2016-09-06)
+ -----------------
+
+ * Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835)
+
+ * ConnectionPool debug log now includes scheme, host, and port. (Issue #897)
+
+ * Substantially refactored documentation. (Issue #887)
+
+ * Used URLFetch default timeout on AppEngine, rather than hardcoding our own.
+ (Issue #858)
+
+ * Normalize the scheme and host in the URL parser (Issue #833)
+
+ * ``HTTPResponse`` contains the last ``Retry`` object, which now also
+ contains retries history. (Issue #848)
+
+ * Timeout can no longer be set as boolean, and must be greater than zero.
+ (PR #924)
+
+ * Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We
+ now use cryptography and idna, both of which are already dependencies of
+ PyOpenSSL. (PR #930)
+
+ * Fixed infinite loop in ``stream`` when amt=None. (Issue #928)
+
+ * Try to use the operating system's certificates when we are using an
+ ``SSLContext``. (PR #941)
+
+ * Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to
+ ChaCha20, but ChaCha20 is then preferred to everything else. (PR #947)
+
+ * Updated cipher suite list to remove 3DES-based cipher suites. (PR #958)
+
+ * Removed the cipher suite fallback to allow HIGH ciphers. (PR #958)
+
+ * Implemented ``length_remaining`` to determine remaining content
+ to be read. (PR #949)
+
+ * Implemented ``enforce_content_length`` to enable exceptions when
+ incomplete data chunks are received. (PR #949)
+
+ * Dropped connection start, dropped connection reset, redirect, forced retry,
+ and new HTTPS connection log levels to DEBUG, from INFO. (PR #967)
+
+
+ 1.16 (2016-06-11)
+ -----------------
+
+ * Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840)
+
+ * Provide ``key_fn_by_scheme`` pool keying mechanism that can be
+ overridden. (Issue #830)
+
+ * Normalize scheme and host to lowercase for pool keys, and include
+ ``source_address``. (Issue #830)
+
+ * Cleaner exception chain in Python 3 for ``_make_request``.
+ (Issue #861)
+
+ * Fixed installing ``urllib3[socks]`` extra. (Issue #864)
+
+ * Fixed signature of ``ConnectionPool.close`` so it can actually safely be
+ called by subclasses. (Issue #873)
+
+ * Retain ``release_conn`` state across retries. (Issues #651, #866)
+
+ * Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to
+ ``HTTPResponse`` but can be replaced with a subclass. (Issue #879)
+
+
+ 1.15.1 (2016-04-11)
+ -------------------
+
+ * Fix packaging to include backports module. (Issue #841)
+
+
+ 1.15 (2016-04-06)
+ -----------------
+
+ * Added Retry(raise_on_status=False). (Issue #720)
+
+ * Always use setuptools, no more distutils fallback. (Issue #785)
+
+ * Dropped support for Python 3.2. (Issue #786)
+
+ * Chunked transfer encoding when requesting with ``chunked=True``.
+ (Issue #790)
+
+ * Fixed regression with IPv6 port parsing. (Issue #801)
+
+ * Append SNIMissingWarning messages to allow users to specify it in
+ the PYTHONWARNINGS environment variable. (Issue #816)
+
+ * Handle unicode headers in Py2. (Issue #818)
+
+ * Log certificate when there is a hostname mismatch. (Issue #820)
+
+ * Preserve order of request/response headers. (Issue #821)
+
+
+ 1.14 (2015-12-29)
+ -----------------
+
+ * contrib: SOCKS proxy support! (Issue #762)
+
+ * Fixed AppEngine handling of transfer-encoding header and bug
+ in Timeout defaults checking. (Issue #763)
+
+
+ 1.13.1 (2015-12-18)
+ -------------------
+
+ * Fixed regression in IPv6 + SSL for match_hostname. (Issue #761)
+
+
+ 1.13 (2015-12-14)
+ -----------------
+
+ * Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706)
+
+ * pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717)
+
+ * pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696)
+
+ * Close connections more defensively on exception. (Issue #734)
+
+ * Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without
+ repeatedly flushing the decoder, to function better on Jython. (Issue #743)
+
+ * Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758)
+
+
+ 1.12 (2015-09-03)
+ -----------------
+
+ * Rely on ``six`` for importing ``httplib`` to work around
+ conflicts with other Python 3 shims. (Issue #688)
+
+ * Add support for directories of certificate authorities, as supported by
+ OpenSSL. (Issue #701)
+
+ * New exception: ``NewConnectionError``, raised when we fail to establish
+ a new connection, usually ``ECONNREFUSED`` socket error.
+
+
+ 1.11 (2015-07-21)
+ -----------------
+
+ * When ``ca_certs`` is given, ``cert_reqs`` defaults to
+ ``'CERT_REQUIRED'``. (Issue #650)
+
+ * ``pip install urllib3[secure]`` will install Certifi and
+ PyOpenSSL as dependencies. (Issue #678)
+
+ * Made ``HTTPHeaderDict`` usable as a ``headers`` input value
+ (Issues #632, #679)
+
+ * Added `urllib3.contrib.appengine <https://urllib3.readthedocs.io/en/latest/contrib.html#google-app-engine>`_
+ which has an ``AppEngineManager`` for using ``URLFetch`` in a
+ Google AppEngine environment. (Issue #664)
+
+ * Dev: Added test suite for AppEngine. (Issue #631)
+
+ * Fix performance regression when using PyOpenSSL. (Issue #626)
+
+ * Passing incorrect scheme (e.g. ``foo://``) will raise
+ ``ValueError`` instead of ``AssertionError`` (backwards
+ compatible for now, but please migrate). (Issue #640)
+
+ * Fix pools not getting replenished when an error occurs during a
+ request using ``release_conn=False``. (Issue #644)
+
+ * Fix pool-default headers not applying for url-encoded requests
+ like GET. (Issue #657)
+
+ * log.warning in Python 3 when headers are skipped due to parsing
+ errors. (Issue #642)
+
+ * Close and discard connections if an error occurs during read.
+ (Issue #660)
+
+ * Fix host parsing for IPv6 proxies. (Issue #668)
+
+ * Separate warning type SubjectAltNameWarning, now issued once
+ per host. (Issue #671)
+
+ * Fix ``httplib.IncompleteRead`` not getting converted to
+ ``ProtocolError`` when using ``HTTPResponse.stream()``
+ (Issue #674)
+
+ 1.10.4 (2015-05-03)
+ -------------------
+
+ * Migrate tests to Tornado 4. (Issue #594)
+
+ * Append default warning configuration rather than overwrite.
+ (Issue #603)
+
+ * Fix streaming decoding regression. (Issue #595)
+
+ * Fix chunked requests losing state across keep-alive connections.
+ (Issue #599)
+
+ * Fix hanging when chunked HEAD response has no body. (Issue #605)
+
+
+ 1.10.3 (2015-04-21)
+ -------------------
+
+ * Emit ``InsecurePlatformWarning`` when SSLContext object is missing.
+ (Issue #558)
+
+ * Fix regression of duplicate header keys being discarded.
+ (Issue #563)
+
+ * ``Response.stream()`` returns a generator for chunked responses.
+ (Issue #560)
+
+ * Set upper-bound timeout when waiting for a socket in PyOpenSSL.
+ (Issue #585)
+
+ * Work on platforms without `ssl` module for plain HTTP requests.
+ (Issue #587)
+
+ * Stop relying on the stdlib's default cipher list. (Issue #588)
+
+
+ 1.10.2 (2015-02-25)
+ -------------------
+
+ * Fix file descriptor leakage on retries. (Issue #548)
+
+ * Removed RC4 from default cipher list. (Issue #551)
+
+ * Header performance improvements. (Issue #544)
+
+ * Fix PoolManager not obeying redirect retry settings. (Issue #553)
+
+
+ 1.10.1 (2015-02-10)
+ -------------------
+
+ * Pools can be used as context managers. (Issue #545)
+
+ * Don't re-use connections which experienced an SSLError. (Issue #529)
+
+ * Don't fail when gzip decoding an empty stream. (Issue #535)
+
+ * Add sha256 support for fingerprint verification. (Issue #540)
+
+ * Fixed handling of header values containing commas. (Issue #533)
+
+
+ 1.10 (2014-12-14)
+ -----------------
+
+ * Disabled SSLv3. (Issue #473)
+
+ * Add ``Url.url`` property to return the composed url string. (Issue #394)
+
+ * Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412)
+
+ * ``MaxRetryError.reason`` will always be an exception, not string.
+ (Issue #481)
+
+ * Fixed SSL-related timeouts not being detected as timeouts. (Issue #492)
+
+ * Py3: Use ``ssl.create_default_context()`` when available. (Issue #473)
+
+ * Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request.
+ (Issue #496)
+
+ * Emit ``SecurityWarning`` when certificate has no ``subjectAltName``.
+ (Issue #499)
+
+ * Close and discard sockets which experienced SSL-related errors.
+ (Issue #501)
+
+ * Handle ``body`` param in ``.request(...)``. (Issue #513)
+
+ * Respect timeout with HTTPS proxy. (Issue #505)
+
+ * PyOpenSSL: Handle ZeroReturnError exception. (Issue #520)
+
+
+ 1.9.1 (2014-09-13)
+ ------------------
+
+ * Apply socket arguments before binding. (Issue #427)
+
+ * More careful checks if fp-like object is closed. (Issue #435)
+
+ * Fixed packaging issues of some development-related files not
+ getting included. (Issue #440)
+
+ * Allow performing *only* fingerprint verification. (Issue #444)
+
+ * Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445)
+
+ * Fixed PyOpenSSL compatibility with PyPy. (Issue #450)
+
+ * Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3.
+ (Issue #443)
+
+
+
+ 1.9 (2014-07-04)
+ ----------------
+
+ * Shuffled around development-related files. If you're maintaining a distro
+ package of urllib3, you may need to tweak things. (Issue #415)
+
+ * Unverified HTTPS requests will trigger a warning on the first request. See
+ our new `security documentation
+ <https://urllib3.readthedocs.io/en/latest/security.html>`_ for details.
+ (Issue #426)
+
+ * New retry logic and ``urllib3.util.retry.Retry`` configuration object.
+ (Issue #326)
+
+ * All raised exceptions should now be wrapped in a
+ ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326)
+
+ * All errors during a retry-enabled request should be wrapped in
+ ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions
+ which were previously exempt. Underlying error is accessible from the
+ ``.reason`` property. (Issue #326)
+
+ * ``urllib3.exceptions.ConnectionError`` renamed to
+ ``urllib3.exceptions.ProtocolError``. (Issue #326)
+
+ * Errors during response read (such as IncompleteRead) are now wrapped in
+ ``urllib3.exceptions.ProtocolError``. (Issue #418)
+
+ * Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``.
+ (Issue #417)
+
+ * Catch read timeouts over SSL connections as
+ ``urllib3.exceptions.ReadTimeoutError``. (Issue #419)
+
+ * Apply socket arguments before connecting. (Issue #427)
+
+
+ 1.8.3 (2014-06-23)
+ ------------------
+
+ * Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385)
+
+ * Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393)
+
+ * Wrap ``socket.timeout`` exception with
+ ``urllib3.exceptions.ReadTimeoutError``. (Issue #399)
+
+ * Fixed proxy-related bug where connections were being reused incorrectly.
+ (Issues #366, #369)
+
+ * Added ``socket_options`` keyword parameter which allows defining
+ ``setsockopt`` configuration of new sockets. (Issue #397)
+
+ * Removed ``HTTPConnection.tcp_nodelay`` in favor of
+ ``HTTPConnection.default_socket_options``. (Issue #397)
+
+ * Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411)
+
+
+ 1.8.2 (2014-04-17)
+ ------------------
+
+ * Fix ``urllib3.util`` not being included in the package.
+
+
+ 1.8.1 (2014-04-17)
+ ------------------
+
+ * Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356)
+
+ * Don't install ``dummyserver`` into ``site-packages`` as it's only needed
+ for the test suite. (Issue #362)
+
+ * Added support for specifying ``source_address``. (Issue #352)
+
+
+ 1.8 (2014-03-04)
+ ----------------
+
+ * Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in
+ username, and blank ports like 'hostname:').
+
+ * New ``urllib3.connection`` module which contains all the HTTPConnection
+ objects.
+
+ * Several ``urllib3.util.Timeout``-related fixes. Also changed constructor
+ signature to a more sensible order. [Backwards incompatible]
+ (Issues #252, #262, #263)
+
+ * Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274)
+
+ * Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which
+ returns the number of bytes read so far. (Issue #277)
+
+ * Support for platforms without threading. (Issue #289)
+
+ * Expand default-port comparison in ``HTTPConnectionPool.is_same_host``
+ to allow a pool with no specified port to be considered equal to an
+ HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305)
+
+ * Improved default SSL/TLS settings to avoid vulnerabilities.
+ (Issue #309)
+
+ * Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors.
+ (Issue #310)
+
+ * Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests
+ will send the entire HTTP request ~200 milliseconds faster; however, some of
+ the resulting TCP packets will be smaller. (Issue #254)
+
+ * Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl``
+ from the default 64 to 1024 in a single certificate. (Issue #318)
+
+ * Headers are now passed and stored as a custom
+ ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``.
+ (Issue #329, #333)
+
+ * Headers no longer lose their case on Python 3. (Issue #236)
+
+ * ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA
+ certificates on inject. (Issue #332)
+
+ * Requests with ``retries=False`` will immediately raise any exceptions without
+ wrapping them in ``MaxRetryError``. (Issue #348)
+
+ * Fixed open socket leak with SSL-related failures. (Issue #344, #348)
+
+
+ 1.7.1 (2013-09-25)
+ ------------------
+
+ * Added granular timeout support with new ``urllib3.util.Timeout`` class.
+ (Issue #231)
+
+ * Fixed Python 3.4 support. (Issue #238)
+
+
+ 1.7 (2013-08-14)
+ ----------------
+
+ * More exceptions are now pickle-able, with tests. (Issue #174)
+
+ * Fixed redirecting with relative URLs in Location header. (Issue #178)
+
+ * Support for relative urls in ``Location: ...`` header. (Issue #179)
+
+ * ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus
+ file-like functionality. (Issue #187)
+
+ * Passing ``assert_hostname=False`` when creating a HTTPSConnectionPool will
+ skip hostname verification for SSL connections. (Issue #194)
+
+ * New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a
+ generator wrapped around ``.read(...)``. (Issue #198)
+
+ * IPv6 url parsing enforces brackets around the hostname. (Issue #199)
+
+ * Fixed thread race condition in
+ ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204)
+
+ * ``ProxyManager`` requests now include non-default port in ``Host: ...``
+ header. (Issue #217)
+
+ * Added HTTPS proxy support in ``ProxyManager``. (Issue #170 #139)
+
+ * New ``RequestField`` object can be passed to the ``fields=...`` param which
+ can specify headers. (Issue #220)
+
+ * Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails.
+ (Issue #221)
+
+ * Use international headers when posting file names. (Issue #119)
+
+ * Improved IPv6 support. (Issue #203)
+
+
+ 1.6 (2013-04-25)
+ ----------------
+
+ * Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156)
+
+ * ``ProxyManager`` automatically adds ``Host: ...`` header if not given.
+
+ * Improved SSL-related code. ``cert_req`` now optionally takes a string like
+ "REQUIRED" or "NONE"; likewise, ``ssl_version`` takes strings like "SSLv23".
+ The string values reflect the suffix of the respective constant variable.
+ (Issue #130)
+
+ * Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly
+ closed proxy connections and larger read buffers. (Issue #135)
+
+ * Ensure the connection is closed if no data is received, fixes connection leak
+ on some platforms. (Issue #133)
+
+ * Added SNI support for SSL/TLS connections on Py32+. (Issue #89)
+
+ * Tests fixed to be compatible with Py26 again. (Issue #125)
+
+ * Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant
+ to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109)
+
+ * Allow an explicit content type to be specified when encoding file fields.
+ (Issue #126)
+
+ * Exceptions are now pickleable, with tests. (Issue #101)
+
+ * Fixed default headers not getting passed in some cases. (Issue #99)
+
+ * Treat "content-encoding" header value as case-insensitive, per RFC 2616
+ Section 3.5. (Issue #110)
+
+ * "Connection Refused" SocketErrors will get retried rather than raised.
+ (Issue #92)
+
+ * Updated vendored ``six``, no longer overrides the global ``six`` module
+ namespace. (Issue #113)
+
+ * ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding
+ the exception that prompted the final retry. If ``reason is None`` then it
+ was due to a redirect. (Issue #92, #114)
+
+ * Fixed ``PoolManager.urlopen()`` from not redirecting more than once.
+ (Issue #149)
+
+ * Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters
+ that are not files. (Issue #111)
+
+ * Pass `strict` param down to ``httplib.HTTPConnection``. (Issue #122)
+
+ * Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or
+ against an arbitrary hostname (when connecting by IP or for misconfigured
+ servers). (Issue #140)
+
+ * Streaming decompression support. (Issue #159)
+
+
+ 1.5 (2012-08-02)
+ ----------------
+
+ * Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug
+ logging in urllib3.
+
+ * Native full URL parsing (including auth, path, query, fragment) available in
+ ``urllib3.util.parse_url(url)``.
+
+ * Built-in redirect will switch method to 'GET' if status code is 303.
+ (Issue #11)
+
+ * ``urllib3.PoolManager`` strips the scheme and host before sending the request
+ uri. (Issue #8)
+
+ * New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding,
+ based on the Content-Type header, fails.
+
+ * Fixed bug with pool depletion and leaking connections (Issue #76). Added
+ explicit connection closing on pool eviction. Added
+ ``urllib3.PoolManager.clear()``.
+
+ * 99% -> 100% unit test coverage.
+
+
+ 1.4 (2012-06-16)
+ ----------------
+
+ * Minor AppEngine-related fixes.
+
+ * Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``.
+
+ * Improved url parsing. (Issue #73)
+
+ * IPv6 url support. (Issue #72)
+
+
+ 1.3 (2012-03-25)
+ ----------------
+
+ * Removed pre-1.0 deprecated API.
+
+ * Refactored helpers into a ``urllib3.util`` submodule.
+
+ * Fixed multipart encoding to support list-of-tuples for keys with multiple
+ values. (Issue #48)
+
+ * Fixed multiple Set-Cookie headers in response not getting merged properly in
+ Python 3. (Issue #53)
+
+ * AppEngine support with Py27. (Issue #61)
+
+ * Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs
+ bytes.
+
+
+ 1.2.2 (2012-02-06)
+ ------------------
+
+ * Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47)
+
+
+ 1.2.1 (2012-02-05)
+ ------------------
+
+ * Fixed another bug related to when ``ssl`` module is not available. (Issue #41)
+
+ * Location parsing errors now raise ``urllib3.exceptions.LocationParseError``
+ which inherits from ``ValueError``.
+
+
+ 1.2 (2012-01-29)
+ ----------------
+
+ * Added Python 3 support (tested on 3.2.2)
+
+ * Dropped Python 2.5 support (tested on 2.6.7, 2.7.2)
+
+ * Use ``select.poll`` instead of ``select.select`` for platforms that support
+ it.
+
+ * Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive
+ connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``.
+
+ * Fixed ``ImportError`` during install when ``ssl`` module is not available.
+ (Issue #41)
+
+ * Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not
+ completing properly. (Issue #28, uncovered by Issue #10 in v1.1)
+
+ * Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` +
+ ``eventlet``. Removed extraneous unsupported dummyserver testing backends.
+ Added socket-level tests.
+
+ * More tests. Achievement Unlocked: 99% Coverage.
+
+
+ 1.1 (2012-01-07)
+ ----------------
+
+ * Refactored ``dummyserver`` to its own root namespace module (used for
+ testing).
+
+ * Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in
+ Py32's ``ssl_match_hostname``. (Issue #25)
+
+ * Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10)
+
+ * Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue
+ #27)
+
+ * Fixed timeout-related bugs. (Issues #17, #23)
+
+
+ 1.0.2 (2011-11-04)
+ ------------------
+
+ * Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if
+ you're using the object manually. (Thanks pyos)
+
+ * Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by
+ wrapping the access log in a mutex. (Thanks @christer)
+
+ * Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and
+ ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code.
+
+
+ 1.0.1 (2011-10-10)
+ ------------------
+
+ * Fixed a bug where the same connection would get returned into the pool twice,
+ causing extraneous "HttpConnectionPool is full" log warnings.
+
+
+ 1.0 (2011-10-08)
+ ----------------
+
+ * Added ``PoolManager`` with LRU expiration of connections (tested and
+ documented).
+ * Added ``ProxyManager`` (needs tests, docs, and confirmation that it works
+ with HTTPS proxies).
+ * Added optional partial-read support for responses when
+ ``preload_content=False``. You can now make requests and just read the headers
+ without loading the content.
+ * Made response decoding optional (default on, same as before).
+ * Added optional explicit boundary string for ``encode_multipart_formdata``.
+ * Convenience request methods are now inherited from ``RequestMethods``. Old
+ helpers like ``get_url`` and ``post_url`` should be abandoned in favour of
+ the new ``request(method, url, ...)``.
+ * Refactored code to be even more decoupled, reusable, and extendable.
+ * License header added to ``.py`` files.
+ * Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code
+ and docs in ``docs/`` and on https://urllib3.readthedocs.io/.
+ * Embettered all the things!
+ * Started writing this file.
+
+
+ 0.4.1 (2011-07-17)
+ ------------------
+
+ * Minor bug fixes, code cleanup.
+
+
+ 0.4 (2011-03-01)
+ ----------------
+
+ * Better unicode support.
+ * Added ``VerifiedHTTPSConnection``.
+ * Added ``NTLMConnectionPool`` in contrib.
+ * Minor improvements.
+
+
+ 0.3.1 (2010-07-13)
+ ------------------
+
+ * Added ``assert_host_name`` optional parameter. Now compatible with proxies.
+
+
+ 0.3 (2009-12-10)
+ ----------------
+
+ * Added HTTPS support.
+ * Minor bug fixes.
+ * Refactored, broken backwards compatibility with 0.2.
+ * API to be treated as stable from this version forward.
+
+
+ 0.2 (2008-11-17)
+ ----------------
+
+ * Added unit tests.
+ * Bug fixes.
+
+
+ 0.1 (2008-11-16)
+ ----------------
+
+ * First release.
+
+Keywords: urllib httplib threadsafe filepost http https ssl pooling
+Platform: UNKNOWN
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
+Provides-Extra: brotli
+Provides-Extra: secure
+Provides-Extra: socks
diff --git a/third_party/python/urllib3/src/urllib3.egg-info/SOURCES.txt b/third_party/python/urllib3/src/urllib3.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..78a91adb8c
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3.egg-info/SOURCES.txt
@@ -0,0 +1,115 @@
+CHANGES.rst
+CONTRIBUTORS.txt
+LICENSE.txt
+MANIFEST.in
+README.rst
+dev-requirements.txt
+setup.cfg
+setup.py
+docs/Makefile
+docs/advanced-usage.rst
+docs/conf.py
+docs/contributing.rst
+docs/index.rst
+docs/make.bat
+docs/requirements.txt
+docs/user-guide.rst
+docs/_templates/fonts.html
+docs/images/banner.svg
+docs/images/demo-button.png
+docs/images/learn-more-button.png
+docs/images/logo.png
+docs/images/logo.svg
+docs/reference/index.rst
+docs/reference/urllib3.contrib.rst
+docs/reference/urllib3.util.rst
+dummyserver/__init__.py
+dummyserver/handlers.py
+dummyserver/proxy.py
+dummyserver/server.py
+dummyserver/testcase.py
+dummyserver/certs/README.rst
+dummyserver/certs/cacert.key
+dummyserver/certs/cacert.pem
+dummyserver/certs/server.crt
+dummyserver/certs/server.key
+src/urllib3/__init__.py
+src/urllib3/_collections.py
+src/urllib3/connection.py
+src/urllib3/connectionpool.py
+src/urllib3/exceptions.py
+src/urllib3/fields.py
+src/urllib3/filepost.py
+src/urllib3/poolmanager.py
+src/urllib3/request.py
+src/urllib3/response.py
+src/urllib3.egg-info/PKG-INFO
+src/urllib3.egg-info/SOURCES.txt
+src/urllib3.egg-info/dependency_links.txt
+src/urllib3.egg-info/requires.txt
+src/urllib3.egg-info/top_level.txt
+src/urllib3/contrib/__init__.py
+src/urllib3/contrib/_appengine_environ.py
+src/urllib3/contrib/appengine.py
+src/urllib3/contrib/ntlmpool.py
+src/urllib3/contrib/pyopenssl.py
+src/urllib3/contrib/securetransport.py
+src/urllib3/contrib/socks.py
+src/urllib3/contrib/_securetransport/__init__.py
+src/urllib3/contrib/_securetransport/bindings.py
+src/urllib3/contrib/_securetransport/low_level.py
+src/urllib3/packages/__init__.py
+src/urllib3/packages/six.py
+src/urllib3/packages/backports/__init__.py
+src/urllib3/packages/backports/makefile.py
+src/urllib3/packages/ssl_match_hostname/__init__.py
+src/urllib3/packages/ssl_match_hostname/_implementation.py
+src/urllib3/util/__init__.py
+src/urllib3/util/connection.py
+src/urllib3/util/queue.py
+src/urllib3/util/request.py
+src/urllib3/util/response.py
+src/urllib3/util/retry.py
+src/urllib3/util/ssl_.py
+src/urllib3/util/timeout.py
+src/urllib3/util/url.py
+src/urllib3/util/wait.py
+test/__init__.py
+test/benchmark.py
+test/conftest.py
+test/port_helpers.py
+test/socketpair_helper.py
+test/test_collections.py
+test/test_compatibility.py
+test/test_connection.py
+test/test_connectionpool.py
+test/test_exceptions.py
+test/test_fields.py
+test/test_filepost.py
+test/test_no_ssl.py
+test/test_poolmanager.py
+test/test_proxymanager.py
+test/test_queue_monkeypatch.py
+test/test_response.py
+test/test_retry.py
+test/test_ssl.py
+test/test_util.py
+test/test_wait.py
+test/appengine/__init__.py
+test/appengine/conftest.py
+test/appengine/test_gae_manager.py
+test/appengine/test_urlfetch.py
+test/contrib/__init__.py
+test/contrib/duplicate_san.pem
+test/contrib/test_pyopenssl.py
+test/contrib/test_pyopenssl_dependencies.py
+test/contrib/test_securetransport.py
+test/contrib/test_socks.py
+test/with_dummyserver/__init__.py
+test/with_dummyserver/test_chunked_transfer.py
+test/with_dummyserver/test_connectionpool.py
+test/with_dummyserver/test_https.py
+test/with_dummyserver/test_no_ssl.py
+test/with_dummyserver/test_poolmanager.py
+test/with_dummyserver/test_proxy_poolmanager.py
+test/with_dummyserver/test_socketlevel.py \ No newline at end of file
diff --git a/third_party/python/urllib3/src/urllib3.egg-info/dependency_links.txt b/third_party/python/urllib3/src/urllib3.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/third_party/python/urllib3/src/urllib3.egg-info/requires.txt b/third_party/python/urllib3/src/urllib3.egg-info/requires.txt
new file mode 100644
index 0000000000..ba01eaf2e6
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3.egg-info/requires.txt
@@ -0,0 +1,15 @@
+
+[brotli]
+brotlipy>=0.6.0
+
+[secure]
+certifi
+cryptography>=1.3.4
+idna>=2.0.0
+pyOpenSSL>=0.14
+
+[secure:python_version == "2.7"]
+ipaddress
+
+[socks]
+PySocks!=1.5.7,<2.0,>=1.5.6
diff --git a/third_party/python/urllib3/src/urllib3.egg-info/top_level.txt b/third_party/python/urllib3/src/urllib3.egg-info/top_level.txt
new file mode 100644
index 0000000000..a42590bebe
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3.egg-info/top_level.txt
@@ -0,0 +1 @@
+urllib3
diff --git a/third_party/python/urllib3/src/urllib3/__init__.py b/third_party/python/urllib3/src/urllib3/__init__.py
new file mode 100644
index 0000000000..667e9bce9e
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/__init__.py
@@ -0,0 +1,86 @@
+"""
+urllib3 - Thread-safe connection pooling and re-using.
+"""
+from __future__ import absolute_import
+import warnings
+
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
+
+from . import exceptions
+from .filepost import encode_multipart_formdata
+from .poolmanager import PoolManager, ProxyManager, proxy_from_url
+from .response import HTTPResponse
+from .util.request import make_headers
+from .util.url import get_host
+from .util.timeout import Timeout
+from .util.retry import Retry
+
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+from logging import NullHandler
+
+__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
+__license__ = "MIT"
+__version__ = "1.25.9"
+
+__all__ = (
+ "HTTPConnectionPool",
+ "HTTPSConnectionPool",
+ "PoolManager",
+ "ProxyManager",
+ "HTTPResponse",
+ "Retry",
+ "Timeout",
+ "add_stderr_logger",
+ "connection_from_url",
+ "disable_warnings",
+ "encode_multipart_formdata",
+ "get_host",
+ "make_headers",
+ "proxy_from_url",
+)
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+
+def add_stderr_logger(level=logging.DEBUG):
+ """
+ Helper for quickly adding a StreamHandler to the logger. Useful for
+ debugging.
+
+ Returns the handler after adding it.
+ """
+ # This method needs to be in this __init__.py to get the __name__ correct
+ # even if urllib3 is vendored within another package.
+ logger = logging.getLogger(__name__)
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
+ logger.addHandler(handler)
+ logger.setLevel(level)
+ logger.debug("Added a stderr logging handler to logger: %s", __name__)
+ return handler
+
+
+# ... Clean up.
+del NullHandler
+
+
+# All warning filters *must* be appended unless you're really certain that they
+# shouldn't be: otherwise, it's very hard for users to use most Python
+# mechanisms to silence them.
+# SecurityWarning's always go off by default.
+warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
+# SubjectAltNameWarning's should go off once per host
+warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True)
+# InsecurePlatformWarning's don't vary between requests, so we keep it default.
+warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
+# SNIMissingWarnings should go off only once.
+warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
+
+
+def disable_warnings(category=exceptions.HTTPWarning):
+ """
+ Helper for quickly disabling all urllib3 warnings.
+ """
+ warnings.simplefilter("ignore", category)
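The two helpers above are the module's convenience entry points for controlling urllib3's own logging and warnings. A minimal sketch of how an application might use them (``InsecureRequestWarning`` is one of the ``HTTPWarning`` subclasses defined in ``urllib3.exceptions``):

    import logging
    import urllib3

    # Stream urllib3's DEBUG logs to stderr while diagnosing connection problems;
    # the handler is returned so it can be removed again later.
    handler = urllib3.add_stderr_logger(logging.DEBUG)

    # Silence one specific warning category rather than every HTTPWarning.
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)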
diff --git a/third_party/python/urllib3/src/urllib3/_collections.py b/third_party/python/urllib3/src/urllib3/_collections.py
new file mode 100644
index 0000000000..019d1511d5
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/_collections.py
@@ -0,0 +1,336 @@
+from __future__ import absolute_import
+
+try:
+ from collections.abc import Mapping, MutableMapping
+except ImportError:
+ from collections import Mapping, MutableMapping
+try:
+ from threading import RLock
+except ImportError: # Platform-specific: No threads available
+
+ class RLock:
+ def __enter__(self):
+ pass
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ pass
+
+
+from collections import OrderedDict
+from .exceptions import InvalidHeader
+from .packages.six import iterkeys, itervalues, PY3
+
+
+__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
+
+
+_Null = object()
+
+
+class RecentlyUsedContainer(MutableMapping):
+ """
+ Provides a thread-safe dict-like container which maintains up to
+ ``maxsize`` keys while throwing away the least-recently-used keys beyond
+ ``maxsize``.
+
+ :param maxsize:
+ Maximum number of recent elements to retain.
+
+ :param dispose_func:
+ Every time an item is evicted from the container,
+ ``dispose_func(value)`` is called on the evicted value.
+ """
+
+ ContainerCls = OrderedDict
+
+ def __init__(self, maxsize=10, dispose_func=None):
+ self._maxsize = maxsize
+ self.dispose_func = dispose_func
+
+ self._container = self.ContainerCls()
+ self.lock = RLock()
+
+ def __getitem__(self, key):
+ # Re-insert the item, moving it to the end of the eviction line.
+ with self.lock:
+ item = self._container.pop(key)
+ self._container[key] = item
+ return item
+
+ def __setitem__(self, key, value):
+ evicted_value = _Null
+ with self.lock:
+ # Possibly evict the existing value of 'key'
+ evicted_value = self._container.get(key, _Null)
+ self._container[key] = value
+
+ # If we didn't evict an existing value, we might have to evict the
+ # least recently used item from the beginning of the container.
+ if len(self._container) > self._maxsize:
+ _key, evicted_value = self._container.popitem(last=False)
+
+ if self.dispose_func and evicted_value is not _Null:
+ self.dispose_func(evicted_value)
+
+ def __delitem__(self, key):
+ with self.lock:
+ value = self._container.pop(key)
+
+ if self.dispose_func:
+ self.dispose_func(value)
+
+ def __len__(self):
+ with self.lock:
+ return len(self._container)
+
+ def __iter__(self):
+ raise NotImplementedError(
+ "Iteration over this class is unlikely to be threadsafe."
+ )
+
+ def clear(self):
+ with self.lock:
+ # Copy pointers to all values, then wipe the mapping
+ values = list(itervalues(self._container))
+ self._container.clear()
+
+ if self.dispose_func:
+ for value in values:
+ self.dispose_func(value)
+
+ def keys(self):
+ with self.lock:
+ return list(iterkeys(self._container))
+
+
+class HTTPHeaderDict(MutableMapping):
+ """
+ :param headers:
+ An iterable of field-value pairs. Must not contain multiple field names
+ when compared case-insensitively.
+
+ :param kwargs:
+ Additional field-value pairs to pass in to ``dict.update``.
+
+ A ``dict`` like container for storing HTTP Headers.
+
+ Field names are stored and compared case-insensitively in compliance with
+ RFC 7230. Iteration provides the first case-sensitive key seen for each
+ case-insensitive pair.
+
+ Using ``__setitem__`` syntax overwrites fields that compare equal
+ case-insensitively in order to maintain ``dict``'s api. For fields that
+ compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
+ in a loop.
+
+ If multiple fields that are equal case-insensitively are passed to the
+ constructor or ``.update``, the behavior is undefined and some will be
+ lost.
+
+ >>> headers = HTTPHeaderDict()
+ >>> headers.add('Set-Cookie', 'foo=bar')
+ >>> headers.add('set-cookie', 'baz=quxx')
+ >>> headers['content-length'] = '7'
+ >>> headers['SET-cookie']
+ 'foo=bar, baz=quxx'
+ >>> headers['Content-Length']
+ '7'
+ """
+
+ def __init__(self, headers=None, **kwargs):
+ super(HTTPHeaderDict, self).__init__()
+ self._container = OrderedDict()
+ if headers is not None:
+ if isinstance(headers, HTTPHeaderDict):
+ self._copy_from(headers)
+ else:
+ self.extend(headers)
+ if kwargs:
+ self.extend(kwargs)
+
+ def __setitem__(self, key, val):
+ self._container[key.lower()] = [key, val]
+ return self._container[key.lower()]
+
+ def __getitem__(self, key):
+ val = self._container[key.lower()]
+ return ", ".join(val[1:])
+
+ def __delitem__(self, key):
+ del self._container[key.lower()]
+
+ def __contains__(self, key):
+ return key.lower() in self._container
+
+ def __eq__(self, other):
+ if not isinstance(other, Mapping) and not hasattr(other, "keys"):
+ return False
+ if not isinstance(other, type(self)):
+ other = type(self)(other)
+ return dict((k.lower(), v) for k, v in self.itermerged()) == dict(
+ (k.lower(), v) for k, v in other.itermerged()
+ )
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ if not PY3: # Python 2
+ iterkeys = MutableMapping.iterkeys
+ itervalues = MutableMapping.itervalues
+
+ __marker = object()
+
+ def __len__(self):
+ return len(self._container)
+
+ def __iter__(self):
+ # Only provide the originally cased names
+ for vals in self._container.values():
+ yield vals[0]
+
+ def pop(self, key, default=__marker):
+ """D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised.
+ """
+ # Using the MutableMapping function directly fails due to the private marker.
+ # Using ordinary dict.pop would expose the internal structures.
+ # So let's reinvent the wheel.
+ try:
+ value = self[key]
+ except KeyError:
+ if default is self.__marker:
+ raise
+ return default
+ else:
+ del self[key]
+ return value
+
+ def discard(self, key):
+ try:
+ del self[key]
+ except KeyError:
+ pass
+
+ def add(self, key, val):
+ """Adds a (name, value) pair, doesn't overwrite the value if it already
+ exists.
+
+ >>> headers = HTTPHeaderDict(foo='bar')
+ >>> headers.add('Foo', 'baz')
+ >>> headers['foo']
+ 'bar, baz'
+ """
+ key_lower = key.lower()
+ new_vals = [key, val]
+ # Keep the common case aka no item present as fast as possible
+ vals = self._container.setdefault(key_lower, new_vals)
+ if new_vals is not vals:
+ vals.append(val)
+
+ def extend(self, *args, **kwargs):
+ """Generic import function for any type of header-like object.
+ Adapted version of MutableMapping.update in order to insert items
+ with self.add instead of self.__setitem__
+ """
+ if len(args) > 1:
+ raise TypeError(
+ "extend() takes at most 1 positional "
+ "arguments ({0} given)".format(len(args))
+ )
+ other = args[0] if len(args) >= 1 else ()
+
+ if isinstance(other, HTTPHeaderDict):
+ for key, val in other.iteritems():
+ self.add(key, val)
+ elif isinstance(other, Mapping):
+ for key in other:
+ self.add(key, other[key])
+ elif hasattr(other, "keys"):
+ for key in other.keys():
+ self.add(key, other[key])
+ else:
+ for key, value in other:
+ self.add(key, value)
+
+ for key, value in kwargs.items():
+ self.add(key, value)
+
+ def getlist(self, key, default=__marker):
+ """Returns a list of all the values for the named field. Returns an
+ empty list if the key doesn't exist."""
+ try:
+ vals = self._container[key.lower()]
+ except KeyError:
+ if default is self.__marker:
+ return []
+ return default
+ else:
+ return vals[1:]
+
+ # Backwards compatibility for httplib
+ getheaders = getlist
+ getallmatchingheaders = getlist
+ iget = getlist
+
+ # Backwards compatibility for http.cookiejar
+ get_all = getlist
+
+ def __repr__(self):
+ return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
+
+ def _copy_from(self, other):
+ for key in other:
+ val = other.getlist(key)
+ if isinstance(val, list):
+ # Don't need to convert tuples
+ val = list(val)
+ self._container[key.lower()] = [key] + val
+
+ def copy(self):
+ clone = type(self)()
+ clone._copy_from(self)
+ return clone
+
+ def iteritems(self):
+ """Iterate over all header lines, including duplicate ones."""
+ for key in self:
+ vals = self._container[key.lower()]
+ for val in vals[1:]:
+ yield vals[0], val
+
+ def itermerged(self):
+ """Iterate over all headers, merging duplicate ones together."""
+ for key in self:
+ val = self._container[key.lower()]
+ yield val[0], ", ".join(val[1:])
+
+ def items(self):
+ return list(self.iteritems())
+
+ @classmethod
+ def from_httplib(cls, message): # Python 2
+ """Read headers from a Python 2 httplib message object."""
+ # Python 2.7 does not expose a proper API for exporting multiheaders
+ # efficiently. This function re-reads raw lines from the message
+ # object and extracts the multiheaders properly.
+ obs_fold_continued_leaders = (" ", "\t")
+ headers = []
+
+ for line in message.headers:
+ if line.startswith(obs_fold_continued_leaders):
+ if not headers:
+ # We received a header line that starts with OWS as described
+ # in RFC-7230 S3.2.4. This indicates a multiline header, but
+ # there exists no previous header to which we can attach it.
+ raise InvalidHeader(
+ "Header continuation with no previous header: %s" % line
+ )
+ else:
+ key, value = headers[-1]
+ headers[-1] = (key, value + " " + line.strip())
+ continue
+
+ key, value = line.split(":", 1)
+ headers.append((key, value.strip()))
+
+ return cls(headers)
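+
+# A sketch of the continuation handling above (the header lines are
+# illustrative, not from upstream tests): given raw message lines
+#
+#   ["Received: from a\n", "    by b\n", "Content-Type: text/plain\n"]
+#
+# the obs-fold line "    by b" is appended to the previous header, so the
+# resulting HTTPHeaderDict contains
+#   Received: from a by b
+#   Content-Type: text/plain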
diff --git a/third_party/python/urllib3/src/urllib3/connection.py b/third_party/python/urllib3/src/urllib3/connection.py
new file mode 100644
index 0000000000..6da1cf4b6d
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/connection.py
@@ -0,0 +1,423 @@
+from __future__ import absolute_import
+import re
+import datetime
+import logging
+import os
+import socket
+from socket import error as SocketError, timeout as SocketTimeout
+import warnings
+from .packages import six
+from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
+from .packages.six.moves.http_client import HTTPException # noqa: F401
+
+try: # Compiled with SSL?
+ import ssl
+
+ BaseSSLError = ssl.SSLError
+except (ImportError, AttributeError): # Platform-specific: No SSL.
+ ssl = None
+
+ class BaseSSLError(BaseException):
+ pass
+
+
+try:
+ # Python 3: not a no-op, we're adding this to the namespace so it can be imported.
+ ConnectionError = ConnectionError
+except NameError:
+ # Python 2
+ class ConnectionError(Exception):
+ pass
+
+
+from .exceptions import (
+ NewConnectionError,
+ ConnectTimeoutError,
+ SubjectAltNameWarning,
+ SystemTimeWarning,
+)
+from .packages.ssl_match_hostname import match_hostname, CertificateError
+
+from .util.ssl_ import (
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ assert_fingerprint,
+ create_urllib3_context,
+ ssl_wrap_socket,
+)
+
+
+from .util import connection
+
+from ._collections import HTTPHeaderDict
+
+log = logging.getLogger(__name__)
+
+port_by_scheme = {"http": 80, "https": 443}
+
+# When it comes time to update this value as a part of regular maintenance
+ # (i.e. test_recent_date is failing), update it to ~6 months before the current date.
+RECENT_DATE = datetime.date(2019, 1, 1)
+
+_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
+
+
+class DummyConnection(object):
+ """Used to detect a failed ConnectionCls import."""
+
+ pass
+
+
+class HTTPConnection(_HTTPConnection, object):
+ """
+ Based on httplib.HTTPConnection but provides an extra constructor
+ backwards-compatibility layer between older and newer Pythons.
+
+ Additional keyword parameters are used to configure attributes of the connection.
+ Accepted parameters include:
+
+ - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
+ - ``source_address``: Set the source address for the current connection.
+ - ``socket_options``: Set specific options on the underlying socket. If not specified, then
+ defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
+ Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
+
+ For example, if you wish to enable TCP Keep Alive in addition to the defaults,
+ you might pass::
+
+ HTTPConnection.default_socket_options + [
+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ ]
+
+ Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
+ """
+
+ default_port = port_by_scheme["http"]
+
+ #: Disable Nagle's algorithm by default.
+ #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
+ default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
+
+ #: Whether this connection verifies the host's certificate.
+ is_verified = False
+
+ def __init__(self, *args, **kw):
+ if not six.PY2:
+ kw.pop("strict", None)
+
+ # Pre-set source_address.
+ self.source_address = kw.get("source_address")
+
+ #: The socket options provided by the user. If no options are
+ #: provided, we use the default options.
+ self.socket_options = kw.pop("socket_options", self.default_socket_options)
+
+ _HTTPConnection.__init__(self, *args, **kw)
+
+ @property
+ def host(self):
+ """
+ Getter method to remove any trailing dots that indicate the hostname is an FQDN.
+
+ In general, SSL certificates don't include the trailing dot indicating a
+ fully-qualified domain name, and thus, they don't validate properly when
+ checked against a domain name that includes the dot. In addition, some
+ servers may not expect to receive the trailing dot when provided.
+
+ However, the hostname with trailing dot is critical to DNS resolution; doing a
+ lookup with the trailing dot will resolve only the appropriate FQDN,
+ whereas a lookup without a trailing dot will search the system's search domain
+ list. Thus, it's important to keep the original host around for use only in
+ those cases where it's appropriate (i.e., when doing DNS lookup to establish the
+ actual TCP connection across which we're going to send HTTP requests).
+ """
+ return self._dns_host.rstrip(".")
+
+ @host.setter
+ def host(self, value):
+ """
+ Setter for the `host` property.
+
+ We assume that only urllib3 uses the _dns_host attribute; httplib itself
+ only uses `host`, and it seems reasonable that other libraries follow suit.
+ """
+ self._dns_host = value
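+
+        # For example (sketch; the hostname is a placeholder):
+        #
+        #   conn = HTTPConnection("example.com.", port=80)
+        #   conn.host       # "example.com"  - used for cert/Host comparisons
+        #   conn._dns_host  # "example.com." - used when opening the socket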
+
+ def _new_conn(self):
+ """ Establish a socket connection and set nodelay settings on it.
+
+ :return: New socket connection.
+ """
+ extra_kw = {}
+ if self.source_address:
+ extra_kw["source_address"] = self.source_address
+
+ if self.socket_options:
+ extra_kw["socket_options"] = self.socket_options
+
+ try:
+ conn = connection.create_connection(
+ (self._dns_host, self.port), self.timeout, **extra_kw
+ )
+
+ except SocketTimeout:
+ raise ConnectTimeoutError(
+ self,
+ "Connection to %s timed out. (connect timeout=%s)"
+ % (self.host, self.timeout),
+ )
+
+ except SocketError as e:
+ raise NewConnectionError(
+ self, "Failed to establish a new connection: %s" % e
+ )
+
+ return conn
+
+ def _prepare_conn(self, conn):
+ self.sock = conn
+ # Google App Engine's httplib does not define _tunnel_host
+ if getattr(self, "_tunnel_host", None):
+ # TODO: Fix tunnel so it doesn't depend on self.sock state.
+ self._tunnel()
+ # Mark this connection as not reusable
+ self.auto_open = 0
+
+ def connect(self):
+ conn = self._new_conn()
+ self._prepare_conn(conn)
+
+ def putrequest(self, method, url, *args, **kwargs):
+ """Send a request to the server"""
+ match = _CONTAINS_CONTROL_CHAR_RE.search(method)
+ if match:
+ raise ValueError(
+ "Method cannot contain non-token characters %r (found at least %r)"
+ % (method, match.group())
+ )
+
+ return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
+
+ def request_chunked(self, method, url, body=None, headers=None):
+ """
+ Alternative to the common request() method: it sends the
+ body with chunked transfer encoding rather than as a single block.
+ """
+ headers = HTTPHeaderDict(headers if headers is not None else {})
+ skip_accept_encoding = "accept-encoding" in headers
+ skip_host = "host" in headers
+ self.putrequest(
+ method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
+ )
+ for header, value in headers.items():
+ self.putheader(header, value)
+ if "transfer-encoding" not in headers:
+ self.putheader("Transfer-Encoding", "chunked")
+ self.endheaders()
+
+ if body is not None:
+ stringish_types = six.string_types + (bytes,)
+ if isinstance(body, stringish_types):
+ body = (body,)
+ for chunk in body:
+ if not chunk:
+ continue
+ if not isinstance(chunk, bytes):
+ chunk = chunk.encode("utf8")
+ len_str = hex(len(chunk))[2:]
+ self.send(len_str.encode("utf-8"))
+ self.send(b"\r\n")
+ self.send(chunk)
+ self.send(b"\r\n")
+
+ # Sent after the if clause so the body is always terminated with a zero-length chunk
+ self.send(b"0\r\n\r\n")
+
+
+class HTTPSConnection(HTTPConnection):
+ default_port = port_by_scheme["https"]
+
+ cert_reqs = None
+ ca_certs = None
+ ca_cert_dir = None
+ ca_cert_data = None
+ ssl_version = None
+ assert_fingerprint = None
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ key_password=None,
+ strict=None,
+ timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ ssl_context=None,
+ server_hostname=None,
+ **kw
+ ):
+
+ HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw)
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.key_password = key_password
+ self.ssl_context = ssl_context
+ self.server_hostname = server_hostname
+
+ # Required property for Google AppEngine 1.9.0 which otherwise causes
+ # HTTPS requests to go out as HTTP. (See Issue #356)
+ self._protocol = "https"
+
+ def set_cert(
+ self,
+ key_file=None,
+ cert_file=None,
+ cert_reqs=None,
+ key_password=None,
+ ca_certs=None,
+ assert_hostname=None,
+ assert_fingerprint=None,
+ ca_cert_dir=None,
+ ca_cert_data=None,
+ ):
+ """
+ This method should only be called once, before the connection is used.
+ """
+ # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
+ # have an SSLContext object in which case we'll use its verify_mode.
+ if cert_reqs is None:
+ if self.ssl_context is not None:
+ cert_reqs = self.ssl_context.verify_mode
+ else:
+ cert_reqs = resolve_cert_reqs(None)
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.key_password = key_password
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
+ self.ca_cert_data = ca_cert_data
+
+ def connect(self):
+ # Add certificate verification
+ conn = self._new_conn()
+ hostname = self.host
+
+ # Google App Engine's httplib does not define _tunnel_host
+ if getattr(self, "_tunnel_host", None):
+ self.sock = conn
+ # Calls self._set_hostport(), so self.host is
+ # self._tunnel_host below.
+ self._tunnel()
+ # Mark this connection as not reusable
+ self.auto_open = 0
+
+ # Override the host with the one we're requesting data from.
+ hostname = self._tunnel_host
+
+ server_hostname = hostname
+ if self.server_hostname is not None:
+ server_hostname = self.server_hostname
+
+ is_time_off = datetime.date.today() < RECENT_DATE
+ if is_time_off:
+ warnings.warn(
+ (
+ "System time is way off (before {0}). This will probably "
+ "lead to SSL verification errors"
+ ).format(RECENT_DATE),
+ SystemTimeWarning,
+ )
+
+ # Wrap socket using verification with the root certs in
+ # trusted_root_certs
+ default_ssl_context = False
+ if self.ssl_context is None:
+ default_ssl_context = True
+ self.ssl_context = create_urllib3_context(
+ ssl_version=resolve_ssl_version(self.ssl_version),
+ cert_reqs=resolve_cert_reqs(self.cert_reqs),
+ )
+
+ context = self.ssl_context
+ context.verify_mode = resolve_cert_reqs(self.cert_reqs)
+
+ # Try to load OS default certs if none are given.
+ # Works well on Windows (requires Python3.4+)
+ if (
+ not self.ca_certs
+ and not self.ca_cert_dir
+ and not self.ca_cert_data
+ and default_ssl_context
+ and hasattr(context, "load_default_certs")
+ ):
+ context.load_default_certs()
+
+ self.sock = ssl_wrap_socket(
+ sock=conn,
+ keyfile=self.key_file,
+ certfile=self.cert_file,
+ key_password=self.key_password,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ ca_cert_data=self.ca_cert_data,
+ server_hostname=server_hostname,
+ ssl_context=context,
+ )
+
+ if self.assert_fingerprint:
+ assert_fingerprint(
+ self.sock.getpeercert(binary_form=True), self.assert_fingerprint
+ )
+ elif (
+ context.verify_mode != ssl.CERT_NONE
+ and not getattr(context, "check_hostname", False)
+ and self.assert_hostname is not False
+ ):
+ # While urllib3 attempts to always turn off hostname matching from
+ # the TLS library, this cannot always be done. So we check whether
+ # the TLS library still thinks it's matching hostnames.
+ cert = self.sock.getpeercert()
+ if not cert.get("subjectAltName", ()):
+ warnings.warn(
+ (
+ "Certificate for {0} has no `subjectAltName`, falling back to check for a "
+ "`commonName` for now. This feature is being removed by major browsers and "
+ "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
+ "for details.)".format(hostname)
+ ),
+ SubjectAltNameWarning,
+ )
+ _match_hostname(cert, self.assert_hostname or server_hostname)
+
+ self.is_verified = (
+ context.verify_mode == ssl.CERT_REQUIRED
+ or self.assert_fingerprint is not None
+ )
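+
+    # A minimal sketch of a verified connection (hostname and CA bundle path
+    # are placeholders):
+    #
+    #   conn = HTTPSConnection("example.com", 443)
+    #   conn.set_cert(ca_certs="/path/to/cacert.pem")
+    #   conn.connect()
+    #   conn.is_verified   # True once verify_mode resolved to CERT_REQUIRED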
+
+
+def _match_hostname(cert, asserted_hostname):
+ try:
+ match_hostname(cert, asserted_hostname)
+ except CertificateError as e:
+ log.warning(
+ "Certificate did not match expected hostname: %s. Certificate: %s",
+ asserted_hostname,
+ cert,
+ )
+ # Add cert to exception and reraise so client code can inspect
+ # the cert when catching the exception, if they want to
+ e._peer_cert = cert
+ raise
+
+
+if not ssl:
+ HTTPSConnection = DummyConnection # noqa: F811
+
+
+VerifiedHTTPSConnection = HTTPSConnection
diff --git a/third_party/python/urllib3/src/urllib3/connectionpool.py b/third_party/python/urllib3/src/urllib3/connectionpool.py
new file mode 100644
index 0000000000..5f044dbd90
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/connectionpool.py
@@ -0,0 +1,1033 @@
+from __future__ import absolute_import
+import errno
+import logging
+import sys
+import warnings
+
+from socket import error as SocketError, timeout as SocketTimeout
+import socket
+
+
+from .exceptions import (
+ ClosedPoolError,
+ ProtocolError,
+ EmptyPoolError,
+ HeaderParsingError,
+ HostChangedError,
+ LocationValueError,
+ MaxRetryError,
+ ProxyError,
+ ReadTimeoutError,
+ SSLError,
+ TimeoutError,
+ InsecureRequestWarning,
+ NewConnectionError,
+)
+from .packages.ssl_match_hostname import CertificateError
+from .packages import six
+from .packages.six.moves import queue
+from .connection import (
+ port_by_scheme,
+ DummyConnection,
+ HTTPConnection,
+ HTTPSConnection,
+ VerifiedHTTPSConnection,
+ HTTPException,
+ BaseSSLError,
+)
+from .request import RequestMethods
+from .response import HTTPResponse
+
+from .util.connection import is_connection_dropped
+from .util.request import set_file_position
+from .util.response import assert_header_parsing
+from .util.retry import Retry
+from .util.timeout import Timeout
+from .util.url import (
+ get_host,
+ parse_url,
+ Url,
+ _normalize_host as normalize_host,
+ _encode_target,
+)
+from .util.queue import LifoQueue
+
+
+xrange = six.moves.xrange
+
+log = logging.getLogger(__name__)
+
+_Default = object()
+
+
+# Pool objects
+class ConnectionPool(object):
+ """
+ Base class for all connection pools, such as
+ :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
+
+ .. note::
+ ConnectionPool.urlopen() does not normalize or percent-encode target URIs
+ which is useful if your target server doesn't support percent-encoded
+ target URIs.
+ """
+
+ scheme = None
+ QueueCls = LifoQueue
+
+ def __init__(self, host, port=None):
+ if not host:
+ raise LocationValueError("No host specified.")
+
+ self.host = _normalize_host(host, scheme=self.scheme)
+ self._proxy_host = host.lower()
+ self.port = port
+
+ def __str__(self):
+ return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.close()
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def close(self):
+ """
+ Close all pooled connections and disable the pool.
+ """
+ pass
+
+
+# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
+_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}
+
+
+class HTTPConnectionPool(ConnectionPool, RequestMethods):
+ """
+ Thread-safe connection pool for one host.
+
+ :param host:
+ Host used for this HTTP Connection (e.g. "localhost"), passed into
+ :class:`httplib.HTTPConnection`.
+
+ :param port:
+ Port used for this HTTP Connection (None is equivalent to 80), passed
+ into :class:`httplib.HTTPConnection`.
+
+ :param strict:
+ Causes BadStatusLine to be raised if the status line can't be parsed
+ as a valid HTTP/1.0 or 1.1 status line, passed into
+ :class:`httplib.HTTPConnection`.
+
+ .. note::
+ Only works in Python 2. This parameter is ignored in Python 3.
+
+ :param timeout:
+ Socket timeout in seconds for each individual connection. This can
+ be a float or integer, which sets the timeout for the HTTP request,
+ or an instance of :class:`urllib3.util.Timeout` which gives you more
+ fine-grained control over request timeouts. After the constructor has
+ run, this is always a `urllib3.util.Timeout` object.
+
+ :param maxsize:
+ Number of connections to save that can be reused. More than 1 is useful
+ in multithreaded situations. If ``block`` is set to False, more
+ connections will be created but they will not be saved once they've
+ been used.
+
+ :param block:
+ If set to True, no more than ``maxsize`` connections will be used at
+ a time. When no free connections are available, the call will block
+ until a connection has been released. This is a useful side effect for
+ particular multithreaded situations where one does not want to use more
+ than maxsize connections per host to prevent flooding.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+
+ :param retries:
+ Retry configuration to use by default with requests in this pool.
+
+ :param _proxy:
+ Parsed proxy URL; should not be used directly. Instead, see
+ :class:`urllib3.connectionpool.ProxyManager`.
+
+ :param _proxy_headers:
+ A dictionary with proxy headers; should not be used directly.
+ Instead, see :class:`urllib3.connectionpool.ProxyManager`.
+
+ :param \\**conn_kw:
+ Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
+ :class:`urllib3.connection.HTTPSConnection` instances.
+ """
+
+ scheme = "http"
+ ConnectionCls = HTTPConnection
+ ResponseCls = HTTPResponse
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ strict=False,
+ timeout=Timeout.DEFAULT_TIMEOUT,
+ maxsize=1,
+ block=False,
+ headers=None,
+ retries=None,
+ _proxy=None,
+ _proxy_headers=None,
+ **conn_kw
+ ):
+ ConnectionPool.__init__(self, host, port)
+ RequestMethods.__init__(self, headers)
+
+ self.strict = strict
+
+ if not isinstance(timeout, Timeout):
+ timeout = Timeout.from_float(timeout)
+
+ if retries is None:
+ retries = Retry.DEFAULT
+
+ self.timeout = timeout
+ self.retries = retries
+
+ self.pool = self.QueueCls(maxsize)
+ self.block = block
+
+ self.proxy = _proxy
+ self.proxy_headers = _proxy_headers or {}
+
+ # Fill the queue up so that doing get() on it will block properly
+ for _ in xrange(maxsize):
+ self.pool.put(None)
+
+ # These are mostly for testing and debugging purposes.
+ self.num_connections = 0
+ self.num_requests = 0
+ self.conn_kw = conn_kw
+
+ if self.proxy:
+ # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
+ # We cannot know if the user has added default socket options, so we cannot replace the
+ # list.
+ self.conn_kw.setdefault("socket_options", [])
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`HTTPConnection`.
+ """
+ self.num_connections += 1
+ log.debug(
+ "Starting new HTTP connection (%d): %s:%s",
+ self.num_connections,
+ self.host,
+ self.port or "80",
+ )
+
+ conn = self.ConnectionCls(
+ host=self.host,
+ port=self.port,
+ timeout=self.timeout.connect_timeout,
+ strict=self.strict,
+ **self.conn_kw
+ )
+ return conn
+
+ def _get_conn(self, timeout=None):
+ """
+ Get a connection. Will return a pooled connection if one is available.
+
+ If no connections are available and :prop:`.block` is ``False``, then a
+ fresh connection is returned.
+
+ :param timeout:
+ Seconds to wait before giving up and raising
+ :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
+ :prop:`.block` is ``True``.
+ """
+ conn = None
+ try:
+ conn = self.pool.get(block=self.block, timeout=timeout)
+
+ except AttributeError: # self.pool is None
+ raise ClosedPoolError(self, "Pool is closed.")
+
+ except queue.Empty:
+ if self.block:
+ raise EmptyPoolError(
+ self,
+ "Pool reached maximum size and no more connections are allowed.",
+ )
+ pass # Oh well, we'll create a new connection then
+
+ # If this is a persistent connection, check if it got disconnected
+ if conn and is_connection_dropped(conn):
+ log.debug("Resetting dropped connection: %s", self.host)
+ conn.close()
+ if getattr(conn, "auto_open", 1) == 0:
+ # This is a proxied connection that has been mutated by
+ # httplib._tunnel() and cannot be reused (since it would
+ # attempt to bypass the proxy)
+ conn = None
+
+ return conn or self._new_conn()
+
+ def _put_conn(self, conn):
+ """
+ Put a connection back into the pool.
+
+ :param conn:
+ Connection object for the current host and port as returned by
+ :meth:`._new_conn` or :meth:`._get_conn`.
+
+ If the pool is already full, the connection is closed and discarded
+ because we exceeded maxsize. If connections are discarded frequently,
+ then maxsize should be increased.
+
+ If the pool is closed, then the connection will be closed and discarded.
+ """
+ try:
+ self.pool.put(conn, block=False)
+ return # Everything is dandy, done.
+ except AttributeError:
+ # self.pool is None.
+ pass
+ except queue.Full:
+ # This should never happen if self.block == True
+ log.warning("Connection pool is full, discarding connection: %s", self.host)
+
+ # Connection never got put back into the pool, close it.
+ if conn:
+ conn.close()
+
+ def _validate_conn(self, conn):
+ """
+ Called right before a request is made, after the socket is created.
+ """
+ pass
+
+ def _prepare_proxy(self, conn):
+ # Nothing to do for HTTP connections.
+ pass
+
+ def _get_timeout(self, timeout):
+ """ Helper that always returns a :class:`urllib3.util.Timeout` """
+ if timeout is _Default:
+ return self.timeout.clone()
+
+ if isinstance(timeout, Timeout):
+ return timeout.clone()
+ else:
+ # User passed us an int/float. This is for backwards compatibility,
+ # can be removed later
+ return Timeout.from_float(timeout)
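+
+            # The three accepted forms, for reference (sketch; the numbers are
+            # illustrative):
+            #   self._get_timeout(_Default)   -> clone of self.timeout
+            #   self._get_timeout(Timeout(connect=2.0, read=7.0))
+            #                                 -> clone of that Timeout
+            #   self._get_timeout(3.5)        -> Timeout.from_float(3.5)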
+
+ def _raise_timeout(self, err, url, timeout_value):
+ """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
+
+ if isinstance(err, SocketTimeout):
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % timeout_value
+ )
+
+ # See the above comment about EAGAIN in Python 3. In Python 2 we have
+ # to specifically catch it and throw the timeout error
+ if hasattr(err, "errno") and err.errno in _blocking_errnos:
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % timeout_value
+ )
+
+ # Catch possible read timeouts thrown as SSL errors. If not the
+ # case, rethrow the original. We need to do this because of:
+ # http://bugs.python.org/issue10272
+ if "timed out" in str(err) or "did not complete (read)" in str(
+ err
+ ): # Python < 2.7.4
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % timeout_value
+ )
+
+ def _make_request(
+ self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
+ ):
+ """
+ Perform a request on a given urllib connection object taken from our
+ pool.
+
+ :param conn:
+ a connection from one of our connection pools
+
+ :param timeout:
+ Socket timeout in seconds for the request. This can be a
+ float or integer, which will set the same timeout value for
+ the socket connect and the socket read, or an instance of
+ :class:`urllib3.util.Timeout`, which gives you more fine-grained
+ control over your timeouts.
+ """
+ self.num_requests += 1
+
+ timeout_obj = self._get_timeout(timeout)
+ timeout_obj.start_connect()
+ conn.timeout = timeout_obj.connect_timeout
+
+ # Trigger any extra validation we need to do.
+ try:
+ self._validate_conn(conn)
+ except (SocketTimeout, BaseSSLError) as e:
+ # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
+ self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
+ raise
+
+ # conn.request() calls httplib.*.request, not the method in
+ # urllib3.request. It also calls makefile (recv) on the socket.
+ if chunked:
+ conn.request_chunked(method, url, **httplib_request_kw)
+ else:
+ conn.request(method, url, **httplib_request_kw)
+
+ # Reset the timeout for the recv() on the socket
+ read_timeout = timeout_obj.read_timeout
+
+ # App Engine doesn't have a sock attr
+ if getattr(conn, "sock", None):
+ # In Python 3 socket.py will catch EAGAIN and return None when you
+ # try and read into the file pointer created by http.client, which
+ # instead raises a BadStatusLine exception. Instead of catching
+ # the exception and assuming all BadStatusLine exceptions are read
+ # timeouts, check for a zero timeout before making the request.
+ if read_timeout == 0:
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % read_timeout
+ )
+ if read_timeout is Timeout.DEFAULT_TIMEOUT:
+ conn.sock.settimeout(socket.getdefaulttimeout())
+ else: # None or a value
+ conn.sock.settimeout(read_timeout)
+
+ # Receive the response from the server
+ try:
+ try:
+ # Python 2.7, use buffering of HTTP responses
+ httplib_response = conn.getresponse(buffering=True)
+ except TypeError:
+ # Python 3
+ try:
+ httplib_response = conn.getresponse()
+ except BaseException as e:
+ # Remove the TypeError from the exception chain in
+ # Python 3 (including for exceptions like SystemExit).
+ # Otherwise it looks like a bug in the code.
+ six.raise_from(e, None)
+ except (SocketTimeout, BaseSSLError, SocketError) as e:
+ self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
+ raise
+
+ # AppEngine doesn't have a version attr.
+ http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
+ log.debug(
+ '%s://%s:%s "%s %s %s" %s %s',
+ self.scheme,
+ self.host,
+ self.port,
+ method,
+ url,
+ http_version,
+ httplib_response.status,
+ httplib_response.length,
+ )
+
+ try:
+ assert_header_parsing(httplib_response.msg)
+ except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3
+ log.warning(
+ "Failed to parse headers (url=%s): %s",
+ self._absolute_url(url),
+ hpe,
+ exc_info=True,
+ )
+
+ return httplib_response
+
+ def _absolute_url(self, path):
+ return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
+
+ def close(self):
+ """
+ Close all pooled connections and disable the pool.
+ """
+ if self.pool is None:
+ return
+ # Disable access to the pool
+ old_pool, self.pool = self.pool, None
+
+ try:
+ while True:
+ conn = old_pool.get(block=False)
+ if conn:
+ conn.close()
+
+ except queue.Empty:
+ pass # Done.
+
+ def is_same_host(self, url):
+ """
+ Check if the given ``url`` is a member of the same host as this
+ connection pool.
+ """
+ if url.startswith("/"):
+ return True
+
+ # TODO: Add optional support for socket.gethostbyname checking.
+ scheme, host, port = get_host(url)
+ if host is not None:
+ host = _normalize_host(host, scheme=scheme)
+
+ # Use explicit default port for comparison when none is given
+ if self.port and not port:
+ port = port_by_scheme.get(scheme)
+ elif not self.port and port == port_by_scheme.get(scheme):
+ port = None
+
+ return (scheme, host, port) == (self.scheme, self.host, self.port)
+
+ def urlopen(
+ self,
+ method,
+ url,
+ body=None,
+ headers=None,
+ retries=None,
+ redirect=True,
+ assert_same_host=True,
+ timeout=_Default,
+ pool_timeout=None,
+ release_conn=None,
+ chunked=False,
+ body_pos=None,
+ **response_kw
+ ):
+ """
+ Get a connection from the pool and perform an HTTP request. This is the
+ lowest level call for making a request, so you'll need to specify all
+ the raw details.
+
+ .. note::
+
+ More commonly, it's appropriate to use a convenience method provided
+ by :class:`.RequestMethods`, such as :meth:`request`.
+
+ .. note::
+
+ `release_conn` will only behave as expected if
+ `preload_content=False` because we want to make
+ `preload_content=False` the default behaviour someday soon without
+ breaking backwards compatibility.
+
+ :param method:
+ HTTP request method (such as GET, POST, PUT, etc.)
+
+ :param body:
+ Data to send in the request body (useful for creating
+ POST requests, see HTTPConnectionPool.post_url for
+ more convenience).
+
+ :param headers:
+ Dictionary of custom headers to send, such as User-Agent,
+ If-None-Match, etc. If None, pool headers are used. If provided,
+ these headers completely replace any pool-specific headers.
+
+ :param retries:
+ Configure the number of retries to allow before raising a
+ :class:`~urllib3.exceptions.MaxRetryError` exception.
+
+ Pass ``None`` to retry until you receive a response. Pass a
+ :class:`~urllib3.util.retry.Retry` object for fine-grained control
+ over different types of retries.
+ Pass an integer number to retry connection errors that many times,
+ but no other types of errors. Pass zero to never retry.
+
+ If ``False``, then retries are disabled and any exception is raised
+ immediately. Also, instead of raising a MaxRetryError on redirects,
+ the redirect response will be returned.
+
+ :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
+
+ :param redirect:
+ If True, automatically handle redirects (status codes 301, 302,
+ 303, 307, 308). Each redirect counts as a retry. Disabling retries
+ will disable redirect, too.
+
+ :param assert_same_host:
+ If ``True``, will make sure that the host of the pool requests is
+ consistent; otherwise HostChangedError is raised. When ``False``, you can
+ use the pool on an HTTP proxy and request foreign hosts.
+
+ :param timeout:
+ If specified, overrides the default timeout for this one
+ request. It may be a float (in seconds) or an instance of
+ :class:`urllib3.util.Timeout`.
+
+ :param pool_timeout:
+ If set and the pool is set to block=True, then this method will
+ block for ``pool_timeout`` seconds and raise EmptyPoolError if no
+ connection is available within the time period.
+
+ :param release_conn:
+ If False, then the urlopen call will not release the connection
+ back into the pool once a response is received (but will release if
+ you read the entire contents of the response such as when
+ `preload_content=True`). This is useful if you're not preloading
+ the response's content immediately. You will need to call
+ ``r.release_conn()`` on the response ``r`` to return the connection
+ back into the pool. If None, it takes the value of
+ ``response_kw.get('preload_content', True)``.
+
+ :param chunked:
+ If True, urllib3 will send the body using chunked transfer
+ encoding. Otherwise, urllib3 will send the body using the standard
+ content-length form. Defaults to False.
+
+ :param int body_pos:
+ Position to seek to in file-like body in the event of a retry or
+ redirect. Typically this won't need to be set because urllib3 will
+ auto-populate the value when needed.
+
+ :param \\**response_kw:
+ Additional parameters are passed to
+ :meth:`urllib3.response.HTTPResponse.from_httplib`
+ """
+ if headers is None:
+ headers = self.headers
+
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
+
+ if release_conn is None:
+ release_conn = response_kw.get("preload_content", True)
+
+ # Check host
+ if assert_same_host and not self.is_same_host(url):
+ raise HostChangedError(self, url, retries)
+
+ # Ensure that the URL we're connecting to is properly encoded
+ if url.startswith("/"):
+ url = six.ensure_str(_encode_target(url))
+ else:
+ url = six.ensure_str(parse_url(url).url)
+
+ conn = None
+
+ # Track whether `conn` needs to be released before
+ # returning/raising/recursing. Update this variable if necessary, and
+ # leave `release_conn` constant throughout the function. That way, if
+ # the function recurses, the original value of `release_conn` will be
+ # passed down into the recursive call, and its value will be respected.
+ #
+ # See issue #651 [1] for details.
+ #
+ # [1] <https://github.com/urllib3/urllib3/issues/651>
+ release_this_conn = release_conn
+
+ # Merge the proxy headers. Only do this in HTTP. We have to copy the
+ # headers dict so we can safely change it without those changes being
+ # reflected in anyone else's copy.
+ if self.scheme == "http":
+ headers = headers.copy()
+ headers.update(self.proxy_headers)
+
+ # Must keep the exception bound to a separate variable or else Python 3
+ # complains about UnboundLocalError.
+ err = None
+
+ # Keep track of whether we cleanly exited the except block. This
+ # ensures we do proper cleanup in finally.
+ clean_exit = False
+
+ # Rewind body position, if needed. Record current position
+ # for future rewinds in the event of a redirect/retry.
+ body_pos = set_file_position(body, body_pos)
+
+ try:
+ # Request a connection from the queue.
+ timeout_obj = self._get_timeout(timeout)
+ conn = self._get_conn(timeout=pool_timeout)
+
+ conn.timeout = timeout_obj.connect_timeout
+
+ is_new_proxy_conn = self.proxy is not None and not getattr(
+ conn, "sock", None
+ )
+ if is_new_proxy_conn:
+ self._prepare_proxy(conn)
+
+ # Make the request on the httplib connection object.
+ httplib_response = self._make_request(
+ conn,
+ method,
+ url,
+ timeout=timeout_obj,
+ body=body,
+ headers=headers,
+ chunked=chunked,
+ )
+
+ # If we're going to release the connection in ``finally:``, then
+ # the response doesn't need to know about the connection. Otherwise
+ # it will also try to release it and we'll have a double-release
+ # mess.
+ response_conn = conn if not release_conn else None
+
+ # Pass method to Response for length checking
+ response_kw["request_method"] = method
+
+ # Import httplib's response into our own wrapper object
+ response = self.ResponseCls.from_httplib(
+ httplib_response,
+ pool=self,
+ connection=response_conn,
+ retries=retries,
+ **response_kw
+ )
+
+ # Everything went great!
+ clean_exit = True
+
+ except queue.Empty:
+ # Timed out by queue.
+ raise EmptyPoolError(self, "No pool connections are available.")
+
+ except (
+ TimeoutError,
+ HTTPException,
+ SocketError,
+ ProtocolError,
+ BaseSSLError,
+ SSLError,
+ CertificateError,
+ ) as e:
+ # Discard the connection for these exceptions. It will be
+ # replaced during the next _get_conn() call.
+ clean_exit = False
+ if isinstance(e, (BaseSSLError, CertificateError)):
+ e = SSLError(e)
+ elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
+ e = ProxyError("Cannot connect to proxy.", e)
+ elif isinstance(e, (SocketError, HTTPException)):
+ e = ProtocolError("Connection aborted.", e)
+
+ retries = retries.increment(
+ method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
+ )
+ retries.sleep()
+
+ # Keep track of the error for the retry warning.
+ err = e
+
+ finally:
+ if not clean_exit:
+ # We hit some kind of exception, handled or otherwise. We need
+ # to throw the connection away unless explicitly told not to.
+ # Close the connection, set the variable to None, and make sure
+ # we put the None back in the pool to avoid leaking it.
+ conn = conn and conn.close()
+ release_this_conn = True
+
+ if release_this_conn:
+ # Put the connection back to be reused. If the connection is
+ # expired then it will be None, which will get replaced with a
+ # fresh connection during _get_conn.
+ self._put_conn(conn)
+
+ if not conn:
+ # Try again
+ log.warning(
+ "Retrying (%r) after connection broken by '%r': %s", retries, err, url
+ )
+ return self.urlopen(
+ method,
+ url,
+ body,
+ headers,
+ retries,
+ redirect,
+ assert_same_host,
+ timeout=timeout,
+ pool_timeout=pool_timeout,
+ release_conn=release_conn,
+ chunked=chunked,
+ body_pos=body_pos,
+ **response_kw
+ )
+
+ # Handle redirect?
+ redirect_location = redirect and response.get_redirect_location()
+ if redirect_location:
+ if response.status == 303:
+ method = "GET"
+
+ try:
+ retries = retries.increment(method, url, response=response, _pool=self)
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ response.drain_conn()
+ raise
+ return response
+
+ response.drain_conn()
+ retries.sleep_for_retry(response)
+ log.debug("Redirecting %s -> %s", url, redirect_location)
+ return self.urlopen(
+ method,
+ redirect_location,
+ body,
+ headers,
+ retries=retries,
+ redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout,
+ pool_timeout=pool_timeout,
+ release_conn=release_conn,
+ chunked=chunked,
+ body_pos=body_pos,
+ **response_kw
+ )
+
+ # Check if we should retry the HTTP response.
+ has_retry_after = bool(response.getheader("Retry-After"))
+ if retries.is_retry(method, response.status, has_retry_after):
+ try:
+ retries = retries.increment(method, url, response=response, _pool=self)
+ except MaxRetryError:
+ if retries.raise_on_status:
+ response.drain_conn()
+ raise
+ return response
+
+ response.drain_conn()
+ retries.sleep(response)
+ log.debug("Retry: %s", url)
+ return self.urlopen(
+ method,
+ url,
+ body,
+ headers,
+ retries=retries,
+ redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout,
+ pool_timeout=pool_timeout,
+ release_conn=release_conn,
+ chunked=chunked,
+ body_pos=body_pos,
+ **response_kw
+ )
+
+ return response
+
+
+class HTTPSConnectionPool(HTTPConnectionPool):
+ """
+ Same as :class:`.HTTPConnectionPool`, but HTTPS.
+
+ When Python is compiled with the :mod:`ssl` module, then
+ :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
+ instead of :class:`.HTTPSConnection`.
+
+ :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
+ ``assert_hostname`` and ``host`` in this order to verify connections.
+ If ``assert_hostname`` is False, no verification is done.
+
+ The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
+ ``ca_cert_dir``, ``ssl_version``, and ``key_password`` are only used if :mod:`ssl`
+ is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
+ the connection socket into an SSL socket.
+ """
+
+ scheme = "https"
+ ConnectionCls = HTTPSConnection
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ strict=False,
+ timeout=Timeout.DEFAULT_TIMEOUT,
+ maxsize=1,
+ block=False,
+ headers=None,
+ retries=None,
+ _proxy=None,
+ _proxy_headers=None,
+ key_file=None,
+ cert_file=None,
+ cert_reqs=None,
+ key_password=None,
+ ca_certs=None,
+ ssl_version=None,
+ assert_hostname=None,
+ assert_fingerprint=None,
+ ca_cert_dir=None,
+ **conn_kw
+ ):
+
+ HTTPConnectionPool.__init__(
+ self,
+ host,
+ port,
+ strict,
+ timeout,
+ maxsize,
+ block,
+ headers,
+ retries,
+ _proxy,
+ _proxy_headers,
+ **conn_kw
+ )
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.key_password = key_password
+ self.ca_certs = ca_certs
+ self.ca_cert_dir = ca_cert_dir
+ self.ssl_version = ssl_version
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+
+ def _prepare_conn(self, conn):
+ """
+ Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
+ and establish the tunnel if proxy is used.
+ """
+
+ if isinstance(conn, VerifiedHTTPSConnection):
+ conn.set_cert(
+ key_file=self.key_file,
+ key_password=self.key_password,
+ cert_file=self.cert_file,
+ cert_reqs=self.cert_reqs,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ assert_hostname=self.assert_hostname,
+ assert_fingerprint=self.assert_fingerprint,
+ )
+ conn.ssl_version = self.ssl_version
+ return conn
+
+ def _prepare_proxy(self, conn):
+ """
+ Establish the tunnel connection early, because otherwise httplib
+ would improperly set the Host: header to the proxy's IP:port.
+ """
+ conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
+ conn.connect()
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`httplib.HTTPSConnection`.
+ """
+ self.num_connections += 1
+ log.debug(
+ "Starting new HTTPS connection (%d): %s:%s",
+ self.num_connections,
+ self.host,
+ self.port or "443",
+ )
+
+ if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
+ raise SSLError(
+ "Can't connect to HTTPS URL because the SSL module is not available."
+ )
+
+ actual_host = self.host
+ actual_port = self.port
+ if self.proxy is not None:
+ actual_host = self.proxy.host
+ actual_port = self.proxy.port
+
+ conn = self.ConnectionCls(
+ host=actual_host,
+ port=actual_port,
+ timeout=self.timeout.connect_timeout,
+ strict=self.strict,
+ cert_file=self.cert_file,
+ key_file=self.key_file,
+ key_password=self.key_password,
+ **self.conn_kw
+ )
+
+ return self._prepare_conn(conn)
+
+ def _validate_conn(self, conn):
+ """
+ Called right before a request is made, after the socket is created.
+ """
+ super(HTTPSConnectionPool, self)._validate_conn(conn)
+
+ # Force connect early to allow us to validate the connection.
+ if not getattr(conn, "sock", None): # AppEngine might not have `.sock`
+ conn.connect()
+
+ if not conn.is_verified:
+ warnings.warn(
+ (
+ "Unverified HTTPS request is being made to host '%s'. "
+ "Adding certificate verification is strongly advised. See: "
+ "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
+ "#ssl-warnings" % conn.host
+ ),
+ InsecureRequestWarning,
+ )
+
+
+def connection_from_url(url, **kw):
+ """
+ Given a url, return an :class:`.ConnectionPool` instance of its host.
+
+ This is a shortcut for not having to parse out the scheme, host, and port
+ of the url before creating an :class:`.ConnectionPool` instance.
+
+ :param url:
+ Absolute URL string that must include the scheme. Port is optional.
+
+ :param \\**kw:
+ Passes additional parameters to the constructor of the appropriate
+ :class:`.ConnectionPool`. Useful for specifying things like
+ timeout, maxsize, headers, etc.
+
+ Example::
+
+ >>> conn = connection_from_url('http://google.com/')
+ >>> r = conn.request('GET', '/')
+ """
+ scheme, host, port = get_host(url)
+ port = port or port_by_scheme.get(scheme, 80)
+ if scheme == "https":
+ return HTTPSConnectionPool(host, port=port, **kw)
+ else:
+ return HTTPConnectionPool(host, port=port, **kw)
+
+
+def _normalize_host(host, scheme):
+ """
+ Normalize hosts for comparisons and use with sockets.
+ """
+
+ host = normalize_host(host, scheme)
+
+ # httplib doesn't like it when we include brackets in IPv6 addresses.
+ # Specifically, if we include brackets but also pass the port then
+ # httplib crazily doubles up the square brackets on the Host header.
+ # Instead, we need to make sure we never pass ``None`` as the port.
+ # However, for backward compatibility reasons we can't actually
+ # *assert* that. See http://bugs.python.org/issue28539
+ if host.startswith("[") and host.endswith("]"):
+ host = host[1:-1]
+ return host
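+
+# For example (sketch):
+#   _normalize_host("[::1]", "http")        -> "::1"
+#   _normalize_host("EXAMPLE.com", "http")  -> "example.com"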
diff --git a/third_party/python/urllib3/src/urllib3/contrib/__init__.py b/third_party/python/urllib3/src/urllib3/contrib/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/contrib/__init__.py
diff --git a/third_party/python/urllib3/src/urllib3/contrib/_appengine_environ.py b/third_party/python/urllib3/src/urllib3/contrib/_appengine_environ.py
new file mode 100644
index 0000000000..8765b907d7
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/contrib/_appengine_environ.py
@@ -0,0 +1,36 @@
+"""
+This module provides means to detect the App Engine environment.
+"""
+
+import os
+
+
+def is_appengine():
+ return is_local_appengine() or is_prod_appengine()
+
+
+def is_appengine_sandbox():
+ """Reports if the app is running in the first generation sandbox.
+
+ The second generation runtimes are technically still in a sandbox, but it
+ is much less restrictive, so generally you shouldn't need to check for it.
+ see https://cloud.google.com/appengine/docs/standard/runtimes
+ """
+ return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27"
+
+
+def is_local_appengine():
+ return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
+ "SERVER_SOFTWARE", ""
+ ).startswith("Development/")
+
+
+def is_prod_appengine():
+ return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
+ "SERVER_SOFTWARE", ""
+ ).startswith("Google App Engine/")
+
+
+def is_prod_appengine_mvms():
+ """Deprecated."""
+ return False
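+
+
+# Environment combinations these helpers look at, for reference (sketch):
+#   APPENGINE_RUNTIME set + SERVER_SOFTWARE starting with "Development/"       -> is_local_appengine()
+#   APPENGINE_RUNTIME set + SERVER_SOFTWARE starting with "Google App Engine/" -> is_prod_appengine()
+#   either of the above with APPENGINE_RUNTIME == "python27"                   -> is_appengine_sandbox()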
diff --git a/third_party/python/urllib3/src/urllib3/contrib/_securetransport/__init__.py b/third_party/python/urllib3/src/urllib3/contrib/_securetransport/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/contrib/_securetransport/__init__.py
diff --git a/third_party/python/urllib3/src/urllib3/contrib/_securetransport/bindings.py b/third_party/python/urllib3/src/urllib3/contrib/_securetransport/bindings.py
new file mode 100644
index 0000000000..d9b6733318
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/contrib/_securetransport/bindings.py
@@ -0,0 +1,493 @@
+"""
+This module uses ctypes to bind a whole bunch of functions and constants from
+SecureTransport. The goal here is to provide the low-level API to
+SecureTransport. These are essentially the C-level functions and constants, and
+they're pretty gross to work with.
+
+This code is a bastardised version of the code found in Will Bond's oscrypto
+library. An enormous debt is owed to him for blazing this trail for us. For
+that reason, this code should be considered to be covered both by urllib3's
+license and by oscrypto's:
+
+ Copyright (c) 2015-2016 Will Bond <will@wbond.net>
+
+ Permission is hereby granted, free of charge, to any person obtaining a
+ copy of this software and associated documentation files (the "Software"),
+ to deal in the Software without restriction, including without limitation
+ the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ and/or sell copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
+"""
+from __future__ import absolute_import
+
+import platform
+from ctypes.util import find_library
+from ctypes import (
+ c_void_p,
+ c_int32,
+ c_char_p,
+ c_size_t,
+ c_byte,
+ c_uint32,
+ c_ulong,
+ c_long,
+ c_bool,
+)
+from ctypes import CDLL, POINTER, CFUNCTYPE
+
+
+security_path = find_library("Security")
+if not security_path:
+ raise ImportError("The library Security could not be found")
+
+
+core_foundation_path = find_library("CoreFoundation")
+if not core_foundation_path:
+ raise ImportError("The library CoreFoundation could not be found")
+
+
+version = platform.mac_ver()[0]
+version_info = tuple(map(int, version.split(".")))
+if version_info < (10, 8):
+ raise OSError(
+ "Only OS X 10.8 and newer are supported, not %s.%s"
+ % (version_info[0], version_info[1])
+ )
+
+Security = CDLL(security_path, use_errno=True)
+CoreFoundation = CDLL(core_foundation_path, use_errno=True)
+
+Boolean = c_bool
+CFIndex = c_long
+CFStringEncoding = c_uint32
+CFData = c_void_p
+CFString = c_void_p
+CFArray = c_void_p
+CFMutableArray = c_void_p
+CFDictionary = c_void_p
+CFError = c_void_p
+CFType = c_void_p
+CFTypeID = c_ulong
+
+CFTypeRef = POINTER(CFType)
+CFAllocatorRef = c_void_p
+
+OSStatus = c_int32
+
+CFDataRef = POINTER(CFData)
+CFStringRef = POINTER(CFString)
+CFArrayRef = POINTER(CFArray)
+CFMutableArrayRef = POINTER(CFMutableArray)
+CFDictionaryRef = POINTER(CFDictionary)
+CFArrayCallBacks = c_void_p
+CFDictionaryKeyCallBacks = c_void_p
+CFDictionaryValueCallBacks = c_void_p
+
+SecCertificateRef = POINTER(c_void_p)
+SecExternalFormat = c_uint32
+SecExternalItemType = c_uint32
+SecIdentityRef = POINTER(c_void_p)
+SecItemImportExportFlags = c_uint32
+SecItemImportExportKeyParameters = c_void_p
+SecKeychainRef = POINTER(c_void_p)
+SSLProtocol = c_uint32
+SSLCipherSuite = c_uint32
+SSLContextRef = POINTER(c_void_p)
+SecTrustRef = POINTER(c_void_p)
+SSLConnectionRef = c_uint32
+SecTrustResultType = c_uint32
+SecTrustOptionFlags = c_uint32
+SSLProtocolSide = c_uint32
+SSLConnectionType = c_uint32
+SSLSessionOption = c_uint32
+
+
+try:
+ Security.SecItemImport.argtypes = [
+ CFDataRef,
+ CFStringRef,
+ POINTER(SecExternalFormat),
+ POINTER(SecExternalItemType),
+ SecItemImportExportFlags,
+ POINTER(SecItemImportExportKeyParameters),
+ SecKeychainRef,
+ POINTER(CFArrayRef),
+ ]
+ Security.SecItemImport.restype = OSStatus
+
+ Security.SecCertificateGetTypeID.argtypes = []
+ Security.SecCertificateGetTypeID.restype = CFTypeID
+
+ Security.SecIdentityGetTypeID.argtypes = []
+ Security.SecIdentityGetTypeID.restype = CFTypeID
+
+ Security.SecKeyGetTypeID.argtypes = []
+ Security.SecKeyGetTypeID.restype = CFTypeID
+
+ Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
+ Security.SecCertificateCreateWithData.restype = SecCertificateRef
+
+ Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
+ Security.SecCertificateCopyData.restype = CFDataRef
+
+ Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
+ Security.SecCopyErrorMessageString.restype = CFStringRef
+
+ Security.SecIdentityCreateWithCertificate.argtypes = [
+ CFTypeRef,
+ SecCertificateRef,
+ POINTER(SecIdentityRef),
+ ]
+ Security.SecIdentityCreateWithCertificate.restype = OSStatus
+
+ Security.SecKeychainCreate.argtypes = [
+ c_char_p,
+ c_uint32,
+ c_void_p,
+ Boolean,
+ c_void_p,
+ POINTER(SecKeychainRef),
+ ]
+ Security.SecKeychainCreate.restype = OSStatus
+
+ Security.SecKeychainDelete.argtypes = [SecKeychainRef]
+ Security.SecKeychainDelete.restype = OSStatus
+
+ Security.SecPKCS12Import.argtypes = [
+ CFDataRef,
+ CFDictionaryRef,
+ POINTER(CFArrayRef),
+ ]
+ Security.SecPKCS12Import.restype = OSStatus
+
+ SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
+ SSLWriteFunc = CFUNCTYPE(
+ OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
+ )
+
+ Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
+ Security.SSLSetIOFuncs.restype = OSStatus
+
+ Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
+ Security.SSLSetPeerID.restype = OSStatus
+
+ Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
+ Security.SSLSetCertificate.restype = OSStatus
+
+ Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
+ Security.SSLSetCertificateAuthorities.restype = OSStatus
+
+ Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
+ Security.SSLSetConnection.restype = OSStatus
+
+ Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
+ Security.SSLSetPeerDomainName.restype = OSStatus
+
+ Security.SSLHandshake.argtypes = [SSLContextRef]
+ Security.SSLHandshake.restype = OSStatus
+
+ Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
+ Security.SSLRead.restype = OSStatus
+
+ Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
+ Security.SSLWrite.restype = OSStatus
+
+ Security.SSLClose.argtypes = [SSLContextRef]
+ Security.SSLClose.restype = OSStatus
+
+ Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
+ Security.SSLGetNumberSupportedCiphers.restype = OSStatus
+
+ Security.SSLGetSupportedCiphers.argtypes = [
+ SSLContextRef,
+ POINTER(SSLCipherSuite),
+ POINTER(c_size_t),
+ ]
+ Security.SSLGetSupportedCiphers.restype = OSStatus
+
+ Security.SSLSetEnabledCiphers.argtypes = [
+ SSLContextRef,
+ POINTER(SSLCipherSuite),
+ c_size_t,
+ ]
+ Security.SSLSetEnabledCiphers.restype = OSStatus
+
+ Security.SSLGetNumberEnabledCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
+ Security.SSLGetNumberEnabledCiphers.restype = OSStatus
+
+ Security.SSLGetEnabledCiphers.argtypes = [
+ SSLContextRef,
+ POINTER(SSLCipherSuite),
+ POINTER(c_size_t),
+ ]
+ Security.SSLGetEnabledCiphers.restype = OSStatus
+
+ Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
+ Security.SSLGetNegotiatedCipher.restype = OSStatus
+
+ Security.SSLGetNegotiatedProtocolVersion.argtypes = [
+ SSLContextRef,
+ POINTER(SSLProtocol),
+ ]
+ Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
+
+ Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
+ Security.SSLCopyPeerTrust.restype = OSStatus
+
+ Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
+ Security.SecTrustSetAnchorCertificates.restype = OSStatus
+
+ Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean]
+ Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
+
+ Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
+ Security.SecTrustEvaluate.restype = OSStatus
+
+ Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
+ Security.SecTrustGetCertificateCount.restype = CFIndex
+
+ Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
+ Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
+
+ Security.SSLCreateContext.argtypes = [
+ CFAllocatorRef,
+ SSLProtocolSide,
+ SSLConnectionType,
+ ]
+ Security.SSLCreateContext.restype = SSLContextRef
+
+ Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
+ Security.SSLSetSessionOption.restype = OSStatus
+
+ Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
+ Security.SSLSetProtocolVersionMin.restype = OSStatus
+
+ Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
+ Security.SSLSetProtocolVersionMax.restype = OSStatus
+
+ Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
+ Security.SecCopyErrorMessageString.restype = CFStringRef
+
+ Security.SSLReadFunc = SSLReadFunc
+ Security.SSLWriteFunc = SSLWriteFunc
+ Security.SSLContextRef = SSLContextRef
+ Security.SSLProtocol = SSLProtocol
+ Security.SSLCipherSuite = SSLCipherSuite
+ Security.SecIdentityRef = SecIdentityRef
+ Security.SecKeychainRef = SecKeychainRef
+ Security.SecTrustRef = SecTrustRef
+ Security.SecTrustResultType = SecTrustResultType
+ Security.SecExternalFormat = SecExternalFormat
+ Security.OSStatus = OSStatus
+
+ Security.kSecImportExportPassphrase = CFStringRef.in_dll(
+ Security, "kSecImportExportPassphrase"
+ )
+ Security.kSecImportItemIdentity = CFStringRef.in_dll(
+ Security, "kSecImportItemIdentity"
+ )
+
+ # CoreFoundation time!
+ CoreFoundation.CFRetain.argtypes = [CFTypeRef]
+ CoreFoundation.CFRetain.restype = CFTypeRef
+
+ CoreFoundation.CFRelease.argtypes = [CFTypeRef]
+ CoreFoundation.CFRelease.restype = None
+
+ CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
+ CoreFoundation.CFGetTypeID.restype = CFTypeID
+
+ CoreFoundation.CFStringCreateWithCString.argtypes = [
+ CFAllocatorRef,
+ c_char_p,
+ CFStringEncoding,
+ ]
+ CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
+
+ CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
+ CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
+
+ CoreFoundation.CFStringGetCString.argtypes = [
+ CFStringRef,
+ c_char_p,
+ CFIndex,
+ CFStringEncoding,
+ ]
+ CoreFoundation.CFStringGetCString.restype = c_bool
+
+ CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
+ CoreFoundation.CFDataCreate.restype = CFDataRef
+
+ CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
+ CoreFoundation.CFDataGetLength.restype = CFIndex
+
+ CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
+ CoreFoundation.CFDataGetBytePtr.restype = c_void_p
+
+ CoreFoundation.CFDictionaryCreate.argtypes = [
+ CFAllocatorRef,
+ POINTER(CFTypeRef),
+ POINTER(CFTypeRef),
+ CFIndex,
+ CFDictionaryKeyCallBacks,
+ CFDictionaryValueCallBacks,
+ ]
+ CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
+
+ CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
+ CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
+
+ CoreFoundation.CFArrayCreate.argtypes = [
+ CFAllocatorRef,
+ POINTER(CFTypeRef),
+ CFIndex,
+ CFArrayCallBacks,
+ ]
+ CoreFoundation.CFArrayCreate.restype = CFArrayRef
+
+ CoreFoundation.CFArrayCreateMutable.argtypes = [
+ CFAllocatorRef,
+ CFIndex,
+ CFArrayCallBacks,
+ ]
+ CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
+
+ CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
+ CoreFoundation.CFArrayAppendValue.restype = None
+
+ CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
+ CoreFoundation.CFArrayGetCount.restype = CFIndex
+
+ CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
+ CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
+
+ CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
+ CoreFoundation, "kCFAllocatorDefault"
+ )
+ CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
+ CoreFoundation, "kCFTypeArrayCallBacks"
+ )
+ CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
+ CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
+ )
+ CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
+ CoreFoundation, "kCFTypeDictionaryValueCallBacks"
+ )
+
+ CoreFoundation.CFTypeRef = CFTypeRef
+ CoreFoundation.CFArrayRef = CFArrayRef
+ CoreFoundation.CFStringRef = CFStringRef
+ CoreFoundation.CFDictionaryRef = CFDictionaryRef
+
+except AttributeError:
+ raise ImportError("Error initializing ctypes")
+
+
+class CFConst(object):
+ """
+ A class object that acts as essentially a namespace for CoreFoundation
+ constants.
+ """
+
+ kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
+
+
+class SecurityConst(object):
+ """
+ A class object that acts as essentially a namespace for Security constants.
+ """
+
+ kSSLSessionOptionBreakOnServerAuth = 0
+
+ kSSLProtocol2 = 1
+ kSSLProtocol3 = 2
+ kTLSProtocol1 = 4
+ kTLSProtocol11 = 7
+ kTLSProtocol12 = 8
+ # SecureTransport does not support TLS 1.3 even if there's a constant for it
+ kTLSProtocol13 = 10
+ kTLSProtocolMaxSupported = 999
+
+ kSSLClientSide = 1
+ kSSLStreamType = 0
+
+ kSecFormatPEMSequence = 10
+
+ kSecTrustResultInvalid = 0
+ kSecTrustResultProceed = 1
+ # This gap is present on purpose: this was kSecTrustResultConfirm, which
+ # is deprecated.
+ kSecTrustResultDeny = 3
+ kSecTrustResultUnspecified = 4
+ kSecTrustResultRecoverableTrustFailure = 5
+ kSecTrustResultFatalTrustFailure = 6
+ kSecTrustResultOtherError = 7
+
+ errSSLProtocol = -9800
+ errSSLWouldBlock = -9803
+ errSSLClosedGraceful = -9805
+ errSSLClosedNoNotify = -9816
+ errSSLClosedAbort = -9806
+
+ errSSLXCertChainInvalid = -9807
+ errSSLCrypto = -9809
+ errSSLInternal = -9810
+ errSSLCertExpired = -9814
+ errSSLCertNotYetValid = -9815
+ errSSLUnknownRootCert = -9812
+ errSSLNoRootCert = -9813
+ errSSLHostNameMismatch = -9843
+ errSSLPeerHandshakeFail = -9824
+ errSSLPeerUserCancelled = -9839
+ errSSLWeakPeerEphemeralDHKey = -9850
+ errSSLServerAuthCompleted = -9841
+ errSSLRecordOverflow = -9847
+
+ errSecVerifyFailed = -67808
+ errSecNoTrustSettings = -25263
+ errSecItemNotFound = -25300
+ errSecInvalidTrustSettings = -25262
+
+ # Cipher suites. We only pick the ones our default cipher string allows.
+ # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values
+ TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
+ TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
+ TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
+ TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
+ TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9
+ TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8
+ TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
+ TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
+ TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
+ TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028
+ TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A
+ TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014
+ TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B
+ TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039
+ TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023
+ TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027
+ TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009
+ TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013
+ TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067
+ TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033
+ TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D
+ TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C
+ TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D
+ TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C
+ TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035
+ TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
+ TLS_AES_128_GCM_SHA256 = 0x1301
+ TLS_AES_256_GCM_SHA384 = 0x1302
+ TLS_AES_128_CCM_8_SHA256 = 0x1305
+ TLS_AES_128_CCM_SHA256 = 0x1304
diff --git a/third_party/python/urllib3/src/urllib3/contrib/_securetransport/low_level.py b/third_party/python/urllib3/src/urllib3/contrib/_securetransport/low_level.py
new file mode 100644
index 0000000000..e60168cac1
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/contrib/_securetransport/low_level.py
@@ -0,0 +1,328 @@
+"""
+Low-level helpers for the SecureTransport bindings.
+
+These are Python functions that are not directly related to the high-level APIs
+but are necessary to get them to work. They include a whole bunch of low-level
+CoreFoundation messing about and memory management. The concerns in this module
+are almost entirely about trying to avoid memory leaks and providing
+appropriate and useful assistance to the higher-level code.
+"""
+import base64
+import ctypes
+import itertools
+import re
+import os
+import ssl
+import tempfile
+
+from .bindings import Security, CoreFoundation, CFConst
+
+
+# This regular expression is used to grab PEM data out of a PEM bundle.
+_PEM_CERTS_RE = re.compile(
+ b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
+)
+
+
+def _cf_data_from_bytes(bytestring):
+ """
+ Given a bytestring, create a CFData object from it. This CFData object must
+ be CFReleased by the caller.
+ """
+ return CoreFoundation.CFDataCreate(
+ CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring)
+ )
+
+
+def _cf_dictionary_from_tuples(tuples):
+ """
+ Given a list of Python tuples, create an associated CFDictionary.
+ """
+ dictionary_size = len(tuples)
+
+ # We need to get the dictionary keys and values out in the same order.
+ keys = (t[0] for t in tuples)
+ values = (t[1] for t in tuples)
+ cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys)
+ cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values)
+
+ return CoreFoundation.CFDictionaryCreate(
+ CoreFoundation.kCFAllocatorDefault,
+ cf_keys,
+ cf_values,
+ dictionary_size,
+ CoreFoundation.kCFTypeDictionaryKeyCallBacks,
+ CoreFoundation.kCFTypeDictionaryValueCallBacks,
+ )
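+
+# A minimal usage sketch (illustration only, not upstream code): the keys and
+# values handed to this helper are expected to already be CoreFoundation
+# objects. `passphrase_cf` below is a hypothetical CFStringRef, e.g. built with
+# CoreFoundation.CFStringCreateWithCString:
+#
+#   options = _cf_dictionary_from_tuples(
+#       [(Security.kSecImportExportPassphrase, passphrase_cf)]
+#   )
+#   ...  # use the CFDictionaryRef
+#   CoreFoundation.CFRelease(options)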
+
+
+def _cf_string_to_unicode(value):
+ """
+ Creates a Unicode string from a CFString object. Used entirely for error
+ reporting.
+
+ Yes, it annoys me quite a lot that this function is this complex.
+ """
+ value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
+
+ string = CoreFoundation.CFStringGetCStringPtr(
+ value_as_void_p, CFConst.kCFStringEncodingUTF8
+ )
+ if string is None:
+ buffer = ctypes.create_string_buffer(1024)
+ result = CoreFoundation.CFStringGetCString(
+ value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8
+ )
+ if not result:
+ raise OSError("Error copying C string from CFStringRef")
+ string = buffer.value
+ if string is not None:
+ string = string.decode("utf-8")
+ return string
+
+
+def _assert_no_error(error, exception_class=None):
+ """
+ Checks the return code and throws an exception if there is an error to
+ report.
+ """
+ if error == 0:
+ return
+
+ cf_error_string = Security.SecCopyErrorMessageString(error, None)
+ output = _cf_string_to_unicode(cf_error_string)
+ CoreFoundation.CFRelease(cf_error_string)
+
+ if output is None or output == u"":
+ output = u"OSStatus %s" % error
+
+ if exception_class is None:
+ exception_class = ssl.SSLError
+
+ raise exception_class(output)
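+
+# Typical call pattern (a sketch mirroring _custom_validate in
+# securetransport.py): every Security call that returns an OSStatus is checked
+# immediately, and a non-zero status becomes an ssl.SSLError unless a
+# different exception_class is supplied.
+#
+#   result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
+#   _assert_no_error(result)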
+
+
+def _cert_array_from_pem(pem_bundle):
+ """
+ Given a bundle of certs in PEM format, turns them into a CFArray of certs
+ that can be used to validate a cert chain.
+ """
+ # Normalize the PEM bundle's line endings.
+ pem_bundle = pem_bundle.replace(b"\r\n", b"\n")
+
+ der_certs = [
+ base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle)
+ ]
+ if not der_certs:
+ raise ssl.SSLError("No root certificates specified")
+
+ cert_array = CoreFoundation.CFArrayCreateMutable(
+ CoreFoundation.kCFAllocatorDefault,
+ 0,
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+ )
+ if not cert_array:
+ raise ssl.SSLError("Unable to allocate memory!")
+
+ try:
+ for der_bytes in der_certs:
+ certdata = _cf_data_from_bytes(der_bytes)
+ if not certdata:
+ raise ssl.SSLError("Unable to allocate memory!")
+ cert = Security.SecCertificateCreateWithData(
+ CoreFoundation.kCFAllocatorDefault, certdata
+ )
+ CoreFoundation.CFRelease(certdata)
+ if not cert:
+ raise ssl.SSLError("Unable to build cert object!")
+
+ CoreFoundation.CFArrayAppendValue(cert_array, cert)
+ CoreFoundation.CFRelease(cert)
+ except Exception:
+ # We need to free the array before the exception bubbles further.
+ # We only want to do that if an error occurs: otherwise, the caller
+ # should free.
+ CoreFoundation.CFRelease(cert_array)
+ raise
+
+ return cert_array
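+
+# Ownership sketch (mirrors how _custom_validate in securetransport.py uses
+# this helper): the caller owns the returned CFArray and must release it.
+#
+#   cert_array = _cert_array_from_pem(trust_bundle_bytes)
+#   try:
+#       ...  # e.g. Security.SecTrustSetAnchorCertificates(trust, cert_array)
+#   finally:
+#       CoreFoundation.CFRelease(cert_array)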
+
+
+def _is_cert(item):
+ """
+ Returns True if a given CFTypeRef is a certificate.
+ """
+ expected = Security.SecCertificateGetTypeID()
+ return CoreFoundation.CFGetTypeID(item) == expected
+
+
+def _is_identity(item):
+ """
+ Returns True if a given CFTypeRef is an identity.
+ """
+ expected = Security.SecIdentityGetTypeID()
+ return CoreFoundation.CFGetTypeID(item) == expected
+
+
+def _temporary_keychain():
+ """
+ This function creates a temporary Mac keychain that we can use to work with
+ credentials. This keychain uses a one-time password and a temporary file to
+ store the data. We expect to have one keychain per socket. The returned
+ SecKeychainRef must be freed by the caller, including calling
+ SecKeychainDelete.
+
+ Returns a tuple of the SecKeychainRef and the path to the temporary
+ directory that contains it.
+ """
+ # Unfortunately, SecKeychainCreate requires a path to a keychain. This
+ # means we cannot use mkstemp to use a generic temporary file. Instead,
+ # we're going to create a temporary directory and a filename to use there.
+ # This filename will be 8 random bytes expanded into base64. We also need
+ # some random bytes to password-protect the keychain we're creating, so we
+ # ask for 40 random bytes.
+ random_bytes = os.urandom(40)
+ filename = base64.b16encode(random_bytes[:8]).decode("utf-8")
+ password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8
+ tempdirectory = tempfile.mkdtemp()
+
+ keychain_path = os.path.join(tempdirectory, filename).encode("utf-8")
+
+ # We now want to create the keychain itself.
+ keychain = Security.SecKeychainRef()
+ status = Security.SecKeychainCreate(
+ keychain_path, len(password), password, False, None, ctypes.byref(keychain)
+ )
+ _assert_no_error(status)
+
+ # Having created the keychain, we want to pass it off to the caller.
+ return keychain, tempdirectory
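+
+# Cleanup sketch for the values returned here (an illustration of what the
+# docstring asks of callers; `shutil` would be needed for the rmtree step):
+#
+#   keychain, keychain_dir = _temporary_keychain()
+#   try:
+#       ...  # e.g. Security.SecItemImport into `keychain`
+#   finally:
+#       Security.SecKeychainDelete(keychain)
+#       CoreFoundation.CFRelease(keychain)
+#       shutil.rmtree(keychain_dir)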
+
+
+def _load_items_from_file(keychain, path):
+ """
+ Given a single file, loads all the trust objects from it into arrays and
+ the keychain.
+ Returns a tuple of lists: the first list is a list of identities, the
+ second a list of certs.
+ """
+ certificates = []
+ identities = []
+ result_array = None
+
+ with open(path, "rb") as f:
+ raw_filedata = f.read()
+
+ try:
+ filedata = CoreFoundation.CFDataCreate(
+ CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata)
+ )
+ result_array = CoreFoundation.CFArrayRef()
+ result = Security.SecItemImport(
+ filedata, # cert data
+ None, # Filename, leaving it out for now
+ None, # What the type of the file is, we don't care
+ None, # what's in the file, we don't care
+ 0, # import flags
+ None, # key params, can include passphrase in the future
+ keychain, # The keychain to insert into
+ ctypes.byref(result_array), # Results
+ )
+ _assert_no_error(result)
+
+ # A CFArray is not very useful to us as an intermediary
+ # representation, so we are going to extract the objects we want
+ # and then free the array. We don't need to keep hold of keys: the
+ # keychain already has them!
+ result_count = CoreFoundation.CFArrayGetCount(result_array)
+ for index in range(result_count):
+ item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index)
+ item = ctypes.cast(item, CoreFoundation.CFTypeRef)
+
+ if _is_cert(item):
+ CoreFoundation.CFRetain(item)
+ certificates.append(item)
+ elif _is_identity(item):
+ CoreFoundation.CFRetain(item)
+ identities.append(item)
+ finally:
+ if result_array:
+ CoreFoundation.CFRelease(result_array)
+
+ CoreFoundation.CFRelease(filedata)
+
+ return (identities, certificates)
+
+
+def _load_client_cert_chain(keychain, *paths):
+ """
+ Load certificates and maybe keys from a number of files. Has the end goal
+ of returning a CFArray containing one SecIdentityRef, and then zero or more
+ SecCertificateRef objects, suitable for use as a client certificate trust
+ chain.
+ """
+ # Ok, the strategy.
+ #
+ # This relies on knowing that macOS will not give you a SecIdentityRef
+ # unless you have imported a key into a keychain. This is a somewhat
+ # artificial limitation of macOS (for example, it doesn't necessarily
+ # affect iOS), but there is nothing inside Security.framework that lets you
+ # get a SecIdentityRef without having a key in a keychain.
+ #
+ # So the policy here is we take all the files and iterate them in order.
+ # Each one will use SecItemImport to have one or more objects loaded from
+ # it. We will also point at a keychain that macOS can use to work with the
+ # private key.
+ #
+ # Once we have all the objects, we'll check what we actually have. If we
+ # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise,
+ # we'll take the first certificate (which we assume to be our leaf) and
+ # ask the keychain to give us a SecIdentityRef with that cert's associated
+ # key.
+ #
+ # We'll then return a CFArray containing the trust chain: one
+ # SecIdentityRef and then zero-or-more SecCertificateRef objects. The
+ # responsibility for freeing this CFArray will be with the caller. This
+ # CFArray must remain alive for the entire connection, so in practice it
+ # will be stored with a single SSLSocket, along with the reference to the
+ # keychain.
+ certificates = []
+ identities = []
+
+ # Filter out bad paths.
+ paths = (path for path in paths if path)
+
+ try:
+ for file_path in paths:
+ new_identities, new_certs = _load_items_from_file(keychain, file_path)
+ identities.extend(new_identities)
+ certificates.extend(new_certs)
+
+ # Ok, we have everything. The question is: do we have an identity? If
+ # not, we want to grab one from the first cert we have.
+ if not identities:
+ new_identity = Security.SecIdentityRef()
+ status = Security.SecIdentityCreateWithCertificate(
+ keychain, certificates[0], ctypes.byref(new_identity)
+ )
+ _assert_no_error(status)
+ identities.append(new_identity)
+
+ # We now want to release the original certificate, as we no longer
+ # need it.
+ CoreFoundation.CFRelease(certificates.pop(0))
+
+ # We now need to build a new CFArray that holds the trust chain.
+ trust_chain = CoreFoundation.CFArrayCreateMutable(
+ CoreFoundation.kCFAllocatorDefault,
+ 0,
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+ )
+ for item in itertools.chain(identities, certificates):
+ # ArrayAppendValue does a CFRetain on the item. That's fine,
+ # because the finally block will release our other refs to them.
+ CoreFoundation.CFArrayAppendValue(trust_chain, item)
+
+ return trust_chain
+ finally:
+ for obj in itertools.chain(identities, certificates):
+ CoreFoundation.CFRelease(obj)
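+
+# Illustrative call (a hedged sketch; the file paths are placeholders): the
+# returned CFArray is handed to SecureTransport as the client credential and
+# must stay alive for the life of the connection.
+#
+#   keychain, keychain_dir = _temporary_keychain()
+#   trust_chain = _load_client_cert_chain(keychain, "client-cert.pem", "client-key.pem")
+#   ...  # e.g. Security.SSLSetCertificate(context, trust_chain)
+#   CoreFoundation.CFRelease(trust_chain)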
diff --git a/third_party/python/urllib3/src/urllib3/contrib/appengine.py b/third_party/python/urllib3/src/urllib3/contrib/appengine.py
new file mode 100644
index 0000000000..9b7044ffb0
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/contrib/appengine.py
@@ -0,0 +1,314 @@
+"""
+This module provides a pool manager that uses Google App Engine's
+`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
+
+Example usage::
+
+ from urllib3 import PoolManager
+ from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
+
+ if is_appengine_sandbox():
+ # AppEngineManager uses AppEngine's URLFetch API behind the scenes
+ http = AppEngineManager()
+ else:
+ # PoolManager uses a socket-level API behind the scenes
+ http = PoolManager()
+
+ r = http.request('GET', 'https://google.com/')
+
+There are `limitations <https://cloud.google.com/appengine/docs/python/\
+urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
+the best choice for your application. There are three options for using
+urllib3 on Google App Engine:
+
+1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
+ cost-effective in many circumstances as long as your usage is within the
+ limitations.
+2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
+ Sockets also have `limitations and restrictions
+ <https://cloud.google.com/appengine/docs/python/sockets/\
+ #limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
+ To use sockets, be sure to specify the following in your ``app.yaml``::
+
+ env_variables:
+ GAE_USE_SOCKETS_HTTPLIB : 'true'
+
+3. If you are using `App Engine Flexible
+<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
+:class:`PoolManager` without any configuration or special environment variables.
+"""
+
+from __future__ import absolute_import
+import io
+import logging
+import warnings
+from ..packages.six.moves.urllib.parse import urljoin
+
+from ..exceptions import (
+ HTTPError,
+ HTTPWarning,
+ MaxRetryError,
+ ProtocolError,
+ TimeoutError,
+ SSLError,
+)
+
+from ..request import RequestMethods
+from ..response import HTTPResponse
+from ..util.timeout import Timeout
+from ..util.retry import Retry
+from . import _appengine_environ
+
+try:
+ from google.appengine.api import urlfetch
+except ImportError:
+ urlfetch = None
+
+
+log = logging.getLogger(__name__)
+
+
+class AppEnginePlatformWarning(HTTPWarning):
+ pass
+
+
+class AppEnginePlatformError(HTTPError):
+ pass
+
+
+class AppEngineManager(RequestMethods):
+ """
+ Connection manager for Google App Engine sandbox applications.
+
+ This manager uses the URLFetch service directly instead of using the
+ emulated httplib, and is subject to URLFetch limitations as described in
+ the App Engine documentation `here
+ <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
+
+ Notably it will raise an :class:`AppEnginePlatformError` if:
+ * URLFetch is not available.
+ * You attempt to use this on App Engine Flexible, as full socket
+ support is available.
+ * A request size is more than 10 megabytes.
+ * A response size is more than 32 megabytes.
+ * You use an unsupported request method such as OPTIONS.
+
+ Beyond those cases, it will raise normal urllib3 errors.
+ """
+
+ def __init__(
+ self,
+ headers=None,
+ retries=None,
+ validate_certificate=True,
+ urlfetch_retries=True,
+ ):
+ if not urlfetch:
+ raise AppEnginePlatformError(
+ "URLFetch is not available in this environment."
+ )
+
+ warnings.warn(
+ "urllib3 is using URLFetch on Google App Engine sandbox instead "
+ "of sockets. To use sockets directly instead of URLFetch see "
+ "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.",
+ AppEnginePlatformWarning,
+ )
+
+ RequestMethods.__init__(self, headers)
+ self.validate_certificate = validate_certificate
+ self.urlfetch_retries = urlfetch_retries
+
+ self.retries = retries or Retry.DEFAULT
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ # Return False to re-raise any potential exceptions
+ return False
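+
+ # Context-manager usage sketch (illustrative; the URL and retry budget are
+ # placeholders, not values required by this class):
+ #
+ #   with AppEngineManager(retries=Retry(total=3)) as http:
+ #       r = http.request("GET", "https://example.com/")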
+
+ def urlopen(
+ self,
+ method,
+ url,
+ body=None,
+ headers=None,
+ retries=None,
+ redirect=True,
+ timeout=Timeout.DEFAULT_TIMEOUT,
+ **response_kw
+ ):
+
+ retries = self._get_retries(retries, redirect)
+
+ try:
+ follow_redirects = redirect and retries.redirect != 0 and retries.total
+ response = urlfetch.fetch(
+ url,
+ payload=body,
+ method=method,
+ headers=headers or {},
+ allow_truncated=False,
+ follow_redirects=self.urlfetch_retries and follow_redirects,
+ deadline=self._get_absolute_timeout(timeout),
+ validate_certificate=self.validate_certificate,
+ )
+ except urlfetch.DeadlineExceededError as e:
+ raise TimeoutError(self, e)
+
+ except urlfetch.InvalidURLError as e:
+ if "too large" in str(e):
+ raise AppEnginePlatformError(
+ "URLFetch request too large, URLFetch only "
+ "supports requests up to 10mb in size.",
+ e,
+ )
+ raise ProtocolError(e)
+
+ except urlfetch.DownloadError as e:
+ if "Too many redirects" in str(e):
+ raise MaxRetryError(self, url, reason=e)
+ raise ProtocolError(e)
+
+ except urlfetch.ResponseTooLargeError as e:
+ raise AppEnginePlatformError(
+ "URLFetch response too large, URLFetch only supports"
+ "responses up to 32mb in size.",
+ e,
+ )
+
+ except urlfetch.SSLCertificateError as e:
+ raise SSLError(e)
+
+ except urlfetch.InvalidMethodError as e:
+ raise AppEnginePlatformError(
+ "URLFetch does not support method: %s" % method, e
+ )
+
+ http_response = self._urlfetch_response_to_http_response(
+ response, retries=retries, **response_kw
+ )
+
+ # Handle redirect?
+ redirect_location = redirect and http_response.get_redirect_location()
+ if redirect_location:
+ # Check for redirect response
+ if self.urlfetch_retries and retries.raise_on_redirect:
+ raise MaxRetryError(self, url, "too many redirects")
+ else:
+ if http_response.status == 303:
+ method = "GET"
+
+ try:
+ retries = retries.increment(
+ method, url, response=http_response, _pool=self
+ )
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ raise MaxRetryError(self, url, "too many redirects")
+ return http_response
+
+ retries.sleep_for_retry(http_response)
+ log.debug("Redirecting %s -> %s", url, redirect_location)
+ redirect_url = urljoin(url, redirect_location)
+ return self.urlopen(
+ method,
+ redirect_url,
+ body,
+ headers,
+ retries=retries,
+ redirect=redirect,
+ timeout=timeout,
+ **response_kw
+ )
+
+ # Check if we should retry the HTTP response.
+ has_retry_after = bool(http_response.getheader("Retry-After"))
+ if retries.is_retry(method, http_response.status, has_retry_after):
+ retries = retries.increment(method, url, response=http_response, _pool=self)
+ log.debug("Retry: %s", url)
+ retries.sleep(http_response)
+ return self.urlopen(
+ method,
+ url,
+ body=body,
+ headers=headers,
+ retries=retries,
+ redirect=redirect,
+ timeout=timeout,
+ **response_kw
+ )
+
+ return http_response
+
+ def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
+
+ if is_prod_appengine():
+ # Production GAE handles deflate encoding automatically, but does
+ # not remove the encoding header.
+ content_encoding = urlfetch_resp.headers.get("content-encoding")
+
+ if content_encoding == "deflate":
+ del urlfetch_resp.headers["content-encoding"]
+
+ transfer_encoding = urlfetch_resp.headers.get("transfer-encoding")
+ # We have a full response's content,
+ # so let's make sure we don't report ourselves as chunked data.
+ if transfer_encoding == "chunked":
+ encodings = transfer_encoding.split(",")
+ encodings.remove("chunked")
+ urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings)
+
+ original_response = HTTPResponse(
+ # In order for decoding to work, we must present the content as
+ # a file-like object.
+ body=io.BytesIO(urlfetch_resp.content),
+ msg=urlfetch_resp.header_msg,
+ headers=urlfetch_resp.headers,
+ status=urlfetch_resp.status_code,
+ **response_kw
+ )
+
+ return HTTPResponse(
+ body=io.BytesIO(urlfetch_resp.content),
+ headers=urlfetch_resp.headers,
+ status=urlfetch_resp.status_code,
+ original_response=original_response,
+ **response_kw
+ )
+
+ def _get_absolute_timeout(self, timeout):
+ if timeout is Timeout.DEFAULT_TIMEOUT:
+ return None # Defer to URLFetch's default.
+ if isinstance(timeout, Timeout):
+ if timeout._read is not None or timeout._connect is not None:
+ warnings.warn(
+ "URLFetch does not support granular timeout settings, "
+ "reverting to total or default URLFetch timeout.",
+ AppEnginePlatformWarning,
+ )
+ return timeout.total
+ return timeout
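+
+ # Mapping sketch (behaviour read off the code above; values are examples):
+ #   - Timeout.DEFAULT_TIMEOUT          -> None, deferring to URLFetch's default
+ #   - a Timeout with per-phase values  -> warns, falls back to timeout.total
+ #   - a plain number such as 10        -> returned unchanged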
+
+ def _get_retries(self, retries, redirect):
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
+
+ if retries.connect or retries.read or retries.redirect:
+ warnings.warn(
+ "URLFetch only supports total retries and does not "
+ "recognize connect, read, or redirect retry parameters.",
+ AppEnginePlatformWarning,
+ )
+
+ return retries
+
+
+# Alias methods from _appengine_environ to maintain public API interface.
+
+is_appengine = _appengine_environ.is_appengine
+is_appengine_sandbox = _appengine_environ.is_appengine_sandbox
+is_local_appengine = _appengine_environ.is_local_appengine
+is_prod_appengine = _appengine_environ.is_prod_appengine
+is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms
diff --git a/third_party/python/urllib3/src/urllib3/contrib/ntlmpool.py b/third_party/python/urllib3/src/urllib3/contrib/ntlmpool.py
new file mode 100644
index 0000000000..1fd242a6e0
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/contrib/ntlmpool.py
@@ -0,0 +1,121 @@
+"""
+NTLM authenticating pool, contributed by erikcederstran
+
+Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
+"""
+from __future__ import absolute_import
+
+from logging import getLogger
+from ntlm import ntlm
+
+from .. import HTTPSConnectionPool
+from ..packages.six.moves.http_client import HTTPSConnection
+
+
+log = getLogger(__name__)
+
+
+class NTLMConnectionPool(HTTPSConnectionPool):
+ """
+ Implements an NTLM authentication version of an urllib3 connection pool
+ """
+
+ scheme = "https"
+
+ def __init__(self, user, pw, authurl, *args, **kwargs):
+ """
+ authurl is a random URL on the server that is protected by NTLM.
+ user is the Windows user, probably in the DOMAIN\\username format.
+ pw is the password for the user.
+ """
+ super(NTLMConnectionPool, self).__init__(*args, **kwargs)
+ self.authurl = authurl
+ self.rawuser = user
+ user_parts = user.split("\\", 1)
+ self.domain = user_parts[0].upper()
+ self.user = user_parts[1]
+ self.pw = pw
+
+ def _new_conn(self):
+ # Performs the NTLM handshake that secures the connection. The socket
+ # must be kept open while requests are performed.
+ self.num_connections += 1
+ log.debug(
+ "Starting NTLM HTTPS connection no. %d: https://%s%s",
+ self.num_connections,
+ self.host,
+ self.authurl,
+ )
+
+ headers = {"Connection": "Keep-Alive"}
+ req_header = "Authorization"
+ resp_header = "www-authenticate"
+
+ conn = HTTPSConnection(host=self.host, port=self.port)
+
+ # Send negotiation message
+ headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE(
+ self.rawuser
+ )
+ log.debug("Request headers: %s", headers)
+ conn.request("GET", self.authurl, None, headers)
+ res = conn.getresponse()
+ reshdr = dict(res.getheaders())
+ log.debug("Response status: %s %s", res.status, res.reason)
+ log.debug("Response headers: %s", reshdr)
+ log.debug("Response data: %s [...]", res.read(100))
+
+ # Remove the reference to the socket, so that it can not be closed by
+ # the response object (we want to keep the socket open)
+ res.fp = None
+
+ # Server should respond with a challenge message
+ auth_header_values = reshdr[resp_header].split(", ")
+ auth_header_value = None
+ for s in auth_header_values:
+ if s[:5] == "NTLM ":
+ auth_header_value = s[5:]
+ if auth_header_value is None:
+ raise Exception(
+ "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header])
+ )
+
+ # Send authentication message
+ ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE(
+ auth_header_value
+ )
+ auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
+ ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags
+ )
+ headers[req_header] = "NTLM %s" % auth_msg
+ log.debug("Request headers: %s", headers)
+ conn.request("GET", self.authurl, None, headers)
+ res = conn.getresponse()
+ log.debug("Response status: %s %s", res.status, res.reason)
+ log.debug("Response headers: %s", dict(res.getheaders()))
+ log.debug("Response data: %s [...]", res.read()[:100])
+ if res.status != 200:
+ if res.status == 401:
+ raise Exception("Server rejected request: wrong username or password")
+ raise Exception("Wrong server response: %s %s" % (res.status, res.reason))
+
+ res.fp = None
+ log.debug("Connection established")
+ return conn
+
+ def urlopen(
+ self,
+ method,
+ url,
+ body=None,
+ headers=None,
+ retries=3,
+ redirect=True,
+ assert_same_host=True,
+ ):
+ if headers is None:
+ headers = {}
+ headers["Connection"] = "Keep-Alive"
+ return super(NTLMConnectionPool, self).urlopen(
+ method, url, body, headers, retries, redirect, assert_same_host
+ )
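+
+# Usage sketch (illustrative; host, credentials, and paths are placeholders):
+#
+#   pool = NTLMConnectionPool(
+#       "DOMAIN\\user", "secret", authurl="/", host="server.example.com", port=443
+#   )
+#   r = pool.urlopen("GET", "/protected/resource")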
diff --git a/third_party/python/urllib3/src/urllib3/contrib/pyopenssl.py b/third_party/python/urllib3/src/urllib3/contrib/pyopenssl.py
new file mode 100644
index 0000000000..81a80651d4
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/contrib/pyopenssl.py
@@ -0,0 +1,501 @@
+"""
+SSL with SNI_-support for Python 2. Follow these instructions if you would
+like to verify SSL certificates in Python 2. Note, the default libraries do
+*not* do certificate checking; you need to do additional work to validate
+certificates yourself.
+
+This needs the following packages installed:
+
+* pyOpenSSL (tested with 16.0.0)
+* cryptography (minimum 1.3.4, from pyopenssl)
+* idna (minimum 2.0, from cryptography)
+
+However, pyopenssl depends on cryptography, which depends on idna, so while we
+use all three directly here we end up having relatively few packages required.
+
+You can install them with the following command:
+
+ pip install pyopenssl cryptography idna
+
+To activate certificate checking, call
+:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
+before you begin making HTTP requests. This can be done in a ``sitecustomize``
+module, or at any other time before your application begins using ``urllib3``,
+like this::
+
+ try:
+ import urllib3.contrib.pyopenssl
+ urllib3.contrib.pyopenssl.inject_into_urllib3()
+ except ImportError:
+ pass
+
+Now you can use :mod:`urllib3` as you normally would, and it will support SNI
+when the required modules are installed.
+
+Activating this module also has the positive side effect of disabling SSL/TLS
+compression in Python 2 (see `CRIME attack`_).
+
+If you want to configure the default list of supported cipher suites, you can
+set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
+
+.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
+.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
+"""
+from __future__ import absolute_import
+
+import OpenSSL.SSL
+from cryptography import x509
+from cryptography.hazmat.backends.openssl import backend as openssl_backend
+from cryptography.hazmat.backends.openssl.x509 import _Certificate
+
+try:
+ from cryptography.x509 import UnsupportedExtension
+except ImportError:
+ # UnsupportedExtension is gone in cryptography >= 2.1.0
+ class UnsupportedExtension(Exception):
+ pass
+
+
+from socket import timeout, error as SocketError
+from io import BytesIO
+
+try: # Platform-specific: Python 2
+ from socket import _fileobject
+except ImportError: # Platform-specific: Python 3
+ _fileobject = None
+ from ..packages.backports.makefile import backport_makefile
+
+import logging
+import ssl
+from ..packages import six
+import sys
+
+from .. import util
+
+
+__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
+
+# SNI always works.
+HAS_SNI = True
+
+# Map from urllib3 to PyOpenSSL compatible parameter-values.
+_openssl_versions = {
+ util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
+ ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
+}
+
+if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"):
+ _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
+
+if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
+
+if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
+
+
+_stdlib_to_openssl_verify = {
+ ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
+ ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
+ ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
+ + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
+}
+_openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items())
+
+# OpenSSL will only write 16K at a time
+SSL_WRITE_BLOCKSIZE = 16384
+
+orig_util_HAS_SNI = util.HAS_SNI
+orig_util_SSLContext = util.ssl_.SSLContext
+
+
+log = logging.getLogger(__name__)
+
+
+def inject_into_urllib3():
+ "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."
+
+ _validate_dependencies_met()
+
+ util.SSLContext = PyOpenSSLContext
+ util.ssl_.SSLContext = PyOpenSSLContext
+ util.HAS_SNI = HAS_SNI
+ util.ssl_.HAS_SNI = HAS_SNI
+ util.IS_PYOPENSSL = True
+ util.ssl_.IS_PYOPENSSL = True
+
+
+def extract_from_urllib3():
+ "Undo monkey-patching by :func:`inject_into_urllib3`."
+
+ util.SSLContext = orig_util_SSLContext
+ util.ssl_.SSLContext = orig_util_SSLContext
+ util.HAS_SNI = orig_util_HAS_SNI
+ util.ssl_.HAS_SNI = orig_util_HAS_SNI
+ util.IS_PYOPENSSL = False
+ util.ssl_.IS_PYOPENSSL = False
+
+
+def _validate_dependencies_met():
+ """
+ Verifies that PyOpenSSL's package-level dependencies have been met.
+ Throws `ImportError` if they are not met.
+ """
+ # Method added in `cryptography==1.1`; not available in older versions
+ from cryptography.x509.extensions import Extensions
+
+ if getattr(Extensions, "get_extension_for_class", None) is None:
+ raise ImportError(
+ "'cryptography' module missing required functionality. "
+ "Try upgrading to v1.3.4 or newer."
+ )
+
+ # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
+ # attribute is only present on those versions.
+ from OpenSSL.crypto import X509
+
+ x509 = X509()
+ if getattr(x509, "_x509", None) is None:
+ raise ImportError(
+ "'pyOpenSSL' module missing required functionality. "
+ "Try upgrading to v0.14 or newer."
+ )
+
+
+def _dnsname_to_stdlib(name):
+ """
+ Converts a dNSName SubjectAlternativeName field to the form used by the
+ standard library on the given Python version.
+
+ Cryptography produces a dNSName as a unicode string that was idna-decoded
+ from ASCII bytes. We need to idna-encode that string to get it back, and
+ then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
+ uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
+
+ If the name cannot be idna-encoded then we return None signalling that
+ the name given should be skipped.
+ """
+
+ def idna_encode(name):
+ """
+ Borrowed wholesale from the Python Cryptography Project. It turns out
+ that we can't just safely call `idna.encode`: it can explode for
+ wildcard names. This avoids that problem.
+ """
+ import idna
+
+ try:
+ for prefix in [u"*.", u"."]:
+ if name.startswith(prefix):
+ name = name[len(prefix) :]
+ return prefix.encode("ascii") + idna.encode(name)
+ return idna.encode(name)
+ except idna.core.IDNAError:
+ return None
+
+ # Don't send IPv6 addresses through the IDNA encoder.
+ if ":" in name:
+ return name
+
+ name = idna_encode(name)
+ if name is None:
+ return None
+ elif sys.version_info >= (3, 0):
+ name = name.decode("utf-8")
+ return name
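+
+# Behaviour sketch (worked examples, assumed rather than taken from upstream tests):
+#   - u"example.com"   -> "example.com" on Python 3 (bytes on Python 2)
+#   - u"*.example.com" -> the wildcard prefix is preserved around the idna-encoded label
+#   - anything containing ":" (an IPv6 literal) is returned unchanged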
+
+
+def get_subj_alt_name(peer_cert):
+ """
+ Given a PyOpenSSL certificate, provides all the subject alternative names.
+ """
+ # Pass the cert to cryptography, which has much better APIs for this.
+ if hasattr(peer_cert, "to_cryptography"):
+ cert = peer_cert.to_cryptography()
+ else:
+ # This is technically using private APIs, but should work across all
+ # relevant versions before PyOpenSSL got a proper API for this.
+ cert = _Certificate(openssl_backend, peer_cert._x509)
+
+ # We want to find the SAN extension. Ask Cryptography to locate it (it's
+ # faster than looping in Python)
+ try:
+ ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
+ except x509.ExtensionNotFound:
+ # No such extension, return the empty list.
+ return []
+ except (
+ x509.DuplicateExtension,
+ UnsupportedExtension,
+ x509.UnsupportedGeneralNameType,
+ UnicodeError,
+ ) as e:
+ # A problem has been found with the quality of the certificate. Assume
+ # no SAN field is present.
+ log.warning(
+ "A problem was encountered with the certificate that prevented "
+ "urllib3 from finding the SubjectAlternativeName field. This can "
+ "affect certificate validation. The error was %s",
+ e,
+ )
+ return []
+
+ # We want to return dNSName and iPAddress fields. We need to cast the IPs
+ # back to strings because the match_hostname function wants them as
+ # strings.
+ # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8
+ # decoded. This is pretty frustrating, but that's what the standard library
+ # does with certificates, and so we need to attempt to do the same.
+ # We also want to skip over names which cannot be idna encoded.
+ names = [
+ ("DNS", name)
+ for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
+ if name is not None
+ ]
+ names.extend(
+ ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress)
+ )
+
+ return names
+
+
+class WrappedSocket(object):
+ """API-compatibility wrapper for Python OpenSSL's Connection-class.
+
+ Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
+ collector of pypy.
+ """
+
+ def __init__(self, connection, socket, suppress_ragged_eofs=True):
+ self.connection = connection
+ self.socket = socket
+ self.suppress_ragged_eofs = suppress_ragged_eofs
+ self._makefile_refs = 0
+ self._closed = False
+
+ def fileno(self):
+ return self.socket.fileno()
+
+ # Copy-pasted from Python 3.5 source code
+ def _decref_socketios(self):
+ if self._makefile_refs > 0:
+ self._makefile_refs -= 1
+ if self._closed:
+ self.close()
+
+ def recv(self, *args, **kwargs):
+ try:
+ data = self.connection.recv(*args, **kwargs)
+ except OpenSSL.SSL.SysCallError as e:
+ if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
+ return b""
+ else:
+ raise SocketError(str(e))
+ except OpenSSL.SSL.ZeroReturnError:
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+ return b""
+ else:
+ raise
+ except OpenSSL.SSL.WantReadError:
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
+ raise timeout("The read operation timed out")
+ else:
+ return self.recv(*args, **kwargs)
+
+ # TLS 1.3 post-handshake authentication
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("read error: %r" % e)
+ else:
+ return data
+
+ def recv_into(self, *args, **kwargs):
+ try:
+ return self.connection.recv_into(*args, **kwargs)
+ except OpenSSL.SSL.SysCallError as e:
+ if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
+ return 0
+ else:
+ raise SocketError(str(e))
+ except OpenSSL.SSL.ZeroReturnError:
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+ return 0
+ else:
+ raise
+ except OpenSSL.SSL.WantReadError:
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
+ raise timeout("The read operation timed out")
+ else:
+ return self.recv_into(*args, **kwargs)
+
+ # TLS 1.3 post-handshake authentication
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("read error: %r" % e)
+
+ def settimeout(self, timeout):
+ return self.socket.settimeout(timeout)
+
+ def _send_until_done(self, data):
+ while True:
+ try:
+ return self.connection.send(data)
+ except OpenSSL.SSL.WantWriteError:
+ if not util.wait_for_write(self.socket, self.socket.gettimeout()):
+ raise timeout()
+ continue
+ except OpenSSL.SSL.SysCallError as e:
+ raise SocketError(str(e))
+
+ def sendall(self, data):
+ total_sent = 0
+ while total_sent < len(data):
+ sent = self._send_until_done(
+ data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]
+ )
+ total_sent += sent
+
+ def shutdown(self):
+ # FIXME rethrow compatible exceptions should we ever use this
+ self.connection.shutdown()
+
+ def close(self):
+ if self._makefile_refs < 1:
+ try:
+ self._closed = True
+ return self.connection.close()
+ except OpenSSL.SSL.Error:
+ return
+ else:
+ self._makefile_refs -= 1
+
+ def getpeercert(self, binary_form=False):
+ x509 = self.connection.get_peer_certificate()
+
+ if not x509:
+ return x509
+
+ if binary_form:
+ return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509)
+
+ return {
+ "subject": ((("commonName", x509.get_subject().CN),),),
+ "subjectAltName": get_subj_alt_name(x509),
+ }
+
+ def version(self):
+ return self.connection.get_protocol_version_name()
+
+ def _reuse(self):
+ self._makefile_refs += 1
+
+ def _drop(self):
+ if self._makefile_refs < 1:
+ self.close()
+ else:
+ self._makefile_refs -= 1
+
+
+if _fileobject: # Platform-specific: Python 2
+
+ def makefile(self, mode, bufsize=-1):
+ self._makefile_refs += 1
+ return _fileobject(self, mode, bufsize, close=True)
+
+
+else: # Platform-specific: Python 3
+ makefile = backport_makefile
+
+WrappedSocket.makefile = makefile
+
+
+class PyOpenSSLContext(object):
+ """
+ I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
+ for translating the interface of the standard library ``SSLContext`` object
+ to calls into PyOpenSSL.
+ """
+
+ def __init__(self, protocol):
+ self.protocol = _openssl_versions[protocol]
+ self._ctx = OpenSSL.SSL.Context(self.protocol)
+ self._options = 0
+ self.check_hostname = False
+
+ @property
+ def options(self):
+ return self._options
+
+ @options.setter
+ def options(self, value):
+ self._options = value
+ self._ctx.set_options(value)
+
+ @property
+ def verify_mode(self):
+ return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]
+
+ @verify_mode.setter
+ def verify_mode(self, value):
+ self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)
+
+ def set_default_verify_paths(self):
+ self._ctx.set_default_verify_paths()
+
+ def set_ciphers(self, ciphers):
+ if isinstance(ciphers, six.text_type):
+ ciphers = ciphers.encode("utf-8")
+ self._ctx.set_cipher_list(ciphers)
+
+ def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+ if cafile is not None:
+ cafile = cafile.encode("utf-8")
+ if capath is not None:
+ capath = capath.encode("utf-8")
+ try:
+ self._ctx.load_verify_locations(cafile, capath)
+ if cadata is not None:
+ self._ctx.load_verify_locations(BytesIO(cadata))
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("unable to load trusted certificates: %r" % e)
+
+ def load_cert_chain(self, certfile, keyfile=None, password=None):
+ self._ctx.use_certificate_chain_file(certfile)
+ if password is not None:
+ if not isinstance(password, six.binary_type):
+ password = password.encode("utf-8")
+ self._ctx.set_passwd_cb(lambda *_: password)
+ self._ctx.use_privatekey_file(keyfile or certfile)
+
+ def wrap_socket(
+ self,
+ sock,
+ server_side=False,
+ do_handshake_on_connect=True,
+ suppress_ragged_eofs=True,
+ server_hostname=None,
+ ):
+ cnx = OpenSSL.SSL.Connection(self._ctx, sock)
+
+ if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3
+ server_hostname = server_hostname.encode("utf-8")
+
+ if server_hostname is not None:
+ cnx.set_tlsext_host_name(server_hostname)
+
+ cnx.set_connect_state()
+
+ while True:
+ try:
+ cnx.do_handshake()
+ except OpenSSL.SSL.WantReadError:
+ if not util.wait_for_read(sock, sock.gettimeout()):
+ raise timeout("select timed out")
+ continue
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("bad handshake: %r" % e)
+ break
+
+ return WrappedSocket(cnx, sock)
+
+
+def _verify_callback(cnx, x509, err_no, err_depth, return_code):
+ return err_no == 0
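+
+# Direct-use sketch for PyOpenSSLContext (illustrative; the CA path, hostname,
+# and socket are placeholders):
+#
+#   ctx = PyOpenSSLContext(util.PROTOCOL_TLS)   # mapped to SSLv23_METHOD above
+#   ctx.verify_mode = ssl.CERT_REQUIRED
+#   ctx.load_verify_locations(cafile="/path/to/ca.pem")
+#   tls_sock = ctx.wrap_socket(plain_sock, server_hostname="example.com")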
diff --git a/third_party/python/urllib3/src/urllib3/contrib/securetransport.py b/third_party/python/urllib3/src/urllib3/contrib/securetransport.py
new file mode 100644
index 0000000000..a6b7e94ade
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/contrib/securetransport.py
@@ -0,0 +1,864 @@
+"""
+SecureTransport support for urllib3 via ctypes.
+
+This makes platform-native TLS available to urllib3 users on macOS without the
+use of a compiler. This is an important feature because the Python Package
+Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
+that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
+this is to give macOS users an alternative solution to the problem, and that
+solution is to use SecureTransport.
+
+We use ctypes here because this solution must not require a compiler. That's
+because pip is not allowed to require a compiler either.
+
+This is not intended to be a seriously long-term solution to this problem.
+The hope is that PEP 543 will eventually solve this issue for us, at which
+point we can retire this contrib module. But in the short term, we need to
+solve the impending tire fire that is Python on Mac without this kind of
+contrib module. So...here we are.
+
+To use this module, simply import and inject it::
+
+ import urllib3.contrib.securetransport
+ urllib3.contrib.securetransport.inject_into_urllib3()
+
+Happy TLSing!
+
+This code is a bastardised version of the code found in Will Bond's oscrypto
+library. An enormous debt is owed to him for blazing this trail for us. For
+that reason, this code should be considered to be covered both by urllib3's
+license and by oscrypto's:
+
+ Copyright (c) 2015-2016 Will Bond <will@wbond.net>
+
+ Permission is hereby granted, free of charge, to any person obtaining a
+ copy of this software and associated documentation files (the "Software"),
+ to deal in the Software without restriction, including without limitation
+ the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ and/or sell copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
+"""
+from __future__ import absolute_import
+
+import contextlib
+import ctypes
+import errno
+import os.path
+import shutil
+import socket
+import ssl
+import threading
+import weakref
+
+from .. import util
+from ._securetransport.bindings import Security, SecurityConst, CoreFoundation
+from ._securetransport.low_level import (
+ _assert_no_error,
+ _cert_array_from_pem,
+ _temporary_keychain,
+ _load_client_cert_chain,
+)
+
+try: # Platform-specific: Python 2
+ from socket import _fileobject
+except ImportError: # Platform-specific: Python 3
+ _fileobject = None
+ from ..packages.backports.makefile import backport_makefile
+
+__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
+
+# SNI always works
+HAS_SNI = True
+
+orig_util_HAS_SNI = util.HAS_SNI
+orig_util_SSLContext = util.ssl_.SSLContext
+
+# This dictionary is used by the read callback to obtain a handle to the
+# calling wrapped socket. This is a pretty silly approach, but for now it'll
+# do. I feel like I should be able to smuggle a handle to the wrapped socket
+# directly in the SSLConnectionRef, but for now this approach will work I
+# guess.
+#
+# We need to lock around this structure for inserts, but we don't do it for
+# reads/writes in the callbacks. The reasoning here goes as follows:
+#
+# 1. It is not possible to call into the callbacks before the dictionary is
+# populated, so once in the callback the id must be in the dictionary.
+# 2. The callbacks don't mutate the dictionary, they only read from it, and
+# so cannot conflict with any of the insertions.
+#
+# This is good: if we had to lock in the callbacks we'd drastically slow down
+# the performance of this code.
+_connection_refs = weakref.WeakValueDictionary()
+_connection_ref_lock = threading.Lock()
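+
+# Insertion sketch (mirrors what handshake() does further down; `wrapped_socket`
+# is a placeholder for the socket being registered):
+#
+#   with _connection_ref_lock:
+#       handle = id(wrapped_socket) % 2147483647
+#       while handle in _connection_refs:
+#           handle = (handle + 1) % 2147483647
+#       _connection_refs[handle] = wrapped_socket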
+
+# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
+# for no better reason than we need *a* limit, and this one is right there.
+SSL_WRITE_BLOCKSIZE = 16384
+
+# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
+# individual cipher suites. We need to do this because this is how
+# SecureTransport wants them.
+CIPHER_SUITES = [
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
+ SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
+ SecurityConst.TLS_AES_256_GCM_SHA384,
+ SecurityConst.TLS_AES_128_GCM_SHA256,
+ SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
+ SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
+ SecurityConst.TLS_AES_128_CCM_8_SHA256,
+ SecurityConst.TLS_AES_128_CCM_SHA256,
+ SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
+ SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
+ SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
+]
+
+# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
+# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
+# TLSv1 to 1.2 are supported on macOS 10.8+
+_protocol_to_min_max = {
+ util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12)
+}
+
+if hasattr(ssl, "PROTOCOL_SSLv2"):
+ _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
+ SecurityConst.kSSLProtocol2,
+ SecurityConst.kSSLProtocol2,
+ )
+if hasattr(ssl, "PROTOCOL_SSLv3"):
+ _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
+ SecurityConst.kSSLProtocol3,
+ SecurityConst.kSSLProtocol3,
+ )
+if hasattr(ssl, "PROTOCOL_TLSv1"):
+ _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
+ SecurityConst.kTLSProtocol1,
+ SecurityConst.kTLSProtocol1,
+ )
+if hasattr(ssl, "PROTOCOL_TLSv1_1"):
+ _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
+ SecurityConst.kTLSProtocol11,
+ SecurityConst.kTLSProtocol11,
+ )
+if hasattr(ssl, "PROTOCOL_TLSv1_2"):
+ _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
+ SecurityConst.kTLSProtocol12,
+ SecurityConst.kTLSProtocol12,
+ )
+
+
+def inject_into_urllib3():
+ """
+ Monkey-patch urllib3 with SecureTransport-backed SSL-support.
+ """
+ util.SSLContext = SecureTransportContext
+ util.ssl_.SSLContext = SecureTransportContext
+ util.HAS_SNI = HAS_SNI
+ util.ssl_.HAS_SNI = HAS_SNI
+ util.IS_SECURETRANSPORT = True
+ util.ssl_.IS_SECURETRANSPORT = True
+
+
+def extract_from_urllib3():
+ """
+ Undo monkey-patching by :func:`inject_into_urllib3`.
+ """
+ util.SSLContext = orig_util_SSLContext
+ util.ssl_.SSLContext = orig_util_SSLContext
+ util.HAS_SNI = orig_util_HAS_SNI
+ util.ssl_.HAS_SNI = orig_util_HAS_SNI
+ util.IS_SECURETRANSPORT = False
+ util.ssl_.IS_SECURETRANSPORT = False
+
+
+def _read_callback(connection_id, data_buffer, data_length_pointer):
+ """
+ SecureTransport read callback. This is called by ST to request that data
+ be returned from the socket.
+ """
+ wrapped_socket = None
+ try:
+ wrapped_socket = _connection_refs.get(connection_id)
+ if wrapped_socket is None:
+ return SecurityConst.errSSLInternal
+ base_socket = wrapped_socket.socket
+
+ requested_length = data_length_pointer[0]
+
+ timeout = wrapped_socket.gettimeout()
+ error = None
+ read_count = 0
+
+ try:
+ while read_count < requested_length:
+ if timeout is None or timeout >= 0:
+ if not util.wait_for_read(base_socket, timeout):
+ raise socket.error(errno.EAGAIN, "timed out")
+
+ remaining = requested_length - read_count
+ buffer = (ctypes.c_char * remaining).from_address(
+ data_buffer + read_count
+ )
+ chunk_size = base_socket.recv_into(buffer, remaining)
+ read_count += chunk_size
+ if not chunk_size:
+ if not read_count:
+ return SecurityConst.errSSLClosedGraceful
+ break
+ except socket.error as e:
+ error = e.errno
+
+ if error is not None and error != errno.EAGAIN:
+ data_length_pointer[0] = read_count
+ if error == errno.ECONNRESET or error == errno.EPIPE:
+ return SecurityConst.errSSLClosedAbort
+ raise
+
+ data_length_pointer[0] = read_count
+
+ if read_count != requested_length:
+ return SecurityConst.errSSLWouldBlock
+
+ return 0
+ except Exception as e:
+ if wrapped_socket is not None:
+ wrapped_socket._exception = e
+ return SecurityConst.errSSLInternal
+
+
+def _write_callback(connection_id, data_buffer, data_length_pointer):
+ """
+ SecureTransport write callback. This is called by ST to request that data
+ actually be sent on the network.
+ """
+ wrapped_socket = None
+ try:
+ wrapped_socket = _connection_refs.get(connection_id)
+ if wrapped_socket is None:
+ return SecurityConst.errSSLInternal
+ base_socket = wrapped_socket.socket
+
+ bytes_to_write = data_length_pointer[0]
+ data = ctypes.string_at(data_buffer, bytes_to_write)
+
+ timeout = wrapped_socket.gettimeout()
+ error = None
+ sent = 0
+
+ try:
+ while sent < bytes_to_write:
+ if timeout is None or timeout >= 0:
+ if not util.wait_for_write(base_socket, timeout):
+ raise socket.error(errno.EAGAIN, "timed out")
+ chunk_sent = base_socket.send(data)
+ sent += chunk_sent
+
+ # This has some needless copying here, but I'm not sure there's
+ # much value in optimising this data path.
+ data = data[chunk_sent:]
+ except socket.error as e:
+ error = e.errno
+
+ if error is not None and error != errno.EAGAIN:
+ data_length_pointer[0] = sent
+ if error == errno.ECONNRESET or error == errno.EPIPE:
+ return SecurityConst.errSSLClosedAbort
+ raise
+
+ data_length_pointer[0] = sent
+
+ if sent != bytes_to_write:
+ return SecurityConst.errSSLWouldBlock
+
+ return 0
+ except Exception as e:
+ if wrapped_socket is not None:
+ wrapped_socket._exception = e
+ return SecurityConst.errSSLInternal
+
+
+# We need to keep these two objects references alive: if they get GC'd while
+# in use then SecureTransport could attempt to call a function that is in freed
+# memory. That would be...uh...bad. Yeah, that's the word. Bad.
+_read_callback_pointer = Security.SSLReadFunc(_read_callback)
+_write_callback_pointer = Security.SSLWriteFunc(_write_callback)
+
+
+class WrappedSocket(object):
+ """
+ API-compatibility wrapper for Python's OpenSSL wrapped socket object.
+
+ Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
+ collector of PyPy.
+ """
+
+ def __init__(self, socket):
+ self.socket = socket
+ self.context = None
+ self._makefile_refs = 0
+ self._closed = False
+ self._exception = None
+ self._keychain = None
+ self._keychain_dir = None
+ self._client_cert_chain = None
+
+ # We save off the previously-configured timeout and then set it to
+ # zero. This is done because we use select and friends to handle the
+ # timeouts, but if we leave the timeout set on the lower socket then
+ # Python will "kindly" call select on that socket again for us. Avoid
+ # that by forcing the timeout to zero.
+ self._timeout = self.socket.gettimeout()
+ self.socket.settimeout(0)
+
+ @contextlib.contextmanager
+ def _raise_on_error(self):
+ """
+ A context manager that can be used to wrap calls that do I/O from
+ SecureTransport. If any of the I/O callbacks hit an exception, this
+ context manager will correctly propagate the exception after the fact.
+ This avoids silently swallowing those exceptions.
+
+ It also correctly forces the socket closed.
+ """
+ self._exception = None
+
+ # We explicitly don't catch around this yield because in the unlikely
+ # event that an exception was hit in the block we don't want to swallow
+ # it.
+ yield
+ if self._exception is not None:
+ exception, self._exception = self._exception, None
+ self.close()
+ raise exception
+
+ def _set_ciphers(self):
+ """
+ Sets up the allowed ciphers. By default this matches the set in
+        util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. The cipher
+        set is hard-coded and cannot be changed at this time, mostly because
+        parsing OpenSSL cipher strings is going to be a freaking nightmare.
+ """
+ ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
+ result = Security.SSLSetEnabledCiphers(
+ self.context, ciphers, len(CIPHER_SUITES)
+ )
+ _assert_no_error(result)
+
+ def _custom_validate(self, verify, trust_bundle):
+ """
+ Called when we have set custom validation. We do this in two cases:
+ first, when cert validation is entirely disabled; and second, when
+ using a custom trust DB.
+ """
+ # If we disabled cert validation, just say: cool.
+ if not verify:
+ return
+
+ # We want data in memory, so load it up.
+ if os.path.isfile(trust_bundle):
+ with open(trust_bundle, "rb") as f:
+ trust_bundle = f.read()
+
+ cert_array = None
+ trust = Security.SecTrustRef()
+
+ try:
+ # Get a CFArray that contains the certs we want.
+ cert_array = _cert_array_from_pem(trust_bundle)
+
+ # Ok, now the hard part. We want to get the SecTrustRef that ST has
+ # created for this connection, shove our CAs into it, tell ST to
+ # ignore everything else it knows, and then ask if it can build a
+ # chain. This is a buuuunch of code.
+ result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
+ _assert_no_error(result)
+ if not trust:
+ raise ssl.SSLError("Failed to copy trust reference")
+
+ result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
+ _assert_no_error(result)
+
+ result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
+ _assert_no_error(result)
+
+ trust_result = Security.SecTrustResultType()
+ result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
+ _assert_no_error(result)
+ finally:
+ if trust:
+ CoreFoundation.CFRelease(trust)
+
+ if cert_array is not None:
+ CoreFoundation.CFRelease(cert_array)
+
+ # Ok, now we can look at what the result was.
+ successes = (
+ SecurityConst.kSecTrustResultUnspecified,
+ SecurityConst.kSecTrustResultProceed,
+ )
+ if trust_result.value not in successes:
+ raise ssl.SSLError(
+ "certificate verify failed, error code: %d" % trust_result.value
+ )
+
+ def handshake(
+ self,
+ server_hostname,
+ verify,
+ trust_bundle,
+ min_version,
+ max_version,
+ client_cert,
+ client_key,
+ client_key_passphrase,
+ ):
+ """
+        Actually performs the TLS handshake. This is run automatically by the
+        wrapped socket, and shouldn't be needed in user code.
+ """
+ # First, we do the initial bits of connection setup. We need to create
+ # a context, set its I/O funcs, and set the connection reference.
+ self.context = Security.SSLCreateContext(
+ None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
+ )
+ result = Security.SSLSetIOFuncs(
+ self.context, _read_callback_pointer, _write_callback_pointer
+ )
+ _assert_no_error(result)
+
+ # Here we need to compute the handle to use. We do this by taking the
+ # id of self modulo 2**31 - 1. If this is already in the dictionary, we
+ # just keep incrementing by one until we find a free space.
+ with _connection_ref_lock:
+ handle = id(self) % 2147483647
+ while handle in _connection_refs:
+ handle = (handle + 1) % 2147483647
+ _connection_refs[handle] = self
+
+ result = Security.SSLSetConnection(self.context, handle)
+ _assert_no_error(result)
+
+ # If we have a server hostname, we should set that too.
+ if server_hostname:
+ if not isinstance(server_hostname, bytes):
+ server_hostname = server_hostname.encode("utf-8")
+
+ result = Security.SSLSetPeerDomainName(
+ self.context, server_hostname, len(server_hostname)
+ )
+ _assert_no_error(result)
+
+ # Setup the ciphers.
+ self._set_ciphers()
+
+ # Set the minimum and maximum TLS versions.
+ result = Security.SSLSetProtocolVersionMin(self.context, min_version)
+ _assert_no_error(result)
+
+ result = Security.SSLSetProtocolVersionMax(self.context, max_version)
+ _assert_no_error(result)
+
+ # If there's a trust DB, we need to use it. We do that by telling
+ # SecureTransport to break on server auth. We also do that if we don't
+ # want to validate the certs at all: we just won't actually do any
+ # authing in that case.
+ if not verify or trust_bundle is not None:
+ result = Security.SSLSetSessionOption(
+ self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True
+ )
+ _assert_no_error(result)
+
+ # If there's a client cert, we need to use it.
+ if client_cert:
+ self._keychain, self._keychain_dir = _temporary_keychain()
+ self._client_cert_chain = _load_client_cert_chain(
+ self._keychain, client_cert, client_key
+ )
+ result = Security.SSLSetCertificate(self.context, self._client_cert_chain)
+ _assert_no_error(result)
+
+ while True:
+ with self._raise_on_error():
+ result = Security.SSLHandshake(self.context)
+
+ if result == SecurityConst.errSSLWouldBlock:
+ raise socket.timeout("handshake timed out")
+ elif result == SecurityConst.errSSLServerAuthCompleted:
+ self._custom_validate(verify, trust_bundle)
+ continue
+ else:
+ _assert_no_error(result)
+ break
+
+ def fileno(self):
+ return self.socket.fileno()
+
+ # Copy-pasted from Python 3.5 source code
+ def _decref_socketios(self):
+ if self._makefile_refs > 0:
+ self._makefile_refs -= 1
+ if self._closed:
+ self.close()
+
+ def recv(self, bufsiz):
+ buffer = ctypes.create_string_buffer(bufsiz)
+ bytes_read = self.recv_into(buffer, bufsiz)
+ data = buffer[:bytes_read]
+ return data
+
+ def recv_into(self, buffer, nbytes=None):
+ # Read short on EOF.
+ if self._closed:
+ return 0
+
+ if nbytes is None:
+ nbytes = len(buffer)
+
+ buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
+ processed_bytes = ctypes.c_size_t(0)
+
+ with self._raise_on_error():
+ result = Security.SSLRead(
+ self.context, buffer, nbytes, ctypes.byref(processed_bytes)
+ )
+
+ # There are some result codes that we want to treat as "not always
+ # errors". Specifically, those are errSSLWouldBlock,
+ # errSSLClosedGraceful, and errSSLClosedNoNotify.
+ if result == SecurityConst.errSSLWouldBlock:
+ # If we didn't process any bytes, then this was just a time out.
+ # However, we can get errSSLWouldBlock in situations when we *did*
+ # read some data, and in those cases we should just read "short"
+ # and return.
+ if processed_bytes.value == 0:
+ # Timed out, no data read.
+ raise socket.timeout("recv timed out")
+ elif result in (
+ SecurityConst.errSSLClosedGraceful,
+ SecurityConst.errSSLClosedNoNotify,
+ ):
+ # The remote peer has closed this connection. We should do so as
+ # well. Note that we don't actually return here because in
+ # principle this could actually be fired along with return data.
+ # It's unlikely though.
+ self.close()
+ else:
+ _assert_no_error(result)
+
+ # Ok, we read and probably succeeded. We should return whatever data
+ # was actually read.
+ return processed_bytes.value
+
+ def settimeout(self, timeout):
+ self._timeout = timeout
+
+ def gettimeout(self):
+ return self._timeout
+
+ def send(self, data):
+ processed_bytes = ctypes.c_size_t(0)
+
+ with self._raise_on_error():
+ result = Security.SSLWrite(
+ self.context, data, len(data), ctypes.byref(processed_bytes)
+ )
+
+ if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
+ # Timed out
+ raise socket.timeout("send timed out")
+ else:
+ _assert_no_error(result)
+
+ # We sent, and probably succeeded. Tell them how much we sent.
+ return processed_bytes.value
+
+ def sendall(self, data):
+ total_sent = 0
+ while total_sent < len(data):
+ sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE])
+ total_sent += sent
+
+ def shutdown(self):
+ with self._raise_on_error():
+ Security.SSLClose(self.context)
+
+ def close(self):
+ # TODO: should I do clean shutdown here? Do I have to?
+ if self._makefile_refs < 1:
+ self._closed = True
+ if self.context:
+ CoreFoundation.CFRelease(self.context)
+ self.context = None
+ if self._client_cert_chain:
+ CoreFoundation.CFRelease(self._client_cert_chain)
+ self._client_cert_chain = None
+ if self._keychain:
+ Security.SecKeychainDelete(self._keychain)
+ CoreFoundation.CFRelease(self._keychain)
+ shutil.rmtree(self._keychain_dir)
+ self._keychain = self._keychain_dir = None
+ return self.socket.close()
+ else:
+ self._makefile_refs -= 1
+
+ def getpeercert(self, binary_form=False):
+ # Urgh, annoying.
+ #
+ # Here's how we do this:
+ #
+ # 1. Call SSLCopyPeerTrust to get hold of the trust object for this
+ # connection.
+ # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
+ # 3. To get the CN, call SecCertificateCopyCommonName and process that
+ # string so that it's of the appropriate type.
+ # 4. To get the SAN, we need to do something a bit more complex:
+ # a. Call SecCertificateCopyValues to get the data, requesting
+ # kSecOIDSubjectAltName.
+ # b. Mess about with this dictionary to try to get the SANs out.
+ #
+ # This is gross. Really gross. It's going to be a few hundred LoC extra
+ # just to repeat something that SecureTransport can *already do*. So my
+ # operating assumption at this time is that what we want to do is
+ # instead to just flag to urllib3 that it shouldn't do its own hostname
+ # validation when using SecureTransport.
+ if not binary_form:
+ raise ValueError("SecureTransport only supports dumping binary certs")
+ trust = Security.SecTrustRef()
+ certdata = None
+ der_bytes = None
+
+ try:
+ # Grab the trust store.
+ result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
+ _assert_no_error(result)
+ if not trust:
+ # Probably we haven't done the handshake yet. No biggie.
+ return None
+
+ cert_count = Security.SecTrustGetCertificateCount(trust)
+ if not cert_count:
+ # Also a case that might happen if we haven't handshaked.
+ # Handshook? Handshaken?
+ return None
+
+ leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
+ assert leaf
+
+ # Ok, now we want the DER bytes.
+ certdata = Security.SecCertificateCopyData(leaf)
+ assert certdata
+
+ data_length = CoreFoundation.CFDataGetLength(certdata)
+ data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
+ der_bytes = ctypes.string_at(data_buffer, data_length)
+ finally:
+ if certdata:
+ CoreFoundation.CFRelease(certdata)
+ if trust:
+ CoreFoundation.CFRelease(trust)
+
+ return der_bytes
+
+ def version(self):
+ protocol = Security.SSLProtocol()
+ result = Security.SSLGetNegotiatedProtocolVersion(
+ self.context, ctypes.byref(protocol)
+ )
+ _assert_no_error(result)
+ if protocol.value == SecurityConst.kTLSProtocol13:
+ raise ssl.SSLError("SecureTransport does not support TLS 1.3")
+ elif protocol.value == SecurityConst.kTLSProtocol12:
+ return "TLSv1.2"
+ elif protocol.value == SecurityConst.kTLSProtocol11:
+ return "TLSv1.1"
+ elif protocol.value == SecurityConst.kTLSProtocol1:
+ return "TLSv1"
+ elif protocol.value == SecurityConst.kSSLProtocol3:
+ return "SSLv3"
+ elif protocol.value == SecurityConst.kSSLProtocol2:
+ return "SSLv2"
+ else:
+ raise ssl.SSLError("Unknown TLS version: %r" % protocol)
+
+ def _reuse(self):
+ self._makefile_refs += 1
+
+ def _drop(self):
+ if self._makefile_refs < 1:
+ self.close()
+ else:
+ self._makefile_refs -= 1
+
+
+if _fileobject: # Platform-specific: Python 2
+
+ def makefile(self, mode, bufsize=-1):
+ self._makefile_refs += 1
+ return _fileobject(self, mode, bufsize, close=True)
+
+
+else: # Platform-specific: Python 3
+
+ def makefile(self, mode="r", buffering=None, *args, **kwargs):
+ # We disable buffering with SecureTransport because it conflicts with
+ # the buffering that ST does internally (see issue #1153 for more).
+ buffering = 0
+ return backport_makefile(self, mode, buffering, *args, **kwargs)
+
+
+WrappedSocket.makefile = makefile
+
+
+class SecureTransportContext(object):
+ """
+ I am a wrapper class for the SecureTransport library, to translate the
+ interface of the standard library ``SSLContext`` object to calls into
+ SecureTransport.
+ """
+
+ def __init__(self, protocol):
+ self._min_version, self._max_version = _protocol_to_min_max[protocol]
+ self._options = 0
+ self._verify = False
+ self._trust_bundle = None
+ self._client_cert = None
+ self._client_key = None
+ self._client_key_passphrase = None
+
+ @property
+ def check_hostname(self):
+ """
+ SecureTransport cannot have its hostname checking disabled. For more,
+ see the comment on getpeercert() in this file.
+ """
+ return True
+
+ @check_hostname.setter
+ def check_hostname(self, value):
+ """
+ SecureTransport cannot have its hostname checking disabled. For more,
+ see the comment on getpeercert() in this file.
+ """
+ pass
+
+ @property
+ def options(self):
+ # TODO: Well, crap.
+ #
+ # So this is the bit of the code that is the most likely to cause us
+ # trouble. Essentially we need to enumerate all of the SSL options that
+ # users might want to use and try to see if we can sensibly translate
+ # them, or whether we should just ignore them.
+ return self._options
+
+ @options.setter
+ def options(self, value):
+ # TODO: Update in line with above.
+ self._options = value
+
+ @property
+ def verify_mode(self):
+ return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE
+
+ @verify_mode.setter
+ def verify_mode(self, value):
+        self._verify = value == ssl.CERT_REQUIRED
+
+ def set_default_verify_paths(self):
+ # So, this has to do something a bit weird. Specifically, what it does
+ # is nothing.
+ #
+ # This means that, if we had previously had load_verify_locations
+ # called, this does not undo that. We need to do that because it turns
+ # out that the rest of the urllib3 code will attempt to load the
+ # default verify paths if it hasn't been told about any paths, even if
+        # the context itself was configured sometime earlier. We resolve that
+        # by just ignoring it.
+ pass
+
+ def load_default_certs(self):
+ return self.set_default_verify_paths()
+
+ def set_ciphers(self, ciphers):
+ # For now, we just require the default cipher string.
+ if ciphers != util.ssl_.DEFAULT_CIPHERS:
+ raise ValueError("SecureTransport doesn't support custom cipher strings")
+
+ def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+ # OK, we only really support cadata and cafile.
+ if capath is not None:
+ raise ValueError("SecureTransport does not support cert directories")
+
+ # Raise if cafile does not exist.
+ if cafile is not None:
+ with open(cafile):
+ pass
+
+ self._trust_bundle = cafile or cadata
+
+ def load_cert_chain(self, certfile, keyfile=None, password=None):
+ self._client_cert = certfile
+ self._client_key = keyfile
+        self._client_key_passphrase = password
+
+ def wrap_socket(
+ self,
+ sock,
+ server_side=False,
+ do_handshake_on_connect=True,
+ suppress_ragged_eofs=True,
+ server_hostname=None,
+ ):
+ # So, what do we do here? Firstly, we assert some properties. This is a
+ # stripped down shim, so there is some functionality we don't support.
+ # See PEP 543 for the real deal.
+ assert not server_side
+ assert do_handshake_on_connect
+ assert suppress_ragged_eofs
+
+ # Ok, we're good to go. Now we want to create the wrapped socket object
+ # and store it in the appropriate place.
+ wrapped_socket = WrappedSocket(sock)
+
+ # Now we can handshake
+ wrapped_socket.handshake(
+ server_hostname,
+ self._verify,
+ self._trust_bundle,
+ self._min_version,
+ self._max_version,
+ self._client_cert,
+ self._client_key,
+ self._client_key_passphrase,
+ )
+ return wrapped_socket
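+
+
+# Illustrative sketch (editorial addition, not part of upstream urllib3): a
+# minimal example of driving this context directly. It assumes that
+# ``_protocol_to_min_max`` (populated earlier in this file) has an entry for
+# ``ssl.PROTOCOL_TLS``, and uses a placeholder hostname; real callers normally
+# go through this module's ``inject_into_urllib3()`` hook instead.
+def _example_wrap_socket():
+    context = SecureTransportContext(ssl.PROTOCOL_TLS)
+    context.verify_mode = ssl.CERT_REQUIRED
+
+    # Plain TCP connection first, then the SecureTransport handshake on top.
+    sock = socket.create_connection(("example.invalid", 443))
+    tls_sock = context.wrap_socket(sock, server_hostname="example.invalid")
+
+    # getpeercert() only supports binary form here; the standard library can
+    # convert the DER bytes to PEM for inspection.
+    pem_cert = ssl.DER_cert_to_PEM_cert(tls_sock.getpeercert(binary_form=True))
+    return tls_sock.version(), pem_cert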
diff --git a/third_party/python/urllib3/src/urllib3/contrib/socks.py b/third_party/python/urllib3/src/urllib3/contrib/socks.py
new file mode 100644
index 0000000000..9e97f7aa98
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/contrib/socks.py
@@ -0,0 +1,210 @@
+# -*- coding: utf-8 -*-
+"""
+This module contains provisional support for SOCKS proxies from within
+urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
+SOCKS5. To enable its functionality, either install PySocks or install this
+module with the ``socks`` extra.
+
+The SOCKS implementation supports the full range of urllib3 features. It also
+supports the following SOCKS features:
+
+- SOCKS4A (``proxy_url='socks4a://...'``)
+- SOCKS4 (``proxy_url='socks4://...'``)
+- SOCKS5 with remote DNS (``proxy_url='socks5h://...'``)
+- SOCKS5 with local DNS (``proxy_url='socks5://...'``)
+- Usernames and passwords for the SOCKS proxy
+
+ .. note::
+ It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
+ your ``proxy_url`` to ensure that DNS resolution is done from the remote
+ server instead of client-side when connecting to a domain name.
+
+SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
+supports IPv4, IPv6, and domain names.
+
+When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
+will be sent as the ``userid`` section of the SOCKS request::
+
+ proxy_url="socks4a://<userid>@proxy-host"
+
+When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
+of the ``proxy_url`` will be sent as the username/password to authenticate
+with the proxy::
+
+ proxy_url="socks5h://<username>:<password>@proxy-host"
+
+"""
+from __future__ import absolute_import
+
+try:
+ import socks
+except ImportError:
+ import warnings
+ from ..exceptions import DependencyWarning
+
+ warnings.warn(
+ (
+ "SOCKS support in urllib3 requires the installation of optional "
+ "dependencies: specifically, PySocks. For more information, see "
+ "https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies"
+ ),
+ DependencyWarning,
+ )
+ raise
+
+from socket import error as SocketError, timeout as SocketTimeout
+
+from ..connection import HTTPConnection, HTTPSConnection
+from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from ..exceptions import ConnectTimeoutError, NewConnectionError
+from ..poolmanager import PoolManager
+from ..util.url import parse_url
+
+try:
+ import ssl
+except ImportError:
+ ssl = None
+
+
+class SOCKSConnection(HTTPConnection):
+ """
+ A plain-text HTTP connection that connects via a SOCKS proxy.
+ """
+
+ def __init__(self, *args, **kwargs):
+ self._socks_options = kwargs.pop("_socks_options")
+ super(SOCKSConnection, self).__init__(*args, **kwargs)
+
+ def _new_conn(self):
+ """
+ Establish a new connection via the SOCKS proxy.
+ """
+ extra_kw = {}
+ if self.source_address:
+ extra_kw["source_address"] = self.source_address
+
+ if self.socket_options:
+ extra_kw["socket_options"] = self.socket_options
+
+ try:
+ conn = socks.create_connection(
+ (self.host, self.port),
+ proxy_type=self._socks_options["socks_version"],
+ proxy_addr=self._socks_options["proxy_host"],
+ proxy_port=self._socks_options["proxy_port"],
+ proxy_username=self._socks_options["username"],
+ proxy_password=self._socks_options["password"],
+ proxy_rdns=self._socks_options["rdns"],
+ timeout=self.timeout,
+ **extra_kw
+ )
+
+ except SocketTimeout:
+ raise ConnectTimeoutError(
+ self,
+ "Connection to %s timed out. (connect timeout=%s)"
+ % (self.host, self.timeout),
+ )
+
+ except socks.ProxyError as e:
+ # This is fragile as hell, but it seems to be the only way to raise
+ # useful errors here.
+ if e.socket_err:
+ error = e.socket_err
+ if isinstance(error, SocketTimeout):
+ raise ConnectTimeoutError(
+ self,
+ "Connection to %s timed out. (connect timeout=%s)"
+ % (self.host, self.timeout),
+ )
+ else:
+ raise NewConnectionError(
+ self, "Failed to establish a new connection: %s" % error
+ )
+ else:
+ raise NewConnectionError(
+ self, "Failed to establish a new connection: %s" % e
+ )
+
+ except SocketError as e: # Defensive: PySocks should catch all these.
+ raise NewConnectionError(
+ self, "Failed to establish a new connection: %s" % e
+ )
+
+ return conn
+
+
+# We don't need to duplicate the Verified/Unverified distinction from
+# urllib3/connection.py here because the HTTPSConnection will already have been
+# correctly set to either the Verified or Unverified form by that module. This
+# means the SOCKSHTTPSConnection will automatically be the correct type.
+class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
+ pass
+
+
+class SOCKSHTTPConnectionPool(HTTPConnectionPool):
+ ConnectionCls = SOCKSConnection
+
+
+class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
+ ConnectionCls = SOCKSHTTPSConnection
+
+
+class SOCKSProxyManager(PoolManager):
+ """
+ A version of the urllib3 ProxyManager that routes connections via the
+ defined SOCKS proxy.
+ """
+
+ pool_classes_by_scheme = {
+ "http": SOCKSHTTPConnectionPool,
+ "https": SOCKSHTTPSConnectionPool,
+ }
+
+ def __init__(
+ self,
+ proxy_url,
+ username=None,
+ password=None,
+ num_pools=10,
+ headers=None,
+ **connection_pool_kw
+ ):
+ parsed = parse_url(proxy_url)
+
+ if username is None and password is None and parsed.auth is not None:
+ split = parsed.auth.split(":")
+ if len(split) == 2:
+ username, password = split
+ if parsed.scheme == "socks5":
+ socks_version = socks.PROXY_TYPE_SOCKS5
+ rdns = False
+ elif parsed.scheme == "socks5h":
+ socks_version = socks.PROXY_TYPE_SOCKS5
+ rdns = True
+ elif parsed.scheme == "socks4":
+ socks_version = socks.PROXY_TYPE_SOCKS4
+ rdns = False
+ elif parsed.scheme == "socks4a":
+ socks_version = socks.PROXY_TYPE_SOCKS4
+ rdns = True
+ else:
+ raise ValueError("Unable to determine SOCKS version from %s" % proxy_url)
+
+ self.proxy_url = proxy_url
+
+ socks_options = {
+ "socks_version": socks_version,
+ "proxy_host": parsed.host,
+ "proxy_port": parsed.port,
+ "username": username,
+ "password": password,
+ "rdns": rdns,
+ }
+ connection_pool_kw["_socks_options"] = socks_options
+
+ super(SOCKSProxyManager, self).__init__(
+ num_pools, headers, **connection_pool_kw
+ )
+
+ self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
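+
+
+# Illustrative sketch (editorial addition, not part of upstream urllib3):
+# using the manager defined above with a placeholder proxy URL and target.
+# The ``socks5h`` scheme keeps DNS resolution on the proxy side, as
+# recommended in the module docstring.
+def _example_socks_proxy_manager():
+    proxy = SOCKSProxyManager(
+        "socks5h://user:secret@proxy.example.invalid:1080/",
+        num_pools=5,
+    )
+    # SOCKSProxyManager behaves like a regular PoolManager from here on.
+    return proxy.request("GET", "http://example.invalid/")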
diff --git a/third_party/python/urllib3/src/urllib3/exceptions.py b/third_party/python/urllib3/src/urllib3/exceptions.py
new file mode 100644
index 0000000000..5cc4d8a4f1
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/exceptions.py
@@ -0,0 +1,272 @@
+from __future__ import absolute_import
+from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
+
+# Base Exceptions
+
+
+class HTTPError(Exception):
+ "Base exception used by this module."
+ pass
+
+
+class HTTPWarning(Warning):
+ "Base warning used by this module."
+ pass
+
+
+class PoolError(HTTPError):
+ "Base exception for errors caused within a pool."
+
+ def __init__(self, pool, message):
+ self.pool = pool
+ HTTPError.__init__(self, "%s: %s" % (pool, message))
+
+ def __reduce__(self):
+ # For pickling purposes.
+ return self.__class__, (None, None)
+
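+
+# Illustrative sketch (editorial addition, not part of upstream urllib3): the
+# ``__reduce__`` hook above keeps pool-related errors picklable even though
+# the pool object they reference may not be.
+def _example_pickle_pool_error():
+    import pickle
+
+    err = PoolError(object(), "pool is shutting down")
+    # Unpickling recreates the error as ``PoolError(None, None)``.
+    return pickle.loads(pickle.dumps(err))
+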
+
+class RequestError(PoolError):
+ "Base exception for PoolErrors that have associated URLs."
+
+ def __init__(self, pool, url, message):
+ self.url = url
+ PoolError.__init__(self, pool, message)
+
+ def __reduce__(self):
+ # For pickling purposes.
+ return self.__class__, (None, self.url, None)
+
+
+class SSLError(HTTPError):
+ "Raised when SSL certificate fails in an HTTPS connection."
+ pass
+
+
+class ProxyError(HTTPError):
+ "Raised when the connection to a proxy fails."
+
+ def __init__(self, message, error, *args):
+ super(ProxyError, self).__init__(message, error, *args)
+ self.original_error = error
+
+
+class DecodeError(HTTPError):
+ "Raised when automatic decoding based on Content-Type fails."
+ pass
+
+
+class ProtocolError(HTTPError):
+ "Raised when something unexpected happens mid-request/response."
+ pass
+
+
+#: Renamed to ProtocolError but aliased for backwards compatibility.
+ConnectionError = ProtocolError
+
+
+# Leaf Exceptions
+
+
+class MaxRetryError(RequestError):
+ """Raised when the maximum number of retries is exceeded.
+
+ :param pool: The connection pool
+ :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
+    :param string url: The requested URL
+ :param exceptions.Exception reason: The underlying error
+
+ """
+
+ def __init__(self, pool, url, reason=None):
+ self.reason = reason
+
+ message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason)
+
+ RequestError.__init__(self, pool, url, message)
+
+
+class HostChangedError(RequestError):
+ "Raised when an existing pool gets a request for a foreign host."
+
+ def __init__(self, pool, url, retries=3):
+ message = "Tried to open a foreign host with url: %s" % url
+ RequestError.__init__(self, pool, url, message)
+ self.retries = retries
+
+
+class TimeoutStateError(HTTPError):
+ """ Raised when passing an invalid state to a timeout """
+
+ pass
+
+
+class TimeoutError(HTTPError):
+ """ Raised when a socket timeout error occurs.
+
+ Catching this error will catch both :exc:`ReadTimeoutErrors
+ <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
+ """
+
+ pass
+
+
+class ReadTimeoutError(TimeoutError, RequestError):
+ "Raised when a socket timeout occurs while receiving data from a server"
+ pass
+
+
+# This timeout error does not have a URL attached and needs to inherit from the
+# base HTTPError
+class ConnectTimeoutError(TimeoutError):
+ "Raised when a socket timeout occurs while connecting to a server"
+ pass
+
+
+class NewConnectionError(ConnectTimeoutError, PoolError):
+ "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
+ pass
+
+
+class EmptyPoolError(PoolError):
+ "Raised when a pool runs out of connections and no more are allowed."
+ pass
+
+
+class ClosedPoolError(PoolError):
+ "Raised when a request enters a pool after the pool has been closed."
+ pass
+
+
+class LocationValueError(ValueError, HTTPError):
+ "Raised when there is something wrong with a given URL input."
+ pass
+
+
+class LocationParseError(LocationValueError):
+ "Raised when get_host or similar fails to parse the URL input."
+
+ def __init__(self, location):
+ message = "Failed to parse: %s" % location
+ HTTPError.__init__(self, message)
+
+ self.location = location
+
+
+class ResponseError(HTTPError):
+ "Used as a container for an error reason supplied in a MaxRetryError."
+ GENERIC_ERROR = "too many error responses"
+ SPECIFIC_ERROR = "too many {status_code} error responses"
+
+
+class SecurityWarning(HTTPWarning):
+ "Warned when performing security reducing actions"
+ pass
+
+
+class SubjectAltNameWarning(SecurityWarning):
+ "Warned when connecting to a host with a certificate missing a SAN."
+ pass
+
+
+class InsecureRequestWarning(SecurityWarning):
+ "Warned when making an unverified HTTPS request."
+ pass
+
+
+class SystemTimeWarning(SecurityWarning):
+ "Warned when system time is suspected to be wrong"
+ pass
+
+
+class InsecurePlatformWarning(SecurityWarning):
+ "Warned when certain SSL configuration is not available on a platform."
+ pass
+
+
+class SNIMissingWarning(HTTPWarning):
+ "Warned when making a HTTPS request without SNI available."
+ pass
+
+
+class DependencyWarning(HTTPWarning):
+ """
+ Warned when an attempt is made to import a module with missing optional
+ dependencies.
+ """
+
+ pass
+
+
+class InvalidProxyConfigurationWarning(HTTPWarning):
+ """
+ Warned when using an HTTPS proxy and an HTTPS URL. Currently
+ urllib3 doesn't support HTTPS proxies and the proxy will be
+ contacted via HTTP instead. This warning can be fixed by
+ changing your HTTPS proxy URL into an HTTP proxy URL.
+
+ If you encounter this warning read this:
+ https://github.com/urllib3/urllib3/issues/1850
+ """
+
+ pass
+
+
+class ResponseNotChunked(ProtocolError, ValueError):
+ "Response needs to be chunked in order to read it as chunks."
+ pass
+
+
+class BodyNotHttplibCompatible(HTTPError):
+ """
+ Body should be httplib.HTTPResponse like (have an fp attribute which
+ returns raw chunks) for read_chunked().
+ """
+
+ pass
+
+
+class IncompleteRead(HTTPError, httplib_IncompleteRead):
+ """
+ Response length doesn't match expected Content-Length
+
+ Subclass of http_client.IncompleteRead to allow int value
+ for `partial` to avoid creating large objects on streamed
+ reads.
+ """
+
+ def __init__(self, partial, expected):
+ super(IncompleteRead, self).__init__(partial, expected)
+
+ def __repr__(self):
+ return "IncompleteRead(%i bytes read, %i more expected)" % (
+ self.partial,
+ self.expected,
+ )
+
+
+class InvalidHeader(HTTPError):
+ "The header provided was somehow invalid."
+ pass
+
+
+class ProxySchemeUnknown(AssertionError, ValueError):
+ "ProxyManager does not support the supplied scheme"
+ # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
+
+ def __init__(self, scheme):
+ message = "Not supported proxy scheme %s" % scheme
+ super(ProxySchemeUnknown, self).__init__(message)
+
+
+class HeaderParsingError(HTTPError):
+ "Raised by assert_header_parsing, but we convert it to a log.warning statement."
+
+ def __init__(self, defects, unparsed_data):
+ message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
+ super(HeaderParsingError, self).__init__(message)
+
+
+class UnrewindableBodyError(HTTPError):
+ "urllib3 encountered an error when trying to rewind a body"
+ pass
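+
+
+# Illustrative sketch (editorial addition, not part of upstream urllib3): most
+# of the error classes above derive from ``HTTPError`` (the warnings derive
+# from ``HTTPWarning``), so callers can catch the whole family at once while
+# still inspecting specific attributes such as ``MaxRetryError.reason``.
+def _example_catch_urllib3_errors():
+    try:
+        raise MaxRetryError(None, "http://example.invalid/", reason=None)
+    except HTTPError as exc:
+        return getattr(exc, "reason", None)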
diff --git a/third_party/python/urllib3/src/urllib3/fields.py b/third_party/python/urllib3/src/urllib3/fields.py
new file mode 100644
index 0000000000..8715b2202b
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/fields.py
@@ -0,0 +1,273 @@
+from __future__ import absolute_import
+import email.utils
+import mimetypes
+import re
+
+from .packages import six
+
+
+def guess_content_type(filename, default="application/octet-stream"):
+ """
+ Guess the "Content-Type" of a file.
+
+ :param filename:
+ The filename to guess the "Content-Type" of using :mod:`mimetypes`.
+ :param default:
+ If no "Content-Type" can be guessed, default to `default`.
+ """
+ if filename:
+ return mimetypes.guess_type(filename)[0] or default
+ return default
+
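+
+# Illustrative sketch (editorial addition, not part of upstream urllib3):
+# ``mimetypes`` resolves by file extension, and unknown extensions fall back
+# to the supplied default.
+def _example_guess_content_type():
+    return (
+        guess_content_type("report.pdf"),  # "application/pdf"
+        guess_content_type("data.unknownext"),  # "application/octet-stream"
+    )
+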
+
+def format_header_param_rfc2231(name, value):
+ """
+ Helper function to format and quote a single header parameter using the
+ strategy defined in RFC 2231.
+
+ Particularly useful for header parameters which might contain
+ non-ASCII values, like file names. This follows RFC 2388 Section 4.4.
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+        The value of the parameter, provided as ``bytes`` or ``str``.
+ :ret:
+ An RFC-2231-formatted unicode string.
+ """
+ if isinstance(value, six.binary_type):
+ value = value.decode("utf-8")
+
+ if not any(ch in value for ch in '"\\\r\n'):
+ result = u'%s="%s"' % (name, value)
+ try:
+ result.encode("ascii")
+ except (UnicodeEncodeError, UnicodeDecodeError):
+ pass
+ else:
+ return result
+
+ if six.PY2: # Python 2:
+ value = value.encode("utf-8")
+
+ # encode_rfc2231 accepts an encoded string and returns an ascii-encoded
+ # string in Python 2 but accepts and returns unicode strings in Python 3
+ value = email.utils.encode_rfc2231(value, "utf-8")
+ value = "%s*=%s" % (name, value)
+
+ if six.PY2: # Python 2:
+ value = value.decode("utf-8")
+
+ return value
+
+
+_HTML5_REPLACEMENTS = {
+ u"\u0022": u"%22",
+ # Replace "\" with "\\".
+ u"\u005C": u"\u005C\u005C",
+ u"\u005C": u"\u005C\u005C",
+}
+
+# All control characters from 0x00 to 0x1F *except* 0x1B.
+_HTML5_REPLACEMENTS.update(
+ {
+ six.unichr(cc): u"%{:02X}".format(cc)
+ for cc in range(0x00, 0x1F + 1)
+ if cc not in (0x1B,)
+ }
+)
+
+
+def _replace_multiple(value, needles_and_replacements):
+ def replacer(match):
+ return needles_and_replacements[match.group(0)]
+
+ pattern = re.compile(
+ r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()])
+ )
+
+ result = pattern.sub(replacer, value)
+
+ return result
+
+
+def format_header_param_html5(name, value):
+ """
+ Helper function to format and quote a single header parameter using the
+ HTML5 strategy.
+
+ Particularly useful for header parameters which might contain
+ non-ASCII values, like file names. This follows the `HTML5 Working Draft
+ Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
+
+ .. _HTML5 Working Draft Section 4.10.22.7:
+ https://w3c.github.io/html/sec-forms.html#multipart-form-data
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+        The value of the parameter, provided as ``bytes`` or ``str``.
+ :ret:
+ A unicode string, stripped of troublesome characters.
+ """
+ if isinstance(value, six.binary_type):
+ value = value.decode("utf-8")
+
+ value = _replace_multiple(value, _HTML5_REPLACEMENTS)
+
+ return u'%s="%s"' % (name, value)
+
+
+# For backwards-compatibility.
+format_header_param = format_header_param_html5
+
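+
+# Illustrative sketch (editorial addition, not part of upstream urllib3): the
+# HTML5 strategy percent-encodes double quotes and escapes backslashes rather
+# than emitting RFC 2231 continuations, matching curl and modern browsers.
+def _example_format_header_param():
+    # u'filename="na%22me.txt"'
+    quoted = format_header_param_html5(u"filename", u'na"me.txt')
+    # u"filename*=utf-8''na%22me.txt" style output, by contrast
+    rfc2231 = format_header_param_rfc2231(u"filename", u'na"me.txt')
+    return quoted, rfc2231
+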
+
+class RequestField(object):
+ """
+ A data container for request body parameters.
+
+ :param name:
+ The name of this request field. Must be unicode.
+ :param data:
+ The data/value body.
+ :param filename:
+ An optional filename of the request field. Must be unicode.
+ :param headers:
+ An optional dict-like object of headers to initially use for the field.
+ :param header_formatter:
+ An optional callable that is used to encode and format the headers. By
+ default, this is :func:`format_header_param_html5`.
+ """
+
+ def __init__(
+ self,
+ name,
+ data,
+ filename=None,
+ headers=None,
+ header_formatter=format_header_param_html5,
+ ):
+ self._name = name
+ self._filename = filename
+ self.data = data
+ self.headers = {}
+ if headers:
+ self.headers = dict(headers)
+ self.header_formatter = header_formatter
+
+ @classmethod
+ def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5):
+ """
+ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
+
+ Supports constructing :class:`~urllib3.fields.RequestField` from
+ parameter of key/value strings AND key/filetuple. A filetuple is a
+ (filename, data, MIME type) tuple where the MIME type is optional.
+ For example::
+
+ 'foo': 'bar',
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
+ 'realfile': ('barfile.txt', open('realfile').read()),
+ 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
+ 'nonamefile': 'contents of nonamefile field',
+
+ Field names and filenames must be unicode.
+ """
+ if isinstance(value, tuple):
+ if len(value) == 3:
+ filename, data, content_type = value
+ else:
+ filename, data = value
+ content_type = guess_content_type(filename)
+ else:
+ filename = None
+ content_type = None
+ data = value
+
+ request_param = cls(
+ fieldname, data, filename=filename, header_formatter=header_formatter
+ )
+ request_param.make_multipart(content_type=content_type)
+
+ return request_param
+
+ def _render_part(self, name, value):
+ """
+ Overridable helper function to format a single header parameter. By
+ default, this calls ``self.header_formatter``.
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+ The value of the parameter, provided as a unicode string.
+ """
+
+ return self.header_formatter(name, value)
+
+ def _render_parts(self, header_parts):
+ """
+ Helper function to format and quote a single header.
+
+ Useful for single headers that are composed of multiple items. E.g.,
+ 'Content-Disposition' fields.
+
+ :param header_parts:
+ A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
+ as `k1="v1"; k2="v2"; ...`.
+ """
+ parts = []
+ iterable = header_parts
+ if isinstance(header_parts, dict):
+ iterable = header_parts.items()
+
+ for name, value in iterable:
+ if value is not None:
+ parts.append(self._render_part(name, value))
+
+ return u"; ".join(parts)
+
+ def render_headers(self):
+ """
+ Renders the headers for this request field.
+ """
+ lines = []
+
+ sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"]
+ for sort_key in sort_keys:
+ if self.headers.get(sort_key, False):
+ lines.append(u"%s: %s" % (sort_key, self.headers[sort_key]))
+
+ for header_name, header_value in self.headers.items():
+ if header_name not in sort_keys:
+ if header_value:
+ lines.append(u"%s: %s" % (header_name, header_value))
+
+ lines.append(u"\r\n")
+ return u"\r\n".join(lines)
+
+ def make_multipart(
+ self, content_disposition=None, content_type=None, content_location=None
+ ):
+ """
+ Makes this request field into a multipart request field.
+
+        This method sets the "Content-Disposition", "Content-Type" and
+        "Content-Location" headers on the request parameter.
+
+ :param content_type:
+ The 'Content-Type' of the request body.
+ :param content_location:
+ The 'Content-Location' of the request body.
+
+ """
+ self.headers["Content-Disposition"] = content_disposition or u"form-data"
+ self.headers["Content-Disposition"] += u"; ".join(
+ [
+ u"",
+ self._render_parts(
+ ((u"name", self._name), (u"filename", self._filename))
+ ),
+ ]
+ )
+ self.headers["Content-Type"] = content_type
+ self.headers["Content-Location"] = content_location
diff --git a/third_party/python/urllib3/src/urllib3/filepost.py b/third_party/python/urllib3/src/urllib3/filepost.py
new file mode 100644
index 0000000000..b7b00992c6
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/filepost.py
@@ -0,0 +1,98 @@
+from __future__ import absolute_import
+import binascii
+import codecs
+import os
+
+from io import BytesIO
+
+from .packages import six
+from .packages.six import b
+from .fields import RequestField
+
+writer = codecs.lookup("utf-8")[3]
+
+
+def choose_boundary():
+ """
+ Our embarrassingly-simple replacement for mimetools.choose_boundary.
+ """
+ boundary = binascii.hexlify(os.urandom(16))
+ if not six.PY2:
+ boundary = boundary.decode("ascii")
+ return boundary
+
+
+def iter_field_objects(fields):
+ """
+ Iterate over fields.
+
+ Supports list of (k, v) tuples and dicts, and lists of
+ :class:`~urllib3.fields.RequestField`.
+
+ """
+ if isinstance(fields, dict):
+ i = six.iteritems(fields)
+ else:
+ i = iter(fields)
+
+ for field in i:
+ if isinstance(field, RequestField):
+ yield field
+ else:
+ yield RequestField.from_tuples(*field)
+
+
+def iter_fields(fields):
+ """
+ .. deprecated:: 1.6
+
+ Iterate over fields.
+
+ The addition of :class:`~urllib3.fields.RequestField` makes this function
+ obsolete. Instead, use :func:`iter_field_objects`, which returns
+ :class:`~urllib3.fields.RequestField` objects.
+
+ Supports list of (k, v) tuples and dicts.
+ """
+ if isinstance(fields, dict):
+ return ((k, v) for k, v in six.iteritems(fields))
+
+ return ((k, v) for k, v in fields)
+
+
+def encode_multipart_formdata(fields, boundary=None):
+ """
+ Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
+
+ :param fields:
+ Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
+
+ :param boundary:
+ If not specified, then a random boundary will be generated using
+ :func:`urllib3.filepost.choose_boundary`.
+ """
+ body = BytesIO()
+ if boundary is None:
+ boundary = choose_boundary()
+
+ for field in iter_field_objects(fields):
+ body.write(b("--%s\r\n" % (boundary)))
+
+ writer(body).write(field.render_headers())
+ data = field.data
+
+ if isinstance(data, int):
+ data = str(data) # Backwards compatibility
+
+ if isinstance(data, six.text_type):
+ writer(body).write(data)
+ else:
+ body.write(data)
+
+ body.write(b"\r\n")
+
+ body.write(b("--%s--\r\n" % (boundary)))
+
+ content_type = str("multipart/form-data; boundary=%s" % boundary)
+
+ return body.getvalue(), content_type
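+
+
+# Illustrative sketch (editorial addition, not part of upstream urllib3):
+# encoding a simple text field plus an in-memory file. The returned
+# content_type carries the generated boundary and is what callers put in the
+# Content-Type request header.
+def _example_encode_formdata():
+    fields = {
+        "token": "abc123",
+        "upload": ("report.txt", b"hello world", "text/plain"),
+    }
+    body, content_type = encode_multipart_formdata(fields)
+    return body, content_type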
diff --git a/third_party/python/urllib3/src/urllib3/packages/__init__.py b/third_party/python/urllib3/src/urllib3/packages/__init__.py
new file mode 100644
index 0000000000..fce4caa65d
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/packages/__init__.py
@@ -0,0 +1,5 @@
+from __future__ import absolute_import
+
+from . import ssl_match_hostname
+
+__all__ = ("ssl_match_hostname",)
diff --git a/third_party/python/urllib3/src/urllib3/packages/backports/__init__.py b/third_party/python/urllib3/src/urllib3/packages/backports/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/packages/backports/__init__.py
diff --git a/third_party/python/urllib3/src/urllib3/packages/backports/makefile.py b/third_party/python/urllib3/src/urllib3/packages/backports/makefile.py
new file mode 100644
index 0000000000..a3156a69c0
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/packages/backports/makefile.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+"""
+backports.makefile
+~~~~~~~~~~~~~~~~~~
+
+Backports the Python 3 ``socket.makefile`` method for use with anything that
+wants to create a "fake" socket object.
+"""
+import io
+
+from socket import SocketIO
+
+
+def backport_makefile(
+ self, mode="r", buffering=None, encoding=None, errors=None, newline=None
+):
+ """
+ Backport of ``socket.makefile`` from Python 3.5.
+ """
+ if not set(mode) <= {"r", "w", "b"}:
+ raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
+ writing = "w" in mode
+ reading = "r" in mode or not writing
+ assert reading or writing
+ binary = "b" in mode
+ rawmode = ""
+ if reading:
+ rawmode += "r"
+ if writing:
+ rawmode += "w"
+ raw = SocketIO(self, rawmode)
+ self._makefile_refs += 1
+ if buffering is None:
+ buffering = -1
+ if buffering < 0:
+ buffering = io.DEFAULT_BUFFER_SIZE
+ if buffering == 0:
+ if not binary:
+ raise ValueError("unbuffered streams must be binary")
+ return raw
+ if reading and writing:
+ buffer = io.BufferedRWPair(raw, raw, buffering)
+ elif reading:
+ buffer = io.BufferedReader(raw, buffering)
+ else:
+ assert writing
+ buffer = io.BufferedWriter(raw, buffering)
+ if binary:
+ return buffer
+ text = io.TextIOWrapper(buffer, encoding, errors, newline)
+ text.mode = mode
+ return text
diff --git a/third_party/python/urllib3/src/urllib3/packages/six.py b/third_party/python/urllib3/src/urllib3/packages/six.py
new file mode 100644
index 0000000000..314424099f
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/packages/six.py
@@ -0,0 +1,1021 @@
+# Copyright (c) 2010-2019 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.12.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = (str,)
+ integer_types = (int,)
+ class_types = (type,)
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = (basestring,)
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+ def __len__(self):
+ return 1 << 31
+
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP302 finder and loader. It should be compatible
+    with Python 2.5 and all existing versions of Python 3.
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+ Return true, if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP451)
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required, if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+
+ get_source = get_code # same as get_code
+
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute(
+ "filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"
+ ),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("getoutput", "commands", "subprocess"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute(
+ "reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"
+ ),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute(
+ "zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"
+ ),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule(
+ "email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"
+ ),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [MovedModule("winreg", "_winreg")]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute(
+ "unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"
+ ),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(
+ Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse",
+ "moves.urllib.parse",
+)
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(
+ Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error",
+ "moves.urllib.error",
+)
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+ MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(
+ Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request",
+ "moves.urllib.request",
+)
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(
+ Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response",
+ "moves.urllib.response",
+)
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser")
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = (
+ _urllib_robotparser_moved_attributes
+)
+
+_importer._add_module(
+ Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser",
+ "moves.urllib.robotparser",
+)
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ["parse", "error", "request", "response", "robotparser"]
+
+
+_importer._add_module(
+ Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib"
+)
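+
+
+# A minimal usage sketch (illustrative only, not from upstream six): the lazy
+# namespace registered above lets Python-2/3 code share one import path;
+# urllib3 itself imports it as e.g.
+#
+#     from .packages.six.moves.urllib.parse import urlencode
+#     assert urlencode({"q": "a b"}) == "q=a+b"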
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+
+ def advance_iterator(it):
+ return it.next()
+
+
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(
+ get_unbound_function, """Get the function out of a possibly unbound function"""
+)
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(
+ iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary."
+)
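+
+
+# A minimal usage sketch (illustrative only, not from upstream six): the iter*
+# and view* helpers give lazy/view-style dictionary access on both majors, e.g.
+#
+#     d = {"a": 1, "b": 2}
+#     assert sorted(iteritems(d)) == [("a", 1), ("b", 2)]
+#     assert set(viewkeys(d)) == {"a", "b"}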
+
+
+if PY3:
+
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+
+ unichr = chr
+ import struct
+
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ del io
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+else:
+
+ def b(s):
+ return s
+
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape")
+
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
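+
+
+# A minimal usage sketch (illustrative only, not from upstream six): b() and
+# u() stand in for byte/text literals, and int2byte builds a single byte, with
+# the same result on both majors, e.g.
+#
+#     assert b("abc") == b"abc"
+#     assert u("abc") == u"abc"
+#     assert int2byte(65) == b"A"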
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ try:
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+ finally:
+ value = None
+ tb = None
+
+
+else:
+
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+ exec_(
+ """def reraise(tp, value, tb=None):
+ try:
+ raise tp, value, tb
+ finally:
+ tb = None
+"""
+ )
+
+
+if sys.version_info[:2] == (3, 2):
+ exec_(
+ """def raise_from(value, from_value):
+ try:
+ if from_value is None:
+ raise value
+ raise value from from_value
+ finally:
+ value = None
+"""
+ )
+elif sys.version_info[:2] > (3, 2):
+ exec_(
+ """def raise_from(value, from_value):
+ try:
+ raise value from from_value
+ finally:
+ value = None
+"""
+ )
+else:
+
+ def raise_from(value, from_value):
+ raise value
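+
+
+# A minimal usage sketch (illustrative only, not from upstream six): reraise
+# re-raises a (tp, value, tb) triple as returned by sys.exc_info(), and
+# raise_from spells PEP 3134 exception chaining where the interpreter
+# supports it, e.g.
+#
+#     try:
+#         {}["missing"]
+#     except KeyError:
+#         _, err, _ = sys.exc_info()
+#         raise_from(ValueError("lookup failed"), err)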
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (
+ isinstance(fp, file)
+ and isinstance(data, unicode)
+ and fp.encoding is not None
+ ):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+
+
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+
+ def wraps(
+ wrapped,
+ assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES,
+ ):
+ def wrapper(f):
+ f = functools.wraps(wrapped, assigned, updated)(f)
+ f.__wrapped__ = wrapped
+ return f
+
+ return wrapper
+
+
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(type):
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+
+ @classmethod
+ def __prepare__(cls, name, this_bases):
+ return meta.__prepare__(name, bases)
+
+ return type.__new__(metaclass, "temporary_class", (), {})
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get("__slots__")
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop("__dict__", None)
+ orig_vars.pop("__weakref__", None)
+ if hasattr(cls, "__qualname__"):
+ orig_vars["__qualname__"] = cls.__qualname__
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+
+ return wrapper
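+
+
+# A minimal usage sketch (illustrative only, not from upstream six): both
+# helpers attach a metaclass with syntax that parses on Python 2 and 3, e.g.
+#
+#     class Meta(type):
+#         pass
+#
+#     class Base(with_metaclass(Meta, object)):
+#         pass
+#
+#     @add_metaclass(Meta)
+#     class Decorated(object):
+#         pass
+#
+#     assert type(Base) is Meta and type(Decorated) is Meta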
+
+
+def ensure_binary(s, encoding="utf-8", errors="strict"):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ elif isinstance(s, binary_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding="utf-8", errors="strict"):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ if PY2 and isinstance(s, text_type):
+ s = s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ s = s.decode(encoding, errors)
+ return s
+
+
+def ensure_text(s, encoding="utf-8", errors="strict"):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
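+
+
+# A minimal usage sketch (illustrative only, not from upstream six): the
+# ensure_* helpers coerce values to bytes / native str / text with the same
+# result on either major, e.g.
+#
+#     assert ensure_binary(u"abc") == b"abc"
+#     assert ensure_text(b"abc") == u"abc"
+#     assert isinstance(ensure_str(u"abc"), str)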
+
+
+def python_2_unicode_compatible(klass):
+ """
+ A decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if "__str__" not in klass.__dict__:
+ raise ValueError(
+ "@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." % klass.__name__
+ )
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode("utf-8")
+ return klass
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+        # inserted an importer with a different class.
+ if (
+ type(importer).__name__ == "_SixMetaPathImporter"
+ and importer.name == __name__
+ ):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/third_party/python/urllib3/src/urllib3/packages/ssl_match_hostname/__init__.py b/third_party/python/urllib3/src/urllib3/packages/ssl_match_hostname/__init__.py
new file mode 100644
index 0000000000..75b6bb1cf0
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/packages/ssl_match_hostname/__init__.py
@@ -0,0 +1,19 @@
+import sys
+
+try:
+ # Our match_hostname function is the same as 3.5's, so we only want to
+ # import the match_hostname function if it's at least that good.
+ if sys.version_info < (3, 5):
+ raise ImportError("Fallback to vendored code")
+
+ from ssl import CertificateError, match_hostname
+except ImportError:
+ try:
+ # Backport of the function from a pypi module
+ from backports.ssl_match_hostname import CertificateError, match_hostname
+ except ImportError:
+ # Our vendored copy
+ from ._implementation import CertificateError, match_hostname
+
+# Not needed, but documenting what we provide.
+__all__ = ("CertificateError", "match_hostname")
diff --git a/third_party/python/urllib3/src/urllib3/packages/ssl_match_hostname/_implementation.py b/third_party/python/urllib3/src/urllib3/packages/ssl_match_hostname/_implementation.py
new file mode 100644
index 0000000000..689208d3c6
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/packages/ssl_match_hostname/_implementation.py
@@ -0,0 +1,160 @@
+"""The match_hostname() function from Python 3.3.3, essential when using SSL."""
+
+# Note: This file is under the PSF license as the code comes from the python
+# stdlib. http://docs.python.org/3/license.html
+
+import re
+import sys
+
+# ipaddress has been backported to 2.6+ in pypi. If it is installed on the
+# system, use it to handle IPAddress ServerAltnames (this was added in
+# python-3.5); otherwise, only do DNS matching. This allows
+# backports.ssl_match_hostname to continue to be used in Python 2.7.
+try:
+ import ipaddress
+except ImportError:
+ ipaddress = None
+
+__version__ = "3.5.0.1"
+
+
+class CertificateError(ValueError):
+ pass
+
+
+def _dnsname_match(dn, hostname, max_wildcards=1):
+ """Matching according to RFC 6125, section 6.4.3
+
+ http://tools.ietf.org/html/rfc6125#section-6.4.3
+ """
+ pats = []
+ if not dn:
+ return False
+
+ # Ported from python3-syntax:
+ # leftmost, *remainder = dn.split(r'.')
+ parts = dn.split(r".")
+ leftmost = parts[0]
+ remainder = parts[1:]
+
+ wildcards = leftmost.count("*")
+ if wildcards > max_wildcards:
+ # Issue #17980: avoid denials of service by refusing more
+ # than one wildcard per fragment. A survey of established
+ # policy among SSL implementations showed it to be a
+ # reasonable choice.
+ raise CertificateError(
+ "too many wildcards in certificate DNS name: " + repr(dn)
+ )
+
+ # speed up common case w/o wildcards
+ if not wildcards:
+ return dn.lower() == hostname.lower()
+
+ # RFC 6125, section 6.4.3, subitem 1.
+ # The client SHOULD NOT attempt to match a presented identifier in which
+ # the wildcard character comprises a label other than the left-most label.
+ if leftmost == "*":
+ # When '*' is a fragment by itself, it matches a non-empty dotless
+ # fragment.
+ pats.append("[^.]+")
+ elif leftmost.startswith("xn--") or hostname.startswith("xn--"):
+ # RFC 6125, section 6.4.3, subitem 3.
+ # The client SHOULD NOT attempt to match a presented identifier
+ # where the wildcard character is embedded within an A-label or
+ # U-label of an internationalized domain name.
+ pats.append(re.escape(leftmost))
+ else:
+ # Otherwise, '*' matches any dotless string, e.g. www*
+ pats.append(re.escape(leftmost).replace(r"\*", "[^.]*"))
+
+ # add the remaining fragments, ignore any wildcards
+ for frag in remainder:
+ pats.append(re.escape(frag))
+
+ pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE)
+ return pat.match(hostname)
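+
+
+# A minimal usage sketch (illustrative only, not from upstream): a wildcard
+# matches exactly one left-most label, so
+#
+#     assert _dnsname_match("*.example.com", "www.example.com")
+#     assert not _dnsname_match("*.example.com", "a.b.example.com")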
+
+
+def _to_unicode(obj):
+ if isinstance(obj, str) and sys.version_info < (3,):
+ obj = unicode(obj, encoding="ascii", errors="strict")
+ return obj
+
+
+def _ipaddress_match(ipname, host_ip):
+ """Exact matching of IP addresses.
+
+ RFC 6125 explicitly doesn't define an algorithm for this
+ (section 1.7.2 - "Out of Scope").
+ """
+ # OpenSSL may add a trailing newline to a subjectAltName's IP address
+ # Divergence from upstream: ipaddress can't handle byte str
+ ip = ipaddress.ip_address(_to_unicode(ipname).rstrip())
+ return ip == host_ip
+
+
+def match_hostname(cert, hostname):
+ """Verify that *cert* (in decoded format as returned by
+ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
+ rules are followed, but IP addresses are not accepted for *hostname*.
+
+ CertificateError is raised on failure. On success, the function
+ returns nothing.
+ """
+ if not cert:
+ raise ValueError(
+ "empty or no certificate, match_hostname needs a "
+ "SSL socket or SSL context with either "
+ "CERT_OPTIONAL or CERT_REQUIRED"
+ )
+ try:
+ # Divergence from upstream: ipaddress can't handle byte str
+ host_ip = ipaddress.ip_address(_to_unicode(hostname))
+ except ValueError:
+ # Not an IP address (common case)
+ host_ip = None
+ except UnicodeError:
+ # Divergence from upstream: Have to deal with ipaddress not taking
+ # byte strings. addresses should be all ascii, so we consider it not
+ # an ipaddress in this case
+ host_ip = None
+ except AttributeError:
+ # Divergence from upstream: Make ipaddress library optional
+ if ipaddress is None:
+ host_ip = None
+ else:
+ raise
+ dnsnames = []
+ san = cert.get("subjectAltName", ())
+ for key, value in san:
+ if key == "DNS":
+ if host_ip is None and _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ elif key == "IP Address":
+ if host_ip is not None and _ipaddress_match(value, host_ip):
+ return
+ dnsnames.append(value)
+ if not dnsnames:
+ # The subject is only checked when there is no dNSName entry
+ # in subjectAltName
+ for sub in cert.get("subject", ()):
+ for key, value in sub:
+ # XXX according to RFC 2818, the most specific Common Name
+ # must be used.
+ if key == "commonName":
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if len(dnsnames) > 1:
+ raise CertificateError(
+ "hostname %r "
+ "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames)))
+ )
+ elif len(dnsnames) == 1:
+ raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
+ else:
+ raise CertificateError(
+ "no appropriate commonName or subjectAltName fields were found"
+ )
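+
+
+# A minimal usage sketch (illustrative only, not from upstream): a decoded cert
+# dict in the shape returned by SSLSocket.getpeercert() is checked against its
+# subjectAltName entries; success returns None, a mismatch raises, e.g.
+#
+#     cert = {"subjectAltName": (("DNS", "*.example.com"),)}
+#     match_hostname(cert, "www.example.com")   # returns None
+#     # match_hostname(cert, "other.org")       # raises CertificateError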
diff --git a/third_party/python/urllib3/src/urllib3/poolmanager.py b/third_party/python/urllib3/src/urllib3/poolmanager.py
new file mode 100644
index 0000000000..e2bd3bd8db
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/poolmanager.py
@@ -0,0 +1,492 @@
+from __future__ import absolute_import
+import collections
+import functools
+import logging
+import warnings
+
+from ._collections import RecentlyUsedContainer
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from .connectionpool import port_by_scheme
+from .exceptions import (
+ LocationValueError,
+ MaxRetryError,
+ ProxySchemeUnknown,
+ InvalidProxyConfigurationWarning,
+)
+from .packages import six
+from .packages.six.moves.urllib.parse import urljoin
+from .request import RequestMethods
+from .util.url import parse_url
+from .util.retry import Retry
+
+
+__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
+
+
+log = logging.getLogger(__name__)
+
+SSL_KEYWORDS = (
+ "key_file",
+ "cert_file",
+ "cert_reqs",
+ "ca_certs",
+ "ssl_version",
+ "ca_cert_dir",
+ "ssl_context",
+ "key_password",
+)
+
+# All known keyword arguments that could be provided to the pool manager, its
+# pools, or the underlying connections. This is used to construct a pool key.
+_key_fields = (
+ "key_scheme", # str
+ "key_host", # str
+ "key_port", # int
+ "key_timeout", # int or float or Timeout
+ "key_retries", # int or Retry
+ "key_strict", # bool
+ "key_block", # bool
+ "key_source_address", # str
+ "key_key_file", # str
+ "key_key_password", # str
+ "key_cert_file", # str
+ "key_cert_reqs", # str
+ "key_ca_certs", # str
+ "key_ssl_version", # str
+ "key_ca_cert_dir", # str
+ "key_ssl_context", # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
+ "key_maxsize", # int
+ "key_headers", # dict
+ "key__proxy", # parsed proxy url
+ "key__proxy_headers", # dict
+ "key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples
+ "key__socks_options", # dict
+ "key_assert_hostname", # bool or string
+ "key_assert_fingerprint", # str
+ "key_server_hostname", # str
+)
+
+#: The namedtuple class used to construct keys for the connection pool.
+#: All custom key schemes should include the fields in this key at a minimum.
+PoolKey = collections.namedtuple("PoolKey", _key_fields)
+
+
+def _default_key_normalizer(key_class, request_context):
+ """
+ Create a pool key out of a request context dictionary.
+
+ According to RFC 3986, both the scheme and host are case-insensitive.
+ Therefore, this function normalizes both before constructing the pool
+ key for an HTTPS request. If you wish to change this behaviour, provide
+ alternate callables to ``key_fn_by_scheme``.
+
+ :param key_class:
+ The class to use when constructing the key. This should be a namedtuple
+ with the ``scheme`` and ``host`` keys at a minimum.
+ :type key_class: namedtuple
+ :param request_context:
+    A dictionary-like object that contains the context for a request.
+ :type request_context: dict
+
+ :return: A namedtuple that can be used as a connection pool key.
+ :rtype: PoolKey
+ """
+ # Since we mutate the dictionary, make a copy first
+ context = request_context.copy()
+ context["scheme"] = context["scheme"].lower()
+ context["host"] = context["host"].lower()
+
+ # These are both dictionaries and need to be transformed into frozensets
+ for key in ("headers", "_proxy_headers", "_socks_options"):
+ if key in context and context[key] is not None:
+ context[key] = frozenset(context[key].items())
+
+ # The socket_options key may be a list and needs to be transformed into a
+ # tuple.
+ socket_opts = context.get("socket_options")
+ if socket_opts is not None:
+ context["socket_options"] = tuple(socket_opts)
+
+ # Map the kwargs to the names in the namedtuple - this is necessary since
+ # namedtuples can't have fields starting with '_'.
+ for key in list(context.keys()):
+ context["key_" + key] = context.pop(key)
+
+ # Default to ``None`` for keys missing from the context
+ for field in key_class._fields:
+ if field not in context:
+ context[field] = None
+
+ return key_class(**context)
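+
+
+# A minimal usage sketch (illustrative only, not from upstream urllib3): scheme
+# and host are normalized to lower case, so two contexts differing only in
+# case map to the same PoolKey, e.g.
+#
+#     ctx_a = {"scheme": "HTTP", "host": "Example.com", "port": 80}
+#     ctx_b = {"scheme": "http", "host": "example.com", "port": 80}
+#     assert (_default_key_normalizer(PoolKey, ctx_a)
+#             == _default_key_normalizer(PoolKey, ctx_b))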
+
+
+#: A dictionary that maps a scheme to a callable that creates a pool key.
+#: This can be used to alter the way pool keys are constructed, if desired.
+#: Each PoolManager makes a copy of this dictionary so they can be configured
+#: globally here, or individually on the instance.
+key_fn_by_scheme = {
+ "http": functools.partial(_default_key_normalizer, PoolKey),
+ "https": functools.partial(_default_key_normalizer, PoolKey),
+}
+
+pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool}
+
+
+class PoolManager(RequestMethods):
+ """
+ Allows for arbitrary requests while transparently keeping track of
+ necessary connection pools for you.
+
+ :param num_pools:
+ Number of connection pools to cache before discarding the least
+ recently used pool.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+
+ :param \\**connection_pool_kw:
+ Additional parameters are used to create fresh
+ :class:`urllib3.connectionpool.ConnectionPool` instances.
+
+ Example::
+
+ >>> manager = PoolManager(num_pools=2)
+ >>> r = manager.request('GET', 'http://google.com/')
+ >>> r = manager.request('GET', 'http://google.com/mail')
+ >>> r = manager.request('GET', 'http://yahoo.com/')
+ >>> len(manager.pools)
+ 2
+
+ """
+
+ proxy = None
+
+ def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
+ RequestMethods.__init__(self, headers)
+ self.connection_pool_kw = connection_pool_kw
+ self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close())
+
+ # Locally set the pool classes and keys so other PoolManagers can
+ # override them.
+ self.pool_classes_by_scheme = pool_classes_by_scheme
+ self.key_fn_by_scheme = key_fn_by_scheme.copy()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.clear()
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def _new_pool(self, scheme, host, port, request_context=None):
+ """
+ Create a new :class:`ConnectionPool` based on host, port, scheme, and
+ any additional pool keyword arguments.
+
+ If ``request_context`` is provided, it is provided as keyword arguments
+ to the pool class used. This method is used to actually create the
+ connection pools handed out by :meth:`connection_from_url` and
+ companion methods. It is intended to be overridden for customization.
+ """
+ pool_cls = self.pool_classes_by_scheme[scheme]
+ if request_context is None:
+ request_context = self.connection_pool_kw.copy()
+
+ # Although the context has everything necessary to create the pool,
+ # this function has historically only used the scheme, host, and port
+ # in the positional args. When an API change is acceptable these can
+ # be removed.
+ for key in ("scheme", "host", "port"):
+ request_context.pop(key, None)
+
+ if scheme == "http":
+ for kw in SSL_KEYWORDS:
+ request_context.pop(kw, None)
+
+ return pool_cls(host, port, **request_context)
+
+ def clear(self):
+ """
+ Empty our store of pools and direct them all to close.
+
+ This will not affect in-flight connections, but they will not be
+ re-used after completion.
+ """
+ self.pools.clear()
+
+ def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
+ """
+ Get a :class:`ConnectionPool` based on the host, port, and scheme.
+
+ If ``port`` isn't given, it will be derived from the ``scheme`` using
+ ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
+ provided, it is merged with the instance's ``connection_pool_kw``
+ variable and used to create the new connection pool, if one is
+ needed.
+ """
+
+ if not host:
+ raise LocationValueError("No host specified.")
+
+ request_context = self._merge_pool_kwargs(pool_kwargs)
+ request_context["scheme"] = scheme or "http"
+ if not port:
+ port = port_by_scheme.get(request_context["scheme"].lower(), 80)
+ request_context["port"] = port
+ request_context["host"] = host
+
+ return self.connection_from_context(request_context)
+
+ def connection_from_context(self, request_context):
+ """
+ Get a :class:`ConnectionPool` based on the request context.
+
+ ``request_context`` must at least contain the ``scheme`` key and its
+ value must be a key in ``key_fn_by_scheme`` instance variable.
+ """
+ scheme = request_context["scheme"].lower()
+ pool_key_constructor = self.key_fn_by_scheme[scheme]
+ pool_key = pool_key_constructor(request_context)
+
+ return self.connection_from_pool_key(pool_key, request_context=request_context)
+
+ def connection_from_pool_key(self, pool_key, request_context=None):
+ """
+ Get a :class:`ConnectionPool` based on the provided pool key.
+
+ ``pool_key`` should be a namedtuple that only contains immutable
+ objects. At a minimum it must have the ``scheme``, ``host``, and
+ ``port`` fields.
+ """
+ with self.pools.lock:
+ # If the scheme, host, or port doesn't match existing open
+ # connections, open a new ConnectionPool.
+ pool = self.pools.get(pool_key)
+ if pool:
+ return pool
+
+ # Make a fresh ConnectionPool of the desired type
+ scheme = request_context["scheme"]
+ host = request_context["host"]
+ port = request_context["port"]
+ pool = self._new_pool(scheme, host, port, request_context=request_context)
+ self.pools[pool_key] = pool
+
+ return pool
+
+ def connection_from_url(self, url, pool_kwargs=None):
+ """
+ Similar to :func:`urllib3.connectionpool.connection_from_url`.
+
+ If ``pool_kwargs`` is not provided and a new pool needs to be
+ constructed, ``self.connection_pool_kw`` is used to initialize
+ the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
+ is provided, it is used instead. Note that if a new pool does not
+ need to be created for the request, the provided ``pool_kwargs`` are
+ not used.
+ """
+ u = parse_url(url)
+ return self.connection_from_host(
+ u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
+ )
+
+ def _merge_pool_kwargs(self, override):
+ """
+ Merge a dictionary of override values for self.connection_pool_kw.
+
+ This does not modify self.connection_pool_kw and returns a new dict.
+ Any keys in the override dictionary with a value of ``None`` are
+ removed from the merged dictionary.
+ """
+ base_pool_kwargs = self.connection_pool_kw.copy()
+ if override:
+ for key, value in override.items():
+ if value is None:
+ try:
+ del base_pool_kwargs[key]
+ except KeyError:
+ pass
+ else:
+ base_pool_kwargs[key] = value
+ return base_pool_kwargs
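+
+    # A minimal usage sketch (illustrative only, not from upstream urllib3):
+    # override values replace entries in connection_pool_kw, and a value of
+    # None removes the key, e.g.
+    #
+    #     pm = PoolManager(timeout=5.0, retries=2)
+    #     merged = pm._merge_pool_kwargs({"retries": None, "block": True})
+    #     assert merged == {"timeout": 5.0, "block": True}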
+
+ def urlopen(self, method, url, redirect=True, **kw):
+ """
+ Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
+ with custom cross-host redirect logic and only sends the request-uri
+ portion of the ``url``.
+
+ The given ``url`` parameter must be absolute, such that an appropriate
+ :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
+ """
+ u = parse_url(url)
+ conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
+
+ kw["assert_same_host"] = False
+ kw["redirect"] = False
+
+ if "headers" not in kw:
+ kw["headers"] = self.headers.copy()
+
+ if self.proxy is not None and u.scheme == "http":
+ response = conn.urlopen(method, url, **kw)
+ else:
+ response = conn.urlopen(method, u.request_uri, **kw)
+
+ redirect_location = redirect and response.get_redirect_location()
+ if not redirect_location:
+ return response
+
+ # Support relative URLs for redirecting.
+ redirect_location = urljoin(url, redirect_location)
+
+ # RFC 7231, Section 6.4.4
+ if response.status == 303:
+ method = "GET"
+
+ retries = kw.get("retries")
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect)
+
+ # Strip headers marked as unsafe to forward to the redirected location.
+ # Check remove_headers_on_redirect to avoid a potential network call within
+ # conn.is_same_host() which may use socket.gethostbyname() in the future.
+ if retries.remove_headers_on_redirect and not conn.is_same_host(
+ redirect_location
+ ):
+ headers = list(six.iterkeys(kw["headers"]))
+ for header in headers:
+ if header.lower() in retries.remove_headers_on_redirect:
+ kw["headers"].pop(header, None)
+
+ try:
+ retries = retries.increment(method, url, response=response, _pool=conn)
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ response.drain_conn()
+ raise
+ return response
+
+ kw["retries"] = retries
+ kw["redirect"] = redirect
+
+ log.info("Redirecting %s -> %s", url, redirect_location)
+
+ response.drain_conn()
+ return self.urlopen(method, redirect_location, **kw)
+
+
+class ProxyManager(PoolManager):
+ """
+ Behaves just like :class:`PoolManager`, but sends all requests through
+ the defined proxy, using the CONNECT method for HTTPS URLs.
+
+ :param proxy_url:
+ The URL of the proxy to be used.
+
+ :param proxy_headers:
+        A dictionary containing headers that will be sent to the proxy. In the
+        HTTP case they are sent with each request, while in the
+ HTTPS/CONNECT case they are sent only once. Could be used for proxy
+ authentication.
+
+    Example::
+ >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
+ >>> r1 = proxy.request('GET', 'http://google.com/')
+ >>> r2 = proxy.request('GET', 'http://httpbin.org/')
+ >>> len(proxy.pools)
+ 1
+ >>> r3 = proxy.request('GET', 'https://httpbin.org/')
+ >>> r4 = proxy.request('GET', 'https://twitter.com/')
+ >>> len(proxy.pools)
+ 3
+
+ """
+
+ def __init__(
+ self,
+ proxy_url,
+ num_pools=10,
+ headers=None,
+ proxy_headers=None,
+ **connection_pool_kw
+ ):
+
+ if isinstance(proxy_url, HTTPConnectionPool):
+ proxy_url = "%s://%s:%i" % (
+ proxy_url.scheme,
+ proxy_url.host,
+ proxy_url.port,
+ )
+ proxy = parse_url(proxy_url)
+ if not proxy.port:
+ port = port_by_scheme.get(proxy.scheme, 80)
+ proxy = proxy._replace(port=port)
+
+ if proxy.scheme not in ("http", "https"):
+ raise ProxySchemeUnknown(proxy.scheme)
+
+ self.proxy = proxy
+ self.proxy_headers = proxy_headers or {}
+
+ connection_pool_kw["_proxy"] = self.proxy
+ connection_pool_kw["_proxy_headers"] = self.proxy_headers
+
+ super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
+
+ def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
+ if scheme == "https":
+ return super(ProxyManager, self).connection_from_host(
+ host, port, scheme, pool_kwargs=pool_kwargs
+ )
+
+ return super(ProxyManager, self).connection_from_host(
+ self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs
+ )
+
+ def _set_proxy_headers(self, url, headers=None):
+ """
+ Sets headers needed by proxies: specifically, the Accept and Host
+ headers. Only sets headers not provided by the user.
+ """
+ headers_ = {"Accept": "*/*"}
+
+ netloc = parse_url(url).netloc
+ if netloc:
+ headers_["Host"] = netloc
+
+ if headers:
+ headers_.update(headers)
+ return headers_
+
+ def _validate_proxy_scheme_url_selection(self, url_scheme):
+ if url_scheme == "https" and self.proxy.scheme == "https":
+ warnings.warn(
+ "Your proxy configuration specified an HTTPS scheme for the proxy. "
+ "Are you sure you want to use HTTPS to contact the proxy? "
+ "This most likely indicates an error in your configuration. "
+ "Read this issue for more info: "
+ "https://github.com/urllib3/urllib3/issues/1850",
+ InvalidProxyConfigurationWarning,
+ stacklevel=3,
+ )
+
+ def urlopen(self, method, url, redirect=True, **kw):
+ "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
+ u = parse_url(url)
+ self._validate_proxy_scheme_url_selection(u.scheme)
+
+ if u.scheme == "http":
+ # For proxied HTTPS requests, httplib sets the necessary headers
+ # on the CONNECT to the proxy. For HTTP, we'll definitely
+ # need to set 'Host' at the very least.
+ headers = kw.get("headers", self.headers)
+ kw["headers"] = self._set_proxy_headers(url, headers)
+
+ return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
+
+
+def proxy_from_url(url, **kw):
+ return ProxyManager(proxy_url=url, **kw)
diff --git a/third_party/python/urllib3/src/urllib3/request.py b/third_party/python/urllib3/src/urllib3/request.py
new file mode 100644
index 0000000000..55f160bbf1
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/request.py
@@ -0,0 +1,171 @@
+from __future__ import absolute_import
+
+from .filepost import encode_multipart_formdata
+from .packages.six.moves.urllib.parse import urlencode
+
+
+__all__ = ["RequestMethods"]
+
+
+class RequestMethods(object):
+ """
+    Convenience mixin for classes that implement a :meth:`urlopen` method, such
+ as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
+ :class:`~urllib3.poolmanager.PoolManager`.
+
+ Provides behavior for making common types of HTTP request methods and
+ decides which type of request field encoding to use.
+
+ Specifically,
+
+ :meth:`.request_encode_url` is for sending requests whose fields are
+ encoded in the URL (such as GET, HEAD, DELETE).
+
+ :meth:`.request_encode_body` is for sending requests whose fields are
+ encoded in the *body* of the request using multipart or www-form-urlencoded
+ (such as for POST, PUT, PATCH).
+
+    :meth:`.request` is for making any kind of request; it will look up the
+ appropriate encoding format and use one of the above two methods to make
+ the request.
+
+ Initializer parameters:
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+ """
+
+ _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}
+
+ def __init__(self, headers=None):
+ self.headers = headers or {}
+
+ def urlopen(
+ self,
+ method,
+ url,
+ body=None,
+ headers=None,
+ encode_multipart=True,
+ multipart_boundary=None,
+ **kw
+ ): # Abstract
+ raise NotImplementedError(
+ "Classes extending RequestMethods must implement "
+ "their own ``urlopen`` method."
+ )
+
+ def request(self, method, url, fields=None, headers=None, **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the appropriate encoding of
+ ``fields`` based on the ``method`` used.
+
+ This is a convenience method that requires the least amount of manual
+ effort. It can be used in most situations, while still having the
+ option to drop down to more specific methods when necessary, such as
+ :meth:`request_encode_url`, :meth:`request_encode_body`,
+ or even the lowest level :meth:`urlopen`.
+ """
+ method = method.upper()
+
+ urlopen_kw["request_url"] = url
+
+ if method in self._encode_url_methods:
+ return self.request_encode_url(
+ method, url, fields=fields, headers=headers, **urlopen_kw
+ )
+ else:
+ return self.request_encode_body(
+ method, url, fields=fields, headers=headers, **urlopen_kw
+ )
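+
+    # A minimal usage sketch (illustrative only, not from upstream urllib3; the
+    # host name is just an example): with a PoolManager, GET-style methods go
+    # through request_encode_url and body-carrying methods through
+    # request_encode_body, e.g.
+    #
+    #     import urllib3
+    #     http = urllib3.PoolManager()
+    #     r1 = http.request("GET", "http://example.com/", fields={"q": "x"})
+    #     r2 = http.request("POST", "http://example.com/", fields={"q": "x"})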
+
+ def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the url. This is useful for request methods like GET, HEAD, DELETE, etc.
+ """
+ if headers is None:
+ headers = self.headers
+
+ extra_kw = {"headers": headers}
+ extra_kw.update(urlopen_kw)
+
+ if fields:
+ url += "?" + urlencode(fields)
+
+ return self.urlopen(method, url, **extra_kw)
+
+ def request_encode_body(
+ self,
+ method,
+ url,
+ fields=None,
+ headers=None,
+ encode_multipart=True,
+ multipart_boundary=None,
+ **urlopen_kw
+ ):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the body. This is useful for request methods like POST, PUT, PATCH, etc.
+
+ When ``encode_multipart=True`` (default), then
+ :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
+ the payload with the appropriate content type. Otherwise
+ :meth:`urllib.urlencode` is used with the
+ 'application/x-www-form-urlencoded' content type.
+
+ Multipart encoding must be used when posting files, and it's reasonably
+        safe to use it at other times too. However, it may break request
+ signing, such as with OAuth.
+
+ Supports an optional ``fields`` parameter of key/value strings AND
+ key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
+ the MIME type is optional. For example::
+
+ fields = {
+ 'foo': 'bar',
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
+ 'realfile': ('barfile.txt', open('realfile').read()),
+ 'typedfile': ('bazfile.bin', open('bazfile').read(),
+ 'image/jpeg'),
+ 'nonamefile': 'contents of nonamefile field',
+ }
+
+ When uploading a file, providing a filename (the first parameter of the
+ tuple) is optional but recommended to best mimic behavior of browsers.
+
+ Note that if ``headers`` are supplied, the 'Content-Type' header will
+ be overwritten because it depends on the dynamic random boundary string
+ which is used to compose the body of the request. The random boundary
+ string can be explicitly set with the ``multipart_boundary`` parameter.
+ """
+ if headers is None:
+ headers = self.headers
+
+ extra_kw = {"headers": {}}
+
+ if fields:
+ if "body" in urlopen_kw:
+ raise TypeError(
+ "request got values for both 'fields' and 'body', can only specify one."
+ )
+
+ if encode_multipart:
+ body, content_type = encode_multipart_formdata(
+ fields, boundary=multipart_boundary
+ )
+ else:
+ body, content_type = (
+ urlencode(fields),
+ "application/x-www-form-urlencoded",
+ )
+
+ extra_kw["body"] = body
+ extra_kw["headers"] = {"Content-Type": content_type}
+
+ extra_kw["headers"].update(headers)
+ extra_kw.update(urlopen_kw)
+
+ return self.urlopen(method, url, **extra_kw)
diff --git a/third_party/python/urllib3/src/urllib3/response.py b/third_party/python/urllib3/src/urllib3/response.py
new file mode 100644
index 0000000000..7dc9b93cae
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/response.py
@@ -0,0 +1,821 @@
+from __future__ import absolute_import
+from contextlib import contextmanager
+import zlib
+import io
+import logging
+from socket import timeout as SocketTimeout
+from socket import error as SocketError
+
+try:
+ import brotli
+except ImportError:
+ brotli = None
+
+from ._collections import HTTPHeaderDict
+from .exceptions import (
+ BodyNotHttplibCompatible,
+ ProtocolError,
+ DecodeError,
+ ReadTimeoutError,
+ ResponseNotChunked,
+ IncompleteRead,
+ InvalidHeader,
+ HTTPError,
+)
+from .packages.six import string_types as basestring, PY3
+from .packages.six.moves import http_client as httplib
+from .connection import HTTPException, BaseSSLError
+from .util.response import is_fp_closed, is_response_to_head
+
+log = logging.getLogger(__name__)
+
+
+class DeflateDecoder(object):
+ def __init__(self):
+ self._first_try = True
+ self._data = b""
+ self._obj = zlib.decompressobj()
+
+ def __getattr__(self, name):
+ return getattr(self._obj, name)
+
+ def decompress(self, data):
+ if not data:
+ return data
+
+ if not self._first_try:
+ return self._obj.decompress(data)
+
+ self._data += data
+ try:
+ decompressed = self._obj.decompress(data)
+ if decompressed:
+ self._first_try = False
+ self._data = None
+ return decompressed
+ except zlib.error:
+ self._first_try = False
+ self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
+ try:
+ return self.decompress(self._data)
+ finally:
+ self._data = None
+
+
+class GzipDecoderState(object):
+
+ FIRST_MEMBER = 0
+ OTHER_MEMBERS = 1
+ SWALLOW_DATA = 2
+
+
+class GzipDecoder(object):
+ def __init__(self):
+ self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+ self._state = GzipDecoderState.FIRST_MEMBER
+
+ def __getattr__(self, name):
+ return getattr(self._obj, name)
+
+ def decompress(self, data):
+ ret = bytearray()
+ if self._state == GzipDecoderState.SWALLOW_DATA or not data:
+ return bytes(ret)
+ while True:
+ try:
+ ret += self._obj.decompress(data)
+ except zlib.error:
+ previous_state = self._state
+ # Ignore data after the first error
+ self._state = GzipDecoderState.SWALLOW_DATA
+ if previous_state == GzipDecoderState.OTHER_MEMBERS:
+ # Allow trailing garbage acceptable in other gzip clients
+ return bytes(ret)
+ raise
+ data = self._obj.unused_data
+ if not data:
+ return bytes(ret)
+ self._state = GzipDecoderState.OTHER_MEMBERS
+ self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+
+
+if brotli is not None:
+
+ class BrotliDecoder(object):
+ # Supports both 'brotlipy' and 'Brotli' packages
+ # since they share an import name. The top branches
+ # are for 'brotlipy' and bottom branches for 'Brotli'
+ def __init__(self):
+ self._obj = brotli.Decompressor()
+
+ def decompress(self, data):
+ if hasattr(self._obj, "decompress"):
+ return self._obj.decompress(data)
+ return self._obj.process(data)
+
+ def flush(self):
+ if hasattr(self._obj, "flush"):
+ return self._obj.flush()
+ return b""
+
+
+class MultiDecoder(object):
+ """
+ From RFC7231:
+ If one or more encodings have been applied to a representation, the
+ sender that applied the encodings MUST generate a Content-Encoding
+ header field that lists the content codings in the order in which
+ they were applied.
+ """
+
+ def __init__(self, modes):
+ self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
+
+ def flush(self):
+ return self._decoders[0].flush()
+
+ def decompress(self, data):
+ for d in reversed(self._decoders):
+ data = d.decompress(data)
+ return data
+
+
+def _get_decoder(mode):
+ if "," in mode:
+ return MultiDecoder(mode)
+
+ if mode == "gzip":
+ return GzipDecoder()
+
+ if brotli is not None and mode == "br":
+ return BrotliDecoder()
+
+ return DeflateDecoder()
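+
+
+# A minimal usage sketch (illustrative only, not from upstream urllib3): a
+# single coding selects one decoder, while a comma-separated Content-Encoding
+# value selects a MultiDecoder that unwraps the codings in reverse order, e.g.
+#
+#     assert _get_decoder("deflate").decompress(zlib.compress(b"hi")) == b"hi"
+#     assert isinstance(_get_decoder("gzip, deflate"), MultiDecoder)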
+
+
+class HTTPResponse(io.IOBase):
+ """
+ HTTP Response container.
+
+    Backwards-compatible with httplib's HTTPResponse, but the response ``body`` is
+ loaded and decoded on-demand when the ``data`` property is accessed. This
+ class is also compatible with the Python standard library's :mod:`io`
+ module, and can hence be treated as a readable object in the context of that
+ framework.
+
+ Extra parameters for behaviour not present in httplib.HTTPResponse:
+
+ :param preload_content:
+ If True, the response's body will be preloaded during construction.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+
+ :param original_response:
+ When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
+ object, it's convenient to include the original for debug purposes. It's
+ otherwise unused.
+
+ :param retries:
+ The retries contains the last :class:`~urllib3.util.retry.Retry` that
+ was used during the request.
+
+ :param enforce_content_length:
+ Enforce content length checking. Body returned by server must match
+ value of Content-Length header, if present. Otherwise, raise error.
+ """
+
+ CONTENT_DECODERS = ["gzip", "deflate"]
+ if brotli is not None:
+ CONTENT_DECODERS += ["br"]
+ REDIRECT_STATUSES = [301, 302, 303, 307, 308]
+
+ def __init__(
+ self,
+ body="",
+ headers=None,
+ status=0,
+ version=0,
+ reason=None,
+ strict=0,
+ preload_content=True,
+ decode_content=True,
+ original_response=None,
+ pool=None,
+ connection=None,
+ msg=None,
+ retries=None,
+ enforce_content_length=False,
+ request_method=None,
+ request_url=None,
+ auto_close=True,
+ ):
+
+ if isinstance(headers, HTTPHeaderDict):
+ self.headers = headers
+ else:
+ self.headers = HTTPHeaderDict(headers)
+ self.status = status
+ self.version = version
+ self.reason = reason
+ self.strict = strict
+ self.decode_content = decode_content
+ self.retries = retries
+ self.enforce_content_length = enforce_content_length
+ self.auto_close = auto_close
+
+ self._decoder = None
+ self._body = None
+ self._fp = None
+ self._original_response = original_response
+ self._fp_bytes_read = 0
+ self.msg = msg
+ self._request_url = request_url
+
+ if body and isinstance(body, (basestring, bytes)):
+ self._body = body
+
+ self._pool = pool
+ self._connection = connection
+
+ if hasattr(body, "read"):
+ self._fp = body
+
+ # Are we using the chunked-style of transfer encoding?
+ self.chunked = False
+ self.chunk_left = None
+ tr_enc = self.headers.get("transfer-encoding", "").lower()
+ # Don't incur the penalty of creating a list and then discarding it
+ encodings = (enc.strip() for enc in tr_enc.split(","))
+ if "chunked" in encodings:
+ self.chunked = True
+
+ # Determine length of response
+ self.length_remaining = self._init_length(request_method)
+
+ # If requested, preload the body.
+ if preload_content and not self._body:
+ self._body = self.read(decode_content=decode_content)
+
+ def get_redirect_location(self):
+ """
+ Should we redirect and where to?
+
+ :returns: Truthy redirect location string if we got a redirect status
+ code and valid location. ``None`` if redirect status and no
+ location. ``False`` if not a redirect status code.
+ """
+ if self.status in self.REDIRECT_STATUSES:
+ return self.headers.get("location")
+
+ return False
+
+ def release_conn(self):
+ if not self._pool or not self._connection:
+ return
+
+ self._pool._put_conn(self._connection)
+ self._connection = None
+
+ def drain_conn(self):
+ """
+ Read and discard any remaining HTTP response data in the response connection.
+
+ Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
+ """
+ try:
+ self.read()
+ except (HTTPError, SocketError, BaseSSLError, HTTPException):
+ pass
+
+ @property
+ def data(self):
+        # For backwards-compat with urllib3 0.4 and earlier.
+ if self._body:
+ return self._body
+
+ if self._fp:
+ return self.read(cache_content=True)
+
+ @property
+ def connection(self):
+ return self._connection
+
+ def isclosed(self):
+ return is_fp_closed(self._fp)
+
+ def tell(self):
+ """
+ Obtain the number of bytes pulled over the wire so far. May differ from
+        the amount of content returned by :meth:`HTTPResponse.read` if bytes
+        are encoded on the wire (e.g., compressed).
+ """
+ return self._fp_bytes_read
+
+ def _init_length(self, request_method):
+ """
+ Set initial length value for Response content if available.
+ """
+ length = self.headers.get("content-length")
+
+ if length is not None:
+ if self.chunked:
+ # This Response will fail with an IncompleteRead if it can't be
+ # received as chunked. This method falls back to attempt reading
+ # the response before raising an exception.
+ log.warning(
+ "Received response with both Content-Length and "
+ "Transfer-Encoding set. This is expressly forbidden "
+ "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
+ "attempting to process response as Transfer-Encoding: "
+ "chunked."
+ )
+ return None
+
+ try:
+ # RFC 7230 section 3.3.2 specifies multiple content lengths can
+ # be sent in a single Content-Length header
+ # (e.g. Content-Length: 42, 42). This line ensures the values
+ # are all valid ints and that as long as the `set` length is 1,
+ # all values are the same. Otherwise, the header is invalid.
+ lengths = set([int(val) for val in length.split(",")])
+ if len(lengths) > 1:
+ raise InvalidHeader(
+ "Content-Length contained multiple "
+ "unmatching values (%s)" % length
+ )
+ length = lengths.pop()
+ except ValueError:
+ length = None
+ else:
+ if length < 0:
+ length = None
+
+ # Convert status to int for comparison
+ # In some cases, httplib returns a status of "_UNKNOWN"
+ try:
+ status = int(self.status)
+ except ValueError:
+ status = 0
+
+ # Check for responses that shouldn't include a body
+ if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
+ length = 0
+
+ return length
+
+ def _init_decoder(self):
+ """
+ Set-up the _decoder attribute if necessary.
+ """
+ # Note: content-encoding value should be case-insensitive, per RFC 7230
+ # Section 3.2
+ content_encoding = self.headers.get("content-encoding", "").lower()
+ if self._decoder is None:
+ if content_encoding in self.CONTENT_DECODERS:
+ self._decoder = _get_decoder(content_encoding)
+ elif "," in content_encoding:
+ encodings = [
+ e.strip()
+ for e in content_encoding.split(",")
+ if e.strip() in self.CONTENT_DECODERS
+ ]
+ if len(encodings):
+ self._decoder = _get_decoder(content_encoding)
+
+ DECODER_ERROR_CLASSES = (IOError, zlib.error)
+ if brotli is not None:
+ DECODER_ERROR_CLASSES += (brotli.error,)
+
+ def _decode(self, data, decode_content, flush_decoder):
+ """
+ Decode the data passed in and potentially flush the decoder.
+ """
+ if not decode_content:
+ return data
+
+ try:
+ if self._decoder:
+ data = self._decoder.decompress(data)
+ except self.DECODER_ERROR_CLASSES as e:
+ content_encoding = self.headers.get("content-encoding", "").lower()
+ raise DecodeError(
+ "Received response with content-encoding: %s, but "
+ "failed to decode it." % content_encoding,
+ e,
+ )
+ if flush_decoder:
+ data += self._flush_decoder()
+
+ return data
+
+ def _flush_decoder(self):
+ """
+ Flushes the decoder. Should only be called if the decoder is actually
+ being used.
+ """
+ if self._decoder:
+ buf = self._decoder.decompress(b"")
+ return buf + self._decoder.flush()
+
+ return b""
+
+ @contextmanager
+ def _error_catcher(self):
+ """
+ Catch low-level python exceptions, instead re-raising urllib3
+ variants, so that low-level exceptions are not leaked in the
+ high-level api.
+
+ On exit, release the connection back to the pool.
+ """
+ clean_exit = False
+
+ try:
+ try:
+ yield
+
+ except SocketTimeout:
+ # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
+ # there is yet no clean way to get at it from this context.
+ raise ReadTimeoutError(self._pool, None, "Read timed out.")
+
+ except BaseSSLError as e:
+ # FIXME: Is there a better way to differentiate between SSLErrors?
+ if "read operation timed out" not in str(e): # Defensive:
+ # This shouldn't happen but just in case we're missing an edge
+ # case, let's avoid swallowing SSL errors.
+ raise
+
+ raise ReadTimeoutError(self._pool, None, "Read timed out.")
+
+ except (HTTPException, SocketError) as e:
+ # This includes IncompleteRead.
+ raise ProtocolError("Connection broken: %r" % e, e)
+
+ # If no exception is thrown, we should avoid cleaning up
+ # unnecessarily.
+ clean_exit = True
+ finally:
+ # If we didn't terminate cleanly, we need to throw away our
+ # connection.
+ if not clean_exit:
+ # The response may not be closed but we're not going to use it
+ # anymore so close it now to ensure that the connection is
+ # released back to the pool.
+ if self._original_response:
+ self._original_response.close()
+
+ # Closing the response may not actually be sufficient to close
+                # everything, so if we have a hold of the connection, close that
+ # too.
+ if self._connection:
+ self._connection.close()
+
+ # If we hold the original response but it's closed now, we should
+ # return the connection back to the pool.
+ if self._original_response and self._original_response.isclosed():
+ self.release_conn()
+
+ def read(self, amt=None, decode_content=None, cache_content=False):
+ """
+ Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
+ parameters: ``decode_content`` and ``cache_content``.
+
+ :param amt:
+ How much of the content to read. If specified, caching is skipped
+ because it doesn't make sense to cache partial content as the full
+ response.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+
+ :param cache_content:
+ If True, will save the returned data such that the same result is
+            returned regardless of the state of the underlying file object. This
+ is useful if you want the ``.data`` property to continue working
+ after having ``.read()`` the file object. (Overridden if ``amt`` is
+ set.)
+ """
+ self._init_decoder()
+ if decode_content is None:
+ decode_content = self.decode_content
+
+ if self._fp is None:
+ return
+
+ flush_decoder = False
+ fp_closed = getattr(self._fp, "closed", False)
+
+ with self._error_catcher():
+ if amt is None:
+ # cStringIO doesn't like amt=None
+ data = self._fp.read() if not fp_closed else b""
+ flush_decoder = True
+ else:
+ cache_content = False
+ data = self._fp.read(amt) if not fp_closed else b""
+ if (
+ amt != 0 and not data
+ ): # Platform-specific: Buggy versions of Python.
+ # Close the connection when no data is returned
+ #
+ # This is redundant to what httplib/http.client _should_
+ # already do. However, versions of python released before
+ # December 15, 2012 (http://bugs.python.org/issue16298) do
+ # not properly close the connection in all cases. There is
+ # no harm in redundantly calling close.
+ self._fp.close()
+ flush_decoder = True
+ if self.enforce_content_length and self.length_remaining not in (
+ 0,
+ None,
+ ):
+ # This is an edge case that httplib failed to cover due
+ # to concerns of backward compatibility. We're
+ # addressing it here to make sure IncompleteRead is
+ # raised during streaming, so all calls with incorrect
+ # Content-Length are caught.
+ raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
+
+ if data:
+ self._fp_bytes_read += len(data)
+ if self.length_remaining is not None:
+ self.length_remaining -= len(data)
+
+ data = self._decode(data, decode_content, flush_decoder)
+
+ if cache_content:
+ self._body = data
+
+ return data
+
+ def stream(self, amt=2 ** 16, decode_content=None):
+ """
+ A generator wrapper for the read() method. A call will block until
+ ``amt`` bytes have been read from the connection or until the
+ connection is closed.
+
+ :param amt:
+ How much of the content to read. The generator will return up to
+            ``amt`` bytes of data per iteration, but may return less. This is particularly
+ likely when using compressed data. However, the empty string will
+ never be returned.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
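+
+        A minimal usage sketch (illustrative; ``resp`` is assumed to be a
+        response fetched with ``preload_content=False``)::
+
+            with open("body.bin", "wb") as fh:
+                for chunk in resp.stream(2 ** 10, decode_content=True):
+                    fh.write(chunk)
+            resp.release_conn()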
+ """
+ if self.chunked and self.supports_chunked_reads():
+ for line in self.read_chunked(amt, decode_content=decode_content):
+ yield line
+ else:
+ while not is_fp_closed(self._fp):
+ data = self.read(amt=amt, decode_content=decode_content)
+
+ if data:
+ yield data
+
+ @classmethod
+ def from_httplib(ResponseCls, r, **response_kw):
+ """
+ Given an :class:`httplib.HTTPResponse` instance ``r``, return a
+ corresponding :class:`urllib3.response.HTTPResponse` object.
+
+ Remaining parameters are passed to the HTTPResponse constructor, along
+ with ``original_response=r``.
+ """
+ headers = r.msg
+
+ if not isinstance(headers, HTTPHeaderDict):
+ if PY3:
+ headers = HTTPHeaderDict(headers.items())
+ else:
+ # Python 2.7
+ headers = HTTPHeaderDict.from_httplib(headers)
+
+ # HTTPResponse objects in Python 3 don't have a .strict attribute
+ strict = getattr(r, "strict", 0)
+ resp = ResponseCls(
+ body=r,
+ headers=headers,
+ status=r.status,
+ version=r.version,
+ reason=r.reason,
+ strict=strict,
+ original_response=r,
+ **response_kw
+ )
+ return resp
+
+ # Backwards-compatibility methods for httplib.HTTPResponse
+ def getheaders(self):
+ return self.headers
+
+ def getheader(self, name, default=None):
+ return self.headers.get(name, default)
+
+ # Backwards compatibility for http.cookiejar
+ def info(self):
+ return self.headers
+
+ # Overrides from io.IOBase
+ def close(self):
+ if not self.closed:
+ self._fp.close()
+
+ if self._connection:
+ self._connection.close()
+
+ if not self.auto_close:
+ io.IOBase.close(self)
+
+ @property
+ def closed(self):
+ if not self.auto_close:
+ return io.IOBase.closed.__get__(self)
+ elif self._fp is None:
+ return True
+ elif hasattr(self._fp, "isclosed"):
+ return self._fp.isclosed()
+ elif hasattr(self._fp, "closed"):
+ return self._fp.closed
+ else:
+ return True
+
+ def fileno(self):
+ if self._fp is None:
+ raise IOError("HTTPResponse has no file to get a fileno from")
+ elif hasattr(self._fp, "fileno"):
+ return self._fp.fileno()
+ else:
+ raise IOError(
+ "The file-like object this HTTPResponse is wrapped "
+ "around has no file descriptor"
+ )
+
+ def flush(self):
+ if (
+ self._fp is not None
+ and hasattr(self._fp, "flush")
+ and not getattr(self._fp, "closed", False)
+ ):
+ return self._fp.flush()
+
+ def readable(self):
+ # This method is required for `io` module compatibility.
+ return True
+
+ def readinto(self, b):
+ # This method is required for `io` module compatibility.
+ temp = self.read(len(b))
+ if len(temp) == 0:
+ return 0
+ else:
+ b[: len(temp)] = temp
+ return len(temp)
+
+ def supports_chunked_reads(self):
+ """
+        Checks if the underlying file-like object looks like an
+ httplib.HTTPResponse object. We do this by testing for the fp
+ attribute. If it is present we assume it returns raw chunks as
+ processed by read_chunked().
+ """
+ return hasattr(self._fp, "fp")
+
+ def _update_chunk_length(self):
+ # First, we'll figure out length of a chunk and then
+ # we'll try to read it from socket.
+ if self.chunk_left is not None:
+ return
+ line = self._fp.fp.readline()
+ line = line.split(b";", 1)[0]
+ try:
+ self.chunk_left = int(line, 16)
+ except ValueError:
+ # Invalid chunked protocol response, abort.
+ self.close()
+ raise httplib.IncompleteRead(line)
+
+ def _handle_chunk(self, amt):
+ returned_chunk = None
+ if amt is None:
+ chunk = self._fp._safe_read(self.chunk_left)
+ returned_chunk = chunk
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ elif amt < self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self.chunk_left = self.chunk_left - amt
+ returned_chunk = value
+ elif amt == self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ returned_chunk = value
+ else: # amt > self.chunk_left
+ returned_chunk = self._fp._safe_read(self.chunk_left)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ return returned_chunk
+
+ def read_chunked(self, amt=None, decode_content=None):
+ """
+ Similar to :meth:`HTTPResponse.read`, but with an additional
+ parameter: ``decode_content``.
+
+ :param amt:
+ How much of the content to read. If specified, caching is skipped
+ because it doesn't make sense to cache partial content as the full
+ response.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+ """
+ self._init_decoder()
+ # FIXME: Rewrite this method and make it a class with a better structured logic.
+ if not self.chunked:
+ raise ResponseNotChunked(
+ "Response is not chunked. "
+ "Header 'transfer-encoding: chunked' is missing."
+ )
+ if not self.supports_chunked_reads():
+ raise BodyNotHttplibCompatible(
+ "Body should be httplib.HTTPResponse like. "
+ "It should have have an fp attribute which returns raw chunks."
+ )
+
+ with self._error_catcher():
+ # Don't bother reading the body of a HEAD request.
+ if self._original_response and is_response_to_head(self._original_response):
+ self._original_response.close()
+ return
+
+ # If a response is already read and closed
+ # then return immediately.
+ if self._fp.fp is None:
+ return
+
+ while True:
+ self._update_chunk_length()
+ if self.chunk_left == 0:
+ break
+ chunk = self._handle_chunk(amt)
+ decoded = self._decode(
+ chunk, decode_content=decode_content, flush_decoder=False
+ )
+ if decoded:
+ yield decoded
+
+ if decode_content:
+ # On CPython and PyPy, we should never need to flush the
+ # decoder. However, on Jython we *might* need to, so
+                # let's defensively do it anyway.
+ decoded = self._flush_decoder()
+ if decoded: # Platform-specific: Jython.
+ yield decoded
+
+ # Chunk content ends with \r\n: discard it.
+ while True:
+ line = self._fp.fp.readline()
+ if not line:
+ # Some sites may not end with '\r\n'.
+ break
+ if line == b"\r\n":
+ break
+
+ # We read everything; close the "file".
+ if self._original_response:
+ self._original_response.close()
+
+ def geturl(self):
+ """
+ Returns the URL that was the source of this response.
+ If the request that generated this response redirected, this method
+ will return the final redirect location.
+ """
+ if self.retries is not None and len(self.retries.history):
+ return self.retries.history[-1].redirect_location
+ else:
+ return self._request_url
+
+ def __iter__(self):
+ buffer = []
+ for chunk in self.stream(decode_content=True):
+ if b"\n" in chunk:
+ chunk = chunk.split(b"\n")
+ yield b"".join(buffer) + chunk[0] + b"\n"
+ for x in chunk[1:-1]:
+ yield x + b"\n"
+ if chunk[-1]:
+ buffer = [chunk[-1]]
+ else:
+ buffer = []
+ else:
+ buffer.append(chunk)
+ if buffer:
+ yield b"".join(buffer)
diff --git a/third_party/python/urllib3/src/urllib3/util/__init__.py b/third_party/python/urllib3/src/urllib3/util/__init__.py
new file mode 100644
index 0000000000..a96c73a9d8
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/util/__init__.py
@@ -0,0 +1,46 @@
+from __future__ import absolute_import
+
+# For backwards compatibility, provide imports that used to be here.
+from .connection import is_connection_dropped
+from .request import make_headers
+from .response import is_fp_closed
+from .ssl_ import (
+ SSLContext,
+ HAS_SNI,
+ IS_PYOPENSSL,
+ IS_SECURETRANSPORT,
+ assert_fingerprint,
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
+ PROTOCOL_TLS,
+)
+from .timeout import current_time, Timeout
+
+from .retry import Retry
+from .url import get_host, parse_url, split_first, Url
+from .wait import wait_for_read, wait_for_write
+
+__all__ = (
+ "HAS_SNI",
+ "IS_PYOPENSSL",
+ "IS_SECURETRANSPORT",
+ "SSLContext",
+ "PROTOCOL_TLS",
+ "Retry",
+ "Timeout",
+ "Url",
+ "assert_fingerprint",
+ "current_time",
+ "is_connection_dropped",
+ "is_fp_closed",
+ "get_host",
+ "parse_url",
+ "make_headers",
+ "resolve_cert_reqs",
+ "resolve_ssl_version",
+ "split_first",
+ "ssl_wrap_socket",
+ "wait_for_read",
+ "wait_for_write",
+)
diff --git a/third_party/python/urllib3/src/urllib3/util/connection.py b/third_party/python/urllib3/src/urllib3/util/connection.py
new file mode 100644
index 0000000000..86f0a3b00e
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/util/connection.py
@@ -0,0 +1,138 @@
+from __future__ import absolute_import
+import socket
+from .wait import NoWayToWaitForSocketError, wait_for_read
+from ..contrib import _appengine_environ
+
+
+def is_connection_dropped(conn): # Platform-specific
+ """
+ Returns True if the connection is dropped and should be closed.
+
+ :param conn:
+ :class:`httplib.HTTPConnection` object.
+
+ Note: For platforms like AppEngine, this will always return ``False`` to
+ let the platform handle connection recycling transparently for us.
+ """
+ sock = getattr(conn, "sock", False)
+ if sock is False: # Platform-specific: AppEngine
+ return False
+ if sock is None: # Connection already closed (such as by httplib).
+ return True
+ try:
+ # Returns True if readable, which here means it's been dropped
+ return wait_for_read(sock, timeout=0.0)
+ except NoWayToWaitForSocketError: # Platform-specific: AppEngine
+ return False
+
+
+# This function is copied from socket.py in the Python 2.7 standard
+# library test suite. Added to its signature is only `socket_options`.
+# One additional modification is that we avoid binding to IPv6 servers
+# discovered in DNS if the system doesn't have IPv6 functionality.
+def create_connection(
+ address,
+ timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ source_address=None,
+ socket_options=None,
+):
+ """Connect to *address* and return the socket object.
+
+ Convenience function. Connect to *address* (a 2-tuple ``(host,
+ port)``) and return the socket object. Passing the optional
+ *timeout* parameter will set the timeout on the socket instance
+ before attempting to connect. If no *timeout* is supplied, the
+ global default timeout setting returned by :func:`getdefaulttimeout`
+ is used. If *source_address* is set it must be a tuple of (host, port)
+ for the socket to bind as a source address before making the connection.
+    A host of '' or port 0 tells the OS to use the default.
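+
+    A minimal usage sketch (illustrative; the ``TCP_NODELAY`` option is only
+    an example of a socket option)::
+
+        import socket
+        sock = create_connection(
+            ("example.com", 80),
+            timeout=3.0,
+            socket_options=[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)],
+        )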
+ """
+
+ host, port = address
+ if host.startswith("["):
+ host = host.strip("[]")
+ err = None
+
+ # Using the value from allowed_gai_family() in the context of getaddrinfo lets
+ # us select whether to work with IPv4 DNS records, IPv6 records, or both.
+ # The original create_connection function always returns all records.
+ family = allowed_gai_family()
+
+ for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
+ af, socktype, proto, canonname, sa = res
+ sock = None
+ try:
+ sock = socket.socket(af, socktype, proto)
+
+ # If provided, set socket level options before connecting.
+ _set_socket_options(sock, socket_options)
+
+ if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
+ sock.settimeout(timeout)
+ if source_address:
+ sock.bind(source_address)
+ sock.connect(sa)
+ return sock
+
+ except socket.error as e:
+ err = e
+ if sock is not None:
+ sock.close()
+ sock = None
+
+ if err is not None:
+ raise err
+
+ raise socket.error("getaddrinfo returns an empty list")
+
+
+def _set_socket_options(sock, options):
+ if options is None:
+ return
+
+ for opt in options:
+ sock.setsockopt(*opt)
+
+
+def allowed_gai_family():
+ """This function is designed to work in the context of
+ getaddrinfo, where family=socket.AF_UNSPEC is the default and
+ will perform a DNS search for both IPv6 and IPv4 records."""
+
+ family = socket.AF_INET
+ if HAS_IPV6:
+ family = socket.AF_UNSPEC
+ return family
+
+
+def _has_ipv6(host):
+ """ Returns True if the system can bind an IPv6 address. """
+ sock = None
+ has_ipv6 = False
+
+ # App Engine doesn't support IPV6 sockets and actually has a quota on the
+ # number of sockets that can be used, so just early out here instead of
+ # creating a socket needlessly.
+ # See https://github.com/urllib3/urllib3/issues/1446
+ if _appengine_environ.is_appengine_sandbox():
+ return False
+
+ if socket.has_ipv6:
+ # has_ipv6 returns true if cPython was compiled with IPv6 support.
+ # It does not tell us if the system has IPv6 support enabled. To
+ # determine that we must bind to an IPv6 address.
+ # https://github.com/urllib3/urllib3/pull/611
+ # https://bugs.python.org/issue658327
+ try:
+ sock = socket.socket(socket.AF_INET6)
+ sock.bind((host, 0))
+ has_ipv6 = True
+ except Exception:
+ pass
+
+ if sock:
+ sock.close()
+ return has_ipv6
+
+
+HAS_IPV6 = _has_ipv6("::1")
diff --git a/third_party/python/urllib3/src/urllib3/util/queue.py b/third_party/python/urllib3/src/urllib3/util/queue.py
new file mode 100644
index 0000000000..d3d379a199
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/util/queue.py
@@ -0,0 +1,21 @@
+import collections
+from ..packages import six
+from ..packages.six.moves import queue
+
+if six.PY2:
+ # Queue is imported for side effects on MS Windows. See issue #229.
+ import Queue as _unused_module_Queue # noqa: F401
+
+
+class LifoQueue(queue.Queue):
+ def _init(self, _):
+ self.queue = collections.deque()
+
+ def _qsize(self, len=len):
+ return len(self.queue)
+
+ def _put(self, item):
+ self.queue.append(item)
+
+ def _get(self):
+ return self.queue.pop()
diff --git a/third_party/python/urllib3/src/urllib3/util/request.py b/third_party/python/urllib3/src/urllib3/util/request.py
new file mode 100644
index 0000000000..3b7bb54daf
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/util/request.py
@@ -0,0 +1,135 @@
+from __future__ import absolute_import
+from base64 import b64encode
+
+from ..packages.six import b, integer_types
+from ..exceptions import UnrewindableBodyError
+
+ACCEPT_ENCODING = "gzip,deflate"
+try:
+ import brotli as _unused_module_brotli # noqa: F401
+except ImportError:
+ pass
+else:
+ ACCEPT_ENCODING += ",br"
+
+_FAILEDTELL = object()
+
+
+def make_headers(
+ keep_alive=None,
+ accept_encoding=None,
+ user_agent=None,
+ basic_auth=None,
+ proxy_basic_auth=None,
+ disable_cache=None,
+):
+ """
+ Shortcuts for generating request headers.
+
+ :param keep_alive:
+ If ``True``, adds 'connection: keep-alive' header.
+
+ :param accept_encoding:
+ Can be a boolean, list, or string.
+ ``True`` translates to 'gzip,deflate'.
+ List will get joined by comma.
+ String will be used as provided.
+
+ :param user_agent:
+ String representing the user-agent you want, such as
+ "python-urllib3/0.6"
+
+ :param basic_auth:
+ Colon-separated username:password string for 'authorization: basic ...'
+ auth header.
+
+ :param proxy_basic_auth:
+ Colon-separated username:password string for 'proxy-authorization: basic ...'
+ auth header.
+
+ :param disable_cache:
+ If ``True``, adds 'cache-control: no-cache' header.
+
+ Example::
+
+ >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
+ {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
+ >>> make_headers(accept_encoding=True)
+ {'accept-encoding': 'gzip,deflate'}
+ """
+ headers = {}
+ if accept_encoding:
+ if isinstance(accept_encoding, str):
+ pass
+ elif isinstance(accept_encoding, list):
+ accept_encoding = ",".join(accept_encoding)
+ else:
+ accept_encoding = ACCEPT_ENCODING
+ headers["accept-encoding"] = accept_encoding
+
+ if user_agent:
+ headers["user-agent"] = user_agent
+
+ if keep_alive:
+ headers["connection"] = "keep-alive"
+
+ if basic_auth:
+ headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8")
+
+ if proxy_basic_auth:
+ headers["proxy-authorization"] = "Basic " + b64encode(
+ b(proxy_basic_auth)
+ ).decode("utf-8")
+
+ if disable_cache:
+ headers["cache-control"] = "no-cache"
+
+ return headers
+
+
+def set_file_position(body, pos):
+ """
+ If a position is provided, move file to that point.
+ Otherwise, we'll attempt to record a position for future use.
+ """
+ if pos is not None:
+ rewind_body(body, pos)
+ elif getattr(body, "tell", None) is not None:
+ try:
+ pos = body.tell()
+ except (IOError, OSError):
+ # This differentiates from None, allowing us to catch
+ # a failed `tell()` later when trying to rewind the body.
+ pos = _FAILEDTELL
+
+ return pos
+
+
+def rewind_body(body, body_pos):
+ """
+ Attempt to rewind body to a certain position.
+ Primarily used for request redirects and retries.
+
+ :param body:
+ File-like object that supports seek.
+
+    :param int body_pos:
+ Position to seek to in file.
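+
+    A minimal usage sketch (illustrative; pairs with :func:`set_file_position`)::
+
+        body = open("payload.bin", "rb")
+        pos = set_file_position(body, None)  # record the starting offset
+        # ... the request fails and is retried ...
+        rewind_body(body, pos)  # seek back before resending the body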
+ """
+ body_seek = getattr(body, "seek", None)
+ if body_seek is not None and isinstance(body_pos, integer_types):
+ try:
+ body_seek(body_pos)
+ except (IOError, OSError):
+ raise UnrewindableBodyError(
+ "An error occurred when rewinding request body for redirect/retry."
+ )
+ elif body_pos is _FAILEDTELL:
+ raise UnrewindableBodyError(
+ "Unable to record file position for rewinding "
+ "request body during a redirect/retry."
+ )
+ else:
+ raise ValueError(
+ "body_pos must be of type integer, instead it was %s." % type(body_pos)
+ )
diff --git a/third_party/python/urllib3/src/urllib3/util/response.py b/third_party/python/urllib3/src/urllib3/util/response.py
new file mode 100644
index 0000000000..715868dd10
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/util/response.py
@@ -0,0 +1,86 @@
+from __future__ import absolute_import
+from ..packages.six.moves import http_client as httplib
+
+from ..exceptions import HeaderParsingError
+
+
+def is_fp_closed(obj):
+ """
+ Checks whether a given file-like object is closed.
+
+ :param obj:
+ The file-like object to check.
+ """
+
+ try:
+ # Check `isclosed()` first, in case Python3 doesn't set `closed`.
+ # GH Issue #928
+ return obj.isclosed()
+ except AttributeError:
+ pass
+
+ try:
+ # Check via the official file-like-object way.
+ return obj.closed
+ except AttributeError:
+ pass
+
+ try:
+ # Check if the object is a container for another file-like object that
+ # gets released on exhaustion (e.g. HTTPResponse).
+ return obj.fp is None
+ except AttributeError:
+ pass
+
+ raise ValueError("Unable to determine whether fp is closed.")
+
+
+def assert_header_parsing(headers):
+ """
+ Asserts whether all headers have been successfully parsed.
+ Extracts encountered errors from the result of parsing headers.
+
+ Only works on Python 3.
+
+ :param headers: Headers to verify.
+ :type headers: `httplib.HTTPMessage`.
+
+ :raises urllib3.exceptions.HeaderParsingError:
+ If parsing errors are found.
+ """
+
+ # This will fail silently if we pass in the wrong kind of parameter.
+ # To make debugging easier add an explicit check.
+ if not isinstance(headers, httplib.HTTPMessage):
+ raise TypeError("expected httplib.Message, got {0}.".format(type(headers)))
+
+ defects = getattr(headers, "defects", None)
+ get_payload = getattr(headers, "get_payload", None)
+
+ unparsed_data = None
+ if get_payload:
+ # get_payload is actually email.message.Message.get_payload;
+ # we're only interested in the result if it's not a multipart message
+ if not headers.is_multipart():
+ payload = get_payload()
+
+ if isinstance(payload, (bytes, str)):
+ unparsed_data = payload
+
+ if defects or unparsed_data:
+ raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
+
+
+def is_response_to_head(response):
+ """
+    Checks whether the request that produced a response was a HEAD request.
+    Handles the quirks of AppEngine.
+
+    :param response: Response to check.
+    :type response: :class:`httplib.HTTPResponse`
+ """
+ # FIXME: Can we do this somehow without accessing private httplib _method?
+ method = response._method
+ if isinstance(method, int): # Platform-specific: Appengine
+ return method == 3
+ return method.upper() == "HEAD"
diff --git a/third_party/python/urllib3/src/urllib3/util/retry.py b/third_party/python/urllib3/src/urllib3/util/retry.py
new file mode 100644
index 0000000000..ee30c91b14
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/util/retry.py
@@ -0,0 +1,453 @@
+from __future__ import absolute_import
+import time
+import logging
+from collections import namedtuple
+from itertools import takewhile
+import email
+import re
+
+from ..exceptions import (
+ ConnectTimeoutError,
+ MaxRetryError,
+ ProtocolError,
+ ReadTimeoutError,
+ ResponseError,
+ InvalidHeader,
+ ProxyError,
+)
+from ..packages import six
+
+
+log = logging.getLogger(__name__)
+
+
+# Data structure for representing the metadata of requests that result in a retry.
+RequestHistory = namedtuple(
+ "RequestHistory", ["method", "url", "error", "status", "redirect_location"]
+)
+
+
+class Retry(object):
+ """ Retry configuration.
+
+ Each retry attempt will create a new Retry object with updated values, so
+ they can be safely reused.
+
+ Retries can be defined as a default for a pool::
+
+ retries = Retry(connect=5, read=2, redirect=5)
+ http = PoolManager(retries=retries)
+ response = http.request('GET', 'http://example.com/')
+
+ Or per-request (which overrides the default for the pool)::
+
+ response = http.request('GET', 'http://example.com/', retries=Retry(10))
+
+ Retries can be disabled by passing ``False``::
+
+ response = http.request('GET', 'http://example.com/', retries=False)
+
+ Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
+ retries are disabled, in which case the causing exception will be raised.
+
+ :param int total:
+ Total number of retries to allow. Takes precedence over other counts.
+
+ Set to ``None`` to remove this constraint and fall back on other
+ counts. It's a good idea to set this to some sensibly-high value to
+ account for unexpected edge cases and avoid infinite retry loops.
+
+ Set to ``0`` to fail on the first retry.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param int connect:
+ How many connection-related errors to retry on.
+
+ These are errors raised before the request is sent to the remote server,
+ which we assume has not triggered the server to process the request.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int read:
+ How many times to retry on read errors.
+
+ These errors are raised after the request was sent to the server, so the
+ request may have side-effects.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int redirect:
+ How many redirects to perform. Limit this to avoid infinite redirect
+ loops.
+
+        A redirect is an HTTP response with a status code 301, 302, 303, 307 or
+ 308.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param int status:
+ How many times to retry on bad status codes.
+
+ These are retries made on responses, where status code matches
+ ``status_forcelist``.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param iterable method_whitelist:
+ Set of uppercased HTTP method verbs that we should retry on.
+
+ By default, we only retry on methods which are considered to be
+ idempotent (multiple requests with the same parameters end with the
+ same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
+
+ Set to a ``False`` value to retry on any verb.
+
+ :param iterable status_forcelist:
+ A set of integer HTTP status codes that we should force a retry on.
+ A retry is initiated if the request method is in ``method_whitelist``
+ and the response status code is in ``status_forcelist``.
+
+ By default, this is disabled with ``None``.
+
+ :param float backoff_factor:
+ A backoff factor to apply between attempts after the second try
+ (most errors are resolved immediately by a second try without a
+ delay). urllib3 will sleep for::
+
+ {backoff factor} * (2 ** ({number of total retries} - 1))
+
+ seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
+ for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
+ than :attr:`Retry.BACKOFF_MAX`.
+
+ By default, backoff is disabled (set to 0).
+
+ :param bool raise_on_redirect: Whether, if the number of redirects is
+ exhausted, to raise a MaxRetryError, or to return a response with a
+ response code in the 3xx range.
+
+ :param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
+ whether we should raise an exception, or return a response,
+ if status falls in ``status_forcelist`` range and retries have
+ been exhausted.
+
+ :param tuple history: The history of the request encountered during
+ each call to :meth:`~Retry.increment`. The list is in the order
+ the requests occurred. Each list item is of class :class:`RequestHistory`.
+
+ :param bool respect_retry_after_header:
+ Whether to respect Retry-After header on status codes defined as
+ :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.
+
+ :param iterable remove_headers_on_redirect:
+ Sequence of headers to remove from the request when a response
+ indicating a redirect is returned before firing off the redirected
+ request.
+ """
+
+ DEFAULT_METHOD_WHITELIST = frozenset(
+ ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
+ )
+
+ RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
+
+ DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(["Authorization"])
+
+ #: Maximum backoff time.
+ BACKOFF_MAX = 120
+
+ def __init__(
+ self,
+ total=10,
+ connect=None,
+ read=None,
+ redirect=None,
+ status=None,
+ method_whitelist=DEFAULT_METHOD_WHITELIST,
+ status_forcelist=None,
+ backoff_factor=0,
+ raise_on_redirect=True,
+ raise_on_status=True,
+ history=None,
+ respect_retry_after_header=True,
+ remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST,
+ ):
+
+ self.total = total
+ self.connect = connect
+ self.read = read
+ self.status = status
+
+ if redirect is False or total is False:
+ redirect = 0
+ raise_on_redirect = False
+
+ self.redirect = redirect
+ self.status_forcelist = status_forcelist or set()
+ self.method_whitelist = method_whitelist
+ self.backoff_factor = backoff_factor
+ self.raise_on_redirect = raise_on_redirect
+ self.raise_on_status = raise_on_status
+ self.history = history or tuple()
+ self.respect_retry_after_header = respect_retry_after_header
+ self.remove_headers_on_redirect = frozenset(
+ [h.lower() for h in remove_headers_on_redirect]
+ )
+
+ def new(self, **kw):
+ params = dict(
+ total=self.total,
+ connect=self.connect,
+ read=self.read,
+ redirect=self.redirect,
+ status=self.status,
+ method_whitelist=self.method_whitelist,
+ status_forcelist=self.status_forcelist,
+ backoff_factor=self.backoff_factor,
+ raise_on_redirect=self.raise_on_redirect,
+ raise_on_status=self.raise_on_status,
+ history=self.history,
+ remove_headers_on_redirect=self.remove_headers_on_redirect,
+ respect_retry_after_header=self.respect_retry_after_header,
+ )
+ params.update(kw)
+ return type(self)(**params)
+
+ @classmethod
+ def from_int(cls, retries, redirect=True, default=None):
+ """ Backwards-compatibility for the old retries format."""
+ if retries is None:
+ retries = default if default is not None else cls.DEFAULT
+
+ if isinstance(retries, Retry):
+ return retries
+
+ redirect = bool(redirect) and None
+ new_retries = cls(retries, redirect=redirect)
+ log.debug("Converted retries value: %r -> %r", retries, new_retries)
+ return new_retries
+
+ def get_backoff_time(self):
+ """ Formula for computing the current backoff
+
+ :rtype: float
+ """
+ # We want to consider only the last consecutive errors sequence (Ignore redirects).
+ consecutive_errors_len = len(
+ list(
+ takewhile(lambda x: x.redirect_location is None, reversed(self.history))
+ )
+ )
+ if consecutive_errors_len <= 1:
+ return 0
+
+ backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
+ return min(self.BACKOFF_MAX, backoff_value)
+
+ def parse_retry_after(self, retry_after):
+ # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
+ if re.match(r"^\s*[0-9]+\s*$", retry_after):
+ seconds = int(retry_after)
+ else:
+ retry_date_tuple = email.utils.parsedate(retry_after)
+ if retry_date_tuple is None:
+ raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
+ retry_date = time.mktime(retry_date_tuple)
+ seconds = retry_date - time.time()
+
+ if seconds < 0:
+ seconds = 0
+
+ return seconds
+
+ def get_retry_after(self, response):
+ """ Get the value of Retry-After in seconds. """
+
+ retry_after = response.getheader("Retry-After")
+
+ if retry_after is None:
+ return None
+
+ return self.parse_retry_after(retry_after)
+
+ def sleep_for_retry(self, response=None):
+ retry_after = self.get_retry_after(response)
+ if retry_after:
+ time.sleep(retry_after)
+ return True
+
+ return False
+
+ def _sleep_backoff(self):
+ backoff = self.get_backoff_time()
+ if backoff <= 0:
+ return
+ time.sleep(backoff)
+
+ def sleep(self, response=None):
+ """ Sleep between retry attempts.
+
+ This method will respect a server's ``Retry-After`` response header
+ and sleep the duration of the time requested. If that is not present, it
+ will use an exponential backoff. By default, the backoff factor is 0 and
+ this method will return immediately.
+ """
+
+ if self.respect_retry_after_header and response:
+ slept = self.sleep_for_retry(response)
+ if slept:
+ return
+
+ self._sleep_backoff()
+
+ def _is_connection_error(self, err):
+ """ Errors when we're fairly sure that the server did not receive the
+ request, so it should be safe to retry.
+ """
+ if isinstance(err, ProxyError):
+ err = err.original_error
+ return isinstance(err, ConnectTimeoutError)
+
+ def _is_read_error(self, err):
+ """ Errors that occur after the request has been started, so we should
+ assume that the server began processing it.
+ """
+ return isinstance(err, (ReadTimeoutError, ProtocolError))
+
+ def _is_method_retryable(self, method):
+ """ Checks if a given HTTP method should be retried upon, depending if
+ it is included on the method whitelist.
+ """
+ if self.method_whitelist and method.upper() not in self.method_whitelist:
+ return False
+
+ return True
+
+ def is_retry(self, method, status_code, has_retry_after=False):
+ """ Is this method/status code retryable? (Based on whitelists and control
+ variables such as the number of total retries to allow, whether to
+ respect the Retry-After header, whether this header is present, and
+ whether the returned status code is on the list of status codes to
+        be retried upon in the presence of the aforementioned header)
+ """
+ if not self._is_method_retryable(method):
+ return False
+
+ if self.status_forcelist and status_code in self.status_forcelist:
+ return True
+
+ return (
+ self.total
+ and self.respect_retry_after_header
+ and has_retry_after
+ and (status_code in self.RETRY_AFTER_STATUS_CODES)
+ )
+
+ def is_exhausted(self):
+ """ Are we out of retries? """
+ retry_counts = (self.total, self.connect, self.read, self.redirect, self.status)
+ retry_counts = list(filter(None, retry_counts))
+ if not retry_counts:
+ return False
+
+ return min(retry_counts) < 0
+
+ def increment(
+ self,
+ method=None,
+ url=None,
+ response=None,
+ error=None,
+ _pool=None,
+ _stacktrace=None,
+ ):
+ """ Return a new Retry object with incremented retry counters.
+
+ :param response: A response object, or None, if the server did not
+ return a response.
+ :type response: :class:`~urllib3.response.HTTPResponse`
+ :param Exception error: An error encountered during the request, or
+ None if the response was received successfully.
+
+ :return: A new ``Retry`` object.
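+
+        A minimal usage sketch (illustrative; this method is normally called
+        by the connection pool rather than by user code)::
+
+            retries = Retry(total=3)
+            retries = retries.increment(method="GET", url="/", error=None)
+            # retries.total is now 2 and the attempt is recorded in .history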
+ """
+ if self.total is False and error:
+ # Disabled, indicate to re-raise the error.
+ raise six.reraise(type(error), error, _stacktrace)
+
+ total = self.total
+ if total is not None:
+ total -= 1
+
+ connect = self.connect
+ read = self.read
+ redirect = self.redirect
+ status_count = self.status
+ cause = "unknown"
+ status = None
+ redirect_location = None
+
+ if error and self._is_connection_error(error):
+ # Connect retry?
+ if connect is False:
+ raise six.reraise(type(error), error, _stacktrace)
+ elif connect is not None:
+ connect -= 1
+
+ elif error and self._is_read_error(error):
+ # Read retry?
+ if read is False or not self._is_method_retryable(method):
+ raise six.reraise(type(error), error, _stacktrace)
+ elif read is not None:
+ read -= 1
+
+ elif response and response.get_redirect_location():
+ # Redirect retry?
+ if redirect is not None:
+ redirect -= 1
+ cause = "too many redirects"
+ redirect_location = response.get_redirect_location()
+ status = response.status
+
+ else:
+ # Incrementing because of a server error like a 500 in
+            # status_forcelist and the given method is in the whitelist
+ cause = ResponseError.GENERIC_ERROR
+ if response and response.status:
+ if status_count is not None:
+ status_count -= 1
+ cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
+ status = response.status
+
+ history = self.history + (
+ RequestHistory(method, url, error, status, redirect_location),
+ )
+
+ new_retry = self.new(
+ total=total,
+ connect=connect,
+ read=read,
+ redirect=redirect,
+ status=status_count,
+ history=history,
+ )
+
+ if new_retry.is_exhausted():
+ raise MaxRetryError(_pool, url, error or ResponseError(cause))
+
+ log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)
+
+ return new_retry
+
+ def __repr__(self):
+ return (
+ "{cls.__name__}(total={self.total}, connect={self.connect}, "
+ "read={self.read}, redirect={self.redirect}, status={self.status})"
+ ).format(cls=type(self), self=self)
+
+
+# For backwards compatibility (equivalent to pre-v1.9):
+Retry.DEFAULT = Retry(3)
diff --git a/third_party/python/urllib3/src/urllib3/util/ssl_.py b/third_party/python/urllib3/src/urllib3/util/ssl_.py
new file mode 100644
index 0000000000..f7e2b70558
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/util/ssl_.py
@@ -0,0 +1,414 @@
+from __future__ import absolute_import
+import errno
+import warnings
+import hmac
+import sys
+
+from binascii import hexlify, unhexlify
+from hashlib import md5, sha1, sha256
+
+from .url import IPV4_RE, BRACELESS_IPV6_ADDRZ_RE
+from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
+from ..packages import six
+
+
+SSLContext = None
+HAS_SNI = False
+IS_PYOPENSSL = False
+IS_SECURETRANSPORT = False
+
+# Maps the length of a digest to a possible hash function producing this digest
+HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
+
+
+def _const_compare_digest_backport(a, b):
+ """
+ Compare two digests of equal length in constant time.
+
+ The digests must be of type str/bytes.
+ Returns True if the digests match, and False otherwise.
+ """
+ result = abs(len(a) - len(b))
+ for l, r in zip(bytearray(a), bytearray(b)):
+ result |= l ^ r
+ return result == 0
+
+
+_const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport)
+
+try: # Test for SSL features
+ import ssl
+ from ssl import wrap_socket, CERT_REQUIRED
+ from ssl import HAS_SNI # Has SNI?
+except ImportError:
+ pass
+
+try: # Platform-specific: Python 3.6
+ from ssl import PROTOCOL_TLS
+
+ PROTOCOL_SSLv23 = PROTOCOL_TLS
+except ImportError:
+ try:
+ from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS
+
+ PROTOCOL_SSLv23 = PROTOCOL_TLS
+ except ImportError:
+ PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
+
+
+try:
+ from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
+except ImportError:
+ OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
+ OP_NO_COMPRESSION = 0x20000
+
+
+# A secure default.
+# Sources for more information on TLS ciphers:
+#
+# - https://wiki.mozilla.org/Security/Server_Side_TLS
+# - https://www.ssllabs.com/projects/best-practices/index.html
+# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
+#
+# The general intent is:
+# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
+# - prefer ECDHE over DHE for better performance,
+# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and
+# security,
+# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common,
+# - disable NULL authentication, MD5 MACs, DSS, and other
+# insecure ciphers for security reasons.
+# - NOTE: TLS 1.3 cipher suites are managed through a different interface
+# not exposed by CPython (yet!) and are enabled by default if they're available.
+DEFAULT_CIPHERS = ":".join(
+ [
+ "ECDHE+AESGCM",
+ "ECDHE+CHACHA20",
+ "DHE+AESGCM",
+ "DHE+CHACHA20",
+ "ECDH+AESGCM",
+ "DH+AESGCM",
+ "ECDH+AES",
+ "DH+AES",
+ "RSA+AESGCM",
+ "RSA+AES",
+ "!aNULL",
+ "!eNULL",
+ "!MD5",
+ "!DSS",
+ ]
+)
+
+try:
+ from ssl import SSLContext # Modern SSL?
+except ImportError:
+
+ class SSLContext(object): # Platform-specific: Python 2
+ def __init__(self, protocol_version):
+ self.protocol = protocol_version
+ # Use default values from a real SSLContext
+ self.check_hostname = False
+ self.verify_mode = ssl.CERT_NONE
+ self.ca_certs = None
+ self.options = 0
+ self.certfile = None
+ self.keyfile = None
+ self.ciphers = None
+
+ def load_cert_chain(self, certfile, keyfile):
+ self.certfile = certfile
+ self.keyfile = keyfile
+
+ def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+ self.ca_certs = cafile
+
+ if capath is not None:
+ raise SSLError("CA directories not supported in older Pythons")
+
+ if cadata is not None:
+ raise SSLError("CA data not supported in older Pythons")
+
+ def set_ciphers(self, cipher_suite):
+ self.ciphers = cipher_suite
+
+ def wrap_socket(self, socket, server_hostname=None, server_side=False):
+ warnings.warn(
+ "A true SSLContext object is not available. This prevents "
+ "urllib3 from configuring SSL appropriately and may cause "
+ "certain SSL connections to fail. You can upgrade to a newer "
+ "version of Python to solve this. For more information, see "
+ "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
+ "#ssl-warnings",
+ InsecurePlatformWarning,
+ )
+ kwargs = {
+ "keyfile": self.keyfile,
+ "certfile": self.certfile,
+ "ca_certs": self.ca_certs,
+ "cert_reqs": self.verify_mode,
+ "ssl_version": self.protocol,
+ "server_side": server_side,
+ }
+ return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
+
+
+def assert_fingerprint(cert, fingerprint):
+ """
+ Checks if given fingerprint matches the supplied certificate.
+
+ :param cert:
+ Certificate as bytes object.
+ :param fingerprint:
+ Fingerprint as string of hexdigits, can be interspersed by colons.
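+
+    A minimal usage sketch (illustrative; ``sock`` is an established TLS
+    socket and the pinned digest is a placeholder)::
+
+        der_cert = sock.getpeercert(binary_form=True)
+        pinned = "aa" * 32  # placeholder SHA-256 hex digest (64 hex characters)
+        assert_fingerprint(der_cert, pinned)  # raises SSLError on mismatch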
+ """
+
+ fingerprint = fingerprint.replace(":", "").lower()
+ digest_length = len(fingerprint)
+ hashfunc = HASHFUNC_MAP.get(digest_length)
+ if not hashfunc:
+ raise SSLError("Fingerprint of invalid length: {0}".format(fingerprint))
+
+    # We need encode() here for py32; works on py2 and py3.
+ fingerprint_bytes = unhexlify(fingerprint.encode())
+
+ cert_digest = hashfunc(cert).digest()
+
+ if not _const_compare_digest(cert_digest, fingerprint_bytes):
+ raise SSLError(
+ 'Fingerprints did not match. Expected "{0}", got "{1}".'.format(
+ fingerprint, hexlify(cert_digest)
+ )
+ )
+
+
+def resolve_cert_reqs(candidate):
+ """
+ Resolves the argument to a numeric constant, which can be passed to
+ the wrap_socket function/method from the ssl module.
+ Defaults to :data:`ssl.CERT_REQUIRED`.
+ If given a string it is assumed to be the name of the constant in the
+ :mod:`ssl` module or its abbreviation.
+    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
+ If it's neither `None` nor a string we assume it is already the numeric
+ constant which can directly be passed to wrap_socket.
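+
+    Example (illustrative)::
+
+        >>> import ssl
+        >>> resolve_cert_reqs(None) == ssl.CERT_REQUIRED
+        True
+        >>> resolve_cert_reqs("REQUIRED") == ssl.CERT_REQUIRED
+        True
+        >>> resolve_cert_reqs("CERT_NONE") == ssl.CERT_NONE
+        True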
+ """
+ if candidate is None:
+ return CERT_REQUIRED
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, "CERT_" + candidate)
+ return res
+
+ return candidate
+
+
+def resolve_ssl_version(candidate):
+ """
+    Like resolve_cert_reqs(), but resolves the candidate to an SSL/TLS
+    protocol constant. Defaults to :data:`PROTOCOL_TLS`.
+ """
+ if candidate is None:
+ return PROTOCOL_TLS
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, "PROTOCOL_" + candidate)
+ return res
+
+ return candidate
+
+
+def create_urllib3_context(
+ ssl_version=None, cert_reqs=None, options=None, ciphers=None
+):
+ """All arguments have the same meaning as ``ssl_wrap_socket``.
+
+ By default, this function does a lot of the same work that
+ ``ssl.create_default_context`` does on Python 3.4+. It:
+
+ - Disables SSLv2, SSLv3, and compression
+ - Sets a restricted set of server ciphers
+
+ If you wish to enable SSLv3, you can do::
+
+ from urllib3.util import ssl_
+ context = ssl_.create_urllib3_context()
+ context.options &= ~ssl_.OP_NO_SSLv3
+
+ You can do the same to enable compression (substituting ``COMPRESSION``
+ for ``SSLv3`` in the last line above).
+
+ :param ssl_version:
+ The desired protocol version to use. This will default to
+ PROTOCOL_SSLv23 which will negotiate the highest protocol that both
+ the server and your installation of OpenSSL support.
+ :param cert_reqs:
+ Whether to require the certificate verification. This defaults to
+ ``ssl.CERT_REQUIRED``.
+ :param options:
+ Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
+ ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
+ :param ciphers:
+ Which cipher suites to allow the server to select.
+ :returns:
+ Constructed SSLContext object with specified options
+ :rtype: SSLContext
+ """
+ context = SSLContext(ssl_version or PROTOCOL_TLS)
+
+ context.set_ciphers(ciphers or DEFAULT_CIPHERS)
+
+ # Setting the default here, as we may have no ssl module on import
+ cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
+
+ if options is None:
+ options = 0
+ # SSLv2 is easily broken and is considered harmful and dangerous
+ options |= OP_NO_SSLv2
+ # SSLv3 has several problems and is now dangerous
+ options |= OP_NO_SSLv3
+ # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
+ # (issue #309)
+ options |= OP_NO_COMPRESSION
+
+ context.options |= options
+
+ # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is
+ # necessary for conditional client cert authentication with TLS 1.3.
+ # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older
+ # versions of Python. We only enable on Python 3.7.4+ or if certificate
+ # verification is enabled to work around Python issue #37428
+ # See: https://bugs.python.org/issue37428
+ if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr(
+ context, "post_handshake_auth", None
+ ) is not None:
+ context.post_handshake_auth = True
+
+ context.verify_mode = cert_reqs
+ if (
+ getattr(context, "check_hostname", None) is not None
+ ): # Platform-specific: Python 3.2
+ # We do our own verification, including fingerprints and alternative
+ # hostnames. So disable it here
+ context.check_hostname = False
+ return context
+
+
+def ssl_wrap_socket(
+ sock,
+ keyfile=None,
+ certfile=None,
+ cert_reqs=None,
+ ca_certs=None,
+ server_hostname=None,
+ ssl_version=None,
+ ciphers=None,
+ ssl_context=None,
+ ca_cert_dir=None,
+ key_password=None,
+ ca_cert_data=None,
+):
+ """
+ All arguments except for server_hostname, ssl_context, and ca_cert_dir have
+ the same meaning as they do when using :func:`ssl.wrap_socket`.
+
+ :param server_hostname:
+ When SNI is supported, the expected hostname of the certificate
+ :param ssl_context:
+ A pre-made :class:`SSLContext` object. If none is provided, one will
+ be created using :func:`create_urllib3_context`.
+ :param ciphers:
+ A string of ciphers we wish the client to support.
+ :param ca_cert_dir:
+ A directory containing CA certificates in multiple separate files, as
+ supported by OpenSSL's -CApath flag or the capath argument to
+ SSLContext.load_verify_locations().
+ :param key_password:
+ Optional password if the keyfile is encrypted.
+ :param ca_cert_data:
+ Optional string containing CA certificates in PEM format suitable for
+ passing as the cadata parameter to SSLContext.load_verify_locations()
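+
+    A minimal usage sketch (illustrative; the CA bundle path is a
+    placeholder)::
+
+        import socket
+        sock = socket.create_connection(("example.com", 443))
+        ssl_sock = ssl_wrap_socket(
+            sock, ca_certs="/path/to/cacert.pem", server_hostname="example.com"
+        )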
+ """
+ context = ssl_context
+ if context is None:
+ # Note: This branch of code and all the variables in it are no longer
+ # used by urllib3 itself. We should consider deprecating and removing
+ # this code.
+ context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
+
+ if ca_certs or ca_cert_dir or ca_cert_data:
+ try:
+ context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
+ except IOError as e: # Platform-specific: Python 2.7
+ raise SSLError(e)
+ # Py33 raises FileNotFoundError which subclasses OSError
+ # These are not equivalent unless we check the errno attribute
+ except OSError as e: # Platform-specific: Python 3.3 and beyond
+ if e.errno == errno.ENOENT:
+ raise SSLError(e)
+ raise
+
+ elif ssl_context is None and hasattr(context, "load_default_certs"):
+        # try to load OS default certs; works well on Windows (requires Python 3.4+)
+ context.load_default_certs()
+
+ # Attempt to detect if we get the goofy behavior of the
+ # keyfile being encrypted and OpenSSL asking for the
+ # passphrase via the terminal and instead error out.
+ if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
+ raise SSLError("Client private key is encrypted, password is required")
+
+ if certfile:
+ if key_password is None:
+ context.load_cert_chain(certfile, keyfile)
+ else:
+ context.load_cert_chain(certfile, keyfile, key_password)
+
+ # If we detect server_hostname is an IP address then the SNI
+ # extension should not be used according to RFC3546 Section 3.1
+ # We shouldn't warn the user if SNI isn't available but we would
+ # not be using SNI anyways due to IP address for server_hostname.
+ if (
+ server_hostname is not None and not is_ipaddress(server_hostname)
+ ) or IS_SECURETRANSPORT:
+ if HAS_SNI and server_hostname is not None:
+ return context.wrap_socket(sock, server_hostname=server_hostname)
+
+ warnings.warn(
+ "An HTTPS request has been made, but the SNI (Server Name "
+ "Indication) extension to TLS is not available on this platform. "
+ "This may cause the server to present an incorrect TLS "
+ "certificate, which can cause validation failures. You can upgrade to "
+ "a newer version of Python to solve this. For more information, see "
+ "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
+ "#ssl-warnings",
+ SNIMissingWarning,
+ )
+
+ return context.wrap_socket(sock)
+
+
+def is_ipaddress(hostname):
+ """Detects whether the hostname given is an IPv4 or IPv6 address.
+ Also detects IPv6 addresses with Zone IDs.
+
+ :param str hostname: Hostname to examine.
+ :return: True if the hostname is an IP address, False otherwise.
+ """
+ if not six.PY2 and isinstance(hostname, bytes):
+ # IDN A-label bytes are ASCII compatible.
+ hostname = hostname.decode("ascii")
+ return bool(IPV4_RE.match(hostname) or BRACELESS_IPV6_ADDRZ_RE.match(hostname))
+
+
+def _is_key_file_encrypted(key_file):
+ """Detects if a key file is encrypted or not."""
+ with open(key_file, "r") as f:
+ for line in f:
+ # Look for Proc-Type: 4,ENCRYPTED
+ if "ENCRYPTED" in line:
+ return True
+
+ return False
diff --git a/third_party/python/urllib3/src/urllib3/util/timeout.py b/third_party/python/urllib3/src/urllib3/util/timeout.py
new file mode 100644
index 0000000000..b61fea75c5
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/util/timeout.py
@@ -0,0 +1,261 @@
+from __future__ import absolute_import
+
+# The default socket timeout, used by httplib to indicate that no timeout was
+# specified by the user
+from socket import _GLOBAL_DEFAULT_TIMEOUT
+import time
+
+from ..exceptions import TimeoutStateError
+
+# A sentinel value to indicate that no timeout was specified by the user in
+# urllib3
+_Default = object()
+
+
+# Use time.monotonic if available.
+current_time = getattr(time, "monotonic", time.time)
+
+
+class Timeout(object):
+ """ Timeout configuration.
+
+ Timeouts can be defined as a default for a pool::
+
+ timeout = Timeout(connect=2.0, read=7.0)
+ http = PoolManager(timeout=timeout)
+ response = http.request('GET', 'http://example.com/')
+
+ Or per-request (which overrides the default for the pool)::
+
+ response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
+
+ Timeouts can be disabled by setting all the parameters to ``None``::
+
+ no_timeout = Timeout(connect=None, read=None)
+        response = http.request('GET', 'http://example.com/', timeout=no_timeout)
+
+
+ :param total:
+ This combines the connect and read timeouts into one; the read timeout
+ will be set to the time leftover from the connect attempt. In the
+ event that both a connect timeout and a total are specified, or a read
+ timeout and a total are specified, the shorter timeout will be applied.
+
+ Defaults to None.
+
+ :type total: integer, float, or None
+
+ :param connect:
+ The maximum amount of time (in seconds) to wait for a connection
+ attempt to a server to succeed. Omitting the parameter will default the
+ connect timeout to the system default, probably `the global default
+ timeout in socket.py
+ <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
+ None will set an infinite timeout for connection attempts.
+
+ :type connect: integer, float, or None
+
+ :param read:
+ The maximum amount of time (in seconds) to wait between consecutive
+ read operations for a response from the server. Omitting the parameter
+ will default the read timeout to the system default, probably `the
+ global default timeout in socket.py
+ <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
+ None will set an infinite timeout.
+
+ :type read: integer, float, or None
+
+ .. note::
+
+ Many factors can affect the total amount of time for urllib3 to return
+ an HTTP response.
+
+ For example, Python's DNS resolver does not obey the timeout specified
+ on the socket. Other factors that can affect total request time include
+ high CPU load, high swap, the program running at a low priority level,
+ or other behaviors.
+
+ In addition, the read and total timeouts only measure the time between
+ read operations on the socket connecting the client and the server,
+ not the total amount of time for the request to return a complete
+ response. For most requests, the timeout is raised because the server
+ has not sent the first byte in the specified time. This is not always
+ the case; if a server streams one byte every fifteen seconds, a timeout
+ of 20 seconds will not trigger, even though the request will take
+ several minutes to complete.
+
+ If your goal is to cut off any request after a set amount of wall clock
+ time, consider having a second "watcher" thread to cut off a slow
+ request.
+ """
+
+ #: A sentinel object representing the default timeout value
+ DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
+
+ def __init__(self, total=None, connect=_Default, read=_Default):
+ self._connect = self._validate_timeout(connect, "connect")
+ self._read = self._validate_timeout(read, "read")
+ self.total = self._validate_timeout(total, "total")
+ self._start_connect = None
+
+ def __repr__(self):
+ return "%s(connect=%r, read=%r, total=%r)" % (
+ type(self).__name__,
+ self._connect,
+ self._read,
+ self.total,
+ )
+
+ # __str__ provided for backwards compatibility
+ __str__ = __repr__
+
+ @classmethod
+ def _validate_timeout(cls, value, name):
+ """ Check that a timeout attribute is valid.
+
+ :param value: The timeout value to validate
+ :param name: The name of the timeout attribute to validate. This is
+            used in error messages.
+ :return: The validated and casted version of the given value.
+ :raises ValueError: If it is a numeric value less than or equal to
+ zero, or the type is not an integer, float, or None.
+ """
+ if value is _Default:
+ return cls.DEFAULT_TIMEOUT
+
+ if value is None or value is cls.DEFAULT_TIMEOUT:
+ return value
+
+ if isinstance(value, bool):
+ raise ValueError(
+ "Timeout cannot be a boolean value. It must "
+ "be an int, float or None."
+ )
+ try:
+ float(value)
+ except (TypeError, ValueError):
+ raise ValueError(
+ "Timeout value %s was %s, but it must be an "
+ "int, float or None." % (name, value)
+ )
+
+ try:
+ if value <= 0:
+ raise ValueError(
+ "Attempted to set %s timeout to %s, but the "
+ "timeout cannot be set to a value less "
+ "than or equal to 0." % (name, value)
+ )
+ except TypeError:
+ # Python 3
+ raise ValueError(
+ "Timeout value %s was %s, but it must be an "
+ "int, float or None." % (name, value)
+ )
+
+ return value
+
+ @classmethod
+ def from_float(cls, timeout):
+ """ Create a new Timeout from a legacy timeout value.
+
+ The timeout value used by httplib.py sets the same timeout on the
+ connect(), and recv() socket requests. This creates a :class:`Timeout`
+ object that sets the individual timeouts to the ``timeout`` value
+ passed to this function.
+
+ :param timeout: The legacy timeout value.
+ :type timeout: integer, float, sentinel default object, or None
+ :return: Timeout object
+ :rtype: :class:`Timeout`
+ """
+ return Timeout(read=timeout, connect=timeout)
+
+ def clone(self):
+ """ Create a copy of the timeout object
+
+ Timeout properties are stored per-pool but each request needs a fresh
+ Timeout object to ensure each one has its own start/stop configured.
+
+ :return: a copy of the timeout object
+ :rtype: :class:`Timeout`
+ """
+ # We can't use copy.deepcopy because that will also create a new object
+ # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
+ # detect the user default.
+ return Timeout(connect=self._connect, read=self._read, total=self.total)
+
+ def start_connect(self):
+ """ Start the timeout clock, used during a connect() attempt
+
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
+ to start a timer that has been started already.
+ """
+ if self._start_connect is not None:
+ raise TimeoutStateError("Timeout timer has already been started.")
+ self._start_connect = current_time()
+ return self._start_connect
+
+ def get_connect_duration(self):
+ """ Gets the time elapsed since the call to :meth:`start_connect`.
+
+ :return: Elapsed time in seconds.
+ :rtype: float
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
+ to get duration for a timer that hasn't been started.
+ """
+ if self._start_connect is None:
+ raise TimeoutStateError(
+ "Can't get connect duration for timer that has not started."
+ )
+ return current_time() - self._start_connect
+
+ @property
+ def connect_timeout(self):
+ """ Get the value to use when setting a connection timeout.
+
+ This will be a positive float or integer, the value None
+ (never timeout), or the default system timeout.
+
+ :return: Connect timeout.
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+ """
+ if self.total is None:
+ return self._connect
+
+ if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
+ return self.total
+
+ return min(self._connect, self.total)
+
+ @property
+ def read_timeout(self):
+ """ Get the value for the read timeout.
+
+ This assumes some time has elapsed in the connection timeout and
+ computes the read timeout appropriately.
+
+ If self.total is set, the read timeout is dependent on the amount of
+ time taken by the connect timeout. If the connection time has not been
+ established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
+ raised.
+
+ :return: Value to use for the read timeout.
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+ :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
+ has not yet been called on this object.
+ """
+ if (
+ self.total is not None
+ and self.total is not self.DEFAULT_TIMEOUT
+ and self._read is not None
+ and self._read is not self.DEFAULT_TIMEOUT
+ ):
+ # In case the connect timeout has not yet been established.
+ if self._start_connect is None:
+ return self._read
+ return max(0, min(self.total - self.get_connect_duration(), self._read))
+ elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
+ return max(0, self.total - self.get_connect_duration())
+ else:
+ return self._read
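
A minimal usage sketch of the Timeout API above (illustrative only, not part of the vendored file; the values are made up and the printed results are approximate):

    from urllib3.util.timeout import Timeout

    # Per-pool defaults; each request should work on a clone() so the
    # start/stop bookkeeping is not shared between requests.
    pool_timeout = Timeout(connect=2.0, read=7.0, total=5.0)
    t = pool_timeout.clone()

    t.start_connect()          # starts the connect clock
    print(t.connect_timeout)   # min(connect, total) == 2.0
    # read_timeout shrinks as connect time elapses:
    # max(0, min(total - get_connect_duration(), read))
    print(t.read_timeout)
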
diff --git a/third_party/python/urllib3/src/urllib3/util/url.py b/third_party/python/urllib3/src/urllib3/util/url.py
new file mode 100644
index 0000000000..793324e5fd
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/util/url.py
@@ -0,0 +1,430 @@
+from __future__ import absolute_import
+import re
+from collections import namedtuple
+
+from ..exceptions import LocationParseError
+from ..packages import six
+
+
+url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"]
+
+# We only want to normalize urls with an HTTP(S) scheme.
+# urllib3 infers URLs without a scheme (None) to be http.
+NORMALIZABLE_SCHEMES = ("http", "https", None)
+
+# Almost all of these patterns were derived from the
+# 'rfc3986' module: https://github.com/python-hyper/rfc3986
+PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}")
+SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)")
+URI_RE = re.compile(
+ r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?"
+ r"(?://([^\\/?#]*))?"
+ r"([^?#]*)"
+ r"(?:\?([^#]*))?"
+ r"(?:#(.*))?$",
+ re.UNICODE | re.DOTALL,
+)
+
+IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}"
+HEX_PAT = "[0-9A-Fa-f]{1,4}"
+LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT)
+_subs = {"hex": HEX_PAT, "ls32": LS32_PAT}
+_variations = [
+ # 6( h16 ":" ) ls32
+ "(?:%(hex)s:){6}%(ls32)s",
+ # "::" 5( h16 ":" ) ls32
+ "::(?:%(hex)s:){5}%(ls32)s",
+ # [ h16 ] "::" 4( h16 ":" ) ls32
+ "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s",
+ # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
+ "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s",
+ # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
+ "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s",
+ # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
+ "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s",
+ # [ *4( h16 ":" ) h16 ] "::" ls32
+ "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s",
+ # [ *5( h16 ":" ) h16 ] "::" h16
+ "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s",
+ # [ *6( h16 ":" ) h16 ] "::"
+ "(?:(?:%(hex)s:){0,6}%(hex)s)?::",
+]
+
+UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
+IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
+ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
+IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
+REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*"
+TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")
+
+IPV4_RE = re.compile("^" + IPV4_PAT + "$")
+IPV6_RE = re.compile("^" + IPV6_PAT + "$")
+IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$")
+BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
+ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")
+
+SUBAUTHORITY_PAT = (u"^(?:(.*)@)?(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
+ REG_NAME_PAT,
+ IPV4_PAT,
+ IPV6_ADDRZ_PAT,
+)
+SUBAUTHORITY_RE = re.compile(SUBAUTHORITY_PAT, re.UNICODE | re.DOTALL)
+
+UNRESERVED_CHARS = set(
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~"
+)
+SUB_DELIM_CHARS = set("!$&'()*+,;=")
+USERINFO_CHARS = UNRESERVED_CHARS | SUB_DELIM_CHARS | {":"}
+PATH_CHARS = USERINFO_CHARS | {"@", "/"}
+QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"}
+
+
+class Url(namedtuple("Url", url_attrs)):
+ """
+ Data structure for representing an HTTP URL. Used as a return value for
+ :func:`parse_url`. Both the scheme and host are normalized as they are
+ both case-insensitive according to RFC 3986.
+ """
+
+ __slots__ = ()
+
+ def __new__(
+ cls,
+ scheme=None,
+ auth=None,
+ host=None,
+ port=None,
+ path=None,
+ query=None,
+ fragment=None,
+ ):
+ if path and not path.startswith("/"):
+ path = "/" + path
+ if scheme is not None:
+ scheme = scheme.lower()
+ return super(Url, cls).__new__(
+ cls, scheme, auth, host, port, path, query, fragment
+ )
+
+ @property
+ def hostname(self):
+ """For backwards-compatibility with urlparse. We're nice like that."""
+ return self.host
+
+ @property
+ def request_uri(self):
+ """Absolute path including the query string."""
+ uri = self.path or "/"
+
+ if self.query is not None:
+ uri += "?" + self.query
+
+ return uri
+
+ @property
+ def netloc(self):
+ """Network location including host and port"""
+ if self.port:
+ return "%s:%d" % (self.host, self.port)
+ return self.host
+
+ @property
+ def url(self):
+ """
+ Convert self into a url
+
+ This function should more or less round-trip with :func:`.parse_url`. The
+ returned url may not be exactly the same as the url inputted to
+ :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
+ with a blank port will have : removed).
+
+ Example: ::
+
+ >>> U = parse_url('http://google.com/mail/')
+ >>> U.url
+ 'http://google.com/mail/'
+ >>> Url('http', 'username:password', 'host.com', 80,
+ ... '/path', 'query', 'fragment').url
+ 'http://username:password@host.com:80/path?query#fragment'
+ """
+ scheme, auth, host, port, path, query, fragment = self
+ url = u""
+
+        # We use "is not None" because we want things to happen with empty strings (or 0 port)
+ if scheme is not None:
+ url += scheme + u"://"
+ if auth is not None:
+ url += auth + u"@"
+ if host is not None:
+ url += host
+ if port is not None:
+ url += u":" + str(port)
+ if path is not None:
+ url += path
+ if query is not None:
+ url += u"?" + query
+ if fragment is not None:
+ url += u"#" + fragment
+
+ return url
+
+ def __str__(self):
+ return self.url
+
+
+def split_first(s, delims):
+ """
+ .. deprecated:: 1.25
+
+ Given a string and an iterable of delimiters, split on the first found
+ delimiter. Return two split parts and the matched delimiter.
+
+ If not found, then the first part is the full input string.
+
+ Example::
+
+ >>> split_first('foo/bar?baz', '?/=')
+ ('foo', 'bar?baz', '/')
+ >>> split_first('foo/bar?baz', '123')
+ ('foo/bar?baz', '', None)
+
+    Scales linearly with the number of delims. Not ideal for a large number of delims.
+ """
+ min_idx = None
+ min_delim = None
+ for d in delims:
+ idx = s.find(d)
+ if idx < 0:
+ continue
+
+ if min_idx is None or idx < min_idx:
+ min_idx = idx
+ min_delim = d
+
+ if min_idx is None or min_idx < 0:
+ return s, "", None
+
+ return s[:min_idx], s[min_idx + 1 :], min_delim
+
+
+def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"):
+    """Percent-encodes a URI component without re-encoding
+    an already percent-encoded component.
+ """
+ if component is None:
+ return component
+
+ component = six.ensure_text(component)
+
+ # Normalize existing percent-encoded bytes.
+ # Try to see if the component we're encoding is already percent-encoded
+ # so we can skip all '%' characters but still encode all others.
+ component, percent_encodings = PERCENT_RE.subn(
+ lambda match: match.group(0).upper(), component
+ )
+
+ uri_bytes = component.encode("utf-8", "surrogatepass")
+ is_percent_encoded = percent_encodings == uri_bytes.count(b"%")
+ encoded_component = bytearray()
+
+ for i in range(0, len(uri_bytes)):
+ # Will return a single character bytestring on both Python 2 & 3
+ byte = uri_bytes[i : i + 1]
+ byte_ord = ord(byte)
+ if (is_percent_encoded and byte == b"%") or (
+ byte_ord < 128 and byte.decode() in allowed_chars
+ ):
+ encoded_component += byte
+ continue
+ encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))
+
+ return encoded_component.decode(encoding)
+
+
+def _remove_path_dot_segments(path):
+ # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code
+ segments = path.split("/") # Turn the path into a list of segments
+ output = [] # Initialize the variable to use to store output
+
+ for segment in segments:
+        # '.' is the current directory, so ignore it; it is superfluous
+ if segment == ".":
+ continue
+        # Anything other than '..' should be appended to the output
+ elif segment != "..":
+ output.append(segment)
+ # In this case segment == '..', if we can, we should pop the last
+ # element
+ elif output:
+ output.pop()
+
+ # If the path starts with '/' and the output is empty or the first string
+ # is non-empty
+ if path.startswith("/") and (not output or output[0]):
+ output.insert(0, "")
+
+ # If the path starts with '/.' or '/..' ensure we add one more empty
+ # string to add a trailing '/'
+ if path.endswith(("/.", "/..")):
+ output.append("")
+
+ return "/".join(output)
+
+
+def _normalize_host(host, scheme):
+ if host:
+ if isinstance(host, six.binary_type):
+ host = six.ensure_str(host)
+
+ if scheme in NORMALIZABLE_SCHEMES:
+ is_ipv6 = IPV6_ADDRZ_RE.match(host)
+ if is_ipv6:
+ match = ZONE_ID_RE.search(host)
+ if match:
+ start, end = match.span(1)
+ zone_id = host[start:end]
+
+ if zone_id.startswith("%25") and zone_id != "%25":
+ zone_id = zone_id[3:]
+ else:
+ zone_id = zone_id[1:]
+ zone_id = "%" + _encode_invalid_chars(zone_id, UNRESERVED_CHARS)
+ return host[:start].lower() + zone_id + host[end:]
+ else:
+ return host.lower()
+ elif not IPV4_RE.match(host):
+ return six.ensure_str(
+ b".".join([_idna_encode(label) for label in host.split(".")])
+ )
+ return host
+
+
+def _idna_encode(name):
+ if name and any([ord(x) > 128 for x in name]):
+ try:
+ import idna
+ except ImportError:
+ six.raise_from(
+ LocationParseError("Unable to parse URL without the 'idna' module"),
+ None,
+ )
+ try:
+ return idna.encode(name.lower(), strict=True, std3_rules=True)
+ except idna.IDNAError:
+ six.raise_from(
+ LocationParseError(u"Name '%s' is not a valid IDNA label" % name), None
+ )
+ return name.lower().encode("ascii")
+
+
+def _encode_target(target):
+ """Percent-encodes a request target so that there are no invalid characters"""
+ path, query = TARGET_RE.match(target).groups()
+ target = _encode_invalid_chars(path, PATH_CHARS)
+ query = _encode_invalid_chars(query, QUERY_CHARS)
+ if query is not None:
+ target += "?" + query
+ return target
+
+
+def parse_url(url):
+ """
+ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
+ performed to parse incomplete urls. Fields not provided will be None.
+ This parser is RFC 3986 compliant.
+
+ The parser logic and helper functions are based heavily on
+ work done in the ``rfc3986`` module.
+
+ :param str url: URL to parse into a :class:`.Url` namedtuple.
+
+ Partly backwards-compatible with :mod:`urlparse`.
+
+ Example::
+
+ >>> parse_url('http://google.com/mail/')
+ Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
+ >>> parse_url('google.com:80')
+ Url(scheme=None, host='google.com', port=80, path=None, ...)
+ >>> parse_url('/foo?bar')
+ Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
+ """
+ if not url:
+ # Empty
+ return Url()
+
+ source_url = url
+ if not SCHEME_RE.search(url):
+ url = "//" + url
+
+ try:
+ scheme, authority, path, query, fragment = URI_RE.match(url).groups()
+ normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES
+
+ if scheme:
+ scheme = scheme.lower()
+
+ if authority:
+ auth, host, port = SUBAUTHORITY_RE.match(authority).groups()
+ if auth and normalize_uri:
+ auth = _encode_invalid_chars(auth, USERINFO_CHARS)
+ if port == "":
+ port = None
+ else:
+ auth, host, port = None, None, None
+
+ if port is not None:
+ port = int(port)
+ if not (0 <= port <= 65535):
+ raise LocationParseError(url)
+
+ host = _normalize_host(host, scheme)
+
+ if normalize_uri and path:
+ path = _remove_path_dot_segments(path)
+ path = _encode_invalid_chars(path, PATH_CHARS)
+ if normalize_uri and query:
+ query = _encode_invalid_chars(query, QUERY_CHARS)
+ if normalize_uri and fragment:
+ fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS)
+
+ except (ValueError, AttributeError):
+ return six.raise_from(LocationParseError(source_url), None)
+
+ # For the sake of backwards compatibility we put empty
+ # string values for path if there are any defined values
+ # beyond the path in the URL.
+ # TODO: Remove this when we break backwards compatibility.
+ if not path:
+ if query is not None or fragment is not None:
+ path = ""
+ else:
+ path = None
+
+ # Ensure that each part of the URL is a `str` for
+ # backwards compatibility.
+ if isinstance(url, six.text_type):
+ ensure_func = six.ensure_text
+ else:
+ ensure_func = six.ensure_str
+
+ def ensure_type(x):
+ return x if x is None else ensure_func(x)
+
+ return Url(
+ scheme=ensure_type(scheme),
+ auth=ensure_type(auth),
+ host=ensure_type(host),
+ port=port,
+ path=ensure_type(path),
+ query=ensure_type(query),
+ fragment=ensure_type(fragment),
+ )
+
+
+def get_host(url):
+ """
+ Deprecated. Use :func:`parse_url` instead.
+ """
+ p = parse_url(url)
+ return p.scheme or "http", p.hostname, p.port
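
A short illustrative sketch of the url helpers above (not part of the vendored file); the underscored names are module-internal, and the expected outputs in the comments follow from the code as written:

    from urllib3.util.url import (
        PATH_CHARS,
        _encode_invalid_chars,
        _encode_target,
        _remove_path_dot_segments,
        parse_url,
    )

    # RFC 3986 section 5.2.4 dot-segment removal:
    print(_remove_path_dot_segments("/a/b/c/./../../g"))   # "/a/g"

    # Invalid characters are percent-encoded; existing escapes are kept
    # (and upper-cased) instead of being double-encoded:
    print(_encode_invalid_chars("/a b/%2fc", PATH_CHARS))  # "/a%20b/%2Fc"

    # Request targets get the same treatment for path and query:
    print(_encode_target("/path?q=a b"))                   # "/path?q=a%20b"

    # parse_url() lower-cases scheme and host and normalizes the path:
    print(parse_url("HTTP://Example.COM/a b").url)         # "http://example.com/a%20b"
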
diff --git a/third_party/python/urllib3/src/urllib3/util/wait.py b/third_party/python/urllib3/src/urllib3/util/wait.py
new file mode 100644
index 0000000000..d71d2fd722
--- /dev/null
+++ b/third_party/python/urllib3/src/urllib3/util/wait.py
@@ -0,0 +1,153 @@
+import errno
+from functools import partial
+import select
+import sys
+
+try:
+ from time import monotonic
+except ImportError:
+ from time import time as monotonic
+
+__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"]
+
+
+class NoWayToWaitForSocketError(Exception):
+ pass
+
+
+# How should we wait on sockets?
+#
+# There are two types of APIs you can use for waiting on sockets: the fancy
+# modern stateful APIs like epoll/kqueue, and the older stateless APIs like
+# select/poll. The stateful APIs are more efficient when you have a lot of
+# sockets to keep track of, because you can set them up once and then use them
+# lots of times. But we only ever want to wait on a single socket at a time
+# and don't want to keep track of state, so the stateless APIs are actually
+# more efficient. So we want to use select() or poll().
+#
+# Now, how do we choose between select() and poll()? On traditional Unixes,
+# select() has a strange calling convention that makes it slow, or fail
+# altogether, for high-numbered file descriptors. The point of poll() is to fix
+# that, so on Unixes, we prefer poll().
+#
+# On Windows, there is no poll() (or at least Python doesn't provide a wrapper
+# for it), but that's OK, because on Windows, select() doesn't have this
+# strange calling convention; plain select() works fine.
+#
+# So: on Windows we use select(), and everywhere else we use poll(). We also
+# fall back to select() in case poll() is somehow broken or missing.
+
+if sys.version_info >= (3, 5):
+ # Modern Python, that retries syscalls by default
+ def _retry_on_intr(fn, timeout):
+ return fn(timeout)
+
+
+else:
+ # Old and broken Pythons.
+ def _retry_on_intr(fn, timeout):
+ if timeout is None:
+ deadline = float("inf")
+ else:
+ deadline = monotonic() + timeout
+
+ while True:
+ try:
+ return fn(timeout)
+ # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7
+ except (OSError, select.error) as e:
+ # 'e.args[0]' incantation works for both OSError and select.error
+ if e.args[0] != errno.EINTR:
+ raise
+ else:
+ timeout = deadline - monotonic()
+ if timeout < 0:
+ timeout = 0
+ if timeout == float("inf"):
+ timeout = None
+ continue
+
+
+def select_wait_for_socket(sock, read=False, write=False, timeout=None):
+ if not read and not write:
+ raise RuntimeError("must specify at least one of read=True, write=True")
+ rcheck = []
+ wcheck = []
+ if read:
+ rcheck.append(sock)
+ if write:
+ wcheck.append(sock)
+ # When doing a non-blocking connect, most systems signal success by
+    # marking the socket writable. Windows, though, signals success by marking
+ # it as "exceptional". We paper over the difference by checking the write
+ # sockets for both conditions. (The stdlib selectors module does the same
+ # thing.)
+ fn = partial(select.select, rcheck, wcheck, wcheck)
+ rready, wready, xready = _retry_on_intr(fn, timeout)
+ return bool(rready or wready or xready)
+
+
+def poll_wait_for_socket(sock, read=False, write=False, timeout=None):
+ if not read and not write:
+ raise RuntimeError("must specify at least one of read=True, write=True")
+ mask = 0
+ if read:
+ mask |= select.POLLIN
+ if write:
+ mask |= select.POLLOUT
+ poll_obj = select.poll()
+ poll_obj.register(sock, mask)
+
+ # For some reason, poll() takes timeout in milliseconds
+ def do_poll(t):
+ if t is not None:
+ t *= 1000
+ return poll_obj.poll(t)
+
+ return bool(_retry_on_intr(do_poll, timeout))
+
+
+def null_wait_for_socket(*args, **kwargs):
+ raise NoWayToWaitForSocketError("no select-equivalent available")
+
+
+def _have_working_poll():
+ # Apparently some systems have a select.poll that fails as soon as you try
+ # to use it, either due to strange configuration or broken monkeypatching
+ # from libraries like eventlet/greenlet.
+ try:
+ poll_obj = select.poll()
+ _retry_on_intr(poll_obj.poll, 0)
+ except (AttributeError, OSError):
+ return False
+ else:
+ return True
+
+
+def wait_for_socket(*args, **kwargs):
+ # We delay choosing which implementation to use until the first time we're
+ # called. We could do it at import time, but then we might make the wrong
+ # decision if someone goes wild with monkeypatching select.poll after
+ # we're imported.
+ global wait_for_socket
+ if _have_working_poll():
+ wait_for_socket = poll_wait_for_socket
+ elif hasattr(select, "select"):
+ wait_for_socket = select_wait_for_socket
+ else: # Platform-specific: Appengine.
+ wait_for_socket = null_wait_for_socket
+ return wait_for_socket(*args, **kwargs)
+
+
+def wait_for_read(sock, timeout=None):
+ """ Waits for reading to be available on a given socket.
+ Returns True if the socket is readable, or False if the timeout expired.
+ """
+ return wait_for_socket(sock, read=True, timeout=timeout)
+
+
+def wait_for_write(sock, timeout=None):
+    """ Waits for writing to be available on a given socket.
+    Returns True if the socket is writable, or False if the timeout expired.
+    """
+ return wait_for_socket(sock, write=True, timeout=timeout)
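
As an illustration of the two public helpers (again, not vendored code), using a local socket pair; socket.socketpair() is available on POSIX and on Python 3.5+ on Windows:

    import socket

    from urllib3.util.wait import wait_for_read, wait_for_write

    a, b = socket.socketpair()
    print(wait_for_write(a, timeout=0.1))  # True: the send buffer can accept data
    print(wait_for_read(a, timeout=0.1))   # False: nothing has been sent yet
    b.sendall(b"ping")
    print(wait_for_read(a, timeout=1.0))   # True: data is now available
    a.close()
    b.close()
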
diff --git a/third_party/python/virtualenv/README_MOZILLA b/third_party/python/virtualenv/README_MOZILLA
new file mode 100644
index 0000000000..6f40472d9e
--- /dev/null
+++ b/third_party/python/virtualenv/README_MOZILLA
@@ -0,0 +1,10 @@
+The contents of this directory are extracted from virtualenv.pyz as downloaded
+from https://bootstrap.pypa.io/virtualenv.pyz.
+
+The virtualenv.py script is a wrapper that should be preserved; it allows using
+the extracted virtualenv.pyz as-is.
+
+It is not vendored via `mach vendor` because that would require a larger wrapper
+script that handles the same things as the zipapp already does, picking the right
+dependencies to add to the PYTHONPATH depending on the python version.
+
diff --git a/third_party/python/virtualenv/__main__.py b/third_party/python/virtualenv/__main__.py
new file mode 100644
index 0000000000..abf90c7a84
--- /dev/null
+++ b/third_party/python/virtualenv/__main__.py
@@ -0,0 +1,168 @@
+import json
+import os
+import sys
+import zipfile
+
+ABS_HERE = os.path.abspath(os.path.dirname(__file__))
+NEW_IMPORT_SYSTEM = sys.version_info[0] == 3
+
+
+class VersionPlatformSelect(object):
+ def __init__(self):
+ self.archive = ABS_HERE
+ self._zip_file = zipfile.ZipFile(ABS_HERE, "r")
+ self.modules = self._load("modules.json")
+ self.distributions = self._load("distributions.json")
+ self.__cache = {}
+
+ def _load(self, of_file):
+ version = ".".join(str(i) for i in sys.version_info[0:2])
+ per_version = json.loads(self.get_data(of_file).decode("utf-8"))
+ all_platforms = per_version[version] if version in per_version else per_version["3.9"]
+        content = all_platforms.get("==any", {})  # start with all platforms
+ not_us = "!={}".format(sys.platform)
+ for key, value in all_platforms.items(): # now override that with not platform
+ if key.startswith("!=") and key != not_us:
+ content.update(value)
+ content.update(all_platforms.get("=={}".format(sys.platform), {})) # and finish it off with our platform
+ return content
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self._zip_file.close()
+
+ def find_mod(self, fullname):
+ if fullname in self.modules:
+ result = self.modules[fullname]
+ return result
+
+ def get_filename(self, fullname):
+ zip_path = self.find_mod(fullname)
+ return None if zip_path is None else os.path.join(ABS_HERE, zip_path)
+
+ def get_data(self, filename):
+ if filename.startswith(ABS_HERE):
+ # keep paths relative from the zipfile
+ filename = filename[len(ABS_HERE) + 1 :]
+ filename = filename.lstrip(os.sep)
+ if sys.platform == "win32":
+                # paths within the zipfile are always /; fix up on Windows to transform \ to /
+ filename = "/".join(filename.split(os.sep))
+ with self._zip_file.open(filename) as file_handler:
+ return file_handler.read()
+
+ def find_distributions(self, context):
+ dist_class = versioned_distribution_class()
+ name = context.name
+ if name in self.distributions:
+ result = dist_class(file_loader=self.get_data, dist_path=self.distributions[name])
+ yield result
+
+ def __repr__(self):
+ return "{}(path={})".format(self.__class__.__name__, ABS_HERE)
+
+ def _register_distutils_finder(self):
+ if "distlib" not in self.modules:
+ return
+
+ class DistlibFinder(object):
+ def __init__(self, path, loader):
+ self.path = path
+ self.loader = loader
+
+ def find(self, name):
+ class Resource(object):
+ def __init__(self, content):
+ self.bytes = content
+
+ full_path = os.path.join(self.path, name)
+ return Resource(self.loader.get_data(full_path))
+
+ # noinspection PyPackageRequirements
+ from distlib.resources import register_finder
+
+ register_finder(self, lambda module: DistlibFinder(os.path.dirname(module.__file__), self))
+
+
+_VER_DISTRIBUTION_CLASS = None
+
+
+def versioned_distribution_class():
+ global _VER_DISTRIBUTION_CLASS
+ if _VER_DISTRIBUTION_CLASS is None:
+ if sys.version_info >= (3, 8):
+ # noinspection PyCompatibility
+ from importlib.metadata import Distribution
+ else:
+ # noinspection PyUnresolvedReferences
+ from importlib_metadata import Distribution
+
+ class VersionedDistribution(Distribution):
+ def __init__(self, file_loader, dist_path):
+ self.file_loader = file_loader
+ self.dist_path = dist_path
+
+ def read_text(self, filename):
+ return self.file_loader(self.locate_file(filename)).decode("utf-8")
+
+ def locate_file(self, path):
+ return os.path.join(self.dist_path, path)
+
+ _VER_DISTRIBUTION_CLASS = VersionedDistribution
+ return _VER_DISTRIBUTION_CLASS
+
+
+if NEW_IMPORT_SYSTEM:
+ # noinspection PyCompatibility
+ # noinspection PyCompatibility
+ from importlib.abc import SourceLoader
+ from importlib.util import spec_from_file_location
+
+ class VersionedFindLoad(VersionPlatformSelect, SourceLoader):
+ def find_spec(self, fullname, path, target=None):
+ zip_path = self.find_mod(fullname)
+ if zip_path is not None:
+ spec = spec_from_file_location(name=fullname, loader=self)
+ return spec
+
+ def module_repr(self, module):
+ raise NotImplementedError
+
+
+else:
+ # noinspection PyDeprecation
+ from imp import new_module
+
+ class VersionedFindLoad(VersionPlatformSelect):
+ def find_module(self, fullname, path=None):
+ return self if self.find_mod(fullname) else None
+
+ def load_module(self, fullname):
+ filename = self.get_filename(fullname)
+ code = self.get_data(filename)
+ mod = sys.modules.setdefault(fullname, new_module(fullname))
+ mod.__file__ = filename
+ mod.__loader__ = self
+ is_package = filename.endswith("__init__.py")
+ if is_package:
+ mod.__path__ = [os.path.dirname(filename)]
+ mod.__package__ = fullname
+ else:
+ mod.__package__ = fullname.rpartition(".")[0]
+ exec(code, mod.__dict__)
+ return mod
+
+
+def run():
+ with VersionedFindLoad() as finder:
+ sys.meta_path.insert(0, finder)
+ finder._register_distutils_finder()
+ from virtualenv.__main__ import run as run_virtualenv
+
+ run_virtualenv()
+
+
+if __name__ == "__main__":
+ run()
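
The module above installs itself as a meta-path finder so imports are served straight out of the zipapp. A minimal self-contained sketch of that mechanism on the Python 3 import system (hypothetical names, illustrative only):

    import sys
    from importlib.abc import Loader, MetaPathFinder
    from importlib.util import spec_from_loader

    class InMemoryFinder(MetaPathFinder, Loader):
        """Toy finder/loader: serves a single in-memory module named 'hello'."""

        def find_spec(self, fullname, path=None, target=None):
            if fullname == "hello":
                return spec_from_loader(fullname, self)
            return None

        def create_module(self, spec):
            return None  # fall back to the default module creation

        def exec_module(self, module):
            module.greet = lambda: "hi from an in-memory module"

    sys.meta_path.insert(0, InMemoryFinder())
    import hello
    print(hello.greet())  # "hi from an in-memory module"
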
diff --git a/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/LICENSE.txt b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/LICENSE.txt
new file mode 100644
index 0000000000..107c61405e
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/LICENSE.txt
@@ -0,0 +1,23 @@
+# This is the MIT license
+
+Copyright (c) 2010 ActiveState Software Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
diff --git a/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/METADATA
new file mode 100644
index 0000000000..f950731044
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/METADATA
@@ -0,0 +1,264 @@
+Metadata-Version: 2.1
+Name: appdirs
+Version: 1.4.4
+Summary: A small Python module for determining appropriate platform-specific dirs, e.g. a "user data dir".
+Home-page: http://github.com/ActiveState/appdirs
+Author: Trent Mick
+Author-email: trentm@gmail.com
+Maintainer: Jeff Rouse
+Maintainer-email: jr@its.to
+License: MIT
+Keywords: application directory log cache user
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+
+
+.. image:: https://secure.travis-ci.org/ActiveState/appdirs.png
+ :target: http://travis-ci.org/ActiveState/appdirs
+
+the problem
+===========
+
+What directory should your app use for storing user data? If running on Mac OS X, you
+should use::
+
+ ~/Library/Application Support/<AppName>
+
+If on Windows (at least English Win XP) that should be::
+
+ C:\Documents and Settings\<User>\Application Data\Local Settings\<AppAuthor>\<AppName>
+
+or possibly::
+
+ C:\Documents and Settings\<User>\Application Data\<AppAuthor>\<AppName>
+
+for `roaming profiles <http://bit.ly/9yl3b6>`_ but that is another story.
+
+On Linux (and other Unices) the dir, according to the `XDG
+spec <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_, is::
+
+ ~/.local/share/<AppName>
+
+
+``appdirs`` to the rescue
+=========================
+
+This kind of thing is what the ``appdirs`` module is for. ``appdirs`` will
+help you choose an appropriate:
+
+- user data dir (``user_data_dir``)
+- user config dir (``user_config_dir``)
+- user cache dir (``user_cache_dir``)
+- site data dir (``site_data_dir``)
+- site config dir (``site_config_dir``)
+- user log dir (``user_log_dir``)
+
+and also:
+
+- is a single module so other Python packages can include their own private copy
+- is slightly opinionated on the directory names used. Look for "OPINION" in
+ documentation and code for when an opinion is being applied.
+
+
+some example output
+===================
+
+On Mac OS X::
+
+ >>> from appdirs import *
+ >>> appname = "SuperApp"
+ >>> appauthor = "Acme"
+ >>> user_data_dir(appname, appauthor)
+ '/Users/trentm/Library/Application Support/SuperApp'
+ >>> site_data_dir(appname, appauthor)
+ '/Library/Application Support/SuperApp'
+ >>> user_cache_dir(appname, appauthor)
+ '/Users/trentm/Library/Caches/SuperApp'
+ >>> user_log_dir(appname, appauthor)
+ '/Users/trentm/Library/Logs/SuperApp'
+
+On Windows 7::
+
+ >>> from appdirs import *
+ >>> appname = "SuperApp"
+ >>> appauthor = "Acme"
+ >>> user_data_dir(appname, appauthor)
+ 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp'
+ >>> user_data_dir(appname, appauthor, roaming=True)
+ 'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp'
+ >>> user_cache_dir(appname, appauthor)
+ 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache'
+ >>> user_log_dir(appname, appauthor)
+ 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs'
+
+On Linux::
+
+ >>> from appdirs import *
+ >>> appname = "SuperApp"
+ >>> appauthor = "Acme"
+ >>> user_data_dir(appname, appauthor)
+    '/home/trentm/.local/share/SuperApp'
+ >>> site_data_dir(appname, appauthor)
+ '/usr/local/share/SuperApp'
+ >>> site_data_dir(appname, appauthor, multipath=True)
+ '/usr/local/share/SuperApp:/usr/share/SuperApp'
+ >>> user_cache_dir(appname, appauthor)
+ '/home/trentm/.cache/SuperApp'
+ >>> user_log_dir(appname, appauthor)
+ '/home/trentm/.cache/SuperApp/log'
+ >>> user_config_dir(appname)
+ '/home/trentm/.config/SuperApp'
+ >>> site_config_dir(appname)
+ '/etc/xdg/SuperApp'
+ >>> os.environ['XDG_CONFIG_DIRS'] = '/etc:/usr/local/etc'
+ >>> site_config_dir(appname, multipath=True)
+ '/etc/SuperApp:/usr/local/etc/SuperApp'
+
+
+``AppDirs`` for convenience
+===========================
+
+::
+
+ >>> from appdirs import AppDirs
+ >>> dirs = AppDirs("SuperApp", "Acme")
+ >>> dirs.user_data_dir
+ '/Users/trentm/Library/Application Support/SuperApp'
+ >>> dirs.site_data_dir
+ '/Library/Application Support/SuperApp'
+ >>> dirs.user_cache_dir
+ '/Users/trentm/Library/Caches/SuperApp'
+ >>> dirs.user_log_dir
+ '/Users/trentm/Library/Logs/SuperApp'
+
+
+
+Per-version isolation
+=====================
+
+If you have multiple versions of your app in use that you want to be
+able to run side-by-side, then you may want version-isolation for these
+dirs::
+
+ >>> from appdirs import AppDirs
+ >>> dirs = AppDirs("SuperApp", "Acme", version="1.0")
+ >>> dirs.user_data_dir
+ '/Users/trentm/Library/Application Support/SuperApp/1.0'
+ >>> dirs.site_data_dir
+ '/Library/Application Support/SuperApp/1.0'
+ >>> dirs.user_cache_dir
+ '/Users/trentm/Library/Caches/SuperApp/1.0'
+ >>> dirs.user_log_dir
+ '/Users/trentm/Library/Logs/SuperApp/1.0'
+
+
+
+appdirs Changelog
+=================
+
+appdirs 1.4.4
+-------------
+- [PR #92] Don't import appdirs from setup.py
+
+Project officially classified as Stable, which is important
+for inclusion in other distros such as ActivePython.
+
+First of several incremental releases to catch up on maintenance.
+
+appdirs 1.4.3
+-------------
+- [PR #76] Python 3.6 invalid escape sequence deprecation fixes
+- Fix for Python 3.6 support
+
+appdirs 1.4.2
+-------------
+- [PR #84] Allow installing without setuptools
+- [PR #86] Fix string delimiters in setup.py description
+- Add Python 3.6 support
+
+appdirs 1.4.1
+-------------
+- [issue #38] Fix _winreg import on Windows Py3
+- [issue #55] Make appname optional
+
+appdirs 1.4.0
+-------------
+- [PR #42] AppAuthor is now optional on Windows
+- [issue 41] Support Jython on Windows, Mac, and Unix-like platforms. Windows
+ support requires `JNA <https://github.com/twall/jna>`_.
+- [PR #44] Fix incorrect behaviour of the site_config_dir method
+
+appdirs 1.3.0
+-------------
+- [Unix, issue 16] Conform to XDG standard, instead of breaking it for
+ everybody
+- [Unix] Removes gratuitous case mangling of the case, since \*nix-es are
+ usually case sensitive, so mangling is not wise
+- [Unix] Fixes the utterly wrong behaviour in ``site_data_dir``, return result
+ based on XDG_DATA_DIRS and make room for respecting the standard which
+ specifies XDG_DATA_DIRS is a multiple-value variable
+- [Issue 6] Add ``*_config_dir`` which are distinct on nix-es, according to
+ XDG specs; on Windows and Mac return the corresponding ``*_data_dir``
+
+appdirs 1.2.0
+-------------
+
+- [Unix] Put ``user_log_dir`` under the *cache* dir on Unix. Seems to be more
+ typical.
+- [issue 9] Make ``unicode`` work on py3k.
+
+appdirs 1.1.0
+-------------
+
+- [issue 4] Add ``AppDirs.user_log_dir``.
+- [Unix, issue 2, issue 7] appdirs now conforms to `XDG base directory spec
+ <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_.
+- [Mac, issue 5] Fix ``site_data_dir()`` on Mac.
+- [Mac] Drop use of 'Carbon' module in favour of hardcoded paths; supports
+ Python3 now.
+- [Windows] Append "Cache" to ``user_cache_dir`` on Windows by default. Use
+ ``opinion=False`` option to disable this.
+- Add ``appdirs.AppDirs`` convenience class. Usage:
+
+ >>> dirs = AppDirs("SuperApp", "Acme", version="1.0")
+ >>> dirs.user_data_dir
+ '/Users/trentm/Library/Application Support/SuperApp/1.0'
+
+- [Windows] Cherry-pick Komodo's change to downgrade paths to the Windows short
+ paths if there are high bit chars.
+- [Linux] Change default ``user_cache_dir()`` on Linux to be singular, e.g.
+ "~/.superapp/cache".
+- [Windows] Add ``roaming`` option to ``user_data_dir()`` (for use on Windows only)
+ and change the default ``user_data_dir`` behaviour to use a *non*-roaming
+ profile dir (``CSIDL_LOCAL_APPDATA`` instead of ``CSIDL_APPDATA``). Why? Because
+ a large roaming profile can cause login speed issues. The "only syncs on
+ logout" behaviour can cause surprises in appdata info.
+
+
+appdirs 1.0.1 (never released)
+------------------------------
+
+Started this changelog 27 July 2010. Before that this module originated in the
+`Komodo <http://www.activestate.com/komodo>`_ product as ``applib.py`` and then
+as `applib/location.py
+<http://github.com/ActiveState/applib/blob/master/applib/location.py>`_ (used by
+`PyPM <http://code.activestate.com/pypm/>`_ in `ActivePython
+<http://www.activestate.com/activepython>`_). This is basically a fork of
+applib.py 1.0.1 and applib/location.py 1.0.1.
+
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/RECORD
new file mode 100644
index 0000000000..9cbb30620e
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/RECORD
@@ -0,0 +1,6 @@
+appdirs.py,sha256=g99s2sXhnvTEm79oj4bWI0Toapc-_SmKKNXvOXHkVic,24720
+appdirs-1.4.4.dist-info/LICENSE.txt,sha256=Nt200KdFqTqyAyA9cZCBSxuJcn0lTK_0jHp6-71HAAs,1097
+appdirs-1.4.4.dist-info/METADATA,sha256=k5TVfXMNKGHTfp2wm6EJKTuGwGNuoQR5TqQgH8iwG8M,8981
+appdirs-1.4.4.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+appdirs-1.4.4.dist-info/top_level.txt,sha256=nKncE8CUqZERJ6VuQWL4_bkunSPDNfn7KZqb4Tr5YEM,8
+appdirs-1.4.4.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/WHEEL
new file mode 100644
index 0000000000..ef99c6cf32
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/top_level.txt
new file mode 100644
index 0000000000..d64bc321a1
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info/top_level.txt
@@ -0,0 +1 @@
+appdirs
diff --git a/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py
new file mode 100644
index 0000000000..2acd1debeb
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py
@@ -0,0 +1,608 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2005-2010 ActiveState Software Inc.
+# Copyright (c) 2013 Eddy Petrișor
+
+"""Utilities for determining application-specific dirs.
+
+See <http://github.com/ActiveState/appdirs> for details and usage.
+"""
+# Dev Notes:
+# - MSDN on where to store app data files:
+# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
+# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
+# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
+
+__version__ = "1.4.4"
+__version_info__ = tuple(int(segment) for segment in __version__.split("."))
+
+
+import sys
+import os
+
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ unicode = str
+
+if sys.platform.startswith('java'):
+ import platform
+ os_name = platform.java_ver()[3][0]
+ if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
+ system = 'win32'
+ elif os_name.startswith('Mac'): # "Mac OS X", etc.
+ system = 'darwin'
+ else: # "Linux", "SunOS", "FreeBSD", etc.
+ # Setting this to "linux2" is not ideal, but only Windows or Mac
+ # are actually checked for and the rest of the module expects
+ # *sys.platform* style strings.
+ system = 'linux2'
+else:
+ system = sys.platform
+
+
+
+def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific data dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user data directories are:
+ Mac OS X: ~/Library/Application Support/<AppName>
+ Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined
+        Win XP (not roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
+        Win XP (roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
+ Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
+ Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
+
+ For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
+ That means, by default "~/.local/share/<AppName>".
+ """
+ if system == "win32":
+ if appauthor is None:
+ appauthor = appname
+ const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
+ path = os.path.normpath(_get_win_folder(const))
+ if appname:
+ if appauthor is not False:
+ path = os.path.join(path, appauthor, appname)
+ else:
+ path = os.path.join(path, appname)
+ elif system == 'darwin':
+ path = os.path.expanduser('~/Library/Application Support/')
+ if appname:
+ path = os.path.join(path, appname)
+ else:
+ path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
+ r"""Return full path to the user-shared data dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "multipath" is an optional parameter only applicable to *nix
+ which indicates that the entire list of data dirs should be
+ returned. By default, the first item from XDG_DATA_DIRS is
+ returned, or '/usr/local/share/<AppName>',
+ if XDG_DATA_DIRS is not set
+
+ Typical site data directories are:
+ Mac OS X: /Library/Application Support/<AppName>
+ Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
+ Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
+ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
+ Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7.
+
+ For Unix, this is using the $XDG_DATA_DIRS[0] default.
+
+ WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
+ """
+ if system == "win32":
+ if appauthor is None:
+ appauthor = appname
+ path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
+ if appname:
+ if appauthor is not False:
+ path = os.path.join(path, appauthor, appname)
+ else:
+ path = os.path.join(path, appname)
+ elif system == 'darwin':
+ path = os.path.expanduser('/Library/Application Support')
+ if appname:
+ path = os.path.join(path, appname)
+ else:
+ # XDG default for $XDG_DATA_DIRS
+ # only first, if multipath is False
+ path = os.getenv('XDG_DATA_DIRS',
+ os.pathsep.join(['/usr/local/share', '/usr/share']))
+ pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
+ if appname:
+ if version:
+ appname = os.path.join(appname, version)
+ pathlist = [os.sep.join([x, appname]) for x in pathlist]
+
+ if multipath:
+ path = os.pathsep.join(pathlist)
+ else:
+ path = pathlist[0]
+ return path
+
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific config dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user config directories are:
+ Mac OS X: same as user_data_dir
+ Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
+ Win *: same as user_data_dir
+
+ For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
+ That means, by default "~/.config/<AppName>".
+ """
+ if system in ["win32", "darwin"]:
+ path = user_data_dir(appname, appauthor, None, roaming)
+ else:
+ path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
+    r"""Return full path to the user-shared config dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "multipath" is an optional parameter only applicable to *nix
+ which indicates that the entire list of config dirs should be
+ returned. By default, the first item from XDG_CONFIG_DIRS is
+ returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
+
+ Typical site config directories are:
+ Mac OS X: same as site_data_dir
+ Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
+ $XDG_CONFIG_DIRS
+ Win *: same as site_data_dir
+ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
+
+ For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False
+
+ WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
+ """
+ if system in ["win32", "darwin"]:
+ path = site_data_dir(appname, appauthor)
+ if appname and version:
+ path = os.path.join(path, version)
+ else:
+ # XDG default for $XDG_CONFIG_DIRS
+ # only first, if multipath is False
+ path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
+ pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
+ if appname:
+ if version:
+ appname = os.path.join(appname, version)
+ pathlist = [os.sep.join([x, appname]) for x in pathlist]
+
+ if multipath:
+ path = os.pathsep.join(pathlist)
+ else:
+ path = pathlist[0]
+ return path
+
+
+def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
+ r"""Return full path to the user-specific cache dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "opinion" (boolean) can be False to disable the appending of
+ "Cache" to the base app data dir for Windows. See
+ discussion below.
+
+ Typical user cache directories are:
+ Mac OS X: ~/Library/Caches/<AppName>
+ Unix: ~/.cache/<AppName> (XDG default)
+ Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
+ Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
+
+ On Windows the only suggestion in the MSDN docs is that local settings go in
+ the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
+ app data dir (the default returned by `user_data_dir` above). Apps typically
+ put cache data somewhere *under* the given dir here. Some examples:
+ ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
+ ...\Acme\SuperApp\Cache\1.0
+ OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
+ This can be disabled with the `opinion=False` option.
+ """
+ if system == "win32":
+ if appauthor is None:
+ appauthor = appname
+ path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
+ if appname:
+ if appauthor is not False:
+ path = os.path.join(path, appauthor, appname)
+ else:
+ path = os.path.join(path, appname)
+ if opinion:
+ path = os.path.join(path, "Cache")
+ elif system == 'darwin':
+ path = os.path.expanduser('~/Library/Caches')
+ if appname:
+ path = os.path.join(path, appname)
+ else:
+ path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific state dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user state directories are:
+ Mac OS X: same as user_data_dir
+ Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined
+ Win *: same as user_data_dir
+
+ For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
+ to extend the XDG spec and support $XDG_STATE_HOME.
+
+ That means, by default "~/.local/state/<AppName>".
+ """
+ if system in ["win32", "darwin"]:
+ path = user_data_dir(appname, appauthor, None, roaming)
+ else:
+ path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
+ r"""Return full path to the user-specific log dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "opinion" (boolean) can be False to disable the appending of
+ "Logs" to the base app data dir for Windows, and "log" to the
+ base cache dir for Unix. See discussion below.
+
+ Typical user log directories are:
+ Mac OS X: ~/Library/Logs/<AppName>
+ Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined
+ Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
+ Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs
+
+ On Windows the only suggestion in the MSDN docs is that local settings
+ go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
+ examples of what some windows apps use for a logs dir.)
+
+ OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
+ value for Windows and appends "log" to the user cache dir for Unix.
+ This can be disabled with the `opinion=False` option.
+ """
+ if system == "darwin":
+ path = os.path.join(
+ os.path.expanduser('~/Library/Logs'),
+ appname)
+ elif system == "win32":
+ path = user_data_dir(appname, appauthor, version)
+ version = False
+ if opinion:
+ path = os.path.join(path, "Logs")
+ else:
+ path = user_cache_dir(appname, appauthor, version)
+ version = False
+ if opinion:
+ path = os.path.join(path, "log")
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+class AppDirs(object):
+ """Convenience wrapper for getting application dirs."""
+ def __init__(self, appname=None, appauthor=None, version=None,
+ roaming=False, multipath=False):
+ self.appname = appname
+ self.appauthor = appauthor
+ self.version = version
+ self.roaming = roaming
+ self.multipath = multipath
+
+ @property
+ def user_data_dir(self):
+ return user_data_dir(self.appname, self.appauthor,
+ version=self.version, roaming=self.roaming)
+
+ @property
+ def site_data_dir(self):
+ return site_data_dir(self.appname, self.appauthor,
+ version=self.version, multipath=self.multipath)
+
+ @property
+ def user_config_dir(self):
+ return user_config_dir(self.appname, self.appauthor,
+ version=self.version, roaming=self.roaming)
+
+ @property
+ def site_config_dir(self):
+ return site_config_dir(self.appname, self.appauthor,
+ version=self.version, multipath=self.multipath)
+
+ @property
+ def user_cache_dir(self):
+ return user_cache_dir(self.appname, self.appauthor,
+ version=self.version)
+
+ @property
+ def user_state_dir(self):
+ return user_state_dir(self.appname, self.appauthor,
+ version=self.version)
+
+ @property
+ def user_log_dir(self):
+ return user_log_dir(self.appname, self.appauthor,
+ version=self.version)
+
+
+#---- internal support stuff
+
+def _get_win_folder_from_registry(csidl_name):
+ """This is a fallback technique at best. I'm not sure if using the
+ registry for this guarantees us the correct answer for all CSIDL_*
+ names.
+ """
+ if PY3:
+ import winreg as _winreg
+ else:
+ import _winreg
+
+ shell_folder_name = {
+ "CSIDL_APPDATA": "AppData",
+ "CSIDL_COMMON_APPDATA": "Common AppData",
+ "CSIDL_LOCAL_APPDATA": "Local AppData",
+ }[csidl_name]
+
+ key = _winreg.OpenKey(
+ _winreg.HKEY_CURRENT_USER,
+ r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
+ )
+ dir, type = _winreg.QueryValueEx(key, shell_folder_name)
+ return dir
+
+
+def _get_win_folder_with_pywin32(csidl_name):
+ from win32com.shell import shellcon, shell
+ dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
+ # Try to make this a unicode path because SHGetFolderPath does
+ # not return unicode strings when there is unicode data in the
+ # path.
+ try:
+ dir = unicode(dir)
+
+        # Downgrade to short path name if it has high-bit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in dir:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ try:
+ import win32api
+ dir = win32api.GetShortPathName(dir)
+ except ImportError:
+ pass
+ except UnicodeError:
+ pass
+ return dir
+
+
+def _get_win_folder_with_ctypes(csidl_name):
+ import ctypes
+
+ csidl_const = {
+ "CSIDL_APPDATA": 26,
+ "CSIDL_COMMON_APPDATA": 35,
+ "CSIDL_LOCAL_APPDATA": 28,
+ }[csidl_name]
+
+ buf = ctypes.create_unicode_buffer(1024)
+ ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
+
+    # Downgrade to short path name if it has high-bit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in buf:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ buf2 = ctypes.create_unicode_buffer(1024)
+ if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+ buf = buf2
+
+ return buf.value
+
+def _get_win_folder_with_jna(csidl_name):
+ import array
+ from com.sun import jna
+ from com.sun.jna.platform import win32
+
+ buf_size = win32.WinDef.MAX_PATH * 2
+ buf = array.zeros('c', buf_size)
+ shell = win32.Shell32.INSTANCE
+ shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
+ dir = jna.Native.toString(buf.tostring()).rstrip("\0")
+
+    # Downgrade to a short path name if the path has high-bit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in dir:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ buf = array.zeros('c', buf_size)
+ kernel = win32.Kernel32.INSTANCE
+ if kernel.GetShortPathName(dir, buf, buf_size):
+ dir = jna.Native.toString(buf.tostring()).rstrip("\0")
+
+ return dir
+
+if system == "win32":
+ try:
+ import win32com.shell
+ _get_win_folder = _get_win_folder_with_pywin32
+ except ImportError:
+ try:
+ from ctypes import windll
+ _get_win_folder = _get_win_folder_with_ctypes
+ except ImportError:
+ try:
+ import com.sun.jna
+ _get_win_folder = _get_win_folder_with_jna
+ except ImportError:
+ _get_win_folder = _get_win_folder_from_registry
+
+
+#---- self test code
+
+if __name__ == "__main__":
+ appname = "MyApp"
+ appauthor = "MyCompany"
+
+ props = ("user_data_dir",
+ "user_config_dir",
+ "user_cache_dir",
+ "user_state_dir",
+ "user_log_dir",
+ "site_data_dir",
+ "site_config_dir")
+
+ print("-- app dirs %s --" % __version__)
+
+ print("-- app dirs (with optional 'version')")
+ dirs = AppDirs(appname, appauthor, version="1.0")
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
+
+ print("\n-- app dirs (without optional 'version')")
+ dirs = AppDirs(appname, appauthor)
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
+
+ print("\n-- app dirs (without optional 'appauthor')")
+ dirs = AppDirs(appname)
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
+
+ print("\n-- app dirs (with disabled 'appauthor')")
+ dirs = AppDirs(appname, appauthor=False)
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
diff --git a/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/__init__.py b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/__init__.py
new file mode 100644
index 0000000000..1fc3c62e81
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/__init__.py
@@ -0,0 +1,6 @@
+# A Python "namespace package" http://www.python.org/dev/peps/pep-0382/
+# This always goes inside of a namespace package's __init__.py
+
+from pkgutil import extend_path
+
+__path__ = extend_path(__path__, __name__)
diff --git a/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/configparser/__init__.py b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/configparser/__init__.py
new file mode 100644
index 0000000000..603d604764
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/configparser/__init__.py
@@ -0,0 +1,1473 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# flake8: noqa
+
+"""Configuration file parser.
+
+A configuration file consists of sections, led by a "[section]" header,
+and followed by "name: value" entries, with continuations and such in
+the style of RFC 822.
+
+Intrinsic defaults can be specified by passing them into the
+ConfigParser constructor as a dictionary.
+
+class:
+
+ConfigParser -- responsible for parsing a list of
+ configuration files, and managing the parsed database.
+
+ methods:
+
+ __init__(defaults=None, dict_type=_default_dict, allow_no_value=False,
+ delimiters=('=', ':'), comment_prefixes=('#', ';'),
+ inline_comment_prefixes=None, strict=True,
+ empty_lines_in_values=True, default_section='DEFAULT',
+ interpolation=<unset>, converters=<unset>):
+ Create the parser. When `defaults' is given, it is initialized into the
+        dictionary of intrinsic defaults. The keys must be strings, the values
+ must be appropriate for %()s string interpolation.
+
+ When `dict_type' is given, it will be used to create the dictionary
+ objects for the list of sections, for the options within a section, and
+ for the default values.
+
+ When `delimiters' is given, it will be used as the set of substrings
+ that divide keys from values.
+
+ When `comment_prefixes' is given, it will be used as the set of
+ substrings that prefix comments in empty lines. Comments can be
+ indented.
+
+ When `inline_comment_prefixes' is given, it will be used as the set of
+ substrings that prefix comments in non-empty lines.
+
+ When `strict` is True, the parser won't allow for any section or option
+ duplicates while reading from a single source (file, string or
+ dictionary). Default is True.
+
+ When `empty_lines_in_values' is False (default: True), each empty line
+ marks the end of an option. Otherwise, internal empty lines of
+ a multiline option are kept as part of the value.
+
+ When `allow_no_value' is True (default: False), options without
+ values are accepted; the value presented for these is None.
+
+ When `default_section' is given, the name of the special section is
+ named accordingly. By default it is called ``"DEFAULT"`` but this can
+ be customized to point to any other valid section name. Its current
+ value can be retrieved using the ``parser_instance.default_section``
+ attribute and may be modified at runtime.
+
+ When `interpolation` is given, it should be an Interpolation subclass
+ instance. It will be used as the handler for option value
+ pre-processing when using getters. RawConfigParser objects don't do
+ any sort of interpolation, whereas ConfigParser uses an instance of
+        BasicInterpolation. The library also provides a ``zc.buildout``
+ inspired ExtendedInterpolation implementation.
+
+ When `converters` is given, it should be a dictionary where each key
+ represents the name of a type converter and each value is a callable
+ implementing the conversion from string to the desired datatype. Every
+ converter gets its corresponding get*() method on the parser object and
+ section proxies.
+
+ sections()
+ Return all the configuration section names, sans DEFAULT.
+
+ has_section(section)
+ Return whether the given section exists.
+
+ has_option(section, option)
+ Return whether the given option exists in the given section.
+
+ options(section)
+ Return list of configuration options for the named section.
+
+ read(filenames, encoding=None)
+ Read and parse the iterable of named configuration files, given by
+ name. A single filename is also allowed. Non-existing files
+ are ignored. Return list of successfully read files.
+
+ read_file(f, filename=None)
+ Read and parse one configuration file, given as a file object.
+ The filename defaults to f.name; it is only used in error
+ messages (if f has no `name' attribute, the string `<???>' is used).
+
+ read_string(string)
+ Read configuration from a given string.
+
+ read_dict(dictionary)
+ Read configuration from a dictionary. Keys are section names,
+ values are dictionaries with keys and values that should be present
+ in the section. If the used dictionary type preserves order, sections
+ and their keys will be added in order. Values are automatically
+ converted to strings.
+
+ get(section, option, raw=False, vars=None, fallback=_UNSET)
+ Return a string value for the named option. All % interpolations are
+ expanded in the return values, based on the defaults passed into the
+ constructor and the DEFAULT section. Additional substitutions may be
+ provided using the `vars' argument, which must be a dictionary whose
+ contents override any pre-existing defaults. If `option' is a key in
+ `vars', the value from `vars' is used.
+
+    getint(section, option, raw=False, vars=None, fallback=_UNSET)
+ Like get(), but convert value to an integer.
+
+    getfloat(section, option, raw=False, vars=None, fallback=_UNSET)
+ Like get(), but convert value to a float.
+
+    getboolean(section, option, raw=False, vars=None, fallback=_UNSET)
+ Like get(), but convert value to a boolean (currently case
+ insensitively defined as 0, false, no, off for False, and 1, true,
+ yes, on for True). Returns False or True.
+
+ items(section=_UNSET, raw=False, vars=None)
+ If section is given, return a list of tuples with (name, value) for
+ each option in the section. Otherwise, return a list of tuples with
+ (section_name, section_proxy) for each section, including DEFAULTSECT.
+
+ remove_section(section)
+ Remove the given file section and all its options.
+
+ remove_option(section, option)
+ Remove the given option from the given section.
+
+ set(section, option, value)
+ Set the given option.
+
+ write(fp, space_around_delimiters=True)
+ Write the configuration state in .ini format. If
+ `space_around_delimiters' is True (the default), delimiters
+ between keys and values are surrounded by spaces.
+"""
+
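+# Illustrative usage sketch (not part of the upstream module); a minimal
+# round trip through the API summarised in the docstring above:
+#
+#     parser = ConfigParser()
+#     parser.read_string("[server]\nhost = localhost\nport = 8080\n")
+#     parser.get("server", "host")     # -> 'localhost'
+#     parser.getint("server", "port")  # -> 8080
+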
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+try:
+ from collections.abc import MutableMapping
+except ImportError:
+ from collections import MutableMapping
+import functools
+import io
+import itertools
+import os
+import re
+import sys
+import warnings
+
+from backports.configparser.helpers import OrderedDict as _default_dict
+from backports.configparser.helpers import ChainMap as _ChainMap
+from backports.configparser.helpers import from_none, open, str, PY2
+from backports.configparser.helpers import PathLike, fspath
+from backports.configparser.helpers import MutableMapping
+
+__all__ = [
+ "NoSectionError",
+ "DuplicateOptionError",
+ "DuplicateSectionError",
+ "NoOptionError",
+ "InterpolationError",
+ "InterpolationDepthError",
+ "InterpolationMissingOptionError",
+ "InterpolationSyntaxError",
+ "ParsingError",
+ "MissingSectionHeaderError",
+ "ConfigParser",
+ "SafeConfigParser",
+ "RawConfigParser",
+ "Interpolation",
+ "BasicInterpolation",
+ "ExtendedInterpolation",
+ "LegacyInterpolation",
+ "SectionProxy",
+ "ConverterMapping",
+ "DEFAULTSECT",
+ "MAX_INTERPOLATION_DEPTH",
+]
+
+DEFAULTSECT = "DEFAULT"
+
+MAX_INTERPOLATION_DEPTH = 10
+
+
+# exception classes
+class Error(Exception):
+ """Base class for ConfigParser exceptions."""
+
+ def __init__(self, msg=''):
+ self.message = msg
+ Exception.__init__(self, msg)
+
+ def __repr__(self):
+ return self.message
+
+ __str__ = __repr__
+
+
+class NoSectionError(Error):
+ """Raised when no section matches a requested option."""
+
+ def __init__(self, section):
+ Error.__init__(self, 'No section: %r' % (section,))
+ self.section = section
+ self.args = (section,)
+
+
+class DuplicateSectionError(Error):
+ """Raised when a section is repeated in an input source.
+
+ Possible repetitions that raise this exception are: multiple creation
+ using the API or in strict parsers when a section is found more than once
+ in a single input file, string or dictionary.
+ """
+
+ def __init__(self, section, source=None, lineno=None):
+ msg = [repr(section), " already exists"]
+ if source is not None:
+ message = ["While reading from ", repr(source)]
+ if lineno is not None:
+ message.append(" [line {0:2d}]".format(lineno))
+ message.append(": section ")
+ message.extend(msg)
+ msg = message
+ else:
+ msg.insert(0, "Section ")
+ Error.__init__(self, "".join(msg))
+ self.section = section
+ self.source = source
+ self.lineno = lineno
+ self.args = (section, source, lineno)
+
+
+class DuplicateOptionError(Error):
+ """Raised by strict parsers when an option is repeated in an input source.
+
+ Current implementation raises this exception only when an option is found
+ more than once in a single file, string or dictionary.
+ """
+
+ def __init__(self, section, option, source=None, lineno=None):
+ msg = [repr(option), " in section ", repr(section), " already exists"]
+ if source is not None:
+ message = ["While reading from ", repr(source)]
+ if lineno is not None:
+ message.append(" [line {0:2d}]".format(lineno))
+ message.append(": option ")
+ message.extend(msg)
+ msg = message
+ else:
+ msg.insert(0, "Option ")
+ Error.__init__(self, "".join(msg))
+ self.section = section
+ self.option = option
+ self.source = source
+ self.lineno = lineno
+ self.args = (section, option, source, lineno)
+
+
+class NoOptionError(Error):
+ """A requested option was not found."""
+
+ def __init__(self, option, section):
+ Error.__init__(self, "No option %r in section: %r" % (option, section))
+ self.option = option
+ self.section = section
+ self.args = (option, section)
+
+
+class InterpolationError(Error):
+ """Base class for interpolation-related exceptions."""
+
+ def __init__(self, option, section, msg):
+ Error.__init__(self, msg)
+ self.option = option
+ self.section = section
+ self.args = (option, section, msg)
+
+
+class InterpolationMissingOptionError(InterpolationError):
+ """A string substitution required a setting which was not available."""
+
+ def __init__(self, option, section, rawval, reference):
+ msg = (
+ "Bad value substitution: option {0!r} in section {1!r} contains "
+ "an interpolation key {2!r} which is not a valid option name. "
+ "Raw value: {3!r}".format(option, section, reference, rawval)
+ )
+ InterpolationError.__init__(self, option, section, msg)
+ self.reference = reference
+ self.args = (option, section, rawval, reference)
+
+
+class InterpolationSyntaxError(InterpolationError):
+ """Raised when the source text contains invalid syntax.
+
+ Current implementation raises this exception when the source text into
+ which substitutions are made does not conform to the required syntax.
+ """
+
+
+class InterpolationDepthError(InterpolationError):
+ """Raised when substitutions are nested too deeply."""
+
+ def __init__(self, option, section, rawval):
+ msg = (
+ "Recursion limit exceeded in value substitution: option {0!r} "
+ "in section {1!r} contains an interpolation key which "
+ "cannot be substituted in {2} steps. Raw value: {3!r}"
+ "".format(option, section, MAX_INTERPOLATION_DEPTH, rawval)
+ )
+ InterpolationError.__init__(self, option, section, msg)
+ self.args = (option, section, rawval)
+
+
+class ParsingError(Error):
+ """Raised when a configuration file does not follow legal syntax."""
+
+ def __init__(self, source=None, filename=None):
+ # Exactly one of `source'/`filename' arguments has to be given.
+ # `filename' kept for compatibility.
+ if filename and source:
+ raise ValueError(
+ "Cannot specify both `filename' and `source'. " "Use `source'."
+ )
+ elif not filename and not source:
+ raise ValueError("Required argument `source' not given.")
+ elif filename:
+ source = filename
+ Error.__init__(self, 'Source contains parsing errors: %r' % source)
+ self.source = source
+ self.errors = []
+ self.args = (source,)
+
+ @property
+ def filename(self):
+ """Deprecated, use `source'."""
+ warnings.warn(
+ "The 'filename' attribute will be removed in future versions. "
+ "Use 'source' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.source
+
+ @filename.setter
+ def filename(self, value):
+ """Deprecated, user `source'."""
+ warnings.warn(
+ "The 'filename' attribute will be removed in future versions. "
+ "Use 'source' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self.source = value
+
+ def append(self, lineno, line):
+ self.errors.append((lineno, line))
+ self.message += '\n\t[line %2d]: %s' % (lineno, line)
+
+
+class MissingSectionHeaderError(ParsingError):
+ """Raised when a key-value pair is found before any section header."""
+
+ def __init__(self, filename, lineno, line):
+ Error.__init__(
+ self,
+ 'File contains no section headers.\nfile: %r, line: %d\n%r'
+ % (filename, lineno, line),
+ )
+ self.source = filename
+ self.lineno = lineno
+ self.line = line
+ self.args = (filename, lineno, line)
+
+
+# Used in parser getters to indicate the default behaviour when a specific
+# option is not found is to raise an exception. Created to enable `None' as
+# a valid fallback value.
+_UNSET = object()
+
+
+class Interpolation(object):
+ """Dummy interpolation that passes the value through with no changes."""
+
+ def before_get(self, parser, section, option, value, defaults):
+ return value
+
+ def before_set(self, parser, section, option, value):
+ return value
+
+ def before_read(self, parser, section, option, value):
+ return value
+
+ def before_write(self, parser, section, option, value):
+ return value
+
+
+class BasicInterpolation(Interpolation):
+ """Interpolation as implemented in the classic ConfigParser.
+
+ The option values can contain format strings which refer to other values in
+ the same section, or values in the special default section.
+
+ For example:
+
+ something: %(dir)s/whatever
+
+ would resolve the "%(dir)s" to the value of dir. All reference
+ expansions are done late, on demand. If a user needs to use a bare % in
+ a configuration file, she can escape it by writing %%. Other % usage
+ is considered a user error and raises `InterpolationSyntaxError'."""
+
+ _KEYCRE = re.compile(r"%\(([^)]+)\)s")
+
+ def before_get(self, parser, section, option, value, defaults):
+ L = []
+ self._interpolate_some(parser, option, L, value, section, defaults, 1)
+ return ''.join(L)
+
+ def before_set(self, parser, section, option, value):
+ tmp_value = value.replace('%%', '') # escaped percent signs
+ tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax
+ if '%' in tmp_value:
+ raise ValueError(
+ "invalid interpolation syntax in %r at "
+ "position %d" % (value, tmp_value.find('%'))
+ )
+ return value
+
+ def _interpolate_some(self, parser, option, accum, rest, section, map, depth):
+ rawval = parser.get(section, option, raw=True, fallback=rest)
+ if depth > MAX_INTERPOLATION_DEPTH:
+ raise InterpolationDepthError(option, section, rawval)
+ while rest:
+ p = rest.find("%")
+ if p < 0:
+ accum.append(rest)
+ return
+ if p > 0:
+ accum.append(rest[:p])
+ rest = rest[p:]
+ # p is no longer used
+ c = rest[1:2]
+ if c == "%":
+ accum.append("%")
+ rest = rest[2:]
+ elif c == "(":
+ m = self._KEYCRE.match(rest)
+ if m is None:
+ raise InterpolationSyntaxError(
+ option,
+ section,
+ "bad interpolation variable reference %r" % rest,
+ )
+ var = parser.optionxform(m.group(1))
+ rest = rest[m.end() :]
+ try:
+ v = map[var]
+ except KeyError:
+ raise from_none(
+ InterpolationMissingOptionError(option, section, rawval, var)
+ )
+ if "%" in v:
+ self._interpolate_some(
+ parser, option, accum, v, section, map, depth + 1
+ )
+ else:
+ accum.append(v)
+ else:
+ raise InterpolationSyntaxError(
+ option,
+ section,
+ "'%%' must be followed by '%%' or '(', " "found: %r" % (rest,),
+ )
+
+
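+# Illustrative sketch (not part of upstream configparser): a tiny helper that
+# exercises the BasicInterpolation behaviour described above. It is never
+# called by the library itself and exists only as documentation.
+def _basic_interpolation_example():
+    parser = ConfigParser()  # ConfigParser defaults to BasicInterpolation
+    parser.read_string(
+        "[paths]\n"
+        "dir = /var/log\n"
+        "something = %(dir)s/whatever\n"
+    )
+    # "%(dir)s" is expanded on demand, so this returns '/var/log/whatever'.
+    return parser.get('paths', 'something')
+
+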
+class ExtendedInterpolation(Interpolation):
+ """Advanced variant of interpolation, supports the syntax used by
+ `zc.buildout'. Enables interpolation between sections."""
+
+ _KEYCRE = re.compile(r"\$\{([^}]+)\}")
+
+ def before_get(self, parser, section, option, value, defaults):
+ L = []
+ self._interpolate_some(parser, option, L, value, section, defaults, 1)
+ return ''.join(L)
+
+ def before_set(self, parser, section, option, value):
+ tmp_value = value.replace('$$', '') # escaped dollar signs
+ tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax
+ if '$' in tmp_value:
+ raise ValueError(
+ "invalid interpolation syntax in %r at "
+ "position %d" % (value, tmp_value.find('$'))
+ )
+ return value
+
+ def _interpolate_some(self, parser, option, accum, rest, section, map, depth):
+ rawval = parser.get(section, option, raw=True, fallback=rest)
+ if depth > MAX_INTERPOLATION_DEPTH:
+ raise InterpolationDepthError(option, section, rawval)
+ while rest:
+ p = rest.find("$")
+ if p < 0:
+ accum.append(rest)
+ return
+ if p > 0:
+ accum.append(rest[:p])
+ rest = rest[p:]
+ # p is no longer used
+ c = rest[1:2]
+ if c == "$":
+ accum.append("$")
+ rest = rest[2:]
+ elif c == "{":
+ m = self._KEYCRE.match(rest)
+ if m is None:
+ raise InterpolationSyntaxError(
+ option,
+ section,
+ "bad interpolation variable reference %r" % rest,
+ )
+ path = m.group(1).split(':')
+ rest = rest[m.end() :]
+ sect = section
+ opt = option
+ try:
+ if len(path) == 1:
+ opt = parser.optionxform(path[0])
+ v = map[opt]
+ elif len(path) == 2:
+ sect = path[0]
+ opt = parser.optionxform(path[1])
+ v = parser.get(sect, opt, raw=True)
+ else:
+ raise InterpolationSyntaxError(
+ option, section, "More than one ':' found: %r" % (rest,)
+ )
+ except (KeyError, NoSectionError, NoOptionError):
+ raise from_none(
+ InterpolationMissingOptionError(
+ option, section, rawval, ":".join(path)
+ )
+ )
+ if "$" in v:
+ self._interpolate_some(
+ parser,
+ opt,
+ accum,
+ v,
+ sect,
+ dict(parser.items(sect, raw=True)),
+ depth + 1,
+ )
+ else:
+ accum.append(v)
+ else:
+ raise InterpolationSyntaxError(
+ option,
+ section,
+ "'$' must be followed by '$' or '{', " "found: %r" % (rest,),
+ )
+
+
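+# Illustrative sketch (not part of upstream configparser): demonstrates the
+# ${section:option} syntax handled by ExtendedInterpolation above. Defined
+# purely as an example; the library never calls it.
+def _extended_interpolation_example():
+    parser = ConfigParser(interpolation=ExtendedInterpolation())
+    parser.read_string(
+        "[common]\n"
+        "home = /opt/app\n"
+        "[paths]\n"
+        "logs = ${common:home}/logs\n"
+    )
+    # Values may refer to options in other sections via ${section:option}.
+    return parser.get('paths', 'logs')  # -> '/opt/app/logs'
+
+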
+class LegacyInterpolation(Interpolation):
+ """Deprecated interpolation used in old versions of ConfigParser.
+ Use BasicInterpolation or ExtendedInterpolation instead."""
+
+ _KEYCRE = re.compile(r"%\(([^)]*)\)s|.")
+
+ def before_get(self, parser, section, option, value, vars):
+ rawval = value
+ depth = MAX_INTERPOLATION_DEPTH
+ while depth: # Loop through this until it's done
+ depth -= 1
+ if value and "%(" in value:
+ replace = functools.partial(self._interpolation_replace, parser=parser)
+ value = self._KEYCRE.sub(replace, value)
+ try:
+ value = value % vars
+ except KeyError as e:
+ raise from_none(
+ InterpolationMissingOptionError(
+ option, section, rawval, e.args[0]
+ )
+ )
+ else:
+ break
+ if value and "%(" in value:
+ raise InterpolationDepthError(option, section, rawval)
+ return value
+
+ def before_set(self, parser, section, option, value):
+ return value
+
+ @staticmethod
+ def _interpolation_replace(match, parser):
+ s = match.group(1)
+ if s is None:
+ return match.group()
+ else:
+ return "%%(%s)s" % parser.optionxform(s)
+
+
+class RawConfigParser(MutableMapping):
+ """ConfigParser that does not do interpolation."""
+
+ # Regular expressions for parsing section headers and options
+ _SECT_TMPL = r"""
+ \[ # [
+ (?P<header>[^]]+) # very permissive!
+ \] # ]
+ """
+ _OPT_TMPL = r"""
+ (?P<option>.*?) # very permissive!
+ \s*(?P<vi>{delim})\s* # any number of space/tab,
+ # followed by any of the
+ # allowed delimiters,
+ # followed by any space/tab
+ (?P<value>.*)$ # everything up to eol
+ """
+ _OPT_NV_TMPL = r"""
+ (?P<option>.*?) # very permissive!
+ \s*(?: # any number of space/tab,
+ (?P<vi>{delim})\s* # optionally followed by
+ # any of the allowed
+ # delimiters, followed by any
+ # space/tab
+ (?P<value>.*))?$ # everything up to eol
+ """
+ # Interpolation algorithm to be used if the user does not specify another
+ _DEFAULT_INTERPOLATION = Interpolation()
+ # Compiled regular expression for matching sections
+ SECTCRE = re.compile(_SECT_TMPL, re.VERBOSE)
+ # Compiled regular expression for matching options with typical separators
+ OPTCRE = re.compile(_OPT_TMPL.format(delim="=|:"), re.VERBOSE)
+ # Compiled regular expression for matching options with optional values
+ # delimited using typical separators
+ OPTCRE_NV = re.compile(_OPT_NV_TMPL.format(delim="=|:"), re.VERBOSE)
+ # Compiled regular expression for matching leading whitespace in a line
+ NONSPACECRE = re.compile(r"\S")
+ # Possible boolean values in the configuration.
+ BOOLEAN_STATES = {
+ '1': True,
+ 'yes': True,
+ 'true': True,
+ 'on': True,
+ '0': False,
+ 'no': False,
+ 'false': False,
+ 'off': False,
+ }
+
+ def __init__(
+ self, defaults=None, dict_type=_default_dict, allow_no_value=False, **kwargs
+ ):
+
+ # keyword-only arguments
+ delimiters = kwargs.get('delimiters', ('=', ':'))
+ comment_prefixes = kwargs.get('comment_prefixes', ('#', ';'))
+ inline_comment_prefixes = kwargs.get('inline_comment_prefixes', None)
+ strict = kwargs.get('strict', True)
+ empty_lines_in_values = kwargs.get('empty_lines_in_values', True)
+ default_section = kwargs.get('default_section', DEFAULTSECT)
+ interpolation = kwargs.get('interpolation', _UNSET)
+ converters = kwargs.get('converters', _UNSET)
+
+ self._dict = dict_type
+ self._sections = self._dict()
+ self._defaults = self._dict()
+ self._converters = ConverterMapping(self)
+ self._proxies = self._dict()
+ self._proxies[default_section] = SectionProxy(self, default_section)
+ self._delimiters = tuple(delimiters)
+ if delimiters == ('=', ':'):
+ self._optcre = self.OPTCRE_NV if allow_no_value else self.OPTCRE
+ else:
+ d = "|".join(re.escape(d) for d in delimiters)
+ if allow_no_value:
+ self._optcre = re.compile(self._OPT_NV_TMPL.format(delim=d), re.VERBOSE)
+ else:
+ self._optcre = re.compile(self._OPT_TMPL.format(delim=d), re.VERBOSE)
+ self._comment_prefixes = tuple(comment_prefixes or ())
+ self._inline_comment_prefixes = tuple(inline_comment_prefixes or ())
+ self._strict = strict
+ self._allow_no_value = allow_no_value
+ self._empty_lines_in_values = empty_lines_in_values
+ self.default_section = default_section
+ self._interpolation = interpolation
+ if self._interpolation is _UNSET:
+ self._interpolation = self._DEFAULT_INTERPOLATION
+ if self._interpolation is None:
+ self._interpolation = Interpolation()
+ if converters is not _UNSET:
+ self._converters.update(converters)
+ if defaults:
+ self._read_defaults(defaults)
+
+ def defaults(self):
+ return self._defaults
+
+ def sections(self):
+ """Return a list of section names, excluding [DEFAULT]"""
+ # self._sections will never have [DEFAULT] in it
+ return list(self._sections.keys())
+
+ def add_section(self, section):
+ """Create a new section in the configuration.
+
+ Raise DuplicateSectionError if a section by the specified name
+ already exists. Raise ValueError if name is DEFAULT.
+ """
+ if section == self.default_section:
+ raise ValueError('Invalid section name: %r' % section)
+
+ if section in self._sections:
+ raise DuplicateSectionError(section)
+ self._sections[section] = self._dict()
+ self._proxies[section] = SectionProxy(self, section)
+
+ def has_section(self, section):
+ """Indicate whether the named section is present in the configuration.
+
+ The DEFAULT section is not acknowledged.
+ """
+ return section in self._sections
+
+ def options(self, section):
+ """Return a list of option names for the given section name."""
+ try:
+ opts = self._sections[section].copy()
+ except KeyError:
+ raise from_none(NoSectionError(section))
+ opts.update(self._defaults)
+ return list(opts.keys())
+
+ def read(self, filenames, encoding=None):
+ """Read and parse a filename or an iterable of filenames.
+
+ Files that cannot be opened are silently ignored; this is
+ designed so that you can specify an iterable of potential
+ configuration file locations (e.g. current directory, user's
+ home directory, systemwide directory), and all existing
+ configuration files in the iterable will be read. A single
+ filename may also be given.
+
+ Return list of successfully read files.
+ """
+ if isinstance(filenames, (str, bytes, PathLike)):
+ filenames = [filenames]
+ read_ok = []
+ for filename in filenames:
+ if isinstance(filename, PathLike):
+ filename = fspath(filename)
+ try:
+ with open(filename, encoding=encoding) as fp:
+ self._read(fp, filename)
+ except IOError:
+ continue
+ read_ok.append(filename)
+ return read_ok
+
+ def read_file(self, f, source=None):
+ """Like read() but the argument must be a file-like object.
+
+ The `f' argument must be iterable, returning one line at a time.
+ Optional second argument is the `source' specifying the name of the
+ file being read. If not given, it is taken from f.name. If `f' has no
+ `name' attribute, `<???>' is used.
+ """
+ if source is None:
+ try:
+ source = f.name
+ except AttributeError:
+ source = '<???>'
+ self._read(f, source)
+
+ def read_string(self, string, source='<string>'):
+ """Read configuration from a given string."""
+ sfile = io.StringIO(string)
+ self.read_file(sfile, source)
+
+ def read_dict(self, dictionary, source='<dict>'):
+ """Read configuration from a dictionary.
+
+ Keys are section names, values are dictionaries with keys and values
+ that should be present in the section. If the used dictionary type
+ preserves order, sections and their keys will be added in order.
+
+ All types held in the dictionary are converted to strings during
+ reading, including section names, option names and keys.
+
+ Optional second argument is the `source' specifying the name of the
+ dictionary being read.
+ """
+ elements_added = set()
+ for section, keys in dictionary.items():
+ section = str(section)
+ try:
+ self.add_section(section)
+ except (DuplicateSectionError, ValueError):
+ if self._strict and section in elements_added:
+ raise
+ elements_added.add(section)
+ for key, value in keys.items():
+ key = self.optionxform(str(key))
+ if value is not None:
+ value = str(value)
+ if self._strict and (section, key) in elements_added:
+ raise DuplicateOptionError(section, key, source)
+ elements_added.add((section, key))
+ self.set(section, key, value)
+
+ def readfp(self, fp, filename=None):
+ """Deprecated, use read_file instead."""
+ warnings.warn(
+ "This method will be removed in future versions. "
+ "Use 'parser.read_file()' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self.read_file(fp, source=filename)
+
+ def get(self, section, option, **kwargs):
+ """Get an option value for a given section.
+
+ If `vars' is provided, it must be a dictionary. The option is looked up
+ in `vars' (if provided), `section', and in `DEFAULTSECT' in that order.
+ If the key is not found and `fallback' is provided, it is used as
+ a fallback value. `None' can be provided as a `fallback' value.
+
+ If interpolation is enabled and the optional argument `raw' is False,
+ all interpolations are expanded in the return values.
+
+ Arguments `raw', `vars', and `fallback' are keyword only.
+
+ The section DEFAULT is special.
+ """
+ # keyword-only arguments
+ raw = kwargs.get('raw', False)
+ vars = kwargs.get('vars', None)
+ fallback = kwargs.get('fallback', _UNSET)
+
+ try:
+ d = self._unify_values(section, vars)
+ except NoSectionError:
+ if fallback is _UNSET:
+ raise
+ else:
+ return fallback
+ option = self.optionxform(option)
+ try:
+ value = d[option]
+ except KeyError:
+ if fallback is _UNSET:
+ raise NoOptionError(option, section)
+ else:
+ return fallback
+
+ if raw or value is None:
+ return value
+ else:
+ return self._interpolation.before_get(self, section, option, value, d)
+
+ def _get(self, section, conv, option, **kwargs):
+ return conv(self.get(section, option, **kwargs))
+
+ def _get_conv(self, section, option, conv, **kwargs):
+ # keyword-only arguments
+ kwargs.setdefault('raw', False)
+ kwargs.setdefault('vars', None)
+ fallback = kwargs.pop('fallback', _UNSET)
+ try:
+ return self._get(section, conv, option, **kwargs)
+ except (NoSectionError, NoOptionError):
+ if fallback is _UNSET:
+ raise
+ return fallback
+
+ # getint, getfloat and getboolean provided directly for backwards compat
+ def getint(self, section, option, **kwargs):
+ # keyword-only arguments
+ kwargs.setdefault('raw', False)
+ kwargs.setdefault('vars', None)
+ kwargs.setdefault('fallback', _UNSET)
+ return self._get_conv(section, option, int, **kwargs)
+
+ def getfloat(self, section, option, **kwargs):
+ # keyword-only arguments
+ kwargs.setdefault('raw', False)
+ kwargs.setdefault('vars', None)
+ kwargs.setdefault('fallback', _UNSET)
+ return self._get_conv(section, option, float, **kwargs)
+
+ def getboolean(self, section, option, **kwargs):
+ # keyword-only arguments
+ kwargs.setdefault('raw', False)
+ kwargs.setdefault('vars', None)
+ kwargs.setdefault('fallback', _UNSET)
+ return self._get_conv(section, option, self._convert_to_boolean, **kwargs)
+
+ def items(self, section=_UNSET, raw=False, vars=None):
+ """Return a list of (name, value) tuples for each option in a section.
+
+ All % interpolations are expanded in the return values, based on the
+ defaults passed into the constructor, unless the optional argument
+ `raw' is true. Additional substitutions may be provided using the
+ `vars' argument, which must be a dictionary whose contents overrides
+ any pre-existing defaults.
+
+ The section DEFAULT is special.
+ """
+ if section is _UNSET:
+ return super(RawConfigParser, self).items()
+ d = self._defaults.copy()
+ try:
+ d.update(self._sections[section])
+ except KeyError:
+ if section != self.default_section:
+ raise NoSectionError(section)
+ orig_keys = list(d.keys())
+ # Update with the entry specific variables
+ if vars:
+ for key, value in vars.items():
+ d[self.optionxform(key)] = value
+ value_getter = lambda option: self._interpolation.before_get(
+ self, section, option, d[option], d
+ )
+ if raw:
+ value_getter = lambda option: d[option]
+ return [(option, value_getter(option)) for option in orig_keys]
+
+ def popitem(self):
+ """Remove a section from the parser and return it as
+ a (section_name, section_proxy) tuple. If no section is present, raise
+ KeyError.
+
+ The section DEFAULT is never returned because it cannot be removed.
+ """
+ for key in self.sections():
+ value = self[key]
+ del self[key]
+ return key, value
+ raise KeyError
+
+ def optionxform(self, optionstr):
+ return optionstr.lower()
+
+ def has_option(self, section, option):
+ """Check for the existence of a given option in a given section.
+ If the specified `section' is None or an empty string, DEFAULT is
+ assumed. If the specified `section' does not exist, returns False."""
+ if not section or section == self.default_section:
+ option = self.optionxform(option)
+ return option in self._defaults
+ elif section not in self._sections:
+ return False
+ else:
+ option = self.optionxform(option)
+ return option in self._sections[section] or option in self._defaults
+
+ def set(self, section, option, value=None):
+ """Set an option."""
+ if value:
+ value = self._interpolation.before_set(self, section, option, value)
+ if not section or section == self.default_section:
+ sectdict = self._defaults
+ else:
+ try:
+ sectdict = self._sections[section]
+ except KeyError:
+ raise from_none(NoSectionError(section))
+ sectdict[self.optionxform(option)] = value
+
+ def write(self, fp, space_around_delimiters=True):
+ """Write an .ini-format representation of the configuration state.
+
+ If `space_around_delimiters' is True (the default), delimiters
+ between keys and values are surrounded by spaces.
+ """
+ if space_around_delimiters:
+ d = " {0} ".format(self._delimiters[0])
+ else:
+ d = self._delimiters[0]
+ if self._defaults:
+ self._write_section(fp, self.default_section, self._defaults.items(), d)
+ for section in self._sections:
+ self._write_section(fp, section, self._sections[section].items(), d)
+
+ def _write_section(self, fp, section_name, section_items, delimiter):
+ """Write a single section to the specified `fp'."""
+ fp.write("[{0}]\n".format(section_name))
+ for key, value in section_items:
+ value = self._interpolation.before_write(self, section_name, key, value)
+ if value is not None or not self._allow_no_value:
+ value = delimiter + str(value).replace('\n', '\n\t')
+ else:
+ value = ""
+ fp.write("{0}{1}\n".format(key, value))
+ fp.write("\n")
+
+ def remove_option(self, section, option):
+ """Remove an option."""
+ if not section or section == self.default_section:
+ sectdict = self._defaults
+ else:
+ try:
+ sectdict = self._sections[section]
+ except KeyError:
+ raise from_none(NoSectionError(section))
+ option = self.optionxform(option)
+ existed = option in sectdict
+ if existed:
+ del sectdict[option]
+ return existed
+
+ def remove_section(self, section):
+ """Remove a file section."""
+ existed = section in self._sections
+ if existed:
+ del self._sections[section]
+ del self._proxies[section]
+ return existed
+
+ def __getitem__(self, key):
+ if key != self.default_section and not self.has_section(key):
+ raise KeyError(key)
+ return self._proxies[key]
+
+ def __setitem__(self, key, value):
+ # To conform with the mapping protocol, overwrites existing values in
+ # the section.
+ if key in self and self[key] is value:
+ return
+ # XXX this is not atomic if read_dict fails at any point. Then again,
+ # no update method in configparser is atomic in this implementation.
+ if key == self.default_section:
+ self._defaults.clear()
+ elif key in self._sections:
+ self._sections[key].clear()
+ self.read_dict({key: value})
+
+ def __delitem__(self, key):
+ if key == self.default_section:
+ raise ValueError("Cannot remove the default section.")
+ if not self.has_section(key):
+ raise KeyError(key)
+ self.remove_section(key)
+
+ def __contains__(self, key):
+ return key == self.default_section or self.has_section(key)
+
+ def __len__(self):
+ return len(self._sections) + 1 # the default section
+
+ def __iter__(self):
+ # XXX does it break when underlying container state changed?
+ return itertools.chain((self.default_section,), self._sections.keys())
+
+ def _read(self, fp, fpname):
+ """Parse a sectioned configuration file.
+
+ Each section in a configuration file contains a header, indicated by
+ a name in square brackets (`[]'), plus key/value options, indicated by
+ `name' and `value' delimited with a specific substring (`=' or `:' by
+ default).
+
+ Values can span multiple lines, as long as they are indented deeper
+ than the first line of the value. Depending on the parser's mode, blank
+ lines may be treated as parts of multiline values or ignored.
+
+ Configuration files may include comments, prefixed by specific
+ characters (`#' and `;' by default). Comments may appear on their own
+ in an otherwise empty line or may be entered in lines holding values or
+ section names.
+ """
+ elements_added = set()
+ cursect = None # None, or a dictionary
+ sectname = None
+ optname = None
+ lineno = 0
+ indent_level = 0
+ e = None # None, or an exception
+ for lineno, line in enumerate(fp, start=1):
+ comment_start = sys.maxsize
+ # strip inline comments
+ inline_prefixes = dict((p, -1) for p in self._inline_comment_prefixes)
+ while comment_start == sys.maxsize and inline_prefixes:
+ next_prefixes = {}
+ for prefix, index in inline_prefixes.items():
+ index = line.find(prefix, index + 1)
+ if index == -1:
+ continue
+ next_prefixes[prefix] = index
+ if index == 0 or (index > 0 and line[index - 1].isspace()):
+ comment_start = min(comment_start, index)
+ inline_prefixes = next_prefixes
+ # strip full line comments
+ for prefix in self._comment_prefixes:
+ if line.strip().startswith(prefix):
+ comment_start = 0
+ break
+ if comment_start == sys.maxsize:
+ comment_start = None
+ value = line[:comment_start].strip()
+ if not value:
+ if self._empty_lines_in_values:
+ # add empty line to the value, but only if there was no
+ # comment on the line
+ if (
+ comment_start is None
+ and cursect is not None
+ and optname
+ and cursect[optname] is not None
+ ):
+ cursect[optname].append('') # newlines added at join
+ else:
+ # empty line marks end of value
+ indent_level = sys.maxsize
+ continue
+ # continuation line?
+ first_nonspace = self.NONSPACECRE.search(line)
+ cur_indent_level = first_nonspace.start() if first_nonspace else 0
+ if cursect is not None and optname and cur_indent_level > indent_level:
+ cursect[optname].append(value)
+ # a section header or option header?
+ else:
+ indent_level = cur_indent_level
+ # is it a section header?
+ mo = self.SECTCRE.match(value)
+ if mo:
+ sectname = mo.group('header')
+ if sectname in self._sections:
+ if self._strict and sectname in elements_added:
+ raise DuplicateSectionError(sectname, fpname, lineno)
+ cursect = self._sections[sectname]
+ elements_added.add(sectname)
+ elif sectname == self.default_section:
+ cursect = self._defaults
+ else:
+ cursect = self._dict()
+ self._sections[sectname] = cursect
+ self._proxies[sectname] = SectionProxy(self, sectname)
+ elements_added.add(sectname)
+ # So sections can't start with a continuation line
+ optname = None
+ # no section header in the file?
+ elif cursect is None:
+ raise MissingSectionHeaderError(fpname, lineno, line)
+ # an option line?
+ else:
+ mo = self._optcre.match(value)
+ if mo:
+ optname, vi, optval = mo.group('option', 'vi', 'value')
+ if not optname:
+ e = self._handle_error(e, fpname, lineno, line)
+ optname = self.optionxform(optname.rstrip())
+ if self._strict and (sectname, optname) in elements_added:
+ raise DuplicateOptionError(
+ sectname, optname, fpname, lineno
+ )
+ elements_added.add((sectname, optname))
+ # This check is fine because the OPTCRE cannot
+ # match if it would set optval to None
+ if optval is not None:
+ optval = optval.strip()
+ cursect[optname] = [optval]
+ else:
+ # valueless option handling
+ cursect[optname] = None
+ else:
+ # a non-fatal parsing error occurred. set up the
+ # exception but keep going. the exception will be
+ # raised at the end of the file and will contain a
+ # list of all bogus lines
+ e = self._handle_error(e, fpname, lineno, line)
+ self._join_multiline_values()
+ # if any parsing errors occurred, raise an exception
+ if e:
+ raise e
+
+ def _join_multiline_values(self):
+ defaults = self.default_section, self._defaults
+ all_sections = itertools.chain((defaults,), self._sections.items())
+ for section, options in all_sections:
+ for name, val in options.items():
+ if isinstance(val, list):
+ val = '\n'.join(val).rstrip()
+ options[name] = self._interpolation.before_read(
+ self, section, name, val
+ )
+
+ def _read_defaults(self, defaults):
+ """Read the defaults passed in the initializer.
+ Note: values can be non-string."""
+ for key, value in defaults.items():
+ self._defaults[self.optionxform(key)] = value
+
+ def _handle_error(self, exc, fpname, lineno, line):
+ if not exc:
+ exc = ParsingError(fpname)
+ exc.append(lineno, repr(line))
+ return exc
+
+ def _unify_values(self, section, vars):
+ """Create a sequence of lookups with 'vars' taking priority over
+ the 'section' which takes priority over the DEFAULTSECT.
+
+ """
+ sectiondict = {}
+ try:
+ sectiondict = self._sections[section]
+ except KeyError:
+ if section != self.default_section:
+ raise NoSectionError(section)
+ # Update with the entry specific variables
+ vardict = {}
+ if vars:
+ for key, value in vars.items():
+ if value is not None:
+ value = str(value)
+ vardict[self.optionxform(key)] = value
+ return _ChainMap(vardict, sectiondict, self._defaults)
+
+ def _convert_to_boolean(self, value):
+ """Return a boolean value translating from other types if necessary.
+ """
+ if value.lower() not in self.BOOLEAN_STATES:
+ raise ValueError('Not a boolean: %s' % value)
+ return self.BOOLEAN_STATES[value.lower()]
+
+ def _validate_value_types(self, **kwargs):
+ """Raises a TypeError for non-string values.
+
+ The only legal non-string value if we allow valueless
+ options is None, so we need to check if the value is a
+ string if:
+ - we do not allow valueless options, or
+ - we allow valueless options but the value is not None
+
+ For compatibility reasons this method is not used in classic set()
+ for RawConfigParsers. It is invoked in every case for mapping protocol
+ access and in ConfigParser.set().
+ """
+ # keyword-only arguments
+ section = kwargs.get('section', "")
+ option = kwargs.get('option', "")
+ value = kwargs.get('value', "")
+
+ if PY2 and bytes in (type(section), type(option), type(value)):
+ # we allow for a little unholy magic for Python 2 so that
+ # people not using unicode_literals can still use the library
+ # conveniently
+ warnings.warn(
+ "You passed a bytestring. Implicitly decoding as UTF-8 string."
+ " This will not work on Python 3. Please switch to using"
+ " Unicode strings across the board.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ if isinstance(section, bytes):
+ section = section.decode('utf8')
+ if isinstance(option, bytes):
+ option = option.decode('utf8')
+ if isinstance(value, bytes):
+ value = value.decode('utf8')
+
+ if not isinstance(section, str):
+ raise TypeError("section names must be strings")
+ if not isinstance(option, str):
+ raise TypeError("option keys must be strings")
+ if not self._allow_no_value or value:
+ if not isinstance(value, str):
+ raise TypeError("option values must be strings")
+
+ return section, option, value
+
+ @property
+ def converters(self):
+ return self._converters
+
+
+class ConfigParser(RawConfigParser):
+ """ConfigParser implementing interpolation."""
+
+ _DEFAULT_INTERPOLATION = BasicInterpolation()
+
+ def set(self, section, option, value=None):
+ """Set an option. Extends RawConfigParser.set by validating type and
+ interpolation syntax on the value."""
+ _, option, value = self._validate_value_types(option=option, value=value)
+ super(ConfigParser, self).set(section, option, value)
+
+ def add_section(self, section):
+ """Create a new section in the configuration. Extends
+ RawConfigParser.add_section by validating if the section name is
+ a string."""
+ section, _, _ = self._validate_value_types(section=section)
+ super(ConfigParser, self).add_section(section)
+
+ def _read_defaults(self, defaults):
+ """Reads the defaults passed in the initializer, implicitly converting
+ values to strings like the rest of the API.
+
+ Does not perform interpolation for backwards compatibility.
+ """
+ try:
+ hold_interpolation = self._interpolation
+ self._interpolation = Interpolation()
+ self.read_dict({self.default_section: defaults})
+ finally:
+ self._interpolation = hold_interpolation
+
+
+class SafeConfigParser(ConfigParser):
+ """ConfigParser alias for backwards compatibility purposes."""
+
+ def __init__(self, *args, **kwargs):
+ super(SafeConfigParser, self).__init__(*args, **kwargs)
+ warnings.warn(
+ "The SafeConfigParser class has been renamed to ConfigParser "
+ "in Python 3.2. This alias will be removed in future versions."
+ " Use ConfigParser directly instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+
+class SectionProxy(MutableMapping):
+ """A proxy for a single section from a parser."""
+
+ def __init__(self, parser, name):
+ """Creates a view on a section of the specified `name` in `parser`."""
+ self._parser = parser
+ self._name = name
+ for conv in parser.converters:
+ key = 'get' + conv
+ getter = functools.partial(self.get, _impl=getattr(parser, key))
+ setattr(self, key, getter)
+
+ def __repr__(self):
+ return '<Section: {0}>'.format(self._name)
+
+ def __getitem__(self, key):
+ if not self._parser.has_option(self._name, key):
+ raise KeyError(key)
+ return self._parser.get(self._name, key)
+
+ def __setitem__(self, key, value):
+ _, key, value = self._parser._validate_value_types(option=key, value=value)
+ return self._parser.set(self._name, key, value)
+
+ def __delitem__(self, key):
+ if not (
+ self._parser.has_option(self._name, key)
+ and self._parser.remove_option(self._name, key)
+ ):
+ raise KeyError(key)
+
+ def __contains__(self, key):
+ return self._parser.has_option(self._name, key)
+
+ def __len__(self):
+ return len(self._options())
+
+ def __iter__(self):
+ return self._options().__iter__()
+
+ def _options(self):
+ if self._name != self._parser.default_section:
+ return self._parser.options(self._name)
+ else:
+ return self._parser.defaults()
+
+ @property
+ def parser(self):
+ # The parser object of the proxy is read-only.
+ return self._parser
+
+ @property
+ def name(self):
+ # The name of the section on a proxy is read-only.
+ return self._name
+
+ def get(self, option, fallback=None, **kwargs):
+ """Get an option value.
+
+ Unless `fallback` is provided, `None` will be returned if the option
+ is not found.
+
+ """
+ # keyword-only arguments
+ kwargs.setdefault('raw', False)
+ kwargs.setdefault('vars', None)
+ _impl = kwargs.pop('_impl', None)
+ # If `_impl` is provided, it should be a getter method on the parser
+ # object that provides the desired type conversion.
+ if not _impl:
+ _impl = self._parser.get
+ return _impl(self._name, option, fallback=fallback, **kwargs)
+
+
+class ConverterMapping(MutableMapping):
+ """Enables reuse of get*() methods between the parser and section proxies.
+
+ If a parser class implements a getter directly, the value for the given
+ key will be ``None``. The presence of the converter name here enables
+ section proxies to find and use the implementation on the parser class.
+ """
+
+ GETTERCRE = re.compile(r"^get(?P<name>.+)$")
+
+ def __init__(self, parser):
+ self._parser = parser
+ self._data = {}
+ for getter in dir(self._parser):
+ m = self.GETTERCRE.match(getter)
+ if not m or not callable(getattr(self._parser, getter)):
+ continue
+ self._data[m.group('name')] = None # See class docstring.
+
+ def __getitem__(self, key):
+ return self._data[key]
+
+ def __setitem__(self, key, value):
+ try:
+ k = 'get' + key
+ except TypeError:
+ raise ValueError(
+ 'Incompatible key: {} (type: {})' ''.format(key, type(key))
+ )
+ if k == 'get':
+ raise ValueError('Incompatible key: cannot use "" as a name')
+ self._data[key] = value
+ func = functools.partial(self._parser._get_conv, conv=value)
+ func.converter = value
+ setattr(self._parser, k, func)
+ for proxy in self._parser.values():
+ getter = functools.partial(proxy.get, _impl=func)
+ setattr(proxy, k, getter)
+
+ def __delitem__(self, key):
+ try:
+ k = 'get' + (key or None)
+ except TypeError:
+ raise KeyError(key)
+ del self._data[key]
+ for inst in itertools.chain((self._parser,), self._parser.values()):
+ try:
+ delattr(inst, k)
+ except AttributeError:
+ # don't raise since the entry was present in _data, silently
+ # clean up
+ continue
+
+ def __iter__(self):
+ return iter(self._data)
+
+ def __len__(self):
+ return len(self._data)
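+
+
+# Illustrative sketch (not part of upstream configparser): registering a custom
+# converter through the ``converters`` mapping described above generates a
+# matching get*() method on the parser and on every section proxy. Defined only
+# as an example; nothing in the library calls it.
+def _converter_mapping_example():
+    parser = ConfigParser(converters={'list': lambda value: value.split(',')})
+    parser.read_string("[srv]\nhosts = a,b,c\n")
+    # The 'list' converter produced parser.getlist() and proxy.getlist().
+    return parser.getlist('srv', 'hosts')  # -> ['a', 'b', 'c']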
diff --git a/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/configparser/helpers.py b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/configparser/helpers.py
new file mode 100644
index 0000000000..e7eb72243f
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/configparser/helpers.py
@@ -0,0 +1,274 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import abc
+import os
+
+try:
+ from collections.abc import MutableMapping
+except ImportError:
+ from collections import MutableMapping
+
+try:
+ from collections import UserDict
+except ImportError:
+ from UserDict import UserDict
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ from ordereddict import OrderedDict
+
+try:
+ import pathlib
+except ImportError:
+ pathlib = None
+
+from io import open
+import sys
+
+try:
+ from thread import get_ident
+except ImportError:
+ try:
+ from _thread import get_ident
+ except ImportError:
+ from _dummy_thread import get_ident
+
+
+__all__ = ['UserDict', 'OrderedDict', 'open']
+
+
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+
+native_str = str
+str = type('str')
+
+
+def from_none(exc):
+ """raise from_none(ValueError('a')) == raise ValueError('a') from None"""
+ exc.__cause__ = None
+ exc.__suppress_context__ = True
+ return exc
+
+
+# from reprlib 3.2.1
+def recursive_repr(fillvalue='...'):
+ 'Decorator to make a repr function return fillvalue for a recursive call'
+
+ def decorating_function(user_function):
+ repr_running = set()
+
+ def wrapper(self):
+ key = id(self), get_ident()
+ if key in repr_running:
+ return fillvalue
+ repr_running.add(key)
+ try:
+ result = user_function(self)
+ finally:
+ repr_running.discard(key)
+ return result
+
+ # Can't use functools.wraps() here because of bootstrap issues
+ wrapper.__module__ = getattr(user_function, '__module__')
+ wrapper.__doc__ = getattr(user_function, '__doc__')
+ wrapper.__name__ = getattr(user_function, '__name__')
+ wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
+ return wrapper
+
+ return decorating_function
+
+
+# from collections 3.2.1
+class _ChainMap(MutableMapping):
+ ''' A ChainMap groups multiple dicts (or other mappings) together
+ to create a single, updateable view.
+
+ The underlying mappings are stored in a list. That list is public and can
+    be accessed or updated using the *maps* attribute. There is no other state.
+
+ Lookups search the underlying mappings successively until a key is found.
+ In contrast, writes, updates, and deletions only operate on the first
+ mapping.
+
+ '''
+
+ def __init__(self, *maps):
+ '''Initialize a ChainMap by setting *maps* to the given mappings.
+ If no mappings are provided, a single empty dictionary is used.
+
+ '''
+ self.maps = list(maps) or [{}] # always at least one map
+
+ def __missing__(self, key):
+ raise KeyError(key)
+
+ def __getitem__(self, key):
+ for mapping in self.maps:
+ try:
+ # can't use 'key in mapping' with defaultdict
+ return mapping[key]
+ except KeyError:
+ pass
+ # support subclasses that define __missing__
+ return self.__missing__(key)
+
+ def get(self, key, default=None):
+ return self[key] if key in self else default
+
+ def __len__(self):
+ # reuses stored hash values if possible
+ return len(set().union(*self.maps))
+
+ def __iter__(self):
+ return iter(set().union(*self.maps))
+
+ def __contains__(self, key):
+ return any(key in m for m in self.maps)
+
+ @recursive_repr()
+ def __repr__(self):
+ return '{0.__class__.__name__}({1})'.format(
+ self, ', '.join(map(repr, self.maps))
+ )
+
+ @classmethod
+ def fromkeys(cls, iterable, *args):
+ 'Create a ChainMap with a single dict created from the iterable.'
+ return cls(dict.fromkeys(iterable, *args))
+
+ def copy(self):
+ """
+ New ChainMap or subclass with a new copy of
+ maps[0] and refs to maps[1:]
+ """
+ return self.__class__(self.maps[0].copy(), *self.maps[1:])
+
+ __copy__ = copy
+
+ def new_child(self): # like Django's Context.push()
+ 'New ChainMap with a new dict followed by all previous maps.'
+ return self.__class__({}, *self.maps)
+
+ @property
+ def parents(self): # like Django's Context.pop()
+ 'New ChainMap from maps[1:].'
+ return self.__class__(*self.maps[1:])
+
+ def __setitem__(self, key, value):
+ self.maps[0][key] = value
+
+ def __delitem__(self, key):
+ try:
+ del self.maps[0][key]
+ except KeyError:
+ raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+ def popitem(self):
+ """
+ Remove and return an item pair from maps[0].
+        Raise KeyError if maps[0] is empty.
+ """
+ try:
+ return self.maps[0].popitem()
+ except KeyError:
+ raise KeyError('No keys found in the first mapping.')
+
+ def pop(self, key, *args):
+ """
+ Remove *key* from maps[0] and return its value.
+ Raise KeyError if *key* not in maps[0].
+ """
+
+ try:
+ return self.maps[0].pop(key, *args)
+ except KeyError:
+ raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+ def clear(self):
+ 'Clear maps[0], leaving maps[1:] intact.'
+ self.maps[0].clear()
+
+
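+# Illustrative sketch (not part of the upstream helpers module): shows the
+# lookup/write split described in the _ChainMap docstring. Defined only as an
+# example; nothing imports or calls it.
+def _chainmap_example():
+    defaults = {'colour': 'red', 'user': 'guest'}
+    overrides = {'user': 'admin'}
+    chain = _ChainMap(overrides, defaults)
+    # Lookups fall through the maps in order, so 'user' wins from maps[0]
+    # while 'colour' is found in the second mapping.
+    found = (chain['user'], chain['colour'])  # -> ('admin', 'red')
+    # Writes and deletions only ever touch maps[0].
+    chain['colour'] = 'blue'
+    return found, overrides['colour']  # -> (('admin', 'red'), 'blue')
+
+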
+try:
+ from collections import ChainMap
+except ImportError:
+ ChainMap = _ChainMap
+
+
+_ABC = getattr(
+ abc,
+ 'ABC',
+ # Python 3.3 compatibility
+ abc.ABCMeta(native_str('__ABC'), (object,), dict(__metaclass__=abc.ABCMeta)),
+)
+
+
+class _PathLike(_ABC):
+
+ """Abstract base class for implementing the file system path protocol."""
+
+ @abc.abstractmethod
+ def __fspath__(self):
+ """Return the file system path representation of the object."""
+ raise NotImplementedError
+
+ @classmethod
+ def __subclasshook__(cls, subclass):
+ return bool(
+ hasattr(subclass, '__fspath__')
+ # workaround for Python 3.5
+ or pathlib
+ and issubclass(subclass, pathlib.Path)
+ )
+
+
+PathLike = getattr(os, 'PathLike', _PathLike)
+
+
+def _fspath(path):
+ """Return the path representation of a path-like object.
+
+ If str or bytes is passed in, it is returned unchanged. Otherwise the
+ os.PathLike interface is used to get the path representation. If the
+ path representation is not str or bytes, TypeError is raised. If the
+ provided path is not str, bytes, or os.PathLike, TypeError is raised.
+ """
+ if isinstance(path, (str, bytes)):
+ return path
+
+ if not hasattr(path, '__fspath__') and isinstance(path, pathlib.Path):
+ # workaround for Python 3.5
+ return str(path)
+
+ # Work from the object's type to match method resolution of other magic
+ # methods.
+ path_type = type(path)
+ try:
+ path_repr = path_type.__fspath__(path)
+ except AttributeError:
+
+ if hasattr(path_type, '__fspath__'):
+ raise
+ else:
+ raise TypeError(
+ "expected str, bytes or os.PathLike object, "
+ "not " + path_type.__name__
+ )
+ if isinstance(path_repr, (str, bytes)):
+ return path_repr
+ else:
+ raise TypeError(
+ "expected {}.__fspath__() to return str or bytes, "
+ "not {}".format(path_type.__name__, type(path_repr).__name__)
+ )
+
+
+fspath = getattr(os, 'fspath', _fspath)
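A minimal sketch of how the fspath/PathLike fallbacks above behave, assuming the names are imported from this helpers module (path per the RECORD file below); ReportPath is an illustrative class, not part of the vendored code:

    import os
    from backports.configparser.helpers import PathLike, fspath  # assumed vendored path

    class ReportPath(object):
        """Illustrative object implementing the file system path protocol."""
        def __init__(self, name):
            self.name = name
        def __fspath__(self):
            return os.path.join('reports', self.name)

    # str/bytes pass through unchanged; other objects are asked for __fspath__,
    # mirroring os.fspath()/os.PathLike on Python 3.6+.
    assert fspath('data.txt') == 'data.txt'
    assert fspath(ReportPath('q1.csv')) == os.path.join('reports', 'q1.csv')
    assert isinstance(ReportPath('q1.csv'), PathLike)
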
diff --git a/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/LICENSE
new file mode 100644
index 0000000000..5e795a61f3
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/LICENSE
@@ -0,0 +1,7 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/METADATA
new file mode 100644
index 0000000000..e805cc962c
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/METADATA
@@ -0,0 +1,259 @@
+Metadata-Version: 2.1
+Name: configparser
+Version: 4.0.2
+Summary: Updated configparser from Python 3.7 for Python 2.6+.
+Home-page: https://github.com/jaraco/configparser/
+Author: Łukasz Langa
+Author-email: lukasz@langa.pl
+Maintainer: Jason R. Coombs
+Maintainer-email: jaraco@jaraco.com
+License: UNKNOWN
+Keywords: configparser ini parsing conf cfg configuration file
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Requires-Python: >=2.6
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=3.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (!=3.7.3,>=3.5) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=1.2) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-black-multipy ; extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/configparser.svg
+ :target: https://pypi.org/project/configparser
+
+.. image:: https://img.shields.io/pypi/pyversions/configparser.svg
+
+.. image:: https://img.shields.io/travis/jaraco/configparser/master.svg
+ :target: https://travis-ci.org/jaraco/configparser
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+ :alt: Code style: Black
+
+.. .. image:: https://img.shields.io/appveyor/ci/jaraco/configparser/master.svg
+.. :target: https://ci.appveyor.com/project/jaraco/configparser/branch/master
+
+.. image:: https://readthedocs.org/projects/configparser/badge/?version=latest
+ :target: https://configparser.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://tidelift.com/badges/package/pypi/configparser
+ :target: https://tidelift.com/subscription/pkg/pypi-configparser?utm_source=pypi-configparser&utm_medium=readme
+
+
+The ancient ``ConfigParser`` module available in the standard library 2.x has
+seen a major update in Python 3.2. This is a backport of those changes so that
+they can be used directly in Python 2.6 - 3.5.
+
+To use the ``configparser`` backport instead of the built-in version on both
+Python 2 and Python 3, simply import it explicitly as a backport::
+
+ from backports import configparser
+
+If you'd like to use the backport on Python 2 and the built-in version on
+Python 3, use that invocation instead::
+
+ import configparser
+
+For detailed documentation consult the vanilla version at
+http://docs.python.org/3/library/configparser.html.
+
+Why you'll love ``configparser``
+--------------------------------
+
+While almost completely compatible with its older brother, ``configparser``
+sports a bunch of interesting new features:
+
+* full mapping protocol access (`more info
+ <http://docs.python.org/3/library/configparser.html#mapping-protocol-access>`_)::
+
+ >>> parser = ConfigParser()
+ >>> parser.read_string("""
+ [DEFAULT]
+ location = upper left
+ visible = yes
+ editable = no
+ color = blue
+
+ [main]
+ title = Main Menu
+ color = green
+
+ [options]
+ title = Options
+ """)
+ >>> parser['main']['color']
+ 'green'
+ >>> parser['main']['editable']
+ 'no'
+ >>> section = parser['options']
+ >>> section['title']
+ 'Options'
+ >>> section['title'] = 'Options (editable: %(editable)s)'
+ >>> section['title']
+ 'Options (editable: no)'
+
+* there's now one default ``ConfigParser`` class, which basically is the old
+ ``SafeConfigParser`` with a bunch of tweaks which make it more predictable for
+ users. Don't need interpolation? Simply use
+ ``ConfigParser(interpolation=None)``, no need to use a distinct
+ ``RawConfigParser`` anymore.
+
+* the parser is highly `customizable upon instantiation
+ <http://docs.python.org/3/library/configparser.html#customizing-parser-behaviour>`__
+ supporting things like changing option delimiters, comment characters, the
+ name of the DEFAULT section, the interpolation syntax, etc.
+
+* you can easily create your own interpolation syntax but there are two powerful
+ implementations built-in (`more info
+ <http://docs.python.org/3/library/configparser.html#interpolation-of-values>`__):
+
+ * the classic ``%(string-like)s`` syntax (called ``BasicInterpolation``)
+
+ * a new ``${buildout:like}`` syntax (called ``ExtendedInterpolation``)
+
+* fallback values may be specified in getters (`more info
+ <http://docs.python.org/3/library/configparser.html#fallback-values>`__)::
+
+ >>> config.get('closet', 'monster',
+ ... fallback='No such things as monsters')
+ 'No such things as monsters'
+
+* ``ConfigParser`` objects can now read data directly `from strings
+ <http://docs.python.org/3/library/configparser.html#configparser.ConfigParser.read_string>`__
+ and `from dictionaries
+ <http://docs.python.org/3/library/configparser.html#configparser.ConfigParser.read_dict>`__.
+ That means importing configuration from JSON or specifying default values for
+ the whole configuration (multiple sections) is now a single line of code. Same
+ goes for copying data from another ``ConfigParser`` instance, thanks to its
+ mapping protocol support.
+
+* many smaller tweaks, updates and fixes
+
+A few words about Unicode
+-------------------------
+
+``configparser`` comes from Python 3 and as such it works well with Unicode.
+The library is generally cleaned up in terms of internal data storage and
+reading/writing files. There are a couple of incompatibilities with the old
+``ConfigParser`` due to that. However, the work required to migrate is well
+worth it as it shows the issues that would likely come up during migration of
+your project to Python 3.
+
+The design assumes that Unicode strings are used whenever possible [1]_. That
+gives you the certainty that what's stored in a configuration object is text.
+Once your configuration is read, the rest of your application doesn't have to
+deal with encoding issues. All you have is text [2]_. The only two phases when
+you should explicitly state encoding are when you either read from an external
+source (e.g. a file) or write back.
+
+Versioning
+----------
+
+This project uses `semver <https://semver.org/spec/v2.0.0.html>`_ to
+communicate the impact of various releases while periodically syncing
+with the upstream implementation in CPython.
+`The changelog <https://github.com/jaraco/configparser/blob/master/CHANGES.rst>`_
+serves as a reference indicating which versions incorporate
+which upstream functionality.
+
+Prior to the ``4.0.0`` release, `another scheme
+<https://github.com/jaraco/configparser/blob/3.8.1/README.rst#versioning>`_
+was used to associate the CPython and backports releases.
+
+Maintenance
+-----------
+
+This backport was originally authored by Łukasz Langa, the current vanilla
+``configparser`` maintainer for CPython, and is currently maintained by
+Jason R. Coombs:
+
+* `configparser repository <https://github.com/jaraco/configparser>`_
+
+* `configparser issue tracker <https://github.com/jaraco/configparser/issues>`_
+
+Security Contact
+----------------
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure.
+
+Conversion Process
+------------------
+
+This section is technical and should bother you only if you are wondering how
+this backport is produced. If the implementation details of this backport are
+not important for you, feel free to ignore the following content.
+
+``configparser`` is converted using `python-future
+<http://python-future.org>`_. The project takes the following
+branching approach:
+
+* the ``3.x`` branch holds unchanged files synchronized from the upstream
+ CPython repository. The synchronization is currently done by manually copying
+  the required files and stating from which CPython changeset they come.
+
+* the ``master`` branch holds a version of the ``3.x`` code with some tweaks
+ that make it independent from libraries and constructions unavailable on 2.x.
+ Code on this branch still *must* work on the corresponding Python 3.x but
+ will also work on Python 2.6 and 2.7 (including PyPy). You can check this
+  by running the supplied unit tests with ``tox``.
+
+The process works like this:
+
+1. In the ``3.x`` branch, run ``pip-run -- sync-upstream.py``, which
+ downloads the latest stable release of Python and copies the relevant
+ files from there into their new locations here and then commits those
+ changes with a nice reference to the relevant upstream commit hash.
+
+2. I check for new names in ``__all__`` and update imports in
+ ``configparser.py`` accordingly. I run the tests on Python 3. Commit.
+
+3. I merge the new commit to ``master``. I run ``tox``. Commit.
+
+4. If there are necessary changes, I do them now (on ``master``). Note that
+ the changes should be written in the syntax subset supported by Python
+ 2.6.
+
+5. I run ``tox``. If it works, I update the docs and release the new version.
+   Otherwise, I go back to point 3. I might use ``pasteurize`` to suggest the
+   required changes, but usually I do them manually to keep the resulting
+   code in a nicer form.
+
+
+Footnotes
+---------
+
+.. [1] To somewhat ease migration, passing bytestrings is still supported but
+ they are converted to Unicode for internal storage anyway. This means
+ that for the vast majority of strings used in configuration files, it
+ won't matter if you pass them as bytestrings or Unicode. However, if you
+ pass a bytestring that cannot be converted to Unicode using the naive
+ ASCII codec, a ``UnicodeDecodeError`` will be raised. This is purposeful
+ and helps you manage proper encoding for all content you store in
+ memory, read from various sources and write back.
+
+.. [2] Life gets much easier when you understand that you basically manage
+ **text** in your application. You don't care about bytes but about
+ letters. In that regard the concept of content encoding is meaningless.
+ The only time when you deal with raw bytes is when you write the data to
+ a file. Then you have to specify how your text should be encoded. On
+ the other end, to get meaningful text from a file, the application
+ reading it has to know which encoding was used during its creation. But
+ once the bytes are read and properly decoded, all you have is text. This
+ is especially powerful when you start interacting with multiple data
+ sources. Even if each of them uses a different encoding, inside your
+ application data is held in abstract text form. You can program your
+ business logic without worrying about which data came from which source.
+ You can freely exchange the data you store between sources. Only
+ reading/writing files requires encoding your text to bytes.
+
+
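A short sketch tying together the features described above (mapping-protocol access, ``read_dict`` and getter fallbacks); the section and option names are illustrative:

    from backports import configparser

    parser = configparser.ConfigParser()
    parser.read_dict({
        'DEFAULT': {'visible': 'yes'},
        'main': {'title': 'Main Menu', 'color': 'green'},
    })

    # mapping-protocol access, with DEFAULT values showing through
    assert parser['main']['color'] == 'green'
    assert parser['main']['visible'] == 'yes'

    # getter-level fallback for a missing option
    assert parser.get('main', 'monster', fallback='no such thing') == 'no such thing'
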
diff --git a/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/RECORD
new file mode 100644
index 0000000000..a4f777392e
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/RECORD
@@ -0,0 +1,9 @@
+configparser.py,sha256=4VADEswCwzy_RDVgvje3BmZhD6iwo3k4EkUZcgzLD4M,1546
+backports/__init__.py,sha256=elt6uFwbaEv80X8iGWsCJ_w_n_h1X8repgOoNrN0Syg,212
+backports/configparser/__init__.py,sha256=thhQqB1qWNKf-F3CpZFYsjC8YT-_I_vF0w4JiuQfiWI,56628
+backports/configparser/helpers.py,sha256=TxT00ldsHvIciQpml1YaoHfdtTl033Km6ywwT-U2nRc,7543
+configparser-4.0.2.dist-info/LICENSE,sha256=pV4v_ptEmY5iHVHYwJS-0JrMS1I27nPX3zlaM7o8GP0,1050
+configparser-4.0.2.dist-info/METADATA,sha256=oDqeXQXq8JhFEDHKDOBmbUtXmQ3CiiXB1Mr4UheTZ8Y,10910
+configparser-4.0.2.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
+configparser-4.0.2.dist-info/top_level.txt,sha256=mIs8gajd7cvEWhVluv4u6ocaHw_TJ9rOrpkZEFv-7Hc,23
+configparser-4.0.2.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/WHEEL
new file mode 100644
index 0000000000..8b701e93c2
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.33.6)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/top_level.txt
new file mode 100644
index 0000000000..a6cb03ad92
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info/top_level.txt
@@ -0,0 +1,2 @@
+backports
+configparser
diff --git a/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser.py b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser.py
new file mode 100644
index 0000000000..0a18360239
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""Convenience module importing everything from backports.configparser."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from backports.configparser import (
+ RawConfigParser,
+ ConfigParser,
+ SafeConfigParser,
+ SectionProxy,
+ Interpolation,
+ BasicInterpolation,
+ ExtendedInterpolation,
+ LegacyInterpolation,
+ NoSectionError,
+ DuplicateSectionError,
+ DuplicateOptionError,
+ NoOptionError,
+ InterpolationError,
+ InterpolationMissingOptionError,
+ InterpolationSyntaxError,
+ InterpolationDepthError,
+ ParsingError,
+ MissingSectionHeaderError,
+ ConverterMapping,
+ DEFAULTSECT,
+ MAX_INTERPOLATION_DEPTH,
+)
+
+from backports.configparser import Error, _UNSET, _default_dict, _ChainMap # noqa: F401
+
+__all__ = [
+ "NoSectionError",
+ "DuplicateOptionError",
+ "DuplicateSectionError",
+ "NoOptionError",
+ "InterpolationError",
+ "InterpolationDepthError",
+ "InterpolationMissingOptionError",
+ "InterpolationSyntaxError",
+ "ParsingError",
+ "MissingSectionHeaderError",
+ "ConfigParser",
+ "SafeConfigParser",
+ "RawConfigParser",
+ "Interpolation",
+ "BasicInterpolation",
+ "ExtendedInterpolation",
+ "LegacyInterpolation",
+ "SectionProxy",
+ "ConverterMapping",
+ "DEFAULTSECT",
+ "MAX_INTERPOLATION_DEPTH",
+]
+
+# NOTE: names missing from __all__ imported anyway for backwards compatibility.
diff --git a/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/LICENSE.txt b/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/LICENSE.txt
new file mode 100644
index 0000000000..5de20277df
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/LICENSE.txt
@@ -0,0 +1,122 @@
+
+
+A. HISTORY OF THE SOFTWARE
+==========================
+
+contextlib2 is a derivative of the contextlib module distributed by the PSF
+as part of the Python standard library. Accordingly, it is itself redistributed
+under the PSF license (reproduced in full below). As the contextlib module
+was added only in Python 2.5, the licenses for earlier Python versions are
+not applicable and have not been included.
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations (now Zope
+Corporation, see http://www.zope.com). In 2001, the Python Software
+Foundation (PSF, see http://www.python.org/psf/) was formed, a
+non-profit organization created specifically to own Python-related
+Intellectual Property. Zope Corporation is a sponsoring member of
+the PSF.
+
+All Python releases are Open Source (see http://www.opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases that included the contextlib module.
+
+ Release Derived Year Owner GPL-
+ from compatible? (1)
+
+ 2.5 2.4 2006 PSF yes
+ 2.5.1 2.5 2007 PSF yes
+ 2.5.2 2.5.1 2008 PSF yes
+ 2.5.3 2.5.2 2008 PSF yes
+ 2.6 2.5 2008 PSF yes
+ 2.6.1 2.6 2008 PSF yes
+ 2.6.2 2.6.1 2009 PSF yes
+ 2.6.3 2.6.2 2009 PSF yes
+ 2.6.4 2.6.3 2009 PSF yes
+ 2.6.5 2.6.4 2010 PSF yes
+ 3.0 2.6 2008 PSF yes
+ 3.0.1 3.0 2009 PSF yes
+ 3.1 3.0.1 2009 PSF yes
+ 3.1.1 3.1 2009 PSF yes
+ 3.1.2 3.1.1 2010 PSF yes
+ 3.1.3 3.1.2 2010 PSF yes
+ 3.1.4 3.1.3 2011 PSF yes
+ 3.2 3.1 2011 PSF yes
+ 3.2.1 3.2 2011 PSF yes
+ 3.2.2 3.2.1 2011 PSF yes
+ 3.3 3.2 2012 PSF yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011 Python Software Foundation; All Rights Reserved" are retained in Python
+alone or in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
diff --git a/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/METADATA
new file mode 100644
index 0000000000..c44f02deb5
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/METADATA
@@ -0,0 +1,70 @@
+Metadata-Version: 2.1
+Name: contextlib2
+Version: 0.6.0.post1
+Summary: Backports and enhancements for the contextlib module
+Home-page: http://contextlib2.readthedocs.org
+Author: Nick Coghlan
+Author-email: ncoghlan@gmail.com
+License: PSF License
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+
+.. image:: https://jazzband.co/static/img/badge.svg
+ :target: https://jazzband.co/
+ :alt: Jazzband
+
+.. image:: https://readthedocs.org/projects/contextlib2/badge/?version=latest
+ :target: https://contextlib2.readthedocs.org/
+ :alt: Latest Docs
+
+.. image:: https://img.shields.io/travis/jazzband/contextlib2/master.svg
+ :target: http://travis-ci.org/jazzband/contextlib2
+
+.. image:: https://coveralls.io/repos/github/jazzband/contextlib2/badge.svg?branch=master
+ :target: https://coveralls.io/github/jazzband/contextlib2?branch=master
+
+.. image:: https://landscape.io/github/jazzband/contextlib2/master/landscape.svg
+ :target: https://landscape.io/github/jazzband/contextlib2/
+
+contextlib2 is a backport of the `standard library's contextlib
+module <https://docs.python.org/3.5/library/contextlib.html>`_ to
+earlier Python versions.
+
+It also serves as a real world proving ground for possible future
+enhancements to the standard library version.
+
+Development
+-----------
+
+contextlib2 has no runtime dependencies, but requires ``unittest2`` for testing
+on Python 2.x, as well as ``setuptools`` and ``wheel`` to generate universal
+wheel archives.
+
+Local testing is just a matter of running ``python test_contextlib2.py``.
+
+You can test against multiple versions of Python with
+`tox <https://tox.testrun.org/>`_::
+
+ pip install tox
+ tox
+
+Versions currently tested in both tox and Travis CI are:
+
+* CPython 2.7
+* CPython 3.4
+* CPython 3.5
+* CPython 3.6
+* CPython 3.7
+* PyPy
+* PyPy3
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/RECORD
new file mode 100644
index 0000000000..f16410863e
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/RECORD
@@ -0,0 +1,6 @@
+contextlib2.py,sha256=5HjGflUzwWAUfcILhSmC2GqvoYdZZzFzVfIDztHigUs,16915
+contextlib2-0.6.0.post1.dist-info/LICENSE.txt,sha256=xqev-sas2tLS3YfS12hDhiSraSYY2x8CvqOxHT85ePA,6054
+contextlib2-0.6.0.post1.dist-info/METADATA,sha256=_kBcf3VJkbe-EMyAM1c5t5sRwBFfFu5YcfWCJMgVO1Q,2297
+contextlib2-0.6.0.post1.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
+contextlib2-0.6.0.post1.dist-info/top_level.txt,sha256=RxWWBMkHA_rsw1laXJ8L3yE_fyYaBmvt2bVUvj3WbMg,12
+contextlib2-0.6.0.post1.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/WHEEL
new file mode 100644
index 0000000000..8b701e93c2
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.33.6)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/top_level.txt
new file mode 100644
index 0000000000..03fdf8ed24
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info/top_level.txt
@@ -0,0 +1 @@
+contextlib2
diff --git a/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2.py b/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2.py
new file mode 100644
index 0000000000..3aae8f4117
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2.py
@@ -0,0 +1,518 @@
+"""contextlib2 - backports and enhancements to the contextlib module"""
+
+import abc
+import sys
+import warnings
+from collections import deque
+from functools import wraps
+
+__all__ = ["contextmanager", "closing", "nullcontext",
+ "AbstractContextManager",
+ "ContextDecorator", "ExitStack",
+ "redirect_stdout", "redirect_stderr", "suppress"]
+
+# Backwards compatibility
+__all__ += ["ContextStack"]
+
+
+# Backport abc.ABC
+if sys.version_info[:2] >= (3, 4):
+ _abc_ABC = abc.ABC
+else:
+ _abc_ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
+
+
+# Backport classic class MRO
+def _classic_mro(C, result):
+ if C in result:
+ return
+ result.append(C)
+ for B in C.__bases__:
+ _classic_mro(B, result)
+ return result
+
+
+# Backport _collections_abc._check_methods
+def _check_methods(C, *methods):
+ try:
+ mro = C.__mro__
+ except AttributeError:
+ mro = tuple(_classic_mro(C, []))
+
+ for method in methods:
+ for B in mro:
+ if method in B.__dict__:
+ if B.__dict__[method] is None:
+ return NotImplemented
+ break
+ else:
+ return NotImplemented
+ return True
+
+
+class AbstractContextManager(_abc_ABC):
+ """An abstract base class for context managers."""
+
+ def __enter__(self):
+ """Return `self` upon entering the runtime context."""
+ return self
+
+ @abc.abstractmethod
+ def __exit__(self, exc_type, exc_value, traceback):
+ """Raise any exception triggered within the runtime context."""
+ return None
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ """Check whether subclass is considered a subclass of this ABC."""
+ if cls is AbstractContextManager:
+ return _check_methods(C, "__enter__", "__exit__")
+ return NotImplemented
+
+
+class ContextDecorator(object):
+ """A base class or mixin that enables context managers to work as decorators."""
+
+ def refresh_cm(self):
+ """Returns the context manager used to actually wrap the call to the
+ decorated function.
+
+ The default implementation just returns *self*.
+
+ Overriding this method allows otherwise one-shot context managers
+ like _GeneratorContextManager to support use as decorators via
+ implicit recreation.
+
+ DEPRECATED: refresh_cm was never added to the standard library's
+ ContextDecorator API
+ """
+ warnings.warn("refresh_cm was never added to the standard library",
+ DeprecationWarning)
+ return self._recreate_cm()
+
+ def _recreate_cm(self):
+ """Return a recreated instance of self.
+
+ Allows an otherwise one-shot context manager like
+ _GeneratorContextManager to support use as
+ a decorator via implicit recreation.
+
+ This is a private interface just for _GeneratorContextManager.
+ See issue #11647 for details.
+ """
+ return self
+
+ def __call__(self, func):
+ @wraps(func)
+ def inner(*args, **kwds):
+ with self._recreate_cm():
+ return func(*args, **kwds)
+ return inner
+
+
+class _GeneratorContextManager(ContextDecorator):
+ """Helper for @contextmanager decorator."""
+
+ def __init__(self, func, args, kwds):
+ self.gen = func(*args, **kwds)
+ self.func, self.args, self.kwds = func, args, kwds
+ # Issue 19330: ensure context manager instances have good docstrings
+ doc = getattr(func, "__doc__", None)
+ if doc is None:
+ doc = type(self).__doc__
+ self.__doc__ = doc
+ # Unfortunately, this still doesn't provide good help output when
+ # inspecting the created context manager instances, since pydoc
+ # currently bypasses the instance docstring and shows the docstring
+ # for the class instead.
+ # See http://bugs.python.org/issue19404 for more details.
+
+ def _recreate_cm(self):
+ # _GCM instances are one-shot context managers, so the
+ # CM must be recreated each time a decorated function is
+ # called
+ return self.__class__(self.func, self.args, self.kwds)
+
+ def __enter__(self):
+ try:
+ return next(self.gen)
+ except StopIteration:
+ raise RuntimeError("generator didn't yield")
+
+ def __exit__(self, type, value, traceback):
+ if type is None:
+ try:
+ next(self.gen)
+ except StopIteration:
+ return
+ else:
+ raise RuntimeError("generator didn't stop")
+ else:
+ if value is None:
+ # Need to force instantiation so we can reliably
+ # tell if we get the same exception back
+ value = type()
+ try:
+ self.gen.throw(type, value, traceback)
+ raise RuntimeError("generator didn't stop after throw()")
+ except StopIteration as exc:
+ # Suppress StopIteration *unless* it's the same exception that
+ # was passed to throw(). This prevents a StopIteration
+ # raised inside the "with" statement from being suppressed.
+ return exc is not value
+ except RuntimeError as exc:
+ # Don't re-raise the passed in exception
+ if exc is value:
+ return False
+ # Likewise, avoid suppressing if a StopIteration exception
+ # was passed to throw() and later wrapped into a RuntimeError
+ # (see PEP 479).
+ if _HAVE_EXCEPTION_CHAINING and exc.__cause__ is value:
+ return False
+ raise
+ except:
+ # only re-raise if it's *not* the exception that was
+ # passed to throw(), because __exit__() must not raise
+ # an exception unless __exit__() itself failed. But throw()
+ # has to raise the exception to signal propagation, so this
+ # fixes the impedance mismatch between the throw() protocol
+ # and the __exit__() protocol.
+ #
+ if sys.exc_info()[1] is not value:
+ raise
+
+
+def contextmanager(func):
+ """@contextmanager decorator.
+
+ Typical usage:
+
+ @contextmanager
+ def some_generator(<arguments>):
+ <setup>
+ try:
+ yield <value>
+ finally:
+ <cleanup>
+
+ This makes this:
+
+ with some_generator(<arguments>) as <variable>:
+ <body>
+
+ equivalent to this:
+
+ <setup>
+ try:
+ <variable> = <value>
+ <body>
+ finally:
+ <cleanup>
+
+ """
+ @wraps(func)
+ def helper(*args, **kwds):
+ return _GeneratorContextManager(func, args, kwds)
+ return helper
+
+
+class closing(object):
+ """Context to automatically close something at the end of a block.
+
+ Code like this:
+
+ with closing(<module>.open(<arguments>)) as f:
+ <block>
+
+ is equivalent to this:
+
+ f = <module>.open(<arguments>)
+ try:
+ <block>
+ finally:
+ f.close()
+
+ """
+ def __init__(self, thing):
+ self.thing = thing
+
+ def __enter__(self):
+ return self.thing
+
+ def __exit__(self, *exc_info):
+ self.thing.close()
+
+
+class _RedirectStream(object):
+
+ _stream = None
+
+ def __init__(self, new_target):
+ self._new_target = new_target
+ # We use a list of old targets to make this CM re-entrant
+ self._old_targets = []
+
+ def __enter__(self):
+ self._old_targets.append(getattr(sys, self._stream))
+ setattr(sys, self._stream, self._new_target)
+ return self._new_target
+
+ def __exit__(self, exctype, excinst, exctb):
+ setattr(sys, self._stream, self._old_targets.pop())
+
+
+class redirect_stdout(_RedirectStream):
+ """Context manager for temporarily redirecting stdout to another file.
+
+ # How to send help() to stderr
+ with redirect_stdout(sys.stderr):
+ help(dir)
+
+ # How to write help() to a file
+ with open('help.txt', 'w') as f:
+ with redirect_stdout(f):
+ help(pow)
+ """
+
+ _stream = "stdout"
+
+
+class redirect_stderr(_RedirectStream):
+ """Context manager for temporarily redirecting stderr to another file."""
+
+ _stream = "stderr"
+
+
+class suppress(object):
+ """Context manager to suppress specified exceptions
+
+ After the exception is suppressed, execution proceeds with the next
+ statement following the with statement.
+
+ with suppress(FileNotFoundError):
+ os.remove(somefile)
+ # Execution still resumes here if the file was already removed
+ """
+
+ def __init__(self, *exceptions):
+ self._exceptions = exceptions
+
+ def __enter__(self):
+ pass
+
+ def __exit__(self, exctype, excinst, exctb):
+ # Unlike isinstance and issubclass, CPython exception handling
+ # currently only looks at the concrete type hierarchy (ignoring
+ # the instance and subclass checking hooks). While Guido considers
+ # that a bug rather than a feature, it's a fairly hard one to fix
+ # due to various internal implementation details. suppress provides
+ # the simpler issubclass based semantics, rather than trying to
+ # exactly reproduce the limitations of the CPython interpreter.
+ #
+ # See http://bugs.python.org/issue12029 for more details
+ return exctype is not None and issubclass(exctype, self._exceptions)
+
+
+# Context manipulation is Python 3 only
+_HAVE_EXCEPTION_CHAINING = sys.version_info[0] >= 3
+if _HAVE_EXCEPTION_CHAINING:
+ def _make_context_fixer(frame_exc):
+ def _fix_exception_context(new_exc, old_exc):
+ # Context may not be correct, so find the end of the chain
+ while 1:
+ exc_context = new_exc.__context__
+ if exc_context is old_exc:
+ # Context is already set correctly (see issue 20317)
+ return
+ if exc_context is None or exc_context is frame_exc:
+ break
+ new_exc = exc_context
+ # Change the end of the chain to point to the exception
+ # we expect it to reference
+ new_exc.__context__ = old_exc
+ return _fix_exception_context
+
+ def _reraise_with_existing_context(exc_details):
+ try:
+ # bare "raise exc_details[1]" replaces our carefully
+ # set-up context
+ fixed_ctx = exc_details[1].__context__
+ raise exc_details[1]
+ except BaseException:
+ exc_details[1].__context__ = fixed_ctx
+ raise
+else:
+ # No exception context in Python 2
+ def _make_context_fixer(frame_exc):
+ return lambda new_exc, old_exc: None
+
+ # Use 3 argument raise in Python 2,
+ # but use exec to avoid SyntaxError in Python 3
+ def _reraise_with_existing_context(exc_details):
+ exc_type, exc_value, exc_tb = exc_details
+ exec("raise exc_type, exc_value, exc_tb")
+
+# Handle old-style classes if they exist
+try:
+ from types import InstanceType
+except ImportError:
+ # Python 3 doesn't have old-style classes
+ _get_type = type
+else:
+ # Need to handle old-style context managers on Python 2
+ def _get_type(obj):
+ obj_type = type(obj)
+ if obj_type is InstanceType:
+ return obj.__class__ # Old-style class
+ return obj_type # New-style class
+
+
+# Inspired by discussions on http://bugs.python.org/issue13585
+class ExitStack(object):
+ """Context manager for dynamic management of a stack of exit callbacks
+
+ For example:
+
+ with ExitStack() as stack:
+ files = [stack.enter_context(open(fname)) for fname in filenames]
+ # All opened files will automatically be closed at the end of
+ # the with statement, even if attempts to open files later
+ # in the list raise an exception
+
+ """
+ def __init__(self):
+ self._exit_callbacks = deque()
+
+ def pop_all(self):
+ """Preserve the context stack by transferring it to a new instance"""
+ new_stack = type(self)()
+ new_stack._exit_callbacks = self._exit_callbacks
+ self._exit_callbacks = deque()
+ return new_stack
+
+ def _push_cm_exit(self, cm, cm_exit):
+ """Helper to correctly register callbacks to __exit__ methods"""
+ def _exit_wrapper(*exc_details):
+ return cm_exit(cm, *exc_details)
+ _exit_wrapper.__self__ = cm
+ self.push(_exit_wrapper)
+
+ def push(self, exit):
+ """Registers a callback with the standard __exit__ method signature
+
+ Can suppress exceptions the same way __exit__ methods can.
+
+ Also accepts any object with an __exit__ method (registering a call
+ to the method instead of the object itself)
+ """
+ # We use an unbound method rather than a bound method to follow
+ # the standard lookup behaviour for special methods
+ _cb_type = _get_type(exit)
+ try:
+ exit_method = _cb_type.__exit__
+ except AttributeError:
+            # Not a context manager, so assume it's a callable
+ self._exit_callbacks.append(exit)
+ else:
+ self._push_cm_exit(exit, exit_method)
+ return exit # Allow use as a decorator
+
+ def callback(self, callback, *args, **kwds):
+ """Registers an arbitrary callback and arguments.
+
+ Cannot suppress exceptions.
+ """
+ def _exit_wrapper(exc_type, exc, tb):
+ callback(*args, **kwds)
+ # We changed the signature, so using @wraps is not appropriate, but
+ # setting __wrapped__ may still help with introspection
+ _exit_wrapper.__wrapped__ = callback
+ self.push(_exit_wrapper)
+ return callback # Allow use as a decorator
+
+ def enter_context(self, cm):
+ """Enters the supplied context manager
+
+ If successful, also pushes its __exit__ method as a callback and
+ returns the result of the __enter__ method.
+ """
+ # We look up the special methods on the type to match the with statement
+ _cm_type = _get_type(cm)
+ _exit = _cm_type.__exit__
+ result = _cm_type.__enter__(cm)
+ self._push_cm_exit(cm, _exit)
+ return result
+
+ def close(self):
+ """Immediately unwind the context stack"""
+ self.__exit__(None, None, None)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *exc_details):
+ received_exc = exc_details[0] is not None
+
+ # We manipulate the exception state so it behaves as though
+ # we were actually nesting multiple with statements
+ frame_exc = sys.exc_info()[1]
+ _fix_exception_context = _make_context_fixer(frame_exc)
+
+ # Callbacks are invoked in LIFO order to match the behaviour of
+ # nested context managers
+ suppressed_exc = False
+ pending_raise = False
+ while self._exit_callbacks:
+ cb = self._exit_callbacks.pop()
+ try:
+ if cb(*exc_details):
+ suppressed_exc = True
+ pending_raise = False
+ exc_details = (None, None, None)
+ except:
+ new_exc_details = sys.exc_info()
+ # simulate the stack of exceptions by setting the context
+ _fix_exception_context(new_exc_details[1], exc_details[1])
+ pending_raise = True
+ exc_details = new_exc_details
+ if pending_raise:
+ _reraise_with_existing_context(exc_details)
+ return received_exc and suppressed_exc
+
+
+# Preserve backwards compatibility
+class ContextStack(ExitStack):
+ """Backwards compatibility alias for ExitStack"""
+
+ def __init__(self):
+ warnings.warn("ContextStack has been renamed to ExitStack",
+ DeprecationWarning)
+ super(ContextStack, self).__init__()
+
+ def register_exit(self, callback):
+ return self.push(callback)
+
+ def register(self, callback, *args, **kwds):
+ return self.callback(callback, *args, **kwds)
+
+ def preserve(self):
+ return self.pop_all()
+
+
+class nullcontext(AbstractContextManager):
+ """Context manager that does no additional processing.
+ Used as a stand-in for a normal context manager, when a particular
+ block of code is only sometimes used with a normal context manager:
+ cm = optional_cm if condition else nullcontext()
+ with cm:
+ # Perform operation, using optional_cm if condition is True
+ """
+
+ def __init__(self, enter_result=None):
+ self.enter_result = enter_result
+
+ def __enter__(self):
+ return self.enter_result
+
+ def __exit__(self, *excinfo):
+ pass
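A brief sketch exercising the backported ``ExitStack``, ``suppress`` and ``nullcontext`` together on Python 2 or 3; the file names are illustrative:

    import os
    from contextlib2 import ExitStack, nullcontext, suppress

    def concat(paths, out_path=None):
        """Concatenate text files; write to out_path when given, and return the text."""
        with ExitStack() as stack:
            files = [stack.enter_context(open(p)) for p in paths]
            # nullcontext() stands in when no output file is requested
            out = stack.enter_context(open(out_path, 'w') if out_path else nullcontext())
            data = ''.join(f.read() for f in files)
            if out is not None:
                out.write(data)
            return data

    with suppress(OSError):
        os.remove('scratch.tmp')  # no traceback even if the file does not exist
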
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/METADATA
new file mode 100644
index 0000000000..54f5f6497f
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/METADATA
@@ -0,0 +1,24 @@
+Metadata-Version: 1.1
+Name: distlib
+Version: 0.3.1
+Summary: Distribution utilities
+Description: Low-level components of distutils2/packaging, augmented with higher-level APIs for making packaging easier.
+Home-page: https://bitbucket.org/pypa/distlib
+Author: Vinay Sajip
+Author-email: vinay_sajip@red-dove.com
+License: Python license
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Download-URL: https://bitbucket.org/pypa/distlib/downloads/distlib-0.3.1.zip
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/RECORD
new file mode 100644
index 0000000000..93b724c474
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/RECORD
@@ -0,0 +1,26 @@
+distlib/__init__.py,sha256=3veAk2rPznOB2gsK6tjbbh0TQMmGE5P82eE9wXq6NIk,581
+distlib/compat.py,sha256=ADA56xiAxar3mU6qemlBhNbsrFPosXRhO44RzsbJPqk,41408
+distlib/database.py,sha256=Kl0YvPQKc4OcpVi7k5cFziydM1xOK8iqdxLGXgbZHV4,51059
+distlib/index.py,sha256=SXKzpQCERctxYDMp_OLee2f0J0e19ZhGdCIoMlUfUQM,21066
+distlib/locators.py,sha256=c9E4cDEacJ_uKbuE5BqAVocoWp6rsuBGTkiNDQq3zV4,52100
+distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811
+distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387
+distlib/metadata.py,sha256=z2KPy3h3tcDnb9Xs7nAqQ5Oz0bqjWAUFmKWcFKRoodg,38962
+distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766
+distlib/scripts.py,sha256=_MAj3sMuv56kuM8FsiIWXqbT0gmumPGaOR_atOzn4a4,17180
+distlib/t32.exe,sha256=NS3xBCVAld35JVFNmb-1QRyVtThukMrwZVeXn4LhaEQ,96768
+distlib/t64.exe,sha256=oAqHes78rUWVM0OtVqIhUvequl_PKhAhXYQWnUf7zR0,105984
+distlib/util.py,sha256=f2jZCPrcLCt6LcnC0gUy-Fur60tXD8reA7k4rDpHMDw,59845
+distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391
+distlib/w32.exe,sha256=lJtnZdeUxTZWya_EW5DZos_K5rswRECGspIl8ZJCIXs,90112
+distlib/w64.exe,sha256=0aRzoN2BO9NWW4ENy4_4vHkHR4qZTFZNVSAJJYlODTI,99840
+distlib/wheel.py,sha256=v6DnwTqhNHwrEVFr8_YeiTW6G4ftP_evsywNgrmdb2o,41144
+distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274
+distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971
+distlib/_backport/shutil.py,sha256=IX_G2NPqwecJibkIDje04bqu0xpHkfSQ2GaGdEVqM5Y,25707
+distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617
+distlib/_backport/sysconfig.py,sha256=BQHFlb6pubCl_dvT1NjtzIthylofjKisox239stDg0U,26854
+distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628
+distlib-0.3.1.dist-info/METADATA,sha256=i6wrPilVkro9BXvaHkwVsaemMZCx5xbWc8jS9oR_ZJw,1128
+distlib-0.3.1.dist-info/WHEEL,sha256=R4LNelR33E9ZPEGiPwrdPrrHnwkFEjiMPbVCAWVjsxI,106
+distlib-0.3.1.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/WHEEL
new file mode 100644
index 0000000000..78f54a1910
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: distlib 0.3.1.dev0
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any \ No newline at end of file
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py
new file mode 100644
index 0000000000..63d916e345
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2019 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+import logging
+
+__version__ = '0.3.1'
+
+class DistlibException(Exception):
+ pass
+
+try:
+ from logging import NullHandler
+except ImportError: # pragma: no cover
+ class NullHandler(logging.Handler):
+ def handle(self, record): pass
+ def emit(self, record): pass
+ def createLock(self): self.lock = None
+
+logger = logging.getLogger(__name__)
+logger.addHandler(NullHandler())
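The ``NullHandler`` attached above follows the usual library-logging convention: distlib stays silent until the application configures logging. A minimal sketch, assuming the vendored ``distlib`` package is importable; the handler and level choices are illustrative:

    import logging
    import distlib  # assumes the vendored package is on sys.path

    # Route distlib's otherwise-silent logger to the root handler for debugging.
    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger('distlib').setLevel(logging.DEBUG)
    distlib.logger.debug('distlib %s logging enabled', distlib.__version__)
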
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py
new file mode 100644
index 0000000000..f7dbf4c9aa
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py
@@ -0,0 +1,6 @@
+"""Modules copied from Python 3 standard libraries, for internal use only.
+
+Individual classes and functions are found in d2._backport.misc. Intended
+usage is to always import things missing from 3.1 from that module: the
+built-in/stdlib objects will be used if found.
+"""
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py
new file mode 100644
index 0000000000..cfb318d34f
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""Backports for individual classes and functions."""
+
+import os
+import sys
+
+__all__ = ['cache_from_source', 'callable', 'fsencode']
+
+
+try:
+ from imp import cache_from_source
+except ImportError:
+ def cache_from_source(py_file, debug=__debug__):
+ ext = debug and 'c' or 'o'
+ return py_file + ext
+
+
+try:
+ callable = callable
+except NameError:
+ from collections import Callable
+
+ def callable(obj):
+ return isinstance(obj, Callable)
+
+
+try:
+ fsencode = os.fsencode
+except AttributeError:
+ def fsencode(filename):
+ if isinstance(filename, bytes):
+ return filename
+ elif isinstance(filename, str):
+ return filename.encode(sys.getfilesystemencoding())
+ else:
+            raise TypeError("expected bytes or str, not %s" %
+ type(filename).__name__)
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py
new file mode 100644
index 0000000000..10ed362539
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py
@@ -0,0 +1,764 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""Utility functions for copying and archiving files and directory trees.
+
+XXX The functions here don't copy the resource fork or other metadata on Mac.
+
+"""
+
+import os
+import sys
+import stat
+from os.path import abspath
+import fnmatch
+try:
+ from collections.abc import Callable
+except ImportError:
+ from collections import Callable
+import errno
+from . import tarfile
+
+try:
+ import bz2
+ _BZ2_SUPPORTED = True
+except ImportError:
+ _BZ2_SUPPORTED = False
+
+try:
+ from pwd import getpwnam
+except ImportError:
+ getpwnam = None
+
+try:
+ from grp import getgrnam
+except ImportError:
+ getgrnam = None
+
+__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
+ "copytree", "move", "rmtree", "Error", "SpecialFileError",
+ "ExecError", "make_archive", "get_archive_formats",
+ "register_archive_format", "unregister_archive_format",
+ "get_unpack_formats", "register_unpack_format",
+ "unregister_unpack_format", "unpack_archive", "ignore_patterns"]
+
+class Error(EnvironmentError):
+ pass
+
+class SpecialFileError(EnvironmentError):
+ """Raised when trying to do a kind of operation (e.g. copying) which is
+ not supported on a special file (e.g. a named pipe)"""
+
+class ExecError(EnvironmentError):
+ """Raised when a command could not be executed"""
+
+class ReadError(EnvironmentError):
+ """Raised when an archive cannot be read"""
+
+class RegistryError(Exception):
+ """Raised when a registry operation with the archiving
+ and unpacking registries fails"""
+
+
+try:
+ WindowsError
+except NameError:
+ WindowsError = None
+
+def copyfileobj(fsrc, fdst, length=16*1024):
+ """copy data from file-like object fsrc to file-like object fdst"""
+ while 1:
+ buf = fsrc.read(length)
+ if not buf:
+ break
+ fdst.write(buf)
+
+def _samefile(src, dst):
+ # Macintosh, Unix.
+ if hasattr(os.path, 'samefile'):
+ try:
+ return os.path.samefile(src, dst)
+ except OSError:
+ return False
+
+ # All other platforms: check for same pathname.
+ return (os.path.normcase(os.path.abspath(src)) ==
+ os.path.normcase(os.path.abspath(dst)))
+
+def copyfile(src, dst):
+ """Copy data from src to dst"""
+ if _samefile(src, dst):
+ raise Error("`%s` and `%s` are the same file" % (src, dst))
+
+ for fn in [src, dst]:
+ try:
+ st = os.stat(fn)
+ except OSError:
+ # File most likely does not exist
+ pass
+ else:
+ # XXX What about other special files? (sockets, devices...)
+ if stat.S_ISFIFO(st.st_mode):
+ raise SpecialFileError("`%s` is a named pipe" % fn)
+
+ with open(src, 'rb') as fsrc:
+ with open(dst, 'wb') as fdst:
+ copyfileobj(fsrc, fdst)
+
+def copymode(src, dst):
+ """Copy mode bits from src to dst"""
+ if hasattr(os, 'chmod'):
+ st = os.stat(src)
+ mode = stat.S_IMODE(st.st_mode)
+ os.chmod(dst, mode)
+
+def copystat(src, dst):
+ """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
+ st = os.stat(src)
+ mode = stat.S_IMODE(st.st_mode)
+ if hasattr(os, 'utime'):
+ os.utime(dst, (st.st_atime, st.st_mtime))
+ if hasattr(os, 'chmod'):
+ os.chmod(dst, mode)
+ if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
+ try:
+ os.chflags(dst, st.st_flags)
+ except OSError as why:
+ if (not hasattr(errno, 'EOPNOTSUPP') or
+ why.errno != errno.EOPNOTSUPP):
+ raise
+
+def copy(src, dst):
+ """Copy data and mode bits ("cp src dst").
+
+ The destination may be a directory.
+
+ """
+ if os.path.isdir(dst):
+ dst = os.path.join(dst, os.path.basename(src))
+ copyfile(src, dst)
+ copymode(src, dst)
+
+def copy2(src, dst):
+ """Copy data and all stat info ("cp -p src dst").
+
+ The destination may be a directory.
+
+ """
+ if os.path.isdir(dst):
+ dst = os.path.join(dst, os.path.basename(src))
+ copyfile(src, dst)
+ copystat(src, dst)
+
+def ignore_patterns(*patterns):
+ """Function that can be used as copytree() ignore parameter.
+
+ Patterns is a sequence of glob-style patterns
+ that are used to exclude files"""
+ def _ignore_patterns(path, names):
+ ignored_names = []
+ for pattern in patterns:
+ ignored_names.extend(fnmatch.filter(names, pattern))
+ return set(ignored_names)
+ return _ignore_patterns
+
+def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
+ ignore_dangling_symlinks=False):
+ """Recursively copy a directory tree.
+
+ The destination directory must not already exist.
+ If exception(s) occur, an Error is raised with a list of reasons.
+
+ If the optional symlinks flag is true, symbolic links in the
+ source tree result in symbolic links in the destination tree; if
+ it is false, the contents of the files pointed to by symbolic
+    links are copied. If the file pointed to by the symlink doesn't
+    exist, an exception will be added to the list of errors raised in
+ an Error exception at the end of the copy process.
+
+ You can set the optional ignore_dangling_symlinks flag to true if you
+ want to silence this exception. Notice that this has no effect on
+ platforms that don't support os.symlink.
+
+ The optional ignore argument is a callable. If given, it
+ is called with the `src` parameter, which is the directory
+ being visited by copytree(), and `names` which is the list of
+ `src` contents, as returned by os.listdir():
+
+ callable(src, names) -> ignored_names
+
+ Since copytree() is called recursively, the callable will be
+ called once for each directory that is copied. It returns a
+ list of names relative to the `src` directory that should
+ not be copied.
+
+ The optional copy_function argument is a callable that will be used
+ to copy each file. It will be called with the source path and the
+ destination path as arguments. By default, copy2() is used, but any
+ function that supports the same signature (like copy()) can be used.
+
+ """
+ names = os.listdir(src)
+ if ignore is not None:
+ ignored_names = ignore(src, names)
+ else:
+ ignored_names = set()
+
+ os.makedirs(dst)
+ errors = []
+ for name in names:
+ if name in ignored_names:
+ continue
+ srcname = os.path.join(src, name)
+ dstname = os.path.join(dst, name)
+ try:
+ if os.path.islink(srcname):
+ linkto = os.readlink(srcname)
+ if symlinks:
+ os.symlink(linkto, dstname)
+ else:
+ # ignore dangling symlink if the flag is on
+ if not os.path.exists(linkto) and ignore_dangling_symlinks:
+ continue
+                    # otherwise let the copy occur; copy2 will raise an error
+ copy_function(srcname, dstname)
+ elif os.path.isdir(srcname):
+ copytree(srcname, dstname, symlinks, ignore, copy_function)
+ else:
+ # Will raise a SpecialFileError for unsupported file types
+ copy_function(srcname, dstname)
+ # catch the Error from the recursive copytree so that we can
+ # continue with other files
+ except Error as err:
+ errors.extend(err.args[0])
+ except EnvironmentError as why:
+ errors.append((srcname, dstname, str(why)))
+ try:
+ copystat(src, dst)
+ except OSError as why:
+ if WindowsError is not None and isinstance(why, WindowsError):
+ # Copying file access times may fail on Windows
+ pass
+ else:
+            errors.append((src, dst, str(why)))
+ if errors:
+ raise Error(errors)
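+
+# Illustrative usage only (not part of the upstream module); the directory
+# names below are hypothetical:
+#
+#     copytree('project', '/backups/project',
+#              ignore=ignore_patterns('*.pyc', 'tmp*'))
+#
+# copies 'project' recursively while skipping *.pyc files and tmp* entries.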
+
+def rmtree(path, ignore_errors=False, onerror=None):
+ """Recursively delete a directory tree.
+
+ If ignore_errors is set, errors are ignored; otherwise, if onerror
+ is set, it is called to handle the error with arguments (func,
+ path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
+ path is the argument to that function that caused it to fail; and
+ exc_info is a tuple returned by sys.exc_info(). If ignore_errors
+ is false and onerror is None, an exception is raised.
+
+ """
+ if ignore_errors:
+ def onerror(*args):
+ pass
+ elif onerror is None:
+ def onerror(*args):
+ raise
+ try:
+ if os.path.islink(path):
+ # symlinks to directories are forbidden, see bug #1669
+ raise OSError("Cannot call rmtree on a symbolic link")
+ except OSError:
+ onerror(os.path.islink, path, sys.exc_info())
+ # can't continue even if onerror hook returns
+ return
+ names = []
+ try:
+ names = os.listdir(path)
+ except os.error:
+ onerror(os.listdir, path, sys.exc_info())
+ for name in names:
+ fullname = os.path.join(path, name)
+ try:
+ mode = os.lstat(fullname).st_mode
+ except os.error:
+ mode = 0
+ if stat.S_ISDIR(mode):
+ rmtree(fullname, ignore_errors, onerror)
+ else:
+ try:
+ os.remove(fullname)
+ except os.error:
+ onerror(os.remove, fullname, sys.exc_info())
+ try:
+ os.rmdir(path)
+ except os.error:
+ onerror(os.rmdir, path, sys.exc_info())
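+
+# Illustrative usage only (not part of the upstream module): an onerror
+# handler that makes a read-only entry writable and retries the failed call.
+# The path below is hypothetical.
+#
+#     def force_remove(func, path, exc_info):
+#         os.chmod(path, stat.S_IWRITE)
+#         func(path)
+#
+#     rmtree('/tmp/build-output', onerror=force_remove)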
+
+
+def _basename(path):
+ # A basename() variant which first strips the trailing slash, if present.
+ # Thus we always get the last component of the path, even for directories.
+ return os.path.basename(path.rstrip(os.path.sep))
+
+def move(src, dst):
+ """Recursively move a file or directory to another location. This is
+ similar to the Unix "mv" command.
+
+ If the destination is a directory or a symlink to a directory, the source
+ is moved inside the directory. The destination path must not already
+ exist.
+
+ If the destination already exists but is not a directory, it may be
+ overwritten depending on os.rename() semantics.
+
+ If the destination is on our current filesystem, then rename() is used.
+ Otherwise, src is copied to the destination and then removed.
+    A lot more could be done here... A look at mv.c shows a lot of
+ the issues this implementation glosses over.
+
+ """
+ real_dst = dst
+ if os.path.isdir(dst):
+ if _samefile(src, dst):
+ # We might be on a case insensitive filesystem,
+ # perform the rename anyway.
+ os.rename(src, dst)
+ return
+
+ real_dst = os.path.join(dst, _basename(src))
+ if os.path.exists(real_dst):
+ raise Error("Destination path '%s' already exists" % real_dst)
+ try:
+ os.rename(src, real_dst)
+ except OSError:
+ if os.path.isdir(src):
+ if _destinsrc(src, dst):
+ raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
+ copytree(src, real_dst, symlinks=True)
+ rmtree(src)
+ else:
+ copy2(src, real_dst)
+ os.unlink(src)
+
+def _destinsrc(src, dst):
+ src = abspath(src)
+ dst = abspath(dst)
+ if not src.endswith(os.path.sep):
+ src += os.path.sep
+ if not dst.endswith(os.path.sep):
+ dst += os.path.sep
+ return dst.startswith(src)
+
+def _get_gid(name):
+ """Returns a gid, given a group name."""
+ if getgrnam is None or name is None:
+ return None
+ try:
+ result = getgrnam(name)
+ except KeyError:
+ result = None
+ if result is not None:
+ return result[2]
+ return None
+
+def _get_uid(name):
+ """Returns an uid, given a user name."""
+ if getpwnam is None or name is None:
+ return None
+ try:
+ result = getpwnam(name)
+ except KeyError:
+ result = None
+ if result is not None:
+ return result[2]
+ return None
+
+def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
+ owner=None, group=None, logger=None):
+ """Create a (possibly compressed) tar file from all the files under
+ 'base_dir'.
+
+ 'compress' must be "gzip" (the default), "bzip2", or None.
+
+ 'owner' and 'group' can be used to define an owner and a group for the
+ archive that is being built. If not provided, the current owner and group
+ will be used.
+
+ The output tar file will be named 'base_name' + ".tar", possibly plus
+ the appropriate compression extension (".gz", or ".bz2").
+
+ Returns the output filename.
+ """
+ tar_compression = {'gzip': 'gz', None: ''}
+ compress_ext = {'gzip': '.gz'}
+
+ if _BZ2_SUPPORTED:
+ tar_compression['bzip2'] = 'bz2'
+ compress_ext['bzip2'] = '.bz2'
+
+ # flags for compression program, each element of list will be an argument
+ if compress is not None and compress not in compress_ext:
+ raise ValueError("bad value for 'compress', or compression format not "
+ "supported : {0}".format(compress))
+
+ archive_name = base_name + '.tar' + compress_ext.get(compress, '')
+ archive_dir = os.path.dirname(archive_name)
+
+ if not os.path.exists(archive_dir):
+ if logger is not None:
+ logger.info("creating %s", archive_dir)
+ if not dry_run:
+ os.makedirs(archive_dir)
+
+ # creating the tarball
+ if logger is not None:
+ logger.info('Creating tar archive')
+
+ uid = _get_uid(owner)
+ gid = _get_gid(group)
+
+ def _set_uid_gid(tarinfo):
+ if gid is not None:
+ tarinfo.gid = gid
+ tarinfo.gname = group
+ if uid is not None:
+ tarinfo.uid = uid
+ tarinfo.uname = owner
+ return tarinfo
+
+ if not dry_run:
+ tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
+ try:
+ tar.add(base_dir, filter=_set_uid_gid)
+ finally:
+ tar.close()
+
+ return archive_name
+
+def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
+ # XXX see if we want to keep an external call here
+ if verbose:
+ zipoptions = "-r"
+ else:
+ zipoptions = "-rq"
+ from distutils.errors import DistutilsExecError
+ from distutils.spawn import spawn
+ try:
+ spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
+ except DistutilsExecError:
+ # XXX really should distinguish between "couldn't find
+ # external 'zip' command" and "zip failed".
+ raise ExecError("unable to create zip file '%s': "
+ "could neither import the 'zipfile' module nor "
+ "find a standalone zip utility") % zip_filename
+
+def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
+ """Create a zip file from all the files under 'base_dir'.
+
+ The output zip file will be named 'base_name' + ".zip". Uses either the
+ "zipfile" Python module (if available) or the InfoZIP "zip" utility
+ (if installed and found on the default search path). If neither tool is
+ available, raises ExecError. Returns the name of the output zip
+ file.
+ """
+ zip_filename = base_name + ".zip"
+ archive_dir = os.path.dirname(base_name)
+
+ if not os.path.exists(archive_dir):
+ if logger is not None:
+ logger.info("creating %s", archive_dir)
+ if not dry_run:
+ os.makedirs(archive_dir)
+
+ # If zipfile module is not available, try spawning an external 'zip'
+ # command.
+ try:
+ import zipfile
+ except ImportError:
+ zipfile = None
+
+ if zipfile is None:
+ _call_external_zip(base_dir, zip_filename, verbose, dry_run)
+ else:
+ if logger is not None:
+ logger.info("creating '%s' and adding '%s' to it",
+ zip_filename, base_dir)
+
+ if not dry_run:
+ zip = zipfile.ZipFile(zip_filename, "w",
+ compression=zipfile.ZIP_DEFLATED)
+
+ for dirpath, dirnames, filenames in os.walk(base_dir):
+ for name in filenames:
+ path = os.path.normpath(os.path.join(dirpath, name))
+ if os.path.isfile(path):
+ zip.write(path, path)
+ if logger is not None:
+ logger.info("adding '%s'", path)
+ zip.close()
+
+ return zip_filename
+
+_ARCHIVE_FORMATS = {
+ 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
+ 'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
+ 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"),
+ 'zip': (_make_zipfile, [], "ZIP file"),
+ }
+
+if _BZ2_SUPPORTED:
+ _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
+ "bzip2'ed tar-file")
+
+def get_archive_formats():
+ """Returns a list of supported formats for archiving and unarchiving.
+
+ Each element of the returned sequence is a tuple (name, description)
+ """
+ formats = [(name, registry[2]) for name, registry in
+ _ARCHIVE_FORMATS.items()]
+ formats.sort()
+ return formats
+
+def register_archive_format(name, function, extra_args=None, description=''):
+ """Registers an archive format.
+
+ name is the name of the format. function is the callable that will be
+ used to create archives. If provided, extra_args is a sequence of
+ (name, value) tuples that will be passed as arguments to the callable.
+ description can be provided to describe the format, and will be returned
+ by the get_archive_formats() function.
+ """
+ if extra_args is None:
+ extra_args = []
+ if not isinstance(function, Callable):
+ raise TypeError('The %s object is not callable' % function)
+ if not isinstance(extra_args, (tuple, list)):
+ raise TypeError('extra_args needs to be a sequence')
+ for element in extra_args:
+        if not isinstance(element, (tuple, list)) or len(element) != 2:
+ raise TypeError('extra_args elements are : (arg_name, value)')
+
+ _ARCHIVE_FORMATS[name] = (function, extra_args, description)
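+
+# Illustrative usage only (not part of the upstream module): registering a
+# hypothetical callable make_cpio(base_name, base_dir, **kwargs) under the
+# name "cpio".
+#
+#     register_archive_format('cpio', make_cpio,
+#                             extra_args=[('compress', None)],
+#                             description="cpio archive")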
+
+def unregister_archive_format(name):
+ del _ARCHIVE_FORMATS[name]
+
+def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
+ dry_run=0, owner=None, group=None, logger=None):
+ """Create an archive file (eg. zip or tar).
+
+ 'base_name' is the name of the file to create, minus any format-specific
+ extension; 'format' is the archive format: one of "zip", "tar", "bztar"
+ or "gztar".
+
+ 'root_dir' is a directory that will be the root directory of the
+ archive; ie. we typically chdir into 'root_dir' before creating the
+ archive. 'base_dir' is the directory where we start archiving from;
+ ie. 'base_dir' will be the common prefix of all files and
+ directories in the archive. 'root_dir' and 'base_dir' both default
+ to the current directory. Returns the name of the archive file.
+
+ 'owner' and 'group' are used when creating a tar archive. By default,
+ uses the current owner and group.
+ """
+ save_cwd = os.getcwd()
+ if root_dir is not None:
+ if logger is not None:
+ logger.debug("changing into '%s'", root_dir)
+ base_name = os.path.abspath(base_name)
+ if not dry_run:
+ os.chdir(root_dir)
+
+ if base_dir is None:
+ base_dir = os.curdir
+
+ kwargs = {'dry_run': dry_run, 'logger': logger}
+
+ try:
+ format_info = _ARCHIVE_FORMATS[format]
+ except KeyError:
+ raise ValueError("unknown archive format '%s'" % format)
+
+ func = format_info[0]
+ for arg, val in format_info[1]:
+ kwargs[arg] = val
+
+ if format != 'zip':
+ kwargs['owner'] = owner
+ kwargs['group'] = group
+
+ try:
+ filename = func(base_name, base_dir, **kwargs)
+ finally:
+ if root_dir is not None:
+ if logger is not None:
+ logger.debug("changing back to '%s'", save_cwd)
+ os.chdir(save_cwd)
+
+ return filename
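+
+# Illustrative usage only (not part of the upstream module); the paths are
+# hypothetical:
+#
+#     make_archive('/tmp/site-backup', 'gztar',
+#                  root_dir='/var/www', base_dir='htdocs')
+#
+# returns '/tmp/site-backup.tar.gz'; every member path starts with 'htdocs/'.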
+
+
+def get_unpack_formats():
+ """Returns a list of supported formats for unpacking.
+
+ Each element of the returned sequence is a tuple
+ (name, extensions, description)
+ """
+ formats = [(name, info[0], info[3]) for name, info in
+ _UNPACK_FORMATS.items()]
+ formats.sort()
+ return formats
+
+def _check_unpack_options(extensions, function, extra_args):
+ """Checks what gets registered as an unpacker."""
+ # first make sure no other unpacker is registered for this extension
+ existing_extensions = {}
+ for name, info in _UNPACK_FORMATS.items():
+ for ext in info[0]:
+ existing_extensions[ext] = name
+
+ for extension in extensions:
+ if extension in existing_extensions:
+ msg = '%s is already registered for "%s"'
+ raise RegistryError(msg % (extension,
+ existing_extensions[extension]))
+
+ if not isinstance(function, Callable):
+ raise TypeError('The registered function must be a callable')
+
+
+def register_unpack_format(name, extensions, function, extra_args=None,
+ description=''):
+ """Registers an unpack format.
+
+ `name` is the name of the format. `extensions` is a list of extensions
+ corresponding to the format.
+
+ `function` is the callable that will be
+ used to unpack archives. The callable will receive archives to unpack.
+ If it's unable to handle an archive, it needs to raise a ReadError
+ exception.
+
+ If provided, `extra_args` is a sequence of
+ (name, value) tuples that will be passed as arguments to the callable.
+ description can be provided to describe the format, and will be returned
+ by the get_unpack_formats() function.
+ """
+ if extra_args is None:
+ extra_args = []
+ _check_unpack_options(extensions, function, extra_args)
+ _UNPACK_FORMATS[name] = extensions, function, extra_args, description
+
+def unregister_unpack_format(name):
+ """Removes the pack format from the registry."""
+ del _UNPACK_FORMATS[name]
+
+def _ensure_directory(path):
+ """Ensure that the parent directory of `path` exists"""
+ dirname = os.path.dirname(path)
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname)
+
+def _unpack_zipfile(filename, extract_dir):
+ """Unpack zip `filename` to `extract_dir`
+ """
+ try:
+ import zipfile
+ except ImportError:
+ raise ReadError('zlib not supported, cannot unpack this archive.')
+
+ if not zipfile.is_zipfile(filename):
+ raise ReadError("%s is not a zip file" % filename)
+
+ zip = zipfile.ZipFile(filename)
+ try:
+ for info in zip.infolist():
+ name = info.filename
+
+ # don't extract absolute paths or ones with .. in them
+ if name.startswith('/') or '..' in name:
+ continue
+
+ target = os.path.join(extract_dir, *name.split('/'))
+ if not target:
+ continue
+
+ _ensure_directory(target)
+ if not name.endswith('/'):
+ # file
+ data = zip.read(info.filename)
+ f = open(target, 'wb')
+ try:
+ f.write(data)
+ finally:
+ f.close()
+ del data
+ finally:
+ zip.close()
+
+def _unpack_tarfile(filename, extract_dir):
+ """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
+ """
+ try:
+ tarobj = tarfile.open(filename)
+ except tarfile.TarError:
+ raise ReadError(
+ "%s is not a compressed or uncompressed tar file" % filename)
+ try:
+ tarobj.extractall(extract_dir)
+ finally:
+ tarobj.close()
+
+_UNPACK_FORMATS = {
+ 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
+ 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
+ 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file")
+ }
+
+if _BZ2_SUPPORTED:
+ _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
+ "bzip2'ed tar-file")
+
+def _find_unpack_format(filename):
+ for name, info in _UNPACK_FORMATS.items():
+ for extension in info[0]:
+ if filename.endswith(extension):
+ return name
+ return None
+
+def unpack_archive(filename, extract_dir=None, format=None):
+ """Unpack an archive.
+
+ `filename` is the name of the archive.
+
+ `extract_dir` is the name of the target directory, where the archive
+ is unpacked. If not provided, the current working directory is used.
+
+ `format` is the archive format: one of "zip", "tar", or "gztar". Or any
+ other registered format. If not provided, unpack_archive will use the
+ filename extension and see if an unpacker was registered for that
+ extension.
+
+    If no unpacker is registered for that extension, a ReadError is
+    raised; an unknown `format` name raises ValueError.
+ """
+ if extract_dir is None:
+ extract_dir = os.getcwd()
+
+ if format is not None:
+ try:
+ format_info = _UNPACK_FORMATS[format]
+ except KeyError:
+ raise ValueError("Unknown unpack format '{0}'".format(format))
+
+ func = format_info[1]
+ func(filename, extract_dir, **dict(format_info[2]))
+ else:
+ # we need to look at the registered unpackers supported extensions
+ format = _find_unpack_format(filename)
+ if format is None:
+ raise ReadError("Unknown archive format '{0}'".format(filename))
+
+ func = _UNPACK_FORMATS[format][1]
+ kwargs = dict(_UNPACK_FORMATS[format][2])
+ func(filename, extract_dir, **kwargs)
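+
+# Illustrative usage only (not part of the upstream module); the paths are
+# hypothetical:
+#
+#     unpack_archive('/tmp/site-backup.tar.gz', '/tmp/restore')
+#
+# infers the 'gztar' format from the extension; passing format='zip' (or any
+# registered name) forces a specific unpacker.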
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.cfg b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.cfg
new file mode 100644
index 0000000000..1746bd01c1
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.cfg
@@ -0,0 +1,84 @@
+[posix_prefix]
+# Configuration directories. Some of these come straight out of the
+# configure script. They are for implementing the other variables, not to
+# be used directly in [resource_locations].
+confdir = /etc
+datadir = /usr/share
+libdir = /usr/lib
+statedir = /var
+# User resource directory
+local = ~/.local/{distribution.name}
+
+stdlib = {base}/lib/python{py_version_short}
+platstdlib = {platbase}/lib/python{py_version_short}
+purelib = {base}/lib/python{py_version_short}/site-packages
+platlib = {platbase}/lib/python{py_version_short}/site-packages
+include = {base}/include/python{py_version_short}{abiflags}
+platinclude = {platbase}/include/python{py_version_short}{abiflags}
+data = {base}
+
+[posix_home]
+stdlib = {base}/lib/python
+platstdlib = {base}/lib/python
+purelib = {base}/lib/python
+platlib = {base}/lib/python
+include = {base}/include/python
+platinclude = {base}/include/python
+scripts = {base}/bin
+data = {base}
+
+[nt]
+stdlib = {base}/Lib
+platstdlib = {base}/Lib
+purelib = {base}/Lib/site-packages
+platlib = {base}/Lib/site-packages
+include = {base}/Include
+platinclude = {base}/Include
+scripts = {base}/Scripts
+data = {base}
+
+[os2]
+stdlib = {base}/Lib
+platstdlib = {base}/Lib
+purelib = {base}/Lib/site-packages
+platlib = {base}/Lib/site-packages
+include = {base}/Include
+platinclude = {base}/Include
+scripts = {base}/Scripts
+data = {base}
+
+[os2_home]
+stdlib = {userbase}/lib/python{py_version_short}
+platstdlib = {userbase}/lib/python{py_version_short}
+purelib = {userbase}/lib/python{py_version_short}/site-packages
+platlib = {userbase}/lib/python{py_version_short}/site-packages
+include = {userbase}/include/python{py_version_short}
+scripts = {userbase}/bin
+data = {userbase}
+
+[nt_user]
+stdlib = {userbase}/Python{py_version_nodot}
+platstdlib = {userbase}/Python{py_version_nodot}
+purelib = {userbase}/Python{py_version_nodot}/site-packages
+platlib = {userbase}/Python{py_version_nodot}/site-packages
+include = {userbase}/Python{py_version_nodot}/Include
+scripts = {userbase}/Scripts
+data = {userbase}
+
+[posix_user]
+stdlib = {userbase}/lib/python{py_version_short}
+platstdlib = {userbase}/lib/python{py_version_short}
+purelib = {userbase}/lib/python{py_version_short}/site-packages
+platlib = {userbase}/lib/python{py_version_short}/site-packages
+include = {userbase}/include/python{py_version_short}
+scripts = {userbase}/bin
+data = {userbase}
+
+[osx_framework_user]
+stdlib = {userbase}/lib/python
+platstdlib = {userbase}/lib/python
+purelib = {userbase}/lib/python/site-packages
+platlib = {userbase}/lib/python/site-packages
+include = {userbase}/include
+scripts = {userbase}/bin
+data = {userbase}
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py
new file mode 100644
index 0000000000..b470a373c8
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py
@@ -0,0 +1,786 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""Access to Python's configuration information."""
+
+import codecs
+import os
+import re
+import sys
+from os.path import pardir, realpath
+try:
+ import configparser
+except ImportError:
+ import ConfigParser as configparser
+
+
+__all__ = [
+ 'get_config_h_filename',
+ 'get_config_var',
+ 'get_config_vars',
+ 'get_makefile_filename',
+ 'get_path',
+ 'get_path_names',
+ 'get_paths',
+ 'get_platform',
+ 'get_python_version',
+ 'get_scheme_names',
+ 'parse_config_h',
+]
+
+
+def _safe_realpath(path):
+ try:
+ return realpath(path)
+ except OSError:
+ return path
+
+
+if sys.executable:
+ _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
+else:
+ # sys.executable can be empty if argv[0] has been changed and Python is
+ # unable to retrieve the real program name
+ _PROJECT_BASE = _safe_realpath(os.getcwd())
+
+if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower():
+ _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir))
+# PC/VS7.1
+if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
+ _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
+# PC/AMD64
+if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
+ _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
+
+
+def is_python_build():
+ for fn in ("Setup.dist", "Setup.local"):
+ if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
+ return True
+ return False
+
+_PYTHON_BUILD = is_python_build()
+
+_cfg_read = False
+
+def _ensure_cfg_read():
+ global _cfg_read
+ if not _cfg_read:
+ from ..resources import finder
+ backport_package = __name__.rsplit('.', 1)[0]
+ _finder = finder(backport_package)
+ _cfgfile = _finder.find('sysconfig.cfg')
+ assert _cfgfile, 'sysconfig.cfg exists'
+ with _cfgfile.as_stream() as s:
+ _SCHEMES.readfp(s)
+ if _PYTHON_BUILD:
+ for scheme in ('posix_prefix', 'posix_home'):
+ _SCHEMES.set(scheme, 'include', '{srcdir}/Include')
+ _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.')
+
+ _cfg_read = True
+
+
+_SCHEMES = configparser.RawConfigParser()
+_VAR_REPL = re.compile(r'\{([^{]*?)\}')
+
+def _expand_globals(config):
+ _ensure_cfg_read()
+ if config.has_section('globals'):
+ globals = config.items('globals')
+ else:
+ globals = tuple()
+
+ sections = config.sections()
+ for section in sections:
+ if section == 'globals':
+ continue
+ for option, value in globals:
+ if config.has_option(section, option):
+ continue
+ config.set(section, option, value)
+ config.remove_section('globals')
+
+ # now expanding local variables defined in the cfg file
+ #
+ for section in config.sections():
+ variables = dict(config.items(section))
+
+ def _replacer(matchobj):
+ name = matchobj.group(1)
+ if name in variables:
+ return variables[name]
+ return matchobj.group(0)
+
+ for option, value in config.items(section):
+ config.set(section, option, _VAR_REPL.sub(_replacer, value))
+
+#_expand_globals(_SCHEMES)
+
+_PY_VERSION = '%s.%s.%s' % sys.version_info[:3]
+_PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2]
+_PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2]
+_PREFIX = os.path.normpath(sys.prefix)
+_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
+_CONFIG_VARS = None
+_USER_BASE = None
+
+
+def _subst_vars(path, local_vars):
+ """In the string `path`, replace tokens like {some.thing} with the
+ corresponding value from the map `local_vars`.
+
+ If there is no corresponding value, leave the token unchanged.
+ """
+ def _replacer(matchobj):
+ name = matchobj.group(1)
+ if name in local_vars:
+ return local_vars[name]
+ elif name in os.environ:
+ return os.environ[name]
+ return matchobj.group(0)
+ return _VAR_REPL.sub(_replacer, path)
+
+
+def _extend_dict(target_dict, other_dict):
+ target_keys = target_dict.keys()
+ for key, value in other_dict.items():
+ if key in target_keys:
+ continue
+ target_dict[key] = value
+
+
+def _expand_vars(scheme, vars):
+ res = {}
+ if vars is None:
+ vars = {}
+ _extend_dict(vars, get_config_vars())
+
+ for key, value in _SCHEMES.items(scheme):
+ if os.name in ('posix', 'nt'):
+ value = os.path.expanduser(value)
+ res[key] = os.path.normpath(_subst_vars(value, vars))
+ return res
+
+
+def format_value(value, vars):
+ def _replacer(matchobj):
+ name = matchobj.group(1)
+ if name in vars:
+ return vars[name]
+ return matchobj.group(0)
+ return _VAR_REPL.sub(_replacer, value)
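+
+# Illustrative substitution only (not part of the upstream module):
+#
+#     format_value('{userbase}/bin', {'userbase': '/home/user/.local'})
+#
+# returns '/home/user/.local/bin'; unknown {tokens} are left unchanged.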
+
+
+def _get_default_scheme():
+ if os.name == 'posix':
+ # the default scheme for posix is posix_prefix
+ return 'posix_prefix'
+ return os.name
+
+
+def _getuserbase():
+ env_base = os.environ.get("PYTHONUSERBASE", None)
+
+ def joinuser(*args):
+ return os.path.expanduser(os.path.join(*args))
+
+ # what about 'os2emx', 'riscos' ?
+ if os.name == "nt":
+ base = os.environ.get("APPDATA") or "~"
+ if env_base:
+ return env_base
+ else:
+ return joinuser(base, "Python")
+
+ if sys.platform == "darwin":
+ framework = get_config_var("PYTHONFRAMEWORK")
+ if framework:
+ if env_base:
+ return env_base
+ else:
+ return joinuser("~", "Library", framework, "%d.%d" %
+ sys.version_info[:2])
+
+ if env_base:
+ return env_base
+ else:
+ return joinuser("~", ".local")
+
+
+def _parse_makefile(filename, vars=None):
+ """Parse a Makefile-style file.
+
+ A dictionary containing name/value pairs is returned. If an
+ optional dictionary is passed in as the second argument, it is
+ used instead of a new dictionary.
+ """
+ # Regexes needed for parsing Makefile (and similar syntaxes,
+ # like old-style Setup files).
+ _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
+ _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
+ _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
+
+ if vars is None:
+ vars = {}
+ done = {}
+ notdone = {}
+
+ with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f:
+ lines = f.readlines()
+
+ for line in lines:
+ if line.startswith('#') or line.strip() == '':
+ continue
+ m = _variable_rx.match(line)
+ if m:
+ n, v = m.group(1, 2)
+ v = v.strip()
+ # `$$' is a literal `$' in make
+ tmpv = v.replace('$$', '')
+
+ if "$" in tmpv:
+ notdone[n] = v
+ else:
+ try:
+ v = int(v)
+ except ValueError:
+ # insert literal `$'
+ done[n] = v.replace('$$', '$')
+ else:
+ done[n] = v
+
+ # do variable interpolation here
+ variables = list(notdone.keys())
+
+ # Variables with a 'PY_' prefix in the makefile. These need to
+ # be made available without that prefix through sysconfig.
+ # Special care is needed to ensure that variable expansion works, even
+ # if the expansion uses the name without a prefix.
+ renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')
+
+ while len(variables) > 0:
+ for name in tuple(variables):
+ value = notdone[name]
+ m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
+ if m is not None:
+ n = m.group(1)
+ found = True
+ if n in done:
+ item = str(done[n])
+ elif n in notdone:
+ # get it on a subsequent round
+ found = False
+ elif n in os.environ:
+ # do it like make: fall back to environment
+ item = os.environ[n]
+
+ elif n in renamed_variables:
+ if (name.startswith('PY_') and
+ name[3:] in renamed_variables):
+ item = ""
+
+ elif 'PY_' + n in notdone:
+ found = False
+
+ else:
+ item = str(done['PY_' + n])
+
+ else:
+ done[n] = item = ""
+
+ if found:
+ after = value[m.end():]
+ value = value[:m.start()] + item + after
+ if "$" in after:
+ notdone[name] = value
+ else:
+ try:
+ value = int(value)
+ except ValueError:
+ done[name] = value.strip()
+ else:
+ done[name] = value
+ variables.remove(name)
+
+ if (name.startswith('PY_') and
+ name[3:] in renamed_variables):
+
+ name = name[3:]
+ if name not in done:
+ done[name] = value
+
+ else:
+ # bogus variable reference (e.g. "prefix=$/opt/python");
+ # just drop it since we can't deal
+ done[name] = value
+ variables.remove(name)
+
+ # strip spurious spaces
+ for k, v in done.items():
+ if isinstance(v, str):
+ done[k] = v.strip()
+
+ # save the results in the global dictionary
+ vars.update(done)
+ return vars
+
+
+def get_makefile_filename():
+ """Return the path of the Makefile."""
+ if _PYTHON_BUILD:
+ return os.path.join(_PROJECT_BASE, "Makefile")
+ if hasattr(sys, 'abiflags'):
+ config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
+ else:
+ config_dir_name = 'config'
+ return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')
+
+
+def _init_posix(vars):
+ """Initialize the module as appropriate for POSIX systems."""
+ # load the installed Makefile:
+ makefile = get_makefile_filename()
+ try:
+ _parse_makefile(makefile, vars)
+ except IOError as e:
+ msg = "invalid Python installation: unable to open %s" % makefile
+ if hasattr(e, "strerror"):
+ msg = msg + " (%s)" % e.strerror
+ raise IOError(msg)
+ # load the installed pyconfig.h:
+ config_h = get_config_h_filename()
+ try:
+ with open(config_h) as f:
+ parse_config_h(f, vars)
+ except IOError as e:
+ msg = "invalid Python installation: unable to open %s" % config_h
+ if hasattr(e, "strerror"):
+ msg = msg + " (%s)" % e.strerror
+ raise IOError(msg)
+ # On AIX, there are wrong paths to the linker scripts in the Makefile
+ # -- these paths are relative to the Python source, but when installed
+ # the scripts are in another directory.
+ if _PYTHON_BUILD:
+ vars['LDSHARED'] = vars['BLDSHARED']
+
+
+def _init_non_posix(vars):
+ """Initialize the module as appropriate for NT"""
+ # set basic install directories
+ vars['LIBDEST'] = get_path('stdlib')
+ vars['BINLIBDEST'] = get_path('platstdlib')
+ vars['INCLUDEPY'] = get_path('include')
+ vars['SO'] = '.pyd'
+ vars['EXE'] = '.exe'
+ vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
+ vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))
+
+#
+# public APIs
+#
+
+
+def parse_config_h(fp, vars=None):
+ """Parse a config.h-style file.
+
+ A dictionary containing name/value pairs is returned. If an
+ optional dictionary is passed in as the second argument, it is
+ used instead of a new dictionary.
+ """
+ if vars is None:
+ vars = {}
+ define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
+ undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
+
+ while True:
+ line = fp.readline()
+ if not line:
+ break
+ m = define_rx.match(line)
+ if m:
+ n, v = m.group(1, 2)
+ try:
+ v = int(v)
+ except ValueError:
+ pass
+ vars[n] = v
+ else:
+ m = undef_rx.match(line)
+ if m:
+ vars[m.group(1)] = 0
+ return vars
+
+
+def get_config_h_filename():
+ """Return the path of pyconfig.h."""
+ if _PYTHON_BUILD:
+ if os.name == "nt":
+ inc_dir = os.path.join(_PROJECT_BASE, "PC")
+ else:
+ inc_dir = _PROJECT_BASE
+ else:
+ inc_dir = get_path('platinclude')
+ return os.path.join(inc_dir, 'pyconfig.h')
+
+
+def get_scheme_names():
+ """Return a tuple containing the schemes names."""
+ return tuple(sorted(_SCHEMES.sections()))
+
+
+def get_path_names():
+ """Return a tuple containing the paths names."""
+ # xxx see if we want a static list
+ return _SCHEMES.options('posix_prefix')
+
+
+def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
+ """Return a mapping containing an install scheme.
+
+ ``scheme`` is the install scheme name. If not provided, it will
+ return the default scheme for the current platform.
+ """
+ _ensure_cfg_read()
+ if expand:
+ return _expand_vars(scheme, vars)
+ else:
+ return dict(_SCHEMES.items(scheme))
+
+
+def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
+ """Return a path corresponding to the scheme.
+
+ ``scheme`` is the install scheme name.
+ """
+ return get_paths(scheme, vars, expand)[name]
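+
+# Illustrative usage only (not part of the upstream module); the expanded
+# result depends on the interpreter's prefix and version:
+#
+#     get_path('purelib', 'posix_prefix')
+#
+# typically expands to something like '/usr/lib/python2.7/site-packages'.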
+
+
+def get_config_vars(*args):
+ """With no arguments, return a dictionary of all configuration
+ variables relevant for the current platform.
+
+ On Unix, this means every variable defined in Python's installed Makefile;
+ On Windows and Mac OS it's a much smaller set.
+
+ With arguments, return a list of values that result from looking up
+ each argument in the configuration variable dictionary.
+ """
+ global _CONFIG_VARS
+ if _CONFIG_VARS is None:
+ _CONFIG_VARS = {}
+ # Normalized versions of prefix and exec_prefix are handy to have;
+ # in fact, these are the standard versions used most places in the
+ # distutils2 module.
+ _CONFIG_VARS['prefix'] = _PREFIX
+ _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
+ _CONFIG_VARS['py_version'] = _PY_VERSION
+ _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
+ _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2]
+ _CONFIG_VARS['base'] = _PREFIX
+ _CONFIG_VARS['platbase'] = _EXEC_PREFIX
+ _CONFIG_VARS['projectbase'] = _PROJECT_BASE
+ try:
+ _CONFIG_VARS['abiflags'] = sys.abiflags
+ except AttributeError:
+ # sys.abiflags may not be defined on all platforms.
+ _CONFIG_VARS['abiflags'] = ''
+
+ if os.name in ('nt', 'os2'):
+ _init_non_posix(_CONFIG_VARS)
+ if os.name == 'posix':
+ _init_posix(_CONFIG_VARS)
+ # Setting 'userbase' is done below the call to the
+ # init function to enable using 'get_config_var' in
+ # the init-function.
+ if sys.version >= '2.6':
+ _CONFIG_VARS['userbase'] = _getuserbase()
+
+ if 'srcdir' not in _CONFIG_VARS:
+ _CONFIG_VARS['srcdir'] = _PROJECT_BASE
+ else:
+ _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir'])
+
+ # Convert srcdir into an absolute path if it appears necessary.
+ # Normally it is relative to the build directory. However, during
+ # testing, for example, we might be running a non-installed python
+ # from a different directory.
+ if _PYTHON_BUILD and os.name == "posix":
+ base = _PROJECT_BASE
+ try:
+ cwd = os.getcwd()
+ except OSError:
+ cwd = None
+ if (not os.path.isabs(_CONFIG_VARS['srcdir']) and
+ base != cwd):
+ # srcdir is relative and we are not in the same directory
+ # as the executable. Assume executable is in the build
+ # directory and make srcdir absolute.
+ srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
+ _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)
+
+ if sys.platform == 'darwin':
+ kernel_version = os.uname()[2] # Kernel version (8.4.3)
+ major_version = int(kernel_version.split('.')[0])
+
+ if major_version < 8:
+ # On Mac OS X before 10.4, check if -arch and -isysroot
+ # are in CFLAGS or LDFLAGS and remove them if they are.
+ # This is needed when building extensions on a 10.3 system
+ # using a universal build of python.
+ for key in ('LDFLAGS', 'BASECFLAGS',
+ # a number of derived variables. These need to be
+ # patched up as well.
+ 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
+ flags = _CONFIG_VARS[key]
+ flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
+ flags = re.sub('-isysroot [^ \t]*', ' ', flags)
+ _CONFIG_VARS[key] = flags
+ else:
+ # Allow the user to override the architecture flags using
+ # an environment variable.
+ # NOTE: This name was introduced by Apple in OSX 10.5 and
+ # is used by several scripting languages distributed with
+ # that OS release.
+ if 'ARCHFLAGS' in os.environ:
+ arch = os.environ['ARCHFLAGS']
+ for key in ('LDFLAGS', 'BASECFLAGS',
+ # a number of derived variables. These need to be
+ # patched up as well.
+ 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
+
+ flags = _CONFIG_VARS[key]
+ flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
+ flags = flags + ' ' + arch
+ _CONFIG_VARS[key] = flags
+
+ # If we're on OSX 10.5 or later and the user tries to
+            # compile an extension using an SDK that is not present
+ # on the current machine it is better to not use an SDK
+ # than to fail.
+ #
+ # The major usecase for this is users using a Python.org
+ # binary installer on OSX 10.6: that installer uses
+ # the 10.4u SDK, but that SDK is not installed by default
+ # when you install Xcode.
+ #
+ CFLAGS = _CONFIG_VARS.get('CFLAGS', '')
+ m = re.search(r'-isysroot\s+(\S+)', CFLAGS)
+ if m is not None:
+ sdk = m.group(1)
+ if not os.path.exists(sdk):
+ for key in ('LDFLAGS', 'BASECFLAGS',
+ # a number of derived variables. These need to be
+ # patched up as well.
+ 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
+
+ flags = _CONFIG_VARS[key]
+ flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags)
+ _CONFIG_VARS[key] = flags
+
+ if args:
+ vals = []
+ for name in args:
+ vals.append(_CONFIG_VARS.get(name))
+ return vals
+ else:
+ return _CONFIG_VARS
+
+
+def get_config_var(name):
+ """Return the value of a single variable using the dictionary returned by
+ 'get_config_vars()'.
+
+ Equivalent to get_config_vars().get(name)
+ """
+ return get_config_vars().get(name)
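+
+# Illustrative usage only (not part of the upstream module); values vary by
+# platform and build:
+#
+#     get_config_var('py_version_short')        # e.g. '2.7'
+#     get_config_vars('prefix', 'exec_prefix')  # e.g. ['/usr', '/usr']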
+
+
+def get_platform():
+ """Return a string that identifies the current platform.
+
+ This is used mainly to distinguish platform-specific build directories and
+ platform-specific built distributions. Typically includes the OS name
+ and version and the architecture (as supplied by 'os.uname()'),
+ although the exact information included depends on the OS; eg. for IRIX
+ the architecture isn't particularly important (IRIX only runs on SGI
+ hardware), but for Linux the kernel version isn't particularly
+ important.
+
+ Examples of returned values:
+ linux-i586
+ linux-alpha (?)
+ solaris-2.6-sun4u
+ irix-5.3
+ irix64-6.2
+
+ Windows will return one of:
+      win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc))
+ win-ia64 (64bit Windows on Itanium)
+ win32 (all others - specifically, sys.platform is returned)
+
+ For other non-POSIX platforms, currently just returns 'sys.platform'.
+ """
+ if os.name == 'nt':
+ # sniff sys.version for architecture.
+ prefix = " bit ("
+ i = sys.version.find(prefix)
+ if i == -1:
+ return sys.platform
+ j = sys.version.find(")", i)
+ look = sys.version[i+len(prefix):j].lower()
+ if look == 'amd64':
+ return 'win-amd64'
+ if look == 'itanium':
+ return 'win-ia64'
+ return sys.platform
+
+ if os.name != "posix" or not hasattr(os, 'uname'):
+ # XXX what about the architecture? NT is Intel or Alpha,
+ # Mac OS is M68k or PPC, etc.
+ return sys.platform
+
+ # Try to distinguish various flavours of Unix
+ osname, host, release, version, machine = os.uname()
+
+ # Convert the OS name to lowercase, remove '/' characters
+ # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
+ osname = osname.lower().replace('/', '')
+ machine = machine.replace(' ', '_')
+ machine = machine.replace('/', '-')
+
+ if osname[:5] == "linux":
+ # At least on Linux/Intel, 'machine' is the processor --
+ # i386, etc.
+ # XXX what about Alpha, SPARC, etc?
+ return "%s-%s" % (osname, machine)
+ elif osname[:5] == "sunos":
+ if release[0] >= "5": # SunOS 5 == Solaris 2
+ osname = "solaris"
+ release = "%d.%s" % (int(release[0]) - 3, release[2:])
+ # fall through to standard osname-release-machine representation
+ elif osname[:4] == "irix": # could be "irix64"!
+ return "%s-%s" % (osname, release)
+ elif osname[:3] == "aix":
+ return "%s-%s.%s" % (osname, version, release)
+ elif osname[:6] == "cygwin":
+ osname = "cygwin"
+ rel_re = re.compile(r'[\d.]+')
+ m = rel_re.match(release)
+ if m:
+ release = m.group()
+ elif osname[:6] == "darwin":
+ #
+ # For our purposes, we'll assume that the system version from
+ # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
+ # to. This makes the compatibility story a bit more sane because the
+ # machine is going to compile and link as if it were
+ # MACOSX_DEPLOYMENT_TARGET.
+ cfgvars = get_config_vars()
+ macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
+
+ if True:
+ # Always calculate the release of the running machine,
+ # needed to determine if we can build fat binaries or not.
+
+ macrelease = macver
+ # Get the system version. Reading this plist is a documented
+ # way to get the system version (see the documentation for
+ # the Gestalt Manager)
+ try:
+ f = open('/System/Library/CoreServices/SystemVersion.plist')
+ except IOError:
+ # We're on a plain darwin box, fall back to the default
+ # behaviour.
+ pass
+ else:
+ try:
+ m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
+ r'<string>(.*?)</string>', f.read())
+ finally:
+ f.close()
+ if m is not None:
+ macrelease = '.'.join(m.group(1).split('.')[:2])
+ # else: fall back to the default behaviour
+
+ if not macver:
+ macver = macrelease
+
+ if macver:
+ release = macver
+ osname = "macosx"
+
+ if ((macrelease + '.') >= '10.4.' and
+ '-arch' in get_config_vars().get('CFLAGS', '').strip()):
+ # The universal build will build fat binaries, but not on
+ # systems before 10.4
+ #
+ # Try to detect 4-way universal builds, those have machine-type
+ # 'universal' instead of 'fat'.
+
+ machine = 'fat'
+ cflags = get_config_vars().get('CFLAGS')
+
+ archs = re.findall(r'-arch\s+(\S+)', cflags)
+ archs = tuple(sorted(set(archs)))
+
+ if len(archs) == 1:
+ machine = archs[0]
+ elif archs == ('i386', 'ppc'):
+ machine = 'fat'
+ elif archs == ('i386', 'x86_64'):
+ machine = 'intel'
+ elif archs == ('i386', 'ppc', 'x86_64'):
+ machine = 'fat3'
+ elif archs == ('ppc64', 'x86_64'):
+ machine = 'fat64'
+ elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
+ machine = 'universal'
+ else:
+ raise ValueError(
+ "Don't know machine value for archs=%r" % (archs,))
+
+ elif machine == 'i386':
+ # On OSX the machine type returned by uname is always the
+ # 32-bit variant, even if the executable architecture is
+ # the 64-bit variant
+ if sys.maxsize >= 2**32:
+ machine = 'x86_64'
+
+ elif machine in ('PowerPC', 'Power_Macintosh'):
+ # Pick a sane name for the PPC architecture.
+ # See 'i386' case
+ if sys.maxsize >= 2**32:
+ machine = 'ppc64'
+ else:
+ machine = 'ppc'
+
+ return "%s-%s-%s" % (osname, release, machine)
+
+
+def get_python_version():
+ return _PY_VERSION_SHORT
+
+
+def _print_dict(title, data):
+ for index, (key, value) in enumerate(sorted(data.items())):
+ if index == 0:
+ print('%s: ' % (title))
+ print('\t%s = "%s"' % (key, value))
+
+
+def _main():
+ """Display all information sysconfig detains."""
+ print('Platform: "%s"' % get_platform())
+ print('Python version: "%s"' % get_python_version())
+ print('Current installation scheme: "%s"' % _get_default_scheme())
+ print()
+ _print_dict('Paths', get_paths())
+ print()
+ _print_dict('Variables', get_config_vars())
+
+
+if __name__ == '__main__':
+ _main()
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py
new file mode 100644
index 0000000000..d66d856637
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py
@@ -0,0 +1,2607 @@
+#-------------------------------------------------------------------
+# tarfile.py
+#-------------------------------------------------------------------
+# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
+# All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+from __future__ import print_function
+
+"""Read from and write to tar format archives.
+"""
+
+__version__ = "$Revision$"
+
+version = "0.9.0"
+__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)"
+__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $"
+__cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $"
+__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
+
+#---------
+# Imports
+#---------
+import sys
+import os
+import stat
+import errno
+import time
+import struct
+import copy
+import re
+
+try:
+ import grp, pwd
+except ImportError:
+ grp = pwd = None
+
+# os.symlink on Windows prior to 6.0 raises NotImplementedError
+symlink_exception = (AttributeError, NotImplementedError)
+try:
+ # WindowsError (1314) will be raised if the caller does not hold the
+ # SeCreateSymbolicLinkPrivilege privilege
+ symlink_exception += (WindowsError,)
+except NameError:
+ pass
+
+# from tarfile import *
+__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
+
+if sys.version_info[0] < 3:
+ import __builtin__ as builtins
+else:
+ import builtins
+
+_open = builtins.open # Since 'open' is TarFile.open
+
+#---------------------------------------------------------
+# tar constants
+#---------------------------------------------------------
+NUL = b"\0" # the null character
+BLOCKSIZE = 512 # length of processing blocks
+RECORDSIZE = BLOCKSIZE * 20 # length of records
+GNU_MAGIC = b"ustar \0" # magic gnu tar string
+POSIX_MAGIC = b"ustar\x0000" # magic posix tar string
+
+LENGTH_NAME = 100 # maximum length of a filename
+LENGTH_LINK = 100 # maximum length of a linkname
+LENGTH_PREFIX = 155 # maximum length of the prefix field
+
+REGTYPE = b"0" # regular file
+AREGTYPE = b"\0" # regular file
+LNKTYPE = b"1" # link (inside tarfile)
+SYMTYPE = b"2" # symbolic link
+CHRTYPE = b"3" # character special device
+BLKTYPE = b"4" # block special device
+DIRTYPE = b"5" # directory
+FIFOTYPE = b"6" # fifo special device
+CONTTYPE = b"7" # contiguous file
+
+GNUTYPE_LONGNAME = b"L" # GNU tar longname
+GNUTYPE_LONGLINK = b"K" # GNU tar longlink
+GNUTYPE_SPARSE = b"S" # GNU tar sparse file
+
+XHDTYPE = b"x" # POSIX.1-2001 extended header
+XGLTYPE = b"g" # POSIX.1-2001 global header
+SOLARIS_XHDTYPE = b"X" # Solaris extended header
+
+USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format
+GNU_FORMAT = 1 # GNU tar format
+PAX_FORMAT = 2 # POSIX.1-2001 (pax) format
+DEFAULT_FORMAT = GNU_FORMAT
+
+#---------------------------------------------------------
+# tarfile constants
+#---------------------------------------------------------
+# File types that tarfile supports:
+SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
+ SYMTYPE, DIRTYPE, FIFOTYPE,
+ CONTTYPE, CHRTYPE, BLKTYPE,
+ GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+ GNUTYPE_SPARSE)
+
+# File types that will be treated as a regular file.
+REGULAR_TYPES = (REGTYPE, AREGTYPE,
+ CONTTYPE, GNUTYPE_SPARSE)
+
+# File types that are part of the GNU tar format.
+GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+ GNUTYPE_SPARSE)
+
+# Fields from a pax header that override a TarInfo attribute.
+PAX_FIELDS = ("path", "linkpath", "size", "mtime",
+ "uid", "gid", "uname", "gname")
+
+# Fields from a pax header that are affected by hdrcharset.
+PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname"))
+
+# Fields in a pax header that are numbers, all other fields
+# are treated as strings.
+PAX_NUMBER_FIELDS = {
+ "atime": float,
+ "ctime": float,
+ "mtime": float,
+ "uid": int,
+ "gid": int,
+ "size": int
+}
+
+#---------------------------------------------------------
+# Bits used in the mode field, values in octal.
+#---------------------------------------------------------
+S_IFLNK = 0o120000 # symbolic link
+S_IFREG = 0o100000 # regular file
+S_IFBLK = 0o060000 # block device
+S_IFDIR = 0o040000 # directory
+S_IFCHR = 0o020000 # character device
+S_IFIFO = 0o010000 # fifo
+
+TSUID = 0o4000 # set UID on execution
+TSGID = 0o2000 # set GID on execution
+TSVTX = 0o1000 # reserved
+
+TUREAD = 0o400 # read by owner
+TUWRITE = 0o200 # write by owner
+TUEXEC = 0o100 # execute/search by owner
+TGREAD = 0o040 # read by group
+TGWRITE = 0o020 # write by group
+TGEXEC = 0o010 # execute/search by group
+TOREAD = 0o004 # read by other
+TOWRITE = 0o002 # write by other
+TOEXEC = 0o001 # execute/search by other
+
+#---------------------------------------------------------
+# initialization
+#---------------------------------------------------------
+if os.name in ("nt", "ce"):
+ ENCODING = "utf-8"
+else:
+ ENCODING = sys.getfilesystemencoding()
+
+#---------------------------------------------------------
+# Some useful functions
+#---------------------------------------------------------
+
+def stn(s, length, encoding, errors):
+ """Convert a string to a null-terminated bytes object.
+ """
+ s = s.encode(encoding, errors)
+ return s[:length] + (length - len(s)) * NUL
+
+def nts(s, encoding, errors):
+ """Convert a null-terminated bytes object to a string.
+ """
+ p = s.find(b"\0")
+ if p != -1:
+ s = s[:p]
+ return s.decode(encoding, errors)
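+
+# Illustrative round trip (not part of the upstream module):
+#
+#     stn("usr/bin", 10, "utf-8", "strict")          -> b"usr/bin\x00\x00\x00"
+#     nts(b"usr/bin\x00\x00\x00", "utf-8", "strict") -> "usr/bin"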
+
+def nti(s):
+ """Convert a number field to a python number.
+ """
+ # There are two possible encodings for a number field, see
+ # itn() below.
+ if s[0] != chr(0o200):
+ try:
+ n = int(nts(s, "ascii", "strict") or "0", 8)
+ except ValueError:
+ raise InvalidHeaderError("invalid header")
+ else:
+ n = 0
+ for i in range(len(s) - 1):
+ n <<= 8
+ n += ord(s[i + 1])
+ return n
+
+def itn(n, digits=8, format=DEFAULT_FORMAT):
+ """Convert a python number to a number field.
+ """
+ # POSIX 1003.1-1988 requires numbers to be encoded as a string of
+ # octal digits followed by a null-byte, this allows values up to
+ # (8**(digits-1))-1. GNU tar allows storing numbers greater than
+ # that if necessary. A leading 0o200 byte indicates this particular
+ # encoding, the following digits-1 bytes are a big-endian
+ # representation. This allows values up to (256**(digits-1))-1.
+ if 0 <= n < 8 ** (digits - 1):
+ s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL
+ else:
+ if format != GNU_FORMAT or n >= 256 ** (digits - 1):
+ raise ValueError("overflow in number field")
+
+ if n < 0:
+ # XXX We mimic GNU tar's behaviour with negative numbers,
+ # this could raise OverflowError.
+ n = struct.unpack("L", struct.pack("l", n))[0]
+
+ s = bytearray()
+ for i in range(digits - 1):
+ s.insert(0, n & 0o377)
+ n >>= 8
+ s.insert(0, 0o200)
+ return s
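+
+# Illustrative encoding (not part of the upstream module):
+#
+#     itn(0o755)   -> b"0000755\x00"  (octal digits, NUL-terminated)
+#     itn(8 ** 7)  -> a 0o200-prefixed big-endian field (GNU base-256 extension)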
+
+def calc_chksums(buf):
+ """Calculate the checksum for a member's header by summing up all
+ characters except for the chksum field which is treated as if
+ it was filled with spaces. According to the GNU tar sources,
+ some tars (Sun and NeXT) calculate chksum with signed char,
+ which will be different if there are chars in the buffer with
+ the high bit set. So we calculate two checksums, unsigned and
+ signed.
+ """
+ unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512]))
+ signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512]))
+ return unsigned_chksum, signed_chksum
+
+def copyfileobj(src, dst, length=None):
+ """Copy length bytes from fileobj src to fileobj dst.
+ If length is None, copy the entire content.
+ """
+ if length == 0:
+ return
+ if length is None:
+ while True:
+ buf = src.read(16*1024)
+ if not buf:
+ break
+ dst.write(buf)
+ return
+
+ BUFSIZE = 16 * 1024
+ blocks, remainder = divmod(length, BUFSIZE)
+ for b in range(blocks):
+ buf = src.read(BUFSIZE)
+ if len(buf) < BUFSIZE:
+ raise IOError("end of file reached")
+ dst.write(buf)
+
+ if remainder != 0:
+ buf = src.read(remainder)
+ if len(buf) < remainder:
+ raise IOError("end of file reached")
+ dst.write(buf)
+ return
+
+filemode_table = (
+ ((S_IFLNK, "l"),
+ (S_IFREG, "-"),
+ (S_IFBLK, "b"),
+ (S_IFDIR, "d"),
+ (S_IFCHR, "c"),
+ (S_IFIFO, "p")),
+
+ ((TUREAD, "r"),),
+ ((TUWRITE, "w"),),
+ ((TUEXEC|TSUID, "s"),
+ (TSUID, "S"),
+ (TUEXEC, "x")),
+
+ ((TGREAD, "r"),),
+ ((TGWRITE, "w"),),
+ ((TGEXEC|TSGID, "s"),
+ (TSGID, "S"),
+ (TGEXEC, "x")),
+
+ ((TOREAD, "r"),),
+ ((TOWRITE, "w"),),
+ ((TOEXEC|TSVTX, "t"),
+ (TSVTX, "T"),
+ (TOEXEC, "x"))
+)
+
+def filemode(mode):
+ """Convert a file's mode to a string of the form
+ -rwxrwxrwx.
+ Used by TarFile.list()
+ """
+ perm = []
+ for table in filemode_table:
+ for bit, char in table:
+ if mode & bit == bit:
+ perm.append(char)
+ break
+ else:
+ perm.append("-")
+ return "".join(perm)
+
+class TarError(Exception):
+ """Base exception."""
+ pass
+class ExtractError(TarError):
+ """General exception for extract errors."""
+ pass
+class ReadError(TarError):
+ """Exception for unreadable tar archives."""
+ pass
+class CompressionError(TarError):
+ """Exception for unavailable compression methods."""
+ pass
+class StreamError(TarError):
+ """Exception for unsupported operations on stream-like TarFiles."""
+ pass
+class HeaderError(TarError):
+ """Base exception for header errors."""
+ pass
+class EmptyHeaderError(HeaderError):
+ """Exception for empty headers."""
+ pass
+class TruncatedHeaderError(HeaderError):
+ """Exception for truncated headers."""
+ pass
+class EOFHeaderError(HeaderError):
+ """Exception for end of file headers."""
+ pass
+class InvalidHeaderError(HeaderError):
+ """Exception for invalid headers."""
+ pass
+class SubsequentHeaderError(HeaderError):
+ """Exception for missing and invalid extended headers."""
+ pass
+
+#---------------------------
+# internal stream interface
+#---------------------------
+class _LowLevelFile(object):
+ """Low-level file object. Supports reading and writing.
+ It is used instead of a regular file object for streaming
+ access.
+ """
+
+ def __init__(self, name, mode):
+ mode = {
+ "r": os.O_RDONLY,
+ "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
+ }[mode]
+ if hasattr(os, "O_BINARY"):
+ mode |= os.O_BINARY
+ self.fd = os.open(name, mode, 0o666)
+
+ def close(self):
+ os.close(self.fd)
+
+ def read(self, size):
+ return os.read(self.fd, size)
+
+ def write(self, s):
+ os.write(self.fd, s)
+
+class _Stream(object):
+ """Class that serves as an adapter between TarFile and
+ a stream-like object. The stream-like object only
+ needs to have a read() or write() method and is accessed
+ blockwise. Use of gzip or bzip2 compression is possible.
+ A stream-like object could be for example: sys.stdin,
+ sys.stdout, a socket, a tape device etc.
+
+ _Stream is intended to be used only internally.
+ """
+
+ def __init__(self, name, mode, comptype, fileobj, bufsize):
+ """Construct a _Stream object.
+ """
+ self._extfileobj = True
+ if fileobj is None:
+ fileobj = _LowLevelFile(name, mode)
+ self._extfileobj = False
+
+ if comptype == '*':
+ # Enable transparent compression detection for the
+ # stream interface
+ fileobj = _StreamProxy(fileobj)
+ comptype = fileobj.getcomptype()
+
+ self.name = name or ""
+ self.mode = mode
+ self.comptype = comptype
+ self.fileobj = fileobj
+ self.bufsize = bufsize
+ self.buf = b""
+ self.pos = 0
+ self.closed = False
+
+ try:
+ if comptype == "gz":
+ try:
+ import zlib
+ except ImportError:
+ raise CompressionError("zlib module is not available")
+ self.zlib = zlib
+ self.crc = zlib.crc32(b"")
+ if mode == "r":
+ self._init_read_gz()
+ else:
+ self._init_write_gz()
+
+ if comptype == "bz2":
+ try:
+ import bz2
+ except ImportError:
+ raise CompressionError("bz2 module is not available")
+ if mode == "r":
+ self.dbuf = b""
+ self.cmp = bz2.BZ2Decompressor()
+ else:
+ self.cmp = bz2.BZ2Compressor()
+ except:
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+ raise
+
+ def __del__(self):
+ if hasattr(self, "closed") and not self.closed:
+ self.close()
+
+ def _init_write_gz(self):
+ """Initialize for writing with gzip compression.
+ """
+ self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
+ -self.zlib.MAX_WBITS,
+ self.zlib.DEF_MEM_LEVEL,
+ 0)
+ timestamp = struct.pack("<L", int(time.time()))
+ self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
+ if self.name.endswith(".gz"):
+ self.name = self.name[:-3]
+ # RFC1952 says we must use ISO-8859-1 for the FNAME field.
+ self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
+
+ def write(self, s):
+ """Write string s to the stream.
+ """
+ if self.comptype == "gz":
+ self.crc = self.zlib.crc32(s, self.crc)
+ self.pos += len(s)
+ if self.comptype != "tar":
+ s = self.cmp.compress(s)
+ self.__write(s)
+
+ def __write(self, s):
+ """Write string s to the stream if a whole new block
+ is ready to be written.
+ """
+ self.buf += s
+ while len(self.buf) > self.bufsize:
+ self.fileobj.write(self.buf[:self.bufsize])
+ self.buf = self.buf[self.bufsize:]
+
+ def close(self):
+ """Close the _Stream object. No operation should be
+ done on it afterwards.
+ """
+ if self.closed:
+ return
+
+ if self.mode == "w" and self.comptype != "tar":
+ self.buf += self.cmp.flush()
+
+ if self.mode == "w" and self.buf:
+ self.fileobj.write(self.buf)
+ self.buf = b""
+ if self.comptype == "gz":
+ # The native zlib crc is an unsigned 32-bit integer, but
+ # the Python wrapper implicitly casts that to a signed C
+ # long. So, on a 32-bit box self.crc may "look negative",
+ # while the same crc on a 64-bit box may "look positive".
+ # To avoid irksome warnings from the `struct` module, force
+ # it to look positive on all boxes.
+ self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff))
+ self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
+
+ if not self._extfileobj:
+ self.fileobj.close()
+
+ self.closed = True
+
+ def _init_read_gz(self):
+ """Initialize for reading a gzip compressed fileobj.
+ """
+ self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
+ self.dbuf = b""
+
+ # taken from gzip.GzipFile with some alterations
+ if self.__read(2) != b"\037\213":
+ raise ReadError("not a gzip file")
+ if self.__read(1) != b"\010":
+ raise CompressionError("unsupported compression method")
+
+ flag = ord(self.__read(1))
+ self.__read(6)
+
+ if flag & 4:
+ xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
+ self.read(xlen)
+ if flag & 8:
+ while True:
+ s = self.__read(1)
+ if not s or s == NUL:
+ break
+ if flag & 16:
+ while True:
+ s = self.__read(1)
+ if not s or s == NUL:
+ break
+ if flag & 2:
+ self.__read(2)
+
+ def tell(self):
+ """Return the stream's file pointer position.
+ """
+ return self.pos
+
+ def seek(self, pos=0):
+ """Set the stream's file pointer to pos. Negative seeking
+ is forbidden.
+ """
+ if pos - self.pos >= 0:
+ blocks, remainder = divmod(pos - self.pos, self.bufsize)
+ for i in range(blocks):
+ self.read(self.bufsize)
+ self.read(remainder)
+ else:
+ raise StreamError("seeking backwards is not allowed")
+ return self.pos
+
+ def read(self, size=None):
+ """Return the next size number of bytes from the stream.
+ If size is not defined, return all bytes of the stream
+ up to EOF.
+ """
+ if size is None:
+ t = []
+ while True:
+ buf = self._read(self.bufsize)
+ if not buf:
+ break
+ t.append(buf)
+ buf = b"".join(t)
+ else:
+ buf = self._read(size)
+ self.pos += len(buf)
+ return buf
+
+ def _read(self, size):
+ """Return size bytes from the stream.
+ """
+ if self.comptype == "tar":
+ return self.__read(size)
+
+ c = len(self.dbuf)
+ while c < size:
+ buf = self.__read(self.bufsize)
+ if not buf:
+ break
+ try:
+ buf = self.cmp.decompress(buf)
+ except IOError:
+ raise ReadError("invalid compressed data")
+ self.dbuf += buf
+ c += len(buf)
+ buf = self.dbuf[:size]
+ self.dbuf = self.dbuf[size:]
+ return buf
+
+ def __read(self, size):
+ """Return size bytes from stream. If internal buffer is empty,
+ read another block from the stream.
+ """
+ c = len(self.buf)
+ while c < size:
+ buf = self.fileobj.read(self.bufsize)
+ if not buf:
+ break
+ self.buf += buf
+ c += len(buf)
+ buf = self.buf[:size]
+ self.buf = self.buf[size:]
+ return buf
+# class _Stream
+
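+# Illustrative sketch (not part of the module itself): the stream interface
+# above is normally reached through TarFile.open() with a pipe-style mode such
+# as "r|gz".  Members can only be visited in order; seeking backwards raises
+# StreamError.  The archive name below is hypothetical.
+#
+#     with TarFile.open("backup.tar.gz", "r|gz") as tf:
+#         for member in tf:              # members arrive strictly in order
+#             print(member.name, member.size)
+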
+class _StreamProxy(object):
+ """Small proxy class that enables transparent compression
+ detection for the Stream interface (mode 'r|*').
+ """
+
+ def __init__(self, fileobj):
+ self.fileobj = fileobj
+ self.buf = self.fileobj.read(BLOCKSIZE)
+
+ def read(self, size):
+ self.read = self.fileobj.read
+ return self.buf
+
+ def getcomptype(self):
+ if self.buf.startswith(b"\037\213\010"):
+ return "gz"
+ if self.buf.startswith(b"BZh91"):
+ return "bz2"
+ return "tar"
+
+ def close(self):
+ self.fileobj.close()
+# class StreamProxy
+
+class _BZ2Proxy(object):
+ """Small proxy class that enables external file object
+ support for "r:bz2" and "w:bz2" modes. This is actually
+ a workaround for a limitation in bz2 module's BZ2File
+ class which (unlike gzip.GzipFile) has no support for
+ a file object argument.
+ """
+
+ blocksize = 16 * 1024
+
+ def __init__(self, fileobj, mode):
+ self.fileobj = fileobj
+ self.mode = mode
+ self.name = getattr(self.fileobj, "name", None)
+ self.init()
+
+ def init(self):
+ import bz2
+ self.pos = 0
+ if self.mode == "r":
+ self.bz2obj = bz2.BZ2Decompressor()
+ self.fileobj.seek(0)
+ self.buf = b""
+ else:
+ self.bz2obj = bz2.BZ2Compressor()
+
+ def read(self, size):
+ x = len(self.buf)
+ while x < size:
+ raw = self.fileobj.read(self.blocksize)
+ if not raw:
+ break
+ data = self.bz2obj.decompress(raw)
+ self.buf += data
+ x += len(data)
+
+ buf = self.buf[:size]
+ self.buf = self.buf[size:]
+ self.pos += len(buf)
+ return buf
+
+ def seek(self, pos):
+ if pos < self.pos:
+ self.init()
+ self.read(pos - self.pos)
+
+ def tell(self):
+ return self.pos
+
+ def write(self, data):
+ self.pos += len(data)
+ raw = self.bz2obj.compress(data)
+ self.fileobj.write(raw)
+
+ def close(self):
+ if self.mode == "w":
+ raw = self.bz2obj.flush()
+ self.fileobj.write(raw)
+# class _BZ2Proxy
+
+#------------------------
+# Extraction file object
+#------------------------
+class _FileInFile(object):
+ """A thin wrapper around an existing file object that
+ provides a part of its data as an individual file
+ object.
+ """
+
+ def __init__(self, fileobj, offset, size, blockinfo=None):
+ self.fileobj = fileobj
+ self.offset = offset
+ self.size = size
+ self.position = 0
+
+ if blockinfo is None:
+ blockinfo = [(0, size)]
+
+ # Construct a map with data and zero blocks.
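+ # Illustrative example (hypothetical values): for a member with size=20,
+ # offset=1024 and blockinfo=[(0, 3), (10, 5)] the loop below produces
+ # [(True, 0, 3, 1024), (False, 3, 10, None),
+ #  (True, 10, 15, 1027), (False, 15, 20, None)],
+ # i.e. data blocks point into the archive and holes are filled with NUL.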
+ self.map_index = 0
+ self.map = []
+ lastpos = 0
+ realpos = self.offset
+ for offset, size in blockinfo:
+ if offset > lastpos:
+ self.map.append((False, lastpos, offset, None))
+ self.map.append((True, offset, offset + size, realpos))
+ realpos += size
+ lastpos = offset + size
+ if lastpos < self.size:
+ self.map.append((False, lastpos, self.size, None))
+
+ def seekable(self):
+ if not hasattr(self.fileobj, "seekable"):
+ # XXX gzip.GzipFile and bz2.BZ2File
+ return True
+ return self.fileobj.seekable()
+
+ def tell(self):
+ """Return the current file position.
+ """
+ return self.position
+
+ def seek(self, position):
+ """Seek to a position in the file.
+ """
+ self.position = position
+
+ def read(self, size=None):
+ """Read data from the file.
+ """
+ if size is None:
+ size = self.size - self.position
+ else:
+ size = min(size, self.size - self.position)
+
+ buf = b""
+ while size > 0:
+ while True:
+ data, start, stop, offset = self.map[self.map_index]
+ if start <= self.position < stop:
+ break
+ else:
+ self.map_index += 1
+ if self.map_index == len(self.map):
+ self.map_index = 0
+ length = min(size, stop - self.position)
+ if data:
+ self.fileobj.seek(offset + (self.position - start))
+ buf += self.fileobj.read(length)
+ else:
+ buf += NUL * length
+ size -= length
+ self.position += length
+ return buf
+#class _FileInFile
+
+
+class ExFileObject(object):
+ """File-like object for reading an archive member.
+ Is returned by TarFile.extractfile().
+ """
+ blocksize = 1024
+
+ def __init__(self, tarfile, tarinfo):
+ self.fileobj = _FileInFile(tarfile.fileobj,
+ tarinfo.offset_data,
+ tarinfo.size,
+ tarinfo.sparse)
+ self.name = tarinfo.name
+ self.mode = "r"
+ self.closed = False
+ self.size = tarinfo.size
+
+ self.position = 0
+ self.buffer = b""
+
+ def readable(self):
+ return True
+
+ def writable(self):
+ return False
+
+ def seekable(self):
+ return self.fileobj.seekable()
+
+ def read(self, size=None):
+ """Read at most size bytes from the file. If size is not
+ present or None, read all data until EOF is reached.
+ """
+ if self.closed:
+ raise ValueError("I/O operation on closed file")
+
+ buf = b""
+ if self.buffer:
+ if size is None:
+ buf = self.buffer
+ self.buffer = b""
+ else:
+ buf = self.buffer[:size]
+ self.buffer = self.buffer[size:]
+
+ if size is None:
+ buf += self.fileobj.read()
+ else:
+ buf += self.fileobj.read(size - len(buf))
+
+ self.position += len(buf)
+ return buf
+
+ # XXX TextIOWrapper uses the read1() method.
+ read1 = read
+
+ def readline(self, size=-1):
+ """Read one entire line from the file. If size is present
+ and non-negative, return at most that many bytes, which may
+ be an incomplete line.
+ """
+ if self.closed:
+ raise ValueError("I/O operation on closed file")
+
+ pos = self.buffer.find(b"\n") + 1
+ if pos == 0:
+ # no newline found.
+ while True:
+ buf = self.fileobj.read(self.blocksize)
+ self.buffer += buf
+ if not buf or b"\n" in buf:
+ pos = self.buffer.find(b"\n") + 1
+ if pos == 0:
+ # no newline found.
+ pos = len(self.buffer)
+ break
+
+ if size != -1:
+ pos = min(size, pos)
+
+ buf = self.buffer[:pos]
+ self.buffer = self.buffer[pos:]
+ self.position += len(buf)
+ return buf
+
+ def readlines(self):
+ """Return a list with all remaining lines.
+ """
+ result = []
+ while True:
+ line = self.readline()
+ if not line: break
+ result.append(line)
+ return result
+
+ def tell(self):
+ """Return the current file position.
+ """
+ if self.closed:
+ raise ValueError("I/O operation on closed file")
+
+ return self.position
+
+ def seek(self, pos, whence=os.SEEK_SET):
+ """Seek to a position in the file.
+ """
+ if self.closed:
+ raise ValueError("I/O operation on closed file")
+
+ if whence == os.SEEK_SET:
+ self.position = min(max(pos, 0), self.size)
+ elif whence == os.SEEK_CUR:
+ if pos < 0:
+ self.position = max(self.position + pos, 0)
+ else:
+ self.position = min(self.position + pos, self.size)
+ elif whence == os.SEEK_END:
+ self.position = max(min(self.size + pos, self.size), 0)
+ else:
+ raise ValueError("Invalid argument")
+
+ self.buffer = b""
+ self.fileobj.seek(self.position)
+
+ def close(self):
+ """Close the file object.
+ """
+ self.closed = True
+
+ def __iter__(self):
+ """Get an iterator over the file's lines.
+ """
+ while True:
+ line = self.readline()
+ if not line:
+ break
+ yield line
+#class ExFileObject
+
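+# Illustrative sketch (not part of the module itself): TarFile.extractfile()
+# returns an ExFileObject, which can be read or iterated directly; it also
+# exposes read1(), so wrapping it in io.TextIOWrapper for text access should
+# work as well.  The archive and member names below are hypothetical.
+#
+#     tf = TarFile.open("docs.tar")
+#     member = tf.extractfile("docs/README")
+#     for line in member:                 # ExFileObject is iterable
+#         ...
+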
+#------------------
+# Exported Classes
+#------------------
+class TarInfo(object):
+ """Informational class which holds the details about an
+ archive member given by a tar header block.
+ TarInfo objects are returned by TarFile.getmember(),
+ TarFile.getmembers() and TarFile.gettarinfo() and are
+ usually created internally.
+ """
+
+ __slots__ = ("name", "mode", "uid", "gid", "size", "mtime",
+ "chksum", "type", "linkname", "uname", "gname",
+ "devmajor", "devminor",
+ "offset", "offset_data", "pax_headers", "sparse",
+ "tarfile", "_sparse_structs", "_link_target")
+
+ def __init__(self, name=""):
+ """Construct a TarInfo object. name is the optional name
+ of the member.
+ """
+ self.name = name # member name
+ self.mode = 0o644 # file permissions
+ self.uid = 0 # user id
+ self.gid = 0 # group id
+ self.size = 0 # file size
+ self.mtime = 0 # modification time
+ self.chksum = 0 # header checksum
+ self.type = REGTYPE # member type
+ self.linkname = "" # link name
+ self.uname = "" # user name
+ self.gname = "" # group name
+ self.devmajor = 0 # device major number
+ self.devminor = 0 # device minor number
+
+ self.offset = 0 # the tar header starts here
+ self.offset_data = 0 # the file's data starts here
+
+ self.sparse = None # sparse member information
+ self.pax_headers = {} # pax header information
+
+ # In pax headers the "name" and "linkname" field are called
+ # "path" and "linkpath".
+ def _getpath(self):
+ return self.name
+ def _setpath(self, name):
+ self.name = name
+ path = property(_getpath, _setpath)
+
+ def _getlinkpath(self):
+ return self.linkname
+ def _setlinkpath(self, linkname):
+ self.linkname = linkname
+ linkpath = property(_getlinkpath, _setlinkpath)
+
+ def __repr__(self):
+ return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
+
+ def get_info(self):
+ """Return the TarInfo's attributes as a dictionary.
+ """
+ info = {
+ "name": self.name,
+ "mode": self.mode & 0o7777,
+ "uid": self.uid,
+ "gid": self.gid,
+ "size": self.size,
+ "mtime": self.mtime,
+ "chksum": self.chksum,
+ "type": self.type,
+ "linkname": self.linkname,
+ "uname": self.uname,
+ "gname": self.gname,
+ "devmajor": self.devmajor,
+ "devminor": self.devminor
+ }
+
+ if info["type"] == DIRTYPE and not info["name"].endswith("/"):
+ info["name"] += "/"
+
+ return info
+
+ def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
+ """Return a tar header as a string of 512 byte blocks.
+ """
+ info = self.get_info()
+
+ if format == USTAR_FORMAT:
+ return self.create_ustar_header(info, encoding, errors)
+ elif format == GNU_FORMAT:
+ return self.create_gnu_header(info, encoding, errors)
+ elif format == PAX_FORMAT:
+ return self.create_pax_header(info, encoding)
+ else:
+ raise ValueError("invalid format")
+
+ def create_ustar_header(self, info, encoding, errors):
+ """Return the object as a ustar header block.
+ """
+ info["magic"] = POSIX_MAGIC
+
+ if len(info["linkname"]) > LENGTH_LINK:
+ raise ValueError("linkname is too long")
+
+ if len(info["name"]) > LENGTH_NAME:
+ info["prefix"], info["name"] = self._posix_split_name(info["name"])
+
+ return self._create_header(info, USTAR_FORMAT, encoding, errors)
+
+ def create_gnu_header(self, info, encoding, errors):
+ """Return the object as a GNU header block sequence.
+ """
+ info["magic"] = GNU_MAGIC
+
+ buf = b""
+ if len(info["linkname"]) > LENGTH_LINK:
+ buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
+
+ if len(info["name"]) > LENGTH_NAME:
+ buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
+
+ return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
+
+ def create_pax_header(self, info, encoding):
+ """Return the object as a ustar header block. If it cannot be
+ represented this way, prepend a pax extended header sequence
+ with supplemental information.
+ """
+ info["magic"] = POSIX_MAGIC
+ pax_headers = self.pax_headers.copy()
+
+ # Test string fields for values that exceed the field length or cannot
+ # be represented in ASCII encoding.
+ for name, hname, length in (
+ ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
+ ("uname", "uname", 32), ("gname", "gname", 32)):
+
+ if hname in pax_headers:
+ # The pax header has priority.
+ continue
+
+ # Try to encode the string as ASCII.
+ try:
+ info[name].encode("ascii", "strict")
+ except UnicodeEncodeError:
+ pax_headers[hname] = info[name]
+ continue
+
+ if len(info[name]) > length:
+ pax_headers[hname] = info[name]
+
+ # Test number fields for values that exceed the field limit or that
+ # would have to be stored as floats.
+ for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
+ if name in pax_headers:
+ # The pax header has priority. Avoid overflow.
+ info[name] = 0
+ continue
+
+ val = info[name]
+ if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
+ pax_headers[name] = str(val)
+ info[name] = 0
+
+ # Create a pax extended header if necessary.
+ if pax_headers:
+ buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
+ else:
+ buf = b""
+
+ return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
+
+ @classmethod
+ def create_pax_global_header(cls, pax_headers):
+ """Return the object as a pax global header block sequence.
+ """
+ return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8")
+
+ def _posix_split_name(self, name):
+ """Split a name longer than 100 chars into a prefix
+ and a name part.
+ """
+ prefix = name[:LENGTH_PREFIX + 1]
+ while prefix and prefix[-1] != "/":
+ prefix = prefix[:-1]
+
+ name = name[len(prefix):]
+ prefix = prefix[:-1]
+
+ if not prefix or len(name) > LENGTH_NAME:
+ raise ValueError("name is too long")
+ return prefix, name
+
+ @staticmethod
+ def _create_header(info, format, encoding, errors):
+ """Return a header block. info is a dictionary with file
+ information, format must be one of the *_FORMAT constants.
+ """
+ parts = [
+ stn(info.get("name", ""), 100, encoding, errors),
+ itn(info.get("mode", 0) & 0o7777, 8, format),
+ itn(info.get("uid", 0), 8, format),
+ itn(info.get("gid", 0), 8, format),
+ itn(info.get("size", 0), 12, format),
+ itn(info.get("mtime", 0), 12, format),
+ b" ", # checksum field
+ info.get("type", REGTYPE),
+ stn(info.get("linkname", ""), 100, encoding, errors),
+ info.get("magic", POSIX_MAGIC),
+ stn(info.get("uname", ""), 32, encoding, errors),
+ stn(info.get("gname", ""), 32, encoding, errors),
+ itn(info.get("devmajor", 0), 8, format),
+ itn(info.get("devminor", 0), 8, format),
+ stn(info.get("prefix", ""), 155, encoding, errors)
+ ]
+
+ buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
+ chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
+ buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:]
+ return buf
+
+ @staticmethod
+ def _create_payload(payload):
+ """Return the string payload filled with zero bytes
+ up to the next 512 byte border.
+ """
+ blocks, remainder = divmod(len(payload), BLOCKSIZE)
+ if remainder > 0:
+ payload += (BLOCKSIZE - remainder) * NUL
+ return payload
+
+ @classmethod
+ def _create_gnu_long_header(cls, name, type, encoding, errors):
+ """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
+ for name.
+ """
+ name = name.encode(encoding, errors) + NUL
+
+ info = {}
+ info["name"] = "././@LongLink"
+ info["type"] = type
+ info["size"] = len(name)
+ info["magic"] = GNU_MAGIC
+
+ # create extended header + name blocks.
+ return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
+ cls._create_payload(name)
+
+ @classmethod
+ def _create_pax_generic_header(cls, pax_headers, type, encoding):
+ """Return a POSIX.1-2008 extended or global header sequence
+ that contains a list of keyword, value pairs. The values
+ must be strings.
+ """
+ # Check if one of the fields contains surrogate characters and thereby
+ # forces hdrcharset=BINARY, see _proc_pax() for more information.
+ binary = False
+ for keyword, value in pax_headers.items():
+ try:
+ value.encode("utf8", "strict")
+ except UnicodeEncodeError:
+ binary = True
+ break
+
+ records = b""
+ if binary:
+ # Put the hdrcharset field at the beginning of the header.
+ records += b"21 hdrcharset=BINARY\n"
+
+ for keyword, value in pax_headers.items():
+ keyword = keyword.encode("utf8")
+ if binary:
+ # Try to restore the original byte representation of `value'.
+ # Needless to say, the encoding must match the string.
+ value = value.encode(encoding, "surrogateescape")
+ else:
+ value = value.encode("utf8")
+
+ l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
+ n = p = 0
+ while True:
+ n = l + len(str(p))
+ if n == p:
+ break
+ p = n
+ records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
+
+ # We use a hardcoded "././@PaxHeader" name like star does
+ # instead of the one that POSIX recommends.
+ info = {}
+ info["name"] = "././@PaxHeader"
+ info["type"] = type
+ info["size"] = len(records)
+ info["magic"] = POSIX_MAGIC
+
+ # Create pax header + record blocks.
+ return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
+ cls._create_payload(records)
+
+ @classmethod
+ def frombuf(cls, buf, encoding, errors):
+ """Construct a TarInfo object from a 512 byte bytes object.
+ """
+ if len(buf) == 0:
+ raise EmptyHeaderError("empty header")
+ if len(buf) != BLOCKSIZE:
+ raise TruncatedHeaderError("truncated header")
+ if buf.count(NUL) == BLOCKSIZE:
+ raise EOFHeaderError("end of file header")
+
+ chksum = nti(buf[148:156])
+ if chksum not in calc_chksums(buf):
+ raise InvalidHeaderError("bad checksum")
+
+ obj = cls()
+ obj.name = nts(buf[0:100], encoding, errors)
+ obj.mode = nti(buf[100:108])
+ obj.uid = nti(buf[108:116])
+ obj.gid = nti(buf[116:124])
+ obj.size = nti(buf[124:136])
+ obj.mtime = nti(buf[136:148])
+ obj.chksum = chksum
+ obj.type = buf[156:157]
+ obj.linkname = nts(buf[157:257], encoding, errors)
+ obj.uname = nts(buf[265:297], encoding, errors)
+ obj.gname = nts(buf[297:329], encoding, errors)
+ obj.devmajor = nti(buf[329:337])
+ obj.devminor = nti(buf[337:345])
+ prefix = nts(buf[345:500], encoding, errors)
+
+ # Old V7 tar format represents a directory as a regular
+ # file with a trailing slash.
+ if obj.type == AREGTYPE and obj.name.endswith("/"):
+ obj.type = DIRTYPE
+
+ # The old GNU sparse format occupies some of the unused
+ # space in the buffer for up to 4 sparse structures.
+ # Save them for later processing in _proc_sparse().
+ if obj.type == GNUTYPE_SPARSE:
+ pos = 386
+ structs = []
+ for i in range(4):
+ try:
+ offset = nti(buf[pos:pos + 12])
+ numbytes = nti(buf[pos + 12:pos + 24])
+ except ValueError:
+ break
+ structs.append((offset, numbytes))
+ pos += 24
+ isextended = bool(buf[482])
+ origsize = nti(buf[483:495])
+ obj._sparse_structs = (structs, isextended, origsize)
+
+ # Remove redundant slashes from directories.
+ if obj.isdir():
+ obj.name = obj.name.rstrip("/")
+
+ # Reconstruct a ustar longname.
+ if prefix and obj.type not in GNU_TYPES:
+ obj.name = prefix + "/" + obj.name
+ return obj
+
+ @classmethod
+ def fromtarfile(cls, tarfile):
+ """Return the next TarInfo object from TarFile object
+ tarfile.
+ """
+ buf = tarfile.fileobj.read(BLOCKSIZE)
+ obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
+ obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
+ return obj._proc_member(tarfile)
+
+ #--------------------------------------------------------------------------
+ # The following are methods that are called depending on the type of a
+ # member. The entry point is _proc_member() which can be overridden in a
+ # subclass to add custom _proc_*() methods. A _proc_*() method MUST
+ # implement the following
+ # operations:
+ # 1. Set self.offset_data to the position where the data blocks begin,
+ # if there is data that follows.
+ # 2. Set tarfile.offset to the position where the next member's header will
+ # begin.
+ # 3. Return self or another valid TarInfo object.
+ def _proc_member(self, tarfile):
+ """Choose the right processing method depending on
+ the type and call it.
+ """
+ if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
+ return self._proc_gnulong(tarfile)
+ elif self.type == GNUTYPE_SPARSE:
+ return self._proc_sparse(tarfile)
+ elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
+ return self._proc_pax(tarfile)
+ else:
+ return self._proc_builtin(tarfile)
+
+ def _proc_builtin(self, tarfile):
+ """Process a builtin type or an unknown type which
+ will be treated as a regular file.
+ """
+ self.offset_data = tarfile.fileobj.tell()
+ offset = self.offset_data
+ if self.isreg() or self.type not in SUPPORTED_TYPES:
+ # Skip the following data blocks.
+ offset += self._block(self.size)
+ tarfile.offset = offset
+
+ # Patch the TarInfo object with saved global
+ # header information.
+ self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
+
+ return self
+
+ def _proc_gnulong(self, tarfile):
+ """Process the blocks that hold a GNU longname
+ or longlink member.
+ """
+ buf = tarfile.fileobj.read(self._block(self.size))
+
+ # Fetch the next header and process it.
+ try:
+ next = self.fromtarfile(tarfile)
+ except HeaderError:
+ raise SubsequentHeaderError("missing or bad subsequent header")
+
+ # Patch the TarInfo object from the next header with
+ # the longname information.
+ next.offset = self.offset
+ if self.type == GNUTYPE_LONGNAME:
+ next.name = nts(buf, tarfile.encoding, tarfile.errors)
+ elif self.type == GNUTYPE_LONGLINK:
+ next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
+
+ return next
+
+ def _proc_sparse(self, tarfile):
+ """Process a GNU sparse header plus extra headers.
+ """
+ # We already collected some sparse structures in frombuf().
+ structs, isextended, origsize = self._sparse_structs
+ del self._sparse_structs
+
+ # Collect sparse structures from extended header blocks.
+ while isextended:
+ buf = tarfile.fileobj.read(BLOCKSIZE)
+ pos = 0
+ for i in range(21):
+ try:
+ offset = nti(buf[pos:pos + 12])
+ numbytes = nti(buf[pos + 12:pos + 24])
+ except ValueError:
+ break
+ if offset and numbytes:
+ structs.append((offset, numbytes))
+ pos += 24
+ isextended = bool(buf[504])
+ self.sparse = structs
+
+ self.offset_data = tarfile.fileobj.tell()
+ tarfile.offset = self.offset_data + self._block(self.size)
+ self.size = origsize
+ return self
+
+ def _proc_pax(self, tarfile):
+ """Process an extended or global header as described in
+ POSIX.1-2008.
+ """
+ # Read the header information.
+ buf = tarfile.fileobj.read(self._block(self.size))
+
+ # A pax header stores supplemental information for either
+ # the following file (extended) or all following files
+ # (global).
+ if self.type == XGLTYPE:
+ pax_headers = tarfile.pax_headers
+ else:
+ pax_headers = tarfile.pax_headers.copy()
+
+ # Check if the pax header contains a hdrcharset field. This tells us
+ # the encoding of the path, linkpath, uname and gname fields. Normally,
+ # these fields are UTF-8 encoded, but POSIX.1-2008 allows tar
+ # implementations to store them as raw binary strings if the
+ # translation to UTF-8 fails.
+ match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
+ if match is not None:
+ pax_headers["hdrcharset"] = match.group(1).decode("utf8")
+
+ # For the time being, we don't care about anything other than "BINARY".
+ # The only other value that is currently allowed by the standard is
+ # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
+ hdrcharset = pax_headers.get("hdrcharset")
+ if hdrcharset == "BINARY":
+ encoding = tarfile.encoding
+ else:
+ encoding = "utf8"
+
+ # Parse pax header information. A record looks like this:
+ # "%d %s=%s\n" % (length, keyword, value). length is the size
+ # of the complete record including the length field itself and
+ # the newline. keyword and value are both UTF-8 encoded strings.
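+ # For example, b"20 path=example.txt\n" is one complete record: the
+ # leading "20" counts every byte of the record, including the digits
+ # themselves, the blank, the "=", the value and the trailing newline.
+ # ("example.txt" is just an illustrative value.)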
+ regex = re.compile(br"(\d+) ([^=]+)=")
+ pos = 0
+ while True:
+ match = regex.match(buf, pos)
+ if not match:
+ break
+
+ length, keyword = match.groups()
+ length = int(length)
+ value = buf[match.end(2) + 1:match.start(1) + length - 1]
+
+ # Normally, we could just use "utf8" as the encoding and "strict"
+ # as the error handler, but we better not take the risk. For
+ # example, GNU tar <= 1.23 is known to store filenames it cannot
+ # translate to UTF-8 as raw strings (unfortunately without a
+ # hdrcharset=BINARY header).
+ # We first try the strict standard encoding, and if that fails we
+ # fall back on the user's encoding and error handler.
+ keyword = self._decode_pax_field(keyword, "utf8", "utf8",
+ tarfile.errors)
+ if keyword in PAX_NAME_FIELDS:
+ value = self._decode_pax_field(value, encoding, tarfile.encoding,
+ tarfile.errors)
+ else:
+ value = self._decode_pax_field(value, "utf8", "utf8",
+ tarfile.errors)
+
+ pax_headers[keyword] = value
+ pos += length
+
+ # Fetch the next header.
+ try:
+ next = self.fromtarfile(tarfile)
+ except HeaderError:
+ raise SubsequentHeaderError("missing or bad subsequent header")
+
+ # Process GNU sparse information.
+ if "GNU.sparse.map" in pax_headers:
+ # GNU extended sparse format version 0.1.
+ self._proc_gnusparse_01(next, pax_headers)
+
+ elif "GNU.sparse.size" in pax_headers:
+ # GNU extended sparse format version 0.0.
+ self._proc_gnusparse_00(next, pax_headers, buf)
+
+ elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
+ # GNU extended sparse format version 1.0.
+ self._proc_gnusparse_10(next, pax_headers, tarfile)
+
+ if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
+ # Patch the TarInfo object with the extended header info.
+ next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
+ next.offset = self.offset
+
+ if "size" in pax_headers:
+ # If the extended header replaces the size field,
+ # we need to recalculate the offset where the next
+ # header starts.
+ offset = next.offset_data
+ if next.isreg() or next.type not in SUPPORTED_TYPES:
+ offset += next._block(next.size)
+ tarfile.offset = offset
+
+ return next
+
+ def _proc_gnusparse_00(self, next, pax_headers, buf):
+ """Process a GNU tar extended sparse header, version 0.0.
+ """
+ offsets = []
+ for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
+ offsets.append(int(match.group(1)))
+ numbytes = []
+ for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
+ numbytes.append(int(match.group(1)))
+ next.sparse = list(zip(offsets, numbytes))
+
+ def _proc_gnusparse_01(self, next, pax_headers):
+ """Process a GNU tar extended sparse header, version 0.1.
+ """
+ sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
+ next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+ def _proc_gnusparse_10(self, next, pax_headers, tarfile):
+ """Process a GNU tar extended sparse header, version 1.0.
+ """
+ fields = None
+ sparse = []
+ buf = tarfile.fileobj.read(BLOCKSIZE)
+ fields, buf = buf.split(b"\n", 1)
+ fields = int(fields)
+ while len(sparse) < fields * 2:
+ if b"\n" not in buf:
+ buf += tarfile.fileobj.read(BLOCKSIZE)
+ number, buf = buf.split(b"\n", 1)
+ sparse.append(int(number))
+ next.offset_data = tarfile.fileobj.tell()
+ next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+ def _apply_pax_info(self, pax_headers, encoding, errors):
+ """Replace fields with supplemental information from a previous
+ pax extended or global header.
+ """
+ for keyword, value in pax_headers.items():
+ if keyword == "GNU.sparse.name":
+ setattr(self, "path", value)
+ elif keyword == "GNU.sparse.size":
+ setattr(self, "size", int(value))
+ elif keyword == "GNU.sparse.realsize":
+ setattr(self, "size", int(value))
+ elif keyword in PAX_FIELDS:
+ if keyword in PAX_NUMBER_FIELDS:
+ try:
+ value = PAX_NUMBER_FIELDS[keyword](value)
+ except ValueError:
+ value = 0
+ if keyword == "path":
+ value = value.rstrip("/")
+ setattr(self, keyword, value)
+
+ self.pax_headers = pax_headers.copy()
+
+ def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
+ """Decode a single field from a pax record.
+ """
+ try:
+ return value.decode(encoding, "strict")
+ except UnicodeDecodeError:
+ return value.decode(fallback_encoding, fallback_errors)
+
+ def _block(self, count):
+ """Round up a byte count by BLOCKSIZE and return it,
+ e.g. _block(834) => 1024.
+ """
+ blocks, remainder = divmod(count, BLOCKSIZE)
+ if remainder:
+ blocks += 1
+ return blocks * BLOCKSIZE
+
+ def isreg(self):
+ return self.type in REGULAR_TYPES
+ def isfile(self):
+ return self.isreg()
+ def isdir(self):
+ return self.type == DIRTYPE
+ def issym(self):
+ return self.type == SYMTYPE
+ def islnk(self):
+ return self.type == LNKTYPE
+ def ischr(self):
+ return self.type == CHRTYPE
+ def isblk(self):
+ return self.type == BLKTYPE
+ def isfifo(self):
+ return self.type == FIFOTYPE
+ def issparse(self):
+ return self.sparse is not None
+ def isdev(self):
+ return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
+# class TarInfo
+
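+# Illustrative sketch (not part of the module itself): TarInfo objects each
+# describe one archive member and are usually obtained from
+# TarFile.getmembers().  The archive name below is hypothetical.
+#
+#     tf = TarFile.open("release.tar")
+#     for info in tf.getmembers():
+#         kind = "dir" if info.isdir() else "file" if info.isreg() else "other"
+#         print(info.name, info.size, info.mtime, kind)
+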
+class TarFile(object):
+ """The TarFile Class provides an interface to tar archives.
+ """
+
+ debug = 0 # May be set from 0 (no msgs) to 3 (all msgs)
+
+ dereference = False # If true, add content of linked file to the
+ # tar file, else the link.
+
+ ignore_zeros = False # If true, skips empty or invalid blocks and
+ # continues processing.
+
+ errorlevel = 1 # If 0, fatal errors only appear in debug
+ # messages (if debug >= 0). If > 0, errors
+ # are passed to the caller as exceptions.
+
+ format = DEFAULT_FORMAT # The format to use when creating an archive.
+
+ encoding = ENCODING # Encoding for 8-bit character strings.
+
+ errors = None # Error handler for unicode conversion.
+
+ tarinfo = TarInfo # The default TarInfo class to use.
+
+ fileobject = ExFileObject # The default ExFileObject class to use.
+
+ def __init__(self, name=None, mode="r", fileobj=None, format=None,
+ tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
+ errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None):
+ """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
+ read from an existing archive, 'a' to append data to an existing
+ file or 'w' to create a new file overwriting an existing one. `mode'
+ defaults to 'r'.
+ If `fileobj' is given, it is used for reading or writing data. If it
+ can be determined, `mode' is overridden by `fileobj's mode.
+ `fileobj' is not closed when TarFile is closed.
+ """
+ if len(mode) > 1 or mode not in "raw":
+ raise ValueError("mode must be 'r', 'a' or 'w'")
+ self.mode = mode
+ self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]
+
+ if not fileobj:
+ if self.mode == "a" and not os.path.exists(name):
+ # Create nonexistent files in append mode.
+ self.mode = "w"
+ self._mode = "wb"
+ fileobj = bltn_open(name, self._mode)
+ self._extfileobj = False
+ else:
+ if name is None and hasattr(fileobj, "name"):
+ name = fileobj.name
+ if hasattr(fileobj, "mode"):
+ self._mode = fileobj.mode
+ self._extfileobj = True
+ self.name = os.path.abspath(name) if name else None
+ self.fileobj = fileobj
+
+ # Init attributes.
+ if format is not None:
+ self.format = format
+ if tarinfo is not None:
+ self.tarinfo = tarinfo
+ if dereference is not None:
+ self.dereference = dereference
+ if ignore_zeros is not None:
+ self.ignore_zeros = ignore_zeros
+ if encoding is not None:
+ self.encoding = encoding
+ self.errors = errors
+
+ if pax_headers is not None and self.format == PAX_FORMAT:
+ self.pax_headers = pax_headers
+ else:
+ self.pax_headers = {}
+
+ if debug is not None:
+ self.debug = debug
+ if errorlevel is not None:
+ self.errorlevel = errorlevel
+
+ # Init datastructures.
+ self.closed = False
+ self.members = [] # list of members as TarInfo objects
+ self._loaded = False # flag if all members have been read
+ self.offset = self.fileobj.tell()
+ # current position in the archive file
+ self.inodes = {} # dictionary caching the inodes of
+ # archive members already added
+
+ try:
+ if self.mode == "r":
+ self.firstmember = None
+ self.firstmember = self.next()
+
+ if self.mode == "a":
+ # Move to the end of the archive,
+ # before the first empty block.
+ while True:
+ self.fileobj.seek(self.offset)
+ try:
+ tarinfo = self.tarinfo.fromtarfile(self)
+ self.members.append(tarinfo)
+ except EOFHeaderError:
+ self.fileobj.seek(self.offset)
+ break
+ except HeaderError as e:
+ raise ReadError(str(e))
+
+ if self.mode in "aw":
+ self._loaded = True
+
+ if self.pax_headers:
+ buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
+ self.fileobj.write(buf)
+ self.offset += len(buf)
+ except:
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+ raise
+
+ #--------------------------------------------------------------------------
+ # Below are the classmethods which act as alternate constructors to the
+ # TarFile class. The open() method is the only one that is needed for
+ # public use; it is the "super"-constructor and is able to select an
+ # adequate "sub"-constructor for a particular compression using the mapping
+ # from OPEN_METH.
+ #
+ # This concept allows one to subclass TarFile without losing the comfort of
+ # the super-constructor. A sub-constructor is registered and made available
+ # by adding it to the mapping in OPEN_METH.
+
+ @classmethod
+ def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
+ """Open a tar archive for reading, writing or appending. Return
+ an appropriate TarFile class.
+
+ mode:
+ 'r' or 'r:*' open for reading with transparent compression
+ 'r:' open for reading exclusively uncompressed
+ 'r:gz' open for reading with gzip compression
+ 'r:bz2' open for reading with bzip2 compression
+ 'a' or 'a:' open for appending, creating the file if necessary
+ 'w' or 'w:' open for writing without compression
+ 'w:gz' open for writing with gzip compression
+ 'w:bz2' open for writing with bzip2 compression
+
+ 'r|*' open a stream of tar blocks with transparent compression
+ 'r|' open an uncompressed stream of tar blocks for reading
+ 'r|gz' open a gzip compressed stream of tar blocks
+ 'r|bz2' open a bzip2 compressed stream of tar blocks
+ 'w|' open an uncompressed stream for writing
+ 'w|gz' open a gzip compressed stream for writing
+ 'w|bz2' open a bzip2 compressed stream for writing
+ """
+
+ if not name and not fileobj:
+ raise ValueError("nothing to open")
+
+ if mode in ("r", "r:*"):
+ # Find out which *open() is appropriate for opening the file.
+ for comptype in cls.OPEN_METH:
+ func = getattr(cls, cls.OPEN_METH[comptype])
+ if fileobj is not None:
+ saved_pos = fileobj.tell()
+ try:
+ return func(name, "r", fileobj, **kwargs)
+ except (ReadError, CompressionError) as e:
+ if fileobj is not None:
+ fileobj.seek(saved_pos)
+ continue
+ raise ReadError("file could not be opened successfully")
+
+ elif ":" in mode:
+ filemode, comptype = mode.split(":", 1)
+ filemode = filemode or "r"
+ comptype = comptype or "tar"
+
+ # Select the *open() function according to
+ # given compression.
+ if comptype in cls.OPEN_METH:
+ func = getattr(cls, cls.OPEN_METH[comptype])
+ else:
+ raise CompressionError("unknown compression type %r" % comptype)
+ return func(name, filemode, fileobj, **kwargs)
+
+ elif "|" in mode:
+ filemode, comptype = mode.split("|", 1)
+ filemode = filemode or "r"
+ comptype = comptype or "tar"
+
+ if filemode not in "rw":
+ raise ValueError("mode must be 'r' or 'w'")
+
+ stream = _Stream(name, filemode, comptype, fileobj, bufsize)
+ try:
+ t = cls(name, filemode, stream, **kwargs)
+ except:
+ stream.close()
+ raise
+ t._extfileobj = False
+ return t
+
+ elif mode in "aw":
+ return cls.taropen(name, mode, fileobj, **kwargs)
+
+ raise ValueError("undiscernible mode")
+
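+ # Illustrative sketch (not part of the class itself): typical uses of
+ # open().  The file names below are hypothetical.
+ #
+ #     TarFile.open("src.tar")                  # read, compression detected
+ #     TarFile.open("src.tar.gz", "r:gz")       # read, gzip only
+ #     with TarFile.open("out.tar.bz2", "w:bz2") as tf:
+ #         tf.add("src")                        # write a new bzip2 archive
+ #     TarFile.open(fileobj=sys.stdin.buffer, mode="r|*")   # non-seekable stream
+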
+ @classmethod
+ def taropen(cls, name, mode="r", fileobj=None, **kwargs):
+ """Open uncompressed tar archive name for reading or writing.
+ """
+ if len(mode) > 1 or mode not in "raw":
+ raise ValueError("mode must be 'r', 'a' or 'w'")
+ return cls(name, mode, fileobj, **kwargs)
+
+ @classmethod
+ def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+ """Open gzip compressed tar archive name for reading or writing.
+ Appending is not allowed.
+ """
+ if len(mode) > 1 or mode not in "rw":
+ raise ValueError("mode must be 'r' or 'w'")
+
+ try:
+ import gzip
+ gzip.GzipFile
+ except (ImportError, AttributeError):
+ raise CompressionError("gzip module is not available")
+
+ extfileobj = fileobj is not None
+ try:
+ fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj)
+ t = cls.taropen(name, mode, fileobj, **kwargs)
+ except IOError:
+ if not extfileobj and fileobj is not None:
+ fileobj.close()
+ if fileobj is None:
+ raise
+ raise ReadError("not a gzip file")
+ except:
+ if not extfileobj and fileobj is not None:
+ fileobj.close()
+ raise
+ t._extfileobj = extfileobj
+ return t
+
+ @classmethod
+ def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+ """Open bzip2 compressed tar archive name for reading or writing.
+ Appending is not allowed.
+ """
+ if len(mode) > 1 or mode not in "rw":
+ raise ValueError("mode must be 'r' or 'w'.")
+
+ try:
+ import bz2
+ except ImportError:
+ raise CompressionError("bz2 module is not available")
+
+ if fileobj is not None:
+ fileobj = _BZ2Proxy(fileobj, mode)
+ else:
+ fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)
+
+ try:
+ t = cls.taropen(name, mode, fileobj, **kwargs)
+ except (IOError, EOFError):
+ fileobj.close()
+ raise ReadError("not a bzip2 file")
+ t._extfileobj = False
+ return t
+
+ # All *open() methods are registered here.
+ OPEN_METH = {
+ "tar": "taropen", # uncompressed tar
+ "gz": "gzopen", # gzip compressed tar
+ "bz2": "bz2open" # bzip2 compressed tar
+ }
+
+ #--------------------------------------------------------------------------
+ # The public methods which TarFile provides:
+
+ def close(self):
+ """Close the TarFile. In write-mode, two finishing zero blocks are
+ appended to the archive.
+ """
+ if self.closed:
+ return
+
+ if self.mode in "aw":
+ self.fileobj.write(NUL * (BLOCKSIZE * 2))
+ self.offset += (BLOCKSIZE * 2)
+ # fill up the end with zero-blocks
+ # (like option -b20 for tar does)
+ blocks, remainder = divmod(self.offset, RECORDSIZE)
+ if remainder > 0:
+ self.fileobj.write(NUL * (RECORDSIZE - remainder))
+
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+
+ def getmember(self, name):
+ """Return a TarInfo object for member `name'. If `name' can not be
+ found in the archive, KeyError is raised. If a member occurs more
+ than once in the archive, its last occurrence is assumed to be the
+ most up-to-date version.
+ """
+ tarinfo = self._getmember(name)
+ if tarinfo is None:
+ raise KeyError("filename %r not found" % name)
+ return tarinfo
+
+ def getmembers(self):
+ """Return the members of the archive as a list of TarInfo objects. The
+ list has the same order as the members in the archive.
+ """
+ self._check()
+ if not self._loaded: # if we want to obtain a list of
+ self._load() # all members, we first have to
+ # scan the whole archive.
+ return self.members
+
+ def getnames(self):
+ """Return the members of the archive as a list of their names. It has
+ the same order as the list returned by getmembers().
+ """
+ return [tarinfo.name for tarinfo in self.getmembers()]
+
+ def gettarinfo(self, name=None, arcname=None, fileobj=None):
+ """Create a TarInfo object for either the file `name' or the file
+ object `fileobj' (using os.fstat on its file descriptor). You can
+ modify some of the TarInfo's attributes before you add it using
+ addfile(). If given, `arcname' specifies an alternative name for the
+ file in the archive.
+ """
+ self._check("aw")
+
+ # When fileobj is given, replace name by
+ # fileobj's real name.
+ if fileobj is not None:
+ name = fileobj.name
+
+ # Build the name of the member in the archive:
+ # backslashes are converted to forward slashes and
+ # absolute paths are turned into relative paths.
+ if arcname is None:
+ arcname = name
+ drv, arcname = os.path.splitdrive(arcname)
+ arcname = arcname.replace(os.sep, "/")
+ arcname = arcname.lstrip("/")
+
+ # Now, fill the TarInfo object with
+ # information specific for the file.
+ tarinfo = self.tarinfo()
+ tarinfo.tarfile = self
+
+ # Use os.stat or os.lstat, depending on platform
+ # and if symlinks shall be resolved.
+ if fileobj is None:
+ if hasattr(os, "lstat") and not self.dereference:
+ statres = os.lstat(name)
+ else:
+ statres = os.stat(name)
+ else:
+ statres = os.fstat(fileobj.fileno())
+ linkname = ""
+
+ stmd = statres.st_mode
+ if stat.S_ISREG(stmd):
+ inode = (statres.st_ino, statres.st_dev)
+ if not self.dereference and statres.st_nlink > 1 and \
+ inode in self.inodes and arcname != self.inodes[inode]:
+ # Is it a hardlink to an already
+ # archived file?
+ type = LNKTYPE
+ linkname = self.inodes[inode]
+ else:
+ # The inode is added only if it's valid.
+ # For win32 it is always 0.
+ type = REGTYPE
+ if inode[0]:
+ self.inodes[inode] = arcname
+ elif stat.S_ISDIR(stmd):
+ type = DIRTYPE
+ elif stat.S_ISFIFO(stmd):
+ type = FIFOTYPE
+ elif stat.S_ISLNK(stmd):
+ type = SYMTYPE
+ linkname = os.readlink(name)
+ elif stat.S_ISCHR(stmd):
+ type = CHRTYPE
+ elif stat.S_ISBLK(stmd):
+ type = BLKTYPE
+ else:
+ return None
+
+ # Fill the TarInfo object with all
+ # information we can get.
+ tarinfo.name = arcname
+ tarinfo.mode = stmd
+ tarinfo.uid = statres.st_uid
+ tarinfo.gid = statres.st_gid
+ if type == REGTYPE:
+ tarinfo.size = statres.st_size
+ else:
+ tarinfo.size = 0
+ tarinfo.mtime = statres.st_mtime
+ tarinfo.type = type
+ tarinfo.linkname = linkname
+ if pwd:
+ try:
+ tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
+ except KeyError:
+ pass
+ if grp:
+ try:
+ tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
+ except KeyError:
+ pass
+
+ if type in (CHRTYPE, BLKTYPE):
+ if hasattr(os, "major") and hasattr(os, "minor"):
+ tarinfo.devmajor = os.major(statres.st_rdev)
+ tarinfo.devminor = os.minor(statres.st_rdev)
+ return tarinfo
+
+ def list(self, verbose=True):
+ """Print a table of contents to sys.stdout. If `verbose' is False, only
+ the names of the members are printed. If it is True, an `ls -l'-like
+ output is produced.
+ """
+ self._check()
+
+ for tarinfo in self:
+ if verbose:
+ print(filemode(tarinfo.mode), end=' ')
+ print("%s/%s" % (tarinfo.uname or tarinfo.uid,
+ tarinfo.gname or tarinfo.gid), end=' ')
+ if tarinfo.ischr() or tarinfo.isblk():
+ print("%10s" % ("%d,%d" \
+ % (tarinfo.devmajor, tarinfo.devminor)), end=' ')
+ else:
+ print("%10d" % tarinfo.size, end=' ')
+ print("%d-%02d-%02d %02d:%02d:%02d" \
+ % time.localtime(tarinfo.mtime)[:6], end=' ')
+
+ print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ')
+
+ if verbose:
+ if tarinfo.issym():
+ print("->", tarinfo.linkname, end=' ')
+ if tarinfo.islnk():
+ print("link to", tarinfo.linkname, end=' ')
+ print()
+
+ def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
+ """Add the file `name' to the archive. `name' may be any type of file
+ (directory, fifo, symbolic link, etc.). If given, `arcname'
+ specifies an alternative name for the file in the archive.
+ Directories are added recursively by default. This can be avoided by
+ setting `recursive' to False. `exclude' is a function that should
+ return True for each filename to be excluded. `filter' is a function
+ that expects a TarInfo object argument and returns the changed
+ TarInfo object, if it returns None the TarInfo object will be
+ excluded from the archive.
+ """
+ self._check("aw")
+
+ if arcname is None:
+ arcname = name
+
+ # Exclude pathnames.
+ if exclude is not None:
+ import warnings
+ warnings.warn("use the filter argument instead",
+ DeprecationWarning, 2)
+ if exclude(name):
+ self._dbg(2, "tarfile: Excluded %r" % name)
+ return
+
+ # Skip if somebody tries to archive the archive...
+ if self.name is not None and os.path.abspath(name) == self.name:
+ self._dbg(2, "tarfile: Skipped %r" % name)
+ return
+
+ self._dbg(1, name)
+
+ # Create a TarInfo object from the file.
+ tarinfo = self.gettarinfo(name, arcname)
+
+ if tarinfo is None:
+ self._dbg(1, "tarfile: Unsupported type %r" % name)
+ return
+
+ # Change or exclude the TarInfo object.
+ if filter is not None:
+ tarinfo = filter(tarinfo)
+ if tarinfo is None:
+ self._dbg(2, "tarfile: Excluded %r" % name)
+ return
+
+ # Append the tar header and data to the archive.
+ if tarinfo.isreg():
+ f = bltn_open(name, "rb")
+ self.addfile(tarinfo, f)
+ f.close()
+
+ elif tarinfo.isdir():
+ self.addfile(tarinfo)
+ if recursive:
+ for f in os.listdir(name):
+ self.add(os.path.join(name, f), os.path.join(arcname, f),
+ recursive, exclude, filter=filter)
+
+ else:
+ self.addfile(tarinfo)
+
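+ # Illustrative sketch (not part of the class itself): adding a tree while
+ # dropping compiled files via the `filter' argument; returning None from
+ # the filter excludes the member.  The paths below are hypothetical.
+ #
+ #     def skip_pyc(tarinfo):
+ #         return None if tarinfo.name.endswith(".pyc") else tarinfo
+ #
+ #     tf = TarFile.open("project.tar", "w")
+ #     tf.add("project", filter=skip_pyc)
+ #     tf.close()
+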
+ def addfile(self, tarinfo, fileobj=None):
+ """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
+ given, tarinfo.size bytes are read from it and added to the archive.
+ You can create TarInfo objects using gettarinfo().
+ On Windows platforms, `fileobj' should always be opened with mode
+ 'rb' so that the number of bytes read matches tarinfo.size.
+ """
+ self._check("aw")
+
+ tarinfo = copy.copy(tarinfo)
+
+ buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
+ self.fileobj.write(buf)
+ self.offset += len(buf)
+
+ # If there's data to follow, append it.
+ if fileobj is not None:
+ copyfileobj(fileobj, self.fileobj, tarinfo.size)
+ blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
+ if remainder > 0:
+ self.fileobj.write(NUL * (BLOCKSIZE - remainder))
+ blocks += 1
+ self.offset += blocks * BLOCKSIZE
+
+ self.members.append(tarinfo)
+
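+ # Illustrative sketch (not part of the class itself): writing an in-memory
+ # buffer as a member of an archive opened for writing.  The size attribute
+ # must be set first, because exactly tarinfo.size bytes are read from the
+ # file object.  The member name below is hypothetical.
+ #
+ #     import io
+ #     data = b"hello world\n"
+ #     info = TarInfo("notes/hello.txt")
+ #     info.size = len(data)
+ #     tf.addfile(info, io.BytesIO(data))
+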
+ def extractall(self, path=".", members=None):
+ """Extract all members from the archive to the current working
+ directory and set owner, modification time and permissions on
+ directories afterwards. `path' specifies a different directory
+ to extract to. `members' is optional and must be a subset of the
+ list returned by getmembers().
+ """
+ directories = []
+
+ if members is None:
+ members = self
+
+ for tarinfo in members:
+ if tarinfo.isdir():
+ # Extract directories with a safe mode.
+ directories.append(tarinfo)
+ tarinfo = copy.copy(tarinfo)
+ tarinfo.mode = 0o700
+ # Do not set_attrs directories, as we will do that further down
+ self.extract(tarinfo, path, set_attrs=not tarinfo.isdir())
+
+ # Reverse sort directories.
+ directories.sort(key=lambda a: a.name)
+ directories.reverse()
+
+ # Set correct owner, mtime and filemode on directories.
+ for tarinfo in directories:
+ dirpath = os.path.join(path, tarinfo.name)
+ try:
+ self.chown(tarinfo, dirpath)
+ self.utime(tarinfo, dirpath)
+ self.chmod(tarinfo, dirpath)
+ except ExtractError as e:
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
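+ # Illustrative sketch (not part of the class itself): extracting only a
+ # subset of the archive.  `members' must be a subset of getmembers();
+ # the prefix and target directory below are hypothetical.
+ #
+ #     wanted = [m for m in tf.getmembers() if m.name.startswith("docs/")]
+ #     tf.extractall(path="/tmp/unpacked", members=wanted)
+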
+ def extract(self, member, path="", set_attrs=True):
+ """Extract a member from the archive to the current working directory,
+ using its full name. Its file information is extracted as accurately
+ as possible. `member' may be a filename or a TarInfo object. You can
+ specify a different directory using `path'. File attributes (owner,
+ mtime, mode) are set unless `set_attrs' is False.
+ """
+ self._check("r")
+
+ if isinstance(member, str):
+ tarinfo = self.getmember(member)
+ else:
+ tarinfo = member
+
+ # Prepare the link target for makelink().
+ if tarinfo.islnk():
+ tarinfo._link_target = os.path.join(path, tarinfo.linkname)
+
+ try:
+ self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
+ set_attrs=set_attrs)
+ except EnvironmentError as e:
+ if self.errorlevel > 0:
+ raise
+ else:
+ if e.filename is None:
+ self._dbg(1, "tarfile: %s" % e.strerror)
+ else:
+ self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
+ except ExtractError as e:
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
+ def extractfile(self, member):
+ """Extract a member from the archive as a file object. `member' may be
+ a filename or a TarInfo object. If `member' is a regular file, a
+ file-like object is returned. If `member' is a link, a file-like
+ object is constructed from the link's target. If `member' is none of
+ the above, None is returned.
+ The file-like object is read-only and provides the following
+ methods: read(), readline(), readlines(), seek() and tell()
+ """
+ self._check("r")
+
+ if isinstance(member, str):
+ tarinfo = self.getmember(member)
+ else:
+ tarinfo = member
+
+ if tarinfo.isreg():
+ return self.fileobject(self, tarinfo)
+
+ elif tarinfo.type not in SUPPORTED_TYPES:
+ # If a member's type is unknown, it is treated as a
+ # regular file.
+ return self.fileobject(self, tarinfo)
+
+ elif tarinfo.islnk() or tarinfo.issym():
+ if isinstance(self.fileobj, _Stream):
+ # A small but ugly workaround for the case that someone tries
+ # to extract a (sym)link as a file-object from a non-seekable
+ # stream of tar blocks.
+ raise StreamError("cannot extract (sym)link as file object")
+ else:
+ # A (sym)link's file object is its target's file object.
+ return self.extractfile(self._find_link_target(tarinfo))
+ else:
+ # If there's no data associated with the member (directory, chrdev,
+ # blkdev, etc.), return None instead of a file object.
+ return None
+
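+ # Illustrative sketch (not part of the class itself): extractfile() returns
+ # None for members that carry no data, so the result should be checked.
+ # The member name below is hypothetical.
+ #
+ #     f = tf.extractfile("data/report.csv")
+ #     if f is not None:          # directories and devices yield None
+ #         header = f.readline()
+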
+ def _extract_member(self, tarinfo, targetpath, set_attrs=True):
+ """Extract the TarInfo object tarinfo to a physical
+ file called targetpath.
+ """
+ # Fetch the TarInfo object for the given name
+ # and build the destination pathname, replacing
+ # forward slashes to platform specific separators.
+ targetpath = targetpath.rstrip("/")
+ targetpath = targetpath.replace("/", os.sep)
+
+ # Create all upper directories.
+ upperdirs = os.path.dirname(targetpath)
+ if upperdirs and not os.path.exists(upperdirs):
+ # Create directories that are not part of the archive with
+ # default permissions.
+ os.makedirs(upperdirs)
+
+ if tarinfo.islnk() or tarinfo.issym():
+ self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
+ else:
+ self._dbg(1, tarinfo.name)
+
+ if tarinfo.isreg():
+ self.makefile(tarinfo, targetpath)
+ elif tarinfo.isdir():
+ self.makedir(tarinfo, targetpath)
+ elif tarinfo.isfifo():
+ self.makefifo(tarinfo, targetpath)
+ elif tarinfo.ischr() or tarinfo.isblk():
+ self.makedev(tarinfo, targetpath)
+ elif tarinfo.islnk() or tarinfo.issym():
+ self.makelink(tarinfo, targetpath)
+ elif tarinfo.type not in SUPPORTED_TYPES:
+ self.makeunknown(tarinfo, targetpath)
+ else:
+ self.makefile(tarinfo, targetpath)
+
+ if set_attrs:
+ self.chown(tarinfo, targetpath)
+ if not tarinfo.issym():
+ self.chmod(tarinfo, targetpath)
+ self.utime(tarinfo, targetpath)
+
+ #--------------------------------------------------------------------------
+ # Below are the different file methods. They are called via
+ # _extract_member() when extract() is called. They can be replaced in a
+ # subclass to implement other functionality.
+
+ def makedir(self, tarinfo, targetpath):
+ """Make a directory called targetpath.
+ """
+ try:
+ # Use a safe mode for the directory, the real mode is set
+ # later in _extract_member().
+ os.mkdir(targetpath, 0o700)
+ except EnvironmentError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ def makefile(self, tarinfo, targetpath):
+ """Make a file called targetpath.
+ """
+ source = self.fileobj
+ source.seek(tarinfo.offset_data)
+ target = bltn_open(targetpath, "wb")
+ if tarinfo.sparse is not None:
+ for offset, size in tarinfo.sparse:
+ target.seek(offset)
+ copyfileobj(source, target, size)
+ else:
+ copyfileobj(source, target, tarinfo.size)
+ target.seek(tarinfo.size)
+ target.truncate()
+ target.close()
+
+ def makeunknown(self, tarinfo, targetpath):
+ """Make a file from a TarInfo object with an unknown type
+ at targetpath.
+ """
+ self.makefile(tarinfo, targetpath)
+ self._dbg(1, "tarfile: Unknown file type %r, " \
+ "extracted as regular file." % tarinfo.type)
+
+ def makefifo(self, tarinfo, targetpath):
+ """Make a fifo called targetpath.
+ """
+ if hasattr(os, "mkfifo"):
+ os.mkfifo(targetpath)
+ else:
+ raise ExtractError("fifo not supported by system")
+
+ def makedev(self, tarinfo, targetpath):
+ """Make a character or block device called targetpath.
+ """
+ if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
+ raise ExtractError("special devices not supported by system")
+
+ mode = tarinfo.mode
+ if tarinfo.isblk():
+ mode |= stat.S_IFBLK
+ else:
+ mode |= stat.S_IFCHR
+
+ os.mknod(targetpath, mode,
+ os.makedev(tarinfo.devmajor, tarinfo.devminor))
+
+ def makelink(self, tarinfo, targetpath):
+ """Make a (symbolic) link called targetpath. If it cannot be created
+ (platform limitation), we try to make a copy of the referenced file
+ instead of a link.
+ """
+ try:
+ # For systems that support symbolic and hard links.
+ if tarinfo.issym():
+ os.symlink(tarinfo.linkname, targetpath)
+ else:
+ # See extract().
+ if os.path.exists(tarinfo._link_target):
+ os.link(tarinfo._link_target, targetpath)
+ else:
+ self._extract_member(self._find_link_target(tarinfo),
+ targetpath)
+ except symlink_exception:
+ # The platform cannot create the link; fall back to making a copy
+ # of the referenced file instead, as described in the docstring.
+ try:
+ self._extract_member(self._find_link_target(tarinfo),
+ targetpath)
+ except KeyError:
+ raise ExtractError("unable to resolve link inside archive")
+
+ def chown(self, tarinfo, targetpath):
+ """Set owner of targetpath according to tarinfo.
+ """
+ if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
+ # We have to be root to do so.
+ try:
+ g = grp.getgrnam(tarinfo.gname)[2]
+ except KeyError:
+ g = tarinfo.gid
+ try:
+ u = pwd.getpwnam(tarinfo.uname)[2]
+ except KeyError:
+ u = tarinfo.uid
+ try:
+ if tarinfo.issym() and hasattr(os, "lchown"):
+ os.lchown(targetpath, u, g)
+ else:
+ if sys.platform != "os2emx":
+ os.chown(targetpath, u, g)
+ except EnvironmentError as e:
+ raise ExtractError("could not change owner")
+
+ def chmod(self, tarinfo, targetpath):
+ """Set file permissions of targetpath according to tarinfo.
+ """
+ if hasattr(os, 'chmod'):
+ try:
+ os.chmod(targetpath, tarinfo.mode)
+ except EnvironmentError as e:
+ raise ExtractError("could not change mode")
+
+ def utime(self, tarinfo, targetpath):
+ """Set modification time of targetpath according to tarinfo.
+ """
+ if not hasattr(os, 'utime'):
+ return
+ try:
+ os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
+ except EnvironmentError as e:
+ raise ExtractError("could not change modification time")
+
+ #--------------------------------------------------------------------------
+ def next(self):
+ """Return the next member of the archive as a TarInfo object, when
+ TarFile is opened for reading. Return None if there are no more
+ members available.
+ """
+ self._check("ra")
+ if self.firstmember is not None:
+ m = self.firstmember
+ self.firstmember = None
+ return m
+
+ # Read the next block.
+ self.fileobj.seek(self.offset)
+ tarinfo = None
+ while True:
+ try:
+ tarinfo = self.tarinfo.fromtarfile(self)
+ except EOFHeaderError as e:
+ if self.ignore_zeros:
+ self._dbg(2, "0x%X: %s" % (self.offset, e))
+ self.offset += BLOCKSIZE
+ continue
+ except InvalidHeaderError as e:
+ if self.ignore_zeros:
+ self._dbg(2, "0x%X: %s" % (self.offset, e))
+ self.offset += BLOCKSIZE
+ continue
+ elif self.offset == 0:
+ raise ReadError(str(e))
+ except EmptyHeaderError:
+ if self.offset == 0:
+ raise ReadError("empty file")
+ except TruncatedHeaderError as e:
+ if self.offset == 0:
+ raise ReadError(str(e))
+ except SubsequentHeaderError as e:
+ raise ReadError(str(e))
+ break
+
+ if tarinfo is not None:
+ self.members.append(tarinfo)
+ else:
+ self._loaded = True
+
+ return tarinfo
+
+ #--------------------------------------------------------------------------
+ # Little helper methods:
+
+ def _getmember(self, name, tarinfo=None, normalize=False):
+ """Find an archive member by name from bottom to top.
+ If tarinfo is given, it is used as the starting point.
+ """
+ # Ensure that all members have been loaded.
+ members = self.getmembers()
+
+ # Limit the member search list up to tarinfo.
+ if tarinfo is not None:
+ members = members[:members.index(tarinfo)]
+
+ if normalize:
+ name = os.path.normpath(name)
+
+ for member in reversed(members):
+ if normalize:
+ member_name = os.path.normpath(member.name)
+ else:
+ member_name = member.name
+
+ if name == member_name:
+ return member
+
+ def _load(self):
+ """Read through the entire archive file and look for readable
+ members.
+ """
+ while True:
+ tarinfo = self.next()
+ if tarinfo is None:
+ break
+ self._loaded = True
+
+ def _check(self, mode=None):
+ """Check if TarFile is still open, and if the operation's mode
+ corresponds to TarFile's mode.
+ """
+ if self.closed:
+ raise IOError("%s is closed" % self.__class__.__name__)
+ if mode is not None and self.mode not in mode:
+ raise IOError("bad operation for mode %r" % self.mode)
+
+ def _find_link_target(self, tarinfo):
+ """Find the target member of a symlink or hardlink member in the
+ archive.
+ """
+ if tarinfo.issym():
+ # Always search the entire archive.
+ linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname
+ limit = None
+ else:
+ # Search the archive before the link, because a hard link is
+ # just a reference to an already archived file.
+ linkname = tarinfo.linkname
+ limit = tarinfo
+
+ member = self._getmember(linkname, tarinfo=limit, normalize=True)
+ if member is None:
+ raise KeyError("linkname %r not found" % linkname)
+ return member
+
+ def __iter__(self):
+ """Provide an iterator object.
+ """
+ if self._loaded:
+ return iter(self.members)
+ else:
+ return TarIter(self)
+
+ def _dbg(self, level, msg):
+ """Write debugging output to sys.stderr.
+ """
+ if level <= self.debug:
+ print(msg, file=sys.stderr)
+
+ def __enter__(self):
+ self._check()
+ return self
+
+ def __exit__(self, type, value, traceback):
+ if type is None:
+ self.close()
+ else:
+ # An exception occurred. We must not call close() because
+ # it would try to write end-of-archive blocks and padding.
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+# class TarFile
+
+class TarIter(object):
+ """Iterator Class.
+
+ for tarinfo in TarFile(...):
+ suite...
+ """
+
+ def __init__(self, tarfile):
+ """Construct a TarIter object.
+ """
+ self.tarfile = tarfile
+ self.index = 0
+ def __iter__(self):
+ """Return iterator object.
+ """
+ return self
+
+ def __next__(self):
+ """Return the next item using TarFile's next() method.
+ When all members have been read, set TarFile as _loaded.
+ """
+ # Fix for SF #1100429: Under rare circumstances it can
+ # happen that getmembers() is called during iteration,
+ # which will cause TarIter to stop prematurely.
+ if not self.tarfile._loaded:
+ tarinfo = self.tarfile.next()
+ if not tarinfo:
+ self.tarfile._loaded = True
+ raise StopIteration
+ else:
+ try:
+ tarinfo = self.tarfile.members[self.index]
+ except IndexError:
+ raise StopIteration
+ self.index += 1
+ return tarinfo
+
+ next = __next__ # for Python 2.x
+
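+# A minimal iteration sketch (the archive name is assumed, shown for illustration
+# only):
+#
+#     with TarFile.open("example.tar.gz") as tf:
+#         for member in tf:                  # iteration goes through TarIter
+#             print(member.name, member.size)
+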
+#--------------------
+# exported functions
+#--------------------
+def is_tarfile(name):
+ """Return True if name points to a tar archive that we
+ are able to handle, else return False.
+ """
+ try:
+ t = open(name)
+ t.close()
+ return True
+ except TarError:
+ return False
+
+bltn_open = open
+open = TarFile.open
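+
+# Note (illustrative): after the rebinding above, module-level ``open`` refers to
+# TarFile.open, while the real builtin stays reachable as ``bltn_open``.  A
+# hypothetical check-then-open therefore reads:
+#
+#     if is_tarfile("archive.tar"):          # "archive.tar" is an assumed path
+#         tf = open("archive.tar")            # resolves to TarFile.open here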
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py
new file mode 100644
index 0000000000..c316fd973a
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py
@@ -0,0 +1,1120 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from __future__ import absolute_import
+
+import os
+import re
+import sys
+
+try:
+ import ssl
+except ImportError: # pragma: no cover
+ ssl = None
+
+if sys.version_info[0] < 3: # pragma: no cover
+ from StringIO import StringIO
+ string_types = basestring,
+ text_type = unicode
+ from types import FileType as file_type
+ import __builtin__ as builtins
+ import ConfigParser as configparser
+ from ._backport import shutil
+ from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
+ from urllib import (urlretrieve, quote as _quote, unquote, url2pathname,
+ pathname2url, ContentTooShortError, splittype)
+
+ def quote(s):
+ if isinstance(s, unicode):
+ s = s.encode('utf-8')
+ return _quote(s)
+
+ import urllib2
+ from urllib2 import (Request, urlopen, URLError, HTTPError,
+ HTTPBasicAuthHandler, HTTPPasswordMgr,
+ HTTPHandler, HTTPRedirectHandler,
+ build_opener)
+ if ssl:
+ from urllib2 import HTTPSHandler
+ import httplib
+ import xmlrpclib
+ import Queue as queue
+ from HTMLParser import HTMLParser
+ import htmlentitydefs
+ raw_input = raw_input
+ from itertools import ifilter as filter
+ from itertools import ifilterfalse as filterfalse
+
+ _userprog = None
+ def splituser(host):
+ """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
+ global _userprog
+ if _userprog is None:
+ import re
+ _userprog = re.compile('^(.*)@(.*)$')
+
+ match = _userprog.match(host)
+ if match: return match.group(1, 2)
+ return None, host
+
+else: # pragma: no cover
+ from io import StringIO
+ string_types = str,
+ text_type = str
+ from io import TextIOWrapper as file_type
+ import builtins
+ import configparser
+ import shutil
+ from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote,
+ unquote, urlsplit, urlunsplit, splittype)
+ from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
+ pathname2url,
+ HTTPBasicAuthHandler, HTTPPasswordMgr,
+ HTTPHandler, HTTPRedirectHandler,
+ build_opener)
+ if ssl:
+ from urllib.request import HTTPSHandler
+ from urllib.error import HTTPError, URLError, ContentTooShortError
+ import http.client as httplib
+ import urllib.request as urllib2
+ import xmlrpc.client as xmlrpclib
+ import queue
+ from html.parser import HTMLParser
+ import html.entities as htmlentitydefs
+ raw_input = input
+ from itertools import filterfalse
+ filter = filter
+
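+# Usage sketch (illustrative): callers import these renamed compatibility objects
+# instead of branching on sys.version_info themselves, for example (as distlib's
+# own modules do via a relative import):
+#
+#     from .compat import text_type, urlparse, queue
+#     parts = urlparse('https://example.com/pkg')    # same call on Python 2 and 3
+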
+try:
+ from ssl import match_hostname, CertificateError
+except ImportError: # pragma: no cover
+ class CertificateError(ValueError):
+ pass
+
+
+ def _dnsname_match(dn, hostname, max_wildcards=1):
+ """Matching according to RFC 6125, section 6.4.3
+
+ http://tools.ietf.org/html/rfc6125#section-6.4.3
+ """
+ pats = []
+ if not dn:
+ return False
+
+ parts = dn.split('.')
+ leftmost, remainder = parts[0], parts[1:]
+
+ wildcards = leftmost.count('*')
+ if wildcards > max_wildcards:
+ # Issue #17980: avoid denials of service by refusing more
+ # than one wildcard per fragment. A survey of established
+ # policy among SSL implementations showed it to be a
+ # reasonable choice.
+ raise CertificateError(
+ "too many wildcards in certificate DNS name: " + repr(dn))
+
+ # speed up common case w/o wildcards
+ if not wildcards:
+ return dn.lower() == hostname.lower()
+
+ # RFC 6125, section 6.4.3, subitem 1.
+ # The client SHOULD NOT attempt to match a presented identifier in which
+ # the wildcard character comprises a label other than the left-most label.
+ if leftmost == '*':
+ # When '*' is a fragment by itself, it matches a non-empty dotless
+ # fragment.
+ pats.append('[^.]+')
+ elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
+ # RFC 6125, section 6.4.3, subitem 3.
+ # The client SHOULD NOT attempt to match a presented identifier
+ # where the wildcard character is embedded within an A-label or
+ # U-label of an internationalized domain name.
+ pats.append(re.escape(leftmost))
+ else:
+ # Otherwise, '*' matches any dotless string, e.g. www*
+ pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
+
+ # add the remaining fragments, ignore any wildcards
+ for frag in remainder:
+ pats.append(re.escape(frag))
+
+ pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+ return pat.match(hostname)
+
+
+ def match_hostname(cert, hostname):
+ """Verify that *cert* (in decoded format as returned by
+ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
+ rules are followed, but IP addresses are not accepted for *hostname*.
+
+ CertificateError is raised on failure. On success, the function
+ returns nothing.
+ """
+ if not cert:
+ raise ValueError("empty or no certificate, match_hostname needs a "
+ "SSL socket or SSL context with either "
+ "CERT_OPTIONAL or CERT_REQUIRED")
+ dnsnames = []
+ san = cert.get('subjectAltName', ())
+ for key, value in san:
+ if key == 'DNS':
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if not dnsnames:
+ # The subject is only checked when there is no dNSName entry
+ # in subjectAltName
+ for sub in cert.get('subject', ()):
+ for key, value in sub:
+ # XXX according to RFC 2818, the most specific Common Name
+ # must be used.
+ if key == 'commonName':
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if len(dnsnames) > 1:
+ raise CertificateError("hostname %r "
+ "doesn't match either of %s"
+ % (hostname, ', '.join(map(repr, dnsnames))))
+ elif len(dnsnames) == 1:
+ raise CertificateError("hostname %r "
+ "doesn't match %r"
+ % (hostname, dnsnames[0]))
+ else:
+ raise CertificateError("no appropriate commonName or "
+ "subjectAltName fields were found")
+
+
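+# Behaviour sketch for the fallback matcher above (hostnames are made up):
+#
+#     _dnsname_match('*.example.com', 'www.example.com')    # matches
+#     _dnsname_match('*.example.com', 'a.b.example.com')    # no match: '*' covers one label
+#     _dnsname_match('*.example.com', 'example.com')        # no match
+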
+try:
+ from types import SimpleNamespace as Container
+except ImportError: # pragma: no cover
+ class Container(object):
+ """
+ A generic container for when multiple values need to be returned
+ """
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+
+try:
+ from shutil import which
+except ImportError: # pragma: no cover
+ # Implementation from Python 3.3
+ def which(cmd, mode=os.F_OK | os.X_OK, path=None):
+ """Given a command, mode, and a PATH string, return the path which
+ conforms to the given mode on the PATH, or None if there is no such
+ file.
+
+ `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
+ of os.environ.get("PATH"), or can be overridden with a custom search
+ path.
+
+ """
+ # Check that a given file can be accessed with the correct mode.
+ # Additionally check that `file` is not a directory, as on Windows
+ # directories pass the os.access check.
+ def _access_check(fn, mode):
+ return (os.path.exists(fn) and os.access(fn, mode)
+ and not os.path.isdir(fn))
+
+ # If we're given a path with a directory part, look it up directly rather
+ # than referring to PATH directories. This includes checking relative to the
+ # current directory, e.g. ./script
+ if os.path.dirname(cmd):
+ if _access_check(cmd, mode):
+ return cmd
+ return None
+
+ if path is None:
+ path = os.environ.get("PATH", os.defpath)
+ if not path:
+ return None
+ path = path.split(os.pathsep)
+
+ if sys.platform == "win32":
+ # The current directory takes precedence on Windows.
+ if not os.curdir in path:
+ path.insert(0, os.curdir)
+
+ # PATHEXT is necessary to check on Windows.
+ pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
+ # See if the given file matches any of the expected path extensions.
+ # This will allow us to short circuit when given "python.exe".
+ # If it does match, only test that one, otherwise we have to try
+ # others.
+ if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
+ files = [cmd]
+ else:
+ files = [cmd + ext for ext in pathext]
+ else:
+ # On other platforms you don't have things like PATHEXT to tell you
+ # what file suffixes are executable, so just pass on cmd as-is.
+ files = [cmd]
+
+ seen = set()
+ for dir in path:
+ normdir = os.path.normcase(dir)
+ if not normdir in seen:
+ seen.add(normdir)
+ for thefile in files:
+ name = os.path.join(dir, thefile)
+ if _access_check(name, mode):
+ return name
+ return None
+
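+# Example call (illustrative; the result depends on the local PATH):
+#
+#     interpreter = which('python')      # e.g. '/usr/bin/python', or None if absent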
+
+# ZipFile is a context manager in 2.7, but not in 2.6
+
+from zipfile import ZipFile as BaseZipFile
+
+if hasattr(BaseZipFile, '__enter__'): # pragma: no cover
+ ZipFile = BaseZipFile
+else: # pragma: no cover
+ from zipfile import ZipExtFile as BaseZipExtFile
+
+ class ZipExtFile(BaseZipExtFile):
+ def __init__(self, base):
+ self.__dict__.update(base.__dict__)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *exc_info):
+ self.close()
+ # return None, so if an exception occurred, it will propagate
+
+ class ZipFile(BaseZipFile):
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *exc_info):
+ self.close()
+ # return None, so if an exception occurred, it will propagate
+
+ def open(self, *args, **kwargs):
+ base = BaseZipFile.open(self, *args, **kwargs)
+ return ZipExtFile(base)
+
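+# Either branch above yields a ZipFile usable as a context manager, so callers
+# can write uniformly (the archive name is assumed):
+#
+#     with ZipFile('dist/some_wheel.whl') as zf:
+#         names = zf.namelist()
+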
+try:
+ from platform import python_implementation
+except ImportError: # pragma: no cover
+ def python_implementation():
+ """Return a string identifying the Python implementation."""
+ if 'PyPy' in sys.version:
+ return 'PyPy'
+ if os.name == 'java':
+ return 'Jython'
+ if sys.version.startswith('IronPython'):
+ return 'IronPython'
+ return 'CPython'
+
+try:
+ import sysconfig
+except ImportError: # pragma: no cover
+ from ._backport import sysconfig
+
+try:
+ callable = callable
+except NameError: # pragma: no cover
+ from collections.abc import Callable
+
+ def callable(obj):
+ return isinstance(obj, Callable)
+
+
+try:
+ fsencode = os.fsencode
+ fsdecode = os.fsdecode
+except AttributeError: # pragma: no cover
+ # Issue #99: on some systems (e.g. containerised),
+ # sys.getfilesystemencoding() returns None, and we need a real value,
+ # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and
+ # sys.getfilesystemencoding(): the return value is "the user’s preference
+ # according to the result of nl_langinfo(CODESET), or None if the
+ # nl_langinfo(CODESET) failed."
+ _fsencoding = sys.getfilesystemencoding() or 'utf-8'
+ if _fsencoding == 'mbcs':
+ _fserrors = 'strict'
+ else:
+ _fserrors = 'surrogateescape'
+
+ def fsencode(filename):
+ if isinstance(filename, bytes):
+ return filename
+ elif isinstance(filename, text_type):
+ return filename.encode(_fsencoding, _fserrors)
+ else:
+ raise TypeError("expect bytes or str, not %s" %
+ type(filename).__name__)
+
+ def fsdecode(filename):
+ if isinstance(filename, text_type):
+ return filename
+ elif isinstance(filename, bytes):
+ return filename.decode(_fsencoding, _fserrors)
+ else:
+ raise TypeError("expect bytes or str, not %s" %
+ type(filename).__name__)
+
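+# Illustrative round trip for the fallback definitions above (filename made up):
+#
+#     raw = fsencode(u'caf\xe9.txt')         # bytes in the filesystem encoding
+#     assert fsdecode(raw) == u'caf\xe9.txt'
+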
+try:
+ from tokenize import detect_encoding
+except ImportError: # pragma: no cover
+ from codecs import BOM_UTF8, lookup
+ import re
+
+ cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")
+
+ def _get_normal_name(orig_enc):
+ """Imitates get_normal_name in tokenizer.c."""
+ # Only care about the first 12 characters.
+ enc = orig_enc[:12].lower().replace("_", "-")
+ if enc == "utf-8" or enc.startswith("utf-8-"):
+ return "utf-8"
+ if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
+ enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
+ return "iso-8859-1"
+ return orig_enc
+
+ def detect_encoding(readline):
+ """
+ The detect_encoding() function is used to detect the encoding that should
+ be used to decode a Python source file. It requires one argument, readline,
+ in the same way as the tokenize() generator.
+
+ It will call readline a maximum of twice, and return the encoding used
+ (as a string) and a list of any lines (left as bytes) it has read in.
+
+ It detects the encoding from the presence of a utf-8 bom or an encoding
+ cookie as specified in pep-0263. If both a bom and a cookie are present,
+ but disagree, a SyntaxError will be raised. If the encoding cookie is an
+ invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
+ 'utf-8-sig' is returned.
+
+ If no encoding is specified, then the default of 'utf-8' will be returned.
+ """
+ try:
+ filename = readline.__self__.name
+ except AttributeError:
+ filename = None
+ bom_found = False
+ encoding = None
+ default = 'utf-8'
+ def read_or_stop():
+ try:
+ return readline()
+ except StopIteration:
+ return b''
+
+ def find_cookie(line):
+ try:
+ # Decode as UTF-8. Either the line is an encoding declaration,
+ # in which case it should be pure ASCII, or it must be UTF-8
+ # per default encoding.
+ line_string = line.decode('utf-8')
+ except UnicodeDecodeError:
+ msg = "invalid or missing encoding declaration"
+ if filename is not None:
+ msg = '{} for {!r}'.format(msg, filename)
+ raise SyntaxError(msg)
+
+ matches = cookie_re.findall(line_string)
+ if not matches:
+ return None
+ encoding = _get_normal_name(matches[0])
+ try:
+ codec = lookup(encoding)
+ except LookupError:
+ # This behaviour mimics the Python interpreter
+ if filename is None:
+ msg = "unknown encoding: " + encoding
+ else:
+ msg = "unknown encoding for {!r}: {}".format(filename,
+ encoding)
+ raise SyntaxError(msg)
+
+ if bom_found:
+ if codec.name != 'utf-8':
+ # This behaviour mimics the Python interpreter
+ if filename is None:
+ msg = 'encoding problem: utf-8'
+ else:
+ msg = 'encoding problem for {!r}: utf-8'.format(filename)
+ raise SyntaxError(msg)
+ encoding += '-sig'
+ return encoding
+
+ first = read_or_stop()
+ if first.startswith(BOM_UTF8):
+ bom_found = True
+ first = first[3:]
+ default = 'utf-8-sig'
+ if not first:
+ return default, []
+
+ encoding = find_cookie(first)
+ if encoding:
+ return encoding, [first]
+
+ second = read_or_stop()
+ if not second:
+ return default, [first]
+
+ encoding = find_cookie(second)
+ if encoding:
+ return encoding, [first, second]
+
+ return default, [first, second]
+
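+# Example (illustrative, with an assumed file name): pass in the readline of a
+# file opened in binary mode, exactly as for tokenize():
+#
+#     with open('setup.py', 'rb') as f:
+#         encoding, lines = detect_encoding(f.readline)   # e.g. ('utf-8', [b'...'])
+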
+# For converting & <-> &amp; etc.
+try:
+ from html import escape
+except ImportError:
+ from cgi import escape
+if sys.version_info[:2] < (3, 4):
+ unescape = HTMLParser().unescape
+else:
+ from html import unescape
+
+try:
+ from collections import ChainMap
+except ImportError: # pragma: no cover
+ from collections import MutableMapping
+
+ try:
+ from reprlib import recursive_repr as _recursive_repr
+ except ImportError:
+ def _recursive_repr(fillvalue='...'):
+ '''
+ Decorator to make a repr function return fillvalue for a recursive
+ call
+ '''
+
+ def decorating_function(user_function):
+ repr_running = set()
+
+ def wrapper(self):
+ key = id(self), get_ident()
+ if key in repr_running:
+ return fillvalue
+ repr_running.add(key)
+ try:
+ result = user_function(self)
+ finally:
+ repr_running.discard(key)
+ return result
+
+ # Can't use functools.wraps() here because of bootstrap issues
+ wrapper.__module__ = getattr(user_function, '__module__')
+ wrapper.__doc__ = getattr(user_function, '__doc__')
+ wrapper.__name__ = getattr(user_function, '__name__')
+ wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
+ return wrapper
+
+ return decorating_function
+
+ class ChainMap(MutableMapping):
+ ''' A ChainMap groups multiple dicts (or other mappings) together
+ to create a single, updateable view.
+
+ The underlying mappings are stored in a list. That list is public and can
+        be accessed or updated using the *maps* attribute. There is no other state.
+
+ Lookups search the underlying mappings successively until a key is found.
+ In contrast, writes, updates, and deletions only operate on the first
+ mapping.
+
+ '''
+
+ def __init__(self, *maps):
+ '''Initialize a ChainMap by setting *maps* to the given mappings.
+ If no mappings are provided, a single empty dictionary is used.
+
+ '''
+ self.maps = list(maps) or [{}] # always at least one map
+
+ def __missing__(self, key):
+ raise KeyError(key)
+
+ def __getitem__(self, key):
+ for mapping in self.maps:
+ try:
+ return mapping[key] # can't use 'key in mapping' with defaultdict
+ except KeyError:
+ pass
+ return self.__missing__(key) # support subclasses that define __missing__
+
+ def get(self, key, default=None):
+ return self[key] if key in self else default
+
+ def __len__(self):
+ return len(set().union(*self.maps)) # reuses stored hash values if possible
+
+ def __iter__(self):
+ return iter(set().union(*self.maps))
+
+ def __contains__(self, key):
+ return any(key in m for m in self.maps)
+
+ def __bool__(self):
+ return any(self.maps)
+
+ @_recursive_repr()
+ def __repr__(self):
+ return '{0.__class__.__name__}({1})'.format(
+ self, ', '.join(map(repr, self.maps)))
+
+ @classmethod
+ def fromkeys(cls, iterable, *args):
+ 'Create a ChainMap with a single dict created from the iterable.'
+ return cls(dict.fromkeys(iterable, *args))
+
+ def copy(self):
+ 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
+ return self.__class__(self.maps[0].copy(), *self.maps[1:])
+
+ __copy__ = copy
+
+ def new_child(self): # like Django's Context.push()
+ 'New ChainMap with a new dict followed by all previous maps.'
+ return self.__class__({}, *self.maps)
+
+ @property
+ def parents(self): # like Django's Context.pop()
+ 'New ChainMap from maps[1:].'
+ return self.__class__(*self.maps[1:])
+
+ def __setitem__(self, key, value):
+ self.maps[0][key] = value
+
+ def __delitem__(self, key):
+ try:
+ del self.maps[0][key]
+ except KeyError:
+ raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+ def popitem(self):
+            'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
+ try:
+ return self.maps[0].popitem()
+ except KeyError:
+ raise KeyError('No keys found in the first mapping.')
+
+ def pop(self, key, *args):
+ 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
+ try:
+ return self.maps[0].pop(key, *args)
+ except KeyError:
+ raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+ def clear(self):
+ 'Clear maps[0], leaving maps[1:] intact.'
+ self.maps[0].clear()
+
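+# Minimal usage sketch (data made up): lookups fall through the chain, writes
+# land in the first mapping only.
+#
+#     defaults = {'colour': 'red', 'user': 'guest'}
+#     cm = ChainMap({'user': 'admin'}, defaults)
+#     cm['user']            # 'admin'
+#     cm['colour']          # 'red'
+#     cm['user'] = 'root'   # only maps[0] is modified
+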
+try:
+ from importlib.util import cache_from_source # Python >= 3.4
+except ImportError: # pragma: no cover
+ try:
+ from imp import cache_from_source
+ except ImportError: # pragma: no cover
+ def cache_from_source(path, debug_override=None):
+ assert path.endswith('.py')
+ if debug_override is None:
+ debug_override = __debug__
+ if debug_override:
+ suffix = 'c'
+ else:
+ suffix = 'o'
+ return path + suffix
+
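+# Illustrative result of the last-resort fallback above (the stdlib versions
+# imported first return PEP 3147 __pycache__ paths instead):
+#
+#     cache_from_source('pkg/module.py')   # -> 'pkg/module.pyc' when __debug__ is true
+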
+try:
+ from collections import OrderedDict
+except ImportError: # pragma: no cover
+## {{{ http://code.activestate.com/recipes/576693/ (r9)
+# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
+# Passes Python2.7's test suite and incorporates all the latest updates.
+ try:
+ from thread import get_ident as _get_ident
+ except ImportError:
+ from dummy_thread import get_ident as _get_ident
+
+ try:
+ from _abcoll import KeysView, ValuesView, ItemsView
+ except ImportError:
+ pass
+
+
+ class OrderedDict(dict):
+ 'Dictionary that remembers insertion order'
+ # An inherited dict maps keys to values.
+ # The inherited dict provides __getitem__, __len__, __contains__, and get.
+ # The remaining methods are order-aware.
+ # Big-O running times for all methods are the same as for regular dictionaries.
+
+ # The internal self.__map dictionary maps keys to links in a doubly linked list.
+ # The circular doubly linked list starts and ends with a sentinel element.
+ # The sentinel element never gets deleted (this simplifies the algorithm).
+ # Each link is stored as a list of length three: [PREV, NEXT, KEY].
+
+ def __init__(self, *args, **kwds):
+ '''Initialize an ordered dictionary. Signature is the same as for
+ regular dictionaries, but keyword arguments are not recommended
+ because their insertion order is arbitrary.
+
+ '''
+ if len(args) > 1:
+ raise TypeError('expected at most 1 arguments, got %d' % len(args))
+ try:
+ self.__root
+ except AttributeError:
+ self.__root = root = [] # sentinel node
+ root[:] = [root, root, None]
+ self.__map = {}
+ self.__update(*args, **kwds)
+
+ def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
+ 'od.__setitem__(i, y) <==> od[i]=y'
+ # Setting a new item creates a new link which goes at the end of the linked
+ # list, and the inherited dictionary is updated with the new key/value pair.
+ if key not in self:
+ root = self.__root
+ last = root[0]
+ last[1] = root[0] = self.__map[key] = [last, root, key]
+ dict_setitem(self, key, value)
+
+ def __delitem__(self, key, dict_delitem=dict.__delitem__):
+ 'od.__delitem__(y) <==> del od[y]'
+ # Deleting an existing item uses self.__map to find the link which is
+ # then removed by updating the links in the predecessor and successor nodes.
+ dict_delitem(self, key)
+ link_prev, link_next, key = self.__map.pop(key)
+ link_prev[1] = link_next
+ link_next[0] = link_prev
+
+ def __iter__(self):
+ 'od.__iter__() <==> iter(od)'
+ root = self.__root
+ curr = root[1]
+ while curr is not root:
+ yield curr[2]
+ curr = curr[1]
+
+ def __reversed__(self):
+ 'od.__reversed__() <==> reversed(od)'
+ root = self.__root
+ curr = root[0]
+ while curr is not root:
+ yield curr[2]
+ curr = curr[0]
+
+ def clear(self):
+ 'od.clear() -> None. Remove all items from od.'
+ try:
+ for node in self.__map.itervalues():
+ del node[:]
+ root = self.__root
+ root[:] = [root, root, None]
+ self.__map.clear()
+ except AttributeError:
+ pass
+ dict.clear(self)
+
+ def popitem(self, last=True):
+ '''od.popitem() -> (k, v), return and remove a (key, value) pair.
+ Pairs are returned in LIFO order if last is true or FIFO order if false.
+
+ '''
+ if not self:
+ raise KeyError('dictionary is empty')
+ root = self.__root
+ if last:
+ link = root[0]
+ link_prev = link[0]
+ link_prev[1] = root
+ root[0] = link_prev
+ else:
+ link = root[1]
+ link_next = link[1]
+ root[1] = link_next
+ link_next[0] = root
+ key = link[2]
+ del self.__map[key]
+ value = dict.pop(self, key)
+ return key, value
+
+ # -- the following methods do not depend on the internal structure --
+
+ def keys(self):
+ 'od.keys() -> list of keys in od'
+ return list(self)
+
+ def values(self):
+ 'od.values() -> list of values in od'
+ return [self[key] for key in self]
+
+ def items(self):
+ 'od.items() -> list of (key, value) pairs in od'
+ return [(key, self[key]) for key in self]
+
+ def iterkeys(self):
+ 'od.iterkeys() -> an iterator over the keys in od'
+ return iter(self)
+
+ def itervalues(self):
+            'od.itervalues() -> an iterator over the values in od'
+ for k in self:
+ yield self[k]
+
+ def iteritems(self):
+            'od.iteritems() -> an iterator over the (key, value) items in od'
+ for k in self:
+ yield (k, self[k])
+
+ def update(*args, **kwds):
+ '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
+
+ If E is a dict instance, does: for k in E: od[k] = E[k]
+ If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
+ Or if E is an iterable of items, does: for k, v in E: od[k] = v
+ In either case, this is followed by: for k, v in F.items(): od[k] = v
+
+ '''
+ if len(args) > 2:
+ raise TypeError('update() takes at most 2 positional '
+ 'arguments (%d given)' % (len(args),))
+ elif not args:
+ raise TypeError('update() takes at least 1 argument (0 given)')
+ self = args[0]
+ # Make progressively weaker assumptions about "other"
+ other = ()
+ if len(args) == 2:
+ other = args[1]
+ if isinstance(other, dict):
+ for key in other:
+ self[key] = other[key]
+ elif hasattr(other, 'keys'):
+ for key in other.keys():
+ self[key] = other[key]
+ else:
+ for key, value in other:
+ self[key] = value
+ for key, value in kwds.items():
+ self[key] = value
+
+ __update = update # let subclasses override update without breaking __init__
+
+ __marker = object()
+
+ def pop(self, key, default=__marker):
+ '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised.
+
+ '''
+ if key in self:
+ result = self[key]
+ del self[key]
+ return result
+ if default is self.__marker:
+ raise KeyError(key)
+ return default
+
+ def setdefault(self, key, default=None):
+ 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
+ if key in self:
+ return self[key]
+ self[key] = default
+ return default
+
+ def __repr__(self, _repr_running=None):
+ 'od.__repr__() <==> repr(od)'
+ if not _repr_running: _repr_running = {}
+ call_key = id(self), _get_ident()
+ if call_key in _repr_running:
+ return '...'
+ _repr_running[call_key] = 1
+ try:
+ if not self:
+ return '%s()' % (self.__class__.__name__,)
+ return '%s(%r)' % (self.__class__.__name__, self.items())
+ finally:
+ del _repr_running[call_key]
+
+ def __reduce__(self):
+ 'Return state information for pickling'
+ items = [[k, self[k]] for k in self]
+ inst_dict = vars(self).copy()
+ for k in vars(OrderedDict()):
+ inst_dict.pop(k, None)
+ if inst_dict:
+ return (self.__class__, (items,), inst_dict)
+ return self.__class__, (items,)
+
+ def copy(self):
+ 'od.copy() -> a shallow copy of od'
+ return self.__class__(self)
+
+ @classmethod
+ def fromkeys(cls, iterable, value=None):
+ '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
+ and values equal to v (which defaults to None).
+
+ '''
+ d = cls()
+ for key in iterable:
+ d[key] = value
+ return d
+
+ def __eq__(self, other):
+ '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
+ while comparison to a regular mapping is order-insensitive.
+
+ '''
+ if isinstance(other, OrderedDict):
+ return len(self)==len(other) and self.items() == other.items()
+ return dict.__eq__(self, other)
+
+ def __ne__(self, other):
+ return not self == other
+
+ # -- the following methods are only used in Python 2.7 --
+
+ def viewkeys(self):
+ "od.viewkeys() -> a set-like object providing a view on od's keys"
+ return KeysView(self)
+
+ def viewvalues(self):
+ "od.viewvalues() -> an object providing a view on od's values"
+ return ValuesView(self)
+
+ def viewitems(self):
+ "od.viewitems() -> a set-like object providing a view on od's items"
+ return ItemsView(self)
+
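+# Behaviour sketch (keys made up): the backport preserves insertion order just
+# like the stdlib class it stands in for.
+#
+#     od = OrderedDict()
+#     od['b'] = 1
+#     od['a'] = 2
+#     list(od.keys())       # ['b', 'a']
+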
+try:
+ from logging.config import BaseConfigurator, valid_ident
+except ImportError: # pragma: no cover
+ IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
+
+
+ def valid_ident(s):
+ m = IDENTIFIER.match(s)
+ if not m:
+ raise ValueError('Not a valid Python identifier: %r' % s)
+ return True
+
+
+ # The ConvertingXXX classes are wrappers around standard Python containers,
+ # and they serve to convert any suitable values in the container. The
+ # conversion converts base dicts, lists and tuples to their wrapped
+ # equivalents, whereas strings which match a conversion format are converted
+ # appropriately.
+ #
+ # Each wrapper should have a configurator attribute holding the actual
+ # configurator to use for conversion.
+
+ class ConvertingDict(dict):
+ """A converting dictionary wrapper."""
+
+ def __getitem__(self, key):
+ value = dict.__getitem__(self, key)
+ result = self.configurator.convert(value)
+ #If the converted value is different, save for next time
+ if value is not result:
+ self[key] = result
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ def get(self, key, default=None):
+ value = dict.get(self, key, default)
+ result = self.configurator.convert(value)
+ #If the converted value is different, save for next time
+ if value is not result:
+ self[key] = result
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ def pop(self, key, default=None):
+ value = dict.pop(self, key, default)
+ result = self.configurator.convert(value)
+ if value is not result:
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ class ConvertingList(list):
+ """A converting list wrapper."""
+ def __getitem__(self, key):
+ value = list.__getitem__(self, key)
+ result = self.configurator.convert(value)
+ #If the converted value is different, save for next time
+ if value is not result:
+ self[key] = result
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ def pop(self, idx=-1):
+ value = list.pop(self, idx)
+ result = self.configurator.convert(value)
+ if value is not result:
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ return result
+
+ class ConvertingTuple(tuple):
+ """A converting tuple wrapper."""
+ def __getitem__(self, key):
+ value = tuple.__getitem__(self, key)
+ result = self.configurator.convert(value)
+ if value is not result:
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ class BaseConfigurator(object):
+ """
+ The configurator base class which defines some useful defaults.
+ """
+
+ CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
+
+ WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
+ DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
+ INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
+ DIGIT_PATTERN = re.compile(r'^\d+$')
+
+ value_converters = {
+ 'ext' : 'ext_convert',
+ 'cfg' : 'cfg_convert',
+ }
+
+ # We might want to use a different one, e.g. importlib
+ importer = staticmethod(__import__)
+
+ def __init__(self, config):
+ self.config = ConvertingDict(config)
+ self.config.configurator = self
+
+ def resolve(self, s):
+ """
+ Resolve strings to objects using standard import and attribute
+ syntax.
+ """
+ name = s.split('.')
+ used = name.pop(0)
+ try:
+ found = self.importer(used)
+ for frag in name:
+ used += '.' + frag
+ try:
+ found = getattr(found, frag)
+ except AttributeError:
+ self.importer(used)
+ found = getattr(found, frag)
+ return found
+ except ImportError:
+ e, tb = sys.exc_info()[1:]
+ v = ValueError('Cannot resolve %r: %s' % (s, e))
+ v.__cause__, v.__traceback__ = e, tb
+ raise v
+
+ def ext_convert(self, value):
+ """Default converter for the ext:// protocol."""
+ return self.resolve(value)
+
+ def cfg_convert(self, value):
+ """Default converter for the cfg:// protocol."""
+ rest = value
+ m = self.WORD_PATTERN.match(rest)
+ if m is None:
+ raise ValueError("Unable to convert %r" % value)
+ else:
+ rest = rest[m.end():]
+ d = self.config[m.groups()[0]]
+ #print d, rest
+ while rest:
+ m = self.DOT_PATTERN.match(rest)
+ if m:
+ d = d[m.groups()[0]]
+ else:
+ m = self.INDEX_PATTERN.match(rest)
+ if m:
+ idx = m.groups()[0]
+ if not self.DIGIT_PATTERN.match(idx):
+ d = d[idx]
+ else:
+ try:
+ n = int(idx) # try as number first (most likely)
+ d = d[n]
+ except TypeError:
+ d = d[idx]
+ if m:
+ rest = rest[m.end():]
+ else:
+ raise ValueError('Unable to convert '
+ '%r at %r' % (value, rest))
+ #rest should be empty
+ return d
+
+ def convert(self, value):
+ """
+ Convert values to an appropriate type. dicts, lists and tuples are
+ replaced by their converting alternatives. Strings are checked to
+ see if they have a conversion format and are converted if they do.
+ """
+ if not isinstance(value, ConvertingDict) and isinstance(value, dict):
+ value = ConvertingDict(value)
+ value.configurator = self
+ elif not isinstance(value, ConvertingList) and isinstance(value, list):
+ value = ConvertingList(value)
+ value.configurator = self
+ elif not isinstance(value, ConvertingTuple) and\
+ isinstance(value, tuple):
+ value = ConvertingTuple(value)
+ value.configurator = self
+ elif isinstance(value, string_types):
+ m = self.CONVERT_PATTERN.match(value)
+ if m:
+ d = m.groupdict()
+ prefix = d['prefix']
+ converter = self.value_converters.get(prefix, None)
+ if converter:
+ suffix = d['suffix']
+ converter = getattr(self, converter)
+ value = converter(suffix)
+ return value
+
+ def configure_custom(self, config):
+ """Configure an object with a user-supplied factory."""
+ c = config.pop('()')
+ if not callable(c):
+ c = self.resolve(c)
+ props = config.pop('.', None)
+ # Check for valid identifiers
+ kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
+ result = c(**kwargs)
+ if props:
+ for name, value in props.items():
+ setattr(result, name, value)
+ return result
+
+ def as_tuple(self, value):
+ """Utility function which converts lists to tuples."""
+ if isinstance(value, list):
+ value = tuple(value)
+ return value
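+
+# Conversion sketch for the fallback configurator above (config keys are made up):
+# given BaseConfigurator({'handler': 'ext://logging.StreamHandler',
+#                         'level': 'cfg://levels.default',
+#                         'levels': {'default': 'INFO'}}),
+# convert() resolves the 'ext://' value to the StreamHandler class via resolve()
+# and the 'cfg://' value to 'INFO' via cfg_convert().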
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py
new file mode 100644
index 0000000000..0a90c300ba
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py
@@ -0,0 +1,1339 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2017 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""PEP 376 implementation."""
+
+from __future__ import unicode_literals
+
+import base64
+import codecs
+import contextlib
+import hashlib
+import logging
+import os
+import posixpath
+import sys
+import zipimport
+
+from . import DistlibException, resources
+from .compat import StringIO
+from .version import get_scheme, UnsupportedVersionError
+from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
+ LEGACY_METADATA_FILENAME)
+from .util import (parse_requirement, cached_property, parse_name_and_version,
+ read_exports, write_exports, CSVReader, CSVWriter)
+
+
+__all__ = ['Distribution', 'BaseInstalledDistribution',
+ 'InstalledDistribution', 'EggInfoDistribution',
+ 'DistributionPath']
+
+
+logger = logging.getLogger(__name__)
+
+EXPORTS_FILENAME = 'pydist-exports.json'
+COMMANDS_FILENAME = 'pydist-commands.json'
+
+DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED',
+ 'RESOURCES', EXPORTS_FILENAME, 'SHARED')
+
+DISTINFO_EXT = '.dist-info'
+
+
+class _Cache(object):
+ """
+ A simple cache mapping names and .dist-info paths to distributions
+ """
+ def __init__(self):
+ """
+ Initialise an instance. There is normally one for each DistributionPath.
+ """
+ self.name = {}
+ self.path = {}
+ self.generated = False
+
+ def clear(self):
+ """
+ Clear the cache, setting it to its initial state.
+ """
+ self.name.clear()
+ self.path.clear()
+ self.generated = False
+
+ def add(self, dist):
+ """
+ Add a distribution to the cache.
+ :param dist: The distribution to add.
+ """
+ if dist.path not in self.path:
+ self.path[dist.path] = dist
+ self.name.setdefault(dist.key, []).append(dist)
+
+
+class DistributionPath(object):
+ """
+ Represents a set of distributions installed on a path (typically sys.path).
+ """
+ def __init__(self, path=None, include_egg=False):
+ """
+ Create an instance from a path, optionally including legacy (distutils/
+ setuptools/distribute) distributions.
+ :param path: The path to use, as a list of directories. If not specified,
+ sys.path is used.
+ :param include_egg: If True, this instance will look for and return legacy
+ distributions as well as those based on PEP 376.
+ """
+ if path is None:
+ path = sys.path
+ self.path = path
+ self._include_dist = True
+ self._include_egg = include_egg
+
+ self._cache = _Cache()
+ self._cache_egg = _Cache()
+ self._cache_enabled = True
+ self._scheme = get_scheme('default')
+
+ def _get_cache_enabled(self):
+ return self._cache_enabled
+
+ def _set_cache_enabled(self, value):
+ self._cache_enabled = value
+
+ cache_enabled = property(_get_cache_enabled, _set_cache_enabled)
+
+ def clear_cache(self):
+ """
+ Clears the internal cache.
+ """
+ self._cache.clear()
+ self._cache_egg.clear()
+
+
+ def _yield_distributions(self):
+ """
+ Yield .dist-info and/or .egg(-info) distributions.
+ """
+ # We need to check if we've seen some resources already, because on
+ # some Linux systems (e.g. some Debian/Ubuntu variants) there are
+ # symlinks which alias other files in the environment.
+ seen = set()
+ for path in self.path:
+ finder = resources.finder_for_path(path)
+ if finder is None:
+ continue
+ r = finder.find('')
+ if not r or not r.is_container:
+ continue
+ rset = sorted(r.resources)
+ for entry in rset:
+ r = finder.find(entry)
+ if not r or r.path in seen:
+ continue
+ if self._include_dist and entry.endswith(DISTINFO_EXT):
+ possible_filenames = [METADATA_FILENAME,
+ WHEEL_METADATA_FILENAME,
+ LEGACY_METADATA_FILENAME]
+ for metadata_filename in possible_filenames:
+ metadata_path = posixpath.join(entry, metadata_filename)
+ pydist = finder.find(metadata_path)
+ if pydist:
+ break
+ else:
+ continue
+
+ with contextlib.closing(pydist.as_stream()) as stream:
+ metadata = Metadata(fileobj=stream, scheme='legacy')
+ logger.debug('Found %s', r.path)
+ seen.add(r.path)
+ yield new_dist_class(r.path, metadata=metadata,
+ env=self)
+ elif self._include_egg and entry.endswith(('.egg-info',
+ '.egg')):
+ logger.debug('Found %s', r.path)
+ seen.add(r.path)
+ yield old_dist_class(r.path, self)
+
+ def _generate_cache(self):
+ """
+ Scan the path for distributions and populate the cache with
+ those that are found.
+ """
+ gen_dist = not self._cache.generated
+ gen_egg = self._include_egg and not self._cache_egg.generated
+ if gen_dist or gen_egg:
+ for dist in self._yield_distributions():
+ if isinstance(dist, InstalledDistribution):
+ self._cache.add(dist)
+ else:
+ self._cache_egg.add(dist)
+
+ if gen_dist:
+ self._cache.generated = True
+ if gen_egg:
+ self._cache_egg.generated = True
+
+ @classmethod
+ def distinfo_dirname(cls, name, version):
+ """
+ The *name* and *version* parameters are converted into their
+ filename-escaped form, i.e. any ``'-'`` characters are replaced
+ with ``'_'`` other than the one in ``'dist-info'`` and the one
+ separating the name from the version number.
+
+ :parameter name: is converted to a standard distribution name by replacing
+                           any runs of non-alphanumeric characters with a single
+ ``'-'``.
+ :type name: string
+ :parameter version: is converted to a standard version string. Spaces
+ become dots, and all other non-alphanumeric characters
+ (except dots) become dashes, with runs of multiple
+ dashes condensed to a single dash.
+ :type version: string
+ :returns: directory name
+ :rtype: string"""
+ name = name.replace('-', '_')
+ return '-'.join([name, version]) + DISTINFO_EXT
+
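+    # Example (illustrative names): distinfo_dirname('python-dotenv', '0.15.0')
+    # returns 'python_dotenv-0.15.0.dist-info'.
+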
+ def get_distributions(self):
+ """
+ Provides an iterator that looks for distributions and returns
+ :class:`InstalledDistribution` or
+ :class:`EggInfoDistribution` instances for each one of them.
+
+ :rtype: iterator of :class:`InstalledDistribution` and
+ :class:`EggInfoDistribution` instances
+ """
+ if not self._cache_enabled:
+ for dist in self._yield_distributions():
+ yield dist
+ else:
+ self._generate_cache()
+
+ for dist in self._cache.path.values():
+ yield dist
+
+ if self._include_egg:
+ for dist in self._cache_egg.path.values():
+ yield dist
+
+ def get_distribution(self, name):
+ """
+ Looks for a named distribution on the path.
+
+ This function only returns the first result found, as no more than one
+ value is expected. If nothing is found, ``None`` is returned.
+
+ :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`
+ or ``None``
+ """
+ result = None
+ name = name.lower()
+ if not self._cache_enabled:
+ for dist in self._yield_distributions():
+ if dist.key == name:
+ result = dist
+ break
+ else:
+ self._generate_cache()
+
+ if name in self._cache.name:
+ result = self._cache.name[name][0]
+ elif self._include_egg and name in self._cache_egg.name:
+ result = self._cache_egg.name[name][0]
+ return result
+
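+    # Usage sketch (assumed environment; the named project may not be installed):
+    #
+    #     dp = DistributionPath()
+    #     dist = dp.get_distribution('pip')      # InstalledDistribution or None
+    #     if dist is not None:
+    #         print(dist.name_and_version)        # e.g. 'pip (20.3.1)'
+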
+ def provides_distribution(self, name, version=None):
+ """
+ Iterates over all distributions to find which distributions provide *name*.
+ If a *version* is provided, it will be used to filter the results.
+
+        This is a generator: it yields each matching distribution in turn, and
+        yields nothing if no installed distribution provides *name*.
+
+ :parameter version: a version specifier that indicates the version
+ required, conforming to the format in ``PEP-345``
+
+ :type name: string
+ :type version: string
+ """
+ matcher = None
+ if version is not None:
+ try:
+ matcher = self._scheme.matcher('%s (%s)' % (name, version))
+ except ValueError:
+ raise DistlibException('invalid name or version: %r, %r' %
+ (name, version))
+
+ for dist in self.get_distributions():
+ # We hit a problem on Travis where enum34 was installed and doesn't
+ # have a provides attribute ...
+ if not hasattr(dist, 'provides'):
+ logger.debug('No "provides": %s', dist)
+ else:
+ provided = dist.provides
+
+ for p in provided:
+ p_name, p_ver = parse_name_and_version(p)
+ if matcher is None:
+ if p_name == name:
+ yield dist
+ break
+ else:
+ if p_name == name and matcher.match(p_ver):
+ yield dist
+ break
+
+ def get_file_path(self, name, relative_path):
+ """
+ Return the path to a resource file.
+ """
+ dist = self.get_distribution(name)
+ if dist is None:
+ raise LookupError('no distribution named %r found' % name)
+ return dist.get_resource_path(relative_path)
+
+ def get_exported_entries(self, category, name=None):
+ """
+ Return all of the exported entries in a particular category.
+
+ :param category: The category to search for entries.
+ :param name: If specified, only entries with that name are returned.
+ """
+ for dist in self.get_distributions():
+ r = dist.exports
+ if category in r:
+ d = r[category]
+ if name is not None:
+ if name in d:
+ yield d[name]
+ else:
+ for v in d.values():
+ yield v
+
+
+class Distribution(object):
+ """
+ A base class for distributions, whether installed or from indexes.
+ Either way, it must have some metadata, so that's all that's needed
+ for construction.
+ """
+
+ build_time_dependency = False
+ """
+ Set to True if it's known to be only a build-time dependency (i.e.
+ not needed after installation).
+ """
+
+ requested = False
+ """A boolean that indicates whether the ``REQUESTED`` metadata file is
+ present (in other words, whether the package was installed by user
+ request or it was installed as a dependency)."""
+
+ def __init__(self, metadata):
+ """
+ Initialise an instance.
+ :param metadata: The instance of :class:`Metadata` describing this
+ distribution.
+ """
+ self.metadata = metadata
+ self.name = metadata.name
+ self.key = self.name.lower() # for case-insensitive comparisons
+ self.version = metadata.version
+ self.locator = None
+ self.digest = None
+ self.extras = None # additional features requested
+ self.context = None # environment marker overrides
+ self.download_urls = set()
+ self.digests = {}
+
+ @property
+ def source_url(self):
+ """
+ The source archive download URL for this distribution.
+ """
+ return self.metadata.source_url
+
+ download_url = source_url # Backward compatibility
+
+ @property
+ def name_and_version(self):
+ """
+ A utility property which displays the name and version in parentheses.
+ """
+ return '%s (%s)' % (self.name, self.version)
+
+ @property
+ def provides(self):
+ """
+ A set of distribution names and versions provided by this distribution.
+ :return: A set of "name (version)" strings.
+ """
+ plist = self.metadata.provides
+ s = '%s (%s)' % (self.name, self.version)
+ if s not in plist:
+ plist.append(s)
+ return plist
+
+ def _get_requirements(self, req_attr):
+ md = self.metadata
+ logger.debug('Getting requirements from metadata %r', md.todict())
+ reqts = getattr(md, req_attr)
+ return set(md.get_requirements(reqts, extras=self.extras,
+ env=self.context))
+
+ @property
+ def run_requires(self):
+ return self._get_requirements('run_requires')
+
+ @property
+ def meta_requires(self):
+ return self._get_requirements('meta_requires')
+
+ @property
+ def build_requires(self):
+ return self._get_requirements('build_requires')
+
+ @property
+ def test_requires(self):
+ return self._get_requirements('test_requires')
+
+ @property
+ def dev_requires(self):
+ return self._get_requirements('dev_requires')
+
+ def matches_requirement(self, req):
+ """
+ Say if this instance matches (fulfills) a requirement.
+ :param req: The requirement to match.
+ :rtype req: str
+ :return: True if it matches, else False.
+ """
+ # Requirement may contain extras - parse to lose those
+ # from what's passed to the matcher
+ r = parse_requirement(req)
+ scheme = get_scheme(self.metadata.scheme)
+ try:
+ matcher = scheme.matcher(r.requirement)
+ except UnsupportedVersionError:
+ # XXX compat-mode if cannot read the version
+ logger.warning('could not read version %r - using name only',
+ req)
+ name = req.split()[0]
+ matcher = scheme.matcher(name)
+
+ name = matcher.key # case-insensitive
+
+ result = False
+ for p in self.provides:
+ p_name, p_ver = parse_name_and_version(p)
+ if p_name != name:
+ continue
+ try:
+ result = matcher.match(p_ver)
+ break
+ except UnsupportedVersionError:
+ pass
+ return result
+
+ def __repr__(self):
+ """
+        Return a textual representation of this instance.
+ """
+ if self.source_url:
+ suffix = ' [%s]' % self.source_url
+ else:
+ suffix = ''
+ return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix)
+
+ def __eq__(self, other):
+ """
+ See if this distribution is the same as another.
+ :param other: The distribution to compare with. To be equal to one
+                      another, distributions must have the same type, name,
+ version and source_url.
+ :return: True if it is the same, else False.
+ """
+ if type(other) is not type(self):
+ result = False
+ else:
+ result = (self.name == other.name and
+ self.version == other.version and
+ self.source_url == other.source_url)
+ return result
+
+ def __hash__(self):
+ """
+ Compute hash in a way which matches the equality test.
+ """
+ return hash(self.name) + hash(self.version) + hash(self.source_url)
+
+
+class BaseInstalledDistribution(Distribution):
+ """
+ This is the base class for installed distributions (whether PEP 376 or
+ legacy).
+ """
+
+ hasher = None
+
+ def __init__(self, metadata, path, env=None):
+ """
+ Initialise an instance.
+ :param metadata: An instance of :class:`Metadata` which describes the
+ distribution. This will normally have been initialised
+ from a metadata file in the ``path``.
+ :param path: The path of the ``.dist-info`` or ``.egg-info``
+ directory for the distribution.
+ :param env: This is normally the :class:`DistributionPath`
+ instance where this distribution was found.
+ """
+ super(BaseInstalledDistribution, self).__init__(metadata)
+ self.path = path
+ self.dist_path = env
+
+ def get_hash(self, data, hasher=None):
+ """
+ Get the hash of some data, using a particular hash algorithm, if
+ specified.
+
+ :param data: The data to be hashed.
+ :type data: bytes
+ :param hasher: The name of a hash implementation, supported by hashlib,
+ or ``None``. Examples of valid values are ``'sha1'``,
+ ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and
+ ``'sha512'``. If no hasher is specified, the ``hasher``
+ attribute of the :class:`InstalledDistribution` instance
+ is used. If the hasher is determined to be ``None``, MD5
+ is used as the hashing algorithm.
+ :returns: The hash of the data. If a hasher was explicitly specified,
+ the returned hash will be prefixed with the specified hasher
+ followed by '='.
+ :rtype: str
+ """
+ if hasher is None:
+ hasher = self.hasher
+ if hasher is None:
+ hasher = hashlib.md5
+ prefix = ''
+ else:
+ hasher = getattr(hashlib, hasher)
+ prefix = '%s=' % self.hasher
+ digest = hasher(data).digest()
+ digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
+ return '%s%s' % (prefix, digest)
+
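+    # Shape of the result (digest value made up): with the 'sha256' hasher used by
+    # InstalledDistribution, get_hash(b'data') returns something like 'sha256=a6D...';
+    # the digest is urlsafe base64 with '=' padding stripped, prefixed with the
+    # hasher name, matching the format used in RECORD files.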
+
+class InstalledDistribution(BaseInstalledDistribution):
+ """
+ Created with the *path* of the ``.dist-info`` directory provided to the
+ constructor. It reads the metadata contained in ``pydist.json`` when it is
+    instantiated, or uses a passed-in Metadata instance (useful for when
+ dry-run mode is being used).
+ """
+
+ hasher = 'sha256'
+
+ def __init__(self, path, metadata=None, env=None):
+ self.modules = []
+ self.finder = finder = resources.finder_for_path(path)
+ if finder is None:
+ raise ValueError('finder unavailable for %s' % path)
+ if env and env._cache_enabled and path in env._cache.path:
+ metadata = env._cache.path[path].metadata
+ elif metadata is None:
+ r = finder.find(METADATA_FILENAME)
+ # Temporary - for Wheel 0.23 support
+ if r is None:
+ r = finder.find(WHEEL_METADATA_FILENAME)
+ # Temporary - for legacy support
+ if r is None:
+ r = finder.find(LEGACY_METADATA_FILENAME)
+ if r is None:
+ raise ValueError('no %s found in %s' % (METADATA_FILENAME,
+ path))
+ with contextlib.closing(r.as_stream()) as stream:
+ metadata = Metadata(fileobj=stream, scheme='legacy')
+
+ super(InstalledDistribution, self).__init__(metadata, path, env)
+
+ if env and env._cache_enabled:
+ env._cache.add(self)
+
+ r = finder.find('REQUESTED')
+ self.requested = r is not None
+ p = os.path.join(path, 'top_level.txt')
+ if os.path.exists(p):
+ with open(p, 'rb') as f:
+ data = f.read().decode('utf-8')
+ self.modules = data.splitlines()
+
+ def __repr__(self):
+ return '<InstalledDistribution %r %s at %r>' % (
+ self.name, self.version, self.path)
+
+ def __str__(self):
+ return "%s %s" % (self.name, self.version)
+
+ def _get_records(self):
+ """
+ Get the list of installed files for the distribution
+ :return: A list of tuples of path, hash and size. Note that hash and
+ size might be ``None`` for some entries. The path is exactly
+ as stored in the file (which is as in PEP 376).
+ """
+ results = []
+ r = self.get_distinfo_resource('RECORD')
+ with contextlib.closing(r.as_stream()) as stream:
+ with CSVReader(stream=stream) as record_reader:
+ # Base location is parent dir of .dist-info dir
+ #base_location = os.path.dirname(self.path)
+ #base_location = os.path.abspath(base_location)
+ for row in record_reader:
+ missing = [None for i in range(len(row), 3)]
+ path, checksum, size = row + missing
+ #if not os.path.isabs(path):
+ # path = path.replace('/', os.sep)
+ # path = os.path.join(base_location, path)
+ results.append((path, checksum, size))
+ return results
+
+ @cached_property
+ def exports(self):
+ """
+ Return the information exported by this distribution.
+ :return: A dictionary of exports, mapping an export category to a dict
+ of :class:`ExportEntry` instances describing the individual
+ export entries, and keyed by name.
+ """
+ result = {}
+ r = self.get_distinfo_resource(EXPORTS_FILENAME)
+ if r:
+ result = self.read_exports()
+ return result
+
+ def read_exports(self):
+ """
+ Read exports data from a file in .ini format.
+
+ :return: A dictionary of exports, mapping an export category to a list
+ of :class:`ExportEntry` instances describing the individual
+ export entries.
+ """
+ result = {}
+ r = self.get_distinfo_resource(EXPORTS_FILENAME)
+ if r:
+ with contextlib.closing(r.as_stream()) as stream:
+ result = read_exports(stream)
+ return result
+
+ def write_exports(self, exports):
+ """
+ Write a dictionary of exports to a file in .ini format.
+ :param exports: A dictionary of exports, mapping an export category to
+ a list of :class:`ExportEntry` instances describing the
+ individual export entries.
+ """
+ rf = self.get_distinfo_file(EXPORTS_FILENAME)
+ with open(rf, 'w') as f:
+ write_exports(exports, f)
+
+ def get_resource_path(self, relative_path):
+ """
+ NOTE: This API may change in the future.
+
+ Return the absolute path to a resource file with the given relative
+ path.
+
+ :param relative_path: The path, relative to .dist-info, of the resource
+ of interest.
+ :return: The absolute path where the resource is to be found.
+ """
+ r = self.get_distinfo_resource('RESOURCES')
+ with contextlib.closing(r.as_stream()) as stream:
+ with CSVReader(stream=stream) as resources_reader:
+ for relative, destination in resources_reader:
+ if relative == relative_path:
+ return destination
+ raise KeyError('no resource file with relative path %r '
+ 'is installed' % relative_path)
+
+ def list_installed_files(self):
+ """
+ Iterates over the ``RECORD`` entries and returns a tuple
+ ``(path, hash, size)`` for each line.
+
+ :returns: iterator of (path, hash, size)
+ """
+ for result in self._get_records():
+ yield result
+
+ def write_installed_files(self, paths, prefix, dry_run=False):
+ """
+ Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any
+ existing ``RECORD`` file is silently overwritten.
+
+ prefix is used to determine when to write absolute paths.
+ """
+ prefix = os.path.join(prefix, '')
+ base = os.path.dirname(self.path)
+ base_under_prefix = base.startswith(prefix)
+ base = os.path.join(base, '')
+ record_path = self.get_distinfo_file('RECORD')
+ logger.info('creating %s', record_path)
+ if dry_run:
+ return None
+ with CSVWriter(record_path) as writer:
+ for path in paths:
+ if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')):
+ # do not put size and hash, as in PEP-376
+ hash_value = size = ''
+ else:
+ size = '%d' % os.path.getsize(path)
+ with open(path, 'rb') as fp:
+ hash_value = self.get_hash(fp.read())
+ if path.startswith(base) or (base_under_prefix and
+ path.startswith(prefix)):
+ path = os.path.relpath(path, base)
+ writer.writerow((path, hash_value, size))
+
+ # add the RECORD file itself
+ if record_path.startswith(base):
+ record_path = os.path.relpath(record_path, base)
+ writer.writerow((record_path, '', ''))
+ return record_path
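+
+    # Sketch of the rows the writer above produces (values invented); the
+    # hash column uses the "<hasher>=<digest>" form checked in
+    # check_installed_files() below, and the RECORD entry itself carries
+    # neither hash nor size:
+    #
+    #   demo/__init__.py,sha256=...,124
+    #   demo-1.0.dist-info/RECORD,,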
+
+ def check_installed_files(self):
+ """
+ Checks that the hashes and sizes of the files in ``RECORD`` are
+ matched by the files themselves. Returns a (possibly empty) list of
+ mismatches. Each entry in the mismatch list will be a tuple consisting
+ of the path, 'exists', 'size' or 'hash' according to what didn't match
+ (existence is checked first, then size, then hash), the expected
+ value and the actual value.
+ """
+ mismatches = []
+ base = os.path.dirname(self.path)
+ record_path = self.get_distinfo_file('RECORD')
+ for path, hash_value, size in self.list_installed_files():
+ if not os.path.isabs(path):
+ path = os.path.join(base, path)
+ if path == record_path:
+ continue
+ if not os.path.exists(path):
+ mismatches.append((path, 'exists', True, False))
+ elif os.path.isfile(path):
+ actual_size = str(os.path.getsize(path))
+ if size and actual_size != size:
+ mismatches.append((path, 'size', size, actual_size))
+ elif hash_value:
+ if '=' in hash_value:
+ hasher = hash_value.split('=', 1)[0]
+ else:
+ hasher = None
+
+ with open(path, 'rb') as f:
+ actual_hash = self.get_hash(f.read(), hasher)
+ if actual_hash != hash_value:
+ mismatches.append((path, 'hash', hash_value, actual_hash))
+ return mismatches
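+
+    # An example mismatch entry produced above (values invented):
+    #   ('/usr/lib/python3/site-packages/demo/x.py', 'size', '10', '12')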
+
+ @cached_property
+ def shared_locations(self):
+ """
+ A dictionary of shared locations whose keys are in the set 'prefix',
+ 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'.
+ The corresponding value is the absolute path of that category for
+ this distribution, and takes into account any paths selected by the
+ user at installation time (e.g. via command-line arguments). In the
+ case of the 'namespace' key, this would be a list of absolute paths
+ for the roots of namespace packages in this distribution.
+
+ The first time this property is accessed, the relevant information is
+ read from the SHARED file in the .dist-info directory.
+ """
+ result = {}
+ shared_path = os.path.join(self.path, 'SHARED')
+ if os.path.isfile(shared_path):
+ with codecs.open(shared_path, 'r', encoding='utf-8') as f:
+ lines = f.read().splitlines()
+ for line in lines:
+ key, value = line.split('=', 1)
+ if key == 'namespace':
+ result.setdefault(key, []).append(value)
+ else:
+ result[key] = value
+ return result
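+
+    # The SHARED file read above is a plain key=value listing, one entry per
+    # line, with 'namespace' allowed to repeat -- for example (paths
+    # invented):
+    #
+    #   prefix=/usr/local
+    #   scripts=/usr/local/bin
+    #   namespace=/usr/local/lib/python3.8/site-packages/demo_ns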
+
+ def write_shared_locations(self, paths, dry_run=False):
+ """
+ Write shared location information to the SHARED file in .dist-info.
+ :param paths: A dictionary as described in the documentation for
+ :meth:`shared_locations`.
+ :param dry_run: If True, the action is logged but no file is actually
+ written.
+ :return: The path of the file written to.
+ """
+ shared_path = os.path.join(self.path, 'SHARED')
+ logger.info('creating %s', shared_path)
+ if dry_run:
+ return None
+ lines = []
+ for key in ('prefix', 'lib', 'headers', 'scripts', 'data'):
+ path = paths[key]
+ if os.path.isdir(paths[key]):
+ lines.append('%s=%s' % (key, path))
+ for ns in paths.get('namespace', ()):
+ lines.append('namespace=%s' % ns)
+
+ with codecs.open(shared_path, 'w', encoding='utf-8') as f:
+ f.write('\n'.join(lines))
+ return shared_path
+
+ def get_distinfo_resource(self, path):
+ if path not in DIST_FILES:
+ raise DistlibException('invalid path for a dist-info file: '
+ '%r at %r' % (path, self.path))
+ finder = resources.finder_for_path(self.path)
+ if finder is None:
+ raise DistlibException('Unable to get a finder for %s' % self.path)
+ return finder.find(path)
+
+ def get_distinfo_file(self, path):
+ """
+ Returns a path located under the ``.dist-info`` directory. Returns a
+ string representing the path.
+
+ :parameter path: a ``'/'``-separated path relative to the
+ ``.dist-info`` directory or an absolute path;
+ If *path* is an absolute path and doesn't start
+ with the ``.dist-info`` directory path,
+ a :class:`DistlibException` is raised
+ :type path: str
+ :rtype: str
+ """
+ # Check if it is an absolute path # XXX use relpath, add tests
+ if path.find(os.sep) >= 0:
+ # it's an absolute path?
+ distinfo_dirname, path = path.split(os.sep)[-2:]
+ if distinfo_dirname != self.path.split(os.sep)[-1]:
+ raise DistlibException(
+ 'dist-info file %r does not belong to the %r %s '
+ 'distribution' % (path, self.name, self.version))
+
+ # The file must be relative
+ if path not in DIST_FILES:
+ raise DistlibException('invalid path for a dist-info file: '
+ '%r at %r' % (path, self.path))
+
+ return os.path.join(self.path, path)
+
+ def list_distinfo_files(self):
+ """
+ Iterates over the ``RECORD`` entries and returns paths for each line if
+ the path is pointing to a file located in the ``.dist-info`` directory
+ or one of its subdirectories.
+
+ :returns: iterator of paths
+ """
+ base = os.path.dirname(self.path)
+ for path, checksum, size in self._get_records():
+ # XXX add separator or use real relpath algo
+ if not os.path.isabs(path):
+ path = os.path.join(base, path)
+ if path.startswith(self.path):
+ yield path
+
+ def __eq__(self, other):
+ return (isinstance(other, InstalledDistribution) and
+ self.path == other.path)
+
+ # See http://docs.python.org/reference/datamodel#object.__hash__
+ __hash__ = object.__hash__
+
+
+class EggInfoDistribution(BaseInstalledDistribution):
+ """Created with the *path* of the ``.egg-info`` directory or file provided
+ to the constructor. It reads the metadata contained in the file itself, or
+ if the given path happens to be a directory, the metadata is read from the
+ file ``PKG-INFO`` under that directory."""
+
+ requested = True # as we have no way of knowing, assume it was
+ shared_locations = {}
+
+ def __init__(self, path, env=None):
+ def set_name_and_version(s, n, v):
+ s.name = n
+ s.key = n.lower() # for case-insensitive comparisons
+ s.version = v
+
+ self.path = path
+ self.dist_path = env
+ if env and env._cache_enabled and path in env._cache_egg.path:
+ metadata = env._cache_egg.path[path].metadata
+ set_name_and_version(self, metadata.name, metadata.version)
+ else:
+ metadata = self._get_metadata(path)
+
+ # Need to be set before caching
+ set_name_and_version(self, metadata.name, metadata.version)
+
+ if env and env._cache_enabled:
+ env._cache_egg.add(self)
+ super(EggInfoDistribution, self).__init__(metadata, path, env)
+
+ def _get_metadata(self, path):
+ requires = None
+
+ def parse_requires_data(data):
+ """Create a list of dependencies from a requires.txt file.
+
+ *data*: the contents of a setuptools-produced requires.txt file.
+ """
+ reqs = []
+ lines = data.splitlines()
+ for line in lines:
+ line = line.strip()
+ if line.startswith('['):
+ logger.warning('Unexpected line: quitting requirement scan: %r',
+ line)
+ break
+ r = parse_requirement(line)
+ if not r:
+ logger.warning('Not recognised as a requirement: %r', line)
+ continue
+ if r.extras:
+ logger.warning('extra requirements in requires.txt are '
+ 'not supported')
+ if not r.constraints:
+ reqs.append(r.name)
+ else:
+ cons = ', '.join('%s%s' % c for c in r.constraints)
+ reqs.append('%s (%s)' % (r.name, cons))
+ return reqs
+
+ def parse_requires_path(req_path):
+ """Create a list of dependencies from a requires.txt file.
+
+ *req_path*: the path to a setuptools-produced requires.txt file.
+ """
+
+ reqs = []
+ try:
+ with codecs.open(req_path, 'r', 'utf-8') as fp:
+ reqs = parse_requires_data(fp.read())
+ except IOError:
+ pass
+ return reqs
+
+ tl_path = tl_data = None
+ if path.endswith('.egg'):
+ if os.path.isdir(path):
+ p = os.path.join(path, 'EGG-INFO')
+ meta_path = os.path.join(p, 'PKG-INFO')
+ metadata = Metadata(path=meta_path, scheme='legacy')
+ req_path = os.path.join(p, 'requires.txt')
+ tl_path = os.path.join(p, 'top_level.txt')
+ requires = parse_requires_path(req_path)
+ else:
+ # FIXME handle the case where zipfile is not available
+ zipf = zipimport.zipimporter(path)
+ fileobj = StringIO(
+ zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
+ metadata = Metadata(fileobj=fileobj, scheme='legacy')
+ try:
+ data = zipf.get_data('EGG-INFO/requires.txt')
+ tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8')
+ requires = parse_requires_data(data.decode('utf-8'))
+ except IOError:
+ requires = None
+ elif path.endswith('.egg-info'):
+ if os.path.isdir(path):
+ req_path = os.path.join(path, 'requires.txt')
+ requires = parse_requires_path(req_path)
+ path = os.path.join(path, 'PKG-INFO')
+ tl_path = os.path.join(path, 'top_level.txt')
+ metadata = Metadata(path=path, scheme='legacy')
+ else:
+ raise DistlibException('path must end with .egg-info or .egg, '
+ 'got %r' % path)
+
+ if requires:
+ metadata.add_requirements(requires)
+ # look for top-level modules in top_level.txt, if present
+ if tl_data is None:
+ if tl_path is not None and os.path.exists(tl_path):
+ with open(tl_path, 'rb') as f:
+ tl_data = f.read().decode('utf-8')
+ if not tl_data:
+ tl_data = []
+ else:
+ tl_data = tl_data.splitlines()
+ self.modules = tl_data
+ return metadata
+
+ def __repr__(self):
+ return '<EggInfoDistribution %r %s at %r>' % (
+ self.name, self.version, self.path)
+
+ def __str__(self):
+ return "%s %s" % (self.name, self.version)
+
+ def check_installed_files(self):
+ """
+ Checks that the hashes and sizes of the files in ``RECORD`` are
+ matched by the files themselves. Returns a (possibly empty) list of
+ mismatches. Each entry in the mismatch list will be a tuple consisting
+ of the path, 'exists', 'size' or 'hash' according to what didn't match
+ (existence is checked first, then size, then hash), the expected
+ value and the actual value.
+ """
+ mismatches = []
+ record_path = os.path.join(self.path, 'installed-files.txt')
+ if os.path.exists(record_path):
+ for path, _, _ in self.list_installed_files():
+ if path == record_path:
+ continue
+ if not os.path.exists(path):
+ mismatches.append((path, 'exists', True, False))
+ return mismatches
+
+ def list_installed_files(self):
+ """
+ Iterates over the ``installed-files.txt`` entries and returns a tuple
+ ``(path, hash, size)`` for each line.
+
+ :returns: a list of (path, hash, size)
+ """
+
+ def _md5(path):
+ f = open(path, 'rb')
+ try:
+ content = f.read()
+ finally:
+ f.close()
+ return hashlib.md5(content).hexdigest()
+
+ def _size(path):
+ return os.stat(path).st_size
+
+ record_path = os.path.join(self.path, 'installed-files.txt')
+ result = []
+ if os.path.exists(record_path):
+ with codecs.open(record_path, 'r', encoding='utf-8') as f:
+ for line in f:
+ line = line.strip()
+ p = os.path.normpath(os.path.join(self.path, line))
+ # "./" is present as a marker between installed files
+ # and installation metadata files
+ if not os.path.exists(p):
+ logger.warning('Non-existent file: %s', p)
+ if p.endswith(('.pyc', '.pyo')):
+ continue
+ #otherwise fall through and fail
+ if not os.path.isdir(p):
+ result.append((p, _md5(p), _size(p)))
+ result.append((record_path, None, None))
+ return result
+
+ def list_distinfo_files(self, absolute=False):
+ """
+ Iterates over the ``installed-files.txt`` entries and returns paths for
+ each line if the path is pointing to a file located in the
+ ``.egg-info`` directory or one of its subdirectories.
+
+ :parameter absolute: If *absolute* is ``True``, each returned path is
+ transformed into a local absolute path. Otherwise the
+ raw value from ``installed-files.txt`` is returned.
+ :type absolute: boolean
+ :returns: iterator of paths
+ """
+ record_path = os.path.join(self.path, 'installed-files.txt')
+ if os.path.exists(record_path):
+ skip = True
+ with codecs.open(record_path, 'r', encoding='utf-8') as f:
+ for line in f:
+ line = line.strip()
+ if line == './':
+ skip = False
+ continue
+ if not skip:
+ p = os.path.normpath(os.path.join(self.path, line))
+ if p.startswith(self.path):
+ if absolute:
+ yield p
+ else:
+ yield line
+
+ def __eq__(self, other):
+ return (isinstance(other, EggInfoDistribution) and
+ self.path == other.path)
+
+ # See http://docs.python.org/reference/datamodel#object.__hash__
+ __hash__ = object.__hash__
+
+new_dist_class = InstalledDistribution
+old_dist_class = EggInfoDistribution
+
+
+class DependencyGraph(object):
+ """
+ Represents a dependency graph between distributions.
+
+ The dependency relationships are stored in an ``adjacency_list`` that maps
+ distributions to a list of ``(other, label)`` tuples where ``other``
+ is a distribution and the edge is labeled with ``label`` (i.e. the version
+ specifier, if such was provided). Also, for more efficient traversal, for
+ every distribution ``x``, a list of predecessors is kept in
+ ``reverse_list[x]``. An edge from distribution ``a`` to
+ distribution ``b`` means that ``a`` depends on ``b``. If any missing
+ dependencies are found, they are stored in ``missing``, which is a
+ dictionary that maps distributions to a list of requirements that were not
+ provided by any other distributions.
+ """
+
+ def __init__(self):
+ self.adjacency_list = {}
+ self.reverse_list = {}
+ self.missing = {}
+
+ def add_distribution(self, distribution):
+ """Add the *distribution* to the graph.
+
+ :type distribution: :class:`distutils2.database.InstalledDistribution`
+ or :class:`distutils2.database.EggInfoDistribution`
+ """
+ self.adjacency_list[distribution] = []
+ self.reverse_list[distribution] = []
+ #self.missing[distribution] = []
+
+ def add_edge(self, x, y, label=None):
+ """Add an edge from distribution *x* to distribution *y* with the given
+ *label*.
+
+ :type x: :class:`distutils2.database.InstalledDistribution` or
+ :class:`distutils2.database.EggInfoDistribution`
+ :type y: :class:`distutils2.database.InstalledDistribution` or
+ :class:`distutils2.database.EggInfoDistribution`
+ :type label: ``str`` or ``None``
+ """
+ self.adjacency_list[x].append((y, label))
+ # multiple edges are allowed, so be careful
+ if x not in self.reverse_list[y]:
+ self.reverse_list[y].append(x)
+
+ def add_missing(self, distribution, requirement):
+ """
+ Add a missing *requirement* for the given *distribution*.
+
+ :type distribution: :class:`distutils2.database.InstalledDistribution`
+ or :class:`distutils2.database.EggInfoDistribution`
+ :type requirement: ``str``
+ """
+ logger.debug('%s missing %r', distribution, requirement)
+ self.missing.setdefault(distribution, []).append(requirement)
+
+ def _repr_dist(self, dist):
+ return '%s %s' % (dist.name, dist.version)
+
+ def repr_node(self, dist, level=1):
+ """Prints only a subgraph"""
+ output = [self._repr_dist(dist)]
+ for other, label in self.adjacency_list[dist]:
+ dist = self._repr_dist(other)
+ if label is not None:
+ dist = '%s [%s]' % (dist, label)
+ output.append(' ' * level + str(dist))
+ suboutput = self.repr_node(other, level + 1)
+ subs = suboutput.split('\n')
+ output.extend(subs[1:])
+ return '\n'.join(output)
+
+ def to_dot(self, f, skip_disconnected=True):
+ """Writes a DOT output for the graph to the provided file *f*.
+
+ If *skip_disconnected* is set to ``True``, then all distributions
+ that are not dependent on any other distribution are skipped.
+
+ :type f: has to support ``file``-like operations
+ :type skip_disconnected: ``bool``
+ """
+ disconnected = []
+
+ f.write("digraph dependencies {\n")
+ for dist, adjs in self.adjacency_list.items():
+ if len(adjs) == 0 and not skip_disconnected:
+ disconnected.append(dist)
+ for other, label in adjs:
+                if label is not None:
+ f.write('"%s" -> "%s" [label="%s"]\n' %
+ (dist.name, other.name, label))
+ else:
+ f.write('"%s" -> "%s"\n' % (dist.name, other.name))
+ if not skip_disconnected and len(disconnected) > 0:
+ f.write('subgraph disconnected {\n')
+ f.write('label = "Disconnected"\n')
+ f.write('bgcolor = red\n')
+
+ for dist in disconnected:
+ f.write('"%s"' % dist.name)
+ f.write('\n')
+ f.write('}\n')
+ f.write('}\n')
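+
+    # The DOT text emitted above has this general shape (names invented):
+    #
+    #   digraph dependencies {
+    #   "b" -> "a" [label="a (>= 1.0)"]
+    #   }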
+
+ def topological_sort(self):
+ """
+ Perform a topological sort of the graph.
+ :return: A tuple, the first element of which is a topologically sorted
+ list of distributions, and the second element of which is a
+ list of distributions that cannot be sorted because they have
+ circular dependencies and so form a cycle.
+ """
+ result = []
+ # Make a shallow copy of the adjacency list
+ alist = {}
+ for k, v in self.adjacency_list.items():
+ alist[k] = v[:]
+ while True:
+ # See what we can remove in this run
+ to_remove = []
+ for k, v in list(alist.items())[:]:
+ if not v:
+ to_remove.append(k)
+ del alist[k]
+ if not to_remove:
+ # What's left in alist (if anything) is a cycle.
+ break
+ # Remove from the adjacency list of others
+ for k, v in alist.items():
+ alist[k] = [(d, r) for d, r in v if d not in to_remove]
+ logger.debug('Moving to result: %s',
+ ['%s (%s)' % (d.name, d.version) for d in to_remove])
+ result.extend(to_remove)
+ return result, list(alist.keys())
+
+ def __repr__(self):
+ """Representation of the graph"""
+ output = []
+ for dist, adjs in self.adjacency_list.items():
+ output.append(self.repr_node(dist))
+ return '\n'.join(output)
+
+
+def make_graph(dists, scheme='default'):
+ """Makes a dependency graph from the given distributions.
+
+ :parameter dists: a list of distributions
+ :type dists: list of :class:`distutils2.database.InstalledDistribution` and
+ :class:`distutils2.database.EggInfoDistribution` instances
+ :rtype: a :class:`DependencyGraph` instance
+ """
+ scheme = get_scheme(scheme)
+ graph = DependencyGraph()
+ provided = {} # maps names to lists of (version, dist) tuples
+
+ # first, build the graph and find out what's provided
+ for dist in dists:
+ graph.add_distribution(dist)
+
+ for p in dist.provides:
+ name, version = parse_name_and_version(p)
+ logger.debug('Add to provided: %s, %s, %s', name, version, dist)
+ provided.setdefault(name, []).append((version, dist))
+
+ # now make the edges
+ for dist in dists:
+ requires = (dist.run_requires | dist.meta_requires |
+ dist.build_requires | dist.dev_requires)
+ for req in requires:
+ try:
+ matcher = scheme.matcher(req)
+ except UnsupportedVersionError:
+ # XXX compat-mode if cannot read the version
+ logger.warning('could not read version %r - using name only',
+ req)
+ name = req.split()[0]
+ matcher = scheme.matcher(name)
+
+ name = matcher.key # case-insensitive
+
+ matched = False
+ if name in provided:
+ for version, provider in provided[name]:
+ try:
+ match = matcher.match(version)
+ except UnsupportedVersionError:
+ match = False
+
+ if match:
+ graph.add_edge(dist, provider, req)
+ matched = True
+ break
+ if not matched:
+ graph.add_missing(dist, req)
+ return graph
+
+
+def get_dependent_dists(dists, dist):
+ """Recursively generate a list of distributions from *dists* that are
+ dependent on *dist*.
+
+ :param dists: a list of distributions
+ :param dist: a distribution, member of *dists* for which we are interested
+ """
+ if dist not in dists:
+ raise DistlibException('given distribution %r is not a member '
+ 'of the list' % dist.name)
+ graph = make_graph(dists)
+
+ dep = [dist] # dependent distributions
+ todo = graph.reverse_list[dist] # list of nodes we should inspect
+
+ while todo:
+ d = todo.pop()
+ dep.append(d)
+ for succ in graph.reverse_list[d]:
+ if succ not in dep:
+ todo.append(succ)
+
+ dep.pop(0) # remove dist from dep, was there to prevent infinite loops
+ return dep
+
+
+def get_required_dists(dists, dist):
+ """Recursively generate a list of distributions from *dists* that are
+ required by *dist*.
+
+ :param dists: a list of distributions
+ :param dist: a distribution, member of *dists* for which we are interested
+ """
+ if dist not in dists:
+ raise DistlibException('given distribution %r is not a member '
+ 'of the list' % dist.name)
+ graph = make_graph(dists)
+
+ req = [] # required distributions
+ todo = graph.adjacency_list[dist] # list of nodes we should inspect
+
+ while todo:
+ d = todo.pop()[0]
+ req.append(d)
+ for pred in graph.adjacency_list[d]:
+ if pred not in req:
+ todo.append(pred)
+
+ return req
+
+
+def make_dist(name, version, **kwargs):
+ """
+ A convenience method for making a dist given just a name and version.
+ """
+ summary = kwargs.pop('summary', 'Placeholder for summary')
+ md = Metadata(**kwargs)
+ md.name = name
+ md.version = version
+ md.summary = summary or 'Placeholder for summary'
+ return Distribution(md)
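+
+
+if __name__ == '__main__':  # pragma: no cover
+    # A minimal, illustrative sketch (names, versions and the edge label are
+    # invented): build a tiny dependency graph by hand and sort it.
+    a = make_dist('a', '1.0')
+    b = make_dist('b', '1.0')
+    graph = DependencyGraph()
+    graph.add_distribution(a)
+    graph.add_distribution(b)
+    graph.add_edge(b, a, 'a (>= 1.0)')   # b depends on a
+    ordered, cyclic = graph.topological_sort()
+    print([(d.name, d.version) for d in ordered], cyclic)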
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py
new file mode 100644
index 0000000000..7a87cdcf7a
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py
@@ -0,0 +1,516 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+import hashlib
+import logging
+import os
+import shutil
+import subprocess
+import tempfile
+try:
+ from threading import Thread
+except ImportError:
+ from dummy_threading import Thread
+
+from . import DistlibException
+from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
+ urlparse, build_opener, string_types)
+from .util import cached_property, zip_dir, ServerProxy
+
+logger = logging.getLogger(__name__)
+
+DEFAULT_INDEX = 'https://pypi.org/pypi'
+DEFAULT_REALM = 'pypi'
+
+class PackageIndex(object):
+ """
+ This class represents a package index compatible with PyPI, the Python
+ Package Index.
+ """
+
+ boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'
+
+ def __init__(self, url=None):
+ """
+ Initialise an instance.
+
+ :param url: The URL of the index. If not specified, the URL for PyPI is
+ used.
+ """
+ self.url = url or DEFAULT_INDEX
+ self.read_configuration()
+ scheme, netloc, path, params, query, frag = urlparse(self.url)
+ if params or query or frag or scheme not in ('http', 'https'):
+ raise DistlibException('invalid repository: %s' % self.url)
+ self.password_handler = None
+ self.ssl_verifier = None
+ self.gpg = None
+ self.gpg_home = None
+ with open(os.devnull, 'w') as sink:
+ # Use gpg by default rather than gpg2, as gpg2 insists on
+ # prompting for passwords
+ for s in ('gpg', 'gpg2'):
+ try:
+ rc = subprocess.check_call([s, '--version'], stdout=sink,
+ stderr=sink)
+ if rc == 0:
+ self.gpg = s
+ break
+ except OSError:
+ pass
+
+ def _get_pypirc_command(self):
+ """
+ Get the distutils command for interacting with PyPI configurations.
+ :return: the command.
+ """
+ from distutils.core import Distribution
+ from distutils.config import PyPIRCCommand
+ d = Distribution()
+ return PyPIRCCommand(d)
+
+ def read_configuration(self):
+ """
+ Read the PyPI access configuration as supported by distutils, getting
+ PyPI to do the actual work. This populates ``username``, ``password``,
+ ``realm`` and ``url`` attributes from the configuration.
+ """
+ # get distutils to do the work
+ c = self._get_pypirc_command()
+ c.repository = self.url
+ cfg = c._read_pypirc()
+ self.username = cfg.get('username')
+ self.password = cfg.get('password')
+ self.realm = cfg.get('realm', 'pypi')
+ self.url = cfg.get('repository', self.url)
+
+ def save_configuration(self):
+ """
+ Save the PyPI access configuration. You must have set ``username`` and
+ ``password`` attributes before calling this method.
+
+ Again, distutils is used to do the actual work.
+ """
+ self.check_credentials()
+ # get distutils to do the work
+ c = self._get_pypirc_command()
+ c._store_pypirc(self.username, self.password)
+
+ def check_credentials(self):
+ """
+ Check that ``username`` and ``password`` have been set, and raise an
+ exception if not.
+ """
+ if self.username is None or self.password is None:
+ raise DistlibException('username and password must be set')
+ pm = HTTPPasswordMgr()
+ _, netloc, _, _, _, _ = urlparse(self.url)
+ pm.add_password(self.realm, netloc, self.username, self.password)
+ self.password_handler = HTTPBasicAuthHandler(pm)
+
+ def register(self, metadata):
+ """
+ Register a distribution on PyPI, using the provided metadata.
+
+ :param metadata: A :class:`Metadata` instance defining at least a name
+ and version number for the distribution to be
+ registered.
+ :return: The HTTP response received from PyPI upon submission of the
+ request.
+ """
+ self.check_credentials()
+ metadata.validate()
+ d = metadata.todict()
+ d[':action'] = 'verify'
+ request = self.encode_request(d.items(), [])
+ response = self.send_request(request)
+ d[':action'] = 'submit'
+ request = self.encode_request(d.items(), [])
+ return self.send_request(request)
+
+ def _reader(self, name, stream, outbuf):
+ """
+        Thread runner for reading lines of output from a subprocess into a
+        buffer.
+
+ :param name: The logical name of the stream (used for logging only).
+        :param stream: The stream to read from. This will typically be a pipe
+ connected to the output stream of a subprocess.
+ :param outbuf: The list to append the read lines to.
+ """
+ while True:
+ s = stream.readline()
+ if not s:
+ break
+ s = s.decode('utf-8').rstrip()
+ outbuf.append(s)
+ logger.debug('%s: %s' % (name, s))
+ stream.close()
+
+ def get_sign_command(self, filename, signer, sign_password,
+ keystore=None):
+ """
+ Return a suitable command for signing a file.
+
+ :param filename: The pathname to the file to be signed.
+ :param signer: The identifier of the signer of the file.
+ :param sign_password: The passphrase for the signer's
+ private key used for signing.
+ :param keystore: The path to a directory which contains the keys
+ used in verification. If not specified, the
+ instance's ``gpg_home`` attribute is used instead.
+ :return: The signing command as a list suitable to be
+ passed to :class:`subprocess.Popen`.
+ """
+ cmd = [self.gpg, '--status-fd', '2', '--no-tty']
+ if keystore is None:
+ keystore = self.gpg_home
+ if keystore:
+ cmd.extend(['--homedir', keystore])
+ if sign_password is not None:
+ cmd.extend(['--batch', '--passphrase-fd', '0'])
+ td = tempfile.mkdtemp()
+ sf = os.path.join(td, os.path.basename(filename) + '.asc')
+ cmd.extend(['--detach-sign', '--armor', '--local-user',
+ signer, '--output', sf, filename])
+ logger.debug('invoking: %s', ' '.join(cmd))
+ return cmd, sf
+
+ def run_command(self, cmd, input_data=None):
+ """
+        Run a command in a child process, passing it any input data specified.
+
+ :param cmd: The command to run.
+ :param input_data: If specified, this must be a byte string containing
+ data to be sent to the child process.
+ :return: A tuple consisting of the subprocess' exit code, a list of
+ lines read from the subprocess' ``stdout``, and a list of
+ lines read from the subprocess' ``stderr``.
+ """
+ kwargs = {
+ 'stdout': subprocess.PIPE,
+ 'stderr': subprocess.PIPE,
+ }
+ if input_data is not None:
+ kwargs['stdin'] = subprocess.PIPE
+ stdout = []
+ stderr = []
+ p = subprocess.Popen(cmd, **kwargs)
+ # We don't use communicate() here because we may need to
+ # get clever with interacting with the command
+ t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
+ t1.start()
+ t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
+ t2.start()
+ if input_data is not None:
+ p.stdin.write(input_data)
+ p.stdin.close()
+
+ p.wait()
+ t1.join()
+ t2.join()
+ return p.returncode, stdout, stderr
+
+ def sign_file(self, filename, signer, sign_password, keystore=None):
+ """
+ Sign a file.
+
+ :param filename: The pathname to the file to be signed.
+ :param signer: The identifier of the signer of the file.
+ :param sign_password: The passphrase for the signer's
+ private key used for signing.
+ :param keystore: The path to a directory which contains the keys
+ used in signing. If not specified, the instance's
+ ``gpg_home`` attribute is used instead.
+ :return: The absolute pathname of the file where the signature is
+ stored.
+ """
+ cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
+ keystore)
+ rc, stdout, stderr = self.run_command(cmd,
+ sign_password.encode('utf-8'))
+ if rc != 0:
+ raise DistlibException('sign command failed with error '
+ 'code %s' % rc)
+ return sig_file
+
+ def upload_file(self, metadata, filename, signer=None, sign_password=None,
+ filetype='sdist', pyversion='source', keystore=None):
+ """
+ Upload a release file to the index.
+
+ :param metadata: A :class:`Metadata` instance defining at least a name
+ and version number for the file to be uploaded.
+ :param filename: The pathname of the file to be uploaded.
+ :param signer: The identifier of the signer of the file.
+ :param sign_password: The passphrase for the signer's
+ private key used for signing.
+ :param filetype: The type of the file being uploaded. This is the
+ distutils command which produced that file, e.g.
+ ``sdist`` or ``bdist_wheel``.
+ :param pyversion: The version of Python which the release relates
+ to. For code compatible with any Python, this would
+ be ``source``, otherwise it would be e.g. ``3.2``.
+ :param keystore: The path to a directory which contains the keys
+ used in signing. If not specified, the instance's
+ ``gpg_home`` attribute is used instead.
+ :return: The HTTP response received from PyPI upon submission of the
+ request.
+ """
+ self.check_credentials()
+ if not os.path.exists(filename):
+ raise DistlibException('not found: %s' % filename)
+ metadata.validate()
+ d = metadata.todict()
+ sig_file = None
+ if signer:
+ if not self.gpg:
+ logger.warning('no signing program available - not signed')
+ else:
+ sig_file = self.sign_file(filename, signer, sign_password,
+ keystore)
+ with open(filename, 'rb') as f:
+ file_data = f.read()
+ md5_digest = hashlib.md5(file_data).hexdigest()
+ sha256_digest = hashlib.sha256(file_data).hexdigest()
+ d.update({
+ ':action': 'file_upload',
+ 'protocol_version': '1',
+ 'filetype': filetype,
+ 'pyversion': pyversion,
+ 'md5_digest': md5_digest,
+ 'sha256_digest': sha256_digest,
+ })
+ files = [('content', os.path.basename(filename), file_data)]
+ if sig_file:
+ with open(sig_file, 'rb') as f:
+ sig_data = f.read()
+ files.append(('gpg_signature', os.path.basename(sig_file),
+ sig_data))
+ shutil.rmtree(os.path.dirname(sig_file))
+ request = self.encode_request(d.items(), files)
+ return self.send_request(request)
+
+ def upload_documentation(self, metadata, doc_dir):
+ """
+ Upload documentation to the index.
+
+ :param metadata: A :class:`Metadata` instance defining at least a name
+ and version number for the documentation to be
+ uploaded.
+ :param doc_dir: The pathname of the directory which contains the
+ documentation. This should be the directory that
+ contains the ``index.html`` for the documentation.
+ :return: The HTTP response received from PyPI upon submission of the
+ request.
+ """
+ self.check_credentials()
+ if not os.path.isdir(doc_dir):
+ raise DistlibException('not a directory: %r' % doc_dir)
+ fn = os.path.join(doc_dir, 'index.html')
+ if not os.path.exists(fn):
+ raise DistlibException('not found: %r' % fn)
+ metadata.validate()
+ name, version = metadata.name, metadata.version
+ zip_data = zip_dir(doc_dir).getvalue()
+ fields = [(':action', 'doc_upload'),
+ ('name', name), ('version', version)]
+ files = [('content', name, zip_data)]
+ request = self.encode_request(fields, files)
+ return self.send_request(request)
+
+ def get_verify_command(self, signature_filename, data_filename,
+ keystore=None):
+ """
+ Return a suitable command for verifying a file.
+
+ :param signature_filename: The pathname to the file containing the
+ signature.
+ :param data_filename: The pathname to the file containing the
+ signed data.
+ :param keystore: The path to a directory which contains the keys
+ used in verification. If not specified, the
+ instance's ``gpg_home`` attribute is used instead.
+ :return: The verifying command as a list suitable to be
+ passed to :class:`subprocess.Popen`.
+ """
+ cmd = [self.gpg, '--status-fd', '2', '--no-tty']
+ if keystore is None:
+ keystore = self.gpg_home
+ if keystore:
+ cmd.extend(['--homedir', keystore])
+ cmd.extend(['--verify', signature_filename, data_filename])
+ logger.debug('invoking: %s', ' '.join(cmd))
+ return cmd
+
+ def verify_signature(self, signature_filename, data_filename,
+ keystore=None):
+ """
+ Verify a signature for a file.
+
+ :param signature_filename: The pathname to the file containing the
+ signature.
+ :param data_filename: The pathname to the file containing the
+ signed data.
+ :param keystore: The path to a directory which contains the keys
+ used in verification. If not specified, the
+ instance's ``gpg_home`` attribute is used instead.
+ :return: True if the signature was verified, else False.
+ """
+ if not self.gpg:
+ raise DistlibException('verification unavailable because gpg '
+ 'unavailable')
+ cmd = self.get_verify_command(signature_filename, data_filename,
+ keystore)
+ rc, stdout, stderr = self.run_command(cmd)
+ if rc not in (0, 1):
+ raise DistlibException('verify command failed with error '
+ 'code %s' % rc)
+ return rc == 0
+
+ def download_file(self, url, destfile, digest=None, reporthook=None):
+ """
+ This is a convenience method for downloading a file from an URL.
+ Normally, this will be a file from the index, though currently
+ no check is made for this (i.e. a file can be downloaded from
+ anywhere).
+
+ The method is just like the :func:`urlretrieve` function in the
+ standard library, except that it allows digest computation to be
+ done during download and checking that the downloaded data
+        matches any expected value.
+
+ :param url: The URL of the file to be downloaded (assumed to be
+ available via an HTTP GET request).
+ :param destfile: The pathname where the downloaded file is to be
+ saved.
+ :param digest: If specified, this must be a (hasher, value)
+ tuple, where hasher is the algorithm used (e.g.
+ ``'md5'``) and ``value`` is the expected value.
+ :param reporthook: The same as for :func:`urlretrieve` in the
+ standard library.
+ """
+ if digest is None:
+ digester = None
+ logger.debug('No digest specified')
+ else:
+ if isinstance(digest, (list, tuple)):
+ hasher, digest = digest
+ else:
+ hasher = 'md5'
+ digester = getattr(hashlib, hasher)()
+ logger.debug('Digest specified: %s' % digest)
+ # The following code is equivalent to urlretrieve.
+ # We need to do it this way so that we can compute the
+ # digest of the file as we go.
+ with open(destfile, 'wb') as dfp:
+ # addinfourl is not a context manager on 2.x
+ # so we have to use try/finally
+ sfp = self.send_request(Request(url))
+ try:
+ headers = sfp.info()
+ blocksize = 8192
+ size = -1
+ read = 0
+ blocknum = 0
+ if "content-length" in headers:
+ size = int(headers["Content-Length"])
+ if reporthook:
+ reporthook(blocknum, blocksize, size)
+ while True:
+ block = sfp.read(blocksize)
+ if not block:
+ break
+ read += len(block)
+ dfp.write(block)
+ if digester:
+ digester.update(block)
+ blocknum += 1
+ if reporthook:
+ reporthook(blocknum, blocksize, size)
+ finally:
+ sfp.close()
+
+ # check that we got the whole file, if we can
+ if size >= 0 and read < size:
+ raise DistlibException(
+ 'retrieval incomplete: got only %d out of %d bytes'
+ % (read, size))
+ # if we have a digest, it must match.
+ if digester:
+ actual = digester.hexdigest()
+ if digest != actual:
+ raise DistlibException('%s digest mismatch for %s: expected '
+ '%s, got %s' % (hasher, destfile,
+ digest, actual))
+ logger.debug('Digest verified: %s', digest)
+
+ def send_request(self, req):
+ """
+ Send a standard library :class:`Request` to PyPI and return its
+ response.
+
+ :param req: The request to send.
+ :return: The HTTP response from PyPI (a standard library HTTPResponse).
+ """
+ handlers = []
+ if self.password_handler:
+ handlers.append(self.password_handler)
+ if self.ssl_verifier:
+ handlers.append(self.ssl_verifier)
+ opener = build_opener(*handlers)
+ return opener.open(req)
+
+ def encode_request(self, fields, files):
+ """
+ Encode fields and files for posting to an HTTP server.
+
+ :param fields: The fields to send as a list of (fieldname, value)
+ tuples.
+        :param files: The files to send as a list of (fieldname, filename,
+                      file_bytes) tuples.
+ """
+ # Adapted from packaging, which in turn was adapted from
+ # http://code.activestate.com/recipes/146306
+
+ parts = []
+ boundary = self.boundary
+ for k, values in fields:
+ if not isinstance(values, (list, tuple)):
+ values = [values]
+
+ for v in values:
+ parts.extend((
+ b'--' + boundary,
+ ('Content-Disposition: form-data; name="%s"' %
+ k).encode('utf-8'),
+ b'',
+ v.encode('utf-8')))
+ for key, filename, value in files:
+ parts.extend((
+ b'--' + boundary,
+ ('Content-Disposition: form-data; name="%s"; filename="%s"' %
+ (key, filename)).encode('utf-8'),
+ b'',
+ value))
+
+ parts.extend((b'--' + boundary + b'--', b''))
+
+ body = b'\r\n'.join(parts)
+ ct = b'multipart/form-data; boundary=' + boundary
+ headers = {
+ 'Content-type': ct,
+ 'Content-length': str(len(body))
+ }
+ return Request(self.url, body, headers)
+
+ def search(self, terms, operator=None):
+ if isinstance(terms, string_types):
+ terms = {'name': terms}
+ rpc_proxy = ServerProxy(self.url, timeout=3.0)
+ try:
+ return rpc_proxy.search(terms, operator or 'and')
+ finally:
+ rpc_proxy('close')()
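+
+
+if __name__ == '__main__':  # pragma: no cover
+    # A minimal, illustrative sketch (the index URL, action and field values
+    # are placeholders): build a multipart upload request locally without
+    # sending it.
+    pi = PackageIndex('https://pypi.org/pypi')
+    req = pi.encode_request([(':action', 'file_upload'), ('name', 'demo')],
+                            [('content', 'demo-1.0.tar.gz', b'dummy bytes')])
+    print(req.get_full_url())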
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py
new file mode 100644
index 0000000000..12a1d06351
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py
@@ -0,0 +1,1302 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2015 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+
+import gzip
+from io import BytesIO
+import json
+import logging
+import os
+import posixpath
+import re
+try:
+ import threading
+except ImportError: # pragma: no cover
+ import dummy_threading as threading
+import zlib
+
+from . import DistlibException
+from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url,
+ queue, quote, unescape, string_types, build_opener,
+ HTTPRedirectHandler as BaseRedirectHandler, text_type,
+ Request, HTTPError, URLError)
+from .database import Distribution, DistributionPath, make_dist
+from .metadata import Metadata, MetadataInvalidError
+from .util import (cached_property, parse_credentials, ensure_slash,
+ split_filename, get_project_data, parse_requirement,
+ parse_name_and_version, ServerProxy, normalize_name)
+from .version import get_scheme, UnsupportedVersionError
+from .wheel import Wheel, is_compatible
+
+logger = logging.getLogger(__name__)
+
+HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)')
+CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I)
+HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml')
+DEFAULT_INDEX = 'https://pypi.org/pypi'
+
+def get_all_distribution_names(url=None):
+ """
+ Return all distribution names known by an index.
+ :param url: The URL of the index.
+ :return: A list of all known distribution names.
+ """
+ if url is None:
+ url = DEFAULT_INDEX
+ client = ServerProxy(url, timeout=3.0)
+ try:
+ return client.list_packages()
+ finally:
+ client('close')()
+
+class RedirectHandler(BaseRedirectHandler):
+ """
+ A class to work around a bug in some Python 3.2.x releases.
+ """
+ # There's a bug in the base version for some 3.2.x
+ # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header
+ # returns e.g. /abc, it bails because it says the scheme ''
+ # is bogus, when actually it should use the request's
+ # URL for the scheme. See Python issue #13696.
+ def http_error_302(self, req, fp, code, msg, headers):
+ # Some servers (incorrectly) return multiple Location headers
+ # (so probably same goes for URI). Use first header.
+ newurl = None
+ for key in ('location', 'uri'):
+ if key in headers:
+ newurl = headers[key]
+ break
+ if newurl is None: # pragma: no cover
+ return
+ urlparts = urlparse(newurl)
+ if urlparts.scheme == '':
+ newurl = urljoin(req.get_full_url(), newurl)
+ if hasattr(headers, 'replace_header'):
+ headers.replace_header(key, newurl)
+ else:
+ headers[key] = newurl
+ return BaseRedirectHandler.http_error_302(self, req, fp, code, msg,
+ headers)
+
+ http_error_301 = http_error_303 = http_error_307 = http_error_302
+
+class Locator(object):
+ """
+ A base class for locators - things that locate distributions.
+ """
+ source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz')
+ binary_extensions = ('.egg', '.exe', '.whl')
+ excluded_extensions = ('.pdf',)
+
+ # A list of tags indicating which wheels you want to match. The default
+ # value of None matches against the tags compatible with the running
+ # Python. If you want to match other values, set wheel_tags on a locator
+ # instance to a list of tuples (pyver, abi, arch) which you want to match.
+ wheel_tags = None
+
+ downloadable_extensions = source_extensions + ('.whl',)
+
+ def __init__(self, scheme='default'):
+ """
+ Initialise an instance.
+ :param scheme: Because locators look for most recent versions, they
+ need to know the version scheme to use. This specifies
+ the current PEP-recommended scheme - use ``'legacy'``
+ if you need to support existing distributions on PyPI.
+ """
+ self._cache = {}
+ self.scheme = scheme
+ # Because of bugs in some of the handlers on some of the platforms,
+ # we use our own opener rather than just using urlopen.
+ self.opener = build_opener(RedirectHandler())
+ # If get_project() is called from locate(), the matcher instance
+ # is set from the requirement passed to locate(). See issue #18 for
+ # why this can be useful to know.
+ self.matcher = None
+ self.errors = queue.Queue()
+
+ def get_errors(self):
+ """
+ Return any errors which have occurred.
+ """
+ result = []
+ while not self.errors.empty(): # pragma: no cover
+ try:
+ e = self.errors.get(False)
+ result.append(e)
+            except queue.Empty:
+ continue
+ self.errors.task_done()
+ return result
+
+ def clear_errors(self):
+ """
+ Clear any errors which may have been logged.
+ """
+ # Just get the errors and throw them away
+ self.get_errors()
+
+ def clear_cache(self):
+ self._cache.clear()
+
+ def _get_scheme(self):
+ return self._scheme
+
+ def _set_scheme(self, value):
+ self._scheme = value
+
+ scheme = property(_get_scheme, _set_scheme)
+
+ def _get_project(self, name):
+ """
+ For a given project, get a dictionary mapping available versions to Distribution
+ instances.
+
+ This should be implemented in subclasses.
+
+ If called from a locate() request, self.matcher will be set to a
+ matcher for the requirement to satisfy, otherwise it will be None.
+ """
+ raise NotImplementedError('Please implement in the subclass')
+
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ raise NotImplementedError('Please implement in the subclass')
+
+ def get_project(self, name):
+ """
+ For a given project, get a dictionary mapping available versions to Distribution
+ instances.
+
+ This calls _get_project to do all the work, and just implements a caching layer on top.
+ """
+ if self._cache is None: # pragma: no cover
+ result = self._get_project(name)
+ elif name in self._cache:
+ result = self._cache[name]
+ else:
+ self.clear_errors()
+ result = self._get_project(name)
+ self._cache[name] = result
+ return result
+
+ def score_url(self, url):
+ """
+ Give an url a score which can be used to choose preferred URLs
+ for a given project release.
+ """
+ t = urlparse(url)
+ basename = posixpath.basename(t.path)
+ compatible = True
+ is_wheel = basename.endswith('.whl')
+ is_downloadable = basename.endswith(self.downloadable_extensions)
+ if is_wheel:
+ compatible = is_compatible(Wheel(basename), self.wheel_tags)
+ return (t.scheme == 'https', 'pypi.org' in t.netloc,
+ is_downloadable, is_wheel, compatible, basename)
+
+ def prefer_url(self, url1, url2):
+ """
+ Choose one of two URLs where both are candidates for distribution
+ archives for the same version of a distribution (for example,
+ .tar.gz vs. zip).
+
+ The current implementation favours https:// URLs over http://, archives
+ from PyPI over those from other locations, wheel compatibility (if a
+ wheel) and then the archive name.
+ """
+ result = url2
+ if url1:
+ s1 = self.score_url(url1)
+ s2 = self.score_url(url2)
+ if s1 > s2:
+ result = url1
+ if result != url2:
+ logger.debug('Not replacing %r with %r', url1, url2)
+ else:
+ logger.debug('Replacing %r with %r', url1, url2)
+ return result
+
+ def split_filename(self, filename, project_name):
+ """
+        Attempt to split a filename into project name, version and Python version.
+ """
+ return split_filename(filename, project_name)
+
+ def convert_url_to_download_info(self, url, project_name):
+ """
+ See if a URL is a candidate for a download URL for a project (the URL
+ has typically been scraped from an HTML page).
+
+ If it is, a dictionary is returned with keys "name", "version",
+ "filename" and "url"; otherwise, None is returned.
+ """
+ def same_project(name1, name2):
+ return normalize_name(name1) == normalize_name(name2)
+
+ result = None
+ scheme, netloc, path, params, query, frag = urlparse(url)
+ if frag.lower().startswith('egg='): # pragma: no cover
+ logger.debug('%s: version hint in fragment: %r',
+ project_name, frag)
+ m = HASHER_HASH.match(frag)
+ if m:
+ algo, digest = m.groups()
+ else:
+ algo, digest = None, None
+ origpath = path
+ if path and path[-1] == '/': # pragma: no cover
+ path = path[:-1]
+ if path.endswith('.whl'):
+ try:
+ wheel = Wheel(path)
+ if not is_compatible(wheel, self.wheel_tags):
+ logger.debug('Wheel not compatible: %s', path)
+ else:
+ if project_name is None:
+ include = True
+ else:
+ include = same_project(wheel.name, project_name)
+ if include:
+ result = {
+ 'name': wheel.name,
+ 'version': wheel.version,
+ 'filename': wheel.filename,
+ 'url': urlunparse((scheme, netloc, origpath,
+ params, query, '')),
+ 'python-version': ', '.join(
+ ['.'.join(list(v[2:])) for v in wheel.pyver]),
+ }
+ except Exception as e: # pragma: no cover
+ logger.warning('invalid path for wheel: %s', path)
+ elif not path.endswith(self.downloadable_extensions): # pragma: no cover
+ logger.debug('Not downloadable: %s', path)
+ else: # downloadable extension
+ path = filename = posixpath.basename(path)
+ for ext in self.downloadable_extensions:
+ if path.endswith(ext):
+ path = path[:-len(ext)]
+ t = self.split_filename(path, project_name)
+ if not t: # pragma: no cover
+ logger.debug('No match for project/version: %s', path)
+ else:
+ name, version, pyver = t
+ if not project_name or same_project(project_name, name):
+ result = {
+ 'name': name,
+ 'version': version,
+ 'filename': filename,
+ 'url': urlunparse((scheme, netloc, origpath,
+ params, query, '')),
+ #'packagetype': 'sdist',
+ }
+ if pyver: # pragma: no cover
+ result['python-version'] = pyver
+ break
+ if result and algo:
+ result['%s_digest' % algo] = digest
+ return result
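+
+    # For illustration (URL invented): an sdist link such as
+    #   https://example.com/packages/demo-1.0.tar.gz#sha256=0123abcd
+    # yields a dictionary along the lines of
+    #   {'name': 'demo', 'version': '1.0', 'filename': 'demo-1.0.tar.gz',
+    #    'url': 'https://example.com/packages/demo-1.0.tar.gz',
+    #    'sha256_digest': '0123abcd'}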
+
+ def _get_digest(self, info):
+ """
+ Get a digest from a dictionary by looking at a "digests" dictionary
+ or keys of the form 'algo_digest'.
+
+ Returns a 2-tuple (algo, digest) if found, else None. Currently
+ looks only for SHA256, then MD5.
+ """
+ result = None
+ if 'digests' in info:
+ digests = info['digests']
+ for algo in ('sha256', 'md5'):
+ if algo in digests:
+ result = (algo, digests[algo])
+ break
+ if not result:
+ for algo in ('sha256', 'md5'):
+ key = '%s_digest' % algo
+ if key in info:
+ result = (algo, info[key])
+ break
+ return result
+
+ def _update_version_data(self, result, info):
+ """
+ Update a result dictionary (the final result from _get_project) with a
+ dictionary for a specific version, which typically holds information
+ gleaned from a filename or URL for an archive for the distribution.
+ """
+ name = info.pop('name')
+ version = info.pop('version')
+ if version in result:
+ dist = result[version]
+ md = dist.metadata
+ else:
+ dist = make_dist(name, version, scheme=self.scheme)
+ md = dist.metadata
+ dist.digest = digest = self._get_digest(info)
+ url = info['url']
+ result['digests'][url] = digest
+ if md.source_url != info['url']:
+ md.source_url = self.prefer_url(md.source_url, url)
+ result['urls'].setdefault(version, set()).add(url)
+ dist.locator = self
+ result[version] = dist
+
+ def locate(self, requirement, prereleases=False):
+ """
+ Find the most recent distribution which matches the given
+ requirement.
+
+ :param requirement: A requirement of the form 'foo (1.0)' or perhaps
+ 'foo (>= 1.0, < 2.0, != 1.3)'
+ :param prereleases: If ``True``, allow pre-release versions
+ to be located. Otherwise, pre-release versions
+ are not returned.
+ :return: A :class:`Distribution` instance, or ``None`` if no such
+ distribution could be located.
+ """
+ result = None
+ r = parse_requirement(requirement)
+ if r is None: # pragma: no cover
+ raise DistlibException('Not a valid requirement: %r' % requirement)
+ scheme = get_scheme(self.scheme)
+ self.matcher = matcher = scheme.matcher(r.requirement)
+ logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
+ versions = self.get_project(r.name)
+ if len(versions) > 2: # urls and digests keys are present
+ # sometimes, versions are invalid
+ slist = []
+ vcls = matcher.version_class
+ for k in versions:
+ if k in ('urls', 'digests'):
+ continue
+ try:
+ if not matcher.match(k):
+ logger.debug('%s did not match %r', matcher, k)
+ else:
+ if prereleases or not vcls(k).is_prerelease:
+ slist.append(k)
+ else:
+ logger.debug('skipping pre-release '
+ 'version %s of %s', k, matcher.name)
+ except Exception: # pragma: no cover
+ logger.warning('error matching %s with %r', matcher, k)
+ pass # slist.append(k)
+ if len(slist) > 1:
+ slist = sorted(slist, key=scheme.key)
+ if slist:
+ logger.debug('sorted list: %s', slist)
+ version = slist[-1]
+ result = versions[version]
+ if result:
+ if r.extras:
+ result.extras = r.extras
+ result.download_urls = versions.get('urls', {}).get(version, set())
+ d = {}
+ sd = versions.get('digests', {})
+ for url in result.download_urls:
+ if url in sd: # pragma: no cover
+ d[url] = sd[url]
+ result.digests = d
+ self.matcher = None
+ return result
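+
+    # Typical driving code for a concrete locator subclass (illustrative):
+    #
+    #   loc.locate('demo (>= 1.0, < 2.0)')
+    #
+    # returns the newest matching Distribution (with download_urls and
+    # digests filled in) or None.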
+
+
+class PyPIRPCLocator(Locator):
+ """
+ This locator uses XML-RPC to locate distributions. It therefore
+ cannot be used with simple mirrors (that only mirror file content).
+ """
+ def __init__(self, url, **kwargs):
+ """
+ Initialise an instance.
+
+ :param url: The URL to use for XML-RPC.
+ :param kwargs: Passed to the superclass constructor.
+ """
+ super(PyPIRPCLocator, self).__init__(**kwargs)
+ self.base_url = url
+ self.client = ServerProxy(url, timeout=3.0)
+
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ return set(self.client.list_packages())
+
+ def _get_project(self, name):
+ result = {'urls': {}, 'digests': {}}
+ versions = self.client.package_releases(name, True)
+ for v in versions:
+ urls = self.client.release_urls(name, v)
+ data = self.client.release_data(name, v)
+ metadata = Metadata(scheme=self.scheme)
+ metadata.name = data['name']
+ metadata.version = data['version']
+ metadata.license = data.get('license')
+ metadata.keywords = data.get('keywords', [])
+ metadata.summary = data.get('summary')
+ dist = Distribution(metadata)
+ if urls:
+ info = urls[0]
+ metadata.source_url = info['url']
+ dist.digest = self._get_digest(info)
+ dist.locator = self
+ result[v] = dist
+ for info in urls:
+ url = info['url']
+ digest = self._get_digest(info)
+ result['urls'].setdefault(v, set()).add(url)
+ result['digests'][url] = digest
+ return result
+
+class PyPIJSONLocator(Locator):
+ """
+ This locator uses PyPI's JSON interface. It's very limited in functionality
+ and probably not worth using.
+ """
+ def __init__(self, url, **kwargs):
+ super(PyPIJSONLocator, self).__init__(**kwargs)
+ self.base_url = ensure_slash(url)
+
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ raise NotImplementedError('Not available from this locator')
+
+ def _get_project(self, name):
+ result = {'urls': {}, 'digests': {}}
+ url = urljoin(self.base_url, '%s/json' % quote(name))
+ try:
+ resp = self.opener.open(url)
+ data = resp.read().decode() # for now
+ d = json.loads(data)
+ md = Metadata(scheme=self.scheme)
+ data = d['info']
+ md.name = data['name']
+ md.version = data['version']
+ md.license = data.get('license')
+ md.keywords = data.get('keywords', [])
+ md.summary = data.get('summary')
+ dist = Distribution(md)
+ dist.locator = self
+ urls = d['urls']
+ result[md.version] = dist
+ for info in d['urls']:
+ url = info['url']
+ dist.download_urls.add(url)
+ dist.digests[url] = self._get_digest(info)
+ result['urls'].setdefault(md.version, set()).add(url)
+ result['digests'][url] = self._get_digest(info)
+ # Now get other releases
+ for version, infos in d['releases'].items():
+ if version == md.version:
+ continue # already done
+ omd = Metadata(scheme=self.scheme)
+ omd.name = md.name
+ omd.version = version
+ odist = Distribution(omd)
+ odist.locator = self
+ result[version] = odist
+ for info in infos:
+ url = info['url']
+ odist.download_urls.add(url)
+ odist.digests[url] = self._get_digest(info)
+ result['urls'].setdefault(version, set()).add(url)
+ result['digests'][url] = self._get_digest(info)
+# for info in urls:
+# md.source_url = info['url']
+# dist.digest = self._get_digest(info)
+# dist.locator = self
+# for info in urls:
+# url = info['url']
+# result['urls'].setdefault(md.version, set()).add(url)
+# result['digests'][url] = self._get_digest(info)
+ except Exception as e:
+ self.errors.put(text_type(e))
+ logger.exception('JSON fetch failed: %s', e)
+ return result
+
+
+class Page(object):
+ """
+ This class represents a scraped HTML page.
+ """
+ # The following slightly hairy-looking regex just looks for the contents of
+ # an anchor link, which has an attribute "href" either immediately preceded
+ # or immediately followed by a "rel" attribute. The attribute values can be
+ # declared with double quotes, single quotes or no quotes - which leads to
+ # the length of the expression.
+ _href = re.compile("""
+(rel\\s*=\\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\\s\n]*))\\s+)?
+href\\s*=\\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\\s\n]*))
+(\\s+rel\\s*=\\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\\s\n]*)))?
+""", re.I | re.S | re.X)
+ _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S)
+
+ def __init__(self, data, url):
+ """
+ Initialise an instance with the Unicode page contents and the URL they
+ came from.
+ """
+ self.data = data
+ self.base_url = self.url = url
+ m = self._base.search(self.data)
+ if m:
+ self.base_url = m.group(1)
+
+ _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
+
+ @cached_property
+ def links(self):
+ """
+ Return the URLs of all the links on a page together with information
+ about their "rel" attribute, for determining which ones to treat as
+ downloads and which ones to queue for further scraping.
+ """
+ def clean(url):
+ "Tidy up an URL."
+ scheme, netloc, path, params, query, frag = urlparse(url)
+ return urlunparse((scheme, netloc, quote(path),
+ params, query, frag))
+
+ result = set()
+ for match in self._href.finditer(self.data):
+ d = match.groupdict('')
+ rel = (d['rel1'] or d['rel2'] or d['rel3'] or
+ d['rel4'] or d['rel5'] or d['rel6'])
+ url = d['url1'] or d['url2'] or d['url3']
+ url = urljoin(self.base_url, url)
+ url = unescape(url)
+ url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url)
+ result.add((url, rel))
+ # We sort the result, hoping to bring the most recent versions
+ # to the front
+ result = sorted(result, key=lambda t: t[0], reverse=True)
+ return result
+
+
+class SimpleScrapingLocator(Locator):
+ """
+ A locator which scrapes HTML pages to locate downloads for a distribution.
+ This runs multiple threads to do the I/O; performance is at least as good
+ as pip's PackageFinder, which works in an analogous fashion.
+ """
+
+ # These are used to deal with various Content-Encoding schemes.
+ decoders = {
+ 'deflate': zlib.decompress,
+        'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(),
+ 'none': lambda b: b,
+ }
+
+ def __init__(self, url, timeout=None, num_workers=10, **kwargs):
+ """
+ Initialise an instance.
+ :param url: The root URL to use for scraping.
+ :param timeout: The timeout, in seconds, to be applied to requests.
+ This defaults to ``None`` (no timeout specified).
+        :param num_workers: The number of worker threads to use for I/O.
+                            This defaults to 10.
+ :param kwargs: Passed to the superclass.
+ """
+ super(SimpleScrapingLocator, self).__init__(**kwargs)
+ self.base_url = ensure_slash(url)
+ self.timeout = timeout
+ self._page_cache = {}
+ self._seen = set()
+ self._to_fetch = queue.Queue()
+ self._bad_hosts = set()
+ self.skip_externals = False
+ self.num_workers = num_workers
+ self._lock = threading.RLock()
+ # See issue #45: we need to be resilient when the locator is used
+ # in a thread, e.g. with concurrent.futures. We can't use self._lock
+ # as it is for coordinating our internal threads - the ones created
+ # in _prepare_threads.
+ self._gplock = threading.RLock()
+ self.platform_check = False # See issue #112
+
+ def _prepare_threads(self):
+ """
+ Threads are created only when get_project is called, and terminate
+ before it returns. They are there primarily to parallelise I/O (i.e.
+ fetching web pages).
+ """
+ self._threads = []
+ for i in range(self.num_workers):
+ t = threading.Thread(target=self._fetch)
+ t.setDaemon(True)
+ t.start()
+ self._threads.append(t)
+
+ def _wait_threads(self):
+ """
+ Tell all the threads to terminate (by sending a sentinel value) and
+ wait for them to do so.
+ """
+ # Note that you need two loops, since you can't say which
+ # thread will get each sentinel
+ for t in self._threads:
+ self._to_fetch.put(None) # sentinel
+ for t in self._threads:
+ t.join()
+ self._threads = []
+
+ def _get_project(self, name):
+ result = {'urls': {}, 'digests': {}}
+ with self._gplock:
+ self.result = result
+ self.project_name = name
+ url = urljoin(self.base_url, '%s/' % quote(name))
+ self._seen.clear()
+ self._page_cache.clear()
+ self._prepare_threads()
+ try:
+ logger.debug('Queueing %s', url)
+ self._to_fetch.put(url)
+ self._to_fetch.join()
+ finally:
+ self._wait_threads()
+ del self.result
+ return result
+
+ platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|'
+ r'win(32|_amd64)|macosx_?\d+)\b', re.I)
+
+ def _is_platform_dependent(self, url):
+ """
+ Does an URL refer to a platform-specific download?
+ """
+ return self.platform_dependent.search(url)
+
+ def _process_download(self, url):
+ """
+ See if an URL is a suitable download for a project.
+
+ If it is, register information in the result dictionary (for
+ _get_project) about the specific version it's for.
+
+ Note that the return value isn't actually used other than as a boolean
+ value.
+ """
+ if self.platform_check and self._is_platform_dependent(url):
+ info = None
+ else:
+ info = self.convert_url_to_download_info(url, self.project_name)
+ logger.debug('process_download: %s -> %s', url, info)
+ if info:
+ with self._lock: # needed because self.result is shared
+ self._update_version_data(self.result, info)
+ return info
+
+ def _should_queue(self, link, referrer, rel):
+ """
+ Determine whether a link URL from a referring page and with a
+ particular "rel" attribute should be queued for scraping.
+ """
+ scheme, netloc, path, _, _, _ = urlparse(link)
+ if path.endswith(self.source_extensions + self.binary_extensions +
+ self.excluded_extensions):
+ result = False
+ elif self.skip_externals and not link.startswith(self.base_url):
+ result = False
+ elif not referrer.startswith(self.base_url):
+ result = False
+ elif rel not in ('homepage', 'download'):
+ result = False
+ elif scheme not in ('http', 'https', 'ftp'):
+ result = False
+ elif self._is_platform_dependent(link):
+ result = False
+ else:
+ host = netloc.split(':', 1)[0]
+ if host.lower() == 'localhost':
+ result = False
+ else:
+ result = True
+ logger.debug('should_queue: %s (%s) from %s -> %s', link, rel,
+ referrer, result)
+ return result
+
+ def _fetch(self):
+ """
+ Get a URL to fetch from the work queue, get the HTML page, examine its
+ links for download candidates and candidates for further scraping.
+
+ This is a handy method to run in a thread.
+ """
+ while True:
+ url = self._to_fetch.get()
+ try:
+ if url:
+ page = self.get_page(url)
+ if page is None: # e.g. after an error
+ continue
+ for link, rel in page.links:
+ if link not in self._seen:
+ try:
+ self._seen.add(link)
+ if (not self._process_download(link) and
+ self._should_queue(link, url, rel)):
+ logger.debug('Queueing %s from %s', link, url)
+ self._to_fetch.put(link)
+ except MetadataInvalidError: # e.g. invalid versions
+ pass
+ except Exception as e: # pragma: no cover
+ self.errors.put(text_type(e))
+ finally:
+ # always do this, to avoid hangs :-)
+ self._to_fetch.task_done()
+ if not url:
+ #logger.debug('Sentinel seen, quitting.')
+ break
+
+ def get_page(self, url):
+ """
+ Get the HTML for an URL, possibly from an in-memory cache.
+
+ XXX TODO Note: this cache is never actually cleared. It's assumed that
+ the data won't get stale over the lifetime of a locator instance (not
+ necessarily true for the default_locator).
+ """
+ # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
+ scheme, netloc, path, _, _, _ = urlparse(url)
+ if scheme == 'file' and os.path.isdir(url2pathname(path)):
+ url = urljoin(ensure_slash(url), 'index.html')
+
+ if url in self._page_cache:
+ result = self._page_cache[url]
+ logger.debug('Returning %s from cache: %s', url, result)
+ else:
+ host = netloc.split(':', 1)[0]
+ result = None
+ if host in self._bad_hosts:
+ logger.debug('Skipping %s due to bad host %s', url, host)
+ else:
+ req = Request(url, headers={'Accept-encoding': 'identity'})
+ try:
+ logger.debug('Fetching %s', url)
+ resp = self.opener.open(req, timeout=self.timeout)
+ logger.debug('Fetched %s', url)
+ headers = resp.info()
+ content_type = headers.get('Content-Type', '')
+ if HTML_CONTENT_TYPE.match(content_type):
+ final_url = resp.geturl()
+ data = resp.read()
+ encoding = headers.get('Content-Encoding')
+ if encoding:
+ decoder = self.decoders[encoding] # fail if not found
+ data = decoder(data)
+ encoding = 'utf-8'
+ m = CHARSET.search(content_type)
+ if m:
+ encoding = m.group(1)
+ try:
+ data = data.decode(encoding)
+ except UnicodeError: # pragma: no cover
+ data = data.decode('latin-1') # fallback
+ result = Page(data, final_url)
+ self._page_cache[final_url] = result
+ except HTTPError as e:
+ if e.code != 404:
+ logger.exception('Fetch failed: %s: %s', url, e)
+ except URLError as e: # pragma: no cover
+ logger.exception('Fetch failed: %s: %s', url, e)
+ with self._lock:
+ self._bad_hosts.add(host)
+ except Exception as e: # pragma: no cover
+ logger.exception('Fetch failed: %s: %s', url, e)
+ finally:
+ self._page_cache[url] = result # even if None (failure)
+ return result
+
+ _distname_re = re.compile('<a href=[^>]*>([^<]+)<')
+
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ result = set()
+ page = self.get_page(self.base_url)
+ if not page:
+ raise DistlibException('Unable to get %s' % self.base_url)
+ for match in self._distname_re.finditer(page.data):
+ result.add(match.group(1))
+ return result
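+
+# A minimal usage sketch, assuming network access and that the queried project
+# name exists on the index being scraped:
+#
+#     locator = SimpleScrapingLocator('https://pypi.org/simple/', timeout=2.0)
+#     data = locator.get_project('pip')
+#     # 'data' maps version strings to Distribution objects and also carries
+#     # 'urls' and 'digests' entries, as built by _get_project() above.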
+
+class DirectoryLocator(Locator):
+ """
+ This class locates distributions in a directory tree.
+ """
+
+ def __init__(self, path, **kwargs):
+ """
+ Initialise an instance.
+ :param path: The root of the directory tree to search.
+ :param kwargs: Passed to the superclass constructor,
+ except for:
+ * recursive - if True (the default), subdirectories are
+ recursed into. If False, only the top-level directory
+                         is searched.
+ """
+ self.recursive = kwargs.pop('recursive', True)
+ super(DirectoryLocator, self).__init__(**kwargs)
+ path = os.path.abspath(path)
+ if not os.path.isdir(path): # pragma: no cover
+ raise DistlibException('Not a directory: %r' % path)
+ self.base_dir = path
+
+ def should_include(self, filename, parent):
+ """
+ Should a filename be considered as a candidate for a distribution
+ archive? As well as the filename, the directory which contains it
+ is provided, though not used by the current implementation.
+ """
+ return filename.endswith(self.downloadable_extensions)
+
+ def _get_project(self, name):
+ result = {'urls': {}, 'digests': {}}
+ for root, dirs, files in os.walk(self.base_dir):
+ for fn in files:
+ if self.should_include(fn, root):
+ fn = os.path.join(root, fn)
+ url = urlunparse(('file', '',
+ pathname2url(os.path.abspath(fn)),
+ '', '', ''))
+ info = self.convert_url_to_download_info(url, name)
+ if info:
+ self._update_version_data(result, info)
+ if not self.recursive:
+ break
+ return result
+
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ result = set()
+ for root, dirs, files in os.walk(self.base_dir):
+ for fn in files:
+ if self.should_include(fn, root):
+ fn = os.path.join(root, fn)
+ url = urlunparse(('file', '',
+ pathname2url(os.path.abspath(fn)),
+ '', '', ''))
+ info = self.convert_url_to_download_info(url, None)
+ if info:
+ result.add(info['name'])
+ if not self.recursive:
+ break
+ return result
+
+class JSONLocator(Locator):
+ """
+ This locator uses special extended metadata (not available on PyPI) and is
+ the basis of performant dependency resolution in distlib. Other locators
+ require archive downloads before dependencies can be determined! As you
+ might imagine, that can be slow.
+ """
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ raise NotImplementedError('Not available from this locator')
+
+ def _get_project(self, name):
+ result = {'urls': {}, 'digests': {}}
+ data = get_project_data(name)
+ if data:
+ for info in data.get('files', []):
+ if info['ptype'] != 'sdist' or info['pyversion'] != 'source':
+ continue
+ # We don't store summary in project metadata as it makes
+ # the data bigger for no benefit during dependency
+ # resolution
+ dist = make_dist(data['name'], info['version'],
+ summary=data.get('summary',
+ 'Placeholder for summary'),
+ scheme=self.scheme)
+ md = dist.metadata
+ md.source_url = info['url']
+ # TODO SHA256 digest
+ if 'digest' in info and info['digest']:
+ dist.digest = ('md5', info['digest'])
+ md.dependencies = info.get('requirements', {})
+ dist.exports = info.get('exports', {})
+ result[dist.version] = dist
+ result['urls'].setdefault(dist.version, set()).add(info['url'])
+ return result
+
+class DistPathLocator(Locator):
+ """
+ This locator finds installed distributions in a path. It can be useful for
+ adding to an :class:`AggregatingLocator`.
+ """
+ def __init__(self, distpath, **kwargs):
+ """
+ Initialise an instance.
+
+ :param distpath: A :class:`DistributionPath` instance to search.
+ """
+ super(DistPathLocator, self).__init__(**kwargs)
+ assert isinstance(distpath, DistributionPath)
+ self.distpath = distpath
+
+ def _get_project(self, name):
+ dist = self.distpath.get_distribution(name)
+ if dist is None:
+ result = {'urls': {}, 'digests': {}}
+ else:
+ result = {
+ dist.version: dist,
+ 'urls': {dist.version: set([dist.source_url])},
+ 'digests': {dist.version: set([None])}
+ }
+ return result
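+
+# A minimal usage sketch, assuming distlib's DistributionPath (from
+# distlib.database) is available in this environment:
+#
+#     from distlib.database import DistributionPath
+#     locator = DistPathLocator(DistributionPath())
+#     data = locator.get_project('setuptools')
+#     # 'data' is either empty ({'urls': {}, 'digests': {}}) or keyed by the
+#     # installed version of the queried distribution.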
+
+
+class AggregatingLocator(Locator):
+ """
+ This class allows you to chain and/or merge a list of locators.
+ """
+ def __init__(self, *locators, **kwargs):
+ """
+ Initialise an instance.
+
+ :param locators: The list of locators to search.
+ :param kwargs: Passed to the superclass constructor,
+ except for:
+ * merge - if False (the default), the first successful
+ search from any of the locators is returned. If True,
+ the results from all locators are merged (this can be
+ slow).
+ """
+ self.merge = kwargs.pop('merge', False)
+ self.locators = locators
+ super(AggregatingLocator, self).__init__(**kwargs)
+
+ def clear_cache(self):
+ super(AggregatingLocator, self).clear_cache()
+ for locator in self.locators:
+ locator.clear_cache()
+
+ def _set_scheme(self, value):
+ self._scheme = value
+ for locator in self.locators:
+ locator.scheme = value
+
+ scheme = property(Locator.scheme.fget, _set_scheme)
+
+ def _get_project(self, name):
+ result = {}
+ for locator in self.locators:
+ d = locator.get_project(name)
+ if d:
+ if self.merge:
+ files = result.get('urls', {})
+ digests = result.get('digests', {})
+ # next line could overwrite result['urls'], result['digests']
+ result.update(d)
+ df = result.get('urls')
+ if files and df:
+ for k, v in files.items():
+ if k in df:
+ df[k] |= v
+ else:
+ df[k] = v
+ dd = result.get('digests')
+ if digests and dd:
+ dd.update(digests)
+ else:
+ # See issue #18. If any dists are found and we're looking
+ # for specific constraints, we only return something if
+ # a match is found. For example, if a DirectoryLocator
+ # returns just foo (1.0) while we're looking for
+ # foo (>= 2.0), we'll pretend there was nothing there so
+ # that subsequent locators can be queried. Otherwise we
+ # would just return foo (1.0) which would then lead to a
+ # failure to find foo (>= 2.0), because other locators
+ # weren't searched. Note that this only matters when
+ # merge=False.
+ if self.matcher is None:
+ found = True
+ else:
+ found = False
+ for k in d:
+ if self.matcher.match(k):
+ found = True
+ break
+ if found:
+ result = d
+ break
+ return result
+
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ result = set()
+ for locator in self.locators:
+ try:
+ result |= locator.get_distribution_names()
+ except NotImplementedError:
+ pass
+ return result
+
+
+# We use a legacy scheme simply because most of the dists on PyPI use legacy
+# versions which don't conform to PEP 426 / PEP 440.
+default_locator = AggregatingLocator(
+ JSONLocator(),
+ SimpleScrapingLocator('https://pypi.org/simple/',
+ timeout=3.0),
+ scheme='legacy')
+
+locate = default_locator.locate
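+
+# A minimal usage sketch for the module-level helper, assuming network access
+# and that the requested project is published on PyPI:
+#
+#     dist = locate('requests (>= 2.0)')
+#     if dist is not None:
+#         print(dist.name_and_version, dist.source_url)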
+
+NAME_VERSION_RE = re.compile(r'(?P<name>[\w-]+)\s*'
+ r'\(\s*(==\s*)?(?P<ver>[^)]+)\)$')
+
+class DependencyFinder(object):
+ """
+ Locate dependencies for distributions.
+ """
+
+ def __init__(self, locator=None):
+ """
+ Initialise an instance, using the specified locator
+ to locate distributions.
+ """
+ self.locator = locator or default_locator
+ self.scheme = get_scheme(self.locator.scheme)
+
+ def add_distribution(self, dist):
+ """
+ Add a distribution to the finder. This will update internal information
+ about who provides what.
+ :param dist: The distribution to add.
+ """
+ logger.debug('adding distribution %s', dist)
+ name = dist.key
+ self.dists_by_name[name] = dist
+ self.dists[(name, dist.version)] = dist
+ for p in dist.provides:
+ name, version = parse_name_and_version(p)
+ logger.debug('Add to provided: %s, %s, %s', name, version, dist)
+ self.provided.setdefault(name, set()).add((version, dist))
+
+ def remove_distribution(self, dist):
+ """
+ Remove a distribution from the finder. This will update internal
+ information about who provides what.
+ :param dist: The distribution to remove.
+ """
+ logger.debug('removing distribution %s', dist)
+ name = dist.key
+ del self.dists_by_name[name]
+ del self.dists[(name, dist.version)]
+ for p in dist.provides:
+ name, version = parse_name_and_version(p)
+ logger.debug('Remove from provided: %s, %s, %s', name, version, dist)
+ s = self.provided[name]
+ s.remove((version, dist))
+ if not s:
+ del self.provided[name]
+
+ def get_matcher(self, reqt):
+ """
+ Get a version matcher for a requirement.
+ :param reqt: The requirement
+ :type reqt: str
+ :return: A version matcher (an instance of
+ :class:`distlib.version.Matcher`).
+ """
+ try:
+ matcher = self.scheme.matcher(reqt)
+ except UnsupportedVersionError: # pragma: no cover
+ # XXX compat-mode if cannot read the version
+ name = reqt.split()[0]
+ matcher = self.scheme.matcher(name)
+ return matcher
+
+ def find_providers(self, reqt):
+ """
+ Find the distributions which can fulfill a requirement.
+
+ :param reqt: The requirement.
+ :type reqt: str
+        :return: A set of distributions which can fulfill the requirement.
+ """
+ matcher = self.get_matcher(reqt)
+ name = matcher.key # case-insensitive
+ result = set()
+ provided = self.provided
+ if name in provided:
+ for version, provider in provided[name]:
+ try:
+ match = matcher.match(version)
+ except UnsupportedVersionError:
+ match = False
+
+ if match:
+ result.add(provider)
+ break
+ return result
+
+ def try_to_replace(self, provider, other, problems):
+ """
+ Attempt to replace one provider with another. This is typically used
+ when resolving dependencies from multiple sources, e.g. A requires
+ (B >= 1.0) while C requires (B >= 1.1).
+
+ For successful replacement, ``provider`` must meet all the requirements
+ which ``other`` fulfills.
+
+ :param provider: The provider we are trying to replace with.
+ :param other: The provider we're trying to replace.
+ :param problems: If False is returned, this will contain what
+ problems prevented replacement. This is currently
+ a tuple of the literal string 'cantreplace',
+ ``provider``, ``other`` and the set of requirements
+ that ``provider`` couldn't fulfill.
+ :return: True if we can replace ``other`` with ``provider``, else
+ False.
+ """
+ rlist = self.reqts[other]
+ unmatched = set()
+ for s in rlist:
+ matcher = self.get_matcher(s)
+ if not matcher.match(provider.version):
+ unmatched.add(s)
+ if unmatched:
+ # can't replace other with provider
+ problems.add(('cantreplace', provider, other,
+ frozenset(unmatched)))
+ result = False
+ else:
+ # can replace other with provider
+ self.remove_distribution(other)
+ del self.reqts[other]
+ for s in rlist:
+ self.reqts.setdefault(provider, set()).add(s)
+ self.add_distribution(provider)
+ result = True
+ return result
+
+ def find(self, requirement, meta_extras=None, prereleases=False):
+ """
+ Find a distribution and all distributions it depends on.
+
+ :param requirement: The requirement specifying the distribution to
+ find, or a Distribution instance.
+ :param meta_extras: A list of meta extras such as :test:, :build: and
+ so on.
+ :param prereleases: If ``True``, allow pre-release versions to be
+ returned - otherwise, don't return prereleases
+ unless they're all that's available.
+
+ Return a set of :class:`Distribution` instances and a set of
+ problems.
+
+ The distributions returned should be such that they have the
+ :attr:`required` attribute set to ``True`` if they were
+ from the ``requirement`` passed to ``find()``, and they have the
+ :attr:`build_time_dependency` attribute set to ``True`` unless they
+ are post-installation dependencies of the ``requirement``.
+
+ The problems should be a tuple consisting of the string
+ ``'unsatisfied'`` and the requirement which couldn't be satisfied
+ by any distribution known to the locator.
+ """
+
+ self.provided = {}
+ self.dists = {}
+ self.dists_by_name = {}
+ self.reqts = {}
+
+ meta_extras = set(meta_extras or [])
+ if ':*:' in meta_extras:
+ meta_extras.remove(':*:')
+ # :meta: and :run: are implicitly included
+ meta_extras |= set([':test:', ':build:', ':dev:'])
+
+ if isinstance(requirement, Distribution):
+ dist = odist = requirement
+ logger.debug('passed %s as requirement', odist)
+ else:
+ dist = odist = self.locator.locate(requirement,
+ prereleases=prereleases)
+ if dist is None:
+ raise DistlibException('Unable to locate %r' % requirement)
+ logger.debug('located %s', odist)
+ dist.requested = True
+ problems = set()
+ todo = set([dist])
+ install_dists = set([odist])
+ while todo:
+ dist = todo.pop()
+ name = dist.key # case-insensitive
+ if name not in self.dists_by_name:
+ self.add_distribution(dist)
+ else:
+ #import pdb; pdb.set_trace()
+ other = self.dists_by_name[name]
+ if other != dist:
+ self.try_to_replace(dist, other, problems)
+
+ ireqts = dist.run_requires | dist.meta_requires
+ sreqts = dist.build_requires
+ ereqts = set()
+ if meta_extras and dist in install_dists:
+ for key in ('test', 'build', 'dev'):
+ e = ':%s:' % key
+ if e in meta_extras:
+ ereqts |= getattr(dist, '%s_requires' % key)
+ all_reqts = ireqts | sreqts | ereqts
+ for r in all_reqts:
+ providers = self.find_providers(r)
+ if not providers:
+ logger.debug('No providers found for %r', r)
+ provider = self.locator.locate(r, prereleases=prereleases)
+ # If no provider is found and we didn't consider
+ # prereleases, consider them now.
+ if provider is None and not prereleases:
+ provider = self.locator.locate(r, prereleases=True)
+ if provider is None:
+ logger.debug('Cannot satisfy %r', r)
+ problems.add(('unsatisfied', r))
+ else:
+ n, v = provider.key, provider.version
+ if (n, v) not in self.dists:
+ todo.add(provider)
+ providers.add(provider)
+ if r in ireqts and dist in install_dists:
+ install_dists.add(provider)
+ logger.debug('Adding %s to install_dists',
+ provider.name_and_version)
+ for p in providers:
+ name = p.key
+ if name not in self.dists_by_name:
+ self.reqts.setdefault(p, set()).add(r)
+ else:
+ other = self.dists_by_name[name]
+ if other != p:
+ # see if other can be replaced by p
+ self.try_to_replace(p, other, problems)
+
+ dists = set(self.dists.values())
+ for dist in dists:
+ dist.build_time_dependency = dist not in install_dists
+ if dist.build_time_dependency:
+ logger.debug('%s is a build-time dependency only.',
+ dist.name_and_version)
+ logger.debug('find done for %s', odist)
+ return dists, problems
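+
+# A minimal usage sketch for DependencyFinder, assuming network access and a
+# requirement that the default locator can resolve:
+#
+#     finder = DependencyFinder()
+#     dists, problems = finder.find('requests (>= 2.0)')
+#     for d in sorted(dists, key=lambda d: d.name_and_version):
+#         print(d.name_and_version, d.build_time_dependency)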
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py
new file mode 100644
index 0000000000..ca0fe442d9
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py
@@ -0,0 +1,393 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2013 Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""
+Class representing the list of files in a distribution.
+
+Equivalent to distutils.filelist, but fixes some problems.
+"""
+import fnmatch
+import logging
+import os
+import re
+import sys
+
+from . import DistlibException
+from .compat import fsdecode
+from .util import convert_path
+
+
+__all__ = ['Manifest']
+
+logger = logging.getLogger(__name__)
+
+# a \ followed by some spaces + EOL
+_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M)
+_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
+
+#
+# Due to the different results returned by fnmatch.translate, we need
+# to do slightly different processing for Python 2.7 and 3.2 ... this needed
+# to be brought in for Python 3.6 onwards.
+#
+_PYTHON_VERSION = sys.version_info[:2]
+
+class Manifest(object):
+    """A list of files built by exploring the filesystem and filtered by
+ applying various patterns to what we find there.
+ """
+
+ def __init__(self, base=None):
+ """
+ Initialise an instance.
+
+ :param base: The base directory to explore under.
+ """
+ self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
+ self.prefix = self.base + os.sep
+ self.allfiles = None
+ self.files = set()
+
+ #
+ # Public API
+ #
+
+ def findall(self):
+ """Find all files under the base and set ``allfiles`` to the absolute
+ pathnames of files found.
+ """
+ from stat import S_ISREG, S_ISDIR, S_ISLNK
+
+ self.allfiles = allfiles = []
+ root = self.base
+ stack = [root]
+ pop = stack.pop
+ push = stack.append
+
+ while stack:
+ root = pop()
+ names = os.listdir(root)
+
+ for name in names:
+ fullname = os.path.join(root, name)
+
+ # Avoid excess stat calls -- just one will do, thank you!
+ stat = os.stat(fullname)
+ mode = stat.st_mode
+ if S_ISREG(mode):
+ allfiles.append(fsdecode(fullname))
+ elif S_ISDIR(mode) and not S_ISLNK(mode):
+ push(fullname)
+
+ def add(self, item):
+ """
+ Add a file to the manifest.
+
+ :param item: The pathname to add. This can be relative to the base.
+ """
+ if not item.startswith(self.prefix):
+ item = os.path.join(self.base, item)
+ self.files.add(os.path.normpath(item))
+
+ def add_many(self, items):
+ """
+ Add a list of files to the manifest.
+
+ :param items: The pathnames to add. These can be relative to the base.
+ """
+ for item in items:
+ self.add(item)
+
+ def sorted(self, wantdirs=False):
+ """
+ Return sorted files in directory order
+ """
+
+ def add_dir(dirs, d):
+ dirs.add(d)
+ logger.debug('add_dir added %s', d)
+ if d != self.base:
+ parent, _ = os.path.split(d)
+ assert parent not in ('', '/')
+ add_dir(dirs, parent)
+
+ result = set(self.files) # make a copy!
+ if wantdirs:
+ dirs = set()
+ for f in result:
+ add_dir(dirs, os.path.dirname(f))
+ result |= dirs
+ return [os.path.join(*path_tuple) for path_tuple in
+ sorted(os.path.split(path) for path in result)]
+
+ def clear(self):
+ """Clear all collected files."""
+ self.files = set()
+ self.allfiles = []
+
+ def process_directive(self, directive):
+ """
+ Process a directive which either adds some files from ``allfiles`` to
+ ``files``, or removes some files from ``files``.
+
+ :param directive: The directive to process. This should be in a format
+ compatible with distutils ``MANIFEST.in`` files:
+
+ http://docs.python.org/distutils/sourcedist.html#commands
+ """
+ # Parse the line: split it up, make sure the right number of words
+ # is there, and return the relevant words. 'action' is always
+ # defined: it's the first word of the line. Which of the other
+ # three are defined depends on the action; it'll be either
+ # patterns, (dir and patterns), or (dirpattern).
+ action, patterns, thedir, dirpattern = self._parse_directive(directive)
+
+ # OK, now we know that the action is valid and we have the
+ # right number of words on the line for that action -- so we
+ # can proceed with minimal error-checking.
+ if action == 'include':
+ for pattern in patterns:
+ if not self._include_pattern(pattern, anchor=True):
+ logger.warning('no files found matching %r', pattern)
+
+ elif action == 'exclude':
+ for pattern in patterns:
+ found = self._exclude_pattern(pattern, anchor=True)
+ #if not found:
+ # logger.warning('no previously-included files '
+ # 'found matching %r', pattern)
+
+ elif action == 'global-include':
+ for pattern in patterns:
+ if not self._include_pattern(pattern, anchor=False):
+ logger.warning('no files found matching %r '
+ 'anywhere in distribution', pattern)
+
+ elif action == 'global-exclude':
+ for pattern in patterns:
+ found = self._exclude_pattern(pattern, anchor=False)
+ #if not found:
+ # logger.warning('no previously-included files '
+ # 'matching %r found anywhere in '
+ # 'distribution', pattern)
+
+ elif action == 'recursive-include':
+ for pattern in patterns:
+ if not self._include_pattern(pattern, prefix=thedir):
+ logger.warning('no files found matching %r '
+ 'under directory %r', pattern, thedir)
+
+ elif action == 'recursive-exclude':
+ for pattern in patterns:
+ found = self._exclude_pattern(pattern, prefix=thedir)
+ #if not found:
+ # logger.warning('no previously-included files '
+ # 'matching %r found under directory %r',
+ # pattern, thedir)
+
+ elif action == 'graft':
+ if not self._include_pattern(None, prefix=dirpattern):
+ logger.warning('no directories found matching %r',
+ dirpattern)
+
+ elif action == 'prune':
+ if not self._exclude_pattern(None, prefix=dirpattern):
+ logger.warning('no previously-included directories found '
+ 'matching %r', dirpattern)
+ else: # pragma: no cover
+ # This should never happen, as it should be caught in
+ # _parse_template_line
+ raise DistlibException(
+ 'invalid action %r' % action)
+
+ #
+ # Private API
+ #
+
+ def _parse_directive(self, directive):
+ """
+ Validate a directive.
+ :param directive: The directive to validate.
+ :return: A tuple of action, patterns, thedir, dir_patterns
+ """
+ words = directive.split()
+ if len(words) == 1 and words[0] not in ('include', 'exclude',
+ 'global-include',
+ 'global-exclude',
+ 'recursive-include',
+ 'recursive-exclude',
+ 'graft', 'prune'):
+ # no action given, let's use the default 'include'
+ words.insert(0, 'include')
+
+ action = words[0]
+ patterns = thedir = dir_pattern = None
+
+ if action in ('include', 'exclude',
+ 'global-include', 'global-exclude'):
+ if len(words) < 2:
+ raise DistlibException(
+ '%r expects <pattern1> <pattern2> ...' % action)
+
+ patterns = [convert_path(word) for word in words[1:]]
+
+ elif action in ('recursive-include', 'recursive-exclude'):
+ if len(words) < 3:
+ raise DistlibException(
+ '%r expects <dir> <pattern1> <pattern2> ...' % action)
+
+ thedir = convert_path(words[1])
+ patterns = [convert_path(word) for word in words[2:]]
+
+ elif action in ('graft', 'prune'):
+ if len(words) != 2:
+ raise DistlibException(
+ '%r expects a single <dir_pattern>' % action)
+
+ dir_pattern = convert_path(words[1])
+
+ else:
+ raise DistlibException('unknown action %r' % action)
+
+ return action, patterns, thedir, dir_pattern
+
+ def _include_pattern(self, pattern, anchor=True, prefix=None,
+ is_regex=False):
+ """Select strings (presumably filenames) from 'self.files' that
+ match 'pattern', a Unix-style wildcard (glob) pattern.
+
+ Patterns are not quite the same as implemented by the 'fnmatch'
+ module: '*' and '?' match non-special characters, where "special"
+ is platform-dependent: slash on Unix; colon, slash, and backslash on
+ DOS/Windows; and colon on Mac OS.
+
+ If 'anchor' is true (the default), then the pattern match is more
+ stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
+ 'anchor' is false, both of these will match.
+
+ If 'prefix' is supplied, then only filenames starting with 'prefix'
+ (itself a pattern) and ending with 'pattern', with anything in between
+ them, will match. 'anchor' is ignored in this case.
+
+ If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
+ 'pattern' is assumed to be either a string containing a regex or a
+ regex object -- no translation is done, the regex is just compiled
+ and used as-is.
+
+ Selected strings will be added to self.files.
+
+ Return True if files are found.
+ """
+ # XXX docstring lying about what the special chars are?
+ found = False
+ pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
+
+ # delayed loading of allfiles list
+ if self.allfiles is None:
+ self.findall()
+
+ for name in self.allfiles:
+ if pattern_re.search(name):
+ self.files.add(name)
+ found = True
+ return found
+
+ def _exclude_pattern(self, pattern, anchor=True, prefix=None,
+ is_regex=False):
+ """Remove strings (presumably filenames) from 'files' that match
+ 'pattern'.
+
+ Other parameters are the same as for 'include_pattern()', above.
+ The list 'self.files' is modified in place. Return True if files are
+ found.
+
+        This API is public to allow e.g. exclusion of SCM subdirs when
+        packaging source distributions.
+ """
+ found = False
+ pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
+ for f in list(self.files):
+ if pattern_re.search(f):
+ self.files.remove(f)
+ found = True
+ return found
+
+ def _translate_pattern(self, pattern, anchor=True, prefix=None,
+ is_regex=False):
+ """Translate a shell-like wildcard pattern to a compiled regular
+ expression.
+
+ Return the compiled regex. If 'is_regex' true,
+ then 'pattern' is directly compiled to a regex (if it's a string)
+ or just returned as-is (assumes it's a regex object).
+ """
+ if is_regex:
+ if isinstance(pattern, str):
+ return re.compile(pattern)
+ else:
+ return pattern
+
+ if _PYTHON_VERSION > (3, 2):
+ # ditch start and end characters
+ start, _, end = self._glob_to_re('_').partition('_')
+
+ if pattern:
+ pattern_re = self._glob_to_re(pattern)
+ if _PYTHON_VERSION > (3, 2):
+ assert pattern_re.startswith(start) and pattern_re.endswith(end)
+ else:
+ pattern_re = ''
+
+ base = re.escape(os.path.join(self.base, ''))
+ if prefix is not None:
+ # ditch end of pattern character
+ if _PYTHON_VERSION <= (3, 2):
+ empty_pattern = self._glob_to_re('')
+ prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
+ else:
+ prefix_re = self._glob_to_re(prefix)
+ assert prefix_re.startswith(start) and prefix_re.endswith(end)
+ prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
+ sep = os.sep
+ if os.sep == '\\':
+ sep = r'\\'
+ if _PYTHON_VERSION <= (3, 2):
+ pattern_re = '^' + base + sep.join((prefix_re,
+ '.*' + pattern_re))
+ else:
+ pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
+ pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
+ pattern_re, end)
+ else: # no prefix -- respect anchor flag
+ if anchor:
+ if _PYTHON_VERSION <= (3, 2):
+ pattern_re = '^' + base + pattern_re
+ else:
+ pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])
+
+ return re.compile(pattern_re)
+
+ def _glob_to_re(self, pattern):
+ """Translate a shell-like glob pattern to a regular expression.
+
+ Return a string containing the regex. Differs from
+ 'fnmatch.translate()' in that '*' does not match "special characters"
+ (which are platform-specific).
+ """
+ pattern_re = fnmatch.translate(pattern)
+
+ # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
+ # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
+ # and by extension they shouldn't match such "special characters" under
+ # any OS. So change all non-escaped dots in the RE to match any
+ # character except the special characters (currently: just os.sep).
+ sep = os.sep
+ if os.sep == '\\':
+ # we're using a regex to manipulate a regex, so we need
+ # to escape the backslash twice
+ sep = r'\\\\'
+ escaped = r'\1[^%s]' % sep
+ pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
+ return pattern_re
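+
+# A minimal usage sketch, assuming a project directory containing Python
+# sources and a docs/ subdirectory (the paths and patterns are illustrative):
+#
+#     manifest = Manifest('/path/to/project')
+#     manifest.findall()
+#     manifest.process_directive('include *.py')
+#     manifest.process_directive('recursive-include docs *.rst')
+#     files = manifest.sorted()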
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py
new file mode 100644
index 0000000000..ee1f3e2365
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py
@@ -0,0 +1,131 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""
+Parser for the environment markers micro-language defined in PEP 508.
+"""
+
+# Note: In PEP 345, the micro-language was Python compatible, so the ast
+# module could be used to parse it. However, PEP 508 introduced operators such
+# as ~= and === which aren't in Python, necessitating a different approach.
+
+import os
+import sys
+import platform
+import re
+
+from .compat import python_implementation, urlparse, string_types
+from .util import in_venv, parse_marker
+
+__all__ = ['interpret']
+
+def _is_literal(o):
+ if not isinstance(o, string_types) or not o:
+ return False
+ return o[0] in '\'"'
+
+class Evaluator(object):
+ """
+    This class is used to evaluate marker expressions.
+ """
+
+ operations = {
+ '==': lambda x, y: x == y,
+ '===': lambda x, y: x == y,
+ '~=': lambda x, y: x == y or x > y,
+ '!=': lambda x, y: x != y,
+ '<': lambda x, y: x < y,
+ '<=': lambda x, y: x == y or x < y,
+ '>': lambda x, y: x > y,
+ '>=': lambda x, y: x == y or x > y,
+ 'and': lambda x, y: x and y,
+ 'or': lambda x, y: x or y,
+ 'in': lambda x, y: x in y,
+ 'not in': lambda x, y: x not in y,
+ }
+
+ def evaluate(self, expr, context):
+ """
+ Evaluate a marker expression returned by the :func:`parse_requirement`
+ function in the specified context.
+ """
+ if isinstance(expr, string_types):
+ if expr[0] in '\'"':
+ result = expr[1:-1]
+ else:
+ if expr not in context:
+ raise SyntaxError('unknown variable: %s' % expr)
+ result = context[expr]
+ else:
+ assert isinstance(expr, dict)
+ op = expr['op']
+ if op not in self.operations:
+ raise NotImplementedError('op not implemented: %s' % op)
+ elhs = expr['lhs']
+ erhs = expr['rhs']
+ if _is_literal(expr['lhs']) and _is_literal(expr['rhs']):
+ raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs))
+
+ lhs = self.evaluate(elhs, context)
+ rhs = self.evaluate(erhs, context)
+ result = self.operations[op](lhs, rhs)
+ return result
+
+def default_context():
+ def format_full_version(info):
+ version = '%s.%s.%s' % (info.major, info.minor, info.micro)
+ kind = info.releaselevel
+ if kind != 'final':
+ version += kind[0] + str(info.serial)
+ return version
+
+ if hasattr(sys, 'implementation'):
+ implementation_version = format_full_version(sys.implementation.version)
+ implementation_name = sys.implementation.name
+ else:
+ implementation_version = '0'
+ implementation_name = ''
+
+ result = {
+ 'implementation_name': implementation_name,
+ 'implementation_version': implementation_version,
+ 'os_name': os.name,
+ 'platform_machine': platform.machine(),
+ 'platform_python_implementation': platform.python_implementation(),
+ 'platform_release': platform.release(),
+ 'platform_system': platform.system(),
+ 'platform_version': platform.version(),
+ 'platform_in_venv': str(in_venv()),
+ 'python_full_version': platform.python_version(),
+ 'python_version': platform.python_version()[:3],
+ 'sys_platform': sys.platform,
+ }
+ return result
+
+DEFAULT_CONTEXT = default_context()
+del default_context
+
+evaluator = Evaluator()
+
+def interpret(marker, execution_context=None):
+ """
+ Interpret a marker and return a result depending on environment.
+
+ :param marker: The marker to interpret.
+ :type marker: str
+ :param execution_context: The context used for name lookup.
+ :type execution_context: mapping
+ """
+ try:
+ expr, rest = parse_marker(marker)
+ except Exception as e:
+ raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e))
+ if rest and rest[0] != '#':
+ raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest))
+ context = dict(DEFAULT_CONTEXT)
+ if execution_context:
+ context.update(execution_context)
+ return evaluator.evaluate(expr, context)
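+
+# A minimal usage sketch; the marker strings below are ordinary PEP 508
+# markers, and 'extra' must be supplied via execution_context because it is
+# not part of the default context:
+#
+#     interpret('python_version >= "2.7"')
+#     interpret('sys_platform == "win32" and extra == "test"',
+#               execution_context={'extra': 'test'})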
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py
new file mode 100644
index 0000000000..6d5e236090
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py
@@ -0,0 +1,1056 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""Implementation of the Metadata for Python packages PEPs.
+
+Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and withdrawn 2.0).
+"""
+from __future__ import unicode_literals
+
+import codecs
+from email import message_from_file
+import json
+import logging
+import re
+
+
+from . import DistlibException, __version__
+from .compat import StringIO, string_types, text_type
+from .markers import interpret
+from .util import extract_by_key, get_extras
+from .version import get_scheme, PEP440_VERSION_RE
+
+logger = logging.getLogger(__name__)
+
+
+class MetadataMissingError(DistlibException):
+ """A required metadata is missing"""
+
+
+class MetadataConflictError(DistlibException):
+ """Attempt to read or write metadata fields that are conflictual."""
+
+
+class MetadataUnrecognizedVersionError(DistlibException):
+ """Unknown metadata version number."""
+
+
+class MetadataInvalidError(DistlibException):
+ """A metadata value is invalid"""
+
+# public API of this module
+__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']
+
+# Encoding used for the PKG-INFO files
+PKG_INFO_ENCODING = 'utf-8'
+
+# preferred version. Hopefully will be changed
+# to 1.2 once PEP 345 is supported everywhere
+PKG_INFO_PREFERRED_VERSION = '1.1'
+
+_LINE_PREFIX_1_2 = re.compile('\n \\|')
+_LINE_PREFIX_PRE_1_2 = re.compile('\n ')
+_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+ 'Summary', 'Description',
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
+ 'License')
+
+_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+ 'Supported-Platform', 'Summary', 'Description',
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
+ 'License', 'Classifier', 'Download-URL', 'Obsoletes',
+ 'Provides', 'Requires')
+
+_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier',
+ 'Download-URL')
+
+_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+ 'Supported-Platform', 'Summary', 'Description',
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
+ 'Maintainer', 'Maintainer-email', 'License',
+ 'Classifier', 'Download-URL', 'Obsoletes-Dist',
+ 'Project-URL', 'Provides-Dist', 'Requires-Dist',
+ 'Requires-Python', 'Requires-External')
+
+_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
+ 'Obsoletes-Dist', 'Requires-External', 'Maintainer',
+ 'Maintainer-email', 'Project-URL')
+
+_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+ 'Supported-Platform', 'Summary', 'Description',
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
+ 'Maintainer', 'Maintainer-email', 'License',
+ 'Classifier', 'Download-URL', 'Obsoletes-Dist',
+ 'Project-URL', 'Provides-Dist', 'Requires-Dist',
+ 'Requires-Python', 'Requires-External', 'Private-Version',
+ 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension',
+ 'Provides-Extra')
+
+_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By',
+ 'Setup-Requires-Dist', 'Extension')
+
+# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in
+# the metadata. Include them in the tuple literal below to allow them
+# (for now).
+_566_FIELDS = _426_FIELDS + ('Description-Content-Type',
+ 'Requires', 'Provides')
+
+_566_MARKERS = ('Description-Content-Type',)
+
+_ALL_FIELDS = set()
+_ALL_FIELDS.update(_241_FIELDS)
+_ALL_FIELDS.update(_314_FIELDS)
+_ALL_FIELDS.update(_345_FIELDS)
+_ALL_FIELDS.update(_426_FIELDS)
+_ALL_FIELDS.update(_566_FIELDS)
+
+EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''')
+
+
+def _version2fieldlist(version):
+ if version == '1.0':
+ return _241_FIELDS
+ elif version == '1.1':
+ return _314_FIELDS
+ elif version == '1.2':
+ return _345_FIELDS
+ elif version in ('1.3', '2.1'):
+ return _345_FIELDS + _566_FIELDS
+ elif version == '2.0':
+ return _426_FIELDS
+ raise MetadataUnrecognizedVersionError(version)
+
+
+def _best_version(fields):
+ """Detect the best version depending on the fields used."""
+ def _has_marker(keys, markers):
+ for marker in markers:
+ if marker in keys:
+ return True
+ return False
+
+ keys = []
+ for key, value in fields.items():
+ if value in ([], 'UNKNOWN', None):
+ continue
+ keys.append(key)
+
+ possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1']
+
+ # first let's try to see if a field is not part of one of the version
+ for key in keys:
+ if key not in _241_FIELDS and '1.0' in possible_versions:
+ possible_versions.remove('1.0')
+ logger.debug('Removed 1.0 due to %s', key)
+ if key not in _314_FIELDS and '1.1' in possible_versions:
+ possible_versions.remove('1.1')
+ logger.debug('Removed 1.1 due to %s', key)
+ if key not in _345_FIELDS and '1.2' in possible_versions:
+ possible_versions.remove('1.2')
+ logger.debug('Removed 1.2 due to %s', key)
+ if key not in _566_FIELDS and '1.3' in possible_versions:
+ possible_versions.remove('1.3')
+ logger.debug('Removed 1.3 due to %s', key)
+ if key not in _566_FIELDS and '2.1' in possible_versions:
+ if key != 'Description': # In 2.1, description allowed after headers
+ possible_versions.remove('2.1')
+ logger.debug('Removed 2.1 due to %s', key)
+ if key not in _426_FIELDS and '2.0' in possible_versions:
+ possible_versions.remove('2.0')
+ logger.debug('Removed 2.0 due to %s', key)
+
+ # possible_version contains qualified versions
+ if len(possible_versions) == 1:
+ return possible_versions[0] # found !
+ elif len(possible_versions) == 0:
+ logger.debug('Out of options - unknown metadata set: %s', fields)
+ raise MetadataConflictError('Unknown metadata set')
+
+ # let's see if one unique marker is found
+ is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
+ is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
+ is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS)
+ is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS)
+ if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1:
+ raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields')
+
+ # we have the choice, 1.0, or 1.2, or 2.0
+ # - 1.0 has a broken Summary field but works with all tools
+ # - 1.1 is to avoid
+ # - 1.2 fixes Summary but has little adoption
+ # - 2.0 adds more features and is very new
+ if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0:
+ # we couldn't find any specific marker
+ if PKG_INFO_PREFERRED_VERSION in possible_versions:
+ return PKG_INFO_PREFERRED_VERSION
+ if is_1_1:
+ return '1.1'
+ if is_1_2:
+ return '1.2'
+ if is_2_1:
+ return '2.1'
+
+ return '2.0'
+
+# This follows the rules about transforming keys as described in
+# https://www.python.org/dev/peps/pep-0566/#id17
+_ATTR2FIELD = {
+ name.lower().replace("-", "_"): name for name in _ALL_FIELDS
+}
+_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()}
+
+_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
+_VERSIONS_FIELDS = ('Requires-Python',)
+_VERSION_FIELDS = ('Version',)
+_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
+ 'Requires', 'Provides', 'Obsoletes-Dist',
+ 'Provides-Dist', 'Requires-Dist', 'Requires-External',
+ 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist',
+ 'Provides-Extra', 'Extension')
+_LISTTUPLEFIELDS = ('Project-URL',)
+
+_ELEMENTSFIELD = ('Keywords',)
+
+_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')
+
+_MISSING = object()
+
+_FILESAFE = re.compile('[^A-Za-z0-9.]+')
+
+
+def _get_name_and_version(name, version, for_filename=False):
+ """Return the distribution name with version.
+
+ If for_filename is true, return a filename-escaped form."""
+ if for_filename:
+ # For both name and version any runs of non-alphanumeric or '.'
+ # characters are replaced with a single '-'. Additionally any
+ # spaces in the version string become '.'
+ name = _FILESAFE.sub('-', name)
+ version = _FILESAFE.sub('-', version.replace(' ', '.'))
+ return '%s-%s' % (name, version)
+
+
+class LegacyMetadata(object):
+ """The legacy metadata of a release.
+
+ Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). You can
+ instantiate the class with one of these arguments (or none):
+ - *path*, the path to a metadata file
+    - *fileobj* is a file-like object with metadata as content
+ - *mapping* is a dict-like object
+ - *scheme* is a version scheme name
+ """
+ # TODO document the mapping API and UNKNOWN default key
+
+ def __init__(self, path=None, fileobj=None, mapping=None,
+ scheme='default'):
+ if [path, fileobj, mapping].count(None) < 2:
+ raise TypeError('path, fileobj and mapping are exclusive')
+ self._fields = {}
+ self.requires_files = []
+ self._dependencies = None
+ self.scheme = scheme
+ if path is not None:
+ self.read(path)
+ elif fileobj is not None:
+ self.read_file(fileobj)
+ elif mapping is not None:
+ self.update(mapping)
+ self.set_metadata_version()
+
+ def set_metadata_version(self):
+ self._fields['Metadata-Version'] = _best_version(self._fields)
+
+ def _write_field(self, fileobj, name, value):
+ fileobj.write('%s: %s\n' % (name, value))
+
+ def __getitem__(self, name):
+ return self.get(name)
+
+ def __setitem__(self, name, value):
+ return self.set(name, value)
+
+ def __delitem__(self, name):
+ field_name = self._convert_name(name)
+ try:
+ del self._fields[field_name]
+ except KeyError:
+ raise KeyError(name)
+
+ def __contains__(self, name):
+ return (name in self._fields or
+ self._convert_name(name) in self._fields)
+
+ def _convert_name(self, name):
+ if name in _ALL_FIELDS:
+ return name
+ name = name.replace('-', '_').lower()
+ return _ATTR2FIELD.get(name, name)
+
+ def _default_value(self, name):
+ if name in _LISTFIELDS or name in _ELEMENTSFIELD:
+ return []
+ return 'UNKNOWN'
+
+ def _remove_line_prefix(self, value):
+ if self.metadata_version in ('1.0', '1.1'):
+ return _LINE_PREFIX_PRE_1_2.sub('\n', value)
+ else:
+ return _LINE_PREFIX_1_2.sub('\n', value)
+
+ def __getattr__(self, name):
+ if name in _ATTR2FIELD:
+ return self[name]
+ raise AttributeError(name)
+
+ #
+ # Public API
+ #
+
+# dependencies = property(_get_dependencies, _set_dependencies)
+
+ def get_fullname(self, filesafe=False):
+ """Return the distribution name with version.
+
+ If filesafe is true, return a filename-escaped form."""
+ return _get_name_and_version(self['Name'], self['Version'], filesafe)
+
+ def is_field(self, name):
+ """return True if name is a valid metadata key"""
+ name = self._convert_name(name)
+ return name in _ALL_FIELDS
+
+ def is_multi_field(self, name):
+ name = self._convert_name(name)
+ return name in _LISTFIELDS
+
+ def read(self, filepath):
+ """Read the metadata values from a file path."""
+ fp = codecs.open(filepath, 'r', encoding='utf-8')
+ try:
+ self.read_file(fp)
+ finally:
+ fp.close()
+
+ def read_file(self, fileob):
+ """Read the metadata values from a file object."""
+ msg = message_from_file(fileob)
+ self._fields['Metadata-Version'] = msg['metadata-version']
+
+ # When reading, get all the fields we can
+ for field in _ALL_FIELDS:
+ if field not in msg:
+ continue
+ if field in _LISTFIELDS:
+ # we can have multiple lines
+ values = msg.get_all(field)
+ if field in _LISTTUPLEFIELDS and values is not None:
+ values = [tuple(value.split(',')) for value in values]
+ self.set(field, values)
+ else:
+ # single line
+ value = msg[field]
+ if value is not None and value != 'UNKNOWN':
+ self.set(field, value)
+
+ # PEP 566 specifies that the body be used for the description, if
+ # available
+ body = msg.get_payload()
+ self["Description"] = body if body else self["Description"]
+ # logger.debug('Attempting to set metadata for %s', self)
+ # self.set_metadata_version()
+
+ def write(self, filepath, skip_unknown=False):
+ """Write the metadata fields to filepath."""
+ fp = codecs.open(filepath, 'w', encoding='utf-8')
+ try:
+ self.write_file(fp, skip_unknown)
+ finally:
+ fp.close()
+
+ def write_file(self, fileobject, skip_unknown=False):
+ """Write the PKG-INFO format data to a file object."""
+ self.set_metadata_version()
+
+ for field in _version2fieldlist(self['Metadata-Version']):
+ values = self.get(field)
+ if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']):
+ continue
+ if field in _ELEMENTSFIELD:
+ self._write_field(fileobject, field, ','.join(values))
+ continue
+ if field not in _LISTFIELDS:
+ if field == 'Description':
+ if self.metadata_version in ('1.0', '1.1'):
+ values = values.replace('\n', '\n ')
+ else:
+ values = values.replace('\n', '\n |')
+ values = [values]
+
+ if field in _LISTTUPLEFIELDS:
+ values = [','.join(value) for value in values]
+
+ for value in values:
+ self._write_field(fileobject, field, value)
+
+ def update(self, other=None, **kwargs):
+ """Set metadata values from the given iterable `other` and kwargs.
+
+ Behavior is like `dict.update`: If `other` has a ``keys`` method,
+ they are looped over and ``self[key]`` is assigned ``other[key]``.
+ Else, ``other`` is an iterable of ``(key, value)`` iterables.
+
+ Keys that don't match a metadata field or that have an empty value are
+ dropped.
+ """
+ def _set(key, value):
+ if key in _ATTR2FIELD and value:
+ self.set(self._convert_name(key), value)
+
+ if not other:
+ # other is None or empty container
+ pass
+ elif hasattr(other, 'keys'):
+ for k in other.keys():
+ _set(k, other[k])
+ else:
+ for k, v in other:
+ _set(k, v)
+
+ if kwargs:
+ for k, v in kwargs.items():
+ _set(k, v)
+
+ def set(self, name, value):
+        """Validate, then set a metadata field."""
+ name = self._convert_name(name)
+
+ if ((name in _ELEMENTSFIELD or name == 'Platform') and
+ not isinstance(value, (list, tuple))):
+ if isinstance(value, string_types):
+ value = [v.strip() for v in value.split(',')]
+ else:
+ value = []
+ elif (name in _LISTFIELDS and
+ not isinstance(value, (list, tuple))):
+ if isinstance(value, string_types):
+ value = [value]
+ else:
+ value = []
+
+ if logger.isEnabledFor(logging.WARNING):
+ project_name = self['Name']
+
+ scheme = get_scheme(self.scheme)
+ if name in _PREDICATE_FIELDS and value is not None:
+ for v in value:
+ # check that the values are valid
+ if not scheme.is_valid_matcher(v.split(';')[0]):
+ logger.warning(
+ "'%s': '%s' is not valid (field '%s')",
+ project_name, v, name)
+ # FIXME this rejects UNKNOWN, is that right?
+ elif name in _VERSIONS_FIELDS and value is not None:
+ if not scheme.is_valid_constraint_list(value):
+ logger.warning("'%s': '%s' is not a valid version (field '%s')",
+ project_name, value, name)
+ elif name in _VERSION_FIELDS and value is not None:
+ if not scheme.is_valid_version(value):
+ logger.warning("'%s': '%s' is not a valid version (field '%s')",
+ project_name, value, name)
+
+ if name in _UNICODEFIELDS:
+ if name == 'Description':
+ value = self._remove_line_prefix(value)
+
+ self._fields[name] = value
+
+ def get(self, name, default=_MISSING):
+ """Get a metadata field."""
+ name = self._convert_name(name)
+ if name not in self._fields:
+ if default is _MISSING:
+ default = self._default_value(name)
+ return default
+ if name in _UNICODEFIELDS:
+ value = self._fields[name]
+ return value
+ elif name in _LISTFIELDS:
+ value = self._fields[name]
+ if value is None:
+ return []
+ res = []
+ for val in value:
+ if name not in _LISTTUPLEFIELDS:
+ res.append(val)
+ else:
+ # That's for Project-URL
+ res.append((val[0], val[1]))
+ return res
+
+ elif name in _ELEMENTSFIELD:
+ value = self._fields[name]
+ if isinstance(value, string_types):
+ return value.split(',')
+ return self._fields[name]
+
+ def check(self, strict=False):
+ """Check if the metadata is compliant. If strict is True then raise if
+ no Name or Version are provided"""
+ self.set_metadata_version()
+
+ # XXX should check the versions (if the file was loaded)
+ missing, warnings = [], []
+
+ for attr in ('Name', 'Version'): # required by PEP 345
+ if attr not in self:
+ missing.append(attr)
+
+ if strict and missing != []:
+ msg = 'missing required metadata: %s' % ', '.join(missing)
+ raise MetadataMissingError(msg)
+
+ for attr in ('Home-page', 'Author'):
+ if attr not in self:
+ missing.append(attr)
+
+ # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
+ if self['Metadata-Version'] != '1.2':
+ return missing, warnings
+
+ scheme = get_scheme(self.scheme)
+
+ def are_valid_constraints(value):
+ for v in value:
+ if not scheme.is_valid_matcher(v.split(';')[0]):
+ return False
+ return True
+
+ for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints),
+ (_VERSIONS_FIELDS,
+ scheme.is_valid_constraint_list),
+ (_VERSION_FIELDS,
+ scheme.is_valid_version)):
+ for field in fields:
+ value = self.get(field, None)
+ if value is not None and not controller(value):
+ warnings.append("Wrong value for '%s': %s" % (field, value))
+
+ return missing, warnings
+
+ def todict(self, skip_missing=False):
+ """Return fields as a dict.
+
+ Field names will be converted to use the underscore-lowercase style
+ instead of hyphen-mixed case (i.e. home_page instead of Home-page).
+ This is as per https://www.python.org/dev/peps/pep-0566/#id17.
+ """
+ self.set_metadata_version()
+
+ fields = _version2fieldlist(self['Metadata-Version'])
+
+ data = {}
+
+ for field_name in fields:
+ if not skip_missing or field_name in self._fields:
+ key = _FIELD2ATTR[field_name]
+ if key != 'project_url':
+ data[key] = self[field_name]
+ else:
+ data[key] = [','.join(u) for u in self[field_name]]
+
+ return data
+
+ def add_requirements(self, requirements):
+ if self['Metadata-Version'] == '1.1':
+ # we can't have 1.1 metadata *and* Setuptools requires
+ for field in ('Obsoletes', 'Requires', 'Provides'):
+ if field in self:
+ del self[field]
+ self['Requires-Dist'] += requirements
+
+ # Mapping API
+ # TODO could add iter* variants
+
+ def keys(self):
+ return list(_version2fieldlist(self['Metadata-Version']))
+
+ def __iter__(self):
+ for key in self.keys():
+ yield key
+
+ def values(self):
+ return [self[key] for key in self.keys()]
+
+ def items(self):
+ return [(key, self[key]) for key in self.keys()]
+
+ def __repr__(self):
+ return '<%s %s %s>' % (self.__class__.__name__, self.name,
+ self.version)
+
+
+METADATA_FILENAME = 'pydist.json'
+WHEEL_METADATA_FILENAME = 'metadata.json'
+LEGACY_METADATA_FILENAME = 'METADATA'
+
+
+class Metadata(object):
+ """
+ The metadata of a release. This implementation uses 2.0 (JSON)
+ metadata where possible. If not possible, it wraps a LegacyMetadata
+ instance which handles the key-value metadata format.
+ """
+
+ METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$')
+
+ NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I)
+
+ VERSION_MATCHER = PEP440_VERSION_RE
+
+ SUMMARY_MATCHER = re.compile('.{1,2047}')
+
+ METADATA_VERSION = '2.0'
+
+ GENERATOR = 'distlib (%s)' % __version__
+
+ MANDATORY_KEYS = {
+ 'name': (),
+ 'version': (),
+ 'summary': ('legacy',),
+ }
+
+ INDEX_KEYS = ('name version license summary description author '
+ 'author_email keywords platform home_page classifiers '
+ 'download_url')
+
+ DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires '
+ 'dev_requires provides meta_requires obsoleted_by '
+ 'supports_environments')
+
+ SYNTAX_VALIDATORS = {
+ 'metadata_version': (METADATA_VERSION_MATCHER, ()),
+ 'name': (NAME_MATCHER, ('legacy',)),
+ 'version': (VERSION_MATCHER, ('legacy',)),
+ 'summary': (SUMMARY_MATCHER, ('legacy',)),
+ }
+
+ __slots__ = ('_legacy', '_data', 'scheme')
+
+ def __init__(self, path=None, fileobj=None, mapping=None,
+ scheme='default'):
+ if [path, fileobj, mapping].count(None) < 2:
+ raise TypeError('path, fileobj and mapping are exclusive')
+ self._legacy = None
+ self._data = None
+ self.scheme = scheme
+ #import pdb; pdb.set_trace()
+ if mapping is not None:
+ try:
+ self._validate_mapping(mapping, scheme)
+ self._data = mapping
+ except MetadataUnrecognizedVersionError:
+ self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme)
+ self.validate()
+ else:
+ data = None
+ if path:
+ with open(path, 'rb') as f:
+ data = f.read()
+ elif fileobj:
+ data = fileobj.read()
+ if data is None:
+ # Initialised with no args - to be added
+ self._data = {
+ 'metadata_version': self.METADATA_VERSION,
+ 'generator': self.GENERATOR,
+ }
+ else:
+ if not isinstance(data, text_type):
+ data = data.decode('utf-8')
+ try:
+ self._data = json.loads(data)
+ self._validate_mapping(self._data, scheme)
+ except ValueError:
+ # Note: MetadataUnrecognizedVersionError does not
+ # inherit from ValueError (it's a DistlibException,
+ # which should not inherit from ValueError).
+ # The ValueError comes from the json.load - if that
+ # succeeds and we get a validation error, we want
+ # that to propagate
+ self._legacy = LegacyMetadata(fileobj=StringIO(data),
+ scheme=scheme)
+ self.validate()
+
+ common_keys = set(('name', 'version', 'license', 'keywords', 'summary'))
+
+ none_list = (None, list)
+ none_dict = (None, dict)
+
+ mapped_keys = {
+ 'run_requires': ('Requires-Dist', list),
+ 'build_requires': ('Setup-Requires-Dist', list),
+ 'dev_requires': none_list,
+ 'test_requires': none_list,
+ 'meta_requires': none_list,
+ 'extras': ('Provides-Extra', list),
+ 'modules': none_list,
+ 'namespaces': none_list,
+ 'exports': none_dict,
+ 'commands': none_dict,
+ 'classifiers': ('Classifier', list),
+ 'source_url': ('Download-URL', None),
+ 'metadata_version': ('Metadata-Version', None),
+ }
+
+ del none_list, none_dict
+
+ def __getattribute__(self, key):
+ common = object.__getattribute__(self, 'common_keys')
+ mapped = object.__getattribute__(self, 'mapped_keys')
+ if key in mapped:
+ lk, maker = mapped[key]
+ if self._legacy:
+ if lk is None:
+ result = None if maker is None else maker()
+ else:
+ result = self._legacy.get(lk)
+ else:
+ value = None if maker is None else maker()
+ if key not in ('commands', 'exports', 'modules', 'namespaces',
+ 'classifiers'):
+ result = self._data.get(key, value)
+ else:
+ # special cases for PEP 459
+ sentinel = object()
+ result = sentinel
+ d = self._data.get('extensions')
+ if d:
+ if key == 'commands':
+ result = d.get('python.commands', value)
+ elif key == 'classifiers':
+ d = d.get('python.details')
+ if d:
+ result = d.get(key, value)
+ else:
+ d = d.get('python.exports')
+ if not d:
+ d = self._data.get('python.exports')
+ if d:
+ result = d.get(key, value)
+ if result is sentinel:
+ result = value
+ elif key not in common:
+ result = object.__getattribute__(self, key)
+ elif self._legacy:
+ result = self._legacy.get(key)
+ else:
+ result = self._data.get(key)
+ return result
+
+ def _validate_value(self, key, value, scheme=None):
+ if key in self.SYNTAX_VALIDATORS:
+ pattern, exclusions = self.SYNTAX_VALIDATORS[key]
+ if (scheme or self.scheme) not in exclusions:
+ m = pattern.match(value)
+ if not m:
+ raise MetadataInvalidError("'%s' is an invalid value for "
+ "the '%s' property" % (value,
+ key))
+
+ def __setattr__(self, key, value):
+ self._validate_value(key, value)
+ common = object.__getattribute__(self, 'common_keys')
+ mapped = object.__getattribute__(self, 'mapped_keys')
+ if key in mapped:
+ lk, _ = mapped[key]
+ if self._legacy:
+ if lk is None:
+ raise NotImplementedError
+ self._legacy[lk] = value
+ elif key not in ('commands', 'exports', 'modules', 'namespaces',
+ 'classifiers'):
+ self._data[key] = value
+ else:
+ # special cases for PEP 459
+ d = self._data.setdefault('extensions', {})
+ if key == 'commands':
+ d['python.commands'] = value
+ elif key == 'classifiers':
+ d = d.setdefault('python.details', {})
+ d[key] = value
+ else:
+ d = d.setdefault('python.exports', {})
+ d[key] = value
+ elif key not in common:
+ object.__setattr__(self, key, value)
+ else:
+ if key == 'keywords':
+ if isinstance(value, string_types):
+ value = value.strip()
+ if value:
+ value = value.split()
+ else:
+ value = []
+ if self._legacy:
+ self._legacy[key] = value
+ else:
+ self._data[key] = value
+
+ @property
+ def name_and_version(self):
+ return _get_name_and_version(self.name, self.version, True)
+
+ @property
+ def provides(self):
+ if self._legacy:
+ result = self._legacy['Provides-Dist']
+ else:
+ result = self._data.setdefault('provides', [])
+ s = '%s (%s)' % (self.name, self.version)
+ if s not in result:
+ result.append(s)
+ return result
+
+ @provides.setter
+ def provides(self, value):
+ if self._legacy:
+ self._legacy['Provides-Dist'] = value
+ else:
+ self._data['provides'] = value
+
+ def get_requirements(self, reqts, extras=None, env=None):
+ """
+ Base method to get dependencies, given a set of extras
+ to satisfy and an optional environment context.
+ :param reqts: A list of sometimes-wanted dependencies,
+ perhaps dependent on extras and environment.
+ :param extras: A list of optional components being requested.
+ :param env: An optional environment for marker evaluation.
+ """
+ if self._legacy:
+ result = reqts
+ else:
+ result = []
+ extras = get_extras(extras or [], self.extras)
+ for d in reqts:
+ if 'extra' not in d and 'environment' not in d:
+ # unconditional
+ include = True
+ else:
+ if 'extra' not in d:
+ # Not extra-dependent - only environment-dependent
+ include = True
+ else:
+ include = d.get('extra') in extras
+ if include:
+ # Not excluded because of extras, check environment
+ marker = d.get('environment')
+ if marker:
+ include = interpret(marker, env)
+ if include:
+ result.extend(d['requires'])
+ for key in ('build', 'dev', 'test'):
+ e = ':%s:' % key
+ if e in extras:
+ extras.remove(e)
+ # A recursive call, but it should terminate since 'test'
+ # has been removed from the extras
+ reqts = self._data.get('%s_requires' % key, [])
+ result.extend(self.get_requirements(reqts, extras=extras,
+ env=env))
+ return result
+
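+ # Illustrative sketch (not part of upstream distlib): with 2.0 (JSON-style)
+ # metadata, each run_requires entry is a dict that may carry 'extra' and
+ # 'environment' keys alongside 'requires'. The package data below is made up.
+ #
+ # md = Metadata(mapping={'metadata_version': '2.0', 'name': 'demo',
+ # 'version': '1.0', 'summary': 'Demo package',
+ # 'extras': ['test'],
+ # 'run_requires': [{'requires': ['requests']},
+ # {'extra': 'test', 'requires': ['pytest']}]})
+ # md.get_requirements(md.run_requires) # -> ['requests']
+ # md.get_requirements(md.run_requires, extras=['test']) # -> ['requests', 'pytest']
+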
+ @property
+ def dictionary(self):
+ if self._legacy:
+ return self._from_legacy()
+ return self._data
+
+ @property
+ def dependencies(self):
+ if self._legacy:
+ raise NotImplementedError
+ else:
+ return extract_by_key(self._data, self.DEPENDENCY_KEYS)
+
+ @dependencies.setter
+ def dependencies(self, value):
+ if self._legacy:
+ raise NotImplementedError
+ else:
+ self._data.update(value)
+
+ def _validate_mapping(self, mapping, scheme):
+ if mapping.get('metadata_version') != self.METADATA_VERSION:
+ raise MetadataUnrecognizedVersionError()
+ missing = []
+ for key, exclusions in self.MANDATORY_KEYS.items():
+ if key not in mapping:
+ if scheme not in exclusions:
+ missing.append(key)
+ if missing:
+ msg = 'Missing metadata items: %s' % ', '.join(missing)
+ raise MetadataMissingError(msg)
+ for k, v in mapping.items():
+ self._validate_value(k, v, scheme)
+
+ def validate(self):
+ if self._legacy:
+ missing, warnings = self._legacy.check(True)
+ if missing or warnings:
+ logger.warning('Metadata: missing: %s, warnings: %s',
+ missing, warnings)
+ else:
+ self._validate_mapping(self._data, self.scheme)
+
+ def todict(self):
+ if self._legacy:
+ return self._legacy.todict(True)
+ else:
+ result = extract_by_key(self._data, self.INDEX_KEYS)
+ return result
+
+ def _from_legacy(self):
+ assert self._legacy and not self._data
+ result = {
+ 'metadata_version': self.METADATA_VERSION,
+ 'generator': self.GENERATOR,
+ }
+ lmd = self._legacy.todict(True) # skip missing ones
+ for k in ('name', 'version', 'license', 'summary', 'description',
+ 'classifier'):
+ if k in lmd:
+ if k == 'classifier':
+ nk = 'classifiers'
+ else:
+ nk = k
+ result[nk] = lmd[k]
+ kw = lmd.get('Keywords', [])
+ if kw == ['']:
+ kw = []
+ result['keywords'] = kw
+ keys = (('requires_dist', 'run_requires'),
+ ('setup_requires_dist', 'build_requires'))
+ for ok, nk in keys:
+ if ok in lmd and lmd[ok]:
+ result[nk] = [{'requires': lmd[ok]}]
+ result['provides'] = self.provides
+ author = {}
+ maintainer = {}
+ return result
+
+ LEGACY_MAPPING = {
+ 'name': 'Name',
+ 'version': 'Version',
+ ('extensions', 'python.details', 'license'): 'License',
+ 'summary': 'Summary',
+ 'description': 'Description',
+ ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page',
+ ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author',
+ ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email',
+ 'source_url': 'Download-URL',
+ ('extensions', 'python.details', 'classifiers'): 'Classifier',
+ }
+
+ def _to_legacy(self):
+ def process_entries(entries):
+ reqts = set()
+ for e in entries:
+ extra = e.get('extra')
+ env = e.get('environment')
+ rlist = e['requires']
+ for r in rlist:
+ if not env and not extra:
+ reqts.add(r)
+ else:
+ marker = ''
+ if extra:
+ marker = 'extra == "%s"' % extra
+ if env:
+ if marker:
+ marker = '(%s) and %s' % (env, marker)
+ else:
+ marker = env
+ reqts.add(';'.join((r, marker)))
+ return reqts
+
+ assert self._data and not self._legacy
+ result = LegacyMetadata()
+ nmd = self._data
+ # import pdb; pdb.set_trace()
+ for nk, ok in self.LEGACY_MAPPING.items():
+ if not isinstance(nk, tuple):
+ if nk in nmd:
+ result[ok] = nmd[nk]
+ else:
+ d = nmd
+ found = True
+ for k in nk:
+ try:
+ d = d[k]
+ except (KeyError, IndexError):
+ found = False
+ break
+ if found:
+ result[ok] = d
+ r1 = process_entries(self.run_requires + self.meta_requires)
+ r2 = process_entries(self.build_requires + self.dev_requires)
+ if self.extras:
+ result['Provides-Extra'] = sorted(self.extras)
+ result['Requires-Dist'] = sorted(r1)
+ result['Setup-Requires-Dist'] = sorted(r2)
+ # TODO: any other fields wanted
+ return result
+
+ def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True):
+ if [path, fileobj].count(None) != 1:
+ raise ValueError('Exactly one of path and fileobj is needed')
+ self.validate()
+ if legacy:
+ if self._legacy:
+ legacy_md = self._legacy
+ else:
+ legacy_md = self._to_legacy()
+ if path:
+ legacy_md.write(path, skip_unknown=skip_unknown)
+ else:
+ legacy_md.write_file(fileobj, skip_unknown=skip_unknown)
+ else:
+ if self._legacy:
+ d = self._from_legacy()
+ else:
+ d = self._data
+ if fileobj:
+ json.dump(d, fileobj, ensure_ascii=True, indent=2,
+ sort_keys=True)
+ else:
+ with codecs.open(path, 'w', 'utf-8') as f:
+ json.dump(d, f, ensure_ascii=True, indent=2,
+ sort_keys=True)
+
+ def add_requirements(self, requirements):
+ if self._legacy:
+ self._legacy.add_requirements(requirements)
+ else:
+ run_requires = self._data.setdefault('run_requires', [])
+ always = None
+ for entry in run_requires:
+ if 'environment' not in entry and 'extra' not in entry:
+ always = entry
+ break
+ if always is None:
+ always = { 'requires': requirements }
+ run_requires.insert(0, always)
+ else:
+ rset = set(always['requires']) | set(requirements)
+ always['requires'] = sorted(rset)
+
+ def __repr__(self):
+ name = self.name or '(no name)'
+ version = self.version or 'no version'
+ return '<%s %s %s (%s)>' % (self.__class__.__name__,
+ self.metadata_version, name, version)
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py
new file mode 100644
index 0000000000..18840167a9
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py
@@ -0,0 +1,355 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from __future__ import unicode_literals
+
+import bisect
+import io
+import logging
+import os
+import pkgutil
+import shutil
+import sys
+import types
+import zipimport
+
+from . import DistlibException
+from .util import cached_property, get_cache_base, path_to_cache_dir, Cache
+
+logger = logging.getLogger(__name__)
+
+
+cache = None # created when needed
+
+
+class ResourceCache(Cache):
+ def __init__(self, base=None):
+ if base is None:
+ # Use native string to avoid issues on 2.x: see Python #20140.
+ base = os.path.join(get_cache_base(), str('resource-cache'))
+ super(ResourceCache, self).__init__(base)
+
+ def is_stale(self, resource, path):
+ """
+ Is the cache stale for the given resource?
+
+ :param resource: The :class:`Resource` being cached.
+ :param path: The path of the resource in the cache.
+ :return: True if the cache is stale.
+ """
+ # Cache invalidation is a hard problem :-)
+ return True
+
+ def get(self, resource):
+ """
+ Get a resource into the cache.
+
+ :param resource: A :class:`Resource` instance.
+ :return: The pathname of the resource in the cache.
+ """
+ prefix, path = resource.finder.get_cache_info(resource)
+ if prefix is None:
+ result = path
+ else:
+ result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
+ dirname = os.path.dirname(result)
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname)
+ if not os.path.exists(result):
+ stale = True
+ else:
+ stale = self.is_stale(resource, path)
+ if stale:
+ # write the bytes of the resource to the cache location
+ with open(result, 'wb') as f:
+ f.write(resource.bytes)
+ return result
+
+
+class ResourceBase(object):
+ def __init__(self, finder, name):
+ self.finder = finder
+ self.name = name
+
+
+class Resource(ResourceBase):
+ """
+ A class representing an in-package resource, such as a data file. This is
+ not normally instantiated by user code, but rather by a
+ :class:`ResourceFinder` which manages the resource.
+ """
+ is_container = False # Backwards compatibility
+
+ def as_stream(self):
+ """
+ Get the resource as a stream.
+
+ This is not a property to make it obvious that it returns a new stream
+ each time.
+ """
+ return self.finder.get_stream(self)
+
+ @cached_property
+ def file_path(self):
+ global cache
+ if cache is None:
+ cache = ResourceCache()
+ return cache.get(self)
+
+ @cached_property
+ def bytes(self):
+ return self.finder.get_bytes(self)
+
+ @cached_property
+ def size(self):
+ return self.finder.get_size(self)
+
+
+class ResourceContainer(ResourceBase):
+ is_container = True # Backwards compatibility
+
+ @cached_property
+ def resources(self):
+ return self.finder.get_resources(self)
+
+
+class ResourceFinder(object):
+ """
+ Resource finder for file system resources.
+ """
+
+ if sys.platform.startswith('java'):
+ skipped_extensions = ('.pyc', '.pyo', '.class')
+ else:
+ skipped_extensions = ('.pyc', '.pyo')
+
+ def __init__(self, module):
+ self.module = module
+ self.loader = getattr(module, '__loader__', None)
+ self.base = os.path.dirname(getattr(module, '__file__', ''))
+
+ def _adjust_path(self, path):
+ return os.path.realpath(path)
+
+ def _make_path(self, resource_name):
+ # Issue #50: need to preserve type of path on Python 2.x
+ # like os.path._get_sep
+ if isinstance(resource_name, bytes): # should only happen on 2.x
+ sep = b'/'
+ else:
+ sep = '/'
+ parts = resource_name.split(sep)
+ parts.insert(0, self.base)
+ result = os.path.join(*parts)
+ return self._adjust_path(result)
+
+ def _find(self, path):
+ return os.path.exists(path)
+
+ def get_cache_info(self, resource):
+ return None, resource.path
+
+ def find(self, resource_name):
+ path = self._make_path(resource_name)
+ if not self._find(path):
+ result = None
+ else:
+ if self._is_directory(path):
+ result = ResourceContainer(self, resource_name)
+ else:
+ result = Resource(self, resource_name)
+ result.path = path
+ return result
+
+ def get_stream(self, resource):
+ return open(resource.path, 'rb')
+
+ def get_bytes(self, resource):
+ with open(resource.path, 'rb') as f:
+ return f.read()
+
+ def get_size(self, resource):
+ return os.path.getsize(resource.path)
+
+ def get_resources(self, resource):
+ def allowed(f):
+ return (f != '__pycache__' and not
+ f.endswith(self.skipped_extensions))
+ return set([f for f in os.listdir(resource.path) if allowed(f)])
+
+ def is_container(self, resource):
+ return self._is_directory(resource.path)
+
+ _is_directory = staticmethod(os.path.isdir)
+
+ def iterator(self, resource_name):
+ resource = self.find(resource_name)
+ if resource is not None:
+ todo = [resource]
+ while todo:
+ resource = todo.pop(0)
+ yield resource
+ if resource.is_container:
+ rname = resource.name
+ for name in resource.resources:
+ if not rname:
+ new_name = name
+ else:
+ new_name = '/'.join([rname, name])
+ child = self.find(new_name)
+ if child.is_container:
+ todo.append(child)
+ else:
+ yield child
+
+
+class ZipResourceFinder(ResourceFinder):
+ """
+ Resource finder for resources in .zip files.
+ """
+ def __init__(self, module):
+ super(ZipResourceFinder, self).__init__(module)
+ archive = self.loader.archive
+ self.prefix_len = 1 + len(archive)
+ # PyPy doesn't have a _files attr on zipimporter, and you can't set one
+ if hasattr(self.loader, '_files'):
+ self._files = self.loader._files
+ else:
+ self._files = zipimport._zip_directory_cache[archive]
+ self.index = sorted(self._files)
+
+ def _adjust_path(self, path):
+ return path
+
+ def _find(self, path):
+ path = path[self.prefix_len:]
+ if path in self._files:
+ result = True
+ else:
+ if path and path[-1] != os.sep:
+ path = path + os.sep
+ i = bisect.bisect(self.index, path)
+ try:
+ result = self.index[i].startswith(path)
+ except IndexError:
+ result = False
+ if not result:
+ logger.debug('_find failed: %r %r', path, self.loader.prefix)
+ else:
+ logger.debug('_find worked: %r %r', path, self.loader.prefix)
+ return result
+
+ def get_cache_info(self, resource):
+ prefix = self.loader.archive
+ path = resource.path[1 + len(prefix):]
+ return prefix, path
+
+ def get_bytes(self, resource):
+ return self.loader.get_data(resource.path)
+
+ def get_stream(self, resource):
+ return io.BytesIO(self.get_bytes(resource))
+
+ def get_size(self, resource):
+ path = resource.path[self.prefix_len:]
+ return self._files[path][3]
+
+ def get_resources(self, resource):
+ path = resource.path[self.prefix_len:]
+ if path and path[-1] != os.sep:
+ path += os.sep
+ plen = len(path)
+ result = set()
+ i = bisect.bisect(self.index, path)
+ while i < len(self.index):
+ if not self.index[i].startswith(path):
+ break
+ s = self.index[i][plen:]
+ result.add(s.split(os.sep, 1)[0]) # only immediate children
+ i += 1
+ return result
+
+ def _is_directory(self, path):
+ path = path[self.prefix_len:]
+ if path and path[-1] != os.sep:
+ path += os.sep
+ i = bisect.bisect(self.index, path)
+ try:
+ result = self.index[i].startswith(path)
+ except IndexError:
+ result = False
+ return result
+
+_finder_registry = {
+ type(None): ResourceFinder,
+ zipimport.zipimporter: ZipResourceFinder
+}
+
+try:
+ # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
+ try:
+ import _frozen_importlib_external as _fi
+ except ImportError:
+ import _frozen_importlib as _fi
+ _finder_registry[_fi.SourceFileLoader] = ResourceFinder
+ _finder_registry[_fi.FileFinder] = ResourceFinder
+ del _fi
+except (ImportError, AttributeError):
+ pass
+
+
+def register_finder(loader, finder_maker):
+ _finder_registry[type(loader)] = finder_maker
+
+_finder_cache = {}
+
+
+def finder(package):
+ """
+ Return a resource finder for a package.
+ :param package: The name of the package.
+ :return: A :class:`ResourceFinder` instance for the package.
+ """
+ if package in _finder_cache:
+ result = _finder_cache[package]
+ else:
+ if package not in sys.modules:
+ __import__(package)
+ module = sys.modules[package]
+ path = getattr(module, '__path__', None)
+ if path is None:
+ raise DistlibException('You cannot get a finder for a module, '
+ 'only for a package')
+ loader = getattr(module, '__loader__', None)
+ finder_maker = _finder_registry.get(type(loader))
+ if finder_maker is None:
+ raise DistlibException('Unable to locate finder for %r' % package)
+ result = finder_maker(module)
+ _finder_cache[package] = result
+ return result
+
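+# Illustrative sketch (not part of upstream distlib): the package name and
+# resource path below are made up. finder() returns a ResourceFinder for an
+# importable package; find() returns a Resource, a ResourceContainer for a
+# directory, or None if nothing matches.
+#
+# rf = finder('mypkg')
+# res = rf.find('data/defaults.cfg')
+# if res is not None and not res.is_container:
+# data = res.bytes # whole resource as bytes
+# with res.as_stream() as stream: # or read it as a stream
+# header = stream.read(16)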
+
+_dummy_module = types.ModuleType(str('__dummy__'))
+
+
+def finder_for_path(path):
+ """
+ Return a resource finder for a path, which should represent a container.
+
+ :param path: The path.
+ :return: A :class:`ResourceFinder` instance for the path.
+ """
+ result = None
+ # calls any path hooks, gets importer into cache
+ pkgutil.get_importer(path)
+ loader = sys.path_importer_cache.get(path)
+ finder = _finder_registry.get(type(loader))
+ if finder:
+ module = _dummy_module
+ module.__file__ = os.path.join(path, '')
+ module.__loader__ = loader
+ result = finder(module)
+ return result
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py
new file mode 100644
index 0000000000..03f8f21e0f
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py
@@ -0,0 +1,419 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2015 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from io import BytesIO
+import logging
+import os
+import re
+import struct
+import sys
+
+from .compat import sysconfig, detect_encoding, ZipFile
+from .resources import finder
+from .util import (FileOperator, get_export_entry, convert_path,
+ get_executable, in_venv)
+
+logger = logging.getLogger(__name__)
+
+_DEFAULT_MANIFEST = '''
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+ <assemblyIdentity version="1.0.0.0"
+ processorArchitecture="X86"
+ name="%s"
+ type="win32"/>
+
+ <!-- Identify the application security requirements. -->
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
+ </requestedPrivileges>
+ </security>
+ </trustInfo>
+</assembly>'''.strip()
+
+# check if Python is called on the first line with this expression
+FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
+SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*-
+import re
+import sys
+from %(module)s import %(import_name)s
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(%(func)s())
+'''
+
+
+def enquote_executable(executable):
+ if ' ' in executable:
+ # make sure we quote only the executable in case of env
+ # for example /usr/bin/env "/dir with spaces/bin/jython"
+ # instead of "/usr/bin/env /dir with spaces/bin/jython"
+ # otherwise the whole command line would be quoted
+ if executable.startswith('/usr/bin/env '):
+ env, _executable = executable.split(' ', 1)
+ if ' ' in _executable and not _executable.startswith('"'):
+ executable = '%s "%s"' % (env, _executable)
+ else:
+ if not executable.startswith('"'):
+ executable = '"%s"' % executable
+ return executable
+
+# Keep the old name around (for now), as there is at least one project using it!
+_enquote_executable = enquote_executable
+
+class ScriptMaker(object):
+ """
+ A class to copy or create scripts from source scripts or callable
+ specifications.
+ """
+ script_template = SCRIPT_TEMPLATE
+
+ executable = None # for shebangs
+
+ def __init__(self, source_dir, target_dir, add_launchers=True,
+ dry_run=False, fileop=None):
+ self.source_dir = source_dir
+ self.target_dir = target_dir
+ self.add_launchers = add_launchers
+ self.force = False
+ self.clobber = False
+ # It only makes sense to set mode bits on POSIX.
+ self.set_mode = (os.name == 'posix') or (os.name == 'java' and
+ os._name == 'posix')
+ self.variants = set(('', 'X.Y'))
+ self._fileop = fileop or FileOperator(dry_run)
+
+ self._is_nt = os.name == 'nt' or (
+ os.name == 'java' and os._name == 'nt')
+ self.version_info = sys.version_info
+
+ def _get_alternate_executable(self, executable, options):
+ if options.get('gui', False) and self._is_nt: # pragma: no cover
+ dn, fn = os.path.split(executable)
+ fn = fn.replace('python', 'pythonw')
+ executable = os.path.join(dn, fn)
+ return executable
+
+ if sys.platform.startswith('java'): # pragma: no cover
+ def _is_shell(self, executable):
+ """
+ Determine if the specified executable is a script
+ (contains a #! line)
+ """
+ try:
+ with open(executable) as fp:
+ return fp.read(2) == '#!'
+ except (OSError, IOError):
+ logger.warning('Failed to open %s', executable)
+ return False
+
+ def _fix_jython_executable(self, executable):
+ if self._is_shell(executable):
+ # The Jython workaround is not needed on Linux systems.
+ import java
+
+ if java.lang.System.getProperty('os.name') == 'Linux':
+ return executable
+ elif executable.lower().endswith('jython.exe'):
+ # Use wrapper exe for Jython on Windows
+ return executable
+ return '/usr/bin/env %s' % executable
+
+ def _build_shebang(self, executable, post_interp):
+ """
+ Build a shebang line. In the simple case (on Windows, or a shebang line
+ which is not too long and contains no spaces) use a simple formulation for
+ the shebang. Otherwise, use /bin/sh as the executable, with a contrived
+ shebang which allows the script to run either under Python or sh, using
+ suitable quoting. Thanks to Harald Nordgren for his input.
+
+ See also: http://www.in-ulm.de/~mascheck/various/shebang/#length
+ https://hg.mozilla.org/mozilla-central/file/tip/mach
+ """
+ if os.name != 'posix':
+ simple_shebang = True
+ else:
+ # Add 3 for '#!' prefix and newline suffix.
+ shebang_length = len(executable) + len(post_interp) + 3
+ if sys.platform == 'darwin':
+ max_shebang_length = 512
+ else:
+ max_shebang_length = 127
+ simple_shebang = ((b' ' not in executable) and
+ (shebang_length <= max_shebang_length))
+
+ if simple_shebang:
+ result = b'#!' + executable + post_interp + b'\n'
+ else:
+ result = b'#!/bin/sh\n'
+ result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n'
+ result += b"' '''"
+ return result
+
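+ # Illustrative note (not part of upstream distlib): for a hypothetical long
+ # interpreter path, the contrived form built above looks like
+ #
+ # #!/bin/sh
+ # '''exec' /very/long/path/to/bin/python3 "$0" "$@"
+ # ' '''
+ #
+ # sh collapses '''exec' to the word exec and replaces itself with the
+ # interpreter, while Python parses the second and third lines as a
+ # harmless triple-quoted string.
+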
+ def _get_shebang(self, encoding, post_interp=b'', options=None):
+ enquote = True
+ if self.executable:
+ executable = self.executable
+ enquote = False # assume this will be taken care of
+ elif not sysconfig.is_python_build():
+ executable = get_executable()
+ elif in_venv(): # pragma: no cover
+ executable = os.path.join(sysconfig.get_path('scripts'),
+ 'python%s' % sysconfig.get_config_var('EXE'))
+ else: # pragma: no cover
+ executable = os.path.join(
+ sysconfig.get_config_var('BINDIR'),
+ 'python%s%s' % (sysconfig.get_config_var('VERSION'),
+ sysconfig.get_config_var('EXE')))
+ if options:
+ executable = self._get_alternate_executable(executable, options)
+
+ if sys.platform.startswith('java'): # pragma: no cover
+ executable = self._fix_jython_executable(executable)
+
+ # Normalise case for Windows - COMMENTED OUT
+ # executable = os.path.normcase(executable)
+ # N.B. The normalising operation above has been commented out: See
+ # issue #124. Although paths in Windows are generally case-insensitive,
+ # they aren't always. For example, a path containing a ẞ (which is a
+ # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a
+ # LATIN SMALL LETTER SHARP S - U+00DF). The two are not considered by
+ # Windows as equivalent in path names.
+
+ # If the user didn't specify an executable, it may be necessary to
+ # cater for executable paths with spaces (not uncommon on Windows)
+ if enquote:
+ executable = enquote_executable(executable)
+ # Issue #51: don't use fsencode, since we later try to
+ # check that the shebang is decodable using utf-8.
+ executable = executable.encode('utf-8')
+ # in case of IronPython, play safe and enable frames support
+ if (sys.platform == 'cli' and '-X:Frames' not in post_interp
+ and '-X:FullFrames' not in post_interp): # pragma: no cover
+ post_interp += b' -X:Frames'
+ shebang = self._build_shebang(executable, post_interp)
+ # The Python parser reads a script as UTF-8 until it finds a
+ # #coding:xxx cookie. The shebang has to be the first line of
+ # the file, so the #coding:xxx cookie cannot come before it,
+ # which means the shebang itself must be decodable from UTF-8.
+ try:
+ shebang.decode('utf-8')
+ except UnicodeDecodeError: # pragma: no cover
+ raise ValueError(
+ 'The shebang (%r) is not decodable from utf-8' % shebang)
+ # If the script uses a custom encoding (declared with a
+ # #coding:xxx cookie), the shebang has to be decodable from
+ # the script encoding too.
+ if encoding != 'utf-8':
+ try:
+ shebang.decode(encoding)
+ except UnicodeDecodeError: # pragma: no cover
+ raise ValueError(
+ 'The shebang (%r) is not decodable '
+ 'from the script encoding (%r)' % (shebang, encoding))
+ return shebang
+
+ def _get_script_text(self, entry):
+ return self.script_template % dict(module=entry.prefix,
+ import_name=entry.suffix.split('.')[0],
+ func=entry.suffix)
+
+ manifest = _DEFAULT_MANIFEST
+
+ def get_manifest(self, exename):
+ base = os.path.basename(exename)
+ return self.manifest % base
+
+ def _write_script(self, names, shebang, script_bytes, filenames, ext):
+ use_launcher = self.add_launchers and self._is_nt
+ linesep = os.linesep.encode('utf-8')
+ if not shebang.endswith(linesep):
+ shebang += linesep
+ if not use_launcher:
+ script_bytes = shebang + script_bytes
+ else: # pragma: no cover
+ if ext == 'py':
+ launcher = self._get_launcher('t')
+ else:
+ launcher = self._get_launcher('w')
+ stream = BytesIO()
+ with ZipFile(stream, 'w') as zf:
+ zf.writestr('__main__.py', script_bytes)
+ zip_data = stream.getvalue()
+ script_bytes = launcher + shebang + zip_data
+ for name in names:
+ outname = os.path.join(self.target_dir, name)
+ if use_launcher: # pragma: no cover
+ n, e = os.path.splitext(outname)
+ if e.startswith('.py'):
+ outname = n
+ outname = '%s.exe' % outname
+ try:
+ self._fileop.write_binary_file(outname, script_bytes)
+ except Exception:
+ # Failed writing an executable - it might be in use.
+ logger.warning('Failed to write executable - trying to '
+ 'use .deleteme logic')
+ dfname = '%s.deleteme' % outname
+ if os.path.exists(dfname):
+ os.remove(dfname) # Not allowed to fail here
+ os.rename(outname, dfname) # nor here
+ self._fileop.write_binary_file(outname, script_bytes)
+ logger.debug('Able to replace executable using '
+ '.deleteme logic')
+ try:
+ os.remove(dfname)
+ except Exception:
+ pass # still in use - ignore error
+ else:
+ if self._is_nt and not outname.endswith('.' + ext): # pragma: no cover
+ outname = '%s.%s' % (outname, ext)
+ if os.path.exists(outname) and not self.clobber:
+ logger.warning('Skipping existing file %s', outname)
+ continue
+ self._fileop.write_binary_file(outname, script_bytes)
+ if self.set_mode:
+ self._fileop.set_executable_mode([outname])
+ filenames.append(outname)
+
+ def _make_script(self, entry, filenames, options=None):
+ post_interp = b''
+ if options:
+ args = options.get('interpreter_args', [])
+ if args:
+ args = ' %s' % ' '.join(args)
+ post_interp = args.encode('utf-8')
+ shebang = self._get_shebang('utf-8', post_interp, options=options)
+ script = self._get_script_text(entry).encode('utf-8')
+ name = entry.name
+ scriptnames = set()
+ if '' in self.variants:
+ scriptnames.add(name)
+ if 'X' in self.variants:
+ scriptnames.add('%s%s' % (name, self.version_info[0]))
+ if 'X.Y' in self.variants:
+ scriptnames.add('%s-%s.%s' % (name, self.version_info[0],
+ self.version_info[1]))
+ if options and options.get('gui', False):
+ ext = 'pyw'
+ else:
+ ext = 'py'
+ self._write_script(scriptnames, shebang, script, filenames, ext)
+
+ def _copy_script(self, script, filenames):
+ adjust = False
+ script = os.path.join(self.source_dir, convert_path(script))
+ outname = os.path.join(self.target_dir, os.path.basename(script))
+ if not self.force and not self._fileop.newer(script, outname):
+ logger.debug('not copying %s (up-to-date)', script)
+ return
+
+ # Always open the file, but ignore failures in dry-run mode --
+ # that way, we'll get accurate feedback if we can read the
+ # script.
+ try:
+ f = open(script, 'rb')
+ except IOError: # pragma: no cover
+ if not self.dry_run:
+ raise
+ f = None
+ else:
+ first_line = f.readline()
+ if not first_line: # pragma: no cover
+ logger.warning('%s: %s is an empty file (skipping)',
+ self.get_command_name(), script)
+ return
+
+ match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
+ if match:
+ adjust = True
+ post_interp = match.group(1) or b''
+
+ if not adjust:
+ if f:
+ f.close()
+ self._fileop.copy_file(script, outname)
+ if self.set_mode:
+ self._fileop.set_executable_mode([outname])
+ filenames.append(outname)
+ else:
+ logger.info('copying and adjusting %s -> %s', script,
+ self.target_dir)
+ if not self._fileop.dry_run:
+ encoding, lines = detect_encoding(f.readline)
+ f.seek(0)
+ shebang = self._get_shebang(encoding, post_interp)
+ if b'pythonw' in first_line: # pragma: no cover
+ ext = 'pyw'
+ else:
+ ext = 'py'
+ n = os.path.basename(outname)
+ self._write_script([n], shebang, f.read(), filenames, ext)
+ if f:
+ f.close()
+
+ @property
+ def dry_run(self):
+ return self._fileop.dry_run
+
+ @dry_run.setter
+ def dry_run(self, value):
+ self._fileop.dry_run = value
+
+ if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover
+ # Executable launcher support.
+ # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/
+
+ def _get_launcher(self, kind):
+ if struct.calcsize('P') == 8: # 64-bit
+ bits = '64'
+ else:
+ bits = '32'
+ name = '%s%s.exe' % (kind, bits)
+ # Issue 31: don't hardcode an absolute package name, but
+ # determine it relative to the current package
+ distlib_package = __name__.rsplit('.', 1)[0]
+ resource = finder(distlib_package).find(name)
+ if not resource:
+ msg = ('Unable to find resource %s in package %s' % (name,
+ distlib_package))
+ raise ValueError(msg)
+ return resource.bytes
+
+ # Public API follows
+
+ def make(self, specification, options=None):
+ """
+ Make a script.
+
+ :param specification: The specification, which is either a valid export
+ entry specification (to make a script from a
+ callable) or a filename (to make a script by
+ copying from a source location).
+ :param options: A dictionary of options controlling script generation.
+ :return: A list of all absolute pathnames written to.
+ """
+ filenames = []
+ entry = get_export_entry(specification)
+ if entry is None:
+ self._copy_script(specification, filenames)
+ else:
+ self._make_script(entry, filenames, options=options)
+ return filenames
+
+ def make_multiple(self, specifications, options=None):
+ """
+ Take a list of specifications and make scripts from them.
+ :param specifications: A list of specifications.
+ :return: A list of all absolute pathnames written to.
+ """
+ filenames = []
+ for specification in specifications:
+ filenames.extend(self.make(specification, options))
+ return filenames
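+
+
+# Illustrative sketch (not part of upstream distlib): the target directory,
+# package and callables below are made up. ScriptMaker writes scripts either
+# from an export-entry specification or by copying an existing script file.
+#
+# maker = ScriptMaker(source_dir=None, target_dir='/tmp/bin')
+# maker.clobber = True # overwrite existing files
+# maker.variants = set(('',)) # write only 'hello', not 'hello-X.Y'
+# written = maker.make('hello = mypkg.cli:main')
+# written += maker.make_multiple(['goodbye = mypkg.cli:bye'])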
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/t32.exe b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/t32.exe
new file mode 100644
index 0000000000..8932a18e45
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/t32.exe
Binary files differ
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/t64.exe b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/t64.exe
new file mode 100644
index 0000000000..325b8057c0
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/t64.exe
Binary files differ
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py
new file mode 100644
index 0000000000..01324eae46
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py
@@ -0,0 +1,1761 @@
+#
+# Copyright (C) 2012-2017 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+import codecs
+from collections import deque
+import contextlib
+import csv
+from glob import iglob as std_iglob
+import io
+import json
+import logging
+import os
+import py_compile
+import re
+import socket
+try:
+ import ssl
+except ImportError: # pragma: no cover
+ ssl = None
+import subprocess
+import sys
+import tarfile
+import tempfile
+import textwrap
+
+try:
+ import threading
+except ImportError: # pragma: no cover
+ import dummy_threading as threading
+import time
+
+from . import DistlibException
+from .compat import (string_types, text_type, shutil, raw_input, StringIO,
+ cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
+ splittype, HTTPHandler, BaseConfigurator, valid_ident,
+ Container, configparser, URLError, ZipFile, fsdecode,
+ unquote, urlparse)
+
+logger = logging.getLogger(__name__)
+
+#
+# Requirement parsing code as per PEP 508
+#
+
+IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
+VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
+COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
+MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
+OR = re.compile(r'^or\b\s*')
+AND = re.compile(r'^and\b\s*')
+NON_SPACE = re.compile(r'(\S+)\s*')
+STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')
+
+
+def parse_marker(marker_string):
+ """
+ Parse a marker string and return a (marker, remaining) tuple, where the
+ marker is a dictionary containing a marker expression and remaining is any
+ unparsed trailing text.
+
+ The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in
+ the expression grammar, or strings. A string contained in quotes is to be
+ interpreted as a literal string, and a string not contained in quotes is a
+ variable (such as os_name).
+ """
+ def marker_var(remaining):
+ # either identifier, or literal string
+ m = IDENTIFIER.match(remaining)
+ if m:
+ result = m.groups()[0]
+ remaining = remaining[m.end():]
+ elif not remaining:
+ raise SyntaxError('unexpected end of input')
+ else:
+ q = remaining[0]
+ if q not in '\'"':
+ raise SyntaxError('invalid expression: %s' % remaining)
+ oq = '\'"'.replace(q, '')
+ remaining = remaining[1:]
+ parts = [q]
+ while remaining:
+ # either a string chunk, or oq, or q to terminate
+ if remaining[0] == q:
+ break
+ elif remaining[0] == oq:
+ parts.append(oq)
+ remaining = remaining[1:]
+ else:
+ m = STRING_CHUNK.match(remaining)
+ if not m:
+ raise SyntaxError('error in string literal: %s' % remaining)
+ parts.append(m.groups()[0])
+ remaining = remaining[m.end():]
+ else:
+ s = ''.join(parts)
+ raise SyntaxError('unterminated string: %s' % s)
+ parts.append(q)
+ result = ''.join(parts)
+ remaining = remaining[1:].lstrip() # skip past closing quote
+ return result, remaining
+
+ def marker_expr(remaining):
+ if remaining and remaining[0] == '(':
+ result, remaining = marker(remaining[1:].lstrip())
+ if remaining[0] != ')':
+ raise SyntaxError('unterminated parenthesis: %s' % remaining)
+ remaining = remaining[1:].lstrip()
+ else:
+ lhs, remaining = marker_var(remaining)
+ while remaining:
+ m = MARKER_OP.match(remaining)
+ if not m:
+ break
+ op = m.groups()[0]
+ remaining = remaining[m.end():]
+ rhs, remaining = marker_var(remaining)
+ lhs = {'op': op, 'lhs': lhs, 'rhs': rhs}
+ result = lhs
+ return result, remaining
+
+ def marker_and(remaining):
+ lhs, remaining = marker_expr(remaining)
+ while remaining:
+ m = AND.match(remaining)
+ if not m:
+ break
+ remaining = remaining[m.end():]
+ rhs, remaining = marker_expr(remaining)
+ lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs}
+ return lhs, remaining
+
+ def marker(remaining):
+ lhs, remaining = marker_and(remaining)
+ while remaining:
+ m = OR.match(remaining)
+ if not m:
+ break
+ remaining = remaining[m.end():]
+ rhs, remaining = marker_and(remaining)
+ lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs}
+ return lhs, remaining
+
+ return marker(marker_string)
+
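+# Illustrative sketch (not part of upstream distlib): parse_marker() returns
+# the parsed expression together with any unparsed trailing text, e.g.
+#
+# expr, rest = parse_marker('python_version >= "3.6" and os_name == "posix"')
+# # expr == {'op': 'and',
+# # 'lhs': {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.6"'},
+# # 'rhs': {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'}}
+# # rest == ''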
+
+def parse_requirement(req):
+ """
+ Parse a requirement passed in as a string. Return a Container
+ whose attributes contain the various parts of the requirement.
+ """
+ remaining = req.strip()
+ if not remaining or remaining.startswith('#'):
+ return None
+ m = IDENTIFIER.match(remaining)
+ if not m:
+ raise SyntaxError('name expected: %s' % remaining)
+ distname = m.groups()[0]
+ remaining = remaining[m.end():]
+ extras = mark_expr = versions = uri = None
+ if remaining and remaining[0] == '[':
+ i = remaining.find(']', 1)
+ if i < 0:
+ raise SyntaxError('unterminated extra: %s' % remaining)
+ s = remaining[1:i]
+ remaining = remaining[i + 1:].lstrip()
+ extras = []
+ while s:
+ m = IDENTIFIER.match(s)
+ if not m:
+ raise SyntaxError('malformed extra: %s' % s)
+ extras.append(m.groups()[0])
+ s = s[m.end():]
+ if not s:
+ break
+ if s[0] != ',':
+ raise SyntaxError('comma expected in extras: %s' % s)
+ s = s[1:].lstrip()
+ if not extras:
+ extras = None
+ if remaining:
+ if remaining[0] == '@':
+ # it's a URI
+ remaining = remaining[1:].lstrip()
+ m = NON_SPACE.match(remaining)
+ if not m:
+ raise SyntaxError('invalid URI: %s' % remaining)
+ uri = m.groups()[0]
+ t = urlparse(uri)
+ # there are issues with Python and URL parsing, so this test
+ # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
+ # always parse invalid URLs correctly - it should raise
+ # exceptions for malformed URLs
+ if not (t.scheme and t.netloc):
+ raise SyntaxError('Invalid URL: %s' % uri)
+ remaining = remaining[m.end():].lstrip()
+ else:
+
+ def get_versions(ver_remaining):
+ """
+ Return a list of operator, version tuples if any are
+ specified, else None.
+ """
+ m = COMPARE_OP.match(ver_remaining)
+ versions = None
+ if m:
+ versions = []
+ while True:
+ op = m.groups()[0]
+ ver_remaining = ver_remaining[m.end():]
+ m = VERSION_IDENTIFIER.match(ver_remaining)
+ if not m:
+ raise SyntaxError('invalid version: %s' % ver_remaining)
+ v = m.groups()[0]
+ versions.append((op, v))
+ ver_remaining = ver_remaining[m.end():]
+ if not ver_remaining or ver_remaining[0] != ',':
+ break
+ ver_remaining = ver_remaining[1:].lstrip()
+ m = COMPARE_OP.match(ver_remaining)
+ if not m:
+ raise SyntaxError('invalid constraint: %s' % ver_remaining)
+ if not versions:
+ versions = None
+ return versions, ver_remaining
+
+ if remaining[0] != '(':
+ versions, remaining = get_versions(remaining)
+ else:
+ i = remaining.find(')', 1)
+ if i < 0:
+ raise SyntaxError('unterminated parenthesis: %s' % remaining)
+ s = remaining[1:i]
+ remaining = remaining[i + 1:].lstrip()
+ # As a special diversion from PEP 508, allow a version number
+ # a.b.c in parentheses as a synonym for ~= a.b.c (because this
+ # is allowed in earlier PEPs)
+ if COMPARE_OP.match(s):
+ versions, _ = get_versions(s)
+ else:
+ m = VERSION_IDENTIFIER.match(s)
+ if not m:
+ raise SyntaxError('invalid constraint: %s' % s)
+ v = m.groups()[0]
+ s = s[m.end():].lstrip()
+ if s:
+ raise SyntaxError('invalid constraint: %s' % s)
+ versions = [('~=', v)]
+
+ if remaining:
+ if remaining[0] != ';':
+ raise SyntaxError('invalid requirement: %s' % remaining)
+ remaining = remaining[1:].lstrip()
+
+ mark_expr, remaining = parse_marker(remaining)
+
+ if remaining and remaining[0] != '#':
+ raise SyntaxError('unexpected trailing data: %s' % remaining)
+
+ if not versions:
+ rs = distname
+ else:
+ rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions]))
+ return Container(name=distname, extras=extras, constraints=versions,
+ marker=mark_expr, url=uri, requirement=rs)
+
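+# Illustrative sketch (not part of upstream distlib): the project name and
+# extra below are made up.
+#
+# r = parse_requirement('foo[bar] (>= 1.0); python_version >= "3.6"')
+# # r.name == 'foo'
+# # r.extras == ['bar']
+# # r.constraints == [('>=', '1.0')]
+# # r.marker == {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.6"'}
+# # r.url is None
+# # r.requirement == 'foo >= 1.0'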
+
+def get_resources_dests(resources_root, rules):
+ """Find destinations for resource files"""
+
+ def get_rel_path(root, path):
+ # normalize separators; return the path relative to root, without a leading '/'
+ root = root.replace(os.path.sep, '/')
+ path = path.replace(os.path.sep, '/')
+ assert path.startswith(root)
+ return path[len(root):].lstrip('/')
+
+ destinations = {}
+ for base, suffix, dest in rules:
+ prefix = os.path.join(resources_root, base)
+ for abs_base in iglob(prefix):
+ abs_glob = os.path.join(abs_base, suffix)
+ for abs_path in iglob(abs_glob):
+ resource_file = get_rel_path(resources_root, abs_path)
+ if dest is None: # remove the entry if it was here
+ destinations.pop(resource_file, None)
+ else:
+ rel_path = get_rel_path(abs_base, abs_path)
+ rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
+ destinations[resource_file] = rel_dest + '/' + rel_path
+ return destinations
+
+
+def in_venv():
+ if hasattr(sys, 'real_prefix'):
+ # virtualenv venvs
+ result = True
+ else:
+ # PEP 405 venvs
+ result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
+ return result
+
+
+def get_executable():
+# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as
+# changes to the stub launcher mean that sys.executable always points
+# to the stub on OS X
+# if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__'
+# in os.environ):
+# result = os.environ['__PYVENV_LAUNCHER__']
+# else:
+# result = sys.executable
+# return result
+ result = os.path.normcase(sys.executable)
+ if not isinstance(result, text_type):
+ result = fsdecode(result)
+ return result
+
+
+def proceed(prompt, allowed_chars, error_prompt=None, default=None):
+ p = prompt
+ while True:
+ s = raw_input(p)
+ p = prompt
+ if not s and default:
+ s = default
+ if s:
+ c = s[0].lower()
+ if c in allowed_chars:
+ break
+ if error_prompt:
+ p = '%c: %s\n%s' % (c, error_prompt, prompt)
+ return c
+
+
+def extract_by_key(d, keys):
+ if isinstance(keys, string_types):
+ keys = keys.split()
+ result = {}
+ for key in keys:
+ if key in d:
+ result[key] = d[key]
+ return result
+
+def read_exports(stream):
+ if sys.version_info[0] >= 3:
+ # needs to be a text stream
+ stream = codecs.getreader('utf-8')(stream)
+ # Try to load as JSON, falling back on legacy format
+ data = stream.read()
+ stream = StringIO(data)
+ try:
+ jdata = json.load(stream)
+ result = jdata['extensions']['python.exports']['exports']
+ for group, entries in result.items():
+ for k, v in entries.items():
+ s = '%s = %s' % (k, v)
+ entry = get_export_entry(s)
+ assert entry is not None
+ entries[k] = entry
+ return result
+ except Exception:
+ stream.seek(0, 0)
+
+ def read_stream(cp, stream):
+ if hasattr(cp, 'read_file'):
+ cp.read_file(stream)
+ else:
+ cp.readfp(stream)
+
+ cp = configparser.ConfigParser()
+ try:
+ read_stream(cp, stream)
+ except configparser.MissingSectionHeaderError:
+ stream.close()
+ data = textwrap.dedent(data)
+ stream = StringIO(data)
+ read_stream(cp, stream)
+
+ result = {}
+ for key in cp.sections():
+ result[key] = entries = {}
+ for name, value in cp.items(key):
+ s = '%s = %s' % (name, value)
+ entry = get_export_entry(s)
+ assert entry is not None
+ #entry.dist = self
+ entries[name] = entry
+ return result
+
+
+def write_exports(exports, stream):
+ if sys.version_info[0] >= 3:
+ # needs to be a text stream
+ stream = codecs.getwriter('utf-8')(stream)
+ cp = configparser.ConfigParser()
+ for k, v in exports.items():
+ # TODO check k, v for valid values
+ cp.add_section(k)
+ for entry in v.values():
+ if entry.suffix is None:
+ s = entry.prefix
+ else:
+ s = '%s:%s' % (entry.prefix, entry.suffix)
+ if entry.flags:
+ s = '%s [%s]' % (s, ', '.join(entry.flags))
+ cp.set(k, entry.name, s)
+ cp.write(stream)
+
+
+@contextlib.contextmanager
+def tempdir():
+ td = tempfile.mkdtemp()
+ try:
+ yield td
+ finally:
+ shutil.rmtree(td)
+
+@contextlib.contextmanager
+def chdir(d):
+ cwd = os.getcwd()
+ try:
+ os.chdir(d)
+ yield
+ finally:
+ os.chdir(cwd)
+
+
+@contextlib.contextmanager
+def socket_timeout(seconds=15):
+ cto = socket.getdefaulttimeout()
+ try:
+ socket.setdefaulttimeout(seconds)
+ yield
+ finally:
+ socket.setdefaulttimeout(cto)
+
+
+class cached_property(object):
+ def __init__(self, func):
+ self.func = func
+ #for attr in ('__name__', '__module__', '__doc__'):
+ # setattr(self, attr, getattr(func, attr, None))
+
+ def __get__(self, obj, cls=None):
+ if obj is None:
+ return self
+ value = self.func(obj)
+ object.__setattr__(obj, self.func.__name__, value)
+ #obj.__dict__[self.func.__name__] = value = self.func(obj)
+ return value
+
+def convert_path(pathname):
+ """Return 'pathname' as a name that will work on the native filesystem.
+
+ The path is split on '/' and put back together again using the current
+ directory separator. Needed because filenames in the setup script are
+ always supplied in Unix style, and have to be converted to the local
+ convention before we can actually use them in the filesystem. Raises
+ ValueError on non-Unix-ish systems if 'pathname' either starts or
+ ends with a slash.
+ """
+ if os.sep == '/':
+ return pathname
+ if not pathname:
+ return pathname
+ if pathname[0] == '/':
+ raise ValueError("path '%s' cannot be absolute" % pathname)
+ if pathname[-1] == '/':
+ raise ValueError("path '%s' cannot end with '/'" % pathname)
+
+ paths = pathname.split('/')
+ while os.curdir in paths:
+ paths.remove(os.curdir)
+ if not paths:
+ return os.curdir
+ return os.path.join(*paths)
+
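+# Illustrative sketch (not part of upstream distlib): the path below is made up.
+#
+# convert_path('pkg/data/defaults.cfg')
+# # -> 'pkg/data/defaults.cfg' on POSIX (returned unchanged)
+# # -> 'pkg\\data\\defaults.cfg' on Windows (rebuilt with os.path.join)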
+
+class FileOperator(object):
+ def __init__(self, dry_run=False):
+ self.dry_run = dry_run
+ self.ensured = set()
+ self._init_record()
+
+ def _init_record(self):
+ self.record = False
+ self.files_written = set()
+ self.dirs_created = set()
+
+ def record_as_written(self, path):
+ if self.record:
+ self.files_written.add(path)
+
+ def newer(self, source, target):
+ """Tell if the source is newer than the target.
+
+ Returns true if 'source' exists and is more recently modified than
+ 'target', or if 'source' exists and 'target' doesn't.
+
+ Returns false if both exist and 'target' is the same age or younger
+ than 'source'. Raise DistlibException if 'source' does not exist.
+
+ Note that this test is not very accurate: files created in the same
+ second will have the same "age".
+ """
+ if not os.path.exists(source):
+ raise DistlibException("file '%r' does not exist" %
+ os.path.abspath(source))
+ if not os.path.exists(target):
+ return True
+
+ return os.stat(source).st_mtime > os.stat(target).st_mtime
+
+ def copy_file(self, infile, outfile, check=True):
+ """Copy a file respecting dry-run and force flags.
+ """
+ self.ensure_dir(os.path.dirname(outfile))
+ logger.info('Copying %s to %s', infile, outfile)
+ if not self.dry_run:
+ msg = None
+ if check:
+ if os.path.islink(outfile):
+ msg = '%s is a symlink' % outfile
+ elif os.path.exists(outfile) and not os.path.isfile(outfile):
+ msg = '%s is a non-regular file' % outfile
+ if msg:
+ raise ValueError(msg + ' which would be overwritten')
+ shutil.copyfile(infile, outfile)
+ self.record_as_written(outfile)
+
+ def copy_stream(self, instream, outfile, encoding=None):
+ assert not os.path.isdir(outfile)
+ self.ensure_dir(os.path.dirname(outfile))
+ logger.info('Copying stream %s to %s', instream, outfile)
+ if not self.dry_run:
+ if encoding is None:
+ outstream = open(outfile, 'wb')
+ else:
+ outstream = codecs.open(outfile, 'w', encoding=encoding)
+ try:
+ shutil.copyfileobj(instream, outstream)
+ finally:
+ outstream.close()
+ self.record_as_written(outfile)
+
+ def write_binary_file(self, path, data):
+ self.ensure_dir(os.path.dirname(path))
+ if not self.dry_run:
+ if os.path.exists(path):
+ os.remove(path)
+ with open(path, 'wb') as f:
+ f.write(data)
+ self.record_as_written(path)
+
+ def write_text_file(self, path, data, encoding):
+ self.write_binary_file(path, data.encode(encoding))
+
+ def set_mode(self, bits, mask, files):
+ if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
+ # Set the executable bits (owner, group, and world) on
+ # all the files specified.
+ for f in files:
+ if self.dry_run:
+ logger.info("changing mode of %s", f)
+ else:
+ mode = (os.stat(f).st_mode | bits) & mask
+ logger.info("changing mode of %s to %o", f, mode)
+ os.chmod(f, mode)
+
+ set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)
+
+ def ensure_dir(self, path):
+ path = os.path.abspath(path)
+ if path not in self.ensured and not os.path.exists(path):
+ self.ensured.add(path)
+ d, f = os.path.split(path)
+ self.ensure_dir(d)
+ logger.info('Creating %s' % path)
+ if not self.dry_run:
+ os.mkdir(path)
+ if self.record:
+ self.dirs_created.add(path)
+
+ def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False):
+ dpath = cache_from_source(path, not optimize)
+ logger.info('Byte-compiling %s to %s', path, dpath)
+ if not self.dry_run:
+ if force or self.newer(path, dpath):
+ if not prefix:
+ diagpath = None
+ else:
+ assert path.startswith(prefix)
+ diagpath = path[len(prefix):]
+ compile_kwargs = {}
+ if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'):
+ compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH
+ py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error
+ self.record_as_written(dpath)
+ return dpath
+
+ def ensure_removed(self, path):
+ if os.path.exists(path):
+ if os.path.isdir(path) and not os.path.islink(path):
+ logger.debug('Removing directory tree at %s', path)
+ if not self.dry_run:
+ shutil.rmtree(path)
+ if self.record:
+ if path in self.dirs_created:
+ self.dirs_created.remove(path)
+ else:
+ if os.path.islink(path):
+ s = 'link'
+ else:
+ s = 'file'
+ logger.debug('Removing %s %s', s, path)
+ if not self.dry_run:
+ os.remove(path)
+ if self.record:
+ if path in self.files_written:
+ self.files_written.remove(path)
+
+ def is_writable(self, path):
+ result = False
+ while not result:
+ if os.path.exists(path):
+ result = os.access(path, os.W_OK)
+ break
+ parent = os.path.dirname(path)
+ if parent == path:
+ break
+ path = parent
+ return result
+
+ def commit(self):
+ """
+ Commit recorded changes, turn off recording, return
+ changes.
+ """
+ assert self.record
+ result = self.files_written, self.dirs_created
+ self._init_record()
+ return result
+
+ def rollback(self):
+ if not self.dry_run:
+ for f in list(self.files_written):
+ if os.path.exists(f):
+ os.remove(f)
+ # dirs should all be empty now, except perhaps for
+ # __pycache__ subdirs
+ # reverse so that subdirs appear before their parents
+ dirs = sorted(self.dirs_created, reverse=True)
+ for d in dirs:
+ flist = os.listdir(d)
+ if flist:
+ assert flist == ['__pycache__']
+ sd = os.path.join(d, flist[0])
+ os.rmdir(sd)
+ os.rmdir(d) # should fail if non-empty
+ self._init_record()
+
+def resolve(module_name, dotted_path):
+ if module_name in sys.modules:
+ mod = sys.modules[module_name]
+ else:
+ mod = __import__(module_name)
+ if dotted_path is None:
+ result = mod
+ else:
+ parts = dotted_path.split('.')
+ result = getattr(mod, parts.pop(0))
+ for p in parts:
+ result = getattr(result, p)
+ return result
+
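+# Illustrative sketch (not part of upstream distlib): resolve() turns a module
+# name plus an optional dotted attribute path into the named object, e.g.
+#
+# resolve('logging', 'getLogger') # -> the logging.getLogger function
+# resolve('os.path', 'join') # -> os.path.join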
+
+class ExportEntry(object):
+ def __init__(self, name, prefix, suffix, flags):
+ self.name = name
+ self.prefix = prefix
+ self.suffix = suffix
+ self.flags = flags
+
+ @cached_property
+ def value(self):
+ return resolve(self.prefix, self.suffix)
+
+ def __repr__(self): # pragma: no cover
+ return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
+ self.suffix, self.flags)
+
+ def __eq__(self, other):
+ if not isinstance(other, ExportEntry):
+ result = False
+ else:
+ result = (self.name == other.name and
+ self.prefix == other.prefix and
+ self.suffix == other.suffix and
+ self.flags == other.flags)
+ return result
+
+ __hash__ = object.__hash__
+
+
+ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
+ \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
+ \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
+ ''', re.VERBOSE)
+
+def get_export_entry(specification):
+ m = ENTRY_RE.search(specification)
+ if not m:
+ result = None
+ if '[' in specification or ']' in specification:
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
+ else:
+ d = m.groupdict()
+ name = d['name']
+ path = d['callable']
+ colons = path.count(':')
+ if colons == 0:
+ prefix, suffix = path, None
+ else:
+ if colons != 1:
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
+ prefix, suffix = path.split(':')
+ flags = d['flags']
+ if flags is None:
+ if '[' in specification or ']' in specification:
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
+ flags = []
+ else:
+ flags = [f.strip() for f in flags.split(',')]
+ result = ExportEntry(name, prefix, suffix, flags)
+ return result
+
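+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# the specification string below is hypothetical, in the 'name = module:callable [flags]'
+# form matched by ENTRY_RE above.
+def _example_get_export_entry():  # pragma: no cover
+    entry = get_export_entry('hello = mypkg.cli:main [gui]')
+    # entry.name == 'hello', entry.prefix == 'mypkg.cli',
+    # entry.suffix == 'main', entry.flags == ['gui']
+    return entry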
+
+def get_cache_base(suffix=None):
+ """
+ Return the default base location for distlib caches. If the directory does
+ not exist, it is created. Use the suffix provided for the base directory,
+ and default to '.distlib' if it isn't provided.
+
+ On Windows, if LOCALAPPDATA is defined in the environment, then it is
+ assumed to be a directory, and will be the parent directory of the result.
+ On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
+    directory - using os.path.expanduser('~') - will be the parent directory of
+ the result.
+
+ The result is just the directory '.distlib' in the parent directory as
+ determined above, or with the name specified with ``suffix``.
+ """
+ if suffix is None:
+ suffix = '.distlib'
+ if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
+ result = os.path.expandvars('$localappdata')
+ else:
+ # Assume posix, or old Windows
+ result = os.path.expanduser('~')
+ # we use 'isdir' instead of 'exists', because we want to
+ # fail if there's a file with that name
+ if os.path.isdir(result):
+ usable = os.access(result, os.W_OK)
+ if not usable:
+ logger.warning('Directory exists but is not writable: %s', result)
+ else:
+ try:
+ os.makedirs(result)
+ usable = True
+ except OSError:
+ logger.warning('Unable to create %s', result, exc_info=True)
+ usable = False
+ if not usable:
+ result = tempfile.mkdtemp()
+ logger.warning('Default location unusable, using %s', result)
+ return os.path.join(result, suffix)
+
+
+def path_to_cache_dir(path):
+ """
+ Convert an absolute path to a directory name for use in a cache.
+
+ The algorithm used is:
+
+ #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
+ #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
+ #. ``'.cache'`` is appended.
+ """
+ d, p = os.path.splitdrive(os.path.abspath(path))
+ if d:
+ d = d.replace(':', '---')
+ p = p.replace(os.sep, '--')
+ return d + p + '.cache'
+
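+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# on a POSIX system (os.sep == '/') the conversion is purely textual.
+def _example_path_to_cache_dir():  # pragma: no cover
+    return path_to_cache_dir('/home/user/project')  # -> '--home--user--project.cache'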
+
+def ensure_slash(s):
+ if not s.endswith('/'):
+ return s + '/'
+ return s
+
+
+def parse_credentials(netloc):
+ username = password = None
+ if '@' in netloc:
+ prefix, netloc = netloc.rsplit('@', 1)
+ if ':' not in prefix:
+ username = prefix
+ else:
+ username, password = prefix.split(':', 1)
+ if username:
+ username = unquote(username)
+ if password:
+ password = unquote(password)
+ return username, password, netloc
+
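+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# the netloc below is hypothetical; credentials are split off and percent-decoded.
+def _example_parse_credentials():  # pragma: no cover
+    # -> ('user', 's@cret', 'example.com')
+    return parse_credentials('user:s%40cret@example.com')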
+
+def get_process_umask():
+ result = os.umask(0o22)
+ os.umask(result)
+ return result
+
+def is_string_sequence(seq):
+ result = True
+ i = None
+ for i, s in enumerate(seq):
+ if not isinstance(s, string_types):
+ result = False
+ break
+ assert i is not None
+ return result
+
+PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
+ '([a-z0-9_.+-]+)', re.I)
+PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')
+
+
+def split_filename(filename, project_name=None):
+ """
+ Extract name, version, python version from a filename (no extension)
+
+ Return name, version, pyver or None
+ """
+ result = None
+ pyver = None
+ filename = unquote(filename).replace(' ', '-')
+ m = PYTHON_VERSION.search(filename)
+ if m:
+ pyver = m.group(1)
+ filename = filename[:m.start()]
+ if project_name and len(filename) > len(project_name) + 1:
+ m = re.match(re.escape(project_name) + r'\b', filename)
+ if m:
+ n = m.end()
+ result = filename[:n], filename[n + 1:], pyver
+ if result is None:
+ m = PROJECT_NAME_AND_VERSION.match(filename)
+ if m:
+ result = m.group(1), m.group(3), pyver
+ return result
+
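+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# the filename below is hypothetical; a trailing '-pyX.Y' marker is split out
+# into the third element of the result.
+def _example_split_filename():  # pragma: no cover
+    return split_filename('foo_bar-1.0.2-py2.7')  # -> ('foo_bar', '1.0.2', '2.7')
+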
+# Allow spaces in name because of legacy dists like "Twisted Core"
+NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
+ r'\(\s*(?P<ver>[^\s)]+)\)$')
+
+def parse_name_and_version(p):
+ """
+    A utility function used to get name and version from a string.
+
+ From e.g. a Provides-Dist value.
+
+ :param p: A value in a form 'foo (1.0)'
+ :return: The name and version as a tuple.
+ """
+ m = NAME_VERSION_RE.match(p)
+ if not m:
+ raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
+ d = m.groupdict()
+ return d['name'].strip().lower(), d['ver']
+
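+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+def _example_parse_name_and_version():  # pragma: no cover
+    return parse_name_and_version('foo (1.0)')  # -> ('foo', '1.0')
+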
+def get_extras(requested, available):
+ result = set()
+ requested = set(requested or [])
+ available = set(available or [])
+ if '*' in requested:
+ requested.remove('*')
+ result |= available
+ for r in requested:
+ if r == '-':
+ result.add(r)
+ elif r.startswith('-'):
+ unwanted = r[1:]
+ if unwanted not in available:
+ logger.warning('undeclared extra: %s' % unwanted)
+ if unwanted in result:
+ result.remove(unwanted)
+ else:
+ if r not in available:
+ logger.warning('undeclared extra: %s' % r)
+ result.add(r)
+ return result
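+
+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# '*' requests every declared extra and a leading '-' removes one again.
+def _example_get_extras():  # pragma: no cover
+    return get_extras(['*', '-tests'], ['docs', 'tests'])  # -> {'docs'}
+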
+#
+# Extended metadata functionality
+#
+
+def _get_external_data(url):
+ result = {}
+ try:
+ # urlopen might fail if it runs into redirections,
+ # because of Python issue #13696. Fixed in locators
+ # using a custom redirect handler.
+ resp = urlopen(url)
+ headers = resp.info()
+ ct = headers.get('Content-Type')
+ if not ct.startswith('application/json'):
+ logger.debug('Unexpected response for JSON request: %s', ct)
+ else:
+ reader = codecs.getreader('utf-8')(resp)
+ #data = reader.read().decode('utf-8')
+ #result = json.loads(data)
+ result = json.load(reader)
+ except Exception as e:
+ logger.exception('Failed to get external data for %s: %s', url, e)
+ return result
+
+_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'
+
+def get_project_data(name):
+ url = '%s/%s/project.json' % (name[0].upper(), name)
+ url = urljoin(_external_data_base_url, url)
+ result = _get_external_data(url)
+ return result
+
+def get_package_data(name, version):
+ url = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
+ url = urljoin(_external_data_base_url, url)
+ return _get_external_data(url)
+
+
+class Cache(object):
+ """
+    A class implementing a cache for resources that need to live in the file system,
+    e.g. shared libraries. This class was moved here from resources because it
+    can be used by other modules, e.g. the wheel module.
+ """
+
+ def __init__(self, base):
+ """
+ Initialise an instance.
+
+ :param base: The base directory where the cache should be located.
+ """
+ # we use 'isdir' instead of 'exists', because we want to
+ # fail if there's a file with that name
+ if not os.path.isdir(base): # pragma: no cover
+ os.makedirs(base)
+ if (os.stat(base).st_mode & 0o77) != 0:
+ logger.warning('Directory \'%s\' is not private', base)
+ self.base = os.path.abspath(os.path.normpath(base))
+
+ def prefix_to_dir(self, prefix):
+ """
+ Converts a resource prefix to a directory name in the cache.
+ """
+ return path_to_cache_dir(prefix)
+
+ def clear(self):
+ """
+ Clear the cache.
+ """
+ not_removed = []
+ for fn in os.listdir(self.base):
+ fn = os.path.join(self.base, fn)
+ try:
+ if os.path.islink(fn) or os.path.isfile(fn):
+ os.remove(fn)
+ elif os.path.isdir(fn):
+ shutil.rmtree(fn)
+ except Exception:
+ not_removed.append(fn)
+ return not_removed
+
+
+class EventMixin(object):
+ """
+ A very simple publish/subscribe system.
+ """
+ def __init__(self):
+ self._subscribers = {}
+
+ def add(self, event, subscriber, append=True):
+ """
+ Add a subscriber for an event.
+
+ :param event: The name of an event.
+ :param subscriber: The subscriber to be added (and called when the
+ event is published).
+ :param append: Whether to append or prepend the subscriber to an
+ existing subscriber list for the event.
+ """
+ subs = self._subscribers
+ if event not in subs:
+ subs[event] = deque([subscriber])
+ else:
+ sq = subs[event]
+ if append:
+ sq.append(subscriber)
+ else:
+ sq.appendleft(subscriber)
+
+ def remove(self, event, subscriber):
+ """
+ Remove a subscriber for an event.
+
+ :param event: The name of an event.
+ :param subscriber: The subscriber to be removed.
+ """
+ subs = self._subscribers
+ if event not in subs:
+ raise ValueError('No subscribers: %r' % event)
+ subs[event].remove(subscriber)
+
+ def get_subscribers(self, event):
+ """
+ Return an iterator for the subscribers for an event.
+ :param event: The event to return subscribers for.
+ """
+ return iter(self._subscribers.get(event, ()))
+
+ def publish(self, event, *args, **kwargs):
+ """
+        Publish an event and return a list of values returned by its
+ subscribers.
+
+ :param event: The event to publish.
+ :param args: The positional arguments to pass to the event's
+ subscribers.
+ :param kwargs: The keyword arguments to pass to the event's
+ subscribers.
+ """
+ result = []
+ for subscriber in self.get_subscribers(event):
+ try:
+ value = subscriber(event, *args, **kwargs)
+ except Exception:
+ logger.exception('Exception during event publication')
+ value = None
+ result.append(value)
+ logger.debug('publish %s: args = %s, kwargs = %s, result = %s',
+ event, args, kwargs, result)
+ return result
+
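+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# subscribers are called as subscriber(event, *args, **kwargs) and publish()
+# collects their return values.
+def _example_event_mixin():  # pragma: no cover
+    bus = EventMixin()
+
+    def on_change(event, value):
+        return value * 2
+
+    bus.add('change', on_change)
+    return bus.publish('change', 21)  # -> [42]
+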
+#
+# Simple sequencing
+#
+class Sequencer(object):
+ def __init__(self):
+ self._preds = {}
+ self._succs = {}
+ self._nodes = set() # nodes with no preds/succs
+
+ def add_node(self, node):
+ self._nodes.add(node)
+
+ def remove_node(self, node, edges=False):
+ if node in self._nodes:
+ self._nodes.remove(node)
+ if edges:
+ for p in set(self._preds.get(node, ())):
+ self.remove(p, node)
+ for s in set(self._succs.get(node, ())):
+ self.remove(node, s)
+ # Remove empties
+ for k, v in list(self._preds.items()):
+ if not v:
+ del self._preds[k]
+ for k, v in list(self._succs.items()):
+ if not v:
+ del self._succs[k]
+
+ def add(self, pred, succ):
+ assert pred != succ
+ self._preds.setdefault(succ, set()).add(pred)
+ self._succs.setdefault(pred, set()).add(succ)
+
+ def remove(self, pred, succ):
+ assert pred != succ
+ try:
+ preds = self._preds[succ]
+ succs = self._succs[pred]
+ except KeyError: # pragma: no cover
+ raise ValueError('%r not a successor of anything' % succ)
+ try:
+ preds.remove(pred)
+ succs.remove(succ)
+ except KeyError: # pragma: no cover
+ raise ValueError('%r not a successor of %r' % (succ, pred))
+
+ def is_step(self, step):
+ return (step in self._preds or step in self._succs or
+ step in self._nodes)
+
+ def get_steps(self, final):
+ if not self.is_step(final):
+ raise ValueError('Unknown: %r' % final)
+ result = []
+ todo = []
+ seen = set()
+ todo.append(final)
+ while todo:
+ step = todo.pop(0)
+ if step in seen:
+ # if a step was already seen,
+ # move it to the end (so it will appear earlier
+ # when reversed on return) ... but not for the
+ # final step, as that would be confusing for
+ # users
+ if step != final:
+ result.remove(step)
+ result.append(step)
+ else:
+ seen.add(step)
+ result.append(step)
+ preds = self._preds.get(step, ())
+ todo.extend(preds)
+ return reversed(result)
+
+ @property
+ def strong_connections(self):
+ #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
+ index_counter = [0]
+ stack = []
+ lowlinks = {}
+ index = {}
+ result = []
+
+ graph = self._succs
+
+ def strongconnect(node):
+ # set the depth index for this node to the smallest unused index
+ index[node] = index_counter[0]
+ lowlinks[node] = index_counter[0]
+ index_counter[0] += 1
+ stack.append(node)
+
+ # Consider successors
+ try:
+ successors = graph[node]
+ except Exception:
+ successors = []
+ for successor in successors:
+ if successor not in lowlinks:
+ # Successor has not yet been visited
+ strongconnect(successor)
+ lowlinks[node] = min(lowlinks[node],lowlinks[successor])
+ elif successor in stack:
+ # the successor is in the stack and hence in the current
+ # strongly connected component (SCC)
+ lowlinks[node] = min(lowlinks[node],index[successor])
+
+ # If `node` is a root node, pop the stack and generate an SCC
+ if lowlinks[node] == index[node]:
+ connected_component = []
+
+ while True:
+ successor = stack.pop()
+ connected_component.append(successor)
+ if successor == node: break
+ component = tuple(connected_component)
+ # storing the result
+ result.append(component)
+
+ for node in graph:
+ if node not in lowlinks:
+ strongconnect(node)
+
+ return result
+
+ @property
+ def dot(self):
+ result = ['digraph G {']
+ for succ in self._preds:
+ preds = self._preds[succ]
+ for pred in preds:
+ result.append(' %s -> %s;' % (pred, succ))
+ for node in self._nodes:
+ result.append(' %s;' % node)
+ result.append('}')
+ return '\n'.join(result)
+
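+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# add(pred, succ) records an ordering constraint, and get_steps() yields the
+# steps leading up to a target in dependency order.
+def _example_sequencer():  # pragma: no cover
+    seq = Sequencer()
+    seq.add('build', 'test')
+    seq.add('test', 'release')
+    return list(seq.get_steps('release'))  # -> ['build', 'test', 'release']
+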
+#
+# Unarchiving functionality for zip, tar, tgz, tbz, whl
+#
+
+ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip',
+ '.tgz', '.tbz', '.whl')
+
+def unarchive(archive_filename, dest_dir, format=None, check=True):
+
+ def check_path(path):
+ if not isinstance(path, text_type):
+ path = path.decode('utf-8')
+ p = os.path.abspath(os.path.join(dest_dir, path))
+ if not p.startswith(dest_dir) or p[plen] != os.sep:
+ raise ValueError('path outside destination: %r' % p)
+
+ dest_dir = os.path.abspath(dest_dir)
+ plen = len(dest_dir)
+ archive = None
+ if format is None:
+ if archive_filename.endswith(('.zip', '.whl')):
+ format = 'zip'
+ elif archive_filename.endswith(('.tar.gz', '.tgz')):
+ format = 'tgz'
+ mode = 'r:gz'
+ elif archive_filename.endswith(('.tar.bz2', '.tbz')):
+ format = 'tbz'
+ mode = 'r:bz2'
+ elif archive_filename.endswith('.tar'):
+ format = 'tar'
+ mode = 'r'
+ else: # pragma: no cover
+ raise ValueError('Unknown format for %r' % archive_filename)
+ try:
+ if format == 'zip':
+ archive = ZipFile(archive_filename, 'r')
+ if check:
+ names = archive.namelist()
+ for name in names:
+ check_path(name)
+ else:
+ archive = tarfile.open(archive_filename, mode)
+ if check:
+ names = archive.getnames()
+ for name in names:
+ check_path(name)
+ if format != 'zip' and sys.version_info[0] < 3:
+ # See Python issue 17153. If the dest path contains Unicode,
+ # tarfile extraction fails on Python 2.x if a member path name
+ # contains non-ASCII characters - it leads to an implicit
+ # bytes -> unicode conversion using ASCII to decode.
+ for tarinfo in archive.getmembers():
+ if not isinstance(tarinfo.name, text_type):
+ tarinfo.name = tarinfo.name.decode('utf-8')
+ archive.extractall(dest_dir)
+
+ finally:
+ if archive:
+ archive.close()
+
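+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# the paths below are hypothetical; the format is inferred from the extension
+# and each member path is checked so it cannot escape dest_dir.
+def _example_unarchive():  # pragma: no cover
+    unarchive('dist-1.0.tar.gz', 'build/unpacked')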
+
+def zip_dir(directory):
+ """zip a directory tree into a BytesIO object"""
+ result = io.BytesIO()
+ dlen = len(directory)
+ with ZipFile(result, "w") as zf:
+ for root, dirs, files in os.walk(directory):
+ for name in files:
+ full = os.path.join(root, name)
+ rel = root[dlen:]
+ dest = os.path.join(rel, name)
+ zf.write(full, dest)
+ return result
+
+#
+# Simple progress bar
+#
+
+UNITS = ('', 'K', 'M', 'G','T','P')
+
+
+class Progress(object):
+ unknown = 'UNKNOWN'
+
+ def __init__(self, minval=0, maxval=100):
+ assert maxval is None or maxval >= minval
+ self.min = self.cur = minval
+ self.max = maxval
+ self.started = None
+ self.elapsed = 0
+ self.done = False
+
+ def update(self, curval):
+ assert self.min <= curval
+ assert self.max is None or curval <= self.max
+ self.cur = curval
+ now = time.time()
+ if self.started is None:
+ self.started = now
+ else:
+ self.elapsed = now - self.started
+
+ def increment(self, incr):
+ assert incr >= 0
+ self.update(self.cur + incr)
+
+ def start(self):
+ self.update(self.min)
+ return self
+
+ def stop(self):
+ if self.max is not None:
+ self.update(self.max)
+ self.done = True
+
+ @property
+ def maximum(self):
+ return self.unknown if self.max is None else self.max
+
+ @property
+ def percentage(self):
+ if self.done:
+ result = '100 %'
+ elif self.max is None:
+ result = ' ?? %'
+ else:
+ v = 100.0 * (self.cur - self.min) / (self.max - self.min)
+ result = '%3d %%' % v
+ return result
+
+ def format_duration(self, duration):
+ if (duration <= 0) and self.max is None or self.cur == self.min:
+ result = '??:??:??'
+ #elif duration < 1:
+ # result = '--:--:--'
+ else:
+ result = time.strftime('%H:%M:%S', time.gmtime(duration))
+ return result
+
+ @property
+ def ETA(self):
+ if self.done:
+ prefix = 'Done'
+ t = self.elapsed
+ #import pdb; pdb.set_trace()
+ else:
+ prefix = 'ETA '
+ if self.max is None:
+ t = -1
+ elif self.elapsed == 0 or (self.cur == self.min):
+ t = 0
+ else:
+ #import pdb; pdb.set_trace()
+ t = float(self.max - self.min)
+ t /= self.cur - self.min
+ t = (t - 1) * self.elapsed
+ return '%s: %s' % (prefix, self.format_duration(t))
+
+ @property
+ def speed(self):
+ if self.elapsed == 0:
+ result = 0.0
+ else:
+ result = (self.cur - self.min) / self.elapsed
+ for unit in UNITS:
+ if result < 1000:
+ break
+ result /= 1000.0
+ return '%d %sB/s' % (result, unit)
+
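+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# the exact ETA and speed strings depend on wall-clock timing.
+def _example_progress():  # pragma: no cover
+    p = Progress(maxval=200).start()
+    p.increment(50)
+    return p.percentage, p.ETA, p.speed  # percentage -> ' 25 %'
+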
+#
+# Glob functionality
+#
+
+RICH_GLOB = re.compile(r'\{([^}]*)\}')
+_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
+_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
+
+
+def iglob(path_glob):
+ """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
+ if _CHECK_RECURSIVE_GLOB.search(path_glob):
+ msg = """invalid glob %r: recursive glob "**" must be used alone"""
+ raise ValueError(msg % path_glob)
+ if _CHECK_MISMATCH_SET.search(path_glob):
+ msg = """invalid glob %r: mismatching set marker '{' or '}'"""
+ raise ValueError(msg % path_glob)
+ return _iglob(path_glob)
+
+
+def _iglob(path_glob):
+ rich_path_glob = RICH_GLOB.split(path_glob, 1)
+ if len(rich_path_glob) > 1:
+ assert len(rich_path_glob) == 3, rich_path_glob
+ prefix, set, suffix = rich_path_glob
+ for item in set.split(','):
+ for path in _iglob(''.join((prefix, item, suffix))):
+ yield path
+ else:
+ if '**' not in path_glob:
+ for item in std_iglob(path_glob):
+ yield item
+ else:
+ prefix, radical = path_glob.split('**', 1)
+ if prefix == '':
+ prefix = '.'
+ if radical == '':
+ radical = '*'
+ else:
+                # support both '/' and '\\' as separators after '**'
+ radical = radical.lstrip('/')
+ radical = radical.lstrip('\\')
+ for path, dir, files in os.walk(prefix):
+ path = os.path.normpath(path)
+ for fn in _iglob(os.path.join(path, radical)):
+ yield fn
+
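+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# the pattern combines a {a,b} set with a recursive '**'; the results depend
+# on the contents of the current directory.
+def _example_iglob():  # pragma: no cover
+    return list(iglob('{src,tests}/**/*.py'))
+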
+if ssl:
+ from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
+ CertificateError)
+
+
+#
+# HTTPSConnection which verifies certificates/matches domains
+#
+
+ class HTTPSConnection(httplib.HTTPSConnection):
+ ca_certs = None # set this to the path to the certs file (.pem)
+ check_domain = True # only used if ca_certs is not None
+
+ # noinspection PyPropertyAccess
+ def connect(self):
+ sock = socket.create_connection((self.host, self.port), self.timeout)
+ if getattr(self, '_tunnel_host', False):
+ self.sock = sock
+ self._tunnel()
+
+ if not hasattr(ssl, 'SSLContext'):
+ # For 2.x
+ if self.ca_certs:
+ cert_reqs = ssl.CERT_REQUIRED
+ else:
+ cert_reqs = ssl.CERT_NONE
+ self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
+ cert_reqs=cert_reqs,
+ ssl_version=ssl.PROTOCOL_SSLv23,
+ ca_certs=self.ca_certs)
+ else: # pragma: no cover
+ context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+ if hasattr(ssl, 'OP_NO_SSLv2'):
+ context.options |= ssl.OP_NO_SSLv2
+ if self.cert_file:
+ context.load_cert_chain(self.cert_file, self.key_file)
+ kwargs = {}
+ if self.ca_certs:
+ context.verify_mode = ssl.CERT_REQUIRED
+ context.load_verify_locations(cafile=self.ca_certs)
+ if getattr(ssl, 'HAS_SNI', False):
+ kwargs['server_hostname'] = self.host
+ self.sock = context.wrap_socket(sock, **kwargs)
+ if self.ca_certs and self.check_domain:
+ try:
+ match_hostname(self.sock.getpeercert(), self.host)
+ logger.debug('Host verified: %s', self.host)
+ except CertificateError: # pragma: no cover
+ self.sock.shutdown(socket.SHUT_RDWR)
+ self.sock.close()
+ raise
+
+ class HTTPSHandler(BaseHTTPSHandler):
+ def __init__(self, ca_certs, check_domain=True):
+ BaseHTTPSHandler.__init__(self)
+ self.ca_certs = ca_certs
+ self.check_domain = check_domain
+
+ def _conn_maker(self, *args, **kwargs):
+ """
+ This is called to create a connection instance. Normally you'd
+ pass a connection class to do_open, but it doesn't actually check for
+ a class, and just expects a callable. As long as we behave just as a
+ constructor would have, we should be OK. If it ever changes so that
+ we *must* pass a class, we'll create an UnsafeHTTPSConnection class
+ which just sets check_domain to False in the class definition, and
+ choose which one to pass to do_open.
+ """
+ result = HTTPSConnection(*args, **kwargs)
+ if self.ca_certs:
+ result.ca_certs = self.ca_certs
+ result.check_domain = self.check_domain
+ return result
+
+ def https_open(self, req):
+ try:
+ return self.do_open(self._conn_maker, req)
+ except URLError as e:
+ if 'certificate verify failed' in str(e.reason):
+ raise CertificateError('Unable to verify server certificate '
+ 'for %s' % req.host)
+ else:
+ raise
+
+ #
+    # To guard against mixing HTTP traffic with HTTPS (for example, a Man-In-The-
+    # Middle proxy using HTTP listening on port 443, or an index mistakenly serving
+    # HTML containing an http://xyz link when it should be https://xyz),
+    # you can use the following handler class, which does not allow HTTP traffic.
+ #
+ # It works by inheriting from HTTPHandler - so build_opener won't add a
+ # handler for HTTP itself.
+ #
+ class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
+ def http_open(self, req):
+ raise URLError('Unexpected HTTP request on what should be a secure '
+ 'connection: %s' % req)
+
+#
+# XML-RPC with timeouts
+#
+
+_ver_info = sys.version_info[:2]
+
+if _ver_info == (2, 6):
+ class HTTP(httplib.HTTP):
+ def __init__(self, host='', port=None, **kwargs):
+ if port == 0: # 0 means use port 0, not the default port
+ port = None
+ self._setup(self._connection_class(host, port, **kwargs))
+
+
+ if ssl:
+ class HTTPS(httplib.HTTPS):
+ def __init__(self, host='', port=None, **kwargs):
+ if port == 0: # 0 means use port 0, not the default port
+ port = None
+ self._setup(self._connection_class(host, port, **kwargs))
+
+
+class Transport(xmlrpclib.Transport):
+ def __init__(self, timeout, use_datetime=0):
+ self.timeout = timeout
+ xmlrpclib.Transport.__init__(self, use_datetime)
+
+ def make_connection(self, host):
+ h, eh, x509 = self.get_host_info(host)
+ if _ver_info == (2, 6):
+ result = HTTP(h, timeout=self.timeout)
+ else:
+ if not self._connection or host != self._connection[0]:
+ self._extra_headers = eh
+ self._connection = host, httplib.HTTPConnection(h)
+ result = self._connection[1]
+ return result
+
+if ssl:
+ class SafeTransport(xmlrpclib.SafeTransport):
+ def __init__(self, timeout, use_datetime=0):
+ self.timeout = timeout
+ xmlrpclib.SafeTransport.__init__(self, use_datetime)
+
+ def make_connection(self, host):
+ h, eh, kwargs = self.get_host_info(host)
+ if not kwargs:
+ kwargs = {}
+ kwargs['timeout'] = self.timeout
+ if _ver_info == (2, 6):
+ result = HTTPS(host, None, **kwargs)
+ else:
+ if not self._connection or host != self._connection[0]:
+ self._extra_headers = eh
+ self._connection = host, httplib.HTTPSConnection(h, None,
+ **kwargs)
+ result = self._connection[1]
+ return result
+
+
+class ServerProxy(xmlrpclib.ServerProxy):
+ def __init__(self, uri, **kwargs):
+ self.timeout = timeout = kwargs.pop('timeout', None)
+ # The above classes only come into play if a timeout
+ # is specified
+ if timeout is not None:
+ scheme, _ = splittype(uri)
+ use_datetime = kwargs.get('use_datetime', 0)
+ if scheme == 'https':
+ tcls = SafeTransport
+ else:
+ tcls = Transport
+ kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
+ self.transport = t
+ xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
+
+#
+# CSV functionality. This is provided because on 2.x, the csv module can't
+# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
+#
+
+def _csv_open(fn, mode, **kwargs):
+ if sys.version_info[0] < 3:
+ mode += 'b'
+ else:
+ kwargs['newline'] = ''
+ # Python 3 determines encoding from locale. Force 'utf-8'
+ # file encoding to match other forced utf-8 encoding
+ kwargs['encoding'] = 'utf-8'
+ return open(fn, mode, **kwargs)
+
+
+class CSVBase(object):
+ defaults = {
+ 'delimiter': str(','), # The strs are used because we need native
+ 'quotechar': str('"'), # str in the csv API (2.x won't take
+ 'lineterminator': str('\n') # Unicode)
+ }
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *exc_info):
+ self.stream.close()
+
+
+class CSVReader(CSVBase):
+ def __init__(self, **kwargs):
+ if 'stream' in kwargs:
+ stream = kwargs['stream']
+ if sys.version_info[0] >= 3:
+ # needs to be a text stream
+ stream = codecs.getreader('utf-8')(stream)
+ self.stream = stream
+ else:
+ self.stream = _csv_open(kwargs['path'], 'r')
+ self.reader = csv.reader(self.stream, **self.defaults)
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ result = next(self.reader)
+ if sys.version_info[0] < 3:
+ for i, item in enumerate(result):
+ if not isinstance(item, text_type):
+ result[i] = item.decode('utf-8')
+ return result
+
+ __next__ = next
+
+class CSVWriter(CSVBase):
+ def __init__(self, fn, **kwargs):
+ self.stream = _csv_open(fn, 'w')
+ self.writer = csv.writer(self.stream, **self.defaults)
+
+ def writerow(self, row):
+ if sys.version_info[0] < 3:
+ r = []
+ for item in row:
+ if isinstance(item, text_type):
+ item = item.encode('utf-8')
+ r.append(item)
+ row = r
+ self.writer.writerow(row)
+
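+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# 'RECORD.tmp' is a hypothetical path; the same dialect defaults are used for
+# both reading and writing, on Python 2 and 3 alike.
+def _example_csv_roundtrip():  # pragma: no cover
+    with CSVWriter('RECORD.tmp') as writer:
+        writer.writerow(['distlib/__init__.py', 'sha256=...', '123'])
+    with CSVReader(path='RECORD.tmp') as reader:
+        return list(reader)
+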
+#
+# Configurator functionality
+#
+
+class Configurator(BaseConfigurator):
+
+ value_converters = dict(BaseConfigurator.value_converters)
+ value_converters['inc'] = 'inc_convert'
+
+ def __init__(self, config, base=None):
+ super(Configurator, self).__init__(config)
+ self.base = base or os.getcwd()
+
+ def configure_custom(self, config):
+ def convert(o):
+ if isinstance(o, (list, tuple)):
+ result = type(o)([convert(i) for i in o])
+ elif isinstance(o, dict):
+ if '()' in o:
+ result = self.configure_custom(o)
+ else:
+ result = {}
+ for k in o:
+ result[k] = convert(o[k])
+ else:
+ result = self.convert(o)
+ return result
+
+ c = config.pop('()')
+ if not callable(c):
+ c = self.resolve(c)
+ props = config.pop('.', None)
+ # Check for valid identifiers
+ args = config.pop('[]', ())
+ if args:
+ args = tuple([convert(o) for o in args])
+ items = [(k, convert(config[k])) for k in config if valid_ident(k)]
+ kwargs = dict(items)
+ result = c(*args, **kwargs)
+ if props:
+ for n, v in props.items():
+ setattr(result, n, convert(v))
+ return result
+
+ def __getitem__(self, key):
+ result = self.config[key]
+ if isinstance(result, dict) and '()' in result:
+ self.config[key] = result = self.configure_custom(result)
+ return result
+
+ def inc_convert(self, value):
+ """Default converter for the inc:// protocol."""
+ if not os.path.isabs(value):
+ value = os.path.join(self.base, value)
+ with codecs.open(value, 'r', encoding='utf-8') as f:
+ result = json.load(f)
+ return result
+
+
+class SubprocessMixin(object):
+ """
+ Mixin for running subprocesses and capturing their output
+ """
+ def __init__(self, verbose=False, progress=None):
+ self.verbose = verbose
+ self.progress = progress
+
+ def reader(self, stream, context):
+ """
+ Read lines from a subprocess' output stream and either pass to a progress
+ callable (if specified) or write progress information to sys.stderr.
+ """
+ progress = self.progress
+ verbose = self.verbose
+ while True:
+ s = stream.readline()
+ if not s:
+ break
+ if progress is not None:
+ progress(s, context)
+ else:
+ if not verbose:
+ sys.stderr.write('.')
+ else:
+ sys.stderr.write(s.decode('utf-8'))
+ sys.stderr.flush()
+ stream.close()
+
+ def run_command(self, cmd, **kwargs):
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE, **kwargs)
+ t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
+ t1.start()
+ t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))
+ t2.start()
+ p.wait()
+ t1.join()
+ t2.join()
+ if self.progress is not None:
+ self.progress('done.', 'main')
+ elif self.verbose:
+ sys.stderr.write('done.\n')
+ return p
+
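+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# the command is an arbitrary example; output is streamed through reader() on
+# two threads while the process runs.
+def _example_run_command():  # pragma: no cover
+    runner = SubprocessMixin(verbose=True)
+    return runner.run_command(['python', '--version'])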
+
+def normalize_name(name):
+ """Normalize a python package name a la PEP 503"""
+ # https://www.python.org/dev/peps/pep-0503/#normalized-names
+ return re.sub('[-_.]+', '-', name).lower()
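+
+
+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# runs of '-', '_' and '.' collapse to a single '-' and the name is lowercased.
+def _example_normalize_name():  # pragma: no cover
+    return normalize_name('Friendly_Flufl.Package')  # -> 'friendly-flufl-package'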
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py
new file mode 100644
index 0000000000..3eebe18ee8
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py
@@ -0,0 +1,736 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2017 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""
+Implementation of a flexible versioning scheme providing support for PEP-440,
+setuptools-compatible and semantic versioning.
+"""
+
+import logging
+import re
+
+from .compat import string_types
+from .util import parse_requirement
+
+__all__ = ['NormalizedVersion', 'NormalizedMatcher',
+ 'LegacyVersion', 'LegacyMatcher',
+ 'SemanticVersion', 'SemanticMatcher',
+ 'UnsupportedVersionError', 'get_scheme']
+
+logger = logging.getLogger(__name__)
+
+
+class UnsupportedVersionError(ValueError):
+ """This is an unsupported version."""
+ pass
+
+
+class Version(object):
+ def __init__(self, s):
+ self._string = s = s.strip()
+ self._parts = parts = self.parse(s)
+ assert isinstance(parts, tuple)
+ assert len(parts) > 0
+
+ def parse(self, s):
+ raise NotImplementedError('please implement in a subclass')
+
+ def _check_compatible(self, other):
+ if type(self) != type(other):
+ raise TypeError('cannot compare %r and %r' % (self, other))
+
+ def __eq__(self, other):
+ self._check_compatible(other)
+ return self._parts == other._parts
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __lt__(self, other):
+ self._check_compatible(other)
+ return self._parts < other._parts
+
+ def __gt__(self, other):
+ return not (self.__lt__(other) or self.__eq__(other))
+
+ def __le__(self, other):
+ return self.__lt__(other) or self.__eq__(other)
+
+ def __ge__(self, other):
+ return self.__gt__(other) or self.__eq__(other)
+
+ # See http://docs.python.org/reference/datamodel#object.__hash__
+ def __hash__(self):
+ return hash(self._parts)
+
+ def __repr__(self):
+ return "%s('%s')" % (self.__class__.__name__, self._string)
+
+ def __str__(self):
+ return self._string
+
+ @property
+ def is_prerelease(self):
+ raise NotImplementedError('Please implement in subclasses.')
+
+
+class Matcher(object):
+ version_class = None
+
+ # value is either a callable or the name of a method
+ _operators = {
+ '<': lambda v, c, p: v < c,
+ '>': lambda v, c, p: v > c,
+ '<=': lambda v, c, p: v == c or v < c,
+ '>=': lambda v, c, p: v == c or v > c,
+ '==': lambda v, c, p: v == c,
+ '===': lambda v, c, p: v == c,
+ # by default, compatible => >=.
+ '~=': lambda v, c, p: v == c or v > c,
+ '!=': lambda v, c, p: v != c,
+ }
+
+ # this is a method only to support alternative implementations
+ # via overriding
+ def parse_requirement(self, s):
+ return parse_requirement(s)
+
+ def __init__(self, s):
+ if self.version_class is None:
+ raise ValueError('Please specify a version class')
+ self._string = s = s.strip()
+ r = self.parse_requirement(s)
+ if not r:
+ raise ValueError('Not valid: %r' % s)
+ self.name = r.name
+ self.key = self.name.lower() # for case-insensitive comparisons
+ clist = []
+ if r.constraints:
+ # import pdb; pdb.set_trace()
+ for op, s in r.constraints:
+ if s.endswith('.*'):
+ if op not in ('==', '!='):
+ raise ValueError('\'.*\' not allowed for '
+ '%r constraints' % op)
+ # Could be a partial version (e.g. for '2.*') which
+ # won't parse as a version, so keep it as a string
+ vn, prefix = s[:-2], True
+ # Just to check that vn is a valid version
+ self.version_class(vn)
+ else:
+ # Should parse as a version, so we can create an
+ # instance for the comparison
+ vn, prefix = self.version_class(s), False
+ clist.append((op, vn, prefix))
+ self._parts = tuple(clist)
+
+ def match(self, version):
+ """
+ Check if the provided version matches the constraints.
+
+ :param version: The version to match against this instance.
+ :type version: String or :class:`Version` instance.
+ """
+ if isinstance(version, string_types):
+ version = self.version_class(version)
+ for operator, constraint, prefix in self._parts:
+ f = self._operators.get(operator)
+ if isinstance(f, string_types):
+ f = getattr(self, f)
+ if not f:
+ msg = ('%r not implemented '
+ 'for %s' % (operator, self.__class__.__name__))
+ raise NotImplementedError(msg)
+ if not f(version, constraint, prefix):
+ return False
+ return True
+
+ @property
+ def exact_version(self):
+ result = None
+ if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
+ result = self._parts[0][1]
+ return result
+
+ def _check_compatible(self, other):
+ if type(self) != type(other) or self.name != other.name:
+ raise TypeError('cannot compare %s and %s' % (self, other))
+
+ def __eq__(self, other):
+ self._check_compatible(other)
+ return self.key == other.key and self._parts == other._parts
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ # See http://docs.python.org/reference/datamodel#object.__hash__
+ def __hash__(self):
+ return hash(self.key) + hash(self._parts)
+
+ def __repr__(self):
+ return "%s(%r)" % (self.__class__.__name__, self._string)
+
+ def __str__(self):
+ return self._string
+
+
+PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
+ r'(\.(post)(\d+))?(\.(dev)(\d+))?'
+ r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$')
+
+
+def _pep_440_key(s):
+ s = s.strip()
+ m = PEP440_VERSION_RE.match(s)
+ if not m:
+ raise UnsupportedVersionError('Not a valid version: %s' % s)
+ groups = m.groups()
+ nums = tuple(int(v) for v in groups[1].split('.'))
+ while len(nums) > 1 and nums[-1] == 0:
+ nums = nums[:-1]
+
+ if not groups[0]:
+ epoch = 0
+ else:
+ epoch = int(groups[0])
+ pre = groups[4:6]
+ post = groups[7:9]
+ dev = groups[10:12]
+ local = groups[13]
+ if pre == (None, None):
+ pre = ()
+ else:
+ pre = pre[0], int(pre[1])
+ if post == (None, None):
+ post = ()
+ else:
+ post = post[0], int(post[1])
+ if dev == (None, None):
+ dev = ()
+ else:
+ dev = dev[0], int(dev[1])
+ if local is None:
+ local = ()
+ else:
+ parts = []
+ for part in local.split('.'):
+ # to ensure that numeric compares as > lexicographic, avoid
+ # comparing them directly, but encode a tuple which ensures
+ # correct sorting
+ if part.isdigit():
+ part = (1, int(part))
+ else:
+ part = (0, part)
+ parts.append(part)
+ local = tuple(parts)
+ if not pre:
+ # either before pre-release, or final release and after
+ if not post and dev:
+ # before pre-release
+ pre = ('a', -1) # to sort before a0
+ else:
+ pre = ('z',) # to sort after all pre-releases
+ # now look at the state of post and dev.
+ if not post:
+ post = ('_',) # sort before 'a'
+ if not dev:
+ dev = ('final',)
+
+ #print('%s -> %s' % (s, m.groups()))
+ return epoch, nums, pre, post, dev, local
+
+
+_normalized_key = _pep_440_key
+
+
+class NormalizedVersion(Version):
+ """A rational version.
+
+ Good:
+ 1.2 # equivalent to "1.2.0"
+ 1.2.0
+ 1.2a1
+ 1.2.3a2
+ 1.2.3b1
+ 1.2.3c1
+ 1.2.3.4
+ TODO: fill this out
+
+ Bad:
+ 1 # minimum two numbers
+ 1.2a # release level must have a release serial
+ 1.2.3b
+ """
+ def parse(self, s):
+ result = _normalized_key(s)
+        # _normalized_key loses trailing zeroes in the release clause,
+        # since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0.
+        # However, PEP 440 prefix matching needs the clause as written:
+        # for example, (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
+ m = PEP440_VERSION_RE.match(s) # must succeed
+ groups = m.groups()
+ self._release_clause = tuple(int(v) for v in groups[1].split('.'))
+ return result
+
+ PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])
+
+ @property
+ def is_prerelease(self):
+ return any(t[0] in self.PREREL_TAGS for t in self._parts if t)
+
+
+def _match_prefix(x, y):
+ x = str(x)
+ y = str(y)
+ if x == y:
+ return True
+ if not x.startswith(y):
+ return False
+ n = len(y)
+ return x[n] == '.'
+
+
+class NormalizedMatcher(Matcher):
+ version_class = NormalizedVersion
+
+ # value is either a callable or the name of a method
+ _operators = {
+ '~=': '_match_compatible',
+ '<': '_match_lt',
+ '>': '_match_gt',
+ '<=': '_match_le',
+ '>=': '_match_ge',
+ '==': '_match_eq',
+ '===': '_match_arbitrary',
+ '!=': '_match_ne',
+ }
+
+ def _adjust_local(self, version, constraint, prefix):
+ if prefix:
+ strip_local = '+' not in constraint and version._parts[-1]
+ else:
+ # both constraint and version are
+ # NormalizedVersion instances.
+ # If constraint does not have a local component,
+ # ensure the version doesn't, either.
+ strip_local = not constraint._parts[-1] and version._parts[-1]
+ if strip_local:
+ s = version._string.split('+', 1)[0]
+ version = self.version_class(s)
+ return version, constraint
+
+ def _match_lt(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if version >= constraint:
+ return False
+ release_clause = constraint._release_clause
+ pfx = '.'.join([str(i) for i in release_clause])
+ return not _match_prefix(version, pfx)
+
+ def _match_gt(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if version <= constraint:
+ return False
+ release_clause = constraint._release_clause
+ pfx = '.'.join([str(i) for i in release_clause])
+ return not _match_prefix(version, pfx)
+
+ def _match_le(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ return version <= constraint
+
+ def _match_ge(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ return version >= constraint
+
+ def _match_eq(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if not prefix:
+ result = (version == constraint)
+ else:
+ result = _match_prefix(version, constraint)
+ return result
+
+ def _match_arbitrary(self, version, constraint, prefix):
+ return str(version) == str(constraint)
+
+ def _match_ne(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if not prefix:
+ result = (version != constraint)
+ else:
+ result = not _match_prefix(version, constraint)
+ return result
+
+ def _match_compatible(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if version == constraint:
+ return True
+ if version < constraint:
+ return False
+# if not prefix:
+# return True
+ release_clause = constraint._release_clause
+ if len(release_clause) > 1:
+ release_clause = release_clause[:-1]
+ pfx = '.'.join([str(i) for i in release_clause])
+ return _match_prefix(version, pfx)
+
+_REPLACEMENTS = (
+ (re.compile('[.+-]$'), ''), # remove trailing puncts
+ (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start
+ (re.compile('^[.-]'), ''), # remove leading puncts
+ (re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses
+ (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion)
+    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),   # remove leading r(ev)
+ (re.compile('[.]{2,}'), '.'), # multiple runs of '.'
+ (re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha
+ (re.compile(r'\b(pre-alpha|prealpha)\b'),
+ 'pre.alpha'), # standardise
+ (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses
+)
+
+_SUFFIX_REPLACEMENTS = (
+ (re.compile('^[:~._+-]+'), ''), # remove leading puncts
+ (re.compile('[,*")([\\]]'), ''), # remove unwanted chars
+ (re.compile('[~:+_ -]'), '.'), # replace illegal chars
+ (re.compile('[.]{2,}'), '.'), # multiple runs of '.'
+ (re.compile(r'\.$'), ''), # trailing '.'
+)
+
+_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
+
+
+def _suggest_semantic_version(s):
+ """
+ Try to suggest a semantic form for a version for which
+ _suggest_normalized_version couldn't come up with anything.
+ """
+ result = s.strip().lower()
+ for pat, repl in _REPLACEMENTS:
+ result = pat.sub(repl, result)
+ if not result:
+ result = '0.0.0'
+
+ # Now look for numeric prefix, and separate it out from
+ # the rest.
+ #import pdb; pdb.set_trace()
+ m = _NUMERIC_PREFIX.match(result)
+ if not m:
+ prefix = '0.0.0'
+ suffix = result
+ else:
+ prefix = m.groups()[0].split('.')
+ prefix = [int(i) for i in prefix]
+ while len(prefix) < 3:
+ prefix.append(0)
+ if len(prefix) == 3:
+ suffix = result[m.end():]
+ else:
+ suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
+ prefix = prefix[:3]
+ prefix = '.'.join([str(i) for i in prefix])
+ suffix = suffix.strip()
+ if suffix:
+ #import pdb; pdb.set_trace()
+ # massage the suffix.
+ for pat, repl in _SUFFIX_REPLACEMENTS:
+ suffix = pat.sub(repl, suffix)
+
+ if not suffix:
+ result = prefix
+ else:
+ sep = '-' if 'dev' in suffix else '+'
+ result = prefix + sep + suffix
+ if not is_semver(result):
+ result = None
+ return result
+
+
+def _suggest_normalized_version(s):
+ """Suggest a normalized version close to the given version string.
+
+ If you have a version string that isn't rational (i.e. NormalizedVersion
+ doesn't like it) then you might be able to get an equivalent (or close)
+ rational version from this function.
+
+ This does a number of simple normalizations to the given string, based
+ on observation of versions currently in use on PyPI. Given a dump of
+    those versions during PyCon 2009, 4287 of them:
+    - 2312 (53.93%) match NormalizedVersion without change
+    - 3474 (81.04%) match when using this suggestion method
+      (i.e. with the automatic suggestion applied)
+
+ @param s {str} An irrational version string.
+    @returns A rational version string, or None if one couldn't be determined.
+ """
+ try:
+ _normalized_key(s)
+ return s # already rational
+ except UnsupportedVersionError:
+ pass
+
+ rs = s.lower()
+
+ # part of this could use maketrans
+ for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
+ ('beta', 'b'), ('rc', 'c'), ('-final', ''),
+ ('-pre', 'c'),
+ ('-release', ''), ('.release', ''), ('-stable', ''),
+ ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
+ ('final', '')):
+ rs = rs.replace(orig, repl)
+
+ # if something ends with dev or pre, we add a 0
+ rs = re.sub(r"pre$", r"pre0", rs)
+ rs = re.sub(r"dev$", r"dev0", rs)
+
+ # if we have something like "b-2" or "a.2" at the end of the
+ # version, that is probably beta, alpha, etc
+ # let's remove the dash or dot
+ rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)
+
+ # 1.0-dev-r371 -> 1.0.dev371
+ # 0.1-dev-r79 -> 0.1.dev79
+ rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
+
+ # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
+ rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
+
+ # Clean: v0.3, v1.0
+ if rs.startswith('v'):
+ rs = rs[1:]
+
+ # Clean leading '0's on numbers.
+ #TODO: unintended side-effect on, e.g., "2003.05.09"
+ # PyPI stats: 77 (~2%) better
+ rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
+
+ # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
+ # zero.
+ # PyPI stats: 245 (7.56%) better
+ rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)
+
+ # the 'dev-rNNN' tag is a dev tag
+ rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)
+
+ # clean the - when used as a pre delimiter
+ rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)
+
+ # a terminal "dev" or "devel" can be changed into ".dev0"
+ rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)
+
+ # a terminal "dev" can be changed into ".dev0"
+ rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)
+
+ # a terminal "final" or "stable" can be removed
+ rs = re.sub(r"(final|stable)$", "", rs)
+
+ # The 'r' and the '-' tags are post release tags
+ # 0.4a1.r10 -> 0.4a1.post10
+ # 0.9.33-17222 -> 0.9.33.post17222
+ # 0.9.33-r17222 -> 0.9.33.post17222
+ rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)
+
+ # Clean 'r' instead of 'dev' usage:
+ # 0.9.33+r17222 -> 0.9.33.dev17222
+ # 1.0dev123 -> 1.0.dev123
+ # 1.0.git123 -> 1.0.dev123
+ # 1.0.bzr123 -> 1.0.dev123
+ # 0.1a0dev.123 -> 0.1a0.dev123
+ # PyPI stats: ~150 (~4%) better
+ rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)
+
+ # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
+ # 0.2.pre1 -> 0.2c1
+ # 0.2-c1 -> 0.2c1
+ # 1.0preview123 -> 1.0c123
+ # PyPI stats: ~21 (0.62%) better
+ rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)
+
+ # Tcl/Tk uses "px" for their post release markers
+ rs = re.sub(r"p(\d+)$", r".post\1", rs)
+
+ try:
+ _normalized_key(rs)
+ except UnsupportedVersionError:
+ rs = None
+ return rs
+
+#
+# Legacy version processing (distribute-compatible)
+#
+
+_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
+_VERSION_REPLACE = {
+ 'pre': 'c',
+ 'preview': 'c',
+ '-': 'final-',
+ 'rc': 'c',
+ 'dev': '@',
+ '': None,
+ '.': None,
+}
+
+
+def _legacy_key(s):
+ def get_parts(s):
+ result = []
+ for p in _VERSION_PART.split(s.lower()):
+ p = _VERSION_REPLACE.get(p, p)
+ if p:
+ if '0' <= p[:1] <= '9':
+ p = p.zfill(8)
+ else:
+ p = '*' + p
+ result.append(p)
+ result.append('*final')
+ return result
+
+ result = []
+ for p in get_parts(s):
+ if p.startswith('*'):
+ if p < '*final':
+ while result and result[-1] == '*final-':
+ result.pop()
+ while result and result[-1] == '00000000':
+ result.pop()
+ result.append(p)
+ return tuple(result)
+
+
+class LegacyVersion(Version):
+ def parse(self, s):
+ return _legacy_key(s)
+
+ @property
+ def is_prerelease(self):
+ result = False
+ for x in self._parts:
+ if (isinstance(x, string_types) and x.startswith('*') and
+ x < '*final'):
+ result = True
+ break
+ return result
+
+
+class LegacyMatcher(Matcher):
+ version_class = LegacyVersion
+
+ _operators = dict(Matcher._operators)
+ _operators['~='] = '_match_compatible'
+
+ numeric_re = re.compile(r'^(\d+(\.\d+)*)')
+
+ def _match_compatible(self, version, constraint, prefix):
+ if version < constraint:
+ return False
+ m = self.numeric_re.match(str(constraint))
+ if not m:
+ logger.warning('Cannot compute compatible match for version %s '
+ ' and constraint %s', version, constraint)
+ return True
+ s = m.groups()[0]
+ if '.' in s:
+ s = s.rsplit('.', 1)[0]
+ return _match_prefix(version, s)
+
+#
+# Semantic versioning
+#
+
+_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
+ r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
+ r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)
+
+
+def is_semver(s):
+ return _SEMVER_RE.match(s)
+
+
+def _semantic_key(s):
+ def make_tuple(s, absent):
+ if s is None:
+ result = (absent,)
+ else:
+ parts = s[1:].split('.')
+ # We can't compare ints and strings on Python 3, so fudge it
+            # by zero-filling numeric values to simulate a numeric comparison
+ result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
+ return result
+
+ m = is_semver(s)
+ if not m:
+ raise UnsupportedVersionError(s)
+ groups = m.groups()
+ major, minor, patch = [int(i) for i in groups[:3]]
+ # choose the '|' and '*' so that versions sort correctly
+ pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
+ return (major, minor, patch), pre, build
+
+
+class SemanticVersion(Version):
+ def parse(self, s):
+ return _semantic_key(s)
+
+ @property
+ def is_prerelease(self):
+ return self._parts[1][0] != '|'
+
+
+class SemanticMatcher(Matcher):
+ version_class = SemanticVersion
+
+
+class VersionScheme(object):
+ def __init__(self, key, matcher, suggester=None):
+ self.key = key
+ self.matcher = matcher
+ self.suggester = suggester
+
+ def is_valid_version(self, s):
+ try:
+ self.matcher.version_class(s)
+ result = True
+ except UnsupportedVersionError:
+ result = False
+ return result
+
+ def is_valid_matcher(self, s):
+ try:
+ self.matcher(s)
+ result = True
+ except UnsupportedVersionError:
+ result = False
+ return result
+
+ def is_valid_constraint_list(self, s):
+ """
+ Used for processing some metadata fields
+ """
+ return self.is_valid_matcher('dummy_name (%s)' % s)
+
+ def suggest(self, s):
+ if self.suggester is None:
+ result = None
+ else:
+ result = self.suggester(s)
+ return result
+
+_SCHEMES = {
+ 'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
+ _suggest_normalized_version),
+ 'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s),
+ 'semantic': VersionScheme(_semantic_key, SemanticMatcher,
+ _suggest_semantic_version),
+}
+
+_SCHEMES['default'] = _SCHEMES['normalized']
+
+
+def get_scheme(name):
+ if name not in _SCHEMES:
+ raise ValueError('unknown scheme name: %r' % name)
+ return _SCHEMES[name]
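+
+
+# Illustrative usage sketch (editorial addition, not part of upstream distlib):
+# a scheme bundles a key function, a matcher class and an optional suggester;
+# the requirement string below is just an example.
+def _example_get_scheme():  # pragma: no cover
+    scheme = get_scheme('default')
+    matcher = scheme.matcher('requests (>= 2.0, < 3.0)')
+    # matcher.match('2.25.1') -> True; scheme.suggest('1.0-final') -> '1.0'
+    return matcher.match('2.25.1'), scheme.suggest('1.0-final')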
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/w32.exe b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/w32.exe
new file mode 100644
index 0000000000..e6439e9e45
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/w32.exe
Binary files differ
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/w64.exe b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/w64.exe
new file mode 100644
index 0000000000..46139dbf94
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/w64.exe
Binary files differ
diff --git a/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py
new file mode 100644
index 0000000000..1e2c7a020c
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py
@@ -0,0 +1,1018 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from __future__ import unicode_literals
+
+import base64
+import codecs
+import datetime
+import distutils.util
+from email import message_from_file
+import hashlib
+import imp
+import json
+import logging
+import os
+import posixpath
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from . import __version__, DistlibException
+from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
+from .database import InstalledDistribution
+from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
+ LEGACY_METADATA_FILENAME)
+from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
+ cached_property, get_cache_base, read_exports, tempdir)
+from .version import NormalizedVersion, UnsupportedVersionError
+
+logger = logging.getLogger(__name__)
+
+cache = None # created when needed
+
+if hasattr(sys, 'pypy_version_info'): # pragma: no cover
+ IMP_PREFIX = 'pp'
+elif sys.platform.startswith('java'): # pragma: no cover
+ IMP_PREFIX = 'jy'
+elif sys.platform == 'cli': # pragma: no cover
+ IMP_PREFIX = 'ip'
+else:
+ IMP_PREFIX = 'cp'
+
+VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
+if not VER_SUFFIX: # pragma: no cover
+ VER_SUFFIX = '%s%s' % sys.version_info[:2]
+PYVER = 'py' + VER_SUFFIX
+IMPVER = IMP_PREFIX + VER_SUFFIX
+
+ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_')
+
+ABI = sysconfig.get_config_var('SOABI')
+if ABI and ABI.startswith('cpython-'):
+ ABI = ABI.replace('cpython-', 'cp')
+else:
+ def _derive_abi():
+ parts = ['cp', VER_SUFFIX]
+ if sysconfig.get_config_var('Py_DEBUG'):
+ parts.append('d')
+ if sysconfig.get_config_var('WITH_PYMALLOC'):
+ parts.append('m')
+ if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
+ parts.append('u')
+ return ''.join(parts)
+ ABI = _derive_abi()
+ del _derive_abi
+
+FILENAME_RE = re.compile(r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?
+-(?P<py>\w+\d+(\.\w+\d+)*)
+-(?P<bi>\w+)
+-(?P<ar>\w+(\.\w+)*)
+\.whl$
+''', re.IGNORECASE | re.VERBOSE)
+
+NAME_VERSION_RE = re.compile(r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?$
+''', re.IGNORECASE | re.VERBOSE)
+
+SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
+SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
+SHEBANG_PYTHON = b'#!python'
+SHEBANG_PYTHONW = b'#!pythonw'
+
+if os.sep == '/':
+ to_posix = lambda o: o
+else:
+ to_posix = lambda o: o.replace(os.sep, '/')
+
+
+class Mounter(object):
+ def __init__(self):
+ self.impure_wheels = {}
+ self.libs = {}
+
+ def add(self, pathname, extensions):
+ self.impure_wheels[pathname] = extensions
+ self.libs.update(extensions)
+
+ def remove(self, pathname):
+ extensions = self.impure_wheels.pop(pathname)
+ for k, v in extensions:
+ if k in self.libs:
+ del self.libs[k]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.libs:
+ result = self
+ else:
+ result = None
+ return result
+
+ def load_module(self, fullname):
+ if fullname in sys.modules:
+ result = sys.modules[fullname]
+ else:
+ if fullname not in self.libs:
+ raise ImportError('unable to find extension for %s' % fullname)
+ result = imp.load_dynamic(fullname, self.libs[fullname])
+ result.__loader__ = self
+ parts = fullname.rsplit('.', 1)
+ if len(parts) > 1:
+ result.__package__ = parts[0]
+ return result
+
+_hook = Mounter()
+
+
+class Wheel(object):
+ """
+ Class to build and install from Wheel files (PEP 427).
+ """
+
+ wheel_version = (1, 1)
+ hash_kind = 'sha256'
+
+ def __init__(self, filename=None, sign=False, verify=False):
+ """
+ Initialise an instance using a (valid) filename.
+ """
+ self.sign = sign
+ self.should_verify = verify
+ self.buildver = ''
+ self.pyver = [PYVER]
+ self.abi = ['none']
+ self.arch = ['any']
+ self.dirname = os.getcwd()
+ if filename is None:
+ self.name = 'dummy'
+ self.version = '0.1'
+ self._filename = self.filename
+ else:
+ m = NAME_VERSION_RE.match(filename)
+ if m:
+ info = m.groupdict('')
+ self.name = info['nm']
+ # Reinstate the local version separator
+ self.version = info['vn'].replace('_', '-')
+ self.buildver = info['bn']
+ self._filename = self.filename
+ else:
+ dirname, filename = os.path.split(filename)
+ m = FILENAME_RE.match(filename)
+ if not m:
+ raise DistlibException('Invalid name or '
+ 'filename: %r' % filename)
+ if dirname:
+ self.dirname = os.path.abspath(dirname)
+ self._filename = filename
+ info = m.groupdict('')
+ self.name = info['nm']
+ self.version = info['vn']
+ self.buildver = info['bn']
+ self.pyver = info['py'].split('.')
+ self.abi = info['bi'].split('.')
+ self.arch = info['ar'].split('.')
+
+ @property
+ def filename(self):
+ """
+ Build and return a filename from the various components.
+ """
+ if self.buildver:
+ buildver = '-' + self.buildver
+ else:
+ buildver = ''
+ pyver = '.'.join(self.pyver)
+ abi = '.'.join(self.abi)
+ arch = '.'.join(self.arch)
+ # replace - with _ as a local version separator
+ version = self.version.replace('-', '_')
+ return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver,
+ pyver, abi, arch)
+
+ @property
+ def exists(self):
+ path = os.path.join(self.dirname, self.filename)
+ return os.path.isfile(path)
+
+ @property
+ def tags(self):
+ for pyver in self.pyver:
+ for abi in self.abi:
+ for arch in self.arch:
+ yield pyver, abi, arch
+
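+    # Illustrative usage (editorial addition, not part of upstream distlib):
+    #   w = Wheel('foo-1.0-py3-none-any.whl')   # hypothetical filename
+    #   w.name, w.version                       # -> 'foo', '1.0'
+    #   list(w.tags)                            # -> [('py3', 'none', 'any')]
+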
+ @cached_property
+ def metadata(self):
+ pathname = os.path.join(self.dirname, self.filename)
+ name_ver = '%s-%s' % (self.name, self.version)
+ info_dir = '%s.dist-info' % name_ver
+ wrapper = codecs.getreader('utf-8')
+ with ZipFile(pathname, 'r') as zf:
+ wheel_metadata = self.get_wheel_metadata(zf)
+ wv = wheel_metadata['Wheel-Version'].split('.', 1)
+ file_version = tuple([int(i) for i in wv])
+ # if file_version < (1, 1):
+ # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME,
+ # LEGACY_METADATA_FILENAME]
+ # else:
+ # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
+ fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME]
+ result = None
+ for fn in fns:
+ try:
+ metadata_filename = posixpath.join(info_dir, fn)
+ with zf.open(metadata_filename) as bf:
+ wf = wrapper(bf)
+ result = Metadata(fileobj=wf)
+ if result:
+ break
+ except KeyError:
+ pass
+ if not result:
+ raise ValueError('Invalid wheel, because metadata is '
+ 'missing: looked in %s' % ', '.join(fns))
+ return result
+
+ def get_wheel_metadata(self, zf):
+ name_ver = '%s-%s' % (self.name, self.version)
+ info_dir = '%s.dist-info' % name_ver
+ metadata_filename = posixpath.join(info_dir, 'WHEEL')
+ with zf.open(metadata_filename) as bf:
+ wf = codecs.getreader('utf-8')(bf)
+ message = message_from_file(wf)
+ return dict(message)
+
+ @cached_property
+ def info(self):
+ pathname = os.path.join(self.dirname, self.filename)
+ with ZipFile(pathname, 'r') as zf:
+ result = self.get_wheel_metadata(zf)
+ return result
+
+ def process_shebang(self, data):
+ m = SHEBANG_RE.match(data)
+ if m:
+ end = m.end()
+ shebang, data_after_shebang = data[:end], data[end:]
+ # Preserve any arguments after the interpreter
+ if b'pythonw' in shebang.lower():
+ shebang_python = SHEBANG_PYTHONW
+ else:
+ shebang_python = SHEBANG_PYTHON
+ m = SHEBANG_DETAIL_RE.match(shebang)
+ if m:
+ args = b' ' + m.groups()[-1]
+ else:
+ args = b''
+ shebang = shebang_python + args
+ data = shebang + data_after_shebang
+ else:
+ cr = data.find(b'\r')
+ lf = data.find(b'\n')
+ if cr < 0 or cr > lf:
+ term = b'\n'
+ else:
+ if data[cr:cr + 2] == b'\r\n':
+ term = b'\r\n'
+ else:
+ term = b'\r'
+ data = SHEBANG_PYTHON + term + data
+ return data
+
+ def get_hash(self, data, hash_kind=None):
+ if hash_kind is None:
+ hash_kind = self.hash_kind
+ try:
+ hasher = getattr(hashlib, hash_kind)
+ except AttributeError:
+ raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
+ result = hasher(data).digest()
+ result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
+ return hash_kind, result
+
+ def write_record(self, records, record_path, base):
+ records = list(records) # make a copy, as mutated
+ p = to_posix(os.path.relpath(record_path, base))
+ records.append((p, '', ''))
+ with CSVWriter(record_path) as writer:
+ for row in records:
+ writer.writerow(row)
+
+ def write_records(self, info, libdir, archive_paths):
+ records = []
+ distinfo, info_dir = info
+ hasher = getattr(hashlib, self.hash_kind)
+ for ap, p in archive_paths:
+ with open(p, 'rb') as f:
+ data = f.read()
+ digest = '%s=%s' % self.get_hash(data)
+ size = os.path.getsize(p)
+ records.append((ap, digest, size))
+
+ p = os.path.join(distinfo, 'RECORD')
+ self.write_record(records, p, libdir)
+ ap = to_posix(os.path.join(info_dir, 'RECORD'))
+ archive_paths.append((ap, p))
+
+ def build_zip(self, pathname, archive_paths):
+ with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
+ for ap, p in archive_paths:
+ logger.debug('Wrote %s to %s in wheel', p, ap)
+ zf.write(p, ap)
+
+ def build(self, paths, tags=None, wheel_version=None):
+ """
+ Build a wheel from files in specified paths, and use any specified tags
+ when determining the name of the wheel.
+ """
+ if tags is None:
+ tags = {}
+
+ libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
+ if libkey == 'platlib':
+ is_pure = 'false'
+ default_pyver = [IMPVER]
+ default_abi = [ABI]
+ default_arch = [ARCH]
+ else:
+ is_pure = 'true'
+ default_pyver = [PYVER]
+ default_abi = ['none']
+ default_arch = ['any']
+
+ self.pyver = tags.get('pyver', default_pyver)
+ self.abi = tags.get('abi', default_abi)
+ self.arch = tags.get('arch', default_arch)
+
+ libdir = paths[libkey]
+
+ name_ver = '%s-%s' % (self.name, self.version)
+ data_dir = '%s.data' % name_ver
+ info_dir = '%s.dist-info' % name_ver
+
+ archive_paths = []
+
+ # First, stuff which is not in site-packages
+ for key in ('data', 'headers', 'scripts'):
+ if key not in paths:
+ continue
+ path = paths[key]
+ if os.path.isdir(path):
+ for root, dirs, files in os.walk(path):
+ for fn in files:
+ p = fsdecode(os.path.join(root, fn))
+ rp = os.path.relpath(p, path)
+ ap = to_posix(os.path.join(data_dir, key, rp))
+ archive_paths.append((ap, p))
+ if key == 'scripts' and not p.endswith('.exe'):
+ with open(p, 'rb') as f:
+ data = f.read()
+ data = self.process_shebang(data)
+ with open(p, 'wb') as f:
+ f.write(data)
+
+ # Now, stuff which is in site-packages, other than the
+ # distinfo stuff.
+ path = libdir
+ distinfo = None
+ for root, dirs, files in os.walk(path):
+ if root == path:
+ # At the top level only, save distinfo for later
+ # and skip it for now
+ for i, dn in enumerate(dirs):
+ dn = fsdecode(dn)
+ if dn.endswith('.dist-info'):
+ distinfo = os.path.join(root, dn)
+ del dirs[i]
+ break
+ assert distinfo, '.dist-info directory expected, not found'
+
+ for fn in files:
+ # comment out next suite to leave .pyc files in
+ if fsdecode(fn).endswith(('.pyc', '.pyo')):
+ continue
+ p = os.path.join(root, fn)
+ rp = to_posix(os.path.relpath(p, path))
+ archive_paths.append((rp, p))
+
+ # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
+ files = os.listdir(distinfo)
+ for fn in files:
+ if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
+ p = fsdecode(os.path.join(distinfo, fn))
+ ap = to_posix(os.path.join(info_dir, fn))
+ archive_paths.append((ap, p))
+
+ wheel_metadata = [
+ 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
+ 'Generator: distlib %s' % __version__,
+ 'Root-Is-Purelib: %s' % is_pure,
+ ]
+ for pyver, abi, arch in self.tags:
+ wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
+ p = os.path.join(distinfo, 'WHEEL')
+ with open(p, 'w') as f:
+ f.write('\n'.join(wheel_metadata))
+ ap = to_posix(os.path.join(info_dir, 'WHEEL'))
+ archive_paths.append((ap, p))
+
+ # sort the entries by archive path. Not needed by any spec, but it
+ # keeps the archive listing and RECORD tidier than they would otherwise
+ # be. Use the number of path segments to keep directory entries together,
+ # and keep the dist-info stuff at the end.
+ def sorter(t):
+ ap = t[0]
+ n = ap.count('/')
+ if '.dist-info' in ap:
+ n += 10000
+ return (n, ap)
+ archive_paths = sorted(archive_paths, key=sorter)
+
+ # Now, at last, RECORD.
+ # Paths in here are archive paths - nothing else makes sense.
+ self.write_records((distinfo, info_dir), libdir, archive_paths)
+ # Now, ready to build the zip file
+ pathname = os.path.join(self.dirname, self.filename)
+ self.build_zip(pathname, archive_paths)
+ return pathname
+
+ def skip_entry(self, arcname):
+ """
+ Determine whether an archive entry should be skipped when verifying
+ or installing.
+ """
+ # The signature file won't be in RECORD,
+        # and we don't currently do anything with it.
+ # We also skip directories, as they won't be in RECORD
+ # either. See:
+ #
+ # https://github.com/pypa/wheel/issues/294
+ # https://github.com/pypa/wheel/issues/287
+ # https://github.com/pypa/wheel/pull/289
+ #
+ return arcname.endswith(('/', '/RECORD.jws'))
+
+ def install(self, paths, maker, **kwargs):
+ """
+ Install a wheel to the specified paths. If kwarg ``warner`` is
+ specified, it should be a callable, which will be called with two
+ tuples indicating the wheel version of this software and the wheel
+ version in the file, if there is a discrepancy in the versions.
+        This can be used to issue any warnings or raise any exceptions.
+ If kwarg ``lib_only`` is True, only the purelib/platlib files are
+ installed, and the headers, scripts, data and dist-info metadata are
+ not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
+ bytecode will try to use file-hash based invalidation (PEP-552) on
+        supported interpreter versions (CPython 3.7+).
+
+ The return value is a :class:`InstalledDistribution` instance unless
+        ``lib_only`` is True, in which case the return value is ``None``.
+ """
+
+ dry_run = maker.dry_run
+ warner = kwargs.get('warner')
+ lib_only = kwargs.get('lib_only', False)
+ bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)
+
+ pathname = os.path.join(self.dirname, self.filename)
+ name_ver = '%s-%s' % (self.name, self.version)
+ data_dir = '%s.data' % name_ver
+ info_dir = '%s.dist-info' % name_ver
+
+ metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
+ wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
+ record_name = posixpath.join(info_dir, 'RECORD')
+
+ wrapper = codecs.getreader('utf-8')
+
+ with ZipFile(pathname, 'r') as zf:
+ with zf.open(wheel_metadata_name) as bwf:
+ wf = wrapper(bwf)
+ message = message_from_file(wf)
+ wv = message['Wheel-Version'].split('.', 1)
+ file_version = tuple([int(i) for i in wv])
+ if (file_version != self.wheel_version) and warner:
+ warner(self.wheel_version, file_version)
+
+ if message['Root-Is-Purelib'] == 'true':
+ libdir = paths['purelib']
+ else:
+ libdir = paths['platlib']
+
+ records = {}
+ with zf.open(record_name) as bf:
+ with CSVReader(stream=bf) as reader:
+ for row in reader:
+ p = row[0]
+ records[p] = row
+
+ data_pfx = posixpath.join(data_dir, '')
+ info_pfx = posixpath.join(info_dir, '')
+ script_pfx = posixpath.join(data_dir, 'scripts', '')
+
+ # make a new instance rather than a copy of maker's,
+ # as we mutate it
+ fileop = FileOperator(dry_run=dry_run)
+ fileop.record = True # so we can rollback if needed
+
+ bc = not sys.dont_write_bytecode # Double negatives. Lovely!
+
+ outfiles = [] # for RECORD writing
+
+ # for script copying/shebang processing
+ workdir = tempfile.mkdtemp()
+ # set target dir later
+ # we default add_launchers to False, as the
+ # Python Launcher should be used instead
+ maker.source_dir = workdir
+ maker.target_dir = None
+ try:
+ for zinfo in zf.infolist():
+ arcname = zinfo.filename
+ if isinstance(arcname, text_type):
+ u_arcname = arcname
+ else:
+ u_arcname = arcname.decode('utf-8')
+ if self.skip_entry(u_arcname):
+ continue
+ row = records[u_arcname]
+ if row[2] and str(zinfo.file_size) != row[2]:
+ raise DistlibException('size mismatch for '
+ '%s' % u_arcname)
+ if row[1]:
+ kind, value = row[1].split('=', 1)
+ with zf.open(arcname) as bf:
+ data = bf.read()
+ _, digest = self.get_hash(data, kind)
+ if digest != value:
+ raise DistlibException('digest mismatch for '
+ '%s' % arcname)
+
+ if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
+ logger.debug('lib_only: skipping %s', u_arcname)
+ continue
+ is_script = (u_arcname.startswith(script_pfx)
+ and not u_arcname.endswith('.exe'))
+
+ if u_arcname.startswith(data_pfx):
+ _, where, rp = u_arcname.split('/', 2)
+ outfile = os.path.join(paths[where], convert_path(rp))
+ else:
+ # meant for site-packages.
+ if u_arcname in (wheel_metadata_name, record_name):
+ continue
+ outfile = os.path.join(libdir, convert_path(u_arcname))
+ if not is_script:
+ with zf.open(arcname) as bf:
+ fileop.copy_stream(bf, outfile)
+ outfiles.append(outfile)
+ # Double check the digest of the written file
+ if not dry_run and row[1]:
+ with open(outfile, 'rb') as bf:
+ data = bf.read()
+ _, newdigest = self.get_hash(data, kind)
+ if newdigest != digest:
+ raise DistlibException('digest mismatch '
+ 'on write for '
+ '%s' % outfile)
+ if bc and outfile.endswith('.py'):
+ try:
+ pyc = fileop.byte_compile(outfile,
+ hashed_invalidation=bc_hashed_invalidation)
+ outfiles.append(pyc)
+ except Exception:
+ # Don't give up if byte-compilation fails,
+ # but log it and perhaps warn the user
+ logger.warning('Byte-compilation failed',
+ exc_info=True)
+ else:
+ fn = os.path.basename(convert_path(arcname))
+ workname = os.path.join(workdir, fn)
+ with zf.open(arcname) as bf:
+ fileop.copy_stream(bf, workname)
+
+ dn, fn = os.path.split(outfile)
+ maker.target_dir = dn
+ filenames = maker.make(fn)
+ fileop.set_executable_mode(filenames)
+ outfiles.extend(filenames)
+
+ if lib_only:
+ logger.debug('lib_only: returning None')
+ dist = None
+ else:
+ # Generate scripts
+
+ # Try to get pydist.json so we can see if there are
+ # any commands to generate. If this fails (e.g. because
+ # of a legacy wheel), log a warning but don't give up.
+ commands = None
+ file_version = self.info['Wheel-Version']
+ if file_version == '1.0':
+ # Use legacy info
+ ep = posixpath.join(info_dir, 'entry_points.txt')
+ try:
+ with zf.open(ep) as bwf:
+ epdata = read_exports(bwf)
+ commands = {}
+ for key in ('console', 'gui'):
+ k = '%s_scripts' % key
+ if k in epdata:
+ commands['wrap_%s' % key] = d = {}
+ for v in epdata[k].values():
+ s = '%s:%s' % (v.prefix, v.suffix)
+ if v.flags:
+ s += ' [%s]' % ','.join(v.flags)
+ d[v.name] = s
+ except Exception:
+ logger.warning('Unable to read legacy script '
+ 'metadata, so cannot generate '
+ 'scripts')
+ else:
+ try:
+ with zf.open(metadata_name) as bwf:
+ wf = wrapper(bwf)
+ commands = json.load(wf).get('extensions')
+ if commands:
+ commands = commands.get('python.commands')
+ except Exception:
+ logger.warning('Unable to read JSON metadata, so '
+ 'cannot generate scripts')
+ if commands:
+ console_scripts = commands.get('wrap_console', {})
+ gui_scripts = commands.get('wrap_gui', {})
+ if console_scripts or gui_scripts:
+ script_dir = paths.get('scripts', '')
+ if not os.path.isdir(script_dir):
+ raise ValueError('Valid script path not '
+ 'specified')
+ maker.target_dir = script_dir
+ for k, v in console_scripts.items():
+ script = '%s = %s' % (k, v)
+ filenames = maker.make(script)
+ fileop.set_executable_mode(filenames)
+
+ if gui_scripts:
+ options = {'gui': True }
+ for k, v in gui_scripts.items():
+ script = '%s = %s' % (k, v)
+ filenames = maker.make(script, options)
+ fileop.set_executable_mode(filenames)
+
+ p = os.path.join(libdir, info_dir)
+ dist = InstalledDistribution(p)
+
+ # Write SHARED
+ paths = dict(paths) # don't change passed in dict
+ del paths['purelib']
+ del paths['platlib']
+ paths['lib'] = libdir
+ p = dist.write_shared_locations(paths, dry_run)
+ if p:
+ outfiles.append(p)
+
+ # Write RECORD
+ dist.write_installed_files(outfiles, paths['prefix'],
+ dry_run)
+ return dist
+ except Exception: # pragma: no cover
+ logger.exception('installation failed.')
+ fileop.rollback()
+ raise
+ finally:
+ shutil.rmtree(workdir)
+
+ def _get_dylib_cache(self):
+ global cache
+ if cache is None:
+ # Use native string to avoid issues on 2.x: see Python #20140.
+ base = os.path.join(get_cache_base(), str('dylib-cache'),
+ '%s.%s' % sys.version_info[:2])
+ cache = Cache(base)
+ return cache
+
+ def _get_extensions(self):
+ pathname = os.path.join(self.dirname, self.filename)
+ name_ver = '%s-%s' % (self.name, self.version)
+ info_dir = '%s.dist-info' % name_ver
+ arcname = posixpath.join(info_dir, 'EXTENSIONS')
+ wrapper = codecs.getreader('utf-8')
+ result = []
+ with ZipFile(pathname, 'r') as zf:
+ try:
+ with zf.open(arcname) as bf:
+ wf = wrapper(bf)
+ extensions = json.load(wf)
+ cache = self._get_dylib_cache()
+ prefix = cache.prefix_to_dir(pathname)
+ cache_base = os.path.join(cache.base, prefix)
+ if not os.path.isdir(cache_base):
+ os.makedirs(cache_base)
+ for name, relpath in extensions.items():
+ dest = os.path.join(cache_base, convert_path(relpath))
+ if not os.path.exists(dest):
+ extract = True
+ else:
+ file_time = os.stat(dest).st_mtime
+ file_time = datetime.datetime.fromtimestamp(file_time)
+ info = zf.getinfo(relpath)
+ wheel_time = datetime.datetime(*info.date_time)
+ extract = wheel_time > file_time
+ if extract:
+ zf.extract(relpath, cache_base)
+ result.append((name, dest))
+ except KeyError:
+ pass
+ return result
+
+ def is_compatible(self):
+ """
+ Determine if a wheel is compatible with the running system.
+ """
+ return is_compatible(self)
+
+ def is_mountable(self):
+ """
+ Determine if a wheel is asserted as mountable by its metadata.
+ """
+ return True # for now - metadata details TBD
+
+ def mount(self, append=False):
+ pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
+ if not self.is_compatible():
+ msg = 'Wheel %s not compatible with this Python.' % pathname
+ raise DistlibException(msg)
+ if not self.is_mountable():
+ msg = 'Wheel %s is marked as not mountable.' % pathname
+ raise DistlibException(msg)
+ if pathname in sys.path:
+ logger.debug('%s already in path', pathname)
+ else:
+ if append:
+ sys.path.append(pathname)
+ else:
+ sys.path.insert(0, pathname)
+ extensions = self._get_extensions()
+ if extensions:
+ if _hook not in sys.meta_path:
+ sys.meta_path.append(_hook)
+ _hook.add(pathname, extensions)
+
+ def unmount(self):
+ pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
+ if pathname not in sys.path:
+ logger.debug('%s not in path', pathname)
+ else:
+ sys.path.remove(pathname)
+ if pathname in _hook.impure_wheels:
+ _hook.remove(pathname)
+ if not _hook.impure_wheels:
+ if _hook in sys.meta_path:
+ sys.meta_path.remove(_hook)
+
+ def verify(self):
+ pathname = os.path.join(self.dirname, self.filename)
+ name_ver = '%s-%s' % (self.name, self.version)
+ data_dir = '%s.data' % name_ver
+ info_dir = '%s.dist-info' % name_ver
+
+ metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
+ wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
+ record_name = posixpath.join(info_dir, 'RECORD')
+
+ wrapper = codecs.getreader('utf-8')
+
+ with ZipFile(pathname, 'r') as zf:
+ with zf.open(wheel_metadata_name) as bwf:
+ wf = wrapper(bwf)
+ message = message_from_file(wf)
+ wv = message['Wheel-Version'].split('.', 1)
+ file_version = tuple([int(i) for i in wv])
+ # TODO version verification
+
+ records = {}
+ with zf.open(record_name) as bf:
+ with CSVReader(stream=bf) as reader:
+ for row in reader:
+ p = row[0]
+ records[p] = row
+
+ for zinfo in zf.infolist():
+ arcname = zinfo.filename
+ if isinstance(arcname, text_type):
+ u_arcname = arcname
+ else:
+ u_arcname = arcname.decode('utf-8')
+                # See issue #115: some wheels have '..' in their entries, but
+                # only in the filename (e.g. __main__..py), so the check is
+                # updated to look for '..' in the directory portions.
+ p = u_arcname.split('/')
+ if '..' in p:
+ raise DistlibException('invalid entry in '
+ 'wheel: %r' % u_arcname)
+
+ if self.skip_entry(u_arcname):
+ continue
+ row = records[u_arcname]
+ if row[2] and str(zinfo.file_size) != row[2]:
+ raise DistlibException('size mismatch for '
+ '%s' % u_arcname)
+ if row[1]:
+ kind, value = row[1].split('=', 1)
+ with zf.open(arcname) as bf:
+ data = bf.read()
+ _, digest = self.get_hash(data, kind)
+ if digest != value:
+ raise DistlibException('digest mismatch for '
+ '%s' % arcname)
+
+ def update(self, modifier, dest_dir=None, **kwargs):
+ """
+ Update the contents of a wheel in a generic way. The modifier should
+ be a callable which expects a dictionary argument: its keys are
+ archive-entry paths, and its values are absolute filesystem paths
+        where the contents of the corresponding archive entries can be found. The
+ modifier is free to change the contents of the files pointed to, add
+ new entries and remove entries, before returning. This method will
+ extract the entire contents of the wheel to a temporary location, call
+ the modifier, and then use the passed (and possibly updated)
+ dictionary to write a new wheel. If ``dest_dir`` is specified, the new
+ wheel is written there -- otherwise, the original wheel is overwritten.
+
+ The modifier should return True if it updated the wheel, else False.
+ This method returns the same value the modifier returns.
+ """
+
+ def get_version(path_map, info_dir):
+ version = path = None
+ key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME)
+ if key not in path_map:
+ key = '%s/PKG-INFO' % info_dir
+ if key in path_map:
+ path = path_map[key]
+ version = Metadata(path=path).version
+ return version, path
+
+ def update_version(version, path):
+ updated = None
+ try:
+ v = NormalizedVersion(version)
+ i = version.find('-')
+ if i < 0:
+ updated = '%s+1' % version
+ else:
+ parts = [int(s) for s in version[i + 1:].split('.')]
+ parts[-1] += 1
+ updated = '%s+%s' % (version[:i],
+ '.'.join(str(i) for i in parts))
+ except UnsupportedVersionError:
+ logger.debug('Cannot update non-compliant (PEP-440) '
+ 'version %r', version)
+ if updated:
+ md = Metadata(path=path)
+ md.version = updated
+ legacy = path.endswith(LEGACY_METADATA_FILENAME)
+ md.write(path=path, legacy=legacy)
+ logger.debug('Version updated from %r to %r', version,
+ updated)
+
+ pathname = os.path.join(self.dirname, self.filename)
+ name_ver = '%s-%s' % (self.name, self.version)
+ info_dir = '%s.dist-info' % name_ver
+ record_name = posixpath.join(info_dir, 'RECORD')
+ with tempdir() as workdir:
+ with ZipFile(pathname, 'r') as zf:
+ path_map = {}
+ for zinfo in zf.infolist():
+ arcname = zinfo.filename
+ if isinstance(arcname, text_type):
+ u_arcname = arcname
+ else:
+ u_arcname = arcname.decode('utf-8')
+ if u_arcname == record_name:
+ continue
+ if '..' in u_arcname:
+ raise DistlibException('invalid entry in '
+ 'wheel: %r' % u_arcname)
+ zf.extract(zinfo, workdir)
+ path = os.path.join(workdir, convert_path(u_arcname))
+ path_map[u_arcname] = path
+
+ # Remember the version.
+ original_version, _ = get_version(path_map, info_dir)
+ # Files extracted. Call the modifier.
+ modified = modifier(path_map, **kwargs)
+ if modified:
+ # Something changed - need to build a new wheel.
+ current_version, path = get_version(path_map, info_dir)
+ if current_version and (current_version == original_version):
+ # Add or update local version to signify changes.
+ update_version(current_version, path)
+ # Decide where the new wheel goes.
+ if dest_dir is None:
+ fd, newpath = tempfile.mkstemp(suffix='.whl',
+ prefix='wheel-update-',
+ dir=workdir)
+ os.close(fd)
+ else:
+ if not os.path.isdir(dest_dir):
+ raise DistlibException('Not a directory: %r' % dest_dir)
+ newpath = os.path.join(dest_dir, self.filename)
+ archive_paths = list(path_map.items())
+ distinfo = os.path.join(workdir, info_dir)
+ info = distinfo, info_dir
+ self.write_records(info, workdir, archive_paths)
+ self.build_zip(newpath, archive_paths)
+ if dest_dir is None:
+ shutil.copyfile(newpath, pathname)
+ return modified
+
+def compatible_tags():
+ """
+ Return (pyver, abi, arch) tuples compatible with this Python.
+ """
+ versions = [VER_SUFFIX]
+ major = VER_SUFFIX[0]
+ for minor in range(sys.version_info[1] - 1, - 1, -1):
+ versions.append(''.join([major, str(minor)]))
+
+ abis = []
+ for suffix, _, _ in imp.get_suffixes():
+ if suffix.startswith('.abi'):
+ abis.append(suffix.split('.', 2)[1])
+ abis.sort()
+ if ABI != 'none':
+ abis.insert(0, ABI)
+ abis.append('none')
+ result = []
+
+ arches = [ARCH]
+ if sys.platform == 'darwin':
+ m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
+ if m:
+ name, major, minor, arch = m.groups()
+ minor = int(minor)
+ matches = [arch]
+ if arch in ('i386', 'ppc'):
+ matches.append('fat')
+ if arch in ('i386', 'ppc', 'x86_64'):
+ matches.append('fat3')
+ if arch in ('ppc64', 'x86_64'):
+ matches.append('fat64')
+ if arch in ('i386', 'x86_64'):
+ matches.append('intel')
+ if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
+ matches.append('universal')
+ while minor >= 0:
+ for match in matches:
+ s = '%s_%s_%s_%s' % (name, major, minor, match)
+ if s != ARCH: # already there
+ arches.append(s)
+ minor -= 1
+
+ # Most specific - our Python version, ABI and arch
+ for abi in abis:
+ for arch in arches:
+ result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))
+
+ # where no ABI / arch dependency, but IMP_PREFIX dependency
+ for i, version in enumerate(versions):
+ result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
+ if i == 0:
+ result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))
+
+ # no IMP_PREFIX, ABI or arch dependency
+ for i, version in enumerate(versions):
+ result.append((''.join(('py', version)), 'none', 'any'))
+ if i == 0:
+ result.append((''.join(('py', version[0])), 'none', 'any'))
+ return set(result)
+
+
+COMPATIBLE_TAGS = compatible_tags()
+
+del compatible_tags
+
+
+def is_compatible(wheel, tags=None):
+ if not isinstance(wheel, Wheel):
+ wheel = Wheel(wheel) # assume it's a filename
+ result = False
+ if tags is None:
+ tags = COMPATIBLE_TAGS
+ for ver, abi, arch in tags:
+ if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:
+ result = True
+ break
+ return result
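+
+# A hedged usage note (not part of the original source): COMPATIBLE_TAGS and
+# is_compatible() let callers check a wheel before trying to install it, e.g.
+#
+#     w = Wheel('mypkg-1.0-py3-none-any.whl')   # hypothetical filename
+#     if is_compatible(w):
+#         pass  # safe to install on this interpreter/platform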
diff --git a/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/LICENSE
new file mode 100644
index 0000000000..cf1ab25da0
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/LICENSE
@@ -0,0 +1,24 @@
+This is free and unencumbered software released into the public domain.
+
+Anyone is free to copy, modify, publish, use, compile, sell, or
+distribute this software, either in source code form or as a compiled
+binary, for any purpose, commercial or non-commercial, and by any
+means.
+
+In jurisdictions that recognize copyright laws, the author or authors
+of this software dedicate any and all copyright interest in the
+software to the public domain. We make this dedication for the benefit
+of the public at large and to the detriment of our heirs and
+successors. We intend this dedication to be an overt act of
+relinquishment in perpetuity of all present and future rights to this
+software under copyright law.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+For more information, please refer to <http://unlicense.org>
diff --git a/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/METADATA
new file mode 100644
index 0000000000..79d8d47990
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/METADATA
@@ -0,0 +1,156 @@
+Metadata-Version: 2.1
+Name: filelock
+Version: 3.0.12
+Summary: A platform independent file lock.
+Home-page: https://github.com/benediktschmitt/py-filelock
+Author: Benedikt Schmitt
+Author-email: benedikt@benediktschmitt.de
+License: Public Domain <http://unlicense.org>
+Download-URL: https://github.com/benediktschmitt/py-filelock/archive/master.zip
+Platform: UNKNOWN
+Classifier: License :: Public Domain
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: System
+Classifier: Topic :: Internet
+Classifier: Topic :: Software Development :: Libraries
+Description-Content-Type: text/markdown
+
+# py-filelock
+
+![travis-ci](https://travis-ci.org/benediktschmitt/py-filelock.svg?branch=master)
+
+This package contains a single module, which implements a platform-independent
+file lock in Python and provides a simple way of inter-process communication:
+
+```Python
+from filelock import Timeout, FileLock
+
+lock = FileLock("high_ground.txt.lock")
+with lock:
+ open("high_ground.txt", "a").write("You were the chosen one.")
+```
+
+**Don't use** a *FileLock* to lock the file you want to write to; instead, create
+a separate *.lock* file as shown above.
+
+![animated example](https://raw.githubusercontent.com/benediktschmitt/py-filelock/master/example/example.gif)
+
+
+## Similar libraries
+
+Perhaps you are looking for something like
+
+* https://pypi.python.org/pypi/pid/2.1.1
+* https://docs.python.org/3.6/library/msvcrt.html#msvcrt.locking
+* or https://docs.python.org/3/library/fcntl.html#fcntl.flock
+
+
+## Installation
+
+*py-filelock* is available via PyPI:
+
+```
+$ pip3 install filelock
+```
+
+
+## Documentation
+
+The documentation for the API is available on
+[readthedocs.org](https://filelock.readthedocs.io/).
+
+
+### Examples
+
+A *FileLock* is used to indicate to other processes of your application that a
+resource or working directory is currently in use. To do so, create a
+*FileLock* first:
+
+```Python
+from filelock import Timeout, FileLock
+
+file_path = "high_ground.txt"
+lock_path = "high_ground.txt.lock"
+
+lock = FileLock(lock_path, timeout=1)
+```
+
+The lock object supports multiple ways for acquiring the lock, including the
+ones used to acquire standard Python thread locks:
+
+```Python
+with lock:
+ open(file_path, "a").write("Hello there!")
+
+lock.acquire()
+try:
+ open(file_path, "a").write("General Kenobi!")
+finally:
+ lock.release()
+```
+
+The *acquire()* method also accepts a *timeout* parameter. If the lock cannot be
+acquired within *timeout* seconds, a *Timeout* exception is raised:
+
+```Python
+try:
+ with lock.acquire(timeout=10):
+ open(file_path, "a").write("I have a bad feeling about this.")
+except Timeout:
+ print("Another instance of this application currently holds the lock.")
+```
+
+The lock objects are recursive locks, which means that once acquired, they will
+not block on successive lock requests:
+
+```Python
+def cite1():
+ with lock:
+ open(file_path, "a").write("I hate it when he does that.")
+
+def cite2():
+ with lock:
+ open(file_path, "a").write("You don't want to sell me death sticks.")
+
+# The lock is acquired here.
+with lock:
+ cite1()
+ cite2()
+
+# And released here.
+```
+
+
+## FileLock vs SoftFileLock
+
+The *FileLock* is platform dependent while the *SoftFileLock* is not. Use the
+*FileLock* if all instances of your application are running on the same host and
+a *SoftFileLock* otherwise.
+
+The *SoftFileLock* only watches the existence of the lock file. This makes it
+ultra portable, but also more prone to deadlocks if the application crashes.
+You can simply delete the lock file in such cases.
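+
+A small sketch (the paths here are placeholders): a *SoftFileLock* is created
+and used exactly like a *FileLock*:
+
+```Python
+from filelock import SoftFileLock
+
+lock = SoftFileLock("shared/resource.txt.lock", timeout=5)
+with lock:
+    open("shared/resource.txt", "a").write("Only one writer at a time.")
+```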
+
+
+## Contributions
+
+Contributions are always welcome; please make sure they pass all tests before
+creating a pull request. Never hesitate to open a new issue, although it may
+take some time for me to respond.
+
+
+## License
+
+This package is [public domain](./LICENSE.rst).
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/RECORD
new file mode 100644
index 0000000000..c5f2e1f5cf
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/RECORD
@@ -0,0 +1,6 @@
+filelock.py,sha256=5DQTtOaQq7-vgLkZzvOhqhVMh_umfydWgSA8Vuzmf8M,13229
+filelock-3.0.12.dist-info/LICENSE,sha256=iNm062BXnBkew5HKBMFhMFctfu3EqG2qWL8oxuFMm80,1210
+filelock-3.0.12.dist-info/METADATA,sha256=gjzbv9nxtD-Rj2ysjUuG7SLZCHUQl5hMy68Jij8soPw,4343
+filelock-3.0.12.dist-info/WHEEL,sha256=EVRjI69F5qVjm_YgqcTXPnTAv3BfSUr0WVAHuSP3Xoo,92
+filelock-3.0.12.dist-info/top_level.txt,sha256=NDrf9i5BNogz4hEdsr6Hi7Ws3TlSSKY4Q2Y9_-i2GwU,9
+filelock-3.0.12.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/WHEEL
new file mode 100644
index 0000000000..83ff02e961
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/top_level.txt
new file mode 100644
index 0000000000..83c2e35706
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info/top_level.txt
@@ -0,0 +1 @@
+filelock
diff --git a/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py
new file mode 100644
index 0000000000..978ff5e865
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py
@@ -0,0 +1,451 @@
+# This is free and unencumbered software released into the public domain.
+#
+# Anyone is free to copy, modify, publish, use, compile, sell, or
+# distribute this software, either in source code form or as a compiled
+# binary, for any purpose, commercial or non-commercial, and by any
+# means.
+#
+# In jurisdictions that recognize copyright laws, the author or authors
+# of this software dedicate any and all copyright interest in the
+# software to the public domain. We make this dedication for the benefit
+# of the public at large and to the detriment of our heirs and
+# successors. We intend this dedication to be an overt act of
+# relinquishment in perpetuity of all present and future rights to this
+# software under copyright law.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# For more information, please refer to <http://unlicense.org>
+
+"""
+A platform independent file lock that supports the with-statement.
+"""
+
+
+# Modules
+# ------------------------------------------------
+import logging
+import os
+import threading
+import time
+try:
+ import warnings
+except ImportError:
+ warnings = None
+
+try:
+ import msvcrt
+except ImportError:
+ msvcrt = None
+
+try:
+ import fcntl
+except ImportError:
+ fcntl = None
+
+
+# Backward compatibility
+# ------------------------------------------------
+try:
+ TimeoutError
+except NameError:
+ TimeoutError = OSError
+
+
+# Data
+# ------------------------------------------------
+__all__ = [
+ "Timeout",
+ "BaseFileLock",
+ "WindowsFileLock",
+ "UnixFileLock",
+ "SoftFileLock",
+ "FileLock"
+]
+
+__version__ = "3.0.12"
+
+
+_logger = None
+def logger():
+ """Returns the logger instance used in this module."""
+ global _logger
+ _logger = _logger or logging.getLogger(__name__)
+ return _logger
+
+
+# Exceptions
+# ------------------------------------------------
+class Timeout(TimeoutError):
+ """
+ Raised when the lock could not be acquired in *timeout*
+ seconds.
+ """
+
+ def __init__(self, lock_file):
+ """
+ """
+ #: The path of the file lock.
+ self.lock_file = lock_file
+ return None
+
+ def __str__(self):
+ temp = "The file lock '{}' could not be acquired."\
+ .format(self.lock_file)
+ return temp
+
+
+# Classes
+# ------------------------------------------------
+
+# This is a helper class which is returned by :meth:`BaseFileLock.acquire`
+# and wraps the lock to make sure __enter__ is not called twice when entering
+# the with statement.
+# If we would simply return *self*, the lock would be acquired again
+# in the *__enter__* method of the BaseFileLock, but not released again
+# automatically.
+#
+# :seealso: issue #37 (memory leak)
+class _Acquire_ReturnProxy(object):
+
+ def __init__(self, lock):
+ self.lock = lock
+ return None
+
+ def __enter__(self):
+ return self.lock
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.lock.release()
+ return None
+
+
+class BaseFileLock(object):
+ """
+ Implements the base class of a file lock.
+ """
+
+ def __init__(self, lock_file, timeout = -1):
+ """
+ """
+ # The path to the lock file.
+ self._lock_file = lock_file
+
+ # The file descriptor for the *_lock_file* as it is returned by the
+ # os.open() function.
+ # This file lock is only NOT None, if the object currently holds the
+ # lock.
+ self._lock_file_fd = None
+
+ # The default timeout value.
+ self.timeout = timeout
+
+ # We use this lock primarily for the lock counter.
+ self._thread_lock = threading.Lock()
+
+ # The lock counter is used for implementing the nested locking
+ # mechanism. Whenever the lock is acquired, the counter is increased and
+ # the lock is only released, when this value is 0 again.
+ self._lock_counter = 0
+ return None
+
+ @property
+ def lock_file(self):
+ """
+ The path to the lock file.
+ """
+ return self._lock_file
+
+ @property
+ def timeout(self):
+ """
+ You can set a default timeout for the filelock. It will be used as
+        the fallback value in the acquire method if no timeout value (*None*) is
+ given.
+
+ If you want to disable the timeout, set it to a negative value.
+
+        A timeout of 0 means that there is exactly one attempt to acquire the
+ file lock.
+
+ .. versionadded:: 2.0.0
+ """
+ return self._timeout
+
+ @timeout.setter
+ def timeout(self, value):
+ """
+ """
+ self._timeout = float(value)
+ return None
+
+ # Platform dependent locking
+ # --------------------------------------------
+
+ def _acquire(self):
+ """
+ Platform dependent. If the file lock could be
+ acquired, self._lock_file_fd holds the file descriptor
+ of the lock file.
+ """
+ raise NotImplementedError()
+
+ def _release(self):
+ """
+ Releases the lock and sets self._lock_file_fd to None.
+ """
+ raise NotImplementedError()
+
+ # Platform independent methods
+ # --------------------------------------------
+
+ @property
+ def is_locked(self):
+ """
+        True if the object holds the file lock.
+
+ .. versionchanged:: 2.0.0
+
+ This was previously a method and is now a property.
+ """
+ return self._lock_file_fd is not None
+
+ def acquire(self, timeout=None, poll_intervall=0.05):
+ """
+ Acquires the file lock or fails with a :exc:`Timeout` error.
+
+ .. code-block:: python
+
+ # You can use this method in the context manager (recommended)
+ with lock.acquire():
+ pass
+
+ # Or use an equivalent try-finally construct:
+ lock.acquire()
+ try:
+ pass
+ finally:
+ lock.release()
+
+ :arg float timeout:
+ The maximum time waited for the file lock.
+ If ``timeout < 0``, there is no timeout and this method will
+ block until the lock could be acquired.
+ If ``timeout`` is None, the default :attr:`~timeout` is used.
+
+ :arg float poll_intervall:
+ We check once in *poll_intervall* seconds if we can acquire the
+ file lock.
+
+ :raises Timeout:
+ if the lock could not be acquired in *timeout* seconds.
+
+ .. versionchanged:: 2.0.0
+
+            This method now returns a *proxy* object instead of *self*,
+ so that it can be used in a with statement without side effects.
+ """
+ # Use the default timeout, if no timeout is provided.
+ if timeout is None:
+ timeout = self.timeout
+
+ # Increment the number right at the beginning.
+ # We can still undo it, if something fails.
+ with self._thread_lock:
+ self._lock_counter += 1
+
+ lock_id = id(self)
+ lock_filename = self._lock_file
+ start_time = time.time()
+ try:
+ while True:
+ with self._thread_lock:
+ if not self.is_locked:
+ logger().debug('Attempting to acquire lock %s on %s', lock_id, lock_filename)
+ self._acquire()
+
+ if self.is_locked:
+ logger().info('Lock %s acquired on %s', lock_id, lock_filename)
+ break
+ elif timeout >= 0 and time.time() - start_time > timeout:
+ logger().debug('Timeout on acquiring lock %s on %s', lock_id, lock_filename)
+ raise Timeout(self._lock_file)
+ else:
+ logger().debug(
+ 'Lock %s not acquired on %s, waiting %s seconds ...',
+ lock_id, lock_filename, poll_intervall
+ )
+ time.sleep(poll_intervall)
+ except:
+ # Something did go wrong, so decrement the counter.
+ with self._thread_lock:
+ self._lock_counter = max(0, self._lock_counter - 1)
+
+ raise
+ return _Acquire_ReturnProxy(lock = self)
+
+ def release(self, force = False):
+ """
+ Releases the file lock.
+
+        Please note that the lock is only completely released if the lock
+        counter is 0.
+
+        Also note that the lock file itself is not automatically deleted.
+
+ :arg bool force:
+ If true, the lock counter is ignored and the lock is released in
+ every case.
+ """
+ with self._thread_lock:
+
+ if self.is_locked:
+ self._lock_counter -= 1
+
+ if self._lock_counter == 0 or force:
+ lock_id = id(self)
+ lock_filename = self._lock_file
+
+ logger().debug('Attempting to release lock %s on %s', lock_id, lock_filename)
+ self._release()
+ self._lock_counter = 0
+ logger().info('Lock %s released on %s', lock_id, lock_filename)
+
+ return None
+
+ def __enter__(self):
+ self.acquire()
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.release()
+ return None
+
+ def __del__(self):
+ self.release(force = True)
+ return None
+
+
+# Windows locking mechanism
+# ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+class WindowsFileLock(BaseFileLock):
+ """
+ Uses the :func:`msvcrt.locking` function to hard lock the lock file on
+    Windows systems.
+ """
+
+ def _acquire(self):
+ open_mode = os.O_RDWR | os.O_CREAT | os.O_TRUNC
+
+ try:
+ fd = os.open(self._lock_file, open_mode)
+ except OSError:
+ pass
+ else:
+ try:
+ msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)
+ except (IOError, OSError):
+ os.close(fd)
+ else:
+ self._lock_file_fd = fd
+ return None
+
+ def _release(self):
+ fd = self._lock_file_fd
+ self._lock_file_fd = None
+ msvcrt.locking(fd, msvcrt.LK_UNLCK, 1)
+ os.close(fd)
+
+ try:
+ os.remove(self._lock_file)
+ # Probably another instance of the application
+ # that acquired the file lock.
+ except OSError:
+ pass
+ return None
+
+# Unix locking mechanism
+# ~~~~~~~~~~~~~~~~~~~~~~
+
+class UnixFileLock(BaseFileLock):
+ """
+    Uses :func:`fcntl.flock` to hard lock the lock file on Unix systems.
+ """
+
+ def _acquire(self):
+ open_mode = os.O_RDWR | os.O_CREAT | os.O_TRUNC
+ fd = os.open(self._lock_file, open_mode)
+
+ try:
+ fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ except (IOError, OSError):
+ os.close(fd)
+ else:
+ self._lock_file_fd = fd
+ return None
+
+ def _release(self):
+ # Do not remove the lockfile:
+ #
+ # https://github.com/benediktschmitt/py-filelock/issues/31
+ # https://stackoverflow.com/questions/17708885/flock-removing-locked-file-without-race-condition
+ fd = self._lock_file_fd
+ self._lock_file_fd = None
+ fcntl.flock(fd, fcntl.LOCK_UN)
+ os.close(fd)
+ return None
+
+# Soft lock
+# ~~~~~~~~~
+
+class SoftFileLock(BaseFileLock):
+ """
+ Simply watches the existence of the lock file.
+ """
+
+ def _acquire(self):
+ open_mode = os.O_WRONLY | os.O_CREAT | os.O_EXCL | os.O_TRUNC
+ try:
+ fd = os.open(self._lock_file, open_mode)
+ except (IOError, OSError):
+ pass
+ else:
+ self._lock_file_fd = fd
+ return None
+
+ def _release(self):
+ os.close(self._lock_file_fd)
+ self._lock_file_fd = None
+
+ try:
+ os.remove(self._lock_file)
+ # The file is already deleted and that's what we want.
+ except OSError:
+ pass
+ return None
+
+
+# Platform filelock
+# ~~~~~~~~~~~~~~~~~
+
+#: Alias for the lock, which should be used for the current platform. On
+#: Windows, this is an alias for :class:`WindowsFileLock`, on Unix for
+#: :class:`UnixFileLock` and otherwise for :class:`SoftFileLock`.
+FileLock = None
+
+if msvcrt:
+ FileLock = WindowsFileLock
+elif fcntl:
+ FileLock = UnixFileLock
+else:
+ FileLock = SoftFileLock
+
+ if warnings is not None:
+ warnings.warn("only soft file lock is available")
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/LICENSE
new file mode 100644
index 0000000000..be7e092b0b
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2017-2019 Jason R. Coombs, Barry Warsaw
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/METADATA
new file mode 100644
index 0000000000..165a67ded5
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/METADATA
@@ -0,0 +1,65 @@
+Metadata-Version: 2.1
+Name: importlib-metadata
+Version: 1.1.3
+Summary: Read metadata from Python packages
+Home-page: http://importlib-metadata.readthedocs.io/
+Author: Barry Warsaw
+Author-email: barry@python.org
+License: Apache Software License
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 2
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7
+Requires-Dist: zipp (>=0.5)
+Requires-Dist: contextlib2 ; python_version < "3"
+Requires-Dist: configparser (>=3.5) ; python_version < "3"
+Requires-Dist: pathlib2 ; python_version == "3.4.*" or python_version < "3"
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: rst.linker ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: packaging ; extra == 'testing'
+Requires-Dist: importlib-resources ; (python_version < "3.7") and extra == 'testing'
+
+=========================
+ ``importlib_metadata``
+=========================
+
+``importlib_metadata`` is a library to access the metadata for a Python
+package. It is intended to be ported to Python 3.8.
+
+
+Usage
+=====
+
+See the `online documentation <https://importlib_metadata.readthedocs.io/>`_
+for usage details.
+
+`Finder authors
+<https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can
+also add support for custom package installers. See the above documentation
+for details.
+
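+A minimal sketch of typical calls (assuming some installed distribution,
+here called ``wheel``, purely for illustration)::
+
+    from importlib_metadata import version, metadata, entry_points
+
+    version('wheel')                # the installed version string
+    metadata('wheel')['Summary']    # one field of the parsed metadata
+    entry_points()                  # dict mapping group names to entry points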
+
+Caveats
+=======
+
+This project primarily supports third-party packages installed by PyPA
+tools (or other conforming packages). It does not support:
+
+- Packages in the stdlib.
+- Packages installed without metadata.
+
+Project details
+===============
+
+ * Project home: https://gitlab.com/python-devs/importlib_metadata
+ * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues
+ * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git
+ * Documentation: http://importlib_metadata.readthedocs.io/
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/RECORD
new file mode 100644
index 0000000000..d0eac8875c
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/RECORD
@@ -0,0 +1,21 @@
+importlib_metadata/__init__.py,sha256=wjIJ8vwgfW6r1J8Yckbk2mqOk_ZDPe7fQvsDj1oG-aQ,16840
+importlib_metadata/_compat.py,sha256=EwnYmvejrDFHENaQEutLz7L1rvyK6jJv9-xwk_bWVTI,4265
+importlib_metadata/docs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_metadata/docs/changelog.rst,sha256=QZ-WVYSgPlbeva4C8z5o58Ufpku4_JGfEOvEoPK-qt4,7086
+importlib_metadata/docs/conf.py,sha256=DM_-W8bvIar_YqWeRQUcgWT1_phXe-H2IcYgM8JIkiY,5468
+importlib_metadata/docs/index.rst,sha256=bHIGj1koPACV8OV02uHTGRMax46lGj00KLOji1aPl_c,2165
+importlib_metadata/docs/using.rst,sha256=2S6KGhJ66t8kM3cik7K03X1AJUGX0TWr6byaHEsJjnc,9826
+importlib_metadata/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_metadata/tests/fixtures.py,sha256=sshuoJ4ezljeouUddVg-76K1UOStKWBecovZOKOBguk,5004
+importlib_metadata/tests/test_api.py,sha256=YMAGTsRENrtvpw2CSLmRndJMBeT4q_M0GSe-QsnnMZ4,5544
+importlib_metadata/tests/test_integration.py,sha256=kzqav9qAePjz7UR-GNna65xLwXlRcxEDYDwmuOFwpKE,686
+importlib_metadata/tests/test_main.py,sha256=nnKTmcIA14lhynepCfXtiTYWH35hNFuFfIcKBkzShuY,7179
+importlib_metadata/tests/test_zip.py,sha256=qG3IquiTFLSrUtpxEJblqiUtgEcOTfjU2yM35REk0fo,2372
+importlib_metadata/tests/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_metadata/tests/data/example-21.12-py3-none-any.whl,sha256=I-kYufETid-tDYyR8f1OFJ3t5u_Io23k0cbQxJTUN4I,1455
+importlib_metadata/tests/data/example-21.12-py3.6.egg,sha256=-EeugFAijkdUO9xyQHTZkQwZoFXK0_QxICBj6R5AAJo,1497
+importlib_metadata-1.1.3.dist-info/LICENSE,sha256=wNe6dAchmJ1VvVB8D9oTc-gHHadCuaSBAev36sYEM6U,571
+importlib_metadata-1.1.3.dist-info/METADATA,sha256=zI5ihvOML51dmmsBF9_GrpnlUCgU8PTWXYa0Eb47nZU,2114
+importlib_metadata-1.1.3.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
+importlib_metadata-1.1.3.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19
+importlib_metadata-1.1.3.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/WHEEL
new file mode 100644
index 0000000000..8b701e93c2
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.33.6)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/top_level.txt
new file mode 100644
index 0000000000..bbb07547a1
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+importlib_metadata
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/__init__.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/__init__.py
new file mode 100644
index 0000000000..31ff8462f3
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/__init__.py
@@ -0,0 +1,541 @@
+from __future__ import unicode_literals, absolute_import
+
+import io
+import os
+import re
+import abc
+import csv
+import sys
+import zipp
+import operator
+import functools
+import itertools
+import collections
+
+from ._compat import (
+ install,
+ NullFinder,
+ ConfigParser,
+ suppress,
+ map,
+ FileNotFoundError,
+ IsADirectoryError,
+ NotADirectoryError,
+ PermissionError,
+ pathlib,
+ PYPY_OPEN_BUG,
+ ModuleNotFoundError,
+ MetaPathFinder,
+ email_message_from_string,
+ ensure_is_path,
+ PyPy_repr,
+ )
+from importlib import import_module
+from itertools import starmap
+
+
+__metaclass__ = type
+
+
+__all__ = [
+ 'Distribution',
+ 'DistributionFinder',
+ 'PackageNotFoundError',
+ 'distribution',
+ 'distributions',
+ 'entry_points',
+ 'files',
+ 'metadata',
+ 'requires',
+ 'version',
+ ]
+
+
+class PackageNotFoundError(ModuleNotFoundError):
+ """The package was not found."""
+
+
+class EntryPoint(
+ PyPy_repr,
+ collections.namedtuple('EntryPointBase', 'name value group')):
+ """An entry point as defined by Python packaging conventions.
+
+ See `the packaging docs on entry points
+ <https://packaging.python.org/specifications/entry-points/>`_
+ for more information.
+ """
+
+ pattern = re.compile(
+ r'(?P<module>[\w.]+)\s*'
+ r'(:\s*(?P<attr>[\w.]+))?\s*'
+ r'(?P<extras>\[.*\])?\s*$'
+ )
+ """
+ A regular expression describing the syntax for an entry point,
+ which might look like:
+
+ - module
+ - package.module
+ - package.module:attribute
+ - package.module:object.attribute
+ - package.module:attr [extra1, extra2]
+
+ Other combinations are possible as well.
+
+ The expression is lenient about whitespace around the ':',
+ following the attr, and following any extras.
+ """
+
+ def load(self):
+ """Load the entry point from its definition. If only a module
+ is indicated by the value, return that module. Otherwise,
+ return the named object.
+ """
+ match = self.pattern.match(self.value)
+ module = import_module(match.group('module'))
+ attrs = filter(None, (match.group('attr') or '').split('.'))
+ return functools.reduce(getattr, attrs, module)
+
+ @property
+ def extras(self):
+ match = self.pattern.match(self.value)
+ return list(re.finditer(r'\w+', match.group('extras') or ''))
+
+ @classmethod
+ def _from_config(cls, config):
+ return [
+ cls(name, value, group)
+ for group in config.sections()
+ for name, value in config.items(group)
+ ]
+
+ @classmethod
+ def _from_text(cls, text):
+ config = ConfigParser(delimiters='=')
+ # case sensitive: https://stackoverflow.com/q/1611799/812183
+ config.optionxform = str
+ try:
+ config.read_string(text)
+ except AttributeError: # pragma: nocover
+ # Python 2 has no read_string
+ config.readfp(io.StringIO(text))
+ return EntryPoint._from_config(config)
+
+ def __iter__(self):
+ """
+ Supply iter so one may construct dicts of EntryPoints easily.
+ """
+ return iter((self.name, self))
+
+ def __reduce__(self):
+ return (
+ self.__class__,
+ (self.name, self.value, self.group),
+ )
+
+
+class PackagePath(pathlib.PurePosixPath):
+ """A reference to a path in a package"""
+
+ def read_text(self, encoding='utf-8'):
+ with self.locate().open(encoding=encoding) as stream:
+ return stream.read()
+
+ def read_binary(self):
+ with self.locate().open('rb') as stream:
+ return stream.read()
+
+ def locate(self):
+ """Return a path-like object for this path"""
+ return self.dist.locate_file(self)
+
+
+class FileHash:
+ def __init__(self, spec):
+ self.mode, _, self.value = spec.partition('=')
+
+ def __repr__(self):
+ return '<FileHash mode: {} value: {}>'.format(self.mode, self.value)
+
+
+class Distribution:
+ """A Python distribution package."""
+
+ @abc.abstractmethod
+ def read_text(self, filename):
+ """Attempt to load metadata file given by the name.
+
+ :param filename: The name of the file in the distribution info.
+ :return: The text if found, otherwise None.
+ """
+
+ @abc.abstractmethod
+ def locate_file(self, path):
+ """
+ Given a path to a file in this distribution, return a path
+ to it.
+ """
+
+ @classmethod
+ def from_name(cls, name):
+ """Return the Distribution for the given package name.
+
+ :param name: The name of the distribution package to search for.
+ :return: The Distribution instance (or subclass thereof) for the named
+ package, if found.
+ :raises PackageNotFoundError: When the named package's distribution
+ metadata cannot be found.
+ """
+ for resolver in cls._discover_resolvers():
+ dists = resolver(DistributionFinder.Context(name=name))
+ dist = next(dists, None)
+ if dist is not None:
+ return dist
+ else:
+ raise PackageNotFoundError(name)
+
+ @classmethod
+ def discover(cls, **kwargs):
+ """Return an iterable of Distribution objects for all packages.
+
+ Pass a ``context`` or pass keyword arguments for constructing
+ a context.
+
+ :context: A ``DistributionFinder.Context`` object.
+ :return: Iterable of Distribution objects for all packages.
+ """
+ context = kwargs.pop('context', None)
+ if context and kwargs:
+ raise ValueError("cannot accept context and kwargs")
+ context = context or DistributionFinder.Context(**kwargs)
+ return itertools.chain.from_iterable(
+ resolver(context)
+ for resolver in cls._discover_resolvers()
+ )
+
+ @staticmethod
+ def at(path):
+ """Return a Distribution for the indicated metadata path
+
+ :param path: a string or path-like object
+ :return: a concrete Distribution instance for the path
+ """
+ return PathDistribution(ensure_is_path(path))
+
+ @staticmethod
+ def _discover_resolvers():
+ """Search the meta_path for resolvers."""
+ declared = (
+ getattr(finder, 'find_distributions', None)
+ for finder in sys.meta_path
+ )
+ return filter(None, declared)
+
+ @property
+ def metadata(self):
+ """Return the parsed metadata for this Distribution.
+
+ The returned object will have keys that name the various bits of
+ metadata. See PEP 566 for details.
+ """
+ text = (
+ self.read_text('METADATA')
+ or self.read_text('PKG-INFO')
+ # This last clause is here to support old egg-info files. Its
+ # effect is to just end up using the PathDistribution's self._path
+ # (which points to the egg-info file) attribute unchanged.
+ or self.read_text('')
+ )
+ return email_message_from_string(text)
+
+ @property
+ def version(self):
+ """Return the 'Version' metadata for the distribution package."""
+ return self.metadata['Version']
+
+ @property
+ def entry_points(self):
+ return EntryPoint._from_text(self.read_text('entry_points.txt'))
+
+ @property
+ def files(self):
+ """Files in this distribution.
+
+ :return: List of PackagePath for this distribution or None
+
+ Result is `None` if the metadata file that enumerates files
+ (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
+ missing.
+ Result may be empty if the metadata exists but is empty.
+ """
+ file_lines = self._read_files_distinfo() or self._read_files_egginfo()
+
+ def make_file(name, hash=None, size_str=None):
+ result = PackagePath(name)
+ result.hash = FileHash(hash) if hash else None
+ result.size = int(size_str) if size_str else None
+ result.dist = self
+ return result
+
+ return file_lines and list(starmap(make_file, csv.reader(file_lines)))
+
+ def _read_files_distinfo(self):
+ """
+ Read the lines of RECORD
+ """
+ text = self.read_text('RECORD')
+ return text and text.splitlines()
+
+ def _read_files_egginfo(self):
+ """
+ SOURCES.txt might contain literal commas, so wrap each line
+ in quotes.
+ """
+ text = self.read_text('SOURCES.txt')
+ return text and map('"{}"'.format, text.splitlines())
+
+ @property
+ def requires(self):
+ """Generated requirements specified for this Distribution"""
+ reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
+ return reqs and list(reqs)
+
+ def _read_dist_info_reqs(self):
+ return self.metadata.get_all('Requires-Dist')
+
+ def _read_egg_info_reqs(self):
+ source = self.read_text('requires.txt')
+ return source and self._deps_from_requires_text(source)
+
+ @classmethod
+ def _deps_from_requires_text(cls, source):
+ section_pairs = cls._read_sections(source.splitlines())
+ sections = {
+ section: list(map(operator.itemgetter('line'), results))
+ for section, results in
+ itertools.groupby(section_pairs, operator.itemgetter('section'))
+ }
+ return cls._convert_egg_info_reqs_to_simple_reqs(sections)
+
+ @staticmethod
+ def _read_sections(lines):
+ section = None
+ for line in filter(None, lines):
+ section_match = re.match(r'\[(.*)\]$', line)
+ if section_match:
+ section = section_match.group(1)
+ continue
+ yield locals()
+
+ @staticmethod
+ def _convert_egg_info_reqs_to_simple_reqs(sections):
+ """
+ Historically, setuptools would solicit and store 'extra'
+ requirements, including those with environment markers,
+ in separate sections. More modern tools expect each
+ dependency to be defined separately, with any relevant
+ extras and environment markers attached directly to that
+ requirement. This method converts the former to the
+ latter. See _test_deps_from_requires_text for an example.
+ """
+ def make_condition(name):
+ return name and 'extra == "{name}"'.format(name=name)
+
+ def parse_condition(section):
+ section = section or ''
+ extra, sep, markers = section.partition(':')
+ if extra and markers:
+ markers = '({markers})'.format(markers=markers)
+ conditions = list(filter(None, [markers, make_condition(extra)]))
+ return '; ' + ' and '.join(conditions) if conditions else ''
+
+ for section, deps in sections.items():
+ for dep in deps:
+ yield dep + parse_condition(section)
+
+
+class DistributionFinder(MetaPathFinder):
+ """
+ A MetaPathFinder capable of discovering installed distributions.
+ """
+
+ class Context:
+
+ name = None
+ """
+ Specific name for which a distribution finder should match.
+ """
+
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+ @property
+ def path(self):
+ """
+ The path that a distribution finder should search.
+ """
+ return vars(self).get('path', sys.path)
+
+ @property
+ def pattern(self):
+ return '.*' if self.name is None else re.escape(self.name)
+
+ @abc.abstractmethod
+ def find_distributions(self, context=Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching the ``context``,
+ a DistributionFinder.Context instance.
+ """
+
+
+@install
+class MetadataPathFinder(NullFinder, DistributionFinder):
+ """A degenerate finder for distribution packages on the file system.
+
+ This finder supplies only a find_distributions() method for versions
+ of Python that do not have a PathFinder find_distributions().
+ """
+
+ def find_distributions(self, context=DistributionFinder.Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching ``context.name``
+ (or all names if ``None`` indicated) along the paths in the list
+ of directories ``context.path``.
+ """
+ found = self._search_paths(context.pattern, context.path)
+ return map(PathDistribution, found)
+
+ @classmethod
+ def _search_paths(cls, pattern, paths):
+ """Find metadata directories in paths heuristically."""
+ return itertools.chain.from_iterable(
+ cls._search_path(path, pattern)
+ for path in map(cls._switch_path, paths)
+ )
+
+ @staticmethod
+ def _switch_path(path):
+ if not PYPY_OPEN_BUG or os.path.isfile(path): # pragma: no branch
+ with suppress(Exception):
+ return zipp.Path(path)
+ return pathlib.Path(path)
+
+ @classmethod
+ def _matches_info(cls, normalized, item):
+ template = r'{pattern}(-.*)?\.(dist|egg)-info'
+ manifest = template.format(pattern=normalized)
+ return re.match(manifest, item.name, flags=re.IGNORECASE)
+
+ @classmethod
+ def _matches_legacy(cls, normalized, item):
+ template = r'{pattern}-.*\.egg[\\/]EGG-INFO'
+ manifest = template.format(pattern=normalized)
+ return re.search(manifest, str(item), flags=re.IGNORECASE)
+
+ @classmethod
+ def _search_path(cls, root, pattern):
+ if not root.is_dir():
+ return ()
+ normalized = pattern.replace('-', '_')
+ return (item for item in root.iterdir()
+ if cls._matches_info(normalized, item)
+ or cls._matches_legacy(normalized, item))
+
+
+class PathDistribution(Distribution):
+ def __init__(self, path):
+ """Construct a distribution from a path to the metadata directory.
+
+ :param path: A pathlib.Path or similar object supporting
+ .joinpath(), __div__, .parent, and .read_text().
+ """
+ self._path = path
+
+ def read_text(self, filename):
+ with suppress(FileNotFoundError, IsADirectoryError, KeyError,
+ NotADirectoryError, PermissionError):
+ return self._path.joinpath(filename).read_text(encoding='utf-8')
+ read_text.__doc__ = Distribution.read_text.__doc__
+
+ def locate_file(self, path):
+ return self._path.parent / path
+
+
+def distribution(distribution_name):
+ """Get the ``Distribution`` instance for the named package.
+
+ :param distribution_name: The name of the distribution package as a string.
+ :return: A ``Distribution`` instance (or subclass thereof).
+ """
+ return Distribution.from_name(distribution_name)
+
+
+def distributions(**kwargs):
+ """Get all ``Distribution`` instances in the current environment.
+
+ :return: An iterable of ``Distribution`` instances.
+ """
+ return Distribution.discover(**kwargs)
+
+
+def metadata(distribution_name):
+ """Get the metadata for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: An email.Message containing the parsed metadata.
+ """
+ return Distribution.from_name(distribution_name).metadata
+
+
+def version(distribution_name):
+ """Get the version string for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: The version string for the package as defined in the package's
+ "Version" metadata key.
+ """
+ return distribution(distribution_name).version
+
+
+def entry_points():
+ """Return EntryPoint objects for all installed packages.
+
+ :return: EntryPoint objects for all installed packages.
+ """
+ eps = itertools.chain.from_iterable(
+ dist.entry_points for dist in distributions())
+ by_group = operator.attrgetter('group')
+ ordered = sorted(eps, key=by_group)
+ grouped = itertools.groupby(ordered, by_group)
+ return {
+ group: tuple(eps)
+ for group, eps in grouped
+ }
+
+
+def files(distribution_name):
+ """Return a list of files for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: List of files composing the distribution.
+ """
+ return distribution(distribution_name).files
+
+
+def requires(distribution_name):
+ """
+ Return a list of requirements for the named package.
+
+ :return: An iterator of requirements, suitable for
+ packaging.requirement.Requirement.
+ """
+ return distribution(distribution_name).requires
+
+
+__version__ = version(__name__)
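
The module-level helpers defined above (distribution, distributions, metadata, version, entry_points, files, requires) form the public API of this backport. The following is a minimal usage sketch, purely illustrative and not part of the vendored file, assuming the backport is importable as importlib_metadata and queried for its own metadata:

    from importlib_metadata import entry_points, files, metadata, requires, version

    print(version('importlib_metadata'))              # e.g. '1.1.3'
    print(metadata('importlib_metadata')['Summary'])  # field from the parsed email.Message
    print(requires('importlib_metadata'))             # list of requirement strings, or None
    print(files('importlib_metadata'))                # list of PackagePath objects, or None
    # entry_points() returns a dict mapping group name -> tuple of EntryPoint
    print(entry_points().get('console_scripts', ()))
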
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/_compat.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/_compat.py
new file mode 100644
index 0000000000..6e663662d2
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/_compat.py
@@ -0,0 +1,143 @@
+from __future__ import absolute_import
+
+import io
+import abc
+import sys
+import email
+
+
+if sys.version_info > (3,): # pragma: nocover
+ import builtins
+ from configparser import ConfigParser
+ from contextlib import suppress
+ FileNotFoundError = builtins.FileNotFoundError
+ IsADirectoryError = builtins.IsADirectoryError
+ NotADirectoryError = builtins.NotADirectoryError
+ PermissionError = builtins.PermissionError
+ map = builtins.map
+else: # pragma: nocover
+ from backports.configparser import ConfigParser
+ from itertools import imap as map # type: ignore
+ from contextlib2 import suppress # noqa
+ FileNotFoundError = IOError, OSError
+ IsADirectoryError = IOError, OSError
+ NotADirectoryError = IOError, OSError
+ PermissionError = IOError, OSError
+
+if sys.version_info > (3, 5): # pragma: nocover
+ import pathlib
+else: # pragma: nocover
+ import pathlib2 as pathlib
+
+try:
+ ModuleNotFoundError = builtins.FileNotFoundError
+except (NameError, AttributeError): # pragma: nocover
+ ModuleNotFoundError = ImportError # type: ignore
+
+
+if sys.version_info >= (3,): # pragma: nocover
+ from importlib.abc import MetaPathFinder
+else: # pragma: nocover
+ class MetaPathFinder(object):
+ __metaclass__ = abc.ABCMeta
+
+
+__metaclass__ = type
+__all__ = [
+ 'install', 'NullFinder', 'MetaPathFinder', 'ModuleNotFoundError',
+ 'pathlib', 'ConfigParser', 'map', 'suppress', 'FileNotFoundError',
+ 'NotADirectoryError', 'email_message_from_string',
+ ]
+
+
+def install(cls):
+ """
+ Class decorator for installation on sys.meta_path.
+
+ Adds the backport DistributionFinder to sys.meta_path and
+ attempts to disable the finder functionality of the stdlib
+ DistributionFinder.
+ """
+ sys.meta_path.append(cls())
+ disable_stdlib_finder()
+ return cls
+
+
+def disable_stdlib_finder():
+ """
+ Give the backport primacy for discovering path-based distributions
+ by monkey-patching the stdlib O_O.
+
+ See #91 for more background for rationale on this sketchy
+ behavior.
+ """
+ def matches(finder):
+ return (
+ finder.__module__ == '_frozen_importlib_external'
+ and hasattr(finder, 'find_distributions')
+ )
+ for finder in filter(matches, sys.meta_path): # pragma: nocover
+ del finder.find_distributions
+
+
+class NullFinder:
+ """
+ A "Finder" (aka "MetaClassFinder") that never finds any modules,
+ but may find distributions.
+ """
+ @staticmethod
+ def find_spec(*args, **kwargs):
+ return None
+
+ # In Python 2, the import system requires finders
+ # to have a find_module() method, but this usage
+ # is deprecated in Python 3 in favor of find_spec().
+ # For the purposes of this finder (i.e. being present
+ # on sys.meta_path but having no other import
+ # system functionality), the two methods are identical.
+ find_module = find_spec
+
+
+def py2_message_from_string(text): # nocoverpy3
+ # Work around https://bugs.python.org/issue25545 where
+ # email.message_from_string cannot handle Unicode on Python 2.
+ io_buffer = io.StringIO(text)
+ return email.message_from_file(io_buffer)
+
+
+email_message_from_string = (
+ py2_message_from_string
+ if sys.version_info < (3,) else
+ email.message_from_string
+ )
+
+# https://bitbucket.org/pypy/pypy/issues/3021/ioopen-directory-leaks-a-file-descriptor
+PYPY_OPEN_BUG = getattr(sys, 'pypy_version_info', (9, 9, 9))[:3] <= (7, 1, 1)
+
+
+def ensure_is_path(ob):
+ """Construct a Path from ob even if it's already one.
+ Specialized for Python 3.4.
+ """
+ if (3,) < sys.version_info < (3, 5):
+ ob = str(ob) # pragma: nocover
+ return pathlib.Path(ob)
+
+
+class PyPy_repr:
+ """
+ Override repr for EntryPoint objects on PyPy to avoid __iter__ access.
+ Ref #97, #102.
+ """
+ affected = hasattr(sys, 'pypy_version_info')
+
+ def __compat_repr__(self): # pragma: nocover
+ def make_param(name):
+ value = getattr(self, name)
+ return '{name}={value!r}'.format(**locals())
+ params = ', '.join(map(make_param, self._fields))
+ return 'EntryPoint({params})'.format(**locals())
+
+ if affected: # pragma: nocover
+ __repr__ = __compat_repr__
+ del affected
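
For orientation, here is an illustrative sketch (not part of the vendored files) of the effect of the install() decorator above: importing the package applies @install to MetadataPathFinder in __init__.py, which appends a finder instance to sys.meta_path and, on interpreters whose stdlib path finder exposes find_distributions, removes that hook so the backport takes precedence.

    import sys
    import importlib_metadata  # importing runs the @install decorator described above

    # After import, one MetadataPathFinder instance is expected on sys.meta_path.
    backport_finders = [
        finder for finder in sys.meta_path
        if type(finder).__name__ == 'MetadataPathFinder'
    ]
    print(backport_finders)
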
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/LICENSE
new file mode 100644
index 0000000000..be7e092b0b
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2017-2019 Jason R. Coombs, Barry Warsaw
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/METADATA
new file mode 100644
index 0000000000..cda63e8502
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/METADATA
@@ -0,0 +1,73 @@
+Metadata-Version: 2.1
+Name: importlib-metadata
+Version: 2.1.1
+Summary: Read metadata from Python packages
+Home-page: http://importlib-metadata.readthedocs.io/
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: Apache Software License
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 2
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7
+Requires-Dist: zipp (>=0.5)
+Requires-Dist: pathlib2 ; python_version < "3"
+Requires-Dist: contextlib2 ; python_version < "3"
+Requires-Dist: configparser (>=3.5) ; python_version < "3"
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: rst.linker ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: packaging ; extra == 'testing'
+Requires-Dist: pep517 ; extra == 'testing'
+Requires-Dist: unittest2 ; (python_version < "3") and extra == 'testing'
+Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing'
+
+=========================
+ ``importlib_metadata``
+=========================
+
+``importlib_metadata`` is a library to access the metadata for a
+Python package.
+
+As of Python 3.8, this functionality has been added to the
+`Python standard library
+<https://docs.python.org/3/library/importlib.metadata.html>`_.
+This package supplies backports of that functionality including
+improvements added to subsequent Python versions.
+
+
+Usage
+=====
+
+See the `online documentation <https://importlib_metadata.readthedocs.io/>`_
+for usage details.
+
+`Finder authors
+<https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can
+also add support for custom package installers. See the above documentation
+for details.
+
+
+Caveats
+=======
+
+This project primarily supports third-party packages installed by PyPA
+tools (or other conforming packages). It does not support:
+
+- Packages in the stdlib.
+- Packages installed without metadata.
+
+Project details
+===============
+
+ * Project home: https://github.com/python/importlib_metadata
+ * Report bugs at: https://github.com/python/importlib_metadata/issues
+ * Code hosting: https://github.com/python/importlib_metadata
+ * Documentation: https://importlib_metadata.readthedocs.io/
+
+
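
As a concrete illustration of the "Finder authors" note in the README above, the sketch below (not part of the vendored wheel) registers a custom finder that serves dist-info directories from a hypothetical local "vendor" directory, reusing the install(), NullFinder, DistributionFinder and PathDistribution helpers shipped in this package. A real finder would also honor context.name; this sketch omits that for brevity.

    import pathlib

    from importlib_metadata import DistributionFinder, PathDistribution
    from importlib_metadata._compat import NullFinder, install


    @install  # appends an instance of this finder to sys.meta_path
    class VendorDirFinder(NullFinder, DistributionFinder):
        """Serve metadata for wheels unpacked under ./vendor (hypothetical layout)."""

        def find_distributions(self, context=DistributionFinder.Context()):
            root = pathlib.Path('vendor')
            if not root.is_dir():
                return iter(())
            return (PathDistribution(p) for p in root.glob('*.dist-info'))
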
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/RECORD
new file mode 100644
index 0000000000..301525c826
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/RECORD
@@ -0,0 +1,7 @@
+importlib_metadata/__init__.py,sha256=uaN7KDLs3-irvgwsxg4VZIuY3ZEo3jhu1dShjE7fR88,19587
+importlib_metadata/_compat.py,sha256=DnM55BbJKFCcZmJOkArmyO76-0g7pA6HEfzSYWXN88k,4417
+importlib_metadata-2.1.1.dist-info/LICENSE,sha256=wNe6dAchmJ1VvVB8D9oTc-gHHadCuaSBAev36sYEM6U,571
+importlib_metadata-2.1.1.dist-info/METADATA,sha256=gBf5nX-Ff6_Ue9dSH4dkWg2FCNHHtQrs6mhunTAac8k,2421
+importlib_metadata-2.1.1.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+importlib_metadata-2.1.1.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19
+importlib_metadata-2.1.1.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/WHEEL
new file mode 100644
index 0000000000..6d38aa0601
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/top_level.txt
new file mode 100644
index 0000000000..bbb07547a1
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+importlib_metadata
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/__init__.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/__init__.py
new file mode 100644
index 0000000000..e296a2c7bd
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/__init__.py
@@ -0,0 +1,644 @@
+from __future__ import unicode_literals, absolute_import
+
+import io
+import os
+import re
+import abc
+import csv
+import sys
+import zipp
+import operator
+import functools
+import itertools
+import posixpath
+import collections
+
+from ._compat import (
+ install,
+ NullFinder,
+ ConfigParser,
+ suppress,
+ map,
+ FileNotFoundError,
+ IsADirectoryError,
+ NotADirectoryError,
+ PermissionError,
+ pathlib,
+ ModuleNotFoundError,
+ MetaPathFinder,
+ email_message_from_string,
+ PyPy_repr,
+ unique_ordered,
+ str,
+ )
+from importlib import import_module
+from itertools import starmap
+
+
+__metaclass__ = type
+
+
+__all__ = [
+ 'Distribution',
+ 'DistributionFinder',
+ 'PackageNotFoundError',
+ 'distribution',
+ 'distributions',
+ 'entry_points',
+ 'files',
+ 'metadata',
+ 'requires',
+ 'version',
+ ]
+
+
+class PackageNotFoundError(ModuleNotFoundError):
+ """The package was not found."""
+
+ def __str__(self):
+ tmpl = "No package metadata was found for {self.name}"
+ return tmpl.format(**locals())
+
+ @property
+ def name(self):
+ name, = self.args
+ return name
+
+
+class EntryPoint(
+ PyPy_repr,
+ collections.namedtuple('EntryPointBase', 'name value group')):
+ """An entry point as defined by Python packaging conventions.
+
+ See `the packaging docs on entry points
+ <https://packaging.python.org/specifications/entry-points/>`_
+ for more information.
+ """
+
+ pattern = re.compile(
+ r'(?P<module>[\w.]+)\s*'
+ r'(:\s*(?P<attr>[\w.]+))?\s*'
+ r'(?P<extras>\[.*\])?\s*$'
+ )
+ """
+ A regular expression describing the syntax for an entry point,
+ which might look like:
+
+ - module
+ - package.module
+ - package.module:attribute
+ - package.module:object.attribute
+ - package.module:attr [extra1, extra2]
+
+ Other combinations are possible as well.
+
+ The expression is lenient about whitespace around the ':',
+ following the attr, and following any extras.
+ """
+
+ def load(self):
+ """Load the entry point from its definition. If only a module
+ is indicated by the value, return that module. Otherwise,
+ return the named object.
+ """
+ match = self.pattern.match(self.value)
+ module = import_module(match.group('module'))
+ attrs = filter(None, (match.group('attr') or '').split('.'))
+ return functools.reduce(getattr, attrs, module)
+
+ @property
+ def module(self):
+ match = self.pattern.match(self.value)
+ return match.group('module')
+
+ @property
+ def attr(self):
+ match = self.pattern.match(self.value)
+ return match.group('attr')
+
+ @property
+ def extras(self):
+ match = self.pattern.match(self.value)
+ return list(re.finditer(r'\w+', match.group('extras') or ''))
+
+ @classmethod
+ def _from_config(cls, config):
+ return [
+ cls(name, value, group)
+ for group in config.sections()
+ for name, value in config.items(group)
+ ]
+
+ @classmethod
+ def _from_text(cls, text):
+ config = ConfigParser(delimiters='=')
+ # case sensitive: https://stackoverflow.com/q/1611799/812183
+ config.optionxform = str
+ try:
+ config.read_string(text)
+ except AttributeError: # pragma: nocover
+ # Python 2 has no read_string
+ config.readfp(io.StringIO(text))
+ return EntryPoint._from_config(config)
+
+ def __iter__(self):
+ """
+ Supply iter so one may construct dicts of EntryPoints easily.
+ """
+ return iter((self.name, self))
+
+ def __reduce__(self):
+ return (
+ self.__class__,
+ (self.name, self.value, self.group),
+ )
+
+
+class PackagePath(pathlib.PurePosixPath):
+ """A reference to a path in a package"""
+
+ def read_text(self, encoding='utf-8'):
+ with self.locate().open(encoding=encoding) as stream:
+ return stream.read()
+
+ def read_binary(self):
+ with self.locate().open('rb') as stream:
+ return stream.read()
+
+ def locate(self):
+ """Return a path-like object for this path"""
+ return self.dist.locate_file(self)
+
+
+class FileHash:
+ def __init__(self, spec):
+ self.mode, _, self.value = spec.partition('=')
+
+ def __repr__(self):
+ return '<FileHash mode: {} value: {}>'.format(self.mode, self.value)
+
+
+class Distribution:
+ """A Python distribution package."""
+
+ @abc.abstractmethod
+ def read_text(self, filename):
+ """Attempt to load metadata file given by the name.
+
+ :param filename: The name of the file in the distribution info.
+ :return: The text if found, otherwise None.
+ """
+
+ @abc.abstractmethod
+ def locate_file(self, path):
+ """
+ Given a path to a file in this distribution, return a path
+ to it.
+ """
+
+ @classmethod
+ def from_name(cls, name):
+ """Return the Distribution for the given package name.
+
+ :param name: The name of the distribution package to search for.
+ :return: The Distribution instance (or subclass thereof) for the named
+ package, if found.
+ :raises PackageNotFoundError: When the named package's distribution
+ metadata cannot be found.
+ """
+ for resolver in cls._discover_resolvers():
+ dists = resolver(DistributionFinder.Context(name=name))
+ dist = next(iter(dists), None)
+ if dist is not None:
+ return dist
+ else:
+ raise PackageNotFoundError(name)
+
+ @classmethod
+ def discover(cls, **kwargs):
+ """Return an iterable of Distribution objects for all packages.
+
+ Pass a ``context`` or pass keyword arguments for constructing
+ a context.
+
+ :context: A ``DistributionFinder.Context`` object.
+ :return: Iterable of Distribution objects for all packages.
+ """
+ context = kwargs.pop('context', None)
+ if context and kwargs:
+ raise ValueError("cannot accept context and kwargs")
+ context = context or DistributionFinder.Context(**kwargs)
+ return itertools.chain.from_iterable(
+ resolver(context)
+ for resolver in cls._discover_resolvers()
+ )
+
+ @staticmethod
+ def at(path):
+ """Return a Distribution for the indicated metadata path
+
+ :param path: a string or path-like object
+ :return: a concrete Distribution instance for the path
+ """
+ return PathDistribution(pathlib.Path(path))
+
+ @staticmethod
+ def _discover_resolvers():
+ """Search the meta_path for resolvers."""
+ declared = (
+ getattr(finder, 'find_distributions', None)
+ for finder in sys.meta_path
+ )
+ return filter(None, declared)
+
+ @classmethod
+ def _local(cls, root='.'):
+ from pep517 import build, meta
+ system = build.compat_system(root)
+ builder = functools.partial(
+ meta.build,
+ source_dir=root,
+ system=system,
+ )
+ return PathDistribution(zipp.Path(meta.build_as_zip(builder)))
+
+ @property
+ def metadata(self):
+ """Return the parsed metadata for this Distribution.
+
+ The returned object will have keys that name the various bits of
+ metadata. See PEP 566 for details.
+ """
+ text = (
+ self.read_text('METADATA')
+ or self.read_text('PKG-INFO')
+ # This last clause is here to support old egg-info files. Its
+ # effect is to just end up using the PathDistribution's self._path
+ # (which points to the egg-info file) attribute unchanged.
+ or self.read_text('')
+ )
+ return email_message_from_string(text)
+
+ @property
+ def version(self):
+ """Return the 'Version' metadata for the distribution package."""
+ return self.metadata['Version']
+
+ @property
+ def entry_points(self):
+ return EntryPoint._from_text(self.read_text('entry_points.txt'))
+
+ @property
+ def files(self):
+ """Files in this distribution.
+
+ :return: List of PackagePath for this distribution or None
+
+ Result is `None` if the metadata file that enumerates files
+ (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
+ missing.
+ Result may be empty if the metadata exists but is empty.
+ """
+ file_lines = self._read_files_distinfo() or self._read_files_egginfo()
+
+ def make_file(name, hash=None, size_str=None):
+ result = PackagePath(name)
+ result.hash = FileHash(hash) if hash else None
+ result.size = int(size_str) if size_str else None
+ result.dist = self
+ return result
+
+ return file_lines and list(starmap(make_file, csv.reader(file_lines)))
+
+ def _read_files_distinfo(self):
+ """
+ Read the lines of RECORD
+ """
+ text = self.read_text('RECORD')
+ return text and text.splitlines()
+
+ def _read_files_egginfo(self):
+ """
+ SOURCES.txt might contain literal commas, so wrap each line
+ in quotes.
+ """
+ text = self.read_text('SOURCES.txt')
+ return text and map('"{}"'.format, text.splitlines())
+
+ @property
+ def requires(self):
+ """Generated requirements specified for this Distribution"""
+ reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
+ return reqs and list(reqs)
+
+ def _read_dist_info_reqs(self):
+ return self.metadata.get_all('Requires-Dist')
+
+ def _read_egg_info_reqs(self):
+ source = self.read_text('requires.txt')
+ return source and self._deps_from_requires_text(source)
+
+ @classmethod
+ def _deps_from_requires_text(cls, source):
+ section_pairs = cls._read_sections(source.splitlines())
+ sections = {
+ section: list(map(operator.itemgetter('line'), results))
+ for section, results in
+ itertools.groupby(section_pairs, operator.itemgetter('section'))
+ }
+ return cls._convert_egg_info_reqs_to_simple_reqs(sections)
+
+ @staticmethod
+ def _read_sections(lines):
+ section = None
+ for line in filter(None, lines):
+ section_match = re.match(r'\[(.*)\]$', line)
+ if section_match:
+ section = section_match.group(1)
+ continue
+ yield locals()
+
+ @staticmethod
+ def _convert_egg_info_reqs_to_simple_reqs(sections):
+ """
+ Historically, setuptools would solicit and store 'extra'
+ requirements, including those with environment markers,
+ in separate sections. More modern tools expect each
+ dependency to be defined separately, with any relevant
+ extras and environment markers attached directly to that
+ requirement. This method converts the former to the
+ latter. See _test_deps_from_requires_text for an example.
+ """
+ def make_condition(name):
+ return name and 'extra == "{name}"'.format(name=name)
+
+ def parse_condition(section):
+ section = section or ''
+ extra, sep, markers = section.partition(':')
+ if extra and markers:
+ markers = '({markers})'.format(markers=markers)
+ conditions = list(filter(None, [markers, make_condition(extra)]))
+ return '; ' + ' and '.join(conditions) if conditions else ''
+
+ for section, deps in sections.items():
+ for dep in deps:
+ yield dep + parse_condition(section)
+
+
+class DistributionFinder(MetaPathFinder):
+ """
+ A MetaPathFinder capable of discovering installed distributions.
+ """
+
+ class Context:
+ """
+ Keyword arguments presented by the caller to
+ ``distributions()`` or ``Distribution.discover()``
+ to narrow the scope of a search for distributions
+ in all DistributionFinders.
+
+ Each DistributionFinder may expect any parameters
+ and should attempt to honor the canonical
+ parameters defined below when appropriate.
+ """
+
+ name = None
+ """
+ Specific name for which a distribution finder should match.
+ A name of ``None`` matches all distributions.
+ """
+
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+ @property
+ def path(self):
+ """
+ The path that a distribution finder should search.
+
+ Typically refers to Python package paths and defaults
+ to ``sys.path``.
+ """
+ return vars(self).get('path', sys.path)
+
+ @abc.abstractmethod
+ def find_distributions(self, context=Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching the ``context``,
+ a DistributionFinder.Context instance.
+ """
+
+
+class FastPath:
+ """
+ Micro-optimized class for searching a path for
+ children.
+ """
+
+ def __init__(self, root):
+ self.root = str(root)
+ self.base = os.path.basename(self.root).lower()
+
+ def joinpath(self, child):
+ return pathlib.Path(self.root, child)
+
+ def children(self):
+ with suppress(Exception):
+ return os.listdir(self.root or '')
+ with suppress(Exception):
+ return self.zip_children()
+ return []
+
+ def zip_children(self):
+ zip_path = zipp.Path(self.root)
+ names = zip_path.root.namelist()
+ self.joinpath = zip_path.joinpath
+
+ return unique_ordered(
+ child.split(posixpath.sep, 1)[0]
+ for child in names
+ )
+
+ def search(self, name):
+ return (
+ self.joinpath(child)
+ for child in self.children()
+ if name.matches(child, self.base)
+ )
+
+
+class Prepared:
+ """
+ A prepared search for metadata on a possibly-named package.
+ """
+ normalized = None
+ suffixes = '.dist-info', '.egg-info'
+ exact_matches = [''][:0]
+
+ def __init__(self, name):
+ self.name = name
+ if name is None:
+ return
+ self.normalized = self.normalize(name)
+ self.exact_matches = [
+ self.normalized + suffix for suffix in self.suffixes]
+
+ @staticmethod
+ def normalize(name):
+ """
+ PEP 503 normalization plus dashes as underscores.
+ """
+ return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_')
+
+ @staticmethod
+ def legacy_normalize(name):
+ """
+ Normalize the package name as found in the convention in
+ older packaging tools versions and specs.
+ """
+ return name.lower().replace('-', '_')
+
+ def matches(self, cand, base):
+ low = cand.lower()
+ pre, ext = os.path.splitext(low)
+ name, sep, rest = pre.partition('-')
+ return (
+ low in self.exact_matches
+ or ext in self.suffixes and (
+ not self.normalized or
+ name.replace('.', '_') == self.normalized
+ )
+ # legacy case:
+ or self.is_egg(base) and low == 'egg-info'
+ )
+
+ def is_egg(self, base):
+ normalized = self.legacy_normalize(self.name or '')
+ prefix = normalized + '-' if normalized else ''
+ versionless_egg_name = normalized + '.egg' if self.name else ''
+ return (
+ base == versionless_egg_name
+ or base.startswith(prefix)
+ and base.endswith('.egg'))
+
+
+@install
+class MetadataPathFinder(NullFinder, DistributionFinder):
+ """A degenerate finder for distribution packages on the file system.
+
+ This finder supplies only a find_distributions() method for versions
+ of Python that do not have a PathFinder find_distributions().
+ """
+
+ def find_distributions(self, context=DistributionFinder.Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching ``context.name``
+ (or all names if ``None`` indicated) along the paths in the list
+ of directories ``context.path``.
+ """
+ found = self._search_paths(context.name, context.path)
+ return map(PathDistribution, found)
+
+ @classmethod
+ def _search_paths(cls, name, paths):
+ """Find metadata directories in paths heuristically."""
+ return itertools.chain.from_iterable(
+ path.search(Prepared(name))
+ for path in map(FastPath, paths)
+ )
+
+
+class PathDistribution(Distribution):
+ def __init__(self, path):
+ """Construct a distribution from a path to the metadata directory.
+
+ :param path: A pathlib.Path or similar object supporting
+ .joinpath(), __div__, .parent, and .read_text().
+ """
+ self._path = path
+
+ def read_text(self, filename):
+ with suppress(FileNotFoundError, IsADirectoryError, KeyError,
+ NotADirectoryError, PermissionError):
+ return self._path.joinpath(filename).read_text(encoding='utf-8')
+ read_text.__doc__ = Distribution.read_text.__doc__
+
+ def locate_file(self, path):
+ return self._path.parent / path
+
+
+def distribution(distribution_name):
+ """Get the ``Distribution`` instance for the named package.
+
+ :param distribution_name: The name of the distribution package as a string.
+ :return: A ``Distribution`` instance (or subclass thereof).
+ """
+ return Distribution.from_name(distribution_name)
+
+
+def distributions(**kwargs):
+ """Get all ``Distribution`` instances in the current environment.
+
+ :return: An iterable of ``Distribution`` instances.
+ """
+ return Distribution.discover(**kwargs)
+
+
+def metadata(distribution_name):
+ """Get the metadata for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: An email.Message containing the parsed metadata.
+ """
+ return Distribution.from_name(distribution_name).metadata
+
+
+def version(distribution_name):
+ """Get the version string for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: The version string for the package as defined in the package's
+ "Version" metadata key.
+ """
+ return distribution(distribution_name).version
+
+
+def entry_points():
+ """Return EntryPoint objects for all installed packages.
+
+ :return: EntryPoint objects for all installed packages.
+ """
+ eps = itertools.chain.from_iterable(
+ dist.entry_points for dist in distributions())
+ by_group = operator.attrgetter('group')
+ ordered = sorted(eps, key=by_group)
+ grouped = itertools.groupby(ordered, by_group)
+ return {
+ group: tuple(eps)
+ for group, eps in grouped
+ }
+
+
+def files(distribution_name):
+ """Return a list of files for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: List of files composing the distribution.
+ """
+ return distribution(distribution_name).files
+
+
+def requires(distribution_name):
+ """
+ Return a list of requirements for the named package.
+
+ :return: An iterator of requirements, suitable for
+ packaging.requirement.Requirement.
+ """
+ return distribution(distribution_name).requires
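
The EntryPoint class above exposes the parsed pieces of an entry point definition through its module, attr and extras properties, and load() resolves the named object by importing the module and walking the attribute path with getattr. An illustrative sketch, using a hypothetical entry point value and not part of the vendored file:

    from importlib_metadata import EntryPoint

    ep = EntryPoint(name='main',
                    value='pkg.mod:obj.attr [extra1,extra2]',
                    group='console_scripts')
    print(ep.module)                       # 'pkg.mod'
    print(ep.attr)                         # 'obj.attr'
    print([m.group() for m in ep.extras])  # ['extra1', 'extra2']
    # ep.load() would import pkg.mod and resolve obj.attr via functools.reduce(getattr, ...);
    # it is not called here because pkg.mod is hypothetical.
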
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/_compat.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/_compat.py
new file mode 100644
index 0000000000..303d4a22e8
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/_compat.py
@@ -0,0 +1,152 @@
+from __future__ import absolute_import, unicode_literals
+
+import io
+import abc
+import sys
+import email
+
+
+if sys.version_info > (3,): # pragma: nocover
+ import builtins
+ from configparser import ConfigParser
+ import contextlib
+ FileNotFoundError = builtins.FileNotFoundError
+ IsADirectoryError = builtins.IsADirectoryError
+ NotADirectoryError = builtins.NotADirectoryError
+ PermissionError = builtins.PermissionError
+ map = builtins.map
+ from itertools import filterfalse
+else: # pragma: nocover
+ from backports.configparser import ConfigParser
+ from itertools import imap as map # type: ignore
+ from itertools import ifilterfalse as filterfalse
+ import contextlib2 as contextlib
+ FileNotFoundError = IOError, OSError
+ IsADirectoryError = IOError, OSError
+ NotADirectoryError = IOError, OSError
+ PermissionError = IOError, OSError
+
+str = type('')
+
+suppress = contextlib.suppress
+
+if sys.version_info > (3, 5): # pragma: nocover
+ import pathlib
+else: # pragma: nocover
+ import pathlib2 as pathlib
+
+try:
+ ModuleNotFoundError = builtins.FileNotFoundError
+except (NameError, AttributeError): # pragma: nocover
+ ModuleNotFoundError = ImportError # type: ignore
+
+
+if sys.version_info >= (3,): # pragma: nocover
+ from importlib.abc import MetaPathFinder
+else: # pragma: nocover
+ class MetaPathFinder(object):
+ __metaclass__ = abc.ABCMeta
+
+
+__metaclass__ = type
+__all__ = [
+ 'install', 'NullFinder', 'MetaPathFinder', 'ModuleNotFoundError',
+ 'pathlib', 'ConfigParser', 'map', 'suppress', 'FileNotFoundError',
+ 'NotADirectoryError', 'email_message_from_string',
+ ]
+
+
+def install(cls):
+ """
+ Class decorator for installation on sys.meta_path.
+
+ Adds the backport DistributionFinder to sys.meta_path and
+ attempts to disable the finder functionality of the stdlib
+ DistributionFinder.
+ """
+ sys.meta_path.append(cls())
+ disable_stdlib_finder()
+ return cls
+
+
+def disable_stdlib_finder():
+ """
+ Give the backport primacy for discovering path-based distributions
+ by monkey-patching the stdlib O_O.
+
+ See #91 for more background for rationale on this sketchy
+ behavior.
+ """
+ def matches(finder):
+ return (
+ getattr(finder, '__module__', None) == '_frozen_importlib_external'
+ and hasattr(finder, 'find_distributions')
+ )
+ for finder in filter(matches, sys.meta_path): # pragma: nocover
+ del finder.find_distributions
+
+
+class NullFinder:
+ """
+ A "Finder" (aka "MetaClassFinder") that never finds any modules,
+ but may find distributions.
+ """
+ @staticmethod
+ def find_spec(*args, **kwargs):
+ return None
+
+ # In Python 2, the import system requires finders
+ # to have a find_module() method, but this usage
+ # is deprecated in Python 3 in favor of find_spec().
+ # For the purposes of this finder (i.e. being present
+ # on sys.meta_path but having no other import
+ # system functionality), the two methods are identical.
+ find_module = find_spec
+
+
+def py2_message_from_string(text): # nocoverpy3
+ # Work around https://bugs.python.org/issue25545 where
+ # email.message_from_string cannot handle Unicode on Python 2.
+ io_buffer = io.StringIO(text)
+ return email.message_from_file(io_buffer)
+
+
+email_message_from_string = (
+ py2_message_from_string
+ if sys.version_info < (3,) else
+ email.message_from_string
+ )
+
+
+class PyPy_repr:
+ """
+ Override repr for EntryPoint objects on PyPy to avoid __iter__ access.
+ Ref #97, #102.
+ """
+ affected = hasattr(sys, 'pypy_version_info')
+
+ def __compat_repr__(self): # pragma: nocover
+ def make_param(name):
+ value = getattr(self, name)
+ return '{name}={value!r}'.format(**locals())
+ params = ', '.join(map(make_param, self._fields))
+ return 'EntryPoint({params})'.format(**locals())
+
+ if affected: # pragma: nocover
+ __repr__ = __compat_repr__
+ del affected
+
+
+# from itertools recipes
+def unique_everseen(iterable): # pragma: nocover
+ "List unique elements, preserving order. Remember all elements ever seen."
+ seen = set()
+ seen_add = seen.add
+
+ for element in filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+
+
+unique_ordered = (
+ unique_everseen if sys.version_info < (3, 7) else dict.fromkeys)
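
The _convert_egg_info_reqs_to_simple_reqs docstring in the 2.1.1 __init__.py above describes how sectioned requires.txt data from egg-info metadata is rewritten into PEP 508-style requirements with extras and markers attached. The sketch below exercises the private _deps_from_requires_text helper purely for illustration; the requires.txt content is made up and this is not part of the vendored file:

    from importlib_metadata import Distribution

    requires_txt = """\
    packaging

    [testing]
    pytest

    [docs:python_version < "3"]
    sphinx
    """

    print(list(Distribution._deps_from_requires_text(requires_txt)))
    # ['packaging',
    #  'pytest; extra == "testing"',
    #  'sphinx; (python_version < "3") and extra == "docs"']
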
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/LICENSE
new file mode 100644
index 0000000000..be7e092b0b
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2017-2019 Jason R. Coombs, Barry Warsaw
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/METADATA
new file mode 100644
index 0000000000..ce9f563a78
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/METADATA
@@ -0,0 +1,94 @@
+Metadata-Version: 2.1
+Name: importlib-metadata
+Version: 3.1.1
+Summary: Read metadata from Python packages
+Home-page: https://github.com/python/importlib_metadata
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.6
+Requires-Dist: zipp (>=0.5)
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=3.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (!=3.7.3,>=3.5) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=1.2.3) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: jaraco.test (>=3.2.0) ; extra == 'testing'
+Requires-Dist: packaging ; extra == 'testing'
+Requires-Dist: pep517 ; extra == 'testing'
+Requires-Dist: pyfakefs ; extra == 'testing'
+Requires-Dist: flufl.flake8 ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/importlib_metadata.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/importlib_metadata
+
+.. image:: https://github.com/python/importlib_metadata/workflows/Automated%20Tests/badge.svg
+ :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22Automated+Tests%22
+ :alt: Automated Tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest
+ :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest
+
+
+``importlib_metadata`` is a library to access the metadata for a
+Python package.
+
+As of Python 3.8, this functionality has been added to the
+`Python standard library
+<https://docs.python.org/3/library/importlib.metadata.html>`_.
+This package supplies backports of that functionality including
+improvements added to subsequent Python versions.
+
+
+Usage
+=====
+
+See the `online documentation <https://importlib_metadata.readthedocs.io/>`_
+for usage details.
+
+`Finder authors
+<https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can
+also add support for custom package installers. See the above documentation
+for details.
+
+
+Caveats
+=======
+
+This project primarily supports third-party packages installed by PyPA
+tools (or other conforming packages). It does not support:
+
+- Packages in the stdlib.
+- Packages installed without metadata.
+
+Project details
+===============
+
+ * Project home: https://github.com/python/importlib_metadata
+ * Report bugs at: https://github.com/python/importlib_metadata/issues
+ * Code hosting: https://github.com/python/importlib_metadata
+ * Documentation: https://importlib_metadata.readthedocs.io/
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/RECORD
new file mode 100644
index 0000000000..89bbf7e386
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/RECORD
@@ -0,0 +1,7 @@
+importlib_metadata/__init__.py,sha256=QM4Oo096u6JYeokkDUwHgazI_h3o0w9tISPjHtVko_U,19266
+importlib_metadata/_compat.py,sha256=OS4joET_vaQClxhumw0NWYdS5N3FX1Ii895aZXLpQaA,2028
+importlib_metadata-3.1.1.dist-info/LICENSE,sha256=wNe6dAchmJ1VvVB8D9oTc-gHHadCuaSBAev36sYEM6U,571
+importlib_metadata-3.1.1.dist-info/METADATA,sha256=rdblRVlpAdjDcYkqWhn2yVNwrpBqpamdKvxrgA6EWE0,3442
+importlib_metadata-3.1.1.dist-info/WHEEL,sha256=gm79cMopkncyn0iSnI0vQNiDJ8t9on0H4_iz-CrpXMk,92
+importlib_metadata-3.1.1.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19
+importlib_metadata-3.1.1.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/WHEEL
new file mode 100644
index 0000000000..0863016bc2
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/top_level.txt
new file mode 100644
index 0000000000..bbb07547a1
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+importlib_metadata
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/__init__.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/__init__.py
new file mode 100644
index 0000000000..eec9195367
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/__init__.py
@@ -0,0 +1,631 @@
+import io
+import os
+import re
+import abc
+import csv
+import sys
+import zipp
+import email
+import pathlib
+import operator
+import functools
+import itertools
+import posixpath
+import collections
+
+from ._compat import (
+ NullFinder,
+ PyPy_repr,
+ install,
+)
+
+from configparser import ConfigParser
+from contextlib import suppress
+from importlib import import_module
+from importlib.abc import MetaPathFinder
+from itertools import starmap
+
+
+__all__ = [
+ 'Distribution',
+ 'DistributionFinder',
+ 'PackageNotFoundError',
+ 'distribution',
+ 'distributions',
+ 'entry_points',
+ 'files',
+ 'metadata',
+ 'requires',
+ 'version',
+]
+
+
+class PackageNotFoundError(ModuleNotFoundError):
+ """The package was not found."""
+
+ def __str__(self):
+ tmpl = "No package metadata was found for {self.name}"
+ return tmpl.format(**locals())
+
+ @property
+ def name(self):
+ (name,) = self.args
+ return name
+
+
+class EntryPoint(
+ PyPy_repr, collections.namedtuple('EntryPointBase', 'name value group')
+):
+ """An entry point as defined by Python packaging conventions.
+
+ See `the packaging docs on entry points
+ <https://packaging.python.org/specifications/entry-points/>`_
+ for more information.
+ """
+
+ pattern = re.compile(
+ r'(?P<module>[\w.]+)\s*'
+ r'(:\s*(?P<attr>[\w.]+))?\s*'
+ r'(?P<extras>\[.*\])?\s*$'
+ )
+ """
+ A regular expression describing the syntax for an entry point,
+ which might look like:
+
+ - module
+ - package.module
+ - package.module:attribute
+ - package.module:object.attribute
+ - package.module:attr [extra1, extra2]
+
+ Other combinations are possible as well.
+
+ The expression is lenient about whitespace around the ':',
+ following the attr, and following any extras.
+ """
+
+ def load(self):
+ """Load the entry point from its definition. If only a module
+ is indicated by the value, return that module. Otherwise,
+ return the named object.
+ """
+ match = self.pattern.match(self.value)
+ module = import_module(match.group('module'))
+ attrs = filter(None, (match.group('attr') or '').split('.'))
+ return functools.reduce(getattr, attrs, module)
+
+ @property
+ def module(self):
+ match = self.pattern.match(self.value)
+ return match.group('module')
+
+ @property
+ def attr(self):
+ match = self.pattern.match(self.value)
+ return match.group('attr')
+
+ @property
+ def extras(self):
+ match = self.pattern.match(self.value)
+ return list(re.finditer(r'\w+', match.group('extras') or ''))
+
+ @classmethod
+ def _from_config(cls, config):
+ return [
+ cls(name, value, group)
+ for group in config.sections()
+ for name, value in config.items(group)
+ ]
+
+ @classmethod
+ def _from_text(cls, text):
+ config = ConfigParser(delimiters='=')
+ # case sensitive: https://stackoverflow.com/q/1611799/812183
+ config.optionxform = str
+ try:
+ config.read_string(text)
+ except AttributeError: # pragma: nocover
+ # Python 2 has no read_string
+ config.readfp(io.StringIO(text))
+ return EntryPoint._from_config(config)
+
+ def __iter__(self):
+ """
+ Supply iter so one may construct dicts of EntryPoints easily.
+ """
+ return iter((self.name, self))
+
+ def __reduce__(self):
+ return (
+ self.__class__,
+ (self.name, self.value, self.group),
+ )
+
+
+class PackagePath(pathlib.PurePosixPath):
+ """A reference to a path in a package"""
+
+ def read_text(self, encoding='utf-8'):
+ with self.locate().open(encoding=encoding) as stream:
+ return stream.read()
+
+ def read_binary(self):
+ with self.locate().open('rb') as stream:
+ return stream.read()
+
+ def locate(self):
+ """Return a path-like object for this path"""
+ return self.dist.locate_file(self)
+
+
+class FileHash:
+ def __init__(self, spec):
+ self.mode, _, self.value = spec.partition('=')
+
+ def __repr__(self):
+ return '<FileHash mode: {} value: {}>'.format(self.mode, self.value)
+
+
+class Distribution:
+ """A Python distribution package."""
+
+ @abc.abstractmethod
+ def read_text(self, filename):
+ """Attempt to load metadata file given by the name.
+
+ :param filename: The name of the file in the distribution info.
+ :return: The text if found, otherwise None.
+ """
+
+ @abc.abstractmethod
+ def locate_file(self, path):
+ """
+ Given a path to a file in this distribution, return a path
+ to it.
+ """
+
+ @classmethod
+ def from_name(cls, name):
+ """Return the Distribution for the given package name.
+
+ :param name: The name of the distribution package to search for.
+ :return: The Distribution instance (or subclass thereof) for the named
+ package, if found.
+ :raises PackageNotFoundError: When the named package's distribution
+ metadata cannot be found.
+ """
+ for resolver in cls._discover_resolvers():
+ dists = resolver(DistributionFinder.Context(name=name))
+ dist = next(iter(dists), None)
+ if dist is not None:
+ return dist
+ else:
+ raise PackageNotFoundError(name)
+
+ @classmethod
+ def discover(cls, **kwargs):
+ """Return an iterable of Distribution objects for all packages.
+
+ Pass a ``context`` or pass keyword arguments for constructing
+ a context.
+
+ :context: A ``DistributionFinder.Context`` object.
+ :return: Iterable of Distribution objects for all packages.
+ """
+ context = kwargs.pop('context', None)
+ if context and kwargs:
+ raise ValueError("cannot accept context and kwargs")
+ context = context or DistributionFinder.Context(**kwargs)
+ return itertools.chain.from_iterable(
+ resolver(context) for resolver in cls._discover_resolvers()
+ )
+
+ @staticmethod
+ def at(path):
+ """Return a Distribution for the indicated metadata path
+
+ :param path: a string or path-like object
+ :return: a concrete Distribution instance for the path
+ """
+ return PathDistribution(pathlib.Path(path))
+
+ @staticmethod
+ def _discover_resolvers():
+ """Search the meta_path for resolvers."""
+ declared = (
+ getattr(finder, 'find_distributions', None) for finder in sys.meta_path
+ )
+ return filter(None, declared)
+
+ @classmethod
+ def _local(cls, root='.'):
+ from pep517 import build, meta
+
+ system = build.compat_system(root)
+ builder = functools.partial(
+ meta.build,
+ source_dir=root,
+ system=system,
+ )
+ return PathDistribution(zipp.Path(meta.build_as_zip(builder)))
+
+ @property
+ def metadata(self):
+ """Return the parsed metadata for this Distribution.
+
+ The returned object will have keys that name the various bits of
+ metadata. See PEP 566 for details.
+ """
+ text = (
+ self.read_text('METADATA')
+ or self.read_text('PKG-INFO')
+ # This last clause is here to support old egg-info files. Its
+ # effect is to just end up using the PathDistribution's self._path
+ # (which points to the egg-info file) attribute unchanged.
+ or self.read_text('')
+ )
+ return email.message_from_string(text)
+
+ @property
+ def version(self):
+ """Return the 'Version' metadata for the distribution package."""
+ return self.metadata['Version']
+
+ @property
+ def entry_points(self):
+ return EntryPoint._from_text(self.read_text('entry_points.txt'))
+
+ @property
+ def files(self):
+ """Files in this distribution.
+
+ :return: List of PackagePath for this distribution or None
+
+ Result is `None` if the metadata file that enumerates files
+ (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
+ missing.
+ Result may be empty if the metadata exists but is empty.
+ """
+ file_lines = self._read_files_distinfo() or self._read_files_egginfo()
+
+ def make_file(name, hash=None, size_str=None):
+ result = PackagePath(name)
+ result.hash = FileHash(hash) if hash else None
+ result.size = int(size_str) if size_str else None
+ result.dist = self
+ return result
+
+ return file_lines and list(starmap(make_file, csv.reader(file_lines)))
+
+ def _read_files_distinfo(self):
+ """
+ Read the lines of RECORD
+ """
+ text = self.read_text('RECORD')
+ return text and text.splitlines()
+
+ def _read_files_egginfo(self):
+ """
+ SOURCES.txt might contain literal commas, so wrap each line
+ in quotes.
+ """
+ text = self.read_text('SOURCES.txt')
+ return text and map('"{}"'.format, text.splitlines())
+
+ @property
+ def requires(self):
+ """Generated requirements specified for this Distribution"""
+ reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
+ return reqs and list(reqs)
+
+ def _read_dist_info_reqs(self):
+ return self.metadata.get_all('Requires-Dist')
+
+ def _read_egg_info_reqs(self):
+ source = self.read_text('requires.txt')
+ return source and self._deps_from_requires_text(source)
+
+ @classmethod
+ def _deps_from_requires_text(cls, source):
+ section_pairs = cls._read_sections(source.splitlines())
+ sections = {
+ section: list(map(operator.itemgetter('line'), results))
+ for section, results in itertools.groupby(
+ section_pairs, operator.itemgetter('section')
+ )
+ }
+ return cls._convert_egg_info_reqs_to_simple_reqs(sections)
+
+ @staticmethod
+ def _read_sections(lines):
+ section = None
+ for line in filter(None, lines):
+ section_match = re.match(r'\[(.*)\]$', line)
+ if section_match:
+ section = section_match.group(1)
+ continue
+ yield locals()
+
+ @staticmethod
+ def _convert_egg_info_reqs_to_simple_reqs(sections):
+ """
+ Historically, setuptools would solicit and store 'extra'
+ requirements, including those with environment markers,
+ in separate sections. More modern tools expect each
+ dependency to be defined separately, with any relevant
+ extras and environment markers attached directly to that
+ requirement. This method converts the former to the
+ latter. See _test_deps_from_requires_text for an example.
+ """
+
+ def make_condition(name):
+ return name and 'extra == "{name}"'.format(name=name)
+
+ def parse_condition(section):
+ section = section or ''
+ extra, sep, markers = section.partition(':')
+ if extra and markers:
+ markers = '({markers})'.format(markers=markers)
+ conditions = list(filter(None, [markers, make_condition(extra)]))
+ return '; ' + ' and '.join(conditions) if conditions else ''
+
+ for section, deps in sections.items():
+ for dep in deps:
+ yield dep + parse_condition(section)
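+
+    # Editorial note (illustrative, not part of upstream): the conversion above
+    # turns sectioned requires.txt input such as
+    #
+    #     [test:python_version < "3"]
+    #     mock
+    #
+    # into flat, marker-annotated requirements, e.g.
+    #
+    #     mock; (python_version < "3") and extra == "test"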
+
+
+class DistributionFinder(MetaPathFinder):
+ """
+ A MetaPathFinder capable of discovering installed distributions.
+ """
+
+ class Context:
+ """
+ Keyword arguments presented by the caller to
+ ``distributions()`` or ``Distribution.discover()``
+ to narrow the scope of a search for distributions
+ in all DistributionFinders.
+
+ Each DistributionFinder may expect any parameters
+ and should attempt to honor the canonical
+ parameters defined below when appropriate.
+ """
+
+ name = None
+ """
+ Specific name for which a distribution finder should match.
+ A name of ``None`` matches all distributions.
+ """
+
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+ @property
+ def path(self):
+ """
+ The path that a distribution finder should search.
+
+ Typically refers to Python package paths and defaults
+ to ``sys.path``.
+ """
+ return vars(self).get('path', sys.path)
+
+ @abc.abstractmethod
+ def find_distributions(self, context=Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching the ``context``,
+ a DistributionFinder.Context instance.
+ """
+
+
+class FastPath:
+ """
+ Micro-optimized class for searching a path for
+ children.
+ """
+
+ def __init__(self, root):
+ self.root = str(root)
+ self.base = os.path.basename(self.root).lower()
+
+ def joinpath(self, child):
+ return pathlib.Path(self.root, child)
+
+ def children(self):
+ with suppress(Exception):
+ return os.listdir(self.root or '')
+ with suppress(Exception):
+ return self.zip_children()
+ return []
+
+ def zip_children(self):
+ zip_path = zipp.Path(self.root)
+ names = zip_path.root.namelist()
+ self.joinpath = zip_path.joinpath
+
+ return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names)
+
+ def search(self, name):
+ return (
+ self.joinpath(child)
+ for child in self.children()
+ if name.matches(child, self.base)
+ )
+
+
+class Prepared:
+ """
+ A prepared search for metadata on a possibly-named package.
+ """
+
+ normalized = None
+ suffixes = '.dist-info', '.egg-info'
+    exact_matches = [''][:0]  # an empty list of str (class-level default)
+
+ def __init__(self, name):
+ self.name = name
+ if name is None:
+ return
+ self.normalized = self.normalize(name)
+ self.exact_matches = [self.normalized + suffix for suffix in self.suffixes]
+
+ @staticmethod
+ def normalize(name):
+ """
+ PEP 503 normalization plus dashes as underscores.
+ """
+ return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_')
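+
+    # Editorial note (illustrative, not part of upstream):
+    #
+    #     Prepared.normalize('Foo.Bar-baz')  ->  'foo_bar_baz'
+    #
+    # i.e. PEP 503 folding of runs of '-', '_' and '.', followed by swapping
+    # the resulting dashes for underscores to match on-disk *.dist-info names.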
+
+ @staticmethod
+ def legacy_normalize(name):
+ """
+        Normalize the package name according to the convention used by
+        older packaging tool versions and specs.
+ """
+ return name.lower().replace('-', '_')
+
+ def matches(self, cand, base):
+ low = cand.lower()
+ pre, ext = os.path.splitext(low)
+ name, sep, rest = pre.partition('-')
+ return (
+ low in self.exact_matches
+ or ext in self.suffixes
+ and (not self.normalized or name.replace('.', '_') == self.normalized)
+ # legacy case:
+ or self.is_egg(base)
+ and low == 'egg-info'
+ )
+
+ def is_egg(self, base):
+ normalized = self.legacy_normalize(self.name or '')
+ prefix = normalized + '-' if normalized else ''
+ versionless_egg_name = normalized + '.egg' if self.name else ''
+ return (
+ base == versionless_egg_name
+ or base.startswith(prefix)
+ and base.endswith('.egg')
+ )
+
+
+@install
+class MetadataPathFinder(NullFinder, DistributionFinder):
+ """A degenerate finder for distribution packages on the file system.
+
+ This finder supplies only a find_distributions() method for versions
+ of Python that do not have a PathFinder find_distributions().
+ """
+
+ def find_distributions(self, context=DistributionFinder.Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching ``context.name``
+ (or all names if ``None`` indicated) along the paths in the list
+ of directories ``context.path``.
+ """
+ found = self._search_paths(context.name, context.path)
+ return map(PathDistribution, found)
+
+ @classmethod
+ def _search_paths(cls, name, paths):
+ """Find metadata directories in paths heuristically."""
+ return itertools.chain.from_iterable(
+ path.search(Prepared(name)) for path in map(FastPath, paths)
+ )
+
+
+class PathDistribution(Distribution):
+ def __init__(self, path):
+ """Construct a distribution from a path to the metadata directory.
+
+ :param path: A pathlib.Path or similar object supporting
+ .joinpath(), __div__, .parent, and .read_text().
+ """
+ self._path = path
+
+ def read_text(self, filename):
+ with suppress(
+ FileNotFoundError,
+ IsADirectoryError,
+ KeyError,
+ NotADirectoryError,
+ PermissionError,
+ ):
+ return self._path.joinpath(filename).read_text(encoding='utf-8')
+
+ read_text.__doc__ = Distribution.read_text.__doc__
+
+ def locate_file(self, path):
+ return self._path.parent / path
+
+
+def distribution(distribution_name):
+ """Get the ``Distribution`` instance for the named package.
+
+ :param distribution_name: The name of the distribution package as a string.
+ :return: A ``Distribution`` instance (or subclass thereof).
+ """
+ return Distribution.from_name(distribution_name)
+
+
+def distributions(**kwargs):
+ """Get all ``Distribution`` instances in the current environment.
+
+ :return: An iterable of ``Distribution`` instances.
+ """
+ return Distribution.discover(**kwargs)
+
+
+def metadata(distribution_name):
+ """Get the metadata for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: An email.Message containing the parsed metadata.
+ """
+ return Distribution.from_name(distribution_name).metadata
+
+
+def version(distribution_name):
+ """Get the version string for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: The version string for the package as defined in the package's
+ "Version" metadata key.
+ """
+ return distribution(distribution_name).version
+
+
+def entry_points():
+ """Return EntryPoint objects for all installed packages.
+
+    :return: A dict mapping entry point group names to tuples of
+        EntryPoint objects for all installed packages.
+ """
+ eps = itertools.chain.from_iterable(dist.entry_points for dist in distributions())
+ by_group = operator.attrgetter('group')
+ ordered = sorted(eps, key=by_group)
+ grouped = itertools.groupby(ordered, by_group)
+ return {group: tuple(eps) for group, eps in grouped}
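+
+# Editorial note (illustrative, not part of upstream): entry_points() returns a
+# mapping of group name to a tuple of EntryPoint objects, so a lookup such as
+#
+#     entry_points().get('console_scripts', ())
+#
+# yields the console-script entry points of every installed distribution.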
+
+
+def files(distribution_name):
+ """Return a list of files for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+    :return: List of PackagePath objects composing the distribution, or
+        None if the file-enumerating metadata is missing.
+ """
+ return distribution(distribution_name).files
+
+
+def requires(distribution_name):
+ """
+ Return a list of requirements for the named package.
+
+    :return: A list of requirement strings, suitable for
+        packaging.requirements.Requirement, or None if unavailable.
+ """
+ return distribution(distribution_name).requires
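+
+# Editorial note (illustrative, not part of upstream): the module-level helpers
+# above are thin wrappers around Distribution.from_name(); assuming a
+# distribution named 'wheel' is installed, one might see
+#
+#     version('wheel')   # e.g. '0.36.2'
+#     files('wheel')     # list of PackagePath entries, or None
+#     requires('wheel')  # list of requirement strings, or None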
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/_compat.py b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/_compat.py
new file mode 100644
index 0000000000..c1362d5360
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/_compat.py
@@ -0,0 +1,75 @@
+import sys
+
+
+__all__ = ['install', 'NullFinder', 'PyPy_repr']
+
+
+def install(cls):
+ """
+ Class decorator for installation on sys.meta_path.
+
+ Adds the backport DistributionFinder to sys.meta_path and
+ attempts to disable the finder functionality of the stdlib
+ DistributionFinder.
+ """
+ sys.meta_path.append(cls())
+ disable_stdlib_finder()
+ return cls
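+
+# Editorial note (grounded in the vendored sources, shown for context): install
+# is applied as a class decorator, e.g.
+#
+#     @install
+#     class MetadataPathFinder(NullFinder, DistributionFinder):
+#         ...
+#
+# which appends an instance to sys.meta_path at import time and disables the
+# stdlib's competing path-based finder.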
+
+
+def disable_stdlib_finder():
+ """
+ Give the backport primacy for discovering path-based distributions
+ by monkey-patching the stdlib O_O.
+
+    See #91 for more background on the rationale for this sketchy
+    behavior.
+ """
+
+ def matches(finder):
+ return getattr(
+ finder, '__module__', None
+ ) == '_frozen_importlib_external' and hasattr(finder, 'find_distributions')
+
+ for finder in filter(matches, sys.meta_path): # pragma: nocover
+ del finder.find_distributions
+
+
+class NullFinder:
+ """
+ A "Finder" (aka "MetaClassFinder") that never finds any modules,
+ but may find distributions.
+ """
+
+ @staticmethod
+ def find_spec(*args, **kwargs):
+ return None
+
+ # In Python 2, the import system requires finders
+ # to have a find_module() method, but this usage
+ # is deprecated in Python 3 in favor of find_spec().
+ # For the purposes of this finder (i.e. being present
+ # on sys.meta_path but having no other import
+ # system functionality), the two methods are identical.
+ find_module = find_spec
+
+
+class PyPy_repr:
+ """
+ Override repr for EntryPoint objects on PyPy to avoid __iter__ access.
+ Ref #97, #102.
+ """
+
+ affected = hasattr(sys, 'pypy_version_info')
+
+ def __compat_repr__(self): # pragma: nocover
+ def make_param(name):
+ value = getattr(self, name)
+ return '{name}={value!r}'.format(**locals())
+
+ params = ', '.join(map(make_param, self._fields))
+ return 'EntryPoint({params})'.format(**locals())
+
+ if affected: # pragma: nocover
+ __repr__ = __compat_repr__
+ del affected
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/LICENSE
new file mode 100644
index 0000000000..7e4791068d
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2017-2018 Brett Cannon, Barry Warsaw
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/METADATA
new file mode 100644
index 0000000000..8eb23366fa
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/METADATA
@@ -0,0 +1,49 @@
+Metadata-Version: 2.1
+Name: importlib-resources
+Version: 1.0.2
+Summary: Read resources from Python packages
+Home-page: http://importlib-resources.readthedocs.io/
+Author: Barry Warsaw
+Author-email: barry@python.org
+License: Apache Software License
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Requires-Python: >=2.7,!=3.0,!=3.1,!=3.2,!=3.3
+Requires-Dist: pathlib2; python_version < "3"
+Requires-Dist: typing; python_version < "3.5"
+
+=========================
+ ``importlib_resources``
+=========================
+
+``importlib_resources`` is a backport of Python 3.7's standard library
+`importlib.resources
+<https://docs.python.org/3.7/library/importlib.html#module-importlib.resources>`_
+module for Python 2.7, and 3.4 through 3.6. Users of Python 3.7 and beyond
+should use the standard library module, since for these versions,
+``importlib_resources`` just delegates to that module.
+
+The key goal of this module is to replace parts of `pkg_resources
+<https://setuptools.readthedocs.io/en/latest/pkg_resources.html>`_ with a
+solution in Python's stdlib that relies on well-defined APIs. This makes
+reading resources included in packages easier, with more stable and consistent
+semantics.
+
+Note that ``pip 10`` is required if you are going to ``pip install
+importlib_resources``.
+
+
+Project details
+===============
+
+ * Project home: https://gitlab.com/python-devs/importlib_resources
+ * Report bugs at: https://gitlab.com/python-devs/importlib_resources/issues
+ * Code hosting: https://gitlab.com/python-devs/importlib_resources.git
+ * Documentation: http://importlib_resources.readthedocs.io/
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/RECORD
new file mode 100644
index 0000000000..b728f0e05a
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/RECORD
@@ -0,0 +1,39 @@
+importlib_resources/__init__.py,sha256=rzQAetwEwMAwz3RonnegirJDoyexftQNVgVAtPqs91k,1064
+importlib_resources/_compat.py,sha256=ldJ5ebXghEZdDKuFIbigsNuSp4VrBi8a6XAG2tXtebc,581
+importlib_resources/_py2.py,sha256=EypZLeKb03aScgvtpzJxcr-E6CjL8DJLWTls8ql3QVY,11601
+importlib_resources/_py3.py,sha256=hUfpyjcsu13D57VyJKSRzYnA4RTp7FaQYwPous33yEk,12882
+importlib_resources/abc.py,sha256=U9Q4qZImO0rpCF9aoV1a5tS1IrXDhrAoT5PUFReSZs0,1946
+importlib_resources/version.txt,sha256=n9KGQtOsoZHlx_wjg8_W-rsqrIdD8Cnau4mJrFhOMbw,6
+importlib_resources/docs/changelog.rst,sha256=uWSJrcIlTNTj2tGRpGLzeaz9eLcM3pu_6yVqnQH_F94,2020
+importlib_resources/docs/conf.py,sha256=x7IPypqIitt3ztWBP4KKAxDHMfDI6eEVSD1K-fs000w,5557
+importlib_resources/docs/index.rst,sha256=ZgWQVxUPNyYZYUS5pRZXboxfc1-S0z8NBhcCQz0_YTQ,2138
+importlib_resources/docs/migration.rst,sha256=RdJE8S_bh50d6-63UrjrKuojcfYxv2gx3qcyHSy42DA,6329
+importlib_resources/docs/using.rst,sha256=epgk0GWhEwKGWCzL3DvU3GnGalp1jwxiU-XZL5eaC5w,8586
+importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/test_open.py,sha256=yDXmTGXQspByj6WU0prnoVwab1yWWEA3fwz_XIx7TQU,2288
+importlib_resources/tests/test_path.py,sha256=yVYMwuECJiivtubCGnYA0-6e-LSpbnTKjcBHuKk-oMc,1178
+importlib_resources/tests/test_read.py,sha256=DpA7tzxSQlU0_YQuWibB3E5PDL9fQUdzeKoEUGnAx78,2046
+importlib_resources/tests/test_resource.py,sha256=X77DzU2BRoM6d59iEh74zDHHw3pKOBGLCg3lP3dH4BI,6467
+importlib_resources/tests/util.py,sha256=f0RZU-RkEkybJjXRd7C5HcWMsoLFRWJL4FIUF1CJ2wo,6980
+importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
+importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
+importlib_resources/tests/data03/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data03/namespace/resource1.txt,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=gAC1vleFnNtdAHuNyYQ30gvIZ5itNRfZtaF0hxGHAi4,876
+importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=kL-RiB7ndv8FHBCJde6oj34_C90gtrSCYgYk98osm6M,698
+importlib_resources-1.0.2.dist-info/LICENSE,sha256=xS4YxCplVSZiTNBwkotq9YkkHJ8nlkctJpFZvlLA9NM,568
+importlib_resources-1.0.2.dist-info/METADATA,sha256=WiWlAvBr3XA3pXUg2NJ08qHO-NM93m6v1aXlega5BMk,1881
+importlib_resources-1.0.2.dist-info/WHEEL,sha256=CihQvCnsGZQBGAHLEUMf0IdA4fRduS_NBUTMgCTtvPM,110
+importlib_resources-1.0.2.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
+importlib_resources-1.0.2.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/WHEEL
new file mode 100644
index 0000000000..dea0e20ccd
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.32.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/top_level.txt
new file mode 100644
index 0000000000..58ad1bd333
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+importlib_resources
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/__init__.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/__init__.py
new file mode 100644
index 0000000000..fab437a4ad
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/__init__.py
@@ -0,0 +1,36 @@
+"""Read resources contained within a package."""
+
+import sys
+
+
+__all__ = [
+ 'contents',
+ 'is_resource',
+ 'open_binary',
+ 'open_text',
+ 'path',
+ 'read_binary',
+ 'read_text',
+ ]
+
+
+# Use the Python 3.7 stdlib implementation if available.
+if sys.version_info >= (3, 7):
+ from importlib.resources import (
+ Package, Resource, contents, is_resource, open_binary, open_text, path,
+ read_binary, read_text)
+ from importlib.abc import ResourceReader
+ __all__.extend(['Package', 'Resource', 'ResourceReader'])
+elif sys.version_info >= (3,):
+ from importlib_resources._py3 import (
+ Package, Resource, contents, is_resource, open_binary, open_text, path,
+ read_binary, read_text)
+ from importlib_resources.abc import ResourceReader
+ __all__.extend(['Package', 'Resource', 'ResourceReader'])
+else:
+ from importlib_resources._py2 import (
+ contents, is_resource, open_binary, open_text, path, read_binary,
+ read_text)
+
+
+__version__ = read_text('importlib_resources', 'version.txt').strip()
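+
+# Editorial note (illustrative, not part of upstream): whichever implementation
+# is selected above, the public API reads resources relative to a package, e.g.
+#
+#     import importlib_resources
+#     importlib_resources.read_text('importlib_resources', 'version.txt')
+#     # '1.0.2\n'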
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_compat.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_compat.py
new file mode 100644
index 0000000000..28d61276e0
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_compat.py
@@ -0,0 +1,23 @@
+from __future__ import absolute_import
+
+# flake8: noqa
+
+try:
+ from pathlib import Path, PurePath
+except ImportError:
+ from pathlib2 import Path, PurePath # type: ignore
+
+
+try:
+ from abc import ABC # type: ignore
+except ImportError:
+ from abc import ABCMeta
+
+ class ABC(object): # type: ignore
+ __metaclass__ = ABCMeta
+
+
+try:
+ FileNotFoundError = FileNotFoundError # type: ignore
+except NameError:
+ FileNotFoundError = OSError
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py2.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py2.py
new file mode 100644
index 0000000000..376f0e3813
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py2.py
@@ -0,0 +1,270 @@
+import os
+import errno
+import tempfile
+
+from ._compat import FileNotFoundError
+from contextlib import contextmanager
+from importlib import import_module
+from io import BytesIO, TextIOWrapper, open as io_open
+from pathlib2 import Path
+from zipfile import ZipFile
+
+
+def _get_package(package):
+ """Normalize a path by ensuring it is a string.
+
+ If the resulting string contains path separators, an exception is raised.
+ """
+ if isinstance(package, basestring): # noqa: F821
+ module = import_module(package)
+ else:
+ module = package
+ if not hasattr(module, '__path__'):
+ raise TypeError("{!r} is not a package".format(package))
+ return module
+
+
+def _normalize_path(path):
+ """Normalize a path by ensuring it is a string.
+
+ If the resulting string contains path separators, an exception is raised.
+ """
+ str_path = str(path)
+ parent, file_name = os.path.split(str_path)
+ if parent:
+ raise ValueError("{!r} must be only a file name".format(path))
+ else:
+ return file_name
+
+
+def open_binary(package, resource):
+ """Return a file-like object opened for binary reading of the resource."""
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ # Using pathlib doesn't work well here due to the lack of 'strict' argument
+ # for pathlib.Path.resolve() prior to Python 3.6.
+ package_path = os.path.dirname(package.__file__)
+ relative_path = os.path.join(package_path, resource)
+ full_path = os.path.abspath(relative_path)
+ try:
+ return io_open(full_path, 'rb')
+ except IOError:
+ # This might be a package in a zip file. zipimport provides a loader
+ # with a functioning get_data() method, however we have to strip the
+ # archive (i.e. the .zip file's name) off the front of the path. This
+ # is because the zipimport loader in Python 2 doesn't actually follow
+ # PEP 302. It should allow the full path, but actually requires that
+ # the path be relative to the zip file.
+ try:
+ loader = package.__loader__
+ full_path = relative_path[len(loader.archive)+1:]
+ data = loader.get_data(full_path)
+ except (IOError, AttributeError):
+ package_name = package.__name__
+ message = '{!r} resource not found in {!r}'.format(
+ resource, package_name)
+ raise FileNotFoundError(message)
+ else:
+ return BytesIO(data)
+
+
+def open_text(package, resource, encoding='utf-8', errors='strict'):
+ """Return a file-like object opened for text reading of the resource."""
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ # Using pathlib doesn't work well here due to the lack of 'strict' argument
+ # for pathlib.Path.resolve() prior to Python 3.6.
+ package_path = os.path.dirname(package.__file__)
+ relative_path = os.path.join(package_path, resource)
+ full_path = os.path.abspath(relative_path)
+ try:
+ return io_open(full_path, mode='r', encoding=encoding, errors=errors)
+ except IOError:
+ # This might be a package in a zip file. zipimport provides a loader
+ # with a functioning get_data() method, however we have to strip the
+ # archive (i.e. the .zip file's name) off the front of the path. This
+ # is because the zipimport loader in Python 2 doesn't actually follow
+ # PEP 302. It should allow the full path, but actually requires that
+ # the path be relative to the zip file.
+ try:
+ loader = package.__loader__
+ full_path = relative_path[len(loader.archive)+1:]
+ data = loader.get_data(full_path)
+ except (IOError, AttributeError):
+ package_name = package.__name__
+ message = '{!r} resource not found in {!r}'.format(
+ resource, package_name)
+ raise FileNotFoundError(message)
+ else:
+ return TextIOWrapper(BytesIO(data), encoding, errors)
+
+
+def read_binary(package, resource):
+ """Return the binary contents of the resource."""
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ with open_binary(package, resource) as fp:
+ return fp.read()
+
+
+def read_text(package, resource, encoding='utf-8', errors='strict'):
+ """Return the decoded string of the resource.
+
+ The decoding-related arguments have the same semantics as those of
+ bytes.decode().
+ """
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ with open_text(package, resource, encoding, errors) as fp:
+ return fp.read()
+
+
+@contextmanager
+def path(package, resource):
+ """A context manager providing a file path object to the resource.
+
+ If the resource does not already exist on its own on the file system,
+ a temporary file will be created. If the file was created, the file
+ will be deleted upon exiting the context manager (no exception is
+ raised if the file was deleted prior to the context manager
+ exiting).
+ """
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ package_directory = Path(package.__file__).parent
+ file_path = package_directory / resource
+ # If the file actually exists on the file system, just return it.
+ # Otherwise, it's probably in a zip file, so we need to create a temporary
+ # file and copy the contents into that file, hence the contextmanager to
+ # clean up the temp file resource.
+ if file_path.exists():
+ yield file_path
+ else:
+ with open_binary(package, resource) as fp:
+ data = fp.read()
+ # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
+ # blocks due to the need to close the temporary file to work on Windows
+ # properly.
+ fd, raw_path = tempfile.mkstemp()
+ try:
+ os.write(fd, data)
+ os.close(fd)
+ yield Path(raw_path)
+ finally:
+ try:
+ os.remove(raw_path)
+ except FileNotFoundError:
+ pass
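+
+# Editorial note (illustrative, not part of upstream): path() is a context
+# manager, so the usual pattern is
+#
+#     with path('importlib_resources', 'version.txt') as p:
+#         data = p.read_bytes()
+#
+# Any temporary file created for a zipped package is removed on exit.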
+
+
+def is_resource(package, name):
+ """True if name is a resource inside package.
+
+ Directories are *not* resources.
+ """
+ package = _get_package(package)
+ _normalize_path(name)
+ try:
+ package_contents = set(contents(package))
+ except OSError as error:
+ if error.errno not in (errno.ENOENT, errno.ENOTDIR):
+ # We won't hit this in the Python 2 tests, so it'll appear
+ # uncovered. We could mock os.listdir() to return a non-ENOENT or
+ # ENOTDIR, but then we'd have to depend on another external
+ # library since Python 2 doesn't have unittest.mock. It's not
+ # worth it.
+ raise # pragma: nocover
+ return False
+ if name not in package_contents:
+ return False
+ # Just because the given file_name lives as an entry in the package's
+ # contents doesn't necessarily mean it's a resource. Directories are not
+ # resources, so let's try to find out if it's a directory or not.
+ path = Path(package.__file__).parent / name
+ if path.is_file():
+ return True
+ if path.is_dir():
+ return False
+ # If it's not a file and it's not a directory, what is it? Well, this
+ # means the file doesn't exist on the file system, so it probably lives
+ # inside a zip file. We have to crack open the zip, look at its table of
+ # contents, and make sure that this entry doesn't have sub-entries.
+ archive_path = package.__loader__.archive # type: ignore
+ package_directory = Path(package.__file__).parent
+ with ZipFile(archive_path) as zf:
+ toc = zf.namelist()
+ relpath = package_directory.relative_to(archive_path)
+ candidate_path = relpath / name
+ for entry in toc: # pragma: nobranch
+ try:
+ relative_to_candidate = Path(entry).relative_to(candidate_path)
+ except ValueError:
+ # The two paths aren't relative to each other so we can ignore it.
+ continue
+ # Since directories aren't explicitly listed in the zip file, we must
+ # infer their 'directory-ness' by looking at the number of path
+ # components in the path relative to the package resource we're
+ # looking up. If there are zero additional parts, it's a file, i.e. a
+ # resource. If there are more than zero it's a directory, i.e. not a
+ # resource. It has to be one of these two cases.
+ return len(relative_to_candidate.parts) == 0
+ # I think it's impossible to get here. It would mean that we are looking
+ # for a resource in a zip file, there's an entry matching it in the return
+ # value of contents(), but we never actually found it in the zip's table of
+ # contents.
+ raise AssertionError('Impossible situation')
+
+
+def contents(package):
+ """Return an iterable of entries in `package`.
+
+ Note that not all entries are resources. Specifically, directories are
+ not considered resources. Use `is_resource()` on each entry returned here
+ to check if it is a resource or not.
+ """
+ package = _get_package(package)
+ package_directory = Path(package.__file__).parent
+ try:
+ return os.listdir(str(package_directory))
+ except OSError as error:
+ if error.errno not in (errno.ENOENT, errno.ENOTDIR):
+ # We won't hit this in the Python 2 tests, so it'll appear
+ # uncovered. We could mock os.listdir() to return a non-ENOENT or
+ # ENOTDIR, but then we'd have to depend on another external
+ # library since Python 2 doesn't have unittest.mock. It's not
+ # worth it.
+ raise # pragma: nocover
+ # The package is probably in a zip file.
+ archive_path = getattr(package.__loader__, 'archive', None)
+ if archive_path is None:
+ raise
+ relpath = package_directory.relative_to(archive_path)
+ with ZipFile(archive_path) as zf:
+ toc = zf.namelist()
+ subdirs_seen = set() # type: Set
+ subdirs_returned = []
+ for filename in toc:
+ path = Path(filename)
+ # Strip off any path component parts that are in common with the
+ # package directory, relative to the zip archive's file system
+ # path. This gives us all the parts that live under the named
+ # package inside the zip file. If the length of these subparts is
+ # exactly 1, then it is situated inside the package. The resulting
+ # length will be 0 if it's above the package, and it will be
+ # greater than 1 if it lives in a subdirectory of the package
+ # directory.
+ #
+ # However, since directories themselves don't appear in the zip
+ # archive as a separate entry, we need to return the first path
+ # component for any case that has > 1 subparts -- but only once!
+ if path.parts[:len(relpath.parts)] != relpath.parts:
+ continue
+ subparts = path.parts[len(relpath.parts):]
+ if len(subparts) == 1:
+ subdirs_returned.append(subparts[0])
+ elif len(subparts) > 1: # pragma: nobranch
+ subdir = subparts[0]
+ if subdir not in subdirs_seen:
+ subdirs_seen.add(subdir)
+ subdirs_returned.append(subdir)
+ return subdirs_returned
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py3.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py3.py
new file mode 100644
index 0000000000..00781bd918
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py3.py
@@ -0,0 +1,312 @@
+import os
+import sys
+import tempfile
+
+from . import abc as resources_abc
+from contextlib import contextmanager, suppress
+from importlib import import_module
+from importlib.abc import ResourceLoader
+from io import BytesIO, TextIOWrapper
+from pathlib import Path
+from types import ModuleType
+from typing import Iterable, Iterator, Optional, Set, Union # noqa: F401
+from typing import cast
+from typing.io import BinaryIO, TextIO
+from zipfile import ZipFile
+
+
+Package = Union[ModuleType, str]
+if sys.version_info >= (3, 6):
+ Resource = Union[str, os.PathLike] # pragma: <=35
+else:
+ Resource = str # pragma: >=36
+
+
+def _get_package(package) -> ModuleType:
+ """Take a package name or module object and return the module.
+
+ If a name, the module is imported. If the passed or imported module
+ object is not a package, raise an exception.
+ """
+ if hasattr(package, '__spec__'):
+ if package.__spec__.submodule_search_locations is None:
+ raise TypeError('{!r} is not a package'.format(
+ package.__spec__.name))
+ else:
+ return package
+ else:
+ module = import_module(package)
+ if module.__spec__.submodule_search_locations is None:
+ raise TypeError('{!r} is not a package'.format(package))
+ else:
+ return module
+
+
+def _normalize_path(path) -> str:
+ """Normalize a path by ensuring it is a string.
+
+ If the resulting string contains path separators, an exception is raised.
+ """
+ str_path = str(path)
+ parent, file_name = os.path.split(str_path)
+ if parent:
+ raise ValueError('{!r} must be only a file name'.format(path))
+ else:
+ return file_name
+
+
+def _get_resource_reader(
+ package: ModuleType) -> Optional[resources_abc.ResourceReader]:
+ # Return the package's loader if it's a ResourceReader. We can't use
+ # a issubclass() check here because apparently abc.'s __subclasscheck__()
+ # hook wants to create a weak reference to the object, but
+ # zipimport.zipimporter does not support weak references, resulting in a
+ # TypeError. That seems terrible.
+ spec = package.__spec__
+ reader = getattr(spec.loader, 'get_resource_reader', None)
+ if reader is None:
+ return None
+ return cast(resources_abc.ResourceReader, reader(spec.name))
+
+
+def open_binary(package: Package, resource: Resource) -> BinaryIO:
+ """Return a file-like object opened for binary reading of the resource."""
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ reader = _get_resource_reader(package)
+ if reader is not None:
+ return reader.open_resource(resource)
+ # Using pathlib doesn't work well here due to the lack of 'strict'
+ # argument for pathlib.Path.resolve() prior to Python 3.6.
+ absolute_package_path = os.path.abspath(package.__spec__.origin)
+ package_path = os.path.dirname(absolute_package_path)
+ full_path = os.path.join(package_path, resource)
+ try:
+ return open(full_path, mode='rb')
+ except OSError:
+ # Just assume the loader is a resource loader; all the relevant
+ # importlib.machinery loaders are and an AttributeError for
+ # get_data() will make it clear what is needed from the loader.
+ loader = cast(ResourceLoader, package.__spec__.loader)
+ data = None
+ if hasattr(package.__spec__.loader, 'get_data'):
+ with suppress(OSError):
+ data = loader.get_data(full_path)
+ if data is None:
+ package_name = package.__spec__.name
+ message = '{!r} resource not found in {!r}'.format(
+ resource, package_name)
+ raise FileNotFoundError(message)
+ else:
+ return BytesIO(data)
+
+
+def open_text(package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict') -> TextIO:
+ """Return a file-like object opened for text reading of the resource."""
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ reader = _get_resource_reader(package)
+ if reader is not None:
+ return TextIOWrapper(reader.open_resource(resource), encoding, errors)
+ # Using pathlib doesn't work well here due to the lack of 'strict'
+ # argument for pathlib.Path.resolve() prior to Python 3.6.
+ absolute_package_path = os.path.abspath(package.__spec__.origin)
+ package_path = os.path.dirname(absolute_package_path)
+ full_path = os.path.join(package_path, resource)
+ try:
+ return open(full_path, mode='r', encoding=encoding, errors=errors)
+ except OSError:
+ # Just assume the loader is a resource loader; all the relevant
+ # importlib.machinery loaders are and an AttributeError for
+ # get_data() will make it clear what is needed from the loader.
+ loader = cast(ResourceLoader, package.__spec__.loader)
+ data = None
+ if hasattr(package.__spec__.loader, 'get_data'):
+ with suppress(OSError):
+ data = loader.get_data(full_path)
+ if data is None:
+ package_name = package.__spec__.name
+ message = '{!r} resource not found in {!r}'.format(
+ resource, package_name)
+ raise FileNotFoundError(message)
+ else:
+ return TextIOWrapper(BytesIO(data), encoding, errors)
+
+
+def read_binary(package: Package, resource: Resource) -> bytes:
+ """Return the binary contents of the resource."""
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ with open_binary(package, resource) as fp:
+ return fp.read()
+
+
+def read_text(package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict') -> str:
+ """Return the decoded string of the resource.
+
+ The decoding-related arguments have the same semantics as those of
+ bytes.decode().
+ """
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ with open_text(package, resource, encoding, errors) as fp:
+ return fp.read()
+
+
+@contextmanager
+def path(package: Package, resource: Resource) -> Iterator[Path]:
+ """A context manager providing a file path object to the resource.
+
+ If the resource does not already exist on its own on the file system,
+ a temporary file will be created. If the file was created, the file
+ will be deleted upon exiting the context manager (no exception is
+ raised if the file was deleted prior to the context manager
+ exiting).
+ """
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ reader = _get_resource_reader(package)
+ if reader is not None:
+ try:
+ yield Path(reader.resource_path(resource))
+ return
+ except FileNotFoundError:
+ pass
+ # Fall-through for both the lack of resource_path() *and* if
+ # resource_path() raises FileNotFoundError.
+ package_directory = Path(package.__spec__.origin).parent
+ file_path = package_directory / resource
+ if file_path.exists():
+ yield file_path
+ else:
+ with open_binary(package, resource) as fp:
+ data = fp.read()
+ # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
+ # blocks due to the need to close the temporary file to work on
+ # Windows properly.
+ fd, raw_path = tempfile.mkstemp()
+ try:
+ os.write(fd, data)
+ os.close(fd)
+ yield Path(raw_path)
+ finally:
+ try:
+ os.remove(raw_path)
+ except FileNotFoundError:
+ pass
+
+
+def is_resource(package: Package, name: str) -> bool:
+ """True if `name` is a resource inside `package`.
+
+ Directories are *not* resources.
+ """
+ package = _get_package(package)
+ _normalize_path(name)
+ reader = _get_resource_reader(package)
+ if reader is not None:
+ return reader.is_resource(name)
+ try:
+ package_contents = set(contents(package))
+ except (NotADirectoryError, FileNotFoundError):
+ return False
+ if name not in package_contents:
+ return False
+ # Just because the given file_name lives as an entry in the package's
+ # contents doesn't necessarily mean it's a resource. Directories are not
+ # resources, so let's try to find out if it's a directory or not.
+ path = Path(package.__spec__.origin).parent / name
+ if path.is_file():
+ return True
+ if path.is_dir():
+ return False
+ # If it's not a file and it's not a directory, what is it? Well, this
+ # means the file doesn't exist on the file system, so it probably lives
+ # inside a zip file. We have to crack open the zip, look at its table of
+ # contents, and make sure that this entry doesn't have sub-entries.
+ archive_path = package.__spec__.loader.archive # type: ignore
+ package_directory = Path(package.__spec__.origin).parent
+ with ZipFile(archive_path) as zf:
+ toc = zf.namelist()
+ relpath = package_directory.relative_to(archive_path)
+ candidate_path = relpath / name
+ for entry in toc: # pragma: nobranch
+ try:
+ relative_to_candidate = Path(entry).relative_to(candidate_path)
+ except ValueError:
+ # The two paths aren't relative to each other so we can ignore it.
+ continue
+ # Since directories aren't explicitly listed in the zip file, we must
+ # infer their 'directory-ness' by looking at the number of path
+ # components in the path relative to the package resource we're
+ # looking up. If there are zero additional parts, it's a file, i.e. a
+ # resource. If there are more than zero it's a directory, i.e. not a
+ # resource. It has to be one of these two cases.
+ return len(relative_to_candidate.parts) == 0
+ # I think it's impossible to get here. It would mean that we are looking
+ # for a resource in a zip file, there's an entry matching it in the return
+ # value of contents(), but we never actually found it in the zip's table of
+ # contents.
+ raise AssertionError('Impossible situation')
+
+
+def contents(package: Package) -> Iterable[str]:
+ """Return an iterable of entries in `package`.
+
+ Note that not all entries are resources. Specifically, directories are
+ not considered resources. Use `is_resource()` on each entry returned here
+ to check if it is a resource or not.
+ """
+ package = _get_package(package)
+ reader = _get_resource_reader(package)
+ if reader is not None:
+ return reader.contents()
+ # Is the package a namespace package? By definition, namespace packages
+ # cannot have resources.
+ if (package.__spec__.origin == 'namespace' and
+ not package.__spec__.has_location):
+ return ()
+ package_directory = Path(package.__spec__.origin).parent
+ try:
+ return os.listdir(str(package_directory))
+ except (NotADirectoryError, FileNotFoundError):
+ # The package is probably in a zip file.
+ archive_path = getattr(package.__spec__.loader, 'archive', None)
+ if archive_path is None:
+ raise
+ relpath = package_directory.relative_to(archive_path)
+ with ZipFile(archive_path) as zf:
+ toc = zf.namelist()
+ subdirs_seen = set() # type: Set
+ subdirs_returned = []
+ for filename in toc:
+ path = Path(filename)
+ # Strip off any path component parts that are in common with the
+ # package directory, relative to the zip archive's file system
+ # path. This gives us all the parts that live under the named
+ # package inside the zip file. If the length of these subparts is
+ # exactly 1, then it is situated inside the package. The resulting
+ # length will be 0 if it's above the package, and it will be
+ # greater than 1 if it lives in a subdirectory of the package
+ # directory.
+ #
+ # However, since directories themselves don't appear in the zip
+ # archive as a separate entry, we need to return the first path
+ # component for any case that has > 1 subparts -- but only once!
+ if path.parts[:len(relpath.parts)] != relpath.parts:
+ continue
+ subparts = path.parts[len(relpath.parts):]
+ if len(subparts) == 1:
+ subdirs_returned.append(subparts[0])
+ elif len(subparts) > 1: # pragma: nobranch
+ subdir = subparts[0]
+ if subdir not in subdirs_seen:
+ subdirs_seen.add(subdir)
+ subdirs_returned.append(subdir)
+ return subdirs_returned
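+
+# Editorial note (illustrative, not part of upstream): contents() does not
+# distinguish files from directories, so callers typically pair it with
+# is_resource(), e.g.
+#
+#     [c for c in contents('importlib_resources')
+#      if is_resource('importlib_resources', c)]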
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/abc.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/abc.py
new file mode 100644
index 0000000000..f49e8c7008
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/abc.py
@@ -0,0 +1,58 @@
+from __future__ import absolute_import
+
+from ._compat import ABC, FileNotFoundError
+from abc import abstractmethod
+
+# We use mypy's comment syntax here since this file must be compatible with
+# both Python 2 and 3.
+try:
+ from typing import BinaryIO, Iterable, Text # noqa: F401
+except ImportError:
+ # Python 2
+ pass
+
+
+class ResourceReader(ABC):
+ """Abstract base class for loaders to provide resource reading support."""
+
+ @abstractmethod
+ def open_resource(self, resource):
+ # type: (Text) -> BinaryIO
+ """Return an opened, file-like object for binary reading.
+
+ The 'resource' argument is expected to represent only a file name.
+ If the resource cannot be found, FileNotFoundError is raised.
+ """
+ # This deliberately raises FileNotFoundError instead of
+ # NotImplementedError so that if this method is accidentally called,
+ # it'll still do the right thing.
+ raise FileNotFoundError
+
+ @abstractmethod
+ def resource_path(self, resource):
+ # type: (Text) -> Text
+ """Return the file system path to the specified resource.
+
+ The 'resource' argument is expected to represent only a file name.
+ If the resource does not exist on the file system, raise
+ FileNotFoundError.
+ """
+ # This deliberately raises FileNotFoundError instead of
+ # NotImplementedError so that if this method is accidentally called,
+ # it'll still do the right thing.
+ raise FileNotFoundError
+
+ @abstractmethod
+ def is_resource(self, path):
+ # type: (Text) -> bool
+ """Return True if the named 'path' is a resource.
+
+ Files are resources, directories are not.
+ """
+ raise FileNotFoundError
+
+ @abstractmethod
+ def contents(self):
+ # type: () -> Iterable[str]
+ """Return an iterable of entries in `package`."""
+ raise FileNotFoundError
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/version.txt b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/version.txt
new file mode 100644
index 0000000000..6d7de6e6ab
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/version.txt
@@ -0,0 +1 @@
+1.0.2
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/LICENSE
new file mode 100644
index 0000000000..378b991a4d
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2017-2019 Brett Cannon, Barry Warsaw
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/METADATA
new file mode 100644
index 0000000000..78b550540d
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/METADATA
@@ -0,0 +1,41 @@
+Metadata-Version: 2.1
+Name: importlib-resources
+Version: 3.2.1
+Summary: Read resources from Python packages
+Home-page: https://github.com/python/importlib_resources
+Author: Barry Warsaw
+Author-email: barry@python.org
+License: UNKNOWN
+Project-URL: Documentation, https://importlib-resources.readthedocs.io/
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7
+Requires-Dist: pathlib2 ; python_version < "3"
+Requires-Dist: contextlib2 ; python_version < "3"
+Requires-Dist: singledispatch ; python_version < "3.4"
+Requires-Dist: typing ; python_version < "3.5"
+Requires-Dist: zipp (>=0.4) ; python_version < "3.8"
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: rst.linker ; extra == 'docs'
+Requires-Dist: jaraco.packaging ; extra == 'docs'
+
+``importlib_resources`` is a backport of Python standard library
+`importlib.resources
+<https://docs.python.org/3.9/library/importlib.html#module-importlib.resources>`_
+module for Python 2.7, and 3.4 through 3.8. Users of Python 3.9 and beyond
+should use the standard library module, since for these versions,
+``importlib_resources`` just delegates to that module.
+
+The key goal of this module is to replace parts of `pkg_resources
+<https://setuptools.readthedocs.io/en/latest/pkg_resources.html>`_ with a
+solution in Python's stdlib that relies on well-defined APIs. This makes
+reading resources included in packages easier, with more stable and consistent
+semantics.
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/RECORD
new file mode 100644
index 0000000000..dc92a03ce4
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/RECORD
@@ -0,0 +1,42 @@
+importlib_resources/__init__.py,sha256=hswDmLAH0IUlLWwmdHXPN2mgus2bk5IwDP-BFzg7VKo,977
+importlib_resources/_common.py,sha256=RN8cXOZtlygvlbyTewd-ni9wC1hwXpfbZnrl7kbx0nI,3121
+importlib_resources/_compat.py,sha256=NDCXOf1097aDJJx-_pQ0UIktzVx2G1aPIQTRFGx0FHI,3694
+importlib_resources/_py2.py,sha256=G9M5mv1ILl8NARGdNX0v9_F_Hb4HUKCS-FCNK63Ajvw,4146
+importlib_resources/_py3.py,sha256=5_FhUUHWFG1c3HcLrmDy65ZFB7EYxmHfOV3ybv4uTHM,5710
+importlib_resources/abc.py,sha256=6PX4Nprv39YnAht3NymhHIuSso0ocAKqDJZf-A6BgIw,3894
+importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/readers.py,sha256=fGuSBoMeeERUVrscN9Grhp0s-wKMy7nMVbCx92vIlGs,3674
+importlib_resources/trees.py,sha256=U3FlQSI5--eF4AdzOjBvW4xnjL21OFX8ivk82Quwv_M,117
+importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/_compat.py,sha256=geKWJe8UGXjC181JxmtxR3A_o5VrR4yxolS0xbnxMlw,801
+importlib_resources/tests/py27compat.py,sha256=9lDJkGV2swPVQJg6isOorRNFWuP6KeoWd4D2bFNmzLI,965
+importlib_resources/tests/test_files.py,sha256=91rf4C74_aJsKNSt-a-03slVpY9QSAuCbogFWnsaPjE,1017
+importlib_resources/tests/test_open.py,sha256=pIYWvuTDpQOJKX0SEuOKGotssZcEeY_xNPDqLGCvP_U,2565
+importlib_resources/tests/test_path.py,sha256=GnUOu-338o9offnC8xwbXjH9JIQJpD7JujgQkGB106Q,1548
+importlib_resources/tests/test_read.py,sha256=DpA7tzxSQlU0_YQuWibB3E5PDL9fQUdzeKoEUGnAx78,2046
+importlib_resources/tests/test_reader.py,sha256=yEO0xyrYDcGRmsBC6A1n99GXiTZpVvp-uGA313s6aao,4638
+importlib_resources/tests/test_resource.py,sha256=GbrMeHJ74N6KJG38TDodCp--nsRnFHXJc7NrAEqUPaU,8766
+importlib_resources/tests/util.py,sha256=8hBFwqIZRJFNvkboEB7aWsCqTtgUjlWI_iQ0KV158Yk,5914
+importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
+importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
+importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=AYf51fj80OKCRis93v2DlZjt5rM-VQOPptSHJbFtkXw,1131
+importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=e6HXvTEObXvJcNxyX5I8tu5M8_6mSN8ALahHfqE7ADA,698
+importlib_resources-3.2.1.dist-info/LICENSE,sha256=uWRjFdYGataJX2ziXk048ItUglQmjng3GWBALaWA36U,568
+importlib_resources-3.2.1.dist-info/METADATA,sha256=d_tMNLHsZ_lPU-wq04MWr0yEfpwbNFKgfO_CU5GCC9g,1783
+importlib_resources-3.2.1.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+importlib_resources-3.2.1.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
+importlib_resources-3.2.1.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/WHEEL
new file mode 100644
index 0000000000..6d38aa0601
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/top_level.txt
new file mode 100644
index 0000000000..58ad1bd333
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+importlib_resources
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/__init__.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/__init__.py
new file mode 100644
index 0000000000..f122f95e87
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/__init__.py
@@ -0,0 +1,53 @@
+"""Read resources contained within a package."""
+
+import sys
+
+from ._common import (
+ as_file, files,
+ )
+
+# For compatibility. Ref #88.
+# Also requires hook-importlib_resources.py (Ref #101).
+__import__('importlib_resources.trees')
+
+
+__all__ = [
+ 'Package',
+ 'Resource',
+ 'ResourceReader',
+ 'as_file',
+ 'contents',
+ 'files',
+ 'is_resource',
+ 'open_binary',
+ 'open_text',
+ 'path',
+ 'read_binary',
+ 'read_text',
+ ]
+
+
+if sys.version_info >= (3,):
+ from importlib_resources._py3 import (
+ Package,
+ Resource,
+ contents,
+ is_resource,
+ open_binary,
+ open_text,
+ path,
+ read_binary,
+ read_text,
+ )
+ from importlib_resources.abc import ResourceReader
+else:
+ from importlib_resources._py2 import (
+ contents,
+ is_resource,
+ open_binary,
+ open_text,
+ path,
+ read_binary,
+ read_text,
+ )
+ del __all__[:3]
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_common.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_common.py
new file mode 100644
index 0000000000..a7c2bf815d
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_common.py
@@ -0,0 +1,120 @@
+from __future__ import absolute_import
+
+import os
+import tempfile
+import contextlib
+import types
+import importlib
+
+from ._compat import (
+ Path, FileNotFoundError,
+ singledispatch, package_spec,
+ )
+
+if False: # TYPE_CHECKING
+ from typing import Union, Any, Optional
+ from .abc import ResourceReader
+ Package = Union[types.ModuleType, str]
+
+
+def files(package):
+ """
+ Get a Traversable resource from a package
+ """
+ return from_package(get_package(package))
+
+
+def normalize_path(path):
+ # type: (Any) -> str
+ """Normalize a path by ensuring it is a string.
+
+ If the resulting string contains path separators, an exception is raised.
+ """
+ str_path = str(path)
+ parent, file_name = os.path.split(str_path)
+ if parent:
+ raise ValueError('{!r} must be only a file name'.format(path))
+ return file_name
+
+
+def get_resource_reader(package):
+ # type: (types.ModuleType) -> Optional[ResourceReader]
+ """
+ Return the package's loader if it's a ResourceReader.
+ """
+    # We can't use an issubclass() check here because apparently abc's
+    # __subclasscheck__() hook wants to create a weak reference to the
+    # object, but zipimport.zipimporter does not support weak references,
+    # resulting in a TypeError.
+    # That seems terrible.
+ spec = package.__spec__
+ reader = getattr(spec.loader, 'get_resource_reader', None)
+ if reader is None:
+ return None
+ return reader(spec.name)
+
+
+def resolve(cand):
+ # type: (Package) -> types.ModuleType
+ return (
+ cand if isinstance(cand, types.ModuleType)
+ else importlib.import_module(cand)
+ )
+
+
+def get_package(package):
+ # type: (Package) -> types.ModuleType
+ """Take a package name or module object and return the module.
+
+ Raise an exception if the resolved module is not a package.
+ """
+ resolved = resolve(package)
+ if package_spec(resolved).submodule_search_locations is None:
+ raise TypeError('{!r} is not a package'.format(package))
+ return resolved
+
+
+def from_package(package):
+ """
+ Return a Traversable object for the given package.
+
+ """
+ spec = package_spec(package)
+ reader = spec.loader.get_resource_reader(spec.name)
+ return reader.files()
+
+
+@contextlib.contextmanager
+def _tempfile(reader, suffix=''):
+ # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
+ # blocks due to the need to close the temporary file to work on Windows
+ # properly.
+ fd, raw_path = tempfile.mkstemp(suffix=suffix)
+ try:
+ os.write(fd, reader())
+ os.close(fd)
+ del reader
+ yield Path(raw_path)
+ finally:
+ try:
+ os.remove(raw_path)
+ except FileNotFoundError:
+ pass
+
+
+@singledispatch
+def as_file(path):
+ """
+ Given a Traversable object, return that object as a
+ path on the local file system in a context manager.
+ """
+ return _tempfile(path.read_bytes, suffix=path.name)
+
+
+@as_file.register(Path)
+@contextlib.contextmanager
+def _(path):
+ """
+ Degenerate behavior for pathlib.Path objects.
+ """
+ yield path
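A usage sketch (not part of the vendored file; it only assumes importlib_resources itself is importable): the as_file() helper defined above is normally paired with files(), so callers get a real filesystem path whether the package is a plain directory or lives inside a zip archive.

    import importlib_resources

    # 'py.typed' is an empty marker file shipped with importlib_resources,
    # which makes it a convenient self-contained resource to read.
    resource = importlib_resources.files('importlib_resources') / 'py.typed'
    with importlib_resources.as_file(resource) as real_path:
        # real_path is a pathlib.Path; for zip-backed packages it points at a
        # temporary copy that is deleted when the context manager exits.
        print(real_path.read_bytes())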
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_compat.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_compat.py
new file mode 100644
index 0000000000..70b0f6b4a4
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_compat.py
@@ -0,0 +1,139 @@
+from __future__ import absolute_import
+import sys
+
+# flake8: noqa
+
+if sys.version_info > (3,5):
+ from pathlib import Path, PurePath
+else:
+ from pathlib2 import Path, PurePath # type: ignore
+
+
+if sys.version_info > (3,):
+ from contextlib import suppress
+else:
+ from contextlib2 import suppress # type: ignore
+
+
+try:
+ from functools import singledispatch
+except ImportError:
+ from singledispatch import singledispatch # type: ignore
+
+
+try:
+ from abc import ABC # type: ignore
+except ImportError:
+ from abc import ABCMeta
+
+ class ABC(object): # type: ignore
+ __metaclass__ = ABCMeta
+
+
+try:
+ FileNotFoundError = FileNotFoundError # type: ignore
+except NameError:
+ FileNotFoundError = OSError # type: ignore
+
+
+try:
+ NotADirectoryError = NotADirectoryError # type: ignore
+except NameError:
+ NotADirectoryError = OSError # type: ignore
+
+
+try:
+ from zipfile import Path as ZipPath # type: ignore
+except ImportError:
+ from zipp import Path as ZipPath # type: ignore
+
+
+try:
+ from typing import runtime_checkable # type: ignore
+except ImportError:
+ def runtime_checkable(cls): # type: ignore
+ return cls
+
+
+try:
+ from typing import Protocol # type: ignore
+except ImportError:
+ Protocol = ABC # type: ignore
+
+
+__metaclass__ = type
+
+
+class PackageSpec:
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+
+class TraversableResourcesAdapter:
+ def __init__(self, spec):
+ self.spec = spec
+ self.loader = LoaderAdapter(spec)
+
+ def __getattr__(self, name):
+ return getattr(self.spec, name)
+
+
+class LoaderAdapter:
+ """
+ Adapt loaders to provide TraversableResources and other
+ compatibility.
+ """
+ def __init__(self, spec):
+ self.spec = spec
+
+ @property
+ def path(self):
+ # Python < 3
+ return self.spec.origin
+
+ def get_resource_reader(self, name):
+ # Python < 3.9
+ from . import readers
+
+ def _zip_reader(spec):
+ with suppress(AttributeError):
+ return readers.ZipReader(spec.loader, spec.name)
+
+ def _namespace_reader(spec):
+ with suppress(AttributeError, ValueError):
+ return readers.NamespaceReader(spec.submodule_search_locations)
+
+ def _available_reader(spec):
+ with suppress(AttributeError):
+ return spec.loader.get_resource_reader(spec.name)
+
+ def _native_reader(spec):
+ reader = _available_reader(spec)
+ return reader if hasattr(reader, 'files') else None
+
+ return (
+ # native reader if it supplies 'files'
+ _native_reader(self.spec) or
+ # local ZipReader if a zip module
+ _zip_reader(self.spec) or
+ # local NamespaceReader if a namespace module
+ _namespace_reader(self.spec) or
+ # local FileReader
+ readers.FileReader(self)
+ )
+
+
+def package_spec(package):
+ """
+ Construct a minimal package spec suitable for
+ matching the interfaces this library relies upon
+ in later Python versions.
+ """
+ spec = getattr(package, '__spec__', None) or \
+ PackageSpec(
+ origin=package.__file__,
+ loader=getattr(package, '__loader__', None),
+ name=package.__name__,
+ submodule_search_locations=getattr(package, '__path__', None),
+ )
+ return TraversableResourcesAdapter(spec)
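For illustration only (this exercises internal machinery, and 'legacy_pkg' plus its paths are made-up names): package_spec() wraps a module that has no usable __spec__ in a PackageSpec, and the LoaderAdapter then falls back to a FileReader because no native, zip, or namespace reader applies.

    import types
    from importlib_resources._compat import package_spec

    # Hypothetical Python 2-style module: no usable __spec__, only dunder attributes.
    legacy = types.ModuleType('legacy_pkg')
    legacy.__file__ = '/tmp/legacy_pkg/__init__.py'
    legacy.__path__ = ['/tmp/legacy_pkg']

    adapter = package_spec(legacy)
    print(adapter.name)                        # 'legacy_pkg'
    print(adapter.submodule_search_locations)  # ['/tmp/legacy_pkg']
    reader = adapter.loader.get_resource_reader(adapter.name)
    print(type(reader).__name__)               # 'FileReader'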
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_py2.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_py2.py
new file mode 100644
index 0000000000..dd8c7d627d
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_py2.py
@@ -0,0 +1,107 @@
+import os
+import errno
+
+from . import _common
+from ._compat import FileNotFoundError
+from io import BytesIO, TextIOWrapper, open as io_open
+
+
+def open_binary(package, resource):
+ """Return a file-like object opened for binary reading of the resource."""
+ resource = _common.normalize_path(resource)
+ package = _common.get_package(package)
+ # Using pathlib doesn't work well here due to the lack of 'strict' argument
+ # for pathlib.Path.resolve() prior to Python 3.6.
+ package_path = os.path.dirname(package.__file__)
+ relative_path = os.path.join(package_path, resource)
+ full_path = os.path.abspath(relative_path)
+ try:
+ return io_open(full_path, 'rb')
+ except IOError:
+ # This might be a package in a zip file. zipimport provides a loader
+ # with a functioning get_data() method, however we have to strip the
+ # archive (i.e. the .zip file's name) off the front of the path. This
+ # is because the zipimport loader in Python 2 doesn't actually follow
+ # PEP 302. It should allow the full path, but actually requires that
+ # the path be relative to the zip file.
+ try:
+ loader = package.__loader__
+ full_path = relative_path[len(loader.archive)+1:]
+ data = loader.get_data(full_path)
+ except (IOError, AttributeError):
+ package_name = package.__name__
+ message = '{!r} resource not found in {!r}'.format(
+ resource, package_name)
+ raise FileNotFoundError(message)
+ return BytesIO(data)
+
+
+def open_text(package, resource, encoding='utf-8', errors='strict'):
+ """Return a file-like object opened for text reading of the resource."""
+ return TextIOWrapper(
+ open_binary(package, resource), encoding=encoding, errors=errors)
+
+
+def read_binary(package, resource):
+ """Return the binary contents of the resource."""
+ with open_binary(package, resource) as fp:
+ return fp.read()
+
+
+def read_text(package, resource, encoding='utf-8', errors='strict'):
+ """Return the decoded string of the resource.
+
+ The decoding-related arguments have the same semantics as those of
+ bytes.decode().
+ """
+ with open_text(package, resource, encoding, errors) as fp:
+ return fp.read()
+
+
+def path(package, resource):
+ """A context manager providing a file path object to the resource.
+
+ If the resource does not already exist on its own on the file system,
+ a temporary file will be created. If the file was created, the file
+ will be deleted upon exiting the context manager (no exception is
+ raised if the file was deleted prior to the context manager
+ exiting).
+ """
+ path = _common.files(package).joinpath(_common.normalize_path(resource))
+ if not path.is_file():
+ raise FileNotFoundError(path)
+ return _common.as_file(path)
+
+
+def is_resource(package, name):
+ """True if name is a resource inside package.
+
+ Directories are *not* resources.
+ """
+ package = _common.get_package(package)
+ _common.normalize_path(name)
+ try:
+ package_contents = set(contents(package))
+ except OSError as error:
+ if error.errno not in (errno.ENOENT, errno.ENOTDIR):
+ # We won't hit this in the Python 2 tests, so it'll appear
+ # uncovered. We could mock os.listdir() to return a non-ENOENT or
+ # ENOTDIR, but then we'd have to depend on another external
+ # library since Python 2 doesn't have unittest.mock. It's not
+ # worth it.
+ raise # pragma: nocover
+ return False
+ if name not in package_contents:
+ return False
+ return (_common.from_package(package) / name).is_file()
+
+
+def contents(package):
+ """Return an iterable of entries in `package`.
+
+ Note that not all entries are resources. Specifically, directories are
+ not considered resources. Use `is_resource()` on each entry returned here
+ to check if it is a resource or not.
+ """
+ package = _common.get_package(package)
+ return list(item.name for item in _common.from_package(package).iterdir())
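A sketch of the functional API documented above, runnable against the backport itself (no extra packages assumed; 'py.typed' ships with importlib_resources):

    import importlib_resources as res

    print(res.is_resource('importlib_resources', 'py.typed'))       # True
    print('py.typed' in set(res.contents('importlib_resources')))   # True
    print(repr(res.read_text('importlib_resources', 'py.typed')))   # ''

    # path() hands back a real file system path, creating (and later removing)
    # a temporary copy only when the resource is not already a plain file.
    with res.path('importlib_resources', 'py.typed') as p:
        print(p.is_file())                                           # True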
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_py3.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_py3.py
new file mode 100644
index 0000000000..7aa2773f81
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_py3.py
@@ -0,0 +1,164 @@
+import os
+import sys
+import io
+
+from . import _common
+from contextlib import suppress
+from importlib.abc import ResourceLoader
+from io import BytesIO, TextIOWrapper
+from pathlib import Path
+from types import ModuleType
+from typing import Iterable, Iterator, Optional, Set, Union # noqa: F401
+from typing import cast
+from typing.io import BinaryIO, TextIO
+from collections.abc import Sequence
+from ._compat import singledispatch
+
+if False: # TYPE_CHECKING
+ from typing import ContextManager
+
+Package = Union[ModuleType, str]
+if sys.version_info >= (3, 6):
+ Resource = Union[str, os.PathLike] # pragma: <=35
+else:
+ Resource = str # pragma: >=36
+
+
+def open_binary(package: Package, resource: Resource) -> BinaryIO:
+ """Return a file-like object opened for binary reading of the resource."""
+ resource = _common.normalize_path(resource)
+ package = _common.get_package(package)
+ reader = _common.get_resource_reader(package)
+ if reader is not None:
+ return reader.open_resource(resource)
+ # Using pathlib doesn't work well here due to the lack of 'strict'
+ # argument for pathlib.Path.resolve() prior to Python 3.6.
+ if package.__spec__.submodule_search_locations is not None:
+ paths = package.__spec__.submodule_search_locations
+ elif package.__spec__.origin is not None:
+ paths = [os.path.dirname(os.path.abspath(package.__spec__.origin))]
+
+ for package_path in paths:
+ full_path = os.path.join(package_path, resource)
+ try:
+ return open(full_path, mode='rb')
+ except OSError:
+ # Just assume the loader is a resource loader; all the relevant
+ # importlib.machinery loaders are and an AttributeError for
+ # get_data() will make it clear what is needed from the loader.
+ loader = cast(ResourceLoader, package.__spec__.loader)
+ data = None
+ if hasattr(package.__spec__.loader, 'get_data'):
+ with suppress(OSError):
+ data = loader.get_data(full_path)
+ if data is not None:
+ return BytesIO(data)
+
+ raise FileNotFoundError('{!r} resource not found in {!r}'.format(
+ resource, package.__spec__.name))
+
+
+def open_text(package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict') -> TextIO:
+ """Return a file-like object opened for text reading of the resource."""
+ return TextIOWrapper(
+ open_binary(package, resource), encoding=encoding, errors=errors)
+
+
+def read_binary(package: Package, resource: Resource) -> bytes:
+ """Return the binary contents of the resource."""
+ with open_binary(package, resource) as fp:
+ return fp.read()
+
+
+def read_text(package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict') -> str:
+ """Return the decoded string of the resource.
+
+ The decoding-related arguments have the same semantics as those of
+ bytes.decode().
+ """
+ with open_text(package, resource, encoding, errors) as fp:
+ return fp.read()
+
+
+def path(
+ package: Package, resource: Resource,
+ ) -> 'ContextManager[Path]':
+ """A context manager providing a file path object to the resource.
+
+ If the resource does not already exist on its own on the file system,
+ a temporary file will be created. If the file was created, the file
+ will be deleted upon exiting the context manager (no exception is
+ raised if the file was deleted prior to the context manager
+ exiting).
+ """
+ reader = _common.get_resource_reader(_common.get_package(package))
+ return (
+ _path_from_reader(reader, _common.normalize_path(resource))
+ if reader else
+ _common.as_file(
+ _common.files(package).joinpath(_common.normalize_path(resource)))
+ )
+
+
+def _path_from_reader(reader, resource):
+ return _path_from_resource_path(reader, resource) or \
+ _path_from_open_resource(reader, resource)
+
+
+def _path_from_resource_path(reader, resource):
+ with suppress(FileNotFoundError):
+ return Path(reader.resource_path(resource))
+
+
+def _path_from_open_resource(reader, resource):
+ saved = io.BytesIO(reader.open_resource(resource).read())
+ return _common._tempfile(saved.read, suffix=resource)
+
+
+def is_resource(package: Package, name: str) -> bool:
+ """True if `name` is a resource inside `package`.
+
+ Directories are *not* resources.
+ """
+ package = _common.get_package(package)
+ _common.normalize_path(name)
+ reader = _common.get_resource_reader(package)
+ if reader is not None:
+ return reader.is_resource(name)
+ package_contents = set(contents(package))
+ if name not in package_contents:
+ return False
+ return (_common.from_package(package) / name).is_file()
+
+
+def contents(package: Package) -> Iterable[str]:
+ """Return an iterable of entries in `package`.
+
+ Note that not all entries are resources. Specifically, directories are
+ not considered resources. Use `is_resource()` on each entry returned here
+ to check if it is a resource or not.
+ """
+ package = _common.get_package(package)
+ reader = _common.get_resource_reader(package)
+ if reader is not None:
+ return _ensure_sequence(reader.contents())
+    traversable = _common.from_package(package)
+    if traversable.is_dir():
+        return list(item.name for item in traversable.iterdir())
+ return []
+
+
+@singledispatch
+def _ensure_sequence(iterable):
+ return list(iterable)
+
+
+@_ensure_sequence.register(Sequence)
+def _(iterable):
+ return iterable
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/abc.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/abc.py
new file mode 100644
index 0000000000..18bc4ef876
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/abc.py
@@ -0,0 +1,142 @@
+from __future__ import absolute_import
+
+import abc
+
+from ._compat import ABC, FileNotFoundError, runtime_checkable, Protocol
+
+# Use mypy's comment syntax for Python 2 compatibility
+try:
+ from typing import BinaryIO, Iterable, Text
+except ImportError:
+ pass
+
+
+class ResourceReader(ABC):
+ """Abstract base class for loaders to provide resource reading support."""
+
+ @abc.abstractmethod
+ def open_resource(self, resource):
+ # type: (Text) -> BinaryIO
+ """Return an opened, file-like object for binary reading.
+
+ The 'resource' argument is expected to represent only a file name.
+ If the resource cannot be found, FileNotFoundError is raised.
+ """
+ # This deliberately raises FileNotFoundError instead of
+ # NotImplementedError so that if this method is accidentally called,
+ # it'll still do the right thing.
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def resource_path(self, resource):
+ # type: (Text) -> Text
+ """Return the file system path to the specified resource.
+
+ The 'resource' argument is expected to represent only a file name.
+ If the resource does not exist on the file system, raise
+ FileNotFoundError.
+ """
+ # This deliberately raises FileNotFoundError instead of
+ # NotImplementedError so that if this method is accidentally called,
+ # it'll still do the right thing.
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def is_resource(self, path):
+ # type: (Text) -> bool
+ """Return True if the named 'path' is a resource.
+
+ Files are resources, directories are not.
+ """
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def contents(self):
+ # type: () -> Iterable[str]
+ """Return an iterable of entries in `package`."""
+ raise FileNotFoundError
+
+
+@runtime_checkable
+class Traversable(Protocol):
+ """
+ An object with a subset of pathlib.Path methods suitable for
+ traversing directories and opening files.
+ """
+
+ @abc.abstractmethod
+ def iterdir(self):
+ """
+ Yield Traversable objects in self
+ """
+
+ @abc.abstractmethod
+ def read_bytes(self):
+ """
+ Read contents of self as bytes
+ """
+
+ @abc.abstractmethod
+ def read_text(self, encoding=None):
+ """
+        Read contents of self as text
+ """
+
+ @abc.abstractmethod
+ def is_dir(self):
+ """
+ Return True if self is a dir
+ """
+
+ @abc.abstractmethod
+ def is_file(self):
+ """
+ Return True if self is a file
+ """
+
+ @abc.abstractmethod
+ def joinpath(self, child):
+ """
+ Return Traversable child in self
+ """
+
+ @abc.abstractmethod
+ def __truediv__(self, child):
+ """
+ Return Traversable child in self
+ """
+
+ @abc.abstractmethod
+ def open(self, mode='r', *args, **kwargs):
+ """
+ mode may be 'r' or 'rb' to open as text or binary. Return a handle
+ suitable for reading (same as pathlib.Path.open).
+
+ When opening as text, accepts encoding parameters such as those
+ accepted by io.TextIOWrapper.
+ """
+
+ @abc.abstractproperty
+ def name(self):
+ # type: () -> str
+ """
+ The base name of this object without any parent references.
+ """
+
+
+class TraversableResources(ResourceReader):
+ @abc.abstractmethod
+ def files(self):
+ """Return a Traversable object for the loaded package."""
+
+ def open_resource(self, resource):
+ return self.files().joinpath(resource).open('rb')
+
+ def resource_path(self, resource):
+ raise FileNotFoundError(resource)
+
+ def is_resource(self, path):
+ return self.files().joinpath(path).is_file()
+
+ def contents(self):
+ return (item.name for item in self.files().iterdir())
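As a sketch of the ResourceReader contract above (DictResourceReader is a hypothetical, in-memory reader written for this note; it is not part of the library): a loader advertises resources by returning such an object from get_resource_reader().

    import io
    from importlib_resources.abc import ResourceReader

    class DictResourceReader(ResourceReader):
        # Hypothetical reader mapping file names to bytes held in memory.
        def __init__(self, data):
            self._data = dict(data)

        def open_resource(self, resource):
            try:
                return io.BytesIO(self._data[resource])
            except KeyError:
                raise FileNotFoundError(resource)

        def resource_path(self, resource):
            # Nothing exists on the real file system for an in-memory reader.
            raise FileNotFoundError(resource)

        def is_resource(self, path):
            return path in self._data

        def contents(self):
            return list(self._data)

    reader = DictResourceReader({'config.ini': b'[core]\n'})
    print(reader.is_resource('config.ini'))            # True
    print(reader.open_resource('config.ini').read())   # b'[core]\n'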
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/py.typed b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/py.typed
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/readers.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/readers.py
new file mode 100644
index 0000000000..ce9c0caec4
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/readers.py
@@ -0,0 +1,123 @@
+import os.path
+
+from collections import OrderedDict
+
+from . import abc
+
+from ._compat import Path, ZipPath
+from ._compat import FileNotFoundError, NotADirectoryError
+
+
+class FileReader(abc.TraversableResources):
+ def __init__(self, loader):
+ self.path = Path(loader.path).parent
+
+ def resource_path(self, resource):
+ """
+ Return the file system path to prevent
+ `resources.path()` from creating a temporary
+ copy.
+ """
+ return str(self.path.joinpath(resource))
+
+ def files(self):
+ return self.path
+
+
+class ZipReader(abc.TraversableResources):
+ def __init__(self, loader, module):
+ _, _, name = module.rpartition('.')
+ self.prefix = loader.prefix.replace('\\', '/') + name + '/'
+ self.archive = loader.archive
+
+ def open_resource(self, resource):
+ try:
+ return super().open_resource(resource)
+ except KeyError as exc:
+ raise FileNotFoundError(exc.args[0])
+
+ def is_resource(self, path):
+ # workaround for `zipfile.Path.is_file` returning true
+ # for non-existent paths.
+ target = self.files().joinpath(path)
+ return target.is_file() and target.exists()
+
+ def files(self):
+ return ZipPath(self.archive, self.prefix)
+
+
+class MultiplexedPath(abc.Traversable):
+ """
+ Given a series of Traversable objects, implement a merged
+ version of the interface across all objects. Useful for
+ namespace packages which may be multihomed at a single
+ name.
+ """
+ def __init__(self, *paths):
+ paths = list(OrderedDict.fromkeys(paths)) # remove duplicates
+ self._paths = list(map(Path, paths))
+ if not self._paths:
+ message = 'MultiplexedPath must contain at least one path'
+ raise FileNotFoundError(message)
+ if any(not path.is_dir() for path in self._paths):
+ raise NotADirectoryError(
+ 'MultiplexedPath only supports directories')
+
+ def iterdir(self):
+ visited = []
+ for path in self._paths:
+ for file in path.iterdir():
+ if file.name in visited:
+ continue
+ visited.append(file.name)
+ yield file
+
+ def read_bytes(self):
+ raise FileNotFoundError('{} is not a file'.format(self))
+
+ def read_text(self, *args, **kwargs):
+ raise FileNotFoundError('{} is not a file'.format(self))
+
+ def is_dir(self):
+ return True
+
+ def is_file(self):
+ return False
+
+ def joinpath(self, child):
+ # first try to find child in current paths
+ for file in self.iterdir():
+ if file.name == child:
+ return file
+ # if it does not exist, construct it with the first path
+ return self._paths[0] / child
+
+ __truediv__ = joinpath
+
+ def open(self, *args, **kwargs):
+ raise FileNotFoundError('{} is not a file'.format(self))
+
+ def name(self):
+ return os.path.basename(self._paths[0])
+
+ def __repr__(self):
+ return 'MultiplexedPath({})'.format(
+ ', '.join("'{}'".format(path) for path in self._paths))
+
+
+class NamespaceReader(abc.TraversableResources):
+ def __init__(self, namespace_path):
+ if 'NamespacePath' not in str(namespace_path):
+ raise ValueError('Invalid path')
+ self.path = MultiplexedPath(*list(namespace_path))
+
+ def resource_path(self, resource):
+ """
+ Return the file system path to prevent
+ `resources.path()` from creating a temporary
+ copy.
+ """
+ return str(self.path.joinpath(resource))
+
+ def files(self):
+ return self.path
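A small sketch of MultiplexedPath from above (the two temporary directories stand in for the multiple portions of a namespace package; the names are arbitrary):

    import pathlib
    import tempfile
    from importlib_resources.readers import MultiplexedPath

    a = pathlib.Path(tempfile.mkdtemp())
    b = pathlib.Path(tempfile.mkdtemp())
    (a / 'one.txt').write_text('1')
    (b / 'two.txt').write_text('2')

    merged = MultiplexedPath(a, b)
    print(sorted(p.name for p in merged.iterdir()))  # ['one.txt', 'two.txt']
    print(merged.joinpath('two.txt').read_text())    # '2'
    print(merged.is_dir(), merged.is_file())         # True False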
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/trees.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/trees.py
new file mode 100644
index 0000000000..ba42bb55b7
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/trees.py
@@ -0,0 +1,6 @@
+# for compatibility with 1.1, continue to expose as_file here.
+
+from ._common import as_file
+
+
+__all__ = ['as_file']
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/LICENSE
new file mode 100644
index 0000000000..378b991a4d
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2017-2019 Brett Cannon, Barry Warsaw
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/METADATA
new file mode 100644
index 0000000000..66db8b78aa
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/METADATA
@@ -0,0 +1,41 @@
+Metadata-Version: 2.1
+Name: importlib-resources
+Version: 3.3.0
+Summary: Read resources from Python packages
+Home-page: https://github.com/python/importlib_resources
+Author: Barry Warsaw
+Author-email: barry@python.org
+License: UNKNOWN
+Project-URL: Documentation, https://importlib-resources.readthedocs.io/
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7
+Requires-Dist: pathlib2 ; python_version < "3"
+Requires-Dist: contextlib2 ; python_version < "3"
+Requires-Dist: singledispatch ; python_version < "3.4"
+Requires-Dist: typing ; python_version < "3.5"
+Requires-Dist: zipp (>=0.4) ; python_version < "3.8"
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: rst.linker ; extra == 'docs'
+Requires-Dist: jaraco.packaging ; extra == 'docs'
+
+``importlib_resources`` is a backport of the Python standard library
+`importlib.resources
+<https://docs.python.org/3.9/library/importlib.html#module-importlib.resources>`_
+module for Python 2.7, and 3.4 through 3.8. Users of Python 3.9 and beyond
+should use the standard library module, since for these versions,
+``importlib_resources`` just delegates to that module.
+
+The key goal of this module is to replace parts of `pkg_resources
+<https://setuptools.readthedocs.io/en/latest/pkg_resources.html>`_ with a
+solution in Python's stdlib that relies on well-defined APIs. This makes
+reading resources included in packages easier, with more stable and consistent
+semantics.
+
+
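The delegation described above is usually consumed with a version switch like the following sketch (it assumes the backport is importable on older interpreters; the resource read here is importlib_resources' own py.typed marker):

    import sys

    if sys.version_info >= (3, 9):
        import importlib.resources as resources   # stdlib implementation
    else:
        import importlib_resources as resources   # this vendored backport

    print(repr(resources.read_text('importlib_resources', 'py.typed')))  # ''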
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/RECORD
new file mode 100644
index 0000000000..20e1b9b44a
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/RECORD
@@ -0,0 +1,42 @@
+importlib_resources/__init__.py,sha256=hswDmLAH0IUlLWwmdHXPN2mgus2bk5IwDP-BFzg7VKo,977
+importlib_resources/_common.py,sha256=RN8cXOZtlygvlbyTewd-ni9wC1hwXpfbZnrl7kbx0nI,3121
+importlib_resources/_compat.py,sha256=NDCXOf1097aDJJx-_pQ0UIktzVx2G1aPIQTRFGx0FHI,3694
+importlib_resources/_py2.py,sha256=G9M5mv1ILl8NARGdNX0v9_F_Hb4HUKCS-FCNK63Ajvw,4146
+importlib_resources/_py3.py,sha256=Bc-p0UYfPVWXFJ21HLNfVvbVrPJFXBA0g80rqPInkq8,5564
+importlib_resources/abc.py,sha256=6PX4Nprv39YnAht3NymhHIuSso0ocAKqDJZf-A6BgIw,3894
+importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/readers.py,sha256=fGuSBoMeeERUVrscN9Grhp0s-wKMy7nMVbCx92vIlGs,3674
+importlib_resources/trees.py,sha256=U3FlQSI5--eF4AdzOjBvW4xnjL21OFX8ivk82Quwv_M,117
+importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/_compat.py,sha256=geKWJe8UGXjC181JxmtxR3A_o5VrR4yxolS0xbnxMlw,801
+importlib_resources/tests/py27compat.py,sha256=9lDJkGV2swPVQJg6isOorRNFWuP6KeoWd4D2bFNmzLI,965
+importlib_resources/tests/test_files.py,sha256=91rf4C74_aJsKNSt-a-03slVpY9QSAuCbogFWnsaPjE,1017
+importlib_resources/tests/test_open.py,sha256=pIYWvuTDpQOJKX0SEuOKGotssZcEeY_xNPDqLGCvP_U,2565
+importlib_resources/tests/test_path.py,sha256=GnUOu-338o9offnC8xwbXjH9JIQJpD7JujgQkGB106Q,1548
+importlib_resources/tests/test_read.py,sha256=DpA7tzxSQlU0_YQuWibB3E5PDL9fQUdzeKoEUGnAx78,2046
+importlib_resources/tests/test_reader.py,sha256=yEO0xyrYDcGRmsBC6A1n99GXiTZpVvp-uGA313s6aao,4638
+importlib_resources/tests/test_resource.py,sha256=GbrMeHJ74N6KJG38TDodCp--nsRnFHXJc7NrAEqUPaU,8766
+importlib_resources/tests/util.py,sha256=8hBFwqIZRJFNvkboEB7aWsCqTtgUjlWI_iQ0KV158Yk,5914
+importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
+importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
+importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=AYf51fj80OKCRis93v2DlZjt5rM-VQOPptSHJbFtkXw,1131
+importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=e6HXvTEObXvJcNxyX5I8tu5M8_6mSN8ALahHfqE7ADA,698
+importlib_resources-3.3.0.dist-info/LICENSE,sha256=uWRjFdYGataJX2ziXk048ItUglQmjng3GWBALaWA36U,568
+importlib_resources-3.3.0.dist-info/METADATA,sha256=GxPMbCwUwlCuHNCiPJvP4IC_mTKqP4b_W7UqqNidcF4,1791
+importlib_resources-3.3.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+importlib_resources-3.3.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
+importlib_resources-3.3.0.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/WHEEL
new file mode 100644
index 0000000000..6d38aa0601
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..58ad1bd333
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+importlib_resources
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/__init__.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/__init__.py
new file mode 100644
index 0000000000..f122f95e87
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/__init__.py
@@ -0,0 +1,53 @@
+"""Read resources contained within a package."""
+
+import sys
+
+from ._common import (
+ as_file, files,
+ )
+
+# For compatibility. Ref #88.
+# Also requires hook-importlib_resources.py (Ref #101).
+__import__('importlib_resources.trees')
+
+
+__all__ = [
+ 'Package',
+ 'Resource',
+ 'ResourceReader',
+ 'as_file',
+ 'contents',
+ 'files',
+ 'is_resource',
+ 'open_binary',
+ 'open_text',
+ 'path',
+ 'read_binary',
+ 'read_text',
+ ]
+
+
+if sys.version_info >= (3,):
+ from importlib_resources._py3 import (
+ Package,
+ Resource,
+ contents,
+ is_resource,
+ open_binary,
+ open_text,
+ path,
+ read_binary,
+ read_text,
+ )
+ from importlib_resources.abc import ResourceReader
+else:
+ from importlib_resources._py2 import (
+ contents,
+ is_resource,
+ open_binary,
+ open_text,
+ path,
+ read_binary,
+ read_text,
+ )
+ del __all__[:3]
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_common.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_common.py
new file mode 100644
index 0000000000..a7c2bf815d
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_common.py
@@ -0,0 +1,120 @@
+from __future__ import absolute_import
+
+import os
+import tempfile
+import contextlib
+import types
+import importlib
+
+from ._compat import (
+ Path, FileNotFoundError,
+ singledispatch, package_spec,
+ )
+
+if False: # TYPE_CHECKING
+ from typing import Union, Any, Optional
+ from .abc import ResourceReader
+ Package = Union[types.ModuleType, str]
+
+
+def files(package):
+ """
+ Get a Traversable resource from a package
+ """
+ return from_package(get_package(package))
+
+
+def normalize_path(path):
+ # type: (Any) -> str
+ """Normalize a path by ensuring it is a string.
+
+ If the resulting string contains path separators, an exception is raised.
+ """
+ str_path = str(path)
+ parent, file_name = os.path.split(str_path)
+ if parent:
+ raise ValueError('{!r} must be only a file name'.format(path))
+ return file_name
+
+
+def get_resource_reader(package):
+ # type: (types.ModuleType) -> Optional[ResourceReader]
+ """
+ Return the package's loader if it's a ResourceReader.
+ """
+    # We can't use an issubclass() check here because apparently abc's
+    # __subclasscheck__() hook wants to create a weak reference to the
+    # object, but zipimport.zipimporter does not support weak references,
+    # resulting in a TypeError.
+    # That seems terrible.
+ spec = package.__spec__
+ reader = getattr(spec.loader, 'get_resource_reader', None)
+ if reader is None:
+ return None
+ return reader(spec.name)
+
+
+def resolve(cand):
+ # type: (Package) -> types.ModuleType
+ return (
+ cand if isinstance(cand, types.ModuleType)
+ else importlib.import_module(cand)
+ )
+
+
+def get_package(package):
+ # type: (Package) -> types.ModuleType
+ """Take a package name or module object and return the module.
+
+ Raise an exception if the resolved module is not a package.
+ """
+ resolved = resolve(package)
+ if package_spec(resolved).submodule_search_locations is None:
+ raise TypeError('{!r} is not a package'.format(package))
+ return resolved
+
+
+def from_package(package):
+ """
+ Return a Traversable object for the given package.
+
+ """
+ spec = package_spec(package)
+ reader = spec.loader.get_resource_reader(spec.name)
+ return reader.files()
+
+
+@contextlib.contextmanager
+def _tempfile(reader, suffix=''):
+ # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
+ # blocks due to the need to close the temporary file to work on Windows
+ # properly.
+ fd, raw_path = tempfile.mkstemp(suffix=suffix)
+ try:
+ os.write(fd, reader())
+ os.close(fd)
+ del reader
+ yield Path(raw_path)
+ finally:
+ try:
+ os.remove(raw_path)
+ except FileNotFoundError:
+ pass
+
+
+@singledispatch
+def as_file(path):
+ """
+ Given a Traversable object, return that object as a
+ path on the local file system in a context manager.
+ """
+ return _tempfile(path.read_bytes, suffix=path.name)
+
+
+@as_file.register(Path)
+@contextlib.contextmanager
+def _(path):
+ """
+ Degenerate behavior for pathlib.Path objects.
+ """
+ yield path
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_compat.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_compat.py
new file mode 100644
index 0000000000..70b0f6b4a4
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_compat.py
@@ -0,0 +1,139 @@
+from __future__ import absolute_import
+import sys
+
+# flake8: noqa
+
+if sys.version_info > (3,5):
+ from pathlib import Path, PurePath
+else:
+ from pathlib2 import Path, PurePath # type: ignore
+
+
+if sys.version_info > (3,):
+ from contextlib import suppress
+else:
+ from contextlib2 import suppress # type: ignore
+
+
+try:
+ from functools import singledispatch
+except ImportError:
+ from singledispatch import singledispatch # type: ignore
+
+
+try:
+ from abc import ABC # type: ignore
+except ImportError:
+ from abc import ABCMeta
+
+ class ABC(object): # type: ignore
+ __metaclass__ = ABCMeta
+
+
+try:
+ FileNotFoundError = FileNotFoundError # type: ignore
+except NameError:
+ FileNotFoundError = OSError # type: ignore
+
+
+try:
+ NotADirectoryError = NotADirectoryError # type: ignore
+except NameError:
+ NotADirectoryError = OSError # type: ignore
+
+
+try:
+ from zipfile import Path as ZipPath # type: ignore
+except ImportError:
+ from zipp import Path as ZipPath # type: ignore
+
+
+try:
+ from typing import runtime_checkable # type: ignore
+except ImportError:
+ def runtime_checkable(cls): # type: ignore
+ return cls
+
+
+try:
+ from typing import Protocol # type: ignore
+except ImportError:
+ Protocol = ABC # type: ignore
+
+
+__metaclass__ = type
+
+
+class PackageSpec:
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+
+class TraversableResourcesAdapter:
+ def __init__(self, spec):
+ self.spec = spec
+ self.loader = LoaderAdapter(spec)
+
+ def __getattr__(self, name):
+ return getattr(self.spec, name)
+
+
+class LoaderAdapter:
+ """
+ Adapt loaders to provide TraversableResources and other
+ compatibility.
+ """
+ def __init__(self, spec):
+ self.spec = spec
+
+ @property
+ def path(self):
+ # Python < 3
+ return self.spec.origin
+
+ def get_resource_reader(self, name):
+ # Python < 3.9
+ from . import readers
+
+ def _zip_reader(spec):
+ with suppress(AttributeError):
+ return readers.ZipReader(spec.loader, spec.name)
+
+ def _namespace_reader(spec):
+ with suppress(AttributeError, ValueError):
+ return readers.NamespaceReader(spec.submodule_search_locations)
+
+ def _available_reader(spec):
+ with suppress(AttributeError):
+ return spec.loader.get_resource_reader(spec.name)
+
+ def _native_reader(spec):
+ reader = _available_reader(spec)
+ return reader if hasattr(reader, 'files') else None
+
+ return (
+ # native reader if it supplies 'files'
+ _native_reader(self.spec) or
+ # local ZipReader if a zip module
+ _zip_reader(self.spec) or
+ # local NamespaceReader if a namespace module
+ _namespace_reader(self.spec) or
+ # local FileReader
+ readers.FileReader(self)
+ )
+
+
+def package_spec(package):
+ """
+ Construct a minimal package spec suitable for
+ matching the interfaces this library relies upon
+ in later Python versions.
+ """
+ spec = getattr(package, '__spec__', None) or \
+ PackageSpec(
+ origin=package.__file__,
+ loader=getattr(package, '__loader__', None),
+ name=package.__name__,
+ submodule_search_locations=getattr(package, '__path__', None),
+ )
+ return TraversableResourcesAdapter(spec)
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py2.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py2.py
new file mode 100644
index 0000000000..dd8c7d627d
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py2.py
@@ -0,0 +1,107 @@
+import os
+import errno
+
+from . import _common
+from ._compat import FileNotFoundError
+from io import BytesIO, TextIOWrapper, open as io_open
+
+
+def open_binary(package, resource):
+ """Return a file-like object opened for binary reading of the resource."""
+ resource = _common.normalize_path(resource)
+ package = _common.get_package(package)
+ # Using pathlib doesn't work well here due to the lack of 'strict' argument
+ # for pathlib.Path.resolve() prior to Python 3.6.
+ package_path = os.path.dirname(package.__file__)
+ relative_path = os.path.join(package_path, resource)
+ full_path = os.path.abspath(relative_path)
+ try:
+ return io_open(full_path, 'rb')
+ except IOError:
+ # This might be a package in a zip file. zipimport provides a loader
+ # with a functioning get_data() method, however we have to strip the
+ # archive (i.e. the .zip file's name) off the front of the path. This
+ # is because the zipimport loader in Python 2 doesn't actually follow
+ # PEP 302. It should allow the full path, but actually requires that
+ # the path be relative to the zip file.
+ try:
+ loader = package.__loader__
+ full_path = relative_path[len(loader.archive)+1:]
+ data = loader.get_data(full_path)
+ except (IOError, AttributeError):
+ package_name = package.__name__
+ message = '{!r} resource not found in {!r}'.format(
+ resource, package_name)
+ raise FileNotFoundError(message)
+ return BytesIO(data)
+
+
+def open_text(package, resource, encoding='utf-8', errors='strict'):
+ """Return a file-like object opened for text reading of the resource."""
+ return TextIOWrapper(
+ open_binary(package, resource), encoding=encoding, errors=errors)
+
+
+def read_binary(package, resource):
+ """Return the binary contents of the resource."""
+ with open_binary(package, resource) as fp:
+ return fp.read()
+
+
+def read_text(package, resource, encoding='utf-8', errors='strict'):
+ """Return the decoded string of the resource.
+
+ The decoding-related arguments have the same semantics as those of
+ bytes.decode().
+ """
+ with open_text(package, resource, encoding, errors) as fp:
+ return fp.read()
+
+
+def path(package, resource):
+ """A context manager providing a file path object to the resource.
+
+ If the resource does not already exist on its own on the file system,
+ a temporary file will be created. If the file was created, the file
+ will be deleted upon exiting the context manager (no exception is
+ raised if the file was deleted prior to the context manager
+ exiting).
+ """
+ path = _common.files(package).joinpath(_common.normalize_path(resource))
+ if not path.is_file():
+ raise FileNotFoundError(path)
+ return _common.as_file(path)
+
+
+def is_resource(package, name):
+ """True if name is a resource inside package.
+
+ Directories are *not* resources.
+ """
+ package = _common.get_package(package)
+ _common.normalize_path(name)
+ try:
+ package_contents = set(contents(package))
+ except OSError as error:
+ if error.errno not in (errno.ENOENT, errno.ENOTDIR):
+ # We won't hit this in the Python 2 tests, so it'll appear
+ # uncovered. We could mock os.listdir() to return a non-ENOENT or
+ # ENOTDIR, but then we'd have to depend on another external
+ # library since Python 2 doesn't have unittest.mock. It's not
+ # worth it.
+ raise # pragma: nocover
+ return False
+ if name not in package_contents:
+ return False
+ return (_common.from_package(package) / name).is_file()
+
+
+def contents(package):
+ """Return an iterable of entries in `package`.
+
+ Note that not all entries are resources. Specifically, directories are
+ not considered resources. Use `is_resource()` on each entry returned here
+ to check if it is a resource or not.
+ """
+ package = _common.get_package(package)
+ return list(item.name for item in _common.from_package(package).iterdir())
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py3.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py3.py
new file mode 100644
index 0000000000..ffeb616d6e
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py3.py
@@ -0,0 +1,160 @@
+import os
+import io
+
+from . import _common
+from contextlib import suppress
+from importlib.abc import ResourceLoader
+from io import BytesIO, TextIOWrapper
+from pathlib import Path
+from types import ModuleType
+from typing import Iterable, Iterator, Optional, Set, Union # noqa: F401
+from typing import cast
+from typing.io import BinaryIO, TextIO
+from collections.abc import Sequence
+from functools import singledispatch
+
+if False: # TYPE_CHECKING
+ from typing import ContextManager
+
+Package = Union[ModuleType, str]
+Resource = Union[str, os.PathLike]
+
+
+def open_binary(package: Package, resource: Resource) -> BinaryIO:
+ """Return a file-like object opened for binary reading of the resource."""
+ resource = _common.normalize_path(resource)
+ package = _common.get_package(package)
+ reader = _common.get_resource_reader(package)
+ if reader is not None:
+ return reader.open_resource(resource)
+ # Using pathlib doesn't work well here due to the lack of 'strict'
+ # argument for pathlib.Path.resolve() prior to Python 3.6.
+ if package.__spec__.submodule_search_locations is not None:
+ paths = package.__spec__.submodule_search_locations
+ elif package.__spec__.origin is not None:
+ paths = [os.path.dirname(os.path.abspath(package.__spec__.origin))]
+
+ for package_path in paths:
+ full_path = os.path.join(package_path, resource)
+ try:
+ return open(full_path, mode='rb')
+ except OSError:
+ # Just assume the loader is a resource loader; all the relevant
+ # importlib.machinery loaders are and an AttributeError for
+ # get_data() will make it clear what is needed from the loader.
+ loader = cast(ResourceLoader, package.__spec__.loader)
+ data = None
+ if hasattr(package.__spec__.loader, 'get_data'):
+ with suppress(OSError):
+ data = loader.get_data(full_path)
+ if data is not None:
+ return BytesIO(data)
+
+ raise FileNotFoundError('{!r} resource not found in {!r}'.format(
+ resource, package.__spec__.name))
+
+
+def open_text(package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict') -> TextIO:
+ """Return a file-like object opened for text reading of the resource."""
+ return TextIOWrapper(
+ open_binary(package, resource), encoding=encoding, errors=errors)
+
+
+def read_binary(package: Package, resource: Resource) -> bytes:
+ """Return the binary contents of the resource."""
+ with open_binary(package, resource) as fp:
+ return fp.read()
+
+
+def read_text(package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict') -> str:
+ """Return the decoded string of the resource.
+
+ The decoding-related arguments have the same semantics as those of
+ bytes.decode().
+ """
+ with open_text(package, resource, encoding, errors) as fp:
+ return fp.read()
+
+
+def path(
+ package: Package, resource: Resource,
+ ) -> 'ContextManager[Path]':
+ """A context manager providing a file path object to the resource.
+
+ If the resource does not already exist on its own on the file system,
+ a temporary file will be created. If the file was created, the file
+ will be deleted upon exiting the context manager (no exception is
+ raised if the file was deleted prior to the context manager
+ exiting).
+ """
+ reader = _common.get_resource_reader(_common.get_package(package))
+ return (
+ _path_from_reader(reader, _common.normalize_path(resource))
+ if reader else
+ _common.as_file(
+ _common.files(package).joinpath(_common.normalize_path(resource)))
+ )
+
+
+def _path_from_reader(reader, resource):
+ return _path_from_resource_path(reader, resource) or \
+ _path_from_open_resource(reader, resource)
+
+
+def _path_from_resource_path(reader, resource):
+ with suppress(FileNotFoundError):
+ return Path(reader.resource_path(resource))
+
+
+def _path_from_open_resource(reader, resource):
+ saved = io.BytesIO(reader.open_resource(resource).read())
+ return _common._tempfile(saved.read, suffix=resource)
+
+
+def is_resource(package: Package, name: str) -> bool:
+ """True if `name` is a resource inside `package`.
+
+ Directories are *not* resources.
+ """
+ package = _common.get_package(package)
+ _common.normalize_path(name)
+ reader = _common.get_resource_reader(package)
+ if reader is not None:
+ return reader.is_resource(name)
+ package_contents = set(contents(package))
+ if name not in package_contents:
+ return False
+ return (_common.from_package(package) / name).is_file()
+
+
+def contents(package: Package) -> Iterable[str]:
+ """Return an iterable of entries in `package`.
+
+ Note that not all entries are resources. Specifically, directories are
+ not considered resources. Use `is_resource()` on each entry returned here
+ to check if it is a resource or not.
+ """
+ package = _common.get_package(package)
+ reader = _common.get_resource_reader(package)
+ if reader is not None:
+ return _ensure_sequence(reader.contents())
+    traversable = _common.from_package(package)
+    if traversable.is_dir():
+        return list(item.name for item in traversable.iterdir())
+ return []
+
+
+@singledispatch
+def _ensure_sequence(iterable):
+ return list(iterable)
+
+
+@_ensure_sequence.register(Sequence)
+def _(iterable):
+ return iterable
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/abc.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/abc.py
new file mode 100644
index 0000000000..18bc4ef876
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/abc.py
@@ -0,0 +1,142 @@
+from __future__ import absolute_import
+
+import abc
+
+from ._compat import ABC, FileNotFoundError, runtime_checkable, Protocol
+
+# Use mypy's comment syntax for Python 2 compatibility
+try:
+ from typing import BinaryIO, Iterable, Text
+except ImportError:
+ pass
+
+
+class ResourceReader(ABC):
+ """Abstract base class for loaders to provide resource reading support."""
+
+ @abc.abstractmethod
+ def open_resource(self, resource):
+ # type: (Text) -> BinaryIO
+ """Return an opened, file-like object for binary reading.
+
+ The 'resource' argument is expected to represent only a file name.
+ If the resource cannot be found, FileNotFoundError is raised.
+ """
+ # This deliberately raises FileNotFoundError instead of
+ # NotImplementedError so that if this method is accidentally called,
+ # it'll still do the right thing.
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def resource_path(self, resource):
+ # type: (Text) -> Text
+ """Return the file system path to the specified resource.
+
+ The 'resource' argument is expected to represent only a file name.
+ If the resource does not exist on the file system, raise
+ FileNotFoundError.
+ """
+ # This deliberately raises FileNotFoundError instead of
+ # NotImplementedError so that if this method is accidentally called,
+ # it'll still do the right thing.
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def is_resource(self, path):
+ # type: (Text) -> bool
+ """Return True if the named 'path' is a resource.
+
+ Files are resources, directories are not.
+ """
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def contents(self):
+ # type: () -> Iterable[str]
+ """Return an iterable of entries in `package`."""
+ raise FileNotFoundError
+
+
+@runtime_checkable
+class Traversable(Protocol):
+ """
+ An object with a subset of pathlib.Path methods suitable for
+ traversing directories and opening files.
+ """
+
+ @abc.abstractmethod
+ def iterdir(self):
+ """
+ Yield Traversable objects in self
+ """
+
+ @abc.abstractmethod
+ def read_bytes(self):
+ """
+ Read contents of self as bytes
+ """
+
+ @abc.abstractmethod
+ def read_text(self, encoding=None):
+ """
+ Read contents of self as text
+ """
+
+ @abc.abstractmethod
+ def is_dir(self):
+ """
+ Return True if self is a dir
+ """
+
+ @abc.abstractmethod
+ def is_file(self):
+ """
+ Return True if self is a file
+ """
+
+ @abc.abstractmethod
+ def joinpath(self, child):
+ """
+ Return Traversable child in self
+ """
+
+ @abc.abstractmethod
+ def __truediv__(self, child):
+ """
+ Return Traversable child in self
+ """
+
+ @abc.abstractmethod
+ def open(self, mode='r', *args, **kwargs):
+ """
+ mode may be 'r' or 'rb' to open as text or binary. Return a handle
+ suitable for reading (same as pathlib.Path.open).
+
+ When opening as text, accepts encoding parameters such as those
+ accepted by io.TextIOWrapper.
+ """
+
+ @abc.abstractproperty
+ def name(self):
+ # type: () -> str
+ """
+ The base name of this object without any parent references.
+ """
+
+
+class TraversableResources(ResourceReader):
+ @abc.abstractmethod
+ def files(self):
+ """Return a Traversable object for the loaded package."""
+
+ def open_resource(self, resource):
+ return self.files().joinpath(resource).open('rb')
+
+ def resource_path(self, resource):
+ raise FileNotFoundError(resource)
+
+ def is_resource(self, path):
+ return self.files().joinpath(path).is_file()
+
+ def contents(self):
+ return (item.name for item in self.files().iterdir())
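+
+
+# Illustrative sketch (not used by this package): a concrete
+# TraversableResources only needs to supply files(); the open_resource(),
+# is_resource() and contents() defaults above are inherited. The 'pathlib'
+# import and the DirectoryReader name are assumptions made for the example.
+#
+#     import pathlib
+#
+#     class DirectoryReader(TraversableResources):
+#         def __init__(self, root):
+#             self._root = pathlib.Path(root)
+#
+#         def files(self):
+#             return self._root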
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/py.typed b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/py.typed
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/readers.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/readers.py
new file mode 100644
index 0000000000..ce9c0caec4
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/readers.py
@@ -0,0 +1,123 @@
+import os.path
+
+from collections import OrderedDict
+
+from . import abc
+
+from ._compat import Path, ZipPath
+from ._compat import FileNotFoundError, NotADirectoryError
+
+
+class FileReader(abc.TraversableResources):
+ def __init__(self, loader):
+ self.path = Path(loader.path).parent
+
+ def resource_path(self, resource):
+ """
+ Return the file system path to prevent
+ `resources.path()` from creating a temporary
+ copy.
+ """
+ return str(self.path.joinpath(resource))
+
+ def files(self):
+ return self.path
+
+
+class ZipReader(abc.TraversableResources):
+ def __init__(self, loader, module):
+ _, _, name = module.rpartition('.')
+ self.prefix = loader.prefix.replace('\\', '/') + name + '/'
+ self.archive = loader.archive
+
+ def open_resource(self, resource):
+ try:
+ return super().open_resource(resource)
+ except KeyError as exc:
+ raise FileNotFoundError(exc.args[0])
+
+ def is_resource(self, path):
+ # workaround for `zipfile.Path.is_file` returning true
+ # for non-existent paths.
+ target = self.files().joinpath(path)
+ return target.is_file() and target.exists()
+
+ def files(self):
+ return ZipPath(self.archive, self.prefix)
+
+
+class MultiplexedPath(abc.Traversable):
+ """
+ Given a series of Traversable objects, implement a merged
+ version of the interface across all objects. Useful for
+ namespace packages which may be multihomed at a single
+ name.
+ """
+ def __init__(self, *paths):
+ paths = list(OrderedDict.fromkeys(paths)) # remove duplicates
+ self._paths = list(map(Path, paths))
+ if not self._paths:
+ message = 'MultiplexedPath must contain at least one path'
+ raise FileNotFoundError(message)
+ if any(not path.is_dir() for path in self._paths):
+ raise NotADirectoryError(
+ 'MultiplexedPath only supports directories')
+
+ def iterdir(self):
+ visited = []
+ for path in self._paths:
+ for file in path.iterdir():
+ if file.name in visited:
+ continue
+ visited.append(file.name)
+ yield file
+
+ def read_bytes(self):
+ raise FileNotFoundError('{} is not a file'.format(self))
+
+ def read_text(self, *args, **kwargs):
+ raise FileNotFoundError('{} is not a file'.format(self))
+
+ def is_dir(self):
+ return True
+
+ def is_file(self):
+ return False
+
+ def joinpath(self, child):
+ # first try to find child in current paths
+ for file in self.iterdir():
+ if file.name == child:
+ return file
+ # if it does not exist, construct it with the first path
+ return self._paths[0] / child
+
+ __truediv__ = joinpath
+
+ def open(self, *args, **kwargs):
+ raise FileNotFoundError('{} is not a file'.format(self))
+
+ @property
+ def name(self):
+ return os.path.basename(self._paths[0])
+
+ def __repr__(self):
+ return 'MultiplexedPath({})'.format(
+ ', '.join("'{}'".format(path) for path in self._paths))
+
+
+class NamespaceReader(abc.TraversableResources):
+ def __init__(self, namespace_path):
+ if 'NamespacePath' not in str(namespace_path):
+ raise ValueError('Invalid path')
+ self.path = MultiplexedPath(*list(namespace_path))
+
+ def resource_path(self, resource):
+ """
+ Return the file system path to prevent
+ `resources.path()` from creating a temporary
+ copy.
+ """
+ return str(self.path.joinpath(resource))
+
+ def files(self):
+ return self.path
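+
+
+# Usage sketch (illustrative only; the directory names are assumptions made
+# for the example): MultiplexedPath merges several directories into one
+# Traversable, which is how NamespaceReader exposes a namespace package
+# spread over multiple locations.
+#
+#     merged = MultiplexedPath('/venv/site-packages/ns_pkg',
+#                              '/src/plugins/ns_pkg')
+#     names = sorted(item.name for item in merged.iterdir())
+#     child = merged / 'data'  # returns the first existing match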
diff --git a/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/trees.py b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/trees.py
new file mode 100644
index 0000000000..ba42bb55b7
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/trees.py
@@ -0,0 +1,6 @@
+# for compatibility with 1.1, continue to expose as_file here.
+
+from ._common import as_file
+
+
+__all__ = ['as_file']
diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/DESCRIPTION.rst b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/DESCRIPTION.rst
new file mode 100644
index 0000000000..74e3bab198
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/DESCRIPTION.rst
@@ -0,0 +1,61 @@
+The `old pathlib <https://bitbucket.org/pitrou/pathlib>`_
+module on bitbucket is in bugfix-only mode.
+The goal of pathlib2 is to provide a backport of
+`standard pathlib <http://docs.python.org/dev/library/pathlib.html>`_
+module which tracks the standard library module,
+so all the newest features of the standard pathlib can be
+used also on older Python versions.
+
+Download
+--------
+
+Standalone releases are available on PyPI:
+http://pypi.python.org/pypi/pathlib2/
+
+Development
+-----------
+
+The main development takes place in the Python standard library: see
+the `Python developer's guide <http://docs.python.org/devguide/>`_.
+In particular, new features should be submitted to the
+`Python bug tracker <http://bugs.python.org/>`_.
+
+Issues that occur in this backport, but not in the
+standard Python pathlib module, can be submitted to
+the `pathlib2 bug tracker <https://github.com/mcmtroffaes/pathlib2/issues>`_.
+
+Documentation
+-------------
+
+Refer to the
+`standard pathlib <http://docs.python.org/dev/library/pathlib.html>`_
+documentation.
+
+Known Issues
+------------
+
+For historical reasons, pathlib2 still uses bytes to represent file paths internally.
+Unfortunately, on Windows with Python 2.7, the file system encoding (``mbcs``)
+has poor support for non-ASCII characters
+and can silently replace them without warning.
+For example, ``u'тест'.encode(sys.getfilesystemencoding())`` results in ``????``,
+which is useless.
+
+Therefore, on Windows with Python 2.7, until this problem is fixed upstream,
+unfortunately you cannot rely on pathlib2 to support the full unicode range for filenames.
+See `issue #56 <https://github.com/mcmtroffaes/pathlib2/issues/56>`_ for more details.
+
+.. |travis| image:: https://travis-ci.org/mcmtroffaes/pathlib2.png?branch=develop
+ :target: https://travis-ci.org/mcmtroffaes/pathlib2
+ :alt: travis-ci
+
+.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/baddx3rpet2wyi2c?svg=true
+ :target: https://ci.appveyor.com/project/mcmtroffaes/pathlib2
+ :alt: appveyor
+
+.. |codecov| image:: https://codecov.io/gh/mcmtroffaes/pathlib2/branch/develop/graph/badge.svg
+ :target: https://codecov.io/gh/mcmtroffaes/pathlib2
+ :alt: codecov
+
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/METADATA
new file mode 100644
index 0000000000..df7284e078
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/METADATA
@@ -0,0 +1,88 @@
+Metadata-Version: 2.0
+Name: pathlib2
+Version: 2.3.5
+Summary: Object-oriented filesystem paths
+Home-page: https://github.com/mcmtroffaes/pathlib2
+Author: Matthias C. M. Troffaes
+Author-email: matthias.troffaes@gmail.com
+License: MIT
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: System :: Filesystems
+Requires-Dist: six
+Requires-Dist: scandir; python_version<"3.5"
+
+The `old pathlib <https://bitbucket.org/pitrou/pathlib>`_
+module on bitbucket is in bugfix-only mode.
+The goal of pathlib2 is to provide a backport of
+`standard pathlib <http://docs.python.org/dev/library/pathlib.html>`_
+module which tracks the standard library module,
+so all the newest features of the standard pathlib can be
+used also on older Python versions.
+
+Download
+--------
+
+Standalone releases are available on PyPI:
+http://pypi.python.org/pypi/pathlib2/
+
+Development
+-----------
+
+The main development takes place in the Python standard library: see
+the `Python developer's guide <http://docs.python.org/devguide/>`_.
+In particular, new features should be submitted to the
+`Python bug tracker <http://bugs.python.org/>`_.
+
+Issues that occur in this backport, but not in the
+standard Python pathlib module, can be submitted to
+the `pathlib2 bug tracker <https://github.com/mcmtroffaes/pathlib2/issues>`_.
+
+Documentation
+-------------
+
+Refer to the
+`standard pathlib <http://docs.python.org/dev/library/pathlib.html>`_
+documentation.
+
+Known Issues
+------------
+
+For historical reasons, pathlib2 still uses bytes to represent file paths internally.
+Unfortunately, on Windows with Python 2.7, the file system encoding (``mbcs``)
+has poor support for non-ASCII characters
+and can silently replace them without warning.
+For example, ``u'тест'.encode(sys.getfilesystemencoding())`` results in ``????``,
+which is useless.
+
+Therefore, on Windows with Python 2.7, until this problem is fixed upstream,
+unfortunately you cannot rely on pathlib2 to support the full unicode range for filenames.
+See `issue #56 <https://github.com/mcmtroffaes/pathlib2/issues/56>`_ for more details.
+
+.. |travis| image:: https://travis-ci.org/mcmtroffaes/pathlib2.png?branch=develop
+ :target: https://travis-ci.org/mcmtroffaes/pathlib2
+ :alt: travis-ci
+
+.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/baddx3rpet2wyi2c?svg=true
+ :target: https://ci.appveyor.com/project/mcmtroffaes/pathlib2
+ :alt: appveyor
+
+.. |codecov| image:: https://codecov.io/gh/mcmtroffaes/pathlib2/branch/develop/graph/badge.svg
+ :target: https://codecov.io/gh/mcmtroffaes/pathlib2
+ :alt: codecov
+
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/RECORD
new file mode 100644
index 0000000000..6f922ae1f7
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/RECORD
@@ -0,0 +1,7 @@
+pathlib2-2.3.5.dist-info/metadata.json,sha256=yGoZ-uMKSmkZuZplsz2mNc8SWNIVEbnBaSyya01u5PI,1177
+pathlib2-2.3.5.dist-info/top_level.txt,sha256=tNPkisFiGBFsPUnCIHg62vSFlkx_1NO86Id8lbJmfFQ,9
+pathlib2-2.3.5.dist-info/METADATA,sha256=PEsNR-yYpbPUheyBje2_-GdAJfwXPDtWMSeSsR9VMY0,3300
+pathlib2-2.3.5.dist-info/RECORD,,
+pathlib2-2.3.5.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110
+pathlib2-2.3.5.dist-info/DESCRIPTION.rst,sha256=E6WnieIR9MTnqUQ1746RCpdq3fqlkvqX0Z51-Wpxga8,2250
+pathlib2/__init__.py,sha256=NBfu5wacps1y1YtlXVSPJ8FbE4WtIXucrp5uOYNOO-U,59133
diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/WHEEL
new file mode 100644
index 0000000000..9dff69d861
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.24.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/metadata.json b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/metadata.json
new file mode 100644
index 0000000000..575c5271d2
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/metadata.json
@@ -0,0 +1 @@
+{"license": "MIT", "name": "pathlib2", "metadata_version": "2.0", "generator": "bdist_wheel (0.24.0)", "summary": "Object-oriented filesystem paths", "run_requires": [{"environment": "python_version<\"3.5\"", "requires": ["scandir"]}, {"requires": ["six"]}], "version": "2.3.5", "extensions": {"python.details": {"project_urls": {"Home": "https://github.com/mcmtroffaes/pathlib2"}, "document_names": {"description": "DESCRIPTION.rst"}, "contacts": [{"role": "author", "email": "matthias.troffaes@gmail.com", "name": "Matthias C. M. Troffaes"}]}}, "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Topic :: Software Development :: Libraries", "Topic :: System :: Filesystems"], "extras": []} \ No newline at end of file
diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/top_level.txt
new file mode 100644
index 0000000000..83f3ebe0dd
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info/top_level.txt
@@ -0,0 +1 @@
+pathlib2
diff --git a/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2/__init__.py b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2/__init__.py
new file mode 100644
index 0000000000..d5a47a66c6
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2/__init__.py
@@ -0,0 +1,1809 @@
+# Copyright (c) 2014-2017 Matthias C. M. Troffaes
+# Copyright (c) 2012-2014 Antoine Pitrou and contributors
+# Distributed under the terms of the MIT License.
+
+import ctypes
+import fnmatch
+import functools
+import io
+import ntpath
+import os
+import posixpath
+import re
+import six
+import sys
+
+from errno import EINVAL, ENOENT, ENOTDIR, EBADF
+from errno import EEXIST, EPERM, EACCES
+from operator import attrgetter
+from stat import (
+ S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO)
+
+try:
+ from collections.abc import Sequence
+except ImportError:
+ from collections import Sequence
+
+try:
+ from urllib import quote as urlquote_from_bytes
+except ImportError:
+ from urllib.parse import quote_from_bytes as urlquote_from_bytes
+
+
+try:
+ intern = intern
+except NameError:
+ intern = sys.intern
+
+supports_symlinks = True
+if os.name == 'nt':
+ import nt
+ if sys.getwindowsversion()[:2] >= (6, 0) and sys.version_info >= (3, 2):
+ from nt import _getfinalpathname
+ else:
+ supports_symlinks = False
+ _getfinalpathname = None
+else:
+ nt = None
+
+try:
+ from os import scandir as os_scandir
+except ImportError:
+ from scandir import scandir as os_scandir
+
+__all__ = [
+ "PurePath", "PurePosixPath", "PureWindowsPath",
+ "Path", "PosixPath", "WindowsPath",
+ ]
+
+#
+# Internals
+#
+
+# EBADF - guard against macOS `stat` throwing EBADF
+_IGNORED_ERROS = (ENOENT, ENOTDIR, EBADF)
+
+_IGNORED_WINERRORS = (
+ 21, # ERROR_NOT_READY - drive exists but is not accessible
+)
+
+
+def _ignore_error(exception):
+ return (getattr(exception, 'errno', None) in _IGNORED_ERROS or
+ getattr(exception, 'winerror', None) in _IGNORED_WINERRORS)
+
+
+def _py2_fsencode(parts):
+ # py2 => minimal unicode support
+ assert six.PY2
+ return [part.encode('ascii') if isinstance(part, six.text_type)
+ else part for part in parts]
+
+
+def _try_except_fileexistserror(try_func, except_func, else_func=None):
+ if sys.version_info >= (3, 3):
+ try:
+ try_func()
+ except FileExistsError as exc:
+ except_func(exc)
+ else:
+ if else_func is not None:
+ else_func()
+ else:
+ try:
+ try_func()
+ except EnvironmentError as exc:
+ if exc.errno != EEXIST:
+ raise
+ else:
+ except_func(exc)
+ else:
+ if else_func is not None:
+ else_func()
+
+
+def _try_except_filenotfounderror(try_func, except_func):
+ if sys.version_info >= (3, 3):
+ try:
+ try_func()
+ except FileNotFoundError as exc:
+ except_func(exc)
+ elif os.name != 'nt':
+ try:
+ try_func()
+ except EnvironmentError as exc:
+ if exc.errno != ENOENT:
+ raise
+ else:
+ except_func(exc)
+ else:
+ try:
+ try_func()
+ except WindowsError as exc:
+ # errno contains winerror
+ # 2 = file not found
+ # 3 = path not found
+ if exc.errno not in (2, 3):
+ raise
+ else:
+ except_func(exc)
+ except EnvironmentError as exc:
+ if exc.errno != ENOENT:
+ raise
+ else:
+ except_func(exc)
+
+
+def _try_except_permissionerror_iter(try_iter, except_iter):
+ if sys.version_info >= (3, 3):
+ try:
+ for x in try_iter():
+ yield x
+ except PermissionError as exc:
+ for x in except_iter(exc):
+ yield x
+ else:
+ try:
+ for x in try_iter():
+ yield x
+ except EnvironmentError as exc:
+ if exc.errno not in (EPERM, EACCES):
+ raise
+ else:
+ for x in except_iter(exc):
+ yield x
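+
+
+# Typical call pattern for the helpers above (illustrative only; 'some_path'
+# is an assumption made for the example). They exist because FileExistsError,
+# FileNotFoundError and PermissionError are only available on Python 3.3+,
+# so callers pass the try/except bodies as callables instead of naming the
+# exception classes directly:
+#
+#     def _try():
+#         os.unlink(some_path)
+#
+#     def _ignore(exc):
+#         pass
+#
+#     _try_except_filenotfounderror(_try, _ignore)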
+
+
+def _win32_get_unique_path_id(path):
+ # get file information, needed for samefile on older Python versions
+ # see http://timgolden.me.uk/python/win32_how_do_i/
+ # see_if_two_files_are_the_same_file.html
+ from ctypes import POINTER, Structure, WinError
+ from ctypes.wintypes import DWORD, HANDLE, BOOL
+
+ class FILETIME(Structure):
+ _fields_ = [("datetime_lo", DWORD),
+ ("datetime_hi", DWORD),
+ ]
+
+ class BY_HANDLE_FILE_INFORMATION(Structure):
+ _fields_ = [("attributes", DWORD),
+ ("created_at", FILETIME),
+ ("accessed_at", FILETIME),
+ ("written_at", FILETIME),
+ ("volume", DWORD),
+ ("file_hi", DWORD),
+ ("file_lo", DWORD),
+ ("n_links", DWORD),
+ ("index_hi", DWORD),
+ ("index_lo", DWORD),
+ ]
+
+ CreateFile = ctypes.windll.kernel32.CreateFileW
+ CreateFile.argtypes = [ctypes.c_wchar_p, DWORD, DWORD, ctypes.c_void_p,
+ DWORD, DWORD, HANDLE]
+ CreateFile.restype = HANDLE
+ GetFileInformationByHandle = (
+ ctypes.windll.kernel32.GetFileInformationByHandle)
+ GetFileInformationByHandle.argtypes = [
+ HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)]
+ GetFileInformationByHandle.restype = BOOL
+ CloseHandle = ctypes.windll.kernel32.CloseHandle
+ CloseHandle.argtypes = [HANDLE]
+ CloseHandle.restype = BOOL
+ GENERIC_READ = 0x80000000
+ FILE_SHARE_READ = 0x00000001
+ FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
+ OPEN_EXISTING = 3
+ if os.path.isdir(path):
+ flags = FILE_FLAG_BACKUP_SEMANTICS
+ else:
+ flags = 0
+ hfile = CreateFile(path, GENERIC_READ, FILE_SHARE_READ,
+ None, OPEN_EXISTING, flags, None)
+ if hfile == 0xffffffff:
+ if sys.version_info >= (3, 3):
+ raise FileNotFoundError(path)
+ else:
+ exc = OSError("file not found: %r" % path)
+ exc.errno = ENOENT
+ raise exc
+ info = BY_HANDLE_FILE_INFORMATION()
+ success = GetFileInformationByHandle(hfile, info)
+ CloseHandle(hfile)
+ if success == 0:
+ raise WinError()
+ return info.volume, info.index_hi, info.index_lo
+
+
+def _is_wildcard_pattern(pat):
+ # Whether this pattern needs actual matching using fnmatch, or can
+ # be looked up directly as a file.
+ return "*" in pat or "?" in pat or "[" in pat
+
+
+class _Flavour(object):
+
+ """A flavour implements a particular (platform-specific) set of path
+ semantics."""
+
+ def __init__(self):
+ self.join = self.sep.join
+
+ def parse_parts(self, parts):
+ if six.PY2:
+ parts = _py2_fsencode(parts)
+ parsed = []
+ sep = self.sep
+ altsep = self.altsep
+ drv = root = ''
+ it = reversed(parts)
+ for part in it:
+ if not part:
+ continue
+ if altsep:
+ part = part.replace(altsep, sep)
+ drv, root, rel = self.splitroot(part)
+ if sep in rel:
+ for x in reversed(rel.split(sep)):
+ if x and x != '.':
+ parsed.append(intern(x))
+ else:
+ if rel and rel != '.':
+ parsed.append(intern(rel))
+ if drv or root:
+ if not drv:
+ # If no drive is present, try to find one in the previous
+ # parts. This makes the result of parsing e.g.
+ # ("C:", "/", "a") reasonably intuitive.
+ for part in it:
+ if not part:
+ continue
+ if altsep:
+ part = part.replace(altsep, sep)
+ drv = self.splitroot(part)[0]
+ if drv:
+ break
+ break
+ if drv or root:
+ parsed.append(drv + root)
+ parsed.reverse()
+ return drv, root, parsed
+
+ def join_parsed_parts(self, drv, root, parts, drv2, root2, parts2):
+ """
+ Join the two paths represented by the respective
+ (drive, root, parts) tuples. Return a new (drive, root, parts) tuple.
+ """
+ if root2:
+ if not drv2 and drv:
+ return drv, root2, [drv + root2] + parts2[1:]
+ elif drv2:
+ if drv2 == drv or self.casefold(drv2) == self.casefold(drv):
+ # Same drive => second path is relative to the first
+ return drv, root, parts + parts2[1:]
+ else:
+ # Second path is non-anchored (common case)
+ return drv, root, parts + parts2
+ return drv2, root2, parts2
+
+
+class _WindowsFlavour(_Flavour):
+ # Reference for Windows paths can be found at
+ # http://msdn.microsoft.com/en-us/library/aa365247%28v=vs.85%29.aspx
+
+ sep = '\\'
+ altsep = '/'
+ has_drv = True
+ pathmod = ntpath
+
+ is_supported = (os.name == 'nt')
+
+ drive_letters = set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ')
+ ext_namespace_prefix = '\\\\?\\'
+
+ reserved_names = (
+ set(['CON', 'PRN', 'AUX', 'NUL']) |
+ set(['COM%d' % i for i in range(1, 10)]) |
+ set(['LPT%d' % i for i in range(1, 10)])
+ )
+
+ # Interesting findings about extended paths:
+ # - '\\?\c:\a', '//?/c:\a' and '//?/c:/a' are all supported
+ # but '\\?\c:/a' is not
+ # - extended paths are always absolute; "relative" extended paths will
+ # fail.
+
+ def splitroot(self, part, sep=sep):
+ first = part[0:1]
+ second = part[1:2]
+ if (second == sep and first == sep):
+ # XXX extended paths should also disable the collapsing of "."
+ # components (according to MSDN docs).
+ prefix, part = self._split_extended_path(part)
+ first = part[0:1]
+ second = part[1:2]
+ else:
+ prefix = ''
+ third = part[2:3]
+ if (second == sep and first == sep and third != sep):
+ # is a UNC path:
+ # vvvvvvvvvvvvvvvvvvvvv root
+ # \\machine\mountpoint\directory\etc\...
+ # directory ^^^^^^^^^^^^^^
+ index = part.find(sep, 2)
+ if index != -1:
+ index2 = part.find(sep, index + 1)
+ # a UNC path can't have two slashes in a row
+ # (after the initial two)
+ if index2 != index + 1:
+ if index2 == -1:
+ index2 = len(part)
+ if prefix:
+ return prefix + part[1:index2], sep, part[index2 + 1:]
+ else:
+ return part[:index2], sep, part[index2 + 1:]
+ drv = root = ''
+ if second == ':' and first in self.drive_letters:
+ drv = part[:2]
+ part = part[2:]
+ first = third
+ if first == sep:
+ root = first
+ part = part.lstrip(sep)
+ return prefix + drv, root, part
+
+ def casefold(self, s):
+ return s.lower()
+
+ def casefold_parts(self, parts):
+ return [p.lower() for p in parts]
+
+ def resolve(self, path, strict=False):
+ s = str(path)
+ if not s:
+ return os.getcwd()
+ previous_s = None
+ if _getfinalpathname is not None:
+ if strict:
+ return self._ext_to_normal(_getfinalpathname(s))
+ else:
+ # End of the path after the first one not found
+ tail_parts = []
+
+ def _try_func():
+ result[0] = self._ext_to_normal(_getfinalpathname(s))
+ # if there was no exception, set flag to 0
+ result[1] = 0
+
+ def _exc_func(exc):
+ pass
+
+ while True:
+ result = [None, 1]
+ _try_except_filenotfounderror(_try_func, _exc_func)
+ if result[1] == 1: # file not found exception raised
+ previous_s = s
+ s, tail = os.path.split(s)
+ tail_parts.append(tail)
+ if previous_s == s:
+ return path
+ else:
+ s = result[0]
+ return os.path.join(s, *reversed(tail_parts))
+ # Means fallback on absolute
+ return None
+
+ def _split_extended_path(self, s, ext_prefix=ext_namespace_prefix):
+ prefix = ''
+ if s.startswith(ext_prefix):
+ prefix = s[:4]
+ s = s[4:]
+ if s.startswith('UNC\\'):
+ prefix += s[:3]
+ s = '\\' + s[3:]
+ return prefix, s
+
+ def _ext_to_normal(self, s):
+ # Turn back an extended path into a normal DOS-like path
+ return self._split_extended_path(s)[1]
+
+ def is_reserved(self, parts):
+ # NOTE: the rules for reserved names seem somewhat complicated
+ # (e.g. r"..\NUL" is reserved but not r"foo\NUL").
+ # We err on the side of caution and return True for paths which are
+ # not considered reserved by Windows.
+ if not parts:
+ return False
+ if parts[0].startswith('\\\\'):
+ # UNC paths are never reserved
+ return False
+ return parts[-1].partition('.')[0].upper() in self.reserved_names
+
+ def make_uri(self, path):
+ # Under Windows, file URIs use the UTF-8 encoding.
+ drive = path.drive
+ if len(drive) == 2 and drive[1] == ':':
+ # It's a path on a local drive => 'file:///c:/a/b'
+ rest = path.as_posix()[2:].lstrip('/')
+ return 'file:///%s/%s' % (
+ drive, urlquote_from_bytes(rest.encode('utf-8')))
+ else:
+ # It's a path on a network drive => 'file://host/share/a/b'
+ return 'file:' + urlquote_from_bytes(
+ path.as_posix().encode('utf-8'))
+
+ def gethomedir(self, username):
+ if 'HOME' in os.environ:
+ userhome = os.environ['HOME']
+ elif 'USERPROFILE' in os.environ:
+ userhome = os.environ['USERPROFILE']
+ elif 'HOMEPATH' in os.environ:
+ try:
+ drv = os.environ['HOMEDRIVE']
+ except KeyError:
+ drv = ''
+ userhome = drv + os.environ['HOMEPATH']
+ else:
+ raise RuntimeError("Can't determine home directory")
+
+ if username:
+ # Try to guess the user's home directory. By default all user
+ # directories are located in the same place and are named after
+ # the corresponding usernames. If the current user's home directory
+ # points to a nonstandard place, this guess is likely wrong.
+ if os.environ['USERNAME'] != username:
+ drv, root, parts = self.parse_parts((userhome,))
+ if parts[-1] != os.environ['USERNAME']:
+ raise RuntimeError("Can't determine home directory "
+ "for %r" % username)
+ parts[-1] = username
+ if drv or root:
+ userhome = drv + root + self.join(parts[1:])
+ else:
+ userhome = self.join(parts)
+ return userhome
+
+
+class _PosixFlavour(_Flavour):
+ sep = '/'
+ altsep = ''
+ has_drv = False
+ pathmod = posixpath
+
+ is_supported = (os.name != 'nt')
+
+ def splitroot(self, part, sep=sep):
+ if part and part[0] == sep:
+ stripped_part = part.lstrip(sep)
+ # According to POSIX path resolution:
+ # http://pubs.opengroup.org/onlinepubs/009695399/basedefs/
+ # xbd_chap04.html#tag_04_11
+ # "A pathname that begins with two successive slashes may be
+ # interpreted in an implementation-defined manner, although more
+ # than two leading slashes shall be treated as a single slash".
+ if len(part) - len(stripped_part) == 2:
+ return '', sep * 2, stripped_part
+ else:
+ return '', sep, stripped_part
+ else:
+ return '', '', part
+
+ def casefold(self, s):
+ return s
+
+ def casefold_parts(self, parts):
+ return parts
+
+ def resolve(self, path, strict=False):
+ sep = self.sep
+ accessor = path._accessor
+ seen = {}
+
+ def _resolve(path, rest):
+ if rest.startswith(sep):
+ path = ''
+
+ for name in rest.split(sep):
+ if not name or name == '.':
+ # current dir
+ continue
+ if name == '..':
+ # parent dir
+ path, _, _ = path.rpartition(sep)
+ continue
+ newpath = path + sep + name
+ if newpath in seen:
+ # Already seen this path
+ path = seen[newpath]
+ if path is not None:
+ # use cached value
+ continue
+ # The symlink is not resolved, so we must have a symlink
+ # loop.
+ raise RuntimeError("Symlink loop from %r" % newpath)
+ # Resolve the symbolic link
+ try:
+ target = accessor.readlink(newpath)
+ except OSError as e:
+ if e.errno != EINVAL and strict:
+ raise
+ # Not a symlink, or non-strict mode. We just leave the path
+ # untouched.
+ path = newpath
+ else:
+ seen[newpath] = None # not resolved symlink
+ path = _resolve(path, target)
+ seen[newpath] = path # resolved symlink
+
+ return path
+ # NOTE: according to POSIX, getcwd() cannot contain path components
+ # which are symlinks.
+ base = '' if path.is_absolute() else os.getcwd()
+ return _resolve(base, str(path)) or sep
+
+ def is_reserved(self, parts):
+ return False
+
+ def make_uri(self, path):
+ # We represent the path using the local filesystem encoding,
+ # for portability to other applications.
+ bpath = bytes(path)
+ return 'file://' + urlquote_from_bytes(bpath)
+
+ def gethomedir(self, username):
+ if not username:
+ try:
+ return os.environ['HOME']
+ except KeyError:
+ import pwd
+ return pwd.getpwuid(os.getuid()).pw_dir
+ else:
+ import pwd
+ try:
+ return pwd.getpwnam(username).pw_dir
+ except KeyError:
+ raise RuntimeError("Can't determine home directory "
+ "for %r" % username)
+
+
+_windows_flavour = _WindowsFlavour()
+_posix_flavour = _PosixFlavour()
+
+
+class _Accessor:
+
+ """An accessor implements a particular (system-specific or not) way of
+ accessing paths on the filesystem."""
+
+
+class _NormalAccessor(_Accessor):
+
+ def _wrap_strfunc(strfunc):
+ @functools.wraps(strfunc)
+ def wrapped(pathobj, *args):
+ return strfunc(str(pathobj), *args)
+ return staticmethod(wrapped)
+
+ def _wrap_binary_strfunc(strfunc):
+ @functools.wraps(strfunc)
+ def wrapped(pathobjA, pathobjB, *args):
+ return strfunc(str(pathobjA), str(pathobjB), *args)
+ return staticmethod(wrapped)
+
+ stat = _wrap_strfunc(os.stat)
+
+ lstat = _wrap_strfunc(os.lstat)
+
+ open = _wrap_strfunc(os.open)
+
+ listdir = _wrap_strfunc(os.listdir)
+
+ scandir = _wrap_strfunc(os_scandir)
+
+ chmod = _wrap_strfunc(os.chmod)
+
+ if hasattr(os, "lchmod"):
+ lchmod = _wrap_strfunc(os.lchmod)
+ else:
+ def lchmod(self, pathobj, mode):
+ raise NotImplementedError("lchmod() not available on this system")
+
+ mkdir = _wrap_strfunc(os.mkdir)
+
+ unlink = _wrap_strfunc(os.unlink)
+
+ rmdir = _wrap_strfunc(os.rmdir)
+
+ rename = _wrap_binary_strfunc(os.rename)
+
+ if sys.version_info >= (3, 3):
+ replace = _wrap_binary_strfunc(os.replace)
+
+ if nt:
+ if supports_symlinks:
+ symlink = _wrap_binary_strfunc(os.symlink)
+ else:
+ def symlink(a, b, target_is_directory):
+ raise NotImplementedError(
+ "symlink() not available on this system")
+ else:
+ # Under POSIX, os.symlink() takes two args
+ @staticmethod
+ def symlink(a, b, target_is_directory):
+ return os.symlink(str(a), str(b))
+
+ utime = _wrap_strfunc(os.utime)
+
+ # Helper for resolve()
+ def readlink(self, path):
+ return os.readlink(path)
+
+
+_normal_accessor = _NormalAccessor()
+
+
+#
+# Globbing helpers
+#
+
+def _make_selector(pattern_parts):
+ pat = pattern_parts[0]
+ child_parts = pattern_parts[1:]
+ if pat == '**':
+ cls = _RecursiveWildcardSelector
+ elif '**' in pat:
+ raise ValueError(
+ "Invalid pattern: '**' can only be an entire path component")
+ elif _is_wildcard_pattern(pat):
+ cls = _WildcardSelector
+ else:
+ cls = _PreciseSelector
+ return cls(pat, child_parts)
+
+
+if hasattr(functools, "lru_cache"):
+ _make_selector = functools.lru_cache()(_make_selector)
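+
+
+# How a glob pattern maps onto the selectors below (illustrative): for the
+# pattern '**/*.py', parse_parts() yields the parts ('**', '*.py') and
+# _make_selector() builds the chain
+#
+#     _RecursiveWildcardSelector -> _WildcardSelector -> _TerminatingSelector
+#
+# where each selector matches one pattern part and hands the surviving paths
+# to its successor for the remaining parts.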
+
+
+class _Selector:
+
+ """A selector matches a specific glob pattern part against the children
+ of a given path."""
+
+ def __init__(self, child_parts):
+ self.child_parts = child_parts
+ if child_parts:
+ self.successor = _make_selector(child_parts)
+ self.dironly = True
+ else:
+ self.successor = _TerminatingSelector()
+ self.dironly = False
+
+ def select_from(self, parent_path):
+ """Iterate over all child paths of `parent_path` matched by this
+ selector. This can contain parent_path itself."""
+ path_cls = type(parent_path)
+ is_dir = path_cls.is_dir
+ exists = path_cls.exists
+ scandir = parent_path._accessor.scandir
+ if not is_dir(parent_path):
+ return iter([])
+ return self._select_from(parent_path, is_dir, exists, scandir)
+
+
+class _TerminatingSelector:
+
+ def _select_from(self, parent_path, is_dir, exists, scandir):
+ yield parent_path
+
+
+class _PreciseSelector(_Selector):
+
+ def __init__(self, name, child_parts):
+ self.name = name
+ _Selector.__init__(self, child_parts)
+
+ def _select_from(self, parent_path, is_dir, exists, scandir):
+ def try_iter():
+ path = parent_path._make_child_relpath(self.name)
+ if (is_dir if self.dironly else exists)(path):
+ for p in self.successor._select_from(
+ path, is_dir, exists, scandir):
+ yield p
+
+ def except_iter(exc):
+ return
+ yield
+
+ for x in _try_except_permissionerror_iter(try_iter, except_iter):
+ yield x
+
+
+class _WildcardSelector(_Selector):
+
+ def __init__(self, pat, child_parts):
+ self.pat = re.compile(fnmatch.translate(pat))
+ _Selector.__init__(self, child_parts)
+
+ def _select_from(self, parent_path, is_dir, exists, scandir):
+ def try_iter():
+ cf = parent_path._flavour.casefold
+ entries = list(scandir(parent_path))
+ for entry in entries:
+ if not self.dironly or entry.is_dir():
+ name = entry.name
+ casefolded = cf(name)
+ if self.pat.match(casefolded):
+ path = parent_path._make_child_relpath(name)
+ for p in self.successor._select_from(
+ path, is_dir, exists, scandir):
+ yield p
+
+ def except_iter(exc):
+ return
+ yield
+
+ for x in _try_except_permissionerror_iter(try_iter, except_iter):
+ yield x
+
+
+class _RecursiveWildcardSelector(_Selector):
+
+ def __init__(self, pat, child_parts):
+ _Selector.__init__(self, child_parts)
+
+ def _iterate_directories(self, parent_path, is_dir, scandir):
+ yield parent_path
+
+ def try_iter():
+ entries = list(scandir(parent_path))
+ for entry in entries:
+ entry_is_dir = False
+ try:
+ entry_is_dir = entry.is_dir()
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ if entry_is_dir and not entry.is_symlink():
+ path = parent_path._make_child_relpath(entry.name)
+ for p in self._iterate_directories(path, is_dir, scandir):
+ yield p
+
+ def except_iter(exc):
+ return
+ yield
+
+ for x in _try_except_permissionerror_iter(try_iter, except_iter):
+ yield x
+
+ def _select_from(self, parent_path, is_dir, exists, scandir):
+ def try_iter():
+ yielded = set()
+ try:
+ successor_select = self.successor._select_from
+ for starting_point in self._iterate_directories(
+ parent_path, is_dir, scandir):
+ for p in successor_select(
+ starting_point, is_dir, exists, scandir):
+ if p not in yielded:
+ yield p
+ yielded.add(p)
+ finally:
+ yielded.clear()
+
+ def except_iter(exc):
+ return
+ yield
+
+ for x in _try_except_permissionerror_iter(try_iter, except_iter):
+ yield x
+
+
+#
+# Public API
+#
+
+class _PathParents(Sequence):
+
+ """This object provides sequence-like access to the logical ancestors
+ of a path. Don't try to construct it yourself."""
+ __slots__ = ('_pathcls', '_drv', '_root', '_parts')
+
+ def __init__(self, path):
+ # We don't store the instance to avoid reference cycles
+ self._pathcls = type(path)
+ self._drv = path._drv
+ self._root = path._root
+ self._parts = path._parts
+
+ def __len__(self):
+ if self._drv or self._root:
+ return len(self._parts) - 1
+ else:
+ return len(self._parts)
+
+ def __getitem__(self, idx):
+ if idx < 0 or idx >= len(self):
+ raise IndexError(idx)
+ return self._pathcls._from_parsed_parts(self._drv, self._root,
+ self._parts[:-idx - 1])
+
+ def __repr__(self):
+ return "<{0}.parents>".format(self._pathcls.__name__)
+
+
+class PurePath(object):
+
+ """PurePath represents a filesystem path and offers operations which
+ don't imply any actual filesystem I/O. Depending on your system,
+ instantiating a PurePath will return either a PurePosixPath or a
+ PureWindowsPath object. You can also instantiate either of these classes
+ directly, regardless of your system.
+ """
+ __slots__ = (
+ '_drv', '_root', '_parts',
+ '_str', '_hash', '_pparts', '_cached_cparts',
+ )
+
+ def __new__(cls, *args):
+ """Construct a PurePath from one or several strings and/or existing
+ PurePath objects. The strings and path objects are combined so as
+ to yield a canonicalized path, which is incorporated into the
+ new PurePath object.
+ """
+ if cls is PurePath:
+ cls = PureWindowsPath if os.name == 'nt' else PurePosixPath
+ return cls._from_parts(args)
+
+ def __reduce__(self):
+ # Using the parts tuple helps share interned path parts
+ # when pickling related paths.
+ return (self.__class__, tuple(self._parts))
+
+ @classmethod
+ def _parse_args(cls, args):
+ # This is useful when you don't want to create an instance, just
+ # canonicalize some constructor arguments.
+ parts = []
+ for a in args:
+ if isinstance(a, PurePath):
+ parts += a._parts
+ else:
+ if sys.version_info >= (3, 6):
+ a = os.fspath(a)
+ else:
+ # duck typing for older Python versions
+ if hasattr(a, "__fspath__"):
+ a = a.__fspath__()
+ if isinstance(a, str):
+ # Force-cast str subclasses to str (issue #21127)
+ parts.append(str(a))
+ # also handle unicode for PY2 (six.text_type = unicode)
+ elif six.PY2 and isinstance(a, six.text_type):
+ # cast to str using filesystem encoding
+ # note: in rare circumstances, on Python < 3.2,
+ # getfilesystemencoding can return None, in that
+ # case fall back to ascii
+ parts.append(a.encode(
+ sys.getfilesystemencoding() or "ascii"))
+ else:
+ raise TypeError(
+ "argument should be a str object or an os.PathLike "
+ "object returning str, not %r"
+ % type(a))
+ return cls._flavour.parse_parts(parts)
+
+ @classmethod
+ def _from_parts(cls, args, init=True):
+ # We need to call _parse_args on the instance, so as to get the
+ # right flavour.
+ self = object.__new__(cls)
+ drv, root, parts = self._parse_args(args)
+ self._drv = drv
+ self._root = root
+ self._parts = parts
+ if init:
+ self._init()
+ return self
+
+ @classmethod
+ def _from_parsed_parts(cls, drv, root, parts, init=True):
+ self = object.__new__(cls)
+ self._drv = drv
+ self._root = root
+ self._parts = parts
+ if init:
+ self._init()
+ return self
+
+ @classmethod
+ def _format_parsed_parts(cls, drv, root, parts):
+ if drv or root:
+ return drv + root + cls._flavour.join(parts[1:])
+ else:
+ return cls._flavour.join(parts)
+
+ def _init(self):
+ # Overridden in concrete Path
+ pass
+
+ def _make_child(self, args):
+ drv, root, parts = self._parse_args(args)
+ drv, root, parts = self._flavour.join_parsed_parts(
+ self._drv, self._root, self._parts, drv, root, parts)
+ return self._from_parsed_parts(drv, root, parts)
+
+ def __str__(self):
+ """Return the string representation of the path, suitable for
+ passing to system calls."""
+ try:
+ return self._str
+ except AttributeError:
+ self._str = self._format_parsed_parts(self._drv, self._root,
+ self._parts) or '.'
+ return self._str
+
+ def __fspath__(self):
+ return str(self)
+
+ def as_posix(self):
+ """Return the string representation of the path with forward (/)
+ slashes."""
+ f = self._flavour
+ return str(self).replace(f.sep, '/')
+
+ def __bytes__(self):
+ """Return the bytes representation of the path. This is only
+ recommended for use under Unix."""
+ if sys.version_info < (3, 2):
+ raise NotImplementedError("needs Python 3.2 or later")
+ return os.fsencode(str(self))
+
+ def __repr__(self):
+ return "{0}({1!r})".format(self.__class__.__name__, self.as_posix())
+
+ def as_uri(self):
+ """Return the path as a 'file' URI."""
+ if not self.is_absolute():
+ raise ValueError("relative path can't be expressed as a file URI")
+ return self._flavour.make_uri(self)
+
+ @property
+ def _cparts(self):
+ # Cached casefolded parts, for hashing and comparison
+ try:
+ return self._cached_cparts
+ except AttributeError:
+ self._cached_cparts = self._flavour.casefold_parts(self._parts)
+ return self._cached_cparts
+
+ def __eq__(self, other):
+ if not isinstance(other, PurePath):
+ return NotImplemented
+ return (
+ self._cparts == other._cparts
+ and self._flavour is other._flavour)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ try:
+ return self._hash
+ except AttributeError:
+ self._hash = hash(tuple(self._cparts))
+ return self._hash
+
+ def __lt__(self, other):
+ if (not isinstance(other, PurePath)
+ or self._flavour is not other._flavour):
+ return NotImplemented
+ return self._cparts < other._cparts
+
+ def __le__(self, other):
+ if (not isinstance(other, PurePath)
+ or self._flavour is not other._flavour):
+ return NotImplemented
+ return self._cparts <= other._cparts
+
+ def __gt__(self, other):
+ if (not isinstance(other, PurePath)
+ or self._flavour is not other._flavour):
+ return NotImplemented
+ return self._cparts > other._cparts
+
+ def __ge__(self, other):
+ if (not isinstance(other, PurePath)
+ or self._flavour is not other._flavour):
+ return NotImplemented
+ return self._cparts >= other._cparts
+
+ drive = property(attrgetter('_drv'),
+ doc="""The drive prefix (letter or UNC path), if any.""")
+
+ root = property(attrgetter('_root'),
+ doc="""The root of the path, if any.""")
+
+ @property
+ def anchor(self):
+ """The concatenation of the drive and root, or ''."""
+ anchor = self._drv + self._root
+ return anchor
+
+ @property
+ def name(self):
+ """The final path component, if any."""
+ parts = self._parts
+ if len(parts) == (1 if (self._drv or self._root) else 0):
+ return ''
+ return parts[-1]
+
+ @property
+ def suffix(self):
+ """The final component's last suffix, if any."""
+ name = self.name
+ i = name.rfind('.')
+ if 0 < i < len(name) - 1:
+ return name[i:]
+ else:
+ return ''
+
+ @property
+ def suffixes(self):
+ """A list of the final component's suffixes, if any."""
+ name = self.name
+ if name.endswith('.'):
+ return []
+ name = name.lstrip('.')
+ return ['.' + suffix for suffix in name.split('.')[1:]]
+
+ @property
+ def stem(self):
+ """The final path component, minus its last suffix."""
+ name = self.name
+ i = name.rfind('.')
+ if 0 < i < len(name) - 1:
+ return name[:i]
+ else:
+ return name
+
+ def with_name(self, name):
+ """Return a new path with the file name changed."""
+ if not self.name:
+ raise ValueError("%r has an empty name" % (self,))
+ drv, root, parts = self._flavour.parse_parts((name,))
+ if (not name or name[-1] in [self._flavour.sep, self._flavour.altsep]
+ or drv or root or len(parts) != 1):
+ raise ValueError("Invalid name %r" % (name))
+ return self._from_parsed_parts(self._drv, self._root,
+ self._parts[:-1] + [name])
+
+ def with_suffix(self, suffix):
+ """Return a new path with the file suffix changed. If the path
+ has no suffix, add given suffix. If the given suffix is an empty
+ string, remove the suffix from the path.
+ """
+ # XXX if suffix is None, should the current suffix be removed?
+ f = self._flavour
+ if f.sep in suffix or f.altsep and f.altsep in suffix:
+ raise ValueError("Invalid suffix %r" % (suffix))
+ if suffix and not suffix.startswith('.') or suffix == '.':
+ raise ValueError("Invalid suffix %r" % (suffix))
+ name = self.name
+ if not name:
+ raise ValueError("%r has an empty name" % (self,))
+ old_suffix = self.suffix
+ if not old_suffix:
+ name = name + suffix
+ else:
+ name = name[:-len(old_suffix)] + suffix
+ return self._from_parsed_parts(self._drv, self._root,
+ self._parts[:-1] + [name])
+
+ def relative_to(self, *other):
+ """Return the relative path to another path identified by the passed
+ arguments. If the operation is not possible (because this is not
+ a subpath of the other path), raise ValueError.
+ """
+ # For the purpose of this method, drive and root are considered
+ # separate parts, i.e.:
+ # Path('c:/').relative_to('c:') gives Path('/')
+ # Path('c:/').relative_to('/') raises ValueError
+ if not other:
+ raise TypeError("need at least one argument")
+ parts = self._parts
+ drv = self._drv
+ root = self._root
+ if root:
+ abs_parts = [drv, root] + parts[1:]
+ else:
+ abs_parts = parts
+ to_drv, to_root, to_parts = self._parse_args(other)
+ if to_root:
+ to_abs_parts = [to_drv, to_root] + to_parts[1:]
+ else:
+ to_abs_parts = to_parts
+ n = len(to_abs_parts)
+ cf = self._flavour.casefold_parts
+ if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts):
+ formatted = self._format_parsed_parts(to_drv, to_root, to_parts)
+ raise ValueError("{0!r} does not start with {1!r}"
+ .format(str(self), str(formatted)))
+ return self._from_parsed_parts('', root if n == 1 else '',
+ abs_parts[n:])
+
+ @property
+ def parts(self):
+ """An object providing sequence-like access to the
+ components in the filesystem path."""
+ # We cache the tuple to avoid building a new one each time .parts
+ # is accessed. XXX is this necessary?
+ try:
+ return self._pparts
+ except AttributeError:
+ self._pparts = tuple(self._parts)
+ return self._pparts
+
+ def joinpath(self, *args):
+ """Combine this path with one or several arguments, and return a
+ new path representing either a subpath (if all arguments are relative
+ paths) or a totally different path (if one of the arguments is
+ anchored).
+ """
+ return self._make_child(args)
+
+ def __truediv__(self, key):
+ return self._make_child((key,))
+
+ def __rtruediv__(self, key):
+ return self._from_parts([key] + self._parts)
+
+ if six.PY2:
+ __div__ = __truediv__
+ __rdiv__ = __rtruediv__
+
+ @property
+ def parent(self):
+ """The logical parent of the path."""
+ drv = self._drv
+ root = self._root
+ parts = self._parts
+ if len(parts) == 1 and (drv or root):
+ return self
+ return self._from_parsed_parts(drv, root, parts[:-1])
+
+ @property
+ def parents(self):
+ """A sequence of this path's logical parents."""
+ return _PathParents(self)
+
+ def is_absolute(self):
+ """True if the path is absolute (has both a root and, if applicable,
+ a drive)."""
+ if not self._root:
+ return False
+ return not self._flavour.has_drv or bool(self._drv)
+
+ def is_reserved(self):
+ """Return True if the path contains one of the special names reserved
+ by the system, if any."""
+ return self._flavour.is_reserved(self._parts)
+
+ def match(self, path_pattern):
+ """
+ Return True if this path matches the given pattern.
+ """
+ cf = self._flavour.casefold
+ path_pattern = cf(path_pattern)
+ drv, root, pat_parts = self._flavour.parse_parts((path_pattern,))
+ if not pat_parts:
+ raise ValueError("empty pattern")
+ if drv and drv != cf(self._drv):
+ return False
+ if root and root != cf(self._root):
+ return False
+ parts = self._cparts
+ if drv or root:
+ if len(pat_parts) != len(parts):
+ return False
+ pat_parts = pat_parts[1:]
+ elif len(pat_parts) > len(parts):
+ return False
+ for part, pat in zip(reversed(parts), reversed(pat_parts)):
+ if not fnmatch.fnmatchcase(part, pat):
+ return False
+ return True
+
+
+# Can't subclass os.PathLike from PurePath and keep the constructor
+# optimizations in PurePath._parse_args().
+if sys.version_info >= (3, 6):
+ os.PathLike.register(PurePath)
+
+
+class PurePosixPath(PurePath):
+ _flavour = _posix_flavour
+ __slots__ = ()
+
+
+class PureWindowsPath(PurePath):
+ """PurePath subclass for Windows systems.
+
+ On a Windows system, instantiating a PurePath should return this object.
+ However, you can also instantiate it directly on any system.
+ """
+ _flavour = _windows_flavour
+ __slots__ = ()
+
+
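+# A few pure-path operations as a sketch (no filesystem access is involved;
+# the literal paths are assumptions made for the example):
+#
+#     p = PurePosixPath('/usr', 'lib') / 'python2.7'
+#     p.name                   # 'python2.7'
+#     p.suffix                 # '.7'
+#     p.with_suffix('.8')      # PurePosixPath('/usr/lib/python2.8')
+#     p.match('lib/*')         # True
+#     PureWindowsPath('C:/Users').drive   # 'C:'
+
+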
+# Filesystem-accessing classes
+
+
+class Path(PurePath):
+ """PurePath subclass that can make system calls.
+
+ Path represents a filesystem path but unlike PurePath, also offers
+ methods to do system calls on path objects. Depending on your system,
+ instantiating a Path will return either a PosixPath or a WindowsPath
+ object. You can also instantiate a PosixPath or WindowsPath directly,
+ but cannot instantiate a WindowsPath on a POSIX system or vice versa.
+ """
+ __slots__ = (
+ '_accessor',
+ '_closed',
+ )
+
+ def __new__(cls, *args, **kwargs):
+ if cls is Path:
+ cls = WindowsPath if os.name == 'nt' else PosixPath
+ self = cls._from_parts(args, init=False)
+ if not self._flavour.is_supported:
+ raise NotImplementedError("cannot instantiate %r on your system"
+ % (cls.__name__,))
+ self._init()
+ return self
+
+ def _init(self,
+ # Private non-constructor arguments
+ template=None,
+ ):
+ self._closed = False
+ if template is not None:
+ self._accessor = template._accessor
+ else:
+ self._accessor = _normal_accessor
+
+ def _make_child_relpath(self, part):
+ # This is an optimization used for dir walking. `part` must be
+ # a single part relative to this path.
+ parts = self._parts + [part]
+ return self._from_parsed_parts(self._drv, self._root, parts)
+
+ def __enter__(self):
+ if self._closed:
+ self._raise_closed()
+ return self
+
+ def __exit__(self, t, v, tb):
+ self._closed = True
+
+ def _raise_closed(self):
+ raise ValueError("I/O operation on closed path")
+
+ def _opener(self, name, flags, mode=0o666):
+ # A stub for the opener argument to built-in open()
+ return self._accessor.open(self, flags, mode)
+
+ def _raw_open(self, flags, mode=0o777):
+ """
+ Open the file pointed to by this path and return a file descriptor,
+ as os.open() does.
+ """
+ if self._closed:
+ self._raise_closed()
+ return self._accessor.open(self, flags, mode)
+
+ # Public API
+
+ @classmethod
+ def cwd(cls):
+ """Return a new path pointing to the current working directory
+ (as returned by os.getcwd()).
+ """
+ return cls(os.getcwd())
+
+ @classmethod
+ def home(cls):
+ """Return a new path pointing to the user's home directory (as
+ returned by os.path.expanduser('~')).
+ """
+ return cls(cls()._flavour.gethomedir(None))
+
+ def samefile(self, other_path):
+ """Return whether other_path refers to the same file as this path
+ (as returned by os.path.samefile()).
+ """
+ if hasattr(os.path, "samestat"):
+ st = self.stat()
+ try:
+ other_st = other_path.stat()
+ except AttributeError:
+ other_st = os.stat(other_path)
+ return os.path.samestat(st, other_st)
+ else:
+ filename1 = six.text_type(self)
+ filename2 = six.text_type(other_path)
+ st1 = _win32_get_unique_path_id(filename1)
+ st2 = _win32_get_unique_path_id(filename2)
+ return st1 == st2
+
+ def iterdir(self):
+ """Iterate over the files in this directory. Does not yield any
+ result for the special paths '.' and '..'.
+ """
+ if self._closed:
+ self._raise_closed()
+ for name in self._accessor.listdir(self):
+ if name in ('.', '..'):
+ # Yielding a path object for these makes little sense
+ continue
+ yield self._make_child_relpath(name)
+ if self._closed:
+ self._raise_closed()
+
+ def glob(self, pattern):
+ """Iterate over this subtree and yield all existing files (of any
+ kind, including directories) matching the given relative pattern.
+ """
+ if not pattern:
+ raise ValueError("Unacceptable pattern: {0!r}".format(pattern))
+ pattern = self._flavour.casefold(pattern)
+ drv, root, pattern_parts = self._flavour.parse_parts((pattern,))
+ if drv or root:
+ raise NotImplementedError("Non-relative patterns are unsupported")
+ selector = _make_selector(tuple(pattern_parts))
+ for p in selector.select_from(self):
+ yield p
+
+ def rglob(self, pattern):
+ """Recursively yield all existing files (of any kind, including
+ directories) matching the given relative pattern, anywhere in
+ this subtree.
+ """
+ pattern = self._flavour.casefold(pattern)
+ drv, root, pattern_parts = self._flavour.parse_parts((pattern,))
+ if drv or root:
+ raise NotImplementedError("Non-relative patterns are unsupported")
+ selector = _make_selector(("**",) + tuple(pattern_parts))
+ for p in selector.select_from(self):
+ yield p
+
+ def absolute(self):
+ """Return an absolute version of this path. This function works
+ even if the path doesn't point to anything.
+
+ No normalization is done, i.e. all '.' and '..' components are kept as-is.
+ Use resolve() to get the canonical path to a file.
+ """
+ # XXX untested yet!
+ if self._closed:
+ self._raise_closed()
+ if self.is_absolute():
+ return self
+ # FIXME this must defer to the specific flavour (and, under Windows,
+ # use nt._getfullpathname())
+ obj = self._from_parts([os.getcwd()] + self._parts, init=False)
+ obj._init(template=self)
+ return obj
+
+ def resolve(self, strict=False):
+ """
+ Make the path absolute, resolving all symlinks on the way and also
+ normalizing it (for example turning slashes into backslashes under
+ Windows).
+ """
+ if self._closed:
+ self._raise_closed()
+ s = self._flavour.resolve(self, strict=strict)
+ if s is None:
+ # No symlink resolution => for consistency, raise an error if
+ # the path is forbidden,
+ # but do not raise an error if the file does not exist (see issue #54).
+
+ def _try_func():
+ self.stat()
+
+ def _exc_func(exc):
+ pass
+
+ _try_except_filenotfounderror(_try_func, _exc_func)
+ s = str(self.absolute())
+ else:
+ # ensure s is a string (normpath requires this on older python)
+ s = str(s)
+ # Now we have no symlinks in the path, it's safe to normalize it.
+ normed = self._flavour.pathmod.normpath(s)
+ obj = self._from_parts((normed,), init=False)
+ obj._init(template=self)
+ return obj
+
+ def stat(self):
+ """
+ Return the result of the stat() system call on this path, like
+ os.stat() does.
+ """
+ return self._accessor.stat(self)
+
+ def owner(self):
+ """
+ Return the login name of the file owner.
+ """
+ import pwd
+ return pwd.getpwuid(self.stat().st_uid).pw_name
+
+ def group(self):
+ """
+ Return the group name of the file gid.
+ """
+ import grp
+ return grp.getgrgid(self.stat().st_gid).gr_name
+
+ def open(self, mode='r', buffering=-1, encoding=None,
+ errors=None, newline=None):
+ """
+ Open the file pointed to by this path and return a file object, as
+ the built-in open() function does.
+ """
+ if self._closed:
+ self._raise_closed()
+ if sys.version_info >= (3, 3):
+ return io.open(
+ str(self), mode, buffering, encoding, errors, newline,
+ opener=self._opener)
+ else:
+ return io.open(str(self), mode, buffering,
+ encoding, errors, newline)
+
+ def read_bytes(self):
+ """
+ Open the file in bytes mode, read it, and close the file.
+ """
+ with self.open(mode='rb') as f:
+ return f.read()
+
+ def read_text(self, encoding=None, errors=None):
+ """
+ Open the file in text mode, read it, and close the file.
+ """
+ with self.open(mode='r', encoding=encoding, errors=errors) as f:
+ return f.read()
+
+ def write_bytes(self, data):
+ """
+ Open the file in bytes mode, write to it, and close the file.
+ """
+ if not isinstance(data, six.binary_type):
+ raise TypeError(
+ 'data must be %s, not %s' %
+ (six.binary_type.__name__, data.__class__.__name__))
+ with self.open(mode='wb') as f:
+ return f.write(data)
+
+ def write_text(self, data, encoding=None, errors=None):
+ """
+ Open the file in text mode, write to it, and close the file.
+ """
+ if not isinstance(data, six.text_type):
+ raise TypeError(
+ 'data must be %s, not %s' %
+ (six.text_type.__name__, data.__class__.__name__))
+ with self.open(mode='w', encoding=encoding, errors=errors) as f:
+ return f.write(data)
+
+ def touch(self, mode=0o666, exist_ok=True):
+ """
+ Create this file with the given access mode, if it doesn't exist.
+ """
+ if self._closed:
+ self._raise_closed()
+ if exist_ok:
+ # First try to bump modification time
+ # Implementation note: GNU touch uses the UTIME_NOW option of
+ # the utimensat() / futimens() functions.
+ try:
+ self._accessor.utime(self, None)
+ except OSError:
+ # Avoid exception chaining
+ pass
+ else:
+ return
+ flags = os.O_CREAT | os.O_WRONLY
+ if not exist_ok:
+ flags |= os.O_EXCL
+ fd = self._raw_open(flags, mode)
+ os.close(fd)
+
+ def mkdir(self, mode=0o777, parents=False, exist_ok=False):
+ """
+ Create a new directory at this given path.
+ """
+ if self._closed:
+ self._raise_closed()
+
+ def _try_func():
+ self._accessor.mkdir(self, mode)
+
+ def _exc_func(exc):
+ if not parents or self.parent == self:
+ raise exc
+ self.parent.mkdir(parents=True, exist_ok=True)
+ self.mkdir(mode, parents=False, exist_ok=exist_ok)
+
+ try:
+ _try_except_filenotfounderror(_try_func, _exc_func)
+ except OSError:
+ # Cannot rely on checking for EEXIST, since the operating system
+ # could give priority to other errors like EACCES or EROFS
+ if not exist_ok or not self.is_dir():
+ raise
+
+ def chmod(self, mode):
+ """
+ Change the permissions of the path, like os.chmod().
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.chmod(self, mode)
+
+ def lchmod(self, mode):
+ """
+ Like chmod(), except if the path points to a symlink, the symlink's
+ permissions are changed, rather than its target's.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.lchmod(self, mode)
+
+ def unlink(self):
+ """
+ Remove this file or link.
+ If the path is a directory, use rmdir() instead.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.unlink(self)
+
+ def rmdir(self):
+ """
+ Remove this directory. The directory must be empty.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.rmdir(self)
+
+ def lstat(self):
+ """
+ Like stat(), except if the path points to a symlink, the symlink's
+ status information is returned, rather than its target's.
+ """
+ if self._closed:
+ self._raise_closed()
+ return self._accessor.lstat(self)
+
+ def rename(self, target):
+ """
+ Rename this path to the given path.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.rename(self, target)
+
+ def replace(self, target):
+ """
+ Rename this path to the given path, clobbering the existing
+ destination if it exists.
+ """
+ if sys.version_info < (3, 3):
+ raise NotImplementedError("replace() is only available "
+ "with Python 3.3 and later")
+ if self._closed:
+ self._raise_closed()
+ self._accessor.replace(self, target)
+
+ def symlink_to(self, target, target_is_directory=False):
+ """
+ Make this path a symlink pointing to the given path.
+ Note the order of arguments (self, target) is the reverse of
+ os.symlink's.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.symlink(target, self, target_is_directory)
+
+ # Convenience functions for querying the stat results
+
+ def exists(self):
+ """
+ Whether this path exists.
+ """
+ try:
+ self.stat()
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+ return True
+
+ def is_dir(self):
+ """
+ Whether this path is a directory.
+ """
+ try:
+ return S_ISDIR(self.stat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+
+ def is_file(self):
+ """
+ Whether this path is a regular file (also True for symlinks pointing
+ to regular files).
+ """
+ try:
+ return S_ISREG(self.stat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+
+ def is_mount(self):
+ """
+        Check if this path is a POSIX mount point.
+ """
+ # Need to exist and be a dir
+ if not self.exists() or not self.is_dir():
+ return False
+
+ parent = Path(self.parent)
+ try:
+ parent_dev = parent.stat().st_dev
+ except OSError:
+ return False
+
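+        # A different device number from the parent means a filesystem is
+        # mounted here; with the same device, the path can still be a mount
+        # point if it is the filesystem root (its inode equals its parent's).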
+ dev = self.stat().st_dev
+ if dev != parent_dev:
+ return True
+ ino = self.stat().st_ino
+ parent_ino = parent.stat().st_ino
+ return ino == parent_ino
+
+ def is_symlink(self):
+ """
+ Whether this path is a symbolic link.
+ """
+ try:
+ return S_ISLNK(self.lstat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+
+ def is_block_device(self):
+ """
+ Whether this path is a block device.
+ """
+ try:
+ return S_ISBLK(self.stat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+
+ def is_char_device(self):
+ """
+ Whether this path is a character device.
+ """
+ try:
+ return S_ISCHR(self.stat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+
+ def is_fifo(self):
+ """
+ Whether this path is a FIFO.
+ """
+ try:
+ return S_ISFIFO(self.stat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+
+ def is_socket(self):
+ """
+ Whether this path is a socket.
+ """
+ try:
+ return S_ISSOCK(self.stat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+
+ def expanduser(self):
+ """ Return a new path with expanded ~ and ~user constructs
+ (as returned by os.path.expanduser)
+ """
+ if (not (self._drv or self._root)
+ and self._parts and self._parts[0][:1] == '~'):
+ homedir = self._flavour.gethomedir(self._parts[0][1:])
+ return self._from_parts([homedir] + self._parts[1:])
+
+ return self
+
+
+class PosixPath(Path, PurePosixPath):
+ """Path subclass for non-Windows systems.
+
+ On a POSIX system, instantiating a Path should return this object.
+ """
+ __slots__ = ()
+
+
+class WindowsPath(Path, PureWindowsPath):
+ """Path subclass for Windows systems.
+
+ On a Windows system, instantiating a Path should return this object.
+ """
+ __slots__ = ()
+
+ def owner(self):
+ raise NotImplementedError("Path.owner() is unsupported on this system")
+
+ def group(self):
+ raise NotImplementedError("Path.group() is unsupported on this system")
+
+ def is_mount(self):
+ raise NotImplementedError(
+ "Path.is_mount() is unsupported on this system")
diff --git a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/LICENSE.txt b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/LICENSE.txt
new file mode 100644
index 0000000000..0759f503f2
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/LICENSE.txt
@@ -0,0 +1,27 @@
+Copyright (c) 2012, Ben Hoyt
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+* Neither the name of Ben Hoyt nor the names of its contributors may be used
+to endorse or promote products derived from this software without specific
+prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/METADATA
new file mode 100644
index 0000000000..ee4b11a523
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/METADATA
@@ -0,0 +1,238 @@
+Metadata-Version: 2.1
+Name: scandir
+Version: 1.10.0
+Summary: scandir, a better directory iterator and faster os.walk()
+Home-page: https://github.com/benhoyt/scandir
+Author: Ben Hoyt
+Author-email: benhoyt@gmail.com
+License: New BSD License
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Topic :: System :: Filesystems
+Classifier: Topic :: System :: Operating System
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: Implementation :: CPython
+
+
+scandir, a better directory iterator and faster os.walk()
+=========================================================
+
+.. image:: https://img.shields.io/pypi/v/scandir.svg
+ :target: https://pypi.python.org/pypi/scandir
+ :alt: scandir on PyPI (Python Package Index)
+
+.. image:: https://travis-ci.org/benhoyt/scandir.svg?branch=master
+ :target: https://travis-ci.org/benhoyt/scandir
+ :alt: Travis CI tests (Linux)
+
+.. image:: https://ci.appveyor.com/api/projects/status/github/benhoyt/scandir?branch=master&svg=true
+ :target: https://ci.appveyor.com/project/benhoyt/scandir
+ :alt: Appveyor tests (Windows)
+
+
+``scandir()`` is a directory iteration function like ``os.listdir()``,
+except that instead of returning a list of bare filenames, it yields
+``DirEntry`` objects that include file type and stat information along
+with the name. Using ``scandir()`` increases the speed of ``os.walk()``
+by 2-20 times (depending on the platform and file system) by avoiding
+unnecessary calls to ``os.stat()`` in most cases.
+
+
+Now included in a Python near you!
+----------------------------------
+
+``scandir`` has been included in the Python 3.5 standard library as
+``os.scandir()``, and the related performance improvements to
+``os.walk()`` have also been included. So if you're lucky enough to be
+using Python 3.5 (release date September 13, 2015), you get the benefit
+immediately; otherwise, just
+`download this module from PyPI <https://pypi.python.org/pypi/scandir>`_,
+install it with ``pip install scandir``, and then do something like
+this in your code:
+
+.. code-block:: python
+
+ # Use the built-in version of scandir/walk if possible, otherwise
+ # use the scandir module version
+ try:
+ from os import scandir, walk
+ except ImportError:
+ from scandir import scandir, walk
+
+`PEP 471 <https://www.python.org/dev/peps/pep-0471/>`_, which is the
+PEP that proposes including ``scandir`` in the Python standard library,
+was `accepted <https://mail.python.org/pipermail/python-dev/2014-July/135561.html>`_
+in July 2014 by Victor Stinner, the BDFL-delegate for the PEP.
+
+This ``scandir`` module is intended to work on Python 2.7+ and Python
+3.4+ (and it has been tested on those versions).
+
+
+Background
+----------
+
+Python's built-in ``os.walk()`` is significantly slower than it needs to be,
+because -- in addition to calling ``listdir()`` on each directory -- it calls
+``stat()`` on each file to determine whether the filename is a directory or not.
+But both ``FindFirstFile`` / ``FindNextFile`` on Windows and ``readdir`` on Linux/OS
+X already tell you whether the files returned are directories or not, so
+no further ``stat`` system calls are needed. In short, you can reduce the number
+of system calls from about 2N to N, where N is the total number of files and
+directories in the tree.
+
+In practice, removing all those extra system calls makes ``os.walk()`` about
+**7-50 times as fast on Windows, and about 3-10 times as fast on Linux and Mac OS
+X.** So we're not talking about micro-optimizations. See more benchmarks
+in the "Benchmarks" section below.
+
+Somewhat relatedly, many people have also asked for a version of
+``os.listdir()`` that yields filenames as it iterates instead of returning them
+as one big list. This improves memory efficiency for iterating very large
+directories.
+
+So as well as a faster ``walk()``, scandir adds a new ``scandir()`` function.
+They're pretty easy to use, but see "The API" below for the full docs.
+
+
+Benchmarks
+----------
+
+Below are results showing how many times as fast ``scandir.walk()`` is
+compared to ``os.walk()`` on various systems, found by running
+``benchmark.py`` with no
+arguments:
+
+==================== ============== =============
+System version Python version Times as fast
+==================== ============== =============
+Windows 7 64-bit 2.7.7 64-bit 10.4
+Windows 7 64-bit SSD 2.7.7 64-bit 10.3
+Windows 7 64-bit NFS 2.7.6 64-bit 36.8
+Windows 7 64-bit SSD 3.4.1 64-bit 9.9
+Windows 7 64-bit SSD 3.5.0 64-bit 9.5
+Ubuntu 14.04 64-bit 2.7.6 64-bit 5.8
+Mac OS X 10.9.3 2.7.5 64-bit 3.8
+==================== ============== =============
+
+All of the above tests were done using the fast C version of scandir
+(source code in ``_scandir.c``).
+
+Note that the gains are less than the above on smaller directories and greater
+on larger directories. This is why ``benchmark.py`` creates a test directory
+tree with a standardized size.
+
+
+The API
+-------
+
+walk()
+~~~~~~
+
+The API for ``scandir.walk()`` is exactly the same as ``os.walk()``, so just
+`read the Python docs <https://docs.python.org/3.5/library/os.html#os.walk>`_.
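+
+As a minimal sketch, it is a drop-in replacement: anywhere ``os.walk()`` is
+used, ``scandir.walk()`` can be substituted unchanged, e.g. to count the
+files in each directory:
+
+.. code-block:: python
+
+    from scandir import walk
+
+    for root, dirs, files in walk('.'):
+        print('{0}: {1} files'.format(root, len(files)))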
+
+scandir()
+~~~~~~~~~
+
+The full docs for ``scandir()`` and the ``DirEntry`` objects it yields are
+available in the `Python documentation here <https://docs.python.org/3.5/library/os.html#os.scandir>`_.
+But below is a brief summary as well.
+
+ scandir(path='.') -> iterator of DirEntry objects for given path
+
+Like ``listdir``, ``scandir`` uses the operating system's directory
+iteration system calls to get the names of the files in the given
+``path``, but it's different from ``listdir`` in two ways:
+
+* Instead of returning bare filename strings, it returns lightweight
+ ``DirEntry`` objects that hold the filename string and provide
+ simple methods that allow access to the additional data the
+ operating system may have returned.
+
+* It returns a generator instead of a list, so that ``scandir`` acts
+ as a true iterator instead of returning the full list immediately.
+
+``scandir()`` yields a ``DirEntry`` object for each file and
+sub-directory in ``path``. Just like ``listdir``, the ``'.'``
+and ``'..'`` pseudo-directories are skipped, and the entries are
+yielded in system-dependent order. Each ``DirEntry`` object has the
+following attributes and methods:
+
+* ``name``: the entry's filename, relative to the scandir ``path``
+ argument (corresponds to the return values of ``os.listdir``)
+
+* ``path``: the entry's full path name (not necessarily an absolute
+ path) -- the equivalent of ``os.path.join(scandir_path, entry.name)``
+
+* ``is_dir(*, follow_symlinks=True)``: similar to
+ ``pathlib.Path.is_dir()``, but the return value is cached on the
+ ``DirEntry`` object; doesn't require a system call in most cases;
+ don't follow symbolic links if ``follow_symlinks`` is False
+
+* ``is_file(*, follow_symlinks=True)``: similar to
+ ``pathlib.Path.is_file()``, but the return value is cached on the
+ ``DirEntry`` object; doesn't require a system call in most cases;
+ don't follow symbolic links if ``follow_symlinks`` is False
+
+* ``is_symlink()``: similar to ``pathlib.Path.is_symlink()``, but the
+ return value is cached on the ``DirEntry`` object; doesn't require a
+ system call in most cases
+
+* ``stat(*, follow_symlinks=True)``: like ``os.stat()``, but the
+ return value is cached on the ``DirEntry`` object; does not require a
+ system call on Windows (except for symlinks); don't follow symbolic links
+ (like ``os.lstat()``) if ``follow_symlinks`` is False
+
+* ``inode()``: return the inode number of the entry; the return value
+ is cached on the ``DirEntry`` object
+
+Here's a very simple example of ``scandir()`` showing use of the
+``DirEntry.name`` attribute and the ``DirEntry.is_dir()`` method:
+
+.. code-block:: python
+
+ def subdirs(path):
+ """Yield directory names not starting with '.' under given path."""
+ for entry in os.scandir(path):
+ if not entry.name.startswith('.') and entry.is_dir():
+ yield entry.name
+
+This ``subdirs()`` function will be significantly faster with scandir
+than the equivalent code written with ``os.listdir()`` and
+``os.path.isdir()`` on both Windows and POSIX systems, especially on
+medium-sized or large directories.
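+
+For comparison, here is a sketch of that slower version, which pays an extra
+``stat`` call for every entry (the ``subdirs_listdir`` name is just for
+illustration):
+
+.. code-block:: python
+
+    import os
+
+    def subdirs_listdir(path):
+        """Same result as subdirs(), but stat()s every entry."""
+        for name in os.listdir(path):
+            full = os.path.join(path, name)
+            if not name.startswith('.') and os.path.isdir(full):
+                yield name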
+
+
+Further reading
+---------------
+
+* `The Python docs for scandir <https://docs.python.org/3.5/library/os.html#os.scandir>`_
+* `PEP 471 <https://www.python.org/dev/peps/pep-0471/>`_, the
+ (now-accepted) Python Enhancement Proposal that proposed adding
+ ``scandir`` to the standard library -- a lot of details here,
+ including rejected ideas and previous discussion
+
+
+Flames, comments, bug reports
+-----------------------------
+
+Please send flames, comments, and questions about scandir to Ben Hoyt:
+
+http://benhoyt.com/
+
+File bug reports for the version in the Python 3.5 standard library
+`here <https://docs.python.org/3.5/bugs.html>`_, or file bug reports
+or feature requests for this module at the GitHub project page:
+
+https://github.com/benhoyt/scandir
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/RECORD
new file mode 100644
index 0000000000..2140d975b4
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/RECORD
@@ -0,0 +1,6 @@
+scandir.py,sha256=97C2AQInuKk-Phb3aXM7fJomhc-00pZMcBur23NUmrE,24827
+scandir-1.10.0.dist-info/LICENSE.txt,sha256=peL73COXREGdKUB828knk8TZwdlWwXT3y3-W-m0FjIY,1464
+scandir-1.10.0.dist-info/METADATA,sha256=cv1fZ5DeC3DJqnMByWGiprvGhLpQCkWOZiJduweakGk,9559
+scandir-1.10.0.dist-info/WHEEL,sha256=WO4o60shExe_A5pkiO6Yb-8OHLGhlAGcs2oJ7aUuE5Q,110
+scandir-1.10.0.dist-info/top_level.txt,sha256=Ixze5mNjmis99ql7JEtAYc9-djJMbfRx-FFw3R_zZf8,17
+scandir-1.10.0.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/WHEEL
new file mode 100644
index 0000000000..310051fe9e
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: false
+Tag: cp39-cp39-macosx_10_15_x86_64
+
diff --git a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..b13832ba1d
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info/top_level.txt
@@ -0,0 +1,2 @@
+_scandir
+scandir
diff --git a/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir.py b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir.py
new file mode 100644
index 0000000000..c565b23f89
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir.py
@@ -0,0 +1,693 @@
+"""scandir, a better directory iterator and faster os.walk(), now in the Python 3.5 stdlib
+
+scandir() is a generator version of os.listdir() that returns an
+iterator over files in a directory, and also exposes the extra
+information most OSes provide while iterating files in a directory
+(such as type and stat information).
+
+This module also includes a version of os.walk() that uses scandir()
+to speed it up significantly.
+
+See README.md or https://github.com/benhoyt/scandir for rationale and
+docs, or read PEP 471 (https://www.python.org/dev/peps/pep-0471/) for
+more details on its inclusion in Python 3.5.
+
+scandir is released under the new BSD 3-clause license. See
+LICENSE.txt for the full license text.
+"""
+
+from __future__ import division
+
+from errno import ENOENT
+from os import listdir, lstat, stat, strerror
+from os.path import join, islink
+from stat import S_IFDIR, S_IFLNK, S_IFREG
+import collections
+import sys
+
+try:
+ import _scandir
+except ImportError:
+ _scandir = None
+
+try:
+ import ctypes
+except ImportError:
+ ctypes = None
+
+if _scandir is None and ctypes is None:
+ import warnings
+ warnings.warn("scandir can't find the compiled _scandir C module "
+ "or ctypes, using slow generic fallback")
+
+__version__ = '1.10.0'
+__all__ = ['scandir', 'walk']
+
+# Windows FILE_ATTRIBUTE constants for interpreting the
+# FIND_DATA.dwFileAttributes member
+FILE_ATTRIBUTE_ARCHIVE = 32
+FILE_ATTRIBUTE_COMPRESSED = 2048
+FILE_ATTRIBUTE_DEVICE = 64
+FILE_ATTRIBUTE_DIRECTORY = 16
+FILE_ATTRIBUTE_ENCRYPTED = 16384
+FILE_ATTRIBUTE_HIDDEN = 2
+FILE_ATTRIBUTE_INTEGRITY_STREAM = 32768
+FILE_ATTRIBUTE_NORMAL = 128
+FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 8192
+FILE_ATTRIBUTE_NO_SCRUB_DATA = 131072
+FILE_ATTRIBUTE_OFFLINE = 4096
+FILE_ATTRIBUTE_READONLY = 1
+FILE_ATTRIBUTE_REPARSE_POINT = 1024
+FILE_ATTRIBUTE_SPARSE_FILE = 512
+FILE_ATTRIBUTE_SYSTEM = 4
+FILE_ATTRIBUTE_TEMPORARY = 256
+FILE_ATTRIBUTE_VIRTUAL = 65536
+
+IS_PY3 = sys.version_info >= (3, 0)
+
+if IS_PY3:
+ unicode = str # Because Python <= 3.2 doesn't have u'unicode' syntax
+
+
+class GenericDirEntry(object):
+ __slots__ = ('name', '_stat', '_lstat', '_scandir_path', '_path')
+
+ def __init__(self, scandir_path, name):
+ self._scandir_path = scandir_path
+ self.name = name
+ self._stat = None
+ self._lstat = None
+ self._path = None
+
+ @property
+ def path(self):
+ if self._path is None:
+ self._path = join(self._scandir_path, self.name)
+ return self._path
+
+ def stat(self, follow_symlinks=True):
+ if follow_symlinks:
+ if self._stat is None:
+ self._stat = stat(self.path)
+ return self._stat
+ else:
+ if self._lstat is None:
+ self._lstat = lstat(self.path)
+ return self._lstat
+
+ # The code duplication below is intentional: this is for slightly
+ # better performance on systems that fall back to GenericDirEntry.
+ # It avoids an additional attribute lookup and method call, which
+ # are relatively slow on CPython.
+ def is_dir(self, follow_symlinks=True):
+ try:
+ st = self.stat(follow_symlinks=follow_symlinks)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False # Path doesn't exist or is a broken symlink
+ return st.st_mode & 0o170000 == S_IFDIR
+
+ def is_file(self, follow_symlinks=True):
+ try:
+ st = self.stat(follow_symlinks=follow_symlinks)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False # Path doesn't exist or is a broken symlink
+ return st.st_mode & 0o170000 == S_IFREG
+
+ def is_symlink(self):
+ try:
+ st = self.stat(follow_symlinks=False)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False # Path doesn't exist or is a broken symlink
+ return st.st_mode & 0o170000 == S_IFLNK
+
+ def inode(self):
+ st = self.stat(follow_symlinks=False)
+ return st.st_ino
+
+ def __str__(self):
+ return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name)
+
+ __repr__ = __str__
+
+
+def _scandir_generic(path=unicode('.')):
+ """Like os.listdir(), but yield DirEntry objects instead of returning
+ a list of names.
+ """
+ for name in listdir(path):
+ yield GenericDirEntry(path, name)
+
+
+if IS_PY3 and sys.platform == 'win32':
+ def scandir_generic(path=unicode('.')):
+ if isinstance(path, bytes):
+ raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead")
+ return _scandir_generic(path)
+ scandir_generic.__doc__ = _scandir_generic.__doc__
+else:
+ scandir_generic = _scandir_generic
+
+
+scandir_c = None
+scandir_python = None
+
+
+if sys.platform == 'win32':
+ if ctypes is not None:
+ from ctypes import wintypes
+
+ # Various constants from windows.h
+ INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value
+ ERROR_FILE_NOT_FOUND = 2
+ ERROR_NO_MORE_FILES = 18
+ IO_REPARSE_TAG_SYMLINK = 0xA000000C
+
+        # Number of seconds between 1601-01-01 and 1970-01-01
+ SECONDS_BETWEEN_EPOCHS = 11644473600
+
+ kernel32 = ctypes.windll.kernel32
+
+ # ctypes wrappers for (wide string versions of) FindFirstFile,
+ # FindNextFile, and FindClose
+ FindFirstFile = kernel32.FindFirstFileW
+ FindFirstFile.argtypes = [
+ wintypes.LPCWSTR,
+ ctypes.POINTER(wintypes.WIN32_FIND_DATAW),
+ ]
+ FindFirstFile.restype = wintypes.HANDLE
+
+ FindNextFile = kernel32.FindNextFileW
+ FindNextFile.argtypes = [
+ wintypes.HANDLE,
+ ctypes.POINTER(wintypes.WIN32_FIND_DATAW),
+ ]
+ FindNextFile.restype = wintypes.BOOL
+
+ FindClose = kernel32.FindClose
+ FindClose.argtypes = [wintypes.HANDLE]
+ FindClose.restype = wintypes.BOOL
+
+ Win32StatResult = collections.namedtuple('Win32StatResult', [
+ 'st_mode',
+ 'st_ino',
+ 'st_dev',
+ 'st_nlink',
+ 'st_uid',
+ 'st_gid',
+ 'st_size',
+ 'st_atime',
+ 'st_mtime',
+ 'st_ctime',
+ 'st_atime_ns',
+ 'st_mtime_ns',
+ 'st_ctime_ns',
+ 'st_file_attributes',
+ ])
+
+ def filetime_to_time(filetime):
+ """Convert Win32 FILETIME to time since Unix epoch in seconds."""
+ total = filetime.dwHighDateTime << 32 | filetime.dwLowDateTime
+ return total / 10000000 - SECONDS_BETWEEN_EPOCHS
+
+ def find_data_to_stat(data):
+ """Convert Win32 FIND_DATA struct to stat_result."""
+ # First convert Win32 dwFileAttributes to st_mode
+ attributes = data.dwFileAttributes
+ st_mode = 0
+ if attributes & FILE_ATTRIBUTE_DIRECTORY:
+ st_mode |= S_IFDIR | 0o111
+ else:
+ st_mode |= S_IFREG
+ if attributes & FILE_ATTRIBUTE_READONLY:
+ st_mode |= 0o444
+ else:
+ st_mode |= 0o666
+ if (attributes & FILE_ATTRIBUTE_REPARSE_POINT and
+ data.dwReserved0 == IO_REPARSE_TAG_SYMLINK):
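+            # Clear the file type bits set above and mark the entry as a symlink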
+ st_mode ^= st_mode & 0o170000
+ st_mode |= S_IFLNK
+
+ st_size = data.nFileSizeHigh << 32 | data.nFileSizeLow
+ st_atime = filetime_to_time(data.ftLastAccessTime)
+ st_mtime = filetime_to_time(data.ftLastWriteTime)
+ st_ctime = filetime_to_time(data.ftCreationTime)
+
+ # Some fields set to zero per CPython's posixmodule.c: st_ino, st_dev,
+ # st_nlink, st_uid, st_gid
+ return Win32StatResult(st_mode, 0, 0, 0, 0, 0, st_size,
+ st_atime, st_mtime, st_ctime,
+ int(st_atime * 1000000000),
+ int(st_mtime * 1000000000),
+ int(st_ctime * 1000000000),
+ attributes)
+
+ class Win32DirEntryPython(object):
+ __slots__ = ('name', '_stat', '_lstat', '_find_data', '_scandir_path', '_path', '_inode')
+
+ def __init__(self, scandir_path, name, find_data):
+ self._scandir_path = scandir_path
+ self.name = name
+ self._stat = None
+ self._lstat = None
+ self._find_data = find_data
+ self._path = None
+ self._inode = None
+
+ @property
+ def path(self):
+ if self._path is None:
+ self._path = join(self._scandir_path, self.name)
+ return self._path
+
+ def stat(self, follow_symlinks=True):
+ if follow_symlinks:
+ if self._stat is None:
+ if self.is_symlink():
+ # It's a symlink, call link-following stat()
+ self._stat = stat(self.path)
+ else:
+ # Not a symlink, stat is same as lstat value
+ if self._lstat is None:
+ self._lstat = find_data_to_stat(self._find_data)
+ self._stat = self._lstat
+ return self._stat
+ else:
+ if self._lstat is None:
+ # Lazily convert to stat object, because it's slow
+ # in Python, and often we only need is_dir() etc
+ self._lstat = find_data_to_stat(self._find_data)
+ return self._lstat
+
+ def is_dir(self, follow_symlinks=True):
+ is_symlink = self.is_symlink()
+ if follow_symlinks and is_symlink:
+ try:
+ return self.stat().st_mode & 0o170000 == S_IFDIR
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ elif is_symlink:
+ return False
+ else:
+ return (self._find_data.dwFileAttributes &
+ FILE_ATTRIBUTE_DIRECTORY != 0)
+
+ def is_file(self, follow_symlinks=True):
+ is_symlink = self.is_symlink()
+ if follow_symlinks and is_symlink:
+ try:
+ return self.stat().st_mode & 0o170000 == S_IFREG
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ elif is_symlink:
+ return False
+ else:
+ return (self._find_data.dwFileAttributes &
+ FILE_ATTRIBUTE_DIRECTORY == 0)
+
+ def is_symlink(self):
+ return (self._find_data.dwFileAttributes &
+ FILE_ATTRIBUTE_REPARSE_POINT != 0 and
+ self._find_data.dwReserved0 == IO_REPARSE_TAG_SYMLINK)
+
+ def inode(self):
+ if self._inode is None:
+ self._inode = lstat(self.path).st_ino
+ return self._inode
+
+ def __str__(self):
+ return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name)
+
+ __repr__ = __str__
+
+ def win_error(error, filename):
+ exc = WindowsError(error, ctypes.FormatError(error))
+ exc.filename = filename
+ return exc
+
+ def _scandir_python(path=unicode('.')):
+ """Like os.listdir(), but yield DirEntry objects instead of returning
+ a list of names.
+ """
+ # Call FindFirstFile and handle errors
+ if isinstance(path, bytes):
+ is_bytes = True
+ filename = join(path.decode('mbcs', 'strict'), '*.*')
+ else:
+ is_bytes = False
+ filename = join(path, '*.*')
+ data = wintypes.WIN32_FIND_DATAW()
+ data_p = ctypes.byref(data)
+ handle = FindFirstFile(filename, data_p)
+ if handle == INVALID_HANDLE_VALUE:
+ error = ctypes.GetLastError()
+ if error == ERROR_FILE_NOT_FOUND:
+ # No files, don't yield anything
+ return
+ raise win_error(error, path)
+
+ # Call FindNextFile in a loop, stopping when no more files
+ try:
+ while True:
+ # Skip '.' and '..' (current and parent directory), but
+ # otherwise yield (filename, stat_result) tuple
+ name = data.cFileName
+ if name not in ('.', '..'):
+ if is_bytes:
+ name = name.encode('mbcs', 'replace')
+ yield Win32DirEntryPython(path, name, data)
+
+ data = wintypes.WIN32_FIND_DATAW()
+ data_p = ctypes.byref(data)
+ success = FindNextFile(handle, data_p)
+ if not success:
+ error = ctypes.GetLastError()
+ if error == ERROR_NO_MORE_FILES:
+ break
+ raise win_error(error, path)
+ finally:
+ if not FindClose(handle):
+ raise win_error(ctypes.GetLastError(), path)
+
+ if IS_PY3:
+ def scandir_python(path=unicode('.')):
+ if isinstance(path, bytes):
+ raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead")
+ return _scandir_python(path)
+ scandir_python.__doc__ = _scandir_python.__doc__
+ else:
+ scandir_python = _scandir_python
+
+ if _scandir is not None:
+ scandir_c = _scandir.scandir
+ DirEntry_c = _scandir.DirEntry
+
+ if _scandir is not None:
+ scandir = scandir_c
+ DirEntry = DirEntry_c
+ elif ctypes is not None:
+ scandir = scandir_python
+ DirEntry = Win32DirEntryPython
+ else:
+ scandir = scandir_generic
+ DirEntry = GenericDirEntry
+
+
+# Linux, OS X, and BSD implementation
+elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform:
+ have_dirent_d_type = (sys.platform != 'sunos5')
+
+ if ctypes is not None and have_dirent_d_type:
+ import ctypes.util
+
+ DIR_p = ctypes.c_void_p
+
+ # Rather annoying how the dirent struct is slightly different on each
+ # platform. The only fields we care about are d_name and d_type.
+ class Dirent(ctypes.Structure):
+ if sys.platform.startswith('linux'):
+ _fields_ = (
+ ('d_ino', ctypes.c_ulong),
+ ('d_off', ctypes.c_long),
+ ('d_reclen', ctypes.c_ushort),
+ ('d_type', ctypes.c_byte),
+ ('d_name', ctypes.c_char * 256),
+ )
+ elif 'openbsd' in sys.platform:
+ _fields_ = (
+ ('d_ino', ctypes.c_uint64),
+ ('d_off', ctypes.c_uint64),
+ ('d_reclen', ctypes.c_uint16),
+ ('d_type', ctypes.c_uint8),
+ ('d_namlen', ctypes.c_uint8),
+ ('__d_padding', ctypes.c_uint8 * 4),
+ ('d_name', ctypes.c_char * 256),
+ )
+ else:
+ _fields_ = (
+ ('d_ino', ctypes.c_uint32), # must be uint32, not ulong
+ ('d_reclen', ctypes.c_ushort),
+ ('d_type', ctypes.c_byte),
+ ('d_namlen', ctypes.c_byte),
+ ('d_name', ctypes.c_char * 256),
+ )
+
+ DT_UNKNOWN = 0
+ DT_DIR = 4
+ DT_REG = 8
+ DT_LNK = 10
+
+ Dirent_p = ctypes.POINTER(Dirent)
+ Dirent_pp = ctypes.POINTER(Dirent_p)
+
+ libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True)
+ opendir = libc.opendir
+ opendir.argtypes = [ctypes.c_char_p]
+ opendir.restype = DIR_p
+
+ readdir_r = libc.readdir_r
+ readdir_r.argtypes = [DIR_p, Dirent_p, Dirent_pp]
+ readdir_r.restype = ctypes.c_int
+
+ closedir = libc.closedir
+ closedir.argtypes = [DIR_p]
+ closedir.restype = ctypes.c_int
+
+ file_system_encoding = sys.getfilesystemencoding()
+
+ class PosixDirEntry(object):
+ __slots__ = ('name', '_d_type', '_stat', '_lstat', '_scandir_path', '_path', '_inode')
+
+ def __init__(self, scandir_path, name, d_type, inode):
+ self._scandir_path = scandir_path
+ self.name = name
+ self._d_type = d_type
+ self._inode = inode
+ self._stat = None
+ self._lstat = None
+ self._path = None
+
+ @property
+ def path(self):
+ if self._path is None:
+ self._path = join(self._scandir_path, self.name)
+ return self._path
+
+ def stat(self, follow_symlinks=True):
+ if follow_symlinks:
+ if self._stat is None:
+ if self.is_symlink():
+ self._stat = stat(self.path)
+ else:
+ if self._lstat is None:
+ self._lstat = lstat(self.path)
+ self._stat = self._lstat
+ return self._stat
+ else:
+ if self._lstat is None:
+ self._lstat = lstat(self.path)
+ return self._lstat
+
+ def is_dir(self, follow_symlinks=True):
+ if (self._d_type == DT_UNKNOWN or
+ (follow_symlinks and self.is_symlink())):
+ try:
+ st = self.stat(follow_symlinks=follow_symlinks)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ return st.st_mode & 0o170000 == S_IFDIR
+ else:
+ return self._d_type == DT_DIR
+
+ def is_file(self, follow_symlinks=True):
+ if (self._d_type == DT_UNKNOWN or
+ (follow_symlinks and self.is_symlink())):
+ try:
+ st = self.stat(follow_symlinks=follow_symlinks)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ return st.st_mode & 0o170000 == S_IFREG
+ else:
+ return self._d_type == DT_REG
+
+ def is_symlink(self):
+ if self._d_type == DT_UNKNOWN:
+ try:
+ st = self.stat(follow_symlinks=False)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ return st.st_mode & 0o170000 == S_IFLNK
+ else:
+ return self._d_type == DT_LNK
+
+ def inode(self):
+ return self._inode
+
+ def __str__(self):
+ return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name)
+
+ __repr__ = __str__
+
+ def posix_error(filename):
+ errno = ctypes.get_errno()
+ exc = OSError(errno, strerror(errno))
+ exc.filename = filename
+ return exc
+
+ def scandir_python(path=unicode('.')):
+ """Like os.listdir(), but yield DirEntry objects instead of returning
+ a list of names.
+ """
+ if isinstance(path, bytes):
+ opendir_path = path
+ is_bytes = True
+ else:
+ opendir_path = path.encode(file_system_encoding)
+ is_bytes = False
+ dir_p = opendir(opendir_path)
+ if not dir_p:
+ raise posix_error(path)
+ try:
+ result = Dirent_p()
+ while True:
+ entry = Dirent()
+ if readdir_r(dir_p, entry, result):
+ raise posix_error(path)
+ if not result:
+ break
+ name = entry.d_name
+ if name not in (b'.', b'..'):
+ if not is_bytes:
+ name = name.decode(file_system_encoding)
+ yield PosixDirEntry(path, name, entry.d_type, entry.d_ino)
+ finally:
+ if closedir(dir_p):
+ raise posix_error(path)
+
+ if _scandir is not None:
+ scandir_c = _scandir.scandir
+ DirEntry_c = _scandir.DirEntry
+
+ if _scandir is not None:
+ scandir = scandir_c
+ DirEntry = DirEntry_c
+ elif ctypes is not None and have_dirent_d_type:
+ scandir = scandir_python
+ DirEntry = PosixDirEntry
+ else:
+ scandir = scandir_generic
+ DirEntry = GenericDirEntry
+
+
+# Some other system -- no d_type or stat information
+else:
+ scandir = scandir_generic
+ DirEntry = GenericDirEntry
+
+
+def _walk(top, topdown=True, onerror=None, followlinks=False):
+ """Like Python 3.5's implementation of os.walk() -- faster than
+ the pre-Python 3.5 version as it uses scandir() internally.
+ """
+ dirs = []
+ nondirs = []
+
+ # We may not have read permission for top, in which case we can't
+ # get a list of the files the directory contains. os.walk
+ # always suppressed the exception then, rather than blow up for a
+ # minor reason when (say) a thousand readable directories are still
+ # left to visit. That logic is copied here.
+ try:
+ scandir_it = scandir(top)
+ except OSError as error:
+ if onerror is not None:
+ onerror(error)
+ return
+
+ while True:
+ try:
+ try:
+ entry = next(scandir_it)
+ except StopIteration:
+ break
+ except OSError as error:
+ if onerror is not None:
+ onerror(error)
+ return
+
+ try:
+ is_dir = entry.is_dir()
+ except OSError:
+            # If is_dir() raises an OSError, consider that the entry is not
+            # a directory, the same behaviour as os.path.isdir().
+ is_dir = False
+
+ if is_dir:
+ dirs.append(entry.name)
+ else:
+ nondirs.append(entry.name)
+
+ if not topdown and is_dir:
+ # Bottom-up: recurse into sub-directory, but exclude symlinks to
+ # directories if followlinks is False
+ if followlinks:
+ walk_into = True
+ else:
+ try:
+ is_symlink = entry.is_symlink()
+ except OSError:
+                    # If is_symlink() raises an OSError, consider that the
+                    # entry is not a symbolic link, the same behaviour as
+                    # os.path.islink().
+ is_symlink = False
+ walk_into = not is_symlink
+
+ if walk_into:
+ for entry in walk(entry.path, topdown, onerror, followlinks):
+ yield entry
+
+ # Yield before recursion if going top down
+ if topdown:
+ yield top, dirs, nondirs
+
+ # Recurse into sub-directories
+ for name in dirs:
+ new_path = join(top, name)
+ # Issue #23605: os.path.islink() is used instead of caching
+ # entry.is_symlink() result during the loop on os.scandir() because
+ # the caller can replace the directory entry during the "yield"
+ # above.
+ if followlinks or not islink(new_path):
+ for entry in walk(new_path, topdown, onerror, followlinks):
+ yield entry
+ else:
+ # Yield after recursion if going bottom up
+ yield top, dirs, nondirs
+
+
+if IS_PY3 or sys.platform != 'win32':
+ walk = _walk
+else:
+ # Fix for broken unicode handling on Windows on Python 2.x, see:
+ # https://github.com/benhoyt/scandir/issues/54
+ file_system_encoding = sys.getfilesystemencoding()
+
+ def walk(top, topdown=True, onerror=None, followlinks=False):
+ if isinstance(top, bytes):
+ top = top.decode(file_system_encoding)
+ return _walk(top, topdown, onerror, followlinks)
diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/LICENSE
new file mode 100644
index 0000000000..de6633112c
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/LICENSE
@@ -0,0 +1,18 @@
+Copyright (c) 2010-2020 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/METADATA
new file mode 100644
index 0000000000..869bf25a88
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/METADATA
@@ -0,0 +1,49 @@
+Metadata-Version: 2.1
+Name: six
+Version: 1.15.0
+Summary: Python 2 and 3 compatibility utilities
+Home-page: https://github.com/benjaminp/six
+Author: Benjamin Peterson
+Author-email: benjamin@python.org
+License: MIT
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*
+
+.. image:: https://img.shields.io/pypi/v/six.svg
+ :target: https://pypi.org/project/six/
+ :alt: six on PyPI
+
+.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master
+ :target: https://travis-ci.org/benjaminp/six
+ :alt: six on TravisCI
+
+.. image:: https://readthedocs.org/projects/six/badge/?version=latest
+ :target: https://six.readthedocs.io/
+ :alt: six's documentation on Read the Docs
+
+.. image:: https://img.shields.io/badge/license-MIT-green.svg
+ :target: https://github.com/benjaminp/six/blob/master/LICENSE
+ :alt: MIT License badge
+
+Six is a Python 2 and 3 compatibility library. It provides utility functions
+for smoothing over the differences between the Python versions with the goal of
+writing Python code that is compatible with both Python versions. See the
+documentation for more information on what is provided.
+
+Six supports Python 2.7 and 3.3+. It is contained in only one Python
+file, so it can be easily copied into your project. (The copyright and license
+notice must be retained.)
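+
+A minimal sketch of the kind of code six enables, using only the public
+constants (``six.PY2``, ``six.text_type``) and the ``six.moves`` aliases
+defined in ``six.py``:
+
+.. code-block:: python
+
+    import six
+    from six.moves import range  # xrange on Python 2, range on Python 3
+
+    if six.PY2:
+        text = unicode('hello')  # 'unicode' only exists on Python 2
+    else:
+        text = 'hello'
+
+    assert isinstance(text, six.text_type)
+    assert sum(range(5)) == 10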
+
+Online documentation is at https://six.readthedocs.io/.
+
+Bugs can be reported to https://github.com/benjaminp/six. The code can also
+be found there.
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/RECORD
new file mode 100644
index 0000000000..4cccdb4af6
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/RECORD
@@ -0,0 +1,6 @@
+six.py,sha256=U4Z_yv534W5CNyjY9i8V1OXY2SjAny8y2L5vDLhhThM,34159
+six-1.15.0.dist-info/LICENSE,sha256=i7hQxWWqOJ_cFvOkaWWtI9gq3_YPI5P8J2K2MYXo5sk,1066
+six-1.15.0.dist-info/METADATA,sha256=W6rlyoeMZHXh6srP9NXNsm0rjAf_660re8WdH5TBT8E,1795
+six-1.15.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+six-1.15.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4
+six-1.15.0.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/WHEEL
new file mode 100644
index 0000000000..ef99c6cf32
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..ffe2fce498
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+six
diff --git a/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six.py b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six.py
new file mode 100644
index 0000000000..83f69783d1
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/six-1.15.0-py2.py3-none-any/six.py
@@ -0,0 +1,982 @@
+# Copyright (c) 2010-2020 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.15.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+    This class implements a PEP 302 finder and loader. It should be compatible
+    with Python 2.5 and all existing versions of Python 3.
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+        Return true if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP451)
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+        Required if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("getoutput", "commands", "subprocess"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
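+
+# Usage sketch (illustrative only; nothing below is executed by this module):
+# client code imports relocated modules and renamed builtins through
+# ``six.moves`` under their Python 3 names and gets the right object on either
+# major version, e.g.::
+#
+#     from six.moves import configparser, range
+#     from six.moves.urllib.parse import urlparse
+#
+#     config = configparser.ConfigParser()
+#     parts = urlparse("https://example.org/index.html")
+#     total = sum(range(10))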
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+ MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
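+# Usage sketch (hedged; the registered name below is illustrative, not part of
+# the bundled moves): a project can register its own compatibility alias with
+# add_move() and then import it through six.moves, e.g.::
+#
+#     import six
+#     six.add_move(six.MovedModule("mock", "mock", "unittest.mock"))
+#     from six.moves import mock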
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
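+
+# Usage sketch (illustrative): the helpers above normalize function and method
+# introspection across the two attribute naming schemes, e.g.::
+#
+#     import six
+#
+#     class Greeter(object):
+#         def greet(self):
+#             return "hello"
+#
+#     func = six.get_unbound_function(Greeter.greet)    # plain function on 2 and 3
+#     bound = six.create_bound_method(func, Greeter())  # bound method on 2 and 3
+#     code = six.get_function_code(func)                # func_code / __code__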
+
+
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
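+# Usage sketch (illustrative): the iter*/view* helpers select the lazy
+# dictionary protocol available on the running interpreter, e.g.::
+#
+#     import six
+#
+#     totals = {"a": 1, "b": 2}
+#     pairs = sorted(six.iteritems(totals))   # [("a", 1), ("b", 2)]
+#     keys_view = six.viewkeys(totals)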
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+ unichr = chr
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ del io
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+ _assertNotRegex = "assertNotRegex"
+else:
+ def b(s):
+ return s
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+def assertNotRegex(self, *args, **kwargs):
+ return getattr(self, _assertNotRegex)(*args, **kwargs)
+
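+# Usage sketch (illustrative): the assert* wrappers dispatch to whichever
+# method name the running unittest provides, e.g.::
+#
+#     import unittest
+#     import six
+#
+#     class ExampleTest(unittest.TestCase):
+#         def test_counts(self):
+#             six.assertCountEqual(self, [1, 2, 2], [2, 1, 2])
+#
+#         def test_pattern(self):
+#             six.assertRegex(self, "error: 404", r"\d{3}")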
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ try:
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+ finally:
+ value = None
+ tb = None
+
+else:
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+ exec_("""def reraise(tp, value, tb=None):
+ try:
+ raise tp, value, tb
+ finally:
+ tb = None
+""")
+
+
+if sys.version_info[:2] > (3,):
+ exec_("""def raise_from(value, from_value):
+ try:
+ raise value from from_value
+ finally:
+ value = None
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
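+# Usage sketch (illustrative): reraise() re-raises with the original traceback
+# and raise_from() emulates ``raise X from Y`` where that syntax exists, e.g.::
+#
+#     import sys
+#     import six
+#
+#     try:
+#         {}["missing"]
+#     except KeyError:
+#         exc_info = sys.exc_info()
+#         six.reraise(*exc_info)   # re-raise, keeping the original traceback
+#         # or: six.raise_from(RuntimeError("lookup failed"), exc_info[1])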
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
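+
+# Usage sketch (illustrative): print_() mirrors the Python 3 print() built-in
+# (keyword-only sep/end/file, plus flush where the interpreter supports it)::
+#
+#     import sys
+#     import six
+#
+#     six.print_("loaded", 3, "plugins", sep=" ", end="\n", file=sys.stderr)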
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+    # This does exactly what the :func:`py3:functools.update_wrapper`
+    # function does on Python versions after 3.2. It sets the ``__wrapped__``
+    # attribute on the ``wrapper`` object and it doesn't raise an error if
+    # any of the attributes mentioned in ``assigned`` and ``updated`` are
+    # missing on the ``wrapped`` object.
+ def _update_wrapper(wrapper, wrapped,
+ assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ for attr in assigned:
+ try:
+ value = getattr(wrapped, attr)
+ except AttributeError:
+ continue
+ else:
+ setattr(wrapper, attr, value)
+ for attr in updated:
+ getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
+ wrapper.__wrapped__ = wrapped
+ return wrapper
+ _update_wrapper.__doc__ = functools.update_wrapper.__doc__
+
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ return functools.partial(_update_wrapper, wrapped=wrapped,
+ assigned=assigned, updated=updated)
+ wraps.__doc__ = functools.wraps.__doc__
+
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(type):
+
+ def __new__(cls, name, this_bases, d):
+ if sys.version_info[:2] >= (3, 7):
+ # This version introduced PEP 560 that requires a bit
+ # of extra care (we mimic what is done by __build_class__).
+ resolved_bases = types.resolve_bases(bases)
+ if resolved_bases is not bases:
+ d['__orig_bases__'] = bases
+ else:
+ resolved_bases = bases
+ return meta(name, resolved_bases, d)
+
+ @classmethod
+ def __prepare__(cls, name, this_bases):
+ return meta.__prepare__(name, bases)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
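+# Usage sketch (illustrative): the temporary metaclass above lets a single
+# class statement work with both metaclass syntaxes, e.g.::
+#
+#     import six
+#
+#     class Meta(type):
+#         pass
+#
+#     class Base(six.with_metaclass(Meta, object)):
+#         pass
+#
+#     assert type(Base) is Meta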
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ if hasattr(cls, '__qualname__'):
+ orig_vars['__qualname__'] = cls.__qualname__
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
+
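+# Usage sketch (illustrative): the decorator form rebuilds the class under the
+# requested metaclass and, as the code above shows, copes with __slots__::
+#
+#     import six
+#
+#     class Meta(type):
+#         pass
+#
+#     @six.add_metaclass(Meta)
+#     class Widget(object):
+#         __slots__ = ("name",)
+#
+#     assert type(Widget) is Meta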
+
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
+ if isinstance(s, binary_type):
+ return s
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ # Optimization: Fast return for the common case.
+ if type(s) is str:
+ return s
+ if PY2 and isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ return s
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
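+# Usage sketch (illustrative) for the ensure_* coercion helpers defined above::
+#
+#     import six
+#
+#     six.ensure_binary(u"caf\u00e9")   # -> b"caf\xc3\xa9" (UTF-8 by default)
+#     six.ensure_text(b"caf\xc3\xa9")   # -> u"caf\u00e9"
+#     six.ensure_str(b"id=42")          # -> native str on the running version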
+
+def python_2_unicode_compatible(klass):
+ """
+ A class decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
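+# Usage sketch (illustrative): define __str__ once, returning text, and let the
+# decorator supply the Python 2 __unicode__/__str__ pair::
+#
+#     import six
+#
+#     @six.python_2_unicode_compatible
+#     class Tag(object):
+#         def __init__(self, name):
+#             self.name = name
+#
+#         def __str__(self):
+#             return u"<%s>" % self.name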
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+        # inserted an importer with a different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/LICENSE
new file mode 100644
index 0000000000..583f9f6e61
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/LICENSE
@@ -0,0 +1,254 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations (now Zope
+Corporation, see http://www.zope.com). In 2001, the Python Software
+Foundation (PSF, see http://www.python.org/psf/) was formed, a
+non-profit organization created specifically to own Python-related
+Intellectual Property. Zope Corporation is a sponsoring member of
+the PSF.
+
+All Python releases are Open Source (see http://www.opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+ Release Derived Year Owner GPL-
+ from compatible? (1)
+
+ 0.9.0 thru 1.2 1991-1995 CWI yes
+ 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
+ 1.6 1.5.2 2000 CNRI no
+ 2.0 1.6 2000 BeOpen.com no
+ 1.6.1 1.6 2001 CNRI yes (2)
+ 2.1 2.0+1.6.1 2001 PSF no
+ 2.0.1 2.0+1.6.1 2001 PSF yes
+ 2.1.1 2.1+2.0.1 2001 PSF yes
+ 2.1.2 2.1.1 2002 PSF yes
+ 2.1.3 2.1.2 2002 PSF yes
+ 2.2 and above 2.1.1 2001-now PSF yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+ because its license has a choice of law clause. According to
+ CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+ is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
+retained in Python alone or in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions. Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee. This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party. As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee. Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement. This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013. This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee. This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+ ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands. All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/METADATA
new file mode 100644
index 0000000000..d98aa65996
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/METADATA
@@ -0,0 +1,41 @@
+Metadata-Version: 2.1
+Name: typing
+Version: 3.7.4.1
+Summary: Type Hints for Python
+Home-page: https://docs.python.org/3/library/typing.html
+Author: Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Ivan Levkivskyi
+Author-email: jukka.lehtosalo@iki.fi
+License: PSF
+Keywords: typing function annotations type hints hinting checking checker typehints typehinting typechecking backport
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Topic :: Software Development
+
+Typing -- Type Hints for Python
+
+This is a backport of the standard library typing module to Python
+versions older than 3.5. (See note below for newer versions.)
+
+Typing defines a standard notation for Python function and variable
+type annotations. The notation can be used for documenting code in a
+concise, standard format, and it has been designed to also be used by
+static and runtime type checkers, static analyzers, IDEs and other
+tools.
+
+NOTE: in Python 3.5 and later, the typing module lives in the stdlib,
+and installing this package has NO EFFECT. To get a newer version of
+the typing module in Python 3.5 or later, you have to upgrade to a
+newer Python (bugfix) version. For example, typing in Python 3.6.0 is
+missing the definition of 'Type' -- upgrading to 3.6.2 will fix this.
+
+Also note that most improvements to the typing module in Python 3.7
+will not be included in this package, since Python 3.7 has some
+built-in support that is not present in older versions (See PEP 560.)
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/RECORD
new file mode 100644
index 0000000000..921edbf40f
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/RECORD
@@ -0,0 +1,6 @@
+typing.py,sha256=JfGga08eJ_AJ-n_EX5EHtDjNNI5h79rYSXucibO6yNg,80432
+typing-3.7.4.1.dist-info/LICENSE,sha256=_xfOlOECAk3raHc-scx0ynbaTmWPNzUx8Kwi1oprsa0,12755
+typing-3.7.4.1.dist-info/METADATA,sha256=bDK323dZ06sy5ADWZkwBpgq6jS9nwECYjA2oysfGjeg,1798
+typing-3.7.4.1.dist-info/WHEEL,sha256=p46_5Uhzqz6AzeSosiOnxK-zmFja1i22CrQCjmYe8ec,92
+typing-3.7.4.1.dist-info/top_level.txt,sha256=oG8QCMTRcfcgGpEVbdwBU2DM8MthjmZSDaaQ6WWHx4o,7
+typing-3.7.4.1.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/WHEEL
new file mode 100644
index 0000000000..3b5c4038dd
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.33.6)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/top_level.txt
new file mode 100644
index 0000000000..c997f364b4
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+typing
diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing.py b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing.py
new file mode 100644
index 0000000000..62a677eee3
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.1-py3-none-any/typing.py
@@ -0,0 +1,2422 @@
+import abc
+from abc import abstractmethod, abstractproperty
+import collections
+import contextlib
+import functools
+import re as stdlib_re # Avoid confusion with the re we export.
+import sys
+import types
+try:
+ import collections.abc as collections_abc
+except ImportError:
+ import collections as collections_abc # Fallback for PY3.2.
+if sys.version_info[:2] >= (3, 6):
+ import _collections_abc # Needed for private function _check_methods # noqa
+try:
+ from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType
+except ImportError:
+ WrapperDescriptorType = type(object.__init__)
+ MethodWrapperType = type(object().__str__)
+ MethodDescriptorType = type(str.join)
+
+
+# Please keep __all__ alphabetized within each category.
+__all__ = [
+ # Super-special typing primitives.
+ 'Any',
+ 'Callable',
+ 'ClassVar',
+ 'Generic',
+ 'Optional',
+ 'Tuple',
+ 'Type',
+ 'TypeVar',
+ 'Union',
+
+ # ABCs (from collections.abc).
+ 'AbstractSet', # collections.abc.Set.
+ 'GenericMeta', # subclass of abc.ABCMeta and a metaclass
+ # for 'Generic' and ABCs below.
+ 'ByteString',
+ 'Container',
+ 'ContextManager',
+ 'Hashable',
+ 'ItemsView',
+ 'Iterable',
+ 'Iterator',
+ 'KeysView',
+ 'Mapping',
+ 'MappingView',
+ 'MutableMapping',
+ 'MutableSequence',
+ 'MutableSet',
+ 'Sequence',
+ 'Sized',
+ 'ValuesView',
+ # The following are added depending on presence
+ # of their non-generic counterparts in stdlib:
+ # Awaitable,
+ # AsyncIterator,
+ # AsyncIterable,
+ # Coroutine,
+ # Collection,
+ # AsyncGenerator,
+ # AsyncContextManager
+
+ # Structural checks, a.k.a. protocols.
+ 'Reversible',
+ 'SupportsAbs',
+ 'SupportsBytes',
+ 'SupportsComplex',
+ 'SupportsFloat',
+ 'SupportsIndex',
+ 'SupportsInt',
+ 'SupportsRound',
+
+ # Concrete collection types.
+ 'Counter',
+ 'Deque',
+ 'Dict',
+ 'DefaultDict',
+ 'List',
+ 'Set',
+ 'FrozenSet',
+ 'NamedTuple', # Not really a type.
+ 'Generator',
+
+ # One-off things.
+ 'AnyStr',
+ 'cast',
+ 'get_type_hints',
+ 'NewType',
+ 'no_type_check',
+ 'no_type_check_decorator',
+ 'NoReturn',
+ 'overload',
+ 'Text',
+ 'TYPE_CHECKING',
+]
+
+# The pseudo-submodules 're' and 'io' are part of the public
+# namespace, but excluded from __all__ because they might stomp on
+# legitimate imports of those modules.
+
+
+def _qualname(x):
+ if sys.version_info[:2] >= (3, 3):
+ return x.__qualname__
+ else:
+ # Fall back to just name.
+ return x.__name__
+
+
+def _trim_name(nm):
+ whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase')
+ if nm.startswith('_') and nm not in whitelist:
+ nm = nm[1:]
+ return nm
+
+
+class TypingMeta(type):
+ """Metaclass for most types defined in typing module
+ (not a part of public API).
+
+ This overrides __new__() to require an extra keyword parameter
+ '_root', which serves as a guard against naive subclassing of the
+ typing classes. Any legitimate class defined using a metaclass
+ derived from TypingMeta must pass _root=True.
+
+ This also defines a dummy constructor (all the work for most typing
+ constructs is done in __new__) and a nicer repr().
+ """
+
+ _is_protocol = False
+
+ def __new__(cls, name, bases, namespace, *, _root=False):
+ if not _root:
+ raise TypeError("Cannot subclass %s" %
+ (', '.join(map(_type_repr, bases)) or '()'))
+ return super().__new__(cls, name, bases, namespace)
+
+ def __init__(self, *args, **kwds):
+ pass
+
+ def _eval_type(self, globalns, localns):
+ """Override this in subclasses to interpret forward references.
+
+ For example, List['C'] is internally stored as
+ List[_ForwardRef('C')], which should evaluate to List[C],
+ where C is an object found in globalns or localns (searching
+ localns first, of course).
+ """
+ return self
+
+ def _get_type_vars(self, tvars):
+ pass
+
+ def __repr__(self):
+ qname = _trim_name(_qualname(self))
+ return '%s.%s' % (self.__module__, qname)
+
+
+class _TypingBase(metaclass=TypingMeta, _root=True):
+ """Internal indicator of special typing constructs."""
+
+ __slots__ = ('__weakref__',)
+
+ def __init__(self, *args, **kwds):
+ pass
+
+ def __new__(cls, *args, **kwds):
+ """Constructor.
+
+ This only exists to give a better error message in case
+ someone tries to subclass a special typing object (not a good idea).
+ """
+ if (len(args) == 3 and
+ isinstance(args[0], str) and
+ isinstance(args[1], tuple)):
+ # Close enough.
+ raise TypeError("Cannot subclass %r" % cls)
+ return super().__new__(cls)
+
+ # Things that are not classes also need these.
+ def _eval_type(self, globalns, localns):
+ return self
+
+ def _get_type_vars(self, tvars):
+ pass
+
+ def __repr__(self):
+ cls = type(self)
+ qname = _trim_name(_qualname(cls))
+ return '%s.%s' % (cls.__module__, qname)
+
+ def __call__(self, *args, **kwds):
+ raise TypeError("Cannot instantiate %r" % type(self))
+
+
+class _FinalTypingBase(_TypingBase, _root=True):
+ """Internal mix-in class to prevent instantiation.
+
+ Prevents instantiation unless _root=True is given in class call.
+ It is used to create pseudo-singleton instances Any, Union, Optional, etc.
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, *args, _root=False, **kwds):
+ self = super().__new__(cls, *args, **kwds)
+ if _root is True:
+ return self
+ raise TypeError("Cannot instantiate %r" % cls)
+
+ def __reduce__(self):
+ return _trim_name(type(self).__name__)
+
+
+class _ForwardRef(_TypingBase, _root=True):
+ """Internal wrapper to hold a forward reference."""
+
+ __slots__ = ('__forward_arg__', '__forward_code__',
+ '__forward_evaluated__', '__forward_value__')
+
+ def __init__(self, arg):
+ super().__init__(arg)
+ if not isinstance(arg, str):
+ raise TypeError('Forward reference must be a string -- got %r' % (arg,))
+ try:
+ code = compile(arg, '<string>', 'eval')
+ except SyntaxError:
+ raise SyntaxError('Forward reference must be an expression -- got %r' %
+ (arg,))
+ self.__forward_arg__ = arg
+ self.__forward_code__ = code
+ self.__forward_evaluated__ = False
+ self.__forward_value__ = None
+
+ def _eval_type(self, globalns, localns):
+ if not self.__forward_evaluated__ or localns is not globalns:
+ if globalns is None and localns is None:
+ globalns = localns = {}
+ elif globalns is None:
+ globalns = localns
+ elif localns is None:
+ localns = globalns
+ self.__forward_value__ = _type_check(
+ eval(self.__forward_code__, globalns, localns),
+ "Forward references must evaluate to types.")
+ self.__forward_evaluated__ = True
+ return self.__forward_value__
+
+ def __eq__(self, other):
+ if not isinstance(other, _ForwardRef):
+ return NotImplemented
+ return (self.__forward_arg__ == other.__forward_arg__ and
+ self.__forward_value__ == other.__forward_value__)
+
+ def __hash__(self):
+ return hash((self.__forward_arg__, self.__forward_value__))
+
+ def __instancecheck__(self, obj):
+ raise TypeError("Forward references cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Forward references cannot be used with issubclass().")
+
+ def __repr__(self):
+ return '_ForwardRef(%r)' % (self.__forward_arg__,)
+
+
+class _TypeAlias(_TypingBase, _root=True):
+ """Internal helper class for defining generic variants of concrete types.
+
+ Note that this is not a type; let's call it a pseudo-type. It cannot
+ be used in instance and subclass checks in parameterized form, i.e.
+ ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning
+ ``False``.
+ """
+
+ __slots__ = ('name', 'type_var', 'impl_type', 'type_checker')
+
+ def __init__(self, name, type_var, impl_type, type_checker):
+ """Initializer.
+
+ Args:
+ name: The name, e.g. 'Pattern'.
+ type_var: The type parameter, e.g. AnyStr, or the
+ specific type, e.g. str.
+ impl_type: The implementation type.
+            type_checker: Function that takes an impl_type instance
+                and returns a value that should be a type_var instance.
+ """
+ assert isinstance(name, str), repr(name)
+ assert isinstance(impl_type, type), repr(impl_type)
+ assert not isinstance(impl_type, TypingMeta), repr(impl_type)
+ assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
+ self.name = name
+ self.type_var = type_var
+ self.impl_type = impl_type
+ self.type_checker = type_checker
+
+ def __repr__(self):
+ return "%s[%s]" % (self.name, _type_repr(self.type_var))
+
+ def __getitem__(self, parameter):
+ if not isinstance(self.type_var, TypeVar):
+ raise TypeError("%s cannot be further parameterized." % self)
+ if self.type_var.__constraints__ and isinstance(parameter, type):
+ if not issubclass(parameter, self.type_var.__constraints__):
+ raise TypeError("%s is not a valid substitution for %s." %
+ (parameter, self.type_var))
+ if isinstance(parameter, TypeVar) and parameter is not self.type_var:
+ raise TypeError("%s cannot be re-parameterized." % self)
+ return self.__class__(self.name, parameter,
+ self.impl_type, self.type_checker)
+
+ def __eq__(self, other):
+ if not isinstance(other, _TypeAlias):
+ return NotImplemented
+ return self.name == other.name and self.type_var == other.type_var
+
+ def __hash__(self):
+ return hash((self.name, self.type_var))
+
+ def __instancecheck__(self, obj):
+ if not isinstance(self.type_var, TypeVar):
+ raise TypeError("Parameterized type aliases cannot be used "
+ "with isinstance().")
+ return isinstance(obj, self.impl_type)
+
+ def __subclasscheck__(self, cls):
+ if not isinstance(self.type_var, TypeVar):
+ raise TypeError("Parameterized type aliases cannot be used "
+ "with issubclass().")
+ return issubclass(cls, self.impl_type)
+
+
+def _get_type_vars(types, tvars):
+ for t in types:
+ if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
+ t._get_type_vars(tvars)
+
+
+def _type_vars(types):
+ tvars = []
+ _get_type_vars(types, tvars)
+ return tuple(tvars)
+
+
+def _eval_type(t, globalns, localns):
+ if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
+ return t._eval_type(globalns, localns)
+ return t
+
+
+def _type_check(arg, msg):
+ """Check that the argument is a type, and return it (internal helper).
+
+ As a special case, accept None and return type(None) instead.
+ Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable.
+
+ The msg argument is a human-readable error message, e.g.
+
+ "Union[arg, ...]: arg should be a type."
+
+ We append the repr() of the actual value (truncated to 100 chars).
+ """
+ if arg is None:
+ return type(None)
+ if isinstance(arg, str):
+ arg = _ForwardRef(arg)
+ if (
+ isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or
+ not isinstance(arg, (type, _TypingBase)) and not callable(arg)
+ ):
+ raise TypeError(msg + " Got %.100r." % (arg,))
+ # Bare Union etc. are not valid as type arguments
+ if (
+ type(arg).__name__ in ('_Union', '_Optional') and
+ not getattr(arg, '__origin__', None) or
+ isinstance(arg, TypingMeta) and arg._gorg in (Generic, _Protocol)
+ ):
+ raise TypeError("Plain %s is not valid as type argument" % arg)
+ return arg
+
+
+def _type_repr(obj):
+ """Return the repr() of an object, special-casing types (internal helper).
+
+ If obj is a type, we return a shorter version than the default
+ type.__repr__, based on the module and qualified name, which is
+ typically enough to uniquely identify a type. For everything
+ else, we fall back on repr(obj).
+ """
+ if isinstance(obj, type) and not isinstance(obj, TypingMeta):
+ if obj.__module__ == 'builtins':
+ return _qualname(obj)
+ return '%s.%s' % (obj.__module__, _qualname(obj))
+ if obj is ...:
+ return('...')
+ if isinstance(obj, types.FunctionType):
+ return obj.__name__
+ return repr(obj)
+
+
+class _Any(_FinalTypingBase, _root=True):
+ """Special type indicating an unconstrained type.
+
+ - Any is compatible with every type.
+    - Any is assumed to have all methods.
+    - All values are assumed to be instances of Any.
+
+ Note that all the above statements are true from the point of view of
+ static type checkers. At runtime, Any should not be used with instance
+ or class checks.
+ """
+
+ __slots__ = ()
+
+ def __instancecheck__(self, obj):
+ raise TypeError("Any cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Any cannot be used with issubclass().")
+
+
+Any = _Any(_root=True)
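+
+# Usage sketch (illustrative): Any is written in annotations where a value is
+# deliberately unconstrained; it is rejected by isinstance()/issubclass()::
+#
+#     from typing import Any
+#
+#     def describe(value: Any) -> str:
+#         return repr(value)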
+
+
+class _NoReturn(_FinalTypingBase, _root=True):
+ """Special type indicating functions that never return.
+ Example::
+
+ from typing import NoReturn
+
+ def stop() -> NoReturn:
+ raise Exception('no way')
+
+ This type is invalid in other positions, e.g., ``List[NoReturn]``
+ will fail in static type checkers.
+ """
+
+ __slots__ = ()
+
+ def __instancecheck__(self, obj):
+ raise TypeError("NoReturn cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("NoReturn cannot be used with issubclass().")
+
+
+NoReturn = _NoReturn(_root=True)
+
+
+class TypeVar(_TypingBase, _root=True):
+ """Type variable.
+
+ Usage::
+
+ T = TypeVar('T') # Can be anything
+ A = TypeVar('A', str, bytes) # Must be str or bytes
+
+ Type variables exist primarily for the benefit of static type
+ checkers. They serve as the parameters for generic types as well
+ as for generic function definitions. See class Generic for more
+ information on generic types. Generic functions work as follows:
+
+ def repeat(x: T, n: int) -> List[T]:
+ '''Return a list containing n references to x.'''
+ return [x]*n
+
+ def longest(x: A, y: A) -> A:
+ '''Return the longest of two strings.'''
+ return x if len(x) >= len(y) else y
+
+ The latter example's signature is essentially the overloading
+ of (str, str) -> str and (bytes, bytes) -> bytes. Also note
+ that if the arguments are instances of some subclass of str,
+ the return type is still plain str.
+
+ At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError.
+
+ Type variables defined with covariant=True or contravariant=True
+    can be used to declare covariant or contravariant generic types.
+ See PEP 484 for more details. By default generic types are invariant
+ in all type variables.
+
+ Type variables can be introspected. e.g.:
+
+ T.__name__ == 'T'
+ T.__constraints__ == ()
+ T.__covariant__ == False
+       T.__contravariant__ == False
+ A.__constraints__ == (str, bytes)
+ """
+
+ __slots__ = ('__name__', '__bound__', '__constraints__',
+ '__covariant__', '__contravariant__')
+
+ def __init__(self, name, *constraints, bound=None,
+ covariant=False, contravariant=False):
+ super().__init__(name, *constraints, bound=bound,
+ covariant=covariant, contravariant=contravariant)
+ self.__name__ = name
+ if covariant and contravariant:
+ raise ValueError("Bivariant types are not supported.")
+ self.__covariant__ = bool(covariant)
+ self.__contravariant__ = bool(contravariant)
+ if constraints and bound is not None:
+ raise TypeError("Constraints cannot be combined with bound=...")
+ if constraints and len(constraints) == 1:
+ raise TypeError("A single constraint is not allowed")
+ msg = "TypeVar(name, constraint, ...): constraints must be types."
+ self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
+ if bound:
+ self.__bound__ = _type_check(bound, "Bound must be a type.")
+ else:
+ self.__bound__ = None
+
+ def _get_type_vars(self, tvars):
+ if self not in tvars:
+ tvars.append(self)
+
+ def __repr__(self):
+ if self.__covariant__:
+ prefix = '+'
+ elif self.__contravariant__:
+ prefix = '-'
+ else:
+ prefix = '~'
+ return prefix + self.__name__
+
+ def __instancecheck__(self, instance):
+ raise TypeError("Type variables cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Type variables cannot be used with issubclass().")
+
+
+# Some unconstrained type variables. These are used by the container types.
+# (These are not for export.)
+T = TypeVar('T') # Any type.
+KT = TypeVar('KT') # Key type.
+VT = TypeVar('VT') # Value type.
+T_co = TypeVar('T_co', covariant=True) # Any type covariant containers.
+V_co = TypeVar('V_co', covariant=True) # Any type covariant containers.
+VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers.
+T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant.
+
+# A useful type variable with constraints. This represents string types.
+# (This one *is* for export!)
+AnyStr = TypeVar('AnyStr', bytes, str)
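+
+# Usage sketch (illustrative): AnyStr expresses "str in, str out or bytes in,
+# bytes out" for a single call::
+#
+#     def double(value: AnyStr) -> AnyStr:
+#         return value + value
+#
+#     double("ab")    # checks as str
+#     double(b"ab")   # checks as bytes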
+
+
+def _replace_arg(arg, tvars, args):
+ """An internal helper function: replace arg if it is a type variable
+ found in tvars with corresponding substitution from args or
+ with corresponding substitution sub-tree if arg is a generic type.
+ """
+
+ if tvars is None:
+ tvars = []
+ if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)):
+ return arg._subs_tree(tvars, args)
+ if isinstance(arg, TypeVar):
+ for i, tvar in enumerate(tvars):
+ if arg == tvar:
+ return args[i]
+ return arg
+
+
+# Special typing constructs Union, Optional, Generic, Callable and Tuple
+# use three special attributes for internal bookkeeping of generic types:
+# * __parameters__ is a tuple of unique free type parameters of a generic
+# type, for example, Dict[T, T].__parameters__ == (T,);
+# * __origin__ keeps a reference to a type that was subscripted,
+# e.g., Union[T, int].__origin__ == Union;
+# * __args__ is a tuple of all arguments used in subscripting,
+# e.g., Dict[T, int].__args__ == (T, int).
+
+
+def _subs_tree(cls, tvars=None, args=None):
+ """An internal helper function: calculate substitution tree
+ for generic cls after replacing its type parameters with
+ substitutions in tvars -> args (if any).
+ Repeat the same following __origin__'s.
+
+ Return a list of arguments with all possible substitutions
+ performed. Arguments that are generic classes themselves are represented
+ as tuples (so that no new classes are created by this function).
+ For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)]
+ """
+
+ if cls.__origin__ is None:
+ return cls
+    # Make a chain of origins (i.e. cls -> cls.__origin__)
+ current = cls.__origin__
+ orig_chain = []
+ while current.__origin__ is not None:
+ orig_chain.append(current)
+ current = current.__origin__
+ # Replace type variables in __args__ if asked ...
+ tree_args = []
+ for arg in cls.__args__:
+ tree_args.append(_replace_arg(arg, tvars, args))
+ # ... then continue replacing down the origin chain.
+ for ocls in orig_chain:
+ new_tree_args = []
+ for arg in ocls.__args__:
+ new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args))
+ tree_args = new_tree_args
+ return tree_args
+
+
+def _remove_dups_flatten(parameters):
+ """An internal helper for Union creation and substitution: flatten Union's
+ among parameters, then remove duplicates and strict subclasses.
+ """
+
+ # Flatten out Union[Union[...], ...].
+ params = []
+ for p in parameters:
+ if isinstance(p, _Union) and p.__origin__ is Union:
+ params.extend(p.__args__)
+ elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
+ params.extend(p[1:])
+ else:
+ params.append(p)
+ # Weed out strict duplicates, preserving the first of each occurrence.
+ all_params = set(params)
+ if len(all_params) < len(params):
+ new_params = []
+ for t in params:
+ if t in all_params:
+ new_params.append(t)
+ all_params.remove(t)
+ params = new_params
+ assert not all_params, all_params
+ # Weed out subclasses.
+ # E.g. Union[int, Employee, Manager] == Union[int, Employee].
+    # If object is present, it will be the sole survivor among proper classes.
+ # Never discard type variables.
+ # (In particular, Union[str, AnyStr] != AnyStr.)
+ all_params = set(params)
+ for t1 in params:
+ if not isinstance(t1, type):
+ continue
+ if any(isinstance(t2, type) and issubclass(t1, t2)
+ for t2 in all_params - {t1}
+ if not (isinstance(t2, GenericMeta) and
+ t2.__origin__ is not None)):
+ all_params.remove(t1)
+ return tuple(t for t in params if t in all_params)
+
+
+def _check_generic(cls, parameters):
+ # Check correct count for parameters of a generic cls (internal helper).
+ if not cls.__parameters__:
+ raise TypeError("%s is not a generic class" % repr(cls))
+ alen = len(parameters)
+ elen = len(cls.__parameters__)
+ if alen != elen:
+ raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
+ ("many" if alen > elen else "few", repr(cls), alen, elen))
+
+
+_cleanups = []
+
+
+def _tp_cache(func):
+ """Internal wrapper caching __getitem__ of generic types with a fallback to
+ original function for non-hashable arguments.
+ """
+
+ cached = functools.lru_cache()(func)
+ _cleanups.append(cached.cache_clear)
+
+ @functools.wraps(func)
+ def inner(*args, **kwds):
+ try:
+ return cached(*args, **kwds)
+ except TypeError:
+ pass # All real errors (not unhashable args) are raised below.
+ return func(*args, **kwds)
+ return inner
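+
+# Illustrative effect of the cache (example only): in this implementation,
+# subscribing a generic type twice with the same hashable arguments typically
+# returns the very same cached object, while unhashable arguments silently
+# fall back to the uncached function.
+#
+#     assert List[int] is List[int]            # both lookups hit the lru_cache
+#     assert Dict[str, int] is Dict[str, int]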
+
+
+class _Union(_FinalTypingBase, _root=True):
+ """Union type; Union[X, Y] means either X or Y.
+
+ To define a union, use e.g. Union[int, str]. Details:
+
+ - The arguments must be types and there must be at least one.
+
+ - None as an argument is a special case and is replaced by
+ type(None).
+
+ - Unions of unions are flattened, e.g.::
+
+ Union[Union[int, str], float] == Union[int, str, float]
+
+ - Unions of a single argument vanish, e.g.::
+
+ Union[int] == int # The constructor actually returns int
+
+ - Redundant arguments are skipped, e.g.::
+
+ Union[int, str, int] == Union[int, str]
+
+ - When comparing unions, the argument order is ignored, e.g.::
+
+ Union[int, str] == Union[str, int]
+
+ - When two arguments have a subclass relationship, the least
+ derived argument is kept, e.g.::
+
+ class Employee: pass
+ class Manager(Employee): pass
+ Union[int, Employee, Manager] == Union[int, Employee]
+ Union[Manager, int, Employee] == Union[int, Employee]
+ Union[Employee, Manager] == Employee
+
+ - Similar for object::
+
+ Union[int, object] == object
+
+ - You cannot subclass or instantiate a union.
+
+ - You can use Optional[X] as a shorthand for Union[X, None].
+ """
+
+ __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__')
+
+ def __new__(cls, parameters=None, origin=None, *args, _root=False):
+ self = super().__new__(cls, parameters, origin, *args, _root=_root)
+ if origin is None:
+ self.__parameters__ = None
+ self.__args__ = None
+ self.__origin__ = None
+ self.__tree_hash__ = hash(frozenset(('Union',)))
+ return self
+ if not isinstance(parameters, tuple):
+ raise TypeError("Expected parameters=<tuple>")
+ if origin is Union:
+ parameters = _remove_dups_flatten(parameters)
+ # It's not a union if there's only one type left.
+ if len(parameters) == 1:
+ return parameters[0]
+ self.__parameters__ = _type_vars(parameters)
+ self.__args__ = parameters
+ self.__origin__ = origin
+ # Pre-calculate the __hash__ on instantiation.
+ # This improves speed for complex substitutions.
+ subs_tree = self._subs_tree()
+ if isinstance(subs_tree, tuple):
+ self.__tree_hash__ = hash(frozenset(subs_tree))
+ else:
+ self.__tree_hash__ = hash(subs_tree)
+ return self
+
+ def _eval_type(self, globalns, localns):
+ if self.__args__ is None:
+ return self
+ ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__)
+ ev_origin = _eval_type(self.__origin__, globalns, localns)
+ if ev_args == self.__args__ and ev_origin == self.__origin__:
+ # Everything is already evaluated.
+ return self
+ return self.__class__(ev_args, ev_origin, _root=True)
+
+ def _get_type_vars(self, tvars):
+ if self.__origin__ and self.__parameters__:
+ _get_type_vars(self.__parameters__, tvars)
+
+ def __repr__(self):
+ if self.__origin__ is None:
+ return super().__repr__()
+ tree = self._subs_tree()
+ if not isinstance(tree, tuple):
+ return repr(tree)
+ return tree[0]._tree_repr(tree)
+
+ def _tree_repr(self, tree):
+ arg_list = []
+ for arg in tree[1:]:
+ if not isinstance(arg, tuple):
+ arg_list.append(_type_repr(arg))
+ else:
+ arg_list.append(arg[0]._tree_repr(arg))
+ return super().__repr__() + '[%s]' % ', '.join(arg_list)
+
+ @_tp_cache
+ def __getitem__(self, parameters):
+ if parameters == ():
+ raise TypeError("Cannot take a Union of no types.")
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ if self.__origin__ is None:
+ msg = "Union[arg, ...]: each arg must be a type."
+ else:
+ msg = "Parameters to generic types must be types."
+ parameters = tuple(_type_check(p, msg) for p in parameters)
+ if self is not Union:
+ _check_generic(self, parameters)
+ return self.__class__(parameters, origin=self, _root=True)
+
+ def _subs_tree(self, tvars=None, args=None):
+ if self is Union:
+ return Union # Nothing to substitute
+ tree_args = _subs_tree(self, tvars, args)
+ tree_args = _remove_dups_flatten(tree_args)
+ if len(tree_args) == 1:
+ return tree_args[0] # Union of a single type is that type
+ return (Union,) + tree_args
+
+ def __eq__(self, other):
+ if isinstance(other, _Union):
+ return self.__tree_hash__ == other.__tree_hash__
+ elif self is not Union:
+ return self._subs_tree() == other
+ else:
+ return self is other
+
+ def __hash__(self):
+ return self.__tree_hash__
+
+ def __instancecheck__(self, obj):
+ raise TypeError("Unions cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Unions cannot be used with issubclass().")
+
+
+Union = _Union(_root=True)
+
+
+class _Optional(_FinalTypingBase, _root=True):
+ """Optional type.
+
+ Optional[X] is equivalent to Union[X, None].
+ """
+
+ __slots__ = ()
+
+ @_tp_cache
+ def __getitem__(self, arg):
+ arg = _type_check(arg, "Optional[t] requires a single type.")
+ return Union[arg, type(None)]
+
+
+Optional = _Optional(_root=True)
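+
+# Illustrative equivalences (example only): Optional[X] is plain sugar for
+# Union[X, None], and a redundant None collapses away.
+#
+#     assert Optional[int] == Union[int, None]
+#     assert Optional[Optional[str]] == Optional[str]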
+
+
+def _next_in_mro(cls):
+ """Helper for Generic.__new__.
+
+ Returns the class after the last occurrence of Generic or
+ Generic[...] in cls.__mro__.
+ """
+ next_in_mro = object
+ # Look for the last occurrence of Generic or Generic[...].
+ for i, c in enumerate(cls.__mro__[:-1]):
+ if isinstance(c, GenericMeta) and c._gorg is Generic:
+ next_in_mro = cls.__mro__[i + 1]
+ return next_in_mro
+
+
+def _make_subclasshook(cls):
+ """Construct a __subclasshook__ callable that incorporates
+ the associated __extra__ class in subclass checks performed
+ against cls.
+ """
+ if isinstance(cls.__extra__, abc.ABCMeta):
+ # The logic mirrors that of ABCMeta.__subclasscheck__.
+ # Registered classes need not be checked here because
+ # cls and its extra share the same _abc_registry.
+ def __extrahook__(subclass):
+ res = cls.__extra__.__subclasshook__(subclass)
+ if res is not NotImplemented:
+ return res
+ if cls.__extra__ in subclass.__mro__:
+ return True
+ for scls in cls.__extra__.__subclasses__():
+ if isinstance(scls, GenericMeta):
+ continue
+ if issubclass(subclass, scls):
+ return True
+ return NotImplemented
+ else:
+ # For non-ABC extras we'll just call issubclass().
+ def __extrahook__(subclass):
+ if cls.__extra__ and issubclass(subclass, cls.__extra__):
+ return True
+ return NotImplemented
+ return __extrahook__
+
+
+def _no_slots_copy(dct):
+ """Internal helper: copy class __dict__ and clean slots class variables.
+ (They will be re-created if necessary by normal class machinery.)
+ """
+ dict_copy = dict(dct)
+ if '__slots__' in dict_copy:
+ for slot in dict_copy['__slots__']:
+ dict_copy.pop(slot, None)
+ return dict_copy
+
+
+class GenericMeta(TypingMeta, abc.ABCMeta):
+ """Metaclass for generic types.
+
+ This is a metaclass for typing.Generic and generic ABCs defined in
+ typing module. User defined subclasses of GenericMeta can override
+ __new__ and invoke super().__new__. Note that GenericMeta.__new__
+ has strict rules on what is allowed in its bases argument:
+ * plain Generic is disallowed in bases;
+ * Generic[...] should appear in bases at most once;
+ * if Generic[...] is present, then it should list all type variables
+ that appear in other bases.
+    In addition, the type of all generic bases is erased, e.g., C[int] is
+ stripped to plain C.
+ """
+
+ def __new__(cls, name, bases, namespace,
+ tvars=None, args=None, origin=None, extra=None, orig_bases=None):
+ """Create a new generic class. GenericMeta.__new__ accepts
+        keyword arguments that are used for internal bookkeeping; therefore,
+ an override should pass unused keyword arguments to super().
+ """
+ if tvars is not None:
+ # Called from __getitem__() below.
+ assert origin is not None
+ assert all(isinstance(t, TypeVar) for t in tvars), tvars
+ else:
+ # Called from class statement.
+ assert tvars is None, tvars
+ assert args is None, args
+ assert origin is None, origin
+
+ # Get the full set of tvars from the bases.
+ tvars = _type_vars(bases)
+ # Look for Generic[T1, ..., Tn].
+ # If found, tvars must be a subset of it.
+ # If not found, tvars is it.
+ # Also check for and reject plain Generic,
+ # and reject multiple Generic[...].
+ gvars = None
+ for base in bases:
+ if base is Generic:
+ raise TypeError("Cannot inherit from plain Generic")
+ if (isinstance(base, GenericMeta) and
+ base.__origin__ is Generic):
+ if gvars is not None:
+ raise TypeError(
+                        "Cannot inherit from Generic[...] multiple times.")
+ gvars = base.__parameters__
+ if gvars is None:
+ gvars = tvars
+ else:
+ tvarset = set(tvars)
+ gvarset = set(gvars)
+ if not tvarset <= gvarset:
+ raise TypeError(
+ "Some type variables (%s) "
+ "are not listed in Generic[%s]" %
+ (", ".join(str(t) for t in tvars if t not in gvarset),
+ ", ".join(str(g) for g in gvars)))
+ tvars = gvars
+
+ initial_bases = bases
+ if extra is not None and type(extra) is abc.ABCMeta and extra not in bases:
+ bases = (extra,) + bases
+ bases = tuple(b._gorg if isinstance(b, GenericMeta) else b for b in bases)
+
+ # remove bare Generic from bases if there are other generic bases
+ if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
+ bases = tuple(b for b in bases if b is not Generic)
+ namespace.update({'__origin__': origin, '__extra__': extra,
+ '_gorg': None if not origin else origin._gorg})
+ self = super().__new__(cls, name, bases, namespace, _root=True)
+ super(GenericMeta, self).__setattr__('_gorg',
+ self if not origin else origin._gorg)
+ self.__parameters__ = tvars
+ # Be prepared that GenericMeta will be subclassed by TupleMeta
+        # and CallableMeta; those two allow ..., (), or [] in __args__.
+ self.__args__ = tuple(... if a is _TypingEllipsis else
+ () if a is _TypingEmpty else
+ a for a in args) if args else None
+ # Speed hack (https://github.com/python/typing/issues/196).
+ self.__next_in_mro__ = _next_in_mro(self)
+ # Preserve base classes on subclassing (__bases__ are type erased now).
+ if orig_bases is None:
+ self.__orig_bases__ = initial_bases
+
+ # This allows unparameterized generic collections to be used
+ # with issubclass() and isinstance() in the same way as their
+ # collections.abc counterparts (e.g., isinstance([], Iterable)).
+ if (
+ '__subclasshook__' not in namespace and extra or
+ # allow overriding
+ getattr(self.__subclasshook__, '__name__', '') == '__extrahook__'
+ ):
+ self.__subclasshook__ = _make_subclasshook(self)
+ if isinstance(extra, abc.ABCMeta):
+ self._abc_registry = extra._abc_registry
+ self._abc_cache = extra._abc_cache
+ elif origin is not None:
+ self._abc_registry = origin._abc_registry
+ self._abc_cache = origin._abc_cache
+
+ if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2.
+ self.__qualname__ = origin.__qualname__
+ self.__tree_hash__ = (hash(self._subs_tree()) if origin else
+ super(GenericMeta, self).__hash__())
+ return self
+
+ # _abc_negative_cache and _abc_negative_cache_version
+    # are realised as descriptors, since GenClass[t1, t2, ...] always
+    # shares subclass info with GenClass.
+ # This is an important memory optimization.
+ @property
+ def _abc_negative_cache(self):
+ if isinstance(self.__extra__, abc.ABCMeta):
+ return self.__extra__._abc_negative_cache
+ return self._gorg._abc_generic_negative_cache
+
+ @_abc_negative_cache.setter
+ def _abc_negative_cache(self, value):
+ if self.__origin__ is None:
+ if isinstance(self.__extra__, abc.ABCMeta):
+ self.__extra__._abc_negative_cache = value
+ else:
+ self._abc_generic_negative_cache = value
+
+ @property
+ def _abc_negative_cache_version(self):
+ if isinstance(self.__extra__, abc.ABCMeta):
+ return self.__extra__._abc_negative_cache_version
+ return self._gorg._abc_generic_negative_cache_version
+
+ @_abc_negative_cache_version.setter
+ def _abc_negative_cache_version(self, value):
+ if self.__origin__ is None:
+ if isinstance(self.__extra__, abc.ABCMeta):
+ self.__extra__._abc_negative_cache_version = value
+ else:
+ self._abc_generic_negative_cache_version = value
+
+ def _get_type_vars(self, tvars):
+ if self.__origin__ and self.__parameters__:
+ _get_type_vars(self.__parameters__, tvars)
+
+ def _eval_type(self, globalns, localns):
+ ev_origin = (self.__origin__._eval_type(globalns, localns)
+ if self.__origin__ else None)
+ ev_args = tuple(_eval_type(a, globalns, localns) for a
+ in self.__args__) if self.__args__ else None
+ if ev_origin == self.__origin__ and ev_args == self.__args__:
+ return self
+ return self.__class__(self.__name__,
+ self.__bases__,
+ _no_slots_copy(self.__dict__),
+ tvars=_type_vars(ev_args) if ev_args else None,
+ args=ev_args,
+ origin=ev_origin,
+ extra=self.__extra__,
+ orig_bases=self.__orig_bases__)
+
+ def __repr__(self):
+ if self.__origin__ is None:
+ return super().__repr__()
+ return self._tree_repr(self._subs_tree())
+
+ def _tree_repr(self, tree):
+ arg_list = []
+ for arg in tree[1:]:
+ if arg == ():
+ arg_list.append('()')
+ elif not isinstance(arg, tuple):
+ arg_list.append(_type_repr(arg))
+ else:
+ arg_list.append(arg[0]._tree_repr(arg))
+ return super().__repr__() + '[%s]' % ', '.join(arg_list)
+
+ def _subs_tree(self, tvars=None, args=None):
+ if self.__origin__ is None:
+ return self
+ tree_args = _subs_tree(self, tvars, args)
+ return (self._gorg,) + tuple(tree_args)
+
+ def __eq__(self, other):
+ if not isinstance(other, GenericMeta):
+ return NotImplemented
+ if self.__origin__ is None or other.__origin__ is None:
+ return self is other
+ return self.__tree_hash__ == other.__tree_hash__
+
+ def __hash__(self):
+ return self.__tree_hash__
+
+ @_tp_cache
+ def __getitem__(self, params):
+ if not isinstance(params, tuple):
+ params = (params,)
+ if not params and self._gorg is not Tuple:
+ raise TypeError(
+ "Parameter list to %s[...] cannot be empty" % _qualname(self))
+ msg = "Parameters to generic types must be types."
+ params = tuple(_type_check(p, msg) for p in params)
+ if self is Generic:
+ # Generic can only be subscripted with unique type variables.
+ if not all(isinstance(p, TypeVar) for p in params):
+ raise TypeError(
+ "Parameters to Generic[...] must all be type variables")
+ if len(set(params)) != len(params):
+ raise TypeError(
+ "Parameters to Generic[...] must all be unique")
+ tvars = params
+ args = params
+ elif self in (Tuple, Callable):
+ tvars = _type_vars(params)
+ args = params
+ elif self is _Protocol:
+ # _Protocol is internal, don't check anything.
+ tvars = params
+ args = params
+ elif self.__origin__ in (Generic, _Protocol):
+ # Can't subscript Generic[...] or _Protocol[...].
+ raise TypeError("Cannot subscript already-subscripted %s" %
+ repr(self))
+ else:
+ # Subscripting a regular Generic subclass.
+ _check_generic(self, params)
+ tvars = _type_vars(params)
+ args = params
+
+ prepend = (self,) if self.__origin__ is None else ()
+ return self.__class__(self.__name__,
+ prepend + self.__bases__,
+ _no_slots_copy(self.__dict__),
+ tvars=tvars,
+ args=args,
+ origin=self,
+ extra=self.__extra__,
+ orig_bases=self.__orig_bases__)
+
+ def __subclasscheck__(self, cls):
+ if self.__origin__ is not None:
+ if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
+ raise TypeError("Parameterized generics cannot be used with class "
+ "or instance checks")
+ return False
+ if self is Generic:
+ raise TypeError("Class %r cannot be used with class "
+ "or instance checks" % self)
+ return super().__subclasscheck__(cls)
+
+ def __instancecheck__(self, instance):
+ # Since we extend ABC.__subclasscheck__ and
+ # ABC.__instancecheck__ inlines the cache checking done by the
+ # latter, we must extend __instancecheck__ too. For simplicity
+ # we just skip the cache check -- instance checks for generic
+ # classes are supposed to be rare anyways.
+ return issubclass(instance.__class__, self)
+
+ def __setattr__(self, attr, value):
+ # We consider all the subscripted generics as proxies for original class
+ if (
+ attr.startswith('__') and attr.endswith('__') or
+ attr.startswith('_abc_') or
+ self._gorg is None # The class is not fully created, see #typing/506
+ ):
+ super(GenericMeta, self).__setattr__(attr, value)
+ else:
+ super(GenericMeta, self._gorg).__setattr__(attr, value)
+
+
+# Prevent checks for Generic to crash when defining Generic.
+Generic = None
+
+
+def _generic_new(base_cls, cls, *args, **kwds):
+    # Ensure the type is erased on instantiation,
+ # but attempt to store it in __orig_class__
+ if cls.__origin__ is None:
+ if (base_cls.__new__ is object.__new__ and
+ cls.__init__ is not object.__init__):
+ return base_cls.__new__(cls)
+ else:
+ return base_cls.__new__(cls, *args, **kwds)
+ else:
+ origin = cls._gorg
+ if (base_cls.__new__ is object.__new__ and
+ cls.__init__ is not object.__init__):
+ obj = base_cls.__new__(origin)
+ else:
+ obj = base_cls.__new__(origin, *args, **kwds)
+ try:
+ obj.__orig_class__ = cls
+ except AttributeError:
+ pass
+ obj.__init__(*args, **kwds)
+ return obj
+
+
+class Generic(metaclass=GenericMeta):
+ """Abstract base class for generic types.
+
+ A generic type is typically declared by inheriting from
+ this class parameterized with one or more type variables.
+ For example, a generic mapping type might be defined as::
+
+ class Mapping(Generic[KT, VT]):
+ def __getitem__(self, key: KT) -> VT:
+ ...
+ # Etc.
+
+ This class can then be used as follows::
+
+ def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
+ try:
+ return mapping[key]
+ except KeyError:
+ return default
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Generic:
+ raise TypeError("Type Generic cannot be instantiated; "
+ "it can be used only as a base class")
+ return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
+class _TypingEmpty:
+ """Internal placeholder for () or []. Used by TupleMeta and CallableMeta
+ to allow empty list/tuple in specific places, without allowing them
+ to sneak in where prohibited.
+ """
+
+
+class _TypingEllipsis:
+ """Internal placeholder for ... (ellipsis)."""
+
+
+class TupleMeta(GenericMeta):
+ """Metaclass for Tuple (internal)."""
+
+ @_tp_cache
+ def __getitem__(self, parameters):
+ if self.__origin__ is not None or self._gorg is not Tuple:
+ # Normal generic rules apply if this is not the first subscription
+ # or a subscription of a subclass.
+ return super().__getitem__(parameters)
+ if parameters == ():
+ return super().__getitem__((_TypingEmpty,))
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ if len(parameters) == 2 and parameters[1] is ...:
+ msg = "Tuple[t, ...]: t must be a type."
+ p = _type_check(parameters[0], msg)
+ return super().__getitem__((p, _TypingEllipsis))
+ msg = "Tuple[t0, t1, ...]: each t must be a type."
+ parameters = tuple(_type_check(p, msg) for p in parameters)
+ return super().__getitem__(parameters)
+
+ def __instancecheck__(self, obj):
+ if self.__args__ is None:
+ return isinstance(obj, tuple)
+ raise TypeError("Parameterized Tuple cannot be used "
+ "with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ if self.__args__ is None:
+ return issubclass(cls, tuple)
+ raise TypeError("Parameterized Tuple cannot be used "
+ "with issubclass().")
+
+
+class Tuple(tuple, extra=tuple, metaclass=TupleMeta):
+ """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
+
+ Example: Tuple[T1, T2] is a tuple of two elements corresponding
+ to type variables T1 and T2. Tuple[int, float, str] is a tuple
+ of an int, a float and a string.
+
+ To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Tuple:
+ raise TypeError("Type Tuple cannot be instantiated; "
+ "use tuple() instead")
+ return _generic_new(tuple, cls, *args, **kwds)
+
+
+class CallableMeta(GenericMeta):
+ """Metaclass for Callable (internal)."""
+
+ def __repr__(self):
+ if self.__origin__ is None:
+ return super().__repr__()
+ return self._tree_repr(self._subs_tree())
+
+ def _tree_repr(self, tree):
+ if self._gorg is not Callable:
+ return super()._tree_repr(tree)
+ # For actual Callable (not its subclass) we override
+ # super()._tree_repr() for nice formatting.
+ arg_list = []
+ for arg in tree[1:]:
+ if not isinstance(arg, tuple):
+ arg_list.append(_type_repr(arg))
+ else:
+ arg_list.append(arg[0]._tree_repr(arg))
+ if arg_list[0] == '...':
+ return repr(tree[0]) + '[..., %s]' % arg_list[1]
+ return (repr(tree[0]) +
+ '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1]))
+
+ def __getitem__(self, parameters):
+ """A thin wrapper around __getitem_inner__ to provide the latter
+ with hashable arguments to improve speed.
+ """
+
+ if self.__origin__ is not None or self._gorg is not Callable:
+ return super().__getitem__(parameters)
+ if not isinstance(parameters, tuple) or len(parameters) != 2:
+ raise TypeError("Callable must be used as "
+ "Callable[[arg, ...], result].")
+ args, result = parameters
+ if args is Ellipsis:
+ parameters = (Ellipsis, result)
+ else:
+ if not isinstance(args, list):
+ raise TypeError("Callable[args, result]: args must be a list."
+ " Got %.100r." % (args,))
+ parameters = (tuple(args), result)
+ return self.__getitem_inner__(parameters)
+
+ @_tp_cache
+ def __getitem_inner__(self, parameters):
+ args, result = parameters
+ msg = "Callable[args, result]: result must be a type."
+ result = _type_check(result, msg)
+ if args is Ellipsis:
+ return super().__getitem__((_TypingEllipsis, result))
+ msg = "Callable[[arg, ...], result]: each arg must be a type."
+ args = tuple(_type_check(arg, msg) for arg in args)
+ parameters = args + (result,)
+ return super().__getitem__(parameters)
+
+
+class Callable(extra=collections_abc.Callable, metaclass=CallableMeta):
+ """Callable type; Callable[[int], str] is a function of (int) -> str.
+
+ The subscription syntax must always be used with exactly two
+ values: the argument list and the return type. The argument list
+ must be a list of types or ellipsis; the return type must be a single type.
+
+    There is no syntax to indicate optional or keyword arguments;
+ such function types are rarely used as callback types.
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Callable:
+ raise TypeError("Type Callable cannot be instantiated; "
+ "use a non-abstract subclass instead")
+ return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
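+
+# Illustrative Callable annotations (example only, not part of the upstream
+# module):
+#
+#     def apply(func: Callable[[int, int], int], a: int, b: int) -> int:
+#         return func(a, b)
+#
+#     handler: Callable[..., None]   # any argument list, must return None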
+
+
+class _ClassVar(_FinalTypingBase, _root=True):
+ """Special type construct to mark class variables.
+
+ An annotation wrapped in ClassVar indicates that a given
+ attribute is intended to be used as a class variable and
+ should not be set on instances of that class. Usage::
+
+ class Starship:
+ stats: ClassVar[Dict[str, int]] = {} # class variable
+ damage: int = 10 # instance variable
+
+    ClassVar accepts only types and cannot be further subscripted.
+
+ Note that ClassVar is not a class itself, and should not
+ be used with isinstance() or issubclass().
+ """
+
+ __slots__ = ('__type__',)
+
+ def __init__(self, tp=None, **kwds):
+ self.__type__ = tp
+
+ def __getitem__(self, item):
+ cls = type(self)
+ if self.__type__ is None:
+ return cls(_type_check(item,
+ '{} accepts only single type.'.format(cls.__name__[1:])),
+ _root=True)
+ raise TypeError('{} cannot be further subscripted'
+ .format(cls.__name__[1:]))
+
+ def _eval_type(self, globalns, localns):
+ new_tp = _eval_type(self.__type__, globalns, localns)
+ if new_tp == self.__type__:
+ return self
+ return type(self)(new_tp, _root=True)
+
+ def __repr__(self):
+ r = super().__repr__()
+ if self.__type__ is not None:
+ r += '[{}]'.format(_type_repr(self.__type__))
+ return r
+
+ def __hash__(self):
+ return hash((type(self).__name__, self.__type__))
+
+ def __eq__(self, other):
+ if not isinstance(other, _ClassVar):
+ return NotImplemented
+ if self.__type__ is not None:
+ return self.__type__ == other.__type__
+ return self is other
+
+
+ClassVar = _ClassVar(_root=True)
+
+
+def cast(typ, val):
+ """Cast a value to a type.
+
+ This returns the value unchanged. To the type checker this
+ signals that the return value has the designated type, but at
+ runtime we intentionally don't check anything (we want this
+ to be as fast as possible).
+ """
+ return val
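+
+# Illustrative use of cast() (example only): it is purely a hint for the type
+# checker; nothing is verified or converted at runtime.
+#
+#     def first_str(items: List[object]) -> str:
+#         found = next(x for x in items if isinstance(x, str))
+#         return cast(str, found)   # tells the checker 'found' is a str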
+
+
+def _get_defaults(func):
+ """Internal helper to extract the default arguments, by name."""
+ try:
+ code = func.__code__
+ except AttributeError:
+ # Some built-in functions don't have __code__, __defaults__, etc.
+ return {}
+ pos_count = code.co_argcount
+ arg_names = code.co_varnames
+ arg_names = arg_names[:pos_count]
+ defaults = func.__defaults__ or ()
+ kwdefaults = func.__kwdefaults__
+ res = dict(kwdefaults) if kwdefaults else {}
+ pos_offset = pos_count - len(defaults)
+ for name, value in zip(arg_names[pos_offset:], defaults):
+ assert name not in res
+ res[name] = value
+ return res
+
+
+_allowed_types = (types.FunctionType, types.BuiltinFunctionType,
+ types.MethodType, types.ModuleType,
+ WrapperDescriptorType, MethodWrapperType, MethodDescriptorType)
+
+
+def get_type_hints(obj, globalns=None, localns=None):
+ """Return type hints for an object.
+
+ This is often the same as obj.__annotations__, but it handles
+ forward references encoded as string literals, and if necessary
+ adds Optional[t] if a default value equal to None is set.
+
+ The argument may be a module, class, method, or function. The annotations
+ are returned as a dictionary. For classes, annotations include also
+ inherited members.
+
+ TypeError is raised if the argument is not of a type that can contain
+ annotations, and an empty dictionary is returned if no annotations are
+ present.
+
+ BEWARE -- the behavior of globalns and localns is counterintuitive
+ (unless you are familiar with how eval() and exec() work). The
+ search order is locals first, then globals.
+
+ - If no dict arguments are passed, an attempt is made to use the
+ globals from obj (or the respective module's globals for classes),
+ and these are also used as the locals. If the object does not appear
+ to have globals, an empty dictionary is used.
+
+ - If one dict argument is passed, it is used for both globals and
+ locals.
+
+ - If two dict arguments are passed, they specify globals and
+ locals, respectively.
+ """
+
+ if getattr(obj, '__no_type_check__', None):
+ return {}
+ # Classes require a special treatment.
+ if isinstance(obj, type):
+ hints = {}
+ for base in reversed(obj.__mro__):
+ if globalns is None:
+ base_globals = sys.modules[base.__module__].__dict__
+ else:
+ base_globals = globalns
+ ann = base.__dict__.get('__annotations__', {})
+ for name, value in ann.items():
+ if value is None:
+ value = type(None)
+ if isinstance(value, str):
+ value = _ForwardRef(value)
+ value = _eval_type(value, base_globals, localns)
+ hints[name] = value
+ return hints
+
+ if globalns is None:
+ if isinstance(obj, types.ModuleType):
+ globalns = obj.__dict__
+ else:
+ globalns = getattr(obj, '__globals__', {})
+ if localns is None:
+ localns = globalns
+ elif localns is None:
+ localns = globalns
+ hints = getattr(obj, '__annotations__', None)
+ if hints is None:
+ # Return empty annotations for something that _could_ have them.
+ if isinstance(obj, _allowed_types):
+ return {}
+ else:
+ raise TypeError('{!r} is not a module, class, method, '
+ 'or function.'.format(obj))
+ defaults = _get_defaults(obj)
+ hints = dict(hints)
+ for name, value in hints.items():
+ if value is None:
+ value = type(None)
+ if isinstance(value, str):
+ value = _ForwardRef(value)
+ value = _eval_type(value, globalns, localns)
+ if name in defaults and defaults[name] is None:
+ value = Optional[value]
+ hints[name] = value
+ return hints
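+
+# Illustrative behaviour of get_type_hints() (example only): string
+# annotations are resolved, and a None default turns the hint into Optional.
+#
+#     def greet(name: 'str', prefix: str = None) -> str:
+#         return (prefix or 'Hello') + ', ' + name
+#
+#     # get_type_hints(greet) == {'name': str,
+#     #                           'prefix': Optional[str],
+#     #                           'return': str}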
+
+
+def no_type_check(arg):
+ """Decorator to indicate that annotations are not type hints.
+
+ The argument must be a class or function; if it is a class, it
+ applies recursively to all methods and classes defined in that class
+ (but not to methods defined in its superclasses or subclasses).
+
+ This mutates the function(s) or class(es) in place.
+ """
+ if isinstance(arg, type):
+ arg_attrs = arg.__dict__.copy()
+ for attr, val in arg.__dict__.items():
+ if val in arg.__bases__ + (arg,):
+ arg_attrs.pop(attr)
+ for obj in arg_attrs.values():
+ if isinstance(obj, types.FunctionType):
+ obj.__no_type_check__ = True
+ if isinstance(obj, type):
+ no_type_check(obj)
+ try:
+ arg.__no_type_check__ = True
+ except TypeError: # built-in classes
+ pass
+ return arg
+
+
+def no_type_check_decorator(decorator):
+ """Decorator to give another decorator the @no_type_check effect.
+
+ This wraps the decorator with something that wraps the decorated
+ function in @no_type_check.
+ """
+
+ @functools.wraps(decorator)
+ def wrapped_decorator(*args, **kwds):
+ func = decorator(*args, **kwds)
+ func = no_type_check(func)
+ return func
+
+ return wrapped_decorator
+
+
+def _overload_dummy(*args, **kwds):
+ """Helper for @overload to raise when called."""
+ raise NotImplementedError(
+ "You should not call an overloaded function. "
+ "A series of @overload-decorated functions "
+ "outside a stub module should always be followed "
+ "by an implementation that is not @overload-ed.")
+
+
+def overload(func):
+ """Decorator for overloaded functions/methods.
+
+ In a stub file, place two or more stub definitions for the same
+ function in a row, each decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+
+ In a non-stub file (i.e. a regular .py file), do the same but
+ follow it with an implementation. The implementation should *not*
+ be decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+ def utf8(value):
+ # implementation goes here
+ """
+ return _overload_dummy
+
+
+class _ProtocolMeta(GenericMeta):
+ """Internal metaclass for _Protocol.
+
+ This exists so _Protocol classes can be generic without deriving
+ from Generic.
+ """
+
+ def __instancecheck__(self, obj):
+ if _Protocol not in self.__bases__:
+ return super().__instancecheck__(obj)
+ raise TypeError("Protocols cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ if not self._is_protocol:
+ # No structural checks since this isn't a protocol.
+ return NotImplemented
+
+ if self is _Protocol:
+ # Every class is a subclass of the empty protocol.
+ return True
+
+ # Find all attributes defined in the protocol.
+ attrs = self._get_protocol_attrs()
+
+ for attr in attrs:
+ if not any(attr in d.__dict__ for d in cls.__mro__):
+ return False
+ return True
+
+ def _get_protocol_attrs(self):
+ # Get all Protocol base classes.
+ protocol_bases = []
+ for c in self.__mro__:
+ if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol':
+ protocol_bases.append(c)
+
+ # Get attributes included in protocol.
+ attrs = set()
+ for base in protocol_bases:
+ for attr in base.__dict__.keys():
+ # Include attributes not defined in any non-protocol bases.
+ for c in self.__mro__:
+ if (c is not base and attr in c.__dict__ and
+ not getattr(c, '_is_protocol', False)):
+ break
+ else:
+ if (not attr.startswith('_abc_') and
+ attr != '__abstractmethods__' and
+ attr != '__annotations__' and
+ attr != '__weakref__' and
+ attr != '_is_protocol' and
+ attr != '_gorg' and
+ attr != '__dict__' and
+ attr != '__args__' and
+ attr != '__slots__' and
+ attr != '_get_protocol_attrs' and
+ attr != '__next_in_mro__' and
+ attr != '__parameters__' and
+ attr != '__origin__' and
+ attr != '__orig_bases__' and
+ attr != '__extra__' and
+ attr != '__tree_hash__' and
+ attr != '__module__'):
+ attrs.add(attr)
+
+ return attrs
+
+
+class _Protocol(metaclass=_ProtocolMeta):
+ """Internal base class for protocol classes.
+
+ This implements a simple-minded structural issubclass check
+    (similar to, but more general than, the one-offs in collections.abc
+ such as Hashable).
+ """
+
+ __slots__ = ()
+
+ _is_protocol = True
+
+
+# Various ABCs mimicking those in collections.abc.
+# A few are simply re-exported for completeness.
+
+Hashable = collections_abc.Hashable # Not generic.
+
+
+if hasattr(collections_abc, 'Awaitable'):
+ class Awaitable(Generic[T_co], extra=collections_abc.Awaitable):
+ __slots__ = ()
+
+ __all__.append('Awaitable')
+
+
+if hasattr(collections_abc, 'Coroutine'):
+ class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co],
+ extra=collections_abc.Coroutine):
+ __slots__ = ()
+
+ __all__.append('Coroutine')
+
+
+if hasattr(collections_abc, 'AsyncIterable'):
+
+ class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable):
+ __slots__ = ()
+
+ class AsyncIterator(AsyncIterable[T_co],
+ extra=collections_abc.AsyncIterator):
+ __slots__ = ()
+
+ __all__.append('AsyncIterable')
+ __all__.append('AsyncIterator')
+
+
+class Iterable(Generic[T_co], extra=collections_abc.Iterable):
+ __slots__ = ()
+
+
+class Iterator(Iterable[T_co], extra=collections_abc.Iterator):
+ __slots__ = ()
+
+
+class SupportsInt(_Protocol):
+ __slots__ = ()
+
+ @abstractmethod
+ def __int__(self) -> int:
+ pass
+
+
+class SupportsFloat(_Protocol):
+ __slots__ = ()
+
+ @abstractmethod
+ def __float__(self) -> float:
+ pass
+
+
+class SupportsComplex(_Protocol):
+ __slots__ = ()
+
+ @abstractmethod
+ def __complex__(self) -> complex:
+ pass
+
+
+class SupportsBytes(_Protocol):
+ __slots__ = ()
+
+ @abstractmethod
+ def __bytes__(self) -> bytes:
+ pass
+
+
+class SupportsIndex(_Protocol):
+ __slots__ = ()
+
+ @abstractmethod
+ def __index__(self) -> int:
+ pass
+
+
+class SupportsAbs(_Protocol[T_co]):
+ __slots__ = ()
+
+ @abstractmethod
+ def __abs__(self) -> T_co:
+ pass
+
+
+class SupportsRound(_Protocol[T_co]):
+ __slots__ = ()
+
+ @abstractmethod
+ def __round__(self, ndigits: int = 0) -> T_co:
+ pass
+
+
+if hasattr(collections_abc, 'Reversible'):
+ class Reversible(Iterable[T_co], extra=collections_abc.Reversible):
+ __slots__ = ()
+else:
+ class Reversible(_Protocol[T_co]):
+ __slots__ = ()
+
+ @abstractmethod
+ def __reversed__(self) -> 'Iterator[T_co]':
+ pass
+
+
+Sized = collections_abc.Sized # Not generic.
+
+
+class Container(Generic[T_co], extra=collections_abc.Container):
+ __slots__ = ()
+
+
+if hasattr(collections_abc, 'Collection'):
+ class Collection(Sized, Iterable[T_co], Container[T_co],
+ extra=collections_abc.Collection):
+ __slots__ = ()
+
+ __all__.append('Collection')
+
+
+# Callable was defined earlier.
+
+if hasattr(collections_abc, 'Collection'):
+ class AbstractSet(Collection[T_co],
+ extra=collections_abc.Set):
+ __slots__ = ()
+else:
+ class AbstractSet(Sized, Iterable[T_co], Container[T_co],
+ extra=collections_abc.Set):
+ __slots__ = ()
+
+
+class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet):
+ __slots__ = ()
+
+
+# NOTE: It is only covariant in the value type.
+if hasattr(collections_abc, 'Collection'):
+ class Mapping(Collection[KT], Generic[KT, VT_co],
+ extra=collections_abc.Mapping):
+ __slots__ = ()
+else:
+ class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co],
+ extra=collections_abc.Mapping):
+ __slots__ = ()
+
+
+class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping):
+ __slots__ = ()
+
+
+if hasattr(collections_abc, 'Reversible'):
+ if hasattr(collections_abc, 'Collection'):
+ class Sequence(Reversible[T_co], Collection[T_co],
+ extra=collections_abc.Sequence):
+ __slots__ = ()
+ else:
+ class Sequence(Sized, Reversible[T_co], Container[T_co],
+ extra=collections_abc.Sequence):
+ __slots__ = ()
+else:
+ class Sequence(Sized, Iterable[T_co], Container[T_co],
+ extra=collections_abc.Sequence):
+ __slots__ = ()
+
+
+class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence):
+ __slots__ = ()
+
+
+class ByteString(Sequence[int], extra=collections_abc.ByteString):
+ __slots__ = ()
+
+
+class List(list, MutableSequence[T], extra=list):
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is List:
+ raise TypeError("Type List cannot be instantiated; "
+ "use list() instead")
+ return _generic_new(list, cls, *args, **kwds)
+
+
+class Deque(collections.deque, MutableSequence[T], extra=collections.deque):
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Deque:
+ return collections.deque(*args, **kwds)
+ return _generic_new(collections.deque, cls, *args, **kwds)
+
+
+class Set(set, MutableSet[T], extra=set):
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Set:
+ raise TypeError("Type Set cannot be instantiated; "
+ "use set() instead")
+ return _generic_new(set, cls, *args, **kwds)
+
+
+class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset):
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is FrozenSet:
+ raise TypeError("Type FrozenSet cannot be instantiated; "
+ "use frozenset() instead")
+ return _generic_new(frozenset, cls, *args, **kwds)
+
+
+class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView):
+ __slots__ = ()
+
+
+class KeysView(MappingView[KT], AbstractSet[KT],
+ extra=collections_abc.KeysView):
+ __slots__ = ()
+
+
+class ItemsView(MappingView[Tuple[KT, VT_co]],
+ AbstractSet[Tuple[KT, VT_co]],
+ Generic[KT, VT_co],
+ extra=collections_abc.ItemsView):
+ __slots__ = ()
+
+
+class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView):
+ __slots__ = ()
+
+
+if hasattr(contextlib, 'AbstractContextManager'):
+ class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager):
+ __slots__ = ()
+else:
+ class ContextManager(Generic[T_co]):
+ __slots__ = ()
+
+ def __enter__(self):
+ return self
+
+ @abc.abstractmethod
+ def __exit__(self, exc_type, exc_value, traceback):
+ return None
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is ContextManager:
+ # In Python 3.6+, it is possible to set a method to None to
+ # explicitly indicate that the class does not implement an ABC
+ # (https://bugs.python.org/issue25958), but we do not support
+ # that pattern here because this fallback class is only used
+ # in Python 3.5 and earlier.
+ if (any("__enter__" in B.__dict__ for B in C.__mro__) and
+ any("__exit__" in B.__dict__ for B in C.__mro__)):
+ return True
+ return NotImplemented
+
+
+if hasattr(contextlib, 'AbstractAsyncContextManager'):
+ class AsyncContextManager(Generic[T_co],
+ extra=contextlib.AbstractAsyncContextManager):
+ __slots__ = ()
+
+ __all__.append('AsyncContextManager')
+elif sys.version_info[:2] >= (3, 5):
+ exec("""
+class AsyncContextManager(Generic[T_co]):
+ __slots__ = ()
+
+ async def __aenter__(self):
+ return self
+
+ @abc.abstractmethod
+ async def __aexit__(self, exc_type, exc_value, traceback):
+ return None
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is AsyncContextManager:
+ if sys.version_info[:2] >= (3, 6):
+ return _collections_abc._check_methods(C, "__aenter__", "__aexit__")
+ if (any("__aenter__" in B.__dict__ for B in C.__mro__) and
+ any("__aexit__" in B.__dict__ for B in C.__mro__)):
+ return True
+ return NotImplemented
+
+__all__.append('AsyncContextManager')
+""")
+
+
+class Dict(dict, MutableMapping[KT, VT], extra=dict):
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Dict:
+ raise TypeError("Type Dict cannot be instantiated; "
+ "use dict() instead")
+ return _generic_new(dict, cls, *args, **kwds)
+
+
+class DefaultDict(collections.defaultdict, MutableMapping[KT, VT],
+ extra=collections.defaultdict):
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is DefaultDict:
+ return collections.defaultdict(*args, **kwds)
+ return _generic_new(collections.defaultdict, cls, *args, **kwds)
+
+
+class Counter(collections.Counter, Dict[T, int], extra=collections.Counter):
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Counter:
+ return collections.Counter(*args, **kwds)
+ return _generic_new(collections.Counter, cls, *args, **kwds)
+
+
+if hasattr(collections, 'ChainMap'):
+ # ChainMap only exists in 3.3+
+ __all__.append('ChainMap')
+
+ class ChainMap(collections.ChainMap, MutableMapping[KT, VT],
+ extra=collections.ChainMap):
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is ChainMap:
+ return collections.ChainMap(*args, **kwds)
+ return _generic_new(collections.ChainMap, cls, *args, **kwds)
+
+
+# Determine what base class to use for Generator.
+if hasattr(collections_abc, 'Generator'):
+ # Sufficiently recent versions of 3.5 have a Generator ABC.
+ _G_base = collections_abc.Generator
+else:
+ # Fall back on the exact type.
+ _G_base = types.GeneratorType
+
+
+class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co],
+ extra=_G_base):
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Generator:
+ raise TypeError("Type Generator cannot be instantiated; "
+ "create a subclass instead")
+ return _generic_new(_G_base, cls, *args, **kwds)
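+
+# Illustrative Generator annotation (example only): the three parameters are
+# the yield type, the send type and the return type, in that order.
+#
+#     def countdown(n: int) -> Generator[int, None, str]:
+#         while n > 0:
+#             yield n
+#             n -= 1
+#         return 'done'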
+
+
+if hasattr(collections_abc, 'AsyncGenerator'):
+ class AsyncGenerator(AsyncIterator[T_co], Generic[T_co, T_contra],
+ extra=collections_abc.AsyncGenerator):
+ __slots__ = ()
+
+ __all__.append('AsyncGenerator')
+
+
+# Internal type variable used for Type[].
+CT_co = TypeVar('CT_co', covariant=True, bound=type)
+
+
+# This is not a real generic class. Don't use outside annotations.
+class Type(Generic[CT_co], extra=type):
+ """A special construct usable to annotate class objects.
+
+ For example, suppose we have the following classes::
+
+ class User: ... # Abstract base for User classes
+ class BasicUser(User): ...
+ class ProUser(User): ...
+ class TeamUser(User): ...
+
+ And a function that takes a class argument that's a subclass of
+ User and returns an instance of the corresponding class::
+
+ U = TypeVar('U', bound=User)
+ def new_user(user_class: Type[U]) -> U:
+ user = user_class()
+ # (Here we could write the user object to a database)
+ return user
+
+ joe = new_user(BasicUser)
+
+ At this point the type checker knows that joe has type BasicUser.
+ """
+
+ __slots__ = ()
+
+
+def _make_nmtuple(name, types):
+ msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
+ types = [(n, _type_check(t, msg)) for n, t in types]
+ nm_tpl = collections.namedtuple(name, [n for n, t in types])
+    # Prior to PEP 526, only the _field_types attribute was assigned.
+ # Now, both __annotations__ and _field_types are used to maintain compatibility.
+ nm_tpl.__annotations__ = nm_tpl._field_types = collections.OrderedDict(types)
+ try:
+ nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError):
+ pass
+ return nm_tpl
+
+
+_PY36 = sys.version_info[:2] >= (3, 6)
+
+# attributes prohibited to set in NamedTuple class syntax
+_prohibited = ('__new__', '__init__', '__slots__', '__getnewargs__',
+ '_fields', '_field_defaults', '_field_types',
+ '_make', '_replace', '_asdict', '_source')
+
+_special = ('__module__', '__name__', '__qualname__', '__annotations__')
+
+
+class NamedTupleMeta(type):
+
+ def __new__(cls, typename, bases, ns):
+ if ns.get('_root', False):
+ return super().__new__(cls, typename, bases, ns)
+ if not _PY36:
+ raise TypeError("Class syntax for NamedTuple is only supported"
+ " in Python 3.6+")
+ types = ns.get('__annotations__', {})
+ nm_tpl = _make_nmtuple(typename, types.items())
+ defaults = []
+ defaults_dict = {}
+ for field_name in types:
+ if field_name in ns:
+ default_value = ns[field_name]
+ defaults.append(default_value)
+ defaults_dict[field_name] = default_value
+ elif defaults:
+ raise TypeError("Non-default namedtuple field {field_name} cannot "
+ "follow default field(s) {default_names}"
+ .format(field_name=field_name,
+ default_names=', '.join(defaults_dict.keys())))
+ nm_tpl.__new__.__annotations__ = collections.OrderedDict(types)
+ nm_tpl.__new__.__defaults__ = tuple(defaults)
+ nm_tpl._field_defaults = defaults_dict
+ # update from user namespace without overriding special namedtuple attributes
+ for key in ns:
+ if key in _prohibited:
+ raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
+ elif key not in _special and key not in nm_tpl._fields:
+ setattr(nm_tpl, key, ns[key])
+ return nm_tpl
+
+
+class NamedTuple(metaclass=NamedTupleMeta):
+ """Typed version of namedtuple.
+
+ Usage in Python versions >= 3.6::
+
+ class Employee(NamedTuple):
+ name: str
+ id: int
+
+ This is equivalent to::
+
+ Employee = collections.namedtuple('Employee', ['name', 'id'])
+
+ The resulting class has extra __annotations__ and _field_types
+ attributes, giving an ordered dict mapping field names to types.
+ __annotations__ should be preferred, while _field_types
+    is kept to maintain pre-PEP 526 compatibility. (The field names
+ are in the _fields attribute, which is part of the namedtuple
+ API.) Alternative equivalent keyword syntax is also accepted::
+
+ Employee = NamedTuple('Employee', name=str, id=int)
+
+ In Python versions <= 3.5 use::
+
+ Employee = NamedTuple('Employee', [('name', str), ('id', int)])
+ """
+ _root = True
+
+ def __new__(self, typename, fields=None, **kwargs):
+ if kwargs and not _PY36:
+ raise TypeError("Keyword syntax for NamedTuple is only supported"
+ " in Python 3.6+")
+ if fields is None:
+ fields = kwargs.items()
+ elif kwargs:
+ raise TypeError("Either list of fields or keywords"
+ " can be provided to NamedTuple, not both")
+ return _make_nmtuple(typename, fields)
+
+
+def NewType(name, tp):
+ """NewType creates simple unique types with almost zero
+ runtime overhead. NewType(name, tp) is considered a subtype of tp
+ by static type checkers. At runtime, NewType(name, tp) returns
+ a dummy function that simply returns its argument. Usage::
+
+ UserId = NewType('UserId', int)
+
+ def name_by_id(user_id: UserId) -> str:
+ ...
+
+ UserId('user') # Fails type check
+
+ name_by_id(42) # Fails type check
+ name_by_id(UserId(42)) # OK
+
+ num = UserId(5) + 1 # type: int
+ """
+
+ def new_type(x):
+ return x
+
+ new_type.__name__ = name
+ new_type.__supertype__ = tp
+ return new_type
+
+
+# Python-version-specific alias (Python 2: unicode; Python 3: str)
+Text = str
+
+
+# Constant that's True when type checking, but False here.
+TYPE_CHECKING = False
+
+
+class IO(Generic[AnyStr]):
+ """Generic base class for TextIO and BinaryIO.
+
+ This is an abstract, generic version of the return of open().
+
+ NOTE: This does not distinguish between the different possible
+ classes (text vs. binary, read vs. write vs. read/write,
+ append-only, unbuffered). The TextIO and BinaryIO subclasses
+ below capture the distinctions between text vs. binary, which is
+ pervasive in the interface; however we currently do not offer a
+ way to track the other distinctions in the type system.
+ """
+
+ __slots__ = ()
+
+ @abstractproperty
+ def mode(self) -> str:
+ pass
+
+ @abstractproperty
+ def name(self) -> str:
+ pass
+
+ @abstractmethod
+ def close(self) -> None:
+ pass
+
+ @abstractproperty
+ def closed(self) -> bool:
+ pass
+
+ @abstractmethod
+ def fileno(self) -> int:
+ pass
+
+ @abstractmethod
+ def flush(self) -> None:
+ pass
+
+ @abstractmethod
+ def isatty(self) -> bool:
+ pass
+
+ @abstractmethod
+ def read(self, n: int = -1) -> AnyStr:
+ pass
+
+ @abstractmethod
+ def readable(self) -> bool:
+ pass
+
+ @abstractmethod
+ def readline(self, limit: int = -1) -> AnyStr:
+ pass
+
+ @abstractmethod
+ def readlines(self, hint: int = -1) -> List[AnyStr]:
+ pass
+
+ @abstractmethod
+ def seek(self, offset: int, whence: int = 0) -> int:
+ pass
+
+ @abstractmethod
+ def seekable(self) -> bool:
+ pass
+
+ @abstractmethod
+ def tell(self) -> int:
+ pass
+
+ @abstractmethod
+ def truncate(self, size: int = None) -> int:
+ pass
+
+ @abstractmethod
+ def writable(self) -> bool:
+ pass
+
+ @abstractmethod
+ def write(self, s: AnyStr) -> int:
+ pass
+
+ @abstractmethod
+ def writelines(self, lines: List[AnyStr]) -> None:
+ pass
+
+ @abstractmethod
+ def __enter__(self) -> 'IO[AnyStr]':
+ pass
+
+ @abstractmethod
+ def __exit__(self, type, value, traceback) -> None:
+ pass
+
+
+class BinaryIO(IO[bytes]):
+ """Typed version of the return of open() in binary mode."""
+
+ __slots__ = ()
+
+ @abstractmethod
+ def write(self, s: Union[bytes, bytearray]) -> int:
+ pass
+
+ @abstractmethod
+ def __enter__(self) -> 'BinaryIO':
+ pass
+
+
+class TextIO(IO[str]):
+ """Typed version of the return of open() in text mode."""
+
+ __slots__ = ()
+
+ @abstractproperty
+ def buffer(self) -> BinaryIO:
+ pass
+
+ @abstractproperty
+ def encoding(self) -> str:
+ pass
+
+ @abstractproperty
+ def errors(self) -> Optional[str]:
+ pass
+
+ @abstractproperty
+ def line_buffering(self) -> bool:
+ pass
+
+ @abstractproperty
+ def newlines(self) -> Any:
+ pass
+
+ @abstractmethod
+ def __enter__(self) -> 'TextIO':
+ pass
+
+
+class io:
+ """Wrapper namespace for IO generic classes."""
+
+ __all__ = ['IO', 'TextIO', 'BinaryIO']
+ IO = IO
+ TextIO = TextIO
+ BinaryIO = BinaryIO
+
+
+io.__name__ = __name__ + '.io'
+sys.modules[io.__name__] = io
+
+
+Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')),
+ lambda p: p.pattern)
+Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')),
+ lambda m: m.re.pattern)
+
+
+class re:
+ """Wrapper namespace for re type aliases."""
+
+ __all__ = ['Pattern', 'Match']
+ Pattern = Pattern
+ Match = Match
+
+
+re.__name__ = __name__ + '.re'
+sys.modules[re.__name__] = re
diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/LICENSE
new file mode 100644
index 0000000000..583f9f6e61
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/LICENSE
@@ -0,0 +1,254 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations (now Zope
+Corporation, see http://www.zope.com). In 2001, the Python Software
+Foundation (PSF, see http://www.python.org/psf/) was formed, a
+non-profit organization created specifically to own Python-related
+Intellectual Property. Zope Corporation is a sponsoring member of
+the PSF.
+
+All Python releases are Open Source (see http://www.opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+ Release Derived Year Owner GPL-
+ from compatible? (1)
+
+ 0.9.0 thru 1.2 1991-1995 CWI yes
+ 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
+ 1.6 1.5.2 2000 CNRI no
+ 2.0 1.6 2000 BeOpen.com no
+ 1.6.1 1.6 2001 CNRI yes (2)
+ 2.1 2.0+1.6.1 2001 PSF no
+ 2.0.1 2.0+1.6.1 2001 PSF yes
+ 2.1.1 2.1+2.0.1 2001 PSF yes
+ 2.1.2 2.1.1 2002 PSF yes
+ 2.1.3 2.1.2 2002 PSF yes
+ 2.2 and above 2.1.1 2001-now PSF yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+ because its license has a choice of law clause. According to
+ CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+ is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
+retained in Python alone or in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions. Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee. This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party. As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee. Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement. This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013. This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee. This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+ ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands. All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/METADATA
new file mode 100644
index 0000000000..d685344892
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/METADATA
@@ -0,0 +1,50 @@
+Metadata-Version: 2.1
+Name: typing
+Version: 3.7.4.3
+Summary: Type Hints for Python
+Home-page: https://docs.python.org/3/library/typing.html
+Author: Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Ivan Levkivskyi
+Author-email: jukka.lehtosalo@iki.fi
+License: PSF
+Project-URL: Source, https://github.com/python/typing
+Keywords: typing function annotations type hints hinting checking checker typehints typehinting typechecking backport
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Topic :: Software Development
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+
+Typing -- Type Hints for Python
+
+This is a backport of the standard library typing module to Python
+versions older than 3.5. (See note below for newer versions.)
+
+Typing defines a standard notation for Python function and variable
+type annotations. The notation can be used for documenting code in a
+concise, standard format, and it has been designed to also be used by
+static and runtime type checkers, static analyzers, IDEs and other
+tools.
+
+NOTE: in Python 3.5 and later, the typing module lives in the stdlib,
+and installing this package has NO EFFECT, because stdlib takes higher
+precedence than the installation directory. To get a newer version of
+the typing module in Python 3.5 or later, you have to upgrade to a
+newer Python (bugfix) version. For example, typing in Python 3.6.0 is
+missing the definition of 'Type' -- upgrading to 3.6.2 will fix this.
+
+Also note that most improvements to the typing module in Python 3.7
+will not be included in this package, since Python 3.7 has some
+built-in support that is not present in older versions (See PEP 560.)
+
+For package maintainers, it is preferred to use
+``typing;python_version<"3.5"`` if your package requires it to support
+earlier Python versions. This will avoid shadowing the stdlib typing
+module when your package is installed via ``pip install -t .`` on
+Python 3.5 or later.
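+
+A minimal sketch of that recommendation for a hypothetical package named
+``example-pkg`` (assuming a setuptools release that understands PEP 508
+environment markers)::
+
+    from setuptools import setup
+
+    setup(
+        name="example-pkg",
+        install_requires=['typing;python_version<"3.5"'],
+    )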
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/RECORD
new file mode 100644
index 0000000000..3d79342232
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/RECORD
@@ -0,0 +1,6 @@
+typing.py,sha256=yP2fxy8eprK-cHMe9bAcvU7QL7n_YGtoTFOG3bsWVJQ,84492
+typing-3.7.4.3.dist-info/LICENSE,sha256=_xfOlOECAk3raHc-scx0ynbaTmWPNzUx8Kwi1oprsa0,12755
+typing-3.7.4.3.dist-info/METADATA,sha256=t3uvms3cJatf6uhsaHM3PP7HWbkjVUh4AE9tb8xCSsQ,2258
+typing-3.7.4.3.dist-info/WHEEL,sha256=CbUdLTqD3-4zWemf83rgR_2_MC4TeXw9qXwrXte5w4w,92
+typing-3.7.4.3.dist-info/top_level.txt,sha256=oG8QCMTRcfcgGpEVbdwBU2DM8MthjmZSDaaQ6WWHx4o,7
+typing-3.7.4.3.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/WHEEL
new file mode 100644
index 0000000000..2b098df983
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/top_level.txt
new file mode 100644
index 0000000000..c997f364b4
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+typing
diff --git a/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing.py b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing.py
new file mode 100644
index 0000000000..dd16d9af96
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/typing-3.7.4.3-py2-none-any/typing.py
@@ -0,0 +1,2550 @@
+from __future__ import absolute_import, unicode_literals
+
+import abc
+from abc import abstractmethod, abstractproperty
+import collections
+import functools
+import re as stdlib_re # Avoid confusion with the re we export.
+import sys
+import types
+import copy
+try:
+ import collections.abc as collections_abc
+except ImportError:
+ import collections as collections_abc # Fallback for PY3.2.
+
+
+# Please keep __all__ alphabetized within each category.
+__all__ = [
+ # Super-special typing primitives.
+ 'Any',
+ 'Callable',
+ 'ClassVar',
+ 'Final',
+ 'Generic',
+ 'Literal',
+ 'Optional',
+ 'Protocol',
+ 'Tuple',
+ 'Type',
+ 'TypeVar',
+ 'Union',
+
+ # ABCs (from collections.abc).
+ 'AbstractSet', # collections.abc.Set.
+ 'GenericMeta', # subclass of abc.ABCMeta and a metaclass
+ # for 'Generic' and ABCs below.
+ 'ByteString',
+ 'Container',
+ 'ContextManager',
+ 'Hashable',
+ 'ItemsView',
+ 'Iterable',
+ 'Iterator',
+ 'KeysView',
+ 'Mapping',
+ 'MappingView',
+ 'MutableMapping',
+ 'MutableSequence',
+ 'MutableSet',
+ 'Sequence',
+ 'Sized',
+ 'ValuesView',
+
+ # Structural checks, a.k.a. protocols.
+ 'Reversible',
+ 'SupportsAbs',
+ 'SupportsComplex',
+ 'SupportsFloat',
+ 'SupportsIndex',
+ 'SupportsInt',
+
+ # Concrete collection types.
+ 'Counter',
+ 'Deque',
+ 'Dict',
+ 'DefaultDict',
+ 'List',
+ 'Set',
+ 'FrozenSet',
+ 'NamedTuple', # Not really a type.
+ 'TypedDict', # Not really a type.
+ 'Generator',
+
+ # One-off things.
+ 'AnyStr',
+ 'cast',
+ 'final',
+ 'get_type_hints',
+ 'NewType',
+ 'no_type_check',
+ 'no_type_check_decorator',
+ 'NoReturn',
+ 'overload',
+ 'runtime_checkable',
+ 'Text',
+ 'TYPE_CHECKING',
+]
+
+# The pseudo-submodules 're' and 'io' are part of the public
+# namespace, but excluded from __all__ because they might stomp on
+# legitimate imports of those modules.
+
+
+def _qualname(x):
+ if sys.version_info[:2] >= (3, 3):
+ return x.__qualname__
+ else:
+ # Fall back to just name.
+ return x.__name__
+
+
+def _trim_name(nm):
+ whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase')
+ if nm.startswith('_') and nm not in whitelist:
+ nm = nm[1:]
+ return nm
+
+
+class TypingMeta(type):
+ """Metaclass for most types defined in typing module
+ (not a part of public API).
+
+ This also defines a dummy constructor (all the work for most typing
+ constructs is done in __new__) and a nicer repr().
+ """
+
+ _is_protocol = False
+
+ def __new__(cls, name, bases, namespace):
+ return super(TypingMeta, cls).__new__(cls, str(name), bases, namespace)
+
+ @classmethod
+ def assert_no_subclassing(cls, bases):
+ for base in bases:
+ if isinstance(base, cls):
+ raise TypeError("Cannot subclass %s" %
+ (', '.join(map(_type_repr, bases)) or '()'))
+
+ def __init__(self, *args, **kwds):
+ pass
+
+ def _eval_type(self, globalns, localns):
+ """Override this in subclasses to interpret forward references.
+
+ For example, List['C'] is internally stored as
+ List[_ForwardRef('C')], which should evaluate to List[C],
+ where C is an object found in globalns or localns (searching
+ localns first, of course).
+ """
+ return self
+
+ def _get_type_vars(self, tvars):
+ pass
+
+ def __repr__(self):
+ qname = _trim_name(_qualname(self))
+ return '%s.%s' % (self.__module__, qname)
+
+
+class _TypingBase(object):
+ """Internal indicator of special typing constructs."""
+ __metaclass__ = TypingMeta
+ __slots__ = ('__weakref__',)
+
+ def __init__(self, *args, **kwds):
+ pass
+
+ def __new__(cls, *args, **kwds):
+ """Constructor.
+
+ This only exists to give a better error message in case
+ someone tries to subclass a special typing object (not a good idea).
+ """
+ if (len(args) == 3 and
+ isinstance(args[0], str) and
+ isinstance(args[1], tuple)):
+ # Close enough.
+ raise TypeError("Cannot subclass %r" % cls)
+ return super(_TypingBase, cls).__new__(cls)
+
+ # Things that are not classes also need these.
+ def _eval_type(self, globalns, localns):
+ return self
+
+ def _get_type_vars(self, tvars):
+ pass
+
+ def __repr__(self):
+ cls = type(self)
+ qname = _trim_name(_qualname(cls))
+ return '%s.%s' % (cls.__module__, qname)
+
+ def __call__(self, *args, **kwds):
+ raise TypeError("Cannot instantiate %r" % type(self))
+
+
+class _FinalTypingBase(_TypingBase):
+ """Internal mix-in class to prevent instantiation.
+
+ Prevents instantiation unless _root=True is given in class call.
+ It is used to create pseudo-singleton instances Any, Union, Optional, etc.
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ self = super(_FinalTypingBase, cls).__new__(cls, *args, **kwds)
+ if '_root' in kwds and kwds['_root'] is True:
+ return self
+ raise TypeError("Cannot instantiate %r" % cls)
+
+ def __reduce__(self):
+ return _trim_name(type(self).__name__)
+
+
+class _ForwardRef(_TypingBase):
+ """Internal wrapper to hold a forward reference."""
+
+ __slots__ = ('__forward_arg__', '__forward_code__',
+ '__forward_evaluated__', '__forward_value__')
+
+ def __init__(self, arg):
+ super(_ForwardRef, self).__init__(arg)
+ if not isinstance(arg, basestring):
+ raise TypeError('Forward reference must be a string -- got %r' % (arg,))
+ try:
+ code = compile(arg, '<string>', 'eval')
+ except SyntaxError:
+ raise SyntaxError('Forward reference must be an expression -- got %r' %
+ (arg,))
+ self.__forward_arg__ = arg
+ self.__forward_code__ = code
+ self.__forward_evaluated__ = False
+ self.__forward_value__ = None
+
+ def _eval_type(self, globalns, localns):
+ if not self.__forward_evaluated__ or localns is not globalns:
+ if globalns is None and localns is None:
+ globalns = localns = {}
+ elif globalns is None:
+ globalns = localns
+ elif localns is None:
+ localns = globalns
+ self.__forward_value__ = _type_check(
+ eval(self.__forward_code__, globalns, localns),
+ "Forward references must evaluate to types.")
+ self.__forward_evaluated__ = True
+ return self.__forward_value__
+
+ def __eq__(self, other):
+ if not isinstance(other, _ForwardRef):
+ return NotImplemented
+ return (self.__forward_arg__ == other.__forward_arg__ and
+ self.__forward_value__ == other.__forward_value__)
+
+ def __hash__(self):
+ return hash((self.__forward_arg__, self.__forward_value__))
+
+ def __instancecheck__(self, obj):
+ raise TypeError("Forward references cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Forward references cannot be used with issubclass().")
+
+ def __repr__(self):
+ return '_ForwardRef(%r)' % (self.__forward_arg__,)
+
+
+class _TypeAlias(_TypingBase):
+ """Internal helper class for defining generic variants of concrete types.
+
+ Note that this is not a type; let's call it a pseudo-type. It cannot
+ be used in instance and subclass checks in parameterized form, i.e.
+ ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning
+ ``False``.
+ """
+
+ __slots__ = ('name', 'type_var', 'impl_type', 'type_checker')
+
+ def __init__(self, name, type_var, impl_type, type_checker):
+ """Initializer.
+
+ Args:
+ name: The name, e.g. 'Pattern'.
+ type_var: The type parameter, e.g. AnyStr, or the
+ specific type, e.g. str.
+ impl_type: The implementation type.
+ type_checker: Function that takes an impl_type instance
+ and returns a value that should be a type_var instance.
+ """
+ assert isinstance(name, basestring), repr(name)
+ assert isinstance(impl_type, type), repr(impl_type)
+ assert not isinstance(impl_type, TypingMeta), repr(impl_type)
+ assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
+ self.name = name
+ self.type_var = type_var
+ self.impl_type = impl_type
+ self.type_checker = type_checker
+
+ def __repr__(self):
+ return "%s[%s]" % (self.name, _type_repr(self.type_var))
+
+ def __getitem__(self, parameter):
+ if not isinstance(self.type_var, TypeVar):
+ raise TypeError("%s cannot be further parameterized." % self)
+ if self.type_var.__constraints__ and isinstance(parameter, type):
+ if not issubclass(parameter, self.type_var.__constraints__):
+ raise TypeError("%s is not a valid substitution for %s." %
+ (parameter, self.type_var))
+ if isinstance(parameter, TypeVar) and parameter is not self.type_var:
+ raise TypeError("%s cannot be re-parameterized." % self)
+ return self.__class__(self.name, parameter,
+ self.impl_type, self.type_checker)
+
+ def __eq__(self, other):
+ if not isinstance(other, _TypeAlias):
+ return NotImplemented
+ return self.name == other.name and self.type_var == other.type_var
+
+ def __hash__(self):
+ return hash((self.name, self.type_var))
+
+ def __instancecheck__(self, obj):
+ if not isinstance(self.type_var, TypeVar):
+ raise TypeError("Parameterized type aliases cannot be used "
+ "with isinstance().")
+ return isinstance(obj, self.impl_type)
+
+ def __subclasscheck__(self, cls):
+ if not isinstance(self.type_var, TypeVar):
+ raise TypeError("Parameterized type aliases cannot be used "
+ "with issubclass().")
+ return issubclass(cls, self.impl_type)
+
+
+def _get_type_vars(types, tvars):
+ for t in types:
+ if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
+ t._get_type_vars(tvars)
+
+
+def _type_vars(types):
+ tvars = []
+ _get_type_vars(types, tvars)
+ return tuple(tvars)
+
+
+def _eval_type(t, globalns, localns):
+ if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
+ return t._eval_type(globalns, localns)
+ return t
+
+
+def _type_check(arg, msg):
+ """Check that the argument is a type, and return it (internal helper).
+
+ As a special case, accept None and return type(None) instead.
+ Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable.
+
+ The msg argument is a human-readable error message, e.g.
+
+ "Union[arg, ...]: arg should be a type."
+
+ We append the repr() of the actual value (truncated to 100 chars).
+ """
+ if arg is None:
+ return type(None)
+ if isinstance(arg, basestring):
+ arg = _ForwardRef(arg)
+ if (
+ isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or
+ not isinstance(arg, (type, _TypingBase)) and not callable(arg)
+ ):
+ raise TypeError(msg + " Got %.100r." % (arg,))
+ # Bare Union etc. are not valid as type arguments
+ if (
+ type(arg).__name__ in ('_Union', '_Optional') and
+ not getattr(arg, '__origin__', None) or
+ isinstance(arg, TypingMeta) and arg._gorg in (Generic, Protocol)
+ ):
+ raise TypeError("Plain %s is not valid as type argument" % arg)
+ return arg
+
+
+def _type_repr(obj):
+ """Return the repr() of an object, special-casing types (internal helper).
+
+ If obj is a type, we return a shorter version than the default
+ type.__repr__, based on the module and qualified name, which is
+ typically enough to uniquely identify a type. For everything
+ else, we fall back on repr(obj).
+ """
+ if isinstance(obj, type) and not isinstance(obj, TypingMeta):
+ if obj.__module__ == '__builtin__':
+ return _qualname(obj)
+ return '%s.%s' % (obj.__module__, _qualname(obj))
+ if obj is Ellipsis:
+ return '...'
+ if isinstance(obj, types.FunctionType):
+ return obj.__name__
+ return repr(obj)
+
+
+class ClassVarMeta(TypingMeta):
+ """Metaclass for _ClassVar"""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ self = super(ClassVarMeta, cls).__new__(cls, name, bases, namespace)
+ return self
+
+
+class _ClassVar(_FinalTypingBase):
+ """Special type construct to mark class variables.
+
+ An annotation wrapped in ClassVar indicates that a given
+ attribute is intended to be used as a class variable and
+ should not be set on instances of that class. Usage::
+
+ class Starship:
+ stats = {} # type: ClassVar[Dict[str, int]] # class variable
+ damage = 10 # type: int # instance variable
+
+ ClassVar accepts only types and cannot be further subscribed.
+
+ Note that ClassVar is not a class itself, and should not
+ be used with isinstance() or issubclass().
+ """
+
+ __metaclass__ = ClassVarMeta
+ __slots__ = ('__type__',)
+
+ def __init__(self, tp=None, _root=False):
+ self.__type__ = tp
+
+ def __getitem__(self, item):
+ cls = type(self)
+ if self.__type__ is None:
+ return cls(_type_check(item,
+ '{} accepts only types.'.format(cls.__name__[1:])),
+ _root=True)
+ raise TypeError('{} cannot be further subscripted'
+ .format(cls.__name__[1:]))
+
+ def _eval_type(self, globalns, localns):
+ return type(self)(_eval_type(self.__type__, globalns, localns),
+ _root=True)
+
+ def __repr__(self):
+ r = super(_ClassVar, self).__repr__()
+ if self.__type__ is not None:
+ r += '[{}]'.format(_type_repr(self.__type__))
+ return r
+
+ def __hash__(self):
+ return hash((type(self).__name__, self.__type__))
+
+ def __eq__(self, other):
+ if not isinstance(other, _ClassVar):
+ return NotImplemented
+ if self.__type__ is not None:
+ return self.__type__ == other.__type__
+ return self is other
+
+
+ClassVar = _ClassVar(_root=True)
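+
+# Illustrative usage sketch (not part of the upstream file): ClassVar in
+# Python 2 style type comments. 'Settings' is a hypothetical user class and
+# 'Dict' is the typing alias defined further down in this module.
+#
+#     class Settings(object):
+#         defaults = {}  # type: ClassVar[Dict[str, str]]  # shared by all instances
+#
+# At runtime ClassVar only wraps the type:
+#
+#     ClassVar[int]       # fine
+#     ClassVar[int][str]  # TypeError: ClassVar cannot be further subscripted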
+
+
+class _FinalMeta(TypingMeta):
+ """Metaclass for _Final"""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ self = super(_FinalMeta, cls).__new__(cls, name, bases, namespace)
+ return self
+
+
+class _Final(_FinalTypingBase):
+ """A special typing construct to indicate that a name
+ cannot be re-assigned or overridden in a subclass.
+ For example:
+
+ MAX_SIZE: Final = 9000
+ MAX_SIZE += 1 # Error reported by type checker
+
+ class Connection:
+ TIMEOUT: Final[int] = 10
+ class FastConnector(Connection):
+ TIMEOUT = 1 # Error reported by type checker
+
+ There is no runtime checking of these properties.
+ """
+
+ __metaclass__ = _FinalMeta
+ __slots__ = ('__type__',)
+
+ def __init__(self, tp=None, **kwds):
+ self.__type__ = tp
+
+ def __getitem__(self, item):
+ cls = type(self)
+ if self.__type__ is None:
+ return cls(_type_check(item,
+ '{} accepts only single type.'.format(cls.__name__[1:])),
+ _root=True)
+ raise TypeError('{} cannot be further subscripted'
+ .format(cls.__name__[1:]))
+
+ def _eval_type(self, globalns, localns):
+ new_tp = _eval_type(self.__type__, globalns, localns)
+ if new_tp == self.__type__:
+ return self
+ return type(self)(new_tp, _root=True)
+
+ def __repr__(self):
+ r = super(_Final, self).__repr__()
+ if self.__type__ is not None:
+ r += '[{}]'.format(_type_repr(self.__type__))
+ return r
+
+ def __hash__(self):
+ return hash((type(self).__name__, self.__type__))
+
+ def __eq__(self, other):
+ if not isinstance(other, _Final):
+ return NotImplemented
+ if self.__type__ is not None:
+ return self.__type__ == other.__type__
+ return self is other
+
+
+Final = _Final(_root=True)
+
+
+def final(f):
+ """This decorator can be used to indicate to type checkers that
+ the decorated method cannot be overridden, and decorated class
+ cannot be subclassed. For example:
+
+ class Base:
+ @final
+ def done(self) -> None:
+ ...
+ class Sub(Base):
+ def done(self) -> None: # Error reported by type checker
+ ...
+ @final
+ class Leaf:
+ ...
+ class Other(Leaf): # Error reported by type checker
+ ...
+
+ There is no runtime checking of these properties.
+ """
+ return f
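+
+# Illustrative sketch (not part of the upstream file): the Python 2 spelling
+# of the examples from the docstrings above, using type comments instead of
+# annotations. 'Connection' and 'Leaf' are hypothetical names.
+#
+#     MAX_SIZE = 9000  # type: Final[int]
+#
+#     class Connection(object):
+#         TIMEOUT = 10  # type: Final[int]   # subclasses must not override this
+#
+#     @final
+#     class Leaf(object):
+#         pass
+#     # a checker flags: class Other(Leaf): ...
+#
+# None of this is enforced at runtime; only static type checkers report it.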
+
+
+class _LiteralMeta(TypingMeta):
+ """Metaclass for _Literal"""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ self = super(_LiteralMeta, cls).__new__(cls, name, bases, namespace)
+ return self
+
+
+class _Literal(_FinalTypingBase):
+ """A type that can be used to indicate to type checkers that the
+ corresponding value has a value literally equivalent to the
+ provided parameter. For example:
+
+ var: Literal[4] = 4
+
+ The type checker understands that 'var' is literally equal to the
+ value 4 and no other value.
+
+ Literal[...] cannot be subclassed. There is no runtime checking
+ verifying that the parameter is actually a value instead of a type.
+ """
+
+ __metaclass__ = _LiteralMeta
+ __slots__ = ('__values__',)
+
+ def __init__(self, values=None, **kwds):
+ self.__values__ = values
+
+ def __getitem__(self, item):
+ cls = type(self)
+ if self.__values__ is None:
+ if not isinstance(item, tuple):
+ item = (item,)
+ return cls(values=item,
+ _root=True)
+ raise TypeError('{} cannot be further subscripted'
+ .format(cls.__name__[1:]))
+
+ def _eval_type(self, globalns, localns):
+ return self
+
+ def __repr__(self):
+ r = super(_Literal, self).__repr__()
+ if self.__values__ is not None:
+ r += '[{}]'.format(', '.join(map(_type_repr, self.__values__)))
+ return r
+
+ def __hash__(self):
+ return hash((type(self).__name__, self.__values__))
+
+ def __eq__(self, other):
+ if not isinstance(other, _Literal):
+ return NotImplemented
+ if self.__values__ is not None:
+ return self.__values__ == other.__values__
+ return self is other
+
+
+Literal = _Literal(_root=True)
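+
+# Illustrative sketch (not part of the upstream file): Literal narrows a value
+# to specific constants. 'open_mode' is a hypothetical function.
+#
+#     def open_mode(mode):
+#         # type: (Literal["r", "rb", "w", "wb"]) -> None
+#         ...
+#
+#     Literal[4]     # wraps the constant 4
+#     Literal[4][5]  # TypeError: Literal cannot be further subscripted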
+
+
+class AnyMeta(TypingMeta):
+ """Metaclass for Any."""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ self = super(AnyMeta, cls).__new__(cls, name, bases, namespace)
+ return self
+
+
+class _Any(_FinalTypingBase):
+ """Special type indicating an unconstrained type.
+
+ - Any is compatible with every type.
+ - Any is assumed to have all methods.
+ - All values are assumed to be instances of Any.
+
+ Note that all the above statements are true from the point of view of
+ static type checkers. At runtime, Any should not be used with instance
+ or class checks.
+ """
+ __metaclass__ = AnyMeta
+ __slots__ = ()
+
+ def __instancecheck__(self, obj):
+ raise TypeError("Any cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Any cannot be used with issubclass().")
+
+
+Any = _Any(_root=True)
+
+
+class NoReturnMeta(TypingMeta):
+ """Metaclass for NoReturn."""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ self = super(NoReturnMeta, cls).__new__(cls, name, bases, namespace)
+ return self
+
+
+class _NoReturn(_FinalTypingBase):
+ """Special type indicating functions that never return.
+ Example::
+
+ from typing import NoReturn
+
+ def stop() -> NoReturn:
+ raise Exception('no way')
+
+ This type is invalid in other positions, e.g., ``List[NoReturn]``
+ will fail in static type checkers.
+ """
+ __metaclass__ = NoReturnMeta
+ __slots__ = ()
+
+ def __instancecheck__(self, obj):
+ raise TypeError("NoReturn cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("NoReturn cannot be used with issubclass().")
+
+
+NoReturn = _NoReturn(_root=True)
+
+
+class TypeVarMeta(TypingMeta):
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ return super(TypeVarMeta, cls).__new__(cls, name, bases, namespace)
+
+
+class TypeVar(_TypingBase):
+ """Type variable.
+
+ Usage::
+
+ T = TypeVar('T') # Can be anything
+ A = TypeVar('A', str, bytes) # Must be str or bytes
+
+ Type variables exist primarily for the benefit of static type
+ checkers. They serve as the parameters for generic types as well
+ as for generic function definitions. See class Generic for more
+ information on generic types. Generic functions work as follows:
+
+ def repeat(x: T, n: int) -> List[T]:
+ '''Return a list containing n references to x.'''
+ return [x]*n
+
+ def longest(x: A, y: A) -> A:
+ '''Return the longest of two strings.'''
+ return x if len(x) >= len(y) else y
+
+ The latter example's signature is essentially the overloading
+ of (str, str) -> str and (bytes, bytes) -> bytes. Also note
+ that if the arguments are instances of some subclass of str,
+ the return type is still plain str.
+
+ At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError.
+
+ Type variables defined with covariant=True or contravariant=True
+ can be used to declare covariant or contravariant generic types.
+ See PEP 484 for more details. By default generic types are invariant
+ in all type variables.
+
+ Type variables can be introspected. e.g.:
+
+ T.__name__ == 'T'
+ T.__constraints__ == ()
+ T.__covariant__ == False
+ T.__contravariant__ == False
+ A.__constraints__ == (str, bytes)
+ """
+
+ __metaclass__ = TypeVarMeta
+ __slots__ = ('__name__', '__bound__', '__constraints__',
+ '__covariant__', '__contravariant__')
+
+ def __init__(self, name, *constraints, **kwargs):
+ super(TypeVar, self).__init__(name, *constraints, **kwargs)
+ bound = kwargs.get('bound', None)
+ covariant = kwargs.get('covariant', False)
+ contravariant = kwargs.get('contravariant', False)
+ self.__name__ = name
+ if covariant and contravariant:
+ raise ValueError("Bivariant types are not supported.")
+ self.__covariant__ = bool(covariant)
+ self.__contravariant__ = bool(contravariant)
+ if constraints and bound is not None:
+ raise TypeError("Constraints cannot be combined with bound=...")
+ if constraints and len(constraints) == 1:
+ raise TypeError("A single constraint is not allowed")
+ msg = "TypeVar(name, constraint, ...): constraints must be types."
+ self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
+ if bound:
+ self.__bound__ = _type_check(bound, "Bound must be a type.")
+ else:
+ self.__bound__ = None
+
+ def _get_type_vars(self, tvars):
+ if self not in tvars:
+ tvars.append(self)
+
+ def __repr__(self):
+ if self.__covariant__:
+ prefix = '+'
+ elif self.__contravariant__:
+ prefix = '-'
+ else:
+ prefix = '~'
+ return prefix + self.__name__
+
+ def __instancecheck__(self, instance):
+ raise TypeError("Type variables cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Type variables cannot be used with issubclass().")
+
+
+# Some unconstrained type variables. These are used by the container types.
+# (These are not for export.)
+T = TypeVar('T') # Any type.
+KT = TypeVar('KT') # Key type.
+VT = TypeVar('VT') # Value type.
+T_co = TypeVar('T_co', covariant=True) # Any type covariant containers.
+V_co = TypeVar('V_co', covariant=True) # Any type covariant containers.
+VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers.
+T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant.
+
+# A useful type variable with constraints. This represents string types.
+# (This one *is* for export!)
+AnyStr = TypeVar('AnyStr', bytes, unicode)
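+
+# Illustrative sketch (not part of the upstream file): a constrained TypeVar
+# used with a Python 2 type comment; 'concat' is a hypothetical function.
+#
+#     S = TypeVar('S', str, unicode)   # S must be exactly str or unicode
+#
+#     def concat(x, y):
+#         # type: (S, S) -> S
+#         return x + y
+#
+# At runtime the variable stays inert: isinstance(x, S) raises TypeError.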
+
+
+def _replace_arg(arg, tvars, args):
+ """An internal helper function: replace arg if it is a type variable
+ found in tvars with corresponding substitution from args or
+ with corresponding substitution sub-tree if arg is a generic type.
+ """
+
+ if tvars is None:
+ tvars = []
+ if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)):
+ return arg._subs_tree(tvars, args)
+ if isinstance(arg, TypeVar):
+ for i, tvar in enumerate(tvars):
+ if arg == tvar:
+ return args[i]
+ return arg
+
+
+# Special typing constructs Union, Optional, Generic, Callable and Tuple
+# use three special attributes for internal bookkeeping of generic types:
+# * __parameters__ is a tuple of unique free type parameters of a generic
+# type, for example, Dict[T, T].__parameters__ == (T,);
+# * __origin__ keeps a reference to a type that was subscripted,
+# e.g., Union[T, int].__origin__ == Union;
+# * __args__ is a tuple of all arguments used in subscripting,
+# e.g., Dict[T, int].__args__ == (T, int).
+
+
+def _subs_tree(cls, tvars=None, args=None):
+ """An internal helper function: calculate substitution tree
+ for generic cls after replacing its type parameters with
+ substitutions in tvars -> args (if any).
+ Repeat the same following __origin__'s.
+
+ Return a list of arguments with all possible substitutions
+ performed. Arguments that are generic classes themselves are represented
+ as tuples (so that no new classes are created by this function).
+ For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)]
+ """
+
+ if cls.__origin__ is None:
+ return cls
+ # Make a chain of origins (i.e. cls -> cls.__origin__)
+ current = cls.__origin__
+ orig_chain = []
+ while current.__origin__ is not None:
+ orig_chain.append(current)
+ current = current.__origin__
+ # Replace type variables in __args__ if asked ...
+ tree_args = []
+ for arg in cls.__args__:
+ tree_args.append(_replace_arg(arg, tvars, args))
+ # ... then continue replacing down the origin chain.
+ for ocls in orig_chain:
+ new_tree_args = []
+ for arg in ocls.__args__:
+ new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args))
+ tree_args = new_tree_args
+ return tree_args
+
+
+def _remove_dups_flatten(parameters):
+ """An internal helper for Union creation and substitution: flatten Union's
+ among parameters, then remove duplicates and strict subclasses.
+ """
+
+ # Flatten out Union[Union[...], ...].
+ params = []
+ for p in parameters:
+ if isinstance(p, _Union) and p.__origin__ is Union:
+ params.extend(p.__args__)
+ elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
+ params.extend(p[1:])
+ else:
+ params.append(p)
+ # Weed out strict duplicates, preserving the first of each occurrence.
+ all_params = set(params)
+ if len(all_params) < len(params):
+ new_params = []
+ for t in params:
+ if t in all_params:
+ new_params.append(t)
+ all_params.remove(t)
+ params = new_params
+ assert not all_params, all_params
+ # Weed out subclasses.
+ # E.g. Union[int, Employee, Manager] == Union[int, Employee].
+ # If object is present it will be sole survivor among proper classes.
+ # Never discard type variables.
+ # (In particular, Union[str, AnyStr] != AnyStr.)
+ all_params = set(params)
+ for t1 in params:
+ if not isinstance(t1, type):
+ continue
+ if any(isinstance(t2, type) and issubclass(t1, t2)
+ for t2 in all_params - {t1}
+ if not (isinstance(t2, GenericMeta) and
+ t2.__origin__ is not None)):
+ all_params.remove(t1)
+ return tuple(t for t in params if t in all_params)
+
+
+def _check_generic(cls, parameters):
+ # Check correct count for parameters of a generic cls (internal helper).
+ if not cls.__parameters__:
+ raise TypeError("%s is not a generic class" % repr(cls))
+ alen = len(parameters)
+ elen = len(cls.__parameters__)
+ if alen != elen:
+ raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
+ ("many" if alen > elen else "few", repr(cls), alen, elen))
+
+
+_cleanups = []
+
+
+def _tp_cache(func):
+ maxsize = 128
+ cache = {}
+ _cleanups.append(cache.clear)
+
+ @functools.wraps(func)
+ def inner(*args):
+ key = args
+ try:
+ return cache[key]
+ except TypeError:
+ # Assume it's an unhashable argument.
+ return func(*args)
+ except KeyError:
+ value = func(*args)
+ if len(cache) >= maxsize:
+ # If the cache grows too much, just start over.
+ cache.clear()
+ cache[key] = value
+ return value
+
+ return inner
+
+
+class UnionMeta(TypingMeta):
+ """Metaclass for Union."""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ return super(UnionMeta, cls).__new__(cls, name, bases, namespace)
+
+
+class _Union(_FinalTypingBase):
+ """Union type; Union[X, Y] means either X or Y.
+
+ To define a union, use e.g. Union[int, str]. Details:
+
+ - The arguments must be types and there must be at least one.
+
+ - None as an argument is a special case and is replaced by
+ type(None).
+
+ - Unions of unions are flattened, e.g.::
+
+ Union[Union[int, str], float] == Union[int, str, float]
+
+ - Unions of a single argument vanish, e.g.::
+
+ Union[int] == int # The constructor actually returns int
+
+ - Redundant arguments are skipped, e.g.::
+
+ Union[int, str, int] == Union[int, str]
+
+ - When comparing unions, the argument order is ignored, e.g.::
+
+ Union[int, str] == Union[str, int]
+
+ - When two arguments have a subclass relationship, the least
+ derived argument is kept, e.g.::
+
+ class Employee: pass
+ class Manager(Employee): pass
+ Union[int, Employee, Manager] == Union[int, Employee]
+ Union[Manager, int, Employee] == Union[int, Employee]
+ Union[Employee, Manager] == Employee
+
+ - Similar for object::
+
+ Union[int, object] == object
+
+ - You cannot subclass or instantiate a union.
+
+ - You can use Optional[X] as a shorthand for Union[X, None].
+ """
+
+ __metaclass__ = UnionMeta
+ __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__')
+
+ def __new__(cls, parameters=None, origin=None, *args, **kwds):
+ self = super(_Union, cls).__new__(cls, parameters, origin, *args, **kwds)
+ if origin is None:
+ self.__parameters__ = None
+ self.__args__ = None
+ self.__origin__ = None
+ self.__tree_hash__ = hash(frozenset(('Union',)))
+ return self
+ if not isinstance(parameters, tuple):
+ raise TypeError("Expected parameters=<tuple>")
+ if origin is Union:
+ parameters = _remove_dups_flatten(parameters)
+ # It's not a union if there's only one type left.
+ if len(parameters) == 1:
+ return parameters[0]
+ self.__parameters__ = _type_vars(parameters)
+ self.__args__ = parameters
+ self.__origin__ = origin
+ # Pre-calculate the __hash__ on instantiation.
+ # This improves speed for complex substitutions.
+ subs_tree = self._subs_tree()
+ if isinstance(subs_tree, tuple):
+ self.__tree_hash__ = hash(frozenset(subs_tree))
+ else:
+ self.__tree_hash__ = hash(subs_tree)
+ return self
+
+ def _eval_type(self, globalns, localns):
+ if self.__args__ is None:
+ return self
+ ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__)
+ ev_origin = _eval_type(self.__origin__, globalns, localns)
+ if ev_args == self.__args__ and ev_origin == self.__origin__:
+ # Everything is already evaluated.
+ return self
+ return self.__class__(ev_args, ev_origin, _root=True)
+
+ def _get_type_vars(self, tvars):
+ if self.__origin__ and self.__parameters__:
+ _get_type_vars(self.__parameters__, tvars)
+
+ def __repr__(self):
+ if self.__origin__ is None:
+ return super(_Union, self).__repr__()
+ tree = self._subs_tree()
+ if not isinstance(tree, tuple):
+ return repr(tree)
+ return tree[0]._tree_repr(tree)
+
+ def _tree_repr(self, tree):
+ arg_list = []
+ for arg in tree[1:]:
+ if not isinstance(arg, tuple):
+ arg_list.append(_type_repr(arg))
+ else:
+ arg_list.append(arg[0]._tree_repr(arg))
+ return super(_Union, self).__repr__() + '[%s]' % ', '.join(arg_list)
+
+ @_tp_cache
+ def __getitem__(self, parameters):
+ if parameters == ():
+ raise TypeError("Cannot take a Union of no types.")
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ if self.__origin__ is None:
+ msg = "Union[arg, ...]: each arg must be a type."
+ else:
+ msg = "Parameters to generic types must be types."
+ parameters = tuple(_type_check(p, msg) for p in parameters)
+ if self is not Union:
+ _check_generic(self, parameters)
+ return self.__class__(parameters, origin=self, _root=True)
+
+ def _subs_tree(self, tvars=None, args=None):
+ if self is Union:
+ return Union # Nothing to substitute
+ tree_args = _subs_tree(self, tvars, args)
+ tree_args = _remove_dups_flatten(tree_args)
+ if len(tree_args) == 1:
+ return tree_args[0] # Union of a single type is that type
+ return (Union,) + tree_args
+
+ def __eq__(self, other):
+ if isinstance(other, _Union):
+ return self.__tree_hash__ == other.__tree_hash__
+ elif self is not Union:
+ return self._subs_tree() == other
+ else:
+ return self is other
+
+ def __hash__(self):
+ return self.__tree_hash__
+
+ def __instancecheck__(self, obj):
+ raise TypeError("Unions cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Unions cannot be used with issubclass().")
+
+
+Union = _Union(_root=True)
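+
+# Illustrative sketch (not part of the upstream file): the flattening and
+# de-duplication rules documented above, as they behave at runtime here.
+#
+#     Union[int, str] == Union[str, int]               # order is ignored
+#     Union[Union[int, str], float] == Union[int, str, float]
+#     Union[int] is int                                # single argument collapses
+#     Union[int, object] is object                     # least derived type wins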
+
+
+class OptionalMeta(TypingMeta):
+ """Metaclass for Optional."""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ return super(OptionalMeta, cls).__new__(cls, name, bases, namespace)
+
+
+class _Optional(_FinalTypingBase):
+ """Optional type.
+
+ Optional[X] is equivalent to Union[X, None].
+ """
+
+ __metaclass__ = OptionalMeta
+ __slots__ = ()
+
+ @_tp_cache
+ def __getitem__(self, arg):
+ arg = _type_check(arg, "Optional[t] requires a single type.")
+ return Union[arg, type(None)]
+
+
+Optional = _Optional(_root=True)
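+
+# Illustrative sketch (not part of the upstream file):
+#
+#     Optional[str] == Union[str, type(None)]   # Optional[X] is just Union[X, None]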
+
+
+def _next_in_mro(cls):
+ """Helper for Generic.__new__.
+
+ Returns the class after the last occurrence of Generic or
+ Generic[...] in cls.__mro__.
+ """
+ next_in_mro = object
+ # Look for the last occurrence of Generic or Generic[...].
+ for i, c in enumerate(cls.__mro__[:-1]):
+ if isinstance(c, GenericMeta) and c._gorg is Generic:
+ next_in_mro = cls.__mro__[i + 1]
+ return next_in_mro
+
+
+def _make_subclasshook(cls):
+ """Construct a __subclasshook__ callable that incorporates
+ the associated __extra__ class in subclass checks performed
+ against cls.
+ """
+ if isinstance(cls.__extra__, abc.ABCMeta):
+ # The logic mirrors that of ABCMeta.__subclasscheck__.
+ # Registered classes need not be checked here because
+ # cls and its extra share the same _abc_registry.
+ def __extrahook__(cls, subclass):
+ res = cls.__extra__.__subclasshook__(subclass)
+ if res is not NotImplemented:
+ return res
+ if cls.__extra__ in getattr(subclass, '__mro__', ()):
+ return True
+ for scls in cls.__extra__.__subclasses__():
+ if isinstance(scls, GenericMeta):
+ continue
+ if issubclass(subclass, scls):
+ return True
+ return NotImplemented
+ else:
+ # For non-ABC extras we'll just call issubclass().
+ def __extrahook__(cls, subclass):
+ if cls.__extra__ and issubclass(subclass, cls.__extra__):
+ return True
+ return NotImplemented
+ return classmethod(__extrahook__)
+
+
+class GenericMeta(TypingMeta, abc.ABCMeta):
+ """Metaclass for generic types.
+
+ This is a metaclass for typing.Generic and generic ABCs defined in
+ typing module. User defined subclasses of GenericMeta can override
+ __new__ and invoke super().__new__. Note that GenericMeta.__new__
+ has strict rules on what is allowed in its bases argument:
+ * plain Generic is disallowed in bases;
+ * Generic[...] should appear in bases at most once;
+ * if Generic[...] is present, then it should list all type variables
+ that appear in other bases.
+ In addition, type of all generic bases is erased, e.g., C[int] is
+ stripped to plain C.
+ """
+
+ def __new__(cls, name, bases, namespace,
+ tvars=None, args=None, origin=None, extra=None, orig_bases=None):
+ """Create a new generic class. GenericMeta.__new__ accepts
+ keyword arguments that are used for internal bookkeeping, therefore
+ an override should pass unused keyword arguments to super().
+ """
+ if tvars is not None:
+ # Called from __getitem__() below.
+ assert origin is not None
+ assert all(isinstance(t, TypeVar) for t in tvars), tvars
+ else:
+ # Called from class statement.
+ assert tvars is None, tvars
+ assert args is None, args
+ assert origin is None, origin
+
+ # Get the full set of tvars from the bases.
+ tvars = _type_vars(bases)
+ # Look for Generic[T1, ..., Tn].
+ # If found, tvars must be a subset of it.
+ # If not found, tvars is it.
+ # Also check for and reject plain Generic,
+ # and reject multiple Generic[...].
+ gvars = None
+ for base in bases:
+ if base is Generic:
+ raise TypeError("Cannot inherit from plain Generic")
+ if (isinstance(base, GenericMeta) and
+ base.__origin__ in (Generic, Protocol)):
+ if gvars is not None:
+ raise TypeError(
+ "Cannot inherit from Generic[...] or"
+ " Protocol[...] multiple times.")
+ gvars = base.__parameters__
+ if gvars is None:
+ gvars = tvars
+ else:
+ tvarset = set(tvars)
+ gvarset = set(gvars)
+ if not tvarset <= gvarset:
+ raise TypeError(
+ "Some type variables (%s) "
+ "are not listed in %s[%s]" %
+ (", ".join(str(t) for t in tvars if t not in gvarset),
+ "Generic" if any(b.__origin__ is Generic
+ for b in bases) else "Protocol",
+ ", ".join(str(g) for g in gvars)))
+ tvars = gvars
+
+ initial_bases = bases
+ if extra is None:
+ extra = namespace.get('__extra__')
+ if extra is not None and type(extra) is abc.ABCMeta and extra not in bases:
+ bases = (extra,) + bases
+ bases = tuple(b._gorg if isinstance(b, GenericMeta) else b for b in bases)
+
+ # remove bare Generic from bases if there are other generic bases
+ if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
+ bases = tuple(b for b in bases if b is not Generic)
+ namespace.update({'__origin__': origin, '__extra__': extra})
+ self = super(GenericMeta, cls).__new__(cls, name, bases, namespace)
+ super(GenericMeta, self).__setattr__('_gorg',
+ self if not origin else origin._gorg)
+
+ self.__parameters__ = tvars
+ # Note that GenericMeta will be subclassed by TupleMeta and
+ # CallableMeta; those two allow ..., (), or [] in __args__.
+ self.__args__ = tuple(Ellipsis if a is _TypingEllipsis else
+ () if a is _TypingEmpty else
+ a for a in args) if args else None
+ # Speed hack (https://github.com/python/typing/issues/196).
+ self.__next_in_mro__ = _next_in_mro(self)
+ # Preserve base classes on subclassing (__bases__ are type erased now).
+ if orig_bases is None:
+ self.__orig_bases__ = initial_bases
+
+ # This allows unparameterized generic collections to be used
+ # with issubclass() and isinstance() in the same way as their
+ # collections.abc counterparts (e.g., isinstance([], Iterable)).
+ if (
+ '__subclasshook__' not in namespace and extra or
+ # allow overriding
+ getattr(self.__subclasshook__, '__name__', '') == '__extrahook__'
+ ):
+ self.__subclasshook__ = _make_subclasshook(self)
+
+ if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2.
+ self.__qualname__ = origin.__qualname__
+ self.__tree_hash__ = (hash(self._subs_tree()) if origin else
+ super(GenericMeta, self).__hash__())
+ return self
+
+ def __init__(self, *args, **kwargs):
+ super(GenericMeta, self).__init__(*args, **kwargs)
+ if isinstance(self.__extra__, abc.ABCMeta):
+ self._abc_registry = self.__extra__._abc_registry
+ self._abc_cache = self.__extra__._abc_cache
+ elif self.__origin__ is not None:
+ self._abc_registry = self.__origin__._abc_registry
+ self._abc_cache = self.__origin__._abc_cache
+
+ # _abc_negative_cache and _abc_negative_cache_version are
+ # realised as descriptors, since GenClass[t1, t2, ...] always
+ # shares subclass info with GenClass.
+ # This is an important memory optimization.
+ @property
+ def _abc_negative_cache(self):
+ if isinstance(self.__extra__, abc.ABCMeta):
+ return self.__extra__._abc_negative_cache
+ return self._gorg._abc_generic_negative_cache
+
+ @_abc_negative_cache.setter
+ def _abc_negative_cache(self, value):
+ if self.__origin__ is None:
+ if isinstance(self.__extra__, abc.ABCMeta):
+ self.__extra__._abc_negative_cache = value
+ else:
+ self._abc_generic_negative_cache = value
+
+ @property
+ def _abc_negative_cache_version(self):
+ if isinstance(self.__extra__, abc.ABCMeta):
+ return self.__extra__._abc_negative_cache_version
+ return self._gorg._abc_generic_negative_cache_version
+
+ @_abc_negative_cache_version.setter
+ def _abc_negative_cache_version(self, value):
+ if self.__origin__ is None:
+ if isinstance(self.__extra__, abc.ABCMeta):
+ self.__extra__._abc_negative_cache_version = value
+ else:
+ self._abc_generic_negative_cache_version = value
+
+ def _get_type_vars(self, tvars):
+ if self.__origin__ and self.__parameters__:
+ _get_type_vars(self.__parameters__, tvars)
+
+ def _eval_type(self, globalns, localns):
+ ev_origin = (self.__origin__._eval_type(globalns, localns)
+ if self.__origin__ else None)
+ ev_args = tuple(_eval_type(a, globalns, localns) for a
+ in self.__args__) if self.__args__ else None
+ if ev_origin == self.__origin__ and ev_args == self.__args__:
+ return self
+ return self.__class__(self.__name__,
+ self.__bases__,
+ dict(self.__dict__),
+ tvars=_type_vars(ev_args) if ev_args else None,
+ args=ev_args,
+ origin=ev_origin,
+ extra=self.__extra__,
+ orig_bases=self.__orig_bases__)
+
+ def __repr__(self):
+ if self.__origin__ is None:
+ return super(GenericMeta, self).__repr__()
+ return self._tree_repr(self._subs_tree())
+
+ def _tree_repr(self, tree):
+ arg_list = []
+ for arg in tree[1:]:
+ if arg == ():
+ arg_list.append('()')
+ elif not isinstance(arg, tuple):
+ arg_list.append(_type_repr(arg))
+ else:
+ arg_list.append(arg[0]._tree_repr(arg))
+ return super(GenericMeta, self).__repr__() + '[%s]' % ', '.join(arg_list)
+
+ def _subs_tree(self, tvars=None, args=None):
+ if self.__origin__ is None:
+ return self
+ tree_args = _subs_tree(self, tvars, args)
+ return (self._gorg,) + tuple(tree_args)
+
+ def __eq__(self, other):
+ if not isinstance(other, GenericMeta):
+ return NotImplemented
+ if self.__origin__ is None or other.__origin__ is None:
+ return self is other
+ return self.__tree_hash__ == other.__tree_hash__
+
+ def __hash__(self):
+ return self.__tree_hash__
+
+ @_tp_cache
+ def __getitem__(self, params):
+ if not isinstance(params, tuple):
+ params = (params,)
+ if not params and self._gorg is not Tuple:
+ raise TypeError(
+ "Parameter list to %s[...] cannot be empty" % _qualname(self))
+ msg = "Parameters to generic types must be types."
+ params = tuple(_type_check(p, msg) for p in params)
+ if self in (Generic, Protocol):
+ # Generic can only be subscripted with unique type variables.
+ if not all(isinstance(p, TypeVar) for p in params):
+ raise TypeError(
+ "Parameters to %s[...] must all be type variables" % self.__name__)
+ if len(set(params)) != len(params):
+ raise TypeError(
+ "Parameters to %s[...] must all be unique" % self.__name__)
+ tvars = params
+ args = params
+ elif self in (Tuple, Callable):
+ tvars = _type_vars(params)
+ args = params
+ elif self.__origin__ in (Generic, Protocol):
+ # Can't subscript Generic[...] or Protocol[...].
+ raise TypeError("Cannot subscript already-subscripted %s" %
+ repr(self))
+ else:
+ # Subscripting a regular Generic subclass.
+ _check_generic(self, params)
+ tvars = _type_vars(params)
+ args = params
+
+ prepend = (self,) if self.__origin__ is None else ()
+ return self.__class__(self.__name__,
+ prepend + self.__bases__,
+ dict(self.__dict__),
+ tvars=tvars,
+ args=args,
+ origin=self,
+ extra=self.__extra__,
+ orig_bases=self.__orig_bases__)
+
+ def __subclasscheck__(self, cls):
+ if self.__origin__ is not None:
+ # These should only be modules within the standard library.
+ # singledispatch is an exception, because it's a Python 2 backport
+ # of functools.singledispatch.
+ whitelist = ['abc', 'functools', 'singledispatch']
+ if (sys._getframe(1).f_globals['__name__'] in whitelist or
+ # The second frame is needed for the case where we came
+ # from _ProtocolMeta.__subclasscheck__.
+ sys._getframe(2).f_globals['__name__'] in whitelist):
+ return False
+ raise TypeError("Parameterized generics cannot be used with class "
+ "or instance checks")
+ if self is Generic:
+ raise TypeError("Class %r cannot be used with class "
+ "or instance checks" % self)
+ return super(GenericMeta, self).__subclasscheck__(cls)
+
+ def __instancecheck__(self, instance):
+ # Since we extend ABC.__subclasscheck__ and
+ # ABC.__instancecheck__ inlines the cache checking done by the
+ # latter, we must extend __instancecheck__ too. For simplicity
+ # we just skip the cache check -- instance checks for generic
+ # classes are supposed to be rare anyways.
+ if hasattr(instance, "__class__"):
+ return issubclass(instance.__class__, self)
+ return False
+
+ def __setattr__(self, attr, value):
+ # We consider all the subscripted generics as proxies for the original class
+ if (
+ attr.startswith('__') and attr.endswith('__') or
+ attr.startswith('_abc_')
+ ):
+ super(GenericMeta, self).__setattr__(attr, value)
+ else:
+ super(GenericMeta, self._gorg).__setattr__(attr, value)
+
+
+def _copy_generic(self):
+ """Hack to work around https://bugs.python.org/issue11480 on Python 2"""
+ return self.__class__(self.__name__, self.__bases__, dict(self.__dict__),
+ self.__parameters__, self.__args__, self.__origin__,
+ self.__extra__, self.__orig_bases__)
+
+
+copy._copy_dispatch[GenericMeta] = _copy_generic
+
+
+# Prevent checks for Generic to crash when defining Generic.
+Generic = None
+
+
+def _generic_new(base_cls, cls, *args, **kwds):
+ # Ensure the type is erased on instantiation,
+ # but attempt to store it in __orig_class__
+ if cls.__origin__ is None:
+ if (base_cls.__new__ is object.__new__ and
+ cls.__init__ is not object.__init__):
+ return base_cls.__new__(cls)
+ else:
+ return base_cls.__new__(cls, *args, **kwds)
+ else:
+ origin = cls._gorg
+ if (base_cls.__new__ is object.__new__ and
+ cls.__init__ is not object.__init__):
+ obj = base_cls.__new__(origin)
+ else:
+ obj = base_cls.__new__(origin, *args, **kwds)
+ try:
+ obj.__orig_class__ = cls
+ except AttributeError:
+ pass
+ obj.__init__(*args, **kwds)
+ return obj
+
+
+class Generic(object):
+ """Abstract base class for generic types.
+
+ A generic type is typically declared by inheriting from
+ this class parameterized with one or more type variables.
+ For example, a generic mapping type might be defined as::
+
+ class Mapping(Generic[KT, VT]):
+ def __getitem__(self, key: KT) -> VT:
+ ...
+ # Etc.
+
+ This class can then be used as follows::
+
+ def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
+ try:
+ return mapping[key]
+ except KeyError:
+ return default
+ """
+
+ __metaclass__ = GenericMeta
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Generic:
+ raise TypeError("Type Generic cannot be instantiated; "
+ "it can be used only as a base class")
+ return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
+class _TypingEmpty(object):
+ """Internal placeholder for () or []. Used by TupleMeta and CallableMeta
+ to allow empty list/tuple in specific places, without allowing them
+ to sneak in where prohibited.
+ """
+
+
+class _TypingEllipsis(object):
+ """Internal placeholder for ... (ellipsis)."""
+
+
+class TupleMeta(GenericMeta):
+ """Metaclass for Tuple (internal)."""
+
+ @_tp_cache
+ def __getitem__(self, parameters):
+ if self.__origin__ is not None or self._gorg is not Tuple:
+ # Normal generic rules apply if this is not the first subscription
+ # or a subscription of a subclass.
+ return super(TupleMeta, self).__getitem__(parameters)
+ if parameters == ():
+ return super(TupleMeta, self).__getitem__((_TypingEmpty,))
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ if len(parameters) == 2 and parameters[1] is Ellipsis:
+ msg = "Tuple[t, ...]: t must be a type."
+ p = _type_check(parameters[0], msg)
+ return super(TupleMeta, self).__getitem__((p, _TypingEllipsis))
+ msg = "Tuple[t0, t1, ...]: each t must be a type."
+ parameters = tuple(_type_check(p, msg) for p in parameters)
+ return super(TupleMeta, self).__getitem__(parameters)
+
+ def __instancecheck__(self, obj):
+ if self.__args__ is None:
+ return isinstance(obj, tuple)
+ raise TypeError("Parameterized Tuple cannot be used "
+ "with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ if self.__args__ is None:
+ return issubclass(cls, tuple)
+ raise TypeError("Parameterized Tuple cannot be used "
+ "with issubclass().")
+
+
+copy._copy_dispatch[TupleMeta] = _copy_generic
+
+
+class Tuple(tuple):
+ """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
+
+ Example: Tuple[T1, T2] is a tuple of two elements corresponding
+ to type variables T1 and T2. Tuple[int, float, str] is a tuple
+ of an int, a float and a string.
+
+ To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
+ """
+
+ __metaclass__ = TupleMeta
+ __extra__ = tuple
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Tuple:
+ raise TypeError("Type Tuple cannot be instantiated; "
+ "use tuple() instead")
+ return _generic_new(tuple, cls, *args, **kwds)
+
+
+class CallableMeta(GenericMeta):
+ """ Metaclass for Callable."""
+
+ def __repr__(self):
+ if self.__origin__ is None:
+ return super(CallableMeta, self).__repr__()
+ return self._tree_repr(self._subs_tree())
+
+ def _tree_repr(self, tree):
+ if self._gorg is not Callable:
+ return super(CallableMeta, self)._tree_repr(tree)
+ # For actual Callable (not its subclass) we override
+ # super(CallableMeta, self)._tree_repr() for nice formatting.
+ arg_list = []
+ for arg in tree[1:]:
+ if not isinstance(arg, tuple):
+ arg_list.append(_type_repr(arg))
+ else:
+ arg_list.append(arg[0]._tree_repr(arg))
+ if arg_list[0] == '...':
+ return repr(tree[0]) + '[..., %s]' % arg_list[1]
+ return (repr(tree[0]) +
+ '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1]))
+
+ def __getitem__(self, parameters):
+ """A thin wrapper around __getitem_inner__ to provide the latter
+ with hashable arguments to improve speed.
+ """
+
+ if self.__origin__ is not None or self._gorg is not Callable:
+ return super(CallableMeta, self).__getitem__(parameters)
+ if not isinstance(parameters, tuple) or len(parameters) != 2:
+ raise TypeError("Callable must be used as "
+ "Callable[[arg, ...], result].")
+ args, result = parameters
+ if args is Ellipsis:
+ parameters = (Ellipsis, result)
+ else:
+ if not isinstance(args, list):
+ raise TypeError("Callable[args, result]: args must be a list."
+ " Got %.100r." % (args,))
+ parameters = (tuple(args), result)
+ return self.__getitem_inner__(parameters)
+
+ @_tp_cache
+ def __getitem_inner__(self, parameters):
+ args, result = parameters
+ msg = "Callable[args, result]: result must be a type."
+ result = _type_check(result, msg)
+ if args is Ellipsis:
+ return super(CallableMeta, self).__getitem__((_TypingEllipsis, result))
+ msg = "Callable[[arg, ...], result]: each arg must be a type."
+ args = tuple(_type_check(arg, msg) for arg in args)
+ parameters = args + (result,)
+ return super(CallableMeta, self).__getitem__(parameters)
+
+
+copy._copy_dispatch[CallableMeta] = _copy_generic
+
+
+class Callable(object):
+ """Callable type; Callable[[int], str] is a function of (int) -> str.
+
+ The subscription syntax must always be used with exactly two
+ values: the argument list and the return type. The argument list
+ must be a list of types or ellipsis; the return type must be a single type.
+
+ There is no syntax to indicate optional or keyword arguments;
+ such function types are rarely used as callback types.
+ """
+
+ __metaclass__ = CallableMeta
+ __extra__ = collections_abc.Callable
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Callable:
+ raise TypeError("Type Callable cannot be instantiated; "
+ "use a non-abstract subclass instead")
+ return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
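
A minimal sketch (not part of the vendored module) of the subscription rules CallableMeta enforces above; the name sig is hypothetical.

sig = Callable[[int, str], bool]      # argument-type list plus return type
any_args = Callable[Ellipsis, int]    # Ellipsis stands for "any argument list"
try:
    Callable[int, bool]               # rejected: args must be a list or Ellipsis
except TypeError:
    pass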
+
+
+def cast(typ, val):
+ """Cast a value to a type.
+
+ This returns the value unchanged. To the type checker this
+ signals that the return value has the designated type, but at
+ runtime we intentionally don't check anything (we want this
+ to be as fast as possible).
+ """
+ return val
+
+
+def _get_defaults(func):
+ """Internal helper to extract the default arguments, by name."""
+ code = func.__code__
+ pos_count = code.co_argcount
+ arg_names = code.co_varnames
+ arg_names = arg_names[:pos_count]
+ defaults = func.__defaults__ or ()
+ kwdefaults = func.__kwdefaults__
+ res = dict(kwdefaults) if kwdefaults else {}
+ pos_offset = pos_count - len(defaults)
+ for name, value in zip(arg_names[pos_offset:], defaults):
+ assert name not in res
+ res[name] = value
+ return res
+
+
+def get_type_hints(obj, globalns=None, localns=None):
+ """In Python 2 this is not supported and always returns None."""
+ return None
+
+
+def no_type_check(arg):
+ """Decorator to indicate that annotations are not type hints.
+
+ The argument must be a class or function; if it is a class, it
+ applies recursively to all methods and classes defined in that class
+ (but not to methods defined in its superclasses or subclasses).
+
+ This mutates the function(s) or class(es) in place.
+ """
+ if isinstance(arg, type):
+ arg_attrs = arg.__dict__.copy()
+ for attr, val in arg.__dict__.items():
+ if val in arg.__bases__ + (arg,):
+ arg_attrs.pop(attr)
+ for obj in arg_attrs.values():
+ if isinstance(obj, types.FunctionType):
+ obj.__no_type_check__ = True
+ if isinstance(obj, type):
+ no_type_check(obj)
+ try:
+ arg.__no_type_check__ = True
+ except TypeError: # built-in classes
+ pass
+ return arg
+
+
+def no_type_check_decorator(decorator):
+ """Decorator to give another decorator the @no_type_check effect.
+
+ This wraps the decorator with something that wraps the decorated
+ function in @no_type_check.
+ """
+
+ @functools.wraps(decorator)
+ def wrapped_decorator(*args, **kwds):
+ func = decorator(*args, **kwds)
+ func = no_type_check(func)
+ return func
+
+ return wrapped_decorator
+
+
+def _overload_dummy(*args, **kwds):
+ """Helper for @overload to raise when called."""
+ raise NotImplementedError(
+ "You should not call an overloaded function. "
+ "A series of @overload-decorated functions "
+ "outside a stub module should always be followed "
+ "by an implementation that is not @overload-ed.")
+
+
+def overload(func):
+ """Decorator for overloaded functions/methods.
+
+ In a stub file, place two or more stub definitions for the same
+ function in a row, each decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+
+ In a non-stub file (i.e. a regular .py file), do the same but
+ follow it with an implementation. The implementation should *not*
+ be decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+ def utf8(value):
+ # implementation goes here
+ """
+ return _overload_dummy
+
+
+_PROTO_WHITELIST = ['Callable', 'Iterable', 'Iterator',
+ 'Hashable', 'Sized', 'Container', 'Collection',
+ 'Reversible', 'ContextManager']
+
+
+class _ProtocolMeta(GenericMeta):
+ """Internal metaclass for Protocol.
+
+ This exists so Protocol classes can be generic without deriving
+ from Generic.
+ """
+ def __init__(cls, *args, **kwargs):
+ super(_ProtocolMeta, cls).__init__(*args, **kwargs)
+ if not cls.__dict__.get('_is_protocol', None):
+ cls._is_protocol = any(b is Protocol or
+ isinstance(b, _ProtocolMeta) and
+ b.__origin__ is Protocol
+ for b in cls.__bases__)
+ if cls._is_protocol:
+ for base in cls.__mro__[1:]:
+ if not (base in (object, Generic) or
+ base.__module__ == '_abcoll' and
+ base.__name__ in _PROTO_WHITELIST or
+ isinstance(base, TypingMeta) and base._is_protocol or
+ isinstance(base, GenericMeta) and base.__origin__ is Generic):
+ raise TypeError('Protocols can only inherit from other protocols,'
+ ' got %r' % base)
+ cls._callable_members_only = all(callable(getattr(cls, attr))
+ for attr in cls._get_protocol_attrs())
+
+ def _no_init(self, *args, **kwargs):
+ if type(self)._is_protocol:
+ raise TypeError('Protocols cannot be instantiated')
+ cls.__init__ = _no_init
+
+ def _proto_hook(cls, other):
+ if not cls.__dict__.get('_is_protocol', None):
+ return NotImplemented
+ if not isinstance(other, type):
+ # Similar error as for issubclass(1, int)
+ # (also not a chance for old-style classes)
+ raise TypeError('issubclass() arg 1 must be a new-style class')
+ for attr in cls._get_protocol_attrs():
+ for base in other.__mro__:
+ if attr in base.__dict__:
+ if base.__dict__[attr] is None:
+ return NotImplemented
+ break
+ else:
+ return NotImplemented
+ return True
+ if '__subclasshook__' not in cls.__dict__:
+ cls.__subclasshook__ = classmethod(_proto_hook)
+
+ def __instancecheck__(self, instance):
+ # We need this method for situations where attributes are assigned in __init__
+ if isinstance(instance, type):
+ # This looks like a fundamental limitation of Python 2.
+ # It cannot support runtime protocol metaclasses; on Python 2, classes
+ # cannot be correctly inspected as instances of protocols.
+ return False
+ if ((not getattr(self, '_is_protocol', False) or
+ self._callable_members_only) and
+ issubclass(instance.__class__, self)):
+ return True
+ if self._is_protocol:
+ if all(hasattr(instance, attr) and
+ (not callable(getattr(self, attr)) or
+ getattr(instance, attr) is not None)
+ for attr in self._get_protocol_attrs()):
+ return True
+ return super(GenericMeta, self).__instancecheck__(instance)
+
+ def __subclasscheck__(self, cls):
+ if (self.__dict__.get('_is_protocol', None) and
+ not self.__dict__.get('_is_runtime_protocol', None)):
+ if (sys._getframe(1).f_globals['__name__'] in ['abc', 'functools'] or
+ # This is needed because we remove subclasses from unions on Python 2.
+ sys._getframe(2).f_globals['__name__'] == 'typing'):
+ return False
+ raise TypeError("Instance and class checks can only be used with"
+ " @runtime_checkable protocols")
+ if (self.__dict__.get('_is_runtime_protocol', None) and
+ not self._callable_members_only):
+ if sys._getframe(1).f_globals['__name__'] in ['abc', 'functools']:
+ return super(GenericMeta, self).__subclasscheck__(cls)
+ raise TypeError("Protocols with non-method members"
+ " don't support issubclass()")
+ return super(_ProtocolMeta, self).__subclasscheck__(cls)
+
+ def _get_protocol_attrs(self):
+ attrs = set()
+ for base in self.__mro__[:-1]: # without object
+ if base.__name__ in ('Protocol', 'Generic'):
+ continue
+ annotations = getattr(base, '__annotations__', {})
+ for attr in list(base.__dict__.keys()) + list(annotations.keys()):
+ if (not attr.startswith('_abc_') and attr not in (
+ '__abstractmethods__', '__annotations__', '__weakref__',
+ '_is_protocol', '_is_runtime_protocol', '__dict__',
+ '__args__', '__slots__', '_get_protocol_attrs',
+ '__next_in_mro__', '__parameters__', '__origin__',
+ '__orig_bases__', '__extra__', '__tree_hash__',
+ '__doc__', '__subclasshook__', '__init__', '__new__',
+ '__module__', '_MutableMapping__marker',
+ '__metaclass__', '_gorg', '_callable_members_only')):
+ attrs.add(attr)
+ return attrs
+
+
+class Protocol(object):
+ """Base class for protocol classes. Protocol classes are defined as::
+
+ class Proto(Protocol):
+ def meth(self):
+ # type: () -> int
+ pass
+
+ Such classes are primarily used with static type checkers that recognize
+ structural subtyping (static duck-typing), for example::
+
+ class C:
+ def meth(self):
+ # type: () -> int
+ return 0
+
+ def func(x):
+ # type: (Proto) -> int
+ return x.meth()
+
+ func(C()) # Passes static type check
+
+ See PEP 544 for details. Protocol classes decorated with @typing.runtime_checkable
+ act as simple-minded runtime protocols that check only the presence of
+ given attributes, ignoring their type signatures.
+
+ Protocol classes can be generic, they are defined as::
+
+ class GenProto(Protocol[T]):
+ def meth(self):
+ # type: () -> T
+ pass
+ """
+
+ __metaclass__ = _ProtocolMeta
+ __slots__ = ()
+ _is_protocol = True
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Protocol:
+ raise TypeError("Type Protocol cannot be instantiated; "
+ "it can be used only as a base class")
+ return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
+def runtime_checkable(cls):
+ """Mark a protocol class as a runtime protocol, so that it
+ can be used with isinstance() and issubclass(). Raise TypeError
+ if applied to a non-protocol class.
+
+ This allows a simple-minded structural check very similar to the
+ one-offs in collections.abc such as Hashable.
+ """
+ if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol:
+ raise TypeError('@runtime_checkable can be only applied to protocol classes,'
+ ' got %r' % cls)
+ cls._is_runtime_protocol = True
+ return cls
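
A minimal usage sketch (not part of the vendored module): declaring and checking a runtime-checkable protocol with the Python 2 backport above. The names SupportsClose and Resource are hypothetical.

@runtime_checkable
class SupportsClose(Protocol):
    __slots__ = ()

    @abstractmethod
    def close(self):
        # type: () -> None
        pass


class Resource(object):
    def close(self):
        return None


# _ProtocolMeta.__instancecheck__ falls back to a structural check, so an
# instance only needs a callable close() attribute to satisfy the protocol.
assert isinstance(Resource(), SupportsClose)
assert not isinstance(object(), SupportsClose)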
+
+
+# Various ABCs mimicking those in collections.abc.
+# A few are simply re-exported for completeness.
+
+Hashable = collections_abc.Hashable # Not generic.
+
+
+class Iterable(Generic[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Iterable
+
+
+class Iterator(Iterable[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Iterator
+
+
+@runtime_checkable
+class SupportsInt(Protocol):
+ __slots__ = ()
+
+ @abstractmethod
+ def __int__(self):
+ pass
+
+
+@runtime_checkable
+class SupportsFloat(Protocol):
+ __slots__ = ()
+
+ @abstractmethod
+ def __float__(self):
+ pass
+
+
+@runtime_checkable
+class SupportsComplex(Protocol):
+ __slots__ = ()
+
+ @abstractmethod
+ def __complex__(self):
+ pass
+
+
+@runtime_checkable
+class SupportsIndex(Protocol):
+ __slots__ = ()
+
+ @abstractmethod
+ def __index__(self):
+ pass
+
+
+@runtime_checkable
+class SupportsAbs(Protocol[T_co]):
+ __slots__ = ()
+
+ @abstractmethod
+ def __abs__(self):
+ pass
+
+
+if hasattr(collections_abc, 'Reversible'):
+ class Reversible(Iterable[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Reversible
+else:
+ @runtime_checkable
+ class Reversible(Protocol[T_co]):
+ __slots__ = ()
+
+ @abstractmethod
+ def __reversed__(self):
+ pass
+
+
+Sized = collections_abc.Sized # Not generic.
+
+
+class Container(Generic[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Container
+
+
+# Callable was defined earlier.
+
+
+class AbstractSet(Sized, Iterable[T_co], Container[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Set
+
+
+class MutableSet(AbstractSet[T]):
+ __slots__ = ()
+ __extra__ = collections_abc.MutableSet
+
+
+# NOTE: It is only covariant in the value type.
+class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Mapping
+
+
+class MutableMapping(Mapping[KT, VT]):
+ __slots__ = ()
+ __extra__ = collections_abc.MutableMapping
+
+
+if hasattr(collections_abc, 'Reversible'):
+ class Sequence(Sized, Reversible[T_co], Container[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Sequence
+else:
+ class Sequence(Sized, Iterable[T_co], Container[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Sequence
+
+
+class MutableSequence(Sequence[T]):
+ __slots__ = ()
+ __extra__ = collections_abc.MutableSequence
+
+
+class ByteString(Sequence[int]):
+ pass
+
+
+ByteString.register(str)
+ByteString.register(bytearray)
+
+
+class List(list, MutableSequence[T]):
+ __slots__ = ()
+ __extra__ = list
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is List:
+ raise TypeError("Type List cannot be instantiated; "
+ "use list() instead")
+ return _generic_new(list, cls, *args, **kwds)
+
+
+class Deque(collections.deque, MutableSequence[T]):
+ __slots__ = ()
+ __extra__ = collections.deque
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Deque:
+ return collections.deque(*args, **kwds)
+ return _generic_new(collections.deque, cls, *args, **kwds)
+
+
+class Set(set, MutableSet[T]):
+ __slots__ = ()
+ __extra__ = set
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Set:
+ raise TypeError("Type Set cannot be instantiated; "
+ "use set() instead")
+ return _generic_new(set, cls, *args, **kwds)
+
+
+class FrozenSet(frozenset, AbstractSet[T_co]):
+ __slots__ = ()
+ __extra__ = frozenset
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is FrozenSet:
+ raise TypeError("Type FrozenSet cannot be instantiated; "
+ "use frozenset() instead")
+ return _generic_new(frozenset, cls, *args, **kwds)
+
+
+class MappingView(Sized, Iterable[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.MappingView
+
+
+class KeysView(MappingView[KT], AbstractSet[KT]):
+ __slots__ = ()
+ __extra__ = collections_abc.KeysView
+
+
+class ItemsView(MappingView[Tuple[KT, VT_co]],
+ AbstractSet[Tuple[KT, VT_co]],
+ Generic[KT, VT_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.ItemsView
+
+
+class ValuesView(MappingView[VT_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.ValuesView
+
+
+class ContextManager(Generic[T_co]):
+ __slots__ = ()
+
+ def __enter__(self):
+ return self
+
+ @abc.abstractmethod
+ def __exit__(self, exc_type, exc_value, traceback):
+ return None
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is ContextManager:
+ # In Python 3.6+, it is possible to set a method to None to
+ # explicitly indicate that the class does not implement an ABC
+ # (https://bugs.python.org/issue25958), but we do not support
+ # that pattern here because this fallback class is only used
+ # in Python 3.5 and earlier.
+ if (any("__enter__" in B.__dict__ for B in C.__mro__) and
+ any("__exit__" in B.__dict__ for B in C.__mro__)):
+ return True
+ return NotImplemented
+
+
+class Dict(dict, MutableMapping[KT, VT]):
+ __slots__ = ()
+ __extra__ = dict
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Dict:
+ raise TypeError("Type Dict cannot be instantiated; "
+ "use dict() instead")
+ return _generic_new(dict, cls, *args, **kwds)
+
+
+class DefaultDict(collections.defaultdict, MutableMapping[KT, VT]):
+ __slots__ = ()
+ __extra__ = collections.defaultdict
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is DefaultDict:
+ return collections.defaultdict(*args, **kwds)
+ return _generic_new(collections.defaultdict, cls, *args, **kwds)
+
+
+class Counter(collections.Counter, Dict[T, int]):
+ __slots__ = ()
+ __extra__ = collections.Counter
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Counter:
+ return collections.Counter(*args, **kwds)
+ return _generic_new(collections.Counter, cls, *args, **kwds)
+
+
+# Determine what base class to use for Generator.
+if hasattr(collections_abc, 'Generator'):
+ # Sufficiently recent versions of 3.5 have a Generator ABC.
+ _G_base = collections_abc.Generator
+else:
+ # Fall back on the exact type.
+ _G_base = types.GeneratorType
+
+
+class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co]):
+ __slots__ = ()
+ __extra__ = _G_base
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Generator:
+ raise TypeError("Type Generator cannot be instantiated; "
+ "create a subclass instead")
+ return _generic_new(_G_base, cls, *args, **kwds)
+
+
+# Internal type variable used for Type[].
+CT_co = TypeVar('CT_co', covariant=True, bound=type)
+
+
+# This is not a real generic class. Don't use outside annotations.
+class Type(Generic[CT_co]):
+ """A special construct usable to annotate class objects.
+
+ For example, suppose we have the following classes::
+
+ class User: ... # Abstract base for User classes
+ class BasicUser(User): ...
+ class ProUser(User): ...
+ class TeamUser(User): ...
+
+ And a function that takes a class argument that's a subclass of
+ User and returns an instance of the corresponding class::
+
+ U = TypeVar('U', bound=User)
+ def new_user(user_class: Type[U]) -> U:
+ user = user_class()
+ # (Here we could write the user object to a database)
+ return user
+
+ joe = new_user(BasicUser)
+
+ At this point the type checker knows that joe has type BasicUser.
+ """
+ __slots__ = ()
+ __extra__ = type
+
+
+def NamedTuple(typename, fields):
+ """Typed version of namedtuple.
+
+ Usage::
+
+ Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)])
+
+ This is equivalent to::
+
+ Employee = collections.namedtuple('Employee', ['name', 'id'])
+
+ The resulting class has one extra attribute: _field_types,
+ giving a dict mapping field names to types. (The field names
+ are in the _fields attribute, which is part of the namedtuple
+ API.)
+ """
+ fields = [(n, t) for n, t in fields]
+ cls = collections.namedtuple(typename, [n for n, t in fields])
+ cls._field_types = dict(fields)
+ # Set the module to the caller's module (otherwise it'd be 'typing').
+ try:
+ cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError):
+ pass
+ return cls
+
+
+def _check_fails(cls, other):
+ try:
+ if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools', 'typing']:
+ # Typed dicts are only for static structural subtyping.
+ raise TypeError('TypedDict does not support instance and class checks')
+ except (AttributeError, ValueError):
+ pass
+ return False
+
+
+def _dict_new(cls, *args, **kwargs):
+ return dict(*args, **kwargs)
+
+
+def _typeddict_new(cls, _typename, _fields=None, **kwargs):
+ total = kwargs.pop('total', True)
+ if _fields is None:
+ _fields = kwargs
+ elif kwargs:
+ raise TypeError("TypedDict takes either a dict or keyword arguments,"
+ " but not both")
+
+ ns = {'__annotations__': dict(_fields), '__total__': total}
+ try:
+ # Setting correct module is necessary to make typed dict classes pickleable.
+ ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError):
+ pass
+
+ return _TypedDictMeta(_typename, (), ns)
+
+
+class _TypedDictMeta(type):
+ def __new__(cls, name, bases, ns, total=True):
+ # Create new typed dict class object.
+ # This method is called directly when TypedDict is subclassed,
+ # or via _typeddict_new when TypedDict is instantiated. This way
+ # TypedDict supports all three syntaxes described in its docstring.
+ # Subclasses and instances of TypedDict return actual dictionaries
+ # via _dict_new.
+ ns['__new__'] = _typeddict_new if name == b'TypedDict' else _dict_new
+ tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns)
+
+ anns = ns.get('__annotations__', {})
+ msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+ anns = {n: _type_check(tp, msg) for n, tp in anns.items()}
+ for base in bases:
+ anns.update(base.__dict__.get('__annotations__', {}))
+ tp_dict.__annotations__ = anns
+ if not hasattr(tp_dict, '__total__'):
+ tp_dict.__total__ = total
+ return tp_dict
+
+ __instancecheck__ = __subclasscheck__ = _check_fails
+
+
+TypedDict = _TypedDictMeta(b'TypedDict', (dict,), {})
+TypedDict.__module__ = __name__
+TypedDict.__doc__ = \
+ """A simple typed namespace. At runtime it is equivalent to a plain dict.
+
+ TypedDict creates a dictionary type that expects all of its
+ instances to have a certain set of keys, with each key
+ associated with a value of a consistent type. This expectation
+ is not checked at runtime but is only enforced by type checkers.
+ Usage::
+
+ Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+
+ a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
+ b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
+
+ assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+
+ The type info could be accessed via Point2D.__annotations__. TypedDict
+ supports an additional equivalent form::
+
+ Point2D = TypedDict('Point2D', x=int, y=int, label=str)
+ """
+
+
+def NewType(name, tp):
+ """NewType creates simple unique types with almost zero
+ runtime overhead. NewType(name, tp) is considered a subtype of tp
+ by static type checkers. At runtime, NewType(name, tp) returns
+ a dummy function that simply returns its argument. Usage::
+
+ UserId = NewType('UserId', int)
+
+ def name_by_id(user_id):
+ # type: (UserId) -> str
+ ...
+
+ UserId('user') # Fails type check
+
+ name_by_id(42) # Fails type check
+ name_by_id(UserId(42)) # OK
+
+ num = UserId(5) + 1 # type: int
+ """
+
+ def new_type(x):
+ return x
+
+ # Some versions of Python 2 complain if __name__ is a unicode string, so coerce it to str
+ new_type.__name__ = str(name)
+ new_type.__supertype__ = tp
+ return new_type
+
+
+# Python-version-specific alias (Python 2: unicode; Python 3: str)
+Text = unicode
+
+
+# Constant that's True when type checking, but False here.
+TYPE_CHECKING = False
+
+
+class IO(Generic[AnyStr]):
+ """Generic base class for TextIO and BinaryIO.
+
+ This is an abstract, generic version of the return of open().
+
+ NOTE: This does not distinguish between the different possible
+ classes (text vs. binary, read vs. write vs. read/write,
+ append-only, unbuffered). The TextIO and BinaryIO subclasses
+ below capture the distinctions between text vs. binary, which is
+ pervasive in the interface; however we currently do not offer a
+ way to track the other distinctions in the type system.
+ """
+
+ __slots__ = ()
+
+ @abstractproperty
+ def mode(self):
+ pass
+
+ @abstractproperty
+ def name(self):
+ pass
+
+ @abstractmethod
+ def close(self):
+ pass
+
+ @abstractproperty
+ def closed(self):
+ pass
+
+ @abstractmethod
+ def fileno(self):
+ pass
+
+ @abstractmethod
+ def flush(self):
+ pass
+
+ @abstractmethod
+ def isatty(self):
+ pass
+
+ @abstractmethod
+ def read(self, n=-1):
+ pass
+
+ @abstractmethod
+ def readable(self):
+ pass
+
+ @abstractmethod
+ def readline(self, limit=-1):
+ pass
+
+ @abstractmethod
+ def readlines(self, hint=-1):
+ pass
+
+ @abstractmethod
+ def seek(self, offset, whence=0):
+ pass
+
+ @abstractmethod
+ def seekable(self):
+ pass
+
+ @abstractmethod
+ def tell(self):
+ pass
+
+ @abstractmethod
+ def truncate(self, size=None):
+ pass
+
+ @abstractmethod
+ def writable(self):
+ pass
+
+ @abstractmethod
+ def write(self, s):
+ pass
+
+ @abstractmethod
+ def writelines(self, lines):
+ pass
+
+ @abstractmethod
+ def __enter__(self):
+ pass
+
+ @abstractmethod
+ def __exit__(self, type, value, traceback):
+ pass
+
+
+class BinaryIO(IO[bytes]):
+ """Typed version of the return of open() in binary mode."""
+
+ __slots__ = ()
+
+ @abstractmethod
+ def write(self, s):
+ pass
+
+ @abstractmethod
+ def __enter__(self):
+ pass
+
+
+class TextIO(IO[unicode]):
+ """Typed version of the return of open() in text mode."""
+
+ __slots__ = ()
+
+ @abstractproperty
+ def buffer(self):
+ pass
+
+ @abstractproperty
+ def encoding(self):
+ pass
+
+ @abstractproperty
+ def errors(self):
+ pass
+
+ @abstractproperty
+ def line_buffering(self):
+ pass
+
+ @abstractproperty
+ def newlines(self):
+ pass
+
+ @abstractmethod
+ def __enter__(self):
+ pass
+
+
+class io(object):
+ """Wrapper namespace for IO generic classes."""
+
+ __all__ = ['IO', 'TextIO', 'BinaryIO']
+ IO = IO
+ TextIO = TextIO
+ BinaryIO = BinaryIO
+
+
+io.__name__ = __name__ + b'.io'
+sys.modules[io.__name__] = io
+
+
+Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')),
+ lambda p: p.pattern)
+Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')),
+ lambda m: m.re.pattern)
+
+
+class re(object):
+ """Wrapper namespace for re type aliases."""
+
+ __all__ = ['Pattern', 'Match']
+ Pattern = Pattern
+ Match = Match
+
+
+re.__name__ = __name__ + b'.re'
+sys.modules[re.__name__] = re
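
A brief sketch (not part of the patch) of how the generic aliases defined by this backport behave at runtime, assuming the module above is importable as typing; the names KV and Registry are hypothetical.

from typing import Dict

KV = Dict[str, int]                    # subscription builds a parameterized alias
assert KV.__origin__ is Dict and KV.__args__ == (str, int)

class Registry(Dict[str, int]):        # concrete generic subclasses are real dicts
    pass

assert isinstance(Registry(), dict)

try:
    isinstance({}, Dict[str, int])     # parameterized generics reject isinstance()
except TypeError:
    pass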
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/LICENSE
new file mode 100644
index 0000000000..5e795a61f3
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/LICENSE
@@ -0,0 +1,7 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/METADATA
new file mode 100644
index 0000000000..723cd1b4d8
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/METADATA
@@ -0,0 +1,49 @@
+Metadata-Version: 2.1
+Name: zipp
+Version: 1.2.0
+Summary: Backport of pathlib-compatible object wrapper for zip files
+Home-page: https://github.com/jaraco/zipp
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Requires-Python: >=2.7
+Requires-Dist: contextlib2 ; python_version < "3.4"
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=3.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pathlib2 ; extra == 'testing'
+Requires-Dist: unittest2 ; extra == 'testing'
+Requires-Dist: jaraco.itertools ; extra == 'testing'
+Requires-Dist: func-timeout ; extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/zipp.svg
+ :target: https://pypi.org/project/zipp
+
+.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
+
+.. image:: https://img.shields.io/travis/jaraco/zipp/master.svg
+ :target: https://travis-ci.org/jaraco/zipp
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+ :alt: Code style: Black
+
+.. image:: https://img.shields.io/appveyor/ci/jaraco/zipp/master.svg
+ :target: https://ci.appveyor.com/project/jaraco/zipp/branch/master
+
+.. .. image:: https://readthedocs.org/projects/zipp/badge/?version=latest
+.. :target: https://zipp.readthedocs.io/en/latest/?badge=latest
+
+
+A pathlib-compatible Zipfile object wrapper. A backport of the
+`Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_.
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/RECORD
new file mode 100644
index 0000000000..be9f067d8f
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/RECORD
@@ -0,0 +1,6 @@
+zipp.py,sha256=uw2fKH3c8O07ReW9L27THYalYWMMWv_juvr-5BG-3zA,7039
+zipp-1.2.0.dist-info/LICENSE,sha256=pV4v_ptEmY5iHVHYwJS-0JrMS1I27nPX3zlaM7o8GP0,1050
+zipp-1.2.0.dist-info/METADATA,sha256=5-1p1kkPwGBPcQe6naFJUH024SFYA4rOmDzqvrLFRGM,1795
+zipp-1.2.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+zipp-1.2.0.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5
+zipp-1.2.0.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/WHEEL
new file mode 100644
index 0000000000..ef99c6cf32
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..e82f676f82
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+zipp
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp.py b/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp.py
new file mode 100644
index 0000000000..892205834a
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp.py
@@ -0,0 +1,286 @@
+# coding: utf-8
+
+from __future__ import division
+
+import io
+import sys
+import posixpath
+import zipfile
+import functools
+import itertools
+from collections import OrderedDict
+
+try:
+ from contextlib import suppress
+except ImportError:
+ from contextlib2 import suppress
+
+__metaclass__ = type
+
+
+def _parents(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all parents of that path.
+
+ >>> list(_parents('b/d'))
+ ['b']
+ >>> list(_parents('/b/d/'))
+ ['/b']
+ >>> list(_parents('b/d/f/'))
+ ['b/d', 'b']
+ >>> list(_parents('b'))
+ []
+ >>> list(_parents(''))
+ []
+ """
+ return itertools.islice(_ancestry(path), 1, None)
+
+
+def _ancestry(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all elements of that path
+
+ >>> list(_ancestry('b/d'))
+ ['b/d', 'b']
+ >>> list(_ancestry('/b/d/'))
+ ['/b/d', '/b']
+ >>> list(_ancestry('b/d/f/'))
+ ['b/d/f', 'b/d', 'b']
+ >>> list(_ancestry('b'))
+ ['b']
+ >>> list(_ancestry(''))
+ []
+ """
+ path = path.rstrip(posixpath.sep)
+ while path and path != posixpath.sep:
+ yield path
+ path, tail = posixpath.split(path)
+
+
+class CompleteDirs(zipfile.ZipFile):
+ """
+ A ZipFile subclass that ensures that implied directories
+ are always included in the namelist.
+ """
+
+ @staticmethod
+ def _implied_dirs(names):
+ parents = itertools.chain.from_iterable(map(_parents, names))
+ # Cast names to a set for O(1) lookups
+ existing = set(names)
+ # Deduplicate entries in original order
+ implied_dirs = OrderedDict.fromkeys(
+ p + posixpath.sep for p in parents
+ if p + posixpath.sep not in existing
+ )
+ return implied_dirs
+
+ def namelist(self):
+ names = super(CompleteDirs, self).namelist()
+ return names + list(self._implied_dirs(names))
+
+ def _name_set(self):
+ return set(self.namelist())
+
+ def resolve_dir(self, name):
+ """
+ If the name represents a directory, return that name
+ as a directory (with the trailing slash).
+ """
+ names = self._name_set()
+ dirname = name + '/'
+ dir_match = name not in names and dirname in names
+ return dirname if dir_match else name
+
+ @classmethod
+ def make(cls, source):
+ """
+ Given a source (filename or zipfile), return an
+ appropriate CompleteDirs subclass.
+ """
+ if isinstance(source, CompleteDirs):
+ return source
+
+ if not isinstance(source, zipfile.ZipFile):
+ return cls(_pathlib_compat(source))
+
+ # Only allow for FastLookup when supplied zipfile is read-only
+ if 'r' not in source.mode:
+ cls = CompleteDirs
+
+ res = cls.__new__(cls)
+ vars(res).update(vars(source))
+ return res
+
+
+class FastLookup(CompleteDirs):
+ """
+ ZipFile subclass to ensure implicit
+ dirs exist and are resolved rapidly.
+ """
+ def namelist(self):
+ with suppress(AttributeError):
+ return self.__names
+ self.__names = super(FastLookup, self).namelist()
+ return self.__names
+
+ def _name_set(self):
+ with suppress(AttributeError):
+ return self.__lookup
+ self.__lookup = super(FastLookup, self)._name_set()
+ return self.__lookup
+
+
+def _pathlib_compat(path):
+ """
+ For path-like objects, convert to a filename for compatibility
+ on Python 3.6.1 and earlier.
+ """
+ try:
+ return path.__fspath__()
+ except AttributeError:
+ return str(path)
+
+
+class Path:
+ """
+ A pathlib-compatible interface for zip files.
+
+ Consider a zip file with this structure::
+
+ .
+ ├── a.txt
+ └── b
+ ├── c.txt
+ └── d
+ └── e.txt
+
+ >>> data = io.BytesIO()
+ >>> zf = zipfile.ZipFile(data, 'w')
+ >>> zf.writestr('a.txt', 'content of a')
+ >>> zf.writestr('b/c.txt', 'content of c')
+ >>> zf.writestr('b/d/e.txt', 'content of e')
+ >>> zf.filename = 'abcde.zip'
+
+ Path accepts the zipfile object itself or a filename
+
+ >>> root = Path(zf)
+
+ From there, several path operations are available.
+
+ Directory iteration (including the zip file itself):
+
+ >>> a, b = root.iterdir()
+ >>> a
+ Path('abcde.zip', 'a.txt')
+ >>> b
+ Path('abcde.zip', 'b/')
+
+ name property:
+
+ >>> b.name
+ 'b'
+
+ join with divide operator:
+
+ >>> c = b / 'c.txt'
+ >>> c
+ Path('abcde.zip', 'b/c.txt')
+ >>> c.name
+ 'c.txt'
+
+ Read text:
+
+ >>> c.read_text()
+ 'content of c'
+
+ existence:
+
+ >>> c.exists()
+ True
+ >>> (b / 'missing.txt').exists()
+ False
+
+ Coercion to string:
+
+ >>> str(c)
+ 'abcde.zip/b/c.txt'
+ """
+
+ __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
+
+ def __init__(self, root, at=""):
+ self.root = FastLookup.make(root)
+ self.at = at
+
+ def open(self, mode='r', *args, **kwargs):
+ """
+ Open this entry as text or binary following the semantics
+ of ``pathlib.Path.open()`` by passing arguments through
+ to io.TextIOWrapper().
+ """
+ pwd = kwargs.pop('pwd', None)
+ zip_mode = mode[0]
+ stream = self.root.open(self.at, zip_mode, pwd=pwd)
+ if 'b' in mode:
+ if args or kwargs:
+ raise ValueError("encoding args invalid for binary operation")
+ return stream
+ return io.TextIOWrapper(stream, *args, **kwargs)
+
+ @property
+ def name(self):
+ return posixpath.basename(self.at.rstrip("/"))
+
+ def read_text(self, *args, **kwargs):
+ with self.open('r', *args, **kwargs) as strm:
+ return strm.read()
+
+ def read_bytes(self):
+ with self.open('rb') as strm:
+ return strm.read()
+
+ def _is_child(self, path):
+ return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
+
+ def _next(self, at):
+ return Path(self.root, at)
+
+ def is_dir(self):
+ return not self.at or self.at.endswith("/")
+
+ def is_file(self):
+ return not self.is_dir()
+
+ def exists(self):
+ return self.at in self.root._name_set()
+
+ def iterdir(self):
+ if not self.is_dir():
+ raise ValueError("Can't listdir a file")
+ subs = map(self._next, self.root.namelist())
+ return filter(self._is_child, subs)
+
+ def __str__(self):
+ return posixpath.join(self.root.filename, self.at)
+
+ def __repr__(self):
+ return self.__repr.format(self=self)
+
+ def joinpath(self, add):
+ next = posixpath.join(self.at, _pathlib_compat(add))
+ return self._next(self.root.resolve_dir(next))
+
+ __truediv__ = joinpath
+
+ @property
+ def parent(self):
+ parent_at = posixpath.dirname(self.at.rstrip('/'))
+ if parent_at:
+ parent_at += '/'
+ return self._next(parent_at)
+
+ if sys.version_info < (3,):
+ __div__ = __truediv__
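
A small sketch (not part of the patch) of what the CompleteDirs class above adds on top of a plain zipfile.ZipFile: directory entries the archive never recorded are synthesized by namelist().

import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    zf.writestr('pkg/mod.py', 'x = 1')

# The raw archive only records the file entry...
assert zipfile.ZipFile(buf).namelist() == ['pkg/mod.py']
# ...while CompleteDirs also reports the implied parent directory.
assert CompleteDirs(buf).namelist() == ['pkg/mod.py', 'pkg/']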
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/LICENSE b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/LICENSE
new file mode 100644
index 0000000000..353924be0e
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/METADATA b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/METADATA
new file mode 100644
index 0000000000..9e71c5a8c4
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/METADATA
@@ -0,0 +1,54 @@
+Metadata-Version: 2.1
+Name: zipp
+Version: 3.4.0
+Summary: Backport of pathlib-compatible object wrapper for zip files
+Home-page: https://github.com/jaraco/zipp
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.6
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=3.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (!=3.7.3,>=3.5) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=1.2.3) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: jaraco.test (>=3.2.0) ; extra == 'testing'
+Requires-Dist: jaraco.itertools ; extra == 'testing'
+Requires-Dist: func-timeout ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/zipp.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/zipp
+
+.. image:: https://github.com/jaraco/zipp/workflows/Automated%20Tests/badge.svg
+ :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22Automated+Tests%22
+ :alt: Automated Tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. .. image:: https://readthedocs.org/projects/zipp/badge/?version=latest
+.. :target: https://zipp.readthedocs.io/en/latest/?badge=latest
+
+
+A pathlib-compatible Zipfile object wrapper. A backport of the
+`Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_.
+
+
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/RECORD b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/RECORD
new file mode 100644
index 0000000000..3c441ec9bd
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/RECORD
@@ -0,0 +1,6 @@
+zipp.py,sha256=wMSoYxAIPgYnqJAW0JcAl5sWaIcFc5xk3dNjf6ElGgU,8089
+zipp-3.4.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+zipp-3.4.0.dist-info/METADATA,sha256=noSfks-ReGCmOSTxll7TELBJy0P_yAvVLa0FCFyhMeM,2134
+zipp-3.4.0.dist-info/WHEEL,sha256=EVRjI69F5qVjm_YgqcTXPnTAv3BfSUr0WVAHuSP3Xoo,92
+zipp-3.4.0.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5
+zipp-3.4.0.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/WHEEL b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/WHEEL
new file mode 100644
index 0000000000..83ff02e961
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/top_level.txt b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..e82f676f82
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+zipp
diff --git a/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp.py b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp.py
new file mode 100644
index 0000000000..25ef06e929
--- /dev/null
+++ b/third_party/python/virtualenv/__virtualenv__/zipp-3.4.0-py3-none-any/zipp.py
@@ -0,0 +1,314 @@
+import io
+import posixpath
+import zipfile
+import itertools
+import contextlib
+import sys
+import pathlib
+
+if sys.version_info < (3, 7):
+ from collections import OrderedDict
+else:
+ OrderedDict = dict
+
+
+def _parents(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all parents of that path.
+
+ >>> list(_parents('b/d'))
+ ['b']
+ >>> list(_parents('/b/d/'))
+ ['/b']
+ >>> list(_parents('b/d/f/'))
+ ['b/d', 'b']
+ >>> list(_parents('b'))
+ []
+ >>> list(_parents(''))
+ []
+ """
+ return itertools.islice(_ancestry(path), 1, None)
+
+
+def _ancestry(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all elements of that path
+
+ >>> list(_ancestry('b/d'))
+ ['b/d', 'b']
+ >>> list(_ancestry('/b/d/'))
+ ['/b/d', '/b']
+ >>> list(_ancestry('b/d/f/'))
+ ['b/d/f', 'b/d', 'b']
+ >>> list(_ancestry('b'))
+ ['b']
+ >>> list(_ancestry(''))
+ []
+ """
+ path = path.rstrip(posixpath.sep)
+ while path and path != posixpath.sep:
+ yield path
+ path, tail = posixpath.split(path)
+
+
+_dedupe = OrderedDict.fromkeys
+"""Deduplicate an iterable in original order"""
+
+
+def _difference(minuend, subtrahend):
+ """
+ Return items in minuend not in subtrahend, retaining order
+ with O(1) lookup.
+ """
+ return itertools.filterfalse(set(subtrahend).__contains__, minuend)
+
+
+class CompleteDirs(zipfile.ZipFile):
+ """
+ A ZipFile subclass that ensures that implied directories
+ are always included in the namelist.
+ """
+
+ @staticmethod
+ def _implied_dirs(names):
+ parents = itertools.chain.from_iterable(map(_parents, names))
+ as_dirs = (p + posixpath.sep for p in parents)
+ return _dedupe(_difference(as_dirs, names))
+
+ def namelist(self):
+ names = super(CompleteDirs, self).namelist()
+ return names + list(self._implied_dirs(names))
+
+ def _name_set(self):
+ return set(self.namelist())
+
+ def resolve_dir(self, name):
+ """
+ If the name represents a directory, return that name
+ as a directory (with the trailing slash).
+ """
+ names = self._name_set()
+ dirname = name + '/'
+ dir_match = name not in names and dirname in names
+ return dirname if dir_match else name
+
+ @classmethod
+ def make(cls, source):
+ """
+ Given a source (filename or zipfile), return an
+ appropriate CompleteDirs subclass.
+ """
+ if isinstance(source, CompleteDirs):
+ return source
+
+ if not isinstance(source, zipfile.ZipFile):
+ return cls(_pathlib_compat(source))
+
+ # Only allow for FastLookup when supplied zipfile is read-only
+ if 'r' not in source.mode:
+ cls = CompleteDirs
+
+ source.__class__ = cls
+ return source
+
+
+class FastLookup(CompleteDirs):
+ """
+ ZipFile subclass to ensure implicit
+ dirs exist and are resolved rapidly.
+ """
+
+ def namelist(self):
+ with contextlib.suppress(AttributeError):
+ return self.__names
+ self.__names = super(FastLookup, self).namelist()
+ return self.__names
+
+ def _name_set(self):
+ with contextlib.suppress(AttributeError):
+ return self.__lookup
+ self.__lookup = super(FastLookup, self)._name_set()
+ return self.__lookup
+
+
+def _pathlib_compat(path):
+ """
+ For path-like objects, convert to a filename for compatibility
+ on Python 3.6.1 and earlier.
+ """
+ try:
+ return path.__fspath__()
+ except AttributeError:
+ return str(path)
+
+
+class Path:
+ """
+ A pathlib-compatible interface for zip files.
+
+ Consider a zip file with this structure::
+
+ .
+ ├── a.txt
+ └── b
+ ├── c.txt
+ └── d
+ └── e.txt
+
+ >>> data = io.BytesIO()
+ >>> zf = zipfile.ZipFile(data, 'w')
+ >>> zf.writestr('a.txt', 'content of a')
+ >>> zf.writestr('b/c.txt', 'content of c')
+ >>> zf.writestr('b/d/e.txt', 'content of e')
+ >>> zf.filename = 'mem/abcde.zip'
+
+ Path accepts the zipfile object itself or a filename
+
+ >>> root = Path(zf)
+
+ From there, several path operations are available.
+
+ Directory iteration (including the zip file itself):
+
+ >>> a, b = root.iterdir()
+ >>> a
+ Path('mem/abcde.zip', 'a.txt')
+ >>> b
+ Path('mem/abcde.zip', 'b/')
+
+ name property:
+
+ >>> b.name
+ 'b'
+
+ join with divide operator:
+
+ >>> c = b / 'c.txt'
+ >>> c
+ Path('mem/abcde.zip', 'b/c.txt')
+ >>> c.name
+ 'c.txt'
+
+ Read text:
+
+ >>> c.read_text()
+ 'content of c'
+
+ existence:
+
+ >>> c.exists()
+ True
+ >>> (b / 'missing.txt').exists()
+ False
+
+ Coercion to string:
+
+ >>> import os
+ >>> str(c).replace(os.sep, posixpath.sep)
+ 'mem/abcde.zip/b/c.txt'
+
+ At the root, ``name``, ``filename``, and ``parent``
+ resolve to the zipfile. Note these attributes are not
+ valid and will raise a ``ValueError`` if the zipfile
+ has no filename.
+
+ >>> root.name
+ 'abcde.zip'
+ >>> str(root.filename).replace(os.sep, posixpath.sep)
+ 'mem/abcde.zip'
+ >>> str(root.parent)
+ 'mem'
+ """
+
+ __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
+
+ def __init__(self, root, at=""):
+ """
+ Construct a Path from a ZipFile or filename.
+
+ Note: When the source is an existing ZipFile object,
+ its type (__class__) will be mutated to a
+ specialized type. If the caller wishes to retain the
+ original type, the caller should either create a
+ separate ZipFile object or pass a filename.
+ """
+ self.root = FastLookup.make(root)
+ self.at = at
+
+ def open(self, mode='r', *args, pwd=None, **kwargs):
+ """
+ Open this entry as text or binary following the semantics
+ of ``pathlib.Path.open()`` by passing arguments through
+ to io.TextIOWrapper().
+ """
+ if self.is_dir():
+ raise IsADirectoryError(self)
+ zip_mode = mode[0]
+ if not self.exists() and zip_mode == 'r':
+ raise FileNotFoundError(self)
+ stream = self.root.open(self.at, zip_mode, pwd=pwd)
+ if 'b' in mode:
+ if args or kwargs:
+ raise ValueError("encoding args invalid for binary operation")
+ return stream
+ return io.TextIOWrapper(stream, *args, **kwargs)
+
+ @property
+ def name(self):
+ return pathlib.Path(self.at).name or self.filename.name
+
+ @property
+ def filename(self):
+ return pathlib.Path(self.root.filename).joinpath(self.at)
+
+ def read_text(self, *args, **kwargs):
+ with self.open('r', *args, **kwargs) as strm:
+ return strm.read()
+
+ def read_bytes(self):
+ with self.open('rb') as strm:
+ return strm.read()
+
+ def _is_child(self, path):
+ return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
+
+ def _next(self, at):
+ return self.__class__(self.root, at)
+
+ def is_dir(self):
+ return not self.at or self.at.endswith("/")
+
+ def is_file(self):
+ return self.exists() and not self.is_dir()
+
+ def exists(self):
+ return self.at in self.root._name_set()
+
+ def iterdir(self):
+ if not self.is_dir():
+ raise ValueError("Can't listdir a file")
+ subs = map(self._next, self.root.namelist())
+ return filter(self._is_child, subs)
+
+ def __str__(self):
+ return posixpath.join(self.root.filename, self.at)
+
+ def __repr__(self):
+ return self.__repr.format(self=self)
+
+ def joinpath(self, *other):
+ next = posixpath.join(self.at, *map(_pathlib_compat, other))
+ return self._next(self.root.resolve_dir(next))
+
+ __truediv__ = joinpath
+
+ @property
+ def parent(self):
+ if not self.at:
+ return self.filename.parent
+ parent_at = posixpath.dirname(self.at.rstrip('/'))
+ if parent_at:
+ parent_at += '/'
+ return self._next(parent_at)
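
Another brief sketch (not part of the patch), tracing how _parents, _difference and _dedupe cooperate inside CompleteDirs._implied_dirs in the 3.4.0 sources above; the names list is hypothetical.

import itertools
import posixpath

names = ['a.txt', 'b/c.txt', 'b/d/e.txt']
parents = itertools.chain.from_iterable(map(_parents, names))
as_dirs = (p + posixpath.sep for p in parents)
# Parent directories in first-seen order, minus entries already present.
assert list(_dedupe(_difference(as_dirs, names))) == ['b/', 'b/d/']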
diff --git a/third_party/python/virtualenv/distributions.json b/third_party/python/virtualenv/distributions.json
new file mode 100644
index 0000000000..7a56846d1c
--- /dev/null
+++ b/third_party/python/virtualenv/distributions.json
@@ -0,0 +1,83 @@
+{
+ "3.9": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info"
+ }
+ },
+ "3.8": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info"
+ }
+ },
+ "3.7": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info",
+ "importlib_metadata": "__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info",
+ "zipp": "__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info"
+ }
+ },
+ "3.6": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info",
+ "importlib_metadata": "__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata-3.1.1.dist-info",
+ "zipp": "__virtualenv__/zipp-3.4.0-py3-none-any/zipp-3.4.0.dist-info",
+ "importlib_resources": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources-3.3.0.dist-info"
+ }
+ },
+ "3.5": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info",
+ "importlib_metadata": "__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata-2.1.1.dist-info",
+ "zipp": "__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info",
+ "importlib_resources": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources-3.2.1.dist-info"
+ }
+ },
+ "3.4": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info",
+ "importlib_metadata": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info",
+ "zipp": "__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info",
+ "importlib_resources": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info",
+ "typing": "__virtualenv__/typing-3.7.4.1-py3-none-any/typing-3.7.4.1.dist-info",
+ "pathlib2": "__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info",
+ "scandir": "__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info"
+ }
+ },
+ "2.7": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs-1.4.4.dist-info",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib-0.3.1.dist-info",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock-3.0.12.dist-info",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six-1.15.0.dist-info",
+ "importlib_metadata": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata-1.1.3.dist-info",
+ "zipp": "__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp-1.2.0.dist-info",
+ "importlib_resources": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources-1.0.2.dist-info",
+ "typing": "__virtualenv__/typing-3.7.4.3-py2-none-any/typing-3.7.4.3.dist-info",
+ "pathlib2": "__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info",
+ "scandir": "__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir-1.10.0.dist-info",
+ "contextlib2": "__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2-0.6.0.post1.dist-info",
+ "configparser": "__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser-4.0.2.dist-info"
+ },
+ "!=win32": {
+ "pathlib2": "__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2-2.3.5.dist-info"
+ }
+ }
+} \ No newline at end of file
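Both distributions.json and the modules.json that follows map a Python version to platform-selector groups ("==any", "!=win32") and then to paths under the `__virtualenv__` extraction root: distributions.json points at each vendored wheel's .dist-info directory, modules.json at the importable source files. A rough sketch of looking an entry up; the helper name and its treatment of the selector keys are illustrative only, not the loader virtualenv actually ships:

```python
import json
from pathlib import Path


def find_dist_info(json_path, py_version, package):
    """Return (selector, dist-info path) for a vendored package, or None."""
    data = json.loads(Path(json_path).read_text())
    # Search every platform-selector group registered for this Python version.
    for selector, packages in data.get(py_version, {}).items():
        if package in packages:
            return selector, packages[package]
    return None


# Example lookup (run from the directory containing distributions.json):
# find_dist_info("distributions.json", "3.7", "importlib_metadata")
# -> ('==any', '__virtualenv__/importlib_metadata-3.1.1-py3-none-any/...dist-info')
```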
diff --git a/third_party/python/virtualenv/modules.json b/third_party/python/virtualenv/modules.json
new file mode 100644
index 0000000000..de3c039da7
--- /dev/null
+++ b/third_party/python/virtualenv/modules.json
@@ -0,0 +1,314 @@
+{
+ "3.9": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py",
+ "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py",
+ "distlib.database": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py",
+ "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py",
+ "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py",
+ "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py",
+ "distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py",
+ "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py",
+ "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py",
+ "distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py",
+ "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py",
+ "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py",
+ "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py",
+ "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py",
+ "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py",
+ "distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py",
+ "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py",
+ "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py"
+ }
+ },
+ "3.8": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py",
+ "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py",
+ "distlib.database": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py",
+ "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py",
+ "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py",
+ "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py",
+ "distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py",
+ "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py",
+ "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py",
+ "distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py",
+ "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py",
+ "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py",
+ "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py",
+ "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py",
+ "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py",
+ "distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py",
+ "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py",
+ "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py"
+ }
+ },
+ "3.7": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py",
+ "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py",
+ "distlib.database": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py",
+ "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py",
+ "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py",
+ "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py",
+ "distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py",
+ "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py",
+ "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py",
+ "distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py",
+ "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py",
+ "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py",
+ "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py",
+ "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py",
+ "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py",
+ "distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py",
+ "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py",
+ "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py",
+ "importlib_metadata": "__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/__init__.py",
+ "importlib_metadata._compat": "__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/_compat.py",
+ "zipp": "__virtualenv__/zipp-3.4.0-py3-none-any/zipp.py"
+ }
+ },
+ "3.6": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py",
+ "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py",
+ "distlib.database": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py",
+ "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py",
+ "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py",
+ "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py",
+ "distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py",
+ "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py",
+ "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py",
+ "distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py",
+ "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py",
+ "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py",
+ "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py",
+ "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py",
+ "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py",
+ "distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py",
+ "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py",
+ "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py",
+ "importlib_metadata": "__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/__init__.py",
+ "importlib_metadata._compat": "__virtualenv__/importlib_metadata-3.1.1-py3-none-any/importlib_metadata/_compat.py",
+ "zipp": "__virtualenv__/zipp-3.4.0-py3-none-any/zipp.py",
+ "importlib_resources": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/__init__.py",
+ "importlib_resources._common": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_common.py",
+ "importlib_resources._compat": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_compat.py",
+ "importlib_resources._py2": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py2.py",
+ "importlib_resources._py3": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/_py3.py",
+ "importlib_resources.abc": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/abc.py",
+ "importlib_resources.readers": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/readers.py",
+ "importlib_resources.trees": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/trees.py",
+ "importlib_resources.tests": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/__init__.py",
+ "importlib_resources.tests._compat": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/_compat.py",
+ "importlib_resources.tests.py27compat": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/py27compat.py",
+ "importlib_resources.tests.test_files": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/test_files.py",
+ "importlib_resources.tests.test_open": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/test_open.py",
+ "importlib_resources.tests.test_path": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/test_path.py",
+ "importlib_resources.tests.test_read": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/test_read.py",
+ "importlib_resources.tests.test_reader": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/test_reader.py",
+ "importlib_resources.tests.test_resource": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/test_resource.py",
+ "importlib_resources.tests.util": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/util.py",
+ "importlib_resources.tests.data01": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/data01/__init__.py",
+ "importlib_resources.tests.data01.subdirectory": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/data01/subdirectory/__init__.py",
+ "importlib_resources.tests.data02": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/data02/__init__.py",
+ "importlib_resources.tests.data02.one": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/data02/one/__init__.py",
+ "importlib_resources.tests.data02.two": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/data02/two/__init__.py",
+ "importlib_resources.tests.zipdata01": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/zipdata01/__init__.py",
+ "importlib_resources.tests.zipdata02": "__virtualenv__/importlib_resources-3.3.0-py2.py3-none-any/importlib_resources/tests/zipdata02/__init__.py"
+ }
+ },
+ "3.5": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py",
+ "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py",
+ "distlib.database": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py",
+ "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py",
+ "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py",
+ "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py",
+ "distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py",
+ "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py",
+ "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py",
+ "distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py",
+ "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py",
+ "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py",
+ "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py",
+ "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py",
+ "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py",
+ "distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py",
+ "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py",
+ "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py",
+ "importlib_metadata": "__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/__init__.py",
+ "importlib_metadata._compat": "__virtualenv__/importlib_metadata-2.1.1-py2.py3-none-any/importlib_metadata/_compat.py",
+ "zipp": "__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp.py",
+ "importlib_resources": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/__init__.py",
+ "importlib_resources._common": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_common.py",
+ "importlib_resources._compat": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_compat.py",
+ "importlib_resources._py2": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_py2.py",
+ "importlib_resources._py3": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/_py3.py",
+ "importlib_resources.abc": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/abc.py",
+ "importlib_resources.readers": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/readers.py",
+ "importlib_resources.trees": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/trees.py",
+ "importlib_resources.tests": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/__init__.py",
+ "importlib_resources.tests._compat": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/_compat.py",
+ "importlib_resources.tests.py27compat": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/py27compat.py",
+ "importlib_resources.tests.test_files": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/test_files.py",
+ "importlib_resources.tests.test_open": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/test_open.py",
+ "importlib_resources.tests.test_path": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/test_path.py",
+ "importlib_resources.tests.test_read": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/test_read.py",
+ "importlib_resources.tests.test_reader": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/test_reader.py",
+ "importlib_resources.tests.test_resource": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/test_resource.py",
+ "importlib_resources.tests.util": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/util.py",
+ "importlib_resources.tests.data01": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/data01/__init__.py",
+ "importlib_resources.tests.data01.subdirectory": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/data01/subdirectory/__init__.py",
+ "importlib_resources.tests.data02": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/data02/__init__.py",
+ "importlib_resources.tests.data02.one": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/data02/one/__init__.py",
+ "importlib_resources.tests.data02.two": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/data02/two/__init__.py",
+ "importlib_resources.tests.zipdata01": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/zipdata01/__init__.py",
+ "importlib_resources.tests.zipdata02": "__virtualenv__/importlib_resources-3.2.1-py2.py3-none-any/importlib_resources/tests/zipdata02/__init__.py"
+ }
+ },
+ "3.4": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py",
+ "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py",
+ "distlib.database": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py",
+ "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py",
+ "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py",
+ "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py",
+ "distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py",
+ "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py",
+ "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py",
+ "distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py",
+ "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py",
+ "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py",
+ "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py",
+ "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py",
+ "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py",
+ "distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py",
+ "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py",
+ "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py",
+ "importlib_metadata": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/__init__.py",
+ "importlib_metadata._compat": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/_compat.py",
+ "importlib_metadata.docs": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/docs/__init__.py",
+ "importlib_metadata.docs.conf": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/docs/conf.py",
+ "importlib_metadata.tests": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/__init__.py",
+ "importlib_metadata.tests.fixtures": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/fixtures.py",
+ "importlib_metadata.tests.test_api": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_api.py",
+ "importlib_metadata.tests.test_integration": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_integration.py",
+ "importlib_metadata.tests.test_main": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_main.py",
+ "importlib_metadata.tests.test_zip": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_zip.py",
+ "importlib_metadata.tests.data": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/data/__init__.py",
+ "zipp": "__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp.py",
+ "importlib_resources": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/__init__.py",
+ "importlib_resources._compat": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_compat.py",
+ "importlib_resources._py2": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py2.py",
+ "importlib_resources._py3": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py3.py",
+ "importlib_resources.abc": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/abc.py",
+ "importlib_resources.docs.conf": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/docs/conf.py",
+ "importlib_resources.tests": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/__init__.py",
+ "importlib_resources.tests.test_open": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_open.py",
+ "importlib_resources.tests.test_path": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_path.py",
+ "importlib_resources.tests.test_read": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_read.py",
+ "importlib_resources.tests.test_resource": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_resource.py",
+ "importlib_resources.tests.util": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/util.py",
+ "importlib_resources.tests.data01": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data01/__init__.py",
+ "importlib_resources.tests.data01.subdirectory": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data01/subdirectory/__init__.py",
+ "importlib_resources.tests.data02": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data02/__init__.py",
+ "importlib_resources.tests.data02.one": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data02/one/__init__.py",
+ "importlib_resources.tests.data02.two": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data02/two/__init__.py",
+ "importlib_resources.tests.data03": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data03/__init__.py",
+ "importlib_resources.tests.zipdata01": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/zipdata01/__init__.py",
+ "importlib_resources.tests.zipdata02": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/zipdata02/__init__.py",
+ "typing": "__virtualenv__/typing-3.7.4.1-py3-none-any/typing.py",
+ "pathlib2": "__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2/__init__.py",
+ "scandir": "__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir.py"
+ }
+ },
+ "2.7": {
+ "==any": {
+ "appdirs": "__virtualenv__/appdirs-1.4.4-py2.py3-none-any/appdirs.py",
+ "distlib": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/__init__.py",
+ "distlib.compat": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/compat.py",
+ "distlib.database": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/database.py",
+ "distlib.index": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/index.py",
+ "distlib.locators": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/locators.py",
+ "distlib.manifest": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/manifest.py",
+ "distlib.markers": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/markers.py",
+ "distlib.metadata": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/metadata.py",
+ "distlib.resources": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/resources.py",
+ "distlib.scripts": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/scripts.py",
+ "distlib.util": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/util.py",
+ "distlib.version": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/version.py",
+ "distlib.wheel": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/wheel.py",
+ "distlib._backport": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/__init__.py",
+ "distlib._backport.misc": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/misc.py",
+ "distlib._backport.shutil": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/shutil.py",
+ "distlib._backport.sysconfig": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/sysconfig.py",
+ "distlib._backport.tarfile": "__virtualenv__/distlib-0.3.1-py2.py3-none-any/distlib/_backport/tarfile.py",
+ "filelock": "__virtualenv__/filelock-3.0.12-py3-none-any/filelock.py",
+ "six": "__virtualenv__/six-1.15.0-py2.py3-none-any/six.py",
+ "importlib_metadata": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/__init__.py",
+ "importlib_metadata._compat": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/_compat.py",
+ "importlib_metadata.docs": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/docs/__init__.py",
+ "importlib_metadata.docs.conf": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/docs/conf.py",
+ "importlib_metadata.tests": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/__init__.py",
+ "importlib_metadata.tests.fixtures": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/fixtures.py",
+ "importlib_metadata.tests.test_api": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_api.py",
+ "importlib_metadata.tests.test_integration": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_integration.py",
+ "importlib_metadata.tests.test_main": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_main.py",
+ "importlib_metadata.tests.test_zip": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/test_zip.py",
+ "importlib_metadata.tests.data": "__virtualenv__/importlib_metadata-1.1.3-py2.py3-none-any/importlib_metadata/tests/data/__init__.py",
+ "zipp": "__virtualenv__/zipp-1.2.0-py2.py3-none-any/zipp.py",
+ "importlib_resources": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/__init__.py",
+ "importlib_resources._compat": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_compat.py",
+ "importlib_resources._py2": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py2.py",
+ "importlib_resources._py3": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/_py3.py",
+ "importlib_resources.abc": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/abc.py",
+ "importlib_resources.docs.conf": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/docs/conf.py",
+ "importlib_resources.tests": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/__init__.py",
+ "importlib_resources.tests.test_open": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_open.py",
+ "importlib_resources.tests.test_path": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_path.py",
+ "importlib_resources.tests.test_read": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_read.py",
+ "importlib_resources.tests.test_resource": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/test_resource.py",
+ "importlib_resources.tests.util": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/util.py",
+ "importlib_resources.tests.data01": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data01/__init__.py",
+ "importlib_resources.tests.data01.subdirectory": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data01/subdirectory/__init__.py",
+ "importlib_resources.tests.data02": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data02/__init__.py",
+ "importlib_resources.tests.data02.one": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data02/one/__init__.py",
+ "importlib_resources.tests.data02.two": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data02/two/__init__.py",
+ "importlib_resources.tests.data03": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/data03/__init__.py",
+ "importlib_resources.tests.zipdata01": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/zipdata01/__init__.py",
+ "importlib_resources.tests.zipdata02": "__virtualenv__/importlib_resources-1.0.2-py2.py3-none-any/importlib_resources/tests/zipdata02/__init__.py",
+ "typing": "__virtualenv__/typing-3.7.4.3-py2-none-any/typing.py",
+ "pathlib2": "__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2/__init__.py",
+ "scandir": "__virtualenv__/scandir-1.10.0-cp39-cp39-macosx_10_15_x86_64/scandir.py",
+ "contextlib2": "__virtualenv__/contextlib2-0.6.0.post1-py2.py3-none-any/contextlib2.py",
+ "configparser": "__virtualenv__/configparser-4.0.2-py2.py3-none-any/configparser.py",
+ "backports": "__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/__init__.py",
+ "backports.configparser": "__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/configparser/__init__.py",
+ "backports.configparser.helpers": "__virtualenv__/configparser-4.0.2-py2.py3-none-any/backports/configparser/helpers.py"
+ },
+ "!=win32": {
+ "pathlib2": "__virtualenv__/pathlib2-2.3.5-py2.py3-none-any/pathlib2/__init__.py"
+ }
+ }
+} \ No newline at end of file
diff --git a/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/LICENSE b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/LICENSE
new file mode 100644
index 0000000000..be9700d61a
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/LICENSE
@@ -0,0 +1,20 @@
+Copyright (c) 2020-202x The virtualenv developers
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/METADATA b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/METADATA
new file mode 100644
index 0000000000..9c0e3ad119
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/METADATA
@@ -0,0 +1,92 @@
+Metadata-Version: 2.1
+Name: virtualenv
+Version: 20.2.2
+Summary: Virtual Python Environment builder
+Home-page: https://virtualenv.pypa.io/
+Author: Bernat Gabor
+Author-email: gaborjbernat@gmail.com
+Maintainer: Bernat Gabor
+Maintainer-email: gaborjbernat@gmail.com
+License: MIT
+Project-URL: Source, https://github.com/pypa/virtualenv
+Project-URL: Tracker, https://github.com/pypa/virtualenv/issues
+Keywords: virtual,environments,isolated
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: POSIX
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development :: Testing
+Classifier: Topic :: Utilities
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7
+Description-Content-Type: text/markdown
+Requires-Dist: appdirs (<2,>=1.4.3)
+Requires-Dist: distlib (<1,>=0.3.1)
+Requires-Dist: filelock (<4,>=3.0.0)
+Requires-Dist: six (<2,>=1.9.0)
+Requires-Dist: pathlib2 (<3,>=2.3.3) ; python_version < "3.4" and sys_platform != "win32"
+Requires-Dist: importlib-resources (>=1.0) ; python_version < "3.7"
+Requires-Dist: importlib-metadata (>=0.12) ; python_version < "3.8"
+Provides-Extra: docs
+Requires-Dist: proselint (>=0.10.2) ; extra == 'docs'
+Requires-Dist: sphinx (>=3) ; extra == 'docs'
+Requires-Dist: sphinx-argparse (>=0.2.5) ; extra == 'docs'
+Requires-Dist: sphinx-rtd-theme (>=0.4.3) ; extra == 'docs'
+Requires-Dist: towncrier (>=19.9.0rc1) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: coverage (>=4) ; extra == 'testing'
+Requires-Dist: coverage-enable-subprocess (>=1) ; extra == 'testing'
+Requires-Dist: flaky (>=3) ; extra == 'testing'
+Requires-Dist: pytest (>=4) ; extra == 'testing'
+Requires-Dist: pytest-env (>=0.6.2) ; extra == 'testing'
+Requires-Dist: pytest-freezegun (>=0.4.1) ; extra == 'testing'
+Requires-Dist: pytest-mock (>=2) ; extra == 'testing'
+Requires-Dist: pytest-randomly (>=1) ; extra == 'testing'
+Requires-Dist: pytest-timeout (>=1) ; extra == 'testing'
+Requires-Dist: pytest-xdist (>=1.31.0) ; extra == 'testing'
+Requires-Dist: packaging (>=20.0) ; (python_version > "3.4") and extra == 'testing'
+Requires-Dist: xonsh (>=0.9.16) ; (python_version > "3.4" and python_version != "3.9") and extra == 'testing'
+
+# virtualenv
+
+[![PyPI](https://img.shields.io/pypi/v/virtualenv?style=flat-square)](https://pypi.org/project/virtualenv)
+[![PyPI - Implementation](https://img.shields.io/pypi/implementation/virtualenv?style=flat-square)](https://pypi.org/project/virtualenv)
+[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/virtualenv?style=flat-square)](https://pypi.org/project/virtualenv)
+[![Documentation](https://readthedocs.org/projects/virtualenv/badge/?version=latest&style=flat-square)](http://virtualenv.pypa.io)
+[![Gitter Chat](https://img.shields.io/gitter/room/pypa/virtualenv?color=FF004F&style=flat-square)](https://gitter.im/pypa/virtualenv)
+[![PyPI - Downloads](https://img.shields.io/pypi/dm/virtualenv?style=flat-square)](https://pypistats.org/packages/virtualenv)
+[![PyPI - License](https://img.shields.io/pypi/l/virtualenv?style=flat-square)](https://opensource.org/licenses/MIT)
+[![Build Status](https://github.com/pypa/virtualenv/workflows/check/badge.svg?branch=main&event=push)](https://github.com/pypa/virtualenv/actions?query=workflow%3Acheck)
+[![codecov](https://codecov.io/gh/pypa/virtualenv/branch/main/graph/badge.svg)](https://codecov.io/gh/pypa/virtualenv)
+[![Code style:
+black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat-square)](https://github.com/psf/black)
+
+A tool for creating isolated `virtual` python environments.
+
+- [Installation](https://virtualenv.pypa.io/en/latest/installation.html)
+- [Documentation](https://virtualenv.pypa.io)
+- [Changelog](https://virtualenv.pypa.io/en/latest/changelog.html)
+- [Issues](https://github.com/pypa/virtualenv/issues)
+- [PyPI](https://pypi.org/project/virtualenv)
+- [Github](https://github.com/pypa/virtualenv)
+
+## Code of Conduct
+
+Everyone interacting in the virtualenv project's codebases, issue trackers, chat rooms, and mailing lists is expected to
+follow the [PSF Code of Conduct](https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md).
+
+
diff --git a/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/RECORD b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/RECORD
new file mode 100644
index 0000000000..b8808400e6
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/RECORD
@@ -0,0 +1,122 @@
+virtualenv/__init__.py,sha256=SMvpjz4VJ3vJ_yfDDPzJAdi2GJOYd_UBXXuvImO07gk,205
+virtualenv/__main__.py,sha256=QMwDqrR4QbhEivl8yoRmAr6G1BY92gr4n1ConcDIxc4,2770
+virtualenv/info.py,sha256=-2pI_kyC9fNj5OR8AQWkKjlpOk4_96Lmbco3atYYBdY,1921
+virtualenv/report.py,sha256=M2OHHCWdOHZsn74tj1MYYKmaI3QRJF8VA1FZIdkQTMQ,1594
+virtualenv/version.py,sha256=T9L0FIrWWe1IEvi_PNtZQcEIf_WbHAtFeLA1_hwZ07I,65
+virtualenv/activation/__init__.py,sha256=jLIERxJXMnHq2fH49RdWqBoaiASres4CTKMdUJOeos0,480
+virtualenv/activation/activator.py,sha256=CXomkRvhzcAeygYlDwQdDjfPyZQG85aBab5GIVQPv2M,1341
+virtualenv/activation/via_template.py,sha256=U8LgH-lyTjXIQBUdbd0xOZpXNICpiKhsfpiZwzQg7tU,2372
+virtualenv/activation/bash/__init__.py,sha256=7aC1WfvyzgFrIQs13jOuESuAbuiAnTsKkOe0iReRoaE,312
+virtualenv/activation/bash/activate.sh,sha256=aHia5vyXg2JwymkvRXCp29Aswcg88Mz5UrssXbX9Jjc,2398
+virtualenv/activation/batch/__init__.py,sha256=K0gVfwuXV7uoaMDL7moWGCq7uTDzI64giZzQQ8s2qnU,733
+virtualenv/activation/batch/activate.bat,sha256=PeQnWWsjvHT-jIWhYI7hbdzkDBZx5UOstnsCmq5PYtw,1031
+virtualenv/activation/batch/deactivate.bat,sha256=6OznnO-HC2wnWUN7YAT-bj815zeKMXEPC0keyBYwKUU,510
+virtualenv/activation/batch/pydoc.bat,sha256=pVuxn8mn9P_Rd0349fiBEiwIuMvfJQSfgJ2dljUT2fA,24
+virtualenv/activation/cshell/__init__.py,sha256=pw4s5idqQhaEccPxadETEvilBcoxW-UkVQ-RNqPyVCQ,344
+virtualenv/activation/cshell/activate.csh,sha256=jYwms8OTiVu9MJwXltuEm43HU09BJUqkrVqyj4sjpDA,1468
+virtualenv/activation/fish/__init__.py,sha256=hDkJq1P1wK2qm6BXydXWA9GMkBpj-TaejbKSceFnGZU,251
+virtualenv/activation/fish/activate.fish,sha256=V7nVwSI_nsFEMlJjSQxCayNWkjubXi1KSgSw1bEakh8,3099
+virtualenv/activation/powershell/__init__.py,sha256=EA-73s5TUMkgxAhLwucFg3gsBwW5huNh7qB4I7uEU-U,256
+virtualenv/activation/powershell/activate.ps1,sha256=jVw_FwfVJzcByQ3Sku-wlnOo_a0-OSpAQ8R17kXVgIM,1807
+virtualenv/activation/python/__init__.py,sha256=Uv53LqOrIT_2dO1FXcUYAnwH1eypG8CJ2InhSx1GRI4,1323
+virtualenv/activation/python/activate_this.py,sha256=Xpz7exdGSjmWk0KfwHLofIpDPUtazNSNGrxT0-5ZG_s,1208
+virtualenv/activation/xonsh/__init__.py,sha256=7NUevd5EpHRMZdSyR1KgFTe9QQBO94zZOwFH6MR6zjo,355
+virtualenv/activation/xonsh/activate.xsh,sha256=qkKgWfrUjYKrgrmhf45VuBz99EMadtiNU8GMfHZZ7AU,1172
+virtualenv/app_data/__init__.py,sha256=nwgqY-Our_SYcDisLfRLmWrTSPytDkjck9-lzg-pOI8,1462
+virtualenv/app_data/base.py,sha256=dbS5Maob1-Cqs6EVqTmmbjAGeNYA1iw3pmdgYPWCJak,2129
+virtualenv/app_data/na.py,sha256=iMRVpCe4m5Q5WM5bC3ee1wYyfkfHvkcQ-8tgIw4druc,1306
+virtualenv/app_data/read_only.py,sha256=MD-4Bl2SZZiGw0g8qZy0YLBGZGCuFYXnAEvWboF1PSc,1006
+virtualenv/app_data/via_disk_folder.py,sha256=CdNXQkenyH178MtSs2Ve6uDUs30-oZpkOz_1guTtTz0,5597
+virtualenv/app_data/via_tempdir.py,sha256=Z_-PoU7qeZe-idzi3nqys4FX0rfsRgOQ9_7XwX3hxSA,770
+virtualenv/config/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57
+virtualenv/config/convert.py,sha256=WYGjMRKVriZkfTH3z1fI0sDQRZxCxAedqWbOGsaquyg,2693
+virtualenv/config/env_var.py,sha256=48XpOurSLLjMX-kXjvOpZuAoOUP-LvnbotTlmebhhFk,844
+virtualenv/config/ini.py,sha256=neMqXrA6IOkLF_M_MCQWQSeqNm4CT8tj_h3GdbJv1Cg,2783
+virtualenv/config/cli/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57
+virtualenv/config/cli/parser.py,sha256=y5IqHccLBqFpocpE75X611nVrP8v394VW94a9GAojvE,4524
+virtualenv/create/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57
+virtualenv/create/creator.py,sha256=4jxxEGXCWd6tInT37QNt-13_yDtcIJdPB6EkoYzDkbM,8889
+virtualenv/create/debug.py,sha256=ETOke8w4Ib8fiufAHVeOkH3v0zrztljw3WjGvZyE0Mk,3342
+virtualenv/create/describe.py,sha256=bm0V2wpFOjdN_MkzZuJAEBSttmi5YGPVwxtwGYU5zQU,3561
+virtualenv/create/pyenv_cfg.py,sha256=VsOGfzUpaVCO3J29zrhIeip4jZ4b7llbe45iOQAIRGg,1717
+virtualenv/create/via_global_ref/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+virtualenv/create/via_global_ref/_virtualenv.py,sha256=aEuMB5MrpKhKwuWumv5J7wTpK6w9jUGR1FXPCdCT5fw,5662
+virtualenv/create/via_global_ref/api.py,sha256=5MPq3XJBuUOBj53oIigeWWPm68M-J_E644WWbz37qOU,4357
+virtualenv/create/via_global_ref/store.py,sha256=cqLBEhQ979xHnlidqmxlDjsvj2Wr-mBo7shvGQSEBxU,685
+virtualenv/create/via_global_ref/venv.py,sha256=p5RkDcXhr1pmOwnl1dpS06UYHmfNVy2ld4sTwsYjYWU,2955
+virtualenv/create/via_global_ref/builtin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+virtualenv/create/via_global_ref/builtin/builtin_way.py,sha256=hO22nT-itVoYiy8wXrXXYzHw86toCp_Uq-cURR7w6ck,546
+virtualenv/create/via_global_ref/builtin/ref.py,sha256=xCTICJhE-OiopBxl6ymo1P1NqgK3KEF8ZUOtQDtDTVA,5477
+virtualenv/create/via_global_ref/builtin/via_global_self_do.py,sha256=d569fX7fjq5vHvGGXDjo-1Xi__HhqU2xjDJOuYrmGjw,4552
+virtualenv/create/via_global_ref/builtin/cpython/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57
+virtualenv/create/via_global_ref/builtin/cpython/common.py,sha256=U7EvB9-2DlOQTGrTyPrEcItEbJ1sFBzo1EAOcAIjQ5Q,2392
+virtualenv/create/via_global_ref/builtin/cpython/cpython2.py,sha256=p41H2g6wAqhJzeUU48nH3u05-yWEbwCzhyj4pn8rnm4,3757
+virtualenv/create/via_global_ref/builtin/cpython/cpython3.py,sha256=gguQAhTQb0PH7Xg-G-mgQm5LlhyyW0V0piV3LwI-PeM,3111
+virtualenv/create/via_global_ref/builtin/cpython/mac_os.py,sha256=B0Lqgo8geZBSKSpHWUB46lDYRggW4Kg2AZUp3Z7xn9M,12382
+virtualenv/create/via_global_ref/builtin/pypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+virtualenv/create/via_global_ref/builtin/pypy/common.py,sha256=-t-TZxCTJwpIh_oRsDyv5IilH19jKqJrZa27zWN_8Ws,1816
+virtualenv/create/via_global_ref/builtin/pypy/pypy2.py,sha256=bmMY_KJZ1iD_ifq-X9ZBOlOpJ1aN7839qigBgnWRIdA,3535
+virtualenv/create/via_global_ref/builtin/pypy/pypy3.py,sha256=ti6hmOIC4HiTBnEYKytO-d9wH-eLeMoQxQ0kZRhnNrw,1751
+virtualenv/create/via_global_ref/builtin/python2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+virtualenv/create/via_global_ref/builtin/python2/python2.py,sha256=jkJwmkeJVTzwzo95eMIptTfdBA-qmyIqZcpt48iOitU,4276
+virtualenv/create/via_global_ref/builtin/python2/site.py,sha256=4uguJDuWPmB25yBmpsMYKLOnIVXkerck0UO8CP8F2c4,6078
+virtualenv/discovery/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57
+virtualenv/discovery/builtin.py,sha256=rB6XaQwuK1HfvJsrla3BoSQUH9QkJnwKHGWBdbK4QGM,5432
+virtualenv/discovery/cached_py_info.py,sha256=l2lELE8YkwKXCNopImY2VjmpHPTawh1d3qmdsXMtkRs,5043
+virtualenv/discovery/discover.py,sha256=evJYn4APkwjNmdolNeIBSHiOudkvN59c5oVYI2Zsjlg,1209
+virtualenv/discovery/py_info.py,sha256=QtZFq0xav1tEpKI5seEJaEOkc_FXer21Gzgl_Ccqy98,21793
+virtualenv/discovery/py_spec.py,sha256=wQhLzCfXoSPsAAO9nm5-I2lNolVDux4W2vPSUfJGjlc,4790
+virtualenv/discovery/windows/__init__.py,sha256=TPbnzCtRyw47pRVHTO8ikwljNcczxmSLDdWtwasxvQU,1036
+virtualenv/discovery/windows/pep514.py,sha256=YYiaJzo-XuMtO78BMFMAudqkeJiLQkFnUTOuQZ5lJz8,5451
+virtualenv/run/__init__.py,sha256=lVIiIq_LoMHUGYkrTSx0tpFG_aYywy_u6GWUReHRcUA,5777
+virtualenv/run/session.py,sha256=S4NZiHzij1vp895mN9s9ZwYobJjjdP37QOHCb1o-Ufo,2563
+virtualenv/run/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+virtualenv/run/plugin/activators.py,sha256=kmHShj36eHfbnsiAJzX0U5LYvGhe0WkRYjbuKDz6gVM,2117
+virtualenv/run/plugin/base.py,sha256=-2185C01PaxOG7gnMbWWyZlo24n_FYo5J5_naeNZw8s,1934
+virtualenv/run/plugin/creators.py,sha256=PIxJ85KmrQU7lUO-r8Znxbb4lTEzwHggc9lcDqmt2tc,3494
+virtualenv/run/plugin/discovery.py,sha256=3ykxRvPA1FJMkqsbr2TV0LBRPT5UCFeJdzEHfuEjxRM,1002
+virtualenv/run/plugin/seeders.py,sha256=c1mhzu0HNzKdif6YUV35fuAOS0XHFJz3TtccLW5fWG0,1074
+virtualenv/seed/__init__.py,sha256=8ArZTco6Meo0W9i4dqnwmDO8BJYTaHX7oQx1o06vCm4,57
+virtualenv/seed/seeder.py,sha256=DSGE_8Ycj01vj8mkppUBA9h7JG76XsVBMt-5MWlMF6k,1178
+virtualenv/seed/embed/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+virtualenv/seed/embed/base_embed.py,sha256=46mWtqWj_MjOQEqMJyosL0RWGL6HwrHAL2r1Jxc9DuI,4182
+virtualenv/seed/embed/pip_invoke.py,sha256=EMVwIeoW15SuorJ8z_-vBxPXwQJLS0ILA0Va9zNoOLI,2127
+virtualenv/seed/embed/via_app_data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+virtualenv/seed/embed/via_app_data/via_app_data.py,sha256=NkVhEFv4iuKG0qvEg4AAmucMwmQgNaPLB-Syepzgps0,5994
+virtualenv/seed/embed/via_app_data/pip_install/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+virtualenv/seed/embed/via_app_data/pip_install/base.py,sha256=rnR60JzM7G04cPDo2eH-aR8-iQuFXBgHJ2lQnSf0Gfs,6355
+virtualenv/seed/embed/via_app_data/pip_install/copy.py,sha256=gG2NePFHOYh-bsCf6TpsaQ_qrYhdBy67k0RWuwHSAwo,1307
+virtualenv/seed/embed/via_app_data/pip_install/symlink.py,sha256=wHCpfKobvjjaZLUSwM3FSCblZfiBFw4IQYsxwlfEEu0,2362
+virtualenv/seed/wheels/__init__.py,sha256=1J7el7lNjAwGxM4dmricrbVhSbYxs5sPzv9PTx2A6qA,226
+virtualenv/seed/wheels/acquire.py,sha256=qchqlIynLi2VP2VtdAfVfZJHbUPcLY2Ui5r7Eh-aZz8,4426
+virtualenv/seed/wheels/bundle.py,sha256=W0uVjClv9IBa50jRvPKm0jMwWnrYTEfDny2Z6bw2W7c,1835
+virtualenv/seed/wheels/periodic_update.py,sha256=HNVEuU2OYdWHW7lVO0h3NkpLkC8bu-5R7igJRXBnGDc,12792
+virtualenv/seed/wheels/util.py,sha256=Zdo76KEDqbNmM5u9JTuyu5uzEN_fQ4oj6qHOt0h0o1M,3960
+virtualenv/seed/wheels/embed/__init__.py,sha256=CLMKoeveDRyiNAdZjEtD38cepgNXkg65xzFu5OSHEus,1995
+virtualenv/seed/wheels/embed/pip-19.1.1-py2.py3-none-any.whl,sha256=mTE08EdUcbkUUsoCnUOQ3I8pisY6cSgU8QHNG220ZnY,1360957
+virtualenv/seed/wheels/embed/pip-20.3.1-py2.py3-none-any.whl,sha256=Ql55sgk5q7_6djOpEVGogq7cd1ZNkxPjWE6wQWwoxVg,1518513
+virtualenv/seed/wheels/embed/setuptools-43.0.0-py2.py3-none-any.whl,sha256=pn-qUVGe8ozYJhr_DiIbbkw3D4-4utqKo-etiUUZmWM,583228
+virtualenv/seed/wheels/embed/setuptools-44.1.1-py2.py3-none-any.whl,sha256=J6cUwJJTE05gpvpoEw94xwN-VWLE8h-PMY8q6QDRUtU,583493
+virtualenv/seed/wheels/embed/setuptools-50.3.2-py3-none-any.whl,sha256=LCQqCFb7rX775WDfSnrdkyTzQM9I30NlHpYEkkRmeUo,785194
+virtualenv/seed/wheels/embed/setuptools-51.0.0-py3-none-any.whl,sha256=jBd5NiFZRcmjfvgJraD6s2UZGVL3oSNhhDK7-sNTxSk,785164
+virtualenv/seed/wheels/embed/wheel-0.33.6-py2.py3-none-any.whl,sha256=9NoXY9O-zy4s2SoUp8kg8PAOyjD93p6pksg2aFufryg,21556
+virtualenv/seed/wheels/embed/wheel-0.36.1-py2.py3-none-any.whl,sha256=kGhk-3IsCrXy-cNbLGXjrzwAlALBCKcJwKyie8LJGHs,34788
+virtualenv/util/__init__.py,sha256=om6Hs2lH5igf5lkcSmQFiU7iMZ0Wx4dmSlMc6XW_Llg,199
+virtualenv/util/error.py,sha256=SRSZlXvMYQuJwxoUfNhlAyo3VwrAnIsZemSwPOxpjns,352
+virtualenv/util/lock.py,sha256=oFa0FcbE_TVDHOol44Mgtfa4D3ZjnVy-HSQx-y7ERKQ,4727
+virtualenv/util/six.py,sha256=_8KWXUWi3-AaFmz4LkdyNra-uNuf70vlxwjN7oeRo8g,1463
+virtualenv/util/zipapp.py,sha256=jtf4Vn7XBnjPs_B_ObIQv_x4pFlIlPKAWHYLFV59h6U,1054
+virtualenv/util/path/__init__.py,sha256=YaBAxtzGBdMu0uUtppe0ZeCHw5HhO-5zjeb3-fzyMoI,336
+virtualenv/util/path/_permission.py,sha256=XpO2vGAk_92_biD4MEQcAQq2Zc8_rpm3M3n_hMUA1rw,745
+virtualenv/util/path/_sync.py,sha256=rheUrGsCqmhMwNs-uc5rDthNSUlsOrBJPoK8KZj3O1o,2393
+virtualenv/util/path/_pathlib/__init__.py,sha256=FjKCi8scB5MnHg2fLX5REoE0bOPkMXqpBEILVTeJZGQ,2130
+virtualenv/util/path/_pathlib/via_os_path.py,sha256=fYDFAX483zVvC9hAOAC9FYtrGdZethS0vtYtKsL5r-s,3772
+virtualenv/util/subprocess/__init__.py,sha256=1UmFrdBv2sVeUfZbDcO2yZpe28AE0ULOu9dRKlpJaa0,801
+virtualenv/util/subprocess/_win_subprocess.py,sha256=SChkXAKVbpehyrHod1ld76RSdTIalrgME1rtz5jUfm0,5655
+virtualenv-20.2.2.dist-info/LICENSE,sha256=XBWRk3jFsqqrexnOpw2M3HX3aHnjJFTkwDmfi3HRcek,1074
+virtualenv-20.2.2.dist-info/METADATA,sha256=OWyC_GXU3AvST-YiGhmI2iE4ntdcBm-6Q1yCaU9Bx_U,4965
+virtualenv-20.2.2.dist-info/WHEEL,sha256=oh0NKYrTcu1i1-wgrI1cnhkjYIi8WJ-8qd9Jrr5_y4E,110
+virtualenv-20.2.2.dist-info/entry_points.txt,sha256=1DALKzYOcffJa7Q15TQlMQu0yeFXEy5W124y0aJEfYU,1615
+virtualenv-20.2.2.dist-info/top_level.txt,sha256=JV-LVlC8YeIw1DgiYI0hEot7tgFy5IWdKVcSG7NyzaI,11
+virtualenv-20.2.2.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+virtualenv-20.2.2.dist-info/RECORD,,
diff --git a/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/WHEEL b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/WHEEL
new file mode 100644
index 0000000000..1f227afa9f
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/entry_points.txt b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/entry_points.txt
new file mode 100644
index 0000000000..3effb4ba11
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/entry_points.txt
@@ -0,0 +1,32 @@
+[console_scripts]
+virtualenv = virtualenv.__main__:run_with_catch
+
+[virtualenv.activate]
+bash = virtualenv.activation.bash:BashActivator
+batch = virtualenv.activation.batch:BatchActivator
+cshell = virtualenv.activation.cshell:CShellActivator
+fish = virtualenv.activation.fish:FishActivator
+powershell = virtualenv.activation.powershell:PowerShellActivator
+python = virtualenv.activation.python:PythonActivator
+xonsh = virtualenv.activation.xonsh:XonshActivator
+
+[virtualenv.create]
+cpython2-mac-framework = virtualenv.create.via_global_ref.builtin.cpython.mac_os:CPython2macOsFramework
+cpython2-posix = virtualenv.create.via_global_ref.builtin.cpython.cpython2:CPython2Posix
+cpython2-win = virtualenv.create.via_global_ref.builtin.cpython.cpython2:CPython2Windows
+cpython3-mac-framework = virtualenv.create.via_global_ref.builtin.cpython.mac_os:CPython3macOsFramework
+cpython3-posix = virtualenv.create.via_global_ref.builtin.cpython.cpython3:CPython3Posix
+cpython3-win = virtualenv.create.via_global_ref.builtin.cpython.cpython3:CPython3Windows
+pypy2-posix = virtualenv.create.via_global_ref.builtin.pypy.pypy2:PyPy2Posix
+pypy2-win = virtualenv.create.via_global_ref.builtin.pypy.pypy2:Pypy2Windows
+pypy3-posix = virtualenv.create.via_global_ref.builtin.pypy.pypy3:PyPy3Posix
+pypy3-win = virtualenv.create.via_global_ref.builtin.pypy.pypy3:Pypy3Windows
+venv = virtualenv.create.via_global_ref.venv:Venv
+
+[virtualenv.discovery]
+builtin = virtualenv.discovery.builtin:Builtin
+
+[virtualenv.seed]
+app-data = virtualenv.seed.embed.via_app_data.via_app_data:FromAppData
+pip = virtualenv.seed.embed.pip_invoke:PipInvoke
+
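These entry points are virtualenv's plugin registry: each group (virtualenv.activate, virtualenv.create, virtualenv.discovery, virtualenv.seed) maps a component name to the class that implements it. A minimal sketch of enumerating one group with the standard importlib.metadata API (Python 3.8+, and assuming the virtualenv distribution is visible to the interpreter); this is not the exact loader virtualenv uses internally:

```python
from importlib import metadata

# List the shell activators registered by the virtualenv distribution.
for ep in metadata.distribution("virtualenv").entry_points:
    if ep.group == "virtualenv.activate":
        print(ep.name, "->", ep.value)  # e.g. bash -> virtualenv.activation.bash:BashActivator
        activator_cls = ep.load()       # imports the module and returns the class
```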
diff --git a/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/top_level.txt b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/top_level.txt
new file mode 100644
index 0000000000..66072c7645
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+virtualenv
diff --git a/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/zip-safe b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/zip-safe
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv-20.2.2.dist-info/zip-safe
@@ -0,0 +1 @@
+
diff --git a/third_party/python/virtualenv/virtualenv.py b/third_party/python/virtualenv/virtualenv.py
new file mode 100644
index 0000000000..2435b9dfdd
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv.py
@@ -0,0 +1,55 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script is not to be imported.
+assert __name__ == '__main__'
+import os
+import sys
+# If some older version of virtualenv is installed, it may interfere with this one.
+# So filter out the site-packages and dist-packages directories where it might be installed.
+# This is roughly like invoking python with -S, but invoking a virtualenv python 2.7
+# with -S is broken (sys.path becomes completely wrong, and even `import os` fails).
+# (And while it is admittedly silly to use a virtualenv python to run virtualenv, we
+# do.)
+sys.path = [p for p in sys.path if os.path.basename(p) not in ('site-packages', 'dist-packages')]
+try:
+ import importlib.util
+ spec = importlib.util.spec_from_file_location('main', os.path.join(os.path.dirname(__file__), '__main__.py'))
+ mod = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(mod)
+except ImportError:
+ import imp
+ mod_info = imp.find_module('__main__', [os.path.dirname(__file__)])
+ mod = imp.load_module('main', *mod_info)
+
+
+# Fake zipfile module to make `mod.VersionedFindLoad` able to read the extracted
+# files rather than the zipapp.
+class fake_zipfile(object):
+ class ZipFile(object):
+ def __init__(self, path, mode):
+ self._path = path
+ self._mode = mode
+
+ def open(self, path):
+ # The caller expects a raw file object, with no unicode handling.
+ return open(os.path.join(self._path, path), self._mode + 'b')
+
+ def close(self):
+ pass
+
+mod.zipfile = fake_zipfile
+
+
+def run():
+ with mod.VersionedFindLoad() as finder:
+ sys.meta_path.insert(0, finder)
+ finder._register_distutils_finder()
+ from virtualenv.__main__ import run as run_virtualenv
+
+ run_virtualenv()
+
+
+if __name__ == "__main__":
+ run()
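The wrapper above depends on stripping any system-wide virtualenv from the import path before loading the vendored copy. A minimal sketch of that filter, on hypothetical path entries, behaves like this:

```python
# Sketch of the sys.path filter used by the wrapper, applied to made-up entries.
import os

example_path = [
    "/usr/lib/python2.7",                       # kept
    "/usr/lib/python2.7/site-packages",         # dropped (may hold an old virtualenv)
    "/usr/local/lib/python2.7/dist-packages",   # dropped
]
print([p for p in example_path
       if os.path.basename(p) not in ("site-packages", "dist-packages")])
```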
diff --git a/third_party/python/virtualenv/virtualenv/__init__.py b/third_party/python/virtualenv/virtualenv/__init__.py
new file mode 100644
index 0000000000..5f74e3ef21
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/__init__.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import, unicode_literals
+
+from .run import cli_run, session_via_cli
+from .version import __version__
+
+__all__ = (
+ "__version__",
+ "cli_run",
+ "session_via_cli",
+)
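These two names form virtualenv's public programmatic API. A minimal usage sketch (the destination directory is an arbitrary example, not something mandated by the patch):

```python
# Minimal sketch of the programmatic API re-exported above.
from virtualenv import cli_run

session = cli_run(["./example-venv"])  # accepts the same arguments as the CLI
print(session.creator.dest)            # where the environment was created
print([type(a).__name__ for a in session.activators])
```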
diff --git a/third_party/python/virtualenv/virtualenv/__main__.py b/third_party/python/virtualenv/virtualenv/__main__.py
new file mode 100644
index 0000000000..0995e4c18b
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/__main__.py
@@ -0,0 +1,77 @@
+from __future__ import absolute_import, print_function, unicode_literals
+
+import logging
+import sys
+from datetime import datetime
+
+
+def run(args=None, options=None):
+ start = datetime.now()
+ from virtualenv.run import cli_run
+ from virtualenv.util.error import ProcessCallFailed
+
+ if args is None:
+ args = sys.argv[1:]
+ try:
+ session = cli_run(args, options)
+ logging.warning(LogSession(session, start))
+ except ProcessCallFailed as exception:
+ print("subprocess call failed for {} with code {}".format(exception.cmd, exception.code))
+ print(exception.out, file=sys.stdout, end="")
+ print(exception.err, file=sys.stderr, end="")
+ raise SystemExit(exception.code)
+
+
+class LogSession(object):
+ def __init__(self, session, start):
+ self.session = session
+ self.start = start
+
+ def __str__(self):
+ from virtualenv.util.six import ensure_text
+
+ spec = self.session.creator.interpreter.spec
+ elapsed = (datetime.now() - self.start).total_seconds() * 1000
+ lines = [
+ "created virtual environment {} in {:.0f}ms".format(spec, elapsed),
+ " creator {}".format(ensure_text(str(self.session.creator))),
+ ]
+ if self.session.seeder.enabled:
+ lines += (
+ " seeder {}".format(ensure_text(str(self.session.seeder))),
+ " added seed packages: {}".format(
+ ", ".join(
+ sorted(
+ "==".join(i.stem.split("-"))
+ for i in self.session.creator.purelib.iterdir()
+ if i.suffix == ".dist-info"
+ ),
+ ),
+ ),
+ )
+ if self.session.activators:
+ lines.append(" activators {}".format(",".join(i.__class__.__name__ for i in self.session.activators)))
+ return "\n".join(lines)
+
+
+def run_with_catch(args=None):
+ from virtualenv.config.cli.parser import VirtualEnvOptions
+
+ options = VirtualEnvOptions()
+ try:
+ run(args, options)
+ except (KeyboardInterrupt, SystemExit, Exception) as exception:
+ try:
+ if getattr(options, "with_traceback", False):
+ raise
+ else:
+ if not (isinstance(exception, SystemExit) and exception.code == 0):
+ logging.error("%s: %s", type(exception).__name__, exception)
+ code = exception.code if isinstance(exception, SystemExit) else 1
+ sys.exit(code)
+ finally:
+ logging.shutdown() # force flush of log messages before the trace is printed
+
+
+if __name__ == "__main__": # pragma: no cov
+ run_with_catch() # pragma: no cov
diff --git a/third_party/python/virtualenv/virtualenv/activation/__init__.py b/third_party/python/virtualenv/virtualenv/activation/__init__.py
new file mode 100644
index 0000000000..fa2f0b4af7
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/__init__.py
@@ -0,0 +1,19 @@
+from __future__ import absolute_import, unicode_literals
+
+from .bash import BashActivator
+from .batch import BatchActivator
+from .cshell import CShellActivator
+from .fish import FishActivator
+from .powershell import PowerShellActivator
+from .python import PythonActivator
+from .xonsh import XonshActivator
+
+__all__ = [
+ "BashActivator",
+ "PowerShellActivator",
+ "XonshActivator",
+ "CShellActivator",
+ "PythonActivator",
+ "BatchActivator",
+ "FishActivator",
+]
diff --git a/third_party/python/virtualenv/virtualenv/activation/activator.py b/third_party/python/virtualenv/virtualenv/activation/activator.py
new file mode 100644
index 0000000000..587ac105bc
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/activator.py
@@ -0,0 +1,44 @@
+from __future__ import absolute_import, unicode_literals
+
+from abc import ABCMeta, abstractmethod
+
+from six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class Activator(object):
+ """Generates an activate script for the virtual environment"""
+
+ def __init__(self, options):
+ """Create a new activator generator.
+
+ :param options: the parsed options as defined within :meth:`add_parser_arguments`
+ """
+ self.flag_prompt = options.prompt
+
+ @classmethod
+ def supports(cls, interpreter):
+ """Check if the activation script is supported in the given interpreter.
+
+ :param interpreter: the interpreter we need to support
+ :return: ``True`` if supported, ``False`` otherwise
+ """
+ return True
+
+ @classmethod
+ def add_parser_arguments(cls, parser, interpreter):
+ """
+ Add CLI arguments for this activation script.
+
+ :param parser: the CLI parser
+        :param interpreter: the interpreter this virtual environment is based on
+ """
+
+ @abstractmethod
+ def generate(self, creator):
+ """Generate the activate script for the given creator.
+
+        :param creator: the creator (based on :class:`virtualenv.create.creator.Creator`) we used to create this \
+ virtual environment
+ """
+ raise NotImplementedError
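Concrete activators subclass this ABC and implement ``generate``. As a hypothetical illustration (this class is not part of virtualenv), a trivial activator could look like:

```python
# Hypothetical Activator subclass, for illustration only: writes a plain text
# file with the environment location instead of a shell script.
from virtualenv.activation.activator import Activator


class EnvFileActivator(Activator):
    def generate(self, creator):
        dest = creator.bin_dir / "environment.txt"  # creator.bin_dir is a Path
        dest.write_text(u"VIRTUAL_ENV={}\n".format(creator.dest))
        return [dest]
```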
diff --git a/third_party/python/virtualenv/virtualenv/activation/bash/__init__.py b/third_party/python/virtualenv/virtualenv/activation/bash/__init__.py
new file mode 100644
index 0000000000..22c90c3827
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/bash/__init__.py
@@ -0,0 +1,13 @@
+from __future__ import absolute_import, unicode_literals
+
+from virtualenv.util.path import Path
+
+from ..via_template import ViaTemplateActivator
+
+
+class BashActivator(ViaTemplateActivator):
+ def templates(self):
+ yield Path("activate.sh")
+
+ def as_name(self, template):
+ return template.stem
diff --git a/third_party/python/virtualenv/virtualenv/activation/bash/activate.sh b/third_party/python/virtualenv/virtualenv/activation/bash/activate.sh
new file mode 100644
index 0000000000..222d982043
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/bash/activate.sh
@@ -0,0 +1,87 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+
+if [ "${BASH_SOURCE-}" = "$0" ]; then
+ echo "You must source this script: \$ source $0" >&2
+ exit 33
+fi
+
+deactivate () {
+ unset -f pydoc >/dev/null 2>&1
+
+ # reset old environment variables
+ # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
+ if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
+ PATH="$_OLD_VIRTUAL_PATH"
+ export PATH
+ unset _OLD_VIRTUAL_PATH
+ fi
+ if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
+ PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
+ export PYTHONHOME
+ unset _OLD_VIRTUAL_PYTHONHOME
+ fi
+
+ # This should detect bash and zsh, which have a hash command that must
+ # be called to get it to forget past commands. Without forgetting
+ # past commands the $PATH changes we made may not be respected
+ if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
+ hash -r 2>/dev/null
+ fi
+
+ if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
+ PS1="$_OLD_VIRTUAL_PS1"
+ export PS1
+ unset _OLD_VIRTUAL_PS1
+ fi
+
+ unset VIRTUAL_ENV
+ if [ ! "${1-}" = "nondestructive" ] ; then
+ # Self destruct!
+ unset -f deactivate
+ fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+VIRTUAL_ENV='__VIRTUAL_ENV__'
+if ([ "$OSTYPE" = "cygwin" ] || [ "$OSTYPE" = "msys" ]) && $(command -v cygpath &> /dev/null) ; then
+ VIRTUAL_ENV=$(cygpath -u "$VIRTUAL_ENV")
+fi
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/__BIN_NAME__:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+if ! [ -z "${PYTHONHOME+_}" ] ; then
+ _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
+ unset PYTHONHOME
+fi
+
+if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
+ _OLD_VIRTUAL_PS1="${PS1-}"
+ if [ "x__VIRTUAL_PROMPT__" != x ] ; then
+ PS1="__VIRTUAL_PROMPT__${PS1-}"
+ else
+ PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
+ fi
+ export PS1
+fi
+
+# Make sure to unalias pydoc if it's already there
+alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true
+
+pydoc () {
+ python -m pydoc "$@"
+}
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands. Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
+ hash -r 2>/dev/null
+fi
diff --git a/third_party/python/virtualenv/virtualenv/activation/batch/__init__.py b/third_party/python/virtualenv/virtualenv/activation/batch/__init__.py
new file mode 100644
index 0000000000..4149712d87
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/batch/__init__.py
@@ -0,0 +1,23 @@
+from __future__ import absolute_import, unicode_literals
+
+import os
+
+from virtualenv.util.path import Path
+
+from ..via_template import ViaTemplateActivator
+
+
+class BatchActivator(ViaTemplateActivator):
+ @classmethod
+ def supports(cls, interpreter):
+ return interpreter.os == "nt"
+
+ def templates(self):
+ yield Path("activate.bat")
+ yield Path("deactivate.bat")
+ yield Path("pydoc.bat")
+
+ def instantiate_template(self, replacements, template, creator):
+ # ensure the text has all newlines as \r\n - required by batch
+ base = super(BatchActivator, self).instantiate_template(replacements, template, creator)
+ return base.replace(os.linesep, "\n").replace("\n", os.linesep)
diff --git a/third_party/python/virtualenv/virtualenv/activation/batch/activate.bat b/third_party/python/virtualenv/virtualenv/activation/batch/activate.bat
new file mode 100644
index 0000000000..8dae28d19a
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/batch/activate.bat
@@ -0,0 +1,40 @@
+@echo off
+
+set "VIRTUAL_ENV=__VIRTUAL_ENV__"
+
+if defined _OLD_VIRTUAL_PROMPT (
+ set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
+) else (
+ if not defined PROMPT (
+ set "PROMPT=$P$G"
+ )
+ if not defined VIRTUAL_ENV_DISABLE_PROMPT (
+ set "_OLD_VIRTUAL_PROMPT=%PROMPT%"
+ )
+)
+if not defined VIRTUAL_ENV_DISABLE_PROMPT (
+ set "ENV_PROMPT=__VIRTUAL_PROMPT__"
+ if NOT DEFINED ENV_PROMPT (
+ for %%d in ("%VIRTUAL_ENV%") do set "ENV_PROMPT=(%%~nxd) "
+    )
+ set "PROMPT=%ENV_PROMPT%%PROMPT%"
+)
+
+REM Don't use () to avoid problems with them in %PATH%
+if defined _OLD_VIRTUAL_PYTHONHOME goto ENDIFVHOME
+ set "_OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%"
+:ENDIFVHOME
+
+set PYTHONHOME=
+
+REM if defined _OLD_VIRTUAL_PATH (
+if not defined _OLD_VIRTUAL_PATH goto ENDIFVPATH1
+ set "PATH=%_OLD_VIRTUAL_PATH%"
+:ENDIFVPATH1
+REM ) else (
+if defined _OLD_VIRTUAL_PATH goto ENDIFVPATH2
+ set "_OLD_VIRTUAL_PATH=%PATH%"
+:ENDIFVPATH2
+
+set "PATH=%VIRTUAL_ENV%\__BIN_NAME__;%PATH%"
diff --git a/third_party/python/virtualenv/virtualenv/activation/batch/deactivate.bat b/third_party/python/virtualenv/virtualenv/activation/batch/deactivate.bat
new file mode 100644
index 0000000000..c33186657f
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/batch/deactivate.bat
@@ -0,0 +1,19 @@
+@echo off
+
+set VIRTUAL_ENV=
+
+REM Don't use () to avoid problems with them in %PATH%
+if not defined _OLD_VIRTUAL_PROMPT goto ENDIFVPROMPT
+ set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
+ set _OLD_VIRTUAL_PROMPT=
+:ENDIFVPROMPT
+
+if not defined _OLD_VIRTUAL_PYTHONHOME goto ENDIFVHOME
+ set "PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%"
+ set _OLD_VIRTUAL_PYTHONHOME=
+:ENDIFVHOME
+
+if not defined _OLD_VIRTUAL_PATH goto ENDIFVPATH
+ set "PATH=%_OLD_VIRTUAL_PATH%"
+ set _OLD_VIRTUAL_PATH=
+:ENDIFVPATH
diff --git a/third_party/python/virtualenv/virtualenv/activation/batch/pydoc.bat b/third_party/python/virtualenv/virtualenv/activation/batch/pydoc.bat
new file mode 100644
index 0000000000..45ddc13275
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/batch/pydoc.bat
@@ -0,0 +1 @@
+python.exe -m pydoc %*
diff --git a/third_party/python/virtualenv/virtualenv/activation/cshell/__init__.py b/third_party/python/virtualenv/virtualenv/activation/cshell/__init__.py
new file mode 100644
index 0000000000..b25c602a58
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/cshell/__init__.py
@@ -0,0 +1,14 @@
+from __future__ import absolute_import, unicode_literals
+
+from virtualenv.util.path import Path
+
+from ..via_template import ViaTemplateActivator
+
+
+class CShellActivator(ViaTemplateActivator):
+ @classmethod
+ def supports(cls, interpreter):
+ return interpreter.os != "nt"
+
+ def templates(self):
+ yield Path("activate.csh")
diff --git a/third_party/python/virtualenv/virtualenv/activation/cshell/activate.csh b/third_party/python/virtualenv/virtualenv/activation/cshell/activate.csh
new file mode 100644
index 0000000000..72b2cf8eff
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/cshell/activate.csh
@@ -0,0 +1,55 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+
+set newline='\
+'
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH:q" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT:q" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV '__VIRTUAL_ENV__'
+
+set _OLD_VIRTUAL_PATH="$PATH:q"
+setenv PATH "$VIRTUAL_ENV:q/__BIN_NAME__:$PATH:q"
+
+
+
+if ('__VIRTUAL_PROMPT__' != "") then
+ set env_name = '__VIRTUAL_PROMPT__'
+else
+ set env_name = '('"$VIRTUAL_ENV:t:q"') '
+endif
+
+if ( $?VIRTUAL_ENV_DISABLE_PROMPT ) then
+ if ( $VIRTUAL_ENV_DISABLE_PROMPT == "" ) then
+ set do_prompt = "1"
+ else
+ set do_prompt = "0"
+ endif
+else
+ set do_prompt = "1"
+endif
+
+if ( $do_prompt == "1" ) then
+ # Could be in a non-interactive environment,
+ # in which case, $prompt is undefined and we wouldn't
+ # care about the prompt anyway.
+ if ( $?prompt ) then
+ set _OLD_VIRTUAL_PROMPT="$prompt:q"
+ if ( "$prompt:q" =~ *"$newline:q"* ) then
+ :
+ else
+ set prompt = "$env_name:q$prompt:q"
+ endif
+ endif
+endif
+
+unset env_name
+unset do_prompt
+
+alias pydoc python -m pydoc
+
+rehash
diff --git a/third_party/python/virtualenv/virtualenv/activation/fish/__init__.py b/third_party/python/virtualenv/virtualenv/activation/fish/__init__.py
new file mode 100644
index 0000000000..8d0e19c2cd
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/fish/__init__.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import, unicode_literals
+
+from virtualenv.util.path import Path
+
+from ..via_template import ViaTemplateActivator
+
+
+class FishActivator(ViaTemplateActivator):
+ def templates(self):
+ yield Path("activate.fish")
diff --git a/third_party/python/virtualenv/virtualenv/activation/fish/activate.fish b/third_party/python/virtualenv/virtualenv/activation/fish/activate.fish
new file mode 100644
index 0000000000..faa262270a
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/fish/activate.fish
@@ -0,0 +1,100 @@
+# This file must be used using `source bin/activate.fish` *within a running fish ( http://fishshell.com ) session*.
+# Do not run it directly.
+
+function _bashify_path -d "Converts a fish path to something bash can recognize"
+ set fishy_path $argv
+ set bashy_path $fishy_path[1]
+ for path_part in $fishy_path[2..-1]
+ set bashy_path "$bashy_path:$path_part"
+ end
+ echo $bashy_path
+end
+
+function _fishify_path -d "Converts a bash path to something fish can recognize"
+ echo $argv | tr ':' '\n'
+end
+
+function deactivate -d 'Exit virtualenv mode and return to the normal environment.'
+ # reset old environment variables
+ if test -n "$_OLD_VIRTUAL_PATH"
+ # https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling
+ if test (echo $FISH_VERSION | head -c 1) -lt 3
+ set -gx PATH (_fishify_path "$_OLD_VIRTUAL_PATH")
+ else
+ set -gx PATH "$_OLD_VIRTUAL_PATH"
+ end
+ set -e _OLD_VIRTUAL_PATH
+ end
+
+ if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+ set -gx PYTHONHOME "$_OLD_VIRTUAL_PYTHONHOME"
+ set -e _OLD_VIRTUAL_PYTHONHOME
+ end
+
+ if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+ and functions -q _old_fish_prompt
+ # Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`.
+ set -l fish_function_path
+
+ # Erase virtualenv's `fish_prompt` and restore the original.
+ functions -e fish_prompt
+ functions -c _old_fish_prompt fish_prompt
+ functions -e _old_fish_prompt
+ set -e _OLD_FISH_PROMPT_OVERRIDE
+ end
+
+ set -e VIRTUAL_ENV
+
+ if test "$argv[1]" != 'nondestructive'
+ # Self-destruct!
+ functions -e pydoc
+ functions -e deactivate
+ functions -e _bashify_path
+ functions -e _fishify_path
+ end
+end
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV '__VIRTUAL_ENV__'
+
+# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling
+if test (echo $FISH_VERSION | head -c 1) -lt 3
+ set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH)
+else
+ set -gx _OLD_VIRTUAL_PATH "$PATH"
+end
+set -gx PATH "$VIRTUAL_ENV"'/__BIN_NAME__' $PATH
+
+# Unset `$PYTHONHOME` if set.
+if set -q PYTHONHOME
+ set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+ set -e PYTHONHOME
+end
+
+function pydoc
+ python -m pydoc $argv
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+ # Copy the current `fish_prompt` function as `_old_fish_prompt`.
+ functions -c fish_prompt _old_fish_prompt
+
+ function fish_prompt
+ # Run the user's prompt first; it might depend on (pipe)status.
+ set -l prompt (_old_fish_prompt)
+
+ # Prompt override provided?
+ # If not, just prepend the environment name.
+ if test -n '__VIRTUAL_PROMPT__'
+ printf '%s%s' '__VIRTUAL_PROMPT__' (set_color normal)
+ else
+ printf '%s(%s) ' (set_color normal) (basename "$VIRTUAL_ENV")
+ end
+
+ string join -- \n $prompt # handle multi-line prompts
+ end
+
+ set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+end
diff --git a/third_party/python/virtualenv/virtualenv/activation/powershell/__init__.py b/third_party/python/virtualenv/virtualenv/activation/powershell/__init__.py
new file mode 100644
index 0000000000..4fadc63bc1
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/powershell/__init__.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import, unicode_literals
+
+from virtualenv.util.path import Path
+
+from ..via_template import ViaTemplateActivator
+
+
+class PowerShellActivator(ViaTemplateActivator):
+ def templates(self):
+ yield Path("activate.ps1")
diff --git a/third_party/python/virtualenv/virtualenv/activation/powershell/activate.ps1 b/third_party/python/virtualenv/virtualenv/activation/powershell/activate.ps1
new file mode 100644
index 0000000000..a370a63f55
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/powershell/activate.ps1
@@ -0,0 +1,60 @@
+$script:THIS_PATH = $myinvocation.mycommand.path
+$script:BASE_DIR = Split-Path (Resolve-Path "$THIS_PATH/..") -Parent
+
+function global:deactivate([switch] $NonDestructive) {
+ if (Test-Path variable:_OLD_VIRTUAL_PATH) {
+ $env:PATH = $variable:_OLD_VIRTUAL_PATH
+ Remove-Variable "_OLD_VIRTUAL_PATH" -Scope global
+ }
+
+ if (Test-Path function:_old_virtual_prompt) {
+ $function:prompt = $function:_old_virtual_prompt
+ Remove-Item function:\_old_virtual_prompt
+ }
+
+ if ($env:VIRTUAL_ENV) {
+ Remove-Item env:VIRTUAL_ENV -ErrorAction SilentlyContinue
+ }
+
+ if (!$NonDestructive) {
+ # Self destruct!
+ Remove-Item function:deactivate
+ Remove-Item function:pydoc
+ }
+}
+
+function global:pydoc {
+ python -m pydoc $args
+}
+
+# unset irrelevant variables
+deactivate -nondestructive
+
+$VIRTUAL_ENV = $BASE_DIR
+$env:VIRTUAL_ENV = $VIRTUAL_ENV
+
+New-Variable -Scope global -Name _OLD_VIRTUAL_PATH -Value $env:PATH
+
+$env:PATH = "$env:VIRTUAL_ENV/__BIN_NAME____PATH_SEP__" + $env:PATH
+if (!$env:VIRTUAL_ENV_DISABLE_PROMPT) {
+ function global:_old_virtual_prompt {
+ ""
+ }
+ $function:_old_virtual_prompt = $function:prompt
+
+ if ("__VIRTUAL_PROMPT__" -ne "") {
+ function global:prompt {
+ # Add the custom prefix to the existing prompt
+ $previous_prompt_value = & $function:_old_virtual_prompt
+ ("__VIRTUAL_PROMPT__" + $previous_prompt_value)
+ }
+ }
+ else {
+ function global:prompt {
+ # Add a prefix to the current prompt, but don't discard it.
+ $previous_prompt_value = & $function:_old_virtual_prompt
+ $new_prompt_value = "($( Split-Path $env:VIRTUAL_ENV -Leaf )) "
+ ($new_prompt_value + $previous_prompt_value)
+ }
+ }
+}
diff --git a/third_party/python/virtualenv/virtualenv/activation/python/__init__.py b/third_party/python/virtualenv/virtualenv/activation/python/__init__.py
new file mode 100644
index 0000000000..9e579124d7
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/python/__init__.py
@@ -0,0 +1,35 @@
+from __future__ import absolute_import, unicode_literals
+
+import os
+import sys
+from collections import OrderedDict
+
+from virtualenv.util.path import Path
+from virtualenv.util.six import ensure_text
+
+from ..via_template import ViaTemplateActivator
+
+
+class PythonActivator(ViaTemplateActivator):
+ def templates(self):
+ yield Path("activate_this.py")
+
+ def replacements(self, creator, dest_folder):
+ replacements = super(PythonActivator, self).replacements(creator, dest_folder)
+ lib_folders = OrderedDict((os.path.relpath(str(i), str(dest_folder)), None) for i in creator.libs)
+ win_py2 = creator.interpreter.platform == "win32" and creator.interpreter.version_info.major == 2
+ replacements.update(
+ {
+ "__LIB_FOLDERS__": ensure_text(os.pathsep.join(lib_folders.keys())),
+ "__DECODE_PATH__": ("yes" if win_py2 else ""),
+ },
+ )
+ return replacements
+
+ @staticmethod
+ def _repr_unicode(creator, value):
+ py2 = creator.interpreter.version_info.major == 2
+ if py2: # on Python 2 we need to encode this into explicit utf-8, py3 supports unicode literals
+ start = 2 if sys.version_info[0] == 3 else 1
+ value = ensure_text(repr(value.encode("utf-8"))[start:-1])
+ return value
diff --git a/third_party/python/virtualenv/virtualenv/activation/python/activate_this.py b/third_party/python/virtualenv/virtualenv/activation/python/activate_this.py
new file mode 100644
index 0000000000..29debe3e74
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/python/activate_this.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+"""Activate virtualenv for current interpreter:
+
+Use exec(open(this_file).read(), {'__file__': this_file}).
+
+This can be used when you must use an existing Python interpreter, not the virtualenv bin/python.
+"""
+import os
+import site
+import sys
+
+try:
+ abs_file = os.path.abspath(__file__)
+except NameError:
+ raise AssertionError("You must use exec(open(this_file).read(), {'__file__': this_file}))")
+
+bin_dir = os.path.dirname(abs_file)
+base = bin_dir[: -len("__BIN_NAME__") - 1] # strip away the bin part from the __file__, plus the path separator
+
+# prepend bin to PATH (this file is inside the bin directory)
+os.environ["PATH"] = os.pathsep.join([bin_dir] + os.environ.get("PATH", "").split(os.pathsep))
+os.environ["VIRTUAL_ENV"] = base # virtual env is right above bin directory
+
+# add the virtual environments libraries to the host python import mechanism
+prev_length = len(sys.path)
+for lib in "__LIB_FOLDERS__".split(os.pathsep):
+ path = os.path.realpath(os.path.join(bin_dir, lib))
+ site.addsitedir(path.decode("utf-8") if "__DECODE_PATH__" else path)
+sys.path[:] = sys.path[prev_length:] + sys.path[0:prev_length]
+
+sys.real_prefix = sys.prefix
+sys.prefix = base
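Following the docstring above, this template is consumed from an already-running host interpreter roughly like this (the virtualenv path is an arbitrary example):

```python
# Usage sketch for activate_this.py; the path below is an arbitrary example.
activate_this = "/path/to/example-venv/bin/activate_this.py"
with open(activate_this) as handler:
    exec(handler.read(), {"__file__": activate_this})
# From here on the host interpreter resolves imports from the virtualenv,
# and PATH / VIRTUAL_ENV have been updated in os.environ.
```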
diff --git a/third_party/python/virtualenv/virtualenv/activation/via_template.py b/third_party/python/virtualenv/virtualenv/activation/via_template.py
new file mode 100644
index 0000000000..14f097973f
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/via_template.py
@@ -0,0 +1,67 @@
+from __future__ import absolute_import, unicode_literals
+
+import os
+import sys
+from abc import ABCMeta, abstractmethod
+
+from six import add_metaclass
+
+from virtualenv.util.six import ensure_text
+
+from .activator import Activator
+
+if sys.version_info >= (3, 7):
+ from importlib.resources import read_binary
+else:
+ from importlib_resources import read_binary
+
+
+@add_metaclass(ABCMeta)
+class ViaTemplateActivator(Activator):
+ @abstractmethod
+ def templates(self):
+ raise NotImplementedError
+
+ def generate(self, creator):
+ dest_folder = creator.bin_dir
+ replacements = self.replacements(creator, dest_folder)
+ generated = self._generate(replacements, self.templates(), dest_folder, creator)
+ if self.flag_prompt is not None:
+ creator.pyenv_cfg["prompt"] = self.flag_prompt
+ return generated
+
+ def replacements(self, creator, dest_folder):
+ return {
+ "__VIRTUAL_PROMPT__": "" if self.flag_prompt is None else self.flag_prompt,
+ "__VIRTUAL_ENV__": ensure_text(str(creator.dest)),
+ "__VIRTUAL_NAME__": creator.env_name,
+ "__BIN_NAME__": ensure_text(str(creator.bin_dir.relative_to(creator.dest))),
+ "__PATH_SEP__": ensure_text(os.pathsep),
+ }
+
+ def _generate(self, replacements, templates, to_folder, creator):
+ generated = []
+ for template in templates:
+ text = self.instantiate_template(replacements, template, creator)
+ dest = to_folder / self.as_name(template)
+ # use write_bytes to avoid platform specific line normalization (\n -> \r\n)
+ dest.write_bytes(text.encode("utf-8"))
+ generated.append(dest)
+ return generated
+
+ def as_name(self, template):
+ return template.name
+
+ def instantiate_template(self, replacements, template, creator):
+ # read content as binary to avoid platform specific line normalization (\n -> \r\n)
+ binary = read_binary(self.__module__, str(template))
+ text = binary.decode("utf-8", errors="strict")
+ for key, value in replacements.items():
+ value = self._repr_unicode(creator, value)
+ text = text.replace(key, value)
+ return text
+
+ @staticmethod
+ def _repr_unicode(creator, value):
+ # by default we just let it be unicode
+ return value
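``instantiate_template`` is a plain token substitution over the shipped template files. A self-contained sketch of the same mechanism, with made-up replacement values:

```python
# Stand-alone illustration of the __TOKEN__ substitution performed by
# instantiate_template above, using made-up values.
template_text = 'VIRTUAL_ENV="__VIRTUAL_ENV__"\nPATH="$VIRTUAL_ENV/__BIN_NAME__:$PATH"\n'
replacements = {"__VIRTUAL_ENV__": "/tmp/example-venv", "__BIN_NAME__": "bin"}
for key, value in replacements.items():
    template_text = template_text.replace(key, value)
print(template_text)
```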
diff --git a/third_party/python/virtualenv/virtualenv/activation/xonsh/__init__.py b/third_party/python/virtualenv/virtualenv/activation/xonsh/__init__.py
new file mode 100644
index 0000000000..d92411c20f
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/xonsh/__init__.py
@@ -0,0 +1,14 @@
+from __future__ import absolute_import, unicode_literals
+
+from virtualenv.util.path import Path
+
+from ..via_template import ViaTemplateActivator
+
+
+class XonshActivator(ViaTemplateActivator):
+ def templates(self):
+ yield Path("activate.xsh")
+
+ @classmethod
+ def supports(cls, interpreter):
+ return interpreter.version_info >= (3, 5)
diff --git a/third_party/python/virtualenv/virtualenv/activation/xonsh/activate.xsh b/third_party/python/virtualenv/virtualenv/activation/xonsh/activate.xsh
new file mode 100644
index 0000000000..c77ea62786
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/activation/xonsh/activate.xsh
@@ -0,0 +1,46 @@
+"""Xonsh activate script for virtualenv"""
+from xonsh.tools import get_sep as _get_sep
+
+def _deactivate(args):
+ if "pydoc" in aliases:
+ del aliases["pydoc"]
+
+ if ${...}.get("_OLD_VIRTUAL_PATH", ""):
+ $PATH = $_OLD_VIRTUAL_PATH
+ del $_OLD_VIRTUAL_PATH
+
+ if ${...}.get("_OLD_VIRTUAL_PYTHONHOME", ""):
+ $PYTHONHOME = $_OLD_VIRTUAL_PYTHONHOME
+ del $_OLD_VIRTUAL_PYTHONHOME
+
+ if "VIRTUAL_ENV" in ${...}:
+ del $VIRTUAL_ENV
+
+ if "VIRTUAL_ENV_PROMPT" in ${...}:
+ del $VIRTUAL_ENV_PROMPT
+
+ if "nondestructive" not in args:
+ # Self destruct!
+ del aliases["deactivate"]
+
+
+# unset irrelevant variables
+_deactivate(["nondestructive"])
+aliases["deactivate"] = _deactivate
+
+$VIRTUAL_ENV = r"__VIRTUAL_ENV__"
+
+$_OLD_VIRTUAL_PATH = $PATH
+$PATH = $PATH[:]
+$PATH.add($VIRTUAL_ENV + _get_sep() + "__BIN_NAME__", front=True, replace=True)
+
+if ${...}.get("PYTHONHOME", ""):
+ # unset PYTHONHOME if set
+ $_OLD_VIRTUAL_PYTHONHOME = $PYTHONHOME
+ del $PYTHONHOME
+
+$VIRTUAL_ENV_PROMPT = "__VIRTUAL_PROMPT__"
+if not $VIRTUAL_ENV_PROMPT:
+ del $VIRTUAL_ENV_PROMPT
+
+aliases["pydoc"] = ["python", "-m", "pydoc"]
diff --git a/third_party/python/virtualenv/virtualenv/app_data/__init__.py b/third_party/python/virtualenv/virtualenv/app_data/__init__.py
new file mode 100644
index 0000000000..2df0cae5d3
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/app_data/__init__.py
@@ -0,0 +1,57 @@
+"""
+Application data stored by virtualenv.
+"""
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+
+from appdirs import user_data_dir
+
+from .na import AppDataDisabled
+from .read_only import ReadOnlyAppData
+from .via_disk_folder import AppDataDiskFolder
+from .via_tempdir import TempAppData
+
+
+def _default_app_data_dir(): # type: () -> str
+ key = str("VIRTUALENV_OVERRIDE_APP_DATA")
+ if key in os.environ:
+ return os.environ[key]
+ else:
+ return user_data_dir(appname="virtualenv", appauthor="pypa")
+
+
+def make_app_data(folder, **kwargs):
+ read_only = kwargs.pop("read_only")
+ if kwargs: # py3+ kwonly
+ raise TypeError("unexpected keywords: {}")
+
+ if folder is None:
+ folder = _default_app_data_dir()
+ folder = os.path.abspath(folder)
+
+ if read_only:
+ return ReadOnlyAppData(folder)
+
+ if not os.path.isdir(folder):
+ try:
+ os.makedirs(folder)
+ logging.debug("created app data folder %s", folder)
+ except OSError as exception:
+ logging.info("could not create app data folder %s due to %r", folder, exception)
+
+ if os.access(folder, os.W_OK):
+ return AppDataDiskFolder(folder)
+ else:
+ logging.debug("app data folder %s has no write access", folder)
+ return TempAppData()
+
+
+__all__ = (
+ "AppDataDisabled",
+ "AppDataDiskFolder",
+ "ReadOnlyAppData",
+ "TempAppData",
+ "make_app_data",
+)
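``make_app_data`` picks a backend based on the requested mode and on whether the folder is writable. A short usage sketch (it will touch the default user data directory):

```python
# Sketch of the factory above: with no folder and read_only=False it returns a
# writable disk cache when possible, otherwise a temporary one.
from virtualenv.app_data import make_app_data

app_data = make_app_data(None, read_only=False)
print(type(app_data).__name__, "transient" if app_data.transient else str(app_data))
app_data.close()
```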
diff --git a/third_party/python/virtualenv/virtualenv/app_data/base.py b/third_party/python/virtualenv/virtualenv/app_data/base.py
new file mode 100644
index 0000000000..4ea54d9f64
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/app_data/base.py
@@ -0,0 +1,95 @@
+"""
+Application data stored by virtualenv.
+"""
+from __future__ import absolute_import, unicode_literals
+
+from abc import ABCMeta, abstractmethod
+from contextlib import contextmanager
+
+import six
+
+from virtualenv.info import IS_ZIPAPP
+
+
+@six.add_metaclass(ABCMeta)
+class AppData(object):
+ """Abstract storage interface for the virtualenv application"""
+
+ @abstractmethod
+ def close(self):
+ """called before virtualenv exits"""
+
+ @abstractmethod
+ def reset(self):
+ """called when the user passes in the reset app data"""
+
+ @abstractmethod
+ def py_info(self, path):
+ raise NotImplementedError
+
+ @abstractmethod
+ def py_info_clear(self):
+ raise NotImplementedError
+
+ @property
+ def can_update(self):
+ raise NotImplementedError
+
+ @abstractmethod
+ def embed_update_log(self, distribution, for_py_version):
+ raise NotImplementedError
+
+ @property
+ def house(self):
+ raise NotImplementedError
+
+ @property
+ def transient(self):
+ raise NotImplementedError
+
+ @abstractmethod
+ def wheel_image(self, for_py_version, name):
+ raise NotImplementedError
+
+ @contextmanager
+ def ensure_extracted(self, path, to_folder=None):
+ """Some paths might be within the zipapp, unzip these to a path on the disk"""
+ if IS_ZIPAPP:
+ with self.extract(path, to_folder) as result:
+ yield result
+ else:
+ yield path
+
+ @abstractmethod
+ @contextmanager
+ def extract(self, path, to_folder):
+ raise NotImplementedError
+
+ @abstractmethod
+ @contextmanager
+ def locked(self, path):
+ raise NotImplementedError
+
+
+@six.add_metaclass(ABCMeta)
+class ContentStore(object):
+ @abstractmethod
+ def exists(self):
+ raise NotImplementedError
+
+ @abstractmethod
+ def read(self):
+ raise NotImplementedError
+
+ @abstractmethod
+ def write(self, content):
+ raise NotImplementedError
+
+ @abstractmethod
+ def remove(self):
+ raise NotImplementedError
+
+ @abstractmethod
+ @contextmanager
+ def locked(self):
+ pass
diff --git a/third_party/python/virtualenv/virtualenv/app_data/na.py b/third_party/python/virtualenv/virtualenv/app_data/na.py
new file mode 100644
index 0000000000..5f7200d3a3
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/app_data/na.py
@@ -0,0 +1,66 @@
+from __future__ import absolute_import, unicode_literals
+
+from contextlib import contextmanager
+
+from .base import AppData, ContentStore
+
+
+class AppDataDisabled(AppData):
+ """No application cache available (most likely as we don't have write permissions)"""
+
+ transient = True
+ can_update = False
+
+ def __init__(self):
+ pass
+
+ error = RuntimeError("no app data folder available, probably no write access to the folder")
+
+ def close(self):
+ """do nothing"""
+
+ def reset(self):
+ """do nothing"""
+
+ def py_info(self, path):
+ return ContentStoreNA()
+
+ def embed_update_log(self, distribution, for_py_version):
+ return ContentStoreNA()
+
+ def extract(self, path, to_folder):
+ raise self.error
+
+ @contextmanager
+ def locked(self, path):
+ """do nothing"""
+ yield
+
+ @property
+ def house(self):
+ raise self.error
+
+ def wheel_image(self, for_py_version, name):
+ raise self.error
+
+ def py_info_clear(self):
+ """"""
+
+
+class ContentStoreNA(ContentStore):
+ def exists(self):
+ return False
+
+ def read(self):
+ """"""
+ return None
+
+ def write(self, content):
+ """"""
+
+ def remove(self):
+ """"""
+
+ @contextmanager
+ def locked(self):
+ yield
diff --git a/third_party/python/virtualenv/virtualenv/app_data/read_only.py b/third_party/python/virtualenv/virtualenv/app_data/read_only.py
new file mode 100644
index 0000000000..858978cd08
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/app_data/read_only.py
@@ -0,0 +1,34 @@
+import os.path
+
+from virtualenv.util.lock import NoOpFileLock
+
+from .via_disk_folder import AppDataDiskFolder, PyInfoStoreDisk
+
+
+class ReadOnlyAppData(AppDataDiskFolder):
+ can_update = False
+
+ def __init__(self, folder): # type: (str) -> None
+ if not os.path.isdir(folder):
+ raise RuntimeError("read-only app data directory {} does not exist".format(folder))
+ self.lock = NoOpFileLock(folder)
+
+ def reset(self): # type: () -> None
+ raise RuntimeError("read-only app data does not support reset")
+
+ def py_info_clear(self): # type: () -> None
+ raise NotImplementedError
+
+ def py_info(self, path):
+ return _PyInfoStoreDiskReadOnly(self.py_info_at, path)
+
+ def embed_update_log(self, distribution, for_py_version):
+ raise NotImplementedError
+
+
+class _PyInfoStoreDiskReadOnly(PyInfoStoreDisk):
+ def write(self, content):
+ raise RuntimeError("read-only app data python info cannot be updated")
+
+
+__all__ = ("ReadOnlyAppData",)
diff --git a/third_party/python/virtualenv/virtualenv/app_data/via_disk_folder.py b/third_party/python/virtualenv/virtualenv/app_data/via_disk_folder.py
new file mode 100644
index 0000000000..2243f1670e
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/app_data/via_disk_folder.py
@@ -0,0 +1,177 @@
+# -*- coding: utf-8 -*-
+"""
+A rough layout of the current storage goes as:
+
+virtualenv-app-data
+├── py - <version> <cache information about python interpreters>
+│   └── *.json/lock
+├── wheel <cache wheels used for seeding>
+│   ├── house
+│   │   └── *.whl <wheels downloaded go here>
+│   └── <python major.minor> -> 3.9
+│       ├── img-<version>
+│       │   └── image
+│       │       └── <install class> -> CopyPipInstall / SymlinkPipInstall
+│       │           └── <wheel name> -> pip-20.1.1-py2.py3-none-any
+│       └── embed
+│           └── 1
+│               └── *.json -> for every distribution contains data about newer embed versions and releases
+└─── unzip <in zip app we cannot refer to some internal files, so first extract them>
+    └── <virtualenv version>
+        ├── py_info.py
+        ├── debug.py
+        └── _virtualenv.py
+"""
+from __future__ import absolute_import, unicode_literals
+
+import json
+import logging
+from abc import ABCMeta
+from contextlib import contextmanager
+from hashlib import sha256
+
+import six
+
+from virtualenv.util.lock import ReentrantFileLock
+from virtualenv.util.path import safe_delete
+from virtualenv.util.six import ensure_text
+from virtualenv.util.zipapp import extract
+from virtualenv.version import __version__
+
+from .base import AppData, ContentStore
+
+
+class AppDataDiskFolder(AppData):
+ """
+ Store the application data on the disk within a folder layout.
+ """
+
+ transient = False
+ can_update = True
+
+ def __init__(self, folder):
+ self.lock = ReentrantFileLock(folder)
+
+ def __repr__(self):
+ return "{}({})".format(type(self).__name__, self.lock.path)
+
+ def __str__(self):
+ return str(self.lock.path)
+
+ def reset(self):
+ logging.debug("reset app data folder %s", self.lock.path)
+ safe_delete(self.lock.path)
+
+ def close(self):
+ """do nothing"""
+
+ @contextmanager
+ def locked(self, path):
+ path_lock = self.lock / path
+ with path_lock:
+ yield path_lock.path
+
+ @contextmanager
+ def extract(self, path, to_folder):
+ if to_folder is not None:
+ root = ReentrantFileLock(to_folder())
+ else:
+ root = self.lock / "unzip" / __version__
+ with root.lock_for_key(path.name):
+ dest = root.path / path.name
+ if not dest.exists():
+ extract(path, dest)
+ yield dest
+
+ @property
+ def py_info_at(self):
+ return self.lock / "py_info" / "1"
+
+ def py_info(self, path):
+ return PyInfoStoreDisk(self.py_info_at, path)
+
+ def py_info_clear(self):
+ """"""
+ py_info_folder = self.py_info_at
+ with py_info_folder:
+ for filename in py_info_folder.path.iterdir():
+ if filename.suffix == ".json":
+ with py_info_folder.lock_for_key(filename.stem):
+ if filename.exists():
+ filename.unlink()
+
+ def embed_update_log(self, distribution, for_py_version):
+ return EmbedDistributionUpdateStoreDisk(self.lock / "wheel" / for_py_version / "embed" / "1", distribution)
+
+ @property
+ def house(self):
+ path = self.lock.path / "wheel" / "house"
+ path.mkdir(parents=True, exist_ok=True)
+ return path
+
+ def wheel_image(self, for_py_version, name):
+ return self.lock.path / "wheel" / for_py_version / "image" / "1" / name
+
+
+@six.add_metaclass(ABCMeta)
+class JSONStoreDisk(ContentStore):
+ def __init__(self, in_folder, key, msg, msg_args):
+ self.in_folder = in_folder
+ self.key = key
+ self.msg = msg
+ self.msg_args = msg_args + (self.file,)
+
+ @property
+ def file(self):
+ return self.in_folder.path / "{}.json".format(self.key)
+
+ def exists(self):
+ return self.file.exists()
+
+ def read(self):
+ data, bad_format = None, False
+ try:
+ data = json.loads(self.file.read_text())
+ logging.debug("got {} from %s".format(self.msg), *self.msg_args)
+ return data
+ except ValueError:
+ bad_format = True
+ except Exception: # noqa
+ pass
+ if bad_format:
+ try:
+ self.remove()
+ except OSError: # reading and writing on the same file may cause race on multiple processes
+ pass
+ return None
+
+ def remove(self):
+ self.file.unlink()
+ logging.debug("removed {} at %s".format(self.msg), *self.msg_args)
+
+ @contextmanager
+ def locked(self):
+ with self.in_folder.lock_for_key(self.key):
+ yield
+
+ def write(self, content):
+ folder = self.file.parent
+ folder.mkdir(parents=True, exist_ok=True)
+ self.file.write_text(ensure_text(json.dumps(content, sort_keys=True, indent=2)))
+ logging.debug("wrote {} at %s".format(self.msg), *self.msg_args)
+
+
+class PyInfoStoreDisk(JSONStoreDisk):
+ def __init__(self, in_folder, path):
+ key = sha256(str(path).encode("utf-8") if six.PY3 else str(path)).hexdigest()
+ super(PyInfoStoreDisk, self).__init__(in_folder, key, "python info of %s", (path,))
+
+
+class EmbedDistributionUpdateStoreDisk(JSONStoreDisk):
+ def __init__(self, in_folder, distribution):
+ super(EmbedDistributionUpdateStoreDisk, self).__init__(
+ in_folder,
+ distribution,
+ "embed update of distribution %s",
+ (distribution,),
+ )
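The per-interpreter cache files are keyed by a hash of the interpreter path, so each interpreter gets exactly one JSON file under ``py_info/1/``. For illustration (the interpreter path is an example):

```python
# Illustration of how PyInfoStoreDisk above derives its cache file name;
# "/usr/bin/python3" is just an example interpreter path.
from hashlib import sha256

interpreter_path = "/usr/bin/python3"
key = sha256(interpreter_path.encode("utf-8")).hexdigest()
print("py_info/1/{}.json".format(key))
```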
diff --git a/third_party/python/virtualenv/virtualenv/app_data/via_tempdir.py b/third_party/python/virtualenv/virtualenv/app_data/via_tempdir.py
new file mode 100644
index 0000000000..112a3fe6bc
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/app_data/via_tempdir.py
@@ -0,0 +1,27 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+from tempfile import mkdtemp
+
+from virtualenv.util.path import safe_delete
+
+from .via_disk_folder import AppDataDiskFolder
+
+
+class TempAppData(AppDataDiskFolder):
+ transient = True
+ can_update = False
+
+ def __init__(self):
+ super(TempAppData, self).__init__(folder=mkdtemp())
+ logging.debug("created temporary app data folder %s", self.lock.path)
+
+ def reset(self):
+ """this is a temporary folder, is already empty to start with"""
+
+ def close(self):
+ logging.debug("remove temporary app data folder %s", self.lock.path)
+ safe_delete(self.lock.path)
+
+ def embed_update_log(self, distribution, for_py_version):
+ raise NotImplementedError
diff --git a/third_party/python/virtualenv/virtualenv/config/__init__.py b/third_party/python/virtualenv/virtualenv/config/__init__.py
new file mode 100644
index 0000000000..01e6d4f49d
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/config/__init__.py
@@ -0,0 +1 @@
+from __future__ import absolute_import, unicode_literals
diff --git a/third_party/python/virtualenv/virtualenv/config/cli/__init__.py b/third_party/python/virtualenv/virtualenv/config/cli/__init__.py
new file mode 100644
index 0000000000..01e6d4f49d
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/config/cli/__init__.py
@@ -0,0 +1 @@
+from __future__ import absolute_import, unicode_literals
diff --git a/third_party/python/virtualenv/virtualenv/config/cli/parser.py b/third_party/python/virtualenv/virtualenv/config/cli/parser.py
new file mode 100644
index 0000000000..eb4db30a70
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/config/cli/parser.py
@@ -0,0 +1,120 @@
+from __future__ import absolute_import, unicode_literals
+
+from argparse import SUPPRESS, ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace
+from collections import OrderedDict
+
+from virtualenv.config.convert import get_type
+
+from ..env_var import get_env_var
+from ..ini import IniConfig
+
+
+class VirtualEnvOptions(Namespace):
+ def __init__(self, **kwargs):
+ super(VirtualEnvOptions, self).__init__(**kwargs)
+ self._src = None
+ self._sources = {}
+
+ def set_src(self, key, value, src):
+ setattr(self, key, value)
+ if src.startswith("env var"):
+ src = "env var"
+ self._sources[key] = src
+
+ def __setattr__(self, key, value):
+ if getattr(self, "_src", None) is not None:
+ self._sources[key] = self._src
+ super(VirtualEnvOptions, self).__setattr__(key, value)
+
+ def get_source(self, key):
+ return self._sources.get(key)
+
+ @property
+ def verbosity(self):
+ if not hasattr(self, "verbose") and not hasattr(self, "quiet"):
+ return None
+ return max(self.verbose - self.quiet, 0)
+
+ def __repr__(self):
+ return "{}({})".format(
+ type(self).__name__,
+ ", ".join("{}={}".format(k, v) for k, v in vars(self).items() if not k.startswith("_")),
+ )
+
+
+class VirtualEnvConfigParser(ArgumentParser):
+ """
+    Custom option parser which updates its defaults by checking the configuration files and environment variables
+ """
+
+ def __init__(self, options=None, *args, **kwargs):
+ self.file_config = IniConfig()
+ self.epilog_list = []
+ kwargs["epilog"] = self.file_config.epilog
+ kwargs["add_help"] = False
+ kwargs["formatter_class"] = HelpFormatter
+ kwargs["prog"] = "virtualenv"
+ super(VirtualEnvConfigParser, self).__init__(*args, **kwargs)
+ self._fixed = set()
+ if options is not None and not isinstance(options, VirtualEnvOptions):
+ raise TypeError("options must be of type VirtualEnvOptions")
+ self.options = VirtualEnvOptions() if options is None else options
+ self._interpreter = None
+ self._app_data = None
+
+ def _fix_defaults(self):
+ for action in self._actions:
+ action_id = id(action)
+ if action_id not in self._fixed:
+ self._fix_default(action)
+ self._fixed.add(action_id)
+
+ def _fix_default(self, action):
+ if hasattr(action, "default") and hasattr(action, "dest") and action.default != SUPPRESS:
+ as_type = get_type(action)
+ names = OrderedDict((i.lstrip("-").replace("-", "_"), None) for i in action.option_strings)
+ outcome = None
+ for name in names:
+ outcome = get_env_var(name, as_type)
+ if outcome is not None:
+ break
+ if outcome is None and self.file_config:
+ for name in names:
+ outcome = self.file_config.get(name, as_type)
+ if outcome is not None:
+ break
+ if outcome is not None:
+ action.default, action.default_source = outcome
+ else:
+ outcome = action.default, "default"
+ self.options.set_src(action.dest, *outcome)
+
+ def enable_help(self):
+ self._fix_defaults()
+ self.add_argument("-h", "--help", action="help", default=SUPPRESS, help="show this help message and exit")
+
+ def parse_known_args(self, args=None, namespace=None):
+ if namespace is None:
+ namespace = self.options
+ elif namespace is not self.options:
+ raise ValueError("can only pass in parser.options")
+ self._fix_defaults()
+ self.options._src = "cli"
+ try:
+ return super(VirtualEnvConfigParser, self).parse_known_args(args, namespace=namespace)
+ finally:
+ self.options._src = None
+
+
+class HelpFormatter(ArgumentDefaultsHelpFormatter):
+ def __init__(self, prog):
+ super(HelpFormatter, self).__init__(prog, max_help_position=32, width=240)
+
+ def _get_help_string(self, action):
+ # noinspection PyProtectedMember
+ text = super(HelpFormatter, self)._get_help_string(action)
+ if hasattr(action, "default_source"):
+ default = " (default: %(default)s)"
+ if text.endswith(default):
+ text = "{} (default: %(default)s -> from %(default_source)s)".format(text[: -len(default)])
+ return text
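The net effect of ``_fix_default`` is a precedence chain: explicit CLI arguments win, then ``VIRTUALENV_<OPTION>`` environment variables, then the ini file, then the built-in default. A small sketch (the destination is an arbitrary example):

```python
# Sketch of the defaults override performed above: the env var supplies the
# default for --clear, which an explicit CLI flag would still override.
import os

from virtualenv import cli_run

os.environ["VIRTUALENV_CLEAR"] = "yes"   # same effect as defaulting --clear on
session = cli_run(["./example-venv"])    # arbitrary example destination
```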
diff --git a/third_party/python/virtualenv/virtualenv/config/convert.py b/third_party/python/virtualenv/virtualenv/config/convert.py
new file mode 100644
index 0000000000..562720a57e
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/config/convert.py
@@ -0,0 +1,98 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+
+
+class TypeData(object):
+ def __init__(self, default_type, as_type):
+ self.default_type = default_type
+ self.as_type = as_type
+
+ def __repr__(self):
+ return "{}(base={}, as={})".format(self.__class__.__name__, self.default_type, self.as_type)
+
+ def convert(self, value):
+ return self.default_type(value)
+
+
+class BoolType(TypeData):
+ BOOLEAN_STATES = {
+ "1": True,
+ "yes": True,
+ "true": True,
+ "on": True,
+ "0": False,
+ "no": False,
+ "false": False,
+ "off": False,
+ }
+
+ def convert(self, value):
+ if value.lower() not in self.BOOLEAN_STATES:
+ raise ValueError("Not a boolean: %s" % value)
+ return self.BOOLEAN_STATES[value.lower()]
+
+
+class NoneType(TypeData):
+ def convert(self, value):
+ if not value:
+ return None
+ return str(value)
+
+
+class ListType(TypeData):
+ def _validate(self):
+ """"""
+
+ def convert(self, value, flatten=True):
+ values = self.split_values(value)
+ result = []
+ for value in values:
+ sub_values = value.split(os.pathsep)
+ result.extend(sub_values)
+ converted = [self.as_type(i) for i in result]
+ return converted
+
+ def split_values(self, value):
+ """Split the provided value into a list.
+
+        Splitting is first attempted on newlines; if the text contains no
+        newlines, it falls back to splitting on commas.
+ """
+ if isinstance(value, (str, bytes)):
+ # Use `splitlines` rather than a custom check for whether there is
+ # more than one line. This ensures that the full `splitlines()`
+ # logic is supported here.
+ values = value.splitlines()
+ if len(values) <= 1:
+ values = value.split(",")
+ values = filter(None, [x.strip() for x in values])
+ else:
+ values = list(value)
+
+ return values
+
+
+def convert(value, as_type, source):
+ """Convert the value as a given type where the value comes from the given source"""
+ try:
+ return as_type.convert(value)
+ except Exception as exception:
+ logging.warning("%s failed to convert %r as %r because %r", source, value, as_type, exception)
+ raise
+
+
+_CONVERT = {bool: BoolType, type(None): NoneType, list: ListType}
+
+
+def get_type(action):
+ default_type = type(action.default)
+ as_type = default_type if action.type is None else action.type
+ return _CONVERT.get(default_type, TypeData)(default_type, as_type)
+
+
+__all__ = (
+ "convert",
+ "get_type",
+)
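The converters translate the string values coming from environment variables or the ini file into the type of the option's default. A few worked examples (values are arbitrary):

```python
# Worked examples for the converters above.
from virtualenv.config.convert import BoolType, ListType

print(BoolType(bool, bool).convert("yes"))              # True
print(BoolType(bool, bool).convert("off"))              # False
print(ListType(list, str).convert("pip,setuptools"))    # single line: split on commas
print(ListType(list, str).convert("pip\nwheel"))        # multi-line: split on newlines
```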
diff --git a/third_party/python/virtualenv/virtualenv/config/env_var.py b/third_party/python/virtualenv/virtualenv/config/env_var.py
new file mode 100644
index 0000000000..259399a705
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/config/env_var.py
@@ -0,0 +1,29 @@
+from __future__ import absolute_import, unicode_literals
+
+import os
+
+from virtualenv.util.six import ensure_str, ensure_text
+
+from .convert import convert
+
+
+def get_env_var(key, as_type):
+ """Get the environment variable option.
+
+ :param key: the config key requested
+ :param as_type: the type we would like to convert it to
+ :return:
+ """
+ environ_key = ensure_str("VIRTUALENV_{}".format(key.upper()))
+ if os.environ.get(environ_key):
+ value = os.environ[environ_key]
+ # noinspection PyBroadException
+ try:
+ source = "env var {}".format(ensure_text(environ_key))
+ as_type = convert(value, as_type, source)
+ return as_type, source
+ except Exception: # note the converter already logs a warning when failures happen
+ pass
+
+
+__all__ = ("get_env_var",)
diff --git a/third_party/python/virtualenv/virtualenv/config/ini.py b/third_party/python/virtualenv/virtualenv/config/ini.py
new file mode 100644
index 0000000000..4dec629a97
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/config/ini.py
@@ -0,0 +1,83 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+
+from appdirs import user_config_dir
+
+from virtualenv.info import PY3
+from virtualenv.util import ConfigParser
+from virtualenv.util.path import Path
+from virtualenv.util.six import ensure_str
+
+from .convert import convert
+
+
+class IniConfig(object):
+ VIRTUALENV_CONFIG_FILE_ENV_VAR = ensure_str("VIRTUALENV_CONFIG_FILE")
+ STATE = {None: "failed to parse", True: "active", False: "missing"}
+
+ section = "virtualenv"
+
+ def __init__(self):
+ config_file = os.environ.get(self.VIRTUALENV_CONFIG_FILE_ENV_VAR, None)
+ self.is_env_var = config_file is not None
+ config_file = (
+ Path(config_file)
+ if config_file is not None
+ else Path(user_config_dir(appname="virtualenv", appauthor="pypa")) / "virtualenv.ini"
+ )
+ self.config_file = config_file
+ self._cache = {}
+
+ exception = None
+ self.has_config_file = None
+ try:
+ self.has_config_file = self.config_file.exists()
+ except OSError as exc:
+ exception = exc
+ else:
+ if self.has_config_file:
+ self.config_file = self.config_file.resolve()
+ self.config_parser = ConfigParser.ConfigParser()
+ try:
+ self._load()
+ self.has_virtualenv_section = self.config_parser.has_section(self.section)
+ except Exception as exc:
+ exception = exc
+ if exception is not None:
+ logging.error("failed to read config file %s because %r", config_file, exception)
+
+ def _load(self):
+ with self.config_file.open("rt") as file_handler:
+ reader = getattr(self.config_parser, "read_file" if PY3 else "readfp")
+ reader(file_handler)
+
+ def get(self, key, as_type):
+ cache_key = key, as_type
+ if cache_key in self._cache:
+ return self._cache[cache_key]
+ # noinspection PyBroadException
+ try:
+ source = "file"
+ raw_value = self.config_parser.get(self.section, key.lower())
+ value = convert(raw_value, as_type, source)
+ result = value, source
+ except Exception:
+ result = None
+ self._cache[cache_key] = result
+ return result
+
+ def __bool__(self):
+ return bool(self.has_config_file) and bool(self.has_virtualenv_section)
+
+ @property
+ def epilog(self):
+ msg = "{}config file {} {} (change{} via env var {})"
+ return msg.format(
+ "\n",
+ self.config_file,
+ self.STATE[self.has_config_file],
+ "d" if self.is_env_var else "",
+ self.VIRTUALENV_CONFIG_FILE_ENV_VAR,
+ )
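``IniConfig`` reads a ``[virtualenv]`` section from ``virtualenv.ini`` in the platform user config directory, or from the file named by ``VIRTUALENV_CONFIG_FILE``. A sketch of such a file, with arbitrary example values, written out from Python:

```python
# Sketch of a config file IniConfig would pick up; option names mirror the CLI
# flags and the values shown are arbitrary examples.
example_ini = "[virtualenv]\nclear = true\ndownload = yes\n"
with open("virtualenv.ini", "w") as handler:  # point VIRTUALENV_CONFIG_FILE here
    handler.write(example_ini)
```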
diff --git a/third_party/python/virtualenv/virtualenv/create/__init__.py b/third_party/python/virtualenv/virtualenv/create/__init__.py
new file mode 100644
index 0000000000..01e6d4f49d
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/__init__.py
@@ -0,0 +1 @@
+from __future__ import absolute_import, unicode_literals
diff --git a/third_party/python/virtualenv/virtualenv/create/creator.py b/third_party/python/virtualenv/virtualenv/create/creator.py
new file mode 100644
index 0000000000..1b4ea69f66
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/creator.py
@@ -0,0 +1,238 @@
+from __future__ import absolute_import, print_function, unicode_literals
+
+import json
+import logging
+import os
+import sys
+from abc import ABCMeta, abstractmethod
+from argparse import ArgumentTypeError
+from ast import literal_eval
+from collections import OrderedDict
+from textwrap import dedent
+
+from six import add_metaclass
+
+from virtualenv.discovery.cached_py_info import LogCmd
+from virtualenv.info import WIN_CPYTHON_2
+from virtualenv.util.path import Path, safe_delete
+from virtualenv.util.six import ensure_str, ensure_text
+from virtualenv.util.subprocess import run_cmd
+from virtualenv.version import __version__
+
+from .pyenv_cfg import PyEnvCfg
+
+HERE = Path(os.path.abspath(__file__)).parent
+DEBUG_SCRIPT = HERE / "debug.py"
+
+
+class CreatorMeta(object):
+ def __init__(self):
+ self.error = None
+
+
+@add_metaclass(ABCMeta)
+class Creator(object):
+ """A class that given a python Interpreter creates a virtual environment"""
+
+ def __init__(self, options, interpreter):
+ """Construct a new virtual environment creator.
+
+        :param options: the CLI options as parsed from :meth:`add_parser_arguments`
+ :param interpreter: the interpreter to create virtual environment from
+ """
+ self.interpreter = interpreter
+ self._debug = None
+ self.dest = Path(options.dest)
+ self.clear = options.clear
+ self.no_vcs_ignore = options.no_vcs_ignore
+ self.pyenv_cfg = PyEnvCfg.from_folder(self.dest)
+ self.app_data = options.app_data
+
+ def __repr__(self):
+ return ensure_str(self.__unicode__())
+
+ def __unicode__(self):
+ return "{}({})".format(self.__class__.__name__, ", ".join("{}={}".format(k, v) for k, v in self._args()))
+
+ def _args(self):
+ return [
+ ("dest", ensure_text(str(self.dest))),
+ ("clear", self.clear),
+ ("no_vcs_ignore", self.no_vcs_ignore),
+ ]
+
+ @classmethod
+ def can_create(cls, interpreter):
+ """Determine if we can create a virtual environment.
+
+ :param interpreter: the interpreter in question
+ :return: ``None`` if we can't create, any other object otherwise that will be forwarded to \
+ :meth:`add_parser_arguments`
+ """
+ return True
+
+ @classmethod
+ def add_parser_arguments(cls, parser, interpreter, meta, app_data):
+ """Add CLI arguments for the creator.
+
+ :param parser: the CLI parser
+ :param app_data: the application data folder
+        :param interpreter: the interpreter we're asked to create a virtual environment for
+ :param meta: value as returned by :meth:`can_create`
+ """
+ parser.add_argument(
+ "dest",
+ help="directory to create virtualenv at",
+ type=cls.validate_dest,
+ )
+ parser.add_argument(
+ "--clear",
+ dest="clear",
+ action="store_true",
+ help="remove the destination directory if it exists before starting (will overwrite files otherwise)",
+ default=False,
+ )
+ parser.add_argument(
+ "--no-vcs-ignore",
+ dest="no_vcs_ignore",
+ action="store_true",
+ help="don't create VCS ignore directive in the destination directory",
+ default=False,
+ )
+
+ @abstractmethod
+ def create(self):
+ """Perform the virtual environment creation."""
+ raise NotImplementedError
+
+ @classmethod
+ def validate_dest(cls, raw_value):
+ """No path separator in the path, only valid characters, and it must be write-able"""
+
+ def non_write_able(dest, value):
+ common = Path(*os.path.commonprefix([value.parts, dest.parts]))
+ raise ArgumentTypeError(
+ "the destination {} is not write-able at {}".format(dest.relative_to(common), common),
+ )
+
+ # the file system must be able to encode
+ # note in newer CPython this is always utf-8 https://www.python.org/dev/peps/pep-0529/
+ encoding = sys.getfilesystemencoding()
+ refused = OrderedDict()
+ kwargs = {"errors": "ignore"} if encoding != "mbcs" else {}
+ for char in ensure_text(raw_value):
+ try:
+ trip = char.encode(encoding, **kwargs).decode(encoding)
+ if trip == char:
+ continue
+ raise ValueError(trip)
+ except ValueError:
+ refused[char] = None
+ if refused:
+ raise ArgumentTypeError(
+ "the file system codec ({}) cannot handle characters {!r} within {!r}".format(
+ encoding,
+ "".join(refused.keys()),
+ raw_value,
+ ),
+ )
+ if os.pathsep in raw_value:
+ raise ArgumentTypeError(
+ "destination {!r} must not contain the path separator ({}) as this would break "
+ "the activation scripts".format(raw_value, os.pathsep),
+ )
+
+ value = Path(raw_value)
+ if value.exists() and value.is_file():
+ raise ArgumentTypeError("the destination {} already exists and is a file".format(value))
+ if (3, 3) <= sys.version_info <= (3, 6):
+ # pre 3.6 resolve is always strict, aka the path must exist; sidestep by using an os.path operation
+ dest = Path(os.path.realpath(raw_value))
+ else:
+ dest = Path(os.path.abspath(str(value))).resolve() # on Windows absolute does not imply resolve so use both
+ value = dest
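+ # walk up from the destination to the first existing ancestor and require that ancestor to be write-able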
+ while dest:
+ if dest.exists():
+ if os.access(ensure_text(str(dest)), os.W_OK):
+ break
+ else:
+ non_write_able(dest, value)
+ base, _ = dest.parent, dest.name
+ if base == dest:
+ non_write_able(dest, value) # pragma: no cover
+ dest = base
+ return str(value)
+
+ def run(self):
+ if self.dest.exists() and self.clear:
+ logging.debug("delete %s", self.dest)
+ safe_delete(self.dest)
+ self.create()
+ self.set_pyenv_cfg()
+ if not self.no_vcs_ignore:
+ self.setup_ignore_vcs()
+
+ def set_pyenv_cfg(self):
+ self.pyenv_cfg.content = OrderedDict()
+ self.pyenv_cfg["home"] = self.interpreter.system_exec_prefix
+ self.pyenv_cfg["implementation"] = self.interpreter.implementation
+ self.pyenv_cfg["version_info"] = ".".join(str(i) for i in self.interpreter.version_info)
+ self.pyenv_cfg["virtualenv"] = __version__
+
+ def setup_ignore_vcs(self):
+ """Generate ignore instructions for version control systems."""
+ # mark this folder to be ignored by VCS, handle https://www.python.org/dev/peps/pep-0610/#registered-vcs
+ git_ignore = self.dest / ".gitignore"
+ if not git_ignore.exists():
+ git_ignore.write_text(
+ dedent(
+ """
+ # created by virtualenv automatically
+ *
+ """,
+ ).lstrip(),
+ )
+ # Mercurial - does not support the .hgignore file inside a subdirectory directly, but only if included via the
+ # subinclude directive from root, at which point one might as well ignore the directory itself, see
+ # https://www.selenic.com/mercurial/hgignore.5.html for more details
+ # Bazaar - does not support ignore files in sub-directories, only at root level via .bzrignore
+ # Subversion - does not support ignore files, requires direct manipulation with the svn tool
+
+ @property
+ def debug(self):
+ """
+ :return: debug information about the virtual environment (only valid after :meth:`create` has run)
+ """
+ if self._debug is None and self.exe is not None:
+ self._debug = get_env_debug_info(self.exe, self.debug_script(), self.app_data)
+ return self._debug
+
+ # noinspection PyMethodMayBeStatic
+ def debug_script(self):
+ return DEBUG_SCRIPT
+
+
+def get_env_debug_info(env_exe, debug_script, app_data):
+ env = os.environ.copy()
+ env.pop(str("PYTHONPATH"), None)
+
+ with app_data.ensure_extracted(debug_script) as debug_script:
+ cmd = [str(env_exe), str(debug_script)]
+ if WIN_CPYTHON_2:
+ cmd = [ensure_text(i) for i in cmd]
+ logging.debug(str("debug via %r"), LogCmd(cmd))
+ code, out, err = run_cmd(cmd)
+
+ # noinspection PyBroadException
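+ # on success the debug script prints JSON; on a non-zero exit it prints a repr() payload, parsed via literal_eval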
+ try:
+ if code != 0:
+ result = literal_eval(out)
+ else:
+ result = json.loads(out)
+ if err:
+ result["err"] = err
+ except Exception as exception:
+ return {"out": out, "err": err, "returncode": code, "exception": repr(exception)}
+ if "sys" in result and "path" in result["sys"]:
+ del result["sys"]["path"][0]
+ return result
diff --git a/third_party/python/virtualenv/virtualenv/create/debug.py b/third_party/python/virtualenv/virtualenv/create/debug.py
new file mode 100644
index 0000000000..0cdaa49412
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/debug.py
@@ -0,0 +1,110 @@
+"""Inspect a target Python interpreter from a virtual-environment perspective"""
+import sys # built-in
+
+PYPY2_WIN = hasattr(sys, "pypy_version_info") and sys.platform != "win32" and sys.version_info[0] == 2
+
+
+def encode_path(value):
+ if value is None:
+ return None
+ if not isinstance(value, (str, bytes)):
+ if isinstance(value, type):
+ value = repr(value)
+ else:
+ value = repr(type(value))
+ if isinstance(value, bytes) and not PYPY2_WIN:
+ value = value.decode(sys.getfilesystemencoding())
+ return value
+
+
+def encode_list_path(value):
+ return [encode_path(i) for i in value]
+
+
+def run():
+ """print debug data about the virtual environment"""
+ try:
+ from collections import OrderedDict
+ except ImportError: # pragma: no cover
+ # this is possible if the standard library cannot be accessed
+ # noinspection PyPep8Naming
+ OrderedDict = dict # pragma: no cover
+ result = OrderedDict([("sys", OrderedDict())])
+ path_keys = (
+ "executable",
+ "_base_executable",
+ "prefix",
+ "base_prefix",
+ "real_prefix",
+ "exec_prefix",
+ "base_exec_prefix",
+ "path",
+ "meta_path",
+ )
+ for key in path_keys:
+ value = getattr(sys, key, None)
+ if isinstance(value, list):
+ value = encode_list_path(value)
+ else:
+ value = encode_path(value)
+ result["sys"][key] = value
+ result["sys"]["fs_encoding"] = sys.getfilesystemencoding()
+ result["sys"]["io_encoding"] = getattr(sys.stdout, "encoding", None)
+ result["version"] = sys.version
+
+ try:
+ import sysconfig
+
+ # https://bugs.python.org/issue22199
+ makefile = getattr(sysconfig, "get_makefile_filename", getattr(sysconfig, "_get_makefile_filename", None))
+ result["makefile_filename"] = encode_path(makefile())
+ except ImportError:
+ pass
+
+ import os # landmark
+
+ result["os"] = repr(os)
+
+ try:
+ # noinspection PyUnresolvedReferences
+ import site # site
+
+ result["site"] = repr(site)
+ except ImportError as exception: # pragma: no cover
+ result["site"] = repr(exception) # pragma: no cover
+
+ try:
+ # noinspection PyUnresolvedReferences
+ import datetime # site
+
+ result["datetime"] = repr(datetime)
+ except ImportError as exception: # pragma: no cover
+ result["datetime"] = repr(exception) # pragma: no cover
+
+ try:
+ # noinspection PyUnresolvedReferences
+ import math # site
+
+ result["math"] = repr(math)
+ except ImportError as exception: # pragma: no cover
+ result["math"] = repr(exception) # pragma: no cover
+
+ # try to print out, this will validate if other core modules are available (json in this case)
+ try:
+ import json
+
+ result["json"] = repr(json)
+ except ImportError as exception:
+ result["json"] = repr(exception)
+ else:
+ try:
+ content = json.dumps(result, indent=2)
+ sys.stdout.write(content)
+ except (ValueError, TypeError) as exception: # pragma: no cover
+ sys.stderr.write(repr(exception))
+ sys.stdout.write(repr(result)) # pragma: no cover
+ raise SystemExit(1) # pragma: no cover
+
+
+if __name__ == "__main__":
+ run()
diff --git a/third_party/python/virtualenv/virtualenv/create/describe.py b/third_party/python/virtualenv/virtualenv/create/describe.py
new file mode 100644
index 0000000000..1e59aaeae0
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/describe.py
@@ -0,0 +1,117 @@
+from __future__ import absolute_import, print_function, unicode_literals
+
+from abc import ABCMeta
+from collections import OrderedDict
+
+from six import add_metaclass
+
+from virtualenv.info import IS_WIN
+from virtualenv.util.path import Path
+from virtualenv.util.six import ensure_text
+
+
+@add_metaclass(ABCMeta)
+class Describe(object):
+ """Given a host interpreter, tell us what the created interpreter might look like"""
+
+ suffix = ".exe" if IS_WIN else ""
+
+ def __init__(self, dest, interpreter):
+ self.interpreter = interpreter
+ self.dest = dest
+ self._stdlib = None
+ self._stdlib_platform = None
+ self._system_stdlib = None
+ self._conf_vars = None
+
+ @property
+ def bin_dir(self):
+ return self.script_dir
+
+ @property
+ def script_dir(self):
+ return self.dest / Path(self.interpreter.distutils_install["scripts"])
+
+ @property
+ def purelib(self):
+ return self.dest / self.interpreter.distutils_install["purelib"]
+
+ @property
+ def platlib(self):
+ return self.dest / self.interpreter.distutils_install["platlib"]
+
+ @property
+ def libs(self):
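+ # ordered de-duplication: platlib first, then purelib (the two may point to the same directory)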
+ return list(OrderedDict(((self.platlib, None), (self.purelib, None))).keys())
+
+ @property
+ def stdlib(self):
+ if self._stdlib is None:
+ self._stdlib = Path(self.interpreter.sysconfig_path("stdlib", config_var=self._config_vars))
+ return self._stdlib
+
+ @property
+ def stdlib_platform(self):
+ if self._stdlib_platform is None:
+ self._stdlib_platform = Path(self.interpreter.sysconfig_path("platstdlib", config_var=self._config_vars))
+ return self._stdlib_platform
+
+ @property
+ def _config_vars(self):
+ if self._conf_vars is None:
+ self._conf_vars = self._calc_config_vars(ensure_text(str(self.dest)))
+ return self._conf_vars
+
+ def _calc_config_vars(self, to):
+ return {
+ k: (to if v.startswith(self.interpreter.prefix) else v) for k, v in self.interpreter.sysconfig_vars.items()
+ }
+
+ @classmethod
+ def can_describe(cls, interpreter):
+ """Returning True means this class knows how the resulting environment will look"""
+ return True
+
+ @property
+ def env_name(self):
+ return ensure_text(self.dest.parts[-1])
+
+ @property
+ def exe(self):
+ return self.bin_dir / "{}{}".format(self.exe_stem(), self.suffix)
+
+ @classmethod
+ def exe_stem(cls):
+ """executable name without suffix - there seems to be no standard way to get this without creating it"""
+ raise NotImplementedError
+
+ def script(self, name):
+ return self.script_dir / "{}{}".format(name, self.suffix)
+
+
+@add_metaclass(ABCMeta)
+class Python2Supports(Describe):
+ @classmethod
+ def can_describe(cls, interpreter):
+ return interpreter.version_info.major == 2 and super(Python2Supports, cls).can_describe(interpreter)
+
+
+@add_metaclass(ABCMeta)
+class Python3Supports(Describe):
+ @classmethod
+ def can_describe(cls, interpreter):
+ return interpreter.version_info.major == 3 and super(Python3Supports, cls).can_describe(interpreter)
+
+
+@add_metaclass(ABCMeta)
+class PosixSupports(Describe):
+ @classmethod
+ def can_describe(cls, interpreter):
+ return interpreter.os == "posix" and super(PosixSupports, cls).can_describe(interpreter)
+
+
+@add_metaclass(ABCMeta)
+class WindowsSupports(Describe):
+ @classmethod
+ def can_describe(cls, interpreter):
+ return interpreter.os == "nt" and super(WindowsSupports, cls).can_describe(interpreter)
diff --git a/third_party/python/virtualenv/virtualenv/create/pyenv_cfg.py b/third_party/python/virtualenv/virtualenv/create/pyenv_cfg.py
new file mode 100644
index 0000000000..1a8d824401
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/pyenv_cfg.py
@@ -0,0 +1,61 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+from collections import OrderedDict
+
+from virtualenv.util.six import ensure_text
+
+
+class PyEnvCfg(object):
+ def __init__(self, content, path):
+ self.content = content
+ self.path = path
+
+ @classmethod
+ def from_folder(cls, folder):
+ return cls.from_file(folder / "pyvenv.cfg")
+
+ @classmethod
+ def from_file(cls, path):
+ content = cls._read_values(path) if path.exists() else OrderedDict()
+ return PyEnvCfg(content, path)
+
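+ # pyvenv.cfg is a flat "key = value" file; Creator.set_pyenv_cfg writes entries such as (values illustrative):
+ #   home = /usr/bin
+ #   implementation = CPython
+ #   version_info = 3.8.5
+ #   virtualenv = 20.x
+ #   include-system-site-packages = false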
+ @staticmethod
+ def _read_values(path):
+ content = OrderedDict()
+ for line in path.read_text(encoding="utf-8").splitlines():
+ equals_at = line.index("=")
+ key = line[:equals_at].strip()
+ value = line[equals_at + 1 :].strip()
+ content[key] = value
+ return content
+
+ def write(self):
+ logging.debug("write %s", ensure_text(str(self.path)))
+ text = ""
+ for key, value in self.content.items():
+ line = "{} = {}".format(key, value)
+ logging.debug("\t%s", line)
+ text += line
+ text += "\n"
+ self.path.write_text(text, encoding="utf-8")
+
+ def refresh(self):
+ self.content = self._read_values(self.path)
+ return self.content
+
+ def __setitem__(self, key, value):
+ self.content[key] = value
+
+ def __getitem__(self, key):
+ return self.content[key]
+
+ def __contains__(self, item):
+ return item in self.content
+
+ def update(self, other):
+ self.content.update(other)
+ return self
+
+ def __repr__(self):
+ return "{}(path={})".format(self.__class__.__name__, self.path)
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/__init__.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/__init__.py
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/_virtualenv.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/_virtualenv.py
new file mode 100644
index 0000000000..da98b827a2
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/_virtualenv.py
@@ -0,0 +1,130 @@
+"""Patches that are applied at runtime to the virtual environment"""
+# -*- coding: utf-8 -*-
+
+import os
+import sys
+
+VIRTUALENV_PATCH_FILE = os.path.join(__file__)
+
+
+def patch_dist(dist):
+ """
+ Distutils allows the user to configure some arguments via a configuration file:
+ https://docs.python.org/3/install/index.html#distutils-configuration-files
+
+ Some of these arguments, though, don't make sense in the context of a virtual environment, so let's fix them up.
+ """
+ # we cannot allow some install config as that would get packages installed outside of the virtual environment
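+ # e.g. a user-level distutils config file (such as ~/.pydistutils.cfg) containing
+ #   [install]
+ #   prefix = /usr/local
+ # would otherwise redirect installs outside of the virtual environment (illustrative example)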
+ old_parse_config_files = dist.Distribution.parse_config_files
+
+ def parse_config_files(self, *args, **kwargs):
+ result = old_parse_config_files(self, *args, **kwargs)
+ install = self.get_option_dict("install")
+
+ if "prefix" in install: # the prefix governs where to install the libraries
+ install["prefix"] = VIRTUALENV_PATCH_FILE, os.path.abspath(sys.prefix)
+ for base in ("purelib", "platlib", "headers", "scripts", "data"):
+ key = "install_{}".format(base)
+ if key in install: # do not allow global configs to hijack venv paths
+ install.pop(key, None)
+ return result
+
+ dist.Distribution.parse_config_files = parse_config_files
+
+
+# Import hook that patches some modules to ignore configuration values that break package installation in case
+# of virtual environments.
+_DISTUTILS_PATCH = "distutils.dist", "setuptools.dist"
+if sys.version_info > (3, 4):
+ # https://docs.python.org/3/library/importlib.html#setting-up-an-importer
+ from functools import partial
+ from importlib.abc import MetaPathFinder
+ from importlib.util import find_spec
+
+ class _Finder(MetaPathFinder):
+ """A meta path finder that allows patching the imported distutils modules"""
+
+ fullname = None
+
+ # lock[0] is threading.Lock(), but initialized lazily to avoid importing threading very early at startup,
+ # because there are gevent-based applications that need to be first to import threading by themselves.
+ # See https://github.com/pypa/virtualenv/issues/1895 for details.
+ lock = []
+
+ def find_spec(self, fullname, path, target=None):
+ if fullname in _DISTUTILS_PATCH and self.fullname is None:
+ # initialize lock[0] lazily
+ if len(self.lock) == 0:
+ import threading
+
+ lock = threading.Lock()
+ # there is a possibility that two threads T1 and T2 run into find_spec simultaneously,
+ # observe .lock as empty, and both proceed with this initialization. However, due to the GIL,
+ # the list.append() operation is atomic, so only one of the threads will "win" and put the lock
+ # - the one that every thread will then use - into .lock[0].
+ # https://docs.python.org/3/faq/library.html#what-kinds-of-global-value-mutation-are-thread-safe
+ self.lock.append(lock)
+
+ with self.lock[0]:
+ self.fullname = fullname
+ try:
+ spec = find_spec(fullname, path)
+ if spec is not None:
+ # https://www.python.org/dev/peps/pep-0451/#how-loading-will-work
+ is_new_api = hasattr(spec.loader, "exec_module")
+ func_name = "exec_module" if is_new_api else "load_module"
+ old = getattr(spec.loader, func_name)
+ func = self.exec_module if is_new_api else self.load_module
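+ # wrap the loader's hook so that patch_dist runs right after a distutils/setuptools dist module is loaded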
+ if old is not func:
+ try:
+ setattr(spec.loader, func_name, partial(func, old))
+ except AttributeError:
+ pass # C-extension loaders (e.g. zipimporter on Python < 3.7) are read-only
+ return spec
+ finally:
+ self.fullname = None
+
+ @staticmethod
+ def exec_module(old, module):
+ old(module)
+ if module.__name__ in _DISTUTILS_PATCH:
+ patch_dist(module)
+
+ @staticmethod
+ def load_module(old, name):
+ module = old(name)
+ if module.__name__ in _DISTUTILS_PATCH:
+ patch_dist(module)
+ return module
+
+ sys.meta_path.insert(0, _Finder())
+else:
+ # https://www.python.org/dev/peps/pep-0302/
+ from imp import find_module
+ from pkgutil import ImpImporter, ImpLoader
+
+ class _VirtualenvImporter(object, ImpImporter):
+ def __init__(self, path=None):
+ object.__init__(self)
+ ImpImporter.__init__(self, path)
+
+ def find_module(self, fullname, path=None):
+ if fullname in _DISTUTILS_PATCH:
+ try:
+ return _VirtualenvLoader(fullname, *find_module(fullname.split(".")[-1], path))
+ except ImportError:
+ pass
+ return None
+
+ class _VirtualenvLoader(object, ImpLoader):
+ def __init__(self, fullname, file, filename, etc):
+ object.__init__(self)
+ ImpLoader.__init__(self, fullname, file, filename, etc)
+
+ def load_module(self, fullname):
+ module = super(_VirtualenvLoader, self).load_module(fullname)
+ patch_dist(module)
+ module.__loader__ = None # distlib fallback
+ return module
+
+ sys.meta_path.append(_VirtualenvImporter())
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/api.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/api.py
new file mode 100644
index 0000000000..6f296f452d
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/api.py
@@ -0,0 +1,112 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+from abc import ABCMeta
+
+from six import add_metaclass
+
+from virtualenv.info import fs_supports_symlink
+from virtualenv.util.path import Path
+from virtualenv.util.six import ensure_text
+
+from ..creator import Creator, CreatorMeta
+
+
+class ViaGlobalRefMeta(CreatorMeta):
+ def __init__(self):
+ super(ViaGlobalRefMeta, self).__init__()
+ self.copy_error = None
+ self.symlink_error = None
+ if not fs_supports_symlink():
+ self.symlink_error = "the filesystem does not support symlinks"
+
+ @property
+ def can_copy(self):
+ return not self.copy_error
+
+ @property
+ def can_symlink(self):
+ return not self.symlink_error
+
+
+@add_metaclass(ABCMeta)
+class ViaGlobalRefApi(Creator):
+ def __init__(self, options, interpreter):
+ super(ViaGlobalRefApi, self).__init__(options, interpreter)
+ self.symlinks = self._should_symlink(options)
+ self.enable_system_site_package = options.system_site
+
+ @staticmethod
+ def _should_symlink(options):
+ # The priority of where the option is set follows the order: CLI, env var, file, hardcoded.
+ # If both are set at the same level, copy is preferred over symlink.
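+ # e.g. --copies passed on the CLI wins over a symlinks setting coming from a config file (illustrative)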
+ copies, symlinks = getattr(options, "copies", False), getattr(options, "symlinks", False)
+ copy_src, sym_src = options.get_source("copies"), options.get_source("symlinks")
+ for level in ["cli", "env var", "file", "default"]:
+ s_opt = symlinks if sym_src == level else None
+ c_opt = copies if copy_src == level else None
+ if s_opt is True and c_opt is True:
+ return False
+ if s_opt is True:
+ return True
+ if c_opt is True:
+ return False
+ return False # fallback to copy
+
+ @classmethod
+ def add_parser_arguments(cls, parser, interpreter, meta, app_data):
+ super(ViaGlobalRefApi, cls).add_parser_arguments(parser, interpreter, meta, app_data)
+ parser.add_argument(
+ "--system-site-packages",
+ default=False,
+ action="store_true",
+ dest="system_site",
+ help="give the virtual environment access to the system site-packages dir",
+ )
+ group = parser.add_mutually_exclusive_group()
+ if not meta.can_symlink and not meta.can_copy:
+ raise RuntimeError("neither symlink nor copy method is supported")
+ if meta.can_symlink:
+ group.add_argument(
+ "--symlinks",
+ default=True,
+ action="store_true",
+ dest="symlinks",
+ help="try to use symlinks rather than copies, when symlinks are not the default for the platform",
+ )
+ if meta.can_copy:
+ group.add_argument(
+ "--copies",
+ "--always-copy",
+ default=not meta.can_symlink,
+ action="store_true",
+ dest="copies",
+ help="try to use copies rather than symlinks, even when symlinks are the default for the platform",
+ )
+
+ def create(self):
+ self.install_patch()
+
+ def install_patch(self):
+ text = self.env_patch_text()
+ if text:
+ pth = self.purelib / "_virtualenv.pth"
+ logging.debug("create virtualenv import hook file %s", ensure_text(str(pth)))
+ pth.write_text("import _virtualenv")
+ dest_path = self.purelib / "_virtualenv.py"
+ logging.debug("create %s", ensure_text(str(dest_path)))
+ dest_path.write_text(text)
+
+ def env_patch_text(self):
+ """Patch the distutils package to not be derailed by its configuration files"""
+ with self.app_data.ensure_extracted(Path(__file__).parent / "_virtualenv.py") as resolved_path:
+ text = resolved_path.read_text()
+ return text.replace('"__SCRIPT_DIR__"', repr(os.path.relpath(str(self.script_dir), str(self.purelib))))
+
+ def _args(self):
+ return super(ViaGlobalRefApi, self)._args() + [("global", self.enable_system_site_package)]
+
+ def set_pyenv_cfg(self):
+ super(ViaGlobalRefApi, self).set_pyenv_cfg()
+ self.pyenv_cfg["include-system-site-packages"] = "true" if self.enable_system_site_package else "false"
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/__init__.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/__init__.py
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/builtin_way.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/builtin_way.py
new file mode 100644
index 0000000000..279ee8095a
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/builtin_way.py
@@ -0,0 +1,17 @@
+from __future__ import absolute_import, unicode_literals
+
+from abc import ABCMeta
+
+from six import add_metaclass
+
+from virtualenv.create.creator import Creator
+from virtualenv.create.describe import Describe
+
+
+@add_metaclass(ABCMeta)
+class VirtualenvBuiltin(Creator, Describe):
+ """A creator that performs its operations itself, without delegation; if we can create it we can also describe it"""
+
+ def __init__(self, options, interpreter):
+ Creator.__init__(self, options, interpreter)
+ Describe.__init__(self, self.dest, interpreter)
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/__init__.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/__init__.py
new file mode 100644
index 0000000000..01e6d4f49d
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/__init__.py
@@ -0,0 +1 @@
+from __future__ import absolute_import, unicode_literals
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/common.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/common.py
new file mode 100644
index 0000000000..c93f9f31e6
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/common.py
@@ -0,0 +1,65 @@
+from __future__ import absolute_import, unicode_literals
+
+from abc import ABCMeta
+from collections import OrderedDict
+
+from six import add_metaclass
+
+from virtualenv.create.describe import PosixSupports, WindowsSupports
+from virtualenv.create.via_global_ref.builtin.ref import RefMust, RefWhen
+from virtualenv.util.path import Path
+
+from ..via_global_self_do import ViaGlobalRefVirtualenvBuiltin
+
+
+@add_metaclass(ABCMeta)
+class CPython(ViaGlobalRefVirtualenvBuiltin):
+ @classmethod
+ def can_describe(cls, interpreter):
+ return interpreter.implementation == "CPython" and super(CPython, cls).can_describe(interpreter)
+
+ @classmethod
+ def exe_stem(cls):
+ return "python"
+
+
+@add_metaclass(ABCMeta)
+class CPythonPosix(CPython, PosixSupports):
+ """Create a CPython virtual environment on POSIX platforms"""
+
+ @classmethod
+ def _executables(cls, interpreter):
+ host_exe = Path(interpreter.system_executable)
+ major, minor = interpreter.version_info.major, interpreter.version_info.minor
+ targets = OrderedDict(
+ (i, None) for i in ["python", "python{}".format(major), "python{}.{}".format(major, minor), host_exe.name]
+ )
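+ # aliases created for the interpreter: python, pythonX, pythonX.Y and the host executable's own name (de-duplicated, order preserved)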
+ must = RefMust.COPY if interpreter.version_info.major == 2 else RefMust.NA
+ yield host_exe, list(targets.keys()), must, RefWhen.ANY
+
+
+@add_metaclass(ABCMeta)
+class CPythonWindows(CPython, WindowsSupports):
+ @classmethod
+ def _executables(cls, interpreter):
+ # symlinking the python executables does not work reliably, so always copy instead
+ # - https://bugs.python.org/issue42013
+ # - venv
+ host = cls.host_python(interpreter)
+ for path in (host.parent / n for n in {"python.exe", host.name}):
+ yield host, [path.name], RefMust.COPY, RefWhen.ANY
+ # for more info on pythonw.exe see https://stackoverflow.com/a/30313091
+ python_w = host.parent / "pythonw.exe"
+ yield python_w, [python_w.name], RefMust.COPY, RefWhen.ANY
+
+ @classmethod
+ def host_python(cls, interpreter):
+ return Path(interpreter.system_executable)
+
+
+def is_mac_os_framework(interpreter):
+ if interpreter.platform == "darwin":
+ framework_var = interpreter.sysconfig_vars.get("PYTHONFRAMEWORK")
+ value = "Python3" if interpreter.version_info.major == 3 else "Python"
+ return framework_var == value
+ return False
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython2.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython2.py
new file mode 100644
index 0000000000..555b0c50fc
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython2.py
@@ -0,0 +1,102 @@
+from __future__ import absolute_import, unicode_literals
+
+import abc
+import logging
+
+from six import add_metaclass
+
+from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest
+from virtualenv.util.path import Path
+
+from ..python2.python2 import Python2
+from .common import CPython, CPythonPosix, CPythonWindows, is_mac_os_framework
+
+
+@add_metaclass(abc.ABCMeta)
+class CPython2(CPython, Python2):
+ """Create a CPython version 2 virtual environment"""
+
+ @classmethod
+ def sources(cls, interpreter):
+ for src in super(CPython2, cls).sources(interpreter):
+ yield src
+ # include folder needed on Python 2 as we don't have pyvenv.cfg
+ host_include_marker = cls.host_include_marker(interpreter)
+ if host_include_marker.exists():
+ yield PathRefToDest(host_include_marker.parent, dest=lambda self, _: self.include)
+
+ @classmethod
+ def needs_stdlib_py_module(cls):
+ return False
+
+ @classmethod
+ def host_include_marker(cls, interpreter):
+ return Path(interpreter.system_include) / "Python.h"
+
+ @property
+ def include(self):
+ # the pattern includes the distribution name at the end too; remove that via the parent call
+ return (self.dest / self.interpreter.distutils_install["headers"]).parent
+
+ @classmethod
+ def modules(cls):
+ return [
+ "os", # landmark to set sys.prefix
+ ]
+
+ def ensure_directories(self):
+ dirs = super(CPython2, self).ensure_directories()
+ host_include_marker = self.host_include_marker(self.interpreter)
+ if host_include_marker.exists():
+ dirs.add(self.include.parent)
+ else:
+ logging.debug("no include folders as can't find include marker %s", host_include_marker)
+ return dirs
+
+
+@add_metaclass(abc.ABCMeta)
+class CPython2PosixBase(CPython2, CPythonPosix):
+ """common to macOS framework builds and other POSIX CPython2"""
+
+ @classmethod
+ def sources(cls, interpreter):
+ for src in super(CPython2PosixBase, cls).sources(interpreter):
+ yield src
+
+ # check if the makefile exists and if so make it available under the virtual environment
+ make_file = Path(interpreter.sysconfig["makefile_filename"])
+ if make_file.exists() and str(make_file).startswith(interpreter.prefix):
+ under_prefix = make_file.relative_to(Path(interpreter.prefix))
+ yield PathRefToDest(make_file, dest=lambda self, s: self.dest / under_prefix)
+
+
+class CPython2Posix(CPython2PosixBase):
+ """CPython 2 on POSIX (excluding macOS framework builds)"""
+
+ @classmethod
+ def can_describe(cls, interpreter):
+ return is_mac_os_framework(interpreter) is False and super(CPython2Posix, cls).can_describe(interpreter)
+
+ @classmethod
+ def sources(cls, interpreter):
+ for src in super(CPython2Posix, cls).sources(interpreter):
+ yield src
+ # landmark for exec_prefix
+ exec_marker_file, to_path, _ = cls.from_stdlib(cls.mappings(interpreter), "lib-dynload")
+ yield PathRefToDest(exec_marker_file, dest=to_path)
+
+
+class CPython2Windows(CPython2, CPythonWindows):
+ """CPython 2 on Windows"""
+
+ @classmethod
+ def sources(cls, interpreter):
+ for src in super(CPython2Windows, cls).sources(interpreter):
+ yield src
+ py27_dll = Path(interpreter.system_executable).parent / "python27.dll"
+ if py27_dll.exists(): # this might be global in the Windows folder in which case it's alright to be missing
+ yield PathRefToDest(py27_dll, dest=cls.to_bin)
+
+ libs = Path(interpreter.system_prefix) / "libs"
+ if libs.exists():
+ yield PathRefToDest(libs, dest=lambda self, s: self.dest / s.name)
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py
new file mode 100644
index 0000000000..19385095f7
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py
@@ -0,0 +1,84 @@
+from __future__ import absolute_import, unicode_literals
+
+import abc
+from textwrap import dedent
+
+from six import add_metaclass
+
+from virtualenv.create.describe import Python3Supports
+from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest
+from virtualenv.create.via_global_ref.store import is_store_python
+from virtualenv.util.path import Path
+
+from .common import CPython, CPythonPosix, CPythonWindows, is_mac_os_framework
+
+
+@add_metaclass(abc.ABCMeta)
+class CPython3(CPython, Python3Supports):
+ """"""
+
+
+class CPython3Posix(CPythonPosix, CPython3):
+ @classmethod
+ def can_describe(cls, interpreter):
+ return is_mac_os_framework(interpreter) is False and super(CPython3Posix, cls).can_describe(interpreter)
+
+ def env_patch_text(self):
+ text = super(CPython3Posix, self).env_patch_text()
+ if self.pyvenv_launch_patch_active(self.interpreter):
+ text += dedent(
+ """
+ # for https://github.com/python/cpython/pull/9516, see https://github.com/pypa/virtualenv/issues/1704
+ import os
+ if "__PYVENV_LAUNCHER__" in os.environ:
+ del os.environ["__PYVENV_LAUNCHER__"]
+ """,
+ )
+ return text
+
+ @classmethod
+ def pyvenv_launch_patch_active(cls, interpreter):
+ ver = interpreter.version_info
+ return interpreter.platform == "darwin" and ((3, 7, 8) > ver >= (3, 7) or (3, 8, 3) > ver >= (3, 8))
+
+
+class CPython3Windows(CPythonWindows, CPython3):
+ """"""
+
+ @classmethod
+ def setup_meta(cls, interpreter):
+ if is_store_python(interpreter): # store python is not supported here
+ return None
+ return super(CPython3Windows, cls).setup_meta(interpreter)
+
+ @classmethod
+ def sources(cls, interpreter):
+ for src in super(CPython3Windows, cls).sources(interpreter):
+ yield src
+ if not cls.venv_37p(interpreter):
+ for src in cls.include_dll_and_pyd(interpreter):
+ yield src
+
+ @staticmethod
+ def venv_37p(interpreter):
+ return interpreter.version_info.minor >= 7
+
+ @classmethod
+ def host_python(cls, interpreter):
+ if cls.venv_37p(interpreter):
+ # starting with CPython 3.7 Windows ships with a venvlauncher.exe that avoids the need for dll/pyd copies
+ # it also means the wrapper must be copied to avoid bugs such as https://bugs.python.org/issue42013
+ return Path(interpreter.system_stdlib) / "venv" / "scripts" / "nt" / "python.exe"
+ return super(CPython3Windows, cls).host_python(interpreter)
+
+ @classmethod
+ def include_dll_and_pyd(cls, interpreter):
+ dll_folder = Path(interpreter.system_prefix) / "DLLs"
+ host_exe_folder = Path(interpreter.system_executable).parent
+ for folder in [host_exe_folder, dll_folder]:
+ for file in folder.iterdir():
+ if file.suffix in (".pyd", ".dll"):
+ yield PathRefToDest(file, dest=cls.to_dll_and_pyd)
+
+ def to_dll_and_pyd(self, src):
+ return self.bin_dir / src.name
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py
new file mode 100644
index 0000000000..53f65e3418
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py
@@ -0,0 +1,298 @@
+# -*- coding: utf-8 -*-
+"""The Apple Framework builds require their own customization"""
+import logging
+import os
+import struct
+import subprocess
+from abc import ABCMeta, abstractmethod
+from textwrap import dedent
+
+from six import add_metaclass
+
+from virtualenv.create.via_global_ref.builtin.ref import ExePathRefToDest, PathRefToDest, RefMust
+from virtualenv.util.path import Path
+from virtualenv.util.six import ensure_text
+
+from .common import CPython, CPythonPosix, is_mac_os_framework
+from .cpython2 import CPython2PosixBase
+from .cpython3 import CPython3
+
+
+@add_metaclass(ABCMeta)
+class CPythonmacOsFramework(CPython):
+ @classmethod
+ def can_describe(cls, interpreter):
+ return is_mac_os_framework(interpreter) and super(CPythonmacOsFramework, cls).can_describe(interpreter)
+
+ @classmethod
+ def sources(cls, interpreter):
+ for src in super(CPythonmacOsFramework, cls).sources(interpreter):
+ yield src
+ # add a symlink to the host python image
+ exe = cls.image_ref(interpreter)
+ ref = PathRefToDest(exe, dest=lambda self, _: self.dest / ".Python", must=RefMust.SYMLINK)
+ yield ref
+
+ def create(self):
+ super(CPythonmacOsFramework, self).create()
+
+ # change the install_name of the copied python executables
+ target = "@executable_path/../.Python"
+ current = self.current_mach_o_image_path()
+ for src in self._sources:
+ if isinstance(src, ExePathRefToDest):
+ if src.must == RefMust.COPY or not self.symlinks:
+ exes = [self.bin_dir / src.base]
+ if not self.symlinks:
+ exes.extend(self.bin_dir / a for a in src.aliases)
+ for exe in exes:
+ fix_mach_o(str(exe), current, target, self.interpreter.max_size)
+
+ @classmethod
+ def _executables(cls, interpreter):
+ for _, targets, must, when in super(CPythonmacOsFramework, cls)._executables(interpreter):
+ # Make sure we use the embedded interpreter inside the framework, even if sys.executable points to the
+ # stub executable in ${sys.prefix}/bin.
+ # See http://groups.google.com/group/python-virtualenv/browse_thread/thread/17cab2f85da75951
+ fixed_host_exe = Path(interpreter.prefix) / "Resources" / "Python.app" / "Contents" / "MacOS" / "Python"
+ yield fixed_host_exe, targets, must, when
+
+ @abstractmethod
+ def current_mach_o_image_path(self):
+ raise NotImplementedError
+
+ @classmethod
+ def image_ref(cls, interpreter):
+ raise NotImplementedError
+
+
+class CPython2macOsFramework(CPythonmacOsFramework, CPython2PosixBase):
+ @classmethod
+ def image_ref(cls, interpreter):
+ return Path(interpreter.prefix) / "Python"
+
+ def current_mach_o_image_path(self):
+ return os.path.join(self.interpreter.prefix, "Python")
+
+ @classmethod
+ def sources(cls, interpreter):
+ for src in super(CPython2macOsFramework, cls).sources(interpreter):
+ yield src
+ # landmark for exec_prefix
+ exec_marker_file, to_path, _ = cls.from_stdlib(cls.mappings(interpreter), "lib-dynload")
+ yield PathRefToDest(exec_marker_file, dest=to_path)
+
+ @property
+ def reload_code(self):
+ result = super(CPython2macOsFramework, self).reload_code
+ result = dedent(
+ """
+ # the bundled site.py always adds the global site-packages if we're on a Python framework build, escape this
+ import sysconfig
+ config = sysconfig.get_config_vars()
+ before = config["PYTHONFRAMEWORK"]
+ try:
+ config["PYTHONFRAMEWORK"] = ""
+ {}
+ finally:
+ config["PYTHONFRAMEWORK"] = before
+ """.format(
+ result,
+ ),
+ )
+ return result
+
+
+class CPython3macOsFramework(CPythonmacOsFramework, CPython3, CPythonPosix):
+ @classmethod
+ def image_ref(cls, interpreter):
+ return Path(interpreter.prefix) / "Python3"
+
+ def current_mach_o_image_path(self):
+ return "@executable_path/../../../../Python3"
+
+ @property
+ def reload_code(self):
+ result = super(CPython3macOsFramework, self).reload_code
+ result = dedent(
+ """
+ # the bundled site.py always adds the global site-packages if we're on a Python framework build, escape this
+ import sys
+ before = sys._framework
+ try:
+ sys._framework = None
+ {}
+ finally:
+ sys._framework = before
+ """.format(
+ result,
+ ),
+ )
+ return result
+
+
+def fix_mach_o(exe, current, new, max_size):
+ """
+ https://en.wikipedia.org/wiki/Mach-O
+
+ Mach-O, short for Mach object file format, is a file format for executables, object code, shared libraries,
+ dynamically-loaded code, and core dumps. A replacement for the a.out format, Mach-O offers more extensibility and
+ faster access to information in the symbol table.
+
+ Each Mach-O file is made up of one Mach-O header, followed by a series of load commands, followed by one or more
+ segments, each of which contains between 0 and 255 sections. Mach-O uses the REL relocation format to handle
+ references to symbols. When looking up symbols Mach-O uses a two-level namespace that encodes each symbol into an
+ 'object/symbol name' pair that is then linearly searched for by first the object and then the symbol name.
+
+ The basic structure—a list of variable-length "load commands" that reference pages of data elsewhere in the file—was
+ also used in the executable file format for Accent. The Accent file format was in turn, based on an idea from Spice
+ Lisp.
+
+ With the introduction of Mac OS X 10.6 platform the Mach-O file underwent a significant modification that causes
+ binaries compiled on a computer running 10.6 or later to be (by default) executable only on computers running Mac
+ OS X 10.6 or later. The difference stems from load commands that the dynamic linker, in previous Mac OS X versions,
+ does not understand. Another significant change to the Mach-O format is the change in how the Link Edit tables
+ (found in the __LINKEDIT section) function. In 10.6 these new Link Edit tables are compressed by removing unused and
+ unneeded bits of information, however Mac OS X 10.5 and earlier cannot read this new Link Edit table format.
+ """
+ try:
+ logging.debug(u"change Mach-O for %s from %s to %s", ensure_text(exe), current, ensure_text(new))
+ _builtin_change_mach_o(max_size)(exe, current, new)
+ except Exception as e:
+ logging.warning("Could not call _builtin_change_mach_o: %s. " "Trying to call install_name_tool instead.", e)
+ try:
+ cmd = ["install_name_tool", "-change", current, new, exe]
+ subprocess.check_call(cmd)
+ except Exception:
+ logging.fatal("Could not call install_name_tool -- you must " "have Apple's development tools installed")
+ raise
+
+
+def _builtin_change_mach_o(maxint):
+ MH_MAGIC = 0xFEEDFACE
+ MH_CIGAM = 0xCEFAEDFE
+ MH_MAGIC_64 = 0xFEEDFACF
+ MH_CIGAM_64 = 0xCFFAEDFE
+ FAT_MAGIC = 0xCAFEBABE
+ BIG_ENDIAN = ">"
+ LITTLE_ENDIAN = "<"
+ LC_LOAD_DYLIB = 0xC
+
+ class FileView(object):
+ """A proxy for file-like objects that exposes a given view of a file. Modified from macholib."""
+
+ def __init__(self, file_obj, start=0, size=maxint):
+ if isinstance(file_obj, FileView):
+ self._file_obj = file_obj._file_obj
+ else:
+ self._file_obj = file_obj
+ self._start = start
+ self._end = start + size
+ self._pos = 0
+
+ def __repr__(self):
+ return "<fileview [{:d}, {:d}] {!r}>".format(self._start, self._end, self._file_obj)
+
+ def tell(self):
+ return self._pos
+
+ def _checkwindow(self, seek_to, op):
+ if not (self._start <= seek_to <= self._end):
+ msg = "{} to offset {:d} is outside window [{:d}, {:d}]".format(op, seek_to, self._start, self._end)
+ raise IOError(msg)
+
+ def seek(self, offset, whence=0):
+ seek_to = offset
+ if whence == os.SEEK_SET:
+ seek_to += self._start
+ elif whence == os.SEEK_CUR:
+ seek_to += self._start + self._pos
+ elif whence == os.SEEK_END:
+ seek_to += self._end
+ else:
+ raise IOError("Invalid whence argument to seek: {!r}".format(whence))
+ self._checkwindow(seek_to, "seek")
+ self._file_obj.seek(seek_to)
+ self._pos = seek_to - self._start
+
+ def write(self, content):
+ here = self._start + self._pos
+ self._checkwindow(here, "write")
+ self._checkwindow(here + len(content), "write")
+ self._file_obj.seek(here, os.SEEK_SET)
+ self._file_obj.write(content)
+ self._pos += len(content)
+
+ def read(self, size=maxint):
+ assert size >= 0
+ here = self._start + self._pos
+ self._checkwindow(here, "read")
+ size = min(size, self._end - here)
+ self._file_obj.seek(here, os.SEEK_SET)
+ read_bytes = self._file_obj.read(size)
+ self._pos += len(read_bytes)
+ return read_bytes
+
+ def read_data(file, endian, num=1):
+ """Read a given number of 32-bit unsigned integers from the given file with the given endianness."""
+ res = struct.unpack(endian + "L" * num, file.read(num * 4))
+ if len(res) == 1:
+ return res[0]
+ return res
+
+ def mach_o_change(at_path, what, value):
+ """Replace a given name (what) in any LC_LOAD_DYLIB command found in the given binary with a new name (value),
+ provided it's shorter."""
+
+ def do_macho(file, bits, endian):
+ # Read Mach-O header (the magic number is assumed read by the caller)
+ cpu_type, cpu_sub_type, file_type, n_commands, size_of_commands, flags = read_data(file, endian, 6)
+ # the 64-bit header has one more field.
+ if bits == 64:
+ read_data(file, endian)
+ # The header is followed by n commands
+ for _ in range(n_commands):
+ where = file.tell()
+ # Read command header
+ cmd, cmd_size = read_data(file, endian, 2)
+ if cmd == LC_LOAD_DYLIB:
+ # The first data field in LC_LOAD_DYLIB commands is the offset of the name, starting from the
+ # beginning of the command.
+ name_offset = read_data(file, endian)
+ file.seek(where + name_offset, os.SEEK_SET)
+ # Read the NUL terminated string
+ load = file.read(cmd_size - name_offset).decode()
+ load = load[: load.index("\0")]
+ # If the string is what is being replaced, overwrite it.
+ if load == what:
+ file.seek(where + name_offset, os.SEEK_SET)
+ file.write(value.encode() + b"\0")
+ # Seek to the next command
+ file.seek(where + cmd_size, os.SEEK_SET)
+
+ def do_file(file, offset=0, size=maxint):
+ file = FileView(file, offset, size)
+ # Read magic number
+ magic = read_data(file, BIG_ENDIAN)
+ if magic == FAT_MAGIC:
+ # Fat binaries contain nfat_arch Mach-O binaries
+ n_fat_arch = read_data(file, BIG_ENDIAN)
+ for _ in range(n_fat_arch):
+ # Read arch header
+ cpu_type, cpu_sub_type, offset, size, align = read_data(file, BIG_ENDIAN, 5)
+ do_file(file, offset, size)
+ elif magic == MH_MAGIC:
+ do_macho(file, 32, BIG_ENDIAN)
+ elif magic == MH_CIGAM:
+ do_macho(file, 32, LITTLE_ENDIAN)
+ elif magic == MH_MAGIC_64:
+ do_macho(file, 64, BIG_ENDIAN)
+ elif magic == MH_CIGAM_64:
+ do_macho(file, 64, LITTLE_ENDIAN)
+
+ assert len(what) >= len(value)
+
+ with open(at_path, "r+b") as f:
+ do_file(f)
+
+ return mach_o_change
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/__init__.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/__init__.py
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/common.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/common.py
new file mode 100644
index 0000000000..cc03b4293e
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/common.py
@@ -0,0 +1,53 @@
+from __future__ import absolute_import, unicode_literals
+
+import abc
+
+from six import add_metaclass
+
+from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest, RefMust, RefWhen
+from virtualenv.util.path import Path
+
+from ..via_global_self_do import ViaGlobalRefVirtualenvBuiltin
+
+
+@add_metaclass(abc.ABCMeta)
+class PyPy(ViaGlobalRefVirtualenvBuiltin):
+ @classmethod
+ def can_describe(cls, interpreter):
+ return interpreter.implementation == "PyPy" and super(PyPy, cls).can_describe(interpreter)
+
+ @classmethod
+ def _executables(cls, interpreter):
+ host = Path(interpreter.system_executable)
+ targets = sorted("{}{}".format(name, PyPy.suffix) for name in cls.exe_names(interpreter))
+ must = RefMust.COPY if interpreter.version_info.major == 2 else RefMust.NA
+ yield host, targets, must, RefWhen.ANY
+
+ @classmethod
+ def exe_names(cls, interpreter):
+ return {
+ cls.exe_stem(),
+ "python",
+ "python{}".format(interpreter.version_info.major),
+ "python{}.{}".format(*interpreter.version_info),
+ }
+
+ @classmethod
+ def sources(cls, interpreter):
+ for src in super(PyPy, cls).sources(interpreter):
+ yield src
+ for host in cls._add_shared_libs(interpreter):
+ yield PathRefToDest(host, dest=lambda self, s: self.bin_dir / s.name)
+
+ @classmethod
+ def _add_shared_libs(cls, interpreter):
+ # https://bitbucket.org/pypy/pypy/issue/1922/future-proofing-virtualenv
+ python_dir = Path(interpreter.system_executable).resolve().parent
+ for libname in cls._shared_libs():
+ src = python_dir / libname
+ if src.exists():
+ yield src
+
+ @classmethod
+ def _shared_libs(cls):
+ raise NotImplementedError
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py
new file mode 100644
index 0000000000..020000b342
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py
@@ -0,0 +1,121 @@
+from __future__ import absolute_import, unicode_literals
+
+import abc
+import logging
+import os
+
+from six import add_metaclass
+
+from virtualenv.create.describe import PosixSupports, WindowsSupports
+from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest
+from virtualenv.util.path import Path
+
+from ..python2.python2 import Python2
+from .common import PyPy
+
+
+@add_metaclass(abc.ABCMeta)
+class PyPy2(PyPy, Python2):
+ """"""
+
+ @classmethod
+ def exe_stem(cls):
+ return "pypy"
+
+ @classmethod
+ def sources(cls, interpreter):
+ for src in super(PyPy2, cls).sources(interpreter):
+ yield src
+ # include folder needed on Python 2 as we don't have pyvenv.cfg
+ host_include_marker = cls.host_include_marker(interpreter)
+ if host_include_marker.exists():
+ yield PathRefToDest(host_include_marker.parent, dest=lambda self, _: self.include)
+
+ @classmethod
+ def needs_stdlib_py_module(cls):
+ return True
+
+ @classmethod
+ def host_include_marker(cls, interpreter):
+ return Path(interpreter.system_include) / "PyPy.h"
+
+ @property
+ def include(self):
+ return self.dest / self.interpreter.distutils_install["headers"]
+
+ @classmethod
+ def modules(cls):
+ # pypy2 uses some modules before the site.py loads, so we need to include these too
+ return super(PyPy2, cls).modules() + [
+ "os",
+ "copy_reg",
+ "genericpath",
+ "linecache",
+ "stat",
+ "UserDict",
+ "warnings",
+ ]
+
+ @property
+ def lib_pypy(self):
+ return self.dest / "lib_pypy"
+
+ def ensure_directories(self):
+ dirs = super(PyPy2, self).ensure_directories()
+ dirs.add(self.lib_pypy)
+ host_include_marker = self.host_include_marker(self.interpreter)
+ if host_include_marker.exists():
+ dirs.add(self.include.parent)
+ else:
+ logging.debug("no include folders as can't find include marker %s", host_include_marker)
+ return dirs
+
+ @property
+ def skip_rewrite(self):
+ """
+ PyPy2 built-in imports are handled by this path entry; don't overwrite it, so as not to disable it
+ see: https://github.com/pypa/virtualenv/issues/1652
+ """
+ return 'or path.endswith("lib_pypy{}__extensions__") # PyPy2 built-in import marker'.format(os.sep)
+
+
+class PyPy2Posix(PyPy2, PosixSupports):
+ """PyPy 2 on POSIX"""
+
+ @classmethod
+ def modules(cls):
+ return super(PyPy2Posix, cls).modules() + ["posixpath"]
+
+ @classmethod
+ def _shared_libs(cls):
+ return ["libpypy-c.so", "libpypy-c.dylib"]
+
+ @property
+ def lib(self):
+ return self.dest / "lib"
+
+ @classmethod
+ def sources(cls, interpreter):
+ for src in super(PyPy2Posix, cls).sources(interpreter):
+ yield src
+ host_lib = Path(interpreter.system_prefix) / "lib"
+ if host_lib.exists():
+ yield PathRefToDest(host_lib, dest=lambda self, _: self.lib)
+
+
+class Pypy2Windows(PyPy2, WindowsSupports):
+ """PyPy 2 on Windows"""
+
+ @classmethod
+ def modules(cls):
+ return super(Pypy2Windows, cls).modules() + ["ntpath"]
+
+ @classmethod
+ def _shared_libs(cls):
+ return ["libpypy-c.dll"]
+
+ @classmethod
+ def sources(cls, interpreter):
+ for src in super(Pypy2Windows, cls).sources(interpreter):
+ yield src
+ yield PathRefToDest(Path(interpreter.system_prefix) / "libs", dest=lambda self, s: self.dest / s.name)
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py
new file mode 100644
index 0000000000..9588706786
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py
@@ -0,0 +1,63 @@
+from __future__ import absolute_import, unicode_literals
+
+import abc
+
+from six import add_metaclass
+
+from virtualenv.create.describe import PosixSupports, Python3Supports, WindowsSupports
+from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest
+from virtualenv.util.path import Path
+
+from .common import PyPy
+
+
+@add_metaclass(abc.ABCMeta)
+class PyPy3(PyPy, Python3Supports):
+ @classmethod
+ def exe_stem(cls):
+ return "pypy3"
+
+ @property
+ def stdlib(self):
+ """
+ PyPy3 seems to respect sysconfig only for the host python...
+ a virtual environment's purelib is instead lib/pythonx.y
+ """
+ return self.dest / "lib" / "python{}".format(self.interpreter.version_release_str) / "site-packages"
+
+ @classmethod
+ def exe_names(cls, interpreter):
+ return super(PyPy3, cls).exe_names(interpreter) | {"pypy"}
+
+
+class PyPy3Posix(PyPy3, PosixSupports):
+ """PyPy 3 on POSIX"""
+
+ @classmethod
+ def _shared_libs(cls):
+ return ["libpypy3-c.so", "libpypy3-c.dylib"]
+
+ def to_lib(self, src):
+ return self.dest / "lib" / src.name
+
+ @classmethod
+ def sources(cls, interpreter):
+ for src in super(PyPy3Posix, cls).sources(interpreter):
+ yield src
+ host_lib = Path(interpreter.system_prefix) / "lib"
+ if host_lib.exists() and host_lib.is_dir():
+ for path in host_lib.iterdir():
+ yield PathRefToDest(path, dest=cls.to_lib)
+
+
+class Pypy3Windows(PyPy3, WindowsSupports):
+ """PyPy 3 on Windows"""
+
+ @property
+ def bin_dir(self):
+ """PyPy3 needs to fall back to the pypy definition"""
+ return self.dest / "Scripts"
+
+ @classmethod
+ def _shared_libs(cls):
+ return ["libpypy3-c.dll"]
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/__init__.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/__init__.py
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/python2.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/python2.py
new file mode 100644
index 0000000000..cacd56ecfc
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/python2.py
@@ -0,0 +1,111 @@
+from __future__ import absolute_import, unicode_literals
+
+import abc
+import json
+import os
+
+from six import add_metaclass
+
+from virtualenv.create.describe import Python2Supports
+from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest
+from virtualenv.info import IS_ZIPAPP
+from virtualenv.util.path import Path
+from virtualenv.util.six import ensure_text
+from virtualenv.util.zipapp import read as read_from_zipapp
+
+from ..via_global_self_do import ViaGlobalRefVirtualenvBuiltin
+
+HERE = Path(os.path.abspath(__file__)).parent
+
+
+@add_metaclass(abc.ABCMeta)
+class Python2(ViaGlobalRefVirtualenvBuiltin, Python2Supports):
+ def create(self):
+ """Perform operations needed to make the created environment work on Python 2"""
+ super(Python2, self).create()
+ # install a patched site.py: the default Python 2 site.py is not smart enough to understand pyvenv.cfg,
+ # so we inject a small shim that can do this; its location depends on where it lives on the host
+ sys_std_plat = Path(self.interpreter.system_stdlib_platform)
+ site_py_in = (
+ self.stdlib_platform
+ if ((sys_std_plat / "site.py").exists() or (sys_std_plat / "site.pyc").exists())
+ else self.stdlib
+ )
+ site_py = site_py_in / "site.py"
+
+ custom_site = get_custom_site()
+ if IS_ZIPAPP:
+ custom_site_text = read_from_zipapp(custom_site)
+ else:
+ custom_site_text = custom_site.read_text()
+ expected = json.dumps([os.path.relpath(ensure_text(str(i)), ensure_text(str(site_py))) for i in self.libs])
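+ # the library folders' paths relative to the generated site.py, embedded as a JSON list for the shim to read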
+
+ custom_site_text = custom_site_text.replace("___EXPECTED_SITE_PACKAGES___", expected)
+
+ reload_code = os.linesep.join(" {}".format(i) for i in self.reload_code.splitlines()).lstrip()
+ custom_site_text = custom_site_text.replace("# ___RELOAD_CODE___", reload_code)
+
+ skip_rewrite = os.linesep.join(" {}".format(i) for i in self.skip_rewrite.splitlines()).lstrip()
+ custom_site_text = custom_site_text.replace("# ___SKIP_REWRITE____", skip_rewrite)
+
+ site_py.write_text(custom_site_text)
+
+ @property
+ def reload_code(self):
+ return 'reload(sys.modules["site"]) # noqa # call system site.py to setup import libraries'
+
+ @property
+ def skip_rewrite(self):
+ return ""
+
+ @classmethod
+ def sources(cls, interpreter):
+ for src in super(Python2, cls).sources(interpreter):
+ yield src
+ # install files needed to run site.py, either from stdlib or stdlib_platform, at least the pyc, but both if they exist
+ # if neither exists return the module file to trigger failure
+ mappings, needs_py_module = (
+ cls.mappings(interpreter),
+ cls.needs_stdlib_py_module(),
+ )
+ for req in cls.modules():
+ module_file, to_module, module_exists = cls.from_stdlib(mappings, "{}.py".format(req))
+ compiled_file, to_compiled, compiled_exists = cls.from_stdlib(mappings, "{}.pyc".format(req))
+ if needs_py_module or module_exists or not compiled_exists:
+ yield PathRefToDest(module_file, dest=to_module)
+ if compiled_exists:
+ yield PathRefToDest(compiled_file, dest=to_compiled)
+
+ @staticmethod
+ def from_stdlib(mappings, name):
+ for from_std, to_std in mappings:
+ src = from_std / name
+ if src.exists():
+ return src, to_std, True
+        # if it does not exist, fall back to the first entry in the list
+ return mappings[0][0] / name, mappings[0][1], False
+
+ @classmethod
+ def mappings(cls, interpreter):
+ mappings = [(Path(interpreter.system_stdlib_platform), cls.to_stdlib_platform)]
+ if interpreter.system_stdlib_platform != interpreter.system_stdlib:
+ mappings.append((Path(interpreter.system_stdlib), cls.to_stdlib))
+ return mappings
+
+ def to_stdlib(self, src):
+ return self.stdlib / src.name
+
+ def to_stdlib_platform(self, src):
+ return self.stdlib_platform / src.name
+
+ @classmethod
+ def needs_stdlib_py_module(cls):
+ raise NotImplementedError
+
+ @classmethod
+ def modules(cls):
+ return []
+
+
+def get_custom_site():
+ return HERE / "site.py"
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/site.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/site.py
new file mode 100644
index 0000000000..85eee842ae
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/python2/site.py
@@ -0,0 +1,164 @@
+# -*- coding: utf-8 -*-
+"""
+A simple shim module to fix up things on Python 2 only.
+
+Note: until the paths are set up correctly we can only import built-ins.
+"""
+import sys
+
+
+def main():
+ """Patch what needed, and invoke the original site.py"""
+ config = read_pyvenv()
+ sys.real_prefix = sys.base_prefix = config["base-prefix"]
+ sys.base_exec_prefix = config["base-exec-prefix"]
+ sys.base_executable = config["base-executable"]
+ global_site_package_enabled = config.get("include-system-site-packages", False) == "true"
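+    # the order below matters: sys.path and the user site flag must be fixed up before the host site.py is loaded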
+ rewrite_standard_library_sys_path()
+ disable_user_site_package()
+ load_host_site()
+ if global_site_package_enabled:
+ add_global_site_package()
+
+
+def load_host_site():
+ """trigger reload of site.py - now it will use the standard library instance that will take care of init"""
+ # we have a duality here, we generate the platform and pure library path based on what distutils.install specifies
+    # because this is what pip will be using; the host site.py though may contain its own pattern for where the
+ # platform and pure library paths should exist
+
+ # notably on Ubuntu there's a patch for getsitepackages to point to
+ # - prefix + local/lib/pythonx.y/dist-packages
+ # - prefix + lib/pythonx.y/dist-packages
+ # while distutils.install.cmd still points both of these to
+ # - prefix + lib/python2.7/site-packages
+
+    # to handle both cases (whether the two match or not) we first reload site.py, triggering the import of the host
+    # site.py, as this ensures that the initialization code within the host site.py runs
+
+ here = __file__ # the distutils.install patterns will be injected relative to this site.py, save it here
+
+ # ___RELOAD_CODE___
+
+ # and then if the distutils site packages are not on the sys.path we add them via add_site_dir; note we must add
+ # them by invoking add_site_dir to trigger the processing of pth files
+ import os
+
+ site_packages = r"""
+ ___EXPECTED_SITE_PACKAGES___
+ """
+ import json
+
+ add_site_dir = sys.modules["site"].addsitedir
+ for path in json.loads(site_packages):
+ full_path = os.path.abspath(os.path.join(here, path.encode("utf-8")))
+ add_site_dir(full_path)
+
+
+sep = "\\" if sys.platform == "win32" else "/" # no os module here yet - poor mans version
+
+
+def read_pyvenv():
+ """read pyvenv.cfg"""
+ config_file = "{}{}pyvenv.cfg".format(sys.prefix, sep)
+ with open(config_file) as file_handler:
+ lines = file_handler.readlines()
+ config = {}
+ for line in lines:
+ try:
+ split_at = line.index("=")
+ except ValueError:
+ continue # ignore bad/empty lines
+ else:
+ config[line[:split_at].strip()] = line[split_at + 1 :].strip()
+ return config
+
+
+def rewrite_standard_library_sys_path():
+ """Once this site file is loaded the standard library paths have already been set, fix them up"""
+ exe, prefix, exec_prefix = get_exe_prefixes(base=False)
+ base_exe, base_prefix, base_exec = get_exe_prefixes(base=True)
+ exe_dir = exe[: exe.rfind(sep)]
+ for at, path in enumerate(sys.path):
+        path = abs_path(path)  # normalize; paths starting with the old prefixes are replaced with the new ones below
+ skip_rewrite = path == exe_dir # don't fix the current executable location, notably on Windows this gets added
+ skip_rewrite = skip_rewrite # ___SKIP_REWRITE____
+ if not skip_rewrite:
+ sys.path[at] = map_path(path, base_exe, exe_dir, exec_prefix, base_prefix, prefix, base_exec)
+
+    # the rewrite above may have changed elements that came from PYTHONPATH, restore these if the env var is honored
+ if sys.flags.ignore_environment:
+ return
+ import os
+
+ python_paths = []
+ if "PYTHONPATH" in os.environ and os.environ["PYTHONPATH"]:
+ for path in os.environ["PYTHONPATH"].split(os.pathsep):
+ if path not in python_paths:
+ python_paths.append(path)
+ sys.path[: len(python_paths)] = python_paths
+
+
+def get_exe_prefixes(base=False):
+ return tuple(abs_path(getattr(sys, ("base_" if base else "") + i)) for i in ("executable", "prefix", "exec_prefix"))
+
+
+def abs_path(value):
+ values, keep = value.split(sep), []
+ at = len(values) - 1
+ while at >= 0:
+ if values[at] == "..":
+ at -= 1
+ else:
+ keep.append(values[at])
+ at -= 1
+ return sep.join(keep[::-1])
+
+
+def map_path(path, base_executable, exe_dir, exec_prefix, base_prefix, prefix, base_exec_prefix):
+ if path_starts_with(path, exe_dir):
+        # content inside the exe folder needs to be remapped to the original executable's folder
+ orig_exe_folder = base_executable[: base_executable.rfind(sep)]
+ return "{}{}".format(orig_exe_folder, path[len(exe_dir) :])
+ elif path_starts_with(path, prefix):
+ return "{}{}".format(base_prefix, path[len(prefix) :])
+ elif path_starts_with(path, exec_prefix):
+ return "{}{}".format(base_exec_prefix, path[len(exec_prefix) :])
+ return path
+
+
+def path_starts_with(directory, value):
+ return directory.startswith(value if value[-1] == sep else value + sep)
+
+
+def disable_user_site_package():
+ """Flip the switch on enable user site package"""
+ # sys.flags is a c-extension type, so we cannot monkeypatch it, replace it with a python class to flip it
+ sys.original_flags = sys.flags
+
+ class Flags(object):
+ def __init__(self):
+ self.__dict__ = {key: getattr(sys.flags, key) for key in dir(sys.flags) if not key.startswith("_")}
+
+ sys.flags = Flags()
+ sys.flags.no_user_site = 1
+
+
+def add_global_site_package():
+ """add the global site package"""
+ import site
+
+ # add user site package
+ sys.flags = sys.original_flags # restore original
+ site.ENABLE_USER_SITE = None # reset user site check
+ # add the global site package to the path - use new prefix and delegate to site.py
+ orig_prefixes = None
+ try:
+ orig_prefixes = site.PREFIXES
+ site.PREFIXES = [sys.base_prefix, sys.base_exec_prefix]
+ site.main()
+ finally:
+ site.PREFIXES = orig_prefixes
+
+
+main()
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/ref.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/ref.py
new file mode 100644
index 0000000000..69f243bf98
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/ref.py
@@ -0,0 +1,172 @@
+"""
+Virtual environments in the traditional sense are built as references to the host python. This file allows declarative
+references to elements on the file system, allowing our system to automatically detect what modes it can support given
+the constraints: e.g. can the file system symlink, can the files be read, executed, etc.
+"""
+from __future__ import absolute_import, unicode_literals
+
+import os
+from abc import ABCMeta, abstractmethod
+from collections import OrderedDict
+from stat import S_IXGRP, S_IXOTH, S_IXUSR
+
+from six import add_metaclass
+
+from virtualenv.info import fs_is_case_sensitive, fs_supports_symlink
+from virtualenv.util.path import copy, make_exe, symlink
+from virtualenv.util.six import ensure_text
+
+
+class RefMust(object):
+ NA = "NA"
+ COPY = "copy"
+ SYMLINK = "symlink"
+
+
+class RefWhen(object):
+ ANY = "ANY"
+ COPY = "copy"
+ SYMLINK = "symlink"
+
+
+@add_metaclass(ABCMeta)
+class PathRef(object):
+ """Base class that checks if a file reference can be symlink/copied"""
+
+ FS_SUPPORTS_SYMLINK = fs_supports_symlink()
+ FS_CASE_SENSITIVE = fs_is_case_sensitive()
+
+ def __init__(self, src, must=RefMust.NA, when=RefWhen.ANY):
+ self.must = must
+ self.when = when
+ self.src = src
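+        # probe existence eagerly; any OS level error is treated as the reference not existing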
+ try:
+ self.exists = src.exists()
+ except OSError:
+ self.exists = False
+ self._can_read = None if self.exists else False
+ self._can_copy = None if self.exists else False
+ self._can_symlink = None if self.exists else False
+
+ def __repr__(self):
+ return "{}(src={})".format(self.__class__.__name__, self.src)
+
+ @property
+ def can_read(self):
+ if self._can_read is None:
+ if self.src.is_file():
+ try:
+ with self.src.open("rb"):
+ self._can_read = True
+ except OSError:
+ self._can_read = False
+ else:
+ self._can_read = os.access(ensure_text(str(self.src)), os.R_OK)
+ return self._can_read
+
+ @property
+ def can_copy(self):
+ if self._can_copy is None:
+ if self.must == RefMust.SYMLINK:
+ self._can_copy = self.can_symlink
+ else:
+ self._can_copy = self.can_read
+ return self._can_copy
+
+ @property
+ def can_symlink(self):
+ if self._can_symlink is None:
+ if self.must == RefMust.COPY:
+ self._can_symlink = self.can_copy
+ else:
+ self._can_symlink = self.FS_SUPPORTS_SYMLINK and self.can_read
+ return self._can_symlink
+
+ @abstractmethod
+ def run(self, creator, symlinks):
+ raise NotImplementedError
+
+ def method(self, symlinks):
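+        # choose the transfer strategy: an explicit must wins, otherwise follow the requested symlinks mode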
+ if self.must == RefMust.SYMLINK:
+ return symlink
+ if self.must == RefMust.COPY:
+ return copy
+ return symlink if symlinks else copy
+
+
+@add_metaclass(ABCMeta)
+class ExePathRef(PathRef):
+ """Base class that checks if a executable can be references via symlink/copy"""
+
+ def __init__(self, src, must=RefMust.NA, when=RefWhen.ANY):
+ super(ExePathRef, self).__init__(src, must, when)
+ self._can_run = None
+
+ @property
+ def can_symlink(self):
+ if self.FS_SUPPORTS_SYMLINK:
+ return self.can_run
+ return False
+
+ @property
+ def can_run(self):
+ if self._can_run is None:
+ mode = self.src.stat().st_mode
+ for key in [S_IXUSR, S_IXGRP, S_IXOTH]:
+ if mode & key:
+ self._can_run = True
+ break
+ else:
+ self._can_run = False
+ return self._can_run
+
+
+class PathRefToDest(PathRef):
+ """Link a path on the file system"""
+
+ def __init__(self, src, dest, must=RefMust.NA, when=RefWhen.ANY):
+ super(PathRefToDest, self).__init__(src, must, when)
+ self.dest = dest
+
+ def run(self, creator, symlinks):
+ dest = self.dest(creator, self.src)
+ method = self.method(symlinks)
+ dest_iterable = dest if isinstance(dest, list) else (dest,)
+ if not dest.parent.exists():
+ dest.parent.mkdir(parents=True, exist_ok=True)
+ for dst in dest_iterable:
+ method(self.src, dst)
+
+
+class ExePathRefToDest(PathRefToDest, ExePathRef):
+ """Link a exe path on the file system"""
+
+ def __init__(self, src, targets, dest, must=RefMust.NA, when=RefWhen.ANY):
+ ExePathRef.__init__(self, src, must, when)
+ PathRefToDest.__init__(self, src, dest, must, when)
+ if not self.FS_CASE_SENSITIVE:
+ targets = list(OrderedDict((i.lower(), None) for i in targets).keys())
+ self.base = targets[0]
+ self.aliases = targets[1:]
+ self.dest = dest
+
+ def run(self, creator, symlinks):
+ bin_dir = self.dest(creator, self.src).parent
+ dest = bin_dir / self.base
+ method = self.method(symlinks)
+ method(self.src, dest)
+ if not symlinks:
+ make_exe(dest)
+ for extra in self.aliases:
+ link_file = bin_dir / extra
+ if link_file.exists():
+ link_file.unlink()
+ if symlinks:
+ link_file.symlink_to(self.base)
+ else:
+ copy(self.src, link_file)
+ if not symlinks:
+ make_exe(link_file)
+
+ def __repr__(self):
+ return "{}(src={}, alias={})".format(self.__class__.__name__, self.src, self.aliases)
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/via_global_self_do.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/via_global_self_do.py
new file mode 100644
index 0000000000..863ae16e1d
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/builtin/via_global_self_do.py
@@ -0,0 +1,114 @@
+from __future__ import absolute_import, unicode_literals
+
+from abc import ABCMeta
+
+from six import add_metaclass
+
+from virtualenv.create.via_global_ref.builtin.ref import ExePathRefToDest, RefMust, RefWhen
+from virtualenv.util.path import ensure_dir
+
+from ..api import ViaGlobalRefApi, ViaGlobalRefMeta
+from .builtin_way import VirtualenvBuiltin
+
+
+class BuiltinViaGlobalRefMeta(ViaGlobalRefMeta):
+ def __init__(self):
+ super(BuiltinViaGlobalRefMeta, self).__init__()
+ self.sources = []
+
+
+@add_metaclass(ABCMeta)
+class ViaGlobalRefVirtualenvBuiltin(ViaGlobalRefApi, VirtualenvBuiltin):
+ def __init__(self, options, interpreter):
+ super(ViaGlobalRefVirtualenvBuiltin, self).__init__(options, interpreter)
+ self._sources = getattr(options.meta, "sources", None) # if we're created as a describer this might be missing
+
+ @classmethod
+ def can_create(cls, interpreter):
+ """By default all built-in methods assume that if we can describe it we can create it"""
+ # first we must be able to describe it
+ if not cls.can_describe(interpreter):
+ return None
+ meta = cls.setup_meta(interpreter)
+ if meta is not None and meta:
+ cls._sources_can_be_applied(interpreter, meta)
+ return meta
+
+ @classmethod
+ def _sources_can_be_applied(cls, interpreter, meta):
+ for src in cls.sources(interpreter):
+ if src.exists:
+ if meta.can_copy and not src.can_copy:
+ meta.copy_error = "cannot copy {}".format(src)
+ if meta.can_symlink and not src.can_symlink:
+ meta.symlink_error = "cannot symlink {}".format(src)
+ else:
+ msg = "missing required file {}".format(src)
+ if src.when == RefMust.NA:
+ meta.error = msg
+ elif src.when == RefMust.COPY:
+ meta.copy_error = msg
+ elif src.when == RefMust.SYMLINK:
+ meta.symlink_error = msg
+ if not meta.can_copy and not meta.can_symlink:
+                meta.error = "neither copy nor symlink supported, copy: {} symlink: {}".format(
+ meta.copy_error,
+ meta.symlink_error,
+ )
+ if meta.error:
+ break
+ meta.sources.append(src)
+
+ @classmethod
+ def setup_meta(cls, interpreter):
+ return BuiltinViaGlobalRefMeta()
+
+ @classmethod
+ def sources(cls, interpreter):
+ for host_exe, targets, must, when in cls._executables(interpreter):
+ yield ExePathRefToDest(host_exe, dest=cls.to_bin, targets=targets, must=must, when=when)
+
+ def to_bin(self, src):
+ return self.bin_dir / src.name
+
+ @classmethod
+ def _executables(cls, interpreter):
+ raise NotImplementedError
+
+ def create(self):
+ dirs = self.ensure_directories()
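+        # drop directories that are ancestors of other requested directories so only the leaf paths need to be created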
+ for directory in list(dirs):
+ if any(i for i in dirs if i is not directory and directory.parts == i.parts[: len(directory.parts)]):
+ dirs.remove(directory)
+ for directory in sorted(dirs):
+ ensure_dir(directory)
+
+ self.set_pyenv_cfg()
+ self.pyenv_cfg.write()
+ true_system_site = self.enable_system_site_package
+ try:
+ self.enable_system_site_package = False
+ for src in self._sources:
+ if (
+ src.when == RefWhen.ANY
+ or (src.when == RefWhen.SYMLINK and self.symlinks is True)
+ or (src.when == RefWhen.COPY and self.symlinks is False)
+ ):
+ src.run(self, self.symlinks)
+ finally:
+ if true_system_site != self.enable_system_site_package:
+ self.enable_system_site_package = true_system_site
+ super(ViaGlobalRefVirtualenvBuiltin, self).create()
+
+ def ensure_directories(self):
+ return {self.dest, self.bin_dir, self.script_dir, self.stdlib} | set(self.libs)
+
+ def set_pyenv_cfg(self):
+ """
+ We directly inject the base prefix and base exec prefix to avoid site.py needing to discover these
+ from home (which usually is done within the interpreter itself)
+ """
+ super(ViaGlobalRefVirtualenvBuiltin, self).set_pyenv_cfg()
+ self.pyenv_cfg["base-prefix"] = self.interpreter.system_prefix
+ self.pyenv_cfg["base-exec-prefix"] = self.interpreter.system_exec_prefix
+ self.pyenv_cfg["base-executable"] = self.interpreter.system_executable
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/store.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/store.py
new file mode 100644
index 0000000000..134a535859
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/store.py
@@ -0,0 +1,26 @@
+from __future__ import absolute_import, unicode_literals
+
+from virtualenv.util.path import Path
+
+
+def handle_store_python(meta, interpreter):
+ if is_store_python(interpreter):
+ meta.symlink_error = "Windows Store Python does not support virtual environments via symlink"
+ return meta
+
+
+def is_store_python(interpreter):
+ parts = Path(interpreter.system_executable).parts
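+    # a Windows Store install lives under ...\Microsoft\WindowsApps\PythonSoftwareFoundation.Python.3.*\python*.exe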
+ return (
+ len(parts) > 4
+ and parts[-4] == "Microsoft"
+ and parts[-3] == "WindowsApps"
+ and parts[-2].startswith("PythonSoftwareFoundation.Python.3.")
+ and parts[-1].startswith("python")
+ )
+
+
+__all__ = (
+ "handle_store_python",
+ "is_store_python",
+)
diff --git a/third_party/python/virtualenv/virtualenv/create/via_global_ref/venv.py b/third_party/python/virtualenv/virtualenv/create/via_global_ref/venv.py
new file mode 100644
index 0000000000..aaa67947f1
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/create/via_global_ref/venv.py
@@ -0,0 +1,83 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+from copy import copy
+
+from virtualenv.create.via_global_ref.store import handle_store_python
+from virtualenv.discovery.py_info import PythonInfo
+from virtualenv.util.error import ProcessCallFailed
+from virtualenv.util.path import ensure_dir
+from virtualenv.util.subprocess import run_cmd
+
+from .api import ViaGlobalRefApi, ViaGlobalRefMeta
+
+
+class Venv(ViaGlobalRefApi):
+ def __init__(self, options, interpreter):
+ self.describe = options.describe
+ super(Venv, self).__init__(options, interpreter)
+ self.can_be_inline = (
+ interpreter is PythonInfo.current() and interpreter.executable == interpreter.system_executable
+ )
+ self._context = None
+
+ def _args(self):
+ return super(Venv, self)._args() + ([("describe", self.describe.__class__.__name__)] if self.describe else [])
+
+ @classmethod
+ def can_create(cls, interpreter):
+ if interpreter.has_venv:
+ meta = ViaGlobalRefMeta()
+ if interpreter.platform == "win32" and interpreter.version_info.major == 3:
+ meta = handle_store_python(meta, interpreter)
+ return meta
+ return None
+
+ def create(self):
+ if self.can_be_inline:
+ self.create_inline()
+ else:
+ self.create_via_sub_process()
+ for lib in self.libs:
+ ensure_dir(lib)
+ super(Venv, self).create()
+
+ def create_inline(self):
+ from venv import EnvBuilder
+
+ builder = EnvBuilder(
+ system_site_packages=self.enable_system_site_package,
+ clear=False,
+ symlinks=self.symlinks,
+ with_pip=False,
+ )
+ builder.create(str(self.dest))
+
+ def create_via_sub_process(self):
+ cmd = self.get_host_create_cmd()
+ logging.info("using host built-in venv to create via %s", " ".join(cmd))
+ code, out, err = run_cmd(cmd)
+ if code != 0:
+ raise ProcessCallFailed(code, out, err, cmd)
+
+ def get_host_create_cmd(self):
+ cmd = [self.interpreter.system_executable, "-m", "venv", "--without-pip"]
+ if self.enable_system_site_package:
+ cmd.append("--system-site-packages")
+ cmd.append("--symlinks" if self.symlinks else "--copies")
+ cmd.append(str(self.dest))
+ return cmd
+
+ def set_pyenv_cfg(self):
+ # prefer venv options over ours, but keep our extra
+ venv_content = copy(self.pyenv_cfg.refresh())
+ super(Venv, self).set_pyenv_cfg()
+ self.pyenv_cfg.update(venv_content)
+
+ def __getattribute__(self, item):
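+        # return plain data attributes from the describe helper when it provides them; callables stay on Venv, except "script"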
+ describe = object.__getattribute__(self, "describe")
+ if describe is not None and hasattr(describe, item):
+ element = getattr(describe, item)
+ if not callable(element) or item in ("script",):
+ return element
+ return object.__getattribute__(self, item)
diff --git a/third_party/python/virtualenv/virtualenv/discovery/__init__.py b/third_party/python/virtualenv/virtualenv/discovery/__init__.py
new file mode 100644
index 0000000000..01e6d4f49d
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/discovery/__init__.py
@@ -0,0 +1 @@
+from __future__ import absolute_import, unicode_literals
diff --git a/third_party/python/virtualenv/virtualenv/discovery/builtin.py b/third_party/python/virtualenv/virtualenv/discovery/builtin.py
new file mode 100644
index 0000000000..b66ecb1932
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/discovery/builtin.py
@@ -0,0 +1,163 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+import sys
+
+from virtualenv.info import IS_WIN
+from virtualenv.util.six import ensure_str, ensure_text
+
+from .discover import Discover
+from .py_info import PythonInfo
+from .py_spec import PythonSpec
+
+
+class Builtin(Discover):
+ def __init__(self, options):
+ super(Builtin, self).__init__(options)
+ self.python_spec = options.python if options.python else [sys.executable]
+ self.app_data = options.app_data
+
+ @classmethod
+ def add_parser_arguments(cls, parser):
+ parser.add_argument(
+ "-p",
+ "--python",
+ dest="python",
+ metavar="py",
+ type=str,
+ action="append",
+ default=[],
+ help="interpreter based on what to create environment (path/identifier) "
+ "- by default use the interpreter where the tool is installed - first found wins",
+ )
+
+ def run(self):
+ for python_spec in self.python_spec:
+ result = get_interpreter(python_spec, self.app_data)
+ if result is not None:
+ return result
+ return None
+
+ def __repr__(self):
+ return ensure_str(self.__unicode__())
+
+ def __unicode__(self):
+ spec = self.python_spec[0] if len(self.python_spec) == 1 else self.python_spec
+ return "{} discover of python_spec={!r}".format(self.__class__.__name__, spec)
+
+
+def get_interpreter(key, app_data=None):
+ spec = PythonSpec.from_string_spec(key)
+ logging.info("find interpreter for spec %r", spec)
+ proposed_paths = set()
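+    # remember (system executable, strictness) pairs already rejected so the same candidate is not re-examined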
+ for interpreter, impl_must_match in propose_interpreters(spec, app_data):
+ key = interpreter.system_executable, impl_must_match
+ if key in proposed_paths:
+ continue
+ logging.info("proposed %s", interpreter)
+ if interpreter.satisfies(spec, impl_must_match):
+ logging.debug("accepted %s", interpreter)
+ return interpreter
+ proposed_paths.add(key)
+
+
+def propose_interpreters(spec, app_data):
+ # 1. if it's a path and exists
+ if spec.path is not None:
+ try:
+ os.lstat(spec.path) # Windows Store Python does not work with os.path.exists, but does for os.lstat
+ except OSError:
+ if spec.is_abs:
+ raise
+ else:
+ yield PythonInfo.from_exe(os.path.abspath(spec.path), app_data), True
+ if spec.is_abs:
+ return
+ else:
+ # 2. otherwise try with the current
+ yield PythonInfo.current_system(app_data), True
+
+ # 3. otherwise fallback to platform default logic
+ if IS_WIN:
+ from .windows import propose_interpreters
+
+ for interpreter in propose_interpreters(spec, app_data):
+ yield interpreter, True
+    # finally just find on PATH; the path order matters (as the candidates are less easy for the end user to control)
+ paths = get_paths()
+ tested_exes = set()
+ for pos, path in enumerate(paths):
+ path = ensure_text(path)
+ logging.debug(LazyPathDump(pos, path))
+ for candidate, match in possible_specs(spec):
+ found = check_path(candidate, path)
+ if found is not None:
+ exe = os.path.abspath(found)
+ if exe not in tested_exes:
+ tested_exes.add(exe)
+ interpreter = PathPythonInfo.from_exe(exe, app_data, raise_on_error=False)
+ if interpreter is not None:
+ yield interpreter, match
+
+
+def get_paths():
+ path = os.environ.get(str("PATH"), None)
+ if path is None:
+ try:
+ path = os.confstr("CS_PATH")
+ except (AttributeError, ValueError):
+ path = os.defpath
+ if not path:
+ paths = []
+ else:
+ paths = [p for p in path.split(os.pathsep) if os.path.exists(p)]
+ return paths
+
+
+class LazyPathDump(object):
+ def __init__(self, pos, path):
+ self.pos = pos
+ self.path = path
+
+ def __repr__(self):
+ return ensure_str(self.__unicode__())
+
+ def __unicode__(self):
+ content = "discover PATH[{}]={}".format(self.pos, self.path)
+ if os.environ.get(str("_VIRTUALENV_DEBUG")): # this is the over the board debug
+ content += " with =>"
+ for file_name in os.listdir(self.path):
+ try:
+ file_path = os.path.join(self.path, file_name)
+ if os.path.isdir(file_path) or not os.access(file_path, os.X_OK):
+ continue
+ except OSError:
+ pass
+ content += " "
+ content += file_name
+ return content
+
+
+def check_path(candidate, path):
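+    # try the candidate as given first (appending .exe on Windows), then joined onto the PATH entry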
+ _, ext = os.path.splitext(candidate)
+ if sys.platform == "win32" and ext != ".exe":
+ candidate = candidate + ".exe"
+ if os.path.isfile(candidate):
+ return candidate
+ candidate = os.path.join(path, candidate)
+ if os.path.isfile(candidate):
+ return candidate
+ return None
+
+
+def possible_specs(spec):
+    # 4. then maybe it's an exact name on PATH - for such a direct lookup the implementation no longer needs to match
+ yield spec.str_spec, False
+ # 5. or from the spec we can deduce a name on path that matches
+ for exe, match in spec.generate_names():
+ yield exe, match
+
+
+class PathPythonInfo(PythonInfo):
+ """"""
diff --git a/third_party/python/virtualenv/virtualenv/discovery/cached_py_info.py b/third_party/python/virtualenv/virtualenv/discovery/cached_py_info.py
new file mode 100644
index 0000000000..ce79ef14b1
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/discovery/cached_py_info.py
@@ -0,0 +1,148 @@
+"""
+
+We acquire the python information by running an interrogation script in a subprocess. This operation is not
+cheap, especially not on Windows. To avoid paying this hefty cost every time, we apply multiple levels of
+caching.
+"""
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+import pipes
+import sys
+from collections import OrderedDict
+
+from virtualenv.app_data import AppDataDisabled
+from virtualenv.discovery.py_info import PythonInfo
+from virtualenv.info import PY2
+from virtualenv.util.path import Path
+from virtualenv.util.six import ensure_text
+from virtualenv.util.subprocess import Popen, subprocess
+
+_CACHE = OrderedDict()
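+# seed the cache with the currently running interpreter, which can be introspected in-process without a subprocess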
+_CACHE[Path(sys.executable)] = PythonInfo()
+
+
+def from_exe(cls, app_data, exe, raise_on_error=True, ignore_cache=False):
+ """"""
+ result = _get_from_cache(cls, app_data, exe, ignore_cache=ignore_cache)
+ if isinstance(result, Exception):
+ if raise_on_error:
+ raise result
+ else:
+ logging.info("%s", str(result))
+ result = None
+ return result
+
+
+def _get_from_cache(cls, app_data, exe, ignore_cache=True):
+ # note here we cannot resolve symlinks, as the symlink may trigger different prefix information if there's a
+    # pyvenv.cfg somewhere alongside on python3.4+
+ exe_path = Path(exe)
+ if not ignore_cache and exe_path in _CACHE: # check in the in-memory cache
+ result = _CACHE[exe_path]
+ else: # otherwise go through the app data cache
+ py_info = _get_via_file_cache(cls, app_data, exe_path, exe)
+ result = _CACHE[exe_path] = py_info
+    # independent of whether it came from the file or the in-memory cache, fix the original executable location
+ if isinstance(result, PythonInfo):
+ result.executable = exe
+ return result
+
+
+def _get_via_file_cache(cls, app_data, path, exe):
+ path_text = ensure_text(str(path))
+ try:
+ path_modified = path.stat().st_mtime
+ except OSError:
+ path_modified = -1
+ if app_data is None:
+ app_data = AppDataDisabled()
+ py_info, py_info_store = None, app_data.py_info(path)
+ with py_info_store.locked():
+        if py_info_store.exists():  # if it exists and matches, load it
+ data = py_info_store.read()
+ of_path, of_st_mtime, of_content = data["path"], data["st_mtime"], data["content"]
+ if of_path == path_text and of_st_mtime == path_modified:
+ py_info = cls._from_dict({k: v for k, v in of_content.items()})
+ else:
+ py_info_store.remove()
+        if py_info is None:  # if not loaded, run the interrogation and save the result
+ failure, py_info = _run_subprocess(cls, exe, app_data)
+ if failure is None:
+ data = {"st_mtime": path_modified, "path": path_text, "content": py_info._to_dict()}
+ py_info_store.write(data)
+ else:
+ py_info = failure
+ return py_info
+
+
+def _run_subprocess(cls, exe, app_data):
+ py_info_script = Path(os.path.abspath(__file__)).parent / "py_info.py"
+ with app_data.ensure_extracted(py_info_script) as py_info_script:
+ cmd = [exe, str(py_info_script)]
+ # prevent sys.prefix from leaking into the child process - see https://bugs.python.org/issue22490
+ env = os.environ.copy()
+ env.pop("__PYVENV_LAUNCHER__", None)
+ logging.debug("get interpreter info via cmd: %s", LogCmd(cmd))
+ try:
+ process = Popen(
+ cmd,
+ universal_newlines=True,
+ stdin=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ env=env,
+ )
+ out, err = process.communicate()
+ code = process.returncode
+ except OSError as os_error:
+ out, err, code = "", os_error.strerror, os_error.errno
+ result, failure = None, None
+ if code == 0:
+ result = cls._from_json(out)
+ result.executable = exe # keep original executable as this may contain initialization code
+ else:
+ msg = "failed to query {} with code {}{}{}".format(
+ exe,
+ code,
+ " out: {!r}".format(out) if out else "",
+ " err: {!r}".format(err) if err else "",
+ )
+ failure = RuntimeError(msg)
+ return failure, result
+
+
+class LogCmd(object):
+ def __init__(self, cmd, env=None):
+ self.cmd = cmd
+ self.env = env
+
+ def __repr__(self):
+ def e(v):
+ return v.decode("utf-8") if isinstance(v, bytes) else v
+
+ cmd_repr = e(" ").join(pipes.quote(e(c)) for c in self.cmd)
+ if self.env is not None:
+ cmd_repr += e(" env of {!r}").format(self.env)
+ if PY2:
+ return cmd_repr.encode("utf-8")
+ return cmd_repr
+
+ def __unicode__(self):
+ raw = repr(self)
+ if PY2:
+ return raw.decode("utf-8")
+ return raw
+
+
+def clear(app_data):
+ app_data.py_info_clear()
+ _CACHE.clear()
+
+
+__all__ = (
+ "from_exe",
+ "clear",
+ "LogCmd",
+)
diff --git a/third_party/python/virtualenv/virtualenv/discovery/discover.py b/third_party/python/virtualenv/virtualenv/discovery/discover.py
new file mode 100644
index 0000000000..93c3ea7ad7
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/discovery/discover.py
@@ -0,0 +1,46 @@
+from __future__ import absolute_import, unicode_literals
+
+from abc import ABCMeta, abstractmethod
+
+from six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class Discover(object):
+ """Discover and provide the requested Python interpreter"""
+
+ @classmethod
+ def add_parser_arguments(cls, parser):
+ """Add CLI arguments for this discovery mechanisms.
+
+ :param parser: the CLI parser
+ """
+ raise NotImplementedError
+
+ # noinspection PyUnusedLocal
+ def __init__(self, options):
+ """Create a new discovery mechanism.
+
+ :param options: the parsed options as defined within :meth:`add_parser_arguments`
+ """
+ self._has_run = False
+ self._interpreter = None
+
+ @abstractmethod
+ def run(self):
+ """Discovers an interpreter.
+
+ :return: the interpreter ready to use for virtual environment creation
+ """
+ raise NotImplementedError
+
+ @property
+ def interpreter(self):
+ """
+ :return: the interpreter as returned by :meth:`run`, cached
+ """
+ if self._has_run is False:
+ self._interpreter = self.run()
+ self._has_run = True
+ return self._interpreter
diff --git a/third_party/python/virtualenv/virtualenv/discovery/py_info.py b/third_party/python/virtualenv/virtualenv/discovery/py_info.py
new file mode 100644
index 0000000000..46b51df1b3
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/discovery/py_info.py
@@ -0,0 +1,490 @@
+"""
+The PythonInfo contains information about a concrete instance of a Python interpreter
+
+Note: this file is also used to query target interpreters, so it can only use standard library methods
+"""
+from __future__ import absolute_import, print_function
+
+import json
+import logging
+import os
+import platform
+import re
+import sys
+import sysconfig
+from collections import OrderedDict, namedtuple
+from distutils import dist
+from distutils.command.install import SCHEME_KEYS
+from string import digits
+
+VersionInfo = namedtuple("VersionInfo", ["major", "minor", "micro", "releaselevel", "serial"])
+
+
+def _get_path_extensions():
+ return list(OrderedDict.fromkeys([""] + os.environ.get("PATHEXT", "").lower().split(os.pathsep)))
+
+
+EXTENSIONS = _get_path_extensions()
+_CONF_VAR_RE = re.compile(r"\{\w+\}")
+
+
+class PythonInfo(object):
+ """Contains information for a Python interpreter"""
+
+ def __init__(self):
+ def u(v):
+ return v.decode("utf-8") if isinstance(v, bytes) else v
+
+ def abs_path(v):
+ return None if v is None else os.path.abspath(v) # unroll relative elements from path (e.g. ..)
+
+ # qualifies the python
+ self.platform = u(sys.platform)
+ self.implementation = u(platform.python_implementation())
+ if self.implementation == "PyPy":
+ self.pypy_version_info = tuple(u(i) for i in sys.pypy_version_info)
+
+        # this is a tuple on older versions and a struct sequence later on; unify to our own named tuple
+ self.version_info = VersionInfo(*list(u(i) for i in sys.version_info))
+ self.architecture = 64 if sys.maxsize > 2 ** 32 else 32
+
+ self.version = u(sys.version)
+ self.os = u(os.name)
+
+ # information about the prefix - determines python home
+ self.prefix = u(abs_path(getattr(sys, "prefix", None))) # prefix we think
+ self.base_prefix = u(abs_path(getattr(sys, "base_prefix", None))) # venv
+ self.real_prefix = u(abs_path(getattr(sys, "real_prefix", None))) # old virtualenv
+
+ # information about the exec prefix - dynamic stdlib modules
+ self.base_exec_prefix = u(abs_path(getattr(sys, "base_exec_prefix", None)))
+ self.exec_prefix = u(abs_path(getattr(sys, "exec_prefix", None)))
+
+ self.executable = u(abs_path(sys.executable)) # the executable we were invoked via
+ self.original_executable = u(abs_path(self.executable)) # the executable as known by the interpreter
+        self.system_executable = self._fast_get_system_executable()  # the executable we are based on (if available)
+
+ try:
+ __import__("venv")
+ has = True
+ except ImportError:
+ has = False
+ self.has_venv = has
+ self.path = [u(i) for i in sys.path]
+ self.file_system_encoding = u(sys.getfilesystemencoding())
+ self.stdout_encoding = u(getattr(sys.stdout, "encoding", None))
+
+ self.sysconfig_paths = {u(i): u(sysconfig.get_path(i, expand=False)) for i in sysconfig.get_path_names()}
+ # https://bugs.python.org/issue22199
+ makefile = getattr(sysconfig, "get_makefile_filename", getattr(sysconfig, "_get_makefile_filename", None))
+ self.sysconfig = {
+ u(k): u(v)
+ for k, v in [
+ # a list of content to store from sysconfig
+ ("makefile_filename", makefile()),
+ ]
+ if k is not None
+ }
+
+ config_var_keys = set()
+ for element in self.sysconfig_paths.values():
+ for k in _CONF_VAR_RE.findall(element):
+ config_var_keys.add(u(k[1:-1]))
+ config_var_keys.add("PYTHONFRAMEWORK")
+
+ self.sysconfig_vars = {u(i): u(sysconfig.get_config_var(i) or "") for i in config_var_keys}
+ if self.implementation == "PyPy" and sys.version_info.major == 2:
+ self.sysconfig_vars[u"implementation_lower"] = u"python"
+
+ self.distutils_install = {u(k): u(v) for k, v in self._distutils_install().items()}
+ confs = {k: (self.system_prefix if v.startswith(self.prefix) else v) for k, v in self.sysconfig_vars.items()}
+ self.system_stdlib = self.sysconfig_path("stdlib", confs)
+ self.system_stdlib_platform = self.sysconfig_path("platstdlib", confs)
+ self.max_size = getattr(sys, "maxsize", getattr(sys, "maxint", None))
+ self._creators = None
+
+ def _fast_get_system_executable(self):
+ """Try to get the system executable by just looking at properties"""
+ if self.real_prefix or (
+ self.base_prefix is not None and self.base_prefix != self.prefix
+ ): # if this is a virtual environment
+ if self.real_prefix is None:
+ base_executable = getattr(sys, "_base_executable", None) # some platforms may set this to help us
+ if base_executable is not None: # use the saved system executable if present
+ if sys.executable != base_executable: # we know we're in a virtual environment, cannot be us
+ return base_executable
+ return None # in this case we just can't tell easily without poking around FS and calling them, bail
+ # if we're not in a virtual environment, this is already a system python, so return the original executable
+ # note we must choose the original and not the pure executable as shim scripts might throw us off
+ return self.original_executable
+
+ @staticmethod
+ def _distutils_install():
+ # follow https://github.com/pypa/pip/blob/main/src/pip/_internal/locations.py#L95
+ # note here we don't import Distribution directly to allow setuptools to patch it
+ d = dist.Distribution({"script_args": "--no-user-cfg"}) # conf files not parsed so they do not hijack paths
+ if hasattr(sys, "_framework"):
+ sys._framework = None # disable macOS static paths for framework
+ i = d.get_command_obj("install", create=True)
+        i.prefix = os.sep  # the generated paths are relative to a prefix of just the path separator, which makes them relative
+ i.finalize_options()
+ result = {key: (getattr(i, "install_{}".format(key))[1:]).lstrip(os.sep) for key in SCHEME_KEYS}
+ return result
+
+ @property
+ def version_str(self):
+ return ".".join(str(i) for i in self.version_info[0:3])
+
+ @property
+ def version_release_str(self):
+ return ".".join(str(i) for i in self.version_info[0:2])
+
+ @property
+ def python_name(self):
+ version_info = self.version_info
+ return "python{}.{}".format(version_info.major, version_info.minor)
+
+ @property
+ def is_old_virtualenv(self):
+ return self.real_prefix is not None
+
+ @property
+ def is_venv(self):
+ return self.base_prefix is not None and self.version_info.major == 3
+
+ def sysconfig_path(self, key, config_var=None, sep=os.sep):
+ pattern = self.sysconfig_paths[key]
+ if config_var is None:
+ config_var = self.sysconfig_vars
+ else:
+ base = {k: v for k, v in self.sysconfig_vars.items()}
+ base.update(config_var)
+ config_var = base
+ return pattern.format(**config_var).replace(u"/", sep)
+
+ def creators(self, refresh=False):
+ if self._creators is None or refresh is True:
+ from virtualenv.run.plugin.creators import CreatorSelector
+
+ self._creators = CreatorSelector.for_interpreter(self)
+ return self._creators
+
+ @property
+ def system_include(self):
+ path = self.sysconfig_path(
+ "include",
+ {k: (self.system_prefix if v.startswith(self.prefix) else v) for k, v in self.sysconfig_vars.items()},
+ )
+        if not os.path.exists(path):  # some broken packagings don't respect sysconfig, fall back to the distutils path
+            # the pattern includes the distribution name at the end too, remove that via the dirname call
+ fallback = os.path.join(self.prefix, os.path.dirname(self.distutils_install["headers"]))
+ if os.path.exists(fallback):
+ path = fallback
+ return path
+
+ @property
+ def system_prefix(self):
+ return self.real_prefix or self.base_prefix or self.prefix
+
+ @property
+ def system_exec_prefix(self):
+ return self.real_prefix or self.base_exec_prefix or self.exec_prefix
+
+ def __unicode__(self):
+ content = repr(self)
+        if sys.version_info[0] == 2:
+ content = content.decode("utf-8")
+ return content
+
+ def __repr__(self):
+ return "{}({!r})".format(
+ self.__class__.__name__,
+ {k: v for k, v in self.__dict__.items() if not k.startswith("_")},
+ )
+
+ def __str__(self):
+ content = "{}({})".format(
+ self.__class__.__name__,
+ ", ".join(
+ "{}={}".format(k, v)
+ for k, v in (
+ ("spec", self.spec),
+ (
+ "system"
+ if self.system_executable is not None and self.system_executable != self.executable
+ else None,
+ self.system_executable,
+ ),
+ (
+ "original"
+ if (
+ self.original_executable != self.system_executable
+ and self.original_executable != self.executable
+ )
+ else None,
+ self.original_executable,
+ ),
+ ("exe", self.executable),
+ ("platform", self.platform),
+ ("version", repr(self.version)),
+ ("encoding_fs_io", "{}-{}".format(self.file_system_encoding, self.stdout_encoding)),
+ )
+ if k is not None
+ ),
+ )
+ return content
+
+ @property
+ def spec(self):
+ return "{}{}-{}".format(self.implementation, ".".join(str(i) for i in self.version_info), self.architecture)
+
+ @classmethod
+ def clear_cache(cls, app_data):
+ # this method is not used by itself, so here and called functions can import stuff locally
+ from virtualenv.discovery.cached_py_info import clear
+
+ clear(app_data)
+ cls._cache_exe_discovery.clear()
+
+ def satisfies(self, spec, impl_must_match):
+ """check if a given specification can be satisfied by the this python interpreter instance"""
+ if spec.path:
+ if self.executable == os.path.abspath(spec.path):
+                return True  # if the path is our own executable path we're done
+ if not spec.is_abs:
+ # if path set, and is not our original executable name, this does not match
+ basename = os.path.basename(self.original_executable)
+ spec_path = spec.path
+ if sys.platform == "win32":
+ basename, suffix = os.path.splitext(basename)
+ if spec_path.endswith(suffix):
+ spec_path = spec_path[: -len(suffix)]
+ if basename != spec_path:
+ return False
+
+ if impl_must_match:
+ if spec.implementation is not None and spec.implementation.lower() != self.implementation.lower():
+ return False
+
+ if spec.architecture is not None and spec.architecture != self.architecture:
+ return False
+
+ for our, req in zip(self.version_info[0:3], (spec.major, spec.minor, spec.micro)):
+ if req is not None and our is not None and our != req:
+ return False
+ return True
+
+ _current_system = None
+ _current = None
+
+ @classmethod
+ def current(cls, app_data=None):
+ """
+ This locates the current host interpreter information. This might be different than what we run into in case
+ the host python has been upgraded from underneath us.
+ """
+ if cls._current is None:
+ cls._current = cls.from_exe(sys.executable, app_data, raise_on_error=True, resolve_to_host=False)
+ return cls._current
+
+ @classmethod
+ def current_system(cls, app_data=None):
+ """
+ This locates the current host interpreter information. This might be different than what we run into in case
+ the host python has been upgraded from underneath us.
+ """
+ if cls._current_system is None:
+ cls._current_system = cls.from_exe(sys.executable, app_data, raise_on_error=True, resolve_to_host=True)
+ return cls._current_system
+
+ def _to_json(self):
+ # don't save calculated paths, as these are non primitive types
+ return json.dumps(self._to_dict(), indent=2)
+
+ def _to_dict(self):
+ data = {var: (getattr(self, var) if var not in ("_creators",) else None) for var in vars(self)}
+ # noinspection PyProtectedMember
+ data["version_info"] = data["version_info"]._asdict() # namedtuple to dictionary
+ return data
+
+ @classmethod
+ def from_exe(cls, exe, app_data=None, raise_on_error=True, ignore_cache=False, resolve_to_host=True):
+ """Given a path to an executable get the python information"""
+ # this method is not used by itself, so here and called functions can import stuff locally
+ from virtualenv.discovery.cached_py_info import from_exe
+
+ proposed = from_exe(cls, app_data, exe, raise_on_error=raise_on_error, ignore_cache=ignore_cache)
+ # noinspection PyProtectedMember
+ if isinstance(proposed, PythonInfo) and resolve_to_host:
+ try:
+ proposed = proposed._resolve_to_system(app_data, proposed)
+ except Exception as exception:
+ if raise_on_error:
+ raise exception
+ logging.info("ignore %s due cannot resolve system due to %r", proposed.original_executable, exception)
+ proposed = None
+ return proposed
+
+ @classmethod
+ def _from_json(cls, payload):
+ # the dictionary unroll here is to protect against pypy bug of interpreter crashing
+ raw = json.loads(payload)
+ return cls._from_dict({k: v for k, v in raw.items()})
+
+ @classmethod
+ def _from_dict(cls, data):
+ data["version_info"] = VersionInfo(**data["version_info"]) # restore this to a named tuple structure
+ result = cls()
+ result.__dict__ = {k: v for k, v in data.items()}
+ return result
+
+ @classmethod
+ def _resolve_to_system(cls, app_data, target):
+ start_executable = target.executable
+ prefixes = OrderedDict()
+ while target.system_executable is None:
+ prefix = target.real_prefix or target.base_prefix or target.prefix
+ if prefix in prefixes:
+ if len(prefixes) == 1:
+ # if we're linking back to ourselves accept ourselves with a WARNING
+ logging.info("%r links back to itself via prefixes", target)
+ target.system_executable = target.executable
+ break
+ for at, (p, t) in enumerate(prefixes.items(), start=1):
+ logging.error("%d: prefix=%s, info=%r", at, p, t)
+ logging.error("%d: prefix=%s, info=%r", len(prefixes) + 1, prefix, target)
+ raise RuntimeError("prefixes are causing a circle {}".format("|".join(prefixes.keys())))
+ prefixes[prefix] = target
+ target = target.discover_exe(app_data, prefix=prefix, exact=False)
+ if target.executable != target.system_executable:
+ target = cls.from_exe(target.system_executable, app_data)
+ target.executable = start_executable
+ return target
+
+ _cache_exe_discovery = {}
+
+ def discover_exe(self, app_data, prefix, exact=True):
+ key = prefix, exact
+ if key in self._cache_exe_discovery and prefix:
+ logging.debug("discover exe from cache %s - exact %s: %r", prefix, exact, self._cache_exe_discovery[key])
+ return self._cache_exe_discovery[key]
+ logging.debug("discover exe for %s in %s", self, prefix)
+ # we don't know explicitly here, do some guess work - our executable name should tell
+ possible_names = self._find_possible_exe_names()
+ possible_folders = self._find_possible_folders(prefix)
+ discovered = []
+ for folder in possible_folders:
+ for name in possible_names:
+ info = self._check_exe(app_data, folder, name, exact, discovered)
+ if info is not None:
+ self._cache_exe_discovery[key] = info
+ return info
+ if exact is False and discovered:
+ info = self._select_most_likely(discovered, self)
+ folders = os.pathsep.join(possible_folders)
+ self._cache_exe_discovery[key] = info
+ logging.debug("no exact match found, chosen most similar of %s within base folders %s", info, folders)
+ return info
+ msg = "failed to detect {} in {}".format("|".join(possible_names), os.pathsep.join(possible_folders))
+ raise RuntimeError(msg)
+
+ def _check_exe(self, app_data, folder, name, exact, discovered):
+ exe_path = os.path.join(folder, name)
+ if not os.path.exists(exe_path):
+ return None
+ info = self.from_exe(exe_path, app_data, resolve_to_host=False, raise_on_error=False)
+ if info is None: # ignore if for some reason we can't query
+ return None
+ for item in ["implementation", "architecture", "version_info"]:
+ found = getattr(info, item)
+ searched = getattr(self, item)
+ if found != searched:
+ if item == "version_info":
+ found, searched = ".".join(str(i) for i in found), ".".join(str(i) for i in searched)
+ executable = info.executable
+ logging.debug("refused interpreter %s because %s differs %s != %s", executable, item, found, searched)
+ if exact is False:
+ discovered.append(info)
+ break
+ else:
+ return info
+ return None
+
+ @staticmethod
+ def _select_most_likely(discovered, target):
+        # no exact match found, so start relaxing our requirements to accommodate system package upgrades that
+        # could cause this (when using the copy strategy of the host python)
+ def sort_by(info):
+            # we need to set up some priority of traits, which is as follows:
+            # implementation, major, minor, architecture, micro, releaselevel, serial
+ matches = [
+ info.implementation == target.implementation,
+ info.version_info.major == target.version_info.major,
+ info.version_info.minor == target.version_info.minor,
+ info.architecture == target.architecture,
+ info.version_info.micro == target.version_info.micro,
+ info.version_info.releaselevel == target.version_info.releaselevel,
+ info.version_info.serial == target.version_info.serial,
+ ]
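+            # earlier traits get higher bit weights, so an implementation match outweighs any combination of later traits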
+ priority = sum((1 << pos if match else 0) for pos, match in enumerate(reversed(matches)))
+ return priority
+
+ sorted_discovered = sorted(discovered, key=sort_by, reverse=True) # sort by priority in decreasing order
+ most_likely = sorted_discovered[0]
+ return most_likely
+
+ def _find_possible_folders(self, inside_folder):
+ candidate_folder = OrderedDict()
+ executables = OrderedDict()
+ executables[os.path.realpath(self.executable)] = None
+ executables[self.executable] = None
+ executables[os.path.realpath(self.original_executable)] = None
+ executables[self.original_executable] = None
+ for exe in executables.keys():
+ base = os.path.dirname(exe)
+            # follow the path pattern of the current executable
+ if base.startswith(self.prefix):
+ relative = base[len(self.prefix) :]
+ candidate_folder["{}{}".format(inside_folder, relative)] = None
+
+ # or at root level
+ candidate_folder[inside_folder] = None
+ return list(i for i in candidate_folder.keys() if os.path.exists(i))
+
+ def _find_possible_exe_names(self):
+ name_candidate = OrderedDict()
+ for name in self._possible_base():
+ for at in (3, 2, 1, 0):
+ version = ".".join(str(i) for i in self.version_info[:at])
+ for arch in ["-{}".format(self.architecture), ""]:
+ for ext in EXTENSIONS:
+ candidate = "{}{}{}{}".format(name, version, arch, ext)
+ name_candidate[candidate] = None
+ return list(name_candidate.keys())
+
+ def _possible_base(self):
+ possible_base = OrderedDict()
+ basename = os.path.splitext(os.path.basename(self.executable))[0].rstrip(digits)
+ possible_base[basename] = None
+ possible_base[self.implementation] = None
+        # python is always the final option, as in practice it is used by multiple implementations as the exe name
+ if "python" in possible_base:
+ del possible_base["python"]
+ possible_base["python"] = None
+ for base in possible_base:
+ lower = base.lower()
+ yield lower
+ from virtualenv.info import fs_is_case_sensitive
+
+ if fs_is_case_sensitive():
+ if base != lower:
+ yield base
+ upper = base.upper()
+ if upper != base:
+ yield upper
+
+
+if __name__ == "__main__":
+ # dump a JSON representation of the current python
+ # noinspection PyProtectedMember
+ print(PythonInfo()._to_json())
diff --git a/third_party/python/virtualenv/virtualenv/discovery/py_spec.py b/third_party/python/virtualenv/virtualenv/discovery/py_spec.py
new file mode 100644
index 0000000000..cb63e1516b
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/discovery/py_spec.py
@@ -0,0 +1,122 @@
+"""A Python specification is an abstract requirement definition of a interpreter"""
+from __future__ import absolute_import, unicode_literals
+
+import os
+import re
+import sys
+from collections import OrderedDict
+
+from virtualenv.info import fs_is_case_sensitive
+from virtualenv.util.six import ensure_str
+
+PATTERN = re.compile(r"^(?P<impl>[a-zA-Z]+)?(?P<version>[0-9.]+)?(?:-(?P<arch>32|64))?$")
+IS_WIN = sys.platform == "win32"
+
+
+class PythonSpec(object):
+ """Contains specification about a Python Interpreter"""
+
+ def __init__(self, str_spec, implementation, major, minor, micro, architecture, path):
+ self.str_spec = str_spec
+ self.implementation = implementation
+ self.major = major
+ self.minor = minor
+ self.micro = micro
+ self.architecture = architecture
+ self.path = path
+
+ @classmethod
+ def from_string_spec(cls, string_spec):
+ impl, major, minor, micro, arch, path = None, None, None, None, None, None
+ if os.path.isabs(string_spec):
+ path = string_spec
+ else:
+ ok = False
+ match = re.match(PATTERN, string_spec)
+ if match:
+
+ def _int_or_none(val):
+ return None if val is None else int(val)
+
+ try:
+ groups = match.groupdict()
+ version = groups["version"]
+ if version is not None:
+ versions = tuple(int(i) for i in version.split(".") if i)
+ if len(versions) > 3:
+ raise ValueError
+ if len(versions) == 3:
+ major, minor, micro = versions
+ elif len(versions) == 2:
+ major, minor = versions
+ elif len(versions) == 1:
+ version_data = versions[0]
+ major = int(str(version_data)[0]) # first digit major
+ if version_data > 9:
+ minor = int(str(version_data)[1:])
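+                        # e.g. a spec of "39" is read as major 3, minor 9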
+ ok = True
+ except ValueError:
+ pass
+ else:
+ impl = groups["impl"]
+ if impl == "py" or impl == "python":
+ impl = "CPython"
+ arch = _int_or_none(groups["arch"])
+
+ if not ok:
+ path = string_spec
+
+ return cls(string_spec, impl, major, minor, micro, arch, path)
+
+ def generate_names(self):
+ impls = OrderedDict()
+ if self.implementation:
+ # first consider implementation as it is
+ impls[self.implementation] = False
+ if fs_is_case_sensitive():
+ # for case sensitive file systems consider lower and upper case versions too
+ # trivia: MacBooks and all pre 2018 Windows-es were case insensitive by default
+ impls[self.implementation.lower()] = False
+ impls[self.implementation.upper()] = False
+ impls["python"] = True # finally consider python as alias, implementation must match now
+ version = self.major, self.minor, self.micro
+ try:
+ version = version[: version.index(None)]
+ except ValueError:
+ pass
+ for impl, match in impls.items():
+ for at in range(len(version), -1, -1):
+ cur_ver = version[0:at]
+ spec = "{}{}".format(impl, ".".join(str(i) for i in cur_ver))
+ yield spec, match
+
+ @property
+ def is_abs(self):
+ return self.path is not None and os.path.isabs(self.path)
+
+ def satisfies(self, spec):
+ """called when there's a candidate metadata spec to see if compatible - e.g. PEP-514 on Windows"""
+ if spec.is_abs and self.is_abs and self.path != spec.path:
+ return False
+ if spec.implementation is not None and spec.implementation.lower() != self.implementation.lower():
+ return False
+ if spec.architecture is not None and spec.architecture != self.architecture:
+ return False
+
+ for our, req in zip((self.major, self.minor, self.micro), (spec.major, spec.minor, spec.micro)):
+ if req is not None and our is not None and our != req:
+ return False
+ return True
+
+ def __unicode__(self):
+ return "{}({})".format(
+ type(self).__name__,
+ ", ".join(
+ "{}={}".format(k, getattr(self, k))
+ for k in ("implementation", "major", "minor", "micro", "architecture", "path")
+ if getattr(self, k) is not None
+ ),
+ )
+
+ def __repr__(self):
+ return ensure_str(self.__unicode__())
diff --git a/third_party/python/virtualenv/virtualenv/discovery/windows/__init__.py b/third_party/python/virtualenv/virtualenv/discovery/windows/__init__.py
new file mode 100644
index 0000000000..9063ab8df7
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/discovery/windows/__init__.py
@@ -0,0 +1,28 @@
+from __future__ import absolute_import, unicode_literals
+
+from ..py_info import PythonInfo
+from ..py_spec import PythonSpec
+from .pep514 import discover_pythons
+
+
+class Pep514PythonInfo(PythonInfo):
+ """"""
+
+
+def propose_interpreters(spec, cache_dir):
+ # see if PEP-514 entries are good
+
+ # start with higher python versions in an effort to use the latest version available
+ existing = list(discover_pythons())
+ existing.sort(key=lambda i: tuple(-1 if j is None else j for j in i[1:4]), reverse=True)
+
+ for name, major, minor, arch, exe, _ in existing:
+ # pre-filter
+ if name in ("PythonCore", "ContinuumAnalytics"):
+ name = "CPython"
+ registry_spec = PythonSpec(None, name, major, minor, None, arch, exe)
+ if registry_spec.satisfies(spec):
+ interpreter = Pep514PythonInfo.from_exe(exe, cache_dir, raise_on_error=False)
+ if interpreter is not None:
+ if interpreter.satisfies(spec, impl_must_match=True):
+ yield interpreter
diff --git a/third_party/python/virtualenv/virtualenv/discovery/windows/pep514.py b/third_party/python/virtualenv/virtualenv/discovery/windows/pep514.py
new file mode 100644
index 0000000000..048436a600
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/discovery/windows/pep514.py
@@ -0,0 +1,161 @@
+"""Implement https://www.python.org/dev/peps/pep-0514/ to discover interpreters - Windows only"""
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+import re
+from logging import basicConfig, getLogger
+
+import six
+
+if six.PY3:
+ import winreg
+else:
+ # noinspection PyUnresolvedReferences
+ import _winreg as winreg
+
+LOGGER = getLogger(__name__)
+
+
+def enum_keys(key):
+ at = 0
+ while True:
+ try:
+ yield winreg.EnumKey(key, at)
+ except OSError:
+ break
+ at += 1
+
+
+def get_value(key, value_name):
+ try:
+ return winreg.QueryValueEx(key, value_name)[0]
+ except OSError:
+ return None
+
+
+def discover_pythons():
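+    # PEP 514 locations: the current user hive first, then the 64-bit and 32-bit views of the machine-wide hive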
+ for hive, hive_name, key, flags, default_arch in [
+ (winreg.HKEY_CURRENT_USER, "HKEY_CURRENT_USER", r"Software\Python", 0, 64),
+ (winreg.HKEY_LOCAL_MACHINE, "HKEY_LOCAL_MACHINE", r"Software\Python", winreg.KEY_WOW64_64KEY, 64),
+ (winreg.HKEY_LOCAL_MACHINE, "HKEY_LOCAL_MACHINE", r"Software\Python", winreg.KEY_WOW64_32KEY, 32),
+ ]:
+ for spec in process_set(hive, hive_name, key, flags, default_arch):
+ yield spec
+
+
+def process_set(hive, hive_name, key, flags, default_arch):
+ try:
+ with winreg.OpenKeyEx(hive, key, 0, winreg.KEY_READ | flags) as root_key:
+ for company in enum_keys(root_key):
+ if company == "PyLauncher": # reserved
+ continue
+ for spec in process_company(hive_name, company, root_key, default_arch):
+ yield spec
+ except OSError:
+ pass
+
+
+def process_company(hive_name, company, root_key, default_arch):
+ with winreg.OpenKeyEx(root_key, company) as company_key:
+ for tag in enum_keys(company_key):
+ spec = process_tag(hive_name, company, company_key, tag, default_arch)
+ if spec is not None:
+ yield spec
+
+
+def process_tag(hive_name, company, company_key, tag, default_arch):
+ with winreg.OpenKeyEx(company_key, tag) as tag_key:
+ version = load_version_data(hive_name, company, tag, tag_key)
+ if version is not None: # if we failed to get the version, bail
+ major, minor, _ = version
+ arch = load_arch_data(hive_name, company, tag, tag_key, default_arch)
+ if arch is not None:
+ exe_data = load_exe(hive_name, company, company_key, tag)
+ if exe_data is not None:
+ exe, args = exe_data
+ return company, major, minor, arch, exe, args
+
+
+def load_exe(hive_name, company, company_key, tag):
+ key_path = "{}/{}/{}".format(hive_name, company, tag)
+ try:
+ with winreg.OpenKeyEx(company_key, r"{}\InstallPath".format(tag)) as ip_key:
+ with ip_key:
+ exe = get_value(ip_key, "ExecutablePath")
+ if exe is None:
+ ip = get_value(ip_key, None)
+ if ip is None:
+ msg(key_path, "no ExecutablePath or default for it")
+
+ else:
+ exe = os.path.join(ip, str("python.exe"))
+ if exe is not None and os.path.exists(exe):
+ args = get_value(ip_key, "ExecutableArguments")
+ return exe, args
+ else:
+ msg(key_path, "could not load exe with value {}".format(exe))
+ except OSError:
+ msg("{}/{}".format(key_path, "InstallPath"), "missing")
+ return None
+
+
+def load_arch_data(hive_name, company, tag, tag_key, default_arch):
+ arch_str = get_value(tag_key, "SysArchitecture")
+ if arch_str is not None:
+ key_path = "{}/{}/{}/SysArchitecture".format(hive_name, company, tag)
+ try:
+ return parse_arch(arch_str)
+ except ValueError as sys_arch:
+ msg(key_path, sys_arch)
+ return default_arch
+
+
+def parse_arch(arch_str):
+ if isinstance(arch_str, six.string_types):
+ match = re.match(r"^(\d+)bit$", arch_str)
+ if match:
+ return int(next(iter(match.groups())))
+ error = "invalid format {}".format(arch_str)
+ else:
+ error = "arch is not string: {}".format(repr(arch_str))
+ raise ValueError(error)
+
+
+def load_version_data(hive_name, company, tag, tag_key):
+ for candidate, key_path in [
+ (get_value(tag_key, "SysVersion"), "{}/{}/{}/SysVersion".format(hive_name, company, tag)),
+ (tag, "{}/{}/{}".format(hive_name, company, tag)),
+ ]:
+ if candidate is not None:
+ try:
+ return parse_version(candidate)
+ except ValueError as sys_version:
+ msg(key_path, sys_version)
+ return None
+
+
+def parse_version(version_str):
+ if isinstance(version_str, six.string_types):
+ match = re.match(r"^(\d+)(?:\.(\d+))?(?:\.(\d+))?$", version_str)
+ if match:
+ return tuple(int(i) if i is not None else None for i in match.groups())
+ error = "invalid format {}".format(version_str)
+ else:
+ error = "version is not string: {}".format(repr(version_str))
+ raise ValueError(error)
+
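+# Illustrative examples (not part of upstream virtualenv) for the two parsers above:
+#   parse_version("3.8")   -> (3, 8, None)
+#   parse_version("3.8.1") -> (3, 8, 1)
+#   parse_arch("64bit")    -> 64
+# anything else raises ValueError, which the callers log through msg() below.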
+
+def msg(path, what):
+ LOGGER.warning("PEP-514 violation in Windows Registry at {} error: {}".format(path, what))
+
+
+def _run():
+ basicConfig()
+ interpreters = []
+ for spec in discover_pythons():
+ interpreters.append(repr(spec))
+ print("\n".join(sorted(interpreters)))
+
+
+if __name__ == "__main__":
+ _run()
diff --git a/third_party/python/virtualenv/virtualenv/info.py b/third_party/python/virtualenv/virtualenv/info.py
new file mode 100644
index 0000000000..afe4097736
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/info.py
@@ -0,0 +1,65 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+import platform
+import sys
+import tempfile
+
+IMPLEMENTATION = platform.python_implementation()
+IS_PYPY = IMPLEMENTATION == "PyPy"
+IS_CPYTHON = IMPLEMENTATION == "CPython"
+PY3 = sys.version_info[0] == 3
+PY2 = sys.version_info[0] == 2
+IS_WIN = sys.platform == "win32"
+ROOT = os.path.realpath(os.path.join(os.path.abspath(__file__), os.path.pardir, os.path.pardir))
+IS_ZIPAPP = os.path.isfile(ROOT)
+WIN_CPYTHON_2 = IS_CPYTHON and IS_WIN and PY2
+
+_CAN_SYMLINK = _FS_CASE_SENSITIVE = _CFG_DIR = _DATA_DIR = None
+
+
+def fs_is_case_sensitive():
+ global _FS_CASE_SENSITIVE
+
+ if _FS_CASE_SENSITIVE is None:
+ with tempfile.NamedTemporaryFile(prefix="TmP") as tmp_file:
+ _FS_CASE_SENSITIVE = not os.path.exists(tmp_file.name.lower())
+ logging.debug("filesystem is %scase-sensitive", "" if _FS_CASE_SENSITIVE else "not ")
+ return _FS_CASE_SENSITIVE
+
+
+def fs_supports_symlink():
+ global _CAN_SYMLINK
+
+ if _CAN_SYMLINK is None:
+ can = False
+ if hasattr(os, "symlink"):
+ if IS_WIN:
+ with tempfile.NamedTemporaryFile(prefix="TmP") as tmp_file:
+ temp_dir = os.path.dirname(tmp_file.name)
+ dest = os.path.join(temp_dir, "{}-{}".format(tmp_file.name, "b"))
+ try:
+ os.symlink(tmp_file.name, dest)
+ can = True
+ except (OSError, NotImplementedError):
+ pass
+ logging.debug("symlink on filesystem does%s work", "" if can else " not")
+ else:
+ can = True
+ _CAN_SYMLINK = can
+ return _CAN_SYMLINK
+
+
+__all__ = (
+ "IS_PYPY",
+ "IS_CPYTHON",
+ "PY3",
+ "PY2",
+ "IS_WIN",
+ "fs_is_case_sensitive",
+ "fs_supports_symlink",
+ "ROOT",
+ "IS_ZIPAPP",
+ "WIN_CPYTHON_2",
+)
diff --git a/third_party/python/virtualenv/virtualenv/report.py b/third_party/python/virtualenv/virtualenv/report.py
new file mode 100644
index 0000000000..665b293cb2
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/report.py
@@ -0,0 +1,57 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import sys
+
+from virtualenv.util.six import ensure_str
+
+LEVELS = {
+ 0: logging.CRITICAL,
+ 1: logging.ERROR,
+ 2: logging.WARNING,
+ 3: logging.INFO,
+ 4: logging.DEBUG,
+ 5: logging.NOTSET,
+}
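+# Example (illustrative, not part of upstream virtualenv): setup_report(4) below
+# attaches a stdout handler at logging.DEBUG, while setup_report(0) only lets
+# CRITICAL records through.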
+
+MAX_LEVEL = max(LEVELS.keys())
+LOGGER = logging.getLogger()
+
+
+def setup_report(verbosity, show_pid=False):
+ _clean_handlers(LOGGER)
+ if verbosity > MAX_LEVEL:
+ verbosity = MAX_LEVEL # pragma: no cover
+ level = LEVELS[verbosity]
+ msg_format = "%(message)s"
+ filelock_logger = logging.getLogger("filelock")
+ if level <= logging.DEBUG:
+ locate = "module"
+ msg_format = "%(relativeCreated)d {} [%(levelname)s %({})s:%(lineno)d]".format(msg_format, locate)
+ filelock_logger.setLevel(level)
+ else:
+ filelock_logger.setLevel(logging.WARN)
+ if show_pid:
+ msg_format = "[%(process)d] " + msg_format
+ formatter = logging.Formatter(ensure_str(msg_format))
+ stream_handler = logging.StreamHandler(stream=sys.stdout)
+ stream_handler.setLevel(level)
+ LOGGER.setLevel(logging.NOTSET)
+ stream_handler.setFormatter(formatter)
+ LOGGER.addHandler(stream_handler)
+ level_name = logging.getLevelName(level)
+ logging.debug("setup logging to %s", level_name)
+ logging.getLogger("distlib").setLevel(logging.ERROR)
+ return verbosity
+
+
+def _clean_handlers(log):
+ for log_handler in list(log.handlers): # remove handlers of libraries
+ log.removeHandler(log_handler)
+
+
+__all__ = (
+ "LEVELS",
+ "MAX_LEVEL",
+ "setup_report",
+)
diff --git a/third_party/python/virtualenv/virtualenv/run/__init__.py b/third_party/python/virtualenv/virtualenv/run/__init__.py
new file mode 100644
index 0000000000..66083df82b
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/run/__init__.py
@@ -0,0 +1,151 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+from functools import partial
+
+from ..app_data import make_app_data
+from ..config.cli.parser import VirtualEnvConfigParser
+from ..report import LEVELS, setup_report
+from ..run.session import Session
+from ..seed.wheels.periodic_update import manual_upgrade
+from ..version import __version__
+from .plugin.activators import ActivationSelector
+from .plugin.creators import CreatorSelector
+from .plugin.discovery import get_discover
+from .plugin.seeders import SeederSelector
+
+
+def cli_run(args, options=None, setup_logging=True):
+ """
+ Create a virtual environment given some command line interface arguments.
+
+ :param args: the command line arguments
+ :param options: passing in a ``VirtualEnvOptions`` object allows return of the parsed options
+ :param setup_logging: ``True`` to set up logging handlers, ``False`` to use handlers already registered
+ :return: the session object of the creation (its structure for now is experimental and might change on short notice)
+ """
+ of_session = session_via_cli(args, options, setup_logging)
+ with of_session:
+ of_session.run()
+ return of_session
+
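+# Minimal usage sketch for cli_run above (illustrative only, not part of upstream
+# virtualenv; the target path is hypothetical):
+#
+#     from virtualenv.run import cli_run
+#     session = cli_run(["/tmp/example-venv", "--no-download"])
+#     print(session.creator.dest)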
+
+def session_via_cli(args, options=None, setup_logging=True):
+ """
+ Create a virtualenv session (same as cli_run, but this does not perform the creation). Use this if you just want to
+ query what the virtual environment would look like, but not actually create it.
+
+ :param args: the command line arguments
+ :param options: passing in a ``VirtualEnvOptions`` object allows return of the parsed options
+ :param setup_logging: ``True`` to set up logging handlers, ``False`` to use handlers already registered
+ :return: the session object of the creation (its structure for now is experimental and might change on short notice)
+ """
+ parser, elements = build_parser(args, options, setup_logging)
+ options = parser.parse_args(args)
+ creator, seeder, activators = tuple(e.create(options) for e in elements) # create types
+ of_session = Session(options.verbosity, options.app_data, parser._interpreter, creator, seeder, activators) # noqa
+ return of_session
+
+
+def build_parser(args=None, options=None, setup_logging=True):
+ parser = VirtualEnvConfigParser(options)
+ add_version_flag(parser)
+ parser.add_argument(
+ "--with-traceback",
+ dest="with_traceback",
+ action="store_true",
+ default=False,
+ help="on failure also display the internal stacktrace of virtualenv",
+ )
+ _do_report_setup(parser, args, setup_logging)
+ options = load_app_data(args, parser, options)
+ handle_extra_commands(options)
+
+ discover = get_discover(parser, args)
+ parser._interpreter = interpreter = discover.interpreter
+ if interpreter is None:
+ raise RuntimeError("failed to find interpreter for {}".format(discover))
+ elements = [
+ CreatorSelector(interpreter, parser),
+ SeederSelector(interpreter, parser),
+ ActivationSelector(interpreter, parser),
+ ]
+ options, _ = parser.parse_known_args(args)
+ for element in elements:
+ element.handle_selected_arg_parse(options)
+ parser.enable_help()
+ return parser, elements
+
+
+def build_parser_only(args=None):
+ """Used to provide a parser for the doc generation"""
+ return build_parser(args)[0]
+
+
+def handle_extra_commands(options):
+ if options.upgrade_embed_wheels:
+ result = manual_upgrade(options.app_data)
+ raise SystemExit(result)
+
+
+def load_app_data(args, parser, options):
+ parser.add_argument(
+ "--read-only-app-data",
+ action="store_true",
+ help="use app data folder in read-only mode (write operations will fail with error)",
+ )
+ options, _ = parser.parse_known_args(args, namespace=options)
+
+ # here we need a writable application data folder (e.g. the zipapp might need this for the discovery cache)
+ parser.add_argument(
+ "--app-data",
+ help="a data folder used as cache by the virtualenv",
+ type=partial(make_app_data, read_only=options.read_only_app_data),
+ default=make_app_data(None, read_only=options.read_only_app_data),
+ )
+ parser.add_argument(
+ "--reset-app-data",
+ action="store_true",
+ help="start with empty app data folder",
+ )
+ parser.add_argument(
+ "--upgrade-embed-wheels",
+ action="store_true",
+ help="trigger a manual update of the embedded wheels",
+ )
+ options, _ = parser.parse_known_args(args, namespace=options)
+ if options.reset_app_data:
+ options.app_data.reset()
+ return options
+
+
+def add_version_flag(parser):
+ import virtualenv
+
+ parser.add_argument(
+ "--version",
+ action="version",
+ version="%(prog)s {} from {}".format(__version__, virtualenv.__file__),
+ help="display the version of the virtualenv package and its location, then exit",
+ )
+
+
+def _do_report_setup(parser, args, setup_logging):
+ level_map = ", ".join("{}={}".format(logging.getLevelName(l), c) for c, l in sorted(list(LEVELS.items())))
+ msg = "verbosity = verbose - quiet, default {}, mapping => {}"
+ verbosity_group = parser.add_argument_group(
+ title="verbosity",
+ description=msg.format(logging.getLevelName(LEVELS[3]), level_map),
+ )
+ verbosity = verbosity_group.add_mutually_exclusive_group()
+ verbosity.add_argument("-v", "--verbose", action="count", dest="verbose", help="increase verbosity", default=2)
+ verbosity.add_argument("-q", "--quiet", action="count", dest="quiet", help="decrease verbosity", default=0)
+ option, _ = parser.parse_known_args(args)
+ if setup_logging:
+ setup_report(option.verbosity)
+
+
+__all__ = (
+ "cli_run",
+ "session_via_cli",
+)
diff --git a/third_party/python/virtualenv/virtualenv/run/plugin/__init__.py b/third_party/python/virtualenv/virtualenv/run/plugin/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/run/plugin/__init__.py
diff --git a/third_party/python/virtualenv/virtualenv/run/plugin/activators.py b/third_party/python/virtualenv/virtualenv/run/plugin/activators.py
new file mode 100644
index 0000000000..dea28277f1
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/run/plugin/activators.py
@@ -0,0 +1,53 @@
+from __future__ import absolute_import, unicode_literals
+
+from argparse import ArgumentTypeError
+from collections import OrderedDict
+
+from .base import ComponentBuilder
+
+
+class ActivationSelector(ComponentBuilder):
+ def __init__(self, interpreter, parser):
+ self.default = None
+ possible = OrderedDict(
+ (k, v) for k, v in self.options("virtualenv.activate").items() if v.supports(interpreter)
+ )
+ super(ActivationSelector, self).__init__(interpreter, parser, "activators", possible)
+ self.parser.description = "options for activation scripts"
+ self.active = None
+
+ def add_selector_arg_parse(self, name, choices):
+ self.default = ",".join(choices)
+ self.parser.add_argument(
+ "--{}".format(name),
+ default=self.default,
+ metavar="comma_sep_list",
+ required=False,
+ help="activators to generate - default is all supported",
+ type=self._extract_activators,
+ )
+
+ def _extract_activators(self, entered_str):
+ elements = [e.strip() for e in entered_str.split(",") if e.strip()]
+ missing = [e for e in elements if e not in self.possible]
+ if missing:
+ raise ArgumentTypeError("the following activators are not available {}".format(",".join(missing)))
+ return elements
+
+ def handle_selected_arg_parse(self, options):
+ selected_activators = (
+ self._extract_activators(self.default) if options.activators is self.default else options.activators
+ )
+ self.active = {k: v for k, v in self.possible.items() if k in selected_activators}
+ self.parser.add_argument(
+ "--prompt",
+ dest="prompt",
+ metavar="prompt",
+ help="provides an alternative prompt prefix for this environment",
+ default=None,
+ )
+ for activator in self.active.values():
+ activator.add_parser_arguments(self.parser, self.interpreter)
+
+ def create(self, options):
+ return [activator_class(options) for activator_class in self.active.values()]
diff --git a/third_party/python/virtualenv/virtualenv/run/plugin/base.py b/third_party/python/virtualenv/virtualenv/run/plugin/base.py
new file mode 100644
index 0000000000..ed10fe0e27
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/run/plugin/base.py
@@ -0,0 +1,58 @@
+from __future__ import absolute_import, unicode_literals
+
+import sys
+from collections import OrderedDict
+
+if sys.version_info >= (3, 8):
+ from importlib.metadata import entry_points
+else:
+ from importlib_metadata import entry_points
+
+
+class PluginLoader(object):
+ _OPTIONS = None
+ _ENTRY_POINTS = None
+
+ @classmethod
+ def entry_points_for(cls, key):
+ return OrderedDict((e.name, e.load()) for e in cls.entry_points().get(key, {}))
+
+ @staticmethod
+ def entry_points():
+ if PluginLoader._ENTRY_POINTS is None:
+ PluginLoader._ENTRY_POINTS = entry_points()
+ return PluginLoader._ENTRY_POINTS
+
+
+class ComponentBuilder(PluginLoader):
+ def __init__(self, interpreter, parser, name, possible):
+ self.interpreter = interpreter
+ self.name = name
+ self._impl_class = None
+ self.possible = possible
+ self.parser = parser.add_argument_group(title=name)
+ self.add_selector_arg_parse(name, list(self.possible))
+
+ @classmethod
+ def options(cls, key):
+ if cls._OPTIONS is None:
+ cls._OPTIONS = cls.entry_points_for(key)
+ return cls._OPTIONS
+
+ def add_selector_arg_parse(self, name, choices):
+ raise NotImplementedError
+
+ def handle_selected_arg_parse(self, options):
+ selected = getattr(options, self.name)
+ if selected not in self.possible:
+ raise RuntimeError("No implementation for {}".format(self.interpreter))
+ self._impl_class = self.possible[selected]
+ self.populate_selected_argparse(selected, options.app_data)
+ return selected
+
+ def populate_selected_argparse(self, selected, app_data):
+ self.parser.description = "options for {} {}".format(self.name, selected)
+ self._impl_class.add_parser_arguments(self.parser, self.interpreter, app_data)
+
+ def create(self, options):
+ return self._impl_class(options, self.interpreter)
diff --git a/third_party/python/virtualenv/virtualenv/run/plugin/creators.py b/third_party/python/virtualenv/virtualenv/run/plugin/creators.py
new file mode 100644
index 0000000000..ef4177a595
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/run/plugin/creators.py
@@ -0,0 +1,77 @@
+from __future__ import absolute_import, unicode_literals
+
+from collections import OrderedDict, defaultdict, namedtuple
+
+from virtualenv.create.describe import Describe
+from virtualenv.create.via_global_ref.builtin.builtin_way import VirtualenvBuiltin
+
+from .base import ComponentBuilder
+
+CreatorInfo = namedtuple("CreatorInfo", ["key_to_class", "key_to_meta", "describe", "builtin_key"])
+
+
+class CreatorSelector(ComponentBuilder):
+ def __init__(self, interpreter, parser):
+ creators, self.key_to_meta, self.describe, self.builtin_key = self.for_interpreter(interpreter)
+ super(CreatorSelector, self).__init__(interpreter, parser, "creator", creators)
+
+ @classmethod
+ def for_interpreter(cls, interpreter):
+ key_to_class, key_to_meta, builtin_key, describe = OrderedDict(), {}, None, None
+ errors = defaultdict(list)
+ for key, creator_class in cls.options("virtualenv.create").items():
+ if key == "builtin":
+ raise RuntimeError("builtin creator is a reserved name")
+ meta = creator_class.can_create(interpreter)
+ if meta:
+ if meta.error:
+ errors[meta.error].append(creator_class)
+ else:
+ if "builtin" not in key_to_class and issubclass(creator_class, VirtualenvBuiltin):
+ builtin_key = key
+ key_to_class["builtin"] = creator_class
+ key_to_meta["builtin"] = meta
+ key_to_class[key] = creator_class
+ key_to_meta[key] = meta
+ if describe is None and issubclass(creator_class, Describe) and creator_class.can_describe(interpreter):
+ describe = creator_class
+ if not key_to_meta:
+ if errors:
+ rows = ["{} for creators {}".format(k, ", ".join(i.__name__ for i in v)) for k, v in errors.items()]
+ raise RuntimeError("\n".join(rows))
+ else:
+ raise RuntimeError("No virtualenv implementation for {}".format(interpreter))
+ return CreatorInfo(
+ key_to_class=key_to_class,
+ key_to_meta=key_to_meta,
+ describe=describe,
+ builtin_key=builtin_key,
+ )
+
+ def add_selector_arg_parse(self, name, choices):
+ # prefer the built-in venv if present, otherwise fall back to the first defined type
+ choices = sorted(choices, key=lambda a: 0 if a == "builtin" else 1)
+ default_value = self._get_default(choices)
+ self.parser.add_argument(
+ "--{}".format(name),
+ choices=choices,
+ default=default_value,
+ required=False,
+ help="create environment via{}".format(
+ "" if self.builtin_key is None else " (builtin = {})".format(self.builtin_key),
+ ),
+ )
+
+ @staticmethod
+ def _get_default(choices):
+ return next(iter(choices))
+
+ def populate_selected_argparse(self, selected, app_data):
+ self.parser.description = "options for {} {}".format(self.name, selected)
+ self._impl_class.add_parser_arguments(self.parser, self.interpreter, self.key_to_meta[selected], app_data)
+
+ def create(self, options):
+ options.meta = self.key_to_meta[getattr(options, self.name)]
+ if not issubclass(self._impl_class, Describe):
+ options.describe = self.describe(options, self.interpreter)
+ return super(CreatorSelector, self).create(options)
diff --git a/third_party/python/virtualenv/virtualenv/run/plugin/discovery.py b/third_party/python/virtualenv/virtualenv/run/plugin/discovery.py
new file mode 100644
index 0000000000..3b6fc60d83
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/run/plugin/discovery.py
@@ -0,0 +1,32 @@
+from __future__ import absolute_import, unicode_literals
+
+from .base import PluginLoader
+
+
+class Discovery(PluginLoader):
+ """"""
+
+
+def get_discover(parser, args):
+ discover_types = Discovery.entry_points_for("virtualenv.discovery")
+ discovery_parser = parser.add_argument_group(
+ title="discovery",
+ description="discover and provide a target interpreter",
+ )
+ discovery_parser.add_argument(
+ "--discovery",
+ choices=_get_default_discovery(discover_types),
+ default=next(i for i in discover_types.keys()),
+ required=False,
+ help="interpreter discovery method",
+ )
+ options, _ = parser.parse_known_args(args)
+ discover_class = discover_types[options.discovery]
+ discover_class.add_parser_arguments(discovery_parser)
+ options, _ = parser.parse_known_args(args, namespace=options)
+ discover = discover_class(options)
+ return discover
+
+
+def _get_default_discovery(discover_types):
+ return list(discover_types.keys())
diff --git a/third_party/python/virtualenv/virtualenv/run/plugin/seeders.py b/third_party/python/virtualenv/virtualenv/run/plugin/seeders.py
new file mode 100644
index 0000000000..d182c6f731
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/run/plugin/seeders.py
@@ -0,0 +1,35 @@
+from __future__ import absolute_import, unicode_literals
+
+from .base import ComponentBuilder
+
+
+class SeederSelector(ComponentBuilder):
+ def __init__(self, interpreter, parser):
+ possible = self.options("virtualenv.seed")
+ super(SeederSelector, self).__init__(interpreter, parser, "seeder", possible)
+
+ def add_selector_arg_parse(self, name, choices):
+ self.parser.add_argument(
+ "--{}".format(name),
+ choices=choices,
+ default=self._get_default(),
+ required=False,
+ help="seed packages install method",
+ )
+ self.parser.add_argument(
+ "--no-seed",
+ "--without-pip",
+ help="do not install seed packages",
+ action="store_true",
+ dest="no_seed",
+ )
+
+ @staticmethod
+ def _get_default():
+ return "app-data"
+
+ def handle_selected_arg_parse(self, options):
+ return super(SeederSelector, self).handle_selected_arg_parse(options)
+
+ def create(self, options):
+ return self._impl_class(options)
diff --git a/third_party/python/virtualenv/virtualenv/run/session.py b/third_party/python/virtualenv/virtualenv/run/session.py
new file mode 100644
index 0000000000..24836d2855
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/run/session.py
@@ -0,0 +1,91 @@
+from __future__ import absolute_import, unicode_literals
+
+import json
+import logging
+
+from virtualenv.util.six import ensure_text
+
+
+class Session(object):
+ """Represents a virtual environment creation session"""
+
+ def __init__(self, verbosity, app_data, interpreter, creator, seeder, activators):
+ self._verbosity = verbosity
+ self._app_data = app_data
+ self._interpreter = interpreter
+ self._creator = creator
+ self._seeder = seeder
+ self._activators = activators
+
+ @property
+ def verbosity(self):
+ """The verbosity of the run"""
+ return self._verbosity
+
+ @property
+ def interpreter(self):
+ """Create a virtual environment based on this reference interpreter"""
+ return self._interpreter
+
+ @property
+ def creator(self):
+ """The creator used to build the virtual environment (must be compatible with the interpreter)"""
+ return self._creator
+
+ @property
+ def seeder(self):
+ """The mechanism used to provide the seed packages (pip, setuptools, wheel)"""
+ return self._seeder
+
+ @property
+ def activators(self):
+ """Activators used to generate activation scripts"""
+ return self._activators
+
+ def run(self):
+ self._create()
+ self._seed()
+ self._activate()
+ self.creator.pyenv_cfg.write()
+
+ def _create(self):
+ logging.info("create virtual environment via %s", ensure_text(str(self.creator)))
+ self.creator.run()
+ logging.debug(_DEBUG_MARKER)
+ logging.debug("%s", _Debug(self.creator))
+
+ def _seed(self):
+ if self.seeder is not None and self.seeder.enabled:
+ logging.info("add seed packages via %s", self.seeder)
+ self.seeder.run(self.creator)
+
+ def _activate(self):
+ if self.activators:
+ logging.info(
+ "add activators for %s",
+ ", ".join(type(i).__name__.replace("Activator", "") for i in self.activators),
+ )
+ for activator in self.activators:
+ activator.generate(self.creator)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self._app_data.close()
+
+
+_DEBUG_MARKER = "=" * 30 + " target debug " + "=" * 30
+
+
+class _Debug(object):
+ """lazily populate debug"""
+
+ def __init__(self, creator):
+ self.creator = creator
+
+ def __unicode__(self):
+ return ensure_text(repr(self))
+
+ def __repr__(self):
+ return json.dumps(self.creator.debug, indent=2)
diff --git a/third_party/python/virtualenv/virtualenv/seed/__init__.py b/third_party/python/virtualenv/virtualenv/seed/__init__.py
new file mode 100644
index 0000000000..01e6d4f49d
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/__init__.py
@@ -0,0 +1 @@
+from __future__ import absolute_import, unicode_literals
diff --git a/third_party/python/virtualenv/virtualenv/seed/embed/__init__.py b/third_party/python/virtualenv/virtualenv/seed/embed/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/embed/__init__.py
diff --git a/third_party/python/virtualenv/virtualenv/seed/embed/base_embed.py b/third_party/python/virtualenv/virtualenv/seed/embed/base_embed.py
new file mode 100644
index 0000000000..c794e834de
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/embed/base_embed.py
@@ -0,0 +1,118 @@
+from __future__ import absolute_import, unicode_literals
+
+from abc import ABCMeta
+
+from six import add_metaclass
+
+from virtualenv.util.path import Path
+from virtualenv.util.six import ensure_str, ensure_text
+
+from ..seeder import Seeder
+from ..wheels import Version
+
+PERIODIC_UPDATE_ON_BY_DEFAULT = True
+
+
+@add_metaclass(ABCMeta)
+class BaseEmbed(Seeder):
+ def __init__(self, options):
+ super(BaseEmbed, self).__init__(options, enabled=options.no_seed is False)
+
+ self.download = options.download
+ self.extra_search_dir = [i.resolve() for i in options.extra_search_dir if i.exists()]
+
+ self.pip_version = options.pip
+ self.setuptools_version = options.setuptools
+ self.wheel_version = options.wheel
+
+ self.no_pip = options.no_pip
+ self.no_setuptools = options.no_setuptools
+ self.no_wheel = options.no_wheel
+ self.app_data = options.app_data
+ self.periodic_update = not options.no_periodic_update
+
+ if not self.distribution_to_versions():
+ self.enabled = False
+
+ @classmethod
+ def distributions(cls):
+ return {
+ "pip": Version.bundle,
+ "setuptools": Version.bundle,
+ "wheel": Version.bundle,
+ }
+
+ def distribution_to_versions(self):
+ return {
+ distribution: getattr(self, "{}_version".format(distribution))
+ for distribution in self.distributions()
+ if getattr(self, "no_{}".format(distribution)) is False
+ }
+
+ @classmethod
+ def add_parser_arguments(cls, parser, interpreter, app_data):
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument(
+ "--no-download",
+ "--never-download",
+ dest="download",
+ action="store_false",
+ help="pass to disable download of the latest {} from PyPI".format("/".join(cls.distributions())),
+ default=True,
+ )
+ group.add_argument(
+ "--download",
+ dest="download",
+ action="store_true",
+ help="pass to enable download of the latest {} from PyPI".format("/".join(cls.distributions())),
+ default=False,
+ )
+ parser.add_argument(
+ "--extra-search-dir",
+ metavar="d",
+ type=Path,
+ nargs="+",
+ help="a path containing wheels to extend the internal wheel list (can be set 1+ times)",
+ default=[],
+ )
+ for distribution, default in cls.distributions().items():
+ parser.add_argument(
+ "--{}".format(distribution),
+ dest=distribution,
+ metavar="version",
+ help="version of {} to install as seed: embed, bundle or exact version".format(distribution),
+ default=default,
+ )
+ for distribution in cls.distributions():
+ parser.add_argument(
+ "--no-{}".format(distribution),
+ dest="no_{}".format(distribution),
+ action="store_true",
+ help="do not install {}".format(distribution),
+ default=False,
+ )
+ parser.add_argument(
+ "--no-periodic-update",
+ dest="no_periodic_update",
+ action="store_true",
+ help="disable the periodic (once every 14 days) update of the embedded wheels",
+ default=not PERIODIC_UPDATE_ON_BY_DEFAULT,
+ )
+
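+ # Illustrative example (not part of upstream virtualenv): the flags defined above
+ # allow invocations such as
+ #   virtualenv ./my-env --pip 20.3.1 --no-wheel --extra-search-dir ./wheels
+ # which seeds an exact pip version, skips wheel, and also searches ./wheels for
+ # wheels; ./my-env and ./wheels are hypothetical paths.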
+ def __unicode__(self):
+ result = self.__class__.__name__
+ result += "("
+ if self.extra_search_dir:
+ result += "extra_search_dir={},".format(", ".join(ensure_text(str(i)) for i in self.extra_search_dir))
+ result += "download={},".format(self.download)
+ for distribution in self.distributions():
+ if getattr(self, "no_{}".format(distribution)):
+ continue
+ result += " {}{},".format(
+ distribution,
+ "={}".format(getattr(self, "{}_version".format(distribution), None) or "latest"),
+ )
+ return result[:-1] + ")"
+
+ def __repr__(self):
+ return ensure_str(self.__unicode__())
diff --git a/third_party/python/virtualenv/virtualenv/seed/embed/pip_invoke.py b/third_party/python/virtualenv/virtualenv/seed/embed/pip_invoke.py
new file mode 100644
index 0000000000..372e140dc4
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/embed/pip_invoke.py
@@ -0,0 +1,56 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+from contextlib import contextmanager
+
+from virtualenv.discovery.cached_py_info import LogCmd
+from virtualenv.seed.embed.base_embed import BaseEmbed
+from virtualenv.util.subprocess import Popen
+
+from ..wheels import Version, get_wheel, pip_wheel_env_run
+
+
+class PipInvoke(BaseEmbed):
+ def __init__(self, options):
+ super(PipInvoke, self).__init__(options)
+
+ def run(self, creator):
+ if not self.enabled:
+ return
+ for_py_version = creator.interpreter.version_release_str
+ with self.get_pip_install_cmd(creator.exe, for_py_version) as cmd:
+ env = pip_wheel_env_run(self.extra_search_dir, self.app_data)
+ self._execute(cmd, env)
+
+ @staticmethod
+ def _execute(cmd, env):
+ logging.debug("pip seed by running: %s", LogCmd(cmd, env))
+ process = Popen(cmd, env=env)
+ process.communicate()
+ if process.returncode != 0:
+ raise RuntimeError("failed seed with code {}".format(process.returncode))
+ return process
+
+ @contextmanager
+ def get_pip_install_cmd(self, exe, for_py_version):
+ cmd = [str(exe), "-m", "pip", "-q", "install", "--only-binary", ":all:", "--disable-pip-version-check"]
+ if not self.download:
+ cmd.append("--no-index")
+ folders = set()
+ for dist, version in self.distribution_to_versions().items():
+ wheel = get_wheel(
+ distribution=dist,
+ version=version,
+ for_py_version=for_py_version,
+ search_dirs=self.extra_search_dir,
+ download=False,
+ app_data=self.app_data,
+ do_periodic_update=self.periodic_update,
+ )
+ if wheel is None:
+ raise RuntimeError("could not get wheel for distribution {}".format(dist))
+ folders.add(str(wheel.path.parent))
+ cmd.append(Version.as_pip_req(dist, wheel.version))
+ for folder in sorted(folders):
+ cmd.extend(["--find-links", str(folder)])
+ yield cmd
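+# Illustrative note (not part of upstream virtualenv): for a CPython 3.8 target with
+# download disabled, get_pip_install_cmd above yields roughly
+#   <env-python> -m pip -q install --only-binary :all: --disable-pip-version-check
+#   --no-index pip==20.3.1 setuptools==51.0.0 wheel==0.36.1 --find-links <wheel folder>
+# with the exact pins taken from the wheels that get_wheel resolves.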
diff --git a/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/__init__.py b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/__init__.py
diff --git a/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/__init__.py b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/__init__.py
diff --git a/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/base.py b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/base.py
new file mode 100644
index 0000000000..a1d946d509
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/base.py
@@ -0,0 +1,158 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+import re
+import zipfile
+from abc import ABCMeta, abstractmethod
+from tempfile import mkdtemp
+
+from distlib.scripts import ScriptMaker, enquote_executable
+from six import PY3, add_metaclass
+
+from virtualenv.util import ConfigParser
+from virtualenv.util.path import Path, safe_delete
+from virtualenv.util.six import ensure_text
+
+
+@add_metaclass(ABCMeta)
+class PipInstall(object):
+ def __init__(self, wheel, creator, image_folder):
+ self._wheel = wheel
+ self._creator = creator
+ self._image_dir = image_folder
+ self._extracted = False
+ self.__dist_info = None
+ self._console_entry_points = None
+
+ @abstractmethod
+ def _sync(self, src, dst):
+ raise NotImplementedError
+
+ def install(self, version_info):
+ self._extracted = True
+ # sync image
+ for filename in self._image_dir.iterdir():
+ into = self._creator.purelib / filename.name
+ if into.exists():
+ if into.is_dir() and not into.is_symlink():
+ safe_delete(into)
+ else:
+ into.unlink()
+ self._sync(filename, into)
+ # generate console executables
+ consoles = set()
+ script_dir = self._creator.script_dir
+ for name, module in self._console_scripts.items():
+ consoles.update(self._create_console_entry_point(name, module, script_dir, version_info))
+ logging.debug("generated console scripts %s", " ".join(i.name for i in consoles))
+
+ def build_image(self):
+ # 1. first extract the wheel
+ logging.debug("build install image for %s to %s", self._wheel.name, self._image_dir)
+ with zipfile.ZipFile(str(self._wheel)) as zip_ref:
+ zip_ref.extractall(str(self._image_dir))
+ self._extracted = True
+ # 2. now add additional files not present in the distribution
+ new_files = self._generate_new_files()
+ # 3. finally fix the records file
+ self._fix_records(new_files)
+
+ def _records_text(self, files):
+ record_data = "\n".join(
+ "{},,".format(os.path.relpath(ensure_text(str(rec)), ensure_text(str(self._image_dir)))) for rec in files
+ )
+ return record_data
+
+ def _generate_new_files(self):
+ new_files = set()
+ installer = self._dist_info / "INSTALLER"
+ installer.write_text("pip\n")
+ new_files.add(installer)
+ # inject a no-op root element, as a workaround for the bug in https://github.com/pypa/pip/issues/7226
+ marker = self._image_dir / "{}.virtualenv".format(self._dist_info.stem)
+ marker.write_text("")
+ new_files.add(marker)
+ folder = mkdtemp()
+ try:
+ to_folder = Path(folder)
+ rel = os.path.relpath(ensure_text(str(self._creator.script_dir)), ensure_text(str(self._creator.purelib)))
+ version_info = self._creator.interpreter.version_info
+ for name, module in self._console_scripts.items():
+ new_files.update(
+ Path(os.path.normpath(ensure_text(str(self._image_dir / rel / i.name))))
+ for i in self._create_console_entry_point(name, module, to_folder, version_info)
+ )
+ finally:
+ safe_delete(folder)
+ return new_files
+
+ @property
+ def _dist_info(self):
+ if self._extracted is False:
+ return None # pragma: no cover
+ if self.__dist_info is None:
+ files = []
+ for filename in self._image_dir.iterdir():
+ files.append(filename.name)
+ if filename.suffix == ".dist-info":
+ self.__dist_info = filename
+ break
+ else:
+ msg = "no .dist-info at {}, has {}".format(self._image_dir, ", ".join(files)) # pragma: no cover
+ raise RuntimeError(msg) # pragma: no cover
+ return self.__dist_info
+
+ @abstractmethod
+ def _fix_records(self, extra_record_data):
+ raise NotImplementedError
+
+ @property
+ def _console_scripts(self):
+ if self._extracted is False:
+ return None # pragma: no cover
+ if self._console_entry_points is None:
+ self._console_entry_points = {}
+ entry_points = self._dist_info / "entry_points.txt"
+ if entry_points.exists():
+ parser = ConfigParser.ConfigParser()
+ with entry_points.open() as file_handler:
+ reader = getattr(parser, "read_file" if PY3 else "readfp")
+ reader(file_handler)
+ if "console_scripts" in parser.sections():
+ for name, value in parser.items("console_scripts"):
+ match = re.match(r"(.*?)-?\d\.?\d*", name)
+ if match:
+ name = match.groups(1)[0]
+ self._console_entry_points[name] = value
+ return self._console_entry_points
+
+ def _create_console_entry_point(self, name, value, to_folder, version_info):
+ result = []
+ maker = ScriptMakerCustom(to_folder, version_info, self._creator.exe, name)
+ specification = "{} = {}".format(name, value)
+ new_files = maker.make(specification)
+ result.extend(Path(i) for i in new_files)
+ return result
+
+ def clear(self):
+ if self._image_dir.exists():
+ safe_delete(self._image_dir)
+
+ def has_image(self):
+ return self._image_dir.exists() and next(self._image_dir.iterdir()) is not None
+
+
+class ScriptMakerCustom(ScriptMaker):
+ def __init__(self, target_dir, version_info, executable, name):
+ super(ScriptMakerCustom, self).__init__(None, str(target_dir))
+ self.clobber = True # overwrite
+ self.set_mode = True # ensure they are executable
+ self.executable = enquote_executable(str(executable))
+ self.version_info = version_info.major, version_info.minor
+ self.variants = {"", "X", "X.Y"}
+ self._name = name
+
+ def _write_script(self, names, shebang, script_bytes, filenames, ext):
+ names.add("{}{}.{}".format(self._name, *self.version_info))
+ super(ScriptMakerCustom, self)._write_script(names, shebang, script_bytes, filenames, ext)
diff --git a/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/copy.py b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/copy.py
new file mode 100644
index 0000000000..29d0bc88d1
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/copy.py
@@ -0,0 +1,35 @@
+from __future__ import absolute_import, unicode_literals
+
+import os
+
+from virtualenv.util.path import Path, copy
+from virtualenv.util.six import ensure_text
+
+from .base import PipInstall
+
+
+class CopyPipInstall(PipInstall):
+ def _sync(self, src, dst):
+ copy(src, dst)
+
+ def _generate_new_files(self):
+ # create the pyc files
+ new_files = super(CopyPipInstall, self)._generate_new_files()
+ new_files.update(self._cache_files())
+ return new_files
+
+ def _cache_files(self):
+ version = self._creator.interpreter.version_info
+ py_c_ext = ".{}-{}{}.pyc".format(self._creator.interpreter.implementation.lower(), version.major, version.minor)
+ for root, dirs, files in os.walk(ensure_text(str(self._image_dir)), topdown=True):
+ root_path = Path(root)
+ for name in files:
+ if name.endswith(".py"):
+ yield root_path / "{}{}".format(name[:-3], py_c_ext)
+ for name in dirs:
+ yield root_path / name / "__pycache__"
+
+ def _fix_records(self, new_files):
+ extra_record_data_str = self._records_text(new_files)
+ with open(ensure_text(str(self._dist_info / "RECORD")), "ab") as file_handler:
+ file_handler.write(extra_record_data_str.encode("utf-8"))
diff --git a/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/symlink.py b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/symlink.py
new file mode 100644
index 0000000000..f958b65451
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/pip_install/symlink.py
@@ -0,0 +1,61 @@
+from __future__ import absolute_import, unicode_literals
+
+import os
+import subprocess
+from stat import S_IREAD, S_IRGRP, S_IROTH
+
+from virtualenv.util.path import safe_delete, set_tree
+from virtualenv.util.six import ensure_text
+from virtualenv.util.subprocess import Popen
+
+from .base import PipInstall
+
+
+class SymlinkPipInstall(PipInstall):
+ def _sync(self, src, dst):
+ src_str = ensure_text(str(src))
+ dest_str = ensure_text(str(dst))
+ os.symlink(src_str, dest_str)
+
+ def _generate_new_files(self):
+ # create the pyc files, as the build image will be R/O
+ process = Popen(
+ [ensure_text(str(self._creator.exe)), "-m", "compileall", ensure_text(str(self._image_dir))],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+ process.communicate()
+ # the root pyc is shared, so we will not symlink that - but still add the pyc files to the RECORD so it stays complete
+ root_py_cache = self._image_dir / "__pycache__"
+ new_files = set()
+ if root_py_cache.exists():
+ new_files.update(root_py_cache.iterdir())
+ new_files.add(root_py_cache)
+ safe_delete(root_py_cache)
+ core_new_files = super(SymlinkPipInstall, self)._generate_new_files()
+ # skip files that are within the image folder deeper than one level (as these will not be linked directly)
+ for file in core_new_files:
+ try:
+ rel = file.relative_to(self._image_dir)
+ if len(rel.parts) > 1:
+ continue
+ except ValueError:
+ pass
+ new_files.add(file)
+ return new_files
+
+ def _fix_records(self, new_files):
+ new_files.update(i for i in self._image_dir.iterdir())
+ extra_record_data_str = self._records_text(sorted(new_files, key=str))
+ with open(ensure_text(str(self._dist_info / "RECORD")), "wb") as file_handler:
+ file_handler.write(extra_record_data_str.encode("utf-8"))
+
+ def build_image(self):
+ super(SymlinkPipInstall, self).build_image()
+ # protect the image by making it read only
+ set_tree(self._image_dir, S_IREAD | S_IRGRP | S_IROTH)
+
+ def clear(self):
+ if self._image_dir.exists():
+ safe_delete(self._image_dir)
+ super(SymlinkPipInstall, self).clear()
diff --git a/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/via_app_data.py b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/via_app_data.py
new file mode 100644
index 0000000000..1afa7978c5
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/embed/via_app_data/via_app_data.py
@@ -0,0 +1,139 @@
+"""Bootstrap"""
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import sys
+import traceback
+from contextlib import contextmanager
+from subprocess import CalledProcessError
+from threading import Lock, Thread
+
+from virtualenv.info import fs_supports_symlink
+from virtualenv.seed.embed.base_embed import BaseEmbed
+from virtualenv.seed.wheels import get_wheel
+from virtualenv.util.path import Path
+
+from .pip_install.copy import CopyPipInstall
+from .pip_install.symlink import SymlinkPipInstall
+
+
+class FromAppData(BaseEmbed):
+ def __init__(self, options):
+ super(FromAppData, self).__init__(options)
+ self.symlinks = options.symlink_app_data
+
+ @classmethod
+ def add_parser_arguments(cls, parser, interpreter, app_data):
+ super(FromAppData, cls).add_parser_arguments(parser, interpreter, app_data)
+ can_symlink = app_data.transient is False and fs_supports_symlink()
+ parser.add_argument(
+ "--symlink-app-data",
+ dest="symlink_app_data",
+ action="store_true" if can_symlink else "store_false",
+ help="{} symlink the python packages from the app-data folder (requires seed pip>=19.3)".format(
+ "" if can_symlink else "not supported - ",
+ ),
+ default=False,
+ )
+
+ def run(self, creator):
+ if not self.enabled:
+ return
+ with self._get_seed_wheels(creator) as name_to_whl:
+ pip_version = name_to_whl["pip"].version_tuple if "pip" in name_to_whl else None
+ installer_class = self.installer_class(pip_version)
+ exceptions = {}
+
+ def _install(name, wheel):
+ try:
+ logging.debug("install %s from wheel %s via %s", name, wheel, installer_class.__name__)
+ key = Path(installer_class.__name__) / wheel.path.stem
+ wheel_img = self.app_data.wheel_image(creator.interpreter.version_release_str, key)
+ installer = installer_class(wheel.path, creator, wheel_img)
+ parent = self.app_data.lock / wheel_img.parent
+ with parent.non_reentrant_lock_for_key(wheel_img.name):
+ if not installer.has_image():
+ installer.build_image()
+ installer.install(creator.interpreter.version_info)
+ except Exception: # noqa
+ exceptions[name] = sys.exc_info()
+
+ threads = list(Thread(target=_install, args=(n, w)) for n, w in name_to_whl.items())
+ for thread in threads:
+ thread.start()
+ for thread in threads:
+ thread.join()
+ if exceptions:
+ messages = ["failed to build image {} because:".format(", ".join(exceptions.keys()))]
+ for value in exceptions.values():
+ exc_type, exc_value, exc_traceback = value
+ messages.append("".join(traceback.format_exception(exc_type, exc_value, exc_traceback)))
+ raise RuntimeError("\n".join(messages))
+
+ @contextmanager
+ def _get_seed_wheels(self, creator):
+ name_to_whl, lock, fail = {}, Lock(), {}
+
+ def _get(distribution, version):
+ for_py_version = creator.interpreter.version_release_str
+ failure, result = None, None
+ # fall back to download in case the exact version is not available
+ for download in [True] if self.download else [False, True]:
+ failure = None
+ try:
+ result = get_wheel(
+ distribution=distribution,
+ version=version,
+ for_py_version=for_py_version,
+ search_dirs=self.extra_search_dir,
+ download=download,
+ app_data=self.app_data,
+ do_periodic_update=self.periodic_update,
+ )
+ if result is not None:
+ break
+ except Exception as exception: # noqa
+ logging.exception("fail")
+ failure = exception
+ if failure:
+ if isinstance(failure, CalledProcessError):
+ msg = "failed to download {}".format(distribution)
+ if version is not None:
+ msg += " version {}".format(version)
+ msg += ", pip download exit code {}".format(failure.returncode)
+ output = failure.output if sys.version_info < (3, 5) else (failure.output + failure.stderr)
+ if output:
+ msg += "\n"
+ msg += output
+ else:
+ msg = repr(failure)
+ logging.error(msg)
+ with lock:
+ fail[distribution] = version
+ else:
+ with lock:
+ name_to_whl[distribution] = result
+
+ threads = list(
+ Thread(target=_get, args=(distribution, version))
+ for distribution, version in self.distribution_to_versions().items()
+ )
+ for thread in threads:
+ thread.start()
+ for thread in threads:
+ thread.join()
+ if fail:
+ raise RuntimeError("seed failed due to failing to download wheels {}".format(", ".join(fail.keys())))
+ yield name_to_whl
+
+ def installer_class(self, pip_version_tuple):
+ if self.symlinks and pip_version_tuple:
+ # symlink support requires pip 19.3+
+ if pip_version_tuple >= (19, 3):
+ return SymlinkPipInstall
+ return CopyPipInstall
+
+ def __unicode__(self):
+ base = super(FromAppData, self).__unicode__()
+ msg = ", via={}, app_data_dir={}".format("symlink" if self.symlinks else "copy", self.app_data)
+ return base[:-1] + msg + base[-1]
diff --git a/third_party/python/virtualenv/virtualenv/seed/seeder.py b/third_party/python/virtualenv/virtualenv/seed/seeder.py
new file mode 100644
index 0000000000..2bcccfc727
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/seeder.py
@@ -0,0 +1,39 @@
+from __future__ import absolute_import, unicode_literals
+
+from abc import ABCMeta, abstractmethod
+
+from six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class Seeder(object):
+ """A seeder will install some seed packages into a virtual environment."""
+
+ # noinspection PyUnusedLocal
+ def __init__(self, options, enabled):
+ """
+
+ :param options: the parsed options as defined within :meth:`add_parser_arguments`
+ :param enabled: a flag for whether the seeder is enabled or not
+ """
+ self.enabled = enabled
+
+ @classmethod
+ def add_parser_arguments(cls, parser, interpreter, app_data):
+ """
+ Add CLI arguments for this seed mechanism.
+
+ :param parser: the CLI parser
+ :param app_data: the application data folder
+ :param interpreter: the interpreter this virtual environment is based on
+ """
+ raise NotImplementedError
+
+ @abstractmethod
+ def run(self, creator):
+ """Perform the seed operation.
+
+ :param creator: the creator (based on :class:`virtualenv.create.creator.Creator`) we used to create this \
+ virtual environment
+ """
+ raise NotImplementedError
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/__init__.py b/third_party/python/virtualenv/virtualenv/seed/wheels/__init__.py
new file mode 100644
index 0000000000..dbffe2e433
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/__init__.py
@@ -0,0 +1,11 @@
+from __future__ import absolute_import, unicode_literals
+
+from .acquire import get_wheel, pip_wheel_env_run
+from .util import Version, Wheel
+
+__all__ = (
+ "get_wheel",
+ "pip_wheel_env_run",
+ "Version",
+ "Wheel",
+)
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/acquire.py b/third_party/python/virtualenv/virtualenv/seed/wheels/acquire.py
new file mode 100644
index 0000000000..e63ecb67cf
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/acquire.py
@@ -0,0 +1,120 @@
+"""Bootstrap"""
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+import sys
+from operator import eq, lt
+
+from virtualenv.util.path import Path
+from virtualenv.util.six import ensure_str
+from virtualenv.util.subprocess import Popen, subprocess
+
+from .bundle import from_bundle
+from .util import Version, Wheel, discover_wheels
+
+
+def get_wheel(distribution, version, for_py_version, search_dirs, download, app_data, do_periodic_update):
+ """
+ Get a wheel with the given distribution-version-for_py_version trio, by using the extra search dir + download
+ """
+ # not all wheels are compatible with all python versions, so we need to qualify them by python version
+ # 1. acquire from bundle
+ wheel = from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update)
+
+ # 2. download from the internet
+ if version not in Version.non_version and download:
+ wheel = download_wheel(
+ distribution=distribution,
+ version_spec=Version.as_version_spec(version),
+ for_py_version=for_py_version,
+ search_dirs=search_dirs,
+ app_data=app_data,
+ to_folder=app_data.house,
+ )
+ return wheel
+
+
+def download_wheel(distribution, version_spec, for_py_version, search_dirs, app_data, to_folder):
+ to_download = "{}{}".format(distribution, version_spec or "")
+ logging.debug("download wheel %s %s to %s", to_download, for_py_version, to_folder)
+ cmd = [
+ sys.executable,
+ "-m",
+ "pip",
+ "download",
+ "--progress-bar",
+ "off",
+ "--disable-pip-version-check",
+ "--only-binary=:all:",
+ "--no-deps",
+ "--python-version",
+ for_py_version,
+ "-d",
+ str(to_folder),
+ to_download,
+ ]
+ # pip has no interface in python - must be a new sub-process
+ env = pip_wheel_env_run(search_dirs, app_data)
+ process = Popen(cmd, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
+ out, err = process.communicate()
+ if process.returncode != 0:
+ kwargs = {"output": out}
+ if sys.version_info < (3, 5):
+ kwargs["output"] += err
+ else:
+ kwargs["stderr"] = err
+ raise subprocess.CalledProcessError(process.returncode, cmd, **kwargs)
+ result = _find_downloaded_wheel(distribution, version_spec, for_py_version, to_folder, out)
+ logging.debug("downloaded wheel %s", result.name)
+ return result
+
+
+def _find_downloaded_wheel(distribution, version_spec, for_py_version, to_folder, out):
+ for line in out.splitlines():
+ line = line.lstrip()
+ for marker in ("Saved ", "File was already downloaded "):
+ if line.startswith(marker):
+ return Wheel(Path(line[len(marker) :]).absolute())
+ # if for some reason the output does not match, fall back to the latest version with that spec
+ return find_compatible_in_house(distribution, version_spec, for_py_version, to_folder)
+
+
+def find_compatible_in_house(distribution, version_spec, for_py_version, in_folder):
+ wheels = discover_wheels(in_folder, distribution, None, for_py_version)
+ start, end = 0, len(wheels)
+ if version_spec is not None:
+ if version_spec.startswith("<"):
+ from_pos, op = 1, lt
+ elif version_spec.startswith("=="):
+ from_pos, op = 2, eq
+ else:
+ raise ValueError(version_spec)
+ version = Wheel.as_version_tuple(version_spec[from_pos:])
+ start = next((at for at, w in enumerate(wheels) if op(w.version_tuple, version)), len(wheels))
+
+ return None if start == end else wheels[start]
+
+
+def pip_wheel_env_run(search_dirs, app_data):
+ for_py_version = "{}.{}".format(*sys.version_info[0:2])
+ env = os.environ.copy()
+ env.update(
+ {
+ ensure_str(k): str(v) # python 2 requires these to be string only (non-unicode)
+ for k, v in {"PIP_USE_WHEEL": "1", "PIP_USER": "0", "PIP_NO_INPUT": "1"}.items()
+ },
+ )
+ wheel = get_wheel(
+ distribution="pip",
+ version=None,
+ for_py_version=for_py_version,
+ search_dirs=search_dirs,
+ download=False,
+ app_data=app_data,
+ do_periodic_update=False,
+ )
+ if wheel is None:
+ raise RuntimeError("could not find the embedded pip")
+ env[str("PYTHONPATH")] = str(wheel.path)
+ return env
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/bundle.py b/third_party/python/virtualenv/virtualenv/seed/wheels/bundle.py
new file mode 100644
index 0000000000..7c664bd389
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/bundle.py
@@ -0,0 +1,49 @@
+from __future__ import absolute_import, unicode_literals
+
+from ..wheels.embed import get_embed_wheel
+from .periodic_update import periodic_update
+from .util import Version, Wheel, discover_wheels
+
+
+def from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update):
+ """
+ Load the bundled wheel to a cache directory.
+ """
+ of_version = Version.of_version(version)
+ wheel = load_embed_wheel(app_data, distribution, for_py_version, of_version)
+
+ if version != Version.embed:
+ # 2. check if we have upgraded embed
+ if app_data.can_update:
+ wheel = periodic_update(distribution, for_py_version, wheel, search_dirs, app_data, do_periodic_update)
+
+ # 3. acquire from extra search dir
+ found_wheel = from_dir(distribution, of_version, for_py_version, search_dirs)
+ if found_wheel is not None:
+ if wheel is None:
+ wheel = found_wheel
+ elif found_wheel.version_tuple > wheel.version_tuple:
+ wheel = found_wheel
+ return wheel
+
+
+def load_embed_wheel(app_data, distribution, for_py_version, version):
+ wheel = get_embed_wheel(distribution, for_py_version)
+ if wheel is not None:
+ version_match = version == wheel.version
+ if version is None or version_match:
+ with app_data.ensure_extracted(wheel.path, lambda: app_data.house) as wheel_path:
+ wheel = Wheel(wheel_path)
+ else: # ignore if the version does not match
+ wheel = None
+ return wheel
+
+
+def from_dir(distribution, version, for_py_version, directories):
+ """
+ Load a compatible wheel from a given folder.
+ """
+ for folder in directories:
+ for wheel in discover_wheels(folder, distribution, version, for_py_version):
+ return wheel
+ return None
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/__init__.py b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/__init__.py
new file mode 100644
index 0000000000..5233e48761
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/__init__.py
@@ -0,0 +1,62 @@
+from __future__ import absolute_import, unicode_literals
+
+from virtualenv.seed.wheels.util import Wheel
+from virtualenv.util.path import Path
+
+BUNDLE_FOLDER = Path(__file__).absolute().parent
+BUNDLE_SUPPORT = {
+ "3.10": {
+ "pip": "pip-20.3.1-py2.py3-none-any.whl",
+ "setuptools": "setuptools-51.0.0-py3-none-any.whl",
+ "wheel": "wheel-0.36.1-py2.py3-none-any.whl",
+ },
+ "3.9": {
+ "pip": "pip-20.3.1-py2.py3-none-any.whl",
+ "setuptools": "setuptools-51.0.0-py3-none-any.whl",
+ "wheel": "wheel-0.36.1-py2.py3-none-any.whl",
+ },
+ "3.8": {
+ "pip": "pip-20.3.1-py2.py3-none-any.whl",
+ "setuptools": "setuptools-51.0.0-py3-none-any.whl",
+ "wheel": "wheel-0.36.1-py2.py3-none-any.whl",
+ },
+ "3.7": {
+ "pip": "pip-20.3.1-py2.py3-none-any.whl",
+ "setuptools": "setuptools-51.0.0-py3-none-any.whl",
+ "wheel": "wheel-0.36.1-py2.py3-none-any.whl",
+ },
+ "3.6": {
+ "pip": "pip-20.3.1-py2.py3-none-any.whl",
+ "setuptools": "setuptools-51.0.0-py3-none-any.whl",
+ "wheel": "wheel-0.36.1-py2.py3-none-any.whl",
+ },
+ "3.5": {
+ "pip": "pip-20.3.1-py2.py3-none-any.whl",
+ "setuptools": "setuptools-50.3.2-py3-none-any.whl",
+ "wheel": "wheel-0.36.1-py2.py3-none-any.whl",
+ },
+ "3.4": {
+ "pip": "pip-19.1.1-py2.py3-none-any.whl",
+ "setuptools": "setuptools-43.0.0-py2.py3-none-any.whl",
+ "wheel": "wheel-0.33.6-py2.py3-none-any.whl",
+ },
+ "2.7": {
+ "pip": "pip-20.3.1-py2.py3-none-any.whl",
+ "setuptools": "setuptools-44.1.1-py2.py3-none-any.whl",
+ "wheel": "wheel-0.36.1-py2.py3-none-any.whl",
+ },
+}
+MAX = "3.10"
+
+
+def get_embed_wheel(distribution, for_py_version):
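+ # interpreter versions without a dedicated entry fall back to the newest supported table (MAX)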
+ path = BUNDLE_FOLDER / (BUNDLE_SUPPORT.get(for_py_version, {}) or BUNDLE_SUPPORT[MAX]).get(distribution)
+ return Wheel.from_path(path)
+
+
+__all__ = (
+ "get_embed_wheel",
+ "BUNDLE_SUPPORT",
+ "MAX",
+ "BUNDLE_FOLDER",
+)
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-19.1.1-py2.py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-19.1.1-py2.py3-none-any.whl
new file mode 100644
index 0000000000..8476c11930
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-19.1.1-py2.py3-none-any.whl
Binary files differ
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-20.3.1-py2.py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-20.3.1-py2.py3-none-any.whl
new file mode 100644
index 0000000000..fbac5d3c90
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/pip-20.3.1-py2.py3-none-any.whl
Binary files differ
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-43.0.0-py2.py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-43.0.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000..733faa6a54
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-43.0.0-py2.py3-none-any.whl
Binary files differ
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-44.1.1-py2.py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-44.1.1-py2.py3-none-any.whl
new file mode 100644
index 0000000000..bf28513c99
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-44.1.1-py2.py3-none-any.whl
Binary files differ
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-50.3.2-py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-50.3.2-py3-none-any.whl
new file mode 100644
index 0000000000..56d1bf92d7
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-50.3.2-py3-none-any.whl
Binary files differ
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-51.0.0-py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-51.0.0-py3-none-any.whl
new file mode 100644
index 0000000000..7e60e11305
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/setuptools-51.0.0-py3-none-any.whl
Binary files differ
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.33.6-py2.py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.33.6-py2.py3-none-any.whl
new file mode 100644
index 0000000000..2a71896be9
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.33.6-py2.py3-none-any.whl
Binary files differ
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.36.1-py2.py3-none-any.whl b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.36.1-py2.py3-none-any.whl
new file mode 100644
index 0000000000..1f17303bf9
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/embed/wheel-0.36.1-py2.py3-none-any.whl
Binary files differ
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/periodic_update.py b/third_party/python/virtualenv/virtualenv/seed/wheels/periodic_update.py
new file mode 100644
index 0000000000..fd0ff4c264
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/periodic_update.py
@@ -0,0 +1,367 @@
+"""
+Periodically update bundled versions.
+"""
+
+from __future__ import absolute_import, unicode_literals
+
+import json
+import logging
+import os
+import ssl
+import subprocess
+import sys
+from datetime import datetime, timedelta
+from itertools import groupby
+from shutil import copy2
+from textwrap import dedent
+from threading import Thread
+
+from six.moves.urllib.error import URLError
+from six.moves.urllib.request import urlopen
+
+from virtualenv.app_data import AppDataDiskFolder
+from virtualenv.info import PY2
+from virtualenv.util.path import Path
+from virtualenv.util.subprocess import CREATE_NO_WINDOW, Popen
+
+from ..wheels.embed import BUNDLE_SUPPORT
+from ..wheels.util import Wheel
+
+if PY2:
+ # on Python 2, datetime.strptime throws the error below if the import was not triggered on the main thread:
+ # Failed to import _strptime because the import lock is held by
+ try:
+ import _strptime # noqa
+ except ImportError: # pragma: no cov
+ pass # pragma: no cov
+
+
+def periodic_update(distribution, for_py_version, wheel, search_dirs, app_data, do_periodic_update):
+ if do_periodic_update:
+ handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_data)
+
+ now = datetime.now()
+
+ u_log = UpdateLog.from_app_data(app_data, distribution, for_py_version)
+ u_log_older_than_hour = now - u_log.completed > timedelta(hours=1) if u_log.completed is not None else False
+ for _, group in groupby(u_log.versions, key=lambda v: v.wheel.version_tuple[0:2]):
+ version = next(group) # use only latest patch version per minor, earlier assumed to be buggy
+ if wheel is not None and Path(version.filename).name == wheel.name:
+ break
+ if u_log.periodic is False or (u_log_older_than_hour and version.use(now)):
+ updated_wheel = Wheel(app_data.house / version.filename)
+ logging.debug("using %supdated wheel %s", "periodically " if updated_wheel else "", updated_wheel)
+ wheel = updated_wheel
+ break
+
+ return wheel
+
+
+def handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_data):
+ embed_update_log = app_data.embed_update_log(distribution, for_py_version)
+ u_log = UpdateLog.from_dict(embed_update_log.read())
+ if u_log.needs_update:
+ u_log.periodic = True
+ u_log.started = datetime.now()
+ embed_update_log.write(u_log.to_dict())
+ trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, periodic=True)
+
+
+DATETIME_FMT = "%Y-%m-%dT%H:%M:%S.%fZ"
+
+
+def dump_datetime(value):
+ return None if value is None else value.strftime(DATETIME_FMT)
+
+
+def load_datetime(value):
+ return None if value is None else datetime.strptime(value, DATETIME_FMT)
+
+
+class NewVersion(object):
+ def __init__(self, filename, found_date, release_date):
+ self.filename = filename
+ self.found_date = found_date
+ self.release_date = release_date
+
+ @classmethod
+ def from_dict(cls, dictionary):
+ return cls(
+ filename=dictionary["filename"],
+ found_date=load_datetime(dictionary["found_date"]),
+ release_date=load_datetime(dictionary["release_date"]),
+ )
+
+ def to_dict(self):
+ return {
+ "filename": self.filename,
+ "release_date": dump_datetime(self.release_date),
+ "found_date": dump_datetime(self.found_date),
+ }
+
+ def use(self, now):
+ compare_from = self.release_date or self.found_date
+ return now - compare_from >= timedelta(days=28)
+
+ def __repr__(self):
+ return "{}(filename={}, found_date={}, release_date={})".format(
+ self.__class__.__name__,
+ self.filename,
+ self.found_date,
+ self.release_date,
+ )
+
+ def __eq__(self, other):
+ return type(self) == type(other) and all(
+ getattr(self, k) == getattr(other, k) for k in ["filename", "release_date", "found_date"]
+ )
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ @property
+ def wheel(self):
+ return Wheel(Path(self.filename))
+
+
+class UpdateLog(object):
+ def __init__(self, started, completed, versions, periodic):
+ self.started = started
+ self.completed = completed
+ self.versions = versions
+ self.periodic = periodic
+
+ @classmethod
+ def from_dict(cls, dictionary):
+ if dictionary is None:
+ dictionary = {}
+ return cls(
+ load_datetime(dictionary.get("started")),
+ load_datetime(dictionary.get("completed")),
+ [NewVersion.from_dict(v) for v in dictionary.get("versions", [])],
+ dictionary.get("periodic"),
+ )
+
+ @classmethod
+ def from_app_data(cls, app_data, distribution, for_py_version):
+ raw_json = app_data.embed_update_log(distribution, for_py_version).read()
+ return cls.from_dict(raw_json)
+
+ def to_dict(self):
+ return {
+ "started": dump_datetime(self.started),
+ "completed": dump_datetime(self.completed),
+ "periodic": self.periodic,
+ "versions": [r.to_dict() for r in self.versions],
+ }
+
+ @property
+ def needs_update(self):
+ now = datetime.now()
+ if self.completed is None: # never completed
+ return self._check_start(now)
+ else:
+ if now - self.completed <= timedelta(days=14):
+ return False
+ return self._check_start(now)
+
+ def _check_start(self, now):
+ return self.started is None or now - self.started > timedelta(hours=1)
+
+
+def trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, periodic):
+ wheel_path = None if wheel is None else str(wheel.path)
+ cmd = [
+ sys.executable,
+ "-c",
+ dedent(
+ """
+ from virtualenv.report import setup_report, MAX_LEVEL
+ from virtualenv.seed.wheels.periodic_update import do_update
+ setup_report(MAX_LEVEL, show_pid=True)
+ do_update({!r}, {!r}, {!r}, {!r}, {!r}, {!r})
+ """,
+ )
+ .strip()
+ .format(distribution, for_py_version, wheel_path, str(app_data), [str(p) for p in search_dirs], periodic),
+ ]
+ debug = os.environ.get(str("_VIRTUALENV_PERIODIC_UPDATE_INLINE")) == str("1")
+ pipe = None if debug else subprocess.PIPE
+ kwargs = {"stdout": pipe, "stderr": pipe}
+ if not debug and sys.platform == "win32":
+ kwargs["creationflags"] = CREATE_NO_WINDOW
+ process = Popen(cmd, **kwargs)
+ logging.info(
+ "triggered periodic upgrade of %s%s (for python %s) via background process having PID %d",
+ distribution,
+ "" if wheel is None else "=={}".format(wheel.version),
+ for_py_version,
+ process.pid,
+ )
+ if debug:
+ process.communicate() # wait for the child only in debug mode; otherwise it is deliberately left running as a background process
+
+
+def do_update(distribution, for_py_version, embed_filename, app_data, search_dirs, periodic):
+ versions = None
+ try:
+ versions = _run_do_update(app_data, distribution, embed_filename, for_py_version, periodic, search_dirs)
+ finally:
+ logging.debug("done %s %s with %s", distribution, for_py_version, versions)
+ return versions
+
+
+def _run_do_update(app_data, distribution, embed_filename, for_py_version, periodic, search_dirs):
+ from virtualenv.seed.wheels import acquire
+
+ wheel_filename = None if embed_filename is None else Path(embed_filename)
+ embed_version = None if wheel_filename is None else Wheel(wheel_filename).version_tuple
+ app_data = AppDataDiskFolder(app_data) if isinstance(app_data, str) else app_data
+ search_dirs = [Path(p) if isinstance(p, str) else p for p in search_dirs]
+ wheelhouse = app_data.house
+ embed_update_log = app_data.embed_update_log(distribution, for_py_version)
+ u_log = UpdateLog.from_dict(embed_update_log.read())
+ now = datetime.now()
+ if wheel_filename is not None:
+ dest = wheelhouse / wheel_filename.name
+ if not dest.exists():
+ copy2(str(wheel_filename), str(wheelhouse))
+ last, last_version, versions = None, None, []
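+ # walk releases from newest to older until one is old enough to trust (see NewVersion.use) or we reach the embedded version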
+ while last is None or not last.use(now):
+ download_time = datetime.now()
+ dest = acquire.download_wheel(
+ distribution=distribution,
+ version_spec=None if last_version is None else "<{}".format(last_version),
+ for_py_version=for_py_version,
+ search_dirs=search_dirs,
+ app_data=app_data,
+ to_folder=wheelhouse,
+ )
+ if dest is None or (u_log.versions and u_log.versions[0].filename == dest.name):
+ break
+ release_date = release_date_for_wheel_path(dest.path)
+ last = NewVersion(filename=dest.path.name, release_date=release_date, found_date=download_time)
+ logging.info("detected %s in %s", last, datetime.now() - download_time)
+ versions.append(last)
+ last_wheel = Wheel(Path(last.filename))
+ last_version = last_wheel.version
+ if embed_version is not None:
+ if embed_version >= last_wheel.version_tuple: # stop download if we reach the embed version
+ break
+ u_log.periodic = periodic
+ if not u_log.periodic:
+ u_log.started = now
+ u_log.versions = versions + u_log.versions
+ u_log.completed = datetime.now()
+ embed_update_log.write(u_log.to_dict())
+ return versions
+
+
+def release_date_for_wheel_path(dest):
+ wheel = Wheel(dest)
+ # the most accurate source is PyPI itself - e.g. https://pypi.org/pypi/pip/json,
+ # see https://warehouse.pypa.io/api-reference/json/ for more details
+ content = _pypi_get_distribution_info_cached(wheel.distribution)
+ if content is not None:
+ try:
+ upload_time = content["releases"][wheel.version][0]["upload_time"]
+ return datetime.strptime(upload_time, "%Y-%m-%dT%H:%M:%S")
+ except Exception as exception:
+ logging.error("could not load release date %s because %r", content, exception)
+ return None
+
+
+def _request_context():
+ yield None
+ # fall back to unverified HTTPS (the information we request is not sensitive)
+ yield ssl._create_unverified_context() # noqa
+
+
+_PYPI_CACHE = {}
+
+
+def _pypi_get_distribution_info_cached(distribution):
+ if distribution not in _PYPI_CACHE:
+ _PYPI_CACHE[distribution] = _pypi_get_distribution_info(distribution)
+ return _PYPI_CACHE[distribution]
+
+
+def _pypi_get_distribution_info(distribution):
+ content, url = None, "https://pypi.org/pypi/{}/json".format(distribution)
+ try:
+ for context in _request_context():
+ try:
+ with urlopen(url, context=context) as file_handler:
+ content = json.load(file_handler)
+ break
+ except URLError as exception:
+ logging.error("failed to access %s because %r", url, exception)
+ except Exception as exception:
+ logging.error("failed to access %s because %r", url, exception)
+ return content
+
+
+def manual_upgrade(app_data):
+ threads = []
+
+ for for_py_version, distribution_to_package in BUNDLE_SUPPORT.items():
+ # load extra search dir for the given for_py
+ for distribution in distribution_to_package.keys():
+ thread = Thread(target=_run_manual_upgrade, args=(app_data, distribution, for_py_version))
+ thread.start()
+ threads.append(thread)
+
+ for thread in threads:
+ thread.join()
+
+
+def _run_manual_upgrade(app_data, distribution, for_py_version):
+ start = datetime.now()
+ from .bundle import from_bundle
+
+ current = from_bundle(
+ distribution=distribution,
+ version=None,
+ for_py_version=for_py_version,
+ search_dirs=[],
+ app_data=app_data,
+ do_periodic_update=False,
+ )
+ logging.warning(
+ "upgrade %s for python %s with current %s",
+ distribution,
+ for_py_version,
+ "" if current is None else current.name,
+ )
+ versions = do_update(
+ distribution=distribution,
+ for_py_version=for_py_version,
+ embed_filename=current.path,
+ app_data=app_data,
+ search_dirs=[],
+ periodic=False,
+ )
+ msg = "upgraded %s for python %s in %s {}".format(
+ "new entries found:\n%s" if versions else "no new versions found",
+ )
+ args = [
+ distribution,
+ for_py_version,
+ datetime.now() - start,
+ ]
+ if versions:
+ args.append("\n".join("\t{}".format(v) for v in versions))
+ logging.warning(msg, *args)
+
+
+__all__ = (
+ "periodic_update",
+ "do_update",
+ "manual_upgrade",
+ "NewVersion",
+ "UpdateLog",
+ "load_datetime",
+ "dump_datetime",
+ "trigger_update",
+ "release_date_for_wheel_path",
+)
diff --git a/third_party/python/virtualenv/virtualenv/seed/wheels/util.py b/third_party/python/virtualenv/virtualenv/seed/wheels/util.py
new file mode 100644
index 0000000000..1240eb2d24
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/seed/wheels/util.py
@@ -0,0 +1,116 @@
+from __future__ import absolute_import, unicode_literals
+
+from operator import attrgetter
+from zipfile import ZipFile
+
+from virtualenv.util.six import ensure_text
+
+
+class Wheel(object):
+ def __init__(self, path):
+ # https://www.python.org/dev/peps/pep-0427/#file-name-convention
+ # The wheel filename is {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl
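+ # e.g. "pip-20.3.1-py2.py3-none-any.whl" parses to distribution "pip" and version "20.3.1" (illustrative)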
+ self.path = path
+ self._parts = path.stem.split("-")
+
+ @classmethod
+ def from_path(cls, path):
+ if path is not None and path.suffix == ".whl" and len(path.stem.split("-")) >= 5:
+ return cls(path)
+ return None
+
+ @property
+ def distribution(self):
+ return self._parts[0]
+
+ @property
+ def version(self):
+ return self._parts[1]
+
+ @property
+ def version_tuple(self):
+ return self.as_version_tuple(self.version)
+
+ @staticmethod
+ def as_version_tuple(version):
+ result = []
+ for part in version.split(".")[0:3]:
+ try:
+ result.append(int(part))
+ except ValueError:
+ break
+ if not result:
+ raise ValueError(version)
+ return tuple(result)
+
+ @property
+ def name(self):
+ return self.path.name
+
+ def support_py(self, py_version):
+ name = "{}.dist-info/METADATA".format("-".join(self.path.stem.split("-")[0:2]))
+ with ZipFile(ensure_text(str(self.path)), "r") as zip_file:
+ metadata = zip_file.read(name).decode("utf-8")
+ marker = "Requires-Python:"
+ requires = next((i[len(marker) :] for i in metadata.splitlines() if i.startswith(marker)), None)
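+ # e.g. a "Requires-Python: >=3.5" metadata line yields ">=3.5" here (illustrative)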
+ if requires is None: # if it does not specify a Python requirement, assume it is compatible
+ return True
+ py_version_int = tuple(int(i) for i in py_version.split("."))
+ for require in (i.strip() for i in requires.split(",")):
+ # https://www.python.org/dev/peps/pep-0345/#version-specifiers
+ for operator, check in [
+ ("!=", lambda v: py_version_int != v),
+ ("==", lambda v: py_version_int == v),
+ ("<=", lambda v: py_version_int <= v),
+ (">=", lambda v: py_version_int >= v),
+ ("<", lambda v: py_version_int < v),
+ (">", lambda v: py_version_int > v),
+ ]:
+ if require.startswith(operator):
+ ver_str = require[len(operator) :].strip()
+ version = tuple((int(i) if i != "*" else None) for i in ver_str.split("."))[0:2]
+ if not check(version):
+ return False
+ break
+ return True
+
+ def __repr__(self):
+ return "{}({})".format(self.__class__.__name__, self.path)
+
+ def __str__(self):
+ return str(self.path)
+
+
+def discover_wheels(from_folder, distribution, version, for_py_version):
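+ # collect wheels matching the distribution (and version, if given) that support this Python, newest first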
+ wheels = []
+ for filename in from_folder.iterdir():
+ wheel = Wheel.from_path(filename)
+ if wheel and wheel.distribution == distribution:
+ if version is None or wheel.version == version:
+ if wheel.support_py(for_py_version):
+ wheels.append(wheel)
+ return sorted(wheels, key=attrgetter("version_tuple", "distribution"), reverse=True)
+
+
+class Version:
+ #: the version bundled with virtualenv
+ bundle = "bundle"
+ embed = "embed"
+ #: custom version handlers
+ non_version = (
+ bundle,
+ embed,
+ )
+
+ @staticmethod
+ def of_version(value):
+ return None if value in Version.non_version else value
+
+ @staticmethod
+ def as_pip_req(distribution, version):
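+ # e.g. ("pip", "20.3.1") -> "pip==20.3.1", while the special "bundle"/"embed" markers yield just "pip" (illustrative)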
+ return "{}{}".format(distribution, Version.as_version_spec(version))
+
+ @staticmethod
+ def as_version_spec(version):
+ of_version = Version.of_version(version)
+ return "" if of_version is None else "=={}".format(of_version)
diff --git a/third_party/python/virtualenv/virtualenv/util/__init__.py b/third_party/python/virtualenv/virtualenv/util/__init__.py
new file mode 100644
index 0000000000..32d02925bf
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/util/__init__.py
@@ -0,0 +1,11 @@
+from __future__ import absolute_import, unicode_literals
+
+import sys
+
+if sys.version_info[0] == 3:
+ import configparser as ConfigParser
+else:
+ import ConfigParser
+
+
+__all__ = ("ConfigParser",)
diff --git a/third_party/python/virtualenv/virtualenv/util/error.py b/third_party/python/virtualenv/virtualenv/util/error.py
new file mode 100644
index 0000000000..ac5aa502dd
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/util/error.py
@@ -0,0 +1,13 @@
+"""Errors"""
+from __future__ import absolute_import, unicode_literals
+
+
+class ProcessCallFailed(RuntimeError):
+ """Failed a process call"""
+
+ def __init__(self, code, out, err, cmd):
+ super(ProcessCallFailed, self).__init__(code, out, err, cmd)
+ self.code = code
+ self.out = out
+ self.err = err
+ self.cmd = cmd
diff --git a/third_party/python/virtualenv/virtualenv/util/lock.py b/third_party/python/virtualenv/virtualenv/util/lock.py
new file mode 100644
index 0000000000..739dc5af80
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/util/lock.py
@@ -0,0 +1,168 @@
+"""holds locking functionality that works across processes"""
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+from abc import ABCMeta, abstractmethod
+from contextlib import contextmanager
+from threading import Lock, RLock
+
+from filelock import FileLock, Timeout
+from six import add_metaclass
+
+from virtualenv.util.path import Path
+
+
+class _CountedFileLock(FileLock):
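+ # reference-counted FileLock: only the first acquire and the last release touch the underlying OS lock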
+ def __init__(self, lock_file):
+ parent = os.path.dirname(lock_file)
+ if not os.path.isdir(parent):
+ try:
+ os.makedirs(parent)
+ except OSError:
+ pass
+ super(_CountedFileLock, self).__init__(lock_file)
+ self.count = 0
+ self.thread_safe = RLock()
+
+ def acquire(self, timeout=None, poll_intervall=0.05):
+ with self.thread_safe:
+ if self.count == 0:
+ super(_CountedFileLock, self).acquire(timeout=timeout, poll_intervall=poll_intervall)
+ self.count += 1
+
+ def release(self, force=False):
+ with self.thread_safe:
+ if self.count == 1:
+ super(_CountedFileLock, self).release(force=force)
+ self.count = max(self.count - 1, 0)
+
+
+_lock_store = {}
+_store_lock = Lock()
+
+
+@add_metaclass(ABCMeta)
+class PathLockBase(object):
+ def __init__(self, folder):
+ path = Path(folder)
+ self.path = path.resolve() if path.exists() else path
+
+ def __repr__(self):
+ return "{}({})".format(self.__class__.__name__, self.path)
+
+ def __div__(self, other):
+ return type(self)(self.path / other)
+
+ def __truediv__(self, other):
+ return self.__div__(other)
+
+ @abstractmethod
+ def __enter__(self):
+ raise NotImplementedError
+
+ @abstractmethod
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ raise NotImplementedError
+
+ @abstractmethod
+ @contextmanager
+ def lock_for_key(self, name, no_block=False):
+ raise NotImplementedError
+
+ @abstractmethod
+ @contextmanager
+ def non_reentrant_lock_for_key(self, name):
+ raise NotImplementedError
+
+
+class ReentrantFileLock(PathLockBase):
+ def __init__(self, folder):
+ super(ReentrantFileLock, self).__init__(folder)
+ self._lock = None
+
+ def _create_lock(self, name=""):
+ lock_file = str(self.path / "{}.lock".format(name))
+ with _store_lock:
+ if lock_file not in _lock_store:
+ _lock_store[lock_file] = _CountedFileLock(lock_file)
+ return _lock_store[lock_file]
+
+ @staticmethod
+ def _del_lock(lock):
+ with _store_lock:
+ if lock is not None:
+ with lock.thread_safe:
+ if lock.count == 0:
+ _lock_store.pop(lock.lock_file, None)
+
+ def __del__(self):
+ self._del_lock(self._lock)
+
+ def __enter__(self):
+ self._lock = self._create_lock()
+ self._lock_file(self._lock)
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self._release(self._lock)
+
+ def _lock_file(self, lock, no_block=False):
+ # multiple processes might be trying to get a first lock... so we cannot check if this directory exists without
+ # a lock, but that lock might then become expensive, and it's not clear where that lock should live.
+ # Instead we simply ignore the failure if we cannot create the directory.
+ try:
+ os.makedirs(str(self.path))
+ except OSError:
+ pass
+ try:
+ lock.acquire(0.0001)
+ except Timeout:
+ if no_block:
+ raise
+ logging.debug("lock file %s present, will block until released", lock.lock_file)
+ lock.release() # release the acquire try from above
+ lock.acquire()
+
+ @staticmethod
+ def _release(lock):
+ lock.release()
+
+ @contextmanager
+ def lock_for_key(self, name, no_block=False):
+ lock = self._create_lock(name)
+ try:
+ try:
+ self._lock_file(lock, no_block)
+ yield
+ finally:
+ self._release(lock)
+ finally:
+ self._del_lock(lock)
+
+ @contextmanager
+ def non_reentrant_lock_for_key(self, name):
+ with _CountedFileLock(str(self.path / "{}.lock".format(name))):
+ yield
+
+
+class NoOpFileLock(PathLockBase):
+ def __enter__(self):
+ raise NotImplementedError
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ raise NotImplementedError
+
+ @contextmanager
+ def lock_for_key(self, name, no_block=False):
+ yield
+
+ @contextmanager
+ def non_reentrant_lock_for_key(self, name):
+ yield
+
+
+__all__ = (
+ "NoOpFileLock",
+ "ReentrantFileLock",
+ "Timeout",
+)
diff --git a/third_party/python/virtualenv/virtualenv/util/path/__init__.py b/third_party/python/virtualenv/virtualenv/util/path/__init__.py
new file mode 100644
index 0000000000..a7f71634b5
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/util/path/__init__.py
@@ -0,0 +1,16 @@
+from __future__ import absolute_import, unicode_literals
+
+from ._pathlib import Path
+from ._permission import make_exe, set_tree
+from ._sync import copy, copytree, ensure_dir, safe_delete, symlink
+
+__all__ = (
+ "ensure_dir",
+ "symlink",
+ "copy",
+ "copytree",
+ "Path",
+ "make_exe",
+ "set_tree",
+ "safe_delete",
+)
diff --git a/third_party/python/virtualenv/virtualenv/util/path/_pathlib/__init__.py b/third_party/python/virtualenv/virtualenv/util/path/_pathlib/__init__.py
new file mode 100644
index 0000000000..6bb045c2d8
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/util/path/_pathlib/__init__.py
@@ -0,0 +1,63 @@
+from __future__ import absolute_import, unicode_literals
+
+import sys
+
+import six
+
+if six.PY3:
+ from pathlib import Path
+
+ if sys.version_info[0:2] == (3, 4):
+ # no read/write text on python3.4
+ BuiltinPath = Path
+
+ class Path(type(BuiltinPath())):
+ def read_text(self, encoding=None, errors=None):
+ """
+ Open the file in text mode, read it, and close the file.
+ """
+ with self.open(mode="r", encoding=encoding, errors=errors) as f:
+ return f.read()
+
+ def read_bytes(self):
+ """
+ Open the file in bytes mode, read it, and close the file.
+ """
+ with self.open(mode="rb") as f:
+ return f.read()
+
+ def write_text(self, data, encoding=None, errors=None):
+ """
+ Open the file in text mode, write to it, and close the file.
+ """
+ if not isinstance(data, str):
+ raise TypeError("data must be str, not %s" % data.__class__.__name__)
+ with self.open(mode="w", encoding=encoding, errors=errors) as f:
+ return f.write(data)
+
+ def write_bytes(self, data):
+ """
+ Open the file in bytes mode, write to it, and close the file.
+ """
+ # type-check for the buffer interface before truncating the file
+ view = memoryview(data)
+ with self.open(mode="wb") as f:
+ return f.write(view)
+
+ def mkdir(self, mode=0o777, parents=False, exist_ok=False):
+ try:
+ super(type(BuiltinPath()), self).mkdir(mode, parents)
+ except FileExistsError as exception:
+ if not exist_ok:
+ raise exception
+
+
+else:
+ if sys.platform == "win32":
+ # workaround for https://github.com/mcmtroffaes/pathlib2/issues/56
+ from .via_os_path import Path
+ else:
+ from pathlib2 import Path
+
+
+__all__ = ("Path",)
diff --git a/third_party/python/virtualenv/virtualenv/util/path/_pathlib/via_os_path.py b/third_party/python/virtualenv/virtualenv/util/path/_pathlib/via_os_path.py
new file mode 100644
index 0000000000..ac78d4f00a
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/util/path/_pathlib/via_os_path.py
@@ -0,0 +1,148 @@
+from __future__ import absolute_import, unicode_literals
+
+import os
+import platform
+from contextlib import contextmanager
+
+from virtualenv.util.six import ensure_str, ensure_text
+
+IS_PYPY = platform.python_implementation() == "PyPy"
+
+
+class Path(object):
+ def __init__(self, path):
+ if isinstance(path, Path):
+ _path = path._path
+ else:
+ _path = ensure_text(path)
+ if IS_PYPY:
+ _path = _path.encode("utf-8")
+ self._path = _path
+
+ def __repr__(self):
+ return ensure_str("Path({})".format(ensure_text(self._path)))
+
+ def __unicode__(self):
+ return ensure_text(self._path)
+
+ def __str__(self):
+ return ensure_str(self._path)
+
+ def __div__(self, other):
+ if isinstance(other, Path):
+ right = other._path
+ else:
+ right = ensure_text(other)
+ if IS_PYPY:
+ right = right.encode("utf-8")
+ return Path(os.path.join(self._path, right))
+
+ def __truediv__(self, other):
+ return self.__div__(other)
+
+ def __eq__(self, other):
+ return self._path == (other._path if isinstance(other, Path) else None)
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __hash__(self):
+ return hash(self._path)
+
+ def exists(self):
+ return os.path.exists(self._path)
+
+ @property
+ def parent(self):
+ return Path(os.path.abspath(os.path.join(self._path, os.path.pardir)))
+
+ def resolve(self):
+ return Path(os.path.realpath(self._path))
+
+ @property
+ def name(self):
+ return os.path.basename(self._path)
+
+ @property
+ def parts(self):
+ return self._path.split(os.sep)
+
+ def is_file(self):
+ return os.path.isfile(self._path)
+
+ def is_dir(self):
+ return os.path.isdir(self._path)
+
+ def mkdir(self, parents=True, exist_ok=True):
+ try:
+ os.makedirs(self._path)
+ except OSError:
+ if not exist_ok:
+ raise
+
+ def read_text(self, encoding="utf-8"):
+ return self.read_bytes().decode(encoding)
+
+ def read_bytes(self):
+ with open(self._path, "rb") as file_handler:
+ return file_handler.read()
+
+ def write_bytes(self, content):
+ with open(self._path, "wb") as file_handler:
+ file_handler.write(content)
+
+ def write_text(self, text, encoding="utf-8"):
+ self.write_bytes(text.encode(encoding))
+
+ def iterdir(self):
+ for p in os.listdir(self._path):
+ yield Path(os.path.join(self._path, p))
+
+ @property
+ def suffix(self):
+ _, ext = os.path.splitext(self.name)
+ return ext
+
+ @property
+ def stem(self):
+ base, _ = os.path.splitext(self.name)
+ return base
+
+ @contextmanager
+ def open(self, mode="r"):
+ with open(self._path, mode) as file_handler:
+ yield file_handler
+
+ @property
+ def parents(self):
+ result = []
+ parts = self.parts
+ for i in range(len(parts) - 1):
+ result.append(Path(os.sep.join(parts[0 : i + 1])))
+ return result[::-1]
+
+ def unlink(self):
+ os.remove(self._path)
+
+ def with_name(self, name):
+ return self.parent / name
+
+ def is_symlink(self):
+ return os.path.islink(self._path)
+
+ def relative_to(self, other):
+ if not self._path.startswith(other._path):
+ raise ValueError("{} does not start with {}".format(self._path, other._path))
+ return Path(os.sep.join(self.parts[len(other.parts) :]))
+
+ def stat(self):
+ return os.stat(self._path)
+
+ def chmod(self, mode):
+ os.chmod(self._path, mode)
+
+ def absolute(self):
+ return Path(os.path.abspath(self._path))
+
+
+__all__ = ("Path",)
diff --git a/third_party/python/virtualenv/virtualenv/util/path/_permission.py b/third_party/python/virtualenv/virtualenv/util/path/_permission.py
new file mode 100644
index 0000000000..73bb6e81a3
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/util/path/_permission.py
@@ -0,0 +1,32 @@
+from __future__ import absolute_import, unicode_literals
+
+import os
+from stat import S_IXGRP, S_IXOTH, S_IXUSR
+
+from virtualenv.util.six import ensure_text
+
+
+def make_exe(filename):
+ original_mode = filename.stat().st_mode
+ levels = [S_IXUSR, S_IXGRP, S_IXOTH]
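+ # try to add the executable bit for user+group+other first, then progressively fewer bits if chmod fails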
+ for at in range(len(levels), 0, -1):
+ try:
+ mode = original_mode
+ for level in levels[:at]:
+ mode |= level
+ filename.chmod(mode)
+ break
+ except OSError:
+ continue
+
+
+def set_tree(folder, stat):
+ for root, _, files in os.walk(ensure_text(str(folder))):
+ for filename in files:
+ os.chmod(os.path.join(root, filename), stat)
+
+
+__all__ = (
+ "make_exe",
+ "set_tree",
+)
diff --git a/third_party/python/virtualenv/virtualenv/util/path/_sync.py b/third_party/python/virtualenv/virtualenv/util/path/_sync.py
new file mode 100644
index 0000000000..c3d4af78aa
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/util/path/_sync.py
@@ -0,0 +1,98 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+import shutil
+from stat import S_IWUSR
+
+from six import PY2
+
+from virtualenv.info import IS_CPYTHON, IS_WIN
+from virtualenv.util.six import ensure_text
+
+if PY2 and IS_CPYTHON and IS_WIN: # CPython2 on Windows supports unicode paths if passed as unicode
+
+ def norm(src):
+ return ensure_text(str(src))
+
+
+else:
+ norm = str
+
+
+def ensure_dir(path):
+ if not path.exists():
+ logging.debug("create folder %s", ensure_text(str(path)))
+ os.makedirs(norm(path))
+
+
+def ensure_safe_to_do(src, dest):
+ if src == dest:
+ raise ValueError("source and destination are the same {}".format(src))
+ if not dest.exists():
+ return
+ if dest.is_dir() and not dest.is_symlink():
+ logging.debug("remove directory %s", dest)
+ safe_delete(dest)
+ else:
+ logging.debug("remove file %s", dest)
+ dest.unlink()
+
+
+def symlink(src, dest):
+ ensure_safe_to_do(src, dest)
+ logging.debug("symlink %s", _Debug(src, dest))
+ dest.symlink_to(src, target_is_directory=src.is_dir())
+
+
+def copy(src, dest):
+ ensure_safe_to_do(src, dest)
+ is_dir = src.is_dir()
+ method = copytree if is_dir else shutil.copy
+ logging.debug("copy %s", _Debug(src, dest))
+ method(norm(src), norm(dest))
+
+
+def copytree(src, dest):
+ for root, _, files in os.walk(src):
+ dest_dir = os.path.join(dest, os.path.relpath(root, src))
+ if not os.path.isdir(dest_dir):
+ os.makedirs(dest_dir)
+ for name in files:
+ src_f = os.path.join(root, name)
+ dest_f = os.path.join(dest_dir, name)
+ shutil.copy(src_f, dest_f)
+
+
+def safe_delete(dest):
+ def onerror(func, path, exc_info):
+ if not os.access(path, os.W_OK):
+ os.chmod(path, S_IWUSR)
+ func(path)
+ else:
+ raise
+
+ shutil.rmtree(ensure_text(str(dest)), ignore_errors=True, onerror=onerror)
+
+
+class _Debug(object):
+ def __init__(self, src, dest):
+ self.src = src
+ self.dest = dest
+
+ def __str__(self):
+ return "{}{} to {}".format(
+ "directory " if self.src.is_dir() else "",
+ ensure_text(str(self.src)),
+ ensure_text(str(self.dest)),
+ )
+
+
+__all__ = (
+ "ensure_dir",
+ "symlink",
+ "copy",
+ "copytree",
+ "safe_delete",
+)
diff --git a/third_party/python/virtualenv/virtualenv/util/six.py b/third_party/python/virtualenv/virtualenv/util/six.py
new file mode 100644
index 0000000000..16f1c6c95e
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/util/six.py
@@ -0,0 +1,50 @@
+"""Backward compatibility layer with older version of six.
+
+This is used to avoid virtualenv requiring a version of six newer than what
+the system may have.
+"""
+from __future__ import absolute_import
+
+from six import PY2, PY3, binary_type, text_type
+
+try:
+ from six import ensure_text
+except ImportError:
+
+ def ensure_text(s, encoding="utf-8", errors="strict"):
+ """Coerce *s* to six.text_type.
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+try:
+ from six import ensure_str
+except ImportError:
+
+ def ensure_str(s, encoding="utf-8", errors="strict"):
+ """Coerce *s* to `str`.
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ if PY2 and isinstance(s, text_type):
+ s = s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ s = s.decode(encoding, errors)
+ return s
diff --git a/third_party/python/virtualenv/virtualenv/util/subprocess/__init__.py b/third_party/python/virtualenv/virtualenv/util/subprocess/__init__.py
new file mode 100644
index 0000000000..f5066268f8
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/util/subprocess/__init__.py
@@ -0,0 +1,40 @@
+from __future__ import absolute_import, unicode_literals
+
+import subprocess
+import sys
+
+import six
+
+if six.PY2 and sys.platform == "win32":
+ from . import _win_subprocess
+
+ Popen = _win_subprocess.Popen
+else:
+ Popen = subprocess.Popen
+
+
+CREATE_NO_WINDOW = 0x80000000
+
+
+def run_cmd(cmd):
+ try:
+ process = Popen(
+ cmd,
+ universal_newlines=True,
+ stdin=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ )
+ out, err = process.communicate() # input disabled
+ code = process.returncode
+ except OSError as os_error:
+ code, out, err = os_error.errno, "", os_error.strerror
+ return code, out, err
+
+
+__all__ = (
+ "subprocess",
+ "Popen",
+ "run_cmd",
+ "CREATE_NO_WINDOW",
+)
diff --git a/third_party/python/virtualenv/virtualenv/util/subprocess/_win_subprocess.py b/third_party/python/virtualenv/virtualenv/util/subprocess/_win_subprocess.py
new file mode 100644
index 0000000000..4c4c5d0295
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/util/subprocess/_win_subprocess.py
@@ -0,0 +1,175 @@
+# flake8: noqa
+# fmt: off
+## issue: https://bugs.python.org/issue19264
+
+import ctypes
+import os
+import platform
+import subprocess
+from ctypes import Structure, WinError, byref, c_char_p, c_void_p, c_wchar, c_wchar_p, sizeof, windll
+from ctypes.wintypes import BOOL, BYTE, DWORD, HANDLE, LPCWSTR, LPVOID, LPWSTR, WORD
+
+import _subprocess
+
+##
+## Types
+##
+
+CREATE_UNICODE_ENVIRONMENT = 0x00000400
+LPCTSTR = c_char_p
+LPTSTR = c_wchar_p
+LPSECURITY_ATTRIBUTES = c_void_p
+LPBYTE = ctypes.POINTER(BYTE)
+
+class STARTUPINFOW(Structure):
+ _fields_ = [
+ ("cb", DWORD), ("lpReserved", LPWSTR),
+ ("lpDesktop", LPWSTR), ("lpTitle", LPWSTR),
+ ("dwX", DWORD), ("dwY", DWORD),
+ ("dwXSize", DWORD), ("dwYSize", DWORD),
+ ("dwXCountChars", DWORD), ("dwYCountChars", DWORD),
+ ("dwFillAtrribute", DWORD), ("dwFlags", DWORD),
+ ("wShowWindow", WORD), ("cbReserved2", WORD),
+ ("lpReserved2", LPBYTE), ("hStdInput", HANDLE),
+ ("hStdOutput", HANDLE), ("hStdError", HANDLE),
+ ]
+
+LPSTARTUPINFOW = ctypes.POINTER(STARTUPINFOW)
+
+
+class PROCESS_INFORMATION(Structure):
+ _fields_ = [
+ ("hProcess", HANDLE), ("hThread", HANDLE),
+ ("dwProcessId", DWORD), ("dwThreadId", DWORD),
+ ]
+
+LPPROCESS_INFORMATION = ctypes.POINTER(PROCESS_INFORMATION)
+
+
+class DUMMY_HANDLE(ctypes.c_void_p):
+
+ def __init__(self, *a, **kw):
+ super(DUMMY_HANDLE, self).__init__(*a, **kw)
+ self.closed = False
+
+ def Close(self):
+ if not self.closed:
+ windll.kernel32.CloseHandle(self)
+ self.closed = True
+
+ def __int__(self):
+ return self.value
+
+
+CreateProcessW = windll.kernel32.CreateProcessW
+CreateProcessW.argtypes = [
+ LPCWSTR, LPWSTR, LPSECURITY_ATTRIBUTES,
+ LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPCWSTR,
+ LPSTARTUPINFOW, LPPROCESS_INFORMATION,
+]
+CreateProcessW.restype = BOOL
+
+
+##
+## Patched functions/classes
+##
+
+def CreateProcess(
+ executable, args, _p_attr, _t_attr,
+ inherit_handles, creation_flags, env, cwd,
+ startup_info,
+):
+ """Create a process supporting unicode executable and args for win32
+
+ Python implementation of CreateProcess using CreateProcessW for Win32
+
+ """
+
+ si = STARTUPINFOW(
+ dwFlags=startup_info.dwFlags,
+ wShowWindow=startup_info.wShowWindow,
+ cb=sizeof(STARTUPINFOW),
+ ## XXXvlab: not sure of the casting here to ints.
+ hStdInput=startup_info.hStdInput if startup_info.hStdInput is None else int(startup_info.hStdInput),
+ hStdOutput=startup_info.hStdOutput if startup_info.hStdOutput is None else int(startup_info.hStdOutput),
+ hStdError=startup_info.hStdError if startup_info.hStdError is None else int(startup_info.hStdError),
+ )
+
+ wenv = None
+ if env is not None:
+ ## LPCWSTR seems to be c_wchar_p, so let's say CWSTR is c_wchar
+ env = (
+ unicode("").join([
+ unicode("%s=%s\0") % (k, v)
+ for k, v in env.items()
+ ])
+ ) + unicode("\0")
+ wenv = (c_wchar * len(env))()
+ wenv.value = env
+
+ wcwd = None
+ if cwd is not None:
+ wcwd = unicode(cwd)
+
+ pi = PROCESS_INFORMATION()
+ creation_flags |= CREATE_UNICODE_ENVIRONMENT
+
+ if CreateProcessW(
+ executable, args, None, None,
+ inherit_handles, creation_flags,
+ wenv, wcwd, byref(si), byref(pi),
+ ):
+ return (
+ DUMMY_HANDLE(pi.hProcess), DUMMY_HANDLE(pi.hThread),
+ pi.dwProcessId, pi.dwThreadId,
+ )
+ raise WinError()
+
+
+class Popen(subprocess.Popen):
+ """This supersedes Popen and corrects a bug in the CPython 2.7 implementation"""
+
+ def _execute_child(
+ self, args, executable, preexec_fn, close_fds,
+ cwd, env, universal_newlines,
+ startupinfo, creationflags, shell, to_close,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite,
+ ):
+ """Code from part of _execute_child from Python 2.7 (9fbb65e)
+
+ There are only 2 small changes concerning the construction of
+ the final string in shell mode: we preempt the creation of
+ the command string when shell is True, because the original function
+ would try to encode unicode args, which we want to avoid so that they
+ can be sent as-is to ``CreateProcess``.
+
+ """
+ if startupinfo is None:
+ startupinfo = subprocess.STARTUPINFO()
+ if not isinstance(args, subprocess.types.StringTypes):
+ args = [i if isinstance(i, bytes) else i.encode('utf-8') for i in args]
+ args = subprocess.list2cmdline(args)
+ if platform.python_implementation() == "CPython":
+ args = args.decode('utf-8')
+ startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW
+ startupinfo.wShowWindow = _subprocess.SW_HIDE
+ comspec = os.environ.get("COMSPEC", unicode("cmd.exe"))
+ if (
+ _subprocess.GetVersion() >= 0x80000000 or
+ os.path.basename(comspec).lower() == "command.com"
+ ):
+ w9xpopen = self._find_w9xpopen()
+ args = unicode('"%s" %s') % (w9xpopen, args)
+ creationflags |= _subprocess.CREATE_NEW_CONSOLE
+
+ super(Popen, self)._execute_child(
+ args, executable,
+ preexec_fn, close_fds, cwd, env, universal_newlines,
+ startupinfo, creationflags, False, to_close, p2cread,
+ p2cwrite, c2pread, c2pwrite, errread, errwrite,
+ )
+
+_subprocess.CreateProcess = CreateProcess
+# fmt: on
diff --git a/third_party/python/virtualenv/virtualenv/util/zipapp.py b/third_party/python/virtualenv/virtualenv/util/zipapp.py
new file mode 100644
index 0000000000..85d9294f4d
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/util/zipapp.py
@@ -0,0 +1,33 @@
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+import zipfile
+
+from virtualenv.info import IS_WIN, ROOT
+from virtualenv.util.six import ensure_text
+
+
+def read(full_path):
+ sub_file = _get_path_within_zip(full_path)
+ with zipfile.ZipFile(ROOT, "r") as zip_file:
+ with zip_file.open(sub_file) as file_handler:
+ return file_handler.read().decode("utf-8")
+
+
+def extract(full_path, dest):
+ logging.debug("extract %s to %s", full_path, dest)
+ sub_file = _get_path_within_zip(full_path)
+ with zipfile.ZipFile(ROOT, "r") as zip_file:
+ info = zip_file.getinfo(sub_file)
+ info.filename = dest.name
+ zip_file.extract(info, ensure_text(str(dest.parent)))
+
+
+def _get_path_within_zip(full_path):
+ full_path = os.path.abspath(str(full_path))
+ sub_file = full_path[len(ROOT) + 1 :]
+ if IS_WIN:
+ # zip paths always use UNIX separators, even on Windows, though __file__ still follows the platform default
+ sub_file = sub_file.replace(os.sep, "/")
+ return sub_file
diff --git a/third_party/python/virtualenv/virtualenv/version.py b/third_party/python/virtualenv/virtualenv/version.py
new file mode 100644
index 0000000000..7f21daf026
--- /dev/null
+++ b/third_party/python/virtualenv/virtualenv/version.py
@@ -0,0 +1,3 @@
+from __future__ import unicode_literals
+
+__version__ = "20.2.2"
diff --git a/third_party/python/voluptuous/CHANGELOG.md b/third_party/python/voluptuous/CHANGELOG.md
new file mode 100644
index 0000000000..90d644f34a
--- /dev/null
+++ b/third_party/python/voluptuous/CHANGELOG.md
@@ -0,0 +1,104 @@
+# Changelog
+
+## [0.11.0]
+
+**Changes**:
+
+- [#293](https://github.com/alecthomas/voluptuous/pull/293): Support Python 3.6.
+- [#294](https://github.com/alecthomas/voluptuous/pull/294): Drop support for Python 2.6, 3.1 and 3.2.
+- [#318](https://github.com/alecthomas/voluptuous/pull/318): Allow the use of nested schemas and allow any validator to be compiled.
+- [#324](https://github.com/alecthomas/voluptuous/pull/324):
+ Default values MUST now pass validation just as any regular value. This is a backward incompatible change if a schema uses default values that don't pass validation against the specified schema.
+- [#328](https://github.com/alecthomas/voluptuous/pull/328):
+ Modify `__lt__` in Marker class to allow comparison with non Marker objects, such as str and int.
+
+**New**:
+
+- [#307](https://github.com/alecthomas/voluptuous/pull/307): Add description field to `Marker` instances.
+- [#311](https://github.com/alecthomas/voluptuous/pull/311): Add `Schema.infer` method for basic schema inference.
+- [#314](https://github.com/alecthomas/voluptuous/pull/314): Add `SomeOf` validator.
+
+**Fixes**:
+
+- [#279](https://github.com/alecthomas/voluptuous/pull/279):
+ Treat Python 2 old-style classes like types when validating.
+- [#280](https://github.com/alecthomas/voluptuous/pull/280): Make
+ `IsDir()`, `IsFile()` and `PathExists()` consistent between different Python versions.
+- [#290](https://github.com/alecthomas/voluptuous/pull/290): Use absolute imports to avoid import conflicts.
+- [#291](https://github.com/alecthomas/voluptuous/pull/291): Fix `Coerce` validator to catch `decimal.InvalidOperation`.
+- [#298](https://github.com/alecthomas/voluptuous/pull/298): Make `Schema([])` usage consistent with `Schema({})`.
+- [#303](https://github.com/alecthomas/voluptuous/pull/303): Allow partial validation when using validate decorator.
+- [#316](https://github.com/alecthomas/voluptuous/pull/316): Make `Schema.__eq__` deterministic.
+- [#319](https://github.com/alecthomas/voluptuous/pull/319): Replace implementation of `Maybe(s)` with `Any(None, s)` to allow it to be compiled.
+
+## [0.10.5]
+
+- [#278](https://github.com/alecthomas/voluptuous/pull/278): Fixed a Unicode
+translation issue on Python 2.
+
+## [0.10.2]
+
+**Changes**:
+
+- [#195](https://github.com/alecthomas/voluptuous/pull/195):
+ `Range` raises `RangeInvalid` when testing `math.nan`.
+- [#215](https://github.com/alecthomas/voluptuous/pull/215):
+ `{}` and `[]` now always evaluate as is, instead of as any dict or any list.
+ To specify a free-form list, use `list` instead of `[]`. To specify a
+ free-form dict, use `dict` instead of `Schema({}, extra=ALLOW_EXTRA)`.
+- [#224](https://github.com/alecthomas/voluptuous/pull/224):
+ Change the encoding of keys in error messages from Unicode to UTF-8.
+
+**New**:
+
+- [#185](https://github.com/alecthomas/voluptuous/pull/185):
+ Add argument validation decorator.
+- [#199](https://github.com/alecthomas/voluptuous/pull/199):
+ Add `Unordered`.
+- [#200](https://github.com/alecthomas/voluptuous/pull/200):
+ Add `Equal`.
+- [#207](https://github.com/alecthomas/voluptuous/pull/207):
+ Add `Number`.
+- [#210](https://github.com/alecthomas/voluptuous/pull/210):
+ Add `Schema` equality check.
+- [#212](https://github.com/alecthomas/voluptuous/pull/212):
+ Add `coveralls`.
+- [#227](https://github.com/alecthomas/voluptuous/pull/227):
+ Improve `Marker` management in `Schema`.
+- [#232](https://github.com/alecthomas/voluptuous/pull/232):
+ Add `Maybe`.
+- [#234](https://github.com/alecthomas/voluptuous/pull/234):
+ Add `Date`.
+- [#236](https://github.com/alecthomas/voluptuous/pull/236), [#237](https://github.com/alecthomas/voluptuous/pull/237), and [#238](https://github.com/alecthomas/voluptuous/pull/238):
+ Add script for updating `gh-pages`.
+- [#256](https://github.com/alecthomas/voluptuous/pull/256):
+ Add support for `OrderedDict` validation.
+- [#258](https://github.com/alecthomas/voluptuous/pull/258):
+ Add `Contains`.
+
+**Fixes**:
+
+- [#197](https://github.com/alecthomas/voluptuous/pull/197):
+ `ExactSequence` checks sequences are the same length.
+- [#201](https://github.com/alecthomas/voluptuous/pull/201):
+ Empty lists are evaluated as is.
+- [#205](https://github.com/alecthomas/voluptuous/pull/205):
+ Filepath validators correctly handle `None`.
+- [#206](https://github.com/alecthomas/voluptuous/pull/206):
+ Handle non-subscriptable types in `humanize_error`.
+- [#231](https://github.com/alecthomas/voluptuous/pull/231):
+ Validate `namedtuple` as a `tuple`.
+- [#235](https://github.com/alecthomas/voluptuous/pull/235):
+ Update docstring.
+- [#249](https://github.com/alecthomas/voluptuous/pull/249):
+ Update documentation.
+- [#262](https://github.com/alecthomas/voluptuous/pull/262):
+ Fix a performance issue of exponential complexity where all of the dict keys were matched against all keys in the schema.
+ This resulted in O(n*m) complexity where n is the number of keys in the dict being validated and m is the number of keys in the schema.
+ The fix ensures that each key in the dict is matched against the relevant schema keys only. It now works in O(n).
+- [#266](https://github.com/alecthomas/voluptuous/pull/266):
+ Remove setuptools as a dependency.
+
+## 0.9.3 (2016-08-03)
+
+Changelog not kept for 0.9.3 and earlier releases.
diff --git a/third_party/python/voluptuous/COPYING b/third_party/python/voluptuous/COPYING
new file mode 100644
index 0000000000..a19b7057fa
--- /dev/null
+++ b/third_party/python/voluptuous/COPYING
@@ -0,0 +1,25 @@
+Copyright (c) 2010, Alec Thomas
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ - Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+ - Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ - Neither the name of SwapOff.org nor the names of its contributors may
+ be used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/voluptuous/MANIFEST.in b/third_party/python/voluptuous/MANIFEST.in
new file mode 100644
index 0000000000..dd15c3a1f6
--- /dev/null
+++ b/third_party/python/voluptuous/MANIFEST.in
@@ -0,0 +1,4 @@
+include *.md
+include COPYING
+include voluptuous/tests/*.py
+include voluptuous/tests/*.md
diff --git a/third_party/python/voluptuous/PKG-INFO b/third_party/python/voluptuous/PKG-INFO
new file mode 100644
index 0000000000..999071c44e
--- /dev/null
+++ b/third_party/python/voluptuous/PKG-INFO
@@ -0,0 +1,744 @@
+Metadata-Version: 2.1
+Name: voluptuous
+Version: 0.11.5
+Summary: # Voluptuous is a Python data validation library
+Home-page: https://github.com/alecthomas/voluptuous
+Author: Alec Thomas
+Author-email: alec@swapoff.org
+License: BSD
+Download-URL: https://pypi.python.org/pypi/voluptuous
+Description: # Voluptuous is a Python data validation library
+
+ [![Build Status](https://travis-ci.org/alecthomas/voluptuous.png)](https://travis-ci.org/alecthomas/voluptuous)
+ [![Coverage Status](https://coveralls.io/repos/github/alecthomas/voluptuous/badge.svg?branch=master)](https://coveralls.io/github/alecthomas/voluptuous?branch=master) [![Gitter chat](https://badges.gitter.im/alecthomas.png)](https://gitter.im/alecthomas/Lobby)
+
+ Voluptuous, *despite* the name, is a Python data validation library. It
+ is primarily intended for validating data coming into Python as JSON,
+ YAML, etc.
+
+ It has three goals:
+
+ 1. Simplicity.
+ 2. Support for complex data structures.
+ 3. Provide useful error messages.
+
+ ## Contact
+
+ Voluptuous now has a mailing list! Send a mail to
+ [<voluptuous@librelist.com>](mailto:voluptuous@librelist.com) to subscribe. Instructions
+ will follow.
+
+ You can also contact me directly via [email](mailto:alec@swapoff.org) or
+ [Twitter](https://twitter.com/alecthomas).
+
+ To file a bug, create a [new issue](https://github.com/alecthomas/voluptuous/issues/new) on GitHub with a short example of how to replicate the issue.
+
+ ## Documentation
+
+ The documentation is provided [here](http://alecthomas.github.io/voluptuous/).
+
+ ## Changelog
+
+ See [CHANGELOG.md](https://github.com/alecthomas/voluptuous/blob/master/CHANGELOG.md).
+
+ ## Show me an example
+
+ Twitter's [user search API](https://dev.twitter.com/rest/reference/get/users/search) accepts
+ query URLs like:
+
+ ```
+ $ curl 'https://api.twitter.com/1.1/users/search.json?q=python&per_page=20&page=1'
+ ```
+
+ To validate this we might use a schema like:
+
+ ```pycon
+ >>> from voluptuous import Schema
+ >>> schema = Schema({
+ ... 'q': str,
+ ... 'per_page': int,
+ ... 'page': int,
+ ... })
+
+ ```
+
+ This schema very succinctly and roughly describes the data required by
+ the API, and will work fine. But it has a few problems. Firstly, it
+ doesn't fully express the constraints of the API. According to the API,
+ `per_page` should be restricted to at most 20, defaulting to 5, for
+ example. To describe the semantics of the API more accurately, our
+ schema will need to be more thoroughly defined:
+
+ ```pycon
+ >>> from voluptuous import Required, All, Length, Range
+ >>> schema = Schema({
+ ... Required('q'): All(str, Length(min=1)),
+ ... Required('per_page', default=5): All(int, Range(min=1, max=20)),
+ ... 'page': All(int, Range(min=0)),
+ ... })
+
+ ```
+
+ This schema fully enforces the interface defined in Twitter's
+ documentation, and goes a little further for completeness.
+
+ "q" is required:
+
+ ```pycon
+ >>> from voluptuous import MultipleInvalid, Invalid
+ >>> try:
+ ... schema({})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "required key not provided @ data['q']"
+ True
+
+ ```
+
+ ...must be a string:
+
+ ```pycon
+ >>> try:
+ ... schema({'q': 123})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "expected str for dictionary value @ data['q']"
+ True
+
+ ```
+
+ ...and must be at least one character in length:
+
+ ```pycon
+ >>> try:
+ ... schema({'q': ''})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "length of value must be at least 1 for dictionary value @ data['q']"
+ True
+ >>> schema({'q': '#topic'}) == {'q': '#topic', 'per_page': 5}
+ True
+
+ ```
+
+ "per\_page" is a positive integer no greater than 20:
+
+ ```pycon
+ >>> try:
+ ... schema({'q': '#topic', 'per_page': 900})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "value must be at most 20 for dictionary value @ data['per_page']"
+ True
+ >>> try:
+ ... schema({'q': '#topic', 'per_page': -10})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "value must be at least 1 for dictionary value @ data['per_page']"
+ True
+
+ ```
+
+ "page" is an integer \>= 0:
+
+ ```pycon
+ >>> try:
+ ... schema({'q': '#topic', 'per_page': 'one'})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "expected int for dictionary value @ data['per_page']"
+ >>> schema({'q': '#topic', 'page': 1}) == {'q': '#topic', 'page': 1, 'per_page': 5}
+ True
+
+ ```
+
+ ## Defining schemas
+
+ Schemas are nested data structures consisting of dictionaries, lists,
+ scalars and *validators*. Each node in the input schema is pattern
+ matched against corresponding nodes in the input data.
+
+ ### Literals
+
+ Literals in the schema are matched using normal equality checks:
+
+ ```pycon
+ >>> schema = Schema(1)
+ >>> schema(1)
+ 1
+ >>> schema = Schema('a string')
+ >>> schema('a string')
+ 'a string'
+
+ ```
+
+ ### Types
+
+ Types in the schema are matched by checking if the corresponding value
+ is an instance of the type:
+
+ ```pycon
+ >>> schema = Schema(int)
+ >>> schema(1)
+ 1
+ >>> try:
+ ... schema('one')
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "expected int"
+ True
+
+ ```
+
+ ### URLs
+
+ URLs in the schema are matched using the `urlparse` library.
+
+ ```pycon
+ >>> from voluptuous import Url
+ >>> schema = Schema(Url())
+ >>> schema('http://w3.org')
+ 'http://w3.org'
+ >>> try:
+ ... schema('one')
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "expected a URL"
+ True
+
+ ```
+
+ ### Lists
+
+ Lists in the schema are treated as a set of valid values. Each element
+ in the schema list is compared to each value in the input data:
+
+ ```pycon
+ >>> schema = Schema([1, 'a', 'string'])
+ >>> schema([1])
+ [1]
+ >>> schema([1, 1, 1])
+ [1, 1, 1]
+ >>> schema(['a', 1, 'string', 1, 'string'])
+ ['a', 1, 'string', 1, 'string']
+
+ ```
+
+ However, an empty list (`[]`) is treated as is. If you want to specify a list that can
+ contain anything, specify it as `list`:
+
+ ```pycon
+ >>> schema = Schema([])
+ >>> try:
+ ... schema([1])
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "not a valid value @ data[1]"
+ True
+ >>> schema([])
+ []
+ >>> schema = Schema(list)
+ >>> schema([])
+ []
+ >>> schema([1, 2])
+ [1, 2]
+
+ ```
+
+ ### Sets and frozensets
+
+ Sets and frozensets are treated as a set of valid values. Each element
+ in the schema set is compared to each value in the input data:
+
+ ```pycon
+ >>> schema = Schema({42})
+ >>> schema({42}) == {42}
+ True
+ >>> try:
+ ... schema({43})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "invalid value in set"
+ True
+ >>> schema = Schema({int})
+ >>> schema({1, 2, 3}) == {1, 2, 3}
+ True
+ >>> schema = Schema({int, str})
+ >>> schema({1, 2, 'abc'}) == {1, 2, 'abc'}
+ True
+ >>> schema = Schema(frozenset([int]))
+ >>> try:
+ ... schema({3})
+ ... raise AssertionError('Invalid not raised')
+ ... except Invalid as e:
+ ... exc = e
+ >>> str(exc) == 'expected a frozenset'
+ True
+
+ ```
+
+ However, an empty set (`set()`) is treated as is. If you want to specify a set
+ that can contain anything, specify it as `set`:
+
+ ```pycon
+ >>> schema = Schema(set())
+ >>> try:
+ ... schema({1})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "invalid value in set"
+ True
+ >>> schema(set()) == set()
+ True
+ >>> schema = Schema(set)
+ >>> schema({1, 2}) == {1, 2}
+ True
+
+ ```
+
+ ### Validation functions
+
+ Validators are simple callables that raise an `Invalid` exception when
+ they encounter invalid data. The criteria for determining validity are
+ entirely up to the implementation; it may check that a value is a valid
+ username with `pwd.getpwnam()`, it may check that a value is of a
+ specific type, and so on.
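+
+ For instance, a hand-rolled validator that raises `Invalid` directly might
+ look like the following minimal sketch (`PositiveNumber` is an illustrative
+ name, not part of voluptuous):
+
+ ```python
+ from voluptuous import Invalid, MultipleInvalid, Schema
+
+ def PositiveNumber(v):
+     # Raise Invalid to reject the value; return it (possibly transformed)
+     # to accept it.
+     if not isinstance(v, (int, float)) or v <= 0:
+         raise Invalid('expected a positive number')
+     return v
+
+ schema = Schema(PositiveNumber)
+ schema(5)       # -> 5
+ try:
+     schema(-1)
+ except MultipleInvalid as e:
+     print(e)    # "expected a positive number"
+ ```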
+
+ The simplest kind of validator is a Python function that raises
+ ValueError when its argument is invalid. Conveniently, many builtin
+ Python functions have this property. Here's an example of a date
+ validator:
+
+ ```pycon
+ >>> from datetime import datetime
+ >>> def Date(fmt='%Y-%m-%d'):
+ ... return lambda v: datetime.strptime(v, fmt)
+
+ ```
+
+ ```pycon
+ >>> schema = Schema(Date())
+ >>> schema('2013-03-03')
+ datetime.datetime(2013, 3, 3, 0, 0)
+ >>> try:
+ ... schema('2013-03')
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "not a valid value"
+ True
+
+ ```
+
+ In addition to simply determining if a value is valid, validators may
+ mutate the value into a valid form. An example of this is the
+ `Coerce(type)` function, which returns a function that coerces its
+ argument to the given type:
+
+ ```python
+ def Coerce(type, msg=None):
+ """Coerce a value to a type.
+
+ If the type constructor throws a ValueError, the value will be marked as
+ Invalid.
+ """
+ def f(v):
+ try:
+ return type(v)
+ except ValueError:
+ raise Invalid(msg or ('expected %s' % type.__name__))
+ return f
+
+ ```
+
+ This example also shows a common idiom where an optional human-readable
+ message can be provided. This can vastly improve the usefulness of the
+ resulting error messages.
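+
+ As a rough sketch of that idiom in use (the message text below is purely
+ illustrative), `Coerce` can be given a custom message:
+
+ ```python
+ from voluptuous import Coerce, MultipleInvalid, Schema
+
+ # Coerce the input to int, with a friendlier message on failure.
+ schema = Schema(Coerce(int, msg='expected something int-like'))
+
+ schema('42')         # -> 42
+ try:
+     schema('forty-two')
+ except MultipleInvalid as e:
+     print(e)         # the custom message: "expected something int-like"
+ ```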
+
+ ### Dictionaries
+
+ Each key-value pair in a schema dictionary is validated against each
+ key-value pair in the corresponding data dictionary:
+
+ ```pycon
+ >>> schema = Schema({1: 'one', 2: 'two'})
+ >>> schema({1: 'one'})
+ {1: 'one'}
+
+ ```
+
+ #### Extra dictionary keys
+
+ By default, any additional keys in the data that are not in the schema
+ will trigger exceptions:
+
+ ```pycon
+ >>> schema = Schema({2: 3})
+ >>> try:
+ ... schema({1: 2, 2: 3})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "extra keys not allowed @ data[1]"
+ True
+
+ ```
+
+ This behaviour can be altered on a per-schema basis. To allow
+ additional keys use
+ `Schema(..., extra=ALLOW_EXTRA)`:
+
+ ```pycon
+ >>> from voluptuous import ALLOW_EXTRA
+ >>> schema = Schema({2: 3}, extra=ALLOW_EXTRA)
+ >>> schema({1: 2, 2: 3})
+ {1: 2, 2: 3}
+
+ ```
+
+ To remove additional keys use
+ `Schema(..., extra=REMOVE_EXTRA)`:
+
+ ```pycon
+ >>> from voluptuous import REMOVE_EXTRA
+ >>> schema = Schema({2: 3}, extra=REMOVE_EXTRA)
+ >>> schema({1: 2, 2: 3})
+ {2: 3}
+
+ ```
+
+ It can also be overridden per-dictionary by using the catch-all marker
+ token `extra` as a key:
+
+ ```pycon
+ >>> from voluptuous import Extra
+ >>> schema = Schema({1: {Extra: object}})
+ >>> schema({1: {'foo': 'bar'}})
+ {1: {'foo': 'bar'}}
+
+ ```
+
+ #### Required dictionary keys
+
+ By default, keys in the schema are not required to be in the data:
+
+ ```pycon
+ >>> schema = Schema({1: 2, 3: 4})
+ >>> schema({3: 4})
+ {3: 4}
+
+ ```
+
+ Similarly to how extra keys work, this behaviour can be overridden
+ per-schema:
+
+ ```pycon
+ >>> schema = Schema({1: 2, 3: 4}, required=True)
+ >>> try:
+ ... schema({3: 4})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "required key not provided @ data[1]"
+ True
+
+ ```
+
+ And per-key, with the marker token `Required(key)`:
+
+ ```pycon
+ >>> schema = Schema({Required(1): 2, 3: 4})
+ >>> try:
+ ... schema({3: 4})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "required key not provided @ data[1]"
+ True
+ >>> schema({1: 2})
+ {1: 2}
+
+ ```
+
+ #### Optional dictionary keys
+
+ If a schema has `required=True`, keys may be individually marked as
+ optional using the marker token `Optional(key)`:
+
+ ```pycon
+ >>> from voluptuous import Optional
+ >>> schema = Schema({1: 2, Optional(3): 4}, required=True)
+ >>> try:
+ ... schema({})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "required key not provided @ data[1]"
+ True
+ >>> schema({1: 2})
+ {1: 2}
+ >>> try:
+ ... schema({1: 2, 4: 5})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "extra keys not allowed @ data[4]"
+ True
+
+ ```
+
+ ```pycon
+ >>> schema({1: 2, 3: 4})
+ {1: 2, 3: 4}
+
+ ```
+
+ ### Recursive / nested schema
+
+ You can use `voluptuous.Self` to define a nested schema:
+
+ ```pycon
+ >>> from voluptuous import Schema, Self
+ >>> recursive = Schema({"more": Self, "value": int})
+ >>> recursive({"more": {"value": 42}, "value": 41}) == {'more': {'value': 42}, 'value': 41}
+ True
+
+ ```
+
+ ### Extending an existing Schema
+
+ It often comes in handy to have a base `Schema` that is extended with more
+ requirements. In that case you can use `Schema.extend` to create a new
+ `Schema`:
+
+ ```pycon
+ >>> from voluptuous import Schema
+ >>> person = Schema({'name': str})
+ >>> person_with_age = person.extend({'age': int})
+ >>> sorted(list(person_with_age.schema.keys()))
+ ['age', 'name']
+
+ ```
+
+ The original `Schema` remains unchanged.
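+
+ For instance, continuing the example above, the base `person` schema still
+ only defines the `name` key:
+
+ ```pycon
+ >>> sorted(list(person.schema.keys()))
+ ['name']
+
+ ```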
+
+ ### Objects
+
+ Each key-value pair in a schema dictionary is validated against each
+ attribute-value pair in the corresponding object:
+
+ ```pycon
+ >>> from voluptuous import Object
+ >>> class Structure(object):
+ ... def __init__(self, q=None):
+ ... self.q = q
+ ... def __repr__(self):
+ ... return '<Structure(q={0.q!r})>'.format(self)
+ ...
+ >>> schema = Schema(Object({'q': 'one'}, cls=Structure))
+ >>> schema(Structure(q='one'))
+ <Structure(q='one')>
+
+ ```
+
+ ### Allow None values
+
+ To allow a value to be `None` as well, use `Any`:
+
+ ```pycon
+ >>> from voluptuous import Any
+
+ >>> schema = Schema(Any(None, int))
+ >>> schema(None)
+ >>> schema(5)
+ 5
+
+ ```
+
+ ## Error reporting
+
+ Validators must throw an `Invalid` exception if invalid data is passed
+ to them. All other exceptions are treated as errors in the validator and
+ will not be caught.
+
+ Each `Invalid` exception has an associated `path` attribute representing
+ the path in the data structure to our currently validating value, as well
+ as an `error_message` attribute that contains the message of the original
+ exception. This is especially useful when you want to catch `Invalid`
+ exceptions and give some feedback to the user, for instance in the context of
+ an HTTP API.
+
+
+ ```pycon
+ >>> def validate_email(email):
+ ... """Validate email."""
+ ... if not "@" in email:
+ ... raise Invalid("This email is invalid.")
+ ... return email
+ >>> schema = Schema({"email": validate_email})
+ >>> exc = None
+ >>> try:
+ ... schema({"email": "whatever"})
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "This email is invalid. for dictionary value @ data['email']"
+ >>> exc.path
+ ['email']
+ >>> exc.msg
+ 'This email is invalid.'
+ >>> exc.error_message
+ 'This email is invalid.'
+
+ ```
+
+ The `path` attribute is used during error reporting, but also during matching
+ to determine whether an error should be reported to the user or if the next
+ match should be attempted. This is determined by comparing the depth of the
+ path where the check is, to the depth of the path where the error occurred. If
+ the error is more than one level deeper, it is reported.
+
+ The upshot of this is that *matching is depth-first and fail-fast*.
+
+ To illustrate this, here is an example schema:
+
+ ```pycon
+ >>> schema = Schema([[2, 3], 6])
+
+ ```
+
+ Each value in the top-level list is matched depth-first in-order. Given
+ input data of `[[6]]`, the inner list will match the first element of
+ the schema, but the literal `6` will not match any of the elements of
+ that list. This error will be reported back to the user immediately. No
+ backtracking is attempted:
+
+ ```pycon
+ >>> try:
+ ... schema([[6]])
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "not a valid value @ data[0][0]"
+ True
+
+ ```
+
+ If we pass the data `[6]`, the `6` is not a list type and so will not
+ recurse into the first element of the schema. Matching will continue on
+ to the second element in the schema, and succeed:
+
+ ```pycon
+ >>> schema([6])
+ [6]
+
+ ```
+
+ ## Multi-field validation
+
+ Validation rules that involve multiple fields can be implemented as
+ custom validators. It's recommended to use `All()` to do a two-pass
+ validation - the first pass checking the basic structure of the data,
+ and only after that, the second pass applying your cross-field
+ validator:
+
+ ```python
+ def passwords_must_match(passwords):
+ if passwords['password'] != passwords['password_again']:
+ raise Invalid('passwords must match')
+ return passwords
+
+ s=Schema(All(
+ # First "pass" for field types
+ {'password':str, 'password_again':str},
+ # Follow up the first "pass" with your multi-field rules
+ passwords_must_match
+ ))
+
+ # valid
+ s({'password':'123', 'password_again':'123'})
+
+ # raises MultipleInvalid: passwords must match
+ s({'password':'123', 'password_again':'and now for something completely different'})
+
+ ```
+
+ With this structure, your multi-field validator will run with
+ pre-validated data from the first "pass" and so will not have to do
+ its own type checking on its inputs.
+
+ The flipside is that if the first "pass" of validation fails, your
+ cross-field validator will not run:
+
+ ```python
+ # raises Invalid because password_again is not a string
+ # passwords_must_match() will not run because first-pass validation already failed
+ s({'password':'123', 'password_again': 1337})
+ ```
+
+ ## Running tests
+
+ Voluptuous uses nosetests:
+
+ $ nosetests
+
+
+ ## Why use Voluptuous over another validation library?
+
+ **Validators are simple callables**
+ : No need to subclass anything, just use a function.
+
+ **Errors are simple exceptions.**
+ : A validator can just `raise Invalid(msg)` and expect the user to get
+ useful messages.
+
+ **Schemas are basic Python data structures.**
+ : Should your data be a dictionary of integer keys to strings?
+ `{int: str}` does what you expect. List of integers, floats or
+ strings? `[int, float, str]`.
+
+ **Designed from the ground up for validating more than just forms.**
+ : Nested data structures are treated in the same way as any other
+ type. Need a list of dictionaries? `[{}]`
+
+ **Consistency.**
+ : Types in the schema are checked as types. Values are compared as
+ values. Callables are called to validate. Simple.
+
+ ## Other libraries and inspirations
+
+ Voluptuous is heavily inspired by
+ [Validino](http://code.google.com/p/validino/), and to a lesser extent,
+ [jsonvalidator](http://code.google.com/p/jsonvalidator/) and
+ [json\_schema](http://blog.sendapatch.se/category/json_schema.html).
+
+ [pytest-voluptuous](https://github.com/F-Secure/pytest-voluptuous) is a
+ [pytest](https://github.com/pytest-dev/pytest) plugin that helps in
+ using voluptuous validators in `assert`s.
+
+ I greatly prefer the light-weight style promoted by these libraries to
+ the complexity of libraries like FormEncode.
+
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Description-Content-Type: text/markdown
diff --git a/third_party/python/voluptuous/README.md b/third_party/python/voluptuous/README.md
new file mode 100644
index 0000000000..46e2288f4b
--- /dev/null
+++ b/third_party/python/voluptuous/README.md
@@ -0,0 +1,723 @@
+# Voluptuous is a Python data validation library
+
+[![Build Status](https://travis-ci.org/alecthomas/voluptuous.png)](https://travis-ci.org/alecthomas/voluptuous)
+[![Coverage Status](https://coveralls.io/repos/github/alecthomas/voluptuous/badge.svg?branch=master)](https://coveralls.io/github/alecthomas/voluptuous?branch=master) [![Gitter chat](https://badges.gitter.im/alecthomas.png)](https://gitter.im/alecthomas/Lobby)
+
+Voluptuous, *despite* the name, is a Python data validation library. It
+is primarily intended for validating data coming into Python as JSON,
+YAML, etc.
+
+It has three goals:
+
+1. Simplicity.
+2. Support for complex data structures.
+3. Provide useful error messages.
+
+## Contact
+
+Voluptuous now has a mailing list! Send a mail to
+[<voluptuous@librelist.com>](mailto:voluptuous@librelist.com) to subscribe. Instructions
+will follow.
+
+You can also contact me directly via [email](mailto:alec@swapoff.org) or
+[Twitter](https://twitter.com/alecthomas).
+
+To file a bug, create a [new issue](https://github.com/alecthomas/voluptuous/issues/new) on GitHub with a short example of how to replicate the issue.
+
+## Documentation
+
+The documentation is provided [here](http://alecthomas.github.io/voluptuous/).
+
+## Changelog
+
+See [CHANGELOG.md](https://github.com/alecthomas/voluptuous/blob/master/CHANGELOG.md).
+
+## Show me an example
+
+Twitter's [user search API](https://dev.twitter.com/rest/reference/get/users/search) accepts
+query URLs like:
+
+```
+$ curl 'https://api.twitter.com/1.1/users/search.json?q=python&per_page=20&page=1'
+```
+
+To validate this we might use a schema like:
+
+```pycon
+>>> from voluptuous import Schema
+>>> schema = Schema({
+... 'q': str,
+... 'per_page': int,
+... 'page': int,
+... })
+
+```
+
+This schema very succinctly and roughly describes the data required by
+the API, and will work fine. But it has a few problems. Firstly, it
+doesn't fully express the constraints of the API. According to the API,
+`per_page` should be restricted to at most 20, defaulting to 5, for
+example. To describe the semantics of the API more accurately, our
+schema will need to be more thoroughly defined:
+
+```pycon
+>>> from voluptuous import Required, All, Length, Range
+>>> schema = Schema({
+... Required('q'): All(str, Length(min=1)),
+... Required('per_page', default=5): All(int, Range(min=1, max=20)),
+... 'page': All(int, Range(min=0)),
+... })
+
+```
+
+This schema fully enforces the interface defined in Twitter's
+documentation, and goes a little further for completeness.
+
+"q" is required:
+
+```pycon
+>>> from voluptuous import MultipleInvalid, Invalid
+>>> try:
+... schema({})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "required key not provided @ data['q']"
+True
+
+```
+
+...must be a string:
+
+```pycon
+>>> try:
+... schema({'q': 123})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "expected str for dictionary value @ data['q']"
+True
+
+```
+
+...and must be at least one character in length:
+
+```pycon
+>>> try:
+... schema({'q': ''})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "length of value must be at least 1 for dictionary value @ data['q']"
+True
+>>> schema({'q': '#topic'}) == {'q': '#topic', 'per_page': 5}
+True
+
+```
+
+"per\_page" is a positive integer no greater than 20:
+
+```pycon
+>>> try:
+... schema({'q': '#topic', 'per_page': 900})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "value must be at most 20 for dictionary value @ data['per_page']"
+True
+>>> try:
+... schema({'q': '#topic', 'per_page': -10})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "value must be at least 1 for dictionary value @ data['per_page']"
+True
+
+```
+
+"page" is an integer \>= 0:
+
+```pycon
+>>> try:
+... schema({'q': '#topic', 'per_page': 'one'})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc)
+"expected int for dictionary value @ data['per_page']"
+>>> schema({'q': '#topic', 'page': 1}) == {'q': '#topic', 'page': 1, 'per_page': 5}
+True
+
+```
+
+## Defining schemas
+
+Schemas are nested data structures consisting of dictionaries, lists,
+scalars and *validators*. Each node in the input schema is pattern
+matched against corresponding nodes in the input data.
+
+### Literals
+
+Literals in the schema are matched using normal equality checks:
+
+```pycon
+>>> schema = Schema(1)
+>>> schema(1)
+1
+>>> schema = Schema('a string')
+>>> schema('a string')
+'a string'
+
+```
+
+### Types
+
+Types in the schema are matched by checking if the corresponding value
+is an instance of the type:
+
+```pycon
+>>> schema = Schema(int)
+>>> schema(1)
+1
+>>> try:
+... schema('one')
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "expected int"
+True
+
+```
+
+### URLs
+
+URLs in the schema are matched using the `urlparse` library.
+
+```pycon
+>>> from voluptuous import Url
+>>> schema = Schema(Url())
+>>> schema('http://w3.org')
+'http://w3.org'
+>>> try:
+... schema('one')
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "expected a URL"
+True
+
+```
+
+### Lists
+
+Lists in the schema are treated as a set of valid values. Each element
+in the schema list is compared to each value in the input data:
+
+```pycon
+>>> schema = Schema([1, 'a', 'string'])
+>>> schema([1])
+[1]
+>>> schema([1, 1, 1])
+[1, 1, 1]
+>>> schema(['a', 1, 'string', 1, 'string'])
+['a', 1, 'string', 1, 'string']
+
+```
+
+However, an empty list (`[]`) is treated as is. If you want to specify a list that can
+contain anything, specify it as `list`:
+
+```pycon
+>>> schema = Schema([])
+>>> try:
+... schema([1])
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "not a valid value @ data[1]"
+True
+>>> schema([])
+[]
+>>> schema = Schema(list)
+>>> schema([])
+[]
+>>> schema([1, 2])
+[1, 2]
+
+```
+
+### Sets and frozensets
+
+Sets and frozensets are treated as a set of valid values. Each element
+in the schema set is compared to each value in the input data:
+
+```pycon
+>>> schema = Schema({42})
+>>> schema({42}) == {42}
+True
+>>> try:
+... schema({43})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "invalid value in set"
+True
+>>> schema = Schema({int})
+>>> schema({1, 2, 3}) == {1, 2, 3}
+True
+>>> schema = Schema({int, str})
+>>> schema({1, 2, 'abc'}) == {1, 2, 'abc'}
+True
+>>> schema = Schema(frozenset([int]))
+>>> try:
+... schema({3})
+... raise AssertionError('Invalid not raised')
+... except Invalid as e:
+... exc = e
+>>> str(exc) == 'expected a frozenset'
+True
+
+```
+
+However, an empty set (`set()`) is treated as is. If you want to specify a set
+that can contain anything, specify it as `set`:
+
+```pycon
+>>> schema = Schema(set())
+>>> try:
+... schema({1})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "invalid value in set"
+True
+>>> schema(set()) == set()
+True
+>>> schema = Schema(set)
+>>> schema({1, 2}) == {1, 2}
+True
+
+```
+
+### Validation functions
+
+Validators are simple callables that raise an `Invalid` exception when
+they encounter invalid data. The criteria for determining validity are
+entirely up to the implementation; it may check that a value is a valid
+username with `pwd.getpwnam()`, it may check that a value is of a
+specific type, and so on.
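+
+For instance, a hand-rolled validator that raises `Invalid` directly might
+look like the following minimal sketch (`PositiveNumber` is an illustrative
+name, not part of voluptuous):
+
+```python
+from voluptuous import Invalid, MultipleInvalid, Schema
+
+def PositiveNumber(v):
+    # Raise Invalid to reject the value; return it (possibly transformed)
+    # to accept it.
+    if not isinstance(v, (int, float)) or v <= 0:
+        raise Invalid('expected a positive number')
+    return v
+
+schema = Schema(PositiveNumber)
+schema(5)       # -> 5
+try:
+    schema(-1)
+except MultipleInvalid as e:
+    print(e)    # "expected a positive number"
+```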
+
+The simplest kind of validator is a Python function that raises
+ValueError when its argument is invalid. Conveniently, many builtin
+Python functions have this property. Here's an example of a date
+validator:
+
+```pycon
+>>> from datetime import datetime
+>>> def Date(fmt='%Y-%m-%d'):
+... return lambda v: datetime.strptime(v, fmt)
+
+```
+
+```pycon
+>>> schema = Schema(Date())
+>>> schema('2013-03-03')
+datetime.datetime(2013, 3, 3, 0, 0)
+>>> try:
+... schema('2013-03')
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "not a valid value"
+True
+
+```
+
+In addition to simply determining if a value is valid, validators may
+mutate the value into a valid form. An example of this is the
+`Coerce(type)` function, which returns a function that coerces its
+argument to the given type:
+
+```python
+def Coerce(type, msg=None):
+ """Coerce a value to a type.
+
+ If the type constructor throws a ValueError, the value will be marked as
+ Invalid.
+ """
+ def f(v):
+ try:
+ return type(v)
+ except ValueError:
+ raise Invalid(msg or ('expected %s' % type.__name__))
+ return f
+
+```
+
+This example also shows a common idiom where an optional human-readable
+message can be provided. This can vastly improve the usefulness of the
+resulting error messages.
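+
+As a rough sketch of that idiom in use (the message text below is purely
+illustrative), `Coerce` can be given a custom message:
+
+```python
+from voluptuous import Coerce, MultipleInvalid, Schema
+
+# Coerce the input to int, with a friendlier message on failure.
+schema = Schema(Coerce(int, msg='expected something int-like'))
+
+schema('42')         # -> 42
+try:
+    schema('forty-two')
+except MultipleInvalid as e:
+    print(e)         # the custom message: "expected something int-like"
+```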
+
+### Dictionaries
+
+Each key-value pair in a schema dictionary is validated against each
+key-value pair in the corresponding data dictionary:
+
+```pycon
+>>> schema = Schema({1: 'one', 2: 'two'})
+>>> schema({1: 'one'})
+{1: 'one'}
+
+```
+
+#### Extra dictionary keys
+
+By default, any additional keys in the data that are not in the schema
+will trigger exceptions:
+
+```pycon
+>>> schema = Schema({2: 3})
+>>> try:
+... schema({1: 2, 2: 3})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "extra keys not allowed @ data[1]"
+True
+
+```
+
+This behaviour can be altered on a per-schema basis. To allow
+additional keys use
+`Schema(..., extra=ALLOW_EXTRA)`:
+
+```pycon
+>>> from voluptuous import ALLOW_EXTRA
+>>> schema = Schema({2: 3}, extra=ALLOW_EXTRA)
+>>> schema({1: 2, 2: 3})
+{1: 2, 2: 3}
+
+```
+
+To remove additional keys use
+`Schema(..., extra=REMOVE_EXTRA)`:
+
+```pycon
+>>> from voluptuous import REMOVE_EXTRA
+>>> schema = Schema({2: 3}, extra=REMOVE_EXTRA)
+>>> schema({1: 2, 2: 3})
+{2: 3}
+
+```
+
+It can also be overridden per-dictionary by using the catch-all marker
+token `extra` as a key:
+
+```pycon
+>>> from voluptuous import Extra
+>>> schema = Schema({1: {Extra: object}})
+>>> schema({1: {'foo': 'bar'}})
+{1: {'foo': 'bar'}}
+
+```
+
+#### Required dictionary keys
+
+By default, keys in the schema are not required to be in the data:
+
+```pycon
+>>> schema = Schema({1: 2, 3: 4})
+>>> schema({3: 4})
+{3: 4}
+
+```
+
+Similarly to how extra keys work, this behaviour can be overridden
+per-schema:
+
+```pycon
+>>> schema = Schema({1: 2, 3: 4}, required=True)
+>>> try:
+... schema({3: 4})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "required key not provided @ data[1]"
+True
+
+```
+
+And per-key, with the marker token `Required(key)`:
+
+```pycon
+>>> schema = Schema({Required(1): 2, 3: 4})
+>>> try:
+... schema({3: 4})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "required key not provided @ data[1]"
+True
+>>> schema({1: 2})
+{1: 2}
+
+```
+
+#### Optional dictionary keys
+
+If a schema has `required=True`, keys may be individually marked as
+optional using the marker token `Optional(key)`:
+
+```pycon
+>>> from voluptuous import Optional
+>>> schema = Schema({1: 2, Optional(3): 4}, required=True)
+>>> try:
+... schema({})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "required key not provided @ data[1]"
+True
+>>> schema({1: 2})
+{1: 2}
+>>> try:
+... schema({1: 2, 4: 5})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "extra keys not allowed @ data[4]"
+True
+
+```
+
+```pycon
+>>> schema({1: 2, 3: 4})
+{1: 2, 3: 4}
+
+```
+
+### Recursive / nested schema
+
+You can use `voluptuous.Self` to define a nested schema:
+
+```pycon
+>>> from voluptuous import Schema, Self
+>>> recursive = Schema({"more": Self, "value": int})
+>>> recursive({"more": {"value": 42}, "value": 41}) == {'more': {'value': 42}, 'value': 41}
+True
+
+```
+
+### Extending an existing Schema
+
+It often comes in handy to have a base `Schema` that is extended with more
+requirements. In that case you can use `Schema.extend` to create a new
+`Schema`:
+
+```pycon
+>>> from voluptuous import Schema
+>>> person = Schema({'name': str})
+>>> person_with_age = person.extend({'age': int})
+>>> sorted(list(person_with_age.schema.keys()))
+['age', 'name']
+
+```
+
+The original `Schema` remains unchanged.
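+
+For instance, continuing the example above, the base `person` schema still
+only defines the `name` key:
+
+```pycon
+>>> sorted(list(person.schema.keys()))
+['name']
+
+```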
+
+### Objects
+
+Each key-value pair in a schema dictionary is validated against each
+attribute-value pair in the corresponding object:
+
+```pycon
+>>> from voluptuous import Object
+>>> class Structure(object):
+... def __init__(self, q=None):
+... self.q = q
+... def __repr__(self):
+... return '<Structure(q={0.q!r})>'.format(self)
+...
+>>> schema = Schema(Object({'q': 'one'}, cls=Structure))
+>>> schema(Structure(q='one'))
+<Structure(q='one')>
+
+```
+
+### Allow None values
+
+To allow a value to be `None` as well, use `Any`:
+
+```pycon
+>>> from voluptuous import Any
+
+>>> schema = Schema(Any(None, int))
+>>> schema(None)
+>>> schema(5)
+5
+
+```
+
+## Error reporting
+
+Validators must throw an `Invalid` exception if invalid data is passed
+to them. All other exceptions are treated as errors in the validator and
+will not be caught.
+
+Each `Invalid` exception has an associated `path` attribute representing
+the path in the data structure to our currently validating value, as well
+as an `error_message` attribute that contains the message of the original
+exception. This is especially useful when you want to catch `Invalid`
+exceptions and give some feedback to the user, for instance in the context of
+an HTTP API.
+
+
+```pycon
+>>> def validate_email(email):
+... """Validate email."""
+... if not "@" in email:
+... raise Invalid("This email is invalid.")
+... return email
+>>> schema = Schema({"email": validate_email})
+>>> exc = None
+>>> try:
+... schema({"email": "whatever"})
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc)
+"This email is invalid. for dictionary value @ data['email']"
+>>> exc.path
+['email']
+>>> exc.msg
+'This email is invalid.'
+>>> exc.error_message
+'This email is invalid.'
+
+```
+
+The `path` attribute is used during error reporting, but also during matching
+to determine whether an error should be reported to the user or if the next
+match should be attempted. This is determined by comparing the depth of the
+path where the check is, to the depth of the path where the error occurred. If
+the error is more than one level deeper, it is reported.
+
+The upshot of this is that *matching is depth-first and fail-fast*.
+
+To illustrate this, here is an example schema:
+
+```pycon
+>>> schema = Schema([[2, 3], 6])
+
+```
+
+Each value in the top-level list is matched depth-first in-order. Given
+input data of `[[6]]`, the inner list will match the first element of
+the schema, but the literal `6` will not match any of the elements of
+that list. This error will be reported back to the user immediately. No
+backtracking is attempted:
+
+```pycon
+>>> try:
+... schema([[6]])
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "not a valid value @ data[0][0]"
+True
+
+```
+
+If we pass the data `[6]`, the `6` is not a list type and so will not
+recurse into the first element of the schema. Matching will continue on
+to the second element in the schema, and succeed:
+
+```pycon
+>>> schema([6])
+[6]
+
+```
+
+## Multi-field validation
+
+Validation rules that involve multiple fields can be implemented as
+custom validators. It's recommended to use `All()` to do a two-pass
+validation - the first pass checking the basic structure of the data,
+and only after that, the second pass applying your cross-field
+validator:
+
+```python
+def passwords_must_match(passwords):
+ if passwords['password'] != passwords['password_again']:
+ raise Invalid('passwords must match')
+ return passwords
+
+s=Schema(All(
+ # First "pass" for field types
+ {'password':str, 'password_again':str},
+ # Follow up the first "pass" with your multi-field rules
+ passwords_must_match
+))
+
+# valid
+s({'password':'123', 'password_again':'123'})
+
+# raises MultipleInvalid: passwords must match
+s({'password':'123', 'password_again':'and now for something completely different'})
+
+```
+
+With this structure, your multi-field validator will run with
+pre-validated data from the first "pass" and so will not have to do
+its own type checking on its inputs.
+
+The flipside is that if the first "pass" of validation fails, your
+cross-field validator will not run:
+
+```python
+# raises Invalid because password_again is not a string
+# passwords_must_match() will not run because first-pass validation already failed
+s({'password':'123', 'password_again': 1337})
+```
+
+## Running tests
+
+Voluptuous uses nosetests:
+
+ $ nosetests
+
+
+## Why use Voluptuous over another validation library?
+
+**Validators are simple callables**
+: No need to subclass anything, just use a function.
+
+**Errors are simple exceptions.**
+: A validator can just `raise Invalid(msg)` and expect the user to get
+useful messages.
+
+**Schemas are basic Python data structures.**
+: Should your data be a dictionary of integer keys to strings?
+`{int: str}` does what you expect. List of integers, floats or
+strings? `[int, float, str]`.
+
+**Designed from the ground up for validating more than just forms.**
+: Nested data structures are treated in the same way as any other
+type. Need a list of dictionaries? `[{}]`
+
+**Consistency.**
+: Types in the schema are checked as types. Values are compared as
+values. Callables are called to validate. Simple.
+
+## Other libraries and inspirations
+
+Voluptuous is heavily inspired by
+[Validino](http://code.google.com/p/validino/), and to a lesser extent,
+[jsonvalidator](http://code.google.com/p/jsonvalidator/) and
+[json\_schema](http://blog.sendapatch.se/category/json_schema.html).
+
+[pytest-voluptuous](https://github.com/F-Secure/pytest-voluptuous) is a
+[pytest](https://github.com/pytest-dev/pytest) plugin that helps in
+using voluptuous validators in `assert`s.
+
+I greatly prefer the light-weight style promoted by these libraries to
+the complexity of libraries like FormEncode.
diff --git a/third_party/python/voluptuous/setup.cfg b/third_party/python/voluptuous/setup.cfg
new file mode 100644
index 0000000000..1ccde4cee7
--- /dev/null
+++ b/third_party/python/voluptuous/setup.cfg
@@ -0,0 +1,9 @@
+[nosetests]
+doctest-extension = md
+with-doctest = 1
+where = .
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/voluptuous/setup.py b/third_party/python/voluptuous/setup.py
new file mode 100644
index 0000000000..6408f8ba96
--- /dev/null
+++ b/third_party/python/voluptuous/setup.py
@@ -0,0 +1,40 @@
+from setuptools import setup
+
+import sys
+import io
+import os
+import atexit
+sys.path.insert(0, '.')
+version = __import__('voluptuous').__version__
+
+
+with io.open('README.md', encoding='utf-8') as f:
+ long_description = f.read()
+ description = long_description.splitlines()[0].strip()
+
+
+setup(
+ name='voluptuous',
+ url='https://github.com/alecthomas/voluptuous',
+ download_url='https://pypi.python.org/pypi/voluptuous',
+ version=version,
+ description=description,
+ long_description=long_description,
+ long_description_content_type='text/markdown',
+ license='BSD',
+ platforms=['any'],
+ packages=['voluptuous'],
+ author='Alec Thomas',
+ author_email='alec@swapoff.org',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: BSD License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ ]
+)
diff --git a/third_party/python/voluptuous/voluptuous/__init__.py b/third_party/python/voluptuous/voluptuous/__init__.py
new file mode 100644
index 0000000000..10236d5a66
--- /dev/null
+++ b/third_party/python/voluptuous/voluptuous/__init__.py
@@ -0,0 +1,9 @@
+# flake8: noqa
+
+from voluptuous.schema_builder import *
+from voluptuous.validators import *
+from voluptuous.util import *
+from voluptuous.error import *
+
+__version__ = '0.11.5'
+__author__ = 'alecthomas'
diff --git a/third_party/python/voluptuous/voluptuous/error.py b/third_party/python/voluptuous/voluptuous/error.py
new file mode 100644
index 0000000000..86c4e0a359
--- /dev/null
+++ b/third_party/python/voluptuous/voluptuous/error.py
@@ -0,0 +1,199 @@
+
+class Error(Exception):
+ """Base validation exception."""
+
+
+class SchemaError(Error):
+ """An error was encountered in the schema."""
+
+
+class Invalid(Error):
+ """The data was invalid.
+
+ :attr msg: The error message.
+ :attr path: The path to the error, as a list of keys in the source data.
+ :attr error_message: The actual error message that was raised, as a
+ string.
+
+ """
+
+ def __init__(self, message, path=None, error_message=None, error_type=None):
+ Error.__init__(self, message)
+ self.path = path or []
+ self.error_message = error_message or message
+ self.error_type = error_type
+
+ @property
+ def msg(self):
+ return self.args[0]
+
+ def __str__(self):
+ path = ' @ data[%s]' % ']['.join(map(repr, self.path)) \
+ if self.path else ''
+ output = Exception.__str__(self)
+ if self.error_type:
+ output += ' for ' + self.error_type
+ return output + path
+
+ def prepend(self, path):
+ self.path = path + self.path
+
+
+class MultipleInvalid(Invalid):
+ def __init__(self, errors=None):
+ self.errors = errors[:] if errors else []
+
+ def __repr__(self):
+ return 'MultipleInvalid(%r)' % self.errors
+
+ @property
+ def msg(self):
+ return self.errors[0].msg
+
+ @property
+ def path(self):
+ return self.errors[0].path
+
+ @property
+ def error_message(self):
+ return self.errors[0].error_message
+
+ def add(self, error):
+ self.errors.append(error)
+
+ def __str__(self):
+ return str(self.errors[0])
+
+ def prepend(self, path):
+ for error in self.errors:
+ error.prepend(path)
+
+
+class RequiredFieldInvalid(Invalid):
+ """Required field was missing."""
+
+
+class ObjectInvalid(Invalid):
+ """The value we found was not an object."""
+
+
+class DictInvalid(Invalid):
+ """The value found was not a dict."""
+
+
+class ExclusiveInvalid(Invalid):
+ """More than one value found in exclusion group."""
+
+
+class InclusiveInvalid(Invalid):
+ """Not all values found in inclusion group."""
+
+
+class SequenceTypeInvalid(Invalid):
+ """The type found is not a sequence type."""
+
+
+class TypeInvalid(Invalid):
+ """The value was not of required type."""
+
+
+class ValueInvalid(Invalid):
+ """The value was found invalid by evaluation function."""
+
+
+class ContainsInvalid(Invalid):
+ """List does not contain item"""
+
+
+class ScalarInvalid(Invalid):
+ """Scalars did not match."""
+
+
+class CoerceInvalid(Invalid):
+ """Impossible to coerce value to type."""
+
+
+class AnyInvalid(Invalid):
+ """The value did not pass any validator."""
+
+
+class AllInvalid(Invalid):
+ """The value did not pass all validators."""
+
+
+class MatchInvalid(Invalid):
+ """The value does not match the given regular expression."""
+
+
+class RangeInvalid(Invalid):
+ """The value is not in given range."""
+
+
+class TrueInvalid(Invalid):
+ """The value is not True."""
+
+
+class FalseInvalid(Invalid):
+ """The value is not False."""
+
+
+class BooleanInvalid(Invalid):
+ """The value is not a boolean."""
+
+
+class UrlInvalid(Invalid):
+ """The value is not a url."""
+
+
+class EmailInvalid(Invalid):
+ """The value is not a email."""
+
+
+class FileInvalid(Invalid):
+ """The value is not a file."""
+
+
+class DirInvalid(Invalid):
+ """The value is not a directory."""
+
+
+class PathInvalid(Invalid):
+ """The value is not a path."""
+
+
+class LiteralInvalid(Invalid):
+ """The literal values do not match."""
+
+
+class LengthInvalid(Invalid):
+ pass
+
+
+class DatetimeInvalid(Invalid):
+ """The value is not a formatted datetime string."""
+
+
+class DateInvalid(Invalid):
+ """The value is not a formatted date string."""
+
+
+class InInvalid(Invalid):
+ pass
+
+
+class NotInInvalid(Invalid):
+ pass
+
+
+class ExactSequenceInvalid(Invalid):
+ pass
+
+
+class NotEnoughValid(Invalid):
+ """The value did not pass enough validations."""
+ pass
+
+
+class TooManyValid(Invalid):
+ """The value passed more than expected validations."""
+ pass
diff --git a/third_party/python/voluptuous/voluptuous/humanize.py b/third_party/python/voluptuous/voluptuous/humanize.py
new file mode 100644
index 0000000000..91ab2015fb
--- /dev/null
+++ b/third_party/python/voluptuous/voluptuous/humanize.py
@@ -0,0 +1,40 @@
+from voluptuous import Invalid, MultipleInvalid
+from voluptuous.error import Error
+
+
+MAX_VALIDATION_ERROR_ITEM_LENGTH = 500
+
+
+def _nested_getitem(data, path):
+ for item_index in path:
+ try:
+ data = data[item_index]
+ except (KeyError, IndexError, TypeError):
+ # The index is not present in the dictionary, list or other
+ # indexable or data is not subscriptable
+ return None
+ return data
+
+
+def humanize_error(data, validation_error, max_sub_error_length=MAX_VALIDATION_ERROR_ITEM_LENGTH):
+ """ Provide a more helpful + complete validation error message than that provided automatically
+ Invalid and MultipleInvalid do not include the offending value in error messages,
+ and MultipleInvalid.__str__ only provides the first error.
+ """
+ if isinstance(validation_error, MultipleInvalid):
+ return '\n'.join(sorted(
+ humanize_error(data, sub_error, max_sub_error_length)
+ for sub_error in validation_error.errors
+ ))
+ else:
+ offending_item_summary = repr(_nested_getitem(data, validation_error.path))
+ if len(offending_item_summary) > max_sub_error_length:
+ offending_item_summary = offending_item_summary[:max_sub_error_length - 3] + '...'
+ return '%s. Got %s' % (validation_error, offending_item_summary)
+
+
+def validate_with_humanized_errors(data, schema, max_sub_error_length=MAX_VALIDATION_ERROR_ITEM_LENGTH):
+ try:
+ return schema(data)
+ except (Invalid, MultipleInvalid) as e:
+ raise Error(humanize_error(data, e, max_sub_error_length))
diff --git a/third_party/python/voluptuous/voluptuous/schema_builder.py b/third_party/python/voluptuous/voluptuous/schema_builder.py
new file mode 100644
index 0000000000..8d7a81a3e3
--- /dev/null
+++ b/third_party/python/voluptuous/voluptuous/schema_builder.py
@@ -0,0 +1,1292 @@
+import collections
+import inspect
+import re
+from functools import wraps
+import sys
+from contextlib import contextmanager
+
+import itertools
+from voluptuous import error as er
+
+if sys.version_info >= (3,):
+ long = int
+ unicode = str
+ basestring = str
+ ifilter = filter
+
+ def iteritems(d):
+ return d.items()
+else:
+ from itertools import ifilter
+
+ def iteritems(d):
+ return d.iteritems()
+
+"""Schema validation for Python data structures.
+
+Given eg. a nested data structure like this:
+
+ {
+ 'exclude': ['Users', 'Uptime'],
+ 'include': [],
+ 'set': {
+ 'snmp_community': 'public',
+ 'snmp_timeout': 15,
+ 'snmp_version': '2c',
+ },
+ 'targets': {
+ 'localhost': {
+ 'exclude': ['Uptime'],
+ 'features': {
+ 'Uptime': {
+ 'retries': 3,
+ },
+ 'Users': {
+ 'snmp_community': 'monkey',
+ 'snmp_port': 15,
+ },
+ },
+ 'include': ['Users'],
+ 'set': {
+ 'snmp_community': 'monkeys',
+ },
+ },
+ },
+ }
+
+A schema like this:
+
+ >>> settings = {
+ ... 'snmp_community': str,
+ ... 'retries': int,
+ ... 'snmp_version': All(Coerce(str), Any('3', '2c', '1')),
+ ... }
+ >>> features = ['Ping', 'Uptime', 'Http']
+ >>> schema = Schema({
+ ... 'exclude': features,
+ ... 'include': features,
+ ... 'set': settings,
+ ... 'targets': {
+ ... 'exclude': features,
+ ... 'include': features,
+ ... 'features': {
+ ... str: settings,
+ ... },
+ ... },
+ ... })
+
+Validate like so:
+
+ >>> schema({
+ ... 'set': {
+ ... 'snmp_community': 'public',
+ ... 'snmp_version': '2c',
+ ... },
+ ... 'targets': {
+ ... 'exclude': ['Ping'],
+ ... 'features': {
+ ... 'Uptime': {'retries': 3},
+ ... 'Users': {'snmp_community': 'monkey'},
+ ... },
+ ... },
+ ... }) == {
+ ... 'set': {'snmp_version': '2c', 'snmp_community': 'public'},
+ ... 'targets': {
+ ... 'exclude': ['Ping'],
+ ... 'features': {'Uptime': {'retries': 3},
+ ... 'Users': {'snmp_community': 'monkey'}}}}
+ True
+"""
+
+# options for extra keys
+PREVENT_EXTRA = 0 # any extra key not in schema will raise an error
+ALLOW_EXTRA = 1 # extra keys not in schema will be included in output
+REMOVE_EXTRA = 2 # extra keys not in schema will be excluded from output
+
+
+def _isnamedtuple(obj):
+ return isinstance(obj, tuple) and hasattr(obj, '_fields')
+
+
+primitive_types = (str, unicode, bool, int, float)
+
+
+class Undefined(object):
+ def __nonzero__(self):
+ return False
+
+ def __repr__(self):
+ return '...'
+
+
+UNDEFINED = Undefined()
+
+
+def Self():
+ raise er.SchemaError('"Self" should never be called')
+
+
+def default_factory(value):
+ if value is UNDEFINED or callable(value):
+ return value
+ return lambda: value
+
+
+@contextmanager
+def raises(exc, msg=None, regex=None):
+ try:
+ yield
+ except exc as e:
+ if msg is not None:
+ assert str(e) == msg, '%r != %r' % (str(e), msg)
+ if regex is not None:
+ assert re.search(regex, str(e)), '%r does not match %r' % (str(e), regex)
+
+
+def Extra(_):
+ """Allow keys in the data that are not present in the schema."""
+ raise er.SchemaError('"Extra" should never be called')
+
+
+# As extra() is never called there's no way to catch references to the
+# deprecated object, so we just leave an alias here instead.
+extra = Extra
+
+
+class Schema(object):
+ """A validation schema.
+
+ The schema is a Python tree-like structure where nodes are pattern
+ matched against corresponding trees of values.
+
+ Nodes can be values, in which case a direct comparison is used, types,
+ in which case an isinstance() check is performed, or callables, which will
+ validate and optionally convert the value.
+
+ We can equate schemas also.
+
+ For Example:
+
+ >>> v = Schema({Required('a'): unicode})
+ >>> v1 = Schema({Required('a'): unicode})
+ >>> v2 = Schema({Required('b'): unicode})
+ >>> assert v == v1
+ >>> assert v != v2
+
+ """
+
+ _extra_to_name = {
+ REMOVE_EXTRA: 'REMOVE_EXTRA',
+ ALLOW_EXTRA: 'ALLOW_EXTRA',
+ PREVENT_EXTRA: 'PREVENT_EXTRA',
+ }
+
+ def __init__(self, schema, required=False, extra=PREVENT_EXTRA):
+ """Create a new Schema.
+
+ :param schema: Validation schema. See :module:`voluptuous` for details.
+ :param required: Keys defined in the schema must be in the data.
+ :param extra: Specify how extra keys in the data are treated:
+ - :const:`~voluptuous.PREVENT_EXTRA`: to disallow any undefined
+ extra keys (raise ``Invalid``).
+ - :const:`~voluptuous.ALLOW_EXTRA`: to include undefined extra
+ keys in the output.
+ - :const:`~voluptuous.REMOVE_EXTRA`: to exclude undefined extra keys
+ from the output.
+ - Any value other than the above defaults to
+ :const:`~voluptuous.PREVENT_EXTRA`
+ """
+ self.schema = schema
+ self.required = required
+ self.extra = int(extra) # ensure the value is an integer
+ self._compiled = self._compile(schema)
+
+ @classmethod
+ def infer(cls, data, **kwargs):
+ """Create a Schema from concrete data (e.g. an API response).
+
+ For example, this will take a dict like:
+
+ {
+ 'foo': 1,
+ 'bar': {
+ 'a': True,
+ 'b': False
+ },
+ 'baz': ['purple', 'monkey', 'dishwasher']
+ }
+
+ And return a Schema:
+
+ {
+ 'foo': int,
+ 'bar': {
+ 'a': bool,
+ 'b': bool
+ },
+ 'baz': [str]
+ }
+
+ Note: only very basic inference is supported.
+ """
+ def value_to_schema_type(value):
+ if isinstance(value, dict):
+ if len(value) == 0:
+ return dict
+ return {k: value_to_schema_type(v)
+ for k, v in iteritems(value)}
+ if isinstance(value, list):
+ if len(value) == 0:
+ return list
+ else:
+ return [value_to_schema_type(v)
+ for v in value]
+ return type(value)
+
+ return cls(value_to_schema_type(data), **kwargs)
+
+ def __eq__(self, other):
+ if not isinstance(other, Schema):
+ return False
+ return other.schema == self.schema
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __str__(self):
+ return str(self.schema)
+
+ def __repr__(self):
+ return "<Schema(%s, extra=%s, required=%s) object at 0x%x>" % (
+ self.schema, self._extra_to_name.get(self.extra, '??'),
+ self.required, id(self))
+
+ def __call__(self, data):
+ """Validate data against this schema."""
+ try:
+ return self._compiled([], data)
+ except er.MultipleInvalid:
+ raise
+ except er.Invalid as e:
+ raise er.MultipleInvalid([e])
+ # return self.validate([], self.schema, data)
+
+ def _compile(self, schema):
+ if schema is Extra:
+ return lambda _, v: v
+ if schema is Self:
+ return lambda p, v: self._compiled(p, v)
+ elif hasattr(schema, "__voluptuous_compile__"):
+ return schema.__voluptuous_compile__(self)
+ if isinstance(schema, Object):
+ return self._compile_object(schema)
+ if isinstance(schema, collections.Mapping):
+ return self._compile_dict(schema)
+ elif isinstance(schema, list):
+ return self._compile_list(schema)
+ elif isinstance(schema, tuple):
+ return self._compile_tuple(schema)
+ elif isinstance(schema, (frozenset, set)):
+ return self._compile_set(schema)
+ type_ = type(schema)
+ if inspect.isclass(schema):
+ type_ = schema
+ if type_ in (bool, bytes, int, long, str, unicode, float, complex, object,
+ list, dict, type(None)) or callable(schema):
+ return _compile_scalar(schema)
+ raise er.SchemaError('unsupported schema data type %r' %
+ type(schema).__name__)
+
+ def _compile_mapping(self, schema, invalid_msg=None):
+ """Create validator for given mapping."""
+ invalid_msg = invalid_msg or 'mapping value'
+
+ # Keys that may be required
+ all_required_keys = set(key for key in schema
+ if key is not Extra and
+ ((self.required and not isinstance(key, (Optional, Remove))) or
+ isinstance(key, Required)))
+
+ # Keys that may have defaults
+ all_default_keys = set(key for key in schema
+ if isinstance(key, Required) or
+ isinstance(key, Optional))
+
+ _compiled_schema = {}
+ for skey, svalue in iteritems(schema):
+ new_key = self._compile(skey)
+ new_value = self._compile(svalue)
+ _compiled_schema[skey] = (new_key, new_value)
+
+ candidates = list(_iterate_mapping_candidates(_compiled_schema))
+
+ # After we have the list of candidates in the correct order, we want to apply some optimization so that each
+ # key in the data being validated will be matched against the relevant schema keys only.
+ # No point in matching against different keys
+ additional_candidates = []
+ candidates_by_key = {}
+ for skey, (ckey, cvalue) in candidates:
+ if type(skey) in primitive_types:
+ candidates_by_key.setdefault(skey, []).append((skey, (ckey, cvalue)))
+ elif isinstance(skey, Marker) and type(skey.schema) in primitive_types:
+ candidates_by_key.setdefault(skey.schema, []).append((skey, (ckey, cvalue)))
+ else:
+ # These are wildcards such as 'int', 'str', 'Remove' and others which should be applied to all keys
+ additional_candidates.append((skey, (ckey, cvalue)))
+
+ def validate_mapping(path, iterable, out):
+ required_keys = all_required_keys.copy()
+
+ # Build a map of all provided key-value pairs.
+ # The type(out) is used to retain ordering in case an ordered
+ # map type is provided as input.
+ key_value_map = type(out)()
+ for key, value in iterable:
+ key_value_map[key] = value
+
+ # Insert default values for non-existing keys.
+ for key in all_default_keys:
+ if not isinstance(key.default, Undefined) and \
+ key.schema not in key_value_map:
+ # A default value has been specified for this missing
+ # key, insert it.
+ key_value_map[key.schema] = key.default()
+
+ error = None
+ errors = []
+ for key, value in key_value_map.items():
+ key_path = path + [key]
+ remove_key = False
+
+ # Optimization. Validate against the matching key first, then fallback to the rest
+ relevant_candidates = itertools.chain(candidates_by_key.get(key, []), additional_candidates)
+
+ # compare each given key/value against all compiled key/values
+ # schema key, (compiled key, compiled value)
+ for skey, (ckey, cvalue) in relevant_candidates:
+ try:
+ new_key = ckey(key_path, key)
+ except er.Invalid as e:
+ if len(e.path) > len(key_path):
+ raise
+ if not error or len(e.path) > len(error.path):
+ error = e
+ continue
+ # Backtracking is not performed once a key is selected, so if
+ # the value is invalid we immediately throw an exception.
+ exception_errors = []
+ # check if the key is marked for removal
+ is_remove = new_key is Remove
+ try:
+ cval = cvalue(key_path, value)
+ # include if it's not marked for removal
+ if not is_remove:
+ out[new_key] = cval
+ else:
+ remove_key = True
+ continue
+ except er.MultipleInvalid as e:
+ exception_errors.extend(e.errors)
+ except er.Invalid as e:
+ exception_errors.append(e)
+
+ if exception_errors:
+ if is_remove or remove_key:
+ continue
+ for err in exception_errors:
+ if len(err.path) <= len(key_path):
+ err.error_type = invalid_msg
+ errors.append(err)
+ # If there is a validation error for a required
+ # key, this means that the key was provided.
+ # Discard the required key so it does not
+ # create an additional, noisy exception.
+ required_keys.discard(skey)
+ break
+
+ # Key and value okay, mark as found in case it was
+ # a Required() field.
+ required_keys.discard(skey)
+
+ break
+ else:
+ if remove_key:
+ # remove key
+ continue
+ elif self.extra == ALLOW_EXTRA:
+ out[key] = value
+ elif self.extra != REMOVE_EXTRA:
+ errors.append(er.Invalid('extra keys not allowed', key_path))
+ # else REMOVE_EXTRA: ignore the key so it's removed from output
+
+ # for any required keys left that weren't found and don't have defaults:
+ for key in required_keys:
+ msg = key.msg if hasattr(key, 'msg') and key.msg else 'required key not provided'
+ errors.append(er.RequiredFieldInvalid(msg, path + [key]))
+ if errors:
+ raise er.MultipleInvalid(errors)
+
+ return out
+
+ return validate_mapping
+
+ def _compile_object(self, schema):
+ """Validate an object.
+
+ Has the same behavior as dictionary validator but work with object
+ attributes.
+
+ For example:
+
+ >>> class Structure(object):
+ ... def __init__(self, one=None, three=None):
+ ... self.one = one
+ ... self.three = three
+ ...
+ >>> validate = Schema(Object({'one': 'two', 'three': 'four'}, cls=Structure))
+ >>> with raises(er.MultipleInvalid, "not a valid value for object value @ data['one']"):
+ ... validate(Structure(one='three'))
+
+ """
+ base_validate = self._compile_mapping(
+ schema, invalid_msg='object value')
+
+ def validate_object(path, data):
+ if schema.cls is not UNDEFINED and not isinstance(data, schema.cls):
+ raise er.ObjectInvalid('expected a {0!r}'.format(schema.cls), path)
+ iterable = _iterate_object(data)
+ iterable = ifilter(lambda item: item[1] is not None, iterable)
+ out = base_validate(path, iterable, {})
+ return type(data)(**out)
+
+ return validate_object
+
+ def _compile_dict(self, schema):
+ """Validate a dictionary.
+
+ A dictionary schema can contain a set of values, or at most one
+ validator function/type.
+
+ A dictionary schema will only validate a dictionary:
+
+ >>> validate = Schema({})
+ >>> with raises(er.MultipleInvalid, 'expected a dictionary'):
+ ... validate([])
+
+ An invalid dictionary value:
+
+ >>> validate = Schema({'one': 'two', 'three': 'four'})
+ >>> with raises(er.MultipleInvalid, "not a valid value for dictionary value @ data['one']"):
+ ... validate({'one': 'three'})
+
+ An invalid key:
+
+ >>> with raises(er.MultipleInvalid, "extra keys not allowed @ data['two']"):
+ ... validate({'two': 'three'})
+
+
+ Validation function, in this case the "int" type:
+
+ >>> validate = Schema({'one': 'two', 'three': 'four', int: str})
+
+ Valid integer input:
+
+ >>> validate({10: 'twenty'})
+ {10: 'twenty'}
+
+ By default, a "type" in the schema (in this case "int") will be used
+ purely to validate that the corresponding value is of that type. It
+ will not Coerce the value:
+
+ >>> with raises(er.MultipleInvalid, "extra keys not allowed @ data['10']"):
+ ... validate({'10': 'twenty'})
+
+ Wrap them in the Coerce() function to achieve this:
+ >>> from voluptuous import Coerce
+ >>> validate = Schema({'one': 'two', 'three': 'four',
+ ... Coerce(int): str})
+ >>> validate({'10': 'twenty'})
+ {10: 'twenty'}
+
+ Custom message for required key
+
+ >>> validate = Schema({Required('one', 'required'): 'two'})
+ >>> with raises(er.MultipleInvalid, "required @ data['one']"):
+ ... validate({})
+
+ (This is to avoid unexpected surprises.)
+
+ Multiple errors for nested field in a dict:
+
+ >>> validate = Schema({
+ ... 'adict': {
+ ... 'strfield': str,
+ ... 'intfield': int
+ ... }
+ ... })
+ >>> try:
+ ... validate({
+ ... 'adict': {
+ ... 'strfield': 123,
+ ... 'intfield': 'one'
+ ... }
+ ... })
+ ... except er.MultipleInvalid as e:
+ ... print(sorted(str(i) for i in e.errors)) # doctest: +NORMALIZE_WHITESPACE
+ ["expected int for dictionary value @ data['adict']['intfield']",
+ "expected str for dictionary value @ data['adict']['strfield']"]
+
+ """
+ base_validate = self._compile_mapping(
+ schema, invalid_msg='dictionary value')
+
+ groups_of_exclusion = {}
+ groups_of_inclusion = {}
+ for node in schema:
+ if isinstance(node, Exclusive):
+ g = groups_of_exclusion.setdefault(node.group_of_exclusion, [])
+ g.append(node)
+ elif isinstance(node, Inclusive):
+ g = groups_of_inclusion.setdefault(node.group_of_inclusion, [])
+ g.append(node)
+
+ def validate_dict(path, data):
+ if not isinstance(data, dict):
+ raise er.DictInvalid('expected a dictionary', path)
+
+ errors = []
+ for label, group in groups_of_exclusion.items():
+ exists = False
+ for exclusive in group:
+ if exclusive.schema in data:
+ if exists:
+ msg = exclusive.msg if hasattr(exclusive, 'msg') and exclusive.msg else \
+ "two or more values in the same group of exclusion '%s'" % label
+ next_path = path + [VirtualPathComponent(label)]
+ errors.append(er.ExclusiveInvalid(msg, next_path))
+ break
+ exists = True
+
+ if errors:
+ raise er.MultipleInvalid(errors)
+
+ for label, group in groups_of_inclusion.items():
+ included = [node.schema in data for node in group]
+ if any(included) and not all(included):
+ msg = "some but not all values in the same group of inclusion '%s'" % label
+ for g in group:
+ if hasattr(g, 'msg') and g.msg:
+ msg = g.msg
+ break
+ next_path = path + [VirtualPathComponent(label)]
+ errors.append(er.InclusiveInvalid(msg, next_path))
+ break
+
+ if errors:
+ raise er.MultipleInvalid(errors)
+
+ out = data.__class__()
+ return base_validate(path, iteritems(data), out)
+
+ return validate_dict
+
+ def _compile_sequence(self, schema, seq_type):
+ """Validate a sequence type.
+
+ This is a sequence of valid values or validators tried in order.
+
+ >>> validator = Schema(['one', 'two', int])
+ >>> validator(['one'])
+ ['one']
+ >>> with raises(er.MultipleInvalid, 'expected int @ data[0]'):
+ ... validator([3.5])
+ >>> validator([1])
+ [1]
+ """
+ _compiled = [self._compile(s) for s in schema]
+ seq_type_name = seq_type.__name__
+
+ def validate_sequence(path, data):
+ if not isinstance(data, seq_type):
+ raise er.SequenceTypeInvalid('expected a %s' % seq_type_name, path)
+
+ # Empty seq schema, allow any data.
+ if not schema:
+ if data:
+ raise er.MultipleInvalid([
+ er.ValueInvalid('not a valid value', [value]) for value in data
+ ])
+ return data
+
+ out = []
+ invalid = None
+ errors = []
+ index_path = UNDEFINED
+ for i, value in enumerate(data):
+ index_path = path + [i]
+ invalid = None
+ for validate in _compiled:
+ try:
+ cval = validate(index_path, value)
+ if cval is not Remove: # do not include Remove values
+ out.append(cval)
+ break
+ except er.Invalid as e:
+ if len(e.path) > len(index_path):
+ raise
+ invalid = e
+ else:
+ errors.append(invalid)
+ if errors:
+ raise er.MultipleInvalid(errors)
+
+ if _isnamedtuple(data):
+ return type(data)(*out)
+ else:
+ return type(data)(out)
+
+ return validate_sequence
+
+ def _compile_tuple(self, schema):
+ """Validate a tuple.
+
+ A tuple is a sequence of valid values or validators tried in order.
+
+ >>> validator = Schema(('one', 'two', int))
+ >>> validator(('one',))
+ ('one',)
+ >>> with raises(er.MultipleInvalid, 'expected int @ data[0]'):
+ ... validator((3.5,))
+ >>> validator((1,))
+ (1,)
+ """
+ return self._compile_sequence(schema, tuple)
+
+ def _compile_list(self, schema):
+ """Validate a list.
+
+ A list is a sequence of valid values or validators tried in order.
+
+ >>> validator = Schema(['one', 'two', int])
+ >>> validator(['one'])
+ ['one']
+ >>> with raises(er.MultipleInvalid, 'expected int @ data[0]'):
+ ... validator([3.5])
+ >>> validator([1])
+ [1]
+ """
+ return self._compile_sequence(schema, list)
+
+ def _compile_set(self, schema):
+ """Validate a set.
+
+ A set is an unordered collection of unique elements.
+
+ >>> validator = Schema({int})
+ >>> validator(set([42])) == set([42])
+ True
+ >>> with raises(er.Invalid, 'expected a set'):
+ ... validator(42)
+ >>> with raises(er.MultipleInvalid, 'invalid value in set'):
+ ... validator(set(['a']))
+ """
+ type_ = type(schema)
+ type_name = type_.__name__
+
+ def validate_set(path, data):
+ if not isinstance(data, type_):
+ raise er.Invalid('expected a %s' % type_name, path)
+
+ _compiled = [self._compile(s) for s in schema]
+ errors = []
+ for value in data:
+ for validate in _compiled:
+ try:
+ validate(path, value)
+ break
+ except er.Invalid:
+ pass
+ else:
+ invalid = er.Invalid('invalid value in %s' % type_name, path)
+ errors.append(invalid)
+
+ if errors:
+ raise er.MultipleInvalid(errors)
+
+ return data
+
+ return validate_set
+
+ def extend(self, schema, required=None, extra=None):
+ """Create a new `Schema` by merging this and the provided `schema`.
+
+ Neither this `Schema` nor the provided `schema` are modified. The
+ resulting `Schema` inherits the `required` and `extra` parameters of
+ this, unless overridden.
+
+ Both schemas must be dictionary-based.
+
+ :param schema: dictionary to extend this `Schema` with
+ :param required: if set, overrides `required` of this `Schema`
+ :param extra: if set, overrides `extra` of this `Schema`
+ """
+
+ assert type(self.schema) == dict and type(schema) == dict, 'Both schemas must be dictionary-based'
+
+ result = self.schema.copy()
+
+        # returns the key that may have been passed as argument to the Marker constructor
+ def key_literal(key):
+ return (key.schema if isinstance(key, Marker) else key)
+
+ # build a map that takes the key literals to the needed objects
+ # literal -> Required|Optional|literal
+ result_key_map = dict((key_literal(key), key) for key in result)
+
+ # for each item in the extension schema, replace duplicates
+ # or add new keys
+ for key, value in iteritems(schema):
+
+ # if the key is already in the dictionary, we need to replace it
+ # transform key to literal before checking presence
+ if key_literal(key) in result_key_map:
+
+ result_key = result_key_map[key_literal(key)]
+ result_value = result[result_key]
+
+ # if both are dictionaries, we need to extend recursively
+ # create the new extended sub schema, then remove the old key and add the new one
+ if type(result_value) == dict and type(value) == dict:
+ new_value = Schema(result_value).extend(value).schema
+ del result[result_key]
+ result[key] = new_value
+ # one or the other or both are not sub-schemas, simple replacement is fine
+ # remove old key and add new one
+ else:
+ del result[result_key]
+ result[key] = value
+
+ # key is new and can simply be added
+ else:
+ result[key] = value
+
+        # recompile the merged schema, carrying over required/extra unless overridden
+ result_required = (required if required is not None else self.required)
+ result_extra = (extra if extra is not None else self.extra)
+ return Schema(result, required=result_required, extra=result_extra)
+
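+# A minimal sketch of Schema.extend usage (illustrative only, assuming the
+# dictionary-based schemas required above); keys from both schemas are merged
+# and required/extra carry over unless overridden:
+#
+#     base = Schema({'name': str}, required=True)
+#     extended = base.extend({'age': int})
+#     extended({'name': 'spam', 'age': 3})  # -> {'name': 'spam', 'age': 3}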
+
+def _compile_scalar(schema):
+ """A scalar value.
+
+ The schema can either be a value or a type.
+
+ >>> _compile_scalar(int)([], 1)
+ 1
+ >>> with raises(er.Invalid, 'expected float'):
+ ... _compile_scalar(float)([], '1')
+
+    Callables are called with the value, and their return value is used:
+ >>> _compile_scalar(lambda v: float(v))([], '1')
+ 1.0
+
+ As a convenience, ValueError's are trapped:
+
+ >>> with raises(er.Invalid, 'not a valid value'):
+ ... _compile_scalar(lambda v: float(v))([], 'a')
+ """
+ if inspect.isclass(schema):
+ def validate_instance(path, data):
+ if isinstance(data, schema):
+ return data
+ else:
+ msg = 'expected %s' % schema.__name__
+ raise er.TypeInvalid(msg, path)
+
+ return validate_instance
+
+ if callable(schema):
+ def validate_callable(path, data):
+ try:
+ return schema(data)
+ except ValueError as e:
+ raise er.ValueInvalid('not a valid value', path)
+ except er.Invalid as e:
+ e.prepend(path)
+ raise
+
+ return validate_callable
+
+ def validate_value(path, data):
+ if data != schema:
+ raise er.ScalarInvalid('not a valid value', path)
+ return data
+
+ return validate_value
+
+
+def _compile_itemsort():
+    '''Return a sort key function for ordering mapping items.'''
+
+ def is_extra(key_):
+ return key_ is Extra
+
+ def is_remove(key_):
+ return isinstance(key_, Remove)
+
+ def is_marker(key_):
+ return isinstance(key_, Marker)
+
+ def is_type(key_):
+ return inspect.isclass(key_)
+
+ def is_callable(key_):
+ return callable(key_)
+
+ # priority list for map sorting (in order of checking)
+ # We want Extra to match last, because it's a catch-all. On the other hand,
+    # Remove markers should match first (invalid values will not raise an
+    # Error; instead the validator will check whether other schemas match
+    # the same value).
+ priority = [(1, is_remove), # Remove highest priority after values
+ (2, is_marker), # then other Markers
+ (4, is_type), # types/classes lowest before Extra
+ (3, is_callable), # callables after markers
+ (5, is_extra)] # Extra lowest priority
+
+ def item_priority(item_):
+ key_ = item_[0]
+ for i, check_ in priority:
+ if check_(key_):
+ return i
+        # literal values have the highest priority (they sort first)
+ return 0
+
+ return item_priority
+
+
+_sort_item = _compile_itemsort()
+
+
+def _iterate_mapping_candidates(schema):
+ """Iterate over schema in a meaningful order."""
+ # Without this, Extra might appear first in the iterator, and fail to
+ # validate a key even though it's a Required that has its own validation,
+ # generating a false positive.
+ return sorted(iteritems(schema), key=_sort_item)
+
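+# Illustrative ordering sketch (not upstream documentation; assumes the
+# priorities assigned in _compile_itemsort): for a schema such as
+#
+#     {Extra: object, Remove('x'): int, 'name': str}
+#
+# the candidates come back as [('name', str), (Remove('x'), int), (Extra, object)],
+# i.e. literal keys first, then Remove markers, with the catch-all Extra last.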
+
+def _iterate_object(obj):
+ """Return iterator over object attributes. Respect objects with
+ defined __slots__.
+
+ """
+ d = {}
+ try:
+ d = vars(obj)
+ except TypeError:
+ # maybe we have named tuple here?
+ if hasattr(obj, '_asdict'):
+ d = obj._asdict()
+ for item in iteritems(d):
+ yield item
+ try:
+ slots = obj.__slots__
+ except AttributeError:
+ pass
+ else:
+ for key in slots:
+ if key != '__dict__':
+ yield (key, getattr(obj, key))
+
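+# Illustrative sketch (assumption, not upstream documentation): for a
+# __slots__-only instance such as
+#
+#     class Point(object):
+#         __slots__ = ['x']
+#         def __init__(self):
+#             self.x = 1
+#
+# vars() raises TypeError, so _iterate_object(Point()) falls back to
+# __slots__ and yields ('x', 1) via getattr().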
+
+class Msg(object):
+ """Report a user-friendly message if a schema fails to validate.
+
+ >>> validate = Schema(
+ ... Msg(['one', 'two', int],
+ ... 'should be one of "one", "two" or an integer'))
+ >>> with raises(er.MultipleInvalid, 'should be one of "one", "two" or an integer'):
+ ... validate(['three'])
+
+ Messages are only applied to invalid direct descendants of the schema:
+
+ >>> validate = Schema(Msg([['one', 'two', int]], 'not okay!'))
+ >>> with raises(er.MultipleInvalid, 'expected int @ data[0][0]'):
+ ... validate([['three']])
+
+    The exception type that is raised can be overridden, but it needs to be a subclass of Invalid:
+
+    >>> with raises(er.SchemaError, 'Msg can only use subclasses of Invalid as custom class'):
+ ... validate = Schema(Msg([int], 'should be int', cls=KeyError))
+
+ If you do use a subclass of Invalid, that error will be thrown (wrapped in a MultipleInvalid)
+
+ >>> validate = Schema(Msg([['one', 'two', int]], 'not okay!', cls=er.RangeInvalid))
+ >>> try:
+ ... validate(['three'])
+ ... except er.MultipleInvalid as e:
+ ... assert isinstance(e.errors[0], er.RangeInvalid)
+ """
+
+ def __init__(self, schema, msg, cls=None):
+ if cls and not issubclass(cls, er.Invalid):
+            raise er.SchemaError("Msg can only use subclasses of"
+                                 " Invalid as custom class")
+ self._schema = schema
+ self.schema = Schema(schema)
+ self.msg = msg
+ self.cls = cls
+
+ def __call__(self, v):
+ try:
+ return self.schema(v)
+ except er.Invalid as e:
+ if len(e.path) > 1:
+ raise e
+ else:
+ raise (self.cls or er.Invalid)(self.msg)
+
+ def __repr__(self):
+ return 'Msg(%s, %s, cls=%s)' % (self._schema, self.msg, self.cls)
+
+
+class Object(dict):
+ """Indicate that we should work with attributes, not keys."""
+
+ def __init__(self, schema, cls=UNDEFINED):
+ self.cls = cls
+ super(Object, self).__init__(schema)
+
+
+class VirtualPathComponent(str):
+ def __str__(self):
+ return '<' + self + '>'
+
+ def __repr__(self):
+ return self.__str__()
+
+
+# Markers.py
+
+
+class Marker(object):
+ """Mark nodes for special treatment."""
+
+ def __init__(self, schema_, msg=None, description=None):
+ self.schema = schema_
+ self._schema = Schema(schema_)
+ self.msg = msg
+ self.description = description
+
+ def __call__(self, v):
+ try:
+ return self._schema(v)
+ except er.Invalid as e:
+ if not self.msg or len(e.path) > 1:
+ raise
+ raise er.Invalid(self.msg)
+
+ def __str__(self):
+ return str(self.schema)
+
+ def __repr__(self):
+ return repr(self.schema)
+
+ def __lt__(self, other):
+ if isinstance(other, Marker):
+ return self.schema < other.schema
+ return self.schema < other
+
+ def __hash__(self):
+ return hash(self.schema)
+
+ def __eq__(self, other):
+ return self.schema == other
+
+ def __ne__(self, other):
+ return not(self.schema == other)
+
+
+class Optional(Marker):
+ """Mark a node in the schema as optional, and optionally provide a default
+
+ >>> schema = Schema({Optional('key'): str})
+ >>> schema({})
+ {}
+ >>> schema = Schema({Optional('key', default='value'): str})
+ >>> schema({})
+ {'key': 'value'}
+ >>> schema = Schema({Optional('key', default=list): list})
+ >>> schema({})
+ {'key': []}
+
+    If the 'required' flag is set for an entire schema, optional keys aren't required:
+
+ >>> schema = Schema({
+ ... Optional('key'): str,
+ ... 'key2': str
+ ... }, required=True)
+ >>> schema({'key2':'value'})
+ {'key2': 'value'}
+ """
+
+ def __init__(self, schema, msg=None, default=UNDEFINED, description=None):
+ super(Optional, self).__init__(schema, msg=msg,
+ description=description)
+ self.default = default_factory(default)
+
+
+class Exclusive(Optional):
+ """Mark a node in the schema as exclusive.
+
+ Exclusive keys inherited from Optional:
+
+ >>> schema = Schema({Exclusive('alpha', 'angles'): int, Exclusive('beta', 'angles'): int})
+ >>> schema({'alpha': 30})
+ {'alpha': 30}
+
+    Keys in the same group of exclusion cannot be present together; this only makes sense for dictionaries:
+
+ >>> with raises(er.MultipleInvalid, "two or more values in the same group of exclusion 'angles' @ data[<angles>]"):
+ ... schema({'alpha': 30, 'beta': 45})
+
+    For example, an API can provide multiple types of authentication, but only one may be used at a time:
+
+ >>> msg = 'Please, use only one type of authentication at the same time.'
+ >>> schema = Schema({
+ ... Exclusive('classic', 'auth', msg=msg):{
+ ... Required('email'): basestring,
+ ... Required('password'): basestring
+ ... },
+ ... Exclusive('internal', 'auth', msg=msg):{
+ ... Required('secret_key'): basestring
+ ... },
+ ... Exclusive('social', 'auth', msg=msg):{
+ ... Required('social_network'): basestring,
+ ... Required('token'): basestring
+ ... }
+ ... })
+
+ >>> with raises(er.MultipleInvalid, "Please, use only one type of authentication at the same time. @ data[<auth>]"):
+ ... schema({'classic': {'email': 'foo@example.com', 'password': 'bar'},
+ ... 'social': {'social_network': 'barfoo', 'token': 'tEMp'}})
+ """
+
+ def __init__(self, schema, group_of_exclusion, msg=None, description=None):
+ super(Exclusive, self).__init__(schema, msg=msg,
+ description=description)
+ self.group_of_exclusion = group_of_exclusion
+
+
+class Inclusive(Optional):
+ """ Mark a node in the schema as inclusive.
+
+ Inclusive keys inherited from Optional:
+
+ >>> schema = Schema({
+ ... Inclusive('filename', 'file'): str,
+ ... Inclusive('mimetype', 'file'): str
+ ... })
+ >>> data = {'filename': 'dog.jpg', 'mimetype': 'image/jpeg'}
+ >>> data == schema(data)
+ True
+
+    Keys in the same group of inclusion must exist together; this only makes sense for dictionaries:
+
+ >>> with raises(er.MultipleInvalid, "some but not all values in the same group of inclusion 'file' @ data[<file>]"):
+ ... schema({'filename': 'dog.jpg'})
+
+ If none of the keys in the group are present, it is accepted:
+
+ >>> schema({})
+ {}
+
+    For example, an API can return 'height' and 'width' together, but not separately.
+
+ >>> msg = "Height and width must exist together"
+ >>> schema = Schema({
+ ... Inclusive('height', 'size', msg=msg): int,
+ ... Inclusive('width', 'size', msg=msg): int
+ ... })
+
+ >>> with raises(er.MultipleInvalid, msg + " @ data[<size>]"):
+ ... schema({'height': 100})
+
+ >>> with raises(er.MultipleInvalid, msg + " @ data[<size>]"):
+ ... schema({'width': 100})
+
+ >>> data = {'height': 100, 'width': 100}
+ >>> data == schema(data)
+ True
+ """
+
+ def __init__(self, schema, group_of_inclusion, msg=None):
+ super(Inclusive, self).__init__(schema, msg=msg)
+ self.group_of_inclusion = group_of_inclusion
+
+
+class Required(Marker):
+ """Mark a node in the schema as being required, and optionally provide a default value.
+
+ >>> schema = Schema({Required('key'): str})
+ >>> with raises(er.MultipleInvalid, "required key not provided @ data['key']"):
+ ... schema({})
+
+ >>> schema = Schema({Required('key', default='value'): str})
+ >>> schema({})
+ {'key': 'value'}
+ >>> schema = Schema({Required('key', default=list): list})
+ >>> schema({})
+ {'key': []}
+ """
+
+ def __init__(self, schema, msg=None, default=UNDEFINED, description=None):
+ super(Required, self).__init__(schema, msg=msg,
+ description=description)
+ self.default = default_factory(default)
+
+
+class Remove(Marker):
+ """Mark a node in the schema to be removed and excluded from the validated
+ output. Keys that fail validation will not raise ``Invalid``. Instead, these
+ keys will be treated as extras.
+
+ >>> schema = Schema({str: int, Remove(int): str})
+ >>> with raises(er.MultipleInvalid, "extra keys not allowed @ data[1]"):
+ ... schema({'keep': 1, 1: 1.0})
+ >>> schema({1: 'red', 'red': 1, 2: 'green'})
+ {'red': 1}
+ >>> schema = Schema([int, Remove(float), Extra])
+ >>> schema([1, 2, 3, 4.0, 5, 6.0, '7'])
+ [1, 2, 3, 5, '7']
+ """
+
+ def __call__(self, v):
+ super(Remove, self).__call__(v)
+ return self.__class__
+
+ def __repr__(self):
+ return "Remove(%r)" % (self.schema,)
+
+ def __hash__(self):
+ return object.__hash__(self)
+
+
+def message(default=None, cls=None):
+ """Convenience decorator to allow functions to provide a message.
+
+ Set a default message:
+
+ >>> @message('not an integer')
+ ... def isint(v):
+ ... return int(v)
+
+ >>> validate = Schema(isint())
+ >>> with raises(er.MultipleInvalid, 'not an integer'):
+ ... validate('a')
+
+ The message can be overridden on a per validator basis:
+
+ >>> validate = Schema(isint('bad'))
+ >>> with raises(er.MultipleInvalid, 'bad'):
+ ... validate('a')
+
+    The raised exception class can be overridden too:
+
+ >>> class IntegerInvalid(er.Invalid): pass
+ >>> validate = Schema(isint('bad', clsoverride=IntegerInvalid))
+ >>> try:
+ ... validate('a')
+ ... except er.MultipleInvalid as e:
+ ... assert isinstance(e.errors[0], IntegerInvalid)
+ """
+ if cls and not issubclass(cls, er.Invalid):
+        raise er.SchemaError("message can only use subclasses of Invalid as custom class")
+
+ def decorator(f):
+ @wraps(f)
+ def check(msg=None, clsoverride=None):
+ @wraps(f)
+ def wrapper(*args, **kwargs):
+ try:
+ return f(*args, **kwargs)
+ except ValueError:
+ raise (clsoverride or cls or er.ValueInvalid)(msg or default or 'invalid value')
+
+ return wrapper
+
+ return check
+
+ return decorator
+
+
+def _args_to_dict(func, args):
+    """Return the function's argument names and the supplied values as key-value pairs."""
+ if sys.version_info >= (3, 0):
+ arg_count = func.__code__.co_argcount
+ arg_names = func.__code__.co_varnames[:arg_count]
+ else:
+ arg_count = func.func_code.co_argcount
+ arg_names = func.func_code.co_varnames[:arg_count]
+
+ arg_value_list = list(args)
+ arguments = dict((arg_name, arg_value_list[i])
+ for i, arg_name in enumerate(arg_names)
+ if i < len(arg_value_list))
+ return arguments
+
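+# Illustrative sketch (assumption, not upstream documentation): given
+#
+#     def foo(a, b):
+#         pass
+#
+# _args_to_dict(foo, (1, 2)) pairs positional values with parameter names and
+# returns {'a': 1, 'b': 2}; positional values beyond the named parameters are
+# dropped.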
+
+def _merge_args_with_kwargs(args_dict, kwargs_dict):
+ """Merge args with kwargs."""
+ ret = args_dict.copy()
+ ret.update(kwargs_dict)
+ return ret
+
+
+def validate(*a, **kw):
+ """Decorator for validating arguments of a function against a given schema.
+
+ Set restrictions for arguments:
+
+ >>> @validate(arg1=int, arg2=int)
+ ... def foo(arg1, arg2):
+ ... return arg1 * arg2
+
+ Set restriction for returned value:
+
+ >>> @validate(arg=int, __return__=int)
+ ... def bar(arg1):
+ ... return arg1 * 2
+
+ """
+ RETURNS_KEY = '__return__'
+
+ def validate_schema_decorator(func):
+
+ returns_defined = False
+ returns = None
+
+ schema_args_dict = _args_to_dict(func, a)
+ schema_arguments = _merge_args_with_kwargs(schema_args_dict, kw)
+
+ if RETURNS_KEY in schema_arguments:
+ returns_defined = True
+ returns = schema_arguments[RETURNS_KEY]
+ del schema_arguments[RETURNS_KEY]
+
+ input_schema = (Schema(schema_arguments, extra=ALLOW_EXTRA)
+ if len(schema_arguments) != 0 else lambda x: x)
+ output_schema = Schema(returns) if returns_defined else lambda x: x
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ args_dict = _args_to_dict(func, args)
+ arguments = _merge_args_with_kwargs(args_dict, kwargs)
+ validated_arguments = input_schema(arguments)
+ output = func(**validated_arguments)
+ return output_schema(output)
+
+ return func_wrapper
+
+ return validate_schema_decorator
diff --git a/third_party/python/voluptuous/voluptuous/tests/__init__.py b/third_party/python/voluptuous/voluptuous/tests/__init__.py
new file mode 100644
index 0000000000..f29719c726
--- /dev/null
+++ b/third_party/python/voluptuous/voluptuous/tests/__init__.py
@@ -0,0 +1 @@
+__author__ = 'tusharmakkar08'
diff --git a/third_party/python/voluptuous/voluptuous/tests/tests.md b/third_party/python/voluptuous/voluptuous/tests/tests.md
new file mode 100644
index 0000000000..5ba97ab64b
--- /dev/null
+++ b/third_party/python/voluptuous/voluptuous/tests/tests.md
@@ -0,0 +1,273 @@
+Error reporting should be accurate:
+
+ >>> from voluptuous import *
+ >>> schema = Schema(['one', {'two': 'three', 'four': ['five'],
+ ... 'six': {'seven': 'eight'}}])
+ >>> schema(['one'])
+ ['one']
+ >>> schema([{'two': 'three'}])
+ [{'two': 'three'}]
+
+It should show the exact index and container type, in this case a list
+value:
+
+ >>> try:
+ ... schema(['one', 'two'])
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == 'expected a dictionary @ data[1]'
+ True
+
+It should also be accurate for nested values:
+
+ >>> try:
+ ... schema([{'two': 'nine'}])
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "not a valid value for dictionary value @ data[0]['two']"
+
+ >>> try:
+ ... schema([{'four': ['nine']}])
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "not a valid value @ data[0]['four'][0]"
+
+ >>> try:
+ ... schema([{'six': {'seven': 'nine'}}])
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "not a valid value for dictionary value @ data[0]['six']['seven']"
+
+Errors should be reported depth-first:
+
+ >>> validate = Schema({'one': {'two': 'three', 'four': 'five'}})
+ >>> try:
+ ... validate({'one': {'four': 'six'}})
+ ... except Invalid as e:
+ ... print(e)
+ ... print(e.path)
+ not a valid value for dictionary value @ data['one']['four']
+ ['one', 'four']
+
+Voluptuous supports validation when extra fields are present in the
+data:
+
+ >>> schema = Schema({'one': 1, Extra: object})
+ >>> schema({'two': 'two', 'one': 1}) == {'two': 'two', 'one': 1}
+ True
+ >>> schema = Schema({'one': 1})
+ >>> try:
+ ... schema({'two': 2})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "extra keys not allowed @ data['two']"
+
+dict, list, and tuple should be available as type validators:
+
+ >>> Schema(dict)({'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
+ True
+ >>> Schema(list)([1,2,3])
+ [1, 2, 3]
+ >>> Schema(tuple)((1,2,3))
+ (1, 2, 3)
+
+Validation should return instances of the right types when the types are
+subclasses of dict or list:
+
+ >>> class Dict(dict):
+ ... pass
+ >>>
+ >>> d = Schema(dict)(Dict(a=1, b=2))
+ >>> d == {'a': 1, 'b': 2}
+ True
+ >>> type(d) is Dict
+ True
+ >>> class List(list):
+ ... pass
+ >>>
+ >>> l = Schema(list)(List([1,2,3]))
+ >>> l
+ [1, 2, 3]
+ >>> type(l) is List
+ True
+
+Multiple errors are reported:
+
+ >>> schema = Schema({'one': 1, 'two': 2})
+ >>> try:
+ ... schema({'one': 2, 'two': 3, 'three': 4})
+ ... except MultipleInvalid as e:
+ ... errors = sorted(e.errors, key=lambda k: str(k))
+ ... print([str(i) for i in errors]) # doctest: +NORMALIZE_WHITESPACE
+ ["extra keys not allowed @ data['three']",
+ "not a valid value for dictionary value @ data['one']",
+ "not a valid value for dictionary value @ data['two']"]
+ >>> schema = Schema([[1], [2], [3]])
+ >>> try:
+ ... schema([1, 2, 3])
+ ... except MultipleInvalid as e:
+ ... print([str(i) for i in e.errors]) # doctest: +NORMALIZE_WHITESPACE
+ ['expected a list @ data[0]',
+ 'expected a list @ data[1]',
+ 'expected a list @ data[2]']
+
+Required fields in a dictionary which are invalid should not report a 'required' error:
+
+ >>> from voluptuous import *
+ >>> schema = Schema({'one': {'two': 3}}, required=True)
+ >>> try:
+ ... schema({'one': {'two': 2}})
+ ... except MultipleInvalid as e:
+ ... errors = e.errors
+ >>> 'required' in ' '.join([x.msg for x in errors])
+ False
+
+Multiple errors for nested fields in dicts and objects:
+
+    >>> from collections import namedtuple
+    >>> validate = Schema({
+    ...   'anobject': Object({
+    ...     'strfield': str,
+    ...     'intfield': int
+    ...   })
+    ... })
+    >>> try:
+    ...   SomeObj = namedtuple('SomeObj', ('strfield', 'intfield'))
+    ...   validate({'anobject': SomeObj(strfield=123, intfield='one')})
+    ... except MultipleInvalid as e:
+    ...   print(sorted(str(i) for i in e.errors))  # doctest: +NORMALIZE_WHITESPACE
+    ["expected int for object value @ data['anobject']['intfield']",
+     "expected str for object value @ data['anobject']['strfield']"]
+
+Custom classes validate as schemas:
+
+ >>> class Thing(object):
+ ... pass
+ >>> schema = Schema(Thing)
+ >>> t = schema(Thing())
+ >>> type(t) is Thing
+ True
+
+Classes with custom metaclasses should validate as schemas:
+
+ >>> class MyMeta(type):
+ ... pass
+ >>> class Thing(object):
+ ... __metaclass__ = MyMeta
+ >>> schema = Schema(Thing)
+ >>> t = schema(Thing())
+ >>> type(t) is Thing
+ True
+
+Schemas built with All() should give the same error as the original
+validator (Issue #26):
+
+ >>> schema = Schema({
+ ... Required('items'): All([{
+ ... Required('foo'): str
+ ... }])
+ ... })
+
+ >>> try:
+ ... schema({'items': [{}]})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "required key not provided @ data['items'][0]['foo']"
+
+The validator should return an instance of the same type for objects:
+
+ >>> class Structure(object):
+ ... def __init__(self, q=None):
+ ... self.q = q
+ ... def __repr__(self):
+ ... return '{0.__name__}(q={1.q!r})'.format(type(self), self)
+ ...
+ >>> schema = Schema(Object({'q': 'one'}, cls=Structure))
+ >>> type(schema(Structure(q='one'))) is Structure
+ True
+
+The Object validator should treat the cls argument as optional. In that case
+it shouldn't check the object type:
+
+ >>> from collections import namedtuple
+ >>> NamedTuple = namedtuple('NamedTuple', ('q',))
+ >>> schema = Schema(Object({'q': 'one'}))
+ >>> named = NamedTuple(q='one')
+ >>> schema(named) == named
+ True
+ >>> schema(named)
+ NamedTuple(q='one')
+
+If a cls argument is passed to the Object validator, the object type should be checked:
+
+ >>> schema = Schema(Object({'q': 'one'}, cls=Structure))
+ >>> schema(NamedTuple(q='one')) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ MultipleInvalid: expected a <class 'Structure'>
+ >>> schema = Schema(Object({'q': 'one'}, cls=NamedTuple))
+ >>> schema(NamedTuple(q='one'))
+ NamedTuple(q='one')
+
+Ensure that objects with `__slots__` are supported properly:
+
+ >>> class SlotsStructure(Structure):
+ ... __slots__ = ['q']
+ ...
+ >>> schema = Schema(Object({'q': 'one'}))
+ >>> schema(SlotsStructure(q='one'))
+ SlotsStructure(q='one')
+ >>> class DictStructure(object):
+ ... __slots__ = ['q', '__dict__']
+ ... def __init__(self, q=None, page=None):
+ ... self.q = q
+ ... self.page = page
+ ... def __repr__(self):
+ ... return '{0.__name__}(q={1.q!r}, page={1.page!r})'.format(type(self), self)
+ ...
+ >>> structure = DictStructure(q='one')
+ >>> structure.page = 1
+ >>> try:
+ ... schema(structure)
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "extra keys not allowed @ data['page']"
+
+ >>> schema = Schema(Object({'q': 'one', Extra: object}))
+ >>> schema(structure)
+ DictStructure(q='one', page=1)
+
+Ensure that objects can be used with other validators:
+
+ >>> schema = Schema({'meta': Object({'q': 'one'})})
+ >>> schema({'meta': Structure(q='one')})
+ {'meta': Structure(q='one')}
+
+Ensure that subclasses of Invalid are raised as is:
+
+ >>> class SpecialInvalid(Invalid):
+ ... pass
+ ...
+ >>> def custom_validator(value):
+ ... raise SpecialInvalid('boom')
+ ...
+ >>> schema = Schema({'thing': custom_validator})
+ >>> try:
+ ... schema({'thing': 'not an int'})
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> exc.errors[0].__class__.__name__
+ 'SpecialInvalid'
+
+Ensure that Optional('Classification') < 'Name' will return True instead of throwing an AttributeError
+
+ >>> Optional('Classification') < 'Name'
+ True
diff --git a/third_party/python/voluptuous/voluptuous/tests/tests.py b/third_party/python/voluptuous/voluptuous/tests/tests.py
new file mode 100644
index 0000000000..fa44fbf770
--- /dev/null
+++ b/third_party/python/voluptuous/voluptuous/tests/tests.py
@@ -0,0 +1,1265 @@
+import copy
+import collections
+import os
+import sys
+
+from nose.tools import assert_equal, assert_false, assert_raises, assert_true
+
+from voluptuous import (
+ Schema, Required, Exclusive, Optional, Extra, Invalid, In, Remove, Literal,
+ Url, MultipleInvalid, LiteralInvalid, TypeInvalid, NotIn, Match, Email,
+ Replace, Range, Coerce, All, Any, Length, FqdnUrl, ALLOW_EXTRA, PREVENT_EXTRA,
+ validate, ExactSequence, Equal, Unordered, Number, Maybe, Datetime, Date,
+ Contains, Marker, IsDir, IsFile, PathExists, SomeOf, TooManyValid, Self,
+ raises)
+from voluptuous.humanize import humanize_error
+from voluptuous.util import u
+
+
+def test_exact_sequence():
+ schema = Schema(ExactSequence([int, int]))
+ try:
+ schema([1, 2, 3])
+ except Invalid:
+ assert True
+ else:
+ assert False, "Did not raise Invalid"
+ assert_equal(schema([1, 2]), [1, 2])
+
+
+def test_required():
+ """Verify that Required works."""
+ schema = Schema({Required('q'): 1})
+    # Can't use nose's raises (we need to access the raised exception),
+    # nor assert_raises (which fails with Python 2.6.9).
+ try:
+ schema({})
+ except Invalid as e:
+ assert_equal(str(e), "required key not provided @ data['q']")
+ else:
+ assert False, "Did not raise Invalid"
+
+
+def test_extra_with_required():
+ """Verify that Required does not break Extra."""
+ schema = Schema({Required('toaster'): str, Extra: object})
+ r = schema({'toaster': 'blue', 'another_valid_key': 'another_valid_value'})
+ assert_equal(
+ r, {'toaster': 'blue', 'another_valid_key': 'another_valid_value'})
+
+
+def test_iterate_candidates():
+ """Verify that the order for iterating over mapping candidates is right."""
+ schema = {
+ "toaster": str,
+ Extra: object,
+ }
+ # toaster should be first.
+ from voluptuous.schema_builder import _iterate_mapping_candidates
+ assert_equal(_iterate_mapping_candidates(schema)[0][0], 'toaster')
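+
+
+# Illustrative check (not an upstream test; assumes the candidate ordering
+# produced by _compile_itemsort): the catch-all Extra key should be considered
+# after literal keys.
+def test_iterate_candidates_extra_sorts_after_literals():
+    from voluptuous.schema_builder import _iterate_mapping_candidates
+    schema = {Extra: object, "toaster": str}
+    assert_equal(_iterate_mapping_candidates(schema)[-1][0], Extra)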
+
+
+def test_in():
+ """Verify that In works."""
+ schema = Schema({"color": In(frozenset(["blue", "red", "yellow"]))})
+ schema({"color": "blue"})
+
+
+def test_not_in():
+ """Verify that NotIn works."""
+ schema = Schema({"color": NotIn(frozenset(["blue", "red", "yellow"]))})
+ schema({"color": "orange"})
+ try:
+ schema({"color": "blue"})
+ except Invalid as e:
+ assert_equal(str(e), "value is not allowed for dictionary value @ data['color']")
+ else:
+ assert False, "Did not raise NotInInvalid"
+
+
+def test_contains():
+ """Verify contains validation method."""
+ schema = Schema({'color': Contains('red')})
+ schema({'color': ['blue', 'red', 'yellow']})
+ try:
+ schema({'color': ['blue', 'yellow']})
+ except Invalid as e:
+ assert_equal(str(e),
+ "value is not allowed for dictionary value @ data['color']")
+
+
+def test_remove():
+ """Verify that Remove works."""
+ # remove dict keys
+ schema = Schema({"weight": int,
+ Remove("color"): str,
+ Remove("amount"): int})
+ out_ = schema({"weight": 10, "color": "red", "amount": 1})
+ assert "color" not in out_ and "amount" not in out_
+
+ # remove keys by type
+ schema = Schema({"weight": float,
+ "amount": int,
+                     # remove str keys with int values
+ Remove(str): int,
+ # keep str keys with str values
+ str: str})
+ out_ = schema({"weight": 73.4,
+ "condition": "new",
+ "amount": 5,
+ "left": 2})
+ # amount should stay since it's defined
+ # other string keys with int values will be removed
+ assert "amount" in out_ and "left" not in out_
+ # string keys with string values will stay
+ assert "condition" in out_
+
+ # remove value from list
+ schema = Schema([Remove(1), int])
+ out_ = schema([1, 2, 3, 4, 1, 5, 6, 1, 1, 1])
+ assert_equal(out_, [2, 3, 4, 5, 6])
+
+ # remove values from list by type
+ schema = Schema([1.0, Remove(float), int])
+ out_ = schema([1, 2, 1.0, 2.0, 3.0, 4])
+ assert_equal(out_, [1, 2, 1.0, 4])
+
+
+def test_extra_empty_errors():
+ schema = Schema({'a': {Extra: object}}, required=True)
+ schema({'a': {}})
+
+
+def test_literal():
+ """ test with Literal """
+
+ schema = Schema([Literal({"a": 1}), Literal({"b": 1})])
+ schema([{"a": 1}])
+ schema([{"b": 1}])
+ schema([{"a": 1}, {"b": 1}])
+
+ try:
+ schema([{"c": 1}])
+ except Invalid as e:
+ assert_equal(str(e), "{'c': 1} not match for {'b': 1} @ data[0]")
+ else:
+ assert False, "Did not raise Invalid"
+
+ schema = Schema(Literal({"a": 1}))
+ try:
+ schema({"b": 1})
+ except MultipleInvalid as e:
+ assert_equal(str(e), "{'b': 1} not match for {'a': 1}")
+ assert_equal(len(e.errors), 1)
+ assert_equal(type(e.errors[0]), LiteralInvalid)
+ else:
+ assert False, "Did not raise Invalid"
+
+
+def test_class():
+ class C1(object):
+ pass
+
+ schema = Schema(C1)
+ schema(C1())
+
+ try:
+ schema(None)
+ except MultipleInvalid as e:
+ assert_equal(str(e), "expected C1")
+ assert_equal(len(e.errors), 1)
+ assert_equal(type(e.errors[0]), TypeInvalid)
+ else:
+ assert False, "Did not raise Invalid"
+
+ # In Python 2, this will be an old-style class (classobj instance)
+ class C2:
+ pass
+
+ schema = Schema(C2)
+ schema(C2())
+
+ try:
+ schema(None)
+ except MultipleInvalid as e:
+ assert_equal(str(e), "expected C2")
+ assert_equal(len(e.errors), 1)
+ assert_equal(type(e.errors[0]), TypeInvalid)
+ else:
+ assert False, "Did not raise Invalid"
+
+
+def test_email_validation():
+ """ test with valid email """
+ schema = Schema({"email": Email()})
+ out_ = schema({"email": "example@example.com"})
+
+    assert_equal(out_.get("email"), 'example@example.com')
+
+
+def test_email_validation_with_none():
+ """ test with invalid None Email"""
+ schema = Schema({"email": Email()})
+ try:
+ schema({"email": None})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "expected an Email for dictionary value @ data['email']")
+ else:
+ assert False, "Did not raise Invalid for None url"
+
+
+def test_email_validation_with_empty_string():
+ """ test with empty string Email"""
+ schema = Schema({"email": Email()})
+ try:
+ schema({"email": ''})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "expected an Email for dictionary value @ data['email']")
+ else:
+ assert False, "Did not raise Invalid for empty string url"
+
+
+def test_email_validation_without_host():
+ """ test with empty host name in email """
+ schema = Schema({"email": Email()})
+ try:
+ schema({"email": 'a@.com'})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "expected an Email for dictionary value @ data['email']")
+ else:
+ assert False, "Did not raise Invalid for empty string url"
+
+
+def test_fqdn_url_validation():
+ """ test with valid fully qualified domain name url """
+ schema = Schema({"url": FqdnUrl()})
+ out_ = schema({"url": "http://example.com/"})
+
+    assert_equal(out_.get("url"), 'http://example.com/')
+
+
+def test_fqdn_url_without_domain_name():
+ """ test with invalid fully qualified domain name url """
+ schema = Schema({"url": FqdnUrl()})
+ try:
+ schema({"url": "http://localhost/"})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "expected a Fully qualified domain name URL for dictionary value @ data['url']")
+ else:
+ assert False, "Did not raise Invalid for None url"
+
+
+def test_fqdnurl_validation_with_none():
+ """ test with invalid None FQDN url"""
+ schema = Schema({"url": FqdnUrl()})
+ try:
+ schema({"url": None})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "expected a Fully qualified domain name URL for dictionary value @ data['url']")
+ else:
+ assert False, "Did not raise Invalid for None url"
+
+
+def test_fqdnurl_validation_with_empty_string():
+ """ test with empty string FQDN URL """
+ schema = Schema({"url": FqdnUrl()})
+ try:
+ schema({"url": ''})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "expected a Fully qualified domain name URL for dictionary value @ data['url']")
+ else:
+ assert False, "Did not raise Invalid for empty string url"
+
+
+def test_fqdnurl_validation_without_host():
+ """ test with empty host FQDN URL """
+ schema = Schema({"url": FqdnUrl()})
+ try:
+ schema({"url": 'http://'})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "expected a Fully qualified domain name URL for dictionary value @ data['url']")
+ else:
+ assert False, "Did not raise Invalid for empty string url"
+
+
+def test_url_validation():
+ """ test with valid URL """
+ schema = Schema({"url": Url()})
+ out_ = schema({"url": "http://example.com/"})
+
+    assert_equal(out_.get("url"), 'http://example.com/')
+
+
+def test_url_validation_with_none():
+ """ test with invalid None url"""
+ schema = Schema({"url": Url()})
+ try:
+ schema({"url": None})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "expected a URL for dictionary value @ data['url']")
+ else:
+ assert False, "Did not raise Invalid for None url"
+
+
+def test_url_validation_with_empty_string():
+ """ test with empty string URL """
+ schema = Schema({"url": Url()})
+ try:
+ schema({"url": ''})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "expected a URL for dictionary value @ data['url']")
+ else:
+ assert False, "Did not raise Invalid for empty string url"
+
+
+def test_url_validation_without_host():
+ """ test with empty host URL """
+ schema = Schema({"url": Url()})
+ try:
+ schema({"url": 'http://'})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "expected a URL for dictionary value @ data['url']")
+ else:
+ assert False, "Did not raise Invalid for empty string url"
+
+
+def test_copy_dict_undefined():
+ """ test with a copied dictionary """
+ fields = {
+ Required("foo"): int
+ }
+ copied_fields = copy.deepcopy(fields)
+
+ schema = Schema(copied_fields)
+
+ # This used to raise a `TypeError` because the instance of `Undefined`
+ # was a copy, so object comparison would not work correctly.
+ try:
+ schema({"foo": "bar"})
+ except Exception as e:
+ assert isinstance(e, MultipleInvalid)
+
+
+def test_sorting():
+ """ Expect alphabetic sorting """
+ foo = Required('foo')
+ bar = Required('bar')
+ items = [foo, bar]
+ expected = [bar, foo]
+ result = sorted(items)
+ assert result == expected
+
+
+def test_schema_extend():
+ """Verify that Schema.extend copies schema keys from both."""
+
+ base = Schema({'a': int}, required=True)
+ extension = {'b': str}
+ extended = base.extend(extension)
+
+ assert base.schema == {'a': int}
+ assert extension == {'b': str}
+ assert extended.schema == {'a': int, 'b': str}
+ assert extended.required == base.required
+ assert extended.extra == base.extra
+
+
+def test_schema_extend_overrides():
+ """Verify that Schema.extend can override required/extra parameters."""
+
+ base = Schema({'a': int}, required=True)
+ extended = base.extend({'b': str}, required=False, extra=ALLOW_EXTRA)
+
+ assert base.required is True
+ assert base.extra == PREVENT_EXTRA
+ assert extended.required is False
+ assert extended.extra == ALLOW_EXTRA
+
+
+def test_schema_extend_key_swap():
+ """Verify that Schema.extend can replace keys, even when different markers are used"""
+
+ base = Schema({Optional('a'): int})
+ extension = {Required('a'): int}
+ extended = base.extend(extension)
+
+ assert_equal(len(base.schema), 1)
+ assert_true(isinstance(list(base.schema)[0], Optional))
+ assert_equal(len(extended.schema), 1)
+    assert_true(isinstance(list(extended.schema)[0], Required))
+
+
+def test_subschema_extension():
+ """Verify that Schema.extend adds and replaces keys in a subschema"""
+
+ base = Schema({'a': {'b': int, 'c': float}})
+ extension = {'d': str, 'a': {'b': str, 'e': int}}
+ extended = base.extend(extension)
+
+ assert_equal(base.schema, {'a': {'b': int, 'c': float}})
+ assert_equal(extension, {'d': str, 'a': {'b': str, 'e': int}})
+ assert_equal(extended.schema, {'a': {'b': str, 'c': float, 'e': int}, 'd': str})
+
+
+def test_equality():
+ assert_equal(Schema('foo'), Schema('foo'))
+
+ assert_equal(Schema(['foo', 'bar', 'baz']),
+ Schema(['foo', 'bar', 'baz']))
+
+ # Ensure two Schemas w/ two equivalent dicts initialized in a different
+ # order are considered equal.
+ dict_a = {}
+ dict_a['foo'] = 1
+ dict_a['bar'] = 2
+ dict_a['baz'] = 3
+
+ dict_b = {}
+ dict_b['baz'] = 3
+ dict_b['bar'] = 2
+ dict_b['foo'] = 1
+
+ assert_equal(Schema(dict_a), Schema(dict_b))
+
+
+def test_equality_negative():
+ """Verify that Schema objects are not equal to string representations"""
+ assert_false(Schema('foo') == 'foo')
+
+ assert_false(Schema(['foo', 'bar']) == "['foo', 'bar']")
+ assert_false(Schema(['foo', 'bar']) == Schema("['foo', 'bar']"))
+
+ assert_false(Schema({'foo': 1, 'bar': 2}) == "{'foo': 1, 'bar': 2}")
+ assert_false(Schema({'foo': 1, 'bar': 2}) == Schema("{'foo': 1, 'bar': 2}"))
+
+
+def test_inequality():
+ assert_true(Schema('foo') != 'foo')
+
+ assert_true(Schema(['foo', 'bar']) != "['foo', 'bar']")
+ assert_true(Schema(['foo', 'bar']) != Schema("['foo', 'bar']"))
+
+ assert_true(Schema({'foo': 1, 'bar': 2}) != "{'foo': 1, 'bar': 2}")
+ assert_true(Schema({'foo': 1, 'bar': 2}) != Schema("{'foo': 1, 'bar': 2}"))
+
+
+def test_inequality_negative():
+ assert_false(Schema('foo') != Schema('foo'))
+
+ assert_false(Schema(['foo', 'bar', 'baz']) !=
+ Schema(['foo', 'bar', 'baz']))
+
+ # Ensure two Schemas w/ two equivalent dicts initialized in a different
+ # order are considered equal.
+ dict_a = {}
+ dict_a['foo'] = 1
+ dict_a['bar'] = 2
+ dict_a['baz'] = 3
+
+ dict_b = {}
+ dict_b['baz'] = 3
+ dict_b['bar'] = 2
+ dict_b['foo'] = 1
+
+ assert_false(Schema(dict_a) != Schema(dict_b))
+
+
+def test_repr():
+ """Verify that __repr__ returns valid Python expressions"""
+ match = Match('a pattern', msg='message')
+ replace = Replace('you', 'I', msg='you and I')
+ range_ = Range(min=0, max=42, min_included=False,
+ max_included=False, msg='number not in range')
+ coerce_ = Coerce(int, msg="moo")
+ all_ = All('10', Coerce(int), msg='all msg')
+ maybe_int = Maybe(int)
+
+ assert_equal(repr(match), "Match('a pattern', msg='message')")
+ assert_equal(repr(replace), "Replace('you', 'I', msg='you and I')")
+ assert_equal(
+ repr(range_),
+ "Range(min=0, max=42, min_included=False, max_included=False, msg='number not in range')"
+ )
+ assert_equal(repr(coerce_), "Coerce(int, msg='moo')")
+ assert_equal(repr(all_), "All('10', Coerce(int, msg=None), msg='all msg')")
+ assert_equal(repr(maybe_int), "Any(None, %s, msg=None)" % str(int))
+
+
+def test_list_validation_messages():
+ """ Make sure useful error messages are available """
+
+ def is_even(value):
+ if value % 2:
+ raise Invalid('%i is not even' % value)
+ return value
+
+ schema = Schema(dict(even_numbers=[All(int, is_even)]))
+
+ try:
+ schema(dict(even_numbers=[3]))
+ except Invalid as e:
+ assert_equal(len(e.errors), 1, e.errors)
+ assert_equal(str(e.errors[0]), "3 is not even @ data['even_numbers'][0]")
+ assert_equal(str(e), "3 is not even @ data['even_numbers'][0]")
+ else:
+ assert False, "Did not raise Invalid"
+
+
+def test_nested_multiple_validation_errors():
+ """ Make sure useful error messages are available """
+
+ def is_even(value):
+ if value % 2:
+ raise Invalid('%i is not even' % value)
+ return value
+
+ schema = Schema(dict(even_numbers=All([All(int, is_even)],
+ Length(min=1))))
+
+ try:
+ schema(dict(even_numbers=[3]))
+ except Invalid as e:
+ assert_equal(len(e.errors), 1, e.errors)
+ assert_equal(str(e.errors[0]), "3 is not even @ data['even_numbers'][0]")
+ assert_equal(str(e), "3 is not even @ data['even_numbers'][0]")
+ else:
+ assert False, "Did not raise Invalid"
+
+
+def test_humanize_error():
+ data = {
+ 'a': 'not an int',
+ 'b': [123]
+ }
+ schema = Schema({
+ 'a': int,
+ 'b': [str]
+ })
+ try:
+ schema(data)
+ except MultipleInvalid as e:
+ assert_equal(
+ humanize_error(data, e),
+ "expected int for dictionary value @ data['a']. Got 'not an int'\n"
+ "expected str @ data['b'][0]. Got 123"
+ )
+ else:
+ assert False, 'Did not raise MultipleInvalid'
+
+
+def test_fix_157():
+ s = Schema(All([Any('one', 'two', 'three')]), Length(min=1))
+ assert_equal(['one'], s(['one']))
+ assert_raises(MultipleInvalid, s, ['four'])
+
+
+def test_range_excludes_nan():
+ s = Schema(Range(min=0, max=10))
+ assert_raises(MultipleInvalid, s, float('nan'))
+
+
+def test_equal():
+ s = Schema(Equal(1))
+ s(1)
+ assert_raises(Invalid, s, 2)
+ s = Schema(Equal('foo'))
+ s('foo')
+ assert_raises(Invalid, s, 'bar')
+ s = Schema(Equal([1, 2]))
+ s([1, 2])
+ assert_raises(Invalid, s, [])
+ assert_raises(Invalid, s, [1, 2, 3])
+ # Evaluates exactly, not through validators
+ s = Schema(Equal(str))
+ assert_raises(Invalid, s, 'foo')
+
+
+def test_unordered():
+ # Any order is OK
+ s = Schema(Unordered([2, 1]))
+ s([2, 1])
+ s([1, 2])
+ # Amount of errors is OK
+ assert_raises(Invalid, s, [2, 0])
+ assert_raises(MultipleInvalid, s, [0, 0])
+ # Different length is NOK
+ assert_raises(Invalid, s, [1])
+ assert_raises(Invalid, s, [1, 2, 0])
+ assert_raises(MultipleInvalid, s, [1, 2, 0, 0])
+ # Other type than list or tuple is NOK
+ assert_raises(Invalid, s, 'foo')
+ assert_raises(Invalid, s, 10)
+ # Validators are evaluated through as schemas
+ s = Schema(Unordered([int, str]))
+ s([1, '2'])
+ s(['1', 2])
+ s = Schema(Unordered([{'foo': int}, []]))
+ s([{'foo': 3}, []])
+ # Most accurate validators must be positioned on left
+ s = Schema(Unordered([int, 3]))
+ assert_raises(Invalid, s, [3, 2])
+ s = Schema(Unordered([3, int]))
+ s([3, 2])
+
+
+def test_maybe():
+ s = Schema(Maybe(int))
+ assert s(1) == 1
+ assert s(None) is None
+ assert_raises(Invalid, s, 'foo')
+
+ s = Schema(Maybe({str: Coerce(int)}))
+ assert s({'foo': '100'}) == {'foo': 100}
+ assert s(None) is None
+ assert_raises(Invalid, s, {'foo': 'bar'})
+
+
+def test_empty_list_as_exact():
+ s = Schema([])
+ assert_raises(Invalid, s, [1])
+ s([])
+
+
+def test_schema_decorator_match_with_args():
+ @validate(int)
+ def fn(arg):
+ return arg
+
+ fn(1)
+
+
+def test_schema_decorator_unmatch_with_args():
+ @validate(int)
+ def fn(arg):
+ return arg
+
+ assert_raises(Invalid, fn, 1.0)
+
+
+def test_schema_decorator_match_with_kwargs():
+ @validate(arg=int)
+ def fn(arg):
+ return arg
+
+ fn(1)
+
+
+def test_schema_decorator_unmatch_with_kwargs():
+ @validate(arg=int)
+ def fn(arg):
+ return arg
+
+ assert_raises(Invalid, fn, 1.0)
+
+
+def test_schema_decorator_match_return_with_args():
+ @validate(int, __return__=int)
+ def fn(arg):
+ return arg
+
+ fn(1)
+
+
+def test_schema_decorator_unmatch_return_with_args():
+ @validate(int, __return__=int)
+ def fn(arg):
+ return "hello"
+
+ assert_raises(Invalid, fn, 1)
+
+
+def test_schema_decorator_match_return_with_kwargs():
+ @validate(arg=int, __return__=int)
+ def fn(arg):
+ return arg
+
+ fn(1)
+
+
+def test_schema_decorator_unmatch_return_with_kwargs():
+ @validate(arg=int, __return__=int)
+ def fn(arg):
+ return "hello"
+
+ assert_raises(Invalid, fn, 1)
+
+
+def test_schema_decorator_return_only_match():
+ @validate(__return__=int)
+ def fn(arg):
+ return arg
+
+ fn(1)
+
+
+def test_schema_decorator_return_only_unmatch():
+ @validate(__return__=int)
+ def fn(arg):
+ return "hello"
+
+ assert_raises(Invalid, fn, 1)
+
+
+def test_schema_decorator_partial_match_called_with_args():
+ @validate(arg1=int)
+ def fn(arg1, arg2):
+ return arg1
+
+ fn(1, "foo")
+
+
+def test_schema_decorator_partial_unmatch_called_with_args():
+ @validate(arg1=int)
+ def fn(arg1, arg2):
+ return arg1
+
+ assert_raises(Invalid, fn, "bar", "foo")
+
+
+def test_schema_decorator_partial_match_called_with_kwargs():
+ @validate(arg2=int)
+ def fn(arg1, arg2):
+ return arg1
+
+ fn(arg1="foo", arg2=1)
+
+
+def test_schema_decorator_partial_unmatch_called_with_kwargs():
+ @validate(arg2=int)
+ def fn(arg1, arg2):
+ return arg1
+
+ assert_raises(Invalid, fn, arg1=1, arg2="foo")
+
+
+def test_unicode_as_key():
+ if sys.version_info >= (3,):
+ text_type = str
+ else:
+ text_type = unicode
+ schema = Schema({text_type: int})
+ schema({u("foobar"): 1})
+
+
+def test_number_validation_with_string():
+ """ test with Number with string"""
+ schema = Schema({"number": Number(precision=6, scale=2)})
+ try:
+ schema({"number": 'teststr'})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "Value must be a number enclosed with string for dictionary value @ data['number']")
+ else:
+ assert False, "Did not raise Invalid for String"
+
+
+def test_number_validation_with_invalid_precision_invalid_scale():
+ """ test with Number with invalid precision and scale"""
+ schema = Schema({"number": Number(precision=6, scale=2)})
+ try:
+ schema({"number": '123456.712'})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "Precision must be equal to 6, and Scale must be equal to 2 for dictionary value @ data['number']")
+ else:
+ assert False, "Did not raise Invalid for String"
+
+
+def test_number_validation_with_valid_precision_scale_yield_decimal_true():
+ """ test with Number with valid precision and scale"""
+ schema = Schema({"number": Number(precision=6, scale=2, yield_decimal=True)})
+ out_ = schema({"number": '1234.00'})
+ assert_equal(float(out_.get("number")), 1234.00)
+
+
+def test_number_when_precision_scale_none_yield_decimal_true():
+ """ test with Number with no precision and scale"""
+ schema = Schema({"number": Number(yield_decimal=True)})
+ out_ = schema({"number": '12345678901234'})
+ assert_equal(out_.get("number"), 12345678901234)
+
+
+def test_number_when_precision_none_n_valid_scale_case1_yield_decimal_true():
+ """ test with Number with no precision and valid scale case 1"""
+ schema = Schema({"number": Number(scale=2, yield_decimal=True)})
+ out_ = schema({"number": '123456789.34'})
+ assert_equal(float(out_.get("number")), 123456789.34)
+
+
+def test_number_when_precision_none_n_valid_scale_case2_yield_decimal_true():
+ """ test with Number with no precision and valid scale case 2 with zero in decimal part"""
+ schema = Schema({"number": Number(scale=2, yield_decimal=True)})
+ out_ = schema({"number": '123456789012.00'})
+ assert_equal(float(out_.get("number")), 123456789012.00)
+
+
+def test_number_when_precision_none_n_invalid_scale_yield_decimal_true():
+ """ test with Number with no precision and invalid scale"""
+ schema = Schema({"number": Number(scale=2, yield_decimal=True)})
+ try:
+ schema({"number": '12345678901.234'})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "Scale must be equal to 2 for dictionary value @ data['number']")
+ else:
+ assert False, "Did not raise Invalid for String"
+
+
+def test_number_when_valid_precision_n_scale_none_yield_decimal_true():
+    """ test with Number with valid precision and no scale"""
+ schema = Schema({"number": Number(precision=14, yield_decimal=True)})
+ out_ = schema({"number": '1234567.8901234'})
+ assert_equal(float(out_.get("number")), 1234567.8901234)
+
+
+def test_number_when_invalid_precision_n_scale_none_yield_decimal_true():
+    """ test with Number with invalid precision and no scale"""
+ schema = Schema({"number": Number(precision=14, yield_decimal=True)})
+ try:
+ schema({"number": '12345674.8901234'})
+ except MultipleInvalid as e:
+ assert_equal(str(e),
+ "Precision must be equal to 14 for dictionary value @ data['number']")
+ else:
+ assert False, "Did not raise Invalid for String"
+
+
+def test_number_validation_with_valid_precision_scale_yield_decimal_false():
+ """ test with Number with valid precision, scale and no yield_decimal"""
+ schema = Schema({"number": Number(precision=6, scale=2, yield_decimal=False)})
+ out_ = schema({"number": '1234.00'})
+ assert_equal(out_.get("number"), '1234.00')
+
+
+def test_named_tuples_validate_as_tuples():
+ NT = collections.namedtuple('NT', ['a', 'b'])
+ nt = NT(1, 2)
+ t = (1, 2)
+
+ Schema((int, int))(nt)
+ Schema((int, int))(t)
+ Schema(NT(int, int))(nt)
+ Schema(NT(int, int))(t)
+
+
+def test_datetime():
+ schema = Schema({"datetime": Datetime()})
+ schema({"datetime": "2016-10-24T14:01:57.102152Z"})
+ assert_raises(MultipleInvalid, schema, {"datetime": "2016-10-24T14:01:57"})
+
+
+def test_date():
+ schema = Schema({"date": Date()})
+ schema({"date": "2016-10-24"})
+ assert_raises(MultipleInvalid, schema, {"date": "2016-10-24Z"})
+
+
+def test_date_custom_format():
+ schema = Schema({"date": Date("%Y%m%d")})
+ schema({"date": "20161024"})
+ assert_raises(MultipleInvalid, schema, {"date": "2016-10-24"})
+
+
+def test_ordered_dict():
+ if not hasattr(collections, 'OrderedDict'):
+ # collections.OrderedDict was added in Python2.7; only run if present
+ return
+ schema = Schema({Number(): Number()}) # x, y pairs (for interpolation or something)
+ data = collections.OrderedDict([(5.0, 3.7), (24.0, 8.7), (43.0, 1.5),
+ (62.0, 2.1), (71.5, 6.7), (90.5, 4.1),
+ (109.0, 3.9)])
+ out = schema(data)
+ assert isinstance(out, collections.OrderedDict), 'Collection is no longer ordered'
+ assert data.keys() == out.keys(), 'Order is not consistent'
+
+
+def test_marker_hashable():
+ """Verify that you can get schema keys, even if markers were used"""
+ definition = {
+ Required('x'): int, Optional('y'): float,
+ Remove('j'): int, Remove(int): str, int: int
+ }
+ assert_equal(definition.get('x'), int)
+ assert_equal(definition.get('y'), float)
+ assert_true(Required('x') == Required('x'))
+ assert_true(Required('x') != Required('y'))
+ # Remove markers are not hashable
+ assert_equal(definition.get('j'), None)
+
+
+def test_schema_infer():
+ schema = Schema.infer({
+ 'str': 'foo',
+ 'bool': True,
+ 'int': 42,
+ 'float': 3.14
+ })
+ assert_equal(schema, Schema({
+ Required('str'): str,
+ Required('bool'): bool,
+ Required('int'): int,
+ Required('float'): float
+ }))
+
+
+def test_schema_infer_dict():
+ schema = Schema.infer({
+ 'a': {
+ 'b': {
+ 'c': 'foo'
+ }
+ }
+ })
+
+ assert_equal(schema, Schema({
+ Required('a'): {
+ Required('b'): {
+ Required('c'): str
+ }
+ }
+ }))
+
+
+def test_schema_infer_list():
+ schema = Schema.infer({
+ 'list': ['foo', True, 42, 3.14]
+ })
+
+ assert_equal(schema, Schema({
+ Required('list'): [str, bool, int, float]
+ }))
+
+
+def test_schema_infer_scalar():
+ assert_equal(Schema.infer('foo'), Schema(str))
+ assert_equal(Schema.infer(True), Schema(bool))
+ assert_equal(Schema.infer(42), Schema(int))
+ assert_equal(Schema.infer(3.14), Schema(float))
+ assert_equal(Schema.infer({}), Schema(dict))
+ assert_equal(Schema.infer([]), Schema(list))
+
+
+def test_schema_infer_accepts_kwargs():
+ schema = Schema.infer({
+ 'str': 'foo',
+ 'bool': True
+ }, required=False, extra=True)
+
+ # Subset of schema should be acceptable thanks to required=False.
+ schema({'bool': False})
+
+ # Keys that are in schema should still match required types.
+ try:
+ schema({'str': 42})
+ except Invalid:
+ pass
+ else:
+ assert False, 'Did not raise Invalid for Number'
+
+ # Extra fields should be acceptable thanks to extra=True.
+ schema({'str': 'bar', 'int': 42})
+
+
+def test_validation_performance():
+ """
+    This test makes sure that validation of dictionaries runs in linear time.
+    To achieve this, a custom marker is used in the schema that counts each time it is evaluated.
+    By doing so we can determine whether the validation is done in linear complexity.
+    Prior to issue https://github.com/alecthomas/voluptuous/issues/259 this was exponential.
+ """
+ num_of_keys = 1000
+
+ schema_dict = {}
+ data = {}
+ data_extra_keys = {}
+
+ counter = [0]
+
+ class CounterMarker(Marker):
+ def __call__(self, *args, **kwargs):
+ counter[0] += 1
+ return super(CounterMarker, self).__call__(*args, **kwargs)
+
+ for i in range(num_of_keys):
+ schema_dict[CounterMarker(str(i))] = str
+ data[str(i)] = str(i)
+ data_extra_keys[str(i * 2)] = str(i) # half of the keys are present, and half aren't
+
+ schema = Schema(schema_dict, extra=ALLOW_EXTRA)
+
+ schema(data)
+
+ assert counter[0] <= num_of_keys, "Validation complexity is not linear! %s > %s" % (counter[0], num_of_keys)
+
+ counter[0] = 0 # reset counter
+ schema(data_extra_keys)
+
+ assert counter[0] <= num_of_keys, "Validation complexity is not linear! %s > %s" % (counter[0], num_of_keys)
+
+
+def test_IsDir():
+ schema = Schema(IsDir())
+ assert_raises(MultipleInvalid, schema, 3)
+ schema(os.path.dirname(os.path.abspath(__file__)))
+
+
+def test_IsFile():
+ schema = Schema(IsFile())
+ assert_raises(MultipleInvalid, schema, 3)
+ schema(os.path.abspath(__file__))
+
+
+def test_PathExists():
+ schema = Schema(PathExists())
+ assert_raises(MultipleInvalid, schema, 3)
+ schema(os.path.abspath(__file__))
+
+
+def test_description():
+ marker = Marker(Schema(str), description='Hello')
+ assert marker.description == 'Hello'
+
+ optional = Optional('key', description='Hello')
+ assert optional.description == 'Hello'
+
+ exclusive = Exclusive('alpha', 'angles', description='Hello')
+ assert exclusive.description == 'Hello'
+
+ required = Required('key', description='Hello')
+ assert required.description == 'Hello'
+
+
+def test_SomeOf_min_validation():
+ validator = All(Length(min=8), SomeOf(
+ min_valid=3,
+ validators=[Match(r'.*[A-Z]', 'no uppercase letters'),
+ Match(r'.*[a-z]', 'no lowercase letters'),
+ Match(r'.*[0-9]', 'no numbers'),
+ Match(r'.*[$@$!%*#?&^:;/<,>|{}()\-\'._+=]', 'no symbols')]))
+
+ validator('ffe532A1!')
+ with raises(MultipleInvalid, 'length of value must be at least 8'):
+ validator('a')
+
+ with raises(MultipleInvalid, 'no uppercase letters, no lowercase letters'):
+ validator('wqs2!#s111')
+
+ with raises(MultipleInvalid, 'no lowercase letters, no symbols'):
+ validator('3A34SDEF5')
+
+
+def test_SomeOf_max_validation():
+ validator = SomeOf(
+ max_valid=2,
+ validators=[Match(r'.*[A-Z]', 'no uppercase letters'),
+ Match(r'.*[a-z]', 'no lowercase letters'),
+ Match(r'.*[0-9]', 'no numbers')],
+ msg='max validation test failed')
+
+ validator('Aa')
+ with raises(TooManyValid, 'max validation test failed'):
+ validator('Aa1')
+
+
+def test_self_validation():
+ schema = Schema({"number": int,
+ "follow": Self})
+ try:
+ schema({"number": "abc"})
+ except MultipleInvalid:
+ pass
+ else:
+ assert False, "Did not raise Invalid"
+ try:
+ schema({"follow": {"number": '123456.712'}})
+ except MultipleInvalid:
+ pass
+ else:
+ assert False, "Did not raise Invalid"
+ schema({"follow": {"number": 123456}})
+ schema({"follow": {"follow": {"number": 123456}}})
+
+
+def test_any_error_has_path():
+ """https://github.com/alecthomas/voluptuous/issues/347"""
+ s = Schema({
+ Optional('q'): int,
+ Required('q2'): Any(int, msg='toto')
+ })
+ try:
+ s({'q': 'str', 'q2': 'tata'})
+ except MultipleInvalid as exc:
+ assert (
+ (exc.errors[0].path == ['q'] and exc.errors[1].path == ['q2']) or
+ (exc.errors[1].path == ['q'] and exc.errors[0].path == ['q2'])
+ )
+ else:
+ assert False, "Did not raise AnyInvalid"
+
+
+def test_all_error_has_path():
+ """https://github.com/alecthomas/voluptuous/issues/347"""
+ s = Schema({
+ Optional('q'): int,
+ Required('q2'): All([str, Length(min=10)], msg='toto'),
+ })
+ try:
+ s({'q': 'str', 'q2': 12})
+ except MultipleInvalid as exc:
+ assert (
+ (exc.errors[0].path == ['q'] and exc.errors[1].path == ['q2']) or
+ (exc.errors[1].path == ['q'] and exc.errors[0].path == ['q2'])
+ )
+ else:
+ assert False, "Did not raise AllInvalid"
+
+
+def test_match_error_has_path():
+ """https://github.com/alecthomas/voluptuous/issues/347"""
+ s = Schema({
+ Required('q2'): Match("a"),
+ })
+ try:
+ s({'q2': 12})
+ except MultipleInvalid as exc:
+ assert exc.errors[0].path == ['q2']
+ else:
+ assert False, "Did not raise MatchInvalid"
+
+
+def test_self_any():
+ schema = Schema({"number": int,
+ "follow": Any(Self, "stop")})
+ try:
+ schema({"number": "abc"})
+ except MultipleInvalid:
+ pass
+ else:
+ assert False, "Did not raise Invalid"
+ try:
+ schema({"follow": {"number": '123456.712'}})
+ except MultipleInvalid:
+ pass
+ else:
+ assert False, "Did not raise Invalid"
+ schema({"follow": {"number": 123456}})
+ schema({"follow": {"follow": {"number": 123456}}})
+ schema({"follow": {"follow": {"number": 123456, "follow": "stop"}}})
+
+
+def test_self_all():
+ schema = Schema({"number": int,
+ "follow": All(Self,
+ Schema({"extra_number": int},
+ extra=ALLOW_EXTRA))},
+ extra=ALLOW_EXTRA)
+ try:
+ schema({"number": "abc"})
+ except MultipleInvalid:
+ pass
+ else:
+ assert False, "Did not raise Invalid"
+ try:
+ schema({"follow": {"number": '123456.712'}})
+ except MultipleInvalid:
+ pass
+ else:
+ assert False, "Did not raise Invalid"
+ schema({"follow": {"number": 123456}})
+ schema({"follow": {"follow": {"number": 123456}}})
+ schema({"follow": {"number": 123456, "extra_number": 123}})
+ try:
+ schema({"follow": {"number": 123456, "extra_number": "123"}})
+ except MultipleInvalid:
+ pass
+ else:
+ assert False, "Did not raise Invalid"
+
+
+def test_SomeOf_on_bounds_assertion():
+ with raises(AssertionError, 'when using "SomeOf" you should specify at least one of min_valid and max_valid'):
+ SomeOf(validators=[])
+
+
+def test_comparing_voluptuous_object_to_str():
+ assert_true(Optional('Classification') < 'Name')
+
+
+def test_set_of_integers():
+ schema = Schema({int})
+ with raises(Invalid, 'expected a set'):
+ schema(42)
+ with raises(Invalid, 'expected a set'):
+ schema(frozenset([42]))
+
+ schema(set())
+ schema(set([42]))
+ schema(set([42, 43, 44]))
+ try:
+ schema(set(['abc']))
+ except MultipleInvalid as e:
+ assert_equal(str(e), "invalid value in set")
+ else:
+ assert False, "Did not raise Invalid"
+
+
+def test_frozenset_of_integers():
+ schema = Schema(frozenset([int]))
+ with raises(Invalid, 'expected a frozenset'):
+ schema(42)
+ with raises(Invalid, 'expected a frozenset'):
+ schema(set([42]))
+
+ schema(frozenset())
+ schema(frozenset([42]))
+ schema(frozenset([42, 43, 44]))
+ try:
+ schema(frozenset(['abc']))
+ except MultipleInvalid as e:
+ assert_equal(str(e), "invalid value in frozenset")
+ else:
+ assert False, "Did not raise Invalid"
+
+
+def test_set_of_integers_and_strings():
+ schema = Schema({int, str})
+ with raises(Invalid, 'expected a set'):
+ schema(42)
+
+ schema(set())
+ schema(set([42]))
+ schema(set(['abc']))
+ schema(set([42, 'abc']))
+ try:
+ schema(set([None]))
+ except MultipleInvalid as e:
+ assert_equal(str(e), "invalid value in set")
+ else:
+ assert False, "Did not raise Invalid"
+
+
+def test_frozenset_of_integers_and_strings():
+ schema = Schema(frozenset([int, str]))
+ with raises(Invalid, 'expected a frozenset'):
+ schema(42)
+
+ schema(frozenset())
+ schema(frozenset([42]))
+ schema(frozenset(['abc']))
+ schema(frozenset([42, 'abc']))
+ try:
+ schema(frozenset([None]))
+ except MultipleInvalid as e:
+ assert_equal(str(e), "invalid value in frozenset")
+ else:
+ assert False, "Did not raise Invalid"
diff --git a/third_party/python/voluptuous/voluptuous/util.py b/third_party/python/voluptuous/voluptuous/util.py
new file mode 100644
index 0000000000..434c360c7e
--- /dev/null
+++ b/third_party/python/voluptuous/voluptuous/util.py
@@ -0,0 +1,154 @@
+import sys
+
+from voluptuous.error import LiteralInvalid, TypeInvalid, Invalid
+from voluptuous.schema_builder import Schema, default_factory, raises
+from voluptuous import validators
+
+__author__ = 'tusharmakkar08'
+
+
+def Lower(v):
+ """Transform a string to lower case.
+
+ >>> s = Schema(Lower)
+ >>> s('HI')
+ 'hi'
+ """
+ return str(v).lower()
+
+
+def Upper(v):
+ """Transform a string to upper case.
+
+ >>> s = Schema(Upper)
+ >>> s('hi')
+ 'HI'
+ """
+ return str(v).upper()
+
+
+def Capitalize(v):
+ """Capitalise a string.
+
+ >>> s = Schema(Capitalize)
+ >>> s('hello world')
+ 'Hello world'
+ """
+ return str(v).capitalize()
+
+
+def Title(v):
+ """Title case a string.
+
+ >>> s = Schema(Title)
+ >>> s('hello world')
+ 'Hello World'
+ """
+ return str(v).title()
+
+
+def Strip(v):
+ """Strip whitespace from a string.
+
+ >>> s = Schema(Strip)
+ >>> s(' hello world ')
+ 'hello world'
+ """
+ return str(v).strip()
+
+
+class DefaultTo(object):
+    """Set a value to default_value if None is provided.
+
+ >>> s = Schema(DefaultTo(42))
+ >>> s(None)
+ 42
+ >>> s = Schema(DefaultTo(list))
+ >>> s(None)
+ []
+ """
+
+ def __init__(self, default_value, msg=None):
+ self.default_value = default_factory(default_value)
+ self.msg = msg
+
+ def __call__(self, v):
+ if v is None:
+ v = self.default_value()
+ return v
+
+ def __repr__(self):
+ return 'DefaultTo(%s)' % (self.default_value(),)
+
+
+class SetTo(object):
+ """Set a value, ignoring any previous value.
+
+ >>> s = Schema(validators.Any(int, SetTo(42)))
+ >>> s(2)
+ 2
+ >>> s("foo")
+ 42
+ """
+
+ def __init__(self, value):
+ self.value = default_factory(value)
+
+ def __call__(self, v):
+ return self.value()
+
+ def __repr__(self):
+ return 'SetTo(%s)' % (self.value(),)
+
+
+class Set(object):
+ """Convert a list into a set.
+
+ >>> s = Schema(Set())
+ >>> s([]) == set([])
+ True
+ >>> s([1, 2]) == set([1, 2])
+ True
+ >>> with raises(Invalid, regex="^cannot be presented as set: "):
+ ... s([set([1, 2]), set([3, 4])])
+ """
+
+ def __init__(self, msg=None):
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ set_v = set(v)
+ except Exception as e:
+ raise TypeInvalid(
+ self.msg or 'cannot be presented as set: {0}'.format(e))
+ return set_v
+
+ def __repr__(self):
+ return 'Set()'
+
+
+class Literal(object):
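+    # Check that a value is exactly equal to the given literal; a minimal sketch
+    # of the behaviour implemented below:
+    #
+    #   >>> Literal('foo')('foo')
+    #   'foo'
+    #   >>> with raises(LiteralInvalid):
+    #   ...     Literal('foo')('bar')
+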
+ def __init__(self, lit):
+ self.lit = lit
+
+ def __call__(self, value, msg=None):
+ if self.lit != value:
+ raise LiteralInvalid(
+ msg or '%s not match for %s' % (value, self.lit)
+ )
+ else:
+ return self.lit
+
+ def __str__(self):
+ return str(self.lit)
+
+ def __repr__(self):
+ return repr(self.lit)
+
+
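+# Compatibility helper: return the text (unicode) form of x on Python 2;
+# on Python 3, str is already text, so x is returned unchanged.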
+def u(x):
+ if sys.version_info < (3,):
+ return unicode(x)
+ else:
+ return x
diff --git a/third_party/python/voluptuous/voluptuous/validators.py b/third_party/python/voluptuous/voluptuous/validators.py
new file mode 100644
index 0000000000..d5e3ed5980
--- /dev/null
+++ b/third_party/python/voluptuous/voluptuous/validators.py
@@ -0,0 +1,1004 @@
+import os
+import re
+import datetime
+import sys
+from functools import wraps
+from decimal import Decimal, InvalidOperation
+
+from voluptuous.schema_builder import Schema, raises, message
+from voluptuous.error import (MultipleInvalid, CoerceInvalid, TrueInvalid, FalseInvalid, BooleanInvalid, Invalid,
+ AnyInvalid, AllInvalid, MatchInvalid, UrlInvalid, EmailInvalid, FileInvalid, DirInvalid,
+ RangeInvalid, PathInvalid, ExactSequenceInvalid, LengthInvalid, DatetimeInvalid,
+ DateInvalid, InInvalid, TypeInvalid, NotInInvalid, ContainsInvalid, NotEnoughValid,
+ TooManyValid)
+
+if sys.version_info >= (3,):
+ import urllib.parse as urlparse
+
+ basestring = str
+else:
+ import urlparse
+
+# Taken from https://github.com/kvesteri/validators/blob/master/validators/email.py
+USER_REGEX = re.compile(
+ # dot-atom
+ r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+"
+ r"(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*$"
+ # quoted-string
+ r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|'
+ r"""\\[\001-\011\013\014\016-\177])*"$)""",
+ re.IGNORECASE
+)
+DOMAIN_REGEX = re.compile(
+ # domain
+ r'(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+'
+ r'(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?$)'
+ # literal form, ipv4 address (SMTP 4.1.3)
+ r'|^\[(25[0-5]|2[0-4]\d|[0-1]?\d?\d)'
+ r'(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}\]$',
+ re.IGNORECASE)
+
+__author__ = 'tusharmakkar08'
+
+
+def truth(f):
+ """Convenience decorator to convert truth functions into validators.
+
+ >>> @truth
+ ... def isdir(v):
+ ... return os.path.isdir(v)
+ >>> validate = Schema(isdir)
+ >>> validate('/')
+ '/'
+ >>> with raises(MultipleInvalid, 'not a valid value'):
+ ... validate('/notavaliddir')
+ """
+
+ @wraps(f)
+ def check(v):
+ t = f(v)
+ if not t:
+ raise ValueError
+ return v
+
+ return check
+
+
+class Coerce(object):
+ """Coerce a value to a type.
+
+ If the type constructor throws a ValueError or TypeError, the value
+ will be marked as Invalid.
+
+ Default behavior:
+
+ >>> validate = Schema(Coerce(int))
+ >>> with raises(MultipleInvalid, 'expected int'):
+ ... validate(None)
+ >>> with raises(MultipleInvalid, 'expected int'):
+ ... validate('foo')
+
+ With custom message:
+
+ >>> validate = Schema(Coerce(int, "moo"))
+ >>> with raises(MultipleInvalid, 'moo'):
+ ... validate('foo')
+ """
+
+ def __init__(self, type, msg=None):
+ self.type = type
+ self.msg = msg
+ self.type_name = type.__name__
+
+ def __call__(self, v):
+ try:
+ return self.type(v)
+ except (ValueError, TypeError, InvalidOperation):
+ msg = self.msg or ('expected %s' % self.type_name)
+ raise CoerceInvalid(msg)
+
+ def __repr__(self):
+ return 'Coerce(%s, msg=%r)' % (self.type_name, self.msg)
+
+
+@message('value was not true', cls=TrueInvalid)
+@truth
+def IsTrue(v):
+ """Assert that a value is true, in the Python sense.
+
+ >>> validate = Schema(IsTrue())
+
+ "In the Python sense" means that implicitly false values, such as empty
+ lists, dictionaries, etc. are treated as "false":
+
+ >>> with raises(MultipleInvalid, "value was not true"):
+ ... validate([])
+ >>> validate([1])
+ [1]
+ >>> with raises(MultipleInvalid, "value was not true"):
+ ... validate(False)
+
+ ...and so on.
+
+ >>> try:
+ ... validate([])
+ ... except MultipleInvalid as e:
+ ... assert isinstance(e.errors[0], TrueInvalid)
+ """
+ return v
+
+
+@message('value was not false', cls=FalseInvalid)
+def IsFalse(v):
+ """Assert that a value is false, in the Python sense.
+
+ (see :func:`IsTrue` for more detail)
+
+ >>> validate = Schema(IsFalse())
+ >>> validate([])
+ []
+ >>> with raises(MultipleInvalid, "value was not false"):
+ ... validate(True)
+
+ >>> try:
+ ... validate(True)
+ ... except MultipleInvalid as e:
+ ... assert isinstance(e.errors[0], FalseInvalid)
+ """
+ if v:
+ raise ValueError
+ return v
+
+
+@message('expected boolean', cls=BooleanInvalid)
+def Boolean(v):
+ """Convert human-readable boolean values to a bool.
+
+ Accepted values are 1, true, yes, on, enable, and their negatives.
+ Non-string values are cast to bool.
+
+ >>> validate = Schema(Boolean())
+ >>> validate(True)
+ True
+ >>> validate("1")
+ True
+ >>> validate("0")
+ False
+ >>> with raises(MultipleInvalid, "expected boolean"):
+ ... validate('moo')
+ >>> try:
+ ... validate('moo')
+ ... except MultipleInvalid as e:
+ ... assert isinstance(e.errors[0], BooleanInvalid)
+ """
+ if isinstance(v, basestring):
+ v = v.lower()
+ if v in ('1', 'true', 'yes', 'on', 'enable'):
+ return True
+ if v in ('0', 'false', 'no', 'off', 'disable'):
+ return False
+ raise ValueError
+ return bool(v)
+
+
+class _WithSubValidators(object):
+ """Base class for validators that use sub-validators.
+
+ Special class to use as a parent class for validators using sub-validators.
+ This class provides the `__voluptuous_compile__` method so the
+ sub-validators are compiled by the parent `Schema`.
+ """
+
+ def __init__(self, *validators, **kwargs):
+ self.validators = validators
+ self.msg = kwargs.pop('msg', None)
+
+ def __voluptuous_compile__(self, schema):
+ self._compiled = [
+ schema._compile(v)
+ for v in self.validators
+ ]
+ return self._run
+
+ def _run(self, path, value):
+ return self._exec(self._compiled, value, path)
+
+ def __call__(self, v):
+ return self._exec((Schema(val) for val in self.validators), v)
+
+ def __repr__(self):
+ return '%s(%s, msg=%r)' % (
+ self.__class__.__name__,
+ ", ".join(repr(v) for v in self.validators),
+ self.msg
+ )
+
+
+class Any(_WithSubValidators):
+ """Use the first validated value.
+
+ :param msg: Message to deliver to user if validation fails.
+ :param kwargs: All other keyword arguments are passed to the sub-Schema constructors.
+ :returns: Return value of the first validator that passes.
+
+ >>> validate = Schema(Any('true', 'false',
+ ... All(Any(int, bool), Coerce(bool))))
+ >>> validate('true')
+ 'true'
+ >>> validate(1)
+ True
+ >>> with raises(MultipleInvalid, "not a valid value"):
+ ... validate('moo')
+
+ msg argument is used
+
+ >>> validate = Schema(Any(1, 2, 3, msg="Expected 1 2 or 3"))
+ >>> validate(1)
+ 1
+ >>> with raises(MultipleInvalid, "Expected 1 2 or 3"):
+ ... validate(4)
+ """
+
+ def _exec(self, funcs, v, path=None):
+ error = None
+ for func in funcs:
+ try:
+ if path is None:
+ return func(v)
+ else:
+ return func(path, v)
+ except Invalid as e:
+ if error is None or len(e.path) > len(error.path):
+ error = e
+ else:
+ if error:
+ raise error if self.msg is None else AnyInvalid(
+ self.msg, path=path)
+ raise AnyInvalid(self.msg or 'no valid value found',
+ path=path)
+
+
+# Convenience alias
+Or = Any
+
+
+class All(_WithSubValidators):
+ """Value must pass all validators.
+
+ The output of each validator is passed as input to the next.
+
+ :param msg: Message to deliver to user if validation fails.
+ :param kwargs: All other keyword arguments are passed to the sub-Schema constructors.
+
+ >>> validate = Schema(All('10', Coerce(int)))
+ >>> validate('10')
+ 10
+ """
+
+ def _exec(self, funcs, v, path=None):
+ try:
+ for func in funcs:
+ if path is None:
+ v = func(v)
+ else:
+ v = func(path, v)
+ except Invalid as e:
+ raise e if self.msg is None else AllInvalid(self.msg, path=path)
+ return v
+
+
+# Convenience alias
+And = All
+
+
+class Match(object):
+ """Value must be a string that matches the regular expression.
+
+ >>> validate = Schema(Match(r'^0x[A-F0-9]+$'))
+ >>> validate('0x123EF4')
+ '0x123EF4'
+ >>> with raises(MultipleInvalid, "does not match regular expression"):
+ ... validate('123EF4')
+
+ >>> with raises(MultipleInvalid, 'expected string or buffer'):
+ ... validate(123)
+
+    Pattern may also be a compiled regular expression:
+
+ >>> validate = Schema(Match(re.compile(r'0x[A-F0-9]+', re.I)))
+ >>> validate('0x123ef4')
+ '0x123ef4'
+ """
+
+ def __init__(self, pattern, msg=None):
+ if isinstance(pattern, basestring):
+ pattern = re.compile(pattern)
+ self.pattern = pattern
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ match = self.pattern.match(v)
+ except TypeError:
+ raise MatchInvalid("expected string or buffer")
+ if not match:
+ raise MatchInvalid(self.msg or 'does not match regular expression')
+ return v
+
+ def __repr__(self):
+ return 'Match(%r, msg=%r)' % (self.pattern.pattern, self.msg)
+
+
+class Replace(object):
+ """Regex substitution.
+
+ >>> validate = Schema(All(Replace('you', 'I'),
+ ... Replace('hello', 'goodbye')))
+ >>> validate('you say hello')
+ 'I say goodbye'
+ """
+
+ def __init__(self, pattern, substitution, msg=None):
+ if isinstance(pattern, basestring):
+ pattern = re.compile(pattern)
+ self.pattern = pattern
+ self.substitution = substitution
+ self.msg = msg
+
+ def __call__(self, v):
+ return self.pattern.sub(self.substitution, v)
+
+ def __repr__(self):
+ return 'Replace(%r, %r, msg=%r)' % (self.pattern.pattern,
+ self.substitution,
+ self.msg)
+
+
+def _url_validation(v):
+ parsed = urlparse.urlparse(v)
+ if not parsed.scheme or not parsed.netloc:
+ raise UrlInvalid("must have a URL scheme and host")
+ return parsed
+
+
+@message('expected an Email', cls=EmailInvalid)
+def Email(v):
+    """Verify that the value is a valid email address.
+
+ >>> s = Schema(Email())
+ >>> with raises(MultipleInvalid, 'expected an Email'):
+ ... s("a.com")
+ >>> with raises(MultipleInvalid, 'expected an Email'):
+ ... s("a@.com")
+ >>> with raises(MultipleInvalid, 'expected an Email'):
+ ... s("a@.com")
+ >>> s('t@x.com')
+ 't@x.com'
+ """
+ try:
+ if not v or "@" not in v:
+ raise EmailInvalid("Invalid Email")
+ user_part, domain_part = v.rsplit('@', 1)
+
+ if not (USER_REGEX.match(user_part) and DOMAIN_REGEX.match(domain_part)):
+ raise EmailInvalid("Invalid Email")
+ return v
+ except:
+ raise ValueError
+
+
+@message('expected a Fully qualified domain name URL', cls=UrlInvalid)
+def FqdnUrl(v):
+ """Verify that the value is a Fully qualified domain name URL.
+
+ >>> s = Schema(FqdnUrl())
+ >>> with raises(MultipleInvalid, 'expected a Fully qualified domain name URL'):
+ ... s("http://localhost/")
+ >>> s('http://w3.org')
+ 'http://w3.org'
+ """
+ try:
+ parsed_url = _url_validation(v)
+ if "." not in parsed_url.netloc:
+ raise UrlInvalid("must have a domain name in URL")
+ return v
+ except:
+ raise ValueError
+
+
+@message('expected a URL', cls=UrlInvalid)
+def Url(v):
+ """Verify that the value is a URL.
+
+ >>> s = Schema(Url())
+ >>> with raises(MultipleInvalid, 'expected a URL'):
+ ... s(1)
+ >>> s('http://w3.org')
+ 'http://w3.org'
+ """
+ try:
+ _url_validation(v)
+ return v
+ except:
+ raise ValueError
+
+
+@message('not a file', cls=FileInvalid)
+@truth
+def IsFile(v):
+ """Verify the file exists.
+
+ >>> os.path.basename(IsFile()(__file__)).startswith('validators.py')
+ True
+ >>> with raises(FileInvalid, 'not a file'):
+ ... IsFile()("random_filename_goes_here.py")
+ >>> with raises(FileInvalid, 'Not a file'):
+ ... IsFile()(None)
+ """
+ try:
+ if v:
+ v = str(v)
+ return os.path.isfile(v)
+ else:
+ raise FileInvalid('Not a file')
+ except TypeError:
+ raise FileInvalid('Not a file')
+
+
+@message('not a directory', cls=DirInvalid)
+@truth
+def IsDir(v):
+ """Verify the directory exists.
+
+ >>> IsDir()('/')
+ '/'
+ >>> with raises(DirInvalid, 'Not a directory'):
+ ... IsDir()(None)
+ """
+ try:
+ if v:
+ v = str(v)
+ return os.path.isdir(v)
+ else:
+ raise DirInvalid("Not a directory")
+ except TypeError:
+ raise DirInvalid("Not a directory")
+
+
+@message('path does not exist', cls=PathInvalid)
+@truth
+def PathExists(v):
+ """Verify the path exists, regardless of its type.
+
+ >>> os.path.basename(PathExists()(__file__)).startswith('validators.py')
+ True
+ >>> with raises(Invalid, 'path does not exist'):
+ ... PathExists()("random_filename_goes_here.py")
+ >>> with raises(PathInvalid, 'Not a Path'):
+ ... PathExists()(None)
+ """
+ try:
+ if v:
+ v = str(v)
+ return os.path.exists(v)
+ else:
+ raise PathInvalid("Not a Path")
+ except TypeError:
+ raise PathInvalid("Not a Path")
+
+
+def Maybe(validator):
+ """Validate that the object matches given validator or is None.
+
+ :raises Invalid: if the value does not match the given validator and is not
+ None
+
+ >>> s = Schema(Maybe(int))
+ >>> s(10)
+ 10
+ >>> with raises(Invalid):
+ ... s("string")
+
+ """
+ return Any(None, validator)
+
+
+class Range(object):
+ """Limit a value to a range.
+
+ Either min or max may be omitted.
+ Either min or max can be excluded from the range of accepted values.
+
+ :raises Invalid: If the value is outside the range.
+
+ >>> s = Schema(Range(min=1, max=10, min_included=False))
+ >>> s(5)
+ 5
+ >>> s(10)
+ 10
+ >>> with raises(MultipleInvalid, 'value must be at most 10'):
+ ... s(20)
+ >>> with raises(MultipleInvalid, 'value must be higher than 1'):
+ ... s(1)
+ >>> with raises(MultipleInvalid, 'value must be lower than 10'):
+ ... Schema(Range(max=10, max_included=False))(20)
+ """
+
+ def __init__(self, min=None, max=None, min_included=True,
+ max_included=True, msg=None):
+ self.min = min
+ self.max = max
+ self.min_included = min_included
+ self.max_included = max_included
+ self.msg = msg
+
+ def __call__(self, v):
+ if self.min_included:
+ if self.min is not None and not v >= self.min:
+ raise RangeInvalid(
+ self.msg or 'value must be at least %s' % self.min)
+ else:
+ if self.min is not None and not v > self.min:
+ raise RangeInvalid(
+ self.msg or 'value must be higher than %s' % self.min)
+ if self.max_included:
+ if self.max is not None and not v <= self.max:
+ raise RangeInvalid(
+ self.msg or 'value must be at most %s' % self.max)
+ else:
+ if self.max is not None and not v < self.max:
+ raise RangeInvalid(
+ self.msg or 'value must be lower than %s' % self.max)
+ return v
+
+ def __repr__(self):
+ return ('Range(min=%r, max=%r, min_included=%r,'
+ ' max_included=%r, msg=%r)' % (self.min, self.max,
+ self.min_included,
+ self.max_included,
+ self.msg))
+
+
+class Clamp(object):
+ """Clamp a value to a range.
+
+ Either min or max may be omitted.
+
+ >>> s = Schema(Clamp(min=0, max=1))
+ >>> s(0.5)
+ 0.5
+ >>> s(5)
+ 1
+ >>> s(-1)
+ 0
+ """
+
+ def __init__(self, min=None, max=None, msg=None):
+ self.min = min
+ self.max = max
+ self.msg = msg
+
+ def __call__(self, v):
+ if self.min is not None and v < self.min:
+ v = self.min
+ if self.max is not None and v > self.max:
+ v = self.max
+ return v
+
+ def __repr__(self):
+ return 'Clamp(min=%s, max=%s)' % (self.min, self.max)
+
+
+class Length(object):
+ """The length of a value must be in a certain range."""
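+
+    # A minimal usage sketch, in the style of the doctests used elsewhere in
+    # this module:
+    #
+    #   >>> s = Schema(Length(min=1, max=3))
+    #   >>> s([1, 2])
+    #   [1, 2]
+    #   >>> with raises(MultipleInvalid, 'length of value must be at least 1'):
+    #   ...     s([])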
+
+ def __init__(self, min=None, max=None, msg=None):
+ self.min = min
+ self.max = max
+ self.msg = msg
+
+ def __call__(self, v):
+ if self.min is not None and len(v) < self.min:
+ raise LengthInvalid(
+ self.msg or 'length of value must be at least %s' % self.min)
+ if self.max is not None and len(v) > self.max:
+ raise LengthInvalid(
+ self.msg or 'length of value must be at most %s' % self.max)
+ return v
+
+ def __repr__(self):
+ return 'Length(min=%s, max=%s)' % (self.min, self.max)
+
+
+class Datetime(object):
+ """Validate that the value matches the datetime format."""
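+
+    # A minimal usage sketch; the default format is ISO 8601 with microseconds
+    # and a literal 'Z' suffix:
+    #
+    #   >>> s = Schema(Datetime())
+    #   >>> s('2016-10-24T14:01:57.102152Z')
+    #   '2016-10-24T14:01:57.102152Z'
+    #   >>> with raises(MultipleInvalid, regex='does not match expected format'):
+    #   ...     s('2016-10-24')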
+
+ DEFAULT_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
+
+ def __init__(self, format=None, msg=None):
+ self.format = format or self.DEFAULT_FORMAT
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ datetime.datetime.strptime(v, self.format)
+ except (TypeError, ValueError):
+ raise DatetimeInvalid(
+ self.msg or 'value does not match'
+ ' expected format %s' % self.format)
+ return v
+
+ def __repr__(self):
+ return 'Datetime(format=%s)' % self.format
+
+
+class Date(Datetime):
+ """Validate that the value matches the date format."""
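+
+    # A minimal usage sketch; the default format is '%Y-%m-%d', and a custom
+    # format such as Date('%Y%m%d') may be passed instead:
+    #
+    #   >>> s = Schema(Date())
+    #   >>> s('2016-10-24')
+    #   '2016-10-24'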
+
+ DEFAULT_FORMAT = '%Y-%m-%d'
+
+ def __call__(self, v):
+ try:
+ datetime.datetime.strptime(v, self.format)
+ except (TypeError, ValueError):
+ raise DateInvalid(
+ self.msg or 'value does not match'
+ ' expected format %s' % self.format)
+ return v
+
+ def __repr__(self):
+ return 'Date(format=%s)' % self.format
+
+
+class In(object):
+ """Validate that a value is in a collection."""
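+
+    # A minimal usage sketch:
+    #
+    #   >>> s = Schema(In(['red', 'green', 'blue']))
+    #   >>> s('red')
+    #   'red'
+    #   >>> with raises(MultipleInvalid, 'value is not allowed'):
+    #   ...     s('yellow')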
+
+ def __init__(self, container, msg=None):
+ self.container = container
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ check = v not in self.container
+ except TypeError:
+ check = True
+ if check:
+ raise InInvalid(self.msg or 'value is not allowed')
+ return v
+
+ def __repr__(self):
+ return 'In(%s)' % (self.container,)
+
+
+class NotIn(object):
+ """Validate that a value is not in a collection."""
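+
+    # A minimal usage sketch (the complement of In above):
+    #
+    #   >>> s = Schema(NotIn(['admin', 'root']))
+    #   >>> s('guest')
+    #   'guest'
+    #   >>> with raises(MultipleInvalid, 'value is not allowed'):
+    #   ...     s('root')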
+
+ def __init__(self, container, msg=None):
+ self.container = container
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ check = v in self.container
+ except TypeError:
+ check = True
+ if check:
+ raise NotInInvalid(self.msg or 'value is not allowed')
+ return v
+
+ def __repr__(self):
+ return 'NotIn(%s)' % (self.container,)
+
+
+class Contains(object):
+ """Validate that the given schema element is in the sequence being validated.
+
+ >>> s = Contains(1)
+ >>> s([3, 2, 1])
+ [3, 2, 1]
+ >>> with raises(ContainsInvalid, 'value is not allowed'):
+ ... s([3, 2])
+ """
+
+ def __init__(self, item, msg=None):
+ self.item = item
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ check = self.item not in v
+ except TypeError:
+ check = True
+ if check:
+ raise ContainsInvalid(self.msg or 'value is not allowed')
+ return v
+
+ def __repr__(self):
+ return 'Contains(%s)' % (self.item,)
+
+
+class ExactSequence(object):
+ """Matches each element in a sequence against the corresponding element in
+ the validators.
+
+ :param msg: Message to deliver to user if validation fails.
+ :param kwargs: All other keyword arguments are passed to the sub-Schema
+ constructors.
+
+ >>> from voluptuous import Schema, ExactSequence
+ >>> validate = Schema(ExactSequence([str, int, list, list]))
+ >>> validate(['hourly_report', 10, [], []])
+ ['hourly_report', 10, [], []]
+ >>> validate(('hourly_report', 10, [], []))
+ ('hourly_report', 10, [], [])
+ """
+
+ def __init__(self, validators, **kwargs):
+ self.validators = validators
+ self.msg = kwargs.pop('msg', None)
+ self._schemas = [Schema(val, **kwargs) for val in validators]
+
+ def __call__(self, v):
+ if not isinstance(v, (list, tuple)) or len(v) != len(self._schemas):
+ raise ExactSequenceInvalid(self.msg)
+ try:
+ v = type(v)(schema(x) for x, schema in zip(v, self._schemas))
+ except Invalid as e:
+ raise e if self.msg is None else ExactSequenceInvalid(self.msg)
+ return v
+
+ def __repr__(self):
+ return 'ExactSequence([%s])' % (", ".join(repr(v)
+ for v in self.validators))
+
+
+class Unique(object):
+ """Ensure an iterable does not contain duplicate items.
+
+    Only iterables convertible to a set are supported (native types and
+    objects with a correct __eq__).
+
+    JSON does not support sets, so they need to be represented as arrays.
+    Unique ensures that such an array does not contain duplicates.
+
+ >>> s = Schema(Unique())
+ >>> s([])
+ []
+ >>> s([1, 2])
+ [1, 2]
+ >>> with raises(Invalid, 'contains duplicate items: [1]'):
+ ... s([1, 1, 2])
+ >>> with raises(Invalid, "contains duplicate items: ['one']"):
+ ... s(['one', 'two', 'one'])
+ >>> with raises(Invalid, regex="^contains unhashable elements: "):
+ ... s([set([1, 2]), set([3, 4])])
+ >>> s('abc')
+ 'abc'
+ >>> with raises(Invalid, regex="^contains duplicate items: "):
+ ... s('aabbc')
+ """
+
+ def __init__(self, msg=None):
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ set_v = set(v)
+ except TypeError as e:
+ raise TypeInvalid(
+ self.msg or 'contains unhashable elements: {0}'.format(e))
+ if len(set_v) != len(v):
+ seen = set()
+ dupes = list(set(x for x in v if x in seen or seen.add(x)))
+ raise Invalid(
+ self.msg or 'contains duplicate items: {0}'.format(dupes))
+ return v
+
+ def __repr__(self):
+ return 'Unique()'
+
+
+class Equal(object):
+ """Ensure that value matches target.
+
+ >>> s = Schema(Equal(1))
+ >>> s(1)
+ 1
+ >>> with raises(Invalid):
+ ... s(2)
+
+ Validators are not supported, match must be exact:
+
+ >>> s = Schema(Equal(str))
+ >>> with raises(Invalid):
+ ... s('foo')
+ """
+
+ def __init__(self, target, msg=None):
+ self.target = target
+ self.msg = msg
+
+ def __call__(self, v):
+ if v != self.target:
+ raise Invalid(self.msg or 'Values are not equal: value:{} != target:{}'.format(v, self.target))
+ return v
+
+ def __repr__(self):
+ return 'Equal({})'.format(self.target)
+
+
+class Unordered(object):
+    """Ensure the sequence contains the given values, in any order.
+
+ >>> s = Schema(Unordered([2, 1]))
+ >>> s([2, 1])
+ [2, 1]
+ >>> s([1, 2])
+ [1, 2]
+ >>> s = Schema(Unordered([str, int]))
+ >>> s(['foo', 1])
+ ['foo', 1]
+ >>> s([1, 'foo'])
+ [1, 'foo']
+ """
+
+ def __init__(self, validators, msg=None, **kwargs):
+ self.validators = validators
+ self.msg = msg
+ self._schemas = [Schema(val, **kwargs) for val in validators]
+
+ def __call__(self, v):
+ if not isinstance(v, (list, tuple)):
+ raise Invalid(self.msg or 'Value {} is not sequence!'.format(v))
+
+ if len(v) != len(self._schemas):
+ raise Invalid(self.msg or 'List lengths differ, value:{} != target:{}'.format(len(v), len(self._schemas)))
+
+ consumed = set()
+ missing = []
+ for index, value in enumerate(v):
+ found = False
+ for i, s in enumerate(self._schemas):
+ if i in consumed:
+ continue
+ try:
+ s(value)
+ except Invalid:
+ pass
+ else:
+ found = True
+ consumed.add(i)
+ break
+ if not found:
+ missing.append((index, value))
+
+ if len(missing) == 1:
+ el = missing[0]
+ raise Invalid(self.msg or 'Element #{} ({}) is not valid against any validator'.format(el[0], el[1]))
+ elif missing:
+ raise MultipleInvalid([Invalid(self.msg or 'Element #{} ({}) is not valid against any validator'.format(
+ el[0], el[1])) for el in missing])
+ return v
+
+ def __repr__(self):
+ return 'Unordered([{}])'.format(", ".join(repr(v) for v in self.validators))
+
+
+class Number(object):
+ """
+    Verify the total number of digits in a number (precision)
+    and the number of decimal places (scale).
+
+ :raises Invalid: If the value does not match the provided Precision and Scale.
+
+ >>> schema = Schema(Number(precision=6, scale=2))
+ >>> schema('1234.01')
+ '1234.01'
+ >>> schema = Schema(Number(precision=6, scale=2, yield_decimal=True))
+ >>> schema('1234.01')
+ Decimal('1234.01')
+ """
+
+ def __init__(self, precision=None, scale=None, msg=None, yield_decimal=False):
+ self.precision = precision
+ self.scale = scale
+ self.msg = msg
+ self.yield_decimal = yield_decimal
+
+ def __call__(self, v):
+ """
+        :param v: a number represented as a string
+        :return: the input value, or its Decimal form if yield_decimal is set
+ """
+ precision, scale, decimal_num = self._get_precision_scale(v)
+
+ if self.precision is not None and self.scale is not None and precision != self.precision\
+ and scale != self.scale:
+ raise Invalid(self.msg or "Precision must be equal to %s, and Scale must be equal to %s" % (self.precision,
+ self.scale))
+ else:
+ if self.precision is not None and precision != self.precision:
+ raise Invalid(self.msg or "Precision must be equal to %s" % self.precision)
+
+ if self.scale is not None and scale != self.scale:
+ raise Invalid(self.msg or "Scale must be equal to %s" % self.scale)
+
+ if self.yield_decimal:
+ return decimal_num
+ else:
+ return v
+
+ def __repr__(self):
+ return ('Number(precision=%s, scale=%s, msg=%s)' % (self.precision, self.scale, self.msg))
+
+ def _get_precision_scale(self, number):
+ """
+ :param number:
+ :return: tuple(precision, scale, decimal_number)
+ """
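+        # For example, Decimal('1234.01').as_tuple() has six digits and exponent -2,
+        # so this returns (6, 2, Decimal('1234.01')).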
+ try:
+ decimal_num = Decimal(number)
+ except InvalidOperation:
+ raise Invalid(self.msg or 'Value must be a number enclosed with string')
+
+ return (len(decimal_num.as_tuple().digits), -(decimal_num.as_tuple().exponent), decimal_num)
+
+
+class SomeOf(_WithSubValidators):
+    """Value must pass at least a given number of the listed validations.
+    Optionally, the number of validations that may pass can also be capped.
+
+ The output of each validator is passed as input to the next.
+
+ :param min_valid: Minimum number of valid schemas.
+ :param validators: a list of schemas or validators to match input against
+ :param max_valid: Maximum number of valid schemas.
+ :param msg: Message to deliver to user if validation fails.
+ :param kwargs: All other keyword arguments are passed to the sub-Schema constructors.
+
+ :raises NotEnoughValid: if the minimum number of validations isn't met
+    :raises TooManyValid: if more than the given number of validations pass
+
+ >>> validate = Schema(SomeOf(min_valid=2, validators=[Range(1, 5), Any(float, int), 6.6]))
+ >>> validate(6.6)
+ 6.6
+ >>> validate(3)
+ 3
+ >>> with raises(MultipleInvalid, 'value must be at most 5, not a valid value'):
+ ... validate(6.2)
+ """
+
+ def __init__(self, validators, min_valid=None, max_valid=None, **kwargs):
+ assert min_valid is not None or max_valid is not None, \
+ 'when using "%s" you should specify at least one of min_valid and max_valid' % (type(self).__name__,)
+ self.min_valid = min_valid or 0
+ self.max_valid = max_valid or len(validators)
+ super(SomeOf, self).__init__(*validators, **kwargs)
+
+ def _exec(self, funcs, v, path=None):
+ errors = []
+ funcs = list(funcs)
+ for func in funcs:
+ try:
+ if path is None:
+ v = func(v)
+ else:
+ v = func(path, v)
+ except Invalid as e:
+ errors.append(e)
+
+ passed_count = len(funcs) - len(errors)
+ if self.min_valid <= passed_count <= self.max_valid:
+ return v
+
+ msg = self.msg
+ if not msg:
+ msg = ', '.join(map(str, errors))
+
+ if passed_count > self.max_valid:
+ raise TooManyValid(msg)
+ raise NotEnoughValid(msg)
+
+ def __repr__(self):
+ return 'SomeOf(min_valid=%s, validators=[%s], max_valid=%s, msg=%r)' % (
+ self.min_valid, ", ".join(repr(v) for v in self.validators), self.max_valid, self.msg)
diff --git a/third_party/python/yamllint/LICENSE b/third_party/python/yamllint/LICENSE
new file mode 100644
index 0000000000..94a9ed024d
--- /dev/null
+++ b/third_party/python/yamllint/LICENSE
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ <program> Copyright (C) <year> <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/third_party/python/yamllint/MANIFEST.in b/third_party/python/yamllint/MANIFEST.in
new file mode 100644
index 0000000000..792a6720cb
--- /dev/null
+++ b/third_party/python/yamllint/MANIFEST.in
@@ -0,0 +1,4 @@
+include LICENSE
+include README.rst
+include docs/*
+include tests/*.py tests/rules/*.py tests/yaml-1.2-spec-examples/*
diff --git a/third_party/python/yamllint/PKG-INFO b/third_party/python/yamllint/PKG-INFO
new file mode 100644
index 0000000000..f3a33638b7
--- /dev/null
+++ b/third_party/python/yamllint/PKG-INFO
@@ -0,0 +1,30 @@
+Metadata-Version: 1.2
+Name: yamllint
+Version: 1.23.0
+Summary: A linter for YAML files.
+Home-page: https://github.com/adrienverge/yamllint
+Author: Adrien Vergé
+License: GPLv3
+Description: A linter for YAML files.
+
+        yamllint not only checks for syntax validity, but also for weirdnesses
+        like key repetition and cosmetic problems such as line length, trailing
+        spaces, indentation, etc.
+Keywords: yaml,lint,linter,syntax,checker
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Software Development
+Classifier: Topic :: Software Development :: Debuggers
+Classifier: Topic :: Software Development :: Quality Assurance
+Classifier: Topic :: Software Development :: Testing
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
diff --git a/third_party/python/yamllint/README.rst b/third_party/python/yamllint/README.rst
new file mode 100644
index 0000000000..89bc0e86f9
--- /dev/null
+++ b/third_party/python/yamllint/README.rst
@@ -0,0 +1,144 @@
+yamllint
+========
+
+A linter for YAML files.
+
+yamllint not only checks for syntax validity, but also for weirdnesses like key
+repetition and cosmetic problems such as line length, trailing spaces,
+indentation, etc.
+
+.. image::
+ https://travis-ci.org/adrienverge/yamllint.svg?branch=master
+ :target: https://travis-ci.org/adrienverge/yamllint
+ :alt: CI tests status
+.. image::
+ https://coveralls.io/repos/github/adrienverge/yamllint/badge.svg?branch=master
+ :target: https://coveralls.io/github/adrienverge/yamllint?branch=master
+ :alt: Code coverage status
+.. image:: https://readthedocs.org/projects/yamllint/badge/?version=latest
+ :target: https://yamllint.readthedocs.io/en/latest/?badge=latest
+ :alt: Documentation status
+
+Written in Python (compatible with Python 2 & 3).
+
+⚠ Python 2 upstream support stopped on January 1, 2020. yamllint will keep
+best-effort support for Python 2.7 until January 1, 2021. Past that date,
+yamllint will drop all Python 2-related code.
+
+Documentation
+-------------
+
+https://yamllint.readthedocs.io/
+
+Overview
+--------
+
+Screenshot
+^^^^^^^^^^
+
+.. image:: docs/screenshot.png
+ :alt: yamllint screenshot
+
+Installation
+^^^^^^^^^^^^
+
+Using pip, the Python package manager:
+
+.. code:: bash
+
+ pip install --user yamllint
+
+yamllint is also packaged for all major operating systems, see installation
+examples (``dnf``, ``apt-get``...) `in the documentation
+<https://yamllint.readthedocs.io/en/stable/quickstart.html>`_.
+
+Usage
+^^^^^
+
+.. code:: bash
+
+ # Lint one or more files
+ yamllint my_file.yml my_other_file.yaml ...
+
+.. code:: bash
+
+ # Lint all YAML files in a directory
+ yamllint .
+
+.. code:: bash
+
+ # Use a pre-defined lint configuration
+ yamllint -d relaxed file.yaml
+
+ # Use a custom lint configuration
+ yamllint -c /path/to/myconfig file-to-lint.yaml
+
+.. code:: bash
+
+ # Output a parsable format (for syntax checking in editors like Vim, emacs...)
+ yamllint -f parsable file.yaml
+
+`Read more in the complete documentation! <https://yamllint.readthedocs.io/>`_
+
+Features
+^^^^^^^^
+
+Here is a yamllint configuration file example:
+
+.. code:: yaml
+
+ extends: default
+
+ rules:
+ # 80 chars should be enough, but don't fail if a line is longer
+ line-length:
+ max: 80
+ level: warning
+
+ # don't bother me with this rule
+ indentation: disable
+
+Within a YAML file, special comments can be used to disable checks for a single
+line:
+
+.. code:: yaml
+
+ This line is waaaaaaaaaay too long # yamllint disable-line
+
+or for a whole block:
+
+.. code:: yaml
+
+ # yamllint disable rule:colons
+ - Lorem : ipsum
+ dolor : sit amet,
+ consectetur : adipiscing elit
+ # yamllint enable
+
+Specific files can be ignored (totally or for some rules only) using a
+``.gitignore``-style pattern:
+
+.. code:: yaml
+
+ # For all rules
+ ignore: |
+ *.dont-lint-me.yaml
+ /bin/
+ !/bin/*.lint-me-anyway.yaml
+
+ rules:
+ key-duplicates:
+ ignore: |
+ generated
+ *.template.yaml
+ trailing-spaces:
+ ignore: |
+ *.ignore-trailing-spaces.yaml
+ /ascii-art/*
+
+`Read more in the complete documentation! <https://yamllint.readthedocs.io/>`_
+
+License
+-------
+
+`GPL version 3 <LICENSE>`_
diff --git a/third_party/python/yamllint/setup.cfg b/third_party/python/yamllint/setup.cfg
new file mode 100644
index 0000000000..90a4cb0ec4
--- /dev/null
+++ b/third_party/python/yamllint/setup.cfg
@@ -0,0 +1,17 @@
+[bdist_wheel]
+universal = 1
+
+[flake8]
+import-order-style = pep8
+application-import-names = yamllint
+
+[build_sphinx]
+all-files = 1
+source-dir = docs
+build-dir = docs/_build
+warning-is-error = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/yamllint/setup.py b/third_party/python/yamllint/setup.py
new file mode 100644
index 0000000000..ffa2ee226b
--- /dev/null
+++ b/third_party/python/yamllint/setup.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+from setuptools import find_packages, setup
+
+from yamllint import (__author__, __license__,
+ APP_NAME, APP_VERSION, APP_DESCRIPTION)
+
+
+setup(
+ name=APP_NAME,
+ version=APP_VERSION,
+ author=__author__,
+ description=APP_DESCRIPTION.split('\n')[0],
+ long_description=APP_DESCRIPTION,
+ license=__license__,
+ keywords=['yaml', 'lint', 'linter', 'syntax', 'checker'],
+ url='https://github.com/adrienverge/yamllint',
+ python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Topic :: Software Development',
+ 'Topic :: Software Development :: Debuggers',
+ 'Topic :: Software Development :: Quality Assurance',
+ 'Topic :: Software Development :: Testing',
+ ],
+
+ packages=find_packages(exclude=['tests', 'tests.*']),
+ entry_points={'console_scripts': ['yamllint=yamllint.cli:run']},
+ package_data={'yamllint': ['conf/*.yaml']},
+ install_requires=['pathspec >=0.5.3', 'pyyaml'],
+ test_suite='tests',
+)
diff --git a/third_party/python/yamllint/yamllint/__init__.py b/third_party/python/yamllint/yamllint/__init__.py
new file mode 100644
index 0000000000..b78fe9c29e
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/__init__.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""A linter for YAML files.
+
+yamllint not only checks for syntax validity, but also for weirdnesses like key
+repetition and cosmetic problems such as line length, trailing spaces,
+indentation, etc."""
+
+
+APP_NAME = 'yamllint'
+APP_VERSION = '1.23.0'
+APP_DESCRIPTION = __doc__
+
+__author__ = u'Adrien Vergé'
+__copyright__ = u'Copyright 2016, Adrien Vergé'
+__license__ = 'GPLv3'
+__version__ = APP_VERSION
diff --git a/third_party/python/yamllint/yamllint/__main__.py b/third_party/python/yamllint/yamllint/__main__.py
new file mode 100644
index 0000000000..bc16534ec7
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/__main__.py
@@ -0,0 +1,4 @@
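+# Entry point for ``python -m yamllint``; equivalent to the ``yamllint``
+# console script declared in setup.py.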
+from yamllint.cli import run
+
+if __name__ == '__main__':
+ run()
diff --git a/third_party/python/yamllint/yamllint/cli.py b/third_party/python/yamllint/yamllint/cli.py
new file mode 100644
index 0000000000..e99fd2ca84
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/cli.py
@@ -0,0 +1,207 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import print_function
+
+import argparse
+import io
+import os
+import platform
+import sys
+
+from yamllint import APP_DESCRIPTION, APP_NAME, APP_VERSION
+from yamllint import linter
+from yamllint.config import YamlLintConfig, YamlLintConfigError
+from yamllint.linter import PROBLEM_LEVELS
+
+
+def find_files_recursively(items, conf):
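+    # Directories are walked recursively and only files matching the
+    # configured ``yaml-files`` patterns are yielded; paths given explicitly
+    # are yielded as-is.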
+ for item in items:
+ if os.path.isdir(item):
+ for root, dirnames, filenames in os.walk(item):
+ for f in filenames:
+ filepath = os.path.join(root, f)
+ if conf.is_yaml_file(filepath):
+ yield filepath
+ else:
+ yield item
+
+
+def supports_color():
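+    # Report color support: stdout must be a TTY, and on Windows either
+    # ANSICON must be set or TERM must be 'ANSI'.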
+ supported_platform = not (platform.system() == 'Windows' and not
+ ('ANSICON' in os.environ or
+ ('TERM' in os.environ and
+ os.environ['TERM'] == 'ANSI')))
+ return (supported_platform and
+ hasattr(sys.stdout, 'isatty') and sys.stdout.isatty())
+
+
+class Format(object):
+ @staticmethod
+ def parsable(problem, filename):
+ return ('%(file)s:%(line)s:%(column)s: [%(level)s] %(message)s' %
+ {'file': filename,
+ 'line': problem.line,
+ 'column': problem.column,
+ 'level': problem.level,
+ 'message': problem.message})
+
+ @staticmethod
+ def standard(problem, filename):
+ line = ' %d:%d' % (problem.line, problem.column)
+ line += max(12 - len(line), 0) * ' '
+ line += problem.level
+ line += max(21 - len(line), 0) * ' '
+ line += problem.desc
+ if problem.rule:
+ line += ' (%s)' % problem.rule
+ return line
+
+ @staticmethod
+ def standard_color(problem, filename):
+ line = ' \033[2m%d:%d\033[0m' % (problem.line, problem.column)
+ line += max(20 - len(line), 0) * ' '
+ if problem.level == 'warning':
+ line += '\033[33m%s\033[0m' % problem.level
+ else:
+ line += '\033[31m%s\033[0m' % problem.level
+ line += max(38 - len(line), 0) * ' '
+ line += problem.desc
+ if problem.rule:
+ line += ' \033[2m(%s)\033[0m' % problem.rule
+ return line
+
+
+def show_problems(problems, file, args_format, no_warn):
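+    # Print the problems found in one input and return the highest problem
+    # level encountered, which run() uses to compute the exit code.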
+ max_level = 0
+ first = True
+
+ for problem in problems:
+ max_level = max(max_level, PROBLEM_LEVELS[problem.level])
+ if no_warn and (problem.level != 'error'):
+ continue
+ if args_format == 'parsable':
+ print(Format.parsable(problem, file))
+ elif args_format == 'colored' or \
+ (args_format == 'auto' and supports_color()):
+ if first:
+ print('\033[4m%s\033[0m' % file)
+ first = False
+ print(Format.standard_color(problem, file))
+ else:
+ if first:
+ print(file)
+ first = False
+ print(Format.standard(problem, file))
+
+ if not first and args_format != 'parsable':
+ print('')
+
+ return max_level
+
+
+def run(argv=None):
+ parser = argparse.ArgumentParser(prog=APP_NAME,
+ description=APP_DESCRIPTION)
+ files_group = parser.add_mutually_exclusive_group(required=True)
+ files_group.add_argument('files', metavar='FILE_OR_DIR', nargs='*',
+ default=(),
+ help='files to check')
+ files_group.add_argument('-', action='store_true', dest='stdin',
+ help='read from standard input')
+ config_group = parser.add_mutually_exclusive_group()
+ config_group.add_argument('-c', '--config-file', dest='config_file',
+ action='store',
+ help='path to a custom configuration')
+ config_group.add_argument('-d', '--config-data', dest='config_data',
+ action='store',
+ help='custom configuration (as YAML source)')
+ parser.add_argument('-f', '--format',
+ choices=('parsable', 'standard', 'colored', 'auto'),
+ default='auto', help='format for parsing output')
+ parser.add_argument('-s', '--strict',
+ action='store_true',
+ help='return non-zero exit code on warnings '
+ 'as well as errors')
+ parser.add_argument('--no-warnings',
+ action='store_true',
+ help='output only error level problems')
+ parser.add_argument('-v', '--version', action='version',
+ version='{} {}'.format(APP_NAME, APP_VERSION))
+
+ args = parser.parse_args(argv)
+
+ # User-global config is supposed to be in ~/.config/yamllint/config
+ if 'XDG_CONFIG_HOME' in os.environ:
+ user_global_config = os.path.join(
+ os.environ['XDG_CONFIG_HOME'], 'yamllint', 'config')
+ else:
+ user_global_config = os.path.expanduser('~/.config/yamllint/config')
+
+ try:
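+        # Configuration lookup order: -d/--config-data, -c/--config-file,
+        # a .yamllint / .yamllint.yaml / .yamllint.yml file in the current
+        # directory, the user-global config, then the built-in "default".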
+ if args.config_data is not None:
+ if args.config_data != '' and ':' not in args.config_data:
+ args.config_data = 'extends: ' + args.config_data
+ conf = YamlLintConfig(content=args.config_data)
+ elif args.config_file is not None:
+ conf = YamlLintConfig(file=args.config_file)
+ elif os.path.isfile('.yamllint'):
+ conf = YamlLintConfig(file='.yamllint')
+ elif os.path.isfile('.yamllint.yaml'):
+ conf = YamlLintConfig(file='.yamllint.yaml')
+ elif os.path.isfile('.yamllint.yml'):
+ conf = YamlLintConfig(file='.yamllint.yml')
+ elif os.path.isfile(user_global_config):
+ conf = YamlLintConfig(file=user_global_config)
+ else:
+ conf = YamlLintConfig('extends: default')
+ except YamlLintConfigError as e:
+ print(e, file=sys.stderr)
+ sys.exit(-1)
+
+ max_level = 0
+
+ for file in find_files_recursively(args.files, conf):
+ filepath = file[2:] if file.startswith('./') else file
+ try:
+ with io.open(file, newline='') as f:
+ problems = linter.run(f, conf, filepath)
+ except EnvironmentError as e:
+ print(e, file=sys.stderr)
+ sys.exit(-1)
+ prob_level = show_problems(problems, file, args_format=args.format,
+ no_warn=args.no_warnings)
+ max_level = max(max_level, prob_level)
+
+ # read yaml from stdin
+ if args.stdin:
+ try:
+ problems = linter.run(sys.stdin, conf, '')
+ except EnvironmentError as e:
+ print(e, file=sys.stderr)
+ sys.exit(-1)
+ prob_level = show_problems(problems, 'stdin', args_format=args.format,
+ no_warn=args.no_warnings)
+ max_level = max(max_level, prob_level)
+
+ if max_level == PROBLEM_LEVELS['error']:
+ return_code = 1
+ elif max_level == PROBLEM_LEVELS['warning']:
+ return_code = 2 if args.strict else 0
+ else:
+ return_code = 0
+
+ sys.exit(return_code)
diff --git a/third_party/python/yamllint/yamllint/conf/default.yaml b/third_party/python/yamllint/yamllint/conf/default.yaml
new file mode 100644
index 0000000000..0720dede32
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/conf/default.yaml
@@ -0,0 +1,33 @@
+---
+
+yaml-files:
+ - '*.yaml'
+ - '*.yml'
+ - '.yamllint'
+
+rules:
+ braces: enable
+ brackets: enable
+ colons: enable
+ commas: enable
+ comments:
+ level: warning
+ comments-indentation:
+ level: warning
+ document-end: disable
+ document-start:
+ level: warning
+ empty-lines: enable
+ empty-values: disable
+ hyphens: enable
+ indentation: enable
+ key-duplicates: enable
+ key-ordering: disable
+ line-length: enable
+ new-line-at-end-of-file: enable
+ new-lines: enable
+ octal-values: disable
+ quoted-strings: disable
+ trailing-spaces: enable
+ truthy:
+ level: warning
diff --git a/third_party/python/yamllint/yamllint/conf/relaxed.yaml b/third_party/python/yamllint/yamllint/conf/relaxed.yaml
new file mode 100644
index 0000000000..83f5340c7f
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/conf/relaxed.yaml
@@ -0,0 +1,29 @@
+---
+
+extends: default
+
+rules:
+ braces:
+ level: warning
+ max-spaces-inside: 1
+ brackets:
+ level: warning
+ max-spaces-inside: 1
+ colons:
+ level: warning
+ commas:
+ level: warning
+ comments: disable
+ comments-indentation: disable
+ document-start: disable
+ empty-lines:
+ level: warning
+ hyphens:
+ level: warning
+ indentation:
+ level: warning
+ indent-sequences: consistent
+ line-length:
+ level: warning
+ allow-non-breakable-inline-mappings: true
+ truthy: disable
diff --git a/third_party/python/yamllint/yamllint/config.py b/third_party/python/yamllint/yamllint/config.py
new file mode 100644
index 0000000000..a955d8e62b
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/config.py
@@ -0,0 +1,205 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import os.path
+
+import pathspec
+import yaml
+
+import yamllint.rules
+
+
+class YamlLintConfigError(Exception):
+ pass
+
+
+class YamlLintConfig(object):
+ def __init__(self, content=None, file=None):
+ assert (content is None) ^ (file is None)
+
+ self.ignore = None
+
+ self.yaml_files = pathspec.PathSpec.from_lines(
+ 'gitwildmatch', ['*.yaml', '*.yml', '.yamllint'])
+
+ if file is not None:
+ with open(file) as f:
+ content = f.read()
+
+ self.parse(content)
+ self.validate()
+
+ def is_file_ignored(self, filepath):
+ return self.ignore and self.ignore.match_file(filepath)
+
+ def is_yaml_file(self, filepath):
+ return self.yaml_files.match_file(filepath)
+
+ def enabled_rules(self, filepath):
+ return [yamllint.rules.get(id) for id, val in self.rules.items()
+ if val is not False and (
+ filepath is None or 'ignore' not in val or
+ not val['ignore'].match_file(filepath))]
+
+ def extend(self, base_config):
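+        # Merge this configuration on top of ``base_config``: rule options
+        # defined here override those inherited from the base, and the merged
+        # rule set becomes self.rules.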
+ assert isinstance(base_config, YamlLintConfig)
+
+ for rule in self.rules:
+ if (isinstance(self.rules[rule], dict) and
+ rule in base_config.rules and
+ base_config.rules[rule] is not False):
+ base_config.rules[rule].update(self.rules[rule])
+ else:
+ base_config.rules[rule] = self.rules[rule]
+
+ self.rules = base_config.rules
+
+ if base_config.ignore is not None:
+ self.ignore = base_config.ignore
+
+ def parse(self, raw_content):
+ try:
+ conf = yaml.safe_load(raw_content)
+ except Exception as e:
+ raise YamlLintConfigError('invalid config: %s' % e)
+
+ if not isinstance(conf, dict):
+ raise YamlLintConfigError('invalid config: not a dict')
+
+ self.rules = conf.get('rules', {})
+ for rule in self.rules:
+ if self.rules[rule] == 'enable':
+ self.rules[rule] = {}
+ elif self.rules[rule] == 'disable':
+ self.rules[rule] = False
+
+ # Does this conf override another conf that we need to load?
+ if 'extends' in conf:
+ path = get_extended_config_file(conf['extends'])
+ base = YamlLintConfig(file=path)
+ try:
+ self.extend(base)
+ except Exception as e:
+ raise YamlLintConfigError('invalid config: %s' % e)
+
+ if 'ignore' in conf:
+ if not isinstance(conf['ignore'], str):
+ raise YamlLintConfigError(
+ 'invalid config: ignore should contain file patterns')
+ self.ignore = pathspec.PathSpec.from_lines(
+ 'gitwildmatch', conf['ignore'].splitlines())
+
+ if 'yaml-files' in conf:
+ if not (isinstance(conf['yaml-files'], list)
+ and all(isinstance(i, str) for i in conf['yaml-files'])):
+ raise YamlLintConfigError(
+ 'invalid config: yaml-files '
+ 'should be a list of file patterns')
+ self.yaml_files = pathspec.PathSpec.from_lines('gitwildmatch',
+ conf['yaml-files'])
+
+ def validate(self):
+ for id in self.rules:
+ try:
+ rule = yamllint.rules.get(id)
+ except Exception as e:
+ raise YamlLintConfigError('invalid config: %s' % e)
+
+ self.rules[id] = validate_rule_conf(rule, self.rules[id])
+
+
+def validate_rule_conf(rule, conf):
+ if conf is False: # disable
+ return False
+
+ if isinstance(conf, dict):
+ if ('ignore' in conf and
+ not isinstance(conf['ignore'], pathspec.pathspec.PathSpec)):
+ if not isinstance(conf['ignore'], str):
+ raise YamlLintConfigError(
+ 'invalid config: ignore should contain file patterns')
+ conf['ignore'] = pathspec.PathSpec.from_lines(
+ 'gitwildmatch', conf['ignore'].splitlines())
+
+ if 'level' not in conf:
+ conf['level'] = 'error'
+ elif conf['level'] not in ('error', 'warning'):
+ raise YamlLintConfigError(
+ 'invalid config: level should be "error" or "warning"')
+
+ options = getattr(rule, 'CONF', {})
+ options_default = getattr(rule, 'DEFAULT', {})
+ for optkey in conf:
+ if optkey in ('ignore', 'level'):
+ continue
+ if optkey not in options:
+ raise YamlLintConfigError(
+ 'invalid config: unknown option "%s" for rule "%s"' %
+ (optkey, rule.ID))
+ # Example: CONF = {option: (bool, 'mixed')}
+ # → {option: true} → {option: mixed}
+ if isinstance(options[optkey], tuple):
+ if (conf[optkey] not in options[optkey] and
+ type(conf[optkey]) not in options[optkey]):
+ raise YamlLintConfigError(
+ 'invalid config: option "%s" of "%s" should be in %s'
+ % (optkey, rule.ID, options[optkey]))
+ # Example: CONF = {option: ['flag1', 'flag2', int]}
+ # → {option: [flag1]} → {option: [42, flag1, flag2]}
+ elif isinstance(options[optkey], list):
+ if (type(conf[optkey]) is not list or
+ any(flag not in options[optkey] and
+ type(flag) not in options[optkey]
+ for flag in conf[optkey])):
+ raise YamlLintConfigError(
+ ('invalid config: option "%s" of "%s" should only '
+ 'contain values in %s')
+ % (optkey, rule.ID, str(options[optkey])))
+ # Example: CONF = {option: int}
+ # → {option: 42}
+ else:
+ if not isinstance(conf[optkey], options[optkey]):
+ raise YamlLintConfigError(
+ 'invalid config: option "%s" of "%s" should be %s'
+ % (optkey, rule.ID, options[optkey].__name__))
+ for optkey in options:
+ if optkey not in conf:
+ conf[optkey] = options_default[optkey]
+
+ if hasattr(rule, 'VALIDATE'):
+ res = rule.VALIDATE(conf)
+ if res:
+ raise YamlLintConfigError('invalid config: %s: %s' %
+ (rule.ID, res))
+ else:
+ raise YamlLintConfigError(('invalid config: rule "%s": should be '
+ 'either "enable", "disable" or a dict')
+ % rule.ID)
+
+ return conf
+
+
+def get_extended_config_file(name):
+ # Is it a standard conf shipped with yamllint...
+ if '/' not in name:
+ std_conf = os.path.join(os.path.dirname(os.path.realpath(__file__)),
+ 'conf', name + '.yaml')
+
+ if os.path.isfile(std_conf):
+ return std_conf
+
+ # or a custom conf on filesystem?
+ return name
diff --git a/third_party/python/yamllint/yamllint/linter.py b/third_party/python/yamllint/yamllint/linter.py
new file mode 100644
index 0000000000..c687f142ec
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/linter.py
@@ -0,0 +1,240 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import re
+
+import yaml
+
+from yamllint import parser
+
+
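+# Bidirectional mapping between numeric severity levels and their names, so
+# levels can be compared numerically and printed by name.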
+PROBLEM_LEVELS = {
+ 0: None,
+ 1: 'warning',
+ 2: 'error',
+ None: 0,
+ 'warning': 1,
+ 'error': 2,
+}
+
+
+class LintProblem(object):
+ """Represents a linting problem found by yamllint."""
+ def __init__(self, line, column, desc='<no description>', rule=None):
+ #: Line on which the problem was found (starting at 1)
+ self.line = line
+ #: Column on which the problem was found (starting at 1)
+ self.column = column
+ #: Human-readable description of the problem
+ self.desc = desc
+ #: Identifier of the rule that detected the problem
+ self.rule = rule
+ self.level = None
+
+ @property
+ def message(self):
+ if self.rule is not None:
+ return '{} ({})'.format(self.desc, self.rule)
+ return self.desc
+
+ def __eq__(self, other):
+ return (self.line == other.line and
+ self.column == other.column and
+ self.rule == other.rule)
+
+ def __lt__(self, other):
+ return (self.line < other.line or
+ (self.line == other.line and self.column < other.column))
+
+ def __repr__(self):
+ return '%d:%d: %s' % (self.line, self.column, self.message)
+
+
+def get_cosmetic_problems(buffer, conf, filepath):
+ rules = conf.enabled_rules(filepath)
+
+ # Split token rules from line rules
+ token_rules = [r for r in rules if r.TYPE == 'token']
+ comment_rules = [r for r in rules if r.TYPE == 'comment']
+ line_rules = [r for r in rules if r.TYPE == 'line']
+
+ context = {}
+ for rule in token_rules:
+ context[rule.ID] = {}
+
+ class DisableDirective:
+ def __init__(self):
+ self.rules = set()
+ self.all_rules = {r.ID for r in rules}
+
+ def process_comment(self, comment):
+ try:
+ comment = str(comment)
+ except UnicodeError:
+ return # this certainly wasn't a yamllint directive comment
+
+ if re.match(r'^# yamllint disable( rule:\S+)*\s*$', comment):
+ rules = [item[5:] for item in comment[18:].split(' ')][1:]
+ if len(rules) == 0:
+ self.rules = self.all_rules.copy()
+ else:
+ for id in rules:
+ if id in self.all_rules:
+ self.rules.add(id)
+
+ elif re.match(r'^# yamllint enable( rule:\S+)*\s*$', comment):
+ rules = [item[5:] for item in comment[17:].split(' ')][1:]
+ if len(rules) == 0:
+ self.rules.clear()
+ else:
+ for id in rules:
+ self.rules.discard(id)
+
+ def is_disabled_by_directive(self, problem):
+ return problem.rule in self.rules
+
+ class DisableLineDirective(DisableDirective):
+ def process_comment(self, comment):
+ try:
+ comment = str(comment)
+ except UnicodeError:
+ return # this certainly wasn't a yamllint directive comment
+
+ if re.match(r'^# yamllint disable-line( rule:\S+)*\s*$', comment):
+ rules = [item[5:] for item in comment[23:].split(' ')][1:]
+ if len(rules) == 0:
+ self.rules = self.all_rules.copy()
+ else:
+ for id in rules:
+ if id in self.all_rules:
+ self.rules.add(id)
+
+    # Use a cache to store problems and flush it only when an end of line is
+    # found. This allows yamllint directives to disable some rules on
+    # some lines.
+ cache = []
+ disabled = DisableDirective()
+ disabled_for_line = DisableLineDirective()
+ disabled_for_next_line = DisableLineDirective()
+
+ for elem in parser.token_or_comment_or_line_generator(buffer):
+ if isinstance(elem, parser.Token):
+ for rule in token_rules:
+ rule_conf = conf.rules[rule.ID]
+ for problem in rule.check(rule_conf,
+ elem.curr, elem.prev, elem.next,
+ elem.nextnext,
+ context[rule.ID]):
+ problem.rule = rule.ID
+ problem.level = rule_conf['level']
+ cache.append(problem)
+ elif isinstance(elem, parser.Comment):
+ for rule in comment_rules:
+ rule_conf = conf.rules[rule.ID]
+ for problem in rule.check(rule_conf, elem):
+ problem.rule = rule.ID
+ problem.level = rule_conf['level']
+ cache.append(problem)
+
+ disabled.process_comment(elem)
+ if elem.is_inline():
+ disabled_for_line.process_comment(elem)
+ else:
+ disabled_for_next_line.process_comment(elem)
+ elif isinstance(elem, parser.Line):
+ for rule in line_rules:
+ rule_conf = conf.rules[rule.ID]
+ for problem in rule.check(rule_conf, elem):
+ problem.rule = rule.ID
+ problem.level = rule_conf['level']
+ cache.append(problem)
+
+ # This is the last token/comment/line of this line, let's flush the
+ # problems found (but filter them according to the directives)
+ for problem in cache:
+ if not (disabled_for_line.is_disabled_by_directive(problem) or
+ disabled.is_disabled_by_directive(problem)):
+ yield problem
+
+ disabled_for_line = disabled_for_next_line
+ disabled_for_next_line = DisableLineDirective()
+ cache = []
+
+
+def get_syntax_error(buffer):
+ try:
+ list(yaml.parse(buffer, Loader=yaml.BaseLoader))
+ except yaml.error.MarkedYAMLError as e:
+ problem = LintProblem(e.problem_mark.line + 1,
+ e.problem_mark.column + 1,
+ 'syntax error: ' + e.problem + ' (syntax)')
+ problem.level = 'error'
+ return problem
+
+
+def _run(buffer, conf, filepath):
+ assert hasattr(buffer, '__getitem__'), \
+ '_run() argument must be a buffer, not a stream'
+
+ first_line = next(parser.line_generator(buffer)).content
+ if re.match(r'^#\s*yamllint disable-file\s*$', first_line):
+ return
+
+ # If the document contains a syntax error, save it and yield it at the
+ # right line
+ syntax_error = get_syntax_error(buffer)
+
+ for problem in get_cosmetic_problems(buffer, conf, filepath):
+ # Insert the syntax error (if any) at the right place...
+ if (syntax_error and syntax_error.line <= problem.line and
+ syntax_error.column <= problem.column):
+ yield syntax_error
+
+ # If there is already a yamllint error at the same place, discard
+ # it as it is probably redundant (and maybe it's just a 'warning',
+ # in which case the script won't even exit with a failure status).
+ if (syntax_error.line == problem.line and
+ syntax_error.column == problem.column):
+ syntax_error = None
+ continue
+
+ syntax_error = None
+
+ yield problem
+
+ if syntax_error:
+ yield syntax_error
+
+
+def run(input, conf, filepath=None):
+ """Lints a YAML source.
+
+ Returns a generator of LintProblem objects.
+
+ :param input: buffer, string or stream to read from
+ :param conf: yamllint configuration object
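+    :param filepath: optional path of the linted file, used to apply the
+                     configuration's per-file ``ignore`` patterns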
+ """
+ if conf.is_file_ignored(filepath):
+ return ()
+
+ if isinstance(input, (type(b''), type(u''))): # compat with Python 2 & 3
+ return _run(input, conf, filepath)
+ elif hasattr(input, 'read'): # Python 2's file or Python 3's io.IOBase
+ # We need to have everything in memory to parse correctly
+ content = input.read()
+ return _run(content, conf, filepath)
+ else:
+ raise TypeError('input should be a string or a stream')
diff --git a/third_party/python/yamllint/yamllint/parser.py b/third_party/python/yamllint/yamllint/parser.py
new file mode 100644
index 0000000000..de331f4729
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/parser.py
@@ -0,0 +1,161 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import yaml
+
+
+class Line(object):
+ def __init__(self, line_no, buffer, start, end):
+ self.line_no = line_no
+ self.start = start
+ self.end = end
+ self.buffer = buffer
+
+ @property
+ def content(self):
+ return self.buffer[self.start:self.end]
+
+
+class Token(object):
+ def __init__(self, line_no, curr, prev, next, nextnext):
+ self.line_no = line_no
+ self.curr = curr
+ self.prev = prev
+ self.next = next
+ self.nextnext = nextnext
+
+
+class Comment(object):
+ def __init__(self, line_no, column_no, buffer, pointer,
+ token_before=None, token_after=None, comment_before=None):
+ self.line_no = line_no
+ self.column_no = column_no
+ self.buffer = buffer
+ self.pointer = pointer
+ self.token_before = token_before
+ self.token_after = token_after
+ self.comment_before = comment_before
+
+ def __str__(self):
+ end = self.buffer.find('\n', self.pointer)
+ if end == -1:
+ end = self.buffer.find('\0', self.pointer)
+ if end != -1:
+ return self.buffer[self.pointer:end]
+ return self.buffer[self.pointer:]
+
+ def __eq__(self, other):
+ return (isinstance(other, Comment) and
+ self.line_no == other.line_no and
+ self.column_no == other.column_no and
+ str(self) == str(other))
+
+ def is_inline(self):
+ return (
+ not isinstance(self.token_before, yaml.StreamStartToken) and
+ self.line_no == self.token_before.end_mark.line + 1 and
+ # sometimes token end marks are on the next line
+ self.buffer[self.token_before.end_mark.pointer - 1] != '\n'
+ )
+
+
+def line_generator(buffer):
+ line_no = 1
+ cur = 0
+ next = buffer.find('\n')
+ while next != -1:
+ if next > 0 and buffer[next - 1] == '\r':
+ yield Line(line_no, buffer, start=cur, end=next - 1)
+ else:
+ yield Line(line_no, buffer, start=cur, end=next)
+ cur = next + 1
+ next = buffer.find('\n', cur)
+ line_no += 1
+
+ yield Line(line_no, buffer, start=cur, end=len(buffer))
+
+
+def comments_between_tokens(token1, token2):
+ """Find all comments between two tokens"""
+ if token2 is None:
+ buf = token1.end_mark.buffer[token1.end_mark.pointer:]
+ elif (token1.end_mark.line == token2.start_mark.line and
+ not isinstance(token1, yaml.StreamStartToken) and
+ not isinstance(token2, yaml.StreamEndToken)):
+ return
+ else:
+ buf = token1.end_mark.buffer[token1.end_mark.pointer:
+ token2.start_mark.pointer]
+
+ line_no = token1.end_mark.line + 1
+ column_no = token1.end_mark.column + 1
+ pointer = token1.end_mark.pointer
+
+ comment_before = None
+ for line in buf.split('\n'):
+ pos = line.find('#')
+ if pos != -1:
+ comment = Comment(line_no, column_no + pos,
+ token1.end_mark.buffer, pointer + pos,
+ token1, token2, comment_before)
+ yield comment
+
+ comment_before = comment
+
+ pointer += len(line) + 1
+ line_no += 1
+ column_no = 1
+
+
+def token_or_comment_generator(buffer):
+ yaml_loader = yaml.BaseLoader(buffer)
+
+ try:
+ prev = None
+ curr = yaml_loader.get_token()
+ while curr is not None:
+ next = yaml_loader.get_token()
+ nextnext = (yaml_loader.peek_token()
+ if yaml_loader.check_token() else None)
+
+ yield Token(curr.start_mark.line + 1, curr, prev, next, nextnext)
+
+ for comment in comments_between_tokens(curr, next):
+ yield comment
+
+ prev = curr
+ curr = next
+
+ except yaml.scanner.ScannerError:
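+        # Syntax errors stop token generation here; they are reported
+        # separately by linter.get_syntax_error().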
+ pass
+
+
+def token_or_comment_or_line_generator(buffer):
+ """Generator that mixes tokens and lines, ordering them by line number"""
+ tok_or_com_gen = token_or_comment_generator(buffer)
+ line_gen = line_generator(buffer)
+
+ tok_or_com = next(tok_or_com_gen, None)
+ line = next(line_gen, None)
+
+ while tok_or_com is not None or line is not None:
+ if tok_or_com is None or (line is not None and
+ tok_or_com.line_no > line.line_no):
+ yield line
+ line = next(line_gen, None)
+ else:
+ yield tok_or_com
+ tok_or_com = next(tok_or_com_gen, None)
diff --git a/third_party/python/yamllint/yamllint/rules/__init__.py b/third_party/python/yamllint/yamllint/rules/__init__.py
new file mode 100644
index 0000000000..a084d6ee16
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/__init__.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+from yamllint.rules import (
+ braces,
+ brackets,
+ colons,
+ commas,
+ comments,
+ comments_indentation,
+ document_end,
+ document_start,
+ empty_lines,
+ empty_values,
+ hyphens,
+ indentation,
+ key_duplicates,
+ key_ordering,
+ line_length,
+ new_line_at_end_of_file,
+ new_lines,
+ octal_values,
+ quoted_strings,
+ trailing_spaces,
+ truthy,
+)
+
+_RULES = {
+ braces.ID: braces,
+ brackets.ID: brackets,
+ colons.ID: colons,
+ commas.ID: commas,
+ comments.ID: comments,
+ comments_indentation.ID: comments_indentation,
+ document_end.ID: document_end,
+ document_start.ID: document_start,
+ empty_lines.ID: empty_lines,
+ empty_values.ID: empty_values,
+ hyphens.ID: hyphens,
+ indentation.ID: indentation,
+ key_duplicates.ID: key_duplicates,
+ key_ordering.ID: key_ordering,
+ line_length.ID: line_length,
+ new_line_at_end_of_file.ID: new_line_at_end_of_file,
+ new_lines.ID: new_lines,
+ octal_values.ID: octal_values,
+ quoted_strings.ID: quoted_strings,
+ trailing_spaces.ID: trailing_spaces,
+ truthy.ID: truthy,
+}
+
+
+def get(id):
+ if id not in _RULES:
+ raise ValueError('no such rule: "%s"' % id)
+
+ return _RULES[id]
diff --git a/third_party/python/yamllint/yamllint/rules/braces.py b/third_party/python/yamllint/yamllint/rules/braces.py
new file mode 100644
index 0000000000..654b36d330
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/braces.py
@@ -0,0 +1,143 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to control the number of spaces inside braces (``{`` and ``}``).
+
+.. rubric:: Options
+
+* ``min-spaces-inside`` defines the minimal number of spaces required inside
+ braces.
+* ``max-spaces-inside`` defines the maximal number of spaces allowed inside
+ braces.
+* ``min-spaces-inside-empty`` defines the minimal number of spaces required
+ inside empty braces.
+* ``max-spaces-inside-empty`` defines the maximal number of spaces allowed
+ inside empty braces.
+
+.. rubric:: Examples
+
+#. With ``braces: {min-spaces-inside: 0, max-spaces-inside: 0}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ object: {key1: 4, key2: 8}
+
+ the following code snippet would **FAIL**:
+ ::
+
+ object: { key1: 4, key2: 8 }
+
+#. With ``braces: {min-spaces-inside: 1, max-spaces-inside: 3}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ object: { key1: 4, key2: 8 }
+
+ the following code snippet would **PASS**:
+ ::
+
+     object: {  key1: 4, key2: 8   }
+
+ the following code snippet would **FAIL**:
+ ::
+
+     object: {    key1: 4, key2: 8   }
+
+ the following code snippet would **FAIL**:
+ ::
+
+ object: {key1: 4, key2: 8 }
+
+#. With ``braces: {min-spaces-inside-empty: 0, max-spaces-inside-empty: 0}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ object: {}
+
+ the following code snippet would **FAIL**:
+ ::
+
+ object: { }
+
+#. With ``braces: {min-spaces-inside-empty: 1, max-spaces-inside-empty: -1}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ object: { }
+
+ the following code snippet would **FAIL**:
+ ::
+
+ object: {}
+"""
+
+
+import yaml
+
+from yamllint.rules.common import spaces_after, spaces_before
+
+
+ID = 'braces'
+TYPE = 'token'
+CONF = {'min-spaces-inside': int,
+ 'max-spaces-inside': int,
+ 'min-spaces-inside-empty': int,
+ 'max-spaces-inside-empty': int}
+DEFAULT = {'min-spaces-inside': 0,
+ 'max-spaces-inside': 0,
+ 'min-spaces-inside-empty': -1,
+ 'max-spaces-inside-empty': -1}
+
+
+def check(conf, token, prev, next, nextnext, context):
+ if (isinstance(token, yaml.FlowMappingStartToken) and
+ isinstance(next, yaml.FlowMappingEndToken)):
+ problem = spaces_after(token, prev, next,
+ min=(conf['min-spaces-inside-empty']
+ if conf['min-spaces-inside-empty'] != -1
+ else conf['min-spaces-inside']),
+ max=(conf['max-spaces-inside-empty']
+ if conf['max-spaces-inside-empty'] != -1
+ else conf['max-spaces-inside']),
+ min_desc='too few spaces inside empty braces',
+ max_desc='too many spaces inside empty braces')
+ if problem is not None:
+ yield problem
+
+ elif isinstance(token, yaml.FlowMappingStartToken):
+ problem = spaces_after(token, prev, next,
+ min=conf['min-spaces-inside'],
+ max=conf['max-spaces-inside'],
+ min_desc='too few spaces inside braces',
+ max_desc='too many spaces inside braces')
+ if problem is not None:
+ yield problem
+
+ elif (isinstance(token, yaml.FlowMappingEndToken) and
+ (prev is None or
+ not isinstance(prev, yaml.FlowMappingStartToken))):
+ problem = spaces_before(token, prev, next,
+ min=conf['min-spaces-inside'],
+ max=conf['max-spaces-inside'],
+ min_desc='too few spaces inside braces',
+ max_desc='too many spaces inside braces')
+ if problem is not None:
+ yield problem
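
Token rules such as ``braces`` are driven with a sliding window of PyYAML
tokens (``token``, ``prev``, ``next``, ``nextnext``) plus a per-rule
``context`` dict. A minimal sketch of that loop, feeding the rule by hand with
its documented defaults (illustrative only, not part of the vendored sources;
the sample buffer and variable names are assumptions)::

    import yaml

    from yamllint.rules import braces

    conf = {'min-spaces-inside': 0, 'max-spaces-inside': 0,
            'min-spaces-inside-empty': -1, 'max-spaces-inside-empty': -1}
    buf = 'object: { key1: 4, key2: 8 }\n'

    tokens = list(yaml.scan(buf))
    for i, token in enumerate(tokens):
        prev = tokens[i - 1] if i > 0 else None
        nxt = tokens[i + 1] if i + 1 < len(tokens) else None
        nextnext = tokens[i + 2] if i + 2 < len(tokens) else None
        # check() yields a LintProblem for the offending '{' and '}'
        for problem in braces.check(conf, token, prev, nxt, nextnext, {}):
            print(problem.line, problem.column, problem.desc)
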
diff --git a/third_party/python/yamllint/yamllint/rules/brackets.py b/third_party/python/yamllint/yamllint/rules/brackets.py
new file mode 100644
index 0000000000..b54c5154aa
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/brackets.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to control the number of spaces inside brackets (``[`` and
+``]``).
+
+.. rubric:: Options
+
+* ``min-spaces-inside`` defines the minimal number of spaces required inside
+ brackets.
+* ``max-spaces-inside`` defines the maximal number of spaces allowed inside
+ brackets.
+* ``min-spaces-inside-empty`` defines the minimal number of spaces required
+ inside empty brackets.
+* ``max-spaces-inside-empty`` defines the maximal number of spaces allowed
+ inside empty brackets.
+
+.. rubric:: Examples
+
+#. With ``brackets: {min-spaces-inside: 0, max-spaces-inside: 0}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ object: [1, 2, abc]
+
+ the following code snippet would **FAIL**:
+ ::
+
+ object: [ 1, 2, abc ]
+
+#. With ``brackets: {min-spaces-inside: 1, max-spaces-inside: 3}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ object: [ 1, 2, abc ]
+
+ the following code snippet would **PASS**:
+ ::
+
+ object: [  1, 2, abc   ]
+
+ the following code snippet would **FAIL**:
+ ::
+
+ object: [    1, 2, abc     ]
+
+ the following code snippet would **FAIL**:
+ ::
+
+ object: [1, 2, abc ]
+
+#. With ``brackets: {min-spaces-inside-empty: 0, max-spaces-inside-empty: 0}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ object: []
+
+ the following code snippet would **FAIL**:
+ ::
+
+ object: [ ]
+
+#. With ``brackets: {min-spaces-inside-empty: 1, max-spaces-inside-empty: -1}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ object: [ ]
+
+ the following code snippet would **FAIL**:
+ ::
+
+ object: []
+"""
+
+
+import yaml
+
+from yamllint.rules.common import spaces_after, spaces_before
+
+
+ID = 'brackets'
+TYPE = 'token'
+CONF = {'min-spaces-inside': int,
+ 'max-spaces-inside': int,
+ 'min-spaces-inside-empty': int,
+ 'max-spaces-inside-empty': int}
+DEFAULT = {'min-spaces-inside': 0,
+ 'max-spaces-inside': 0,
+ 'min-spaces-inside-empty': -1,
+ 'max-spaces-inside-empty': -1}
+
+
+def check(conf, token, prev, next, nextnext, context):
+ if (isinstance(token, yaml.FlowSequenceStartToken) and
+ isinstance(next, yaml.FlowSequenceEndToken)):
+ problem = spaces_after(token, prev, next,
+ min=(conf['min-spaces-inside-empty']
+ if conf['min-spaces-inside-empty'] != -1
+ else conf['min-spaces-inside']),
+ max=(conf['max-spaces-inside-empty']
+ if conf['max-spaces-inside-empty'] != -1
+ else conf['max-spaces-inside']),
+ min_desc='too few spaces inside empty brackets',
+ max_desc=('too many spaces inside empty '
+ 'brackets'))
+ if problem is not None:
+ yield problem
+
+ elif isinstance(token, yaml.FlowSequenceStartToken):
+ problem = spaces_after(token, prev, next,
+ min=conf['min-spaces-inside'],
+ max=conf['max-spaces-inside'],
+ min_desc='too few spaces inside brackets',
+ max_desc='too many spaces inside brackets')
+ if problem is not None:
+ yield problem
+
+ elif (isinstance(token, yaml.FlowSequenceEndToken) and
+ (prev is None or
+ not isinstance(prev, yaml.FlowSequenceStartToken))):
+ problem = spaces_before(token, prev, next,
+ min=conf['min-spaces-inside'],
+ max=conf['max-spaces-inside'],
+ min_desc='too few spaces inside brackets',
+ max_desc='too many spaces inside brackets')
+ if problem is not None:
+ yield problem
diff --git a/third_party/python/yamllint/yamllint/rules/colons.py b/third_party/python/yamllint/yamllint/rules/colons.py
new file mode 100644
index 0000000000..1a63cadab6
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/colons.py
@@ -0,0 +1,105 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to control the number of spaces before and after colons (``:``).
+
+.. rubric:: Options
+
+* ``max-spaces-before`` defines the maximal number of spaces allowed before
+ colons (use ``-1`` to disable).
+* ``max-spaces-after`` defines the maximal number of spaces allowed after
+ colons (use ``-1`` to disable).
+
+.. rubric:: Examples
+
+#. With ``colons: {max-spaces-before: 0, max-spaces-after: 1}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ object:
+ - a
+ - b
+ key: value
+
+#. With ``colons: {max-spaces-before: 1}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ object :
+ - a
+ - b
+
+ the following code snippet would **FAIL**:
+ ::
+
+ object  :
+ - a
+ - b
+
+#. With ``colons: {max-spaces-after: 2}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ first:  1
+ second: 2
+ third:  3
+
+ the following code snippet would **FAIL**:
+ ::
+
+ first: 1
+ 2nd:       2
+ third: 3
+"""
+
+
+import yaml
+
+from yamllint.rules.common import is_explicit_key, spaces_after, spaces_before
+
+
+ID = 'colons'
+TYPE = 'token'
+CONF = {'max-spaces-before': int,
+ 'max-spaces-after': int}
+DEFAULT = {'max-spaces-before': 0,
+ 'max-spaces-after': 1}
+
+
+def check(conf, token, prev, next, nextnext, context):
+ if isinstance(token, yaml.ValueToken):
+ problem = spaces_before(token, prev, next,
+ max=conf['max-spaces-before'],
+ max_desc='too many spaces before colon')
+ if problem is not None:
+ yield problem
+
+ problem = spaces_after(token, prev, next,
+ max=conf['max-spaces-after'],
+ max_desc='too many spaces after colon')
+ if problem is not None:
+ yield problem
+
+ if isinstance(token, yaml.KeyToken) and is_explicit_key(token):
+ problem = spaces_after(token, prev, next,
+ max=conf['max-spaces-after'],
+ max_desc='too many spaces after question mark')
+ if problem is not None:
+ yield problem
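
A hedged end-to-end sketch of this rule through yamllint's public API
(``yamllint.linter.run`` with a ``YamlLintConfig``); the input string is an
assumption for illustration::

    from yamllint import linter
    from yamllint.config import YamlLintConfig

    conf = YamlLintConfig('rules: {colons: {max-spaces-before: 0, max-spaces-after: 1}}')
    # 'object  : value' has two spaces before the colon and should be reported
    for p in linter.run('object  : value\n', conf):
        print(p.line, p.column, p.desc)
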
diff --git a/third_party/python/yamllint/yamllint/rules/commas.py b/third_party/python/yamllint/yamllint/rules/commas.py
new file mode 100644
index 0000000000..bb73044545
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/commas.py
@@ -0,0 +1,131 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to control the number of spaces before and after commas (``,``).
+
+.. rubric:: Options
+
+* ``max-spaces-before`` defines the maximal number of spaces allowed before
+ commas (use ``-1`` to disable).
+* ``min-spaces-after`` defines the minimal number of spaces required after
+ commas.
+* ``max-spaces-after`` defines the maximal number of spaces allowed after
+ commas (use ``-1`` to disable).
+
+.. rubric:: Examples
+
+#. With ``commas: {max-spaces-before: 0}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ strange var:
+ [10, 20, 30, {x: 1, y: 2}]
+
+ the following code snippet would **FAIL**:
+ ::
+
+ strange var:
+ [10, 20 , 30, {x: 1, y: 2}]
+
+#. With ``commas: {max-spaces-before: 2}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ strange var:
+ [10  , 20 , 30,  {x: 1  , y: 2}]
+
+#. With ``commas: {max-spaces-before: -1}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ strange var:
+ [10,
+ 20 , 30
+ , {x: 1, y: 2}]
+
+#. With ``commas: {min-spaces-after: 1, max-spaces-after: 1}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ strange var:
+ [10, 20, 30, {x: 1, y: 2}]
+
+ the following code snippet would **FAIL**:
+ ::
+
+ strange var:
+ [10, 20,30, {x: 1, y: 2}]
+
+#. With ``commas: {min-spaces-after: 1, max-spaces-after: 3}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ strange var:
+ [10, 20, 30, {x: 1, y: 2}]
+
+#. With ``commas: {min-spaces-after: 0, max-spaces-after: 1}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ strange var:
+ [10, 20,30, {x: 1, y: 2}]
+"""
+
+
+import yaml
+
+from yamllint.linter import LintProblem
+from yamllint.rules.common import spaces_after, spaces_before
+
+
+ID = 'commas'
+TYPE = 'token'
+CONF = {'max-spaces-before': int,
+ 'min-spaces-after': int,
+ 'max-spaces-after': int}
+DEFAULT = {'max-spaces-before': 0,
+ 'min-spaces-after': 1,
+ 'max-spaces-after': 1}
+
+
+def check(conf, token, prev, next, nextnext, context):
+ if isinstance(token, yaml.FlowEntryToken):
+ if (prev is not None and conf['max-spaces-before'] != -1 and
+ prev.end_mark.line < token.start_mark.line):
+ yield LintProblem(token.start_mark.line + 1,
+ max(1, token.start_mark.column),
+ 'too many spaces before comma')
+ else:
+ problem = spaces_before(token, prev, next,
+ max=conf['max-spaces-before'],
+ max_desc='too many spaces before comma')
+ if problem is not None:
+ yield problem
+
+ problem = spaces_after(token, prev, next,
+ min=conf['min-spaces-after'],
+ max=conf['max-spaces-after'],
+ min_desc='too few spaces after comma',
+ max_desc='too many spaces after comma')
+ if problem is not None:
+ yield problem
diff --git a/third_party/python/yamllint/yamllint/rules/comments.py b/third_party/python/yamllint/yamllint/rules/comments.py
new file mode 100644
index 0000000000..0122838f61
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/comments.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to control the position and formatting of comments.
+
+.. rubric:: Options
+
+* Use ``require-starting-space`` to require a space character right after the
+ ``#``. Set to ``true`` to enable, ``false`` to disable.
+* Use ``ignore-shebangs`` to ignore a
+ `shebang <https://en.wikipedia.org/wiki/Shebang_(Unix)>`_ at the beginning of
+ the file when ``require-starting-space`` is set.
+* ``min-spaces-from-content`` is used to visually separate inline comments from
+ content. It defines the minimal required number of spaces between a comment
+ and its preceding content.
+
+.. rubric:: Examples
+
+#. With ``comments: {require-starting-space: true}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ # This sentence
+ # is a block comment
+
+ the following code snippet would **PASS**:
+ ::
+
+ ##############################
+ ## This is some documentation
+
+ the following code snippet would **FAIL**:
+ ::
+
+ #This sentence
+ #is a block comment
+
+#. With ``comments: {min-spaces-from-content: 2}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ x = 2 ^ 127 - 1  # Mersenne prime number
+
+ the following code snippet would **FAIL**:
+ ::
+
+ x = 2 ^ 127 - 1 # Mersenne prime number
+"""
+
+
+import re
+
+from yamllint.linter import LintProblem
+
+
+ID = 'comments'
+TYPE = 'comment'
+CONF = {'require-starting-space': bool,
+ 'ignore-shebangs': bool,
+ 'min-spaces-from-content': int}
+DEFAULT = {'require-starting-space': True,
+ 'ignore-shebangs': True,
+ 'min-spaces-from-content': 2}
+
+
+def check(conf, comment):
+ if (conf['min-spaces-from-content'] != -1 and comment.is_inline() and
+ comment.pointer - comment.token_before.end_mark.pointer <
+ conf['min-spaces-from-content']):
+ yield LintProblem(comment.line_no, comment.column_no,
+ 'too few spaces before comment')
+
+ if conf['require-starting-space']:
+ text_start = comment.pointer + 1
+ while (comment.buffer[text_start] == '#' and
+ text_start < len(comment.buffer)):
+ text_start += 1
+ if text_start < len(comment.buffer):
+ if (conf['ignore-shebangs'] and
+ comment.line_no == 1 and
+ comment.column_no == 1 and
+ re.match(r'^!\S', comment.buffer[text_start:])):
+ return
+ elif comment.buffer[text_start] not in (' ', '\n', '\0'):
+ column = comment.column_no + text_start - comment.pointer
+ yield LintProblem(comment.line_no,
+ column,
+ 'missing starting space in comment')
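
A short usage sketch through the public linter API, assuming the option names
documented above (illustrative only; the sample buffer is an assumption)::

    from yamllint import linter
    from yamllint.config import YamlLintConfig

    conf = YamlLintConfig('rules: {comments: {require-starting-space: true,'
                          ' min-spaces-from-content: 2}}')
    buf = '#no leading space\nkey: value # only one space before this comment\n'
    for p in linter.run(buf, conf):
        print(p.line, p.column, p.desc)
    # expected: 'missing starting space in comment' on line 1 and
    # 'too few spaces before comment' on line 2
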
diff --git a/third_party/python/yamllint/yamllint/rules/comments_indentation.py b/third_party/python/yamllint/yamllint/rules/comments_indentation.py
new file mode 100644
index 0000000000..22ab55d6d3
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/comments_indentation.py
@@ -0,0 +1,139 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to force comments to be indented like content.
+
+.. rubric:: Examples
+
+#. With ``comments-indentation: {}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ # Fibonacci
+ [0, 1, 1, 2, 3, 5]
+
+ the following code snippet would **FAIL**:
+ ::
+
+    # Fibonacci
+ [0, 1, 1, 2, 3, 5]
+
+ the following code snippet would **PASS**:
+ ::
+
+ list:
+ - 2
+ - 3
+ # - 4
+ - 5
+
+ the following code snippet would **FAIL**:
+ ::
+
+ list:
+ - 2
+ - 3
+   # - 4
+ - 5
+
+ the following code snippet would **PASS**:
+ ::
+
+ # This is the first object
+ obj1:
+ - item A
+ # - item B
+ # This is the second object
+ obj2: []
+
+ the following code snippet would **PASS**:
+ ::
+
+ # This sentence
+ # is a block comment
+
+ the following code snippet would **FAIL**:
+ ::
+
+ # This sentence
+  # is a block comment
+"""
+
+
+import yaml
+
+from yamllint.linter import LintProblem
+from yamllint.rules.common import get_line_indent
+
+
+ID = 'comments-indentation'
+TYPE = 'comment'
+
+
+# Case A:
+#
+# prev: line:
+# # commented line
+# current: line
+#
+# Case B:
+#
+# prev: line
+# # commented line 1
+# # commented line 2
+# current: line
+
+def check(conf, comment):
+ # Only check block comments
+ if (not isinstance(comment.token_before, yaml.StreamStartToken) and
+ comment.token_before.end_mark.line + 1 == comment.line_no):
+ return
+
+ next_line_indent = comment.token_after.start_mark.column
+ if isinstance(comment.token_after, yaml.StreamEndToken):
+ next_line_indent = 0
+
+ if isinstance(comment.token_before, yaml.StreamStartToken):
+ prev_line_indent = 0
+ else:
+ prev_line_indent = get_line_indent(comment.token_before)
+
+ # In the following case only the next line indent is valid:
+ # list:
+ # # comment
+ # - 1
+ # - 2
+ if prev_line_indent <= next_line_indent:
+ prev_line_indent = next_line_indent
+
+ # If two indents are valid but a previous comment went back to normal
+ # indent, force the next ones to do the same. In other words, avoid this:
+ # list:
+ # - 1
+ # # comment on valid indent (0)
+ # # comment on valid indent (4)
+ # other-list:
+ # - 2
+ if (comment.comment_before is not None and
+ not comment.comment_before.is_inline()):
+ prev_line_indent = comment.comment_before.column_no - 1
+
+ if (comment.column_no - 1 != prev_line_indent and
+ comment.column_no - 1 != next_line_indent):
+ yield LintProblem(comment.line_no, comment.column_no,
+ 'comment not indented like content')
diff --git a/third_party/python/yamllint/yamllint/rules/common.py b/third_party/python/yamllint/yamllint/rules/common.py
new file mode 100644
index 0000000000..989345965c
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/common.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import string
+
+import yaml
+
+from yamllint.linter import LintProblem
+
+
+def spaces_after(token, prev, next, min=-1, max=-1,
+ min_desc=None, max_desc=None):
+ if next is not None and token.end_mark.line == next.start_mark.line:
+ spaces = next.start_mark.pointer - token.end_mark.pointer
+ if max != - 1 and spaces > max:
+ return LintProblem(token.start_mark.line + 1,
+ next.start_mark.column, max_desc)
+ elif min != - 1 and spaces < min:
+ return LintProblem(token.start_mark.line + 1,
+ next.start_mark.column + 1, min_desc)
+
+
+def spaces_before(token, prev, next, min=-1, max=-1,
+ min_desc=None, max_desc=None):
+ if (prev is not None and prev.end_mark.line == token.start_mark.line and
+ # Discard tokens (only scalars?) that end at the start of next line
+ (prev.end_mark.pointer == 0 or
+ prev.end_mark.buffer[prev.end_mark.pointer - 1] != '\n')):
+ spaces = token.start_mark.pointer - prev.end_mark.pointer
+ if max != - 1 and spaces > max:
+ return LintProblem(token.start_mark.line + 1,
+ token.start_mark.column, max_desc)
+ elif min != - 1 and spaces < min:
+ return LintProblem(token.start_mark.line + 1,
+ token.start_mark.column + 1, min_desc)
+
+
+def get_line_indent(token):
+ """Finds the indent of the line the token starts in."""
+ start = token.start_mark.buffer.rfind('\n', 0,
+ token.start_mark.pointer) + 1
+ content = start
+ while token.start_mark.buffer[content] == ' ':
+ content += 1
+ return content - start
+
+
+def get_real_end_line(token):
+ """Finds the line on which the token really ends.
+
+ With pyyaml, scalar tokens often end on a next line.
+ """
+ end_line = token.end_mark.line + 1
+
+ if not isinstance(token, yaml.ScalarToken):
+ return end_line
+
+ pos = token.end_mark.pointer - 1
+ while (pos >= token.start_mark.pointer - 1 and
+ token.end_mark.buffer[pos] in string.whitespace):
+ if token.end_mark.buffer[pos] == '\n':
+ end_line -= 1
+ pos -= 1
+ return end_line
+
+
+def is_explicit_key(token):
+ # explicit key:
+ # ? key
+ # : v
+ # or
+ # ?
+ # key
+ # : v
+ return (token.start_mark.pointer < token.end_mark.pointer and
+ token.start_mark.buffer[token.start_mark.pointer] == '?')
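
The helpers above rely on PyYAML mark pointers: the gap between one token's
``end_mark.pointer`` and the next token's ``start_mark.pointer`` is the number
of characters separating them on the line. A tiny illustration of that
arithmetic (the buffer is an assumed example)::

    import yaml

    tokens = list(yaml.scan('key:   value\n'))
    value_token = [t for t in tokens if isinstance(t, yaml.ValueToken)][0]
    scalar = [t for t in tokens if isinstance(t, yaml.ScalarToken)][-1]
    # Three spaces lie between ':' and 'value'; this is the quantity that
    # spaces_after() compares against the min/max options.
    print(scalar.start_mark.pointer - value_token.end_mark.pointer)  # 3
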
diff --git a/third_party/python/yamllint/yamllint/rules/document_end.py b/third_party/python/yamllint/yamllint/rules/document_end.py
new file mode 100644
index 0000000000..e98aac1d12
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/document_end.py
@@ -0,0 +1,107 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to require or forbid the document end marker (``...``).
+
+.. rubric:: Options
+
+* Set ``present`` to ``true`` when the document end marker is required, or to
+ ``false`` when it is forbidden.
+
+.. rubric:: Examples
+
+#. With ``document-end: {present: true}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ ---
+ this:
+ is: [a, document]
+ ...
+ ---
+ - this
+ - is: another one
+ ...
+
+ the following code snippet would **FAIL**:
+ ::
+
+ ---
+ this:
+ is: [a, document]
+ ---
+ - this
+ - is: another one
+ ...
+
+#. With ``document-end: {present: false}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ ---
+ this:
+ is: [a, document]
+ ---
+ - this
+ - is: another one
+
+ the following code snippet would **FAIL**:
+ ::
+
+ ---
+ this:
+ is: [a, document]
+ ...
+ ---
+ - this
+ - is: another one
+"""
+
+
+import yaml
+
+from yamllint.linter import LintProblem
+
+
+ID = 'document-end'
+TYPE = 'token'
+CONF = {'present': bool}
+DEFAULT = {'present': True}
+
+
+def check(conf, token, prev, next, nextnext, context):
+ if conf['present']:
+ is_stream_end = isinstance(token, yaml.StreamEndToken)
+ is_start = isinstance(token, yaml.DocumentStartToken)
+ prev_is_end_or_stream_start = isinstance(
+ prev, (yaml.DocumentEndToken, yaml.StreamStartToken)
+ )
+
+ if is_stream_end and not prev_is_end_or_stream_start:
+ yield LintProblem(token.start_mark.line, 1,
+ 'missing document end "..."')
+ elif is_start and not prev_is_end_or_stream_start:
+ yield LintProblem(token.start_mark.line + 1, 1,
+ 'missing document end "..."')
+
+ else:
+ if isinstance(token, yaml.DocumentEndToken):
+ yield LintProblem(token.start_mark.line + 1,
+ token.start_mark.column + 1,
+ 'found forbidden document end "..."')
diff --git a/third_party/python/yamllint/yamllint/rules/document_start.py b/third_party/python/yamllint/yamllint/rules/document_start.py
new file mode 100644
index 0000000000..36c3d8e8db
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/document_start.py
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to require or forbid the document start marker (``---``).
+
+.. rubric:: Options
+
+* Set ``present`` to ``true`` when the document start marker is required, or to
+ ``false`` when it is forbidden.
+
+.. rubric:: Examples
+
+#. With ``document-start: {present: true}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ ---
+ this:
+ is: [a, document]
+ ---
+ - this
+ - is: another one
+
+ the following code snippet would **FAIL**:
+ ::
+
+ this:
+ is: [a, document]
+ ---
+ - this
+ - is: another one
+
+#. With ``document-start: {present: false}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ this:
+ is: [a, document]
+ ...
+
+ the following code snippet would **FAIL**:
+ ::
+
+ ---
+ this:
+ is: [a, document]
+ ...
+"""
+
+
+import yaml
+
+from yamllint.linter import LintProblem
+
+
+ID = 'document-start'
+TYPE = 'token'
+CONF = {'present': bool}
+DEFAULT = {'present': True}
+
+
+def check(conf, token, prev, next, nextnext, context):
+ if conf['present']:
+ if (isinstance(prev, (yaml.StreamStartToken,
+ yaml.DocumentEndToken,
+ yaml.DirectiveToken)) and
+ not isinstance(token, (yaml.DocumentStartToken,
+ yaml.DirectiveToken,
+ yaml.StreamEndToken))):
+ yield LintProblem(token.start_mark.line + 1, 1,
+ 'missing document start "---"')
+
+ else:
+ if isinstance(token, yaml.DocumentStartToken):
+ yield LintProblem(token.start_mark.line + 1,
+ token.start_mark.column + 1,
+ 'found forbidden document start "---"')
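
A quick sketch of the rule in action via the public API, with the default
``present: true`` (illustrative only)::

    from yamllint import linter
    from yamllint.config import YamlLintConfig

    conf = YamlLintConfig('rules: {document-start: {present: true}}')
    for p in linter.run('key: value\n', conf):
        print(p.line, p.column, p.desc)  # missing document start "---"
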
diff --git a/third_party/python/yamllint/yamllint/rules/empty_lines.py b/third_party/python/yamllint/yamllint/rules/empty_lines.py
new file mode 100644
index 0000000000..d9a8c4d173
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/empty_lines.py
@@ -0,0 +1,108 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to set a maximal number of allowed consecutive blank lines.
+
+.. rubric:: Options
+
+* ``max`` defines the maximal number of empty lines allowed in the document.
+* ``max-start`` defines the maximal number of empty lines allowed at the
+ beginning of the file. This option takes precedence over ``max``.
+* ``max-end`` defines the maximal number of empty lines allowed at the end of
+ the file. This option takes precedence over ``max``.
+
+.. rubric:: Examples
+
+#. With ``empty-lines: {max: 1}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ - foo:
+ - 1
+ - 2
+
+ - bar: [3, 4]
+
+ the following code snippet would **FAIL**:
+ ::
+
+ - foo:
+ - 1
+ - 2
+
+
+ - bar: [3, 4]
+"""
+
+
+from yamllint.linter import LintProblem
+
+
+ID = 'empty-lines'
+TYPE = 'line'
+CONF = {'max': int,
+ 'max-start': int,
+ 'max-end': int}
+DEFAULT = {'max': 2,
+ 'max-start': 0,
+ 'max-end': 0}
+
+
+def check(conf, line):
+ if line.start == line.end and line.end < len(line.buffer):
+ # Only alert on the last blank line of a series
+ if (line.end + 2 <= len(line.buffer) and
+ line.buffer[line.end:line.end + 2] == '\n\n'):
+ return
+ elif (line.end + 4 <= len(line.buffer) and
+ line.buffer[line.end:line.end + 4] == '\r\n\r\n'):
+ return
+
+ blank_lines = 0
+
+ start = line.start
+ while start >= 2 and line.buffer[start - 2:start] == '\r\n':
+ blank_lines += 1
+ start -= 2
+ while start >= 1 and line.buffer[start - 1] == '\n':
+ blank_lines += 1
+ start -= 1
+
+ max = conf['max']
+
+ # Special case: start of document
+ if start == 0:
+ blank_lines += 1 # first line doesn't have a preceding \n
+ max = conf['max-start']
+
+ # Special case: end of document
+ # NOTE: The last line of a file is always supposed to end with a new
+ # line. See POSIX definition of a line at:
+ if ((line.end == len(line.buffer) - 1 and
+ line.buffer[line.end] == '\n') or
+ (line.end == len(line.buffer) - 2 and
+ line.buffer[line.end:line.end + 2] == '\r\n')):
+ # Allow the exception of the one-byte file containing '\n'
+ if line.end == 0:
+ return
+
+ max = conf['max-end']
+
+ if blank_lines > max:
+ yield LintProblem(line.line_no, 1, 'too many blank lines (%d > %d)'
+ % (blank_lines, max))
diff --git a/third_party/python/yamllint/yamllint/rules/empty_values.py b/third_party/python/yamllint/yamllint/rules/empty_values.py
new file mode 100644
index 0000000000..bb4982bfdb
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/empty_values.py
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2017 Greg Dubicki
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to prevent nodes with empty content, which implicitly result in
+``null`` values.
+
+.. rubric:: Options
+
+* Use ``forbid-in-block-mappings`` to prevent empty values in block mappings.
+* Use ``forbid-in-flow-mappings`` to prevent empty values in flow mappings.
+
+.. rubric:: Examples
+
+#. With ``empty-values: {forbid-in-block-mappings: true}``
+
+ the following code snippets would **PASS**:
+ ::
+
+ some-mapping:
+   sub-element: correctly indented
+
+ ::
+
+ explicitly-null: null
+
+ the following code snippets would **FAIL**:
+ ::
+
+ some-mapping:
+ sub-element: incorrectly indented
+
+ ::
+
+ implicitly-null:
+
+#. With ``empty-values: {forbid-in-flow-mappings: true}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ {prop: null}
+ {a: 1, b: 2, c: 3}
+
+ the following code snippets would **FAIL**:
+ ::
+
+ {prop: }
+
+ ::
+
+ {a: 1, b:, c: 3}
+
+"""
+
+import yaml
+
+from yamllint.linter import LintProblem
+
+
+ID = 'empty-values'
+TYPE = 'token'
+CONF = {'forbid-in-block-mappings': bool,
+ 'forbid-in-flow-mappings': bool}
+DEFAULT = {'forbid-in-block-mappings': True,
+ 'forbid-in-flow-mappings': True}
+
+
+def check(conf, token, prev, next, nextnext, context):
+
+ if conf['forbid-in-block-mappings']:
+ if isinstance(token, yaml.ValueToken) and isinstance(next, (
+ yaml.KeyToken, yaml.BlockEndToken)):
+ yield LintProblem(token.start_mark.line + 1,
+ token.end_mark.column + 1,
+ 'empty value in block mapping')
+
+ if conf['forbid-in-flow-mappings']:
+ if isinstance(token, yaml.ValueToken) and isinstance(next, (
+ yaml.FlowEntryToken, yaml.FlowMappingEndToken)):
+ yield LintProblem(token.start_mark.line + 1,
+ token.end_mark.column + 1,
+ 'empty value in flow mapping')
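
A short sketch via the public API, assuming the defaults above (illustrative
only; the sample buffer is an assumption)::

    from yamllint import linter
    from yamllint.config import YamlLintConfig

    conf = YamlLintConfig('rules: {empty-values: {forbid-in-block-mappings: true,'
                          ' forbid-in-flow-mappings: true}}')
    buf = 'implicitly-null:\nflow: {a: 1, b: }\n'
    for p in linter.run(buf, conf):
        print(p.line, p.column, p.desc)
    # expected: 'empty value in block mapping' on line 1 and
    # 'empty value in flow mapping' on line 2
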
diff --git a/third_party/python/yamllint/yamllint/rules/hyphens.py b/third_party/python/yamllint/yamllint/rules/hyphens.py
new file mode 100644
index 0000000000..df38b4c519
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/hyphens.py
@@ -0,0 +1,88 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to control the number of spaces after hyphens (``-``).
+
+.. rubric:: Options
+
+* ``max-spaces-after`` defines the maximal number of spaces allowed after
+ hyphens.
+
+.. rubric:: Examples
+
+#. With ``hyphens: {max-spaces-after: 1}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ - first list:
+ - a
+ - b
+ - - 1
+ - 2
+ - 3
+
+ the following code snippet would **FAIL**:
+ ::
+
+ -  first list:
+     - a
+     - b
+
+ the following code snippet would **FAIL**:
+ ::
+
+ - - 1
+   -  2
+   - 3
+
+#. With ``hyphens: {max-spaces-after: 3}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ -   key
+ -  key2
+ - key42
+
+ the following code snippet would **FAIL**:
+ ::
+
+ -    key
+ -   key2
+ -  key42
+"""
+
+
+import yaml
+
+from yamllint.rules.common import spaces_after
+
+
+ID = 'hyphens'
+TYPE = 'token'
+CONF = {'max-spaces-after': int}
+DEFAULT = {'max-spaces-after': 1}
+
+
+def check(conf, token, prev, next, nextnext, context):
+ if isinstance(token, yaml.BlockEntryToken):
+ problem = spaces_after(token, prev, next,
+ max=conf['max-spaces-after'],
+ max_desc='too many spaces after hyphen')
+ if problem is not None:
+ yield problem
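
A minimal sketch via the public API (illustrative only)::

    from yamllint import linter
    from yamllint.config import YamlLintConfig

    conf = YamlLintConfig('rules: {hyphens: {max-spaces-after: 1}}')
    for p in linter.run('-  too many spaces\n- fine\n', conf):
        print(p.line, p.column, p.desc)  # too many spaces after hyphen (line 1)
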
diff --git a/third_party/python/yamllint/yamllint/rules/indentation.py b/third_party/python/yamllint/yamllint/rules/indentation.py
new file mode 100644
index 0000000000..d83eb6594b
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/indentation.py
@@ -0,0 +1,575 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to control the indentation.
+
+.. rubric:: Options
+
+* ``spaces`` defines the indentation width, in spaces. Set either to an integer
+ (e.g. ``2`` or ``4``, representing the number of spaces in an indentation
+ level) or to ``consistent`` to allow any number, as long as it remains the
+ same within the file.
+* ``indent-sequences`` defines whether block sequences should be indented or
+ not (when in a mapping, this indentation is not mandatory -- some people
+ perceive the ``-`` as part of the indentation). Possible values: ``true``,
+ ``false``, ``whatever`` and ``consistent``. ``consistent`` requires either
+ all block sequences to be indented, or none to be. ``whatever`` means either
+ indenting or not indenting individual block sequences is OK.
+* ``check-multi-line-strings`` defines whether to lint indentation in
+ multi-line strings. Set to ``true`` to enable, ``false`` to disable.
+
+.. rubric:: Examples
+
+#. With ``indentation: {spaces: 1}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ history:
+  - name: Unix
+    date: 1969
+  - name: Linux
+    date: 1991
+ nest:
+  recurse:
+   - haystack:
+      needle
+
+#. With ``indentation: {spaces: 4}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ history:
+     - name: Unix
+       date: 1969
+     - name: Linux
+       date: 1991
+ nest:
+     recurse:
+         - haystack:
+               needle
+
+ the following code snippet would **FAIL**:
+ ::
+
+ history:
+   - name: Unix
+     date: 1969
+   - name: Linux
+     date: 1991
+ nest:
+   recurse:
+     - haystack:
+         needle
+
+#. With ``indentation: {spaces: consistent}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ history:
+    - name: Unix
+      date: 1969
+    - name: Linux
+      date: 1991
+ nest:
+    recurse:
+       - haystack:
+            needle
+
+ the following code snippet would **FAIL**:
+ ::
+
+ some:
+   Russian:
+       dolls
+
+#. With ``indentation: {spaces: 2, indent-sequences: false}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ list:
+ - flying
+ - spaghetti
+ - monster
+
+ the following code snippet would **FAIL**:
+ ::
+
+ list:
+   - flying
+   - spaghetti
+   - monster
+
+#. With ``indentation: {spaces: 2, indent-sequences: whatever}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ list:
+   - flying:
+     - spaghetti
+     - monster
+   - not flying:
+       - spaghetti
+       - sauce
+
+#. With ``indentation: {spaces: 2, indent-sequences: consistent}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ - flying:
+   - spaghetti
+   - monster
+ - not flying:
+   - spaghetti
+   - sauce
+
+ the following code snippet would **FAIL**:
+ ::
+
+ - flying:
+   - spaghetti
+   - monster
+ - not flying:
+     - spaghetti
+     - sauce
+
+#. With ``indentation: {spaces: 4, check-multi-line-strings: true}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ Blaise Pascal:
+     Je vous écris une longue lettre parce que
+     je n'ai pas le temps d'en écrire une courte.
+
+ the following code snippet would **PASS**:
+ ::
+
+ Blaise Pascal: Je vous écris une longue lettre parce que
+                je n'ai pas le temps d'en écrire une courte.
+
+ the following code snippet would **FAIL**:
+ ::
+
+ Blaise Pascal: Je vous écris une longue lettre parce que
+   je n'ai pas le temps d'en écrire une courte.
+
+ the following code snippet would **FAIL**:
+ ::
+
+ C code:
+     void main() {
+         printf("foo");
+     }
+
+ the following code snippet would **PASS**:
+ ::
+
+ C code:
+     void main() {
+     printf("bar");
+     }
+"""
+
+import yaml
+
+from yamllint.linter import LintProblem
+from yamllint.rules.common import get_real_end_line, is_explicit_key
+
+
+ID = 'indentation'
+TYPE = 'token'
+CONF = {'spaces': (int, 'consistent'),
+ 'indent-sequences': (bool, 'whatever', 'consistent'),
+ 'check-multi-line-strings': bool}
+DEFAULT = {'spaces': 'consistent',
+ 'indent-sequences': True,
+ 'check-multi-line-strings': False}
+
+ROOT, B_MAP, F_MAP, B_SEQ, F_SEQ, B_ENT, KEY, VAL = range(8)
+labels = ('ROOT', 'B_MAP', 'F_MAP', 'B_SEQ', 'F_SEQ', 'B_ENT', 'KEY', 'VAL')
+
+
+class Parent(object):
+ def __init__(self, type, indent, line_indent=None):
+ self.type = type
+ self.indent = indent
+ self.line_indent = line_indent
+ self.explicit_key = False
+ self.implicit_block_seq = False
+
+ def __repr__(self):
+ return '%s:%d' % (labels[self.type], self.indent)
+
+
+def check_scalar_indentation(conf, token, context):
+ if token.start_mark.line == token.end_mark.line:
+ return
+
+ def compute_expected_indent(found_indent):
+ def detect_indent(base_indent):
+ if not isinstance(context['spaces'], int):
+ context['spaces'] = found_indent - base_indent
+ return base_indent + context['spaces']
+
+ if token.plain:
+ return token.start_mark.column
+ elif token.style in ('"', "'"):
+ return token.start_mark.column + 1
+ elif token.style in ('>', '|'):
+ if context['stack'][-1].type == B_ENT:
+ # - >
+ # multi
+ # line
+ return detect_indent(token.start_mark.column)
+ elif context['stack'][-1].type == KEY:
+ assert context['stack'][-1].explicit_key
+ # - ? >
+ # multi-line
+ # key
+ # : >
+ # multi-line
+ # value
+ return detect_indent(token.start_mark.column)
+ elif context['stack'][-1].type == VAL:
+ if token.start_mark.line + 1 > context['cur_line']:
+ # - key:
+ # >
+ # multi
+ # line
+ return detect_indent(context['stack'][-1].indent)
+ elif context['stack'][-2].explicit_key:
+ # - ? key
+ # : >
+ # multi-line
+ # value
+ return detect_indent(token.start_mark.column)
+ else:
+ # - key: >
+ # multi
+ # line
+ return detect_indent(context['stack'][-2].indent)
+ else:
+ return detect_indent(context['stack'][-1].indent)
+
+ expected_indent = None
+
+ line_no = token.start_mark.line + 1
+
+ line_start = token.start_mark.pointer
+ while True:
+ line_start = token.start_mark.buffer.find(
+ '\n', line_start, token.end_mark.pointer - 1) + 1
+ if line_start == 0:
+ break
+ line_no += 1
+
+ indent = 0
+ while token.start_mark.buffer[line_start + indent] == ' ':
+ indent += 1
+ if token.start_mark.buffer[line_start + indent] == '\n':
+ continue
+
+ if expected_indent is None:
+ expected_indent = compute_expected_indent(indent)
+
+ if indent != expected_indent:
+ yield LintProblem(line_no, indent + 1,
+ 'wrong indentation: expected %d but found %d' %
+ (expected_indent, indent))
+
+
+def _check(conf, token, prev, next, nextnext, context):
+ if 'stack' not in context:
+ context['stack'] = [Parent(ROOT, 0)]
+ context['cur_line'] = -1
+ context['spaces'] = conf['spaces']
+ context['indent-sequences'] = conf['indent-sequences']
+
+ # Step 1: Lint
+
+ is_visible = (
+ not isinstance(token, (yaml.StreamStartToken, yaml.StreamEndToken)) and
+ not isinstance(token, yaml.BlockEndToken) and
+ not (isinstance(token, yaml.ScalarToken) and token.value == ''))
+ first_in_line = (is_visible and
+ token.start_mark.line + 1 > context['cur_line'])
+
+ def detect_indent(base_indent, next):
+ if not isinstance(context['spaces'], int):
+ context['spaces'] = next.start_mark.column - base_indent
+ return base_indent + context['spaces']
+
+ if first_in_line:
+ found_indentation = token.start_mark.column
+ expected = context['stack'][-1].indent
+
+ if isinstance(token, (yaml.FlowMappingEndToken,
+ yaml.FlowSequenceEndToken)):
+ expected = context['stack'][-1].line_indent
+ elif (context['stack'][-1].type == KEY and
+ context['stack'][-1].explicit_key and
+ not isinstance(token, yaml.ValueToken)):
+ expected = detect_indent(expected, token)
+
+ if found_indentation != expected:
+ yield LintProblem(token.start_mark.line + 1, found_indentation + 1,
+ 'wrong indentation: expected %d but found %d' %
+ (expected, found_indentation))
+
+ if (isinstance(token, yaml.ScalarToken) and
+ conf['check-multi-line-strings']):
+ for problem in check_scalar_indentation(conf, token, context):
+ yield problem
+
+ # Step 2.a:
+
+ if is_visible:
+ context['cur_line'] = get_real_end_line(token)
+ if first_in_line:
+ context['cur_line_indent'] = found_indentation
+
+ # Step 2.b: Update state
+
+ if isinstance(token, yaml.BlockMappingStartToken):
+ # - a: 1
+ # or
+ # - ? a
+ # : 1
+ # or
+ # - ?
+ # a
+ # : 1
+ assert isinstance(next, yaml.KeyToken)
+ assert next.start_mark.line == token.start_mark.line
+
+ indent = token.start_mark.column
+
+ context['stack'].append(Parent(B_MAP, indent))
+
+ elif isinstance(token, yaml.FlowMappingStartToken):
+ if next.start_mark.line == token.start_mark.line:
+ # - {a: 1, b: 2}
+ indent = next.start_mark.column
+ else:
+ # - {
+ # a: 1, b: 2
+ # }
+ indent = detect_indent(context['cur_line_indent'], next)
+
+ context['stack'].append(Parent(F_MAP, indent,
+ line_indent=context['cur_line_indent']))
+
+ elif isinstance(token, yaml.BlockSequenceStartToken):
+ # - - a
+ # - b
+ assert isinstance(next, yaml.BlockEntryToken)
+ assert next.start_mark.line == token.start_mark.line
+
+ indent = token.start_mark.column
+
+ context['stack'].append(Parent(B_SEQ, indent))
+
+ elif (isinstance(token, yaml.BlockEntryToken) and
+ # in case of an empty entry
+ not isinstance(next, (yaml.BlockEntryToken, yaml.BlockEndToken))):
+ # It looks like pyyaml doesn't issue BlockSequenceStartTokens when the
+ # list is not indented. We need to compensate that.
+ if context['stack'][-1].type != B_SEQ:
+ context['stack'].append(Parent(B_SEQ, token.start_mark.column))
+ context['stack'][-1].implicit_block_seq = True
+
+ if next.start_mark.line == token.end_mark.line:
+ # - item 1
+ # - item 2
+ indent = next.start_mark.column
+ elif next.start_mark.column == token.start_mark.column:
+ # -
+ # key: value
+ indent = next.start_mark.column
+ else:
+ # -
+ # item 1
+ # -
+ # key:
+ # value
+ indent = detect_indent(token.start_mark.column, next)
+
+ context['stack'].append(Parent(B_ENT, indent))
+
+ elif isinstance(token, yaml.FlowSequenceStartToken):
+ if next.start_mark.line == token.start_mark.line:
+ # - [a, b]
+ indent = next.start_mark.column
+ else:
+ # - [
+ # a, b
+ # ]
+ indent = detect_indent(context['cur_line_indent'], next)
+
+ context['stack'].append(Parent(F_SEQ, indent,
+ line_indent=context['cur_line_indent']))
+
+ elif isinstance(token, yaml.KeyToken):
+ indent = context['stack'][-1].indent
+
+ context['stack'].append(Parent(KEY, indent))
+
+ context['stack'][-1].explicit_key = is_explicit_key(token)
+
+ elif isinstance(token, yaml.ValueToken):
+ assert context['stack'][-1].type == KEY
+
+ # Special cases:
+ # key: &anchor
+ # value
+ # and:
+ # key: !!tag
+ # value
+ if isinstance(next, (yaml.AnchorToken, yaml.TagToken)):
+ if (next.start_mark.line == prev.start_mark.line and
+ next.start_mark.line < nextnext.start_mark.line):
+ next = nextnext
+
+ # Only if value is not empty
+ if not isinstance(next, (yaml.BlockEndToken,
+ yaml.FlowMappingEndToken,
+ yaml.FlowSequenceEndToken,
+ yaml.KeyToken)):
+ if context['stack'][-1].explicit_key:
+ # ? k
+ # : value
+ # or
+ # ? k
+ # :
+ # value
+ indent = detect_indent(context['stack'][-1].indent, next)
+ elif next.start_mark.line == prev.start_mark.line:
+ # k: value
+ indent = next.start_mark.column
+ elif isinstance(next, (yaml.BlockSequenceStartToken,
+ yaml.BlockEntryToken)):
+ # NOTE: We add BlockEntryToken in the test above because
+ # sometimes BlockSequenceStartToken are not issued. Try
+ # yaml.scan()ning this:
+ # '- lib:\n'
+ # ' - var\n'
+ if context['indent-sequences'] is False:
+ indent = context['stack'][-1].indent
+ elif context['indent-sequences'] is True:
+ if (context['spaces'] == 'consistent' and
+ next.start_mark.column -
+ context['stack'][-1].indent == 0):
+ # In this case, the block sequence item is not indented
+ # (while it should be), but we don't know yet the
+ # indentation it should have (because `spaces` is
+ # `consistent` and its value has not been computed yet
+ # -- this is probably the beginning of the document).
+ # So we choose an arbitrary value (2).
+ indent = 2
+ else:
+ indent = detect_indent(context['stack'][-1].indent,
+ next)
+ else: # 'whatever' or 'consistent'
+ if next.start_mark.column == context['stack'][-1].indent:
+ # key:
+ # - e1
+ # - e2
+ if context['indent-sequences'] == 'consistent':
+ context['indent-sequences'] = False
+ indent = context['stack'][-1].indent
+ else:
+ if context['indent-sequences'] == 'consistent':
+ context['indent-sequences'] = True
+ # key:
+ # - e1
+ # - e2
+ indent = detect_indent(context['stack'][-1].indent,
+ next)
+ else:
+ # k:
+ # value
+ indent = detect_indent(context['stack'][-1].indent, next)
+
+ context['stack'].append(Parent(VAL, indent))
+
+ consumed_current_token = False
+ while True:
+ if (context['stack'][-1].type == F_SEQ and
+ isinstance(token, yaml.FlowSequenceEndToken) and
+ not consumed_current_token):
+ context['stack'].pop()
+ consumed_current_token = True
+
+ elif (context['stack'][-1].type == F_MAP and
+ isinstance(token, yaml.FlowMappingEndToken) and
+ not consumed_current_token):
+ context['stack'].pop()
+ consumed_current_token = True
+
+ elif (context['stack'][-1].type in (B_MAP, B_SEQ) and
+ isinstance(token, yaml.BlockEndToken) and
+ not context['stack'][-1].implicit_block_seq and
+ not consumed_current_token):
+ context['stack'].pop()
+ consumed_current_token = True
+
+ elif (context['stack'][-1].type == B_ENT and
+ not isinstance(token, yaml.BlockEntryToken) and
+ context['stack'][-2].implicit_block_seq and
+ not isinstance(token, (yaml.AnchorToken, yaml.TagToken)) and
+ not isinstance(next, yaml.BlockEntryToken)):
+ context['stack'].pop()
+ context['stack'].pop()
+
+ elif (context['stack'][-1].type == B_ENT and
+ isinstance(next, (yaml.BlockEntryToken, yaml.BlockEndToken))):
+ context['stack'].pop()
+
+ elif (context['stack'][-1].type == VAL and
+ not isinstance(token, yaml.ValueToken) and
+ not isinstance(token, (yaml.AnchorToken, yaml.TagToken))):
+ assert context['stack'][-2].type == KEY
+ context['stack'].pop()
+ context['stack'].pop()
+
+ elif (context['stack'][-1].type == KEY and
+ isinstance(next, (yaml.BlockEndToken,
+ yaml.FlowMappingEndToken,
+ yaml.FlowSequenceEndToken,
+ yaml.KeyToken))):
+ # A key without a value: it's part of a set. Let's drop this key
+ # and leave room for the next one.
+ context['stack'].pop()
+
+ else:
+ break
+
+
+def check(conf, token, prev, next, nextnext, context):
+ try:
+ for problem in _check(conf, token, prev, next, nextnext, context):
+ yield problem
+ except AssertionError:
+ yield LintProblem(token.start_mark.line + 1,
+ token.start_mark.column + 1,
+ 'cannot infer indentation: unexpected token')
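
A hedged usage sketch via the public API: with ``spaces: 4``, a two-space
indented block sequence is reported (illustrative only; the sample buffer is
an assumption)::

    from yamllint import linter
    from yamllint.config import YamlLintConfig

    conf = YamlLintConfig('rules: {indentation: {spaces: 4}}')
    buf = 'history:\n  - name: Unix\n    date: 1969\n'
    for p in linter.run(buf, conf):
        print(p.line, p.column, p.desc)
    # expected: wrong indentation: expected 4 but found 2 (line 2)
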
diff --git a/third_party/python/yamllint/yamllint/rules/key_duplicates.py b/third_party/python/yamllint/yamllint/rules/key_duplicates.py
new file mode 100644
index 0000000000..bd38b14345
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/key_duplicates.py
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to prevent multiple entries with the same key in mappings.
+
+.. rubric:: Examples
+
+#. With ``key-duplicates: {}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ - key 1: v
+   key 2: val
+   key 3: value
+ - {a: 1, b: 2, c: 3}
+
+ the following code snippet would **FAIL**:
+ ::
+
+ - key 1: v
+   key 2: val
+   key 1: value
+
+ the following code snippet would **FAIL**:
+ ::
+
+ - {a: 1, b: 2, b: 3}
+
+ the following code snippet would **FAIL**:
+ ::
+
+ duplicated key: 1
+ "duplicated key": 2
+
+ other duplication: 1
+ ? >-
+     other
+     duplication
+ : 2
+"""
+
+import yaml
+
+from yamllint.linter import LintProblem
+
+
+ID = 'key-duplicates'
+TYPE = 'token'
+
+MAP, SEQ = range(2)
+
+
+class Parent(object):
+ def __init__(self, type):
+ self.type = type
+ self.keys = []
+
+
+def check(conf, token, prev, next, nextnext, context):
+ if 'stack' not in context:
+ context['stack'] = []
+
+ if isinstance(token, (yaml.BlockMappingStartToken,
+ yaml.FlowMappingStartToken)):
+ context['stack'].append(Parent(MAP))
+ elif isinstance(token, (yaml.BlockSequenceStartToken,
+ yaml.FlowSequenceStartToken)):
+ context['stack'].append(Parent(SEQ))
+ elif isinstance(token, (yaml.BlockEndToken,
+ yaml.FlowMappingEndToken,
+ yaml.FlowSequenceEndToken)):
+ context['stack'].pop()
+ elif (isinstance(token, yaml.KeyToken) and
+ isinstance(next, yaml.ScalarToken)):
+ # This check is done because KeyTokens can be found inside flow
+ # sequences... strange, but allowed.
+ if len(context['stack']) > 0 and context['stack'][-1].type == MAP:
+ if (next.value in context['stack'][-1].keys and
+ # `<<` is "merge key", see http://yaml.org/type/merge.html
+ next.value != '<<'):
+ yield LintProblem(
+ next.start_mark.line + 1, next.start_mark.column + 1,
+ 'duplication of key "%s" in mapping' % next.value)
+ else:
+ context['stack'][-1].keys.append(next.value)
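
A short sketch via the public API (illustrative only)::

    from yamllint import linter
    from yamllint.config import YamlLintConfig

    conf = YamlLintConfig('rules: {key-duplicates: enable}')
    for p in linter.run('a: 1\nb: 2\na: 3\n', conf):
        print(p.line, p.column, p.desc)  # duplication of key "a" in mapping
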
diff --git a/third_party/python/yamllint/yamllint/rules/key_ordering.py b/third_party/python/yamllint/yamllint/rules/key_ordering.py
new file mode 100644
index 0000000000..1ca992b66e
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/key_ordering.py
@@ -0,0 +1,109 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2017 Johannes F. Knauf
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to enforce alphabetical ordering of keys in mappings. The sorting
+order uses the Unicode code point number. As a result, the ordering is
+case-sensitive and not accent-friendly (see examples below).
+
+.. rubric:: Examples
+
+#. With ``key-ordering: {}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ - key 1: v
+   key 2: val
+   key 3: value
+ - {a: 1, b: 2, c: 3}
+ - T-shirt: 1
+   T-shirts: 2
+   t-shirt: 3
+   t-shirts: 4
+ - hair: true
+   hais: true
+   haïr: true
+   haïssable: true
+
+ the following code snippet would **FAIL**:
+ ::
+
+ - key 2: v
+   key 1: val
+
+ the following code snippet would **FAIL**:
+ ::
+
+ - {b: 1, a: 2}
+
+ the following code snippet would **FAIL**:
+ ::
+
+ - T-shirt: 1
+   t-shirt: 2
+   T-shirts: 3
+   t-shirts: 4
+
+ the following code snippet would **FAIL**:
+ ::
+
+ - haïr: true
+   hais: true
+"""
+
+import yaml
+
+from yamllint.linter import LintProblem
+
+
+ID = 'key-ordering'
+TYPE = 'token'
+
+MAP, SEQ = range(2)
+
+
+class Parent(object):
+ def __init__(self, type):
+ self.type = type
+ self.keys = []
+
+
+def check(conf, token, prev, next, nextnext, context):
+ if 'stack' not in context:
+ context['stack'] = []
+
+ if isinstance(token, (yaml.BlockMappingStartToken,
+ yaml.FlowMappingStartToken)):
+ context['stack'].append(Parent(MAP))
+ elif isinstance(token, (yaml.BlockSequenceStartToken,
+ yaml.FlowSequenceStartToken)):
+ context['stack'].append(Parent(SEQ))
+ elif isinstance(token, (yaml.BlockEndToken,
+ yaml.FlowMappingEndToken,
+ yaml.FlowSequenceEndToken)):
+ context['stack'].pop()
+ elif (isinstance(token, yaml.KeyToken) and
+ isinstance(next, yaml.ScalarToken)):
+ # This check is done because KeyTokens can be found inside flow
+ # sequences... strange, but allowed.
+ if len(context['stack']) > 0 and context['stack'][-1].type == MAP:
+ if any(next.value < key for key in context['stack'][-1].keys):
+ yield LintProblem(
+ next.start_mark.line + 1, next.start_mark.column + 1,
+ 'wrong ordering of key "%s" in mapping' % next.value)
+ else:
+ context['stack'][-1].keys.append(next.value)
diff --git a/third_party/python/yamllint/yamllint/rules/line_length.py b/third_party/python/yamllint/yamllint/rules/line_length.py
new file mode 100644
index 0000000000..9b5a1ab687
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/line_length.py
@@ -0,0 +1,149 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to set a limit on line length.
+
+Note: with Python 2, the ``line-length`` rule may not work properly with
+unicode characters because of the way strings are represented in bytes. We
+recommend running yamllint with Python 3.
+
+.. rubric:: Options
+
+* ``max`` defines the maximal (inclusive) length of lines.
+* ``allow-non-breakable-words`` is used to allow non breakable words (without
+ spaces inside) to overflow the limit. This is useful for long URLs, for
+ instance. Use ``true`` to allow, ``false`` to forbid.
+* ``allow-non-breakable-inline-mappings`` implies ``allow-non-breakable-words``
+ and extends it to also allow non-breakable words in inline mappings.
+
+.. rubric:: Examples
+
+#. With ``line-length: {max: 70}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ long sentence:
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do
+ eiusmod tempor incididunt ut labore et dolore magna aliqua.
+
+ the following code snippet would **FAIL**:
+ ::
+
+ long sentence:
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+ tempor incididunt ut labore et dolore magna aliqua.
+
+#. With ``line-length: {max: 60, allow-non-breakable-words: true}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ this:
+ is:
+ - a:
+ http://localhost/very/very/very/very/very/very/very/very/long/url
+
+ # this comment is too long,
+ # but hard to split:
+ # http://localhost/another/very/very/very/very/very/very/very/very/long/url
+
+ the following code snippet would **FAIL**:
+ ::
+
+ - this line is waaaaaaaaaaaaaay too long but could be easily split...
+
+ and the following code snippet would also **FAIL**:
+ ::
+
+ - foobar: http://localhost/very/very/very/very/very/very/very/very/long/url
+
+#. With ``line-length: {max: 60, allow-non-breakable-words: true,
+ allow-non-breakable-inline-mappings: true}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ - foobar: http://localhost/very/very/very/very/very/very/very/very/long/url
+
+#. With ``line-length: {max: 60, allow-non-breakable-words: false}``
+
+ the following code snippet would **FAIL**:
+ ::
+
+ this:
+ is:
+ - a:
+ http://localhost/very/very/very/very/very/very/very/very/long/url
+"""
+
+
+import yaml
+
+from yamllint.linter import LintProblem
+
+
+ID = 'line-length'
+TYPE = 'line'
+CONF = {'max': int,
+ 'allow-non-breakable-words': bool,
+ 'allow-non-breakable-inline-mappings': bool}
+DEFAULT = {'max': 80,
+ 'allow-non-breakable-words': True,
+ 'allow-non-breakable-inline-mappings': False}
+
+
+def check_inline_mapping(line):
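+    # Scan just this line as YAML and return True when it is an inline
+    # mapping whose value is a scalar containing no space after the value's
+    # start column (for example a long URL), so the caller can exempt the
+    # line from the length limit.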
+ loader = yaml.SafeLoader(line.content)
+ try:
+ while loader.peek_token():
+ if isinstance(loader.get_token(), yaml.BlockMappingStartToken):
+ while loader.peek_token():
+ if isinstance(loader.get_token(), yaml.ValueToken):
+ t = loader.get_token()
+ if isinstance(t, yaml.ScalarToken):
+ return (
+ ' ' not in line.content[t.start_mark.column:])
+ except yaml.scanner.ScannerError:
+ pass
+
+ return False
+
+
+def check(conf, line):
+ if line.end - line.start > conf['max']:
+ conf['allow-non-breakable-words'] |= \
+ conf['allow-non-breakable-inline-mappings']
+ if conf['allow-non-breakable-words']:
+ start = line.start
+ while start < line.end and line.buffer[start] == ' ':
+ start += 1
+
+ if start != line.end:
+ if line.buffer[start] in ('#', '-'):
+ start += 2
+
+ if line.buffer.find(' ', start, line.end) == -1:
+ return
+
+ if (conf['allow-non-breakable-inline-mappings'] and
+ check_inline_mapping(line)):
+ return
+
+ yield LintProblem(line.line_no, conf['max'] + 1,
+ 'line too long (%d > %d characters)' %
+ (line.end - line.start, conf['max']))
diff --git a/third_party/python/yamllint/yamllint/rules/new_line_at_end_of_file.py b/third_party/python/yamllint/yamllint/rules/new_line_at_end_of_file.py
new file mode 100644
index 0000000000..90b1cc2ae7
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/new_line_at_end_of_file.py
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to require a new line character (``\\n``) at the end of files.
+
+The POSIX standard `requires the last line to end with a new line character
+<http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html#tag_03_206>`_.
+All UNIX tools expect a new line at the end of files. Most text editors use
+this convention too.
+"""
+
+
+from yamllint.linter import LintProblem
+
+
+ID = 'new-line-at-end-of-file'
+TYPE = 'line'
+
+
+def check(conf, line):
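+    # The last line is the one whose end coincides with the end of the
+    # buffer; if that line is non-empty, the file is missing a final new
+    # line character.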
+ if line.end == len(line.buffer) and line.end > line.start:
+ yield LintProblem(line.line_no, line.end - line.start + 1,
+ 'no new line character at the end of file')
diff --git a/third_party/python/yamllint/yamllint/rules/new_lines.py b/third_party/python/yamllint/yamllint/rules/new_lines.py
new file mode 100644
index 0000000000..686bac244b
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/new_lines.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to force the type of new line characters.
+
+.. rubric:: Options
+
+* Set ``type`` to ``unix`` to use UNIX-typed new line characters (``\\n``), or
+ ``dos`` to use DOS-typed new line characters (``\\r\\n``).
+"""
+
+
+from yamllint.linter import LintProblem
+
+
+ID = 'new-lines'
+TYPE = 'line'
+CONF = {'type': ('unix', 'dos')}
+DEFAULT = {'type': 'unix'}
+
+
+def check(conf, line):
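+    # Only the first line is checked (line.start == 0), and only when at
+    # least one character follows it in the buffer: that character (or that
+    # pair of characters) is the new line sequence being verified.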
+ if line.start == 0 and len(line.buffer) > line.end:
+ if conf['type'] == 'dos':
+ if (line.end + 2 > len(line.buffer) or
+ line.buffer[line.end:line.end + 2] != '\r\n'):
+ yield LintProblem(1, line.end - line.start + 1,
+ 'wrong new line character: expected \\r\\n')
+ else:
+ if line.buffer[line.end] == '\r':
+ yield LintProblem(1, line.end - line.start + 1,
+ 'wrong new line character: expected \\n')
diff --git a/third_party/python/yamllint/yamllint/rules/octal_values.py b/third_party/python/yamllint/yamllint/rules/octal_values.py
new file mode 100644
index 0000000000..f6e80cef56
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/octal_values.py
@@ -0,0 +1,95 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2017 ScienJus
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to prevent values with octal numbers. In YAML, numbers that
+start with ``0`` are interpreted as octal, but this is not always wanted.
+For instance ``010`` is the city code of Beijing, and should not be
+converted to ``8``.
+
+.. rubric:: Examples
+
+#. With ``octal-values: {forbid-implicit-octal: true}``
+
+   the following code snippet would **PASS**:
+ ::
+
+ user:
+ city-code: '010'
+
+   the following code snippet would **PASS**:
+ ::
+
+ user:
+ city-code: 010,021
+
+   the following code snippet would **FAIL**:
+ ::
+
+ user:
+ city-code: 010
+
+#. With ``octal-values: {forbid-explicit-octal: true}``
+
+   the following code snippet would **PASS**:
+ ::
+
+ user:
+ city-code: '0o10'
+
+   the following code snippet would **FAIL**:
+ ::
+
+ user:
+ city-code: 0o10
+"""
+
+import yaml
+
+from yamllint.linter import LintProblem
+
+
+ID = 'octal-values'
+TYPE = 'token'
+CONF = {'forbid-implicit-octal': bool,
+ 'forbid-explicit-octal': bool}
+DEFAULT = {'forbid-implicit-octal': True,
+ 'forbid-explicit-octal': True}
+
+
+def check(conf, token, prev, next, nextnext, context):
+ if prev and isinstance(prev, yaml.tokens.TagToken):
+ return
+
+ if conf['forbid-implicit-octal']:
+ if isinstance(token, yaml.tokens.ScalarToken):
+ if not token.style:
+ val = token.value
+ if val.isdigit() and len(val) > 1 and val[0] == '0':
+ yield LintProblem(
+ token.start_mark.line + 1, token.end_mark.column + 1,
+ 'forbidden implicit octal value "%s"' %
+ token.value)
+
+ if conf['forbid-explicit-octal']:
+ if isinstance(token, yaml.tokens.ScalarToken):
+ if not token.style:
+ val = token.value
+ if len(val) > 2 and val[:2] == '0o' and val[2:].isdigit():
+ yield LintProblem(
+ token.start_mark.line + 1, token.end_mark.column + 1,
+ 'forbidden explicit octal value "%s"' %
+ token.value)
diff --git a/third_party/python/yamllint/yamllint/rules/quoted_strings.py b/third_party/python/yamllint/yamllint/rules/quoted_strings.py
new file mode 100644
index 0000000000..1d997294da
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/quoted_strings.py
@@ -0,0 +1,230 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2018 ClearScore
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to forbid string values that are not quoted, or to prevent
+strings from being quoted when quoting is not needed. You can also enforce
+the type of quote used.
+
+.. rubric:: Options
+
+* ``quote-type`` defines allowed quotes: ``single``, ``double`` or ``any``
+ (default).
+* ``required`` defines whether using quotes in string values is required
+ (``true``, default) or not (``false``), or only allowed when really needed
+ (``only-when-needed``).
+* ``extra-required`` is a list of PCRE regexes to force string values to be
+ quoted, if they match any regex. This option can only be used with
+ ``required: false`` and ``required: only-when-needed``.
+* ``extra-allowed`` is a list of PCRE regexes to allow quoted string values,
+ even if ``required: only-when-needed`` is set.
+
+**Note**: Multi-line strings (with ``|`` or ``>``) will not be checked.
+
+.. rubric:: Examples
+
+#. With ``quoted-strings: {quote-type: any, required: true}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ foo: "bar"
+ bar: 'foo'
+ number: 123
+ boolean: true
+
+ the following code snippet would **FAIL**:
+ ::
+
+ foo: bar
+
+#. With ``quoted-strings: {quote-type: single, required: only-when-needed}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ foo: bar
+ bar: foo
+ not_number: '123'
+ not_boolean: 'true'
+ not_comment: '# comment'
+ not_list: '[1, 2, 3]'
+ not_map: '{a: 1, b: 2}'
+
+ the following code snippet would **FAIL**:
+ ::
+
+ foo: 'bar'
+
+#. With ``quoted-strings: {required: false, extra-required: [^http://,
+ ^ftp://]}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ - localhost
+ - "localhost"
+ - "http://localhost"
+ - "ftp://localhost"
+
+ the following code snippet would **FAIL**:
+ ::
+
+ - http://localhost
+ - ftp://localhost
+
+#. With ``quoted-strings: {required: only-when-needed, extra-allowed:
+ [^http://, ^ftp://], extra-required: [QUOTED]}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ - localhost
+ - "http://localhost"
+ - "ftp://localhost"
+ - "this is a string that needs to be QUOTED"
+
+ the following code snippet would **FAIL**:
+ ::
+
+ - "localhost"
+ - this is a string that needs to be QUOTED
+"""
+
+import re
+
+import yaml
+
+from yamllint.linter import LintProblem
+
+ID = 'quoted-strings'
+TYPE = 'token'
+CONF = {'quote-type': ('any', 'single', 'double'),
+ 'required': (True, False, 'only-when-needed'),
+ 'extra-required': [str],
+ 'extra-allowed': [str]}
+DEFAULT = {'quote-type': 'any',
+ 'required': True,
+ 'extra-required': [],
+ 'extra-allowed': []}
+
+
+def VALIDATE(conf):
+ if conf['required'] is True and len(conf['extra-allowed']) > 0:
+ return 'cannot use both "required: true" and "extra-allowed"'
+ if conf['required'] is True and len(conf['extra-required']) > 0:
+ return 'cannot use both "required: true" and "extra-required"'
+ if conf['required'] is False and len(conf['extra-allowed']) > 0:
+ return 'cannot use both "required: false" and "extra-allowed"'
+
+
+DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str'
+
+
+def _quote_match(quote_type, token_style):
+ return ((quote_type == 'any') or
+ (quote_type == 'single' and token_style == "'") or
+ (quote_type == 'double' and token_style == '"'))
+
+
+def _quotes_are_needed(string):
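+    # Parse "key: <string>" with the BaseLoader: if the value scans as a
+    # single plain (unquoted) scalar immediately followed by the end of the
+    # block mapping, the string does not need quotes; any other token
+    # sequence or a scanner error means quotes are needed.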
+ loader = yaml.BaseLoader('key: ' + string)
+ # Remove the 5 first tokens corresponding to 'key: ' (StreamStartToken,
+ # BlockMappingStartToken, KeyToken, ScalarToken(value=key), ValueToken)
+ for _ in range(5):
+ loader.get_token()
+ try:
+ a, b = loader.get_token(), loader.get_token()
+ if (isinstance(a, yaml.ScalarToken) and a.style is None and
+ isinstance(b, yaml.BlockEndToken)):
+ return False
+ return True
+ except yaml.scanner.ScannerError:
+ return True
+
+
+def check(conf, token, prev, next, nextnext, context):
+ if not (isinstance(token, yaml.tokens.ScalarToken) and
+ isinstance(prev, (yaml.BlockEntryToken, yaml.FlowEntryToken,
+ yaml.FlowSequenceStartToken, yaml.TagToken,
+ yaml.ValueToken))):
+
+ return
+
+ # Ignore explicit types, e.g. !!str testtest or !!int 42
+ if (prev and isinstance(prev, yaml.tokens.TagToken) and
+ prev.value[0] == '!!'):
+ return
+
+ # Ignore numbers, booleans, etc.
+ resolver = yaml.resolver.Resolver()
+ tag = resolver.resolve(yaml.nodes.ScalarNode, token.value, (True, False))
+ if token.plain and tag != DEFAULT_SCALAR_TAG:
+ return
+
+ # Ignore multi-line strings
+ if (not token.plain) and (token.style == "|" or token.style == ">"):
+ return
+
+ quote_type = conf['quote-type']
+
+ msg = None
+ if conf['required'] is True:
+
+ # Quotes are mandatory and need to match config
+ if token.style is None or not _quote_match(quote_type, token.style):
+ msg = "string value is not quoted with %s quotes" % quote_type
+
+ elif conf['required'] is False:
+
+ # Quotes are not mandatory but when used need to match config
+ if token.style and not _quote_match(quote_type, token.style):
+ msg = "string value is not quoted with %s quotes" % quote_type
+
+ elif not token.style:
+ is_extra_required = any(re.search(r, token.value)
+ for r in conf['extra-required'])
+ if is_extra_required:
+ msg = "string value is not quoted"
+
+ elif conf['required'] == 'only-when-needed':
+
+ # Quotes are not strictly needed here
+ if (token.style and tag == DEFAULT_SCALAR_TAG and token.value and
+ not _quotes_are_needed(token.value)):
+ is_extra_required = any(re.search(r, token.value)
+ for r in conf['extra-required'])
+ is_extra_allowed = any(re.search(r, token.value)
+ for r in conf['extra-allowed'])
+ if not (is_extra_required or is_extra_allowed):
+ msg = "string value is redundantly quoted with %s quotes" % (
+ quote_type)
+
+ # But when used need to match config
+ elif token.style and not _quote_match(quote_type, token.style):
+ msg = "string value is not quoted with %s quotes" % quote_type
+
+ elif not token.style:
+ is_extra_required = len(conf['extra-required']) and any(
+ re.search(r, token.value) for r in conf['extra-required'])
+ if is_extra_required:
+ msg = "string value is not quoted"
+
+ if msg is not None:
+ yield LintProblem(
+ token.start_mark.line + 1,
+ token.start_mark.column + 1,
+ msg)
diff --git a/third_party/python/yamllint/yamllint/rules/trailing_spaces.py b/third_party/python/yamllint/yamllint/rules/trailing_spaces.py
new file mode 100644
index 0000000000..2fc4bbba03
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/trailing_spaces.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Adrien Vergé
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to forbid trailing spaces at the end of lines.
+
+.. rubric:: Examples
+
+#. With ``trailing-spaces: {}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ this document doesn't contain
+ any trailing
+ spaces
+
+ the following code snippet would **FAIL**:
+ ::
+
+ this document contains """ """
+ trailing spaces
+ on lines 1 and 3 """ """
+"""
+
+
+import string
+
+from yamllint.linter import LintProblem
+
+
+ID = 'trailing-spaces'
+TYPE = 'line'
+
+
+def check(conf, line):
+ if line.end == 0:
+ return
+
+ # YAML recognizes two white space characters: space and tab.
+ # http://yaml.org/spec/1.2/spec.html#id2775170
+
+ pos = line.end
+ while line.buffer[pos - 1] in string.whitespace and pos > line.start:
+ pos -= 1
+
+ if pos != line.end and line.buffer[pos] in ' \t':
+ yield LintProblem(line.line_no, pos - line.start + 1,
+ 'trailing spaces')
diff --git a/third_party/python/yamllint/yamllint/rules/truthy.py b/third_party/python/yamllint/yamllint/rules/truthy.py
new file mode 100644
index 0000000000..64ccaaa45c
--- /dev/null
+++ b/third_party/python/yamllint/yamllint/rules/truthy.py
@@ -0,0 +1,149 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016 Peter Ericson
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Use this rule to forbid non-explicitly typed truthy values other than allowed
+ones (by default: ``true`` and ``false``), for example ``YES`` or ``off``.
+
+This can be useful to prevent surprises from YAML parsers transforming
+``[yes, FALSE, Off]`` into ``[true, false, false]`` or
+``{y: 1, yes: 2, on: 3, true: 4, True: 5}`` into ``{y: 1, true: 5}``.
+
+.. rubric:: Options
+
+* ``allowed-values`` defines the list of truthy values which will be ignored
+ during linting. The default is ``['true', 'false']``, but can be changed to
+ any list containing: ``'TRUE'``, ``'True'``, ``'true'``, ``'FALSE'``,
+ ``'False'``, ``'false'``, ``'YES'``, ``'Yes'``, ``'yes'``, ``'NO'``,
+ ``'No'``, ``'no'``, ``'ON'``, ``'On'``, ``'on'``, ``'OFF'``, ``'Off'``,
+ ``'off'``.
+* ``check-keys`` disables verification for keys in mappings. By default, the
+  ``truthy`` rule applies to both keys and values. Set this option to
+  ``false`` to check values only.
+
+.. rubric:: Examples
+
+#. With ``truthy: {}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ boolean: true
+
+ object: {"True": 1, 1: "True"}
+
+ "yes": 1
+ "on": 2
+ "True": 3
+
+ explicit:
+ string1: !!str True
+ string2: !!str yes
+ string3: !!str off
+ encoded: !!binary |
+ True
+ OFF
+ pad== # this decodes as 'N\xbb\x9e8Qii'
+ boolean1: !!bool true
+ boolean2: !!bool "false"
+ boolean3: !!bool FALSE
+ boolean4: !!bool True
+ boolean5: !!bool off
+ boolean6: !!bool NO
+
+ the following code snippet would **FAIL**:
+ ::
+
+ object: {True: 1, 1: True}
+
+ the following code snippet would **FAIL**:
+ ::
+
+ yes: 1
+ on: 2
+ True: 3
+
+#. With ``truthy: {allowed-values: ["yes", "no"]}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ - yes
+ - no
+ - "true"
+ - 'false'
+ - foo
+ - bar
+
+ the following code snippet would **FAIL**:
+ ::
+
+ - true
+ - false
+ - on
+ - off
+
+#. With ``truthy: {check-keys: false}``
+
+ the following code snippet would **PASS**:
+ ::
+
+ yes: 1
+ on: 2
+ true: 3
+
+ the following code snippet would **FAIL**:
+ ::
+
+ yes: Yes
+ on: On
+ true: True
+"""
+
+import yaml
+
+from yamllint.linter import LintProblem
+
+
+TRUTHY = ['YES', 'Yes', 'yes',
+ 'NO', 'No', 'no',
+ 'TRUE', 'True', 'true',
+ 'FALSE', 'False', 'false',
+ 'ON', 'On', 'on',
+ 'OFF', 'Off', 'off']
+
+
+ID = 'truthy'
+TYPE = 'token'
+CONF = {'allowed-values': list(TRUTHY), 'check-keys': bool}
+DEFAULT = {'allowed-values': ['true', 'false'], 'check-keys': True}
+
+
+def check(conf, token, prev, next, nextnext, context):
+ if prev and isinstance(prev, yaml.tokens.TagToken):
+ return
+
+ if (not conf['check-keys'] and isinstance(prev, yaml.tokens.KeyToken) and
+ isinstance(token, yaml.tokens.ScalarToken)):
+ return
+
+ if isinstance(token, yaml.tokens.ScalarToken):
+ if (token.value in (set(TRUTHY) - set(conf['allowed-values'])) and
+ token.style is None):
+ yield LintProblem(token.start_mark.line + 1,
+ token.start_mark.column + 1,
+ "truthy value should be one of [" +
+ ", ".join(sorted(conf['allowed-values'])) + "]")
diff --git a/third_party/python/zipp/.coveragerc b/third_party/python/zipp/.coveragerc
new file mode 100644
index 0000000000..45823064a3
--- /dev/null
+++ b/third_party/python/zipp/.coveragerc
@@ -0,0 +1,5 @@
+[run]
+omit = .tox/*
+
+[report]
+show_missing = True
diff --git a/third_party/python/zipp/.flake8 b/third_party/python/zipp/.flake8
new file mode 100644
index 0000000000..790c109fdb
--- /dev/null
+++ b/third_party/python/zipp/.flake8
@@ -0,0 +1,9 @@
+[flake8]
+max-line-length = 88
+ignore =
+ # W503 violates spec https://github.com/PyCQA/pycodestyle/issues/513
+ W503
+ # W504 has issues https://github.com/OCA/maintainer-quality-tools/issues/545
+ W504
+ # Black creates whitespace before colon
+ E203
diff --git a/third_party/python/zipp/.pre-commit-config.yaml b/third_party/python/zipp/.pre-commit-config.yaml
new file mode 100644
index 0000000000..fe46b8c596
--- /dev/null
+++ b/third_party/python/zipp/.pre-commit-config.yaml
@@ -0,0 +1,10 @@
+repos:
+- repo: https://github.com/psf/black
+ rev: 19.10b0
+ hooks:
+ - id: black
+
+- repo: https://github.com/asottile/blacken-docs
+ rev: v1.4.0
+ hooks:
+ - id: blacken-docs
diff --git a/third_party/python/zipp/.readthedocs.yml b/third_party/python/zipp/.readthedocs.yml
new file mode 100644
index 0000000000..8ae4468428
--- /dev/null
+++ b/third_party/python/zipp/.readthedocs.yml
@@ -0,0 +1,5 @@
+python:
+ version: 3
+ extra_requirements:
+ - docs
+ pip_install: true
diff --git a/third_party/python/zipp/.travis.yml b/third_party/python/zipp/.travis.yml
new file mode 100644
index 0000000000..17e45a678c
--- /dev/null
+++ b/third_party/python/zipp/.travis.yml
@@ -0,0 +1,18 @@
+dist: xenial
+language: python
+
+python:
+- 3.6
+- &latest_py3 3.8
+
+cache: pip
+
+install:
+- pip install tox tox-venv
+
+before_script:
+ # Disable IPv6. Ref travis-ci/travis-ci#8361
+ - if [ "${TRAVIS_OS_NAME}" == "linux" ]; then
+ sudo sh -c 'echo 0 > /proc/sys/net/ipv6/conf/all/disable_ipv6';
+ fi
+script: tox
diff --git a/third_party/python/zipp/CHANGES.rst b/third_party/python/zipp/CHANGES.rst
new file mode 100644
index 0000000000..159e43de2a
--- /dev/null
+++ b/third_party/python/zipp/CHANGES.rst
@@ -0,0 +1,137 @@
+v3.1.0
+======
+
+#47: ``.open`` now raises ``FileNotFoundError`` and
+``IsADirectoryError`` when appropriate.
+
+v3.0.0
+======
+
+#44: Merge with v1.2.0.
+
+v1.2.0
+======
+
+#44: ``zipp.Path.open()`` now supports a signature compatible with
+``pathlib.Path.open()``, accepting text (default) or binary
+modes and soliciting keyword parameters passed through to
+``io.TextIOWrapper`` (encoding, newline, etc). The stream is
+opened in text-mode by default now. ``open`` no
+longer accepts ``pwd`` as a positional argument and does not
+accept the ``force_zip64`` parameter at all. This is a
+backward-incompatible change for that single function.
+
+v2.2.1
+======
+
+#43: Merge with v1.1.1.
+
+v1.1.1
+======
+
+#43: Restored performance of implicit dir computation.
+
+v2.2.0
+======
+
+#36: Rebuild package with minimum Python version declared both
+in package metadata and in the python tag.
+
+v2.1.0
+======
+
+#32: Merge with v1.1.0.
+
+v1.1.0
+======
+
+#32: For read-only zip files, the complexity of ``.exists`` and
+``joinpath`` is now constant time instead of ``O(n)``, preventing
+quadratic time in common use-cases that had rendered large
+zip files unusable with Path. Big thanks to Benjy Weinberger
+for the bug report and contributed fix (#33).
+
+v2.0.1
+======
+
+#30: Corrected version inference (from jaraco/skeleton#12).
+
+v2.0.0
+======
+
+Require Python 3.6 or later.
+
+v1.0.0
+======
+
+Re-release of 0.6 to correspond with release as found in
+Python 3.8.
+
+v0.6.0
+======
+
+#12: When adding implicit dirs, ensure that ancestral directories
+are added and that duplicates are excluded.
+
+The library now relies on
+`more_itertools <https://pypi.org/project/more_itertools>`_.
+
+v0.5.2
+======
+
+#7: Parent of a directory now actually returns the parent.
+
+v0.5.1
+======
+
+Declared package as backport.
+
+v0.5.0
+======
+
+Add ``.joinpath()`` method and ``.parent`` property.
+
+Now a backport release of the ``zipfile.Path`` class.
+
+v0.4.0
+======
+
+#4: Add support for zip files with implied directories.
+
+v0.3.3
+======
+
+#3: Fix issue where ``.name`` on a directory was empty.
+
+v0.3.2
+======
+
+#2: Fix TypeError on Python 2.7 when classic division is used.
+
+v0.3.1
+======
+
+#1: Fix TypeError on Python 3.5 when joining to a path-like object.
+
+v0.3.0
+======
+
+Add support for constructing a ``zipp.Path`` from any path-like
+object.
+
+``zipp.Path`` is now a new-style class on Python 2.7.
+
+v0.2.1
+======
+
+Fix issue with ``__str__``.
+
+v0.2.0
+======
+
+Drop reliance on future-fstrings.
+
+v0.1.0
+======
+
+Initial release with basic functionality.
diff --git a/third_party/python/zipp/LICENSE b/third_party/python/zipp/LICENSE
new file mode 100644
index 0000000000..353924be0e
--- /dev/null
+++ b/third_party/python/zipp/LICENSE
@@ -0,0 +1,19 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/third_party/python/zipp/PKG-INFO b/third_party/python/zipp/PKG-INFO
new file mode 100644
index 0000000000..1898e121c9
--- /dev/null
+++ b/third_party/python/zipp/PKG-INFO
@@ -0,0 +1,45 @@
+Metadata-Version: 2.1
+Name: zipp
+Version: 3.1.0
+Summary: Backport of pathlib-compatible object wrapper for zip files
+Home-page: https://github.com/jaraco/zipp
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Description: .. image:: https://img.shields.io/pypi/v/zipp.svg
+ :target: `PyPI link`_
+
+ .. image:: https://img.shields.io/pypi/pyversions/zipp.svg
+ :target: `PyPI link`_
+
+ .. _PyPI link: https://pypi.org/project/zipp
+
+ .. image:: https://dev.azure.com/jaraco/zipp/_apis/build/status/jaraco.zipp?branchName=master
+ :target: https://dev.azure.com/jaraco/zipp/_build/latest?definitionId=1&branchName=master
+
+ .. image:: https://img.shields.io/travis/jaraco/zipp/master.svg
+ :target: https://travis-ci.org/jaraco/zipp
+
+ .. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+ .. image:: https://img.shields.io/appveyor/ci/jaraco/zipp/master.svg
+ :target: https://ci.appveyor.com/project/jaraco/zipp/branch/master
+
+ .. .. image:: https://readthedocs.org/projects/zipp/badge/?version=latest
+ .. :target: https://zipp.readthedocs.io/en/latest/?badge=latest
+
+
+ A pathlib-compatible Zipfile object wrapper. A backport of the
+ `Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.6
+Provides-Extra: testing
+Provides-Extra: docs
diff --git a/third_party/python/zipp/README.rst b/third_party/python/zipp/README.rst
new file mode 100644
index 0000000000..37bf0856fa
--- /dev/null
+++ b/third_party/python/zipp/README.rst
@@ -0,0 +1,27 @@
+.. image:: https://img.shields.io/pypi/v/zipp.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/zipp
+
+.. image:: https://dev.azure.com/jaraco/zipp/_apis/build/status/jaraco.zipp?branchName=master
+ :target: https://dev.azure.com/jaraco/zipp/_build/latest?definitionId=1&branchName=master
+
+.. image:: https://img.shields.io/travis/jaraco/zipp/master.svg
+ :target: https://travis-ci.org/jaraco/zipp
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://img.shields.io/appveyor/ci/jaraco/zipp/master.svg
+ :target: https://ci.appveyor.com/project/jaraco/zipp/branch/master
+
+.. .. image:: https://readthedocs.org/projects/zipp/badge/?version=latest
+.. :target: https://zipp.readthedocs.io/en/latest/?badge=latest
+
+
+A pathlib-compatible Zipfile object wrapper. A backport of the
+`Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_.
diff --git a/third_party/python/zipp/appveyor.yml b/third_party/python/zipp/appveyor.yml
new file mode 100644
index 0000000000..6a1c99a98f
--- /dev/null
+++ b/third_party/python/zipp/appveyor.yml
@@ -0,0 +1,24 @@
+environment:
+
+ APPVEYOR: true
+
+ matrix:
+ - PYTHON: "C:\\Python36-x64"
+ - PYTHON: "C:\\Python38-x64"
+
+install:
+ # symlink python from a directory with a space
+ - "mklink /d \"C:\\Program Files\\Python\" %PYTHON%"
+ - "SET PYTHON=\"C:\\Program Files\\Python\""
+ - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
+
+build: off
+
+cache:
+ - '%LOCALAPPDATA%\pip\Cache'
+
+test_script:
+ - "python -m pip install -U tox tox-venv virtualenv"
+ - "tox"
+
+version: '{build}'
diff --git a/third_party/python/zipp/azure-pipelines.yml b/third_party/python/zipp/azure-pipelines.yml
new file mode 100644
index 0000000000..3e80bf443d
--- /dev/null
+++ b/third_party/python/zipp/azure-pipelines.yml
@@ -0,0 +1,71 @@
+# Create the project in Azure with:
+# az devops project create --name $name --organization https://dev.azure.com/$org/ --visibility public
+# then configure the pipelines (through web UI)
+
+trigger:
+ branches:
+ include:
+ - '*'
+ tags:
+ include:
+ - '*'
+
+pool:
+ vmimage: 'Ubuntu-18.04'
+
+variables:
+- group: Azure secrets
+
+stages:
+- stage: Test
+ jobs:
+
+ - job: 'Test'
+ strategy:
+ matrix:
+ Python36:
+ python.version: '3.6'
+ Python38:
+ python.version: '3.8'
+ maxParallel: 4
+
+ steps:
+ - task: UsePythonVersion@0
+ inputs:
+ versionSpec: '$(python.version)'
+ architecture: 'x64'
+
+ - script: python -m pip install tox
+ displayName: 'Install tox'
+
+ - script: |
+ tox -- --junit-xml=test-results.xml
+ displayName: 'run tests'
+
+ - task: PublishTestResults@2
+ inputs:
+ testResultsFiles: '**/test-results.xml'
+ testRunTitle: 'Python $(python.version)'
+ condition: succeededOrFailed()
+
+- stage: Publish
+ dependsOn: Test
+ jobs:
+ - job: 'Publish'
+
+ steps:
+ - task: UsePythonVersion@0
+ inputs:
+ versionSpec: '3.8'
+ architecture: 'x64'
+
+ - script: python -m pip install tox
+ displayName: 'Install tox'
+
+ - script: |
+ tox -e release
+ env:
+ TWINE_PASSWORD: $(PyPI-token)
+ displayName: 'publish to PyPI'
+
+ condition: contains(variables['Build.SourceBranch'], 'tags')
diff --git a/third_party/python/zipp/conftest.py b/third_party/python/zipp/conftest.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/zipp/conftest.py
diff --git a/third_party/python/zipp/pyproject.toml b/third_party/python/zipp/pyproject.toml
new file mode 100644
index 0000000000..6ee7df2313
--- /dev/null
+++ b/third_party/python/zipp/pyproject.toml
@@ -0,0 +1,8 @@
+[build-system]
+requires = ["setuptools>=42", "wheel", "setuptools_scm[toml]>=3.4.1"]
+build-backend = "setuptools.build_meta"
+
+[tool.black]
+skip-string-normalization = true
+
+[tool.setuptools_scm]
diff --git a/third_party/python/zipp/setup.cfg b/third_party/python/zipp/setup.cfg
new file mode 100644
index 0000000000..148267ce8d
--- /dev/null
+++ b/third_party/python/zipp/setup.cfg
@@ -0,0 +1,39 @@
+[metadata]
+license_file = LICENSE
+name = zipp
+author = Jason R. Coombs
+author_email = jaraco@jaraco.com
+description = Backport of pathlib-compatible object wrapper for zip files
+long_description = file:README.rst
+url = https://github.com/jaraco/zipp
+classifiers =
+ Development Status :: 5 - Production/Stable
+ Intended Audience :: Developers
+ License :: OSI Approved :: MIT License
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3 :: Only
+
+[options]
+py_modules = zipp
+packages = find:
+include_package_data = true
+python_requires = >=3.6
+install_requires =
+setup_requires = setuptools_scm[toml] >= 3.4.1
+
+[options.extras_require]
+testing =
+
+ jaraco.itertools
+ func-timeout
+docs =
+ sphinx
+ jaraco.packaging >= 3.2
+ rst.linker >= 1.9
+
+[options.entry_points]
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/third_party/python/zipp/setup.py b/third_party/python/zipp/setup.py
new file mode 100644
index 0000000000..bac24a43d9
--- /dev/null
+++ b/third_party/python/zipp/setup.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+
+import setuptools
+
+if __name__ == "__main__":
+ setuptools.setup()
diff --git a/third_party/python/zipp/skeleton.md b/third_party/python/zipp/skeleton.md
new file mode 100644
index 0000000000..340b65cf50
--- /dev/null
+++ b/third_party/python/zipp/skeleton.md
@@ -0,0 +1,161 @@
+# Overview
+
+This project is merged with [skeleton](https://github.com/jaraco/skeleton). What is skeleton? It's the scaffolding of a Python project jaraco [introduced in his blog](https://blog.jaraco.com/a-project-skeleton-for-python-projects/). It seeks to provide a means to re-use techniques and inherit advances when managing projects for distribution.
+
+## An SCM-Managed Approach
+
+While maintaining dozens of projects in PyPI, jaraco derives best practices for project distribution and publishes them in the [skeleton repo](https://github.com/jaraco/skeleton), a Git repo capturing the evolution and culmination of these best practices.
+
+It's intended to be used by a new or existing project to adopt these honed and proven practices. Adopters are encouraged to use the project directly and maintain only a small deviation from the technique, to make their own fork for more substantial changes unique to their environment or preferences, or simply to adopt the skeleton once and abandon it thereafter.
+
+The primary advantage to using an SCM for maintaining these techniques is that those tools help facilitate the merge between the template and its adopting projects.
+
+Another advantage to using an SCM-managed approach is that tools like GitHub recognize that a change in the skeleton is the _same change_ across all projects that merge with that skeleton. Without the ancestry, with a traditional copy/paste approach, a [commit like this](https://github.com/jaraco/skeleton/commit/12eed1326e1bc26ce256e7b3f8cd8d3a5beab2d5) would produce notifications in the upstream project issue for each and every application of the change, but because it's centralized, GitHub provides just the one notification when the change is added to the skeleton.
+
+# Usage
+
+## new projects
+
+To use skeleton for a new project, simply pull the skeleton into a new project:
+
+```
+$ git init my-new-project
+$ cd my-new-project
+$ git pull gh://jaraco/skeleton
+```
+
+Now customize the project to suit your individual project needs.
+
+## existing projects
+
+If you have an existing project, you can still incorporate the skeleton by merging it into the codebase.
+
+```
+$ git merge skeleton --allow-unrelated-histories
+```
+
+The `--allow-unrelated-histories` flag is necessary because the history from the skeleton was previously unrelated to the existing codebase. Resolve any merge conflicts and commit them to master, and the project is now based on the shared skeleton.
+
+## Updating
+
+Whenever a change is needed or desired for the general technique for packaging, it can be made in the skeleton project and then merged into each of the derived projects as needed, recommended before each release. As a result, features and best practices for packaging are centrally maintained and readily trickle into a whole suite of packages. This technique lowers the amount of tedious work necessary to create or maintain a project, and coupled with other techniques like continuous integration and deployment, lowers the cost of creating and maintaining refined Python projects to just a few, familiar Git operations.
+
+Thereafter, the target project can make whatever customizations it deems relevant to the scaffolding. The project may even at some point decide that the divergence is too great to merit renewed merging with the original skeleton. This approach applies maximal guidance while creating minimal constraints.
+
+# Features
+
+The features/techniques employed by the skeleton include:
+
+- PEP 517/518-based build relying on Setuptools as the build tool
+- Setuptools declarative configuration using setup.cfg
+- tox for running tests
+- A README.rst as reStructuredText with some popular badges, but with Read the Docs and AppVeyor badges commented out
+- A CHANGES.rst file intended for publishing release notes about the project
+- Use of [Black](https://black.readthedocs.io/en/stable/) for code formatting (disabled on unsupported Python 3.5 and earlier)
+
+## Packaging Conventions
+
+A pyproject.toml is included to enable PEP 517 and PEP 518 compatibility and declares the requirements necessary to build the project on Setuptools (a minimum version compatible with setup.cfg declarative config).
+
+The setup.cfg file implements the following features:
+
+- Assumes universal wheel for release
+- Advertises the project's LICENSE file (MIT by default)
+- Reads the README.rst file into the long description
+- Some common Trove classifiers
+- Includes all packages discovered in the repo
+- Data files in the package are also included (not just Python files)
+- Declares the required Python versions
+- Declares install requirements (empty by default)
+- Declares setup requirements for legacy environments
+- Supplies two 'extras':
+ - testing: requirements for running tests
+ - docs: requirements for building docs
+ - these extras split the declaration into "upstream" (requirements as declared by the skeleton) and "local" (those specific to the local project); these markers help avoid merge conflicts
+- Placeholder for defining entry points
+
+Additionally, the setup.py file declares `use_scm_version` which relies on [setuptools_scm](https://pypi.org/project/setuptools_scm) to do two things:
+
+- derive the project version from SCM tags
+- ensure that all files committed to the repo are automatically included in releases
+
+## Running Tests
+
+The skeleton assumes the developer has [tox](https://pypi.org/project/tox) installed. The developer is expected to run `tox` to run tests on the current Python version using [pytest](https://pypi.org/project/pytest).
+
+Other environments (invoked with `tox -e {name}`) supplied include:
+
+ - a `docs` environment to build the documentation
+ - a `release` environment to publish the package to PyPI
+
+A pytest.ini is included to define common options around running tests. In particular:
+
+- rely on default test discovery in the current directory
+- avoid recursing into common directories not containing tests
+- run doctests on modules and invoke Flake8 tests
+- in doctests, allow Unicode literals and regular literals to match, allowing doctests to run on Python 2 and 3. Also enable ELLIPSIS, a default that would be undone by supplying the prior option.
+- filters out known warnings caused by libraries/functionality included by the skeleton
+
+The skeleton relies on a .flake8 file to correct some default behaviors:
+
+- disable mutually incompatible rules W503 and W504
+- support for Black format
+
+## Continuous Integration
+
+The project is pre-configured to run tests through multiple CI providers.
+
+### Azure Pipelines
+
+[Azure Pipelines](https://azure.microsoft.com/en-us/services/devops/pipelines/) are the preferred provider as they provide free, fast, multi-platform services. See azure-pipelines.yml for more details.
+
+Features include:
+
+- test against multiple Python versions
+- run on Ubuntu Bionic
+
+### Travis CI
+
+[Travis CI](https://travis-ci.org) is configured through .travis.yml. Any new project must be enabled either through their web site or with the `travis enable` command.
+
+Features include:
+- test against Python 3
+- run on Ubuntu Xenial
+- correct for broken IPv6
+
+### AppVeyor
+
+A minimal template for running under AppVeyor (Windows) is provided.
+
+### Continuous Deployments
+
+In addition to running tests, an additional deploy stage is configured to automatically release tagged commits to PyPI using [API tokens](https://pypi.org/help/#apitoken). The release process expects an authorized token to be configured with Azure as the `Azure secrets` variable group. This variable group needs to be created only once per organization. For example:
+
+```
+# create a resource group if none exists
+az group create --name main --location eastus2
+# create the vault (try different names until something works)
+az keyvault create --name secrets007 --resource-group main
+# create the secret
+az keyvault secret set --vault-name secrets007 --name PyPI-token --value $token
+```
+
+Then, in the web UI for the project's Pipelines Library, create the `Azure secrets` variable group referencing the key vault name.
+
+For more details, see [this blog entry](https://blog.jaraco.com/configuring-azure-pipelines-with-secets/).
+
+## Building Documentation
+
+Documentation is automatically built by [Read the Docs](https://readthedocs.org) when the project is registered with it, by way of the .readthedocs.yml file. To test the docs build manually, a tox env may be invoked as `tox -e docs`. Both techniques rely on the dependencies declared in `setup.cfg/options.extras_require.docs`.
+
+In addition to building the Sphinx docs scaffolded in `docs/`, the docs build a `history.html` file that first injects release dates and hyperlinks into the CHANGES.rst before incorporating it as history in the docs.
+
+## Cutting releases
+
+By default, tagged commits are released through the continuous integration deploy stage.
+
+Releases may also be cut manually by invoking the tox environment `release` with the PyPI token set as the TWINE_PASSWORD:
+
+```
+TWINE_PASSWORD={token} tox -e release
+```
diff --git a/third_party/python/zipp/test_zipp.py b/third_party/python/zipp/test_zipp.py
new file mode 100644
index 0000000000..a6fbf39f6d
--- /dev/null
+++ b/third_party/python/zipp/test_zipp.py
@@ -0,0 +1,261 @@
+import io
+import zipfile
+import contextlib
+import pathlib
+import unittest
+import tempfile
+import shutil
+import string
+
+import jaraco.itertools
+import func_timeout
+
+import zipp
+
+consume = tuple
+
+
+def add_dirs(zf):
+ """
+ Given a writable zip file zf, inject directory entries for
+ any directories implied by the presence of children.
+ """
+ for name in zipp.CompleteDirs._implied_dirs(zf.namelist()):
+ zf.writestr(name, b"")
+ return zf
+
+
+def build_alpharep_fixture():
+ """
+ Create a zip file with this structure:
+
+ .
+ ├── a.txt
+ ├── b
+ │ ├── c.txt
+ │ ├── d
+ │ │ └── e.txt
+ │ └── f.txt
+ └── g
+ └── h
+ └── i.txt
+
+ This fixture has the following key characteristics:
+
+ - a file at the root (a)
+ - a file two levels deep (b/d/e)
+ - multiple files in a directory (b/c, b/f)
+ - a directory containing only a directory (g/h)
+
+ "alpha" because it uses alphabet
+ "rep" because it's a representative example
+ """
+ data = io.BytesIO()
+ zf = zipfile.ZipFile(data, "w")
+ zf.writestr("a.txt", b"content of a")
+ zf.writestr("b/c.txt", b"content of c")
+ zf.writestr("b/d/e.txt", b"content of e")
+ zf.writestr("b/f.txt", b"content of f")
+ zf.writestr("g/h/i.txt", b"content of i")
+ zf.filename = "alpharep.zip"
+ return zf
+
+
+@contextlib.contextmanager
+def temp_dir():
+ tmpdir = tempfile.mkdtemp()
+ try:
+ yield pathlib.Path(tmpdir)
+ finally:
+ shutil.rmtree(tmpdir)
+
+
+class TestPath(unittest.TestCase):
+ def setUp(self):
+ self.fixtures = contextlib.ExitStack()
+ self.addCleanup(self.fixtures.close)
+
+ def zipfile_alpharep(self):
+ with self.subTest():
+ yield build_alpharep_fixture()
+ with self.subTest():
+ yield add_dirs(build_alpharep_fixture())
+
+ def zipfile_ondisk(self):
+ tmpdir = pathlib.Path(self.fixtures.enter_context(temp_dir()))
+ for alpharep in self.zipfile_alpharep():
+ buffer = alpharep.fp
+ alpharep.close()
+ path = tmpdir / alpharep.filename
+ with path.open("wb") as strm:
+ strm.write(buffer.getvalue())
+ yield path
+
+ def test_iterdir_and_types(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ assert root.is_dir()
+ a, b, g = root.iterdir()
+ assert a.is_file()
+ assert b.is_dir()
+ assert g.is_dir()
+ c, f, d = b.iterdir()
+ assert c.is_file() and f.is_file()
+ e, = d.iterdir()
+ assert e.is_file()
+ h, = g.iterdir()
+ i, = h.iterdir()
+ assert i.is_file()
+
+ def test_subdir_is_dir(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ assert (root / 'b').is_dir()
+ assert (root / 'b/').is_dir()
+ assert (root / 'g').is_dir()
+ assert (root / 'g/').is_dir()
+
+ def test_open(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ a, b, g = root.iterdir()
+ with a.open() as strm:
+ data = strm.read()
+ assert data == "content of a"
+
+ def test_open_write(self):
+ """
+ If the zipfile is open for write, it should be possible to
+ write bytes or text to it.
+ """
+ zf = zipp.Path(zipfile.ZipFile(io.BytesIO(), mode='w'))
+ with zf.joinpath('file.bin').open('wb') as strm:
+ strm.write(b'binary contents')
+ with zf.joinpath('file.txt').open('w') as strm:
+ strm.write('text file')
+
+ def test_open_extant_directory(self):
+ """
+ Attempting to open a directory raises IsADirectoryError.
+ """
+ zf = zipp.Path(add_dirs(build_alpharep_fixture()))
+ with self.assertRaises(IsADirectoryError):
+ zf.joinpath('b').open()
+
+ def test_open_missing_directory(self):
+ """
+ Attempting to open a missing directory raises FileNotFoundError.
+ """
+ zf = zipp.Path(add_dirs(build_alpharep_fixture()))
+ with self.assertRaises(FileNotFoundError):
+ zf.joinpath('z').open()
+
+ def test_read(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ a, b, g = root.iterdir()
+ assert a.read_text() == "content of a"
+ assert a.read_bytes() == b"content of a"
+
+ def test_joinpath(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ a = root.joinpath("a")
+ assert a.is_file()
+ e = root.joinpath("b").joinpath("d").joinpath("e.txt")
+ assert e.read_text() == "content of e"
+
+ def test_traverse_truediv(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ a = root / "a"
+ assert a.is_file()
+ e = root / "b" / "d" / "e.txt"
+ assert e.read_text() == "content of e"
+
+ def test_traverse_simplediv(self):
+ """
+ Disable the __future__.division when testing traversal.
+ """
+ for alpharep in self.zipfile_alpharep():
+ code = compile(
+ source="zipp.Path(alpharep) / 'a'",
+ filename="(test)",
+ mode="eval",
+ dont_inherit=True,
+ )
+ eval(code)
+
+ def test_pathlike_construction(self):
+ """
+ zipp.Path should be constructable from a path-like object
+ """
+ for zipfile_ondisk in self.zipfile_ondisk():
+ pathlike = pathlib.Path(str(zipfile_ondisk))
+ zipp.Path(pathlike)
+
+ def test_traverse_pathlike(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ root / pathlib.Path("a")
+
+ def test_parent(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ assert (root / 'a').parent.at == ''
+ assert (root / 'a' / 'b').parent.at == 'a/'
+
+ def test_dir_parent(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ assert (root / 'b').parent.at == ''
+ assert (root / 'b/').parent.at == ''
+
+ def test_missing_dir_parent(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ assert (root / 'missing dir/').parent.at == ''
+
+ def test_mutability(self):
+ """
+ If the underlying zipfile is changed, the Path object should
+ reflect that change.
+ """
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ a, b, g = root.iterdir()
+ alpharep.writestr('foo.txt', 'foo')
+ alpharep.writestr('bar/baz.txt', 'baz')
+ assert any(
+ child.name == 'foo.txt'
+ for child in root.iterdir())
+ assert (root / 'foo.txt').read_text() == 'foo'
+ baz, = (root / 'bar').iterdir()
+ assert baz.read_text() == 'baz'
+
+ HUGE_ZIPFILE_NUM_ENTRIES = 2 ** 13
+
+ def huge_zipfile(self):
+        """Create a read-only zipfile with a huge number of entries."""
+ strm = io.BytesIO()
+ zf = zipfile.ZipFile(strm, "w")
+ for entry in map(str, range(self.HUGE_ZIPFILE_NUM_ENTRIES)):
+ zf.writestr(entry, entry)
+ zf.mode = 'r'
+ return zf
+
+ def test_joinpath_constant_time(self):
+ """
+        Ensure joinpath on items in the zipfile is constant time per item.
+ """
+ root = zipp.Path(self.huge_zipfile())
+ entries = jaraco.itertools.Counter(root.iterdir())
+ for entry in entries:
+ entry.joinpath('suffix')
+ # Check the file iterated all items
+ assert entries.count == self.HUGE_ZIPFILE_NUM_ENTRIES
+
+ @func_timeout.func_set_timeout(3)
+ def test_implied_dirs_performance(self):
+ data = ['/'.join(string.ascii_lowercase + str(n)) for n in range(10000)]
+ zipp.CompleteDirs._implied_dirs(data)
diff --git a/third_party/python/zipp/tox.ini b/third_party/python/zipp/tox.ini
new file mode 100644
index 0000000000..b37e9890e0
--- /dev/null
+++ b/third_party/python/zipp/tox.ini
@@ -0,0 +1,41 @@
+[tox]
+envlist = python
+minversion = 3.2
+# https://github.com/jaraco/skeleton/issues/6
+tox_pip_extensions_ext_venv_update = true
+# Ensure that a late version of pip is used even on tox-venv.
+requires =
+ tox-pip-version>=0.0.6
+ tox-venv
+
+
+[testenv]
+deps =
+pip_version = pip
+commands =
+ python -m unittest discover
+usedevelop = True
+extras = testing
+
+[testenv:docs]
+extras =
+ docs
+ testing
+changedir = docs
+commands =
+ python -m sphinx . {toxinidir}/build/html
+
+[testenv:release]
+skip_install = True
+deps =
+ pep517>=0.5
+ twine[keyring]>=1.13
+ path
+passenv =
+ TWINE_PASSWORD
+setenv =
+ TWINE_USERNAME = {env:TWINE_USERNAME:__token__}
+commands =
+ python -c "import path; path.Path('dist').rmtree_p()"
+ python -m pep517.build .
+ python -m twine upload dist/*
diff --git a/third_party/python/zipp/zipp.py b/third_party/python/zipp/zipp.py
new file mode 100644
index 0000000000..cc0fe5f038
--- /dev/null
+++ b/third_party/python/zipp/zipp.py
@@ -0,0 +1,285 @@
+import io
+import posixpath
+import zipfile
+import itertools
+import contextlib
+import sys
+
+if sys.version_info < (3, 7):
+ from collections import OrderedDict
+else:
+ OrderedDict = dict
+
+
+def _parents(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all parents of that path.
+
+ >>> list(_parents('b/d'))
+ ['b']
+ >>> list(_parents('/b/d/'))
+ ['/b']
+ >>> list(_parents('b/d/f/'))
+ ['b/d', 'b']
+ >>> list(_parents('b'))
+ []
+ >>> list(_parents(''))
+ []
+ """
+ return itertools.islice(_ancestry(path), 1, None)
+
+
+def _ancestry(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all elements of that path
+
+ >>> list(_ancestry('b/d'))
+ ['b/d', 'b']
+ >>> list(_ancestry('/b/d/'))
+ ['/b/d', '/b']
+ >>> list(_ancestry('b/d/f/'))
+ ['b/d/f', 'b/d', 'b']
+ >>> list(_ancestry('b'))
+ ['b']
+ >>> list(_ancestry(''))
+ []
+ """
+ path = path.rstrip(posixpath.sep)
+ while path and path != posixpath.sep:
+ yield path
+ path, tail = posixpath.split(path)
+
+
+_dedupe = OrderedDict.fromkeys
+"""Deduplicate an iterable in original order"""
+
+
+def _difference(minuend, subtrahend):
+ """
+ Return items in minuend not in subtrahend, retaining order
+ with O(1) lookup.
+ """
+ return itertools.filterfalse(set(subtrahend).__contains__, minuend)
+
+
+class CompleteDirs(zipfile.ZipFile):
+ """
+ A ZipFile subclass that ensures that implied directories
+ are always included in the namelist.
+ """
+
+ @staticmethod
+ def _implied_dirs(names):
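+        # For example, given names ['a/b/c.txt', 'a/d.txt'] this yields the
+        # missing directory entries 'a/b/' and 'a/', deduplicated and in the
+        # order they are first implied.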
+ parents = itertools.chain.from_iterable(map(_parents, names))
+ as_dirs = (p + posixpath.sep for p in parents)
+ return _dedupe(_difference(as_dirs, names))
+
+ def namelist(self):
+ names = super(CompleteDirs, self).namelist()
+ return names + list(self._implied_dirs(names))
+
+ def _name_set(self):
+ return set(self.namelist())
+
+ def resolve_dir(self, name):
+ """
+ If the name represents a directory, return that name
+ as a directory (with the trailing slash).
+ """
+ names = self._name_set()
+ dirname = name + '/'
+ dir_match = name not in names and dirname in names
+ return dirname if dir_match else name
+
+ @classmethod
+ def make(cls, source):
+ """
+ Given a source (filename or zipfile), return an
+ appropriate CompleteDirs subclass.
+ """
+ if isinstance(source, CompleteDirs):
+ return source
+
+ if not isinstance(source, zipfile.ZipFile):
+ return cls(_pathlib_compat(source))
+
+ # Only allow for FastPath when supplied zipfile is read-only
+ if 'r' not in source.mode:
+ cls = CompleteDirs
+
+ res = cls.__new__(cls)
+ vars(res).update(vars(source))
+ return res
+
+
+class FastLookup(CompleteDirs):
+ """
+ ZipFile subclass to ensure implicit
+ dirs exist and are resolved rapidly.
+ """
+ def namelist(self):
+ with contextlib.suppress(AttributeError):
+ return self.__names
+ self.__names = super(FastLookup, self).namelist()
+ return self.__names
+
+ def _name_set(self):
+ with contextlib.suppress(AttributeError):
+ return self.__lookup
+ self.__lookup = super(FastLookup, self)._name_set()
+ return self.__lookup
+
+
+def _pathlib_compat(path):
+ """
+ For path-like objects, convert to a filename for compatibility
+ on Python 3.6.1 and earlier.
+ """
+ try:
+ return path.__fspath__()
+ except AttributeError:
+ return str(path)
+
+
+class Path:
+ """
+ A pathlib-compatible interface for zip files.
+
+ Consider a zip file with this structure::
+
+ .
+ ├── a.txt
+ └── b
+ ├── c.txt
+ └── d
+ └── e.txt
+
+ >>> data = io.BytesIO()
+ >>> zf = zipfile.ZipFile(data, 'w')
+ >>> zf.writestr('a.txt', 'content of a')
+ >>> zf.writestr('b/c.txt', 'content of c')
+ >>> zf.writestr('b/d/e.txt', 'content of e')
+ >>> zf.filename = 'abcde.zip'
+
+ Path accepts the zipfile object itself or a filename
+
+ >>> root = Path(zf)
+
+ From there, several path operations are available.
+
+ Directory iteration (including the zip file itself):
+
+ >>> a, b = root.iterdir()
+ >>> a
+ Path('abcde.zip', 'a.txt')
+ >>> b
+ Path('abcde.zip', 'b/')
+
+ name property:
+
+ >>> b.name
+ 'b'
+
+ join with divide operator:
+
+ >>> c = b / 'c.txt'
+ >>> c
+ Path('abcde.zip', 'b/c.txt')
+ >>> c.name
+ 'c.txt'
+
+ Read text:
+
+ >>> c.read_text()
+ 'content of c'
+
+ existence:
+
+ >>> c.exists()
+ True
+ >>> (b / 'missing.txt').exists()
+ False
+
+ Coercion to string:
+
+ >>> str(c)
+ 'abcde.zip/b/c.txt'
+ """
+
+ __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
+
+ def __init__(self, root, at=""):
+ self.root = FastLookup.make(root)
+ self.at = at
+
+ def open(self, mode='r', *args, pwd=None, **kwargs):
+ """
+ Open this entry as text or binary following the semantics
+ of ``pathlib.Path.open()`` by passing arguments through
+ to io.TextIOWrapper().
+ """
+ if self.is_dir():
+ raise IsADirectoryError(self)
+ zip_mode = mode[0]
+ if not self.exists() and zip_mode == 'r':
+ raise FileNotFoundError(self)
+ stream = self.root.open(self.at, zip_mode, pwd=pwd)
+ if 'b' in mode:
+ if args or kwargs:
+ raise ValueError("encoding args invalid for binary operation")
+ return stream
+ return io.TextIOWrapper(stream, *args, **kwargs)
+
+ @property
+ def name(self):
+ return posixpath.basename(self.at.rstrip("/"))
+
+ def read_text(self, *args, **kwargs):
+ with self.open('r', *args, **kwargs) as strm:
+ return strm.read()
+
+ def read_bytes(self):
+ with self.open('rb') as strm:
+ return strm.read()
+
+ def _is_child(self, path):
+ return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
+
+ def _next(self, at):
+ return Path(self.root, at)
+
+ def is_dir(self):
+ return not self.at or self.at.endswith("/")
+
+ def is_file(self):
+ return not self.is_dir()
+
+ def exists(self):
+ return self.at in self.root._name_set()
+
+ def iterdir(self):
+ if not self.is_dir():
+ raise ValueError("Can't listdir a file")
+ subs = map(self._next, self.root.namelist())
+ return filter(self._is_child, subs)
+
+ def __str__(self):
+ return posixpath.join(self.root.filename, self.at)
+
+ def __repr__(self):
+ return self.__repr.format(self=self)
+
+ def joinpath(self, add):
+ next = posixpath.join(self.at, _pathlib_compat(add))
+ return self._next(self.root.resolve_dir(next))
+
+ __truediv__ = joinpath
+
+ @property
+ def parent(self):
+ parent_at = posixpath.dirname(self.at.rstrip('/'))
+ if parent_at:
+ parent_at += '/'
+ return self._next(parent_at)